Report generated on 11-Jan-2023 at 18:14:12 by pytest-html v1.19.0
| Packages | {'pytest': '7.1.2', 'py': '1.11.0', 'pluggy': '0.13.1'} |
| Platform | Linux-5.16.5-051605-generic-x86_64-with-glibc2.29 |
| Plugins | {'xdist': '2.1.0', 'timeout': '1.3.2', 'html': '1.19.0', 'metadata': '1.7.0', 'json-report': '1.5.0', 'forked': '1.4.0', 'cov': '2.11.1'} |
| Python | 3.8.10 |
3316 tests ran in 77.83 seconds.
(Un)check the boxes to filter the results.
3279 passed, 0 skipped, 0 failed, 0 errors, 29 expected failures, 8 unexpected passes

| Result | Test | Duration | Links |
|---|---|---|---|
| No results found. Try adjusting the filters. | |||
| XFailed | pytorch_tests/test_clamp.py::TestClamp::test_clamp_min_greater[ ie_device:CPU - precision:FP32 ] | 0.02 | |
|
self = <test_clamp.TestClamp object at 0x7f397ec5de80>, ie_device = 'CPU', precision = 'FP32', ir_version = '11' @pytest.mark.xfail(reason='OpenVINO clamp does not support min > max') def test_clamp_min_greater(self, ie_device, precision, ir_version): > self._test(*self.create_model(1.0, 0.0), ie_device, precision, ir_version) test_clamp.py:41: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ pytorch_layer_test_class.py:81: in _test compiled = core.compile_model(om, ie_device) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <openvino.runtime.ie_api.Core object at 0x7f397db6bef0> model = <Model: 'Model1073' inputs[ <ConstOutput: names[1] shape[?,?,?,?] type: f32> ] outputs[ <ConstOutput: names[4] shape[?,?,?,?] type: f32> ]> device_name = 'CPU', config = None def compile_model( self, model: Union[Model, str, Path], device_name: Optional[str] = None, config: Optional[dict] = None, ) -> CompiledModel: """Creates a compiled model. Creates a compiled model from a source Model object or reads model and creates a compiled model from IR / ONNX / PDPD file. This can be more efficient than using read_model + compile_model(model_in_memory_object) flow, especially for cases when caching is enabled and cached model is available. If device_name is not specified, the default OpenVINO device will be selected by AUTO plugin. Users can create as many compiled models as they need, and use them simultaneously (up to the limitation of the hardware resources). :param model: Model acquired from read_model function or a path to a model in IR / ONNX / PDPD format. :type model: Union[openvino.runtime.Model, str, pathlib.Path] :param device_name: Optional. Name of the device to load the model to. If not specified, the default OpenVINO device will be selected by AUTO plugin. :type device_name: str :param config: Optional dict of pairs: (property name, property value) relevant only for this load operation. 
:type config: dict, optional :return: A compiled model. :rtype: openvino.runtime.CompiledModel """ if device_name is None: return CompiledModel( super().compile_model(model, {} if config is None else config), ) return CompiledModel( > super().compile_model(model, device_name, {} if config is None else config), ) E RuntimeError: Check 'm_min <= m_max' failed at src/core/src/op/clamp.cpp:116: E While validating node 'v0::Clamp Clamp_413327 (Parameter_411629[0]:f32[?,?,?,?]) -> (dynamic[...])' with friendly_name 'Clamp_413327': E Attribute 'min' must be less or equal than 'max'. Got: 1 and 0 ../../../bin/intel64/Release/python_api/python3.8/openvino/runtime/ie_api.py:399: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_382.aten_clamp, %x.1 : Tensor): %self.max : float = prim::Constant[value=0.]() %self.min : float = prim::Constant[value=1.]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58280> input_array = array([[0.90247622, 7.68527131, 9.02379164, 1.70516486, 2.2346272 ], [5.300889 , 7.2831553 , 4.19919755, 3.030... 3.90239147, 7.93656487, 4.72168371, 3.03136055], [9.43842077, 2.48720598, 5.34117367, 2.50025354, 1.99253338]]) other_array = array([1.73870404]), types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32' ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58280>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[0, 7, 9, 1, 2], [5, 7, 4, 3, 9], [6, 7, 6, 3, 4], [1, 3, 7, 4, 3], [9, 2, 5, 2, 1]], dtype=int32), array([1.7387041], dtype=float32)) graph = 
graph(%self : __torch__.test_div.___torch_mangle_1185.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397befe6d0> im = <openvino._pyopenvino.InputModel object at 0x7f397befe680> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1282864 (Parameter_1282861[0]:i32[?,?], Parameter_1282862[0]:f32[?]) -> (dynamic[...])' with friendly_name 'Divide_1282864': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1185.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58310> input_array = array([[[3.90463578], [7.14026347], [4.99498432], [6.25772194], [6.65828409]], ...49 ]], [[5.61983315], [9.78386344], [5.08451843], [4.58871909], [6.50715271]]]) other_array = array([1.33363507]), types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32' ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58310>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[3], [7], [4], [6], [6]], [[7], [0], [1], [0]...], [[5], [9], [5], [4], [6]]], dtype=int32), array([1.3336351], dtype=float32)) graph = graph(%self : __torch__.test_div.___torch_mangle_1187.aten_div, %input_tensor.1 
: Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397befe860> im = <openvino._pyopenvino.InputModel object at 0x7f397befe6d0> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1282875 (Parameter_1282872[0]:i32[?,?,?], Parameter_1282873[0]:f32[?]) -> (dynamic[...])' with friendly_name 'Divide_1282875': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1187.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea583a0> input_array = array([[[[6.13562094, 4.03942058, 6.35617663, 8.90453805, 1.04801022], [4.63105617, 3.52061108, 1.18113494, 0...2233553, 1.27231074, 4.27402709, 9.03791957], [9.13755296, 0.66006885, 4.1248312 , 9.03186207, 8.67504731]]]]) other_array = array([3.16394232]), types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32' ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea583a0>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[[6, 4, 6, 8, 1], [4, 3, 1, 0, 2], [5, 4, 6, 0, 7], [8, 5, 1, 4, 9], [9, 0, 4, 9, 8]]]], dtype=int32), array([3.1639423], dtype=float32)) graph = 
graph(%self : __torch__.test_div.___torch_mangle_1189.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397bef92c0> im = <openvino._pyopenvino.InputModel object at 0x7f397bef9360> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1282886 (Parameter_1282883[0]:i32[?,?,?,?], Parameter_1282884[0]:f32[?]) -> (dynamic[...])' with friendly_name 'Divide_1282886': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1189.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58430> input_array = array([[[5.63621207], [7.97586911], [3.83646915], [1.00771288], [9.03086258]], ...487]], [[0.73638931], [6.62069904], [5.47134331], [2.88457466], [4.23490289]]]) other_array = array([[2.04584977], [1.5871927 ], [4.113846 ], [3.8487724 ], [1.6470067 ]]) types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58430>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[5], [7], [3], [1], [9]], [[9], [7], [9], [3]...), array([[2.0458498], [1.5871927], [4.113846 ], [3.8487723], [1.6470068]], dtype=float32)) graph = graph(%self 
: __torch__.test_div.___torch_mangle_1191.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397bf0f720> im = <openvino._pyopenvino.InputModel object at 0x7f397bf0f680> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1282897 (Parameter_1282894[0]:i32[?,?,?], Parameter_1282895[0]:f32[?,?]) -> (dynamic[...])' with friendly_name 'Divide_1282897': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1191.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea584c0> input_array = array([[0.90247622, 7.68527131, 9.02379164, 1.70516486, 2.2346272 ], [5.300889 , 7.2831553 , 4.19919755, 3.030... 3.90239147, 7.93656487, 4.72168371, 3.03136055], [9.43842077, 2.48720598, 5.34117367, 2.50025354, 1.99253338]]) other_array = array([1.73870404]), types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32' ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea584c0>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[0.9024762, 7.6852713, 9.023791 , 1.7051649, 2.2346272], [5.300889 , 7.2831554, 4.199198 , 3.0300689, 9...13606], [9.43842 , 2.487206 , 5.3411736, 
2.5002534, 1.9925333]], dtype=float32), array([1], dtype=int32)) graph = graph(%self : __torch__.test_div.___torch_mangle_1193.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397c50be00> im = <openvino._pyopenvino.InputModel object at 0x7f397c50bd60> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1282908 (Parameter_1282905[0]:f32[?,?], Parameter_1282906[0]:i32[?]) -> (dynamic[...])' with friendly_name 'Divide_1282908': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1193.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58550> input_array = array([[[3.90463578], [7.14026347], [4.99498432], [6.25772194], [6.65828409]], ...49 ]], [[5.61983315], [9.78386344], [5.08451843], [4.58871909], [6.50715271]]]) other_array = array([1.33363507]), types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32' ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58550>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[3.9046357 ], [7.1402636 ], [4.994984 ], [6.257722 ], [6.658284 ]], ...783863 ], [5.0845184 ], [4.588719 ], [6.5071526 ]]], dtype=float32), array([1], dtype=int32)) graph = graph(%self : 
__torch__.test_div.___torch_mangle_1195.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397c51eea0> im = <openvino._pyopenvino.InputModel object at 0x7f397c51ee00> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1282919 (Parameter_1282916[0]:f32[?,?,?], Parameter_1282917[0]:i32[?]) -> (dynamic[...])' with friendly_name 'Divide_1282919': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1195.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea585e0> input_array = array([[[[6.13562094, 4.03942058, 6.35617663, 8.90453805, 1.04801022], [4.63105617, 3.52061108, 1.18113494, 0...2233553, 1.27231074, 4.27402709, 9.03791957], [9.13755296, 0.66006885, 4.1248312 , 9.03186207, 8.67504731]]]]) other_array = array([3.16394232]), types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32' ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea585e0>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[[6.135621 , 4.0394206 , 6.356177 , 8.904538 , 1.0480102 ], [4.6310563 , 3.520611 , 1.1811349 , ... 
[9.137553 , 0.66006887, 4.124831 , 9.031862 , 8.675047 ]]]], dtype=float32), array([3], dtype=int32)) graph = graph(%self : __torch__.test_div.___torch_mangle_1197.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397c51a9f0> im = <openvino._pyopenvino.InputModel object at 0x7f397c51a950> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1282930 (Parameter_1282927[0]:f32[?,?,?,?], Parameter_1282928[0]:i32[?]) -> (dynamic[...])' with friendly_name 'Divide_1282930': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1197.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58670> input_array = array([[[5.63621207], [7.97586911], [3.83646915], [1.00771288], [9.03086258]], ...487]], [[0.73638931], [6.62069904], [5.47134331], [2.88457466], [4.23490289]]]) other_array = array([[2.04584977], [1.5871927 ], [4.113846 ], [3.8487724 ], [1.6470067 ]]) types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58670>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[5.636212 ], [7.975869 ], [3.8364692 ], [1.0077128 ], [9.030863 ]], ...7 ], [4.234903 ]]], dtype=float32), array([[2], [1], [4], [3], [1]], dtype=int32)) graph 
= graph(%self : __torch__.test_div.___torch_mangle_1199.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397c443c20> im = <openvino._pyopenvino.InputModel object at 0x7f397c443b80> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1282941 (Parameter_1282938[0]:f32[?,?,?], Parameter_1282939[0]:i32[?,?]) -> (dynamic[...])' with friendly_name 'Divide_1282941': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1199.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.01 | |
|
self = <test_div.TestDiv object at 0x7f397ea58700> input_array = array([[0.90247622, 7.68527131, 9.02379164, 1.70516486, 2.2346272 ], [5.300889 , 7.2831553 , 4.19919755, 3.030... 3.90239147, 7.93656487, 4.72168371, 3.03136055], [9.43842077, 2.48720598, 5.34117367, 2.50025354, 1.99253338]]) other_array = array([1.73870404]), types = (<class 'numpy.int32'>, <class 'numpy.int32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32' ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58700>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[0, 7, 9, 1, 2], [5, 7, 4, 3, 9], [6, 7, 6, 3, 4], [1, 3, 7, 4, 3], [9, 2, 5, 2, 1]], dtype=int32), array([1], dtype=int32)) graph = graph(%self : 
__torch__.test_div.___torch_mangle_1201.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397c45eb80> im = <openvino._pyopenvino.InputModel object at 0x7f397c45eae0> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) om.validate_nodes_and_infer_types() # OV infer: core = Core() compiled = core.compile_model(om, ie_device) infer_res = compiled(inputs) if hasattr(self, 'skip_framework') and self.skip_framework: warnings.warn('Framework is skipped') return # Framework infer: torch_inps = [torch.from_numpy(inp) for inp in inputs] fw_res = model(*torch_inps) if not isinstance(fw_res, (tuple)): fw_res = (fw_res,) output_list = list(infer_res.values()) assert len(fw_res) == len( output_list), f'number of outputs not equal, {len(fw_res)} != {len(output_list)}' # check if results dtypes match for fw_tensor, ov_tensor in zip(fw_res, output_list): if not isinstance(fw_tensor, torch.Tensor): if np.isscalar(fw_tensor): assert fw_tensor == np.array(ov_tensor).item() else: if isinstance(fw_tensor, list): ov_tensor = ov_tensor.tolist() assert ov_tensor == fw_tensor assert type(fw_tensor) == type(ov_tensor) continue > assert torch.tensor(np.array(ov_tensor)).dtype == fw_tensor.dtype, f"dtype validation failed: {torch.tensor(np.array(ov_tensor)).dtype} != {fw_tensor.dtype}" E AssertionError: dtype validation failed: torch.int32 != torch.float32 pytorch_layer_test_class.py:109: AssertionError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1201.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.01 | |
|
self = <test_div.TestDiv object at 0x7f397ea58790> input_array = array([[[3.90463578], [7.14026347], [4.99498432], [6.25772194], [6.65828409]], ...49 ]], [[5.61983315], [9.78386344], [5.08451843], [4.58871909], [6.50715271]]]) other_array = array([1.33363507]), types = (<class 'numpy.int32'>, <class 'numpy.int32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32' ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58790>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[3], [7], [4], [6], [6]], [[7], [0], [1], [0]... 
[6]], [[5], [9], [5], [4], [6]]], dtype=int32), array([1], dtype=int32)) graph = graph(%self : __torch__.test_div.___torch_mangle_1203.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397c464a40> im = <openvino._pyopenvino.InputModel object at 0x7f397c4647c0> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) om.validate_nodes_and_infer_types() # OV infer: core = Core() compiled = core.compile_model(om, ie_device) infer_res = compiled(inputs) if hasattr(self, 'skip_framework') and self.skip_framework: warnings.warn('Framework is skipped') return # Framework infer: torch_inps = [torch.from_numpy(inp) for inp in inputs] fw_res = model(*torch_inps) if not isinstance(fw_res, (tuple)): fw_res = (fw_res,) output_list = list(infer_res.values()) assert len(fw_res) == len( output_list), f'number of outputs not equal, {len(fw_res)} != {len(output_list)}' # check if results dtypes match for fw_tensor, ov_tensor in zip(fw_res, output_list): if not isinstance(fw_tensor, torch.Tensor): if np.isscalar(fw_tensor): assert fw_tensor == np.array(ov_tensor).item() else: if isinstance(fw_tensor, list): ov_tensor = ov_tensor.tolist() assert ov_tensor == fw_tensor assert type(fw_tensor) == type(ov_tensor) continue > assert torch.tensor(np.array(ov_tensor)).dtype == fw_tensor.dtype, f"dtype validation failed: {torch.tensor(np.array(ov_tensor)).dtype} != {fw_tensor.dtype}" E AssertionError: dtype validation failed: torch.int32 != torch.float32 pytorch_layer_test_class.py:109: AssertionError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1203.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.01 | |
|
self = <test_div.TestDiv object at 0x7f397ea58820> input_array = array([[[[6.13562094, 4.03942058, 6.35617663, 8.90453805, 1.04801022], [4.63105617, 3.52061108, 1.18113494, 0...2233553, 1.27231074, 4.27402709, 9.03791957], [9.13755296, 0.66006885, 4.1248312 , 9.03186207, 8.67504731]]]]) other_array = array([3.16394232]), types = (<class 'numpy.int32'>, <class 'numpy.int32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32' ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58820>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[[6, 4, 6, 8, 1], [4, 3, 1, 0, 2], [5, 4, 6, 0, 7], [8, 5, 1, 4, 9], [9, 0, 4, 9, 8]]]], dtype=int32), array([3], dtype=int32)) graph = graph(%self : 
__torch__.test_div.___torch_mangle_1205.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397c2a40e0> im = <openvino._pyopenvino.InputModel object at 0x7f397c2a4310> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) om.validate_nodes_and_infer_types() # OV infer: core = Core() compiled = core.compile_model(om, ie_device) infer_res = compiled(inputs) if hasattr(self, 'skip_framework') and self.skip_framework: warnings.warn('Framework is skipped') return # Framework infer: torch_inps = [torch.from_numpy(inp) for inp in inputs] fw_res = model(*torch_inps) if not isinstance(fw_res, (tuple)): fw_res = (fw_res,) output_list = list(infer_res.values()) assert len(fw_res) == len( output_list), f'number of outputs not equal, {len(fw_res)} != {len(output_list)}' # check if results dtypes match for fw_tensor, ov_tensor in zip(fw_res, output_list): if not isinstance(fw_tensor, torch.Tensor): if np.isscalar(fw_tensor): assert fw_tensor == np.array(ov_tensor).item() else: if isinstance(fw_tensor, list): ov_tensor = ov_tensor.tolist() assert ov_tensor == fw_tensor assert type(fw_tensor) == type(ov_tensor) continue > assert torch.tensor(np.array(ov_tensor)).dtype == fw_tensor.dtype, f"dtype validation failed: {torch.tensor(np.array(ov_tensor)).dtype} != {fw_tensor.dtype}" E AssertionError: dtype validation failed: torch.int32 != torch.float32 pytorch_layer_test_class.py:109: AssertionError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1205.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) ETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or 
compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable 
to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.01 | |
|
self = <test_div.TestDiv object at 0x7f397ea588b0> input_array = array([[[5.63621207], [7.97586911], [3.83646915], [1.00771288], [9.03086258]], ...487]], [[0.73638931], [6.62069904], [5.47134331], [2.88457466], [4.23490289]]]) other_array = array([[2.04584977], [1.5871927 ], [4.113846 ], [3.8487724 ], [1.6470067 ]]) types = (<class 'numpy.int32'>, <class 'numpy.int32'>), rounding_mode = None, ie_device = 'CPU', precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea588b0>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[5], [7], [3], [1], [9]], [[9], [7], [9], [3]...], [2], [4]]], dtype=int32), array([[2], [1], [4], [3], [1]], dtype=int32)) graph = graph(%self : 
__torch__.test_div.___torch_mangle_1207.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397c272ef0> im = <openvino._pyopenvino.InputModel object at 0x7f397c272bd0> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) om.validate_nodes_and_infer_types() # OV infer: core = Core() compiled = core.compile_model(om, ie_device) infer_res = compiled(inputs) if hasattr(self, 'skip_framework') and self.skip_framework: warnings.warn('Framework is skipped') return # Framework infer: torch_inps = [torch.from_numpy(inp) for inp in inputs] fw_res = model(*torch_inps) if not isinstance(fw_res, (tuple)): fw_res = (fw_res,) output_list = list(infer_res.values()) assert len(fw_res) == len( output_list), f'number of outputs not equal, {len(fw_res)} != {len(output_list)}' # check if results dtypes match for fw_tensor, ov_tensor in zip(fw_res, output_list): if not isinstance(fw_tensor, torch.Tensor): if np.isscalar(fw_tensor): assert fw_tensor == np.array(ov_tensor).item() else: if isinstance(fw_tensor, list): ov_tensor = ov_tensor.tolist() assert ov_tensor == fw_tensor assert type(fw_tensor) == type(ov_tensor) continue > assert torch.tensor(np.array(ov_tensor)).dtype == fw_tensor.dtype, f"dtype validation failed: {torch.tensor(np.array(ov_tensor)).dtype} != {fw_tensor.dtype}" E AssertionError: dtype validation failed: torch.int32 != torch.float32 pytorch_layer_test_class.py:109: AssertionError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1207.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58b80> input_array = array([[0.90247622, 7.68527131, 9.02379164, 1.70516486, 2.2346272 ], [5.300889 , 7.2831553 , 4.19919755, 3.030... 3.90239147, 7.93656487, 4.72168371, 3.03136055], [9.43842077, 2.48720598, 5.34117367, 2.50025354, 1.99253338]]) other_array = array([1.73870404]), types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = 'floor', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58b80>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[0, 7, 9, 1, 2], [5, 7, 4, 3, 9], [6, 7, 6, 3, 4], [1, 3, 7, 4, 3], [9, 2, 5, 2, 1]], dtype=int32), array([1.7387041], dtype=float32)) graph = 
graph(%self : __torch__.test_div.___torch_mangle_1217.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397bee7e50> im = <openvino._pyopenvino.InputModel object at 0x7f397bee7b80> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1299324 (Parameter_1299321[0]:i32[?,?], Parameter_1299322[0]:f32[?]) -> (dynamic[...])' with friendly_name 'Divide_1299324': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1217.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58c10> input_array = array([[[3.90463578], [7.14026347], [4.99498432], [6.25772194], [6.65828409]], ...49 ]], [[5.61983315], [9.78386344], [5.08451843], [4.58871909], [6.50715271]]]) other_array = array([1.33363507]), types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = 'floor', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58c10>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[3], [7], [4], [6], [6]], [[7], [0], [1], [0]...], [[5], [9], [5], [4], [6]]], dtype=int32), array([1.3336351], dtype=float32)) graph = graph(%self : __torch__.test_div.___torch_mangle_1219.aten_div, 
%input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397beb7ef0> im = <openvino._pyopenvino.InputModel object at 0x7f397beb7e50> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1299336 (Parameter_1299333[0]:i32[?,?,?], Parameter_1299334[0]:f32[?]) -> (dynamic[...])' with friendly_name 'Divide_1299336': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1219.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58ca0> input_array = array([[[[6.13562094, 4.03942058, 6.35617663, 8.90453805, 1.04801022], [4.63105617, 3.52061108, 1.18113494, 0...2233553, 1.27231074, 4.27402709, 9.03791957], [9.13755296, 0.66006885, 4.1248312 , 9.03186207, 8.67504731]]]]) other_array = array([3.16394232]), types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = 'floor', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58ca0>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[[6, 4, 6, 8, 1], [4, 3, 1, 0, 2], [5, 4, 6, 0, 7], [8, 5, 1, 4, 9], [9, 0, 4, 9, 8]]]], dtype=int32), array([3.1639423], dtype=float32)) graph = 
graph(%self : __torch__.test_div.___torch_mangle_1221.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397ae99ae0> im = <openvino._pyopenvino.InputModel object at 0x7f397ae99bd0> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1299348 (Parameter_1299345[0]:i32[?,?,?,?], Parameter_1299346[0]:f32[?]) -> (dynamic[...])' with friendly_name 'Divide_1299348': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1221.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58d30> input_array = array([[[5.63621207], [7.97586911], [3.83646915], [1.00771288], [9.03086258]], ...487]], [[0.73638931], [6.62069904], [5.47134331], [2.88457466], [4.23490289]]]) other_array = array([[2.04584977], [1.5871927 ], [4.113846 ], [3.8487724 ], [1.6470067 ]]) types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = 'floor', ie_device = 'CPU', precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58d30>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[5], [7], [3], [1], [9]], [[9], [7], [9], [3]...), array([[2.0458498], [1.5871927], [4.113846 ], [3.8487723], [1.6470068]], dtype=float32)) graph = 
graph(%self : __torch__.test_div.___torch_mangle_1223.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397ae726d0> im = <openvino._pyopenvino.InputModel object at 0x7f397ae72630> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1299360 (Parameter_1299357[0]:i32[?,?,?], Parameter_1299358[0]:f32[?,?]) -> (dynamic[...])' with friendly_name 'Divide_1299360': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1223.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58dc0> input_array = array([[0.90247622, 7.68527131, 9.02379164, 1.70516486, 2.2346272 ], [5.300889 , 7.2831553 , 4.19919755, 3.030... 3.90239147, 7.93656487, 4.72168371, 3.03136055], [9.43842077, 2.48720598, 5.34117367, 2.50025354, 1.99253338]]) other_array = array([1.73870404]), types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = 'floor', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58dc0>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[0.9024762, 7.6852713, 9.023791 , 1.7051649, 2.2346272], [5.300889 , 7.2831554, 4.199198 , 3.0300689, 9...13606], [9.43842 , 2.487206 , 5.3411736, 
2.5002534, 1.9925333]], dtype=float32), array([1], dtype=int32)) graph = graph(%self : __torch__.test_div.___torch_mangle_1225.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397ae0b680> im = <openvino._pyopenvino.InputModel object at 0x7f397ae0b5e0> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1299372 (Parameter_1299369[0]:f32[?,?], Parameter_1299370[0]:i32[?]) -> (dynamic[...])' with friendly_name 'Divide_1299372': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1225.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58e50> input_array = array([[[3.90463578], [7.14026347], [4.99498432], [6.25772194], [6.65828409]], ...49 ]], [[5.61983315], [9.78386344], [5.08451843], [4.58871909], [6.50715271]]]) other_array = array([1.33363507]), types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = 'floor', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58e50>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[3.9046357 ], [7.1402636 ], [4.994984 ], [6.257722 ], [6.658284 ]], ...783863 ], [5.0845184 ], [4.588719 ], [6.5071526 ]]], dtype=float32), array([1], dtype=int32)) graph = graph(%self : 
__torch__.test_div.___torch_mangle_1227.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397adf9270> im = <openvino._pyopenvino.InputModel object at 0x7f397adf9360> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1299384 (Parameter_1299381[0]:f32[?,?,?], Parameter_1299382[0]:i32[?]) -> (dynamic[...])' with friendly_name 'Divide_1299384': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1227.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58ee0> input_array = array([[[[6.13562094, 4.03942058, 6.35617663, 8.90453805, 1.04801022], [4.63105617, 3.52061108, 1.18113494, 0...2233553, 1.27231074, 4.27402709, 9.03791957], [9.13755296, 0.66006885, 4.1248312 , 9.03186207, 8.67504731]]]]) other_array = array([3.16394232]), types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = 'floor', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58ee0>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[[6.135621 , 4.0394206 , 6.356177 , 8.904538 , 1.0480102 ], [4.6310563 , 3.520611 , 1.1811349 , ... 
[9.137553 , 0.66006887, 4.124831 , 9.031862 , 8.675047 ]]]], dtype=float32), array([3], dtype=int32)) graph = graph(%self : __torch__.test_div.___torch_mangle_1229.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397ae0adb0> im = <openvino._pyopenvino.InputModel object at 0x7f397ae0ac70> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1299396 (Parameter_1299393[0]:f32[?,?,?,?], Parameter_1299394[0]:i32[?]) -> (dynamic[...])' with friendly_name 'Divide_1299396': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1229.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea58f70> input_array = array([[[5.63621207], [7.97586911], [3.83646915], [1.00771288], [9.03086258]], ...487]], [[0.73638931], [6.62069904], [5.47134331], [2.88457466], [4.23490289]]]) other_array = array([[2.04584977], [1.5871927 ], [4.113846 ], [3.8487724 ], [1.6470067 ]]) types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = 'floor', ie_device = 'CPU', precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea58f70>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[5.636212 ], [7.975869 ], [3.8364692 ], [1.0077128 ], [9.030863 ]], ...7 ], [4.234903 ]]], dtype=float32), array([[2], [1], [4], [3], [1]], dtype=int32)) 
graph = graph(%self : __torch__.test_div.___torch_mangle_1231.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397add42c0> im = <openvino._pyopenvino.InputModel object at 0x7f397add4360> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1299408 (Parameter_1299405[0]:f32[?,?,?], Parameter_1299406[0]:i32[?,?]) -> (dynamic[...])' with friendly_name 'Divide_1299408': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1231.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea694c0> input_array = array([[0.90247622, 7.68527131, 9.02379164, 1.70516486, 2.2346272 ], [5.300889 , 7.2831553 , 4.19919755, 3.030... 3.90239147, 7.93656487, 4.72168371, 3.03136055], [9.43842077, 2.48720598, 5.34117367, 2.50025354, 1.99253338]]) other_array = array([1.73870404]), types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = 'trunc', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea694c0>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[0, 7, 9, 1, 2], [5, 7, 4, 3, 9], [6, 7, 6, 3, 4], [1, 3, 7, 4, 3], [9, 2, 5, 2, 1]], dtype=int32), array([1.7387041], dtype=float32)) graph = 
graph(%self : __torch__.test_div.___torch_mangle_1249.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397ad750e0> im = <openvino._pyopenvino.InputModel object at 0x7f397ad75130> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1315820 (Parameter_1315817[0]:i32[?,?], Parameter_1315818[0]:f32[?]) -> (dynamic[...])' with friendly_name 'Divide_1315820': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1249.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea69550> input_array = array([[[3.90463578], [7.14026347], [4.99498432], [6.25772194], [6.65828409]], ...49 ]], [[5.61983315], [9.78386344], [5.08451843], [4.58871909], [6.50715271]]]) other_array = array([1.33363507]), types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = 'trunc', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea69550>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[3], [7], [4], [6], [6]], [[7], [0], [1], [0]...], [[5], [9], [5], [4], [6]]], dtype=int32), array([1.3336351], dtype=float32)) graph = graph(%self : __torch__.test_div.___torch_mangle_1251.aten_div, 
%input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397ad9a4a0> im = <openvino._pyopenvino.InputModel object at 0x7f397ad9a450> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1315833 (Parameter_1315830[0]:i32[?,?,?], Parameter_1315831[0]:f32[?]) -> (dynamic[...])' with friendly_name 'Divide_1315833': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1251.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea695e0> input_array = array([[[[6.13562094, 4.03942058, 6.35617663, 8.90453805, 1.04801022], [4.63105617, 3.52061108, 1.18113494, 0...2233553, 1.27231074, 4.27402709, 9.03791957], [9.13755296, 0.66006885, 4.1248312 , 9.03186207, 8.67504731]]]]) other_array = array([3.16394232]), types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = 'trunc', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea695e0>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[[6, 4, 6, 8, 1], [4, 3, 1, 0, 2], [5, 4, 6, 0, 7], [8, 5, 1, 4, 9], [9, 0, 4, 9, 8]]]], dtype=int32), array([3.1639423], dtype=float32)) graph = 
graph(%self : __torch__.test_div.___torch_mangle_1253.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397ad90e00> im = <openvino._pyopenvino.InputModel object at 0x7f397ad90d60> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1315846 (Parameter_1315843[0]:i32[?,?,?,?], Parameter_1315844[0]:f32[?]) -> (dynamic[...])' with friendly_name 'Divide_1315846': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1253.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.int32'>, <class 'numpy.float32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea69670> input_array = array([[[5.63621207], [7.97586911], [3.83646915], [1.00771288], [9.03086258]], ...487]], [[0.73638931], [6.62069904], [5.47134331], [2.88457466], [4.23490289]]]) other_array = array([[2.04584977], [1.5871927 ], [4.113846 ], [3.8487724 ], [1.6470067 ]]) types = (<class 'numpy.int32'>, <class 'numpy.float32'>), rounding_mode = 'trunc', ie_device = 'CPU', precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea69670>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[5], [7], [3], [1], [9]], [[9], [7], [9], [3]...), array([[2.0458498], [1.5871927], [4.113846 ], [3.8487723], [1.6470068]], dtype=float32)) graph = 
graph(%self : __torch__.test_div.___torch_mangle_1255.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397ad62f40> im = <openvino._pyopenvino.InputModel object at 0x7f397ad62f90> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1315859 (Parameter_1315856[0]:i32[?,?,?], Parameter_1315857[0]:f32[?,?]) -> (dynamic[...])' with friendly_name 'Divide_1315859': E Arguments do not have the same element type (arg0 element type: i32, arg1 element type: f32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1255.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea69700> input_array = array([[0.90247622, 7.68527131, 9.02379164, 1.70516486, 2.2346272 ], [5.300889 , 7.2831553 , 4.19919755, 3.030... 3.90239147, 7.93656487, 4.72168371, 3.03136055], [9.43842077, 2.48720598, 5.34117367, 2.50025354, 1.99253338]]) other_array = array([1.73870404]), types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = 'trunc', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea69700>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[0.9024762, 7.6852713, 9.023791 , 1.7051649, 2.2346272], [5.300889 , 7.2831554, 4.199198 , 3.0300689, 9...13606], [9.43842 , 2.487206 , 5.3411736, 
2.5002534, 1.9925333]], dtype=float32), array([1], dtype=int32)) graph = graph(%self : __torch__.test_div.___torch_mangle_1257.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397ad63d10> im = <openvino._pyopenvino.InputModel object at 0x7f397ad63900> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1315872 (Parameter_1315869[0]:f32[?,?], Parameter_1315870[0]:i32[?]) -> (dynamic[...])' with friendly_name 'Divide_1315872': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1257.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea69790> input_array = array([[[3.90463578], [7.14026347], [4.99498432], [6.25772194], [6.65828409]], ...49 ]], [[5.61983315], [9.78386344], [5.08451843], [4.58871909], [6.50715271]]]) other_array = array([1.33363507]), types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = 'trunc', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea69790>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[3.9046357 ], [7.1402636 ], [4.994984 ], [6.257722 ], [6.658284 ]], ...783863 ], [5.0845184 ], [4.588719 ], [6.5071526 ]]], dtype=float32), array([1], dtype=int32)) graph = graph(%self : 
__torch__.test_div.___torch_mangle_1259.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397ad01630> im = <openvino._pyopenvino.InputModel object at 0x7f397ad015e0> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1315885 (Parameter_1315882[0]:f32[?,?,?], Parameter_1315883[0]:i32[?]) -> (dynamic[...])' with friendly_name 'Divide_1315885': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1259.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea69820> input_array = array([[[[6.13562094, 4.03942058, 6.35617663, 8.90453805, 1.04801022], [4.63105617, 3.52061108, 1.18113494, 0...2233553, 1.27231074, 4.27402709, 9.03791957], [9.13755296, 0.66006885, 4.1248312 , 9.03186207, 8.67504731]]]]) other_array = array([3.16394232]), types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = 'trunc', ie_device = 'CPU' precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea69820>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[[6.135621 , 4.0394206 , 6.356177 , 8.904538 , 1.0480102 ], [4.6310563 , 3.520611 , 1.1811349 , ... 
[9.137553 , 0.66006887, 4.124831 , 9.031862 , 8.675047 ]]]], dtype=float32), array([3], dtype=int32)) graph = graph(%self : __torch__.test_div.___torch_mangle_1261.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f397acf0c20> im = <openvino._pyopenvino.InputModel object at 0x7f397acf0b80> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1315898 (Parameter_1315895[0]:f32[?,?,?,?], Parameter_1315896[0]:i32[?]) -> (dynamic[...])' with friendly_name 'Divide_1315898': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1261.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XFailed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.float32'>, <class 'numpy.int32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.00 | |
|
self = <test_div.TestDiv object at 0x7f397ea698b0> input_array = array([[[5.63621207], [7.97586911], [3.83646915], [1.00771288], [9.03086258]], ...487]], [[0.73638931], [6.62069904], [5.47134331], [2.88457466], [4.23490289]]]) other_array = array([[2.04584977], [1.5871927 ], [4.113846 ], [3.8487724 ], [1.6470067 ]]) types = (<class 'numpy.float32'>, <class 'numpy.int32'>), rounding_mode = 'trunc', ie_device = 'CPU', precision = 'FP32', ir_version = '11' @pytest.mark.parametrize(("input_array", "other_array"), [ [10 * np.random.rand(5, 5), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform(low=1, high=5, size=(1))], [10 * np.random.rand(1, 1, 5, 5), np.random.uniform( low=1, high=5, size=(1))], [10 * np.random.rand(5, 5, 1), np.random.uniform( low=1, high=5, size=(5, 1))] ]) @pytest.mark.parametrize(("types"), [ (np.float32, np.float32), pytest.param((np.int32, np.float32), marks=pytest.mark.xfail), pytest.param((np.float32, np.int32), marks=pytest.mark.xfail), pytest.param((np.int32, np.int32), marks=pytest.mark.xfail) ]) @pytest.mark.parametrize('rounding_mode', ([ None, "floor", "trunc" ])) @pytest.mark.nightly def test_div(self, input_array, other_array, types, rounding_mode, ie_device, precision, ir_version): self.input_array = input_array self.input_type = types[0] self.other_array = other_array self.other_type = types[1] > self._test(*self.create_model(rounding_mode), ie_device, precision, ir_version) test_div.py:54: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <test_div.TestDiv object at 0x7f397ea698b0>, model = RecursiveScriptModule(original_name=aten_div), ref_net = None, kind = 'aten::div' ie_device = 'CPU', precision = 'FP32', ir_version = '11', infer_timeout = 60, dynamic_shapes = True, kwargs = {} inputs = (array([[[5.636212 ], [7.975869 ], [3.8364692 ], [1.0077128 ], [9.030863 ]], ...7 ], [4.234903 ]]], dtype=float32), array([[2], [1], [4], [3], [1]], dtype=int32)) 
graph = graph(%self : __torch__.test_div.___torch_mangle_1263.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 ...tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fe_manager = <FrontEndManager>, fe = <FrontEnd 'pytorch'> decoder = <openvino.frontend.pytorch.decoder.TorchScriptPythonDecoder object at 0x7f39781e25e0> im = <openvino._pyopenvino.InputModel object at 0x7f39781e2540> def _test(self, model, ref_net, kind, ie_device, precision, ir_version, infer_timeout=60, dynamic_shapes=True, **kwargs): """ :param enabled_transforms/disabled_transforms: string with idxs of transforms that should be enabled/disabled. Example: "transform_1,transform_2" """ import torch if 'kwargs_to_prepare_input' in kwargs and kwargs['kwargs_to_prepare_input']: inputs = self._prepare_input(**kwargs['kwargs_to_prepare_input']) else: inputs = self._prepare_input() with torch.no_grad(): model.eval() if not kwargs.get('trace_model', False): model = torch.jit.script(model) else: torch_inputs = [torch.from_numpy(inp) for inp in inputs] model = torch.jit.trace(model, torch_inputs) model = torch.jit.freeze(model) graph = model.inlined_graph print(graph) assert kind is None or self._check_kind_exist( graph, kind), "Operation type doesn't exist in provided graph" fe_manager = FrontEndManager() fe = fe_manager.load_by_framework('pytorch') decoder = TorchScriptPythonDecoder(model) im = fe.load(decoder) om = fe.convert(im) params = om.get_parameters() # todo: support lists and dicts for i in range(len(inputs)): inp = inputs[i] assert inp.dtype.name in self._type_map, f"Unknown type {inp.dtype}." 
params[i].set_element_type(self._type_map[inp.dtype.name]) shape = [-1] * len(inp.shape) if dynamic_shapes else inp.shape params[i].set_partial_shape(PartialShape(shape)) > om.validate_nodes_and_infer_types() E RuntimeError: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: E While validating node 'v1::Divide Divide_1315911 (Parameter_1315908[0]:f32[?,?,?], Parameter_1315909[0]:i32[?,?]) -> (dynamic[...])' with friendly_name 'Divide_1315911': E Arguments do not have the same element type (arg0 element type: f32, arg1 element type: i32). pytorch_layer_test_class.py:77: RuntimeError ----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1263.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) | |||
| XPassed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1233.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[0 7 9 1 2] [5 7 4 3 9] [6 7 6 3 4] [1 3 7 4 3] [9 2 5 2 1]]; ov_res: [[0 7 9 1 2] [5 7 4 3 9] [6 7 6 3 4] [1 3 7 4 3] [9 2 5 2 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| XPassed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1235.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[3] [7] [4] [6] [6]] [[7] [0] [1] [0] [8]] [[9] [9] [7] [2] [4]] [[9] [2] [8] [1] [6]] [[5] [9] [5] [4] [6]]]; ov_res: [[[3] [7] [4] [6] [6]] [[7] [0] [1] [0] [8]] [[9] [9] [7] [2] [4]] [[9] [2] [8] [1] [6]] [[5] [9] [5] [4] [6]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| XPassed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1237.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[[2 1 2 2 0] [1 1 0 0 0] [1 1 2 0 2] [2 1 0 1 3] [3 0 1 3 2]]]]; ov_res: [[[[2 1 2 2 0] [1 1 0 0 0] [1 1 2 0 2] [2 1 0 1 3] [3 0 1 3 2]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| XPassed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1239.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[2] [7] [0] [0] [9]] [[4] [7] [2] [1] [4]] [[2] [2] [0] [2] [0]] [[4] [8] [0] [0] [9]] [[0] [6] [1] [0] [4]]]; ov_res: [[[2] [7] [0] [0] [9]] [[4] [7] [2] [1] [4]] [[2] [2] [0] [2] [0]] [[4] [8] [0] [0] [9]] [[0] [6] [1] [0] [4]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| XPassed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1265.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[0 7 9 1 2] [5 7 4 3 9] [6 7 6 3 4] [1 3 7 4 3] [9 2 5 2 1]]; ov_res: [[0 7 9 1 2] [5 7 4 3 9] [6 7 6 3 4] [1 3 7 4 3] [9 2 5 2 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| XPassed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1267.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[3] [7] [4] [6] [6]] [[7] [0] [1] [0] [8]] [[9] [9] [7] [2] [4]] [[9] [2] [8] [1] [6]] [[5] [9] [5] [4] [6]]]; ov_res: [[[3] [7] [4] [6] [6]] [[7] [0] [1] [0] [8]] [[9] [9] [7] [2] [4]] [[9] [2] [8] [1] [6]] [[5] [9] [5] [4] [6]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| XPassed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1269.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[[2 1 2 2 0] [1 1 0 0 0] [1 1 2 0 2] [2 1 0 1 3] [3 0 1 3 2]]]]; ov_res: [[[[2 1 2 2 0] [1 1 0 0 0] [1 1 2 0 2] [2 1 0 1 3] [3 0 1 3 2]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| XPassed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.int32'>, <class 'numpy.int32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1271.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[2] [7] [0] [0] [9]] [[4] [7] [2] [1] [4]] [[2] [2] [0] [2] [0]] [[4] [8] [0] [0] [9]] [[0] [6] [1] [0] [4]]]; ov_res: [[[2] [7] [0] [0] [9]] [[4] [7] [2] [1] [4]] [[2] [2] [0] [2] [0]] [[4] [8] [0] [0] [9]] [[0] [6] [1] [0] [4]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_avg_pool3d.py::TestAdaptiveAvgPool3D::test_adaptive_avg_pool3d[ ie_device:CPU - precision:FP32 - output_size:[5, 7, 9] - input_tensor:[[[[[ 0.6750274 -0.52947855 -1.0510274 ... -0.9260406 -1.761402 -0.9858571 ] [ 2.0246472 1.3003415 -0.13044687 ... -1.6204368 0.2818884 -0.60950613] [ 0.28208718 -1.2194207 1.0159653 ... -0.41373488 -0.99766797 -0.24041523] ... [ 0.48282078 -1.1239302 0.02843587 ... -0.71503603 0.6534662 0.4188135 ] [ 0.40406922 -1.4748852 1.2540164 ... 1.6250517 -0.7066055 -0.48428303] [-0.65493184 1.536664 -0.31111434 ... 0.557822 0.8869128 -1.6934036 ]] [[ 0.7292678 0.07061668 -1.0223473 ... 0.95373696 0.6173491 -0.45709842] [ 1.4305075 0.41382825 1.24594 ... -0.398382 1.1687646 1.747247 ] [ 0.8794458 0.7050558 -0.24334209 ... -0.9137401 1.7045393 -0.4218345 ] ... [ 0.57678795 -1.5589333 -0.40251887 ... -0.04396934 -0.06045668 0.7406191 ] [ 1.5756857 -2.0828176 1.3952135 ... -0.3303899 -0.4380001 -0.61964315] [-2.7773886 0.38021424 -0.956624 ... 0.67594725 -0.9423851 0.10215356]] [[ 0.09112854 -0.78757286 0.98372304 ... 0.7053841 -0.12594485 0.3758394 ] [-1.5005482 0.5242814 -1.0806792 ... -1.1540502 -1.216528 -1.0406891 ] [-1.8478785 1.6188104 0.60401744 ... -0.65600985 0.51704794 -0.3180054 ] ... [ 0.9267348 0.9337457 0.6681243 ... -0.11904679 -0.15711573 0.93636817] [ 0.57808864 -0.332161 -1.613622 ... 1.7955519 -0.02651354 -1.9947116 ] [ 0.3498302 -0.19556746 0.585111 ... -3.2975013 1.1621848 0.23655939]] ... [[ 2.1814766 -0.42563906 -0.07700434 ... -0.53128225 -0.03745885 0.94462293] [ 1.472312 -0.5113537 -1.272005 ... -0.97998345 0.7632413 -0.1564714 ] [ 0.7353792 -0.13438107 -0.3819322 ... -0.15469684 0.68157035 -0.7616137 ] ... [-0.1949445 0.59401196 1.1702275 ... -1.6014842 -0.20763052 -0.83426774] [ 0.626467 0.4832535 0.01102521 ... 1.0266385 -0.02824205 0.46031135] [-0.85450536 -0.49608487 -1.3702333 ... -1.3460124 0.15207903 -0.01099042]] [[-0.20366985 -0.6548867 -0.97249347 ... 
0.46302593 -0.36954644 -0.69484496] [ 1.1971511 0.8974348 0.65310776 ... 0.18065329 -1.413187 -0.26280606] [ 0.8473137 0.26597053 0.46695563 ... 0.91817456 0.10147902 -0.5160176 ] ... [ 1.63903 0.79923904 -0.7654221 ... -1.3473555 -2.0917087 -0.6687809 ] [ 0.10943478 0.5097689 0.57067096 ... 1.1554998 2.3333564 0.09836157] [-0.15953125 -1.6072437 -1.7836018 ... 0.8449579 -0.46176457 0.8727772 ]] [[-0.20156522 -0.67464983 1.6955296 ... -1.2751569 0.73626053 -0.543582 ] [-0.1659209 0.92391646 -0.7679348 ... -0.07506115 0.28156137 0.39920726] [-2.094873 -1.3796912 0.643832 ... -0.04314093 0.32907686 -1.3907567 ] ... [-1.0032145 0.01588529 0.0404608 ... -0.29535833 -0.39608794 -1.6667883 ] [-0.2246718 -0.20661612 -0.29374757 ... -0.27083984 0.93433696 0.35732117] [ 2.6965034 0.31061867 1.7420624 ... -0.23209336 -0.93530625 0.6523891 ]]] [[[ 2.706187 -3.0322587 -1.9081064 ... 0.7283488 -0.04794742 0.91364354] [ 0.80018705 -0.7671929 -1.8022435 ... -1.5633122 1.4194466 1.1269051 ] [-0.29453647 -1.106829 -0.66306984 ... 0.8926227 0.18560319 0.49385738] ... [-0.34975246 1.1075768 0.96871674 ... -0.5546797 -0.81069183 -0.5858185 ] [ 0.11887803 -1.3348 2.0177402 ... 0.55222136 0.5303798 1.6923729 ] [-1.3927875 0.42775464 -0.629481 ... 0.92130643 0.02605388 -0.8802159 ]] [[-0.3206207 -0.7637518 -0.95880735 ... 0.0985759 0.839436 -0.62324774] [ 0.49020365 1.3907264 0.72230864 ... 0.6602552 -0.3843507 1.0993619 ] [-0.0558609 -0.05812433 0.92553127 ... -0.3669914 -0.12281457 0.06550856] ... [ 1.5886163 1.2332557 -0.38938886 ... -0.06936838 -0.2734803 -1.1043248 ] [-0.6271722 0.06817702 -0.19999726 ... -0.15770552 -1.0133241 -0.33918434] [ 0.7350949 -0.12656708 1.1939455 ... 0.21440715 -0.40823248 1.0185643 ]] [[ 0.7377359 -0.8300177 -1.0766298 ... -2.0571415 0.6365726 0.38393214] [ 0.09577733 -0.49613732 0.67632455 ... 1.0116483 -0.6291124 1.2298497 ] [ 1.9512712 0.29137802 0.70198286 ... -0.24776326 -0.83065665 0.81060606] ... [-0.7874076 -0.7132398 0.5118683 ... 
1.120565 -0.8272467 -1.5021037 ] [ 1.1298773 -0.08396461 -0.29258165 ... -0.6440496 -0.9135012 0.628351 ] [-0.5922622 0.01449709 -0.34151635 ... -0.43161443 -1.8962408 -0.5323735 ]] ... [[ 1.7681766 2.3496604 -0.69343966 ... 0.6851386 0.03248706 0.5555838 ] [-0.8219531 -0.9320715 0.00665106 ... 0.6417402 0.80125844 0.2582212 ] [ 0.37478152 -1.5713156 1.2746387 ... 0.31453222 1.2137058 2.1733575 ] ... [ 2.0547493 0.56847346 -1.6452566 ... 0.8880564 1.2032093 -0.03186269] [-1.7024984 -2.2148924 -0.32915714 ... -0.82830536 -0.48574445 -0.2587135 ] [-0.43530554 -1.0800319 -0.9905766 ... 1.3704839 -0.8472254 1.1817372 ]] [[ 0.05986854 1.2856232 -0.6177747 ... -0.26461822 1.5861459 -0.15799014] [ 0.9962945 0.48416355 1.0785571 ... 1.1029189 -0.6692258 -0.31980598] [ 0.7508004 -1.9038049 2.3935516 ... 0.3934067 -0.909985 0.2512394 ] ... [ 1.2717466 1.2417549 0.803297 ... -0.25844917 -0.62372243 0.28100547] [-1.3979619 -0.6023846 -0.07263488 ... -0.5740154 0.69222885 -1.1171021 ] [-0.20074627 -1.9274722 -0.24093167 ... 0.07816572 0.5422447 -0.19835202]] [[ 0.5151694 0.19797453 0.71569234 ... 0.44548082 0.8424307 -1.0661782 ] [-1.5072691 -0.5466803 1.263944 ... 0.53587717 -0.40651295 -1.5978879 ] [ 0.36465913 -0.5635329 -0.46114 ... 0.03028747 1.786946 0.5123203 ] ... [ 0.06026702 -0.7654359 0.56040835 ... -2.0763602 0.26728344 -0.85051537] [ 2.050332 0.7479984 -0.6446392 ... -1.2944051 -1.7548354 0.2096386 ] [-0.5268953 0.33837456 1.3818275 ... 0.8184589 -0.37372634 0.34165382]]]]] ] | 0.10 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_avg_pool3d.___torch_mangle_0.aten_adaptive_avg_pool3d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=3]() %3 : str = prim::Constant[value="AssertionError: "]() %4 : NoneType = prim::Constant() %self.output_size : int[] = prim::Constant[value=[5, 7, 9]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1230:51 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%3, %4) # <string>:5:2 -> () %9 : Tensor = aten::adaptive_avg_pool3d(%input_tensor.1, %self.output_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1231:11 return (%9) fw_re: [[[[[ 7.64344752e-01 3.71782780e-02 -3.54043514e-01 -6.99145734e-01 -1.82411686e-01 4.73096728e-01 -5.82193583e-02 -2.10565269e-01 1.73181295e-04] [ 7.27061570e-01 3.85990113e-01 1.26246810e-01 -3.50771219e-01 1.59778357e-01 5.57128251e-01 -2.61940300e-01 -1.48596197e-01 3.29126954e-01] [ 5.52861214e-01 5.79627901e-02 -1.13731235e-01 -3.11828673e-01 2.03857392e-01 6.83170676e-01 1.84158720e-02 -2.75669724e-01 -3.07125092e-01] [ 5.60825288e-01 4.27327961e-01 -1.84976026e-01 -1.16388522e-01 1.47111133e-01 2.30594780e-02 -7.36731291e-02 -1.88810024e-02 -1.42966196e-01] [ 3.76488328e-01 2.52314210e-01 4.85002883e-02 4.45232153e-01 3.16352904e-01 -2.97187716e-01 -1.46434247e-01 1.64720103e-01 2.17067778e-01] [-4.00150329e-01 -4.95677412e-01 4.27137911e-01 -4.28221077e-02 -5.18542230e-01 -3.48523349e-01 5.14093153e-02 -1.99244916e-03 -6.20112084e-02] [-3.86673748e-01 -3.24166194e-02 2.05498919e-01 -2.49052525e-01 -4.78755921e-01 -3.08241606e-01 2.04703242e-01 1.66044161e-01 -4.86906797e-01]] [[ 1.44304320e-01 2.29953781e-01 -3.27027030e-02 -3.28103453e-01 -1.85763001e-01 1.05676251e-02 -2.32599452e-01 
-3.95549424e-02 1.99982867e-01] [ 1.86030731e-01 5.41609108e-01 -9.64823738e-02 -3.25884134e-01 4.21223603e-02 2.15287935e-02 -2.67448545e-01 -1.06711686e-02 8.64252672e-02] [ 2.55714446e-01 1.85748056e-01 -4.30352360e-01 -6.11728787e-01 -4.61401075e-01 -2.84798175e-01 -1.23668492e-01 2.16140941e-01 7.90745765e-02] [ 9.16418359e-02 -1.99792162e-01 -2.71187216e-01 -2.32204184e-01 -6.06459916e-01 -3.22209448e-01 5.55506535e-02 1.90467119e-01 1.00139566e-01] [ 5.57505667e-01 2.56696850e-01 2.52541870e-01 9.99337956e-02 -3.38593215e-01 -1.41225964e-01 1.97817028e-01 2.13555679e-01 4.31085229e-02] [ 4.95036811e-01 2.67488956e-02 5.03966808e-02 -1.27308711e-01 -3.07549506e-01 -2.07661673e-01 1.51409179e-01 2.26060431e-02 -2.11553529e-01] [-5.61041497e-02 -2.71541983e-01 -2.24761054e-01 -4.14736301e-01 -4.27943617e-01 2.14223221e-01 1.92170739e-01 -3.22814316e-01 -2.11552933e-01]] [[ 2.63279468e-01 3.50479454e-01 2.50870049e-01 1.95572406e-01 -1.81066379e-01 -2.60294110e-01 -5.50676107e-01 -4.74750996e-01 -2.78541446e-01] [ 1.24243654e-01 4.94673520e-01 2.25953296e-01 3.76830906e-01 3.15050066e-01 1.77029833e-01 2.47674540e-01 1.50201753e-01 -9.69153643e-02] [-2.93571413e-01 -9.97753292e-02 -3.40490907e-01 -4.88188207e-01 -1.21103629e-01 -6.51314035e-02 -3.21943343e-01 8.35378319e-02 2.89749920e-01] [ 3.28323357e-02 -2.51280576e-01 -4.73925680e-01 -4.19081539e-01 -6.22728653e-02 2.28502795e-01 -1.38600305e-01 -7.48572052e-02 1.36475638e-01] [ 5.16240060e-01 1.09681308e-01 3.45774561e-01 8.93207639e-03 -3.75706106e-02 4.89601910e-01 3.09789032e-02 -3.59917581e-02 -9.62270796e-02] [ 6.95061445e-01 1.83209702e-01 -8.33913684e-04 -1.59783155e-01 -1.29047453e-01 1.58846378e-05 -3.25929850e-01 -3.10847580e-01 -1.52375206e-01] [ 4.82325613e-01 -4.21896875e-02 -6.81864500e-01 -7.71066964e-01 -2.01388866e-01 2.83003211e-01 -1.79664552e-01 -7.18640864e-01 1.26298368e-01]] [[ 4.41576511e-01 -1.64221600e-01 -4.81736772e-02 2.63651878e-01 2.49327030e-02 4.67261039e-02 -2.17358139e-03 
-3.91463041e-01 -3.98803085e-01] [ 4.79575843e-01 1.02482252e-01 -4.79138903e-02 3.08916479e-01 3.36466402e-01 3.26092094e-01 2.56623119e-01 3.98800708e-02 -1.02974094e-01] [-4.34474647e-02 -1.66346565e-01 -2.99317211e-01 -2.69303590e-01 4.48243737e-01 4.59804267e-01 -3.37480545e-01 1.58929169e-01 4.06021088e-01] [-1.54238567e-01 -2.91614443e-01 -1.14739604e-01 -1.12118721e-02 6.90257773e-02 9.02645476e-03 -3.83501977e-01 -2.14214668e-01 2.34496728e-01] [ 1.38435857e-02 -1.91040352e-01 2.38252178e-01 3.96585375e-01 1.54618025e-01 2.61846215e-01 -2.75246948e-01 -6.11367881e-01 -2.73698241e-01] [ 4.00286764e-01 1.27041399e-01 9.96998027e-02 4.58172942e-03 2.07436625e-02 3.57270271e-01 -5.44189215e-02 -2.41576627e-01 -1.03201143e-01] [ 5.32748215e-02 -2.98475951e-01 -3.43537956e-01 -5.94951451e-01 -4.77696389e-01 -1.71044376e-02 -1.33817509e-01 3.33375782e-02 3.70379567e-01]] [[ 1.39726222e-01 1.37502968e-01 7.95801729e-02 2.56292999e-01 -2.73169279e-01 -5.88947952e-01 -1.51357055e-01 -1.83931321e-01 -2.33367190e-01] [ 6.14126772e-02 2.12948889e-01 4.01303202e-01 5.21886885e-01 -8.37526619e-02 -3.98731321e-01 5.26783355e-02 3.49445082e-02 -3.08930337e-01] [-4.35777903e-01 -2.03303993e-04 1.18148625e-01 -5.04992306e-02 -1.18968457e-01 -7.51363412e-02 -6.44017309e-02 2.39264548e-01 6.00957461e-02] [ 3.71610850e-01 4.55226094e-01 1.51530206e-01 8.39196444e-02 -1.50374740e-01 2.75481641e-02 4.18218046e-01 3.11207324e-01 4.19006437e-01] [ 1.62395328e-01 1.61040165e-02 -3.93378228e-01 -2.72413313e-01 4.55021784e-02 4.43692625e-01 2.24798724e-01 -2.98132181e-01 -3.81498277e-01] [ 2.04856962e-01 8.37799013e-02 -1.24479763e-01 -5.43266177e-01 -3.58353198e-01 2.87132889e-01 7.59382099e-02 2.73039192e-03 -1.37498721e-01] [ 1.78532854e-01 -9.47610587e-02 6.37027547e-02 -6.73541844e-01 -5.61133564e-01 -1.05871692e-01 -6.53691590e-02 4.21018392e-01 4.81433898e-01]]] [[[ 6.29350096e-02 -8.89915705e-01 -3.06525767e-01 4.86630648e-02 -9.18992981e-02 2.24664658e-02 -3.33179981e-02 
2.18806520e-01 5.42905927e-01] [ 4.98216823e-02 -1.69861600e-01 -6.29338026e-02 -1.83750898e-01 -1.25182629e-01 1.57883018e-02 -1.29208535e-01 9.00573581e-02 4.85439658e-01] [-1.40305310e-01 1.10254765e-01 2.11830959e-01 -5.15896022e-01 1.84383988e-01 7.02780008e-01 1.26879022e-01 -2.27860108e-01 -4.82655793e-01] [-1.09375320e-01 -2.61900604e-01 -3.78593445e-01 -9.41464007e-01 -3.72483104e-01 2.14014336e-01 -7.33342245e-02 -1.51727974e-01 -4.27009076e-01] [ 1.59597993e-01 1.35026842e-01 -3.09228241e-01 -4.20318395e-01 3.16695899e-01 3.08748066e-01 -2.65773356e-01 5.25822416e-02 -1.50751024e-01] [ 2.25597382e-01 4.33910042e-01 1.19078070e-01 -2.27055922e-01 4.72849786e-01 7.51509547e-01 2.03468129e-01 -2.24581078e-01 -2.38008916e-01] [-2.66427785e-01 1.77096516e-01 -5.42887375e-02 -3.36587548e-01 6.01020008e-02 2.31848121e-01 3.78501922e-01 8.31382871e-02 7.83017427e-02]] [[ 1.38576925e-02 -1.22989357e-01 -1.78674459e-01 3.37345302e-02 3.09454173e-01 5.01288414e-01 4.32851791e-01 1.82356238e-01 2.77303100e-01] [ 1.14733689e-01 3.65094632e-01 1.48804531e-01 -2.31214598e-01 -8.30117539e-02 1.52003765e-01 3.04128915e-01 -6.03239536e-02 -1.54256821e-01] [ 1.12913586e-01 1.78448737e-01 5.52644320e-02 -5.16142249e-01 -1.45680845e-01 3.40993404e-01 4.03837822e-02 -3.79757673e-01 -4.66248482e-01] [-1.25332728e-01 -1.70578226e-01 -9.19189155e-02 -2.40286037e-01 -1.78462967e-01 7.06184506e-02 9.66244265e-02 7.02832788e-02 1.05171405e-01] [-3.78043540e-02 -7.61635825e-02 -2.03963459e-01 -4.86896783e-02 4.24437314e-01 1.29430309e-01 7.46868029e-02 1.50390327e-01 -4.03817296e-02] [ 2.38088518e-02 7.51358047e-02 -1.46241486e-02 2.14386821e-01 7.76474535e-01 6.37454033e-01 1.33525297e-01 -1.81803763e-01 -4.50020522e-01] [ 1.24028467e-01 -4.36262786e-02 -1.01774864e-01 1.96264967e-01 4.04226750e-01 3.35458368e-01 1.27248108e-01 -2.34422266e-01 -2.97189504e-01]] [[ 3.09338838e-01 3.53307933e-01 1.30612820e-01 2.38129795e-01 3.56293589e-01 1.29216909e-01 8.30760971e-02 7.00093448e-01 
7.14471519e-01] [ 1.46410480e-01 4.52185810e-01 2.53654361e-01 -8.10207706e-03 2.69554496e-01 2.95496434e-01 2.14283913e-02 2.11858243e-01 1.14956331e-02] [ 3.09426606e-01 3.17468703e-01 1.70465216e-01 -3.19712669e-01 -2.84836680e-01 1.56171992e-01 3.96425277e-02 -4.60981801e-02 -4.49339986e-01] [ 1.31583408e-01 -1.28081784e-01 -4.25269693e-01 5.49719334e-02 1.57190636e-01 -1.04029320e-01 6.21030517e-02 3.37393045e-01 2.46182784e-01] [-9.85761434e-02 -1.09076321e-01 -2.17390656e-01 3.94744098e-01 5.46027482e-01 3.08050454e-01 2.76549488e-01 2.44722560e-01 4.31261778e-01] [ 8.09092596e-02 2.12142542e-02 -1.04270510e-01 5.55201530e-01 7.31885195e-01 5.47115743e-01 2.40040720e-02 -2.71221578e-01 -1.12775385e-01] [ 2.76363313e-01 -1.15973510e-01 -3.37694615e-01 2.73219705e-01 4.54374701e-01 3.24734271e-01 -7.02053308e-02 1.51477382e-03 -1.38825029e-01]] [[ 6.50174677e-01 4.93976802e-01 1.88286915e-01 1.76962331e-01 3.65914963e-02 1.80670813e-01 3.23879004e-01 6.25350058e-01 5.85604191e-01] [ 6.50506690e-02 3.51725936e-01 7.85969317e-01 1.93018556e-01 -9.40950289e-02 3.64435941e-01 5.28717339e-01 3.66443276e-01 4.98350352e-01] [-5.16172163e-02 -9.58345365e-03 3.45174462e-01 1.25909582e-01 1.21159054e-01 6.00551330e-02 -2.08675742e-01 -1.39368549e-01 -3.77905108e-02] [ 3.91393065e-01 -8.94711614e-02 -2.19078615e-01 2.14365438e-01 2.99531966e-01 2.46596220e-03 -2.82412708e-01 -5.26538156e-02 1.84785739e-01] [ 7.49172747e-01 1.85855806e-01 -3.45460981e-01 2.11878642e-01 1.02824785e-01 8.07500109e-02 2.61775285e-01 2.50467151e-01 5.33939779e-01] [ 1.15724780e-01 -2.37382233e-01 -5.60816050e-01 -2.15660259e-01 -2.06151590e-01 9.87838730e-02 1.27997175e-01 -2.29415074e-01 -9.89560708e-02] [-6.93420649e-01 -6.35863423e-01 -3.80642325e-01 -1.83335468e-01 6.76931664e-02 2.29914948e-01 -9.32744965e-02 -2.06436917e-01 -1.24266200e-01]] [[ 1.85643047e-01 4.82687473e-01 2.12118760e-01 -1.05515063e-01 -1.72101364e-01 -8.80564898e-02 2.98920721e-01 3.96562070e-01 -2.23628044e-01] 
[-2.40671217e-01 2.18132272e-01 1.02747828e-01 -7.74344802e-01 -7.72203803e-01 -1.72124356e-01 5.15162170e-01 2.32964054e-01 -1.69113994e-01] [-2.22714692e-01 -5.45559585e-01 -7.45451927e-01 -5.28593838e-01 1.04946852e-01 2.45766893e-01 3.86121869e-01 2.78336823e-01 2.85043418e-02] [ 3.54093760e-01 -1.85042068e-01 -5.55716574e-01 -5.75407505e-01 -2.62724876e-01 -1.36295706e-01 -1.82463095e-01 6.12144619e-02 4.05615978e-02] [ 6.24822557e-01 4.64128643e-01 1.53759062e-01 -4.46568906e-01 -6.28263056e-01 -2.44705044e-02 -1.61991999e-01 -2.44145960e-01 1.32152796e-01] [ 3.25789601e-01 1.58545494e-01 7.09467307e-02 -7.57112652e-02 -2.75302887e-01 2.52961516e-01 -6.29933998e-02 -7.02784419e-01 -3.62002373e-01] [-1.89844370e-01 -1.27482787e-01 1.64361540e-02 -8.56469423e-02 -1.51254386e-02 2.15882063e-01 6.04026392e-02 -2.33235493e-01 -2.07281247e-01]]]]]; ov_res: [[[[[ 7.64344752e-01 3.71782780e-02 -3.54043514e-01 -6.99145734e-01 -1.82411686e-01 4.73096728e-01 -5.82193583e-02 -2.10565269e-01 1.73181295e-04] [ 7.27061570e-01 3.85990113e-01 1.26246810e-01 -3.50771219e-01 1.59778357e-01 5.57128251e-01 -2.61940300e-01 -1.48596197e-01 3.29126954e-01] [ 5.52861214e-01 5.79627901e-02 -1.13731235e-01 -3.11828673e-01 2.03857392e-01 6.83170676e-01 1.84158720e-02 -2.75669724e-01 -3.07125092e-01] [ 5.60825288e-01 4.27327961e-01 -1.84976026e-01 -1.16388522e-01 1.47111133e-01 2.30594780e-02 -7.36731291e-02 -1.88810024e-02 -1.42966196e-01] [ 3.76488328e-01 2.52314210e-01 4.85002883e-02 4.45232153e-01 3.16352904e-01 -2.97187716e-01 -1.46434247e-01 1.64720103e-01 2.17067778e-01] [-4.00150329e-01 -4.95677412e-01 4.27137911e-01 -4.28221077e-02 -5.18542230e-01 -3.48523349e-01 5.14093153e-02 -1.99244916e-03 -6.20112084e-02] [-3.86673748e-01 -3.24166194e-02 2.05498919e-01 -2.49052525e-01 -4.78755921e-01 -3.08241606e-01 2.04703242e-01 1.66044161e-01 -4.86906797e-01]] [[ 1.44304320e-01 2.29953781e-01 -3.27027030e-02 -3.28103453e-01 -1.85763001e-01 1.05676251e-02 -2.32599452e-01 -3.95549424e-02 
1.99982867e-01] [ 1.86030731e-01 5.41609108e-01 -9.64823738e-02 -3.25884134e-01 4.21223603e-02 2.15287935e-02 -2.67448545e-01 -1.06711686e-02 8.64252672e-02] [ 2.55714446e-01 1.85748056e-01 -4.30352360e-01 -6.11728787e-01 -4.61401075e-01 -2.84798175e-01 -1.23668492e-01 2.16140941e-01 7.90745765e-02] [ 9.16418359e-02 -1.99792176e-01 -2.71187186e-01 -2.32204169e-01 -6.06459916e-01 -3.22209448e-01 5.55506535e-02 1.90467119e-01 1.00139573e-01] [ 5.57505667e-01 2.56696850e-01 2.52541870e-01 9.99337956e-02 -3.38593215e-01 -1.41225964e-01 1.97817028e-01 2.13555679e-01 4.31085229e-02] [ 4.95036811e-01 2.67488956e-02 5.03966808e-02 -1.27308711e-01 -3.07549506e-01 -2.07661673e-01 1.51409179e-01 2.26060431e-02 -2.11553529e-01] [-5.61041497e-02 -2.71541983e-01 -2.24761054e-01 -4.14736301e-01 -4.27943617e-01 2.14223221e-01 1.92170739e-01 -3.22814316e-01 -2.11552933e-01]] [[ 2.63279468e-01 3.50479454e-01 2.50870049e-01 1.95572406e-01 -1.81066379e-01 -2.60294110e-01 -5.50676107e-01 -4.74750996e-01 -2.78541446e-01] [ 1.24243654e-01 4.94673520e-01 2.25953296e-01 3.76830906e-01 3.15050066e-01 1.77029833e-01 2.47674540e-01 1.50201753e-01 -9.69153643e-02] [-2.93571413e-01 -9.97753292e-02 -3.40490907e-01 -4.88188207e-01 -1.21103629e-01 -6.51314035e-02 -3.21943343e-01 8.35378319e-02 2.89749920e-01] [ 3.28323357e-02 -2.51280576e-01 -4.73925680e-01 -4.19081539e-01 -6.22728653e-02 2.28502795e-01 -1.38600305e-01 -7.48572052e-02 1.36475638e-01] [ 5.16240060e-01 1.09681308e-01 3.45774561e-01 8.93207639e-03 -3.75706106e-02 4.89601910e-01 3.09789032e-02 -3.59917581e-02 -9.62270796e-02] [ 6.95061445e-01 1.83209702e-01 -8.33913684e-04 -1.59783155e-01 -1.29047453e-01 1.58846378e-05 -3.25929850e-01 -3.10847580e-01 -1.52375206e-01] [ 4.82325613e-01 -4.21896875e-02 -6.81864500e-01 -7.71066964e-01 -2.01388866e-01 2.83003211e-01 -1.79664552e-01 -7.18640864e-01 1.26298368e-01]] [[ 4.41576511e-01 -1.64221600e-01 -4.81736772e-02 2.63651878e-01 2.49327030e-02 4.67261039e-02 -2.17358139e-03 -3.91463041e-01 
-3.98803085e-01] [ 4.79575843e-01 1.02482252e-01 -4.79138903e-02 3.08916479e-01 3.36466402e-01 3.26092094e-01 2.56623119e-01 3.98800708e-02 -1.02974094e-01] [-4.34474647e-02 -1.66346565e-01 -2.99317211e-01 -2.69303590e-01 4.48243737e-01 4.59804267e-01 -3.37480545e-01 1.58929169e-01 4.06021088e-01] [-1.54238567e-01 -2.91614443e-01 -1.14739604e-01 -1.12118721e-02 6.90257773e-02 9.02645476e-03 -3.83501947e-01 -2.14214653e-01 2.34496728e-01] [ 1.38435857e-02 -1.91040352e-01 2.38252178e-01 3.96585375e-01 1.54618025e-01 2.61846215e-01 -2.75246948e-01 -6.11367881e-01 -2.73698241e-01] [ 4.00286764e-01 1.27041399e-01 9.96998027e-02 4.58172942e-03 2.07436625e-02 3.57270271e-01 -5.44189215e-02 -2.41576627e-01 -1.03201143e-01] [ 5.32748215e-02 -2.98475951e-01 -3.43537956e-01 -5.94951451e-01 -4.77696389e-01 -1.71044376e-02 -1.33817509e-01 3.33375782e-02 3.70379567e-01]] [[ 1.39726222e-01 1.37502968e-01 7.95801729e-02 2.56292999e-01 -2.73169279e-01 -5.88947952e-01 -1.51357055e-01 -1.83931321e-01 -2.33367190e-01] [ 6.14126772e-02 2.12948889e-01 4.01303202e-01 5.21886885e-01 -8.37526619e-02 -3.98731321e-01 5.26783355e-02 3.49445082e-02 -3.08930337e-01] [-4.35777903e-01 -2.03303993e-04 1.18148625e-01 -5.04992306e-02 -1.18968457e-01 -7.51363412e-02 -6.44017309e-02 2.39264548e-01 6.00957461e-02] [ 3.71610850e-01 4.55226094e-01 1.51530206e-01 8.39196444e-02 -1.50374740e-01 2.75481641e-02 4.18218046e-01 3.11207324e-01 4.19006437e-01] [ 1.62395328e-01 1.61040165e-02 -3.93378228e-01 -2.72413313e-01 4.55021784e-02 4.43692625e-01 2.24798724e-01 -2.98132181e-01 -3.81498277e-01] [ 2.04856962e-01 8.37799013e-02 -1.24479763e-01 -5.43266177e-01 -3.58353198e-01 2.87132889e-01 7.59382099e-02 2.73039192e-03 -1.37498721e-01] [ 1.78532854e-01 -9.47610587e-02 6.37027547e-02 -6.73541844e-01 -5.61133564e-01 -1.05871692e-01 -6.53691590e-02 4.21018392e-01 4.81433898e-01]]] [[[ 6.29350096e-02 -8.89915705e-01 -3.06525767e-01 4.86630648e-02 -9.18992981e-02 2.24664658e-02 -3.33179981e-02 2.18806520e-01 
5.42905927e-01] [ 4.98216823e-02 -1.69861600e-01 -6.29338026e-02 -1.83750898e-01 -1.25182629e-01 1.57883018e-02 -1.29208535e-01 9.00573581e-02 4.85439658e-01] [-1.40305310e-01 1.10254765e-01 2.11830959e-01 -5.15896022e-01 1.84383988e-01 7.02780008e-01 1.26879022e-01 -2.27860108e-01 -4.82655793e-01] [-1.09375320e-01 -2.61900604e-01 -3.78593445e-01 -9.41464007e-01 -3.72483104e-01 2.14014336e-01 -7.33342245e-02 -1.51727974e-01 -4.27009076e-01] [ 1.59597993e-01 1.35026842e-01 -3.09228241e-01 -4.20318395e-01 3.16695899e-01 3.08748066e-01 -2.65773356e-01 5.25822416e-02 -1.50751024e-01] [ 2.25597382e-01 4.33910042e-01 1.19078070e-01 -2.27055922e-01 4.72849786e-01 7.51509547e-01 2.03468129e-01 -2.24581078e-01 -2.38008916e-01] [-2.66427785e-01 1.77096516e-01 -5.42887375e-02 -3.36587548e-01 6.01020008e-02 2.31848121e-01 3.78501922e-01 8.31382871e-02 7.83017427e-02]] [[ 1.38576925e-02 -1.22989357e-01 -1.78674459e-01 3.37345302e-02 3.09454173e-01 5.01288414e-01 4.32851791e-01 1.82356238e-01 2.77303100e-01] [ 1.14733689e-01 3.65094632e-01 1.48804531e-01 -2.31214598e-01 -8.30117539e-02 1.52003765e-01 3.04128915e-01 -6.03239536e-02 -1.54256821e-01] [ 1.12913586e-01 1.78448737e-01 5.52644320e-02 -5.16142249e-01 -1.45680845e-01 3.40993404e-01 4.03837822e-02 -3.79757673e-01 -4.66248482e-01] [-1.25332728e-01 -1.70578212e-01 -9.19189155e-02 -2.40286037e-01 -1.78462967e-01 7.06184506e-02 9.66244191e-02 7.02832788e-02 1.05171405e-01] [-3.78043540e-02 -7.61635825e-02 -2.03963459e-01 -4.86896783e-02 4.24437314e-01 1.29430309e-01 7.46868029e-02 1.50390327e-01 -4.03817296e-02] [ 2.38088518e-02 7.51358047e-02 -1.46241486e-02 2.14386821e-01 7.76474535e-01 6.37454033e-01 1.33525297e-01 -1.81803763e-01 -4.50020522e-01] [ 1.24028467e-01 -4.36262786e-02 -1.01774864e-01 1.96264967e-01 4.04226750e-01 3.35458368e-01 1.27248108e-01 -2.34422266e-01 -2.97189504e-01]] [[ 3.09338838e-01 3.53307933e-01 1.30612820e-01 2.38129795e-01 3.56293589e-01 1.29216909e-01 8.30760971e-02 7.00093448e-01 
7.14471519e-01] [ 1.46410480e-01 4.52185810e-01 2.53654361e-01 -8.10207706e-03 2.69554496e-01 2.95496434e-01 2.14283913e-02 2.11858243e-01 1.14956331e-02] [ 3.09426606e-01 3.17468703e-01 1.70465216e-01 -3.19712669e-01 -2.84836680e-01 1.56171992e-01 3.96425277e-02 -4.60981801e-02 -4.49339986e-01] [ 1.31583408e-01 -1.28081784e-01 -4.25269693e-01 5.49719334e-02 1.57190636e-01 -1.04029320e-01 6.21030517e-02 3.37393045e-01 2.46182784e-01] [-9.85761434e-02 -1.09076321e-01 -2.17390656e-01 3.94744098e-01 5.46027482e-01 3.08050454e-01 2.76549488e-01 2.44722560e-01 4.31261778e-01] [ 8.09092596e-02 2.12142542e-02 -1.04270510e-01 5.55201530e-01 7.31885195e-01 5.47115743e-01 2.40040720e-02 -2.71221578e-01 -1.12775385e-01] [ 2.76363313e-01 -1.15973510e-01 -3.37694615e-01 2.73219705e-01 4.54374701e-01 3.24734271e-01 -7.02053308e-02 1.51477382e-03 -1.38825029e-01]] [[ 6.50174677e-01 4.93976802e-01 1.88286915e-01 1.76962331e-01 3.65914963e-02 1.80670813e-01 3.23879004e-01 6.25350058e-01 5.85604191e-01] [ 6.50506690e-02 3.51725936e-01 7.85969317e-01 1.93018556e-01 -9.40950289e-02 3.64435941e-01 5.28717339e-01 3.66443276e-01 4.98350352e-01] [-5.16172163e-02 -9.58345365e-03 3.45174462e-01 1.25909582e-01 1.21159054e-01 6.00551330e-02 -2.08675742e-01 -1.39368549e-01 -3.77905108e-02] [ 3.91393065e-01 -8.94711614e-02 -2.19078615e-01 2.14365438e-01 2.99531966e-01 2.46596220e-03 -2.82412708e-01 -5.26538156e-02 1.84785739e-01] [ 7.49172747e-01 1.85855806e-01 -3.45460981e-01 2.11878642e-01 1.02824785e-01 8.07500109e-02 2.61775285e-01 2.50467151e-01 5.33939779e-01] [ 1.15724780e-01 -2.37382233e-01 -5.60816050e-01 -2.15660259e-01 -2.06151590e-01 9.87838730e-02 1.27997175e-01 -2.29415074e-01 -9.89560708e-02] [-6.93420649e-01 -6.35863423e-01 -3.80642325e-01 -1.83335468e-01 6.76931664e-02 2.29914948e-01 -9.32744965e-02 -2.06436917e-01 -1.24266200e-01]] [[ 1.85643047e-01 4.82687473e-01 2.12118760e-01 -1.05515063e-01 -1.72101364e-01 -8.80564898e-02 2.98920721e-01 3.96562070e-01 -2.23628044e-01] 
[-2.40671217e-01 2.18132272e-01 1.02747828e-01 -7.74344802e-01 -7.72203803e-01 -1.72124356e-01 5.15162170e-01 2.32964054e-01 -1.69113994e-01] [-2.22714692e-01 -5.45559585e-01 -7.45451927e-01 -5.28593838e-01 1.04946852e-01 2.45766893e-01 3.86121869e-01 2.78336823e-01 2.85043418e-02] [ 3.54093760e-01 -1.85042068e-01 -5.55716574e-01 -5.75407505e-01 -2.62724876e-01 -1.36295706e-01 -1.82463095e-01 6.12144619e-02 4.05615978e-02] [ 6.24822557e-01 4.64128643e-01 1.53759062e-01 -4.46568906e-01 -6.28263056e-01 -2.44705044e-02 -1.61991999e-01 -2.44145960e-01 1.32152796e-01] [ 3.25789601e-01 1.58545494e-01 7.09467307e-02 -7.57112652e-02 -2.75302887e-01 2.52961516e-01 -6.29933998e-02 -7.02784419e-01 -3.62002373e-01] [-1.89844370e-01 -1.27482787e-01 1.64361540e-02 -8.56469423e-02 -1.51254386e-02 2.15882063e-01 6.04026392e-02 -2.33235493e-01 -2.07281247e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_avg_pool3d.py::TestAdaptiveAvgPool3D::test_adaptive_avg_pool3d[ ie_device:CPU - precision:FP32 - output_size:[5, 7, 9] - input_tensor:[[[[-1.6264967e+00 6.6762489e-01 5.6426209e-01 ... -9.6202148e-03 8.6895913e-01 -2.8210914e-01] [ 1.2952729e-01 -5.7686734e-01 -9.4330698e-01 ... 2.1639912e-01 -1.0495428e+00 -7.8401601e-01] [ 1.0100898e+00 1.7912003e+00 -5.4441977e-01 ... -4.4367367e-01 -1.0595398e+00 -3.9696923e-01] ... [ 6.0459385e-03 1.2612447e+00 1.3234926e+00 ... -5.5413771e-01 9.8731577e-01 -8.5629040e-01] [-5.7554501e-01 1.6232855e+00 2.3614517e-01 ... -8.0677903e-01 7.2041517e-01 1.4077156e+00] [ 1.3079628e+00 -1.5080163e+00 -9.6202070e-01 ... -1.0140289e+00 1.7379189e+00 -9.3763781e-01]] [[-1.7914748e+00 -6.3024980e-01 -7.7485037e-01 ... -1.9146409e-02 -8.5829127e-01 -2.5163388e+00] [ 4.6963930e-01 6.9672205e-02 -2.1891955e-02 ... -1.8014546e-01 1.3755190e+00 7.0208353e-01] [ 1.6126059e+00 -2.5179562e-01 -4.5254251e-01 ... 1.8497190e+00 6.0567564e-01 6.5712935e-01] ... [ 2.5552246e-01 -3.9526331e-01 3.4048507e-01 ... 3.1465545e-01 -3.0910134e-01 1.7038960e+00] [-1.0965911e+00 1.8537978e+00 1.1429275e+00 ... 5.4049897e-01 1.9797909e+00 -8.6621350e-01] [ 1.9171758e-01 1.0534225e+00 -1.2237380e+00 ... -9.4144005e-01 -4.4283459e-01 6.0608193e-02]] [[ 1.6753651e+00 1.4012706e-01 -2.2499661e+00 ... -9.2337126e-01 -1.5063344e-01 -1.5893238e+00] [ 6.3750917e-01 1.4754717e+00 -1.3736570e+00 ... -1.6413206e-01 5.9598410e-01 -3.8550240e-01] [-7.8134006e-01 -2.0615801e-01 -9.0507373e-02 ... -5.3757477e-01 -1.4066958e+00 4.1114375e-01] ... [ 1.5006626e-01 -2.2851874e-01 2.1890709e-01 ... 5.2427703e-01 -3.1858540e-01 -5.2220827e-01] [ 1.0643302e+00 -1.6559567e+00 -1.4868301e+00 ... 1.9768782e-02 7.2434837e-01 -1.5852779e+00] [ 1.2399938e+00 -2.6741400e-01 -3.0564722e-02 ... 5.3922915e-01 1.2729146e+00 1.1271335e+00]] ... [[ 1.5885572e+00 9.4203562e-01 -6.8538684e-01 ... 
-1.1507785e+00 -8.3785897e-01 -8.6712420e-02] [ 1.2498572e+00 8.3067399e-01 -6.4648646e-01 ... 9.1053194e-01 -3.9339137e-01 1.4996259e+00] [-3.2953930e-01 5.3091812e-01 2.5293729e-01 ... -1.8782215e+00 9.2482108e-01 9.2817169e-01] ... [-1.0795367e+00 1.2003300e+00 2.0364006e+00 ... 6.1729187e-01 1.1428548e-01 -5.1993376e-01] [ 8.0722392e-02 -1.0748487e+00 -9.1114514e-02 ... -3.7232175e-01 -7.8755081e-01 6.2107259e-01] [-5.9458297e-01 -6.1529315e-01 -8.2488000e-02 ... 5.9068668e-01 -1.1817505e+00 -5.4924041e-01]] [[-1.0390564e+00 5.6452018e-01 -9.3224742e-02 ... 1.1034650e+00 -4.4751313e-01 7.8361720e-01] [ 1.7040416e+00 1.7961637e+00 7.8463966e-01 ... 9.3157388e-02 -2.3142221e+00 -4.2660916e-01] [ 7.0620492e-02 -9.5159173e-01 -1.3499126e+00 ... 5.4640168e-01 -7.5860435e-01 4.5222700e-01] ... [ 3.7265688e-01 -1.3472339e+00 4.9543661e-01 ... -6.4650774e-01 -5.9616677e-02 -1.1693935e+00] [-1.5688444e+00 1.3746148e-01 -2.1254668e+00 ... -3.0991259e-01 8.6149117e-03 4.5495397e-01] [ 9.9037665e-01 -9.2660803e-01 1.2740724e-01 ... -7.5722575e-01 -9.3521249e-01 -2.9881439e-01]] [[ 1.4329792e+00 1.0823737e+00 -2.2568150e-01 ... -1.2559224e+00 6.4643627e-01 -4.6755424e-01] [-4.2169612e-02 -1.4642648e+00 -1.3292223e+00 ... -2.5427562e-01 2.0668862e+00 1.8133166e+00] [-6.4925271e-01 -2.6518452e-01 3.0918184e-01 ... -2.2356656e+00 2.0907493e+00 -9.3813986e-01] ... [-1.5452185e+00 3.7887573e-01 -7.3223871e-01 ... -5.3794318e-01 1.1241510e+00 5.9302664e-01] [ 6.5320235e-01 2.6313993e-01 -2.1295655e-01 ... 6.2353700e-01 -1.1235738e+00 -2.6756424e-01] [-6.2250566e-01 9.5377706e-02 -3.6029077e-01 ... 3.5572302e-01 5.7454664e-01 -4.0214607e-01]]] [[[-1.7321759e+00 6.4408481e-01 -8.8891453e-01 ... -1.3180741e+00 -8.2013726e-01 1.0563347e-01] [-1.8293746e+00 1.1841056e+00 1.3981042e+00 ... 6.0585201e-01 -1.5472317e+00 1.4064132e+00] [-1.4932953e+00 1.8723807e+00 -3.0616051e-01 ... 1.4629193e-01 5.1395887e-01 -1.1300068e+00] ... [ 7.2136807e-01 -4.3607244e-01 6.8369955e-01 ... 
2.6249993e-01 8.9512748e-01 1.7162341e+00] [ 3.1019577e-01 3.7714550e-01 -1.6411633e+00 ... -1.1162665e+00 -2.0664115e+00 7.0759833e-01] [ 4.0817070e-01 -1.5197918e+00 -1.3483453e+00 ... 2.3490736e-01 1.0245870e+00 -8.8780987e-01]] [[-2.6410952e+00 -1.5706549e+00 -1.3813733e+00 ... 4.7794047e-01 -2.4694080e+00 2.8528017e-01] [ 4.0826070e-01 1.9100498e-01 -4.2800727e-01 ... -9.0724874e-01 1.5439166e+00 1.5604830e-01] [ 1.5704202e-02 -1.3119174e-03 -1.2856206e+00 ... 6.6664320e-01 -1.0355673e+00 -7.7261192e-01] ... [ 1.0600848e+00 3.7087935e-01 5.4382694e-01 ... 2.8970426e-01 -6.5082484e-01 -3.2557085e-01] [-1.7156125e+00 -8.6064422e-01 -7.7732250e-02 ... 1.6061682e-01 3.7308924e+00 -1.1471798e+00] [ 1.1094977e+00 3.1905465e-02 5.2448493e-01 ... -5.1771957e-01 -6.7429096e-01 8.3633494e-01]] [[-1.8256581e+00 -3.8787600e-01 -2.0312676e-01 ... 2.0535105e-01 4.9272388e-01 -1.0886678e+00] [ 5.7943588e-01 5.7171953e-01 -1.1599470e+00 ... -8.0488604e-01 -1.5798382e-01 2.8401586e-01] [ 2.3100257e+00 8.8424152e-01 -1.2121843e+00 ... 8.7549657e-01 1.7587594e+00 4.5910147e-01] ... [ 3.1728223e-01 2.0796604e+00 -2.6906416e-01 ... -2.2314141e+00 -6.1484176e-01 6.1257088e-01] [-3.1011307e-01 1.5328717e+00 4.8769197e-01 ... -1.8884116e-01 -6.2043905e-01 9.5490289e-01] [-6.5837115e-01 -6.9987938e-02 -7.9774491e-02 ... -5.4333471e-02 6.4292842e-01 5.2831626e-01]] ... [[ 4.7913942e-01 -1.4726624e-01 1.2172166e+00 ... 1.1058993e+00 9.3084764e-01 3.4024125e-01] [-7.9320735e-01 2.1973155e-01 -1.7405804e+00 ... 1.0078485e+00 -6.5975595e-01 -1.5517138e-01] [ 7.2676063e-01 -7.7852315e-01 -7.9987162e-01 ... 1.5176263e-01 1.1530857e+00 -7.9605985e-01] ... [-1.0321832e+00 6.2306595e-01 1.0167443e+00 ... -4.5511261e-02 7.7271336e-01 -1.1418898e+00] [ 2.3386879e-01 -1.1094706e+00 -6.6644841e-01 ... 3.9773324e-01 -1.4832149e+00 -2.1572460e-01] [ 1.5152471e+00 -3.6224727e-02 -9.1924542e-01 ... -1.6876988e-01 8.8966954e-01 -4.6001935e-01]] [[ 2.0776961e+00 -3.2056596e-02 -4.0627289e-01 ... 
4.7864515e-01 1.1828252e+00 1.6700789e+00] [ 1.0545219e+00 2.7329382e-01 8.6101514e-01 ... 5.9483558e-02 6.7482710e-01 -9.9830681e-01] [-4.6479812e-01 4.7930333e-01 2.9433718e-01 ... 2.2353580e+00 -3.9456505e-02 -2.1957929e+00] ... [ 7.1570045e-01 -6.2667298e-01 8.1632096e-01 ... -4.5757708e-01 -8.8998564e-03 -6.1239028e-01] [ 1.1382580e+00 3.8459250e-01 8.2235563e-01 ... 2.8059819e+00 -6.8338484e-01 5.9043229e-01] [ 2.3534867e-01 -1.4525057e-01 -1.2863575e+00 ... 2.5161451e-01 8.2636720e-01 3.4671512e-01]] [[ 6.2538445e-02 9.6683896e-01 -5.6053832e-02 ... 1.9607599e-01 -8.9066643e-01 -1.8803213e-02] [ 4.1021982e-01 1.0211513e+00 -7.2397649e-01 ... -9.6066393e-02 -1.0011137e+00 4.5410568e-01] [-1.1773204e-01 -8.7835282e-01 -1.3575778e+00 ... -1.3470498e+00 1.6718143e+00 1.7671034e-01] ... [-1.7671937e+00 6.8233812e-01 -8.9596048e-02 ... 5.4680216e-01 -9.4474763e-01 -2.1486926e+00] [-6.5395468e-01 -8.3680123e-01 -6.1943895e-01 ... -2.4677377e+00 2.6189485e-01 -9.7922909e-01] [ 2.3114350e+00 -9.2848346e-02 -4.0800056e-01 ... -4.1519424e-01 2.6924491e-01 -1.5256592e+00]]]] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_avg_pool3d.___torch_mangle_2.aten_adaptive_avg_pool3d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=3]() %3 : str = prim::Constant[value="AssertionError: "]() %4 : NoneType = prim::Constant() %self.output_size : int[] = prim::Constant[value=[5, 7, 9]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1230:51 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%3, %4) # <string>:5:2 -> () %9 : Tensor = aten::adaptive_avg_pool3d(%input_tensor.1, %self.output_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1231:11 return (%9) fw_re: [[[[-4.11078125e-01 -2.05700904e-01 4.47476655e-02 -1.69859141e-01 -4.70591784e-01 -4.21437055e-01 -3.11489642e-01 4.30164039e-02 -3.17967027e-01] [ 5.31758964e-01 -1.16243958e-01 -1.37330830e-01 2.83321999e-02 -2.40186840e-01 -5.76821789e-02 2.83184946e-01 1.64301395e-01 6.29246235e-03] [ 6.74680993e-02 -6.15450740e-01 -3.40298384e-01 2.27787048e-02 2.29383767e-01 5.60287774e-01 5.56338191e-01 2.41562456e-01 -8.00236687e-02] [-4.87687260e-01 -5.15247285e-01 -1.96484968e-01 -7.80562833e-02 2.82750338e-01 6.17401361e-01 2.80259639e-01 1.41258523e-01 3.86048555e-01] [ 1.30436316e-01 3.85262728e-01 5.50504506e-01 -6.43253177e-02 2.71188170e-01 3.58636230e-01 -3.01383644e-01 2.34774426e-02 2.73618579e-01] [ 3.66562098e-01 9.23264444e-01 4.29226339e-01 4.69683111e-03 4.21573579e-01 2.94298202e-01 -2.34643877e-01 3.59082282e-01 5.95941067e-01] [ 3.56254220e-01 2.76975423e-01 -1.95488200e-01 1.38569742e-01 3.08814019e-01 4.11961041e-02 -3.12114358e-01 2.21692652e-01 4.57470357e-01]] [[ 1.24608554e-01 -2.37106774e-02 -2.22556293e-01 -3.29380959e-01 -1.67468190e-01 -9.53064393e-03 -1.78954661e-01 -2.32848618e-02 
-3.44307452e-01] [ 1.55287698e-01 6.09426983e-02 -2.83623546e-01 -4.64842319e-01 -1.87882841e-01 1.35597482e-01 1.31035522e-01 1.19004630e-01 2.39846528e-01] [-3.32012363e-02 -9.70226005e-02 -4.42643017e-01 -7.01592445e-01 -2.39976421e-01 3.56159545e-02 1.73286155e-01 1.59891635e-01 -3.16801257e-02] [-1.02578156e-01 1.33310542e-01 -2.19021305e-01 -2.85004675e-01 1.39729664e-01 2.77548730e-01 2.65450150e-01 3.44355702e-01 2.01684579e-01] [-9.80541483e-02 -9.21111330e-02 2.18926474e-01 -4.08556253e-01 -6.05485678e-01 -1.57824740e-01 1.70159221e-01 7.29745701e-02 8.54149461e-02] [-1.12497680e-01 -2.55987912e-01 2.96776056e-01 1.20232999e-01 -1.96804062e-01 9.75910351e-02 3.02263141e-01 2.86120534e-01 -7.86425639e-03] [-4.05117273e-02 -3.79050255e-01 -1.78713500e-01 1.09601803e-01 1.61156729e-01 3.65624428e-01 2.25556970e-01 3.61515045e-01 1.89825907e-01]] [[-6.82219028e-01 1.81465968e-01 6.05051160e-01 -3.67499173e-01 -3.76244038e-01 -1.85056925e-01 -7.20764995e-02 -1.70398019e-02 2.51642950e-02] [-5.74485481e-01 -3.04929093e-02 3.07568759e-01 -1.51791215e-01 1.37001947e-01 2.53612131e-01 -2.50721037e-01 -9.59939063e-02 2.27851659e-01] [-6.10341251e-01 -4.72841948e-01 -2.77246535e-01 -5.65223753e-01 -2.30611444e-01 -1.03046060e-01 -6.34817541e-01 -3.76651257e-01 1.73615664e-02] [-1.38991982e-01 2.67608669e-02 -8.13835636e-02 -5.79442084e-01 -4.69679594e-01 -3.39641094e-01 -5.70946395e-01 -4.24633414e-01 -9.02074650e-02] [ 1.25219822e-02 2.22822785e-01 2.21825108e-01 -2.99877226e-01 -6.33953512e-01 -1.70533076e-01 7.76647776e-02 -3.27815682e-01 -4.81369719e-03] [-3.05830330e-01 -2.47552112e-01 3.38841081e-01 4.71294224e-01 4.87643778e-02 1.59045219e-01 5.06723404e-01 -1.55953318e-01 -4.44011122e-01] [-5.82489014e-01 -2.24826947e-01 2.65741765e-01 3.31071198e-01 2.74123043e-01 4.18277770e-01 4.89924550e-01 2.02489793e-02 -3.87563467e-01]] [[ 2.27483198e-01 1.55320525e-01 3.57105613e-01 3.77827317e-01 1.49772599e-01 1.41051918e-01 2.65656084e-01 -2.68143862e-01 
-5.97135723e-02] [ 1.22284330e-01 -4.82768603e-02 -1.27416372e-01 1.58448017e-03 1.91767946e-01 -4.12587039e-02 -2.19587639e-01 -2.43932009e-01 1.17667615e-01] [-3.86839747e-01 -6.80956662e-01 -7.30824292e-01 -2.84938484e-01 8.31205472e-02 -3.48000735e-01 -5.09369612e-01 -2.06944764e-01 -1.43110588e-01] [-1.70365945e-01 -6.27074614e-02 6.46875724e-02 -3.34958225e-01 -1.93801925e-01 -1.62235752e-01 -5.20627558e-01 -4.10584122e-01 -3.31883341e-01] [-3.58334064e-01 3.76530975e-01 5.73259890e-01 6.35067821e-02 -1.49704531e-01 -3.18850666e-01 -4.17109698e-01 -2.15459898e-01 -3.08677822e-01] [-3.69048029e-01 9.32927895e-03 2.95555264e-01 1.78607151e-01 -1.26431257e-01 -3.21897715e-01 -2.19291463e-01 -2.20094800e-01 -3.32386404e-01] [-4.46840644e-01 -2.09609866e-01 2.34004989e-01 -7.79231489e-02 -5.13369329e-02 2.26100698e-01 4.79805283e-02 -3.52382064e-01 -4.81323004e-01]] [[ 5.04323423e-01 1.39412984e-01 1.24067523e-01 3.52959365e-01 2.21477553e-01 2.46153325e-02 -1.15103461e-01 -4.52485681e-02 2.06794679e-01] [ 2.47953162e-02 -3.08773845e-01 -5.53585172e-01 -3.50514293e-01 -1.38256550e-01 -4.25564319e-01 -5.13698816e-01 -9.56966281e-02 2.48200417e-01] [-3.73717725e-01 -3.82079959e-01 -4.15330470e-01 -1.76427588e-01 -1.82573736e-01 -4.08313572e-01 -5.63507318e-01 -2.38382339e-01 -7.21810907e-02] [-4.06116247e-01 2.17518911e-01 7.01859295e-01 1.86079383e-01 -2.43774578e-01 -2.82969028e-01 -5.18604577e-01 -1.26381502e-01 1.92922413e-01] [-4.99264300e-01 -1.94800645e-01 4.54177737e-01 5.69360964e-02 -7.78146148e-01 -6.79067314e-01 -5.32347679e-01 2.84594297e-03 1.40266567e-02] [-3.31995070e-01 -3.92872810e-01 3.18324119e-02 4.93752718e-01 -7.34742209e-02 -2.80239940e-01 -1.75601959e-01 -1.15156390e-01 -5.49252182e-02] [-1.22299999e-01 -3.75241995e-01 -1.52104944e-02 4.12247807e-01 1.63990885e-01 1.15313098e-01 4.63823751e-02 -1.95437878e-01 -2.48649448e-01]]] [[[-6.68230593e-01 -1.06456280e-01 -6.35193735e-02 1.14516601e-01 -1.15038455e-02 -3.21861774e-01 -4.37537104e-01 
-5.54298759e-01 -1.67435661e-01] [ 4.34343256e-02 3.28061879e-01 -1.96014792e-01 -4.18605544e-02 9.64742601e-02 1.47895366e-02 5.87012693e-02 -1.67313218e-03 -1.08135089e-01] [-5.11693358e-02 3.53088491e-02 -8.83417189e-01 -6.91450059e-01 1.95116073e-01 3.91539991e-01 3.44469041e-01 5.69429398e-02 -3.08703035e-01] [ 1.81963027e-01 2.79057860e-01 -1.83552206e-01 -3.29300791e-01 1.55615151e-01 3.83302599e-01 8.18883106e-02 -7.27495253e-02 -3.27766091e-02] [ 4.29089159e-01 2.36997679e-01 5.69903433e-01 3.92142534e-01 4.16905075e-01 4.85567391e-01 5.67797720e-02 -7.41621852e-02 -3.92547287e-02] [-2.15819478e-02 -1.30007625e-01 7.82135129e-01 8.08394969e-01 8.59568566e-02 3.01927328e-02 -1.41356170e-01 1.88167274e-01 3.57483178e-01] [-2.32391730e-01 -5.64267576e-01 -3.95031273e-03 2.08486497e-01 2.36744322e-02 -1.35356054e-01 -4.19245899e-01 9.70393941e-02 1.90465063e-01]] [[-4.81857330e-01 -3.61052722e-01 -1.05390675e-01 2.69926488e-01 3.92139666e-02 -1.68746039e-01 -2.30609894e-01 4.74186428e-02 2.56973565e-01] [ 4.04538631e-01 -2.16584697e-01 -2.44637683e-01 2.53493220e-01 -9.52205658e-02 -6.87149540e-02 2.20670089e-01 2.96180993e-01 4.03339028e-01] [ 1.05932198e-01 -2.39895239e-01 -4.67910022e-01 -1.58421963e-01 -1.96131114e-02 -1.78112611e-01 9.77464393e-02 1.08450942e-01 -7.47875124e-02] [ 2.87243072e-03 1.71743020e-01 1.06687345e-04 -1.66949809e-01 5.38179837e-02 1.07048899e-01 -1.85486898e-01 -1.31065562e-01 1.53445944e-01] [ 8.08086932e-01 5.30181944e-01 1.74432382e-01 -1.22153401e-01 -3.00169647e-01 -1.36111379e-01 -2.80421048e-01 -4.50062484e-01 -2.06854463e-01] [ 4.29673344e-01 6.99830830e-01 4.41875130e-01 1.13421880e-01 -1.49840862e-01 -1.60541967e-01 -3.10832828e-01 -1.09782495e-01 -4.35285755e-02] [-1.77956626e-01 4.75798011e-01 5.42815804e-01 3.17318648e-01 1.37158275e-01 2.13256285e-01 1.30999148e-01 1.83699191e-01 2.93323338e-01]] [[-1.53253734e-01 1.66793883e-01 1.41884804e-01 3.34358156e-01 -1.30512893e-01 -3.80427659e-01 -3.69220197e-01 
-4.31033708e-02 2.67090499e-01] [ 4.24684465e-01 2.12001443e-01 2.90882528e-01 7.21717894e-01 1.76975548e-01 8.48719776e-02 4.12733406e-01 1.45085052e-01 1.47570238e-01] [ 9.25796553e-02 -2.78494120e-01 -1.62647456e-01 5.79727054e-01 5.01904964e-01 -2.23694071e-01 -2.14069054e-01 -4.32630256e-03 4.93965626e-01] [-1.02611065e-01 -3.83535117e-01 -6.70377240e-02 2.82357901e-01 6.82559982e-02 -7.42295980e-02 -1.03176601e-01 1.00003004e-01 3.25172782e-01] [-1.34782404e-01 1.03156932e-01 9.34812576e-02 -3.72002214e-01 -5.80692053e-01 -2.37033144e-01 -2.35958815e-01 -1.97855204e-01 -2.42832452e-01] [ 2.74402559e-01 6.72176242e-01 1.99320152e-01 -1.39681876e-01 -1.90725192e-01 -2.65240908e-01 -4.86971915e-01 -3.66115451e-01 -2.74070531e-01] [-1.74153626e-01 1.18157923e-01 1.86826006e-01 3.81345570e-01 2.42188960e-01 5.61508000e-01 3.86303961e-01 -1.00885399e-01 6.85004294e-02]] [[ 2.51104027e-01 1.28650293e-01 1.82007328e-01 2.12272346e-01 1.97508976e-01 2.34074280e-01 1.22108102e-01 1.87264577e-01 9.03793201e-02] [ 3.51597935e-01 5.53024076e-02 2.01419786e-01 6.02068603e-01 5.26760519e-01 4.53763813e-01 4.98752981e-01 3.44065934e-01 -3.70038182e-01] [-5.58327325e-02 -1.15002729e-01 -7.34799206e-02 3.31593782e-01 4.98404890e-01 -4.41079885e-02 -1.70015842e-02 1.25751153e-01 -1.38665050e-01] [-2.76696030e-03 -2.07880363e-01 -1.55271096e-02 3.55692774e-01 2.70705640e-01 -1.71510339e-01 -1.11354299e-01 -1.64728656e-01 1.69744074e-01] [-1.19550407e-01 2.75427252e-01 2.18449578e-01 -1.65390447e-01 9.93904471e-02 3.68361712e-01 -2.81074154e-03 -2.64404565e-01 -5.80826737e-02] [-1.32743539e-02 1.70618013e-01 4.90851253e-02 -1.77518249e-01 1.65169463e-01 3.98420602e-01 2.13055864e-01 -3.65700275e-02 -2.09422544e-01] [ 1.68180466e-01 -5.19295633e-01 -4.97520685e-01 -2.10465252e-01 2.54238565e-02 4.89693165e-01 5.10174751e-01 1.91944733e-01 9.08690616e-02]] [[ 7.29275465e-01 2.37992421e-01 -1.90390050e-02 -4.08540368e-01 7.12274835e-02 6.38119340e-01 2.16496721e-01 7.55013078e-02 
1.34118348e-01] [ 2.22200900e-01 -3.85078788e-03 -7.13882782e-03 -7.61159062e-02 1.48596242e-01 4.90542948e-01 2.63660014e-01 2.69724548e-01 -1.57151565e-01] [-2.39664644e-01 9.31193531e-02 -2.05720812e-01 -1.07509404e-01 5.70521057e-02 -4.13366586e-01 -2.21958607e-01 -5.28972447e-02 -4.84550238e-01] [ 2.32053205e-01 2.86657363e-01 -1.85851052e-01 3.13116424e-02 -3.99766350e-03 -2.01929614e-01 1.83472157e-01 -4.34604734e-02 9.82914343e-02] [ 1.30637586e-01 4.95958120e-01 3.65924627e-01 4.63344157e-01 4.54908103e-01 4.19879198e-01 4.26518500e-01 -1.02794871e-01 -3.00974786e-01] [-1.20466694e-01 6.66372404e-02 4.68832761e-01 5.33034503e-01 2.91549683e-01 3.22297394e-01 2.64588356e-01 -1.18458524e-01 -5.65627158e-01] [ 2.92597413e-01 -2.72718608e-01 -4.97000292e-02 4.62738842e-01 3.96950930e-01 2.39245221e-01 1.90136090e-01 1.06098309e-01 -1.11702353e-01]]]]; ov_res: [[[[-4.11078125e-01 -2.05700904e-01 4.47476655e-02 -1.69859141e-01 -4.70591784e-01 -4.21437055e-01 -3.11489642e-01 4.30164039e-02 -3.17967027e-01] [ 5.31758964e-01 -1.16243958e-01 -1.37330830e-01 2.83321999e-02 -2.40186840e-01 -5.76821789e-02 2.83184946e-01 1.64301395e-01 6.29246235e-03] [ 6.74680993e-02 -6.15450740e-01 -3.40298384e-01 2.27787048e-02 2.29383767e-01 5.60287774e-01 5.56338191e-01 2.41562456e-01 -8.00236687e-02] [-4.87687260e-01 -5.15247285e-01 -1.96484968e-01 -7.80562833e-02 2.82750338e-01 6.17401361e-01 2.80259639e-01 1.41258523e-01 3.86048555e-01] [ 1.30436316e-01 3.85262728e-01 5.50504506e-01 -6.43253177e-02 2.71188170e-01 3.58636230e-01 -3.01383644e-01 2.34774426e-02 2.73618579e-01] [ 3.66562098e-01 9.23264444e-01 4.29226339e-01 4.69683111e-03 4.21573579e-01 2.94298202e-01 -2.34643877e-01 3.59082282e-01 5.95941067e-01] [ 3.56254220e-01 2.76975423e-01 -1.95488200e-01 1.38569742e-01 3.08814019e-01 4.11961041e-02 -3.12114358e-01 2.21692652e-01 4.57470357e-01]] [[ 1.24608554e-01 -2.37106774e-02 -2.22556293e-01 -3.29380959e-01 -1.67468190e-01 -9.53064393e-03 -1.78954661e-01 -2.32848618e-02 
-3.44307452e-01] [ 1.55287698e-01 6.09426983e-02 -2.83623546e-01 -4.64842319e-01 -1.87882841e-01 1.35597482e-01 1.31035522e-01 1.19004630e-01 2.39846528e-01] [-3.32012363e-02 -9.70226005e-02 -4.42643017e-01 -7.01592445e-01 -2.39976421e-01 3.56159545e-02 1.73286155e-01 1.59891635e-01 -3.16801257e-02] [-1.02578148e-01 1.33310542e-01 -2.19021291e-01 -2.85004675e-01 1.39729664e-01 2.77548730e-01 2.65450120e-01 3.44355702e-01 2.01684579e-01] [-9.80541483e-02 -9.21111330e-02 2.18926474e-01 -4.08556253e-01 -6.05485678e-01 -1.57824740e-01 1.70159221e-01 7.29745701e-02 8.54149461e-02] [-1.12497680e-01 -2.55987912e-01 2.96776056e-01 1.20232999e-01 -1.96804062e-01 9.75910351e-02 3.02263141e-01 2.86120534e-01 -7.86425639e-03] [-4.05117273e-02 -3.79050255e-01 -1.78713500e-01 1.09601803e-01 1.61156729e-01 3.65624428e-01 2.25556970e-01 3.61515045e-01 1.89825907e-01]] [[-6.82219028e-01 1.81465968e-01 6.05051160e-01 -3.67499173e-01 -3.76244038e-01 -1.85056925e-01 -7.20764995e-02 -1.70398019e-02 2.51642950e-02] [-5.74485481e-01 -3.04929093e-02 3.07568759e-01 -1.51791215e-01 1.37001947e-01 2.53612131e-01 -2.50721037e-01 -9.59939063e-02 2.27851659e-01] [-6.10341251e-01 -4.72841948e-01 -2.77246535e-01 -5.65223753e-01 -2.30611444e-01 -1.03046060e-01 -6.34817541e-01 -3.76651257e-01 1.73615664e-02] [-1.38991982e-01 2.67608669e-02 -8.13835636e-02 -5.79442084e-01 -4.69679594e-01 -3.39641094e-01 -5.70946395e-01 -4.24633414e-01 -9.02074650e-02] [ 1.25219822e-02 2.22822785e-01 2.21825108e-01 -2.99877226e-01 -6.33953512e-01 -1.70533076e-01 7.76647776e-02 -3.27815682e-01 -4.81369719e-03] [-3.05830330e-01 -2.47552112e-01 3.38841081e-01 4.71294224e-01 4.87643778e-02 1.59045219e-01 5.06723404e-01 -1.55953318e-01 -4.44011122e-01] [-5.82489014e-01 -2.24826947e-01 2.65741765e-01 3.31071198e-01 2.74123043e-01 4.18277770e-01 4.89924550e-01 2.02489793e-02 -3.87563467e-01]] [[ 2.27483198e-01 1.55320525e-01 3.57105613e-01 3.77827317e-01 1.49772599e-01 1.41051918e-01 2.65656084e-01 -2.68143862e-01 
-5.97135723e-02] [ 1.22284330e-01 -4.82768603e-02 -1.27416372e-01 1.58448017e-03 1.91767946e-01 -4.12587039e-02 -2.19587639e-01 -2.43932009e-01 1.17667615e-01] [-3.86839747e-01 -6.80956662e-01 -7.30824292e-01 -2.84938484e-01 8.31205472e-02 -3.48000735e-01 -5.09369612e-01 -2.06944764e-01 -1.43110588e-01] [-1.70365945e-01 -6.27074540e-02 6.46875724e-02 -3.34958225e-01 -1.93801925e-01 -1.62235752e-01 -5.20627558e-01 -4.10584122e-01 -3.31883311e-01] [-3.58334064e-01 3.76530975e-01 5.73259890e-01 6.35067821e-02 -1.49704531e-01 -3.18850666e-01 -4.17109698e-01 -2.15459898e-01 -3.08677822e-01] [-3.69048029e-01 9.32927895e-03 2.95555264e-01 1.78607151e-01 -1.26431257e-01 -3.21897715e-01 -2.19291463e-01 -2.20094800e-01 -3.32386404e-01] [-4.46840644e-01 -2.09609866e-01 2.34004989e-01 -7.79231489e-02 -5.13369329e-02 2.26100698e-01 4.79805283e-02 -3.52382064e-01 -4.81323004e-01]] [[ 5.04323423e-01 1.39412984e-01 1.24067523e-01 3.52959365e-01 2.21477553e-01 2.46153325e-02 -1.15103461e-01 -4.52485681e-02 2.06794679e-01] [ 2.47953162e-02 -3.08773845e-01 -5.53585172e-01 -3.50514293e-01 -1.38256550e-01 -4.25564319e-01 -5.13698816e-01 -9.56966281e-02 2.48200417e-01] [-3.73717725e-01 -3.82079959e-01 -4.15330470e-01 -1.76427588e-01 -1.82573736e-01 -4.08313572e-01 -5.63507318e-01 -2.38382339e-01 -7.21810907e-02] [-4.06116247e-01 2.17518911e-01 7.01859295e-01 1.86079383e-01 -2.43774578e-01 -2.82969028e-01 -5.18604577e-01 -1.26381502e-01 1.92922413e-01] [-4.99264300e-01 -1.94800645e-01 4.54177737e-01 5.69360964e-02 -7.78146148e-01 -6.79067314e-01 -5.32347679e-01 2.84594297e-03 1.40266567e-02] [-3.31995070e-01 -3.92872810e-01 3.18324119e-02 4.93752718e-01 -7.34742209e-02 -2.80239940e-01 -1.75601959e-01 -1.15156390e-01 -5.49252182e-02] [-1.22299999e-01 -3.75241995e-01 -1.52104944e-02 4.12247807e-01 1.63990885e-01 1.15313098e-01 4.63823751e-02 -1.95437878e-01 -2.48649448e-01]]] [[[-6.68230593e-01 -1.06456280e-01 -6.35193735e-02 1.14516601e-01 -1.15038455e-02 -3.21861774e-01 -4.37537104e-01 
-5.54298759e-01 -1.67435661e-01] [ 4.34343256e-02 3.28061879e-01 -1.96014792e-01 -4.18605544e-02 9.64742601e-02 1.47895366e-02 5.87012693e-02 -1.67313218e-03 -1.08135089e-01] [-5.11693358e-02 3.53088491e-02 -8.83417189e-01 -6.91450059e-01 1.95116073e-01 3.91539991e-01 3.44469041e-01 5.69429398e-02 -3.08703035e-01] [ 1.81963027e-01 2.79057860e-01 -1.83552206e-01 -3.29300791e-01 1.55615151e-01 3.83302599e-01 8.18883106e-02 -7.27495253e-02 -3.27766091e-02] [ 4.29089159e-01 2.36997679e-01 5.69903433e-01 3.92142534e-01 4.16905075e-01 4.85567391e-01 5.67797720e-02 -7.41621852e-02 -3.92547287e-02] [-2.15819478e-02 -1.30007625e-01 7.82135129e-01 8.08394969e-01 8.59568566e-02 3.01927328e-02 -1.41356170e-01 1.88167274e-01 3.57483178e-01] [-2.32391730e-01 -5.64267576e-01 -3.95031273e-03 2.08486497e-01 2.36744322e-02 -1.35356054e-01 -4.19245899e-01 9.70393941e-02 1.90465063e-01]] [[-4.81857330e-01 -3.61052722e-01 -1.05390675e-01 2.69926488e-01 3.92139666e-02 -1.68746039e-01 -2.30609894e-01 4.74186428e-02 2.56973565e-01] [ 4.04538631e-01 -2.16584697e-01 -2.44637683e-01 2.53493220e-01 -9.52205658e-02 -6.87149540e-02 2.20670089e-01 2.96180993e-01 4.03339028e-01] [ 1.05932198e-01 -2.39895239e-01 -4.67910022e-01 -1.58421963e-01 -1.96131114e-02 -1.78112611e-01 9.77464393e-02 1.08450942e-01 -7.47875124e-02] [ 2.87243072e-03 1.71743020e-01 1.06687345e-04 -1.66949809e-01 5.38179800e-02 1.07048899e-01 -1.85486883e-01 -1.31065562e-01 1.53445944e-01] [ 8.08086932e-01 5.30181944e-01 1.74432382e-01 -1.22153401e-01 -3.00169647e-01 -1.36111379e-01 -2.80421048e-01 -4.50062484e-01 -2.06854463e-01] [ 4.29673344e-01 6.99830830e-01 4.41875130e-01 1.13421880e-01 -1.49840862e-01 -1.60541967e-01 -3.10832828e-01 -1.09782495e-01 -4.35285755e-02] [-1.77956626e-01 4.75798011e-01 5.42815804e-01 3.17318648e-01 1.37158275e-01 2.13256285e-01 1.30999148e-01 1.83699191e-01 2.93323338e-01]] [[-1.53253734e-01 1.66793883e-01 1.41884804e-01 3.34358156e-01 -1.30512893e-01 -3.80427659e-01 -3.69220197e-01 
-4.31033708e-02 2.67090499e-01] [ 4.24684465e-01 2.12001443e-01 2.90882528e-01 7.21717894e-01 1.76975548e-01 8.48719776e-02 4.12733406e-01 1.45085052e-01 1.47570238e-01] [ 9.25796553e-02 -2.78494120e-01 -1.62647456e-01 5.79727054e-01 5.01904964e-01 -2.23694071e-01 -2.14069054e-01 -4.32630256e-03 4.93965626e-01] [-1.02611065e-01 -3.83535117e-01 -6.70377240e-02 2.82357901e-01 6.82559982e-02 -7.42295980e-02 -1.03176601e-01 1.00003004e-01 3.25172782e-01] [-1.34782404e-01 1.03156932e-01 9.34812576e-02 -3.72002214e-01 -5.80692053e-01 -2.37033144e-01 -2.35958815e-01 -1.97855204e-01 -2.42832452e-01] [ 2.74402559e-01 6.72176242e-01 1.99320152e-01 -1.39681876e-01 -1.90725192e-01 -2.65240908e-01 -4.86971915e-01 -3.66115451e-01 -2.74070531e-01] [-1.74153626e-01 1.18157923e-01 1.86826006e-01 3.81345570e-01 2.42188960e-01 5.61508000e-01 3.86303961e-01 -1.00885399e-01 6.85004294e-02]] [[ 2.51104027e-01 1.28650293e-01 1.82007328e-01 2.12272346e-01 1.97508976e-01 2.34074280e-01 1.22108102e-01 1.87264577e-01 9.03793201e-02] [ 3.51597935e-01 5.53024076e-02 2.01419786e-01 6.02068603e-01 5.26760519e-01 4.53763813e-01 4.98752981e-01 3.44065934e-01 -3.70038182e-01] [-5.58327325e-02 -1.15002729e-01 -7.34799206e-02 3.31593782e-01 4.98404890e-01 -4.41079885e-02 -1.70015842e-02 1.25751153e-01 -1.38665050e-01] [-2.76696030e-03 -2.07880363e-01 -1.55271096e-02 3.55692774e-01 2.70705640e-01 -1.71510339e-01 -1.11354299e-01 -1.64728656e-01 1.69744074e-01] [-1.19550407e-01 2.75427252e-01 2.18449578e-01 -1.65390447e-01 9.93904471e-02 3.68361712e-01 -2.81074154e-03 -2.64404565e-01 -5.80826737e-02] [-1.32743539e-02 1.70618013e-01 4.90851253e-02 -1.77518249e-01 1.65169463e-01 3.98420602e-01 2.13055864e-01 -3.65700275e-02 -2.09422544e-01] [ 1.68180466e-01 -5.19295633e-01 -4.97520685e-01 -2.10465252e-01 2.54238565e-02 4.89693165e-01 5.10174751e-01 1.91944733e-01 9.08690616e-02]] [[ 7.29275465e-01 2.37992421e-01 -1.90390050e-02 -4.08540368e-01 7.12274835e-02 6.38119340e-01 2.16496721e-01 7.55013078e-02 
1.34118348e-01] [ 2.22200900e-01 -3.85078788e-03 -7.13882782e-03 -7.61159062e-02 1.48596242e-01 4.90542948e-01 2.63660014e-01 2.69724548e-01 -1.57151565e-01] [-2.39664644e-01 9.31193531e-02 -2.05720812e-01 -1.07509404e-01 5.70521057e-02 -4.13366586e-01 -2.21958607e-01 -5.28972447e-02 -4.84550238e-01] [ 2.32053205e-01 2.86657363e-01 -1.85851052e-01 3.13116424e-02 -3.99766350e-03 -2.01929614e-01 1.83472157e-01 -4.34604734e-02 9.82914343e-02] [ 1.30637586e-01 4.95958120e-01 3.65924627e-01 4.63344157e-01 4.54908103e-01 4.19879198e-01 4.26518500e-01 -1.02794871e-01 -3.00974786e-01] [-1.20466694e-01 6.66372404e-02 4.68832761e-01 5.33034503e-01 2.91549683e-01 3.22297394e-01 2.64588356e-01 -1.18458524e-01 -5.65627158e-01] [ 2.92597413e-01 -2.72718608e-01 -4.97000292e-02 4.62738842e-01 3.96950930e-01 2.39245221e-01 1.90136090e-01 1.06098309e-01 -1.11702353e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_avg_pool3d.py::TestAdaptiveAvgPool3D::test_adaptive_avg_pool3d[ ie_device:CPU - precision:FP32 - output_size:7 - input_tensor:[[[[[ 0.6750274 -0.52947855 -1.0510274 ... -0.9260406 -1.761402 -0.9858571 ] [ 2.0246472 1.3003415 -0.13044687 ... -1.6204368 0.2818884 -0.60950613] [ 0.28208718 -1.2194207 1.0159653 ... -0.41373488 -0.99766797 -0.24041523] ... [ 0.48282078 -1.1239302 0.02843587 ... -0.71503603 0.6534662 0.4188135 ] [ 0.40406922 -1.4748852 1.2540164 ... 1.6250517 -0.7066055 -0.48428303] [-0.65493184 1.536664 -0.31111434 ... 0.557822 0.8869128 -1.6934036 ]] [[ 0.7292678 0.07061668 -1.0223473 ... 0.95373696 0.6173491 -0.45709842] [ 1.4305075 0.41382825 1.24594 ... -0.398382 1.1687646 1.747247 ] [ 0.8794458 0.7050558 -0.24334209 ... -0.9137401 1.7045393 -0.4218345 ] ... [ 0.57678795 -1.5589333 -0.40251887 ... -0.04396934 -0.06045668 0.7406191 ] [ 1.5756857 -2.0828176 1.3952135 ... -0.3303899 -0.4380001 -0.61964315] [-2.7773886 0.38021424 -0.956624 ... 0.67594725 -0.9423851 0.10215356]] [[ 0.09112854 -0.78757286 0.98372304 ... 0.7053841 -0.12594485 0.3758394 ] [-1.5005482 0.5242814 -1.0806792 ... -1.1540502 -1.216528 -1.0406891 ] [-1.8478785 1.6188104 0.60401744 ... -0.65600985 0.51704794 -0.3180054 ] ... [ 0.9267348 0.9337457 0.6681243 ... -0.11904679 -0.15711573 0.93636817] [ 0.57808864 -0.332161 -1.613622 ... 1.7955519 -0.02651354 -1.9947116 ] [ 0.3498302 -0.19556746 0.585111 ... -3.2975013 1.1621848 0.23655939]] ... [[ 2.1814766 -0.42563906 -0.07700434 ... -0.53128225 -0.03745885 0.94462293] [ 1.472312 -0.5113537 -1.272005 ... -0.97998345 0.7632413 -0.1564714 ] [ 0.7353792 -0.13438107 -0.3819322 ... -0.15469684 0.68157035 -0.7616137 ] ... [-0.1949445 0.59401196 1.1702275 ... -1.6014842 -0.20763052 -0.83426774] [ 0.626467 0.4832535 0.01102521 ... 1.0266385 -0.02824205 0.46031135] [-0.85450536 -0.49608487 -1.3702333 ... -1.3460124 0.15207903 -0.01099042]] [[-0.20366985 -0.6548867 -0.97249347 ... 
0.46302593 -0.36954644 -0.69484496] [ 1.1971511 0.8974348 0.65310776 ... 0.18065329 -1.413187 -0.26280606] [ 0.8473137 0.26597053 0.46695563 ... 0.91817456 0.10147902 -0.5160176 ] ... [ 1.63903 0.79923904 -0.7654221 ... -1.3473555 -2.0917087 -0.6687809 ] [ 0.10943478 0.5097689 0.57067096 ... 1.1554998 2.3333564 0.09836157] [-0.15953125 -1.6072437 -1.7836018 ... 0.8449579 -0.46176457 0.8727772 ]] [[-0.20156522 -0.67464983 1.6955296 ... -1.2751569 0.73626053 -0.543582 ] [-0.1659209 0.92391646 -0.7679348 ... -0.07506115 0.28156137 0.39920726] [-2.094873 -1.3796912 0.643832 ... -0.04314093 0.32907686 -1.3907567 ] ... [-1.0032145 0.01588529 0.0404608 ... -0.29535833 -0.39608794 -1.6667883 ] [-0.2246718 -0.20661612 -0.29374757 ... -0.27083984 0.93433696 0.35732117] [ 2.6965034 0.31061867 1.7420624 ... -0.23209336 -0.93530625 0.6523891 ]]] [[[ 2.706187 -3.0322587 -1.9081064 ... 0.7283488 -0.04794742 0.91364354] [ 0.80018705 -0.7671929 -1.8022435 ... -1.5633122 1.4194466 1.1269051 ] [-0.29453647 -1.106829 -0.66306984 ... 0.8926227 0.18560319 0.49385738] ... [-0.34975246 1.1075768 0.96871674 ... -0.5546797 -0.81069183 -0.5858185 ] [ 0.11887803 -1.3348 2.0177402 ... 0.55222136 0.5303798 1.6923729 ] [-1.3927875 0.42775464 -0.629481 ... 0.92130643 0.02605388 -0.8802159 ]] [[-0.3206207 -0.7637518 -0.95880735 ... 0.0985759 0.839436 -0.62324774] [ 0.49020365 1.3907264 0.72230864 ... 0.6602552 -0.3843507 1.0993619 ] [-0.0558609 -0.05812433 0.92553127 ... -0.3669914 -0.12281457 0.06550856] ... [ 1.5886163 1.2332557 -0.38938886 ... -0.06936838 -0.2734803 -1.1043248 ] [-0.6271722 0.06817702 -0.19999726 ... -0.15770552 -1.0133241 -0.33918434] [ 0.7350949 -0.12656708 1.1939455 ... 0.21440715 -0.40823248 1.0185643 ]] [[ 0.7377359 -0.8300177 -1.0766298 ... -2.0571415 0.6365726 0.38393214] [ 0.09577733 -0.49613732 0.67632455 ... 1.0116483 -0.6291124 1.2298497 ] [ 1.9512712 0.29137802 0.70198286 ... -0.24776326 -0.83065665 0.81060606] ... [-0.7874076 -0.7132398 0.5118683 ... 
1.120565 -0.8272467 -1.5021037 ] [ 1.1298773 -0.08396461 -0.29258165 ... -0.6440496 -0.9135012 0.628351 ] [-0.5922622 0.01449709 -0.34151635 ... -0.43161443 -1.8962408 -0.5323735 ]] ... [[ 1.7681766 2.3496604 -0.69343966 ... 0.6851386 0.03248706 0.5555838 ] [-0.8219531 -0.9320715 0.00665106 ... 0.6417402 0.80125844 0.2582212 ] [ 0.37478152 -1.5713156 1.2746387 ... 0.31453222 1.2137058 2.1733575 ] ... [ 2.0547493 0.56847346 -1.6452566 ... 0.8880564 1.2032093 -0.03186269] [-1.7024984 -2.2148924 -0.32915714 ... -0.82830536 -0.48574445 -0.2587135 ] [-0.43530554 -1.0800319 -0.9905766 ... 1.3704839 -0.8472254 1.1817372 ]] [[ 0.05986854 1.2856232 -0.6177747 ... -0.26461822 1.5861459 -0.15799014] [ 0.9962945 0.48416355 1.0785571 ... 1.1029189 -0.6692258 -0.31980598] [ 0.7508004 -1.9038049 2.3935516 ... 0.3934067 -0.909985 0.2512394 ] ... [ 1.2717466 1.2417549 0.803297 ... -0.25844917 -0.62372243 0.28100547] [-1.3979619 -0.6023846 -0.07263488 ... -0.5740154 0.69222885 -1.1171021 ] [-0.20074627 -1.9274722 -0.24093167 ... 0.07816572 0.5422447 -0.19835202]] [[ 0.5151694 0.19797453 0.71569234 ... 0.44548082 0.8424307 -1.0661782 ] [-1.5072691 -0.5466803 1.263944 ... 0.53587717 -0.40651295 -1.5978879 ] [ 0.36465913 -0.5635329 -0.46114 ... 0.03028747 1.786946 0.5123203 ] ... [ 0.06026702 -0.7654359 0.56040835 ... -2.0763602 0.26728344 -0.85051537] [ 2.050332 0.7479984 -0.6446392 ... -1.2944051 -1.7548354 0.2096386 ] [-0.5268953 0.33837456 1.3818275 ... 0.8184589 -0.37372634 0.34165382]]]]] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_avg_pool3d.___torch_mangle_4.aten_adaptive_avg_pool3d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=3]() %3 : str = prim::Constant[value="AssertionError: "]() %4 : NoneType = prim::Constant() %5 : int[] = prim::Constant[value=[7, 7, 7]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1230:51 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%3, %4) # <string>:5:2 -> () %9 : Tensor = aten::adaptive_avg_pool3d(%input_tensor.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1231:11 return (%9) fw_re: [[[[[ 7.64344752e-01 3.71782780e-02 -5.45920670e-01 -1.82411686e-01 1.49470925e-01 -2.10565269e-01 1.73181295e-04] [ 7.27061570e-01 3.85990113e-01 -7.65044317e-02 1.59778357e-01 9.25610065e-02 -1.48596197e-01 3.29126954e-01] [ 5.52861214e-01 5.79627901e-02 -2.51868963e-01 2.03857392e-01 3.24230313e-01 -2.75669724e-01 -3.07125092e-01] [ 5.60825288e-01 4.27327961e-01 -1.18243136e-01 1.47111133e-01 4.38845009e-02 -1.88810024e-02 -1.42966196e-01] [ 3.76488328e-01 2.52314210e-01 3.08393985e-01 3.16352904e-01 -1.62781373e-01 1.64720103e-01 2.17067778e-01] [-4.00150329e-01 -4.95677412e-01 1.61047518e-01 -5.18542230e-01 -1.87710837e-01 -1.99244916e-03 -6.20112084e-02] [-3.86673748e-01 -3.24166194e-02 -5.09107001e-02 -4.78755921e-01 5.20819938e-03 1.66044161e-01 -4.86906797e-01]] [[ 1.21438652e-01 4.34737504e-02 -4.36491400e-01 -4.15591002e-01 1.40526265e-01 6.87911808e-02 1.33617446e-01] [ 2.77937800e-01 4.73489016e-01 -4.37803060e-01 -2.07588106e-01 -1.77061856e-01 -1.18544810e-01 2.67567784e-01] [ 5.52947581e-01 3.06946397e-01 -6.47969186e-01 -4.61315811e-01 -1.41209364e-02 4.31455851e-01 1.47825480e-01] [ 2.24457324e-01 -1.56379625e-01 
-2.31048286e-01 -8.11224043e-01 -3.02798718e-01 2.60427266e-01 -7.19696432e-02] [ 3.07258189e-01 1.17242619e-01 3.84784728e-01 -2.72937506e-01 -4.11191493e-01 1.62342004e-02 -1.73021778e-02] [ 7.71413743e-02 -3.74121130e-01 -2.44103861e-03 -1.68417677e-01 -5.82089126e-02 7.75074735e-02 -2.02431694e-01] [-3.13014477e-01 -3.52531612e-01 -1.85736060e-01 -5.28200805e-01 6.16168380e-02 -1.75138250e-01 -3.15044463e-01]] [[-1.14071086e-01 2.56425977e-01 4.90679359e-03 -1.53884619e-01 -5.20985007e-01 -3.52015972e-01 -8.45584720e-02] [-1.49558574e-01 5.47228515e-01 5.66457212e-02 -1.80002525e-02 -2.97098249e-01 -2.11154491e-01 -3.95201713e-01] [-1.52636975e-01 2.42898896e-01 -3.93813640e-01 -5.36929846e-01 -4.08009678e-01 2.32177973e-01 9.34938490e-02] [-2.64949650e-01 -5.66177845e-01 -4.61297512e-01 -5.55183232e-01 -1.62358806e-01 3.45727712e-01 2.82321781e-01] [ 6.72482669e-01 2.60446578e-01 -9.23419073e-02 -4.97666836e-01 1.43478796e-01 3.12329829e-01 5.77651858e-02] [ 9.28714752e-01 3.71255368e-01 -4.83402796e-02 -1.26491070e-01 2.00027943e-01 1.43011063e-01 -2.70145178e-01] [ 2.78882056e-01 -2.49311298e-01 -3.87646109e-01 -2.06502959e-01 1.19842090e-01 -3.54867965e-01 -8.00950527e-02]] [[ 2.63279468e-01 3.50479454e-01 1.01929076e-01 -1.81066379e-01 -4.22510743e-01 -4.74750996e-01 -2.78541446e-01] [ 1.24243654e-01 4.94673520e-01 3.84539634e-01 3.15050066e-01 1.41245648e-01 1.50201753e-01 -9.69153643e-02] [-2.93571413e-01 -9.97753292e-02 -3.12013894e-01 -1.21103629e-01 -2.10344672e-01 8.35378319e-02 2.89749920e-01] [ 3.28323357e-02 -2.51280576e-01 -4.39561337e-01 -6.22728653e-02 -1.03044929e-02 -7.48572052e-02 1.36475638e-01] [ 5.16240060e-01 1.09681308e-01 7.21561760e-02 -3.75706106e-02 1.53965741e-01 -3.59917581e-02 -9.62270796e-02] [ 6.95061445e-01 1.83209702e-01 -5.14508486e-02 -1.29047453e-01 -2.52423316e-01 -3.10847580e-01 -1.52375206e-01] [ 4.82325613e-01 -4.21896875e-02 -6.25204980e-01 -2.01388866e-01 -8.34069252e-02 -7.18640864e-01 1.26298368e-01]] [[ 
5.07861137e-01 -2.36727685e-01 -5.18120416e-02 -1.37092233e-01 -8.58451352e-02 -4.44812834e-01 -2.55656540e-01] [ 3.18379939e-01 -1.31710216e-01 -1.23285241e-01 1.59620911e-01 5.97482212e-02 8.64301249e-02 1.06855318e-01] [-8.49494040e-02 -1.88267529e-01 -2.15293959e-01 6.39271021e-01 1.49883494e-01 1.50079459e-01 3.88757318e-01] [-4.43377644e-01 -4.34784412e-01 -1.05761975e-01 2.34697580e-01 -1.27515644e-01 -2.19933316e-01 2.29874048e-02] [-4.23460394e-01 -1.09481454e-01 4.54925448e-01 3.10005605e-01 -1.58708632e-01 -4.63090777e-01 -1.43865928e-01] [ 2.18246073e-01 5.12799993e-02 2.92895079e-01 3.33425432e-01 -9.61267576e-02 -3.68588984e-01 -1.13705240e-01] [ 2.23358661e-01 -1.58913225e-01 -4.51643258e-01 -8.69420022e-02 -1.08585767e-01 -4.33999807e-01 2.00228021e-01]] [[ 4.94103134e-01 -2.95354962e-01 1.13178335e-01 3.55411887e-01 2.68887375e-02 -2.40567178e-01 -1.53306305e-01] [ 5.96228361e-01 -2.02540681e-03 1.68953404e-01 4.60421115e-01 1.43493295e-01 1.21564008e-02 -1.95475638e-01] [ 5.90243377e-02 -1.78068787e-01 -1.09380133e-01 5.62683523e-01 8.98528397e-02 4.76114899e-02 2.90068090e-01] [-3.51184756e-01 -3.29449773e-01 5.30865826e-02 6.73457682e-02 -3.20685804e-01 -2.21191362e-01 4.37448412e-01] [ 3.35256606e-02 -1.28439188e-01 8.30300003e-02 3.45453694e-02 -5.33562191e-02 -5.76960683e-01 -2.32355312e-01] [ 5.70782542e-01 4.21596885e-01 7.23777413e-02 -1.32743597e-01 2.52427340e-01 -9.51157808e-02 -1.17325060e-01] [-1.73555121e-01 -4.60305631e-01 -5.73412120e-01 -6.28870308e-01 -9.44257248e-03 4.59564060e-01 4.26986039e-01]] [[ 1.39726222e-01 1.37502968e-01 2.21546069e-01 -2.73169279e-01 -4.51510191e-01 -1.83931321e-01 -2.33367190e-01] [ 6.14126772e-02 2.12948889e-01 4.30921346e-01 -8.37526619e-02 -1.84102073e-01 3.49445082e-02 -3.08930337e-01] [-4.35777903e-01 -2.03303993e-04 5.85106760e-03 -1.18968457e-01 -4.93321121e-02 2.39264548e-01 6.00957461e-02] [ 3.71610850e-01 4.55226094e-01 1.47531822e-01 -1.50374740e-01 1.32050470e-01 3.11207324e-01 
4.19006437e-01] [ 1.62395328e-01 1.61040165e-02 -2.61587232e-01 4.55021784e-02 1.79535642e-01 -2.98132181e-01 -3.81498277e-01] [ 2.04856962e-01 8.37799013e-02 -3.99513960e-01 -3.58353198e-01 1.28250778e-01 2.73039192e-03 -1.37498721e-01] [ 1.78532854e-01 -9.47610587e-02 -4.29412514e-01 -5.61133564e-01 5.42125814e-02 4.21018392e-01 4.81433898e-01]]] [[[ 6.29350096e-02 -8.89915705e-01 -2.96462029e-01 -9.18992981e-02 8.63326620e-03 2.18806520e-01 5.42905927e-01] [ 4.98216823e-02 -1.69861600e-01 -1.90623388e-01 -1.25182629e-01 -2.09265947e-02 9.00573581e-02 4.85439658e-01] [-1.40305310e-01 1.10254765e-01 -1.02533817e-01 1.84383988e-01 4.51263100e-01 -2.27860108e-01 -4.82655793e-01] [-1.09375320e-01 -2.61900604e-01 -5.81304193e-01 -3.72483104e-01 3.16969827e-02 -1.51727974e-01 -4.27009076e-01] [ 1.59597993e-01 1.35026842e-01 -2.29566410e-01 3.16695899e-01 5.73626272e-02 5.25822416e-02 -1.50751024e-01] [ 2.25597382e-01 4.33910042e-01 4.83852625e-02 4.72849786e-01 4.81878728e-01 -2.24581078e-01 -2.38008916e-01] [-2.66427785e-01 1.77096516e-01 -2.58744210e-02 6.01020008e-02 2.82084495e-01 8.31382871e-02 7.83017427e-02]] [[ 3.79894674e-02 -1.66998029e-01 3.33594680e-02 3.51692080e-01 3.94500703e-01 2.19854191e-02 3.19055200e-01] [ 4.51154232e-01 5.19248784e-01 1.64276972e-01 -3.17059368e-01 1.63930561e-02 -1.13723189e-01 1.54798985e-01] [ 1.21518642e-01 2.61290282e-01 -8.72807726e-02 -2.17140049e-01 -3.60896289e-02 -6.76172256e-01 -3.93117964e-01] [-3.27875257e-01 -2.15473473e-01 -2.45325625e-01 -3.38842899e-01 -4.46031392e-02 -1.88641548e-01 -2.82901227e-02] [-8.29198211e-03 -7.23859593e-02 -3.66268724e-01 7.48773217e-02 -2.86487732e-02 -3.68598476e-02 -4.07553673e-01] [ 2.26017758e-01 1.67661160e-02 -9.03058723e-02 4.45423573e-01 2.92389840e-01 -3.47263873e-01 -6.68101788e-01] [ 6.47100359e-02 2.89990865e-02 2.17451036e-01 2.76366472e-01 1.34490371e-01 -6.56282663e-01 -4.31992650e-01]] [[-7.87831396e-02 -2.33293504e-01 -1.20385326e-01 4.05310303e-01 3.33065301e-01 
1.21794812e-01 2.99554706e-01] [-4.87675518e-02 1.75086632e-01 -1.26538739e-01 -7.36668259e-02 2.25950703e-01 -6.37482554e-02 -3.13598394e-01] [ 1.94680244e-01 -6.26772493e-02 -3.10904264e-01 -3.11662585e-01 6.69300407e-02 -2.15241536e-02 -1.56888336e-01] [-1.05081916e-01 -1.98682383e-01 -7.36056641e-02 -2.01421514e-01 9.88150295e-03 3.98674875e-01 5.43630838e-01] [-2.47608393e-01 -1.28598109e-01 6.95148334e-02 2.71119446e-01 2.06181064e-01 1.61880702e-01 2.15094388e-02] [-2.47146308e-01 2.36978754e-02 3.22875172e-01 8.17414999e-01 3.98947835e-01 -8.34708065e-02 -3.33741575e-01] [ 1.79851115e-01 -1.82384178e-01 1.04728006e-01 8.18493307e-01 3.12436074e-01 -1.81026548e-01 -3.53012174e-01]] [[ 3.09338838e-01 3.53307933e-01 2.38382056e-01 3.56293589e-01 1.41606256e-01 7.00093448e-01 7.14471519e-01] [ 1.46410480e-01 4.52185810e-01 1.58180356e-01 2.69554496e-01 2.04911113e-01 2.11858243e-01 1.14956331e-02] [ 3.09426606e-01 3.17468703e-01 -1.89780071e-01 -2.84836680e-01 1.39960781e-01 -4.60981801e-02 -4.49339986e-01] [ 1.31583408e-01 -1.28081784e-01 -1.75783262e-01 1.57190636e-01 3.84658836e-02 3.37393045e-01 2.46182784e-01] [-9.85761434e-02 -1.09076321e-01 8.00564960e-02 5.46027482e-01 3.23401064e-01 2.44722560e-01 4.31261778e-01] [ 8.09092596e-02 2.12142542e-02 2.09225371e-01 7.31885195e-01 2.25187197e-01 -2.71221578e-01 -1.12775385e-01] [ 2.76363313e-01 -1.15973510e-01 -4.20555510e-02 4.54374701e-01 7.30387196e-02 1.51477382e-03 -1.38825029e-01]] [[ 6.22018278e-01 4.62144017e-01 2.57640690e-01 1.49887279e-01 2.11981535e-01 7.18622565e-01 8.23515892e-01] [ 5.66443354e-02 2.71030486e-01 6.45247459e-01 1.57283530e-01 2.26415470e-01 5.60025573e-01 9.53497708e-01] [ 4.74220701e-02 2.87810147e-01 4.02605265e-01 -5.52027076e-02 -2.60816276e-01 -7.47593492e-02 5.63893467e-02] [ 1.59694627e-01 -8.53118077e-02 8.69832113e-02 2.96128958e-01 -1.79072559e-01 -1.81764677e-01 5.59825897e-02] [ 3.83103073e-01 -2.26845756e-01 -2.19902709e-01 2.17985779e-01 -1.50131196e-01 
3.01272333e-01 5.49887538e-01] [ 1.09442770e-01 -5.27327418e-01 -5.39162457e-01 -1.96586743e-01 -2.73079902e-01 -2.48627856e-01 -5.24853244e-02] [-5.24060369e-01 -5.98367274e-01 -3.00721794e-01 1.89580947e-01 2.10873578e-02 -4.01983351e-01 -1.76276714e-01]] [[ 6.48720324e-01 3.70171189e-01 -5.33166043e-02 -1.88917160e-01 3.57856989e-01 4.89480615e-01 2.60834277e-01] [-3.27888131e-01 1.03796244e-01 4.51223850e-01 -2.18155310e-01 5.95527947e-01 3.61043960e-01 3.49845648e-01] [-3.39000702e-01 -3.25461030e-01 3.02350789e-01 4.65194046e-01 7.84640312e-02 -5.64189479e-02 8.63995180e-02] [ 5.95382273e-01 -4.65188138e-02 1.27171651e-01 3.63255769e-01 -7.56422952e-02 -1.22307248e-01 2.17043042e-01] [ 1.17392075e+00 3.46000642e-01 2.19097674e-01 1.69988364e-01 8.66620615e-02 3.93423498e-01 7.16628909e-01] [-9.76266190e-02 -2.81350046e-01 -4.15589452e-01 -3.21824342e-01 3.54982875e-02 1.65721029e-03 -4.25877124e-02] [-1.19516170e+00 -9.32260096e-01 -4.92788821e-01 -2.28612851e-02 1.33340642e-01 -6.52094185e-03 -6.13658577e-02]] [[ 1.85643047e-01 4.82687473e-01 1.33024842e-01 -1.72101364e-01 9.29338932e-02 3.96562070e-01 -2.23628044e-01] [-2.40671217e-01 2.18132272e-01 -1.59987167e-01 -7.72203803e-01 5.71245961e-02 2.32964054e-01 -1.69113994e-01] [-2.22714692e-01 -5.45559585e-01 -4.48205858e-01 1.04946852e-01 2.78617084e-01 2.78336823e-01 2.85043418e-02] [ 3.54093760e-01 -1.85042068e-01 -5.56494057e-01 -2.62724876e-01 -1.10775597e-01 6.12144619e-02 4.05615978e-02] [ 6.24822557e-01 4.64128643e-01 -1.64345205e-01 -6.28263056e-01 -2.59985417e-01 -2.44145960e-01 1.32152796e-01] [ 3.25789601e-01 1.58545494e-01 3.39510292e-03 -2.75302887e-01 -1.81628168e-01 -7.02784419e-01 -3.62002373e-01] [-1.89844370e-01 -1.27482787e-01 -2.17961613e-02 -1.51254386e-02 6.29383996e-02 -2.33235493e-01 -2.07281247e-01]]]]]; ov_res: [[[[[ 7.64344752e-01 3.71782780e-02 -5.45920670e-01 -1.82411686e-01 1.49470925e-01 -2.10565269e-01 1.73181295e-04] [ 7.27061570e-01 3.85990113e-01 -7.65044317e-02 
1.59778357e-01 9.25610065e-02 -1.48596197e-01 3.29126954e-01] [ 5.52861214e-01 5.79627901e-02 -2.51868963e-01 2.03857392e-01 3.24230313e-01 -2.75669724e-01 -3.07125092e-01] [ 5.60825288e-01 4.27327961e-01 -1.18243136e-01 1.47111133e-01 4.38845009e-02 -1.88810024e-02 -1.42966196e-01] [ 3.76488328e-01 2.52314210e-01 3.08393985e-01 3.16352904e-01 -1.62781373e-01 1.64720103e-01 2.17067778e-01] [-4.00150329e-01 -4.95677412e-01 1.61047518e-01 -5.18542230e-01 -1.87710837e-01 -1.99244916e-03 -6.20112084e-02] [-3.86673748e-01 -3.24166194e-02 -5.09107001e-02 -4.78755921e-01 5.20819938e-03 1.66044161e-01 -4.86906797e-01]] [[ 1.21438652e-01 4.34737504e-02 -4.36491400e-01 -4.15591002e-01 1.40526265e-01 6.87911808e-02 1.33617446e-01] [ 2.77937800e-01 4.73489016e-01 -4.37803060e-01 -2.07588106e-01 -1.77061856e-01 -1.18544810e-01 2.67567784e-01] [ 5.52947581e-01 3.06946397e-01 -6.47969186e-01 -4.61315811e-01 -1.41209364e-02 4.31455851e-01 1.47825480e-01] [ 2.24457324e-01 -1.56379625e-01 -2.31048286e-01 -8.11224043e-01 -3.02798718e-01 2.60427266e-01 -7.19696432e-02] [ 3.07258189e-01 1.17242619e-01 3.84784728e-01 -2.72937506e-01 -4.11191493e-01 1.62342004e-02 -1.73021778e-02] [ 7.71413743e-02 -3.74121130e-01 -2.44103861e-03 -1.68417677e-01 -5.82089126e-02 7.75074735e-02 -2.02431694e-01] [-3.13014477e-01 -3.52531612e-01 -1.85736060e-01 -5.28200805e-01 6.16168380e-02 -1.75138250e-01 -3.15044463e-01]] [[-1.14071086e-01 2.56425977e-01 4.90679359e-03 -1.53884619e-01 -5.20985007e-01 -3.52015972e-01 -8.45584720e-02] [-1.49558574e-01 5.47228515e-01 5.66457212e-02 -1.80002525e-02 -2.97098249e-01 -2.11154491e-01 -3.95201713e-01] [-1.52636975e-01 2.42898896e-01 -3.93813640e-01 -5.36929846e-01 -4.08009678e-01 2.32177973e-01 9.34938490e-02] [-2.64949650e-01 -5.66177845e-01 -4.61297512e-01 -5.55183232e-01 -1.62358806e-01 3.45727712e-01 2.82321781e-01] [ 6.72482669e-01 2.60446578e-01 -9.23419073e-02 -4.97666836e-01 1.43478796e-01 3.12329829e-01 5.77651858e-02] [ 9.28714752e-01 3.71255368e-01 
-4.83402796e-02 -1.26491070e-01 2.00027943e-01 1.43011063e-01 -2.70145178e-01] [ 2.78882056e-01 -2.49311298e-01 -3.87646109e-01 -2.06502959e-01 1.19842090e-01 -3.54867965e-01 -8.00950527e-02]] [[ 2.63279468e-01 3.50479454e-01 1.01929076e-01 -1.81066379e-01 -4.22510743e-01 -4.74750996e-01 -2.78541446e-01] [ 1.24243654e-01 4.94673520e-01 3.84539634e-01 3.15050066e-01 1.41245648e-01 1.50201753e-01 -9.69153643e-02] [-2.93571413e-01 -9.97753292e-02 -3.12013894e-01 -1.21103629e-01 -2.10344672e-01 8.35378319e-02 2.89749920e-01] [ 3.28323357e-02 -2.51280576e-01 -4.39561307e-01 -6.22728653e-02 -1.03044920e-02 -7.48572052e-02 1.36475638e-01] [ 5.16240060e-01 1.09681308e-01 7.21561760e-02 -3.75706106e-02 1.53965741e-01 -3.59917581e-02 -9.62270796e-02] [ 6.95061445e-01 1.83209702e-01 -5.14508486e-02 -1.29047453e-01 -2.52423316e-01 -3.10847580e-01 -1.52375206e-01] [ 4.82325613e-01 -4.21896875e-02 -6.25204980e-01 -2.01388866e-01 -8.34069252e-02 -7.18640864e-01 1.26298368e-01]] [[ 5.07861137e-01 -2.36727685e-01 -5.18120416e-02 -1.37092233e-01 -8.58451352e-02 -4.44812834e-01 -2.55656540e-01] [ 3.18379939e-01 -1.31710216e-01 -1.23285241e-01 1.59620911e-01 5.97482212e-02 8.64301249e-02 1.06855318e-01] [-8.49494040e-02 -1.88267529e-01 -2.15293959e-01 6.39271021e-01 1.49883494e-01 1.50079459e-01 3.88757318e-01] [-4.43377644e-01 -4.34784412e-01 -1.05761975e-01 2.34697580e-01 -1.27515644e-01 -2.19933316e-01 2.29874048e-02] [-4.23460394e-01 -1.09481454e-01 4.54925448e-01 3.10005605e-01 -1.58708632e-01 -4.63090777e-01 -1.43865928e-01] [ 2.18246073e-01 5.12799993e-02 2.92895079e-01 3.33425432e-01 -9.61267576e-02 -3.68588984e-01 -1.13705240e-01] [ 2.23358661e-01 -1.58913225e-01 -4.51643258e-01 -8.69420022e-02 -1.08585767e-01 -4.33999807e-01 2.00228021e-01]] [[ 4.94103134e-01 -2.95354962e-01 1.13178335e-01 3.55411887e-01 2.68887375e-02 -2.40567178e-01 -1.53306305e-01] [ 5.96228361e-01 -2.02540681e-03 1.68953404e-01 4.60421115e-01 1.43493295e-01 1.21564008e-02 -1.95475638e-01] [ 
5.90243377e-02 -1.78068787e-01 -1.09380133e-01 5.62683523e-01 8.98528397e-02 4.76114899e-02 2.90068090e-01] [-3.51184756e-01 -3.29449773e-01 5.30865863e-02 6.73457682e-02 -3.20685804e-01 -2.21191362e-01 4.37448412e-01] [ 3.35256606e-02 -1.28439188e-01 8.30300003e-02 3.45453694e-02 -5.33562191e-02 -5.76960683e-01 -2.32355312e-01] [ 5.70782542e-01 4.21596885e-01 7.23777413e-02 -1.32743597e-01 2.52427340e-01 -9.51157808e-02 -1.17325060e-01] [-1.73555121e-01 -4.60305631e-01 -5.73412120e-01 -6.28870308e-01 -9.44257248e-03 4.59564060e-01 4.26986039e-01]] [[ 1.39726222e-01 1.37502968e-01 2.21546069e-01 -2.73169279e-01 -4.51510191e-01 -1.83931321e-01 -2.33367190e-01] [ 6.14126772e-02 2.12948889e-01 4.30921346e-01 -8.37526619e-02 -1.84102073e-01 3.49445082e-02 -3.08930337e-01] [-4.35777903e-01 -2.03303993e-04 5.85106760e-03 -1.18968457e-01 -4.93321121e-02 2.39264548e-01 6.00957461e-02] [ 3.71610850e-01 4.55226094e-01 1.47531822e-01 -1.50374740e-01 1.32050470e-01 3.11207324e-01 4.19006437e-01] [ 1.62395328e-01 1.61040165e-02 -2.61587232e-01 4.55021784e-02 1.79535642e-01 -2.98132181e-01 -3.81498277e-01] [ 2.04856962e-01 8.37799013e-02 -3.99513960e-01 -3.58353198e-01 1.28250778e-01 2.73039192e-03 -1.37498721e-01] [ 1.78532854e-01 -9.47610587e-02 -4.29412514e-01 -5.61133564e-01 5.42125814e-02 4.21018392e-01 4.81433898e-01]]] [[[ 6.29350096e-02 -8.89915705e-01 -2.96462029e-01 -9.18992981e-02 8.63326620e-03 2.18806520e-01 5.42905927e-01] [ 4.98216823e-02 -1.69861600e-01 -1.90623388e-01 -1.25182629e-01 -2.09265947e-02 9.00573581e-02 4.85439658e-01] [-1.40305310e-01 1.10254765e-01 -1.02533817e-01 1.84383988e-01 4.51263100e-01 -2.27860108e-01 -4.82655793e-01] [-1.09375320e-01 -2.61900604e-01 -5.81304193e-01 -3.72483104e-01 3.16969827e-02 -1.51727974e-01 -4.27009076e-01] [ 1.59597993e-01 1.35026842e-01 -2.29566410e-01 3.16695899e-01 5.73626272e-02 5.25822416e-02 -1.50751024e-01] [ 2.25597382e-01 4.33910042e-01 4.83852625e-02 4.72849786e-01 4.81878728e-01 -2.24581078e-01 
-2.38008916e-01] [-2.66427785e-01 1.77096516e-01 -2.58744210e-02 6.01020008e-02 2.82084495e-01 8.31382871e-02 7.83017427e-02]] [[ 3.79894674e-02 -1.66998029e-01 3.33594680e-02 3.51692080e-01 3.94500703e-01 2.19854191e-02 3.19055200e-01] [ 4.51154232e-01 5.19248784e-01 1.64276972e-01 -3.17059368e-01 1.63930561e-02 -1.13723189e-01 1.54798985e-01] [ 1.21518642e-01 2.61290282e-01 -8.72807726e-02 -2.17140049e-01 -3.60896289e-02 -6.76172256e-01 -3.93117964e-01] [-3.27875257e-01 -2.15473473e-01 -2.45325625e-01 -3.38842899e-01 -4.46031392e-02 -1.88641548e-01 -2.82901227e-02] [-8.29198211e-03 -7.23859593e-02 -3.66268724e-01 7.48773217e-02 -2.86487732e-02 -3.68598476e-02 -4.07553673e-01] [ 2.26017758e-01 1.67661160e-02 -9.03058723e-02 4.45423573e-01 2.92389840e-01 -3.47263873e-01 -6.68101788e-01] [ 6.47100359e-02 2.89990865e-02 2.17451036e-01 2.76366472e-01 1.34490371e-01 -6.56282663e-01 -4.31992650e-01]] [[-7.87831396e-02 -2.33293504e-01 -1.20385326e-01 4.05310303e-01 3.33065301e-01 1.21794812e-01 2.99554706e-01] [-4.87675518e-02 1.75086632e-01 -1.26538739e-01 -7.36668259e-02 2.25950703e-01 -6.37482554e-02 -3.13598394e-01] [ 1.94680244e-01 -6.26772493e-02 -3.10904264e-01 -3.11662585e-01 6.69300407e-02 -2.15241536e-02 -1.56888336e-01] [-1.05081916e-01 -1.98682383e-01 -7.36056641e-02 -2.01421514e-01 9.88150295e-03 3.98674875e-01 5.43630838e-01] [-2.47608393e-01 -1.28598109e-01 6.95148334e-02 2.71119446e-01 2.06181064e-01 1.61880702e-01 2.15094388e-02] [-2.47146308e-01 2.36978754e-02 3.22875172e-01 8.17414999e-01 3.98947835e-01 -8.34708065e-02 -3.33741575e-01] [ 1.79851115e-01 -1.82384178e-01 1.04728006e-01 8.18493307e-01 3.12436074e-01 -1.81026548e-01 -3.53012174e-01]] [[ 3.09338838e-01 3.53307933e-01 2.38382056e-01 3.56293589e-01 1.41606256e-01 7.00093448e-01 7.14471519e-01] [ 1.46410480e-01 4.52185810e-01 1.58180356e-01 2.69554496e-01 2.04911113e-01 2.11858243e-01 1.14956331e-02] [ 3.09426606e-01 3.17468703e-01 -1.89780071e-01 -2.84836680e-01 1.39960781e-01 -4.60981801e-02 
-4.49339986e-01] [ 1.31583408e-01 -1.28081784e-01 -1.75783262e-01 1.57190636e-01 3.84658836e-02 3.37393045e-01 2.46182784e-01] [-9.85761434e-02 -1.09076321e-01 8.00564960e-02 5.46027482e-01 3.23401064e-01 2.44722560e-01 4.31261778e-01] [ 8.09092596e-02 2.12142542e-02 2.09225371e-01 7.31885195e-01 2.25187197e-01 -2.71221578e-01 -1.12775385e-01] [ 2.76363313e-01 -1.15973510e-01 -4.20555510e-02 4.54374701e-01 7.30387196e-02 1.51477382e-03 -1.38825029e-01]] [[ 6.22018278e-01 4.62144017e-01 2.57640690e-01 1.49887279e-01 2.11981535e-01 7.18622565e-01 8.23515892e-01] [ 5.66443354e-02 2.71030486e-01 6.45247459e-01 1.57283530e-01 2.26415470e-01 5.60025573e-01 9.53497708e-01] [ 4.74220701e-02 2.87810147e-01 4.02605265e-01 -5.52027076e-02 -2.60816276e-01 -7.47593492e-02 5.63893467e-02] [ 1.59694627e-01 -8.53118077e-02 8.69832188e-02 2.96128958e-01 -1.79072559e-01 -1.81764677e-01 5.59825897e-02] [ 3.83103073e-01 -2.26845756e-01 -2.19902709e-01 2.17985779e-01 -1.50131196e-01 3.01272333e-01 5.49887538e-01] [ 1.09442770e-01 -5.27327418e-01 -5.39162457e-01 -1.96586743e-01 -2.73079902e-01 -2.48627856e-01 -5.24853244e-02] [-5.24060369e-01 -5.98367274e-01 -3.00721794e-01 1.89580947e-01 2.10873578e-02 -4.01983351e-01 -1.76276714e-01]] [[ 6.48720324e-01 3.70171189e-01 -5.33166043e-02 -1.88917160e-01 3.57856989e-01 4.89480615e-01 2.60834277e-01] [-3.27888131e-01 1.03796244e-01 4.51223850e-01 -2.18155310e-01 5.95527947e-01 3.61043960e-01 3.49845648e-01] [-3.39000702e-01 -3.25461030e-01 3.02350789e-01 4.65194046e-01 7.84640312e-02 -5.64189479e-02 8.63995180e-02] [ 5.95382273e-01 -4.65188138e-02 1.27171651e-01 3.63255769e-01 -7.56422952e-02 -1.22307248e-01 2.17043042e-01] [ 1.17392075e+00 3.46000642e-01 2.19097674e-01 1.69988364e-01 8.66620615e-02 3.93423498e-01 7.16628909e-01] [-9.76266190e-02 -2.81350046e-01 -4.15589452e-01 -3.21824342e-01 3.54982875e-02 1.65721029e-03 -4.25877124e-02] [-1.19516170e+00 -9.32260096e-01 -4.92788821e-01 -2.28612851e-02 1.33340642e-01 -6.52094185e-03 
-6.13658577e-02]] [[ 1.85643047e-01 4.82687473e-01 1.33024842e-01 -1.72101364e-01 9.29338932e-02 3.96562070e-01 -2.23628044e-01] [-2.40671217e-01 2.18132272e-01 -1.59987167e-01 -7.72203803e-01 5.71245961e-02 2.32964054e-01 -1.69113994e-01] [-2.22714692e-01 -5.45559585e-01 -4.48205858e-01 1.04946852e-01 2.78617084e-01 2.78336823e-01 2.85043418e-02] [ 3.54093760e-01 -1.85042068e-01 -5.56494057e-01 -2.62724876e-01 -1.10775605e-01 6.12144619e-02 4.05615978e-02] [ 6.24822557e-01 4.64128643e-01 -1.64345205e-01 -6.28263056e-01 -2.59985417e-01 -2.44145960e-01 1.32152796e-01] [ 3.25789601e-01 1.58545494e-01 3.39510292e-03 -2.75302887e-01 -1.81628168e-01 -7.02784419e-01 -3.62002373e-01] [-1.89844370e-01 -1.27482787e-01 -2.17961613e-02 -1.51254386e-02 6.29383996e-02 -2.33235493e-01 -2.07281247e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_avg_pool3d.py::TestAdaptiveAvgPool3D::test_adaptive_avg_pool3d[ ie_device:CPU - precision:FP32 - output_size:7 - input_tensor:[[[[-1.6264967e+00 6.6762489e-01 5.6426209e-01 ... -9.6202148e-03 8.6895913e-01 -2.8210914e-01] [ 1.2952729e-01 -5.7686734e-01 -9.4330698e-01 ... 2.1639912e-01 -1.0495428e+00 -7.8401601e-01] [ 1.0100898e+00 1.7912003e+00 -5.4441977e-01 ... -4.4367367e-01 -1.0595398e+00 -3.9696923e-01] ... [ 6.0459385e-03 1.2612447e+00 1.3234926e+00 ... -5.5413771e-01 9.8731577e-01 -8.5629040e-01] [-5.7554501e-01 1.6232855e+00 2.3614517e-01 ... -8.0677903e-01 7.2041517e-01 1.4077156e+00] [ 1.3079628e+00 -1.5080163e+00 -9.6202070e-01 ... -1.0140289e+00 1.7379189e+00 -9.3763781e-01]] [[-1.7914748e+00 -6.3024980e-01 -7.7485037e-01 ... -1.9146409e-02 -8.5829127e-01 -2.5163388e+00] [ 4.6963930e-01 6.9672205e-02 -2.1891955e-02 ... -1.8014546e-01 1.3755190e+00 7.0208353e-01] [ 1.6126059e+00 -2.5179562e-01 -4.5254251e-01 ... 1.8497190e+00 6.0567564e-01 6.5712935e-01] ... [ 2.5552246e-01 -3.9526331e-01 3.4048507e-01 ... 3.1465545e-01 -3.0910134e-01 1.7038960e+00] [-1.0965911e+00 1.8537978e+00 1.1429275e+00 ... 5.4049897e-01 1.9797909e+00 -8.6621350e-01] [ 1.9171758e-01 1.0534225e+00 -1.2237380e+00 ... -9.4144005e-01 -4.4283459e-01 6.0608193e-02]] [[ 1.6753651e+00 1.4012706e-01 -2.2499661e+00 ... -9.2337126e-01 -1.5063344e-01 -1.5893238e+00] [ 6.3750917e-01 1.4754717e+00 -1.3736570e+00 ... -1.6413206e-01 5.9598410e-01 -3.8550240e-01] [-7.8134006e-01 -2.0615801e-01 -9.0507373e-02 ... -5.3757477e-01 -1.4066958e+00 4.1114375e-01] ... [ 1.5006626e-01 -2.2851874e-01 2.1890709e-01 ... 5.2427703e-01 -3.1858540e-01 -5.2220827e-01] [ 1.0643302e+00 -1.6559567e+00 -1.4868301e+00 ... 1.9768782e-02 7.2434837e-01 -1.5852779e+00] [ 1.2399938e+00 -2.6741400e-01 -3.0564722e-02 ... 5.3922915e-01 1.2729146e+00 1.1271335e+00]] ... [[ 1.5885572e+00 9.4203562e-01 -6.8538684e-01 ... 
-1.1507785e+00 -8.3785897e-01 -8.6712420e-02] [ 1.2498572e+00 8.3067399e-01 -6.4648646e-01 ... 9.1053194e-01 -3.9339137e-01 1.4996259e+00] [-3.2953930e-01 5.3091812e-01 2.5293729e-01 ... -1.8782215e+00 9.2482108e-01 9.2817169e-01] ... [-1.0795367e+00 1.2003300e+00 2.0364006e+00 ... 6.1729187e-01 1.1428548e-01 -5.1993376e-01] [ 8.0722392e-02 -1.0748487e+00 -9.1114514e-02 ... -3.7232175e-01 -7.8755081e-01 6.2107259e-01] [-5.9458297e-01 -6.1529315e-01 -8.2488000e-02 ... 5.9068668e-01 -1.1817505e+00 -5.4924041e-01]] [[-1.0390564e+00 5.6452018e-01 -9.3224742e-02 ... 1.1034650e+00 -4.4751313e-01 7.8361720e-01] [ 1.7040416e+00 1.7961637e+00 7.8463966e-01 ... 9.3157388e-02 -2.3142221e+00 -4.2660916e-01] [ 7.0620492e-02 -9.5159173e-01 -1.3499126e+00 ... 5.4640168e-01 -7.5860435e-01 4.5222700e-01] ... [ 3.7265688e-01 -1.3472339e+00 4.9543661e-01 ... -6.4650774e-01 -5.9616677e-02 -1.1693935e+00] [-1.5688444e+00 1.3746148e-01 -2.1254668e+00 ... -3.0991259e-01 8.6149117e-03 4.5495397e-01] [ 9.9037665e-01 -9.2660803e-01 1.2740724e-01 ... -7.5722575e-01 -9.3521249e-01 -2.9881439e-01]] [[ 1.4329792e+00 1.0823737e+00 -2.2568150e-01 ... -1.2559224e+00 6.4643627e-01 -4.6755424e-01] [-4.2169612e-02 -1.4642648e+00 -1.3292223e+00 ... -2.5427562e-01 2.0668862e+00 1.8133166e+00] [-6.4925271e-01 -2.6518452e-01 3.0918184e-01 ... -2.2356656e+00 2.0907493e+00 -9.3813986e-01] ... [-1.5452185e+00 3.7887573e-01 -7.3223871e-01 ... -5.3794318e-01 1.1241510e+00 5.9302664e-01] [ 6.5320235e-01 2.6313993e-01 -2.1295655e-01 ... 6.2353700e-01 -1.1235738e+00 -2.6756424e-01] [-6.2250566e-01 9.5377706e-02 -3.6029077e-01 ... 3.5572302e-01 5.7454664e-01 -4.0214607e-01]]] [[[-1.7321759e+00 6.4408481e-01 -8.8891453e-01 ... -1.3180741e+00 -8.2013726e-01 1.0563347e-01] [-1.8293746e+00 1.1841056e+00 1.3981042e+00 ... 6.0585201e-01 -1.5472317e+00 1.4064132e+00] [-1.4932953e+00 1.8723807e+00 -3.0616051e-01 ... 1.4629193e-01 5.1395887e-01 -1.1300068e+00] ... [ 7.2136807e-01 -4.3607244e-01 6.8369955e-01 ... 
2.6249993e-01 8.9512748e-01 1.7162341e+00] [ 3.1019577e-01 3.7714550e-01 -1.6411633e+00 ... -1.1162665e+00 -2.0664115e+00 7.0759833e-01] [ 4.0817070e-01 -1.5197918e+00 -1.3483453e+00 ... 2.3490736e-01 1.0245870e+00 -8.8780987e-01]] [[-2.6410952e+00 -1.5706549e+00 -1.3813733e+00 ... 4.7794047e-01 -2.4694080e+00 2.8528017e-01] [ 4.0826070e-01 1.9100498e-01 -4.2800727e-01 ... -9.0724874e-01 1.5439166e+00 1.5604830e-01] [ 1.5704202e-02 -1.3119174e-03 -1.2856206e+00 ... 6.6664320e-01 -1.0355673e+00 -7.7261192e-01] ... [ 1.0600848e+00 3.7087935e-01 5.4382694e-01 ... 2.8970426e-01 -6.5082484e-01 -3.2557085e-01] [-1.7156125e+00 -8.6064422e-01 -7.7732250e-02 ... 1.6061682e-01 3.7308924e+00 -1.1471798e+00] [ 1.1094977e+00 3.1905465e-02 5.2448493e-01 ... -5.1771957e-01 -6.7429096e-01 8.3633494e-01]] [[-1.8256581e+00 -3.8787600e-01 -2.0312676e-01 ... 2.0535105e-01 4.9272388e-01 -1.0886678e+00] [ 5.7943588e-01 5.7171953e-01 -1.1599470e+00 ... -8.0488604e-01 -1.5798382e-01 2.8401586e-01] [ 2.3100257e+00 8.8424152e-01 -1.2121843e+00 ... 8.7549657e-01 1.7587594e+00 4.5910147e-01] ... [ 3.1728223e-01 2.0796604e+00 -2.6906416e-01 ... -2.2314141e+00 -6.1484176e-01 6.1257088e-01] [-3.1011307e-01 1.5328717e+00 4.8769197e-01 ... -1.8884116e-01 -6.2043905e-01 9.5490289e-01] [-6.5837115e-01 -6.9987938e-02 -7.9774491e-02 ... -5.4333471e-02 6.4292842e-01 5.2831626e-01]] ... [[ 4.7913942e-01 -1.4726624e-01 1.2172166e+00 ... 1.1058993e+00 9.3084764e-01 3.4024125e-01] [-7.9320735e-01 2.1973155e-01 -1.7405804e+00 ... 1.0078485e+00 -6.5975595e-01 -1.5517138e-01] [ 7.2676063e-01 -7.7852315e-01 -7.9987162e-01 ... 1.5176263e-01 1.1530857e+00 -7.9605985e-01] ... [-1.0321832e+00 6.2306595e-01 1.0167443e+00 ... -4.5511261e-02 7.7271336e-01 -1.1418898e+00] [ 2.3386879e-01 -1.1094706e+00 -6.6644841e-01 ... 3.9773324e-01 -1.4832149e+00 -2.1572460e-01] [ 1.5152471e+00 -3.6224727e-02 -9.1924542e-01 ... -1.6876988e-01 8.8966954e-01 -4.6001935e-01]] [[ 2.0776961e+00 -3.2056596e-02 -4.0627289e-01 ... 
4.7864515e-01 1.1828252e+00 1.6700789e+00] [ 1.0545219e+00 2.7329382e-01 8.6101514e-01 ... 5.9483558e-02 6.7482710e-01 -9.9830681e-01] [-4.6479812e-01 4.7930333e-01 2.9433718e-01 ... 2.2353580e+00 -3.9456505e-02 -2.1957929e+00] ... [ 7.1570045e-01 -6.2667298e-01 8.1632096e-01 ... -4.5757708e-01 -8.8998564e-03 -6.1239028e-01] [ 1.1382580e+00 3.8459250e-01 8.2235563e-01 ... 2.8059819e+00 -6.8338484e-01 5.9043229e-01] [ 2.3534867e-01 -1.4525057e-01 -1.2863575e+00 ... 2.5161451e-01 8.2636720e-01 3.4671512e-01]] [[ 6.2538445e-02 9.6683896e-01 -5.6053832e-02 ... 1.9607599e-01 -8.9066643e-01 -1.8803213e-02] [ 4.1021982e-01 1.0211513e+00 -7.2397649e-01 ... -9.6066393e-02 -1.0011137e+00 4.5410568e-01] [-1.1773204e-01 -8.7835282e-01 -1.3575778e+00 ... -1.3470498e+00 1.6718143e+00 1.7671034e-01] ... [-1.7671937e+00 6.8233812e-01 -8.9596048e-02 ... 5.4680216e-01 -9.4474763e-01 -2.1486926e+00] [-6.5395468e-01 -8.3680123e-01 -6.1943895e-01 ... -2.4677377e+00 2.6189485e-01 -9.7922909e-01] [ 2.3114350e+00 -9.2848346e-02 -4.0800056e-01 ... -4.1519424e-01 2.6924491e-01 -1.5256592e+00]]]] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_avg_pool3d.___torch_mangle_6.aten_adaptive_avg_pool3d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=3]() %3 : str = prim::Constant[value="AssertionError: "]() %4 : NoneType = prim::Constant() %5 : int[] = prim::Constant[value=[7, 7, 7]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1230:51 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%3, %4) # <string>:5:2 -> () %9 : Tensor = aten::adaptive_avg_pool3d(%input_tensor.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1231:11 return (%9) fw_re: [[[[-4.11078125e-01 -2.05700904e-01 -2.11221695e-01 -4.70591784e-01 -2.80334115e-01 4.30164039e-02 -3.17967027e-01] [ 5.31758964e-01 -1.16243958e-01 -1.44625291e-01 -2.40186840e-01 8.17367956e-02 1.64301395e-01 6.29246235e-03] [ 6.74680993e-02 -6.15450740e-01 -2.24316224e-01 2.29383767e-01 5.21265209e-01 2.41562456e-01 -8.00236687e-02] [-4.87687260e-01 -5.15247285e-01 -1.73766866e-01 2.82750338e-01 4.18116838e-01 1.41258523e-01 3.86048555e-01] [ 1.30436316e-01 3.85262728e-01 1.15801655e-01 2.71188170e-01 2.31071010e-01 2.34774426e-02 2.73618579e-01] [ 3.66562098e-01 9.23264444e-01 2.56718755e-01 4.21573579e-01 1.54051945e-01 3.59082282e-01 5.95941067e-01] [ 3.56254220e-01 2.76975423e-01 2.51559857e-02 3.08814019e-01 -1.57681689e-01 2.21692652e-01 4.57470357e-01]] [[ 2.55757511e-01 -4.20668006e-01 -5.02904832e-01 1.10304631e-01 1.42347336e-01 -4.05270904e-02 -3.53312850e-01] [ 3.78200561e-01 -1.06426075e-01 -5.57985961e-01 -1.52062863e-01 3.65979999e-01 2.67293692e-01 3.19417179e-01] [ 2.96634048e-01 -1.21713042e-01 -7.05308735e-01 -2.52432644e-01 3.03881615e-01 4.32887912e-01 -6.99227974e-02] [-4.82403254e-03 7.18983188e-02 
-1.59808293e-01 3.96533817e-01 4.22089845e-01 5.23144424e-01 2.54361838e-01] [-2.67870605e-01 -1.00906879e-01 -1.83870509e-01 -3.13474596e-01 1.16674744e-01 2.85049468e-01 1.94978863e-01] [-6.57664239e-03 -2.63064355e-02 -2.82569434e-02 -2.77094245e-02 2.97467440e-01 4.34456587e-01 1.00831106e-01] [ 2.97912508e-01 -7.67944381e-02 -1.03882700e-01 1.61516741e-01 3.15136522e-01 4.61534530e-01 2.83808708e-01]] [[ 4.22214478e-01 1.34098992e-01 -5.26135743e-01 -1.86415270e-01 -3.59232724e-03 -7.46692866e-02 -3.54332775e-01] [-4.58365679e-03 1.73483804e-01 -2.34483108e-01 2.26075038e-01 1.30734280e-01 -2.77839065e-01 -5.77811860e-02] [-1.04217298e-01 2.36106068e-01 -2.68397003e-01 -9.72600356e-02 -5.32328971e-02 -3.85507226e-01 -2.38215610e-01] [ 1.03028893e-01 3.61780524e-01 -7.44940490e-02 -7.82438442e-02 -1.53549528e-02 1.90303862e-01 3.22370619e-01] [-6.45597577e-02 -1.54000998e-01 -2.88846999e-01 -1.22384739e+00 -2.28769019e-01 -1.51844770e-02 3.59994322e-02] [-2.45929748e-01 -7.51725316e-01 -2.94396672e-02 -7.83314824e-01 -3.76134776e-02 1.13450311e-01 -3.25342894e-01] [-3.11060935e-01 -9.21876669e-01 -2.30436683e-01 -7.68266618e-02 2.56370217e-01 4.00270730e-01 1.93319961e-01]] [[-6.82219028e-01 1.81465968e-01 8.85596573e-02 -3.76244038e-01 1.59274265e-02 -1.70398019e-02 2.51642950e-02] [-5.74485481e-01 -3.04929093e-02 5.39909303e-02 1.37001947e-01 7.52421990e-02 -9.59939063e-02 2.27851659e-01] [-6.10341251e-01 -4.72841948e-01 -4.16781098e-01 -2.30611444e-01 -3.45002621e-01 -3.76651257e-01 1.73615664e-02] [-1.38991982e-01 2.67608669e-02 -2.76354879e-01 -4.69679594e-01 -4.71651554e-01 -4.24633414e-01 -9.02074650e-02] [ 1.25219822e-02 2.22822785e-01 -1.03057452e-01 -6.33953512e-01 -1.19918287e-01 -3.27815682e-01 -4.81369719e-03] [-3.05830330e-01 -2.47552112e-01 2.34879017e-01 4.87643778e-02 3.61340195e-01 -1.55953318e-01 -4.44011122e-01] [-5.82489014e-01 -2.24826947e-01 1.70837238e-01 2.74123043e-01 5.15689075e-01 2.02489793e-02 -3.87563467e-01]] [[-3.69838402e-02 
-1.48531541e-01 2.92487204e-01 1.10786825e-01 2.20172405e-01 -2.06576705e-01 2.11020544e-01] [-1.43977761e-01 -1.07327655e-01 1.33381337e-01 2.11606905e-01 -6.36210144e-02 -6.17396161e-02 5.57402492e-01] [-4.13531661e-01 -4.79044974e-01 -1.89616665e-01 1.24984413e-01 -3.98239046e-01 -6.59360141e-02 1.92584008e-01] [ 6.64357692e-02 -8.73799548e-02 -1.67591274e-01 -1.16208754e-01 -3.14430624e-01 -4.79068637e-01 -4.39152926e-01] [-2.25581288e-01 7.17657745e-01 5.90374231e-01 1.05709165e-01 -2.85940409e-01 -2.44517535e-01 -2.80079037e-01] [-2.52827078e-01 3.68969262e-01 4.42940265e-01 1.51953876e-01 3.04844249e-02 -2.04214439e-01 -4.02899474e-01] [-4.99309182e-01 3.39859650e-02 4.39395569e-02 -8.06810483e-02 3.04669648e-01 -2.79356122e-01 -6.25677288e-01]] [[ 9.54599142e-01 4.36616868e-01 3.74615222e-01 2.39396006e-01 8.50102827e-02 -3.79576206e-01 -2.77882993e-01] [ 6.12643003e-01 1.55917734e-01 -4.55295742e-02 2.08885297e-02 -3.87404650e-01 -3.58690888e-01 -1.09976605e-02] [-3.16354334e-01 -5.72413921e-01 -2.25011468e-01 2.47760296e-01 -2.78184265e-01 -1.26816347e-01 -2.09624812e-01] [-2.65600115e-01 7.24548101e-03 -7.93212035e-04 -7.96250533e-03 2.29348522e-02 -1.97853699e-01 -3.59452486e-01] [-4.29233730e-01 3.04713815e-01 4.61671114e-01 -1.85357198e-01 -3.81453902e-01 -1.70961767e-01 -5.25059462e-01] [-4.09911603e-01 -9.61294174e-02 5.71297407e-02 -5.05907953e-01 -5.70491791e-01 -1.79464668e-01 -1.67195976e-01] [-4.46452081e-01 -5.81368804e-01 -3.99308354e-01 -2.70910144e-01 -5.98229319e-02 -4.68084037e-01 -3.33490849e-01]] [[ 5.04323423e-01 1.39412984e-01 1.63348839e-01 2.21477553e-01 -9.72108543e-03 -4.52485681e-02 2.06794679e-01] [ 2.47953162e-02 -3.08773845e-01 -3.65785629e-01 -1.38256550e-01 -4.37908053e-01 -9.56966281e-02 2.48200417e-01] [-3.73717725e-01 -3.82079959e-01 -2.78034329e-01 -1.82573736e-01 -4.96239990e-01 -2.38382339e-01 -7.21810907e-02] [-4.06116247e-01 2.17518911e-01 2.68334299e-01 -2.43774578e-01 -3.08680832e-01 -1.26381502e-01 1.92922413e-01] 
[-4.99264300e-01 -1.94800645e-01 3.70416008e-02 -7.78146148e-01 -6.07918978e-01 2.84594297e-03 1.40266567e-02] [-3.31995070e-01 -3.92872810e-01 1.14566363e-01 -7.34742209e-02 -2.59395510e-01 -1.15156390e-01 -5.49252182e-02] [-1.22299999e-01 -3.75241995e-01 6.05562888e-02 1.63990885e-01 6.95522204e-02 -1.95437878e-01 -2.48649448e-01]]] [[[-6.68230593e-01 -1.06456280e-01 -3.20048630e-02 -1.15038455e-02 -3.09702039e-01 -5.54298759e-01 -1.67435661e-01] [ 4.34343256e-02 3.28061879e-01 -7.97140524e-02 9.64742601e-02 5.24878949e-02 -1.67313218e-03 -1.08135089e-01] [-5.11693358e-02 3.53088491e-02 -5.83348572e-01 1.95116073e-01 3.54127258e-01 5.69429398e-02 -3.08703035e-01] [ 1.81963027e-01 2.79057860e-01 -1.77309021e-01 1.55615151e-01 2.13276550e-01 -7.27495253e-02 -3.27766091e-02] [ 4.29089159e-01 2.36997679e-01 3.95179838e-01 4.16905075e-01 3.00545722e-01 -7.41621852e-02 -3.92547287e-02] [-2.15819478e-02 -1.30007625e-01 4.97982591e-01 8.59568566e-02 -1.34919686e-02 1.88167274e-01 3.57483178e-01] [-2.32391730e-01 -5.64267576e-01 -7.29053393e-02 2.36744322e-02 -1.93442523e-01 9.70393941e-02 1.90465063e-01]] [[-5.84357858e-01 -5.46032608e-01 5.86888194e-03 3.51045907e-01 1.42079130e-01 -2.02449352e-01 -1.19259328e-01] [ 6.19885087e-01 -3.05013120e-01 -2.98941374e-01 5.92061132e-02 1.70073375e-01 2.42391229e-01 2.79459834e-01] [ 3.02197874e-01 -3.83210957e-01 -6.45363152e-01 -2.25525230e-01 1.71102047e-01 6.80610910e-02 -3.68854672e-01] [ 1.60540313e-01 7.19346330e-02 -2.16744199e-01 1.70336053e-01 -1.21005764e-02 -3.44676226e-01 3.30383964e-02] [ 1.00583363e+00 5.21334887e-01 -1.12714387e-01 -4.73830756e-03 -2.25826010e-01 -6.18905842e-01 -7.31172413e-02] [ 3.09301078e-01 4.75936204e-01 1.10709220e-01 -2.29719266e-01 -2.48177901e-01 -1.56434327e-02 2.42438704e-01] [-1.17556766e-01 1.86101899e-01 6.82344660e-02 -2.72257775e-01 -1.19226091e-01 3.09851706e-01 5.31433105e-01]] [[-2.71225542e-01 -1.42950207e-01 -1.35649100e-01 4.14966196e-02 -1.52540684e-01 2.40477964e-01 
4.45980698e-01] [ 5.30100703e-01 -1.34385213e-01 -1.09078348e-01 -4.27537084e-01 -1.05417997e-01 4.10803556e-01 6.18535340e-01] [ 2.77518511e-01 -1.00149095e-01 -9.76238176e-02 -6.79737106e-02 -1.46040425e-01 3.89803857e-01 3.31538975e-01] [-7.66330063e-02 2.35355005e-01 -3.24789956e-02 -1.82647899e-01 -6.51174262e-02 5.97050553e-03 2.05570295e-01] [ 8.09158146e-01 6.69970334e-01 -1.65905446e-01 -7.49691129e-01 -4.32973057e-01 -5.69598615e-01 -2.48080462e-01] [ 7.87671685e-01 1.05270505e+00 2.10069567e-02 -1.19950339e-01 -2.95731485e-01 -6.05972290e-01 -2.66207457e-01] [-8.75782222e-02 7.61445224e-01 4.27069336e-01 2.25853503e-01 2.46962667e-01 -6.18885830e-02 9.67654586e-02]] [[-1.53253734e-01 1.66793883e-01 1.65604025e-01 -1.30512893e-01 -4.04169470e-01 -4.31033708e-02 2.67090499e-01] [ 4.24684465e-01 2.12001443e-01 4.13745761e-01 1.76975548e-01 1.73315182e-01 1.45085052e-01 1.47570238e-01] [ 9.25796553e-02 -2.78494120e-01 2.09834382e-01 5.01904964e-01 -1.26375362e-01 -4.32630256e-03 4.93965626e-01] [-1.02611065e-01 -3.83535117e-01 5.51374219e-02 6.82559982e-02 -1.23109661e-01 1.00003004e-01 3.25172782e-01] [-1.34782404e-01 1.03156932e-01 -1.08776949e-01 -5.80692053e-01 -3.73336107e-01 -1.97855204e-01 -2.42832452e-01] [ 2.74402559e-01 6.72176242e-01 1.03143238e-01 -1.90725192e-01 -4.22061205e-01 -3.66115451e-01 -2.74070531e-01] [-1.74153626e-01 1.18157923e-01 2.22374365e-01 2.42188960e-01 3.21171552e-01 -1.00885399e-01 6.85004294e-02]] [[-4.50258777e-02 1.05977997e-01 2.89760411e-01 2.46634245e-01 1.29101053e-01 -1.85757577e-02 -1.80609077e-01] [ 3.59606832e-01 -1.55540079e-01 4.02721882e-01 7.24550009e-01 4.01282042e-01 1.49822369e-01 -2.35216171e-01] [-8.95981938e-02 -5.41799664e-01 2.28843033e-01 5.90548694e-01 -3.59337069e-02 3.87847945e-02 2.85700470e-01] [-1.75901830e-01 -7.02869415e-01 8.15015584e-02 2.17606425e-01 -1.42654106e-01 -8.07061866e-02 2.00882912e-01] [-2.44060814e-01 1.11867338e-01 -2.75773138e-01 -2.88990021e-01 3.68876345e-02 -2.52694637e-02 
5.45177162e-02] [-2.21396267e-01 8.13525245e-02 -2.92873383e-01 -1.31211996e-01 -2.00516656e-02 -2.61870056e-01 -2.24853456e-01] [ 5.06521165e-02 -7.50860929e-01 -6.70565784e-01 -3.05627614e-01 1.23802967e-01 -1.12155229e-01 1.28737092e-03]] [[ 3.91481549e-01 3.06351259e-02 -1.66650806e-02 1.34551406e-01 3.12120318e-01 5.97577572e-01 3.73198241e-01] [ 8.96353349e-02 -1.48911789e-01 2.17453048e-01 4.11128163e-01 4.32098985e-01 5.72894096e-01 -3.77078831e-01] [-3.19628358e-01 1.29358023e-01 2.44964287e-01 4.41807985e-01 1.63984597e-01 2.87568331e-01 -4.45289850e-01] [-5.77710532e-02 2.57394433e-01 3.77013475e-01 2.48193428e-01 -6.79344460e-02 -1.99018136e-01 1.26573801e-01] [ 1.61753654e-01 5.83921969e-01 3.81866306e-01 2.84261554e-01 3.47658157e-01 -2.54939556e-01 -8.14560130e-02] [ 4.08948623e-02 1.57560915e-01 1.41123652e-01 4.43437338e-01 7.23774910e-01 1.62230074e-01 -3.47794831e-01] [ 2.77046144e-01 -3.69506121e-01 -3.15196902e-01 2.73942083e-01 6.71693742e-01 3.54499608e-01 -2.36449428e-02]] [[ 7.29275465e-01 2.37992421e-01 -2.99467593e-01 7.12274835e-02 4.78591055e-01 7.55013078e-02 1.34118348e-01] [ 2.22200900e-01 -3.85078788e-03 -1.27927437e-01 1.48596242e-01 3.98005754e-01 2.69724548e-01 -1.57151565e-01] [-2.39664644e-01 9.31193531e-02 -5.76739609e-02 5.70521057e-02 -1.89410925e-01 -5.28972447e-02 -4.84550238e-01] [ 2.32053205e-01 2.86657363e-01 7.98467845e-02 -3.99766350e-03 -8.40978324e-02 -4.34604734e-02 9.82914343e-02] [ 1.30637586e-01 4.95958120e-01 4.63901550e-01 4.54908103e-01 3.67665887e-01 -1.02794871e-01 -3.00974786e-01] [-1.20466694e-01 6.66372404e-02 4.32826489e-01 2.91549683e-01 2.50487328e-01 -1.18458524e-01 -5.65627158e-01] [ 2.92597413e-01 -2.72718608e-01 1.84205815e-01 3.96950930e-01 1.74052224e-01 1.06098309e-01 -1.11702353e-01]]]]; ov_res: [[[[-4.11078125e-01 -2.05700904e-01 -2.11221695e-01 -4.70591784e-01 -2.80334115e-01 4.30164039e-02 -3.17967027e-01] [ 5.31758964e-01 -1.16243958e-01 -1.44625291e-01 -2.40186840e-01 8.17367956e-02 
1.64301395e-01 6.29246235e-03] [ 6.74680993e-02 -6.15450740e-01 -2.24316224e-01 2.29383767e-01 5.21265209e-01 2.41562456e-01 -8.00236687e-02] [-4.87687260e-01 -5.15247285e-01 -1.73766866e-01 2.82750338e-01 4.18116868e-01 1.41258523e-01 3.86048555e-01] [ 1.30436316e-01 3.85262728e-01 1.15801655e-01 2.71188170e-01 2.31071010e-01 2.34774426e-02 2.73618579e-01] [ 3.66562098e-01 9.23264444e-01 2.56718755e-01 4.21573579e-01 1.54051945e-01 3.59082282e-01 5.95941067e-01] [ 3.56254220e-01 2.76975423e-01 2.51559857e-02 3.08814019e-01 -1.57681689e-01 2.21692652e-01 4.57470357e-01]] [[ 2.55757511e-01 -4.20668006e-01 -5.02904832e-01 1.10304631e-01 1.42347336e-01 -4.05270904e-02 -3.53312850e-01] [ 3.78200561e-01 -1.06426075e-01 -5.57985961e-01 -1.52062863e-01 3.65979999e-01 2.67293692e-01 3.19417179e-01] [ 2.96634048e-01 -1.21713042e-01 -7.05308735e-01 -2.52432644e-01 3.03881615e-01 4.32887912e-01 -6.99227974e-02] [-4.82403254e-03 7.18983188e-02 -1.59808293e-01 3.96533817e-01 4.22089875e-01 5.23144424e-01 2.54361838e-01] [-2.67870605e-01 -1.00906879e-01 -1.83870509e-01 -3.13474596e-01 1.16674744e-01 2.85049468e-01 1.94978863e-01] [-6.57664239e-03 -2.63064355e-02 -2.82569434e-02 -2.77094245e-02 2.97467440e-01 4.34456587e-01 1.00831106e-01] [ 2.97912508e-01 -7.67944381e-02 -1.03882700e-01 1.61516741e-01 3.15136522e-01 4.61534530e-01 2.83808708e-01]] [[ 4.22214478e-01 1.34098992e-01 -5.26135743e-01 -1.86415270e-01 -3.59232724e-03 -7.46692866e-02 -3.54332775e-01] [-4.58365679e-03 1.73483804e-01 -2.34483108e-01 2.26075038e-01 1.30734280e-01 -2.77839065e-01 -5.77811860e-02] [-1.04217298e-01 2.36106068e-01 -2.68397003e-01 -9.72600356e-02 -5.32328971e-02 -3.85507226e-01 -2.38215610e-01] [ 1.03028893e-01 3.61780524e-01 -7.44940490e-02 -7.82438442e-02 -1.53549528e-02 1.90303862e-01 3.22370619e-01] [-6.45597577e-02 -1.54000998e-01 -2.88846999e-01 -1.22384739e+00 -2.28769019e-01 -1.51844770e-02 3.59994322e-02] [-2.45929748e-01 -7.51725316e-01 -2.94396672e-02 -7.83314824e-01 -3.76134776e-02 
1.13450311e-01 -3.25342894e-01] [-3.11060935e-01 -9.21876669e-01 -2.30436683e-01 -7.68266618e-02 2.56370217e-01 4.00270730e-01 1.93319961e-01]] [[-6.82219028e-01 1.81465968e-01 8.85596573e-02 -3.76244038e-01 1.59274265e-02 -1.70398019e-02 2.51642950e-02] [-5.74485481e-01 -3.04929093e-02 5.39909303e-02 1.37001947e-01 7.52421990e-02 -9.59939063e-02 2.27851659e-01] [-6.10341251e-01 -4.72841948e-01 -4.16781098e-01 -2.30611444e-01 -3.45002621e-01 -3.76651257e-01 1.73615664e-02] [-1.38991982e-01 2.67608669e-02 -2.76354879e-01 -4.69679594e-01 -4.71651554e-01 -4.24633414e-01 -9.02074650e-02] [ 1.25219822e-02 2.22822785e-01 -1.03057452e-01 -6.33953512e-01 -1.19918287e-01 -3.27815682e-01 -4.81369719e-03] [-3.05830330e-01 -2.47552112e-01 2.34879017e-01 4.87643778e-02 3.61340195e-01 -1.55953318e-01 -4.44011122e-01] [-5.82489014e-01 -2.24826947e-01 1.70837238e-01 2.74123043e-01 5.15689075e-01 2.02489793e-02 -3.87563467e-01]] [[-3.69838402e-02 -1.48531541e-01 2.92487204e-01 1.10786825e-01 2.20172405e-01 -2.06576705e-01 2.11020544e-01] [-1.43977761e-01 -1.07327655e-01 1.33381337e-01 2.11606905e-01 -6.36210144e-02 -6.17396161e-02 5.57402492e-01] [-4.13531661e-01 -4.79044974e-01 -1.89616665e-01 1.24984413e-01 -3.98239046e-01 -6.59360141e-02 1.92584008e-01] [ 6.64357692e-02 -8.73799548e-02 -1.67591274e-01 -1.16208754e-01 -3.14430624e-01 -4.79068637e-01 -4.39152926e-01] [-2.25581288e-01 7.17657745e-01 5.90374231e-01 1.05709165e-01 -2.85940409e-01 -2.44517535e-01 -2.80079037e-01] [-2.52827078e-01 3.68969262e-01 4.42940265e-01 1.51953876e-01 3.04844249e-02 -2.04214439e-01 -4.02899474e-01] [-4.99309182e-01 3.39859650e-02 4.39395569e-02 -8.06810483e-02 3.04669648e-01 -2.79356122e-01 -6.25677288e-01]] [[ 9.54599142e-01 4.36616868e-01 3.74615222e-01 2.39396006e-01 8.50102827e-02 -3.79576206e-01 -2.77882993e-01] [ 6.12643003e-01 1.55917734e-01 -4.55295742e-02 2.08885297e-02 -3.87404650e-01 -3.58690888e-01 -1.09976605e-02] [-3.16354334e-01 -5.72413921e-01 -2.25011468e-01 2.47760296e-01 
-2.78184265e-01 -1.26816347e-01 -2.09624812e-01] [-2.65600115e-01 7.24548101e-03 -7.93211977e-04 -7.96250533e-03 2.29348522e-02 -1.97853699e-01 -3.59452486e-01] [-4.29233730e-01 3.04713815e-01 4.61671114e-01 -1.85357198e-01 -3.81453902e-01 -1.70961767e-01 -5.25059462e-01] [-4.09911603e-01 -9.61294174e-02 5.71297407e-02 -5.05907953e-01 -5.70491791e-01 -1.79464668e-01 -1.67195976e-01] [-4.46452081e-01 -5.81368804e-01 -3.99308354e-01 -2.70910144e-01 -5.98229319e-02 -4.68084037e-01 -3.33490849e-01]] [[ 5.04323423e-01 1.39412984e-01 1.63348839e-01 2.21477553e-01 -9.72108543e-03 -4.52485681e-02 2.06794679e-01] [ 2.47953162e-02 -3.08773845e-01 -3.65785629e-01 -1.38256550e-01 -4.37908053e-01 -9.56966281e-02 2.48200417e-01] [-3.73717725e-01 -3.82079959e-01 -2.78034329e-01 -1.82573736e-01 -4.96239990e-01 -2.38382339e-01 -7.21810907e-02] [-4.06116247e-01 2.17518911e-01 2.68334270e-01 -2.43774578e-01 -3.08680832e-01 -1.26381502e-01 1.92922413e-01] [-4.99264300e-01 -1.94800645e-01 3.70416008e-02 -7.78146148e-01 -6.07918978e-01 2.84594297e-03 1.40266567e-02] [-3.31995070e-01 -3.92872810e-01 1.14566363e-01 -7.34742209e-02 -2.59395510e-01 -1.15156390e-01 -5.49252182e-02] [-1.22299999e-01 -3.75241995e-01 6.05562888e-02 1.63990885e-01 6.95522204e-02 -1.95437878e-01 -2.48649448e-01]]] [[[-6.68230593e-01 -1.06456280e-01 -3.20048630e-02 -1.15038455e-02 -3.09702039e-01 -5.54298759e-01 -1.67435661e-01] [ 4.34343256e-02 3.28061879e-01 -7.97140524e-02 9.64742601e-02 5.24878949e-02 -1.67313218e-03 -1.08135089e-01] [-5.11693358e-02 3.53088491e-02 -5.83348572e-01 1.95116073e-01 3.54127258e-01 5.69429398e-02 -3.08703035e-01] [ 1.81963027e-01 2.79057860e-01 -1.77309021e-01 1.55615151e-01 2.13276550e-01 -7.27495253e-02 -3.27766091e-02] [ 4.29089159e-01 2.36997679e-01 3.95179838e-01 4.16905075e-01 3.00545722e-01 -7.41621852e-02 -3.92547287e-02] [-2.15819478e-02 -1.30007625e-01 4.97982591e-01 8.59568566e-02 -1.34919686e-02 1.88167274e-01 3.57483178e-01] [-2.32391730e-01 -5.64267576e-01 
-7.29053393e-02 2.36744322e-02 -1.93442523e-01 9.70393941e-02 1.90465063e-01]] [[-5.84357858e-01 -5.46032608e-01 5.86888194e-03 3.51045907e-01 1.42079130e-01 -2.02449352e-01 -1.19259328e-01] [ 6.19885087e-01 -3.05013120e-01 -2.98941374e-01 5.92061132e-02 1.70073375e-01 2.42391229e-01 2.79459834e-01] [ 3.02197874e-01 -3.83210957e-01 -6.45363152e-01 -2.25525230e-01 1.71102047e-01 6.80610910e-02 -3.68854672e-01] [ 1.60540313e-01 7.19346330e-02 -2.16744214e-01 1.70336053e-01 -1.21005755e-02 -3.44676226e-01 3.30383964e-02] [ 1.00583363e+00 5.21334887e-01 -1.12714387e-01 -4.73830756e-03 -2.25826010e-01 -6.18905842e-01 -7.31172413e-02] [ 3.09301078e-01 4.75936204e-01 1.10709220e-01 -2.29719266e-01 -2.48177901e-01 -1.56434327e-02 2.42438704e-01] [-1.17556766e-01 1.86101899e-01 6.82344660e-02 -2.72257775e-01 -1.19226091e-01 3.09851706e-01 5.31433105e-01]] [[-2.71225542e-01 -1.42950207e-01 -1.35649100e-01 4.14966196e-02 -1.52540684e-01 2.40477964e-01 4.45980698e-01] [ 5.30100703e-01 -1.34385213e-01 -1.09078348e-01 -4.27537084e-01 -1.05417997e-01 4.10803556e-01 6.18535340e-01] [ 2.77518511e-01 -1.00149095e-01 -9.76238176e-02 -6.79737106e-02 -1.46040425e-01 3.89803857e-01 3.31538975e-01] [-7.66330063e-02 2.35355005e-01 -3.24789956e-02 -1.82647899e-01 -6.51174262e-02 5.97050553e-03 2.05570295e-01] [ 8.09158146e-01 6.69970334e-01 -1.65905446e-01 -7.49691129e-01 -4.32973057e-01 -5.69598615e-01 -2.48080462e-01] [ 7.87671685e-01 1.05270505e+00 2.10069567e-02 -1.19950339e-01 -2.95731485e-01 -6.05972290e-01 -2.66207457e-01] [-8.75782222e-02 7.61445224e-01 4.27069336e-01 2.25853503e-01 2.46962667e-01 -6.18885830e-02 9.67654586e-02]] [[-1.53253734e-01 1.66793883e-01 1.65604025e-01 -1.30512893e-01 -4.04169470e-01 -4.31033708e-02 2.67090499e-01] [ 4.24684465e-01 2.12001443e-01 4.13745761e-01 1.76975548e-01 1.73315182e-01 1.45085052e-01 1.47570238e-01] [ 9.25796553e-02 -2.78494120e-01 2.09834382e-01 5.01904964e-01 -1.26375362e-01 -4.32630256e-03 4.93965626e-01] [-1.02611065e-01 
-3.83535117e-01 5.51374219e-02 6.82559982e-02 -1.23109661e-01 1.00003004e-01 3.25172782e-01] [-1.34782404e-01 1.03156932e-01 -1.08776949e-01 -5.80692053e-01 -3.73336107e-01 -1.97855204e-01 -2.42832452e-01] [ 2.74402559e-01 6.72176242e-01 1.03143238e-01 -1.90725192e-01 -4.22061205e-01 -3.66115451e-01 -2.74070531e-01] [-1.74153626e-01 1.18157923e-01 2.22374365e-01 2.42188960e-01 3.21171552e-01 -1.00885399e-01 6.85004294e-02]] [[-4.50258777e-02 1.05977997e-01 2.89760411e-01 2.46634245e-01 1.29101053e-01 -1.85757577e-02 -1.80609077e-01] [ 3.59606832e-01 -1.55540079e-01 4.02721882e-01 7.24550009e-01 4.01282042e-01 1.49822369e-01 -2.35216171e-01] [-8.95981938e-02 -5.41799664e-01 2.28843033e-01 5.90548694e-01 -3.59337069e-02 3.87847945e-02 2.85700470e-01] [-1.75901830e-01 -7.02869415e-01 8.15015584e-02 2.17606425e-01 -1.42654106e-01 -8.07061866e-02 2.00882912e-01] [-2.44060814e-01 1.11867338e-01 -2.75773138e-01 -2.88990021e-01 3.68876345e-02 -2.52694637e-02 5.45177162e-02] [-2.21396267e-01 8.13525245e-02 -2.92873383e-01 -1.31211996e-01 -2.00516656e-02 -2.61870056e-01 -2.24853456e-01] [ 5.06521165e-02 -7.50860929e-01 -6.70565784e-01 -3.05627614e-01 1.23802967e-01 -1.12155229e-01 1.28737092e-03]] [[ 3.91481549e-01 3.06351259e-02 -1.66650806e-02 1.34551406e-01 3.12120318e-01 5.97577572e-01 3.73198241e-01] [ 8.96353349e-02 -1.48911789e-01 2.17453048e-01 4.11128163e-01 4.32098985e-01 5.72894096e-01 -3.77078831e-01] [-3.19628358e-01 1.29358023e-01 2.44964287e-01 4.41807985e-01 1.63984597e-01 2.87568331e-01 -4.45289850e-01] [-5.77710532e-02 2.57394433e-01 3.77013505e-01 2.48193428e-01 -6.79344460e-02 -1.99018136e-01 1.26573801e-01] [ 1.61753654e-01 5.83921969e-01 3.81866306e-01 2.84261554e-01 3.47658157e-01 -2.54939556e-01 -8.14560130e-02] [ 4.08948623e-02 1.57560915e-01 1.41123652e-01 4.43437338e-01 7.23774910e-01 1.62230074e-01 -3.47794831e-01] [ 2.77046144e-01 -3.69506121e-01 -3.15196902e-01 2.73942083e-01 6.71693742e-01 3.54499608e-01 -2.36449428e-02]] [[ 7.29275465e-01 
2.37992421e-01 -2.99467593e-01 7.12274835e-02 4.78591055e-01 7.55013078e-02 1.34118348e-01] [ 2.22200900e-01 -3.85078788e-03 -1.27927437e-01 1.48596242e-01 3.98005754e-01 2.69724548e-01 -1.57151565e-01] [-2.39664644e-01 9.31193531e-02 -5.76739609e-02 5.70521057e-02 -1.89410925e-01 -5.28972447e-02 -4.84550238e-01] [ 2.32053205e-01 2.86657363e-01 7.98467845e-02 -3.99766350e-03 -8.40978324e-02 -4.34604734e-02 9.82914343e-02] [ 1.30637586e-01 4.95958120e-01 4.63901550e-01 4.54908103e-01 3.67665887e-01 -1.02794871e-01 -3.00974786e-01] [-1.20466694e-01 6.66372404e-02 4.32826489e-01 2.91549683e-01 2.50487328e-01 -1.18458524e-01 -5.65627158e-01] [ 2.92597413e-01 -2.72718608e-01 1.84205815e-01 3.96950930e-01 1.74052224e-01 1.06098309e-01 -1.11702353e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_max_pool_2d.py::TestAdaptiveMaxPool2D::test_adaptive_max_pool2d[ ie_device:CPU - precision:FP32 - return_indices:False - output_size:[2, 2] - input_tensor:[[[[-0.37740767 0.31230924 -0.12982066 1.4051844 ] [-0.24432397 1.503998 -0.6281938 -0.38304567] [ 0.90241987 0.7124299 -1.5757058 -0.3502948 ] [ 1.0709201 1.4686248 0.04752192 0.7636548 ]]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_max_pool_2d.___torch_mangle_7.aten_adaptive_max_pool2d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=2]() %3 : NoneType = prim::Constant() %4 : str = prim::Constant[value="AssertionError: "]() %self.output_size : int[] = prim::Constant[value=[2, 2]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1120:50 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%4, %3) # <string>:5:2 -> () %9 : Tensor, %10 : Tensor = aten::adaptive_max_pool2d(%input_tensor.1, %self.output_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1121:11 return (%9) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_13 (Squeeze_12[0]:i64[], Constant_2[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_13': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_2970 (Squeeze_2969[0]:i64[], Constant_2959[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_2970': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_5861 (Squeeze_5860[0]:i64[], Constant_5850[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_5861': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_8818 (Squeeze_8817[0]:i64[], Constant_8807[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_8818': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode fofw_re: [[[[1.503998 1.4051844] [1.4686248 0.7636548]]]]; ov_res: [[[[1.503998 1.4051844] [1.4686248 0.7636548]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_max_pool_2d.py::TestAdaptiveMaxPool2D::test_adaptive_max_pool2d[ ie_device:CPU - precision:FP32 - return_indices:False - output_size:[2, 2] - input_tensor:[[[[-0.64478546 -0.3741585 -1.0708697 ... 0.48973268 -1.0591635 -1.5774981 ] [-0.2857027 1.0694387 -0.5624341 ... -0.7630078 0.24776463 -0.19389483] [-0.38613856 -0.9014857 1.1638839 ... -0.61674 0.47341952 1.5237972 ] ... [-0.7333577 -0.595691 0.67729104 ... -0.35381594 -0.3357668 0.25525248] [ 0.5771916 -0.49796262 -0.21202585 ... 0.30058077 -0.9445275 -0.8152361 ] [ 0.71690077 -0.7219977 -0.6807347 ... 0.9965143 -2.5183163 -1.4107021 ]] [[-1.56915 -0.23050629 0.70764357 ... 1.254578 -0.0954088 0.14791915] [-1.0580692 0.47977042 -1.5390373 ... -0.17142831 0.30606747 -1.1335951 ] [ 0.7084625 -0.76996017 0.69932795 ... 0.02337136 0.5034655 -0.1702112 ] ... [ 0.82389367 -0.18515633 0.06325097 ... -2.1818073 -0.31525046 0.27133113] [-1.2541213 -0.3536543 0.3677144 ... 0.01101819 0.8585423 -0.1560417 ] [-0.5868929 1.4622161 0.48472342 ... -0.18142745 -0.9951985 -2.1812372 ]] [[-0.76528305 -0.5603739 0.7264858 ... 0.28240818 -1.3476164 0.12854697] [-0.4305205 0.36473486 -0.49270633 ... -0.10797535 0.43953055 -0.01127821] [ 0.50493187 -0.5758869 1.5847708 ... 0.12571165 -1.0305319 1.0100759 ] ... [-0.5704164 0.8615231 -2.3057482 ... 0.49583668 -0.11194748 1.558341 ] [-0.6684554 0.18224888 1.0991089 ... 0.49193108 1.1186876 -0.37044573] [-0.91879624 -0.01040311 -0.52522415 ... -1.249136 0.13540666 -0.29738155]]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_max_pool_2d.___torch_mangle_9.aten_adaptive_max_pool2d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=2]() %3 : NoneType = prim::Constant() %4 : str = prim::Constant[value="AssertionError: "]() %self.output_size : int[] = prim::Constant[value=[2, 2]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1120:50 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%4, %3) # <string>:5:2 -> () %9 : Tensor, %10 : Tensor = aten::adaptive_max_pool2d(%input_tensor.1, %self.output_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1121:11 return (%9) fw_re: [[[[3.1290963 2.3436837] [2.7081668 3.4506814]] [[2.7149408 2.7276275] [2.4556346 2.862138 ]] [[2.7193818 3.978786 ] [2.43149 3.0446322]]]]; ov_res: [[[[3.1290963 2.3436837] [2.7081668 3.4506814]] [[2.7149408 2.7276275] [2.4556346 2.862138 ]] [[2.7193818 3.978786 ] [2.43149 3.0446322]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_max_pool_2d.py::TestAdaptiveMaxPool2D::test_adaptive_max_pool2d[ ie_device:CPU - precision:FP32 - return_indices:False - output_size:[4, 4] - input_tensor:[[[[-0.37740767 0.31230924 -0.12982066 1.4051844 ] [-0.24432397 1.503998 -0.6281938 -0.38304567] [ 0.90241987 0.7124299 -1.5757058 -0.3502948 ] [ 1.0709201 1.4686248 0.04752192 0.7636548 ]]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_max_pool_2d.___torch_mangle_11.aten_adaptive_max_pool2d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=2]() %3 : NoneType = prim::Constant() %4 : str = prim::Constant[value="AssertionError: "]() %self.output_size : int[] = prim::Constant[value=[4, 4]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1120:50 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%4, %3) # <string>:5:2 -> () %9 : Tensor, %10 : Tensor = aten::adaptive_max_pool2d(%input_tensor.1, %self.output_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1121:11 return (%9) fw_re: [[[[-0.37740767 0.31230924 -0.12982066 1.4051844 ] [-0.24432397 1.503998 -0.6281938 -0.38304567] [ 0.90241987 0.7124299 -1.5757058 -0.3502948 ] [ 1.0709201 1.4686248 0.04752192 0.7636548 ]]]]; ov_res: [[[[-0.37740767 0.31230924 -0.12982066 1.4051844 ] [-0.24432397 1.503998 -0.6281938 -0.38304567] [ 0.90241987 0.7124299 -1.5757058 -0.3502948 ] [ 1.0709201 1.4686248 0.04752192 0.7636548 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_max_pool_2d.py::TestAdaptiveMaxPool2D::test_adaptive_max_pool2d[ ie_device:CPU - precision:FP32 - return_indices:False - output_size:[4, 4] - input_tensor:[[[[-0.64478546 -0.3741585 -1.0708697 ... 0.48973268 -1.0591635 -1.5774981 ] [-0.2857027 1.0694387 -0.5624341 ... -0.7630078 0.24776463 -0.19389483] [-0.38613856 -0.9014857 1.1638839 ... -0.61674 0.47341952 1.5237972 ] ... [-0.7333577 -0.595691 0.67729104 ... -0.35381594 -0.3357668 0.25525248] [ 0.5771916 -0.49796262 -0.21202585 ... 0.30058077 -0.9445275 -0.8152361 ] [ 0.71690077 -0.7219977 -0.6807347 ... 0.9965143 -2.5183163 -1.4107021 ]] [[-1.56915 -0.23050629 0.70764357 ... 1.254578 -0.0954088 0.14791915] [-1.0580692 0.47977042 -1.5390373 ... -0.17142831 0.30606747 -1.1335951 ] [ 0.7084625 -0.76996017 0.69932795 ... 0.02337136 0.5034655 -0.1702112 ] ... [ 0.82389367 -0.18515633 0.06325097 ... -2.1818073 -0.31525046 0.27133113] [-1.2541213 -0.3536543 0.3677144 ... 0.01101819 0.8585423 -0.1560417 ] [-0.5868929 1.4622161 0.48472342 ... -0.18142745 -0.9951985 -2.1812372 ]] [[-0.76528305 -0.5603739 0.7264858 ... 0.28240818 -1.3476164 0.12854697] [-0.4305205 0.36473486 -0.49270633 ... -0.10797535 0.43953055 -0.01127821] [ 0.50493187 -0.5758869 1.5847708 ... 0.12571165 -1.0305319 1.0100759 ] ... [-0.5704164 0.8615231 -2.3057482 ... 0.49583668 -0.11194748 1.558341 ] [-0.6684554 0.18224888 1.0991089 ... 0.49193108 1.1186876 -0.37044573] [-0.91879624 -0.01040311 -0.52522415 ... -1.249136 0.13540666 -0.29738155]]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_max_pool_2d.___torch_mangle_13.aten_adaptive_max_pool2d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=2]() %3 : NoneType = prim::Constant() %4 : str = prim::Constant[value="AssertionError: "]() %self.output_size : int[] = prim::Constant[value=[4, 4]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1120:50 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%4, %3) # <string>:5:2 -> () %9 : Tensor, %10 : Tensor = aten::adaptive_max_pool2d(%input_tensor.1, %self.output_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1121:11 return (%9) fw_re: [[[[2.3312337 1.9965315 1.8220571 2.3436837] [3.1290963 2.2811618 1.7014374 1.8261592] [2.3108373 2.7081668 2.1926408 2.621987 ] [2.3846817 1.9827489 3.4506814 2.0623965]] [[1.6291069 2.2065125 2.4031942 1.9845312] [2.7149408 2.574161 2.6931984 2.7276275] [2.4556346 2.0782068 1.798618 2.862138 ] [1.5457374 2.114209 2.388686 2.0742347]] [[1.8740228 2.7193818 1.8731618 3.978786 ] [2.4271903 1.9179797 2.168956 2.8969069] [2.43149 2.2368093 1.7316782 2.4346251] [2.2611125 1.9488704 2.6090508 3.0446322]]]]; ov_res: [[[[2.3312337 1.9965315 1.8220571 2.3436837] [3.1290963 2.2811618 1.7014374 1.8261592] [2.3108373 2.7081668 2.1926408 2.621987 ] [2.3846817 1.9827489 3.4506814 2.0623965]] [[1.6291069 2.2065125 2.4031942 1.9845312] [2.7149408 2.574161 2.6931984 2.7276275] [2.4556346 2.0782068 1.798618 2.862138 ] [1.5457374 2.114209 2.388686 2.0742347]] [[1.8740228 2.7193818 1.8731618 3.978786 ] [2.4271903 1.9179797 2.168956 2.8969069] [2.43149 2.2368093 1.7316782 2.4346251] [2.2611125 1.9488704 2.6090508 3.0446322]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_max_pool_2d.py::TestAdaptiveMaxPool2D::test_adaptive_max_pool2d[ ie_device:CPU - precision:FP32 - return_indices:True - output_size:[2, 2] - input_tensor:[[[[-0.37740767 0.31230924 -0.12982066 1.4051844 ] [-0.24432397 1.503998 -0.6281938 -0.38304567] [ 0.90241987 0.7124299 -1.5757058 -0.3502948 ] [ 1.0709201 1.4686248 0.04752192 0.7636548 ]]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_max_pool_2d.___torch_mangle_15.aten_adaptive_max_pool2d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=2]() %3 : str = prim::Constant[value="AssertionError: "]() %4 : NoneType = prim::Constant() %self.output_size : int[] = prim::Constant[value=[2, 2]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1120:50 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%3, %4) # <string>:5:2 -> () %9 : Tensor, %10 : Tensor = aten::adaptive_max_pool2d(%input_tensor.1, %self.output_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1121:11 return (%9) r details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_11709 (Squeeze_11708[0]:i64[], Constant_11698[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_11709': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_13895 (Squeeze_13894[0]:i64[], Constant_13884[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_13895': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_16081 (Squeeze_16080[0]:i64[], Constant_16070[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_16081': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_18267 (Squeeze_18266[0]:i64[], Constant_18256[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_18267': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_20453 (Squeeze_20452[0]:i64[], Constant_20442[0]:i32[]) -> (dyfw_re: [[[[1.503998 1.4051844] [1.4686248 0.7636548]]]]; ov_res: [[[[1.503998 1.4051844] [1.4686248 0.7636548]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_max_pool_2d.py::TestAdaptiveMaxPool2D::test_adaptive_max_pool2d[ ie_device:CPU - precision:FP32 - return_indices:True - output_size:[2, 2] - input_tensor:[[[[-0.64478546 -0.3741585 -1.0708697 ... 0.48973268 -1.0591635 -1.5774981 ] [-0.2857027 1.0694387 -0.5624341 ... -0.7630078 0.24776463 -0.19389483] [-0.38613856 -0.9014857 1.1638839 ... -0.61674 0.47341952 1.5237972 ] ... [-0.7333577 -0.595691 0.67729104 ... -0.35381594 -0.3357668 0.25525248] [ 0.5771916 -0.49796262 -0.21202585 ... 0.30058077 -0.9445275 -0.8152361 ] [ 0.71690077 -0.7219977 -0.6807347 ... 0.9965143 -2.5183163 -1.4107021 ]] [[-1.56915 -0.23050629 0.70764357 ... 1.254578 -0.0954088 0.14791915] [-1.0580692 0.47977042 -1.5390373 ... -0.17142831 0.30606747 -1.1335951 ] [ 0.7084625 -0.76996017 0.69932795 ... 0.02337136 0.5034655 -0.1702112 ] ... [ 0.82389367 -0.18515633 0.06325097 ... -2.1818073 -0.31525046 0.27133113] [-1.2541213 -0.3536543 0.3677144 ... 0.01101819 0.8585423 -0.1560417 ] [-0.5868929 1.4622161 0.48472342 ... -0.18142745 -0.9951985 -2.1812372 ]] [[-0.76528305 -0.5603739 0.7264858 ... 0.28240818 -1.3476164 0.12854697] [-0.4305205 0.36473486 -0.49270633 ... -0.10797535 0.43953055 -0.01127821] [ 0.50493187 -0.5758869 1.5847708 ... 0.12571165 -1.0305319 1.0100759 ] ... [-0.5704164 0.8615231 -2.3057482 ... 0.49583668 -0.11194748 1.558341 ] [-0.6684554 0.18224888 1.0991089 ... 0.49193108 1.1186876 -0.37044573] [-0.91879624 -0.01040311 -0.52522415 ... -1.249136 0.13540666 -0.29738155]]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_max_pool_2d.___torch_mangle_17.aten_adaptive_max_pool2d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=2]() %3 : str = prim::Constant[value="AssertionError: "]() %4 : NoneType = prim::Constant() %self.output_size : int[] = prim::Constant[value=[2, 2]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1120:50 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%3, %4) # <string>:5:2 -> () %9 : Tensor, %10 : Tensor = aten::adaptive_max_pool2d(%input_tensor.1, %self.output_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1121:11 return (%9) fw_re: [[[[3.1290963 2.3436837] [2.7081668 3.4506814]] [[2.7149408 2.7276275] [2.4556346 2.862138 ]] [[2.7193818 3.978786 ] [2.43149 3.0446322]]]]; ov_res: [[[[3.1290963 2.3436837] [2.7081668 3.4506814]] [[2.7149408 2.7276275] [2.4556346 2.862138 ]] [[2.7193818 3.978786 ] [2.43149 3.0446322]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_max_pool_2d.py::TestAdaptiveMaxPool2D::test_adaptive_max_pool2d[ ie_device:CPU - precision:FP32 - return_indices:True - output_size:[4, 4] - input_tensor:[[[[-0.37740767 0.31230924 -0.12982066 1.4051844 ] [-0.24432397 1.503998 -0.6281938 -0.38304567] [ 0.90241987 0.7124299 -1.5757058 -0.3502948 ] [ 1.0709201 1.4686248 0.04752192 0.7636548 ]]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_max_pool_2d.___torch_mangle_19.aten_adaptive_max_pool2d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=2]() %3 : str = prim::Constant[value="AssertionError: "]() %4 : NoneType = prim::Constant() %self.output_size : int[] = prim::Constant[value=[4, 4]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1120:50 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%3, %4) # <string>:5:2 -> () %9 : Tensor, %10 : Tensor = aten::adaptive_max_pool2d(%input_tensor.1, %self.output_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1121:11 return (%9) fw_re: [[[[-0.37740767 0.31230924 -0.12982066 1.4051844 ] [-0.24432397 1.503998 -0.6281938 -0.38304567] [ 0.90241987 0.7124299 -1.5757058 -0.3502948 ] [ 1.0709201 1.4686248 0.04752192 0.7636548 ]]]]; ov_res: [[[[-0.37740767 0.31230924 -0.12982066 1.4051844 ] [-0.24432397 1.503998 -0.6281938 -0.38304567] [ 0.90241987 0.7124299 -1.5757058 -0.3502948 ] [ 1.0709201 1.4686248 0.04752192 0.7636548 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_adaptive_max_pool_2d.py::TestAdaptiveMaxPool2D::test_adaptive_max_pool2d[ ie_device:CPU - precision:FP32 - return_indices:True - output_size:[4, 4] - input_tensor:[[[[-0.64478546 -0.3741585 -1.0708697 ... 0.48973268 -1.0591635 -1.5774981 ] [-0.2857027 1.0694387 -0.5624341 ... -0.7630078 0.24776463 -0.19389483] [-0.38613856 -0.9014857 1.1638839 ... -0.61674 0.47341952 1.5237972 ] ... [-0.7333577 -0.595691 0.67729104 ... -0.35381594 -0.3357668 0.25525248] [ 0.5771916 -0.49796262 -0.21202585 ... 0.30058077 -0.9445275 -0.8152361 ] [ 0.71690077 -0.7219977 -0.6807347 ... 0.9965143 -2.5183163 -1.4107021 ]] [[-1.56915 -0.23050629 0.70764357 ... 1.254578 -0.0954088 0.14791915] [-1.0580692 0.47977042 -1.5390373 ... -0.17142831 0.30606747 -1.1335951 ] [ 0.7084625 -0.76996017 0.69932795 ... 0.02337136 0.5034655 -0.1702112 ] ... [ 0.82389367 -0.18515633 0.06325097 ... -2.1818073 -0.31525046 0.27133113] [-1.2541213 -0.3536543 0.3677144 ... 0.01101819 0.8585423 -0.1560417 ] [-0.5868929 1.4622161 0.48472342 ... -0.18142745 -0.9951985 -2.1812372 ]] [[-0.76528305 -0.5603739 0.7264858 ... 0.28240818 -1.3476164 0.12854697] [-0.4305205 0.36473486 -0.49270633 ... -0.10797535 0.43953055 -0.01127821] [ 0.50493187 -0.5758869 1.5847708 ... 0.12571165 -1.0305319 1.0100759 ] ... [-0.5704164 0.8615231 -2.3057482 ... 0.49583668 -0.11194748 1.558341 ] [-0.6684554 0.18224888 1.0991089 ... 0.49193108 1.1186876 -0.37044573] [-0.91879624 -0.01040311 -0.52522415 ... -1.249136 0.13540666 -0.29738155]]]] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_adaptive_max_pool_2d.___torch_mangle_21.aten_adaptive_max_pool2d, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=2]() %3 : str = prim::Constant[value="AssertionError: "]() %4 : NoneType = prim::Constant() %self.output_size : int[] = prim::Constant[value=[4, 4]]() %6 : int[] = aten::size(%input_tensor.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1120:50 %7 : int = aten::len(%6) # <string>:5:9 %8 : bool = aten::gt(%7, %2) # <string>:5:9 = prim::If(%8) # <string>:5:2 block0(): -> () block1(): = prim::RaiseException(%3, %4) # <string>:5:2 -> () %9 : Tensor, %10 : Tensor = aten::adaptive_max_pool2d(%input_tensor.1, %self.output_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1121:11 return (%9) fw_re: [[[[2.3312337 1.9965315 1.8220571 2.3436837] [3.1290963 2.2811618 1.7014374 1.8261592] [2.3108373 2.7081668 2.1926408 2.621987 ] [2.3846817 1.9827489 3.4506814 2.0623965]] [[1.6291069 2.2065125 2.4031942 1.9845312] [2.7149408 2.574161 2.6931984 2.7276275] [2.4556346 2.0782068 1.798618 2.862138 ] [1.5457374 2.114209 2.388686 2.0742347]] [[1.8740228 2.7193818 1.8731618 3.978786 ] [2.4271903 1.9179797 2.168956 2.8969069] [2.43149 2.2368093 1.7316782 2.4346251] [2.2611125 1.9488704 2.6090508 3.0446322]]]]; ov_res: [[[[2.3312337 1.9965315 1.8220571 2.3436837] [3.1290963 2.2811618 1.7014374 1.8261592] [2.3108373 2.7081668 2.1926408 2.621987 ] [2.3846817 1.9827489 3.4506814 2.0623965]] [[1.6291069 2.2065125 2.4031942 1.9845312] [2.7149408 2.574161 2.6931984 2.7276275] [2.4556346 2.0782068 1.798618 2.862138 ] [1.5457374 2.114209 2.388686 2.0742347]] [[1.8740228 2.7193818 1.8731618 3.978786 ] [2.4271903 1.9179797 2.168956 2.8969069] [2.43149 2.2368093 1.7316782 2.4346251] [2.2611125 1.9488704 2.6090508 3.0446322]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[[[[-0.11440673 0.7030934 -1.4509366 -1.511081 ] [-0.08547328 1.6554784 0.84120286 -1.6462803 ] [ 0.17332906 0.3294991 0.30007967 -1.869866 ]] [[ 0.28145665 0.3300694 0.09740213 -0.1613419 ] [-0.9557577 1.3279629 0.16332506 -0.68232745] [-1.0993817 0.25630575 -0.27054513 -0.376838 ]] [[-1.8979313 1.0407445 -0.68412405 0.5683432 ] [-0.9543146 -0.21203451 -0.1146967 -0.40573367] [ 0.71652013 -0.49653405 0.9129642 -0.60344183]] [[-0.48161447 1.0571262 -1.5543586 0.29951414] [-0.5873854 0.72151184 0.6262076 -0.10603213] [ 1.4789652 1.2359997 1.1941196 -0.91593224]] [[ 0.9184449 1.2760755 0.17642088 -1.208777 ] [-0.6612545 -0.54354084 -1.1850917 -0.5967279 ] [-0.1827366 -0.23732471 0.93439794 -0.33566168]]] [[[ 1.3712412 0.2027446 0.63376683 -1.4555378 ] [-1.1299776 -0.07659484 0.79980344 -0.19800454] [ 0.08541932 -0.00436225 -0.47744435 -1.4723907 ]] [[-0.59376734 -0.05889762 -0.6992119 -0.8286856 ] [-0.5643483 0.72308123 0.2150887 0.5759901 ] [ 0.61197764 0.33765855 0.6413203 -0.79738575]] [[-1.0122422 0.79367656 1.06604 1.7662873 ] [-0.53898376 -0.3510494 -0.85583705 -1.4623448 ] [-1.3412459 -0.32541496 1.4272239 -0.6224603 ]] [[ 0.55429524 1.5642756 0.350079 -1.6949313 ] [ 0.14330085 1.3993622 0.07396284 0.36646202] [-0.41736418 0.1704732 -0.7811821 -0.9199373 ]] [[ 0.9520105 -0.01843818 1.516323 1.5173719 ] [-0.5502519 2.4709747 1.2195027 -0.35427982] [ 0.663669 -0.10770386 0.9418491 1.9719846 ]]]] - alpha:-0.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_22.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : float = prim::Constant[value=-0.5]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[ 0.74765855 -1.2327878 -0.14834899 0.53206515] [-0.40198463 0.07584524 2.4162068 -0.17281228] [ 1.0340817 0.5415279 1.8927702 0.27195024]] [[-0.1065053 0.8681095 -0.22222541 1.1582773 ] [-1.261424 -0.8696209 1.0613291 -1.4452015 ] [ 0.53775156 -0.852978 0.7368393 1.7922784 ]] [[ 0.61592305 0.20248425 0.4089806 -0.87903047] [ 0.7110836 -0.5266577 -1.3813865 -0.65067995] [-1.0642183 -0.86065316 -2.1948693 1.2306018 ]] [[-1.1835847 -0.67264926 1.5872607 -0.70856524] [ 1.681967 0.08280414 -0.18549742 -0.15613373] [-2.2002075 -1.650593 -0.4941938 1.8951368 ]] [[ 1.2301149 -1.6488905 0.83555216 1.702143 ] [ 0.3664175 1.0845236 0.94978905 0.09275672] [ 0.42822498 0.75567216 -1.2227464 -1.0156978 ]]] [[[-0.8985481 -0.8361894 -2.1715689 2.1637568 ] [ 1.608995 0.43430883 -0.27278695 0.2427239 ] [-1.1243495 -0.28954473 -0.973196 1.9613003 ]] [[ 1.1744746 0.81101394 2.161725 1.0105914 ] [-0.69885504 -0.9411807 -1.5736486 -1.8740096 ] [-1.2887365 -0.5491572 0.12807328 -0.83623135]] [[ 0.26174566 -3.358887 -1.8540466 0.57078946] [-0.54877555 0.9896411 1.8330772 1.1890931 ] [ 0.17299455 -0.5399296 -1.5392306 1.4706122 ]] [[-1.1652873 -2.0903928 -0.5969422 0.10984683] [-0.01016467 -0.86470383 0.26523912 0.08481185] [ 0.7617868 -1.1523068 0.85630476 0.8168754 ]] [[-0.46068233 0.66767085 -0.4769671 -0.03510338] [ 1.3366994 -3.0007257 -1.4946088 0.75244796] [ 0.12748408 0.59213215 1.2783723 -1.2337389 ]]]]; ov_res: [[[[ 0.74765855 -1.2327878 -0.14834899 0.53206515] [-0.40198463 0.07584524 2.4162068 -0.17281228] [ 1.0340817 0.5415279 1.8927702 0.27195024]] [[-0.1065053 0.8681095 -0.22222541 1.1582773 ] [-1.261424 
-0.8696209 1.0613291 -1.4452015 ] [ 0.53775156 -0.852978 0.7368393 1.7922784 ]] [[ 0.61592305 0.20248425 0.4089806 -0.87903047] [ 0.7110836 -0.5266577 -1.3813865 -0.65067995] [-1.0642183 -0.86065316 -2.1948693 1.2306018 ]] [[-1.1835847 -0.67264926 1.5872607 -0.70856524] [ 1.681967 0.08280414 -0.18549742 -0.15613373] [-2.2002075 -1.650593 -0.4941938 1.8951368 ]] [[ 1.2301149 -1.6488905 0.83555216 1.702143 ] [ 0.3664175 1.0845236 0.94978905 0.09275672] [ 0.42822498 0.75567216 -1.2227464 -1.0156978 ]]] [[[-0.8985481 -0.8361894 -2.1715689 2.1637568 ] [ 1.608995 0.43430883 -0.27278695 0.2427239 ] [-1.1243495 -0.28954473 -0.973196 1.9613003 ]] [[ 1.1744746 0.81101394 2.161725 1.0105914 ] [-0.69885504 -0.9411807 -1.5736486 -1.8740096 ] [-1.2887365 -0.5491572 0.12807328 -0.83623135]] [[ 0.26174566 -3.358887 -1.8540466 0.57078946] [-0.54877555 0.9896411 1.8330772 1.1890931 ] [ 0.17299455 -0.5399296 -1.5392306 1.4706122 ]] [[-1.1652873 -2.0903928 -0.5969422 0.10984683] [-0.01016467 -0.86470383 0.26523912 0.08481185] [ 0.7617868 -1.1523068 0.85630476 0.8168754 ]] [[-0.46068233 0.66767085 -0.4769671 -0.03510338] [ 1.3366994 -3.0007257 -1.4946088 0.75244796] [ 0.12748408 0.59213215 1.2783723 -1.2337389 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[[[[-0.11440673 0.7030934 -1.4509366 -1.511081 ] [-0.08547328 1.6554784 0.84120286 -1.6462803 ] [ 0.17332906 0.3294991 0.30007967 -1.869866 ]] [[ 0.28145665 0.3300694 0.09740213 -0.1613419 ] [-0.9557577 1.3279629 0.16332506 -0.68232745] [-1.0993817 0.25630575 -0.27054513 -0.376838 ]] [[-1.8979313 1.0407445 -0.68412405 0.5683432 ] [-0.9543146 -0.21203451 -0.1146967 -0.40573367] [ 0.71652013 -0.49653405 0.9129642 -0.60344183]] [[-0.48161447 1.0571262 -1.5543586 0.29951414] [-0.5873854 0.72151184 0.6262076 -0.10603213] [ 1.4789652 1.2359997 1.1941196 -0.91593224]] [[ 0.9184449 1.2760755 0.17642088 -1.208777 ] [-0.6612545 -0.54354084 -1.1850917 -0.5967279 ] [-0.1827366 -0.23732471 0.93439794 -0.33566168]]] [[[ 1.3712412 0.2027446 0.63376683 -1.4555378 ] [-1.1299776 -0.07659484 0.79980344 -0.19800454] [ 0.08541932 -0.00436225 -0.47744435 -1.4723907 ]] [[-0.59376734 -0.05889762 -0.6992119 -0.8286856 ] [-0.5643483 0.72308123 0.2150887 0.5759901 ] [ 0.61197764 0.33765855 0.6413203 -0.79738575]] [[-1.0122422 0.79367656 1.06604 1.7662873 ] [-0.53898376 -0.3510494 -0.85583705 -1.4623448 ] [-1.3412459 -0.32541496 1.4272239 -0.6224603 ]] [[ 0.55429524 1.5642756 0.350079 -1.6949313 ] [ 0.14330085 1.3993622 0.07396284 0.36646202] [-0.41736418 0.1704732 -0.7811821 -0.9199373 ]] [[ 0.9520105 -0.01843818 1.516323 1.5173719 ] [-0.5502519 2.4709747 1.2195027 -0.35427982] [ 0.663669 -0.10770386 0.9418491 1.9719846 ]]]] - alpha:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_24.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : int = prim::Constant[value=0]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[ 0.6859298 0.01370742 -0.5309467 -0.56890905] [ 1.2115197 -0.9892746 0.55676913 -0.6937502 ] [-0.7543024 -0.10290776 0.03095207 0.40518907]] [[ 0.57443583 -0.35447592 -1.9959118 -0.75353056] [ 0.03091968 -0.13464744 -0.59133375 0.0704401 ] [-0.6207483 0.7092086 0.57233065 0.5011025 ]] [[ 0.02289234 -0.29834276 -2.0309775 -0.3063342 ] [ 0.2411683 -0.99580276 -0.92726654 -0.31258902] [-1.1425731 -2.1311972 -1.510817 -0.44101843]] [[-0.15017632 -0.8199019 0.9489282 0.15852545] [-0.98386544 -0.1493596 2.0469484 -0.22374383] [-0.6462914 0.5129842 -1.3355601 -0.07387744]] [[-0.53500175 0.93805444 -0.08790021 0.00720591] [-0.6497848 -0.8023327 0.4520393 -1.8237425 ] [ 0.6053907 -0.23052749 0.9080535 1.3270329 ]]] [[[ 0.29213035 0.4036236 0.12154844 -0.10767645] [-1.0960672 0.73463774 -0.53817356 0.74013007] [-1.4568712 -0.02912998 -1.1644565 0.94993746]] [[ 0.91480535 -1.4895254 0.32347333 0.85302585] [-0.71295553 0.5080585 0.87530863 -2.5727015 ] [ 0.50677675 -1.0691493 0.9517623 0.3048871 ]] [[ 1.4016695 -0.74786085 -1.7392323 -0.26493767] [-1.640562 -0.22079036 0.6631698 1.1697938 ] [-0.5896452 -1.0619832 -1.1512854 2.2285175 ]] [[ 0.2835115 -1.1706113 -1.2386674 1.0869071 ] [ 0.74666476 -0.8852821 -1.9545625 -0.07410386] [-0.31082955 -0.56771505 1.9896123 -1.9235955 ]] [[-0.01182685 -1.2234368 -0.8201976 1.6796722 ] [ 0.30147758 0.38293967 -0.12890254 -0.51102984] [ 0.37614408 -0.5351136 -0.1361093 0.7440348 ]]]]; ov_res: [[[[ 0.6859298 0.01370742 -0.5309467 -0.56890905] [ 1.2115197 -0.9892746 0.55676913 -0.6937502 ] [-0.7543024 -0.10290776 0.03095207 0.40518907]] [[ 0.57443583 -0.35447592 -1.9959118 
-0.75353056] [ 0.03091968 -0.13464744 -0.59133375 0.0704401 ] [-0.6207483 0.7092086 0.57233065 0.5011025 ]] [[ 0.02289234 -0.29834276 -2.0309775 -0.3063342 ] [ 0.2411683 -0.99580276 -0.92726654 -0.31258902] [-1.1425731 -2.1311972 -1.510817 -0.44101843]] [[-0.15017632 -0.8199019 0.9489282 0.15852545] [-0.98386544 -0.1493596 2.0469484 -0.22374383] [-0.6462914 0.5129842 -1.3355601 -0.07387744]] [[-0.53500175 0.93805444 -0.08790021 0.00720591] [-0.6497848 -0.8023327 0.4520393 -1.8237425 ] [ 0.6053907 -0.23052749 0.9080535 1.3270329 ]]] [[[ 0.29213035 0.4036236 0.12154844 -0.10767645] [-1.0960672 0.73463774 -0.53817356 0.74013007] [-1.4568712 -0.02912998 -1.1644565 0.94993746]] [[ 0.91480535 -1.4895254 0.32347333 0.85302585] [-0.71295553 0.5080585 0.87530863 -2.5727015 ] [ 0.50677675 -1.0691493 0.9517623 0.3048871 ]] [[ 1.4016695 -0.74786085 -1.7392323 -0.26493767] [-1.640562 -0.22079036 0.6631698 1.1697938 ] [-0.5896452 -1.0619832 -1.1512854 2.2285175 ]] [[ 0.2835115 -1.1706113 -1.2386674 1.0869071 ] [ 0.74666476 -0.8852821 -1.9545625 -0.07410386] [-0.31082955 -0.56771505 1.9896123 -1.9235955 ]] [[-0.01182685 -1.2234368 -0.8201976 1.6796722 ] [ 0.30147758 0.38293967 -0.12890254 -0.51102984] [ 0.37614408 -0.5351136 -0.1361093 0.7440348 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[[[[-0.11440673 0.7030934 -1.4509366 -1.511081 ] [-0.08547328 1.6554784 0.84120286 -1.6462803 ] [ 0.17332906 0.3294991 0.30007967 -1.869866 ]] [[ 0.28145665 0.3300694 0.09740213 -0.1613419 ] [-0.9557577 1.3279629 0.16332506 -0.68232745] [-1.0993817 0.25630575 -0.27054513 -0.376838 ]] [[-1.8979313 1.0407445 -0.68412405 0.5683432 ] [-0.9543146 -0.21203451 -0.1146967 -0.40573367] [ 0.71652013 -0.49653405 0.9129642 -0.60344183]] [[-0.48161447 1.0571262 -1.5543586 0.29951414] [-0.5873854 0.72151184 0.6262076 -0.10603213] [ 1.4789652 1.2359997 1.1941196 -0.91593224]] [[ 0.9184449 1.2760755 0.17642088 -1.208777 ] [-0.6612545 -0.54354084 -1.1850917 -0.5967279 ] [-0.1827366 -0.23732471 0.93439794 -0.33566168]]] [[[ 1.3712412 0.2027446 0.63376683 -1.4555378 ] [-1.1299776 -0.07659484 0.79980344 -0.19800454] [ 0.08541932 -0.00436225 -0.47744435 -1.4723907 ]] [[-0.59376734 -0.05889762 -0.6992119 -0.8286856 ] [-0.5643483 0.72308123 0.2150887 0.5759901 ] [ 0.61197764 0.33765855 0.6413203 -0.79738575]] [[-1.0122422 0.79367656 1.06604 1.7662873 ] [-0.53898376 -0.3510494 -0.85583705 -1.4623448 ] [-1.3412459 -0.32541496 1.4272239 -0.6224603 ]] [[ 0.55429524 1.5642756 0.350079 -1.6949313 ] [ 0.14330085 1.3993622 0.07396284 0.36646202] [-0.41736418 0.1704732 -0.7811821 -0.9199373 ]] [[ 0.9520105 -0.01843818 1.516323 1.5173719 ] [-0.5502519 2.4709747 1.2195027 -0.35427982] [ 0.663669 -0.10770386 0.9418491 1.9719846 ]]]] - alpha:0.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_26.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : float = prim::Constant[value=0.5]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[-0.09677966 1.9490135 -0.6882143 -0.0682506 ] [ 0.91922003 1.1634784 0.41140047 -1.7216301 ] [ 0.20125663 -1.8405589 -0.3698395 -1.0081432 ]] [[-0.3873798 1.097499 -1.2182838 0.8540922 ] [ 1.2999498 0.7566352 0.2826059 -1.320498 ] [-0.9634392 0.4827215 0.53059995 -0.53471607]] [[-1.3295991 0.24491107 2.2907624 -0.22698179] [ 0.22888768 -1.760462 -1.3379316 -0.00625454] [ 1.1986666 0.10377517 1.3324609 -0.7436497 ]] [[-1.0781665 -0.32152092 -0.17736751 1.193866 ] [ 1.3130981 -0.11836329 0.81601703 -0.3706697 ] [ 2.4382765 1.6372252 1.5437196 -0.6220008 ]] [[ 0.73764175 -0.0916366 0.5386809 -1.4370775 ] [-0.83946365 -0.4506963 0.09685147 0.7163626 ] [-1.6741796 0.0229791 1.339678 0.6238746 ]]] [[[-0.02098411 0.41294277 0.89174867 -1.7126237 ] [-0.8345835 -0.3523886 0.9462775 1.247133 ] [ 0.43511245 -1.2982045 -0.69126445 -0.502133 ]] [[-1.4564687 -1.5055609 -0.37194058 -1.2168046 ] [-0.63059366 -1.6493626 -0.4539097 1.0150199 ] [ 0.7257786 0.5641776 -0.11062256 0.9402983 ]] [[ 0.19645274 -0.37862304 0.9886719 1.9914315 ] [ 0.651263 -0.2853198 1.2437029 1.0884833 ] [-2.0073714 -0.8222859 2.7305622 -1.0386823 ]] [[-1.5648222 0.3332812 -0.9929886 0.27535677] [ 0.8976967 -1.1066322 0.2228492 2.3189278 ] [-2.2809165 -1.1869136 -0.9348786 -0.6083249 ]] [[-0.26051968 0.2077421 -1.4886076 -1.6701332 ] [ 0.14293599 -0.6367612 2.058217 -0.92589366] [ 1.2885814 0.66708755 0.20150968 -0.30093884]]]]; ov_res: [[[[-0.09677966 1.9490135 -0.6882143 -0.0682506 ] [ 0.91922003 1.1634784 0.41140047 -1.7216301 ] [ 0.20125663 -1.8405589 -0.3698395 -1.0081432 ]] [[-0.3873798 1.097499 -1.2182838 0.8540922 ] [ 1.2999498 
0.7566352 0.2826059 -1.320498 ] [-0.9634392 0.4827215 0.53059995 -0.53471607]] [[-1.3295991 0.24491107 2.2907624 -0.22698179] [ 0.22888768 -1.760462 -1.3379316 -0.00625454] [ 1.1986666 0.10377517 1.3324609 -0.7436497 ]] [[-1.0781665 -0.32152092 -0.17736751 1.193866 ] [ 1.3130981 -0.11836329 0.81601703 -0.3706697 ] [ 2.4382765 1.6372252 1.5437196 -0.6220008 ]] [[ 0.73764175 -0.0916366 0.5386809 -1.4370775 ] [-0.83946365 -0.4506963 0.09685147 0.7163626 ] [-1.6741796 0.0229791 1.339678 0.6238746 ]]] [[[-0.02098411 0.41294277 0.89174867 -1.7126237 ] [-0.8345835 -0.3523886 0.9462775 1.247133 ] [ 0.43511245 -1.2982045 -0.69126445 -0.502133 ]] [[-1.4564687 -1.5055609 -0.37194058 -1.2168046 ] [-0.63059366 -1.6493626 -0.4539097 1.0150199 ] [ 0.7257786 0.5641776 -0.11062256 0.9402983 ]] [[ 0.19645274 -0.37862304 0.9886719 1.9914315 ] [ 0.651263 -0.2853198 1.2437029 1.0884833 ] [-2.0073714 -0.8222859 2.7305622 -1.0386823 ]] [[-1.5648222 0.3332812 -0.9929886 0.27535677] [ 0.8976967 -1.1066322 0.2228492 2.3189278 ] [-2.2809165 -1.1869136 -0.9348786 -0.6083249 ]] [[-0.26051968 0.2077421 -1.4886076 -1.6701332 ] [ 0.14293599 -0.6367612 2.058217 -0.92589366] [ 1.2885814 0.66708755 0.20150968 -0.30093884]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[[[[-0.11440673 0.7030934 -1.4509366 -1.511081 ] [-0.08547328 1.6554784 0.84120286 -1.6462803 ] [ 0.17332906 0.3294991 0.30007967 -1.869866 ]] [[ 0.28145665 0.3300694 0.09740213 -0.1613419 ] [-0.9557577 1.3279629 0.16332506 -0.68232745] [-1.0993817 0.25630575 -0.27054513 -0.376838 ]] [[-1.8979313 1.0407445 -0.68412405 0.5683432 ] [-0.9543146 -0.21203451 -0.1146967 -0.40573367] [ 0.71652013 -0.49653405 0.9129642 -0.60344183]] [[-0.48161447 1.0571262 -1.5543586 0.29951414] [-0.5873854 0.72151184 0.6262076 -0.10603213] [ 1.4789652 1.2359997 1.1941196 -0.91593224]] [[ 0.9184449 1.2760755 0.17642088 -1.208777 ] [-0.6612545 -0.54354084 -1.1850917 -0.5967279 ] [-0.1827366 -0.23732471 0.93439794 -0.33566168]]] [[[ 1.3712412 0.2027446 0.63376683 -1.4555378 ] [-1.1299776 -0.07659484 0.79980344 -0.19800454] [ 0.08541932 -0.00436225 -0.47744435 -1.4723907 ]] [[-0.59376734 -0.05889762 -0.6992119 -0.8286856 ] [-0.5643483 0.72308123 0.2150887 0.5759901 ] [ 0.61197764 0.33765855 0.6413203 -0.79738575]] [[-1.0122422 0.79367656 1.06604 1.7662873 ] [-0.53898376 -0.3510494 -0.85583705 -1.4623448 ] [-1.3412459 -0.32541496 1.4272239 -0.6224603 ]] [[ 0.55429524 1.5642756 0.350079 -1.6949313 ] [ 0.14330085 1.3993622 0.07396284 0.36646202] [-0.41736418 0.1704732 -0.7811821 -0.9199373 ]] [[ 0.9520105 -0.01843818 1.516323 1.5173719 ] [-0.5502519 2.4709747 1.2195027 -0.35427982] [ 0.663669 -0.10770386 0.9418491 1.9719846 ]]]] - alpha:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_28.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : int = prim::Constant[value=1]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[ 1.03878379e+00 3.44995171e-01 -5.70621789e-01 -5.99345744e-01] [-2.23768696e-01 1.91431832e+00 8.82099986e-01 -1.54134345e+00] [-7.60791183e-01 -2.51718611e-01 8.19839478e-01 -1.87228429e+00]] [[ 2.81475186e-01 9.41252112e-01 2.31747955e-01 2.89365888e-01] [-8.68944645e-01 2.74146438e-01 6.73518360e-01 -1.43528676e+00] [-1.48393989e+00 7.40209699e-01 -1.23586148e-01 -1.85538042e+00]] [[-9.51414943e-01 1.59198308e+00 -1.76529455e+00 -1.03635907e+00] [-9.29011047e-01 3.13834965e-01 -1.38737583e+00 -7.97919154e-01] [ 6.95804000e-01 2.28762209e-01 1.85231042e+00 -2.51233786e-01]] [[-5.97739220e-01 5.27136803e-01 -1.62903106e+00 -5.89735031e-01] [-5.69470644e-01 2.12552726e-01 -5.60642004e-01 6.18039250e-01] [ 1.07185197e+00 9.10963893e-01 1.98487520e+00 -2.42914867e+00]] [[ 2.35719252e+00 6.30112529e-01 4.55995262e-01 -8.02798271e-02] [ 2.98246026e-01 -2.47468770e-01 4.67797279e-01 4.03196096e-01] [ 2.63704687e-01 1.32156819e-01 5.62650442e-01 -7.80186296e-01]]] [[[ 7.46567309e-01 1.46958864e+00 1.38295937e+00 -2.14079833e+00] [ 3.73865604e-01 2.43224755e-01 1.97517824e+00 -5.99664092e-01] [ 1.09798956e+00 -6.47001445e-01 1.04777694e-01 -1.91010380e+00]] [[-7.96809912e-01 -1.98869383e+00 -4.62677836e-01 -1.09339404e+00] [-1.64536309e+00 2.07507038e+00 -4.62056488e-01 8.53801370e-01] [-1.93788743e+00 7.27287531e-01 2.52317524e+00 -1.06738830e+00]] [[-1.90768278e+00 1.79610252e+00 3.00384116e+00 2.42160726e+00] [-9.20053959e-01 -2.21654609e-01 -7.49099553e-01 -1.20518136e+00] [-1.17296362e+00 -5.90532303e-01 4.31390405e-01 -3.50478470e-01]] [[-2.94859827e-01 2.75113583e+00 4.92014885e-01 
-3.03184152e+00] [-1.13162696e-02 1.20109653e+00 -7.88447559e-01 -1.01257294e-01] [-1.41909719e-03 -1.28672972e-01 -2.14033842e+00 -1.33202481e+00]] [[ 6.20633125e-01 -5.43901026e-01 1.84407997e+00 2.10800958e+00] [-7.10964918e-01 5.58858633e-01 -4.86305952e-01 -2.02032924e+00] [ 3.63442421e-01 2.70973206e-01 1.56153035e+00 1.00984812e+00]]]]; ov_res: [[[[ 1.03878379e+00 3.44995171e-01 -5.70621789e-01 -5.99345744e-01] [-2.23768696e-01 1.91431832e+00 8.82099986e-01 -1.54134345e+00] [-7.60791183e-01 -2.51718611e-01 8.19839478e-01 -1.87228429e+00]] [[ 2.81475186e-01 9.41252112e-01 2.31747955e-01 2.89365888e-01] [-8.68944645e-01 2.74146438e-01 6.73518360e-01 -1.43528676e+00] [-1.48393989e+00 7.40209699e-01 -1.23586148e-01 -1.85538042e+00]] [[-9.51414943e-01 1.59198308e+00 -1.76529455e+00 -1.03635907e+00] [-9.29011047e-01 3.13834965e-01 -1.38737583e+00 -7.97919154e-01] [ 6.95804000e-01 2.28762209e-01 1.85231042e+00 -2.51233786e-01]] [[-5.97739220e-01 5.27136803e-01 -1.62903106e+00 -5.89735031e-01] [-5.69470644e-01 2.12552726e-01 -5.60642004e-01 6.18039250e-01] [ 1.07185197e+00 9.10963893e-01 1.98487520e+00 -2.42914867e+00]] [[ 2.35719252e+00 6.30112529e-01 4.55995262e-01 -8.02798271e-02] [ 2.98246026e-01 -2.47468770e-01 4.67797279e-01 4.03196096e-01] [ 2.63704687e-01 1.32156819e-01 5.62650442e-01 -7.80186296e-01]]] [[[ 7.46567309e-01 1.46958864e+00 1.38295937e+00 -2.14079833e+00] [ 3.73865604e-01 2.43224755e-01 1.97517824e+00 -5.99664092e-01] [ 1.09798956e+00 -6.47001445e-01 1.04777694e-01 -1.91010380e+00]] [[-7.96809912e-01 -1.98869383e+00 -4.62677836e-01 -1.09339404e+00] [-1.64536309e+00 2.07507038e+00 -4.62056488e-01 8.53801370e-01] [-1.93788743e+00 7.27287531e-01 2.52317524e+00 -1.06738830e+00]] [[-1.90768278e+00 1.79610252e+00 3.00384116e+00 2.42160726e+00] [-9.20053959e-01 -2.21654609e-01 -7.49099553e-01 -1.20518136e+00] [-1.17296362e+00 -5.90532303e-01 4.31390405e-01 -3.50478470e-01]] [[-2.94859827e-01 2.75113583e+00 4.92014885e-01 -3.03184152e+00] 
[-1.13162696e-02 1.20109653e+00 -7.88447559e-01 -1.01257294e-01] [-1.41909719e-03 -1.28672972e-01 -2.14033842e+00 -1.33202481e+00]] [[ 6.20633125e-01 -5.43901026e-01 1.84407997e+00 2.10800958e+00] [-7.10964918e-01 5.58858633e-01 -4.86305952e-01 -2.02032924e+00] [ 3.63442421e-01 2.70973206e-01 1.56153035e+00 1.00984812e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[[[[-0.11440673 0.7030934 -1.4509366 -1.511081 ] [-0.08547328 1.6554784 0.84120286 -1.6462803 ] [ 0.17332906 0.3294991 0.30007967 -1.869866 ]] [[ 0.28145665 0.3300694 0.09740213 -0.1613419 ] [-0.9557577 1.3279629 0.16332506 -0.68232745] [-1.0993817 0.25630575 -0.27054513 -0.376838 ]] [[-1.8979313 1.0407445 -0.68412405 0.5683432 ] [-0.9543146 -0.21203451 -0.1146967 -0.40573367] [ 0.71652013 -0.49653405 0.9129642 -0.60344183]] [[-0.48161447 1.0571262 -1.5543586 0.29951414] [-0.5873854 0.72151184 0.6262076 -0.10603213] [ 1.4789652 1.2359997 1.1941196 -0.91593224]] [[ 0.9184449 1.2760755 0.17642088 -1.208777 ] [-0.6612545 -0.54354084 -1.1850917 -0.5967279 ] [-0.1827366 -0.23732471 0.93439794 -0.33566168]]] [[[ 1.3712412 0.2027446 0.63376683 -1.4555378 ] [-1.1299776 -0.07659484 0.79980344 -0.19800454] [ 0.08541932 -0.00436225 -0.47744435 -1.4723907 ]] [[-0.59376734 -0.05889762 -0.6992119 -0.8286856 ] [-0.5643483 0.72308123 0.2150887 0.5759901 ] [ 0.61197764 0.33765855 0.6413203 -0.79738575]] [[-1.0122422 0.79367656 1.06604 1.7662873 ] [-0.53898376 -0.3510494 -0.85583705 -1.4623448 ] [-1.3412459 -0.32541496 1.4272239 -0.6224603 ]] [[ 0.55429524 1.5642756 0.350079 -1.6949313 ] [ 0.14330085 1.3993622 0.07396284 0.36646202] [-0.41736418 0.1704732 -0.7811821 -0.9199373 ]] [[ 0.9520105 -0.01843818 1.516323 1.5173719 ] [-0.5502519 2.4709747 1.2195027 -0.35427982] [ 0.663669 -0.10770386 0.9418491 1.9719846 ]]]] - alpha:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_30.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : int = prim::Constant[value=2]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[ 0.3397895 1.792233 -3.3358927 -1.7787513 ] [ 1.4673506 4.782634 3.2933877 -3.0130343 ] [-0.04251719 -0.25116706 0.55038303 -3.6376789 ]] [[ 0.57641286 1.1412518 -2.0665011 1.1484733 ] [-1.7423878 2.4429905 -0.33126482 -2.2886763 ] [-0.93177474 -0.53032017 -0.36089715 0.03206962]] [[-3.4910994 0.34446847 -2.4567351 1.1703144 ] [-2.1826978 0.28442827 -0.7996146 -0.7993016 ] [-0.66015327 -2.722231 2.0712283 0.35647035]] [[-2.8784065 2.272729 -1.895634 0.30687588] [-2.681447 1.0656798 0.7678838 -0.6385467 ] [ 0.50436616 4.438839 1.105167 -2.6077013 ]] [[ 1.6683832 2.5214167 0.5218336 -1.5176814 ] [-2.555449 0.44988585 -1.4637996 -2.1990433 ] [-0.7961844 0.04091531 1.9040318 -0.03648829]]] [[[ 3.2438478 1.0211138 2.1004367 -2.1190221 ] [-0.31574464 0.52780116 2.8162212 -0.32823563] [ 0.51763886 -0.27890652 -1.8690351 -2.2174454 ]] [[-2.3401804 -1.1919297 -2.8545709 -3.4585075 ] [-0.8052472 1.7240928 0.2758037 0.04561889] [ 0.7358974 2.6732996 4.063579 -0.36069548]] [[-1.1718407 2.3122776 2.5839796 2.1251736 ] [-1.6225885 -2.4795966 -2.3882399 -2.383094 ] [-2.2757237 -0.20718825 2.5187151 -2.7049127 ]] [[ 0.72404224 4.339568 1.2389997 -2.9145982 ] [ 1.0657169 2.6723523 0.4494326 0.22685254] [-1.0034275 0.5719848 -3.0370197 -4.080281 ]] [[ 0.33506382 1.5011919 4.8977804 2.1166108 ] [ 0.7889235 5.7585087 2.2193468 -1.6067994 ] [ 1.262723 1.4687572 0.39181936 4.5098968 ]]]]; ov_res: [[[[ 0.3397895 1.792233 -3.3358927 -1.7787513 ] [ 1.4673506 4.782634 3.2933877 -3.0130343 ] [-0.04251719 -0.25116706 0.55038303 -3.6376789 ]] [[ 0.57641286 1.1412518 -2.0665011 1.1484733 ] [-1.7423878 2.4429905 -0.33126482 
-2.2886763 ] [-0.93177474 -0.53032017 -0.36089715 0.03206962]] [[-3.4910994 0.34446847 -2.4567351 1.1703144 ] [-2.1826978 0.28442827 -0.7996146 -0.7993016 ] [-0.66015327 -2.722231 2.0712283 0.35647035]] [[-2.8784065 2.272729 -1.895634 0.30687588] [-2.681447 1.0656798 0.7678838 -0.6385467 ] [ 0.50436616 4.438839 1.105167 -2.6077013 ]] [[ 1.6683832 2.5214167 0.5218336 -1.5176814 ] [-2.555449 0.44988585 -1.4637996 -2.1990433 ] [-0.7961844 0.04091531 1.9040318 -0.03648829]]] [[[ 3.2438478 1.0211138 2.1004367 -2.1190221 ] [-0.31574464 0.52780116 2.8162212 -0.32823563] [ 0.51763886 -0.27890652 -1.8690351 -2.2174454 ]] [[-2.3401804 -1.1919297 -2.8545709 -3.4585075 ] [-0.8052472 1.7240928 0.2758037 0.04561889] [ 0.7358974 2.6732996 4.063579 -0.36069548]] [[-1.1718407 2.3122776 2.5839796 2.1251736 ] [-1.6225885 -2.4795966 -2.3882399 -2.383094 ] [-2.2757237 -0.20718825 2.5187151 -2.7049127 ]] [[ 0.72404224 4.339568 1.2389997 -2.9145982 ] [ 1.0657169 2.6723523 0.4494326 0.22685254] [-1.0034275 0.5719848 -3.0370197 -4.080281 ]] [[ 0.33506382 1.5011919 4.8977804 2.1166108 ] [ 0.7889235 5.7585087 2.2193468 -1.6067994 ] [ 1.262723 1.4687572 0.39181936 4.5098968 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[[[[-0.09049018 -1.5323927 0.49812433 -0.20038858] [-2.457309 -0.4119734 0.872772 -1.6950845 ] [ 0.45102027 -0.5713498 0.65475106 2.1807122 ]] [[ 0.19324401 -0.5345676 0.19971952 0.13243689] [ 0.10747942 1.4701145 -1.135472 -0.5038782 ] [ 0.31333825 1.0361656 -0.5531964 0.5488733 ]] [[ 0.25694898 -1.1148005 1.4387418 -0.68546957] [ 1.7608212 0.5498793 -1.8618677 1.8347455 ] [ 0.3365778 -1.5171782 -1.8877455 -0.08382453]] [[-1.0527123 -0.24266401 0.2806189 1.3526059 ] [-2.1633954 0.7213705 1.3026289 -0.6482584 ] [-0.9959042 -0.30667204 0.3698045 -0.8441346 ]] [[-0.47377717 0.01487178 1.3492866 0.98305863] [ 1.49794 -0.1452438 1.3143903 -0.1392767 ] [ 0.09662196 0.24876142 1.5023168 -0.35664776]]]] - alpha:-0.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_32.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : float = prim::Constant[value=-0.5]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[ 0.05681908 0.55586743 -0.11953613 1.8604699 ] [ 1.946315 1.3582287 -0.48615637 -0.03083396] [ 0.7764243 -0.7369009 -0.5020869 -2.874889 ]] [[ 1.8178192 -0.9990593 1.0444577 -1.0217875 ] [-1.0194482 0.82433367 -0.3708415 0.02014451] [-0.22832632 -2.0843756 1.1840916 -0.3887134 ]] [[ 0.57082176 -0.38547635 -0.79258674 0.13625759] [-1.071312 -1.2946522 0.640073 -0.87200797] [-0.4205625 -0.51770574 1.1618264 -1.0211291 ]] [[ 0.3685642 0.7533858 -0.29012573 1.109765 ] [ 0.9164042 -1.3967826 -0.76734 1.9869447 ] [-1.041919 -0.86178315 0.16530126 0.71195865]] [[ 0.26522756 0.89090466 -1.2341667 0.05965778] [-0.39502338 -1.1025065 -1.6040312 -0.00572165] [-1.2950193 1.284685 -0.48155007 -0.41113013]]] [[[ 1.0201826 0.91748434 -0.78229344 -0.87820953] [ 1.3269087 0.31731507 0.8507844 0.10276252] [-1.1559912 -1.213044 1.0362482 -2.1598196 ]] [[ 1.1334401 -0.47591 1.039823 -0.24905926] [ 0.26393858 -1.6891294 1.7775749 1.4407824 ] [-0.22212663 -0.01493496 -0.08840719 -0.65304136]] [[ 0.63373184 -0.91877854 -1.269882 3.0477276 ] [-1.4777347 -0.5966568 0.59326327 0.09789985] [-0.5504444 0.12476194 1.7571772 0.24572116]] [[-0.21436507 1.9225647 1.1016986 -2.2665405 ] [ 1.9261986 -0.38347846 -2.6185489 2.506319 ] [ 0.46962622 -1.2280751 -1.7070794 1.0159947 ]] [[ 1.2850919 -1.3284523 -0.6416476 -0.35795605] [-1.0070285 -0.73429805 -0.57885146 0.1121605 ] [ 0.26205295 0.18305653 -0.85590893 -0.10737096]]]]; ov_res: [[[[ 0.05681908 0.55586743 -0.11953613 1.8604699 ] [ 1.946315 1.3582287 -0.48615637 -0.03083396] [ 0.7764243 -0.7369009 -0.5020869 -2.874889 ]] [[ 1.8178192 -0.9990593 1.0444577 -1.0217875 ] 
[-1.0194482 0.82433367 -0.3708415 0.02014451] [-0.22832632 -2.0843756 1.1840916 -0.3887134 ]] [[ 0.57082176 -0.38547635 -0.79258674 0.13625759] [-1.071312 -1.2946522 0.640073 -0.87200797] [-0.4205625 -0.51770574 1.1618264 -1.0211291 ]] [[ 0.3685642 0.7533858 -0.29012573 1.109765 ] [ 0.9164042 -1.3967826 -0.76734 1.9869447 ] [-1.041919 -0.86178315 0.16530126 0.71195865]] [[ 0.26522756 0.89090466 -1.2341667 0.05965778] [-0.39502338 -1.1025065 -1.6040312 -0.00572165] [-1.2950193 1.284685 -0.48155007 -0.41113013]]] [[[ 1.0201826 0.91748434 -0.78229344 -0.87820953] [ 1.3269087 0.31731507 0.8507844 0.10276252] [-1.1559912 -1.213044 1.0362482 -2.1598196 ]] [[ 1.1334401 -0.47591 1.039823 -0.24905926] [ 0.26393858 -1.6891294 1.7775749 1.4407824 ] [-0.22212663 -0.01493496 -0.08840719 -0.65304136]] [[ 0.63373184 -0.91877854 -1.269882 3.0477276 ] [-1.4777347 -0.5966568 0.59326327 0.09789985] [-0.5504444 0.12476194 1.7571772 0.24572116]] [[-0.21436507 1.9225647 1.1016986 -2.2665405 ] [ 1.9261986 -0.38347846 -2.6185489 2.506319 ] [ 0.46962622 -1.2280751 -1.7070794 1.0159947 ]] [[ 1.2850919 -1.3284523 -0.6416476 -0.35795605] [-1.0070285 -0.73429805 -0.57885146 0.1121605 ] [ 0.26205295 0.18305653 -0.85590893 -0.10737096]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[[[[-0.09049018 -1.5323927 0.49812433 -0.20038858] [-2.457309 -0.4119734 0.872772 -1.6950845 ] [ 0.45102027 -0.5713498 0.65475106 2.1807122 ]] [[ 0.19324401 -0.5345676 0.19971952 0.13243689] [ 0.10747942 1.4701145 -1.135472 -0.5038782 ] [ 0.31333825 1.0361656 -0.5531964 0.5488733 ]] [[ 0.25694898 -1.1148005 1.4387418 -0.68546957] [ 1.7608212 0.5498793 -1.8618677 1.8347455 ] [ 0.3365778 -1.5171782 -1.8877455 -0.08382453]] [[-1.0527123 -0.24266401 0.2806189 1.3526059 ] [-2.1633954 0.7213705 1.3026289 -0.6482584 ] [-0.9959042 -0.30667204 0.3698045 -0.8441346 ]] [[-0.47377717 0.01487178 1.3492866 0.98305863] [ 1.49794 -0.1452438 1.3143903 -0.1392767 ] [ 0.09662196 0.24876142 1.5023168 -0.35664776]]]] - alpha:0 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_34.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : int = prim::Constant[value=0]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[ 0.05962345 0.01153879 -0.86078197 0.13133112] [ 0.22947682 0.04842539 -0.8750624 -0.11749276] [ 0.15275465 0.12835874 0.9950668 -1.5298804 ]] [[-0.64942455 0.23335843 0.2173386 -0.6862232 ] [ 1.1752553 1.702087 -0.81720275 0.2250142 ] [ 0.0486018 0.19434212 1.8733383 0.15909636]] [[ 0.38666645 -0.4061646 0.26109532 0.40978897] [ 0.01984755 0.5660345 -0.35928193 -1.4640006 ] [ 0.8660984 -0.15454857 -1.9827355 -1.9062128 ]] [[-1.1131474 0.03722375 0.63689214 0.58819294] [-0.5021509 0.98508286 -0.04289346 -0.7468031 ] [-0.9066816 -1.1277397 -0.11683682 0.45498863]] [[ 0.61951524 -0.5168671 -1.1406305 0.3125391 ] [ 0.15695924 0.08791803 -0.9898653 0.80009985] [ 0.30461127 -1.012313 -0.06897324 -0.552397 ]]] [[[ 1.0467758 -0.3525668 0.6716576 -1.5897183 ] [-0.06156952 -0.41328326 0.06660412 -0.5197805 ] [-1.2160772 -1.9011167 0.3705069 -0.3432224 ]] [[ 0.31979337 -0.1951212 -0.4263292 0.5326661 ] [-1.0547024 1.4702024 1.6769999 -2.0245981 ] [ 0.81166565 -2.4313471 -0.6182695 0.27955326]] [[ 1.3363451 0.97362924 -0.86531913 2.2708042 ] [-1.9839789 0.8585271 0.11366987 1.1003749 ] [ 1.8707746 -1.0031259 1.4212888 -0.6931079 ]] [[ 0.20050214 0.72544074 0.1990777 -0.1034205 ] [ 0.16480833 1.1677825 0.43296093 -0.8986132 ] [ 0.01186226 1.327103 -1.203255 1.3739345 ]] [[ 0.91233397 0.6140557 -1.2319982 0.2194756 ] [-2.1722512 -1.2446086 0.20358256 0.53087753] [ 0.6664147 -0.00746874 1.1200154 0.22980852]]]]; ov_res: [[[[ 0.05962345 0.01153879 -0.86078197 0.13133112] [ 0.22947682 0.04842539 -0.8750624 -0.11749276] [ 0.15275465 0.12835874 0.9950668 -1.5298804 ]] [[-0.64942455 0.23335843 0.2173386 -0.6862232 ] 
[ 1.1752553 1.702087 -0.81720275 0.2250142 ] [ 0.0486018 0.19434212 1.8733383 0.15909636]] [[ 0.38666645 -0.4061646 0.26109532 0.40978897] [ 0.01984755 0.5660345 -0.35928193 -1.4640006 ] [ 0.8660984 -0.15454857 -1.9827355 -1.9062128 ]] [[-1.1131474 0.03722375 0.63689214 0.58819294] [-0.5021509 0.98508286 -0.04289346 -0.7468031 ] [-0.9066816 -1.1277397 -0.11683682 0.45498863]] [[ 0.61951524 -0.5168671 -1.1406305 0.3125391 ] [ 0.15695924 0.08791803 -0.9898653 0.80009985] [ 0.30461127 -1.012313 -0.06897324 -0.552397 ]]] [[[ 1.0467758 -0.3525668 0.6716576 -1.5897183 ] [-0.06156952 -0.41328326 0.06660412 -0.5197805 ] [-1.2160772 -1.9011167 0.3705069 -0.3432224 ]] [[ 0.31979337 -0.1951212 -0.4263292 0.5326661 ] [-1.0547024 1.4702024 1.6769999 -2.0245981 ] [ 0.81166565 -2.4313471 -0.6182695 0.27955326]] [[ 1.3363451 0.97362924 -0.86531913 2.2708042 ] [-1.9839789 0.8585271 0.11366987 1.1003749 ] [ 1.8707746 -1.0031259 1.4212888 -0.6931079 ]] [[ 0.20050214 0.72544074 0.1990777 -0.1034205 ] [ 0.16480833 1.1677825 0.43296093 -0.8986132 ] [ 0.01186226 1.327103 -1.203255 1.3739345 ]] [[ 0.91233397 0.6140557 -1.2319982 0.2194756 ] [-2.1722512 -1.2446086 0.20358256 0.53087753] [ 0.6664147 -0.00746874 1.1200154 0.22980852]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[[[[-0.09049018 -1.5323927 0.49812433 -0.20038858] [-2.457309 -0.4119734 0.872772 -1.6950845 ] [ 0.45102027 -0.5713498 0.65475106 2.1807122 ]] [[ 0.19324401 -0.5345676 0.19971952 0.13243689] [ 0.10747942 1.4701145 -1.135472 -0.5038782 ] [ 0.31333825 1.0361656 -0.5531964 0.5488733 ]] [[ 0.25694898 -1.1148005 1.4387418 -0.68546957] [ 1.7608212 0.5498793 -1.8618677 1.8347455 ] [ 0.3365778 -1.5171782 -1.8877455 -0.08382453]] [[-1.0527123 -0.24266401 0.2806189 1.3526059 ] [-2.1633954 0.7213705 1.3026289 -0.6482584 ] [-0.9959042 -0.30667204 0.3698045 -0.8441346 ]] [[-0.47377717 0.01487178 1.3492866 0.98305863] [ 1.49794 -0.1452438 1.3143903 -0.1392767 ] [ 0.09662196 0.24876142 1.5023168 -0.35664776]]]] - alpha:0.5 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_36.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : float = prim::Constant[value=0.5]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[-0.01148931 -0.7729302 0.19168748 1.4349983 ] [-1.2305949 -0.82224226 -1.2557359 -0.08815545] [ 0.80723 0.6468679 0.9921815 2.5029202 ]] [[-0.29301906 -1.6526169 1.3424448 -0.02072155] [-0.6644482 0.47474325 -1.678887 -1.2341394 ] [-0.8708472 2.125574 -1.3538775 0.2923557 ]] [[ 0.894696 -1.1641464 0.5416312 -0.16243485] [ 1.5949259 0.26050454 -1.6880622 -0.04845184] [-0.07009485 -1.6219366 -0.32455766 1.1149393 ]] [[-1.7432594 -0.8879911 0.16171901 0.06488484] [ 0.79095733 0.540985 -2.762772 -0.53057754] [-1.0513961 0.08935277 -0.6157715 0.15400699]] [[ 1.7680094 -0.7412171 0.39261594 -0.02996561] [ 1.0679882 -1.1757617 0.842646 -1.3890139 ] [ 0.13647103 0.8946633 0.6943486 -0.26996452]]] [[[-0.24312215 -0.29524073 0.7593847 -0.3417421 ] [-1.3049332 -1.4882376 -0.5948018 -0.8475587 ] [ 0.47908157 0.9186491 -0.25630862 3.0178058 ]] [[ 0.27582195 -0.33901045 -0.5742575 -0.27504832] [ 0.5304947 2.2557316 -1.5956973 -0.6335987 ] [ 0.14924699 1.2822049 -0.02487227 -1.9515636 ]] [[-1.1687815 -0.03235412 0.2580599 -2.136341 ] [ 1.8738406 1.9457208 -2.4950325 -0.18740445] [-1.2577206 -0.53242326 -2.1821637 0.766951 ]] [[-0.3993141 -0.08741149 -1.0447552 1.0772874 ] [-1.7615162 0.4880696 0.87371206 -1.4787449 ] [-3.1292229 -0.98804307 0.21328074 -1.2377441 ]] [[ 1.1414628 0.36191845 0.5263924 0.4968783 ] [-0.07062048 -0.76341337 2.0714767 1.2974286 ] [ 1.3496299 0.70216644 2.2518878 -1.4144582 ]]]]; ov_res: [[[[-0.01148931 -0.7729302 0.19168748 1.4349983 ] [-1.2305949 -0.82224226 -1.2557359 -0.08815545] [ 0.80723 0.6468679 0.9921815 2.5029202 ]] [[-0.29301906 -1.6526169 1.3424448 -0.02072155] 
[-0.6644482 0.47474325 -1.678887 -1.2341394 ] [-0.8708472 2.125574 -1.3538775 0.2923557 ]] [[ 0.894696 -1.1641464 0.5416312 -0.16243485] [ 1.5949259 0.26050454 -1.6880622 -0.04845184] [-0.07009485 -1.6219366 -0.32455766 1.1149393 ]] [[-1.7432594 -0.8879911 0.16171901 0.06488484] [ 0.79095733 0.540985 -2.762772 -0.53057754] [-1.0513961 0.08935277 -0.6157715 0.15400699]] [[ 1.7680094 -0.7412171 0.39261594 -0.02996561] [ 1.0679882 -1.1757617 0.842646 -1.3890139 ] [ 0.13647103 0.8946633 0.6943486 -0.26996452]]] [[[-0.24312215 -0.29524073 0.7593847 -0.3417421 ] [-1.3049332 -1.4882376 -0.5948018 -0.8475587 ] [ 0.47908157 0.9186491 -0.25630862 3.0178058 ]] [[ 0.27582195 -0.33901045 -0.5742575 -0.27504832] [ 0.5304947 2.2557316 -1.5956973 -0.6335987 ] [ 0.14924699 1.2822049 -0.02487227 -1.9515636 ]] [[-1.1687815 -0.03235412 0.2580599 -2.136341 ] [ 1.8738406 1.9457208 -2.4950325 -0.18740445] [-1.2577206 -0.53242326 -2.1821637 0.766951 ]] [[-0.3993141 -0.08741149 -1.0447552 1.0772874 ] [-1.7615162 0.4880696 0.87371206 -1.4787449 ] [-3.1292229 -0.98804307 0.21328074 -1.2377441 ]] [[ 1.1414628 0.36191845 0.5263924 0.4968783 ] [-0.07062048 -0.76341337 2.0714767 1.2974286 ] [ 1.3496299 0.70216644 2.2518878 -1.4144582 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[[[[-0.09049018 -1.5323927 0.49812433 -0.20038858] [-2.457309 -0.4119734 0.872772 -1.6950845 ] [ 0.45102027 -0.5713498 0.65475106 2.1807122 ]] [[ 0.19324401 -0.5345676 0.19971952 0.13243689] [ 0.10747942 1.4701145 -1.135472 -0.5038782 ] [ 0.31333825 1.0361656 -0.5531964 0.5488733 ]] [[ 0.25694898 -1.1148005 1.4387418 -0.68546957] [ 1.7608212 0.5498793 -1.8618677 1.8347455 ] [ 0.3365778 -1.5171782 -1.8877455 -0.08382453]] [[-1.0527123 -0.24266401 0.2806189 1.3526059 ] [-2.1633954 0.7213705 1.3026289 -0.6482584 ] [-0.9959042 -0.30667204 0.3698045 -0.8441346 ]] [[-0.47377717 0.01487178 1.3492866 0.98305863] [ 1.49794 -0.1452438 1.3143903 -0.1392767 ] [ 0.09662196 0.24876142 1.5023168 -0.35664776]]]] - alpha:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_38.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : int = prim::Constant[value=1]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[ 0.74711204 -0.1359402 1.4718236 0.13331419] [-2.6585693 -1.876727 -1.5228777 -2.032529 ] [-1.5944381 -0.29444936 -0.2318936 1.6958932 ]] [[-0.46828243 0.09527731 -0.09816912 -0.48976308] [ 0.5718369 3.5557597 -0.5277537 -1.3075876 ] [ 0.26833928 2.1739726 -1.9983983 -0.4719572 ]] [[ 0.8325652 -2.5295854 1.9846842 0.27632302] [ 2.4746163 1.1022536 -2.3254304 2.2667837 ] [-0.3221595 -1.2922233 -0.9089329 -0.5221053 ]] [[-0.66229045 -1.7985787 0.58478963 0.04960549] [-2.6505008 1.694733 3.6841507 -1.2571602 ] [-1.2646682 0.1915505 1.3941166 0.5820957 ]] [[-1.3496282 0.01007166 1.5682762 0.358961 ] [ 0.83856523 -0.27586806 1.6059531 -0.9418636 ] [ 1.5965031 1.527026 1.5733325 -1.2118936 ]]] [[[-0.21945933 -2.7654152 0.87179244 -0.1177396 ] [-0.7043196 0.7921784 0.8397783 -2.3436046 ] [-0.4374369 0.0668115 1.7645599 3.0459855 ]] [[-0.5835757 0.072954 0.06560197 -0.6441379 ] [-0.6280211 0.3526007 -2.136217 0.60233575] [ 2.4146035 0.5374808 0.0787366 0.81309724]] [[-1.0502341 -1.3962953 1.1420482 -1.9005947 ] [ 3.0362253 -1.9013708 -1.5668737 1.3123779 ] [ 0.59746885 -0.22648907 -3.0571148 -0.1433502 ]] [[ 0.7198957 -0.69544894 0.14636073 0.11573589] [-0.88018286 0.7087316 -1.1522623 0.03406507] [-1.576508 1.2616427 -0.00770614 1.3646605 ]] [[ 0.46795142 -0.12041721 2.406189 3.9903946 ] [ 3.5201526 0.27941185 0.890746 -0.06860204] [-0.03054816 -0.07370114 1.3118042 -0.17121473]]]]; ov_res: [[[[ 0.74711204 -0.1359402 1.4718236 0.13331419] [-2.6585693 -1.876727 -1.5228777 -2.032529 ] [-1.5944381 -0.29444936 -0.2318936 1.6958932 ]] [[-0.46828243 0.09527731 -0.09816912 -0.48976308] [ 0.5718369 3.5557597 
-0.5277537 -1.3075876 ] [ 0.26833928 2.1739726 -1.9983983 -0.4719572 ]] [[ 0.8325652 -2.5295854 1.9846842 0.27632302] [ 2.4746163 1.1022536 -2.3254304 2.2667837 ] [-0.3221595 -1.2922233 -0.9089329 -0.5221053 ]] [[-0.66229045 -1.7985787 0.58478963 0.04960549] [-2.6505008 1.694733 3.6841507 -1.2571602 ] [-1.2646682 0.1915505 1.3941166 0.5820957 ]] [[-1.3496282 0.01007166 1.5682762 0.358961 ] [ 0.83856523 -0.27586806 1.6059531 -0.9418636 ] [ 1.5965031 1.527026 1.5733325 -1.2118936 ]]] [[[-0.21945933 -2.7654152 0.87179244 -0.1177396 ] [-0.7043196 0.7921784 0.8397783 -2.3436046 ] [-0.4374369 0.0668115 1.7645599 3.0459855 ]] [[-0.5835757 0.072954 0.06560197 -0.6441379 ] [-0.6280211 0.3526007 -2.136217 0.60233575] [ 2.4146035 0.5374808 0.0787366 0.81309724]] [[-1.0502341 -1.3962953 1.1420482 -1.9005947 ] [ 3.0362253 -1.9013708 -1.5668737 1.3123779 ] [ 0.59746885 -0.22648907 -3.0571148 -0.1433502 ]] [[ 0.7198957 -0.69544894 0.14636073 0.11573589] [-0.88018286 0.7087316 -1.1522623 0.03406507] [-1.576508 1.2616427 -0.00770614 1.3646605 ]] [[ 0.46795142 -0.12041721 2.406189 3.9903946 ] [ 3.5201526 0.27941185 0.890746 -0.06860204] [-0.03054816 -0.07370114 1.3118042 -0.17121473]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[[[[-0.09049018 -1.5323927 0.49812433 -0.20038858] [-2.457309 -0.4119734 0.872772 -1.6950845 ] [ 0.45102027 -0.5713498 0.65475106 2.1807122 ]] [[ 0.19324401 -0.5345676 0.19971952 0.13243689] [ 0.10747942 1.4701145 -1.135472 -0.5038782 ] [ 0.31333825 1.0361656 -0.5531964 0.5488733 ]] [[ 0.25694898 -1.1148005 1.4387418 -0.68546957] [ 1.7608212 0.5498793 -1.8618677 1.8347455 ] [ 0.3365778 -1.5171782 -1.8877455 -0.08382453]] [[-1.0527123 -0.24266401 0.2806189 1.3526059 ] [-2.1633954 0.7213705 1.3026289 -0.6482584 ] [-0.9959042 -0.30667204 0.3698045 -0.8441346 ]] [[-0.47377717 0.01487178 1.3492866 0.98305863] [ 1.49794 -0.1452438 1.3143903 -0.1392767 ] [ 0.09662196 0.24876142 1.5023168 -0.35664776]]]] - alpha:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_40.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : int = prim::Constant[value=2]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[-2.8435439e-02 -1.5682486e+00 1.2047645e+00 3.0236870e-02] [-4.2134829e+00 -9.6173346e-02 3.3699167e-01 -2.3695438e+00] [-2.4188071e-01 -1.1212720e+00 1.6433427e+00 5.4614797e+00]] [[-3.3875722e-01 -1.4352849e+00 2.3422979e-01 1.1345352e-01] [-3.8647425e-01 2.1447902e+00 -3.0516632e+00 -6.2451297e-01] [-7.0439023e-01 9.8667121e-01 -3.1763716e+00 7.8954446e-01]] [[ 1.0446310e-02 -2.5552814e+00 3.6949141e+00 -1.9862831e+00] [ 5.9616632e+00 4.0386546e-01 -4.4620299e+00 4.6061411e+00] [ 3.2260880e+00 -3.1298685e+00 -6.4926081e+00 8.0554509e-01]] [[-1.4288175e+00 8.3469349e-01 7.8478479e-01 3.7529583e+00] [-3.4988747e+00 2.2115216e+00 3.3871520e+00 2.0359683e-01] [-1.8079923e+00 8.3170807e-01 9.4217783e-01 -2.3521478e+00]] [[-7.4586481e-01 -5.2959555e-01 2.2823524e+00 3.0247216e+00] [ 3.4838119e+00 -2.4723262e-01 1.9316430e+00 -1.6211477e-01] [ 3.6163762e-02 7.0702714e-01 1.3676316e+00 -9.9990022e-01]]] [[[ 2.0128630e-01 -3.2027235e+00 1.3831568e+00 -5.3211421e-02] [-5.3510351e+00 -1.7437620e+00 1.7508298e+00 -2.5795732e+00] [-1.5886908e+00 -1.5365629e+00 6.7139035e-01 5.4776187e+00]] [[-6.2141716e-03 -1.1064917e+00 5.0070035e-01 -2.6088089e-02] [ 1.4374644e-01 2.0681596e+00 -1.5012932e+00 6.9404483e-01] [ 8.4329635e-01 3.4943013e+00 -8.9789063e-01 1.9607604e+00]] [[-3.7785387e-01 -1.8138238e+00 3.9967885e+00 -7.4734759e-01] [ 3.5514843e+00 2.0206220e+00 -7.5746703e-01 4.3183599e+00] [ 1.5970244e+00 -3.6396399e+00 -4.8328347e+00 3.5977393e-02]] [[-2.1088617e+00 -1.6544368e+00 7.0606887e-01 2.8968487e+00] [-4.1158533e+00 2.1839471e+00 2.9081290e+00 -4.3398350e-01] [-1.7485609e+00 8.2447827e-01 
1.8087763e+00 -1.0141578e+00]] [[-2.1548042e+00 -1.7137310e-01 3.7874746e+00 2.3447804e+00] [ 3.3969741e+00 -4.2466387e-01 3.0086598e+00 8.9426726e-02] [-2.0652571e+00 6.2233227e-01 1.8416082e+00 6.1203963e-01]]]]; ov_res: [[[[-2.8435439e-02 -1.5682486e+00 1.2047645e+00 3.0236870e-02] [-4.2134829e+00 -9.6173346e-02 3.3699167e-01 -2.3695438e+00] [-2.4188071e-01 -1.1212720e+00 1.6433427e+00 5.4614797e+00]] [[-3.3875722e-01 -1.4352849e+00 2.3422979e-01 1.1345352e-01] [-3.8647425e-01 2.1447902e+00 -3.0516632e+00 -6.2451297e-01] [-7.0439023e-01 9.8667121e-01 -3.1763716e+00 7.8954446e-01]] [[ 1.0446310e-02 -2.5552814e+00 3.6949141e+00 -1.9862831e+00] [ 5.9616632e+00 4.0386546e-01 -4.4620299e+00 4.6061411e+00] [ 3.2260880e+00 -3.1298685e+00 -6.4926081e+00 8.0554509e-01]] [[-1.4288175e+00 8.3469349e-01 7.8478479e-01 3.7529583e+00] [-3.4988747e+00 2.2115216e+00 3.3871520e+00 2.0359683e-01] [-1.8079923e+00 8.3170807e-01 9.4217783e-01 -2.3521478e+00]] [[-7.4586481e-01 -5.2959555e-01 2.2823524e+00 3.0247216e+00] [ 3.4838119e+00 -2.4723262e-01 1.9316430e+00 -1.6211477e-01] [ 3.6163762e-02 7.0702714e-01 1.3676316e+00 -9.9990022e-01]]] [[[ 2.0128630e-01 -3.2027235e+00 1.3831568e+00 -5.3211421e-02] [-5.3510351e+00 -1.7437620e+00 1.7508298e+00 -2.5795732e+00] [-1.5886908e+00 -1.5365629e+00 6.7139035e-01 5.4776187e+00]] [[-6.2141716e-03 -1.1064917e+00 5.0070035e-01 -2.6088089e-02] [ 1.4374644e-01 2.0681596e+00 -1.5012932e+00 6.9404483e-01] [ 8.4329635e-01 3.4943013e+00 -8.9789063e-01 1.9607604e+00]] [[-3.7785387e-01 -1.8138238e+00 3.9967885e+00 -7.4734759e-01] [ 3.5514843e+00 2.0206220e+00 -7.5746703e-01 4.3183599e+00] [ 1.5970244e+00 -3.6396399e+00 -4.8328347e+00 3.5977393e-02]] [[-2.1088617e+00 -1.6544368e+00 7.0606887e-01 2.8968487e+00] [-4.1158533e+00 2.1839471e+00 2.9081290e+00 -4.3398350e-01] [-1.7485609e+00 8.2447827e-01 1.8087763e+00 -1.0141578e+00]] [[-2.1548042e+00 -1.7137310e-01 3.7874746e+00 2.3447804e+00] [ 3.3969741e+00 -4.2466387e-01 3.0086598e+00 8.9426726e-02] 
[-2.0652571e+00 6.2233227e-01 1.8416082e+00 6.1203963e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[-0.5907932] - alpha:-0.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_42.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : float = prim::Constant[value=-0.5]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[-1.48853421e-01 3.83106291e-01 7.41454601e-01 1.19348928e-01] [ 1.14717209e+00 -5.83996773e-01 -1.12078488e-02 1.12952983e+00] [ 1.12866545e+00 1.69211671e-01 -1.06262416e-01 4.93012339e-01]] [[-3.55315357e-01 9.73157167e-01 -2.82022864e-01 8.71127963e-01] [ 1.19588876e+00 -2.34121948e-01 5.31712174e-01 -7.15314984e-01] [ 1.25305748e+00 -1.94714952e+00 7.72960424e-01 -5.28083444e-01]] [[ 3.96590233e+00 4.43839341e-01 1.00617385e+00 -3.56618494e-01] [ 9.20565367e-01 2.78179258e-01 -3.12615544e-01 8.94405007e-01] [ 7.71061838e-01 2.14592242e+00 3.94338310e-01 -1.22361624e+00]] [[-8.23978186e-01 2.02296948e+00 -4.00423795e-01 8.64263028e-02] [-2.58246094e-01 9.40119267e-01 1.15017998e+00 -9.58912849e-01] [-7.62497783e-01 2.46638298e+00 -3.63645107e-01 1.23656467e-01]] [[ 5.02940714e-02 1.01972234e+00 7.41017103e-01 7.53461719e-01] [ 1.31663382e+00 8.66401911e-01 1.76338506e+00 -1.68309915e+00] [-5.17688513e-01 -1.50993764e-01 1.23526442e+00 3.88983160e-01]]] [[[ 1.16361392e+00 1.20620057e-01 1.26722133e+00 -2.36395210e-01] [ 9.24025178e-01 -1.32381916e-03 8.87807727e-01 5.73263943e-01] [-3.10910314e-01 2.24598885e+00 -1.31884837e+00 1.66201663e+00]] [[ 5.08813411e-02 -1.02765530e-01 2.49952063e-01 -2.00583547e-01] [ 7.85673857e-01 1.28641343e+00 2.01384401e+00 1.02347028e+00] [-5.37497103e-02 -1.06826222e+00 4.85851049e-01 7.62009025e-02]] [[-3.26439947e-01 6.92507386e-01 -1.24385262e+00 1.52451205e+00] [-6.83578610e-01 1.11884439e+00 -7.00999379e-01 3.26728404e-01] [ 1.43591857e+00 -8.02373886e-01 1.41493082e-02 -1.50041729e-01]] [[-1.10214615e+00 -1.37924027e+00 5.71965635e-01 1.36065185e-02] [ 
2.17530417e+00 -6.00880623e-01 6.22337312e-02 1.32058334e+00] [-2.62074262e-01 8.58647466e-01 4.17522788e-01 1.05588722e+00]] [[-1.39853418e-01 2.64635873e+00 -1.60616487e-01 -9.74907875e-01] [ 5.21647930e-01 4.39038455e-01 1.12957823e+00 -5.36940992e-02] [ 1.35019946e+00 -6.82092190e-01 9.66164023e-02 -1.54666930e-01]]]]; ov_res: [[[[-1.48853421e-01 3.83106291e-01 7.41454601e-01 1.19348928e-01] [ 1.14717209e+00 -5.83996773e-01 -1.12078488e-02 1.12952983e+00] [ 1.12866545e+00 1.69211671e-01 -1.06262416e-01 4.93012339e-01]] [[-3.55315357e-01 9.73157167e-01 -2.82022864e-01 8.71127963e-01] [ 1.19588876e+00 -2.34121948e-01 5.31712174e-01 -7.15314984e-01] [ 1.25305748e+00 -1.94714952e+00 7.72960424e-01 -5.28083444e-01]] [[ 3.96590233e+00 4.43839341e-01 1.00617385e+00 -3.56618494e-01] [ 9.20565367e-01 2.78179258e-01 -3.12615544e-01 8.94405007e-01] [ 7.71061838e-01 2.14592242e+00 3.94338310e-01 -1.22361624e+00]] [[-8.23978186e-01 2.02296948e+00 -4.00423795e-01 8.64263028e-02] [-2.58246094e-01 9.40119267e-01 1.15017998e+00 -9.58912849e-01] [-7.62497783e-01 2.46638298e+00 -3.63645107e-01 1.23656467e-01]] [[ 5.02940714e-02 1.01972234e+00 7.41017103e-01 7.53461719e-01] [ 1.31663382e+00 8.66401911e-01 1.76338506e+00 -1.68309915e+00] [-5.17688513e-01 -1.50993764e-01 1.23526442e+00 3.88983160e-01]]] [[[ 1.16361392e+00 1.20620057e-01 1.26722133e+00 -2.36395210e-01] [ 9.24025178e-01 -1.32381916e-03 8.87807727e-01 5.73263943e-01] [-3.10910314e-01 2.24598885e+00 -1.31884837e+00 1.66201663e+00]] [[ 5.08813411e-02 -1.02765530e-01 2.49952063e-01 -2.00583547e-01] [ 7.85673857e-01 1.28641343e+00 2.01384401e+00 1.02347028e+00] [-5.37497103e-02 -1.06826222e+00 4.85851049e-01 7.62009025e-02]] [[-3.26439947e-01 6.92507386e-01 -1.24385262e+00 1.52451205e+00] [-6.83578610e-01 1.11884439e+00 -7.00999379e-01 3.26728404e-01] [ 1.43591857e+00 -8.02373886e-01 1.41493082e-02 -1.50041729e-01]] [[-1.10214615e+00 -1.37924027e+00 5.71965635e-01 1.36065185e-02] [ 2.17530417e+00 -6.00880623e-01 
6.22337312e-02 1.32058334e+00] [-2.62074262e-01 8.58647466e-01 4.17522788e-01 1.05588722e+00]] [[-1.39853418e-01 2.64635873e+00 -1.60616487e-01 -9.74907875e-01] [ 5.21647930e-01 4.39038455e-01 1.12957823e+00 -5.36940992e-02] [ 1.35019946e+00 -6.82092190e-01 9.66164023e-02 -1.54666930e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[-0.5907932] - alpha:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_44.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : int = prim::Constant[value=0]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[ 0.17989963 1.7559284 1.3416868 -0.62773114] [ 0.7414641 1.2804666 0.0656817 -0.6829626 ] [ 0.8079456 -1.1338924 -0.47873792 0.7139977 ]] [[ 0.43344408 0.6926789 -0.18218642 0.5857089 ] [ 2.1588528 -1.0604353 0.9484715 -1.108079 ] [ 0.6706802 -1.3983114 -1.4335289 1.1873939 ]] [[ 1.9050035 -0.18998528 -1.8214207 0.1412845 ] [ 0.41964573 -0.11304647 -0.90597516 -0.24690886] [ 0.3313948 -0.0881624 -0.6906605 -0.45072103]] [[-0.00964348 0.4557632 0.35923347 0.7699651 ] [-0.3681116 -0.69886917 -0.7680084 1.4047223 ] [ 1.6963176 -2.131791 -0.05477757 -0.4609524 ]] [[-0.966818 -0.7347629 1.423765 0.21934023] [ 0.67255044 0.5248343 -0.6291304 0.48668528] [-0.43611485 0.34057623 0.06706822 -1.7811605 ]]] [[[-0.11981948 -1.1860939 -0.38371742 -0.07386073] [ 2.3769085 0.76450074 -1.4538395 2.1793106 ] [ 0.12399109 0.45295116 -0.44806215 0.28196615]] [[ 0.9232336 -0.2355645 0.45976236 -0.28642452] [-0.17462587 1.856033 -0.6853989 -1.092584 ] [-0.63623625 0.40428677 -0.45996436 -1.2690142 ]] [[ 2.2018406 1.2336237 0.47262594 -1.1834433 ] [ 1.1296986 -1.1525688 1.807 -1.1667203 ] [ 0.6916571 1.1350238 -0.49286863 0.17551783]] [[ 1.9205106 -1.5312517 -1.346045 0.2588263 ] [ 1.3144646 1.9825557 -3.3760738 -1.599139 ] [-0.48132598 -0.7664993 -1.7159047 -1.4170349 ]] [[ 0.2821834 0.09704994 0.4164673 1.1185545 ] [-1.7257516 -2.0327702 -1.2233224 -0.6380812 ] [ 0.3880705 -2.3140204 -0.7178697 1.5611402 ]]]]; ov_res: [[[[ 0.17989963 1.7559284 1.3416868 -0.62773114] [ 0.7414641 1.2804666 0.0656817 -0.6829626 ] [ 0.8079456 -1.1338924 -0.47873792 0.7139977 ]] [[ 0.43344408 0.6926789 -0.18218642 0.5857089 ] [ 2.1588528 
-1.0604353 0.9484715 -1.108079 ] [ 0.6706802 -1.3983114 -1.4335289 1.1873939 ]] [[ 1.9050035 -0.18998528 -1.8214207 0.1412845 ] [ 0.41964573 -0.11304647 -0.90597516 -0.24690886] [ 0.3313948 -0.0881624 -0.6906605 -0.45072103]] [[-0.00964348 0.4557632 0.35923347 0.7699651 ] [-0.3681116 -0.69886917 -0.7680084 1.4047223 ] [ 1.6963176 -2.131791 -0.05477757 -0.4609524 ]] [[-0.966818 -0.7347629 1.423765 0.21934023] [ 0.67255044 0.5248343 -0.6291304 0.48668528] [-0.43611485 0.34057623 0.06706822 -1.7811605 ]]] [[[-0.11981948 -1.1860939 -0.38371742 -0.07386073] [ 2.3769085 0.76450074 -1.4538395 2.1793106 ] [ 0.12399109 0.45295116 -0.44806215 0.28196615]] [[ 0.9232336 -0.2355645 0.45976236 -0.28642452] [-0.17462587 1.856033 -0.6853989 -1.092584 ] [-0.63623625 0.40428677 -0.45996436 -1.2690142 ]] [[ 2.2018406 1.2336237 0.47262594 -1.1834433 ] [ 1.1296986 -1.1525688 1.807 -1.1667203 ] [ 0.6916571 1.1350238 -0.49286863 0.17551783]] [[ 1.9205106 -1.5312517 -1.346045 0.2588263 ] [ 1.3144646 1.9825557 -3.3760738 -1.599139 ] [-0.48132598 -0.7664993 -1.7159047 -1.4170349 ]] [[ 0.2821834 0.09704994 0.4164673 1.1185545 ] [-1.7257516 -2.0327702 -1.2233224 -0.6380812 ] [ 0.3880705 -2.3140204 -0.7178697 1.5611402 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[-0.5907932] - alpha:0.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_46.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : float = prim::Constant[value=0.5]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[-0.2913588 -1.2621055 0.59419525 -1.2943391 ] [-2.0988162 -0.3007367 -1.7399293 -0.7738024 ] [ 0.92846644 -0.17280081 0.44508442 0.25976983]] [[-1.2153428 -0.6429745 -0.42418224 0.15632159] [-0.1465589 -0.46682233 -1.0328714 0.07103267] [-0.70813525 1.713922 -1.4992361 0.14203107]] [[ 2.4408803 -0.47061825 -0.6793935 0.42812786] [ 0.34413937 -0.13815534 0.3150563 -0.64412713] [ 0.64165986 -1.6912273 1.0934296 -0.8466829 ]] [[ 0.4295322 1.3859855 0.02649936 0.08033308] [ 0.24501804 -2.0676804 0.9155692 0.08307728] [-0.00605112 -0.02114862 0.9660399 -1.1325343 ]] [[ 0.24806431 -1.1198087 1.0574995 -1.6355904 ] [-0.52179873 1.3573413 -2.5931308 0.31485263] [-1.2922068 -0.17208049 0.31083068 0.35502943]]] [[[-0.5677506 -1.1697997 -0.7787036 1.2744275 ] [ 0.20646635 -1.6071007 0.36919037 -0.01311213] [ 0.34185472 -2.6732233 -0.77957296 -0.24867816]] [[-1.0810813 0.46464792 -0.42610902 0.15261477] [-0.17851195 0.7844231 -0.8528758 0.04356024] [ 1.5557623 -0.5838732 -0.13271451 0.8379332 ]] [[ 0.08169857 0.11031345 -2.214178 -0.66680706] [-0.15304224 0.8118906 -0.9263134 -0.63334435] [-0.68797004 -1.0790819 -0.59538 -0.832258 ]] [[ 1.134567 1.0468411 1.082931 -1.1860852 ] [-0.76471823 -0.818102 -1.6520904 -0.22768478] [ 0.02235812 -0.35572308 -2.20001 0.18448207]] [[-0.13217682 1.0640318 -0.32781318 0.3693498 ] [-1.0705706 -0.87682354 -0.31442967 -0.604947 ] [ 0.18861428 0.50873125 -3.5090525 -0.88419116]]]]; ov_res: [[[[-0.2913588 -1.2621055 0.59419525 -1.2943391 ] [-2.0988162 -0.3007367 -1.7399293 -0.7738024 ] [ 0.92846644 -0.17280081 0.44508442 0.25976983]] [[-1.2153428 -0.6429745 -0.42418224 
0.15632159] [-0.1465589 -0.46682233 -1.0328714 0.07103267] [-0.70813525 1.713922 -1.4992361 0.14203107]] [[ 2.4408803 -0.47061825 -0.6793935 0.42812786] [ 0.34413937 -0.13815534 0.3150563 -0.64412713] [ 0.64165986 -1.6912273 1.0934296 -0.8466829 ]] [[ 0.4295322 1.3859855 0.02649936 0.08033308] [ 0.24501804 -2.0676804 0.9155692 0.08307728] [-0.00605112 -0.02114862 0.9660399 -1.1325343 ]] [[ 0.24806431 -1.1198087 1.0574995 -1.6355904 ] [-0.52179873 1.3573413 -2.5931308 0.31485263] [-1.2922068 -0.17208049 0.31083068 0.35502943]]] [[[-0.5677506 -1.1697997 -0.7787036 1.2744275 ] [ 0.20646635 -1.6071007 0.36919037 -0.01311213] [ 0.34185472 -2.6732233 -0.77957296 -0.24867816]] [[-1.0810813 0.46464792 -0.42610902 0.15261477] [-0.17851195 0.7844231 -0.8528758 0.04356024] [ 1.5557623 -0.5838732 -0.13271451 0.8379332 ]] [[ 0.08169857 0.11031345 -2.214178 -0.66680706] [-0.15304224 0.8118906 -0.9263134 -0.63334435] [-0.68797004 -1.0790819 -0.59538 -0.832258 ]] [[ 1.134567 1.0468411 1.082931 -1.1860852 ] [-0.76471823 -0.818102 -1.6520904 -0.22768478] [ 0.02235812 -0.35572308 -2.20001 0.18448207]] [[-0.13217682 1.0640318 -0.32781318 0.3693498 ] [-1.0705706 -0.87682354 -0.31442967 -0.604947 ] [ 0.18861428 0.50873125 -3.5090525 -0.88419116]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[-0.5907932] - alpha:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_48.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : int = prim::Constant[value=1]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[-0.9961189 -0.7747843 -2.1761255 -0.87908316] [ 0.52584904 0.9400912 2.5820465 -0.76173794] [-0.7080645 -0.57783216 0.3999877 -0.28112665]] [[-1.8506033 0.27648532 -0.64086294 0.24995196] [-1.4067464 -0.5428022 -1.462988 2.1023602 ] [-1.7898462 0.81668943 1.0999341 -0.99336445]] [[-1.4147699 -0.18709904 0.903058 -0.5841079 ] [ 0.55301017 0.4874553 -1.1318095 -1.1631505 ] [-1.5303879 -0.537617 -0.42868668 -1.6727097 ]] [[-0.714809 -0.5879841 -2.565252 -0.0355314 ] [-0.9273917 -1.829762 -2.1150093 -3.844452 ] [-0.5386925 -0.05753112 -1.4570899 -0.6517692 ]] [[-0.74323034 -2.2245765 0.9599634 -1.222493 ] [-0.04336351 -1.2358292 -1.7733784 1.5710285 ] [ 0.12098831 0.50764626 -0.19975397 -0.7903766 ]]] [[[-0.8848227 -0.5353479 0.32022113 -0.84059155] [-0.18450198 -0.44258744 -3.4432561 -2.0509403 ] [-0.6425866 0.48990506 -0.6794069 -0.96331817]] [[-0.11148646 -1.4102714 -1.0852088 -0.32436565] [ 0.8909194 -0.70989794 0.47647494 -0.03135353] [-0.79151464 1.8469257 -0.94005823 -1.0018854 ]] [[-0.7199713 -0.624342 -0.19738182 -1.1996719 ] [ 0.69121677 -0.2019006 0.36550462 -0.72250944] [-1.6787131 -2.4372864 1.2685106 -0.27186468]] [[-0.7463816 0.31413454 -1.5302205 -0.27736893] [-1.1947734 -0.26344344 1.0491712 -0.80412835] [-1.1550715 -1.2620963 0.3188337 -0.5350583 ]] [[ 0.10997617 0.15916866 -1.2774599 1.313889 ] [-2.0296226 2.1440847 -0.8352204 -2.2202308 ] [-1.2031529 0.1181708 -2.6913664 -0.57390577]]]]; ov_res: [[[[-0.9961189 -0.7747843 -2.1761255 -0.87908316] [ 0.52584904 0.9400912 2.5820465 -0.76173794] [-0.7080645 -0.57783216 0.3999877 -0.28112665]] [[-1.8506033 0.27648532 -0.64086294 
0.24995196] [-1.4067464 -0.5428022 -1.462988 2.1023602 ] [-1.7898462 0.81668943 1.0999341 -0.99336445]] [[-1.4147699 -0.18709904 0.903058 -0.5841079 ] [ 0.55301017 0.4874553 -1.1318095 -1.1631505 ] [-1.5303879 -0.537617 -0.42868668 -1.6727097 ]] [[-0.714809 -0.5879841 -2.565252 -0.0355314 ] [-0.9273917 -1.829762 -2.1150093 -3.844452 ] [-0.5386925 -0.05753112 -1.4570899 -0.6517692 ]] [[-0.74323034 -2.2245765 0.9599634 -1.222493 ] [-0.04336351 -1.2358292 -1.7733784 1.5710285 ] [ 0.12098831 0.50764626 -0.19975397 -0.7903766 ]]] [[[-0.8848227 -0.5353479 0.32022113 -0.84059155] [-0.18450198 -0.44258744 -3.4432561 -2.0509403 ] [-0.6425866 0.48990506 -0.6794069 -0.96331817]] [[-0.11148646 -1.4102714 -1.0852088 -0.32436565] [ 0.8909194 -0.70989794 0.47647494 -0.03135353] [-0.79151464 1.8469257 -0.94005823 -1.0018854 ]] [[-0.7199713 -0.624342 -0.19738182 -1.1996719 ] [ 0.69121677 -0.2019006 0.36550462 -0.72250944] [-1.6787131 -2.4372864 1.2685106 -0.27186468]] [[-0.7463816 0.31413454 -1.5302205 -0.27736893] [-1.1947734 -0.26344344 1.0491712 -0.80412835] [-1.1550715 -1.2620963 0.3188337 -0.5350583 ]] [[ 0.10997617 0.15916866 -1.2774599 1.313889 ] [-2.0296226 2.1440847 -0.8352204 -2.2202308 ] [-1.2031529 0.1181708 -2.6913664 -0.57390577]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_add.py::TestAdd::test_add[ ie_device:CPU - precision:FP32 - input_rhs:[-0.5907932] - alpha:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_add.___torch_mangle_50.aten_add, %lhs.1 : Tensor, %rhs.1 : Tensor): %self.alpha : int = prim::Constant[value=2]() %4 : Tensor = aten::add(%lhs.1, %rhs.1, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_add.py:27:23 return (%4) fw_re: [[[[-1.3825085 -0.46738756 -1.04493 -1.3596995 ] [-2.1686673 -1.7668107 -2.412395 -1.2704964 ] [-1.3662916 -0.25723094 -0.8528929 -1.1826906 ]] [[-1.1767789 -1.3291287 -0.6404797 -0.25655884] [-2.0171275 0.05885363 -2.5507686 -1.5011269 ] [-0.27623218 -1.2624073 -0.40932542 -0.7303964 ]] [[ 0.18875241 0.97836125 -1.8829424 -1.5543888 ] [-1.5753173 1.2437977 -1.3016852 -3.4102554 ] [-0.80293846 0.06453109 -1.6571732 -1.6069143 ]] [[-1.5515722 -3.3497329 0.00499463 -1.3739083 ] [-0.9306306 -0.1954261 -0.5079283 -0.4181729 ] [ 0.13600779 -2.432454 -1.124993 -1.861481 ]] [[-1.5729 -1.5573845 -0.36439055 -0.46251714] [-2.5720162 -2.0127237 -0.42535853 -2.810821 ] [-1.4531614 -1.1758909 -1.2015399 -1.6553723 ]]] [[[-2.1099815 -1.5272298 -0.3304248 -1.8767223 ] [-0.4472052 -1.2334836 -1.1000208 -0.37172276] [-0.8845964 -1.5065409 -0.71492136 -1.665286 ]] [[-2.5801208 -1.292036 1.5054568 -1.4935454 ] [-0.98572505 -1.9089663 0.21211839 0.4499687 ] [-1.2335615 -0.16483343 0.24739552 0.9192542 ]] [[ 0.08201742 -2.397967 -1.262728 -1.7580705 ] [-1.586659 -2.2985985 -1.020724 -0.8665571 ] [-2.9289355 -1.26957 -0.99795663 -1.0767711 ]] [[-0.96087945 -2.007571 -0.7470776 -0.5757049 ] [-2.972446 -0.3713917 -1.0551561 -0.36804438] [-3.7707624 -0.8030486 -2.1391656 -0.984802 ]] [[-0.32671064 -1.9917132 -1.3003076 -0.3519948 ] [-0.92598546 -0.45052767 2.091702 -0.6884269 ] [ 0.14723253 -1.4518617 0.6325966 -0.84935975]]]]; ov_res: [[[[-1.3825085 -0.46738756 -1.04493 -1.3596995 ] [-2.1686673 -1.7668107 -2.412395 -1.2704964 ] [-1.3662916 -0.25723094 -0.8528929 -1.1826906 ]] [[-1.1767789 -1.3291287 -0.6404797 -0.25655884] 
[-2.0171275 0.05885363 -2.5507686 -1.5011269 ] [-0.27623218 -1.2624073 -0.40932542 -0.7303964 ]] [[ 0.18875241 0.97836125 -1.8829424 -1.5543888 ] [-1.5753173 1.2437977 -1.3016852 -3.4102554 ] [-0.80293846 0.06453109 -1.6571732 -1.6069143 ]] [[-1.5515722 -3.3497329 0.00499463 -1.3739083 ] [-0.9306306 -0.1954261 -0.5079283 -0.4181729 ] [ 0.13600779 -2.432454 -1.124993 -1.861481 ]] [[-1.5729 -1.5573845 -0.36439055 -0.46251714] [-2.5720162 -2.0127237 -0.42535853 -2.810821 ] [-1.4531614 -1.1758909 -1.2015399 -1.6553723 ]]] [[[-2.1099815 -1.5272298 -0.3304248 -1.8767223 ] [-0.4472052 -1.2334836 -1.1000208 -0.37172276] [-0.8845964 -1.5065409 -0.71492136 -1.665286 ]] [[-2.5801208 -1.292036 1.5054568 -1.4935454 ] [-0.98572505 -1.9089663 0.21211839 0.4499687 ] [-1.2335615 -0.16483343 0.24739552 0.9192542 ]] [[ 0.08201742 -2.397967 -1.262728 -1.7580705 ] [-1.586659 -2.2985985 -1.020724 -0.8665571 ] [-2.9289355 -1.26957 -0.99795663 -1.0767711 ]] [[-0.96087945 -2.007571 -0.7470776 -0.5757049 ] [-2.972446 -0.3713917 -1.0551561 -0.36804438] [-3.7707624 -0.8030486 -2.1391656 -0.984802 ]] [[-0.32671064 -1.9917132 -1.3003076 -0.3519948 ] [-0.92598546 -0.45052767 2.091702 -0.6884269 ] [ 0.14723253 -1.4518617 0.6325966 -0.84935975]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.int32'> - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_51.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %4 : int = prim::Constant[value=1]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:28:23 return (%5) fw_re: [ 195 477 1352]; ov_res: [ 195 477 1352] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.float32'> - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_53.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %4 : int = prim::Constant[value=1]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:28:23 return (%5) fw_re: [2242.9966 714.9528 1259.5046]; ov_res: [2242.9963 714.9528 1259.5046] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.float64'> - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_55.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %4 : int = prim::Constant[value=1]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:28:23 return (%5) fw_re: [ 259.23738677 1867.79308935 610.6872319 ]; ov_res: [ 259.23736572 1867.79296875 610.68725586] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.int32'> - value:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_57.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %self.value : int = prim::Constant[value=1]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:27:27 return (%5) fw_re: [ 31 238 290]; ov_res: [ 31 238 290] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.int32'> - value:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_59.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %self.value : int = prim::Constant[value=2]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:27:27 return (%5) fw_re: [2369 1212 44]; ov_res: [2369 1212 44] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.int32'> - value:10 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_61.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %self.value : int = prim::Constant[value=10]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:27:27 return (%5) fw_re: [15635 20180 756]; ov_res: [15635 20180 756] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.int32'> - value:110 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_63.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %self.value : int = prim::Constant[value=110]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:27:27 return (%5) fw_re: [ 34339 105646 147863]; ov_res: [ 34339 105646 147863] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.float32'> - value:2.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_65.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %self.value : float = prim::Constant[value=2.]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:27:27 return (%5) fw_re: [2973.278 2469.009 2669.5005]; ov_res: [2973.278 2469.009 2669.5005] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.float32'> - value:3.1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_67.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %self.value : float = prim::Constant[value=3.1000000000000001]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:27:27 return (%5) fw_re: [1694.2025 197.2909 50.17815]; ov_res: [1694.2025 197.29088 50.17815] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.float32'> - value:4.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_69.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %self.value : float = prim::Constant[value=4.5]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:27:27 return (%5) fw_re: [3404.787 2654.298 584.5378]; ov_res: [3404.787 2654.298 584.5378] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.float64'> - value:41.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_71.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %self.value : float = prim::Constant[value=41.5]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:27:27 return (%5) fw_re: [21867.74075828 14675.55744348 13084.74100541]; ov_res: [21867.74023438 14675.55859375 13084.74121094] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addcmul.py::TestAddCMul::test_addcmul[ ie_device:CPU - precision:FP32 - input_type:<class 'numpy.float64'> - value:24.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addcmul.___torch_mangle_73.aten_addcmul, %x.1 : Tensor, %y.1 : Tensor, %z.1 : Tensor): %self.value : float = prim::Constant[value=24.5]() %5 : Tensor = aten::addcmul(%x.1, %y.1, %z.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addcmul.py:27:27 return (%5) fw_re: [23173.93723824 1809.6141399 23419.80459297]; ov_res: [23173.93554688 1809.61413574 23419.8046875 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (3, 3), 'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_74.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.alpha : float = prim::Constant[value=1.]() %5 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.alpha, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%5) fw_re: [[-0.61689276 0.83960485 1.345933 ] [ 0.72999835 -0.6065898 1.4401503 ] [-0.56719977 -0.6820189 1.0745364 ]]; ov_res: [[-0.61689276 0.83960485 1.345933 ] [ 0.72999835 -0.6065898 1.4401503 ] [-0.56719977 -0.6820189 1.0745364 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (2, 2), 'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_76.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.alpha : float = prim::Constant[value=1.]() %5 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.alpha, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%5) fw_re: [[-0.9758841 0.31755114] [ 0.8153827 -0.0198983 ]]; ov_res: [[-0.9758841 0.31755114] [ 0.8153827 -0.0198983 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (10, 1), 'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_78.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.alpha : float = prim::Constant[value=1.]() %5 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.alpha, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%5) fw_re: [[ 1.7324612e-03] [ 1.1892116e+00] [ 2.4668183e+00] [ 4.2982006e+00] [-1.7526795e+00] [ 6.6947466e-01] [-2.5279114e+00] [ 2.3343091e+00] [-1.2556292e+00] [-1.6114073e+00]]; ov_res: [[ 1.7324537e-03] [ 1.1892116e+00] [ 2.4668183e+00] [ 4.2982006e+00] [-1.7526796e+00] [ 6.6947466e-01] [-2.5279114e+00] [ 2.3343091e+00] [-1.2556293e+00] [-1.6114074e+00]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (1, 2), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_80.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.alpha : float = prim::Constant[value=1.]() %5 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.alpha, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%5) fw_re: [[3.9529254 1.0066953]]; ov_res: [[3.9529254 1.0066953]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (1, 1), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_82.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.alpha : float = prim::Constant[value=1.]() %5 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.alpha, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%5) fw_re: [[2.9544497]]; ov_res: [[2.9544497]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:0.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (3, 3), 'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_84.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=1.]() %self.alpha : float = prim::Constant[value=0.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ 2.266618 -0.7647199 1.1246762 ] [ 0.72691226 0.15140408 0.08814188] [-1.123626 0.6276304 0.98836684]]; ov_res: [[ 2.266618 -0.7647199 1.1246762 ] [ 0.72691226 0.15140408 0.08814188] [-1.123626 0.6276304 0.98836684]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:0.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (2, 2), 'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_86.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=1.]() %self.alpha : float = prim::Constant[value=0.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ 0.09098573 0.6017679 ] [ 0.18704364 -1.0947664 ]]; ov_res: [[ 0.09098573 0.6017679 ] [ 0.18704364 -1.0947664 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:0.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (10, 1), 'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_88.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=1.]() %self.alpha : float = prim::Constant[value=0.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ 0.32051778] [-1.1767833 ] [-0.40739444] [ 0.6183603 ] [-0.7889011 ] [-0.88417983] [-0.21910703] [-0.21294262] [ 1.043647 ] [ 1.1494545 ]]; ov_res: [[ 0.32051778] [-1.1767833 ] [-0.40739444] [ 0.6183603 ] [-0.7889011 ] [-0.88417983] [-0.21910703] [-0.21294262] [ 1.043647 ] [ 1.1494545 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:0.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (1, 2), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_90.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=1.]() %self.alpha : float = prim::Constant[value=0.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-2.0408971 1.2120179]]; ov_res: [[-2.0408971 1.2120179]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:0.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (1, 1), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_92.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=1.]() %self.alpha : float = prim::Constant[value=0.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[0.2059847]]; ov_res: [[0.2059847]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:0.0 - kwargs_to_prepare_input:{'input_shape': (3, 3), 'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_94.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=0.]() %self.alpha : float = prim::Constant[value=1.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ 0.5711755 -0.460998 0.07938769] [ 0.22235 0.6714377 0.2274615 ] [ 3.1989365 -2.9906037 0.6244 ]]; ov_res: [[ 0.5711755 -0.460998 0.07938769] [ 0.22235 0.6714377 0.2274615 ] [ 3.1989365 -2.9906037 0.6244 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:0.0 - kwargs_to_prepare_input:{'input_shape': (2, 2), 'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_96.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=0.]() %self.alpha : float = prim::Constant[value=1.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-4.5677705 -1.1743505 ] [ 1.022454 0.49277243]]; ov_res: [[-4.5677705 -1.1743505 ] [ 1.022454 0.49277243]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:0.0 - kwargs_to_prepare_input:{'input_shape': (10, 1), 'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_98.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=0.]() %self.alpha : float = prim::Constant[value=1.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-0.09088397] [-0.8848727 ] [-2.2111626 ] [ 1.2745205 ] [-5.527191 ] [ 1.0621948 ] [-3.6531956 ] [ 0.36248708] [ 0.88594306] [ 0.90456635]]; ov_res: [[-0.09088419] [-0.8848728 ] [-2.2111626 ] [ 1.2745205 ] [-5.527191 ] [ 1.0621948 ] [-3.6531956 ] [ 0.3624871 ] [ 0.88594294] [ 0.90456635]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:0.0 - kwargs_to_prepare_input:{'input_shape': (1, 2), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_100.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=0.]() %self.alpha : float = prim::Constant[value=1.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-1.9795836 -0.08792957]]; ov_res: [[-1.9795836 -0.08792957]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:0.0 - kwargs_to_prepare_input:{'input_shape': (1, 1), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_102.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=0.]() %self.alpha : float = prim::Constant[value=1.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[2.6393573]]; ov_res: [[2.6393573]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:2.0 - kwargs_to_prepare_input:{'input_shape': (3, 3), 'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_104.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=2.]() %self.alpha : float = prim::Constant[value=1.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-4.0925474 -0.45841676 -0.18284048] [-0.7984646 4.657699 0.43666464] [-0.9090717 4.69512 -0.40495875]]; ov_res: [[-4.0925474 -0.45841676 -0.18284048] [-0.7984646 4.657699 0.43666464] [-0.9090717 4.69512 -0.40495875]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:2.0 - kwargs_to_prepare_input:{'input_shape': (2, 2), 'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_106.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=2.]() %self.alpha : float = prim::Constant[value=1.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-0.04019002 0.9176247 ] [-1.1664644 -4.417061 ]]; ov_res: [[-0.04019002 0.9176247 ] [-1.1664644 -4.417061 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:2.0 - kwargs_to_prepare_input:{'input_shape': (10, 1), 'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_108.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=2.]() %self.alpha : float = prim::Constant[value=1.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ 2.8895767 ] [-3.89337 ] [ 2.7698061 ] [ 0.7152685 ] [ 0.06343675] [-1.6384957 ] [-1.745623 ] [ 5.8497267 ] [-0.13420098] [-1.1537905 ]]; ov_res: [[ 2.8895767 ] [-3.8933702 ] [ 2.769806 ] [ 0.71526855] [ 0.06343675] [-1.6384953 ] [-1.745623 ] [ 5.8497267 ] [-0.13420099] [-1.1537906 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:2.0 - kwargs_to_prepare_input:{'input_shape': (1, 2), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_110.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=2.]() %self.alpha : float = prim::Constant[value=1.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-0.85705054 5.6835837 ]]; ov_res: [[-0.85705006 5.6835833 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1.0 - beta:2.0 - kwargs_to_prepare_input:{'input_shape': (1, 1), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_112.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=2.]() %self.alpha : float = prim::Constant[value=1.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[1.2585771]]; ov_res: [[1.2585769]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:2.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (3, 3), 'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_114.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=1.]() %self.alpha : float = prim::Constant[value=2.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ 0.9403423 4.495206 3.503687 ] [ 0.5799253 0.3135103 -4.083647 ] [-1.9928002 -1.5143937 -4.5442486]]; ov_res: [[ 0.9403423 4.495206 3.503687 ] [ 0.5799253 0.3135103 -4.083647 ] [-1.9928002 -1.5143937 -4.5442486]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:2.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (2, 2), 'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_116.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=1.]() %self.alpha : float = prim::Constant[value=2.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-9.537697 2.84329 ] [ 8.412182 2.8793612]]; ov_res: [[-9.537697 2.84329 ] [ 8.412182 2.8793612]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:2.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (10, 1), 'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_118.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=1.]() %self.alpha : float = prim::Constant[value=2.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ 2.8073006] [-0.8966143] [-2.1790152] [-2.5681522] [-3.9164336] [-1.7869301] [ 1.5708637] [-0.6728657] [-1.8614048] [ 0.6486283]]; ov_res: [[ 2.8073006] [-0.8966143] [-2.179015 ] [-2.5681522] [-3.9164336] [-1.7869301] [ 1.5708637] [-0.6728656] [-1.8614049] [ 0.6486284]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:2.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (1, 2), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_120.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=1.]() %self.alpha : float = prim::Constant[value=2.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-2.9196944 -5.9355717]]; ov_res: [[-2.919695 -5.9355717]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:2.0 - beta:1.0 - kwargs_to_prepare_input:{'input_shape': (1, 1), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_122.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=1.]() %self.alpha : float = prim::Constant[value=2.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[0.04406676]]; ov_res: [[0.04406679]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:-5.0 - beta:-6.0 - kwargs_to_prepare_input:{'input_shape': (3, 3), 'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_124.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=-6.]() %self.alpha : float = prim::Constant[value=-5.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ 3.8411179 -1.0570362 9.997208 ] [ 0.9737865 19.1843 -12.670036 ] [ 7.7762737 9.823335 -4.164127 ]]; ov_res: [[ 3.8411176 -1.0570362 9.997208 ] [ 0.9737866 19.1843 -12.670035 ] [ 7.776274 9.823336 -4.1641273]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:-5.0 - beta:-6.0 - kwargs_to_prepare_input:{'input_shape': (2, 2), 'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_126.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=-6.]() %self.alpha : float = prim::Constant[value=-5.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[6.5236263 2.3207629] [3.5271306 1.644031 ]]; ov_res: [[6.5236263 2.320763 ] [3.5271306 1.644031 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:-5.0 - beta:-6.0 - kwargs_to_prepare_input:{'input_shape': (10, 1), 'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_128.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=-6.]() %self.alpha : float = prim::Constant[value=-5.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-15.287403 ] [ 9.440469 ] [ 2.8739982 ] [ -8.029745 ] [ -6.5818276 ] [ 2.9289947 ] [ -2.997263 ] [ 15.321476 ] [ 13.702152 ] [ 0.38580805]]; ov_res: [[-15.287401 ] [ 9.44047 ] [ 2.8739977] [ -8.029743 ] [ -6.581826 ] [ 2.9289954] [ -2.997262 ] [ 15.321474 ] [ 13.702153 ] [ 0.3858078]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:-5.0 - beta:-6.0 - kwargs_to_prepare_input:{'input_shape': (1, 2), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_130.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=-6.]() %self.alpha : float = prim::Constant[value=-5.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ -3.437079 -36.887 ]]; ov_res: [[ -3.4370782 -36.886997 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:-5.0 - beta:-6.0 - kwargs_to_prepare_input:{'input_shape': (1, 1), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_132.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=-6.]() %self.alpha : float = prim::Constant[value=-5.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-31.083553]]; ov_res: [[-31.083557]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:3.0 - beta:4.0 - kwargs_to_prepare_input:{'input_shape': (3, 3), 'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_134.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=4.]() %self.alpha : float = prim::Constant[value=3.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ 3.0066822 -10.190558 15.92086 ] [ -0.91958666 0.8124983 -6.7029004 ] [ -5.869946 -13.457327 8.667655 ]]; ov_res: [[ 3.0066822 -10.190559 15.92086 ] [ -0.91958666 0.8124983 -6.7029004 ] [ -5.8699465 -13.457327 8.667656 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:3.0 - beta:4.0 - kwargs_to_prepare_input:{'input_shape': (2, 2), 'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_136.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=4.]() %self.alpha : float = prim::Constant[value=3.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-0.167902 2.64791 ] [-4.9534106 3.1713488]]; ov_res: [[-0.16790271 2.6479096 ] [-4.9534106 3.1713483 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:3.0 - beta:4.0 - kwargs_to_prepare_input:{'input_shape': (10, 1), 'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_138.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=4.]() %self.alpha : float = prim::Constant[value=3.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-9.216755 ] [ 6.533166 ] [ 1.2565373] [ 4.4974294] [ 6.697905 ] [14.157909 ] [-6.577432 ] [-2.7163744] [-0.7110946] [-5.6199465]]; ov_res: [[-9.216755 ] [ 6.533166 ] [ 1.2565374] [ 4.4974303] [ 6.697905 ] [14.157909 ] [-6.5774326] [-2.7163744] [-0.7110946] [-5.6199465]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:3.0 - beta:4.0 - kwargs_to_prepare_input:{'input_shape': (1, 2), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_140.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=4.]() %self.alpha : float = prim::Constant[value=3.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-9.805816 2.2347178]]; ov_res: [[-9.805817 2.2347198]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:3.0 - beta:4.0 - kwargs_to_prepare_input:{'input_shape': (1, 1), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_142.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=4.]() %self.alpha : float = prim::Constant[value=3.]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[8.025499]]; ov_res: [[8.025499]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:0.5 - beta:0.75 - kwargs_to_prepare_input:{'input_shape': (3, 3), 'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_144.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=0.75]() %self.alpha : float = prim::Constant[value=0.5]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-0.3188574 -0.8787304 -2.1755207 ] [-0.9069514 -0.64565444 2.1090124 ] [ 1.8405007 -2.1470635 -1.6995182 ]]; ov_res: [[-0.3188574 -0.8787304 -2.1755207 ] [-0.9069514 -0.64565444 2.1090124 ] [ 1.8405006 -2.1470635 -1.6995182 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:0.5 - beta:0.75 - kwargs_to_prepare_input:{'input_shape': (2, 2), 'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_146.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=0.75]() %self.alpha : float = prim::Constant[value=0.5]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-0.6563256 0.54494226] [-0.09018758 -1.7797095 ]]; ov_res: [[-0.6563255 0.54494226] [-0.09018758 -1.7797095 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:0.5 - beta:0.75 - kwargs_to_prepare_input:{'input_shape': (10, 1), 'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_148.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=0.75]() %self.alpha : float = prim::Constant[value=0.5]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[ 1.5073266 ] [ 0.7508073 ] [ 0.6579256 ] [ 0.6889775 ] [-0.72609687] [ 0.6588692 ] [-0.30262512] [ 0.1696067 ] [ 1.5313461 ] [ 1.1995928 ]]; ov_res: [[ 1.5073265 ] [ 0.7508074 ] [ 0.6579256 ] [ 0.6889775 ] [-0.72609687] [ 0.6588692 ] [-0.30262515] [ 0.16960667] [ 1.5313461 ] [ 1.1995928 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:0.5 - beta:0.75 - kwargs_to_prepare_input:{'input_shape': (1, 2), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_150.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=0.75]() %self.alpha : float = prim::Constant[value=0.5]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[-1.5032587 1.4687127]]; ov_res: [[-1.5032587 1.4687127]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:0.5 - beta:0.75 - kwargs_to_prepare_input:{'input_shape': (1, 1), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_152.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.beta : float = prim::Constant[value=0.75]() %self.alpha : float = prim::Constant[value=0.5]() %6 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.beta, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%6) fw_re: [[0.15628853]]; ov_res: [[0.1562886]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1 - beta:1 - kwargs_to_prepare_input:{'input_shape': (3, 3), 'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_154.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.alpha : int = prim::Constant[value=1]() %5 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.alpha, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%5) fw_re: [[ 1.0518639 -1.5209837 0.2433655 ] [-2.5590978 0.89071214 2.4306767 ] [-3.5595536 -0.35263926 1.8011973 ]]; ov_res: [[ 1.0518639 -1.5209837 0.2433655 ] [-2.5590978 0.89071214 2.4306767 ] [-3.5595536 -0.35263926 1.8011973 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1 - beta:1 - kwargs_to_prepare_input:{'input_shape': (2, 2), 'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_156.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.alpha : int = prim::Constant[value=1]() %5 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.alpha, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%5) fw_re: [[ 0.10173994 -1.5873741 ] [ 2.0795155 -0.32208258]]; ov_res: [[ 0.10173994 -1.5873741 ] [ 2.0795155 -0.32208258]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1 - beta:1 - kwargs_to_prepare_input:{'input_shape': (10, 1), 'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_158.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.alpha : int = prim::Constant[value=1]() %5 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.alpha, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%5) fw_re: [[ 0.51950437] [-2.055748 ] [ 0.37857723] [ 3.6931949 ] [ 4.733595 ] [ 4.6546373 ] [ 1.4920038 ] [ 0.14610273] [-1.5254431 ] [-1.9588463 ]]; ov_res: [[ 0.5195045 ] [-2.055748 ] [ 0.3785771 ] [ 3.6931944 ] [ 4.733595 ] [ 4.654637 ] [ 1.492004 ] [ 0.14610279] [-1.5254431 ] [-1.9588461 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1 - beta:1 - kwargs_to_prepare_input:{'input_shape': (1, 2), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_160.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.alpha : int = prim::Constant[value=1]() %5 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.alpha, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%5) fw_re: [[3.6032777 2.6446018]]; ov_res: [[3.6032777 2.6446018]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_addmm.py::TestAddMM::test_addmm[ ie_device:CPU - precision:FP32 - alpha:1 - beta:1 - kwargs_to_prepare_input:{'input_shape': (1, 1), 'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_addmm.___torch_mangle_162.aten_addmm, %m0.1 : Tensor, %m1.1 : Tensor, %m2.1 : Tensor): %self.alpha : int = prim::Constant[value=1]() %5 : Tensor = aten::addmm(%m0.1, %m1.1, %m2.1, %self.alpha, %self.alpha) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_addmm.py:28:23 return (%5) fw_re: [[-2.6878598]]; ov_res: [[-2.6878598]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:1 - dtype:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_163.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:1 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_164.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:1 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_166.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:1 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_168.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:1 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_170.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:1 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_172.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) namic[...])' with friendly_name 'Greater_20453': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_22639 (Squeeze_22638[0]:i64[], Constant_22628[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_22639': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_24825 (Squeeze_24824[0]:i64[], Constant_24814[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_24825': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::gt with schema: aten::gt.int(int a, int b) -> bool: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Greater Greater_27011 (Squeeze_27010[0]:i64[], Constant_27000[0]:i32[]) -> (dynamic[...])' with friendly_name 'Greater_27011': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ tyfw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:1 - dtype:uint8 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_174.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:2 - dtype:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_176.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:2 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_178.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0. 1.]; ov_res: [0. 1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:2 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_180.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0. 1.]; ov_res: [0. 1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:2 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_182.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:2 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_184.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:2 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_186.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:2 - dtype:uint8 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_188.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:3 - dtype:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_190.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0 1 2]; ov_res: [0 1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:3 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_192.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0. 1. 2.]; ov_res: [0. 1. 2.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:3 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_194.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0. 1. 2.]; ov_res: [0. 1. 2.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:3 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_196.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0 1 2]; ov_res: [0 1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:3 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_198.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0 1 2]; ov_res: [0 1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:3 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_out() graph(%self : __torch__.test_arange.___torch_mangle_200.aten_arange_end_out, %x.1 : int): %2 : int[] = prim::Constant[value=[1]]() %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::zeros(%2, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:43 %6 : Tensor = aten::arange(%x.1, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:59:23 return (%6) fw_re: [0 1 2]; ov_res: [0 1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:True - end:3 - dtype:uint8 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_202.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0 1 2]; ov_res: [0 1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:1 - dtype:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_204.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:1 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_206.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:1 - dtype:float64 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_208.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:1 - dtype:int32 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_210.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:1 - dtype:int64 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_212.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) pe (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion 
of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant wifw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:1 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_214.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:1 - dtype:uint8 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_216.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:2 - dtype:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_218.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:2 - dtype:float32 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_220.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0. 1.]; ov_res: [0. 1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:2 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_222.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0. 1.]; ov_res: [0. 1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:2 - dtype:int32 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_224.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:2 - dtype:int64 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_226.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:2 - dtype:int8 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_228.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:2 - dtype:uint8 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_230.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:3 - dtype:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_232.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0 1 2]; ov_res: [0 1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:3 - dtype:float32 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_234.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0. 1. 2.]; ov_res: [0. 1. 2.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:3 - dtype:float64 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_236.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0. 1. 2.]; ov_res: [0. 1. 2.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:3 - dtype:int32 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_238.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0 1 2]; ov_res: [0 1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:3 - dtype:int64 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_240.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0 1 2]; ov_res: [0 1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:3 - dtype:int8 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_242.aten_arange_end_dtype, %x.1 : int): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::arange(%x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%4) fw_re: [0 1 2]; ov_res: [0 1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_end_only[ ie_device:CPU - precision:FP32 - use_out:False - end:3 - dtype:uin8 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_244.aten_arange_end_dtype, %x.1 : int): %self.dtype : NoneType = prim::Constant() %3 : Tensor = aten::arange(%x.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:35:23 return (%3) fw_re: [0 1 2]; ov_res: [0 1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0 - end:1 - dtype:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_245.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %self.dtype : NoneType = prim::Constant() %4 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%4) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0 - end:1 - dtype:float32 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_247.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0 - end:1 - dtype:float64 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_249.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) th schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for detailfw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0 - end:1 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_251.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0 - end:1 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_253.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0 - end:1 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_255.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:-1 - end:1 - dtype:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_257.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %self.dtype : NoneType = prim::Constant() %4 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%4) fw_re: [-1. 0.]; ov_res: [-1. 0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:-1 - end:1 - dtype:float32 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_259.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [-1. 0.]; ov_res: [-1. 0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:-1 - end:1 - dtype:float64 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_261.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [-1. 0.]; ov_res: [-1. 0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:-1 - end:1 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_263.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [-1 0]; ov_res: [-1 0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:-1 - end:1 - dtype:int64 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_265.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [-1 0]; ov_res: [-1 0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:-1 - end:1 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_267.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [-1 0]; ov_res: [-1 0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:1 - end:5 - dtype:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_269.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %self.dtype : NoneType = prim::Constant() %4 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%4) fw_re: [1. 2. 3. 4.]; ov_res: [1. 2. 3. 4.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:1 - end:5 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_271.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [1. 2. 3. 4.]; ov_res: [1. 2. 3. 4.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:1 - end:5 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_273.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [1. 2. 3. 4.]; ov_res: [1. 2. 3. 4.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:1 - end:5 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_275.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [1 2 3 4]; ov_res: [1 2 3 4] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:1 - end:5 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_277.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [1 2 3 4]; ov_res: [1 2 3 4] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:1 - end:5 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_279.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [1 2 3 4]; ov_res: [1 2 3 4] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0.5 - end:2.5 - dtype:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_281.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %self.dtype : NoneType = prim::Constant() %4 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%4) fw_re: [0.5 1.5]; ov_res: [0.5 1.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0.5 - end:2.5 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_283.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [0.5 1.5]; ov_res: [0.5 1.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0.5 - end:2.5 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_285.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [0.5 1.5]; ov_res: [0.5 1.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0.5 - end:2.5 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_287.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0.5 - end:2.5 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_289.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) s) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no 
schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIfw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end[ ie_device:CPU - precision:FP32 - start:0.5 - end:2.5 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_dtype() graph(%self : __torch__.test_arange.___torch_mangle_291.aten_arange_start_end_dtype, %x.1 : float, %y.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::arange(%x.1, %y.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:43:23 return (%5) fw_re: [0 1]; ov_res: [0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:0 - end:1 - step:1 - dtype:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_292.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %self.dtype : NoneType = prim::Constant() %5 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:0 - end:1 - step:1 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_294.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:0 - end:1 - step:1 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_296.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:0 - end:1 - step:1 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_298.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:0 - end:1 - step:1 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_300.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:0 - end:1 - step:1 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_302.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-2 - end:1 - step:1.25 - dtype:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_304.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %self.dtype : NoneType = prim::Constant() %5 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%5) fw_re: [-2. -0.75 0.5 ]; ov_res: [-2. -0.75 0.5 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-2 - end:1 - step:1.25 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_306.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [-2. -0.75 0.5 ]; ov_res: [-2. -0.75 0.5 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-2 - end:1 - step:1.25 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_308.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [-2. -0.75 0.5 ]; ov_res: [-2. -0.75 0.5 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-2 - end:1 - step:1.25 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_310.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [-2 -1 0]; ov_res: [-2 -1 0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-2 - end:1 - step:1.25 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_312.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [-2 -1 0]; ov_res: [-2 -1 0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-2 - end:1 - step:1.25 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_314.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [-2 -1 0]; ov_res: [-2 -1 0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:-5 - step:-1 - dtype:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_316.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %self.dtype : NoneType = prim::Constant() %5 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%5) fw_re: [ 1. 0. -1. -2. -3. -4.]; ov_res: [ 1. 0. -1. -2. -3. -4.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:-5 - step:-1 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_318.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [ 1. 0. -1. -2. -3. -4.]; ov_res: [ 1. 0. -1. -2. -3. -4.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:-5 - step:-1 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_320.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [ 1. 0. -1. -2. -3. -4.]; ov_res: [ 1. 0. -1. -2. -3. -4.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:-5 - step:-1 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_322.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [ 1 0 -1 -2 -3 -4]; ov_res: [ 1 0 -1 -2 -3 -4] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:-5 - step:-1 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_324.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [ 1 0 -1 -2 -3 -4]; ov_res: [ 1 0 -1 -2 -3 -4] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:-5 - step:-1 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_326.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [ 1 0 -1 -2 -3 -4]; ov_res: [ 1 0 -1 -2 -3 -4] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:10 - step:2 - dtype:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_328.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %self.dtype : NoneType = prim::Constant() %5 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%5) ND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type 
(#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no sfw_re: [1. 3. 5. 7. 9.]; ov_res: [1. 3. 5. 7. 9.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:10 - step:2 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_330.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [1. 3. 5. 7. 9.]; ov_res: [1. 3. 5. 7. 9.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:10 - step:2 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_332.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [1. 3. 5. 7. 9.]; ov_res: [1. 3. 5. 7. 9.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:10 - step:2 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_334.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [1 3 5 7 9]; ov_res: [1 3 5 7 9] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:10 - step:2 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_336.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [1 3 5 7 9]; ov_res: [1 3 5 7 9] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:1 - end:10 - step:2 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_338.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [1 3 5 7 9]; ov_res: [1 3 5 7 9] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-1 - end:-5 - step:-2 - dtype:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_340.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %self.dtype : NoneType = prim::Constant() %5 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %self.dtype, %self.dtype, %self.dtype) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%5) fw_re: [-1. -3.]; ov_res: [-1. -3.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-1 - end:-5 - step:-2 - dtype:float32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_342.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [-1. -3.]; ov_res: [-1. -3.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-1 - end:-5 - step:-2 - dtype:float64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_344.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [-1. -3.]; ov_res: [-1. -3.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-1 - end:-5 - step:-2 - dtype:int32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_346.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [-1 -3]; ov_res: [-1 -3] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-1 - end:-5 - step:-2 - dtype:int64 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_348.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [-1 -3]; ov_res: [-1 -3] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_arange.py::TestExp::test_arange_start_end_step[ ie_device:CPU - precision:FP32 - start:-1 - end:-5 - step:-2 - dtype:int8 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- aten_arange_start_end_step_dtype() graph(%self : __torch__.test_arange.___torch_mangle_350.aten_arange_start_end_step_dtype, %x.1 : float, %y.1 : float, %z.1 : float): %4 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::arange(%x.1, %y.1, %z.1, %self.dtype, %4, %4, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_arange.py:51:23 return (%6) fw_re: [-1 -3]; ov_res: [-1 -3] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_ceil.py::TestCeil::test_ceil[ ie_device:CPU - precision:FP32 - inplace:False ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_ceil.___torch_mangle_351.aten_ceil, %x.1 : Tensor): %2 : Tensor = aten::ceil(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_ceil.py:22:26 %3 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %2) return (%3) fw_re: [[[[ 1.2690773 0.6170897 -0.28825095 ... -0.04898492 1.2310829 -0.5205912 ] [-0.18924749 2.75933 -0.40044516 ... -1.3039743 -0.34867933 0.01808576] [ 0.585341 -0.3997631 0.3498593 ... 0.00666471 -0.04412789 1.5502791 ] ... [ 0.95066184 0.33912817 0.427288 ... 1.1860093 -0.11374697 -0.05280311] [-0.7347901 -1.1630015 -0.2475502 ... 1.5178826 -0.95389456 -0.53403515] [-0.6212277 -2.2106564 1.0353184 ... -1.3874794 -0.69815236 -0.19726607]] [[ 2.804149 0.8328028 -0.5582256 ... 1.355097 -0.1736737 -0.13020413] [ 0.1431629 -0.30146566 -1.0816159 ... 1.4423461 1.1400332 -0.54818213] [-0.91304594 0.25751966 -0.3541964 ... -1.0326333 -0.6336467 -0.34394294] ... [ 0.03576234 1.1312729 1.9666471 ... 1.3699337 -1.0349025 -0.07151587] [-0.50157326 -0.49492472 1.2034382 ... 2.5816436 -0.3554313 -0.11867522] [-0.5012972 -0.8925343 0.22559938 ... -1.4657121 0.3937098 -0.38559586]] [[-1.0058335 0.9137059 0.42063546 ... 1.2229877 -0.47666058 1.2863592 ] [-0.8834272 -0.01377251 -0.09959468 ... -0.36635855 -0.75215805 1.6989714 ] [-0.9435488 1.9351068 -0.28398582 ... 0.2888135 -0.09986418 -0.49040398] ... [ 0.58640283 -1.2145786 0.75110185 ... 0.7040771 -0.1949747 0.01234079] [ 0.16972253 0.36278075 -0.93905634 ... 0.50782037 0.40349624 -1.4074694 ] [-0.38221443 0.39539218 -1.3019844 ... 0.3039268 -1.1511027 1.6980376 ]]]]; ov_res: [[[[ 1.2690773 0.6170897 -0.28825095 ... -0.04898492 1.2310829 -0.5205912 ] [-0.18924749 2.75933 -0.40044516 ... -1.3039743 -0.34867933 0.01808576] [ 0.585341 -0.3997631 0.3498593 ... 0.00666471 -0.04412789 1.5502791 ] ... [ 0.95066184 0.33912817 0.427288 ... 
1.1860093 -0.11374697 -0.05280311] [-0.7347901 -1.1630015 -0.2475502 ... 1.5178826 -0.95389456 -0.53403515] [-0.6212277 -2.2106564 1.0353184 ... -1.3874794 -0.69815236 -0.19726607]] [[ 2.804149 0.8328028 -0.5582256 ... 1.355097 -0.1736737 -0.13020413] [ 0.1431629 -0.30146566 -1.0816159 ... 1.4423461 1.1400332 -0.54818213] [-0.91304594 0.25751966 -0.3541964 ... -1.0326333 -0.6336467 -0.34394294] ... [ 0.03576234 1.1312729 1.9666471 ... 1.3699337 -1.0349025 -0.07151587] [-0.50157326 -0.49492472 1.2034382 ... 2.5816436 -0.3554313 -0.11867522] [-0.5012972 -0.8925343 0.22559938 ... -1.4657121 0.3937098 -0.38559586]] [[-1.0058335 0.9137059 0.42063546 ... 1.2229877 -0.47666058 1.2863592 ] [-0.8834272 -0.01377251 -0.09959468 ... -0.36635855 -0.75215805 1.6989714 ] [-0.9435488 1.9351068 -0.28398582 ... 0.2888135 -0.09986418 -0.49040398] ... [ 0.58640283 -1.2145786 0.75110185 ... 0.7040771 -0.1949747 0.01234079] [ 0.16972253 0.36278075 -0.93905634 ... 0.50782037 0.40349624 -1.4074694 ] [-0.38221443 0.39539218 -1.3019844 ... 0.3039268 -1.1511027 1.6980376 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[ 2. 1. -0. ... -0. 2. -0.] [-0. 3. -0. ... -1. -0. 1.] [ 1. -0. 1. ... 1. -0. 2.] ... [ 1. 1. 1. ... 2. -0. -0.] [-0. -1. -0. ... 2. -0. -0.] [-0. -2. 2. ... -1. -0. -0.]] [[ 3. 1. -0. ... 2. -0. -0.] [ 1. -0. -1. ... 2. 2. -0.] [-0. 1. -0. ... -1. -0. -0.] ... [ 1. 2. 2. ... 2. -1. -0.] [-0. -0. 2. ... 3. -0. -0.] [-0. -0. 1. ... -1. 1. -0.]] [[-1. 1. 1. ... 2. -0. 2.] [-0. -0. -0. ... -0. -0. 2.] [-0. 2. -0. ... 1. -0. -0.] ... [ 1. -1. 1. ... 1. -0. 1.] [ 1. 1. -0. ... 1. 1. -1.] [-0. 1. -1. ... 1. -1. 2.]]]]; ov_res: [[[[ 2. 1. -0. ... -0. 2. -0.] [-0. 3. -0. ... -1. -0. 1.] [ 1. -0. 1. ... 1. -0. 2.] ... [ 1. 1. 1. ... 2. -0. -0.] [-0. -1. -0. ... 2. -0. -0.] [-0. -2. 2. ... -1. -0. -0.]] [[ 3. 1. -0. ... 2. -0. -0.] [ 1. -0. -1. ... 2. 2. -0.] [-0. 1. -0. ... -1. -0. -0.] ... [ 1. 2. 2. ... 2. -1. -0.] [-0. -0. 2. ... 3. -0. -0.] 
[-0. -0. 1. ... -1. 1. -0.]] [[-1. 1. 1. ... 2. -0. 2.] [-0. -0. -0. ... -0. -0. 2.] [-0. 2. -0. ... 1. -0. -0.] ... [ 1. -1. 1. ... 1. -0. 1.] [ 1. 1. -0. ... 1. 1. -1.] [-0. 1. -1. ... 1. -1. 2.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_ceil.py::TestCeil::test_ceil[ ie_device:CPU - precision:FP32 - inplace:True ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_ceil.___torch_mangle_353.aten_ceil, %x.1 : Tensor): %2 : Tensor = aten::ceil_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_ceil.py:22:26 %3 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %2) return (%3) fw_re: [[[[ 2. -1. 1. ... 1. -1. -0.] [-1. 1. -0. ... -0. -0. 1.] [ 1. -1. -0. ... 2. -2. 1.] ... [-1. -0. -1. ... -0. -0. 1.] [-0. -1. 2. ... 1. -2. 1.] [ 1. 1. 1. ... -0. -0. 1.]] [[ 2. -0. 1. ... -0. -0. 1.] [ 1. 1. -0. ... 1. 1. -0.] [-1. -0. 3. ... 1. 1. -0.] ... [-0. 1. 1. ... -0. -1. -1.] [ 1. 1. 1. ... 2. 2. -0.] [-0. 1. -1. ... -0. 2. -0.]] [[ 1. 1. -0. ... 1. -2. 1.] [ 1. 3. -0. ... 2. -1. 2.] [ 2. -0. -1. ... 2. 1. -0.] ... [-1. -0. -1. ... -1. -0. 1.] [-1. 2. 1. ... -0. 1. 2.] [-0. 1. 1. ... 1. 1. 1.]]]]; ov_res: [[[[ 2. -1. 1. ... 1. -1. -0.] [-1. 1. -0. ... -0. -0. 1.] [ 1. -1. -0. ... 2. -2. 1.] ... [-1. -0. -1. ... -0. -0. 1.] [-0. -1. 2. ... 1. -2. 1.] [ 1. 1. 1. ... -0. -0. 1.]] [[ 2. -0. 1. ... -0. -0. 1.] [ 1. 1. -0. ... 1. 1. -0.] [-1. -0. 3. ... 1. 1. -0.] ... [-0. 1. 1. ... -0. -1. -1.] [ 1. 1. 1. ... 2. 2. -0.] [-0. 1. -1. ... -0. 2. -0.]] [[ 1. 1. -0. ... 1. -2. 1.] [ 1. 3. -0. ... 2. -1. 2.] [ 2. -0. -1. ... 2. 1. -0.] ... [-1. -0. -1. ... -1. -0. 1.] [-1. 2. 1. ... -0. 1. 2.] [-0. 1. 1. ... 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[ 2. -1. 1. ... 1. -1. -0.] [-1. 1. -0. ... -0. -0. 1.] [ 1. -1. -0. ... 2. -2. 1.] ... [-1. -0. -1. ... -0. -0. 1.] [-0. -1. 2. ... 1. -2. 1.] [ 1. 1. 1. ... -0. -0. 1.]] [[ 2. -0. 1. ... -0. -0. 1.] [ 1. 1. -0. ... 1. 1. -0.] [-1. -0. 3. ... 1. 1. -0.] ... [-0. 1. 1. ... -0. -1. -1.] [ 1. 1. 1. ... 2. 2. -0.] [-0. 1. -1. ... -0. 2. -0.]] [[ 1. 1. -0. ... 1. -2. 1.] [ 1. 3. -0. ... 2. -1. 2.] [ 2. -0. -1. ... 2. 1. -0.] ... [-1. -0. -1. ... -1. -0. 1.] [-1. 2. 1. ... -0. 1. 2.] [-0. 1. 1. ... 1. 1. 1.]]]]; ov_res: [[[[ 2. -1. 
1. ... 1. -1. -0.] [-1. 1. -0. ... -0. -0. 1.] [ 1. -1. -0. ... 2. -2. 1.] ... [-1. -0. -1. ... -0. -0. 1.] [-0. -1. 2. ... 1. -2. 1.] [ 1. 1. 1. ... -0. -0. 1.]] [[ 2. -0. 1. ... -0. -0. 1.] [ 1. 1. -0. ... 1. 1. -0.] [-1. -0. 3. ... 1. 1. -0.] ... [-0. 1. 1. ... -0. -1. -1.] [ 1. 1. 1. ... 2. 2. -0.] [-0. 1. -1. ... -0. 2. -0.]] [[ 1. 1. -0. ... 1. -2. 1.] [ 1. 3. -0. ... 2. -1. 2.] [ 2. -0. -1. ... 2. 1. -0.] ... [-1. -0. -1. ... -1. -0. 1.] [-1. 2. 1. ... -0. 1. 2.] [-0. 1. 1. ... 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:True - minimum:0.0 - maximum:1.0 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_354.aten_clamp, %x.1 : Tensor): %self.max : Float(requires_grad=0, device=cpu) = prim::Constant[value={1}]() %self.min : Float(requires_grad=0, device=cpu) = prim::Constant[value={0}]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[0.01042979 0.505914 0. ... 0. 0.14174134 0. ] [0. 0.5699378 0.35465938 ... 0. 0. 0. ] [0.0563492 0.9416447 0. ... 1. 0.01639015 0.9143563 ] ... [0. 0. 0. ... 0.12708366 0.6701346 1. ] [0.35773596 0. 0. ... 1. 1. 0. ] [0. 0.03618183 0. ... 0. 0. 1. ]] [[0. 0. 0.5923144 ... 0. 1. 0. ] [0.42113543 1. 0.8317414 ... 0. 0.636389 0. ] [0. 0. 0. ... 0. 0. 1. ] ... [0. 0. 0.31205663 ... 0.5644957 0. 0.50244844] [0.12894574 0. 0.64107054 ... 0.43851426 0.62412095 0. ] [1. 0.69401217 1. ... 1. 0. 1. ]] [[0. 0. 0.14424567 ... 0.538075 0.22893107 1. ] [0. 1. 0. ... 0. 0. 0.49827018] [1. 0.37205058 0.1885987 ... 0. 0.51518625 0. ] ... [0. 0.2443639 0. ... 0.30422363 0. 1. ] [0.6611302 0. 0. ... 0. 0.14628202 0. ] [0. 0. 1. ... 0. 0.3341026 0. ]]]]; ov_res: [[[[0.01042979 0.505914 0. ... 0. 0.14174134 0. ] [0. 0.5699378 0.35465938 ... 0. 0. 0. ] [0.0563492 0.9416447 0. ... 1. 0.01639015 0.9143563 ] ... [0. 0. 0. ... 0.12708366 0.6701346 1. ] [0.35773596 0. 0. ... 1. 1. 0. ] [0. 0.03618183 0. ... 0. 0. 1. ]] [[0. 0. 0.5923144 ... 0. 1. 0. ] [0.42113543 1. 0.8317414 ... 0. 0.636389 0. ] [0. 0. 0. ... 0. 0. 1. ] ... [0. 0. 0.31205663 ... 0.5644957 0. 0.50244844] [0.12894574 0. 0.64107054 ... 0.43851426 0.62412095 0. ] [1. 0.69401217 1. ... 1. 0. 1. ]] [[0. 0. 0.14424567 ... 0.538075 0.22893107 1. ] [0. 1. 0. ... 0. 0. 0.49827018] [1. 0.37205058 0.1885987 ... 0. 0.51518625 0. ] ... [0. 0.2443639 0. ... 0.30422363 0. 1. ] [0.6611302 0. 0. ... 0. 0.14628202 0. ] [0. 0. 1. ... 0. 0.3341026 0. 
]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:True - minimum:-0.5 - maximum:1.5 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_356.aten_clamp, %x.1 : Tensor): %self.max : Float(requires_grad=0, device=cpu) = prim::Constant[value={1.5}]() %self.min : Float(requires_grad=0, device=cpu) = prim::Constant[value={-0.5}]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[-0.5 -0.5 -0.5 ... -0.5 -0.5 -0.3041856 ] [ 0.3776064 1.5 -0.5 ... 0.22787516 -0.4789925 1.1533448 ] [-0.5 1.2647655 0.19136555 ... -0.5 1.5 -0.06717673] ... [ 0.4628206 1.2725197 -0.10198466 ... -0.5 1.0741478 0.33497837] [-0.5 0.9372151 0.6701935 ... 0.38398746 0.16573632 -0.34152246] [-0.5 0.25874183 0.3800829 ... -0.13615112 1.5 0.15139951]] [[ 1.5 1.0170742 -0.40788665 ... 0.29615167 1.2439009 1.1335534 ] [-0.24638651 -0.5 0.7150232 ... 0.42406854 -0.5 0.05812654] [-0.5 1.4360175 0.27724162 ... -0.5 -0.5 -0.5 ] ... [ 1.5 -0.5 0.22920036 ... 0.30339083 -0.24840397 0.07778992] [ 0.4663121 0.77242696 -0.5 ... 1.0323672 -0.5 -0.5 ] [-0.5 -0.5 1.2979656 ... -0.5 1.5 -0.06919559]] [[ 0.51442915 0.15784794 -0.1644071 ... 0.8435306 -0.5 0.5754307 ] [-0.5 0.2738395 -0.5 ... -0.5 -0.5 0.24960564] [-0.3783221 -0.5 -0.5 ... -0.5 -0.5 1.5 ] ... [-0.5 0.9565316 -0.06165987 ... 0.3490173 -0.5 -0.5 ] [-0.46332756 -0.5 -0.09055247 ... -0.5 0.5523892 -0.5 ] [-0.36924165 -0.5 -0.5 ... 0.07242224 -0.11398429 -0.5 ]]]]; ov_res: [[[[-0.5 -0.5 -0.5 ... -0.5 -0.5 -0.3041856 ] [ 0.3776064 1.5 -0.5 ... 0.22787516 -0.4789925 1.1533448 ] [-0.5 1.2647655 0.19136555 ... -0.5 1.5 -0.06717673] ... [ 0.4628206 1.2725197 -0.10198466 ... -0.5 1.0741478 0.33497837] [-0.5 0.9372151 0.6701935 ... 0.38398746 0.16573632 -0.34152246] [-0.5 0.25874183 0.3800829 ... -0.13615112 1.5 0.15139951]] [[ 1.5 1.0170742 -0.40788665 ... 0.29615167 1.2439009 1.1335534 ] [-0.24638651 -0.5 0.7150232 ... 
0.42406854 -0.5 0.05812654] [-0.5 1.4360175 0.27724162 ... -0.5 -0.5 -0.5 ] ... [ 1.5 -0.5 0.22920036 ... 0.30339083 -0.24840397 0.07778992] [ 0.4663121 0.77242696 -0.5 ... 1.0323672 -0.5 -0.5 ] [-0.5 -0.5 1.2979656 ... -0.5 1.5 -0.06919559]] [[ 0.51442915 0.15784794 -0.1644071 ... 0.8435306 -0.5 0.5754307 ] [-0.5 0.2738395 -0.5 ... -0.5 -0.5 0.24960564] [-0.3783221 -0.5 -0.5 ... -0.5 -0.5 1.5 ] ... [-0.5 0.9565316 -0.06165987 ... 0.3490173 -0.5 -0.5 ] [-0.46332756 -0.5 -0.09055247 ... -0.5 0.5523892 -0.5 ] [-0.36924165 -0.5 -0.5 ... 0.07242224 -0.11398429 -0.5 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:True - minimum:None - maximum:10.0 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_358.aten_clamp, %x.1 : Tensor): %self.max : Float(requires_grad=0, device=cpu) = prim::Constant[value={10}]() %self.min : NoneType = prim::Constant() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[-1.73996341e+00 8.97132635e-01 -1.74378967e+00 ... -5.09714186e-01 7.47745872e-01 1.06037056e+00] [-1.52714813e+00 -1.88490786e-02 -8.07407200e-01 ... 1.39636075e+00 6.94841743e-02 -1.56125307e-01] [ 1.37989998e+00 6.83359385e-01 6.34869456e-01 ... 5.96598446e-01 -1.46411216e+00 -5.80770783e-02] ... [-8.83093655e-01 -1.48094296e-01 -3.82211775e-01 ... -8.02856684e-01 -9.73381996e-01 -4.20664757e-01] [-1.25338495e+00 -1.88912129e+00 -2.84834313e+00 ... 1.49943724e-01 -7.21094549e-01 8.84404540e-01] [-7.92483762e-02 6.10203803e-01 1.16720110e-01 ... -1.18537247e+00 2.61925384e-02 -3.14950079e-01]] [[-1.19476068e+00 -1.78953633e-01 8.16659391e-01 ... -3.37975286e-02 -1.41083646e+00 4.61330950e-01] [ 7.41691828e-01 -7.99803376e-01 8.39685500e-01 ... 1.17980826e+00 -1.85706556e-01 3.32338667e+00] [-2.53158092e+00 1.83857870e+00 6.40882477e-02 ... 8.38362705e-03 -8.05548608e-01 1.41682804e+00] ... [-1.06580341e+00 9.59868670e-01 3.62884104e-01 ... -1.07418573e+00 -6.19379461e-01 1.15935528e+00] [-6.48338318e-01 -1.15867686e+00 3.63806307e-01 ... 1.21582186e+00 4.92328480e-02 4.46215063e-01] [-1.10344565e+00 -1.12234867e+00 1.01787376e+00 ... -2.48405803e-02 5.43229401e-01 -1.45681664e-01]] [[ 5.56451082e-01 6.72243536e-01 -5.79775691e-01 ... 8.91887903e-01 1.01859701e+00 6.41092837e-01] [ 3.43673795e-01 -2.36803129e-01 4.93949354e-01 ... 7.04969347e-01 -5.82332134e-01 -5.54754555e-01] [-1.45076871e+00 8.92355517e-02 1.80674642e-01 ... 5.79030693e-01 -1.04058355e-01 3.61359149e-01] ... [-5.06062508e-02 -2.64754117e-01 1.43432522e+00 ... 
-4.32333015e-02 5.60635388e-01 2.12226701e+00] [ 8.54722738e-01 -1.59726155e+00 -2.07720235e-01 ... -3.76822829e-01 1.09652889e+00 6.13062680e-01] [ 9.44711719e-05 -9.11019504e-01 2.32933298e-01 ... -9.30570662e-01 -2.23342463e-01 1.88471520e+00]]]]; ov_res: [[[[-1.73996341e+00 8.97132635e-01 -1.74378967e+00 ... -5.09714186e-01 7.47745872e-01 1.06037056e+00] [-1.52714813e+00 -1.88490786e-02 -8.07407200e-01 ... 1.39636075e+00 6.94841743e-02 -1.56125307e-01] [ 1.37989998e+00 6.83359385e-01 6.34869456e-01 ... 5.96598446e-01 -1.46411216e+00 -5.80770783e-02] ... [-8.83093655e-01 -1.48094296e-01 -3.82211775e-01 ... -8.02856684e-01 -9.73381996e-01 -4.20664757e-01] [-1.25338495e+00 -1.88912129e+00 -2.84834313e+00 ... 1.49943724e-01 -7.21094549e-01 8.84404540e-01] [-7.92483762e-02 6.10203803e-01 1.16720110e-01 ... -1.18537247e+00 2.61925384e-02 -3.14950079e-01]] [[-1.19476068e+00 -1.78953633e-01 8.16659391e-01 ... -3.37975286e-02 -1.41083646e+00 4.61330950e-01] [ 7.41691828e-01 -7.99803376e-01 8.39685500e-01 ... 1.17980826e+00 -1.85706556e-01 3.32338667e+00] [-2.53158092e+00 1.83857870e+00 6.40882477e-02 ... 8.38362705e-03 -8.05548608e-01 1.41682804e+00] ... [-1.06580341e+00 9.59868670e-01 3.62884104e-01 ... -1.07418573e+00 -6.19379461e-01 1.15935528e+00] [-6.48338318e-01 -1.15867686e+00 3.63806307e-01 ... 1.21582186e+00 4.92328480e-02 4.46215063e-01] [-1.10344565e+00 -1.12234867e+00 1.01787376e+00 ... -2.48405803e-02 5.43229401e-01 -1.45681664e-01]] [[ 5.56451082e-01 6.72243536e-01 -5.79775691e-01 ... 8.91887903e-01 1.01859701e+00 6.41092837e-01] [ 3.43673795e-01 -2.36803129e-01 4.93949354e-01 ... 7.04969347e-01 -5.82332134e-01 -5.54754555e-01] [-1.45076871e+00 8.92355517e-02 1.80674642e-01 ... 5.79030693e-01 -1.04058355e-01 3.61359149e-01] ... [-5.06062508e-02 -2.64754117e-01 1.43432522e+00 ... -4.32333015e-02 5.60635388e-01 2.12226701e+00] [ 8.54722738e-01 -1.59726155e+00 -2.07720235e-01 ... 
-3.76822829e-01 1.09652889e+00 6.13062680e-01] [ 9.44711719e-05 -9.11019504e-01 2.32933298e-01 ... -9.30570662e-01 -2.23342463e-01 1.88471520e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:True - minimum:None - maximum:-10.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_360.aten_clamp, %x.1 : Tensor): %self.max : Float(requires_grad=0, device=cpu) = prim::Constant[value={-10}]() %self.min : NoneType = prim::Constant() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]] [[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]] [[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]]]]; ov_res: [[[[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]] [[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]] [[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:True - minimum:10.0 - maximum:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_362.aten_clamp, %x.1 : Tensor): %self.max : NoneType = prim::Constant() %self.min : Float(requires_grad=0, device=cpu) = prim::Constant[value={10}]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]] [[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]] [[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]]]]; ov_res: [[[[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]] [[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]] [[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:True - minimum:-10.0 - maximum:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_364.aten_clamp, %x.1 : Tensor): %self.max : NoneType = prim::Constant() %self.min : Float(requires_grad=0, device=cpu) = prim::Constant[value={-10}]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[ 0.6934987 -2.3372803 -1.2062342 ... -0.07690034 1.9190893 0.41111374] [ 0.8954483 1.7606862 -0.34492078 ... 0.658538 0.1627962 -0.23291278] [-1.4534713 -0.9666978 1.0089257 ... 0.4052586 0.39692596 0.26020455] ... [-1.3255848 0.40046814 -2.2982538 ... 0.5355526 -0.09787536 0.37704197] [-2.1453714 0.46203208 0.8440214 ... -0.9429502 -2.2400455 -0.27701762] [ 0.653749 -0.5492926 1.3735955 ... 0.27689964 0.22066312 -0.3330563 ]] [[ 0.10145102 0.48494306 0.9356184 ... 1.3262261 -0.5288394 1.4318192 ] [ 1.0482303 1.9702791 -0.510769 ... -0.24000195 2.1645403 0.33564112] [-0.6381277 1.8244749 -0.16744372 ... -1.2384441 0.99500585 0.76536703] ... [-0.6993042 1.1099676 -0.2723365 ... -0.1086564 -1.7217797 -0.3783229 ] [-0.75956094 -0.15774447 -0.2642359 ... -0.795851 -0.7679694 0.8583678 ] [ 1.5552081 1.8374933 1.0023887 ... 1.5539944 -0.5679083 0.6758909 ]] [[ 1.148359 -0.23163854 -1.0290743 ... 0.2583467 0.48348913 -0.5453676 ] [-0.29680443 -0.26604488 1.4495623 ... 0.255099 -0.22470206 0.29632208] [ 0.5915338 -1.1914958 0.9596672 ... -0.14056788 -0.6280183 -1.3405517 ] ... [ 1.7152183 0.4681505 -0.2024182 ... 1.0624855 -1.0036204 1.0796741 ] [-0.6666056 1.68074 -0.3245945 ... -0.226774 -1.110169 -0.53884274] [-0.16936757 1.4414543 0.5535254 ... -0.73534626 0.22554216 0.8901126 ]]]]; ov_res: [[[[ 0.6934987 -2.3372803 -1.2062342 ... -0.07690034 1.9190893 0.41111374] [ 0.8954483 1.7606862 -0.34492078 ... 0.658538 0.1627962 -0.23291278] [-1.4534713 -0.9666978 1.0089257 ... 0.4052586 0.39692596 0.26020455] ... 
[-1.3255848 0.40046814 -2.2982538 ... 0.5355526 -0.09787536 0.37704197] [-2.1453714 0.46203208 0.8440214 ... -0.9429502 -2.2400455 -0.27701762] [ 0.653749 -0.5492926 1.3735955 ... 0.27689964 0.22066312 -0.3330563 ]] [[ 0.10145102 0.48494306 0.9356184 ... 1.3262261 -0.5288394 1.4318192 ] [ 1.0482303 1.9702791 -0.510769 ... -0.24000195 2.1645403 0.33564112] [-0.6381277 1.8244749 -0.16744372 ... -1.2384441 0.99500585 0.76536703] ... [-0.6993042 1.1099676 -0.2723365 ... -0.1086564 -1.7217797 -0.3783229 ] [-0.75956094 -0.15774447 -0.2642359 ... -0.795851 -0.7679694 0.8583678 ] [ 1.5552081 1.8374933 1.0023887 ... 1.5539944 -0.5679083 0.6758909 ]] [[ 1.148359 -0.23163854 -1.0290743 ... 0.2583467 0.48348913 -0.5453676 ] [-0.29680443 -0.26604488 1.4495623 ... 0.255099 -0.22470206 0.29632208] [ 0.5915338 -1.1914958 0.9596672 ... -0.14056788 -0.6280183 -1.3405517 ] ... [ 1.7152183 0.4681505 -0.2024182 ... 1.0624855 -1.0036204 1.0796741 ] [-0.6666056 1.68074 -0.3245945 ... -0.226774 -1.110169 -0.53884274] [-0.16936757 1.4414543 0.5535254 ... -0.73534626 0.22554216 0.8901126 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:True - minimum:100 - maximum:200 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_366.aten_clamp, %x.1 : Tensor): %self.max : Long(requires_grad=0, device=cpu) = prim::Constant[value={200}]() %self.min : Long(requires_grad=0, device=cpu) = prim::Constant[value={100}]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]] [[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]] [[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]]]]; ov_res: [[[[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]] [[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]] [[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:False - minimum:0.0 - maximum:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_368.aten_clamp, %x.1 : Tensor): %self.max : float = prim::Constant[value=1.]() %self.min : float = prim::Constant[value=0.]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[0. 0. 0. ... 1. 0. 0. ] [0. 0. 1. ... 0. 1. 0. ] [0. 0. 0. ... 0. 1. 0.9808778 ] ... [0. 0.631893 0. ... 0.2572852 0. 0. ] [0. 0.24199346 0. ... 0. 0.755894 0.11898739] [0.18231943 1. 0. ... 0. 0.91352534 0. ]] [[0.6263874 0.04845022 0.234147 ... 0. 1. 0.3311636 ] [0.17691481 1. 0. ... 0. 1. 0.11877368] [0. 0.25041804 0. ... 1. 0. 1. ] ... [0.07555532 1. 0. ... 0. 0.55218655 0. ] [0.01915799 0. 0.66463715 ... 0. 0. 0.8798359 ] [0. 1. 0. ... 0. 0.7659764 1. ]] [[0.25736016 0. 0. ... 0.35755962 0. 0. ] [0. 0. 0.91139674 ... 0. 0. 0. ] [0. 0. 0. ... 0. 0.660299 0. ] ... [0.49496794 0. 0. ... 0. 0.9270155 0.00402552] [0.757636 0.3232469 0.3856382 ... 0.2058015 0.46060422 0.2491827 ] [0.13513981 0. 0. ... 1. 1. 0. ]]]]; ov_res: [[[[0. 0. 0. ... 1. 0. 0. ] [0. 0. 1. ... 0. 1. 0. ] [0. 0. 0. ... 0. 1. 0.9808778 ] ... [0. 0.631893 0. ... 0.2572852 0. 0. ] [0. 0.24199346 0. ... 0. 0.755894 0.11898739] [0.18231943 1. 0. ... 0. 0.91352534 0. ]] [[0.6263874 0.04845022 0.234147 ... 0. 1. 0.3311636 ] [0.17691481 1. 0. ... 0. 1. 0.11877368] [0. 0.25041804 0. ... 1. 0. 1. ] ... [0.07555532 1. 0. ... 0. 0.55218655 0. ] [0.01915799 0. 0.66463715 ... 0. 0. 0.8798359 ] [0. 1. 0. ... 0. 0.7659764 1. ]] [[0.25736016 0. 0. ... 0.35755962 0. 0. ] [0. 0. 0.91139674 ... 0. 0. 0. ] [0. 0. 0. ... 0. 0.660299 0. ] ... [0.49496794 0. 0. ... 0. 0.9270155 0.00402552] [0.757636 0.3232469 0.3856382 ... 0.2058015 0.46060422 0.2491827 ] [0.13513981 0. 0. ... 1. 1. 0. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:False - minimum:-0.5 - maximum:1.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_370.aten_clamp, %x.1 : Tensor): %self.max : float = prim::Constant[value=1.5]() %self.min : float = prim::Constant[value=-0.5]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[ 0.24951105 0.23746872 0.7625523 ... -0.46630535 0.1774051 -0.5 ] [-0.5 0.37852743 0.063776 ... 0.88490754 1.0179224 0.17748743] [ 1.2372175 -0.5 1.5 ... -0.5 0.58095133 0.182295 ] ... [ 0.16167332 -0.5 -0.17803253 ... 0.1719044 0.74486077 1.2378155 ] [-0.00910346 0.9885844 -0.49924397 ... 1.033337 1.0842164 -0.18505085] [ 0.18664426 1.5 -0.1666298 ... 0.06685644 0.06587908 0.15873389]] [[ 1.2477725 0.8128835 -0.5 ... 0.164771 -0.5 -0.5 ] [-0.2475939 -0.5 -0.13642232 ... 0.1879506 -0.13979682 0.00321098] [-0.17044863 -0.5 1.3939768 ... 1.5 0.6605868 -0.12467769] ... [ 0.29546452 1.1790972 0.24529044 ... 1.5 -0.5 0.8939036 ] [-0.5 -0.5 0.24782515 ... -0.5 1.5 -0.5 ] [-0.5 0.467425 -0.49905026 ... 0.46699968 -0.5 -0.4862841 ]] [[ 0.77031744 -0.5 -0.5 ... 0.15831913 -0.10728616 -0.11779305] [ 0.11572379 0.10801491 1.5 ... -0.5 -0.5 1.2247442 ] [ 0.14683482 -0.20751572 -0.5 ... -0.5 -0.5 -0.430188 ] ... [ 0.28242052 1.5 -0.5 ... -0.5 -0.5 -0.5 ] [-0.5 -0.5 -0.01740164 ... 0.5848001 0.03710768 0.08325887] [-0.5 0.6677298 -0.32298303 ... 0.3500785 1.2657403 0.4137359 ]]]]; ov_res: [[[[ 0.24951105 0.23746872 0.7625523 ... -0.46630535 0.1774051 -0.5 ] [-0.5 0.37852743 0.063776 ... 0.88490754 1.0179224 0.17748743] [ 1.2372175 -0.5 1.5 ... -0.5 0.58095133 0.182295 ] ... [ 0.16167332 -0.5 -0.17803253 ... 0.1719044 0.74486077 1.2378155 ] [-0.00910346 0.9885844 -0.49924397 ... 1.033337 1.0842164 -0.18505085] [ 0.18664426 1.5 -0.1666298 ... 0.06685644 0.06587908 0.15873389]] [[ 1.2477725 0.8128835 -0.5 ... 0.164771 -0.5 -0.5 ] [-0.2475939 -0.5 -0.13642232 ... 
0.1879506 -0.13979682 0.00321098] [-0.17044863 -0.5 1.3939768 ... 1.5 0.6605868 -0.12467769] ... [ 0.29546452 1.1790972 0.24529044 ... 1.5 -0.5 0.8939036 ] [-0.5 -0.5 0.24782515 ... -0.5 1.5 -0.5 ] [-0.5 0.467425 -0.49905026 ... 0.46699968 -0.5 -0.4862841 ]] [[ 0.77031744 -0.5 -0.5 ... 0.15831913 -0.10728616 -0.11779305] [ 0.11572379 0.10801491 1.5 ... -0.5 -0.5 1.2247442 ] [ 0.14683482 -0.20751572 -0.5 ... -0.5 -0.5 -0.430188 ] ... [ 0.28242052 1.5 -0.5 ... -0.5 -0.5 -0.5 ] [-0.5 -0.5 -0.01740164 ... 0.5848001 0.03710768 0.08325887] [-0.5 0.6677298 -0.32298303 ... 0.3500785 1.2657403 0.4137359 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:False - minimum:None - maximum:10.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_372.aten_clamp, %x.1 : Tensor): %self.max : float = prim::Constant[value=10.]() %self.min : NoneType = prim::Constant() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[-0.29217732 -0.8276597 -0.6953213 ... -0.6491128 1.3566443 -0.7681098 ] [-0.36547318 -0.5091449 -0.1751801 ... -0.30824468 1.7497257 0.34176302] [ 1.1188661 -1.5209202 -1.1516783 ... 1.0536251 -0.25043505 0.14775975] ... [-1.2516425 0.34925583 -1.6680195 ... -1.6112579 0.22763641 0.57057256] [ 0.02852183 -0.37894547 1.1842948 ... -1.4929607 -0.59056956 1.1578094 ] [-1.3678331 -0.3872101 -0.03253042 ... -0.779672 0.04567566 0.20743872]] [[-0.672115 -0.07629071 -2.2539546 ... 1.4139012 0.06862113 -0.38568097] [ 0.07826194 0.66461945 0.8382318 ... -1.448687 1.4202143 -0.21622598] [ 0.4099103 0.29523504 -1.1095191 ... 1.255609 0.61672646 -1.0086366 ] ... [ 0.07226022 -0.02901642 0.3374081 ... -1.6803128 1.0080944 0.68667 ] [-0.22018266 -2.347409 1.9457829 ... -0.31421462 -0.9522087 -1.6320914 ] [ 0.12663132 0.75553954 -0.14621364 ... 0.9145173 0.29128438 -1.6456773 ]] [[-1.2046472 -0.02979499 0.56289864 ... 1.2752798 -0.9419264 -0.47471637] [-0.7003195 -0.10488365 0.40958703 ... 0.07911373 -1.6593425 -1.5717257 ] [ 0.9695952 0.9152592 0.5259669 ... -0.09181377 -0.04238385 0.23136276] ... [ 0.97141314 1.0402565 0.14575844 ... 0.43715778 -0.71150124 -0.90710497] [-0.78117126 0.71268135 0.79874134 ... 0.7017164 0.43770635 0.9756937 ] [ 2.0852585 1.5568733 -0.53190935 ... -0.7845907 -0.0025674 -1.8945771 ]]]]; ov_res: [[[[-0.29217732 -0.8276597 -0.6953213 ... -0.6491128 1.3566443 -0.7681098 ] [-0.36547318 -0.5091449 -0.1751801 ... -0.30824468 1.7497257 0.34176302] [ 1.1188661 -1.5209202 -1.1516783 ... 1.0536251 -0.25043505 0.14775975] ... [-1.2516425 0.34925583 -1.6680195 ... 
-1.6112579 0.22763641 0.57057256] [ 0.02852183 -0.37894547 1.1842948 ... -1.4929607 -0.59056956 1.1578094 ] [-1.3678331 -0.3872101 -0.03253042 ... -0.779672 0.04567566 0.20743872]] [[-0.672115 -0.07629071 -2.2539546 ... 1.4139012 0.06862113 -0.38568097] [ 0.07826194 0.66461945 0.8382318 ... -1.448687 1.4202143 -0.21622598] [ 0.4099103 0.29523504 -1.1095191 ... 1.255609 0.61672646 -1.0086366 ] ... [ 0.07226022 -0.02901642 0.3374081 ... -1.6803128 1.0080944 0.68667 ] [-0.22018266 -2.347409 1.9457829 ... -0.31421462 -0.9522087 -1.6320914 ] [ 0.12663132 0.75553954 -0.14621364 ... 0.9145173 0.29128438 -1.6456773 ]] [[-1.2046472 -0.02979499 0.56289864 ... 1.2752798 -0.9419264 -0.47471637] [-0.7003195 -0.10488365 0.40958703 ... 0.07911373 -1.6593425 -1.5717257 ] [ 0.9695952 0.9152592 0.5259669 ... -0.09181377 -0.04238385 0.23136276] ... [ 0.97141314 1.0402565 0.14575844 ... 0.43715778 -0.71150124 -0.90710497] [-0.78117126 0.71268135 0.79874134 ... 0.7017164 0.43770635 0.9756937 ] [ 2.0852585 1.5568733 -0.53190935 ... -0.7845907 -0.0025674 -1.8945771 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:False - minimum:None - maximum:-10.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_374.aten_clamp, %x.1 : Tensor): %self.max : float = prim::Constant[value=-10.]() %self.min : NoneType = prim::Constant() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) chema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Excfw_re: [[[[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]] [[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]] [[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]]]]; ov_res: [[[[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]] [[-10. -10. -10. ... -10. -10. -10.] [-10. 
-10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]] [[-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] ... [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.] [-10. -10. -10. ... -10. -10. -10.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:False - minimum:10.0 - maximum:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_376.aten_clamp, %x.1 : Tensor): %self.max : NoneType = prim::Constant() %self.min : float = prim::Constant[value=10.]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]] [[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]] [[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]]]]; ov_res: [[[[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]] [[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]] [[10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] ... [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.] [10. 10. 10. ... 10. 10. 10.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:False - minimum:-10.0 - maximum:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_378.aten_clamp, %x.1 : Tensor): %self.max : NoneType = prim::Constant() %self.min : float = prim::Constant[value=-10.]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[-0.3920762 -0.62196773 -0.52539235 ... -0.31492952 0.3877526 0.04922557] [-0.48091173 0.3671077 1.7885765 ... -0.5913851 -1.3018435 0.7118846 ] [ 0.88851154 -0.08397198 -0.5373235 ... -0.607851 0.42384434 0.48103908] ... [-0.48502752 -0.53821087 1.1163578 ... 0.7974969 -0.26438034 0.23329219] [-0.14224775 -0.26345995 0.448379 ... 0.34628943 0.31207016 0.7708659 ] [ 0.2277117 1.8641828 -1.7241133 ... 0.7607711 1.3137639 0.37886715]] [[ 0.85607445 1.2973496 -1.0765389 ... -1.1485909 3.262737 -0.23388478] [ 0.10172811 0.09070783 -1.2638693 ... 0.14063416 0.93660295 0.3925541 ] [ 0.5153967 -1.6505752 1.1197144 ... -1.0109519 -0.5621177 -0.3139762 ] ... [ 0.38935965 0.50708145 -0.9255916 ... -0.36747062 -1.9751621 0.37935835] [ 0.7648331 -1.7949886 -0.46497995 ... 1.0848904 -1.2594315 0.27623713] [-0.5999262 -0.08541398 0.6534064 ... 0.5761036 0.4693331 0.36222738]] [[ 0.47497177 0.5666656 0.6383052 ... -0.27617705 0.6591644 -0.44285625] [-0.60283947 0.73853236 1.255591 ... -0.8315863 -0.18046674 0.49177918] [-0.2350577 0.36766034 -0.8714182 ... 0.96220624 0.7382939 -0.29037586] ... [-0.19090834 -0.8470843 0.3119486 ... 0.08941232 -0.9161326 0.99510854] [ 1.7731361 -1.007954 -0.37525058 ... 1.5484846 0.73584175 -0.62064606] [ 0.99037427 1.7470994 -1.3920045 ... -1.6528425 -0.0482593 -1.6119183 ]]]]; ov_res: [[[[-0.3920762 -0.62196773 -0.52539235 ... -0.31492952 0.3877526 0.04922557] [-0.48091173 0.3671077 1.7885765 ... -0.5913851 -1.3018435 0.7118846 ] [ 0.88851154 -0.08397198 -0.5373235 ... -0.607851 0.42384434 0.48103908] ... [-0.48502752 -0.53821087 1.1163578 ... 
0.7974969 -0.26438034 0.23329219] [-0.14224775 -0.26345995 0.448379 ... 0.34628943 0.31207016 0.7708659 ] [ 0.2277117 1.8641828 -1.7241133 ... 0.7607711 1.3137639 0.37886715]] [[ 0.85607445 1.2973496 -1.0765389 ... -1.1485909 3.262737 -0.23388478] [ 0.10172811 0.09070783 -1.2638693 ... 0.14063416 0.93660295 0.3925541 ] [ 0.5153967 -1.6505752 1.1197144 ... -1.0109519 -0.5621177 -0.3139762 ] ... [ 0.38935965 0.50708145 -0.9255916 ... -0.36747062 -1.9751621 0.37935835] [ 0.7648331 -1.7949886 -0.46497995 ... 1.0848904 -1.2594315 0.27623713] [-0.5999262 -0.08541398 0.6534064 ... 0.5761036 0.4693331 0.36222738]] [[ 0.47497177 0.5666656 0.6383052 ... -0.27617705 0.6591644 -0.44285625] [-0.60283947 0.73853236 1.255591 ... -0.8315863 -0.18046674 0.49177918] [-0.2350577 0.36766034 -0.8714182 ... 0.96220624 0.7382939 -0.29037586] ... [-0.19090834 -0.8470843 0.3119486 ... 0.08941232 -0.9161326 0.99510854] [ 1.7731361 -1.007954 -0.37525058 ... 1.5484846 0.73584175 -0.62064606] [ 0.99037427 1.7470994 -1.3920045 ... -1.6528425 -0.0482593 -1.6119183 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClamp::test_clamp[ ie_device:CPU - precision:FP32 - as_tensors:False - minimum:100 - maximum:200 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_380.aten_clamp, %x.1 : Tensor): %self.max : int = prim::Constant[value=200]() %self.min : int = prim::Constant[value=100]() %4 : Tensor = aten::clamp(%x.1, %self.min, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:27:23 return (%4) fw_re: [[[[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]] [[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]] [[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]]]]; ov_res: [[[[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]] [[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]] [[100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] ... [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.] [100. 100. 100. ... 100. 100. 100.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMin::test_clamp_min[ ie_device:CPU - precision:FP32 - as_tensor:True - minimum:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_383.aten_clamp_min, %x.1 : Tensor): %self.min : Float(requires_grad=0, device=cpu) = prim::Constant[value={0}]() %3 : Tensor = aten::clamp_min(%x.1, %self.min) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:58:23 return (%3) fw_re: [[[[0. 0.8101866 1.6455017 ... 0.5119735 0.7350155 0.36836177] [0. 0.74563193 1.0088687 ... 0.1332103 0. 0.927997 ] [0. 0. 0.01584637 ... 1.26083 0. 2.009859 ] ... [0. 1.589888 0.202993 ... 1.3315891 0. 1.9975455 ] [0. 0.43142363 0. ... 0.43235293 0.27535984 0.36457422] [0. 0.66749567 0.59363514 ... 0. 0. 0.3951953 ]] [[0.5869697 2.0399222 0. ... 0.6116265 1.3321385 1.3869331 ] [0.8798508 1.0216578 0. ... 0.6007739 1.1561568 0. ] [0. 0.39237422 3.334534 ... 0. 0. 0. ] ... [1.1503274 0. 1.5219141 ... 0.35066476 0. 0. ] [0. 0.56338716 0.1697398 ... 0. 0.03926974 0.3943366 ] [0.32843816 0.36747053 0. ... 0. 0.287214 0.46803418]] [[0. 0. 0. ... 0.5056607 1.0215373 1.1916617 ] [0.5279885 0.19515273 0.6644836 ... 1.2332833 0. 0. ] [0. 0. 0. ... 0. 0. 0.2895371 ] ... [0. 0. 0. ... 0. 0. 1.1777519 ] [0.7879362 0. 0. ... 0.45335457 0.64989775 1.9872121 ] [0.3201575 0. 0.23136654 ... 0. 1.21118 1.1160853 ]]]]; ov_res: [[[[0. 0.8101866 1.6455017 ... 0.5119735 0.7350155 0.36836177] [0. 0.74563193 1.0088687 ... 0.1332103 0. 0.927997 ] [0. 0. 0.01584637 ... 1.26083 0. 2.009859 ] ... [0. 1.589888 0.202993 ... 1.3315891 0. 1.9975455 ] [0. 0.43142363 0. ... 0.43235293 0.27535984 0.36457422] [0. 0.66749567 0.59363514 ... 0. 0. 0.3951953 ]] [[0.5869697 2.0399222 0. ... 0.6116265 1.3321385 1.3869331 ] [0.8798508 1.0216578 0. ... 0.6007739 1.1561568 0. ] [0. 0.39237422 3.334534 ... 0. 0. 0. ] ... [1.1503274 0. 1.5219141 ... 0.35066476 0. 0. ] [0. 0.56338716 0.1697398 ... 0. 0.03926974 0.3943366 ] [0.32843816 0.36747053 0. ... 0. 0.287214 0.46803418]] [[0. 0. 0. ... 
0.5056607 1.0215373 1.1916617 ] [0.5279885 0.19515273 0.6644836 ... 1.2332833 0. 0. ] [0. 0. 0. ... 0. 0. 0.2895371 ] ... [0. 0. 0. ... 0. 0. 1.1777519 ] [0.7879362 0. 0. ... 0.45335457 0.64989775 1.9872121 ] [0.3201575 0. 0.23136654 ... 0. 1.21118 1.1160853 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMin::test_clamp_min[ ie_device:CPU - precision:FP32 - as_tensor:True - minimum:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_385.aten_clamp_min, %x.1 : Tensor): %self.min : Float(requires_grad=0, device=cpu) = prim::Constant[value={1}]() %3 : Tensor = aten::clamp_min(%x.1, %self.min) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:58:23 return (%3) fw_re: [[[[1. 1. 2.1887438 ... 1. 1. 1. ] [1. 1. 1.6675946 ... 1.3779863 1. 1. ] [1. 1. 1. ... 1.0390078 1. 1. ] ... [1.5451713 1. 1. ... 1. 1. 1. ] [1. 1. 1.4362417 ... 1. 1. 1.6639086] [1.1126695 1. 1. ... 1. 1. 1. ]] [[1. 1. 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 2.28861 1.5112942] [1.0498741 1. 1. ... 1.4686002 1. 1. ] ... [1. 1.0784137 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 1. 1. ] [1.3804028 1. 1. ... 1. 1. 2.0049603]] [[1. 1. 1. ... 1.4410217 1. 1. ] [1. 1. 1. ... 1. 1.2632182 1. ] [1. 1. 1. ... 1. 2.043381 1. ] ... [1.2837253 1. 1. ... 1. 1. 1.9634188] [1. 1. 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 1. 1.1417798]]]]; ov_res: [[[[1. 1. 2.1887438 ... 1. 1. 1. ] [1. 1. 1.6675946 ... 1.3779863 1. 1. ] [1. 1. 1. ... 1.0390078 1. 1. ] ... [1.5451713 1. 1. ... 1. 1. 1. ] [1. 1. 1.4362417 ... 1. 1. 1.6639086] [1.1126695 1. 1. ... 1. 1. 1. ]] [[1. 1. 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 2.28861 1.5112942] [1.0498741 1. 1. ... 1.4686002 1. 1. ] ... [1. 1.0784137 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 1. 1. ] [1.3804028 1. 1. ... 1. 1. 2.0049603]] [[1. 1. 1. ... 1.4410217 1. 1. ] [1. 1. 1. ... 1. 1.2632182 1. ] [1. 1. 1. ... 1. 2.043381 1. ] ... [1.2837253 1. 1. ... 1. 1. 1.9634188] [1. 1. 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 1. 1.1417798]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMin::test_clamp_min[ ie_device:CPU - precision:FP32 - as_tensor:True - minimum:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_387.aten_clamp_min, %x.1 : Tensor): %self.min : Float(requires_grad=0, device=cpu) = prim::Constant[value={-1}]() %3 : Tensor = aten::clamp_min(%x.1, %self.min) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:58:23 return (%3) fw_re: [[[[-1. -0.10339168 1.3453937 ... 0.31532493 0.1921799 -0.27889916] [-0.44581535 0.7607128 0.76772296 ... 1.3421855 -0.64798695 2.162374 ] [ 0.21401311 -1. -0.91275537 ... -0.01420969 -1. -1. ] ... [-0.09618142 -0.5495536 -1. ... -1. 0.34312195 -0.24051787] [-1. -1. 0.72154045 ... -1. 0.8441503 0.6519235 ] [ 1.473301 0.23102435 0.18564467 ... -1. -0.26746342 0.6640351 ]] [[-0.61062217 -0.6387085 0.3287861 ... -0.2132053 0.0113219 -0.24052325] [ 0.42072392 0.37607357 -0.66449445 ... -0.01703559 1.3823085 0.50671744] [-0.09165373 -0.52595603 -0.48620188 ... -0.1833882 0.33675727 -0.34896085] ... [-1. 2.1282864 -0.14927457 ... -0.25821245 0.35955372 -0.1886488 ] [-0.44075078 -0.9450367 1.8627794 ... 0.37798342 0.39417708 -0.6678747 ] [-1. -0.3638511 0.2830737 ... -0.6639321 -0.8591191 -1. ]] [[-0.649534 0.7152944 0.01287424 ... -1. -1. -1. ] [ 1.4320798 -0.45036432 1.5463417 ... -0.2974379 1.0038236 1.3905432 ] [-1. -0.3164564 0.5006338 ... 0.06357373 0.8809175 -1. ] ... [-0.13593885 -1. 0.51964015 ... 1.151239 -0.8860299 1.3161824 ] [ 2.8597505 0.8416117 -0.3272187 ... 0.75490296 -1. 0.6989394 ] [-0.9654292 0.20716749 0.42427143 ... -0.63604796 -0.49411288 -0.6524668 ]]]]; ov_res: [[[[-1. -0.10339168 1.3453937 ... 0.31532493 0.1921799 -0.27889916] [-0.44581535 0.7607128 0.76772296 ... 1.3421855 -0.64798695 2.162374 ] [ 0.21401311 -1. -0.91275537 ... -0.01420969 -1. -1. ] ... [-0.09618142 -0.5495536 -1. ... -1. 0.34312195 -0.24051787] [-1. -1. 0.72154045 ... -1. 0.8441503 0.6519235 ] [ 1.473301 0.23102435 0.18564467 ... -1. -0.26746342 0.6640351 ]] [[-0.61062217 -0.6387085 0.3287861 ... 
-0.2132053 0.0113219 -0.24052325] [ 0.42072392 0.37607357 -0.66449445 ... -0.01703559 1.3823085 0.50671744] [-0.09165373 -0.52595603 -0.48620188 ... -0.1833882 0.33675727 -0.34896085] ... [-1. 2.1282864 -0.14927457 ... -0.25821245 0.35955372 -0.1886488 ] [-0.44075078 -0.9450367 1.8627794 ... 0.37798342 0.39417708 -0.6678747 ] [-1. -0.3638511 0.2830737 ... -0.6639321 -0.8591191 -1. ]] [[-0.649534 0.7152944 0.01287424 ... -1. -1. -1. ] [ 1.4320798 -0.45036432 1.5463417 ... -0.2974379 1.0038236 1.3905432 ] [-1. -0.3164564 0.5006338 ... 0.06357373 0.8809175 -1. ] ... [-0.13593885 -1. 0.51964015 ... 1.151239 -0.8860299 1.3161824 ] [ 2.8597505 0.8416117 -0.3272187 ... 0.75490296 -1. 0.6989394 ] [-0.9654292 0.20716749 0.42427143 ... -0.63604796 -0.49411288 -0.6524668 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMin::test_clamp_min[ ie_device:CPU - precision:FP32 - as_tensor:True - minimum:0.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_389.aten_clamp_min, %x.1 : Tensor): %self.min : Float(requires_grad=0, device=cpu) = prim::Constant[value={0.5}]() %3 : Tensor = aten::clamp_min(%x.1, %self.min) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:58:23 return (%3) fw_re: [[[[1.166072 1.2042778 0.5 ... 0.5 0.5 0.5 ] [0.5 0.5 0.5 ... 0.63816684 0.5 0.97606486] [0.5 0.989522 0.5 ... 0.5959499 1.022169 0.5 ] ... [0.5 0.5 0.5 ... 0.5 1.1248785 0.5 ] [0.5 0.5 1.5654159 ... 0.81793326 0.5 1.4460161 ] [0.5 0.91640997 0.5 ... 0.7914269 0.5 0.5 ]] [[0.5 0.5 0.5 ... 0.5 0.5 0.5 ] [0.5 1.4887209 0.5 ... 0.5 0.5 0.5 ] [1.9105654 0.5 0.5 ... 0.5 0.5 1.518609 ] ... [0.7444634 0.5 0.5 ... 0.5 0.5 0.5 ] [0.7675932 1.8858024 1.4484777 ... 0.5 0.5 0.5 ] [1.6654903 0.5 0.5 ... 1.070388 1.0479577 1.2374586 ]] [[0.5 0.5 0.5 ... 0.6935115 0.9569494 0.5 ] [0.5 0.5 0.5 ... 0.5 1.1320817 0.5 ] [0.5 0.5 0.5 ... 0.5 0.5 0.5 ] ... [0.5 0.5 0.5 ... 0.5 1.8371304 0.5 ] [2.397642 0.5 0.7957081 ... 0.5729602 0.5 0.5 ] [0.5 0.5 0.5 ... 0.5 0.5 0.9360495 ]]]]; ov_res: [[[[1.166072 1.2042778 0.5 ... 0.5 0.5 0.5 ] [0.5 0.5 0.5 ... 0.63816684 0.5 0.97606486] [0.5 0.989522 0.5 ... 0.5959499 1.022169 0.5 ] ... [0.5 0.5 0.5 ... 0.5 1.1248785 0.5 ] [0.5 0.5 1.5654159 ... 0.81793326 0.5 1.4460161 ] [0.5 0.91640997 0.5 ... 0.7914269 0.5 0.5 ]] [[0.5 0.5 0.5 ... 0.5 0.5 0.5 ] [0.5 1.4887209 0.5 ... 0.5 0.5 0.5 ] [1.9105654 0.5 0.5 ... 0.5 0.5 1.518609 ] ... [0.7444634 0.5 0.5 ... 0.5 0.5 0.5 ] [0.7675932 1.8858024 1.4484777 ... 0.5 0.5 0.5 ] [1.6654903 0.5 0.5 ... 1.070388 1.0479577 1.2374586 ]] [[0.5 0.5 0.5 ... 0.6935115 0.9569494 0.5 ] [0.5 0.5 0.5 ... 0.5 1.1320817 0.5 ] [0.5 0.5 0.5 ... 0.5 0.5 0.5 ] ... [0.5 0.5 0.5 ... 0.5 1.8371304 0.5 ] [2.397642 0.5 0.7957081 ... 0.5729602 0.5 0.5 ] [0.5 0.5 0.5 ... 0.5 0.5 0.9360495 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMin::test_clamp_min[ ie_device:CPU - precision:FP32 - as_tensor:False - minimum:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_391.aten_clamp_min, %x.1 : Tensor): %self.min : float = prim::Constant[value=0.]() %3 : Tensor = aten::clamp_min(%x.1, %self.min) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:58:23 return (%3) fw_re: [[[[0. 0.19895318 0. ... 0. 0. 1.9220601 ] [0. 1.0860821 0. ... 1.4748325 1.0081521 0. ] [0.42931923 2.1848304 0.42788005 ... 0. 0. 0.01496043] ... [0. 0.14289619 0.37750146 ... 0.68022937 0. 0. ] [0.8259848 0. 0. ... 1.0765355 0.7950486 0.6811543 ] [0. 0. 0.7400017 ... 2.4804678 0. 2.1353872 ]] [[0. 1.1816138 0. ... 0. 0.1641538 0. ] [0.37582916 2.2054265 0. ... 0. 0. 0.9889123 ] [0. 0. 1.4981813 ... 0. 0. 0. ] ... [0.13937087 1.6020483 0.9249805 ... 0. 0.1531178 0. ] [0. 0. 0. ... 0. 0. 0. ] [1.339761 0. 2.2419586 ... 1.237899 0.06988762 0. ]] [[0. 0.03388905 0.20909595 ... 0. 0. 1.3232328 ] [0. 1.4899256 0.78911877 ... 0. 0. 1.4360759 ] [0. 0. 1.5288156 ... 0. 0. 0.38756478] ... [0. 0.06940528 1.97756 ... 0. 0. 0. ] [0.7852879 0.79091924 0. ... 0. 0.08548525 0.295106 ] [0.09318442 0. 0. ... 1.9423363 1.7718027 0. ]]]]; ov_res: [[[[0. 0.19895318 0. ... 0. 0. 1.9220601 ] [0. 1.0860821 0. ... 1.4748325 1.0081521 0. ] [0.42931923 2.1848304 0.42788005 ... 0. 0. 0.01496043] ... [0. 0.14289619 0.37750146 ... 0.68022937 0. 0. ] [0.8259848 0. 0. ... 1.0765355 0.7950486 0.6811543 ] [0. 0. 0.7400017 ... 2.4804678 0. 2.1353872 ]] [[0. 1.1816138 0. ... 0. 0.1641538 0. ] [0.37582916 2.2054265 0. ... 0. 0. 0.9889123 ] [0. 0. 1.4981813 ... 0. 0. 0. ] ... [0.13937087 1.6020483 0.9249805 ... 0. 0.1531178 0. ] [0. 0. 0. ... 0. 0. 0. ] [1.339761 0. 2.2419586 ... 1.237899 0.06988762 0. ]] [[0. 0.03388905 0.20909595 ... 0. 0. 1.3232328 ] [0. 1.4899256 0.78911877 ... 0. 0. 1.4360759 ] [0. 0. 1.5288156 ... 0. 0. 0.38756478] ... [0. 0.06940528 1.97756 ... 0. 0. 0. ] [0.7852879 0.79091924 0. ... 0. 0.08548525 0.295106 ] [0.09318442 0. 0. 
... 1.9423363 1.7718027 0. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMin::test_clamp_min[ ie_device:CPU - precision:FP32 - as_tensor:False - minimum:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_393.aten_clamp_min, %x.1 : Tensor): %self.min : float = prim::Constant[value=1.]() %3 : Tensor = aten::clamp_min(%x.1, %self.min) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:58:23 return (%3) fw_re: [[[[1. 1. 1. ... 1. 1. 1. ] [1. 1.2593993 1. ... 1. 1. 1. ] [2.0986345 1. 1. ... 2.663121 1.6565667 1. ] ... [1. 1.515148 1. ... 1. 1. 1.7446347] [1. 1.5826936 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 1.5222018 1. ]] [[1. 1. 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 1. 2.6230428] [1. 1. 1. ... 1. 1. 1. ] ... [1. 1. 1. ... 1. 1. 1.7922782] [1.6724848 1. 1. ... 1. 1. 1. ] [1. 1. 1. ... 1.435441 1. 1. ]] [[1. 1. 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 1. 1.5184374] [1. 1. 1. ... 1. 1. 1. ] ... [1. 1. 1. ... 1.4192255 1. 1. ] [1. 1. 1. ... 1.1912681 1. 1. ] [1. 1. 1. ... 1. 1. 1. ]]]]; ov_res: [[[[1. 1. 1. ... 1. 1. 1. ] [1. 1.2593993 1. ... 1. 1. 1. ] [2.0986345 1. 1. ... 2.663121 1.6565667 1. ] ... [1. 1.515148 1. ... 1. 1. 1.7446347] [1. 1.5826936 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 1.5222018 1. ]] [[1. 1. 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 1. 2.6230428] [1. 1. 1. ... 1. 1. 1. ] ... [1. 1. 1. ... 1. 1. 1.7922782] [1.6724848 1. 1. ... 1. 1. 1. ] [1. 1. 1. ... 1.435441 1. 1. ]] [[1. 1. 1. ... 1. 1. 1. ] [1. 1. 1. ... 1. 1. 1.5184374] [1. 1. 1. ... 1. 1. 1. ] ... [1. 1. 1. ... 1.4192255 1. 1. ] [1. 1. 1. ... 1.1912681 1. 1. ] [1. 1. 1. ... 1. 1. 1. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMin::test_clamp_min[ ie_device:CPU - precision:FP32 - as_tensor:False - minimum:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_395.aten_clamp_min, %x.1 : Tensor): %self.min : float = prim::Constant[value=-1.]() %3 : Tensor = aten::clamp_min(%x.1, %self.min) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:58:23 return (%3) fw_re: [[[[-1. -0.6719451 -0.17446586 ... -0.61473745 -1. 2.2565217 ] [-0.8473571 -0.76949775 -0.3016073 ... -1. -0.08211308 -0.01038196] [ 0.6010245 -0.06000487 -0.85664034 ... 0.2702727 0.5143319 0.4235172 ] ... [-0.631449 -1. -1. ... 1.4233164 1.6120203 -0.03485565] [-0.06479163 0.52440745 -1. ... -1. 1.024842 -1. ] [ 0.77270997 -1. 0.04756466 ... 1.7119586 -0.6845314 1.2296788 ]] [[ 0.34341323 -0.03891598 -0.28789076 ... 0.45130503 -1. 0.32787555] [-0.08431074 -0.76092416 0.92311877 ... 1.2663416 -1. -1. ] [-1. 0.87568384 0.2739713 ... -0.88594764 -0.53232455 0.63088065] ... [-1. 0.631833 -1. ... 0.03478754 -0.35453498 -0.33986926] [-0.10549006 -1. 0.06439488 ... 1.3543785 -0.36822942 -0.1105044 ] [-0.24509484 -0.8329766 0.54979986 ... -0.920162 1.0968577 1.9213134 ]] [[-1. 1.3783052 -1. ... -0.88448024 0.49409375 -0.9170564 ] [ 2.2229662 0.2684888 -0.37534285 ... -1. -0.19950679 1.6348065 ] [ 0.2782579 1.3212923 -0.04264905 ... -0.5508789 0.48484662 -1. ] ... [ 1.2847213 -0.68656844 1.8810997 ... 1.1288451 0.16064791 0.47033638] [ 1.5079751 0.07030354 0.18270549 ... -0.96954477 -1. 1.7533839 ] [-0.53792757 0.5035937 -0.9977107 ... 0.04171931 -1. -0.12797916]]]]; ov_res: [[[[-1. -0.6719451 -0.17446586 ... -0.61473745 -1. 2.2565217 ] [-0.8473571 -0.76949775 -0.3016073 ... -1. -0.08211308 -0.01038196] [ 0.6010245 -0.06000487 -0.85664034 ... 0.2702727 0.5143319 0.4235172 ] ... [-0.631449 -1. -1. ... 1.4233164 1.6120203 -0.03485565] [-0.06479163 0.52440745 -1. ... -1. 1.024842 -1. ] [ 0.77270997 -1. 0.04756466 ... 1.7119586 -0.6845314 1.2296788 ]] [[ 0.34341323 -0.03891598 -0.28789076 ... 0.45130503 -1. 
0.32787555] [-0.08431074 -0.76092416 0.92311877 ... 1.2663416 -1. -1. ] [-1. 0.87568384 0.2739713 ... -0.88594764 -0.53232455 0.63088065] ... [-1. 0.631833 -1. ... 0.03478754 -0.35453498 -0.33986926] [-0.10549006 -1. 0.06439488 ... 1.3543785 -0.36822942 -0.1105044 ] [-0.24509484 -0.8329766 0.54979986 ... -0.920162 1.0968577 1.9213134 ]] [[-1. 1.3783052 -1. ... -0.88448024 0.49409375 -0.9170564 ] [ 2.2229662 0.2684888 -0.37534285 ... -1. -0.19950679 1.6348065 ] [ 0.2782579 1.3212923 -0.04264905 ... -0.5508789 0.48484662 -1. ] ... [ 1.2847213 -0.68656844 1.8810997 ... 1.1288451 0.16064791 0.47033638] [ 1.5079751 0.07030354 0.18270549 ... -0.96954477 -1. 1.7533839 ] [-0.53792757 0.5035937 -0.9977107 ... 0.04171931 -1. -0.12797916]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMin::test_clamp_min[ ie_device:CPU - precision:FP32 - as_tensor:False - minimum:0.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_397.aten_clamp_min, %x.1 : Tensor): %self.min : float = prim::Constant[value=0.5]() %3 : Tensor = aten::clamp_min(%x.1, %self.min) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:58:23 return (%3) fw_re: [[[[0.5 0.5 0.85735536 ... 0.5 2.0325012 1.3402885 ] [0.5 0.5 0.5 ... 1.3365679 0.8695136 0.5 ] [0.5 1.5341735 0.5 ... 0.8604813 0.6884777 0.62858236] ... [0.5 0.5 0.5 ... 0.5 0.5 0.807226 ] [0.5 0.5 0.5 ... 0.5 0.5 0.5 ] [2.2673843 0.5 0.5 ... 0.5 0.6715466 0.5185664 ]] [[0.5 0.5 0.5 ... 0.5 1.2480853 1.0802628 ] [0.5 0.5 1.030132 ... 0.5 1.418237 0.5 ] [0.90734345 0.5 0.5 ... 0.5 0.5 1.5152112 ] ... [0.7625911 0.5 0.5 ... 0.5 0.5 1.5978491 ] [0.5021981 0.5 0.5 ... 0.5454616 0.55656356 0.5 ] [0.5 0.5 0.5 ... 0.5534253 0.5 1.3308692 ]] [[0.5 2.245123 0.5 ... 1.4484245 0.5 0.5 ] [1.5074831 0.5 0.5 ... 0.5 1.1435778 0.5 ] [0.6279954 0.5 1.0618883 ... 0.5 0.8717451 0.5 ] ... [0.5 0.5342241 0.5 ... 0.5 0.5 1.8808117 ] [0.5 0.5 0.5 ... 1.4204634 0.5 0.5 ] [0.5 0.5 0.5 ... 0.5 0.5 0.8714015 ]]]]; ov_res: [[[[0.5 0.5 0.85735536 ... 0.5 2.0325012 1.3402885 ] [0.5 0.5 0.5 ... 1.3365679 0.8695136 0.5 ] [0.5 1.5341735 0.5 ... 0.8604813 0.6884777 0.62858236] ... [0.5 0.5 0.5 ... 0.5 0.5 0.807226 ] [0.5 0.5 0.5 ... 0.5 0.5 0.5 ] [2.2673843 0.5 0.5 ... 0.5 0.6715466 0.5185664 ]] [[0.5 0.5 0.5 ... 0.5 1.2480853 1.0802628 ] [0.5 0.5 1.030132 ... 0.5 1.418237 0.5 ] [0.90734345 0.5 0.5 ... 0.5 0.5 1.5152112 ] ... [0.7625911 0.5 0.5 ... 0.5 0.5 1.5978491 ] [0.5021981 0.5 0.5 ... 0.5454616 0.55656356 0.5 ] [0.5 0.5 0.5 ... 0.5534253 0.5 1.3308692 ]] [[0.5 2.245123 0.5 ... 1.4484245 0.5 0.5 ] [1.5074831 0.5 0.5 ... 0.5 1.1435778 0.5 ] [0.6279954 0.5 1.0618883 ... 0.5 0.8717451 0.5 ] ... [0.5 0.5342241 0.5 ... 0.5 0.5 1.8808117 ] [0.5 0.5 0.5 ... 1.4204634 0.5 0.5 ] [0.5 0.5 0.5 ... 
0.5 0.5 0.8714015 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMax::test_clamp[ ie_device:CPU - precision:FP32 - as_tensor:True - maximum:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_398.aten_clamp_max, %x.1 : Tensor): %self.max : Float(requires_grad=0, device=cpu) = prim::Constant[value={0}]() %3 : Tensor = aten::clamp_max(%x.1, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:84:23 return (%3) fw_re: [[[[ 0.0000000e+00 0.0000000e+00 -9.5956188e-01 ... -9.9226665e-01 0.0000000e+00 0.0000000e+00] [-1.5541688e-01 0.0000000e+00 0.0000000e+00 ... -2.8457448e-01 -8.3942813e-01 0.0000000e+00] [-2.2074292e+00 -1.0917175e+00 -1.0174474e+00 ... -1.8900368e-01 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 -4.5785704e-01 0.0000000e+00] [-4.1756326e-01 0.0000000e+00 0.0000000e+00 ... -2.4398349e-02 0.0000000e+00 -1.1771885e+00] [ 0.0000000e+00 0.0000000e+00 -6.6449322e-02 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[-1.7173830e+00 -2.7127832e-01 0.0000000e+00 ... -6.0174298e-01 -5.3725505e-01 0.0000000e+00] [-1.0124056e+00 0.0000000e+00 0.0000000e+00 ... -1.5215058e+00 0.0000000e+00 -3.0841526e-01] [-1.4133774e+00 -1.3370676e-02 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 -1.0073773e+00] ... [-2.5472448e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 -2.5612122e-01 -1.9005969e+00] [-1.6912855e+00 -6.5812105e-01 0.0000000e+00 ... -1.5750542e-01 0.0000000e+00 -7.6326537e-01] [ 0.0000000e+00 -7.1225518e-01 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 -7.8838134e-01 0.0000000e+00 ... 0.0000000e+00 -2.3847975e-01 0.0000000e+00] [ 0.0000000e+00 -1.8148124e+00 -5.0300485e-01 ... -5.3900975e-01 -1.0622673e+00 0.0000000e+00] [-1.3371139e+00 -1.5096208e+00 -1.4597985e+00 ... -7.4101344e-04 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 -1.0702971e+00 -5.2031219e-01 ... -2.4894865e-01 0.0000000e+00 -6.4885491e-01] [ 0.0000000e+00 -6.6438085e-01 0.0000000e+00 ... 
-3.6099645e-01 -7.7767903e-01 0.0000000e+00] [ 0.0000000e+00 -8.5176855e-01 0.0000000e+00 ... -1.2752698e-01 -2.4138930e-01 -8.7199354e-01]]]]; ov_res: [[[[ 0.0000000e+00 0.0000000e+00 -9.5956188e-01 ... -9.9226665e-01 0.0000000e+00 0.0000000e+00] [-1.5541688e-01 0.0000000e+00 0.0000000e+00 ... -2.8457448e-01 -8.3942813e-01 0.0000000e+00] [-2.2074292e+00 -1.0917175e+00 -1.0174474e+00 ... -1.8900368e-01 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 -4.5785704e-01 0.0000000e+00] [-4.1756326e-01 0.0000000e+00 0.0000000e+00 ... -2.4398349e-02 0.0000000e+00 -1.1771885e+00] [ 0.0000000e+00 0.0000000e+00 -6.6449322e-02 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[-1.7173830e+00 -2.7127832e-01 0.0000000e+00 ... -6.0174298e-01 -5.3725505e-01 0.0000000e+00] [-1.0124056e+00 0.0000000e+00 0.0000000e+00 ... -1.5215058e+00 0.0000000e+00 -3.0841526e-01] [-1.4133774e+00 -1.3370676e-02 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 -1.0073773e+00] ... [-2.5472448e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 -2.5612122e-01 -1.9005969e+00] [-1.6912855e+00 -6.5812105e-01 0.0000000e+00 ... -1.5750542e-01 0.0000000e+00 -7.6326537e-01] [ 0.0000000e+00 -7.1225518e-01 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 -7.8838134e-01 0.0000000e+00 ... 0.0000000e+00 -2.3847975e-01 0.0000000e+00] [ 0.0000000e+00 -1.8148124e+00 -5.0300485e-01 ... -5.3900975e-01 -1.0622673e+00 0.0000000e+00] [-1.3371139e+00 -1.5096208e+00 -1.4597985e+00 ... -7.4101344e-04 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 -1.0702971e+00 -5.2031219e-01 ... -2.4894865e-01 0.0000000e+00 -6.4885491e-01] [ 0.0000000e+00 -6.6438085e-01 0.0000000e+00 ... -3.6099645e-01 -7.7767903e-01 0.0000000e+00] [ 0.0000000e+00 -8.5176855e-01 0.0000000e+00 ... -1.2752698e-01 -2.4138930e-01 -8.7199354e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMax::test_clamp[ ie_device:CPU - precision:FP32 - as_tensor:True - maximum:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_400.aten_clamp_max, %x.1 : Tensor): %self.max : Float(requires_grad=0, device=cpu) = prim::Constant[value={1}]() %3 : Tensor = aten::clamp_max(%x.1, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:84:23 return (%3) fw_re: [[[[ 9.0364045e-01 1.0000000e+00 -3.5953903e-01 ... -4.3629152e-01 3.0063027e-01 -5.2771932e-01] [-7.1154636e-01 -2.2436302e+00 -8.1760478e-01 ... 1.3895445e-01 1.8650347e-01 1.0000000e+00] [ 1.0000000e+00 -3.2733716e-02 -2.3912284e+00 ... 3.4242123e-01 7.9666692e-01 1.0000000e+00] ... [ 1.0000000e+00 1.0000000e+00 -9.8392296e-01 ... -1.3357795e+00 1.7728861e-01 2.7010623e-01] [-1.6864133e+00 1.0000000e+00 1.0000000e+00 ... -5.4156971e-01 3.0917913e-01 1.0000000e+00] [ 8.2383621e-01 -6.4886743e-01 -7.0626959e-02 ... -6.1270732e-01 7.3182559e-01 -1.5292083e+00]] [[ 2.6627043e-01 -5.7622582e-01 -2.1634975e-03 ... -9.1196132e-01 -8.7147486e-01 8.4038955e-01] [ 8.2090294e-01 -7.1471584e-01 1.0000000e+00 ... -2.8728634e-01 -4.7838092e-01 7.5826800e-01] [ 1.0000000e+00 1.0000000e+00 -7.3868543e-01 ... -3.9033160e-02 -1.3576277e+00 2.2634549e-01] ... [-1.1640475e+00 8.6273217e-01 -6.6601926e-01 ... 5.7986641e-01 8.0029595e-01 5.1695383e-01] [-3.5358208e-01 -1.1699001e+00 1.0000000e+00 ... 8.8356382e-01 1.0000000e+00 3.5999867e-01] [ 2.6915833e-01 -1.0214833e+00 -2.6220527e+00 ... -7.6310462e-01 -4.5720673e-01 -9.1112530e-01]] [[ 1.0000000e+00 6.8766272e-01 8.7359369e-02 ... 1.0000000e+00 -5.7802331e-02 8.6623523e-03] [-7.7836633e-01 4.8755869e-02 5.9889191e-01 ... -4.5011202e-01 -1.3931010e+00 1.2751015e-01] [-9.9697822e-01 1.0000000e+00 -2.7972424e-01 ... -6.7801511e-01 -1.2599071e+00 -1.8816763e-01] ... [ 2.6206946e-01 -2.6305739e-02 -1.1416603e+00 ... 4.5447066e-01 -1.7860140e-01 -4.9106050e-01] [ 1.0000000e+00 2.9695758e-01 1.0000000e+00 ... 
-1.6640017e+00 1.0000000e+00 8.8371730e-01] [-5.5796742e-01 9.4769914e-03 1.9402514e-01 ... -4.0248963e-01 -3.5670924e-01 8.1859082e-01]]]]; ov_res: [[[[ 9.0364045e-01 1.0000000e+00 -3.5953903e-01 ... -4.3629152e-01 3.0063027e-01 -5.2771932e-01] [-7.1154636e-01 -2.2436302e+00 -8.1760478e-01 ... 1.3895445e-01 1.8650347e-01 1.0000000e+00] [ 1.0000000e+00 -3.2733716e-02 -2.3912284e+00 ... 3.4242123e-01 7.9666692e-01 1.0000000e+00] ... [ 1.0000000e+00 1.0000000e+00 -9.8392296e-01 ... -1.3357795e+00 1.7728861e-01 2.7010623e-01] [-1.6864133e+00 1.0000000e+00 1.0000000e+00 ... -5.4156971e-01 3.0917913e-01 1.0000000e+00] [ 8.2383621e-01 -6.4886743e-01 -7.0626959e-02 ... -6.1270732e-01 7.3182559e-01 -1.5292083e+00]] [[ 2.6627043e-01 -5.7622582e-01 -2.1634975e-03 ... -9.1196132e-01 -8.7147486e-01 8.4038955e-01] [ 8.2090294e-01 -7.1471584e-01 1.0000000e+00 ... -2.8728634e-01 -4.7838092e-01 7.5826800e-01] [ 1.0000000e+00 1.0000000e+00 -7.3868543e-01 ... -3.9033160e-02 -1.3576277e+00 2.2634549e-01] ... [-1.1640475e+00 8.6273217e-01 -6.6601926e-01 ... 5.7986641e-01 8.0029595e-01 5.1695383e-01] [-3.5358208e-01 -1.1699001e+00 1.0000000e+00 ... 8.8356382e-01 1.0000000e+00 3.5999867e-01] [ 2.6915833e-01 -1.0214833e+00 -2.6220527e+00 ... -7.6310462e-01 -4.5720673e-01 -9.1112530e-01]] [[ 1.0000000e+00 6.8766272e-01 8.7359369e-02 ... 1.0000000e+00 -5.7802331e-02 8.6623523e-03] [-7.7836633e-01 4.8755869e-02 5.9889191e-01 ... -4.5011202e-01 -1.3931010e+00 1.2751015e-01] [-9.9697822e-01 1.0000000e+00 -2.7972424e-01 ... -6.7801511e-01 -1.2599071e+00 -1.8816763e-01] ... [ 2.6206946e-01 -2.6305739e-02 -1.1416603e+00 ... 4.5447066e-01 -1.7860140e-01 -4.9106050e-01] [ 1.0000000e+00 2.9695758e-01 1.0000000e+00 ... -1.6640017e+00 1.0000000e+00 8.8371730e-01] [-5.5796742e-01 9.4769914e-03 1.9402514e-01 ... -4.0248963e-01 -3.5670924e-01 8.1859082e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMax::test_clamp[ ie_device:CPU - precision:FP32 - as_tensor:True - maximum:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_402.aten_clamp_max, %x.1 : Tensor): %self.max : Float(requires_grad=0, device=cpu) = prim::Constant[value={-1}]() %3 : Tensor = aten::clamp_max(%x.1, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:84:23 return (%3) fw_re: [[[[-1. -1. -1. ... -1. -1. -1.496274 ] [-1. -1. -1. ... -1. -1. -1. ] [-1. -1. -1.0870152 ... -1.0470345 -1. -1. ] ... [-1.1653339 -1. -1. ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1.0061305 -1. ]] [[-1. -1.1536517 -1. ... -1. -1. -1. ] [-1.1703974 -1. -1.0358781 ... -1. -1. -1. ] [-1. -1. -1. ... -1.5182701 -1. -1. ] ... [-1. -1. -1. ... -1. -1. -1. ] [-1. -2.1999142 -2.2030597 ... -2.1186569 -1.1124148 -1. ] [-1. -1. -1.1463355 ... -2.3532183 -1. -1. ]] [[-1. -1. -1.8604605 ... -1. -1. -1. ] [-1. -1.5465537 -1. ... -1. -1. -1. ] [-1.6437442 -1. -1. ... -1. -1. -1. ] ... [-1. -1. -1. ... -1.700379 -1. -1.5754906] [-1.0640414 -1. -1. ... -2.826961 -1. -1. ] [-1. -1. -1. ... -1.7926764 -1. -1. ]]]]; ov_res: [[[[-1. -1. -1. ... -1. -1. -1.496274 ] [-1. -1. -1. ... -1. -1. -1. ] [-1. -1. -1.0870152 ... -1.0470345 -1. -1. ] ... [-1.1653339 -1. -1. ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1.0061305 -1. ]] [[-1. -1.1536517 -1. ... -1. -1. -1. ] [-1.1703974 -1. -1.0358781 ... -1. -1. -1. ] [-1. -1. -1. ... -1.5182701 -1. -1. ] ... [-1. -1. -1. ... -1. -1. -1. ] [-1. -2.1999142 -2.2030597 ... -2.1186569 -1.1124148 -1. ] [-1. -1. -1.1463355 ... -2.3532183 -1. -1. ]] [[-1. -1. -1.8604605 ... -1. -1. -1. ] [-1. -1.5465537 -1. ... -1. -1. -1. ] [-1.6437442 -1. -1. ... -1. -1. -1. ] ... [-1. -1. -1. ... -1.700379 -1. -1.5754906] [-1.0640414 -1. -1. ... -2.826961 -1. -1. ] [-1. -1. -1. ... -1.7926764 -1. -1. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMax::test_clamp[ ie_device:CPU - precision:FP32 - as_tensor:True - maximum:0.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_404.aten_clamp_max, %x.1 : Tensor): %self.max : Float(requires_grad=0, device=cpu) = prim::Constant[value={0.5}]() %3 : Tensor = aten::clamp_max(%x.1, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:84:23 return (%3) fw_re: [[[[-8.5545093e-01 -8.5711688e-01 2.2430064e-01 ... 4.1306382e-01 -4.5756644e-01 -1.0170031e+00] [-1.9492926e-01 -6.0045132e-03 -4.6189085e-01 ... 5.0000000e-01 -4.5916167e-01 5.0000000e-01] [ 5.0000000e-01 -1.1539609e+00 5.0000000e-01 ... 5.0000000e-01 -9.1123290e-02 -4.7832227e-01] ... [-2.9095697e-01 -4.5869717e-01 5.0000000e-01 ... -8.5254572e-02 -1.3084327e+00 -1.7936764e+00] [-8.6635303e-01 -4.5917073e-01 5.0000000e-01 ... -6.9691408e-01 5.0000000e-01 5.0000000e-01] [ 5.0000000e-01 -1.0032303e+00 5.0000000e-01 ... -1.0490683e+00 -5.7307925e-02 5.0000000e-01]] [[-7.6075941e-01 -1.9230586e-01 -2.4779427e-01 ... -4.5561802e-02 5.0000000e-01 5.0000000e-01] [-2.3554775e-03 -9.6811187e-01 -4.7222078e-02 ... 5.0000000e-01 5.0000000e-01 -9.0652102e-01] [ 5.0000000e-01 -4.7161901e-01 5.0000000e-01 ... 1.0411958e-01 -5.5190140e-01 5.0000000e-01] ... [ 5.0000000e-01 5.0000000e-01 -1.2670493e+00 ... 5.0000000e-01 5.0000000e-01 -3.5069507e-01] [ 1.6819485e-01 -1.7461817e+00 -1.8035119e+00 ... 5.0000000e-01 -8.6518896e-01 5.0000000e-01] [-5.7438713e-01 5.0000000e-01 5.0000000e-01 ... -9.2428929e-01 -8.1704599e-01 -1.6181849e+00]] [[-7.2770876e-01 -1.2709790e+00 -1.9019970e+00 ... -1.2268033e+00 -9.9660857e-03 -6.5708530e-01] [ 5.0000000e-01 5.0000000e-01 5.0000000e-01 ... -4.5203526e-02 -3.2865005e+00 5.0000000e-01] [ 3.2342684e-01 5.0000000e-01 -5.3200233e-01 ... 5.0000000e-01 -1.3487877e+00 5.0000000e-01] ... [ 1.1918966e-01 -3.9094016e-01 1.7822480e-02 ... 1.8176399e-01 5.0000000e-01 -1.7031732e+00] [ 3.7100804e-01 6.3763954e-02 3.7406135e-01 ... 
-6.8452221e-01 5.0000000e-01 -1.5115808e-01] [ 5.0000000e-01 -1.4422955e-01 5.0000000e-01 ... 5.0000000e-01 -8.8946089e-02 -8.5188305e-01]]]]; ov_res: [[[[-8.5545093e-01 -8.5711688e-01 2.2430064e-01 ... 4.1306382e-01 -4.5756644e-01 -1.0170031e+00] [-1.9492926e-01 -6.0045132e-03 -4.6189085e-01 ... 5.0000000e-01 -4.5916167e-01 5.0000000e-01] [ 5.0000000e-01 -1.1539609e+00 5.0000000e-01 ... 5.0000000e-01 -9.1123290e-02 -4.7832227e-01] ... [-2.9095697e-01 -4.5869717e-01 5.0000000e-01 ... -8.5254572e-02 -1.3084327e+00 -1.7936764e+00] [-8.6635303e-01 -4.5917073e-01 5.0000000e-01 ... -6.9691408e-01 5.0000000e-01 5.0000000e-01] [ 5.0000000e-01 -1.0032303e+00 5.0000000e-01 ... -1.0490683e+00 -5.7307925e-02 5.0000000e-01]] [[-7.6075941e-01 -1.9230586e-01 -2.4779427e-01 ... -4.5561802e-02 5.0000000e-01 5.0000000e-01] [-2.3554775e-03 -9.6811187e-01 -4.7222078e-02 ... 5.0000000e-01 5.0000000e-01 -9.0652102e-01] [ 5.0000000e-01 -4.7161901e-01 5.0000000e-01 ... 1.0411958e-01 -5.5190140e-01 5.0000000e-01] ... [ 5.0000000e-01 5.0000000e-01 -1.2670493e+00 ... 5.0000000e-01 5.0000000e-01 -3.5069507e-01] [ 1.6819485e-01 -1.7461817e+00 -1.8035119e+00 ... 5.0000000e-01 -8.6518896e-01 5.0000000e-01] [-5.7438713e-01 5.0000000e-01 5.0000000e-01 ... -9.2428929e-01 -8.1704599e-01 -1.6181849e+00]] [[-7.2770876e-01 -1.2709790e+00 -1.9019970e+00 ... -1.2268033e+00 -9.9660857e-03 -6.5708530e-01] [ 5.0000000e-01 5.0000000e-01 5.0000000e-01 ... -4.5203526e-02 -3.2865005e+00 5.0000000e-01] [ 3.2342684e-01 5.0000000e-01 -5.3200233e-01 ... 5.0000000e-01 -1.3487877e+00 5.0000000e-01] ... [ 1.1918966e-01 -3.9094016e-01 1.7822480e-02 ... 1.8176399e-01 5.0000000e-01 -1.7031732e+00] [ 3.7100804e-01 6.3763954e-02 3.7406135e-01 ... -6.8452221e-01 5.0000000e-01 -1.5115808e-01] [ 5.0000000e-01 -1.4422955e-01 5.0000000e-01 ... 5.0000000e-01 -8.8946089e-02 -8.5188305e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMax::test_clamp[ ie_device:CPU - precision:FP32 - as_tensor:False - maximum:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_406.aten_clamp_max, %x.1 : Tensor): %self.max : float = prim::Constant[value=0.]() %3 : Tensor = aten::clamp_max(%x.1, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:84:23 return (%3) fw_re: [[[[ 0. -0.8472374 0. ... 0. 0. 0. ] [ 0. 0. -0.47365457 ... 0. -0.43525574 0. ] [ 0. -2.4499917 0. ... 0. 0. 0. ] ... [-0.5185583 0. 0. ... 0. -0.64173716 0. ] [-2.2798681 -0.52956045 -1.4308017 ... 0. -1.6075743 -1.269526 ] [-0.58441746 -0.01836593 -0.10782573 ... 0. -1.4021829 0. ]] [[-0.16100162 0. -0.50407666 ... -1.0150393 0. 0. ] [-0.15850464 -0.7043558 -1.249426 ... 0. -2.8693619 0. ] [ 0. -1.8383789 -1.2654508 ... -0.28220075 -0.4396655 -0.5906637 ] ... [-0.8151157 -0.0798288 0. ... -0.909026 -2.3358796 0. ] [ 0. 0. -0.94590336 ... -1.4305352 -0.571848 0. ] [ 0. -0.12481043 0. ... -0.37520248 -0.8075202 0. ]] [[-1.2721821 0. -0.18772213 ... 0. 0. 0. ] [-1.3023138 -0.18792145 0. ... 0. 0. -0.13252981] [ 0. 0. 0. ... -0.03084853 -0.6293571 -1.5882193 ] ... [ 0. 0. 0. ... -0.8045685 -1.6333072 0. ] [ 0. -0.22044668 -1.7913253 ... 0. 0. 0. ] [-0.8312816 -0.6801705 -1.4671395 ... -0.22973017 -1.4154167 -3.4911604 ]]]]; ov_res: [[[[ 0. -0.8472374 0. ... 0. 0. 0. ] [ 0. 0. -0.47365457 ... 0. -0.43525574 0. ] [ 0. -2.4499917 0. ... 0. 0. 0. ] ... [-0.5185583 0. 0. ... 0. -0.64173716 0. ] [-2.2798681 -0.52956045 -1.4308017 ... 0. -1.6075743 -1.269526 ] [-0.58441746 -0.01836593 -0.10782573 ... 0. -1.4021829 0. ]] [[-0.16100162 0. -0.50407666 ... -1.0150393 0. 0. ] [-0.15850464 -0.7043558 -1.249426 ... 0. -2.8693619 0. ] [ 0. -1.8383789 -1.2654508 ... -0.28220075 -0.4396655 -0.5906637 ] ... [-0.8151157 -0.0798288 0. ... -0.909026 -2.3358796 0. ] [ 0. 0. -0.94590336 ... -1.4305352 -0.571848 0. ] [ 0. -0.12481043 0. ... -0.37520248 -0.8075202 0. ]] [[-1.2721821 0. -0.18772213 ... 0. 0. 0. ] [-1.3023138 -0.18792145 0. 
... 0. 0. -0.13252981] [ 0. 0. 0. ... -0.03084853 -0.6293571 -1.5882193 ] ... [ 0. 0. 0. ... -0.8045685 -1.6333072 0. ] [ 0. -0.22044668 -1.7913253 ... 0. 0. 0. ] [-0.8312816 -0.6801705 -1.4671395 ... -0.22973017 -1.4154167 -3.4911604 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMax::test_clamp[ ie_device:CPU - precision:FP32 - as_tensor:False - maximum:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_408.aten_clamp_max, %x.1 : Tensor): %self.max : float = prim::Constant[value=1.]() %3 : Tensor = aten::clamp_max(%x.1, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:84:23 return (%3) fw_re: [[[[-0.21656266 0.50532305 -1.182624 ... 0.0756081 -1.1562153 0.478942 ] [-0.9108892 0.33930457 -1.01383 ... 0.3823551 1. 1. ] [ 0.03956617 -0.28668118 0.19061643 ... 0.8737335 1. -1.7155728 ] ... [-1.2627879 1. -1.4344864 ... 0.78396267 0.54459554 -0.3712875 ] [-1.5290009 -0.29767406 -0.41490474 ... -2.3734615 -0.53088415 0.17746451] [ 0.8956667 -0.42694297 1. ... 1. -0.09749356 1. ]] [[-0.8669849 0.26024032 1. ... 0.26633254 1. 1. ] [-1.8752918 -0.6708318 0.03601398 ... -1.3512698 0.4174945 -0.7941099 ] [-1.048027 1. -1.1875676 ... -1.3723686 1. -0.21453802] ... [-0.81810194 0.04482345 1. ... -0.32877165 -1.587735 0.29166678] [ 1. 0.88471717 -1.065556 ... 1. 0.5518282 0.9695116 ] [-0.48388153 -1.202256 0.02956713 ... 0.9285604 1. 1. ]] [[ 1. -0.37133548 1. ... -1.7736982 -0.6260663 0.68202466] [-0.01435039 0.19142142 0.9370201 ... -1.0353847 -0.14762147 -0.8719151 ] [ 1. 1. 0.3706996 ... -1.8238624 0.38684356 0.57189834] ... [-2.2412794 0.32174066 0.45897287 ... 0.35056037 -1.5589659 0.15112391] [ 1. 0.65315336 0.05181086 ... 1. 0.11765409 -1.7151347 ] [ 1. 1. 1. ... 1. -1.9968709 -1.222589 ]]]]; ov_res: [[[[-0.21656266 0.50532305 -1.182624 ... 0.0756081 -1.1562153 0.478942 ] [-0.9108892 0.33930457 -1.01383 ... 0.3823551 1. 1. ] [ 0.03956617 -0.28668118 0.19061643 ... 0.8737335 1. -1.7155728 ] ... [-1.2627879 1. -1.4344864 ... 0.78396267 0.54459554 -0.3712875 ] [-1.5290009 -0.29767406 -0.41490474 ... -2.3734615 -0.53088415 0.17746451] [ 0.8956667 -0.42694297 1. ... 1. -0.09749356 1. ]] [[-0.8669849 0.26024032 1. ... 0.26633254 1. 1. ] [-1.8752918 -0.6708318 0.03601398 ... 
-1.3512698 0.4174945 -0.7941099 ] [-1.048027 1. -1.1875676 ... -1.3723686 1. -0.21453802] ... [-0.81810194 0.04482345 1. ... -0.32877165 -1.587735 0.29166678] [ 1. 0.88471717 -1.065556 ... 1. 0.5518282 0.9695116 ] [-0.48388153 -1.202256 0.02956713 ... 0.9285604 1. 1. ]] [[ 1. -0.37133548 1. ... -1.7736982 -0.6260663 0.68202466] [-0.01435039 0.19142142 0.9370201 ... -1.0353847 -0.14762147 -0.8719151 ] [ 1. 1. 0.3706996 ... -1.8238624 0.38684356 0.57189834] ... [-2.2412794 0.32174066 0.45897287 ... 0.35056037 -1.5589659 0.15112391] [ 1. 0.65315336 0.05181086 ... 1. 0.11765409 -1.7151347 ] [ 1. 1. 1. ... 1. -1.9968709 -1.222589 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMax::test_clamp[ ie_device:CPU - precision:FP32 - as_tensor:False - maximum:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_410.aten_clamp_max, %x.1 : Tensor): %self.max : float = prim::Constant[value=-1.]() %3 : Tensor = aten::clamp_max(%x.1, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:84:23 return (%3) fw_re: [[[[-1. -1. -1. ... -1. -1. -1. ] [-1. -1. -1. ... -2.3524609 -1.2847928 -1. ] [-1. -2.0425127 -1.0672126 ... -1. -1.757577 -1. ] ... [-1. -1. -1. ... -1. -1. -1. ] [-1.7889476 -1.8033135 -1. ... -1. -1. -1. ] [-1.4676625 -1. -1. ... -1. -1. -1. ]] [[-1. -1.1237992 -1.1868446 ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ] [-1. -1. -1. ... -1.2148267 -1.6895736 -1. ] ... [-1. -1. -1. ... -1. -1. -1.0251065] [-1. -1. -1.3997114 ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ]] [[-1.2067248 -1. -2.0581923 ... -1.0831802 -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ] ... [-1. -1. -1. ... -2.03415 -1. -1. ] [-1. -2.3866646 -1. ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ]]]]; ov_res: [[[[-1. -1. -1. ... -1. -1. -1. ] [-1. -1. -1. ... -2.3524609 -1.2847928 -1. ] [-1. -2.0425127 -1.0672126 ... -1. -1.757577 -1. ] ... [-1. -1. -1. ... -1. -1. -1. ] [-1.7889476 -1.8033135 -1. ... -1. -1. -1. ] [-1.4676625 -1. -1. ... -1. -1. -1. ]] [[-1. -1.1237992 -1.1868446 ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ] [-1. -1. -1. ... -1.2148267 -1.6895736 -1. ] ... [-1. -1. -1. ... -1. -1. -1.0251065] [-1. -1. -1.3997114 ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ]] [[-1.2067248 -1. -2.0581923 ... -1.0831802 -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ] ... [-1. -1. -1. ... -2.03415 -1. -1. ] [-1. -2.3866646 -1. ... -1. -1. -1. ] [-1. -1. -1. ... -1. -1. -1. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clamp.py::TestClampMax::test_clamp[ ie_device:CPU - precision:FP32 - as_tensor:False - maximum:0.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clamp.___torch_mangle_412.aten_clamp_max, %x.1 : Tensor): %self.max : float = prim::Constant[value=0.5]() %3 : Tensor = aten::clamp_max(%x.1, %self.max) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clamp.py:84:23 return (%3) fw_re: [[[[-0.14287336 -0.61792696 0.39163128 ... 0.5 0.5 -2.302376 ] [ 0.43406147 0.5 -0.5177594 ... 0.5 -0.61381847 0.21819149] [ 0.32709125 -0.269608 -0.32060412 ... 0.5 0.1587596 0.06270938] ... [ 0.5 -1.3233654 -0.14332694 ... -0.9096611 0.5 0.5 ] [ 0.5 0.5 -1.1960721 ... 0.12291363 0.5 -0.55846506] [ 0.22215916 -0.8494919 -0.8170571 ... 0.5 0.5 0.5 ]] [[ 0.5 -0.09156027 -0.0741088 ... -1.1833586 -0.02016805 0.16891025] [-1.012606 0.5 0.5 ... 0.5 0.28113025 -0.02117401] [ 0.5 0.32492027 -0.36365283 ... 0.5 0.15831038 0.5 ] ... [-0.30101618 0.5 -1.2086933 ... -0.27717674 0.07292078 0.5 ] [-0.49598715 0.39709362 0.5 ... -1.0362749 0.5 -0.54319054] [ 0.3104946 0.15523764 -0.9246507 ... 0.5 -0.9479571 0.3784382 ]] [[-0.01814621 0.3345777 0.5 ... 0.5 0.5 -1.556322 ] [ 0.5 -0.51782346 -0.83108145 ... -2.0330474 -0.6085484 0.5 ] [-0.6781901 0.10542011 -0.81828225 ... -0.14066488 -1.132681 -1.2664568 ] ... [-0.25182652 -1.6595317 0.03960752 ... -0.5315526 0.5 0.4777809 ] [-0.9191312 -1.0220405 -0.72017556 ... -0.29950473 -0.6302205 -0.59697753] [-0.6437487 0.5 -0.55591995 ... 0.5 -1.0718994 0.5 ]]]]; ov_res: [[[[-0.14287336 -0.61792696 0.39163128 ... 0.5 0.5 -2.302376 ] [ 0.43406147 0.5 -0.5177594 ... 0.5 -0.61381847 0.21819149] [ 0.32709125 -0.269608 -0.32060412 ... 0.5 0.1587596 0.06270938] ... [ 0.5 -1.3233654 -0.14332694 ... -0.9096611 0.5 0.5 ] [ 0.5 0.5 -1.1960721 ... 0.12291363 0.5 -0.55846506] [ 0.22215916 -0.8494919 -0.8170571 ... 0.5 0.5 0.5 ]] [[ 0.5 -0.09156027 -0.0741088 ... -1.1833586 -0.02016805 0.16891025] [-1.012606 0.5 0.5 ... 0.5 0.28113025 -0.02117401] [ 0.5 0.32492027 -0.36365283 ... 
0.5 0.15831038 0.5 ] ... [-0.30101618 0.5 -1.2086933 ... -0.27717674 0.07292078 0.5 ] [-0.49598715 0.39709362 0.5 ... -1.0362749 0.5 -0.54319054] [ 0.3104946 0.15523764 -0.9246507 ... 0.5 -0.9479571 0.3784382 ]] [[-0.01814621 0.3345777 0.5 ... 0.5 0.5 -1.556322 ] [ 0.5 -0.51782346 -0.83108145 ... -2.0330474 -0.6085484 0.5 ] [-0.6781901 0.10542011 -0.81828225 ... -0.14066488 -1.132681 -1.2664568 ] ... [-0.25182652 -1.6595317 0.03960752 ... -0.5315526 0.5 0.4777809 ] [-0.9191312 -1.0220405 -0.72017556 ... -0.29950473 -0.6302205 -0.59697753] [-0.6437487 0.5 -0.55591995 ... 0.5 -1.0718994 0.5 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_clone.py::TestClone::test_clone[ ie_device:CPU - precision:FP32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_clone.___torch_mangle_413.aten_clone, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::clone(%x.1, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_clone.py:19:23 return (%3) fw_re: [[[[-1.491375 -0.3275168 -0.41495278 ... -0.9738665 -0.07704763 -0.67748326] [-1.4136677 -1.1768215 -0.56783086 ... -0.19026156 -1.0047336 0.60050046] [-0.98845756 -0.8827225 -1.0417165 ... 0.39878494 -1.2471635 -0.5425751 ] ... [ 1.4323945 -0.93640745 -0.4540182 ... -0.50905824 0.2490604 -0.71618575] [-0.20166437 0.616351 -0.8402541 ... 0.99683344 1.3176229 -0.5626417 ] [ 1.532995 -0.21689133 -0.85420376 ... 0.75203514 -0.1760585 0.75768155]] [[-1.1140883 -2.011141 0.6336734 ... 1.2395792 1.3454552 1.4790125 ] [ 0.08887322 1.5060226 -1.1299111 ... -2.7055123 1.011439 -0.38287896] [ 0.2458227 0.2149514 0.9796819 ... -1.0741985 -1.1848327 1.159082 ] ... [-0.9079441 -0.75668204 0.24383338 ... 1.1995856 0.2821424 -0.66894025] [-0.9328394 -1.1177312 -0.25469 ... 0.31818214 -0.13307302 0.03807255] [ 0.6658449 0.1613334 0.8422389 ... -0.08313628 -1.8070984 -1.1498095 ]] [[ 1.008004 0.06776302 0.35899717 ... -0.7818653 -1.3858384 0.38811928] [-0.24280375 -0.49696276 0.36702836 ... 0.9056431 0.24029268 0.954731 ] [ 0.6000442 -2.5968413 -0.3968736 ... 0.28553948 0.6186343 0.35786203] ... [ 0.6429716 -0.96625185 0.82642096 ... -0.07303166 0.5882671 0.16778621] [-0.66857314 -0.20267965 -1.4989282 ... 0.800372 -1.1530628 1.9004219 ] [-0.04629403 0.10282052 -1.215165 ... 2.5358899 0.84939784 0.60718364]]]]; ov_res: [[[[-1.491375 -0.3275168 -0.41495278 ... -0.9738665 -0.07704763 -0.67748326] [-1.4136677 -1.1768215 -0.56783086 ... -0.19026156 -1.0047336 0.60050046] [-0.98845756 -0.8827225 -1.0417165 ... 0.39878494 -1.2471635 -0.5425751 ] ... [ 1.4323945 -0.93640745 -0.4540182 ... -0.50905824 0.2490604 -0.71618575] [-0.20166437 0.616351 -0.8402541 ... 
0.99683344 1.3176229 -0.5626417 ] [ 1.532995 -0.21689133 -0.85420376 ... 0.75203514 -0.1760585 0.75768155]] [[-1.1140883 -2.011141 0.6336734 ... 1.2395792 1.3454552 1.4790125 ] [ 0.08887322 1.5060226 -1.1299111 ... -2.7055123 1.011439 -0.38287896] [ 0.2458227 0.2149514 0.9796819 ... -1.0741985 -1.1848327 1.159082 ] ... [-0.9079441 -0.75668204 0.24383338 ... 1.1995856 0.2821424 -0.66894025] [-0.9328394 -1.1177312 -0.25469 ... 0.31818214 -0.13307302 0.03807255] [ 0.6658449 0.1613334 0.8422389 ... -0.08313628 -1.8070984 -1.1498095 ]] [[ 1.008004 0.06776302 0.35899717 ... -0.7818653 -1.3858384 0.38811928] [-0.24280375 -0.49696276 0.36702836 ... 0.9056431 0.24029268 0.954731 ] [ 0.6000442 -2.5968413 -0.3968736 ... 0.28553948 0.6186343 0.35786203] ... [ 0.6429716 -0.96625185 0.82642096 ... -0.07303166 0.5882671 0.16778621] [-0.66857314 -0.20267965 -1.4989282 ... 0.800372 -1.1530628 1.9004219 ] [-0.04629403 0.10282052 -1.215165 ... 2.5358899 0.84939784 0.60718364]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_comparision.py::TestComp::test_comp[ ie_device:CPU - precision:FP32 - op:eq ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_comparision.___torch_mangle_414.aten_eq, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::eq(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_comparision.py:19:23 return (%3) fw_re: [[[[False False False ... False False False] [False False False ... False False False] [False False False ... False False False] ... [False False False ... False False False] [False False False ... False False False] [False False False ... False False False]] [[False False False ... False False False] [False False False ... False False False] [False False False ... False False False] ... [False False False ... False False False] [False False False ... False False False] [False False False ... False False False]] [[False False False ... False False False] [False False False ... False False False] [False False False ... False False False] ... [False False False ... False False False] [False False False ... False False False] [False False False ... False False False]]]]; ov_res: [[[[False False False ... False False False] [False False False ... False False False] [False False False ... False False False] ... [False False False ... False False False] [False False False ... False False False] [False False False ... False False False]] [[False False False ... False False False] [False False False ... False False False] [False False False ... False False False] ... [False False False ... False False False] [False False False ... False False False] [False False False ... False False False]] [[False False False ... False False False] [False False False ... False False False] [False False False ... False False False] ... [False False False ... False False False] [False False False ... False False False] [False False False ... False False False]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_comparision.py::TestComp::test_comp[ ie_device:CPU - precision:FP32 - op:ne ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_comparision.___torch_mangle_415.aten_ne, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::ne(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_comparision.py:23:23 return (%3) fw_re: [[[[ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True] ... [ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True]] [[ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True] ... [ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True]] [[ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True] ... [ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True]]]]; ov_res: [[[[ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True] ... [ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True]] [[ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True] ... [ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True]] [[ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True] ... [ True True True ... True True True] [ True True True ... True True True] [ True True True ... True True True]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_comparision.py::TestComp::test_comp[ ie_device:CPU - precision:FP32 - op:lt ] | 0.08 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_comparision.___torch_mangle_416.aten_lt, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::lt(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_comparision.py:27:23 return (%3) fw_re: [[[[False False False ... False False False] [ True True True ... False False True] [ True False True ... False True False] ... [False True True ... True False True] [False False True ... True False True] [False False False ... True True False]] [[False True True ... False True True] [ True False False ... True True True] [False True False ... True False False] ... [ True True False ... True False False] [False True False ... False True False] [ True False True ... True True False]] [[False True False ... False True False] [ True True False ... True False False] [ True False True ... False False False] ... [False True False ... True False False] [False True True ... True False True] [False True False ... False True False]]]]; ov_res: [[[[False False False ... False False False] [ True True True ... False False True] [ True False True ... False True False] ... [False True True ... True False True] [False False True ... True False True] [False False False ... True True False]] [[False True True ... False True True] [ True False False ... True True True] [False True False ... True False False] ... [ True True False ... True False False] [False True False ... False True False] [ True False True ... True True False]] [[False True False ... False True False] [ True True False ... True False False] [ True False True ... False False False] ... [False True False ... True False False] [False True True ... True False True] [False True False ... False True False]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_comparision.py::TestComp::test_comp[ ie_device:CPU - precision:FP32 - op:gt ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_comparision.___torch_mangle_417.aten_gt, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::gt(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_comparision.py:31:23 return (%3) fw_re: [[[[ True True False ... True True True] [False True False ... True False False] [ True False False ... False False False] ... [False True False ... True False True] [ True True True ... False True False] [ True False True ... True False False]] [[ True False False ... False True True] [ True True True ... True False False] [False False False ... False True True] ... [ True True False ... False True True] [False False False ... False True False] [ True True True ... True False False]] [[False True False ... True True False] [False False False ... False True True] [ True False False ... False False True] ... [False False True ... False True True] [ True False False ... False False True] [ True True True ... True False True]]]]; ov_res: [[[[ True True False ... True True True] [False True False ... True False False] [ True False False ... False False False] ... [False True False ... True False True] [ True True True ... False True False] [ True False True ... True False False]] [[ True False False ... False True True] [ True True True ... True False False] [False False False ... False True True] ... [ True True False ... False True True] [False False False ... False True False] [ True True True ... True False False]] [[False True False ... True True False] [False False False ... False True True] [ True False False ... False False True] ... [False False True ... False True True] [ True False False ... False False True] [ True True True ... True False True]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_comparision.py::TestComp::test_comp[ ie_device:CPU - precision:FP32 - op:le ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_comparision.___torch_mangle_418.aten_le, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::le(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_comparision.py:35:23 return (%3) fw_re: [[[[False True False ... True True False] [False False False ... False False False] [False True True ... False False True] ... [ True True False ... True True False] [False False True ... False True True] [False False True ... False True True]] [[ True False False ... False True False] [False False True ... True False True] [ True True True ... False True True] ... [False False True ... False True False] [False False False ... True True False] [False True True ... False False True]] [[False True False ... False True False] [ True True False ... True False True] [False True False ... True True True] ... [ True False True ... True True True] [ True True True ... True False True] [False False True ... True False True]]]]; ov_res: [[[[False True False ... True True False] [False False False ... False False False] [False True True ... False False True] ... [ True True False ... True True False] [False False True ... False True True] [False False True ... False True True]] [[ True False False ... False True False] [False False True ... True False True] [ True True True ... False True True] ... [False False True ... False True False] [False False False ... True True False] [False True True ... False False True]] [[False True False ... False True False] [ True True False ... True False True] [False True False ... True True True] ... [ True False True ... True True True] [ True True True ... True False True] [False False True ... True False True]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_comparision.py::TestComp::test_comp[ ie_device:CPU - precision:FP32 - op:ge ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_comparision.___torch_mangle_419.aten_ge, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::ge(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_comparision.py:39:23 return (%3) fw_re: [[[[ True True False ... False True False] [ True True False ... False False True] [ True False False ... True False False] ... [False True True ... True False True] [False True False ... True True False] [ True False True ... True False True]] [[ True True True ... False True False] [False True False ... True False False] [False False False ... True False False] ... [False True False ... True True False] [ True False False ... False True False] [False False True ... True False False]] [[False True False ... False True True] [ True False True ... False False True] [False False False ... True False False] ... [ True False False ... False False True] [False False True ... True True True] [ True True True ... True False False]]]]; ov_res: [[[[ True True False ... False True False] [ True True False ... False False True] [ True False False ... True False False] ... [False True True ... True False True] [False True False ... True True False] [ True False True ... True False True]] [[ True True True ... False True False] [False True False ... True False False] [False False False ... True False False] ... [False True False ... True True False] [ True False False ... False True False] [False False True ... True False False]] [[False True False ... False True True] [ True False True ... False False True] [False False False ... True False False] ... [ True False False ... False False True] [False False True ... True True True] [ True True True ... True False False]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_420.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={2.38398}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %3, %2, %3, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%7) fw_re: [[[[ -7.048944 -5.345335 -3.4923897 ... 0.4320594 -0.03134227 -13.265739 ] [ -0.9538543 8.555648 -3.5146046 ... 1.9837177 -5.576875 -4.642106 ] [ 7.074101 -0.10865688 6.1433973 ... 4.6550345 -0.29310703 7.0271134 ] ... [ 2.6768067 3.9777753 3.3263822 ... 1.7222311 6.0854287 3.64881 ] [ -0.20658374 2.2967868 2.8474438 ... 3.1014276 -1.318943 -6.964623 ] [ 7.5570626 -7.099342 -10.143547 ... 1.0044179 2.5430589 -4.498372 ]]] [[[ -7.8211055 -4.6914277 -8.847292 ... 7.2578244 -2.3333163 -9.534237 ] [ 0.61721706 -8.797083 -4.317289 ... 6.048905 -6.441873 3.8170176 ] [ 0.76442695 1.7577255 1.0396724 ... -8.030903 -2.3051705 13.005884 ] ... [ 2.959737 -0.7503693 5.7394032 ... -5.673186 -1.4089782 -0.269562 ] [ 4.4251003 -0.6270077 1.0063518 ... -2.93262 -2.8568544 -3.2315588 ] [ -4.50876 4.9999866 7.434914 ... 3.0890179 2.974449 2.8916261 ]]]]; ov_res: [[[[ -7.048944 -5.345335 -3.4923897 ... 0.4320594 -0.03134227 -13.265739 ] [ -0.9538543 8.555648 -3.5146046 ... 1.9837177 -5.576875 -4.642106 ] [ 7.074101 -0.10865688 6.1433973 ... 4.6550345 -0.29310703 7.0271134 ] ... [ 2.6768067 3.9777753 3.3263822 ... 1.7222311 6.0854287 3.64881 ] [ -0.20658374 2.2967868 2.8474438 ... 3.1014276 -1.318943 -6.964623 ] [ 7.5570626 -7.099342 -10.143547 ... 
1.0044179 2.5430589 -4.498372 ]]] [[[ -7.8211055 -4.6914277 -8.847292 ... 7.2578244 -2.3333163 -9.534237 ] [ 0.61721706 -8.797083 -4.317289 ... 6.048905 -6.441873 3.8170176 ] [ 0.76442695 1.7577255 1.0396724 ... -8.030903 -2.3051705 13.005884 ] ... [ 2.959737 -0.7503693 5.7394032 ... -5.673186 -1.4089782 -0.269562 ] [ 4.4251003 -0.6270077 1.0063518 ... -2.93262 -2.8568544 -3.2315588 ] [ -4.50876 4.9999866 7.434914 ... 3.0890179 2.974449 2.8916261 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': 2, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_422.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.dilations : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.957771}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.dilations) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%8) fw_re: [[[[-4.00980377e+00 8.21533012e+00 -2.14276791e+00 8.19332504e+00 2.08106613e+00 1.29528463e-01 -2.50728130e+00 2.86106825e+00 4.81582355e+00 -5.17032433e+00 5.59769440e+00 5.51105118e+00] [-8.77548575e-01 9.49228287e+00 -6.30508614e+00 5.96580219e+00 -2.43562078e+00 9.34974134e-01 1.06957853e-01 -2.19896483e+00 2.33401346e+00 7.99815750e+00 -2.75694537e+00 3.10759282e+00] [ 5.47192287e+00 4.04481411e+00 -7.52181530e+00 8.06243610e+00 8.30597520e-01 8.59501553e+00 -3.03164387e+00 -4.47041225e+00 1.15217566e+00 2.29617786e+00 -6.58039927e-01 2.91342187e+00] [-2.45398355e+00 -3.80681634e+00 -6.32320881e-01 1.16134024e+00 1.69645166e+00 -1.23752594e+00 2.97855759e+00 -4.40310287e+00 -2.33135128e+00 4.55558360e-01 2.71715069e+00 -9.70462132e+00] [ 6.15792561e+00 6.29663587e-01 -3.41420913e+00 1.43869627e+00 4.42871094e+00 -3.64635682e+00 6.98043251e+00 4.83653736e+00 5.21454144e+00 -1.66467500e+00 1.41383362e+00 -2.31646919e+00] [ 8.39574218e-01 3.59290624e+00 -4.65208244e+00 -4.37469482e+00 5.12158298e+00 6.78327084e+00 7.08353579e-01 1.81534076e+00 1.52872300e+00 1.21346116e+00 -4.65975761e+00 3.39853692e+00] [-8.53479958e+00 6.05292702e+00 3.62811947e+00 3.76156592e+00 1.57956231e+00 3.65805006e+00 2.55776787e+00 1.09411812e+00 
8.80521297e+00 4.90307426e+00 -1.33331537e-01 5.87989616e+00] [-1.55983090e+00 -4.25809097e+00 1.05121768e+00 9.81280422e+00 -4.36400127e+00 6.74401569e+00 2.49566936e+00 -3.95658231e+00 -9.91723776e-01 -1.85218287e+00 6.17555499e-01 1.22117817e+00] [-1.60438180e+00 -6.02441549e-01 2.04912496e+00 7.26000488e-01 4.02363396e+00 -8.34265423e+00 2.69924879e+00 -4.07349348e-01 2.76952434e+00 2.93347263e+00 1.73207307e+00 -2.47834420e+00] [ 4.46207237e+00 -1.00156507e+01 2.64336514e+00 -1.47113729e+00 2.22943258e+00 -2.28008747e+00 -2.31175709e+00 -2.84939146e+00 1.34652209e+00 2.44088387e+00 2.12991476e+00 1.80105925e-01] [-1.28338695e+00 -1.52816105e+00 7.54448605e+00 2.84441614e+00 4.07856703e-03 9.10237217e+00 6.80443954e+00 -1.69810796e+00 -6.43828964e+00 -2.09609699e+00 5.00802135e+00 -3.74449992e+00] [-5.30405641e-01 -5.43689251e+00 -2.68121743e+00 5.23102760e+00 -5.76806784e-01 -9.16125417e-01 -3.25825572e+00 -6.66812062e-01 7.74179101e-02 9.98043346e+00 1.97415280e+00 -2.93936133e-01]]] [[[ 1.67832875e+00 -1.23701668e+00 -2.82283020e+00 -4.93254948e+00 -6.16533637e-01 -7.34526634e+00 -1.76132822e+00 1.64509773e+00 8.74946976e+00 -3.05330682e+00 -2.93532968e-01 5.86611366e+00] [-4.17903996e+00 6.65744543e-02 1.55865288e+00 6.71714401e+00 5.04699707e+00 -1.53956795e+00 7.57123590e-01 3.11740184e+00 8.12113822e-01 8.60575294e+00 8.06680298e+00 4.16793060e+00] [-2.96163869e+00 -1.40220928e+00 5.03327560e+00 1.80958462e+00 -1.73602700e-02 -3.23095775e+00 2.73855495e+00 4.76371956e+00 -4.41386700e+00 4.23015213e+00 8.25129795e+00 -9.43163037e-01] [-3.18647933e+00 5.89716816e+00 2.05164742e+00 -3.59543920e+00 -1.39493966e+00 -3.80783820e+00 2.66597509e+00 1.08143616e+01 -2.16893506e+00 -2.99322057e+00 1.68416309e+00 -4.30553818e+00] [-9.52852631e+00 2.30610561e+00 3.39282274e+00 -1.28049493e+00 -7.49206781e-01 -2.84546757e+00 4.55305004e+00 1.31450593e-01 -8.70757937e-01 1.26652026e+00 8.73594475e+00 1.93961620e-01] [-5.93960094e+00 -1.32347298e+00 -2.56848431e+00 
-4.52935219e+00 2.27704310e+00 -2.01495099e+00 4.60135841e+00 6.04434681e+00 1.14656138e+00 -3.28990960e+00 -4.86906815e+00 -3.56315112e+00] [-3.81514287e+00 5.49508452e-01 4.43346167e+00 -4.26267433e+00 4.89024973e+00 -1.75534129e+00 2.39504814e+00 -1.31898618e+00 1.82800770e-01 1.40639806e+00 -3.73357368e+00 -5.17238379e-02] [ 5.04933000e-01 -2.26975727e+00 4.59630156e+00 4.75917530e+00 -3.19671226e+00 2.46191859e-01 -7.22045302e-01 -6.44387960e-01 3.71085906e+00 -6.30008125e+00 -6.28429985e+00 -6.36370087e+00] [-3.45870900e+00 -2.86414218e+00 -3.82369304e+00 -3.52634358e+00 -5.70779991e+00 -3.66597176e-01 -6.70494652e+00 -4.82268620e+00 -1.88741684e-01 -3.00217032e-01 -5.32595062e+00 -3.88506961e+00] [ 1.01618493e+00 -2.58229542e+00 3.04266405e+00 -7.28134918e+00 -4.06881046e+00 1.12669849e+01 -5.56680489e+00 -7.65339947e+00 -6.38811874e+00 -5.72320747e+00 -3.50549722e+00 3.81476951e+00] [ 7.56068826e-01 7.49190426e+00 6.07258320e+00 -9.08145332e+00 -4.83088207e+00 2.52061844e+00 3.06259990e+00 -1.19900560e+00 7.00944710e+00 3.99134231e+00 8.96421432e+00 -3.21302056e+00] [ 3.72003055e+00 1.13917055e+01 9.56836939e-01 9.39457119e-01 -1.26491380e+00 1.08652067e+00 3.62864804e+00 -3.06744599e+00 -1.45761514e+00 3.09733891e+00 4.28157330e+00 -8.63608122e-01]]]]; ov_res: [[[[-4.00980377e+00 8.21533012e+00 -2.14276791e+00 8.19332504e+00 2.08106613e+00 1.29528463e-01 -2.50728130e+00 2.86106825e+00 4.81582355e+00 -5.17032433e+00 5.59769440e+00 5.51105118e+00] [-8.77548575e-01 9.49228287e+00 -6.30508614e+00 5.96580219e+00 -2.43562078e+00 9.34974134e-01 1.06957853e-01 -2.19896483e+00 2.33401346e+00 7.99815750e+00 -2.75694537e+00 3.10759282e+00] [ 5.47192287e+00 4.04481411e+00 -7.52181530e+00 8.06243610e+00 8.30597520e-01 8.59501553e+00 -3.03164387e+00 -4.47041225e+00 1.15217566e+00 2.29617786e+00 -6.58039927e-01 2.91342187e+00] [-2.45398355e+00 -3.80681634e+00 -6.32320881e-01 1.16134024e+00 1.69645166e+00 -1.23752594e+00 2.97855759e+00 -4.40310287e+00 -2.33135128e+00 
4.55558360e-01 2.71715069e+00 -9.70462132e+00] [ 6.15792561e+00 6.29663587e-01 -3.41420913e+00 1.43869627e+00 4.42871094e+00 -3.64635682e+00 6.98043251e+00 4.83653736e+00 5.21454144e+00 -1.66467500e+00 1.41383362e+00 -2.31646919e+00] [ 8.39574218e-01 3.59290624e+00 -4.65208244e+00 -4.37469482e+00 5.12158298e+00 6.78327084e+00 7.08353579e-01 1.81534076e+00 1.52872300e+00 1.21346116e+00 -4.65975761e+00 3.39853692e+00] [-8.53479958e+00 6.05292702e+00 3.62811947e+00 3.76156592e+00 1.57956231e+00 3.65805006e+00 2.55776787e+00 1.09411812e+00 8.80521297e+00 4.90307426e+00 -1.33331537e-01 5.87989616e+00] [-1.55983090e+00 -4.25809097e+00 1.05121768e+00 9.81280422e+00 -4.36400127e+00 6.74401569e+00 2.49566936e+00 -3.95658231e+00 -9.91723776e-01 -1.85218287e+00 6.17555499e-01 1.22117817e+00] [-1.60438180e+00 -6.02441549e-01 2.04912496e+00 7.26000488e-01 4.02363396e+00 -8.34265423e+00 2.69924879e+00 -4.07349348e-01 2.76952434e+00 2.93347263e+00 1.73207307e+00 -2.47834420e+00] [ 4.46207237e+00 -1.00156507e+01 2.64336514e+00 -1.47113729e+00 2.22943258e+00 -2.28008747e+00 -2.31175709e+00 -2.84939146e+00 1.34652209e+00 2.44088387e+00 2.12991476e+00 1.80105925e-01] [-1.28338695e+00 -1.52816105e+00 7.54448605e+00 2.84441614e+00 4.07856703e-03 9.10237217e+00 6.80443954e+00 -1.69810796e+00 -6.43828964e+00 -2.09609699e+00 5.00802135e+00 -3.74449992e+00] [-5.30405641e-01 -5.43689251e+00 -2.68121743e+00 5.23102760e+00 -5.76806784e-01 -9.16125417e-01 -3.25825572e+00 -6.66812062e-01 7.74179101e-02 9.98043346e+00 1.97415280e+00 -2.93936133e-01]]] [[[ 1.67832875e+00 -1.23701668e+00 -2.82283020e+00 -4.93254948e+00 -6.16533637e-01 -7.34526634e+00 -1.76132822e+00 1.64509773e+00 8.74946976e+00 -3.05330682e+00 -2.93532968e-01 5.86611366e+00] [-4.17903996e+00 6.65744543e-02 1.55865288e+00 6.71714401e+00 5.04699707e+00 -1.53956795e+00 7.57123590e-01 3.11740184e+00 8.12113822e-01 8.60575294e+00 8.06680298e+00 4.16793060e+00] [-2.96163869e+00 -1.40220928e+00 5.03327560e+00 1.80958462e+00 
-1.73602700e-02 -3.23095775e+00 2.73855495e+00 4.76371956e+00 -4.41386700e+00 4.23015213e+00 8.25129795e+00 -9.43163037e-01] [-3.18647933e+00 5.89716816e+00 2.05164742e+00 -3.59543920e+00 -1.39493966e+00 -3.80783820e+00 2.66597509e+00 1.08143616e+01 -2.16893506e+00 -2.99322057e+00 1.68416309e+00 -4.30553818e+00] [-9.52852631e+00 2.30610561e+00 3.39282274e+00 -1.28049493e+00 -7.49206781e-01 -2.84546757e+00 4.55305004e+00 1.31450593e-01 -8.70757937e-01 1.26652026e+00 8.73594475e+00 1.93961620e-01] [-5.93960094e+00 -1.32347298e+00 -2.56848431e+00 -4.52935219e+00 2.27704310e+00 -2.01495099e+00 4.60135841e+00 6.04434681e+00 1.14656138e+00 -3.28990960e+00 -4.86906815e+00 -3.56315112e+00] [-3.81514287e+00 5.49508452e-01 4.43346167e+00 -4.26267433e+00 4.89024973e+00 -1.75534129e+00 2.39504814e+00 -1.31898618e+00 1.82800770e-01 1.40639806e+00 -3.73357368e+00 -5.17238379e-02] [ 5.04933000e-01 -2.26975727e+00 4.59630156e+00 4.75917530e+00 -3.19671226e+00 2.46191859e-01 -7.22045302e-01 -6.44387960e-01 3.71085906e+00 -6.30008125e+00 -6.28429985e+00 -6.36370087e+00] [-3.45870900e+00 -2.86414218e+00 -3.82369304e+00 -3.52634358e+00 -5.70779991e+00 -3.66597176e-01 -6.70494652e+00 -4.82268620e+00 -1.88741684e-01 -3.00217032e-01 -5.32595062e+00 -3.88506961e+00] [ 1.01618493e+00 -2.58229542e+00 3.04266405e+00 -7.28134918e+00 -4.06881046e+00 1.12669849e+01 -5.56680489e+00 -7.65339947e+00 -6.38811874e+00 -5.72320747e+00 -3.50549722e+00 3.81476951e+00] [ 7.56068826e-01 7.49190426e+00 6.07258320e+00 -9.08145332e+00 -4.83088207e+00 2.52061844e+00 3.06259990e+00 -1.19900560e+00 7.00944710e+00 3.99134231e+00 8.96421432e+00 -3.21302056e+00] [ 3.72003055e+00 1.13917055e+01 9.56836939e-01 9.39457119e-01 -1.26491380e+00 1.08652067e+00 3.62864804e+00 -3.06744599e+00 -1.45761514e+00 3.09733891e+00 4.28157330e+00 -8.63608122e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 1, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_424.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.56413}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %6 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %2, %2, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%6) fw_re: [[[[ -1.8822069 -2.8905315 -10.521331 ... -1.421657 2.0387268 -1.9130511 ] [ 1.4113042 -10.038655 -3.892061 ... -5.085596 5.321458 -4.445813 ] [ -1.1287335 -3.3071985 9.545323 ... 2.2668834 -5.1522827 -9.18149 ] ... [ 6.0848265 -6.0509977 2.951372 ... -11.330367 -3.2681885 -2.3415427 ] [ -5.241714 -3.6407795 10.824932 ... -11.721278 10.357156 0.46215582] [ -0.68545645 -6.7926693 -3.2127695 ... 3.7169044 -18.33105 -7.935383 ]]] [[[ -0.7812636 3.1499746 -8.278668 ... -3.9525185 -2.8702395 -0.8471288 ] [ 3.2204254 -3.8574436 1.3012261 ... 4.0645275 10.490871 10.151764 ] [ -0.11662126 -1.2857506 7.064621 ... -2.4454381 0.27579486 -10.104292 ] ... [ -4.0648727 -4.9827127 5.569194 ... 3.66547 -5.296842 -7.01151 ] [ 1.5217512 -2.9970632 3.766988 ... -5.763727 6.796774 -6.3984203 ] [ -6.0459337 1.4054322 -7.092657 ... 6.2460213 -4.3869534 1.3640003 ]]]]; ov_res: [[[[ -1.8822069 -2.8905315 -10.521331 ... -1.421657 2.0387268 -1.9130511 ] [ 1.4113042 -10.038655 -3.892061 ... -5.085596 5.321458 -4.445813 ] [ -1.1287335 -3.3071985 9.545323 ... 2.2668834 -5.1522827 -9.18149 ] ... [ 6.0848265 -6.0509977 2.951372 ... -11.330367 -3.2681885 -2.3415427 ] [ -5.241714 -3.6407795 10.824932 ... -11.721278 10.357156 0.46215582] [ -0.68545645 -6.7926693 -3.2127695 ... 3.7169044 -18.33105 -7.935383 ]]] [[[ -0.7812636 3.1499746 -8.278668 ... 
-3.9525185 -2.8702395 -0.8471288 ] [ 3.2204254 -3.8574436 1.3012261 ... 4.0645275 10.490871 10.151764 ] [ -0.11662126 -1.2857506 7.064621 ... -2.4454381 0.27579486 -10.104292 ] ... [ -4.0648727 -4.9827127 5.569194 ... 3.66547 -5.296842 -7.01151 ] [ 1.5217512 -2.9970632 3.766988 ... -5.763727 6.796774 -6.3984203 ] [ -6.0459337 1.4054322 -7.092657 ... 6.2460213 -4.3869534 1.3640003 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 2, 'groups': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_426.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.0998815}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%8) fw_re: [[[[ 4.89387417e+00 5.44197226e+00 -7.66255665e+00 5.56361914e-01 -6.27834892e+00 -2.61112952e+00 -5.17650080e+00 -1.33005476e+01 -5.71446991e+00 4.25419617e+00 1.45710354e+01 4.22464705e+00 9.62317407e-01 -1.85477698e+00 5.13959742e+00 -5.31607103e+00 -4.68808126e+00 -8.53631687e+00 4.70523787e+00 1.13199596e+01 5.10529852e+00] [ 2.66284823e+00 -5.71709967e+00 8.84475899e+00 -3.33107185e+00 -4.69897795e+00 1.06437778e+01 -6.08299303e+00 2.35979128e+00 6.47984028e+00 -9.61899519e-01 -1.03251104e+01 3.13845992e+00 6.83668232e+00 1.12382021e+01 -8.75946522e+00 -1.31457758e+01 2.95978379e+00 -5.86303949e+00 5.06905985e+00 -1.83328903e+00 -4.93509007e+00] [-4.78809929e+00 4.76352978e+00 6.77345324e+00 4.32678938e+00 -5.99530697e+00 8.57958984e+00 -1.74923456e+00 -6.06517267e+00 -7.80508852e+00 3.29841375e+00 -7.54578054e-01 -2.97765875e+00 -1.26874466e+01 1.62522583e+01 -3.58937979e+00 -1.33963585e+01 1.47051020e+01 5.90595865e+00 -2.47370934e+00 6.10508978e-01 -1.12591982e+01] [-1.37045825e+00 3.65488863e+00 3.58667970e-02 6.04950523e+00 3.52407789e+00 -3.66854042e-01 -5.59278250e+00 5.31841516e+00 -3.69076729e+00 -2.60123229e+00 -1.28174591e+01 7.78347969e+00 -7.38795757e-01 2.24563551e+00 -3.63196015e+00 -2.32637286e+00 
1.34399023e+01 -1.67934865e-01 -1.18618908e+01 4.08815622e+00 5.73339748e+00] [-2.27757645e+00 7.73449945e+00 -9.64404464e-01 -1.56246023e+01 5.16051245e+00 -3.67666769e+00 3.30677104e+00 -9.37768269e+00 5.40233946e+00 6.03964853e+00 2.88834667e+00 -1.03731871e+01 1.52735710e+00 -2.43011355e-01 4.43008900e+00 1.55331726e+01 -8.67219067e+00 -1.09375277e+01 6.41479206e+00 5.05636597e+00 -7.81753969e+00] [ 1.97903705e+00 -1.73488510e+00 -5.53983879e+00 6.85405922e+00 5.56466532e+00 -9.05689240e+00 1.36785638e+00 -2.86264032e-01 1.89925611e+00 -8.89319420e+00 2.09637809e+00 3.73597288e+00 -1.74523392e+01 6.10152674e+00 1.82699661e+01 -4.93336630e+00 -6.47446346e+00 2.08821225e+00 -5.19232750e+00 -7.24060965e+00 6.16532516e+00] [ 7.43526983e+00 -5.71308613e+00 -3.38021660e+00 -2.49855852e+00 -1.74622583e+00 -1.45668113e+00 3.06162262e+00 -1.76011562e+00 -8.68506312e-01 -1.16859818e+00 -6.74890757e+00 7.54279077e-01 1.02876225e+01 -1.67466009e+00 9.15420818e+00 -4.12536669e+00 -7.49675322e+00 3.46274519e+00 -5.54365110e+00 -8.58022451e-01 -3.86615992e+00] [ 9.49737787e-01 -1.58198559e+00 -2.37888169e+00 -9.03153706e+00 3.79278946e+00 3.99854875e+00 -1.14658613e+01 2.06293300e-01 2.26685834e+00 8.79937887e-01 -1.14683211e+00 3.31014585e+00 4.69549942e+00 3.41557384e+00 -9.00672245e+00 -1.70712817e+00 2.97749805e+00 7.23540449e+00 8.19780731e+00 -5.97061539e+00 4.85511160e+00] [ 6.78940630e+00 4.34163237e+00 -8.18822861e+00 -9.39141655e+00 1.04750156e+01 4.30463314e+00 3.71287012e+00 4.96238279e+00 7.66074240e-01 5.00811768e+00 -6.99810171e+00 -1.28268528e+00 3.54412293e+00 -3.59862828e+00 -8.11623383e+00 2.88639832e+00 -3.77756190e+00 3.76468897e-01 8.28864098e+00 3.55245024e-02 -4.48632431e+00] [ 3.69338822e+00 -2.84787464e+00 1.14493093e+01 3.00573635e+00 -8.62820625e+00 1.13836952e-01 1.14040098e+01 2.01670527e+00 7.58773708e+00 -8.83570766e+00 1.03779209e+00 -3.36004233e+00 -9.02212429e+00 7.41741467e+00 -7.20935154e+00 -4.09632063e+00 9.48496014e-02 -9.30767250e+00 
-7.26015997e+00 4.13982201e+00 1.01715612e+00] [ 6.26729345e+00 2.69136095e+00 -1.46229877e+01 1.55799627e+01 5.72590399e+00 -6.64136410e+00 1.69012594e+00 -2.69144207e-01 -5.98673582e+00 3.40855539e-01 -1.20704818e+00 -3.94913197e-01 5.62116683e-01 -3.20528221e+00 -4.24373341e+00 1.35378575e+00 1.88925946e+00 2.12009144e+00 4.51258516e+00 -4.68232346e+00 1.23729634e+00] [-2.43527532e+00 8.22629261e+00 -2.14098215e+00 -1.02635503e+00 1.19152176e+00 2.14535475e+00 1.29109287e+00 5.31385708e+00 1.51022034e+01 4.67431879e+00 -4.44907904e+00 -5.65865612e+00 -9.59642982e+00 6.65310526e+00 1.29115419e+01 8.15570593e-01 5.90748596e+00 1.25332775e+01 3.93149447e+00 -2.28987503e+00 -5.76952410e+00] [-1.76750088e+00 8.48746204e+00 -3.20423889e+00 5.24456930e+00 -8.30065429e-01 -5.57973528e+00 -3.46417642e+00 -7.16362143e+00 1.41042423e+00 -1.70232844e+00 3.31650710e+00 6.02801180e+00 1.79291975e+00 -4.21258974e+00 -1.08715594e+00 1.19626486e+00 3.11070800e+00 -5.69589949e+00 -3.78392673e+00 -1.06104393e+01 1.26603184e+01] [ 1.44244349e+00 8.45365107e-01 5.84082270e+00 -8.99171829e-01 -1.84565425e+00 -1.02162361e+01 -1.11447597e+00 5.24032354e+00 2.98866940e+00 -4.52827644e+00 -3.99145889e+00 -2.80630469e+00 -6.26542807e+00 -1.22305572e+00 -1.43621435e+01 -1.47550020e+01 5.07346916e+00 3.54850292e+00 -6.19672918e+00 4.13913584e+00 8.38359296e-01] [-3.62080812e+00 1.93371534e+00 2.34456301e-01 5.74109411e+00 1.52741928e+01 -1.08638072e+00 8.92667353e-01 2.16347426e-01 -5.01483631e+00 2.67887235e+00 2.38242984e+00 1.27852478e+01 5.44210148e+00 -8.68891335e+00 -1.23357906e+01 5.50155497e+00 3.97502327e+00 -7.52217710e-01 -5.64915323e+00 5.08245170e-01 1.14013968e+01] [-4.98241901e+00 9.64228213e-01 4.34997654e+00 -1.56156456e+00 4.89210796e+00 -3.38453794e+00 -3.44865274e+00 5.72052383e+00 8.00296307e+00 7.21434498e+00 4.62209845e+00 7.96918869e+00 -1.90252030e+00 4.67287207e+00 1.38653336e+01 3.05483437e+00 7.88788676e-01 -5.93918371e+00 -1.69207764e+00 7.73768187e+00 
5.12584972e+00] [ 7.97432995e+00 -1.04248953e+01 -3.18021441e+00 -1.01046443e+00 -7.79724061e-01 -1.52454674e+00 -1.55995283e+01 5.21551752e+00 -3.30277944e+00 -3.18429112e+00 -1.12329292e+01 8.17710590e+00 6.49210310e+00 -1.85802317e+00 -1.15631664e+00 -9.53126049e+00 2.97269493e-01 2.65450096e+00 -1.44785500e+01 4.14742804e+00 -1.70612478e+00] [ 1.21757622e+01 -1.27604837e+01 -6.95025206e+00 5.72766733e+00 2.68446660e+00 2.27201176e+00 7.55772305e+00 -4.83836889e+00 2.88698673e+00 -6.27962971e+00 4.54977465e+00 2.19030809e+00 7.94021416e+00 -8.76382411e-01 8.88742805e-01 -3.21159816e+00 -1.64125896e+00 2.78155017e+00 -2.19092941e+00 -3.67486072e+00 -1.03472328e+01] [-7.97829866e+00 1.27699547e+01 8.14212978e-01 -1.87353249e+01 -6.37362242e-01 -1.57078075e+00 -1.22219048e-01 1.86205852e+00 4.00268793e+00 -5.85614443e+00 8.74165916e+00 6.40719318e+00 2.53598094e+00 -8.45652759e-01 -1.16332893e+01 8.59491634e+00 1.06889868e+01 3.30672169e+00 3.73441696e+00 1.59787893e+00 -1.01945448e+01] [-9.93416977e+00 -5.24168015e+00 -6.40124083e+00 -3.83785200e+00 2.30723166e+00 7.08045197e+00 -1.09885716e+00 -5.58050871e+00 1.53722858e+00 -1.06693144e+01 -6.69586039e+00 1.32666321e+01 5.39771557e+00 -9.13712978e+00 -6.20399952e-01 4.97408658e-01 -5.09841108e+00 4.69358158e+00 2.87733316e+00 -9.90002632e+00 -5.55530357e+00] [-1.61968553e+00 1.50782890e+01 -4.14070272e+00 9.07298660e+00 6.50290203e+00 -7.59006596e+00 -5.73598814e+00 -2.43371701e+00 3.85755920e+00 -3.28716964e-01 7.84431398e-01 -4.51662016e+00 2.94514298e-01 -8.57763958e+00 2.84364283e-01 -8.29961491e+00 2.17169046e+00 5.78995609e+00 2.42136049e+00 6.43954426e-03 1.53136253e+00]]] [[[-7.55619764e+00 1.41397314e+01 -1.64650822e+00 1.16359653e+01 -8.73427093e-01 8.30074430e-01 -5.66039026e-01 7.74497271e+00 2.20279360e+00 7.47567940e+00 1.49915409e+01 2.43208170e+00 8.15191269e+00 -7.41448450e+00 4.33176565e+00 -1.22359009e+01 -5.68564081e+00 5.72870255e+00 8.83196735e+00 -6.92298591e-01 -3.89865220e-01] [ 
8.38147163e+00 -4.23829842e+00 -1.14075584e+01 1.33007107e+01 4.91796684e+00 4.21247435e+00 5.69258738e+00 -5.24305522e-01 6.17390585e+00 -4.02825975e+00 -2.33227396e+00 1.10324316e+01 -1.47956169e+00 -3.43736553e+00 1.31803489e+00 8.83996201e+00 9.78312206e+00 8.48989785e-01 -7.88367367e+00 9.13275528e+00 6.63723183e+00] [ 2.57928181e+00 -1.74943578e+00 8.60201955e-01 -1.17468946e-01 -2.75274396e+00 -6.00839043e+00 -3.94322157e+00 -1.39971626e+00 6.78583384e+00 -5.51915550e+00 6.26934958e+00 -4.47501659e+00 -6.84976339e+00 1.56346264e+01 -1.31133592e+00 1.11479645e+01 -7.49912214e+00 -3.50487423e+00 -1.89651692e+00 -9.57822227e+00 -1.23593092e+01] [-1.43677874e+01 -1.60993731e+00 1.15373564e+01 7.87893391e+00 -1.59374495e+01 3.99677372e+00 8.69547653e+00 -3.43036366e+00 -5.08063459e+00 -2.27382636e+00 8.44223857e-01 -3.91439706e-01 -1.28417301e+01 1.42208648e+00 5.03057528e+00 6.53204727e+00 4.32644412e-02 1.04450762e+00 1.83245575e+00 2.38078624e-01 -1.14255178e+00] [ 3.77399898e+00 -2.15771103e+00 6.28795815e+00 -6.48992157e+00 1.06447160e-01 -1.06959152e+01 2.09488750e+00 4.54839087e+00 -1.40708447e+01 3.23431158e+00 -5.81085968e+00 -4.71204424e+00 -6.35862112e-01 1.59138393e+00 4.67533016e+00 1.32205391e+01 3.81866789e+00 1.02303123e+01 -3.04799151e+00 -1.33758793e+01 5.18585014e+00] [ 8.77681351e+00 7.80035973e+00 -5.61301517e+00 7.46403933e+00 1.95844066e+00 1.80721784e+00 7.48377800e+00 7.18944359e+00 -7.51720953e+00 -6.33510351e+00 1.00860739e+01 -9.06656837e+00 -6.73584521e-01 -1.08423615e+00 9.91754913e+00 -4.76784527e-01 -3.11263347e+00 -1.42664051e+00 5.50383043e+00 -1.45376027e-01 7.66295969e-01] [-1.48410887e-01 -9.28707314e+00 -2.99501228e+00 -3.98445070e-01 3.15406770e-01 -3.12489176e+00 7.45325506e-01 3.58914399e+00 -4.86637402e+00 -2.70562577e+00 8.22095394e+00 -9.57660437e-01 -6.51502562e+00 -6.20323896e-01 2.99250436e+00 -1.84144306e+00 -5.78051186e+00 -3.83763313e-01 4.87412500e+00 -3.15363884e+00 4.00473452e+00] [ 3.23203468e+00 
-1.97326052e+00 -3.77842116e+00 -3.29152107e+00 -1.38383980e+01 6.86238194e+00 1.37854929e+01 3.14344215e+00 7.55835116e-01 -1.09577787e+00 3.23429918e+00 6.74160624e+00 4.53828096e+00 1.16439974e+00 -1.14598665e+01 4.51392460e+00 -8.66778183e+00 -7.89633369e+00 1.57773721e+00 1.08911180e+00 2.79035378e+00] [-4.28374100e+00 -1.00304747e+00 -3.30435491e+00 5.23528385e+00 -1.13776760e+01 -3.24218345e+00 7.82768965e+00 -3.53245306e+00 -4.84994459e+00 -5.03171825e+00 -1.70453632e+00 1.35293818e+00 -2.36330223e+00 7.24864769e+00 2.18173003e+00 -4.66438591e-01 -2.57794142e+00 4.01098013e+00 -5.12584746e-01 -9.70916843e+00 -1.81639862e+00] [ 1.42329109e+00 -6.40845633e+00 -7.18236923e+00 -3.50437641e+00 1.09841366e+01 -5.32861900e+00 8.82496166e+00 1.14648962e+01 -6.95817327e+00 -1.25775852e+01 -1.60186231e+00 3.03546572e+00 -4.86297178e+00 8.55014229e+00 -3.26187539e+00 8.47143650e+00 -3.98949146e+00 5.69919169e-01 4.06594992e+00 1.60256636e+00 5.41618347e+00] [ 5.79892778e+00 5.09122133e+00 -2.20997620e+00 1.10062141e+01 1.47092724e+00 -5.00644922e+00 -8.76934719e+00 2.62635493e+00 1.40197344e+01 2.90847754e+00 4.52685690e+00 1.58662915e+00 5.86454439e+00 -2.59568542e-01 -4.81952620e+00 -7.77206659e+00 3.36437941e+00 6.13029242e+00 -6.79017258e+00 -1.94056618e+00 -1.28305566e+00] [-1.22097099e+00 1.37531549e-01 5.05400991e+00 1.38338220e+00 2.57171369e+00 6.96293449e+00 -2.73712650e-02 2.57523060e+00 -5.04370546e+00 -1.05934505e+01 2.51605010e+00 7.24628806e-01 4.30082560e+00 3.25220966e+00 7.45280647e+00 2.56069839e-01 2.69576621e+00 -6.29034758e-01 -3.06276727e+00 -7.59730434e+00 2.38655519e+00] [-9.08496857e+00 -1.96080291e+00 2.90430576e-01 -4.06109524e+00 7.37545490e+00 -2.86993551e+00 -5.42973423e+00 1.06512141e+00 -5.24254680e-01 6.48745894e-01 7.11416817e+00 -6.86429930e+00 9.36723518e+00 7.13522673e+00 -1.14497519e+01 5.63454628e+00 2.81493926e+00 -5.72103214e+00 3.24477530e+00 6.36539507e+00 1.41619730e+00] [-3.77578592e+00 -5.59124899e+00 9.49910545e+00 
2.19063544e+00 7.91933489e+00 -2.16926289e+00 -3.41301513e+00 4.89749670e+00 -3.98284405e-01 6.22377968e+00 -1.71370137e+00 9.45366323e-02 -6.22311258e+00 4.29107070e-01 -2.50750041e+00 -4.18309331e-01 -1.25421953e+00 4.13641310e+00 -5.20201445e+00 -1.38347781e+00 9.09806919e+00] [ 8.62147522e+00 3.11219144e+00 -5.22566509e+00 -4.43779850e+00 8.00084591e+00 1.07049918e+00 -2.72569084e+00 8.67081404e-01 4.21101427e+00 5.28148860e-02 4.95688170e-01 5.34335566e+00 8.05400729e-01 4.15651894e+00 -6.96901608e+00 -5.75293350e+00 2.52178669e+00 1.08131428e+01 3.11837101e+00 -8.41665936e+00 5.65213871e+00] [-3.62524915e+00 -3.65624285e+00 4.61679667e-01 -9.36005974e+00 -7.41901875e+00 8.95084381e+00 3.54945481e-01 -1.20097990e+01 4.38177586e+00 3.63894176e+00 -1.17341604e+01 3.11391997e+00 -2.53527045e+00 -6.67736769e-01 5.41059494e+00 -1.71508670e+00 2.74557805e+00 -4.81067955e-01 -1.07706633e+01 1.17280936e+00 -7.93595362e+00] [ 4.29841608e-01 2.98934317e+00 4.81067467e+00 -4.70675051e-01 1.17919865e+01 -1.79134583e+00 -5.36439776e-01 1.82104135e+00 5.26735497e+00 -5.01910973e+00 -4.11176586e+00 9.11958575e-01 1.20607262e+01 6.96810436e+00 -1.17617693e+01 -6.47178555e+00 3.18083119e+00 -2.48767066e+00 -9.26097155e-01 1.27830195e+00 -8.04358768e+00] [-5.64700794e+00 2.28737497e+00 -6.86077404e+00 -1.49644107e-01 -7.73866034e+00 -1.64267182e+00 -1.71026576e+00 -3.90750504e+00 -4.85451174e+00 1.19839621e+01 6.33670378e+00 -6.02620316e+00 -1.48255825e+01 -1.70928407e+00 3.29172325e+00 -3.12012219e+00 4.06789446e+00 -2.36067677e+00 -4.91325712e+00 5.24801683e+00 3.25653195e+00] [-8.57421970e+00 -2.44724512e+00 -1.12918262e+01 1.59145803e+01 1.10974014e+00 -4.97461081e+00 -3.47031879e+00 -3.78175282e+00 2.26751566e+00 -9.40437317e+00 -3.60295385e-01 3.23657155e+00 3.88354588e+00 1.82026732e+00 1.45317249e+01 9.27123165e+00 -1.46596909e+01 -1.39575644e+01 5.78573322e+00 8.92488098e+00 -3.23078299e+00] [ 8.10099125e+00 3.06404901e+00 -8.63891506e+00 -5.27389860e+00 
-9.61169526e-02 6.66282701e+00 -5.47395563e+00 -7.17090786e-01 1.03415232e+01 8.23936701e-01 -1.10292864e+01 2.75275135e+00 6.16344547e+00 -7.79126406e+00 -4.38296318e+00 1.26651692e+00 -4.16030407e+00 -2.04275250e+00 7.11505890e+00 -2.00768441e-01 -8.29615688e+00] [ 6.31589317e+00 -4.38554573e+00 1.01161170e+00 -8.32310867e+00 2.77355313e+00 4.64141369e+00 -2.28076673e+00 -4.11226702e+00 2.10228372e+00 -7.82306004e+00 1.21721087e+01 4.09947395e+00 -2.22170448e+00 -3.65067148e+00 2.24849200e+00 1.51384249e+01 9.46616936e+00 3.57325649e+00 6.03741455e+00 -9.27845097e+00 2.22468019e+00]]]]; ov_res: [[[[ 4.89387417e+00 5.44197226e+00 -7.66255665e+00 5.56361914e-01 -6.27834892e+00 -2.61112952e+00 -5.17650080e+00 -1.33005476e+01 -5.71446991e+00 4.25419617e+00 1.45710354e+01 4.22464705e+00 9.62317407e-01 -1.85477698e+00 5.13959742e+00 -5.31607103e+00 -4.68808126e+00 -8.53631687e+00 4.70523787e+00 1.13199596e+01 5.10529852e+00] [ 2.66284823e+00 -5.71709967e+00 8.84475899e+00 -3.33107185e+00 -4.69897795e+00 1.06437778e+01 -6.08299303e+00 2.35979128e+00 6.47984028e+00 -9.61899519e-01 -1.03251104e+01 3.13845992e+00 6.83668232e+00 1.12382021e+01 -8.75946522e+00 -1.31457758e+01 2.95978379e+00 -5.86303949e+00 5.06905985e+00 -1.83328903e+00 -4.93509007e+00] [-4.78809929e+00 4.76352978e+00 6.77345324e+00 4.32678938e+00 -5.99530697e+00 8.57958984e+00 -1.74923456e+00 -6.06517267e+00 -7.80508852e+00 3.29841375e+00 -7.54578054e-01 -2.97765875e+00 -1.26874466e+01 1.62522583e+01 -3.58937979e+00 -1.33963585e+01 1.47051020e+01 5.90595865e+00 -2.47370934e+00 6.10508978e-01 -1.12591982e+01] [-1.37045825e+00 3.65488863e+00 3.58667970e-02 6.04950523e+00 3.52407789e+00 -3.66854042e-01 -5.59278250e+00 5.31841516e+00 -3.69076729e+00 -2.60123229e+00 -1.28174591e+01 7.78347969e+00 -7.38795757e-01 2.24563551e+00 -3.63196015e+00 -2.32637286e+00 1.34399023e+01 -1.67934865e-01 -1.18618908e+01 4.08815622e+00 5.73339748e+00] [-2.27757645e+00 7.73449945e+00 -9.64404464e-01 -1.56246023e+01 5.16051245e+00 
-3.67666769e+00 3.30677104e+00 -9.37768269e+00 5.40233946e+00 6.03964853e+00 2.88834667e+00 -1.03731871e+01 1.52735710e+00 -2.43011355e-01 4.43008900e+00 1.55331726e+01 -8.67219067e+00 -1.09375277e+01 6.41479206e+00 5.05636597e+00 -7.81753969e+00] [ 1.97903705e+00 -1.73488510e+00 -5.53983879e+00 6.85405922e+00 5.56466532e+00 -9.05689240e+00 1.36785638e+00 -2.86264032e-01 1.89925611e+00 -8.89319420e+00 2.09637809e+00 3.73597288e+00 -1.74523392e+01 6.10152674e+00 1.82699661e+01 -4.93336630e+00 -6.47446346e+00 2.08821225e+00 -5.19232750e+00 -7.24060965e+00 6.16532516e+00] [ 7.43526983e+00 -5.71308613e+00 -3.38021660e+00 -2.49855852e+00 -1.74622583e+00 -1.45668113e+00 3.06162262e+00 -1.76011562e+00 -8.68506312e-01 -1.16859818e+00 -6.74890757e+00 7.54279077e-01 1.02876225e+01 -1.67466009e+00 9.15420818e+00 -4.12536669e+00 -7.49675322e+00 3.46274519e+00 -5.54365110e+00 -8.58022451e-01 -3.86615992e+00] [ 9.49737787e-01 -1.58198559e+00 -2.37888169e+00 -9.03153706e+00 3.79278946e+00 3.99854875e+00 -1.14658613e+01 2.06293300e-01 2.26685834e+00 8.79937887e-01 -1.14683211e+00 3.31014585e+00 4.69549942e+00 3.41557384e+00 -9.00672245e+00 -1.70712817e+00 2.97749805e+00 7.23540449e+00 8.19780731e+00 -5.97061539e+00 4.85511160e+00] [ 6.78940630e+00 4.34163237e+00 -8.18822861e+00 -9.39141655e+00 1.04750156e+01 4.30463314e+00 3.71287012e+00 4.96238279e+00 7.66074240e-01 5.00811768e+00 -6.99810171e+00 -1.28268528e+00 3.54412293e+00 -3.59862828e+00 -8.11623383e+00 2.88639832e+00 -3.77756190e+00 3.76468897e-01 8.28864098e+00 3.55245024e-02 -4.48632431e+00] [ 3.69338822e+00 -2.84787464e+00 1.14493093e+01 3.00573635e+00 -8.62820625e+00 1.13836952e-01 1.14040098e+01 2.01670527e+00 7.58773708e+00 -8.83570766e+00 1.03779209e+00 -3.36004233e+00 -9.02212429e+00 7.41741467e+00 -7.20935154e+00 -4.09632063e+00 9.48496014e-02 -9.30767250e+00 -7.26015997e+00 4.13982201e+00 1.01715612e+00] [ 6.26729345e+00 2.69136095e+00 -1.46229877e+01 1.55799627e+01 5.72590399e+00 -6.64136410e+00 1.69012594e+00 
-2.69144207e-01 -5.98673582e+00 3.40855539e-01 -1.20704818e+00 -3.94913197e-01 5.62116683e-01 -3.20528221e+00 -4.24373341e+00 1.35378575e+00 1.88925946e+00 2.12009144e+00 4.51258516e+00 -4.68232346e+00 1.23729634e+00] [-2.43527532e+00 8.22629261e+00 -2.14098215e+00 -1.02635503e+00 1.19152176e+00 2.14535475e+00 1.29109287e+00 5.31385708e+00 1.51022034e+01 4.67431879e+00 -4.44907904e+00 -5.65865612e+00 -9.59642982e+00 6.65310526e+00 1.29115419e+01 8.15570593e-01 5.90748596e+00 1.25332775e+01 3.93149447e+00 -2.28987503e+00 -5.76952410e+00] [-1.76750088e+00 8.48746204e+00 -3.20423889e+00 5.24456930e+00 -8.30065429e-01 -5.57973528e+00 -3.46417642e+00 -7.16362143e+00 1.41042423e+00 -1.70232844e+00 3.31650710e+00 6.02801180e+00 1.79291975e+00 -4.21258974e+00 -1.08715594e+00 1.19626486e+00 3.11070800e+00 -5.69589949e+00 -3.78392673e+00 -1.06104393e+01 1.26603184e+01] [ 1.44244349e+00 8.45365107e-01 5.84082270e+00 -8.99171829e-01 -1.84565425e+00 -1.02162361e+01 -1.11447597e+00 5.24032354e+00 2.98866940e+00 -4.52827644e+00 -3.99145889e+00 -2.80630469e+00 -6.26542807e+00 -1.22305572e+00 -1.43621435e+01 -1.47550020e+01 5.07346916e+00 3.54850292e+00 -6.19672918e+00 4.13913584e+00 8.38359296e-01] [-3.62080812e+00 1.93371534e+00 2.34456301e-01 5.74109411e+00 1.52741928e+01 -1.08638072e+00 8.92667353e-01 2.16347426e-01 -5.01483631e+00 2.67887235e+00 2.38242984e+00 1.27852478e+01 5.44210148e+00 -8.68891335e+00 -1.23357906e+01 5.50155497e+00 3.97502327e+00 -7.52217710e-01 -5.64915323e+00 5.08245170e-01 1.14013968e+01] [-4.98241901e+00 9.64228213e-01 4.34997654e+00 -1.56156456e+00 4.89210796e+00 -3.38453794e+00 -3.44865274e+00 5.72052383e+00 8.00296307e+00 7.21434498e+00 4.62209845e+00 7.96918869e+00 -1.90252030e+00 4.67287207e+00 1.38653336e+01 3.05483437e+00 7.88788676e-01 -5.93918371e+00 -1.69207764e+00 7.73768187e+00 5.12584972e+00] [ 7.97432995e+00 -1.04248953e+01 -3.18021441e+00 -1.01046443e+00 -7.79724061e-01 -1.52454674e+00 -1.55995283e+01 5.21551752e+00 -3.30277944e+00 
-3.18429112e+00 -1.12329292e+01 8.17710590e+00 6.49210310e+00 -1.85802317e+00 -1.15631664e+00 -9.53126049e+00 2.97269493e-01 2.65450096e+00 -1.44785500e+01 4.14742804e+00 -1.70612478e+00] [ 1.21757622e+01 -1.27604837e+01 -6.95025206e+00 5.72766733e+00 2.68446660e+00 2.27201176e+00 7.55772305e+00 -4.83836889e+00 2.88698673e+00 -6.27962971e+00 4.54977465e+00 2.19030809e+00 7.94021416e+00 -8.76382411e-01 8.88742805e-01 -3.21159816e+00 -1.64125896e+00 2.78155017e+00 -2.19092941e+00 -3.67486072e+00 -1.03472328e+01] [-7.97829866e+00 1.27699547e+01 8.14212978e-01 -1.87353249e+01 -6.37362242e-01 -1.57078075e+00 -1.22219048e-01 1.86205852e+00 4.00268793e+00 -5.85614443e+00 8.74165916e+00 6.40719318e+00 2.53598094e+00 -8.45652759e-01 -1.16332893e+01 8.59491634e+00 1.06889868e+01 3.30672169e+00 3.73441696e+00 1.59787893e+00 -1.01945448e+01] [-9.93416977e+00 -5.24168015e+00 -6.40124083e+00 -3.83785200e+00 2.30723166e+00 7.08045197e+00 -1.09885716e+00 -5.58050871e+00 1.53722858e+00 -1.06693144e+01 -6.69586039e+00 1.32666321e+01 5.39771557e+00 -9.13712978e+00 -6.20399952e-01 4.97408658e-01 -5.09841108e+00 4.69358158e+00 2.87733316e+00 -9.90002632e+00 -5.55530357e+00] [-1.61968553e+00 1.50782890e+01 -4.14070272e+00 9.07298660e+00 6.50290203e+00 -7.59006596e+00 -5.73598814e+00 -2.43371701e+00 3.85755920e+00 -3.28716964e-01 7.84431398e-01 -4.51662016e+00 2.94514298e-01 -8.57763958e+00 2.84364283e-01 -8.29961491e+00 2.17169046e+00 5.78995609e+00 2.42136049e+00 6.43954426e-03 1.53136253e+00]]] [[[-7.55619764e+00 1.41397314e+01 -1.64650822e+00 1.16359653e+01 -8.73427093e-01 8.30074430e-01 -5.66039026e-01 7.74497271e+00 2.20279360e+00 7.47567940e+00 1.49915409e+01 2.43208170e+00 8.15191269e+00 -7.41448450e+00 4.33176565e+00 -1.22359009e+01 -5.68564081e+00 5.72870255e+00 8.83196735e+00 -6.92298591e-01 -3.89865220e-01] [ 8.38147163e+00 -4.23829842e+00 -1.14075584e+01 1.33007107e+01 4.91796684e+00 4.21247435e+00 5.69258738e+00 -5.24305522e-01 6.17390585e+00 -4.02825975e+00 -2.33227396e+00 
1.10324316e+01 -1.47956169e+00 -3.43736553e+00 1.31803489e+00 8.83996201e+00 9.78312206e+00 8.48989785e-01 -7.88367367e+00 9.13275528e+00 6.63723183e+00] [ 2.57928181e+00 -1.74943578e+00 8.60201955e-01 -1.17468946e-01 -2.75274396e+00 -6.00839043e+00 -3.94322157e+00 -1.39971626e+00 6.78583384e+00 -5.51915550e+00 6.26934958e+00 -4.47501659e+00 -6.84976339e+00 1.56346264e+01 -1.31133592e+00 1.11479645e+01 -7.49912214e+00 -3.50487423e+00 -1.89651692e+00 -9.57822227e+00 -1.23593092e+01] [-1.43677874e+01 -1.60993731e+00 1.15373564e+01 7.87893391e+00 -1.59374495e+01 3.99677372e+00 8.69547653e+00 -3.43036366e+00 -5.08063459e+00 -2.27382636e+00 8.44223857e-01 -3.91439706e-01 -1.28417301e+01 1.42208648e+00 5.03057528e+00 6.53204727e+00 4.32644412e-02 1.04450762e+00 1.83245575e+00 2.38078624e-01 -1.14255178e+00] [ 3.77399898e+00 -2.15771103e+00 6.28795815e+00 -6.48992157e+00 1.06447160e-01 -1.06959152e+01 2.09488750e+00 4.54839087e+00 -1.40708447e+01 3.23431158e+00 -5.81085968e+00 -4.71204424e+00 -6.35862112e-01 1.59138393e+00 4.67533016e+00 1.32205391e+01 3.81866789e+00 1.02303123e+01 -3.04799151e+00 -1.33758793e+01 5.18585014e+00] [ 8.77681351e+00 7.80035973e+00 -5.61301517e+00 7.46403933e+00 1.95844066e+00 1.80721784e+00 7.48377800e+00 7.18944359e+00 -7.51720953e+00 -6.33510351e+00 1.00860739e+01 -9.06656837e+00 -6.73584521e-01 -1.08423615e+00 9.91754913e+00 -4.76784527e-01 -3.11263347e+00 -1.42664051e+00 5.50383043e+00 -1.45376027e-01 7.66295969e-01] [-1.48410887e-01 -9.28707314e+00 -2.99501228e+00 -3.98445070e-01 3.15406770e-01 -3.12489176e+00 7.45325506e-01 3.58914399e+00 -4.86637402e+00 -2.70562577e+00 8.22095394e+00 -9.57660437e-01 -6.51502562e+00 -6.20323896e-01 2.99250436e+00 -1.84144306e+00 -5.78051186e+00 -3.83763313e-01 4.87412500e+00 -3.15363884e+00 4.00473452e+00] [ 3.23203468e+00 -1.97326052e+00 -3.77842116e+00 -3.29152107e+00 -1.38383980e+01 6.86238194e+00 1.37854929e+01 3.14344215e+00 7.55835116e-01 -1.09577787e+00 3.23429918e+00 6.74160624e+00 
4.53828096e+00 1.16439974e+00 -1.14598665e+01 4.51392460e+00 -8.66778183e+00 -7.89633369e+00 1.57773721e+00 1.08911180e+00 2.79035378e+00] [-4.28374100e+00 -1.00304747e+00 -3.30435491e+00 5.23528385e+00 -1.13776760e+01 -3.24218345e+00 7.82768965e+00 -3.53245306e+00 -4.84994459e+00 -5.03171825e+00 -1.70453632e+00 1.35293818e+00 -2.36330223e+00 7.24864769e+00 2.18173003e+00 -4.66438591e-01 -2.57794142e+00 4.01098013e+00 -5.12584746e-01 -9.70916843e+00 -1.81639862e+00] [ 1.42329109e+00 -6.40845633e+00 -7.18236923e+00 -3.50437641e+00 1.09841366e+01 -5.32861900e+00 8.82496166e+00 1.14648962e+01 -6.95817327e+00 -1.25775852e+01 -1.60186231e+00 3.03546572e+00 -4.86297178e+00 8.55014229e+00 -3.26187539e+00 8.47143650e+00 -3.98949146e+00 5.69919169e-01 4.06594992e+00 1.60256636e+00 5.41618347e+00] [ 5.79892778e+00 5.09122133e+00 -2.20997620e+00 1.10062141e+01 1.47092724e+00 -5.00644922e+00 -8.76934719e+00 2.62635493e+00 1.40197344e+01 2.90847754e+00 4.52685690e+00 1.58662915e+00 5.86454439e+00 -2.59568542e-01 -4.81952620e+00 -7.77206659e+00 3.36437941e+00 6.13029242e+00 -6.79017258e+00 -1.94056618e+00 -1.28305566e+00] [-1.22097099e+00 1.37531549e-01 5.05400991e+00 1.38338220e+00 2.57171369e+00 6.96293449e+00 -2.73712650e-02 2.57523060e+00 -5.04370546e+00 -1.05934505e+01 2.51605010e+00 7.24628806e-01 4.30082560e+00 3.25220966e+00 7.45280647e+00 2.56069839e-01 2.69576621e+00 -6.29034758e-01 -3.06276727e+00 -7.59730434e+00 2.38655519e+00] [-9.08496857e+00 -1.96080291e+00 2.90430576e-01 -4.06109524e+00 7.37545490e+00 -2.86993551e+00 -5.42973423e+00 1.06512141e+00 -5.24254680e-01 6.48745894e-01 7.11416817e+00 -6.86429930e+00 9.36723518e+00 7.13522673e+00 -1.14497519e+01 5.63454628e+00 2.81493926e+00 -5.72103214e+00 3.24477530e+00 6.36539507e+00 1.41619730e+00] [-3.77578592e+00 -5.59124899e+00 9.49910545e+00 2.19063544e+00 7.91933489e+00 -2.16926289e+00 -3.41301513e+00 4.89749670e+00 -3.98284405e-01 6.22377968e+00 -1.71370137e+00 9.45366323e-02 -6.22311258e+00 4.29107070e-01 
-2.50750041e+00 -4.18309331e-01 -1.25421953e+00 4.13641310e+00 -5.20201445e+00 -1.38347781e+00 9.09806919e+00] [ 8.62147522e+00 3.11219144e+00 -5.22566509e+00 -4.43779850e+00 8.00084591e+00 1.07049918e+00 -2.72569084e+00 8.67081404e-01 4.21101427e+00 5.28148860e-02 4.95688170e-01 5.34335566e+00 8.05400729e-01 4.15651894e+00 -6.96901608e+00 -5.75293350e+00 2.52178669e+00 1.08131428e+01 3.11837101e+00 -8.41665936e+00 5.65213871e+00] [-3.62524915e+00 -3.65624285e+00 4.61679667e-01 -9.36005974e+00 -7.41901875e+00 8.95084381e+00 3.54945481e-01 -1.20097990e+01 4.38177586e+00 3.63894176e+00 -1.17341604e+01 3.11391997e+00 -2.53527045e+00 -6.67736769e-01 5.41059494e+00 -1.71508670e+00 2.74557805e+00 -4.81067955e-01 -1.07706633e+01 1.17280936e+00 -7.93595362e+00] [ 4.29841608e-01 2.98934317e+00 4.81067467e+00 -4.70675051e-01 1.17919865e+01 -1.79134583e+00 -5.36439776e-01 1.82104135e+00 5.26735497e+00 -5.01910973e+00 -4.11176586e+00 9.11958575e-01 1.20607262e+01 6.96810436e+00 -1.17617693e+01 -6.47178555e+00 3.18083119e+00 -2.48767066e+00 -9.26097155e-01 1.27830195e+00 -8.04358768e+00] [-5.64700794e+00 2.28737497e+00 -6.86077404e+00 -1.49644107e-01 -7.73866034e+00 -1.64267182e+00 -1.71026576e+00 -3.90750504e+00 -4.85451174e+00 1.19839621e+01 6.33670378e+00 -6.02620316e+00 -1.48255825e+01 -1.70928407e+00 3.29172325e+00 -3.12012219e+00 4.06789446e+00 -2.36067677e+00 -4.91325712e+00 5.24801683e+00 3.25653195e+00] [-8.57421970e+00 -2.44724512e+00 -1.12918262e+01 1.59145803e+01 1.10974014e+00 -4.97461081e+00 -3.47031879e+00 -3.78175282e+00 2.26751566e+00 -9.40437317e+00 -3.60295385e-01 3.23657155e+00 3.88354588e+00 1.82026732e+00 1.45317249e+01 9.27123165e+00 -1.46596909e+01 -1.39575644e+01 5.78573322e+00 8.92488098e+00 -3.23078299e+00] [ 8.10099125e+00 3.06404901e+00 -8.63891506e+00 -5.27389860e+00 -9.61169526e-02 6.66282701e+00 -5.47395563e+00 -7.17090786e-01 1.03415232e+01 8.23936701e-01 -1.10292864e+01 2.75275135e+00 6.16344547e+00 -7.79126406e+00 -4.38296318e+00 
1.26651692e+00 -4.16030407e+00 -2.04275250e+00 7.11505890e+00 -2.00768441e-01 -8.29615688e+00] [ 6.31589317e+00 -4.38554573e+00 1.01161170e+00 -8.32310867e+00 2.77355313e+00 4.64141369e+00 -2.28076673e+00 -4.11226702e+00 2.10228372e+00 -7.82306004e+00 1.21721087e+01 4.09947395e+00 -2.22170448e+00 -3.65067148e+00 2.24849200e+00 1.51384249e+01 9.46616936e+00 3.57325649e+00 6.03741455e+00 -9.27845097e+00 2.22468019e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': [0, 1], 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_428.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.842225}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%7) fw_re: [[[[ 0.5579554 -3.9063015 10.789436 ... 7.3647857 -3.2733688 -3.7135234 ] [ 3.9247198 -6.0887322 8.756673 ... 0.05079895 8.755582 2.4758856 ] [ 6.320453 5.867108 -6.6147647 ... 9.648829 6.094006 0.05776596] ... [ -1.8215618 -8.296181 0.16972089 ... -7.689614 -0.26791126 -5.534183 ] [ 3.7462347 -1.8696487 -6.0094824 ... -7.9220986 1.4441051 1.8434792 ] [ 7.892657 5.075596 8.130753 ... 7.733668 2.5684686 12.496851 ]]] [[[ 6.4127054 2.3716557 -4.369218 ... -9.234116 -0.63720423 1.9936886 ] [ 1.8364799 -5.320376 -5.4925456 ... 2.5543385 10.349183 -0.05923015] [ 2.3744855 -5.1583133 -11.911541 ... -7.6925073 5.0279803 -6.082209 ] ... [ 6.291921 7.019231 0.21372521 ... 1.8753216 5.0091267 -1.2564695 ] [ -3.9990463 0.18116736 15.062628 ... -4.9271665 3.6921859 -8.338047 ] [ -0.848607 -10.962731 1.4631162 ... -4.3642673 2.2802248 1.198767 ]]]]; ov_res: [[[[ 0.5579554 -3.9063015 10.789436 ... 7.3647857 -3.2733688 -3.7135234 ] [ 3.9247198 -6.0887322 8.756673 ... 0.05079895 8.755582 2.4758856 ] [ 6.320453 5.867108 -6.6147647 ... 9.648829 6.094006 0.05776596] ... [ -1.8215618 -8.296181 0.16972089 ... -7.689614 -0.26791126 -5.534183 ] [ 3.7462347 -1.8696487 -6.0094824 ... -7.9220986 1.4441051 1.8434792 ] [ 7.892657 5.075596 8.130753 ... 
7.733668 2.5684686 12.496851 ]]] [[[ 6.4127054 2.3716557 -4.369218 ... -9.234116 -0.63720423 1.9936886 ] [ 1.8364799 -5.320376 -5.4925456 ... 2.5543385 10.349183 -0.05923015] [ 2.3744855 -5.1583133 -11.911541 ... -7.6925073 5.0279803 -6.082209 ] ... [ 6.291921 7.019231 0.21372521 ... 1.8753216 5.0091267 -1.2564695 ] [ -3.9990463 0.18116736 15.062628 ... -4.9271665 3.6921859 -8.338047 ] [ -0.848607 -10.962731 1.4631162 ... -4.3642673 2.2802248 1.198767 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': [1, 0], 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_430.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.383242}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%7) fw_re: [[[[ 9.84404 -4.496846 3.1992617 ... 2.098421 4.438515 -0.825506 ] [ -4.1842647 0.10117704 3.195957 ... -3.8075366 -8.371972 -1.2680936 ] [-14.23673 -0.4977042 0.40098196 ... 2.5718968 -0.5259321 -3.8805745 ] ... [ -8.637742 2.9957838 -8.137669 ... -2.0026696 -1.4054949 -3.9210143 ] [ -2.208299 -7.1902966 3.0204322 ... 3.4201932 1.5575066 0.4508415 ] [ 0.68768173 -0.19886576 1.1697481 ... 5.3776894 3.444394 -2.137207 ]]] [[[ 0.5942546 -0.6482446 -0.4244812 ... -0.06281325 -4.583992 7.795166 ] [ 2.1948402 3.0809503 4.1958885 ... 3.3336794 -3.157277 3.7799103 ] [ -1.2076869 -5.182885 2.5418 ... 2.0412736 3.159565 -7.693006 ] ... [ -4.908928 -0.31428498 1.9938843 ... 1.9937892 3.5526774 2.334404 ] [ 0.23025614 -1.7600553 5.483246 ... -0.6701386 -2.6828835 -0.4166481 ] [ -9.7270355 7.063416 -2.0423906 ... 4.6477857 -4.776674 -4.9890165 ]]]]; ov_res: [[[[ 9.84404 -4.496846 3.1992617 ... 2.098421 4.438515 -0.825506 ] [ -4.1842647 0.10117704 3.195957 ... -3.8075366 -8.371972 -1.2680936 ] [-14.23673 -0.4977042 0.40098196 ... 2.5718968 -0.5259321 -3.8805745 ] ... [ -8.637742 2.9957838 -8.137669 ... -2.0026696 -1.4054949 -3.9210143 ] [ -2.208299 -7.1902966 3.0204322 ... 3.4201932 1.5575066 0.4508415 ] [ 0.68768173 -0.19886576 1.1697481 ... 
5.3776894 3.444394 -2.137207 ]]] [[[ 0.5942546 -0.6482446 -0.4244812 ... -0.06281325 -4.583992 7.795166 ] [ 2.1948402 3.0809503 4.1958885 ... 3.3336794 -3.157277 3.7799103 ] [ -1.2076869 -5.182885 2.5418 ... 2.0412736 3.159565 -7.693006 ] ... [ -4.908928 -0.31428498 1.9938843 ... 1.9937892 3.5526774 2.334404 ] [ 0.23025614 -1.7600553 5.483246 ... -0.6701386 -2.6828835 -0.4166481 ] [ -9.7270355 7.063416 -2.0423906 ... 4.6477857 -4.776674 -4.9890165 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 'same', 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_432.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.461488}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%7) fw_re: [[[[ 2.5851145 -4.1596956 -7.8081946 ... 4.8314476 -5.009482 7.531679 ] [ -1.6123827 2.9320908 -5.9259696 ... 6.1010313 -7.114729 2.90659 ] [ -0.673019 -10.458259 1.9616525 ... 0.8667443 0.06619927 1.02258 ] ... [ 2.9255626 -2.0222206 2.206696 ... 0.1500425 2.4924285 3.8071465 ] [ -4.428386 6.0606604 9.951482 ... 2.763926 7.093801 -0.5852596 ] [ -9.873534 6.087383 5.473879 ... -3.3489625 5.3715606 4.315738 ]]] [[[ 2.0677097 -1.1968892 5.060899 ... 6.083069 -9.614913 4.5422335 ] [ 2.6229584 -0.28572685 -2.6579983 ... 5.4536858 -5.5079813 2.7536433 ] [ -7.10565 -5.570211 -2.0338356 ... 6.3109546 -1.4733125 -2.1731274 ] ... [ 1.9480529 3.0098574 -4.6826735 ... -0.44741324 -5.9332204 -7.419066 ] [ 0.5391536 -18.679663 -2.6297169 ... -14.707734 8.247095 1.6167395 ] [ 2.276522 3.9100175 1.0989984 ... -4.6252575 4.038356 -0.3139015 ]]]]; ov_res: [[[[ 2.5851145 -4.1596956 -7.8081946 ... 4.8314476 -5.009482 7.531679 ] [ -1.6123827 2.9320908 -5.9259696 ... 6.1010313 -7.114729 2.90659 ] [ -0.673019 -10.458259 1.9616525 ... 0.8667443 0.06619927 1.02258 ] ... [ 2.9255626 -2.0222206 2.206696 ... 0.1500425 2.4924285 3.8071465 ] [ -4.428386 6.0606604 9.951482 ... 2.763926 7.093801 -0.5852596 ] [ -9.873534 6.087383 5.473879 ... 
-3.3489625 5.3715606 4.315738 ]]] [[[ 2.0677097 -1.1968892 5.060899 ... 6.083069 -9.614913 4.5422335 ] [ 2.6229584 -0.28572685 -2.6579983 ... 5.4536858 -5.5079813 2.7536433 ] [ -7.10565 -5.570211 -2.0338356 ... 6.3109546 -1.4733125 -2.1731274 ] ... [ 1.9480529 3.0098574 -4.6826735 ... -0.44741324 -5.9332204 -7.419066 ] [ 0.5391536 -18.679663 -2.6297169 ... -14.707734 8.247095 1.6167395 ] [ 2.276522 3.9100175 1.0989984 ... -4.6252575 4.038356 -0.3139015 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 'valid', 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_434.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.272537}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%7) fw_re: [[[[ -9.91122 -0.6662543 -11.784888 ... 2.3882759 -4.869528 5.017713 ] [ 4.660713 2.0226285 -6.592467 ... 1.8053379 1.4413838 9.388158 ] [ 0.19112492 8.738279 3.093004 ... -2.384816 -1.8779275 3.383137 ] ... [ -1.8987224 1.4553035 -11.7468815 ... -10.594221 5.4234915 0.9174968 ] [ -0.8294337 -1.9678414 -4.5554457 ... -4.4722733 1.5880722 4.0170126 ] [ -1.9278663 -2.208918 -0.60698426 ... -4.8470683 6.654907 2.2221978 ]]] [[[ 0.35252485 2.3723571 -8.052022 ... -6.7661395 5.4690113 3.7630334 ] [ -3.5528197 14.191633 2.5677388 ... -10.701619 -5.251015 0.44776127] [ 1.8422422 -0.72027934 4.2920957 ... 1.6288004 -6.75905 -2.4875126 ] ... [ 4.3192015 -4.91669 -2.871357 ... -6.0546665 -2.9808702 -5.9440794 ] [ -5.60238 -3.746271 -8.392303 ... -0.2871586 6.2201996 -0.7484724 ] [ 0.38023886 -9.209383 -1.9855492 ... -2.631355 1.8362181 0.52306664]]]]; ov_res: [[[[ -9.91122 -0.6662543 -11.784888 ... 2.3882759 -4.869528 5.017713 ] [ 4.660713 2.0226285 -6.592467 ... 1.8053379 1.4413838 9.388158 ] [ 0.19112492 8.738279 3.093004 ... -2.384816 -1.8779275 3.383137 ] ... [ -1.8987224 1.4553035 -11.7468815 ... -10.594221 5.4234915 0.9174968 ] [ -0.8294337 -1.9678414 -4.5554457 ... -4.4722733 1.5880722 4.0170126 ] [ -1.9278663 -2.208918 -0.60698426 ... 
-4.8470683 6.654907 2.2221978 ]]] [[[ 0.35252485 2.3723571 -8.052022 ... -6.7661395 5.4690113 3.7630334 ] [ -3.5528197 14.191633 2.5677388 ... -10.701619 -5.251015 0.44776127] [ 1.8422422 -0.72027934 4.2920957 ... 1.6288004 -6.75905 -2.4875126 ] ... [ 4.3192015 -4.91669 -2.871357 ... -6.0546665 -2.9808702 -5.9440794 ] [ -5.60238 -3.746271 -8.392303 ... -0.2871586 6.2201996 -0.7484724 ] [ 0.38023886 -9.209383 -1.9855492 ... -2.631355 1.8362181 0.52306664]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_436.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %3, %2, %3, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%7) fw_re: [[[[ 4.7940836 -2.5321236 10.081364 ... 0.63425124 0.9531372 -11.243677 ] [ 5.1177454 9.147642 3.2013648 ... -4.3602924 -6.0198894 -6.514556 ] [ 3.2625859 3.1473186 2.0250309 ... 3.9460483 -9.002951 8.181212 ] ... [ 1.5174748 3.313024 -0.36444378 ... -4.9865727 0.28666764 -4.202603 ] [ -7.825305 -7.146279 -3.974377 ... 1.3907936 -3.1283145 -8.361967 ] [ -3.0600376 -0.3461082 -6.3710365 ... -2.1134179 -7.566805 -12.586665 ]]] [[[ 2.157078 -6.0104437 -2.3482842 ... -9.117243 -10.633938 -2.0863025 ] [ -1.519382 -2.4433877 -0.593656 ... -7.978053 -6.8839316 -0.42859346] [ 4.2196274 2.4969194 -8.378132 ... 2.560664 3.7918668 4.0548077 ] ... [ 4.705851 -6.689149 0.1170415 ... 5.3520455 8.381615 -2.079575 ] [ 0.48308146 -6.2696123 1.6870329 ... -0.06789291 4.7710347 -4.48483 ] [ -6.0875163 2.8693829 1.9590158 ... -4.744964 -9.010258 -2.768267 ]]]]; ov_res: [[[[ 4.7940836 -2.5321236 10.081364 ... 0.63425124 0.9531372 -11.243677 ] [ 5.1177454 9.147642 3.2013648 ... -4.3602924 -6.0198894 -6.514556 ] [ 3.2625859 3.1473186 2.0250309 ... 3.9460483 -9.002951 8.181212 ] ... [ 1.5174748 3.313024 -0.36444378 ... -4.9865727 0.28666764 -4.202603 ] [ -7.825305 -7.146279 -3.974377 ... 1.3907936 -3.1283145 -8.361967 ] [ -3.0600376 -0.3461082 -6.3710365 ... -2.1134179 -7.566805 -12.586665 ]]] [[[ 2.157078 -6.0104437 -2.3482842 ... 
-9.117243 -10.633938 -2.0863025 ] [ -1.519382 -2.4433877 -0.593656 ... -7.978053 -6.8839316 -0.42859346] [ 4.2196274 2.4969194 -8.378132 ... 2.560664 3.7918668 4.0548077 ] ... [ 4.705851 -6.689149 0.1170415 ... 5.3520455 8.381615 -2.079575 ] [ 0.48308146 -6.2696123 1.6870329 ... -0.06789291 4.7710347 -4.48483 ] [ -6.0875163 2.8693829 1.9590158 ... -4.744964 -9.010258 -2.768267 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': 2, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_438.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.dilations : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.dilations) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%8) fw_re: [[[[ -4.773125 -1.7330514 -1.8732417 -2.6528373 1.1437774 -2.8973897 3.7391021 7.244777 -1.0566268 2.4768927 9.752723 -3.219042 ] [ -2.2665675 -1.0959346 -14.2831545 9.985263 2.4327574 0.6790401 7.16713 -0.21872856 6.291728 0.8230052 -3.742551 -1.8004423 ] [ -3.4274976 -2.3982756 3.9572136 -0.9840483 -8.465811 2.661812 4.2769628 8.269113 1.795405 -2.1194773 11.043407 -4.8756337 ] [ -2.3226151 -1.6672583 -3.61927 -8.265487 -2.0422795 7.3207927 4.6293583 2.03844 -5.1448474 -2.0586953 -2.8035758 1.3174832 ] [ 1.178569 0.18361679 -0.2856744 -1.669605 5.7131834 -1.196744 7.153132 -8.305401 4.5593863 -3.719559 -1.0410762 -9.602439 ] [ -0.63420695 -2.8286488 -1.9986726 -9.497708 1.3250195 -3.3286 -8.833237 -0.32399768 7.5558276 4.6331964 4.2551208 2.4508011 ] [ 1.8167641 1.032013 -0.10459799 3.8504653 1.4861127 -0.32949013 1.750918 -11.948941 4.622811 -5.0188994 -1.8911151 -0.93718106] [ -2.8857348 7.618478 8.31966 -3.9026585 -1.9570236 8.900538 5.311997 5.802125 -5.3598094 -3.7615554 2.683176 -2.4477696 ] [ -3.9733193 6.0474343 1.7776047 8.80464 3.5486007 2.0770342 -3.0539222 0.8813452 5.510455 -8.714154 0.3751202 2.910928 ] [ -5.997704 -4.0750356 5.907782 2.5284033 13.2002325 -1.8300631 5.631663 3.0103285 -6.359537 -0.11953467 7.9055376 3.321523 ] [ -0.28571782 5.5900893 10.66187 3.4037826 
6.513277 2.7649586 2.2122617 2.8070884 1.0697371 1.7200973 1.6125362 2.331503 ] [ -2.7558422 -6.8048897 -0.15629128 -3.8264484 1.9419231 -3.3858666 -5.974771 8.610826 0.37467885 6.2766404 4.3388376 1.2387583 ]]] [[[ -2.1433878 3.9222212 4.7228465 -0.62103117 -2.6911469 4.358542 -3.45687 -8.041304 -7.918404 0.522607 4.6097445 3.5010846 ] [ 3.1295257 -6.20263 0.54121506 0.37535307 -0.49940386 -5.467129 -9.321433 -2.084838 -3.076541 -0.75764596 -4.195066 -2.9340684 ] [ -0.96111643 -7.468078 2.1883929 -8.9815035 -0.47107023 0.81855583 1.4339681 -10.838368 10.721724 -1.579827 1.5741875 0.5768707 ] [ -5.6771626 7.0155587 -5.9426355 2.0833073 -3.9039793 4.597198 -4.8087745 7.9239388 -1.05017 -4.395751 5.573359 -4.573055 ] [ -2.0801568 9.210844 1.7044768 1.8737563 3.2300866 -2.1996522 -0.6588241 -2.2932153 -1.51301 -0.8890241 -2.7082343 -7.020465 ] [ -3.7072346 -0.46171162 -10.378258 1.3945119 5.051341 -6.110536 3.9397757 1.3386087 2.8816636 -2.3879206 2.9650404 -2.5315332 ] [ -4.391744 4.697202 3.96486 -6.5017385 -6.4524508 -0.46397534 -4.3495574 -6.2668567 0.8269229 2.5538847 1.0986092 5.689867 ] [ -1.074057 -0.72738695 -2.2265677 -2.789471 5.3016706 2.8216076 1.9939636 -1.1291897 -2.0769572 3.635667 3.0940506 5.5897 ] [ -7.1117897 4.7372584 1.6059264 4.8745775 1.4689257 -4.7265286 0.73070663 -3.1323006 -0.32630217 1.488627 -0.5642703 3.4395614 ] [ 3.1192174 3.8714395 -12.589825 -5.984871 8.781366 -5.010794 8.723484 6.7558475 1.4071192 -8.077848 -3.1888072 -6.31227 ] [ -4.6206484 4.522036 2.607869 3.5594969 4.2122874 -0.82227445 1.9355679 4.2513547 6.8086724 2.4045973 -4.8856654 -5.799113 ] [ -5.1021895 2.2101846 -4.523011 1.6892042 8.258326 -0.521545 -5.444612 6.9215927 -10.8385105 -7.2371874 1.501134 2.217203 ]]]]; ov_res: [[[[ -4.773125 -1.7330514 -1.8732417 -2.6528373 1.1437774 -2.8973897 3.7391021 7.244777 -1.0566268 2.4768927 9.752723 -3.219042 ] [ -2.2665675 -1.0959346 -14.2831545 9.985263 2.4327574 0.6790401 7.16713 -0.21872856 6.291728 0.8230052 -3.742551 
-1.8004423 ] [ -3.4274976 -2.3982756 3.9572136 -0.9840483 -8.465811 2.661812 4.2769628 8.269113 1.795405 -2.1194773 11.043407 -4.8756337 ] [ -2.3226151 -1.6672583 -3.61927 -8.265487 -2.0422795 7.3207927 4.6293583 2.03844 -5.1448474 -2.0586953 -2.8035758 1.3174832 ] [ 1.178569 0.18361679 -0.2856744 -1.669605 5.7131834 -1.196744 7.153132 -8.305401 4.5593863 -3.719559 -1.0410762 -9.602439 ] [ -0.63420695 -2.8286488 -1.9986726 -9.497708 1.3250195 -3.3286 -8.833237 -0.32399768 7.5558276 4.6331964 4.2551208 2.4508011 ] [ 1.8167641 1.032013 -0.10459799 3.8504653 1.4861127 -0.32949013 1.750918 -11.948941 4.622811 -5.0188994 -1.8911151 -0.93718106] [ -2.8857348 7.618478 8.31966 -3.9026585 -1.9570236 8.900538 5.311997 5.802125 -5.3598094 -3.7615554 2.683176 -2.4477696 ] [ -3.9733193 6.0474343 1.7776047 8.80464 3.5486007 2.0770342 -3.0539222 0.8813452 5.510455 -8.714154 0.3751202 2.910928 ] [ -5.997704 -4.0750356 5.907782 2.5284033 13.2002325 -1.8300631 5.631663 3.0103285 -6.359537 -0.11953467 7.9055376 3.321523 ] [ -0.28571782 5.5900893 10.66187 3.4037826 6.513277 2.7649586 2.2122617 2.8070884 1.0697371 1.7200973 1.6125362 2.331503 ] [ -2.7558422 -6.8048897 -0.15629128 -3.8264484 1.9419231 -3.3858666 -5.974771 8.610826 0.37467885 6.2766404 4.3388376 1.2387583 ]]] [[[ -2.1433878 3.9222212 4.7228465 -0.62103117 -2.6911469 4.358542 -3.45687 -8.041304 -7.918404 0.522607 4.6097445 3.5010846 ] [ 3.1295257 -6.20263 0.54121506 0.37535307 -0.49940386 -5.467129 -9.321433 -2.084838 -3.076541 -0.75764596 -4.195066 -2.9340684 ] [ -0.96111643 -7.468078 2.1883929 -8.9815035 -0.47107023 0.81855583 1.4339681 -10.838368 10.721724 -1.579827 1.5741875 0.5768707 ] [ -5.6771626 7.0155587 -5.9426355 2.0833073 -3.9039793 4.597198 -4.8087745 7.9239388 -1.05017 -4.395751 5.573359 -4.573055 ] [ -2.0801568 9.210844 1.7044768 1.8737563 3.2300866 -2.1996522 -0.6588241 -2.2932153 -1.51301 -0.8890241 -2.7082343 -7.020465 ] [ -3.7072346 -0.46171162 -10.378258 1.3945119 5.051341 -6.110536 3.9397757 1.3386087 
2.8816636 -2.3879206 2.9650404 -2.5315332 ] [ -4.391744 4.697202 3.96486 -6.5017385 -6.4524508 -0.46397534 -4.3495574 -6.2668567 0.8269229 2.5538847 1.0986092 5.689867 ] [ -1.074057 -0.72738695 -2.2265677 -2.789471 5.3016706 2.8216076 1.9939636 -1.1291897 -2.0769572 3.635667 3.0940506 5.5897 ] [ -7.1117897 4.7372584 1.6059264 4.8745775 1.4689257 -4.7265286 0.73070663 -3.1323006 -0.32630217 1.488627 -0.5642703 3.4395614 ] [ 3.1192174 3.8714395 -12.589825 -5.984871 8.781366 -5.010794 8.723484 6.7558475 1.4071192 -8.077848 -3.1888072 -6.31227 ] [ -4.6206484 4.522036 2.607869 3.5594969 4.2122874 -0.82227445 1.9355679 4.2513547 6.8086724 2.4045973 -4.8856654 -5.799113 ] [ -5.1021895 2.2101846 -4.523011 1.6892042 8.258326 -0.521545 -5.444612 6.9215927 -10.8385105 -7.2371874 1.501134 2.217203 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 1, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_440.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %6 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %2, %2, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%6) fw_re: [[[[-3.8940926 5.5664387 1.0446379 ... 0.3053731 0.7158815 -1.6941811 ] [ 0.09032101 -0.3425651 1.0655183 ... 7.32968 3.26107 -2.049928 ] [ 3.6799762 -6.496044 5.166837 ... -1.4926251 -1.0832287 2.846353 ] ... [ 6.6356974 -1.7595803 1.3801225 ... 2.6776335 -0.02343633 -0.04642943] [-0.30351445 -0.7418627 -1.5792809 ... -1.0848975 -2.7548041 -1.1075219 ] [-1.9079233 -3.728379 0.2669901 ... -2.866418 -1.6067182 -1.7117372 ]]] [[[ 1.5366477 -2.5929136 0.33510414 ... -2.2886612 4.6646895 -2.3861434 ] [ 6.8465586 -6.1075006 -1.3612747 ... 6.296636 -5.3274093 3.1020167 ] [-5.995545 2.0633893 -9.442705 ... 1.3993232 -8.1856985 -0.68268126] ... [-0.34655574 -3.6254218 4.9103684 ... 1.2606051 2.4828315 -3.755072 ] [-4.836049 -7.1162567 -4.514818 ... 0.1955245 -4.161409 6.490102 ] [-4.373009 2.7850518 -3.1185935 ... -3.7916455 -2.599476 6.3888254 ]]]]; ov_res: [[[[-3.8940926 5.5664387 1.0446379 ... 0.3053731 0.7158815 -1.6941811 ] [ 0.09032101 -0.3425651 1.0655183 ... 7.32968 3.26107 -2.049928 ] [ 3.6799762 -6.496044 5.166837 ... -1.4926251 -1.0832287 2.846353 ] ... [ 6.6356974 -1.7595803 1.3801225 ... 2.6776335 -0.02343633 -0.04642943] [-0.30351445 -0.7418627 -1.5792809 ... -1.0848975 -2.7548041 -1.1075219 ] [-1.9079233 -3.728379 0.2669901 ... -2.866418 -1.6067182 -1.7117372 ]]] [[[ 1.5366477 -2.5929136 0.33510414 ... 
-2.2886612 4.6646895 -2.3861434 ] [ 6.8465586 -6.1075006 -1.3612747 ... 6.296636 -5.3274093 3.1020167 ] [-5.995545 2.0633893 -9.442705 ... 1.3993232 -8.1856985 -0.68268126] ... [-0.34655574 -3.6254218 4.9103684 ... 1.2606051 2.4828315 -3.755072 ] [-4.836049 -7.1162567 -4.514818 ... 0.1955245 -4.161409 6.490102 ] [-4.373009 2.7850518 -3.1185935 ... -3.7916455 -2.599476 6.3888254 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 2, 'groups': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_442.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%8) fw_re: [[[[-2.31851912e+00 -4.78007650e+00 1.11740112e+01 -8.76638532e-01 3.42566204e+00 2.13403511e+00 -2.05268621e+00 6.38186514e-01 2.25114799e+00 7.42673349e+00 1.86167717e-01 -5.18786526e+00 1.16790056e+00 -4.74507189e+00 -1.88297141e+00 4.76870680e+00 -3.06013346e+00 -1.39066494e+00 7.40446997e+00 4.57422972e+00 8.01731110e+00] [ 1.99605608e+00 -4.64386940e+00 2.33327508e+00 -3.69411039e+00 -4.46740770e+00 1.66048348e+00 -5.25684261e+00 -1.94076765e+00 5.45245981e+00 -2.15959311e+00 8.15331554e+00 4.18079662e+00 3.63436866e+00 3.36782980e+00 1.50662098e+01 1.41708541e+00 3.62369776e+00 3.65371442e+00 -4.01749229e+00 -2.40825129e+00 2.64995843e-01] [-9.12664711e-01 -7.70505071e-01 -5.51220989e+00 1.39353418e+00 -1.55833292e+00 4.80008841e+00 1.42687154e+00 -2.69508314e+00 -4.18273896e-01 -4.68892908e+00 -3.49946404e+00 -3.93812209e-01 -9.00967026e+00 5.93459320e+00 -1.08872819e+00 -7.85095024e+00 1.32029648e+01 -1.23014069e+01 8.97782803e+00 -4.82895422e+00 -3.76047015e-01] [ 4.31006956e+00 -2.06066394e+00 9.10953999e+00 1.09677410e+00 7.93721616e-01 8.23297560e-01 3.63519120e+00 3.14355040e+00 -2.47698188e+00 -5.97364046e-02 -3.69252026e-01 1.72025979e+00 -1.14243484e+00 2.54033709e+00 -6.89068842e+00 5.43362093e+00 -8.07501316e+00 -2.12659717e+00 3.85274261e-01 -3.91534567e+00 
1.20845568e+00] [-1.56596303e+00 -1.11248517e+00 -2.78041649e+00 -3.78330016e+00 1.16297544e-03 -3.35866857e+00 5.36704445e+00 -1.17490679e-01 -1.61870730e+00 -3.62025404e+00 -5.52719688e+00 8.79458427e-01 -3.39681721e+00 1.13112688e+00 -3.03253889e+00 -1.95141518e+00 5.61945581e+00 2.97264552e+00 -2.73582101e-01 6.24906731e+00 2.83552140e-01] [ 8.17054272e-01 5.08643723e+00 -2.04183531e+00 3.07148784e-01 -1.41051662e+00 -1.26850295e+00 -3.03236246e+00 2.58872414e+00 -7.76941478e-01 5.50909090e+00 4.21457005e+00 -3.69420886e+00 4.18779850e+00 2.97259045e+00 2.03905869e+00 2.57670140e+00 -2.57271504e+00 -4.49638653e+00 1.55914378e+00 2.99958682e+00 -5.97883892e+00] [-2.42178440e+00 -4.62700272e+00 -3.79681349e+00 -9.76625741e-01 -4.04813862e+00 -4.98542637e-01 2.13080335e+00 -2.19694662e+00 -6.97769690e+00 -4.48955727e+00 -2.02168417e+00 -3.17908764e+00 6.70217037e-01 1.04716930e+01 8.40210247e+00 -5.78014755e+00 4.67061186e+00 1.03744447e+00 1.18714123e+01 -3.46338606e+00 -7.66150379e+00] [-3.39371538e+00 -3.20126128e+00 3.86184573e+00 -1.49732649e+00 2.29825211e+00 7.04761207e-01 5.30036211e+00 -1.93029165e+00 -4.00651836e+00 -9.17537498e+00 -7.23922014e+00 -1.38604760e+00 2.07002187e+00 -4.47171442e-02 3.31633496e+00 7.08675337e+00 3.46221066e+00 6.46544838e+00 1.76080954e+00 2.42584658e+00 -6.27897072e+00] [-6.70211172e+00 1.64594638e+00 -7.66247940e+00 1.40871358e+00 -5.48247337e-01 5.87337637e+00 -6.23090172e+00 1.96482646e+00 -4.37086725e+00 6.19634008e+00 -2.85111094e+00 -8.70952666e-01 2.05345964e+00 -2.71263814e+00 -2.46973801e+00 1.14185133e+01 -5.55969179e-01 -5.16326487e-01 1.97374713e+00 -1.93472896e-02 -1.35493708e+00] [ 5.28666210e+00 3.73282099e+00 3.86354065e+00 -8.41805649e+00 9.90797520e+00 -1.45579615e+01 5.71684599e+00 -4.55139399e+00 -1.78001869e+00 1.24187934e+00 5.32654524e+00 4.01968098e+00 -6.15960073e+00 7.42034960e+00 -3.44581652e+00 1.07455146e+00 3.93341947e+00 -3.78835350e-01 -5.88257313e-01 -1.02471673e+00 -9.87584877e+00] 
[-2.15475392e+00 -1.15446270e+00 4.66883063e-01 6.11137009e+00 3.24589419e+00 -7.32751083e+00 4.45503235e+00 1.18816388e+00 3.38912129e+00 5.57479858e+00 -1.11808896e+00 -3.41180229e+00 -1.41590619e+00 4.21031904e+00 8.72262383e+00 4.07608479e-01 4.92811012e+00 -7.67978966e-01 4.60796833e+00 5.37674236e+00 -1.01926133e-01] [-9.03403568e+00 1.97965419e+00 -3.21156931e+00 -4.23320144e-01 1.82503331e+00 -3.57644057e+00 -1.91919386e+00 -1.76014709e+00 6.59222221e+00 -9.68794632e+00 -1.08358398e-01 1.56270909e+00 -2.48509645e-02 3.38232994e-01 -7.28377438e+00 -4.87808657e+00 -4.03192425e+00 1.36293077e+00 1.27227616e+00 3.67224860e+00 -3.27041483e+00] [-6.60722673e-01 1.34500790e+00 -8.90410519e+00 -7.36270380e+00 3.38593245e+00 -1.38218117e+00 1.13419533e+00 -4.98418713e+00 -8.00494385e+00 4.78613520e+00 2.65869808e+00 3.78701121e-01 -2.36896992e+00 -6.71313524e+00 -3.68998742e+00 -8.04067707e+00 -1.91412818e+00 2.49084067e+00 -9.20480919e+00 -1.59443879e+00 2.11647725e+00] [-7.19058466e+00 -4.93844652e+00 3.69247770e+00 -4.11935282e+00 -2.97090149e+00 3.13540363e+00 2.78230023e+00 -3.88048506e+00 -3.19953710e-01 -7.92795563e+00 -5.77537298e+00 2.90901089e+00 3.92056331e-02 -1.32542968e-01 9.46427047e-01 -2.98646420e-01 -9.45310020e+00 -3.84212303e+00 1.27916455e+00 -3.83316064e+00 6.63667870e+00] [-4.61003780e+00 -5.04329205e+00 -1.25105202e+00 -6.19418812e+00 -2.06018615e+00 -9.44802046e-01 -2.32098484e+00 3.33410120e+00 -3.32625896e-01 -3.39781642e-01 -1.90019608e+00 -4.89136839e+00 -7.55870676e+00 1.44461215e+00 7.30331898e-01 3.19840640e-01 -8.92760849e+00 2.76684928e+00 2.11249280e+00 -1.07218039e+00 1.80791646e-01] [-6.46601975e-01 3.30478477e+00 2.17855766e-01 -8.13406658e+00 2.45184064e+00 1.01534426e+00 2.55294657e+00 -2.58903909e+00 -3.76946902e+00 -7.05023575e+00 1.11813488e+01 -3.24998212e+00 4.07044697e+00 -3.04237175e+00 -1.18288743e+00 -1.09554644e+01 -6.45758343e+00 -1.20496199e-01 4.97295141e+00 9.93001747e+00 4.09335136e+00] [ 8.21224499e+00 
-6.23467827e+00 -6.43883526e-01 1.65030789e+00 -1.32879150e+00 3.72924972e+00 3.31676053e-03 -5.48197508e+00 -5.94169974e-01 9.52547610e-01 -1.89770007e+00 5.63377666e+00 -1.60650229e+00 -5.44197369e+00 1.05927486e+01 -2.50566781e-01 2.50320840e+00 3.88160259e-01 -2.55204201e+00 -2.04542470e+00 3.64314079e+00] [ 1.28043056e+00 -3.22619867e+00 -1.24879026e+00 3.16535759e+00 1.94415367e+00 -7.63336229e+00 -4.11085248e-01 -3.06316209e+00 1.90262830e+00 6.18839860e-01 6.50934935e+00 5.60059786e+00 -4.24270868e+00 1.25346076e+00 4.98934984e+00 2.29239628e-01 7.00803623e-02 -3.98814702e+00 -3.56368750e-01 4.42047501e+00 -3.35090232e+00] [ 7.50013304e+00 -9.56288517e-01 -1.32994974e+00 -1.42639410e+00 2.99138933e-01 -3.83279634e+00 -1.06951499e+00 3.39682937e-01 4.29837346e-01 -6.32249737e+00 2.17938042e+00 -9.38039243e-01 7.68847167e-01 -2.73490953e+00 8.46993700e-02 -3.45457554e+00 4.30332661e+00 3.22881842e+00 -4.62874222e+00 -1.78701305e+00 -8.12896252e+00] [-3.72351575e+00 7.08341646e+00 5.19743729e+00 3.07880068e+00 -2.33680630e+00 -2.26350498e+00 7.08029222e+00 -1.67476213e+00 4.59254313e+00 3.10838890e+00 8.31407261e+00 -1.03671145e+00 -6.17465496e+00 9.88419950e-01 -5.38363636e-01 -5.14151144e+00 -3.78177714e+00 -1.61850345e+00 -8.61995411e+00 -2.66131759e+00 -9.06045377e-01] [ 4.25130939e+00 5.33972836e+00 -1.60526228e+00 2.06793332e+00 -2.68467498e+00 4.83834410e+00 5.88442898e+00 -4.93601656e+00 8.96216929e-01 -6.45171285e-01 -3.78772664e+00 8.63362432e-01 -9.32372856e+00 3.27524352e+00 -5.02582979e+00 8.91552925e+00 3.99692774e+00 -1.18899536e+00 1.35258341e+00 4.07383251e+00 -5.19343758e+00]]] [[[ 4.11654282e+00 -7.81091547e+00 -5.38600254e+00 1.57105625e+00 -4.24706507e+00 2.59224772e+00 -1.05287850e+00 2.00189090e+00 6.90802050e+00 -2.62803578e+00 1.02053738e+00 2.24587536e+00 6.57743835e+00 -4.46196124e-02 -4.80543566e+00 1.18741810e+00 -7.14647675e+00 2.53214288e+00 2.86831141e+00 -5.77805805e+00 1.74953079e+01] [-5.29326773e+00 3.78794163e-01 
5.10193539e+00 6.09050655e+00 -3.26623380e-01 5.91485834e+00 -1.43328142e+00 3.60433602e+00 -2.16428614e+00 -7.24058151e+00 2.66997862e+00 -3.43268681e+00 -2.53366709e+00 6.34223461e+00 -1.25115728e+00 7.80219221e+00 -5.26272392e+00 -9.10816014e-01 -3.68578464e-01 -3.70857477e+00 2.26121068e+00] [ 2.11517811e+00 -9.52808380e+00 9.17672920e+00 -2.66519725e-01 -1.18248236e+00 1.03643780e+01 -2.17163682e+00 4.88413858e+00 -1.34449422e+00 1.86344516e+00 -3.76899672e+00 -4.62918425e+00 -1.94868386e+00 -5.36821175e+00 -7.98558187e+00 -7.95967484e+00 -2.91376650e-01 2.34106168e-01 2.41496325e+00 4.09946823e+00 4.36636543e+00] [ 1.74828053e+01 8.79430676e+00 3.11987233e+00 9.95799422e-01 4.11786032e+00 6.87282991e+00 -5.94850922e+00 1.17397237e+00 -9.72903550e-01 -3.84395838e-01 5.15247393e+00 1.04599333e+01 1.80132437e+00 1.24148905e+00 2.28909945e+00 -6.30153370e+00 6.81359720e+00 9.96381938e-01 -6.43875599e+00 2.05837131e+00 5.58175850e+00] [ 2.47477531e+00 1.04098082e+01 -9.54058111e-01 1.55706334e+00 -2.88185430e+00 -7.07532787e+00 -5.89903164e+00 5.33783579e+00 3.99702638e-01 -3.73398423e+00 3.43466544e+00 1.88729966e+00 -2.62088895e+00 -4.06114531e+00 -9.88141441e+00 -1.18384278e+00 3.34363055e+00 -5.42617702e+00 4.37098742e+00 -1.78746271e+00 3.21695971e+00] [ 3.51649332e+00 5.36813116e+00 1.14454365e+01 1.33428307e+01 -7.35360670e+00 1.19584095e+00 -4.39637136e+00 -3.11380935e+00 1.51403761e+00 3.86002898e+00 1.06376410e+00 1.61496222e+00 1.94048560e+00 -1.51478481e+00 7.83056164e+00 -7.05806303e+00 -1.07933688e+00 -5.96507263e+00 3.80077267e+00 -2.35401392e+00 -3.64972115e+00] [-3.87387300e+00 -2.11487818e+00 5.46954393e+00 -2.93938637e+00 -2.37011170e+00 8.16157913e+00 8.78374934e-01 -9.50632095e-01 4.79809552e-01 -1.96345970e-01 -1.19501486e+01 2.20450759e+00 1.76778555e+00 3.00430119e-01 2.08763218e+00 -3.71360159e+00 4.20325851e+00 -4.17180681e+00 -3.45134568e+00 -5.68735361e+00 -6.38277769e+00] [ 5.62238646e+00 4.89313126e+00 -8.68258572e+00 -9.65903923e-02 
8.42158031e+00 -6.20588481e-01 4.61619759e+00 4.83170891e+00 -2.17307305e+00 -5.54425001e+00 3.37093306e+00 -9.56292057e+00 7.53576374e+00 -7.19604921e+00 5.65606296e-01 1.35390490e-01 2.51521730e+00 -4.26941395e+00 -6.31586933e+00 -2.56947160e+00 4.58979797e+00] [-2.23692560e+00 -4.50705004e+00 -4.04160881e+00 7.63853312e+00 -2.30002356e+00 4.37228966e+00 -7.51137495e-01 6.17790842e+00 -4.35582256e+00 -3.75055861e+00 9.04458165e-01 4.37144613e+00 1.46546018e+00 -1.47185123e+00 -2.82665038e+00 -6.30836916e+00 -5.36806822e+00 -2.75875640e+00 2.69793421e-01 -2.65950823e+00 1.21384251e+00] [-2.87172151e+00 8.56333447e+00 2.20485902e+00 4.04028320e+00 -5.50513220e+00 -1.96941912e+00 -1.70402431e+00 2.47805452e+00 5.30742109e-01 7.02190971e+00 -6.82926321e+00 2.34470320e+00 -1.88374937e+00 3.12738329e-01 -2.53161716e+00 -4.45610142e+00 -8.24508572e+00 1.92672908e+00 8.88616753e+00 5.40889025e-01 -9.75071788e-01] [ 6.72496736e-01 -6.88461590e+00 -5.53061819e+00 5.58583736e-01 4.05679655e+00 8.37988758e+00 1.71120688e-02 7.81118917e+00 -5.70860577e+00 1.68169808e+00 -3.88136029e+00 -5.34975576e+00 -3.02280068e+00 -1.04379034e+01 7.71957457e-01 -3.56110168e+00 4.29480791e+00 -2.79921740e-01 1.04508713e-01 -8.46713901e-01 1.34669149e+00] [-4.15631622e-01 1.06975830e+00 -5.87379313e+00 4.48423052e+00 1.31460726e+00 1.13752174e+01 -3.27626824e+00 1.81254113e+00 -1.18989694e+00 2.91548586e+00 4.07757750e-03 -4.69069862e+00 -1.85261655e+00 -3.12719941e+00 6.27015877e+00 8.48659039e+00 1.18691845e+01 -3.00382209e+00 -3.17730665e+00 -2.98723727e-01 1.18920860e+01] [ 1.96029639e+00 -3.31355762e+00 1.40815878e+00 -4.04709625e+00 -3.77114844e+00 -4.17356014e+00 -1.95576799e+00 -4.79650927e+00 -4.58658695e+00 -6.27494717e+00 -5.29386091e+00 -3.05467248e+00 -1.10683346e+00 -3.92019778e-01 -5.85813582e-01 -4.50355485e-02 -2.13197207e+00 -4.79972315e+00 7.21357155e+00 -3.83797741e+00 5.28285646e+00] [ 7.40485609e-01 -4.54275370e+00 4.24407291e+00 5.94988441e+00 1.33386707e+00 
-7.30390644e+00 -4.34269142e+00 -4.79825783e+00 -5.69344759e+00 -6.36506915e-01 -1.47640741e+00 -9.04004478e+00 5.77564001e+00 6.24081516e+00 4.31459904e+00 3.30064559e+00 5.20446157e+00 3.18207121e+00 -1.11580944e+00 -4.01409912e+00 4.97670126e+00] [-3.72648895e-01 1.23591495e+00 -6.37986124e-01 1.65264443e-01 2.79615927e+00 -2.01388621e+00 -9.48799670e-01 -2.96140814e+00 -1.49039710e+00 1.31737065e+00 -3.48098469e+00 -4.30826330e+00 -6.85868359e+00 6.39732838e-01 1.79111362e+00 9.01195258e-02 5.46443939e+00 -2.40829015e+00 -2.51417041e+00 -2.83880496e+00 2.85635257e+00] [ 4.51818085e+00 -2.16119692e-01 -3.34605885e+00 2.55377817e+00 1.14393926e+00 -1.54012442e+00 6.05911374e-01 -3.88673544e-01 3.82017754e-02 -2.03775144e+00 -3.20248872e-01 1.87635934e+00 3.15772223e+00 6.25422764e+00 2.04155016e+00 6.48418331e+00 5.37026691e+00 -6.53154278e+00 3.50075698e+00 -2.02298999e+00 7.35041571e+00] [-6.37404397e-02 -3.55970073e+00 -1.03066301e+00 1.05667567e+00 -1.02089119e+00 -5.99459553e+00 9.45571065e-01 1.86700952e+00 -1.87658715e+00 1.44210982e+00 5.46693516e+00 1.25330400e+00 -4.46242094e+00 6.42003965e+00 -1.97240007e+00 -3.43857384e+00 8.28596652e-01 8.27469444e+00 -4.50987482e+00 -2.55823636e+00 -1.07992697e+00] [-4.57567358e+00 -2.77193022e+00 1.09475660e+01 7.60211802e+00 1.23868494e+01 1.76799214e+00 1.18523550e+00 -3.34743309e+00 -3.01580906e+00 6.64190912e+00 2.93071777e-01 4.08455944e+00 -6.09693348e-01 1.32882774e+00 -4.17607641e+00 -3.33016109e+00 6.29849720e+00 7.95894527e+00 5.46734571e+00 6.16865635e+00 -2.54085994e+00] [ 1.90985370e+00 -3.31190228e+00 1.34611750e+00 -9.12116146e+00 3.50972080e+00 -8.99085760e-01 -7.29408979e+00 3.59925175e+00 -4.48191977e+00 1.76155877e+00 8.79534912e+00 4.30213881e+00 -1.87172592e+00 -6.13116741e+00 -1.05064220e+01 -2.58046484e+00 -2.06202984e+00 8.29790497e+00 3.36749840e+00 7.53765583e+00 1.16347635e+00] [ 3.46446228e+00 4.03468657e+00 7.69968927e-02 3.62888455e+00 -1.04365263e+01 -2.80183864e+00 -5.17535329e-01 
5.17209816e+00 2.97765374e+00 -3.29597831e+00 6.67454481e-01 -2.43983531e+00 1.94159165e-01 1.60460651e-01 4.18088961e+00 6.89845133e+00 3.40030742e+00 6.82840729e+00 -6.88202047e+00 -5.33920145e+00 -1.08938560e+01] [-1.53789914e+00 1.47029531e+00 -4.31067181e+00 -3.03168386e-01 -7.61873055e+00 2.03327942e+00 1.62738097e+00 -1.29114795e+00 3.93416286e-01 5.10281181e+00 -6.77613878e+00 3.20859766e+00 2.53698874e+00 5.71031141e+00 4.89243126e+00 8.69066143e+00 -4.89532375e+00 -2.81077623e+00 -1.49156466e+01 -1.98226297e+00 -6.12447643e+00]]]]; ov_res: [[[[-2.31851912e+00 -4.78007650e+00 1.11740112e+01 -8.76638532e-01 3.42566204e+00 2.13403511e+00 -2.05268621e+00 6.38186514e-01 2.25114799e+00 7.42673349e+00 1.86167717e-01 -5.18786526e+00 1.16790056e+00 -4.74507189e+00 -1.88297141e+00 4.76870680e+00 -3.06013346e+00 -1.39066494e+00 7.40446997e+00 4.57422972e+00 8.01731110e+00] [ 1.99605608e+00 -4.64386940e+00 2.33327508e+00 -3.69411039e+00 -4.46740770e+00 1.66048348e+00 -5.25684261e+00 -1.94076765e+00 5.45245981e+00 -2.15959311e+00 8.15331554e+00 4.18079662e+00 3.63436866e+00 3.36782980e+00 1.50662098e+01 1.41708541e+00 3.62369776e+00 3.65371442e+00 -4.01749229e+00 -2.40825129e+00 2.64995843e-01] [-9.12664711e-01 -7.70505071e-01 -5.51220989e+00 1.39353418e+00 -1.55833292e+00 4.80008841e+00 1.42687154e+00 -2.69508314e+00 -4.18273896e-01 -4.68892908e+00 -3.49946404e+00 -3.93812209e-01 -9.00967026e+00 5.93459320e+00 -1.08872819e+00 -7.85095024e+00 1.32029648e+01 -1.23014069e+01 8.97782803e+00 -4.82895422e+00 -3.76047015e-01] [ 4.31006956e+00 -2.06066394e+00 9.10953999e+00 1.09677410e+00 7.93721616e-01 8.23297560e-01 3.63519120e+00 3.14355040e+00 -2.47698188e+00 -5.97364046e-02 -3.69252026e-01 1.72025979e+00 -1.14243484e+00 2.54033709e+00 -6.89068842e+00 5.43362093e+00 -8.07501316e+00 -2.12659717e+00 3.85274261e-01 -3.91534567e+00 1.20845568e+00] [-1.56596303e+00 -1.11248517e+00 -2.78041649e+00 -3.78330016e+00 1.16297544e-03 -3.35866857e+00 5.36704445e+00 -1.17490679e-01 
-1.61870730e+00 -3.62025404e+00 -5.52719688e+00 8.79458427e-01 -3.39681721e+00 1.13112688e+00 -3.03253889e+00 -1.95141518e+00 5.61945581e+00 2.97264552e+00 -2.73582101e-01 6.24906731e+00 2.83552140e-01] [ 8.17054272e-01 5.08643723e+00 -2.04183531e+00 3.07148784e-01 -1.41051662e+00 -1.26850295e+00 -3.03236246e+00 2.58872414e+00 -7.76941478e-01 5.50909090e+00 4.21457005e+00 -3.69420886e+00 4.18779850e+00 2.97259045e+00 2.03905869e+00 2.57670140e+00 -2.57271504e+00 -4.49638653e+00 1.55914378e+00 2.99958682e+00 -5.97883892e+00] [-2.42178440e+00 -4.62700272e+00 -3.79681349e+00 -9.76625741e-01 -4.04813862e+00 -4.98542637e-01 2.13080335e+00 -2.19694662e+00 -6.97769690e+00 -4.48955727e+00 -2.02168417e+00 -3.17908764e+00 6.70217037e-01 1.04716930e+01 8.40210247e+00 -5.78014755e+00 4.67061186e+00 1.03744447e+00 1.18714123e+01 -3.46338606e+00 -7.66150379e+00] [-3.39371538e+00 -3.20126128e+00 3.86184573e+00 -1.49732649e+00 2.29825211e+00 7.04761207e-01 5.30036211e+00 -1.93029165e+00 -4.00651836e+00 -9.17537498e+00 -7.23922014e+00 -1.38604760e+00 2.07002187e+00 -4.47171442e-02 3.31633496e+00 7.08675337e+00 3.46221066e+00 6.46544838e+00 1.76080954e+00 2.42584658e+00 -6.27897072e+00] [-6.70211172e+00 1.64594638e+00 -7.66247940e+00 1.40871358e+00 -5.48247337e-01 5.87337637e+00 -6.23090172e+00 1.96482646e+00 -4.37086725e+00 6.19634008e+00 -2.85111094e+00 -8.70952666e-01 2.05345964e+00 -2.71263814e+00 -2.46973801e+00 1.14185133e+01 -5.55969179e-01 -5.16326487e-01 1.97374713e+00 -1.93472896e-02 -1.35493708e+00] [ 5.28666210e+00 3.73282099e+00 3.86354065e+00 -8.41805649e+00 9.90797520e+00 -1.45579615e+01 5.71684599e+00 -4.55139399e+00 -1.78001869e+00 1.24187934e+00 5.32654524e+00 4.01968098e+00 -6.15960073e+00 7.42034960e+00 -3.44581652e+00 1.07455146e+00 3.93341947e+00 -3.78835350e-01 -5.88257313e-01 -1.02471673e+00 -9.87584877e+00] [-2.15475392e+00 -1.15446270e+00 4.66883063e-01 6.11137009e+00 3.24589419e+00 -7.32751083e+00 4.45503235e+00 1.18816388e+00 3.38912129e+00 5.57479858e+00 
-1.11808896e+00 -3.41180229e+00 -1.41590619e+00 4.21031904e+00 8.72262383e+00 4.07608479e-01 4.92811012e+00 -7.67978966e-01 4.60796833e+00 5.37674236e+00 -1.01926133e-01] [-9.03403568e+00 1.97965419e+00 -3.21156931e+00 -4.23320144e-01 1.82503331e+00 -3.57644057e+00 -1.91919386e+00 -1.76014709e+00 6.59222221e+00 -9.68794632e+00 -1.08358398e-01 1.56270909e+00 -2.48509645e-02 3.38232994e-01 -7.28377438e+00 -4.87808657e+00 -4.03192425e+00 1.36293077e+00 1.27227616e+00 3.67224860e+00 -3.27041483e+00] [-6.60722673e-01 1.34500790e+00 -8.90410519e+00 -7.36270380e+00 3.38593245e+00 -1.38218117e+00 1.13419533e+00 -4.98418713e+00 -8.00494385e+00 4.78613520e+00 2.65869808e+00 3.78701121e-01 -2.36896992e+00 -6.71313524e+00 -3.68998742e+00 -8.04067707e+00 -1.91412818e+00 2.49084067e+00 -9.20480919e+00 -1.59443879e+00 2.11647725e+00] [-7.19058466e+00 -4.93844652e+00 3.69247770e+00 -4.11935282e+00 -2.97090149e+00 3.13540363e+00 2.78230023e+00 -3.88048506e+00 -3.19953710e-01 -7.92795563e+00 -5.77537298e+00 2.90901089e+00 3.92056331e-02 -1.32542968e-01 9.46427047e-01 -2.98646420e-01 -9.45310020e+00 -3.84212303e+00 1.27916455e+00 -3.83316064e+00 6.63667870e+00] [-4.61003780e+00 -5.04329205e+00 -1.25105202e+00 -6.19418812e+00 -2.06018615e+00 -9.44802046e-01 -2.32098484e+00 3.33410120e+00 -3.32625896e-01 -3.39781642e-01 -1.90019608e+00 -4.89136839e+00 -7.55870676e+00 1.44461215e+00 7.30331898e-01 3.19840640e-01 -8.92760849e+00 2.76684928e+00 2.11249280e+00 -1.07218039e+00 1.80791646e-01] [-6.46601975e-01 3.30478477e+00 2.17855766e-01 -8.13406658e+00 2.45184064e+00 1.01534426e+00 2.55294657e+00 -2.58903909e+00 -3.76946902e+00 -7.05023575e+00 1.11813488e+01 -3.24998212e+00 4.07044697e+00 -3.04237175e+00 -1.18288743e+00 -1.09554644e+01 -6.45758343e+00 -1.20496199e-01 4.97295141e+00 9.93001747e+00 4.09335136e+00] [ 8.21224499e+00 -6.23467827e+00 -6.43883526e-01 1.65030789e+00 -1.32879150e+00 3.72924972e+00 3.31676053e-03 -5.48197508e+00 -5.94169974e-01 9.52547610e-01 -1.89770007e+00 
5.63377666e+00 -1.60650229e+00 -5.44197369e+00 1.05927486e+01 -2.50566781e-01 2.50320840e+00 3.88160259e-01 -2.55204201e+00 -2.04542470e+00 3.64314079e+00] [ 1.28043056e+00 -3.22619867e+00 -1.24879026e+00 3.16535759e+00 1.94415367e+00 -7.63336229e+00 -4.11085248e-01 -3.06316209e+00 1.90262830e+00 6.18839860e-01 6.50934935e+00 5.60059786e+00 -4.24270868e+00 1.25346076e+00 4.98934984e+00 2.29239628e-01 7.00803623e-02 -3.98814702e+00 -3.56368750e-01 4.42047501e+00 -3.35090232e+00] [ 7.50013304e+00 -9.56288517e-01 -1.32994974e+00 -1.42639410e+00 2.99138933e-01 -3.83279634e+00 -1.06951499e+00 3.39682937e-01 4.29837346e-01 -6.32249737e+00 2.17938042e+00 -9.38039243e-01 7.68847167e-01 -2.73490953e+00 8.46993700e-02 -3.45457554e+00 4.30332661e+00 3.22881842e+00 -4.62874222e+00 -1.78701305e+00 -8.12896252e+00] [-3.72351575e+00 7.08341646e+00 5.19743729e+00 3.07880068e+00 -2.33680630e+00 -2.26350498e+00 7.08029222e+00 -1.67476213e+00 4.59254313e+00 3.10838890e+00 8.31407261e+00 -1.03671145e+00 -6.17465496e+00 9.88419950e-01 -5.38363636e-01 -5.14151144e+00 -3.78177714e+00 -1.61850345e+00 -8.61995411e+00 -2.66131759e+00 -9.06045377e-01] [ 4.25130939e+00 5.33972836e+00 -1.60526228e+00 2.06793332e+00 -2.68467498e+00 4.83834410e+00 5.88442898e+00 -4.93601656e+00 8.96216929e-01 -6.45171285e-01 -3.78772664e+00 8.63362432e-01 -9.32372856e+00 3.27524352e+00 -5.02582979e+00 8.91552925e+00 3.99692774e+00 -1.18899536e+00 1.35258341e+00 4.07383251e+00 -5.19343758e+00]]] [[[ 4.11654282e+00 -7.81091547e+00 -5.38600254e+00 1.57105625e+00 -4.24706507e+00 2.59224772e+00 -1.05287850e+00 2.00189090e+00 6.90802050e+00 -2.62803578e+00 1.02053738e+00 2.24587536e+00 6.57743835e+00 -4.46196124e-02 -4.80543566e+00 1.18741810e+00 -7.14647675e+00 2.53214288e+00 2.86831141e+00 -5.77805805e+00 1.74953079e+01] [-5.29326773e+00 3.78794163e-01 5.10193539e+00 6.09050655e+00 -3.26623380e-01 5.91485834e+00 -1.43328142e+00 3.60433602e+00 -2.16428614e+00 -7.24058151e+00 2.66997862e+00 -3.43268681e+00 
-2.53366709e+00 6.34223461e+00 -1.25115728e+00 7.80219221e+00 -5.26272392e+00 -9.10816014e-01 -3.68578464e-01 -3.70857477e+00 2.26121068e+00] [ 2.11517811e+00 -9.52808380e+00 9.17672920e+00 -2.66519725e-01 -1.18248236e+00 1.03643780e+01 -2.17163682e+00 4.88413858e+00 -1.34449422e+00 1.86344516e+00 -3.76899672e+00 -4.62918425e+00 -1.94868386e+00 -5.36821175e+00 -7.98558187e+00 -7.95967484e+00 -2.91376650e-01 2.34106168e-01 2.41496325e+00 4.09946823e+00 4.36636543e+00] [ 1.74828053e+01 8.79430676e+00 3.11987233e+00 9.95799422e-01 4.11786032e+00 6.87282991e+00 -5.94850922e+00 1.17397237e+00 -9.72903550e-01 -3.84395838e-01 5.15247393e+00 1.04599333e+01 1.80132437e+00 1.24148905e+00 2.28909945e+00 -6.30153370e+00 6.81359720e+00 9.96381938e-01 -6.43875599e+00 2.05837131e+00 5.58175850e+00] [ 2.47477531e+00 1.04098082e+01 -9.54058111e-01 1.55706334e+00 -2.88185430e+00 -7.07532787e+00 -5.89903164e+00 5.33783579e+00 3.99702638e-01 -3.73398423e+00 3.43466544e+00 1.88729966e+00 -2.62088895e+00 -4.06114531e+00 -9.88141441e+00 -1.18384278e+00 3.34363055e+00 -5.42617702e+00 4.37098742e+00 -1.78746271e+00 3.21695971e+00] [ 3.51649332e+00 5.36813116e+00 1.14454365e+01 1.33428307e+01 -7.35360670e+00 1.19584095e+00 -4.39637136e+00 -3.11380935e+00 1.51403761e+00 3.86002898e+00 1.06376410e+00 1.61496222e+00 1.94048560e+00 -1.51478481e+00 7.83056164e+00 -7.05806303e+00 -1.07933688e+00 -5.96507263e+00 3.80077267e+00 -2.35401392e+00 -3.64972115e+00] [-3.87387300e+00 -2.11487818e+00 5.46954393e+00 -2.93938637e+00 -2.37011170e+00 8.16157913e+00 8.78374934e-01 -9.50632095e-01 4.79809552e-01 -1.96345970e-01 -1.19501486e+01 2.20450759e+00 1.76778555e+00 3.00430119e-01 2.08763218e+00 -3.71360159e+00 4.20325851e+00 -4.17180681e+00 -3.45134568e+00 -5.68735361e+00 -6.38277769e+00] [ 5.62238646e+00 4.89313126e+00 -8.68258572e+00 -9.65903923e-02 8.42158031e+00 -6.20588481e-01 4.61619759e+00 4.83170891e+00 -2.17307305e+00 -5.54425001e+00 3.37093306e+00 -9.56292057e+00 7.53576374e+00 -7.19604921e+00 
5.65606296e-01 1.35390490e-01 2.51521730e+00 -4.26941395e+00 -6.31586933e+00 -2.56947160e+00 4.58979797e+00] [-2.23692560e+00 -4.50705004e+00 -4.04160881e+00 7.63853312e+00 -2.30002356e+00 4.37228966e+00 -7.51137495e-01 6.17790842e+00 -4.35582256e+00 -3.75055861e+00 9.04458165e-01 4.37144613e+00 1.46546018e+00 -1.47185123e+00 -2.82665038e+00 -6.30836916e+00 -5.36806822e+00 -2.75875640e+00 2.69793421e-01 -2.65950823e+00 1.21384251e+00] [-2.87172151e+00 8.56333447e+00 2.20485902e+00 4.04028320e+00 -5.50513220e+00 -1.96941912e+00 -1.70402431e+00 2.47805452e+00 5.30742109e-01 7.02190971e+00 -6.82926321e+00 2.34470320e+00 -1.88374937e+00 3.12738329e-01 -2.53161716e+00 -4.45610142e+00 -8.24508572e+00 1.92672908e+00 8.88616753e+00 5.40889025e-01 -9.75071788e-01] [ 6.72496736e-01 -6.88461590e+00 -5.53061819e+00 5.58583736e-01 4.05679655e+00 8.37988758e+00 1.71120688e-02 7.81118917e+00 -5.70860577e+00 1.68169808e+00 -3.88136029e+00 -5.34975576e+00 -3.02280068e+00 -1.04379034e+01 7.71957457e-01 -3.56110168e+00 4.29480791e+00 -2.79921740e-01 1.04508713e-01 -8.46713901e-01 1.34669149e+00] [-4.15631622e-01 1.06975830e+00 -5.87379313e+00 4.48423052e+00 1.31460726e+00 1.13752174e+01 -3.27626824e+00 1.81254113e+00 -1.18989694e+00 2.91548586e+00 4.07757750e-03 -4.69069862e+00 -1.85261655e+00 -3.12719941e+00 6.27015877e+00 8.48659039e+00 1.18691845e+01 -3.00382209e+00 -3.17730665e+00 -2.98723727e-01 1.18920860e+01] [ 1.96029639e+00 -3.31355762e+00 1.40815878e+00 -4.04709625e+00 -3.77114844e+00 -4.17356014e+00 -1.95576799e+00 -4.79650927e+00 -4.58658695e+00 -6.27494717e+00 -5.29386091e+00 -3.05467248e+00 -1.10683346e+00 -3.92019778e-01 -5.85813582e-01 -4.50355485e-02 -2.13197207e+00 -4.79972315e+00 7.21357155e+00 -3.83797741e+00 5.28285646e+00] [ 7.40485609e-01 -4.54275370e+00 4.24407291e+00 5.94988441e+00 1.33386707e+00 -7.30390644e+00 -4.34269142e+00 -4.79825783e+00 -5.69344759e+00 -6.36506915e-01 -1.47640741e+00 -9.04004478e+00 5.77564001e+00 6.24081516e+00 4.31459904e+00 
3.30064559e+00 5.20446157e+00 3.18207121e+00 -1.11580944e+00 -4.01409912e+00 4.97670126e+00] [-3.72648895e-01 1.23591495e+00 -6.37986124e-01 1.65264443e-01 2.79615927e+00 -2.01388621e+00 -9.48799670e-01 -2.96140814e+00 -1.49039710e+00 1.31737065e+00 -3.48098469e+00 -4.30826330e+00 -6.85868359e+00 6.39732838e-01 1.79111362e+00 9.01195258e-02 5.46443939e+00 -2.40829015e+00 -2.51417041e+00 -2.83880496e+00 2.85635257e+00] [ 4.51818085e+00 -2.16119692e-01 -3.34605885e+00 2.55377817e+00 1.14393926e+00 -1.54012442e+00 6.05911374e-01 -3.88673544e-01 3.82017754e-02 -2.03775144e+00 -3.20248872e-01 1.87635934e+00 3.15772223e+00 6.25422764e+00 2.04155016e+00 6.48418331e+00 5.37026691e+00 -6.53154278e+00 3.50075698e+00 -2.02298999e+00 7.35041571e+00] [-6.37404397e-02 -3.55970073e+00 -1.03066301e+00 1.05667567e+00 -1.02089119e+00 -5.99459553e+00 9.45571065e-01 1.86700952e+00 -1.87658715e+00 1.44210982e+00 5.46693516e+00 1.25330400e+00 -4.46242094e+00 6.42003965e+00 -1.97240007e+00 -3.43857384e+00 8.28596652e-01 8.27469444e+00 -4.50987482e+00 -2.55823636e+00 -1.07992697e+00] [-4.57567358e+00 -2.77193022e+00 1.09475660e+01 7.60211802e+00 1.23868494e+01 1.76799214e+00 1.18523550e+00 -3.34743309e+00 -3.01580906e+00 6.64190912e+00 2.93071777e-01 4.08455944e+00 -6.09693348e-01 1.32882774e+00 -4.17607641e+00 -3.33016109e+00 6.29849720e+00 7.95894527e+00 5.46734571e+00 6.16865635e+00 -2.54085994e+00] [ 1.90985370e+00 -3.31190228e+00 1.34611750e+00 -9.12116146e+00 3.50972080e+00 -8.99085760e-01 -7.29408979e+00 3.59925175e+00 -4.48191977e+00 1.76155877e+00 8.79534912e+00 4.30213881e+00 -1.87172592e+00 -6.13116741e+00 -1.05064220e+01 -2.58046484e+00 -2.06202984e+00 8.29790497e+00 3.36749840e+00 7.53765583e+00 1.16347635e+00] [ 3.46446228e+00 4.03468657e+00 7.69968927e-02 3.62888455e+00 -1.04365263e+01 -2.80183864e+00 -5.17535329e-01 5.17209816e+00 2.97765374e+00 -3.29597831e+00 6.67454481e-01 -2.43983531e+00 1.94159165e-01 1.60460651e-01 4.18088961e+00 6.89845133e+00 3.40030742e+00 
6.82840729e+00 -6.88202047e+00 -5.33920145e+00 -1.08938560e+01] [-1.53789914e+00 1.47029531e+00 -4.31067181e+00 -3.03168386e-01 -7.61873055e+00 2.03327942e+00 1.62738097e+00 -1.29114795e+00 3.93416286e-01 5.10281181e+00 -6.77613878e+00 3.20859766e+00 2.53698874e+00 5.71031141e+00 4.89243126e+00 8.69066143e+00 -4.89532375e+00 -2.81077623e+00 -1.49156466e+01 -1.98226297e+00 -6.12447643e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': [0, 1], 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_444.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%7) fw_re: [[[[ 2.8435280e+00 2.5841572e+00 -1.6170774e+01 ... 9.8363676e+00 3.1913329e-02 2.2869241e+00] [ 1.6214819e+00 4.9879813e+00 2.2587924e+00 ... -5.2100477e+00 -4.6370997e+00 -1.0520364e+00] [ 4.5349860e-01 -5.1153302e+00 2.2800813e+00 ... -1.5098538e+01 7.4329653e+00 -7.4603853e+00] ... [-3.4229748e+00 5.7230639e+00 -1.3401700e+01 ... -5.2438874e+00 -8.1750412e+00 3.8649149e+00] [ 9.6792281e-03 7.8815279e+00 4.5078449e+00 ... 3.5554366e+00 4.8551049e+00 -1.9399685e+00] [ 3.4180369e+00 -3.3616335e+00 -2.1341431e+00 ... -6.4326034e+00 8.3719025e+00 1.5937406e+00]]] [[[-8.5692329e+00 -1.0226183e+01 1.5835861e+01 ... 7.2112352e-01 -3.8273201e+00 -5.1339579e+00] [ 1.3599886e+01 7.3184018e+00 2.9673741e+00 ... -5.9264221e+00 7.0337696e+00 -2.1994768e-01] [ 8.1216059e+00 -4.6715455e+00 1.1280414e+00 ... 5.1537138e-01 -9.0372795e-01 1.3380044e+00] ... [-1.1194558e+00 -1.4407245e+00 -2.5254872e+00 ... -1.3132143e+01 5.2058334e+00 1.4092065e+00] [-7.1808624e+00 -1.1494226e-01 2.1853468e+00 ... 6.2734323e+00 4.8407051e-01 -3.0631285e+00] [-7.7423882e-01 3.6815181e-01 -8.5529718e+00 ... -6.9171844e+00 6.2438524e-01 -5.8294744e+00]]]]; ov_res: [[[[ 2.8435280e+00 2.5841572e+00 -1.6170774e+01 ... 9.8363676e+00 3.1913329e-02 2.2869241e+00] [ 1.6214819e+00 4.9879813e+00 2.2587924e+00 ... 
-5.2100477e+00 -4.6370997e+00 -1.0520364e+00] [ 4.5349860e-01 -5.1153302e+00 2.2800813e+00 ... -1.5098538e+01 7.4329653e+00 -7.4603853e+00] ... [-3.4229748e+00 5.7230639e+00 -1.3401700e+01 ... -5.2438874e+00 -8.1750412e+00 3.8649149e+00] [ 9.6792281e-03 7.8815279e+00 4.5078449e+00 ... 3.5554366e+00 4.8551049e+00 -1.9399685e+00] [ 3.4180369e+00 -3.3616335e+00 -2.1341431e+00 ... -6.4326034e+00 8.3719025e+00 1.5937406e+00]]] [[[-8.5692329e+00 -1.0226183e+01 1.5835861e+01 ... 7.2112352e-01 -3.8273201e+00 -5.1339579e+00] [ 1.3599886e+01 7.3184018e+00 2.9673741e+00 ... -5.9264221e+00 7.0337696e+00 -2.1994768e-01] [ 8.1216059e+00 -4.6715455e+00 1.1280414e+00 ... 5.1537138e-01 -9.0372795e-01 1.3380044e+00] ... [-1.1194558e+00 -1.4407245e+00 -2.5254872e+00 ... -1.3132143e+01 5.2058334e+00 1.4092065e+00] [-7.1808624e+00 -1.1494226e-01 2.1853468e+00 ... 6.2734323e+00 4.8407051e-01 -3.0631285e+00] [-7.7423882e-01 3.6815181e-01 -8.5529718e+00 ... -6.9171844e+00 6.2438524e-01 -5.8294744e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': [1, 0], 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_446.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%7) fw_re: [[[[-3.405704 -5.6538725 -3.1344142 ... 2.1206522 -6.3222995 2.9619596 ] [ 0.1696666 1.1153984 10.231753 ... 6.855359 6.947716 7.485535 ] [ 5.124613 -0.45538512 6.3838286 ... -2.2270994 -2.1996424 4.592257 ] ... [-1.053936 0.18542853 -2.053079 ... -4.801794 0.81775653 -4.3277416 ] [-3.2479792 -1.403265 3.7922297 ... -6.5525293 -3.2092965 -1.3276129 ] [-5.2453074 -1.1267802 1.6070516 ... -1.1381574 -3.853941 -0.08062506]]] [[[-2.6206288 1.7005897 -1.531074 ... 1.3099803 -3.378488 -5.4702206 ] [-3.9199193 0.06737137 -5.8751802 ... -3.711281 -5.9122972 -2.823523 ] [-0.43506253 2.0382514 0.9000207 ... -8.459744 -3.4496648 -0.49939367] ... [-6.701703 -0.05212884 -1.2032794 ... -2.5671372 0.91262627 2.670883 ] [ 6.5140066 5.3172398 -6.3608875 ... 0.28583574 2.0885794 -3.0077322 ] [ 3.1219008 -5.911757 -0.8028309 ... 0.18396626 5.779228 7.5470266 ]]]]; ov_res: [[[[-3.405704 -5.6538725 -3.1344142 ... 2.1206522 -6.3222995 2.9619596 ] [ 0.1696666 1.1153984 10.231753 ... 6.855359 6.947716 7.485535 ] [ 5.124613 -0.45538512 6.3838286 ... -2.2270994 -2.1996424 4.592257 ] ... [-1.053936 0.18542853 -2.053079 ... -4.801794 0.81775653 -4.3277416 ] [-3.2479792 -1.403265 3.7922297 ... -6.5525293 -3.2092965 -1.3276129 ] [-5.2453074 -1.1267802 1.6070516 ... -1.1381574 -3.853941 -0.08062506]]] [[[-2.6206288 1.7005897 -1.531074 ... 
1.3099803 -3.378488 -5.4702206 ] [-3.9199193 0.06737137 -5.8751802 ... -3.711281 -5.9122972 -2.823523 ] [-0.43506253 2.0382514 0.9000207 ... -8.459744 -3.4496648 -0.49939367] ... [-6.701703 -0.05212884 -1.2032794 ... -2.5671372 0.91262627 2.670883 ] [ 6.5140066 5.3172398 -6.3608875 ... 0.28583574 2.0885794 -3.0077322 ] [ 3.1219008 -5.911757 -0.8028309 ... 0.18396626 5.779228 7.5470266 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 'same', 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_448.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%7) fw_re: [[[[ 1.278956 -3.444351 -5.071169 ... -2.8741457 -2.9486096 -1.7700928 ] [ -0.55620414 0.37450832 7.386061 ... 3.4548855 2.5935051 1.0676752 ] [ 1.284746 -8.674387 -1.6239394 ... -2.771728 -0.52289635 0.48694745] ... [ -5.926327 15.691566 -5.996794 ... -1.9960272 6.031967 -0.4462881 ] [ -4.4249387 4.154877 4.146212 ... 4.9869347 1.1751432 -2.0329971 ] [ -2.4138186 2.7835162 1.2829431 ... -3.0664 7.440967 -1.305173 ]]] [[[ -1.2216215 -5.973563 5.9527125 ... 3.8621964 -3.7907019 5.5671663 ] [ 10.179059 5.617705 -7.1887527 ... -4.191956 -3.3327434 -2.0272176 ] [ -7.381129 1.3152903 -11.801707 ... -3.9705489 -1.0460947 -4.177549 ] ... [-10.37563 -0.2631545 0.12254113 ... -5.886618 6.6325626 -6.8629694 ] [ 3.8621485 -1.9699676 3.9232163 ... 10.401759 -7.207172 2.2545156 ] [ 4.167855 -0.69523096 -4.732916 ... 7.129084 -11.352012 5.0837336 ]]]]; ov_res: [[[[ 1.278956 -3.444351 -5.071169 ... -2.8741457 -2.9486096 -1.7700928 ] [ -0.55620414 0.37450832 7.386061 ... 3.4548855 2.5935051 1.0676752 ] [ 1.284746 -8.674387 -1.6239394 ... -2.771728 -0.52289635 0.48694745] ... [ -5.926327 15.691566 -5.996794 ... -1.9960272 6.031967 -0.4462881 ] [ -4.4249387 4.154877 4.146212 ... 4.9869347 1.1751432 -2.0329971 ] [ -2.4138186 2.7835162 1.2829431 ... -3.0664 7.440967 -1.305173 ]]] [[[ -1.2216215 -5.973563 5.9527125 ... 
3.8621964 -3.7907019 5.5671663 ] [ 10.179059 5.617705 -7.1887527 ... -4.191956 -3.3327434 -2.0272176 ] [ -7.381129 1.3152903 -11.801707 ... -3.9705489 -1.0460947 -4.177549 ] ... [-10.37563 -0.2631545 0.12254113 ... -5.886618 6.6325626 -6.8629694 ] [ 3.8621485 -1.9699676 3.9232163 ... 10.401759 -7.207172 2.2545156 ] [ 4.167855 -0.69523096 -4.732916 ... 7.129084 -11.352012 5.0837336 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv2D::test_conv2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': 1, 'pads': 'valid', 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_450.aten_conv2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv2d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:31:23 return (%7) fw_re: [[[[ 3.3887665 1.8037705 1.2599387 ... 6.8613124 4.094219 -3.8484359 ] [ 0.83398736 -0.06220529 -1.3245598 ... 1.4664145 -8.119834 2.2383227 ] [-4.0755124 3.6176503 -1.7483141 ... 2.2134438 -5.1087832 1.1566517 ] ... [-7.512498 -5.6091022 2.3841276 ... -0.87546986 2.4024072 3.9163768 ] [-9.686443 2.9014807 0.12308023 ... -5.047758 4.2078195 -2.5897112 ] [-2.1311889 4.80203 -0.59712774 ... 2.6189728 -0.27044192 -0.7318075 ]]] [[[ 0.4997956 -6.6250587 0.5916924 ... 3.5966523 -0.2669585 -4.3517404 ] [-4.8012486 1.1922823 9.2714205 ... -0.9941274 -4.7573156 -1.5168614 ] [ 1.4296545 7.3291745 -0.666674 ... 7.322929 -6.900412 -8.107184 ] ... [-2.4799867 6.8849616 -4.7522216 ... 1.2045952 7.1979084 -1.2469883 ] [ 1.2084298 -2.2124913 -0.7899432 ... -2.4275055 -7.090752 8.015414 ] [-2.256455 0.14559388 3.3621628 ... 3.1520283 -0.01124401 -1.0209198 ]]]]; ov_res: [[[[ 3.3887665 1.8037705 1.2599387 ... 6.8613124 4.094219 -3.8484359 ] [ 0.83398736 -0.06220529 -1.3245598 ... 1.4664145 -8.119834 2.2383227 ] [-4.0755124 3.6176503 -1.7483141 ... 2.2134438 -5.1087832 1.1566517 ] ... [-7.512498 -5.6091022 2.3841276 ... -0.87546986 2.4024072 3.9163768 ] [-9.686443 2.9014807 0.12308023 ... -5.047758 4.2078195 -2.5897112 ] [-2.1311889 4.80203 -0.59712774 ... 2.6189728 -0.27044192 -0.7318075 ]]] [[[ 0.4997956 -6.6250587 0.5916924 ... 
3.5966523 -0.2669585 -4.3517404 ] [-4.8012486 1.1922823 9.2714205 ... -0.9941274 -4.7573156 -1.5168614 ] [ 1.4296545 7.3291745 -0.666674 ... 7.322929 -6.900412 -8.107184 ] ... [-2.4799867 6.8849616 -4.7522216 ... 1.2045952 7.1979084 -1.2469883 ] [ 1.2084298 -2.2124913 -0.7899432 ... -2.4275055 -7.090752 8.015414 ] [-2.256455 0.14559388 3.3621628 ... 3.1520283 -0.01124401 -1.0209198 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_451.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0]]() %3 : int[] = prim::Constant[value=[1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.0872 -0.4230 0.9142 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %3, %2, %3, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%7) fw_re: [[[-2.7941103 -0.09167635 -0.8644599 1.1052102 3.1325946 7.323676 -2.0653915 0.92813456 0.2866307 1.5207486 -3.2958283 2.416827 6.9585466 2.0132544 -1.2610134 0.8119509 1.9842966 -3.0940232 3.4428577 -0.32185078 1.7543701 2.4089093 -4.081635 ] [ 0.86962605 0.34723097 -1.7701029 -3.8216636 -4.3460207 0.02069816 -0.6232462 -2.216663 -0.2117248 -1.197835 -2.2119718 -0.1669234 -2.567752 2.039581 0.5065724 4.1534686 1.247819 1.3068308 -3.838052 0.18631423 3.3631408 0.62451875 -1.7156609 ] [ 4.750279 -3.5661592 0.2615326 -2.7514129 -2.8285065 0.41751426 3.9970746 -0.6737419 -1.4674165 2.6513772 4.8759522 -2.2795198 1.2499051 6.197851 0.8454121 0.07403481 -8.334794 5.1555676 3.7199004 3.97012 0.6313522 -6.1610265 7.392057 ]] [[ 3.6265078 -0.6661087 -2.28257 2.4921908 2.1147487 5.781031 -3.4650931 4.195947 1.3722254 3.8178234 -0.842801 -1.266035 -0.58633053 0.704417 1.5932486 -2.004745 1.150096 4.178447 0.38222718 -0.5827249 4.343018 3.051131 -3.4763508 ] [ 0.91541255 -0.6097772 -1.8775758 -2.786505 -2.77646 0.39619875 0.92898965 4.652576 6.215578 -2.52659 -3.2103066 -1.6649501 2.3576722 -0.5413629 -4.1515594 -3.1973968 -4.5705404 -2.4935906 2.5219352 -0.9971156 -4.805725 1.2424158 -2.6086652 ] [-2.5029938 2.144259 -1.1629083 -3.357521 9.566189 3.5852313 
-0.09938234 -4.5755777 -0.24690837 1.9285171 6.3829875 3.817891 -8.13162 1.3793101 5.7951393 -3.2822142 -0.5584689 4.3056927 -3.6881042 0.9626357 4.153475 -1.8560531 2.7130082 ]]]; ov_res: [[[-2.7941103 -0.09167635 -0.8644599 1.1052102 3.1325946 7.323676 -2.0653915 0.92813456 0.2866307 1.5207486 -3.2958283 2.416827 6.9585466 2.0132544 -1.2610134 0.8119509 1.9842966 -3.0940232 3.4428577 -0.32185078 1.7543701 2.4089093 -4.081635 ] [ 0.86962605 0.34723097 -1.7701029 -3.8216636 -4.3460207 0.02069816 -0.6232462 -2.216663 -0.2117248 -1.197835 -2.2119718 -0.1669234 -2.567752 2.039581 0.5065724 4.1534686 1.247819 1.3068308 -3.838052 0.18631423 3.3631408 0.62451875 -1.7156609 ] [ 4.750279 -3.5661592 0.2615326 -2.7514129 -2.8285065 0.41751426 3.9970746 -0.6737419 -1.4674165 2.6513772 4.8759522 -2.2795198 1.2499051 6.197851 0.8454121 0.07403481 -8.334794 5.1555676 3.7199004 3.97012 0.6313522 -6.1610265 7.392057 ]] [[ 3.6265078 -0.6661087 -2.28257 2.4921908 2.1147487 5.781031 -3.4650931 4.195947 1.3722254 3.8178234 -0.842801 -1.266035 -0.58633053 0.704417 1.5932486 -2.004745 1.150096 4.178447 0.38222718 -0.5827249 4.343018 3.051131 -3.4763508 ] [ 0.91541255 -0.6097772 -1.8775758 -2.786505 -2.77646 0.39619875 0.92898965 4.652576 6.215578 -2.52659 -3.2103066 -1.6649501 2.3576722 -0.5413629 -4.1515594 -3.1973968 -4.5705404 -2.4935906 2.5219352 -0.9971156 -4.805725 1.2424158 -2.6086652 ] [-2.5029938 2.144259 -1.1629083 -3.357521 9.566189 3.5852313 -0.09938234 -4.5755777 -0.24690837 1.9285171 6.3829875 3.817891 -8.13162 1.3793101 5.7951393 -3.2822142 -0.5584689 4.3056927 -3.6881042 0.9626357 4.153475 -1.8560531 2.7130082 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 3, 3], 'strides': 2, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_453.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %3 : int[] = prim::Constant[value=[0]]() %4 : int[] = prim::Constant[value=[2]]() %self.dilations : int = prim::Constant[value=1]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.3153 1.1464 0.1540 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.dilations) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%8) fw_re: [[[-0.92660105 -0.47386277 4.019828 2.3964977 8.27102 -3.8343625 -1.415511 5.9431596 3.071809 1.6739461 0.17697942 3.5642943 ] [ 1.9579477 1.8135173 2.0368047 1.7378228 3.8227587 1.7734978 1.4436667 2.8689961 1.0548779 3.0029373 1.4180453 0.24035043] [-0.9213463 -0.803923 1.1255177 -7.148758 4.635793 -1.3272246 -4.0305014 0.4194336 -5.441811 1.2507582 -1.4295222 1.3053644 ]] [[-3.4600534 -3.5892544 4.9395723 -1.6515759 -4.390011 4.774587 -1.9794201 4.338526 4.5318 3.8266296 -0.7187313 -2.7294536 ] [-1.4410149 0.04363132 5.3333883 -3.0630732 -1.9094788 3.2329783 -0.38555753 3.481151 3.0036013 2.1302843 0.73835206 -0.3501562 ] [ 1.0364096 -1.6641474 -0.36779317 6.42282 -7.262518 2.2464118 -1.782401 -0.9886863 -2.1338212 -0.3933225 -0.8701457 -2.0118685 ]]]; ov_res: [[[-0.92660105 -0.47386277 4.019828 2.3964977 8.27102 -3.8343625 -1.415511 5.9431596 3.071809 1.6739461 0.17697942 3.5642943 ] [ 1.9579477 1.8135173 2.0368047 1.7378228 3.8227587 1.7734978 1.4436667 2.8689961 1.0548779 3.0029373 1.4180453 0.24035043] [-0.9213463 -0.803923 1.1255177 -7.148758 4.635793 -1.3272246 -4.0305014 0.4194336 -5.441811 1.2507582 -1.4295222 1.3053644 ]] [[-3.4600534 -3.5892544 4.9395723 -1.6515759 -4.390011 4.774587 -1.9794201 
4.338526 4.5318 3.8266296 -0.7187313 -2.7294536 ] [-1.4410149 0.04363132 5.3333883 -3.0630732 -1.9094788 3.2329783 -0.38555753 3.481151 3.0036013 2.1302843 0.73835206 -0.3501562 ] [ 1.0364096 -1.6641474 -0.36779317 6.42282 -7.262518 2.2464118 -1.782401 -0.9886863 -2.1338212 -0.3933225 -0.8701457 -2.0118685 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 1, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_455.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.9174 0.4559 1.3800 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %6 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %2, %2, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%6) fw_re: [[[-3.0153408 -4.5104165 -1.5958946 -6.9421625 -3.932456 -1.8753262 1.440915 -3.7357287 -3.700748 2.319354 -1.5999489 4.858529 -3.3373618 0.36634386 -0.656307 -0.20098191 -2.362163 5.881691 -0.01970398 0.14016402 -2.9523506 -1.761009 1.7905132 -0.09654188 -0.44704196] [ 0.5898878 0.972821 -3.2538698 0.5067275 0.6308526 0.19782656 -2.910268 2.3089604 4.8136897 -0.6836218 3.6254416 -2.4989417 3.203051 0.31690788 -1.9375622 0.57103044 5.1864877 -1.7961247 -1.2156308 -2.014765 2.0549507 1.6273365 -1.3047898 -0.73692447 0.33551183] [ 2.1521254 7.9355173 8.441021 3.1377912 3.064487 -1.6114161 -2.3798757 -1.3568316 5.573674 -2.1882355 -4.592984 -1.6878288 -2.6619902 8.935976 2.9761543 -6.2625217 -3.526678 5.119309 3.5406895 2.9952939 1.7045732 -3.5867739 1.4920264 1.7926466 2.9149218 ]] [[-1.1640475 -3.208549 -1.0301529 -2.4068866 2.1695046 -4.6455836 -1.2495286 0.12456477 -4.803291 5.527423 -0.14131635 3.3494787 -1.4035168 -0.442348 4.0944276 -6.9804487 0.37530386 1.2634379 -3.164497 -1.5785162 -3.1621408 -1.691116 -1.6500005 1.4211143 -0.6848644 ] [-0.40488887 1.2668977 -0.9701101 -0.0211181 -4.5533648 1.7178915 2.0058808 -0.49869758 6.322117 -2.3140213 0.651325 -2.4186332 2.3087838 1.9121909 -5.8120337 4.8052406 1.7637684 -0.44254375 3.844655 2.5667405 1.9142001 -0.25856948 0.6691036 -1.9692945 
0.4263484 ] [ 1.1559359 0.95296246 4.862271 6.742957 3.7052085 -2.3363667 5.631661 -2.48764 -1.3103094 3.222706 -1.7872665 -2.0073674 -2.1574864 6.8719406 5.931469 2.216778 5.165928 -0.15206301 3.3177156 6.146516 -0.25525212 -5.8657737 -0.55914927 1.3089865 2.8675435 ]]]; ov_res: [[[-3.0153408 -4.5104165 -1.5958946 -6.9421625 -3.932456 -1.8753262 1.440915 -3.7357287 -3.700748 2.319354 -1.5999489 4.858529 -3.3373618 0.36634386 -0.656307 -0.20098191 -2.362163 5.881691 -0.01970398 0.14016402 -2.9523506 -1.761009 1.7905132 -0.09654188 -0.44704196] [ 0.5898878 0.972821 -3.2538698 0.5067275 0.6308526 0.19782656 -2.910268 2.3089604 4.8136897 -0.6836218 3.6254416 -2.4989417 3.203051 0.31690788 -1.9375622 0.57103044 5.1864877 -1.7961247 -1.2156308 -2.014765 2.0549507 1.6273365 -1.3047898 -0.73692447 0.33551183] [ 2.1521254 7.9355173 8.441021 3.1377912 3.064487 -1.6114161 -2.3798757 -1.3568316 5.573674 -2.1882355 -4.592984 -1.6878288 -2.6619902 8.935976 2.9761543 -6.2625217 -3.526678 5.119309 3.5406895 2.9952939 1.7045732 -3.5867739 1.4920264 1.7926466 2.9149218 ]] [[-1.1640475 -3.208549 -1.0301529 -2.4068866 2.1695046 -4.6455836 -1.2495286 0.12456477 -4.803291 5.527423 -0.14131635 3.3494787 -1.4035168 -0.442348 4.0944276 -6.9804487 0.37530386 1.2634379 -3.164497 -1.5785162 -3.1621408 -1.691116 -1.6500005 1.4211143 -0.6848644 ] [-0.40488887 1.2668977 -0.9701101 -0.0211181 -4.5533648 1.7178915 2.0058808 -0.49869758 6.322117 -2.3140213 0.651325 -2.4186332 2.3087838 1.9121909 -5.8120337 4.8052406 1.7637684 -0.44254375 3.844655 2.5667405 1.9142001 -0.25856948 0.6691036 -1.9692945 0.4263484 ] [ 1.1559359 0.95296246 4.862271 6.742957 3.7052085 -2.3363667 5.631661 -2.48764 -1.3103094 3.222706 -1.7872665 -2.0073674 -2.1574864 6.8719406 5.931469 2.216778 5.165928 -0.15206301 3.3177156 6.146516 -0.25525212 -5.8657737 -0.55914927 1.3089865 2.8675435 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 2, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_457.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %3 : int[] = prim::Constant[value=[0]]() %4 : int[] = prim::Constant[value=[1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.4066 0.5295 0.1981 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%8) fw_re: [[[ 2.7667193e+00 -4.8501625e+00 -4.4300342e+00 -2.4404833e+00 -3.4767544e+00 5.4579854e+00 8.5791817e+00 -3.5835686e-01 3.8138580e+00 6.8694086e+00 -2.5252292e+00 3.2609084e+00 1.0603026e+00 -3.6807060e+00 -3.3524995e+00 -5.6293888e+00 -5.5513752e-01 1.7771077e+00 9.8948157e-01 3.8759742e+00 7.2585475e-01] [ 8.7356102e-01 7.4879050e+00 -3.8055315e+00 6.6082287e+00 1.0802310e+00 6.3761439e+00 -7.7507252e-01 -5.9420877e+00 -1.9923825e+00 -3.0930638e-03 -3.1429636e+00 -5.2469149e+00 6.6066494e+00 4.7399530e+00 3.2312171e+00 1.7802210e+00 1.0586026e+00 -5.5599065e+00 -1.1112237e+00 6.9677196e+00 -8.4142381e-01] [ 6.4330384e-02 2.6399972e+00 4.6691928e+00 -4.8393908e+00 -4.8813806e+00 -1.9754245e+00 -1.3143197e+00 -4.4325395e+00 2.7688279e+00 -3.8998735e-01 7.0810038e-01 5.2449112e+00 -4.9911934e-01 2.7438002e+00 -9.3256319e-01 -8.0246747e-01 -7.9389137e-01 -1.5278105e+00 2.5002122e+00 2.0277030e+00 2.9881403e+00]] [[-4.2829094e+00 2.0699592e+00 9.9522877e-01 2.7936473e+00 4.0665560e+00 -1.6540058e+00 2.4088850e+00 -1.2539232e+00 -1.8940237e+00 -4.4262251e-01 4.8755021e+00 3.8609800e+00 1.2382646e+00 1.2030393e-02 3.0879982e+00 1.3891223e-01 -9.2220294e-01 2.6906879e+00 -1.4095415e+00 2.9569316e+00 -2.6383836e+00] [ 
5.1504049e+00 5.3984637e+00 2.5033798e+00 1.0024277e+00 -1.6340501e+00 4.3379745e-01 4.3305603e-01 3.8396115e+00 -8.5528851e+00 -2.8679132e-02 -4.7230873e+00 6.2588305e+00 6.0366869e-01 7.9815918e-01 1.0865179e+00 -3.1748116e+00 -2.2869768e+00 1.3037863e+00 9.2835503e+00 -4.4522085e+00 1.9717288e+00] [ 2.2615492e+00 -1.7484373e+00 -3.1742136e+00 -2.9809022e-01 -8.9868128e-01 9.2780334e-01 1.0354313e+00 -7.7323091e-01 2.0687432e+00 -4.6346194e-01 -1.8627857e-01 1.9711211e-02 -8.9428258e-01 -8.0018848e-01 7.9991144e-01 -6.6568726e-01 2.8535070e+00 -3.4888017e-01 1.3094387e+00 4.1192782e-01 -1.0704365e+00]]]; ov_res: [[[ 2.7667193e+00 -4.8501625e+00 -4.4300342e+00 -2.4404833e+00 -3.4767544e+00 5.4579854e+00 8.5791817e+00 -3.5835686e-01 3.8138580e+00 6.8694086e+00 -2.5252292e+00 3.2609084e+00 1.0603026e+00 -3.6807060e+00 -3.3524995e+00 -5.6293888e+00 -5.5513752e-01 1.7771077e+00 9.8948157e-01 3.8759742e+00 7.2585475e-01] [ 8.7356102e-01 7.4879050e+00 -3.8055315e+00 6.6082287e+00 1.0802310e+00 6.3761439e+00 -7.7507252e-01 -5.9420877e+00 -1.9923825e+00 -3.0930638e-03 -3.1429636e+00 -5.2469149e+00 6.6066494e+00 4.7399530e+00 3.2312171e+00 1.7802210e+00 1.0586026e+00 -5.5599065e+00 -1.1112237e+00 6.9677196e+00 -8.4142381e-01] [ 6.4330384e-02 2.6399972e+00 4.6691928e+00 -4.8393908e+00 -4.8813806e+00 -1.9754245e+00 -1.3143197e+00 -4.4325395e+00 2.7688279e+00 -3.8998735e-01 7.0810038e-01 5.2449112e+00 -4.9911934e-01 2.7438002e+00 -9.3256319e-01 -8.0246747e-01 -7.9389137e-01 -1.5278105e+00 2.5002122e+00 2.0277030e+00 2.9881403e+00]] [[-4.2829094e+00 2.0699592e+00 9.9522877e-01 2.7936473e+00 4.0665560e+00 -1.6540058e+00 2.4088850e+00 -1.2539232e+00 -1.8940237e+00 -4.4262251e-01 4.8755021e+00 3.8609800e+00 1.2382646e+00 1.2030393e-02 3.0879982e+00 1.3891223e-01 -9.2220294e-01 2.6906879e+00 -1.4095415e+00 2.9569316e+00 -2.6383836e+00] [ 5.1504049e+00 5.3984637e+00 2.5033798e+00 1.0024277e+00 -1.6340501e+00 4.3379745e-01 4.3305603e-01 3.8396115e+00 -8.5528851e+00 -2.8679132e-02 
-4.7230873e+00 6.2588305e+00 6.0366869e-01 7.9815918e-01 1.0865179e+00 -3.1748116e+00 -2.2869768e+00 1.3037863e+00 9.2835503e+00 -4.4522085e+00 1.9717288e+00] [ 2.2615492e+00 -1.7484373e+00 -3.1742136e+00 -2.9809022e-01 -8.9868128e-01 9.2780334e-01 1.0354313e+00 -7.7323091e-01 2.0687432e+00 -4.6346194e-01 -1.8627857e-01 1.9711211e-02 -8.9428258e-01 -8.0018848e-01 7.9991144e-01 -6.6568726e-01 2.8535070e+00 -3.4888017e-01 1.3094387e+00 4.1192782e-01 -1.0704365e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 'same', 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_459.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.8634 -0.9701 -0.5020 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%7) fw_re: [[[ -0.93058646 0.38151753 -3.65876 -4.7910023 2.794702 -0.08116221 0.8379984 1.774998 -3.679432 -3.7779717 -2.0159001 -2.8672009 3.3794413 -0.6733053 1.6315639 2.1914525 0.05099821 3.8968735 4.4565086 -1.8175426 -4.217634 -1.271759 4.186089 5.3835645 -2.8695605 ] [ -3.08735 -0.23438472 0.08411539 4.0689163 0.51397943 1.1665391 -3.9350495 -0.9965264 -5.7063246 -4.872698 -4.742404 -2.4898348 5.4256926 5.868705 -1.529834 -3.2601776 -1.7613044 -5.792652 -5.7297363 -0.9581971 1.1930999 3.6084375 0.14570832 2.299623 -0.9789201 ] [ -1.9650588 -0.10614178 -1.9571846 -2.78345 -2.6417463 1.346695 0.14678985 0.77213246 4.5325856 6.4812045 2.3524363 -6.333225 -10.7864065 -5.1483474 -3.488693 0.38880438 6.0404058 2.245977 -1.4490986 -3.534928 -2.5677135 -3.1014555 -5.4937344 -1.0903347 3.2916148 ]] [[ -5.561698 -4.480405 2.117177 0.36307585 -4.792719 1.6293178 -1.9481375 0.15704775 4.190563 0.4289862 -3.053176 -1.2467549 -0.1177904 3.3451576 1.390446 -1.5096202 -1.2823255 4.2388735 -4.2895203 -3.2191207 2.542612 1.310287 -0.06753409 -3.359316 0.43299878] [ 1.7038852 4.336413 -1.74703 -9.052233 4.0557923 -1.7668207 1.6868719 -6.536642 -5.1901646 -6.010471 0.49225438 -2.944942 -3.4277477 -6.216239 -6.2316747 -0.99422735 -1.1645162 -6.173278 -2.1259725 
-1.1017234 -3.014429 -3.5539017 -1.8894172 3.6448846 0.21513987] [ -2.2651644 1.1467955 4.6663885 -0.29322684 1.5516562 -3.3226829 -0.09746021 -1.529377 1.9231558 2.0530615 3.6742072 -0.10601938 2.3970854 -0.50685143 0.16037816 0.1796925 3.3491187 1.8700542 3.103472 1.4912128 -2.389336 -2.3231134 -4.087442 -1.2960324 -3.1819208 ]]]; ov_res: [[[ -0.93058646 0.38151753 -3.65876 -4.7910023 2.794702 -0.08116221 0.8379984 1.774998 -3.679432 -3.7779717 -2.0159001 -2.8672009 3.3794413 -0.6733053 1.6315639 2.1914525 0.05099821 3.8968735 4.4565086 -1.8175426 -4.217634 -1.271759 4.186089 5.3835645 -2.8695605 ] [ -3.08735 -0.23438472 0.08411539 4.0689163 0.51397943 1.1665391 -3.9350495 -0.9965264 -5.7063246 -4.872698 -4.742404 -2.4898348 5.4256926 5.868705 -1.529834 -3.2601776 -1.7613044 -5.792652 -5.7297363 -0.9581971 1.1930999 3.6084375 0.14570832 2.299623 -0.9789201 ] [ -1.9650588 -0.10614178 -1.9571846 -2.78345 -2.6417463 1.346695 0.14678985 0.77213246 4.5325856 6.4812045 2.3524363 -6.333225 -10.7864065 -5.1483474 -3.488693 0.38880438 6.0404058 2.245977 -1.4490986 -3.534928 -2.5677135 -3.1014555 -5.4937344 -1.0903347 3.2916148 ]] [[ -5.561698 -4.480405 2.117177 0.36307585 -4.792719 1.6293178 -1.9481375 0.15704775 4.190563 0.4289862 -3.053176 -1.2467549 -0.1177904 3.3451576 1.390446 -1.5096202 -1.2823255 4.2388735 -4.2895203 -3.2191207 2.542612 1.310287 -0.06753409 -3.359316 0.43299878] [ 1.7038852 4.336413 -1.74703 -9.052233 4.0557923 -1.7668207 1.6868719 -6.536642 -5.1901646 -6.010471 0.49225438 -2.944942 -3.4277477 -6.216239 -6.2316747 -0.99422735 -1.1645162 -6.173278 -2.1259725 -1.1017234 -3.014429 -3.5539017 -1.8894172 3.6448846 0.21513987] [ -2.2651644 1.1467955 4.6663885 -0.29322684 1.5516562 -3.3226829 -0.09746021 -1.529377 1.9231558 2.0530615 3.6742072 -0.10601938 2.3970854 -0.50685143 0.16037816 0.1796925 3.3491187 1.8700542 3.103472 1.4912128 -2.389336 -2.3231134 -4.087442 -1.2960324 -3.1819208 ]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 'valid', 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_461.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.9822 0.4813 -1.0776 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%7) fw_re: [[[ 4.3584194e+00 -1.8092246e+00 2.4489284e-02 -2.9704003e+00 6.3461409e+00 -7.4028416e+00 -8.8698328e-01 -1.9873261e-01 -2.5864439e+00 8.7751079e-01 -1.1366262e+00 -1.2811607e-01 -9.4318838e+00 4.6730795e+00 1.1805258e+00 5.5728197e-02 1.7635710e+00 -4.7348204e+00 5.1232290e+00 9.2732406e-01 -3.9195573e+00 1.0481999e+00 -6.6478949e+00] [-4.1653323e+00 -5.9386963e-01 -6.9696468e-01 -5.0898590e+00 -2.7415404e+00 1.0980530e+00 -9.0154940e-01 2.8631430e+00 8.5713893e-01 1.0553961e+00 1.5330842e+00 -3.6720588e+00 -4.3198113e+00 -1.0654163e+00 3.4475586e+00 1.2076359e+00 3.5963795e+00 -1.4728916e-01 -7.2635585e-01 -8.0361962e-03 1.3764346e+00 1.1901637e+00 -9.9424738e-01] [-6.1039691e+00 4.6090980e+00 -5.1354322e+00 2.9139061e+00 -5.5759654e+00 1.8939966e+00 4.4840455e-02 -3.8042235e+00 1.3075711e+00 -1.7727551e+00 1.2910718e+00 -2.8470178e+00 5.7505064e+00 -4.7302513e+00 -1.5265956e+00 -8.0102569e-01 -3.4571252e+00 1.3051418e+00 -2.4698944e+00 1.8957311e+00 3.4742074e+00 1.3315033e+00 4.2118764e+00]] [[-1.6903877e-04 -5.0646276e+00 -5.4931335e+00 -2.7032065e+00 -2.4875293e+00 -6.2258065e-01 4.6442261e+00 -3.1116421e+00 -4.4225168e+00 3.3142097e+00 4.3480816e+00 -7.6197796e+00 2.1679873e+00 -4.7426683e-01 -3.1675413e+00 -4.6061707e+00 
4.0420361e+00 9.6452343e-01 -4.0596385e+00 -9.9308509e-01 -1.8217924e+00 1.7771590e+00 8.1255388e-01] [-3.0543849e+00 6.3687801e-01 -3.7302136e-01 2.8547525e+00 1.5081718e+00 3.2346783e+00 5.0914869e+00 1.3455520e+00 1.2647684e+00 1.0833666e+00 1.3628678e+00 -5.7125622e-01 2.6012280e+00 4.5157905e+00 2.2653289e+00 2.1497450e+00 4.6925354e+00 3.8899672e+00 1.6746066e+00 3.1795597e+00 9.6640098e-01 7.2327590e-01 1.2965149e+00] [-3.1046367e-01 3.7603016e+00 1.3043762e+00 -4.2251744e+00 -9.9380910e-02 -6.2700386e+00 -1.7805188e+00 -2.5169425e+00 7.1777368e-01 -1.3352389e+00 -1.9332175e+00 1.9621285e+00 -3.4778743e+00 -2.2474012e+00 -6.6099839e+00 -1.8007665e+00 -6.1549897e+00 -4.5822816e+00 -9.1670001e-01 -1.4823712e+00 -2.8016167e+00 -6.7187864e-01 -3.6186662e+00]]]; ov_res: [[[ 4.3584194e+00 -1.8092246e+00 2.4489284e-02 -2.9704003e+00 6.3461409e+00 -7.4028416e+00 -8.8698328e-01 -1.9873261e-01 -2.5864439e+00 8.7751079e-01 -1.1366262e+00 -1.2811607e-01 -9.4318838e+00 4.6730795e+00 1.1805258e+00 5.5728197e-02 1.7635710e+00 -4.7348204e+00 5.1232290e+00 9.2732406e-01 -3.9195573e+00 1.0481999e+00 -6.6478949e+00] [-4.1653323e+00 -5.9386963e-01 -6.9696468e-01 -5.0898590e+00 -2.7415404e+00 1.0980530e+00 -9.0154940e-01 2.8631430e+00 8.5713893e-01 1.0553961e+00 1.5330842e+00 -3.6720588e+00 -4.3198113e+00 -1.0654163e+00 3.4475586e+00 1.2076359e+00 3.5963795e+00 -1.4728916e-01 -7.2635585e-01 -8.0361962e-03 1.3764346e+00 1.1901637e+00 -9.9424738e-01] [-6.1039691e+00 4.6090980e+00 -5.1354322e+00 2.9139061e+00 -5.5759654e+00 1.8939966e+00 4.4840455e-02 -3.8042235e+00 1.3075711e+00 -1.7727551e+00 1.2910718e+00 -2.8470178e+00 5.7505064e+00 -4.7302513e+00 -1.5265956e+00 -8.0102569e-01 -3.4571252e+00 1.3051418e+00 -2.4698944e+00 1.8957311e+00 3.4742074e+00 1.3315033e+00 4.2118764e+00]] [[-1.6903877e-04 -5.0646276e+00 -5.4931335e+00 -2.7032065e+00 -2.4875293e+00 -6.2258065e-01 4.6442261e+00 -3.1116421e+00 -4.4225168e+00 3.3142097e+00 4.3480816e+00 -7.6197796e+00 2.1679873e+00 
-4.7426683e-01 -3.1675413e+00 -4.6061707e+00 4.0420361e+00 9.6452343e-01 -4.0596385e+00 -9.9308509e-01 -1.8217924e+00 1.7771590e+00 8.1255388e-01] [-3.0543849e+00 6.3687801e-01 -3.7302136e-01 2.8547525e+00 1.5081718e+00 3.2346783e+00 5.0914869e+00 1.3455520e+00 1.2647684e+00 1.0833666e+00 1.3628678e+00 -5.7125622e-01 2.6012280e+00 4.5157905e+00 2.2653289e+00 2.1497450e+00 4.6925354e+00 3.8899672e+00 1.6746066e+00 3.1795597e+00 9.6640098e-01 7.2327590e-01 1.2965149e+00] [-3.1046367e-01 3.7603016e+00 1.3043762e+00 -4.2251744e+00 -9.9380910e-02 -6.2700386e+00 -1.7805188e+00 -2.5169425e+00 7.1777368e-01 -1.3352389e+00 -1.9332175e+00 1.9621285e+00 -3.4778743e+00 -2.2474012e+00 -6.6099839e+00 -1.8007665e+00 -6.1549897e+00 -4.5822816e+00 -9.1670001e-01 -1.4823712e+00 -2.8016167e+00 -6.7187864e-01 -3.6186662e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_463.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0]]() %3 : int[] = prim::Constant[value=[1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %3, %2, %3, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%7) fw_re: [[[ 8.3381915e-01 -9.4215126e+00 -4.7361341e+00 -2.7429688e+00 -8.0482423e-02 -4.8660426e+00 -4.6242695e+00 5.2983294e+00 1.2557141e+00 -1.4650943e+00 5.0628095e+00 4.1101685e+00 1.2492996e+01 7.6474948e+00 -4.5067959e+00 -2.7933917e+00 3.4327226e+00 -9.4840908e+00 -9.2480097e+00 1.2529104e+01 8.4337254e+00 -9.5927775e-01 -7.8252378e+00] [ 1.9439138e-01 -1.8952811e+00 -4.2674012e+00 4.5692167e+00 -1.0622593e+00 4.2128377e+00 4.5410925e-01 1.1628883e+00 -3.3199401e+00 1.3279864e+00 -1.8084359e+00 -1.7239192e+00 -2.9399147e+00 2.7889738e+00 -3.1160578e-01 -1.4824926e+00 7.1103749e+00 -3.6522651e+00 2.4005969e+00 -5.8169818e+00 5.5189109e+00 2.0857096e+00 1.8700083e+00] [-2.4832678e+00 2.0680828e+00 6.5376526e-01 8.9646572e-01 2.9405041e+00 7.1300882e-01 2.2756934e+00 1.8750602e-01 -2.5678630e+00 -9.6970403e-01 1.1048233e-02 -1.5795124e+00 -3.3771830e+00 -4.0922875e+00 5.0809664e-01 -1.3597711e+00 -6.7210841e-01 1.7246841e+00 3.3241920e+00 -1.0257341e+00 -4.1115127e+00 1.4793842e+00 2.9677651e+00]] [[ 1.7215432e+00 -7.5114810e-01 -6.1053262e+00 -6.2568849e-01 -3.0121677e+00 -3.4288094e+00 7.5607796e+00 2.2503819e+00 -7.1925974e+00 5.1486588e+00 8.7332010e+00 3.4464574e+00 -7.3003244e-01 -5.3505731e-01 -5.1182008e-01 -4.9925647e+00 6.6336310e-01 1.0263058e+01 2.6969502e+00 -2.8972521e+00 -2.9024870e+00 -4.6249967e+00 1.5005653e+00] [ 3.4804084e+00 
-2.5204058e+00 -8.5306090e-01 1.3000140e+00 -3.8740963e-01 -1.0006067e+00 4.5070496e+00 -3.0299178e-01 -3.4724944e+00 -7.9327536e-01 8.6905593e-01 -1.5724051e+00 1.1897845e+00 3.5723188e+00 -2.6699850e-01 1.3605627e+00 -2.8126543e+00 -2.4196599e+00 -1.8133438e+00 -1.6578832e+00 2.2481897e-01 9.8415709e-01 -5.0591407e+00] [-1.7015934e-01 1.8568439e+00 1.3574339e+00 1.6904383e+00 1.1421262e+00 -1.4547260e-01 -9.1966701e-01 -1.0020641e+00 -6.2581241e-02 -1.3658661e+00 -7.4538958e-01 -1.0757846e+00 -2.1283636e+00 1.1297812e+00 2.2512002e+00 8.6849749e-01 -4.8132959e-01 -3.5260963e+00 -2.8017635e+00 6.2322778e-01 7.3947030e-01 2.5759072e+00 -4.8573804e-01]]]; ov_res: [[[ 8.3381915e-01 -9.4215126e+00 -4.7361341e+00 -2.7429688e+00 -8.0482423e-02 -4.8660426e+00 -4.6242695e+00 5.2983294e+00 1.2557141e+00 -1.4650943e+00 5.0628095e+00 4.1101685e+00 1.2492996e+01 7.6474948e+00 -4.5067959e+00 -2.7933917e+00 3.4327226e+00 -9.4840908e+00 -9.2480097e+00 1.2529104e+01 8.4337254e+00 -9.5927775e-01 -7.8252378e+00] [ 1.9439138e-01 -1.8952811e+00 -4.2674012e+00 4.5692167e+00 -1.0622593e+00 4.2128377e+00 4.5410925e-01 1.1628883e+00 -3.3199401e+00 1.3279864e+00 -1.8084359e+00 -1.7239192e+00 -2.9399147e+00 2.7889738e+00 -3.1160578e-01 -1.4824926e+00 7.1103749e+00 -3.6522651e+00 2.4005969e+00 -5.8169818e+00 5.5189109e+00 2.0857096e+00 1.8700083e+00] [-2.4832678e+00 2.0680828e+00 6.5376526e-01 8.9646572e-01 2.9405041e+00 7.1300882e-01 2.2756934e+00 1.8750602e-01 -2.5678630e+00 -9.6970403e-01 1.1048233e-02 -1.5795124e+00 -3.3771830e+00 -4.0922875e+00 5.0809664e-01 -1.3597711e+00 -6.7210841e-01 1.7246841e+00 3.3241920e+00 -1.0257341e+00 -4.1115127e+00 1.4793842e+00 2.9677651e+00]] [[ 1.7215432e+00 -7.5114810e-01 -6.1053262e+00 -6.2568849e-01 -3.0121677e+00 -3.4288094e+00 7.5607796e+00 2.2503819e+00 -7.1925974e+00 5.1486588e+00 8.7332010e+00 3.4464574e+00 -7.3003244e-01 -5.3505731e-01 -5.1182008e-01 -4.9925647e+00 6.6336310e-01 1.0263058e+01 2.6969502e+00 -2.8972521e+00 -2.9024870e+00 
-4.6249967e+00 1.5005653e+00] [ 3.4804084e+00 -2.5204058e+00 -8.5306090e-01 1.3000140e+00 -3.8740963e-01 -1.0006067e+00 4.5070496e+00 -3.0299178e-01 -3.4724944e+00 -7.9327536e-01 8.6905593e-01 -1.5724051e+00 1.1897845e+00 3.5723188e+00 -2.6699850e-01 1.3605627e+00 -2.8126543e+00 -2.4196599e+00 -1.8133438e+00 -1.6578832e+00 2.2481897e-01 9.8415709e-01 -5.0591407e+00] [-1.7015934e-01 1.8568439e+00 1.3574339e+00 1.6904383e+00 1.1421262e+00 -1.4547260e-01 -9.1966701e-01 -1.0020641e+00 -6.2581241e-02 -1.3658661e+00 -7.4538958e-01 -1.0757846e+00 -2.1283636e+00 1.1297812e+00 2.2512002e+00 8.6849749e-01 -4.8132959e-01 -3.5260963e+00 -2.8017635e+00 6.2322778e-01 7.3947030e-01 2.5759072e+00 -4.8573804e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 3, 3], 'strides': 2, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_465.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %3 : int[] = prim::Constant[value=[0]]() %4 : int[] = prim::Constant[value=[2]]() %self.dilations : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.dilations) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%8) eption happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): 
Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_Dfw_re: [[[-3.586825 -7.8048925 0.20042892 -0.3206136 3.1550176 3.8867812 3.6154644 0.48304725 -4.827426 6.830273 9.016389 0.83254504] [ 0.04606657 0.31448182 0.49972606 0.8450688 2.3581731 1.3789836 -0.02356712 0.5280924 -4.3208575 -0.87500274 -3.2406433 -1.3406943 ] [ 5.6770444 2.2034216 -6.738241 5.811279 -5.1493354 0.36105752 -0.69951487 -0.17218351 2.596311 -7.628641 -2.0591269 1.4401011 ]] [[ 3.380905 0.44674435 1.5828059 -0.99489516 -0.55800956 4.9081798 -1.5348434 1.9415779 0.57255816 6.934917 -7.002702 3.821483 ] [ 1.0852679 3.610579 -2.119178 0.20698333 -1.8568364 1.1159607 -0.593904 0.0851419 -0.47535354 1.7417356 -1.3269119 1.5447881 ] [ 1.0161386 3.3343303 0.5035545 5.273104 0.35320368 -3.1558797 1.883851 -1.9288495 -1.9751254 1.2054397 9.065398 -7.4517927 ]]]; ov_res: [[[-3.586825 -7.8048925 0.20042892 -0.3206136 3.1550176 3.8867812 3.6154644 0.48304725 -4.827426 6.830273 9.016389 0.83254504] [ 0.04606657 0.31448182 0.49972606 0.8450688 2.3581731 1.3789836 -0.02356712 0.5280924 -4.3208575 -0.87500274 -3.2406433 -1.3406943 ] [ 5.6770444 2.2034216 -6.738241 5.811279 -5.1493354 0.36105752 -0.69951487 -0.17218351 2.596311 -7.628641 -2.0591269 1.4401011 ]] [[ 3.380905 0.44674435 1.5828059 -0.99489516 
-0.55800956 4.9081798 -1.5348434 1.9415779 0.57255816 6.934917 -7.002702 3.821483 ] [ 1.0852679 3.610579 -2.119178 0.20698333 -1.8568364 1.1159607 -0.593904 0.0851419 -0.47535354 1.7417356 -1.3269119 1.5447881 ] [ 1.0161386 3.3343303 0.5035545 5.273104 0.35320368 -3.1558797 1.883851 -1.9288495 -1.9751254 1.2054397 9.065398 -7.4517927 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 1, 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_467.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %6 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %2, %2, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%6) fw_re: [[[ 1.3418671 5.226958 -4.3664365 4.635674 -2.4392486 -0.19778341 4.2587366 3.0984786 1.4298484 3.0188081 -3.1508024 -3.9193993 2.1475716 3.5499582 -3.423525 0.81459224 -1.4974768 -6.083197 -1.0320067 -0.8187948 -3.8307643 -3.0369155 4.9542756 0.20152132 -1.9132285 ] [-1.3299598 0.45770043 -1.0380759 0.94727296 4.43735 -3.6979036 -0.63867164 0.7552398 -2.037713 5.1908054 0.23788866 -4.7672753 1.4170238 0.25623503 -0.62834316 0.6772754 0.3936867 -1.2053012 -1.3948405 0.27081013 2.1901538 -3.0914023 -0.05234965 0.6694473 0.86528146] [ 0.35974517 -2.1893322 4.2012787 -0.94090635 4.2238646 -2.1418328 -2.9401705 0.78737 2.858843 1.5315295 1.8482888 -2.5104563 -3.4080586 -0.75590867 2.9761884 -1.5845108 -0.14267439 0.3677907 -3.8381176 -1.2671131 0.47066972 -0.87276393 -5.2010207 1.5834707 2.8300824 ]] [[ 1.2718683 2.422781 -6.2734666 3.214889 -0.4753462 0.241056 3.2026463 -0.48673028 -0.68123525 -7.027214 -2.7843788 -2.0502598 -2.9083502 -3.4082458 -4.0954523 -4.3167605 -0.68106544 -2.3003047 1.7257477 -4.8044834 3.354496 0.16766658 1.8275379 1.0027479 -2.3763113 ] [-0.22521392 1.4885491 -3.3346574 -0.30043882 5.140027 -4.346242 0.41423577 0.35847065 -1.7407835 -1.2041668 -2.2173965 1.12514 1.6063888 -4.2273464 2.2398984 -0.97453123 -0.25705847 -2.0455282 2.7800636 -1.5061871 0.45349485 -1.4571989 -1.760537 4.3550987 0.19141248] [ 0.4170911 -1.4269086 1.6903623 -3.7073898 2.9211845 
0.35982633 -2.2651322 0.22883897 -2.7928874 -2.2752175 -5.3356013 -0.9948445 0.10117723 -2.9652674 -2.900165 -0.82153565 -4.603786 -0.8321173 -1.3909388 2.0369487 -4.1075573 1.4533088 -1.3131083 3.4353046 7.3875284 ]]]; ov_res: [[[ 1.3418671 5.226958 -4.3664365 4.635674 -2.4392486 -0.19778341 4.2587366 3.0984786 1.4298484 3.0188081 -3.1508024 -3.9193993 2.1475716 3.5499582 -3.423525 0.81459224 -1.4974768 -6.083197 -1.0320067 -0.8187948 -3.8307643 -3.0369155 4.9542756 0.20152132 -1.9132285 ] [-1.3299598 0.45770043 -1.0380759 0.94727296 4.43735 -3.6979036 -0.63867164 0.7552398 -2.037713 5.1908054 0.23788866 -4.7672753 1.4170238 0.25623503 -0.62834316 0.6772754 0.3936867 -1.2053012 -1.3948405 0.27081013 2.1901538 -3.0914023 -0.05234965 0.6694473 0.86528146] [ 0.35974517 -2.1893322 4.2012787 -0.94090635 4.2238646 -2.1418328 -2.9401705 0.78737 2.858843 1.5315295 1.8482888 -2.5104563 -3.4080586 -0.75590867 2.9761884 -1.5845108 -0.14267439 0.3677907 -3.8381176 -1.2671131 0.47066972 -0.87276393 -5.2010207 1.5834707 2.8300824 ]] [[ 1.2718683 2.422781 -6.2734666 3.214889 -0.4753462 0.241056 3.2026463 -0.48673028 -0.68123525 -7.027214 -2.7843788 -2.0502598 -2.9083502 -3.4082458 -4.0954523 -4.3167605 -0.68106544 -2.3003047 1.7257477 -4.8044834 3.354496 0.16766658 1.8275379 1.0027479 -2.3763113 ] [-0.22521392 1.4885491 -3.3346574 -0.30043882 5.140027 -4.346242 0.41423577 0.35847065 -1.7407835 -1.2041668 -2.2173965 1.12514 1.6063888 -4.2273464 2.2398984 -0.97453123 -0.25705847 -2.0455282 2.7800636 -1.5061871 0.45349485 -1.4571989 -1.760537 4.3550987 0.19141248] [ 0.4170911 -1.4269086 1.6903623 -3.7073898 2.9211845 0.35982633 -2.2651322 0.22883897 -2.7928874 -2.2752175 -5.3356013 -0.9948445 0.10117723 -2.9652674 -2.900165 -0.82153565 -4.603786 -0.8321173 -1.3909388 2.0369487 -4.1075573 1.4533088 -1.3131083 3.4353046 7.3875284 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 2, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_469.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %3 : int[] = prim::Constant[value=[0]]() %4 : int[] = prim::Constant[value=[1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%8) fw_re: [[[-6.6111931e-03 -2.9438632e+00 -4.0084982e-01 -1.7570416e+00 -1.3463013e+00 9.1223761e-02 6.6116743e+00 3.7600620e+00 -3.1509514e+00 -5.7579207e-01 -1.3819517e+00 4.5847383e-01 -5.3171819e-01 -1.2477212e-01 -5.2029781e+00 -1.9009256e+00 1.1176312e+00 3.6893253e+00 -5.5226736e+00 3.7095363e+00 -4.1808043e+00] [-7.8678113e-01 1.7837068e+00 4.3243152e-01 -2.2232881e+00 -1.7584547e+00 -3.4562132e-01 2.1734784e+00 1.7081341e+00 -1.5387701e+00 -1.2084819e-01 3.8806537e-01 4.6517220e-01 4.3814969e+00 1.3633952e+00 -4.6839433e+00 -2.4763174e+00 4.7841225e+00 -1.0217547e+00 5.0605029e-01 -2.2807322e+00 -6.9857445e+00] [ 4.4134407e+00 1.8930434e+00 -4.1500049e+00 -8.0821806e-01 -4.2100105e+00 -1.0151435e+00 1.7288629e+00 -1.7369278e-01 1.0730947e+00 5.1463943e+00 1.1069547e+00 1.4901385e+00 3.5273399e+00 -6.4001101e-01 -7.7774525e-03 -6.1133938e+00 1.8822267e+00 -3.3041155e+00 1.6567990e+00 -1.6343945e+00 -1.1913695e+01]] [[ 3.8284934e+00 -2.1154270e+00 -4.0371623e+00 -6.5343337e+00 -3.7455194e+00 -4.1827512e+00 3.6229944e+00 5.7732737e-01 -1.0844680e+00 -2.9974046e+00 3.1602475e-01 9.1930616e-01 1.3948091e+00 2.4626679e+00 5.6643710e-02 2.7199605e+00 8.2906067e-01 5.1433930e+00 -5.8200936e+00 -2.7589240e+00 -1.6383119e+00] [ 1.1830425e+00 7.1992093e-01 8.4516120e-01 -8.6643302e-01 -2.8168306e+00 -5.0659380e+00 
7.1266091e-01 -2.0065899e+00 5.1653810e+00 -5.2020888e+00 6.0134180e-02 -1.2990109e+00 -2.1431437e+00 2.5340860e+00 -2.2402513e+00 -1.9228518e+00 5.2611828e+00 5.5017676e+00 1.3861525e+00 4.7156248e+00 -4.7650809e+00] [-1.7229450e+00 5.2609830e+00 2.9546881e+00 6.5867982e+00 -6.6844125e+00 -3.4938941e+00 -4.7735519e+00 -1.3471315e+00 8.1293392e+00 -3.2304604e+00 1.0871511e+00 -1.7909639e+00 -3.1364145e+00 2.3130894e+00 -8.8949949e-01 -4.1124821e-01 7.2732544e+00 2.6721065e+00 3.5206351e+00 5.2167010e-01 -6.8673825e+00]]]; ov_res: [[[-6.6111931e-03 -2.9438632e+00 -4.0084982e-01 -1.7570416e+00 -1.3463013e+00 9.1223761e-02 6.6116743e+00 3.7600620e+00 -3.1509514e+00 -5.7579207e-01 -1.3819517e+00 4.5847383e-01 -5.3171819e-01 -1.2477212e-01 -5.2029781e+00 -1.9009256e+00 1.1176312e+00 3.6893253e+00 -5.5226736e+00 3.7095363e+00 -4.1808043e+00] [-7.8678113e-01 1.7837068e+00 4.3243152e-01 -2.2232881e+00 -1.7584547e+00 -3.4562132e-01 2.1734784e+00 1.7081341e+00 -1.5387701e+00 -1.2084819e-01 3.8806537e-01 4.6517220e-01 4.3814969e+00 1.3633952e+00 -4.6839433e+00 -2.4763174e+00 4.7841225e+00 -1.0217547e+00 5.0605029e-01 -2.2807322e+00 -6.9857445e+00] [ 4.4134407e+00 1.8930434e+00 -4.1500049e+00 -8.0821806e-01 -4.2100105e+00 -1.0151435e+00 1.7288629e+00 -1.7369278e-01 1.0730947e+00 5.1463943e+00 1.1069547e+00 1.4901385e+00 3.5273399e+00 -6.4001101e-01 -7.7774525e-03 -6.1133938e+00 1.8822267e+00 -3.3041155e+00 1.6567990e+00 -1.6343945e+00 -1.1913695e+01]] [[ 3.8284934e+00 -2.1154270e+00 -4.0371623e+00 -6.5343337e+00 -3.7455194e+00 -4.1827512e+00 3.6229944e+00 5.7732737e-01 -1.0844680e+00 -2.9974046e+00 3.1602475e-01 9.1930616e-01 1.3948091e+00 2.4626679e+00 5.6643710e-02 2.7199605e+00 8.2906067e-01 5.1433930e+00 -5.8200936e+00 -2.7589240e+00 -1.6383119e+00] [ 1.1830425e+00 7.1992093e-01 8.4516120e-01 -8.6643302e-01 -2.8168306e+00 -5.0659380e+00 7.1266091e-01 -2.0065899e+00 5.1653810e+00 -5.2020888e+00 6.0134180e-02 -1.2990109e+00 -2.1431437e+00 2.5340860e+00 -2.2402513e+00 
-1.9228518e+00 5.2611828e+00 5.5017676e+00 1.3861525e+00 4.7156248e+00 -4.7650809e+00] [-1.7229450e+00 5.2609830e+00 2.9546881e+00 6.5867982e+00 -6.6844125e+00 -3.4938941e+00 -4.7735519e+00 -1.3471315e+00 8.1293392e+00 -3.2304604e+00 1.0871511e+00 -1.7909639e+00 -3.1364145e+00 2.3130894e+00 -8.8949949e-01 -4.1124821e-01 7.2732544e+00 2.6721065e+00 3.5206351e+00 5.2167010e-01 -6.8673825e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 'same', 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_471.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%7) fw_re: [[[-1.6749828e-01 1.9072456e+00 -1.6423098e+00 -2.1089842e+00 -8.3355856e-01 -3.8867486e-01 -5.8090031e-01 2.6406081e+00 2.3895730e-01 -1.3679516e+00 1.3391453e+00 -4.6351023e+00 4.0610552e+00 -3.1739583e+00 -4.1865402e-01 4.8404746e+00 -2.2273717e+00 2.9872842e+00 -1.9341313e+00 -2.5879819e+00 4.4306231e+00 -5.4993315e+00 4.3563828e+00 -3.1019831e+00 -2.1296701e-01] [-3.3911655e+00 1.8254013e+00 -5.2836943e+00 -6.8833542e+00 2.3490791e+00 7.9710016e+00 -3.5083883e+00 -1.2276203e+01 -4.3683062e+00 6.4113784e+00 1.5981541e+00 -5.1690168e+00 2.9343998e+00 5.8811884e+00 -2.6519701e+00 -2.3277931e+00 2.6825354e+00 1.8178590e-01 -1.6079053e-01 -7.6664553e+00 3.4883461e+00 4.0285544e+00 8.6268091e-01 8.9847460e+00 3.9803665e+00] [-3.8350911e+00 2.6776717e+00 -1.8965399e+00 -2.0364139e+00 -3.3153679e+00 -2.5517440e+00 -2.3090062e+00 5.6323022e-01 -2.9092584e+00 3.2741833e-01 1.2420459e+00 -5.3464824e-01 3.0305967e+00 1.5522002e+00 -1.0768791e-01 7.0730000e+00 6.3899411e-03 3.7672503e+00 -1.2869292e+00 -3.0544927e+00 8.3221805e-01 -2.7626500e+00 5.8356462e+00 -1.3845747e+00 2.7231258e-01]] [[-2.2312784e+00 1.7953645e+00 -2.0848155e+00 9.4723552e-01 -6.6519338e-01 1.5124609e+00 -2.8505604e+00 2.9723167e+00 8.2385600e-01 1.4039237e-02 3.5736527e+00 -2.9882414e+00 8.2724196e-01 -2.7375320e-01 4.0529017e+00 -1.1935020e-01 1.0954834e+00 
9.9437588e-01 -2.2000453e+00 9.1201645e-01 1.4606909e-01 5.1241499e-01 -5.8912712e-01 -2.1870703e-01 -3.9596397e-02] [-3.4497988e+00 -3.5271938e+00 3.1493509e-01 -9.9348170e-01 -3.0555236e+00 1.2119812e+01 -3.6516767e+00 -3.0381553e+00 -2.1686003e+00 6.1758704e+00 -3.0906820e-01 4.7182293e+00 3.6179044e+00 -1.1576391e+00 -2.3026960e+00 -1.2611183e+00 6.3908178e-01 -1.0229304e+00 -1.6361090e+00 -2.8998814e+00 1.4964446e+00 -8.2137293e-01 8.9550990e-01 3.4401796e+00 3.4030714e+00] [-2.2779629e+00 -1.7175963e+00 -3.1806669e+00 1.5434679e-01 -1.3253826e+00 3.7346518e-01 -4.5166082e+00 2.5412938e-01 -1.5237489e+00 1.2455944e+00 5.2090173e+00 -3.2538733e-01 5.9866369e-01 -1.0240523e-01 2.2121370e+00 1.3579781e+00 1.3947759e+00 3.7698117e-01 -4.4225359e+00 -2.8322256e+00 1.0813483e+00 -1.6012331e+00 -2.2566295e-01 1.9878075e+00 3.9317915e-01]]]; ov_res: [[[-1.6749828e-01 1.9072456e+00 -1.6423098e+00 -2.1089842e+00 -8.3355856e-01 -3.8867486e-01 -5.8090031e-01 2.6406081e+00 2.3895730e-01 -1.3679516e+00 1.3391453e+00 -4.6351023e+00 4.0610552e+00 -3.1739583e+00 -4.1865402e-01 4.8404746e+00 -2.2273717e+00 2.9872842e+00 -1.9341313e+00 -2.5879819e+00 4.4306231e+00 -5.4993315e+00 4.3563828e+00 -3.1019831e+00 -2.1296701e-01] [-3.3911655e+00 1.8254013e+00 -5.2836943e+00 -6.8833542e+00 2.3490791e+00 7.9710016e+00 -3.5083883e+00 -1.2276203e+01 -4.3683062e+00 6.4113784e+00 1.5981541e+00 -5.1690168e+00 2.9343998e+00 5.8811884e+00 -2.6519701e+00 -2.3277931e+00 2.6825354e+00 1.8178590e-01 -1.6079053e-01 -7.6664553e+00 3.4883461e+00 4.0285544e+00 8.6268091e-01 8.9847460e+00 3.9803665e+00] [-3.8350911e+00 2.6776717e+00 -1.8965399e+00 -2.0364139e+00 -3.3153679e+00 -2.5517440e+00 -2.3090062e+00 5.6323022e-01 -2.9092584e+00 3.2741833e-01 1.2420459e+00 -5.3464824e-01 3.0305967e+00 1.5522002e+00 -1.0768791e-01 7.0730000e+00 6.3899411e-03 3.7672503e+00 -1.2869292e+00 -3.0544927e+00 8.3221805e-01 -2.7626500e+00 5.8356462e+00 -1.3845747e+00 2.7231258e-01]] [[-2.2312784e+00 1.7953645e+00 
-2.0848155e+00 9.4723552e-01 -6.6519338e-01 1.5124609e+00 -2.8505604e+00 2.9723167e+00 8.2385600e-01 1.4039237e-02 3.5736527e+00 -2.9882414e+00 8.2724196e-01 -2.7375320e-01 4.0529017e+00 -1.1935020e-01 1.0954834e+00 9.9437588e-01 -2.2000453e+00 9.1201645e-01 1.4606909e-01 5.1241499e-01 -5.8912712e-01 -2.1870703e-01 -3.9596397e-02] [-3.4497988e+00 -3.5271938e+00 3.1493509e-01 -9.9348170e-01 -3.0555236e+00 1.2119812e+01 -3.6516767e+00 -3.0381553e+00 -2.1686003e+00 6.1758704e+00 -3.0906820e-01 4.7182293e+00 3.6179044e+00 -1.1576391e+00 -2.3026960e+00 -1.2611183e+00 6.3908178e-01 -1.0229304e+00 -1.6361090e+00 -2.8998814e+00 1.4964446e+00 -8.2137293e-01 8.9550990e-01 3.4401796e+00 3.4030714e+00] [-2.2779629e+00 -1.7175963e+00 -3.1806669e+00 1.5434679e-01 -1.3253826e+00 3.7346518e-01 -4.5166082e+00 2.5412938e-01 -1.5237489e+00 1.2455944e+00 5.2090173e+00 -3.2538733e-01 5.9866369e-01 -1.0240523e-01 2.2121370e+00 1.3579781e+00 1.3947759e+00 3.7698117e-01 -4.4225359e+00 -2.8322256e+00 1.0813483e+00 -1.6012331e+00 -2.2566295e-01 1.9878075e+00 3.9317915e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv1D::test_conv1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 3, 3], 'strides': 1, 'pads': 'valid', 'dilations': 1, 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_473.aten_conv1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv1d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:79:23 return (%7) fw_re: [[[ 1.5277718e+00 3.2799682e-01 2.7063150e+00 7.5787055e-01 3.7829854e-02 -3.6438978e+00 -1.3264638e+00 -4.6027288e-01 -2.3331454e+00 -8.9076525e-01 -8.7672734e-01 -2.4556172e+00 -3.2656076e+00 1.6896614e+00 1.8923545e+00 1.5659204e+00 5.2432952e+00 1.6988354e+00 -3.4759822e+00 -5.8067255e+00 -2.0294392e+00 1.1795894e+00 -2.6519539e+00] [ 4.1470723e+00 1.1518843e+00 -5.2320614e+00 -7.8183584e+00 3.4737761e+00 1.0002352e+00 -1.2916282e+00 2.4128237e+00 1.4479481e+00 -5.5284315e-01 -1.6426359e+00 -3.4193947e+00 -4.1513085e+00 -4.2054629e+00 -2.7810946e+00 4.3972130e+00 4.6955233e+00 -1.2689071e+00 2.5944672e+00 -1.6892302e+00 -5.3301287e+00 -2.7730081e+00 -2.0911291e+00] [ 2.9770737e-03 -4.4214716e+00 -2.4517903e+00 4.5228000e+00 -7.5039989e-01 -3.6982083e+00 1.2270250e+00 -8.0335861e-01 -3.2705989e+00 1.9946111e+00 -5.5007738e-01 -5.1526892e-01 -2.3591828e+00 2.4073701e-02 3.1448138e+00 -1.2429104e+00 -2.1886830e+00 5.3134418e+00 -1.1354712e+00 -5.1437593e+00 -8.2609373e-01 9.9099541e-01 -4.3847817e-01]] [[ 3.0321484e+00 -6.1957389e-02 4.7252169e+00 -1.8756992e+00 2.6403039e+00 -1.9232793e-01 -1.1588116e+00 7.4178994e-01 -2.1590576e+00 5.5691081e-01 6.7174673e-01 2.3731163e+00 6.3188088e-01 4.0143356e+00 -1.8529408e+00 2.5726593e+00 1.3393527e+00 -9.2375499e-01 6.4175653e-01 1.4217969e+00 4.5622125e+00 2.0868890e+00 
2.3609409e+00] [ 1.8226520e+00 -1.8791668e+00 2.6798742e+00 1.3995230e+00 3.5844276e+00 -2.2873666e+00 -4.1010227e+00 -1.7760156e+00 -3.7992377e+00 -1.4171458e+00 7.0862406e-01 3.1195357e-01 -7.5698549e-01 6.3967657e-01 7.7791262e-01 1.3226386e+00 -2.7752442e+00 1.1369178e+00 -9.2477858e-01 -2.5995479e+00 -1.3795694e+00 2.3454711e+00 1.9181526e+00] [ 8.0252755e-01 4.8606899e-01 6.5366387e+00 -1.2400120e+00 -2.2736046e+00 -1.7020807e+00 2.7709379e+00 3.3206632e+00 1.7591326e+00 2.0739536e+00 -1.0693419e+00 -6.4792788e-01 8.2330056e-02 5.4127417e+00 4.0422127e-01 8.3920918e-02 3.1838703e+00 -1.7207683e+00 -1.1333858e+00 -2.3430367e+00 3.9782400e+00 5.5862126e+00 1.6693537e+00]]]; ov_res: [[[ 1.5277718e+00 3.2799682e-01 2.7063150e+00 7.5787055e-01 3.7829854e-02 -3.6438978e+00 -1.3264638e+00 -4.6027288e-01 -2.3331454e+00 -8.9076525e-01 -8.7672734e-01 -2.4556172e+00 -3.2656076e+00 1.6896614e+00 1.8923545e+00 1.5659204e+00 5.2432952e+00 1.6988354e+00 -3.4759822e+00 -5.8067255e+00 -2.0294392e+00 1.1795894e+00 -2.6519539e+00] [ 4.1470723e+00 1.1518843e+00 -5.2320614e+00 -7.8183584e+00 3.4737761e+00 1.0002352e+00 -1.2916282e+00 2.4128237e+00 1.4479481e+00 -5.5284315e-01 -1.6426359e+00 -3.4193947e+00 -4.1513085e+00 -4.2054629e+00 -2.7810946e+00 4.3972130e+00 4.6955233e+00 -1.2689071e+00 2.5944672e+00 -1.6892302e+00 -5.3301287e+00 -2.7730081e+00 -2.0911291e+00] [ 2.9770737e-03 -4.4214716e+00 -2.4517903e+00 4.5228000e+00 -7.5039989e-01 -3.6982083e+00 1.2270250e+00 -8.0335861e-01 -3.2705989e+00 1.9946111e+00 -5.5007738e-01 -5.1526892e-01 -2.3591828e+00 2.4073701e-02 3.1448138e+00 -1.2429104e+00 -2.1886830e+00 5.3134418e+00 -1.1354712e+00 -5.1437593e+00 -8.2609373e-01 9.9099541e-01 -4.3847817e-01]] [[ 3.0321484e+00 -6.1957389e-02 4.7252169e+00 -1.8756992e+00 2.6403039e+00 -1.9232793e-01 -1.1588116e+00 7.4178994e-01 -2.1590576e+00 5.5691081e-01 6.7174673e-01 2.3731163e+00 6.3188088e-01 4.0143356e+00 -1.8529408e+00 2.5726593e+00 1.3393527e+00 -9.2375499e-01 6.4175653e-01 
1.4217969e+00 4.5622125e+00 2.0868890e+00 2.3609409e+00] [ 1.8226520e+00 -1.8791668e+00 2.6798742e+00 1.3995230e+00 3.5844276e+00 -2.2873666e+00 -4.1010227e+00 -1.7760156e+00 -3.7992377e+00 -1.4171458e+00 7.0862406e-01 3.1195357e-01 -7.5698549e-01 6.3967657e-01 7.7791262e-01 1.3226386e+00 -2.7752442e+00 1.1369178e+00 -9.2477858e-01 -2.5995479e+00 -1.3795694e+00 2.3454711e+00 1.9181526e+00] [ 8.0252755e-01 4.8606899e-01 6.5366387e+00 -1.2400120e+00 -2.2736046e+00 -1.7020807e+00 2.7709379e+00 3.3206632e+00 1.7591326e+00 2.0739536e+00 -1.0693419e+00 -6.4792788e-01 8.2330056e-02 5.4127417e+00 4.0422127e-01 8.3920918e-02 3.1838703e+00 -1.7207683e+00 -1.1333858e+00 -2.3430367e+00 3.9782400e+00 5.5862126e+00 1.6693537e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_474.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0, 0]]() %3 : int[] = prim::Constant[value=[1, 1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.154463}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %3, %2, %3, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 5.20145798e+00 1.28707285e+01 -1.27329750e+01 ... -7.24644709e+00 -3.97348976e+00 -1.08089995e+00] [-1.04309607e+00 -4.00570154e+00 -1.12998476e+01 ... 1.22889614e+01 -5.37095118e+00 1.45562738e-01] [-5.97646618e+00 -3.81003857e+00 4.71991253e+00 ... -5.91609192e+00 -2.21232915e+00 -4.37204790e+00] ... [-3.53697538e-01 -7.08930731e+00 -5.11648273e+00 ... -6.01792669e+00 -3.47826123e+00 -4.89154434e+00] [ 5.54831076e+00 -1.06481667e+01 -8.40851784e+00 ... -2.02172661e+00 1.08282576e+01 -8.97207069e+00] [ 3.41533852e+00 1.83788371e+00 -7.03566551e+00 ... -5.30076408e+00 1.83186674e+00 -4.77437067e+00]] [[ 7.64767826e-03 1.79654884e+00 -1.05112305e+01 ... 2.09200573e+00 1.23335533e+01 -7.27778196e+00] [-9.22162247e+00 7.98868036e+00 1.28388672e+01 ... -1.85144401e+00 -7.32136297e+00 5.39875984e+00] [ 1.61818600e+00 -1.13020194e+00 9.49604511e+00 ... 1.08808565e+01 8.52526855e+00 -4.82450819e+00] ... [ 9.09640503e+00 1.47236395e+01 5.62604237e+00 ... 1.24315768e-01 5.51647282e+00 5.63663912e+00] [-2.15335693e+01 1.69673138e+01 -9.26077366e+00 ... -2.52225947e+00 3.03985429e+00 1.33039074e+01] [-3.71929264e+00 -6.33333778e+00 -1.79888511e+00 ... -2.03812122e+00 -1.05069046e+01 8.08730030e+00]] [[-4.59483385e+00 -1.16243706e+01 -1.47142200e+01 ... 
4.90005636e+00 -5.88325071e+00 2.41704407e+01] [ 8.51549053e+00 1.21769834e+00 9.18110371e+00 ... 2.77173924e+00 8.72385025e+00 -2.36648331e+01] [ 3.87519550e+00 1.79798055e+00 4.07487011e+00 ... 1.57903004e+00 -1.31133547e+01 9.94576836e+00] ... [-1.02158470e+01 4.74823415e-01 -2.12417865e+00 ... -1.43210435e+00 6.67314053e-01 4.11593437e-01] [ 6.84471035e+00 -2.43902516e+00 1.72962809e+00 ... 1.54855289e+01 -8.28388786e+00 6.88131523e+00] [ 1.28036623e+01 -1.28026667e+01 2.58560991e+00 ... -9.48774529e+00 2.69873232e-01 -1.33668346e+01]] ... [[-7.29907990e-01 1.27610331e+01 -5.65331459e+00 ... -5.33995247e+00 -3.13824677e+00 1.72315102e+01] [ 1.88227386e+01 -1.23263998e+01 9.88125896e+00 ... 3.08332920e-01 -1.49677954e+01 1.34211311e+01] [ 9.41142023e-01 -8.73968983e+00 -1.06283207e+01 ... -1.10684032e+01 5.46560717e+00 6.93203688e+00] ... [ 3.55643082e+00 1.58207693e+01 -3.36339426e+00 ... -2.02923989e+00 3.23959804e+00 1.38039279e+00] [-7.59989643e+00 8.99882317e+00 5.78305340e+00 ... 7.50127745e+00 -5.36406040e-01 6.25599682e-01] [ 1.82732010e+00 -7.85170031e+00 -2.28145885e+00 ... -3.28145313e+00 -5.95175791e+00 -2.56729674e+00]] [[-2.11746144e+00 8.32372475e+00 1.18403416e+01 ... 9.59861469e+00 1.51514578e+01 1.19204879e+00] [-5.44822550e+00 -3.75418019e+00 1.10750618e+01 ... 3.70446920e-01 -3.61847043e+00 -8.01061809e-01] [-3.82235312e+00 -4.03388977e+00 -5.49153471e+00 ... -1.10902214e+01 -5.95266104e-01 5.26978433e-01] ... [-1.02962501e-01 -1.61604953e+00 6.49714279e+00 ... -5.11391020e+00 -4.11050510e+00 -1.13634806e+01] [-1.08511324e+01 1.70463753e+01 -4.29531842e-01 ... -2.18591070e+00 -1.13499079e+01 2.81053066e+00] [-1.63982639e+01 -1.58617983e+01 6.43189573e+00 ... -2.71595836e+00 -9.72036457e+00 -2.41419101e+00]] [[-1.95658803e+00 3.79884410e+00 -7.27304697e+00 ... 2.34058361e+01 5.98719931e+00 1.06346989e+00] [-1.27518034e+01 9.47817230e+00 -4.56927156e+00 ... 
9.29610431e-01 -4.07592392e+00 1.04356766e-01] [-4.39687777e+00 -6.86289644e+00 -1.02202482e+01 ... -7.48518562e+00 5.31064272e-01 2.42245603e+00] ... [-5.24140596e+00 -2.02076137e-02 1.57598543e+01 ... 6.52890444e+00 8.60501945e-01 1.44506588e+01] [ 1.29810476e+01 1.00274668e+01 -9.37797737e+00 ... -1.39939384e+01 5.98680878e+00 1.70841956e+00] [ 1.03504896e+01 -4.09904909e+00 4.48137236e+00 ... 9.99095249e+00 -2.19619250e+00 1.32548070e+00]]]] [[[[-4.30574894e+00 -3.31937242e+00 -6.87400866e+00 ... 8.28898430e+00 -7.48979712e+00 1.87221165e+01] [ 2.98506093e+00 -1.86386371e+00 -4.95124531e+00 ... 9.85861492e+00 1.18408346e+00 4.90337706e+00] [ 6.69027376e+00 1.67610693e+00 -1.04785004e+01 ... 3.05834579e+00 -8.22463989e+00 4.78271902e-01] ... [ 7.28305674e+00 1.58266687e+01 6.69706488e+00 ... 1.67162323e+01 3.56843829e+00 -2.19594336e+00] [-9.41405201e+00 9.36695099e+00 -1.67313423e+01 ... -1.41826439e+00 2.98818588e+00 7.79477060e-02] [ 8.78250420e-01 -9.14765930e+00 1.90208697e+00 ... -5.86179972e+00 -1.14160004e+01 8.10482311e+00]] [[-1.07353096e+01 -3.30019903e+00 1.56091528e+01 ... -2.38637999e-01 -6.11176491e+00 7.18587160e+00] [-7.21684265e+00 1.41672730e+00 6.71833754e+00 ... -2.46218634e+00 -5.53815269e+00 4.91433096e+00] [ 7.48830032e+00 -1.58069506e-01 4.65259409e+00 ... 3.65903997e+00 -1.31505718e+01 6.13001490e+00] ... [-2.09141064e+00 -4.76844263e+00 4.02217817e+00 ... -5.33435488e+00 -1.00155029e+01 8.98576164e+00] [ 1.90966511e+00 -1.62870169e+00 2.69445992e+00 ... 1.17046185e+01 2.35448647e+00 3.43347454e+00] [ 1.53082705e+01 -1.27649870e+01 6.60074854e+00 ... 1.54445591e+01 4.36093235e+00 -6.00713730e+00]] [[ 2.30121021e+01 6.86023140e+00 1.74178600e+01 ... -1.53643055e+01 6.67024612e+00 -1.50220690e+01] [ 1.43944178e+01 1.93609214e+00 -1.20099378e+00 ... 1.05524673e+01 -1.83973455e+00 -1.95603390e+01] [ 3.87817621e+00 -6.19156504e+00 -6.01309013e+00 ... -8.60641575e+00 3.21509659e-01 -5.05383301e+00] ... 
[-7.92825699e+00 1.67788067e+01 -9.20450878e+00 ... 1.31145411e+01 -6.78353167e+00 6.31628275e+00] [ 3.33330750e+00 1.91297817e+00 2.72599101e+00 ... -3.60678411e+00 -5.12635827e-01 -7.85346222e+00] [-6.71152544e+00 2.67250657e-01 1.04357681e+01 ... -7.06670570e+00 9.88946855e-02 -1.60805092e+01]] ... [[ 1.03422279e+01 1.06731606e+00 -6.40529394e-01 ... -4.12430286e+00 2.13990803e+01 4.50953150e+00] [ 2.51353669e+00 -1.21977844e+01 -3.74334931e+00 ... -1.29663134e+01 1.09734259e+01 4.91694403e+00] [-1.60649338e+01 3.76909256e+00 -9.80188179e+00 ... -7.53903389e+00 -6.95637417e+00 5.65250921e+00] ... [ 1.24556494e+01 6.35249794e-01 -4.74725294e+00 ... -1.40855627e+01 -5.32103157e+00 -4.17408323e+00] [-8.54497075e-01 5.04777372e-01 5.46042538e+00 ... 2.41644049e+00 5.38711596e+00 7.23010159e+00] [ 6.60378647e+00 2.58931923e+00 -1.21246934e-01 ... -4.48669243e+00 -1.49500823e+00 7.14774430e-02]] [[-8.83549595e+00 1.70934486e+00 1.19598160e+01 ... 1.20072212e+01 5.14483118e+00 1.77794075e+01] [-1.48661041e+01 -2.73647785e+00 -5.09329367e+00 ... -1.13764172e+01 -8.03416157e+00 -7.00580120e-01] [-1.33772697e+01 2.29733753e+00 9.36497116e+00 ... -2.03717065e+00 1.36276255e+01 7.64320612e+00] ... [ 8.29149628e+00 -2.29066658e+00 2.59361458e+00 ... 1.22559538e+01 -1.08090353e+00 -5.36683023e-01] [ 3.71536314e-01 -5.71641803e-01 -9.29431534e+00 ... 1.17870178e+01 -1.46069980e+00 -4.48197651e+00] [-1.96900105e+00 -1.03496885e+01 2.28109193e+00 ... 6.93112707e+00 -2.50404215e+00 -1.38858175e+00]] [[ 8.77672386e+00 -1.00339928e+01 -3.02215290e+00 ... 2.39012489e+01 6.60760105e-01 4.29785824e+00] [ 1.68592739e+00 -1.75074029e+00 -1.29067183e+01 ... -2.25759101e+00 1.11283755e+00 -8.76883316e+00] [-4.22989893e+00 -4.47067738e+00 -8.66736698e+00 ... 4.39797544e+00 3.37572980e+00 3.76472926e+00] ... [-1.30728836e+01 1.28436069e+01 1.14238110e+01 ... -1.08789454e+01 -8.54986477e+00 -3.17636395e+00] [ 5.99642801e+00 -7.53780603e+00 7.15177715e-01 ... 
4.29596663e+00 -6.29536867e+00 -5.32677317e+00] [ 4.31792545e+00 1.55773478e+01 6.24752998e+00 ... 6.03646576e-01 -2.82350278e+00 -1.25712233e+01]]]]]; ov_res: [[[[[ 5.20145798e+00 1.28707285e+01 -1.27329750e+01 ... -7.24644709e+00 -3.97348976e+00 -1.08089995e+00] [-1.04309607e+00 -4.00570154e+00 -1.12998476e+01 ... 1.22889614e+01 -5.37095118e+00 1.45562738e-01] [-5.97646618e+00 -3.81003857e+00 4.71991253e+00 ... -5.91609192e+00 -2.21232915e+00 -4.37204790e+00] ... [-3.53697538e-01 -7.08930731e+00 -5.11648273e+00 ... -6.01792669e+00 -3.47826123e+00 -4.89154434e+00] [ 5.54831076e+00 -1.06481667e+01 -8.40851784e+00 ... -2.02172661e+00 1.08282576e+01 -8.97207069e+00] [ 3.41533852e+00 1.83788371e+00 -7.03566551e+00 ... -5.30076408e+00 1.83186674e+00 -4.77437067e+00]] [[ 7.64767826e-03 1.79654884e+00 -1.05112305e+01 ... 2.09200573e+00 1.23335533e+01 -7.27778196e+00] [-9.22162247e+00 7.98868036e+00 1.28388672e+01 ... -1.85144401e+00 -7.32136297e+00 5.39875984e+00] [ 1.61818600e+00 -1.13020194e+00 9.49604511e+00 ... 1.08808565e+01 8.52526855e+00 -4.82450819e+00] ... [ 9.09640503e+00 1.47236395e+01 5.62604237e+00 ... 1.24315768e-01 5.51647282e+00 5.63663912e+00] [-2.15335693e+01 1.69673138e+01 -9.26077366e+00 ... -2.52225947e+00 3.03985429e+00 1.33039074e+01] [-3.71929264e+00 -6.33333778e+00 -1.79888511e+00 ... -2.03812122e+00 -1.05069046e+01 8.08730030e+00]] [[-4.59483385e+00 -1.16243706e+01 -1.47142200e+01 ... 4.90005636e+00 -5.88325071e+00 2.41704407e+01] [ 8.51549053e+00 1.21769834e+00 9.18110371e+00 ... 2.77173924e+00 8.72385025e+00 -2.36648331e+01] [ 3.87519550e+00 1.79798055e+00 4.07487011e+00 ... 1.57903004e+00 -1.31133547e+01 9.94576836e+00] ... [-1.02158470e+01 4.74823415e-01 -2.12417865e+00 ... -1.43210435e+00 6.67314053e-01 4.11593437e-01] [ 6.84471035e+00 -2.43902516e+00 1.72962809e+00 ... 1.54855289e+01 -8.28388786e+00 6.88131523e+00] [ 1.28036623e+01 -1.28026667e+01 2.58560991e+00 ... -9.48774529e+00 2.69873232e-01 -1.33668346e+01]] ... 
[[-7.29907990e-01 1.27610331e+01 -5.65331459e+00 ... -5.33995247e+00 -3.13824677e+00 1.72315102e+01] [ 1.88227386e+01 -1.23263998e+01 9.88125896e+00 ... 3.08332920e-01 -1.49677954e+01 1.34211311e+01] [ 9.41142023e-01 -8.73968983e+00 -1.06283207e+01 ... -1.10684032e+01 5.46560717e+00 6.93203688e+00] ... [ 3.55643082e+00 1.58207693e+01 -3.36339426e+00 ... -2.02923989e+00 3.23959804e+00 1.38039279e+00] [-7.59989643e+00 8.99882317e+00 5.78305340e+00 ... 7.50127745e+00 -5.36406040e-01 6.25599682e-01] [ 1.82732010e+00 -7.85170031e+00 -2.28145885e+00 ... -3.28145313e+00 -5.95175791e+00 -2.56729674e+00]] [[-2.11746144e+00 8.32372475e+00 1.18403416e+01 ... 9.59861469e+00 1.51514578e+01 1.19204879e+00] [-5.44822550e+00 -3.75418019e+00 1.10750618e+01 ... 3.70446920e-01 -3.61847043e+00 -8.01061809e-01] [-3.82235312e+00 -4.03388977e+00 -5.49153471e+00 ... -1.10902214e+01 -5.95266104e-01 5.26978433e-01] ... [-1.02962501e-01 -1.61604953e+00 6.49714279e+00 ... -5.11391020e+00 -4.11050510e+00 -1.13634806e+01] [-1.08511324e+01 1.70463753e+01 -4.29531842e-01 ... -2.18591070e+00 -1.13499079e+01 2.81053066e+00] [-1.63982639e+01 -1.58617983e+01 6.43189573e+00 ... -2.71595836e+00 -9.72036457e+00 -2.41419101e+00]] [[-1.95658803e+00 3.79884410e+00 -7.27304697e+00 ... 2.34058361e+01 5.98719931e+00 1.06346989e+00] [-1.27518034e+01 9.47817230e+00 -4.56927156e+00 ... 9.29610431e-01 -4.07592392e+00 1.04356766e-01] [-4.39687777e+00 -6.86289644e+00 -1.02202482e+01 ... -7.48518562e+00 5.31064272e-01 2.42245603e+00] ... [-5.24140596e+00 -2.02076137e-02 1.57598543e+01 ... 6.52890444e+00 8.60501945e-01 1.44506588e+01] [ 1.29810476e+01 1.00274668e+01 -9.37797737e+00 ... -1.39939384e+01 5.98680878e+00 1.70841956e+00] [ 1.03504896e+01 -4.09904909e+00 4.48137236e+00 ... 9.99095249e+00 -2.19619250e+00 1.32548070e+00]]]] [[[[-4.30574894e+00 -3.31937242e+00 -6.87400866e+00 ... 8.28898430e+00 -7.48979712e+00 1.87221165e+01] [ 2.98506093e+00 -1.86386371e+00 -4.95124531e+00 ... 
9.85861492e+00 1.18408346e+00 4.90337706e+00] [ 6.69027376e+00 1.67610693e+00 -1.04785004e+01 ... 3.05834579e+00 -8.22463989e+00 4.78271902e-01] ... [ 7.28305674e+00 1.58266687e+01 6.69706488e+00 ... 1.67162323e+01 3.56843829e+00 -2.19594336e+00] [-9.41405201e+00 9.36695099e+00 -1.67313423e+01 ... -1.41826439e+00 2.98818588e+00 7.79477060e-02] [ 8.78250420e-01 -9.14765930e+00 1.90208697e+00 ... -5.86179972e+00 -1.14160004e+01 8.10482311e+00]] [[-1.07353096e+01 -3.30019903e+00 1.56091528e+01 ... -2.38637999e-01 -6.11176491e+00 7.18587160e+00] [-7.21684265e+00 1.41672730e+00 6.71833754e+00 ... -2.46218634e+00 -5.53815269e+00 4.91433096e+00] [ 7.48830032e+00 -1.58069506e-01 4.65259409e+00 ... 3.65903997e+00 -1.31505718e+01 6.13001490e+00] ... [-2.09141064e+00 -4.76844263e+00 4.02217817e+00 ... -5.33435488e+00 -1.00155029e+01 8.98576164e+00] [ 1.90966511e+00 -1.62870169e+00 2.69445992e+00 ... 1.17046185e+01 2.35448647e+00 3.43347454e+00] [ 1.53082705e+01 -1.27649870e+01 6.60074854e+00 ... 1.54445591e+01 4.36093235e+00 -6.00713730e+00]] [[ 2.30121021e+01 6.86023140e+00 1.74178600e+01 ... -1.53643055e+01 6.67024612e+00 -1.50220690e+01] [ 1.43944178e+01 1.93609214e+00 -1.20099378e+00 ... 1.05524673e+01 -1.83973455e+00 -1.95603390e+01] [ 3.87817621e+00 -6.19156504e+00 -6.01309013e+00 ... -8.60641575e+00 3.21509659e-01 -5.05383301e+00] ... [-7.92825699e+00 1.67788067e+01 -9.20450878e+00 ... 1.31145411e+01 -6.78353167e+00 6.31628275e+00] [ 3.33330750e+00 1.91297817e+00 2.72599101e+00 ... -3.60678411e+00 -5.12635827e-01 -7.85346222e+00] [-6.71152544e+00 2.67250657e-01 1.04357681e+01 ... -7.06670570e+00 9.88946855e-02 -1.60805092e+01]] ... [[ 1.03422279e+01 1.06731606e+00 -6.40529394e-01 ... -4.12430286e+00 2.13990803e+01 4.50953150e+00] [ 2.51353669e+00 -1.21977844e+01 -3.74334931e+00 ... -1.29663134e+01 1.09734259e+01 4.91694403e+00] [-1.60649338e+01 3.76909256e+00 -9.80188179e+00 ... -7.53903389e+00 -6.95637417e+00 5.65250921e+00] ... 
[ 1.24556494e+01 6.35249794e-01 -4.74725294e+00 ... -1.40855627e+01 -5.32103157e+00 -4.17408323e+00] [-8.54497075e-01 5.04777372e-01 5.46042538e+00 ... 2.41644049e+00 5.38711596e+00 7.23010159e+00] [ 6.60378647e+00 2.58931923e+00 -1.21246934e-01 ... -4.48669243e+00 -1.49500823e+00 7.14774430e-02]] [[-8.83549595e+00 1.70934486e+00 1.19598160e+01 ... 1.20072212e+01 5.14483118e+00 1.77794075e+01] [-1.48661041e+01 -2.73647785e+00 -5.09329367e+00 ... -1.13764172e+01 -8.03416157e+00 -7.00580120e-01] [-1.33772697e+01 2.29733753e+00 9.36497116e+00 ... -2.03717065e+00 1.36276255e+01 7.64320612e+00] ... [ 8.29149628e+00 -2.29066658e+00 2.59361458e+00 ... 1.22559538e+01 -1.08090353e+00 -5.36683023e-01] [ 3.71536314e-01 -5.71641803e-01 -9.29431534e+00 ... 1.17870178e+01 -1.46069980e+00 -4.48197651e+00] [-1.96900105e+00 -1.03496885e+01 2.28109193e+00 ... 6.93112707e+00 -2.50404215e+00 -1.38858175e+00]] [[ 8.77672386e+00 -1.00339928e+01 -3.02215290e+00 ... 2.39012489e+01 6.60760105e-01 4.29785824e+00] [ 1.68592739e+00 -1.75074029e+00 -1.29067183e+01 ... -2.25759101e+00 1.11283755e+00 -8.76883316e+00] [-4.22989893e+00 -4.47067738e+00 -8.66736698e+00 ... 4.39797544e+00 3.37572980e+00 3.76472926e+00] ... [-1.30728836e+01 1.28436069e+01 1.14238110e+01 ... -1.08789454e+01 -8.54986477e+00 -3.17636395e+00] [ 5.99642801e+00 -7.53780603e+00 7.15177715e-01 ... 4.29596663e+00 -6.29536867e+00 -5.32677317e+00] [ 4.31792545e+00 1.55773478e+01 6.24752998e+00 ... 6.03646576e-01 -2.82350278e+00 -1.25712233e+01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 2, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_476.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %3 : int[] = prim::Constant[value=[0, 0, 0]]() %4 : int[] = prim::Constant[value=[2, 2, 2]]() %self.dilations : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.00281008}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.dilations) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%8) fw_re: [[[[[ 8.59476566e+00 4.05441475e+00 1.97686791e+00 ... -3.64007473e+00 -2.23861653e-02 -1.23000879e+01] [ 1.06877623e+01 -4.22743225e+00 8.58814430e+00 ... 6.44125462e-01 9.01936245e+00 5.27674389e+00] [-4.78663015e+00 -1.58312063e+01 -4.12888861e+00 ... -3.82635164e+00 1.03480043e+01 3.02869058e+00] ... [ 6.96227884e+00 3.60574222e+00 -1.40445051e+01 ... 1.76864064e+00 2.14751768e+00 -7.52408564e-01] [ 1.95647705e+00 9.99841595e+00 1.91472263e+01 ... 5.55377150e+00 -9.57953358e+00 -4.22882140e-01] [ 4.31015205e+00 3.63751364e+00 8.77832890e+00 ... 6.77751112e+00 -1.24251850e-01 -9.20987988e+00]] [[-1.47621117e+01 -1.23903542e+01 1.53638387e+00 ... 4.71296072e+00 -2.32237644e+01 -2.50877357e+00] [ 1.62501740e+00 -2.77967143e+00 -1.47383528e+01 ... 3.00770736e+00 -1.57331252e+00 -7.33343542e-01] [-1.41487753e+00 -1.06790371e+01 -1.58031092e+01 ... -2.28416748e+01 -1.56445491e+00 -1.62967300e+00] ... [ 2.48495436e+00 -1.34322739e+01 -1.34562807e+01 ... 8.97925568e+00 7.98512220e+00 -8.09291363e+00] [-3.74746060e+00 9.60375309e+00 2.91313782e+01 ... 4.51743460e+00 1.59897070e+01 6.61576033e-01] [-1.63819444e+00 1.42796507e+01 1.01177826e+01 ... 
1.08103075e+01 -5.15235519e+00 -2.66077900e+00]] [[ 1.01946650e+01 1.85885239e+00 7.71893263e+00 ... -1.95692647e+00 9.04846954e+00 5.16385698e+00] [ 9.36285019e+00 -3.73407960e+00 1.29670846e+00 ... 9.55892563e-01 7.24156427e+00 -1.10054407e+01] [-1.17200375e+00 6.68146181e+00 -3.50230598e+00 ... 2.03210888e+01 4.46076536e+00 -1.39238338e+01] ... [ 1.08520994e+01 -2.77956796e+00 -1.25722218e+01 ... -1.26378860e+01 -9.89331055e+00 8.93981266e+00] [ 6.42884731e+00 6.27546597e+00 6.44883490e+00 ... 1.63078654e+00 3.29840350e+00 4.83060551e+00] [ 8.42541981e+00 1.31350479e+01 1.98051853e+01 ... -1.11429338e+01 -1.59857833e+00 -5.59479761e+00]] ... [[-6.60909319e+00 -3.91842031e+00 1.04415007e+01 ... -1.00433235e+01 -2.44255972e+00 -1.63185346e+00] [ 1.47788429e+01 -8.72560978e+00 6.24691725e-01 ... -1.71619439e+00 -4.41551256e+00 1.02303152e+01] [-3.09780329e-01 7.39618397e+00 1.10290403e+01 ... -8.84500027e+00 8.69304061e-01 9.94571686e+00] ... [ 1.92673702e+01 8.99641037e+00 4.97382343e-01 ... 5.06815529e+00 1.19726200e+01 4.97459030e+00] [ 2.21761197e-01 -5.06522560e+00 1.79160957e+01 ... 5.28851604e+00 -3.22939575e-01 1.52465582e+01] [-1.21860962e+01 4.82064915e+00 3.13603115e+00 ... 4.21630204e-01 9.37039471e+00 -1.70627060e+01]] [[-9.11202621e+00 5.43402719e+00 -5.70192194e+00 ... -2.86117191e+01 3.06391567e-01 7.45807314e+00] [ 3.69359255e+00 -1.24827642e+01 -1.47916718e+01 ... 1.07327356e+01 -2.07223072e+01 -2.11912365e+01] [ 1.51461868e+01 -5.39064360e+00 1.17971401e+01 ... -5.04497290e+00 7.04712582e+00 1.09799490e+01] ... [-4.65139675e+00 -1.38507509e+00 -3.84524852e-01 ... -2.02578354e+00 -8.21943378e+00 -1.55789461e+01] [-1.22029438e+01 -1.79377213e+01 2.32511973e+00 ... -2.28256926e-01 -8.12553692e+00 -3.84668970e+00] [-3.55860996e+00 1.26374960e+01 1.87453258e+00 ... 1.98273170e+00 -8.17925835e+00 8.56880951e+00]] [[-6.57046512e-02 2.69445610e+00 -3.69155359e+00 ... 
-2.44489983e-01 -8.28228569e+00 -3.24147195e-01] [-2.91827512e+00 -4.83394575e+00 1.91487670e+00 ... -1.17347565e+01 1.22929344e+01 4.42103910e+00] [ 1.05759490e+00 -1.07024736e+01 -4.60524035e+00 ... 6.86797953e+00 1.92392120e+01 -8.15087891e+00] ... [-2.85746336e+00 4.78355455e+00 1.31468258e+01 ... -7.06587029e+00 -5.70889425e+00 -7.87803650e+00] [-1.18612995e+01 -2.91050172e+00 1.54949980e+01 ... -5.18051243e+00 -1.57811079e+01 1.22405710e+01] [-1.82908916e+01 -1.05203807e+00 5.43203402e+00 ... -7.08347559e-01 -1.83839321e+00 -3.79314852e+00]]]] [[[[ 1.23344831e+01 -2.16739416e+00 3.39478683e+00 ... 1.56767249e+00 -1.49453449e+01 1.60798054e+01] [-3.50954843e+00 -4.62098658e-01 4.31770134e+00 ... 9.29352856e+00 -1.27288893e-01 -5.19294405e+00] [ 7.47207403e-01 -3.86628985e+00 1.59007826e+01 ... 7.36373091e+00 -1.22770357e+01 1.31439104e+01] ... [ 1.26638994e+01 7.66376317e-01 7.83895195e-01 ... 5.42584753e+00 4.88635361e-01 -6.43776274e+00] [-6.24635220e-01 -1.95694840e+00 -1.74309063e+01 ... 1.08292446e+01 3.11133909e+00 -2.62542057e+00] [-2.42330742e+00 8.02132607e+00 -8.03651237e+00 ... 6.01380205e+00 -8.53787231e+00 7.59738493e+00]] [[ 5.31797504e+00 3.41641843e-01 -1.58628817e+01 ... 1.28872375e+01 -5.01586056e+00 1.52700319e+01] [ 1.89828289e+00 1.10706930e+01 8.47057724e+00 ... -4.74212027e+00 1.08288012e+01 1.56840487e+01] [ 6.62522125e+00 9.32123184e-01 -4.33436918e+00 ... -7.75740528e+00 1.17154322e+01 1.27626734e+01] ... [-1.36738033e+01 6.31349993e+00 1.48842573e+01 ... 5.20129728e+00 2.38002110e+00 -3.46704721e+00] [ 3.09432316e+00 5.85550349e-03 -1.57828152e+00 ... -3.84696275e-02 9.21773529e+00 6.83100986e+00] [-5.64808941e+00 9.66084003e+00 -8.20544720e+00 ... -1.25824213e+00 8.05365562e+00 1.05831118e+01]] [[-8.49966145e+00 -9.17855167e+00 7.29709005e+00 ... 7.66525269e+00 2.62817216e+00 -1.88774891e+01] [ 2.38902020e+00 4.83730888e+00 -1.29273844e+00 ... 
1.09613724e+01 3.64631295e+00 2.00671844e+01] [ 1.04280348e+01 1.42105360e+01 -6.62804365e+00 ... 3.60739613e+00 2.23357677e+00 8.83028507e+00] ... [ 1.47782892e-01 5.79810143e+00 2.70737076e+00 ... 5.21703720e+00 -4.60717535e+00 -1.63450825e+00] [-1.07759819e+01 1.64418926e+01 3.27477837e+00 ... 4.85682583e+00 -5.95284462e-01 1.24787521e+01] [ 1.46908846e+01 -3.46497655e+00 -1.40001309e+00 ... 1.84872246e+01 7.12430048e+00 1.16493368e+00]] ... [[-8.72747326e+00 1.69011059e+01 2.19078970e+00 ... -8.24213982e+00 5.12291431e+00 6.15156937e+00] [-1.57104511e+01 7.89260674e+00 -9.81583786e+00 ... -5.82081747e+00 2.54663634e+00 6.12082529e+00] [-1.29993315e+01 1.84093399e+01 -2.68114734e+00 ... 6.97043896e+00 1.47712374e+00 1.00219145e+01] ... [ 7.95048332e+00 6.74114895e+00 -1.57059610e+00 ... 7.25992966e+00 -1.05529366e+01 1.67485981e+01] [ 1.40397282e+01 -1.45079231e+00 -1.54425058e+01 ... -3.42837024e+00 2.78138161e+00 -2.70161581e+00] [-5.30031776e+00 -5.32057762e+00 -1.20917785e+00 ... 4.90260649e+00 1.91964893e+01 7.00868464e+00]] [[ 1.30203609e+01 5.77080488e+00 -5.36488199e+00 ... 4.76832438e+00 -1.90675507e+01 5.78928423e+00] [-7.25553179e+00 -1.45458162e+00 4.79925299e+00 ... -1.05305157e+01 1.84901714e+01 -7.25373983e+00] [ 2.07740192e+01 -1.00904312e+01 -1.97901440e+00 ... -1.24431372e+00 3.32884431e+00 -7.71542120e+00] ... [-6.17365170e+00 7.03798151e+00 -6.48081684e+00 ... 5.66556311e+00 -5.13838577e+00 -2.99630666e+00] [ 2.43073225e+00 2.88361883e+00 1.18127708e+01 ... -2.41141224e+00 4.99369335e+00 -3.74119067e+00] [ 7.21714401e+00 6.56957960e+00 -7.12009907e-01 ... -2.47940683e+00 5.46497726e+00 -1.18923926e+00]] [[-1.40338583e+01 -2.27005291e+01 -1.40052519e+01 ... 4.17573166e+00 -3.62266445e+00 2.02645607e+01] [-8.90956819e-01 -1.94293404e+00 8.21852112e+00 ... 3.05440450e+00 -9.95411396e+00 -1.33840132e+01] [-4.59642410e+00 7.79229450e+00 1.38445168e+01 ... 8.49133205e+00 1.80340981e+00 -1.29163611e+00] ... 
[-4.13394308e+00 5.13588953e+00 2.37634773e+01 ... 4.48665619e+00 -4.53836060e+00 -1.20665938e-01] [-5.00097656e+00 -4.91161060e+00 3.70782447e+00 ... 6.31351185e+00 5.49363852e+00 6.97585225e-01] [ 7.34333467e+00 1.38482666e+01 9.71714306e+00 ... -3.63025761e+00 -5.02160013e-01 1.22171516e+01]]]]]; ov_res: [[[[[ 8.59476566e+00 4.05441475e+00 1.97686791e+00 ... -3.64007473e+00 -2.23861653e-02 -1.23000879e+01] [ 1.06877623e+01 -4.22743225e+00 8.58814430e+00 ... 6.44125462e-01 9.01936245e+00 5.27674389e+00] [-4.78663015e+00 -1.58312063e+01 -4.12888861e+00 ... -3.82635164e+00 1.03480043e+01 3.02869058e+00] ... [ 6.96227884e+00 3.60574222e+00 -1.40445051e+01 ... 1.76864064e+00 2.14751768e+00 -7.52408564e-01] [ 1.95647705e+00 9.99841595e+00 1.91472263e+01 ... 5.55377150e+00 -9.57953358e+00 -4.22882140e-01] [ 4.31015205e+00 3.63751364e+00 8.77832890e+00 ... 6.77751112e+00 -1.24251850e-01 -9.20987988e+00]] [[-1.47621117e+01 -1.23903542e+01 1.53638387e+00 ... 4.71296072e+00 -2.32237644e+01 -2.50877357e+00] [ 1.62501740e+00 -2.77967143e+00 -1.47383528e+01 ... 3.00770736e+00 -1.57331252e+00 -7.33343542e-01] [-1.41487753e+00 -1.06790371e+01 -1.58031092e+01 ... -2.28416748e+01 -1.56445491e+00 -1.62967300e+00] ... [ 2.48495436e+00 -1.34322739e+01 -1.34562807e+01 ... 8.97925568e+00 7.98512220e+00 -8.09291363e+00] [-3.74746060e+00 9.60375309e+00 2.91313782e+01 ... 4.51743460e+00 1.59897070e+01 6.61576033e-01] [-1.63819444e+00 1.42796507e+01 1.01177826e+01 ... 1.08103075e+01 -5.15235519e+00 -2.66077900e+00]] [[ 1.01946650e+01 1.85885239e+00 7.71893263e+00 ... -1.95692647e+00 9.04846954e+00 5.16385698e+00] [ 9.36285019e+00 -3.73407960e+00 1.29670846e+00 ... 9.55892563e-01 7.24156427e+00 -1.10054407e+01] [-1.17200375e+00 6.68146181e+00 -3.50230598e+00 ... 2.03210888e+01 4.46076536e+00 -1.39238338e+01] ... [ 1.08520994e+01 -2.77956796e+00 -1.25722218e+01 ... -1.26378860e+01 -9.89331055e+00 8.93981266e+00] [ 6.42884731e+00 6.27546597e+00 6.44883490e+00 ... 
1.63078654e+00 3.29840350e+00 4.83060551e+00] [ 8.42541981e+00 1.31350479e+01 1.98051853e+01 ... -1.11429338e+01 -1.59857833e+00 -5.59479761e+00]] ... [[-6.60909319e+00 -3.91842031e+00 1.04415007e+01 ... -1.00433235e+01 -2.44255972e+00 -1.63185346e+00] [ 1.47788429e+01 -8.72560978e+00 6.24691725e-01 ... -1.71619439e+00 -4.41551256e+00 1.02303152e+01] [-3.09780329e-01 7.39618397e+00 1.10290403e+01 ... -8.84500027e+00 8.69304061e-01 9.94571686e+00] ... [ 1.92673702e+01 8.99641037e+00 4.97382343e-01 ... 5.06815529e+00 1.19726200e+01 4.97459030e+00] [ 2.21761197e-01 -5.06522560e+00 1.79160957e+01 ... 5.28851604e+00 -3.22939575e-01 1.52465582e+01] [-1.21860962e+01 4.82064915e+00 3.13603115e+00 ... 4.21630204e-01 9.37039471e+00 -1.70627060e+01]] [[-9.11202621e+00 5.43402719e+00 -5.70192194e+00 ... -2.86117191e+01 3.06391567e-01 7.45807314e+00] [ 3.69359255e+00 -1.24827642e+01 -1.47916718e+01 ... 1.07327356e+01 -2.07223072e+01 -2.11912365e+01] [ 1.51461868e+01 -5.39064360e+00 1.17971401e+01 ... -5.04497290e+00 7.04712582e+00 1.09799490e+01] ... [-4.65139675e+00 -1.38507509e+00 -3.84524852e-01 ... -2.02578354e+00 -8.21943378e+00 -1.55789461e+01] [-1.22029438e+01 -1.79377213e+01 2.32511973e+00 ... -2.28256926e-01 -8.12553692e+00 -3.84668970e+00] [-3.55860996e+00 1.26374960e+01 1.87453258e+00 ... 1.98273170e+00 -8.17925835e+00 8.56880951e+00]] [[-6.57046512e-02 2.69445610e+00 -3.69155359e+00 ... -2.44489983e-01 -8.28228569e+00 -3.24147195e-01] [-2.91827512e+00 -4.83394575e+00 1.91487670e+00 ... -1.17347565e+01 1.22929344e+01 4.42103910e+00] [ 1.05759490e+00 -1.07024736e+01 -4.60524035e+00 ... 6.86797953e+00 1.92392120e+01 -8.15087891e+00] ... [-2.85746336e+00 4.78355455e+00 1.31468258e+01 ... -7.06587029e+00 -5.70889425e+00 -7.87803650e+00] [-1.18612995e+01 -2.91050172e+00 1.54949980e+01 ... -5.18051243e+00 -1.57811079e+01 1.22405710e+01] [-1.82908916e+01 -1.05203807e+00 5.43203402e+00 ... 
-7.08347559e-01 -1.83839321e+00 -3.79314852e+00]]]] [[[[ 1.23344831e+01 -2.16739416e+00 3.39478683e+00 ... 1.56767249e+00 -1.49453449e+01 1.60798054e+01] [-3.50954843e+00 -4.62098658e-01 4.31770134e+00 ... 9.29352856e+00 -1.27288893e-01 -5.19294405e+00] [ 7.47207403e-01 -3.86628985e+00 1.59007826e+01 ... 7.36373091e+00 -1.22770357e+01 1.31439104e+01] ... [ 1.26638994e+01 7.66376317e-01 7.83895195e-01 ... 5.42584753e+00 4.88635361e-01 -6.43776274e+00] [-6.24635220e-01 -1.95694840e+00 -1.74309063e+01 ... 1.08292446e+01 3.11133909e+00 -2.62542057e+00] [-2.42330742e+00 8.02132607e+00 -8.03651237e+00 ... 6.01380205e+00 -8.53787231e+00 7.59738493e+00]] [[ 5.31797504e+00 3.41641843e-01 -1.58628817e+01 ... 1.28872375e+01 -5.01586056e+00 1.52700319e+01] [ 1.89828289e+00 1.10706930e+01 8.47057724e+00 ... -4.74212027e+00 1.08288012e+01 1.56840487e+01] [ 6.62522125e+00 9.32123184e-01 -4.33436918e+00 ... -7.75740528e+00 1.17154322e+01 1.27626734e+01] ... [-1.36738033e+01 6.31349993e+00 1.48842573e+01 ... 5.20129728e+00 2.38002110e+00 -3.46704721e+00] [ 3.09432316e+00 5.85550349e-03 -1.57828152e+00 ... -3.84696275e-02 9.21773529e+00 6.83100986e+00] [-5.64808941e+00 9.66084003e+00 -8.20544720e+00 ... -1.25824213e+00 8.05365562e+00 1.05831118e+01]] [[-8.49966145e+00 -9.17855167e+00 7.29709005e+00 ... 7.66525269e+00 2.62817216e+00 -1.88774891e+01] [ 2.38902020e+00 4.83730888e+00 -1.29273844e+00 ... 1.09613724e+01 3.64631295e+00 2.00671844e+01] [ 1.04280348e+01 1.42105360e+01 -6.62804365e+00 ... 3.60739613e+00 2.23357677e+00 8.83028507e+00] ... [ 1.47782892e-01 5.79810143e+00 2.70737076e+00 ... 5.21703720e+00 -4.60717535e+00 -1.63450825e+00] [-1.07759819e+01 1.64418926e+01 3.27477837e+00 ... 4.85682583e+00 -5.95284462e-01 1.24787521e+01] [ 1.46908846e+01 -3.46497655e+00 -1.40001309e+00 ... 1.84872246e+01 7.12430048e+00 1.16493368e+00]] ... [[-8.72747326e+00 1.69011059e+01 2.19078970e+00 ... 
-8.24213982e+00 5.12291431e+00 6.15156937e+00] [-1.57104511e+01 7.89260674e+00 -9.81583786e+00 ... -5.82081747e+00 2.54663634e+00 6.12082529e+00] [-1.29993315e+01 1.84093399e+01 -2.68114734e+00 ... 6.97043896e+00 1.47712374e+00 1.00219145e+01] ... [ 7.95048332e+00 6.74114895e+00 -1.57059610e+00 ... 7.25992966e+00 -1.05529366e+01 1.67485981e+01] [ 1.40397282e+01 -1.45079231e+00 -1.54425058e+01 ... -3.42837024e+00 2.78138161e+00 -2.70161581e+00] [-5.30031776e+00 -5.32057762e+00 -1.20917785e+00 ... 4.90260649e+00 1.91964893e+01 7.00868464e+00]] [[ 1.30203609e+01 5.77080488e+00 -5.36488199e+00 ... 4.76832438e+00 -1.90675507e+01 5.78928423e+00] [-7.25553179e+00 -1.45458162e+00 4.79925299e+00 ... -1.05305157e+01 1.84901714e+01 -7.25373983e+00] [ 2.07740192e+01 -1.00904312e+01 -1.97901440e+00 ... -1.24431372e+00 3.32884431e+00 -7.71542120e+00] ... [-6.17365170e+00 7.03798151e+00 -6.48081684e+00 ... 5.66556311e+00 -5.13838577e+00 -2.99630666e+00] [ 2.43073225e+00 2.88361883e+00 1.18127708e+01 ... -2.41141224e+00 4.99369335e+00 -3.74119067e+00] [ 7.21714401e+00 6.56957960e+00 -7.12009907e-01 ... -2.47940683e+00 5.46497726e+00 -1.18923926e+00]] [[-1.40338583e+01 -2.27005291e+01 -1.40052519e+01 ... 4.17573166e+00 -3.62266445e+00 2.02645607e+01] [-8.90956819e-01 -1.94293404e+00 8.21852112e+00 ... 3.05440450e+00 -9.95411396e+00 -1.33840132e+01] [-4.59642410e+00 7.79229450e+00 1.38445168e+01 ... 8.49133205e+00 1.80340981e+00 -1.29163611e+00] ... [-4.13394308e+00 5.13588953e+00 2.37634773e+01 ... 4.48665619e+00 -4.53836060e+00 -1.20665938e-01] [-5.00097656e+00 -4.91161060e+00 3.70782447e+00 ... 6.31351185e+00 5.49363852e+00 6.97585225e-01] [ 7.34333467e+00 1.38482666e+01 9.71714306e+00 ... -3.63025761e+00 -5.02160013e-01 1.22171516e+01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 1, 'dilations': 1, 'groups': 1} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_478.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.7459}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %6 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %2, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%6) fw_re: [[[[[-1.35429704e+00 7.95893431e+00 5.46407318e+00 ... -8.78319144e-01 -1.52823889e+00 -1.96784341e+00] [-2.02779722e+00 4.17900276e+00 -3.03845596e+00 ... -5.46717072e+00 1.34664431e+01 2.12230778e+00] [ 5.67873383e+00 1.09816265e+01 -5.51296592e-01 ... -8.14196968e+00 6.48490095e+00 9.45494747e+00] ... [ 1.35797536e+00 -7.28062510e-01 -1.03544579e+01 ... -8.13057423e+00 -1.00325689e+01 2.67081451e+00] [-9.26398754e+00 -1.18361979e+01 -4.84005451e+00 ... -6.17807770e+00 -1.69853554e+01 8.09381866e+00] [ 9.73742104e+00 1.57458663e+00 1.73870575e+00 ... 3.75325155e+00 4.53121185e-01 9.52653980e+00]] [[-6.11679411e+00 -1.94152374e+01 3.40726614e+00 ... -9.94303823e-01 -1.65192997e+00 2.92929316e+00] [ 1.47630334e+00 -3.08380651e+00 4.65446186e+00 ... -1.96039319e-01 -9.12528872e-01 -1.31352794e+00] [-1.05604124e+01 1.15018349e+01 3.39286923e+00 ... 5.37613988e-01 -1.90455544e+00 6.84821796e+00] ... [-1.84261954e+00 1.03009691e+01 9.79026318e+00 ... -5.75583076e+00 6.51202345e+00 -7.97793674e+00] [ 1.62618504e+01 2.85842276e+00 3.52973700e+00 ... 1.31896734e-02 -2.13299274e-01 3.27778888e+00] [ 8.06577778e+00 -8.00277328e+00 -7.57639980e+00 ... -7.32194328e+00 -3.96285152e+00 -6.30448723e+00]] [[-1.95507109e+00 -3.74593115e+00 -6.24401665e+00 ... 
-4.15983868e+00 4.67896700e+00 1.64781308e+00] [-2.77590156e-01 2.62379408e+00 1.63471279e+01 ... 4.72866917e+00 3.84649134e+00 6.62819815e+00] [ 2.79865265e-01 -1.01013327e+01 8.68010521e+00 ... 1.33571472e+01 3.01140571e+00 1.97519817e+01] ... [ 6.75855112e+00 -6.50637507e-01 -1.04632931e+01 ... 2.02873182e+00 -8.97072315e+00 3.30492091e+00] [ 8.74663258e+00 9.36030483e+00 9.85104084e+00 ... -3.62740469e+00 -7.48092079e+00 -1.11438169e+01] [ 2.80001259e+00 -1.18722022e+00 1.93167019e+01 ... -1.44947350e+00 7.62728751e-01 7.96627378e+00]] ... [[ 9.86599064e+00 1.31998358e+01 6.88353109e+00 ... -3.13115001e-01 5.62814951e-01 3.21917558e+00] [-3.62893295e+00 -1.22221975e+01 -1.28843288e+01 ... -4.29321146e+00 1.69807301e+01 3.04373693e+00] [-2.09495258e+00 1.32226813e+00 -2.59465313e+00 ... 6.20654106e-01 -4.97734666e-01 2.87301016e+00] ... [ 5.80047941e+00 1.54627266e+01 1.45201283e+01 ... -4.13066268e-01 -1.51859722e+01 9.08268833e+00] [-3.04410815e-01 -6.66396332e+00 5.85071468e+00 ... 3.22348595e-01 -1.38930016e+01 -1.36371815e+00] [-6.19142103e+00 4.17528534e+00 7.64539242e+00 ... 6.27772760e+00 3.37721705e+00 8.51932168e-01]] [[-3.06764793e+00 1.39115658e+01 1.10938540e+01 ... 8.20722389e+00 1.57204638e+01 8.74610138e+00] [-6.96946239e+00 5.45823240e+00 2.28300304e+01 ... -3.29024792e+00 2.81242371e-01 6.51988459e+00] [ 6.42763329e+00 1.53372335e+01 -2.89089632e+00 ... 1.05974846e+01 -1.11176224e+01 2.99141788e+00] ... [ 9.13973808e+00 2.82906175e+00 2.44909334e+00 ... 1.02300329e+01 1.13222723e+01 1.16468248e+01] [ 7.05108762e-01 1.00932264e+00 3.72202969e+00 ... 8.46797085e+00 9.20119953e+00 8.31568301e-01] [-2.36951113e+00 -4.25795317e+00 -8.74346542e+00 ... 3.26428223e+00 1.63165550e+01 -4.97325802e+00]] [[ 3.28902864e+00 3.30610085e+00 -1.53457654e+00 ... 7.95007563e+00 -7.39774799e+00 2.41767359e+00] [-1.29340804e+00 6.25167489e-01 -1.34527302e+01 ... 1.72665901e+01 1.01291590e+01 9.56323051e+00] [ 5.08991957e-01 2.15528641e+01 2.78716421e+00 ... 
1.10599909e+01 8.05379868e-01 1.40502262e+01] ... [-4.48202801e+00 8.91528225e+00 -7.35544968e+00 ... -5.30306673e+00 -1.22754974e+01 -9.87792850e-01] [-5.03849936e+00 -1.20139873e+00 6.92108583e+00 ... 5.25903368e+00 -5.93508911e+00 2.50152206e+00] [ 6.32691669e+00 1.34296799e+01 -5.97525239e-01 ... -1.76447403e+00 6.50083971e+00 1.31240296e+00]]]] [[[[-9.64964509e-01 8.14516926e+00 -9.34062362e-01 ... -5.97926760e+00 1.01629496e+01 3.85931492e+00] [ 2.45292187e-02 6.53218555e+00 -1.95155704e+00 ... 1.94672155e+00 2.27909350e+00 3.29149103e+00] [-3.48911333e+00 3.70170403e+00 4.24785662e+00 ... -1.14600477e+01 5.32545233e+00 3.53908873e+00] ... [ 8.60004246e-01 -3.63509083e+00 -2.77436876e+00 ... -1.18612795e+01 1.84779143e+00 3.02719212e+00] [-5.01652908e+00 4.37564564e+00 9.34609795e+00 ... -1.48057499e+01 8.20840836e+00 6.01592827e+00] [ 4.87585926e+00 1.40450132e+00 6.26490021e+00 ... -4.35285521e+00 1.94099820e+00 1.06116047e+01]] [[-5.79378796e+00 -9.40188313e+00 3.28534484e-01 ... -5.17137170e-01 3.75994873e+00 9.03032207e+00] [-2.91813231e+00 1.22411513e+00 -9.27186489e+00 ... -6.70333743e-01 -5.45213556e+00 -1.78906071e+00] [-1.42007256e+01 -1.15761061e+01 1.00847235e+01 ... -1.73372900e+00 -3.67616272e+00 -6.92480850e+00] ... [ 2.29115200e+01 9.54954863e-01 -8.21845818e+00 ... -8.03676701e+00 5.71604633e+00 1.11072245e+01] [-1.57125628e+00 2.55929804e+00 -3.33676147e+00 ... -3.26012945e+00 -1.84046001e+01 -3.59651136e+00] [-1.01357257e+00 3.73961782e+00 -4.61877108e+00 ... -3.17654037e+00 3.60927343e+00 -4.29599905e+00]] [[ 1.03068709e-01 1.14437389e+00 -4.28804779e+00 ... -2.26203251e+00 -3.14511490e+00 6.38166857e+00] [-3.83772326e+00 4.32974052e+00 6.86638594e+00 ... 1.02050257e+01 -1.33328402e+00 1.38245907e+01] [ 6.13464117e-01 5.19770622e+00 2.00760460e+01 ... -4.16565537e-01 9.50688839e+00 -2.07056570e+00] ... [-8.47754860e+00 1.83618379e+00 -9.61642265e+00 ... 
4.14262867e+00 -2.16314468e+01 -4.72062016e+00] [-1.09705172e+01 -1.65354145e+00 -6.49354935e+00 ... -7.01546669e+00 5.08613443e+00 -1.41903365e+00] [-6.56794071e+00 3.72899318e+00 3.93100548e+00 ... 1.34150815e+00 7.33140087e+00 3.01514769e+00]] ... [[ 4.97730970e-01 8.99378240e-01 4.43702221e+00 ... -3.65443230e-02 -9.78538895e+00 -1.63511503e+00] [ 9.09728348e-01 8.12626743e+00 3.75966215e+00 ... 1.00419083e+01 -7.96241379e+00 5.75624371e+00] [-1.27969074e+01 -1.03819370e+01 -4.74328327e+00 ... 7.20568180e-01 1.39200239e+01 3.95252657e+00] ... [ 3.48798704e+00 -1.85660725e+01 -6.35926247e+00 ... -4.12591696e+00 -2.29830217e+00 -4.17022896e+00] [ 4.42636585e+00 6.49447250e+00 1.99642792e+01 ... -1.07363682e+01 -1.04741755e+01 2.75295925e+00] [-4.80837941e-01 7.07014656e+00 1.34240017e+01 ... 3.33924866e+00 -1.97869551e+00 -3.46340299e-01]] [[-2.48201466e+00 -3.31910658e+00 -3.45567322e+00 ... -6.44906044e+00 -9.89741993e+00 -4.83092165e+00] [ 6.31376457e+00 1.04879570e+01 -8.01095366e-01 ... 5.70944118e+00 3.00596476e+00 5.94398165e+00] [ 2.46830540e+01 2.14214253e+00 -1.28751528e+00 ... 3.62560654e+00 1.92024720e+00 5.78132010e+00] ... [-5.36140060e+00 -1.52420342e+00 -7.75961399e+00 ... 4.16917849e+00 -1.15272903e+01 1.17454395e+01] [ 4.83174324e-01 4.92833281e+00 4.96246147e+00 ... 2.21268177e+00 8.86405289e-01 4.97089863e+00] [ 5.52526593e-01 -1.48674595e+00 -6.32198811e+00 ... 1.10985136e+00 -4.91724777e+00 -6.07769489e-02]] [[ 7.77524710e-01 -5.36304140e+00 6.88827872e-01 ... 8.83937454e+00 2.12890363e+00 -2.27971935e+00] [ 5.61915207e+00 -8.50205302e-01 -2.84672165e+00 ... -3.00328541e+00 -2.25910759e+00 4.88641930e+00] [ 3.55900431e+00 1.93792973e+01 8.47977638e-01 ... 1.03556433e+01 -6.54591751e+00 -2.76792955e+00] ... [ 7.04337311e+00 7.68785858e+00 -4.16623068e+00 ... -4.92501140e-01 1.06751549e+00 5.83682060e+00] [-7.47468090e+00 -1.13133688e+01 -2.50540686e+00 ... 
-5.42218590e+00 -1.37919891e+00 1.06635637e+01] [-2.30874157e+00 7.27878428e+00 3.20487118e+00 ... 5.63983345e+00 -4.06964827e+00 8.13970852e+00]]]]]; ov_res: [[[[[-1.35429704e+00 7.95893431e+00 5.46407318e+00 ... -8.78319144e-01 -1.52823889e+00 -1.96784341e+00] [-2.02779722e+00 4.17900276e+00 -3.03845596e+00 ... -5.46717072e+00 1.34664431e+01 2.12230778e+00] [ 5.67873383e+00 1.09816265e+01 -5.51296592e-01 ... -8.14196968e+00 6.48490095e+00 9.45494747e+00] ... [ 1.35797536e+00 -7.28062510e-01 -1.03544579e+01 ... -8.13057423e+00 -1.00325689e+01 2.67081451e+00] [-9.26398754e+00 -1.18361979e+01 -4.84005451e+00 ... -6.17807770e+00 -1.69853554e+01 8.09381866e+00] [ 9.73742104e+00 1.57458663e+00 1.73870575e+00 ... 3.75325155e+00 4.53121185e-01 9.52653980e+00]] [[-6.11679411e+00 -1.94152374e+01 3.40726614e+00 ... -9.94303823e-01 -1.65192997e+00 2.92929316e+00] [ 1.47630334e+00 -3.08380651e+00 4.65446186e+00 ... -1.96039319e-01 -9.12528872e-01 -1.31352794e+00] [-1.05604124e+01 1.15018349e+01 3.39286923e+00 ... 5.37613988e-01 -1.90455544e+00 6.84821796e+00] ... [-1.84261954e+00 1.03009691e+01 9.79026318e+00 ... -5.75583076e+00 6.51202345e+00 -7.97793674e+00] [ 1.62618504e+01 2.85842276e+00 3.52973700e+00 ... 1.31896734e-02 -2.13299274e-01 3.27778888e+00] [ 8.06577778e+00 -8.00277328e+00 -7.57639980e+00 ... -7.32194328e+00 -3.96285152e+00 -6.30448723e+00]] [[-1.95507109e+00 -3.74593115e+00 -6.24401665e+00 ... -4.15983868e+00 4.67896700e+00 1.64781308e+00] [-2.77590156e-01 2.62379408e+00 1.63471279e+01 ... 4.72866917e+00 3.84649134e+00 6.62819815e+00] [ 2.79865265e-01 -1.01013327e+01 8.68010521e+00 ... 1.33571472e+01 3.01140571e+00 1.97519817e+01] ... [ 6.75855112e+00 -6.50637507e-01 -1.04632931e+01 ... 2.02873182e+00 -8.97072315e+00 3.30492091e+00] [ 8.74663258e+00 9.36030483e+00 9.85104084e+00 ... -3.62740469e+00 -7.48092079e+00 -1.11438169e+01] [ 2.80001259e+00 -1.18722022e+00 1.93167019e+01 ... -1.44947350e+00 7.62728751e-01 7.96627378e+00]] ... 
[[ 9.86599064e+00 1.31998358e+01 6.88353109e+00 ... -3.13115001e-01 5.62814951e-01 3.21917558e+00] [-3.62893295e+00 -1.22221975e+01 -1.28843288e+01 ... -4.29321146e+00 1.69807301e+01 3.04373693e+00] [-2.09495258e+00 1.32226813e+00 -2.59465313e+00 ... 6.20654106e-01 -4.97734666e-01 2.87301016e+00] ... [ 5.80047941e+00 1.54627266e+01 1.45201283e+01 ... -4.13066268e-01 -1.51859722e+01 9.08268833e+00] [-3.04410815e-01 -6.66396332e+00 5.85071468e+00 ... 3.22348595e-01 -1.38930016e+01 -1.36371815e+00] [-6.19142103e+00 4.17528534e+00 7.64539242e+00 ... 6.27772760e+00 3.37721705e+00 8.51932168e-01]] [[-3.06764793e+00 1.39115658e+01 1.10938540e+01 ... 8.20722389e+00 1.57204638e+01 8.74610138e+00] [-6.96946239e+00 5.45823240e+00 2.28300304e+01 ... -3.29024792e+00 2.81242371e-01 6.51988459e+00] [ 6.42763329e+00 1.53372335e+01 -2.89089632e+00 ... 1.05974846e+01 -1.11176224e+01 2.99141788e+00] ... [ 9.13973808e+00 2.82906175e+00 2.44909334e+00 ... 1.02300329e+01 1.13222723e+01 1.16468248e+01] [ 7.05108762e-01 1.00932264e+00 3.72202969e+00 ... 8.46797085e+00 9.20119953e+00 8.31568301e-01] [-2.36951113e+00 -4.25795317e+00 -8.74346542e+00 ... 3.26428223e+00 1.63165550e+01 -4.97325802e+00]] [[ 3.28902864e+00 3.30610085e+00 -1.53457654e+00 ... 7.95007563e+00 -7.39774799e+00 2.41767359e+00] [-1.29340804e+00 6.25167489e-01 -1.34527302e+01 ... 1.72665901e+01 1.01291590e+01 9.56323051e+00] [ 5.08991957e-01 2.15528641e+01 2.78716421e+00 ... 1.10599909e+01 8.05379868e-01 1.40502262e+01] ... [-4.48202801e+00 8.91528225e+00 -7.35544968e+00 ... -5.30306673e+00 -1.22754974e+01 -9.87792850e-01] [-5.03849936e+00 -1.20139873e+00 6.92108583e+00 ... 5.25903368e+00 -5.93508911e+00 2.50152206e+00] [ 6.32691669e+00 1.34296799e+01 -5.97525239e-01 ... -1.76447403e+00 6.50083971e+00 1.31240296e+00]]]] [[[[-9.64964509e-01 8.14516926e+00 -9.34062362e-01 ... -5.97926760e+00 1.01629496e+01 3.85931492e+00] [ 2.45292187e-02 6.53218555e+00 -1.95155704e+00 ... 
1.94672155e+00 2.27909350e+00 3.29149103e+00] [-3.48911333e+00 3.70170403e+00 4.24785662e+00 ... -1.14600477e+01 5.32545233e+00 3.53908873e+00] ... [ 8.60004246e-01 -3.63509083e+00 -2.77436876e+00 ... -1.18612795e+01 1.84779143e+00 3.02719212e+00] [-5.01652908e+00 4.37564564e+00 9.34609795e+00 ... -1.48057499e+01 8.20840836e+00 6.01592827e+00] [ 4.87585926e+00 1.40450132e+00 6.26490021e+00 ... -4.35285521e+00 1.94099820e+00 1.06116047e+01]] [[-5.79378796e+00 -9.40188313e+00 3.28534484e-01 ... -5.17137170e-01 3.75994873e+00 9.03032207e+00] [-2.91813231e+00 1.22411513e+00 -9.27186489e+00 ... -6.70333743e-01 -5.45213556e+00 -1.78906071e+00] [-1.42007256e+01 -1.15761061e+01 1.00847235e+01 ... -1.73372900e+00 -3.67616272e+00 -6.92480850e+00] ... [ 2.29115200e+01 9.54954863e-01 -8.21845818e+00 ... -8.03676701e+00 5.71604633e+00 1.11072245e+01] [-1.57125628e+00 2.55929804e+00 -3.33676147e+00 ... -3.26012945e+00 -1.84046001e+01 -3.59651136e+00] [-1.01357257e+00 3.73961782e+00 -4.61877108e+00 ... -3.17654037e+00 3.60927343e+00 -4.29599905e+00]] [[ 1.03068709e-01 1.14437389e+00 -4.28804779e+00 ... -2.26203251e+00 -3.14511490e+00 6.38166857e+00] [-3.83772326e+00 4.32974052e+00 6.86638594e+00 ... 1.02050257e+01 -1.33328402e+00 1.38245907e+01] [ 6.13464117e-01 5.19770622e+00 2.00760460e+01 ... -4.16565537e-01 9.50688839e+00 -2.07056570e+00] ... [-8.47754860e+00 1.83618379e+00 -9.61642265e+00 ... 4.14262867e+00 -2.16314468e+01 -4.72062016e+00] [-1.09705172e+01 -1.65354145e+00 -6.49354935e+00 ... -7.01546669e+00 5.08613443e+00 -1.41903365e+00] [-6.56794071e+00 3.72899318e+00 3.93100548e+00 ... 1.34150815e+00 7.33140087e+00 3.01514769e+00]] ... [[ 4.97730970e-01 8.99378240e-01 4.43702221e+00 ... -3.65443230e-02 -9.78538895e+00 -1.63511503e+00] [ 9.09728348e-01 8.12626743e+00 3.75966215e+00 ... 1.00419083e+01 -7.96241379e+00 5.75624371e+00] [-1.27969074e+01 -1.03819370e+01 -4.74328327e+00 ... 7.20568180e-01 1.39200239e+01 3.95252657e+00] ... 
[ 3.48798704e+00 -1.85660725e+01 -6.35926247e+00 ... -4.12591696e+00 -2.29830217e+00 -4.17022896e+00] [ 4.42636585e+00 6.49447250e+00 1.99642792e+01 ... -1.07363682e+01 -1.04741755e+01 2.75295925e+00] [-4.80837941e-01 7.07014656e+00 1.34240017e+01 ... 3.33924866e+00 -1.97869551e+00 -3.46340299e-01]] [[-2.48201466e+00 -3.31910658e+00 -3.45567322e+00 ... -6.44906044e+00 -9.89741993e+00 -4.83092165e+00] [ 6.31376457e+00 1.04879570e+01 -8.01095366e-01 ... 5.70944118e+00 3.00596476e+00 5.94398165e+00] [ 2.46830540e+01 2.14214253e+00 -1.28751528e+00 ... 3.62560654e+00 1.92024720e+00 5.78132010e+00] ... [-5.36140060e+00 -1.52420342e+00 -7.75961399e+00 ... 4.16917849e+00 -1.15272903e+01 1.17454395e+01] [ 4.83174324e-01 4.92833281e+00 4.96246147e+00 ... 2.21268177e+00 8.86405289e-01 4.97089863e+00] [ 5.52526593e-01 -1.48674595e+00 -6.32198811e+00 ... 1.10985136e+00 -4.91724777e+00 -6.07769489e-02]] [[ 7.77524710e-01 -5.36304140e+00 6.88827872e-01 ... 8.83937454e+00 2.12890363e+00 -2.27971935e+00] [ 5.61915207e+00 -8.50205302e-01 -2.84672165e+00 ... -3.00328541e+00 -2.25910759e+00 4.88641930e+00] [ 3.55900431e+00 1.93792973e+01 8.47977638e-01 ... 1.03556433e+01 -6.54591751e+00 -2.76792955e+00] ... [ 7.04337311e+00 7.68785858e+00 -4.16623068e+00 ... -4.92501140e-01 1.06751549e+00 5.83682060e+00] [-7.47468090e+00 -1.13133688e+01 -2.50540686e+00 ... -5.42218590e+00 -1.37919891e+00 1.06635637e+01] [-2.30874157e+00 7.27878428e+00 3.20487118e+00 ... 5.63983345e+00 -4.06964827e+00 8.13970852e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 2, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_480.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2, 2]]() %3 : int[] = prim::Constant[value=[0, 0, 0]]() %4 : int[] = prim::Constant[value=[1, 1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.18754}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%8) fw_re: [[[[[ 2.96597552e+00 -1.34911213e+01 -6.64796829e+00 ... 7.89505768e+00 -3.00913620e+00 -1.42246389e+01] [ 5.24178696e+00 4.88969994e+00 -1.58997154e+00 ... -5.22963619e+00 -3.22269535e+00 7.43090534e+00] [ 8.46979904e+00 -1.87672830e+00 -8.65994835e+00 ... -1.58950701e+01 -1.43546572e+01 -1.65703526e+01] ... [-1.55717754e+01 -5.94614863e-01 1.48671794e+00 ... -8.66718769e+00 6.23094845e+00 -2.18162155e+01] [ 3.12003779e+00 -6.71209335e+00 -8.41317844e+00 ... -1.95865309e+00 1.85842094e+01 6.06135368e+00] [-6.85764980e+00 -5.09175634e+00 -3.10130262e+00 ... 3.59194636e+00 -2.15991259e+00 -7.62619686e+00]] [[ 1.14694333e+00 -2.28525829e+00 -7.64160824e+00 ... -6.80464172e+00 -1.51649914e+01 1.85605564e+01] [-1.34467564e+01 3.09551835e+00 2.47306037e+00 ... 6.27754116e+00 -6.10554886e+00 -1.35191832e+01] [ 7.48982310e-01 -9.99927330e+00 -8.62792873e+00 ... 1.49580641e+01 4.00248909e+00 -5.65989113e+00] ... [-1.70539627e+01 6.70950031e+00 -2.48262525e-01 ... 1.18057623e+01 1.05110369e+01 -1.40860510e+00] [ 5.64364910e+00 8.77601624e+00 4.79115009e+00 ... 5.25023270e+00 -2.50178409e+00 3.57299590e+00] [ 2.42263293e+00 1.18041868e+01 1.53278894e+01 ... 
-6.31583214e+00 3.01167607e+00 6.30015945e+00]] [[-1.32343969e+01 4.88706493e+00 3.19073844e+00 ... -8.11548233e+00 -6.29925823e+00 5.36994362e+00] [ 2.61629915e+00 -1.85572872e+01 -4.54360914e+00 ... 5.27740097e+00 1.26821289e+01 -1.12632236e+01] [-1.22248707e+01 -8.39543438e+00 1.75001278e+01 ... 1.54485636e+01 -1.06499219e+00 2.92308736e+00] ... [-2.93290091e+00 1.42669535e+01 -1.21516008e+01 ... 5.63073635e+00 1.22271967e+00 1.02907205e+00] [-2.32470560e+00 7.14103103e-01 5.46120644e+00 ... -1.11344481e+00 5.95520878e+00 1.08047895e+01] [ 7.82616806e+00 -5.92812443e+00 -1.55323827e+00 ... -1.47634304e+00 -4.02792692e+00 -1.06483231e+01]] ... [[ 1.54261675e+01 9.81429672e+00 1.26281748e+01 ... -1.17284670e+01 -1.23986559e+01 -1.90099945e+01] [-1.27232275e+01 4.01548100e+00 3.87084603e+00 ... 2.00066328e-01 -4.96721935e+00 -6.79762650e+00] [-2.14218521e+01 1.12732248e+01 3.42032433e-01 ... -1.45137680e+00 1.20997257e+01 -8.94103432e+00] ... [-4.28378010e+00 1.49132657e+00 -3.35855317e+00 ... -1.72467136e+00 6.36400032e+00 -1.98988247e+00] [-1.59446783e+01 -2.29406338e+01 -7.63900089e+00 ... 6.84085083e+00 -5.95771122e+00 2.23394084e+00] [ 2.80545449e+00 -2.13331401e-01 1.97765617e+01 ... 3.52137876e+00 -1.33750763e+01 -1.10890827e+01]] [[ 2.71544504e+00 -1.25037498e+01 6.94776535e+00 ... -5.05891848e+00 -1.57247906e+01 -9.01757717e+00] [-1.66721458e+01 -3.51322436e+00 -2.59200211e+01 ... -5.08675098e+00 -1.49664679e+01 4.13318825e+00] [-2.17430439e+01 5.60551071e+00 -1.98418331e+00 ... -6.47668362e+00 -4.76935863e+00 1.39014263e+01] ... [ 1.55794249e+01 -4.86879492e+00 -5.71382427e+00 ... 4.65317917e+00 8.11510849e+00 -1.05713682e+01] [-9.96515179e+00 -5.45604992e+00 3.86567235e+00 ... 2.17606902e+00 7.59959412e+00 1.57842183e+00] [ 8.27290916e+00 3.21687531e+00 -1.64502001e+00 ... -7.54089546e+00 2.45993495e-01 -9.02921677e+00]] [[-6.87817383e+00 1.19080067e+00 8.93087387e-02 ... 
1.41846046e+01 1.77029972e+01 -4.19347095e+00] [ 1.58883791e+01 -7.46613121e+00 -1.17918625e+01 ... 4.47189808e+00 -4.47212982e+00 3.52256560e+00] [-1.07699127e+01 2.46945214e+00 -1.23921645e+00 ... 6.86315918e+00 1.09093857e+01 4.34339046e+00] ... [ 3.39035344e+00 -3.90711844e-01 1.66644096e+01 ... -2.46980333e+00 5.00615025e+00 4.57339668e+00] [ 4.45839882e-01 -3.59740317e-01 4.90568733e+00 ... 7.08766460e+00 -7.16035175e+00 -3.43062639e+00] [ 8.75054741e+00 -1.71208382e+01 -2.06201077e+00 ... 3.74757218e+00 2.75221229e+00 1.52125731e+01]]]] [[[[ 8.98554707e+00 -1.29080353e+01 -1.21384535e+01 ... -1.16091328e+01 -5.49988937e+00 -1.61921787e+01] [-1.17920494e+01 -1.96905174e+01 -8.03376102e+00 ... 6.49650335e-01 -1.09724092e+01 1.07447958e+00] [-5.23492455e-01 -6.67154789e+00 2.66130257e+00 ... 8.68104458e-01 -3.95723128e+00 -1.97314053e+01] ... [-3.73042643e-01 9.87303925e+00 5.46636391e+00 ... 3.82055354e+00 6.41585350e+00 1.75820751e+01] [ 1.03055277e+01 1.00428843e+00 -1.54834354e+00 ... 5.83373165e+00 -9.97613788e-01 5.53568244e-01] [ 3.52885127e-01 -4.83370900e-01 -6.12711716e+00 ... -1.38377247e+01 -8.88768673e+00 -9.44777775e+00]] [[ 1.90165567e+00 -1.62790108e+01 1.89307537e+01 ... 6.26186013e-01 -1.04668407e+01 -4.41734695e+00] [-5.09942913e+00 -7.05133533e+00 3.25194716e+00 ... 2.02867985e+00 1.27520037e+00 6.37235641e+00] [-1.05331888e+01 -7.28762722e+00 -2.08782554e-01 ... -4.55509186e+00 4.87622452e+00 5.30663395e+00] ... [ 3.41434455e+00 -4.44838762e+00 -8.37961793e-01 ... -1.35705414e+01 -1.29502401e+01 -5.62105179e+00] [ 2.71817970e+00 4.10955071e-01 8.62147331e+00 ... 8.63614845e+00 9.52894926e-01 -8.94039273e-01] [ 6.12084770e+00 6.40909481e+00 9.15737629e+00 ... -4.80494499e-01 -4.58299255e+00 -1.17619066e+01]] [[ 6.11408424e+00 7.34976768e-01 2.40279388e+00 ... 4.52190495e+00 -2.98464894e+00 -4.78220940e+00] [ 3.02329326e+00 -1.41777887e+01 2.78280139e-01 ... 
-4.86571789e+00 3.18357921e+00 -3.51220298e+00] [-8.97804141e-01 3.90351224e+00 -8.82783532e-01 ... 9.40503597e+00 -2.68777704e+00 8.07703590e+00] ... [-4.66205120e+00 1.48510504e+01 2.63726711e-03 ... 9.13215160e-01 -1.38146925e+01 -1.68424320e+01] [ 5.35052395e+00 -2.72973013e+00 -4.07679462e+00 ... -4.93572426e+00 -3.89710999e+00 -1.56114388e+01] [-3.46429682e+00 2.80249357e-01 5.70867538e+00 ... -1.63659134e+01 8.43848991e+00 -7.02225399e+00]] ... [[-3.85364056e+00 -8.20754910e+00 5.76054215e-01 ... -7.55055141e+00 1.33232965e+01 -7.17947292e+00] [ 3.90266728e+00 1.43233738e+01 1.71980643e+00 ... -1.63343811e+00 -7.37617970e+00 -9.61406231e+00] [-6.68098354e+00 -6.21692467e+00 -5.80232263e-01 ... -8.54709053e+00 4.54018307e+00 4.66650009e+00] ... [-1.12882280e+01 -1.31564608e+01 -2.02768612e+00 ... 9.55560398e+00 9.91134262e+00 -1.45999451e+01] [-6.51472664e+00 8.25972652e+00 -1.45922146e+01 ... -1.73046148e+00 -8.57606220e+00 -2.06932259e+00] [-2.88605595e+00 -1.19941587e+01 6.06150150e+00 ... -6.18402100e+00 3.30057836e+00 -2.98659658e+00]] [[ 4.91979027e+00 -1.17089825e+01 1.16458168e+01 ... 1.01261683e+01 4.53157544e-01 -1.35881815e+01] [ 1.55903053e+00 -8.94303322e+00 1.99185829e+01 ... -4.20298672e+00 -4.05996704e+00 -8.14868641e+00] [-1.37613201e+01 9.60393429e+00 -1.21134770e+00 ... 8.03325653e-01 4.54892826e+00 -4.00045156e+00] ... [-5.06999111e+00 -6.86141586e+00 -2.25558853e+00 ... 5.20710468e+00 1.54659128e+01 1.77019329e+01] [ 3.47098088e+00 3.44268489e+00 -9.19919109e+00 ... 5.27056503e+00 1.00816326e+01 8.51505756e-01] [ 6.30050850e+00 1.64222765e+00 -1.80535831e+01 ... 6.10425568e+00 -1.99860058e+01 -1.54869747e+01]] [[-1.64412391e+00 1.05884695e+01 -1.68047595e+00 ... -5.11689949e+00 -1.23962231e+01 8.11274910e+00] [-6.11601162e+00 1.03314037e+01 -1.14917243e+00 ... 1.08848658e+01 -5.26079655e-01 -4.84093285e+00] [-6.06135368e+00 -5.40136051e+00 -3.41817927e+00 ... 7.21834779e-01 -6.67391396e+00 -1.37531269e+00] ... 
[-1.40837374e+01 -3.29012084e+00 -3.95190406e+00 ... -1.08906555e+01 -3.33159304e+00 -4.99076080e+00] [ 1.88569117e+00 -1.98736095e+00 -2.24760294e-01 ... -3.42582250e+00 -1.86369076e+01 -1.35842562e-02] [-6.30790055e-01 3.48752522e+00 7.65480900e+00 ... 6.36315465e-01 -8.37685108e-01 7.86370659e+00]]]]]; ov_res: [[[[[ 2.96597552e+00 -1.34911213e+01 -6.64796829e+00 ... 7.89505768e+00 -3.00913620e+00 -1.42246389e+01] [ 5.24178696e+00 4.88969994e+00 -1.58997154e+00 ... -5.22963619e+00 -3.22269535e+00 7.43090534e+00] [ 8.46979904e+00 -1.87672830e+00 -8.65994835e+00 ... -1.58950701e+01 -1.43546572e+01 -1.65703526e+01] ... [-1.55717754e+01 -5.94614863e-01 1.48671794e+00 ... -8.66718769e+00 6.23094845e+00 -2.18162155e+01] [ 3.12003779e+00 -6.71209335e+00 -8.41317844e+00 ... -1.95865309e+00 1.85842094e+01 6.06135368e+00] [-6.85764980e+00 -5.09175634e+00 -3.10130262e+00 ... 3.59194636e+00 -2.15991259e+00 -7.62619686e+00]] [[ 1.14694333e+00 -2.28525829e+00 -7.64160824e+00 ... -6.80464172e+00 -1.51649914e+01 1.85605564e+01] [-1.34467564e+01 3.09551835e+00 2.47306037e+00 ... 6.27754116e+00 -6.10554886e+00 -1.35191832e+01] [ 7.48982310e-01 -9.99927330e+00 -8.62792873e+00 ... 1.49580641e+01 4.00248909e+00 -5.65989113e+00] ... [-1.70539627e+01 6.70950031e+00 -2.48262525e-01 ... 1.18057623e+01 1.05110369e+01 -1.40860510e+00] [ 5.64364910e+00 8.77601624e+00 4.79115009e+00 ... 5.25023270e+00 -2.50178409e+00 3.57299590e+00] [ 2.42263293e+00 1.18041868e+01 1.53278894e+01 ... -6.31583214e+00 3.01167607e+00 6.30015945e+00]] [[-1.32343969e+01 4.88706493e+00 3.19073844e+00 ... -8.11548233e+00 -6.29925823e+00 5.36994362e+00] [ 2.61629915e+00 -1.85572872e+01 -4.54360914e+00 ... 5.27740097e+00 1.26821289e+01 -1.12632236e+01] [-1.22248707e+01 -8.39543438e+00 1.75001278e+01 ... 1.54485636e+01 -1.06499219e+00 2.92308736e+00] ... [-2.93290091e+00 1.42669535e+01 -1.21516008e+01 ... 5.63073635e+00 1.22271967e+00 1.02907205e+00] [-2.32470560e+00 7.14103103e-01 5.46120644e+00 ... 
-1.11344481e+00 5.95520878e+00 1.08047895e+01] [ 7.82616806e+00 -5.92812443e+00 -1.55323827e+00 ... -1.47634304e+00 -4.02792692e+00 -1.06483231e+01]] ... [[ 1.54261675e+01 9.81429672e+00 1.26281748e+01 ... -1.17284670e+01 -1.23986559e+01 -1.90099945e+01] [-1.27232275e+01 4.01548100e+00 3.87084603e+00 ... 2.00066328e-01 -4.96721935e+00 -6.79762650e+00] [-2.14218521e+01 1.12732248e+01 3.42032433e-01 ... -1.45137680e+00 1.20997257e+01 -8.94103432e+00] ... [-4.28378010e+00 1.49132657e+00 -3.35855317e+00 ... -1.72467136e+00 6.36400032e+00 -1.98988247e+00] [-1.59446783e+01 -2.29406338e+01 -7.63900089e+00 ... 6.84085083e+00 -5.95771122e+00 2.23394084e+00] [ 2.80545449e+00 -2.13331401e-01 1.97765617e+01 ... 3.52137876e+00 -1.33750763e+01 -1.10890827e+01]] [[ 2.71544504e+00 -1.25037498e+01 6.94776535e+00 ... -5.05891848e+00 -1.57247906e+01 -9.01757717e+00] [-1.66721458e+01 -3.51322436e+00 -2.59200211e+01 ... -5.08675098e+00 -1.49664679e+01 4.13318825e+00] [-2.17430439e+01 5.60551071e+00 -1.98418331e+00 ... -6.47668362e+00 -4.76935863e+00 1.39014263e+01] ... [ 1.55794249e+01 -4.86879492e+00 -5.71382427e+00 ... 4.65317917e+00 8.11510849e+00 -1.05713682e+01] [-9.96515179e+00 -5.45604992e+00 3.86567235e+00 ... 2.17606902e+00 7.59959412e+00 1.57842183e+00] [ 8.27290916e+00 3.21687531e+00 -1.64502001e+00 ... -7.54089546e+00 2.45993495e-01 -9.02921677e+00]] [[-6.87817383e+00 1.19080067e+00 8.93087387e-02 ... 1.41846046e+01 1.77029972e+01 -4.19347095e+00] [ 1.58883791e+01 -7.46613121e+00 -1.17918625e+01 ... 4.47189808e+00 -4.47212982e+00 3.52256560e+00] [-1.07699127e+01 2.46945214e+00 -1.23921645e+00 ... 6.86315918e+00 1.09093857e+01 4.34339046e+00] ... [ 3.39035344e+00 -3.90711844e-01 1.66644096e+01 ... -2.46980333e+00 5.00615025e+00 4.57339668e+00] [ 4.45839882e-01 -3.59740317e-01 4.90568733e+00 ... 7.08766460e+00 -7.16035175e+00 -3.43062639e+00] [ 8.75054741e+00 -1.71208382e+01 -2.06201077e+00 ... 
3.74757218e+00 2.75221229e+00 1.52125731e+01]]]] [[[[ 8.98554707e+00 -1.29080353e+01 -1.21384535e+01 ... -1.16091328e+01 -5.49988937e+00 -1.61921787e+01] [-1.17920494e+01 -1.96905174e+01 -8.03376102e+00 ... 6.49650335e-01 -1.09724092e+01 1.07447958e+00] [-5.23492455e-01 -6.67154789e+00 2.66130257e+00 ... 8.68104458e-01 -3.95723128e+00 -1.97314053e+01] ... [-3.73042643e-01 9.87303925e+00 5.46636391e+00 ... 3.82055354e+00 6.41585350e+00 1.75820751e+01] [ 1.03055277e+01 1.00428843e+00 -1.54834354e+00 ... 5.83373165e+00 -9.97613788e-01 5.53568244e-01] [ 3.52885127e-01 -4.83370900e-01 -6.12711716e+00 ... -1.38377247e+01 -8.88768673e+00 -9.44777775e+00]] [[ 1.90165567e+00 -1.62790108e+01 1.89307537e+01 ... 6.26186013e-01 -1.04668407e+01 -4.41734695e+00] [-5.09942913e+00 -7.05133533e+00 3.25194716e+00 ... 2.02867985e+00 1.27520037e+00 6.37235641e+00] [-1.05331888e+01 -7.28762722e+00 -2.08782554e-01 ... -4.55509186e+00 4.87622452e+00 5.30663395e+00] ... [ 3.41434455e+00 -4.44838762e+00 -8.37961793e-01 ... -1.35705414e+01 -1.29502401e+01 -5.62105179e+00] [ 2.71817970e+00 4.10955071e-01 8.62147331e+00 ... 8.63614845e+00 9.52894926e-01 -8.94039273e-01] [ 6.12084770e+00 6.40909481e+00 9.15737629e+00 ... -4.80494499e-01 -4.58299255e+00 -1.17619066e+01]] [[ 6.11408424e+00 7.34976768e-01 2.40279388e+00 ... 4.52190495e+00 -2.98464894e+00 -4.78220940e+00] [ 3.02329326e+00 -1.41777887e+01 2.78280139e-01 ... -4.86571789e+00 3.18357921e+00 -3.51220298e+00] [-8.97804141e-01 3.90351224e+00 -8.82783532e-01 ... 9.40503597e+00 -2.68777704e+00 8.07703590e+00] ... [-4.66205120e+00 1.48510504e+01 2.63726711e-03 ... 9.13215160e-01 -1.38146925e+01 -1.68424320e+01] [ 5.35052395e+00 -2.72973013e+00 -4.07679462e+00 ... -4.93572426e+00 -3.89710999e+00 -1.56114388e+01] [-3.46429682e+00 2.80249357e-01 5.70867538e+00 ... -1.63659134e+01 8.43848991e+00 -7.02225399e+00]] ... [[-3.85364056e+00 -8.20754910e+00 5.76054215e-01 ... 
-7.55055141e+00 1.33232965e+01 -7.17947292e+00] [ 3.90266728e+00 1.43233738e+01 1.71980643e+00 ... -1.63343811e+00 -7.37617970e+00 -9.61406231e+00] [-6.68098354e+00 -6.21692467e+00 -5.80232263e-01 ... -8.54709053e+00 4.54018307e+00 4.66650009e+00] ... [-1.12882280e+01 -1.31564608e+01 -2.02768612e+00 ... 9.55560398e+00 9.91134262e+00 -1.45999451e+01] [-6.51472664e+00 8.25972652e+00 -1.45922146e+01 ... -1.73046148e+00 -8.57606220e+00 -2.06932259e+00] [-2.88605595e+00 -1.19941587e+01 6.06150150e+00 ... -6.18402100e+00 3.30057836e+00 -2.98659658e+00]] [[ 4.91979027e+00 -1.17089825e+01 1.16458168e+01 ... 1.01261683e+01 4.53157544e-01 -1.35881815e+01] [ 1.55903053e+00 -8.94303322e+00 1.99185829e+01 ... -4.20298672e+00 -4.05996704e+00 -8.14868641e+00] [-1.37613201e+01 9.60393429e+00 -1.21134770e+00 ... 8.03325653e-01 4.54892826e+00 -4.00045156e+00] ... [-5.06999111e+00 -6.86141586e+00 -2.25558853e+00 ... 5.20710468e+00 1.54659128e+01 1.77019329e+01] [ 3.47098088e+00 3.44268489e+00 -9.19919109e+00 ... 5.27056503e+00 1.00816326e+01 8.51505756e-01] [ 6.30050850e+00 1.64222765e+00 -1.80535831e+01 ... 6.10425568e+00 -1.99860058e+01 -1.54869747e+01]] [[-1.64412391e+00 1.05884695e+01 -1.68047595e+00 ... -5.11689949e+00 -1.23962231e+01 8.11274910e+00] [-6.11601162e+00 1.03314037e+01 -1.14917243e+00 ... 1.08848658e+01 -5.26079655e-01 -4.84093285e+00] [-6.06135368e+00 -5.40136051e+00 -3.41817927e+00 ... 7.21834779e-01 -6.67391396e+00 -1.37531269e+00] ... [-1.40837374e+01 -3.29012084e+00 -3.95190406e+00 ... -1.08906555e+01 -3.33159304e+00 -4.99076080e+00] [ 1.88569117e+00 -1.98736095e+00 -2.24760294e-01 ... -3.42582250e+00 -1.86369076e+01 -1.35842562e-02] [-6.30790055e-01 3.48752522e+00 7.65480900e+00 ... 6.36315465e-01 -8.37685108e-01 7.86370659e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [0, 1, 0], 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_482.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 1, 0]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.419171}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ -9.528698 -2.555324 -7.6471767 ... 5.8677073 5.387681 -2.0001519 ] [ -9.539812 2.8994346 -0.80499285 ... -9.027095 1.3518858 6.8553424 ] [ 5.1398273 -0.15500617 0.46160638 ... 1.1395605 -4.9965754 -6.3468227 ] ... [ -9.3227215 -20.763987 9.722861 ... -18.239233 0.7920957 9.860076 ] [ -3.5352325 19.909473 -11.258497 ... 9.690528 -0.6382181 9.262594 ] [ 1.8818979 -10.580314 -10.25158 ... -0.28209156 -1.2541225 -0.02590516]] [[ 3.4496987 5.9781775 -4.1797743 ... -3.6212337 -1.9772174 -3.842769 ] [ -8.447618 2.8287137 4.581065 ... 10.509782 11.512779 3.9437373 ] [ -4.288438 17.121187 0.9088172 ... 7.6645694 -8.434411 -3.628956 ] ... [ -4.79343 -7.211401 -1.3723547 ... 8.529506 -16.617668 -5.408733 ] [ -6.761781 6.1144466 -4.6186647 ... -0.02772146 12.837155 10.0802355 ] [ -2.391752 -1.6667671 -9.439676 ... -5.5733957 6.2668424 0.46579155]] [[ -3.9506161 12.486324 -1.9800348 ... -8.487075 11.808765 -12.194261 ] [-17.54395 6.1827765 -0.22532892 ... -8.889619 0.08709106 6.6881685 ] [ 17.165314 8.693614 0.213115 ... 9.976495 -17.506462 -3.9513147 ] ... [ -6.234418 -11.61825 -6.761995 ... -6.2838597 -5.6952386 -0.41607934] [ 8.563009 4.053449 9.965148 ... -1.3382413 5.8912163 -1.0888891 ] [ -4.3995743 -0.7230262 0.47513962 ... 
-2.7063231 -0.2601223 5.0136304 ]] ... [[ -9.661775 3.1396236 4.995597 ... -13.527454 8.178085 -4.4498305 ] [ 1.191823 -11.696738 15.90655 ... 8.465792 -2.9504108 -9.746221 ] [ -0.61168367 -6.515396 0.24686536 ... -3.750508 14.5644655 5.0990705 ] ... [ -6.9324617 -0.688948 25.186935 ... 3.894672 18.995344 13.99727 ] [ 10.938331 -17.233435 5.187125 ... -19.010069 7.1145062 3.0251591 ] [ -1.1255355 -7.670621 -2.279551 ... -4.1082187 -0.5873758 3.129266 ]] [[-11.299405 -3.0954397 6.2879944 ... 4.8114324 0.20532817 -3.223477 ] [ -9.421142 7.6169014 13.673935 ... -11.614825 2.841317 -4.097108 ] [ -8.398663 -12.749349 -5.87342 ... 16.62084 10.292412 -1.7923677 ] ... [ 7.8430505 -6.5017447 17.749687 ... -0.48094726 14.10597 -1.9517274 ] [ -6.789806 -0.47130942 -3.0857773 ... 3.384704 -6.3829827 -9.415233 ] [ -1.6573098 2.3115077 7.799008 ... -5.10023 8.651508 -10.694747 ]] [[ 5.532642 -3.0808196 -14.037323 ... -6.870268 0.55651665 3.8606088 ] [ 13.550843 2.9389956 2.6005454 ... 14.925025 11.42823 0.9693749 ] [ -6.371602 -2.319807 -5.290259 ... 11.994647 -12.761897 -4.3049464 ] ... [ -1.9432704 13.398377 8.795352 ... 0.1060012 -1.4901738 0.15289012] [ -8.935973 12.37555 -1.9993377 ... 9.19481 2.2350957 -1.7972062 ] [-11.187413 -0.2966913 -8.525449 ... 0.14070782 -14.058341 -1.5719395 ]]]] [[[[ 9.015579 -4.886725 -6.4041996 ... -3.4244192 10.903639 -8.558843 ] [ 10.730622 10.307397 1.4053521 ... -3.7137377 -2.902156 2.737737 ] [ -5.2921124 -0.4934157 -0.87965494 ... -4.391927 1.1631347 6.8982863 ] ... [ 4.4284425 12.503522 -11.7509 ... 5.5510864 -1.1181507 5.0644803 ] [ 0.561036 -3.2135115 0.6750319 ... -0.7559028 -2.015856 10.124668 ] [ -0.92640597 -6.3525863 4.915766 ... 1.1427453 -2.5146863 -1.737432 ]] [[-17.482798 -1.1529796 13.981005 ... 9.673893 9.916814 10.487277 ] [ 17.86904 4.9615235 -12.62971 ... -13.784655 3.8201473 -0.4037881 ] [ 0.14724034 -6.861382 11.959442 ... 14.892656 13.879806 5.899999 ] ... [ -2.110484 -1.3426161 -0.58903426 ... 
-7.8722134 5.427201 3.292914 ] [ 18.788424 -12.495233 -0.93474466 ... -4.209559 5.6271987 -14.885453 ] [ -4.8049235 0.44920903 -11.857381 ... -2.6614132 -10.421802 6.331762 ]] [[-10.063566 14.349298 -2.3051393 ... 6.474868 -5.3965034 -5.399579 ] [ 0.628256 -3.948714 3.9904623 ... -16.670801 6.3419495 5.1979103 ] [ 9.328081 0.43488473 9.494585 ... 0.8901402 -4.6548753 10.429258 ] ... [-12.287138 4.1376786 -15.592942 ... 1.1236689 -5.0826135 -8.702324 ] [ 11.853538 3.5478227 6.628532 ... 20.369816 -0.82270545 9.338424 ] [ 11.036289 -7.4634995 3.0183089 ... -0.08906519 1.0014162 5.8320584 ]] ... [[-10.271195 -2.437121 7.004829 ... 6.99483 2.2593007 0.12073022] [ 8.180264 12.601206 1.1090848 ... -6.84056 -2.2354763 6.2388005 ] [ 12.993026 -9.134995 -2.448499 ... 9.5729065 0.21946405 12.27451 ] ... [ 22.959713 6.063479 -7.85697 ... 15.622206 -5.907619 10.86564 ] [ 7.393219 -4.853102 -2.5623894 ... 0.90681714 5.163278 -11.8567705 ] [ -1.9709723 0.30220354 -4.290241 ... 1.2603552 9.458795 -3.3804948 ]] [[ -5.5969324 6.048704 7.108727 ... 0.8884067 0.44211382 -8.305478 ] [-12.112397 -11.353975 4.4103317 ... 11.066498 -6.672717 7.1489325 ] [ -5.034954 -5.0552154 -3.9305537 ... 5.467714 6.5886316 -10.8899 ] ... [ 11.286068 0.21386798 -2.7346725 ... 4.5108137 12.588215 -8.569898 ] [ 2.6513815 -3.6697037 -4.752642 ... -4.6323457 -6.8923144 -4.322252 ] [ 0.8426337 8.103249 11.086383 ... 6.297139 7.04153 3.3834562 ]] [[ 4.52114 1.7510676 -6.8738933 ... 2.3686886 1.1880243 -1.8923404 ] [-14.3813715 -0.8246239 11.498185 ... -12.685824 6.6888824 7.224985 ] [ 1.1458184 6.3254323 -13.765006 ... 13.2946825 -13.217347 0.5481083 ] ... [ -6.2595468 -11.084208 -0.95973545 ... -4.3124714 0.11058992 7.9859962 ] [ 18.703342 14.299738 -1.4063091 ... 1.1915891 -3.8622873 -10.408656 ] [ 10.149696 -3.5772834 2.8641138 ... -7.7170706 11.060318 9.608085 ]]]]]; ov_res: [[[[[ -9.528698 -2.555324 -7.6471767 ... 5.8677073 5.387681 -2.0001519 ] [ -9.539812 2.8994346 -0.80499285 ... 
-9.027095 1.3518858 6.8553424 ] [ 5.1398273 -0.15500617 0.46160638 ... 1.1395605 -4.9965754 -6.3468227 ] ... [ -9.3227215 -20.763987 9.722861 ... -18.239233 0.7920957 9.860076 ] [ -3.5352325 19.909473 -11.258497 ... 9.690528 -0.6382181 9.262594 ] [ 1.8818979 -10.580314 -10.25158 ... -0.28209156 -1.2541225 -0.02590516]] [[ 3.4496987 5.9781775 -4.1797743 ... -3.6212337 -1.9772174 -3.842769 ] [ -8.447618 2.8287137 4.581065 ... 10.509782 11.512779 3.9437373 ] [ -4.288438 17.121187 0.9088172 ... 7.6645694 -8.434411 -3.628956 ] ... [ -4.79343 -7.211401 -1.3723547 ... 8.529506 -16.617668 -5.408733 ] [ -6.761781 6.1144466 -4.6186647 ... -0.02772146 12.837155 10.0802355 ] [ -2.391752 -1.6667671 -9.439676 ... -5.5733957 6.2668424 0.46579155]] [[ -3.9506161 12.486324 -1.9800348 ... -8.487075 11.808765 -12.194261 ] [-17.54395 6.1827765 -0.22532892 ... -8.889619 0.08709106 6.6881685 ] [ 17.165314 8.693614 0.213115 ... 9.976495 -17.506462 -3.9513147 ] ... [ -6.234418 -11.61825 -6.761995 ... -6.2838597 -5.6952386 -0.41607934] [ 8.563009 4.053449 9.965148 ... -1.3382413 5.8912163 -1.0888891 ] [ -4.3995743 -0.7230262 0.47513962 ... -2.7063231 -0.2601223 5.0136304 ]] ... [[ -9.661775 3.1396236 4.995597 ... -13.527454 8.178085 -4.4498305 ] [ 1.191823 -11.696738 15.90655 ... 8.465792 -2.9504108 -9.746221 ] [ -0.61168367 -6.515396 0.24686536 ... -3.750508 14.5644655 5.0990705 ] ... [ -6.9324617 -0.688948 25.186935 ... 3.894672 18.995344 13.99727 ] [ 10.938331 -17.233435 5.187125 ... -19.010069 7.1145062 3.0251591 ] [ -1.1255355 -7.670621 -2.279551 ... -4.1082187 -0.5873758 3.129266 ]] [[-11.299405 -3.0954397 6.2879944 ... 4.8114324 0.20532817 -3.223477 ] [ -9.421142 7.6169014 13.673935 ... -11.614825 2.841317 -4.097108 ] [ -8.398663 -12.749349 -5.87342 ... 16.62084 10.292412 -1.7923677 ] ... [ 7.8430505 -6.5017447 17.749687 ... -0.48094726 14.10597 -1.9517274 ] [ -6.789806 -0.47130942 -3.0857773 ... 3.384704 -6.3829827 -9.415233 ] [ -1.6573098 2.3115077 7.799008 ... 
-5.10023 8.651508 -10.694747 ]] [[ 5.532642 -3.0808196 -14.037323 ... -6.870268 0.55651665 3.8606088 ] [ 13.550843 2.9389956 2.6005454 ... 14.925025 11.42823 0.9693749 ] [ -6.371602 -2.319807 -5.290259 ... 11.994647 -12.761897 -4.3049464 ] ... [ -1.9432704 13.398377 8.795352 ... 0.1060012 -1.4901738 0.15289012] [ -8.935973 12.37555 -1.9993377 ... 9.19481 2.2350957 -1.7972062 ] [-11.187413 -0.2966913 -8.525449 ... 0.14070782 -14.058341 -1.5719395 ]]]] [[[[ 9.015579 -4.886725 -6.4041996 ... -3.4244192 10.903639 -8.558843 ] [ 10.730622 10.307397 1.4053521 ... -3.7137377 -2.902156 2.737737 ] [ -5.2921124 -0.4934157 -0.87965494 ... -4.391927 1.1631347 6.8982863 ] ... [ 4.4284425 12.503522 -11.7509 ... 5.5510864 -1.1181507 5.0644803 ] [ 0.561036 -3.2135115 0.6750319 ... -0.7559028 -2.015856 10.124668 ] [ -0.92640597 -6.3525863 4.915766 ... 1.1427453 -2.5146863 -1.737432 ]] [[-17.482798 -1.1529796 13.981005 ... 9.673893 9.916814 10.487277 ] [ 17.86904 4.9615235 -12.62971 ... -13.784655 3.8201473 -0.4037881 ] [ 0.14724034 -6.861382 11.959442 ... 14.892656 13.879806 5.899999 ] ... [ -2.110484 -1.3426161 -0.58903426 ... -7.8722134 5.427201 3.292914 ] [ 18.788424 -12.495233 -0.93474466 ... -4.209559 5.6271987 -14.885453 ] [ -4.8049235 0.44920903 -11.857381 ... -2.6614132 -10.421802 6.331762 ]] [[-10.063566 14.349298 -2.3051393 ... 6.474868 -5.3965034 -5.399579 ] [ 0.628256 -3.948714 3.9904623 ... -16.670801 6.3419495 5.1979103 ] [ 9.328081 0.43488473 9.494585 ... 0.8901402 -4.6548753 10.429258 ] ... [-12.287138 4.1376786 -15.592942 ... 1.1236689 -5.0826135 -8.702324 ] [ 11.853538 3.5478227 6.628532 ... 20.369816 -0.82270545 9.338424 ] [ 11.036289 -7.4634995 3.0183089 ... -0.08906519 1.0014162 5.8320584 ]] ... [[-10.271195 -2.437121 7.004829 ... 6.99483 2.2593007 0.12073022] [ 8.180264 12.601206 1.1090848 ... -6.84056 -2.2354763 6.2388005 ] [ 12.993026 -9.134995 -2.448499 ... 9.5729065 0.21946405 12.27451 ] ... [ 22.959713 6.063479 -7.85697 ... 
15.622206 -5.907619 10.86564 ] [ 7.393219 -4.853102 -2.5623894 ... 0.90681714 5.163278 -11.8567705 ] [ -1.9709723 0.30220354 -4.290241 ... 1.2603552 9.458795 -3.3804948 ]] [[ -5.5969324 6.048704 7.108727 ... 0.8884067 0.44211382 -8.305478 ] [-12.112397 -11.353975 4.4103317 ... 11.066498 -6.672717 7.1489325 ] [ -5.034954 -5.0552154 -3.9305537 ... 5.467714 6.5886316 -10.8899 ] ... [ 11.286068 0.21386798 -2.7346725 ... 4.5108137 12.588215 -8.569898 ] [ 2.6513815 -3.6697037 -4.752642 ... -4.6323457 -6.8923144 -4.322252 ] [ 0.8426337 8.103249 11.086383 ... 6.297139 7.04153 3.3834562 ]] [[ 4.52114 1.7510676 -6.8738933 ... 2.3686886 1.1880243 -1.8923404 ] [-14.3813715 -0.8246239 11.498185 ... -12.685824 6.6888824 7.224985 ] [ 1.1458184 6.3254323 -13.765006 ... 13.2946825 -13.217347 0.5481083 ] ... [ -6.2595468 -11.084208 -0.95973545 ... -4.3124714 0.11058992 7.9859962 ] [ 18.703342 14.299738 -1.4063091 ... 1.1915891 -3.8622873 -10.408656 ] [ 10.149696 -3.5772834 2.8641138 ... -7.7170706 11.060318 9.608085 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [1, 0, 0], 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_484.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.13724}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 6.36987329e-01 -9.63720036e+00 -1.72769470e+01 ... 8.76874447e+00 2.21196342e+00 -5.87981319e+00] [ 6.92101192e+00 5.58502769e+00 1.48372154e+01 ... 6.65381336e+00 -2.88087702e+00 5.69698215e-01] [ 7.40840077e-01 -5.44098377e+00 -1.01874914e+01 ... -6.66202545e+00 9.47279549e+00 4.00552750e-02] ... [ 1.86753273e+00 -6.92297173e+00 -1.74479997e+00 ... -1.22539291e+01 -1.05165329e+01 1.90316620e+01] [-4.68631554e+00 2.05202007e+00 4.30543900e+00 ... 1.61573467e+01 1.47144175e+01 -1.07411356e+01] [-7.84886646e+00 -2.43137002e+00 -2.26864576e+00 ... -9.36744118e+00 3.26972485e-01 -2.39822865e-02]] [[ 5.52635098e+00 6.64638615e+00 4.40367985e+00 ... -2.90748239e+00 6.10836983e+00 -2.07888374e+01] [-1.65177178e+00 2.22679377e-01 -1.06870260e+01 ... -1.33575153e+01 -1.13385630e+00 2.84704685e+00] [-3.99806142e-01 -4.90352631e+00 1.19051762e+01 ... -7.55908585e+00 5.30933571e+00 2.15531063e+00] ... [-2.01849041e+01 3.15238404e+00 1.08798962e+01 ... -2.07362328e+01 1.09502792e+01 -4.61400032e+00] [-5.56686020e+00 4.34499645e+00 -1.39037123e+01 ... -1.38047552e+01 -6.48833275e+00 5.82092881e-01] [-2.30773945e+01 7.67267036e+00 7.41273594e+00 ... -6.25929832e+00 1.41514564e+00 -3.89577103e+00]] [[-1.42354107e+01 6.87237835e+00 -5.29343796e+00 ... 
-9.89238071e+00 9.05075836e+00 1.22259235e+01] [-5.35652447e+00 1.06867480e+00 3.75649476e+00 ... 1.03990278e+01 -3.28525543e-01 -9.46433163e+00] [-1.92604179e+01 4.58910561e+00 -2.01841335e+01 ... -1.66213856e+01 1.90525131e+01 1.57200336e+00] ... [-3.37719774e+00 1.95613265e-01 -2.02569604e+00 ... -5.04683685e+00 -1.24939566e+01 1.45993195e+01] [-1.36541119e+01 -1.12943964e+01 4.49185276e+00 ... -1.28948078e+01 -1.11444407e+01 5.28691864e+00] [-8.34688568e+00 -6.44304371e+00 -1.16742487e+01 ... -5.89496040e+00 -5.72715104e-01 -2.01135445e+01]] ... [[ 1.55699515e+00 -7.73873425e+00 1.60667191e+01 ... -8.74170780e+00 -1.39645853e+01 2.87543178e+00] [ 1.58493233e+01 1.02199860e+01 -7.27771759e+00 ... 1.20704355e+01 -6.03408527e+00 -9.20467377e+00] [-3.22145820e+00 -2.05317402e+01 1.72037315e+00 ... -7.94764805e+00 -5.82000923e+00 -7.73750496e+00] ... [ 2.60912943e+00 1.57761459e+01 6.64312840e-01 ... 5.86529350e+00 7.17042732e+00 6.09228611e-01] [-1.78389206e+01 -8.34425831e+00 1.91715660e+01 ... -1.83780766e+01 -9.82445061e-01 4.04783154e+00] [ 1.19980717e+01 -1.99336605e+01 1.79182949e+01 ... 8.28047085e+00 -5.04533577e+00 -9.38998127e+00]] [[ 3.82439303e+00 2.45518184e+00 -1.15608063e+01 ... -1.53764505e+01 -7.01271057e+00 -1.82768011e+00] [ 1.57001247e+01 9.34143782e-01 -3.33619404e+00 ... -1.12191839e+01 2.62659025e+00 -1.90391998e+01] [-2.25231342e+01 7.96727371e+00 -9.82120132e+00 ... -1.07366886e+01 2.22433376e+00 -2.44966221e+00] ... [ 7.42104435e+00 1.56905389e+00 -5.81150818e+00 ... -1.20475769e+01 1.39579897e+01 4.35276699e+00] [-8.78970146e+00 -1.05207243e+01 6.59881306e+00 ... -1.89143920e+00 -9.12171364e+00 3.20756650e+00] [ 3.69616771e+00 2.89098859e+00 -1.00801229e+01 ... 1.09157639e+01 -2.19421978e+01 4.73326683e+00]] [[-1.33327522e+01 -4.00812435e+00 1.05907607e+00 ... 4.95383739e-02 -9.65213418e-01 2.74451661e+00] [-6.84307861e+00 8.19391823e+00 4.84595108e+00 ... 
-4.75810146e+00 2.97196746e+00 -4.14968967e+00] [ 5.91181374e+00 5.23651409e+00 -3.38240623e+00 ... 6.63250732e+00 5.19824791e+00 3.78462815e+00] ... [-1.29464388e+01 -3.87386441e+00 3.00513911e+00 ... 3.33404064e-01 4.19768214e-01 -7.44332409e+00] [ 4.85575104e+00 4.22597218e+00 5.86181068e+00 ... 2.53685951e+00 -3.14018512e+00 5.34973621e+00] [ 2.61069918e+00 -9.08187962e+00 2.70950794e-01 ... -2.18689823e+00 8.06623340e-01 -8.02668858e+00]]]] [[[[ 2.08955135e+01 -7.01896381e+00 9.78680134e+00 ... 5.03035641e+00 6.16316319e+00 2.79714704e+00] [ 2.97484422e+00 -2.41261959e+00 -1.06731260e+00 ... 1.28394766e+01 -8.05323792e+00 1.06167002e+01] [-1.18962383e+01 8.92070389e+00 -5.35881138e+00 ... -5.74976063e+00 2.31813431e-01 -6.61215878e+00] ... [ 1.11451683e+01 -3.26365685e+00 -1.68825698e+00 ... -1.43176613e+01 1.93107452e+01 1.17975245e+01] [ 9.41888618e+00 1.20693302e+01 -8.52012539e+00 ... -1.15868483e+01 2.71431899e+00 -6.30278778e+00] [ 1.14130402e+01 -6.31439495e+00 -4.27362919e+00 ... 3.81188655e+00 -5.42225266e+00 -2.20274985e-01]] [[ 9.92437077e+00 1.29322338e+00 -1.72744503e+01 ... 1.04667139e+01 3.00241256e+00 -3.26838970e+00] [-7.89690208e+00 6.86426926e+00 -8.29679108e+00 ... -1.04728098e+01 1.43100805e+01 -1.35677099e+01] [ 7.77309036e+00 -7.07226849e+00 5.15861893e+00 ... -1.51429567e+01 8.62167358e+00 -8.45134258e+00] ... [-7.79534626e+00 7.33498287e+00 -4.28459167e+00 ... 7.99578381e+00 1.85762906e+00 5.37761211e+00] [-1.85397983e+00 -1.59234247e+01 3.23450203e+01 ... -4.67690992e+00 -5.94058037e+00 -1.16308975e+01] [ 6.59951210e+00 -1.26280127e+01 2.58507562e+00 ... -1.06494007e+01 1.85959759e+01 -8.11875153e+00]] [[ 1.00203867e+01 -2.56382871e+00 -3.53608632e+00 ... -1.74160061e+01 -8.47157478e+00 -2.77401028e+01] [-8.49166679e+00 -1.79458237e+01 1.09255781e+01 ... 1.32749748e+01 -1.22948036e+01 1.26736650e+01] [-8.11749744e+00 2.35752082e+00 -7.59867096e+00 ... 5.44181633e+00 -1.02610350e+01 6.95450783e+00] ... 
[ 2.45824318e+01 -1.11780329e+01 -1.10214176e+01 ... 3.70044351e+00 1.60119724e+01 -2.24320221e+00] [-6.25994492e+00 2.44601846e-01 -1.46693945e+01 ... -1.38501940e+01 2.82327104e+00 2.90214896e-01] [ 6.90583801e+00 -7.25193787e+00 -2.56289806e+01 ... 1.69198537e+00 8.78812504e+00 -1.44057684e+01]] ... [[-1.46888666e+01 1.07072496e+01 -8.99184132e+00 ... 1.55034790e+01 -3.29027843e+00 -5.58668613e+00] [ 2.16249561e+00 -4.22288179e+00 5.61390114e+00 ... -1.63564434e+01 3.50905633e+00 7.57599163e+00] [ 1.36884375e+01 -1.13388357e+01 -3.01998043e+01 ... -1.59174376e+01 -4.21417141e+00 -2.04151478e+01] ... [ 1.54251409e+00 -6.78816319e+00 1.14698544e+01 ... 1.59947300e+01 1.20748243e+01 -1.01965179e+01] [-1.40740550e+00 1.00460796e+01 -5.73567104e+00 ... -8.10292530e+00 -1.57453117e+01 3.45990252e+00] [-1.53760386e+01 1.08145261e+00 -1.24568768e+01 ... -1.01910150e+00 -1.45814466e+01 1.22492838e+01]] [[-4.93482733e+00 6.45053482e+00 3.03507257e+00 ... 1.51207104e+01 2.00714016e+00 -6.05027485e+00] [-9.33901119e+00 -3.68817866e-01 -2.28009915e+00 ... 1.52289467e+01 -8.55879211e+00 -9.80287552e+00] [-9.52582359e-01 1.08188934e+01 -7.89834404e+00 ... 1.47447834e+01 -1.63198109e+01 1.88939247e+01] ... [-9.59630871e+00 4.66305447e+00 -1.39644794e+01 ... -8.62683296e+00 -4.34478223e-01 -1.64398422e+01] [ 6.71227551e+00 2.78667760e+00 -1.05876722e+01 ... 1.83141327e+00 -3.22428226e+00 -1.40486631e+01] [-4.10425186e+00 3.71657395e+00 -4.74989748e+00 ... 2.08243084e+01 4.74063015e+00 9.88295746e+00]] [[-3.02015352e+00 -1.10758328e+00 -7.77812767e+00 ... 1.27497077e-01 -6.31881833e-01 6.55691624e+00] [ 9.15869808e+00 5.17654896e+00 7.67802811e+00 ... 3.27505088e+00 1.18435407e+00 -8.86927032e+00] [ 1.73948002e+00 9.17289925e+00 -3.24899983e+00 ... 1.43171663e+01 -7.30764198e+00 -1.32478752e+01] ... [-6.57772601e-01 1.55424237e-01 4.86723232e+00 ... -4.20766687e+00 3.34734607e+00 2.88939786e+00] [ 1.17307043e+00 -1.30163848e+00 3.21180129e+00 ... 
5.38013554e+00 1.28852882e+01 -4.77139282e+00] [-4.30945253e+00 7.14667511e+00 -5.61293697e+00 ... 8.34920788e+00 -4.03179836e+00 -6.36091518e+00]]]]]; ov_res: [[[[[ 6.36987329e-01 -9.63720036e+00 -1.72769470e+01 ... 8.76874447e+00 2.21196342e+00 -5.87981319e+00] [ 6.92101192e+00 5.58502769e+00 1.48372154e+01 ... 6.65381336e+00 -2.88087702e+00 5.69698215e-01] [ 7.40840077e-01 -5.44098377e+00 -1.01874914e+01 ... -6.66202545e+00 9.47279549e+00 4.00552750e-02] ... [ 1.86753273e+00 -6.92297173e+00 -1.74479997e+00 ... -1.22539291e+01 -1.05165329e+01 1.90316620e+01] [-4.68631554e+00 2.05202007e+00 4.30543900e+00 ... 1.61573467e+01 1.47144175e+01 -1.07411356e+01] [-7.84886646e+00 -2.43137002e+00 -2.26864576e+00 ... -9.36744118e+00 3.26972485e-01 -2.39822865e-02]] [[ 5.52635098e+00 6.64638615e+00 4.40367985e+00 ... -2.90748239e+00 6.10836983e+00 -2.07888374e+01] [-1.65177178e+00 2.22679377e-01 -1.06870260e+01 ... -1.33575153e+01 -1.13385630e+00 2.84704685e+00] [-3.99806142e-01 -4.90352631e+00 1.19051762e+01 ... -7.55908585e+00 5.30933571e+00 2.15531063e+00] ... [-2.01849041e+01 3.15238404e+00 1.08798962e+01 ... -2.07362328e+01 1.09502792e+01 -4.61400032e+00] [-5.56686020e+00 4.34499645e+00 -1.39037123e+01 ... -1.38047552e+01 -6.48833275e+00 5.82092881e-01] [-2.30773945e+01 7.67267036e+00 7.41273594e+00 ... -6.25929832e+00 1.41514564e+00 -3.89577103e+00]] [[-1.42354107e+01 6.87237835e+00 -5.29343796e+00 ... -9.89238071e+00 9.05075836e+00 1.22259235e+01] [-5.35652447e+00 1.06867480e+00 3.75649476e+00 ... 1.03990278e+01 -3.28525543e-01 -9.46433163e+00] [-1.92604179e+01 4.58910561e+00 -2.01841335e+01 ... -1.66213856e+01 1.90525131e+01 1.57200336e+00] ... [-3.37719774e+00 1.95613265e-01 -2.02569604e+00 ... -5.04683685e+00 -1.24939566e+01 1.45993195e+01] [-1.36541119e+01 -1.12943964e+01 4.49185276e+00 ... -1.28948078e+01 -1.11444407e+01 5.28691864e+00] [-8.34688568e+00 -6.44304371e+00 -1.16742487e+01 ... -5.89496040e+00 -5.72715104e-01 -2.01135445e+01]] ... 
[[ 1.55699515e+00 -7.73873425e+00 1.60667191e+01 ... -8.74170780e+00 -1.39645853e+01 2.87543178e+00] [ 1.58493233e+01 1.02199860e+01 -7.27771759e+00 ... 1.20704355e+01 -6.03408527e+00 -9.20467377e+00] [-3.22145820e+00 -2.05317402e+01 1.72037315e+00 ... -7.94764805e+00 -5.82000923e+00 -7.73750496e+00] ... [ 2.60912943e+00 1.57761459e+01 6.64312840e-01 ... 5.86529350e+00 7.17042732e+00 6.09228611e-01] [-1.78389206e+01 -8.34425831e+00 1.91715660e+01 ... -1.83780766e+01 -9.82445061e-01 4.04783154e+00] [ 1.19980717e+01 -1.99336605e+01 1.79182949e+01 ... 8.28047085e+00 -5.04533577e+00 -9.38998127e+00]] [[ 3.82439303e+00 2.45518184e+00 -1.15608063e+01 ... -1.53764505e+01 -7.01271057e+00 -1.82768011e+00] [ 1.57001247e+01 9.34143782e-01 -3.33619404e+00 ... -1.12191839e+01 2.62659025e+00 -1.90391998e+01] [-2.25231342e+01 7.96727371e+00 -9.82120132e+00 ... -1.07366886e+01 2.22433376e+00 -2.44966221e+00] ... [ 7.42104435e+00 1.56905389e+00 -5.81150818e+00 ... -1.20475769e+01 1.39579897e+01 4.35276699e+00] [-8.78970146e+00 -1.05207243e+01 6.59881306e+00 ... -1.89143920e+00 -9.12171364e+00 3.20756650e+00] [ 3.69616771e+00 2.89098859e+00 -1.00801229e+01 ... 1.09157639e+01 -2.19421978e+01 4.73326683e+00]] [[-1.33327522e+01 -4.00812435e+00 1.05907607e+00 ... 4.95383739e-02 -9.65213418e-01 2.74451661e+00] [-6.84307861e+00 8.19391823e+00 4.84595108e+00 ... -4.75810146e+00 2.97196746e+00 -4.14968967e+00] [ 5.91181374e+00 5.23651409e+00 -3.38240623e+00 ... 6.63250732e+00 5.19824791e+00 3.78462815e+00] ... [-1.29464388e+01 -3.87386441e+00 3.00513911e+00 ... 3.33404064e-01 4.19768214e-01 -7.44332409e+00] [ 4.85575104e+00 4.22597218e+00 5.86181068e+00 ... 2.53685951e+00 -3.14018512e+00 5.34973621e+00] [ 2.61069918e+00 -9.08187962e+00 2.70950794e-01 ... -2.18689823e+00 8.06623340e-01 -8.02668858e+00]]]] [[[[ 2.08955135e+01 -7.01896381e+00 9.78680134e+00 ... 5.03035641e+00 6.16316319e+00 2.79714704e+00] [ 2.97484422e+00 -2.41261959e+00 -1.06731260e+00 ... 
1.28394766e+01 -8.05323792e+00 1.06167002e+01] [-1.18962383e+01 8.92070389e+00 -5.35881138e+00 ... -5.74976063e+00 2.31813431e-01 -6.61215878e+00] ... [ 1.11451683e+01 -3.26365685e+00 -1.68825698e+00 ... -1.43176613e+01 1.93107452e+01 1.17975245e+01] [ 9.41888618e+00 1.20693302e+01 -8.52012539e+00 ... -1.15868483e+01 2.71431899e+00 -6.30278778e+00] [ 1.14130402e+01 -6.31439495e+00 -4.27362919e+00 ... 3.81188655e+00 -5.42225266e+00 -2.20274985e-01]] [[ 9.92437077e+00 1.29322338e+00 -1.72744503e+01 ... 1.04667139e+01 3.00241256e+00 -3.26838970e+00] [-7.89690208e+00 6.86426926e+00 -8.29679108e+00 ... -1.04728098e+01 1.43100805e+01 -1.35677099e+01] [ 7.77309036e+00 -7.07226849e+00 5.15861893e+00 ... -1.51429567e+01 8.62167358e+00 -8.45134258e+00] ... [-7.79534626e+00 7.33498287e+00 -4.28459167e+00 ... 7.99578381e+00 1.85762906e+00 5.37761211e+00] [-1.85397983e+00 -1.59234247e+01 3.23450203e+01 ... -4.67690992e+00 -5.94058037e+00 -1.16308975e+01] [ 6.59951210e+00 -1.26280127e+01 2.58507562e+00 ... -1.06494007e+01 1.85959759e+01 -8.11875153e+00]] [[ 1.00203867e+01 -2.56382871e+00 -3.53608632e+00 ... -1.74160061e+01 -8.47157478e+00 -2.77401028e+01] [-8.49166679e+00 -1.79458237e+01 1.09255781e+01 ... 1.32749748e+01 -1.22948036e+01 1.26736650e+01] [-8.11749744e+00 2.35752082e+00 -7.59867096e+00 ... 5.44181633e+00 -1.02610350e+01 6.95450783e+00] ... [ 2.45824318e+01 -1.11780329e+01 -1.10214176e+01 ... 3.70044351e+00 1.60119724e+01 -2.24320221e+00] [-6.25994492e+00 2.44601846e-01 -1.46693945e+01 ... -1.38501940e+01 2.82327104e+00 2.90214896e-01] [ 6.90583801e+00 -7.25193787e+00 -2.56289806e+01 ... 1.69198537e+00 8.78812504e+00 -1.44057684e+01]] ... [[-1.46888666e+01 1.07072496e+01 -8.99184132e+00 ... 1.55034790e+01 -3.29027843e+00 -5.58668613e+00] [ 2.16249561e+00 -4.22288179e+00 5.61390114e+00 ... -1.63564434e+01 3.50905633e+00 7.57599163e+00] [ 1.36884375e+01 -1.13388357e+01 -3.01998043e+01 ... -1.59174376e+01 -4.21417141e+00 -2.04151478e+01] ... 
[ 1.54251409e+00 -6.78816319e+00 1.14698544e+01 ... 1.59947300e+01 1.20748243e+01 -1.01965179e+01] [-1.40740550e+00 1.00460796e+01 -5.73567104e+00 ... -8.10292530e+00 -1.57453117e+01 3.45990252e+00] [-1.53760386e+01 1.08145261e+00 -1.24568768e+01 ... -1.01910150e+00 -1.45814466e+01 1.22492838e+01]] [[-4.93482733e+00 6.45053482e+00 3.03507257e+00 ... 1.51207104e+01 2.00714016e+00 -6.05027485e+00] [-9.33901119e+00 -3.68817866e-01 -2.28009915e+00 ... 1.52289467e+01 -8.55879211e+00 -9.80287552e+00] [-9.52582359e-01 1.08188934e+01 -7.89834404e+00 ... 1.47447834e+01 -1.63198109e+01 1.88939247e+01] ... [-9.59630871e+00 4.66305447e+00 -1.39644794e+01 ... -8.62683296e+00 -4.34478223e-01 -1.64398422e+01] [ 6.71227551e+00 2.78667760e+00 -1.05876722e+01 ... 1.83141327e+00 -3.22428226e+00 -1.40486631e+01] [-4.10425186e+00 3.71657395e+00 -4.74989748e+00 ... 2.08243084e+01 4.74063015e+00 9.88295746e+00]] [[-3.02015352e+00 -1.10758328e+00 -7.77812767e+00 ... 1.27497077e-01 -6.31881833e-01 6.55691624e+00] [ 9.15869808e+00 5.17654896e+00 7.67802811e+00 ... 3.27505088e+00 1.18435407e+00 -8.86927032e+00] [ 1.73948002e+00 9.17289925e+00 -3.24899983e+00 ... 1.43171663e+01 -7.30764198e+00 -1.32478752e+01] ... [-6.57772601e-01 1.55424237e-01 4.86723232e+00 ... -4.20766687e+00 3.34734607e+00 2.88939786e+00] [ 1.17307043e+00 -1.30163848e+00 3.21180129e+00 ... 5.38013554e+00 1.28852882e+01 -4.77139282e+00] [-4.30945253e+00 7.14667511e+00 -5.61293697e+00 ... 8.34920788e+00 -4.03179836e+00 -6.36091518e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [0, 0, 1], 'dilations': 1, 'groups': 1} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_486.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.265184}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[-3.86250019e+00 -1.48330908e+01 3.51658016e-02 ... 5.57012081e-01 1.76136894e+01 -4.78115892e+00] [-4.77095604e+00 -1.18497062e+00 -2.04592648e+01 ... 4.11863470e+00 -7.58319807e+00 2.22775984e+00] [ 3.83969641e+00 1.80564392e+00 -2.81158113e+00 ... -5.07536840e+00 -5.48849535e+00 7.12445116e+00] ... [-8.48220444e+00 -3.13420486e+00 4.65617752e+00 ... 4.35645676e+00 -9.93285847e+00 1.49248695e+00] [ 7.44007969e+00 -1.93984509e+01 -2.39784002e-01 ... -1.40945492e+01 -1.01124640e+01 -1.78682823e+01] [-9.42275429e+00 1.31792426e+00 -1.11368561e+00 ... 1.12414408e+01 1.05419123e+00 -1.42022312e+00]] [[ 2.99438143e+00 1.51713572e+01 -1.46256237e+01 ... -8.56041050e+00 -8.66895103e+00 1.91398489e+00] [-4.58871651e+00 -7.44270182e+00 9.66701698e+00 ... -1.17018843e+01 -3.83797407e+00 -1.25833750e+01] [-9.94900036e+00 1.57359505e+00 1.21736526e-03 ... 1.48852754e+00 -1.68571401e+00 -9.25751626e-02] ... [ 5.87261081e-01 -3.51619053e+00 3.76726961e+00 ... -1.12863731e+01 -7.31616640e+00 -1.37311757e+00] [ 9.42112255e+00 2.21459532e+00 -1.61790383e+00 ... -4.42423153e+00 1.66085854e+01 1.01742525e+01] [-8.08932114e+00 2.30984478e+01 3.05985403e+00 ... 1.66791058e+01 1.54729271e+01 3.46055889e+00]] [[-5.00729418e+00 1.17709589e+01 -1.42472386e+00 ... 
3.72371912e+00 1.30554171e+01 8.09408188e+00] [ 3.22389650e+00 -6.52721596e+00 5.94497585e+00 ... -1.24521189e+01 8.56354177e-01 -1.00446093e+00] [-5.34270954e+00 3.91732407e+00 4.54511356e+00 ... -7.84520864e+00 -3.63625002e+00 2.92478704e+00] ... [ 1.08568487e+01 1.28038609e+00 5.78213406e+00 ... 4.40044689e+00 -1.12256632e+01 3.75022125e+00] [-5.07396746e+00 -4.32147503e+00 -3.77026224e+00 ... -5.41862202e+00 4.42280436e+00 -7.41570377e+00] [ 2.39708334e-01 -6.38111115e+00 -4.50496912e+00 ... -2.06786366e+01 7.26323414e+00 1.14610424e+01]] ... [[ 5.52844238e+00 7.08186817e+00 -7.46070671e+00 ... 1.25185156e+01 -6.07991695e+00 2.82428056e-01] [ 1.73356616e+00 3.55562115e+00 2.03345299e+01 ... -1.02933130e+01 -1.50414884e+00 3.52783346e+00] [ 7.20824242e+00 1.02561579e+01 -6.92856312e+00 ... 5.35582972e+00 -1.67443504e+01 -4.84246850e-01] ... [ 4.53484440e+00 -1.07909813e+01 -2.35146427e+00 ... 8.43219185e+00 5.35562420e+00 -3.63818312e+00] [-1.28123713e+01 -1.15132487e+00 3.72848701e+00 ... 6.33735275e+00 -1.53695524e+00 -5.55157089e+00] [-1.56167984e+00 1.17355900e+01 9.30899334e+00 ... 1.43228121e+01 -1.82861252e+01 -6.55384254e+00]] [[ 5.07796478e+00 3.16069126e+00 -3.98833513e+00 ... 4.82798338e-01 1.17368288e+01 3.58553839e+00] [-6.13208055e-01 6.08696699e+00 -8.04083943e-01 ... 1.25243139e+00 -7.05900490e-02 1.06693211e+01] [ 1.14049089e+00 -1.58455343e+01 9.10722637e+00 ... -1.53474560e+01 7.83239031e+00 2.78856009e-01] ... [ 9.33691978e+00 7.72512197e+00 -7.59718704e+00 ... -3.26033545e+00 -3.61862755e+00 -9.81183434e+00] [-1.30634654e+00 -5.02376795e+00 6.16075873e-01 ... 1.26555290e+01 -8.56265450e+00 -4.20532846e+00] [-8.25570774e+00 4.58646154e+00 -4.14082766e+00 ... -1.10440235e+01 1.56012373e+01 -8.36793780e-02]] [[ 6.80014515e+00 -1.59307566e+01 -3.54824305e+00 ... 5.88112402e+00 -1.81147518e+01 1.59841661e+01] [ 4.07400036e+00 1.59028282e+01 -1.07106409e+01 ... 
1.00332584e+01 -1.21160895e-01 -6.09188259e-01] [ 6.42516899e+00 -7.66744089e+00 1.30960579e+01 ... 9.04711056e+00 -6.09821844e+00 -1.09418516e+01] ... [ 8.93981075e+00 9.52379990e+00 8.01551521e-01 ... -9.77642822e+00 -6.94950294e+00 -2.85424089e+00] [-6.07890844e+00 1.61943455e+01 8.90764236e-01 ... 7.71461391e+00 7.19954252e+00 3.05108833e+00] [-2.09349871e+00 -7.65192842e+00 -1.00095356e+00 ... 2.78522110e+00 1.20195389e+01 1.21569824e+00]]]] [[[[-1.13220274e+00 -1.32993507e+01 3.03957224e+00 ... -2.09594297e+00 -1.03398790e+01 -1.24395151e+01] [-3.46348000e+00 -1.25973425e+01 5.99274492e+00 ... -2.67190838e+00 7.35115409e-01 3.73456907e+00] [-1.61511052e+00 9.10042667e+00 -1.21978292e+01 ... -8.21686268e+00 6.80770874e+00 1.50508442e+01] ... [-2.12128460e-01 8.62655926e+00 -4.28231287e+00 ... 6.12790871e+00 -1.25143778e+00 -8.48345661e+00] [ 4.60701323e+00 -1.14628172e+01 -5.55697727e+00 ... -5.37456512e+00 -7.32211161e+00 9.11568165e+00] [-3.41991186e+00 -7.02117682e+00 -7.65248060e+00 ... 3.33892059e+00 1.00136070e+01 2.72414875e+00]] [[ 4.62725592e+00 6.72032881e+00 -4.73031807e+00 ... 5.30213308e+00 8.41126919e+00 1.23759041e+01] [ 4.32657909e+00 2.99882770e-01 3.32574463e+00 ... -2.10286369e+01 -2.61166930e-01 -2.91827488e+00] [ 1.39587867e+00 6.16304398e+00 7.00212765e+00 ... 1.73386517e+01 -1.32457867e+01 5.19937897e+00] ... [-1.20589695e+01 4.21589041e+00 6.64098114e-02 ... -1.42932634e+01 -1.74571838e+01 7.20206785e+00] [-3.99948168e+00 -6.31886721e+00 -6.58130646e+00 ... 1.30061245e+01 5.70847178e+00 3.44095975e-01] [-5.87845445e-02 5.69513130e+00 6.13580942e-01 ... -2.95078278e+00 4.07342577e+00 -3.36225462e+00]] [[-6.55603123e+00 -1.10676088e+01 2.47868490e+00 ... -6.47651672e+00 9.73859489e-01 1.10833673e+01] [ 1.98306453e+00 1.10309649e+01 5.79485655e+00 ... 4.79700375e+00 -6.03471231e+00 -9.59872842e-01] [ 8.16824150e+00 5.53485489e+00 2.13234062e+01 ... -6.87196732e+00 3.26743174e+00 -7.90516019e-01] ... 
[-5.14947701e+00 -4.49007607e+00 1.44367838e+01 ... 4.08539391e+00 6.27789497e-01 -2.73471260e+00] [-1.54611671e+00 -1.66191444e+01 -4.09934044e+00 ... -5.36091614e+00 -5.89168882e+00 -1.47555504e+01] [ 1.37572317e+01 1.40279484e+00 1.12962618e+01 ... -3.68445778e+00 7.84033871e+00 -4.67142057e+00]] ... [[ 1.02401829e+00 -8.61611557e+00 -2.51301670e+00 ... 1.30812216e+01 -1.77469254e+01 8.12153053e+00] [-8.69330406e+00 2.76140118e+00 -9.93577957e+00 ... -2.64477587e+00 -3.16773367e+00 -2.69140339e+00] [ 7.25320625e+00 1.18777981e+01 2.83214235e+00 ... -6.45105982e+00 1.63353081e+01 -1.62816763e+00] ... [-7.15559006e+00 2.20907068e+00 -8.92515469e+00 ... -7.81303072e+00 -9.69680119e+00 2.66024923e+00] [ 8.82201576e+00 2.91670024e-01 4.34152424e-01 ... -7.94011831e+00 -4.60030377e-01 1.64845049e+00] [-1.25623012e+00 2.95447063e+00 3.07338285e+00 ... -2.47317028e+00 -3.79192257e+00 6.84048533e-01]] [[ 1.18219891e+01 2.44395256e+00 8.25958920e+00 ... 4.32195568e+00 -8.55974674e-01 -6.58072519e+00] [-6.64972901e-01 -4.66829062e+00 7.79816103e+00 ... -1.27244492e+01 -1.11187639e+01 8.97722340e+00] [ 9.47193980e-01 -4.45317459e+00 1.43359289e+01 ... -7.72091007e+00 -9.56295872e+00 4.16875744e+00] ... [ 1.07309427e+01 -8.72729111e+00 -2.48658752e+00 ... 1.39049664e+01 1.03111839e+00 -1.89263952e+00] [-4.67464566e-01 -5.87475538e-01 8.16493034e+00 ... 2.55435991e+00 3.51481140e-02 -3.94574356e+00] [ 3.92569590e+00 -5.10014105e+00 -1.58050108e+01 ... 1.31536865e+01 -4.26914930e+00 -9.07829762e+00]] [[-2.23006773e+00 8.84545517e+00 7.50565147e+00 ... -6.84335852e+00 3.79346132e+00 1.13146152e+01] [ 9.61849689e+00 4.23558664e+00 5.57920456e+00 ... 6.07947636e+00 1.60677135e+00 3.57486105e+00] [-2.29409409e+00 1.38944542e+00 6.13428593e+00 ... 3.45951843e+00 -1.54842978e+01 2.47713029e-01] ... [-7.66629648e+00 -3.15989208e+00 -7.78626299e+00 ... -1.34776622e-01 1.17610369e+01 -5.49678385e-01] [-7.90254545e+00 -4.38919687e+00 -6.18492174e+00 ... 
-1.86006558e+00 1.42808733e+01 7.46873975e-01] [-1.03427422e+00 -2.62353420e+00 -3.01188993e+00 ... 2.70947361e+00 1.27998245e+00 -8.10897064e+00]]]]]; ov_res: [[[[[-3.86250019e+00 -1.48330908e+01 3.51658016e-02 ... 5.57012081e-01 1.76136894e+01 -4.78115892e+00] [-4.77095604e+00 -1.18497062e+00 -2.04592648e+01 ... 4.11863470e+00 -7.58319807e+00 2.22775984e+00] [ 3.83969641e+00 1.80564392e+00 -2.81158113e+00 ... -5.07536840e+00 -5.48849535e+00 7.12445116e+00] ... [-8.48220444e+00 -3.13420486e+00 4.65617752e+00 ... 4.35645676e+00 -9.93285847e+00 1.49248695e+00] [ 7.44007969e+00 -1.93984509e+01 -2.39784002e-01 ... -1.40945492e+01 -1.01124640e+01 -1.78682823e+01] [-9.42275429e+00 1.31792426e+00 -1.11368561e+00 ... 1.12414408e+01 1.05419123e+00 -1.42022312e+00]] [[ 2.99438143e+00 1.51713572e+01 -1.46256237e+01 ... -8.56041050e+00 -8.66895103e+00 1.91398489e+00] [-4.58871651e+00 -7.44270182e+00 9.66701698e+00 ... -1.17018843e+01 -3.83797407e+00 -1.25833750e+01] [-9.94900036e+00 1.57359505e+00 1.21736526e-03 ... 1.48852754e+00 -1.68571401e+00 -9.25751626e-02] ... [ 5.87261081e-01 -3.51619053e+00 3.76726961e+00 ... -1.12863731e+01 -7.31616640e+00 -1.37311757e+00] [ 9.42112255e+00 2.21459532e+00 -1.61790383e+00 ... -4.42423153e+00 1.66085854e+01 1.01742525e+01] [-8.08932114e+00 2.30984478e+01 3.05985403e+00 ... 1.66791058e+01 1.54729271e+01 3.46055889e+00]] [[-5.00729418e+00 1.17709589e+01 -1.42472386e+00 ... 3.72371912e+00 1.30554171e+01 8.09408188e+00] [ 3.22389650e+00 -6.52721596e+00 5.94497585e+00 ... -1.24521189e+01 8.56354177e-01 -1.00446093e+00] [-5.34270954e+00 3.91732407e+00 4.54511356e+00 ... -7.84520864e+00 -3.63625002e+00 2.92478704e+00] ... [ 1.08568487e+01 1.28038609e+00 5.78213406e+00 ... 4.40044689e+00 -1.12256632e+01 3.75022125e+00] [-5.07396746e+00 -4.32147503e+00 -3.77026224e+00 ... -5.41862202e+00 4.42280436e+00 -7.41570377e+00] [ 2.39708334e-01 -6.38111115e+00 -4.50496912e+00 ... -2.06786366e+01 7.26323414e+00 1.14610424e+01]] ... 
[[ 5.52844238e+00 7.08186817e+00 -7.46070671e+00 ... 1.25185156e+01 -6.07991695e+00 2.82428056e-01] [ 1.73356616e+00 3.55562115e+00 2.03345299e+01 ... -1.02933130e+01 -1.50414884e+00 3.52783346e+00] [ 7.20824242e+00 1.02561579e+01 -6.92856312e+00 ... 5.35582972e+00 -1.67443504e+01 -4.84246850e-01] ... [ 4.53484440e+00 -1.07909813e+01 -2.35146427e+00 ... 8.43219185e+00 5.35562420e+00 -3.63818312e+00] [-1.28123713e+01 -1.15132487e+00 3.72848701e+00 ... 6.33735275e+00 -1.53695524e+00 -5.55157089e+00] [-1.56167984e+00 1.17355900e+01 9.30899334e+00 ... 1.43228121e+01 -1.82861252e+01 -6.55384254e+00]] [[ 5.07796478e+00 3.16069126e+00 -3.98833513e+00 ... 4.82798338e-01 1.17368288e+01 3.58553839e+00] [-6.13208055e-01 6.08696699e+00 -8.04083943e-01 ... 1.25243139e+00 -7.05900490e-02 1.06693211e+01] [ 1.14049089e+00 -1.58455343e+01 9.10722637e+00 ... -1.53474560e+01 7.83239031e+00 2.78856009e-01] ... [ 9.33691978e+00 7.72512197e+00 -7.59718704e+00 ... -3.26033545e+00 -3.61862755e+00 -9.81183434e+00] [-1.30634654e+00 -5.02376795e+00 6.16075873e-01 ... 1.26555290e+01 -8.56265450e+00 -4.20532846e+00] [-8.25570774e+00 4.58646154e+00 -4.14082766e+00 ... -1.10440235e+01 1.56012373e+01 -8.36793780e-02]] [[ 6.80014515e+00 -1.59307566e+01 -3.54824305e+00 ... 5.88112402e+00 -1.81147518e+01 1.59841661e+01] [ 4.07400036e+00 1.59028282e+01 -1.07106409e+01 ... 1.00332584e+01 -1.21160895e-01 -6.09188259e-01] [ 6.42516899e+00 -7.66744089e+00 1.30960579e+01 ... 9.04711056e+00 -6.09821844e+00 -1.09418516e+01] ... [ 8.93981075e+00 9.52379990e+00 8.01551521e-01 ... -9.77642822e+00 -6.94950294e+00 -2.85424089e+00] [-6.07890844e+00 1.61943455e+01 8.90764236e-01 ... 7.71461391e+00 7.19954252e+00 3.05108833e+00] [-2.09349871e+00 -7.65192842e+00 -1.00095356e+00 ... 2.78522110e+00 1.20195389e+01 1.21569824e+00]]]] [[[[-1.13220274e+00 -1.32993507e+01 3.03957224e+00 ... -2.09594297e+00 -1.03398790e+01 -1.24395151e+01] [-3.46348000e+00 -1.25973425e+01 5.99274492e+00 ... 
-2.67190838e+00 7.35115409e-01 3.73456907e+00] [-1.61511052e+00 9.10042667e+00 -1.21978292e+01 ... -8.21686268e+00 6.80770874e+00 1.50508442e+01] ... [-2.12128460e-01 8.62655926e+00 -4.28231287e+00 ... 6.12790871e+00 -1.25143778e+00 -8.48345661e+00] [ 4.60701323e+00 -1.14628172e+01 -5.55697727e+00 ... -5.37456512e+00 -7.32211161e+00 9.11568165e+00] [-3.41991186e+00 -7.02117682e+00 -7.65248060e+00 ... 3.33892059e+00 1.00136070e+01 2.72414875e+00]] [[ 4.62725592e+00 6.72032881e+00 -4.73031807e+00 ... 5.30213308e+00 8.41126919e+00 1.23759041e+01] [ 4.32657909e+00 2.99882770e-01 3.32574463e+00 ... -2.10286369e+01 -2.61166930e-01 -2.91827488e+00] [ 1.39587867e+00 6.16304398e+00 7.00212765e+00 ... 1.73386517e+01 -1.32457867e+01 5.19937897e+00] ... [-1.20589695e+01 4.21589041e+00 6.64098114e-02 ... -1.42932634e+01 -1.74571838e+01 7.20206785e+00] [-3.99948168e+00 -6.31886721e+00 -6.58130646e+00 ... 1.30061245e+01 5.70847178e+00 3.44095975e-01] [-5.87845445e-02 5.69513130e+00 6.13580942e-01 ... -2.95078278e+00 4.07342577e+00 -3.36225462e+00]] [[-6.55603123e+00 -1.10676088e+01 2.47868490e+00 ... -6.47651672e+00 9.73859489e-01 1.10833673e+01] [ 1.98306453e+00 1.10309649e+01 5.79485655e+00 ... 4.79700375e+00 -6.03471231e+00 -9.59872842e-01] [ 8.16824150e+00 5.53485489e+00 2.13234062e+01 ... -6.87196732e+00 3.26743174e+00 -7.90516019e-01] ... [-5.14947701e+00 -4.49007607e+00 1.44367838e+01 ... 4.08539391e+00 6.27789497e-01 -2.73471260e+00] [-1.54611671e+00 -1.66191444e+01 -4.09934044e+00 ... -5.36091614e+00 -5.89168882e+00 -1.47555504e+01] [ 1.37572317e+01 1.40279484e+00 1.12962618e+01 ... -3.68445778e+00 7.84033871e+00 -4.67142057e+00]] ... [[ 1.02401829e+00 -8.61611557e+00 -2.51301670e+00 ... 1.30812216e+01 -1.77469254e+01 8.12153053e+00] [-8.69330406e+00 2.76140118e+00 -9.93577957e+00 ... -2.64477587e+00 -3.16773367e+00 -2.69140339e+00] [ 7.25320625e+00 1.18777981e+01 2.83214235e+00 ... -6.45105982e+00 1.63353081e+01 -1.62816763e+00] ... 
[-7.15559006e+00 2.20907068e+00 -8.92515469e+00 ... -7.81303072e+00 -9.69680119e+00 2.66024923e+00] [ 8.82201576e+00 2.91670024e-01 4.34152424e-01 ... -7.94011831e+00 -4.60030377e-01 1.64845049e+00] [-1.25623012e+00 2.95447063e+00 3.07338285e+00 ... -2.47317028e+00 -3.79192257e+00 6.84048533e-01]] [[ 1.18219891e+01 2.44395256e+00 8.25958920e+00 ... 4.32195568e+00 -8.55974674e-01 -6.58072519e+00] [-6.64972901e-01 -4.66829062e+00 7.79816103e+00 ... -1.27244492e+01 -1.11187639e+01 8.97722340e+00] [ 9.47193980e-01 -4.45317459e+00 1.43359289e+01 ... -7.72091007e+00 -9.56295872e+00 4.16875744e+00] ... [ 1.07309427e+01 -8.72729111e+00 -2.48658752e+00 ... 1.39049664e+01 1.03111839e+00 -1.89263952e+00] [-4.67464566e-01 -5.87475538e-01 8.16493034e+00 ... 2.55435991e+00 3.51481140e-02 -3.94574356e+00] [ 3.92569590e+00 -5.10014105e+00 -1.58050108e+01 ... 1.31536865e+01 -4.26914930e+00 -9.07829762e+00]] [[-2.23006773e+00 8.84545517e+00 7.50565147e+00 ... -6.84335852e+00 3.79346132e+00 1.13146152e+01] [ 9.61849689e+00 4.23558664e+00 5.57920456e+00 ... 6.07947636e+00 1.60677135e+00 3.57486105e+00] [-2.29409409e+00 1.38944542e+00 6.13428593e+00 ... 3.45951843e+00 -1.54842978e+01 2.47713029e-01] ... [-7.66629648e+00 -3.15989208e+00 -7.78626299e+00 ... -1.34776622e-01 1.17610369e+01 -5.49678385e-01] [-7.90254545e+00 -4.38919687e+00 -6.18492174e+00 ... -1.86006558e+00 1.42808733e+01 7.46873975e-01] [-1.03427422e+00 -2.62353420e+00 -3.01188993e+00 ... 2.70947361e+00 1.27998245e+00 -8.10897064e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [1, 1, 0], 'dilations': 1, 'groups': 1} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_488.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 1, 0]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.36488}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 6.2703614 -2.2727995 0.47063172 ... 6.080203 0.4130761 1.557119 ] [ -0.35162103 -1.0786854 -0.58120453 ... 14.239116 6.3090076 4.8515253 ] [ 1.7368536 -4.284447 -3.9221015 ... -5.893578 -1.074057 -4.520928 ] ... [ 0.9983785 8.664003 1.26453 ... 11.165141 15.629477 -1.1277903 ] [ 7.5323644 15.152654 -12.559754 ... 1.4118093 7.841158 0.8323892 ] [ -0.02953005 9.038219 -4.899516 ... 10.747178 10.5033245 1.9028972 ]] [[ -1.4876641 0.50507927 3.0562315 ... 0.6807844 2.5778327 2.0315626 ] [ 0.6913297 -12.462407 -4.75836 ... 5.02089 -2.6684537 10.527057 ] [ 0.5166591 -4.816188 2.1111119 ... 1.7834829 -9.484907 -4.4783945 ] ... [ -7.7334228 5.1704 2.535925 ... 18.129236 -6.2843213 10.941278 ] [-10.30544 12.171223 2.4680924 ... 3.2990975 -1.219551 8.077473 ] [ -2.0264354 11.082814 2.9341297 ... 7.012271 10.527299 3.2253418 ]] [[ -0.5152327 -7.2029195 7.684756 ... 5.7170906 -5.9700727 3.757207 ] [ 3.2859182 -5.276175 -5.734831 ... 2.7846222 -13.83735 13.629613 ] [ -5.620727 -3.503962 -1.7898244 ... 5.847267 -3.837432 7.8069544 ] ... [ 13.638026 -2.7578435 -1.7602009 ... -8.911789 -1.3675748 7.0206084 ] [ 6.1219025 1.9068007 15.113273 ... 9.756296 -0.86446154 6.9212365 ] [ -0.67349184 -3.6190476 8.250706 ... 0.18734217 -2.486978 -4.442812 ]] ... 
[[ 1.4177511 4.600519 -0.670684 ... 6.172592 5.74097 0.9736185 ] [ 16.39762 -5.4865227 1.3237096 ... 10.828466 9.8385105 -10.157409 ] [ 12.429323 -0.30670524 -4.416463 ... -12.979877 -9.879151 -15.316682 ] ... [ 2.0713325 7.232358 13.574948 ... 0.92779547 -2.2137938 5.2780347 ] [ 2.5985503 -2.2252097 10.309432 ... -13.202335 -5.3719974 -1.458018 ] [ 0.5024228 -1.3936929 -3.2369761 ... -2.1870646 -0.9360355 -5.5356407 ]] [[ 4.9367204 -0.17806375 -7.682774 ... -3.327002 14.057258 8.685868 ] [ 13.572671 2.035025 5.06605 ... -0.6415957 9.430569 -3.5707088 ] [ 14.7505665 -0.63477325 16.780283 ... -16.747084 11.89795 -1.565395 ] ... [-10.886763 -0.882141 3.7137065 ... 1.5111846 -2.9359632 7.914943 ] [ -9.811147 1.3880221 2.5878036 ... 5.5955067 1.0065022 7.9944177 ] [ 2.7906203 5.769477 3.02942 ... -4.6678624 -3.166556 -0.95862806]] [[ 4.4668555 2.2198646 -4.075806 ... 12.296506 1.1994435 6.1855435 ] [ 10.454411 -0.8292662 9.101686 ... 0.33228946 5.8062754 2.6165233 ] [ 5.772358 0.72002494 14.04369 ... 10.189712 0.61484146 11.082312 ] ... [-10.500607 11.399196 -6.244939 ... 14.362085 4.402435 5.709056 ] [ 7.715148 1.7496797 3.374782 ... 12.842115 -13.479004 7.531246 ] [ 11.149027 1.3685465 16.79064 ... 6.456343 -8.300365 7.500781 ]]]] [[[[ 1.4878166 10.769787 3.649578 ... 8.847642 -1.7550622 1.39239 ] [ -0.10236478 12.635154 0.39661664 ... 8.490323 17.159737 8.890183 ] [ -4.4824123 4.362335 3.1825743 ... 0.43424118 -4.792406 -1.8195399 ] ... [ -7.00316 -5.233577 5.0680165 ... -3.9919891 8.048373 -12.053876 ] [ -1.9547273 -2.5386004 4.525787 ... -6.3433914 4.5700397 3.5098572 ] [ 2.8612592 -0.5172926 -2.562296 ... 4.838817 -5.2192135 1.5518583 ]] [[ 10.428559 3.333658 0.9472088 ... 1.1603671 12.362003 8.245916 ] [ 2.04395 13.04794 21.297468 ... 3.1821716 5.916553 14.119266 ] [ 10.337382 2.8698478 -15.508111 ... -14.660369 2.2335129 -4.9063797 ] ... [ 19.049257 -10.834274 8.612997 ... -1.102554 -6.2654924 18.229994 ] [ 7.349022 -2.2766385 -0.47761285 ... 
6.2591157 -1.6276983 16.184769 ] [ -4.8558245 3.9516048 -4.146263 ... -6.2992396 1.6630689 0.5908963 ]] [[ 3.7035794 -1.9708227 5.3161964 ... 1.1753873 4.6816773 10.436497 ] [ -9.340581 1.4439185 14.432957 ... 4.3587804 -2.3203955 -6.207913 ] [ 3.801361 8.428314 11.843627 ... 5.996191 7.454514 11.867711 ] ... [ 10.972928 -0.43916368 6.924948 ... -2.334989 -5.037299 0.7369235 ] [ 0.5337013 4.7590084 7.188999 ... -5.2817388 1.2357314 -0.9544438 ] [ -3.4256291 -5.3525395 1.6059619 ... -2.520228 12.481418 6.56304 ]] ... [[ 4.6338444 -6.079284 7.358801 ... 11.843798 11.577389 2.6009455 ] [ 2.752184 -4.4238133 -3.2486305 ... 8.081146 -5.453348 2.6053472 ] [ 7.1599083 3.135889 -1.2652961 ... 16.298477 4.6996326 5.994856 ] ... [ -0.4939729 -0.6738142 -0.20520341 ... -21.179918 2.0347052 4.1541004 ] [ 16.50779 1.0896966 -2.1311774 ... -7.476545 12.112079 2.4334874 ] [ -2.0887733 -1.4701957 -7.5546784 ... 3.9183536 -7.4031377 11.925493 ]] [[ -0.06171644 -0.28561366 -6.9823985 ... -3.2242794 7.8339124 -10.665815 ] [ -7.747215 10.509322 2.0235949 ... -1.0736579 2.3730338 -2.3682213 ] [ -3.1341376 12.536086 27.271893 ... -13.679707 23.44944 -9.295973 ] ... [ -1.3877116 -1.0812794 -4.8104634 ... -5.593859 2.2312868 -11.205872 ] [ -3.2921147 -20.850641 -16.551773 ... -3.1124525 5.5643835 -6.0370197 ] [ -5.2124023 -3.3696184 -5.4500027 ... 6.1768913 -3.9900088 2.4605334 ]] [[ -9.668786 3.3797193 0.10308528 ... -2.5794563 2.743833 -1.6946343 ] [ -6.22236 1.2671251 8.565904 ... -0.7469765 4.6625423 1.4769751 ] [ -1.7571844 4.057435 5.51286 ... -6.719319 1.84125 -7.9303803 ] ... [-11.499813 -1.3583518 1.2147119 ... -9.444034 -0.992257 -1.3516341 ] [ -5.307009 -3.322956 -5.5409384 ... 1.309415 -0.87544715 5.7171087 ] [ -9.57355 -4.028866 -3.9589763 ... 0.7967398 -0.15561616 -5.474909 ]]]]]; ov_res: [[[[[ 6.2703614 -2.2727995 0.47063172 ... 6.080203 0.4130761 1.557119 ] [ -0.35162103 -1.0786854 -0.58120453 ... 14.239116 6.3090076 4.8515253 ] [ 1.7368536 -4.284447 -3.9221015 ... 
-5.893578 -1.074057 -4.520928 ] ... [ 0.9983785 8.664003 1.26453 ... 11.165141 15.629477 -1.1277903 ] [ 7.5323644 15.152654 -12.559754 ... 1.4118093 7.841158 0.8323892 ] [ -0.02953005 9.038219 -4.899516 ... 10.747178 10.5033245 1.9028972 ]] [[ -1.4876641 0.50507927 3.0562315 ... 0.6807844 2.5778327 2.0315626 ] [ 0.6913297 -12.462407 -4.75836 ... 5.02089 -2.6684537 10.527057 ] [ 0.5166591 -4.816188 2.1111119 ... 1.7834829 -9.484907 -4.4783945 ] ... [ -7.7334228 5.1704 2.535925 ... 18.129236 -6.2843213 10.941278 ] [-10.30544 12.171223 2.4680924 ... 3.2990975 -1.219551 8.077473 ] [ -2.0264354 11.082814 2.9341297 ... 7.012271 10.527299 3.2253418 ]] [[ -0.5152327 -7.2029195 7.684756 ... 5.7170906 -5.9700727 3.757207 ] [ 3.2859182 -5.276175 -5.734831 ... 2.7846222 -13.83735 13.629613 ] [ -5.620727 -3.503962 -1.7898244 ... 5.847267 -3.837432 7.8069544 ] ... [ 13.638026 -2.7578435 -1.7602009 ... -8.911789 -1.3675748 7.0206084 ] [ 6.1219025 1.9068007 15.113273 ... 9.756296 -0.86446154 6.9212365 ] [ -0.67349184 -3.6190476 8.250706 ... 0.18734217 -2.486978 -4.442812 ]] ... [[ 1.4177511 4.600519 -0.670684 ... 6.172592 5.74097 0.9736185 ] [ 16.39762 -5.4865227 1.3237096 ... 10.828466 9.8385105 -10.157409 ] [ 12.429323 -0.30670524 -4.416463 ... -12.979877 -9.879151 -15.316682 ] ... [ 2.0713325 7.232358 13.574948 ... 0.92779547 -2.2137938 5.2780347 ] [ 2.5985503 -2.2252097 10.309432 ... -13.202335 -5.3719974 -1.458018 ] [ 0.5024228 -1.3936929 -3.2369761 ... -2.1870646 -0.9360355 -5.5356407 ]] [[ 4.9367204 -0.17806375 -7.682774 ... -3.327002 14.057258 8.685868 ] [ 13.572671 2.035025 5.06605 ... -0.6415957 9.430569 -3.5707088 ] [ 14.7505665 -0.63477325 16.780283 ... -16.747084 11.89795 -1.565395 ] ... [-10.886763 -0.882141 3.7137065 ... 1.5111846 -2.9359632 7.914943 ] [ -9.811147 1.3880221 2.5878036 ... 5.5955067 1.0065022 7.9944177 ] [ 2.7906203 5.769477 3.02942 ... -4.6678624 -3.166556 -0.95862806]] [[ 4.4668555 2.2198646 -4.075806 ... 
12.296506 1.1994435 6.1855435 ] [ 10.454411 -0.8292662 9.101686 ... 0.33228946 5.8062754 2.6165233 ] [ 5.772358 0.72002494 14.04369 ... 10.189712 0.61484146 11.082312 ] ... [-10.500607 11.399196 -6.244939 ... 14.362085 4.402435 5.709056 ] [ 7.715148 1.7496797 3.374782 ... 12.842115 -13.479004 7.531246 ] [ 11.149027 1.3685465 16.79064 ... 6.456343 -8.300365 7.500781 ]]]] [[[[ 1.4878166 10.769787 3.649578 ... 8.847642 -1.7550622 1.39239 ] [ -0.10236478 12.635154 0.39661664 ... 8.490323 17.159737 8.890183 ] [ -4.4824123 4.362335 3.1825743 ... 0.43424118 -4.792406 -1.8195399 ] ... [ -7.00316 -5.233577 5.0680165 ... -3.9919891 8.048373 -12.053876 ] [ -1.9547273 -2.5386004 4.525787 ... -6.3433914 4.5700397 3.5098572 ] [ 2.8612592 -0.5172926 -2.562296 ... 4.838817 -5.2192135 1.5518583 ]] [[ 10.428559 3.333658 0.9472088 ... 1.1603671 12.362003 8.245916 ] [ 2.04395 13.04794 21.297468 ... 3.1821716 5.916553 14.119266 ] [ 10.337382 2.8698478 -15.508111 ... -14.660369 2.2335129 -4.9063797 ] ... [ 19.049257 -10.834274 8.612997 ... -1.102554 -6.2654924 18.229994 ] [ 7.349022 -2.2766385 -0.47761285 ... 6.2591157 -1.6276983 16.184769 ] [ -4.8558245 3.9516048 -4.146263 ... -6.2992396 1.6630689 0.5908963 ]] [[ 3.7035794 -1.9708227 5.3161964 ... 1.1753873 4.6816773 10.436497 ] [ -9.340581 1.4439185 14.432957 ... 4.3587804 -2.3203955 -6.207913 ] [ 3.801361 8.428314 11.843627 ... 5.996191 7.454514 11.867711 ] ... [ 10.972928 -0.43916368 6.924948 ... -2.334989 -5.037299 0.7369235 ] [ 0.5337013 4.7590084 7.188999 ... -5.2817388 1.2357314 -0.9544438 ] [ -3.4256291 -5.3525395 1.6059619 ... -2.520228 12.481418 6.56304 ]] ... [[ 4.6338444 -6.079284 7.358801 ... 11.843798 11.577389 2.6009455 ] [ 2.752184 -4.4238133 -3.2486305 ... 8.081146 -5.453348 2.6053472 ] [ 7.1599083 3.135889 -1.2652961 ... 16.298477 4.6996326 5.994856 ] ... [ -0.4939729 -0.6738142 -0.20520341 ... -21.179918 2.0347052 4.1541004 ] [ 16.50779 1.0896966 -2.1311774 ... 
-7.476545 12.112079 2.4334874 ] [ -2.0887733 -1.4701957 -7.5546784 ... 3.9183536 -7.4031377 11.925493 ]] [[ -0.06171644 -0.28561366 -6.9823985 ... -3.2242794 7.8339124 -10.665815 ] [ -7.747215 10.509322 2.0235949 ... -1.0736579 2.3730338 -2.3682213 ] [ -3.1341376 12.536086 27.271893 ... -13.679707 23.44944 -9.295973 ] ... [ -1.3877116 -1.0812794 -4.8104634 ... -5.593859 2.2312868 -11.205872 ] [ -3.2921147 -20.850641 -16.551773 ... -3.1124525 5.5643835 -6.0370197 ] [ -5.2124023 -3.3696184 -5.4500027 ... 6.1768913 -3.9900088 2.4605334 ]] [[ -9.668786 3.3797193 0.10308528 ... -2.5794563 2.743833 -1.6946343 ] [ -6.22236 1.2671251 8.565904 ... -0.7469765 4.6625423 1.4769751 ] [ -1.7571844 4.057435 5.51286 ... -6.719319 1.84125 -7.9303803 ] ... [-11.499813 -1.3583518 1.2147119 ... -9.444034 -0.992257 -1.3516341 ] [ -5.307009 -3.322956 -5.5409384 ... 1.309415 -0.87544715 5.7171087 ] [ -9.57355 -4.028866 -3.9589763 ... 0.7967398 -0.15561616 -5.474909 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [0, 1, 1], 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_490.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.674102}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 9.84639931e+00 -1.77242851e+00 2.01312470e+00 ... 2.78880787e+00 -1.27732630e+01 1.27712131e-01] [ 6.09676075e+00 2.19808364e+00 1.52367430e+01 ... -7.31296349e+00 -7.17732716e+00 -3.88162589e+00] [-1.48177624e-01 5.92441416e+00 -3.70600104e+00 ... -2.10844994e-01 2.98903871e+00 -6.45459712e-01] ... [ 6.79001236e+00 -9.67941821e-01 3.71089673e+00 ... 1.02813578e+00 -1.04736109e+01 3.79996276e+00] [ 4.93519831e+00 6.63667870e+00 -9.35773182e+00 ... 1.30289531e+00 7.20422077e+00 -2.44437313e+00] [ 4.74906492e+00 1.19860897e+01 7.12920094e+00 ... -9.97857761e+00 1.35570583e+01 -1.54000330e+01]] [[ 1.32873893e-01 -4.82485008e+00 9.48553944e+00 ... 5.62737989e+00 -7.24396884e-01 3.88701987e+00] [ 3.39361954e+00 -7.14155483e+00 1.86072063e+01 ... -3.93886876e+00 -5.99780846e+00 4.39372349e+00] [ 2.84649849e-01 8.00476742e+00 -7.17170763e+00 ... -7.44182348e+00 1.04185343e+01 6.37715626e+00] ... [-7.21926880e+00 1.16852436e+01 -6.06287909e+00 ... 3.05378985e+00 -1.49718676e+01 -2.75812411e+00] [ 5.70990801e-01 -1.25472565e+01 -3.61456180e+00 ... 8.65398216e+00 1.95049534e+01 4.53850412e+00] [ 4.10877562e+00 5.29303360e+00 4.78299499e-01 ... -7.09930706e+00 9.29192829e+00 -1.13610134e+01]] [[ 4.52177048e+00 -7.32714319e+00 3.85043454e+00 ... 
1.15081530e+01 5.75032616e+00 -2.88100481e+00] [ 1.53253622e+01 2.92809010e+00 -6.55290604e+00 ... -5.21354294e+00 -7.87134027e+00 -1.30539298e+00] [-1.83322544e+01 1.18248491e+01 6.65551424e+00 ... -1.82307959e-02 3.24022323e-01 -1.07803574e+01] ... [ 2.01242042e+00 -4.93214226e+00 -1.12185144e+01 ... -1.03396597e+01 -1.11671324e+01 -6.92399311e+00] [ 2.98085976e+00 -6.15828896e+00 -8.72057438e+00 ... -2.02567244e+00 2.28248596e+01 7.81369257e+00] [ 3.62231398e+00 -1.09750652e+00 1.58524179e+01 ... -1.44216547e+01 -2.35339713e+00 6.10736275e+00]] ... [[-4.77354670e+00 1.20541925e+01 6.51964617e+00 ... 1.64994278e+01 5.77620602e+00 2.43041086e+00] [ 3.76010704e+00 -1.72837186e+00 3.32360816e+00 ... -5.06128168e+00 -1.79390087e+01 3.91374397e+00] [-7.51991034e+00 7.33621025e+00 -2.06205726e+00 ... -9.23154736e+00 1.03420105e+01 4.13927937e+00] ... [ 4.71731567e+00 -7.36435235e-01 -1.95579767e+00 ... -1.24211521e+01 5.75461864e+00 -5.14834821e-01] [ 6.67388821e+00 1.63298950e+01 1.27714939e+01 ... 8.43496037e+00 -7.76522398e+00 1.13132439e+01] [ 4.50456202e-01 4.83334875e+00 5.96804571e+00 ... -2.47087598e-01 -1.90192652e+00 3.09078765e+00]] [[ 2.19363976e+00 -6.44184017e+00 -3.45810151e+00 ... -2.21488118e-01 -8.64831161e+00 6.28967476e+00] [-2.41773963e+00 -8.00178909e+00 -7.82502985e+00 ... 3.14352536e+00 7.78997898e+00 7.27291536e+00] [ 3.79013896e-01 1.31366844e+01 -4.37431479e+00 ... 1.26018162e+01 1.82408905e+00 -4.28519666e-01] ... [ 5.42069864e+00 -2.24567490e+01 -3.08976007e+00 ... -4.30063820e+00 4.63336229e+00 -2.18430567e+00] [ 2.75732446e+00 7.14269829e+00 5.40708303e+00 ... -6.35298443e+00 -2.12721825e+01 -4.14907455e+00] [-3.52059770e+00 -1.80340004e+00 -1.69063747e-01 ... 1.17298861e+01 3.48457384e+00 7.55506706e+00]] [[ 9.48905468e+00 4.81856585e+00 1.01218376e+01 ... 7.76550245e+00 -6.83050728e+00 3.46151257e+00] [-7.56883287e+00 -7.77288580e+00 -2.65359497e+00 ... 
1.23784084e+01 -1.07183094e+01 -8.85868931e+00] [-4.48606157e+00 4.67211866e+00 -9.21421909e+00 ... -3.57861018e+00 1.21322041e+01 -1.27882433e+00] ... [-5.56388283e+00 2.19767380e+00 5.44386864e+00 ... -1.05513172e+01 1.16310091e+01 -9.29812145e+00] [ 9.79424572e+00 -9.19227505e+00 -5.92441130e+00 ... 1.29491234e+01 -3.64334166e-01 -6.14805162e-01] [ 3.50651217e+00 5.65289259e+00 2.00661731e+00 ... 3.14403725e+00 -1.05552697e+00 -5.74731886e-01]]]] [[[[-6.19257164e+00 1.25063639e+01 6.85064507e+00 ... -1.84877548e+01 -1.71920757e+01 4.89843130e+00] [ 3.33765984e+00 9.21886730e+00 -3.76739335e+00 ... 1.45428295e+01 7.77934194e-02 4.12412453e+00] [-1.16272621e+01 2.23467851e+00 1.16267796e+01 ... -1.36893835e+01 -5.18738461e+00 -3.52137446e+00] ... [ 6.80773115e+00 9.71778011e+00 2.26040039e+01 ... 2.51105309e+00 -3.55019975e+00 -1.56880164e+00] [ 9.09865570e+00 -2.84426832e+00 -3.51980233e+00 ... 1.48561745e+01 -6.49864256e-01 7.60188913e+00] [-1.84130502e+00 1.28468828e+01 2.86468840e+00 ... 8.35461998e+00 1.00932693e+01 6.35659456e-01]] [[ 5.42261243e-01 2.76557475e-01 -1.45028114e+00 ... 8.69478512e+00 -8.01465690e-01 8.72930288e-02] [ 2.58571434e+00 1.81743298e+01 -1.50236797e+00 ... -6.00328064e+00 -1.55433760e+01 3.53485727e+00] [-8.41026783e+00 -8.54111862e+00 2.02930450e+01 ... 1.28146639e+01 -3.70352387e+00 1.70695877e+01] ... [ 1.48859227e+00 -4.43720055e+00 4.41704655e+00 ... 3.82384181e+00 -7.65202284e+00 1.84133381e-01] [ 5.48411083e+00 3.63925624e+00 3.07527328e+00 ... 1.70821738e+00 5.95396996e+00 1.00279875e+01] [-2.82254100e-01 -9.20741081e+00 -2.02880716e+00 ... 1.10237684e+01 6.71273613e+00 -2.56655025e+00]] [[-5.50169516e+00 -3.28073668e+00 -7.67842436e+00 ... -2.51476908e+00 4.21455574e+00 1.00343781e+01] [-9.83992755e-01 9.24819660e+00 6.87725163e+00 ... 1.34832668e+01 -5.69517136e+00 2.39978933e+00] [-3.97659802e+00 7.05047989e+00 1.03374395e+01 ... 4.10277814e-01 -3.70127845e+00 7.46540129e-01] ... 
[ 3.77182770e+00 1.40670729e+01 -3.76062083e+00 ... -9.42169094e+00 1.77999382e+01 9.99557400e+00] [ 6.53442478e+00 -7.22395325e+00 -5.29054594e+00 ... -2.48512821e+01 -1.68301964e+00 -9.33890283e-01] [-4.58561897e+00 2.67730021e+00 2.12056017e+00 ... 6.22532797e+00 2.44417739e+00 4.94840336e+00]] ... [[-1.22459531e+00 -3.71944880e+00 -7.60014963e+00 ... 1.88677368e+01 1.39175196e+01 -4.29209709e+00] [-2.72748327e+00 1.59089541e+00 1.32154055e+01 ... -1.43068895e+01 -5.13307238e+00 1.31482382e+01] [-1.23376160e+01 -1.46271143e+01 -1.16572595e+00 ... 1.00983038e+01 -6.36837006e+00 5.22750974e-01] ... [-1.56507111e+00 -7.27235270e+00 -4.90187311e+00 ... -7.46521235e+00 -1.42119813e+00 1.44824207e-01] [ 3.34985757e+00 9.52066660e-01 -8.36353207e+00 ... 8.04610538e+00 1.49196634e+01 2.27038288e+00] [-2.70789313e+00 1.01127977e+01 -1.13294134e+01 ... -9.22190094e+00 5.36943436e+00 7.93288088e+00]] [[ 1.38098612e+01 4.99085331e+00 7.30052471e+00 ... 1.33985386e+01 -1.64444494e+00 3.17077070e-01] [ 2.76502937e-01 1.66992340e+01 -1.41350784e+01 ... -4.18380404e+00 2.40293241e+00 9.24884081e-01] [-3.07485008e+00 2.19489498e+01 4.20776939e+00 ... -5.19971943e+00 2.94338036e+00 -1.88129854e+00] ... [ 1.07021046e+00 -8.89780521e+00 1.91953945e+01 ... -3.72596002e+00 -5.94629812e+00 1.69667220e+00] [-1.51159811e+00 -3.84940314e+00 -3.17013335e+00 ... 5.86522245e+00 -1.28003907e+00 1.38613710e+01] [ 6.23810434e+00 1.09164352e+01 -4.85449839e+00 ... -2.11958933e+00 2.50604868e+00 -6.50892353e+00]] [[-5.99978828e+00 -9.63808823e+00 1.19432240e+01 ... 1.41867495e+01 -5.51557112e+00 -4.34483707e-01] [-1.31974936e+00 4.26277304e+00 5.53954506e+00 ... -7.71038008e+00 -3.93302894e+00 8.07791328e+00] [-1.26689100e+01 -8.02976513e+00 -2.63045979e+01 ... 1.27698841e+01 2.04722214e+00 -1.82119703e+00] ... [-1.05954094e+01 -2.33045053e+00 6.55530071e+00 ... 1.96117353e+00 -4.56691885e+00 2.35696316e+00] [-9.71918774e+00 3.73186946e+00 -9.40388107e+00 ... 
9.25310802e+00 4.48820066e+00 -1.11636238e+01] [-1.05979929e+01 7.11746264e+00 2.56449986e+00 ... 1.77066588e+00 1.05640001e+01 8.50454140e+00]]]]]; ov_res: [[[[[ 9.84639931e+00 -1.77242851e+00 2.01312470e+00 ... 2.78880787e+00 -1.27732630e+01 1.27712131e-01] [ 6.09676075e+00 2.19808364e+00 1.52367430e+01 ... -7.31296349e+00 -7.17732716e+00 -3.88162589e+00] [-1.48177624e-01 5.92441416e+00 -3.70600104e+00 ... -2.10844994e-01 2.98903871e+00 -6.45459712e-01] ... [ 6.79001236e+00 -9.67941821e-01 3.71089673e+00 ... 1.02813578e+00 -1.04736109e+01 3.79996276e+00] [ 4.93519831e+00 6.63667870e+00 -9.35773182e+00 ... 1.30289531e+00 7.20422077e+00 -2.44437313e+00] [ 4.74906492e+00 1.19860897e+01 7.12920094e+00 ... -9.97857761e+00 1.35570583e+01 -1.54000330e+01]] [[ 1.32873893e-01 -4.82485008e+00 9.48553944e+00 ... 5.62737989e+00 -7.24396884e-01 3.88701987e+00] [ 3.39361954e+00 -7.14155483e+00 1.86072063e+01 ... -3.93886876e+00 -5.99780846e+00 4.39372349e+00] [ 2.84649849e-01 8.00476742e+00 -7.17170763e+00 ... -7.44182348e+00 1.04185343e+01 6.37715626e+00] ... [-7.21926880e+00 1.16852436e+01 -6.06287909e+00 ... 3.05378985e+00 -1.49718676e+01 -2.75812411e+00] [ 5.70990801e-01 -1.25472565e+01 -3.61456180e+00 ... 8.65398216e+00 1.95049534e+01 4.53850412e+00] [ 4.10877562e+00 5.29303360e+00 4.78299499e-01 ... -7.09930706e+00 9.29192829e+00 -1.13610134e+01]] [[ 4.52177048e+00 -7.32714319e+00 3.85043454e+00 ... 1.15081530e+01 5.75032616e+00 -2.88100481e+00] [ 1.53253622e+01 2.92809010e+00 -6.55290604e+00 ... -5.21354294e+00 -7.87134027e+00 -1.30539298e+00] [-1.83322544e+01 1.18248491e+01 6.65551424e+00 ... -1.82307959e-02 3.24022323e-01 -1.07803574e+01] ... [ 2.01242042e+00 -4.93214226e+00 -1.12185144e+01 ... -1.03396597e+01 -1.11671324e+01 -6.92399311e+00] [ 2.98085976e+00 -6.15828896e+00 -8.72057438e+00 ... -2.02567244e+00 2.28248596e+01 7.81369257e+00] [ 3.62231398e+00 -1.09750652e+00 1.58524179e+01 ... -1.44216547e+01 -2.35339713e+00 6.10736275e+00]] ... 
[[-4.77354670e+00 1.20541925e+01 6.51964617e+00 ... 1.64994278e+01 5.77620602e+00 2.43041086e+00] [ 3.76010704e+00 -1.72837186e+00 3.32360816e+00 ... -5.06128168e+00 -1.79390087e+01 3.91374397e+00] [-7.51991034e+00 7.33621025e+00 -2.06205726e+00 ... -9.23154736e+00 1.03420105e+01 4.13927937e+00] ... [ 4.71731567e+00 -7.36435235e-01 -1.95579767e+00 ... -1.24211521e+01 5.75461864e+00 -5.14834821e-01] [ 6.67388821e+00 1.63298950e+01 1.27714939e+01 ... 8.43496037e+00 -7.76522398e+00 1.13132439e+01] [ 4.50456202e-01 4.83334875e+00 5.96804571e+00 ... -2.47087598e-01 -1.90192652e+00 3.09078765e+00]] [[ 2.19363976e+00 -6.44184017e+00 -3.45810151e+00 ... -2.21488118e-01 -8.64831161e+00 6.28967476e+00] [-2.41773963e+00 -8.00178909e+00 -7.82502985e+00 ... 3.14352536e+00 7.78997898e+00 7.27291536e+00] [ 3.79013896e-01 1.31366844e+01 -4.37431479e+00 ... 1.26018162e+01 1.82408905e+00 -4.28519666e-01] ... [ 5.42069864e+00 -2.24567490e+01 -3.08976007e+00 ... -4.30063820e+00 4.63336229e+00 -2.18430567e+00] [ 2.75732446e+00 7.14269829e+00 5.40708303e+00 ... -6.35298443e+00 -2.12721825e+01 -4.14907455e+00] [-3.52059770e+00 -1.80340004e+00 -1.69063747e-01 ... 1.17298861e+01 3.48457384e+00 7.55506706e+00]] [[ 9.48905468e+00 4.81856585e+00 1.01218376e+01 ... 7.76550245e+00 -6.83050728e+00 3.46151257e+00] [-7.56883287e+00 -7.77288580e+00 -2.65359497e+00 ... 1.23784084e+01 -1.07183094e+01 -8.85868931e+00] [-4.48606157e+00 4.67211866e+00 -9.21421909e+00 ... -3.57861018e+00 1.21322041e+01 -1.27882433e+00] ... [-5.56388283e+00 2.19767380e+00 5.44386864e+00 ... -1.05513172e+01 1.16310091e+01 -9.29812145e+00] [ 9.79424572e+00 -9.19227505e+00 -5.92441130e+00 ... 1.29491234e+01 -3.64334166e-01 -6.14805162e-01] [ 3.50651217e+00 5.65289259e+00 2.00661731e+00 ... 3.14403725e+00 -1.05552697e+00 -5.74731886e-01]]]] [[[[-6.19257164e+00 1.25063639e+01 6.85064507e+00 ... -1.84877548e+01 -1.71920757e+01 4.89843130e+00] [ 3.33765984e+00 9.21886730e+00 -3.76739335e+00 ... 
1.45428295e+01 7.77934194e-02 4.12412453e+00] [-1.16272621e+01 2.23467851e+00 1.16267796e+01 ... -1.36893835e+01 -5.18738461e+00 -3.52137446e+00] ... [ 6.80773115e+00 9.71778011e+00 2.26040039e+01 ... 2.51105309e+00 -3.55019975e+00 -1.56880164e+00] [ 9.09865570e+00 -2.84426832e+00 -3.51980233e+00 ... 1.48561745e+01 -6.49864256e-01 7.60188913e+00] [-1.84130502e+00 1.28468828e+01 2.86468840e+00 ... 8.35461998e+00 1.00932693e+01 6.35659456e-01]] [[ 5.42261243e-01 2.76557475e-01 -1.45028114e+00 ... 8.69478512e+00 -8.01465690e-01 8.72930288e-02] [ 2.58571434e+00 1.81743298e+01 -1.50236797e+00 ... -6.00328064e+00 -1.55433760e+01 3.53485727e+00] [-8.41026783e+00 -8.54111862e+00 2.02930450e+01 ... 1.28146639e+01 -3.70352387e+00 1.70695877e+01] ... [ 1.48859227e+00 -4.43720055e+00 4.41704655e+00 ... 3.82384181e+00 -7.65202284e+00 1.84133381e-01] [ 5.48411083e+00 3.63925624e+00 3.07527328e+00 ... 1.70821738e+00 5.95396996e+00 1.00279875e+01] [-2.82254100e-01 -9.20741081e+00 -2.02880716e+00 ... 1.10237684e+01 6.71273613e+00 -2.56655025e+00]] [[-5.50169516e+00 -3.28073668e+00 -7.67842436e+00 ... -2.51476908e+00 4.21455574e+00 1.00343781e+01] [-9.83992755e-01 9.24819660e+00 6.87725163e+00 ... 1.34832668e+01 -5.69517136e+00 2.39978933e+00] [-3.97659802e+00 7.05047989e+00 1.03374395e+01 ... 4.10277814e-01 -3.70127845e+00 7.46540129e-01] ... [ 3.77182770e+00 1.40670729e+01 -3.76062083e+00 ... -9.42169094e+00 1.77999382e+01 9.99557400e+00] [ 6.53442478e+00 -7.22395325e+00 -5.29054594e+00 ... -2.48512821e+01 -1.68301964e+00 -9.33890283e-01] [-4.58561897e+00 2.67730021e+00 2.12056017e+00 ... 6.22532797e+00 2.44417739e+00 4.94840336e+00]] ... [[-1.22459531e+00 -3.71944880e+00 -7.60014963e+00 ... 1.88677368e+01 1.39175196e+01 -4.29209709e+00] [-2.72748327e+00 1.59089541e+00 1.32154055e+01 ... -1.43068895e+01 -5.13307238e+00 1.31482382e+01] [-1.23376160e+01 -1.46271143e+01 -1.16572595e+00 ... 1.00983038e+01 -6.36837006e+00 5.22750974e-01] ... 
[-1.56507111e+00 -7.27235270e+00 -4.90187311e+00 ... -7.46521235e+00 -1.42119813e+00 1.44824207e-01] [ 3.34985757e+00 9.52066660e-01 -8.36353207e+00 ... 8.04610538e+00 1.49196634e+01 2.27038288e+00] [-2.70789313e+00 1.01127977e+01 -1.13294134e+01 ... -9.22190094e+00 5.36943436e+00 7.93288088e+00]] [[ 1.38098612e+01 4.99085331e+00 7.30052471e+00 ... 1.33985386e+01 -1.64444494e+00 3.17077070e-01] [ 2.76502937e-01 1.66992340e+01 -1.41350784e+01 ... -4.18380404e+00 2.40293241e+00 9.24884081e-01] [-3.07485008e+00 2.19489498e+01 4.20776939e+00 ... -5.19971943e+00 2.94338036e+00 -1.88129854e+00] ... [ 1.07021046e+00 -8.89780521e+00 1.91953945e+01 ... -3.72596002e+00 -5.94629812e+00 1.69667220e+00] [-1.51159811e+00 -3.84940314e+00 -3.17013335e+00 ... 5.86522245e+00 -1.28003907e+00 1.38613710e+01] [ 6.23810434e+00 1.09164352e+01 -4.85449839e+00 ... -2.11958933e+00 2.50604868e+00 -6.50892353e+00]] [[-5.99978828e+00 -9.63808823e+00 1.19432240e+01 ... 1.41867495e+01 -5.51557112e+00 -4.34483707e-01] [-1.31974936e+00 4.26277304e+00 5.53954506e+00 ... -7.71038008e+00 -3.93302894e+00 8.07791328e+00] [-1.26689100e+01 -8.02976513e+00 -2.63045979e+01 ... 1.27698841e+01 2.04722214e+00 -1.82119703e+00] ... [-1.05954094e+01 -2.33045053e+00 6.55530071e+00 ... 1.96117353e+00 -4.56691885e+00 2.35696316e+00] [-9.71918774e+00 3.73186946e+00 -9.40388107e+00 ... 9.25310802e+00 4.48820066e+00 -1.11636238e+01] [-1.05979929e+01 7.11746264e+00 2.56449986e+00 ... 1.77066588e+00 1.05640001e+01 8.50454140e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [1, 0, 1], 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_492.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.46007}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[-3.38013983e+00 8.06292820e+00 -6.00191164e+00 ... 8.81471443e+00 1.44715714e+00 1.08498306e+01] [-7.35026884e+00 1.99644542e+00 -9.11401176e+00 ... -4.20670938e+00 5.18204069e+00 5.27220154e+00] [-7.74023294e-01 -2.54518986e+00 -3.60615969e+00 ... -2.33379316e+00 3.07733321e+00 1.04379807e+01] ... [-1.59370899e+00 -7.83934736e+00 5.85380030e+00 ... 7.67927980e+00 1.31019855e+00 3.29377174e+00] [-6.51854277e-01 4.34146976e+00 -4.96887493e+00 ... 2.61142063e+00 5.67253590e-01 2.28775620e+00] [ 9.52106667e+00 -6.12103891e+00 3.82099462e+00 ... 5.65694714e+00 -1.14047170e+00 -7.52254486e-01]] [[-1.21839666e+00 9.50830162e-01 2.71936274e+00 ... 3.52699924e+00 6.46814108e-01 -3.31260872e+00] [-1.44707108e+01 -1.17863560e+00 -4.08576965e+00 ... 1.52466357e+00 -1.60022902e+00 -4.10337591e+00] [ 5.13298845e+00 1.05879421e+01 7.75280857e+00 ... 1.45197353e+01 -1.24467773e+01 4.78247929e+00] ... [-6.11494446e+00 -3.57109737e+00 5.82804728e+00 ... 1.78299828e+01 -2.56720448e+00 8.85674667e+00] [-2.53649831e+00 -2.13809156e+00 -4.97756910e+00 ... -6.53028727e-01 -5.28698444e+00 1.28773289e+01] [ 9.81473732e+00 7.21435022e+00 -7.31665468e+00 ... 5.71505308e+00 -2.04989815e+00 1.59236121e+00]] [[-4.72268963e+00 -1.38403072e+01 -6.65930891e+00 ... 
1.43156958e+00 1.40578213e+01 -6.34219170e-01] [-2.83121943e-01 3.55368853e+00 -1.53216767e+00 ... 1.71298516e+00 -8.34420681e-01 -3.36546469e+00] [ 1.75147176e+00 -3.72762346e+00 9.86048508e+00 ... -3.94656420e+00 5.62607288e+00 7.48559904e+00] ... [ 4.69749403e+00 -7.85063219e+00 -2.32476711e-01 ... -1.21616764e+01 9.78387833e+00 -1.46623192e+01] [ 1.44642484e+00 7.40122604e+00 1.57288952e+01 ... 4.85700321e+00 -9.26543617e+00 8.76953411e+00] [-6.02775621e+00 1.16272402e+00 -2.02919559e+01 ... 6.94093180e+00 1.72013378e+00 -1.31472492e+01]] ... [[ 6.49455643e+00 -9.00572014e+00 8.59682083e+00 ... 8.60504341e+00 -8.73765755e+00 6.89353752e+00] [-1.43171768e+01 5.20735264e+00 -7.90286875e+00 ... -3.42986631e+00 -6.29922199e+00 -1.03689861e+00] [ 2.91810989e+00 -2.42643046e+00 1.71261096e+00 ... 1.95694888e+00 1.13538017e+01 5.73478985e+00] ... [-5.56162739e+00 6.64104986e+00 -3.07233477e+00 ... -1.00460434e+01 8.55345058e+00 3.11504030e+00] [-5.57467222e+00 7.65386105e+00 3.65593743e+00 ... -2.70219231e+00 2.98830914e+00 2.99844503e+00] [ 7.55484295e+00 -5.33541822e+00 -6.78947020e+00 ... 2.37369990e+00 -1.61062527e+01 5.03901863e+00]] [[-8.66848373e+00 -1.10379467e+01 -2.59359217e+00 ... 5.50973749e+00 -2.81966090e-01 4.62306499e+00] [-1.64914875e+01 2.50035906e+00 -4.44981289e+00 ... -5.14394403e-01 -3.45401335e+00 -8.47853899e-01] [-7.89465284e+00 -1.46915550e+01 -5.30174541e+00 ... -2.71367884e+00 2.38930869e+00 -1.85409021e+00] ... [ 3.99347854e+00 -5.14109898e+00 1.52582321e+01 ... -1.45623302e+01 7.51260340e-01 2.49231529e+00] [-8.45127106e+00 7.55078077e+00 -5.71905613e-01 ... 6.24481344e+00 -1.39938221e+01 -2.34121966e+00] [ 3.79937530e+00 5.90845013e+00 -3.62389088e-02 ... -6.62928629e+00 -1.83263922e+00 6.70575762e+00]] [[ 1.85948205e+00 4.60721731e+00 7.32161522e+00 ... 9.03680611e+00 2.49373913e+00 3.09364319e+00] [-4.29001474e+00 -8.13252831e+00 6.55038595e+00 ... 
1.45747910e+01 -2.45464039e+00 -9.23121214e-01] [-4.52761650e-02 7.31712818e+00 5.25440598e+00 ... -2.20435238e+00 8.81774616e+00 1.40034151e+00] ... [ 1.98497856e+00 1.60134995e+00 -8.03346395e-01 ... 3.59144258e+00 -2.40372896e-01 -2.21539521e+00] [ 4.21117401e+00 -4.65031862e-02 -4.52187061e-02 ... -1.64556408e+01 1.50762711e+01 -1.07691364e+01] [ 3.13590264e+00 -5.05750775e-01 7.16174555e+00 ... 2.70040035e+00 3.27606726e+00 -1.15947056e+00]]]] [[[[ 1.41051731e+01 1.31906395e+01 2.54749417e+00 ... 6.40640306e+00 6.16706610e+00 -2.37556577e+00] [ 4.23935413e+00 6.53826761e+00 -2.13061690e+00 ... 1.21290302e+01 -9.76849079e-01 -7.60284901e-01] [-1.24286723e+00 6.76153421e+00 -3.81853104e+00 ... 1.60077775e+00 2.90453053e+00 1.59824097e+00] ... [-7.45826674e+00 -3.73510003e-01 1.78121591e+00 ... -2.60281134e+00 -3.05757475e+00 4.53020000e+00] [-1.69321632e+00 6.62563086e-01 -3.74094582e+00 ... -6.32470512e+00 5.30637741e+00 3.40710998e+00] [-3.68720865e+00 -1.58596683e+00 8.53479099e+00 ... 3.51752377e+00 -1.58772850e+00 5.49431419e+00]] [[ 6.86948252e+00 -8.03552055e+00 -2.94921780e+00 ... -1.31291080e+00 1.23829994e+01 -1.21886182e+00] [-7.58814764e+00 -2.19646764e+00 -5.39386225e+00 ... -6.38349724e+00 -6.44767714e+00 8.98104954e+00] [ 1.10762806e+01 -2.45768905e-01 -2.80282450e+00 ... -5.27272892e+00 -1.08502865e+00 8.93731213e+00] ... [ 4.18145418e-01 1.07782974e+01 9.32951355e+00 ... -7.48032618e+00 1.17643661e+01 1.17830324e+00] [ 1.63642168e+00 9.31803226e-01 -6.17879248e+00 ... 5.97725677e+00 3.22856569e+00 8.69817352e+00] [-4.21261597e+00 3.50980639e-01 1.01295757e+01 ... -2.40118575e+00 9.24262142e+00 4.29605007e+00]] [[ 3.16867948e+00 -1.38798161e+01 9.00225067e+00 ... -1.30766010e+00 7.40205717e+00 5.28997183e-02] [ 1.24639378e+01 -4.06765652e+00 2.95940924e+00 ... 2.61546469e+00 2.26629791e+01 3.88696098e+00] [ 4.54957867e+00 -3.59170055e+00 -4.68123102e+00 ... 6.10040522e+00 5.40421104e+00 -1.14704456e+01] ... 
[-5.50402069e+00 -1.23249111e+01 -2.27642584e+00 ... -3.43391323e+00 -1.49258518e+00 -8.88944435e+00] [-4.26100612e-01 -8.01873207e+00 1.82418573e+00 ... 6.23759031e+00 5.88167095e+00 1.68692970e+01] [-6.73587847e+00 1.52419651e+00 6.99909925e-02 ... 2.96651006e-01 4.93937111e+00 8.41809082e+00]] ... [[-4.82522964e+00 -2.70710754e+00 -1.10292149e+01 ... 2.89865494e+00 -3.43298101e+00 -5.21745396e+00] [ 7.24085450e-01 -1.04246712e+00 7.24142122e+00 ... 1.51597381e-01 1.07466745e+00 -2.43733859e+00] [-6.49309349e+00 -1.11521149e+01 -3.94197178e+00 ... 6.69792032e+00 -6.37629938e+00 1.41602726e+01] ... [ 6.68689775e+00 -1.49969316e+00 -4.11812878e+00 ... 4.28324509e+00 1.84991550e+00 -3.27998543e+00] [-2.02601504e+00 -1.94549203e-01 5.84265232e+00 ... 9.12903881e+00 2.70497799e+00 -9.13423729e+00] [ 4.14256954e+00 -4.11901188e+00 5.50086451e+00 ... 5.01255989e-02 -3.41062212e+00 7.16648865e+00]] [[ 4.11539936e+00 2.66115880e+00 1.82374973e+01 ... -9.65265989e-01 1.97912025e+00 -8.99937630e+00] [ 4.06923580e+00 -7.81071520e+00 -4.26395035e+00 ... 5.49419355e+00 -6.69613504e+00 3.83926511e+00] [ 9.07969594e-01 8.04226220e-01 -7.72678614e+00 ... -3.33869505e+00 5.36377907e+00 -5.86468697e+00] ... [ 5.43566418e+00 -2.63456154e+00 1.12409191e+01 ... -1.39192939e+00 -6.73793745e+00 6.59851408e+00] [-3.34179926e+00 -6.34515333e+00 -3.63513947e-01 ... 2.79423904e+00 4.84223366e-02 -6.51060200e+00] [-4.36644554e-01 9.30002117e+00 5.49272966e+00 ... 4.53547478e+00 7.83216953e-03 5.97157907e+00]] [[ 5.94461679e-01 -7.10164499e+00 -8.37675858e+00 ... -1.00911217e+01 -1.11579537e+00 -5.22561264e+00] [-2.38534045e+00 -6.55644464e+00 -1.05145478e+00 ... -3.25287676e+00 1.60544415e+01 4.95829701e-01] [ 1.16612029e+00 -2.30521154e+00 7.74342585e+00 ... -1.36418271e+00 1.00645065e+01 3.82979274e+00] ... [-1.65373087e-01 -3.36270094e+00 -2.78323221e+00 ... -2.07606840e+00 1.10899811e+01 1.77833247e+00] [ 1.04951878e+01 6.07247019e+00 9.18018627e+00 ... 
5.77379084e+00 -3.11898708e-01 5.84165239e+00] [-6.39309263e+00 6.81918812e+00 3.44054651e+00 ... 7.72686672e+00 -3.59491730e+00 7.94832468e+00]]]]]; ov_res: [[[[[-3.38013983e+00 8.06292820e+00 -6.00191164e+00 ... 8.81471443e+00 1.44715714e+00 1.08498306e+01] [-7.35026884e+00 1.99644542e+00 -9.11401176e+00 ... -4.20670938e+00 5.18204069e+00 5.27220154e+00] [-7.74023294e-01 -2.54518986e+00 -3.60615969e+00 ... -2.33379316e+00 3.07733321e+00 1.04379807e+01] ... [-1.59370899e+00 -7.83934736e+00 5.85380030e+00 ... 7.67927980e+00 1.31019855e+00 3.29377174e+00] [-6.51854277e-01 4.34146976e+00 -4.96887493e+00 ... 2.61142063e+00 5.67253590e-01 2.28775620e+00] [ 9.52106667e+00 -6.12103891e+00 3.82099462e+00 ... 5.65694714e+00 -1.14047170e+00 -7.52254486e-01]] [[-1.21839666e+00 9.50830162e-01 2.71936274e+00 ... 3.52699924e+00 6.46814108e-01 -3.31260872e+00] [-1.44707108e+01 -1.17863560e+00 -4.08576965e+00 ... 1.52466357e+00 -1.60022902e+00 -4.10337591e+00] [ 5.13298845e+00 1.05879421e+01 7.75280857e+00 ... 1.45197353e+01 -1.24467773e+01 4.78247929e+00] ... [-6.11494446e+00 -3.57109737e+00 5.82804728e+00 ... 1.78299828e+01 -2.56720448e+00 8.85674667e+00] [-2.53649831e+00 -2.13809156e+00 -4.97756910e+00 ... -6.53028727e-01 -5.28698444e+00 1.28773289e+01] [ 9.81473732e+00 7.21435022e+00 -7.31665468e+00 ... 5.71505308e+00 -2.04989815e+00 1.59236121e+00]] [[-4.72268963e+00 -1.38403072e+01 -6.65930891e+00 ... 1.43156958e+00 1.40578213e+01 -6.34219170e-01] [-2.83121943e-01 3.55368853e+00 -1.53216767e+00 ... 1.71298516e+00 -8.34420681e-01 -3.36546469e+00] [ 1.75147176e+00 -3.72762346e+00 9.86048508e+00 ... -3.94656420e+00 5.62607288e+00 7.48559904e+00] ... [ 4.69749403e+00 -7.85063219e+00 -2.32476711e-01 ... -1.21616764e+01 9.78387833e+00 -1.46623192e+01] [ 1.44642484e+00 7.40122604e+00 1.57288952e+01 ... 4.85700321e+00 -9.26543617e+00 8.76953411e+00] [-6.02775621e+00 1.16272402e+00 -2.02919559e+01 ... 6.94093180e+00 1.72013378e+00 -1.31472492e+01]] ... 
[[ 6.49455643e+00 -9.00572014e+00 8.59682083e+00 ... 8.60504341e+00 -8.73765755e+00 6.89353752e+00] [-1.43171768e+01 5.20735264e+00 -7.90286875e+00 ... -3.42986631e+00 -6.29922199e+00 -1.03689861e+00] [ 2.91810989e+00 -2.42643046e+00 1.71261096e+00 ... 1.95694888e+00 1.13538017e+01 5.73478985e+00] ... [-5.56162739e+00 6.64104986e+00 -3.07233477e+00 ... -1.00460434e+01 8.55345058e+00 3.11504030e+00] [-5.57467222e+00 7.65386105e+00 3.65593743e+00 ... -2.70219231e+00 2.98830914e+00 2.99844503e+00] [ 7.55484295e+00 -5.33541822e+00 -6.78947020e+00 ... 2.37369990e+00 -1.61062527e+01 5.03901863e+00]] [[-8.66848373e+00 -1.10379467e+01 -2.59359217e+00 ... 5.50973749e+00 -2.81966090e-01 4.62306499e+00] [-1.64914875e+01 2.50035906e+00 -4.44981289e+00 ... -5.14394403e-01 -3.45401335e+00 -8.47853899e-01] [-7.89465284e+00 -1.46915550e+01 -5.30174541e+00 ... -2.71367884e+00 2.38930869e+00 -1.85409021e+00] ... [ 3.99347854e+00 -5.14109898e+00 1.52582321e+01 ... -1.45623302e+01 7.51260340e-01 2.49231529e+00] [-8.45127106e+00 7.55078077e+00 -5.71905613e-01 ... 6.24481344e+00 -1.39938221e+01 -2.34121966e+00] [ 3.79937530e+00 5.90845013e+00 -3.62389088e-02 ... -6.62928629e+00 -1.83263922e+00 6.70575762e+00]] [[ 1.85948205e+00 4.60721731e+00 7.32161522e+00 ... 9.03680611e+00 2.49373913e+00 3.09364319e+00] [-4.29001474e+00 -8.13252831e+00 6.55038595e+00 ... 1.45747910e+01 -2.45464039e+00 -9.23121214e-01] [-4.52761650e-02 7.31712818e+00 5.25440598e+00 ... -2.20435238e+00 8.81774616e+00 1.40034151e+00] ... [ 1.98497856e+00 1.60134995e+00 -8.03346395e-01 ... 3.59144258e+00 -2.40372896e-01 -2.21539521e+00] [ 4.21117401e+00 -4.65031862e-02 -4.52187061e-02 ... -1.64556408e+01 1.50762711e+01 -1.07691364e+01] [ 3.13590264e+00 -5.05750775e-01 7.16174555e+00 ... 2.70040035e+00 3.27606726e+00 -1.15947056e+00]]]] [[[[ 1.41051731e+01 1.31906395e+01 2.54749417e+00 ... 6.40640306e+00 6.16706610e+00 -2.37556577e+00] [ 4.23935413e+00 6.53826761e+00 -2.13061690e+00 ... 
1.21290302e+01 -9.76849079e-01 -7.60284901e-01] [-1.24286723e+00 6.76153421e+00 -3.81853104e+00 ... 1.60077775e+00 2.90453053e+00 1.59824097e+00] ... [-7.45826674e+00 -3.73510003e-01 1.78121591e+00 ... -2.60281134e+00 -3.05757475e+00 4.53020000e+00] [-1.69321632e+00 6.62563086e-01 -3.74094582e+00 ... -6.32470512e+00 5.30637741e+00 3.40710998e+00] [-3.68720865e+00 -1.58596683e+00 8.53479099e+00 ... 3.51752377e+00 -1.58772850e+00 5.49431419e+00]] [[ 6.86948252e+00 -8.03552055e+00 -2.94921780e+00 ... -1.31291080e+00 1.23829994e+01 -1.21886182e+00] [-7.58814764e+00 -2.19646764e+00 -5.39386225e+00 ... -6.38349724e+00 -6.44767714e+00 8.98104954e+00] [ 1.10762806e+01 -2.45768905e-01 -2.80282450e+00 ... -5.27272892e+00 -1.08502865e+00 8.93731213e+00] ... [ 4.18145418e-01 1.07782974e+01 9.32951355e+00 ... -7.48032618e+00 1.17643661e+01 1.17830324e+00] [ 1.63642168e+00 9.31803226e-01 -6.17879248e+00 ... 5.97725677e+00 3.22856569e+00 8.69817352e+00] [-4.21261597e+00 3.50980639e-01 1.01295757e+01 ... -2.40118575e+00 9.24262142e+00 4.29605007e+00]] [[ 3.16867948e+00 -1.38798161e+01 9.00225067e+00 ... -1.30766010e+00 7.40205717e+00 5.28997183e-02] [ 1.24639378e+01 -4.06765652e+00 2.95940924e+00 ... 2.61546469e+00 2.26629791e+01 3.88696098e+00] [ 4.54957867e+00 -3.59170055e+00 -4.68123102e+00 ... 6.10040522e+00 5.40421104e+00 -1.14704456e+01] ... [-5.50402069e+00 -1.23249111e+01 -2.27642584e+00 ... -3.43391323e+00 -1.49258518e+00 -8.88944435e+00] [-4.26100612e-01 -8.01873207e+00 1.82418573e+00 ... 6.23759031e+00 5.88167095e+00 1.68692970e+01] [-6.73587847e+00 1.52419651e+00 6.99909925e-02 ... 2.96651006e-01 4.93937111e+00 8.41809082e+00]] ... [[-4.82522964e+00 -2.70710754e+00 -1.10292149e+01 ... 2.89865494e+00 -3.43298101e+00 -5.21745396e+00] [ 7.24085450e-01 -1.04246712e+00 7.24142122e+00 ... 1.51597381e-01 1.07466745e+00 -2.43733859e+00] [-6.49309349e+00 -1.11521149e+01 -3.94197178e+00 ... 6.69792032e+00 -6.37629938e+00 1.41602726e+01] ... 
[ 6.68689775e+00 -1.49969316e+00 -4.11812878e+00 ... 4.28324509e+00 1.84991550e+00 -3.27998543e+00] [-2.02601504e+00 -1.94549203e-01 5.84265232e+00 ... 9.12903881e+00 2.70497799e+00 -9.13423729e+00] [ 4.14256954e+00 -4.11901188e+00 5.50086451e+00 ... 5.01255989e-02 -3.41062212e+00 7.16648865e+00]] [[ 4.11539936e+00 2.66115880e+00 1.82374973e+01 ... -9.65265989e-01 1.97912025e+00 -8.99937630e+00] [ 4.06923580e+00 -7.81071520e+00 -4.26395035e+00 ... 5.49419355e+00 -6.69613504e+00 3.83926511e+00] [ 9.07969594e-01 8.04226220e-01 -7.72678614e+00 ... -3.33869505e+00 5.36377907e+00 -5.86468697e+00] ... [ 5.43566418e+00 -2.63456154e+00 1.12409191e+01 ... -1.39192939e+00 -6.73793745e+00 6.59851408e+00] [-3.34179926e+00 -6.34515333e+00 -3.63513947e-01 ... 2.79423904e+00 4.84223366e-02 -6.51060200e+00] [-4.36644554e-01 9.30002117e+00 5.49272966e+00 ... 4.53547478e+00 7.83216953e-03 5.97157907e+00]] [[ 5.94461679e-01 -7.10164499e+00 -8.37675858e+00 ... -1.00911217e+01 -1.11579537e+00 -5.22561264e+00] [-2.38534045e+00 -6.55644464e+00 -1.05145478e+00 ... -3.25287676e+00 1.60544415e+01 4.95829701e-01] [ 1.16612029e+00 -2.30521154e+00 7.74342585e+00 ... -1.36418271e+00 1.00645065e+01 3.82979274e+00] ... [-1.65373087e-01 -3.36270094e+00 -2.78323221e+00 ... -2.07606840e+00 1.10899811e+01 1.77833247e+00] [ 1.04951878e+01 6.07247019e+00 9.18018627e+00 ... 5.77379084e+00 -3.11898708e-01 5.84165239e+00] [-6.39309263e+00 6.81918812e+00 3.44054651e+00 ... 7.72686672e+00 -3.59491730e+00 7.94832468e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 'same', 'dilations': 1, 'groups': 1} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_494.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.34585}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 2.4049931e+00 2.2697372e+00 2.6974626e+00 ... -4.2965894e+00 7.2756882e+00 3.1530728e+00] [ 3.5672174e+00 -1.2945743e+00 -5.4063849e+00 ... 2.1187897e+00 -1.7108233e+00 6.5974325e-01] [ 3.9338837e+00 6.1029081e+00 4.4054127e+00 ... -4.5350885e+00 -1.8294945e+00 9.1263180e+00] ... [ 7.1581511e+00 8.8402920e+00 1.8732686e+00 ... 1.3926685e+01 -7.1518254e-01 -6.0034103e+00] [ 2.5005012e+00 6.2558599e+00 1.2359154e+00 ... 1.1551285e+01 -4.1994138e+00 8.7511692e+00] [ 7.2126784e+00 -7.2673788e+00 2.2361803e+00 ... -3.4424591e+00 2.5268769e+00 -2.2541361e+00]] [[ 6.5473685e+00 3.1010480e+00 6.4641480e+00 ... 3.3432345e+00 -1.0110151e+01 6.7627220e+00] [ 4.5918388e+00 4.1488132e+00 1.3519980e+00 ... 4.8877468e+00 1.9056799e+00 5.2115831e+00] [-3.4374194e+00 2.0704648e+00 -3.6765227e+00 ... -7.2754917e+00 4.1134453e+00 2.3061304e+00] ... [ 8.5736599e+00 -8.3651423e-02 -6.6825390e+00 ... 5.7811069e+00 -6.6395092e-01 -7.4806452e-02] [ 1.4987338e+00 -1.0603492e+00 1.0033605e+01 ... 5.8256607e+00 5.7480135e+00 9.6412792e+00] [ 5.5667334e+00 3.2194138e-02 -7.1919298e+00 ... 3.6171649e+00 -8.8775187e+00 3.6894617e+00]] [[ 6.0960236e+00 1.5222188e+01 1.3765068e+00 ... 5.0218062e+00 2.9656887e+00 -6.3963854e-01] [ 5.9092011e+00 -3.7603369e+00 1.1861646e+01 ... 
-7.2042379e+00 1.5473452e+01 4.5370407e+00] [-4.1211224e+00 1.3693440e+00 8.1460199e+00 ... 9.6268725e-01 -4.1430244e+00 4.2453661e+00] ... [ 1.4770228e+01 -2.2855513e+00 1.1239616e+00 ... 9.4995384e+00 -2.1255333e+00 2.6085441e+00] [-3.4774697e-01 -1.1834694e+01 -6.2720406e-01 ... 7.4318677e-01 4.2768722e+00 -1.9395530e+00] [-7.6458931e-02 9.1873913e+00 1.6557755e+00 ... 7.8824836e-01 1.0054774e+01 5.9248528e+00]] ... [[-5.9667764e+00 -9.2688608e-01 3.0533590e+00 ... 1.5512775e+00 7.8220496e+00 3.3155947e+00] [ 5.4095144e+00 1.6084528e+01 2.6048532e+00 ... -3.7238064e+00 5.8432341e+00 5.3357601e+00] [-1.6862762e-01 -4.0386271e-01 -1.5420890e+00 ... 2.1661427e+00 8.9818439e+00 1.3250662e+01] ... [-6.0907750e+00 3.8240769e+00 -3.5318413e+00 ... 1.3059155e+00 -7.1523581e+00 -4.2597241e+00] [-7.1354713e+00 -1.0440769e+01 -3.7194853e+00 ... 8.4002466e+00 4.0570745e+00 1.3550642e+00] [-3.7633657e-02 1.4551540e+01 8.3539143e+00 ... -2.4184954e+00 1.0665634e+00 -2.7953506e+00]] [[ 8.7263908e+00 7.8302616e-01 5.1163921e+00 ... -4.0885744e+00 1.1879849e+00 -4.3543339e-02] [-2.5917032e+00 4.9220314e+00 -5.4351711e-01 ... 3.7712700e+00 5.8932614e+00 -4.8879414e+00] [ 2.4306474e+00 -4.1027927e-01 -4.0286021e+00 ... -4.7402568e+00 6.3709121e+00 8.8850536e+00] ... [ 1.7022179e+01 -2.2047560e+00 6.5354185e+00 ... 8.0852537e+00 -3.4067340e+00 -1.4763637e+00] [-1.7003891e+00 2.3206184e+01 -6.7210245e-01 ... -6.4932699e+00 1.9730835e+01 -2.5985203e+00] [-2.1340933e+00 -9.1694031e+00 -1.6670537e+00 ... -5.9090676e+00 -8.1399164e+00 -1.6012988e+00]] [[-1.0207098e+00 5.2332382e+00 3.9128046e+00 ... -1.7475026e+00 1.3748072e+00 -9.6342206e-02] [-5.7535684e-01 -4.4673004e+00 2.0344479e+00 ... -4.8423038e+00 -2.3167098e+00 2.9927266e+00] [ 3.4170988e+00 -8.1597924e-01 6.0501142e+00 ... -1.3399601e+00 1.8646190e+00 -1.2678675e+01] ... [-2.2772498e+00 4.1465139e+00 -3.5723171e+00 ... 4.6048203e+00 1.4384510e+01 -7.9787331e+00] [-4.2502766e+00 -3.9156122e+00 5.4753509e+00 ... 
3.5453773e-01 -6.4458113e+00 1.1899537e+00] [-1.8178449e+00 -8.3290939e+00 -1.3161955e+00 ... -6.6699862e-01 -7.4747133e+00 -4.2179317e+00]]]] [[[[ 4.7866430e+00 9.0146751e+00 -2.4392986e-01 ... 5.7681060e+00 -1.2092829e+00 -2.7371521e+00] [-7.9241467e-01 9.8864374e+00 -8.6658068e+00 ... -7.6709328e+00 -1.0339134e+00 4.0352983e+00] [-2.7333021e-01 1.5515054e+01 -7.9091024e+00 ... -4.5894423e+00 3.2799053e+00 3.5561662e+00] ... [-1.5677106e+00 -8.7667780e+00 -3.0885301e+00 ... 9.1427708e-01 -5.1133442e+00 1.2154975e+01] [ 3.2526684e+00 -2.4593987e+00 -2.3610451e+00 ... -1.0796593e+01 -7.6593666e+00 1.4476390e+01] [ 6.7643189e-01 2.2051945e+00 1.2442955e+01 ... -4.0251427e+00 -1.4570966e+00 4.7465599e-01]] [[ 1.8919299e+00 1.3503766e-01 -3.9586630e+00 ... 2.9732437e+00 -3.0905709e+00 -6.0655308e-01] [ 2.3670216e+00 1.8044881e+00 -5.9957376e+00 ... -2.1749306e+00 4.2400522e+00 4.2744579e+00] [ 3.5753065e-01 -4.6651235e+00 3.6080341e+00 ... 5.5407634e+00 -2.8780437e+00 -7.0463581e+00] ... [ 9.4226456e+00 -8.6121140e+00 -5.1113529e+00 ... -6.6719437e+00 6.6381559e+00 7.8060436e+00] [ 1.6376776e+00 3.7350533e+00 1.3000242e+00 ... -3.6667099e+00 1.1988191e+01 1.0136536e+01] [ 1.4543672e+00 -5.0705137e+00 -1.0259101e+00 ... -5.0996041e-01 7.5901365e+00 7.0285096e+00]] [[ 1.0274662e+00 7.3599796e+00 -2.7823496e+00 ... 2.3996198e+00 3.2748179e+00 6.1534066e+00] [ 4.0375919e+00 2.7398958e+00 1.4418795e+01 ... -2.1236777e-02 2.8176012e+00 -2.6446609e+00] [-1.4696973e+01 -5.2387297e-01 -4.9666772e+00 ... -4.5211911e+00 3.9862382e-01 8.0134094e-01] ... [-5.1686573e+00 -3.6745253e+00 -8.0917616e+00 ... 9.5733070e+00 5.3089609e+00 3.9512584e+00] [ 7.4863952e-01 -3.1400404e+00 1.8569836e+01 ... 4.8308926e+00 5.9590707e+00 4.8439484e+00] [-1.2763691e+00 1.1627555e+00 -4.2419257e+00 ... 2.7614748e+00 1.2248540e+01 4.8113365e+00]] ... [[ 1.0687935e-01 -4.7182755e+00 1.3985940e+00 ... 4.4727910e-01 -2.3944414e+00 5.2579498e-01] [-3.8563437e+00 3.1809838e+00 3.7151828e+00 ... 
-5.5135188e+00 -6.2447820e+00 2.4538159e+00] [ 1.1235987e+01 4.8013182e+00 1.0761728e+01 ... -2.2397931e+00 -4.7080278e+00 3.6281741e+00] ... [ 4.9501052e+00 9.2069921e+00 1.0719764e+01 ... -3.6019392e+00 1.0087638e+01 2.6675043e+00] [ 2.5834961e+00 -2.5574727e+00 5.0026379e+00 ... 4.1926036e+00 3.9905396e+00 5.1637840e+00] [-2.6522498e+00 6.5266848e-02 -5.9342051e-01 ... 1.6581394e+01 -1.0006377e+01 4.8177223e+00]] [[ 6.9332476e+00 4.7662312e-01 -3.8927784e+00 ... -4.4826450e+00 -3.8933816e+00 5.9963441e+00] [ 8.5904350e+00 1.3676580e+01 1.6830184e+00 ... -1.1143353e+01 -8.5711060e+00 -3.5008621e+00] [-5.1597795e+00 1.7793339e+01 1.4739319e+01 ... -1.1373522e+01 -6.1154928e+00 5.4322181e+00] ... [-3.5778923e+00 -2.1778758e+00 2.9179730e+00 ... -3.9774518e+00 8.4068346e+00 5.9084311e+00] [-1.9549825e+00 -5.1649251e+00 -3.9047670e+00 ... -1.9003103e+00 -9.0817904e-01 6.0035861e-01] [-9.4054966e+00 -7.6515808e+00 -1.5898101e+00 ... 1.5578861e+00 8.0376987e+00 6.6028291e-01]] [[ 9.9044442e-01 -3.8819284e+00 3.4875710e+00 ... 3.4064753e+00 3.6991408e+00 -3.5101805e+00] [ 5.1113472e+00 6.0481277e+00 -5.9069843e+00 ... 5.6933851e+00 4.4978962e+00 5.5893002e+00] [-3.0480342e+00 1.2368974e+01 -3.6407909e+00 ... -6.0585356e+00 -1.5450408e+01 1.3883686e+01] ... [ 1.8441010e+00 6.5346770e+00 -5.0539670e+00 ... 4.2988315e+00 2.3030772e+00 -1.7077398e+00] [-8.7188506e-01 9.0452232e+00 -1.0512246e+01 ... 1.1473133e+01 5.3611879e+00 1.0597515e+01] [-4.1298194e+00 -3.7384214e+00 1.9100727e+01 ... 1.7964630e+00 1.7617195e+00 6.6731375e-01]]]]]; ov_res: [[[[[ 2.4049931e+00 2.2697372e+00 2.6974626e+00 ... -4.2965894e+00 7.2756882e+00 3.1530728e+00] [ 3.5672174e+00 -1.2945743e+00 -5.4063849e+00 ... 2.1187897e+00 -1.7108233e+00 6.5974325e-01] [ 3.9338837e+00 6.1029081e+00 4.4054127e+00 ... -4.5350885e+00 -1.8294945e+00 9.1263180e+00] ... [ 7.1581511e+00 8.8402920e+00 1.8732686e+00 ... 1.3926685e+01 -7.1518254e-01 -6.0034103e+00] [ 2.5005012e+00 6.2558599e+00 1.2359154e+00 ... 
1.1551285e+01 -4.1994138e+00 8.7511692e+00] [ 7.2126784e+00 -7.2673788e+00 2.2361803e+00 ... -3.4424591e+00 2.5268769e+00 -2.2541361e+00]] [[ 6.5473685e+00 3.1010480e+00 6.4641480e+00 ... 3.3432345e+00 -1.0110151e+01 6.7627220e+00] [ 4.5918388e+00 4.1488132e+00 1.3519980e+00 ... 4.8877468e+00 1.9056799e+00 5.2115831e+00] [-3.4374194e+00 2.0704648e+00 -3.6765227e+00 ... -7.2754917e+00 4.1134453e+00 2.3061304e+00] ... [ 8.5736599e+00 -8.3651423e-02 -6.6825390e+00 ... 5.7811069e+00 -6.6395092e-01 -7.4806452e-02] [ 1.4987338e+00 -1.0603492e+00 1.0033605e+01 ... 5.8256607e+00 5.7480135e+00 9.6412792e+00] [ 5.5667334e+00 3.2194138e-02 -7.1919298e+00 ... 3.6171649e+00 -8.8775187e+00 3.6894617e+00]] [[ 6.0960236e+00 1.5222188e+01 1.3765068e+00 ... 5.0218062e+00 2.9656887e+00 -6.3963854e-01] [ 5.9092011e+00 -3.7603369e+00 1.1861646e+01 ... -7.2042379e+00 1.5473452e+01 4.5370407e+00] [-4.1211224e+00 1.3693440e+00 8.1460199e+00 ... 9.6268725e-01 -4.1430244e+00 4.2453661e+00] ... [ 1.4770228e+01 -2.2855513e+00 1.1239616e+00 ... 9.4995384e+00 -2.1255333e+00 2.6085441e+00] [-3.4774697e-01 -1.1834694e+01 -6.2720406e-01 ... 7.4318677e-01 4.2768722e+00 -1.9395530e+00] [-7.6458931e-02 9.1873913e+00 1.6557755e+00 ... 7.8824836e-01 1.0054774e+01 5.9248528e+00]] ... [[-5.9667764e+00 -9.2688608e-01 3.0533590e+00 ... 1.5512775e+00 7.8220496e+00 3.3155947e+00] [ 5.4095144e+00 1.6084528e+01 2.6048532e+00 ... -3.7238064e+00 5.8432341e+00 5.3357601e+00] [-1.6862762e-01 -4.0386271e-01 -1.5420890e+00 ... 2.1661427e+00 8.9818439e+00 1.3250662e+01] ... [-6.0907750e+00 3.8240769e+00 -3.5318413e+00 ... 1.3059155e+00 -7.1523581e+00 -4.2597241e+00] [-7.1354713e+00 -1.0440769e+01 -3.7194853e+00 ... 8.4002466e+00 4.0570745e+00 1.3550642e+00] [-3.7633657e-02 1.4551540e+01 8.3539143e+00 ... -2.4184954e+00 1.0665634e+00 -2.7953506e+00]] [[ 8.7263908e+00 7.8302616e-01 5.1163921e+00 ... -4.0885744e+00 1.1879849e+00 -4.3543339e-02] [-2.5917032e+00 4.9220314e+00 -5.4351711e-01 ... 
3.7712700e+00 5.8932614e+00 -4.8879414e+00] [ 2.4306474e+00 -4.1027927e-01 -4.0286021e+00 ... -4.7402568e+00 6.3709121e+00 8.8850536e+00] ... [ 1.7022179e+01 -2.2047560e+00 6.5354185e+00 ... 8.0852537e+00 -3.4067340e+00 -1.4763637e+00] [-1.7003891e+00 2.3206184e+01 -6.7210245e-01 ... -6.4932699e+00 1.9730835e+01 -2.5985203e+00] [-2.1340933e+00 -9.1694031e+00 -1.6670537e+00 ... -5.9090676e+00 -8.1399164e+00 -1.6012988e+00]] [[-1.0207098e+00 5.2332382e+00 3.9128046e+00 ... -1.7475026e+00 1.3748072e+00 -9.6342206e-02] [-5.7535684e-01 -4.4673004e+00 2.0344479e+00 ... -4.8423038e+00 -2.3167098e+00 2.9927266e+00] [ 3.4170988e+00 -8.1597924e-01 6.0501142e+00 ... -1.3399601e+00 1.8646190e+00 -1.2678675e+01] ... [-2.2772498e+00 4.1465139e+00 -3.5723171e+00 ... 4.6048203e+00 1.4384510e+01 -7.9787331e+00] [-4.2502766e+00 -3.9156122e+00 5.4753509e+00 ... 3.5453773e-01 -6.4458113e+00 1.1899537e+00] [-1.8178449e+00 -8.3290939e+00 -1.3161955e+00 ... -6.6699862e-01 -7.4747133e+00 -4.2179317e+00]]]] [[[[ 4.7866430e+00 9.0146751e+00 -2.4392986e-01 ... 5.7681060e+00 -1.2092829e+00 -2.7371521e+00] [-7.9241467e-01 9.8864374e+00 -8.6658068e+00 ... -7.6709328e+00 -1.0339134e+00 4.0352983e+00] [-2.7333021e-01 1.5515054e+01 -7.9091024e+00 ... -4.5894423e+00 3.2799053e+00 3.5561662e+00] ... [-1.5677106e+00 -8.7667780e+00 -3.0885301e+00 ... 9.1427708e-01 -5.1133442e+00 1.2154975e+01] [ 3.2526684e+00 -2.4593987e+00 -2.3610451e+00 ... -1.0796593e+01 -7.6593666e+00 1.4476390e+01] [ 6.7643189e-01 2.2051945e+00 1.2442955e+01 ... -4.0251427e+00 -1.4570966e+00 4.7465599e-01]] [[ 1.8919299e+00 1.3503766e-01 -3.9586630e+00 ... 2.9732437e+00 -3.0905709e+00 -6.0655308e-01] [ 2.3670216e+00 1.8044881e+00 -5.9957376e+00 ... -2.1749306e+00 4.2400522e+00 4.2744579e+00] [ 3.5753065e-01 -4.6651235e+00 3.6080341e+00 ... 5.5407634e+00 -2.8780437e+00 -7.0463581e+00] ... [ 9.4226456e+00 -8.6121140e+00 -5.1113529e+00 ... -6.6719437e+00 6.6381559e+00 7.8060436e+00] [ 1.6376776e+00 3.7350533e+00 1.3000242e+00 ... 
-3.6667099e+00 1.1988191e+01 1.0136536e+01] [ 1.4543672e+00 -5.0705137e+00 -1.0259101e+00 ... -5.0996041e-01 7.5901365e+00 7.0285096e+00]] [[ 1.0274662e+00 7.3599796e+00 -2.7823496e+00 ... 2.3996198e+00 3.2748179e+00 6.1534066e+00] [ 4.0375919e+00 2.7398958e+00 1.4418795e+01 ... -2.1236777e-02 2.8176012e+00 -2.6446609e+00] [-1.4696973e+01 -5.2387297e-01 -4.9666772e+00 ... -4.5211911e+00 3.9862382e-01 8.0134094e-01] ... [-5.1686573e+00 -3.6745253e+00 -8.0917616e+00 ... 9.5733070e+00 5.3089609e+00 3.9512584e+00] [ 7.4863952e-01 -3.1400404e+00 1.8569836e+01 ... 4.8308926e+00 5.9590707e+00 4.8439484e+00] [-1.2763691e+00 1.1627555e+00 -4.2419257e+00 ... 2.7614748e+00 1.2248540e+01 4.8113365e+00]] ... [[ 1.0687935e-01 -4.7182755e+00 1.3985940e+00 ... 4.4727910e-01 -2.3944414e+00 5.2579498e-01] [-3.8563437e+00 3.1809838e+00 3.7151828e+00 ... -5.5135188e+00 -6.2447820e+00 2.4538159e+00] [ 1.1235987e+01 4.8013182e+00 1.0761728e+01 ... -2.2397931e+00 -4.7080278e+00 3.6281741e+00] ... [ 4.9501052e+00 9.2069921e+00 1.0719764e+01 ... -3.6019392e+00 1.0087638e+01 2.6675043e+00] [ 2.5834961e+00 -2.5574727e+00 5.0026379e+00 ... 4.1926036e+00 3.9905396e+00 5.1637840e+00] [-2.6522498e+00 6.5266848e-02 -5.9342051e-01 ... 1.6581394e+01 -1.0006377e+01 4.8177223e+00]] [[ 6.9332476e+00 4.7662312e-01 -3.8927784e+00 ... -4.4826450e+00 -3.8933816e+00 5.9963441e+00] [ 8.5904350e+00 1.3676580e+01 1.6830184e+00 ... -1.1143353e+01 -8.5711060e+00 -3.5008621e+00] [-5.1597795e+00 1.7793339e+01 1.4739319e+01 ... -1.1373522e+01 -6.1154928e+00 5.4322181e+00] ... [-3.5778923e+00 -2.1778758e+00 2.9179730e+00 ... -3.9774518e+00 8.4068346e+00 5.9084311e+00] [-1.9549825e+00 -5.1649251e+00 -3.9047670e+00 ... -1.9003103e+00 -9.0817904e-01 6.0035861e-01] [-9.4054966e+00 -7.6515808e+00 -1.5898101e+00 ... 1.5578861e+00 8.0376987e+00 6.6028291e-01]] [[ 9.9044442e-01 -3.8819284e+00 3.4875710e+00 ... 3.4064753e+00 3.6991408e+00 -3.5101805e+00] [ 5.1113472e+00 6.0481277e+00 -5.9069843e+00 ... 
5.6933851e+00 4.4978962e+00 5.5893002e+00] [-3.0480342e+00 1.2368974e+01 -3.6407909e+00 ... -6.0585356e+00 -1.5450408e+01 1.3883686e+01] ... [ 1.8441010e+00 6.5346770e+00 -5.0539670e+00 ... 4.2988315e+00 2.3030772e+00 -1.7077398e+00] [-8.7188506e-01 9.0452232e+00 -1.0512246e+01 ... 1.1473133e+01 5.3611879e+00 1.0597515e+01] [-4.1298194e+00 -3.7384214e+00 1.9100727e+01 ... 1.7964630e+00 1.7617195e+00 6.6731375e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 'valid', 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_496.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int = prim::Constant[value=1]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.25045}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 7.12422466e+00 -5.21937513e+00 4.49308205e+00 ... 2.22399771e-01 7.71127892e+00 6.41730833e+00] [-7.79998779e+00 1.32814283e+01 4.62489796e+00 ... -9.73466396e+00 -4.50795412e+00 -1.84567585e+01] [ 2.96952158e-01 -1.01691332e+01 -4.31233406e+00 ... 8.92164350e-01 6.37490511e+00 1.30506039e+01] ... [-7.06910944e+00 9.58695316e+00 -4.38244820e+00 ... 6.79028940e+00 8.55528831e+00 6.66635561e+00] [-6.81015968e+00 6.18200970e+00 4.21193838e+00 ... -9.39897728e+00 -1.09386044e+01 -2.47626305e+00] [-6.88059950e+00 -7.50097752e+00 -3.23470545e+00 ... -5.75621080e+00 -8.29046726e+00 7.69425488e+00]] [[-4.81014204e+00 8.70364457e-02 1.60340538e+01 ... -2.17834020e+00 1.11746800e+00 4.37319279e+00] [ 2.08483011e-01 1.11320276e+01 2.79804778e+00 ... 1.16277351e+01 6.10301352e+00 8.87155652e-01] [ 5.30333161e-01 6.37555456e+00 -1.32912028e+00 ... 2.70151186e+00 -2.44688559e+00 -8.48588467e-01] ... [-9.82808685e+00 3.33267665e+00 1.57296681e+00 ... -6.22280538e-02 -4.90436172e+00 -1.03912992e+01] [ 3.95798874e+00 -1.09795523e+01 5.75906181e+00 ... 1.40064251e+00 -6.28678370e+00 -5.27896214e+00] [-2.30473557e+01 2.68560982e+00 2.38846135e+00 ... 3.03093493e-01 7.10641623e+00 -4.03319550e+00]] [[ 7.35042095e+00 -2.78438687e+00 1.12037983e+01 ... 
4.44410276e+00 -8.37773609e+00 1.83194103e+01] [ 4.56259394e+00 1.63122177e+00 -1.73501396e+00 ... 4.00571394e+00 -7.04378009e-01 2.53823495e+00] [-9.23515081e-01 -3.33304316e-01 1.56703072e+01 ... 3.48521590e+00 -1.81506574e+00 8.31756592e+00] ... [-9.99520969e+00 8.19782257e+00 4.62218380e+00 ... -5.53781319e+00 -4.83265114e+00 -1.05893736e+01] [-3.03081012e+00 -9.09703970e-01 1.54252434e+01 ... -4.99363089e+00 -1.41852245e+01 2.49136806e+00] [-3.87618351e+00 1.62921314e+01 -7.51676893e+00 ... 6.42983675e-01 3.74923229e+00 5.31502151e+00]] ... [[-4.00493622e+00 1.02596941e+01 -7.59870672e+00 ... 7.79716969e-01 -6.79372311e+00 -7.61774540e+00] [ 2.16323876e+00 -4.59595251e+00 9.65293407e+00 ... -1.37458448e+01 1.22836409e+01 2.39822626e+00] [ 4.24458790e+00 7.03383327e-01 1.25030136e+01 ... 7.96363688e+00 -7.56327868e+00 1.53017006e+01] ... [ 4.14334917e+00 -1.39860058e+01 -1.35184593e+01 ... 9.33480549e+00 -6.15859604e+00 1.91216826e+00] [ 5.36310816e+00 1.05268412e+01 -9.83517647e+00 ... -1.12078142e+01 -4.32060450e-01 5.91502011e-01] [ 1.68727226e+01 -8.69414520e+00 -5.97798729e+00 ... -2.33319092e+00 1.84272919e+01 2.27100039e+00]] [[-7.01733398e+00 -6.56437016e+00 3.31787848e+00 ... 1.75927124e+01 1.13279505e+01 -6.25867653e+00] [ 1.09438629e+01 -5.67479944e+00 -2.12918053e+01 ... -7.21814632e+00 3.44599032e+00 6.46196175e+00] [ 9.05833054e+00 5.27437353e+00 3.00649738e+00 ... 7.94203520e-01 1.04026804e+01 2.69365478e+00] ... [-1.77522171e+00 -6.86617470e+00 1.47985058e+01 ... 2.86338067e+00 -1.46691501e-01 -8.28237534e-01] [ 4.66193438e+00 -8.46540356e+00 1.04717169e+01 ... 5.81513119e+00 1.99294940e-01 7.36388397e+00] [ 8.22815228e+00 9.46557045e+00 -9.68708801e+00 ... 2.87040019e+00 1.49596119e+01 9.44413185e+00]] [[ 2.77116871e+00 -2.87378573e+00 -1.52420454e+01 ... 9.65427756e-01 1.87322483e+01 -6.91342878e+00] [ 1.79111061e+01 -3.24861026e+00 -1.11971006e+01 ... 
3.07590508e+00 1.25155687e+01 5.08662844e+00] [ 7.51910305e+00 -1.72763824e+00 2.19618392e+00 ... -4.29677391e+00 3.37834656e-01 -1.93406308e+00] ... [ 2.03315711e+00 -7.82006359e+00 1.39786787e+01 ... 1.00855756e+00 1.02808285e+01 8.92986584e+00] [-7.44123936e+00 2.64031005e+00 -7.05938625e+00 ... -5.23098850e+00 1.32972946e+01 4.53555584e-03] [ 9.84912753e-01 7.02891111e+00 2.25547981e+00 ... 2.34871626e+00 6.60149288e+00 -4.44058418e+00]]]] [[[[-1.97781646e+00 -8.12821293e+00 4.70917374e-02 ... 3.86015511e+00 6.30977571e-01 -4.73646832e+00] [ 1.15307646e+01 -4.84977293e+00 -1.41497889e+01 ... -1.05744143e+01 7.32731199e+00 7.43412590e+00] [ 6.77928114e+00 3.59760237e+00 -6.28689170e-01 ... -5.40121269e+00 -3.07610840e-01 -8.44514942e+00] ... [-1.09665422e+01 -6.81562185e+00 6.22166991e-01 ... 5.58787012e+00 -3.40627360e+00 -6.39100027e+00] [ 8.73462677e+00 1.53055019e+01 -2.50025768e+01 ... 8.20127964e-01 5.82430887e+00 1.55593710e+01] [-1.95714843e+00 1.03396873e+01 1.30402336e+01 ... -7.47513676e+00 -7.77174282e+00 7.01812696e+00]] [[ 6.60551882e+00 1.67725296e+01 -1.32646360e+01 ... -5.22300816e+00 -1.42598228e+01 1.44614506e+01] [ 3.54755020e+00 1.32901344e+01 -4.68854856e+00 ... -8.63834953e+00 -9.08851027e-02 7.39630103e-01] [ 9.59056187e+00 5.08644438e+00 7.16553640e+00 ... 8.30708313e+00 1.60161281e+00 2.33039522e+00] ... [ 7.54558706e+00 1.44078207e+01 -3.89282227e+00 ... 1.40872979e+00 4.36373043e+00 -3.38762313e-01] [ 9.50197506e+00 1.25686102e+01 -8.74184132e+00 ... 4.81220245e+00 -2.06883621e+01 9.75470781e-01] [ 1.11281929e+01 -8.83791327e-01 -9.89516544e+00 ... -7.19525623e+00 6.91604805e+00 1.37932813e+00]] [[-9.08729649e+00 2.35349804e-01 -8.60447311e+00 ... 1.06427622e+01 2.33042908e+00 -9.20604324e+00] [ 5.64889431e+00 -7.08697510e+00 -9.48588276e+00 ... -2.86857080e+00 -1.73904133e+00 1.93009090e+01] [-4.67974949e+00 -6.96562624e+00 -2.43758869e+00 ... -8.93188858e+00 -1.24750080e+01 -2.50586185e+01] ... 
[-9.85142994e+00 -9.33912754e+00 2.34884644e+00 ... 8.93852592e-01 -1.92331374e+00 -1.17553310e+01] [-5.34166098e+00 -1.02461042e+01 -7.55381393e+00 ... -1.77976876e-01 1.22275555e+00 1.04968653e+01] [-3.64110541e+00 3.13410020e+00 1.59755349e+00 ... 7.82227945e+00 -6.56612015e+00 -2.42827702e+00]] ... [[-2.46556354e+00 7.91820192e+00 -9.15748501e+00 ... -1.51497936e+00 1.07463455e+01 -2.54794312e+00] [ 1.73142600e+00 1.02583714e+01 -3.25111079e+00 ... -1.32683313e+00 -5.00967789e+00 1.68133450e+00] [ 5.92950439e+00 1.99325264e+00 -4.13682365e+00 ... -1.14011173e+01 1.16000404e+01 1.10909452e+01] ... [-9.19151402e+00 -8.21852016e+00 -4.22224134e-01 ... -1.63530483e+01 9.34826374e-01 -4.47358608e+00] [ 6.66970134e-01 5.30382252e+00 7.69805193e+00 ... -1.71470623e+01 -1.13033342e+00 -5.35273695e+00] [-9.58379841e+00 -2.93193173e+00 -1.25320566e+00 ... 1.42460175e+01 -1.17294960e+01 1.10423040e+01]] [[ 1.05792389e+01 1.03500757e+01 -1.86391199e+00 ... -6.10838771e-01 -3.49283028e+00 -3.84295034e+00] [ 4.88383961e+00 -9.84402466e+00 -9.37467575e+00 ... 6.62036753e+00 1.04394913e+01 -6.65681839e+00] [ 1.09725952e+01 1.75931513e+00 -1.66693735e+00 ... -3.11184001e+00 7.94058561e+00 4.80703592e+00] ... [ 2.63944054e+00 5.47237349e+00 2.73333073e+00 ... 5.41061544e+00 -4.99326897e+00 -8.91076183e+00] [-2.57721500e+01 -4.59685755e+00 -1.35530691e+01 ... 2.95411181e+00 1.63677502e+01 -7.55553913e+00] [ 2.26277027e+01 1.22149162e+01 7.75548267e+00 ... -5.21048021e+00 -2.56856899e+01 3.13957334e-01]] [[-8.67404842e+00 -1.07946110e+01 -1.65924931e+00 ... 4.06948900e+00 1.87915592e+01 9.41900158e+00] [ 1.95911865e+01 3.87098002e+00 2.81949431e-01 ... -2.48510861e+00 1.46002102e+01 -1.38316050e+01] [ 8.44224644e+00 -3.65305829e+00 7.34381580e+00 ... 1.58661375e+01 -2.84195805e+00 -3.27201158e-01] ... [-5.80688286e+00 5.05999947e+00 -8.97798443e+00 ... 1.35761442e+01 3.53180242e+00 6.04488850e+00] [ 3.80210876e+00 1.61046314e+00 7.86646366e+00 ... 
2.12211162e-01 4.35839128e+00 -1.88186111e+01] [-1.32261896e+00 6.71208906e+00 4.31561947e+00 ... 8.38076591e+00 1.39448237e+00 2.88935035e-01]]]]]; ov_res: [[[[[ 7.12422466e+00 -5.21937513e+00 4.49308205e+00 ... 2.22399771e-01 7.71127892e+00 6.41730833e+00] [-7.79998779e+00 1.32814283e+01 4.62489796e+00 ... -9.73466396e+00 -4.50795412e+00 -1.84567585e+01] [ 2.96952158e-01 -1.01691332e+01 -4.31233406e+00 ... 8.92164350e-01 6.37490511e+00 1.30506039e+01] ... [-7.06910944e+00 9.58695316e+00 -4.38244820e+00 ... 6.79028940e+00 8.55528831e+00 6.66635561e+00] [-6.81015968e+00 6.18200970e+00 4.21193838e+00 ... -9.39897728e+00 -1.09386044e+01 -2.47626305e+00] [-6.88059950e+00 -7.50097752e+00 -3.23470545e+00 ... -5.75621080e+00 -8.29046726e+00 7.69425488e+00]] [[-4.81014204e+00 8.70364457e-02 1.60340538e+01 ... -2.17834020e+00 1.11746800e+00 4.37319279e+00] [ 2.08483011e-01 1.11320276e+01 2.79804778e+00 ... 1.16277351e+01 6.10301352e+00 8.87155652e-01] [ 5.30333161e-01 6.37555456e+00 -1.32912028e+00 ... 2.70151186e+00 -2.44688559e+00 -8.48588467e-01] ... [-9.82808685e+00 3.33267665e+00 1.57296681e+00 ... -6.22280538e-02 -4.90436172e+00 -1.03912992e+01] [ 3.95798874e+00 -1.09795523e+01 5.75906181e+00 ... 1.40064251e+00 -6.28678370e+00 -5.27896214e+00] [-2.30473557e+01 2.68560982e+00 2.38846135e+00 ... 3.03093493e-01 7.10641623e+00 -4.03319550e+00]] [[ 7.35042095e+00 -2.78438687e+00 1.12037983e+01 ... 4.44410276e+00 -8.37773609e+00 1.83194103e+01] [ 4.56259394e+00 1.63122177e+00 -1.73501396e+00 ... 4.00571394e+00 -7.04378009e-01 2.53823495e+00] [-9.23515081e-01 -3.33304316e-01 1.56703072e+01 ... 3.48521590e+00 -1.81506574e+00 8.31756592e+00] ... [-9.99520969e+00 8.19782257e+00 4.62218380e+00 ... -5.53781319e+00 -4.83265114e+00 -1.05893736e+01] [-3.03081012e+00 -9.09703970e-01 1.54252434e+01 ... -4.99363089e+00 -1.41852245e+01 2.49136806e+00] [-3.87618351e+00 1.62921314e+01 -7.51676893e+00 ... 6.42983675e-01 3.74923229e+00 5.31502151e+00]] ... 
[[-4.00493622e+00 1.02596941e+01 -7.59870672e+00 ... 7.79716969e-01 -6.79372311e+00 -7.61774540e+00] [ 2.16323876e+00 -4.59595251e+00 9.65293407e+00 ... -1.37458448e+01 1.22836409e+01 2.39822626e+00] [ 4.24458790e+00 7.03383327e-01 1.25030136e+01 ... 7.96363688e+00 -7.56327868e+00 1.53017006e+01] ... [ 4.14334917e+00 -1.39860058e+01 -1.35184593e+01 ... 9.33480549e+00 -6.15859604e+00 1.91216826e+00] [ 5.36310816e+00 1.05268412e+01 -9.83517647e+00 ... -1.12078142e+01 -4.32060450e-01 5.91502011e-01] [ 1.68727226e+01 -8.69414520e+00 -5.97798729e+00 ... -2.33319092e+00 1.84272919e+01 2.27100039e+00]] [[-7.01733398e+00 -6.56437016e+00 3.31787848e+00 ... 1.75927124e+01 1.13279505e+01 -6.25867653e+00] [ 1.09438629e+01 -5.67479944e+00 -2.12918053e+01 ... -7.21814632e+00 3.44599032e+00 6.46196175e+00] [ 9.05833054e+00 5.27437353e+00 3.00649738e+00 ... 7.94203520e-01 1.04026804e+01 2.69365478e+00] ... [-1.77522171e+00 -6.86617470e+00 1.47985058e+01 ... 2.86338067e+00 -1.46691501e-01 -8.28237534e-01] [ 4.66193438e+00 -8.46540356e+00 1.04717169e+01 ... 5.81513119e+00 1.99294940e-01 7.36388397e+00] [ 8.22815228e+00 9.46557045e+00 -9.68708801e+00 ... 2.87040019e+00 1.49596119e+01 9.44413185e+00]] [[ 2.77116871e+00 -2.87378573e+00 -1.52420454e+01 ... 9.65427756e-01 1.87322483e+01 -6.91342878e+00] [ 1.79111061e+01 -3.24861026e+00 -1.11971006e+01 ... 3.07590508e+00 1.25155687e+01 5.08662844e+00] [ 7.51910305e+00 -1.72763824e+00 2.19618392e+00 ... -4.29677391e+00 3.37834656e-01 -1.93406308e+00] ... [ 2.03315711e+00 -7.82006359e+00 1.39786787e+01 ... 1.00855756e+00 1.02808285e+01 8.92986584e+00] [-7.44123936e+00 2.64031005e+00 -7.05938625e+00 ... -5.23098850e+00 1.32972946e+01 4.53555584e-03] [ 9.84912753e-01 7.02891111e+00 2.25547981e+00 ... 2.34871626e+00 6.60149288e+00 -4.44058418e+00]]]] [[[[-1.97781646e+00 -8.12821293e+00 4.70917374e-02 ... 3.86015511e+00 6.30977571e-01 -4.73646832e+00] [ 1.15307646e+01 -4.84977293e+00 -1.41497889e+01 ... 
-1.05744143e+01 7.32731199e+00 7.43412590e+00] [ 6.77928114e+00 3.59760237e+00 -6.28689170e-01 ... -5.40121269e+00 -3.07610840e-01 -8.44514942e+00] ... [-1.09665422e+01 -6.81562185e+00 6.22166991e-01 ... 5.58787012e+00 -3.40627360e+00 -6.39100027e+00] [ 8.73462677e+00 1.53055019e+01 -2.50025768e+01 ... 8.20127964e-01 5.82430887e+00 1.55593710e+01] [-1.95714843e+00 1.03396873e+01 1.30402336e+01 ... -7.47513676e+00 -7.77174282e+00 7.01812696e+00]] [[ 6.60551882e+00 1.67725296e+01 -1.32646360e+01 ... -5.22300816e+00 -1.42598228e+01 1.44614506e+01] [ 3.54755020e+00 1.32901344e+01 -4.68854856e+00 ... -8.63834953e+00 -9.08851027e-02 7.39630103e-01] [ 9.59056187e+00 5.08644438e+00 7.16553640e+00 ... 8.30708313e+00 1.60161281e+00 2.33039522e+00] ... [ 7.54558706e+00 1.44078207e+01 -3.89282227e+00 ... 1.40872979e+00 4.36373043e+00 -3.38762313e-01] [ 9.50197506e+00 1.25686102e+01 -8.74184132e+00 ... 4.81220245e+00 -2.06883621e+01 9.75470781e-01] [ 1.11281929e+01 -8.83791327e-01 -9.89516544e+00 ... -7.19525623e+00 6.91604805e+00 1.37932813e+00]] [[-9.08729649e+00 2.35349804e-01 -8.60447311e+00 ... 1.06427622e+01 2.33042908e+00 -9.20604324e+00] [ 5.64889431e+00 -7.08697510e+00 -9.48588276e+00 ... -2.86857080e+00 -1.73904133e+00 1.93009090e+01] [-4.67974949e+00 -6.96562624e+00 -2.43758869e+00 ... -8.93188858e+00 -1.24750080e+01 -2.50586185e+01] ... [-9.85142994e+00 -9.33912754e+00 2.34884644e+00 ... 8.93852592e-01 -1.92331374e+00 -1.17553310e+01] [-5.34166098e+00 -1.02461042e+01 -7.55381393e+00 ... -1.77976876e-01 1.22275555e+00 1.04968653e+01] [-3.64110541e+00 3.13410020e+00 1.59755349e+00 ... 7.82227945e+00 -6.56612015e+00 -2.42827702e+00]] ... [[-2.46556354e+00 7.91820192e+00 -9.15748501e+00 ... -1.51497936e+00 1.07463455e+01 -2.54794312e+00] [ 1.73142600e+00 1.02583714e+01 -3.25111079e+00 ... -1.32683313e+00 -5.00967789e+00 1.68133450e+00] [ 5.92950439e+00 1.99325264e+00 -4.13682365e+00 ... -1.14011173e+01 1.16000404e+01 1.10909452e+01] ... 
[-9.19151402e+00 -8.21852016e+00 -4.22224134e-01 ... -1.63530483e+01 9.34826374e-01 -4.47358608e+00] [ 6.66970134e-01 5.30382252e+00 7.69805193e+00 ... -1.71470623e+01 -1.13033342e+00 -5.35273695e+00] [-9.58379841e+00 -2.93193173e+00 -1.25320566e+00 ... 1.42460175e+01 -1.17294960e+01 1.10423040e+01]] [[ 1.05792389e+01 1.03500757e+01 -1.86391199e+00 ... -6.10838771e-01 -3.49283028e+00 -3.84295034e+00] [ 4.88383961e+00 -9.84402466e+00 -9.37467575e+00 ... 6.62036753e+00 1.04394913e+01 -6.65681839e+00] [ 1.09725952e+01 1.75931513e+00 -1.66693735e+00 ... -3.11184001e+00 7.94058561e+00 4.80703592e+00] ... [ 2.63944054e+00 5.47237349e+00 2.73333073e+00 ... 5.41061544e+00 -4.99326897e+00 -8.91076183e+00] [-2.57721500e+01 -4.59685755e+00 -1.35530691e+01 ... 2.95411181e+00 1.63677502e+01 -7.55553913e+00] [ 2.26277027e+01 1.22149162e+01 7.75548267e+00 ... -5.21048021e+00 -2.56856899e+01 3.13957334e-01]] [[-8.67404842e+00 -1.07946110e+01 -1.65924931e+00 ... 4.06948900e+00 1.87915592e+01 9.41900158e+00] [ 1.95911865e+01 3.87098002e+00 2.81949431e-01 ... -2.48510861e+00 1.46002102e+01 -1.38316050e+01] [ 8.44224644e+00 -3.65305829e+00 7.34381580e+00 ... 1.58661375e+01 -2.84195805e+00 -3.27201158e-01] ... [-5.80688286e+00 5.05999947e+00 -8.97798443e+00 ... 1.35761442e+01 3.53180242e+00 6.04488850e+00] [ 3.80210876e+00 1.61046314e+00 7.86646366e+00 ... 2.12211162e-01 4.35839128e+00 -1.88186111e+01] [-1.32261896e+00 6.71208906e+00 4.31561947e+00 ... 8.38076591e+00 1.39448237e+00 2.88935035e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_498.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0, 0]]() %3 : int[] = prim::Constant[value=[1, 1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %3, %2, %3, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 5.17711699e-01 -1.77915895e+00 -3.58486152e+00 ... 1.17884645e+01 -2.83955789e+00 3.21654963e+00] [-6.08190584e+00 3.41765738e+00 -4.34891129e+00 ... 1.00319099e+01 -2.98559904e+00 -1.69611502e+00] [ 3.21225786e+00 -1.11600628e+01 2.03492451e+00 ... 2.46971413e-01 2.28806992e+01 8.70305634e+00] ... [ 1.41004620e+01 -5.64832926e+00 -5.81182432e+00 ... -4.29425335e+00 2.46133327e+00 1.20012331e+00] [ 2.25122595e+00 -7.18902111e-01 3.65449834e+00 ... -4.63339949e+00 1.74133867e-01 -1.39033632e+01] [ 7.37408924e+00 -1.18446112e+01 1.25614262e+01 ... 3.64462554e-01 -1.64783039e+01 7.04858541e+00]] [[ 7.12145090e+00 9.61103821e+00 -1.44901876e+01 ... -7.00976419e+00 6.72915316e+00 1.36787310e+01] [ 2.85608697e+00 -1.96975982e+00 1.04448261e+01 ... 8.41148758e+00 3.53010774e+00 -9.23745441e+00] [-2.77247238e+00 -2.09395790e+00 -9.73412800e+00 ... 6.38131714e+00 2.80878468e+01 -8.88609028e+00] ... [-1.51554155e+01 -3.66548014e+00 -1.86266212e+01 ... 1.56568766e+01 -5.21408319e+00 -1.26715994e+01] [-8.00110340e+00 -5.30332184e+00 -3.85041714e+00 ... 5.64697456e+00 2.36635756e+00 -6.26237297e+00] [ 1.31736078e+01 -1.98278260e+00 -3.02731466e+00 ... 2.79693484e+00 -1.11439257e+01 -6.17239618e+00]] [[ 5.99307775e+00 6.26581490e-01 -7.27016687e+00 ... 
-1.44190788e+00 -8.91383648e-01 -5.53817797e+00] [ 8.80220890e+00 1.09561169e+00 -4.60360289e+00 ... 2.67004180e+00 2.04990363e+00 -6.26686335e-01] [ 2.99203849e+00 -2.51080990e+01 -2.73136377e+00 ... -1.86580598e+00 4.42418194e+00 1.31153927e+01] ... [ 6.76618528e+00 -9.52930450e+00 1.60983162e+01 ... -5.25990105e+00 -3.72458148e+00 2.71345091e+00] [-5.51928806e+00 1.03261366e+01 -6.28220844e+00 ... 2.69889641e+00 -5.29520607e+00 -4.84282780e+00] [-5.17339885e-01 1.04010067e+01 -6.57591915e+00 ... -6.94437838e+00 -5.80310917e+00 -1.03930178e+01]] ... [[ 1.77412643e+01 -6.93453741e+00 2.87591982e+00 ... -5.84840298e+00 6.72450352e+00 1.78772240e+01] [ 1.38199339e+01 9.81240940e+00 -5.04672909e+00 ... 1.34982557e+01 -6.69712734e+00 -6.73787737e+00] [ 6.92052722e-01 -4.19104624e+00 -1.36733627e+00 ... 1.59190369e+00 1.34637985e+01 1.36828918e+01] ... [ 1.49435577e+01 -8.94898224e+00 3.13157344e+00 ... 6.49146414e+00 5.00129795e+00 1.14893544e+00] [ 1.18965559e+01 -4.16017914e+00 2.06860018e+00 ... -2.33942556e+00 -4.18224669e+00 -1.15370331e+01] [ 3.61959010e-01 -8.12495041e+00 4.01797819e+00 ... -1.76923478e+00 -7.70851254e-01 6.20390606e+00]] [[ 4.92400616e-01 -5.12459993e+00 1.43939877e+01 ... 1.21006660e+01 1.94455605e+01 7.75658798e+00] [-1.54301987e+01 -3.72565508e+00 1.04029665e+01 ... 2.76649427e+00 -9.04633582e-01 -6.15487623e+00] [-2.45558500e+00 5.03721285e+00 -3.54613161e+00 ... -1.10979109e+01 2.22946024e+00 1.80726147e+00] ... [ 5.94825649e+00 -1.08309317e+01 1.91258831e+01 ... 8.70278263e+00 -1.56453543e+01 -1.21838665e+01] [ 2.06000175e+01 1.43975058e+01 -1.81569850e+00 ... -5.49039125e+00 1.16908636e+01 -4.05284166e+00] [ 4.84782696e+00 -8.87034416e+00 3.16429257e-01 ... 3.09903908e+00 -1.40849102e+00 7.05589056e+00]] [[ 1.79466450e+00 -4.02146959e+00 4.88116169e+00 ... -7.16184902e+00 3.61143589e-01 1.04646721e+01] [-1.89003384e+00 7.44616365e+00 -1.31747351e+01 ... 
2.08282661e+01 -2.89237094e+00 -9.96146488e+00] [-9.22249031e+00 -2.23443031e-01 -1.56473217e+01 ... -1.23234119e+01 -1.36993682e+00 9.36569309e+00] ... [ 2.43531480e-01 8.21570873e+00 -4.19694281e+00 ... 2.24137068e+00 -4.10701084e+00 -3.22664499e+00] [-1.76969528e+01 -2.79078269e+00 -1.16141806e+01 ... 1.81140995e+00 -1.01249208e+01 5.78637123e+00] [ 7.14501905e+00 -1.79296303e+01 1.24810114e+01 ... -1.09709244e+01 3.76104164e+00 1.07409725e+01]]]] [[[[ 1.06328430e+01 -9.94208908e+00 -1.22276897e+01 ... 1.06453285e+01 1.32643199e+00 -4.28299141e+00] [-8.40773869e+00 -6.35685921e-01 1.21995373e+01 ... 3.08268166e+00 1.11913767e+01 -3.60552883e+00] [-8.85529995e+00 2.58477616e+00 -5.05401754e+00 ... -1.08817549e+01 8.77164781e-01 -1.25456390e+01] ... [-5.41843510e+00 1.06971276e+00 -1.29171572e+01 ... -4.32802081e-01 1.99040723e+00 -2.33293171e+01] [-7.34009314e+00 2.82828450e+00 -7.82062232e-01 ... 8.13703346e+00 -5.38249397e+00 -1.28889580e+01] [ 1.96265924e+00 1.51776361e+00 -3.76864600e+00 ... -1.04893255e+01 9.11016369e+00 5.59072876e+00]] [[ 3.19820833e+00 6.80434656e+00 -1.11442776e+01 ... -1.35606947e+01 4.45008039e+00 -9.15423393e+00] [-6.27247810e+00 -6.64451361e+00 5.98423290e+00 ... 1.08932610e+01 2.04555941e+00 -3.42403483e+00] [-1.46929531e+01 -1.22550392e+00 -1.74423676e+01 ... 1.08475370e+01 -2.31699562e+01 -1.50186968e+01] ... [ 9.90715885e+00 -8.04682541e+00 2.69812942e+00 ... 4.25249052e+00 7.47170258e+00 8.80788612e+00] [ 1.05055275e+01 -6.39005542e-01 1.34370527e+01 ... -7.26666212e+00 6.26823759e+00 1.49139893e+00] [ 8.79569054e+00 5.78237772e+00 6.28216410e+00 ... -4.18730116e+00 -1.34959185e+00 8.65793610e+00]] [[ 4.21738768e+00 -6.39830303e+00 -1.08828745e+01 ... 2.01987267e+00 -5.03384924e+00 -1.16660814e+01] [ 6.67793703e+00 2.65003538e+00 5.29304695e+00 ... -1.56597543e+00 1.68088627e+00 1.68798809e+01] [-1.57152567e+01 4.28264111e-01 1.71756732e+00 ... -3.74964625e-01 -7.84726048e+00 5.82007408e+00] ... 
[ 5.72453880e+00 6.01577997e+00 -7.14660978e+00 ... -1.54825830e+00 -5.35926580e+00 2.80232596e+00] [-5.15335131e+00 3.06844831e+00 -1.58006735e+01 ... 1.13590086e+00 4.05548096e+00 2.05756998e+00] [ 3.22739673e+00 -1.98975253e+00 1.01240778e+01 ... -4.74136734e+00 -5.94866097e-01 -2.60650730e+00]] ... [[-7.21920443e+00 -7.22006679e-01 2.02548885e+00 ... 6.35705137e+00 -6.02963257e+00 -8.77625561e+00] [-4.88790607e+00 -2.45017743e+00 -1.09422007e+01 ... -1.24660645e+01 9.98974502e-01 5.91497755e+00] [ 6.11455774e+00 -9.99729061e+00 7.22950554e+00 ... 2.36917520e+00 7.08889389e+00 7.86775541e+00] ... [-1.27965677e+00 -1.64949188e+01 3.61250782e+00 ... -6.88514328e+00 -9.38624859e+00 3.39695692e+00] [ 1.74051876e+01 -2.94193382e+01 1.12078285e+01 ... -2.53756309e+00 -3.64908051e+00 1.21119785e+01] [-5.01975965e+00 1.96654546e+00 1.11339598e+01 ... 2.37969327e+00 -3.48046064e+00 4.90242243e+00]] [[-1.44448051e+01 1.07442932e+01 -2.10425735e+00 ... -8.08936024e+00 8.60916138e+00 2.72301579e+00] [-2.37348289e+01 1.25672693e+01 -6.72026014e+00 ... 8.71594048e+00 9.74142456e+00 3.61106205e+00] [-1.04353487e+00 -1.11141872e+01 2.34401941e+00 ... 3.19184828e+00 7.27353048e+00 -3.61585164e+00] ... [ 5.68885422e+00 6.95657635e+00 -9.56908131e+00 ... -5.51474667e+00 8.90117359e+00 -2.98655677e+00] [-6.12621117e+00 -8.32666016e+00 -2.80799770e+00 ... -1.19122543e+01 1.08133135e+01 1.11168489e+01] [-3.24432755e+00 1.37480223e+00 5.15065312e-01 ... 3.53667164e+00 -2.15014124e+00 -5.59395313e-01]] [[-1.32102060e+01 -1.23030385e-02 -6.40383625e+00 ... 4.89740896e+00 -1.52041888e+00 4.32885313e+00] [ 1.02338963e+01 -3.34159803e+00 -6.13141489e+00 ... -1.12495193e+01 3.73390913e+00 3.13449621e+00] [ 3.43098164e-01 1.66312943e+01 -2.96215892e+00 ... -7.06292772e+00 5.84667635e+00 6.14698172e+00] ... [-4.02341223e+00 -9.17418861e+00 -6.97236109e+00 ... 7.51531792e+00 2.49777722e+00 -1.16676207e+01] [ 8.98728561e+00 4.84575272e+00 -8.18815649e-01 ... 
-5.95212889e+00 2.58938169e+00 -9.01841927e+00] [-1.88885651e+01 1.58627033e+01 -1.25549068e+01 ... 6.70091915e+00 -1.20337210e+01 -4.14592457e+00]]]]]; ov_res: [[[[[ 5.17711699e-01 -1.77915895e+00 -3.58486152e+00 ... 1.17884645e+01 -2.83955789e+00 3.21654963e+00] [-6.08190584e+00 3.41765738e+00 -4.34891129e+00 ... 1.00319099e+01 -2.98559904e+00 -1.69611502e+00] [ 3.21225786e+00 -1.11600628e+01 2.03492451e+00 ... 2.46971413e-01 2.28806992e+01 8.70305634e+00] ... [ 1.41004620e+01 -5.64832926e+00 -5.81182432e+00 ... -4.29425335e+00 2.46133327e+00 1.20012331e+00] [ 2.25122595e+00 -7.18902111e-01 3.65449834e+00 ... -4.63339949e+00 1.74133867e-01 -1.39033632e+01] [ 7.37408924e+00 -1.18446112e+01 1.25614262e+01 ... 3.64462554e-01 -1.64783039e+01 7.04858541e+00]] [[ 7.12145090e+00 9.61103821e+00 -1.44901876e+01 ... -7.00976419e+00 6.72915316e+00 1.36787310e+01] [ 2.85608697e+00 -1.96975982e+00 1.04448261e+01 ... 8.41148758e+00 3.53010774e+00 -9.23745441e+00] [-2.77247238e+00 -2.09395790e+00 -9.73412800e+00 ... 6.38131714e+00 2.80878468e+01 -8.88609028e+00] ... [-1.51554155e+01 -3.66548014e+00 -1.86266212e+01 ... 1.56568766e+01 -5.21408319e+00 -1.26715994e+01] [-8.00110340e+00 -5.30332184e+00 -3.85041714e+00 ... 5.64697456e+00 2.36635756e+00 -6.26237297e+00] [ 1.31736078e+01 -1.98278260e+00 -3.02731466e+00 ... 2.79693484e+00 -1.11439257e+01 -6.17239618e+00]] [[ 5.99307775e+00 6.26581490e-01 -7.27016687e+00 ... -1.44190788e+00 -8.91383648e-01 -5.53817797e+00] [ 8.80220890e+00 1.09561169e+00 -4.60360289e+00 ... 2.67004180e+00 2.04990363e+00 -6.26686335e-01] [ 2.99203849e+00 -2.51080990e+01 -2.73136377e+00 ... -1.86580598e+00 4.42418194e+00 1.31153927e+01] ... [ 6.76618528e+00 -9.52930450e+00 1.60983162e+01 ... -5.25990105e+00 -3.72458148e+00 2.71345091e+00] [-5.51928806e+00 1.03261366e+01 -6.28220844e+00 ... 2.69889641e+00 -5.29520607e+00 -4.84282780e+00] [-5.17339885e-01 1.04010067e+01 -6.57591915e+00 ... -6.94437838e+00 -5.80310917e+00 -1.03930178e+01]] ... 
[[ 1.77412643e+01 -6.93453741e+00 2.87591982e+00 ... -5.84840298e+00 6.72450352e+00 1.78772240e+01] [ 1.38199339e+01 9.81240940e+00 -5.04672909e+00 ... 1.34982557e+01 -6.69712734e+00 -6.73787737e+00] [ 6.92052722e-01 -4.19104624e+00 -1.36733627e+00 ... 1.59190369e+00 1.34637985e+01 1.36828918e+01] ... [ 1.49435577e+01 -8.94898224e+00 3.13157344e+00 ... 6.49146414e+00 5.00129795e+00 1.14893544e+00] [ 1.18965559e+01 -4.16017914e+00 2.06860018e+00 ... -2.33942556e+00 -4.18224669e+00 -1.15370331e+01] [ 3.61959010e-01 -8.12495041e+00 4.01797819e+00 ... -1.76923478e+00 -7.70851254e-01 6.20390606e+00]] [[ 4.92400616e-01 -5.12459993e+00 1.43939877e+01 ... 1.21006660e+01 1.94455605e+01 7.75658798e+00] [-1.54301987e+01 -3.72565508e+00 1.04029665e+01 ... 2.76649427e+00 -9.04633582e-01 -6.15487623e+00] [-2.45558500e+00 5.03721285e+00 -3.54613161e+00 ... -1.10979109e+01 2.22946024e+00 1.80726147e+00] ... [ 5.94825649e+00 -1.08309317e+01 1.91258831e+01 ... 8.70278263e+00 -1.56453543e+01 -1.21838665e+01] [ 2.06000175e+01 1.43975058e+01 -1.81569850e+00 ... -5.49039125e+00 1.16908636e+01 -4.05284166e+00] [ 4.84782696e+00 -8.87034416e+00 3.16429257e-01 ... 3.09903908e+00 -1.40849102e+00 7.05589056e+00]] [[ 1.79466450e+00 -4.02146959e+00 4.88116169e+00 ... -7.16184902e+00 3.61143589e-01 1.04646721e+01] [-1.89003384e+00 7.44616365e+00 -1.31747351e+01 ... 2.08282661e+01 -2.89237094e+00 -9.96146488e+00] [-9.22249031e+00 -2.23443031e-01 -1.56473217e+01 ... -1.23234119e+01 -1.36993682e+00 9.36569309e+00] ... [ 2.43531480e-01 8.21570873e+00 -4.19694281e+00 ... 2.24137068e+00 -4.10701084e+00 -3.22664499e+00] [-1.76969528e+01 -2.79078269e+00 -1.16141806e+01 ... 1.81140995e+00 -1.01249208e+01 5.78637123e+00] [ 7.14501905e+00 -1.79296303e+01 1.24810114e+01 ... -1.09709244e+01 3.76104164e+00 1.07409725e+01]]]] [[[[ 1.06328430e+01 -9.94208908e+00 -1.22276897e+01 ... 1.06453285e+01 1.32643199e+00 -4.28299141e+00] [-8.40773869e+00 -6.35685921e-01 1.21995373e+01 ... 
3.08268166e+00 1.11913767e+01 -3.60552883e+00] [-8.85529995e+00 2.58477616e+00 -5.05401754e+00 ... -1.08817549e+01 8.77164781e-01 -1.25456390e+01] ... [-5.41843510e+00 1.06971276e+00 -1.29171572e+01 ... -4.32802081e-01 1.99040723e+00 -2.33293171e+01] [-7.34009314e+00 2.82828450e+00 -7.82062232e-01 ... 8.13703346e+00 -5.38249397e+00 -1.28889580e+01] [ 1.96265924e+00 1.51776361e+00 -3.76864600e+00 ... -1.04893255e+01 9.11016369e+00 5.59072876e+00]] [[ 3.19820833e+00 6.80434656e+00 -1.11442776e+01 ... -1.35606947e+01 4.45008039e+00 -9.15423393e+00] [-6.27247810e+00 -6.64451361e+00 5.98423290e+00 ... 1.08932610e+01 2.04555941e+00 -3.42403483e+00] [-1.46929531e+01 -1.22550392e+00 -1.74423676e+01 ... 1.08475370e+01 -2.31699562e+01 -1.50186968e+01] ... [ 9.90715885e+00 -8.04682541e+00 2.69812942e+00 ... 4.25249052e+00 7.47170258e+00 8.80788612e+00] [ 1.05055275e+01 -6.39005542e-01 1.34370527e+01 ... -7.26666212e+00 6.26823759e+00 1.49139893e+00] [ 8.79569054e+00 5.78237772e+00 6.28216410e+00 ... -4.18730116e+00 -1.34959185e+00 8.65793610e+00]] [[ 4.21738768e+00 -6.39830303e+00 -1.08828745e+01 ... 2.01987267e+00 -5.03384924e+00 -1.16660814e+01] [ 6.67793703e+00 2.65003538e+00 5.29304695e+00 ... -1.56597543e+00 1.68088627e+00 1.68798809e+01] [-1.57152567e+01 4.28264111e-01 1.71756732e+00 ... -3.74964625e-01 -7.84726048e+00 5.82007408e+00] ... [ 5.72453880e+00 6.01577997e+00 -7.14660978e+00 ... -1.54825830e+00 -5.35926580e+00 2.80232596e+00] [-5.15335131e+00 3.06844831e+00 -1.58006735e+01 ... 1.13590086e+00 4.05548096e+00 2.05756998e+00] [ 3.22739673e+00 -1.98975253e+00 1.01240778e+01 ... -4.74136734e+00 -5.94866097e-01 -2.60650730e+00]] ... [[-7.21920443e+00 -7.22006679e-01 2.02548885e+00 ... 6.35705137e+00 -6.02963257e+00 -8.77625561e+00] [-4.88790607e+00 -2.45017743e+00 -1.09422007e+01 ... -1.24660645e+01 9.98974502e-01 5.91497755e+00] [ 6.11455774e+00 -9.99729061e+00 7.22950554e+00 ... 2.36917520e+00 7.08889389e+00 7.86775541e+00] ... 
[-1.27965677e+00 -1.64949188e+01 3.61250782e+00 ... -6.88514328e+00 -9.38624859e+00 3.39695692e+00] [ 1.74051876e+01 -2.94193382e+01 1.12078285e+01 ... -2.53756309e+00 -3.64908051e+00 1.21119785e+01] [-5.01975965e+00 1.96654546e+00 1.11339598e+01 ... 2.37969327e+00 -3.48046064e+00 4.90242243e+00]] [[-1.44448051e+01 1.07442932e+01 -2.10425735e+00 ... -8.08936024e+00 8.60916138e+00 2.72301579e+00] [-2.37348289e+01 1.25672693e+01 -6.72026014e+00 ... 8.71594048e+00 9.74142456e+00 3.61106205e+00] [-1.04353487e+00 -1.11141872e+01 2.34401941e+00 ... 3.19184828e+00 7.27353048e+00 -3.61585164e+00] ... [ 5.68885422e+00 6.95657635e+00 -9.56908131e+00 ... -5.51474667e+00 8.90117359e+00 -2.98655677e+00] [-6.12621117e+00 -8.32666016e+00 -2.80799770e+00 ... -1.19122543e+01 1.08133135e+01 1.11168489e+01] [-3.24432755e+00 1.37480223e+00 5.15065312e-01 ... 3.53667164e+00 -2.15014124e+00 -5.59395313e-01]] [[-1.32102060e+01 -1.23030385e-02 -6.40383625e+00 ... 4.89740896e+00 -1.52041888e+00 4.32885313e+00] [ 1.02338963e+01 -3.34159803e+00 -6.13141489e+00 ... -1.12495193e+01 3.73390913e+00 3.13449621e+00] [ 3.43098164e-01 1.66312943e+01 -2.96215892e+00 ... -7.06292772e+00 5.84667635e+00 6.14698172e+00] ... [-4.02341223e+00 -9.17418861e+00 -6.97236109e+00 ... 7.51531792e+00 2.49777722e+00 -1.16676207e+01] [ 8.98728561e+00 4.84575272e+00 -8.18815649e-01 ... -5.95212889e+00 2.58938169e+00 -9.01841927e+00] [-1.88885651e+01 1.58627033e+01 -1.25549068e+01 ... 6.70091915e+00 -1.20337210e+01 -4.14592457e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 2, 'pads': 0, 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_500.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %3 : int[] = prim::Constant[value=[0, 0, 0]]() %4 : int[] = prim::Constant[value=[2, 2, 2]]() %self.dilations : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.dilations) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%8) fw_re: [[[[[-10.813376 -5.903463 14.192956 ... 9.2280245 -5.022155 5.6197157 ] [ -1.1405566 -20.277674 -1.6048416 ... 12.909248 13.41243 -20.222754 ] [ 12.902474 17.743013 4.9157 ... 2.6667926 2.0081499 -2.131286 ] ... [ 5.114377 6.9297137 -3.8280537 ... -7.7010436 18.559992 -5.1355824 ] [ 9.074237 -2.5615246 -13.48891 ... 3.8768258 22.136703 6.7176213 ] [ -6.7804904 -4.148428 -9.644743 ... 2.5303957 0.22784108 15.377027 ]] [[ -0.49107838 0.29355532 9.189094 ... 10.636828 4.8510137 5.8723187 ] [ -9.7008095 -5.758394 8.501069 ... 4.9023356 -12.285997 -10.696207 ] [ -5.9327393 3.1104174 -2.2234735 ... 9.947169 10.486643 -1.5615109 ] ... [ 4.653839 -22.565525 -6.4975963 ... 20.524754 12.919674 -8.874224 ] [ -0.40249082 -8.954956 4.62578 ... 2.7157085 2.6950703 16.50391 ] [ 3.155663 -10.092865 -10.428464 ... 1.3163201 -5.5412574 -4.828243 ]] [[ 9.085835 8.692121 -8.735603 ... -9.703045 -10.992484 0.08106556] [ 12.542413 -0.10888676 -17.743717 ... 8.149411 -32.21671 2.121927 ] [ -6.8247046 -5.261261 -12.435077 ... 7.774042 0.5417012 15.82804 ] ... [-12.886059 -3.3851378 14.180234 ... 11.165657 5.5108113 -7.693876 ] [-18.229397 -4.656882 -0.6422008 ... 5.8195634 28.520157 -5.6446404 ] [ -1.9134803 -7.6694365 -9.224641 ... -7.0756903 17.121317 -4.4306335 ]] ... 
[[-10.84536 2.7396083 6.646543 ... 3.808461 -3.1523438 5.0596642 ] [-21.68145 -1.9631369 3.0857322 ... 6.170568 0.6023156 -16.297667 ] [ 15.711621 4.0188155 0.2536181 ... 3.6609142 -6.752961 0.2614697 ] ... [ 19.204752 8.699041 -13.448994 ... -8.739857 -6.766188 -7.9966073 ] [ -2.9268606 -3.3655863 3.2245364 ... 7.790995 -3.5272572 5.116636 ] [ 7.0005493 -5.9141054 0.81385213 ... -5.1087103 7.657911 17.751617 ]] [[ 13.937616 -6.6694646 7.7594366 ... -13.793778 6.956314 23.934729 ] [ 5.3085475 -15.997611 -7.292663 ... -28.542496 5.898573 -14.893396 ] [-11.225325 -17.20381 -17.735289 ... -7.745356 1.6783822 -5.694785 ] ... [-11.147134 -22.111465 -7.126197 ... 0.9414536 -15.840831 -2.78999 ] [ 13.543319 5.1007237 9.57547 ... -6.5261436 16.037106 -2.1244884 ] [ -3.7318845 -7.2085314 10.786656 ... -2.8387568 2.089609 -28.821142 ]] [[ 10.605378 -13.44237 -10.496542 ... -19.007267 0.7046206 5.288547 ] [ 13.788632 -0.7972598 -0.45662475 ... -9.290627 7.0537896 -12.3533 ] [ -9.667974 16.256136 8.161859 ... -9.639413 -8.896338 -9.384163 ] ... [-11.060544 0.29847163 -16.64761 ... -10.4600935 -2.78191 18.140533 ] [-19.90071 12.069168 1.1854963 ... -3.2253938 -6.787144 3.8597689 ] [ -1.6746725 12.326749 8.544252 ... 3.5110385 -4.938698 12.729679 ]]]] [[[[ -8.65057 -23.180235 -9.515325 ... -14.201318 -5.587029 23.888199 ] [ -8.893626 -5.4130983 -5.8834567 ... -5.2010217 -4.4657474 27.734015 ] [ -1.378682 -5.2695656 -19.259977 ... -3.7526948 11.277707 -0.69819826] ... [ 16.36138 6.463255 -18.071516 ... 9.346048 1.7950337 -11.331501 ] [ 4.0873585 -1.6947467 1.1590093 ... 12.077212 25.618864 -9.459302 ] [ 2.2684863 2.261183 5.7757034 ... -20.213089 0.09989526 2.8424017 ]] [[ -5.0729613 -0.5938493 -9.575162 ... -2.0969517 0.59099346 -12.207224 ] [ 5.8873763 10.065952 -3.4271195 ... 4.659662 17.08935 7.7802534 ] [ -9.858449 -14.889004 -21.140627 ... -6.480668 8.372799 -3.8602202 ] ... [ -2.1117253 3.127363 2.0779798 ... 
-18.72612 18.860209 26.534822 ] [-16.590244 1.1750346 -5.782129 ... -11.611925 -19.759972 2.339648 ] [ -1.370567 -15.0525875 14.010058 ... 4.779302 -5.1890206 -7.890287 ]] [[-14.083304 14.820019 -6.4699883 ... -5.394891 9.119727 -5.6949077 ] [ 4.9586644 -3.5279658 6.296207 ... 14.179019 -0.96984464 -16.403402 ] [ 4.8723655 -5.436731 -15.248541 ... 2.1238449 2.8488424 10.024901 ] ... [ -7.0786796 10.979669 -6.462157 ... 13.177533 -17.60345 17.972889 ] [-12.871155 9.192351 5.4690523 ... 0.86498094 -5.384218 6.1458416 ] [ 7.378099 12.789011 8.6766615 ... 5.1916637 -6.9761405 6.014417 ]] ... [[ -3.5710022 -1.3207409 -12.548315 ... 9.82648 6.213642 -3.0153587 ] [ -0.55562544 -12.338579 -9.934497 ... 9.619516 13.417781 10.965321 ] [ -6.8750105 -1.2757207 10.772978 ... 17.1491 -2.18023 4.2388916 ] ... [-17.022121 -11.057195 -7.38026 ... 6.9637885 -9.433892 -5.3577423 ] [ 18.145882 -3.5449562 3.8854942 ... -1.2962661 4.9939284 14.96915 ] [-11.247902 5.7594175 1.2473804 ... 2.6466641 -2.7222812 -6.3721824 ]] [[ 2.653936 4.278816 2.5520852 ... 15.320627 -4.0579104 -3.5258582 ] [ -5.80778 -2.8079493 -9.533952 ... 4.883349 -3.3662643 -1.4064353 ] [-17.397268 5.230104 -10.831642 ... -13.801964 -4.707972 -4.0173197 ] ... [ 13.168101 -4.618216 10.873222 ... 2.3335977 5.781956 -5.805573 ] [ -5.4738617 17.778614 19.984612 ... 8.554577 1.62576 5.074783 ] [-14.769379 -1.4995279 -3.0425148 ... -18.264265 -0.47229263 19.740606 ]] [[-18.269741 2.789672 12.4015 ... -13.3386 -2.798387 21.244356 ] [ 0.230438 14.841132 1.3965809 ... -10.391007 -7.863318 4.2816777 ] [ -4.3891444 -3.482733 -0.22497045 ... 15.155997 -0.597877 -5.1315365 ] ... [ 17.148811 7.7434154 -7.2164464 ... -17.442822 13.53145 6.176556 ] [ -1.5049978 -0.7747472 -2.9343154 ... -15.741432 6.7030516 13.371281 ] [ -9.071495 -6.7510056 14.972941 ... -12.267815 -2.9143007 -11.038538 ]]]]]; ov_res: [[[[[-10.813376 -5.903463 14.192956 ... 9.2280245 -5.022155 5.6197157 ] [ -1.1405566 -20.277674 -1.6048416 ... 
12.909248 13.41243 -20.222754 ] [ 12.902474 17.743013 4.9157 ... 2.6667926 2.0081499 -2.131286 ] ... [ 5.114377 6.9297137 -3.8280537 ... -7.7010436 18.559992 -5.1355824 ] [ 9.074237 -2.5615246 -13.48891 ... 3.8768258 22.136703 6.7176213 ] [ -6.7804904 -4.148428 -9.644743 ... 2.5303957 0.22784108 15.377027 ]] [[ -0.49107838 0.29355532 9.189094 ... 10.636828 4.8510137 5.8723187 ] [ -9.7008095 -5.758394 8.501069 ... 4.9023356 -12.285997 -10.696207 ] [ -5.9327393 3.1104174 -2.2234735 ... 9.947169 10.486643 -1.5615109 ] ... [ 4.653839 -22.565525 -6.4975963 ... 20.524754 12.919674 -8.874224 ] [ -0.40249082 -8.954956 4.62578 ... 2.7157085 2.6950703 16.50391 ] [ 3.155663 -10.092865 -10.428464 ... 1.3163201 -5.5412574 -4.828243 ]] [[ 9.085835 8.692121 -8.735603 ... -9.703045 -10.992484 0.08106556] [ 12.542413 -0.10888676 -17.743717 ... 8.149411 -32.21671 2.121927 ] [ -6.8247046 -5.261261 -12.435077 ... 7.774042 0.5417012 15.82804 ] ... [-12.886059 -3.3851378 14.180234 ... 11.165657 5.5108113 -7.693876 ] [-18.229397 -4.656882 -0.6422008 ... 5.8195634 28.520157 -5.6446404 ] [ -1.9134803 -7.6694365 -9.224641 ... -7.0756903 17.121317 -4.4306335 ]] ... [[-10.84536 2.7396083 6.646543 ... 3.808461 -3.1523438 5.0596642 ] [-21.68145 -1.9631369 3.0857322 ... 6.170568 0.6023156 -16.297667 ] [ 15.711621 4.0188155 0.2536181 ... 3.6609142 -6.752961 0.2614697 ] ... [ 19.204752 8.699041 -13.448994 ... -8.739857 -6.766188 -7.9966073 ] [ -2.9268606 -3.3655863 3.2245364 ... 7.790995 -3.5272572 5.116636 ] [ 7.0005493 -5.9141054 0.81385213 ... -5.1087103 7.657911 17.751617 ]] [[ 13.937616 -6.6694646 7.7594366 ... -13.793778 6.956314 23.934729 ] [ 5.3085475 -15.997611 -7.292663 ... -28.542496 5.898573 -14.893396 ] [-11.225325 -17.20381 -17.735289 ... -7.745356 1.6783822 -5.694785 ] ... [-11.147134 -22.111465 -7.126197 ... 0.9414536 -15.840831 -2.78999 ] [ 13.543319 5.1007237 9.57547 ... -6.5261436 16.037106 -2.1244884 ] [ -3.7318845 -7.2085314 10.786656 ... 
-2.8387568 2.089609 -28.821142 ]] [[ 10.605378 -13.44237 -10.496542 ... -19.007267 0.7046206 5.288547 ] [ 13.788632 -0.7972598 -0.45662475 ... -9.290627 7.0537896 -12.3533 ] [ -9.667974 16.256136 8.161859 ... -9.639413 -8.896338 -9.384163 ] ... [-11.060544 0.29847163 -16.64761 ... -10.4600935 -2.78191 18.140533 ] [-19.90071 12.069168 1.1854963 ... -3.2253938 -6.787144 3.8597689 ] [ -1.6746725 12.326749 8.544252 ... 3.5110385 -4.938698 12.729679 ]]]] [[[[ -8.65057 -23.180235 -9.515325 ... -14.201318 -5.587029 23.888199 ] [ -8.893626 -5.4130983 -5.8834567 ... -5.2010217 -4.4657474 27.734015 ] [ -1.378682 -5.2695656 -19.259977 ... -3.7526948 11.277707 -0.69819826] ... [ 16.36138 6.463255 -18.071516 ... 9.346048 1.7950337 -11.331501 ] [ 4.0873585 -1.6947467 1.1590093 ... 12.077212 25.618864 -9.459302 ] [ 2.2684863 2.261183 5.7757034 ... -20.213089 0.09989526 2.8424017 ]] [[ -5.0729613 -0.5938493 -9.575162 ... -2.0969517 0.59099346 -12.207224 ] [ 5.8873763 10.065952 -3.4271195 ... 4.659662 17.08935 7.7802534 ] [ -9.858449 -14.889004 -21.140627 ... -6.480668 8.372799 -3.8602202 ] ... [ -2.1117253 3.127363 2.0779798 ... -18.72612 18.860209 26.534822 ] [-16.590244 1.1750346 -5.782129 ... -11.611925 -19.759972 2.339648 ] [ -1.370567 -15.0525875 14.010058 ... 4.779302 -5.1890206 -7.890287 ]] [[-14.083304 14.820019 -6.4699883 ... -5.394891 9.119727 -5.6949077 ] [ 4.9586644 -3.5279658 6.296207 ... 14.179019 -0.96984464 -16.403402 ] [ 4.8723655 -5.436731 -15.248541 ... 2.1238449 2.8488424 10.024901 ] ... [ -7.0786796 10.979669 -6.462157 ... 13.177533 -17.60345 17.972889 ] [-12.871155 9.192351 5.4690523 ... 0.86498094 -5.384218 6.1458416 ] [ 7.378099 12.789011 8.6766615 ... 5.1916637 -6.9761405 6.014417 ]] ... [[ -3.5710022 -1.3207409 -12.548315 ... 9.82648 6.213642 -3.0153587 ] [ -0.55562544 -12.338579 -9.934497 ... 9.619516 13.417781 10.965321 ] [ -6.8750105 -1.2757207 10.772978 ... 17.1491 -2.18023 4.2388916 ] ... [-17.022121 -11.057195 -7.38026 ... 
6.9637885 -9.433892 -5.3577423 ] [ 18.145882 -3.5449562 3.8854942 ... -1.2962661 4.9939284 14.96915 ] [-11.247902 5.7594175 1.2473804 ... 2.6466641 -2.7222812 -6.3721824 ]] [[ 2.653936 4.278816 2.5520852 ... 15.320627 -4.0579104 -3.5258582 ] [ -5.80778 -2.8079493 -9.533952 ... 4.883349 -3.3662643 -1.4064353 ] [-17.397268 5.230104 -10.831642 ... -13.801964 -4.707972 -4.0173197 ] ... [ 13.168101 -4.618216 10.873222 ... 2.3335977 5.781956 -5.805573 ] [ -5.4738617 17.778614 19.984612 ... 8.554577 1.62576 5.074783 ] [-14.769379 -1.4995279 -3.0425148 ... -18.264265 -0.47229263 19.740606 ]] [[-18.269741 2.789672 12.4015 ... -13.3386 -2.798387 21.244356 ] [ 0.230438 14.841132 1.3965809 ... -10.391007 -7.863318 4.2816777 ] [ -4.3891444 -3.482733 -0.22497045 ... 15.155997 -0.597877 -5.1315365 ] ... [ 17.148811 7.7434154 -7.2164464 ... -17.442822 13.53145 6.176556 ] [ -1.5049978 -0.7747472 -2.9343154 ... -15.741432 6.7030516 13.371281 ] [ -9.071495 -6.7510056 14.972941 ... -12.267815 -2.9143007 -11.038538 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 1, 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_502.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %6 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %2, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%6) fw_re: [[[[[ 5.42741776e-01 -7.62409449e+00 9.25206304e-01 ... 4.43499470e+00 -1.94793975e+00 6.16231084e-01] [-4.91196774e-02 -7.06900978e+00 -9.53208685e-01 ... -3.26815319e+00 -9.39316940e+00 1.66491759e+00] [-1.42206454e+00 -7.62910843e+00 6.51129484e+00 ... 5.06588602e+00 -9.15186024e+00 -5.95365620e+00] ... [-2.24157381e+00 -3.75116277e+00 -1.48330712e+00 ... 6.82259035e+00 -1.11887479e+00 1.07095766e+01] [ 6.40415192e+00 5.39849854e+00 5.06010723e+00 ... 7.37700033e+00 8.70308590e+00 -8.19375896e+00] [-7.65401065e-01 -6.13416016e-01 -1.25345697e+01 ... -1.13522739e+01 1.11083031e+01 4.22971821e+00]] [[-3.79321074e+00 -1.29356158e+00 5.65086842e+00 ... 1.76455808e+00 7.77830124e+00 3.88618171e-01] [ 1.03071394e+01 2.88795543e+00 3.27044892e+00 ... 7.45586777e+00 -1.70694809e+01 8.98446751e+00] [-3.23021460e+00 1.45408564e+01 -1.21290855e+01 ... -4.36974049e+00 8.31999969e+00 -3.96386003e+00] ... [ 1.40752184e+00 -1.12739143e+01 6.17473412e+00 ... -9.61105537e+00 1.05019779e+01 1.15906153e+01] [-1.51933622e+00 8.74870300e+00 4.86346817e+00 ... -4.35718727e+00 -7.64903116e+00 -1.12275147e+00] [ 1.54056919e+00 -7.01145363e+00 9.98596764e+00 ... 1.05516396e+01 -5.10973454e-01 3.54311872e+00]] [[-2.86623001e-01 2.16255951e+00 1.13137150e+00 ... 7.39985847e+00 6.53563917e-01 -7.90167952e+00] [ 6.14401865e+00 -4.43842506e+00 6.63971710e+00 ... 
-5.60128355e+00 1.22861338e+00 -4.02632284e+00] [ 1.66375697e+00 3.84517407e+00 -6.72032908e-02 ... 1.76711094e+00 1.33163393e+00 -1.72547340e+00] ... [ 8.13765907e+00 5.75024462e+00 3.47970629e+00 ... 1.34037981e+01 -1.68883055e-01 1.41830003e+00] [-5.42338133e+00 1.07884302e+01 -1.46946898e+01 ... -5.48605061e+00 9.94279861e+00 3.30724812e+00] [ 4.91756022e-01 2.02381611e+00 6.48837900e+00 ... 5.70696974e+00 -1.22669258e+01 -2.01502353e-01]] ... [[ 4.61662197e+00 -6.84811258e+00 4.76936579e+00 ... 1.11387148e+01 5.23816013e+00 -9.13490951e-01] [ 2.87393856e+00 1.62102871e+01 1.63416290e+01 ... -1.07038498e+01 3.78820539e-01 -4.31566143e+00] [ 5.13099241e+00 -1.00201142e+00 -1.67162609e+01 ... -6.06535530e+00 -1.11109695e+01 -3.48039174e+00] ... [-5.21850726e-03 6.19689798e+00 -1.26387453e+01 ... -8.40884972e+00 -3.50145817e+00 -2.04852467e+01] [ 5.42091036e+00 -6.24045134e+00 2.17909050e+00 ... 7.67664194e+00 -1.20351238e+01 1.32850313e+00] [ 6.90888977e+00 5.88855076e+00 6.91793299e+00 ... 1.63653266e+00 -7.84971833e-01 1.27390671e+00]] [[-1.77465832e+00 -8.43407822e+00 1.91103935e-01 ... 2.94683194e+00 -1.32911253e+01 3.01727319e+00] [ 4.48351383e+00 3.90475202e+00 4.03854990e+00 ... 8.26324940e+00 -1.17229595e+01 5.13837099e+00] [ 4.12818336e+00 -5.77989817e+00 -7.85517359e+00 ... -1.32822299e+00 8.08268642e+00 8.51512551e-01] ... [-3.11064076e+00 9.48500633e+00 8.04905796e+00 ... 8.68917274e+00 4.16610241e+00 -7.58011675e+00] [ 2.87405705e+00 6.63606024e+00 -1.03351784e+01 ... -5.68221378e+00 -7.10295248e+00 4.47358561e+00] [ 9.62095201e-01 1.18001270e+01 -3.62954354e+00 ... 1.21119013e+01 -1.58492827e+00 2.63474369e+00]] [[-1.60520875e+00 2.10518622e+00 -3.68966274e-02 ... -1.95865631e+01 3.68293715e+00 4.42294073e+00] [ 2.82825279e+00 -4.59482479e+00 7.05941391e+00 ... 7.98625529e-01 -8.00328445e+00 9.88494158e-01] [-6.05554163e-01 3.16314220e+00 -1.09232032e+00 ... 9.28945351e+00 5.80975199e+00 -1.95285034e+00] ... 
[-7.22749853e+00 -6.87552595e+00 -1.73184514e+00 ... -5.29494572e+00 -8.71921730e+00 -1.58572979e+01] [ 9.14692020e+00 -5.41877508e-01 5.99509048e+00 ... 8.06353241e-02 3.05047846e+00 1.39545784e+01] [-6.09720469e+00 -5.39618397e+00 -5.26093197e+00 ... 1.09882212e+01 -4.35497189e+00 6.54528081e-01]]]] [[[[-8.50163746e+00 3.74856162e+00 -1.65591497e-04 ... 4.56013966e+00 1.21184123e+00 -1.59690297e+00] [-1.62972260e+00 5.43058777e+00 9.76154423e+00 ... 4.78680563e+00 2.34536457e+00 7.18690038e-01] [ 9.44966733e-01 -1.21140175e+01 3.33618331e+00 ... -1.16753407e+01 -3.60527372e+00 2.05019450e+00] ... [-1.43904364e+00 2.25117946e+00 -6.91520882e+00 ... 2.26492286e+00 -3.70205474e+00 4.36074114e+00] [ 2.73295093e+00 7.68447399e-01 -4.62337017e+00 ... -1.18105078e+01 -3.00549173e+00 -1.20108242e+01] [ 2.96994877e+00 -4.20944309e+00 5.17156553e+00 ... 4.82473898e+00 -5.72959948e+00 2.43854070e+00]] [[ 6.07189512e+00 5.01738167e+00 6.46971762e-01 ... -2.01519928e+01 -3.58198643e+00 -6.02741814e+00] [-7.80989587e-01 -7.10302114e-01 -8.34922791e+00 ... 1.14619970e+01 7.24710846e+00 1.97799873e+00] [-5.39002705e+00 -7.99172211e+00 -1.70750504e+01 ... -9.63972569e+00 1.86407013e+01 2.63219333e+00] ... [ 9.04726982e+00 3.25787520e+00 -7.84516096e-01 ... 3.55337548e+00 4.43896055e+00 1.23097639e+01] [-5.18324995e+00 -4.93372393e+00 6.50338125e+00 ... -2.35890555e+00 -9.91040611e+00 1.31100111e+01] [ 5.75868940e+00 5.64993334e+00 -3.89345527e+00 ... 3.28846598e+00 -4.60968351e+00 3.15881419e+00]] [[ 6.73387671e+00 -9.14519668e-01 -8.07622433e-01 ... 1.33468583e-01 -5.79782438e+00 1.05309553e+01] [ 1.97342646e+00 1.08130512e+01 6.06644678e+00 ... 2.39412937e+01 -1.54240599e+01 -1.87171295e-01] [-4.98081875e+00 -6.46024418e+00 -1.43186712e+01 ... 2.54720631e+01 1.63476670e+00 8.81416893e+00] ... [ 1.05072660e+01 4.14017551e-02 -7.41734743e+00 ... -1.03467588e+01 2.44029164e+00 1.68767834e+00] [ 6.47248983e-01 -7.55722809e+00 -4.35645485e+00 ... 
-1.09948425e+01 1.38196478e+01 -5.19813871e+00] [ 1.63644767e+00 7.94533539e+00 -8.90108013e+00 ... 6.29711962e+00 -1.17984381e+01 4.29816914e+00]] ... [[ 3.84202480e+00 4.44931173e+00 -4.59312057e+00 ... 1.03149195e+01 -3.86426538e-01 -1.09272356e+01] [ 7.87032366e-01 -6.42268610e+00 1.15180051e+00 ... 8.70350266e+00 -8.67883086e-01 8.03343010e+00] [ 4.31568766e+00 1.30332270e+01 2.40755224e+00 ... 1.46935129e+00 -1.40436630e+01 1.11033125e+01] ... [ 5.53169823e+00 8.57809734e+00 -9.16316605e+00 ... -6.58714890e-01 6.06189775e+00 7.79242706e+00] [-9.33465004e+00 -1.15566266e+00 2.61557961e+00 ... -2.74831510e+00 7.08365536e+00 -6.14941931e+00] [-1.94113743e+00 5.95490098e-01 -1.02524424e+01 ... 8.68324101e-01 6.21227789e+00 -1.14561377e+01]] [[-6.85957670e-01 2.69119358e+00 -3.00783485e-01 ... -1.90829420e+00 -9.90390682e+00 -3.61516148e-01] [-2.51530409e-01 -5.86580944e+00 8.75312710e+00 ... 1.00834484e+01 8.10232162e-01 -3.85976863e+00] [ 2.56257105e+00 -4.20963573e+00 9.25269699e+00 ... -1.63032457e-01 8.88674545e+00 -5.97764921e+00] ... [-4.34380579e+00 -3.29942131e+00 3.02329254e+00 ... -1.10156851e+01 3.43132663e+00 -2.13345695e+00] [ 6.69517183e+00 -5.02208233e+00 1.97226238e+00 ... 9.02900982e+00 1.25143242e+01 7.96337605e+00] [-1.15575194e+00 -6.03590155e+00 -2.27540445e+00 ... -8.36651611e+00 -5.70520020e+00 6.09243572e-01]] [[ 3.34884477e+00 -9.26404834e-01 -6.53996229e+00 ... -3.36240458e+00 -2.79437876e+00 -4.49537849e+00] [-5.04930544e+00 1.45390856e+00 -4.05038977e+00 ... 1.46499195e+01 3.59727830e-01 1.04631348e+01] [ 1.07901466e+00 1.09875882e+00 9.92987633e+00 ... -2.13994193e+00 -1.70138645e+01 5.33589268e+00] ... [ 1.03541481e+00 1.47979221e+01 3.50028300e+00 ... 2.13811159e+00 -2.01276803e+00 -8.44489956e+00] [-6.04686689e+00 -1.78395033e+00 -6.70951796e+00 ... 5.59378815e+00 1.08071012e+01 1.09995413e+00] [-3.88136196e+00 -1.76732671e+00 -1.01525955e+01 ... 
-4.47026205e+00 -2.35435438e+00 -5.23625469e+00]]]]]; ov_res: [[[[[ 5.42741776e-01 -7.62409449e+00 9.25206304e-01 ... 4.43499470e+00 -1.94793975e+00 6.16231084e-01] [-4.91196774e-02 -7.06900978e+00 -9.53208685e-01 ... -3.26815319e+00 -9.39316940e+00 1.66491759e+00] [-1.42206454e+00 -7.62910843e+00 6.51129484e+00 ... 5.06588602e+00 -9.15186024e+00 -5.95365620e+00] ... [-2.24157381e+00 -3.75116277e+00 -1.48330712e+00 ... 6.82259035e+00 -1.11887479e+00 1.07095766e+01] [ 6.40415192e+00 5.39849854e+00 5.06010723e+00 ... 7.37700033e+00 8.70308590e+00 -8.19375896e+00] [-7.65401065e-01 -6.13416016e-01 -1.25345697e+01 ... -1.13522739e+01 1.11083031e+01 4.22971821e+00]] [[-3.79321074e+00 -1.29356158e+00 5.65086842e+00 ... 1.76455808e+00 7.77830124e+00 3.88618171e-01] [ 1.03071394e+01 2.88795543e+00 3.27044892e+00 ... 7.45586777e+00 -1.70694809e+01 8.98446751e+00] [-3.23021460e+00 1.45408564e+01 -1.21290855e+01 ... -4.36974049e+00 8.31999969e+00 -3.96386003e+00] ... [ 1.40752184e+00 -1.12739143e+01 6.17473412e+00 ... -9.61105537e+00 1.05019779e+01 1.15906153e+01] [-1.51933622e+00 8.74870300e+00 4.86346817e+00 ... -4.35718727e+00 -7.64903116e+00 -1.12275147e+00] [ 1.54056919e+00 -7.01145363e+00 9.98596764e+00 ... 1.05516396e+01 -5.10973454e-01 3.54311872e+00]] [[-2.86623001e-01 2.16255951e+00 1.13137150e+00 ... 7.39985847e+00 6.53563917e-01 -7.90167952e+00] [ 6.14401865e+00 -4.43842506e+00 6.63971710e+00 ... -5.60128355e+00 1.22861338e+00 -4.02632284e+00] [ 1.66375697e+00 3.84517407e+00 -6.72032908e-02 ... 1.76711094e+00 1.33163393e+00 -1.72547340e+00] ... [ 8.13765907e+00 5.75024462e+00 3.47970629e+00 ... 1.34037981e+01 -1.68883055e-01 1.41830003e+00] [-5.42338133e+00 1.07884302e+01 -1.46946898e+01 ... -5.48605061e+00 9.94279861e+00 3.30724812e+00] [ 4.91756022e-01 2.02381611e+00 6.48837900e+00 ... 5.70696974e+00 -1.22669258e+01 -2.01502353e-01]] ... [[ 4.61662197e+00 -6.84811258e+00 4.76936579e+00 ... 
1.11387148e+01 5.23816013e+00 -9.13490951e-01] [ 2.87393856e+00 1.62102871e+01 1.63416290e+01 ... -1.07038498e+01 3.78820539e-01 -4.31566143e+00] [ 5.13099241e+00 -1.00201142e+00 -1.67162609e+01 ... -6.06535530e+00 -1.11109695e+01 -3.48039174e+00] ... [-5.21850726e-03 6.19689798e+00 -1.26387453e+01 ... -8.40884972e+00 -3.50145817e+00 -2.04852467e+01] [ 5.42091036e+00 -6.24045134e+00 2.17909050e+00 ... 7.67664194e+00 -1.20351238e+01 1.32850313e+00] [ 6.90888977e+00 5.88855076e+00 6.91793299e+00 ... 1.63653266e+00 -7.84971833e-01 1.27390671e+00]] [[-1.77465832e+00 -8.43407822e+00 1.91103935e-01 ... 2.94683194e+00 -1.32911253e+01 3.01727319e+00] [ 4.48351383e+00 3.90475202e+00 4.03854990e+00 ... 8.26324940e+00 -1.17229595e+01 5.13837099e+00] [ 4.12818336e+00 -5.77989817e+00 -7.85517359e+00 ... -1.32822299e+00 8.08268642e+00 8.51512551e-01] ... [-3.11064076e+00 9.48500633e+00 8.04905796e+00 ... 8.68917274e+00 4.16610241e+00 -7.58011675e+00] [ 2.87405705e+00 6.63606024e+00 -1.03351784e+01 ... -5.68221378e+00 -7.10295248e+00 4.47358561e+00] [ 9.62095201e-01 1.18001270e+01 -3.62954354e+00 ... 1.21119013e+01 -1.58492827e+00 2.63474369e+00]] [[-1.60520875e+00 2.10518622e+00 -3.68966274e-02 ... -1.95865631e+01 3.68293715e+00 4.42294073e+00] [ 2.82825279e+00 -4.59482479e+00 7.05941391e+00 ... 7.98625529e-01 -8.00328445e+00 9.88494158e-01] [-6.05554163e-01 3.16314220e+00 -1.09232032e+00 ... 9.28945351e+00 5.80975199e+00 -1.95285034e+00] ... [-7.22749853e+00 -6.87552595e+00 -1.73184514e+00 ... -5.29494572e+00 -8.71921730e+00 -1.58572979e+01] [ 9.14692020e+00 -5.41877508e-01 5.99509048e+00 ... 8.06353241e-02 3.05047846e+00 1.39545784e+01] [-6.09720469e+00 -5.39618397e+00 -5.26093197e+00 ... 1.09882212e+01 -4.35497189e+00 6.54528081e-01]]]] [[[[-8.50163746e+00 3.74856162e+00 -1.65591497e-04 ... 4.56013966e+00 1.21184123e+00 -1.59690297e+00] [-1.62972260e+00 5.43058777e+00 9.76154423e+00 ... 
4.78680563e+00 2.34536457e+00 7.18690038e-01] [ 9.44966733e-01 -1.21140175e+01 3.33618331e+00 ... -1.16753407e+01 -3.60527372e+00 2.05019450e+00] ... [-1.43904364e+00 2.25117946e+00 -6.91520882e+00 ... 2.26492286e+00 -3.70205474e+00 4.36074114e+00] [ 2.73295093e+00 7.68447399e-01 -4.62337017e+00 ... -1.18105078e+01 -3.00549173e+00 -1.20108242e+01] [ 2.96994877e+00 -4.20944309e+00 5.17156553e+00 ... 4.82473898e+00 -5.72959948e+00 2.43854070e+00]] [[ 6.07189512e+00 5.01738167e+00 6.46971762e-01 ... -2.01519928e+01 -3.58198643e+00 -6.02741814e+00] [-7.80989587e-01 -7.10302114e-01 -8.34922791e+00 ... 1.14619970e+01 7.24710846e+00 1.97799873e+00] [-5.39002705e+00 -7.99172211e+00 -1.70750504e+01 ... -9.63972569e+00 1.86407013e+01 2.63219333e+00] ... [ 9.04726982e+00 3.25787520e+00 -7.84516096e-01 ... 3.55337548e+00 4.43896055e+00 1.23097639e+01] [-5.18324995e+00 -4.93372393e+00 6.50338125e+00 ... -2.35890555e+00 -9.91040611e+00 1.31100111e+01] [ 5.75868940e+00 5.64993334e+00 -3.89345527e+00 ... 3.28846598e+00 -4.60968351e+00 3.15881419e+00]] [[ 6.73387671e+00 -9.14519668e-01 -8.07622433e-01 ... 1.33468583e-01 -5.79782438e+00 1.05309553e+01] [ 1.97342646e+00 1.08130512e+01 6.06644678e+00 ... 2.39412937e+01 -1.54240599e+01 -1.87171295e-01] [-4.98081875e+00 -6.46024418e+00 -1.43186712e+01 ... 2.54720631e+01 1.63476670e+00 8.81416893e+00] ... [ 1.05072660e+01 4.14017551e-02 -7.41734743e+00 ... -1.03467588e+01 2.44029164e+00 1.68767834e+00] [ 6.47248983e-01 -7.55722809e+00 -4.35645485e+00 ... -1.09948425e+01 1.38196478e+01 -5.19813871e+00] [ 1.63644767e+00 7.94533539e+00 -8.90108013e+00 ... 6.29711962e+00 -1.17984381e+01 4.29816914e+00]] ... [[ 3.84202480e+00 4.44931173e+00 -4.59312057e+00 ... 1.03149195e+01 -3.86426538e-01 -1.09272356e+01] [ 7.87032366e-01 -6.42268610e+00 1.15180051e+00 ... 8.70350266e+00 -8.67883086e-01 8.03343010e+00] [ 4.31568766e+00 1.30332270e+01 2.40755224e+00 ... 1.46935129e+00 -1.40436630e+01 1.11033125e+01] ... 
[ 5.53169823e+00 8.57809734e+00 -9.16316605e+00 ... -6.58714890e-01 6.06189775e+00 7.79242706e+00] [-9.33465004e+00 -1.15566266e+00 2.61557961e+00 ... -2.74831510e+00 7.08365536e+00 -6.14941931e+00] [-1.94113743e+00 5.95490098e-01 -1.02524424e+01 ... 8.68324101e-01 6.21227789e+00 -1.14561377e+01]] [[-6.85957670e-01 2.69119358e+00 -3.00783485e-01 ... -1.90829420e+00 -9.90390682e+00 -3.61516148e-01] [-2.51530409e-01 -5.86580944e+00 8.75312710e+00 ... 1.00834484e+01 8.10232162e-01 -3.85976863e+00] [ 2.56257105e+00 -4.20963573e+00 9.25269699e+00 ... -1.63032457e-01 8.88674545e+00 -5.97764921e+00] ... [-4.34380579e+00 -3.29942131e+00 3.02329254e+00 ... -1.10156851e+01 3.43132663e+00 -2.13345695e+00] [ 6.69517183e+00 -5.02208233e+00 1.97226238e+00 ... 9.02900982e+00 1.25143242e+01 7.96337605e+00] [-1.15575194e+00 -6.03590155e+00 -2.27540445e+00 ... -8.36651611e+00 -5.70520020e+00 6.09243572e-01]] [[ 3.34884477e+00 -9.26404834e-01 -6.53996229e+00 ... -3.36240458e+00 -2.79437876e+00 -4.49537849e+00] [-5.04930544e+00 1.45390856e+00 -4.05038977e+00 ... 1.46499195e+01 3.59727830e-01 1.04631348e+01] [ 1.07901466e+00 1.09875882e+00 9.92987633e+00 ... -2.13994193e+00 -1.70138645e+01 5.33589268e+00] ... [ 1.03541481e+00 1.47979221e+01 3.50028300e+00 ... 2.13811159e+00 -2.01276803e+00 -8.44489956e+00] [-6.04686689e+00 -1.78395033e+00 -6.70951796e+00 ... 5.59378815e+00 1.08071012e+01 1.09995413e+00] [-3.88136196e+00 -1.76732671e+00 -1.01525955e+01 ... -4.47026205e+00 -2.35435438e+00 -5.23625469e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 0, 'dilations': 2, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_504.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2, 2]]() %3 : int[] = prim::Constant[value=[0, 0, 0]]() %4 : int[] = prim::Constant[value=[1, 1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %4, %3, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%8) fw_re: [[[[[-8.12845993e+00 -1.56515121e+01 -1.17026405e+01 ... -7.03215361e+00 1.67047577e+01 2.42516727e+01] [-1.20749250e-01 -1.04108553e+01 -1.58016043e+01 ... 3.21078253e+00 -8.28651428e+00 -9.99447536e+00] [ 1.84778347e+01 -1.07115784e+01 1.00176697e+01 ... 8.85141563e+00 1.29320498e+01 -8.79907894e+00] ... [ 9.15984726e+00 5.13623893e-01 1.69843044e+01 ... 6.74162912e+00 9.18414593e+00 -9.65589619e+00] [ 1.18158674e+01 6.24404430e+00 -1.61574631e+01 ... -6.94238901e+00 -1.19091439e+00 -4.59035248e-01] [ 5.73593473e+00 1.35280190e+01 3.66400242e+00 ... -6.60551929e+00 2.76150286e-01 -7.13848174e-01]] [[ 6.97758734e-01 -1.04818611e+01 -9.10104561e+00 ... -8.37179089e+00 -1.64735184e+01 9.76315308e+00] [-9.86333466e+00 -4.30650139e+00 -1.38659477e+01 ... -4.27379656e+00 8.20139217e+00 -2.68258393e-01] [-1.06338167e+01 -1.75905609e+01 -4.75990772e+00 ... -1.88154564e+01 -2.24216938e+01 1.00484359e+00] ... [ 3.98809409e+00 -5.16001642e-01 -2.24444941e-01 ... 1.17469053e+01 -1.13387947e+01 6.14230204e+00] [-1.38119090e+00 8.07114792e+00 -1.12930402e-01 ... -5.50007582e+00 -6.88809824e+00 2.00605717e+01] [ 4.22381020e+00 -2.39875603e+00 -2.39426780e+00 ... 2.51243711e+00 2.80381441e+00 1.11140184e+01]] [[-1.00934105e+01 -1.41097183e+01 1.09313574e+01 ... 
1.98029089e+00 -1.11987782e+00 9.52680826e-01] [ 2.47894257e-01 5.84185934e+00 9.39672756e+00 ... 1.50656617e+00 -1.74377861e+01 2.09298706e+01] [-1.64233708e+00 -4.63842583e+00 5.11629105e+00 ... -2.35662246e+00 2.02320366e+01 6.97191143e+00] ... [ 5.73546124e+00 4.80195570e+00 -1.15693264e+01 ... -1.32624950e+01 5.24005508e+00 1.21141195e+01] [ 1.64821219e+00 5.68077040e+00 -3.39582968e+00 ... 1.49997702e+01 1.17458334e+01 9.35670567e+00] [ 7.43064260e+00 -1.00275211e+01 -2.21958027e+01 ... 7.13145494e+00 2.90260053e+00 6.60803509e+00]] ... [[ 4.15935373e+00 -2.39053345e+01 -9.69866931e-01 ... -1.02869339e+01 1.85526764e+00 1.94920075e+00] [-3.48946786e+00 -5.85236216e+00 -5.27088165e+00 ... 2.33532310e+00 -7.84945631e+00 -4.53437150e-01] [-2.96078897e+00 3.54313552e-01 1.83216584e+00 ... -1.30122814e+01 -8.63388538e+00 1.13725166e+01] ... [-1.13864813e+01 -2.28540936e+01 1.03889227e+00 ... -1.24521685e+01 4.30202818e+00 -8.84314728e+00] [-8.83389378e+00 -1.78184395e+01 -1.09105740e+01 ... 2.55283546e+00 -1.02106466e+01 -6.54185295e+00] [ 4.30764347e-01 7.51205444e+00 -9.52829456e+00 ... -3.37358284e+00 7.87909555e+00 -1.86812096e+01]] [[ 1.92177200e+00 -1.22501745e+01 -2.60051975e+01 ... 6.47008276e+00 -5.16026258e+00 9.05403423e+00] [ 1.07848301e+01 -1.41772795e+00 -1.65417366e+01 ... 2.46656299e+00 4.85029316e+00 -1.84645426e+00] [-1.26514864e+01 -1.79992409e+01 -1.42021596e+00 ... -1.61232243e+01 -1.33155137e-01 -1.25757277e+00] ... [ 1.48849993e+01 1.23636341e+01 -4.62980604e+00 ... 5.18963146e+00 1.01194410e+01 2.22625047e-01] [-2.92199755e+00 7.04681206e+00 1.55456841e+00 ... 4.64717197e+00 -1.66967964e+01 -2.52796245e+00] [-1.40772939e+00 -3.31734538e+00 -1.53361683e+01 ... -1.97064245e+00 -8.06161308e+00 5.29998350e+00]] [[-5.58246017e-01 8.30230331e+00 2.79766250e+00 ... -6.88733435e+00 2.75250649e+00 -4.33713913e+00] [ 2.52600408e+00 -7.39509773e+00 2.01623797e+00 ... 
2.35492172e+01 1.08436430e+00 5.96866429e-01] [-7.56682444e+00 4.50427294e+00 -6.12658858e-01 ... 2.28717709e+00 2.73149669e-01 -2.81362653e+00] ... [ 1.88490639e+01 -9.93935013e+00 1.00299320e+01 ... -5.84000874e+00 -6.28590524e-01 7.43184900e+00] [-9.07858086e+00 1.08634253e+01 9.88260460e+00 ... 1.20942478e+01 -3.92582631e+00 1.38305211e+00] [ 2.36478710e+00 -2.19735947e+01 -1.14314117e+01 ... 1.29202108e+01 2.15367615e-01 -2.64121699e+00]]]] [[[[ 1.28258789e+00 1.23502350e+01 5.14004517e+00 ... -1.13511114e+01 -4.80706835e+00 -2.27419186e+00] [-1.87588406e+01 -4.13801241e+00 5.74871349e+00 ... -1.06524296e+01 1.54498138e+01 1.41080437e+01] [ 1.12184973e+01 -1.41489935e+01 -1.21333103e+01 ... 2.11297822e+00 -9.07422733e+00 5.64194775e+00] ... [-8.11138391e-01 3.35137582e+00 2.67410126e+01 ... -3.89017284e-01 -9.00129700e+00 -7.36877489e+00] [ 2.83224964e+00 -4.24614763e+00 -2.74790812e+00 ... 7.89947796e+00 -1.59015548e+00 -1.57619724e+01] [-1.04681292e+01 -2.21359921e+00 2.43585911e+01 ... 7.91402006e+00 5.90374184e+00 3.12647641e-01]] [[-1.97952259e+00 2.25960875e+00 4.40200377e+00 ... 8.31099319e+00 -6.91012323e-01 2.07448435e+00] [-9.95301437e+00 -8.09312534e+00 2.75530386e+00 ... 5.83135128e+00 -5.46129990e+00 1.05233450e+01] [ 1.55185759e+00 -1.81836205e+01 1.51779137e+01 ... 3.60835373e-01 -1.87285948e+00 4.20650005e+00] ... [-8.37714481e+00 7.50236273e+00 7.69301891e-01 ... 2.03264460e-01 1.39311771e+01 7.36334994e-02] [ 2.14415283e+01 4.26003218e+00 8.98045158e+00 ... 1.40041046e+01 -1.18746357e+01 1.09247637e+00] [ 5.71253443e+00 4.49640036e-01 3.43813807e-01 ... -1.26589031e+01 -2.21115613e+00 3.53415632e+00]] [[-5.87070894e+00 5.64882088e+00 1.11698401e+00 ... -4.53827095e+00 2.45440693e+01 7.33225584e+00] [ 7.57943726e+00 1.33901300e+01 3.90897679e+00 ... -3.67217124e-01 2.39985991e+00 8.56732082e+00] [-7.65935540e-01 -8.48246765e+00 8.87218475e+00 ... 3.59684443e+00 -4.00457573e+00 -8.01117992e+00] ... 
[-1.61262488e+00 -1.02497282e+01 -9.46300220e+00 ... 3.36204624e+00 1.03780994e+01 -3.77692294e+00] [-3.47192073e+00 -1.32951546e+00 -3.46461034e+00 ... 3.21348369e-01 4.57314491e+00 -6.28812790e+00] [ 9.50703526e+00 7.92552471e+00 -1.47491684e+01 ... -1.83390350e+01 2.68489552e+00 6.26476240e+00]] ... [[ 7.03989077e+00 -8.99844933e+00 -1.02047949e+01 ... -5.14952779e-01 -7.54280519e+00 2.26091824e+01] [-1.80510139e+01 1.01572762e+01 6.34963942e+00 ... -1.37768450e+01 5.29629374e+00 -1.74135704e+01] [ 8.30240440e+00 -4.85982895e+00 -1.76437569e+01 ... 9.37154961e+00 -1.30900412e+01 2.74028540e-01] ... [-5.00637722e+00 1.80547714e+01 -2.64530230e+00 ... -1.68526134e+01 -1.21300497e+01 1.69961662e+01] [ 2.66160280e-01 5.25260592e+00 2.57937860e+00 ... -1.23733921e+01 2.01713872e+00 8.11106873e+00] [ 1.72397060e+01 1.31415243e+01 -4.67459106e+00 ... 1.11494694e+01 6.45013380e+00 1.03630562e+01]] [[ 5.44155025e+00 4.42591906e+00 1.44047928e+01 ... -5.26506758e+00 -3.41632754e-01 7.84096813e+00] [-1.40093689e+01 4.47857571e+00 -8.64354670e-01 ... -3.49679613e+00 -4.03848553e+00 2.54431391e+00] [-1.72810531e+00 4.40077114e+00 -1.11065092e+01 ... 9.62549305e+00 -8.23832035e+00 3.49061799e+00] ... [ 1.85349541e+01 -1.98636417e+01 -3.80036092e+00 ... 1.12695999e+01 4.76421738e+00 2.31924653e+00] [-1.39039841e+01 -2.38188222e-01 -7.22462177e+00 ... 1.59288969e+01 -1.78090464e-02 -1.06002367e+00] [-1.20202341e+01 1.11637573e+01 -4.20193672e+00 ... -3.41902447e+00 2.77781906e+01 3.76442432e+00]] [[-2.73213792e+00 -2.22167563e+00 -1.29803019e+01 ... 1.49752140e+01 -1.15558739e+01 2.39972649e+01] [ 1.15886335e+01 1.75203133e+01 -1.12806702e+01 ... 1.85493755e+00 -8.53802681e+00 -8.64700317e+00] [ 7.76651859e+00 1.02866564e+01 4.83744219e-02 ... 6.19679260e+00 -8.07203770e+00 8.26761186e-01] ... [-1.17092009e+01 -7.89241171e+00 2.22761841e+01 ... 3.51663513e+01 -4.07756329e+00 8.48434162e+00] [ 3.73672175e+00 1.44940186e+01 1.68572006e+01 ... 
5.95127010e+00 -7.03366423e+00 3.83488083e+00] [-1.72657070e+01 -5.58488941e+00 -5.95827150e+00 ... -8.21963692e+00 -6.37120247e+00 -1.90351105e+01]]]]]; ov_res: [[[[[-8.12845993e+00 -1.56515121e+01 -1.17026405e+01 ... -7.03215361e+00 1.67047577e+01 2.42516727e+01] [-1.20749250e-01 -1.04108553e+01 -1.58016043e+01 ... 3.21078253e+00 -8.28651428e+00 -9.99447536e+00] [ 1.84778347e+01 -1.07115784e+01 1.00176697e+01 ... 8.85141563e+00 1.29320498e+01 -8.79907894e+00] ... [ 9.15984726e+00 5.13623893e-01 1.69843044e+01 ... 6.74162912e+00 9.18414593e+00 -9.65589619e+00] [ 1.18158674e+01 6.24404430e+00 -1.61574631e+01 ... -6.94238901e+00 -1.19091439e+00 -4.59035248e-01] [ 5.73593473e+00 1.35280190e+01 3.66400242e+00 ... -6.60551929e+00 2.76150286e-01 -7.13848174e-01]] [[ 6.97758734e-01 -1.04818611e+01 -9.10104561e+00 ... -8.37179089e+00 -1.64735184e+01 9.76315308e+00] [-9.86333466e+00 -4.30650139e+00 -1.38659477e+01 ... -4.27379656e+00 8.20139217e+00 -2.68258393e-01] [-1.06338167e+01 -1.75905609e+01 -4.75990772e+00 ... -1.88154564e+01 -2.24216938e+01 1.00484359e+00] ... [ 3.98809409e+00 -5.16001642e-01 -2.24444941e-01 ... 1.17469053e+01 -1.13387947e+01 6.14230204e+00] [-1.38119090e+00 8.07114792e+00 -1.12930402e-01 ... -5.50007582e+00 -6.88809824e+00 2.00605717e+01] [ 4.22381020e+00 -2.39875603e+00 -2.39426780e+00 ... 2.51243711e+00 2.80381441e+00 1.11140184e+01]] [[-1.00934105e+01 -1.41097183e+01 1.09313574e+01 ... 1.98029089e+00 -1.11987782e+00 9.52680826e-01] [ 2.47894257e-01 5.84185934e+00 9.39672756e+00 ... 1.50656617e+00 -1.74377861e+01 2.09298706e+01] [-1.64233708e+00 -4.63842583e+00 5.11629105e+00 ... -2.35662246e+00 2.02320366e+01 6.97191143e+00] ... [ 5.73546124e+00 4.80195570e+00 -1.15693264e+01 ... -1.32624950e+01 5.24005508e+00 1.21141195e+01] [ 1.64821219e+00 5.68077040e+00 -3.39582968e+00 ... 1.49997702e+01 1.17458334e+01 9.35670567e+00] [ 7.43064260e+00 -1.00275211e+01 -2.21958027e+01 ... 7.13145494e+00 2.90260053e+00 6.60803509e+00]] ... 
[[ 4.15935373e+00 -2.39053345e+01 -9.69866931e-01 ... -1.02869339e+01 1.85526764e+00 1.94920075e+00] [-3.48946786e+00 -5.85236216e+00 -5.27088165e+00 ... 2.33532310e+00 -7.84945631e+00 -4.53437150e-01] [-2.96078897e+00 3.54313552e-01 1.83216584e+00 ... -1.30122814e+01 -8.63388538e+00 1.13725166e+01] ... [-1.13864813e+01 -2.28540936e+01 1.03889227e+00 ... -1.24521685e+01 4.30202818e+00 -8.84314728e+00] [-8.83389378e+00 -1.78184395e+01 -1.09105740e+01 ... 2.55283546e+00 -1.02106466e+01 -6.54185295e+00] [ 4.30764347e-01 7.51205444e+00 -9.52829456e+00 ... -3.37358284e+00 7.87909555e+00 -1.86812096e+01]] [[ 1.92177200e+00 -1.22501745e+01 -2.60051975e+01 ... 6.47008276e+00 -5.16026258e+00 9.05403423e+00] [ 1.07848301e+01 -1.41772795e+00 -1.65417366e+01 ... 2.46656299e+00 4.85029316e+00 -1.84645426e+00] [-1.26514864e+01 -1.79992409e+01 -1.42021596e+00 ... -1.61232243e+01 -1.33155137e-01 -1.25757277e+00] ... [ 1.48849993e+01 1.23636341e+01 -4.62980604e+00 ... 5.18963146e+00 1.01194410e+01 2.22625047e-01] [-2.92199755e+00 7.04681206e+00 1.55456841e+00 ... 4.64717197e+00 -1.66967964e+01 -2.52796245e+00] [-1.40772939e+00 -3.31734538e+00 -1.53361683e+01 ... -1.97064245e+00 -8.06161308e+00 5.29998350e+00]] [[-5.58246017e-01 8.30230331e+00 2.79766250e+00 ... -6.88733435e+00 2.75250649e+00 -4.33713913e+00] [ 2.52600408e+00 -7.39509773e+00 2.01623797e+00 ... 2.35492172e+01 1.08436430e+00 5.96866429e-01] [-7.56682444e+00 4.50427294e+00 -6.12658858e-01 ... 2.28717709e+00 2.73149669e-01 -2.81362653e+00] ... [ 1.88490639e+01 -9.93935013e+00 1.00299320e+01 ... -5.84000874e+00 -6.28590524e-01 7.43184900e+00] [-9.07858086e+00 1.08634253e+01 9.88260460e+00 ... 1.20942478e+01 -3.92582631e+00 1.38305211e+00] [ 2.36478710e+00 -2.19735947e+01 -1.14314117e+01 ... 1.29202108e+01 2.15367615e-01 -2.64121699e+00]]]] [[[[ 1.28258789e+00 1.23502350e+01 5.14004517e+00 ... -1.13511114e+01 -4.80706835e+00 -2.27419186e+00] [-1.87588406e+01 -4.13801241e+00 5.74871349e+00 ... 
-1.06524296e+01 1.54498138e+01 1.41080437e+01] [ 1.12184973e+01 -1.41489935e+01 -1.21333103e+01 ... 2.11297822e+00 -9.07422733e+00 5.64194775e+00] ... [-8.11138391e-01 3.35137582e+00 2.67410126e+01 ... -3.89017284e-01 -9.00129700e+00 -7.36877489e+00] [ 2.83224964e+00 -4.24614763e+00 -2.74790812e+00 ... 7.89947796e+00 -1.59015548e+00 -1.57619724e+01] [-1.04681292e+01 -2.21359921e+00 2.43585911e+01 ... 7.91402006e+00 5.90374184e+00 3.12647641e-01]] [[-1.97952259e+00 2.25960875e+00 4.40200377e+00 ... 8.31099319e+00 -6.91012323e-01 2.07448435e+00] [-9.95301437e+00 -8.09312534e+00 2.75530386e+00 ... 5.83135128e+00 -5.46129990e+00 1.05233450e+01] [ 1.55185759e+00 -1.81836205e+01 1.51779137e+01 ... 3.60835373e-01 -1.87285948e+00 4.20650005e+00] ... [-8.37714481e+00 7.50236273e+00 7.69301891e-01 ... 2.03264460e-01 1.39311771e+01 7.36334994e-02] [ 2.14415283e+01 4.26003218e+00 8.98045158e+00 ... 1.40041046e+01 -1.18746357e+01 1.09247637e+00] [ 5.71253443e+00 4.49640036e-01 3.43813807e-01 ... -1.26589031e+01 -2.21115613e+00 3.53415632e+00]] [[-5.87070894e+00 5.64882088e+00 1.11698401e+00 ... -4.53827095e+00 2.45440693e+01 7.33225584e+00] [ 7.57943726e+00 1.33901300e+01 3.90897679e+00 ... -3.67217124e-01 2.39985991e+00 8.56732082e+00] [-7.65935540e-01 -8.48246765e+00 8.87218475e+00 ... 3.59684443e+00 -4.00457573e+00 -8.01117992e+00] ... [-1.61262488e+00 -1.02497282e+01 -9.46300220e+00 ... 3.36204624e+00 1.03780994e+01 -3.77692294e+00] [-3.47192073e+00 -1.32951546e+00 -3.46461034e+00 ... 3.21348369e-01 4.57314491e+00 -6.28812790e+00] [ 9.50703526e+00 7.92552471e+00 -1.47491684e+01 ... -1.83390350e+01 2.68489552e+00 6.26476240e+00]] ... [[ 7.03989077e+00 -8.99844933e+00 -1.02047949e+01 ... -5.14952779e-01 -7.54280519e+00 2.26091824e+01] [-1.80510139e+01 1.01572762e+01 6.34963942e+00 ... -1.37768450e+01 5.29629374e+00 -1.74135704e+01] [ 8.30240440e+00 -4.85982895e+00 -1.76437569e+01 ... 9.37154961e+00 -1.30900412e+01 2.74028540e-01] ... 
[-5.00637722e+00 1.80547714e+01 -2.64530230e+00 ... -1.68526134e+01 -1.21300497e+01 1.69961662e+01] [ 2.66160280e-01 5.25260592e+00 2.57937860e+00 ... -1.23733921e+01 2.01713872e+00 8.11106873e+00] [ 1.72397060e+01 1.31415243e+01 -4.67459106e+00 ... 1.11494694e+01 6.45013380e+00 1.03630562e+01]] [[ 5.44155025e+00 4.42591906e+00 1.44047928e+01 ... -5.26506758e+00 -3.41632754e-01 7.84096813e+00] [-1.40093689e+01 4.47857571e+00 -8.64354670e-01 ... -3.49679613e+00 -4.03848553e+00 2.54431391e+00] [-1.72810531e+00 4.40077114e+00 -1.11065092e+01 ... 9.62549305e+00 -8.23832035e+00 3.49061799e+00] ... [ 1.85349541e+01 -1.98636417e+01 -3.80036092e+00 ... 1.12695999e+01 4.76421738e+00 2.31924653e+00] [-1.39039841e+01 -2.38188222e-01 -7.22462177e+00 ... 1.59288969e+01 -1.78090464e-02 -1.06002367e+00] [-1.20202341e+01 1.11637573e+01 -4.20193672e+00 ... -3.41902447e+00 2.77781906e+01 3.76442432e+00]] [[-2.73213792e+00 -2.22167563e+00 -1.29803019e+01 ... 1.49752140e+01 -1.15558739e+01 2.39972649e+01] [ 1.15886335e+01 1.75203133e+01 -1.12806702e+01 ... 1.85493755e+00 -8.53802681e+00 -8.64700317e+00] [ 7.76651859e+00 1.02866564e+01 4.83744219e-02 ... 6.19679260e+00 -8.07203770e+00 8.26761186e-01] ... [-1.17092009e+01 -7.89241171e+00 2.22761841e+01 ... 3.51663513e+01 -4.07756329e+00 8.48434162e+00] [ 3.73672175e+00 1.44940186e+01 1.68572006e+01 ... 5.95127010e+00 -7.03366423e+00 3.83488083e+00] [-1.72657070e+01 -5.58488941e+00 -5.95827150e+00 ... -8.21963692e+00 -6.37120247e+00 -1.90351105e+01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [0, 1, 0], 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_506.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 1, 0]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 9.89597291e-02 -1.96247840e+00 -6.73498774e+00 ... -3.04538178e+00 3.68159866e+00 3.57842970e+00] [-1.84553547e+01 -2.08886290e+00 -6.35196543e+00 ... -6.11658144e+00 -1.45732212e+01 8.98280907e+00] [ 8.87614822e+00 8.18371391e+00 -6.56620121e+00 ... -1.23672724e+01 -8.02785492e+00 -1.03593817e+01] ... [-1.12194242e+01 -4.48743391e+00 -1.36184001e+00 ... -3.63525295e+00 1.23958552e+00 1.85238111e+00] [ 6.16770124e+00 5.88587618e+00 -2.48432045e+01 ... -1.41151705e+01 1.13946228e+01 2.08920264e+00] [ 1.07062950e+01 1.66831589e+01 -1.14090526e+00 ... 1.83360070e-01 4.21739769e+00 -4.71848202e+00]] [[ 1.52994442e+01 -8.50543976e+00 -7.89086866e+00 ... 7.29766846e+00 -1.26439154e+00 7.37603426e+00] [-2.18126917e+00 -1.05093422e+01 1.24783583e+01 ... -3.17601728e+00 4.98858309e+00 -1.05960331e+01] [-9.92197454e-01 -5.85327911e+00 2.35618448e+00 ... 6.52699327e+00 -2.57491374e+00 1.02815456e+01] ... [ 2.29575276e+00 -3.36277533e+00 1.75932255e+01 ... -5.81526089e+00 1.79558945e+01 1.67414665e+01] [-9.14073467e+00 1.86612487e+00 5.29070187e+00 ... 3.67047834e+00 -5.50517273e+00 5.94465113e+00] [ 7.34761715e+00 6.26484394e+00 -6.26011801e+00 ... -4.63832289e-01 3.23174715e+00 -1.47536874e-01]] [[-1.31478930e+01 4.11509800e+00 -7.39875841e+00 ... 
-2.68732524e+00 -1.59708929e+00 -4.86747599e+00] [ 1.67280884e+01 6.90039015e+00 -1.48896861e+00 ... -4.13920105e-01 7.03753519e+00 -8.01581889e-02] [ 1.23421469e+01 3.51131558e+00 -1.11593800e+01 ... 6.92426586e+00 6.39749575e+00 -3.31556106e+00] ... [ 4.58822823e+00 1.16661186e+01 1.43570080e+01 ... -1.65343723e+01 6.82886362e-01 -9.86409950e+00] [ 8.81491089e+00 8.15400258e-02 -1.62563877e+01 ... 1.47487888e+01 6.88383758e-01 6.18900061e+00] [-6.19058132e+00 1.17454481e+01 9.41632366e+00 ... 1.26052580e+01 1.43244958e+00 -7.20885706e+00]] ... [[ 1.75760746e+01 4.12351274e+00 2.67642450e+00 ... 5.12944508e+00 -3.98953342e+00 -3.12764823e-01] [ 3.26529026e+00 5.85413599e+00 1.07425346e+01 ... -2.25051665e+00 -3.26817083e+00 9.29426765e+00] [-3.28528547e+00 -1.56378536e+01 -1.13806782e+01 ... -9.39856470e-01 1.14267361e+00 -2.17655492e+00] ... [ 9.66531467e+00 7.56883621e+00 -6.97185469e+00 ... -8.91703415e+00 2.23268723e+00 1.31178370e+01] [-8.75718689e+00 -1.14564810e+01 2.76895308e+00 ... -3.11993504e+00 -9.44110870e+00 4.97420120e+00] [ 1.42143564e+01 1.46815395e+00 -7.61735201e+00 ... 3.89967656e+00 -4.75464535e+00 -1.45754850e+00]] [[-1.85566926e+00 -1.02967939e+01 6.76283932e+00 ... 5.59230614e+00 9.04631042e+00 -1.73814917e+00] [-1.14709587e+01 1.29608595e+00 1.55520229e+01 ... -1.54997969e+01 -9.38064957e+00 -4.38969940e-01] [ 7.80861902e+00 -5.87201929e+00 -1.29401588e+01 ... -4.59689951e+00 -2.71944332e+00 1.24942188e+01] ... [ 1.08162870e+01 -9.32682323e+00 2.79913449e+00 ... -4.99072932e-02 -1.10534515e+01 -2.26744232e+01] [-3.27470875e+00 9.42687225e+00 1.05470467e+00 ... -7.72742510e+00 -1.87178612e+01 -2.14068546e+01] [-1.38552437e+01 -4.34731150e+00 4.95209074e+00 ... 1.11972418e+01 3.31306219e+00 1.71593018e+01]] [[ 1.48046747e-01 -6.07372236e+00 2.25249648e+00 ... 8.81998897e-01 6.38752937e-01 -4.33824253e+00] [ 3.69198775e+00 -1.58960521e-01 -8.41483784e+00 ... 
-9.27645969e+00 -8.49800777e+00 5.67290354e+00] [-8.06371307e+00 4.07659054e+00 -5.99557877e+00 ... -1.63436146e+01 7.81514168e+00 -1.92830963e+01] ... [ 1.38049583e+01 8.33897591e-01 6.37520361e+00 ... 2.75894427e+00 -6.77695131e+00 8.92536259e+00] [ 3.56582236e+00 -6.28159904e+00 5.26421642e+00 ... 7.15895844e+00 1.46610870e+01 5.00614500e+00] [-1.13336593e-01 4.27702618e+00 -5.04044485e+00 ... -3.90360266e-01 1.93790948e+00 -5.45720768e+00]]]] [[[[ 4.80641890e+00 -3.40413308e+00 5.48971534e-01 ... 2.47457266e+00 1.01621218e+01 1.24636221e+01] [-5.99544406e-01 2.03079844e+00 -7.72373676e-01 ... 8.09485245e+00 -6.89137578e-01 9.93476093e-01] [-3.87478065e+00 -7.06088543e+00 4.11367983e-01 ... 9.56138194e-01 -5.48042011e+00 2.99535656e+00] ... [ 2.01600766e+00 3.77556419e+00 2.40082479e+00 ... 4.09139442e+00 -1.26977577e+01 -3.28295052e-01] [-8.71553719e-01 1.35805292e+01 -8.33589196e-01 ... -2.50080566e+01 1.56999028e+00 -3.94536567e+00] [-8.16370487e+00 -5.20413733e+00 -5.90905905e+00 ... 4.30284929e+00 1.74968123e+00 1.93665104e+01]] [[ 1.17243099e+01 2.48230129e-01 1.31993949e-01 ... -4.94538164e+00 -7.37040579e-01 7.80342007e+00] [ 7.27990627e+00 1.57382936e+01 1.01618938e+01 ... 1.58561249e+01 1.06963177e+01 1.83680058e+01] [ 3.11460686e+00 -9.19372559e+00 -5.63212693e-01 ... 5.91887045e+00 1.31729326e+01 2.22590733e+00] ... [ 1.37012124e+00 1.14360914e+01 8.91288090e+00 ... -3.41060233e+00 -4.27053976e+00 5.47223663e+00] [ 8.46767998e+00 2.30003643e+00 1.37823992e+01 ... 1.93896942e+01 -2.61240840e-01 -1.37723293e+01] [ 6.53069925e+00 3.30913734e+00 -1.09649239e+01 ... -1.19067478e+01 -1.25270967e+01 -5.12587023e+00]] [[ 8.92893124e+00 -6.15010071e+00 1.58855236e+00 ... 6.63295269e+00 1.00540724e+01 6.92937422e+00] [-3.13477373e+00 2.14290867e+01 -5.44891691e+00 ... -1.97705154e+01 -1.60508900e+01 -2.83923817e+00] [ 2.49488068e+00 -1.63995552e+01 8.76140308e+00 ... 4.07751513e+00 -1.14666548e+01 -2.64163184e+00] ... 
[ 1.66052551e+01 -3.52537155e+00 8.88709068e-01 ... -2.47295594e+00 6.95270729e+00 -3.25843573e+00] [-7.03915024e+00 -5.91205120e+00 -3.29141212e+00 ... 1.58756218e+01 3.81238365e+00 3.15323281e+00] [-7.75584126e+00 1.41113937e+00 8.22459316e+00 ... 6.49177933e+00 -8.48090744e+00 1.37164030e+01]] ... [[ 7.27068424e+00 3.83419782e-01 2.00081754e+00 ... -1.35014391e+01 1.48819036e+01 6.41307974e+00] [-4.38762569e+00 -2.10368347e+00 2.19347382e+00 ... 1.53523626e+01 -6.33673143e+00 -8.58924294e+00] [ 4.39136457e+00 -1.96684539e+00 5.81569004e+00 ... 1.93536053e+01 -1.35924616e+01 -6.34180880e+00] ... [-8.25664425e+00 -5.76687145e+00 -1.94018161e+00 ... -1.04868793e+01 -1.74663293e+00 -5.66128016e+00] [ 1.14489031e+00 -6.57243252e+00 1.33062124e+01 ... 1.31487501e+00 -6.56829929e+00 -2.15991926e+00] [-8.39419305e-01 6.55701590e+00 -6.35403574e-01 ... 9.44006062e+00 5.33417368e+00 6.32719398e-02]] [[ 4.67604733e+00 -4.28329992e+00 -9.40643883e+00 ... -2.69414473e+00 -4.40880067e-05 2.40444040e+00] [ 1.00759087e+01 -1.15733528e+00 -1.11194193e-01 ... 1.10586243e+01 2.73611641e+00 5.04144669e+00] [ 1.22702370e+01 1.95759344e+00 9.61608315e+00 ... -3.61382794e+00 5.23720932e+00 1.08822155e+01] ... [-2.36148262e+00 -1.68257980e+01 -9.53286839e+00 ... 4.84149504e+00 -1.02285919e+01 9.95604610e+00] [ 1.83915806e+00 1.22053652e+01 1.07540321e+01 ... 6.76191664e+00 4.72745705e+00 -6.49176121e+00] [ 1.64884357e+01 8.40325642e+00 1.52957430e+01 ... 1.39525652e+01 2.41360113e-01 -8.54866445e-01]] [[-6.53245389e-01 -9.29259396e+00 -8.58174086e-01 ... 5.22451258e+00 -3.49676394e+00 -7.09017372e+00] [ 1.29590149e+01 2.63144898e+00 7.69202280e+00 ... 1.02430801e+01 6.30680227e+00 -1.06443977e+00] [-2.47636533e+00 4.56193495e+00 1.86594582e+00 ... 1.56691623e+00 -3.66291857e+00 1.62063370e+01] ... [-6.89831352e+00 -1.31526403e+01 -1.36926270e+01 ... -1.95258766e-01 -2.93682404e+01 9.22711790e-01] [ 3.86929572e-01 1.24762840e+01 1.50320303e+00 ... 
-5.94300866e-01 -7.54711390e+00 -1.85485611e+01] [-1.80139637e+00 4.76440716e+00 -1.98436821e+00 ... -9.60073471e+00 -5.02168369e+00 6.91372252e+00]]]]]; ov_res: [[[[[ 9.89597291e-02 -1.96247840e+00 -6.73498774e+00 ... -3.04538178e+00 3.68159866e+00 3.57842970e+00] [-1.84553547e+01 -2.08886290e+00 -6.35196543e+00 ... -6.11658144e+00 -1.45732212e+01 8.98280907e+00] [ 8.87614822e+00 8.18371391e+00 -6.56620121e+00 ... -1.23672724e+01 -8.02785492e+00 -1.03593817e+01] ... [-1.12194242e+01 -4.48743391e+00 -1.36184001e+00 ... -3.63525295e+00 1.23958552e+00 1.85238111e+00] [ 6.16770124e+00 5.88587618e+00 -2.48432045e+01 ... -1.41151705e+01 1.13946228e+01 2.08920264e+00] [ 1.07062950e+01 1.66831589e+01 -1.14090526e+00 ... 1.83360070e-01 4.21739769e+00 -4.71848202e+00]] [[ 1.52994442e+01 -8.50543976e+00 -7.89086866e+00 ... 7.29766846e+00 -1.26439154e+00 7.37603426e+00] [-2.18126917e+00 -1.05093422e+01 1.24783583e+01 ... -3.17601728e+00 4.98858309e+00 -1.05960331e+01] [-9.92197454e-01 -5.85327911e+00 2.35618448e+00 ... 6.52699327e+00 -2.57491374e+00 1.02815456e+01] ... [ 2.29575276e+00 -3.36277533e+00 1.75932255e+01 ... -5.81526089e+00 1.79558945e+01 1.67414665e+01] [-9.14073467e+00 1.86612487e+00 5.29070187e+00 ... 3.67047834e+00 -5.50517273e+00 5.94465113e+00] [ 7.34761715e+00 6.26484394e+00 -6.26011801e+00 ... -4.63832289e-01 3.23174715e+00 -1.47536874e-01]] [[-1.31478930e+01 4.11509800e+00 -7.39875841e+00 ... -2.68732524e+00 -1.59708929e+00 -4.86747599e+00] [ 1.67280884e+01 6.90039015e+00 -1.48896861e+00 ... -4.13920105e-01 7.03753519e+00 -8.01581889e-02] [ 1.23421469e+01 3.51131558e+00 -1.11593800e+01 ... 6.92426586e+00 6.39749575e+00 -3.31556106e+00] ... [ 4.58822823e+00 1.16661186e+01 1.43570080e+01 ... -1.65343723e+01 6.82886362e-01 -9.86409950e+00] [ 8.81491089e+00 8.15400258e-02 -1.62563877e+01 ... 1.47487888e+01 6.88383758e-01 6.18900061e+00] [-6.19058132e+00 1.17454481e+01 9.41632366e+00 ... 1.26052580e+01 1.43244958e+00 -7.20885706e+00]] ... 
[[ 1.75760746e+01 4.12351274e+00 2.67642450e+00 ... 5.12944508e+00 -3.98953342e+00 -3.12764823e-01] [ 3.26529026e+00 5.85413599e+00 1.07425346e+01 ... -2.25051665e+00 -3.26817083e+00 9.29426765e+00] [-3.28528547e+00 -1.56378536e+01 -1.13806782e+01 ... -9.39856470e-01 1.14267361e+00 -2.17655492e+00] ... [ 9.66531467e+00 7.56883621e+00 -6.97185469e+00 ... -8.91703415e+00 2.23268723e+00 1.31178370e+01] [-8.75718689e+00 -1.14564810e+01 2.76895308e+00 ... -3.11993504e+00 -9.44110870e+00 4.97420120e+00] [ 1.42143564e+01 1.46815395e+00 -7.61735201e+00 ... 3.89967656e+00 -4.75464535e+00 -1.45754850e+00]] [[-1.85566926e+00 -1.02967939e+01 6.76283932e+00 ... 5.59230614e+00 9.04631042e+00 -1.73814917e+00] [-1.14709587e+01 1.29608595e+00 1.55520229e+01 ... -1.54997969e+01 -9.38064957e+00 -4.38969940e-01] [ 7.80861902e+00 -5.87201929e+00 -1.29401588e+01 ... -4.59689951e+00 -2.71944332e+00 1.24942188e+01] ... [ 1.08162870e+01 -9.32682323e+00 2.79913449e+00 ... -4.99072932e-02 -1.10534515e+01 -2.26744232e+01] [-3.27470875e+00 9.42687225e+00 1.05470467e+00 ... -7.72742510e+00 -1.87178612e+01 -2.14068546e+01] [-1.38552437e+01 -4.34731150e+00 4.95209074e+00 ... 1.11972418e+01 3.31306219e+00 1.71593018e+01]] [[ 1.48046747e-01 -6.07372236e+00 2.25249648e+00 ... 8.81998897e-01 6.38752937e-01 -4.33824253e+00] [ 3.69198775e+00 -1.58960521e-01 -8.41483784e+00 ... -9.27645969e+00 -8.49800777e+00 5.67290354e+00] [-8.06371307e+00 4.07659054e+00 -5.99557877e+00 ... -1.63436146e+01 7.81514168e+00 -1.92830963e+01] ... [ 1.38049583e+01 8.33897591e-01 6.37520361e+00 ... 2.75894427e+00 -6.77695131e+00 8.92536259e+00] [ 3.56582236e+00 -6.28159904e+00 5.26421642e+00 ... 7.15895844e+00 1.46610870e+01 5.00614500e+00] [-1.13336593e-01 4.27702618e+00 -5.04044485e+00 ... -3.90360266e-01 1.93790948e+00 -5.45720768e+00]]]] [[[[ 4.80641890e+00 -3.40413308e+00 5.48971534e-01 ... 2.47457266e+00 1.01621218e+01 1.24636221e+01] [-5.99544406e-01 2.03079844e+00 -7.72373676e-01 ... 
8.09485245e+00 -6.89137578e-01 9.93476093e-01] [-3.87478065e+00 -7.06088543e+00 4.11367983e-01 ... 9.56138194e-01 -5.48042011e+00 2.99535656e+00] ... [ 2.01600766e+00 3.77556419e+00 2.40082479e+00 ... 4.09139442e+00 -1.26977577e+01 -3.28295052e-01] [-8.71553719e-01 1.35805292e+01 -8.33589196e-01 ... -2.50080566e+01 1.56999028e+00 -3.94536567e+00] [-8.16370487e+00 -5.20413733e+00 -5.90905905e+00 ... 4.30284929e+00 1.74968123e+00 1.93665104e+01]] [[ 1.17243099e+01 2.48230129e-01 1.31993949e-01 ... -4.94538164e+00 -7.37040579e-01 7.80342007e+00] [ 7.27990627e+00 1.57382936e+01 1.01618938e+01 ... 1.58561249e+01 1.06963177e+01 1.83680058e+01] [ 3.11460686e+00 -9.19372559e+00 -5.63212693e-01 ... 5.91887045e+00 1.31729326e+01 2.22590733e+00] ... [ 1.37012124e+00 1.14360914e+01 8.91288090e+00 ... -3.41060233e+00 -4.27053976e+00 5.47223663e+00] [ 8.46767998e+00 2.30003643e+00 1.37823992e+01 ... 1.93896942e+01 -2.61240840e-01 -1.37723293e+01] [ 6.53069925e+00 3.30913734e+00 -1.09649239e+01 ... -1.19067478e+01 -1.25270967e+01 -5.12587023e+00]] [[ 8.92893124e+00 -6.15010071e+00 1.58855236e+00 ... 6.63295269e+00 1.00540724e+01 6.92937422e+00] [-3.13477373e+00 2.14290867e+01 -5.44891691e+00 ... -1.97705154e+01 -1.60508900e+01 -2.83923817e+00] [ 2.49488068e+00 -1.63995552e+01 8.76140308e+00 ... 4.07751513e+00 -1.14666548e+01 -2.64163184e+00] ... [ 1.66052551e+01 -3.52537155e+00 8.88709068e-01 ... -2.47295594e+00 6.95270729e+00 -3.25843573e+00] [-7.03915024e+00 -5.91205120e+00 -3.29141212e+00 ... 1.58756218e+01 3.81238365e+00 3.15323281e+00] [-7.75584126e+00 1.41113937e+00 8.22459316e+00 ... 6.49177933e+00 -8.48090744e+00 1.37164030e+01]] ... [[ 7.27068424e+00 3.83419782e-01 2.00081754e+00 ... -1.35014391e+01 1.48819036e+01 6.41307974e+00] [-4.38762569e+00 -2.10368347e+00 2.19347382e+00 ... 1.53523626e+01 -6.33673143e+00 -8.58924294e+00] [ 4.39136457e+00 -1.96684539e+00 5.81569004e+00 ... 1.93536053e+01 -1.35924616e+01 -6.34180880e+00] ... 
[-8.25664425e+00 -5.76687145e+00 -1.94018161e+00 ... -1.04868793e+01 -1.74663293e+00 -5.66128016e+00] [ 1.14489031e+00 -6.57243252e+00 1.33062124e+01 ... 1.31487501e+00 -6.56829929e+00 -2.15991926e+00] [-8.39419305e-01 6.55701590e+00 -6.35403574e-01 ... 9.44006062e+00 5.33417368e+00 6.32719398e-02]] [[ 4.67604733e+00 -4.28329992e+00 -9.40643883e+00 ... -2.69414473e+00 -4.40880067e-05 2.40444040e+00] [ 1.00759087e+01 -1.15733528e+00 -1.11194193e-01 ... 1.10586243e+01 2.73611641e+00 5.04144669e+00] [ 1.22702370e+01 1.95759344e+00 9.61608315e+00 ... -3.61382794e+00 5.23720932e+00 1.08822155e+01] ... [-2.36148262e+00 -1.68257980e+01 -9.53286839e+00 ... 4.84149504e+00 -1.02285919e+01 9.95604610e+00] [ 1.83915806e+00 1.22053652e+01 1.07540321e+01 ... 6.76191664e+00 4.72745705e+00 -6.49176121e+00] [ 1.64884357e+01 8.40325642e+00 1.52957430e+01 ... 1.39525652e+01 2.41360113e-01 -8.54866445e-01]] [[-6.53245389e-01 -9.29259396e+00 -8.58174086e-01 ... 5.22451258e+00 -3.49676394e+00 -7.09017372e+00] [ 1.29590149e+01 2.63144898e+00 7.69202280e+00 ... 1.02430801e+01 6.30680227e+00 -1.06443977e+00] [-2.47636533e+00 4.56193495e+00 1.86594582e+00 ... 1.56691623e+00 -3.66291857e+00 1.62063370e+01] ... [-6.89831352e+00 -1.31526403e+01 -1.36926270e+01 ... -1.95258766e-01 -2.93682404e+01 9.22711790e-01] [ 3.86929572e-01 1.24762840e+01 1.50320303e+00 ... -5.94300866e-01 -7.54711390e+00 -1.85485611e+01] [-1.80139637e+00 4.76440716e+00 -1.98436821e+00 ... -9.60073471e+00 -5.02168369e+00 6.91372252e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [1, 0, 0], 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_508.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[-5.91880846e+00 -9.54316044e+00 2.05057755e+01 ... -1.35005884e+01 -4.51256454e-01 -7.69687700e+00] [ 3.13512301e+00 1.76562271e+01 9.85980225e+00 ... 1.22365808e+01 6.53585815e+00 1.24176300e+00] [-2.03547430e+00 4.39351320e+00 6.54543018e+00 ... 8.57546139e+00 1.34313326e+01 -2.92458606e+00] ... [ 2.17614822e+01 6.59728670e+00 -1.03482552e+01 ... 1.11405058e+01 -2.47329903e+00 -1.19428797e+01] [-2.35952830e+00 -2.67228270e+00 -9.67646790e+00 ... 9.70913982e+00 -4.05723429e+00 -4.29657936e+00] [-8.24605370e+00 1.74676208e+01 -4.45679516e-01 ... 2.74304914e+00 2.96960139e+00 4.67035198e+00]] [[-1.49921268e-01 2.31908822e+00 2.19192553e+00 ... 1.23372564e+01 -8.21719527e-01 -2.51471090e+00] [-3.03310871e+00 8.05676174e+00 1.07075825e+01 ... -9.16043222e-01 1.74943721e+00 6.27291632e+00] [-7.80642939e+00 1.06617279e+01 7.39684820e+00 ... 6.98810196e+00 7.16007757e+00 3.24887943e+00] ... [-2.59481025e+00 -9.75285530e+00 1.57219334e+01 ... 3.28485489e+00 -2.70842342e+01 3.43894339e+00] [-4.17790031e+00 1.30838680e+01 7.12824678e+00 ... -3.45870733e+00 8.68332481e+00 4.20652962e+00] [ 3.63025755e-01 -6.52177048e+00 -8.07346916e+00 ... 1.01863794e+01 -2.80336833e+00 -7.66424561e+00]] [[-3.83519387e+00 -5.33360183e-01 -6.62786293e+00 ... 
1.85135250e+01 4.55123007e-01 2.23766279e+00] [ 1.52797613e+01 2.53639575e-02 5.54957867e+00 ... -1.04079399e+01 1.37802343e+01 -1.09015226e+01] [ 1.14611139e+01 9.23814869e+00 4.83134794e+00 ... -7.43821001e+00 4.44254112e+00 1.74380016e+00] ... [ 8.98501873e+00 5.08253956e+00 -1.31978951e+01 ... 3.97876835e+00 -7.35035753e+00 -1.41619654e+01] [-4.79985046e+00 4.25685835e+00 -3.70275117e-02 ... 4.86140776e+00 4.55617762e+00 -1.21151762e+01] [-7.34346032e-01 -9.52857494e+00 -4.72077131e+00 ... 1.66140518e+01 2.37453520e-01 2.17172837e+00]] ... [[ 5.90773439e+00 -7.42726421e+00 1.09700375e+01 ... -1.41603336e+01 1.92781901e+00 -5.38482761e+00] [-5.75239229e+00 -1.03075142e+01 -2.38497219e+01 ... -7.54018879e+00 -5.93104029e+00 -9.56779861e+00] [ 3.42111135e+00 -1.02522020e+01 -9.30107021e+00 ... 5.36268282e+00 -1.77883415e+01 -1.50341761e+00] ... [ 7.89250946e+00 -9.36727619e+00 -1.84965820e+01 ... -3.89731169e+00 3.53980494e+00 9.46985531e+00] [-1.24695311e+01 -2.01938457e+01 -1.64689079e-01 ... 1.16166697e+01 1.61048186e+00 -4.30013132e+00] [-5.92838585e-01 -7.20537547e-03 -2.86531234e+00 ... 4.78035593e+00 1.01217022e+01 -8.37302399e+00]] [[ 7.76079512e+00 -2.81463885e+00 -4.69410896e+00 ... 2.80317745e+01 -2.27442622e+00 3.95052624e+00] [-6.67510366e+00 9.40073967e+00 2.87819147e+00 ... 1.38558025e+01 -1.63397617e+01 -1.30689182e+01] [-1.76603394e+01 -1.01433935e+01 2.28126597e+00 ... -1.39115608e+00 -5.75479174e+00 -8.74168968e+00] ... [-1.09524994e+01 -1.76316242e+01 4.06287193e+00 ... -9.07491779e+00 -7.83025503e+00 6.48498917e+00] [-1.67762032e+01 8.72272491e+00 -4.96931601e+00 ... -6.96804667e+00 -1.48599052e+01 1.92920246e+01] [ 1.40625992e+01 1.19041185e+01 -2.50922346e+00 ... 1.53045464e+00 1.60620272e+00 -7.26904154e+00]] [[-7.04207754e+00 1.38098364e+01 8.05584145e+00 ... -4.57759666e+00 -5.65738535e+00 -8.93826485e+00] [ 8.29221535e+00 -3.99156284e+00 3.13334036e+00 ... 
-2.05379066e+01 -7.88939667e+00 -5.50895548e+00] [ 9.08623123e+00 4.33853483e+00 3.19283605e+00 ... -1.01682587e+01 1.23759687e+00 1.37616014e+01] ... [-2.50381565e+00 -5.94285965e+00 7.97647190e+00 ... 1.28861074e+01 1.25748796e+01 1.10854616e+01] [ 5.55194139e+00 -4.99629021e-01 -5.03715277e+00 ... 1.80147743e+01 5.21536052e-01 2.36017704e+00] [-2.17804742e+00 -1.23076811e+01 9.72980404e+00 ... -1.32723784e+00 9.06088924e+00 -1.69194341e+00]]]] [[[[ 4.32621479e+00 -1.73625412e+01 -6.58345401e-01 ... -2.39169431e+00 7.90617800e+00 -2.83684139e+01] [-1.98384285e-01 1.19579792e+00 -8.41327095e+00 ... 6.24690962e+00 2.93612313e+00 1.21771622e+01] [ 8.97043467e-01 -9.80871868e+00 1.19406664e+00 ... 5.98932219e+00 5.21583652e+00 2.10841217e+01] ... [ 3.37525439e+00 -3.14838243e+00 4.74521780e+00 ... -3.42642617e+00 4.74017191e+00 7.48450100e-01] [-1.09655771e+01 -1.12133789e+00 -3.76254797e+00 ... 2.32052875e+00 1.07770920e+00 7.03532219e+00] [ 3.02328229e-01 -1.26202707e+01 1.03396626e+01 ... -1.11767473e+01 -1.75515425e+00 2.16693902e+00]] [[-1.33539686e+01 -1.25823412e+01 9.63008642e-01 ... -3.14723969e+00 2.71649551e+00 4.62679833e-01] [ 1.75806510e+00 -4.00099182e+00 -1.63302994e+01 ... -1.34508524e+01 9.98520851e-01 5.42781878e+00] [-1.79376030e+00 -8.59867954e+00 -1.21171074e+01 ... 1.36836634e+01 -2.62365189e+01 6.64760542e+00] ... [-1.12493782e+01 -1.45744686e+01 7.75147629e+00 ... -3.93010759e+00 2.13434753e+01 2.08876705e+00] [-9.29024220e-01 -6.34875345e+00 -2.92763472e+00 ... 1.17883282e+01 7.94440889e+00 -3.85365449e-02] [ 7.41189480e+00 -6.58718777e+00 -2.77049828e+00 ... 3.39749837e+00 -1.57137764e+00 9.63418674e+00]] [[-9.12428951e+00 1.52831948e+00 -9.05148411e+00 ... 2.15008080e-01 1.27400732e+01 4.23547602e+00] [ 7.51774728e-01 3.45924306e+00 3.22951674e+00 ... 1.65216351e+00 -2.45854878e+00 4.58372736e+00] [-1.61255150e+01 1.19895315e+01 3.89254242e-01 ... 1.25120125e+01 6.05504572e-01 -1.69705045e+00] ... 
[-1.39699602e+01 1.16944580e+01 -3.62446046e+00 ... 1.32789879e+01 6.49537754e+00 -5.13599300e+00] [-7.76386738e+00 2.56767201e+00 -1.62308636e+01 ... 2.49658337e+01 -1.26306067e+01 1.26403546e+00] [-2.81698108e+00 -1.74054413e+01 1.19288540e+01 ... 3.13945508e+00 -1.19457092e+01 -4.96336079e+00]] ... [[ 2.04966240e+01 1.21911871e+00 1.30867844e+01 ... 7.19162846e+00 5.20386410e+00 5.34413671e+00] [-1.82810533e+00 -1.28690147e+00 2.49149323e+01 ... 1.22126389e+01 9.98484516e+00 9.95958519e+00] [ 2.05417995e+01 1.47699165e+00 2.87384915e+00 ... 3.40697503e+00 -1.30305982e+00 -5.09609175e+00] ... [ 5.28323507e+00 -1.87864056e+01 3.28665090e+00 ... -1.68688262e+00 1.10671091e+01 -1.18468370e+01] [ 2.12919664e+00 4.44190055e-02 -2.36068130e+00 ... -5.82683516e+00 -6.52219820e+00 -8.50521564e+00] [ 1.42311277e+01 -2.72295809e+00 -8.78503227e+00 ... 2.45562387e+00 1.62817497e+01 1.10285978e+01]] [[ 2.02165937e+00 -1.00065804e+01 -1.18965855e+01 ... 1.01243553e+01 7.73993552e-01 -4.79236984e+00] [-3.82344770e+00 -1.77840412e+00 3.61778665e+00 ... -3.88024569e+00 -8.20859241e+00 2.53015137e+00] [ 1.22728596e+01 8.67349625e+00 -1.15141048e+01 ... -1.59536147e+00 2.49996758e+00 9.92872775e-01] ... [-6.69076538e+00 5.68733990e-01 -1.45256176e+01 ... 6.10913134e+00 1.75324619e+00 -1.65136604e+01] [-4.33408213e+00 -7.56554842e+00 2.74070382e+00 ... 6.36111546e+00 -2.33489456e+01 -1.05116587e+01] [-9.23963642e+00 1.94548874e+01 1.15779524e+01 ... 4.74857187e+00 1.73460650e+00 6.73912048e+00]] [[-7.25483894e+00 -9.21203232e+00 2.97589588e+00 ... 1.35888548e+01 2.28352594e+00 8.18830299e+00] [-9.86439896e+00 5.73031473e+00 -5.71026707e+00 ... 8.77016306e-01 1.13523302e+01 1.05628672e+01] [ 1.27161324e-01 6.32981873e+00 4.75793076e+00 ... -1.01833420e+01 -7.40565777e+00 9.51868534e+00] ... [ 4.88129711e+00 -1.05751858e+01 -2.94163436e-01 ... 1.67282066e+01 1.03551254e+01 -4.67037022e-01] [-8.05368185e-01 4.93959332e+00 1.21810770e+01 ... 
-4.23160028e+00 5.78124666e+00 8.22529030e+00] [-2.13557434e+00 3.50168681e+00 -2.16636333e+01 ... 5.43572283e+00 -4.05450010e+00 -3.48875928e+00]]]]]; ov_res: [[[[[-5.91880846e+00 -9.54316044e+00 2.05057755e+01 ... -1.35005884e+01 -4.51256454e-01 -7.69687700e+00] [ 3.13512301e+00 1.76562271e+01 9.85980225e+00 ... 1.22365808e+01 6.53585815e+00 1.24176300e+00] [-2.03547430e+00 4.39351320e+00 6.54543018e+00 ... 8.57546139e+00 1.34313326e+01 -2.92458606e+00] ... [ 2.17614822e+01 6.59728670e+00 -1.03482552e+01 ... 1.11405058e+01 -2.47329903e+00 -1.19428797e+01] [-2.35952830e+00 -2.67228270e+00 -9.67646790e+00 ... 9.70913982e+00 -4.05723429e+00 -4.29657936e+00] [-8.24605370e+00 1.74676208e+01 -4.45679516e-01 ... 2.74304914e+00 2.96960139e+00 4.67035198e+00]] [[-1.49921268e-01 2.31908822e+00 2.19192553e+00 ... 1.23372564e+01 -8.21719527e-01 -2.51471090e+00] [-3.03310871e+00 8.05676174e+00 1.07075825e+01 ... -9.16043222e-01 1.74943721e+00 6.27291632e+00] [-7.80642939e+00 1.06617279e+01 7.39684820e+00 ... 6.98810196e+00 7.16007757e+00 3.24887943e+00] ... [-2.59481025e+00 -9.75285530e+00 1.57219334e+01 ... 3.28485489e+00 -2.70842342e+01 3.43894339e+00] [-4.17790031e+00 1.30838680e+01 7.12824678e+00 ... -3.45870733e+00 8.68332481e+00 4.20652962e+00] [ 3.63025755e-01 -6.52177048e+00 -8.07346916e+00 ... 1.01863794e+01 -2.80336833e+00 -7.66424561e+00]] [[-3.83519387e+00 -5.33360183e-01 -6.62786293e+00 ... 1.85135250e+01 4.55123007e-01 2.23766279e+00] [ 1.52797613e+01 2.53639575e-02 5.54957867e+00 ... -1.04079399e+01 1.37802343e+01 -1.09015226e+01] [ 1.14611139e+01 9.23814869e+00 4.83134794e+00 ... -7.43821001e+00 4.44254112e+00 1.74380016e+00] ... [ 8.98501873e+00 5.08253956e+00 -1.31978951e+01 ... 3.97876835e+00 -7.35035753e+00 -1.41619654e+01] [-4.79985046e+00 4.25685835e+00 -3.70275117e-02 ... 4.86140776e+00 4.55617762e+00 -1.21151762e+01] [-7.34346032e-01 -9.52857494e+00 -4.72077131e+00 ... 1.66140518e+01 2.37453520e-01 2.17172837e+00]] ... 
[[ 5.90773439e+00 -7.42726421e+00 1.09700375e+01 ... -1.41603336e+01 1.92781901e+00 -5.38482761e+00] [-5.75239229e+00 -1.03075142e+01 -2.38497219e+01 ... -7.54018879e+00 -5.93104029e+00 -9.56779861e+00] [ 3.42111135e+00 -1.02522020e+01 -9.30107021e+00 ... 5.36268282e+00 -1.77883415e+01 -1.50341761e+00] ... [ 7.89250946e+00 -9.36727619e+00 -1.84965820e+01 ... -3.89731169e+00 3.53980494e+00 9.46985531e+00] [-1.24695311e+01 -2.01938457e+01 -1.64689079e-01 ... 1.16166697e+01 1.61048186e+00 -4.30013132e+00] [-5.92838585e-01 -7.20537547e-03 -2.86531234e+00 ... 4.78035593e+00 1.01217022e+01 -8.37302399e+00]] [[ 7.76079512e+00 -2.81463885e+00 -4.69410896e+00 ... 2.80317745e+01 -2.27442622e+00 3.95052624e+00] [-6.67510366e+00 9.40073967e+00 2.87819147e+00 ... 1.38558025e+01 -1.63397617e+01 -1.30689182e+01] [-1.76603394e+01 -1.01433935e+01 2.28126597e+00 ... -1.39115608e+00 -5.75479174e+00 -8.74168968e+00] ... [-1.09524994e+01 -1.76316242e+01 4.06287193e+00 ... -9.07491779e+00 -7.83025503e+00 6.48498917e+00] [-1.67762032e+01 8.72272491e+00 -4.96931601e+00 ... -6.96804667e+00 -1.48599052e+01 1.92920246e+01] [ 1.40625992e+01 1.19041185e+01 -2.50922346e+00 ... 1.53045464e+00 1.60620272e+00 -7.26904154e+00]] [[-7.04207754e+00 1.38098364e+01 8.05584145e+00 ... -4.57759666e+00 -5.65738535e+00 -8.93826485e+00] [ 8.29221535e+00 -3.99156284e+00 3.13334036e+00 ... -2.05379066e+01 -7.88939667e+00 -5.50895548e+00] [ 9.08623123e+00 4.33853483e+00 3.19283605e+00 ... -1.01682587e+01 1.23759687e+00 1.37616014e+01] ... [-2.50381565e+00 -5.94285965e+00 7.97647190e+00 ... 1.28861074e+01 1.25748796e+01 1.10854616e+01] [ 5.55194139e+00 -4.99629021e-01 -5.03715277e+00 ... 1.80147743e+01 5.21536052e-01 2.36017704e+00] [-2.17804742e+00 -1.23076811e+01 9.72980404e+00 ... -1.32723784e+00 9.06088924e+00 -1.69194341e+00]]]] [[[[ 4.32621479e+00 -1.73625412e+01 -6.58345401e-01 ... -2.39169431e+00 7.90617800e+00 -2.83684139e+01] [-1.98384285e-01 1.19579792e+00 -8.41327095e+00 ... 
6.24690962e+00 2.93612313e+00 1.21771622e+01] [ 8.97043467e-01 -9.80871868e+00 1.19406664e+00 ... 5.98932219e+00 5.21583652e+00 2.10841217e+01] ... [ 3.37525439e+00 -3.14838243e+00 4.74521780e+00 ... -3.42642617e+00 4.74017191e+00 7.48450100e-01] [-1.09655771e+01 -1.12133789e+00 -3.76254797e+00 ... 2.32052875e+00 1.07770920e+00 7.03532219e+00] [ 3.02328229e-01 -1.26202707e+01 1.03396626e+01 ... -1.11767473e+01 -1.75515425e+00 2.16693902e+00]] [[-1.33539686e+01 -1.25823412e+01 9.63008642e-01 ... -3.14723969e+00 2.71649551e+00 4.62679833e-01] [ 1.75806510e+00 -4.00099182e+00 -1.63302994e+01 ... -1.34508524e+01 9.98520851e-01 5.42781878e+00] [-1.79376030e+00 -8.59867954e+00 -1.21171074e+01 ... 1.36836634e+01 -2.62365189e+01 6.64760542e+00] ... [-1.12493782e+01 -1.45744686e+01 7.75147629e+00 ... -3.93010759e+00 2.13434753e+01 2.08876705e+00] [-9.29024220e-01 -6.34875345e+00 -2.92763472e+00 ... 1.17883282e+01 7.94440889e+00 -3.85365449e-02] [ 7.41189480e+00 -6.58718777e+00 -2.77049828e+00 ... 3.39749837e+00 -1.57137764e+00 9.63418674e+00]] [[-9.12428951e+00 1.52831948e+00 -9.05148411e+00 ... 2.15008080e-01 1.27400732e+01 4.23547602e+00] [ 7.51774728e-01 3.45924306e+00 3.22951674e+00 ... 1.65216351e+00 -2.45854878e+00 4.58372736e+00] [-1.61255150e+01 1.19895315e+01 3.89254242e-01 ... 1.25120125e+01 6.05504572e-01 -1.69705045e+00] ... [-1.39699602e+01 1.16944580e+01 -3.62446046e+00 ... 1.32789879e+01 6.49537754e+00 -5.13599300e+00] [-7.76386738e+00 2.56767201e+00 -1.62308636e+01 ... 2.49658337e+01 -1.26306067e+01 1.26403546e+00] [-2.81698108e+00 -1.74054413e+01 1.19288540e+01 ... 3.13945508e+00 -1.19457092e+01 -4.96336079e+00]] ... [[ 2.04966240e+01 1.21911871e+00 1.30867844e+01 ... 7.19162846e+00 5.20386410e+00 5.34413671e+00] [-1.82810533e+00 -1.28690147e+00 2.49149323e+01 ... 1.22126389e+01 9.98484516e+00 9.95958519e+00] [ 2.05417995e+01 1.47699165e+00 2.87384915e+00 ... 3.40697503e+00 -1.30305982e+00 -5.09609175e+00] ... 
[ 5.28323507e+00 -1.87864056e+01 3.28665090e+00 ... -1.68688262e+00 1.10671091e+01 -1.18468370e+01] [ 2.12919664e+00 4.44190055e-02 -2.36068130e+00 ... -5.82683516e+00 -6.52219820e+00 -8.50521564e+00] [ 1.42311277e+01 -2.72295809e+00 -8.78503227e+00 ... 2.45562387e+00 1.62817497e+01 1.10285978e+01]] [[ 2.02165937e+00 -1.00065804e+01 -1.18965855e+01 ... 1.01243553e+01 7.73993552e-01 -4.79236984e+00] [-3.82344770e+00 -1.77840412e+00 3.61778665e+00 ... -3.88024569e+00 -8.20859241e+00 2.53015137e+00] [ 1.22728596e+01 8.67349625e+00 -1.15141048e+01 ... -1.59536147e+00 2.49996758e+00 9.92872775e-01] ... [-6.69076538e+00 5.68733990e-01 -1.45256176e+01 ... 6.10913134e+00 1.75324619e+00 -1.65136604e+01] [-4.33408213e+00 -7.56554842e+00 2.74070382e+00 ... 6.36111546e+00 -2.33489456e+01 -1.05116587e+01] [-9.23963642e+00 1.94548874e+01 1.15779524e+01 ... 4.74857187e+00 1.73460650e+00 6.73912048e+00]] [[-7.25483894e+00 -9.21203232e+00 2.97589588e+00 ... 1.35888548e+01 2.28352594e+00 8.18830299e+00] [-9.86439896e+00 5.73031473e+00 -5.71026707e+00 ... 8.77016306e-01 1.13523302e+01 1.05628672e+01] [ 1.27161324e-01 6.32981873e+00 4.75793076e+00 ... -1.01833420e+01 -7.40565777e+00 9.51868534e+00] ... [ 4.88129711e+00 -1.05751858e+01 -2.94163436e-01 ... 1.67282066e+01 1.03551254e+01 -4.67037022e-01] [-8.05368185e-01 4.93959332e+00 1.21810770e+01 ... -4.23160028e+00 5.78124666e+00 8.22529030e+00] [-2.13557434e+00 3.50168681e+00 -2.16636333e+01 ... 5.43572283e+00 -4.05450010e+00 -3.48875928e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [0, 0, 1], 'dilations': 1, 'groups': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_510.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 4.15229052e-01 5.40460825e-01 1.34678257e+00 ... -1.90253866e+00 -4.26740217e+00 1.26203709e+01] [-2.63027525e+00 -6.76475811e+00 8.57647133e+00 ... -1.14434729e+01 6.91683102e+00 -5.37227201e+00] [-2.84564948e+00 -5.72147751e+00 -1.15483074e+01 ... -2.09162092e+00 3.68882847e+00 5.69521427e+00] ... [ 4.36239052e+00 -6.13095284e+00 -9.96691608e+00 ... 9.09080505e+00 7.51433611e+00 8.46575356e+00] [ 2.75552750e+00 9.93263054e+00 -2.73066282e+00 ... -7.46492922e-01 -1.30094779e+00 -7.23371983e+00] [-2.03882790e+00 -1.63780057e+00 9.36272144e+00 ... 7.05380154e+00 -1.20613241e+00 9.79093790e-01]] [[ 8.10837209e-01 3.77660823e+00 7.84887552e-01 ... 2.03533983e+00 -1.26013479e+01 -1.28517637e+01] [ 5.46587038e+00 -1.89437103e+00 9.51081371e+00 ... 1.37161551e+01 8.34457397e+00 4.76290047e-01] [ 4.48542881e+00 6.92671680e+00 2.05778675e+01 ... 8.23186588e+00 -3.19659090e+00 -3.40803790e+00] ... [ 9.76399326e+00 1.72885513e+01 9.10603142e+00 ... 1.60019898e+00 1.01632500e+01 -3.01298094e+00] [-4.01968002e+00 -2.15781498e+00 -9.01909924e+00 ... 5.21983862e+00 -4.15188360e+00 -6.50796556e+00] [ 6.54722452e+00 6.73657227e+00 -3.68576241e+00 ... -2.50291038e+00 1.41916180e+01 5.57626188e-02]] [[-6.19859505e+00 -8.41195202e+00 -2.47736192e+00 ... 
-1.46724148e+01 -3.33942962e+00 -8.04480553e+00] [ 6.43360519e+00 -3.40557361e+00 -1.34122610e+00 ... 1.99896514e+00 -3.04529381e+00 -5.76220560e+00] [-5.32793427e+00 8.48269653e+00 8.83772278e+00 ... -2.88337755e+00 2.41965318e+00 1.46831381e+00] ... [-5.54388285e+00 -1.29618769e+01 -2.89367771e+01 ... -9.53601742e+00 9.11001396e+00 2.02380886e+01] [ 1.27946768e+01 9.29653263e+00 -1.30455742e+01 ... 1.04995441e+01 -2.77363086e+00 8.98650837e+00] [-6.94858980e+00 -1.02969170e+01 -5.02141762e+00 ... 1.60488453e+01 -3.04460764e+00 -6.33129716e-01]] ... [[-5.78852415e+00 2.54021943e-01 5.41367102e+00 ... 9.12486172e+00 7.77621031e+00 4.94241536e-01] [ 1.07484970e+01 4.53123426e+00 2.26632071e+00 ... 7.09703350e+00 -1.47613831e+01 -3.01894450e+00] [ 1.20337173e-01 -7.22953463e+00 -5.41766548e+00 ... -1.02038937e+01 -7.39529181e+00 4.19646406e+00] ... [-5.09492457e-01 1.58758438e+00 -7.20893431e+00 ... -1.90355694e+00 -6.89814043e+00 -3.04261541e+00] [-2.49862647e+00 8.79314959e-01 -1.11732519e+00 ... 1.97146189e+00 -5.02829504e+00 -1.88259614e+00] [ 8.43500805e+00 8.22729301e+00 5.92577457e-01 ... -1.63972778e+01 6.96546078e+00 1.28568983e+01]] [[-7.99991369e+00 5.34485388e+00 -3.41452241e+00 ... 4.28694963e+00 -1.31253843e+01 -2.93817854e+00] [-9.87075138e+00 -1.75157719e+01 3.32249141e+00 ... 4.08185387e+00 5.63310683e-01 4.61126900e+00] [ 1.27246046e+01 -1.13769569e+01 -1.15380535e+01 ... 1.13384628e+01 -3.15151763e+00 -4.00339556e+00] ... [-5.49058437e-01 -2.26694512e+00 2.17269063e+00 ... -7.58398342e+00 -5.65661192e-02 -1.22119701e+00] [-6.43700898e-01 -6.78063822e+00 -1.37205386e+00 ... -3.25119519e+00 2.63610578e+00 -3.50118518e+00] [-3.37086177e+00 -9.74577999e+00 8.10035610e+00 ... 1.09289396e+00 9.31915379e+00 -4.01440477e+00]] [[ 4.18824530e+00 1.10555954e+01 -2.74166298e+00 ... 1.09669838e+01 -7.03029203e+00 -2.14032125e+00] [-9.73450661e+00 1.14676237e+01 -1.04290762e+01 ... 
-1.62177637e-01 -1.06725788e+01 4.25526524e+00] [ 2.99658203e+00 3.53356028e+00 -1.60229909e+00 ... 8.98651314e+00 -1.14367275e+01 2.70386767e+00] ... [-1.70865035e+00 2.79737186e+00 -7.38179207e+00 ... -2.58162594e+00 -1.52952015e+00 -1.16488447e+01] [-5.94542551e+00 -7.93492675e-01 7.63201523e+00 ... -1.06770210e+01 1.04741220e+01 -3.32674241e+00] [-8.35117817e+00 1.51754391e+00 -7.96758592e-01 ... -3.35799503e+00 1.94402027e+01 -4.10153198e+00]]]] [[[[ 2.92922664e+00 -6.68162489e+00 -4.68869686e+00 ... -5.81809998e+00 -1.52173357e+01 1.11292143e+01] [ 5.35951710e+00 7.45219135e+00 7.75343466e+00 ... 6.13320494e+00 2.70831299e+00 -1.35435414e+00] [ 8.35828364e-01 1.48786759e+00 -1.09204874e+01 ... 1.21208072e+00 -5.43676329e+00 9.46098614e+00] ... [-4.63712215e+00 7.36264038e+00 3.16175485e+00 ... -2.08329105e+00 -5.67161751e+00 -7.29258776e+00] [-3.33052278e-01 7.61639881e+00 2.62694597e+00 ... -3.95935726e+00 -1.24745512e+00 -4.57344437e+00] [ 2.53779840e+00 9.62236691e+00 1.67020047e+00 ... 6.63930750e+00 -5.52939606e+00 3.02625561e+00]] [[-3.29375553e+00 5.26871014e+00 6.22992611e+00 ... -1.47016726e+01 -3.05950737e+00 1.37064428e+01] [-6.27764320e+00 -6.21703625e+00 -2.39355612e+00 ... 1.12920580e+01 -1.83419061e+00 -2.67402619e-01] [ 6.30051565e+00 9.26019096e+00 3.41354895e+00 ... -1.98094330e+01 -2.85097146e+00 -5.38107347e+00] ... [-2.45993495e+00 -1.76775341e+01 1.03282064e-01 ... 5.28558779e+00 -9.42838192e+00 -1.37010527e+00] [-3.76880121e+00 -1.79141796e+00 1.13589888e+01 ... 1.23959565e+00 3.58235097e+00 -3.00654626e+00] [-1.04009171e+01 1.42703896e+01 -2.21998215e+01 ... 5.74429512e+00 -6.41994810e+00 4.05392981e+00]] [[-6.22434521e+00 1.48173666e+00 -4.19619179e+00 ... 1.86802089e+00 -8.94631577e+00 4.74259233e+00] [ 2.75999188e+00 -3.36198950e+00 1.92053204e+01 ... -2.57407999e+00 -8.26707840e-01 2.76549959e+00] [ 1.72888357e-02 7.13448703e-01 -7.47382224e-01 ... 4.58234072e+00 9.97952557e+00 1.11469803e+01] ... 
[ 8.60140038e+00 8.18092537e+00 6.29037476e+00 ... -8.64166737e+00 -4.14307213e+00 4.49845600e+00] [ 3.17947888e+00 -6.69992781e+00 7.79081011e+00 ... 4.85696459e+00 9.27102661e+00 -3.83475375e+00] [ 9.21440220e+00 2.47326612e+00 -8.10796261e+00 ... -3.86820793e-01 -1.28005295e+01 8.49188614e+00]] ... [[ 1.50620890e+00 7.89585066e+00 -1.12552013e+01 ... 2.93576860e+00 -1.04721224e+00 7.32483685e-01] [-8.29465580e+00 -2.56941366e+00 -1.54259884e+00 ... -5.66180277e+00 3.92767668e+00 -2.55905223e+00] [ 4.34753752e+00 1.11110210e+00 -4.25306129e+00 ... -2.00850630e+00 5.39121962e+00 -1.17206707e+01] ... [-9.91539192e+00 1.14411860e+01 -8.30534554e+00 ... 7.08875418e+00 -7.99716282e+00 -5.14876747e+00] [-1.04912844e+01 -5.97464609e+00 -4.58448744e+00 ... 2.66370201e+00 1.02749853e+01 4.68253851e+00] [ 1.08159733e+00 9.44882274e-01 8.26920605e+00 ... 1.45979774e+00 1.11726961e+01 -1.76185107e+00]] [[ 9.99951780e-01 -5.95672607e+00 2.05584502e+00 ... -4.23915195e+00 1.20897026e+01 7.44573927e+00] [-5.16718292e+00 -4.30593824e+00 2.05536270e+00 ... -2.55829549e+00 1.38516064e+01 6.28140450e+00] [-4.63962650e+00 1.23790085e+00 -1.39583263e+01 ... -5.35500574e+00 1.08300734e+01 -5.54401614e-02] ... [-7.85516453e+00 -3.32790613e+00 -6.54954767e+00 ... 3.66222525e+00 -7.71708965e+00 -4.48989201e+00] [-3.48642635e+00 -2.81630778e+00 1.84669280e+00 ... -9.31148815e+00 -2.18498516e+01 1.07410069e+01] [ 1.27083373e+00 3.98539376e+00 2.99305630e+00 ... 2.92997599e+00 -4.39421082e+00 9.27544117e+00]] [[ 1.01910906e+01 -2.49031949e+00 -8.96019363e+00 ... 1.21498642e+01 -3.46861887e+00 1.21162195e+01] [ 5.64263296e+00 -1.17856541e+01 5.61230719e-01 ... -1.18063390e+00 1.10702887e+01 -2.91702211e-01] [-2.20695901e+00 -3.52211475e+00 2.91738176e+00 ... -4.89502335e+00 2.87960315e+00 4.52933884e+00] ... [ 4.75152588e+00 1.06516943e+01 -3.06197023e+00 ... 3.96490335e+00 2.07822752e+00 5.61560297e+00] [-1.47178733e+00 -2.08092347e-01 8.35014629e+00 ... 
6.71183109e+00 -9.11443329e+00 -1.31505661e+01] [-2.11887121e+00 1.46240311e+01 -7.55583620e+00 ... 8.01295185e+00 2.03663492e+00 6.05858469e+00]]]]]; ov_res: [[[[[ 4.15229052e-01 5.40460825e-01 1.34678257e+00 ... -1.90253866e+00 -4.26740217e+00 1.26203709e+01] [-2.63027525e+00 -6.76475811e+00 8.57647133e+00 ... -1.14434729e+01 6.91683102e+00 -5.37227201e+00] [-2.84564948e+00 -5.72147751e+00 -1.15483074e+01 ... -2.09162092e+00 3.68882847e+00 5.69521427e+00] ... [ 4.36239052e+00 -6.13095284e+00 -9.96691608e+00 ... 9.09080505e+00 7.51433611e+00 8.46575356e+00] [ 2.75552750e+00 9.93263054e+00 -2.73066282e+00 ... -7.46492922e-01 -1.30094779e+00 -7.23371983e+00] [-2.03882790e+00 -1.63780057e+00 9.36272144e+00 ... 7.05380154e+00 -1.20613241e+00 9.79093790e-01]] [[ 8.10837209e-01 3.77660823e+00 7.84887552e-01 ... 2.03533983e+00 -1.26013479e+01 -1.28517637e+01] [ 5.46587038e+00 -1.89437103e+00 9.51081371e+00 ... 1.37161551e+01 8.34457397e+00 4.76290047e-01] [ 4.48542881e+00 6.92671680e+00 2.05778675e+01 ... 8.23186588e+00 -3.19659090e+00 -3.40803790e+00] ... [ 9.76399326e+00 1.72885513e+01 9.10603142e+00 ... 1.60019898e+00 1.01632500e+01 -3.01298094e+00] [-4.01968002e+00 -2.15781498e+00 -9.01909924e+00 ... 5.21983862e+00 -4.15188360e+00 -6.50796556e+00] [ 6.54722452e+00 6.73657227e+00 -3.68576241e+00 ... -2.50291038e+00 1.41916180e+01 5.57626188e-02]] [[-6.19859505e+00 -8.41195202e+00 -2.47736192e+00 ... -1.46724148e+01 -3.33942962e+00 -8.04480553e+00] [ 6.43360519e+00 -3.40557361e+00 -1.34122610e+00 ... 1.99896514e+00 -3.04529381e+00 -5.76220560e+00] [-5.32793427e+00 8.48269653e+00 8.83772278e+00 ... -2.88337755e+00 2.41965318e+00 1.46831381e+00] ... [-5.54388285e+00 -1.29618769e+01 -2.89367771e+01 ... -9.53601742e+00 9.11001396e+00 2.02380886e+01] [ 1.27946768e+01 9.29653263e+00 -1.30455742e+01 ... 1.04995441e+01 -2.77363086e+00 8.98650837e+00] [-6.94858980e+00 -1.02969170e+01 -5.02141762e+00 ... 1.60488453e+01 -3.04460764e+00 -6.33129716e-01]] ... 
[[-5.78852415e+00 2.54021943e-01 5.41367102e+00 ... 9.12486172e+00 7.77621031e+00 4.94241536e-01] [ 1.07484970e+01 4.53123426e+00 2.26632071e+00 ... 7.09703350e+00 -1.47613831e+01 -3.01894450e+00] [ 1.20337173e-01 -7.22953463e+00 -5.41766548e+00 ... -1.02038937e+01 -7.39529181e+00 4.19646406e+00] ... [-5.09492457e-01 1.58758438e+00 -7.20893431e+00 ... -1.90355694e+00 -6.89814043e+00 -3.04261541e+00] [-2.49862647e+00 8.79314959e-01 -1.11732519e+00 ... 1.97146189e+00 -5.02829504e+00 -1.88259614e+00] [ 8.43500805e+00 8.22729301e+00 5.92577457e-01 ... -1.63972778e+01 6.96546078e+00 1.28568983e+01]] [[-7.99991369e+00 5.34485388e+00 -3.41452241e+00 ... 4.28694963e+00 -1.31253843e+01 -2.93817854e+00] [-9.87075138e+00 -1.75157719e+01 3.32249141e+00 ... 4.08185387e+00 5.63310683e-01 4.61126900e+00] [ 1.27246046e+01 -1.13769569e+01 -1.15380535e+01 ... 1.13384628e+01 -3.15151763e+00 -4.00339556e+00] ... [-5.49058437e-01 -2.26694512e+00 2.17269063e+00 ... -7.58398342e+00 -5.65661192e-02 -1.22119701e+00] [-6.43700898e-01 -6.78063822e+00 -1.37205386e+00 ... -3.25119519e+00 2.63610578e+00 -3.50118518e+00] [-3.37086177e+00 -9.74577999e+00 8.10035610e+00 ... 1.09289396e+00 9.31915379e+00 -4.01440477e+00]] [[ 4.18824530e+00 1.10555954e+01 -2.74166298e+00 ... 1.09669838e+01 -7.03029203e+00 -2.14032125e+00] [-9.73450661e+00 1.14676237e+01 -1.04290762e+01 ... -1.62177637e-01 -1.06725788e+01 4.25526524e+00] [ 2.99658203e+00 3.53356028e+00 -1.60229909e+00 ... 8.98651314e+00 -1.14367275e+01 2.70386767e+00] ... [-1.70865035e+00 2.79737186e+00 -7.38179207e+00 ... -2.58162594e+00 -1.52952015e+00 -1.16488447e+01] [-5.94542551e+00 -7.93492675e-01 7.63201523e+00 ... -1.06770210e+01 1.04741220e+01 -3.32674241e+00] [-8.35117817e+00 1.51754391e+00 -7.96758592e-01 ... -3.35799503e+00 1.94402027e+01 -4.10153198e+00]]]] [[[[ 2.92922664e+00 -6.68162489e+00 -4.68869686e+00 ... -5.81809998e+00 -1.52173357e+01 1.11292143e+01] [ 5.35951710e+00 7.45219135e+00 7.75343466e+00 ... 
6.13320494e+00 2.70831299e+00 -1.35435414e+00] [ 8.35828364e-01 1.48786759e+00 -1.09204874e+01 ... 1.21208072e+00 -5.43676329e+00 9.46098614e+00] ... [-4.63712215e+00 7.36264038e+00 3.16175485e+00 ... -2.08329105e+00 -5.67161751e+00 -7.29258776e+00] [-3.33052278e-01 7.61639881e+00 2.62694597e+00 ... -3.95935726e+00 -1.24745512e+00 -4.57344437e+00] [ 2.53779840e+00 9.62236691e+00 1.67020047e+00 ... 6.63930750e+00 -5.52939606e+00 3.02625561e+00]] [[-3.29375553e+00 5.26871014e+00 6.22992611e+00 ... -1.47016726e+01 -3.05950737e+00 1.37064428e+01] [-6.27764320e+00 -6.21703625e+00 -2.39355612e+00 ... 1.12920580e+01 -1.83419061e+00 -2.67402619e-01] [ 6.30051565e+00 9.26019096e+00 3.41354895e+00 ... -1.98094330e+01 -2.85097146e+00 -5.38107347e+00] ... [-2.45993495e+00 -1.76775341e+01 1.03282064e-01 ... 5.28558779e+00 -9.42838192e+00 -1.37010527e+00] [-3.76880121e+00 -1.79141796e+00 1.13589888e+01 ... 1.23959565e+00 3.58235097e+00 -3.00654626e+00] [-1.04009171e+01 1.42703896e+01 -2.21998215e+01 ... 5.74429512e+00 -6.41994810e+00 4.05392981e+00]] [[-6.22434521e+00 1.48173666e+00 -4.19619179e+00 ... 1.86802089e+00 -8.94631577e+00 4.74259233e+00] [ 2.75999188e+00 -3.36198950e+00 1.92053204e+01 ... -2.57407999e+00 -8.26707840e-01 2.76549959e+00] [ 1.72888357e-02 7.13448703e-01 -7.47382224e-01 ... 4.58234072e+00 9.97952557e+00 1.11469803e+01] ... [ 8.60140038e+00 8.18092537e+00 6.29037476e+00 ... -8.64166737e+00 -4.14307213e+00 4.49845600e+00] [ 3.17947888e+00 -6.69992781e+00 7.79081011e+00 ... 4.85696459e+00 9.27102661e+00 -3.83475375e+00] [ 9.21440220e+00 2.47326612e+00 -8.10796261e+00 ... -3.86820793e-01 -1.28005295e+01 8.49188614e+00]] ... [[ 1.50620890e+00 7.89585066e+00 -1.12552013e+01 ... 2.93576860e+00 -1.04721224e+00 7.32483685e-01] [-8.29465580e+00 -2.56941366e+00 -1.54259884e+00 ... -5.66180277e+00 3.92767668e+00 -2.55905223e+00] [ 4.34753752e+00 1.11110210e+00 -4.25306129e+00 ... -2.00850630e+00 5.39121962e+00 -1.17206707e+01] ... 
[-9.91539192e+00 1.14411860e+01 -8.30534554e+00 ... 7.08875418e+00 -7.99716282e+00 -5.14876747e+00] [-1.04912844e+01 -5.97464609e+00 -4.58448744e+00 ... 2.66370201e+00 1.02749853e+01 4.68253851e+00] [ 1.08159733e+00 9.44882274e-01 8.26920605e+00 ... 1.45979774e+00 1.11726961e+01 -1.76185107e+00]] [[ 9.99951780e-01 -5.95672607e+00 2.05584502e+00 ... -4.23915195e+00 1.20897026e+01 7.44573927e+00] [-5.16718292e+00 -4.30593824e+00 2.05536270e+00 ... -2.55829549e+00 1.38516064e+01 6.28140450e+00] [-4.63962650e+00 1.23790085e+00 -1.39583263e+01 ... -5.35500574e+00 1.08300734e+01 -5.54401614e-02] ... [-7.85516453e+00 -3.32790613e+00 -6.54954767e+00 ... 3.66222525e+00 -7.71708965e+00 -4.48989201e+00] [-3.48642635e+00 -2.81630778e+00 1.84669280e+00 ... -9.31148815e+00 -2.18498516e+01 1.07410069e+01] [ 1.27083373e+00 3.98539376e+00 2.99305630e+00 ... 2.92997599e+00 -4.39421082e+00 9.27544117e+00]] [[ 1.01910906e+01 -2.49031949e+00 -8.96019363e+00 ... 1.21498642e+01 -3.46861887e+00 1.21162195e+01] [ 5.64263296e+00 -1.17856541e+01 5.61230719e-01 ... -1.18063390e+00 1.10702887e+01 -2.91702211e-01] [-2.20695901e+00 -3.52211475e+00 2.91738176e+00 ... -4.89502335e+00 2.87960315e+00 4.52933884e+00] ... [ 4.75152588e+00 1.06516943e+01 -3.06197023e+00 ... 3.96490335e+00 2.07822752e+00 5.61560297e+00] [-1.47178733e+00 -2.08092347e-01 8.35014629e+00 ... 6.71183109e+00 -9.11443329e+00 -1.31505661e+01] [-2.11887121e+00 1.46240311e+01 -7.55583620e+00 ... 8.01295185e+00 2.03663492e+00 6.05858469e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [1, 1, 0], 'dilations': 1, 'groups': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_512.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 1, 0]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 11.49018 -1.192007 8.97012 ... -15.332522 2.7507427 1.7792001 ] [ 5.9621415 -11.934994 -0.928572 ... -3.929049 5.2326064 -16.57141 ] [ 1.5918769 1.9206425 -5.5946336 ... -8.676465 -4.0778193 10.333128 ] ... [ 3.308841 -6.5891523 2.3346763 ... -7.3783593 -2.7651434 -3.3598292 ] [ -7.9149203 -1.4352038 8.897399 ... 0.34882239 3.0917299 7.419441 ] [ -4.4987473 0.5251831 5.253464 ... -0.31635615 -6.430366 -6.4738483 ]] [[-12.376097 2.4772408 -10.368836 ... 8.586217 11.985203 6.4003325 ] [-11.710702 2.4787378 4.2189436 ... -11.820572 -18.67808 -3.792234 ] [ -1.4949707 4.4454637 -11.7042465 ... -0.58482987 10.108365 3.7159185 ] ... [-22.314386 4.9361415 2.966355 ... 4.0146074 3.094203 9.896554 ] [ 7.276282 4.439114 -4.2503486 ... -14.668764 -0.48445323 -14.716387 ] [ 1.3247973 6.462116 -8.821069 ... 6.000392 3.5113394 11.7908325 ]] [[ 6.1367936 3.630838 -1.4033295 ... -0.18694903 -9.082844 -18.121965 ] [ 0.2702986 2.0141673 2.128251 ... 14.456542 -9.233819 4.2292676 ] [ -0.5427473 -0.7507301 -2.7043533 ... 4.6091537 -0.9497161 5.2203045 ] ... [ 8.752008 3.3527272 -12.014253 ... -11.947663 7.4378104 1.4044987 ] [ -3.5284038 1.5140637 -10.373251 ... 13.185036 3.7151468 4.5949397 ] [ 3.9164608 -8.682843 10.491461 ... 8.178732 -6.736229 14.350854 ]] ... [[ 8.418905 -5.0128727 9.954025 ... 
-0.34360978 7.6981945 15.497833 ] [ 10.139745 -6.1336637 -3.8589506 ... -4.9529653 -8.590303 1.515909 ] [ 13.589005 3.0209627 -8.417978 ... 0.10493435 14.57003 -12.032208 ] ... [-10.306814 7.7469306 -7.9055023 ... -5.7573633 -9.14312 12.210883 ] [ 1.1444976 10.727157 -3.816045 ... 13.382057 2.0483394 -0.0296323 ] [ 10.054046 -6.401689 1.8733993 ... 4.94475 0.24092744 -4.1296544 ]] [[ -1.4716003 -10.975058 -3.6235743 ... 0.06347409 -2.5985901 -5.380826 ] [ 6.2442837 5.1415935 -1.4086908 ... -1.5161624 8.686423 8.109338 ] [ 2.337688 -16.446756 6.348597 ... 2.0124743 -15.4375 18.322756 ] ... [ -8.976373 0.36130607 -5.5896883 ... 12.128437 -8.597108 2.5720057 ] [ 9.386317 -0.07892764 -3.5269387 ... 1.8431888 7.95322 -6.373118 ] [ 10.83921 -11.018883 8.218783 ... -6.2490773 2.2498071 5.348063 ]] [[ 1.5172428 7.695732 5.7558184 ... -4.486197 2.8249407 0.7211833 ] [ -7.387625 -9.042623 4.8154397 ... -0.06875467 -5.3747993 6.2115026 ] [ -4.3740015 0.2551759 6.480999 ... 6.877481 -4.938695 7.5016847 ] ... [ -0.36857212 -3.5450175 -5.225977 ... -11.329115 5.1215825 1.4415501 ] [ 5.6924796 -6.6138206 -5.410012 ... -6.3174667 2.9414623 2.9499078 ] [ -5.1095114 7.7687325 -4.6450987 ... -0.9758907 0.8612884 -2.2127626 ]]]] [[[[ -5.8485746 -2.175323 -11.230709 ... 0.20210344 -2.0346951 -0.5034314 ] [ -0.85525167 4.952007 1.5540503 ... -4.2280693 -3.358274 -3.1225345 ] [-15.639091 -0.5438931 -5.3606553 ... -10.269565 -12.99494 -2.9226582 ] ... [ -2.7606802 9.993704 9.1513605 ... -8.47605 -7.083028 -7.9514713 ] [ 4.5695953 10.852771 3.844045 ... 1.814845 6.342827 4.330642 ] [ -4.713891 -4.3265743 9.053219 ... 4.6305566 -3.2866561 5.2266383 ]] [[ -7.1601186 -4.9619 2.9553676 ... -7.466251 -4.5748086 -15.013751 ] [ -4.4891553 -5.3715777 5.925176 ... 7.0105696 15.630603 -1.5435362 ] [-11.370223 -5.1547036 0.43635368 ... -13.131887 11.514792 9.892739 ] ... [ -4.7054467 -9.723983 6.9953613 ... 3.7719114 12.601523 11.475211 ] [ -0.25837198 -15.146467 11.128178 ... 
-5.4967866 -12.391635 3.1756902 ] [ 6.25037 0.7667333 7.583645 ... 2.839785 3.109345 12.842564 ]] [[ 5.5739694 16.657295 8.014967 ... -1.2983361 11.625674 -4.0640655 ] [ 2.7807834 -8.001999 15.371323 ... -12.375216 -12.352065 -1.7389569 ] [ 14.044991 2.6260676 -7.5662932 ... 4.9371057 8.723525 2.9930983 ] ... [ 5.5478497 -9.5471325 -7.7052536 ... -9.772829 -8.070352 4.2409678 ] [ 9.570141 -0.03780324 2.3258908 ... -10.52202 0.555423 -4.607521 ] [ 1.3420589 8.076379 0.39525628 ... -13.310098 -3.4083233 3.8814278 ]] ... [[ -6.7414165 19.622158 -7.982702 ... -3.4327872 -2.1393101 -0.93298066] [ -2.257889 9.64248 22.33378 ... -10.054196 -4.2603083 -7.1711416 ] [ -5.508791 -8.591579 2.4305193 ... 0.7027539 6.9876604 -0.09877043] ... [ -6.4984407 -12.616167 -0.44576952 ... -1.6721455 -15.28107 15.61113 ] [ -0.9650927 9.013228 -5.9887495 ... 1.9223859 -14.105146 -3.1996493 ] [ -4.81602 -3.5492344 -5.2851915 ... -5.6550913 -2.9308758 7.6725936 ]] [[ 15.892652 -9.649683 -5.802841 ... -4.2673326 -4.7947235 2.205544 ] [ 15.966982 2.2187982 -3.6402926 ... -8.552319 -7.588008 1.513801 ] [ 12.610177 6.628372 3.4968598 ... -1.7871109 -6.68075 -1.678285 ] ... [ 3.5230129 -6.794803 -5.8434677 ... -2.1408715 0.04649275 -9.7812605 ] [ 2.0929396 -21.4647 3.100287 ... -10.466257 7.327133 -5.420397 ] [ 9.456555 -5.961079 -0.17448145 ... -11.818196 -0.5528537 -3.8462672 ]] [[ -1.9204684 5.0939007 3.763664 ... -12.167359 -3.6634414 8.88015 ] [ -2.487734 0.76790315 2.2056854 ... -4.4023023 -0.3443342 -2.9173303 ] [-20.692957 -8.37949 -5.745316 ... 0.9885225 -9.114986 -5.19834 ] ... [ 0.9542707 -8.25143 -4.13565 ... -0.07051567 0.24189316 0.46827957] [ -1.7725477 2.2101538 -0.2941165 ... -6.1084094 5.4079823 -5.675798 ] [ -3.316845 3.0138326 -0.5438594 ... -10.204174 1.5910271 -2.7871978 ]]]]]; ov_res: [[[[[ 11.49018 -1.192007 8.97012 ... -15.332522 2.7507427 1.7792001 ] [ 5.9621415 -11.934994 -0.928572 ... -3.929049 5.2326064 -16.57141 ] [ 1.5918769 1.9206425 -5.5946336 ... 
-8.676465 -4.0778193 10.333128 ] ... [ 3.308841 -6.5891523 2.3346763 ... -7.3783593 -2.7651434 -3.3598292 ] [ -7.9149203 -1.4352038 8.897399 ... 0.34882239 3.0917299 7.419441 ] [ -4.4987473 0.5251831 5.253464 ... -0.31635615 -6.430366 -6.4738483 ]] [[-12.376097 2.4772408 -10.368836 ... 8.586217 11.985203 6.4003325 ] [-11.710702 2.4787378 4.2189436 ... -11.820572 -18.67808 -3.792234 ] [ -1.4949707 4.4454637 -11.7042465 ... -0.58482987 10.108365 3.7159185 ] ... [-22.314386 4.9361415 2.966355 ... 4.0146074 3.094203 9.896554 ] [ 7.276282 4.439114 -4.2503486 ... -14.668764 -0.48445323 -14.716387 ] [ 1.3247973 6.462116 -8.821069 ... 6.000392 3.5113394 11.7908325 ]] [[ 6.1367936 3.630838 -1.4033295 ... -0.18694903 -9.082844 -18.121965 ] [ 0.2702986 2.0141673 2.128251 ... 14.456542 -9.233819 4.2292676 ] [ -0.5427473 -0.7507301 -2.7043533 ... 4.6091537 -0.9497161 5.2203045 ] ... [ 8.752008 3.3527272 -12.014253 ... -11.947663 7.4378104 1.4044987 ] [ -3.5284038 1.5140637 -10.373251 ... 13.185036 3.7151468 4.5949397 ] [ 3.9164608 -8.682843 10.491461 ... 8.178732 -6.736229 14.350854 ]] ... [[ 8.418905 -5.0128727 9.954025 ... -0.34360978 7.6981945 15.497833 ] [ 10.139745 -6.1336637 -3.8589506 ... -4.9529653 -8.590303 1.515909 ] [ 13.589005 3.0209627 -8.417978 ... 0.10493435 14.57003 -12.032208 ] ... [-10.306814 7.7469306 -7.9055023 ... -5.7573633 -9.14312 12.210883 ] [ 1.1444976 10.727157 -3.816045 ... 13.382057 2.0483394 -0.0296323 ] [ 10.054046 -6.401689 1.8733993 ... 4.94475 0.24092744 -4.1296544 ]] [[ -1.4716003 -10.975058 -3.6235743 ... 0.06347409 -2.5985901 -5.380826 ] [ 6.2442837 5.1415935 -1.4086908 ... -1.5161624 8.686423 8.109338 ] [ 2.337688 -16.446756 6.348597 ... 2.0124743 -15.4375 18.322756 ] ... [ -8.976373 0.36130607 -5.5896883 ... 12.128437 -8.597108 2.5720057 ] [ 9.386317 -0.07892764 -3.5269387 ... 1.8431888 7.95322 -6.373118 ] [ 10.83921 -11.018883 8.218783 ... -6.2490773 2.2498071 5.348063 ]] [[ 1.5172428 7.695732 5.7558184 ... 
-4.486197 2.8249407 0.7211833 ] [ -7.387625 -9.042623 4.8154397 ... -0.06875467 -5.3747993 6.2115026 ] [ -4.3740015 0.2551759 6.480999 ... 6.877481 -4.938695 7.5016847 ] ... [ -0.36857212 -3.5450175 -5.225977 ... -11.329115 5.1215825 1.4415501 ] [ 5.6924796 -6.6138206 -5.410012 ... -6.3174667 2.9414623 2.9499078 ] [ -5.1095114 7.7687325 -4.6450987 ... -0.9758907 0.8612884 -2.2127626 ]]]] [[[[ -5.8485746 -2.175323 -11.230709 ... 0.20210344 -2.0346951 -0.5034314 ] [ -0.85525167 4.952007 1.5540503 ... -4.2280693 -3.358274 -3.1225345 ] [-15.639091 -0.5438931 -5.3606553 ... -10.269565 -12.99494 -2.9226582 ] ... [ -2.7606802 9.993704 9.1513605 ... -8.47605 -7.083028 -7.9514713 ] [ 4.5695953 10.852771 3.844045 ... 1.814845 6.342827 4.330642 ] [ -4.713891 -4.3265743 9.053219 ... 4.6305566 -3.2866561 5.2266383 ]] [[ -7.1601186 -4.9619 2.9553676 ... -7.466251 -4.5748086 -15.013751 ] [ -4.4891553 -5.3715777 5.925176 ... 7.0105696 15.630603 -1.5435362 ] [-11.370223 -5.1547036 0.43635368 ... -13.131887 11.514792 9.892739 ] ... [ -4.7054467 -9.723983 6.9953613 ... 3.7719114 12.601523 11.475211 ] [ -0.25837198 -15.146467 11.128178 ... -5.4967866 -12.391635 3.1756902 ] [ 6.25037 0.7667333 7.583645 ... 2.839785 3.109345 12.842564 ]] [[ 5.5739694 16.657295 8.014967 ... -1.2983361 11.625674 -4.0640655 ] [ 2.7807834 -8.001999 15.371323 ... -12.375216 -12.352065 -1.7389569 ] [ 14.044991 2.6260676 -7.5662932 ... 4.9371057 8.723525 2.9930983 ] ... [ 5.5478497 -9.5471325 -7.7052536 ... -9.772829 -8.070352 4.2409678 ] [ 9.570141 -0.03780324 2.3258908 ... -10.52202 0.555423 -4.607521 ] [ 1.3420589 8.076379 0.39525628 ... -13.310098 -3.4083233 3.8814278 ]] ... [[ -6.7414165 19.622158 -7.982702 ... -3.4327872 -2.1393101 -0.93298066] [ -2.257889 9.64248 22.33378 ... -10.054196 -4.2603083 -7.1711416 ] [ -5.508791 -8.591579 2.4305193 ... 0.7027539 6.9876604 -0.09877043] ... [ -6.4984407 -12.616167 -0.44576952 ... -1.6721455 -15.28107 15.61113 ] [ -0.9650927 9.013228 -5.9887495 ... 
1.9223859 -14.105146 -3.1996493 ] [ -4.81602 -3.5492344 -5.2851915 ... -5.6550913 -2.9308758 7.6725936 ]] [[ 15.892652 -9.649683 -5.802841 ... -4.2673326 -4.7947235 2.205544 ] [ 15.966982 2.2187982 -3.6402926 ... -8.552319 -7.588008 1.513801 ] [ 12.610177 6.628372 3.4968598 ... -1.7871109 -6.68075 -1.678285 ] ... [ 3.5230129 -6.794803 -5.8434677 ... -2.1408715 0.04649275 -9.7812605 ] [ 2.0929396 -21.4647 3.100287 ... -10.466257 7.327133 -5.420397 ] [ 9.456555 -5.961079 -0.17448145 ... -11.818196 -0.5528537 -3.8462672 ]] [[ -1.9204684 5.0939007 3.763664 ... -12.167359 -3.6634414 8.88015 ] [ -2.487734 0.76790315 2.2056854 ... -4.4023023 -0.3443342 -2.9173303 ] [-20.692957 -8.37949 -5.745316 ... 0.9885225 -9.114986 -5.19834 ] ... [ 0.9542707 -8.25143 -4.13565 ... -0.07051567 0.24189316 0.46827957] [ -1.7725477 2.2101538 -0.2941165 ... -6.1084094 5.4079823 -5.675798 ] [ -3.316845 3.0138326 -0.5438594 ... -10.204174 1.5910271 -2.7871978 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [0, 1, 1], 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_514.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 1, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[-2.77356172e+00 5.15080929e+00 -1.04172926e+01 ... 1.93637311e+00 -5.36546230e+00 -9.67934787e-01] [-1.80579376e+00 1.74220598e+00 1.19576178e+01 ... 8.81972980e+00 -1.91130114e+00 -1.18673220e+01] [-6.78611088e+00 -7.68482733e+00 1.38017168e+01 ... -7.35228109e+00 -8.20634747e+00 5.70330679e-01] ... [-7.46816874e+00 -3.51628876e+00 2.74106717e+00 ... 2.14565539e+00 9.88486195e+00 -4.38391113e+00] [ 4.93763876e+00 -9.81959152e+00 -2.02194524e+00 ... 4.39808220e-01 -2.59918046e+00 -3.55433434e-01] [ 1.22278433e+01 -5.07852793e-01 2.86175728e+00 ... -3.46596539e-01 6.45390868e-01 -1.09430609e+01]] [[-1.57243848e+00 -6.25907135e+00 4.81712055e+00 ... 4.34813499e+00 4.68744850e+00 7.06853151e+00] [-1.21045866e+01 6.13008404e+00 -9.25526524e+00 ... 8.90674305e+00 -9.62615776e+00 -8.11652064e-01] [-1.41001282e+01 1.60209846e+01 -1.63279572e+01 ... 1.01543255e+01 -9.27632236e+00 7.84433782e-01] ... [-1.69871712e+01 -4.73520011e-01 8.02731133e+00 ... -6.74377537e+00 -6.52403498e+00 -2.27064753e+00] [-1.00314903e+01 -8.37799644e+00 6.81811237e+00 ... 1.14468622e+01 3.07156992e+00 9.45669487e-02] [ 5.39692879e+00 6.00128651e+00 -2.55285215e+00 ... 1.61285324e+01 -1.03603325e+01 -3.26609588e+00]] [[ 7.40866232e+00 -2.64815235e+00 -9.00810242e+00 ... 
-2.18522340e-01 8.65981770e+00 -3.69916707e-01] [-7.86204433e+00 1.38742971e+00 -7.59493494e+00 ... 7.37534380e+00 -2.26617384e+00 4.63506651e+00] [ 5.68947458e+00 -7.94557953e+00 -1.43858109e+01 ... 9.54977894e+00 2.86970520e+00 -3.76844025e+00] ... [ 1.51942587e+01 4.46253633e+00 6.65434265e+00 ... -1.15540905e+01 -3.77229047e+00 -1.06670332e+01] [-2.39074650e+01 4.19726819e-01 1.43621225e+01 ... -2.61384702e+00 6.29998541e+00 -5.78807783e+00] [-9.12304592e+00 -6.37600005e-01 7.85311222e+00 ... 9.33469009e+00 -3.03691244e+00 -5.88674426e-01]] ... [[-9.44154072e+00 4.38246822e+00 -2.27193618e+00 ... -9.05549154e-02 -1.07860308e+01 9.53112507e+00] [-1.76184678e+00 -5.13496876e+00 7.83495998e+00 ... -2.93469882e+00 -3.05614185e+00 1.31999779e+01] [ 2.30311847e+00 1.99720740e-01 4.56668186e+00 ... 4.30499172e+00 6.07484293e+00 3.93881416e+00] ... [ 4.87980032e+00 2.08637857e+00 -1.70696907e+01 ... 6.72728634e+00 -4.64994431e+00 -1.24606199e+01] [-4.02083397e+00 -9.12042677e-01 1.00647802e+01 ... -6.33123875e+00 1.14368477e+01 4.37556219e+00] [-1.38940394e+00 6.01911974e+00 1.16194563e+01 ... 1.25602760e+01 -9.60205078e-01 5.15660286e+00]] [[-3.61966610e+00 9.25453091e+00 -5.07143927e+00 ... -2.12883162e+00 7.25271988e+00 -7.99873638e+00] [-1.29975910e+01 1.18115740e+01 -6.89155757e-01 ... -2.35449553e+00 -4.12922955e+00 5.88780165e+00] [-5.05051994e+00 1.68050432e+00 3.63274479e+00 ... -1.10628052e+01 9.38395214e+00 1.06341732e+00] ... [-4.20885754e+00 -1.46586418e+01 -1.15269747e+01 ... 1.26176381e+00 -1.89413280e+01 -6.30957484e-01] [ 1.38263779e+01 -1.46478453e+01 -1.02959757e+01 ... -6.58456981e-01 -5.77572107e+00 4.84936523e+00] [-2.47067046e+00 -7.39836550e+00 -6.15982389e+00 ... -4.75324583e+00 1.02108564e+01 6.52975416e+00]] [[ 1.68792143e-01 -2.38702744e-01 -9.23071766e+00 ... -4.38108969e+00 9.52263165e+00 6.27607703e-01] [-3.73830199e+00 -1.30620003e+01 6.01090765e+00 ... 
3.07507372e+00 1.97295964e+00 7.33576393e+00] [ 8.18442822e+00 1.23943481e+01 -1.64773350e+01 ... 5.49942684e+00 3.97841311e+00 2.85372281e+00] ... [-1.70876908e+00 -1.44005890e+01 -1.66651192e+01 ... -9.10163021e+00 -1.12978191e+01 5.72857976e-01] [ 6.31475258e+00 -3.07357574e+00 -2.53456364e+01 ... 4.79463577e+00 -3.24290919e+00 -1.00418816e+01] [ 5.68640661e+00 9.02259254e+00 -7.39196157e+00 ... -7.76771426e-01 1.03593750e+01 8.92364597e+00]]]] [[[[-1.06978059e+00 5.40441036e+00 4.99090433e+00 ... 7.22161055e+00 -3.54385686e+00 3.46074438e+00] [-1.66484582e+00 -4.76540995e+00 -9.06016636e+00 ... -1.42843890e+00 -7.83975410e+00 -4.85451126e+00] [-7.88475275e+00 -5.59003592e+00 -1.16025620e+01 ... -6.17360592e+00 3.35906172e+00 4.04163837e+00] ... [ 1.37304115e+01 -4.73865652e+00 -1.51524267e+01 ... -9.63667202e+00 1.96600704e+01 4.50052973e-03] [-1.54663837e+00 3.08213472e+00 4.58020067e+00 ... 1.68117599e+01 5.16177654e+00 2.32618761e+00] [ 1.12709446e+01 -7.32741475e-01 3.48315072e+00 ... 2.83492966e+01 1.34937334e+01 -7.42237997e+00]] [[-1.88976908e+00 4.83021688e+00 -1.42275896e+01 ... 2.25061250e+00 -5.98629904e+00 1.00244102e+01] [-3.48226523e+00 -4.44969749e+00 7.49405050e+00 ... 2.82632780e+00 -3.82719469e+00 -2.24647713e+00] [-3.04952526e+00 7.83383369e+00 -1.94491255e+00 ... -9.67102528e+00 -1.66380978e+01 6.09357548e+00] ... [ 1.74488430e+01 -7.13988066e-01 -2.19284177e+00 ... 7.59182978e+00 -4.74837017e+00 5.10159969e+00] [ 3.21569467e+00 3.40363240e+00 -7.67210054e+00 ... 7.80200481e-01 -6.47691202e+00 6.47474480e+00] [ 1.09138622e+01 3.92803812e+00 8.16094398e+00 ... -8.14099121e+00 -6.18911266e+00 -1.59202075e+00]] [[-1.17326403e+00 -3.03425050e+00 2.43466854e+00 ... 4.07091141e+00 -1.88996184e+00 5.77809286e+00] [ 1.49300468e+00 -6.02386427e+00 2.23290944e+00 ... 1.88701763e+01 5.07193136e+00 -1.00400782e+01] [-3.58172512e+00 8.04373741e+00 -8.56921196e+00 ... 1.97681923e+01 -1.62264462e+01 -9.86346054e+00] ... 
[-1.00359285e+00 -1.12048750e+01 1.33014355e+01 ... -1.85331249e+01 9.96930122e-01 2.78830338e+00] [ 3.89442730e+00 -1.28947153e+01 3.64166945e-01 ... -8.89035130e+00 7.34132862e+00 -1.46648395e+00] [-9.72648048e+00 1.39645863e+00 6.57749271e+00 ... -1.77089870e+00 -2.10923767e+01 2.89507174e+00]] ... [[-4.08460760e+00 -3.75419426e+00 6.20157814e+00 ... 3.77953672e+00 -1.95140147e+00 -1.01894512e+01] [ 2.32498384e+00 -1.97422180e+01 -1.73154755e+01 ... -1.07822466e+01 1.84326839e+01 -3.14465117e+00] [-1.70243490e+00 1.09878063e+01 1.94811592e+01 ... -2.28286743e+00 1.20184984e+01 8.70852566e+00] ... [ 8.47280407e+00 -4.46178913e+00 -9.80603790e+00 ... -7.48240352e-01 5.55625820e+00 4.95456743e+00] [-3.55035734e+00 5.25429678e+00 9.08599186e+00 ... -1.46750593e+00 1.40639865e+00 -6.89128351e+00] [-4.90139127e-01 -7.07458925e+00 -1.78602040e+00 ... 5.19041681e+00 -5.15641034e-01 6.82801056e+00]] [[-1.02503693e+00 -1.02349777e+01 -6.54609299e+00 ... -8.66726398e+00 -1.01316690e+01 3.18476844e+00] [ 9.82555771e+00 -7.99589491e+00 -1.46775322e+01 ... -1.02436857e+01 -8.36694622e+00 8.95220470e+00] [ 1.12927580e+01 3.92657518e+00 7.09233665e+00 ... -5.20461607e+00 -2.27057385e+00 1.50264847e+00] ... [ 2.17579365e+00 -7.40376663e+00 7.42052603e+00 ... -2.33476048e+01 5.67514563e+00 4.40047407e+00] [-8.94950294e+00 -1.20543966e+01 1.99747711e-01 ... 4.35917664e+00 3.95931411e+00 1.49559755e+01] [ 8.69441986e+00 -1.45267296e+01 -1.74850922e+01 ... 1.05038605e+01 1.17907906e+00 1.69950092e+00]] [[ 2.37020016e+00 -8.23735523e+00 1.02159424e+01 ... 1.05168223e+00 2.90526360e-01 -1.10296977e+00] [ 8.42127609e+00 -1.00783176e+01 6.54883814e+00 ... 1.38730268e+01 -2.47918701e+00 2.92481661e-01] [ 9.22284508e+00 1.21481009e+01 -4.82380533e+00 ... -2.99286485e+00 -1.17734156e+01 8.99237251e+00] ... [-3.22469044e+00 -4.57901621e+00 -7.23698282e+00 ... 6.87101507e+00 -1.79951973e+01 -1.22053661e+01] [ 2.75234222e+00 -7.48762131e-01 8.63347626e+00 ... 
3.62084222e+00 3.73161483e+00 8.15074730e+00] [ 2.50512624e+00 2.01699162e+00 -1.05925646e+01 ... -2.21255493e+00 8.79086494e+00 -3.89959121e+00]]]]]; ov_res: [[[[[-2.77356172e+00 5.15080929e+00 -1.04172926e+01 ... 1.93637311e+00 -5.36546230e+00 -9.67934787e-01] [-1.80579376e+00 1.74220598e+00 1.19576178e+01 ... 8.81972980e+00 -1.91130114e+00 -1.18673220e+01] [-6.78611088e+00 -7.68482733e+00 1.38017168e+01 ... -7.35228109e+00 -8.20634747e+00 5.70330679e-01] ... [-7.46816874e+00 -3.51628876e+00 2.74106717e+00 ... 2.14565539e+00 9.88486195e+00 -4.38391113e+00] [ 4.93763876e+00 -9.81959152e+00 -2.02194524e+00 ... 4.39808220e-01 -2.59918046e+00 -3.55433434e-01] [ 1.22278433e+01 -5.07852793e-01 2.86175728e+00 ... -3.46596539e-01 6.45390868e-01 -1.09430609e+01]] [[-1.57243848e+00 -6.25907135e+00 4.81712055e+00 ... 4.34813499e+00 4.68744850e+00 7.06853151e+00] [-1.21045866e+01 6.13008404e+00 -9.25526524e+00 ... 8.90674305e+00 -9.62615776e+00 -8.11652064e-01] [-1.41001282e+01 1.60209846e+01 -1.63279572e+01 ... 1.01543255e+01 -9.27632236e+00 7.84433782e-01] ... [-1.69871712e+01 -4.73520011e-01 8.02731133e+00 ... -6.74377537e+00 -6.52403498e+00 -2.27064753e+00] [-1.00314903e+01 -8.37799644e+00 6.81811237e+00 ... 1.14468622e+01 3.07156992e+00 9.45669487e-02] [ 5.39692879e+00 6.00128651e+00 -2.55285215e+00 ... 1.61285324e+01 -1.03603325e+01 -3.26609588e+00]] [[ 7.40866232e+00 -2.64815235e+00 -9.00810242e+00 ... -2.18522340e-01 8.65981770e+00 -3.69916707e-01] [-7.86204433e+00 1.38742971e+00 -7.59493494e+00 ... 7.37534380e+00 -2.26617384e+00 4.63506651e+00] [ 5.68947458e+00 -7.94557953e+00 -1.43858109e+01 ... 9.54977894e+00 2.86970520e+00 -3.76844025e+00] ... [ 1.51942587e+01 4.46253633e+00 6.65434265e+00 ... -1.15540905e+01 -3.77229047e+00 -1.06670332e+01] [-2.39074650e+01 4.19726819e-01 1.43621225e+01 ... -2.61384702e+00 6.29998541e+00 -5.78807783e+00] [-9.12304592e+00 -6.37600005e-01 7.85311222e+00 ... 9.33469009e+00 -3.03691244e+00 -5.88674426e-01]] ... 
[[-9.44154072e+00 4.38246822e+00 -2.27193618e+00 ... -9.05549154e-02 -1.07860308e+01 9.53112507e+00] [-1.76184678e+00 -5.13496876e+00 7.83495998e+00 ... -2.93469882e+00 -3.05614185e+00 1.31999779e+01] [ 2.30311847e+00 1.99720740e-01 4.56668186e+00 ... 4.30499172e+00 6.07484293e+00 3.93881416e+00] ... [ 4.87980032e+00 2.08637857e+00 -1.70696907e+01 ... 6.72728634e+00 -4.64994431e+00 -1.24606199e+01] [-4.02083397e+00 -9.12042677e-01 1.00647802e+01 ... -6.33123875e+00 1.14368477e+01 4.37556219e+00] [-1.38940394e+00 6.01911974e+00 1.16194563e+01 ... 1.25602760e+01 -9.60205078e-01 5.15660286e+00]] [[-3.61966610e+00 9.25453091e+00 -5.07143927e+00 ... -2.12883162e+00 7.25271988e+00 -7.99873638e+00] [-1.29975910e+01 1.18115740e+01 -6.89155757e-01 ... -2.35449553e+00 -4.12922955e+00 5.88780165e+00] [-5.05051994e+00 1.68050432e+00 3.63274479e+00 ... -1.10628052e+01 9.38395214e+00 1.06341732e+00] ... [-4.20885754e+00 -1.46586418e+01 -1.15269747e+01 ... 1.26176381e+00 -1.89413280e+01 -6.30957484e-01] [ 1.38263779e+01 -1.46478453e+01 -1.02959757e+01 ... -6.58456981e-01 -5.77572107e+00 4.84936523e+00] [-2.47067046e+00 -7.39836550e+00 -6.15982389e+00 ... -4.75324583e+00 1.02108564e+01 6.52975416e+00]] [[ 1.68792143e-01 -2.38702744e-01 -9.23071766e+00 ... -4.38108969e+00 9.52263165e+00 6.27607703e-01] [-3.73830199e+00 -1.30620003e+01 6.01090765e+00 ... 3.07507372e+00 1.97295964e+00 7.33576393e+00] [ 8.18442822e+00 1.23943481e+01 -1.64773350e+01 ... 5.49942684e+00 3.97841311e+00 2.85372281e+00] ... [-1.70876908e+00 -1.44005890e+01 -1.66651192e+01 ... -9.10163021e+00 -1.12978191e+01 5.72857976e-01] [ 6.31475258e+00 -3.07357574e+00 -2.53456364e+01 ... 4.79463577e+00 -3.24290919e+00 -1.00418816e+01] [ 5.68640661e+00 9.02259254e+00 -7.39196157e+00 ... -7.76771426e-01 1.03593750e+01 8.92364597e+00]]]] [[[[-1.06978059e+00 5.40441036e+00 4.99090433e+00 ... 7.22161055e+00 -3.54385686e+00 3.46074438e+00] [-1.66484582e+00 -4.76540995e+00 -9.06016636e+00 ... 
-1.42843890e+00 -7.83975410e+00 -4.85451126e+00] [-7.88475275e+00 -5.59003592e+00 -1.16025620e+01 ... -6.17360592e+00 3.35906172e+00 4.04163837e+00] ... [ 1.37304115e+01 -4.73865652e+00 -1.51524267e+01 ... -9.63667202e+00 1.96600704e+01 4.50052973e-03] [-1.54663837e+00 3.08213472e+00 4.58020067e+00 ... 1.68117599e+01 5.16177654e+00 2.32618761e+00] [ 1.12709446e+01 -7.32741475e-01 3.48315072e+00 ... 2.83492966e+01 1.34937334e+01 -7.42237997e+00]] [[-1.88976908e+00 4.83021688e+00 -1.42275896e+01 ... 2.25061250e+00 -5.98629904e+00 1.00244102e+01] [-3.48226523e+00 -4.44969749e+00 7.49405050e+00 ... 2.82632780e+00 -3.82719469e+00 -2.24647713e+00] [-3.04952526e+00 7.83383369e+00 -1.94491255e+00 ... -9.67102528e+00 -1.66380978e+01 6.09357548e+00] ... [ 1.74488430e+01 -7.13988066e-01 -2.19284177e+00 ... 7.59182978e+00 -4.74837017e+00 5.10159969e+00] [ 3.21569467e+00 3.40363240e+00 -7.67210054e+00 ... 7.80200481e-01 -6.47691202e+00 6.47474480e+00] [ 1.09138622e+01 3.92803812e+00 8.16094398e+00 ... -8.14099121e+00 -6.18911266e+00 -1.59202075e+00]] [[-1.17326403e+00 -3.03425050e+00 2.43466854e+00 ... 4.07091141e+00 -1.88996184e+00 5.77809286e+00] [ 1.49300468e+00 -6.02386427e+00 2.23290944e+00 ... 1.88701763e+01 5.07193136e+00 -1.00400782e+01] [-3.58172512e+00 8.04373741e+00 -8.56921196e+00 ... 1.97681923e+01 -1.62264462e+01 -9.86346054e+00] ... [-1.00359285e+00 -1.12048750e+01 1.33014355e+01 ... -1.85331249e+01 9.96930122e-01 2.78830338e+00] [ 3.89442730e+00 -1.28947153e+01 3.64166945e-01 ... -8.89035130e+00 7.34132862e+00 -1.46648395e+00] [-9.72648048e+00 1.39645863e+00 6.57749271e+00 ... -1.77089870e+00 -2.10923767e+01 2.89507174e+00]] ... [[-4.08460760e+00 -3.75419426e+00 6.20157814e+00 ... 3.77953672e+00 -1.95140147e+00 -1.01894512e+01] [ 2.32498384e+00 -1.97422180e+01 -1.73154755e+01 ... -1.07822466e+01 1.84326839e+01 -3.14465117e+00] [-1.70243490e+00 1.09878063e+01 1.94811592e+01 ... -2.28286743e+00 1.20184984e+01 8.70852566e+00] ... 
[ 8.47280407e+00 -4.46178913e+00 -9.80603790e+00 ... -7.48240352e-01 5.55625820e+00 4.95456743e+00] [-3.55035734e+00 5.25429678e+00 9.08599186e+00 ... -1.46750593e+00 1.40639865e+00 -6.89128351e+00] [-4.90139127e-01 -7.07458925e+00 -1.78602040e+00 ... 5.19041681e+00 -5.15641034e-01 6.82801056e+00]] [[-1.02503693e+00 -1.02349777e+01 -6.54609299e+00 ... -8.66726398e+00 -1.01316690e+01 3.18476844e+00] [ 9.82555771e+00 -7.99589491e+00 -1.46775322e+01 ... -1.02436857e+01 -8.36694622e+00 8.95220470e+00] [ 1.12927580e+01 3.92657518e+00 7.09233665e+00 ... -5.20461607e+00 -2.27057385e+00 1.50264847e+00] ... [ 2.17579365e+00 -7.40376663e+00 7.42052603e+00 ... -2.33476048e+01 5.67514563e+00 4.40047407e+00] [-8.94950294e+00 -1.20543966e+01 1.99747711e-01 ... 4.35917664e+00 3.95931411e+00 1.49559755e+01] [ 8.69441986e+00 -1.45267296e+01 -1.74850922e+01 ... 1.05038605e+01 1.17907906e+00 1.69950092e+00]] [[ 2.37020016e+00 -8.23735523e+00 1.02159424e+01 ... 1.05168223e+00 2.90526360e-01 -1.10296977e+00] [ 8.42127609e+00 -1.00783176e+01 6.54883814e+00 ... 1.38730268e+01 -2.47918701e+00 2.92481661e-01] [ 9.22284508e+00 1.21481009e+01 -4.82380533e+00 ... -2.99286485e+00 -1.17734156e+01 8.99237251e+00] ... [-3.22469044e+00 -4.57901621e+00 -7.23698282e+00 ... 6.87101507e+00 -1.79951973e+01 -1.22053661e+01] [ 2.75234222e+00 -7.48762131e-01 8.63347626e+00 ... 3.62084222e+00 3.73161483e+00 8.15074730e+00] [ 2.50512624e+00 2.01699162e+00 -1.05925646e+01 ... -2.21255493e+00 8.79086494e+00 -3.89959121e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': [1, 0, 1], 'dilations': 1, 'groups': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_516.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 1]]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 8.17271996e+00 -1.10945988e+01 -6.85854578e+00 ... 9.23736668e+00 5.78206921e+00 -1.47300375e+00] [ 1.01324377e+01 -8.20266056e+00 1.00218153e+00 ... 8.83229637e+00 6.20766115e+00 4.17662382e-01] [-1.69954121e+00 3.87885404e+00 7.69491482e+00 ... 6.70527697e+00 2.77217031e+00 -2.82407093e+00] ... [ 1.29888849e+01 -2.99557161e+00 6.89485502e+00 ... 1.70834792e+00 3.39000750e+00 5.57542229e+00] [-2.51569009e+00 -4.49199009e+00 1.10817564e+00 ... 1.76226532e+00 3.28064728e+00 -5.55313015e+00] [ 3.73971128e+00 3.27834582e+00 5.57240438e+00 ... 5.07202578e+00 2.02650571e+00 6.85832262e+00]] [[ 1.07167101e+01 1.42974043e+01 1.73010468e+00 ... -8.99285316e+00 -4.10915756e+00 -3.50310111e+00] [ 3.12508154e+00 -8.76965427e+00 -2.15890903e+01 ... 1.73849189e+00 1.74284053e+00 6.34776497e+00] [-1.80775762e+00 -1.70338840e+01 -1.08357229e+01 ... -8.47094345e+00 -5.15830278e+00 -5.99293947e+00] ... [-7.12467718e+00 1.18834114e+01 -2.71070266e+00 ... 3.97750998e+00 -1.12887478e+01 -1.90487778e+00] [ 5.38453579e+00 5.32905197e+00 9.92630100e+00 ... 6.38827741e-01 -1.60661626e+00 -1.94614494e+00] [-3.09074020e+00 6.29366732e+00 -2.59526587e+00 ... -5.64139366e+00 2.44902039e+00 5.56212366e-01]] [[-8.29193211e+00 4.93656540e+00 7.76527548e+00 ... 
2.50904775e+00 1.64594555e+01 8.69315720e+00] [-3.26086974e+00 8.63858032e+00 1.17802107e+00 ... 9.77161980e+00 6.34248877e+00 -1.10365143e+01] [-3.65761018e+00 -1.18197746e+01 4.46662331e+00 ... -3.28493834e+00 3.61476946e+00 -4.45271283e-01] ... [ 8.09754968e-01 7.27751493e-01 2.16648436e+00 ... 1.25119858e+01 9.26685905e+00 4.31380844e+00] [-6.43237019e+00 7.25378466e+00 8.67111015e+00 ... 9.01884842e+00 1.12566519e+01 -2.18431091e+00] [ 4.57560444e+00 -2.85492063e+00 8.53182793e+00 ... -8.60847569e+00 -4.93121147e+00 5.49451780e+00]] ... [[ 7.03256464e+00 2.41444564e+00 4.84655190e+00 ... 4.78055537e-01 2.25941620e+01 -6.26341057e+00] [ 3.69509292e+00 -6.93416595e+00 1.59308839e+00 ... 4.75121832e+00 -1.29662714e+01 -8.85100365e+00] [ 7.67819023e+00 9.65167999e+00 6.20185089e+00 ... 5.46439552e+00 1.02062712e+01 5.30922842e+00] ... [ 2.99007535e+00 -3.74209672e-01 3.04529572e+00 ... 8.07945061e+00 3.72833312e-01 8.63480568e+00] [-4.50154400e+00 5.01541734e-01 -2.89687443e+00 ... -1.38398314e+01 -1.59591532e+00 4.25857449e+00] [-7.56831837e+00 1.17489882e+01 -1.10290928e+01 ... -1.30807364e+00 6.48419738e-01 1.87328112e+00]] [[-6.61811948e-01 4.74678087e+00 -1.87302411e+00 ... 1.89075546e+01 -1.10907717e+01 9.39944839e+00] [ 2.01052690e+00 7.70810223e+00 -5.22072649e+00 ... -9.52786827e+00 -1.17905188e+01 5.06883097e+00] [-3.77748275e+00 -8.55352402e+00 -4.27650547e+00 ... -3.27670240e+00 5.67926455e+00 -2.41629772e+01] ... [-1.45484838e+01 -2.21237469e+00 4.91167879e+00 ... -7.30262852e+00 9.92206669e+00 -8.21688652e-01] [-4.28897142e+00 1.10619316e+01 -1.29407463e+01 ... -4.40744311e-01 -1.51869011e+01 -2.86651683e+00] [ 6.67037296e+00 -1.18543291e+01 1.39550769e+00 ... 7.26234722e+00 2.43928209e-01 1.15713367e+01]] [[-9.40100479e+00 2.48356366e+00 2.78099489e+00 ... -1.39657390e+00 -7.42954350e+00 -5.68599796e+00] [-4.24299812e+00 5.46210051e+00 1.44010611e+01 ... 
-6.56583595e+00 -1.04278123e+00 8.01316547e+00] [ 1.64003539e+00 1.05756979e+01 -2.50929809e+00 ... 1.44593649e+01 4.27774096e+00 2.59854364e+00] ... [ 2.11017323e+00 -5.31161261e+00 -1.53286047e+01 ... -7.91201496e+00 4.66981030e+00 -2.53752851e+00] [ 5.35115767e+00 3.01863551e+00 1.21494141e+01 ... -5.26904964e+00 3.49731970e+00 -9.70495129e+00] [-3.14135480e+00 -8.19067717e-01 1.33602676e+01 ... 7.25747883e-01 5.46781197e-02 -4.27869892e+00]]]] [[[[ 2.75203973e-01 -9.15367222e+00 1.15148699e+00 ... 1.08850708e+01 -2.94775748e+00 3.25384378e-01] [-1.77906013e+00 1.10604458e+01 -8.67166710e+00 ... 1.25392962e+01 3.83455682e+00 2.44039202e+00] [-2.63424754e-01 4.81657267e+00 6.55749178e+00 ... 1.16495216e+00 1.65205598e+00 1.13370361e+01] ... [-3.63510203e+00 -4.13669014e+00 2.30641127e+00 ... 8.59537888e+00 -3.20710063e+00 -4.13965178e+00] [-1.43702519e+00 8.65615487e-01 -1.86466656e+01 ... -2.44962716e+00 8.69296074e+00 -4.70824718e+00] [ 4.05737591e+00 -8.61352324e-01 5.06966352e+00 ... -5.49851894e-01 -1.90177310e+00 5.71633244e+00]] [[-3.81364202e+00 -5.04797506e+00 5.16938782e+00 ... 7.67055798e+00 -6.95305943e-01 3.85738301e+00] [-8.33104992e+00 -7.29866409e+00 3.85280776e+00 ... 3.05849409e+00 8.90763092e+00 -1.52572155e+00] [-4.64707518e+00 8.53912449e+00 -2.89414716e+00 ... 7.55564094e-01 -1.27792854e+01 -3.77101302e+00] ... [-6.62458360e-01 8.29097748e+00 -4.02801657e+00 ... -5.99308872e+00 1.33154840e+01 8.27247620e+00] [ 1.49157488e+00 -6.41151190e+00 -3.13163090e+00 ... 8.43078995e+00 -1.82278478e+00 -1.07212865e+00] [ 1.83234715e+00 -9.15902495e-01 1.13572562e+00 ... -6.01854420e+00 4.18975496e+00 2.82678306e-01]] [[-1.05848560e+01 -4.24375725e+00 1.56930418e+01 ... -6.26916647e+00 -5.35315371e+00 -3.54399800e+00] [ 6.56127548e+00 1.66864383e+00 3.39374512e-01 ... -4.61517668e+00 4.88974285e+00 -6.16998339e+00] [ 8.44210684e-01 -1.70970345e+01 -1.10341492e+01 ... 6.01507664e+00 5.65484285e+00 -1.01488314e+01] ... 
[ 9.40714169e+00 4.36455965e+00 4.65930176e+00 ... 7.83377552e+00 3.95391774e+00 -2.39307547e+00] [-1.62727129e+00 5.54452991e+00 4.26982641e+00 ... -1.08323956e+00 -5.56837940e+00 7.40717258e-03] [-1.04148901e+00 9.41344643e+00 -1.70591602e+01 ... -9.32473755e+00 -1.27720366e+01 -2.83931518e+00]] ... [[ 2.36778235e+00 4.20417976e+00 -3.06815594e-01 ... -1.28844213e+00 1.18939066e+01 -8.30931950e+00] [-4.74648386e-01 -7.11129379e+00 6.62823391e+00 ... 7.35513830e+00 -1.51114788e+01 3.97529244e+00] [-4.27854204e+00 1.08916044e+01 -3.43041492e+00 ... -1.44855309e+00 -9.63319093e-02 -1.08229065e+01] ... [-7.69521236e-01 -7.21794415e+00 8.86188126e+00 ... -6.73599482e+00 -3.28934860e+00 9.58089113e-01] [ 7.43571043e+00 1.07904100e+01 -4.80599546e+00 ... -1.46966324e+01 8.26076031e+00 4.53958654e+00] [ 9.66730788e-02 8.52335930e+00 -5.92381668e+00 ... -1.07913113e+01 1.53800464e+00 -2.94498777e+00]] [[-3.28604174e+00 -1.11328325e+01 3.89700508e+00 ... -1.18070774e+01 -4.45796156e+00 -2.67422843e+00] [ 1.35592914e+00 -5.98911285e+00 -1.00133228e+01 ... -1.21218443e+00 1.13659248e+01 -1.23693790e+01] [ 5.47067976e+00 6.19905281e+00 1.96258330e+00 ... 6.73264503e+00 -8.09760380e+00 8.53432369e+00] ... [-9.08263588e+00 -3.85174680e+00 -2.00541840e+01 ... -3.40177846e+00 -1.74125366e+01 8.98890495e+00] [-3.21461171e-01 -7.41168833e+00 -5.01728678e+00 ... -1.51117957e+00 -1.99007976e+00 5.63785696e+00] [ 2.99253774e+00 -7.91422272e+00 1.03188047e+01 ... 4.53284836e+00 4.03871393e+00 5.09951782e+00]] [[-4.26212978e+00 6.86090708e+00 -3.06005454e+00 ... -4.79687357e+00 6.03069210e+00 5.04208136e+00] [-3.15405297e+00 8.72645283e+00 1.27992058e+00 ... 2.78743696e+00 -7.87038517e+00 -2.15463951e-01] [ 5.52890778e+00 1.54020607e+00 -1.06297004e+00 ... 4.49337912e+00 2.17182612e+00 -1.90435636e+00] ... [ 5.23764420e+00 5.93404770e+00 -2.51837635e+00 ... -1.25790453e+00 5.92809772e+00 1.65610480e+00] [-2.45645332e+00 1.01189833e+01 -9.86937714e+00 ... 
7.39898682e+00 -7.42394018e+00 -2.39051485e+00] [ 7.30058491e-01 2.39217687e+00 2.20785618e+00 ... 1.52356267e+00 7.26894808e+00 4.39109373e+00]]]]]; ov_res: [[[[[ 8.17271996e+00 -1.10945988e+01 -6.85854578e+00 ... 9.23736668e+00 5.78206921e+00 -1.47300375e+00] [ 1.01324377e+01 -8.20266056e+00 1.00218153e+00 ... 8.83229637e+00 6.20766115e+00 4.17662382e-01] [-1.69954121e+00 3.87885404e+00 7.69491482e+00 ... 6.70527697e+00 2.77217031e+00 -2.82407093e+00] ... [ 1.29888849e+01 -2.99557161e+00 6.89485502e+00 ... 1.70834792e+00 3.39000750e+00 5.57542229e+00] [-2.51569009e+00 -4.49199009e+00 1.10817564e+00 ... 1.76226532e+00 3.28064728e+00 -5.55313015e+00] [ 3.73971128e+00 3.27834582e+00 5.57240438e+00 ... 5.07202578e+00 2.02650571e+00 6.85832262e+00]] [[ 1.07167101e+01 1.42974043e+01 1.73010468e+00 ... -8.99285316e+00 -4.10915756e+00 -3.50310111e+00] [ 3.12508154e+00 -8.76965427e+00 -2.15890903e+01 ... 1.73849189e+00 1.74284053e+00 6.34776497e+00] [-1.80775762e+00 -1.70338840e+01 -1.08357229e+01 ... -8.47094345e+00 -5.15830278e+00 -5.99293947e+00] ... [-7.12467718e+00 1.18834114e+01 -2.71070266e+00 ... 3.97750998e+00 -1.12887478e+01 -1.90487778e+00] [ 5.38453579e+00 5.32905197e+00 9.92630100e+00 ... 6.38827741e-01 -1.60661626e+00 -1.94614494e+00] [-3.09074020e+00 6.29366732e+00 -2.59526587e+00 ... -5.64139366e+00 2.44902039e+00 5.56212366e-01]] [[-8.29193211e+00 4.93656540e+00 7.76527548e+00 ... 2.50904775e+00 1.64594555e+01 8.69315720e+00] [-3.26086974e+00 8.63858032e+00 1.17802107e+00 ... 9.77161980e+00 6.34248877e+00 -1.10365143e+01] [-3.65761018e+00 -1.18197746e+01 4.46662331e+00 ... -3.28493834e+00 3.61476946e+00 -4.45271283e-01] ... [ 8.09754968e-01 7.27751493e-01 2.16648436e+00 ... 1.25119858e+01 9.26685905e+00 4.31380844e+00] [-6.43237019e+00 7.25378466e+00 8.67111015e+00 ... 9.01884842e+00 1.12566519e+01 -2.18431091e+00] [ 4.57560444e+00 -2.85492063e+00 8.53182793e+00 ... -8.60847569e+00 -4.93121147e+00 5.49451780e+00]] ... 
[[ 7.03256464e+00 2.41444564e+00 4.84655190e+00 ... 4.78055537e-01 2.25941620e+01 -6.26341057e+00] [ 3.69509292e+00 -6.93416595e+00 1.59308839e+00 ... 4.75121832e+00 -1.29662714e+01 -8.85100365e+00] [ 7.67819023e+00 9.65167999e+00 6.20185089e+00 ... 5.46439552e+00 1.02062712e+01 5.30922842e+00] ... [ 2.99007535e+00 -3.74209672e-01 3.04529572e+00 ... 8.07945061e+00 3.72833312e-01 8.63480568e+00] [-4.50154400e+00 5.01541734e-01 -2.89687443e+00 ... -1.38398314e+01 -1.59591532e+00 4.25857449e+00] [-7.56831837e+00 1.17489882e+01 -1.10290928e+01 ... -1.30807364e+00 6.48419738e-01 1.87328112e+00]] [[-6.61811948e-01 4.74678087e+00 -1.87302411e+00 ... 1.89075546e+01 -1.10907717e+01 9.39944839e+00] [ 2.01052690e+00 7.70810223e+00 -5.22072649e+00 ... -9.52786827e+00 -1.17905188e+01 5.06883097e+00] [-3.77748275e+00 -8.55352402e+00 -4.27650547e+00 ... -3.27670240e+00 5.67926455e+00 -2.41629772e+01] ... [-1.45484838e+01 -2.21237469e+00 4.91167879e+00 ... -7.30262852e+00 9.92206669e+00 -8.21688652e-01] [-4.28897142e+00 1.10619316e+01 -1.29407463e+01 ... -4.40744311e-01 -1.51869011e+01 -2.86651683e+00] [ 6.67037296e+00 -1.18543291e+01 1.39550769e+00 ... 7.26234722e+00 2.43928209e-01 1.15713367e+01]] [[-9.40100479e+00 2.48356366e+00 2.78099489e+00 ... -1.39657390e+00 -7.42954350e+00 -5.68599796e+00] [-4.24299812e+00 5.46210051e+00 1.44010611e+01 ... -6.56583595e+00 -1.04278123e+00 8.01316547e+00] [ 1.64003539e+00 1.05756979e+01 -2.50929809e+00 ... 1.44593649e+01 4.27774096e+00 2.59854364e+00] ... [ 2.11017323e+00 -5.31161261e+00 -1.53286047e+01 ... -7.91201496e+00 4.66981030e+00 -2.53752851e+00] [ 5.35115767e+00 3.01863551e+00 1.21494141e+01 ... -5.26904964e+00 3.49731970e+00 -9.70495129e+00] [-3.14135480e+00 -8.19067717e-01 1.33602676e+01 ... 7.25747883e-01 5.46781197e-02 -4.27869892e+00]]]] [[[[ 2.75203973e-01 -9.15367222e+00 1.15148699e+00 ... 1.08850708e+01 -2.94775748e+00 3.25384378e-01] [-1.77906013e+00 1.10604458e+01 -8.67166710e+00 ... 
1.25392962e+01 3.83455682e+00 2.44039202e+00] [-2.63424754e-01 4.81657267e+00 6.55749178e+00 ... 1.16495216e+00 1.65205598e+00 1.13370361e+01] ... [-3.63510203e+00 -4.13669014e+00 2.30641127e+00 ... 8.59537888e+00 -3.20710063e+00 -4.13965178e+00] [-1.43702519e+00 8.65615487e-01 -1.86466656e+01 ... -2.44962716e+00 8.69296074e+00 -4.70824718e+00] [ 4.05737591e+00 -8.61352324e-01 5.06966352e+00 ... -5.49851894e-01 -1.90177310e+00 5.71633244e+00]] [[-3.81364202e+00 -5.04797506e+00 5.16938782e+00 ... 7.67055798e+00 -6.95305943e-01 3.85738301e+00] [-8.33104992e+00 -7.29866409e+00 3.85280776e+00 ... 3.05849409e+00 8.90763092e+00 -1.52572155e+00] [-4.64707518e+00 8.53912449e+00 -2.89414716e+00 ... 7.55564094e-01 -1.27792854e+01 -3.77101302e+00] ... [-6.62458360e-01 8.29097748e+00 -4.02801657e+00 ... -5.99308872e+00 1.33154840e+01 8.27247620e+00] [ 1.49157488e+00 -6.41151190e+00 -3.13163090e+00 ... 8.43078995e+00 -1.82278478e+00 -1.07212865e+00] [ 1.83234715e+00 -9.15902495e-01 1.13572562e+00 ... -6.01854420e+00 4.18975496e+00 2.82678306e-01]] [[-1.05848560e+01 -4.24375725e+00 1.56930418e+01 ... -6.26916647e+00 -5.35315371e+00 -3.54399800e+00] [ 6.56127548e+00 1.66864383e+00 3.39374512e-01 ... -4.61517668e+00 4.88974285e+00 -6.16998339e+00] [ 8.44210684e-01 -1.70970345e+01 -1.10341492e+01 ... 6.01507664e+00 5.65484285e+00 -1.01488314e+01] ... [ 9.40714169e+00 4.36455965e+00 4.65930176e+00 ... 7.83377552e+00 3.95391774e+00 -2.39307547e+00] [-1.62727129e+00 5.54452991e+00 4.26982641e+00 ... -1.08323956e+00 -5.56837940e+00 7.40717258e-03] [-1.04148901e+00 9.41344643e+00 -1.70591602e+01 ... -9.32473755e+00 -1.27720366e+01 -2.83931518e+00]] ... [[ 2.36778235e+00 4.20417976e+00 -3.06815594e-01 ... -1.28844213e+00 1.18939066e+01 -8.30931950e+00] [-4.74648386e-01 -7.11129379e+00 6.62823391e+00 ... 7.35513830e+00 -1.51114788e+01 3.97529244e+00] [-4.27854204e+00 1.08916044e+01 -3.43041492e+00 ... -1.44855309e+00 -9.63319093e-02 -1.08229065e+01] ... 
[-7.69521236e-01 -7.21794415e+00 8.86188126e+00 ... -6.73599482e+00 -3.28934860e+00 9.58089113e-01] [ 7.43571043e+00 1.07904100e+01 -4.80599546e+00 ... -1.46966324e+01 8.26076031e+00 4.53958654e+00] [ 9.66730788e-02 8.52335930e+00 -5.92381668e+00 ... -1.07913113e+01 1.53800464e+00 -2.94498777e+00]] [[-3.28604174e+00 -1.11328325e+01 3.89700508e+00 ... -1.18070774e+01 -4.45796156e+00 -2.67422843e+00] [ 1.35592914e+00 -5.98911285e+00 -1.00133228e+01 ... -1.21218443e+00 1.13659248e+01 -1.23693790e+01] [ 5.47067976e+00 6.19905281e+00 1.96258330e+00 ... 6.73264503e+00 -8.09760380e+00 8.53432369e+00] ... [-9.08263588e+00 -3.85174680e+00 -2.00541840e+01 ... -3.40177846e+00 -1.74125366e+01 8.98890495e+00] [-3.21461171e-01 -7.41168833e+00 -5.01728678e+00 ... -1.51117957e+00 -1.99007976e+00 5.63785696e+00] [ 2.99253774e+00 -7.91422272e+00 1.03188047e+01 ... 4.53284836e+00 4.03871393e+00 5.09951782e+00]] [[-4.26212978e+00 6.86090708e+00 -3.06005454e+00 ... -4.79687357e+00 6.03069210e+00 5.04208136e+00] [-3.15405297e+00 8.72645283e+00 1.27992058e+00 ... 2.78743696e+00 -7.87038517e+00 -2.15463951e-01] [ 5.52890778e+00 1.54020607e+00 -1.06297004e+00 ... 4.49337912e+00 2.17182612e+00 -1.90435636e+00] ... [ 5.23764420e+00 5.93404770e+00 -2.51837635e+00 ... -1.25790453e+00 5.92809772e+00 1.65610480e+00] [-2.45645332e+00 1.01189833e+01 -9.86937714e+00 ... 7.39898682e+00 -7.42394018e+00 -2.39051485e+00] [ 7.30058491e-01 2.39217687e+00 2.20785618e+00 ... 1.52356267e+00 7.26894808e+00 4.39109373e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 'same', 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_518.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) ETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema)fw_re: [[[[[ 3.51161528e+00 7.09300375e+00 -7.18034410e+00 ... -9.05393982e+00 1.75137293e+00 -2.44122338e+00] [ 3.11276484e+00 -3.22951865e+00 -7.48398876e+00 ... -1.95807779e+00 -1.44051752e+01 -4.46565151e+00] [ 2.88970256e+00 -3.70923728e-01 9.68231297e+00 ... -2.75262761e+00 6.96819186e-01 3.81018734e+00] ... [-8.23611546e+00 9.67533493e+00 6.51898479e+00 ... -1.29139376e+01 1.03276606e+01 -7.02703619e+00] [-4.35021639e+00 -4.25998628e-01 -7.35249472e+00 ... -3.94118619e+00 2.86788201e+00 8.16967010e-01] [-2.29304790e+00 -9.90679550e+00 3.97189569e+00 ... 5.30421066e+00 -3.77675438e+00 9.68515694e-01]] [[ 7.12108278e+00 4.83766603e+00 -4.59988356e+00 ... -6.59785461e+00 7.19111013e+00 -6.45233631e+00] [ 7.18380690e+00 -9.44703519e-01 1.17955027e+01 ... 1.38449776e+00 1.44678101e-01 1.94381351e+01] [ 9.18569279e+00 1.23185425e+01 8.48758996e-01 ... 3.53077841e+00 1.50409555e+00 6.41662455e+00] ... [-6.49932671e+00 1.72703991e+01 1.05853243e+01 ... 4.65933353e-01 6.16010380e+00 1.05027294e+01] [-7.19196749e+00 2.84021497e+00 -7.28474331e+00 ... 1.75423641e+01 -4.64512444e+00 -1.01776838e+01] [ 2.53854966e+00 7.40859175e+00 2.02560282e+00 ... 
8.68458652e+00 -3.62188458e+00 7.65583217e-01]] [[-6.42326498e+00 -5.10343885e+00 -7.94871235e+00 ... -1.14257205e+00 -4.14591217e+00 6.26234627e+00] [-3.62399721e+00 -1.73440247e+01 -5.10990286e+00 ... 9.51146030e+00 6.99035406e+00 -1.06839190e+01] [ 2.92604160e+00 7.44726896e+00 3.06988549e+00 ... 6.22221851e+00 1.29367485e+01 -5.21257877e+00] ... [ 4.01361847e+00 -3.00408673e+00 4.00700712e+00 ... 1.11282587e+01 -2.02779579e+01 -2.01981583e+01] [ 8.65305305e-01 -5.50756550e+00 -5.65207148e+00 ... -7.91904879e+00 -1.97208290e+01 3.53607011e+00] [-3.84015942e+00 4.38205624e+00 5.45879698e+00 ... 1.33235435e+01 -2.11217594e+00 -6.16313601e+00]] ... [[-6.22065115e+00 2.94673443e+00 6.21992016e+00 ... 2.62114978e+00 -1.00679331e-01 -4.24363947e+00] [ 4.59989548e+00 -1.81835098e+01 -1.89143353e+01 ... -1.94851506e+00 -1.01949759e+01 1.21402893e+01] [ 2.40596461e+00 1.67970982e+01 -2.34323463e+01 ... 1.38105917e+01 -2.38297176e+00 1.99740734e+01] ... [-3.31010675e+00 -6.56817245e+00 4.96691227e+00 ... -1.63329077e+00 -9.33715248e+00 -1.63828926e+01] [ 1.62124276e-01 -3.63663375e-01 1.10620985e+01 ... -2.39007902e+00 -8.03125286e+00 -1.46042541e-01] [ 1.04033890e+01 4.60203075e+00 1.20065117e+01 ... -4.11984015e+00 5.27532053e+00 8.06669140e+00]] [[ 5.06066990e+00 2.04133606e+00 -1.09272900e+01 ... -1.10975218e+01 8.27987862e+00 2.05688343e+01] [ 9.09739208e+00 -1.07633219e+01 -5.50641918e+00 ... 1.92438145e+01 -3.48603153e+00 1.40876579e+01] [-5.93625307e+00 9.19391155e+00 -1.66470051e+01 ... 6.10063219e+00 -2.03194122e+01 -5.22239208e-01] ... [-3.36291146e+00 6.53025723e+00 -9.83562768e-01 ... -7.48666859e+00 5.32047510e+00 -2.98348570e+00] [ 8.07059383e+00 6.06846952e+00 9.01149940e+00 ... 1.07910252e+01 -1.85653996e+00 3.25727201e+00] [-1.60648227e-01 -2.07134581e+00 -7.34138918e+00 ... 9.34791923e-01 9.08653355e+00 -2.56119561e+00]] [[ 4.75878641e-02 -5.80766869e+00 3.97620392e+00 ... 
-1.58808699e+01 -9.10859013e+00 1.33663340e+01] [ 1.90098703e+00 1.20766373e+01 2.32481289e+00 ... -1.11133347e+01 -6.23932457e+00 1.21304011e+00] [-2.98831749e+00 4.69735336e+00 8.28102207e+00 ... -2.93556285e+00 -1.91090679e+00 -1.98195229e+01] ... [ 3.32359743e+00 4.50669384e+00 1.58558989e+01 ... -8.17711353e+00 -3.46316791e+00 4.61726904e+00] [ 3.85377359e+00 6.41630602e+00 -2.02542901e+00 ... 1.10296841e+01 1.13042650e+01 -1.61400719e+01] [-5.38498640e+00 -2.46144366e+00 -2.25312042e+00 ... 1.22410660e+01 -4.00051832e+00 2.13524628e+00]]]] [[[[-2.68740344e+00 1.29245567e+01 2.91350555e+00 ... 3.84088707e+00 -3.09241652e+00 4.40179443e+00] [-4.29728937e+00 1.87432635e+00 4.81911182e+00 ... -2.65853767e+01 1.54320536e+01 8.18876171e+00] [ 8.28660965e+00 -6.52071857e+00 5.29116392e+00 ... 5.03691959e+00 -3.82449359e-01 1.38280287e+01] ... [ 9.80653477e+00 -1.19804823e+00 -6.12319040e+00 ... 4.36342716e+00 1.59902611e+01 -1.08994341e+01] [ 2.99276567e+00 -1.02208853e+01 -8.62610531e+00 ... -6.90950441e+00 -1.38457775e+01 -3.14617395e+00] [ 6.61808825e+00 4.19560528e+00 -2.76392555e+00 ... -9.99379921e+00 -1.94555302e+01 2.89647579e+00]] [[-6.53083849e+00 1.74851170e+01 5.87293482e+00 ... -3.82663345e+00 -1.36168659e+00 -2.54489279e+00] [ 9.17767525e+00 -1.85028243e+00 5.31003356e-01 ... 1.06042266e+00 -7.38658667e+00 6.02976894e+00] [-1.58417225e+00 -1.85141296e+01 1.51009712e+01 ... 6.87264204e+00 5.71222830e+00 -6.85628080e+00] ... [-3.36490512e-01 -5.18745089e+00 2.86321092e+00 ... -3.30961490e+00 5.48917174e-01 -5.61084414e+00] [ 2.76723003e+00 7.75586033e+00 -6.76988077e+00 ... -1.51267061e+01 7.46315777e-01 6.27237034e+00] [-3.86109614e+00 -5.67879486e+00 2.50756836e+00 ... -9.08884287e-01 -6.09238100e+00 2.95193124e+00]] [[-4.18681669e+00 4.20505911e-01 5.48561335e-01 ... 2.08772202e+01 3.74501705e+00 5.45012760e+00] [-6.73311424e+00 -8.89259398e-01 1.46729970e+00 ... 
2.02443829e+01 2.47930360e+00 -6.35029697e+00] [-1.69891107e+00 -2.28476620e+00 -5.96729088e+00 ... -5.48136616e+00 4.98600769e+00 -5.93155718e+00] ... [-1.69235730e+00 2.06738529e+01 2.45693541e+00 ... -8.95634651e+00 -1.32848225e+01 4.49533939e+00] [ 3.81733239e-01 -1.02776403e+01 1.55790653e+01 ... -3.32901931e+00 -8.55336857e+00 -9.08552837e+00] [-3.19290876e+00 5.11204481e+00 3.68040085e-01 ... -2.65960240e+00 -1.20249672e+01 -9.73906422e+00]] ... [[ 8.28357315e+00 2.44027305e+00 -1.27322025e+01 ... 9.03983891e-01 1.38461571e+01 -6.73412704e+00] [-7.87907457e+00 1.23833246e+01 -1.08490553e+01 ... -1.43328400e+01 1.12694759e+01 -9.35050547e-01] [-2.30170107e+00 9.59460163e+00 9.83090878e+00 ... -1.16889715e+00 9.70931149e+00 1.30512609e+01] ... [ 2.07148838e+00 -1.14012985e+01 -6.82453918e+00 ... -1.93677044e+01 -6.83821249e+00 -1.21254330e+01] [-6.83365059e+00 1.22211609e+01 1.37468445e+00 ... 2.45380282e+00 5.71980095e+00 -1.24413548e+01] [ 6.78881550e+00 -7.84928751e+00 5.79659653e+00 ... -2.78422999e+00 -1.20829391e+01 -6.78534317e+00]] [[ 5.41083515e-01 -1.89441407e+00 -3.12243962e+00 ... -5.54424620e+00 4.87738419e+00 8.70015144e+00] [ 5.56495070e-01 1.43452415e+01 -9.14143658e+00 ... 6.23595858e+00 1.03211031e+01 -2.83959079e+00] [ 8.73149514e-01 7.81459808e-02 -1.86229956e+00 ... 1.01076031e+01 -2.19474840e+00 -6.51450014e+00] ... [-1.62528777e+00 -2.56773494e-02 -7.03410721e+00 ... -3.49609423e+00 -6.01163916e-02 -9.29967308e+00] [-4.26060820e+00 5.23507690e+00 -9.36119175e+00 ... 1.39843111e+01 -6.19687796e-01 7.31150985e-01] [-1.43208528e+00 -8.14563370e+00 1.15464439e+01 ... 7.33400047e-01 4.85474634e+00 3.06789970e+00]] [[-2.47227883e+00 -7.67551064e-01 6.07023716e+00 ... -1.49929011e+00 -3.44636655e+00 3.77366924e+00] [ 3.03789794e-01 -1.55437546e+01 8.88602161e+00 ... -5.20975236e-03 -4.89757919e+00 -5.79411554e+00] [-3.23178530e+00 -6.03489780e+00 -2.37322283e+00 ... 6.06111109e-01 -4.10188627e+00 -4.57574606e+00] ... 
[ 8.92574692e+00 6.61196756e+00 -5.41751242e+00 ... -8.39108562e+00 -6.81163216e+00 5.66084576e+00] [ 8.11650372e+00 4.90632677e+00 3.36172104e+00 ... 6.26217890e+00 -3.14254951e+00 -5.14004827e-01] [-4.22554970e+00 1.80094168e-01 -6.54123497e+00 ... -7.68177652e+00 -1.92544472e+00 7.70316792e+00]]]]]; ov_res: [[[[[ 3.51161528e+00 7.09300375e+00 -7.18034410e+00 ... -9.05393982e+00 1.75137293e+00 -2.44122338e+00] [ 3.11276484e+00 -3.22951865e+00 -7.48398876e+00 ... -1.95807779e+00 -1.44051752e+01 -4.46565151e+00] [ 2.88970256e+00 -3.70923728e-01 9.68231297e+00 ... -2.75262761e+00 6.96819186e-01 3.81018734e+00] ... [-8.23611546e+00 9.67533493e+00 6.51898479e+00 ... -1.29139376e+01 1.03276606e+01 -7.02703619e+00] [-4.35021639e+00 -4.25998628e-01 -7.35249472e+00 ... -3.94118619e+00 2.86788201e+00 8.16967010e-01] [-2.29304790e+00 -9.90679550e+00 3.97189569e+00 ... 5.30421066e+00 -3.77675438e+00 9.68515694e-01]] [[ 7.12108278e+00 4.83766603e+00 -4.59988356e+00 ... -6.59785461e+00 7.19111013e+00 -6.45233631e+00] [ 7.18380690e+00 -9.44703519e-01 1.17955027e+01 ... 1.38449776e+00 1.44678101e-01 1.94381351e+01] [ 9.18569279e+00 1.23185425e+01 8.48758996e-01 ... 3.53077841e+00 1.50409555e+00 6.41662455e+00] ... [-6.49932671e+00 1.72703991e+01 1.05853243e+01 ... 4.65933353e-01 6.16010380e+00 1.05027294e+01] [-7.19196749e+00 2.84021497e+00 -7.28474331e+00 ... 1.75423641e+01 -4.64512444e+00 -1.01776838e+01] [ 2.53854966e+00 7.40859175e+00 2.02560282e+00 ... 8.68458652e+00 -3.62188458e+00 7.65583217e-01]] [[-6.42326498e+00 -5.10343885e+00 -7.94871235e+00 ... -1.14257205e+00 -4.14591217e+00 6.26234627e+00] [-3.62399721e+00 -1.73440247e+01 -5.10990286e+00 ... 9.51146030e+00 6.99035406e+00 -1.06839190e+01] [ 2.92604160e+00 7.44726896e+00 3.06988549e+00 ... 6.22221851e+00 1.29367485e+01 -5.21257877e+00] ... [ 4.01361847e+00 -3.00408673e+00 4.00700712e+00 ... 1.11282587e+01 -2.02779579e+01 -2.01981583e+01] [ 8.65305305e-01 -5.50756550e+00 -5.65207148e+00 ... 
-7.91904879e+00 -1.97208290e+01 3.53607011e+00] [-3.84015942e+00 4.38205624e+00 5.45879698e+00 ... 1.33235435e+01 -2.11217594e+00 -6.16313601e+00]] ... [[-6.22065115e+00 2.94673443e+00 6.21992016e+00 ... 2.62114978e+00 -1.00679331e-01 -4.24363947e+00] [ 4.59989548e+00 -1.81835098e+01 -1.89143353e+01 ... -1.94851506e+00 -1.01949759e+01 1.21402893e+01] [ 2.40596461e+00 1.67970982e+01 -2.34323463e+01 ... 1.38105917e+01 -2.38297176e+00 1.99740734e+01] ... [-3.31010675e+00 -6.56817245e+00 4.96691227e+00 ... -1.63329077e+00 -9.33715248e+00 -1.63828926e+01] [ 1.62124276e-01 -3.63663375e-01 1.10620985e+01 ... -2.39007902e+00 -8.03125286e+00 -1.46042541e-01] [ 1.04033890e+01 4.60203075e+00 1.20065117e+01 ... -4.11984015e+00 5.27532053e+00 8.06669140e+00]] [[ 5.06066990e+00 2.04133606e+00 -1.09272900e+01 ... -1.10975218e+01 8.27987862e+00 2.05688343e+01] [ 9.09739208e+00 -1.07633219e+01 -5.50641918e+00 ... 1.92438145e+01 -3.48603153e+00 1.40876579e+01] [-5.93625307e+00 9.19391155e+00 -1.66470051e+01 ... 6.10063219e+00 -2.03194122e+01 -5.22239208e-01] ... [-3.36291146e+00 6.53025723e+00 -9.83562768e-01 ... -7.48666859e+00 5.32047510e+00 -2.98348570e+00] [ 8.07059383e+00 6.06846952e+00 9.01149940e+00 ... 1.07910252e+01 -1.85653996e+00 3.25727201e+00] [-1.60648227e-01 -2.07134581e+00 -7.34138918e+00 ... 9.34791923e-01 9.08653355e+00 -2.56119561e+00]] [[ 4.75878641e-02 -5.80766869e+00 3.97620392e+00 ... -1.58808699e+01 -9.10859013e+00 1.33663340e+01] [ 1.90098703e+00 1.20766373e+01 2.32481289e+00 ... -1.11133347e+01 -6.23932457e+00 1.21304011e+00] [-2.98831749e+00 4.69735336e+00 8.28102207e+00 ... -2.93556285e+00 -1.91090679e+00 -1.98195229e+01] ... [ 3.32359743e+00 4.50669384e+00 1.58558989e+01 ... -8.17711353e+00 -3.46316791e+00 4.61726904e+00] [ 3.85377359e+00 6.41630602e+00 -2.02542901e+00 ... 1.10296841e+01 1.13042650e+01 -1.61400719e+01] [-5.38498640e+00 -2.46144366e+00 -2.25312042e+00 ... 
1.22410660e+01 -4.00051832e+00 2.13524628e+00]]]] [[[[-2.68740344e+00 1.29245567e+01 2.91350555e+00 ... 3.84088707e+00 -3.09241652e+00 4.40179443e+00] [-4.29728937e+00 1.87432635e+00 4.81911182e+00 ... -2.65853767e+01 1.54320536e+01 8.18876171e+00] [ 8.28660965e+00 -6.52071857e+00 5.29116392e+00 ... 5.03691959e+00 -3.82449359e-01 1.38280287e+01] ... [ 9.80653477e+00 -1.19804823e+00 -6.12319040e+00 ... 4.36342716e+00 1.59902611e+01 -1.08994341e+01] [ 2.99276567e+00 -1.02208853e+01 -8.62610531e+00 ... -6.90950441e+00 -1.38457775e+01 -3.14617395e+00] [ 6.61808825e+00 4.19560528e+00 -2.76392555e+00 ... -9.99379921e+00 -1.94555302e+01 2.89647579e+00]] [[-6.53083849e+00 1.74851170e+01 5.87293482e+00 ... -3.82663345e+00 -1.36168659e+00 -2.54489279e+00] [ 9.17767525e+00 -1.85028243e+00 5.31003356e-01 ... 1.06042266e+00 -7.38658667e+00 6.02976894e+00] [-1.58417225e+00 -1.85141296e+01 1.51009712e+01 ... 6.87264204e+00 5.71222830e+00 -6.85628080e+00] ... [-3.36490512e-01 -5.18745089e+00 2.86321092e+00 ... -3.30961490e+00 5.48917174e-01 -5.61084414e+00] [ 2.76723003e+00 7.75586033e+00 -6.76988077e+00 ... -1.51267061e+01 7.46315777e-01 6.27237034e+00] [-3.86109614e+00 -5.67879486e+00 2.50756836e+00 ... -9.08884287e-01 -6.09238100e+00 2.95193124e+00]] [[-4.18681669e+00 4.20505911e-01 5.48561335e-01 ... 2.08772202e+01 3.74501705e+00 5.45012760e+00] [-6.73311424e+00 -8.89259398e-01 1.46729970e+00 ... 2.02443829e+01 2.47930360e+00 -6.35029697e+00] [-1.69891107e+00 -2.28476620e+00 -5.96729088e+00 ... -5.48136616e+00 4.98600769e+00 -5.93155718e+00] ... [-1.69235730e+00 2.06738529e+01 2.45693541e+00 ... -8.95634651e+00 -1.32848225e+01 4.49533939e+00] [ 3.81733239e-01 -1.02776403e+01 1.55790653e+01 ... -3.32901931e+00 -8.55336857e+00 -9.08552837e+00] [-3.19290876e+00 5.11204481e+00 3.68040085e-01 ... -2.65960240e+00 -1.20249672e+01 -9.73906422e+00]] ... [[ 8.28357315e+00 2.44027305e+00 -1.27322025e+01 ... 
9.03983891e-01 1.38461571e+01 -6.73412704e+00] [-7.87907457e+00 1.23833246e+01 -1.08490553e+01 ... -1.43328400e+01 1.12694759e+01 -9.35050547e-01] [-2.30170107e+00 9.59460163e+00 9.83090878e+00 ... -1.16889715e+00 9.70931149e+00 1.30512609e+01] ... [ 2.07148838e+00 -1.14012985e+01 -6.82453918e+00 ... -1.93677044e+01 -6.83821249e+00 -1.21254330e+01] [-6.83365059e+00 1.22211609e+01 1.37468445e+00 ... 2.45380282e+00 5.71980095e+00 -1.24413548e+01] [ 6.78881550e+00 -7.84928751e+00 5.79659653e+00 ... -2.78422999e+00 -1.20829391e+01 -6.78534317e+00]] [[ 5.41083515e-01 -1.89441407e+00 -3.12243962e+00 ... -5.54424620e+00 4.87738419e+00 8.70015144e+00] [ 5.56495070e-01 1.43452415e+01 -9.14143658e+00 ... 6.23595858e+00 1.03211031e+01 -2.83959079e+00] [ 8.73149514e-01 7.81459808e-02 -1.86229956e+00 ... 1.01076031e+01 -2.19474840e+00 -6.51450014e+00] ... [-1.62528777e+00 -2.56773494e-02 -7.03410721e+00 ... -3.49609423e+00 -6.01163916e-02 -9.29967308e+00] [-4.26060820e+00 5.23507690e+00 -9.36119175e+00 ... 1.39843111e+01 -6.19687796e-01 7.31150985e-01] [-1.43208528e+00 -8.14563370e+00 1.15464439e+01 ... 7.33400047e-01 4.85474634e+00 3.06789970e+00]] [[-2.47227883e+00 -7.67551064e-01 6.07023716e+00 ... -1.49929011e+00 -3.44636655e+00 3.77366924e+00] [ 3.03789794e-01 -1.55437546e+01 8.88602161e+00 ... -5.20975236e-03 -4.89757919e+00 -5.79411554e+00] [-3.23178530e+00 -6.03489780e+00 -2.37322283e+00 ... 6.06111109e-01 -4.10188627e+00 -4.57574606e+00] ... [ 8.92574692e+00 6.61196756e+00 -5.41751242e+00 ... -8.39108562e+00 -6.81163216e+00 5.66084576e+00] [ 8.11650372e+00 4.90632677e+00 3.36172104e+00 ... 6.26217890e+00 -3.14254951e+00 -5.14004827e-01] [-4.22554970e+00 1.80094168e-01 -6.54123497e+00 ... -7.68177652e+00 -1.92544472e+00 7.70316792e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convnd.py::TestConv3D::test_conv3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': 1, 'pads': 'valid', 'dilations': 1, 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convnd.___torch_mangle_520.aten_conv3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int = prim::Constant[value=1]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::conv3d(%x.1, %self.weight, %self.bias, %2, %self.pads, %2, %self.strides) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convnd.py:125:23 return (%7) fw_re: [[[[[ 1.14262018e+01 1.63769569e+01 5.39559364e+00 ... -2.30106468e+01 -9.41119862e+00 -1.72492485e+01] [-2.63072205e+00 4.33323956e+00 1.02905588e+01 ... -1.16041422e+01 -5.91134930e+00 -3.74069905e+00] [ 1.16444321e+01 -1.33444357e+01 4.69036198e+00 ... -7.20898986e-01 8.84782493e-01 -4.81057930e+00] ... [-5.44688988e+00 -1.32615128e+01 8.91366100e+00 ... 4.95707893e+00 -5.95072460e+00 -9.67060280e+00] [-3.44048738e+00 -3.00318033e-01 -3.71222591e+00 ... 6.89507675e+00 -5.66769505e+00 -1.64230120e+00] [ 2.22648764e+00 -1.98039043e+00 4.18234158e+00 ... 2.88020611e+00 1.38180418e+01 -5.41499710e+00]] [[-1.10704098e+01 1.19480200e+01 -9.75335979e+00 ... -9.17385197e+00 7.68156672e+00 -1.15068722e+01] [ 1.88516827e+01 -5.00455284e+00 -3.48714375e+00 ... -2.37083282e+01 1.67414665e+01 -1.69730434e+01] [ 5.95657778e+00 -2.77415924e+01 7.21080399e+00 ... 1.34314413e+01 9.28638279e-01 -9.39702511e+00] ... [ 1.16361628e+01 -8.93601120e-01 -5.84577227e+00 ... 1.79906178e+01 -6.04733992e+00 2.66462250e+01] [ 1.82019615e+01 -7.12701178e+00 1.31904507e+01 ... -3.03690934e+00 -2.61371822e+01 -1.03235359e+01] [ 1.14817858e+01 1.15197954e+01 5.91796494e+00 ... 3.41568023e-01 2.35853124e+00 2.01632462e+01]] [[ 2.66553187e+00 -1.11624515e+00 3.78072834e+00 ... 
-5.86916780e+00 2.51890683e+00 -6.91194105e+00] [-5.00098372e+00 -3.37319279e+00 7.75462627e+00 ... 1.77658165e+00 -2.58171940e+00 6.95331526e+00] [ 1.01390610e+01 -1.13029919e+01 -1.90346003e+00 ... -6.20332098e+00 1.39483586e-02 7.80871391e+00] ... [ 4.18696976e+00 1.14125605e+01 4.55983305e+00 ... -4.49240780e+00 1.28176708e+01 -1.10539036e+01] [-7.37659979e+00 9.51632786e+00 -6.51554298e+00 ... 9.72721386e+00 -1.14372015e+01 -2.26030874e+00] [-9.53900051e+00 -7.12908697e+00 8.04877567e+00 ... 5.15356350e+00 2.26371193e+01 -1.21854668e+01]] ... [[-1.19915199e+00 -2.40997016e-01 1.27994394e+01 ... -7.75015688e+00 -1.85454330e+01 6.36903867e-02] [ 1.57897215e+01 1.14251938e+01 1.38465118e+00 ... 3.97237229e+00 -1.34072218e+01 2.34214377e+00] [ 1.44359407e+01 -1.37440920e+00 3.83199191e+00 ... 6.97681093e+00 2.89562345e+00 5.74383020e+00] ... [ 4.62801743e+00 7.96977854e+00 -2.94926781e-02 ... 6.27433157e+00 1.17947683e+01 -2.52577591e+00] [-1.19347401e+01 7.80280888e-01 -3.57404184e+00 ... -2.32689309e+00 1.70726156e+00 1.50816989e+00] [ 7.51448750e-01 4.16537476e+00 5.69537354e+00 ... -1.05292034e+01 2.48584080e+01 2.59084439e+00]] [[-4.69225526e-01 -6.96634912e+00 -4.94210863e+00 ... -2.08414783e+01 2.96934009e+00 -1.84349670e+01] [-8.20884418e+00 -7.69758892e+00 -2.26877499e+00 ... 1.39954939e+01 6.94517565e+00 1.27303009e+01] [-7.44866705e+00 -5.88052750e+00 1.08635550e+01 ... 1.29819069e+01 2.72006679e+00 -8.62034321e+00] ... [ 4.25835705e+00 2.16605434e+01 -7.15420723e+00 ... -4.00732517e+00 -8.78264713e+00 4.61801863e+00] [ 2.53842771e-01 6.55247569e-01 -7.13552380e+00 ... -8.90211678e+00 -4.27121496e+00 -9.24624825e+00] [ 9.77080250e+00 -1.03221292e+01 4.90851927e+00 ... -6.99447346e+00 -5.67901993e+00 1.32647429e+01]] [[ 2.04331899e+00 5.35504389e+00 1.12920494e+01 ... -9.83806801e+00 -4.87487841e+00 2.14056435e+01] [-9.45401001e+00 -3.58468294e+00 2.83587646e+00 ... 
-4.24426937e+00 5.72580671e+00 2.85192299e+00] [-2.46770883e+00 6.26653290e+00 6.94782639e+00 ... 1.14825048e+01 -9.70317173e+00 1.04351902e+01] ... [-7.32485235e-01 2.13102746e+00 9.46906471e+00 ... 1.36257648e+01 4.64322662e+00 -2.75817680e+00] [ 2.31551933e+01 -3.77117133e+00 7.63827467e+00 ... -1.77455006e+01 -8.12349987e+00 1.68001688e+00] [-5.68611002e+00 4.60713530e+00 -2.08510990e+01 ... -7.87111712e+00 -5.68561506e+00 1.75193081e+01]]]] [[[[-4.22427797e+00 4.05192232e+00 -7.80986643e+00 ... -5.84449470e-01 1.26305761e+01 -6.41724586e+00] [-7.23308945e+00 -3.72505283e+00 2.06143570e+00 ... -1.50361013e+01 7.30676508e+00 -6.80276394e+00] [ 3.56411743e+00 -3.82066488e+00 -6.60436487e+00 ... -1.09739017e+01 1.13516808e+01 5.76062059e+00] ... [ 3.91553187e+00 8.90044594e+00 2.00448990e+00 ... 2.01030326e+00 -4.21742582e+00 2.84609532e+00] [ 1.13434429e+01 -6.19638491e+00 -1.40067101e+00 ... 3.97973108e+00 9.38099766e+00 -2.71120000e+00] [ 7.20725238e-01 -3.70146775e+00 -9.91222477e+00 ... -6.54618835e+00 1.32212842e+00 2.62313724e+00]] [[ 8.69802856e+00 2.47680640e+00 -1.69976902e+01 ... 2.00007896e+01 9.37302589e+00 -9.92765903e+00] [ 1.04225616e+01 2.45233864e-01 1.36528301e+01 ... -1.72629821e+00 1.35588779e+01 7.74229717e+00] [-1.12458572e+01 -4.06644535e+00 -2.99011540e+00 ... -3.44841743e+00 9.51906967e+00 2.33045429e-01] ... [ 4.92366171e+00 -8.09962082e+00 4.03202105e+00 ... 1.59168901e+01 -4.98804140e+00 1.28692713e+01] [-6.70019579e+00 9.10857868e+00 1.33078632e+01 ... -4.98297787e+00 3.89308190e+00 5.04434705e-01] [ 7.72972703e-01 -1.66012657e+00 6.33699322e+00 ... -4.20826769e+00 4.31583017e-01 -6.90203667e+00]] [[ 1.18413830e+00 1.21166477e+01 1.49870110e+00 ... -2.79454732e+00 -1.43589640e+01 6.97201490e+00] [ 8.97294140e+00 8.54583740e+00 -6.72315264e+00 ... 9.28220081e+00 1.13680553e+01 7.98744535e+00] [-2.90336633e+00 -9.64393711e+00 -2.00889182e+00 ... -6.19178200e+00 -4.00976300e-01 8.85624027e+00] ... 
[ 1.09515562e+01 2.57039404e+00 6.85488892e+00 ... 2.56868720e+00 -9.15625751e-01 -1.59220343e+01] [ 2.02344656e+00 -5.91229057e+00 9.99496460e+00 ... -2.26674595e+01 1.34935284e+01 -5.65271914e-01] [ 2.62897635e+00 -2.32090235e+00 3.92869353e+00 ... -5.72058201e-01 1.23768606e+01 -3.35736442e+00]] ... [[-5.55618763e+00 -9.46539211e+00 4.75831795e+00 ... 1.64667606e+01 -7.04289198e+00 2.06279144e+01] [ 3.31878018e+00 1.36763792e+01 -2.30013847e+00 ... 5.05357456e+00 1.00862350e+01 -8.86034203e+00] [ 1.87759972e+00 -5.08928013e+00 -1.63806236e+00 ... 1.79662609e+01 -2.48754025e+00 -6.48285532e+00] ... [ 2.16066074e+00 9.59879208e+00 2.38624859e+00 ... 4.16845560e+00 9.49000835e+00 -4.15303755e+00] [ 9.60263920e+00 -1.87294407e+01 -9.63491821e+00 ... 4.14537525e+00 7.47765779e+00 8.91731930e+00] [-1.38049722e+00 -3.73412061e+00 9.43454838e+00 ... -7.69746208e+00 2.55014534e+01 -1.23142872e+01]] [[-1.65053024e+01 -6.59447789e-01 5.36873844e-03 ... 4.76940250e+00 1.25786953e+01 1.19240255e+01] [ 2.13729930e+00 1.92805386e+00 4.50855112e+00 ... -1.79898491e+01 -1.56444120e+01 -2.80940418e+01] [-3.78473711e+00 1.92646936e-01 -5.22843075e+00 ... 5.68049383e+00 3.76730943e+00 1.06657953e+01] ... [ 1.80244052e+00 1.29708853e+01 4.42959690e+00 ... 1.09979191e+01 1.23953304e+01 1.45439110e+01] [ 1.90261707e-01 -8.16801167e+00 -4.61804867e+00 ... -2.38059968e-01 -6.98180676e+00 -1.55960119e+00] [-1.15819179e-01 1.37065372e+01 6.75186539e+00 ... -3.32458878e+00 5.55047321e+00 -6.03391266e+00]] [[ 7.36057854e+00 -9.70087290e-01 4.84766388e+00 ... 7.33421135e+00 -2.27321506e+00 2.70754433e+00] [ 1.13550949e+01 -1.74391603e+00 2.92013073e+00 ... 1.20018840e+00 3.02410245e+00 6.68711853e+00] [-7.60428131e-01 4.18426275e+00 -1.47618132e+01 ... -9.07536793e+00 1.20828629e+01 -9.49576378e-01] ... [ 1.80873299e+00 -5.39316559e+00 6.33805799e+00 ... 1.88494945e+00 -3.13328207e-01 1.05769806e+01] [-2.16393261e+01 1.42307281e+01 -3.97075176e-01 ... 
-3.71457148e+00 -7.53026485e+00 1.46212893e+01] [-1.45733042e+01 1.08650522e+01 -5.60701227e+00 ... 8.07930946e+00 -1.20215769e+01 -5.84669113e+00]]]]]; ov_res: [[[[[ 1.14262018e+01 1.63769569e+01 5.39559364e+00 ... -2.30106468e+01 -9.41119862e+00 -1.72492485e+01] [-2.63072205e+00 4.33323956e+00 1.02905588e+01 ... -1.16041422e+01 -5.91134930e+00 -3.74069905e+00] [ 1.16444321e+01 -1.33444357e+01 4.69036198e+00 ... -7.20898986e-01 8.84782493e-01 -4.81057930e+00] ... [-5.44688988e+00 -1.32615128e+01 8.91366100e+00 ... 4.95707893e+00 -5.95072460e+00 -9.67060280e+00] [-3.44048738e+00 -3.00318033e-01 -3.71222591e+00 ... 6.89507675e+00 -5.66769505e+00 -1.64230120e+00] [ 2.22648764e+00 -1.98039043e+00 4.18234158e+00 ... 2.88020611e+00 1.38180418e+01 -5.41499710e+00]] [[-1.10704098e+01 1.19480200e+01 -9.75335979e+00 ... -9.17385197e+00 7.68156672e+00 -1.15068722e+01] [ 1.88516827e+01 -5.00455284e+00 -3.48714375e+00 ... -2.37083282e+01 1.67414665e+01 -1.69730434e+01] [ 5.95657778e+00 -2.77415924e+01 7.21080399e+00 ... 1.34314413e+01 9.28638279e-01 -9.39702511e+00] ... [ 1.16361628e+01 -8.93601120e-01 -5.84577227e+00 ... 1.79906178e+01 -6.04733992e+00 2.66462250e+01] [ 1.82019615e+01 -7.12701178e+00 1.31904507e+01 ... -3.03690934e+00 -2.61371822e+01 -1.03235359e+01] [ 1.14817858e+01 1.15197954e+01 5.91796494e+00 ... 3.41568023e-01 2.35853124e+00 2.01632462e+01]] [[ 2.66553187e+00 -1.11624515e+00 3.78072834e+00 ... -5.86916780e+00 2.51890683e+00 -6.91194105e+00] [-5.00098372e+00 -3.37319279e+00 7.75462627e+00 ... 1.77658165e+00 -2.58171940e+00 6.95331526e+00] [ 1.01390610e+01 -1.13029919e+01 -1.90346003e+00 ... -6.20332098e+00 1.39483586e-02 7.80871391e+00] ... [ 4.18696976e+00 1.14125605e+01 4.55983305e+00 ... -4.49240780e+00 1.28176708e+01 -1.10539036e+01] [-7.37659979e+00 9.51632786e+00 -6.51554298e+00 ... 9.72721386e+00 -1.14372015e+01 -2.26030874e+00] [-9.53900051e+00 -7.12908697e+00 8.04877567e+00 ... 5.15356350e+00 2.26371193e+01 -1.21854668e+01]] ... 
[[-1.19915199e+00 -2.40997016e-01 1.27994394e+01 ... -7.75015688e+00 -1.85454330e+01 6.36903867e-02] [ 1.57897215e+01 1.14251938e+01 1.38465118e+00 ... 3.97237229e+00 -1.34072218e+01 2.34214377e+00] [ 1.44359407e+01 -1.37440920e+00 3.83199191e+00 ... 6.97681093e+00 2.89562345e+00 5.74383020e+00] ... [ 4.62801743e+00 7.96977854e+00 -2.94926781e-02 ... 6.27433157e+00 1.17947683e+01 -2.52577591e+00] [-1.19347401e+01 7.80280888e-01 -3.57404184e+00 ... -2.32689309e+00 1.70726156e+00 1.50816989e+00] [ 7.51448750e-01 4.16537476e+00 5.69537354e+00 ... -1.05292034e+01 2.48584080e+01 2.59084439e+00]] [[-4.69225526e-01 -6.96634912e+00 -4.94210863e+00 ... -2.08414783e+01 2.96934009e+00 -1.84349670e+01] [-8.20884418e+00 -7.69758892e+00 -2.26877499e+00 ... 1.39954939e+01 6.94517565e+00 1.27303009e+01] [-7.44866705e+00 -5.88052750e+00 1.08635550e+01 ... 1.29819069e+01 2.72006679e+00 -8.62034321e+00] ... [ 4.25835705e+00 2.16605434e+01 -7.15420723e+00 ... -4.00732517e+00 -8.78264713e+00 4.61801863e+00] [ 2.53842771e-01 6.55247569e-01 -7.13552380e+00 ... -8.90211678e+00 -4.27121496e+00 -9.24624825e+00] [ 9.77080250e+00 -1.03221292e+01 4.90851927e+00 ... -6.99447346e+00 -5.67901993e+00 1.32647429e+01]] [[ 2.04331899e+00 5.35504389e+00 1.12920494e+01 ... -9.83806801e+00 -4.87487841e+00 2.14056435e+01] [-9.45401001e+00 -3.58468294e+00 2.83587646e+00 ... -4.24426937e+00 5.72580671e+00 2.85192299e+00] [-2.46770883e+00 6.26653290e+00 6.94782639e+00 ... 1.14825048e+01 -9.70317173e+00 1.04351902e+01] ... [-7.32485235e-01 2.13102746e+00 9.46906471e+00 ... 1.36257648e+01 4.64322662e+00 -2.75817680e+00] [ 2.31551933e+01 -3.77117133e+00 7.63827467e+00 ... -1.77455006e+01 -8.12349987e+00 1.68001688e+00] [-5.68611002e+00 4.60713530e+00 -2.08510990e+01 ... -7.87111712e+00 -5.68561506e+00 1.75193081e+01]]]] [[[[-4.22427797e+00 4.05192232e+00 -7.80986643e+00 ... -5.84449470e-01 1.26305761e+01 -6.41724586e+00] [-7.23308945e+00 -3.72505283e+00 2.06143570e+00 ... 
-1.50361013e+01 7.30676508e+00 -6.80276394e+00] [ 3.56411743e+00 -3.82066488e+00 -6.60436487e+00 ... -1.09739017e+01 1.13516808e+01 5.76062059e+00] ... [ 3.91553187e+00 8.90044594e+00 2.00448990e+00 ... 2.01030326e+00 -4.21742582e+00 2.84609532e+00] [ 1.13434429e+01 -6.19638491e+00 -1.40067101e+00 ... 3.97973108e+00 9.38099766e+00 -2.71120000e+00] [ 7.20725238e-01 -3.70146775e+00 -9.91222477e+00 ... -6.54618835e+00 1.32212842e+00 2.62313724e+00]] [[ 8.69802856e+00 2.47680640e+00 -1.69976902e+01 ... 2.00007896e+01 9.37302589e+00 -9.92765903e+00] [ 1.04225616e+01 2.45233864e-01 1.36528301e+01 ... -1.72629821e+00 1.35588779e+01 7.74229717e+00] [-1.12458572e+01 -4.06644535e+00 -2.99011540e+00 ... -3.44841743e+00 9.51906967e+00 2.33045429e-01] ... [ 4.92366171e+00 -8.09962082e+00 4.03202105e+00 ... 1.59168901e+01 -4.98804140e+00 1.28692713e+01] [-6.70019579e+00 9.10857868e+00 1.33078632e+01 ... -4.98297787e+00 3.89308190e+00 5.04434705e-01] [ 7.72972703e-01 -1.66012657e+00 6.33699322e+00 ... -4.20826769e+00 4.31583017e-01 -6.90203667e+00]] [[ 1.18413830e+00 1.21166477e+01 1.49870110e+00 ... -2.79454732e+00 -1.43589640e+01 6.97201490e+00] [ 8.97294140e+00 8.54583740e+00 -6.72315264e+00 ... 9.28220081e+00 1.13680553e+01 7.98744535e+00] [-2.90336633e+00 -9.64393711e+00 -2.00889182e+00 ... -6.19178200e+00 -4.00976300e-01 8.85624027e+00] ... [ 1.09515562e+01 2.57039404e+00 6.85488892e+00 ... 2.56868720e+00 -9.15625751e-01 -1.59220343e+01] [ 2.02344656e+00 -5.91229057e+00 9.99496460e+00 ... -2.26674595e+01 1.34935284e+01 -5.65271914e-01] [ 2.62897635e+00 -2.32090235e+00 3.92869353e+00 ... -5.72058201e-01 1.23768606e+01 -3.35736442e+00]] ... [[-5.55618763e+00 -9.46539211e+00 4.75831795e+00 ... 1.64667606e+01 -7.04289198e+00 2.06279144e+01] [ 3.31878018e+00 1.36763792e+01 -2.30013847e+00 ... 5.05357456e+00 1.00862350e+01 -8.86034203e+00] [ 1.87759972e+00 -5.08928013e+00 -1.63806236e+00 ... 1.79662609e+01 -2.48754025e+00 -6.48285532e+00] ... 
[ 2.16066074e+00 9.59879208e+00 2.38624859e+00 ... 4.16845560e+00 9.49000835e+00 -4.15303755e+00] [ 9.60263920e+00 -1.87294407e+01 -9.63491821e+00 ... 4.14537525e+00 7.47765779e+00 8.91731930e+00] [-1.38049722e+00 -3.73412061e+00 9.43454838e+00 ... -7.69746208e+00 2.55014534e+01 -1.23142872e+01]] [[-1.65053024e+01 -6.59447789e-01 5.36873844e-03 ... 4.76940250e+00 1.25786953e+01 1.19240255e+01] [ 2.13729930e+00 1.92805386e+00 4.50855112e+00 ... -1.79898491e+01 -1.56444120e+01 -2.80940418e+01] [-3.78473711e+00 1.92646936e-01 -5.22843075e+00 ... 5.68049383e+00 3.76730943e+00 1.06657953e+01] ... [ 1.80244052e+00 1.29708853e+01 4.42959690e+00 ... 1.09979191e+01 1.23953304e+01 1.45439110e+01] [ 1.90261707e-01 -8.16801167e+00 -4.61804867e+00 ... -2.38059968e-01 -6.98180676e+00 -1.55960119e+00] [-1.15819179e-01 1.37065372e+01 6.75186539e+00 ... -3.32458878e+00 5.55047321e+00 -6.03391266e+00]] [[ 7.36057854e+00 -9.70087290e-01 4.84766388e+00 ... 7.33421135e+00 -2.27321506e+00 2.70754433e+00] [ 1.13550949e+01 -1.74391603e+00 2.92013073e+00 ... 1.20018840e+00 3.02410245e+00 6.68711853e+00] [-7.60428131e-01 4.18426275e+00 -1.47618132e+01 ... -9.07536793e+00 1.20828629e+01 -9.49576378e-01] ... [ 1.80873299e+00 -5.39316559e+00 6.33805799e+00 ... 1.88494945e+00 -3.13328207e-01 1.05769806e+01] [-2.16393261e+01 1.42307281e+01 -3.97075176e-01 ... -3.71457148e+00 -7.53026485e+00 1.46212893e+01] [-1.45733042e+01 1.08650522e+01 -5.60701227e+00 ... 8.07930946e+00 -1.20215769e+01 -5.84669113e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 2, 2], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_521.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.1040 0.1973 0.8031 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, 2, strides=[12, 4, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[-0.71563125 2.8368554 1.1468327 0.7499038 2.396932 3.0420055 1.9596992 0.3831572 -0.36451507 1.270297 -0.24486089] [ 0.4799139 2.0815916 -0.07717466 2.8705297 3.0633783 1.0530858 -1.9341793 2.425027 0.13860649 0.07174218 0.66346717] [-1.2493482 1.0941753 1.9979852 0.18972766 1.4413942 2.2201781 0.22388762 2.397252 0.5145375 1.7284459 -0.7039206 ] [-0.571769 4.629966 -1.0717332 1.6742675 0.4602958 0.3076161 2.2764137 -0.5868349 -0.16870141 3.7171175 0.12330866] [-0.18725109 2.4430907 2.4952703 2.9672744 -1.7930424 0.39626735 3.6836002 -2.662881 2.9905717 1.0561478 -0.56346476] [ 2.2694006 1.1355581 -1.3810015 4.048955 0.54769987 -1.446321 1.6963093 -1.8257446 4.4024487 1.1639724 0.15005964] [ 1.3309715 1.6333969 1.2135417 -3.4952958 -0.49108648 5.07685 2.284792 -0.0892446 1.8767763 -0.49929154 -0.6428622 ] [ 3.030626 -0.44465077 -1.2833033 3.0677814 1.1931423 3.4440837 2.6048975 1.5204208 -0.4727286 -0.67142 2.3634872 ] [ 1.5146205 -1.1563756 3.0673778 
0.12340701 -0.14461875 -0.08715773 5.003274 -0.05190706 2.6468635 2.2880397 0.5661228 ] [ 0.31596577 2.094945 4.2237597 0.8219986 -1.0857103 -0.46933007 0.80933297 4.8078566 -0.15094078 -0.51969826 -1.1049473 ] [ 1.0312783 2.2809372 0.14979112 1.4154718 0.8717405 1.8351631 0.4517643 -0.1305654 1.0859518 -1.0769987 1.7979972 ]] [[ 0.833254 2.3224058 0.98093784 -0.24717447 -1.1326364 0.502362 -2.521175 -0.37287036 0.24782208 -0.5106715 1.9697086 ] [ 1.618868 -0.8924022 2.107023 2.67708 -0.05404803 -0.14642057 5.658244 3.6793528 -2.2430866 -3.8425217 -2.0374556 ] [ 0.43253863 0.82974434 1.5171334 0.7345848 3.919364 -1.1537075 0.47631207 -8.101598 3.6033955 0.39115387 -3.039258 ] [ 1.2960988 -3.8098226 -5.302312 2.3096766 2.8296938 1.177072 0.65363777 4.5990906 -4.6335106 -1.413307 6.412679 ] [ 4.009413 4.319413 1.0923706 -8.177465 0.8848789 2.1093583 -8.269886 2.0712907 1.2118323 -2.0142295 -5.4626827 ] [-0.85457253 -2.4134183 5.821442 3.8207462 -1.2502245 -5.4853497 -0.29592806 2.0446434 6.697548 1.5642099 3.302008 ] [ 2.135414 1.4188317 2.583087 -4.5970635 3.817093 6.4997644 -3.94776 3.435411 -2.4801958 1.7483727 2.007709 ] [-1.4114476 -0.5169196 0.02755657 -3.5023363 -3.3798418 1.9205104 2.0898774 -4.719943 -0.13674656 -5.921798 -4.3834443 ] [ 0.7380606 1.541683 0.39329508 2.703087 8.802436 4.2825494 4.721359 4.629365 0.72211206 7.841007 3.042471 ] [-1.8637381 6.4210567 0.91571164 -0.6187911 -3.6747942 1.1803896 -2.733104 3.948825 -1.8418539 -3.522869 -3.1044326 ] [ 2.668745 -2.4733257 0.97409415 -0.08456436 -1.3643963 1.0560105 0.4704771 -2.4642277 0.6326921 -3.5985494 -0.55344653]] [[-1.5947685 2.1841795 -0.16479808 -0.33274072 -0.55203825 2.1515481 -0.03162372 1.7234693 0.774298 1.796737 1.7300935 ] [-2.2102866 0.1022175 1.6191759 0.580503 -1.6053314 1.0596471 1.9360983 5.655469 -0.28501648 -0.41053122 1.1245291 ] [-2.0067658 -0.46118516 2.553318 0.8032079 2.967533 -0.32608598 -1.4236908 -1.5804927 6.297225 3.4266722 -2.1633503 ] [-3.409399 -0.5010318 -1.5874424 
1.8282697 -2.0890298 -2.1799831 1.0805374 2.7575445 -1.0552149 3.7576213 6.518399 ] [-2.0729296 2.7973905 1.2045832 -1.05 1.7539253 -1.2567718 -0.8320791 3.125061 -0.97166175 0.5951026 -1.7791753 ] [ 0.51970583 1.3158593 -0.59822494 3.8687544 2.0111134 -4.0822377 2.6026866 -1.7434204 1.555728 2.6980991 2.6737611 ] [ 1.7971013 0.90094835 1.9888277 -2.6866014 2.2288587 -0.9132151 -5.1619897 -0.08586067 -2.6680522 2.946189 2.3856966 ] [ 2.6545691 1.2807832 1.2664444 -0.510984 -1.7964201 1.1027716 0.8072333 -4.933458 -1.355865 -4.539881 -0.2519862 ] [ 4.0207963 2.3891642 0.8784848 -2.1331608 2.6040146 -1.1076119 5.5546546 1.8458855 -3.8385367 -0.83293754 -0.17288798] [-2.1544611 3.1520474 2.761504 2.1578236 -1.107095 -3.4287724 -3.121709 4.7517495 -0.19024962 2.7252789 1.4153945 ] [-2.057409 -3.249106 1.5017796 1.2312486 0.6188314 0.76228875 -0.7003558 0.23763579 1.3299952 -0.7576116 0.07584667]]]]; ov_res: [[[[-0.71563125 2.8368554 1.1468327 0.7499038 2.396932 3.0420055 1.9596992 0.3831572 -0.36451495 1.270297 -0.24486089] [ 0.4799139 2.0815916 -0.07717478 2.8705297 3.0633783 1.0530857 -1.9341793 2.425027 0.13860643 0.07174218 0.66346717] [-1.2493484 1.0941752 1.9979851 0.18972749 1.4413942 2.2201781 0.2238875 2.397252 0.5145375 1.7284459 -0.7039206 ] [-0.5717689 4.629965 -1.0717332 1.6742675 0.46029586 0.30761606 2.276414 -0.58683467 -0.16870141 3.7171173 0.12330872] [-0.18725097 2.443091 2.49527 2.9672744 -1.7930422 0.3962674 3.6836002 -2.662881 2.9905715 1.0561478 -0.56346464] [ 2.2694006 1.1355581 -1.3810017 4.048955 0.5476999 -1.446321 1.6963093 -1.8257451 4.4024487 1.1639724 0.15005958] [ 1.3309715 1.6333969 1.2135416 -3.4952962 -0.4910866 5.07685 2.284792 -0.08924472 1.8767762 -0.49929154 -0.6428622 ] [ 3.030626 -0.44465077 -1.2833033 3.067782 1.1931422 3.444084 2.6048975 1.5204209 -0.47272873 -0.67142 2.3634872 ] [ 1.5146205 -1.1563759 3.0673776 0.12340653 -0.14461863 -0.08715785 5.003274 -0.05190694 2.6468635 2.2880397 0.5661228 ] [ 0.31596583 2.094945 
4.2237597 0.82199866 -1.0857105 -0.46933007 0.80933285 4.8078566 -0.1509409 -0.5196984 -1.1049473 ] [ 1.0312783 2.2809372 0.14979118 1.4154718 0.8717405 1.8351631 0.45176435 -0.1305654 1.0859517 -1.0769987 1.7979972 ]] [[ 0.833254 2.3224056 0.98093796 -0.24717441 -1.1326364 0.50236213 -2.521175 -0.37287036 0.24782206 -0.5106715 1.9697086 ] [ 1.618868 -0.8924023 2.1070232 2.6770797 -0.054048 -0.14642054 5.658244 3.6793532 -2.2430866 -3.8425212 -2.0374556 ] [ 0.43253863 0.8297446 1.5171334 0.7345849 3.9193642 -1.1537075 0.47631207 -8.101597 3.6033957 0.39115387 -3.039258 ] [ 1.2960988 -3.8098226 -5.3023114 2.3096766 2.8296938 1.177072 0.65363777 4.5990915 -4.63351 -1.4133068 6.412679 ] [ 4.009413 4.3194127 1.0923706 -8.177465 0.88487875 2.1093583 -8.269886 2.0712905 1.2118324 -2.0142295 -5.4626827 ] [-0.85457253 -2.4134183 5.821442 3.8207462 -1.2502245 -5.4853497 -0.2959283 2.0446436 6.697547 1.5642098 3.3020077 ] [ 2.135414 1.4188318 2.583087 -4.597064 3.8170938 6.499764 -3.94776 3.4354107 -2.4801958 1.7483728 2.0077093 ] [-1.4114476 -0.5169196 0.02755657 -3.5023365 -3.3798418 1.9205104 2.0898771 -4.7199426 -0.1367465 -5.9217987 -4.3834443 ] [ 0.7380606 1.5416832 0.39329493 2.7030869 8.802436 4.2825494 4.7213583 4.6293645 0.72211206 7.8410068 3.0424707 ] [-1.8637381 6.4210567 0.9157119 -0.6187916 -3.6747944 1.1803895 -2.733104 3.9488246 -1.8418541 -3.5228689 -3.1044328 ] [ 2.668745 -2.473326 0.9740943 -0.08456433 -1.3643963 1.0560106 0.47047707 -2.4642277 0.632692 -3.5985494 -0.55344653]] [[-1.5947685 2.1841795 -0.16479814 -0.33274072 -0.55203825 2.1515481 -0.03162372 1.7234693 0.774298 1.796737 1.7300935 ] [-2.2102866 0.10221744 1.6191758 0.580503 -1.6053314 1.0596471 1.9360983 5.655469 -0.28501636 -0.4105311 1.1245291 ] [-2.0067658 -0.46118516 2.553318 0.80320776 2.9675329 -0.32608598 -1.4236908 -1.5804927 6.297225 3.426672 -2.1633503 ] [-3.409399 -0.50103194 -1.5874426 1.8282697 -2.0890298 -2.1799834 1.0805374 2.7575445 -1.0552149 3.7576213 6.5183983 ] 
[-2.0729296 2.7973905 1.2045829 -1.05 1.7539253 -1.2567718 -0.8320791 3.1250608 -0.9716616 0.5951024 -1.7791755 ] [ 0.5197058 1.3158593 -0.59822494 3.8687546 2.0111136 -4.082238 2.6026866 -1.7434204 1.555728 2.6980994 2.6737611 ] [ 1.7971013 0.9009484 1.9888277 -2.6866012 2.2288587 -0.9132151 -5.16199 -0.08586079 -2.668052 2.9461887 2.3856966 ] [ 2.6545691 1.280783 1.2664444 -0.510984 -1.7964201 1.1027718 0.8072333 -4.933458 -1.355865 -4.5398817 -0.2519862 ] [ 4.0207963 2.3891642 0.8784849 -2.1331608 2.6040146 -1.1076121 5.5546546 1.8458853 -3.8385372 -0.83293766 -0.17288804] [-2.1544611 3.1520474 2.761504 2.1578233 -1.107095 -3.4287724 -3.1217089 4.7517495 -0.19024926 2.7252789 1.4153945 ] [-2.057409 -3.249106 1.5017796 1.2312486 0.6188314 0.76228875 -0.7003558 0.23763573 1.3299952 -0.75761145 0.07584667]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 2, 2], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_523.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.3110 0.5177 -1.4283 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, 2, strides=[12, 4, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[ 8.4379494e-02 -3.9351084e+00 -6.7110217e-01 -2.8178270e+00 4.1003604e+00 2.8371904e+00 -3.8952005e-01 -3.1374257e+00 1.3282079e+00] [-1.5366045e+00 -7.3576169e+00 1.3159630e+00 -5.4893880e+00 3.9914157e+00 -6.5407020e-01 8.7448037e-01 5.5383692e+00 -4.1019750e+00] [-4.7827611e+00 2.1389475e+00 1.5468872e+00 1.7752123e+00 3.6176028e+00 2.9485195e+00 -2.0632246e+00 1.7007565e+00 -5.5761814e-01] [-3.2341249e+00 1.4351418e+00 9.9342954e-01 1.6526886e+00 -6.3995308e-01 1.3879773e-01 -3.5458181e+00 -4.3112164e+00 2.8898814e+00] [ 3.1049135e+00 8.1886292e-01 2.4371377e-01 -7.2286785e-01 5.6710949e+00 -3.0226829e+00 -2.1487176e-03 -2.1388860e+00 3.9726241e+00] [ 8.1873178e-02 8.1126821e-01 -2.7815440e+00 -1.5827821e+00 1.0921080e+00 -6.1218295e+00 5.7301383e+00 -2.9781733e+00 5.5417120e-01] [-1.2883899e+00 -2.0321205e+00 -9.4231486e-01 -2.5530484e+00 4.1201162e+00 -3.7849591e+00 2.3742604e+00 -4.7103300e+00 -3.2415683e+00] [ 1.0918181e+00 -2.5020442e+00 1.4781233e+00 5.4195225e-02 6.6254773e+00 -8.7452865e-01 -3.3228106e+00 
-1.3085144e+00 4.7159939e+00] [ 1.8538783e+00 -9.3749955e-02 3.6446619e+00 2.1938422e+00 -3.5509157e+00 -4.1849613e+00 -1.4393290e+00 2.2012115e+00 1.2135483e+00]] [[ 2.6262710e+00 -3.5380886e+00 3.7157776e+00 -2.5183923e+00 -6.0812587e-01 1.6075282e+00 1.7788069e+00 3.4494054e+00 9.3394721e-01] [ 2.1394105e+00 2.7587845e+00 -3.4380786e+00 -3.3079846e+00 8.2123766e+00 5.9197969e+00 -5.0375090e+00 4.3062916e+00 -3.0058873e+00] [-5.4827137e+00 6.8173814e-01 2.2091606e+00 1.6021092e+00 -1.9699252e+00 7.8310432e+00 -2.5221176e+00 -3.4759986e+00 5.8576484e+00] [ 1.0827615e+00 7.3804331e-01 4.5190992e+00 -7.9045528e-01 -1.7598298e+00 5.5991125e-01 -2.4733043e-01 -3.9657199e+00 -1.1665356e+00] [-8.5421383e-02 6.8231196e+00 -4.8752480e+00 5.4490328e+00 5.2401366e+00 -6.8904552e+00 1.0160979e+01 -1.0031907e+01 1.7486410e+00] [-6.2832826e-01 -1.1806459e+00 1.8272755e+00 3.6257050e+00 2.2666452e+00 -5.5655676e-01 3.7193646e+00 3.4889299e-01 -2.2554488e+00] [ 1.8356931e-01 -1.4705391e+00 -6.1780339e-01 -2.5239058e+00 7.1969275e+00 -1.5584791e+00 2.9557240e+00 -2.0621095e+00 2.8728375e+00] [ 1.2413038e+00 -1.2451761e+00 2.5123513e+00 3.6987883e-01 -1.1697853e+00 -3.9923382e-01 -3.0625291e+00 9.5612466e-02 3.9335268e+00] [-1.2923591e+00 4.7308359e+00 1.0000820e+01 6.4175129e-03 1.7180326e+00 -9.6592970e+00 4.8688273e+00 5.5228934e+00 -2.4154689e+00]] [[-4.4161224e+00 -1.4981893e+00 8.6680555e-01 3.1135798e+00 -4.8769674e+00 -2.2350893e+00 -1.1747400e+00 -3.8405209e+00 7.7210565e+00] [-6.7971511e+00 1.8787079e+00 -2.4894843e+00 -1.1238203e+01 -1.2324245e+00 1.2359800e+00 2.8185549e+00 -8.8745570e-01 -2.1574540e+00] [ 2.5241518e-01 -1.2354831e+00 -5.9175177e+00 1.9532602e+00 8.1453362e+00 -3.3664429e-01 -1.2230630e+00 -2.7994156e+00 1.9312901e+00] [ 4.9112225e-01 -6.5385866e+00 6.1023560e+00 -4.8759937e+00 -1.7420255e+00 1.2461648e+00 -1.0945381e+01 8.5962129e-01 -3.5474463e+00] [-1.0990262e-01 -4.9600178e-01 -1.1325793e+00 -4.4188643e+00 2.7141991e+00 -2.5927110e+00 2.1051464e+00 
-1.0153224e+00 -9.4612134e-01] [ 2.6785886e-01 -4.2160378e+00 -4.1634607e+00 -2.4432666e+00 -2.9368525e+00 2.2478876e+00 -3.4476991e+00 -4.3082800e+00 -8.0275142e-01] [-3.0658400e-01 -3.2130215e+00 -1.4980738e+00 -4.3524685e+00 -1.9141496e+00 5.4479074e+00 -3.0006778e+00 -5.7136168e+00 -4.1931620e+00] [-2.6011610e+00 -7.6611848e+00 -1.3504336e+00 -1.6791325e+00 3.3485456e+00 2.1099656e+00 -1.3173722e+00 -1.1183981e+01 9.1779842e+00] [-5.8520746e-01 2.9877865e-01 5.2480836e+00 1.8324745e-01 1.3550665e+00 -1.0257108e+00 -5.8284369e+00 3.2593417e+00 -6.9834095e-01]]]]; ov_res: [[[[ 8.4379494e-02 -3.9351082e+00 -6.7110240e-01 -2.8178270e+00 4.1003599e+00 2.8371909e+00 -3.8952014e-01 -3.1374254e+00 1.3282080e+00] [-1.5366048e+00 -7.3576169e+00 1.3159629e+00 -5.4893885e+00 3.9914153e+00 -6.5407038e-01 8.7448061e-01 5.5383692e+00 -4.1019750e+00] [-4.7827611e+00 2.1389472e+00 1.5468874e+00 1.7752125e+00 3.6176026e+00 2.9485192e+00 -2.0632246e+00 1.7007570e+00 -5.5761802e-01] [-3.2341247e+00 1.4351419e+00 9.9342954e-01 1.6526884e+00 -6.3995302e-01 1.3879773e-01 -3.5458181e+00 -4.3112164e+00 2.8898811e+00] [ 3.1049137e+00 8.1886268e-01 2.4371359e-01 -7.2286779e-01 5.6710949e+00 -3.0226829e+00 -2.1486580e-03 -2.1388862e+00 3.9726241e+00] [ 8.1873059e-02 8.1126845e-01 -2.7815442e+00 -1.5827824e+00 1.0921082e+00 -6.1218295e+00 5.7301383e+00 -2.9781735e+00 5.5417109e-01] [-1.2883898e+00 -2.0321202e+00 -9.4231486e-01 -2.5530486e+00 4.1201167e+00 -3.7849593e+00 2.3742607e+00 -4.7103300e+00 -3.2415681e+00] [ 1.0918182e+00 -2.5020442e+00 1.4781233e+00 5.4195285e-02 6.6254773e+00 -8.7452853e-01 -3.3228109e+00 -1.3085147e+00 4.7159939e+00] [ 1.8538783e+00 -9.3749881e-02 3.6446619e+00 2.1938422e+00 -3.5509157e+00 -4.1849613e+00 -1.4393290e+00 2.2012112e+00 1.2135483e+00]] [[ 2.6262708e+00 -3.5380886e+00 3.7157772e+00 -2.5183923e+00 -6.0812587e-01 1.6075280e+00 1.7788069e+00 3.4494057e+00 9.3394732e-01] [ 2.1394107e+00 2.7587845e+00 -3.4380786e+00 -3.3079841e+00 8.2123766e+00 
5.9197965e+00 -5.0375090e+00 4.3062916e+00 -3.0058873e+00] [-5.4827137e+00 6.8173820e-01 2.2091606e+00 1.6021092e+00 -1.9699252e+00 7.8310432e+00 -2.5221174e+00 -3.4759986e+00 5.8576479e+00] [ 1.0827615e+00 7.3804325e-01 4.5190992e+00 -7.9045540e-01 -1.7598295e+00 5.5991113e-01 -2.4733043e-01 -3.9657195e+00 -1.1665359e+00] [-8.5421503e-02 6.8231196e+00 -4.8752480e+00 5.4490323e+00 5.2401361e+00 -6.8904557e+00 1.0160980e+01 -1.0031907e+01 1.7486413e+00] [-6.2832814e-01 -1.1806462e+00 1.8272755e+00 3.6257048e+00 2.2666450e+00 -5.5655676e-01 3.7193646e+00 3.4889299e-01 -2.2554488e+00] [ 1.8356943e-01 -1.4705391e+00 -6.1780339e-01 -2.5239055e+00 7.1969280e+00 -1.5584788e+00 2.9557240e+00 -2.0621095e+00 2.8728378e+00] [ 1.2413037e+00 -1.2451761e+00 2.5123513e+00 3.6987895e-01 -1.1697853e+00 -3.9923388e-01 -3.0625291e+00 9.5612347e-02 3.9335268e+00] [-1.2923589e+00 4.7308359e+00 1.0000820e+01 6.4175129e-03 1.7180324e+00 -9.6592970e+00 4.8688278e+00 5.5228934e+00 -2.4154687e+00]] [[-4.4161224e+00 -1.4981894e+00 8.6680555e-01 3.1135802e+00 -4.8769674e+00 -2.2350891e+00 -1.1747402e+00 -3.8405206e+00 7.7210565e+00] [-6.7971511e+00 1.8787076e+00 -2.4894843e+00 -1.1238203e+01 -1.2324245e+00 1.2359800e+00 2.8185554e+00 -8.8745570e-01 -2.1574540e+00] [ 2.5241506e-01 -1.2354829e+00 -5.9175177e+00 1.9532602e+00 8.1453362e+00 -3.3664429e-01 -1.2230630e+00 -2.7994151e+00 1.9312904e+00] [ 4.9112213e-01 -6.5385866e+00 6.1023564e+00 -4.8759933e+00 -1.7420253e+00 1.2461650e+00 -1.0945381e+01 8.5962129e-01 -3.5474463e+00] [-1.0990274e-01 -4.9600166e-01 -1.1325793e+00 -4.4188638e+00 2.7141991e+00 -2.5927110e+00 2.1051459e+00 -1.0153221e+00 -9.4612122e-01] [ 2.6785910e-01 -4.2160378e+00 -4.1634607e+00 -2.4432664e+00 -2.9368525e+00 2.2478876e+00 -3.4476991e+00 -4.3082805e+00 -8.0275142e-01] [-3.0658412e-01 -3.2130218e+00 -1.4980737e+00 -4.3524685e+00 -1.9141499e+00 5.4479074e+00 -3.0006778e+00 -5.7136164e+00 -4.1931620e+00] [-2.6011615e+00 -7.6611848e+00 -1.3504336e+00 -1.6791325e+00 
3.3485460e+00 2.1099656e+00 -1.3173721e+00 -1.1183981e+01 9.1779842e+00] [-5.8520746e-01 2.9877877e-01 5.2480845e+00 1.8324709e-01 1.3550665e+00 -1.0257108e+00 -5.8284369e+00 3.2593417e+00 -6.9834119e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_525.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.3030 1.7517 -0.6129 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.6050 (2,1,.,.) = -1.0964 (3,1,.,.) = -1.5770 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[-0.8205157 -0.6840712 -0.99891144 -1.2521309 -1.1664077 -1.2812476 -0.7153839 -1.6428834 -1.9832596 -0.61670977] [-0.13110471 -2.1210828 -0.96007717 -1.6188923 -0.89304626 -1.8651028 -1.1525749 -1.7929239 -1.326038 -1.1222081 ] [-2.6137142 -1.9755285 -1.3006657 -0.8901331 -0.6524096 -1.2255735 -2.1620429 -0.7671104 -0.97767866 -1.6050478 ] [-0.78821856 -0.17857778 -1.6049011 -1.9609379 -1.798632 -1.6905668 -1.6064706 -0.5679327 -1.0846051 -1.4233323 ] [-1.6126407 -0.8161907 -1.3520912 0.18649364 -1.5636749 -1.4680173 -1.4461834 -1.6992846 -1.7196667 -1.9196774 ] [-1.0522571 -1.5903239 -1.0715597 -1.6326234 -2.1522925 -1.3387978 -2.0712957 -0.91345763 -1.8964181 -1.4212474 ] [-1.1675928 -0.54880595 -0.8115287 -1.1600919 -1.3923653 -1.3530548 -0.97427607 -0.22949195 -0.66957104 -0.6569837 ] [-1.0651134 -1.4142132 -1.8485212 -2.0584817 -0.9484181 -1.4442523 -1.0919445 -0.7539856 
-1.6426975 -2.5222821 ] [-0.8771757 -0.83539337 -1.6645 -1.6862324 -0.39386457 -1.0371993 -1.2613759 -1.2385172 -2.4516473 -1.8933487 ] [-0.5052573 -1.09676 -2.271151 -1.4729643 -1.3231138 -1.7928733 -0.8263829 -1.8522217 -1.8088839 -1.1231786 ]] [[ 0.9113708 0.7907026 2.140435 2.8612027 3.9723048 2.057137 1.3609447 2.1311722 1.9352969 2.149891 ] [ 2.2853272 0.93846744 0.57124364 3.0431483 0.6605674 3.0430775 2.2642221 1.5104103 1.9213948 1.2222452 ] [ 2.9757159 0.46888745 1.6968528 1.7843039 1.3806282 2.1428828 -0.36561656 1.3720694 2.7933269 2.521438 ] [ 0.1796112 1.6506298 1.4250939 1.4568589 0.82617503 1.4967426 1.2389052 2.4340868 2.1243355 2.7442741 ] [ 4.14071 1.7720242 1.2022974 2.021076 2.4844325 2.4153905 1.1246648 1.6695936 2.4896183 0.7996587 ] [ 1.8056515 2.9642153 1.2872831 1.3348882 2.6327114 3.5483341 2.1004982 1.4842141 1.2668874 1.8257086 ] [ 1.9787751 2.1407707 0.77798915 2.3223352 2.7121186 1.5051165 4.355002 -0.33431578 2.302908 1.9732963 ] [ 0.5779501 1.4252553 2.0459797 1.3220265 2.3372064 2.2086382 2.170397 2.3154507 0.03234458 1.9761785 ] [ 2.3220437 2.948381 1.1540885 0.21974504 1.7706124 2.2402048 2.016476 1.9431579 1.875647 1.2818772 ] [ 3.1526213 2.8812604 0.71466947 0.72649634 2.1575494 1.4240682 2.0558653 0.4095087 3.060161 0.9042439 ]] [[-1.4718665 0.6387576 -0.547154 1.3742061 0.47777283 0.68618596 -1.1534566 -0.20922011 -0.14385602 -0.9560708 ] [ 1.4285569 0.3113377 -1.9300808 -1.4947207 1.7209806 -1.9532119 -2.1941729 -2.5407615 1.7882941 -3.1958468 ] [ 3.5677154 0.16396755 -0.3265219 0.5626043 2.3530312 1.3534162 0.3321244 -0.31790528 -2.70349 -0.7927931 ] [-0.9850748 -0.5293397 0.62827134 -2.552271 1.2744075 -0.8303329 -0.34146133 0.13909692 -0.33015373 -1.3250167 ] [-0.03520495 0.36059737 0.8511212 -0.6573102 0.9398112 0.79468477 -1.2601823 -2.4583263 -2.0975602 -0.8099271 ] [ 2.3828893 -2.5982432 -0.5410757 -1.0519326 -3.619079 -0.9269602 0.84026396 -2.7188883 -1.2695214 1.0681714 ] [ 1.5994282 -1.4859631 1.1962821 -1.9121596 
-1.1843592 -0.35457665 0.48949862 1.1051955 -0.07784122 0.906618 ] [-0.9020766 0.28731823 3.8050191 -1.9423251 1.0018696 -1.2708743 2.1278846 1.6806786 0.2002604 -0.26411912] [ 1.9810784 -3.1206615 -0.33801773 0.09597874 -0.7284352 -0.3276184 -1.5114753 -2.2761602 -1.4465942 2.1606438 ] [-0.7284196 -2.5151596 -2.3808718 0.9439837 -0.8816771 -1.8064566 -2.85188 -2.6932433 -1.1948473 -2.819703 ]]]]; ov_res: [[[[-0.8205157 -0.6840712 -0.99891144 -1.2521309 -1.1664077 -1.2812476 -0.7153839 -1.6428834 -1.9832596 -0.61670977] [-0.13110471 -2.1210828 -0.96007717 -1.6188923 -0.89304626 -1.8651028 -1.1525749 -1.7929239 -1.326038 -1.1222081 ] [-2.6137142 -1.9755285 -1.3006657 -0.8901331 -0.6524096 -1.2255735 -2.1620429 -0.7671104 -0.97767866 -1.6050478 ] [-0.78821856 -0.17857778 -1.6049011 -1.9609379 -1.798632 -1.6905668 -1.6064706 -0.5679327 -1.0846051 -1.4233323 ] [-1.6126407 -0.8161907 -1.3520912 0.18649364 -1.5636749 -1.4680173 -1.4461834 -1.6992846 -1.7196667 -1.9196774 ] [-1.0522571 -1.5903239 -1.0715597 -1.6326234 -2.1522925 -1.3387978 -2.0712957 -0.91345763 -1.8964181 -1.4212474 ] [-1.1675928 -0.54880595 -0.8115287 -1.1600919 -1.3923653 -1.3530548 -0.97427607 -0.22949195 -0.66957104 -0.6569837 ] [-1.0651134 -1.4142132 -1.8485212 -2.0584817 -0.9484181 -1.4442523 -1.0919445 -0.7539856 -1.6426975 -2.5222821 ] [-0.8771757 -0.83539337 -1.6645 -1.6862324 -0.39386457 -1.0371993 -1.2613759 -1.2385172 -2.4516473 -1.8933487 ] [-0.5052573 -1.09676 -2.271151 -1.4729643 -1.3231138 -1.7928733 -0.8263829 -1.8522217 -1.8088839 -1.1231786 ]] [[ 0.9113708 0.7907026 2.140435 2.8612027 3.9723048 2.057137 1.3609447 2.1311722 1.9352969 2.149891 ] [ 2.2853272 0.93846744 0.57124364 3.0431483 0.6605674 3.0430775 2.2642221 1.5104103 1.9213948 1.2222452 ] [ 2.9757159 0.46888745 1.6968528 1.7843039 1.3806282 2.1428828 -0.36561656 1.3720694 2.7933269 2.521438 ] [ 0.1796112 1.6506298 1.4250939 1.4568589 0.82617503 1.4967426 1.2389052 2.4340868 2.1243355 2.7442741 ] [ 4.14071 1.7720242 1.2022974 
2.021076 2.4844325 2.4153905 1.1246648 1.6695936 2.4896183 0.7996587 ] [ 1.8056515 2.9642153 1.2872831 1.3348882 2.6327114 3.5483341 2.1004982 1.4842141 1.2668874 1.8257086 ] [ 1.9787751 2.1407707 0.77798915 2.3223352 2.7121186 1.5051165 4.355002 -0.33431578 2.302908 1.9732963 ] [ 0.5779501 1.4252553 2.0459797 1.3220265 2.3372064 2.2086382 2.170397 2.3154507 0.03234458 1.9761785 ] [ 2.3220437 2.948381 1.1540885 0.21974504 1.7706124 2.2402048 2.016476 1.9431579 1.875647 1.2818772 ] [ 3.1526213 2.8812604 0.71466947 0.72649634 2.1575494 1.4240682 2.0558653 0.4095087 3.060161 0.9042439 ]] [[-1.4718665 0.6387576 -0.547154 1.3742061 0.47777283 0.68618596 -1.1534566 -0.20922011 -0.14385602 -0.9560708 ] [ 1.4285569 0.3113377 -1.9300808 -1.4947207 1.7209806 -1.9532119 -2.1941729 -2.5407615 1.7882941 -3.1958468 ] [ 3.5677154 0.16396755 -0.3265219 0.5626043 2.3530312 1.3534162 0.3321244 -0.31790528 -2.70349 -0.7927931 ] [-0.9850748 -0.5293397 0.62827134 -2.552271 1.2744075 -0.8303329 -0.34146133 0.13909692 -0.33015373 -1.3250167 ] [-0.03520495 0.36059737 0.8511212 -0.6573102 0.9398112 0.79468477 -1.2601823 -2.4583263 -2.0975602 -0.8099271 ] [ 2.3828893 -2.5982432 -0.5410757 -1.0519326 -3.619079 -0.9269602 0.84026396 -2.7188883 -1.2695214 1.0681714 ] [ 1.5994282 -1.4859631 1.1962821 -1.9121596 -1.1843592 -0.35457665 0.48949862 1.1051955 -0.07784122 0.906618 ] [-0.9020766 0.28731823 3.8050191 -1.9423251 1.0018696 -1.2708743 2.1278846 1.6806786 0.2002604 -0.26411912] [ 1.9810784 -3.1206615 -0.33801773 0.09597874 -0.7284352 -0.3276184 -1.5114753 -2.2761602 -1.4465942 2.1606438 ] [-0.7284196 -2.5151596 -2.3808718 0.9439837 -0.8816771 -1.8064566 -2.85188 -2.6932433 -1.1948473 -2.819703 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_527.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.5900 -0.0223 -0.2109 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.4498 (2,1,.,.) = 0.9344 (3,1,.,.) = 0.4903 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[ 9.02784050e-01 8.99364471e-01 1.21901244e-01 1.48351595e-01 1.55425715e+00 6.33303046e-01 4.83898491e-01 2.24449992e-01 5.65804243e-01 7.52239764e-01] [ 1.40935433e+00 8.48509312e-01 -1.32067725e-01 4.82924908e-01 2.45121345e-01 9.13211465e-01 8.98912907e-01 9.05080974e-01 5.35360575e-01 9.50118482e-01] [ 8.64052415e-01 -3.76361936e-01 7.40353763e-01 1.10067368e+00 4.16225612e-01 8.53527665e-01 7.00962842e-01 6.22243404e-01 5.87511063e-01 4.96929735e-01] [ 2.20880911e-01 1.13456750e+00 1.56370357e-01 6.53492987e-01 6.17768764e-01 9.99861121e-01 1.39191818e+00 1.36573744e+00 -1.91945583e-01 4.30255115e-01] [ 1.55613143e-02 4.98038590e-01 1.03553510e+00 3.82014632e-01 4.25449647e-02 7.21405625e-01 1.18054733e-01 8.95819962e-01 3.83144587e-01 4.85555381e-01] [ 2.07240283e-01 -4.49057788e-01 5.82095623e-01 5.42533755e-01 3.82951885e-01 1.29849267e+00 3.92432332e-01 5.84264815e-01 1.17556000e+00 3.02498609e-01] [ 1.09655488e+00 -4.80701059e-01 
7.66801894e-01 7.56750286e-01 -2.19130427e-01 9.62390900e-01 1.11986637e+00 1.39184320e+00 -1.91730008e-01 3.05904746e-01] [ 1.13377738e+00 2.54358888e-01 1.08044541e+00 6.63075626e-01 6.93877935e-01 1.46356240e-01 1.07626283e+00 6.66392565e-01 1.06231356e+00 5.55229902e-01] [ 6.62611187e-01 6.93966925e-01 2.67197937e-01 4.47394878e-01 1.09119080e-01 1.22253871e+00 4.54454571e-01 8.78682137e-01 5.66863239e-01 5.60971081e-01] [ 3.42713028e-01 7.84336567e-01 1.19180000e+00 1.44976914e+00 8.15968290e-02 7.20258594e-01 6.23905122e-01 1.92442954e-01 -6.49651556e-05 2.52174556e-01]] [[ 3.76678586e-01 -1.29021263e+00 1.11158085e+00 -7.64480293e-01 1.47073120e-01 -7.58619606e-01 -3.12491832e-03 1.90263057e+00 5.59004724e-01 -1.19898319e+00] [-3.38809639e-01 1.23162210e+00 1.24562494e-01 -1.98162705e-01 -5.46614707e-01 1.13049924e+00 7.49427080e-01 -9.41264033e-01 -7.73392975e-01 1.43310654e+00] [ 2.08696485e+00 7.40134656e-01 1.38165140e+00 1.00508177e+00 -1.25273120e+00 1.06075001e+00 1.99582160e+00 -7.49985635e-01 -8.66505325e-01 -1.02015138e+00] [-1.81366906e-01 -7.12727964e-01 5.97016633e-01 6.81942463e-01 -1.23348403e+00 -6.29135549e-01 -1.43590379e+00 3.86800110e-01 2.60183245e-01 -9.71424341e-01] [-2.61386689e-02 1.66312352e-01 -6.99455440e-01 1.35638916e+00 -1.46765387e+00 2.42222697e-02 -1.23114026e+00 -1.36474752e+00 -8.09818447e-01 3.37007731e-01] [ 5.22929907e-01 1.16552401e+00 4.42550927e-01 3.98652464e-01 1.46538603e+00 9.78841245e-01 2.58338720e-01 -1.59821880e+00 -1.30100119e+00 -2.02107477e+00] [-7.98047185e-01 8.14740300e-01 5.07498264e-01 -3.53065670e-01 5.51627159e-01 1.12402308e+00 -4.30200189e-01 -1.15535963e+00 -1.11742628e+00 -1.87017828e-01] [-9.41482604e-01 1.00871778e+00 2.70091802e-01 -5.23054123e-01 8.41700792e-01 -1.51808751e+00 3.97859037e-01 6.47229671e-01 5.05676627e-01 -7.14938879e-01] [-3.18191111e-01 -6.37600839e-01 1.33837116e+00 4.55836475e-01 4.49209213e-01 8.07098269e-01 2.26641417e-01 6.11627460e-01 -7.19753027e-01 1.12096953e+00] 
[-2.44838983e-01 -2.57947469e+00 3.21475685e-01 1.19338548e+00 -1.56777430e+00 1.29024312e-01 -1.46803691e-03 2.98170179e-01 1.26938617e+00 3.05812210e-01]] [[-1.37779188e+00 -2.80011207e-01 -7.99611568e-01 1.45956248e-01 -4.71441448e-01 -1.76245153e-01 -1.58778533e-01 -3.84622276e-01 -3.99216674e-02 -2.98083633e-01] [-9.93758906e-03 -2.29595244e-01 -8.54241326e-02 1.68044090e-01 -3.37401986e-01 2.17052385e-01 1.29453406e-01 3.34434628e-01 -7.80411959e-01 5.06088436e-01] [-6.47464633e-01 -9.62209404e-01 -9.55130309e-02 -4.54420179e-01 5.29703856e-01 -1.30124399e-02 -1.01286143e-01 -3.68229836e-01 8.51296961e-01 8.24352950e-02] [-1.40692964e-01 -1.10772848e+00 -6.92286968e-01 6.63589597e-01 1.39628172e-01 -1.03237711e-01 8.94966871e-02 -3.86294097e-01 -3.59278202e-01 -4.98937964e-01] [-7.17534482e-01 -7.64667755e-03 -9.73740458e-01 -1.23907514e-01 -8.35617900e-01 -8.50798249e-01 -7.00050592e-01 6.86511517e-01 -9.50547531e-02 8.66734460e-02] [ 1.99381426e-01 -8.24450701e-02 -1.64534837e-01 3.82874042e-01 -7.23610044e-01 -1.42582965e+00 1.78413361e-01 -2.37317814e-04 2.74132639e-01 -2.04548612e-01] [-3.63077730e-01 1.06896035e-01 -6.55254483e-01 1.78827584e-01 -1.00237481e-01 -6.54317021e-01 -2.05608636e-01 2.95010448e-01 5.07191896e-01 1.26858309e-01] [-4.60906953e-01 -1.01234663e+00 1.17389426e-01 -1.17114103e+00 -4.62075651e-01 -4.59325999e-01 -3.50694209e-01 -3.55348647e-01 -2.45966509e-01 1.58974379e-01] [ 7.01960698e-02 -1.49780822e+00 -3.55078727e-01 -6.16723716e-01 -1.43761694e+00 -7.27750540e-01 -7.09308147e-01 -9.65827882e-01 -7.48086810e-01 5.20930625e-02] [ 6.58132553e-01 -8.05744946e-01 -5.09995162e-01 -9.85563826e-03 -2.37910196e-01 4.86084558e-02 6.47295341e-02 -6.39580727e-01 9.80360806e-02 8.23952258e-02]]]]; ov_res: [[[[ 9.02784050e-01 8.99364471e-01 1.21901244e-01 1.48351595e-01 1.55425715e+00 6.33303046e-01 4.83898491e-01 2.24449992e-01 5.65804243e-01 7.52239764e-01] [ 1.40935433e+00 8.48509312e-01 -1.32067725e-01 4.82924908e-01 2.45121345e-01 
9.13211465e-01 8.98912907e-01 9.05080974e-01 5.35360575e-01 9.50118482e-01] [ 8.64052415e-01 -3.76361936e-01 7.40353763e-01 1.10067368e+00 4.16225612e-01 8.53527665e-01 7.00962842e-01 6.22243404e-01 5.87511063e-01 4.96929735e-01] [ 2.20880911e-01 1.13456750e+00 1.56370357e-01 6.53492987e-01 6.17768764e-01 9.99861121e-01 1.39191818e+00 1.36573744e+00 -1.91945583e-01 4.30255115e-01] [ 1.55613143e-02 4.98038590e-01 1.03553510e+00 3.82014632e-01 4.25449647e-02 7.21405625e-01 1.18054733e-01 8.95819962e-01 3.83144587e-01 4.85555381e-01] [ 2.07240283e-01 -4.49057788e-01 5.82095623e-01 5.42533755e-01 3.82951885e-01 1.29849267e+00 3.92432332e-01 5.84264815e-01 1.17556000e+00 3.02498609e-01] [ 1.09655488e+00 -4.80701059e-01 7.66801894e-01 7.56750286e-01 -2.19130427e-01 9.62390900e-01 1.11986637e+00 1.39184320e+00 -1.91730008e-01 3.05904746e-01] [ 1.13377738e+00 2.54358888e-01 1.08044541e+00 6.63075626e-01 6.93877935e-01 1.46356240e-01 1.07626283e+00 6.66392565e-01 1.06231356e+00 5.55229902e-01] [ 6.62611187e-01 6.93966925e-01 2.67197937e-01 4.47394878e-01 1.09119080e-01 1.22253871e+00 4.54454571e-01 8.78682137e-01 5.66863239e-01 5.60971081e-01] [ 3.42713028e-01 7.84336567e-01 1.19180000e+00 1.44976914e+00 8.15968290e-02 7.20258594e-01 6.23905122e-01 1.92442954e-01 -6.49651556e-05 2.52174556e-01]] [[ 3.76678586e-01 -1.29021263e+00 1.11158085e+00 -7.64480293e-01 1.47073120e-01 -7.58619606e-01 -3.12491832e-03 1.90263057e+00 5.59004724e-01 -1.19898319e+00] [-3.38809639e-01 1.23162210e+00 1.24562494e-01 -1.98162705e-01 -5.46614707e-01 1.13049924e+00 7.49427080e-01 -9.41264033e-01 -7.73392975e-01 1.43310654e+00] [ 2.08696485e+00 7.40134656e-01 1.38165140e+00 1.00508177e+00 -1.25273120e+00 1.06075001e+00 1.99582160e+00 -7.49985635e-01 -8.66505325e-01 -1.02015138e+00] [-1.81366906e-01 -7.12727964e-01 5.97016633e-01 6.81942463e-01 -1.23348403e+00 -6.29135549e-01 -1.43590379e+00 3.86800110e-01 2.60183245e-01 -9.71424341e-01] [-2.61386689e-02 1.66312352e-01 -6.99455440e-01 
1.35638916e+00 -1.46765387e+00 2.42222697e-02 -1.23114026e+00 -1.36474752e+00 -8.09818447e-01 3.37007731e-01] [ 5.22929907e-01 1.16552401e+00 4.42550927e-01 3.98652464e-01 1.46538603e+00 9.78841245e-01 2.58338720e-01 -1.59821880e+00 -1.30100119e+00 -2.02107477e+00] [-7.98047185e-01 8.14740300e-01 5.07498264e-01 -3.53065670e-01 5.51627159e-01 1.12402308e+00 -4.30200189e-01 -1.15535963e+00 -1.11742628e+00 -1.87017828e-01] [-9.41482604e-01 1.00871778e+00 2.70091802e-01 -5.23054123e-01 8.41700792e-01 -1.51808751e+00 3.97859037e-01 6.47229671e-01 5.05676627e-01 -7.14938879e-01] [-3.18191111e-01 -6.37600839e-01 1.33837116e+00 4.55836475e-01 4.49209213e-01 8.07098269e-01 2.26641417e-01 6.11627460e-01 -7.19753027e-01 1.12096953e+00] [-2.44838983e-01 -2.57947469e+00 3.21475685e-01 1.19338548e+00 -1.56777430e+00 1.29024312e-01 -1.46803691e-03 2.98170179e-01 1.26938617e+00 3.05812210e-01]] [[-1.37779188e+00 -2.80011207e-01 -7.99611568e-01 1.45956248e-01 -4.71441448e-01 -1.76245153e-01 -1.58778533e-01 -3.84622276e-01 -3.99216674e-02 -2.98083633e-01] [-9.93758906e-03 -2.29595244e-01 -8.54241326e-02 1.68044090e-01 -3.37401986e-01 2.17052385e-01 1.29453406e-01 3.34434628e-01 -7.80411959e-01 5.06088436e-01] [-6.47464633e-01 -9.62209404e-01 -9.55130309e-02 -4.54420179e-01 5.29703856e-01 -1.30124399e-02 -1.01286143e-01 -3.68229836e-01 8.51296961e-01 8.24352950e-02] [-1.40692964e-01 -1.10772848e+00 -6.92286968e-01 6.63589597e-01 1.39628172e-01 -1.03237711e-01 8.94966871e-02 -3.86294097e-01 -3.59278202e-01 -4.98937964e-01] [-7.17534482e-01 -7.64667755e-03 -9.73740458e-01 -1.23907514e-01 -8.35617900e-01 -8.50798249e-01 -7.00050592e-01 6.86511517e-01 -9.50547531e-02 8.66734460e-02] [ 1.99381426e-01 -8.24450701e-02 -1.64534837e-01 3.82874042e-01 -7.23610044e-01 -1.42582965e+00 1.78413361e-01 -2.37317814e-04 2.74132639e-01 -2.04548612e-01] [-3.63077730e-01 1.06896035e-01 -6.55254483e-01 1.78827584e-01 -1.00237481e-01 -6.54317021e-01 -2.05608636e-01 2.95010448e-01 5.07191896e-01 
1.26858309e-01] [-4.60906953e-01 -1.01234663e+00 1.17389426e-01 -1.17114103e+00 -4.62075651e-01 -4.59325999e-01 -3.50694209e-01 -3.55348647e-01 -2.45966509e-01 1.58974379e-01] [ 7.01960698e-02 -1.49780822e+00 -3.55078727e-01 -6.16723716e-01 -1.43761694e+00 -7.27750540e-01 -7.09308147e-01 -9.65827882e-01 -7.48086810e-01 5.20930625e-02] [ 6.58132553e-01 -8.05744946e-01 -5.09995162e-01 -9.85563826e-03 -2.37910196e-01 4.86084558e-02 6.47295341e-02 -6.39580727e-01 9.80360806e-02 8.23952258e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [1, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_529.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.0213}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -2.0211 (2,1,.,.) = 0.01 * -8.0128 (3,1,.,.) = -2.1643 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 4.268242 -2.5942967 0.8096069 2.997494 -1.5802393 0.0768407 -2.3182359 -0.38467407] [ 3.2604437 7.479147 -2.1633885 0.38841808 -0.11073911 2.436781 3.7482371 3.5667725 ] [-0.34817553 -2.3389628 -3.214714 0.5128748 -2.4554029 3.7108924 2.919249 -0.11412632] [ 2.4413776 -1.62276 -2.7069278 2.8918295 -2.717002 0.45590758 -0.05821526 1.2998788 ] [ 2.9485683 3.0587068 -3.4215493 1.4184881 1.7520586 3.2060244 -2.219751 -2.1582177 ] [ 2.2822344 -1.0735807 -0.74789345 -2.0859792 -1.0256951 0.43737108 0.68137234 2.6446338 ] [-1.8072624 3.004582 2.7350917 1.0704198 5.905921 1.1737409 -0.61827564 -3.0516624 ] [ 4.39141 1.8303595 2.9420624 1.4697456 1.0229182 5.111679 4.8801384 0.7310692 ]]]]; ov_res: [[[[ 4.268242 -2.5942967 0.8096069 2.997494 -1.5802393 0.0768407 -2.3182359 -0.38467407] [ 3.2604437 7.479147 -2.1633885 0.38841808 -0.11073911 2.436781 3.7482371 3.5667725 ] [-0.34817553 
-2.3389628 -3.214714 0.5128748 -2.4554029 3.7108924 2.919249 -0.11412632] [ 2.4413776 -1.62276 -2.7069278 2.8918295 -2.717002 0.45590758 -0.05821526 1.2998788 ] [ 2.9485683 3.0587068 -3.4215493 1.4184881 1.7520586 3.2060244 -2.219751 -2.1582177 ] [ 2.2822344 -1.0735807 -0.74789345 -2.0859792 -1.0256951 0.43737108 0.68137234 2.6446338 ] [-1.8072624 3.004582 2.7350917 1.0704198 5.905921 1.1737409 -0.61827564 -3.0516624 ] [ 4.39141 1.8303595 2.9420624 1.4697456 1.0229182 5.111679 4.8801384 0.7310692 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [1, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_531.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.1617 -1.2042 -0.2974 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.01 * 8.1993 (2,1,.,.) = 0.5995 (3,1,.,.) = -0.6841 (1,2,.,.) = 0.5261 (2,2,.,.) = -0.2994 (3,2,.,.) = 0.2439 (1,3,.,.) = 1.1709 (2,3,.,.) = 0.9337 (3,3,.,.) = 0.9028 [ CPUFloatType{3,3,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[-1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01] [-1.6170026e-01 -3.5983473e-02 7.6505959e-01 -8.8133812e-01 -1.2691000e+00 3.2323772e-01 6.4877421e-01 -2.9584625e+00 1.2634354e+00 -1.0741541e+00 2.4394009e+00 -1.6170026e-01] [-1.6170026e-01 -8.1140840e-01 6.3370198e-02 2.5469077e+00 1.5336007e-02 7.0826948e-01 -5.0990438e-01 5.3176826e-01 -1.9231166e+00 -7.5602216e-01 -6.6067004e-01 -1.6170026e-01] [-1.6170026e-01 -7.5840220e-02 8.5728437e-02 4.4260871e-01 -8.1938073e-02 4.7626978e-01 1.8645597e+00 8.0435678e-02 9.7935557e-01 -5.3641164e-01 9.8824695e-02 -1.6170026e-01] [-1.6170026e-01 -4.4301802e-01 -1.1251289e+00 6.3751942e-01 -1.1603534e-04 -1.5712534e-01 -6.3313150e-01 
9.4708025e-01 -3.6069202e-01 1.2991208e+00 -1.0509328e+00 -1.6170026e-01] [-1.6170026e-01 4.0480548e-01 1.2330638e+00 -1.7977154e+00 -1.4673172e+00 6.4800006e-01 -3.2566312e-01 -6.6534758e-01 9.6300542e-01 -1.0131761e+00 1.5656595e+00 -1.6170026e-01] [-1.6170026e-01 1.3925389e+00 5.9149975e-01 2.2840254e-01 -1.1603522e+00 -1.6404479e+00 6.5395415e-01 1.9748061e+00 -2.5321990e-01 1.8473492e+00 3.3890021e-01 -1.6170026e-01] [-1.6170026e-01 -5.8949554e-01 -3.7041473e-01 -7.4536353e-01 1.6634769e+00 -3.2031441e-01 -3.5465962e-01 7.8701645e-01 1.9252613e+00 -1.7620021e+00 1.6249999e+00 -1.6170026e-01] [-1.6170026e-01 1.4227868e+00 1.7575814e+00 -8.0435663e-01 8.8127625e-01 -2.4164717e+00 -9.4351959e-01 -3.1381625e-01 7.1534038e-02 -1.8974829e-01 -3.0341476e-01 -1.6170026e-01] [-1.6170026e-01 -1.0395782e+00 -2.8438208e-01 3.2925707e-01 1.5919434e-01 -8.4812194e-02 -7.0153910e-01 -1.7445486e+00 -1.0833448e+00 -1.2211113e+00 -4.9987173e-01 -1.6170026e-01] [-1.6170026e-01 7.3906034e-01 -1.5222524e+00 9.5316744e-01 3.1365031e-01 1.6367519e+00 1.3598180e+00 9.9779451e-01 -8.4835097e-02 -1.5212595e+00 -3.3519068e+00 -1.6170026e-01] [-1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01]] [[-1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00] [-1.2041825e+00 -1.9093585e+00 -1.7951481e+00 -2.2275896e+00 -2.4708769e+00 -2.1053743e+00 -3.4321308e-01 -2.1644924e+00 -7.7388579e-01 -1.1722058e+00 1.0510291e+00 -1.2041825e+00] [-1.2041825e+00 -7.7667600e-01 -1.3061335e+00 1.0631949e+00 -1.1824474e+00 -9.2094445e-01 -6.6835380e-01 -7.0209640e-01 -3.4768305e+00 -1.0176828e+00 -2.1270308e+00 -1.2041825e+00] [-1.2041825e+00 -1.4735805e+00 -7.0363015e-01 -5.5627978e-01 -1.1590950e+00 1.1555628e+00 -2.4688113e-01 -1.5867543e+00 
-9.2888594e-01 -1.4268522e+00 1.1506879e-01 -1.2041825e+00] [-1.2041825e+00 -2.5931702e+00 -6.9706613e-01 -1.2898726e+00 -1.3519646e+00 -8.0769384e-01 -1.7950895e+00 -1.6504830e+00 -2.4569142e-01 4.7299671e-01 -1.4208448e+00 -1.2041825e+00] [-1.2041825e+00 -1.5201888e+00 1.6287827e-01 -2.9052010e+00 -2.9396763e+00 -5.6236166e-01 -2.2613764e+00 -2.5195680e+00 -1.4269309e+00 -1.9931245e+00 -8.4886551e-02 -1.2041825e+00] [-1.2041825e+00 2.5821888e-01 -7.0656306e-01 -1.9869452e+00 -1.3889720e+00 -9.3938839e-01 -8.8111663e-01 -1.1364206e+00 -1.4187720e+00 5.3476179e-01 -7.3595601e-01 -1.2041825e+00] [-1.2041825e+00 -1.5596201e+00 -4.6699864e-01 -2.0820501e+00 5.6211913e-01 -1.0534214e+00 -1.3392199e+00 -1.6622796e+00 9.2340171e-01 -2.0044608e+00 9.0713441e-01 -1.2041825e+00] [-1.2041825e+00 3.8646102e-02 -1.7281249e+00 -2.5524218e+00 -1.2812608e+00 -4.2725426e-01 -2.0038986e+00 1.2292349e-01 -1.4172738e+00 -3.8276392e-01 -6.7708707e-01 -1.2041825e+00] [-1.2041825e+00 -1.4046780e+00 -1.4020841e+00 -1.2605748e+00 -1.6869417e+00 -9.6765351e-01 -1.1825304e+00 -2.5388951e+00 -2.2122536e+00 -2.0056424e+00 -6.2384677e-01 -1.2041825e+00] [-1.2041825e+00 -2.1444714e-01 -1.5798426e+00 -7.8653193e-01 2.2004831e-01 2.4892139e-01 -1.3588645e+00 -8.7712955e-01 -2.8824337e+00 -1.5802419e+00 -2.2405081e+00 -1.2041825e+00] [-1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00]] [[-2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01] [-2.9737398e-01 -6.8066013e-01 3.2046446e-01 -6.1056286e-02 2.7536336e-01 -2.7207589e-01 -2.5191674e-01 -2.6953819e+00 1.0833604e+00 -8.6658597e-01 -1.6486049e-03 -2.9737398e-01] [-2.9737398e-01 -1.9525372e+00 3.1313983e-01 1.1523652e-01 7.4413645e-01 -1.7884468e-01 -3.3252388e-01 1.2138137e+00 -3.1883025e-01 
-1.3686998e+00 -1.0025697e+00 -2.9737398e-01] [-2.9737398e-01 -7.5975525e-01 4.3316901e-02 1.0771504e-01 7.1614408e-01 -1.1927780e+00 1.7647355e+00 5.4323483e-01 9.9591374e-02 -6.7949945e-01 -4.9916646e-01 -2.9737398e-01] [-2.9737398e-01 -1.1902277e-01 -1.8138293e+00 2.2898003e-01 3.5287353e-01 -3.9830545e-01 -7.7266634e-01 -2.0744944e-01 -5.5748403e-02 1.1733884e+00 -1.1316453e+00 -2.9737398e-01] [-2.9737398e-01 -2.0975682e-01 7.6426923e-01 5.2275312e-01 -3.4979737e-01 -6.9883674e-01 1.8128350e-01 -5.0702262e-01 2.0078642e+00 -4.6730405e-01 1.7068474e+00 -2.9737398e-01] [-2.9737398e-01 1.1909313e+00 2.4553707e-01 9.4621646e-01 -3.9842811e-01 -2.4633291e+00 7.1356452e-01 1.7709227e+00 -1.0034409e+00 1.6353936e+00 -1.1675662e+00 -2.9737398e-01] [-2.9737398e-01 -6.8214732e-01 -2.6720503e-01 3.8674274e-01 1.4917545e+00 -1.7554295e-01 -1.7914490e+00 7.6326346e-01 -4.3925047e-02 -2.1646063e+00 2.0571926e-01 -2.9737398e-01] [-2.9737398e-01 5.4813719e-01 9.1129982e-01 5.3414196e-02 1.7877603e+00 -2.3766351e+00 2.4229020e-02 -7.6388162e-01 1.8454894e-01 -2.0357828e+00 -6.6870880e-01 -2.9737398e-01] [-2.9737398e-01 -1.1543075e+00 -5.8958578e-01 4.6316889e-01 3.2666117e-02 9.0118194e-01 -2.3658153e-01 -1.1356076e+00 -9.6325147e-01 1.3022995e-01 -1.9983168e+00 -2.9737398e-01] [-2.9737398e-01 -6.5316457e-01 -1.7923961e+00 8.2699323e-01 7.7612960e-01 5.2345395e-02 1.8024442e+00 -1.0677608e+00 4.3378174e-02 -1.5908759e+00 -3.0683861e+00 -2.9737398e-01] [-2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01]]]]; ov_res: [[[[-1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01] [-1.6170026e-01 -3.5983473e-02 7.6505959e-01 -8.8133812e-01 -1.2691000e+00 3.2323772e-01 6.4877421e-01 -2.9584625e+00 1.2634354e+00 -1.0741541e+00 
2.4394009e+00 -1.6170026e-01] [-1.6170026e-01 -8.1140840e-01 6.3370198e-02 2.5469077e+00 1.5336007e-02 7.0826948e-01 -5.0990438e-01 5.3176826e-01 -1.9231166e+00 -7.5602216e-01 -6.6067004e-01 -1.6170026e-01] [-1.6170026e-01 -7.5840220e-02 8.5728437e-02 4.4260871e-01 -8.1938073e-02 4.7626978e-01 1.8645597e+00 8.0435678e-02 9.7935557e-01 -5.3641164e-01 9.8824695e-02 -1.6170026e-01] [-1.6170026e-01 -4.4301802e-01 -1.1251289e+00 6.3751942e-01 -1.1603534e-04 -1.5712534e-01 -6.3313150e-01 9.4708025e-01 -3.6069202e-01 1.2991208e+00 -1.0509328e+00 -1.6170026e-01] [-1.6170026e-01 4.0480548e-01 1.2330638e+00 -1.7977154e+00 -1.4673172e+00 6.4800006e-01 -3.2566312e-01 -6.6534758e-01 9.6300542e-01 -1.0131761e+00 1.5656595e+00 -1.6170026e-01] [-1.6170026e-01 1.3925389e+00 5.9149975e-01 2.2840254e-01 -1.1603522e+00 -1.6404479e+00 6.5395415e-01 1.9748061e+00 -2.5321990e-01 1.8473492e+00 3.3890021e-01 -1.6170026e-01] [-1.6170026e-01 -5.8949554e-01 -3.7041473e-01 -7.4536353e-01 1.6634769e+00 -3.2031441e-01 -3.5465962e-01 7.8701645e-01 1.9252613e+00 -1.7620021e+00 1.6249999e+00 -1.6170026e-01] [-1.6170026e-01 1.4227868e+00 1.7575814e+00 -8.0435663e-01 8.8127625e-01 -2.4164717e+00 -9.4351959e-01 -3.1381625e-01 7.1534038e-02 -1.8974829e-01 -3.0341476e-01 -1.6170026e-01] [-1.6170026e-01 -1.0395782e+00 -2.8438208e-01 3.2925707e-01 1.5919434e-01 -8.4812194e-02 -7.0153910e-01 -1.7445486e+00 -1.0833448e+00 -1.2211113e+00 -4.9987173e-01 -1.6170026e-01] [-1.6170026e-01 7.3906034e-01 -1.5222524e+00 9.5316744e-01 3.1365031e-01 1.6367519e+00 1.3598180e+00 9.9779451e-01 -8.4835097e-02 -1.5212595e+00 -3.3519068e+00 -1.6170026e-01] [-1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01 -1.6170026e-01]] [[-1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00] 
[-1.2041825e+00 -1.9093585e+00 -1.7951481e+00 -2.2275896e+00 -2.4708769e+00 -2.1053743e+00 -3.4321308e-01 -2.1644924e+00 -7.7388579e-01 -1.1722058e+00 1.0510291e+00 -1.2041825e+00] [-1.2041825e+00 -7.7667600e-01 -1.3061335e+00 1.0631949e+00 -1.1824474e+00 -9.2094445e-01 -6.6835380e-01 -7.0209640e-01 -3.4768305e+00 -1.0176828e+00 -2.1270308e+00 -1.2041825e+00] [-1.2041825e+00 -1.4735805e+00 -7.0363015e-01 -5.5627978e-01 -1.1590950e+00 1.1555628e+00 -2.4688113e-01 -1.5867543e+00 -9.2888594e-01 -1.4268522e+00 1.1506879e-01 -1.2041825e+00] [-1.2041825e+00 -2.5931702e+00 -6.9706613e-01 -1.2898726e+00 -1.3519646e+00 -8.0769384e-01 -1.7950895e+00 -1.6504830e+00 -2.4569142e-01 4.7299671e-01 -1.4208448e+00 -1.2041825e+00] [-1.2041825e+00 -1.5201888e+00 1.6287827e-01 -2.9052010e+00 -2.9396763e+00 -5.6236166e-01 -2.2613764e+00 -2.5195680e+00 -1.4269309e+00 -1.9931245e+00 -8.4886551e-02 -1.2041825e+00] [-1.2041825e+00 2.5821888e-01 -7.0656306e-01 -1.9869452e+00 -1.3889720e+00 -9.3938839e-01 -8.8111663e-01 -1.1364206e+00 -1.4187720e+00 5.3476179e-01 -7.3595601e-01 -1.2041825e+00] [-1.2041825e+00 -1.5596201e+00 -4.6699864e-01 -2.0820501e+00 5.6211913e-01 -1.0534214e+00 -1.3392199e+00 -1.6622796e+00 9.2340171e-01 -2.0044608e+00 9.0713441e-01 -1.2041825e+00] [-1.2041825e+00 3.8646102e-02 -1.7281249e+00 -2.5524218e+00 -1.2812608e+00 -4.2725426e-01 -2.0038986e+00 1.2292349e-01 -1.4172738e+00 -3.8276392e-01 -6.7708707e-01 -1.2041825e+00] [-1.2041825e+00 -1.4046780e+00 -1.4020841e+00 -1.2605748e+00 -1.6869417e+00 -9.6765351e-01 -1.1825304e+00 -2.5388951e+00 -2.2122536e+00 -2.0056424e+00 -6.2384677e-01 -1.2041825e+00] [-1.2041825e+00 -2.1444714e-01 -1.5798426e+00 -7.8653193e-01 2.2004831e-01 2.4892139e-01 -1.3588645e+00 -8.7712955e-01 -2.8824337e+00 -1.5802419e+00 -2.2405081e+00 -1.2041825e+00] [-1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00 -1.2041825e+00]] 
[[-2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01] [-2.9737398e-01 -6.8066013e-01 3.2046446e-01 -6.1056286e-02 2.7536336e-01 -2.7207589e-01 -2.5191674e-01 -2.6953819e+00 1.0833604e+00 -8.6658597e-01 -1.6486049e-03 -2.9737398e-01] [-2.9737398e-01 -1.9525372e+00 3.1313983e-01 1.1523652e-01 7.4413645e-01 -1.7884468e-01 -3.3252388e-01 1.2138137e+00 -3.1883025e-01 -1.3686998e+00 -1.0025697e+00 -2.9737398e-01] [-2.9737398e-01 -7.5975525e-01 4.3316901e-02 1.0771504e-01 7.1614408e-01 -1.1927780e+00 1.7647355e+00 5.4323483e-01 9.9591374e-02 -6.7949945e-01 -4.9916646e-01 -2.9737398e-01] [-2.9737398e-01 -1.1902277e-01 -1.8138293e+00 2.2898003e-01 3.5287353e-01 -3.9830545e-01 -7.7266634e-01 -2.0744944e-01 -5.5748403e-02 1.1733884e+00 -1.1316453e+00 -2.9737398e-01] [-2.9737398e-01 -2.0975682e-01 7.6426923e-01 5.2275312e-01 -3.4979737e-01 -6.9883674e-01 1.8128350e-01 -5.0702262e-01 2.0078642e+00 -4.6730405e-01 1.7068474e+00 -2.9737398e-01] [-2.9737398e-01 1.1909313e+00 2.4553707e-01 9.4621646e-01 -3.9842811e-01 -2.4633291e+00 7.1356452e-01 1.7709227e+00 -1.0034409e+00 1.6353936e+00 -1.1675662e+00 -2.9737398e-01] [-2.9737398e-01 -6.8214732e-01 -2.6720503e-01 3.8674274e-01 1.4917545e+00 -1.7554295e-01 -1.7914490e+00 7.6326346e-01 -4.3925047e-02 -2.1646063e+00 2.0571926e-01 -2.9737398e-01] [-2.9737398e-01 5.4813719e-01 9.1129982e-01 5.3414196e-02 1.7877603e+00 -2.3766351e+00 2.4229020e-02 -7.6388162e-01 1.8454894e-01 -2.0357828e+00 -6.6870880e-01 -2.9737398e-01] [-2.9737398e-01 -1.1543075e+00 -5.8958578e-01 4.6316889e-01 3.2666117e-02 9.0118194e-01 -2.3658153e-01 -1.1356076e+00 -9.6325147e-01 1.3022995e-01 -1.9983168e+00 -2.9737398e-01] [-2.9737398e-01 -6.5316457e-01 -1.7923961e+00 8.2699323e-01 7.7612960e-01 5.2345395e-02 1.8024442e+00 -1.0677608e+00 4.3378174e-02 -1.5908759e+00 -3.0683861e+00 -2.9737398e-01] [-2.9737398e-01 -2.9737398e-01 
-2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01 -2.9737398e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [3, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_533.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.104871}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.2182 (2,1,.,.) = -1.5002 (3,1,.,.) = 0.9370 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[-2.724004 1.0917988 0.98745644 -2.9917183 0.18545684 -1.1628232 0.03256335 -0.7197711 ] [ 1.2430714 1.3148742 0.7546729 -2.308232 0.8013447 -1.0423756 -2.8405213 -0.8378167 ] [-0.65489626 0.8030043 1.0860251 -0.05035176 -1.7282499 -0.64084893 -3.5554473 -0.22540165] [-1.6335616 1.668747 -0.7267622 1.3576133 0.478673 2.971408 1.460271 0.2980337 ]]]]; ov_res: [[[[-2.724004 1.0917988 0.98745644 -2.9917183 0.18545684 -1.1628232 0.03256335 -0.7197711 ] [ 1.2430714 1.3148742 0.7546729 -2.308232 0.8013447 -1.0423756 -2.8405213 -0.8378167 ] [-0.65489626 0.8030043 1.0860251 -0.05035176 -1.7282499 -0.64084893 -3.5554473 -0.22540165] [-1.6335616 1.668747 -0.7267622 1.3576133 0.478673 2.971408 1.460271 0.2980337 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [3, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.06 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_535.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.5699 -1.6483 1.0280 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.3205 (2,1,.,.) = 0.9138 (3,1,.,.) = -0.4334 (1,2,.,.) = -0.5330 (2,2,.,.) = 0.7346 (3,2,.,.) = -0.6589 (1,3,.,.) = 0.3403 (2,3,.,.) = 0.001 * 1.5331 (3,3,.,.) = 1.2209 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ] [-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ] [-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ] [-0.5698526 -1.468396 0.55363166 1.2425234 -3.2486255 -1.3118708 -0.35788888 -1.2657319 -0.06645077 -4.0224857 -1.0269067 -0.5698526 ] [-0.5698526 -1.8572538 0.9927989 -1.1739578 -1.9463751 -0.88862497 1.3148599 -2.3702824 -0.87142384 -0.06922495 -1.3940337 -0.5698526 ] [-0.5698526 -0.3555717 -1.6999924 -0.9436716 -1.0502315 -3.6900597 0.4167322 -1.3513272 
-3.2432363 0.2556365 -3.5349865 -0.5698526 ] [-0.5698526 -1.5517151 -0.69164085 -1.3641516 0.16558444 1.7140796 1.3897632 -0.6276978 -0.20333189 1.2483797 2.1359441 -0.5698526 ] [-0.5698526 -1.600272 -2.4118316 0.4828509 2.2845397 0.34247786 -0.09515327 -4.355628 -2.0557692 -1.8134283 -1.3172196 -0.5698526 ] [-0.5698526 -0.78673935 -2.4074993 -1.9433882 -1.1826477 -0.47904098 -0.57964104 -2.1397595 1.9540846 1.3258967 -1.1071057 -0.5698526 ] [-0.5698526 0.65897727 1.1331929 -1.560089 1.8341935 -0.11835948 0.9634452 -2.4143248 -0.83528316 -2.380114 -0.7328167 -0.5698526 ] [-0.5698526 1.2435079 -1.2251774 -1.3364902 0.5705308 -0.48972353 -0.18461382 0.0805276 -2.6146638 0.19669181 0.27456033 -0.5698526 ] [-0.5698526 -0.9673138 -0.07231104 -1.6154739 -0.26235956 1.9081306 -1.0353478 2.1334708 -0.04049844 -1.0898845 -0.41225588 -0.5698526 ] [-0.5698526 -0.46558863 -1.8178297 1.017997 -1.6324767 3.1859388 -2.8003397 -0.7287197 0.17961776 -0.47401148 -1.3890827 -0.5698526 ] [-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ] [-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ] [-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ]] [[-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ] [-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ] [-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ] [-1.6483188 -1.6492561 -2.1899078 -3.5018406 0.21924865 -0.76922446 -2.2332556 -1.2991449 -2.0337415 0.43106723 -1.4051832 -1.6483188 ] [-1.6483188 -0.67020535 -2.7844048 -1.2185161 -1.1677232 -1.2920444 
-3.0334792 -0.36266112 -2.0955174 -1.7664229 -0.9347716 -1.6483188 ] [-1.6483188 -2.0063841 -1.190604 -1.7374055 -1.1322541 0.7518871 -2.0336525 -0.18829727 1.416565 -1.8510399 0.52337074 -1.6483188 ] [-1.6483188 -0.92373085 -1.7387986 -1.1637427 -1.981934 -3.7663229 -3.8709242 -1.7343749 -2.6046152 -3.1099224 -3.5505543 -1.6483188 ] [-1.6483188 -0.77696615 -0.6134666 -2.6029673 -3.4880075 -2.7004557 -2.1621478 1.3810716 -0.25842392 -0.5642016 -1.257822 -1.6483188 ] [-1.6483188 -1.7022198 -0.11619699 -0.13707078 -1.0740296 -1.1450324 -1.5650592 -0.5372801 -3.3815176 -3.2872758 -2.1215792 -1.6483188 ] [-1.6483188 -3.1461148 -2.584359 -1.6918514 -3.3173943 -2.027544 -2.8415003 -1.1882901 -1.0017779 -0.33343148 -1.7377075 -1.6483188 ] [-1.6483188 -2.67766 -1.0081499 -0.33009315 -3.2243726 -1.3008852 -2.3159623 -2.3017383 -0.34171927 -2.8476355 -3.0369473 -1.6483188 ] [-1.6483188 -1.8787065 -2.4754539 -1.0185361 -1.2984183 -2.7766395 -1.6902201 -4.1679707 -1.9630164 -0.8988158 -1.5984081 -1.6483188 ] [-1.6483188 -1.2960305 -0.79925346 -2.1225805 -1.9664772 -3.7074616 0.14472008 -1.0716925 -2.0302806 -2.2530131 -0.705852 -1.6483188 ] [-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ] [-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ] [-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ]] [[ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ] [ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ] [ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ] [ 1.0279727 1.7602801 0.3940757 1.952571 -1.3335304 0.18489212 1.9915993 
2.415897 0.6359174 -2.4808838 -0.83427525 1.0279727 ] [ 1.0279727 1.7037528 4.788373 -0.342947 -0.4965968 2.2906902 0.24432218 0.9599489 -1.0794461 3.2918296 1.3856697 1.0279727 ] [ 1.0279727 1.341291 -0.9485835 1.0359377 -0.30243862 -2.7137983 2.1562474 1.2871062 2.0941386 2.1368308 -0.20856321 1.0279727 ] [ 1.0279727 -0.08580792 1.4336358 0.48766875 2.9797492 3.1668155 0.68233037 1.0743471 -0.9002434 1.8245661 2.4754803 1.0279727 ] [ 1.0279727 0.92492884 0.5334577 1.2204745 1.8464922 -0.20957696 2.840302 -2.026912 -0.7864951 0.40911055 1.8394821 1.0279727 ] [ 1.0279727 1.3954968 -1.578454 1.0447828 -0.6341295 1.2493197 0.74815667 1.8072433 2.1502514 2.4984589 0.03461987 1.0279727 ] [ 1.0279727 0.82477075 0.23703808 0.26375473 1.8982925 0.96124285 -0.23022377 -1.5056303 1.1940961 -0.48906696 0.75439835 1.0279727 ] [ 1.0279727 1.9793848 0.72628784 1.6434795 3.1746964 1.505983 1.3045073 -1.3315518 -1.0379617 0.30766958 -0.9406806 1.0279727 ] [ 1.0279727 0.30051506 1.9970933 -0.5244931 2.388063 3.4276247 2.0621772 3.717842 -0.4382831 1.1978632 -0.3485248 1.0279727 ] [ 1.0279727 1.0100415 -4.178332 0.789613 -1.3066556 6.291048 0.483396 1.2210786 2.114514 2.1614604 -0.19699872 1.0279727 ] [ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ] [ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ] [ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ]]]]; ov_res: [[[[-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ] [-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ] [-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ] 
[-0.5698526 -1.468396 0.55363166 1.2425234 -3.2486255 -1.3118708 -0.35788888 -1.2657319 -0.06645077 -4.0224857 -1.0269067 -0.5698526 ] [-0.5698526 -1.8572538 0.9927989 -1.1739578 -1.9463751 -0.88862497 1.3148599 -2.3702824 -0.87142384 -0.06922495 -1.3940337 -0.5698526 ] [-0.5698526 -0.3555717 -1.6999924 -0.9436716 -1.0502315 -3.6900597 0.4167322 -1.3513272 -3.2432363 0.2556365 -3.5349865 -0.5698526 ] [-0.5698526 -1.5517151 -0.69164085 -1.3641516 0.16558444 1.7140796 1.3897632 -0.6276978 -0.20333189 1.2483797 2.1359441 -0.5698526 ] [-0.5698526 -1.600272 -2.4118316 0.4828509 2.2845397 0.34247786 -0.09515327 -4.355628 -2.0557692 -1.8134283 -1.3172196 -0.5698526 ] [-0.5698526 -0.78673935 -2.4074993 -1.9433882 -1.1826477 -0.47904098 -0.57964104 -2.1397595 1.9540846 1.3258967 -1.1071057 -0.5698526 ] [-0.5698526 0.65897727 1.1331929 -1.560089 1.8341935 -0.11835948 0.9634452 -2.4143248 -0.83528316 -2.380114 -0.7328167 -0.5698526 ] [-0.5698526 1.2435079 -1.2251774 -1.3364902 0.5705308 -0.48972353 -0.18461382 0.0805276 -2.6146638 0.19669181 0.27456033 -0.5698526 ] [-0.5698526 -0.9673138 -0.07231104 -1.6154739 -0.26235956 1.9081306 -1.0353478 2.1334708 -0.04049844 -1.0898845 -0.41225588 -0.5698526 ] [-0.5698526 -0.46558863 -1.8178297 1.017997 -1.6324767 3.1859388 -2.8003397 -0.7287197 0.17961776 -0.47401148 -1.3890827 -0.5698526 ] [-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ] [-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ] [-0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 -0.5698526 ]] [[-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ] [-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 
-1.6483188 -1.6483188 ] [-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ] [-1.6483188 -1.6492561 -2.1899078 -3.5018406 0.21924865 -0.76922446 -2.2332556 -1.2991449 -2.0337415 0.43106723 -1.4051832 -1.6483188 ] [-1.6483188 -0.67020535 -2.7844048 -1.2185161 -1.1677232 -1.2920444 -3.0334792 -0.36266112 -2.0955174 -1.7664229 -0.9347716 -1.6483188 ] [-1.6483188 -2.0063841 -1.190604 -1.7374055 -1.1322541 0.7518871 -2.0336525 -0.18829727 1.416565 -1.8510399 0.52337074 -1.6483188 ] [-1.6483188 -0.92373085 -1.7387986 -1.1637427 -1.981934 -3.7663229 -3.8709242 -1.7343749 -2.6046152 -3.1099224 -3.5505543 -1.6483188 ] [-1.6483188 -0.77696615 -0.6134666 -2.6029673 -3.4880075 -2.7004557 -2.1621478 1.3810716 -0.25842392 -0.5642016 -1.257822 -1.6483188 ] [-1.6483188 -1.7022198 -0.11619699 -0.13707078 -1.0740296 -1.1450324 -1.5650592 -0.5372801 -3.3815176 -3.2872758 -2.1215792 -1.6483188 ] [-1.6483188 -3.1461148 -2.584359 -1.6918514 -3.3173943 -2.027544 -2.8415003 -1.1882901 -1.0017779 -0.33343148 -1.7377075 -1.6483188 ] [-1.6483188 -2.67766 -1.0081499 -0.33009315 -3.2243726 -1.3008852 -2.3159623 -2.3017383 -0.34171927 -2.8476355 -3.0369473 -1.6483188 ] [-1.6483188 -1.8787065 -2.4754539 -1.0185361 -1.2984183 -2.7766395 -1.6902201 -4.1679707 -1.9630164 -0.8988158 -1.5984081 -1.6483188 ] [-1.6483188 -1.2960305 -0.79925346 -2.1225805 -1.9664772 -3.7074616 0.14472008 -1.0716925 -2.0302806 -2.2530131 -0.705852 -1.6483188 ] [-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ] [-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ] [-1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 -1.6483188 ]] [[ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 
1.0279727 1.0279727 1.0279727 1.0279727 ] [ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ] [ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ] [ 1.0279727 1.7602801 0.3940757 1.952571 -1.3335304 0.18489212 1.9915993 2.415897 0.6359174 -2.4808838 -0.83427525 1.0279727 ] [ 1.0279727 1.7037528 4.788373 -0.342947 -0.4965968 2.2906902 0.24432218 0.9599489 -1.0794461 3.2918296 1.3856697 1.0279727 ] [ 1.0279727 1.341291 -0.9485835 1.0359377 -0.30243862 -2.7137983 2.1562474 1.2871062 2.0941386 2.1368308 -0.20856321 1.0279727 ] [ 1.0279727 -0.08580792 1.4336358 0.48766875 2.9797492 3.1668155 0.68233037 1.0743471 -0.9002434 1.8245661 2.4754803 1.0279727 ] [ 1.0279727 0.92492884 0.5334577 1.2204745 1.8464922 -0.20957696 2.840302 -2.026912 -0.7864951 0.40911055 1.8394821 1.0279727 ] [ 1.0279727 1.3954968 -1.578454 1.0447828 -0.6341295 1.2493197 0.74815667 1.8072433 2.1502514 2.4984589 0.03461987 1.0279727 ] [ 1.0279727 0.82477075 0.23703808 0.26375473 1.8982925 0.96124285 -0.23022377 -1.5056303 1.1940961 -0.48906696 0.75439835 1.0279727 ] [ 1.0279727 1.9793848 0.72628784 1.6434795 3.1746964 1.505983 1.3045073 -1.3315518 -1.0379617 0.30766958 -0.9406806 1.0279727 ] [ 1.0279727 0.30051506 1.9970933 -0.5244931 2.388063 3.4276247 2.0621772 3.717842 -0.4382831 1.1978632 -0.3485248 1.0279727 ] [ 1.0279727 1.0100415 -4.178332 0.789613 -1.3066556 6.291048 0.483396 1.2210786 2.114514 2.1614604 -0.19699872 1.0279727 ] [ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ] [ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ] [ 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 1.0279727 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_537.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.03722}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.01 * -1.2969 (2,1,.,.) = -1.4273 (3,1,.,.) = 0.8136 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[-0.18097126 -2.23317 1.7495645 -2.0587578 -2.30575 -1.1258643 0.6508968 -2.5388298 -1.9074154 -4.483842 ] [-2.8459768 -2.0237 3.6590114 -2.2911172 0.30154157 -3.6599011 -1.2308495 -2.0091648 -1.8974447 -1.3776727 ] [-0.7017571 -1.1652712 0.7301394 -2.5773396 -4.3217325 -1.6963404 -0.26991606 -1.061049 3.0853996 2.0839396 ] [-0.91751766 -3.2125673 -1.8761569 -1.9688995 -3.0586057 -2.5621076 -0.668888 -3.609529 -2.64826 -1.7692618 ] [ 1.1745759 -1.5383945 -3.115869 -0.1856842 -0.1514746 -4.1271453 1.663926 -0.02483392 -0.7669872 1.1405782 ] [-2.880494 0.51740336 0.0656743 -2.972046 -0.8456764 -1.6564304 -0.9738499 -2.9634018 -2.2567406 0.91810787] [-2.1344929 -1.8130431 -1.7552879 -0.45930672 -0.9585274 -1.9404626 0.91133356 -1.2373971 -1.700522 1.967714 ] [-0.18757308 -1.6277289 -2.0455863 -2.0085142 -0.87883484 2.4383526 -2.5737772 
-0.41246152 -0.1239695 0.89776087]]]]; ov_res: [[[[-0.18097126 -2.23317 1.7495645 -2.0587578 -2.30575 -1.1258643 0.6508968 -2.5388298 -1.9074154 -4.483842 ] [-2.8459768 -2.0237 3.6590114 -2.2911172 0.30154157 -3.6599011 -1.2308495 -2.0091648 -1.8974447 -1.3776727 ] [-0.7017571 -1.1652712 0.7301394 -2.5773396 -4.3217325 -1.6963404 -0.26991606 -1.061049 3.0853996 2.0839396 ] [-0.91751766 -3.2125673 -1.8761569 -1.9688995 -3.0586057 -2.5621076 -0.668888 -3.609529 -2.64826 -1.7692618 ] [ 1.1745759 -1.5383945 -3.115869 -0.1856842 -0.1514746 -4.1271453 1.663926 -0.02483392 -0.7669872 1.1405782 ] [-2.880494 0.51740336 0.0656743 -2.972046 -0.8456764 -1.6564304 -0.9738499 -2.9634018 -2.2567406 0.91810787] [-2.1344929 -1.8130431 -1.7552879 -0.45930672 -0.9585274 -1.9404626 0.91133356 -1.2373971 -1.700522 1.967714 ] [-0.18757308 -1.6277289 -2.0455863 -2.0085142 -0.87883484 2.4383526 -2.5737772 -0.41246152 -0.1239695 0.89776087]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [0, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_539.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=0.01 * -7.0895 -174.8852 -35.2383 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.3127 (2,1,.,.) = -0.2917 (3,1,.,.) = -1.5753 (1,2,.,.) = 0.5085 (2,2,.,.) = -0.8753 (3,2,.,.) = 0.1525 (1,3,.,.) = 0.6462 (2,3,.,.) = -1.5263 (3,3,.,.) = -0.2780 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[-7.0895143e-02 3.1025359e-01 -6.2956071e-01 1.0890654e+00 1.3071733e+00 -5.3685540e-01 5.1413995e-01 5.1367694e-01 7.5897843e-01 -7.3095042e-01 5.6429970e-01 -7.0895143e-02] [-7.0895143e-02 -7.1573502e-01 -1.0266898e+00 -6.9947988e-01 -1.0280565e+00 7.7250922e-01 -6.1576694e-01 6.6433078e-01 1.0076650e+00 1.3119875e+00 2.9022518e-01 -7.0895143e-02] [-7.0895143e-02 -1.5360677e+00 8.4165430e-01 9.3584007e-01 5.4326594e-01 -8.8123642e-02 -5.6301087e-01 5.4365885e-01 -2.0830336e+00 -5.5775774e-01 -1.2712724e+00 -7.0895143e-02] [-7.0895143e-02 -1.1336893e-02 -1.6016090e+00 -1.6382647e+00 1.1594645e-01 -1.5485847e+00 9.1856635e-01 -1.2455469e+00 1.6101952e+00 -2.3042315e-01 4.2595670e-01 -7.0895143e-02] [-7.0895143e-02 1.3219881e-01 6.3373435e-01 
-4.9830623e-02 7.9329282e-01 3.8498190e-01 -6.9044703e-01 -8.9061892e-01 1.7802376e-01 9.5439512e-01 -1.7178565e-01 -7.0895143e-02] [-7.0895143e-02 -8.9135224e-01 3.4361023e-01 1.4429256e+00 -1.8592455e+00 -4.4955933e-01 -1.9500399e-01 -2.9878816e-01 -1.1070764e+00 -4.6888179e-01 4.7154546e-01 -7.0895143e-02] [-7.0895143e-02 5.6849265e-01 -1.0526270e+00 -5.7373279e-01 -4.7132459e-01 -1.8990314e+00 5.6711066e-01 1.4205436e+00 4.5133740e-01 8.0386382e-01 -1.0434320e+00 -7.0895143e-02] [-7.0895143e-02 -9.5942366e-01 2.3980072e-01 4.2819098e-01 -1.4450700e+00 7.9860812e-01 -1.2312499e+00 -2.4496653e+00 -1.4721981e-01 -4.4704711e-01 -1.5288407e-01 -7.0895143e-02] [-7.0895143e-02 -4.6356446e-01 1.0724262e+00 -8.0671526e-02 5.3548831e-01 4.9098217e-01 -6.7704058e-01 1.6980120e+00 2.6091332e+00 2.3476737e+00 7.0340848e-01 -7.0895143e-02] [-7.0895143e-02 -2.1933737e-01 -8.3190858e-02 5.5651891e-01 -1.2038239e-01 -1.1487945e-01 1.2343874e+00 -6.8565685e-01 2.9071093e-01 -2.2923294e-01 8.2368606e-01 -7.0895143e-02]] [[-1.7488519e+00 -2.1707525e+00 -1.2828956e+00 -3.9623923e+00 -4.7133164e+00 -5.4098678e-01 -2.3895943e+00 -2.8425088e+00 -3.8417211e+00 -4.8874938e-01 -3.2279429e+00 -1.7488519e+00] [-1.7488519e+00 4.2638779e-02 5.3962910e-01 -4.8838055e-01 -1.9197905e-01 -2.9671173e+00 -4.9605763e-01 -3.5593534e+00 -4.6166477e+00 -4.8085942e+00 -2.1154318e+00 -1.7488519e+00] [-1.7488519e+00 8.1120694e-01 -3.3719687e+00 -3.7131362e+00 -2.9480586e+00 -1.2378620e+00 -1.3945222e+00 -3.7474363e+00 2.1384268e+00 -1.1995404e+00 7.5298870e-01 -1.7488519e+00] [-1.7488519e+00 -1.4626617e+00 1.5098804e+00 1.1468254e+00 -1.8686540e+00 1.3803197e+00 -3.0866165e+00 4.3757355e-01 -4.7369213e+00 -5.9044790e-01 -2.7406373e+00 -1.7488519e+00] [-1.7488519e+00 -2.3683367e+00 -2.8076301e+00 -2.1171613e+00 -3.0244598e+00 -2.6399107e+00 -1.5258392e+00 -2.3282170e-02 -1.8323023e+00 -4.4382353e+00 -1.4861439e+00 -1.7488519e+00] [-1.7488519e+00 -6.4408302e-02 -2.9360294e+00 -4.6703568e+00 2.5675426e+00 
-6.6871989e-01 -1.5809169e+00 -1.2935550e+00 -1.9182682e-01 -1.2717974e+00 -2.5722890e+00 -1.7488519e+00] [-1.7488519e+00 -3.2491028e+00 -5.5808747e-01 -1.0426347e+00 -1.1432915e+00 2.3195710e+00 -3.3691988e+00 -4.4437971e+00 -2.7579975e+00 -3.1071348e+00 3.0508697e-01 -1.7488519e+00] [-1.7488519e+00 1.1035110e+00 -2.1331990e+00 -2.5264485e+00 6.0212433e-01 -2.9008760e+00 3.4408081e-01 2.5129194e+00 -1.6570215e+00 -4.2288566e-01 -1.7340977e+00 -1.7488519e+00] [-1.7488519e+00 -1.1379030e+00 -3.6258321e+00 -1.2387691e+00 -3.6343815e+00 -2.4394963e+00 -4.0331089e-01 -5.7234516e+00 -6.9697452e+00 -6.6671882e+00 -2.7008288e+00 -1.7488519e+00] [-1.7488519e+00 -2.4385238e+00 -2.2238390e+00 -2.3284721e+00 -1.8588829e+00 -1.1213641e+00 -4.2882566e+00 -1.6727209e-01 -1.8770391e+00 -1.4194309e+00 -3.8848457e+00 -1.7488519e+00]] [[-3.5238266e-01 -1.3649068e+00 9.2125893e-01 9.7790003e-02 -8.5761112e-01 1.5418953e-01 -1.6327906e-01 -2.3219013e+00 3.4081620e-01 -8.5504889e-02 5.5601805e-02 -3.5238266e-01] [-3.5238266e-01 -2.4822296e-01 -1.7275658e+00 3.7731117e-01 1.3172123e+00 -2.5744576e+00 -5.6421757e-03 -6.9688904e-01 1.0639405e+00 -1.2534394e+00 -1.4572024e+00 -3.5238266e-01] [-3.5238266e-01 8.8057971e-01 -1.6390786e+00 -2.1526124e+00 -8.5074162e-01 -1.2743182e-01 1.1975861e+00 1.5986786e+00 3.1056147e+00 2.6136786e-01 -1.3217846e+00 -3.5238266e-01] [-3.5238266e-01 -1.5842052e+00 2.2989649e-01 1.7648304e+00 -1.4593040e+00 5.9881550e-01 -4.6820760e+00 2.6113091e+00 -3.2716532e+00 -1.7907990e+00 -2.2898183e+00 -3.5238266e-01] [-3.5238266e-01 -1.7498037e-01 -1.4482977e+00 1.1158496e-01 -5.8795196e-01 -2.9041510e+00 6.4284170e-01 9.7289765e-01 -2.4958863e+00 1.8553920e+00 -1.2505968e+00 -3.5238266e-01] [-3.5238266e-01 3.0879056e-01 2.3341864e-01 -9.4277376e-01 1.1304259e-02 -9.4804943e-02 -6.0873437e-01 2.4759352e-01 5.6013554e-01 -1.3697408e-01 -1.1033250e+00 -3.5238266e-01] [-3.5238266e-01 -1.5683613e+00 1.1871452e+00 7.8094625e-01 2.3650265e+00 -1.5609384e-02 7.6961935e-01 
-2.8228872e+00 7.0262170e-01 -1.5279599e+00 1.6676617e+00 -3.5238266e-01] [-3.5238266e-01 -3.6089783e+00 -5.1324499e-01 -8.4073961e-01 1.1404730e+00 -3.6070076e-01 1.8735390e+00 2.3135164e+00 6.2790793e-01 -1.6701121e+00 -1.7337759e-01 -3.5238266e-01] [-3.5238266e-01 -7.1455032e-02 -1.8309189e+00 -1.1098607e+00 -1.3898559e-01 -5.2297890e-02 -1.3042550e+00 4.0996641e-02 -2.2521114e+00 -2.5307792e-01 -2.1393728e+00 -3.5238266e-01] [-3.5238266e-01 1.6665537e+00 3.1153923e-01 -2.6492429e+00 1.1868092e+00 -2.3741820e+00 -2.4582124e+00 9.1557312e-01 -6.2475908e-01 -2.8760827e-01 -9.4235331e-01 -3.5238266e-01]]]]; ov_res: [[[[-7.0895143e-02 3.1025359e-01 -6.2956071e-01 1.0890654e+00 1.3071733e+00 -5.3685540e-01 5.1413995e-01 5.1367694e-01 7.5897843e-01 -7.3095042e-01 5.6429970e-01 -7.0895143e-02] [-7.0895143e-02 -7.1573502e-01 -1.0266898e+00 -6.9947988e-01 -1.0280565e+00 7.7250922e-01 -6.1576694e-01 6.6433078e-01 1.0076650e+00 1.3119875e+00 2.9022518e-01 -7.0895143e-02] [-7.0895143e-02 -1.5360677e+00 8.4165430e-01 9.3584007e-01 5.4326594e-01 -8.8123642e-02 -5.6301087e-01 5.4365885e-01 -2.0830336e+00 -5.5775774e-01 -1.2712724e+00 -7.0895143e-02] [-7.0895143e-02 -1.1336893e-02 -1.6016090e+00 -1.6382647e+00 1.1594645e-01 -1.5485847e+00 9.1856635e-01 -1.2455469e+00 1.6101952e+00 -2.3042315e-01 4.2595670e-01 -7.0895143e-02] [-7.0895143e-02 1.3219881e-01 6.3373435e-01 -4.9830623e-02 7.9329282e-01 3.8498190e-01 -6.9044703e-01 -8.9061892e-01 1.7802376e-01 9.5439512e-01 -1.7178565e-01 -7.0895143e-02] [-7.0895143e-02 -8.9135224e-01 3.4361023e-01 1.4429256e+00 -1.8592455e+00 -4.4955933e-01 -1.9500399e-01 -2.9878816e-01 -1.1070764e+00 -4.6888179e-01 4.7154546e-01 -7.0895143e-02] [-7.0895143e-02 5.6849265e-01 -1.0526270e+00 -5.7373279e-01 -4.7132459e-01 -1.8990314e+00 5.6711066e-01 1.4205436e+00 4.5133740e-01 8.0386382e-01 -1.0434320e+00 -7.0895143e-02] [-7.0895143e-02 -9.5942366e-01 2.3980072e-01 4.2819098e-01 -1.4450700e+00 7.9860812e-01 -1.2312499e+00 -2.4496653e+00 -1.4721981e-01 
-4.4704711e-01 -1.5288407e-01 -7.0895143e-02] [-7.0895143e-02 -4.6356446e-01 1.0724262e+00 -8.0671526e-02 5.3548831e-01 4.9098217e-01 -6.7704058e-01 1.6980120e+00 2.6091332e+00 2.3476737e+00 7.0340848e-01 -7.0895143e-02] [-7.0895143e-02 -2.1933737e-01 -8.3190858e-02 5.5651891e-01 -1.2038239e-01 -1.1487945e-01 1.2343874e+00 -6.8565685e-01 2.9071093e-01 -2.2923294e-01 8.2368606e-01 -7.0895143e-02]] [[-1.7488519e+00 -2.1707525e+00 -1.2828956e+00 -3.9623923e+00 -4.7133164e+00 -5.4098678e-01 -2.3895943e+00 -2.8425088e+00 -3.8417211e+00 -4.8874938e-01 -3.2279429e+00 -1.7488519e+00] [-1.7488519e+00 4.2638779e-02 5.3962910e-01 -4.8838055e-01 -1.9197905e-01 -2.9671173e+00 -4.9605763e-01 -3.5593534e+00 -4.6166477e+00 -4.8085942e+00 -2.1154318e+00 -1.7488519e+00] [-1.7488519e+00 8.1120694e-01 -3.3719687e+00 -3.7131362e+00 -2.9480586e+00 -1.2378620e+00 -1.3945222e+00 -3.7474363e+00 2.1384268e+00 -1.1995404e+00 7.5298870e-01 -1.7488519e+00] [-1.7488519e+00 -1.4626617e+00 1.5098804e+00 1.1468254e+00 -1.8686540e+00 1.3803197e+00 -3.0866165e+00 4.3757355e-01 -4.7369213e+00 -5.9044790e-01 -2.7406373e+00 -1.7488519e+00] [-1.7488519e+00 -2.3683367e+00 -2.8076301e+00 -2.1171613e+00 -3.0244598e+00 -2.6399107e+00 -1.5258392e+00 -2.3282170e-02 -1.8323023e+00 -4.4382353e+00 -1.4861439e+00 -1.7488519e+00] [-1.7488519e+00 -6.4408302e-02 -2.9360294e+00 -4.6703568e+00 2.5675426e+00 -6.6871989e-01 -1.5809169e+00 -1.2935550e+00 -1.9182682e-01 -1.2717974e+00 -2.5722890e+00 -1.7488519e+00] [-1.7488519e+00 -3.2491028e+00 -5.5808747e-01 -1.0426347e+00 -1.1432915e+00 2.3195710e+00 -3.3691988e+00 -4.4437971e+00 -2.7579975e+00 -3.1071348e+00 3.0508697e-01 -1.7488519e+00] [-1.7488519e+00 1.1035110e+00 -2.1331990e+00 -2.5264485e+00 6.0212433e-01 -2.9008760e+00 3.4408081e-01 2.5129194e+00 -1.6570215e+00 -4.2288566e-01 -1.7340977e+00 -1.7488519e+00] [-1.7488519e+00 -1.1379030e+00 -3.6258321e+00 -1.2387691e+00 -3.6343815e+00 -2.4394963e+00 -4.0331089e-01 -5.7234516e+00 -6.9697452e+00 -6.6671882e+00 
-2.7008288e+00 -1.7488519e+00] [-1.7488519e+00 -2.4385238e+00 -2.2238390e+00 -2.3284721e+00 -1.8588829e+00 -1.1213641e+00 -4.2882566e+00 -1.6727209e-01 -1.8770391e+00 -1.4194309e+00 -3.8848457e+00 -1.7488519e+00]] [[-3.5238266e-01 -1.3649068e+00 9.2125893e-01 9.7790003e-02 -8.5761112e-01 1.5418953e-01 -1.6327906e-01 -2.3219013e+00 3.4081620e-01 -8.5504889e-02 5.5601805e-02 -3.5238266e-01] [-3.5238266e-01 -2.4822296e-01 -1.7275658e+00 3.7731117e-01 1.3172123e+00 -2.5744576e+00 -5.6421757e-03 -6.9688904e-01 1.0639405e+00 -1.2534394e+00 -1.4572024e+00 -3.5238266e-01] [-3.5238266e-01 8.8057971e-01 -1.6390786e+00 -2.1526124e+00 -8.5074162e-01 -1.2743182e-01 1.1975861e+00 1.5986786e+00 3.1056147e+00 2.6136786e-01 -1.3217846e+00 -3.5238266e-01] [-3.5238266e-01 -1.5842052e+00 2.2989649e-01 1.7648304e+00 -1.4593040e+00 5.9881550e-01 -4.6820760e+00 2.6113091e+00 -3.2716532e+00 -1.7907990e+00 -2.2898183e+00 -3.5238266e-01] [-3.5238266e-01 -1.7498037e-01 -1.4482977e+00 1.1158496e-01 -5.8795196e-01 -2.9041510e+00 6.4284170e-01 9.7289765e-01 -2.4958863e+00 1.8553920e+00 -1.2505968e+00 -3.5238266e-01] [-3.5238266e-01 3.0879056e-01 2.3341864e-01 -9.4277376e-01 1.1304259e-02 -9.4804943e-02 -6.0873437e-01 2.4759352e-01 5.6013554e-01 -1.3697408e-01 -1.1033250e+00 -3.5238266e-01] [-3.5238266e-01 -1.5683613e+00 1.1871452e+00 7.8094625e-01 2.3650265e+00 -1.5609384e-02 7.6961935e-01 -2.8228872e+00 7.0262170e-01 -1.5279599e+00 1.6676617e+00 -3.5238266e-01] [-3.5238266e-01 -3.6089783e+00 -5.1324499e-01 -8.4073961e-01 1.1404730e+00 -3.6070076e-01 1.8735390e+00 2.3135164e+00 6.2790793e-01 -1.6701121e+00 -1.7337759e-01 -3.5238266e-01] [-3.5238266e-01 -7.1455032e-02 -1.8309189e+00 -1.1098607e+00 -1.3898559e-01 -5.2297890e-02 -1.3042550e+00 4.0996641e-02 -2.2521114e+00 -2.5307792e-01 -2.1393728e+00 -3.5238266e-01] [-3.5238266e-01 1.6665537e+00 3.1153923e-01 -2.6492429e+00 1.1868092e+00 -2.3741820e+00 -2.4582124e+00 9.1557312e-01 -6.2475908e-01 -2.8760827e-01 -9.4235331e-01 -3.5238266e-01]]]] 
Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_541.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.0192 0.9911 1.3268 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.4079 (2,1,.,.) = 0.01 * -3.4750 (3,1,.,.) = 0.3660 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[-4.8029590e-01 -6.5180868e-01 -6.7239881e-02 -4.9431655e-01 2.7031818e-02 5.2152291e-02 1.7875433e-02 4.6097276e-01 2.2934237e-01 -3.8170713e-01] [-7.1951956e-01 -5.7282871e-01 2.8160197e-01 2.7184829e-01 2.7627876e-01 5.6212801e-01 -1.4629328e-01 5.5926806e-01 1.6489010e-02 3.1374690e-01] [ 2.4335063e-01 -1.7434034e-01 -2.8738976e-01 -1.8310206e-02 8.8743865e-01 -2.1466200e-01 3.4551442e-01 -1.8486330e-01 -3.5344410e-01 -1.4023513e-01] [-4.9008474e-02 -3.8662711e-01 -6.0756840e-02 2.0575219e-01 4.6477586e-01 1.9336399e-03 7.9387888e-02 -4.5388699e-02 -1.4875814e-01 -4.0765163e-01] [-9.0985782e-02 -1.3143593e-01 1.5840109e-01 1.7821963e-01 7.6228410e-01 2.8054884e-01 5.9906229e-02 -2.3705676e-01 -3.7975289e-02 -2.3622268e-01] [ 3.3878195e-01 -2.1996753e-01 -2.3418981e-01 -3.6372700e-01 4.1290069e-01 
-3.2483569e-01 -5.6404114e-01 -2.3037475e-01 4.4916719e-01 -8.0872822e-01] [ 7.2468624e-02 3.0975002e-01 4.8111311e-01 5.7012880e-01 6.7888901e-02 -7.3277569e-01 4.4224170e-01 -3.2643676e-01 7.5188631e-01 2.6957297e-01] [ 8.3052647e-01 6.9860488e-01 -3.3296967e-01 5.1953185e-01 -1.3842066e-01 -5.2726847e-01 4.2782360e-01 3.6444700e-01 1.0744693e-01 -2.7165189e-01]] [[ 1.0127355e+00 1.0464517e+00 9.9597335e-01 9.9236679e-01 1.0129129e+00 1.0357401e+00 9.8815066e-01 9.7952390e-01 1.0158075e+00 9.4315231e-01] [ 1.0139823e+00 1.0332232e+00 1.0314485e+00 1.0220432e+00 1.0307987e+00 1.0043275e+00 9.7956836e-01 9.9342084e-01 9.2450804e-01 9.6231914e-01] [ 9.6816254e-01 9.7224224e-01 1.0425671e+00 9.8981106e-01 1.0034636e+00 9.9102068e-01 1.0460149e+00 9.8886603e-01 9.7221124e-01 1.0525688e+00] [ 9.8140550e-01 9.6813726e-01 9.9310648e-01 9.8934984e-01 1.0044138e+00 9.6557140e-01 9.9591202e-01 9.7925550e-01 9.3252498e-01 9.7974521e-01] [ 1.0137050e+00 1.0055653e+00 9.6178395e-01 9.8908395e-01 9.6860361e-01 1.0153052e+00 9.9987435e-01 9.8474354e-01 1.0057513e+00 9.6001995e-01] [ 9.7339904e-01 1.0122010e+00 9.7183448e-01 1.0107596e+00 9.8753679e-01 9.7916192e-01 9.4699448e-01 9.7078168e-01 1.0241545e+00 9.6616590e-01] [ 9.7926027e-01 9.9804461e-01 9.8569053e-01 1.0750617e+00 9.9041629e-01 9.8693436e-01 1.0092000e+00 8.9931846e-01 1.0671846e+00 9.4956970e-01] [ 9.9276888e-01 9.9907982e-01 1.0442224e+00 1.0206553e+00 9.8274088e-01 9.6700686e-01 9.7454959e-01 9.8043036e-01 9.8589313e-01 1.0107164e+00]] [[ 1.0351551e+00 1.5563315e+00 7.1938640e-01 1.2222897e+00 1.8151246e+00 1.6982089e+00 1.2390325e+00 1.0969344e+00 1.3767265e+00 1.6861289e+00] [ 1.4080802e+00 1.3284392e+00 1.0721221e+00 1.4883568e+00 1.3417666e+00 1.2262905e+00 7.8968978e-01 1.7763819e+00 1.1651232e+00 7.5033474e-01] [ 1.0933541e+00 8.8690943e-01 1.3697249e+00 6.0165691e-01 1.5804156e+00 9.2625272e-01 1.0005696e+00 1.4955332e+00 1.2130854e+00 1.5167774e+00] [ 1.8719122e+00 1.6417973e+00 6.1930877e-01 
1.0549194e+00 1.1163429e+00 1.6734717e+00 1.5508935e+00 1.3195565e+00 1.9028993e+00 1.2764300e+00] [ 1.8490171e+00 1.6411884e+00 1.3957816e+00 8.2974732e-01 1.2140493e+00 1.1549969e+00 1.0287278e+00 1.3904083e+00 1.5964041e+00 1.3358022e+00] [ 1.6204526e+00 1.3530942e+00 1.1644847e+00 1.4367744e+00 1.4926981e+00 1.9937954e+00 1.1645273e+00 1.8217263e+00 1.0386385e+00 1.0746816e+00] [ 1.2837763e+00 1.2147834e+00 2.0553970e+00 1.0495828e+00 1.2800933e+00 1.4854468e+00 4.9100947e-01 1.4879575e+00 1.0385492e+00 1.5410516e+00] [ 1.7470912e+00 1.3097634e+00 1.0249941e+00 1.5083278e+00 1.0855898e+00 1.7090294e+00 8.7221909e-01 1.2887416e+00 1.5461711e+00 1.4649800e+00]]]]; ov_res: [[[[-4.8029590e-01 -6.5180868e-01 -6.7239881e-02 -4.9431655e-01 2.7031818e-02 5.2152291e-02 1.7875433e-02 4.6097276e-01 2.2934237e-01 -3.8170713e-01] [-7.1951956e-01 -5.7282871e-01 2.8160197e-01 2.7184829e-01 2.7627876e-01 5.6212801e-01 -1.4629328e-01 5.5926806e-01 1.6489010e-02 3.1374690e-01] [ 2.4335063e-01 -1.7434034e-01 -2.8738976e-01 -1.8310206e-02 8.8743865e-01 -2.1466200e-01 3.4551442e-01 -1.8486330e-01 -3.5344410e-01 -1.4023513e-01] [-4.9008474e-02 -3.8662711e-01 -6.0756840e-02 2.0575219e-01 4.6477586e-01 1.9336399e-03 7.9387888e-02 -4.5388699e-02 -1.4875814e-01 -4.0765163e-01] [-9.0985782e-02 -1.3143593e-01 1.5840109e-01 1.7821963e-01 7.6228410e-01 2.8054884e-01 5.9906229e-02 -2.3705676e-01 -3.7975289e-02 -2.3622268e-01] [ 3.3878195e-01 -2.1996753e-01 -2.3418981e-01 -3.6372700e-01 4.1290069e-01 -3.2483569e-01 -5.6404114e-01 -2.3037475e-01 4.4916719e-01 -8.0872822e-01] [ 7.2468624e-02 3.0975002e-01 4.8111311e-01 5.7012880e-01 6.7888901e-02 -7.3277569e-01 4.4224170e-01 -3.2643676e-01 7.5188631e-01 2.6957297e-01] [ 8.3052647e-01 6.9860488e-01 -3.3296967e-01 5.1953185e-01 -1.3842066e-01 -5.2726847e-01 4.2782360e-01 3.6444700e-01 1.0744693e-01 -2.7165189e-01]] [[ 1.0127355e+00 1.0464517e+00 9.9597335e-01 9.9236679e-01 1.0129129e+00 1.0357401e+00 9.8815066e-01 9.7952390e-01 1.0158075e+00 
9.4315231e-01] [ 1.0139823e+00 1.0332232e+00 1.0314485e+00 1.0220432e+00 1.0307987e+00 1.0043275e+00 9.7956836e-01 9.9342084e-01 9.2450804e-01 9.6231914e-01] [ 9.6816254e-01 9.7224224e-01 1.0425671e+00 9.8981106e-01 1.0034636e+00 9.9102068e-01 1.0460149e+00 9.8886603e-01 9.7221124e-01 1.0525688e+00] [ 9.8140550e-01 9.6813726e-01 9.9310648e-01 9.8934984e-01 1.0044138e+00 9.6557140e-01 9.9591202e-01 9.7925550e-01 9.3252498e-01 9.7974521e-01] [ 1.0137050e+00 1.0055653e+00 9.6178395e-01 9.8908395e-01 9.6860361e-01 1.0153052e+00 9.9987435e-01 9.8474354e-01 1.0057513e+00 9.6001995e-01] [ 9.7339904e-01 1.0122010e+00 9.7183448e-01 1.0107596e+00 9.8753679e-01 9.7916192e-01 9.4699448e-01 9.7078168e-01 1.0241545e+00 9.6616590e-01] [ 9.7926027e-01 9.9804461e-01 9.8569053e-01 1.0750617e+00 9.9041629e-01 9.8693436e-01 1.0092000e+00 8.9931846e-01 1.0671846e+00 9.4956970e-01] [ 9.9276888e-01 9.9907982e-01 1.0442224e+00 1.0206553e+00 9.8274088e-01 9.6700686e-01 9.7454959e-01 9.8043036e-01 9.8589313e-01 1.0107164e+00]] [[ 1.0351551e+00 1.5563315e+00 7.1938640e-01 1.2222897e+00 1.8151246e+00 1.6982089e+00 1.2390325e+00 1.0969344e+00 1.3767265e+00 1.6861289e+00] [ 1.4080802e+00 1.3284392e+00 1.0721221e+00 1.4883568e+00 1.3417666e+00 1.2262905e+00 7.8968978e-01 1.7763819e+00 1.1651232e+00 7.5033474e-01] [ 1.0933541e+00 8.8690943e-01 1.3697249e+00 6.0165691e-01 1.5804156e+00 9.2625272e-01 1.0005696e+00 1.4955332e+00 1.2130854e+00 1.5167774e+00] [ 1.8719122e+00 1.6417973e+00 6.1930877e-01 1.0549194e+00 1.1163429e+00 1.6734717e+00 1.5508935e+00 1.3195565e+00 1.9028993e+00 1.2764300e+00] [ 1.8490171e+00 1.6411884e+00 1.3957816e+00 8.2974732e-01 1.2140493e+00 1.1549969e+00 1.0287278e+00 1.3904083e+00 1.5964041e+00 1.3358022e+00] [ 1.6204526e+00 1.3530942e+00 1.1644847e+00 1.4367744e+00 1.4926981e+00 1.9937954e+00 1.1645273e+00 1.8217263e+00 1.0386385e+00 1.0746816e+00] [ 1.2837763e+00 1.2147834e+00 2.0553970e+00 1.0495828e+00 1.2800933e+00 1.4854468e+00 4.9100947e-01 1.4879575e+00 
1.0385492e+00 1.5410516e+00] [ 1.7470912e+00 1.3097634e+00 1.0249941e+00 1.5083278e+00 1.0855898e+00 1.7090294e+00 8.7221909e-01 1.2887416e+00 1.5461711e+00 1.4649800e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 1], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_543.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.5981 -0.4493 0.6143 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.9931 (2,1,.,.) = 0.4331 (3,1,.,.) = -0.2367 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 5.98143101e-01 -7.19767809e-01 -1.38012218e+00 3.93483490e-01 2.24412394e+00 9.66214538e-01 -5.88359952e-01 7.53196478e-01 -6.34884238e-01 -6.06323600e-01 1.01788688e+00 5.98143101e-01] [ 5.98143101e-01 1.96697545e+00 -8.99451971e-03 1.60759628e-01 2.29285502e+00 1.44663006e-01 -9.34815407e-01 -2.80188417e+00 -1.99343145e-01 1.70270991e+00 1.08390653e+00 5.98143101e-01] [ 5.98143101e-01 -6.54543281e-01 7.37277806e-01 1.19052422e+00 5.80752313e-01 2.50010419e+00 8.34503293e-01 -2.39099503e-01 1.56652284e+00 -2.43934155e-01 7.10134447e-01 5.98143101e-01] [ 5.98143101e-01 1.98182797e+00 1.12743616e+00 2.57570356e-01 3.71884525e-01 6.43875003e-01 -6.20586157e-01 1.75989461e+00 -6.59844279e-01 -6.81391835e-01 7.41312563e-01 5.98143101e-01] [ 5.98143101e-01 5.23764133e-01 2.08521700e+00 -1.80064631e+00 -8.66037607e-01 -1.76077366e-01 8.11730325e-01 9.01603699e-03 7.30797768e-01 
1.28269064e+00 2.18953204e+00 5.98143101e-01] [ 5.98143101e-01 -4.71162677e-01 1.79264879e+00 8.70262086e-01 3.33263338e-01 3.45942169e-01 -2.01076269e-03 2.05698586e+00 1.79954171e+00 1.42698717e+00 2.56789923e+00 5.98143101e-01] [ 5.98143101e-01 1.25013804e+00 -1.22579157e+00 1.52744555e+00 6.52780533e-01 2.80510807e+00 -4.23336983e-01 1.79872274e+00 1.39335537e+00 4.62701917e-02 6.41697764e-01 5.98143101e-01] [ 5.98143101e-01 -3.09145272e-01 -7.78174877e-01 2.03047007e-01 9.67299819e-01 8.00392509e-01 3.21902633e-01 3.02282423e-01 7.56365597e-01 -7.34537840e-02 4.08301651e-01 5.98143101e-01] [ 5.98143101e-01 -2.60834098e-01 1.20413280e+00 9.10521030e-01 8.93306613e-01 1.49004114e+00 7.00392604e-01 -3.52112830e-01 2.18637884e-01 1.79808962e+00 -8.62075210e-01 5.98143101e-01] [ 5.98143101e-01 1.03992462e+00 5.59336483e-01 -1.14027655e+00 -6.37260675e-01 2.80634856e+00 1.85970438e+00 2.05788225e-01 6.43178940e-01 2.42683601e+00 6.94022238e-01 5.98143101e-01]] [[-4.49345350e-01 -9.71346736e-01 -5.04985929e-01 -1.80011183e-01 -3.08104873e-01 -8.16565096e-01 4.70683873e-02 -4.61378664e-01 -3.73752236e-01 -3.80926996e-01 -7.90422797e-01 -4.49345350e-01] [-4.49345350e-01 -3.44122857e-01 -1.04486394e+00 -3.21094513e-01 -6.35182798e-01 -1.04771781e+00 -5.87887526e-01 -6.90344155e-01 3.69776785e-01 -4.48098987e-01 -5.58055162e-01 -4.49345350e-01] [-4.49345350e-01 2.14110434e-01 -9.24831986e-01 -2.55642921e-01 -5.94359398e-01 -4.52985555e-01 -8.69768858e-02 -2.48553738e-01 -1.72366238e+00 -5.54921389e-01 -5.83341122e-01 -4.49345350e-01] [-4.49345350e-01 -4.56986487e-01 -3.61999094e-01 -7.33139157e-01 -5.77084064e-01 -4.20770764e-01 4.67201293e-01 -5.55784583e-01 -1.12748265e+00 1.51742697e-01 -3.77110571e-01 -4.49345350e-01] [-4.49345350e-01 -4.94931638e-01 -7.31243610e-01 -6.89195514e-01 -2.94569254e-01 -6.30905628e-01 -6.89393640e-01 -4.94789332e-01 4.86463308e-01 -3.97284836e-01 -3.82272035e-01 -4.49345350e-01] [-4.49345350e-01 -8.62641096e-01 -1.63618696e+00 
-1.30328250e+00 -7.39769220e-01 -7.07134008e-01 -9.66558933e-01 -6.33414984e-01 -3.28071535e-01 -3.90073776e-01 1.09185934e-01 -4.49345350e-01] [-4.49345350e-01 -4.19527173e-01 -3.87908816e-02 4.88259017e-01 -8.04072320e-01 -1.11401057e+00 -4.34931487e-01 -1.03295922e+00 -8.66182804e-01 -3.29813361e-01 -6.92987084e-01 -4.49345350e-01] [-4.49345350e-01 4.78124499e-01 -5.89378595e-01 5.27594686e-01 -4.65552032e-01 -2.30737507e-01 -3.16858739e-01 -5.80012143e-01 -5.37958622e-01 -8.08193564e-01 -5.05798995e-01 -4.49345350e-01] [-4.49345350e-01 -1.00313973e+00 -8.01149249e-01 -3.76428753e-01 -1.45111024e-01 -7.03091502e-01 -1.21881413e+00 -5.14938235e-01 -9.95557129e-01 -2.79241502e-01 6.48149252e-02 -4.49345350e-01] [-4.49345350e-01 3.82542372e-01 2.77418554e-01 -3.00080776e-02 1.30050480e-02 -4.44824100e-02 -6.64191365e-01 -1.40095603e+00 -4.02837873e-01 -9.28496122e-01 -1.09648871e+00 -4.49345350e-01]] [[ 6.14284217e-01 1.32242918e-01 3.50891739e-01 5.94483435e-01 9.10848022e-01 7.68716097e-01 5.92029095e-01 5.86740077e-01 9.08114731e-01 3.66401494e-01 8.81695151e-01 6.14284217e-01] [ 6.14284217e-01 6.53059959e-01 3.00553054e-01 4.20695961e-01 6.30935609e-01 3.77017409e-01 6.15596712e-01 5.93609631e-01 7.33200312e-01 8.96949887e-01 5.55117846e-01 6.14284217e-01] [ 6.14284217e-01 1.91044331e-01 9.39190149e-01 1.27946877e+00 6.61233187e-01 6.91518724e-01 6.66944325e-01 6.23831928e-01 5.14450788e-01 3.70322019e-01 7.84710348e-01 6.14284217e-01] [ 6.14284217e-01 5.19902468e-01 6.10897064e-01 7.22248852e-01 3.01177263e-01 8.42062950e-01 4.73865271e-01 6.03512347e-01 6.03014827e-01 7.78566062e-01 1.16454852e+00 6.14284217e-01] [ 6.14284217e-01 3.54161233e-01 5.97981632e-01 6.33744597e-01 7.87714362e-01 3.38452011e-01 3.98967236e-01 5.36677480e-01 3.19702387e-01 1.23503089e-01 7.21836209e-01 6.14284217e-01] [ 6.14284217e-01 6.97755158e-01 7.52584755e-01 8.83196771e-01 6.26143396e-01 8.76581132e-01 7.65763402e-01 5.07172763e-01 3.70901853e-01 6.06593192e-01 2.84473121e-01 
6.14284217e-01] [ 6.14284217e-01 8.13841939e-01 7.62578666e-01 2.57092029e-01 9.94326532e-01 -4.73702550e-02 8.89074087e-01 8.41237724e-01 5.87156951e-01 8.24968696e-01 6.03415906e-01 6.14284217e-01] [ 6.14284217e-01 4.86772716e-01 3.27281892e-01 4.97868598e-01 7.73134172e-01 4.31633413e-01 5.71450114e-01 1.02203250e+00 7.28804708e-01 6.30264223e-01 4.59485680e-01 6.14284217e-01] [ 6.14284217e-01 6.35630071e-01 2.94166863e-01 9.60899591e-01 8.43189538e-01 6.30699575e-01 5.06972551e-01 7.42625713e-01 6.64455950e-01 2.43851304e-01 1.12621522e+00 6.14284217e-01] [ 6.14284217e-01 4.67162848e-01 5.46439111e-01 7.46095002e-01 1.06777823e+00 8.62918496e-01 7.72842646e-01 7.98108220e-01 7.83446431e-01 8.72327685e-01 4.87486124e-01 6.14284217e-01]]]]; ov_res: [[[[ 5.98143101e-01 -7.19767749e-01 -1.38012218e+00 3.93483490e-01 2.24412394e+00 9.66214538e-01 -5.88360012e-01 7.53196418e-01 -6.34884298e-01 -6.06323659e-01 1.01788688e+00 5.98143101e-01] [ 5.98143101e-01 1.96697545e+00 -8.99453089e-03 1.60759613e-01 2.29285502e+00 1.44662991e-01 -9.34815347e-01 -2.80188417e+00 -1.99343145e-01 1.70270991e+00 1.08390653e+00 5.98143101e-01] [ 5.98143101e-01 -6.54543340e-01 7.37277806e-01 1.19052422e+00 5.80752313e-01 2.50010419e+00 8.34503293e-01 -2.39099532e-01 1.56652284e+00 -2.43934169e-01 7.10134447e-01 5.98143101e-01] [ 5.98143101e-01 1.98182797e+00 1.12743628e+00 2.57570356e-01 3.71884525e-01 6.43875003e-01 -6.20586216e-01 1.75989461e+00 -6.59844279e-01 -6.81391835e-01 7.41312563e-01 5.98143101e-01] [ 5.98143101e-01 5.23764133e-01 2.08521724e+00 -1.80064642e+00 -8.66037548e-01 -1.76077381e-01 8.11730325e-01 9.01606586e-03 7.30797708e-01 1.28269064e+00 2.18953204e+00 5.98143101e-01] [ 5.98143101e-01 -4.71162677e-01 1.79264879e+00 8.70262086e-01 3.33263338e-01 3.45942169e-01 -2.01076246e-03 2.05698609e+00 1.79954171e+00 1.42698717e+00 2.56789923e+00 5.98143101e-01] [ 5.98143101e-01 1.25013793e+00 -1.22579157e+00 1.52744555e+00 6.52780533e-01 2.80510807e+00 -4.23336983e-01 
1.79872274e+00 1.39335537e+00 4.62701917e-02 6.41697764e-01 5.98143101e-01] [ 5.98143101e-01 -3.09145272e-01 -7.78174937e-01 2.03047007e-01 9.67299819e-01 8.00392449e-01 3.21902633e-01 3.02282423e-01 7.56365597e-01 -7.34537616e-02 4.08301651e-01 5.98143101e-01] [ 5.98143101e-01 -2.60834068e-01 1.20413291e+00 9.10520971e-01 8.93306613e-01 1.49004114e+00 7.00392604e-01 -3.52112830e-01 2.18637884e-01 1.79808962e+00 -8.62075210e-01 5.98143101e-01] [ 5.98143101e-01 1.03992474e+00 5.59336483e-01 -1.14027655e+00 -6.37260616e-01 2.80634856e+00 1.85970438e+00 2.05788240e-01 6.43178940e-01 2.42683578e+00 6.94022238e-01 5.98143101e-01]] [[-4.49345350e-01 -9.71346736e-01 -5.04985929e-01 -1.80011168e-01 -3.08104873e-01 -8.16565096e-01 4.70683873e-02 -4.61378664e-01 -3.73752236e-01 -3.80926996e-01 -7.90422797e-01 -4.49345350e-01] [-4.49345350e-01 -3.44122857e-01 -1.04486394e+00 -3.21094513e-01 -6.35182798e-01 -1.04771793e+00 -5.87887526e-01 -6.90344155e-01 3.69776785e-01 -4.48098987e-01 -5.58055162e-01 -4.49345350e-01] [-4.49345350e-01 2.14110404e-01 -9.24831986e-01 -2.55642921e-01 -5.94359398e-01 -4.52985555e-01 -8.69768932e-02 -2.48553738e-01 -1.72366238e+00 -5.54921389e-01 -5.83341122e-01 -4.49345350e-01] [-4.49345350e-01 -4.56986487e-01 -3.61999094e-01 -7.33139157e-01 -5.77084124e-01 -4.20770764e-01 4.67201293e-01 -5.55784583e-01 -1.12748277e+00 1.51742697e-01 -3.77110571e-01 -4.49345350e-01] [-4.49345350e-01 -4.94931638e-01 -7.31243610e-01 -6.89195514e-01 -2.94569254e-01 -6.30905628e-01 -6.89393580e-01 -4.94789332e-01 4.86463279e-01 -3.97284836e-01 -3.82272035e-01 -4.49345350e-01] [-4.49345350e-01 -8.62641096e-01 -1.63618696e+00 -1.30328262e+00 -7.39769220e-01 -7.07133949e-01 -9.66558933e-01 -6.33414984e-01 -3.28071535e-01 -3.90073776e-01 1.09185919e-01 -4.49345350e-01] [-4.49345350e-01 -4.19527173e-01 -3.87908705e-02 4.88259017e-01 -8.04072320e-01 -1.11401045e+00 -4.34931487e-01 -1.03295922e+00 -8.66182864e-01 -3.29813331e-01 -6.92987084e-01 -4.49345350e-01] 
[-4.49345350e-01 4.78124529e-01 -5.89378595e-01 5.27594686e-01 -4.65552032e-01 -2.30737507e-01 -3.16858739e-01 -5.80012143e-01 -5.37958622e-01 -8.08193564e-01 -5.05798995e-01 -4.49345350e-01] [-4.49345350e-01 -1.00313973e+00 -8.01149249e-01 -3.76428753e-01 -1.45111024e-01 -7.03091562e-01 -1.21881402e+00 -5.14938235e-01 -9.95557129e-01 -2.79241502e-01 6.48149177e-02 -4.49345350e-01] [-4.49345350e-01 3.82542402e-01 2.77418524e-01 -3.00080795e-02 1.30050415e-02 -4.44824062e-02 -6.64191306e-01 -1.40095603e+00 -4.02837902e-01 -9.28496122e-01 -1.09648871e+00 -4.49345350e-01]] [[ 6.14284217e-01 1.32242933e-01 3.50891739e-01 5.94483435e-01 9.10848022e-01 7.68716097e-01 5.92029095e-01 5.86740077e-01 9.08114731e-01 3.66401464e-01 8.81695092e-01 6.14284217e-01] [ 6.14284217e-01 6.53059959e-01 3.00553054e-01 4.20695961e-01 6.30935609e-01 3.77017409e-01 6.15596712e-01 5.93609631e-01 7.33200312e-01 8.96949947e-01 5.55117905e-01 6.14284217e-01] [ 6.14284217e-01 1.91044345e-01 9.39190209e-01 1.27946877e+00 6.61233187e-01 6.91518724e-01 6.66944325e-01 6.23831928e-01 5.14450788e-01 3.70322019e-01 7.84710348e-01 6.14284217e-01] [ 6.14284217e-01 5.19902468e-01 6.10897064e-01 7.22248852e-01 3.01177263e-01 8.42062950e-01 4.73865271e-01 6.03512347e-01 6.03014827e-01 7.78566062e-01 1.16454852e+00 6.14284217e-01] [ 6.14284217e-01 3.54161233e-01 5.97981632e-01 6.33744597e-01 7.87714362e-01 3.38452011e-01 3.98967236e-01 5.36677480e-01 3.19702387e-01 1.23503082e-01 7.21836209e-01 6.14284217e-01] [ 6.14284217e-01 6.97755158e-01 7.52584755e-01 8.83196771e-01 6.26143396e-01 8.76581132e-01 7.65763342e-01 5.07172763e-01 3.70901853e-01 6.06593192e-01 2.84473121e-01 6.14284217e-01] [ 6.14284217e-01 8.13841939e-01 7.62578666e-01 2.57092029e-01 9.94326532e-01 -4.73702587e-02 8.89074087e-01 8.41237724e-01 5.87156951e-01 8.24968696e-01 6.03415906e-01 6.14284217e-01] [ 6.14284217e-01 4.86772746e-01 3.27281892e-01 4.97868568e-01 7.73134172e-01 4.31633383e-01 5.71450114e-01 1.02203250e+00 7.28804708e-01 
6.30264223e-01 4.59485680e-01 6.14284217e-01] [ 6.14284217e-01 6.35630071e-01 2.94166863e-01 9.60899591e-01 8.43189538e-01 6.30699575e-01 5.06972551e-01 7.42625713e-01 6.64455950e-01 2.43851289e-01 1.12621522e+00 6.14284217e-01] [ 6.14284217e-01 4.67162848e-01 5.46439111e-01 7.46095002e-01 1.06777823e+00 8.62918496e-01 7.72842705e-01 7.98108220e-01 7.83446372e-01 8.72327685e-01 4.87486094e-01 6.14284217e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [1, 0], 'dilations': [2, 2], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_545.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.7422 -0.4508 1.4937 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 1.7038 (2,1,.,.) = -0.4195 (3,1,.,.) = 0.3745 [ CPUFloatType{3,1,1,1} ]]() %11 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%11) fw_re: [[[[ 0.44503075 1.0451181 -1.3617435 0.74333197 1.0417054 0.9555846 1.4180782 -1.026422 -0.07741457 0.95737356] [ 0.29388326 0.14196217 2.3261993 -0.6791043 0.9030009 1.5941951 -2.8324673 -0.4238013 -2.8726344 -1.4934032 ] [-3.2359784 -0.238325 -1.709245 0.40799648 1.4889076 -0.33726445 0.7945785 0.18505347 -0.97102517 0.44937152] [-1.4067223 -0.219935 -0.27343097 -0.5920311 0.32975525 -0.19266099 -1.3235362 1.0489485 -3.462303 0.89417464] [-1.3695693 1.1299131 -2.4634209 -0.716996 -0.2808994 -2.2745209 -0.3468601 -1.1121047 0.36666387 -1.8215535 ] [-1.3470103 -0.9153766 0.31932265 -2.3985171 -0.8379073 -1.5100634 -3.3005424 0.44144696 0.607698 -1.578956 ] [-2.959712 -1.0620439 -2.398556 2.356213 -0.8488511 0.16814929 -3.5697498 -0.15575743 -2.9470146 
0.8712422 ] [ 1.7434404 0.7162842 -1.9363444 -0.88289064 -1.6328039 0.33571917 -2.8349855 0.7543939 0.52951545 -1.395253 ]] [[-0.2881685 -1.1970295 0.0319925 -0.5813753 -0.20975111 -0.9262196 0.04549214 -0.8085997 -0.39190227 -0.48099324] [-0.7648264 -0.35545278 -0.94555676 -0.00579366 0.18443981 -0.2272312 -0.63749063 -0.6279609 -0.6749122 -0.34526497] [-0.1928179 -0.97124374 0.19879976 -0.2309876 -0.04353642 -0.71027416 -0.25862196 -0.35464868 -0.33497322 0.47552183] [-0.370864 -0.20958942 -1.8150628 -0.41555312 -1.0871936 -0.61526346 -0.220682 0.13278458 0.34987643 0.02266037] [-0.9837611 0.13495788 -0.6096407 -0.4167034 -0.10960969 -0.93605876 -0.36355364 -0.25982082 -0.07563427 0.46351913] [-0.33456033 0.6558267 -0.3849925 0.22927842 -0.21104822 -0.52085084 -0.5003994 -0.44069183 0.01897424 -0.48095122] [-0.632739 -0.0117144 -0.2453396 -0.41788465 -0.28478473 -0.745582 -0.26054716 -0.1886361 -0.3502041 -0.31925568] [-0.7291818 -1.3833982 -0.9394823 -0.11254027 -0.6807038 -0.90152663 0.11233297 -0.92892164 -0.29131085 -0.23492764]] [[ 1.1119331 1.0764546 1.4731888 1.4742247 2.1446993 1.4894336 1.9584383 1.1909992 0.9481028 1.8127185 ] [ 1.0126898 2.1790063 1.642179 1.2771121 1.0689378 1.6166716 1.6731223 1.4279957 1.685265 2.245629 ] [ 1.6370173 1.7172334 1.7223169 1.9241741 1.1607928 1.4675702 1.1568223 1.488296 0.9641471 1.9912398 ] [ 1.3899037 1.4349617 1.3330344 1.2693689 2.1138694 1.2344801 1.9254179 1.5074698 2.0248454 1.233192 ] [ 1.4373566 1.2828324 1.41561 1.5828214 1.2938812 1.1775385 2.1396165 0.85527575 1.3502696 1.5018189 ] [ 1.420064 1.0470583 1.0919706 1.4850826 1.3454207 1.337799 1.7608844 1.470981 0.9752878 1.7730689 ] [ 1.1320972 1.5328754 1.6648058 0.8746122 1.5196738 1.234787 1.6498618 1.1672438 1.638873 1.8291962 ] [ 1.7400472 1.8245922 1.6401615 1.3453615 1.779428 1.4805889 1.6626999 1.5685666 1.1872315 1.5296328 ]]]]; ov_res: [[[[ 0.44503075 1.0451181 -1.3617435 0.74333197 1.0417054 0.9555846 1.4180782 -1.026422 -0.07741457 0.95737356] [ 
0.29388326 0.14196217 2.3261993 -0.6791043 0.9030009 1.5941951 -2.8324673 -0.4238013 -2.8726344 -1.4934032 ] [-3.2359784 -0.238325 -1.709245 0.40799648 1.4889076 -0.33726445 0.7945785 0.18505347 -0.97102517 0.44937152] [-1.4067223 -0.219935 -0.27343097 -0.5920311 0.32975525 -0.19266099 -1.3235362 1.0489485 -3.462303 0.89417464] [-1.3695693 1.1299131 -2.4634209 -0.716996 -0.2808994 -2.2745209 -0.3468601 -1.1121047 0.36666387 -1.8215535 ] [-1.3470103 -0.9153766 0.31932265 -2.3985171 -0.8379073 -1.5100634 -3.3005424 0.44144696 0.607698 -1.578956 ] [-2.959712 -1.0620439 -2.398556 2.356213 -0.8488511 0.16814929 -3.5697498 -0.15575743 -2.9470146 0.8712422 ] [ 1.7434404 0.7162842 -1.9363444 -0.88289064 -1.6328039 0.33571917 -2.8349855 0.7543939 0.52951545 -1.395253 ]] [[-0.2881685 -1.1970295 0.0319925 -0.5813753 -0.20975111 -0.9262196 0.04549214 -0.8085997 -0.39190227 -0.48099324] [-0.7648264 -0.35545278 -0.94555676 -0.00579366 0.18443981 -0.2272312 -0.63749063 -0.6279609 -0.6749122 -0.34526497] [-0.1928179 -0.97124374 0.19879976 -0.2309876 -0.04353642 -0.71027416 -0.25862196 -0.35464868 -0.33497322 0.47552183] [-0.370864 -0.20958942 -1.8150628 -0.41555312 -1.0871936 -0.61526346 -0.220682 0.13278458 0.34987643 0.02266037] [-0.9837611 0.13495788 -0.6096407 -0.4167034 -0.10960969 -0.93605876 -0.36355364 -0.25982082 -0.07563427 0.46351913] [-0.33456033 0.6558267 -0.3849925 0.22927842 -0.21104822 -0.52085084 -0.5003994 -0.44069183 0.01897424 -0.48095122] [-0.632739 -0.0117144 -0.2453396 -0.41788465 -0.28478473 -0.745582 -0.26054716 -0.1886361 -0.3502041 -0.31925568] [-0.7291818 -1.3833982 -0.9394823 -0.11254027 -0.6807038 -0.90152663 0.11233297 -0.92892164 -0.29131085 -0.23492764]] [[ 1.1119331 1.0764546 1.4731888 1.4742247 2.1446993 1.4894336 1.9584383 1.1909992 0.9481028 1.8127185 ] [ 1.0126898 2.1790063 1.642179 1.2771121 1.0689378 1.6166716 1.6731223 1.4279957 1.685265 2.245629 ] [ 1.6370173 1.7172334 1.7223169 1.9241741 1.1607928 1.4675702 1.1568223 1.488296 0.9641471 
1.9912398 ] [ 1.3899037 1.4349617 1.3330344 1.2693689 2.1138694 1.2344801 1.9254179 1.5074698 2.0248454 1.233192 ] [ 1.4373566 1.2828324 1.41561 1.5828214 1.2938812 1.1775385 2.1396165 0.85527575 1.3502696 1.5018189 ] [ 1.420064 1.0470583 1.0919706 1.4850826 1.3454207 1.337799 1.7608844 1.470981 0.9752878 1.7730689 ] [ 1.1320972 1.5328754 1.6648058 0.8746122 1.5196738 1.234787 1.6498618 1.1672438 1.638873 1.8291962 ] [ 1.7400472 1.8245922 1.6401615 1.3453615 1.779428 1.4805889 1.6626999 1.5685666 1.1872315 1.5296328 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [2, 2], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_547.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.1748 -0.0151 0.2010 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.8752 (2,1,.,.) = 0.5233 (3,1,.,.) = -0.6133 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 3.2000232 -0.5644254 2.7814558 0.14391097 0.85302603 0.8561863 1.7590063 2.3497128 0.7163048 1.0005757 ] [ 0.23756519 2.6479526 1.1640396 1.6490062 3.49452 1.771831 0.27152112 0.3852228 2.2760782 1.3640552 ] [ 0.69995385 -0.37888616 1.2718731 0.76433337 1.2670273 0.9404648 -0.09602715 1.4494126 0.72628605 0.3594739 ] [ 1.2883191 -0.04994609 1.4766439 1.1460642 0.96038884 1.470222 0.6752627 1.8756338 -0.06997349 2.1027515 ] [ 3.471313 0.83787686 1.9187169 0.37617907 0.94247866 1.2311113 0.6954531 0.05221894 2.7224383 0.95810246] [ 2.4696615 0.46124306 -0.13344821 1.7036011 1.0519301 1.6937795 0.19327188 1.1138352 -1.0427344 2.5915391 ] [ 1.764323 3.0379379 1.8364189 0.02339998 1.5196047 0.60168 0.38227674 0.34169945 0.13923189 0.7436119 ] [ 1.0968634 0.9296641 1.6834844 2.1088216 1.6804837 0.70691705 3.884385 1.2573272 -0.6214108 1.6040589 ] [ 1.0264705 1.738336 0.33102772 1.4622375 
1.0698503 0.45348123 0.23854576 1.7198614 0.7534795 1.6350826 ] [-0.14181077 0.8577075 0.34400067 2.9550438 1.5622472 0.31078044 2.3256812 -0.88681215 0.28225014 0.60157984]] [[ 0.5213682 1.1061361 0.1975229 -0.0946017 -0.7036747 -0.5813827 -0.5109598 -0.41377077 0.1835655 -0.86992276] [-0.22289412 -0.84670156 0.6090839 0.05497397 -0.49329793 -0.17487717 0.26861903 -0.05058089 -0.02784545 0.09619732] [ 0.20021309 -0.6167248 -0.05581706 0.49313405 -0.4980462 -0.26561603 -0.20002303 -0.5622051 -0.24273735 -0.04996761] [-1.6883751 -0.72744006 -0.14366485 0.6418137 0.45811507 0.07063368 -0.17515095 -0.08684709 0.83600867 -0.55619484] [-0.36187062 -0.4411957 -0.1719647 0.99749875 0.47168928 1.0526968 -1.0178981 -0.361165 0.25560346 0.8458449 ] [-0.21615674 -0.42052102 -0.5307797 -0.5452476 -0.2735906 -0.0263772 -0.10545383 0.02086917 0.11885177 -0.28208166] [-0.03233557 0.51017964 0.28631395 0.1502994 -0.35423 0.5750284 0.38930088 0.7082507 -0.45345384 -0.1438082 ] [-0.55560863 0.3748376 -0.4615077 -0.33541998 0.14413004 -0.26176777 -0.12997416 -0.66193026 -0.61180854 0.23707244] [ 0.301172 -0.37061706 -0.35355866 0.539189 -0.32629022 1.0411428 0.11710865 0.59445083 -1.0021808 -0.28246897] [ 0.09013969 0.5928283 1.34834 0.2785573 0.07342016 0.7100107 0.7532007 0.19382662 -0.40312973 -1.2831458 ]] [[ 0.9640598 -0.10940643 0.09504557 -0.03031181 0.38162532 0.21113479 0.2956501 0.9029211 1.7664835 -0.26642868] [ 1.2752376 0.53417397 -0.5805317 0.9412329 -0.3806312 -0.6330336 1.0830095 -0.13013013 0.7466071 -0.03434108] [-1.0042197 -0.93384683 0.89873147 -0.11764919 -0.3859464 -0.06575079 0.51034725 1.289703 0.45400974 -0.2580862 ] [ 0.899966 -0.49428356 0.23219566 0.9620922 -1.9952037 -0.05342321 0.267457 0.41909742 0.75305563 0.7029324 ] [ 0.6466871 0.13775206 0.37905568 -0.5156115 -0.2032252 -0.8134322 0.46815136 0.42013007 0.2019673 0.57554376] [ 0.25671986 0.20832634 0.4751211 -0.91592723 -0.08490352 -0.32176542 0.35554454 -0.7831565 0.7899938 0.2670782 ] [ 0.741288 
1.102832 -0.4131192 1.1486084 0.62303066 1.1103398 0.9237743 -0.1006621 0.60582775 0.34410515] [-1.0316027 -0.7856474 0.39902326 0.04812099 0.3191499 0.63378817 0.9769891 0.34134096 0.01817198 -0.4101708 ] [-0.5717551 -0.28533992 -0.050541 0.00944403 -0.72682106 -0.5509126 0.23578794 0.3681846 1.0454391 0.5912342 ] [-0.42435628 -0.769525 -0.5615796 0.16477111 0.58968306 0.00923934 0.7800277 1.0992845 0.4633746 0.05970257]]]]; ov_res: [[[[ 3.2000232 -0.5644254 2.7814558 0.14391097 0.85302603 0.8561863 1.7590063 2.3497128 0.7163048 1.0005757 ] [ 0.23756519 2.6479526 1.1640396 1.6490062 3.49452 1.771831 0.27152112 0.3852228 2.2760782 1.3640552 ] [ 0.69995385 -0.37888616 1.2718731 0.76433337 1.2670273 0.9404648 -0.09602715 1.4494126 0.72628605 0.3594739 ] [ 1.2883191 -0.04994609 1.4766439 1.1460642 0.96038884 1.470222 0.6752627 1.8756338 -0.06997349 2.1027515 ] [ 3.471313 0.83787686 1.9187169 0.37617907 0.94247866 1.2311113 0.6954531 0.05221894 2.7224383 0.95810246] [ 2.4696615 0.46124306 -0.13344821 1.7036011 1.0519301 1.6937795 0.19327188 1.1138352 -1.0427344 2.5915391 ] [ 1.764323 3.0379379 1.8364189 0.02339998 1.5196047 0.60168 0.38227674 0.34169945 0.13923189 0.7436119 ] [ 1.0968634 0.9296641 1.6834844 2.1088216 1.6804837 0.70691705 3.884385 1.2573272 -0.6214108 1.6040589 ] [ 1.0264705 1.738336 0.33102772 1.4622375 1.0698503 0.45348123 0.23854576 1.7198614 0.7534795 1.6350826 ] [-0.14181077 0.8577075 0.34400067 2.9550438 1.5622472 0.31078044 2.3256812 -0.88681215 0.28225014 0.60157984]] [[ 0.5213682 1.1061361 0.1975229 -0.0946017 -0.7036747 -0.5813827 -0.5109598 -0.41377077 0.1835655 -0.86992276] [-0.22289412 -0.84670156 0.6090839 0.05497397 -0.49329793 -0.17487717 0.26861903 -0.05058089 -0.02784545 0.09619732] [ 0.20021309 -0.6167248 -0.05581706 0.49313405 -0.4980462 -0.26561603 -0.20002303 -0.5622051 -0.24273735 -0.04996761] [-1.6883751 -0.72744006 -0.14366485 0.6418137 0.45811507 0.07063368 -0.17515095 -0.08684709 0.83600867 -0.55619484] [-0.36187062 -0.4411957 
-0.1719647 0.99749875 0.47168928 1.0526968 -1.0178981 -0.361165 0.25560346 0.8458449 ] [-0.21615674 -0.42052102 -0.5307797 -0.5452476 -0.2735906 -0.0263772 -0.10545383 0.02086917 0.11885177 -0.28208166] [-0.03233557 0.51017964 0.28631395 0.1502994 -0.35423 0.5750284 0.38930088 0.7082507 -0.45345384 -0.1438082 ] [-0.55560863 0.3748376 -0.4615077 -0.33541998 0.14413004 -0.26176777 -0.12997416 -0.66193026 -0.61180854 0.23707244] [ 0.301172 -0.37061706 -0.35355866 0.539189 -0.32629022 1.0411428 0.11710865 0.59445083 -1.0021808 -0.28246897] [ 0.09013969 0.5928283 1.34834 0.2785573 0.07342016 0.7100107 0.7532007 0.19382662 -0.40312973 -1.2831458 ]] [[ 0.9640598 -0.10940643 0.09504557 -0.03031181 0.38162532 0.21113479 0.2956501 0.9029211 1.7664835 -0.26642868] [ 1.2752376 0.53417397 -0.5805317 0.9412329 -0.3806312 -0.6330336 1.0830095 -0.13013013 0.7466071 -0.03434108] [-1.0042197 -0.93384683 0.89873147 -0.11764919 -0.3859464 -0.06575079 0.51034725 1.289703 0.45400974 -0.2580862 ] [ 0.899966 -0.49428356 0.23219566 0.9620922 -1.9952037 -0.05342321 0.267457 0.41909742 0.75305563 0.7029324 ] [ 0.6466871 0.13775206 0.37905568 -0.5156115 -0.2032252 -0.8134322 0.46815136 0.42013007 0.2019673 0.57554376] [ 0.25671986 0.20832634 0.4751211 -0.91592723 -0.08490352 -0.32176542 0.35554454 -0.7831565 0.7899938 0.2670782 ] [ 0.741288 1.102832 -0.4131192 1.1486084 0.62303066 1.1103398 0.9237743 -0.1006621 0.60582775 0.34410515] [-1.0316027 -0.7856474 0.39902326 0.04812099 0.3191499 0.63378817 0.9769891 0.34134096 0.01817198 -0.4101708 ] [-0.5717551 -0.28533992 -0.050541 0.00944403 -0.72682106 -0.5509126 0.23578794 0.3681846 1.0454391 0.5912342 ] [-0.42435628 -0.769525 -0.5615796 0.16477111 0.58968306 0.00923934 0.7800277 1.0992845 0.4633746 0.05970257]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 1], 'bias_shape': [1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_549.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.725264}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.01 * 6.1993 (2,1,.,.) = -0.7948 (3,1,.,.) = 0.4623 [ CPUFloatType{3,1,1,1} ]]() %11 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%11) fw_re: [[[[-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [-1.5843463 -0.4449179 -0.67135674 -0.46440306 -1.7692127 -2.1542466 0.15578443 -1.5879214 1.1411746 -1.1589592 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [ 0.32460862 -1.2708011 0.78674906 -1.3153002 -0.22220725 -1.073022 -1.1559081 -0.4219496 -0.86012113 0.21069956] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [-1.8939948 0.03864241 -1.5746741 -0.5946819 -2.044906 -0.76056397 -0.8418674 -1.090534 -0.00441796 -2.0444553 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 
-0.72526425 -0.72526425 -0.72526425 -0.72526425] [ 0.95679766 0.69870025 -0.04001755 -2.2567358 -1.7814326 -0.15304136 -0.6787501 -0.80032974 -0.33113992 0.5335589 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [-1.9465916 -0.24483135 -1.8309894 -0.8538082 -1.4413693 -0.5621017 -0.27763766 -0.5593454 -1.4932394 -1.3412242 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [ 0.7694065 -0.5490112 0.72196954 -1.5675807 -1.6335688 -1.6170924 0.10448092 -1.6832013 -2.19231 -1.8911765 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [-0.13672984 -0.36457348 -0.93934214 -0.4977818 -1.3729655 -1.2519975 -0.5321306 -0.73691213 1.4461169 1.1115589 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [-0.542465 0.06109172 -1.3049004 -0.72082263 -1.7996507 -0.49764097 -2.8120384 -3.8416243 -1.6030796 -0.7326083 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425]]]]; ov_res: [[[[-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [-1.5843463 -0.4449179 -0.67135674 -0.46440306 -1.7692127 -2.1542466 0.15578443 -1.5879214 1.1411746 -1.1589592 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [ 0.32460862 -1.2708011 0.78674906 -1.3153002 -0.22220725 -1.073022 -1.1559081 -0.4219496 -0.86012113 0.21069956] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [-1.8939948 0.03864241 -1.5746741 -0.5946819 -2.044906 -0.76056397 -0.8418674 -1.090534 -0.00441796 -2.0444553 ] [-0.72526425 -0.72526425 -0.72526425 
-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [ 0.95679766 0.69870025 -0.04001755 -2.2567358 -1.7814326 -0.15304136 -0.6787501 -0.80032974 -0.33113992 0.5335589 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [-1.9465916 -0.24483135 -1.8309894 -0.8538082 -1.4413693 -0.5621017 -0.27763766 -0.5593454 -1.4932394 -1.3412242 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [ 0.7694065 -0.5490112 0.72196954 -1.5675807 -1.6335688 -1.6170924 0.10448092 -1.6832013 -2.19231 -1.8911765 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [-0.13672984 -0.36457348 -0.93934214 -0.4977818 -1.3729655 -1.2519975 -0.5321306 -0.73691213 1.4461169 1.1115589 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425] [-0.542465 0.06109172 -1.3049004 -0.72082263 -1.7996507 -0.49764097 -2.8120384 -3.8416243 -1.6030796 -0.7326083 ] [-0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425 -0.72526425]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} 0] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_551.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.0708 0.3703 0.0189 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.01 * 6.9491 (2,1,.,.) = -0.1122 (3,1,.,.) = 2.7876 (1,2,.,.) = -1.7868 (2,2,.,.) = 1.8945 (3,2,.,.) = -0.9798 (1,3,.,.) = -0.9220 (2,3,.,.) = -0.8033 (3,3,.,.) = 1.2877 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[-2.5184531e+00 -3.3251786e+00 -1.9926333e+00 -2.5077121e+00 1.0154902e+00 -2.2421465e+00 -2.1491010e+00 4.7358203e+00 -1.1107665e+00 -3.4521842e+00] [-2.0297247e-01 -1.3434850e+00 -1.5638683e+00 -2.1476159e+00 -4.3002295e+00 -1.3544376e+00 -9.8872989e-01 -6.3430130e-01 -3.0740237e+00 1.1235653e+00] [-4.3084531e+00 1.5245303e+00 -3.5530839e+00 -1.0642999e+00 1.3391224e+00 -4.6915370e-01 -1.4844137e-01 2.0599952e+00 1.4165159e+00 -9.4359136e-01] [ 5.2131605e-01 -3.5240498e+00 -2.5112853e+00 -2.6736646e+00 -1.2190752e+00 2.8392968e+00 2.3083735e-01 -5.7868900e+00 2.3363919e+00 4.3786824e-01] [-1.6526779e+00 -2.2176366e+00 1.2074000e+00 -1.6533067e+00 -2.8030467e-01 -2.8925958e+00 1.2041509e-01 -3.6045680e+00 -3.3475180e+00 -2.2305322e+00]] [[ 1.0490341e+00 7.8784341e-01 
-1.3825791e+00 3.8983452e+00 -6.8115240e-01 -6.8596303e-03 6.2195152e-01 -4.5528302e+00 -1.5787618e+00 3.8004754e+00] [ 1.6499877e+00 1.3752224e+00 1.6523614e+00 9.9522370e-01 -2.6270320e+00 -8.4380358e-02 2.1007143e-01 1.6316152e+00 1.1262528e+00 1.1826593e+00] [ 5.2265410e+00 -6.6399199e-01 -1.0685565e+00 -2.3182595e-01 -1.6168995e+00 -2.1761510e+00 2.6047918e-01 -1.9306772e+00 7.8408718e-01 1.7059210e-01] [ 3.1348658e+00 1.9695699e+00 2.6875570e+00 2.1723464e+00 1.2252665e+00 -2.5044329e+00 -5.7230401e-01 3.0317259e+00 -9.4769782e-01 -3.5034170e+00] [ 5.2969422e+00 2.3025210e-01 -1.3691974e+00 1.8583720e+00 -1.6553760e+00 1.7614696e+00 6.4032143e-01 2.0875881e+00 9.7238284e-01 -8.4745854e-01]] [[-1.0559756e-02 3.3289509e+00 3.1180053e+00 -6.6958380e+00 -4.6095753e+00 5.9296775e+00 6.2335839e+00 -1.2511048e+00 2.5381098e-02 -2.4131060e+00] [ 6.8842214e-01 2.2005577e+00 -4.8753009e+00 -2.9130344e+00 2.6727371e+00 2.3711095e+00 2.5188679e-01 -2.1610913e+00 -2.7067261e+00 1.1786542e+00] [-6.2484727e+00 -1.6516405e+00 -1.7711413e+00 1.1606371e+00 -2.5454278e+00 2.9310894e+00 3.4671059e+00 -1.5835967e+00 -3.5973081e-01 3.4628105e+00] [-7.9964342e+00 -5.0293900e-02 -5.4908983e-02 -6.0816830e-01 -2.5570917e+00 -1.2612133e+00 -1.6102632e+00 -1.8340135e+00 -5.8611524e-01 4.0900421e+00] [-7.4538741e+00 -5.5539466e-02 -8.0657405e-01 2.5914626e+00 4.6473608e+00 2.4922075e+00 -5.3862065e-01 4.3247380e+00 -2.4239359e+00 5.2444963e+00]]]]; ov_res: [[[[-2.5184531e+00 -3.3251786e+00 -1.9926333e+00 -2.5077121e+00 1.0154902e+00 -2.2421465e+00 -2.1491010e+00 4.7358203e+00 -1.1107665e+00 -3.4521842e+00] [-2.0297247e-01 -1.3434850e+00 -1.5638683e+00 -2.1476159e+00 -4.3002295e+00 -1.3544376e+00 -9.8872989e-01 -6.3430130e-01 -3.0740237e+00 1.1235653e+00] [-4.3084531e+00 1.5245303e+00 -3.5530839e+00 -1.0642999e+00 1.3391224e+00 -4.6915370e-01 -1.4844137e-01 2.0599952e+00 1.4165159e+00 -9.4359136e-01] [ 5.2131605e-01 -3.5240498e+00 -2.5112853e+00 -2.6736646e+00 -1.2190752e+00 
2.8392968e+00 2.3083735e-01 -5.7868900e+00 2.3363919e+00 4.3786824e-01] [-1.6526779e+00 -2.2176366e+00 1.2074000e+00 -1.6533067e+00 -2.8030467e-01 -2.8925958e+00 1.2041509e-01 -3.6045680e+00 -3.3475180e+00 -2.2305322e+00]] [[ 1.0490341e+00 7.8784341e-01 -1.3825791e+00 3.8983452e+00 -6.8115240e-01 -6.8596303e-03 6.2195152e-01 -4.5528302e+00 -1.5787618e+00 3.8004754e+00] [ 1.6499877e+00 1.3752224e+00 1.6523614e+00 9.9522370e-01 -2.6270320e+00 -8.4380358e-02 2.1007143e-01 1.6316152e+00 1.1262528e+00 1.1826593e+00] [ 5.2265410e+00 -6.6399199e-01 -1.0685565e+00 -2.3182595e-01 -1.6168995e+00 -2.1761510e+00 2.6047918e-01 -1.9306772e+00 7.8408718e-01 1.7059210e-01] [ 3.1348658e+00 1.9695699e+00 2.6875570e+00 2.1723464e+00 1.2252665e+00 -2.5044329e+00 -5.7230401e-01 3.0317259e+00 -9.4769782e-01 -3.5034170e+00] [ 5.2969422e+00 2.3025210e-01 -1.3691974e+00 1.8583720e+00 -1.6553760e+00 1.7614696e+00 6.4032143e-01 2.0875881e+00 9.7238284e-01 -8.4745854e-01]] [[-1.0559756e-02 3.3289509e+00 3.1180053e+00 -6.6958380e+00 -4.6095753e+00 5.9296775e+00 6.2335839e+00 -1.2511048e+00 2.5381098e-02 -2.4131060e+00] [ 6.8842214e-01 2.2005577e+00 -4.8753009e+00 -2.9130344e+00 2.6727371e+00 2.3711095e+00 2.5188679e-01 -2.1610913e+00 -2.7067261e+00 1.1786542e+00] [-6.2484727e+00 -1.6516405e+00 -1.7711413e+00 1.1606371e+00 -2.5454278e+00 2.9310894e+00 3.4671059e+00 -1.5835967e+00 -3.5973081e-01 3.4628105e+00] [-7.9964342e+00 -5.0293900e-02 -5.4908983e-02 -6.0816830e-01 -2.5570917e+00 -1.2612133e+00 -1.6102632e+00 -1.8340135e+00 -5.8611524e-01 4.0900421e+00] [-7.4538741e+00 -5.5539466e-02 -8.0657405e-01 2.5914626e+00 4.6473608e+00 2.4922075e+00 -5.3862065e-01 4.3247380e+00 -2.4239359e+00 5.2444963e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} 0] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_553.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.393555}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.01 * 2.2890 (2,1,.,.) = -0.2186 (3,1,.,.) = 0.9565 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[ 0.2881784 0.39355522 1.3663827 0.39355522 1.7848465 0.39355522 0.16962786 0.39355522 0.02823535 0.39355522 0.4846024 0.39355522 1.2724149 0.39355522 0.6447265 0.39355522 -0.2840308 0.39355522 0.29805607] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [ 1.8910339 0.39355522 -0.66873294 0.39355522 0.32361293 0.39355522 1.3392289 0.39355522 2.2904463 0.39355522 -1.4564538 0.39355522 -0.2641328 0.39355522 0.4977041 0.39355522 1.222353 0.39355522 -0.0490424 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-1.4711523 
0.39355522 -0.8947874 0.39355522 1.4382281 0.39355522 1.4667346 0.39355522 0.21920222 0.39355522 -0.45926893 0.39355522 -0.15439987 0.39355522 0.7869396 0.39355522 1.5540566 0.39355522 -0.4720562 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [ 0.4215264 0.39355522 -0.686387 0.39355522 -0.60182863 0.39355522 0.39343178 0.39355522 -0.28283685 0.39355522 0.6602665 0.39355522 0.7272221 0.39355522 -0.30965275 0.39355522 1.0860966 0.39355522 1.6378796 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-0.07087177 0.39355522 0.18345085 0.39355522 -0.55520195 0.39355522 1.7703624 0.39355522 0.10852477 0.39355522 0.1542354 0.39355522 -0.33200556 0.39355522 1.2062471 0.39355522 0.49052563 0.39355522 -1.5373547 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [ 0.9922098 0.39355522 1.6657128 0.39355522 1.0322971 0.39355522 0.06032377 0.39355522 -0.5329594 0.39355522 0.63813955 0.39355522 0.97213477 0.39355522 -0.23734152 0.39355522 2.1225483 0.39355522 -0.2935732 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-1.2026775 0.39355522 0.18504114 0.39355522 0.8675225 0.39355522 1.1621726 0.39355522 0.7443542 0.39355522 1.4938028 0.39355522 3.2964842 0.39355522 -0.54551125 0.39355522 2.13483 0.39355522 0.6530181 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 
0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-0.9149545 0.39355522 0.48659617 0.39355522 -0.12232041 0.39355522 -0.06252444 0.39355522 1.9690738 0.39355522 2.1404212 0.39355522 0.34562764 0.39355522 -2.3088279 0.39355522 2.3696282 0.39355522 1.4420006 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-0.17272204 0.39355522 -0.8811371 0.39355522 -1.361809 0.39355522 1.5345137 0.39355522 0.96526587 0.39355522 -0.78455347 0.39355522 1.2541542 0.39355522 -0.24077976 0.39355522 1.5974216 0.39355522 1.546418 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-0.02813366 0.39355522 -0.02079466 0.39355522 1.3853196 0.39355522 -0.7814446 0.39355522 -0.8437092 0.39355522 0.5684032 0.39355522 -1.1375792 0.39355522 -1.062274 0.39355522 0.02522936 0.39355522 0.34743017]]]]; ov_res: [[[[ 0.2881784 0.39355522 1.3663827 0.39355522 1.7848465 0.39355522 0.16962786 0.39355522 0.02823535 0.39355522 0.4846024 0.39355522 1.2724149 0.39355522 0.6447265 0.39355522 -0.2840308 0.39355522 0.29805607] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [ 1.8910339 0.39355522 -0.66873294 0.39355522 0.32361293 0.39355522 1.3392289 0.39355522 2.2904463 0.39355522 -1.4564538 0.39355522 -0.2641328 0.39355522 0.4977041 0.39355522 1.222353 0.39355522 -0.0490424 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 
0.39355522] [-1.4711523 0.39355522 -0.8947874 0.39355522 1.4382281 0.39355522 1.4667346 0.39355522 0.21920222 0.39355522 -0.45926893 0.39355522 -0.15439987 0.39355522 0.7869396 0.39355522 1.5540566 0.39355522 -0.4720562 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [ 0.4215264 0.39355522 -0.686387 0.39355522 -0.60182863 0.39355522 0.39343178 0.39355522 -0.28283685 0.39355522 0.6602665 0.39355522 0.7272221 0.39355522 -0.30965275 0.39355522 1.0860966 0.39355522 1.6378796 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-0.07087177 0.39355522 0.18345085 0.39355522 -0.55520195 0.39355522 1.7703624 0.39355522 0.10852477 0.39355522 0.1542354 0.39355522 -0.33200556 0.39355522 1.2062471 0.39355522 0.49052563 0.39355522 -1.5373547 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [ 0.9922098 0.39355522 1.6657128 0.39355522 1.0322971 0.39355522 0.06032377 0.39355522 -0.5329594 0.39355522 0.63813955 0.39355522 0.97213477 0.39355522 -0.23734152 0.39355522 2.1225483 0.39355522 -0.2935732 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-1.2026775 0.39355522 0.18504114 0.39355522 0.8675225 0.39355522 1.1621726 0.39355522 0.7443542 0.39355522 1.4938028 0.39355522 3.2964842 0.39355522 -0.54551125 0.39355522 2.13483 0.39355522 0.6530181 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 
0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-0.9149545 0.39355522 0.48659617 0.39355522 -0.12232041 0.39355522 -0.06252444 0.39355522 1.9690738 0.39355522 2.1404212 0.39355522 0.34562764 0.39355522 -2.3088279 0.39355522 2.3696282 0.39355522 1.4420006 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-0.17272204 0.39355522 -0.8811371 0.39355522 -1.361809 0.39355522 1.5345137 0.39355522 0.96526587 0.39355522 -0.78455347 0.39355522 1.2541542 0.39355522 -0.24077976 0.39355522 1.5974216 0.39355522 1.546418 ] [ 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522 0.39355522] [-0.02813366 0.39355522 -0.02079466 0.39355522 1.3853196 0.39355522 -0.7814446 0.39355522 -0.8437092 0.39355522 0.5684032 0.39355522 -1.1375792 0.39355522 -1.062274 0.39355522 0.02522936 0.39355522 0.34743017]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 2], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_555.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.0216 -0.9857 0.2196 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.8670 (2,1,.,.) = 1.0997 (3,1,.,.) = 0.3530 (1,2,.,.) = -0.4196 (2,2,.,.) = 0.2747 (3,2,.,.) = 0.8397 (1,3,.,.) = -0.7362 (2,3,.,.) = 0.4215 (3,3,.,.) = -0.1195 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 1.0668344 -0.09101379 2.0418577 1.3860338 2.729959 ] [ 1.1880835 -1.17423 0.8091762 2.0115492 1.821397 ] [ 2.9259295 1.3066078 1.1707727 0.23527402 -0.60022736] [ 1.5424294 1.1698778 1.5692044 2.673458 2.7187166 ] [ 0.62475634 2.284905 0.8615248 -0.725979 0.5702619 ]] [[-1.8272256 -1.3300515 -0.00559312 -2.13067 -1.3694088 ] [ 0.32042783 -2.0220587 -1.4817237 -1.4909822 -1.163756 ] [-0.49224263 -0.5624678 -2.398778 -1.8007679 -2.2810395 ] [-0.3547318 -0.9923567 -0.2432589 -0.87205297 0.10865802] [-0.4689594 -0.90465254 -1.7974703 -2.9177136 -1.0072346 ]] [[-0.87295264 -0.19949639 0.17845607 -0.1437617 -0.659685 ] [ 0.24948965 0.891511 0.7511959 0.46612856 -0.2799218 ] [ 0.4053188 1.0623605 -0.07379737 -0.02322064 0.5839006 ] [-0.3772446 -0.41980994 1.2752924 1.5080042 
-0.5229683 ] [-2.3430681 -1.7890346 -0.17710102 0.16238286 1.0176349 ]]]]; ov_res: [[[[ 1.0668344 -0.09101379 2.0418577 1.3860338 2.729959 ] [ 1.1880835 -1.17423 0.8091762 2.0115492 1.821397 ] [ 2.9259295 1.3066078 1.1707727 0.23527402 -0.60022736] [ 1.5424294 1.1698778 1.5692044 2.673458 2.7187166 ] [ 0.62475634 2.284905 0.8615248 -0.725979 0.5702619 ]] [[-1.8272256 -1.3300515 -0.00559312 -2.13067 -1.3694088 ] [ 0.32042783 -2.0220587 -1.4817237 -1.4909822 -1.163756 ] [-0.49224263 -0.5624678 -2.398778 -1.8007679 -2.2810395 ] [-0.3547318 -0.9923567 -0.2432589 -0.87205297 0.10865802] [-0.4689594 -0.90465254 -1.7974703 -2.9177136 -1.0072346 ]] [[-0.87295264 -0.19949639 0.17845607 -0.1437617 -0.659685 ] [ 0.24948965 0.891511 0.7511959 0.46612856 -0.2799218 ] [ 0.4053188 1.0623605 -0.07379737 -0.02322064 0.5839006 ] [-0.3772446 -0.41980994 1.2752924 1.5080042 -0.5229683 ] [-2.3430681 -1.7890346 -0.17710102 0.16238286 1.0176349 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} 1] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_557.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.7422 0.8549 -0.1826 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.01 * -5.1367 (2,1,.,.) = -0.2490 (3,1,.,.) = -0.1854 (1,2,.,.) = -0.8429 (2,2,.,.) = -0.1216 (3,2,.,.) = 0.4665 (1,3,.,.) = 0.2141 (2,3,.,.) = 0.6848 (3,3,.,.) = 3.2872 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 1.1691002e+00 -1.6599956e+00 1.2735908e+00 2.4846807e+00 1.6378903e+00 2.3734188e-01 7.3632812e-01 1.9942546e-01 3.1556460e-01 1.1592978e+00] [ 1.1904213e+00 -4.5703232e-02 4.4293046e-02 1.4074675e+00 3.0370805e-01 5.4769665e-01 -3.8060308e-02 1.8240068e+00 2.6738954e+00 1.1584784e+00] [ 1.0184326e+00 6.5851516e-01 1.8257272e+00 2.4792552e-02 7.4708730e-01 4.3298998e-01 -1.1345477e+00 2.4841990e+00 2.7461424e+00 2.7760332e+00] [-2.5766695e-01 -4.4393182e-02 -2.6071018e-01 -8.4009999e-01 8.1014609e-01 -8.6752474e-02 8.1372005e-01 3.1655991e+00 1.0155971e+00 -1.0083766e+00] [ 1.8430216e+00 5.8621186e-01 2.6104227e-01 2.1634445e+00 -5.5014533e-01 2.5021112e+00 9.1241223e-01 -6.6122538e-01 -1.6659842e+00 1.6415009e+00]] [[ 1.4053597e+00 2.0538849e-01 2.1735871e+00 
1.0481733e-01 -3.7431717e-05 7.4480587e-01 8.2534444e-01 5.9863478e-01 1.2770873e+00 2.9493314e-01] [ 9.8880589e-02 5.3089404e-01 1.6505821e+00 5.7435727e-01 -3.6932999e-01 8.7642539e-01 1.3961957e+00 1.4488779e+00 1.1325246e+00 -5.1812941e-01] [ 1.7672185e+00 8.5234845e-01 2.2563028e+00 8.0452633e-01 1.3299745e-01 1.0170131e+00 -1.9682854e-01 1.4194849e+00 4.9235433e-01 1.2060766e+00] [ 7.1206796e-01 -4.9063200e-01 1.1549454e+00 1.2991276e+00 8.9378744e-01 6.3383031e-01 4.3019199e-01 1.6839417e+00 9.7632033e-01 -9.0205461e-01] [ 1.0638831e+00 9.5849025e-01 1.3917204e+00 8.5712004e-01 1.6340220e-01 1.4452498e+00 1.4704030e+00 -2.8861946e-01 1.0956311e+00 9.7449571e-01]] [[ 2.0200198e+00 2.4689630e-01 6.3718243e+00 -5.9443092e+00 -6.2326365e+00 3.4500569e-01 -6.5799010e-01 -1.7991023e-01 -5.9304558e-02 -2.7365353e+00] [-4.6903868e+00 -1.8118057e+00 3.4538605e+00 -1.7822814e+00 -6.1939163e+00 1.2135830e+00 2.1968098e+00 1.6933291e+00 -6.3195527e-01 -7.8365936e+00] [ 3.5618334e+00 -4.8100632e-01 4.8751478e+00 2.9837722e-01 -4.3088865e+00 -2.6765215e-01 -1.4731232e+00 -1.7247200e-03 -3.9400742e+00 -1.7500681e+00] [ 8.0656976e-01 -5.5697575e+00 2.3892250e+00 5.4251118e+00 -1.0615308e-01 -1.7276040e-01 -5.0291319e+00 1.1016449e-01 -9.7030723e-01 -5.0234222e+00] [ 7.8836924e-01 2.4917626e+00 2.6029117e+00 -4.8622400e-01 -1.3306046e-01 -5.9265184e-01 8.3625758e-01 -3.4947817e+00 4.5457716e+00 -1.3208033e+00]]]]; ov_res: [[[[ 1.1691002e+00 -1.6599956e+00 1.2735908e+00 2.4846807e+00 1.6378903e+00 2.3734188e-01 7.3632812e-01 1.9942546e-01 3.1556460e-01 1.1592978e+00] [ 1.1904213e+00 -4.5703232e-02 4.4293046e-02 1.4074675e+00 3.0370805e-01 5.4769665e-01 -3.8060308e-02 1.8240068e+00 2.6738954e+00 1.1584784e+00] [ 1.0184326e+00 6.5851516e-01 1.8257272e+00 2.4792552e-02 7.4708730e-01 4.3298998e-01 -1.1345477e+00 2.4841990e+00 2.7461424e+00 2.7760332e+00] [-2.5766695e-01 -4.4393182e-02 -2.6071018e-01 -8.4009999e-01 8.1014609e-01 -8.6752474e-02 8.1372005e-01 3.1655991e+00 
1.0155971e+00 -1.0083766e+00] [ 1.8430216e+00 5.8621186e-01 2.6104227e-01 2.1634445e+00 -5.5014533e-01 2.5021112e+00 9.1241223e-01 -6.6122538e-01 -1.6659842e+00 1.6415009e+00]] [[ 1.4053597e+00 2.0538849e-01 2.1735871e+00 1.0481733e-01 -3.7431717e-05 7.4480587e-01 8.2534444e-01 5.9863478e-01 1.2770873e+00 2.9493314e-01] [ 9.8880589e-02 5.3089404e-01 1.6505821e+00 5.7435727e-01 -3.6932999e-01 8.7642539e-01 1.3961957e+00 1.4488779e+00 1.1325246e+00 -5.1812941e-01] [ 1.7672185e+00 8.5234845e-01 2.2563028e+00 8.0452633e-01 1.3299745e-01 1.0170131e+00 -1.9682854e-01 1.4194849e+00 4.9235433e-01 1.2060766e+00] [ 7.1206796e-01 -4.9063200e-01 1.1549454e+00 1.2991276e+00 8.9378744e-01 6.3383031e-01 4.3019199e-01 1.6839417e+00 9.7632033e-01 -9.0205461e-01] [ 1.0638831e+00 9.5849025e-01 1.3917204e+00 8.5712004e-01 1.6340220e-01 1.4452498e+00 1.4704030e+00 -2.8861946e-01 1.0956311e+00 9.7449571e-01]] [[ 2.0200198e+00 2.4689630e-01 6.3718243e+00 -5.9443092e+00 -6.2326365e+00 3.4500569e-01 -6.5799010e-01 -1.7991023e-01 -5.9304558e-02 -2.7365353e+00] [-4.6903868e+00 -1.8118057e+00 3.4538605e+00 -1.7822814e+00 -6.1939163e+00 1.2135830e+00 2.1968098e+00 1.6933291e+00 -6.3195527e-01 -7.8365936e+00] [ 3.5618334e+00 -4.8100632e-01 4.8751478e+00 2.9837722e-01 -4.3088865e+00 -2.6765215e-01 -1.4731232e+00 -1.7247200e-03 -3.9400742e+00 -1.7500681e+00] [ 8.0656976e-01 -5.5697575e+00 2.3892250e+00 5.4251118e+00 -1.0615308e-01 -1.7276040e-01 -5.0291319e+00 1.1016449e-01 -9.7030723e-01 -5.0234222e+00] [ 7.8836924e-01 2.4917626e+00 2.6029117e+00 -4.8622400e-01 -1.3306046e-01 -5.9265184e-01 8.3625758e-01 -3.4947817e+00 4.5457716e+00 -1.3208033e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} 1] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_559.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.371586}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.3224 (2,1,.,.) = -2.4800 (3,1,.,.) = 1.1507 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[-1.0730706 0.37158573 -2.412291 0.37158573 5.4725556 0.37158573 2.8070674 0.37158573 4.3466444 0.37158573 2.922512 0.37158573 -4.258847 0.37158573 -1.1664435 0.37158573 -6.3033457 0.37158573 -4.177393 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [ 0.6113697 0.37158573 -1.8497766 0.37158573 -2.2472186 0.37158573 1.6552087 0.37158573 0.630505 0.37158573 2.9890757 0.37158573 -0.8326472 0.37158573 -2.4693704 0.37158573 -3.0323133 0.37158573 3.8301582 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-2.4318757 0.37158573 
3.783671 0.37158573 4.1683207 0.37158573 -4.572711 0.37158573 -1.9996315 0.37158573 1.4657233 0.37158573 0.28321266 0.37158573 -0.10638744 0.37158573 -1.096257 0.37158573 -1.4720346 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [ 0.67383873 0.37158573 -0.9116981 0.37158573 1.0661151 0.37158573 4.076431 0.37158573 1.416135 0.37158573 0.21992913 0.37158573 -1.9941145 0.37158573 -1.7097203 0.37158573 -0.661098 0.37158573 0.78159523] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-5.2181954 0.37158573 4.9692802 0.37158573 -2.4117303 0.37158573 1.5326916 0.37158573 -1.9093341 0.37158573 -0.44688648 0.37158573 -1.959762 0.37158573 3.595737 0.37158573 -0.96324706 0.37158573 -0.22192246] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-0.0316551 0.37158573 -2.2394094 0.37158573 5.030351 0.37158573 7.085465 0.37158573 0.08200791 0.37158573 3.7373877 0.37158573 -0.6553322 0.37158573 0.33600923 0.37158573 1.0531917 0.37158573 0.93004143] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-1.7193619 0.37158573 3.660102 0.37158573 5.091038 0.37158573 3.8877735 0.37158573 1.4978875 0.37158573 -4.57549 0.37158573 0.7525414 0.37158573 -0.03430349 0.37158573 -2.3659015 0.37158573 2.5216951 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 
0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-1.9823698 0.37158573 1.0736411 0.37158573 -1.5635123 0.37158573 3.0578885 0.37158573 -0.1478926 0.37158573 -1.548641 0.37158573 5.1557565 0.37158573 5.490573 0.37158573 -5.4275093 0.37158573 -0.1237407 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-3.5897822 0.37158573 5.2780805 0.37158573 0.55307513 0.37158573 1.4959315 0.37158573 6.1753974 0.37158573 2.014327 0.37158573 0.9032775 0.37158573 2.9018378 0.37158573 0.02858344 0.37158573 -1.1793813 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [ 4.8046603 0.37158573 -0.75957286 0.37158573 -1.659249 0.37158573 2.064369 0.37158573 -0.13314784 0.37158573 -0.737687 0.37158573 0.15380068 0.37158573 -3.8565507 0.37158573 -1.033775 0.37158573 2.2696943 ]]]]; ov_res: [[[[-1.0730706 0.37158573 -2.412291 0.37158573 5.4725556 0.37158573 2.8070674 0.37158573 4.3466444 0.37158573 2.922512 0.37158573 -4.258847 0.37158573 -1.1664435 0.37158573 -6.3033457 0.37158573 -4.177393 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [ 0.6113697 0.37158573 -1.8497766 0.37158573 -2.2472186 0.37158573 1.6552087 0.37158573 0.630505 0.37158573 2.9890757 0.37158573 -0.8326472 0.37158573 -2.4693704 0.37158573 -3.0323133 0.37158573 3.8301582 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-2.4318757 0.37158573 3.783671 
0.37158573 4.1683207 0.37158573 -4.572711 0.37158573 -1.9996315 0.37158573 1.4657233 0.37158573 0.28321266 0.37158573 -0.10638744 0.37158573 -1.096257 0.37158573 -1.4720346 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [ 0.67383873 0.37158573 -0.9116981 0.37158573 1.0661151 0.37158573 4.076431 0.37158573 1.416135 0.37158573 0.21992913 0.37158573 -1.9941145 0.37158573 -1.7097203 0.37158573 -0.661098 0.37158573 0.78159523] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-5.2181954 0.37158573 4.9692802 0.37158573 -2.4117303 0.37158573 1.5326916 0.37158573 -1.9093341 0.37158573 -0.44688648 0.37158573 -1.959762 0.37158573 3.595737 0.37158573 -0.96324706 0.37158573 -0.22192246] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-0.0316551 0.37158573 -2.2394094 0.37158573 5.030351 0.37158573 7.085465 0.37158573 0.08200791 0.37158573 3.7373877 0.37158573 -0.6553322 0.37158573 0.33600923 0.37158573 1.0531917 0.37158573 0.93004143] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-1.7193619 0.37158573 3.660102 0.37158573 5.091038 0.37158573 3.8877735 0.37158573 1.4978875 0.37158573 -4.57549 0.37158573 0.7525414 0.37158573 -0.03430349 0.37158573 -2.3659015 0.37158573 2.5216951 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 
0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-1.9823698 0.37158573 1.0736411 0.37158573 -1.5635123 0.37158573 3.0578885 0.37158573 -0.1478926 0.37158573 -1.548641 0.37158573 5.1557565 0.37158573 5.490573 0.37158573 -5.4275093 0.37158573 -0.1237407 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [-3.5897822 0.37158573 5.2780805 0.37158573 0.55307513 0.37158573 1.4959315 0.37158573 6.1753974 0.37158573 2.014327 0.37158573 0.9032775 0.37158573 2.9018378 0.37158573 0.02858344 0.37158573 -1.1793813 ] [ 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573 0.37158573] [ 4.8046603 0.37158573 -0.75957286 0.37158573 -1.659249 0.37158573 2.064369 0.37158573 -0.13314784 0.37158573 -0.737687 0.37158573 0.15380068 0.37158573 -3.8565507 0.37158573 -1.033775 0.37158573 2.2696943 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [1, 1], 'dilations': [2, 2], 'groups': 1, 'output_padding': [1, 1], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_561.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 1]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.51375}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 1.6400 (2,1,.,.) = -1.0990 (3,1,.,.) = 1.8760 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -0.6364333 -1.5137511 -2.6308026 -1.5137511 -0.20774007 -1.5137511 -7.5916605 -1.5137511 -0.424196 -1.5137511 -4.4124303 -1.5137511 -2.9374986 -1.5137511 -2.3796275 -1.5137511 -5.271732 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -2.0421271 -1.5137511 -3.6015682 -1.5137511 -4.112012 -1.5137511 -2.8048558 -1.5137511 -5.291899 -1.5137511 -1.5011562 -1.5137511 -3.5448227 -1.5137511 -6.2495713 -1.5137511 -1.1498842 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 
-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 1.8887242 -1.5137511 -0.64087915 -1.5137511 -2.813389 -1.5137511 -3.928247 -1.5137511 -3.615922 -1.5137511 -2.4209445 -1.5137511 0.723627 -1.5137511 -4.2115974 -1.5137511 -2.5580807 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 1.3663684 -1.5137511 -1.9152566 -1.5137511 -3.0951846 -1.5137511 -4.022814 -1.5137511 -3.8560586 -1.5137511 0.5186292 -1.5137511 0.04546821 -1.5137511 -0.39852357 -1.5137511 -2.5057554 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -3.9904914 -1.5137511 -2.1207805 -1.5137511 -0.48714685 -1.5137511 -1.8024747 -1.5137511 -7.7188873 -1.5137511 -3.4188013 -1.5137511 -3.7837563 -1.5137511 -4.5243244 -1.5137511 2.1190243 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -4.81356 -1.5137511 3.7848167 -1.5137511 -1.1448224 -1.5137511 0.61713684 -1.5137511 0.96869457 -1.5137511 0.11835384 -1.5137511 -7.108473 -1.5137511 -3.092212 -1.5137511 1.8922423 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -0.8094466 -1.5137511 -1.7733307 -1.5137511 -2.7115774 -1.5137511 -0.29990613 -1.5137511 -1.3369092 -1.5137511 2.7639656 -1.5137511 -5.1009755 -1.5137511 -1.6080456 -1.5137511 -2.8168607 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 
-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -0.9989733 -1.5137511 -1.6208466 -1.5137511 -4.9751263 -1.5137511 -3.6687198 -1.5137511 -1.617491 -1.5137511 -8.195697 -1.5137511 -3.325648 -1.5137511 -3.0560572 -1.5137511 0.91703284] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -0.9021234 -1.5137511 -0.8476663 -1.5137511 -2.6894217 -1.5137511 0.970822 -1.5137511 1.4292277 -1.5137511 -3.7180982 -1.5137511 -3.9986954 -1.5137511 0.9260603 -1.5137511 -2.7513375 ]]]]; ov_res: [[[[-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -0.6364333 -1.5137511 -2.6308026 -1.5137511 -0.20774007 -1.5137511 -7.5916605 -1.5137511 -0.424196 -1.5137511 -4.4124303 -1.5137511 -2.9374986 -1.5137511 -2.3796275 -1.5137511 -5.271732 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -2.0421271 -1.5137511 -3.6015682 -1.5137511 -4.112012 -1.5137511 -2.8048558 -1.5137511 -5.291899 -1.5137511 -1.5011562 -1.5137511 -3.5448227 -1.5137511 -6.2495713 -1.5137511 -1.1498842 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 1.8887242 -1.5137511 -0.64087915 -1.5137511 -2.813389 -1.5137511 -3.928247 -1.5137511 -3.615922 -1.5137511 -2.4209445 -1.5137511 0.723627 -1.5137511 -4.2115974 -1.5137511 -2.5580807 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 
-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 1.3663684 -1.5137511 -1.9152566 -1.5137511 -3.0951846 -1.5137511 -4.022814 -1.5137511 -3.8560586 -1.5137511 0.5186292 -1.5137511 0.04546821 -1.5137511 -0.39852357 -1.5137511 -2.5057554 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -3.9904914 -1.5137511 -2.1207805 -1.5137511 -0.48714685 -1.5137511 -1.8024747 -1.5137511 -7.7188873 -1.5137511 -3.4188013 -1.5137511 -3.7837563 -1.5137511 -4.5243244 -1.5137511 2.1190243 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -4.81356 -1.5137511 3.7848167 -1.5137511 -1.1448224 -1.5137511 0.61713684 -1.5137511 0.96869457 -1.5137511 0.11835384 -1.5137511 -7.108473 -1.5137511 -3.092212 -1.5137511 1.8922423 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -0.8094466 -1.5137511 -1.7733307 -1.5137511 -2.7115774 -1.5137511 -0.29990613 -1.5137511 -1.3369092 -1.5137511 2.7639656 -1.5137511 -5.1009755 -1.5137511 -1.6080456 -1.5137511 -2.8168607 ] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -0.9989733 -1.5137511 -1.6208466 -1.5137511 -4.9751263 -1.5137511 -3.6687198 -1.5137511 -1.617491 -1.5137511 -8.195697 -1.5137511 -3.325648 -1.5137511 -3.0560572 -1.5137511 0.91703284] [-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 
-1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 -1.5137511 ] [-1.5137511 -0.9021234 -1.5137511 -0.8476663 -1.5137511 -2.6894217 -1.5137511 0.970822 -1.5137511 1.4292277 -1.5137511 -3.7180982 -1.5137511 -3.9986954 -1.5137511 0.9260603 -1.5137511 -2.7513375 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 2, 2], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_563.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, 2, strides=[12, 4, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 4.66613263e-01 6.28504276e-01 2.61886311e+00 -8.06521416e-01 2.84306741e+00 -2.17153525e+00 7.68717408e-01 -1.84020340e-01 -3.26341295e+00 -3.97663355e-01 2.41240501e+00] [ 5.95083952e-01 -4.29056644e+00 -4.47906637e+00 1.66621685e-01 1.09168768e-01 -1.37321687e+00 -2.13195324e+00 7.99793303e-01 -4.70658875e+00 -5.94584465e-01 1.86085045e+00] [-4.36700583e+00 -2.26247025e+00 4.21581316e+00 -1.91260171e+00 1.89650977e+00 -2.56223226e+00 -2.64449167e+00 3.21469975e+00 2.74729729e-02 4.78294194e-01 -8.70964229e-01] [-6.17744446e-01 -1.62501311e+00 -4.42492104e+00 -5.44056273e+00 -2.11014223e+00 8.23804617e-01 1.03229678e+00 -6.65560961e-01 1.38521075e-01 6.38616610e+00 -2.45387077e+00] [-3.38242745e+00 -1.53334141e-02 -6.09306288e+00 2.12635136e+00 -1.83360434e+00 -2.52706587e-01 3.90268612e+00 -8.24742734e-01 -2.53207636e+00 -3.20851684e+00 -1.90223658e+00] [ 2.47250652e+00 3.02909875e+00 -3.14046144e+00 -9.46078300e-02 1.57642555e+00 -1.20002604e+00 3.04613400e+00 1.51561975e-01 -4.63283014e+00 5.12177372e+00 -2.48293376e+00] [-1.02182639e+00 
1.56003565e-01 5.52013814e-01 2.83289576e+00 5.35492373e+00 -1.08791339e+00 -1.16389990e-03 4.50757384e-01 -2.40827274e+00 -5.49220920e-01 -3.69078636e+00] [ 3.48784953e-01 3.21347499e+00 -4.55262661e-02 -6.63543463e-01 3.18254352e+00 -9.36352134e-01 1.60320938e+00 -2.14841890e+00 2.05916023e+00 2.82189178e+00 -2.28087521e+00] [-7.81870306e-01 2.83808857e-01 -2.04322481e+00 -2.78505147e-01 1.71605611e+00 2.78200459e+00 -8.27261031e-01 5.00911474e-01 -1.74081612e+00 -1.22286284e+00 -3.74945593e+00] [ 8.71370852e-01 -6.41403794e-01 1.75470889e+00 1.66314077e+00 3.22096848e+00 3.00483704e-01 -4.27516699e+00 9.72420096e-01 5.18101406e+00 2.82121301e-01 1.35645974e+00] [-7.50833631e-01 -2.27329111e+00 4.18342352e-02 2.15067530e+00 2.10242271e-02 -1.88215756e+00 -1.42431891e+00 1.31189442e+00 3.34091210e+00 -7.18059659e-01 5.89128911e-01]] [[ 4.57348049e-01 -1.01281893e+00 -1.12526989e+00 6.50117218e-01 2.46159673e+00 -9.43347335e-01 6.29330456e-01 -1.32787931e+00 -4.04741859e+00 -2.12877369e+00 3.40604693e-01] [-2.19763398e+00 -1.41972494e+00 2.19126177e+00 -2.10364342e+00 -1.47457838e+00 -1.26648307e+00 -1.54412246e+00 4.45463657e+00 2.17495382e-01 -3.97550440e+00 -1.07379496e+00] [-1.08827114e-01 -2.87077951e+00 -7.07860041e+00 -5.24069500e+00 -1.08434510e+00 2.97185272e-01 2.19559669e-03 -5.92525363e-01 4.88450384e+00 2.57242131e+00 -3.79747361e-01] [-7.79868901e-01 -5.33864403e+00 2.82815671e+00 3.22191906e+00 3.73153305e+00 5.57832861e+00 1.69029355e+00 -2.34197512e-01 -6.16316080e+00 -2.72077894e+00 -1.16729212e+00] [ 9.63914871e-01 7.11093330e+00 3.99624586e+00 1.73468184e+00 -2.13121080e+00 -1.72724438e+00 3.84239626e+00 1.42398036e+00 -3.74190778e-01 1.58869290e+00 1.02891386e+00] [-1.13578486e+00 -1.06860662e+00 -4.22646379e+00 4.99270260e-01 2.88026190e+00 -2.88310623e+00 -8.04997683e-02 1.02231634e+00 -6.55217266e+00 -1.41247821e+00 -1.28374147e+00] [-1.74923718e+00 3.49346137e+00 2.76587248e+00 -1.22716165e+00 5.82119608e+00 3.11917925e+00 9.56730068e-01 
6.18632507e+00 4.39610195e+00 4.43301821e+00 2.19891047e+00] [-4.11446184e-01 -4.31062269e+00 -6.25236702e+00 -2.44454646e+00 5.30060053e-01 1.51299620e+00 1.80232614e-01 -3.28782749e+00 -7.34574318e-01 6.65530443e-01 -9.36486781e-01] [-1.56195593e+00 3.20167375e+00 3.64851952e+00 1.40646255e+00 1.40851259e+00 7.95741379e-01 9.41945672e-01 1.72074258e-01 -2.74897337e+00 3.27984500e+00 9.22674417e-01] [ 5.17978668e-01 -1.09044170e+00 8.86552453e-01 -1.28015816e+00 -4.46292162e-02 1.51060474e+00 -1.19008231e+00 4.25416708e-01 1.15743423e+00 -6.60524368e-02 1.10803366e+00] [ 1.58213472e+00 3.20197821e-01 -1.28898036e+00 -2.79552889e+00 -1.42168570e+00 2.10631871e+00 5.90530336e-01 -4.19532871e+00 -1.92669868e-01 -4.93723810e-01 -6.20683491e-01]] [[-1.69674471e-01 -2.93141317e+00 2.53245687e+00 3.23912263e+00 -1.42613792e+00 -9.59963858e-01 2.95507097e+00 -2.68713379e+00 1.86732507e+00 -2.89062858e-01 1.14657454e-01] [-1.41303313e+00 3.54044914e+00 -1.96111608e+00 -5.71220732e+00 5.08302879e+00 -3.93175900e-01 3.33431768e+00 1.96898043e+00 -3.86759973e+00 -4.75705957e+00 6.65978909e-01] [-7.74937868e-02 -2.01201439e+00 -1.78315210e+00 7.17415524e+00 -3.54473853e+00 4.80145359e+00 -1.76190436e+00 5.99931002e-01 5.77461100e+00 -8.17553520e+00 7.52546489e-01] [-8.64389300e-01 -2.10866284e+00 9.05145073e+00 -3.64132166e+00 -2.32065129e+00 -1.41224563e+00 -1.94008553e+00 2.93862820e+00 -1.97537005e-01 5.25151443e+00 3.62741733e+00] [ 8.70287776e-01 -2.72337437e-01 -1.77205801e+00 -4.29275960e-01 3.45686436e+00 -6.81880057e-01 2.99361563e+00 -2.39754963e+00 -8.30354512e-01 -1.53017163e+00 -7.05600262e-01] [-2.30183315e+00 8.79993916e-01 1.34571505e+00 2.55175161e+00 -1.98817301e+00 -2.33850896e-01 -3.58502686e-01 2.61298895e-01 -1.93637955e+00 2.60606766e+00 2.48918891e-01] [-1.10612357e+00 3.07772779e+00 -4.70889044e+00 -2.18492731e-01 7.00850201e+00 -4.39381933e+00 2.58621073e+00 9.19416606e-01 -4.86502552e+00 5.87986708e-01 -9.09506798e-01] [-1.42956448e+00 
-1.46056902e+00 1.99556196e+00 2.92412090e+00 9.48901415e-01 4.15680790e+00 -2.08739853e+00 1.04968190e+00 -3.02851725e+00 2.58315039e+00 2.36812568e+00] [-6.39314771e-01 2.23712969e+00 -2.00823140e+00 -2.29647207e+00 -3.29257202e+00 1.52535021e+00 2.70042920e+00 -1.08778644e+00 -8.16780329e-02 1.51589847e+00 -1.63662624e+00] [ 9.76585448e-02 2.30130643e-01 1.33036149e+00 -1.73418498e+00 -3.21487522e+00 2.84319830e+00 -8.77893716e-02 2.78079510e-01 -3.19395113e+00 6.16001010e-01 1.11859828e-01] [ 9.56784725e-01 -1.08486652e+00 -4.55488235e-01 -6.06697798e-02 1.01309538e+00 1.42622471e-01 -1.07385981e+00 -2.33555007e+00 3.55021572e+00 -5.90355158e-01 1.41864076e-01]]]]; ov_res: [[[[ 4.66613263e-01 6.28504276e-01 2.61886311e+00 -8.06521416e-01 2.84306741e+00 -2.17153525e+00 7.68717468e-01 -1.84020326e-01 -3.26341295e+00 -3.97663444e-01 2.41240501e+00] [ 5.95083952e-01 -4.29056597e+00 -4.47906590e+00 1.66621804e-01 1.09168664e-01 -1.37321734e+00 -2.13195324e+00 7.99793363e-01 -4.70658875e+00 -5.94584405e-01 1.86085045e+00] [-4.36700583e+00 -2.26247048e+00 4.21581316e+00 -1.91260183e+00 1.89650989e+00 -2.56223226e+00 -2.64449167e+00 3.21469951e+00 2.74730753e-02 4.78294224e-01 -8.70964229e-01] [-6.17744446e-01 -1.62501311e+00 -4.42492056e+00 -5.44056273e+00 -2.11014247e+00 8.23804259e-01 1.03229678e+00 -6.65561140e-01 1.38521090e-01 6.38616610e+00 -2.45387077e+00] [-3.38242745e+00 -1.53332874e-02 -6.09306288e+00 2.12635112e+00 -1.83360422e+00 -2.52706528e-01 3.90268636e+00 -8.24742854e-01 -2.53207636e+00 -3.20851707e+00 -1.90223658e+00] [ 2.47250652e+00 3.02909875e+00 -3.14046144e+00 -9.46076810e-02 1.57642543e+00 -1.20002627e+00 3.04613352e+00 1.51562050e-01 -4.63283014e+00 5.12177324e+00 -2.48293376e+00] [-1.02182627e+00 1.56003579e-01 5.52013755e-01 2.83289552e+00 5.35492420e+00 -1.08791304e+00 -1.16409070e-03 4.50757414e-01 -2.40827274e+00 -5.49221218e-01 -3.69078636e+00] [ 3.48784953e-01 3.21347547e+00 -4.55261692e-02 -6.63543403e-01 3.18254328e+00 -9.36352134e-01 
1.60320938e+00 -2.14841890e+00 2.05916047e+00 2.82189155e+00 -2.28087497e+00] [-7.81870365e-01 2.83808917e-01 -2.04322457e+00 -2.78505117e-01 1.71605611e+00 2.78200507e+00 -8.27261031e-01 5.00911474e-01 -1.74081624e+00 -1.22286296e+00 -3.74945617e+00] [ 8.71370912e-01 -6.41403794e-01 1.75470889e+00 1.66314077e+00 3.22096825e+00 3.00483614e-01 -4.27516747e+00 9.72420156e-01 5.18101406e+00 2.82121271e-01 1.35645974e+00] [-7.50833631e-01 -2.27329111e+00 4.18342389e-02 2.15067530e+00 2.10242700e-02 -1.88215756e+00 -1.42431891e+00 1.31189442e+00 3.34091210e+00 -7.18059659e-01 5.89128911e-01]] [[ 4.57348049e-01 -1.01281893e+00 -1.12526989e+00 6.50117159e-01 2.46159673e+00 -9.43347335e-01 6.29330456e-01 -1.32787931e+00 -4.04741907e+00 -2.12877369e+00 3.40604693e-01] [-2.19763398e+00 -1.41972482e+00 2.19126177e+00 -2.10364318e+00 -1.47457862e+00 -1.26648307e+00 -1.54412258e+00 4.45463657e+00 2.17495382e-01 -3.97550392e+00 -1.07379496e+00] [-1.08826950e-01 -2.87077999e+00 -7.07860041e+00 -5.24069500e+00 -1.08434546e+00 2.97185570e-01 2.19558878e-03 -5.92525303e-01 4.88450336e+00 2.57242107e+00 -3.79747361e-01] [-7.79869020e-01 -5.33864355e+00 2.82815695e+00 3.22191906e+00 3.73153210e+00 5.57832813e+00 1.69029355e+00 -2.34197363e-01 -6.16316080e+00 -2.72077870e+00 -1.16729200e+00] [ 9.63914990e-01 7.11093283e+00 3.99624562e+00 1.73468173e+00 -2.13121057e+00 -1.72724438e+00 3.84239626e+00 1.42398024e+00 -3.74190837e-01 1.58869302e+00 1.02891386e+00] [-1.13578475e+00 -1.06860662e+00 -4.22646427e+00 4.99270320e-01 2.88026190e+00 -2.88310647e+00 -8.04997087e-02 1.02231634e+00 -6.55217314e+00 -1.41247809e+00 -1.28374147e+00] [-1.74923718e+00 3.49346161e+00 2.76587200e+00 -1.22716177e+00 5.82119656e+00 3.11917925e+00 9.56730008e-01 6.18632507e+00 4.39610195e+00 4.43301821e+00 2.19891047e+00] [-4.11446184e-01 -4.31062269e+00 -6.25236750e+00 -2.44454670e+00 5.30060112e-01 1.51299596e+00 1.80232540e-01 -3.28782749e+00 -7.34574318e-01 6.65530324e-01 -9.36486721e-01] [-1.56195605e+00 
3.20167398e+00 3.64851928e+00 1.40646243e+00 1.40851259e+00 7.95741379e-01 9.41945791e-01 1.72074243e-01 -2.74897361e+00 3.27984500e+00 9.22674417e-01] [ 5.17978668e-01 -1.09044170e+00 8.86552453e-01 -1.28015840e+00 -4.46291305e-02 1.51060462e+00 -1.19008231e+00 4.25416589e-01 1.15743411e+00 -6.60524666e-02 1.10803366e+00] [ 1.58213472e+00 3.20197791e-01 -1.28898048e+00 -2.79552913e+00 -1.42168570e+00 2.10631871e+00 5.90530396e-01 -4.19532871e+00 -1.92669883e-01 -4.93723840e-01 -6.20683491e-01]] [[-1.69674471e-01 -2.93141294e+00 2.53245664e+00 3.23912263e+00 -1.42613804e+00 -9.59963977e-01 2.95507121e+00 -2.68713379e+00 1.86732519e+00 -2.89062947e-01 1.14657454e-01] [-1.41303325e+00 3.54044914e+00 -1.96111608e+00 -5.71220732e+00 5.08302879e+00 -3.93175989e-01 3.33431792e+00 1.96898043e+00 -3.86759973e+00 -4.75705957e+00 6.65978968e-01] [-7.74937794e-02 -2.01201439e+00 -1.78315234e+00 7.17415524e+00 -3.54473853e+00 4.80145311e+00 -1.76190424e+00 5.99931002e-01 5.77461147e+00 -8.17553520e+00 7.52546489e-01] [-8.64389181e-01 -2.10866261e+00 9.05145168e+00 -3.64132190e+00 -2.32065129e+00 -1.41224575e+00 -1.94008565e+00 2.93862820e+00 -1.97537065e-01 5.25151443e+00 3.62741733e+00] [ 8.70287716e-01 -2.72337407e-01 -1.77205801e+00 -4.29275990e-01 3.45686436e+00 -6.81880057e-01 2.99361563e+00 -2.39754987e+00 -8.30354452e-01 -1.53017163e+00 -7.05600262e-01] [-2.30183315e+00 8.79993856e-01 1.34571505e+00 2.55175161e+00 -1.98817313e+00 -2.33850822e-01 -3.58502567e-01 2.61298746e-01 -1.93637991e+00 2.60606766e+00 2.48918787e-01] [-1.10612357e+00 3.07772756e+00 -4.70888948e+00 -2.18492821e-01 7.00850105e+00 -4.39381933e+00 2.58621073e+00 9.19416666e-01 -4.86502504e+00 5.87986827e-01 -9.09506798e-01] [-1.42956460e+00 -1.46056902e+00 1.99556184e+00 2.92412066e+00 9.48901415e-01 4.15680790e+00 -2.08739877e+00 1.04968178e+00 -3.02851748e+00 2.58315063e+00 2.36812568e+00] [-6.39314830e-01 2.23712969e+00 -2.00823140e+00 -2.29647231e+00 -3.29257202e+00 1.52535021e+00 2.70042920e+00 
-1.08778644e+00 -8.16780180e-02 1.51589847e+00 -1.63662636e+00] [ 9.76585373e-02 2.30130672e-01 1.33036137e+00 -1.73418498e+00 -3.21487546e+00 2.84319806e+00 -8.77893046e-02 2.78079510e-01 -3.19395065e+00 6.16000950e-01 1.11859888e-01] [ 9.56784725e-01 -1.08486652e+00 -4.55488265e-01 -6.06696978e-02 1.01309526e+00 1.42622560e-01 -1.07385981e+00 -2.33555007e+00 3.55021596e+00 -5.90355158e-01 1.41864076e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 2, 2], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_565.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, 2, strides=[12, 4, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[-5.5996519e-01 -4.0888143e+00 1.0561184e+00 2.9911768e+00 -1.0968709e+00 4.8119822e+00 -2.3467152e+00 1.4363045e+00 1.5515362e+00] [-6.3770425e-01 2.7622848e+00 5.9919677e+00 3.1761622e-01 2.3957400e+00 -8.3255962e-02 5.0573378e+00 6.1761099e-01 -2.8008635e+00] [-1.0337429e+00 3.3885641e+00 3.3137412e+00 -9.1774249e-01 -3.5381994e+00 -2.4497402e-01 -1.0259581e+00 1.2764190e+00 -1.2745299e+00] [ 2.1916967e-02 -3.0526340e-01 -1.7174608e+00 -8.0165920e+00 -3.1270003e-01 1.5687711e+00 -3.7257032e+00 -6.6579986e-01 -1.5602632e+00] [ 2.8098929e+00 -4.9263434e+00 -6.1750197e-01 1.1630598e+00 3.6611624e+00 -1.4602519e+00 -2.8753585e-01 3.4071174e-01 -2.3260407e+00] [-5.0643229e+00 2.9333785e-02 1.3134141e+00 -2.2010179e+00 9.5648628e-01 -3.0744371e+00 -4.1216245e+00 -2.5689331e-01 -2.4937050e+00] [ 1.3680639e+00 3.1819668e-02 -2.3509676e+00 -3.5649371e+00 -3.1793871e+00 -4.3351836e+00 -3.1240389e+00 5.5174792e-01 -5.2691543e-01] [-1.9170938e-01 2.0694461e+00 -7.7800345e-01 -1.9531869e+00 1.5749022e+00 -3.2413054e+00 -3.7677276e-01 -1.6207978e+00 1.6520294e+00] [-3.5078329e-01 4.0444579e+00 -7.9112124e+00 
2.7404840e+00 -3.8716538e+00 -1.7992352e-01 1.8446406e+00 3.6372111e+00 2.8326430e+00]] [[-9.8165947e-01 -3.2047336e+00 -1.5930015e+00 8.4266561e-01 -2.9359586e+00 6.1343536e+00 -1.7038208e+00 5.3845525e+00 -4.7303820e+00] [-4.1429267e+00 4.3248043e+00 5.3230982e+00 -4.8443637e+00 -8.0783689e-01 1.2281320e+00 1.8478509e+00 -4.5242081e+00 -4.6832876e+00] [ 2.6673248e+00 6.5974841e+00 -6.6331525e+00 -3.0697355e+00 7.8333488e+00 7.9681497e+00 6.7742734e+00 -4.4840298e+00 -1.9787171e+00] [-1.8647566e+00 -2.2639172e+00 -2.0010736e+00 -2.2307844e+00 -2.2976663e+00 -8.8094807e+00 2.7516934e-01 5.4044819e+00 8.0253725e+00] [-9.8865695e-02 -6.2461991e+00 -2.6132836e+00 1.1872836e+00 6.8191344e-01 -4.0966291e+00 -2.0162365e+00 -1.3072090e+01 -3.3288567e+00] [ 2.3768554e+00 3.7552242e+00 -2.5017488e+00 -3.2592349e+00 -9.2299557e+00 -1.0231016e+01 -2.0166543e-01 7.1841817e+00 3.7296178e+00] [-3.3947808e-01 -4.2520757e+00 -4.5528417e+00 -9.2662412e-01 2.2796614e+00 1.1228836e+00 3.1126094e+00 1.7325488e+00 -5.5359182e+00] [-3.5592685e+00 -1.8677653e+00 3.1685606e-02 1.1286480e+00 -3.8278036e+00 -4.9710956e+00 2.4890776e+00 -4.9495630e+00 2.1627455e+00] [ 3.8681552e+00 3.2851620e+00 -7.1375294e+00 -2.9058623e+00 -7.6739383e+00 6.0214181e+00 -1.7031642e+00 -2.5193040e+00 5.7928104e+00]] [[-3.4658389e+00 2.0331584e-01 -7.1466100e-01 -2.2483416e+00 -1.9338143e+00 4.5174518e+00 -2.0788376e+00 6.2691635e-01 -2.8476698e+00] [-1.1478753e+00 4.8231089e-01 1.5061822e+00 4.3956774e-01 1.0279964e+00 2.0017314e+00 2.3949757e+00 2.7409554e+00 2.7991909e-01] [-2.9976861e+00 -1.4480613e-01 4.7924304e+00 9.5113814e-01 -2.8334243e+00 -3.6290622e+00 -2.1843231e+00 1.9682407e+00 2.8739083e+00] [ 2.4124570e+00 -4.8210254e+00 3.7442989e+00 -4.7336888e+00 -1.9085573e+00 2.4311250e-01 -6.1756575e-01 -4.7000999e+00 -3.4813447e+00] [ 6.6303220e+00 6.8629300e-03 -2.8825102e+00 1.4398102e+00 3.0173783e+00 -9.3066072e-01 -1.0789827e+00 4.0285096e+00 4.1002735e-01] [-3.1659954e+00 -3.6603599e+00 
-2.2734626e-01 1.0459179e+00 2.7560124e+00 2.9226136e+00 3.0248851e-01 -2.8994317e+00 -3.8121614e+00] [-2.9195949e-01 7.2953187e-02 -5.9638274e-01 -7.8508401e-01 -5.3305187e+00 -3.2030876e+00 -3.0817320e+00 -3.0269444e+00 8.3966321e-01] [ 7.3120648e-01 6.2851033e+00 -5.4674816e+00 6.3577706e-01 -2.5985816e+00 2.7697487e+00 -1.7993726e+00 -3.4859617e+00 3.8782902e+00] [-1.7722561e+00 5.3589358e+00 -5.0986838e+00 3.3523996e+00 -3.4495285e-01 -4.0734774e-01 -1.9640794e+00 6.3451824e+00 2.7685130e-01]]]]; ov_res: [[[[-5.5996531e-01 -4.0888143e+00 1.0561181e+00 2.9911768e+00 -1.0968709e+00 4.8119817e+00 -2.3467152e+00 1.4363045e+00 1.5515362e+00] [-6.3770425e-01 2.7622845e+00 5.9919672e+00 3.1761622e-01 2.3957400e+00 -8.3255962e-02 5.0573373e+00 6.1761111e-01 -2.8008633e+00] [-1.0337429e+00 3.3885643e+00 3.3137412e+00 -9.1774243e-01 -3.5381989e+00 -2.4497396e-01 -1.0259584e+00 1.2764190e+00 -1.2745297e+00] [ 2.1916967e-02 -3.0526325e-01 -1.7174608e+00 -8.0165920e+00 -3.1269974e-01 1.5687710e+00 -3.7257028e+00 -6.6579986e-01 -1.5602628e+00] [ 2.8098927e+00 -4.9263439e+00 -6.1750197e-01 1.1630598e+00 3.6611624e+00 -1.4602518e+00 -2.8753567e-01 3.4071174e-01 -2.3260410e+00] [-5.0643225e+00 2.9333815e-02 1.3134140e+00 -2.2010179e+00 9.5648611e-01 -3.0744371e+00 -4.1216240e+00 -2.5689346e-01 -2.4937053e+00] [ 1.3680640e+00 3.1819694e-02 -2.3509674e+00 -3.5649374e+00 -3.1793869e+00 -4.3351831e+00 -3.1240394e+00 5.5174798e-01 -5.2691543e-01] [-1.9170929e-01 2.0694461e+00 -7.7800351e-01 -1.9531869e+00 1.5749019e+00 -3.2413054e+00 -3.7677264e-01 -1.6207976e+00 1.6520293e+00] [-3.5078314e-01 4.0444579e+00 -7.9112129e+00 2.7404840e+00 -3.8716536e+00 -1.7992370e-01 1.8446404e+00 3.6372113e+00 2.8326430e+00]] [[-9.8166001e-01 -3.2047334e+00 -1.5930011e+00 8.4266537e-01 -2.9359586e+00 6.1343536e+00 -1.7038206e+00 5.3845530e+00 -4.7303820e+00] [-4.1429257e+00 4.3248043e+00 5.3230982e+00 -4.8443637e+00 -8.0783689e-01 1.2281322e+00 1.8478512e+00 -4.5242085e+00 -4.6832881e+00] [ 
2.6673250e+00 6.5974841e+00 -6.6331525e+00 -3.0697360e+00 7.8333492e+00 7.9681501e+00 6.7742734e+00 -4.4840302e+00 -1.9787171e+00] [-1.8647568e+00 -2.2639170e+00 -2.0010738e+00 -2.2307839e+00 -2.2976663e+00 -8.8094807e+00 2.7516919e-01 5.4044824e+00 8.0253725e+00] [-9.8866060e-02 -6.2461996e+00 -2.6132834e+00 1.1872835e+00 6.8191350e-01 -4.0966291e+00 -2.0162370e+00 -1.3072090e+01 -3.3288567e+00] [ 2.3768556e+00 3.7552249e+00 -2.5017488e+00 -3.2592349e+00 -9.2299547e+00 -1.0231016e+01 -2.0166570e-01 7.1841822e+00 3.7296176e+00] [-3.3947799e-01 -4.2520757e+00 -4.5528412e+00 -9.2662424e-01 2.2796619e+00 1.1228836e+00 3.1126091e+00 1.7325490e+00 -5.5359182e+00] [-3.5592687e+00 -1.8677653e+00 3.1685878e-02 1.1286480e+00 -3.8278036e+00 -4.9710956e+00 2.4890773e+00 -4.9495621e+00 2.1627450e+00] [ 3.8681550e+00 3.2851617e+00 -7.1375284e+00 -2.9058619e+00 -7.6739383e+00 6.0214181e+00 -1.7031640e+00 -2.5193036e+00 5.7928100e+00]] [[-3.4658389e+00 2.0331591e-01 -7.1466088e-01 -2.2483416e+00 -1.9338144e+00 4.5174518e+00 -2.0788376e+00 6.2691641e-01 -2.8476701e+00] [-1.1478752e+00 4.8231113e-01 1.5061822e+00 4.3956751e-01 1.0279964e+00 2.0017314e+00 2.3949757e+00 2.7409551e+00 2.7991879e-01] [-2.9976861e+00 -1.4480610e-01 4.7924309e+00 9.5113838e-01 -2.8334241e+00 -3.6290624e+00 -2.1843228e+00 1.9682407e+00 2.8739085e+00] [ 2.4124570e+00 -4.8210249e+00 3.7442992e+00 -4.7336888e+00 -1.9085575e+00 2.4311239e-01 -6.1756575e-01 -4.7000999e+00 -3.4813445e+00] [ 6.6303220e+00 6.8628406e-03 -2.8825102e+00 1.4398100e+00 3.0173779e+00 -9.3066084e-01 -1.0789825e+00 4.0285096e+00 4.1002756e-01] [-3.1659956e+00 -3.6603601e+00 -2.2734632e-01 1.0459179e+00 2.7560124e+00 2.9226136e+00 3.0248824e-01 -2.8994315e+00 -3.8121614e+00] [-2.9195949e-01 7.2953209e-02 -5.9638268e-01 -7.8508401e-01 -5.3305182e+00 -3.2030878e+00 -3.0817323e+00 -3.0269444e+00 8.3966321e-01] [ 7.3120648e-01 6.2851033e+00 -5.4674816e+00 6.3577706e-01 -2.5985811e+00 2.7697489e+00 -1.7993726e+00 -3.4859617e+00 3.8782899e+00] 
[-1.7722560e+00 5.3589358e+00 -5.0986843e+00 3.3523996e+00 -3.4495285e-01 -4.0734774e-01 -1.9640793e+00 6.3451824e+00 2.7685124e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_567.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.5480 (2,1,.,.) = 0.5228 (3,1,.,.) = 0.9942 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[-4.24184227e+00 1.16828012e+00 -4.42083329e-01 -6.26781166e-01 -1.64274663e-01 -1.96023810e+00 1.72312951e+00 1.19210601e+00 -1.35359001e+00 -1.30861735e+00] [-5.77006996e-01 4.78855640e-01 2.06377411e+00 6.05699778e-01 -1.60472929e-01 -3.75936508e-01 -1.15240604e-01 -8.43703076e-02 -4.65860814e-01 -2.42028141e+00] [-1.52990386e-01 2.30796397e-01 -3.02953577e+00 -5.58907807e-01 1.33590984e+00 4.45415705e-01 -8.13265324e-01 2.20678627e-01 -2.10178828e+00 9.54642296e-01] [-3.48093122e-01 2.98337030e+00 -9.14293468e-01 3.26704346e-02 1.57838082e+00 3.12469077e+00 1.51830167e-01 -1.12359619e+00 1.23513794e+00 6.39677048e-01] [-3.37764144e+00 6.94094419e-01 8.53683889e-01 -1.19413066e+00 -1.17485631e+00 1.31964713e-01 -1.16169882e+00 1.00970104e-01 1.90570474e-01 5.12168348e-01] [-2.13092613e+00 -2.09371400e+00 -1.91091850e-01 -6.22003078e-01 2.01984239e+00 1.19830441e+00 1.45048523e+00 -1.44186988e-01 2.32793736e+00 1.17095506e+00] [ 4.98824835e-01 1.27407777e+00 
-1.88945568e+00 2.08337331e+00 8.66685092e-01 -1.89582729e+00 9.26797628e-01 -3.46491671e+00 -2.05058432e+00 -1.24231160e+00] [ 6.41284883e-02 -1.11522675e+00 -2.07779241e+00 9.76858556e-01 2.19846249e+00 -7.09708571e-01 3.06530666e+00 1.15008307e+00 1.86066556e+00 -4.52197582e-01] [-6.85263395e-01 5.82516074e-01 1.10096729e+00 4.92948771e-01 -1.13495469e+00 1.22503412e+00 -2.71710205e+00 -7.08123744e-02 -1.30492604e+00 -1.74614918e+00] [-1.12132657e+00 -3.61611724e-01 1.50013733e+00 -9.01820362e-02 8.55918705e-01 2.58035946e+00 -5.09542167e-01 -2.93630481e-01 9.44114208e-01 -6.64735258e-01]] [[-4.82673615e-01 -8.52847338e-01 8.70838463e-01 1.79866329e-01 -2.20715195e-01 6.19908154e-01 3.47288638e-01 5.12784198e-02 4.99858528e-01 3.78747076e-01] [ 1.04923554e-01 3.05091888e-01 5.05856685e-02 8.11295211e-02 -5.58758080e-01 2.16634363e-01 2.39698678e-01 1.27769783e-01 3.02570790e-01 -2.19600707e-01] [ 4.78237331e-01 -1.55158550e-01 -1.28049493e-01 8.02109484e-04 -2.12063462e-01 -7.83789009e-02 -3.73148829e-01 2.04669192e-01 4.29165006e-01 -3.13228250e-01] [ 4.87380385e-01 -7.79412210e-01 -4.30646539e-01 3.22540641e-01 6.19287431e-01 5.54896295e-01 5.62921353e-03 -1.94934934e-01 -4.04621124e-01 -1.95367083e-01] [ 6.49635553e-01 -2.48432644e-02 5.53858280e-01 9.86595917e-03 1.44038331e-02 -1.09810039e-01 4.43237811e-01 -1.30960229e-03 6.49863005e-01 -6.42366767e-01] [ 8.05611551e-01 -7.01078698e-02 -1.10821836e-01 -3.32816318e-02 7.97212958e-01 9.85933766e-02 2.48478696e-01 -7.59656727e-01 -1.95796803e-01 -3.21617067e-01] [-5.75193167e-01 3.63203697e-02 -5.68737268e-01 -8.37539554e-01 1.82011023e-01 4.95063812e-01 4.42461520e-01 1.41235322e-01 6.83237389e-02 1.33842111e-01] [-1.48819119e-01 5.34748197e-01 2.20940992e-01 4.11660075e-01 2.41460954e-03 4.77696329e-01 5.00175834e-01 -4.99306142e-01 9.78281617e-01 -4.01366979e-01] [-1.00307561e-01 -5.05068183e-01 3.24524462e-01 -2.93069668e-02 -9.88237143e-01 2.37840712e-01 -3.46039444e-01 -1.94494426e-01 -2.73186147e-01 
-1.66585132e-01] [ 5.61174452e-01 -2.15651631e-01 -3.69575262e-01 -2.31093332e-01 2.49838397e-01 6.20372832e-01 4.69513983e-01 -2.53873348e-01 2.98917711e-01 -4.22580279e-02]] [[ 1.88741133e-01 1.14698136e+00 -2.31992149e+00 -2.35539749e-01 -6.74261537e-04 1.80349255e+00 1.24036705e+00 2.58473396e-01 1.02799964e+00 -9.72055554e-01] [-1.59498513e+00 -1.06482349e-01 4.84059721e-01 -5.48259020e-01 8.72808695e-01 -1.89237320e+00 -1.23802388e+00 6.67662382e-01 1.04345512e+00 -4.60780114e-02] [-2.80841708e-01 6.32729769e-01 -8.77149403e-01 7.69654453e-01 7.78873146e-01 1.19616592e+00 -9.34014857e-01 1.67478994e-01 2.94732243e-01 2.57250786e-01] [ 2.20384812e+00 -2.27778837e-01 1.08138621e+00 -2.40004137e-01 -1.84360057e-01 -1.18617880e+00 2.26170138e-01 -6.41208112e-01 7.00336993e-01 -1.63642764e+00] [ 1.12607980e+00 1.44604683e+00 -5.11145830e-01 1.32398397e-01 5.83592117e-01 4.43233728e-01 -3.41653466e-01 4.79897887e-01 1.25143379e-01 9.14093331e-02] [-1.30958998e+00 -5.68064824e-02 1.06314814e+00 -1.65300107e+00 -4.92968827e-01 -1.63275707e+00 -8.29927683e-01 -2.32455507e-01 -4.34015393e-01 1.10806024e+00] [-7.32061863e-01 1.29762661e+00 9.69071873e-03 -1.22447860e+00 1.02351940e+00 7.54741967e-01 -8.72441947e-01 4.43439454e-01 -1.15855753e+00 2.56313682e-01] [-8.20695907e-02 2.72967756e-01 -5.90661228e-01 1.93862796e-01 -9.42299187e-01 3.64922941e-01 -3.43747288e-01 2.73720813e+00 8.02861154e-01 3.11584890e-01] [ 2.10663855e-01 -1.67160356e+00 5.48960090e-01 1.68164968e-01 -2.17352659e-01 -2.63141370e+00 -1.69643641e-01 -4.43916231e-01 3.87567580e-02 -1.64491880e+00] [ 1.04911065e+00 -8.88237357e-02 -4.42956448e-01 1.38580605e-01 -1.20469439e+00 -1.28269839e+00 -4.38084686e-03 5.46988010e-01 1.23813939e+00 1.10996091e+00]]]]; ov_res: [[[[-4.24184227e+00 1.16828012e+00 -4.42083329e-01 -6.26781166e-01 -1.64274663e-01 -1.96023810e+00 1.72312951e+00 1.19210601e+00 -1.35359001e+00 -1.30861735e+00] [-5.77006996e-01 4.78855640e-01 2.06377411e+00 6.05699778e-01 
-1.60472929e-01 -3.75936508e-01 -1.15240604e-01 -8.43703076e-02 -4.65860814e-01 -2.42028141e+00] [-1.52990386e-01 2.30796397e-01 -3.02953577e+00 -5.58907807e-01 1.33590984e+00 4.45415705e-01 -8.13265324e-01 2.20678627e-01 -2.10178828e+00 9.54642296e-01] [-3.48093122e-01 2.98337030e+00 -9.14293468e-01 3.26704346e-02 1.57838082e+00 3.12469077e+00 1.51830167e-01 -1.12359619e+00 1.23513794e+00 6.39677048e-01] [-3.37764144e+00 6.94094419e-01 8.53683889e-01 -1.19413066e+00 -1.17485631e+00 1.31964713e-01 -1.16169882e+00 1.00970104e-01 1.90570474e-01 5.12168348e-01] [-2.13092613e+00 -2.09371400e+00 -1.91091850e-01 -6.22003078e-01 2.01984239e+00 1.19830441e+00 1.45048523e+00 -1.44186988e-01 2.32793736e+00 1.17095506e+00] [ 4.98824835e-01 1.27407777e+00 -1.88945568e+00 2.08337331e+00 8.66685092e-01 -1.89582729e+00 9.26797628e-01 -3.46491671e+00 -2.05058432e+00 -1.24231160e+00] [ 6.41284883e-02 -1.11522675e+00 -2.07779241e+00 9.76858556e-01 2.19846249e+00 -7.09708571e-01 3.06530666e+00 1.15008307e+00 1.86066556e+00 -4.52197582e-01] [-6.85263395e-01 5.82516074e-01 1.10096729e+00 4.92948771e-01 -1.13495469e+00 1.22503412e+00 -2.71710205e+00 -7.08123744e-02 -1.30492604e+00 -1.74614918e+00] [-1.12132657e+00 -3.61611724e-01 1.50013733e+00 -9.01820362e-02 8.55918705e-01 2.58035946e+00 -5.09542167e-01 -2.93630481e-01 9.44114208e-01 -6.64735258e-01]] [[-4.82673615e-01 -8.52847338e-01 8.70838463e-01 1.79866329e-01 -2.20715195e-01 6.19908154e-01 3.47288638e-01 5.12784198e-02 4.99858528e-01 3.78747076e-01] [ 1.04923554e-01 3.05091888e-01 5.05856685e-02 8.11295211e-02 -5.58758080e-01 2.16634363e-01 2.39698678e-01 1.27769783e-01 3.02570790e-01 -2.19600707e-01] [ 4.78237331e-01 -1.55158550e-01 -1.28049493e-01 8.02109484e-04 -2.12063462e-01 -7.83789009e-02 -3.73148829e-01 2.04669192e-01 4.29165006e-01 -3.13228250e-01] [ 4.87380385e-01 -7.79412210e-01 -4.30646539e-01 3.22540641e-01 6.19287431e-01 5.54896295e-01 5.62921353e-03 -1.94934934e-01 -4.04621124e-01 -1.95367083e-01] [ 6.49635553e-01 
-2.48432644e-02 5.53858280e-01 9.86595917e-03 1.44038331e-02 -1.09810039e-01 4.43237811e-01 -1.30960229e-03 6.49863005e-01 -6.42366767e-01] [ 8.05611551e-01 -7.01078698e-02 -1.10821836e-01 -3.32816318e-02 7.97212958e-01 9.85933766e-02 2.48478696e-01 -7.59656727e-01 -1.95796803e-01 -3.21617067e-01] [-5.75193167e-01 3.63203697e-02 -5.68737268e-01 -8.37539554e-01 1.82011023e-01 4.95063812e-01 4.42461520e-01 1.41235322e-01 6.83237389e-02 1.33842111e-01] [-1.48819119e-01 5.34748197e-01 2.20940992e-01 4.11660075e-01 2.41460954e-03 4.77696329e-01 5.00175834e-01 -4.99306142e-01 9.78281617e-01 -4.01366979e-01] [-1.00307561e-01 -5.05068183e-01 3.24524462e-01 -2.93069668e-02 -9.88237143e-01 2.37840712e-01 -3.46039444e-01 -1.94494426e-01 -2.73186147e-01 -1.66585132e-01] [ 5.61174452e-01 -2.15651631e-01 -3.69575262e-01 -2.31093332e-01 2.49838397e-01 6.20372832e-01 4.69513983e-01 -2.53873348e-01 2.98917711e-01 -4.22580279e-02]] [[ 1.88741133e-01 1.14698136e+00 -2.31992149e+00 -2.35539749e-01 -6.74261537e-04 1.80349255e+00 1.24036705e+00 2.58473396e-01 1.02799964e+00 -9.72055554e-01] [-1.59498513e+00 -1.06482349e-01 4.84059721e-01 -5.48259020e-01 8.72808695e-01 -1.89237320e+00 -1.23802388e+00 6.67662382e-01 1.04345512e+00 -4.60780114e-02] [-2.80841708e-01 6.32729769e-01 -8.77149403e-01 7.69654453e-01 7.78873146e-01 1.19616592e+00 -9.34014857e-01 1.67478994e-01 2.94732243e-01 2.57250786e-01] [ 2.20384812e+00 -2.27778837e-01 1.08138621e+00 -2.40004137e-01 -1.84360057e-01 -1.18617880e+00 2.26170138e-01 -6.41208112e-01 7.00336993e-01 -1.63642764e+00] [ 1.12607980e+00 1.44604683e+00 -5.11145830e-01 1.32398397e-01 5.83592117e-01 4.43233728e-01 -3.41653466e-01 4.79897887e-01 1.25143379e-01 9.14093331e-02] [-1.30958998e+00 -5.68064824e-02 1.06314814e+00 -1.65300107e+00 -4.92968827e-01 -1.63275707e+00 -8.29927683e-01 -2.32455507e-01 -4.34015393e-01 1.10806024e+00] [-7.32061863e-01 1.29762661e+00 9.69071873e-03 -1.22447860e+00 1.02351940e+00 7.54741967e-01 -8.72441947e-01 4.43439454e-01 
-1.15855753e+00 2.56313682e-01] [-8.20695907e-02 2.72967756e-01 -5.90661228e-01 1.93862796e-01 -9.42299187e-01 3.64922941e-01 -3.43747288e-01 2.73720813e+00 8.02861154e-01 3.11584890e-01] [ 2.10663855e-01 -1.67160356e+00 5.48960090e-01 1.68164968e-01 -2.17352659e-01 -2.63141370e+00 -1.69643641e-01 -4.43916231e-01 3.87567580e-02 -1.64491880e+00] [ 1.04911065e+00 -8.88237357e-02 -4.42956448e-01 1.38580605e-01 -1.20469439e+00 -1.28269839e+00 -4.38084686e-03 5.46988010e-01 1.23813939e+00 1.10996091e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_569.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.6076 (2,1,.,.) = 1.6448 (3,1,.,.) = -1.2548 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[ 1.34910475e-02 -1.36070907e-01 8.29936624e-01 2.43800834e-01 -4.13580984e-02 2.87873924e-01 -7.35018015e-01 -3.21122259e-01 -3.46968114e-01 1.09408188e+00] [-6.25047863e-01 -1.22898677e-02 -2.41995901e-01 2.85947293e-01 -1.03344359e-01 4.37805755e-03 -4.86879438e-01 1.05431244e-01 1.48630887e-01 -2.83639312e-01] [ 1.58262640e-01 5.23235440e-01 -9.13256228e-01 -2.37801522e-01 -1.96293548e-01 1.05881028e-01 6.10954583e-01 -8.25770199e-02 4.76536006e-01 -4.84530121e-01] [ 6.05502546e-01 1.94195151e-01 1.18515837e+00 -1.12909235e-01 -8.49682018e-02 1.25409693e-01 -3.13092858e-01 7.61162221e-01 -3.34626347e-01 -1.89889088e-01] [-2.92137384e-01 -2.06196144e-01 5.98670185e-01 1.45200416e-01 5.93964756e-01 -2.01034412e-01 -9.44464326e-01 -7.85917882e-03 -1.02920437e+00 -1.00050807e+00] [-2.99638212e-01 6.63514674e-01 4.79013205e-01 5.05813420e-01 4.14877146e-01 -8.74382317e-01 1.13671832e-01 -2.13914871e-01 -6.21677637e-02 -9.06290174e-01] [-3.68011534e-01 2.08511919e-01 7.19122738e-02 -5.98920703e-01 -2.48936892e-01 5.41263819e-01 
-4.08975393e-01 -1.53265321e+00 2.40404502e-01 1.37595713e-01] [ 4.50714044e-02 3.51043865e-02 2.21505433e-01 -9.24956203e-01 3.12223554e-01 3.59852135e-01 -5.65811276e-01 7.21186638e-01 -3.52450818e-01 -1.15632844e+00] [-7.72706792e-02 3.93644750e-01 9.99757111e-01 6.22442126e-01 7.47642398e-01 -2.01957487e-02 3.69063288e-01 -1.90505207e-01 3.87930334e-01 -2.84256876e-01] [ 1.73381424e+00 7.83771753e-01 -3.21775019e-01 -9.93733943e-01 1.26061425e-01 -3.15020382e-01 4.80074920e-02 5.01695514e-01 2.34809235e-01 6.32586181e-01]] [[ 8.66640687e-01 8.38663757e-01 -1.28357446e+00 -1.22708702e+00 3.99388671e-01 -4.67244357e-01 8.86063278e-01 -1.79141521e+00 3.20207453e+00 -1.67028710e-01] [-2.03651738e+00 -2.37213802e+00 1.27780259e+00 -1.60577536e+00 4.10190195e-01 1.52667177e+00 -1.83978224e+00 4.60479110e-01 -7.25226879e-01 4.33979690e-01] [ 1.16443062e+00 2.09554958e+00 7.88150489e-01 -3.35467488e-01 1.18923627e-01 -9.86305252e-02 8.75249505e-01 1.63595378e-01 -1.59527063e+00 1.19113159e+00] [-2.32099682e-01 -1.61828017e+00 2.03389287e+00 -1.16186213e+00 -1.89128375e+00 -4.79512334e-01 -3.66100430e-01 -1.41890454e+00 -1.57089353e+00 2.07856393e+00] [-3.85987967e-01 9.14578915e-01 -4.85824490e+00 -2.11451507e+00 4.76755500e-01 4.49413633e+00 -4.30630893e-01 9.73812342e-01 2.93707108e+00 3.70763755e+00] [ 1.56827837e-01 -1.28430855e+00 -1.04073608e+00 2.56028318e+00 9.89062548e-01 -2.83294916e-01 -2.43102813e+00 6.22005224e-01 -7.00420022e-01 1.96105504e+00] [-1.39644527e+00 1.40805721e+00 -1.72634518e+00 -9.14718807e-01 -1.24963021e+00 -1.80917770e-01 -2.66382933e-01 -1.20729089e+00 1.14267635e+00 -1.50003552e-01] [-1.32687044e+00 -2.01674628e+00 -2.29795218e-01 2.31392312e+00 1.56880319e+00 2.83919740e+00 1.23762929e+00 -4.31167752e-01 1.53338921e+00 1.12425625e+00] [-3.14217716e-01 2.16319919e+00 1.77319109e+00 -2.02004984e-01 -3.52712125e-01 2.76124895e-01 -2.48463273e-01 1.52364111e+00 -1.98744476e+00 -1.04458463e+00] [-5.99033654e-01 7.37255633e-01 1.52106196e-01 
-2.13421440e+00 3.88184762e+00 -5.38802207e-01 3.36605191e-01 5.15282452e-01 -6.03606224e-01 -8.93849909e-01]] [[-1.64286351e+00 -3.01643538e+00 -1.15328526e+00 1.27698052e+00 -1.11451292e+00 -4.45264786e-01 -8.59042555e-02 1.79868269e+00 7.46378779e-01 1.56305420e+00] [ 9.80183244e-01 -1.26358044e+00 1.17381036e+00 -1.07979524e+00 -1.43339634e-01 -1.03395605e+00 1.23981595e+00 -2.79558539e+00 -2.55367398e-01 -1.49617827e+00] [-1.00838923e+00 1.84340179e+00 -2.07577538e+00 1.05871785e+00 -1.40297949e+00 2.40163016e+00 -9.74342346e-01 -6.46083355e-01 -3.24367857e+00 -1.47279263e+00] [ 4.78169560e-01 7.43024230e-01 -9.01062191e-01 -2.17394161e+00 1.64817178e+00 -1.04598475e+00 1.33330846e+00 -1.00392616e+00 -2.22667560e-01 5.18277466e-01] [-3.09499598e+00 -3.64802718e-01 -8.28300655e-01 2.65326440e-01 -7.79181063e-01 -5.72021902e-01 -1.13894808e+00 -7.01225996e-01 -3.11591983e-01 -7.93598145e-02] [-1.70517802e+00 1.59433949e+00 -4.54621948e-02 -1.95381200e+00 -2.55118787e-01 1.11817324e+00 1.13846886e+00 2.17664242e-01 -4.32730287e-01 1.26434159e+00] [-1.87963915e+00 1.13256299e+00 -3.99849653e-01 -3.68225902e-01 2.25871205e-01 3.34073037e-01 -3.60603482e-01 1.29133606e+00 1.08915174e+00 -3.15307856e+00] [-2.19879866e+00 1.87221706e-01 -6.28337443e-01 2.46510655e-01 8.92295778e-01 2.42506933e+00 -1.26248491e+00 2.19937146e-01 -1.69786000e+00 8.15837443e-01] [ 5.18415630e-01 1.33498728e+00 -1.16626807e-01 1.45002282e+00 1.64919639e+00 1.33854103e+00 5.25599718e-01 -6.29780471e-01 9.72820044e-01 2.43900585e+00] [-1.66146338e+00 -1.03427553e+00 -1.88479964e-02 -8.94508123e-01 -2.30065441e+00 1.26184940e-01 -1.33886123e+00 -1.57822937e-01 -1.57656682e+00 -2.78284669e-01]]]]; ov_res: [[[[ 1.34910475e-02 -1.36070907e-01 8.29936624e-01 2.43800834e-01 -4.13580984e-02 2.87873924e-01 -7.35018015e-01 -3.21122259e-01 -3.46968114e-01 1.09408188e+00] [-6.25047863e-01 -1.22898677e-02 -2.41995901e-01 2.85947293e-01 -1.03344359e-01 4.37805755e-03 -4.86879438e-01 1.05431244e-01 
1.48630887e-01 -2.83639312e-01] [ 1.58262640e-01 5.23235440e-01 -9.13256228e-01 -2.37801522e-01 -1.96293548e-01 1.05881028e-01 6.10954583e-01 -8.25770199e-02 4.76536006e-01 -4.84530121e-01] [ 6.05502546e-01 1.94195151e-01 1.18515837e+00 -1.12909235e-01 -8.49682018e-02 1.25409693e-01 -3.13092858e-01 7.61162221e-01 -3.34626347e-01 -1.89889088e-01] [-2.92137384e-01 -2.06196144e-01 5.98670185e-01 1.45200416e-01 5.93964756e-01 -2.01034412e-01 -9.44464326e-01 -7.85917882e-03 -1.02920437e+00 -1.00050807e+00] [-2.99638212e-01 6.63514674e-01 4.79013205e-01 5.05813420e-01 4.14877146e-01 -8.74382317e-01 1.13671832e-01 -2.13914871e-01 -6.21677637e-02 -9.06290174e-01] [-3.68011534e-01 2.08511919e-01 7.19122738e-02 -5.98920703e-01 -2.48936892e-01 5.41263819e-01 -4.08975393e-01 -1.53265321e+00 2.40404502e-01 1.37595713e-01] [ 4.50714044e-02 3.51043865e-02 2.21505433e-01 -9.24956203e-01 3.12223554e-01 3.59852135e-01 -5.65811276e-01 7.21186638e-01 -3.52450818e-01 -1.15632844e+00] [-7.72706792e-02 3.93644750e-01 9.99757111e-01 6.22442126e-01 7.47642398e-01 -2.01957487e-02 3.69063288e-01 -1.90505207e-01 3.87930334e-01 -2.84256876e-01] [ 1.73381424e+00 7.83771753e-01 -3.21775019e-01 -9.93733943e-01 1.26061425e-01 -3.15020382e-01 4.80074920e-02 5.01695514e-01 2.34809235e-01 6.32586181e-01]] [[ 8.66640687e-01 8.38663757e-01 -1.28357446e+00 -1.22708702e+00 3.99388671e-01 -4.67244357e-01 8.86063278e-01 -1.79141521e+00 3.20207453e+00 -1.67028710e-01] [-2.03651738e+00 -2.37213802e+00 1.27780259e+00 -1.60577536e+00 4.10190195e-01 1.52667177e+00 -1.83978224e+00 4.60479110e-01 -7.25226879e-01 4.33979690e-01] [ 1.16443062e+00 2.09554958e+00 7.88150489e-01 -3.35467488e-01 1.18923627e-01 -9.86305252e-02 8.75249505e-01 1.63595378e-01 -1.59527063e+00 1.19113159e+00] [-2.32099682e-01 -1.61828017e+00 2.03389287e+00 -1.16186213e+00 -1.89128375e+00 -4.79512334e-01 -3.66100430e-01 -1.41890454e+00 -1.57089353e+00 2.07856393e+00] [-3.85987967e-01 9.14578915e-01 -4.85824490e+00 -2.11451507e+00 
4.76755500e-01 4.49413633e+00 -4.30630893e-01 9.73812342e-01 2.93707108e+00 3.70763755e+00] [ 1.56827837e-01 -1.28430855e+00 -1.04073608e+00 2.56028318e+00 9.89062548e-01 -2.83294916e-01 -2.43102813e+00 6.22005224e-01 -7.00420022e-01 1.96105504e+00] [-1.39644527e+00 1.40805721e+00 -1.72634518e+00 -9.14718807e-01 -1.24963021e+00 -1.80917770e-01 -2.66382933e-01 -1.20729089e+00 1.14267635e+00 -1.50003552e-01] [-1.32687044e+00 -2.01674628e+00 -2.29795218e-01 2.31392312e+00 1.56880319e+00 2.83919740e+00 1.23762929e+00 -4.31167752e-01 1.53338921e+00 1.12425625e+00] [-3.14217716e-01 2.16319919e+00 1.77319109e+00 -2.02004984e-01 -3.52712125e-01 2.76124895e-01 -2.48463273e-01 1.52364111e+00 -1.98744476e+00 -1.04458463e+00] [-5.99033654e-01 7.37255633e-01 1.52106196e-01 -2.13421440e+00 3.88184762e+00 -5.38802207e-01 3.36605191e-01 5.15282452e-01 -6.03606224e-01 -8.93849909e-01]] [[-1.64286351e+00 -3.01643538e+00 -1.15328526e+00 1.27698052e+00 -1.11451292e+00 -4.45264786e-01 -8.59042555e-02 1.79868269e+00 7.46378779e-01 1.56305420e+00] [ 9.80183244e-01 -1.26358044e+00 1.17381036e+00 -1.07979524e+00 -1.43339634e-01 -1.03395605e+00 1.23981595e+00 -2.79558539e+00 -2.55367398e-01 -1.49617827e+00] [-1.00838923e+00 1.84340179e+00 -2.07577538e+00 1.05871785e+00 -1.40297949e+00 2.40163016e+00 -9.74342346e-01 -6.46083355e-01 -3.24367857e+00 -1.47279263e+00] [ 4.78169560e-01 7.43024230e-01 -9.01062191e-01 -2.17394161e+00 1.64817178e+00 -1.04598475e+00 1.33330846e+00 -1.00392616e+00 -2.22667560e-01 5.18277466e-01] [-3.09499598e+00 -3.64802718e-01 -8.28300655e-01 2.65326440e-01 -7.79181063e-01 -5.72021902e-01 -1.13894808e+00 -7.01225996e-01 -3.11591983e-01 -7.93598145e-02] [-1.70517802e+00 1.59433949e+00 -4.54621948e-02 -1.95381200e+00 -2.55118787e-01 1.11817324e+00 1.13846886e+00 2.17664242e-01 -4.32730287e-01 1.26434159e+00] [-1.87963915e+00 1.13256299e+00 -3.99849653e-01 -3.68225902e-01 2.25871205e-01 3.34073037e-01 -3.60603482e-01 1.29133606e+00 1.08915174e+00 -3.15307856e+00] 
[-2.19879866e+00 1.87221706e-01 -6.28337443e-01 2.46510655e-01 8.92295778e-01 2.42506933e+00 -1.26248491e+00 2.19937146e-01 -1.69786000e+00 8.15837443e-01] [ 5.18415630e-01 1.33498728e+00 -1.16626807e-01 1.45002282e+00 1.64919639e+00 1.33854103e+00 5.25599718e-01 -6.29780471e-01 9.72820044e-01 2.43900585e+00] [-1.66146338e+00 -1.03427553e+00 -1.88479964e-02 -8.94508123e-01 -2.30065441e+00 1.26184940e-01 -1.33886123e+00 -1.57822937e-01 -1.57656682e+00 -2.78284669e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [1, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_571.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.8477 (2,1,.,.) = -0.6769 (3,1,.,.) = 0.1543 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[-3.446088 2.58573 3.7467473 2.3729565 -0.79847795 -4.774168 0.44000447 0.71200037] [-4.321741 -2.3774545 0.6132706 -2.0912964 1.634887 -1.3555703 -0.5417788 -3.2038147 ] [-0.11124898 1.7219998 2.1330943 -1.1814985 4.1430483 -2.078429 0.4060772 -1.6505226 ] [ 0.35435623 -1.0052942 2.6849213 0.950697 -1.9400349 2.3098133 5.2899556 1.0248973 ] [ 0.41644076 2.0719922 -0.51063645 -2.4993875 -1.8654158 4.9700985 1.1675668 1.1595244 ] [ 2.5621722 0.11064865 0.69625854 -2.1420105 -2.8045418 -1.8075029 -3.9600399 -3.5040352 ] [-1.4075688 0.55304396 -0.42240056 2.9814425 -0.97197145 -0.5113483 1.6879922 -4.494771 ] [ 3.1541302 -1.5284961 -1.887112 2.7933724 1.9787232 0.8985553 -1.4993988 2.5301077 ]]]]; ov_res: [[[[-3.446088 2.58573 3.7467473 2.3729565 -0.79847795 -4.774168 0.44000447 0.71200037] [-4.321741 -2.3774545 0.6132706 -2.0912964 1.634887 -1.3555703 -0.5417788 -3.2038147 ] [-0.11124898 1.7219998 2.1330943 -1.1814985 4.1430483 -2.078429 0.4060772 
-1.6505226 ] [ 0.35435623 -1.0052942 2.6849213 0.950697 -1.9400349 2.3098133 5.2899556 1.0248973 ] [ 0.41644076 2.0719922 -0.51063645 -2.4993875 -1.8654158 4.9700985 1.1675668 1.1595244 ] [ 2.5621722 0.11064865 0.69625854 -2.1420105 -2.8045418 -1.8075029 -3.9600399 -3.5040352 ] [-1.4075688 0.55304396 -0.42240056 2.9814425 -0.97197145 -0.5113483 1.6879922 -4.494771 ] [ 3.1541302 -1.5284961 -1.887112 2.7933724 1.9787232 0.8985553 -1.4993988 2.5301077 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [1, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_573.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.4647 (2,1,.,.) = 0.01 * 1.9498 (3,1,.,.) = -0.1845 (1,2,.,.) = 0.7835 (2,2,.,.) = -0.6415 (3,2,.,.) = 1.0483 (1,3,.,.) = -0.6030 (2,3,.,.) = -0.7021 (3,3,.,.) = -1.1549 [ CPUFloatType{3,3,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.26781353 0.53990185 -0.2660801 -0.95437735 -0.19829775 -0.21663699 -1.2573141 1.9710997 1.8552682 0.68855953 0. ] [ 0. -1.399084 0.6034179 1.1784428 0.525949 0.87045217 -1.300304 0.3388417 1.3922731 -0.13336506 1.0881078 0. ] [ 0. -0.18399578 0.7600535 0.7492391 -0.9998026 2.426055 0.41790637 1.2951782 -0.71331966 -0.29196543 0.35129765 0. ] [ 0. -0.9309521 -0.59892267 -0.35281962 1.0007461 -1.7792234 -1.2605374 -0.56983835 0.20406184 0.57082283 -0.04159397 0. ] [ 0. 0.7573834 1.3876549 -0.93290013 -0.9986151 1.7722831 0.7664739 1.0196548 -0.6336163 0.21988384 2.0419247 0. ] [ 0. 0.35026464 -1.5258958 -0.21246576 -0.14387867 1.3506515 -0.59703344 0.39033222 0.16405633 -0.06765303 -0.22549641 0. ] [ 0. -1.5193055 -2.169607 -1.3901328 0.6079805 -1.7694232 -0.7846017 1.1969316 -0.4875501 -1.3696359 -0.5077498 0. ] [ 0. 
-1.8944623 0.67412984 1.7700511 -0.18902518 0.2823323 0.34389308 0.8351823 0.9156475 -0.23094013 -0.8903896 0. ] [ 0. -1.2669436 -0.5184954 -0.42991984 -0.73112845 -0.05526754 0.44979885 -2.4834468 1.5245185 0.43218127 -0.8272784 0. ] [ 0. 0.3439417 -0.6652504 0.51540685 0.9280525 1.0950962 -1.4075023 0.6231003 0.59205127 -0.22351412 -0.13240586 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.6625028 0.8402285 0.6556339 -0.85483027 1.2972585 0.13467239 -0.34708402 0.22661102 -0.4314533 -0.815416 0. ] [ 0. 0.41067186 0.9705064 1.2067566 -1.0870595 -1.6389854 -1.6350627 -0.1034972 -0.7300053 -2.1351004 1.0333339 0. ] [ 0. -0.9689394 -0.1795046 -0.63851756 0.32983544 0.03412433 0.46203333 -1.4064773 2.7523077 2.1090312 -0.239963 0. ] [ 0. 0.3840076 -0.45021886 0.17836559 -1.2358638 -0.32703355 -0.01297724 -0.01690946 -0.51045656 0.3747749 1.8896362 0. ] [ 0. -0.67168885 -0.96922 -0.18689205 0.22239287 -0.5438551 0.20687068 -0.21246745 1.6443002 0.15265386 -1.3776431 0. ] [ 0. 0.5764188 0.02139949 1.0408093 0.3499421 -0.2533719 1.1407093 -0.63263166 -0.46386158 1.450142 0.63812494 0. ] [ 0. 0.5866376 2.281606 -0.801869 0.7425354 0.3785901 1.684079 2.590605 -2.186343 0.34276545 -0.820611 0. ] [ 0. -0.5270749 -0.17363468 0.1069745 0.08863107 0.58141595 0.12972835 0.22661626 -0.01978536 -1.6413251 0.8087456 0. ] [ 0. 1.9314626 1.8897252 -0.54692286 -0.59049916 0.41873452 -0.08794861 1.0949943 0.6696457 1.6966769 0.66430044 0. ] [ 0. 0.92772484 0.60416913 -0.5070197 0.770654 -0.6526764 0.13077825 0.83770484 0.23194744 0.87053335 -0.5832393 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.8277131 1.2334317 -0.01813847 -1.9404666 -0.45508212 -0.45072597 -0.9322428 3.6972728 2.6278749 0.80495805 0. ] [ 0. -0.60529274 2.486601 2.2047114 -0.25578365 0.7174768 -1.6979936 1.5224466 -0.5123183 -1.0840222 1.256304 0. ] [ 0. 
0.8995063 0.6306074 1.7156354 0.532775 4.05401 -0.57175976 -0.42690563 0.3046192 0.6580656 0.63491106 0. ] [ 0. -0.55291814 -1.0421883 -0.9967698 1.1786857 -1.4495496 -2.5933044 -0.8666294 0.35379174 0.22748108 0.4741915 0. ] [ 0. 1.3233935 1.5777907 -0.9188485 -2.2252324 1.6121366 1.9130514 1.0846978 0.13680816 -0.9688744 2.9908035 0. ] [ 0. 0.39800158 -0.6810892 0.43870914 -0.48530704 -0.13787015 -0.3782805 -0.7756908 0.03987869 1.309836 0.7984393 0. ] [ 0. -1.7607187 -2.2324371 -1.332181 -0.62047803 -1.4923759 -0.19568008 2.4698303 -1.1987363 -1.5843924 0.5717661 0. ] [ 0. -3.7774503 1.2919037 2.4832294 -1.2086345 0.02006203 -1.1547896 1.3619088 1.5697434 -0.5021049 0.48741722 0. ] [ 0. -0.604106 -2.1096663 0.30212244 -1.2126684 -0.41535553 -0.5618151 -2.9213655 1.5520892 0.2879084 -0.7295578 0. ] [ 0. 1.2204723 1.04902 0.68822324 2.6526964 2.1290808 0.06861957 -0.3999146 -0.10904697 -0.24235067 0.8750048 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]]]; ov_res: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.26781353 0.53990185 -0.2660801 -0.95437735 -0.19829775 -0.21663699 -1.2573141 1.9710997 1.8552682 0.68855953 0. ] [ 0. -1.399084 0.6034179 1.1784428 0.525949 0.87045217 -1.300304 0.3388417 1.3922731 -0.13336506 1.0881078 0. ] [ 0. -0.18399578 0.7600535 0.7492391 -0.9998026 2.426055 0.41790637 1.2951782 -0.71331966 -0.29196543 0.35129765 0. ] [ 0. -0.9309521 -0.59892267 -0.35281962 1.0007461 -1.7792234 -1.2605374 -0.56983835 0.20406184 0.57082283 -0.04159397 0. ] [ 0. 0.7573834 1.3876549 -0.93290013 -0.9986151 1.7722831 0.7664739 1.0196548 -0.6336163 0.21988384 2.0419247 0. ] [ 0. 0.35026464 -1.5258958 -0.21246576 -0.14387867 1.3506515 -0.59703344 0.39033222 0.16405633 -0.06765303 -0.22549641 0. ] [ 0. -1.5193055 -2.169607 -1.3901328 0.6079805 -1.7694232 -0.7846017 1.1969316 -0.4875501 -1.3696359 -0.5077498 0. ] [ 0. -1.8944623 0.67412984 1.7700511 -0.18902518 0.2823323 0.34389308 0.8351823 0.9156475 -0.23094013 -0.8903896 0. ] [ 0. 
-1.2669436 -0.5184954 -0.42991984 -0.73112845 -0.05526754 0.44979885 -2.4834468 1.5245185 0.43218127 -0.8272784 0. ] [ 0. 0.3439417 -0.6652504 0.51540685 0.9280525 1.0950962 -1.4075023 0.6231003 0.59205127 -0.22351412 -0.13240586 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.6625028 0.8402285 0.6556339 -0.85483027 1.2972585 0.13467239 -0.34708402 0.22661102 -0.4314533 -0.815416 0. ] [ 0. 0.41067186 0.9705064 1.2067566 -1.0870595 -1.6389854 -1.6350627 -0.1034972 -0.7300053 -2.1351004 1.0333339 0. ] [ 0. -0.9689394 -0.1795046 -0.63851756 0.32983544 0.03412433 0.46203333 -1.4064773 2.7523077 2.1090312 -0.239963 0. ] [ 0. 0.3840076 -0.45021886 0.17836559 -1.2358638 -0.32703355 -0.01297724 -0.01690946 -0.51045656 0.3747749 1.8896362 0. ] [ 0. -0.67168885 -0.96922 -0.18689205 0.22239287 -0.5438551 0.20687068 -0.21246745 1.6443002 0.15265386 -1.3776431 0. ] [ 0. 0.5764188 0.02139949 1.0408093 0.3499421 -0.2533719 1.1407093 -0.63263166 -0.46386158 1.450142 0.63812494 0. ] [ 0. 0.5866376 2.281606 -0.801869 0.7425354 0.3785901 1.684079 2.590605 -2.186343 0.34276545 -0.820611 0. ] [ 0. -0.5270749 -0.17363468 0.1069745 0.08863107 0.58141595 0.12972835 0.22661626 -0.01978536 -1.6413251 0.8087456 0. ] [ 0. 1.9314626 1.8897252 -0.54692286 -0.59049916 0.41873452 -0.08794861 1.0949943 0.6696457 1.6966769 0.66430044 0. ] [ 0. 0.92772484 0.60416913 -0.5070197 0.770654 -0.6526764 0.13077825 0.83770484 0.23194744 0.87053335 -0.5832393 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.8277131 1.2334317 -0.01813847 -1.9404666 -0.45508212 -0.45072597 -0.9322428 3.6972728 2.6278749 0.80495805 0. ] [ 0. -0.60529274 2.486601 2.2047114 -0.25578365 0.7174768 -1.6979936 1.5224466 -0.5123183 -1.0840222 1.256304 0. ] [ 0. 0.8995063 0.6306074 1.7156354 0.532775 4.05401 -0.57175976 -0.42690563 0.3046192 0.6580656 0.63491106 0. ] [ 0. 
-0.55291814 -1.0421883 -0.9967698 1.1786857 -1.4495496 -2.5933044 -0.8666294 0.35379174 0.22748108 0.4741915 0. ] [ 0. 1.3233935 1.5777907 -0.9188485 -2.2252324 1.6121366 1.9130514 1.0846978 0.13680816 -0.9688744 2.9908035 0. ] [ 0. 0.39800158 -0.6810892 0.43870914 -0.48530704 -0.13787015 -0.3782805 -0.7756908 0.03987869 1.309836 0.7984393 0. ] [ 0. -1.7607187 -2.2324371 -1.332181 -0.62047803 -1.4923759 -0.19568008 2.4698303 -1.1987363 -1.5843924 0.5717661 0. ] [ 0. -3.7774503 1.2919037 2.4832294 -1.2086345 0.02006203 -1.1547896 1.3619088 1.5697434 -0.5021049 0.48741722 0. ] [ 0. -0.604106 -2.1096663 0.30212244 -1.2126684 -0.41535553 -0.5618151 -2.9213655 1.5520892 0.2879084 -0.7295578 0. ] [ 0. 1.2204723 1.04902 0.68822324 2.6526964 2.1290808 0.06861957 -0.3999146 -0.10904697 -0.24235067 0.8750048 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [3, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_575.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.7972 (2,1,.,.) = 0.2958 (3,1,.,.) = 0.3917 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[-0.7564361 -0.13750023 -0.46975696 -0.6095272 -0.44577697 0.04881741 0.8334571 -0.5944463 ] [-0.25194067 -0.86235136 0.5422584 1.5894282 -0.29973322 0.22867142 -0.5525056 0.16369107] [-0.3097976 1.093956 0.20274565 -0.9043462 0.3590709 1.2633736 -0.22965921 0.20190307] [-0.34080374 -2.0424957 0.5434297 -0.00892489 -0.35063976 -0.04430353 -0.8959457 -0.29574177]]]]; ov_res: [[[[-0.7564361 -0.13750023 -0.46975696 -0.6095272 -0.44577697 0.04881741 0.8334571 -0.5944463 ] [-0.25194067 -0.86235136 0.5422584 1.5894282 -0.29973322 0.22867142 -0.5525056 0.16369107] [-0.3097976 1.093956 0.20274565 -0.9043462 0.3590709 1.2633736 -0.22965921 0.20190307] [-0.34080374 -2.0424957 0.5434297 -0.00892489 -0.35063976 -0.04430353 -0.8959457 -0.29574177]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [3, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_577.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.5062 (2,1,.,.) = 0.7565 (3,1,.,.) = 0.6704 (1,2,.,.) = -0.7536 (2,2,.,.) = -0.2796 (3,2,.,.) = -0.9177 (1,3,.,.) = 2.5775 (2,3,.,.) = 1.8620 (3,3,.,.) = 0.9359 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 4.6596003 -1.7366495 0.763352 -1.8818761 -0.3982121 -1.3639334 -3.0480158 -5.8270216 1.0411386 3.55173 0. ] [ 0. -2.0055842 5.1817284 -2.719353 -4.401655 -3.0392036 -0.03744755 -4.1042366 -3.323186 -4.0539217 7.193033 0. ] [ 0. -2.2169864 0.64995956 -0.12056996 0.46321934 4.019516 -0.7395168 -2.0129766 11.57934 0.29758522 -3.3130393 0. ] [ 0. -2.5977063 -0.17918248 -4.2583575 -2.0496013 -1.1488997 -1.6952598 1.3197834 2.6567917 0.6583393 -0.7970425 0. ] [ 0. 2.5431406 0.8817863 -3.4387727 2.978582 1.2285067 -0.55480325 2.4813735 1.2951682 -4.220799 -4.3205647 0. ] [ 0. 1.5101573 -2.0644283 0.3333765 0.5867432 -0.74347556 -0.11814103 2.4512012 -0.9871371 -2.8783553 -3.5138996 0. ] [ 0. 
4.7968264 -1.204726 0.97439796 -5.969633 5.547377 3.089112 -1.1319271 -2.792113 1.1485411 1.4877071 0. ] [ 0. 5.027697 4.8457727 -1.4608072 1.4972361 9.341504 2.2547212 0.05507383 0.40488806 2.8060927 3.331215 0. ] [ 0. -0.6083373 -1.6683981 -1.6908894 -3.284685 -0.09422886 -5.554579 5.6669 1.1478984 -1.584319 3.2231472 0. ] [ 0. 5.9253845 -0.653477 1.6261541 -7.027245 -0.61401725 1.1357526 -1.059214 -1.210774 -1.2633561 -2.7597847 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.9309219 -1.3023329 0.47839123 0.38804206 1.2862952 -2.8915167 -0.07536816 -1.000161 1.9994062 1.1021461 0. ] [ 0. -1.2447748 2.91648 -1.8303785 -4.0978208 -1.9762409 4.041466 -2.3811572 1.7121195 0.48841763 4.1400685 0. ] [ 0. -0.8082063 0.8340334 4.3885903 3.0392256 2.989626 1.2258964 -0.2850413 3.1608217 1.4391018 -1.0681148 0. ] [ 0. -4.5129232 -1.0486372 -3.2404816 -0.54689956 -3.453934 1.5427244 1.8874518 2.3730028 -1.7079009 1.2757969 0. ] [ 0. 0.4875342 0.9468657 -1.2942282 1.7608069 4.03581 -0.8631549 0.6718271 1.5840311 -3.4025064 -3.3194857 0. ] [ 0. -1.8909237 -3.265078 2.0673895 -1.2515899 -0.8039533 -0.02454847 -1.6063066 -0.2823968 -1.4708439 -2.8975754 0. ] [ 0. 2.8535614 0.12651896 1.6165547 -2.208187 1.7640743 0.42932945 -3.3895855 -1.8968562 2.4020722 1.5455804 0. ] [ 0. 1.4225414 0.3970613 -3.3774104 1.6025726 4.611832 0.51539856 -2.17481 2.1062386 3.5535772 0.39475206 0. ] [ 0. 0.41929466 -0.22034653 1.1615435 -1.9501426 0.7283043 -2.9937897 3.6074202 -0.972481 -0.87750894 1.0763712 0. ] [ 0. 1.6830033 -2.1955636 4.9829574 -3.6610808 -3.5601132 3.1148183 -1.4385468 -0.29530773 1.8290771 -1.2965509 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 
0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.01661133 -0.2800492 -0.9015261 -0.3526855 1.5940144 -1.6884855 0.6848016 -1.1054448 0.6021667 1.8275752 0. ] [ 0. -1.3501474 0.7761594 -0.35488337 -1.6396118 -1.6809847 1.1604478 -2.072908 1.0100132 0.6640717 1.697809 0. ] [ 0. 1.0762788 1.8153677 4.495584 1.0784266 0.73633647 3.2971513 0.15341948 1.6811467 0.25147566 -1.0357295 0. ] [ 0. -2.2658477 -0.3590345 -3.1509397 0.1610055 -2.1178207 2.340984 0.08933424 2.1079125 -0.20687315 1.0980921 0. ] [ 0. 0.30590677 -0.22932078 -1.9416237 1.2216893 2.0819004 0.4384187 0.72343296 0.58603287 -2.5274649 -2.1567857 0. ] [ 0. -0.28019166 -0.8933802 1.3271112 -1.6431473 0.14203379 0.9737708 -1.3834875 0.24476466 -0.7776511 -0.8801879 0. ] [ 0. 2.8634007 0.68411434 0.18656106 -1.4114873 0.2887656 0.5014993 -2.3281307 -0.88799715 1.9366727 0.7608087 0. ] [ 0. 0.23623993 0.3321263 -1.5488493 1.2494055 3.21713 0.49345994 -1.8935586 1.7412521 1.8844153 -0.8626553 0. ] [ 0. -0.09282709 -1.9084046 0.0407513 -0.22842215 0.17721651 -1.1888479 1.4254413 -1.5764117 -1.1335567 -0.2959205 0. ] [ 0. 1.0508446 -1.5047615 3.3154762 -2.5223653 -3.0558681 1.0152277 -1.8608537 -1.2921673 2.5876956 -0.44282788 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]]]; ov_res: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 4.6596003 -1.7366495 0.763352 -1.8818761 -0.3982121 -1.3639334 -3.0480158 -5.8270216 1.0411386 3.55173 0. ] [ 0. -2.0055842 5.1817284 -2.719353 -4.401655 -3.0392036 -0.03744755 -4.1042366 -3.323186 -4.0539217 7.193033 0. ] [ 0. -2.2169864 0.64995956 -0.12056996 0.46321934 4.019516 -0.7395168 -2.0129766 11.57934 0.29758522 -3.3130393 0. ] [ 0. -2.5977063 -0.17918248 -4.2583575 -2.0496013 -1.1488997 -1.6952598 1.3197834 2.6567917 0.6583393 -0.7970425 0. ] [ 0. 
2.5431406 0.8817863 -3.4387727 2.978582 1.2285067 -0.55480325 2.4813735 1.2951682 -4.220799 -4.3205647 0. ] [ 0. 1.5101573 -2.0644283 0.3333765 0.5867432 -0.74347556 -0.11814103 2.4512012 -0.9871371 -2.8783553 -3.5138996 0. ] [ 0. 4.7968264 -1.204726 0.97439796 -5.969633 5.547377 3.089112 -1.1319271 -2.792113 1.1485411 1.4877071 0. ] [ 0. 5.027697 4.8457727 -1.4608072 1.4972361 9.341504 2.2547212 0.05507383 0.40488806 2.8060927 3.331215 0. ] [ 0. -0.6083373 -1.6683981 -1.6908894 -3.284685 -0.09422886 -5.554579 5.6669 1.1478984 -1.584319 3.2231472 0. ] [ 0. 5.9253845 -0.653477 1.6261541 -7.027245 -0.61401725 1.1357526 -1.059214 -1.210774 -1.2633561 -2.7597847 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.9309219 -1.3023329 0.47839123 0.38804206 1.2862952 -2.8915167 -0.07536816 -1.000161 1.9994062 1.1021461 0. ] [ 0. -1.2447748 2.91648 -1.8303785 -4.0978208 -1.9762409 4.041466 -2.3811572 1.7121195 0.48841763 4.1400685 0. ] [ 0. -0.8082063 0.8340334 4.3885903 3.0392256 2.989626 1.2258964 -0.2850413 3.1608217 1.4391018 -1.0681148 0. ] [ 0. -4.5129232 -1.0486372 -3.2404816 -0.54689956 -3.453934 1.5427244 1.8874518 2.3730028 -1.7079009 1.2757969 0. ] [ 0. 0.4875342 0.9468657 -1.2942282 1.7608069 4.03581 -0.8631549 0.6718271 1.5840311 -3.4025064 -3.3194857 0. ] [ 0. -1.8909237 -3.265078 2.0673895 -1.2515899 -0.8039533 -0.02454847 -1.6063066 -0.2823968 -1.4708439 -2.8975754 0. ] [ 0. 2.8535614 0.12651896 1.6165547 -2.208187 1.7640743 0.42932945 -3.3895855 -1.8968562 2.4020722 1.5455804 0. ] [ 0. 1.4225414 0.3970613 -3.3774104 1.6025726 4.611832 0.51539856 -2.17481 2.1062386 3.5535772 0.39475206 0. ] [ 0. 0.41929466 -0.22034653 1.1615435 -1.9501426 0.7283043 -2.9937897 3.6074202 -0.972481 -0.87750894 1.0763712 0. ] [ 0. 
1.6830033 -2.1955636 4.9829574 -3.6610808 -3.5601132 3.1148183 -1.4385468 -0.29530773 1.8290771 -1.2965509 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.01661133 -0.2800492 -0.9015261 -0.3526855 1.5940144 -1.6884855 0.6848016 -1.1054448 0.6021667 1.8275752 0. ] [ 0. -1.3501474 0.7761594 -0.35488337 -1.6396118 -1.6809847 1.1604478 -2.072908 1.0100132 0.6640717 1.697809 0. ] [ 0. 1.0762788 1.8153677 4.495584 1.0784266 0.73633647 3.2971513 0.15341948 1.6811467 0.25147566 -1.0357295 0. ] [ 0. -2.2658477 -0.3590345 -3.1509397 0.1610055 -2.1178207 2.340984 0.08933424 2.1079125 -0.20687315 1.0980921 0. ] [ 0. 0.30590677 -0.22932078 -1.9416237 1.2216893 2.0819004 0.4384187 0.72343296 0.58603287 -2.5274649 -2.1567857 0. ] [ 0. -0.28019166 -0.8933802 1.3271112 -1.6431473 0.14203379 0.9737708 -1.3834875 0.24476466 -0.7776511 -0.8801879 0. ] [ 0. 2.8634007 0.68411434 0.18656106 -1.4114873 0.2887656 0.5014993 -2.3281307 -0.88799715 1.9366727 0.7608087 0. ] [ 0. 0.23623993 0.3321263 -1.5488493 1.2494055 3.21713 0.49345994 -1.8935586 1.7412521 1.8844153 -0.8626553 0. ] [ 0. -0.09282709 -1.9084046 0.0407513 -0.22842215 0.17721651 -1.1888479 1.4254413 -1.5764117 -1.1335567 -0.2959205 0. ] [ 0. 1.0508446 -1.5047615 3.3154762 -2.5223653 -3.0558681 1.0152277 -1.8608537 -1.2921673 2.5876956 -0.44282788 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_579.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.8848 (2,1,.,.) = -1.2649 (3,1,.,.) = -0.8916 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[ 1.7166135 -0.4704111 0.1516595 0.0386143 0.41881987 -2.1545088 1.3380823 0.88561237 0.96036905 -0.75049436] [-1.7289492 1.3338271 1.7467048 -3.5642939 -1.4842062 0.08229959 -0.37547195 2.220898 -0.5655018 -1.6586343 ] [-2.58527 -1.2516853 -1.6167428 -0.6690707 0.71787137 -2.176322 0.22493142 0.86164755 0.2506222 1.8846806 ] [-3.2449162 2.2738636 -0.7793884 1.3892158 2.3746805 -1.848093 1.263536 0.61528313 -1.8917538 1.1775553 ] [ 3.6146898 0.11513159 -1.0288225 -2.499577 -0.6369911 -4.611747 -1.400862 -2.3915634 -1.8124322 0.9591503 ] [ 0.45592293 -2.030537 -1.3469226 -0.63937247 0.46103188 -1.0901623 -0.66216356 -1.9898435 1.9049959 -2.4579985 ] [-0.2419448 2.9365535 -1.0412893 0.62694824 -2.454165 -1.0630243 2.1689353 -1.6328828 -0.42844263 -0.0804149 ] [ 0.6601694 -2.1505551 -4.0799413 0.85707587 -0.48958802 -0.67197454 -2.7850106 0.46976638 -0.10813121 -0.81037056]]]]; ov_res: [[[[ 1.7166135 
-0.4704111 0.1516595 0.0386143 0.41881987 -2.1545088 1.3380823 0.88561237 0.96036905 -0.75049436] [-1.7289492 1.3338271 1.7467048 -3.5642939 -1.4842062 0.08229959 -0.37547195 2.220898 -0.5655018 -1.6586343 ] [-2.58527 -1.2516853 -1.6167428 -0.6690707 0.71787137 -2.176322 0.22493142 0.86164755 0.2506222 1.8846806 ] [-3.2449162 2.2738636 -0.7793884 1.3892158 2.3746805 -1.848093 1.263536 0.61528313 -1.8917538 1.1775553 ] [ 3.6146898 0.11513159 -1.0288225 -2.499577 -0.6369911 -4.611747 -1.400862 -2.3915634 -1.8124322 0.9591503 ] [ 0.45592293 -2.030537 -1.3469226 -0.63937247 0.46103188 -1.0901623 -0.66216356 -1.9898435 1.9049959 -2.4579985 ] [-0.2419448 2.9365535 -1.0412893 0.62694824 -2.454165 -1.0630243 2.1689353 -1.6328828 -0.42844263 -0.0804149 ] [ 0.6601694 -2.1505551 -4.0799413 0.85707587 -0.48958802 -0.67197454 -2.7850106 0.46976638 -0.10813121 -0.81037056]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [0, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_581.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.1416 (2,1,.,.) = -0.1045 (3,1,.,.) = -0.5415 (1,2,.,.) = -0.9026 (2,2,.,.) = -1.3778 (3,2,.,.) = 1.9924 (1,3,.,.) = -0.5749 (2,3,.,.) = 1.4349 (3,3,.,.) = -1.7061 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 0.0000000e+00 -6.5925777e-02 -1.7888083e+00 1.3321379e-01 -3.5425809e-01 -1.0116730e+00 2.0455513e+00 -6.1996102e-01 2.1066967e-01 2.9211539e-01 8.0171543e-01 0.0000000e+00] [ 0.0000000e+00 -1.3353240e+00 -1.4360056e-02 7.4358541e-01 1.3226849e+00 -6.9327474e-01 -3.4239101e-01 -1.5271012e+00 -9.1339886e-01 9.7644657e-01 3.6583740e-01 0.0000000e+00] [ 0.0000000e+00 1.0998393e+00 1.6478004e+00 2.1553793e+00 -1.4136945e+00 -1.1952811e-01 -7.1995866e-01 8.5470188e-01 -9.0869552e-01 4.3012559e-01 -6.3876820e-01 0.0000000e+00] [ 0.0000000e+00 6.3549757e-01 -6.7931402e-01 1.5179549e+00 2.6214262e-02 -1.4042617e+00 -1.4302379e+00 4.5506075e-01 2.1286001e+00 4.3738467e-01 7.3119152e-01 0.0000000e+00] [ 0.0000000e+00 -4.5595211e-01 -1.4169137e+00 4.4442227e-01 -1.6227508e-02 2.8152573e-01 1.8478702e-01 4.7111389e-01 2.4277635e+00 -1.0847141e-01 
-2.2628405e+00 0.0000000e+00] [ 0.0000000e+00 1.7959843e+00 -1.8275517e+00 6.4358670e-01 1.6146615e+00 7.0751923e-01 -5.6669015e-01 1.3703296e+00 1.8777386e+00 1.5446140e+00 -3.7375239e-01 0.0000000e+00] [ 0.0000000e+00 -3.6989242e-01 9.6613817e-02 3.4826806e-01 -2.6285738e-01 6.6885400e-01 1.0203044e-01 -9.2166984e-01 1.9437611e-01 4.7386849e-01 1.0534334e+00 0.0000000e+00] [ 0.0000000e+00 -1.6301938e+00 3.6768356e-01 -1.9073007e+00 3.2317364e-01 -1.9942132e+00 -5.1346111e-01 -4.5178279e-01 -1.4880207e+00 -4.9301684e-01 1.6146668e+00 0.0000000e+00] [ 0.0000000e+00 1.4718657e+00 -1.3631967e+00 -1.3297940e+00 5.5999237e-01 -2.0581986e-01 9.8770940e-01 6.4559197e-01 -7.0850879e-01 7.1260011e-01 1.1773521e+00 0.0000000e+00] [ 0.0000000e+00 -2.5550830e-01 -3.2240930e-01 -1.2861320e+00 1.2416624e+00 6.1388606e-01 9.7294569e-01 1.6408371e+00 -1.9839209e-02 -1.6132727e-01 6.6218370e-01 0.0000000e+00]] [[ 0.0000000e+00 -3.1549349e+00 3.3696489e+00 9.0312564e-01 -7.0628160e-01 3.3233249e-01 9.8443776e-02 -1.2519921e+00 -3.3706367e+00 -1.5718441e+00 -2.9733167e+00 0.0000000e+00] [ 0.0000000e+00 -2.6949449e+00 -2.6113701e-01 4.2517681e+00 -5.1068068e-01 -6.1370675e-02 -2.8969769e+00 -8.0171627e-01 -3.2645123e+00 2.1487331e+00 -1.1536816e+00 0.0000000e+00] [ 0.0000000e+00 4.3681784e+00 4.4024476e-01 1.1904007e+00 -1.5149739e+00 -1.4578028e+00 6.2526321e-01 8.5824496e-01 -8.3329457e-01 -2.4314415e-01 1.0113708e+00 0.0000000e+00] [ 0.0000000e+00 1.8005213e+00 -1.8221300e+00 1.3149979e+00 -6.2295426e-02 -1.2393138e+00 4.9274960e-01 -2.4944949e+00 1.1173635e+00 -7.4618191e-02 6.5468842e-01 0.0000000e+00] [ 0.0000000e+00 -7.0402598e-01 1.8279894e-01 7.9557031e-01 -1.7421191e+00 3.8849871e+00 1.6154300e+00 2.6798515e+00 4.3035131e+00 -2.9039528e+00 -2.1171551e+00 0.0000000e+00] [ 0.0000000e+00 1.3869541e+00 -9.1729993e-01 3.6610944e+00 -2.1349871e+00 3.2662457e-01 1.1322311e+00 1.0625042e+00 -3.7594931e+00 5.4381504e+00 1.9517466e-01 0.0000000e+00] [ 0.0000000e+00 2.6263449e+00 
-1.5136145e-01 3.0197066e-01 6.3738473e-02 4.1451941e+00 -2.9090378e-01 2.8265431e+00 3.6984584e+00 -1.3524606e+00 2.9319148e+00 0.0000000e+00] [ 0.0000000e+00 -2.2614945e-01 -2.3920798e+00 1.6025950e+00 4.7098494e+00 -1.5080417e+00 -1.4852097e+00 2.9491947e+00 3.6496499e-01 -2.5926909e+00 4.3410888e+00 0.0000000e+00] [ 0.0000000e+00 -1.6301869e-01 1.8212243e+00 -1.1322238e+00 -6.9069570e-01 7.1836692e-01 -3.4004003e-01 1.0987104e+00 1.3549671e+00 2.7610445e-01 -1.7820840e+00 0.0000000e+00] [ 0.0000000e+00 1.6820037e+00 -2.2748513e+00 -4.7450666e+00 -3.5457871e+00 -4.1617494e+00 -1.5344305e+00 1.3383318e-01 -2.0243338e-01 9.5758998e-01 1.9436023e+00 0.0000000e+00]] [[ 0.0000000e+00 3.7015114e+00 -3.9536128e+00 -5.9740824e-01 1.5175198e+00 -3.6298160e-02 -1.5379409e+00 5.6315547e-01 4.1812477e+00 1.6992325e+00 3.7566459e+00 0.0000000e+00] [ 0.0000000e+00 4.5635328e+00 1.5614250e-01 -5.9713616e+00 -6.0874104e-01 -8.4603447e-01 3.2267437e+00 1.2510957e+00 3.2680929e+00 -3.6055517e+00 1.5639324e+00 0.0000000e+00] [ 0.0000000e+00 -6.0708776e+00 1.7868054e-01 -1.7801979e+00 2.4995174e+00 2.3658385e+00 -1.0732701e+00 -1.4393516e+00 5.3399098e-01 1.3304598e+00 -1.1820984e+00 0.0000000e+00] [ 0.0000000e+00 -2.8673196e+00 3.0879705e+00 -2.6559756e+00 -9.5258570e-01 2.5750773e+00 3.1197780e-01 3.5569575e+00 -2.0878592e+00 -3.8068241e-01 -1.1858232e+00 0.0000000e+00] [ 0.0000000e+00 8.6899084e-01 1.0966135e+00 6.0123116e-01 1.2659861e+00 -6.4015708e+00 -1.8364096e+00 -3.5157199e+00 -6.4099841e+00 4.9244699e+00 3.6493027e+00 0.0000000e+00] [ 0.0000000e+00 -2.8394732e+00 2.4342873e+00 -5.0907755e+00 2.9083033e+00 -2.0668635e+00 -8.9818263e-01 -1.4327105e+00 5.3113713e+00 -7.0552430e+00 3.8010082e-01 0.0000000e+00] [ 0.0000000e+00 -2.7049115e+00 -5.0074798e-01 -1.7852056e+00 1.1808156e+00 -4.5481143e+00 7.0381403e-01 -3.8910899e+00 -4.9168539e+00 2.3774998e+00 -3.0243800e+00 0.0000000e+00] [ 0.0000000e+00 1.4590031e+00 3.4652655e+00 -1.9891611e+00 -4.8380179e+00 2.4611187e+00 
1.7536454e+00 -4.5781784e+00 -1.0952104e+00 4.3893194e+00 -6.2735910e+00 0.0000000e+00] [ 0.0000000e+00 -4.5329991e-01 -8.7491316e-01 -8.3400823e-02 1.4218599e+00 -3.9448422e-01 1.4899997e-04 -3.3524492e+00 -1.4741987e+00 -1.4696316e+00 2.2799516e+00 0.0000000e+00] [ 0.0000000e+00 -2.0548313e+00 2.8441162e+00 6.4556413e+00 5.1902566e+00 4.8853054e+00 1.8091494e+00 -7.2419596e-01 6.0397780e-01 -1.0030895e+00 -1.9131262e+00 0.0000000e+00]]]]; ov_res: [[[[ 0.0000000e+00 -6.5925777e-02 -1.7888083e+00 1.3321379e-01 -3.5425809e-01 -1.0116730e+00 2.0455513e+00 -6.1996102e-01 2.1066967e-01 2.9211539e-01 8.0171543e-01 0.0000000e+00] [ 0.0000000e+00 -1.3353240e+00 -1.4360056e-02 7.4358541e-01 1.3226849e+00 -6.9327474e-01 -3.4239101e-01 -1.5271012e+00 -9.1339886e-01 9.7644657e-01 3.6583740e-01 0.0000000e+00] [ 0.0000000e+00 1.0998393e+00 1.6478004e+00 2.1553793e+00 -1.4136945e+00 -1.1952811e-01 -7.1995866e-01 8.5470188e-01 -9.0869552e-01 4.3012559e-01 -6.3876820e-01 0.0000000e+00] [ 0.0000000e+00 6.3549757e-01 -6.7931402e-01 1.5179549e+00 2.6214262e-02 -1.4042617e+00 -1.4302379e+00 4.5506075e-01 2.1286001e+00 4.3738467e-01 7.3119152e-01 0.0000000e+00] [ 0.0000000e+00 -4.5595211e-01 -1.4169137e+00 4.4442227e-01 -1.6227508e-02 2.8152573e-01 1.8478702e-01 4.7111389e-01 2.4277635e+00 -1.0847141e-01 -2.2628405e+00 0.0000000e+00] [ 0.0000000e+00 1.7959843e+00 -1.8275517e+00 6.4358670e-01 1.6146615e+00 7.0751923e-01 -5.6669015e-01 1.3703296e+00 1.8777386e+00 1.5446140e+00 -3.7375239e-01 0.0000000e+00] [ 0.0000000e+00 -3.6989242e-01 9.6613817e-02 3.4826806e-01 -2.6285738e-01 6.6885400e-01 1.0203044e-01 -9.2166984e-01 1.9437611e-01 4.7386849e-01 1.0534334e+00 0.0000000e+00] [ 0.0000000e+00 -1.6301938e+00 3.6768356e-01 -1.9073007e+00 3.2317364e-01 -1.9942132e+00 -5.1346111e-01 -4.5178279e-01 -1.4880207e+00 -4.9301684e-01 1.6146668e+00 0.0000000e+00] [ 0.0000000e+00 1.4718657e+00 -1.3631967e+00 -1.3297940e+00 5.5999237e-01 -2.0581986e-01 9.8770940e-01 6.4559197e-01 -7.0850879e-01 
7.1260011e-01 1.1773521e+00 0.0000000e+00] [ 0.0000000e+00 -2.5550830e-01 -3.2240930e-01 -1.2861320e+00 1.2416624e+00 6.1388606e-01 9.7294569e-01 1.6408371e+00 -1.9839209e-02 -1.6132727e-01 6.6218370e-01 0.0000000e+00]] [[ 0.0000000e+00 -3.1549349e+00 3.3696489e+00 9.0312564e-01 -7.0628160e-01 3.3233249e-01 9.8443776e-02 -1.2519921e+00 -3.3706367e+00 -1.5718441e+00 -2.9733167e+00 0.0000000e+00] [ 0.0000000e+00 -2.6949449e+00 -2.6113701e-01 4.2517681e+00 -5.1068068e-01 -6.1370675e-02 -2.8969769e+00 -8.0171627e-01 -3.2645123e+00 2.1487331e+00 -1.1536816e+00 0.0000000e+00] [ 0.0000000e+00 4.3681784e+00 4.4024476e-01 1.1904007e+00 -1.5149739e+00 -1.4578028e+00 6.2526321e-01 8.5824496e-01 -8.3329457e-01 -2.4314415e-01 1.0113708e+00 0.0000000e+00] [ 0.0000000e+00 1.8005213e+00 -1.8221300e+00 1.3149979e+00 -6.2295426e-02 -1.2393138e+00 4.9274960e-01 -2.4944949e+00 1.1173635e+00 -7.4618191e-02 6.5468842e-01 0.0000000e+00] [ 0.0000000e+00 -7.0402598e-01 1.8279894e-01 7.9557031e-01 -1.7421191e+00 3.8849871e+00 1.6154300e+00 2.6798515e+00 4.3035131e+00 -2.9039528e+00 -2.1171551e+00 0.0000000e+00] [ 0.0000000e+00 1.3869541e+00 -9.1729993e-01 3.6610944e+00 -2.1349871e+00 3.2662457e-01 1.1322311e+00 1.0625042e+00 -3.7594931e+00 5.4381504e+00 1.9517466e-01 0.0000000e+00] [ 0.0000000e+00 2.6263449e+00 -1.5136145e-01 3.0197066e-01 6.3738473e-02 4.1451941e+00 -2.9090378e-01 2.8265431e+00 3.6984584e+00 -1.3524606e+00 2.9319148e+00 0.0000000e+00] [ 0.0000000e+00 -2.2614945e-01 -2.3920798e+00 1.6025950e+00 4.7098494e+00 -1.5080417e+00 -1.4852097e+00 2.9491947e+00 3.6496499e-01 -2.5926909e+00 4.3410888e+00 0.0000000e+00] [ 0.0000000e+00 -1.6301869e-01 1.8212243e+00 -1.1322238e+00 -6.9069570e-01 7.1836692e-01 -3.4004003e-01 1.0987104e+00 1.3549671e+00 2.7610445e-01 -1.7820840e+00 0.0000000e+00] [ 0.0000000e+00 1.6820037e+00 -2.2748513e+00 -4.7450666e+00 -3.5457871e+00 -4.1617494e+00 -1.5344305e+00 1.3383318e-01 -2.0243338e-01 9.5758998e-01 1.9436023e+00 0.0000000e+00]] [[ 0.0000000e+00 
3.7015114e+00 -3.9536128e+00 -5.9740824e-01 1.5175198e+00 -3.6298160e-02 -1.5379409e+00 5.6315547e-01 4.1812477e+00 1.6992325e+00 3.7566459e+00 0.0000000e+00] [ 0.0000000e+00 4.5635328e+00 1.5614250e-01 -5.9713616e+00 -6.0874104e-01 -8.4603447e-01 3.2267437e+00 1.2510957e+00 3.2680929e+00 -3.6055517e+00 1.5639324e+00 0.0000000e+00] [ 0.0000000e+00 -6.0708776e+00 1.7868054e-01 -1.7801979e+00 2.4995174e+00 2.3658385e+00 -1.0732701e+00 -1.4393516e+00 5.3399098e-01 1.3304598e+00 -1.1820984e+00 0.0000000e+00] [ 0.0000000e+00 -2.8673196e+00 3.0879705e+00 -2.6559756e+00 -9.5258570e-01 2.5750773e+00 3.1197780e-01 3.5569575e+00 -2.0878592e+00 -3.8068241e-01 -1.1858232e+00 0.0000000e+00] [ 0.0000000e+00 8.6899084e-01 1.0966135e+00 6.0123116e-01 1.2659861e+00 -6.4015708e+00 -1.8364096e+00 -3.5157199e+00 -6.4099841e+00 4.9244699e+00 3.6493027e+00 0.0000000e+00] [ 0.0000000e+00 -2.8394732e+00 2.4342873e+00 -5.0907755e+00 2.9083033e+00 -2.0668635e+00 -8.9818263e-01 -1.4327105e+00 5.3113713e+00 -7.0552430e+00 3.8010082e-01 0.0000000e+00] [ 0.0000000e+00 -2.7049115e+00 -5.0074798e-01 -1.7852056e+00 1.1808156e+00 -4.5481143e+00 7.0381403e-01 -3.8910899e+00 -4.9168539e+00 2.3774998e+00 -3.0243800e+00 0.0000000e+00] [ 0.0000000e+00 1.4590031e+00 3.4652655e+00 -1.9891611e+00 -4.8380179e+00 2.4611187e+00 1.7536454e+00 -4.5781784e+00 -1.0952104e+00 4.3893194e+00 -6.2735910e+00 0.0000000e+00] [ 0.0000000e+00 -4.5329991e-01 -8.7491316e-01 -8.3400823e-02 1.4218599e+00 -3.9448422e-01 1.4899997e-04 -3.3524492e+00 -1.4741987e+00 -1.4696316e+00 2.2799516e+00 0.0000000e+00] [ 0.0000000e+00 -2.0548313e+00 2.8441162e+00 6.4556413e+00 5.1902566e+00 4.8853054e+00 1.8091494e+00 -7.2419596e-01 6.0397780e-01 -1.0030895e+00 -1.9131262e+00 0.0000000e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_583.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 1.0112 (2,1,.,.) = -0.4589 (3,1,.,.) = 0.1848 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[-0.11851905 -0.99335176 0.2686371 -0.46741143 -0.2854359 -1.5891001 0.07439727 0.08584689 -0.01086106 -0.5576551 ] [ 1.2131599 0.7258838 -0.88935703 0.13625172 -1.3786077 0.1981497 0.20182103 -1.4013737 0.61725694 -0.13649637] [ 0.90060747 -0.87203324 -0.2535039 -0.08174574 1.9500625 0.9211949 1.0802301 0.71782047 -0.5338962 -0.6677028 ] [ 0.76125294 0.01181717 -1.7754773 -0.9993052 0.33886907 1.7452704 -0.22063188 0.37365398 0.2236271 -1.2240993 ] [ 1.8780006 -0.77367306 1.9473325 0.37634876 -1.3575019 -1.5344176 0.9809561 -0.3972498 0.60411656 -0.0858678 ] [ 0.15686841 0.40390813 0.08059171 -0.11569651 2.0666094 -0.31496033 -0.78959316 -0.15280019 2.97293 -0.8771217 ] [ 1.2535365 -0.19093901 -1.1497504 0.69471604 0.21054298 -0.56563 -0.8899396 -1.2977567 0.03747214 -0.46155268] [-2.1766212 -1.2382144 -0.07402954 -0.12849689 -0.6297272 0.65504134 -0.76390725 -0.1028968 -1.9287101 -0.80225295]] [[ 
0.22602244 0.7025119 0.2653056 0.9374149 -0.05909162 0.03660535 -0.24491383 -0.22544318 0.37637082 -0.11597084] [ 0.49131325 -0.09406605 -0.7459731 -0.70133257 0.02622983 0.282222 0.29791808 0.9626314 0.62626946 -0.23535524] [ 0.12471362 -0.14790578 -0.04853361 -0.2572476 -0.20171879 -0.05294748 -0.04629932 -0.19835338 -0.13823165 0.04155339] [ 0.05636736 -0.11062281 -0.03078022 0.38900426 0.17894782 -1.0370848 0.66017526 -0.9142526 -0.21939483 -0.01267172] [-0.6552853 0.44779432 -0.7630488 -0.22044982 0.19063008 -0.18755639 0.9360728 -0.52191377 -0.47650468 0.07301957] [-0.22282538 -0.14287813 0.3699577 0.1611441 0.5368531 -0.3346675 -0.38900444 1.1763057 -0.39427912 0.21113117] [-0.36394587 -0.07911488 0.5363788 -0.18732543 0.0433741 -0.60890883 0.47475246 -0.16878667 0.22610362 -0.31242096] [-0.38067594 0.1020024 0.27922058 0.12058503 0.19650856 -0.64529306 0.05445877 0.32773623 -0.3880682 -0.5696084 ]] [[-0.02104096 -0.36192942 0.07886054 0.13448144 0.16907309 0.22006065 0.23700914 0.01395025 -0.00539769 0.02340801] [ 0.3300829 -0.18370828 -0.03549359 0.35338834 -0.40860695 -0.24799082 -0.08162463 -0.09989375 0.3980283 -0.00697307] [-0.1842028 0.03064473 -0.19432227 0.18530643 0.2647875 0.06941649 0.33605433 0.15228248 0.03416531 0.283446 ] [-0.07047891 0.06969538 0.02759551 -0.02029022 -0.04073764 0.23355423 -0.00778638 0.04322904 0.10837048 -0.19629656] [ 0.2457183 -0.09319951 0.4855104 0.22772804 0.03459517 0.6300376 -0.2521838 -0.41539305 0.21265161 -0.15361588] [ 0.32066095 0.10163655 0.053043 -0.19794483 0.23262359 0.19354318 0.03261797 -0.01981547 0.03758873 -0.02285081] [-0.02135571 0.06221221 0.22022831 0.44672 0.16258736 -0.27276328 0.46311504 0.3778696 -0.28345877 0.07312953] [ 0.11459567 -0.22742419 0.17995009 0.06169733 -0.02530372 0.41397706 -0.08608106 0.2879578 0.16521133 0.18823133]]]]; ov_res: [[[[-0.11851905 -0.99335176 0.2686371 -0.46741143 -0.2854359 -1.5891001 0.07439727 0.08584689 -0.01086106 -0.5576551 ] [ 1.2131599 0.7258838 -0.88935703 
0.13625172 -1.3786077 0.1981497 0.20182103 -1.4013737 0.61725694 -0.13649637] [ 0.90060747 -0.87203324 -0.2535039 -0.08174574 1.9500625 0.9211949 1.0802301 0.71782047 -0.5338962 -0.6677028 ] [ 0.76125294 0.01181717 -1.7754773 -0.9993052 0.33886907 1.7452704 -0.22063188 0.37365398 0.2236271 -1.2240993 ] [ 1.8780006 -0.77367306 1.9473325 0.37634876 -1.3575019 -1.5344176 0.9809561 -0.3972498 0.60411656 -0.0858678 ] [ 0.15686841 0.40390813 0.08059171 -0.11569651 2.0666094 -0.31496033 -0.78959316 -0.15280019 2.97293 -0.8771217 ] [ 1.2535365 -0.19093901 -1.1497504 0.69471604 0.21054298 -0.56563 -0.8899396 -1.2977567 0.03747214 -0.46155268] [-2.1766212 -1.2382144 -0.07402954 -0.12849689 -0.6297272 0.65504134 -0.76390725 -0.1028968 -1.9287101 -0.80225295]] [[ 0.22602244 0.7025119 0.2653056 0.9374149 -0.05909162 0.03660535 -0.24491383 -0.22544318 0.37637082 -0.11597084] [ 0.49131325 -0.09406605 -0.7459731 -0.70133257 0.02622983 0.282222 0.29791808 0.9626314 0.62626946 -0.23535524] [ 0.12471362 -0.14790578 -0.04853361 -0.2572476 -0.20171879 -0.05294748 -0.04629932 -0.19835338 -0.13823165 0.04155339] [ 0.05636736 -0.11062281 -0.03078022 0.38900426 0.17894782 -1.0370848 0.66017526 -0.9142526 -0.21939483 -0.01267172] [-0.6552853 0.44779432 -0.7630488 -0.22044982 0.19063008 -0.18755639 0.9360728 -0.52191377 -0.47650468 0.07301957] [-0.22282538 -0.14287813 0.3699577 0.1611441 0.5368531 -0.3346675 -0.38900444 1.1763057 -0.39427912 0.21113117] [-0.36394587 -0.07911488 0.5363788 -0.18732543 0.0433741 -0.60890883 0.47475246 -0.16878667 0.22610362 -0.31242096] [-0.38067594 0.1020024 0.27922058 0.12058503 0.19650856 -0.64529306 0.05445877 0.32773623 -0.3880682 -0.5696084 ]] [[-0.02104096 -0.36192942 0.07886054 0.13448144 0.16907309 0.22006065 0.23700914 0.01395025 -0.00539769 0.02340801] [ 0.3300829 -0.18370828 -0.03549359 0.35338834 -0.40860695 -0.24799082 -0.08162463 -0.09989375 0.3980283 -0.00697307] [-0.1842028 0.03064473 -0.19432227 0.18530643 0.2647875 0.06941649 0.33605433 
0.15228248 0.03416531 0.283446 ] [-0.07047891 0.06969538 0.02759551 -0.02029022 -0.04073764 0.23355423 -0.00778638 0.04322904 0.10837048 -0.19629656] [ 0.2457183 -0.09319951 0.4855104 0.22772804 0.03459517 0.6300376 -0.2521838 -0.41539305 0.21265161 -0.15361588] [ 0.32066095 0.10163655 0.053043 -0.19794483 0.23262359 0.19354318 0.03261797 -0.01981547 0.03758873 -0.02285081] [-0.02135571 0.06221221 0.22022831 0.44672 0.16258736 -0.27276328 0.46311504 0.3778696 -0.28345877 0.07312953] [ 0.11459567 -0.22742419 0.17995009 0.06169733 -0.02530372 0.41397706 -0.08608106 0.2879578 0.16521133 0.18823133]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 1], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_585.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.1305 (2,1,.,.) = 1.0491 (3,1,.,.) = 0.1870 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 0.00000000e+00 5.87631650e-02 2.24110752e-01 -1.60305753e-01 -1.79101173e-02 1.35557383e-01 8.91903341e-02 1.03252143e-01 -8.04056823e-02 -3.68212909e-02 -2.11248230e-02 0.00000000e+00] [ 0.00000000e+00 1.04450628e-01 1.76953420e-03 -7.85903931e-02 -1.00097559e-01 -8.06533918e-02 1.45804763e-01 -1.57377630e-01 8.78040344e-02 -1.30083546e-01 -2.07887311e-03 0.00000000e+00] [ 0.00000000e+00 -3.45465720e-01 -1.16701655e-01 -7.30717480e-02 -8.60265791e-02 2.19190463e-01 -8.03982839e-02 1.42987326e-01 -1.69982225e-01 -2.39334613e-01 -1.51289683e-02 0.00000000e+00] [ 0.00000000e+00 -7.94432834e-02 -2.99870130e-02 -5.64220883e-02 1.57824770e-01 6.81312382e-02 -3.00690550e-02 -6.82929829e-02 -4.96759191e-02 -1.27182573e-01 4.72665913e-02 0.00000000e+00] [ 0.00000000e+00 9.25630108e-02 -3.00657004e-01 -1.51711866e-01 -5.35095446e-02 1.18263908e-01 -6.71704859e-02 1.39597103e-01 2.57949591e-01 -1.05721876e-01 2.71373503e-02 0.00000000e+00] [ 0.00000000e+00 -7.35490546e-02 
-4.92613390e-03 -1.21356361e-01 6.63867071e-02 -1.29107505e-01 -1.60821438e-01 5.72425611e-02 6.51222393e-02 4.55859303e-02 -3.76242884e-02 0.00000000e+00] [ 0.00000000e+00 -7.25138783e-02 1.49579281e-02 2.46341992e-03 -1.56320352e-02 7.67029300e-02 -6.27909377e-02 -1.43311352e-01 -3.82587016e-02 -2.58052528e-01 1.91238657e-01 0.00000000e+00] [ 0.00000000e+00 -1.52401164e-01 -1.53623030e-01 1.97727397e-01 6.82893544e-02 1.32083878e-01 6.11090362e-02 3.02157819e-01 -1.45269215e-01 -1.69951200e-01 2.21881568e-01 0.00000000e+00] [ 0.00000000e+00 -2.63438299e-02 1.37541950e-01 -1.19717441e-01 9.45924148e-02 1.65720627e-01 -1.81300312e-01 -1.84448600e-01 -1.57947510e-01 -9.16537866e-02 1.21793887e-02 0.00000000e+00] [ 0.00000000e+00 -5.89816459e-02 1.66266412e-01 1.17651626e-01 5.80531247e-02 -6.35710508e-02 -1.98840305e-01 4.07609046e-02 1.87278330e-01 -9.88384038e-02 7.68619031e-02 0.00000000e+00]] [[ 0.00000000e+00 -1.78233659e+00 3.76762062e-01 9.06847358e-01 4.15987074e-01 1.44691777e+00 -4.14900690e-01 1.77997780e+00 -9.87698972e-01 -1.20183790e+00 1.87009692e+00 0.00000000e+00] [ 0.00000000e+00 2.69238400e+00 -2.58848608e-01 2.74057150e-01 -7.93629408e-01 -7.26073205e-01 1.71821928e+00 -2.61550963e-01 -1.27609396e+00 -7.84234881e-01 -9.29369509e-01 0.00000000e+00] [ 0.00000000e+00 -2.90905070e+00 -1.61040246e-01 -1.36576295e+00 6.23561963e-02 -8.83298755e-01 5.40934317e-02 1.67761773e-01 -6.81683540e-01 -5.75916886e-01 -1.48672223e+00 0.00000000e+00] [ 0.00000000e+00 -8.30838382e-01 6.38813198e-01 4.32858884e-01 -9.75323081e-01 -3.15405071e-01 -1.07986414e+00 -6.95916042e-02 -7.28533566e-01 4.59287316e-01 2.60038877e+00 0.00000000e+00] [ 0.00000000e+00 -2.78296041e+00 -7.32797682e-01 -6.70631111e-01 1.02155232e+00 1.77952993e+00 -1.31288159e+00 3.42908233e-01 -1.79500115e+00 -8.47707033e-01 -6.66689217e-01 0.00000000e+00] [ 0.00000000e+00 -5.98944068e-01 -5.22988558e-01 -7.16452003e-02 -3.00179005e-01 -1.82026133e-01 -1.09576392e+00 1.52166069e-01 5.31186163e-01 
-2.13438797e+00 3.99927795e-02 0.00000000e+00] [ 0.00000000e+00 -8.99385691e-01 1.31042218e+00 1.11937416e+00 1.10163975e+00 -1.73895574e+00 3.82951437e-03 1.41578162e+00 7.68551230e-01 2.45806798e-01 8.13192725e-01 0.00000000e+00] [ 0.00000000e+00 5.25077045e-01 -5.59467375e-01 2.88284928e-01 2.03301340e-01 3.50664735e-01 -2.28457734e-01 -2.03823161e+00 -2.31066480e-01 1.77263176e+00 -1.42396959e-02 0.00000000e+00] [ 0.00000000e+00 1.02085459e+00 1.30882704e+00 -2.77875096e-01 -1.60957754e-01 1.06227446e+00 1.12874591e+00 -1.67418623e+00 -7.05485940e-01 -3.70719194e-01 1.89656663e+00 0.00000000e+00] [ 0.00000000e+00 1.18705666e+00 1.42258871e+00 1.39813408e-01 9.56023872e-01 4.25434113e-01 -1.18317568e+00 -1.16348624e+00 2.45619997e-01 8.79104197e-01 8.50987494e-01 0.00000000e+00]] [[ 0.00000000e+00 3.67744446e-01 1.80931166e-01 -9.63172838e-02 3.15072745e-01 1.14529625e-01 1.52901202e-01 -3.89822364e-01 -4.91498530e-01 -1.04020804e-01 3.82850051e-01 0.00000000e+00] [ 0.00000000e+00 -3.95122677e-01 -2.02919886e-01 -2.12311074e-01 -3.20444435e-01 -2.54228771e-01 1.47128880e-01 6.75651655e-02 5.32354321e-03 -2.94648819e-02 -2.99924742e-02 0.00000000e+00] [ 0.00000000e+00 9.05750394e-02 -6.05555959e-02 1.25722185e-01 -5.37367873e-02 2.77384669e-01 1.39648348e-01 2.21378833e-01 8.68183225e-02 9.34472028e-03 3.04597706e-01 0.00000000e+00] [ 0.00000000e+00 2.38032624e-01 2.98239797e-01 -5.81373274e-02 -3.65661532e-02 9.77623276e-03 8.40955228e-02 -1.90829173e-01 -9.42212865e-02 -1.44278318e-01 -3.23088863e-03 0.00000000e+00] [ 0.00000000e+00 1.06401473e-01 1.60525069e-01 6.91984370e-02 -1.18081652e-01 -7.40493089e-02 -1.70160066e-02 1.79964483e-01 1.84099689e-01 -1.97001565e-02 5.04719198e-01 0.00000000e+00] [ 0.00000000e+00 1.81773722e-01 2.19064578e-01 4.05457199e-01 -7.91894868e-02 2.08159775e-01 -2.90418059e-01 5.40760458e-01 -3.54971848e-02 1.69606820e-01 3.17967743e-01 0.00000000e+00] [ 0.00000000e+00 -1.43782750e-01 4.87475172e-02 -1.56545117e-01 1.37761205e-01 
-2.17978612e-01 -6.06301799e-02 1.46697789e-01 3.44836354e-01 1.95620716e-01 1.44757852e-01 0.00000000e+00] [ 0.00000000e+00 -1.78922907e-01 -1.23377427e-01 -2.47139931e-01 1.16440460e-01 1.30418405e-01 1.90153513e-02 -1.83349047e-02 -3.22547346e-01 1.02368154e-01 -1.75818596e-02 0.00000000e+00] [ 0.00000000e+00 -5.66167794e-02 8.74604434e-02 3.39278057e-02 2.59286556e-02 -2.10156694e-01 -2.37228528e-01 -1.30339369e-01 -3.72175723e-02 5.79332262e-02 -9.89593863e-02 0.00000000e+00] [ 0.00000000e+00 -2.31856704e-01 6.40094727e-02 2.37439782e-03 -1.94305982e-02 2.23078087e-01 1.99440103e-02 3.19460064e-01 7.48588219e-02 -4.12391685e-02 1.13360107e-01 0.00000000e+00]]]]; ov_res: [[[[ 0.00000000e+00 5.87631650e-02 2.24110752e-01 -1.60305753e-01 -1.79101173e-02 1.35557383e-01 8.91903341e-02 1.03252143e-01 -8.04056823e-02 -3.68212909e-02 -2.11248230e-02 0.00000000e+00] [ 0.00000000e+00 1.04450628e-01 1.76953420e-03 -7.85903931e-02 -1.00097559e-01 -8.06533918e-02 1.45804763e-01 -1.57377630e-01 8.78040344e-02 -1.30083546e-01 -2.07887311e-03 0.00000000e+00] [ 0.00000000e+00 -3.45465720e-01 -1.16701655e-01 -7.30717480e-02 -8.60265791e-02 2.19190463e-01 -8.03982839e-02 1.42987326e-01 -1.69982225e-01 -2.39334613e-01 -1.51289683e-02 0.00000000e+00] [ 0.00000000e+00 -7.94432834e-02 -2.99870130e-02 -5.64220883e-02 1.57824770e-01 6.81312382e-02 -3.00690550e-02 -6.82929829e-02 -4.96759191e-02 -1.27182573e-01 4.72665913e-02 0.00000000e+00] [ 0.00000000e+00 9.25630108e-02 -3.00657004e-01 -1.51711866e-01 -5.35095446e-02 1.18263908e-01 -6.71704859e-02 1.39597103e-01 2.57949591e-01 -1.05721876e-01 2.71373503e-02 0.00000000e+00] [ 0.00000000e+00 -7.35490546e-02 -4.92613390e-03 -1.21356361e-01 6.63867071e-02 -1.29107505e-01 -1.60821438e-01 5.72425611e-02 6.51222393e-02 4.55859303e-02 -3.76242884e-02 0.00000000e+00] [ 0.00000000e+00 -7.25138783e-02 1.49579281e-02 2.46341992e-03 -1.56320352e-02 7.67029300e-02 -6.27909377e-02 -1.43311352e-01 -3.82587016e-02 -2.58052528e-01 1.91238657e-01 
0.00000000e+00] [ 0.00000000e+00 -1.52401164e-01 -1.53623030e-01 1.97727397e-01 6.82893544e-02 1.32083878e-01 6.11090362e-02 3.02157819e-01 -1.45269215e-01 -1.69951200e-01 2.21881568e-01 0.00000000e+00] [ 0.00000000e+00 -2.63438299e-02 1.37541950e-01 -1.19717441e-01 9.45924148e-02 1.65720627e-01 -1.81300312e-01 -1.84448600e-01 -1.57947510e-01 -9.16537866e-02 1.21793887e-02 0.00000000e+00] [ 0.00000000e+00 -5.89816459e-02 1.66266412e-01 1.17651626e-01 5.80531247e-02 -6.35710508e-02 -1.98840305e-01 4.07609046e-02 1.87278330e-01 -9.88384038e-02 7.68619031e-02 0.00000000e+00]] [[ 0.00000000e+00 -1.78233659e+00 3.76762062e-01 9.06847358e-01 4.15987074e-01 1.44691777e+00 -4.14900690e-01 1.77997780e+00 -9.87698972e-01 -1.20183790e+00 1.87009692e+00 0.00000000e+00] [ 0.00000000e+00 2.69238400e+00 -2.58848608e-01 2.74057150e-01 -7.93629408e-01 -7.26073205e-01 1.71821928e+00 -2.61550963e-01 -1.27609396e+00 -7.84234881e-01 -9.29369509e-01 0.00000000e+00] [ 0.00000000e+00 -2.90905070e+00 -1.61040246e-01 -1.36576295e+00 6.23561963e-02 -8.83298755e-01 5.40934317e-02 1.67761773e-01 -6.81683540e-01 -5.75916886e-01 -1.48672223e+00 0.00000000e+00] [ 0.00000000e+00 -8.30838382e-01 6.38813198e-01 4.32858884e-01 -9.75323081e-01 -3.15405071e-01 -1.07986414e+00 -6.95916042e-02 -7.28533566e-01 4.59287316e-01 2.60038877e+00 0.00000000e+00] [ 0.00000000e+00 -2.78296041e+00 -7.32797682e-01 -6.70631111e-01 1.02155232e+00 1.77952993e+00 -1.31288159e+00 3.42908233e-01 -1.79500115e+00 -8.47707033e-01 -6.66689217e-01 0.00000000e+00] [ 0.00000000e+00 -5.98944068e-01 -5.22988558e-01 -7.16452003e-02 -3.00179005e-01 -1.82026133e-01 -1.09576392e+00 1.52166069e-01 5.31186163e-01 -2.13438797e+00 3.99927795e-02 0.00000000e+00] [ 0.00000000e+00 -8.99385691e-01 1.31042218e+00 1.11937416e+00 1.10163975e+00 -1.73895574e+00 3.82951437e-03 1.41578162e+00 7.68551230e-01 2.45806798e-01 8.13192725e-01 0.00000000e+00] [ 0.00000000e+00 5.25077045e-01 -5.59467375e-01 2.88284928e-01 2.03301340e-01 3.50664735e-01 
-2.28457734e-01 -2.03823161e+00 -2.31066480e-01 1.77263176e+00 -1.42396959e-02 0.00000000e+00] [ 0.00000000e+00 1.02085459e+00 1.30882704e+00 -2.77875096e-01 -1.60957754e-01 1.06227446e+00 1.12874591e+00 -1.67418623e+00 -7.05485940e-01 -3.70719194e-01 1.89656663e+00 0.00000000e+00] [ 0.00000000e+00 1.18705666e+00 1.42258871e+00 1.39813408e-01 9.56023872e-01 4.25434113e-01 -1.18317568e+00 -1.16348624e+00 2.45619997e-01 8.79104197e-01 8.50987494e-01 0.00000000e+00]] [[ 0.00000000e+00 3.67744446e-01 1.80931166e-01 -9.63172838e-02 3.15072745e-01 1.14529625e-01 1.52901202e-01 -3.89822364e-01 -4.91498530e-01 -1.04020804e-01 3.82850051e-01 0.00000000e+00] [ 0.00000000e+00 -3.95122677e-01 -2.02919886e-01 -2.12311074e-01 -3.20444435e-01 -2.54228771e-01 1.47128880e-01 6.75651655e-02 5.32354321e-03 -2.94648819e-02 -2.99924742e-02 0.00000000e+00] [ 0.00000000e+00 9.05750394e-02 -6.05555959e-02 1.25722185e-01 -5.37367873e-02 2.77384669e-01 1.39648348e-01 2.21378833e-01 8.68183225e-02 9.34472028e-03 3.04597706e-01 0.00000000e+00] [ 0.00000000e+00 2.38032624e-01 2.98239797e-01 -5.81373274e-02 -3.65661532e-02 9.77623276e-03 8.40955228e-02 -1.90829173e-01 -9.42212865e-02 -1.44278318e-01 -3.23088863e-03 0.00000000e+00] [ 0.00000000e+00 1.06401473e-01 1.60525069e-01 6.91984370e-02 -1.18081652e-01 -7.40493089e-02 -1.70160066e-02 1.79964483e-01 1.84099689e-01 -1.97001565e-02 5.04719198e-01 0.00000000e+00] [ 0.00000000e+00 1.81773722e-01 2.19064578e-01 4.05457199e-01 -7.91894868e-02 2.08159775e-01 -2.90418059e-01 5.40760458e-01 -3.54971848e-02 1.69606820e-01 3.17967743e-01 0.00000000e+00] [ 0.00000000e+00 -1.43782750e-01 4.87475172e-02 -1.56545117e-01 1.37761205e-01 -2.17978612e-01 -6.06301799e-02 1.46697789e-01 3.44836354e-01 1.95620716e-01 1.44757852e-01 0.00000000e+00] [ 0.00000000e+00 -1.78922907e-01 -1.23377427e-01 -2.47139931e-01 1.16440460e-01 1.30418405e-01 1.90153513e-02 -1.83349047e-02 -3.22547346e-01 1.02368154e-01 -1.75818596e-02 0.00000000e+00] [ 0.00000000e+00 
-5.66167794e-02 8.74604434e-02 3.39278057e-02 2.59286556e-02 -2.10156694e-01 -2.37228528e-01 -1.30339369e-01 -3.72175723e-02 5.79332262e-02 -9.89593863e-02 0.00000000e+00] [ 0.00000000e+00 -2.31856704e-01 6.40094727e-02 2.37439782e-03 -1.94305982e-02 2.23078087e-01 1.99440103e-02 3.19460064e-01 7.48588219e-02 -4.12391685e-02 1.13360107e-01 0.00000000e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [1, 0], 'dilations': [2, 2], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_587.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.7931 (2,1,.,.) = -1.5759 (3,1,.,.) = -1.9230 [ CPUFloatType{3,1,1,1} ]]() %11 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%11) fw_re: [[[[ 0.66149455 0.39042833 2.2737565 -1.477863 2.4953 1.2697164 -0.76383 -1.7308519 -1.7024528 -0.6145136 ] [ 0.7956705 -1.3822206 0.08299827 2.6051898 -1.7822983 1.1155009 2.0833383 -2.1803653 -1.1627299 1.1633615 ] [ 1.3082103 -1.1506859 2.1441414 0.58903503 0.72110134 0.819648 0.21147835 1.0941284 1.6388042 4.2091703 ] [ 6.170058 -0.46171287 -2.4686477 -1.7652624 0.4288662 -2.747968 -2.1265717 0.9651681 -0.3119354 -2.3155184 ] [ 2.4801793 2.1767502 -1.9298368 -2.1944542 -0.43193677 -1.6754855 -1.2539666 -2.8353446 -1.1829277 2.1195753 ] [-0.06879367 -0.22679065 1.0653133 -1.1540331 -0.48229063 -0.14589143 -0.42203948 -0.537626 -3.9801745 4.0953755 ] [-0.18084708 -1.1253865 3.1802855 0.46578377 4.069193 -0.9947571 -5.268199 4.173076 1.7000517 2.4153173 ] [ 2.4582858 0.7034141 0.9433797 0.23838076 -1.1954566 2.1376135 0.6389627 -1.3988053 
1.574277 -3.5201156 ]] [[-1.0320387 1.4983264 0.9231394 -1.1196983 -2.2012541 -0.7158926 -0.12737906 -0.46996096 -0.36846766 -0.1871748 ] [ 0.29597685 2.2469254 2.6589735 -2.6220577 -1.358899 -0.90067065 -1.6161708 0.41086215 -1.3162618 0.9864826 ] [-0.6723523 -0.349524 2.221276 -0.61885613 -3.2661486 2.0057185 -1.7514582 1.5157928 0.27913418 -0.4390877 ] [ 1.5154352 -1.3313024 -0.036792 2.038765 -0.59278107 -2.237928 0.44368473 -0.19210665 1.1025884 -1.7990321 ] [-0.66085863 -0.31876278 -3.5954363 -0.99612683 -1.6903659 0.5284989 1.2129769 1.8954871 1.5489088 -0.842908 ] [ 2.2620184 1.6948701 -0.18568353 -0.6234526 1.8252053 -2.5969048 -1.1933916 -0.08509583 -1.8034015 -1.5171317 ] [-0.8078843 0.56918854 -0.7104974 0.89706165 -4.168637 -0.84508044 -0.43054762 -3.1958857 -0.2753442 2.1547565 ] [-1.0407237 -0.64408493 1.1606466 0.34209698 3.0106137 -0.8273815 1.5244218 1.0478469 -0.59393454 0.45147255]] [[ 1.2208319 -3.6102943 2.4556649 -0.11305835 1.5189672 0.1649637 1.565364 -1.0197253 -1.0454552 -0.7294358 ] [-1.5015299 1.0700889 3.0593822 -3.0147133 5.79306 -2.4050345 0.19399917 -0.4125945 -0.04783443 0.7264491 ] [ 1.9749556 1.7276804 -2.440473 1.3498594 1.1175733 -0.76466227 1.6619763 4.1257725 0.18937029 -0.6798199 ] [ 0.02207671 1.9156506 -1.1574147 -3.1069698 0.29043934 2.8353117 -3.951541 1.0023086 -1.1305474 -0.61637336] [ 1.3918695 0.37399167 -1.7213818 0.8366173 -1.502611 -3.9499927 0.84017783 1.1928452 -1.9847599 3.0893753 ] [ 0.5031965 0.12017 -3.36002 -2.0690577 0.7967482 1.476709 -2.6145222 -0.56800866 0.6889389 0.67175037] [ 1.0409999 0.35104883 2.656955 -0.9510119 2.1773872 1.617759 -0.54956365 0.22185399 -2.9893768 1.2339997 ] [ 2.9898093 -0.07795652 1.1234794 -1.0819147 0.409854 0.15736642 -1.4735093 0.03811401 0.05454697 1.2335471 ]]]]; ov_res: [[[[ 0.66149455 0.39042833 2.2737565 -1.477863 2.4953 1.2697164 -0.76383 -1.7308519 -1.7024528 -0.6145136 ] [ 0.7956705 -1.3822206 0.08299827 2.6051898 -1.7822983 1.1155009 2.0833383 -2.1803653 -1.1627299 
1.1633615 ] [ 1.3082103 -1.1506859 2.1441414 0.58903503 0.72110134 0.819648 0.21147835 1.0941284 1.6388042 4.2091703 ] [ 6.170058 -0.46171287 -2.4686477 -1.7652624 0.4288662 -2.747968 -2.1265717 0.9651681 -0.3119354 -2.3155184 ] [ 2.4801793 2.1767502 -1.9298368 -2.1944542 -0.43193677 -1.6754855 -1.2539666 -2.8353446 -1.1829277 2.1195753 ] [-0.06879367 -0.22679065 1.0653133 -1.1540331 -0.48229063 -0.14589143 -0.42203948 -0.537626 -3.9801745 4.0953755 ] [-0.18084708 -1.1253865 3.1802855 0.46578377 4.069193 -0.9947571 -5.268199 4.173076 1.7000517 2.4153173 ] [ 2.4582858 0.7034141 0.9433797 0.23838076 -1.1954566 2.1376135 0.6389627 -1.3988053 1.574277 -3.5201156 ]] [[-1.0320387 1.4983264 0.9231394 -1.1196983 -2.2012541 -0.7158926 -0.12737906 -0.46996096 -0.36846766 -0.1871748 ] [ 0.29597685 2.2469254 2.6589735 -2.6220577 -1.358899 -0.90067065 -1.6161708 0.41086215 -1.3162618 0.9864826 ] [-0.6723523 -0.349524 2.221276 -0.61885613 -3.2661486 2.0057185 -1.7514582 1.5157928 0.27913418 -0.4390877 ] [ 1.5154352 -1.3313024 -0.036792 2.038765 -0.59278107 -2.237928 0.44368473 -0.19210665 1.1025884 -1.7990321 ] [-0.66085863 -0.31876278 -3.5954363 -0.99612683 -1.6903659 0.5284989 1.2129769 1.8954871 1.5489088 -0.842908 ] [ 2.2620184 1.6948701 -0.18568353 -0.6234526 1.8252053 -2.5969048 -1.1933916 -0.08509583 -1.8034015 -1.5171317 ] [-0.8078843 0.56918854 -0.7104974 0.89706165 -4.168637 -0.84508044 -0.43054762 -3.1958857 -0.2753442 2.1547565 ] [-1.0407237 -0.64408493 1.1606466 0.34209698 3.0106137 -0.8273815 1.5244218 1.0478469 -0.59393454 0.45147255]] [[ 1.2208319 -3.6102943 2.4556649 -0.11305835 1.5189672 0.1649637 1.565364 -1.0197253 -1.0454552 -0.7294358 ] [-1.5015299 1.0700889 3.0593822 -3.0147133 5.79306 -2.4050345 0.19399917 -0.4125945 -0.04783443 0.7264491 ] [ 1.9749556 1.7276804 -2.440473 1.3498594 1.1175733 -0.76466227 1.6619763 4.1257725 0.18937029 -0.6798199 ] [ 0.02207671 1.9156506 -1.1574147 -3.1069698 0.29043934 2.8353117 -3.951541 1.0023086 -1.1305474 -0.61637336] 
[ 1.3918695 0.37399167 -1.7213818 0.8366173 -1.502611 -3.9499927 0.84017783 1.1928452 -1.9847599 3.0893753 ] [ 0.5031965 0.12017 -3.36002 -2.0690577 0.7967482 1.476709 -2.6145222 -0.56800866 0.6889389 0.67175037] [ 1.0409999 0.35104883 2.656955 -0.9510119 2.1773872 1.617759 -0.54956365 0.22185399 -2.9893768 1.2339997 ] [ 2.9898093 -0.07795652 1.1234794 -1.0819147 0.409854 0.15736642 -1.4735093 0.03811401 0.05454697 1.2335471 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [2, 2], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_589.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.5616 (2,1,.,.) = -0.2054 (3,1,.,.) = 0.1630 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 8.45287740e-01 -6.67749524e-01 5.36911130e-01 5.81932604e-01 2.38889515e-01 -9.48178530e-01 -8.64419281e-01 -2.79868662e-01 5.13318777e-01 4.21855062e-01] [ 1.09678900e+00 3.82514708e-02 2.75793850e-01 -2.48894468e-01 -4.55857545e-01 -6.72878996e-02 -2.10478574e-01 -2.73563296e-01 1.11796856e+00 5.60941041e-01] [-1.09589756e-01 -2.01890822e-02 5.20262599e-01 2.32314274e-01 1.47533774e-01 -5.19335985e-01 -4.31712836e-01 3.01825106e-01 -1.03970468e+00 -4.40022051e-01] [ 8.91066372e-01 8.33042935e-02 2.67329514e-01 -2.32926652e-01 -6.73214316e-01 -1.12482332e-01 -9.55020964e-01 -4.83059525e-01 -3.60004306e-01 6.09461129e-01] [ 1.10743776e-01 7.14407116e-02 3.34851414e-01 5.18384635e-01 7.51081407e-01 2.22411662e-01 -7.76037097e-01 -4.54537392e-01 -2.21760213e-01 -4.41937625e-01] [ 3.52911144e-01 -9.00334001e-01 7.77448043e-02 3.29264104e-01 -1.01465292e-01 -1.07786071e+00 3.54456872e-01 8.59316051e-01 -6.51156381e-02 1.24425143e-01] [ 5.91453537e-02 -1.08522499e+00 
-1.10435948e-01 2.60741472e-01 3.72538954e-01 -8.40458572e-02 -6.60134017e-01 1.41055822e-01 -5.36822855e-01 8.43135655e-01] [ 1.31462216e-01 1.83133140e-01 2.43167371e-01 2.90972769e-01 9.99192774e-01 -2.93910116e-01 5.51909387e-01 -9.91749346e-01 9.80756402e-01 5.54713488e-01] [ 4.46889102e-01 -9.21411157e-01 -3.16886127e-01 -6.17053509e-01 4.22812402e-01 6.09563828e-01 2.25160837e-01 1.19459286e-01 -5.62802851e-01 -2.08673775e-01] [-3.77211541e-01 8.01297069e-01 -6.44015253e-01 -1.02037477e+00 9.45168883e-02 -8.92767012e-01 -1.95376948e-01 4.06733841e-01 7.70796418e-01 -5.58113515e-01]] [[-1.01027839e-01 1.37775436e-01 2.54406966e-03 1.73711665e-02 1.29399180e-01 -1.41903907e-01 -2.74542481e-01 2.92174488e-01 1.05676778e-01 -1.08333372e-01] [ 6.57490566e-02 4.22564000e-01 1.39711663e-01 -5.36366478e-02 -2.06385911e-01 1.37575492e-01 -6.99802712e-02 1.90588430e-01 8.60592127e-02 -9.43229049e-02] [ 1.66924864e-01 1.99819148e-01 3.35644811e-01 3.88232395e-02 1.85338967e-03 1.57832161e-01 -1.65070832e-01 -4.17639166e-02 1.25882059e-01 -8.91533867e-02] [-2.29102775e-01 -1.52190760e-01 1.08875930e-01 6.69626817e-02 -6.44181222e-02 -1.00652069e-01 3.57109532e-02 1.10226743e-01 -1.36832952e-01 3.05749536e-01] [-6.55920357e-02 -9.92167071e-02 -2.38277167e-01 1.37400499e-03 2.11877823e-01 1.36098517e-02 -2.99684424e-02 5.13954684e-02 7.35121667e-02 -3.15398186e-01] [-3.07348520e-01 4.63404274e-03 -1.56837717e-01 8.11687335e-02 -4.26818520e-01 9.25168116e-03 3.39708626e-01 -1.63238913e-01 -1.32782608e-02 2.25951299e-01] [ 8.65752175e-02 -4.87778969e-02 -1.40400499e-01 -6.68857172e-02 2.20753267e-01 -2.00658426e-01 -1.55489445e-01 7.79315457e-02 -8.43455791e-02 -1.98619753e-01] [ 1.48459300e-01 9.03394148e-02 1.46837860e-01 -2.82008111e-01 8.44799280e-02 6.30550385e-02 2.20402988e-04 -4.76107001e-02 -3.50312819e-03 8.07513297e-02] [-9.58686695e-02 3.70734602e-01 1.94897205e-01 -2.97661453e-01 6.61022812e-02 -1.32423222e-01 -6.36990294e-02 -3.53457034e-01 1.53000187e-02 
-1.01743042e-01] [-4.73085716e-02 -1.94378495e-01 -3.63459960e-02 -1.01722971e-01 2.75972158e-01 2.17477143e-01 -1.78238153e-01 2.41710275e-01 3.05470496e-01 -5.34307025e-02]] [[ 2.09050685e-01 4.70821410e-02 1.25613296e-02 2.80156791e-01 -1.67312667e-01 7.35290274e-02 -1.32211939e-01 -8.16652700e-02 -1.34738594e-01 -1.85963109e-01] [-1.04124665e-01 -2.73556402e-03 -3.34687263e-01 1.28767593e-02 -1.54969513e-01 1.07349977e-01 8.89371634e-02 -4.47355323e-02 -4.39776778e-01 -1.33164927e-01] [-1.97908878e-02 2.24466518e-01 -2.29112387e-01 2.73605019e-01 1.74917623e-01 -1.28333107e-01 -2.05244318e-01 2.12170392e-01 -1.02849826e-01 -9.12683010e-02] [ 9.21503752e-02 -2.48012513e-01 -8.52033645e-02 -2.37054244e-01 3.41528170e-02 -1.74589515e-01 2.72505909e-01 -1.16572626e-01 3.36667776e-01 -1.67169631e-01] [-1.01119071e-01 1.22586966e-01 -2.07720026e-01 -1.64675608e-01 -6.86083883e-02 4.40982908e-01 2.54570581e-02 2.28215963e-01 5.18424399e-02 -2.18077332e-01] [-1.25486040e-02 1.91958565e-02 5.82430735e-02 -2.99938977e-01 -6.46516830e-02 -1.19929664e-01 -1.22641772e-01 2.19416264e-02 3.21827717e-02 -1.59569308e-02] [-2.42392153e-01 1.34436086e-01 -5.84866740e-02 -1.82360619e-01 1.01212606e-01 -2.02436686e-01 -1.64094374e-01 -1.20549008e-01 -8.38232785e-03 -1.47688925e-01] [-3.41131508e-01 -5.41620255e-01 2.68254969e-02 1.74887657e-01 1.57330986e-02 1.05517423e-02 1.23255193e-01 -2.26535648e-01 -3.83768342e-02 -3.91477570e-02] [ 1.66533127e-01 -4.58403379e-02 1.59917995e-01 -1.80604905e-01 1.33896708e-01 -1.03615709e-01 2.88637727e-01 2.18735889e-01 -3.38329673e-02 1.41234383e-01] [ 1.69074193e-01 7.70879015e-02 -4.43997048e-02 1.49671465e-01 -3.15979905e-02 1.92541495e-01 -1.09306410e-01 1.24773212e-01 2.22417414e-01 -4.02809471e-01]]]]; ov_res: [[[[ 8.45287740e-01 -6.67749524e-01 5.36911130e-01 5.81932604e-01 2.38889515e-01 -9.48178530e-01 -8.64419281e-01 -2.79868662e-01 5.13318777e-01 4.21855062e-01] [ 1.09678900e+00 3.82514708e-02 2.75793850e-01 -2.48894468e-01 
-4.55857545e-01 -6.72878996e-02 -2.10478574e-01 -2.73563296e-01 1.11796856e+00 5.60941041e-01] [-1.09589756e-01 -2.01890822e-02 5.20262599e-01 2.32314274e-01 1.47533774e-01 -5.19335985e-01 -4.31712836e-01 3.01825106e-01 -1.03970468e+00 -4.40022051e-01] [ 8.91066372e-01 8.33042935e-02 2.67329514e-01 -2.32926652e-01 -6.73214316e-01 -1.12482332e-01 -9.55020964e-01 -4.83059525e-01 -3.60004306e-01 6.09461129e-01] [ 1.10743776e-01 7.14407116e-02 3.34851414e-01 5.18384635e-01 7.51081407e-01 2.22411662e-01 -7.76037097e-01 -4.54537392e-01 -2.21760213e-01 -4.41937625e-01] [ 3.52911144e-01 -9.00334001e-01 7.77448043e-02 3.29264104e-01 -1.01465292e-01 -1.07786071e+00 3.54456872e-01 8.59316051e-01 -6.51156381e-02 1.24425143e-01] [ 5.91453537e-02 -1.08522499e+00 -1.10435948e-01 2.60741472e-01 3.72538954e-01 -8.40458572e-02 -6.60134017e-01 1.41055822e-01 -5.36822855e-01 8.43135655e-01] [ 1.31462216e-01 1.83133140e-01 2.43167371e-01 2.90972769e-01 9.99192774e-01 -2.93910116e-01 5.51909387e-01 -9.91749346e-01 9.80756402e-01 5.54713488e-01] [ 4.46889102e-01 -9.21411157e-01 -3.16886127e-01 -6.17053509e-01 4.22812402e-01 6.09563828e-01 2.25160837e-01 1.19459286e-01 -5.62802851e-01 -2.08673775e-01] [-3.77211541e-01 8.01297069e-01 -6.44015253e-01 -1.02037477e+00 9.45168883e-02 -8.92767012e-01 -1.95376948e-01 4.06733841e-01 7.70796418e-01 -5.58113515e-01]] [[-1.01027839e-01 1.37775436e-01 2.54406966e-03 1.73711665e-02 1.29399180e-01 -1.41903907e-01 -2.74542481e-01 2.92174488e-01 1.05676778e-01 -1.08333372e-01] [ 6.57490566e-02 4.22564000e-01 1.39711663e-01 -5.36366478e-02 -2.06385911e-01 1.37575492e-01 -6.99802712e-02 1.90588430e-01 8.60592127e-02 -9.43229049e-02] [ 1.66924864e-01 1.99819148e-01 3.35644811e-01 3.88232395e-02 1.85338967e-03 1.57832161e-01 -1.65070832e-01 -4.17639166e-02 1.25882059e-01 -8.91533867e-02] [-2.29102775e-01 -1.52190760e-01 1.08875930e-01 6.69626817e-02 -6.44181222e-02 -1.00652069e-01 3.57109532e-02 1.10226743e-01 -1.36832952e-01 3.05749536e-01] [-6.55920357e-02 
-9.92167071e-02 -2.38277167e-01 1.37400499e-03 2.11877823e-01 1.36098517e-02 -2.99684424e-02 5.13954684e-02 7.35121667e-02 -3.15398186e-01] [-3.07348520e-01 4.63404274e-03 -1.56837717e-01 8.11687335e-02 -4.26818520e-01 9.25168116e-03 3.39708626e-01 -1.63238913e-01 -1.32782608e-02 2.25951299e-01] [ 8.65752175e-02 -4.87778969e-02 -1.40400499e-01 -6.68857172e-02 2.20753267e-01 -2.00658426e-01 -1.55489445e-01 7.79315457e-02 -8.43455791e-02 -1.98619753e-01] [ 1.48459300e-01 9.03394148e-02 1.46837860e-01 -2.82008111e-01 8.44799280e-02 6.30550385e-02 2.20402988e-04 -4.76107001e-02 -3.50312819e-03 8.07513297e-02] [-9.58686695e-02 3.70734602e-01 1.94897205e-01 -2.97661453e-01 6.61022812e-02 -1.32423222e-01 -6.36990294e-02 -3.53457034e-01 1.53000187e-02 -1.01743042e-01] [-4.73085716e-02 -1.94378495e-01 -3.63459960e-02 -1.01722971e-01 2.75972158e-01 2.17477143e-01 -1.78238153e-01 2.41710275e-01 3.05470496e-01 -5.34307025e-02]] [[ 2.09050685e-01 4.70821410e-02 1.25613296e-02 2.80156791e-01 -1.67312667e-01 7.35290274e-02 -1.32211939e-01 -8.16652700e-02 -1.34738594e-01 -1.85963109e-01] [-1.04124665e-01 -2.73556402e-03 -3.34687263e-01 1.28767593e-02 -1.54969513e-01 1.07349977e-01 8.89371634e-02 -4.47355323e-02 -4.39776778e-01 -1.33164927e-01] [-1.97908878e-02 2.24466518e-01 -2.29112387e-01 2.73605019e-01 1.74917623e-01 -1.28333107e-01 -2.05244318e-01 2.12170392e-01 -1.02849826e-01 -9.12683010e-02] [ 9.21503752e-02 -2.48012513e-01 -8.52033645e-02 -2.37054244e-01 3.41528170e-02 -1.74589515e-01 2.72505909e-01 -1.16572626e-01 3.36667776e-01 -1.67169631e-01] [-1.01119071e-01 1.22586966e-01 -2.07720026e-01 -1.64675608e-01 -6.86083883e-02 4.40982908e-01 2.54570581e-02 2.28215963e-01 5.18424399e-02 -2.18077332e-01] [-1.25486040e-02 1.91958565e-02 5.82430735e-02 -2.99938977e-01 -6.46516830e-02 -1.19929664e-01 -1.22641772e-01 2.19416264e-02 3.21827717e-02 -1.59569308e-02] [-2.42392153e-01 1.34436086e-01 -5.84866740e-02 -1.82360619e-01 1.01212606e-01 -2.02436686e-01 -1.64094374e-01 
-1.20549008e-01 -8.38232785e-03 -1.47688925e-01] [-3.41131508e-01 -5.41620255e-01 2.68254969e-02 1.74887657e-01 1.57330986e-02 1.05517423e-02 1.23255193e-01 -2.26535648e-01 -3.83768342e-02 -3.91477570e-02] [ 1.66533127e-01 -4.58403379e-02 1.59917995e-01 -1.80604905e-01 1.33896708e-01 -1.03615709e-01 2.88637727e-01 2.18735889e-01 -3.38329673e-02 1.41234383e-01] [ 1.69074193e-01 7.70879015e-02 -4.43997048e-02 1.49671465e-01 -3.15979905e-02 1.92541495e-01 -1.09306410e-01 1.24773212e-01 2.22417414e-01 -4.02809471e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 1], 'bias_shape': [1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_591.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -2.8067 (2,1,.,.) = -0.7223 (3,1,.,.) = -1.4181 [ CPUFloatType{3,1,1,1} ]]() %11 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%11) fw_re: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.05403429 -4.174418 -0.15654026 2.9153225 1.1474359 2.2972481 -1.7080609 -4.733381 3.3881202 -3.2669737 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-10.527515 -0.5014382 -1.6968313 3.4809804 -2.614936 -2.277981 2.5900548 2.4726856 2.3755238 -4.1974683 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 3.9297454 0.79174775 6.698982 4.2266192 1.144067 -0.21459427 -3.1851528 0.50694674 3.0652611 7.0158587 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ -0.01272017 -1.5607502 -2.3429663 6.9718785 5.7930017 -0.62619203 0.07190559 4.0468464 0.59514886 -0.11250189] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 2.0055594 -7.9745426 -3.5050495 5.0978255 0.8029781 6.913215 -0.1631113 2.6713417 3.7138813 3.903109 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 
] [ -1.1382821 -8.105962 1.2313544 3.3615596 -6.851039 -1.5139992 3.1523736 2.102641 -2.9208724 0.75959826] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 2.0207274 -3.7705998 -0.11341194 -2.5380142 -8.715492 -4.1843066 -5.8566785 2.4014099 -0.53757036 1.5001239 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ -1.2753371 -0.6961926 1.2806779 -4.7137585 4.03336 0.6395495 2.3329616 -2.5810575 -4.1862826 4.6574636 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]]]; ov_res: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.05403429 -4.174418 -0.15654026 2.9153225 1.1474359 2.2972481 -1.7080609 -4.733381 3.3881202 -3.2669737 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-10.527515 -0.5014382 -1.6968313 3.4809804 -2.614936 -2.277981 2.5900548 2.4726856 2.3755238 -4.1974683 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 3.9297454 0.79174775 6.698982 4.2266192 1.144067 -0.21459427 -3.1851528 0.50694674 3.0652611 7.0158587 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ -0.01272017 -1.5607502 -2.3429663 6.9718785 5.7930017 -0.62619203 0.07190559 4.0468464 0.59514886 -0.11250189] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 2.0055594 -7.9745426 -3.5050495 5.0978255 0.8029781 6.913215 -0.1631113 2.6713417 3.7138813 3.903109 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ -1.1382821 -8.105962 1.2313544 3.3615596 -6.851039 -1.5139992 3.1523736 2.102641 -2.9208724 0.75959826] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 2.0207274 -3.7705998 -0.11341194 -2.5380142 -8.715492 -4.1843066 -5.8566785 2.4014099 -0.53757036 1.5001239 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ -1.2753371 -0.6961926 1.2806779 -4.7137585 4.03336 0.6395495 2.3329616 -2.5810575 -4.1862826 4.6574636 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} 0] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_593.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.7603 (2,1,.,.) = -0.1387 (3,1,.,.) = 0.6117 (1,2,.,.) = 0.8785 (2,2,.,.) = 0.1211 (3,2,.,.) = 0.01 * 8.1341 (1,3,.,.) = 3.1226 (2,3,.,.) = 0.001 * 5.0335 (3,3,.,.) = 0.01 * 7.7806 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 4.37379503e+00 -3.70363045e+00 -6.25598669e+00 -8.29614639e-01 -5.50978541e-01 -2.22478852e-01 1.61656606e+00 -1.43596864e+00 3.98708075e-01 -3.37152004e-01] [-2.49582005e+00 5.25441170e+00 -3.77911639e+00 2.21003503e-01 -8.08392167e-01 2.76092434e+00 -2.37928653e+00 4.21879387e+00 -1.28780818e+00 2.95860815e+00] [ 8.40079069e-01 -1.95261157e+00 4.04229355e+00 -3.41445518e+00 2.92268348e+00 -1.60633063e+00 5.35845339e-01 1.85955673e-01 -3.71135473e+00 1.36899722e+00] [ 1.56682289e+00 4.49444723e+00 -5.95147324e+00 1.17341995e+00 2.57567954e+00 -2.47701764e+00 6.98202133e-01 3.44738692e-01 -2.76386762e+00 1.67670047e+00] [ 3.41702032e+00 -6.12291718e+00 9.88646150e-01 -1.07418811e+00 -4.53284359e+00 -9.74636674e-01 -2.81505156e+00 5.27583933e+00 5.77470511e-02 3.09659696e+00]] [[ 4.28649366e-01 5.45307295e-03 -5.02033681e-02 1.04336798e-01 
5.13680242e-02 -7.70068495e-03 3.27997617e-02 -4.78442237e-02 -1.05919972e-01 -2.63198689e-02] [-2.86480915e-02 1.61291033e-01 3.11602503e-01 -1.50837768e-02 -7.69893639e-03 2.70592719e-01 6.10480905e-02 -3.46931666e-02 -2.13160470e-01 -2.78930855e-03] [-1.35686249e-01 -2.11631402e-01 8.31122473e-02 -3.23016167e-01 4.91466939e-01 1.35635883e-01 2.32091203e-01 1.10518642e-01 -1.19059950e-01 -1.68782368e-01] [ 7.73581713e-02 1.74983308e-01 -1.69208765e-01 -1.42904222e-01 1.36056423e-01 -1.09280668e-01 -1.43048406e-01 -1.48637384e-01 1.14133544e-01 -2.12553233e-01] [-7.88105577e-02 -1.79932192e-01 1.57212280e-02 -2.69071430e-01 2.15994447e-01 -2.21031114e-01 -1.82931170e-01 1.95365533e-01 -1.64714918e-01 1.71616331e-01]] [[-4.84757900e-01 7.54678913e-04 7.67640710e-01 -5.82180858e-01 -3.73323917e-01 1.71331435e-01 2.48326197e-01 -7.78998137e-01 4.78372544e-01 7.10330382e-02] [-2.75553226e-01 -3.97236466e-01 -7.18137562e-01 6.39696836e-01 -3.53292853e-01 -3.16566914e-01 -5.87358117e-01 4.63105887e-01 7.21722245e-01 1.06481719e+00] [ 4.84196991e-01 9.30293381e-01 -4.53407951e-02 5.91115773e-01 -7.78072059e-01 -4.60735738e-01 -1.20289040e+00 -1.40066221e-01 7.68727139e-02 6.21419728e-01] [-6.57475173e-01 -1.97065115e-01 1.97704598e-01 1.77824092e+00 -1.13565475e-02 3.24079633e-01 6.63609684e-01 1.15161502e+00 -3.47431093e-01 3.16493839e-01] [ 1.19991899e+00 4.83088642e-01 3.70933741e-01 5.84713697e-01 -6.67197585e-01 1.10218179e+00 9.63809192e-01 -7.24909604e-01 8.69628727e-01 -2.30590943e-02]]]]; ov_res: [[[[ 4.37379503e+00 -3.70363045e+00 -6.25598669e+00 -8.29614639e-01 -5.50978541e-01 -2.22478852e-01 1.61656606e+00 -1.43596864e+00 3.98708075e-01 -3.37152004e-01] [-2.49582005e+00 5.25441170e+00 -3.77911639e+00 2.21003503e-01 -8.08392167e-01 2.76092434e+00 -2.37928653e+00 4.21879387e+00 -1.28780818e+00 2.95860815e+00] [ 8.40079069e-01 -1.95261157e+00 4.04229355e+00 -3.41445518e+00 2.92268348e+00 -1.60633063e+00 5.35845339e-01 1.85955673e-01 -3.71135473e+00 
1.36899722e+00] [ 1.56682289e+00 4.49444723e+00 -5.95147324e+00 1.17341995e+00 2.57567954e+00 -2.47701764e+00 6.98202133e-01 3.44738692e-01 -2.76386762e+00 1.67670047e+00] [ 3.41702032e+00 -6.12291718e+00 9.88646150e-01 -1.07418811e+00 -4.53284359e+00 -9.74636674e-01 -2.81505156e+00 5.27583933e+00 5.77470511e-02 3.09659696e+00]] [[ 4.28649366e-01 5.45307295e-03 -5.02033681e-02 1.04336798e-01 5.13680242e-02 -7.70068495e-03 3.27997617e-02 -4.78442237e-02 -1.05919972e-01 -2.63198689e-02] [-2.86480915e-02 1.61291033e-01 3.11602503e-01 -1.50837768e-02 -7.69893639e-03 2.70592719e-01 6.10480905e-02 -3.46931666e-02 -2.13160470e-01 -2.78930855e-03] [-1.35686249e-01 -2.11631402e-01 8.31122473e-02 -3.23016167e-01 4.91466939e-01 1.35635883e-01 2.32091203e-01 1.10518642e-01 -1.19059950e-01 -1.68782368e-01] [ 7.73581713e-02 1.74983308e-01 -1.69208765e-01 -1.42904222e-01 1.36056423e-01 -1.09280668e-01 -1.43048406e-01 -1.48637384e-01 1.14133544e-01 -2.12553233e-01] [-7.88105577e-02 -1.79932192e-01 1.57212280e-02 -2.69071430e-01 2.15994447e-01 -2.21031114e-01 -1.82931170e-01 1.95365533e-01 -1.64714918e-01 1.71616331e-01]] [[-4.84757900e-01 7.54678913e-04 7.67640710e-01 -5.82180858e-01 -3.73323917e-01 1.71331435e-01 2.48326197e-01 -7.78998137e-01 4.78372544e-01 7.10330382e-02] [-2.75553226e-01 -3.97236466e-01 -7.18137562e-01 6.39696836e-01 -3.53292853e-01 -3.16566914e-01 -5.87358117e-01 4.63105887e-01 7.21722245e-01 1.06481719e+00] [ 4.84196991e-01 9.30293381e-01 -4.53407951e-02 5.91115773e-01 -7.78072059e-01 -4.60735738e-01 -1.20289040e+00 -1.40066221e-01 7.68727139e-02 6.21419728e-01] [-6.57475173e-01 -1.97065115e-01 1.97704598e-01 1.77824092e+00 -1.13565475e-02 3.24079633e-01 6.63609684e-01 1.15161502e+00 -3.47431093e-01 3.16493839e-01] [ 1.19991899e+00 4.83088642e-01 3.70933741e-01 5.84713697e-01 -6.67197585e-01 1.10218179e+00 9.63809192e-01 -7.24909604e-01 8.69628727e-01 -2.30590943e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} 0] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_595.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.6174 (2,1,.,.) = -0.2155 (3,1,.,.) = 0.8641 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[-1.0509645 0. -0.5879073 0. 1.5678726 0. -0.52386683 0. -0.81183857 0. 1.9400569 0. 0.835052 0. 1.197428 0. 0.83231604 0. -0.4601322 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-1.7646693 0. -1.210063 0. -0.6582113 0. 0.74831015 0. 1.1023448 0. -0.33821678 0. -0.39691523 0. -1.0204142 0. -0.34253955 0. -0.35147196] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-2.2595387 0. -1.4292314 0. 2.8395207 0. 0.66118705 0. 1.2263145 0. -0.0287399 0. 0.50224906 0. 2.2220619 0. -0.64360076 0. 0.5683942 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.63726276 0. 1.2205796 0. 0.36139202 0. -1.5906681 0. -0.446621 0. -1.8076329 0. 0.7621044 0. -0.7569533 0. 1.308601 0. -0.22387803] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 1.6229905 0. 0.3809695 0. -0.44471467 0. 1.6603239 0. 1.0539143 0. -1.4930134 0. -2.2015467 0. -0.7817707 0. 0.91252226 0. 
-2.6485276 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.05709836 0. -1.525989 0. -0.60290235 0. 0.93874425 0. -1.2705328 0. -1.2901323 0. -0.5036211 0. 0.70355374 0. 1.240284 0. -1.8208715 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.83661973 0. 1.2864944 0. -0.8118561 0. -2.0823169 0. 0.32117277 0. 0.682336 0. -0.824112 0. -0.2335204 0. -0.21726285 0. 1.0173813 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-1.0061389 0. -1.3212618 0. 1.1822593 0. -1.8419251 0. 0.16811398 0. -1.2156643 0. 0.21032868 0. -0.7119203 0. 0.9183191 0. -0.4392874 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.79476213 0. -1.7645185 0. 2.3472166 0. 0.6121214 0. -1.0023693 0. -0.6953408 0. -1.0524542 0. 0.8071273 0. -0.3880722 0. 0.83166325] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-1.2313222 0. -0.21267633 0. -0.15172447 0. -0.21561894 0. 0.09976335 0. -1.0009643 0. -0.9704383 0. -1.1332036 0. -1.7130687 0. 0.7891828 ]]]]; ov_res: [[[[-1.0509645 0. -0.5879073 0. 1.5678726 0. -0.52386683 0. -0.81183857 0. 1.9400569 0. 0.835052 0. 1.197428 0. 0.83231604 0. -0.4601322 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-1.7646693 0. -1.210063 0. -0.6582113 0. 0.74831015 0. 1.1023448 0. -0.33821678 0. -0.39691523 0. -1.0204142 0. -0.34253955 0. -0.35147196] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-2.2595387 0. -1.4292314 0. 2.8395207 0. 0.66118705 0. 1.2263145 0. -0.0287399 0. 0.50224906 0. 2.2220619 0. -0.64360076 0. 0.5683942 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.63726276 0. 1.2205796 0. 0.36139202 0. -1.5906681 0. -0.446621 0. -1.8076329 0. 0.7621044 0. -0.7569533 0. 1.308601 0. -0.22387803] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 1.6229905 0. 0.3809695 0. -0.44471467 0. 1.6603239 0. 1.0539143 0. -1.4930134 0. -2.2015467 0. -0.7817707 0. 0.91252226 0. -2.6485276 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 
0. 0. 0. 0. 0. 0. ] [ 0.05709836 0. -1.525989 0. -0.60290235 0. 0.93874425 0. -1.2705328 0. -1.2901323 0. -0.5036211 0. 0.70355374 0. 1.240284 0. -1.8208715 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.83661973 0. 1.2864944 0. -0.8118561 0. -2.0823169 0. 0.32117277 0. 0.682336 0. -0.824112 0. -0.2335204 0. -0.21726285 0. 1.0173813 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-1.0061389 0. -1.3212618 0. 1.1822593 0. -1.8419251 0. 0.16811398 0. -1.2156643 0. 0.21032868 0. -0.7119203 0. 0.9183191 0. -0.4392874 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.79476213 0. -1.7645185 0. 2.3472166 0. 0.6121214 0. -1.0023693 0. -0.6953408 0. -1.0524542 0. 0.8071273 0. -0.3880722 0. 0.83166325] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-1.2313222 0. -0.21267633 0. -0.15172447 0. -0.21561894 0. 0.09976335 0. -1.0009643 0. -0.9704383 0. -1.1332036 0. -1.7130687 0. 0.7891828 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 2], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_597.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.8571 (2,1,.,.) = -0.1448 (3,1,.,.) = 0.9817 (1,2,.,.) = -0.1381 (2,2,.,.) = -1.8193 (3,2,.,.) = -1.9514 (1,3,.,.) = -0.2540 (2,3,.,.) = 0.4077 (3,3,.,.) = 0.01 * 5.8817 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) : Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type 
(#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened fw_re: [[[[-0.36961776 1.84961 -0.39158002 -0.7011731 0.14457135] [ 1.0840985 0.8468163 1.0070691 0.03352354 -0.99829304] [-0.16885662 -0.96636343 -1.3580018 -0.79762435 -0.14447194] [ 0.8563691 1.2425203 0.8249556 -0.38940498 -1.1034365 ] [-0.1484632 -0.01916924 0.8318985 -0.34551924 0.12842481]] [[ 0.33256838 -0.10887676 0.70241886 0.40973112 -1.6881411 ] [-3.0504806 -0.5358765 -1.4938933 -1.6232867 -2.7936232 ] [ 3.0409925 1.0891154 2.2611537 -1.5400386 -3.8280847 ] [-1.9492859 4.3755546 -0.97255975 -0.7680427 0.34147826] [-0.8631464 -0.62347096 -0.45088765 0.62997276 -1.0238729 ]] [[-0.15796714 2.3232024 0.1734963 -0.52606595 -1.4595706 ] [-1.5403991 0.59718335 -0.14217609 -1.529591 -4.0266266 ] [ 2.7379189 -0.20810288 0.42165306 -2.546337 -3.9110737 ] 
[-0.76892436 5.8822117 0.14119892 -1.2577602 -1.1141152 ] [-1.0323658 -0.6288512 0.661825 0.15793255 -0.82220924]]]]; ov_res: [[[[-0.36961776 1.84961 -0.39158002 -0.7011731 0.14457135] [ 1.0840985 0.8468163 1.0070691 0.03352354 -0.99829304] [-0.16885662 -0.96636343 -1.3580018 -0.79762435 -0.14447194] [ 0.8563691 1.2425203 0.8249556 -0.38940498 -1.1034365 ] [-0.1484632 -0.01916924 0.8318985 -0.34551924 0.12842481]] [[ 0.33256838 -0.10887676 0.70241886 0.40973112 -1.6881411 ] [-3.0504806 -0.5358765 -1.4938933 -1.6232867 -2.7936232 ] [ 3.0409925 1.0891154 2.2611537 -1.5400386 -3.8280847 ] [-1.9492859 4.3755546 -0.97255975 -0.7680427 0.34147826] [-0.8631464 -0.62347096 -0.45088765 0.62997276 -1.0238729 ]] [[-0.15796714 2.3232024 0.1734963 -0.52606595 -1.4595706 ] [-1.5403991 0.59718335 -0.14217609 -1.529591 -4.0266266 ] [ 2.7379189 -0.20810288 0.42165306 -2.546337 -3.9110737 ] [-0.76892436 5.8822117 0.14119892 -1.2577602 -1.1141152 ] [-1.0323658 -0.6288512 0.661825 0.15793255 -0.82220924]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} 1] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_599.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.7955 (2,1,.,.) = -0.7736 (3,1,.,.) = -0.2063 (1,2,.,.) = -0.5542 (2,2,.,.) = 0.1527 (3,2,.,.) = 0.6814 (1,3,.,.) = -1.3823 (2,3,.,.) = -0.8928 (3,3,.,.) = -0.3618 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[-0.20777318 -3.4564395 -2.2803853 1.627537 -0.973635 0.34268984 0.01636895 2.292149 1.8883479 1.3063542 ] [-0.43699148 2.5287526 1.905338 -2.4056585 -3.945605 2.9286554 -0.7610445 -3.2390506 1.0300585 1.0600235 ] [-1.8534559 -0.18178724 -1.5646038 -3.6441135 0.15836146 1.0969001 -0.26302385 0.36191097 -3.0882967 0.38760585] [ 1.1302156 0.5539996 3.7314677 0.48856223 0.8488572 3.7223778 0.7491113 -0.41952726 4.231278 1.3056805 ] [ 2.1220393 1.838984 0.9125781 0.09444612 3.0032656 -0.20117652 0.08796845 1.6782975 1.2240832 1.1614475 ]] [[ 0.5463789 -1.8692722 -0.8218625 0.6569977 -0.7116107 0.4635661 0.4357964 1.8560425 1.351637 0.5843314 ] [-0.37109673 1.0178627 1.2412614 -1.9799528 -2.9656186 2.4459069 -0.28038132 -1.8937657 0.3529903 0.7433279 ] [-0.5526877 0.32666957 -1.2289691 -1.6311907 -0.14802629 1.0535835 0.01334192 0.36750388 -1.8903737 -0.43533018] [ 
0.6456829 1.0659027 2.2368786 0.98137903 -0.55294544 2.0296972 -0.08958924 0.2906398 3.1226904 0.77859074] [ 1.4490541 0.12868114 0.02412692 -0.6232908 1.627221 -0.05140319 0.7063022 1.8282936 1.1238991 1.4664382 ]] [[ 0.719576 -0.6309989 0.6891154 -0.5076036 -0.568108 0.54772556 0.5092123 0.55412936 0.5371035 0.3401696 ] [-0.14995185 -0.42324087 0.10298982 -0.68975365 -1.3473312 1.1820855 0.23826738 -0.45327666 -0.02943873 0.05790313] [ 0.83827204 0.10758197 -0.7875823 0.07469108 -0.93002474 0.49938905 0.2692672 0.14990032 -0.03872454 -0.82043 ] [-0.00660434 1.3408182 0.12899862 -0.14513236 -1.2100416 0.41233724 -1.3043201 0.5775553 1.1353445 0.31631595] [ 0.3697029 -1.1726375 -0.5308693 -1.0854197 0.25633767 0.36917514 0.24371748 0.8261401 0.97787035 1.0930102 ]]]]; ov_res: [[[[-0.20777318 -3.4564395 -2.2803853 1.627537 -0.973635 0.34268984 0.01636895 2.292149 1.8883479 1.3063542 ] [-0.43699148 2.5287526 1.905338 -2.4056585 -3.945605 2.9286554 -0.7610445 -3.2390506 1.0300585 1.0600235 ] [-1.8534559 -0.18178724 -1.5646038 -3.6441135 0.15836146 1.0969001 -0.26302385 0.36191097 -3.0882967 0.38760585] [ 1.1302156 0.5539996 3.7314677 0.48856223 0.8488572 3.7223778 0.7491113 -0.41952726 4.231278 1.3056805 ] [ 2.1220393 1.838984 0.9125781 0.09444612 3.0032656 -0.20117652 0.08796845 1.6782975 1.2240832 1.1614475 ]] [[ 0.5463789 -1.8692722 -0.8218625 0.6569977 -0.7116107 0.4635661 0.4357964 1.8560425 1.351637 0.5843314 ] [-0.37109673 1.0178627 1.2412614 -1.9799528 -2.9656186 2.4459069 -0.28038132 -1.8937657 0.3529903 0.7433279 ] [-0.5526877 0.32666957 -1.2289691 -1.6311907 -0.14802629 1.0535835 0.01334192 0.36750388 -1.8903737 -0.43533018] [ 0.6456829 1.0659027 2.2368786 0.98137903 -0.55294544 2.0296972 -0.08958924 0.2906398 3.1226904 0.77859074] [ 1.4490541 0.12868114 0.02412692 -0.6232908 1.627221 -0.05140319 0.7063022 1.8282936 1.1238991 1.4664382 ]] [[ 0.719576 -0.6309989 0.6891154 -0.5076036 -0.568108 0.54772556 0.5092123 0.55412936 0.5371035 0.3401696 ] [-0.14995185 
-0.42324087 0.10298982 -0.68975365 -1.3473312 1.1820855 0.23826738 -0.45327666 -0.02943873 0.05790313] [ 0.83827204 0.10758197 -0.7875823 0.07469108 -0.93002474 0.49938905 0.2692672 0.14990032 -0.03872454 -0.82043 ] [-0.00660434 1.3408182 0.12899862 -0.14513236 -1.2100416 0.41233724 -1.3043201 0.5775553 1.1353445 0.31631595] [ 0.3697029 -1.1726375 -0.5308693 -1.0854197 0.25633767 0.36917514 0.24371748 0.8261401 0.97787035 1.0930102 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} 1] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_601.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.5274 (2,1,.,.) = 0.01 * 2.5971 (3,1,.,.) = 0.6327 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[-1.9402864e+00 0.0000000e+00 2.8166728e+00 0.0000000e+00 -1.7810290e-01 0.0000000e+00 4.9477106e-01 0.0000000e+00 3.4438758e+00 0.0000000e+00 1.7750764e+00 0.0000000e+00 -3.7117012e+00 0.0000000e+00 3.1128566e+00 0.0000000e+00 1.0000821e-01 0.0000000e+00 2.3691933e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 2.2262390e+00 0.0000000e+00 -3.6037001e-01 0.0000000e+00 -1.4928244e+00 0.0000000e+00 2.6573434e+00 0.0000000e+00 -8.5828668e-01 0.0000000e+00 -8.2536113e-01 0.0000000e+00 3.9358754e+00 0.0000000e+00 -1.8075907e+00 0.0000000e+00 -3.0429337e+00 0.0000000e+00 -1.2971561e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 
0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 1.8306910e+00 0.0000000e+00 8.9796841e-02 0.0000000e+00 1.5669042e+00 0.0000000e+00 5.6368202e-01 0.0000000e+00 -2.8893399e+00 0.0000000e+00 -1.3816842e-01 0.0000000e+00 -1.3314203e+00 0.0000000e+00 2.1227093e+00 0.0000000e+00 2.9548356e-01 0.0000000e+00 -9.7335882e-02] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 3.8015321e-01 0.0000000e+00 7.4251711e-01 0.0000000e+00 -2.6552677e+00 0.0000000e+00 2.3977010e+00 0.0000000e+00 -5.8974200e-01 0.0000000e+00 1.2732538e+00 0.0000000e+00 1.2801352e-01 0.0000000e+00 2.1853685e+00 0.0000000e+00 1.2051029e+00 0.0000000e+00 9.5304437e-02] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-1.1680946e+00 0.0000000e+00 -1.1499645e+00 0.0000000e+00 -2.4270096e+00 0.0000000e+00 1.1336545e+00 0.0000000e+00 1.7330748e-01 0.0000000e+00 1.4164889e+00 0.0000000e+00 9.5014060e-01 0.0000000e+00 -5.5599856e-01 0.0000000e+00 -7.8344929e-01 0.0000000e+00 -1.6467156e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 3.1519132e-03 0.0000000e+00 -2.2430863e+00 0.0000000e+00 -2.3377943e+00 0.0000000e+00 -2.0700803e-01 0.0000000e+00 -1.2867670e+00 0.0000000e+00 -1.2101381e+00 0.0000000e+00 -4.4181207e-01 0.0000000e+00 
2.2610531e+00 0.0000000e+00 1.8102871e-01 0.0000000e+00 9.5477575e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-7.3989207e-01 0.0000000e+00 -9.8851031e-01 0.0000000e+00 2.6322678e-01 0.0000000e+00 -1.7677060e+00 0.0000000e+00 -2.6982808e+00 0.0000000e+00 -2.0189542e-01 0.0000000e+00 2.3089814e+00 0.0000000e+00 2.0499344e+00 0.0000000e+00 1.9039184e+00 0.0000000e+00 2.3519032e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-2.5057158e-01 0.0000000e+00 3.0611260e+00 0.0000000e+00 -1.7367600e-01 0.0000000e+00 -5.0831091e-01 0.0000000e+00 1.8575847e+00 0.0000000e+00 -1.5089641e+00 0.0000000e+00 7.6852769e-01 0.0000000e+00 3.1434998e-01 0.0000000e+00 1.5977595e+00 0.0000000e+00 -2.3440623e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-1.1706055e-01 0.0000000e+00 -1.9922123e+00 0.0000000e+00 -2.3781516e-01 0.0000000e+00 8.9527287e-02 0.0000000e+00 1.7207627e+00 0.0000000e+00 1.7268562e+00 0.0000000e+00 1.5528934e+00 0.0000000e+00 1.5983460e+00 0.0000000e+00 2.0967148e-01 0.0000000e+00 -1.9308722e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-5.0088054e-01 0.0000000e+00 
-1.2763643e+00 0.0000000e+00 9.8016077e-01 0.0000000e+00 -6.2013894e-01 0.0000000e+00 5.9136140e-01 0.0000000e+00 -1.8944429e-01 0.0000000e+00 -1.6010442e+00 0.0000000e+00 -1.0429507e+00 0.0000000e+00 -1.2458618e-02 0.0000000e+00 2.8415070e+00]]]]; ov_res: [[[[-1.9402864e+00 0.0000000e+00 2.8166728e+00 0.0000000e+00 -1.7810290e-01 0.0000000e+00 4.9477106e-01 0.0000000e+00 3.4438758e+00 0.0000000e+00 1.7750764e+00 0.0000000e+00 -3.7117012e+00 0.0000000e+00 3.1128566e+00 0.0000000e+00 1.0000821e-01 0.0000000e+00 2.3691933e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 2.2262390e+00 0.0000000e+00 -3.6037001e-01 0.0000000e+00 -1.4928244e+00 0.0000000e+00 2.6573434e+00 0.0000000e+00 -8.5828668e-01 0.0000000e+00 -8.2536113e-01 0.0000000e+00 3.9358754e+00 0.0000000e+00 -1.8075907e+00 0.0000000e+00 -3.0429337e+00 0.0000000e+00 -1.2971561e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 1.8306910e+00 0.0000000e+00 8.9796841e-02 0.0000000e+00 1.5669042e+00 0.0000000e+00 5.6368202e-01 0.0000000e+00 -2.8893399e+00 0.0000000e+00 -1.3816842e-01 0.0000000e+00 -1.3314203e+00 0.0000000e+00 2.1227093e+00 0.0000000e+00 2.9548356e-01 0.0000000e+00 -9.7335882e-02] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 3.8015321e-01 0.0000000e+00 7.4251711e-01 0.0000000e+00 -2.6552677e+00 0.0000000e+00 2.3977010e+00 
0.0000000e+00 -5.8974200e-01 0.0000000e+00 1.2732538e+00 0.0000000e+00 1.2801352e-01 0.0000000e+00 2.1853685e+00 0.0000000e+00 1.2051029e+00 0.0000000e+00 9.5304437e-02] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-1.1680946e+00 0.0000000e+00 -1.1499645e+00 0.0000000e+00 -2.4270096e+00 0.0000000e+00 1.1336545e+00 0.0000000e+00 1.7330748e-01 0.0000000e+00 1.4164889e+00 0.0000000e+00 9.5014060e-01 0.0000000e+00 -5.5599856e-01 0.0000000e+00 -7.8344929e-01 0.0000000e+00 -1.6467156e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 3.1519132e-03 0.0000000e+00 -2.2430863e+00 0.0000000e+00 -2.3377943e+00 0.0000000e+00 -2.0700803e-01 0.0000000e+00 -1.2867670e+00 0.0000000e+00 -1.2101381e+00 0.0000000e+00 -4.4181207e-01 0.0000000e+00 2.2610531e+00 0.0000000e+00 1.8102871e-01 0.0000000e+00 9.5477575e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-7.3989207e-01 0.0000000e+00 -9.8851031e-01 0.0000000e+00 2.6322678e-01 0.0000000e+00 -1.7677060e+00 0.0000000e+00 -2.6982808e+00 0.0000000e+00 -2.0189542e-01 0.0000000e+00 2.3089814e+00 0.0000000e+00 2.0499344e+00 0.0000000e+00 1.9039184e+00 0.0000000e+00 2.3519032e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 
0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-2.5057158e-01 0.0000000e+00 3.0611260e+00 0.0000000e+00 -1.7367600e-01 0.0000000e+00 -5.0831091e-01 0.0000000e+00 1.8575847e+00 0.0000000e+00 -1.5089641e+00 0.0000000e+00 7.6852769e-01 0.0000000e+00 3.1434998e-01 0.0000000e+00 1.5977595e+00 0.0000000e+00 -2.3440623e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-1.1706055e-01 0.0000000e+00 -1.9922123e+00 0.0000000e+00 -2.3781516e-01 0.0000000e+00 8.9527287e-02 0.0000000e+00 1.7207627e+00 0.0000000e+00 1.7268562e+00 0.0000000e+00 1.5528934e+00 0.0000000e+00 1.5983460e+00 0.0000000e+00 2.0967148e-01 0.0000000e+00 -1.9308722e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-5.0088054e-01 0.0000000e+00 -1.2763643e+00 0.0000000e+00 9.8016077e-01 0.0000000e+00 -6.2013894e-01 0.0000000e+00 5.9136140e-01 0.0000000e+00 -1.8944429e-01 0.0000000e+00 -1.6010442e+00 0.0000000e+00 -1.0429507e+00 0.0000000e+00 -1.2458618e-02 0.0000000e+00 2.8415070e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [1, 1], 'dilations': [2, 2], 'groups': 1, 'output_padding': [1, 1], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_603.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 1]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.3945 (2,1,.,.) = -0.2941 (3,1,.,.) = -1.2387 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 1.7372332 0. -2.2853909 0. -0.98996705 0. 2.1445932 0. -0.53708667 0. -0.6193194 0. 0.9482813 0. 0.06234777 0. 0.26895168] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.43566105 0. -0.04925109 0. 0.94051474 0. 0.4669849 0. -0.75470877 0. -0.8590429 0. 1.4374433 0. 1.2801915 0. -0.99653435] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.68250054 0. 0.21443158 0. 0.3865678 0. 0.6557714 0. -0.781714 0. -0.28596282 0. -0.78027153 0. 3.0145288 0. -1.7334348 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -1.3349109 0. -2.414969 0. 0.4779333 0. 0.22456662 0. -0.3672094 0. 2.0857022 0. 1.7694684 0. 0.19360773 0. -0.7610319 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.70196056 0. -0.47140372 0. -0.5907841 0. -0.07665003 0. -0.22406162 0. -1.4295198 0. -1.3817506 0. 0.47431234 0. 0.675116 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 
0. ] [ 0. -1.6869715 0. 2.6481268 0. -0.41759086 0. 0.3845925 0. 0.7030015 0. -0.34136686 0. -1.5241913 0. -1.346159 0. -1.0408844 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 1.3321834 0. 0.718883 0. 0.339474 0. -0.74716306 0. 1.581649 0. 1.2605683 0. -0.9215043 0. -1.0884691 0. -0.9079831 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -1.3483616 0. 0.90059704 0. -0.4056036 0. -0.25328842 0. 1.0579859 0. -1.0658439 0. -1.1725855 0. 2.0370035 0. 1.4163036 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -1.2274466 0. -0.07354226 0. -0.8634799 0. -1.5870347 0. 0.6904527 0. 1.5197263 0. 0.8282525 0. 3.1765795 0. -0.6515318 ]]]]; ov_res: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 1.7372332 0. -2.2853909 0. -0.98996705 0. 2.1445932 0. -0.53708667 0. -0.6193194 0. 0.9482813 0. 0.06234777 0. 0.26895168] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.43566105 0. -0.04925109 0. 0.94051474 0. 0.4669849 0. -0.75470877 0. -0.8590429 0. 1.4374433 0. 1.2801915 0. -0.99653435] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.68250054 0. 0.21443158 0. 0.3865678 0. 0.6557714 0. -0.781714 0. -0.28596282 0. -0.78027153 0. 3.0145288 0. -1.7334348 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -1.3349109 0. -2.414969 0. 0.4779333 0. 0.22456662 0. -0.3672094 0. 2.0857022 0. 1.7694684 0. 0.19360773 0. -0.7610319 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.70196056 0. -0.47140372 0. -0.5907841 0. -0.07665003 0. -0.22406162 0. -1.4295198 0. -1.3817506 0. 0.47431234 0. 0.675116 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -1.6869715 0. 2.6481268 0. -0.41759086 0. 0.3845925 0. 0.7030015 0. -0.34136686 0. -1.5241913 0. -1.346159 0. -1.0408844 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 1.3321834 0. 0.718883 0. 0.339474 0. -0.74716306 0. 1.581649 0. 1.2605683 0. -0.9215043 0. -1.0884691 0. 
-0.9079831 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -1.3483616 0. 0.90059704 0. -0.4056036 0. -0.25328842 0. 1.0579859 0. -1.0658439 0. -1.1725855 0. 2.0370035 0. 1.4163036 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -1.2274466 0. -0.07354226 0. -0.8634799 0. -1.5870347 0. 0.6904527 0. 1.5197263 0. 0.8282525 0. 3.1765795 0. -0.6515318 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 2, 2], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_604.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.5625 0.5138 -1.6019 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, 2, strides=[12, 4, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[ -1.8946378 -0.98390275 1.6312709 -4.4548984 -0.11917502 1.3156983 0.15033275 0.7474413 -0.5523371 -1.8885686 1.3432548 ] [ 2.663263 4.400459 0.8959237 0.9087254 0.7817312 0.8192327 -5.2254987 0.76701695 6.0058866 -2.7784922 -2.284633 ] [ 2.711896 -0.1866805 3.619106 3.3434503 6.35821 2.1681046 -0.51942784 -3.0700996 5.1565833 7.0495753 1.550484 ] [ 3.8905442 1.3150928 4.533424 -4.995259 -2.2392917 -0.41553438 1.8244596 2.3761435 -0.6068211 -1.5811651 1.2301569 ] [ -4.6242867 3.7947667 5.658134 -1.5259707 -1.7753391 -0.6852445 0.87324554 -0.606345 -2.9185944 -1.1052403 3.5219905 ] [ -2.4356802 -5.9923897 2.7089849 -2.315195 2.4544373 2.4209123 -5.281096 -3.0486217 -1.5614576 -3.7518442 1.5584059 ] [ 5.0201797 3.5228887 3.8570192 4.606525 4.022164 -0.70237595 -4.372901 1.4420505 -2.9047897 -1.9868939 3.180198 ] [ 0.2300443 2.05603 1.2229207 6.505369 -0.30943972 1.689122 -0.8563103 -2.4556167 -2.5328739 -4.2927685 2.8830895 ] [ 0.38262588 -0.18482965 -0.44572836 2.752536 -0.06913871 4.15886 -1.9530685 4.131989 4.1895175 -0.23190665 1.4122405 ] [ -3.2537796 6.334048 8.006406 5.696906 4.2854095 
-1.0074873 -2.67565 4.0518303 1.2616673 -1.2313292 -1.2702372 ] [ -1.0610554 -2.4127567 -0.3993855 -3.102038 -2.5084684 -2.0349467 2.776328 -1.3698387 0.6277122 1.2446668 -0.35663074]] [[ -0.90683496 -2.1381466 -0.4545099 -1.9990456 -0.37986946 0.47766757 0.7250137 2.0069556 -1.0716573 -1.4091413 0.6248455 ] [ -2.2857604 1.3186545 2.3786879 -2.84608 -0.13929951 1.9200737 4.506015 -1.6382227 -2.877288 1.3891078 1.4082623 ] [ 0.23505771 4.2650805 -3.8851414 0.26317096 0.4790565 -0.67187417 -1.7993007 0.9046038 3.9574988 -1.8638127 -2.0805233 ] [ 0.62300557 -2.6411 1.5983002 1.4922736 6.095553 4.7483354 6.091084 -1.2176437 4.544805 1.1332525 -1.1991905 ] [ 1.7590601 -2.8468049 1.569637 -3.9304595 1.4636252 0.44933605 -2.954101 4.0947857 5.906508 -1.7979934 1.4366837 ] [ 1.5210979 -1.6204982 1.4948924 -1.6041081 -1.9748502 3.1794198 3.761105 0.4976511 2.899114 -0.10110998 2.5880246 ] [ -4.551932 -2.3152423 -0.1674465 -1.5122726 1.2683357 -2.213804 -1.8060501 5.73359 -1.647896 -6.5825286 -1.8212872 ] [ -0.38853604 -6.3973193 -1.4145277 6.182807 -2.6313214 2.0105712 -4.2496533 -0.408145 0.32285023 -0.6064638 4.447465 ] [ -2.3839917 0.40581304 -0.5027747 0.07905316 -1.1283231 2.0733204 2.2749472 0.20405841 -1.6668823 -3.763866 1.2143853 ] [ 4.0115356 -0.11431491 1.5253079 3.4920645 4.31933 -0.46474826 -2.6605923 1.5421364 0.4338026 -1.0212396 2.2100282 ] [ 1.4252179 1.6419889 4.124049 3.7085686 0.17937195 0.72566175 -6.136698 6.715673 -0.7217524 0.5331011 -2.5055573 ]] [[ 2.2359085 0.01712501 -1.4908392 3.6592116 -0.831499 -2.3095548 -2.8128943 0.3023535 0.4788741 -1.6810749 -0.27442908] [ -2.0973353 -4.098628 2.2738338 -6.152873 0.57627547 -1.9545184 -5.1908226 1.1428789 -0.7689137 -2.076512 -1.4086562 ] [ -3.0836544 -1.52091 -2.2761674 -1.9583721 -7.9213786 2.0026813 -3.6837487 -1.9908693 -4.2378287 -1.1461825 -2.946445 ] [ -1.2144779 0.5802382 -4.5000024 1.6068431 2.0197716 -4.5017734 1.6063217 -5.4985995 -2.7871876 0.87152445 -3.3584328 ] [ -0.5690999 4.380706 
1.3388661 -9.334093 0.38457322 -0.4169613 -6.075868 1.1465338 -4.6705127 1.5549604 -0.5856215 ] [ -3.012761 -4.7459273 -0.5755316 2.2415166 -2.7921941 -7.597525 2.6190171 -4.9126086 4.1802363 -4.750239 -2.3010356 ] [ 2.2230039 1.815348 -0.83242476 -8.235963 0.2077328 3.357048 -4.852899 -5.3936014 -5.9769626 2.5037332 -1.7111145 ] [ -2.493391 0.13911736 -0.30120432 -3.5666838 -2.8472662 -5.0000653 -1.233704 2.0891294 -1.5933019 -7.1336846 -1.0459632 ] [ -1.0498797 2.1950388 1.5946902 4.885315 -2.2510195 -3.2769852 -1.0054303 -4.5664334 1.921122 2.5830593 0.04859316] [ -9.442388 -5.401145 -10.709789 -0.55532014 -2.9309642 4.169193 -1.0853592 -2.7834744 -2.7958827 -7.5108333 -0.71483713] [ -0.7400856 1.2218455 4.986627 -1.3563453 0.16556275 -7.109115 -0.544062 -2.9218793 -0.851872 -1.7117113 -3.3056622 ]]]]; ov_res: [[[[ -1.8946378 -0.98390263 1.6312709 -4.4548984 -0.1191752 1.3156983 0.15033272 0.74744123 -0.5523371 -1.8885686 1.3432548 ] [ 2.663263 4.400459 0.8959235 0.90872526 0.78173137 0.8192327 -5.225498 0.76701695 6.005887 -2.7784915 -2.2846327 ] [ 2.711896 -0.18668067 3.6191063 3.3434505 6.35821 2.1681046 -0.5194281 -3.0700996 5.156583 7.0495753 1.5504842 ] [ 3.8905444 1.3150927 4.5334234 -4.995259 -2.2392917 -0.4155345 1.8244596 2.3761437 -0.606821 -1.5811653 1.2301569 ] [ -4.6242867 3.7947664 5.6581345 -1.5259705 -1.7753391 -0.6852446 0.87324536 -0.6063449 -2.9185948 -1.1052399 3.5219903 ] [ -2.4356802 -5.992389 2.7089849 -2.315195 2.4544375 2.4209125 -5.2810965 -3.0486217 -1.5614574 -3.7518442 1.5584059 ] [ 5.02018 3.5228891 3.8570194 4.6065245 4.022164 -0.70237595 -4.3729005 1.4420505 -2.90479 -1.9868939 3.1801982 ] [ 0.23004425 2.05603 1.2229207 6.5053697 -0.3094396 1.689122 -0.85631055 -2.455617 -2.5328736 -4.2927685 2.8830893 ] [ 0.38262585 -0.1848296 -0.4457286 2.7525363 -0.06913859 4.1588593 -1.953068 4.131989 4.1895175 -0.2319066 1.4122404 ] [ -3.2537792 6.334048 8.006407 5.6969056 4.2854095 -1.0074868 -2.67565 4.0518303 1.2616673 -1.2313292 
-1.2702372 ] [ -1.0610554 -2.4127564 -0.39938545 -3.1020377 -2.5084684 -2.0349467 2.776328 -1.3698387 0.6277122 1.2446668 -0.35663074]] [[ -0.90683496 -2.1381466 -0.45450985 -1.9990454 -0.3798694 0.47766757 0.7250137 2.0069556 -1.0716573 -1.4091413 0.6248455 ] [ -2.2857604 1.3186548 2.3786879 -2.84608 -0.13929957 1.9200736 4.506015 -1.6382229 -2.877288 1.3891077 1.4082623 ] [ 0.23505768 4.2650805 -3.8851414 0.26317126 0.47905657 -0.67187417 -1.7993007 0.90460384 3.9574986 -1.8638127 -2.0805233 ] [ 0.6230056 -2.6411 1.5983001 1.4922734 6.095553 4.748336 6.091084 -1.2176436 4.5448046 1.1332526 -1.1991905 ] [ 1.7590601 -2.8468046 1.5696368 -3.9304595 1.4636252 0.4493361 -2.954101 4.0947857 5.906508 -1.7979934 1.4366837 ] [ 1.521098 -1.6204982 1.4948921 -1.6041083 -1.9748502 3.1794195 3.7611048 0.49765128 2.8991141 -0.10110992 2.5880246 ] [ -4.5519314 -2.3152423 -0.16744673 -1.5122726 1.2683355 -2.213804 -1.8060501 5.7335906 -1.6478963 -6.5825286 -1.8212872 ] [ -0.38853604 -6.397319 -1.4145275 6.182807 -2.6313214 2.0105712 -4.249653 -0.40814507 0.32285023 -0.6064638 4.447465 ] [ -2.3839917 0.40581298 -0.50277495 0.07905334 -1.1283232 2.0733204 2.2749472 0.20405838 -1.6668825 -3.763866 1.2143853 ] [ 4.0115356 -0.11431503 1.5253079 3.4920647 4.31933 -0.4647482 -2.660592 1.5421364 0.43380287 -1.0212396 2.2100282 ] [ 1.4252179 1.6419889 4.124049 3.7085686 0.17937195 0.7256616 -6.136698 6.7156725 -0.7217523 0.5331011 -2.5055573 ]] [[ 2.2359085 0.01712477 -1.4908392 3.6592116 -0.83149904 -2.3095548 -2.8128943 0.3023535 0.4788741 -1.6810749 -0.27442908] [ -2.0973353 -4.098628 2.2738338 -6.152873 0.5762757 -1.9545184 -5.190822 1.1428789 -0.7689136 -2.076512 -1.4086562 ] [ -3.0836546 -1.52091 -2.2761676 -1.9583719 -7.9213786 2.0026803 -3.6837487 -1.9908694 -4.2378287 -1.1461825 -2.946445 ] [ -1.214478 0.58023846 -4.500002 1.6068431 2.0197716 -4.501773 1.6063217 -5.4986 -2.7871873 0.87152374 -3.3584328 ] [ -0.5691 4.380706 1.3388664 -9.334093 0.38457298 -0.41696155 -6.075868 
1.1465338 -4.670513 1.5549604 -0.58562136] [ -3.012761 -4.745928 -0.5755317 2.2415166 -2.7921944 -7.597525 2.6190166 -4.9126086 4.1802363 -4.750239 -2.3010356 ] [ 2.2230039 1.8153487 -0.8324248 -8.235963 0.2077328 3.357048 -4.852899 -5.3936014 -5.9769626 2.503734 -1.7111145 ] [ -2.493391 0.13911772 -0.30120432 -3.5666833 -2.8472664 -5.0000653 -1.2337041 2.0891294 -1.5933019 -7.133684 -1.0459633 ] [ -1.0498797 2.1950393 1.5946907 4.8853154 -2.2510197 -3.2769856 -1.0054305 -4.5664334 1.9211222 2.5830593 0.04859328] [ -9.442388 -5.4011455 -10.709789 -0.55532014 -2.930964 4.1691933 -1.0853591 -2.7834744 -2.795883 -7.5108333 -0.71483713] [ -0.7400856 1.2218455 4.9866276 -1.3563453 0.16556263 -7.109115 -0.544062 -2.9218793 -0.85187197 -1.7117113 -3.3056622 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 2, 2], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_606.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.5376 0.4007 -2.4474 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, 2, strides=[12, 4, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[ 2.14632702e+00 2.66769505e+00 -1.90100217e+00 4.15054893e+00 -4.72465706e+00 -2.28072000e+00 -2.40388989e+00 -8.26672935e+00 -1.72386885e-01] [-6.87658215e+00 -4.03006601e+00 8.47699404e-01 1.98667383e+00 -7.20715475e+00 -1.73550689e+00 -7.97157347e-01 -3.21611381e+00 -3.62119699e+00] [-4.35372829e+00 -1.35823834e+00 -1.07512760e+01 3.09537983e+00 2.34872293e+00 -2.88635421e+00 -3.46716166e+00 -1.94795907e+00 -2.89609075e+00] [-7.77819514e-01 -4.74630356e+00 6.89143038e+00 -4.60906363e+00 -1.93329263e+00 -1.08525944e+01 2.65867805e+00 -1.38011289e+00 -4.46476793e+00] [ 4.91626501e+00 -1.32081044e+00 1.84626579e-02 -1.33168113e+00 -9.12184334e+00 5.73800945e+00 -6.19510889e+00 2.12867856e+00 -7.90319729e+00] [-5.35467720e+00 -2.29297757e-01 -2.33081126e+00 -7.40152025e+00 -5.17005348e+00 3.43075895e+00 2.66305161e+00 2.81482458e+00 -4.83358860e+00] [-3.44072104e-01 2.82199669e+00 -5.14324617e+00 -2.98395991e+00 -4.66288519e+00 1.70531988e+00 -1.69632804e+00 -3.23991084e+00 -5.34180593e+00] [ 1.72577167e+00 -1.47661686e+00 4.24935198e+00 -3.01015306e+00 3.73513579e-01 -5.30358458e+00 
-3.60044694e+00 -3.46618605e+00 -5.26566839e+00] [-2.01972294e+00 2.12285995e-01 -2.95002270e+00 5.89195204e+00 1.74539065e+00 -1.16933994e+01 1.95995903e+00 -1.29139209e+00 -5.01238537e+00]] [[-7.10160613e-01 1.38106227e+00 2.97932720e+00 1.08111703e+00 -9.18995500e-01 6.15973830e-01 1.21061194e+00 3.21213365e+00 1.54704466e-01] [-3.71840864e-01 -4.11587328e-01 -1.66495132e+00 1.20991975e-01 1.96862626e+00 3.44922519e+00 3.57610583e+00 -4.73437786e+00 -2.96064830e+00] [ 4.04869127e+00 -8.29116225e-01 -1.15479290e+00 1.14478385e+00 2.10315704e+00 1.02067077e+00 4.91172218e+00 2.91880584e+00 2.00066471e+00] [-3.08691692e+00 1.12523532e+00 1.27889466e+00 7.18939662e-01 -5.46688795e-01 -1.50043941e+00 -1.47802758e+00 1.87754107e+00 1.78187597e+00] [ 1.89074373e+00 -3.43300372e-01 -9.08799887e-01 1.08090675e+00 3.97080350e+00 1.93361890e+00 -6.50727034e-01 7.28653550e-01 -2.60110617e+00] [-5.43202043e-01 -1.82331830e-01 -8.72688413e-01 1.06150091e+00 3.78742814e-01 1.46473432e+00 8.61724317e-02 9.97464776e-01 1.50779915e+00] [ 1.55666721e+00 1.33234191e+00 1.26097786e+00 6.50993943e-01 -2.62311101e+00 -4.14381218e+00 -8.49810600e-01 1.40920877e+00 -5.42542338e-03] [-1.63889885e+00 4.29047871e+00 2.36216640e+00 4.53646451e-01 1.29348099e+00 1.90299857e+00 6.02780104e-01 2.45606661e+00 2.73339176e+00] [ 6.87628567e-01 -2.58173585e+00 7.38912404e-01 2.71419835e+00 -7.72518158e-01 -1.25150055e-01 -3.37922305e-01 1.79998600e+00 -7.13083863e-01]] [[-2.49190497e+00 -1.07333314e+00 -1.80376649e+00 -2.16027689e+00 -2.00169563e+00 -4.56678677e+00 -2.91622925e+00 -5.04878879e-01 8.20622683e-01] [ 2.56428766e+00 4.18359995e-01 -8.01385283e-01 -5.18872070e+00 -6.83258057e+00 2.61236429e-01 -8.78031158e+00 -1.27804780e+00 1.03329945e+00] [-3.38515949e+00 2.92331123e+00 -3.86131096e+00 -2.67437911e+00 -1.57018089e+00 -1.57052946e+00 -2.51326632e+00 -2.72923994e+00 -6.55545712e+00] [-5.97045612e+00 -2.77825117e+00 1.45358324e-01 -1.60494947e+00 6.07668400e-01 -6.82699633e+00 
1.51275158e-01 -2.35934472e+00 -1.30321872e+00] [ 1.42610693e+00 -4.85062265e+00 -3.14263439e+00 3.33822727e-01 -3.51322317e+00 -1.01438408e+01 -1.19129252e+00 -4.87299824e+00 -1.19995189e+00] [-3.62839079e+00 3.15222645e+00 -7.93396187e+00 2.09044218e-01 1.18758488e+00 -7.03494596e+00 -2.91631746e+00 -7.27600670e+00 -2.73596644e+00] [-2.02470946e+00 -3.09652567e+00 -5.61630058e+00 -6.60328722e+00 -1.07544255e+00 3.92222214e+00 -3.49582314e+00 -1.42916882e+00 -5.06934071e+00] [-2.73851919e+00 -6.97756481e+00 -4.00213575e+00 -3.95049691e+00 1.15419936e+00 5.73719978e-01 -3.53286314e+00 -6.86179519e-01 -2.07731342e+00] [ 2.68255234e+00 -3.53166413e+00 -2.68941736e+00 -7.78808594e+00 -3.68454313e+00 -7.84863830e-01 7.05292225e-01 -4.68173599e+00 -1.22960269e+00]]]]; ov_res: [[[[ 2.14632702e+00 2.66769505e+00 -1.90100217e+00 4.15054893e+00 -4.72465706e+00 -2.28072000e+00 -2.40388989e+00 -8.26672935e+00 -1.72386885e-01] [-6.87658215e+00 -4.03006601e+00 8.47699404e-01 1.98667431e+00 -7.20715475e+00 -1.73550677e+00 -7.97157228e-01 -3.21611357e+00 -3.62119699e+00] [-4.35372829e+00 -1.35823810e+00 -1.07512779e+01 3.09537983e+00 2.34872293e+00 -2.88635421e+00 -3.46716142e+00 -1.94795918e+00 -2.89609075e+00] [-7.77819633e-01 -4.74630356e+00 6.89143038e+00 -4.60906363e+00 -1.93329275e+00 -1.08525925e+01 2.65867805e+00 -1.38011289e+00 -4.46476746e+00] [ 4.91626501e+00 -1.32081032e+00 1.84626579e-02 -1.33168113e+00 -9.12184334e+00 5.73800993e+00 -6.19510889e+00 2.12867856e+00 -7.90319681e+00] [-5.35467720e+00 -2.29297638e-01 -2.33081126e+00 -7.40152025e+00 -5.17005348e+00 3.43075848e+00 2.66305208e+00 2.81482458e+00 -4.83358908e+00] [-3.44072342e-01 2.82199717e+00 -5.14324665e+00 -2.98396015e+00 -4.66288519e+00 1.70531964e+00 -1.69632804e+00 -3.23991108e+00 -5.34180546e+00] [ 1.72577214e+00 -1.47661686e+00 4.24935198e+00 -3.01015282e+00 3.73513818e-01 -5.30358410e+00 -3.60044670e+00 -3.46618605e+00 -5.26566792e+00] [-2.01972294e+00 2.12286115e-01 -2.95002246e+00 5.89195204e+00 
1.74539065e+00 -1.16933994e+01 1.95995903e+00 -1.29139149e+00 -5.01238537e+00]] [[-7.10160375e-01 1.38106227e+00 2.97932744e+00 1.08111691e+00 -9.18995261e-01 6.15973830e-01 1.21061194e+00 3.21213388e+00 1.54704466e-01] [-3.71840805e-01 -4.11587328e-01 -1.66495132e+00 1.20991975e-01 1.96862626e+00 3.44922495e+00 3.57610607e+00 -4.73437738e+00 -2.96064830e+00] [ 4.04869127e+00 -8.29116106e-01 -1.15479279e+00 1.14478385e+00 2.10315728e+00 1.02067077e+00 4.91172171e+00 2.91880560e+00 2.00066447e+00] [-3.08691669e+00 1.12523532e+00 1.27889466e+00 7.18939602e-01 -5.46688914e-01 -1.50043917e+00 -1.47802782e+00 1.87754095e+00 1.78187597e+00] [ 1.89074361e+00 -3.43300372e-01 -9.08800006e-01 1.08090663e+00 3.97080326e+00 1.93361866e+00 -6.50727034e-01 7.28653669e-01 -2.60110617e+00] [-5.43202162e-01 -1.82332009e-01 -8.72688413e-01 1.06150091e+00 3.78742695e-01 1.46473444e+00 8.61725509e-02 9.97464657e-01 1.50779927e+00] [ 1.55666721e+00 1.33234203e+00 1.26097798e+00 6.50993884e-01 -2.62311053e+00 -4.14381266e+00 -8.49810600e-01 1.40920877e+00 -5.42545319e-03] [-1.63889861e+00 4.29047918e+00 2.36216617e+00 4.53646541e-01 1.29348099e+00 1.90299869e+00 6.02780104e-01 2.45606685e+00 2.73339176e+00] [ 6.87628627e-01 -2.58173561e+00 7.38912284e-01 2.71419859e+00 -7.72518277e-01 -1.25150055e-01 -3.37922066e-01 1.79998600e+00 -7.13083982e-01]] [[-2.49190521e+00 -1.07333314e+00 -1.80376649e+00 -2.16027689e+00 -2.00169539e+00 -4.56678677e+00 -2.91622949e+00 -5.04878998e-01 8.20622683e-01] [ 2.56428766e+00 4.18359995e-01 -8.01385283e-01 -5.18872070e+00 -6.83258057e+00 2.61236429e-01 -8.78031254e+00 -1.27804780e+00 1.03329945e+00] [-3.38515949e+00 2.92331123e+00 -3.86131072e+00 -2.67437887e+00 -1.57018065e+00 -1.57052970e+00 -2.51326656e+00 -2.72923994e+00 -6.55545712e+00] [-5.97045660e+00 -2.77825069e+00 1.45358324e-01 -1.60494936e+00 6.07668400e-01 -6.82699680e+00 1.51275158e-01 -2.35934472e+00 -1.30321860e+00] [ 1.42610693e+00 -4.85062265e+00 -3.14263439e+00 3.33822727e-01 
-3.51322341e+00 -1.01438408e+01 -1.19129264e+00 -4.87299824e+00 -1.19995177e+00] [-3.62839079e+00 3.15222597e+00 -7.93396187e+00 2.09044218e-01 1.18758488e+00 -7.03494596e+00 -2.91631746e+00 -7.27600718e+00 -2.73596644e+00] [-2.02470946e+00 -3.09652543e+00 -5.61630106e+00 -6.60328722e+00 -1.07544231e+00 3.92222214e+00 -3.49582291e+00 -1.42916870e+00 -5.06934071e+00] [-2.73851943e+00 -6.97756433e+00 -4.00213623e+00 -3.95049715e+00 1.15419912e+00 5.73719978e-01 -3.53286314e+00 -6.86179519e-01 -2.07731318e+00] [ 2.68255281e+00 -3.53166437e+00 -2.68941736e+00 -7.78808546e+00 -3.68454313e+00 -7.84863830e-01 7.05292225e-01 -4.68173599e+00 -1.22960269e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_608.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.6046 -0.7957 -0.5708 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.3720 (2,1,.,.) = -0.6543 (3,1,.,.) = 0.2372 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[-1.1218865 -0.8663422 -0.17158395 -0.80615294 -0.68811494 -0.32099944 -0.9049621 -1.1602769 -0.6429033 -0.78835416] [-0.66398114 -1.1468892 -0.11160529 0.17217398 -0.64295864 -0.29704913 -0.5475069 -0.40630114 -0.283923 -0.61551225] [-0.5036429 -0.5687251 -0.56060517 -0.86528045 -1.0265195 -0.1796582 -0.38622642 -0.7290962 -1.2891737 -1.2340125 ] [-0.81096584 -0.6364616 -0.42192173 0.0539369 -0.388605 -0.36083722 -0.03361511 -0.21077898 -0.8476325 -0.6339712 ] [-1.2492169 -0.01626641 -0.31240863 -0.97453254 -0.27635238 -0.6711602 -0.70973635 -0.90342855 0.18917131 -0.34632078] [-0.97017086 -0.8416921 -1.08333 -0.9750545 -0.7172046 -0.8612467 -0.3848856 -0.81467736 -1.1699542 -0.8625032 ] [-0.69638103 -0.70675105 -0.39545405 -0.5477047 -0.55979455 -1.1089187 -1.3577266 -0.5026057 -0.73564476 -0.67755306] [-0.51192546 -0.45083407 -0.15796468 -0.7976622 -0.5785366 -1.0097574 -0.9677099 -0.8650593 -0.12817109 -0.53802305] [-0.38296074 -0.5333176 -0.19341469 -0.7505946 -0.0887596 0.02232045 -0.41287228 -0.80507535 
-0.43214947 -0.96808565] [-1.3820268 -0.2926108 -0.0275991 -0.49501204 -0.3268776 -0.852851 -1.0400044 -0.9418872 -0.51762545 -0.27181795]] [[-1.1915592 0.35800505 -0.5394275 -0.1517725 -1.0703942 -0.51192117 -0.6146817 -0.5550133 -0.49304128 -0.7452384 ] [-0.9838767 -0.9420609 0.06597167 1.1491191 -0.22720069 -0.44777587 0.03895414 -0.20604897 -0.787926 -0.2718684 ] [-0.7234151 0.04036397 -0.9596974 0.5176178 -2.1802425 0.24044895 -1.4307371 -0.14605397 0.08191347 0.17189491] [-0.8510126 -0.14687806 -0.9467824 -0.6247601 -0.7564553 -1.7457156 -1.069973 -1.5879552 0.26237988 -0.16133207] [-1.5193152 -0.53425145 -1.1216416 0.10024387 -1.5774907 -1.0585973 -1.0424035 0.06096214 -0.15385973 -0.792307 ] [-0.68339854 -0.8629177 -1.9933025 -0.91595817 -2.9959445 -0.74171305 -1.2436525 -0.8170359 -0.90577775 0.1538552 ] [-0.6969557 -0.59486735 -1.6472116 -1.8442296 -0.42697963 -1.6809134 -0.20502812 -0.76761633 -1.3525119 -0.5733678 ] [-1.0357299 -0.5957821 -0.3233883 -0.44315785 -1.8095757 -2.204741 -0.4689695 -0.87889826 -1.6878276 -0.9494633 ] [-0.32803172 -1.6789074 -0.7553798 -0.36345673 -1.1631975 -0.8794087 0.231228 -0.869219 0.14749604 -1.2918057 ] [-0.5579374 -0.96394575 -0.14633346 -0.26755434 -1.2047956 -0.8108028 -1.4257525 -1.29752 -1.3049076 -0.887074 ]] [[-0.35751262 -0.45185754 -0.69044 -0.41851008 -0.80116683 -0.8569275 -0.2681085 -0.20386177 -0.6895834 -0.6298265 ] [-0.75273013 -0.6459377 -0.8872334 -0.43614775 -0.56794554 -0.1689887 -0.2909167 -0.5548097 -0.6119244 -0.3276452 ] [-0.5809237 -0.5612137 -0.45028117 -0.6761521 -0.10497093 -0.04840994 -0.5056196 -0.5960916 -0.36599278 -0.4744246 ] [-0.3761144 -0.6757981 -0.55866456 -0.340754 -0.91340375 -0.6942102 -0.74738276 -0.77049893 -0.36941546 -0.24035522] [-0.5704329 -0.42309287 -0.4496377 -0.81202435 -0.5249011 -0.54828876 -0.8635597 -0.9207407 -0.6406882 -0.5704144 ] [-0.6348723 -0.85054773 -0.79543173 -0.8773198 -0.36970326 -0.8733919 -1.2054167 -0.47464684 -0.6016226 -0.8141531 ] [-0.6175629 
-0.8114004 -0.66976184 -0.7512721 -0.62412995 -0.8114985 -0.6863291 -0.8747651 -0.47292733 -0.49929208] [-0.3899341 -0.8577033 -0.6442827 -0.7887669 -0.20101264 -0.88012975 -0.3850907 -0.52772707 -0.69511974 -0.5357682 ] [-0.5479102 -0.61976427 -0.51844907 -0.6633078 -0.611339 -1.0811675 -0.82697856 -0.6957769 -0.8060553 -0.3764748 ] [-0.91065145 -1.0233358 -0.31480426 -0.47194514 -0.64317393 -0.54422784 -0.5954092 -0.70863545 -0.09120211 -0.29247928]]]]; ov_res: [[[[-1.1218865 -0.8663422 -0.17158395 -0.80615294 -0.68811494 -0.32099944 -0.9049621 -1.1602769 -0.6429033 -0.78835416] [-0.66398114 -1.1468892 -0.11160529 0.17217398 -0.64295864 -0.29704913 -0.5475069 -0.40630114 -0.283923 -0.61551225] [-0.5036429 -0.5687251 -0.56060517 -0.86528045 -1.0265195 -0.1796582 -0.38622642 -0.7290962 -1.2891737 -1.2340125 ] [-0.81096584 -0.6364616 -0.42192173 0.0539369 -0.388605 -0.36083722 -0.03361511 -0.21077898 -0.8476325 -0.6339712 ] [-1.2492169 -0.01626641 -0.31240863 -0.97453254 -0.27635238 -0.6711602 -0.70973635 -0.90342855 0.18917131 -0.34632078] [-0.97017086 -0.8416921 -1.08333 -0.9750545 -0.7172046 -0.8612467 -0.3848856 -0.81467736 -1.1699542 -0.8625032 ] [-0.69638103 -0.70675105 -0.39545405 -0.5477047 -0.55979455 -1.1089187 -1.3577266 -0.5026057 -0.73564476 -0.67755306] [-0.51192546 -0.45083407 -0.15796468 -0.7976622 -0.5785366 -1.0097574 -0.9677099 -0.8650593 -0.12817109 -0.53802305] [-0.38296074 -0.5333176 -0.19341469 -0.7505946 -0.0887596 0.02232045 -0.41287228 -0.80507535 -0.43214947 -0.96808565] [-1.3820268 -0.2926108 -0.0275991 -0.49501204 -0.3268776 -0.852851 -1.0400044 -0.9418872 -0.51762545 -0.27181795]] [[-1.1915592 0.35800505 -0.5394275 -0.1517725 -1.0703942 -0.51192117 -0.6146817 -0.5550133 -0.49304128 -0.7452384 ] [-0.9838767 -0.9420609 0.06597167 1.1491191 -0.22720069 -0.44777587 0.03895414 -0.20604897 -0.787926 -0.2718684 ] [-0.7234151 0.04036397 -0.9596974 0.5176178 -2.1802425 0.24044895 -1.4307371 -0.14605397 0.08191347 0.17189491] [-0.8510126 
-0.14687806 -0.9467824 -0.6247601 -0.7564553 -1.7457156 -1.069973 -1.5879552 0.26237988 -0.16133207] [-1.5193152 -0.53425145 -1.1216416 0.10024387 -1.5774907 -1.0585973 -1.0424035 0.06096214 -0.15385973 -0.792307 ] [-0.68339854 -0.8629177 -1.9933025 -0.91595817 -2.9959445 -0.74171305 -1.2436525 -0.8170359 -0.90577775 0.1538552 ] [-0.6969557 -0.59486735 -1.6472116 -1.8442296 -0.42697963 -1.6809134 -0.20502812 -0.76761633 -1.3525119 -0.5733678 ] [-1.0357299 -0.5957821 -0.3233883 -0.44315785 -1.8095757 -2.204741 -0.4689695 -0.87889826 -1.6878276 -0.9494633 ] [-0.32803172 -1.6789074 -0.7553798 -0.36345673 -1.1631975 -0.8794087 0.231228 -0.869219 0.14749604 -1.2918057 ] [-0.5579374 -0.96394575 -0.14633346 -0.26755434 -1.2047956 -0.8108028 -1.4257525 -1.29752 -1.3049076 -0.887074 ]] [[-0.35751262 -0.45185754 -0.69044 -0.41851008 -0.80116683 -0.8569275 -0.2681085 -0.20386177 -0.6895834 -0.6298265 ] [-0.75273013 -0.6459377 -0.8872334 -0.43614775 -0.56794554 -0.1689887 -0.2909167 -0.5548097 -0.6119244 -0.3276452 ] [-0.5809237 -0.5612137 -0.45028117 -0.6761521 -0.10497093 -0.04840994 -0.5056196 -0.5960916 -0.36599278 -0.4744246 ] [-0.3761144 -0.6757981 -0.55866456 -0.340754 -0.91340375 -0.6942102 -0.74738276 -0.77049893 -0.36941546 -0.24035522] [-0.5704329 -0.42309287 -0.4496377 -0.81202435 -0.5249011 -0.54828876 -0.8635597 -0.9207407 -0.6406882 -0.5704144 ] [-0.6348723 -0.85054773 -0.79543173 -0.8773198 -0.36970326 -0.8733919 -1.2054167 -0.47464684 -0.6016226 -0.8141531 ] [-0.6175629 -0.8114004 -0.66976184 -0.7512721 -0.62412995 -0.8114985 -0.6863291 -0.8747651 -0.47292733 -0.49929208] [-0.3899341 -0.8577033 -0.6442827 -0.7887669 -0.20101264 -0.88012975 -0.3850907 -0.52772707 -0.69511974 -0.5357682 ] [-0.5479102 -0.61976427 -0.51844907 -0.6633078 -0.611339 -1.0811675 -0.82697856 -0.6957769 -0.8060553 -0.3764748 ] [-0.91065145 -1.0233358 -0.31480426 -0.47194514 -0.64317393 -0.54422784 -0.5954092 -0.70863545 -0.09120211 -0.29247928]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_610.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.7829 -0.3869 -0.4725 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.01 * 2.0903 (2,1,.,.) = -1.6375 (3,1,.,.) = -0.1592 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[ 0.76750726 0.8103341 0.76358443 0.8024642 0.80575156 0.78933746 0.74729186 0.7636938 0.8096137 0.805553 ] [ 0.8095608 0.78086376 0.7583386 0.8008243 0.79603547 0.7727645 0.7802757 0.7781249 0.7680696 0.78267723] [ 0.7635263 0.76872534 0.7622181 0.79991955 0.7225155 0.7600312 0.7997335 0.81229997 0.75035113 0.75934035] [ 0.78225684 0.7520903 0.77791244 0.8189727 0.7630639 0.7781407 0.7936314 0.7707032 0.75202847 0.7834172 ] [ 0.78953815 0.7914458 0.75265515 0.77459383 0.77423286 0.79196876 0.8018813 0.7600567 0.77023935 0.7911126 ] [ 0.7693379 0.7678223 0.7863031 0.80202764 0.78882426 0.78210706 0.78132474 0.7889008 0.7596752 0.7985613 ] [ 0.7593891 0.7722589 0.7831522 0.8161969 0.7770688 0.80775136 0.7844159 0.7870061 0.79734004 0.7595494 ] [ 0.7574942 0.78994244 0.78396416 0.76794004 0.7819677 0.7853705 0.80787283 0.7581072 0.78394186 0.78965 ] [ 0.7947726 0.8178495 0.78924775 0.7940102 0.78258896 0.7755814 0.74688077 0.74018157 0.7676292 0.8146024 ] [ 0.7556646 0.77557534 0.80728334 0.76044303 0.7695343 
0.81326497 0.8005801 0.79643404 0.7804985 0.7680229 ]] [[-2.063787 -0.55955535 1.9340609 1.8344415 0.27430996 0.8669365 1.609359 2.4746265 -1.4726273 0.36950797] [ 0.18820472 -1.1107857 -1.2164447 -0.94503933 -0.6481482 1.5763212 -1.0769136 -3.1978595 2.530655 -1.2604502 ] [ 0.7333316 -1.1018876 -2.268729 -1.5948635 -3.2396343 -0.19486901 0.06098307 -1.6705755 -2.2703776 0.49720505] [-1.839201 1.337585 -2.9311998 -0.4684903 -1.0019816 -1.5981824 -0.5884056 -1.1916338 -1.3966817 -0.07470389] [ 0.25941697 -0.71789163 -4.010691 0.25853556 1.1685852 1.2286915 -1.6770854 1.2263957 -0.9675461 -1.120319 ] [-3.1090007 0.5573726 -0.60733104 -0.54850084 -1.3565713 -1.05005 -0.4557059 0.31711838 0.390134 -0.53188086] [-1.0216663 0.7558274 1.4676788 1.8305285 -0.45984456 0.67227226 1.2427582 2.167286 0.17818828 -0.19858578] [ 0.7457456 -0.19217512 -2.903197 -0.40760967 -1.2564845 1.5957012 0.34909025 -2.1173415 -0.28805703 -0.40836915] [-1.4413229 0.5723263 -2.9716842 0.93562835 -0.09968774 -1.8939528 0.6940917 -0.25164017 -2.9927857 -1.1173761 ] [ 2.0698228 -1.2787573 -1.4942338 -2.7235446 -1.9944547 -0.19557016 1.0938454 -0.05325571 -1.2896031 -1.2116302 ]] [[-0.37968883 -0.6926843 -0.5383267 -0.5170036 -0.6908199 -0.5009334 -0.2095824 -0.6822407 -0.5907981 -0.6013604 ] [-0.87561834 -0.7316815 -0.46208388 -0.6910093 -0.22289214 -0.29043037 -0.35932964 -0.7679594 -0.34346625 -0.44135717] [-0.34192163 -0.62198573 -0.5817499 -0.67737204 -0.5757421 -0.59589034 -0.5370542 -0.4551465 -0.09025423 -0.32645226] [-0.07167605 -0.6264367 -0.3856122 -0.3904929 -0.70724213 -0.5087679 -0.52469814 -0.5530099 -0.43368438 -0.53023887] [-0.5167288 -0.47014922 -0.4873952 -0.22490518 -0.5112723 -0.60768 -0.32049182 -0.6983383 -0.58831453 -0.66747266] [-0.52495325 -0.45937213 -0.66625166 -0.3590739 -0.3799117 -0.25901654 -0.6158078 -0.65463763 -0.20103328 -0.47939774] [-0.35145316 -0.4251444 -0.25229192 -0.6537919 -0.34951517 -0.61950254 -0.5689453 -0.46358675 -0.26890898 -0.42552525] 
[-0.50470626 -0.45866367 -0.37715328 -0.35306224 -0.6007452 -0.37712112 -0.7656167 -0.46827662 -0.43561184 -0.38044426] [-0.60219455 -0.46024668 -0.2907074 -0.34814486 -0.47982538 -0.18369892 -0.47667852 -0.5041336 -0.5421678 -0.35097682] [-0.3514209 -0.5669993 -0.2785949 -0.34255305 -0.6686506 -0.53721666 -0.4043148 -0.37503648 -0.76224846 -0.24231888]]]]; ov_res: [[[[ 0.76750726 0.8103341 0.76358443 0.8024642 0.80575156 0.78933746 0.74729186 0.7636938 0.8096137 0.805553 ] [ 0.8095608 0.78086376 0.7583386 0.8008243 0.79603547 0.7727645 0.7802757 0.7781249 0.7680696 0.78267723] [ 0.7635263 0.76872534 0.7622181 0.79991955 0.7225155 0.7600312 0.7997335 0.81229997 0.75035113 0.75934035] [ 0.78225684 0.7520903 0.77791244 0.8189727 0.7630639 0.7781407 0.7936314 0.7707032 0.75202847 0.7834172 ] [ 0.78953815 0.7914458 0.75265515 0.77459383 0.77423286 0.79196876 0.8018813 0.7600567 0.77023935 0.7911126 ] [ 0.7693379 0.7678223 0.7863031 0.80202764 0.78882426 0.78210706 0.78132474 0.7889008 0.7596752 0.7985613 ] [ 0.7593891 0.7722589 0.7831522 0.8161969 0.7770688 0.80775136 0.7844159 0.7870061 0.79734004 0.7595494 ] [ 0.7574942 0.78994244 0.78396416 0.76794004 0.7819677 0.7853705 0.80787283 0.7581072 0.78394186 0.78965 ] [ 0.7947726 0.8178495 0.78924775 0.7940102 0.78258896 0.7755814 0.74688077 0.74018157 0.7676292 0.8146024 ] [ 0.7556646 0.77557534 0.80728334 0.76044303 0.7695343 0.81326497 0.8005801 0.79643404 0.7804985 0.7680229 ]] [[-2.063787 -0.55955535 1.9340609 1.8344415 0.27430996 0.8669365 1.609359 2.4746265 -1.4726273 0.36950797] [ 0.18820472 -1.1107857 -1.2164447 -0.94503933 -0.6481482 1.5763212 -1.0769136 -3.1978595 2.530655 -1.2604502 ] [ 0.7333316 -1.1018876 -2.268729 -1.5948635 -3.2396343 -0.19486901 0.06098307 -1.6705755 -2.2703776 0.49720505] [-1.839201 1.337585 -2.9311998 -0.4684903 -1.0019816 -1.5981824 -0.5884056 -1.1916338 -1.3966817 -0.07470389] [ 0.25941697 -0.71789163 -4.010691 0.25853556 1.1685852 1.2286915 -1.6770854 1.2263957 -0.9675461 -1.120319 ] 
[-3.1090007 0.5573726 -0.60733104 -0.54850084 -1.3565713 -1.05005 -0.4557059 0.31711838 0.390134 -0.53188086] [-1.0216663 0.7558274 1.4676788 1.8305285 -0.45984456 0.67227226 1.2427582 2.167286 0.17818828 -0.19858578] [ 0.7457456 -0.19217512 -2.903197 -0.40760967 -1.2564845 1.5957012 0.34909025 -2.1173415 -0.28805703 -0.40836915] [-1.4413229 0.5723263 -2.9716842 0.93562835 -0.09968774 -1.8939528 0.6940917 -0.25164017 -2.9927857 -1.1173761 ] [ 2.0698228 -1.2787573 -1.4942338 -2.7235446 -1.9944547 -0.19557016 1.0938454 -0.05325571 -1.2896031 -1.2116302 ]] [[-0.37968883 -0.6926843 -0.5383267 -0.5170036 -0.6908199 -0.5009334 -0.2095824 -0.6822407 -0.5907981 -0.6013604 ] [-0.87561834 -0.7316815 -0.46208388 -0.6910093 -0.22289214 -0.29043037 -0.35932964 -0.7679594 -0.34346625 -0.44135717] [-0.34192163 -0.62198573 -0.5817499 -0.67737204 -0.5757421 -0.59589034 -0.5370542 -0.4551465 -0.09025423 -0.32645226] [-0.07167605 -0.6264367 -0.3856122 -0.3904929 -0.70724213 -0.5087679 -0.52469814 -0.5530099 -0.43368438 -0.53023887] [-0.5167288 -0.47014922 -0.4873952 -0.22490518 -0.5112723 -0.60768 -0.32049182 -0.6983383 -0.58831453 -0.66747266] [-0.52495325 -0.45937213 -0.66625166 -0.3590739 -0.3799117 -0.25901654 -0.6158078 -0.65463763 -0.20103328 -0.47939774] [-0.35145316 -0.4251444 -0.25229192 -0.6537919 -0.34951517 -0.61950254 -0.5689453 -0.46358675 -0.26890898 -0.42552525] [-0.50470626 -0.45866367 -0.37715328 -0.35306224 -0.6007452 -0.37712112 -0.7656167 -0.46827662 -0.43561184 -0.38044426] [-0.60219455 -0.46024668 -0.2907074 -0.34814486 -0.47982538 -0.18369892 -0.47667852 -0.5041336 -0.5421678 -0.35097682] [-0.3514209 -0.5669993 -0.2785949 -0.34255305 -0.6686506 -0.53721666 -0.4043148 -0.37503648 -0.76224846 -0.24231888]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [1, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_612.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.57192}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 1.9444 (2,1,.,.) = -1.8046 (3,1,.,.) = -2.0641 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[-2.5345578 -3.086836 -0.6033694 1.0726367 -8.836064 -3.6904955 -1.7303804 -2.6907096 ] [-2.0898705 -1.1559509 -0.56612825 -0.14142704 -0.9872618 2.1593242 5.489551 3.0387301 ] [-1.041703 -0.55215716 -2.14647 -5.2765603 -1.925936 -2.2735746 -4.310589 -0.5853645 ] [-0.4563545 -3.0602176 2.6218724 -1.0812519 -2.799295 -3.236728 -1.3585534 -1.8209352 ] [ 0.01003706 -1.413732 -3.6579013 3.9539576 -0.2897191 -1.9007676 0.32022142 -5.7683167 ] [-4.535598 1.4249643 -5.07633 -5.5744367 -0.17580152 0.97087777 -4.989146 8.142371 ] [-5.434936 -7.088791 2.602507 -3.9234767 1.0147601 -2.4999037 -4.2603955 -5.1851573 ] [-8.275089 -2.0196962 -0.3161931 -5.9612346 -7.245234 -2.1485837 -7.3741455 -2.8391469 ]]]]; ov_res: [[[[-2.5345578 -3.086836 -0.6033694 1.0726367 -8.836064 -3.6904955 -1.7303804 -2.6907096 ] [-2.0898705 -1.1559509 -0.56612825 -0.14142704 -0.9872618 2.1593242 5.489551 3.0387301 ] [-1.041703 -0.55215716 -2.14647 -5.2765603 -1.925936 -2.2735746 -4.310589 -0.5853645 ] [-0.4563545 -3.0602176 2.6218724 -1.0812519 -2.799295 
-3.236728 -1.3585534 -1.8209352 ] [ 0.01003706 -1.413732 -3.6579013 3.9539576 -0.2897191 -1.9007676 0.32022142 -5.7683167 ] [-4.535598 1.4249643 -5.07633 -5.5744367 -0.17580152 0.97087777 -4.989146 8.142371 ] [-5.434936 -7.088791 2.602507 -3.9234767 1.0147601 -2.4999037 -4.2603955 -5.1851573 ] [-8.275089 -2.0196962 -0.3161931 -5.9612346 -7.245234 -2.1485837 -7.3741455 -2.8391469 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [1, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_614.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.2688 -1.2395 -0.6774 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.2351 (2,1,.,.) = -1.4050 (3,1,.,.) = 0.6128 (1,2,.,.) = 0.6396 (2,2,.,.) = 0.5270 (3,2,.,.) = 1.3290 (1,3,.,.) = 0.5169 (2,3,.,.) = -1.2746 (3,3,.,.) = -1.0772 [ CPUFloatType{3,3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[-2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01] [-2.68836915e-01 -1.72558427e+00 3.47537994e-02 1.32893777e+00 3.93610179e-01 -7.91890025e-02 -1.37303662e+00 5.26148975e-01 -4.44775343e-01 -9.65874612e-01 1.54859257e+00 -2.68836915e-01] [-2.68836915e-01 7.13493526e-01 7.11503923e-01 5.23012161e-01 -5.81120849e-01 2.88586915e-01 1.14694953e+00 -1.98834705e+00 -1.71201333e-01 1.18265009e+00 -9.45574403e-01 -2.68836915e-01] [-2.68836915e-01 -1.65094018e+00 5.29074728e-01 -8.92143011e-01 9.02569592e-01 2.09783733e-01 8.35095227e-01 3.80346000e-01 -4.23057079e-01 -7.97268093e-01 -1.29566550e+00 -2.68836915e-01] [-2.68836915e-01 -5.33317924e-01 -1.38676858e+00 -1.45104408e-01 1.24143028e+00 3.49308908e-01 -2.63445228e-01 4.52488244e-01 
-4.19718623e-01 -1.17508054e+00 3.56634080e-01 -2.68836915e-01] [-2.68836915e-01 -1.48956990e+00 -8.13269377e-01 1.02222300e+00 -1.63220358e+00 -6.57933533e-01 -3.99463773e-01 6.76072299e-01 -9.94271994e-01 2.30432481e-01 -1.22206807e-01 -2.68836915e-01] [-2.68836915e-01 8.50688279e-01 4.93749499e-01 -4.82171953e-01 -7.04995513e-01 -6.51350617e-03 1.14345044e-01 -1.25642824e+00 -2.90683240e-01 -3.02222371e-03 5.92923224e-01 -2.68836915e-01] [-2.68836915e-01 1.50611544e+00 4.97595608e-01 -3.71638834e-01 -7.67962575e-01 -7.25291371e-01 -2.36487925e-01 -8.95589590e-01 4.42896366e-01 -2.30872929e-02 -8.41491818e-01 -2.68836915e-01] [-2.68836915e-01 -1.15919769e-01 -4.94575411e-01 6.48068309e-01 1.60104227e+00 -1.47913885e+00 -9.77693021e-01 -9.94766772e-01 -2.06958175e+00 3.45715106e-01 2.93488681e-01 -2.68836915e-01] [-2.68836915e-01 6.60621822e-02 -3.94814909e-02 2.33539224e-01 5.55543184e-01 6.13282084e-01 7.31867373e-01 -7.67296314e-01 6.65938199e-01 6.49662256e-01 -4.92462039e-01 -2.68836915e-01] [-2.68836915e-01 -7.54364133e-01 -5.62165082e-01 9.56956565e-01 -9.90050435e-01 6.33481860e-01 5.52994847e-01 3.01037729e-01 -5.75911164e-01 -2.05924344e+00 -1.83955646e+00 -2.68836915e-01] [-2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01]] [[-1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00] [-1.23948050e+00 -1.19485331e+00 6.02306008e-01 -2.53517389e+00 7.35283375e-01 -5.19251227e-01 -4.97370481e+00 -2.29514265e+00 -2.78405237e+00 -2.48410058e+00 -5.21627712e+00 -1.23948050e+00] [-1.23948050e+00 2.23023009e+00 -8.74222040e-01 -1.40358138e+00 -2.74302483e+00 -1.23327935e+00 -1.29597962e+00 -5.43307960e-01 -1.54842019e+00 -5.87521696e+00 -1.52347195e+00 -1.23948050e+00] [-1.23948050e+00 
-2.41947174e+00 1.34653187e+00 -3.12415266e+00 -1.90700591e+00 4.32731271e-01 -2.73370934e+00 -1.51312804e+00 8.82376194e-01 -1.65381145e+00 -4.17207062e-01 -1.23948050e+00] [-1.23948050e+00 -6.52479708e-01 1.02510715e+00 1.09055209e+00 1.41645956e+00 -1.78875518e+00 1.39048648e+00 6.62758708e-01 -2.72902918e+00 -7.67102718e-01 -3.35210490e+00 -1.23948050e+00] [-1.23948050e+00 6.94210172e-01 3.27770948e-01 -3.22896302e-01 -3.68785119e+00 -2.53943825e+00 6.00791693e-01 2.25742340e+00 -1.54082024e+00 1.11360240e+00 -3.02127552e+00 -1.23948050e+00] [-1.23948050e+00 -2.39542317e+00 -5.96433818e-01 -1.18914509e+00 -3.32716250e+00 -2.94477940e-02 -4.15618706e+00 -2.24723125e+00 -1.12665415e+00 -3.51559925e+00 -3.26156998e+00 -1.23948050e+00] [-1.23948050e+00 -5.36654091e+00 1.57834673e+00 -2.76712894e+00 1.00264430e-01 -5.26361418e+00 6.30519390e-01 8.98632288e-01 -3.55103993e+00 -2.15419650e+00 -9.85255539e-01 -1.23948050e+00] [-1.23948050e+00 2.44551516e+00 3.03864908e+00 -2.79544783e+00 -4.68613911e+00 -7.05169201e-01 7.46155024e-01 -2.24225783e+00 2.03153896e+00 -1.35814953e+00 8.90264750e-01 -1.23948050e+00] [-1.23948050e+00 8.31360102e-01 -1.56634259e+00 8.04442406e-01 5.25148273e-01 -1.28838694e+00 -3.99384558e-01 -1.42974472e+00 -6.77517653e-02 -3.92856622e+00 -3.35005260e+00 -1.23948050e+00] [-1.23948050e+00 -1.79745936e+00 -2.90117788e+00 2.69020104e+00 8.23441505e-01 -1.94007325e+00 1.58667302e+00 -1.24855864e+00 -2.60781956e+00 -1.85986054e+00 -3.41346645e+00 -1.23948050e+00] [-1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00]] [[-6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01] [-6.77427411e-01 1.56893098e+00 1.70489728e+00 -7.04772234e-01 6.17968440e-01 -1.46759415e+00 
-1.73405290e-01 -3.41027355e+00 -1.34023678e+00 2.91843176e-01 -3.91408920e+00 -6.77427411e-01] [-6.77427411e-01 1.07055354e+00 -1.29857910e+00 -1.75970352e+00 -1.69129980e+00 -2.39869905e+00 2.77819514e-01 -3.84434175e+00 2.07709026e+00 8.52164507e-01 -2.58523130e+00 -6.77427411e-01] [-6.77427411e-01 2.81519294e-02 2.66006851e+00 2.60834992e-01 7.20270038e-01 -3.24819803e+00 -2.09853029e+00 -3.34649277e+00 -2.07961941e+00 -7.82317817e-01 -1.14856803e+00 -6.77427411e-01] [-6.77427411e-01 6.27163291e-01 6.25619292e-02 2.85799623e-01 7.20107675e-01 -3.62066746e+00 -4.97297972e-01 1.15923357e+00 -6.41752064e-01 -2.10448074e+00 -8.44682157e-01 -6.77427411e-01] [-6.77427411e-01 3.28405237e+00 6.94033384e-01 -5.47658324e-01 -2.98328936e-01 -2.21386194e+00 1.23107827e+00 4.09492016e-01 -1.99276757e+00 -1.12899029e+00 -6.62121773e-02 -6.77427411e-01] [-6.77427411e-01 -5.75373590e-01 -2.38214064e+00 6.70213461e-01 -3.67701024e-01 9.43378329e-01 -1.41930914e+00 -1.41557693e+00 2.93949604e+00 3.35335493e-01 -5.19513011e-01 -6.77427411e-01] [-6.77427411e-01 -2.35426283e+00 2.88861513e+00 -3.18888378e+00 1.13412428e+00 -2.62390041e+00 -4.27369177e-01 -7.93332219e-01 4.10982013e-01 -2.31046128e+00 7.77569175e-01 -6.77427411e-01] [-6.77427411e-01 3.11765003e+00 7.27778554e-01 -1.12100363e-01 -1.51601100e+00 -3.01679564e+00 -1.10025501e+00 -3.09225988e+00 9.33938742e-01 -2.08464241e+00 1.63190401e+00 -6.77427411e-01] [-6.77427411e-01 3.43300581e-01 -5.17373514e+00 -4.61459398e-01 1.65859210e+00 2.16303766e-01 -2.84069681e+00 -3.31112051e+00 1.06570542e-01 8.97990465e-02 -6.03646231e+00 -6.77427411e-01] [-6.77427411e-01 -1.70849943e+00 -1.07990527e+00 2.71862650e+00 -4.70273495e-01 2.12371922e+00 -5.89521527e-01 5.76553941e-02 -1.63105488e+00 -5.36986065e+00 -1.38433123e+00 -6.77427411e-01] [-6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 
-6.77427411e-01]]]]; ov_res: [[[[-2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01] [-2.68836915e-01 -1.72558427e+00 3.47537994e-02 1.32893777e+00 3.93610179e-01 -7.91890025e-02 -1.37303662e+00 5.26148975e-01 -4.44775343e-01 -9.65874612e-01 1.54859257e+00 -2.68836915e-01] [-2.68836915e-01 7.13493526e-01 7.11503923e-01 5.23012161e-01 -5.81120849e-01 2.88586915e-01 1.14694953e+00 -1.98834705e+00 -1.71201333e-01 1.18265009e+00 -9.45574403e-01 -2.68836915e-01] [-2.68836915e-01 -1.65094018e+00 5.29074728e-01 -8.92143011e-01 9.02569592e-01 2.09783733e-01 8.35095227e-01 3.80346000e-01 -4.23057079e-01 -7.97268093e-01 -1.29566550e+00 -2.68836915e-01] [-2.68836915e-01 -5.33317924e-01 -1.38676858e+00 -1.45104408e-01 1.24143028e+00 3.49308908e-01 -2.63445228e-01 4.52488244e-01 -4.19718623e-01 -1.17508054e+00 3.56634080e-01 -2.68836915e-01] [-2.68836915e-01 -1.48956990e+00 -8.13269377e-01 1.02222300e+00 -1.63220358e+00 -6.57933533e-01 -3.99463773e-01 6.76072299e-01 -9.94271994e-01 2.30432481e-01 -1.22206807e-01 -2.68836915e-01] [-2.68836915e-01 8.50688279e-01 4.93749499e-01 -4.82171953e-01 -7.04995513e-01 -6.51350617e-03 1.14345044e-01 -1.25642824e+00 -2.90683240e-01 -3.02222371e-03 5.92923224e-01 -2.68836915e-01] [-2.68836915e-01 1.50611544e+00 4.97595608e-01 -3.71638834e-01 -7.67962575e-01 -7.25291371e-01 -2.36487925e-01 -8.95589590e-01 4.42896366e-01 -2.30872929e-02 -8.41491818e-01 -2.68836915e-01] [-2.68836915e-01 -1.15919769e-01 -4.94575411e-01 6.48068309e-01 1.60104227e+00 -1.47913885e+00 -9.77693021e-01 -9.94766772e-01 -2.06958175e+00 3.45715106e-01 2.93488681e-01 -2.68836915e-01] [-2.68836915e-01 6.60621822e-02 -3.94814909e-02 2.33539224e-01 5.55543184e-01 6.13282084e-01 7.31867373e-01 -7.67296314e-01 6.65938199e-01 6.49662256e-01 -4.92462039e-01 -2.68836915e-01] [-2.68836915e-01 -7.54364133e-01 -5.62165082e-01 9.56956565e-01 
-9.90050435e-01 6.33481860e-01 5.52994847e-01 3.01037729e-01 -5.75911164e-01 -2.05924344e+00 -1.83955646e+00 -2.68836915e-01] [-2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01 -2.68836915e-01]] [[-1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00] [-1.23948050e+00 -1.19485331e+00 6.02306008e-01 -2.53517389e+00 7.35283375e-01 -5.19251227e-01 -4.97370481e+00 -2.29514265e+00 -2.78405237e+00 -2.48410058e+00 -5.21627712e+00 -1.23948050e+00] [-1.23948050e+00 2.23023009e+00 -8.74222040e-01 -1.40358138e+00 -2.74302483e+00 -1.23327935e+00 -1.29597962e+00 -5.43307960e-01 -1.54842019e+00 -5.87521696e+00 -1.52347195e+00 -1.23948050e+00] [-1.23948050e+00 -2.41947174e+00 1.34653187e+00 -3.12415266e+00 -1.90700591e+00 4.32731271e-01 -2.73370934e+00 -1.51312804e+00 8.82376194e-01 -1.65381145e+00 -4.17207062e-01 -1.23948050e+00] [-1.23948050e+00 -6.52479708e-01 1.02510715e+00 1.09055209e+00 1.41645956e+00 -1.78875518e+00 1.39048648e+00 6.62758708e-01 -2.72902918e+00 -7.67102718e-01 -3.35210490e+00 -1.23948050e+00] [-1.23948050e+00 6.94210172e-01 3.27770948e-01 -3.22896302e-01 -3.68785119e+00 -2.53943825e+00 6.00791693e-01 2.25742340e+00 -1.54082024e+00 1.11360240e+00 -3.02127552e+00 -1.23948050e+00] [-1.23948050e+00 -2.39542317e+00 -5.96433818e-01 -1.18914509e+00 -3.32716250e+00 -2.94477940e-02 -4.15618706e+00 -2.24723125e+00 -1.12665415e+00 -3.51559925e+00 -3.26156998e+00 -1.23948050e+00] [-1.23948050e+00 -5.36654091e+00 1.57834673e+00 -2.76712894e+00 1.00264430e-01 -5.26361418e+00 6.30519390e-01 8.98632288e-01 -3.55103993e+00 -2.15419650e+00 -9.85255539e-01 -1.23948050e+00] [-1.23948050e+00 2.44551516e+00 3.03864908e+00 -2.79544783e+00 -4.68613911e+00 -7.05169201e-01 7.46155024e-01 -2.24225783e+00 2.03153896e+00 
-1.35814953e+00 8.90264750e-01 -1.23948050e+00] [-1.23948050e+00 8.31360102e-01 -1.56634259e+00 8.04442406e-01 5.25148273e-01 -1.28838694e+00 -3.99384558e-01 -1.42974472e+00 -6.77517653e-02 -3.92856622e+00 -3.35005260e+00 -1.23948050e+00] [-1.23948050e+00 -1.79745936e+00 -2.90117788e+00 2.69020104e+00 8.23441505e-01 -1.94007325e+00 1.58667302e+00 -1.24855864e+00 -2.60781956e+00 -1.85986054e+00 -3.41346645e+00 -1.23948050e+00] [-1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00 -1.23948050e+00]] [[-6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01] [-6.77427411e-01 1.56893098e+00 1.70489728e+00 -7.04772234e-01 6.17968440e-01 -1.46759415e+00 -1.73405290e-01 -3.41027355e+00 -1.34023678e+00 2.91843176e-01 -3.91408920e+00 -6.77427411e-01] [-6.77427411e-01 1.07055354e+00 -1.29857910e+00 -1.75970352e+00 -1.69129980e+00 -2.39869905e+00 2.77819514e-01 -3.84434175e+00 2.07709026e+00 8.52164507e-01 -2.58523130e+00 -6.77427411e-01] [-6.77427411e-01 2.81519294e-02 2.66006851e+00 2.60834992e-01 7.20270038e-01 -3.24819803e+00 -2.09853029e+00 -3.34649277e+00 -2.07961941e+00 -7.82317817e-01 -1.14856803e+00 -6.77427411e-01] [-6.77427411e-01 6.27163291e-01 6.25619292e-02 2.85799623e-01 7.20107675e-01 -3.62066746e+00 -4.97297972e-01 1.15923357e+00 -6.41752064e-01 -2.10448074e+00 -8.44682157e-01 -6.77427411e-01] [-6.77427411e-01 3.28405237e+00 6.94033384e-01 -5.47658324e-01 -2.98328936e-01 -2.21386194e+00 1.23107827e+00 4.09492016e-01 -1.99276757e+00 -1.12899029e+00 -6.62121773e-02 -6.77427411e-01] [-6.77427411e-01 -5.75373590e-01 -2.38214064e+00 6.70213461e-01 -3.67701024e-01 9.43378329e-01 -1.41930914e+00 -1.41557693e+00 2.93949604e+00 3.35335493e-01 -5.19513011e-01 -6.77427411e-01] [-6.77427411e-01 -2.35426283e+00 
2.88861513e+00 -3.18888378e+00 1.13412428e+00 -2.62390041e+00 -4.27369177e-01 -7.93332219e-01 4.10982013e-01 -2.31046128e+00 7.77569175e-01 -6.77427411e-01] [-6.77427411e-01 3.11765003e+00 7.27778554e-01 -1.12100363e-01 -1.51601100e+00 -3.01679564e+00 -1.10025501e+00 -3.09225988e+00 9.33938742e-01 -2.08464241e+00 1.63190401e+00 -6.77427411e-01] [-6.77427411e-01 3.43300581e-01 -5.17373514e+00 -4.61459398e-01 1.65859210e+00 2.16303766e-01 -2.84069681e+00 -3.31112051e+00 1.06570542e-01 8.97990465e-02 -6.03646231e+00 -6.77427411e-01] [-6.77427411e-01 -1.70849943e+00 -1.07990527e+00 2.71862650e+00 -4.70273495e-01 2.12371922e+00 -5.89521527e-01 5.76553941e-02 -1.63105488e+00 -5.36986065e+00 -1.38433123e+00 -6.77427411e-01] [-6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01 -6.77427411e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [3, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_616.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={2.47804}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 2.0020 (2,1,.,.) = -0.4347 (3,1,.,.) = 0.3887 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 4.3699603 6.232904 6.4158773 3.9541636 -1.614964 8.054204 6.556745 2.8556373 ] [ 0.6470821 6.96746 3.556323 3.518928 2.456443 3.5573452 2.6421385 1.0849924 ] [ 0.78457534 4.2240467 7.6374946 1.7276213 2.1197257 0.40889168 7.344958 3.7526145 ] [ 4.7089815 2.4316218 4.200573 2.3865275 0.15048647 2.173328 2.2169461 4.2892256 ]]]]; ov_res: [[[[ 4.3699603 6.232904 6.4158773 3.9541636 -1.614964 8.054204 6.556745 2.8556373 ] [ 0.6470821 6.96746 3.556323 3.518928 2.456443 3.5573452 2.6421385 1.0849924 ] [ 0.78457534 4.2240467 7.6374946 1.7276213 2.1197257 0.40889168 7.344958 3.7526145 ] [ 4.7089815 2.4316218 4.200573 2.3865275 0.15048647 2.173328 2.2169461 4.2892256 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [3, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_618.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.1953 -0.3299 0.1438 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.8535 (2,1,.,.) = 0.7838 (3,1,.,.) = -0.6265 (1,2,.,.) = -0.1226 (2,2,.,.) = 0.3075 (3,2,.,.) = 1.2231 (1,3,.,.) = -0.4511 (2,3,.,.) = -0.5437 (3,3,.,.) = -0.3089 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01] [ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01] [ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01] [ 1.95290387e-01 -6.97381616e-01 -9.11472857e-01 -7.66529322e-01 9.60437536e-01 8.17576051e-01 8.71075273e-01 7.76682436e-01 9.65032637e-01 2.16135889e-01 -1.20859408e+00 1.95290387e-01] [ 1.95290387e-01 -1.65358305e+00 -1.81991696e-01 -8.11208487e-02 1.59207270e-01 -1.20375371e+00 
-1.16866946e+00 -2.49109387e+00 2.38430083e-01 4.00377601e-01 5.07873893e-01 1.95290387e-01] [ 1.95290387e-01 -6.49695873e-01 4.81534511e-01 -3.10827196e-01 8.91318142e-01 -1.08507085e+00 3.92622381e-01 -2.07440794e-01 1.59318894e-02 5.72150946e-02 -5.47835708e-01 1.95290387e-01] [ 1.95290387e-01 2.49023855e-01 1.05152464e+00 -1.26548111e-01 8.14747870e-01 8.61718714e-01 6.67832941e-02 3.70768756e-01 1.16766095e-02 1.08329654e+00 -1.13557506e+00 1.95290387e-01] [ 1.95290387e-01 1.56603247e-01 3.40760291e-01 -2.60131299e-01 -1.16962934e+00 3.15973341e-01 7.39538968e-01 1.85545459e-01 -6.65958822e-02 6.41186476e-01 1.82485491e-01 1.95290387e-01] [ 1.95290387e-01 2.18078017e+00 -2.33198315e-01 1.76849455e-01 -6.94145918e-01 8.18995833e-02 3.10216099e-02 4.34244514e-01 8.16277325e-01 -7.46219575e-01 6.05808258e-01 1.95290387e-01] [ 1.95290387e-01 -5.26685417e-01 -1.55761909e+00 -1.59995937e+00 1.19679660e-01 5.06617427e-01 -6.44102156e-01 -2.27615654e-01 1.13777900e+00 1.23038030e+00 -6.99445724e-01 1.95290387e-01] [ 1.95290387e-01 7.73754716e-02 4.22817290e-01 -8.57726634e-01 9.82980132e-01 -1.62234712e+00 1.87498808e+00 -1.72232091e-01 9.20972705e-01 -3.27421874e-02 -3.99330139e-01 1.95290387e-01] [ 1.95290387e-01 1.32555127e+00 -2.71893835e+00 8.49599957e-01 1.50204444e+00 -9.47857499e-02 1.97537065e+00 -4.80484247e-01 9.06667888e-01 -1.45906663e+00 4.21824276e-01 1.95290387e-01] [ 1.95290387e-01 -1.62179136e+00 5.08540392e-01 1.00934100e+00 -7.11545467e-01 1.50926232e-01 9.99215394e-02 1.08466768e+00 6.00269556e-01 1.49756289e+00 4.33905900e-01 1.95290387e-01] [ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01] [ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01] [ 1.95290387e-01 1.95290387e-01 1.95290387e-01 
1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01]] [[-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01] [-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01] [-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01] [-3.29866588e-01 -1.72263578e-01 -4.90156353e-01 -2.81983316e-01 8.25355947e-01 2.98535228e-02 -1.16603684e+00 -6.69267774e-01 1.48222983e-01 -1.35765374e-01 -3.39578956e-01 -3.29866588e-01] [-3.29866588e-01 -3.47878367e-01 -2.35417455e-01 2.66446531e-01 3.02566767e-01 1.28418732e+00 -4.60951447e-01 1.06958485e+00 -2.30180120e+00 -2.30703384e-01 -4.34557259e-01 -3.29866588e-01] [-3.29866588e-01 1.05574441e+00 8.68528485e-02 -7.06113040e-01 -9.33868647e-01 1.72128499e-01 -1.92186022e+00 7.57559001e-01 -8.55522275e-01 -4.99007612e-01 -8.49429667e-01 -3.29866588e-01] [-3.29866588e-01 -7.18382299e-01 -2.87637115e-02 2.65535831e-01 -6.41869307e-02 -1.28567481e+00 8.27903569e-01 -1.52335787e+00 -5.87480068e-01 1.45196259e-01 9.32610631e-02 -3.29866588e-01] [-3.29866588e-01 -1.50370765e+00 9.58959162e-01 1.76546633e-01 1.29137564e+00 -2.39833069e+00 8.13573599e-03 -1.43384218e+00 2.97899842e-02 6.70544446e-01 -1.56656551e+00 -3.29866588e-01] [-3.29866588e-01 -2.01729178e+00 -4.94579911e-01 5.64534962e-02 4.69487965e-01 5.90567112e-01 6.65085018e-01 -5.68917632e-01 -8.17244768e-01 1.25080872e+00 1.59138465e+00 -3.29866588e-01] [-3.29866588e-01 8.03711593e-01 1.16361499e+00 -3.68740618e-01 -1.07052433e+00 -1.01044261e+00 7.37946451e-01 1.48416638e+00 -2.56230664e+00 
-4.80941415e-01 -3.78939927e-01 -3.29866588e-01] [-3.29866588e-01 -1.70326185e+00 -1.02607369e+00 -9.58815038e-01 -1.92487597e+00 1.06440568e+00 -2.42385149e+00 -1.51123261e+00 -6.55550361e-02 5.26903868e-01 5.25515735e-01 -3.29866588e-01] [-3.29866588e-01 -1.02793324e+00 1.43834186e+00 3.31703842e-01 -1.55931377e+00 1.38354111e+00 -1.54394603e+00 7.49649942e-01 -2.23098350e+00 2.61124730e-01 -9.77674782e-01 -3.29866588e-01] [-3.29866588e-01 1.56858063e+00 -1.58264208e+00 -8.31525445e-01 1.12494111e+00 -2.43031442e-01 -1.12469482e+00 8.98224890e-01 -1.01813412e+00 -6.79980874e-01 9.75277007e-01 -3.29866588e-01] [-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01] [-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01] [-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01]] [[ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01] [ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01] [ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01] [ 1.43846288e-01 -1.83413422e+00 -1.27960086e+00 -2.07242918e+00 2.72292781e+00 -1.00202048e+00 1.38484871e+00 8.18848372e-01 2.72189069e+00 -5.46520472e-01 -7.13324398e-02 1.43846288e-01] [ 1.43846288e-01 -1.63798881e+00 -4.83364344e-01 
-1.53627121e+00 5.93260050e-01 -7.89896011e-01 -1.83699846e+00 -3.77741146e+00 -1.57338607e+00 1.45116603e+00 -2.52812624e-01 1.43846288e-01] [ 1.43846288e-01 -1.43092442e+00 1.20340300e+00 8.64925861e-01 8.81933272e-01 5.13492644e-01 -2.11119652e-04 -7.65052080e-01 -3.39934587e-01 2.25758910e+00 4.33535755e-01 1.43846288e-01] [ 1.43846288e-01 -1.22946367e-01 3.26260567e+00 1.67532313e+00 7.38101423e-01 2.97456050e+00 4.04078066e-01 3.07030380e-02 -1.34890282e+00 -5.50565124e-01 -3.11628044e-01 1.43846288e-01] [ 1.43846288e-01 -3.28136945e+00 1.46644139e+00 -9.34441090e-01 1.09888935e+00 -3.58455479e-01 1.03423965e+00 3.40889320e-02 -7.63029158e-02 9.33266938e-01 -5.88148296e-01 1.43846288e-01] [ 1.43846288e-01 1.52025771e+00 -2.63947916e+00 9.15943503e-01 1.02433312e+00 -1.72046050e-01 -1.51328313e+00 9.22978401e-01 1.78784072e+00 -8.46401930e-01 1.52867794e+00 1.43846288e-01] [ 1.43846288e-01 -6.48470700e-01 -2.51468849e+00 -2.62501979e+00 -6.11510873e-01 -3.35379899e-01 1.04323614e+00 1.07332885e+00 3.91755462e-01 1.04766138e-01 4.29278314e-02 1.43846288e-01] [ 1.43846288e-01 -2.13614130e+00 1.39854443e+00 -1.54329777e+00 2.33970237e+00 -1.27509549e-01 -7.88568914e-01 -6.73979759e-01 1.41670346e+00 -1.09949374e+00 -7.44954586e-01 1.43846288e-01] [ 1.43846288e-01 1.87548256e+00 -1.40537632e+00 1.55060327e+00 -9.94045854e-01 1.21891332e+00 1.78176689e+00 -2.26988721e+00 4.66883838e-01 -2.72016644e+00 5.33114016e-01 1.43846288e-01] [ 1.43846288e-01 -2.96831894e+00 -7.33185947e-01 1.86967301e+00 1.46085811e+00 -6.13736868e-01 5.80398262e-01 2.06441092e+00 -1.01487577e+00 -1.56370327e-01 -9.91934299e-01 1.43846288e-01] [ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01] [ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 
1.43846288e-01] [ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01]]]]; ov_res: [[[[ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01] [ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01] [ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01] [ 1.95290387e-01 -6.97381616e-01 -9.11472857e-01 -7.66529322e-01 9.60437536e-01 8.17576051e-01 8.71075273e-01 7.76682436e-01 9.65032637e-01 2.16135889e-01 -1.20859408e+00 1.95290387e-01] [ 1.95290387e-01 -1.65358305e+00 -1.81991696e-01 -8.11208487e-02 1.59207270e-01 -1.20375371e+00 -1.16866946e+00 -2.49109387e+00 2.38430083e-01 4.00377601e-01 5.07873893e-01 1.95290387e-01] [ 1.95290387e-01 -6.49695873e-01 4.81534511e-01 -3.10827196e-01 8.91318142e-01 -1.08507085e+00 3.92622381e-01 -2.07440794e-01 1.59318894e-02 5.72150946e-02 -5.47835708e-01 1.95290387e-01] [ 1.95290387e-01 2.49023855e-01 1.05152464e+00 -1.26548111e-01 8.14747870e-01 8.61718714e-01 6.67832941e-02 3.70768756e-01 1.16766095e-02 1.08329654e+00 -1.13557506e+00 1.95290387e-01] [ 1.95290387e-01 1.56603247e-01 3.40760291e-01 -2.60131299e-01 -1.16962934e+00 3.15973341e-01 7.39538968e-01 1.85545459e-01 -6.65958822e-02 6.41186476e-01 1.82485491e-01 1.95290387e-01] [ 1.95290387e-01 2.18078017e+00 -2.33198315e-01 1.76849455e-01 -6.94145918e-01 8.18995833e-02 3.10216099e-02 4.34244514e-01 8.16277325e-01 -7.46219575e-01 6.05808258e-01 1.95290387e-01] [ 1.95290387e-01 -5.26685417e-01 -1.55761909e+00 -1.59995937e+00 1.19679660e-01 5.06617427e-01 -6.44102156e-01 
-2.27615654e-01 1.13777900e+00 1.23038030e+00 -6.99445724e-01 1.95290387e-01] [ 1.95290387e-01 7.73754716e-02 4.22817290e-01 -8.57726634e-01 9.82980132e-01 -1.62234712e+00 1.87498808e+00 -1.72232091e-01 9.20972705e-01 -3.27421874e-02 -3.99330139e-01 1.95290387e-01] [ 1.95290387e-01 1.32555127e+00 -2.71893835e+00 8.49599957e-01 1.50204444e+00 -9.47857499e-02 1.97537065e+00 -4.80484247e-01 9.06667888e-01 -1.45906663e+00 4.21824276e-01 1.95290387e-01] [ 1.95290387e-01 -1.62179136e+00 5.08540392e-01 1.00934100e+00 -7.11545467e-01 1.50926232e-01 9.99215394e-02 1.08466768e+00 6.00269556e-01 1.49756289e+00 4.33905900e-01 1.95290387e-01] [ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01] [ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01] [ 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01 1.95290387e-01]] [[-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01] [-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01] [-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01] [-3.29866588e-01 -1.72263578e-01 -4.90156353e-01 -2.81983316e-01 8.25355947e-01 2.98535228e-02 -1.16603684e+00 -6.69267774e-01 1.48222983e-01 -1.35765374e-01 -3.39578956e-01 -3.29866588e-01] [-3.29866588e-01 -3.47878367e-01 
-2.35417455e-01 2.66446531e-01 3.02566767e-01 1.28418732e+00 -4.60951447e-01 1.06958485e+00 -2.30180120e+00 -2.30703384e-01 -4.34557259e-01 -3.29866588e-01] [-3.29866588e-01 1.05574441e+00 8.68528485e-02 -7.06113040e-01 -9.33868647e-01 1.72128499e-01 -1.92186022e+00 7.57559001e-01 -8.55522275e-01 -4.99007612e-01 -8.49429667e-01 -3.29866588e-01] [-3.29866588e-01 -7.18382299e-01 -2.87637115e-02 2.65535831e-01 -6.41869307e-02 -1.28567481e+00 8.27903569e-01 -1.52335787e+00 -5.87480068e-01 1.45196259e-01 9.32610631e-02 -3.29866588e-01] [-3.29866588e-01 -1.50370765e+00 9.58959162e-01 1.76546633e-01 1.29137564e+00 -2.39833069e+00 8.13573599e-03 -1.43384218e+00 2.97899842e-02 6.70544446e-01 -1.56656551e+00 -3.29866588e-01] [-3.29866588e-01 -2.01729178e+00 -4.94579911e-01 5.64534962e-02 4.69487965e-01 5.90567112e-01 6.65085018e-01 -5.68917632e-01 -8.17244768e-01 1.25080872e+00 1.59138465e+00 -3.29866588e-01] [-3.29866588e-01 8.03711593e-01 1.16361499e+00 -3.68740618e-01 -1.07052433e+00 -1.01044261e+00 7.37946451e-01 1.48416638e+00 -2.56230664e+00 -4.80941415e-01 -3.78939927e-01 -3.29866588e-01] [-3.29866588e-01 -1.70326185e+00 -1.02607369e+00 -9.58815038e-01 -1.92487597e+00 1.06440568e+00 -2.42385149e+00 -1.51123261e+00 -6.55550361e-02 5.26903868e-01 5.25515735e-01 -3.29866588e-01] [-3.29866588e-01 -1.02793324e+00 1.43834186e+00 3.31703842e-01 -1.55931377e+00 1.38354111e+00 -1.54394603e+00 7.49649942e-01 -2.23098350e+00 2.61124730e-01 -9.77674782e-01 -3.29866588e-01] [-3.29866588e-01 1.56858063e+00 -1.58264208e+00 -8.31525445e-01 1.12494111e+00 -2.43031442e-01 -1.12469482e+00 8.98224890e-01 -1.01813412e+00 -6.79980874e-01 9.75277007e-01 -3.29866588e-01] [-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01] [-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 
-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01] [-3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01 -3.29866588e-01]] [[ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01] [ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01] [ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01] [ 1.43846288e-01 -1.83413422e+00 -1.27960086e+00 -2.07242918e+00 2.72292781e+00 -1.00202048e+00 1.38484871e+00 8.18848372e-01 2.72189069e+00 -5.46520472e-01 -7.13324398e-02 1.43846288e-01] [ 1.43846288e-01 -1.63798881e+00 -4.83364344e-01 -1.53627121e+00 5.93260050e-01 -7.89896011e-01 -1.83699846e+00 -3.77741146e+00 -1.57338607e+00 1.45116603e+00 -2.52812624e-01 1.43846288e-01] [ 1.43846288e-01 -1.43092442e+00 1.20340300e+00 8.64925861e-01 8.81933272e-01 5.13492644e-01 -2.11119652e-04 -7.65052080e-01 -3.39934587e-01 2.25758910e+00 4.33535755e-01 1.43846288e-01] [ 1.43846288e-01 -1.22946367e-01 3.26260567e+00 1.67532313e+00 7.38101423e-01 2.97456050e+00 4.04078066e-01 3.07030380e-02 -1.34890282e+00 -5.50565124e-01 -3.11628044e-01 1.43846288e-01] [ 1.43846288e-01 -3.28136945e+00 1.46644139e+00 -9.34441090e-01 1.09888935e+00 -3.58455479e-01 1.03423965e+00 3.40889320e-02 -7.63029158e-02 9.33266938e-01 -5.88148296e-01 1.43846288e-01] [ 1.43846288e-01 1.52025771e+00 -2.63947916e+00 9.15943503e-01 1.02433312e+00 -1.72046050e-01 -1.51328313e+00 9.22978401e-01 1.78784072e+00 -8.46401930e-01 1.52867794e+00 1.43846288e-01] [ 1.43846288e-01 -6.48470700e-01 -2.51468849e+00 
-2.62501979e+00 -6.11510873e-01 -3.35379899e-01 1.04323614e+00 1.07332885e+00 3.91755462e-01 1.04766138e-01 4.29278314e-02 1.43846288e-01] [ 1.43846288e-01 -2.13614130e+00 1.39854443e+00 -1.54329777e+00 2.33970237e+00 -1.27509549e-01 -7.88568914e-01 -6.73979759e-01 1.41670346e+00 -1.09949374e+00 -7.44954586e-01 1.43846288e-01] [ 1.43846288e-01 1.87548256e+00 -1.40537632e+00 1.55060327e+00 -9.94045854e-01 1.21891332e+00 1.78176689e+00 -2.26988721e+00 4.66883838e-01 -2.72016644e+00 5.33114016e-01 1.43846288e-01] [ 1.43846288e-01 -2.96831894e+00 -7.33185947e-01 1.86967301e+00 1.46085811e+00 -6.13736868e-01 5.80398262e-01 2.06441092e+00 -1.01487577e+00 -1.56370327e-01 -9.91934299e-01 1.43846288e-01] [ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01] [ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01] [ 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01 1.43846288e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_620.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.28077}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.1874 (2,1,.,.) = 0.001 * 2.4152 (3,1,.,.) = -1.0547 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 1.0354435 0.50878227 0.05372314 0.0658329 -0.02148393 0.75234944 0.43322116 -1.2638454 -0.35406446 0.9970731 ] [-0.3847224 -0.9223364 0.8841837 -1.4133265 3.060261 1.8198318 0.65247905 1.326468 2.7488048 -0.65054566] [ 0.76071346 0.05097926 -0.7057714 -0.93513507 -0.44653338 -1.1163464 1.233361 1.3796082 -0.03346562 1.2142773 ] [ 0.3434799 -0.10066348 0.60905707 -0.721353 0.08909653 -0.16024071 0.36867228 -1.3812664 -0.7734483 2.226132 ] [ 0.80739945 -0.31696773 0.81473565 -0.2762373 1.2296696 -0.39896917 1.0951517 0.41676426 0.26567197 0.9365798 ] [ 0.8843341 1.7000024 -0.283428 1.2914107 0.4664063 -0.73896736 -0.6049858 -0.6781502 0.6644047 0.5205852 ] [ 1.0569975 -0.37708342 -0.6703703 -1.1740372 1.3058724 0.1576939 1.413343 1.7120724 1.4979265 1.0382665 ] [ 1.893729 0.1760251 -0.2659951 1.0921259 1.5761099 -0.28478873 0.58620346 -0.2206186 1.86724 1.5473588 ]]]]; ov_res: [[[[ 1.0354435 0.50878227 0.05372314 0.0658329 -0.02148393 0.75234944 0.43322116 -1.2638454 
-0.35406446 0.9970731 ] [-0.3847224 -0.9223364 0.8841837 -1.4133265 3.060261 1.8198318 0.65247905 1.326468 2.7488048 -0.65054566] [ 0.76071346 0.05097926 -0.7057714 -0.93513507 -0.44653338 -1.1163464 1.233361 1.3796082 -0.03346562 1.2142773 ] [ 0.3434799 -0.10066348 0.60905707 -0.721353 0.08909653 -0.16024071 0.36867228 -1.3812664 -0.7734483 2.226132 ] [ 0.80739945 -0.31696773 0.81473565 -0.2762373 1.2296696 -0.39896917 1.0951517 0.41676426 0.26567197 0.9365798 ] [ 0.8843341 1.7000024 -0.283428 1.2914107 0.4664063 -0.73896736 -0.6049858 -0.6781502 0.6644047 0.5205852 ] [ 1.0569975 -0.37708342 -0.6703703 -1.1740372 1.3058724 0.1576939 1.413343 1.7120724 1.4979265 1.0382665 ] [ 1.893729 0.1760251 -0.2659951 1.0921259 1.5761099 -0.28478873 0.58620346 -0.2206186 1.86724 1.5473588 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [0, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_622.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.2067 -0.8347 -1.5077 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.5279 (2,1,.,.) = -0.4583 (3,1,.,.) = 0.4466 (1,2,.,.) = 0.9163 (2,2,.,.) = -0.5281 (3,2,.,.) = -0.4031 (1,3,.,.) = 0.6528 (2,3,.,.) = 0.5155 (3,3,.,.) = 1.4102 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 0.20665196 -0.2176849 1.0043521 -0.3491082 0.91955125 0.22938743 1.4942601 -0.10956132 2.4937072 -1.3591197 0.5505835 0.20665196] [ 0.20665196 -0.7104722 0.8495201 -0.14886466 0.6543727 1.585483 0.10887916 -0.5538509 -1.6006972 1.1867155 1.7353125 0.20665196] [ 0.20665196 -0.3326939 0.21343288 0.68520486 -1.4074624 0.4276079 0.55057025 0.5993105 1.9867574 -2.002342 -0.34729996 0.20665196] [ 0.20665196 1.7117345 -1.2837216 -1.0553095 -0.03302306 0.29867616 0.00908028 -0.5501751 0.82332194 -1.2176453 -1.6301228 0.20665196] [ 0.20665196 -1.2857821 -0.8930954 1.3131455 2.5085237 1.0984446 0.33299136 -0.5216826 0.5924592 1.1470851 -0.10413328 0.20665196] [ 0.20665196 -0.27510238 2.7436023 0.04274482 -1.6001774 -0.794827 -1.1886197 -3.214377 -0.611094 2.752274 1.8883032 0.20665196] [ 0.20665196 -0.39809683 1.5101104 -0.19479078 -1.479605 
-2.1658156 -1.4354393 1.7344033 0.4015194 -0.5359043 -1.3579572 0.20665196] [ 0.20665196 0.06596859 0.83721757 0.8674356 0.54076374 0.63493997 -0.9883684 2.2755682 1.5809531 -0.6110493 -1.6669071 0.20665196] [ 0.20665196 1.2491355 -0.2881073 -0.40814504 -0.8362552 -1.0216005 2.4639292 0.9982878 -0.55661094 -1.6987884 0.3954368 0.20665196] [ 0.20665196 -0.8328171 0.8143796 -0.75162613 -2.4361815 0.25504553 1.2721602 1.8744656 -1.5793386 1.8504986 0.13515052 0.20665196]] [[-0.8346681 -1.6862717 0.02650589 -0.39078635 -0.91475314 0.12279224 -1.0473318 -1.6440425 1.3122666 -1.2255852 -0.9457264 -0.8346681 ] [-0.8346681 -1.5549378 0.6478202 -1.5120273 -0.7984743 0.6009242 -0.26402974 0.24175233 0.02635896 0.6847308 -0.9680047 -0.8346681 ] [-0.8346681 -1.3555764 -0.7570843 -0.38721314 -1.9470696 0.49076754 -1.3496549 -1.5525842 -1.437789 0.9129372 -0.38570458 -0.8346681 ] [-0.8346681 0.09704304 -1.2583458 -1.1523865 -1.6549423 -2.647133 -1.4064143 0.16409642 -0.62277025 -1.0666449 -0.70566034 -0.8346681 ] [-0.8346681 -2.083524 -0.10569042 -1.0971217 -0.2621091 -1.315092 -1.2402651 -1.0007944 0.4861688 -1.2978925 -0.8934217 -0.8346681 ] [-0.8346681 -2.9840572 0.6683448 -1.1335342 -0.04535365 -1.0295203 -0.5458486 -1.13177 -2.68125 -0.609094 0.58580893 -0.8346681 ] [-0.8346681 1.2180362 -2.1344562 -0.55095685 0.629258 -0.54373705 -1.5399938 -1.510495 -1.0804322 -0.7626512 -2.0654309 -0.8346681 ] [-0.8346681 -1.0376023 -0.8837614 0.74132115 -0.22539228 -1.3915701 -1.7699755 -1.7578616 -1.2205846 -0.7412437 -1.3667421 -0.8346681 ] [-0.8346681 0.08363295 -1.6241663 -1.654012 -0.42232004 -0.4954153 -0.48027816 -1.4492997 -1.0570228 -0.19647431 -0.8937308 -0.8346681 ] [-0.8346681 0.25425524 -2.208416 -0.34012777 -0.8382754 0.8863805 0.5390782 0.11201978 -2.3355114 -0.31254274 -0.81100655 -0.8346681 ]] [[-1.5077226 -4.9029408 -2.0541556 0.1873734 -2.1364396 0.61838555 -1.1230726 -3.0392087 -0.3309387 -1.7110282 -0.9036984 -1.5077226 ] [-1.5077226 -0.26661348 -2.4230647 
-0.6087095 -1.4972422 -0.11399901 -1.133948 -1.8296359 -1.3842578 -0.08690691 -4.009976 -1.5077226 ] [-1.5077226 -2.350738 -2.1098585 -0.6194655 -1.5465709 -0.6481839 -1.4503198 -0.5377659 -1.8634825 0.3978132 -2.254931 -1.5077226 ] [-1.5077226 0.2903 -2.1177473 -1.4470774 -2.9950507 -2.4293969 -2.888 3.4630282 -3.3842022 -0.60930705 -1.9452575 -1.5077226 ] [-1.5077226 -2.5828881 -1.1739575 -1.4432498 0.44181454 -2.357984 -0.30374694 -0.0210706 -0.87231463 -2.2056878 -1.9456238 -1.5077226 ] [-1.5077226 -2.9167538 1.5477352 -3.8539646 0.46162486 -3.1787028 -1.1962439 -4.945855 -2.6504197 -1.6329101 0.7193606 -1.5077226 ] [-1.5077226 1.292743 -2.180566 -0.69727576 -0.10155606 1.161691 -1.0748241 -1.5921824 -2.6926765 -1.648997 -2.1810787 -1.5077226 ] [-1.5077226 0.09927201 -1.4995965 0.32564354 -2.2984638 -2.8833694 -2.8470953 -3.6651337 -3.1682315 -2.0556417 -1.0145891 -1.5077226 ] [-1.5077226 -1.9823303 -4.2201433 -2.9573786 -2.5224364 0.37595105 0.27689636 -0.11406529 -2.4666939 -3.761536 0.1497842 -1.5077226 ] [-1.5077226 -4.0207253 -2.48561 -0.51497144 -1.9010954 -0.41793656 -1.4105229 0.13242793 -2.0673041 -1.0876194 -0.9327206 -1.5077226 ]]]]; ov_res: [[[[ 0.20665196 -0.2176849 1.0043521 -0.3491082 0.91955125 0.22938743 1.4942601 -0.10956132 2.4937072 -1.3591197 0.5505835 0.20665196] [ 0.20665196 -0.7104722 0.8495201 -0.14886466 0.6543727 1.585483 0.10887916 -0.5538509 -1.6006972 1.1867155 1.7353125 0.20665196] [ 0.20665196 -0.3326939 0.21343288 0.68520486 -1.4074624 0.4276079 0.55057025 0.5993105 1.9867574 -2.002342 -0.34729996 0.20665196] [ 0.20665196 1.7117345 -1.2837216 -1.0553095 -0.03302306 0.29867616 0.00908028 -0.5501751 0.82332194 -1.2176453 -1.6301228 0.20665196] [ 0.20665196 -1.2857821 -0.8930954 1.3131455 2.5085237 1.0984446 0.33299136 -0.5216826 0.5924592 1.1470851 -0.10413328 0.20665196] [ 0.20665196 -0.27510238 2.7436023 0.04274482 -1.6001774 -0.794827 -1.1886197 -3.214377 -0.611094 2.752274 1.8883032 0.20665196] [ 0.20665196 -0.39809683 
1.5101104 -0.19479078 -1.479605 -2.1658156 -1.4354393 1.7344033 0.4015194 -0.5359043 -1.3579572 0.20665196] [ 0.20665196 0.06596859 0.83721757 0.8674356 0.54076374 0.63493997 -0.9883684 2.2755682 1.5809531 -0.6110493 -1.6669071 0.20665196] [ 0.20665196 1.2491355 -0.2881073 -0.40814504 -0.8362552 -1.0216005 2.4639292 0.9982878 -0.55661094 -1.6987884 0.3954368 0.20665196] [ 0.20665196 -0.8328171 0.8143796 -0.75162613 -2.4361815 0.25504553 1.2721602 1.8744656 -1.5793386 1.8504986 0.13515052 0.20665196]] [[-0.8346681 -1.6862717 0.02650589 -0.39078635 -0.91475314 0.12279224 -1.0473318 -1.6440425 1.3122666 -1.2255852 -0.9457264 -0.8346681 ] [-0.8346681 -1.5549378 0.6478202 -1.5120273 -0.7984743 0.6009242 -0.26402974 0.24175233 0.02635896 0.6847308 -0.9680047 -0.8346681 ] [-0.8346681 -1.3555764 -0.7570843 -0.38721314 -1.9470696 0.49076754 -1.3496549 -1.5525842 -1.437789 0.9129372 -0.38570458 -0.8346681 ] [-0.8346681 0.09704304 -1.2583458 -1.1523865 -1.6549423 -2.647133 -1.4064143 0.16409642 -0.62277025 -1.0666449 -0.70566034 -0.8346681 ] [-0.8346681 -2.083524 -0.10569042 -1.0971217 -0.2621091 -1.315092 -1.2402651 -1.0007944 0.4861688 -1.2978925 -0.8934217 -0.8346681 ] [-0.8346681 -2.9840572 0.6683448 -1.1335342 -0.04535365 -1.0295203 -0.5458486 -1.13177 -2.68125 -0.609094 0.58580893 -0.8346681 ] [-0.8346681 1.2180362 -2.1344562 -0.55095685 0.629258 -0.54373705 -1.5399938 -1.510495 -1.0804322 -0.7626512 -2.0654309 -0.8346681 ] [-0.8346681 -1.0376023 -0.8837614 0.74132115 -0.22539228 -1.3915701 -1.7699755 -1.7578616 -1.2205846 -0.7412437 -1.3667421 -0.8346681 ] [-0.8346681 0.08363295 -1.6241663 -1.654012 -0.42232004 -0.4954153 -0.48027816 -1.4492997 -1.0570228 -0.19647431 -0.8937308 -0.8346681 ] [-0.8346681 0.25425524 -2.208416 -0.34012777 -0.8382754 0.8863805 0.5390782 0.11201978 -2.3355114 -0.31254274 -0.81100655 -0.8346681 ]] [[-1.5077226 -4.9029408 -2.0541556 0.1873734 -2.1364396 0.61838555 -1.1230726 -3.0392087 -0.3309387 -1.7110282 -0.9036984 -1.5077226 ] [-1.5077226 
-0.26661348 -2.4230647 -0.6087095 -1.4972422 -0.11399901 -1.133948 -1.8296359 -1.3842578 -0.08690691 -4.009976 -1.5077226 ] [-1.5077226 -2.350738 -2.1098585 -0.6194655 -1.5465709 -0.6481839 -1.4503198 -0.5377659 -1.8634825 0.3978132 -2.254931 -1.5077226 ] [-1.5077226 0.2903 -2.1177473 -1.4470774 -2.9950507 -2.4293969 -2.888 3.4630282 -3.3842022 -0.60930705 -1.9452575 -1.5077226 ] [-1.5077226 -2.5828881 -1.1739575 -1.4432498 0.44181454 -2.357984 -0.30374694 -0.0210706 -0.87231463 -2.2056878 -1.9456238 -1.5077226 ] [-1.5077226 -2.9167538 1.5477352 -3.8539646 0.46162486 -3.1787028 -1.1962439 -4.945855 -2.6504197 -1.6329101 0.7193606 -1.5077226 ] [-1.5077226 1.292743 -2.180566 -0.69727576 -0.10155606 1.161691 -1.0748241 -1.5921824 -2.6926765 -1.648997 -2.1810787 -1.5077226 ] [-1.5077226 0.09927201 -1.4995965 0.32564354 -2.2984638 -2.8833694 -2.8470953 -3.6651337 -3.1682315 -2.0556417 -1.0145891 -1.5077226 ] [-1.5077226 -1.9823303 -4.2201433 -2.9573786 -2.5224364 0.37595105 0.27689636 -0.11406529 -2.4666939 -3.761536 0.1497842 -1.5077226 ] [-1.5077226 -4.0207253 -2.48561 -0.51497144 -1.9010954 -0.41793656 -1.4105229 0.13242793 -2.0673041 -1.0876194 -0.9327206 -1.5077226 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_624.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.4500 0.3144 -0.3155 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.2893 (2,1,.,.) = -0.5542 (3,1,.,.) = 0.01 * 2.7592 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 0.26252896 0.17861536 0.06833994 0.3906152 0.33528668 0.7981845 0.56404644 -0.09475335 0.9909668 0.44965127] [ 0.44794348 0.4009907 0.82243466 0.62216914 0.44953942 -0.07229641 -0.25006196 0.527444 0.5748997 -0.1622118 ] [ 0.11107111 0.9160515 0.08662313 0.4819959 0.5209096 0.6582515 0.06640717 0.5138353 0.6984961 0.55442023] [ 0.25902513 0.31581864 0.7916798 0.39708337 0.9399121 0.87125015 0.3605702 0.12368664 0.96844757 0.44859996] [ 0.5758902 0.6048377 -0.07186398 0.68079436 0.01769295 0.41968027 1.0377727 0.7157166 0.23592928 0.8093325 ] [ 0.3533175 0.3138994 -0.28345922 0.21483089 0.3964934 0.3325083 0.59255207 0.76092815 0.881674 0.27905458] [ 0.65460217 0.20010959 0.63679224 0.7236047 0.31667343 -0.13056359 0.74153394 0.4027601 0.5738653 0.61481833] [ 0.72358596 0.360505 0.6427448 0.21538928 0.35320532 0.20781583 0.4749945 0.6649289 0.168405 0.21245211]] [[ 0.5595015 1.1084309 -0.5102315 0.01786417 -0.23729318 -0.13562223 0.5358858 
1.4048903 0.39016992 0.40022734] [ 0.5408344 -0.45769954 0.63083565 -0.3862632 1.0894127 0.9922156 -0.2717207 -0.30915582 0.27995524 0.54665744] [-0.14400479 0.45479202 0.72956693 -0.99801403 0.45116562 -0.16909796 0.1717232 -0.27420223 -0.16394246 -0.79645747] [-0.14195466 1.0320162 0.6207588 1.1859224 -0.2824902 -0.3495316 0.312034 -0.00218615 0.8388565 -0.3266514 ] [ 0.8173503 0.46129042 0.60896134 0.5290464 0.8364157 -0.5763943 0.24338049 0.03464755 0.51230353 0.7710049 ] [ 0.54141235 0.18477419 0.26479572 1.754539 0.12071882 0.26506683 -0.1197874 1.3549378 -0.15621254 -0.6600757 ] [ 0.6923201 0.34128162 0.7930181 0.547832 1.2757804 -1.0742335 0.4276236 0.8644247 0.82536155 0.49901432] [ 0.15377957 0.59408665 -0.55933017 0.04933074 0.88926727 -1.129569 0.75051105 0.275174 1.5011606 1.1039162 ]] [[-0.32235697 -0.30650923 -0.3033573 -0.31640232 -0.35015666 -0.34335566 -0.28371537 -0.33027655 -0.2877567 -0.27506566] [-0.3468548 -0.31122553 -0.33532938 -0.28577423 -0.33617494 -0.3395812 -0.3186342 -0.35027876 -0.31530097 -0.3410062 ] [-0.33851904 -0.27748778 -0.36352518 -0.26901516 -0.2516281 -0.34114683 -0.32480934 -0.30751282 -0.28082517 -0.3065666 ] [-0.35465842 -0.30370048 -0.2712756 -0.30698046 -0.3090497 -0.28307834 -0.32715628 -0.30323312 -0.36777902 -0.28327578] [-0.30878 -0.2968764 -0.33688438 -0.30334112 -0.31420383 -0.34645274 -0.32463443 -0.3498159 -0.30401638 -0.34474424] [-0.32732296 -0.33851036 -0.32190368 -0.32196674 -0.3394095 -0.3073434 -0.2971828 -0.29423645 -0.34160936 -0.32325172] [-0.3351195 -0.30666772 -0.31121993 -0.3209278 -0.33903888 -0.25891 -0.2862169 -0.30029696 -0.3422892 -0.338933 ] [-0.31945226 -0.32116243 -0.28777152 -0.32895246 -0.28203687 -0.29762748 -0.33317766 -0.32816824 -0.311961 -0.29130924]]]]; ov_res: [[[[ 0.26252896 0.17861536 0.06833994 0.3906152 0.33528668 0.7981845 0.56404644 -0.09475335 0.9909668 0.44965127] [ 0.44794348 0.4009907 0.82243466 0.62216914 0.44953942 -0.07229641 -0.25006196 0.527444 0.5748997 -0.1622118 ] 
[ 0.11107111 0.9160515 0.08662313 0.4819959 0.5209096 0.6582515 0.06640717 0.5138353 0.6984961 0.55442023] [ 0.25902513 0.31581864 0.7916798 0.39708337 0.9399121 0.87125015 0.3605702 0.12368664 0.96844757 0.44859996] [ 0.5758902 0.6048377 -0.07186398 0.68079436 0.01769295 0.41968027 1.0377727 0.7157166 0.23592928 0.8093325 ] [ 0.3533175 0.3138994 -0.28345922 0.21483089 0.3964934 0.3325083 0.59255207 0.76092815 0.881674 0.27905458] [ 0.65460217 0.20010959 0.63679224 0.7236047 0.31667343 -0.13056359 0.74153394 0.4027601 0.5738653 0.61481833] [ 0.72358596 0.360505 0.6427448 0.21538928 0.35320532 0.20781583 0.4749945 0.6649289 0.168405 0.21245211]] [[ 0.5595015 1.1084309 -0.5102315 0.01786417 -0.23729318 -0.13562223 0.5358858 1.4048903 0.39016992 0.40022734] [ 0.5408344 -0.45769954 0.63083565 -0.3862632 1.0894127 0.9922156 -0.2717207 -0.30915582 0.27995524 0.54665744] [-0.14400479 0.45479202 0.72956693 -0.99801403 0.45116562 -0.16909796 0.1717232 -0.27420223 -0.16394246 -0.79645747] [-0.14195466 1.0320162 0.6207588 1.1859224 -0.2824902 -0.3495316 0.312034 -0.00218615 0.8388565 -0.3266514 ] [ 0.8173503 0.46129042 0.60896134 0.5290464 0.8364157 -0.5763943 0.24338049 0.03464755 0.51230353 0.7710049 ] [ 0.54141235 0.18477419 0.26479572 1.754539 0.12071882 0.26506683 -0.1197874 1.3549378 -0.15621254 -0.6600757 ] [ 0.6923201 0.34128162 0.7930181 0.547832 1.2757804 -1.0742335 0.4276236 0.8644247 0.82536155 0.49901432] [ 0.15377957 0.59408665 -0.55933017 0.04933074 0.88926727 -1.129569 0.75051105 0.275174 1.5011606 1.1039162 ]] [[-0.32235697 -0.30650923 -0.3033573 -0.31640232 -0.35015666 -0.34335566 -0.28371537 -0.33027655 -0.2877567 -0.27506566] [-0.3468548 -0.31122553 -0.33532938 -0.28577423 -0.33617494 -0.3395812 -0.3186342 -0.35027876 -0.31530097 -0.3410062 ] [-0.33851904 -0.27748778 -0.36352518 -0.26901516 -0.2516281 -0.34114683 -0.32480934 -0.30751282 -0.28082517 -0.3065666 ] [-0.35465842 -0.30370048 -0.2712756 -0.30698046 -0.3090497 -0.28307834 -0.32715628 -0.30323312 
-0.36777902 -0.28327578] [-0.30878 -0.2968764 -0.33688438 -0.30334112 -0.31420383 -0.34645274 -0.32463443 -0.3498159 -0.30401638 -0.34474424] [-0.32732296 -0.33851036 -0.32190368 -0.32196674 -0.3394095 -0.3073434 -0.2971828 -0.29423645 -0.34160936 -0.32325172] [-0.3351195 -0.30666772 -0.31121993 -0.3209278 -0.33903888 -0.25891 -0.2862169 -0.30029696 -0.3422892 -0.338933 ] [-0.31945226 -0.32116243 -0.28777152 -0.32895246 -0.28203687 -0.29762748 -0.33317766 -0.32816824 -0.311961 -0.29130924]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 1], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_626.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.8157 1.3678 0.5895 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.1409 (2,1,.,.) = -0.5760 (3,1,.,.) = -0.7251 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[-0.81567514 -0.7636918 -1.0482622 -0.7091595 -0.71210223 -0.8230043 -0.85597426 -0.589262 -0.8192689 -0.977616 -0.9252454 -0.81567514] [-0.81567514 -0.59642005 -0.7219794 -0.95705414 -0.97501016 -0.8765232 -0.8522771 -0.863655 -0.77228457 -0.7754446 -0.72760946 -0.81567514] [-0.81567514 -1.1810193 -0.8540476 -0.8048266 -0.93283916 -0.65308076 -0.6000943 -0.780919 -0.70898855 -0.5725835 -0.83959633 -0.81567514] [-0.81567514 -0.68102163 -0.869798 -0.6860188 -0.80966866 -1.2079991 -0.6310797 -1.0056263 -0.9024206 -0.75309485 -0.9775064 -0.81567514] [-0.81567514 -0.71972525 -0.79231846 -0.75337607 -0.95794713 -1.1677909 -0.94403625 -0.9514279 -0.9151396 -0.66849625 -0.8289637 -0.81567514] [-0.81567514 -0.8897432 -0.8897049 -1.3001482 -0.8975998 -0.81929344 -1.0966325 -0.8263832 -0.7378211 -0.72683644 -0.83274823 -0.81567514] [-0.81567514 -1.0074844 -0.65721333 -0.9018425 -1.1817851 -0.7044633 -0.7992631 -0.6949855 -0.65244836 -0.7955021 -0.66200125 
-0.81567514] [-0.81567514 -0.65283465 -0.52037466 -0.8317886 -0.70836323 -0.72644854 -0.6332628 -1.2280052 -0.6313747 -0.71932817 -0.99096334 -0.81567514] [-0.81567514 -0.7090334 -1.1313055 -0.6804923 -0.86322594 -0.8320334 -0.77151936 -0.78761953 -0.6784489 -0.8542134 -0.7426911 -0.81567514] [-0.81567514 -0.8810684 -1.0719835 -0.83906394 -0.50572824 -0.6540992 -0.70351386 -0.7977363 -0.86700135 -0.8919145 -0.76195794 -0.81567514]] [[ 1.36781 1.900071 2.314101 1.8348216 1.4158645 1.5668992 1.9111695 1.5156367 1.8778105 1.9004074 0.68294954 1.36781 ] [ 1.36781 0.9551621 1.0948949 1.4751115 1.6343873 0.26664174 0.99834573 1.6632175 1.1322699 1.0607698 1.4798801 1.36781 ] [ 1.36781 1.1077479 1.0916934 1.1349578 2.7563229 0.24599373 1.3663538 -0.17950225 1.1432855 1.8194382 0.8274103 1.36781 ] [ 1.36781 1.3949212 1.8736436 0.77111197 0.8310177 0.8194145 1.329459 1.8005934 1.7399602 1.6836683 0.6646647 1.36781 ] [ 1.36781 1.2328931 1.5235103 1.0109832 0.9897984 2.5385566 1.1870468 1.8090224 2.4448295 1.1268821 1.4879677 1.36781 ] [ 1.36781 1.3202538 1.6413221 2.0830188 1.1342882 0.857939 2.1341953 0.6478128 2.1963253 1.3589643 0.9338227 1.36781 ] [ 1.36781 1.9432433 0.8436307 1.1771529 1.8372257 1.330006 0.63835466 1.3167554 1.7509396 0.679841 1.1103462 1.36781 ] [ 1.36781 0.5261365 0.7874132 1.6542267 2.066225 1.6706614 -0.6295048 1.4415166 1.4009764 0.8522281 1.5324744 1.36781 ] [ 1.36781 1.4592627 1.0351363 0.72315705 1.863262 1.610304 1.5348102 1.647405 1.219695 1.3663651 2.1525884 1.36781 ] [ 1.36781 1.3923386 -0.07673907 1.4561956 1.1369983 1.9028401 1.712723 1.0101788 1.8138354 1.402836 0.17446482 1.36781 ]] [[ 0.58945453 0.5008173 1.5676391 0.11389965 0.41412917 0.67888093 1.3335772 0.6926818 0.06198996 1.4584434 1.3184372 0.58945453] [ 0.58945453 -0.14633209 0.6933326 0.8545923 0.58633435 0.650264 -0.64822125 0.05891889 1.0970436 0.44533658 0.1710523 0.58945453] [ 0.58945453 0.95016843 0.666715 0.4929763 0.9650887 1.7374196 0.17250794 -0.41797948 0.40225682 
0.11140606 0.09121355 0.58945453] [ 0.58945453 0.06614548 -0.39844066 0.67216635 -0.12746096 0.09834409 0.50791824 0.616667 0.88315964 -0.47395837 0.5978948 0.58945453] [ 0.58945453 -0.08696103 0.6874781 2.1382937 1.2620035 1.5515614 0.75745964 1.5473089 0.19463399 0.83685404 0.46673155 0.58945453] [ 0.58945453 1.0932872 0.06962389 -0.26732433 2.3912354 -0.0789749 0.5603402 0.20261663 0.60214186 0.47426033 0.2940648 0.58945453] [ 0.58945453 0.69198966 -0.18525118 1.7270058 -0.95810914 0.13138121 -0.37707955 0.62660027 -0.65116286 1.9648048 0.6539855 0.58945453] [ 0.58945453 -0.54460454 -0.8601339 0.6942005 1.1809511 -0.01374424 0.6511187 0.8168743 0.1222271 0.6510334 0.43299866 0.58945453] [ 0.58945453 -0.4057876 -0.52254105 -0.78895986 0.26050442 0.28571585 0.6173832 0.03982359 0.95461076 0.25811324 0.79766214 0.58945453] [ 0.58945453 0.58964694 -0.02968687 0.94236124 0.52420735 0.60364544 -0.08791131 1.5650473 -0.8183465 -0.73699224 0.41843748 0.58945453]]]]; ov_res: [[[[-0.81567514 -0.7636918 -1.0482622 -0.7091595 -0.71210223 -0.8230043 -0.85597426 -0.589262 -0.8192689 -0.977616 -0.9252454 -0.81567514] [-0.81567514 -0.59642005 -0.7219794 -0.95705414 -0.97501016 -0.8765232 -0.8522771 -0.863655 -0.77228457 -0.7754446 -0.72760946 -0.81567514] [-0.81567514 -1.1810192 -0.8540476 -0.8048266 -0.93283916 -0.65308076 -0.6000943 -0.780919 -0.70898855 -0.5725835 -0.83959633 -0.81567514] [-0.81567514 -0.68102163 -0.869798 -0.6860189 -0.80966866 -1.2079991 -0.6310797 -1.0056263 -0.9024206 -0.75309485 -0.9775064 -0.81567514] [-0.81567514 -0.71972525 -0.79231846 -0.75337607 -0.95794713 -1.1677909 -0.9440362 -0.9514279 -0.9151396 -0.66849625 -0.8289637 -0.81567514] [-0.81567514 -0.8897432 -0.8897049 -1.3001482 -0.8975998 -0.81929344 -1.0966325 -0.8263832 -0.7378211 -0.72683644 -0.83274823 -0.81567514] [-0.81567514 -1.0074844 -0.65721333 -0.9018425 -1.1817851 -0.7044633 -0.7992631 -0.6949855 -0.65244836 -0.7955021 -0.66200125 -0.81567514] [-0.81567514 -0.65283465 -0.52037466 
-0.8317886 -0.70836323 -0.72644854 -0.6332628 -1.2280053 -0.63137466 -0.71932817 -0.9909634 -0.81567514] [-0.81567514 -0.7090334 -1.1313055 -0.6804923 -0.86322594 -0.8320334 -0.77151936 -0.78761953 -0.6784489 -0.8542134 -0.7426911 -0.81567514] [-0.81567514 -0.8810684 -1.0719835 -0.83906394 -0.50572824 -0.6540992 -0.70351386 -0.7977363 -0.86700135 -0.8919145 -0.76195794 -0.81567514]] [[ 1.36781 1.900071 2.314101 1.8348216 1.4158645 1.5668992 1.9111695 1.5156367 1.8778104 1.9004074 0.68294954 1.36781 ] [ 1.36781 0.9551621 1.0948949 1.4751115 1.6343873 0.26664168 0.99834573 1.6632175 1.1322699 1.0607698 1.4798801 1.36781 ] [ 1.36781 1.1077479 1.0916935 1.1349577 2.7563226 0.24599378 1.3663538 -0.17950223 1.1432855 1.8194382 0.8274103 1.36781 ] [ 1.36781 1.3949212 1.8736436 0.77111197 0.8310177 0.8194145 1.329459 1.8005933 1.7399602 1.6836683 0.6646647 1.36781 ] [ 1.36781 1.2328931 1.5235103 1.0109832 0.9897985 2.5385566 1.1870469 1.8090223 2.4448297 1.1268821 1.4879677 1.36781 ] [ 1.36781 1.3202538 1.6413221 2.0830188 1.1342882 0.857939 2.1341953 0.6478128 2.1963253 1.3589643 0.9338227 1.36781 ] [ 1.36781 1.9432433 0.8436307 1.1771529 1.8372257 1.330006 0.63835466 1.3167554 1.7509396 0.679841 1.1103462 1.36781 ] [ 1.36781 0.5261365 0.7874132 1.6542267 2.066225 1.6706616 -0.6295048 1.4415166 1.4009764 0.8522281 1.5324744 1.36781 ] [ 1.36781 1.4592627 1.0351363 0.72315705 1.863262 1.610304 1.5348102 1.647405 1.219695 1.3663651 2.1525884 1.36781 ] [ 1.36781 1.3923386 -0.0767391 1.4561956 1.1369983 1.9028401 1.7127229 1.0101788 1.8138354 1.402836 0.17446478 1.36781 ]] [[ 0.58945453 0.5008173 1.5676391 0.11389966 0.41412917 0.67888093 1.333577 0.69268185 0.06198996 1.4584434 1.3184372 0.58945453] [ 0.58945453 -0.14633209 0.6933326 0.8545923 0.58633435 0.650264 -0.6482212 0.05891886 1.0970436 0.44533655 0.1710523 0.58945453] [ 0.58945453 0.95016843 0.666715 0.4929763 0.9650888 1.7374196 0.17250794 -0.41797948 0.40225682 0.11140606 0.09121356 0.58945453] [ 0.58945453 
0.06614546 -0.39844066 0.67216635 -0.12746094 0.09834409 0.50791824 0.616667 0.88315964 -0.47395834 0.5978948 0.58945453] [ 0.58945453 -0.08696102 0.6874781 2.138294 1.2620035 1.5515614 0.75745964 1.5473089 0.19463399 0.83685404 0.46673155 0.58945453] [ 0.58945453 1.0932872 0.06962386 -0.26732433 2.3912354 -0.07897488 0.5603402 0.20261665 0.60214186 0.47426033 0.2940648 0.58945453] [ 0.58945453 0.69198966 -0.18525118 1.7270058 -0.95810914 0.13138121 -0.37707958 0.62660027 -0.6511629 1.9648048 0.6539855 0.58945453] [ 0.58945453 -0.54460454 -0.8601339 0.6942005 1.1809511 -0.01374424 0.6511187 0.8168743 0.1222271 0.65103346 0.43299866 0.58945453] [ 0.58945453 -0.40578762 -0.52254105 -0.78895986 0.26050442 0.28571585 0.6173832 0.03982362 0.95461076 0.25811324 0.79766214 0.58945453] [ 0.58945453 0.58964694 -0.02968688 0.94236124 0.52420735 0.60364544 -0.08791129 1.5650471 -0.8183465 -0.7369922 0.41843748 0.58945453]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [1, 0], 'dilations': [2, 2], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_628.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.3108 0.8472 -0.2052 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 2.2381 (2,1,.,.) = 0.6030 (3,1,.,.) = -0.3453 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%10) fw_re: [[[[-2.65022898e+00 1.08712041e+00 5.89443803e-01 3.69609737e+00 2.27871728e+00 -5.20100832e-01 -8.77634287e-01 -1.77838230e+00 -2.31505084e+00 -7.09157228e-01] [ 8.03537726e-01 -1.71425116e+00 -3.48313689e+00 -4.30453390e-01 -2.81535172e+00 3.71383858e+00 -2.87191606e+00 -2.01832557e+00 -1.64408052e+00 1.28193033e+00] [ 3.51909548e-01 -2.61301994e-01 2.63040471e+00 -7.27381110e-01 -2.58916795e-01 2.61642790e+00 6.04402661e-01 -9.87236261e-01 1.74245745e-01 -1.57349661e-01] [ 8.86373997e-01 -1.48696661e+00 -1.80536056e+00 1.40364254e+00 4.23796684e-01 2.44356298e+00 3.06857675e-01 -8.97585154e-01 7.69724369e-01 5.50940704e+00] [ 2.66688156e+00 3.15114880e+00 4.52956963e+00 3.54435176e-01 -1.84631062e+00 -2.94247794e+00 -3.71765828e+00 5.30154276e+00 1.70782340e+00 -1.19997609e+00] [-3.97486687e+00 -1.33721530e-02 -1.44200444e+00 -3.70664907e+00 8.13266873e-01 1.66247439e+00 4.17221737e+00 
-5.32993126e+00 -1.99986935e+00 1.83403051e+00] [-1.63614774e+00 6.77300811e-01 -1.79162741e+00 4.01727343e+00 -1.57483399e-01 -3.74552250e+00 -3.10725498e+00 -9.49206471e-01 8.73488665e-01 3.93782234e+00] [-2.30169034e+00 -1.19714379e-01 -2.92282248e+00 1.97350562e+00 2.72366381e+00 2.91785568e-01 -2.39779568e+00 -1.81006849e+00 9.64694381e-01 3.80359602e+00]] [[ 6.72113895e-02 1.83112741e+00 1.84487045e-01 7.90278614e-01 1.86499023e+00 1.35323989e+00 1.65011573e+00 -3.24604869e-01 9.69587207e-01 7.78626382e-01] [ 1.28405929e-01 1.91364694e+00 6.41658545e-01 1.51853859e-01 1.21137810e+00 1.00035262e+00 -3.31443548e-03 -3.02851200e-04 1.40064192e+00 1.06839454e+00] [ 4.01144117e-01 1.41794372e+00 6.45670712e-01 1.46732330e+00 2.07688236e+00 1.29010987e+00 1.39936495e+00 3.81907344e-01 4.40409809e-01 8.86181951e-01] [ 2.33842254e-01 8.67312610e-01 4.08673197e-01 6.09048724e-01 1.33407998e+00 7.68494666e-01 1.38713944e+00 2.35752106e-01 7.53003001e-01 1.42974794e-01] [ 1.19701684e-01 3.21860850e-01 2.24542081e-01 1.22630858e+00 1.87081707e+00 6.76892459e-01 -6.90721273e-02 1.73628926e-01 9.41864371e-01 6.84371591e-01] [ 1.11288857e+00 1.24222112e+00 1.18129241e+00 6.31598413e-01 4.66241926e-01 1.20002103e+00 1.69083130e+00 -3.31136703e-01 1.13020468e+00 1.41197348e+00] [ 1.06160748e+00 1.22514272e+00 1.66782355e+00 1.63029766e+00 2.50634074e-01 1.25423276e+00 9.21386123e-01 1.18168426e+00 1.13064122e+00 8.56925189e-01] [ 3.76564682e-01 8.84802461e-01 8.54674816e-01 1.39854264e+00 1.15511680e+00 1.02946401e+00 1.66435695e+00 1.36087370e+00 1.36180663e+00 2.41839468e-01]] [[-1.46735653e-01 2.65722156e-01 -4.12070155e-02 4.96042818e-02 -3.04727495e-01 8.17606330e-01 -3.44942182e-01 -4.13628399e-01 -2.72919118e-01 -1.26409948e-01] [-4.55329776e-01 -4.67337668e-02 -4.39239442e-01 -1.15050219e-01 -5.15421331e-01 -2.47582525e-01 -3.02144051e-01 -1.15215644e-01 -7.09483087e-01 -4.25248027e-01] [-4.10512239e-02 -6.02865219e-01 3.45000923e-01 1.40266731e-01 -8.75573874e-01 
-6.16894066e-01 -7.87101567e-01 -4.69621062e-01 6.11779869e-01 -4.92797852e-01] [ 1.86523631e-01 -1.53653666e-01 2.79794633e-02 1.74832344e-02 -5.31567037e-01 1.27916798e-01 -5.86372435e-01 -5.92725396e-01 -6.28074765e-01 -2.40383640e-01] [-3.90144050e-01 -7.29033887e-01 1.66732416e-01 1.19996950e-01 2.89072812e-01 -7.41814733e-01 -3.42219532e-01 1.04245543e-03 -1.05527930e-01 -4.48198617e-02] [-1.78562164e-01 -6.39111549e-02 -1.58991069e-01 -1.29933223e-01 -8.96347463e-01 -5.35462141e-01 4.55723524e-01 -2.40051493e-01 7.00621843e-01 -3.06063920e-01] [-1.40444607e-01 -1.15959890e-01 2.77188361e-01 3.49715412e-01 9.41979140e-02 -5.18628806e-02 -2.12523654e-01 -5.48748374e-01 1.76574811e-01 -1.22278780e-01] [-3.31693888e-01 2.62201011e-01 -8.22491765e-01 -4.25689220e-01 1.46526530e-01 2.13409960e-02 -1.84389055e-02 -4.02322829e-01 -5.91363430e-01 -8.47035527e-01]]]]; ov_res: [[[[-2.65022898e+00 1.08712041e+00 5.89443803e-01 3.69609737e+00 2.27871728e+00 -5.20100832e-01 -8.77634287e-01 -1.77838230e+00 -2.31505084e+00 -7.09157228e-01] [ 8.03537726e-01 -1.71425116e+00 -3.48313689e+00 -4.30453390e-01 -2.81535172e+00 3.71383858e+00 -2.87191606e+00 -2.01832557e+00 -1.64408052e+00 1.28193033e+00] [ 3.51909548e-01 -2.61301994e-01 2.63040471e+00 -7.27381110e-01 -2.58916795e-01 2.61642790e+00 6.04402661e-01 -9.87236261e-01 1.74245745e-01 -1.57349661e-01] [ 8.86373997e-01 -1.48696661e+00 -1.80536056e+00 1.40364254e+00 4.23796684e-01 2.44356298e+00 3.06857675e-01 -8.97585154e-01 7.69724369e-01 5.50940704e+00] [ 2.66688156e+00 3.15114880e+00 4.52956963e+00 3.54435176e-01 -1.84631062e+00 -2.94247794e+00 -3.71765828e+00 5.30154276e+00 1.70782340e+00 -1.19997609e+00] [-3.97486687e+00 -1.33721530e-02 -1.44200444e+00 -3.70664907e+00 8.13266873e-01 1.66247439e+00 4.17221737e+00 -5.32993126e+00 -1.99986935e+00 1.83403051e+00] [-1.63614774e+00 6.77300811e-01 -1.79162741e+00 4.01727343e+00 -1.57483399e-01 -3.74552250e+00 -3.10725498e+00 -9.49206471e-01 8.73488665e-01 3.93782234e+00] 
[-2.30169034e+00 -1.19714379e-01 -2.92282248e+00 1.97350562e+00 2.72366381e+00 2.91785568e-01 -2.39779568e+00 -1.81006849e+00 9.64694381e-01 3.80359602e+00]] [[ 6.72113895e-02 1.83112741e+00 1.84487045e-01 7.90278614e-01 1.86499023e+00 1.35323989e+00 1.65011573e+00 -3.24604869e-01 9.69587207e-01 7.78626382e-01] [ 1.28405929e-01 1.91364694e+00 6.41658545e-01 1.51853859e-01 1.21137810e+00 1.00035262e+00 -3.31443548e-03 -3.02851200e-04 1.40064192e+00 1.06839454e+00] [ 4.01144117e-01 1.41794372e+00 6.45670712e-01 1.46732330e+00 2.07688236e+00 1.29010987e+00 1.39936495e+00 3.81907344e-01 4.40409809e-01 8.86181951e-01] [ 2.33842254e-01 8.67312610e-01 4.08673197e-01 6.09048724e-01 1.33407998e+00 7.68494666e-01 1.38713944e+00 2.35752106e-01 7.53003001e-01 1.42974794e-01] [ 1.19701684e-01 3.21860850e-01 2.24542081e-01 1.22630858e+00 1.87081707e+00 6.76892459e-01 -6.90721273e-02 1.73628926e-01 9.41864371e-01 6.84371591e-01] [ 1.11288857e+00 1.24222112e+00 1.18129241e+00 6.31598413e-01 4.66241926e-01 1.20002103e+00 1.69083130e+00 -3.31136703e-01 1.13020468e+00 1.41197348e+00] [ 1.06160748e+00 1.22514272e+00 1.66782355e+00 1.63029766e+00 2.50634074e-01 1.25423276e+00 9.21386123e-01 1.18168426e+00 1.13064122e+00 8.56925189e-01] [ 3.76564682e-01 8.84802461e-01 8.54674816e-01 1.39854264e+00 1.15511680e+00 1.02946401e+00 1.66435695e+00 1.36087370e+00 1.36180663e+00 2.41839468e-01]] [[-1.46735653e-01 2.65722156e-01 -4.12070155e-02 4.96042818e-02 -3.04727495e-01 8.17606330e-01 -3.44942182e-01 -4.13628399e-01 -2.72919118e-01 -1.26409948e-01] [-4.55329776e-01 -4.67337668e-02 -4.39239442e-01 -1.15050219e-01 -5.15421331e-01 -2.47582525e-01 -3.02144051e-01 -1.15215644e-01 -7.09483087e-01 -4.25248027e-01] [-4.10512239e-02 -6.02865219e-01 3.45000923e-01 1.40266731e-01 -8.75573874e-01 -6.16894066e-01 -7.87101567e-01 -4.69621062e-01 6.11779869e-01 -4.92797852e-01] [ 1.86523631e-01 -1.53653666e-01 2.79794633e-02 1.74832344e-02 -5.31567037e-01 1.27916798e-01 -5.86372435e-01 -5.92725396e-01 
-6.28074765e-01 -2.40383640e-01] [-3.90144050e-01 -7.29033887e-01 1.66732416e-01 1.19996950e-01 2.89072812e-01 -7.41814733e-01 -3.42219532e-01 1.04245543e-03 -1.05527930e-01 -4.48198617e-02] [-1.78562164e-01 -6.39111549e-02 -1.58991069e-01 -1.29933223e-01 -8.96347463e-01 -5.35462141e-01 4.55723524e-01 -2.40051493e-01 7.00621843e-01 -3.06063920e-01] [-1.40444607e-01 -1.15959890e-01 2.77188361e-01 3.49715412e-01 9.41979140e-02 -5.18628806e-02 -2.12523654e-01 -5.48748374e-01 1.76574811e-01 -1.22278780e-01] [-3.31693888e-01 2.62201011e-01 -8.22491765e-01 -4.25689220e-01 1.46526530e-01 2.13409960e-02 -1.84389055e-02 -4.02322829e-01 -5.91363430e-01 -8.47035527e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [2, 2], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_630.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.1272 -1.3734 0.8978 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.6215 (2,1,.,.) = 2.0016 (3,1,.,.) = 2.5124 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[-1.00212526e+00 3.65736425e-01 -3.83394025e-02 3.23410451e-01 -6.65808260e-01 1.49593019e+00 -4.96669471e-01 6.62029147e-01 -2.00748786e-01 -3.94332737e-01] [-1.26709804e-01 6.43224239e-01 3.26834053e-01 -1.38265282e-01 7.82321870e-01 1.69195816e-01 -4.48699296e-02 -1.67400949e-02 -4.22093630e-01 -4.29599017e-01] [ 1.39260697e+00 -9.01543379e-01 -1.43316209e+00 -5.80904245e-01 -2.88936377e-01 -8.49944055e-01 3.14945638e-01 -4.45764601e-01 -8.10571909e-01 -1.30475551e-01] [-6.20230377e-01 9.67615321e-02 6.87622353e-02 -2.36440614e-01 -1.15695846e+00 -1.58532277e-01 -1.07345974e+00 4.47125196e-01 -3.82741570e-01 -5.53369939e-01] [ 1.00335050e+00 -1.34092169e-02 -5.76327503e-01 2.33932778e-01 -9.46458638e-01 6.28270805e-01 -2.74670869e-01 -2.12687207e-03 -2.07641348e-01 -6.84242547e-01] [ 9.93928984e-02 -1.14931367e-01 1.01302601e-01 -4.34144557e-01 1.08663881e+00 2.80050915e-02 -1.70429695e+00 6.15994513e-01 3.17911327e-01 9.69957113e-02] [ 6.24256253e-01 
-7.34102547e-01 -6.91661611e-02 -3.72157276e-01 -3.29482079e-01 8.38307515e-02 -7.87208796e-01 7.00972080e-01 -5.78255177e-01 -6.05658174e-01] [ 8.61374974e-01 3.81823897e-01 -1.31723928e+00 2.98019022e-01 3.90301012e-02 -8.49866569e-01 1.26456857e+00 -5.08532166e-01 3.46137643e-01 1.81293190e-01] [-1.83038693e-02 9.65012014e-01 -9.28710639e-01 -7.31960297e-01 -7.42887557e-02 -1.01153874e+00 9.04617250e-01 -4.98245545e-02 -1.78898461e-02 -7.02261090e-01] [ 4.36707705e-01 -1.25139928e+00 3.02898586e-01 -3.22924107e-01 9.69396591e-01 -9.59858119e-01 -6.41544759e-01 4.70438272e-01 2.64405787e-01 -4.69834119e-01]] [[-3.44782782e+00 2.09088707e+00 2.07174921e+00 -7.14962184e-01 3.19164634e-01 -4.83454895e+00 -3.09863734e+00 2.44469956e-01 -1.17744458e+00 -8.01972687e-01] [-1.63847148e+00 -1.74341190e+00 -3.08592653e+00 -2.33274245e+00 -2.33525085e+00 1.34816527e-01 5.53459287e-01 -5.97212851e-01 1.08620560e+00 -1.18693507e+00] [-3.63236952e+00 -2.87505651e+00 -3.47041559e+00 -1.28669894e+00 1.33043659e+00 6.81267619e-01 -4.76490402e+00 2.47905612e+00 -2.48704642e-01 -6.45603466e+00] [-3.17642522e+00 -1.27818894e+00 3.53087813e-01 -1.77547598e+00 -5.85476971e+00 -4.57019901e+00 -1.92175674e+00 1.03185856e+00 -1.22135782e+00 -8.04655552e-01] [ 1.51613986e+00 -1.18220556e+00 2.98860908e+00 -2.66099763e+00 -2.48272395e+00 2.12136340e+00 -4.28553438e+00 1.38438389e-01 -3.67032576e+00 -3.51319265e+00] [-3.23014641e+00 4.54810560e-01 -1.00936449e+00 -3.49677491e+00 -9.74305272e-01 -3.47638607e+00 3.79602730e-01 1.50666654e+00 -2.81180739e+00 -2.49287486e+00] [-1.20231219e-01 -3.35472047e-01 -5.01014233e+00 -1.40249848e+00 -4.63581896e+00 1.50169170e+00 -9.82305110e-01 -4.04755640e+00 -2.90433955e+00 -1.10550463e+00] [-1.74811625e+00 -1.97540498e+00 -1.59927630e+00 -4.50697947e+00 6.86093032e-01 2.05130076e+00 -3.77465153e+00 -2.97102332e+00 -3.03637886e+00 -2.28717971e+00] [ 2.19784641e+00 -1.73056448e+00 6.81836665e-01 -6.87594786e-02 -1.31840718e+00 -1.39347827e+00 
-1.42813480e+00 -3.74370384e+00 -4.17639542e+00 9.79219794e-01] [-2.75857353e+00 -8.77751708e-01 4.13055748e-01 -4.19446677e-01 -1.14850068e+00 -3.03966022e+00 7.71015704e-01 -9.24554110e-01 3.48351598e+00 2.06323171e+00]] [[ 1.57573760e+00 2.96263266e+00 2.71536183e+00 9.96369600e-01 -3.67375898e+00 -6.62991583e-01 3.28018874e-01 2.12196565e+00 -7.20002174e-01 2.04424047e+00] [-9.73403931e-01 8.73113751e-01 2.42745829e+00 5.54855919e+00 -3.79977083e+00 1.96930563e+00 -2.34542346e+00 2.24861908e+00 4.12383223e+00 1.49755251e+00] [ 5.64646673e+00 5.20583248e+00 2.81789064e+00 -2.18381214e+00 1.29669154e+00 -2.49285460e+00 2.26429963e+00 9.54792440e-01 1.87764555e-01 3.06126118e+00] [ 4.67842579e+00 2.80410242e+00 -3.37249613e+00 -9.34149325e-01 -8.57362092e-01 3.58767009e+00 -1.57359374e+00 -4.77095985e+00 8.73022842e+00 1.36107028e+00] [ 7.09996796e+00 -2.23486245e-01 1.06444991e+00 5.96894026e-01 2.22240853e+00 1.13867593e+00 2.59585428e+00 -2.03777337e+00 2.59103632e+00 3.93671346e+00] [ 4.69492853e-01 2.30535293e+00 5.20130968e+00 6.02653384e-01 -1.18368775e-01 1.73649991e+00 -1.00779128e+00 1.89958334e+00 3.05184811e-01 3.13917696e-01] [-6.23770571e+00 -2.68961906e+00 -9.58602965e-01 4.44189930e+00 -2.09704018e+00 -2.45620799e+00 1.47229946e+00 -1.72294870e-01 -1.26291692e+00 3.32638764e+00] [ 4.27951050e+00 -2.62730193e+00 3.00414777e+00 -6.41618252e-01 1.82839286e+00 3.43928242e+00 -2.24726748e+00 2.30130339e+00 3.57839799e+00 -4.69162285e-01] [-4.27687496e-01 -2.33180285e+00 3.27976108e+00 2.69075346e+00 -1.11355603e+00 -1.19576907e+00 -3.67167383e-01 -2.26214841e-01 2.60336328e+00 2.99100780e+00] [ 3.42646194e+00 4.08077955e+00 -2.33860588e+00 9.51105475e-01 1.06156516e+00 -4.88920540e-01 1.66122603e+00 3.73166609e+00 3.05319619e+00 2.87587094e+00]]]]; ov_res: [[[[-1.00212526e+00 3.65736425e-01 -3.83394025e-02 3.23410451e-01 -6.65808260e-01 1.49593019e+00 -4.96669471e-01 6.62029147e-01 -2.00748786e-01 -3.94332737e-01] [-1.26709804e-01 6.43224239e-01 
3.26834053e-01 -1.38265282e-01 7.82321870e-01 1.69195816e-01 -4.48699296e-02 -1.67400949e-02 -4.22093630e-01 -4.29599017e-01] [ 1.39260697e+00 -9.01543379e-01 -1.43316209e+00 -5.80904245e-01 -2.88936377e-01 -8.49944055e-01 3.14945638e-01 -4.45764601e-01 -8.10571909e-01 -1.30475551e-01] [-6.20230377e-01 9.67615321e-02 6.87622353e-02 -2.36440614e-01 -1.15695846e+00 -1.58532277e-01 -1.07345974e+00 4.47125196e-01 -3.82741570e-01 -5.53369939e-01] [ 1.00335050e+00 -1.34092169e-02 -5.76327503e-01 2.33932778e-01 -9.46458638e-01 6.28270805e-01 -2.74670869e-01 -2.12687207e-03 -2.07641348e-01 -6.84242547e-01] [ 9.93928984e-02 -1.14931367e-01 1.01302601e-01 -4.34144557e-01 1.08663881e+00 2.80050915e-02 -1.70429695e+00 6.15994513e-01 3.17911327e-01 9.69957113e-02] [ 6.24256253e-01 -7.34102547e-01 -6.91661611e-02 -3.72157276e-01 -3.29482079e-01 8.38307515e-02 -7.87208796e-01 7.00972080e-01 -5.78255177e-01 -6.05658174e-01] [ 8.61374974e-01 3.81823897e-01 -1.31723928e+00 2.98019022e-01 3.90301012e-02 -8.49866569e-01 1.26456857e+00 -5.08532166e-01 3.46137643e-01 1.81293190e-01] [-1.83038693e-02 9.65012014e-01 -9.28710639e-01 -7.31960297e-01 -7.42887557e-02 -1.01153874e+00 9.04617250e-01 -4.98245545e-02 -1.78898461e-02 -7.02261090e-01] [ 4.36707705e-01 -1.25139928e+00 3.02898586e-01 -3.22924107e-01 9.69396591e-01 -9.59858119e-01 -6.41544759e-01 4.70438272e-01 2.64405787e-01 -4.69834119e-01]] [[-3.44782782e+00 2.09088707e+00 2.07174921e+00 -7.14962184e-01 3.19164634e-01 -4.83454895e+00 -3.09863734e+00 2.44469956e-01 -1.17744458e+00 -8.01972687e-01] [-1.63847148e+00 -1.74341190e+00 -3.08592653e+00 -2.33274245e+00 -2.33525085e+00 1.34816527e-01 5.53459287e-01 -5.97212851e-01 1.08620560e+00 -1.18693507e+00] [-3.63236952e+00 -2.87505651e+00 -3.47041559e+00 -1.28669894e+00 1.33043659e+00 6.81267619e-01 -4.76490402e+00 2.47905612e+00 -2.48704642e-01 -6.45603466e+00] [-3.17642522e+00 -1.27818894e+00 3.53087813e-01 -1.77547598e+00 -5.85476971e+00 -4.57019901e+00 -1.92175674e+00 
1.03185856e+00 -1.22135782e+00 -8.04655552e-01] [ 1.51613986e+00 -1.18220556e+00 2.98860908e+00 -2.66099763e+00 -2.48272395e+00 2.12136340e+00 -4.28553438e+00 1.38438389e-01 -3.67032576e+00 -3.51319265e+00] [-3.23014641e+00 4.54810560e-01 -1.00936449e+00 -3.49677491e+00 -9.74305272e-01 -3.47638607e+00 3.79602730e-01 1.50666654e+00 -2.81180739e+00 -2.49287486e+00] [-1.20231219e-01 -3.35472047e-01 -5.01014233e+00 -1.40249848e+00 -4.63581896e+00 1.50169170e+00 -9.82305110e-01 -4.04755640e+00 -2.90433955e+00 -1.10550463e+00] [-1.74811625e+00 -1.97540498e+00 -1.59927630e+00 -4.50697947e+00 6.86093032e-01 2.05130076e+00 -3.77465153e+00 -2.97102332e+00 -3.03637886e+00 -2.28717971e+00] [ 2.19784641e+00 -1.73056448e+00 6.81836665e-01 -6.87594786e-02 -1.31840718e+00 -1.39347827e+00 -1.42813480e+00 -3.74370384e+00 -4.17639542e+00 9.79219794e-01] [-2.75857353e+00 -8.77751708e-01 4.13055748e-01 -4.19446677e-01 -1.14850068e+00 -3.03966022e+00 7.71015704e-01 -9.24554110e-01 3.48351598e+00 2.06323171e+00]] [[ 1.57573760e+00 2.96263266e+00 2.71536183e+00 9.96369600e-01 -3.67375898e+00 -6.62991583e-01 3.28018874e-01 2.12196565e+00 -7.20002174e-01 2.04424047e+00] [-9.73403931e-01 8.73113751e-01 2.42745829e+00 5.54855919e+00 -3.79977083e+00 1.96930563e+00 -2.34542346e+00 2.24861908e+00 4.12383223e+00 1.49755251e+00] [ 5.64646673e+00 5.20583248e+00 2.81789064e+00 -2.18381214e+00 1.29669154e+00 -2.49285460e+00 2.26429963e+00 9.54792440e-01 1.87764555e-01 3.06126118e+00] [ 4.67842579e+00 2.80410242e+00 -3.37249613e+00 -9.34149325e-01 -8.57362092e-01 3.58767009e+00 -1.57359374e+00 -4.77095985e+00 8.73022842e+00 1.36107028e+00] [ 7.09996796e+00 -2.23486245e-01 1.06444991e+00 5.96894026e-01 2.22240853e+00 1.13867593e+00 2.59585428e+00 -2.03777337e+00 2.59103632e+00 3.93671346e+00] [ 4.69492853e-01 2.30535293e+00 5.20130968e+00 6.02653384e-01 -1.18368775e-01 1.73649991e+00 -1.00779128e+00 1.89958334e+00 3.05184811e-01 3.13917696e-01] [-6.23770571e+00 -2.68961906e+00 -9.58602965e-01 
4.44189930e+00 -2.09704018e+00 -2.45620799e+00 1.47229946e+00 -1.72294870e-01 -1.26291692e+00 3.32638764e+00] [ 4.27951050e+00 -2.62730193e+00 3.00414777e+00 -6.41618252e-01 1.82839286e+00 3.43928242e+00 -2.24726748e+00 2.30130339e+00 3.57839799e+00 -4.69162285e-01] [-4.27687496e-01 -2.33180285e+00 3.27976108e+00 2.69075346e+00 -1.11355603e+00 -1.19576907e+00 -3.67167383e-01 -2.26214841e-01 2.60336328e+00 2.99100780e+00] [ 3.42646194e+00 4.08077955e+00 -2.33860588e+00 9.51105475e-01 1.06156516e+00 -4.88920540e-01 1.66122603e+00 3.73166609e+00 3.05319619e+00 2.87587094e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 1], 'bias_shape': [1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_632.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.67096}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.1221 (2,1,.,.) = -0.2445 (3,1,.,.) = -1.0745 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%10) fw_re: [[[[-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-1.2959955 -0.8185172 0.48078787 0.52320206 -1.4074037 0.45120704 -3.8463874 -1.6529294 0.21940875 -0.98508596] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-1.0740336 1.4099554 -2.9855723 -0.88147557 -1.6753181 0.2956183 -3.7859726 -1.27505 -1.7400908 -5.3667016 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-3.1560104 -3.418068 -2.7943444 -3.0511885 -0.6469133 -0.0522033 -2.9920738 -2.050566 -2.0898216 -3.1267755 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-2.9259548 -2.9915333 -1.7545762 -3.9254885 -3.911848 1.2058715 -1.7424726 -0.39932418 -1.1419003 -0.9512275 ] [-1.6709605 -1.6709605 -1.6709605 
-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-1.4819623 -1.7415512 -1.4119158 -4.2146115 -0.85984784 -2.2650747 -1.4220777 -0.4544319 -2.5404313 -3.8748026 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-0.7323223 -2.4752703 -0.0658828 2.1060176 -0.08330071 -3.2233298 -3.0571191 -0.28963923 -0.93137795 1.3136722 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-1.5090944 -2.9079173 -3.1431324 -1.5120666 -2.0761158 -2.6512377 1.7285012 0.2633127 0.30193377 -1.0998702 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-3.8007574 -3.4539018 0.07121003 -2.4001784 -2.9843793 0.66983116 -1.1312394 -0.93081105 -0.593776 -1.7577163 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ]]]]; ov_res: [[[[-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-1.2959955 -0.8185172 0.48078787 0.52320206 -1.4074037 0.45120704 -3.8463874 -1.6529294 0.21940875 -0.98508596] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-1.0740336 1.4099554 -2.9855723 -0.88147557 -1.6753181 0.2956183 -3.7859726 -1.27505 -1.7400908 -5.3667016 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-3.1560104 -3.418068 -2.7943444 -3.0511885 -0.6469133 -0.0522033 -2.9920738 -2.050566 -2.0898216 -3.1267755 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-2.9259548 -2.9915333 -1.7545762 -3.9254885 -3.911848 1.2058715 -1.7424726 -0.39932418 -1.1419003 -0.9512275 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 
-1.6709605 ] [-1.4819623 -1.7415512 -1.4119158 -4.2146115 -0.85984784 -2.2650747 -1.4220777 -0.4544319 -2.5404313 -3.8748026 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-0.7323223 -2.4752703 -0.0658828 2.1060176 -0.08330071 -3.2233298 -3.0571191 -0.28963923 -0.93137795 1.3136722 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-1.5090944 -2.9079173 -3.1431324 -1.5120666 -2.0761158 -2.6512377 1.7285012 0.2633127 0.30193377 -1.0998702 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ] [-3.8007574 -3.4539018 0.07121003 -2.4001784 -2.9843793 0.66983116 -1.1312394 -0.93081105 -0.593776 -1.7577163 ] [-1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 -1.6709605 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} 0] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_634.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.2102 0.5181 0.1307 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.4874 (2,1,.,.) = 0.6735 (3,1,.,.) = 0.01 * 5.1951 (1,2,.,.) = -1.1135 (2,2,.,.) = -0.6564 (3,2,.,.) = -1.2019 (1,3,.,.) = -0.3819 (2,3,.,.) = -0.1396 (3,3,.,.) = -1.1339 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[-1.8512964e-01 -1.1253047e+00 -1.2152498e+00 -2.5937970e+00 -2.8197818e+00 1.1962938e-01 -1.1765013e+00 -2.7835515e+00 -1.2084159e+00 -2.3750132e-01] [-2.8402168e-01 -7.3529482e-03 -1.4521244e+00 -2.5626407e+00 -1.2003028e+00 -1.3012606e+00 -2.3987174e+00 1.2904371e+00 -2.1669865e+00 -2.8472590e+00] [-2.6471810e+00 -2.8960252e+00 1.7362958e+00 -2.2848463e+00 -1.1314033e+00 4.4479966e-02 -5.9265536e-01 -1.8335524e+00 -2.6055503e+00 -1.0060346e-01] [ 5.1512730e-01 6.4375639e-02 1.6489307e+00 -3.4047637e+00 -1.2551110e+00 4.5498300e-01 -1.7858384e+00 -1.2900648e+00 -8.2530087e-01 -6.1646730e-01] [-2.6209502e+00 -3.9172947e-01 -2.7619038e+00 -2.8259845e+00 5.0943983e-01 -1.0656137e+00 -1.1447046e+00 -2.1062148e+00 -5.0606614e-01 -4.3184433e+00]] [[ 2.6293590e+00 -2.1455288e-02 4.6987003e-01 -7.1514964e-01 4.4416580e-01 1.6670942e-02 
1.0540642e+00 2.2964783e+00 1.0660021e+00 8.2132041e-02] [ 1.6361448e-01 -5.2378345e-01 2.3999017e-01 1.2687702e+00 -1.6623991e+00 -7.1039486e-01 1.2212653e+00 2.2307906e+00 -5.6480145e-01 5.6275260e-01] [ 1.1397004e-03 7.6090872e-02 7.3063934e-01 -4.0178275e-01 9.4675964e-01 1.4487951e+00 4.7019053e-01 5.1589644e-01 1.6058714e+00 1.7934570e+00] [ 1.2774227e+00 3.6711597e+00 1.1624334e+00 3.3728153e-01 9.7259766e-01 1.2111163e+00 -6.0847521e-02 1.6961402e-01 -8.3454311e-02 5.0914347e-02] [-8.5095501e-01 8.8698781e-01 -3.8037992e-01 6.1286539e-02 1.8600335e+00 1.7143316e+00 5.7483315e-01 -1.3099562e+00 5.8301127e-01 -2.8748065e-01]] [[ 1.6950213e+00 5.9600145e-01 1.1736072e+00 -2.0379043e+00 -1.1601359e+00 1.3269696e+00 6.1156213e-01 3.3144498e-01 4.4633141e-01 1.7823626e+00] [ 7.7830517e-01 3.6803228e-01 -4.1880974e-01 -3.7391481e-01 -9.9322522e-01 -2.1152968e+00 -3.0017737e-01 2.8217893e+00 -2.2373030e+00 -2.1670763e+00] [-2.6360166e+00 -1.4973741e+00 2.2231052e+00 -1.4219315e+00 1.5190473e+00 7.1767497e-01 5.8386731e-01 4.6213263e-01 -1.7657570e+00 1.7310784e+00] [ 2.2136941e+00 2.1916065e+00 4.2547555e+00 -2.5008993e+00 -5.1844120e-03 2.7561891e+00 -4.8168257e-01 -7.1756172e-01 -8.4128961e-02 1.2577190e-01] [-1.8693824e+00 1.7355599e+00 -6.1338603e-01 -5.4813302e-01 4.0065804e+00 9.9333692e-01 9.3113112e-01 -1.0152086e+00 6.1541724e-01 -2.6785755e+00]]]]; ov_res: [[[[-1.8512964e-01 -1.1253047e+00 -1.2152498e+00 -2.5937970e+00 -2.8197818e+00 1.1962938e-01 -1.1765013e+00 -2.7835515e+00 -1.2084159e+00 -2.3750132e-01] [-2.8402168e-01 -7.3529482e-03 -1.4521244e+00 -2.5626407e+00 -1.2003028e+00 -1.3012606e+00 -2.3987174e+00 1.2904371e+00 -2.1669865e+00 -2.8472590e+00] [-2.6471810e+00 -2.8960252e+00 1.7362958e+00 -2.2848463e+00 -1.1314033e+00 4.4479966e-02 -5.9265536e-01 -1.8335524e+00 -2.6055503e+00 -1.0060346e-01] [ 5.1512730e-01 6.4375639e-02 1.6489307e+00 -3.4047637e+00 -1.2551110e+00 4.5498300e-01 -1.7858384e+00 -1.2900648e+00 -8.2530087e-01 -6.1646730e-01] 
[-2.6209502e+00 -3.9172947e-01 -2.7619038e+00 -2.8259845e+00 5.0943983e-01 -1.0656137e+00 -1.1447046e+00 -2.1062148e+00 -5.0606614e-01 -4.3184433e+00]] [[ 2.6293590e+00 -2.1455288e-02 4.6987003e-01 -7.1514964e-01 4.4416580e-01 1.6670942e-02 1.0540642e+00 2.2964783e+00 1.0660021e+00 8.2132041e-02] [ 1.6361448e-01 -5.2378345e-01 2.3999017e-01 1.2687702e+00 -1.6623991e+00 -7.1039486e-01 1.2212653e+00 2.2307906e+00 -5.6480145e-01 5.6275260e-01] [ 1.1397004e-03 7.6090872e-02 7.3063934e-01 -4.0178275e-01 9.4675964e-01 1.4487951e+00 4.7019053e-01 5.1589644e-01 1.6058714e+00 1.7934570e+00] [ 1.2774227e+00 3.6711597e+00 1.1624334e+00 3.3728153e-01 9.7259766e-01 1.2111163e+00 -6.0847521e-02 1.6961402e-01 -8.3454311e-02 5.0914347e-02] [-8.5095501e-01 8.8698781e-01 -3.8037992e-01 6.1286539e-02 1.8600335e+00 1.7143316e+00 5.7483315e-01 -1.3099562e+00 5.8301127e-01 -2.8748065e-01]] [[ 1.6950213e+00 5.9600145e-01 1.1736072e+00 -2.0379043e+00 -1.1601359e+00 1.3269696e+00 6.1156213e-01 3.3144498e-01 4.4633141e-01 1.7823626e+00] [ 7.7830517e-01 3.6803228e-01 -4.1880974e-01 -3.7391481e-01 -9.9322522e-01 -2.1152968e+00 -3.0017737e-01 2.8217893e+00 -2.2373030e+00 -2.1670763e+00] [-2.6360166e+00 -1.4973741e+00 2.2231052e+00 -1.4219315e+00 1.5190473e+00 7.1767497e-01 5.8386731e-01 4.6213263e-01 -1.7657570e+00 1.7310784e+00] [ 2.2136941e+00 2.1916065e+00 4.2547555e+00 -2.5008993e+00 -5.1844120e-03 2.7561891e+00 -4.8168257e-01 -7.1756172e-01 -8.4128961e-02 1.2577190e-01] [-1.8693824e+00 1.7355599e+00 -6.1338603e-01 -5.4813302e-01 4.0065804e+00 9.9333692e-01 9.3113112e-01 -1.0152086e+00 6.1541724e-01 -2.6785755e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} 0] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_636.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.690335}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.9035 (2,1,.,.) = 0.7322 (3,1,.,.) = 0.2992 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[-1.3124125 0.6903351 -1.5157962 0.6903351 3.575426 0.6903351 0.2157143 0.6903351 3.844786 0.6903351 2.5612938 0.6903351 3.8723273 0.6903351 4.475744 0.6903351 2.4896936 0.6903351 -2.3461862 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 4.5760283 0.6903351 1.0945603 0.6903351 3.9290695 0.6903351 3.9761047 0.6903351 0.41930756 0.6903351 1.5424637 0.6903351 6.1415687 0.6903351 -0.10718149 0.6903351 -0.8618097 0.6903351 0.4702642 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [-1.0321851 0.6903351 0.9965734 0.6903351 -3.0850239 0.6903351 -2.3175983 0.6903351 1.7711442 0.6903351 2.7346504 0.6903351 1.057896 0.6903351 -2.500831 0.6903351 -2.5907526 0.6903351 2.2246673 ] [ 0.6903351 0.6903351 0.6903351 
0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 1.3036822 0.6903351 2.1277323 0.6903351 -3.011732 0.6903351 -2.8202221 0.6903351 -0.761506 0.6903351 1.3742948 0.6903351 3.3094282 0.6903351 -0.11871147 0.6903351 -2.1997097 0.6903351 1.5639917 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 1.3380195 0.6903351 0.90542305 0.6903351 1.2269006 0.6903351 -1.4612823 0.6903351 3.5721235 0.6903351 1.7263927 0.6903351 0.46266365 0.6903351 0.30946746 0.6903351 0.34554794 0.6903351 2.0668578 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 0.60839605 0.6903351 3.0638552 0.6903351 0.5483096 0.6903351 0.48176926 0.6903351 -1.5559764 0.6903351 -2.9009876 0.6903351 -1.3016534 0.6903351 1.3142596 0.6903351 2.2918108 0.6903351 3.2544591 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 2.593336 0.6903351 0.76659507 0.6903351 1.4101348 0.6903351 -2.6701524 0.6903351 3.0418246 0.6903351 -1.0951228 0.6903351 -0.3538403 0.6903351 0.9122439 0.6903351 0.54899424 0.6903351 3.4440053 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 3.905031 0.6903351 1.3357098 0.6903351 -0.9711738 0.6903351 -0.39749366 0.6903351 1.639278 0.6903351 -0.27176636 0.6903351 -1.901953 0.6903351 -0.04614127 0.6903351 1.0409756 0.6903351 1.8635664 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 
0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 4.570691 0.6903351 1.3153372 0.6903351 -3.9171207 0.6903351 1.9748383 0.6903351 3.2675579 0.6903351 3.6257727 0.6903351 1.2379992 0.6903351 5.148472 0.6903351 0.01125985 0.6903351 0.55389726] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 4.840373 0.6903351 -0.8597471 0.6903351 2.6539855 0.6903351 -0.7894823 0.6903351 -0.5017975 0.6903351 2.3159776 0.6903351 0.9108701 0.6903351 -0.29673457 0.6903351 3.309088 0.6903351 0.69930506]]]]; ov_res: [[[[-1.3124125 0.6903351 -1.5157962 0.6903351 3.575426 0.6903351 0.2157143 0.6903351 3.844786 0.6903351 2.5612938 0.6903351 3.8723273 0.6903351 4.475744 0.6903351 2.4896936 0.6903351 -2.3461862 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 4.5760283 0.6903351 1.0945603 0.6903351 3.9290695 0.6903351 3.9761047 0.6903351 0.41930756 0.6903351 1.5424637 0.6903351 6.1415687 0.6903351 -0.10718149 0.6903351 -0.8618097 0.6903351 0.4702642 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [-1.0321851 0.6903351 0.9965734 0.6903351 -3.0850239 0.6903351 -2.3175983 0.6903351 1.7711442 0.6903351 2.7346504 0.6903351 1.057896 0.6903351 -2.500831 0.6903351 -2.5907526 0.6903351 2.2246673 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 1.3036822 0.6903351 2.1277323 0.6903351 -3.011732 0.6903351 -2.8202221 0.6903351 -0.761506 
0.6903351 1.3742948 0.6903351 3.3094282 0.6903351 -0.11871147 0.6903351 -2.1997097 0.6903351 1.5639917 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 1.3380195 0.6903351 0.90542305 0.6903351 1.2269006 0.6903351 -1.4612823 0.6903351 3.5721235 0.6903351 1.7263927 0.6903351 0.46266365 0.6903351 0.30946746 0.6903351 0.34554794 0.6903351 2.0668578 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 0.60839605 0.6903351 3.0638552 0.6903351 0.5483096 0.6903351 0.48176926 0.6903351 -1.5559764 0.6903351 -2.9009876 0.6903351 -1.3016534 0.6903351 1.3142596 0.6903351 2.2918108 0.6903351 3.2544591 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 2.593336 0.6903351 0.76659507 0.6903351 1.4101348 0.6903351 -2.6701524 0.6903351 3.0418246 0.6903351 -1.0951228 0.6903351 -0.3538403 0.6903351 0.9122439 0.6903351 0.54899424 0.6903351 3.4440053 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 3.905031 0.6903351 1.3357098 0.6903351 -0.9711738 0.6903351 -0.39749366 0.6903351 1.639278 0.6903351 -0.27176636 0.6903351 -1.901953 0.6903351 -0.04614127 0.6903351 1.0409756 0.6903351 1.8635664 ] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 4.570691 0.6903351 1.3153372 0.6903351 -3.9171207 0.6903351 1.9748383 0.6903351 3.2675579 0.6903351 3.6257727 0.6903351 
1.2379992 0.6903351 5.148472 0.6903351 0.01125985 0.6903351 0.55389726] [ 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 0.6903351 ] [ 4.840373 0.6903351 -0.8597471 0.6903351 2.6539855 0.6903351 -0.7894823 0.6903351 -0.5017975 0.6903351 2.3159776 0.6903351 0.9108701 0.6903351 -0.29673457 0.6903351 3.309088 0.6903351 0.69930506]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 2], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_638.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.9226 -0.0483 -1.5613 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 1.8688 (2,1,.,.) = -0.9902 (3,1,.,.) = 0.01 * 7.6436 (1,2,.,.) = -0.4528 (2,2,.,.) = -0.6220 (3,2,.,.) = 0.4167 (1,3,.,.) = 0.8176 (2,3,.,.) = 0.01 * -3.9121 (3,3,.,.) = -0.4373 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 4.996329 -0.08760244 0.53898305 3.1355276 2.681663 ] [-3.0020316 2.7260919 4.331857 -0.47933024 -0.06859076] [ 0.7369044 0.7065854 -1.3763068 2.8553786 2.7003596 ] [ 1.819823 0.57523763 2.1789005 0.5523771 1.6258752 ] [-0.8598855 0.3815531 1.4634106 0.5430401 3.1215963 ]] [[-1.2370483 -0.12940454 0.93173397 0.6152859 -2.0846527 ] [ 1.5275834 -1.8014152 -1.1199362 -0.50528955 2.0902479 ] [-1.0224627 1.7315141 -1.2966241 -1.3356359 -0.9466858 ] [ 0.93636554 -0.47226998 0.80039746 0.34975937 0.70439476] [-0.06458876 -0.50066435 0.39199105 -0.5753512 0.5742835 ]] [[-2.2952724 -1.6039786 -2.17402 -2.4289422 -1.0076222 ] [-1.1236658 -1.5443987 -1.6342263 -0.6069255 -2.5998697 ] [-0.5995682 -2.308619 0.07347071 -1.5185182 -2.1195102 ] [-2.326972 -1.4982982 -2.2462687 -1.2323073 -2.4707336 ] [-0.5956412 -1.0958693 -2.2274873 -1.2845566 
-2.6098182 ]]]]; ov_res: [[[[ 4.996329 -0.08760244 0.53898305 3.1355276 2.681663 ] [-3.0020316 2.7260919 4.331857 -0.47933024 -0.06859076] [ 0.7369044 0.7065854 -1.3763068 2.8553786 2.7003596 ] [ 1.819823 0.57523763 2.1789005 0.5523771 1.6258752 ] [-0.8598855 0.3815531 1.4634106 0.5430401 3.1215963 ]] [[-1.2370483 -0.12940454 0.93173397 0.6152859 -2.0846527 ] [ 1.5275834 -1.8014152 -1.1199362 -0.50528955 2.0902479 ] [-1.0224627 1.7315141 -1.2966241 -1.3356359 -0.9466858 ] [ 0.93636554 -0.47226998 0.80039746 0.34975937 0.70439476] [-0.06458876 -0.50066435 0.39199105 -0.5753512 0.5742835 ]] [[-2.2952724 -1.6039786 -2.17402 -2.4289422 -1.0076222 ] [-1.1236658 -1.5443987 -1.6342263 -0.6069255 -2.5998697 ] [-0.5995682 -2.308619 0.07347071 -1.5185182 -2.1195102 ] [-2.326972 -1.4982982 -2.2462687 -1.2323073 -2.4707336 ] [-0.5956412 -1.0958693 -2.2274873 -1.2845566 -2.6098182 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} 1] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_640.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.5240 -0.3186 -0.1812 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.3318 (2,1,.,.) = 0.7627 (3,1,.,.) = 0.7555 (1,2,.,.) = 1.4337 (2,2,.,.) = -1.5704 (3,2,.,.) = -1.3047 (1,3,.,.) = -1.7238 (2,3,.,.) = 0.1553 (3,3,.,.) = 1.1869 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 2.4021041 0.96704495 0.8075122 0.65419704 3.8057044 -1.9439318 -1.9362013 4.551497 3.5371099 2.5980122 ] [ 2.5105722 -0.5068522 -1.3467479 0.49567264 1.7021532 0.3500383 2.366813 2.6015809 -1.7668524 -3.967743 ] [-2.5812922 2.2816172 2.9876246 0.22191805 2.927765 1.9082592 3.6090052 -3.0282302 1.2703971 3.3206625 ] [ 0.488252 -1.4564929 -2.1421454 -0.9334157 4.3135753 0.6600353 0.9931662 -1.7958338 3.8224201 -0.3052947 ] [-3.461021 -3.5956206 3.671972 -0.19220078 -0.45182276 4.273107 0.3723408 0.8905175 1.0208591 1.5640004 ]] [[-3.0834103 -0.25647888 0.5669974 -1.4963067 -4.2284827 1.0311315 0.0417881 1.0225799 -0.25642216 0.10828218] [-1.5099623 -0.1193682 -0.13805401 -0.03016773 0.92052555 3.5035012 -0.73989457 -1.0708992 0.04476863 1.3147076 ] [ 0.9826902 -0.7621179 -3.9573972 -2.3344288 0.0312767 -3.3339887 -0.04225186 1.0370313 0.22321376 
-1.5352641 ] [-1.8903033 2.2897153 3.6311102 -0.9589077 -2.463235 -2.502579 -2.333569 1.8727887 -2.4476016 -1.6912992 ] [ 1.296468 6.2334003 -0.9910674 0.5656376 0.6591227 -2.3974862 -1.5856653 1.2342619 -0.6772165 0.01545396]] [[-2.509786 -0.7124138 0.07609536 -0.9996343 -3.7210784 0.8811723 1.3896214 -1.4040433 -1.2177328 -0.3434441 ] [-1.4443392 0.82061327 1.4824018 -0.06384892 -0.23617633 1.5223149 -1.6322093 -1.3951426 2.2382646 3.9680674 ] [ 1.5893631 -2.3027186 -3.9221802 -1.827455 0.13610418 -1.0649343 -2.3234887 2.9614973 -0.5204604 -2.4078333 ] [-0.6009254 2.1857176 3.0795321 0.06412201 -3.1560311 0.4027756 -0.19635808 2.703707 -3.1534975 -0.641206 ] [ 2.8893063 4.8007298 -1.3152243 -0.09893332 1.4673185 -2.926819 -0.50659436 0.43690246 -0.74549264 -0.24131115]]]]; ov_res: [[[[ 2.4021041 0.96704495 0.8075122 0.65419704 3.8057044 -1.9439318 -1.9362013 4.551497 3.5371099 2.5980122 ] [ 2.5105722 -0.5068522 -1.3467479 0.49567264 1.7021532 0.3500383 2.366813 2.6015809 -1.7668524 -3.967743 ] [-2.5812922 2.2816172 2.9876246 0.22191805 2.927765 1.9082592 3.6090052 -3.0282302 1.2703971 3.3206625 ] [ 0.488252 -1.4564929 -2.1421454 -0.9334157 4.3135753 0.6600353 0.9931662 -1.7958338 3.8224201 -0.3052947 ] [-3.461021 -3.5956206 3.671972 -0.19220078 -0.45182276 4.273107 0.3723408 0.8905175 1.0208591 1.5640004 ]] [[-3.0834103 -0.25647888 0.5669974 -1.4963067 -4.2284827 1.0311315 0.0417881 1.0225799 -0.25642216 0.10828218] [-1.5099623 -0.1193682 -0.13805401 -0.03016773 0.92052555 3.5035012 -0.73989457 -1.0708992 0.04476863 1.3147076 ] [ 0.9826902 -0.7621179 -3.9573972 -2.3344288 0.0312767 -3.3339887 -0.04225186 1.0370313 0.22321376 -1.5352641 ] [-1.8903033 2.2897153 3.6311102 -0.9589077 -2.463235 -2.502579 -2.333569 1.8727887 -2.4476016 -1.6912992 ] [ 1.296468 6.2334003 -0.9910674 0.5656376 0.6591227 -2.3974862 -1.5856653 1.2342619 -0.6772165 0.01545396]] [[-2.509786 -0.7124138 0.07609536 -0.9996343 -3.7210784 0.8811723 1.3896214 -1.4040433 -1.2177328 -0.3434441 ] 
[-1.4443392 0.82061327 1.4824018 -0.06384892 -0.23617633 1.5223149 -1.6322093 -1.3951426 2.2382646 3.9680674 ] [ 1.5893631 -2.3027186 -3.9221802 -1.827455 0.13610418 -1.0649343 -2.3234887 2.9614973 -0.5204604 -2.4078333 ] [-0.6009254 2.1857176 3.0795321 0.06412201 -3.1560311 0.4027756 -0.19635808 2.703707 -3.1534975 -0.641206 ] [ 2.8893063 4.8007298 -1.3152243 -0.09893332 1.4673185 -2.926819 -0.50659436 0.43690246 -0.74549264 -0.24131115]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} 1] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_642.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.853381}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -2.2591 (2,1,.,.) = -1.2746 (3,1,.,.) = -0.5755 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[-1.8885643 -0.853381 2.3051965 -0.853381 -2.855694 -0.853381 -1.4484189 -0.853381 3.6333582 -0.853381 0.5415986 -0.853381 1.7219245 -0.853381 -4.065902 -0.853381 -2.8008907 -0.853381 -4.7541437 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-6.916025 -0.853381 0.6643675 -0.853381 -5.492267 -0.853381 1.1469517 -0.853381 2.5193164 -0.853381 -4.9400125 -0.853381 -1.0586698 -0.853381 2.6405213 -0.853381 -1.4027829 -0.853381 -1.3040645 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-3.3931832 -0.853381 1.4327765 -0.853381 -0.05862641 -0.853381 2.0015104 -0.853381 -1.6030616 -0.853381 -0.50758684 -0.853381 1.9717374 -0.853381 0.8845369 -0.853381 1.8855901 -0.853381 -4.282222 ] [-0.853381 -0.853381 
-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-0.20686549 -0.853381 3.096879 -0.853381 -2.4839096 -0.853381 -3.042952 -0.853381 0.08850485 -0.853381 1.4166968 -0.853381 3.2466753 -0.853381 -0.6848087 -0.853381 0.5474369 -0.853381 -2.8254857 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-1.4013448 -0.853381 2.2738163 -0.853381 -0.83201617 -0.853381 0.46180087 -0.853381 -0.1908955 -0.853381 -6.639335 -0.853381 1.075526 -0.853381 -0.42087284 -0.853381 2.5239286 -0.853381 -1.9240825 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [ 0.6022207 -0.853381 -2.555915 -0.853381 6.662973 -0.853381 1.7113769 -0.853381 1.9788992 -0.853381 -0.4518057 -0.853381 -5.5073395 -0.853381 -4.2075152 -0.853381 -2.3667028 -0.853381 1.5721741 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-7.0668106 -0.853381 1.4667864 -0.853381 -2.829835 -0.853381 -2.9919825 -0.853381 -2.6374834 -0.853381 2.498939 -0.853381 -3.395371 -0.853381 -2.4725604 -0.853381 0.9409302 -0.853381 -2.5297484 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-1.5856109 -0.853381 -2.0258482 -0.853381 -0.065925 -0.853381 -1.2448688 -0.853381 -0.9806402 -0.853381 -2.57855 -0.853381 -1.6479635 -0.853381 -2.2967675 -0.853381 -3.3806155 -0.853381 -1.0591369 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 
-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-2.901394 -0.853381 -1.6344473 -0.853381 5.8257804 -0.853381 0.9156105 -0.853381 -5.374533 -0.853381 -5.576057 -0.853381 -2.9812264 -0.853381 -1.4346645 -0.853381 0.29401666 -0.853381 0.95141155] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-3.7451813 -0.853381 -3.6074047 -0.853381 -6.7245755 -0.853381 -0.47057697 -0.853381 3.465195 -0.853381 -1.1384804 -0.853381 -1.9823754 -0.853381 -0.2988543 -0.853381 -3.1008136 -0.853381 0.95042044]]]]; ov_res: [[[[-1.8885643 -0.853381 2.3051965 -0.853381 -2.855694 -0.853381 -1.4484189 -0.853381 3.6333582 -0.853381 0.5415986 -0.853381 1.7219245 -0.853381 -4.065902 -0.853381 -2.8008907 -0.853381 -4.7541437 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-6.916025 -0.853381 0.6643675 -0.853381 -5.492267 -0.853381 1.1469517 -0.853381 2.5193164 -0.853381 -4.9400125 -0.853381 -1.0586698 -0.853381 2.6405213 -0.853381 -1.4027829 -0.853381 -1.3040645 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-3.3931832 -0.853381 1.4327765 -0.853381 -0.05862641 -0.853381 2.0015104 -0.853381 -1.6030616 -0.853381 -0.50758684 -0.853381 1.9717374 -0.853381 0.8845369 -0.853381 1.8855901 -0.853381 -4.282222 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-0.20686549 -0.853381 3.096879 -0.853381 -2.4839096 -0.853381 -3.042952 -0.853381 
0.08850485 -0.853381 1.4166968 -0.853381 3.2466753 -0.853381 -0.6848087 -0.853381 0.5474369 -0.853381 -2.8254857 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-1.4013448 -0.853381 2.2738163 -0.853381 -0.83201617 -0.853381 0.46180087 -0.853381 -0.1908955 -0.853381 -6.639335 -0.853381 1.075526 -0.853381 -0.42087284 -0.853381 2.5239286 -0.853381 -1.9240825 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [ 0.6022207 -0.853381 -2.555915 -0.853381 6.662973 -0.853381 1.7113769 -0.853381 1.9788992 -0.853381 -0.4518057 -0.853381 -5.5073395 -0.853381 -4.2075152 -0.853381 -2.3667028 -0.853381 1.5721741 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-7.0668106 -0.853381 1.4667864 -0.853381 -2.829835 -0.853381 -2.9919825 -0.853381 -2.6374834 -0.853381 2.498939 -0.853381 -3.395371 -0.853381 -2.4725604 -0.853381 0.9409302 -0.853381 -2.5297484 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-1.5856109 -0.853381 -2.0258482 -0.853381 -0.065925 -0.853381 -1.2448688 -0.853381 -0.9806402 -0.853381 -2.57855 -0.853381 -1.6479635 -0.853381 -2.2967675 -0.853381 -3.3806155 -0.853381 -1.0591369 ] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-2.901394 -0.853381 -1.6344473 -0.853381 5.8257804 -0.853381 0.9156105 -0.853381 -5.374533 -0.853381 -5.576057 -0.853381 
-2.9812264 -0.853381 -1.4346645 -0.853381 0.29401666 -0.853381 0.95141155] [-0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 -0.853381 ] [-3.7451813 -0.853381 -3.6074047 -0.853381 -6.7245755 -0.853381 -0.47057697 -0.853381 3.465195 -0.853381 -1.1384804 -0.853381 -1.9823754 -0.853381 -0.2988543 -0.853381 -3.1008136 -0.853381 0.95042044]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [1, 1], 'dilations': [2, 2], 'groups': 1, 'output_padding': [1, 1], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_644.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 1]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.34812}]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 2.2210 (2,1,.,.) = -1.4011 (3,1,.,.) = -1.9401 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 0.51762974 -1.348123 -1.6731964 -1.348123 -1.1451216 -1.348123 1.5055372 -1.348123 -5.661416 -1.348123 -5.8267016 -1.348123 -0.12264025 -1.348123 -2.5638323 -1.348123 -7.890868 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 1.2486147 -1.348123 0.7101346 -1.348123 -3.6823153 -1.348123 2.2833471 -1.348123 0.06720316 -1.348123 -0.74755764 -1.348123 1.6309713 -1.348123 -0.9586129 -1.348123 0.48140895] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 5.5395637 -1.348123 -3.3623567 -1.348123 3.1478596 -1.348123 -1.0316005 -1.348123 0.33182335 -1.348123 0.11381304 -1.348123 
-6.238218 -1.348123 2.231731 -1.348123 -6.205407 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 -5.811353 -1.348123 -3.0451922 -1.348123 -4.0297804 -1.348123 5.64558 -1.348123 -7.994067 -1.348123 2.4728246 -1.348123 -5.279687 -1.348123 -0.3447503 -1.348123 3.1954737 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 0.21886098 -1.348123 0.49631107 -1.348123 -0.463409 -1.348123 -4.3482485 -1.348123 -5.206514 -1.348123 -0.45538962 -1.348123 4.951646 -1.348123 -3.4825654 -1.348123 -1.619305 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 1.9457892 -1.348123 -7.1141653 -1.348123 -1.3983536 -1.348123 -1.9518235 -1.348123 -0.23468661 -1.348123 0.05007744 -1.348123 -6.6072054 -1.348123 -0.4749608 -1.348123 6.305185 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 -3.0396204 -1.348123 -1.2670503 -1.348123 -0.85116434 -1.348123 -7.5217037 -1.348123 4.046559 -1.348123 3.342803 -1.348123 -5.438757 -1.348123 -7.90189 -1.348123 -0.8379454 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 -3.1871834 -1.348123 -3.2402978 -1.348123 -4.1306496 -1.348123 3.7007914 -1.348123 -1.2403305 -1.348123 -3.39468 -1.348123 -2.4820147 -1.348123 -3.6179352 -1.348123 -1.1908144 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 
-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 -2.3128479 -1.348123 -7.286462 -1.348123 -2.775967 -1.348123 7.126632 -1.348123 -0.18736911 -1.348123 -3.8715625 -1.348123 3.285779 -1.348123 2.6500106 -1.348123 -2.2536426 ]]]]; ov_res: [[[[-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 0.51762974 -1.348123 -1.6731964 -1.348123 -1.1451216 -1.348123 1.5055372 -1.348123 -5.661416 -1.348123 -5.8267016 -1.348123 -0.12264025 -1.348123 -2.5638323 -1.348123 -7.890868 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 1.2486147 -1.348123 0.7101346 -1.348123 -3.6823153 -1.348123 2.2833471 -1.348123 0.06720316 -1.348123 -0.74755764 -1.348123 1.6309713 -1.348123 -0.9586129 -1.348123 0.48140895] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 5.5395637 -1.348123 -3.3623567 -1.348123 3.1478596 -1.348123 -1.0316005 -1.348123 0.33182335 -1.348123 0.11381304 -1.348123 -6.238218 -1.348123 2.231731 -1.348123 -6.205407 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 -5.811353 -1.348123 -3.0451922 -1.348123 -4.0297804 -1.348123 5.64558 -1.348123 -7.994067 -1.348123 2.4728246 -1.348123 -5.279687 -1.348123 -0.3447503 -1.348123 3.1954737 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 0.21886098 -1.348123 0.49631107 
-1.348123 -0.463409 -1.348123 -4.3482485 -1.348123 -5.206514 -1.348123 -0.45538962 -1.348123 4.951646 -1.348123 -3.4825654 -1.348123 -1.619305 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 1.9457892 -1.348123 -7.1141653 -1.348123 -1.3983536 -1.348123 -1.9518235 -1.348123 -0.23468661 -1.348123 0.05007744 -1.348123 -6.6072054 -1.348123 -0.4749608 -1.348123 6.305185 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 -3.0396204 -1.348123 -1.2670503 -1.348123 -0.85116434 -1.348123 -7.5217037 -1.348123 4.046559 -1.348123 3.342803 -1.348123 -5.438757 -1.348123 -7.90189 -1.348123 -0.8379454 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 -3.1871834 -1.348123 -3.2402978 -1.348123 -4.1306496 -1.348123 3.7007914 -1.348123 -1.2403305 -1.348123 -3.39468 -1.348123 -2.4820147 -1.348123 -3.6179352 -1.348123 -1.1908144 ] [-1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 -1.348123 ] [-1.348123 -2.3128479 -1.348123 -7.286462 -1.348123 -2.775967 -1.348123 7.126632 -1.348123 -0.18736911 -1.348123 -3.8715625 -1.348123 3.285779 -1.348123 2.6500106 -1.348123 -2.2536426 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 2, 2], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_646.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, 2, strides=[12, 4, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[-2.33727932e-01 -7.21855640e-01 -3.25775146e-03 5.53950667e-02 7.81383038e-01 3.15085459e+00 -4.20959175e-01 2.92955065e+00 -5.66958070e-01 1.85682833e+00 1.01066172e+00] [ 7.27188408e-01 -2.88604999e+00 5.39656162e+00 1.24716604e+00 6.33861601e-01 -3.79030871e+00 -1.42339540e+00 3.01189232e+00 -5.20549488e+00 -4.72048664e+00 -2.18384027e+00] [-8.88182402e-01 3.14031911e+00 -2.89825964e+00 1.53607070e+00 -6.66993618e+00 -3.60084867e+00 2.54466605e+00 3.39083433e+00 -2.94249010e+00 -5.25549054e-02 -1.29592538e+00] [-1.71985233e+00 -1.15786374e+00 4.78204936e-01 -2.02392268e+00 1.50707793e+00 -1.85993791e-01 -2.64935112e+00 -1.54903305e+00 -4.67607021e+00 9.24418163e+00 -3.70700032e-01] [ 1.56596625e+00 1.30590987e+00 2.56332541e+00 -3.29430699e+00 8.10768127e-01 -2.75927472e+00 3.85731983e+00 -1.41575336e-01 -1.41827166e+00 -2.64870930e+00 -9.11686420e-01] [-3.68007350e+00 4.84467602e+00 4.11307096e-01 -1.91157126e+00 3.81596255e+00 3.40032339e-01 5.15442252e-01 2.95451283e+00 3.76204610e-01 -3.82559443e+00 -8.86155605e-01] [-3.14332747e+00 -7.50660896e-02 -2.66925502e+00 -3.00287962e+00 3.97469044e-01 -7.79989421e-01 -2.82640815e+00 -1.16733408e+00 1.14127648e+00 -4.66817331e+00 
-8.93786371e-01] [ 2.50149202e+00 -2.46440411e-01 -3.74741197e-01 -4.91739750e-01 2.88211346e+00 4.85120535e-01 -1.26269722e+00 2.50689340e+00 1.43857431e+00 -5.02028179e+00 2.10587001e+00] [-1.12746763e+00 -1.52728295e+00 1.81939077e+00 2.13561440e+00 -5.84265828e-01 -2.24452353e+00 5.75542545e+00 -1.10371208e+00 -1.98412704e+00 -6.33730793e+00 1.16940057e+00] [ 3.87526989e-01 1.84892702e+00 2.56603098e+00 1.18560719e+00 -3.76542902e+00 5.81482983e+00 -2.62047982e+00 -4.64704275e+00 2.15331030e+00 -5.32912612e-02 2.51613879e+00] [-1.73835054e-01 -4.03636503e+00 1.72094619e+00 -2.46893954e+00 -6.25081301e-01 -2.13273335e+00 3.95474195e+00 -1.45659304e+00 4.16143608e+00 -1.85215294e+00 -1.19384915e-01]] [[-1.07682705e-01 -2.79040009e-01 1.01010799e+00 -4.96249974e-01 3.24162513e-01 -7.91868150e-01 -2.58924842e+00 1.46361661e+00 -2.80251932e+00 -1.76404142e+00 -6.90968096e-01] [ 2.34607950e-01 2.35986382e-01 -1.26810503e+00 5.79277396e-01 -2.40863061e+00 -3.38950109e+00 1.14916730e+00 5.65051019e-01 -3.32358456e+00 8.31439257e-01 5.10263681e-01] [-1.30145192e+00 2.47774303e-01 -2.27889276e+00 -2.61215115e+00 -2.64646649e+00 2.72281981e+00 2.90079832e+00 -8.90207440e-02 -9.81652558e-01 1.88936329e+00 -1.92884529e+00] [ 1.15176082e+00 -2.80150437e+00 4.88984466e-01 -3.28910208e+00 8.78593564e-01 -1.49681807e-01 1.38056231e+00 -1.93204188e+00 8.00665677e-01 -1.74108648e+00 -2.42645001e+00] [-2.08104491e+00 -9.15071964e-02 -6.64596260e-01 -5.49404430e+00 3.01124007e-01 -1.52000690e+00 5.56924045e-01 -9.60436106e-01 -1.59646797e+00 -4.17161369e+00 -5.55795431e-01] [-1.40190899e+00 1.50290787e+00 -2.16980124e+00 -1.39636850e+00 1.37896490e+00 1.10553145e+00 -1.44216907e+00 -9.37760890e-01 -1.61615872e+00 -3.04445434e+00 -3.94235939e-01] [ 5.87005019e-01 4.19620132e+00 -1.26465857e+00 1.81836975e+00 2.70392275e+00 -8.90020907e-01 -2.99589014e+00 8.52093935e-01 -1.34796011e+00 -2.13144803e+00 -8.62056911e-02] [ 3.20640385e-01 -2.88502312e+00 1.04642749e-01 1.32932544e+00 
2.34533572e+00 -2.43198919e+00 3.03040075e+00 1.79999650e+00 1.64321566e+00 -4.01756334e+00 1.44045389e+00] [-1.55797482e+00 1.73548698e-01 1.61348844e+00 -4.24364269e-01 -1.73906565e+00 2.73904991e+00 2.50854516e+00 -9.46709514e-01 -1.98399210e+00 7.81239152e-01 2.44485378e+00] [-1.33074850e-01 -5.91639519e-01 3.49414617e-01 -1.85590172e+00 -3.17988276e-01 3.79256845e-01 -2.86464500e+00 -2.23838615e+00 2.14651155e+00 5.07552087e-01 -4.67703938e-01] [-2.06497765e+00 3.63418162e-01 1.22537386e+00 -1.21098113e+00 3.99790138e-01 2.36648262e-01 3.16918492e-02 1.68458238e-01 4.91836250e-01 -1.86926275e-01 1.72783315e-01]] [[-4.33234453e-01 -2.90269160e+00 -6.57007456e-01 2.16146779e+00 6.35846376e-01 1.97146678e+00 -4.94799465e-02 1.17042005e+00 2.94877291e-02 2.99174356e+00 2.43736267e+00] [ 7.81427979e-01 8.75493824e-01 -4.83051205e+00 5.15442133e+00 1.63779688e+00 1.88597727e+00 1.22735417e+00 -3.58383131e+00 1.54807842e+00 -4.39776087e+00 -4.07026863e+00] [-1.83668470e+00 6.01455808e-01 1.17196190e+00 -4.56932259e+00 1.03015900e-02 2.30662465e+00 -4.62007427e+00 -5.72503853e+00 -2.15196609e-02 -7.29231644e+00 -1.28039598e+00] [ 1.11399829e+00 -2.40584397e+00 1.35717428e+00 -2.65514255e+00 -1.03916895e+00 -3.72685528e+00 -1.27618051e+00 -8.78578377e+00 2.69858217e+00 -3.53838706e+00 4.64696884e+00] [-3.77092510e-01 4.44403076e+00 2.80994415e+00 3.00185180e+00 1.53671420e+00 -6.27321839e-01 -7.28400707e-01 2.50730705e+00 1.24762380e+00 2.68321085e+00 3.04720449e+00] [ 4.82271290e+00 3.29134464e-02 2.29416943e+00 9.65680313e+00 -4.23567966e-02 -3.93662357e+00 -1.87093139e+00 1.93631601e+00 2.76334524e+00 3.69210005e+00 -4.96950746e-02] [-3.46756101e-01 -4.01027727e+00 -6.30724049e+00 2.49440503e+00 -9.92724061e-01 -3.02314138e+00 -3.68225634e-01 1.17046928e+00 2.32115746e-01 1.23467064e+00 -1.15143168e+00] [-1.24119377e+00 -3.80612850e+00 1.00166583e+00 1.20255601e+00 -3.26908255e+00 -4.52367926e+00 4.90483665e+00 -1.56105745e+00 1.47481394e+00 9.17016983e-01 
-1.89982414e+00] [ 1.17465353e+00 7.57938743e-01 -2.71808100e+00 2.33316827e+00 -9.09372568e-02 -2.39219189e-01 -3.65282869e+00 -4.17885160e+00 -1.12075148e+01 -6.07642174e-01 -3.78432560e+00] [ 3.94960022e+00 2.78333449e+00 -1.80791521e+00 1.34081483e-01 4.39555311e+00 -3.90700650e+00 2.80188978e-01 3.69633436e+00 -1.27563322e+00 6.08659506e-01 -3.87673402e+00] [-1.70217729e+00 5.35133266e+00 -1.13102508e+00 -3.35230112e-01 2.57838786e-01 -1.12445951e-01 -4.24472284e+00 3.40865684e+00 -1.24870086e+00 3.92875886e+00 4.93294030e-01]]]]; ov_res: [[[[-2.33727932e-01 -7.21855640e-01 -3.25783971e-03 5.53951710e-02 7.81383038e-01 3.15085435e+00 -4.20959175e-01 2.92955065e+00 -5.66958010e-01 1.85682845e+00 1.01066172e+00] [ 7.27188349e-01 -2.88604975e+00 5.39656115e+00 1.24716604e+00 6.33861542e-01 -3.79030848e+00 -1.42339540e+00 3.01189256e+00 -5.20549488e+00 -4.72048664e+00 -2.18384027e+00] [-8.88182402e-01 3.14031911e+00 -2.89825940e+00 1.53607082e+00 -6.66993666e+00 -3.60084844e+00 2.54466581e+00 3.39083433e+00 -2.94248986e+00 -5.25547788e-02 -1.29592526e+00] [-1.71985233e+00 -1.15786386e+00 4.78205025e-01 -2.02392316e+00 1.50707793e+00 -1.85993835e-01 -2.64935136e+00 -1.54903305e+00 -4.67607021e+00 9.24418163e+00 -3.70700091e-01] [ 1.56596625e+00 1.30590987e+00 2.56332517e+00 -3.29430699e+00 8.10768127e-01 -2.75927472e+00 3.85732031e+00 -1.41575322e-01 -1.41827166e+00 -2.64870906e+00 -9.11686480e-01] [-3.68007374e+00 4.84467602e+00 4.11306977e-01 -1.91157126e+00 3.81596279e+00 3.40032309e-01 5.15442193e-01 2.95451260e+00 3.76204610e-01 -3.82559419e+00 -8.86155665e-01] [-3.14332747e+00 -7.50661716e-02 -2.66925502e+00 -3.00287962e+00 3.97468954e-01 -7.79989362e-01 -2.82640839e+00 -1.16733408e+00 1.14127636e+00 -4.66817331e+00 -8.93786371e-01] [ 2.50149202e+00 -2.46440321e-01 -3.74741137e-01 -4.91739631e-01 2.88211346e+00 4.85120237e-01 -1.26269710e+00 2.50689316e+00 1.43857443e+00 -5.02028131e+00 2.10587025e+00] [-1.12746775e+00 -1.52728295e+00 1.81939077e+00 
2.13561463e+00 -5.84265769e-01 -2.24452376e+00 5.75542593e+00 -1.10371244e+00 -1.98412716e+00 -6.33730841e+00 1.16940057e+00] [ 3.87526810e-01 1.84892702e+00 2.56603098e+00 1.18560696e+00 -3.76542926e+00 5.81482935e+00 -2.62048006e+00 -4.64704275e+00 2.15331006e+00 -5.32911234e-02 2.51613879e+00] [-1.73835054e-01 -4.03636503e+00 1.72094619e+00 -2.46893954e+00 -6.25081301e-01 -2.13273311e+00 3.95474195e+00 -1.45659304e+00 4.16143608e+00 -1.85215306e+00 -1.19384915e-01]] [[-1.07682705e-01 -2.79040009e-01 1.01010799e+00 -4.96249974e-01 3.24162543e-01 -7.91868210e-01 -2.58924818e+00 1.46361661e+00 -2.80251932e+00 -1.76404142e+00 -6.90968096e-01] [ 2.34607950e-01 2.35986441e-01 -1.26810491e+00 5.79277337e-01 -2.40863037e+00 -3.38950133e+00 1.14916742e+00 5.65050840e-01 -3.32358432e+00 8.31439137e-01 5.10263681e-01] [-1.30145192e+00 2.47774363e-01 -2.27889323e+00 -2.61215138e+00 -2.64646649e+00 2.72281981e+00 2.90079832e+00 -8.90207142e-02 -9.81652498e-01 1.88936305e+00 -1.92884541e+00] [ 1.15176082e+00 -2.80150437e+00 4.88984466e-01 -3.28910232e+00 8.78593504e-01 -1.49681836e-01 1.38056219e+00 -1.93204188e+00 8.00665617e-01 -1.74108648e+00 -2.42645001e+00] [-2.08104491e+00 -9.15073305e-02 -6.64596379e-01 -5.49404430e+00 3.01123947e-01 -1.52000678e+00 5.56924045e-01 -9.60436046e-01 -1.59646809e+00 -4.17161369e+00 -5.55795372e-01] [-1.40190899e+00 1.50290799e+00 -2.16980147e+00 -1.39636838e+00 1.37896490e+00 1.10553145e+00 -1.44216895e+00 -9.37760890e-01 -1.61615884e+00 -3.04445434e+00 -3.94235939e-01] [ 5.87004960e-01 4.19620132e+00 -1.26465857e+00 1.81836987e+00 2.70392251e+00 -8.90020847e-01 -2.99589014e+00 8.52093935e-01 -1.34796023e+00 -2.13144803e+00 -8.62056911e-02] [ 3.20640445e-01 -2.88502312e+00 1.04643032e-01 1.32932532e+00 2.34533572e+00 -2.43198919e+00 3.03040075e+00 1.79999638e+00 1.64321578e+00 -4.01756334e+00 1.44045401e+00] [-1.55797493e+00 1.73548684e-01 1.61348832e+00 -4.24364239e-01 -1.73906541e+00 2.73904991e+00 2.50854516e+00 -9.46709275e-01 
-1.98399234e+00 7.81238914e-01 2.44485378e+00] [-1.33074850e-01 -5.91639161e-01 3.49414617e-01 -1.85590160e+00 -3.17988336e-01 3.79256874e-01 -2.86464500e+00 -2.23838615e+00 2.14651155e+00 5.07551849e-01 -4.67703968e-01] [-2.06497765e+00 3.63418192e-01 1.22537374e+00 -1.21098125e+00 3.99790108e-01 2.36648247e-01 3.16918343e-02 1.68458238e-01 4.91836280e-01 -1.86926275e-01 1.72783315e-01]] [[-4.33234453e-01 -2.90269160e+00 -6.57007337e-01 2.16146779e+00 6.35846376e-01 1.97146666e+00 -4.94799986e-02 1.17042017e+00 2.94878725e-02 2.99174356e+00 2.43736267e+00] [ 7.81427979e-01 8.75493765e-01 -4.83051157e+00 5.15442133e+00 1.63779688e+00 1.88597739e+00 1.22735429e+00 -3.58383131e+00 1.54807854e+00 -4.39776087e+00 -4.07026863e+00] [-1.83668482e+00 6.01455808e-01 1.17196202e+00 -4.56932259e+00 1.03018628e-02 2.30662441e+00 -4.62007427e+00 -5.72503805e+00 -2.15197075e-02 -7.29231644e+00 -1.28039598e+00] [ 1.11399817e+00 -2.40584421e+00 1.35717463e+00 -2.65514255e+00 -1.03916907e+00 -3.72685552e+00 -1.27618039e+00 -8.78578377e+00 2.69858217e+00 -3.53838730e+00 4.64696884e+00] [-3.77092510e-01 4.44403028e+00 2.80994463e+00 3.00185132e+00 1.53671432e+00 -6.27321959e-01 -7.28400588e-01 2.50730729e+00 1.24762392e+00 2.68321061e+00 3.04720449e+00] [ 4.82271290e+00 3.29139195e-02 2.29416919e+00 9.65680408e+00 -4.23567370e-02 -3.93662357e+00 -1.87093151e+00 1.93631613e+00 2.76334524e+00 3.69210005e+00 -4.96950559e-02] [-3.46756130e-01 -4.01027727e+00 -6.30724049e+00 2.49440479e+00 -9.92724001e-01 -3.02314115e+00 -3.68225664e-01 1.17046940e+00 2.32115731e-01 1.23467088e+00 -1.15143168e+00] [-1.24119389e+00 -3.80612874e+00 1.00166571e+00 1.20255601e+00 -3.26908255e+00 -4.52367926e+00 4.90483665e+00 -1.56105745e+00 1.47481394e+00 9.17016983e-01 -1.89982402e+00] [ 1.17465353e+00 7.57939041e-01 -2.71808100e+00 2.33316803e+00 -9.09371749e-02 -2.39219293e-01 -3.65282845e+00 -4.17885160e+00 -1.12075148e+01 -6.07642174e-01 -3.78432536e+00] [ 3.94959998e+00 2.78333449e+00 -1.80791521e+00 
1.34081542e-01 4.39555264e+00 -3.90700650e+00 2.80189097e-01 3.69633412e+00 -1.27563334e+00 6.08659267e-01 -3.87673402e+00] [-1.70217729e+00 5.35133266e+00 -1.13102496e+00 -3.35230142e-01 2.57838815e-01 -1.12445988e-01 -4.24472284e+00 3.40865684e+00 -1.24870086e+00 3.92875910e+00 4.93294030e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 2, 2], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_648.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, 2, strides=[12, 4, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[ -2.9060946 4.7292805 -0.03473075 -3.0745735 -1.4293377 -3.3951132 -0.68691355 3.241552 -4.925345 ] [ -2.9999907 0.36614835 -4.655768 -4.7729383 -3.6249952 1.284021 -4.604736 -1.2273176 -4.3622246 ] [ -4.7036915 2.9396775 -5.42576 -9.027281 8.716638 -6.626251 1.2781271 -3.8074837 -5.254539 ] [ 1.4820096 -4.040612 -7.611472 1.5281186 3.7639065 -3.8005223 -4.5580745 -0.8495984 -0.8489369 ] [ 4.496001 -1.7522199 6.0398383 0.20422235 2.1786797 -0.48334584 -2.073886 -0.13945323 2.1009645 ] [ 0.73471594 -5.243031 0.84891504 -2.2224457 -10.198094 7.4514933 -4.6957316 3.866161 8.925457 ] [ 9.209763 -5.2545805 -4.165382 -0.94798225 -0.03499871 6.564872 3.9240885 0.15660611 5.9711757 ] [ -0.2667281 -11.548815 7.087214 0.4328526 4.843751 1.7257929 11.586518 -3.0658996 -0.02483214] [ 3.2943676 -0.02966373 0.6575575 -6.112382 5.959931 -2.1918583 -2.0577123 -4.0746636 2.380155 ]] [[ -5.9442315 -1.7332845 -3.7567258 -0.54110414 -0.76337296 -0.18309624 -0.5101503 -1.8275106 -3.7874932 ] [ 1.3302703 1.1083217 3.0282192 0.02987143 -0.9341973 -0.82058406 2.070632 -0.7940871 4.421757 ] [ 1.5069464 -1.6710087 -1.4421903 0.6652455 0.02408542 -2.7241008 1.3386081 -0.8073809 -0.03002138] [ 0.4328167 
0.70627445 1.7665285 0.11158621 -0.77090937 2.3477836 -3.5612152 -2.0976183 -0.4502997 ] [ -1.3504281 2.7949717 0.68279713 -1.1739125 -0.05529829 2.4179385 2.581743 1.6983097 2.2857077 ] [ 1.2771382 -1.7912799 -0.88582295 0.6293929 2.1486278 -4.811602 2.3620486 -4.4554515 -1.2519745 ] [ -3.099817 0.70267564 0.5833423 0.90098786 2.3258512 -0.4185166 1.9114157 -0.18616027 -2.717451 ] [ 0.9850641 3.2082546 -3.7601552 -0.44089815 -2.0032587 1.1727177 -2.983538 0.53225374 2.4222784 ] [ -4.511708 -0.45564866 -1.1863292 1.1142721 0.96244246 1.0677279 0.47204408 2.9113617 -2.0473046 ]] [[ -0.2047792 -2.31097 -0.5598054 3.987489 1.399735 4.954349 0.4088848 -1.7643238 2.4421644 ] [ 1.8526763 1.45191 2.3577192 -1.6748557 -1.4619609 3.0827355 -1.2781092 4.298508 3.216112 ] [ 1.8758938 -0.7146608 3.8074713 2.4229627 -2.7976604 -2.4492013 7.037678 -2.0486019 3.429945 ] [ -2.3126445 -0.6477113 -0.1142268 -3.1501107 -2.5072193 -3.2631118 -0.637767 -1.9125482 -0.5121623 ] [ -2.6180375 5.0761166 -2.3234751 -1.8669682 -1.2266331 2.4439273 -2.9151678 2.3527286 -0.6871468 ] [ -3.9300344 2.9031012 0.08808681 5.619689 -1.6240953 -0.28732222 -2.471916 -0.46425268 -2.184432 ] [ -3.8747282 3.9755297 0.88921136 -0.23674652 3.0665684 -6.0599933 -2.2698936 -3.80716 -2.7955074 ] [ 1.1586169 5.229229 -0.6122335 2.6707113 -4.1108446 -0.8863777 -6.0898075 3.653451 2.175237 ] [ -1.7991902 1.3237798 -3.9991431 5.823405 -0.19786486 4.3159475 0.8093017 0.2994299 -0.20432319]]]]; ov_res: [[[[ -2.9060946 4.72928 -0.03473087 -3.0745735 -1.429338 -3.3951128 -0.6869133 3.2415519 -4.9253445 ] [ -2.9999907 0.3661483 -4.655768 -4.7729383 -3.6249952 1.2840208 -4.6047363 -1.227318 -4.3622246 ] [ -4.703692 2.9396775 -5.4257607 -9.027281 8.716638 -6.626251 1.2781278 -3.8074837 -5.254539 ] [ 1.482009 -4.040612 -7.611472 1.5281186 3.7639065 -3.8005223 -4.558074 -0.8495984 -0.8489368 ] [ 4.4960012 -1.7522198 6.039838 0.20422211 2.1786802 -0.4833463 -2.0738857 -0.13945371 2.1009645 ] [ 0.7347156 -5.243031 0.8489149 
-2.2224462 -10.198095 7.4514937 -4.6957316 3.8661613 8.925457 ] [ 9.209763 -5.2545805 -4.1653814 -0.94798243 -0.0349985 6.5648713 3.9240887 0.15660635 5.9711757 ] [ -0.26672798 -11.548815 7.0872135 0.43285254 4.8437514 1.7257924 11.586517 -3.0658996 -0.02483214] [ 3.2943673 -0.02966361 0.65755725 -6.112382 5.959931 -2.191858 -2.0577123 -4.0746636 2.3801553 ]] [[ -5.944231 -1.7332845 -3.756726 -0.54110414 -0.76337296 -0.1830962 -0.51015025 -1.8275106 -3.7874932 ] [ 1.3302703 1.1083217 3.0282192 0.0298714 -0.9341973 -0.8205841 2.0706322 -0.7940872 4.4217577 ] [ 1.5069464 -1.6710087 -1.4421904 0.6652455 0.02408548 -2.7241006 1.3386078 -0.807381 -0.03002141] [ 0.43281674 0.70627445 1.7665285 0.11158622 -0.7709095 2.3477833 -3.5612152 -2.0976186 -0.45029968] [ -1.3504282 2.794972 0.6827972 -1.1739125 -0.05529828 2.4179385 2.5817432 1.6983097 2.2857077 ] [ 1.2771382 -1.7912798 -0.88582295 0.629393 2.1486278 -4.811602 2.3620486 -4.4554515 -1.2519746 ] [ -3.099817 0.70267564 0.5833423 0.9009878 2.3258512 -0.41851658 1.9114156 -0.18616027 -2.7174509 ] [ 0.9850641 3.2082546 -3.7601552 -0.44089815 -2.0032587 1.1727177 -2.9835382 0.5322537 2.4222784 ] [ -4.5117073 -0.45564884 -1.1863294 1.1142722 0.9624425 1.0677279 0.47204402 2.9113617 -2.0473044 ]] [[ -0.20477945 -2.3109703 -0.5598056 3.987489 1.399735 4.9543495 0.40888482 -1.7643238 2.4421644 ] [ 1.8526762 1.4519101 2.3577192 -1.6748556 -1.461961 3.0827358 -1.278109 4.2985077 3.216112 ] [ 1.8758938 -0.71466094 3.807471 2.4229627 -2.7976604 -2.4492016 7.0376782 -2.0486023 3.4299455 ] [ -2.3126447 -0.6477114 -0.11422645 -3.1501107 -2.5072193 -3.2631116 -0.637767 -1.9125483 -0.5121624 ] [ -2.6180375 5.076116 -2.323475 -1.8669683 -1.2266331 2.4439268 -2.9151676 2.3527286 -0.6871468 ] [ -3.9300349 2.9031017 0.08808678 5.6196885 -1.6240952 -0.28732234 -2.471916 -0.46425268 -2.1844318 ] [ -3.8747277 3.9755304 0.889211 -0.23674636 3.0665684 -6.0599933 -2.2698934 -3.8071601 -2.7955072 ] [ 1.1586167 5.229229 -0.61223364 2.6707113 
-4.110845 -0.8863777 -6.089807 3.653451 2.175237 ] [ -1.7991899 1.3237798 -3.9991426 5.8234057 -0.1978648 4.3159475 0.8093018 0.2994299 -0.20432328]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_650.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.1587 (2,1,.,.) = -0.8994 (3,1,.,.) = -0.4608 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[ 2.02713713e-01 2.45377436e-01 -9.38663632e-02 4.69877012e-02 -1.85283095e-01 -4.56619501e-01 -2.54056394e-01 1.48685411e-01 -6.59127831e-02 2.22000495e-01] [ 1.04370326e-01 -1.42925844e-01 2.48319446e-03 -4.42066267e-02 2.40088582e-01 -7.12302327e-02 -1.12352721e-01 1.85120478e-01 1.12461343e-01 1.25457585e-01] [-9.36857760e-02 1.29086092e-01 -9.97959301e-02 -2.72818446e-01 -1.95279911e-01 -2.36727223e-01 3.97165073e-03 -8.30981061e-02 9.98515263e-02 -1.58795454e-02] [-1.35617316e-01 -2.81217635e-01 2.69723594e-01 1.13788888e-01 -1.14627384e-01 1.09242782e-01 -2.03287918e-02 6.55177087e-02 1.00133255e-01 5.79993762e-02] [ 2.39435107e-01 -4.99547981e-02 1.50101736e-01 -1.51534835e-02 8.46826360e-02 1.52725518e-01 8.58216882e-02 7.03200847e-02 1.03180267e-01 3.11753247e-02] [ 1.32589683e-01 -4.46052030e-02 -1.42197698e-01 1.26339942e-01 9.60749853e-03 1.18301556e-01 1.81287989e-01 1.94777921e-01 -3.04347761e-02 -4.14514951e-02] [ 7.80535787e-02 -2.76953913e-02 -1.63981259e-01 -1.06542654e-01 -7.83803500e-03 -2.15342324e-02 -2.61819929e-01 1.71103895e-01 1.27724200e-01 -1.81704864e-01] [ 9.01899021e-03 
-5.60937375e-02 4.53327782e-02 -3.19577992e-01 -1.80500686e-01 -1.06881194e-01 -6.85866997e-02 2.30702564e-01 9.46415439e-02 -1.09815840e-02] [ 4.93617021e-02 7.81725571e-02 -1.47081437e-02 -1.66417241e-01 -2.70397123e-02 -2.47923791e-01 1.34748369e-01 3.45920287e-02 -1.21352434e-01 1.33754343e-01] [-3.37645441e-01 -3.75693701e-02 1.22152038e-01 2.11948883e-02 1.49407089e-01 -1.80654719e-01 1.59836143e-01 -2.28487805e-01 2.42387414e-01 3.57440531e-01]] [[ 3.27261329e-01 -8.50046933e-01 8.28859329e-01 5.92167452e-02 1.18248820e+00 9.51212227e-01 -1.18254471e+00 -6.42033219e-01 -8.77985179e-01 -1.17366099e+00] [-2.41539538e-01 3.80488366e-01 6.52074814e-01 3.46938044e-01 1.36591780e+00 -8.67187679e-01 -2.11187899e-01 -4.02760655e-01 2.05197811e-01 -2.73259699e-01] [ 6.29106402e-01 -1.26747414e-01 -2.58908957e-01 -8.60128999e-02 9.00302976e-02 1.31075120e+00 -1.11504507e+00 2.83169776e-01 -1.53965235e-01 -1.74060512e+00] [-2.01684743e-01 1.35744703e+00 -1.76239049e+00 6.11865997e-01 1.61560833e-01 1.24783158e-01 1.96938291e-01 -3.15379232e-01 1.00268161e+00 8.73076856e-01] [ 9.43474412e-01 -3.20953459e-01 4.75044161e-01 6.89233303e-01 -5.67702591e-01 2.18188763e-01 -4.15182412e-01 -2.43602600e-02 -1.68598455e-03 8.45741853e-02] [-1.06446075e+00 -5.05344808e-01 1.05314236e-02 -8.90858233e-01 -4.67942744e-01 -3.79250139e-01 9.47224021e-01 3.92295957e-01 -2.34453869e+00 -5.23136914e-01] [-9.72444355e-01 -2.92916447e-01 3.19369853e-01 1.73674986e-01 -1.59624553e+00 1.22138834e+00 -3.18770021e-01 -1.21840096e+00 -2.28448678e-02 2.54173070e-01] [-9.21310037e-02 -1.15936744e+00 5.75012624e-01 -1.44573283e+00 -1.96742397e-02 -3.01160365e-02 3.38518232e-01 -1.20551258e-01 8.00975621e-01 2.54354298e-01] [-4.96265352e-01 5.91405571e-01 1.57688653e+00 -1.26682568e+00 -1.27955925e+00 1.36985511e-01 -3.52427989e-01 1.22335374e+00 -2.12782234e-01 4.89837259e-01] [-2.51613200e-01 -2.96818793e-01 1.33591843e+00 6.48799300e-01 1.23538621e-01 9.94166672e-01 -1.04084283e-01 
2.47011274e-01 3.93865526e-01 -3.30066204e-01]] [[-7.06463099e-01 -2.12442711e-01 -2.57143974e-01 4.26629722e-01 1.06694853e+00 -3.68895322e-01 -1.05309677e+00 -6.35291100e-01 -2.58739531e-01 1.02001512e-02] [-1.03513587e+00 -7.13826656e-01 4.54496056e-01 -3.24333422e-02 -5.49652219e-01 -3.82813543e-01 -1.05042737e-02 2.01198906e-01 1.05123147e-01 8.13541234e-01] [-1.50139302e-01 -2.19534397e-01 -3.31899822e-01 -4.92355675e-01 6.10056758e-01 -5.79050839e-01 3.55768353e-01 3.00203949e-01 2.46183529e-01 3.85139883e-01] [ 6.86614871e-01 7.37793326e-01 -2.95116276e-01 5.11429012e-01 2.77160108e-01 2.27184251e-01 -4.85894382e-01 3.38628471e-01 -1.35552943e-01 5.16314685e-01] [-9.62627530e-01 2.03968748e-01 -3.20903450e-01 -4.09790158e-01 1.86756104e-01 -5.39570153e-01 4.57538396e-01 5.66004515e-01 -5.86986095e-02 -3.11274976e-01] [-1.86760966e-02 2.21617550e-01 -5.81542432e-01 -6.04195185e-02 -6.25668317e-02 -8.00230168e-03 -1.44629665e-02 1.37464598e-01 -1.32749707e-01 -2.31487900e-01] [ 3.83822173e-01 6.43033803e-01 2.65348017e-01 2.56149620e-01 2.92489439e-01 -4.27195966e-01 3.13871384e-01 -1.36354342e-01 -9.09174681e-02 -3.37433398e-01] [-1.46468356e-01 2.04658762e-01 -4.27031875e-01 -5.13563693e-01 -1.07894182e-01 -1.28709331e-01 3.05779964e-01 -8.86444747e-02 -5.28479099e-01 -8.54737997e-01] [ 7.60755360e-01 -6.37118340e-01 -3.34579736e-01 -2.63872057e-01 3.87781322e-01 4.39877063e-01 3.44145685e-01 3.59170884e-02 -2.95540810e-01 -2.88589209e-01] [-1.94415689e-01 -7.29165852e-01 -7.07536638e-01 -1.13183804e-01 6.16004840e-02 3.17025930e-01 9.98600498e-02 3.79325837e-01 -9.70107540e-02 -1.11137144e-01]]]]; ov_res: [[[[ 2.02713713e-01 2.45377436e-01 -9.38663632e-02 4.69877012e-02 -1.85283095e-01 -4.56619501e-01 -2.54056394e-01 1.48685411e-01 -6.59127831e-02 2.22000495e-01] [ 1.04370326e-01 -1.42925844e-01 2.48319446e-03 -4.42066267e-02 2.40088582e-01 -7.12302327e-02 -1.12352721e-01 1.85120478e-01 1.12461343e-01 1.25457585e-01] [-9.36857760e-02 1.29086092e-01 
-9.97959301e-02 -2.72818446e-01 -1.95279911e-01 -2.36727223e-01 3.97165073e-03 -8.30981061e-02 9.98515263e-02 -1.58795454e-02] [-1.35617316e-01 -2.81217635e-01 2.69723594e-01 1.13788888e-01 -1.14627384e-01 1.09242782e-01 -2.03287918e-02 6.55177087e-02 1.00133255e-01 5.79993762e-02] [ 2.39435107e-01 -4.99547981e-02 1.50101736e-01 -1.51534835e-02 8.46826360e-02 1.52725518e-01 8.58216882e-02 7.03200847e-02 1.03180267e-01 3.11753247e-02] [ 1.32589683e-01 -4.46052030e-02 -1.42197698e-01 1.26339942e-01 9.60749853e-03 1.18301556e-01 1.81287989e-01 1.94777921e-01 -3.04347761e-02 -4.14514951e-02] [ 7.80535787e-02 -2.76953913e-02 -1.63981259e-01 -1.06542654e-01 -7.83803500e-03 -2.15342324e-02 -2.61819929e-01 1.71103895e-01 1.27724200e-01 -1.81704864e-01] [ 9.01899021e-03 -5.60937375e-02 4.53327782e-02 -3.19577992e-01 -1.80500686e-01 -1.06881194e-01 -6.85866997e-02 2.30702564e-01 9.46415439e-02 -1.09815840e-02] [ 4.93617021e-02 7.81725571e-02 -1.47081437e-02 -1.66417241e-01 -2.70397123e-02 -2.47923791e-01 1.34748369e-01 3.45920287e-02 -1.21352434e-01 1.33754343e-01] [-3.37645441e-01 -3.75693701e-02 1.22152038e-01 2.11948883e-02 1.49407089e-01 -1.80654719e-01 1.59836143e-01 -2.28487805e-01 2.42387414e-01 3.57440531e-01]] [[ 3.27261329e-01 -8.50046933e-01 8.28859329e-01 5.92167452e-02 1.18248820e+00 9.51212227e-01 -1.18254471e+00 -6.42033219e-01 -8.77985179e-01 -1.17366099e+00] [-2.41539538e-01 3.80488366e-01 6.52074814e-01 3.46938044e-01 1.36591780e+00 -8.67187679e-01 -2.11187899e-01 -4.02760655e-01 2.05197811e-01 -2.73259699e-01] [ 6.29106402e-01 -1.26747414e-01 -2.58908957e-01 -8.60128999e-02 9.00302976e-02 1.31075120e+00 -1.11504507e+00 2.83169776e-01 -1.53965235e-01 -1.74060512e+00] [-2.01684743e-01 1.35744703e+00 -1.76239049e+00 6.11865997e-01 1.61560833e-01 1.24783158e-01 1.96938291e-01 -3.15379232e-01 1.00268161e+00 8.73076856e-01] [ 9.43474412e-01 -3.20953459e-01 4.75044161e-01 6.89233303e-01 -5.67702591e-01 2.18188763e-01 -4.15182412e-01 -2.43602600e-02 
-1.68598455e-03 8.45741853e-02] [-1.06446075e+00 -5.05344808e-01 1.05314236e-02 -8.90858233e-01 -4.67942744e-01 -3.79250139e-01 9.47224021e-01 3.92295957e-01 -2.34453869e+00 -5.23136914e-01] [-9.72444355e-01 -2.92916447e-01 3.19369853e-01 1.73674986e-01 -1.59624553e+00 1.22138834e+00 -3.18770021e-01 -1.21840096e+00 -2.28448678e-02 2.54173070e-01] [-9.21310037e-02 -1.15936744e+00 5.75012624e-01 -1.44573283e+00 -1.96742397e-02 -3.01160365e-02 3.38518232e-01 -1.20551258e-01 8.00975621e-01 2.54354298e-01] [-4.96265352e-01 5.91405571e-01 1.57688653e+00 -1.26682568e+00 -1.27955925e+00 1.36985511e-01 -3.52427989e-01 1.22335374e+00 -2.12782234e-01 4.89837259e-01] [-2.51613200e-01 -2.96818793e-01 1.33591843e+00 6.48799300e-01 1.23538621e-01 9.94166672e-01 -1.04084283e-01 2.47011274e-01 3.93865526e-01 -3.30066204e-01]] [[-7.06463099e-01 -2.12442711e-01 -2.57143974e-01 4.26629722e-01 1.06694853e+00 -3.68895322e-01 -1.05309677e+00 -6.35291100e-01 -2.58739531e-01 1.02001512e-02] [-1.03513587e+00 -7.13826656e-01 4.54496056e-01 -3.24333422e-02 -5.49652219e-01 -3.82813543e-01 -1.05042737e-02 2.01198906e-01 1.05123147e-01 8.13541234e-01] [-1.50139302e-01 -2.19534397e-01 -3.31899822e-01 -4.92355675e-01 6.10056758e-01 -5.79050839e-01 3.55768353e-01 3.00203949e-01 2.46183529e-01 3.85139883e-01] [ 6.86614871e-01 7.37793326e-01 -2.95116276e-01 5.11429012e-01 2.77160108e-01 2.27184251e-01 -4.85894382e-01 3.38628471e-01 -1.35552943e-01 5.16314685e-01] [-9.62627530e-01 2.03968748e-01 -3.20903450e-01 -4.09790158e-01 1.86756104e-01 -5.39570153e-01 4.57538396e-01 5.66004515e-01 -5.86986095e-02 -3.11274976e-01] [-1.86760966e-02 2.21617550e-01 -5.81542432e-01 -6.04195185e-02 -6.25668317e-02 -8.00230168e-03 -1.44629665e-02 1.37464598e-01 -1.32749707e-01 -2.31487900e-01] [ 3.83822173e-01 6.43033803e-01 2.65348017e-01 2.56149620e-01 2.92489439e-01 -4.27195966e-01 3.13871384e-01 -1.36354342e-01 -9.09174681e-02 -3.37433398e-01] [-1.46468356e-01 2.04658762e-01 -4.27031875e-01 -5.13563693e-01 
-1.07894182e-01 -1.28709331e-01 3.05779964e-01 -8.86444747e-02 -5.28479099e-01 -8.54737997e-01] [ 7.60755360e-01 -6.37118340e-01 -3.34579736e-01 -2.63872057e-01 3.87781322e-01 4.39877063e-01 3.44145685e-01 3.59170884e-02 -2.95540810e-01 -2.88589209e-01] [-1.94415689e-01 -7.29165852e-01 -7.07536638e-01 -1.13183804e-01 6.16004840e-02 3.17025930e-01 9.98600498e-02 3.79325837e-01 -9.70107540e-02 -1.11137144e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_652.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.7916 (2,1,.,.) = -0.1945 (3,1,.,.) = 0.1116 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[-3.61067921e-01 -1.97401300e-01 6.42072856e-01 1.38921559e+00 1.57781214e-01 -6.02818668e-01 8.84409845e-01 7.10964203e-02 6.82778120e-01 -7.13104665e-01] [-5.75006045e-02 7.43892193e-01 -5.78998685e-01 -9.29398060e-01 -3.99793088e-01 1.12874091e+00 -6.06525600e-01 5.60918331e-01 -1.27292287e+00 -1.35660851e+00] [-1.34810400e+00 7.27710605e-01 6.72191679e-01 4.75694507e-01 -8.44561398e-01 -8.55822265e-01 -8.10531735e-01 7.78170764e-01 -6.95412278e-01 -2.55611241e-01] [-6.58310831e-01 -7.48013020e-01 -1.16643399e-01 3.15482974e-01 -4.93675292e-01 -1.45291197e+00 -3.74738544e-01 2.28127301e-01 -6.11924469e-01 8.37847471e-01] [ 5.28852828e-02 6.48073792e-01 6.80064380e-01 1.36559188e+00 3.07725370e-02 -3.01757127e-01 -1.04776680e+00 -6.78165197e-01 1.81294262e-01 -2.01201320e-01] [ 3.95292282e-01 2.23146543e-01 -1.06336720e-01 3.33569050e-01 7.94972479e-01 -9.20976698e-01 4.92542773e-01 1.02093613e+00 2.65748203e-01 1.42009699e+00] [-1.52690873e-01 2.19361082e-01 1.72936952e+00 -3.15366477e-01 -4.04782742e-01 -1.90974504e-01 -1.26782846e+00 1.26881456e+00 -3.42339545e-01 -1.08366084e+00] [ 7.03050077e-01 
3.47093731e-01 6.76762402e-01 -7.50463367e-01 -1.64606035e+00 -2.82354069e+00 -7.61865616e-01 -7.20796824e-01 -3.55163962e-01 1.56042099e+00] [-8.41064692e-01 -9.55383778e-02 -1.26326835e+00 1.24871433e+00 -6.04673743e-01 5.87940328e-02 1.90399051e-01 -1.61429667e+00 3.42359066e-01 4.44812059e-01] [ 2.10682563e-02 -6.63483679e-01 1.10758281e+00 -2.44358823e-01 7.29344249e-01 2.20382094e+00 1.50185752e+00 -8.67152572e-01 -1.24483667e-01 -2.79430446e-04]] [[ 1.44529447e-01 -3.25622559e-01 -7.69953504e-02 2.14877307e-01 5.94702065e-02 -1.76521726e-02 -1.86858028e-02 -1.54495627e-01 -1.24853335e-01 -2.47592837e-01] [-9.52604488e-02 3.85479741e-02 2.25235730e-01 -3.43105108e-01 6.57877102e-02 2.48956889e-01 9.07454565e-02 1.02071859e-01 8.87561217e-03 -7.70098642e-02] [-7.97414929e-02 -2.49830447e-02 -2.07739636e-01 -2.69820809e-01 -2.79708225e-02 1.47737667e-01 1.63031921e-01 9.06271338e-02 -3.44108880e-01 2.41333261e-01] [ 2.43065059e-01 -2.94351071e-01 1.40869796e-01 8.71426314e-02 1.18429013e-01 -5.37578642e-01 -5.92897162e-02 -8.84562135e-02 -1.81157067e-01 -1.47659322e-02] [-1.87493071e-01 8.50369707e-02 -2.33688518e-01 7.45440498e-02 -1.46690886e-02 -1.49054557e-01 -6.21131435e-02 1.15148813e-01 -7.02634081e-02 4.37192500e-01] [-5.92256486e-02 2.94065084e-02 -2.96264410e-01 5.03687859e-01 1.12032034e-01 6.31143972e-02 1.09514035e-01 -1.14684932e-01 -6.59667328e-02 -4.01442312e-02] [ 6.22524284e-02 6.67750165e-02 -1.81746721e-01 2.85709347e-03 6.62724301e-02 2.70146579e-01 1.05975010e-01 -3.02984506e-01 1.50374115e-01 7.71752149e-02] [-9.60224122e-02 -4.58723038e-01 -2.86842342e-02 -1.30761296e-01 1.80896427e-02 1.38991117e-01 -1.20597117e-01 9.44684520e-02 9.50031541e-03 -7.24454736e-03] [ 1.65138647e-01 1.71117991e-01 -2.89359186e-02 2.04941571e-01 -1.08836055e-01 -2.64198154e-01 3.24813902e-01 2.24367723e-01 2.02055722e-01 -4.54388380e-01] [ 2.11280480e-01 -3.92453261e-02 3.85618210e-01 2.04284400e-01 -4.63686958e-02 -1.63378000e-01 7.63957918e-01 
-1.42685845e-01 -1.35208920e-01 -3.22998673e-01]] [[-9.94168781e-03 -2.60865539e-01 2.25940660e-01 1.10314213e-01 -2.95240581e-02 9.75293368e-02 2.29907528e-01 -7.66583905e-02 4.33466472e-02 -9.64930281e-02] [ 2.01148242e-01 -1.42079040e-01 -4.87583317e-03 2.81658256e-03 -7.44845346e-02 2.83135902e-02 -2.81065479e-02 3.75271924e-02 1.47080705e-01 -2.49914732e-02] [ 1.24682300e-01 -2.92628892e-02 1.48051426e-01 7.16055110e-02 -8.93719345e-02 2.37398334e-02 -3.63201573e-02 -1.27721474e-01 -3.58086862e-02 -2.90585682e-02] [-9.23407376e-02 -1.22560672e-01 7.72002861e-02 3.28375027e-02 -2.32716911e-02 -5.03416136e-02 -2.99678519e-02 1.50345843e-02 -1.12500377e-01 1.64216027e-01] [-3.98791321e-02 1.89181250e-02 8.89746919e-02 4.89844829e-02 -6.40093088e-02 9.58637968e-02 2.19770864e-01 1.89135611e-01 -1.03558935e-01 -6.41926453e-02] [-1.22047169e-02 -4.01125383e-03 -2.68280655e-01 -5.68349436e-02 2.38542065e-01 -2.25879867e-02 5.40648960e-02 -5.72089851e-02 -1.56835169e-01 -6.89361319e-02] [ 1.05612151e-01 -4.27759066e-02 -6.16485998e-02 -3.41354273e-02 4.77256663e-02 -1.18503533e-02 -5.61673194e-02 8.23962223e-03 -4.23954315e-02 1.02170788e-01] [-5.29753119e-02 3.68713811e-02 -1.05833322e-01 3.68316658e-02 -1.17839694e-01 1.05226450e-01 -3.73446383e-02 -9.30920988e-02 1.81910582e-02 -9.37085971e-02] [ 5.18762954e-02 -2.36982144e-02 -3.37299444e-02 3.25011984e-02 -7.17445882e-03 3.89268845e-02 -2.86761433e-01 -7.87332878e-02 8.17824304e-02 -1.33686885e-01] [ 1.40132397e-01 3.04733776e-02 3.99481691e-02 1.29128486e-01 8.41264278e-02 9.61120650e-02 5.53459004e-02 -8.69520009e-02 -2.34892145e-02 2.15414017e-01]]]]; ov_res: [[[[-3.61067921e-01 -1.97401300e-01 6.42072856e-01 1.38921559e+00 1.57781214e-01 -6.02818668e-01 8.84409845e-01 7.10964203e-02 6.82778120e-01 -7.13104665e-01] [-5.75006045e-02 7.43892193e-01 -5.78998685e-01 -9.29398060e-01 -3.99793088e-01 1.12874091e+00 -6.06525600e-01 5.60918331e-01 -1.27292287e+00 -1.35660851e+00] [-1.34810400e+00 7.27710605e-01 
6.72191679e-01 4.75694507e-01 -8.44561398e-01 -8.55822265e-01 -8.10531735e-01 7.78170764e-01 -6.95412278e-01 -2.55611241e-01] [-6.58310831e-01 -7.48013020e-01 -1.16643399e-01 3.15482974e-01 -4.93675292e-01 -1.45291197e+00 -3.74738544e-01 2.28127301e-01 -6.11924469e-01 8.37847471e-01] [ 5.28852828e-02 6.48073792e-01 6.80064380e-01 1.36559188e+00 3.07725370e-02 -3.01757127e-01 -1.04776680e+00 -6.78165197e-01 1.81294262e-01 -2.01201320e-01] [ 3.95292282e-01 2.23146543e-01 -1.06336720e-01 3.33569050e-01 7.94972479e-01 -9.20976698e-01 4.92542773e-01 1.02093613e+00 2.65748203e-01 1.42009699e+00] [-1.52690873e-01 2.19361082e-01 1.72936952e+00 -3.15366477e-01 -4.04782742e-01 -1.90974504e-01 -1.26782846e+00 1.26881456e+00 -3.42339545e-01 -1.08366084e+00] [ 7.03050077e-01 3.47093731e-01 6.76762402e-01 -7.50463367e-01 -1.64606035e+00 -2.82354069e+00 -7.61865616e-01 -7.20796824e-01 -3.55163962e-01 1.56042099e+00] [-8.41064692e-01 -9.55383778e-02 -1.26326835e+00 1.24871433e+00 -6.04673743e-01 5.87940328e-02 1.90399051e-01 -1.61429667e+00 3.42359066e-01 4.44812059e-01] [ 2.10682563e-02 -6.63483679e-01 1.10758281e+00 -2.44358823e-01 7.29344249e-01 2.20382094e+00 1.50185752e+00 -8.67152572e-01 -1.24483667e-01 -2.79430446e-04]] [[ 1.44529447e-01 -3.25622559e-01 -7.69953504e-02 2.14877307e-01 5.94702065e-02 -1.76521726e-02 -1.86858028e-02 -1.54495627e-01 -1.24853335e-01 -2.47592837e-01] [-9.52604488e-02 3.85479741e-02 2.25235730e-01 -3.43105108e-01 6.57877102e-02 2.48956889e-01 9.07454565e-02 1.02071859e-01 8.87561217e-03 -7.70098642e-02] [-7.97414929e-02 -2.49830447e-02 -2.07739636e-01 -2.69820809e-01 -2.79708225e-02 1.47737667e-01 1.63031921e-01 9.06271338e-02 -3.44108880e-01 2.41333261e-01] [ 2.43065059e-01 -2.94351071e-01 1.40869796e-01 8.71426314e-02 1.18429013e-01 -5.37578642e-01 -5.92897162e-02 -8.84562135e-02 -1.81157067e-01 -1.47659322e-02] [-1.87493071e-01 8.50369707e-02 -2.33688518e-01 7.45440498e-02 -1.46690886e-02 -1.49054557e-01 -6.21131435e-02 1.15148813e-01 
-7.02634081e-02 4.37192500e-01] [-5.92256486e-02 2.94065084e-02 -2.96264410e-01 5.03687859e-01 1.12032034e-01 6.31143972e-02 1.09514035e-01 -1.14684932e-01 -6.59667328e-02 -4.01442312e-02] [ 6.22524284e-02 6.67750165e-02 -1.81746721e-01 2.85709347e-03 6.62724301e-02 2.70146579e-01 1.05975010e-01 -3.02984506e-01 1.50374115e-01 7.71752149e-02] [-9.60224122e-02 -4.58723038e-01 -2.86842342e-02 -1.30761296e-01 1.80896427e-02 1.38991117e-01 -1.20597117e-01 9.44684520e-02 9.50031541e-03 -7.24454736e-03] [ 1.65138647e-01 1.71117991e-01 -2.89359186e-02 2.04941571e-01 -1.08836055e-01 -2.64198154e-01 3.24813902e-01 2.24367723e-01 2.02055722e-01 -4.54388380e-01] [ 2.11280480e-01 -3.92453261e-02 3.85618210e-01 2.04284400e-01 -4.63686958e-02 -1.63378000e-01 7.63957918e-01 -1.42685845e-01 -1.35208920e-01 -3.22998673e-01]] [[-9.94168781e-03 -2.60865539e-01 2.25940660e-01 1.10314213e-01 -2.95240581e-02 9.75293368e-02 2.29907528e-01 -7.66583905e-02 4.33466472e-02 -9.64930281e-02] [ 2.01148242e-01 -1.42079040e-01 -4.87583317e-03 2.81658256e-03 -7.44845346e-02 2.83135902e-02 -2.81065479e-02 3.75271924e-02 1.47080705e-01 -2.49914732e-02] [ 1.24682300e-01 -2.92628892e-02 1.48051426e-01 7.16055110e-02 -8.93719345e-02 2.37398334e-02 -3.63201573e-02 -1.27721474e-01 -3.58086862e-02 -2.90585682e-02] [-9.23407376e-02 -1.22560672e-01 7.72002861e-02 3.28375027e-02 -2.32716911e-02 -5.03416136e-02 -2.99678519e-02 1.50345843e-02 -1.12500377e-01 1.64216027e-01] [-3.98791321e-02 1.89181250e-02 8.89746919e-02 4.89844829e-02 -6.40093088e-02 9.58637968e-02 2.19770864e-01 1.89135611e-01 -1.03558935e-01 -6.41926453e-02] [-1.22047169e-02 -4.01125383e-03 -2.68280655e-01 -5.68349436e-02 2.38542065e-01 -2.25879867e-02 5.40648960e-02 -5.72089851e-02 -1.56835169e-01 -6.89361319e-02] [ 1.05612151e-01 -4.27759066e-02 -6.16485998e-02 -3.41354273e-02 4.77256663e-02 -1.18503533e-02 -5.61673194e-02 8.23962223e-03 -4.23954315e-02 1.02170788e-01] [-5.29753119e-02 3.68713811e-02 -1.05833322e-01 3.68316658e-02 
-1.17839694e-01 1.05226450e-01 -3.73446383e-02 -9.30920988e-02 1.81910582e-02 -9.37085971e-02] [ 5.18762954e-02 -2.36982144e-02 -3.37299444e-02 3.25011984e-02 -7.17445882e-03 3.89268845e-02 -2.86761433e-01 -7.87332878e-02 8.17824304e-02 -1.33686885e-01] [ 1.40132397e-01 3.04733776e-02 3.99481691e-02 1.29128486e-01 8.41264278e-02 9.61120650e-02 5.53459004e-02 -8.69520009e-02 -2.34892145e-02 2.15414017e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [1, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_654.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.9238 (2,1,.,.) = 2.0184 (3,1,.,.) = 0.7011 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[-1.3741504 -2.071314 1.8414874 0.5425666 1.7876887 2.1510434 1.9777081 -2.1318283 ] [ 2.1982098 -0.37749013 2.8337584 -0.01748536 -1.9465225 -1.1969422 -1.4797845 -0.12134144] [ 0.02165339 2.4031308 -2.3576283 1.6398956 4.522863 2.2456813 0.15161206 -0.45611662] [-2.9638515 -1.4139187 2.6390872 2.2171764 0.91799325 -0.9546346 -0.05432645 -4.1263766 ] [-0.57004756 -0.6976982 0.09241436 -0.443336 -1.593448 0.83534604 -1.4867977 3.4915864 ] [-2.55836 -2.253802 3.1116884 -1.789964 -1.5288823 0.64630234 3.3458178 -0.4710257 ] [ 1.4501163 -0.22187775 -0.05588527 3.3762019 -1.6783204 3.5083246 4.6440816 -3.049401 ] [-2.3452687 -3.3476648 0.43672493 1.0372702 0.37439623 0.0430171 -1.909182 4.21219 ]]]]; ov_res: [[[[-1.3741504 -2.071314 1.8414874 0.5425666 1.7876887 2.1510434 1.9777081 -2.1318283 ] [ 2.1982098 -0.37749013 2.8337584 -0.01748536 -1.9465225 -1.1969422 -1.4797845 -0.12134144] [ 0.02165339 2.4031308 -2.3576283 1.6398956 4.522863 2.2456813 0.15161206 -0.45611662] [-2.9638515 -1.4139187 2.6390872 2.2171764 0.91799325 -0.9546346 -0.05432645 -4.1263766 ] [-0.57004756 -0.6976982 0.09241436 
-0.443336 -1.593448 0.83534604 -1.4867977 3.4915864 ] [-2.55836 -2.253802 3.1116884 -1.789964 -1.5288823 0.64630234 3.3458178 -0.4710257 ] [ 1.4501163 -0.22187775 -0.05588527 3.3762019 -1.6783204 3.5083246 4.6440816 -3.049401 ] [-2.3452687 -3.3476648 0.43672493 1.0372702 0.37439623 0.0430171 -1.909182 4.21219 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [1, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_656.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.2731 (2,1,.,.) = -0.7718 (3,1,.,.) = -1.0144 (1,2,.,.) = -0.6135 (2,2,.,.) = 0.2814 (3,2,.,.) = -1.5734 (1,3,.,.) = -0.8989 (2,3,.,.) = 1.0077 (3,3,.,.) = 1.0184 [ CPUFloatType{3,3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 9.22032475e-01 1.10020232e+00 -1.29252541e+00 -1.30300140e+00 -8.00735414e-01 1.65854049e+00 1.08063924e+00 -5.60946345e-01 1.23126495e+00 1.51174188e+00 0.00000000e+00] [ 0.00000000e+00 -2.38638997e-01 -1.65726840e+00 4.72493708e-01 1.07395840e+00 5.24892688e-01 2.65741229e-01 1.93676925e+00 -1.05592668e+00 4.81563270e-01 3.07142019e-01 0.00000000e+00] [ 0.00000000e+00 -6.29762709e-01 -9.21189010e-01 -1.05215883e+00 -4.91059013e-02 -1.37025285e+00 1.23891258e+00 4.17696267e-01 -4.67922390e-01 -5.74604452e-01 8.72638702e-01 0.00000000e+00] [ 0.00000000e+00 -4.26968634e-01 -1.57538271e+00 1.41005075e+00 6.21446490e-01 -4.02972192e-01 3.56667459e-01 1.34042531e-01 -6.09240770e-01 -4.10242438e-01 9.44385111e-01 0.00000000e+00] [ 0.00000000e+00 -3.22972052e-02 2.07385850e+00 
2.93977737e-01 1.17380774e+00 -1.07619929e+00 1.42161809e-02 1.08452594e+00 8.19430947e-01 1.49147499e+00 -6.13277256e-01 0.00000000e+00] [ 0.00000000e+00 2.97533572e-01 -9.96779323e-01 -1.49996006e+00 1.10556328e+00 -9.85663459e-02 1.39409333e-01 8.49302530e-01 5.15105307e-01 -2.25088429e+00 -1.28427160e+00 0.00000000e+00] [ 0.00000000e+00 1.11692905e-01 -8.08897376e-01 -1.70982972e-01 6.25669122e-01 -2.33666730e+00 -1.19564366e+00 -1.83460176e+00 1.19657107e-01 -2.38573045e-01 -1.73667699e-01 0.00000000e+00] [ 0.00000000e+00 8.18711281e-01 8.88880014e-01 -4.68514383e-01 2.49341905e-01 -1.68561172e+00 -3.92265201e-01 4.03383048e-03 -2.77740538e-01 -3.16125035e-01 1.22887647e+00 0.00000000e+00] [ 0.00000000e+00 1.20761847e+00 5.40953040e-01 6.08009636e-01 2.52617925e-01 -2.42852020e+00 1.25240445e+00 9.27508235e-01 -4.44423258e-02 -1.84826827e+00 1.56501484e+00 0.00000000e+00] [ 0.00000000e+00 3.95776212e-01 1.03970730e+00 -2.46947572e-01 1.01699686e+00 -9.92034748e-02 1.26574337e-01 3.41253787e-01 -8.08063030e-01 -6.76466346e-01 9.29908514e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.42943299e+00 -1.53679371e+00 3.27917099e-01 1.33454919e+00 1.27993155e+00 -1.03248298e+00 -6.21424854e-01 3.02952994e-02 -1.03897357e+00 -1.74782848e+00 0.00000000e+00] [ 0.00000000e+00 1.28024268e+00 1.80208218e+00 6.78466409e-02 -1.68943918e+00 5.04507720e-01 -6.78844631e-01 -2.49516869e+00 1.07283354e+00 -1.83575261e+00 -2.36494020e-01 0.00000000e+00] [ 0.00000000e+00 1.58950901e+00 1.07156134e+00 2.11269307e+00 6.05236351e-01 1.12465072e+00 -1.41640568e+00 -1.47010636e+00 5.49634337e-01 -9.50108171e-02 -1.20942509e+00 
0.00000000e+00] [ 0.00000000e+00 -6.59532189e-01 1.93251920e+00 -7.10612655e-01 -5.22487521e-01 1.02787960e+00 -2.11430058e-01 8.51161242e-01 3.23258936e-01 8.27281535e-01 -2.30781364e+00 0.00000000e+00] [ 0.00000000e+00 2.71133393e-01 -2.82369995e+00 -5.67211926e-01 -1.18209565e+00 1.29712033e+00 -3.87679040e-01 -6.33145690e-01 -1.38779402e+00 -2.58977795e+00 1.41439736e+00 0.00000000e+00] [ 0.00000000e+00 3.03279489e-01 9.99968469e-01 1.98318434e+00 -1.24725580e+00 8.25008228e-02 4.74743754e-01 -1.70359358e-01 -1.41581810e+00 1.77647424e+00 7.97479078e-02 0.00000000e+00] [ 0.00000000e+00 3.41802061e-01 1.31167936e+00 -1.58001125e-01 -7.56723762e-01 2.73523378e+00 1.20062125e+00 2.91234684e+00 -6.53870940e-01 -1.27762794e-01 8.63039121e-02 0.00000000e+00] [ 0.00000000e+00 -6.93840683e-01 -3.56174767e-01 5.14679015e-01 -4.53602999e-01 1.50176191e+00 -4.18363571e-01 -2.34014869e-01 1.84346884e-01 7.39562869e-01 -2.36228958e-01 0.00000000e+00] [ 0.00000000e+00 -1.48963010e+00 -4.02385443e-01 -8.35279226e-01 -3.43297757e-02 3.02459550e+00 -2.36390376e+00 -9.55758452e-01 1.29202318e+00 2.87228298e+00 -2.15492368e+00 0.00000000e+00] [ 0.00000000e+00 -1.39381099e+00 -4.26626951e-01 6.69914126e-01 -4.28969443e-01 1.63179785e-01 4.89324808e-01 -1.02776396e+00 1.55081677e+00 2.00372028e+00 -1.23866177e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.66023171e+00 -2.27489161e+00 -4.14121103e+00 1.08369207e+00 1.23883235e+00 2.50700498e+00 6.98790908e-01 -1.03365672e+00 9.05224144e-01 -2.12246513e+00 0.00000000e+00] [ 0.00000000e+00 1.34071898e+00 -4.68871355e-01 1.43825483e+00 -5.20451069e-01 3.11748433e+00 -7.16305375e-01 
-2.59196877e+00 9.77743208e-01 -4.64891577e+00 -1.76209494e-01 0.00000000e+00] [ 0.00000000e+00 3.67626810e+00 1.66643798e+00 3.09692597e+00 2.71384025e+00 -8.21270347e-01 -1.52833545e+00 -1.20863616e+00 7.85741627e-01 -2.48212576e+00 -1.19523978e+00 0.00000000e+00] [ 0.00000000e+00 -4.22079182e+00 -4.80774134e-01 3.64742309e-01 -1.76375583e-01 2.58611751e+00 2.42504582e-01 3.50582314e+00 -1.02431500e+00 9.61970150e-01 -4.58438683e+00 0.00000000e+00] [ 0.00000000e+00 9.64531898e-01 -7.09388316e-01 -1.96724069e+00 -1.51879981e-01 3.67306322e-02 -1.17138898e+00 8.92407954e-01 -3.39717889e+00 -3.86086893e+00 2.32998443e+00 0.00000000e+00] [ 0.00000000e+00 3.28724527e+00 -5.08341074e-01 2.04700446e+00 -4.17129487e-01 2.80053973e-01 1.65643513e+00 1.64723074e+00 -2.87061763e+00 -1.57684636e+00 -3.46844840e+00 0.00000000e+00] [ 0.00000000e+00 2.11626697e+00 1.88075292e+00 -1.30851173e+00 2.79702526e-03 1.70852339e+00 -3.62239748e-01 4.54451227e+00 -1.88848102e+00 -1.38473392e+00 -1.08854127e+00 0.00000000e+00] [ 0.00000000e+00 -2.79649347e-01 1.98058796e+00 -9.76873696e-01 -5.75282931e-01 1.56402934e+00 -3.73585653e+00 -1.79548967e+00 1.74600315e+00 2.62545729e+00 2.54607606e+00 0.00000000e+00] [ 0.00000000e+00 -1.82386744e+00 1.53408003e+00 9.33657467e-01 2.47478530e-01 1.99909496e+00 -4.18136311e+00 -6.25715852e-01 3.65272021e+00 3.62820554e+00 -8.48435879e-01 0.00000000e+00] [ 0.00000000e+00 -2.47980404e+00 2.11798477e+00 4.81256604e-01 1.15001416e+00 -1.49277794e+00 7.87815869e-01 -3.57753325e+00 2.99364328e+00 5.54613352e+00 -1.93662262e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00]]]]; ov_res: [[[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 9.22032475e-01 
1.10020232e+00 -1.29252541e+00 -1.30300140e+00 -8.00735414e-01 1.65854049e+00 1.08063924e+00 -5.60946345e-01 1.23126495e+00 1.51174188e+00 0.00000000e+00] [ 0.00000000e+00 -2.38638997e-01 -1.65726840e+00 4.72493708e-01 1.07395840e+00 5.24892688e-01 2.65741229e-01 1.93676925e+00 -1.05592668e+00 4.81563270e-01 3.07142019e-01 0.00000000e+00] [ 0.00000000e+00 -6.29762709e-01 -9.21189010e-01 -1.05215883e+00 -4.91059013e-02 -1.37025285e+00 1.23891258e+00 4.17696267e-01 -4.67922390e-01 -5.74604452e-01 8.72638702e-01 0.00000000e+00] [ 0.00000000e+00 -4.26968634e-01 -1.57538271e+00 1.41005075e+00 6.21446490e-01 -4.02972192e-01 3.56667459e-01 1.34042531e-01 -6.09240770e-01 -4.10242438e-01 9.44385111e-01 0.00000000e+00] [ 0.00000000e+00 -3.22972052e-02 2.07385850e+00 2.93977737e-01 1.17380774e+00 -1.07619929e+00 1.42161809e-02 1.08452594e+00 8.19430947e-01 1.49147499e+00 -6.13277256e-01 0.00000000e+00] [ 0.00000000e+00 2.97533572e-01 -9.96779323e-01 -1.49996006e+00 1.10556328e+00 -9.85663459e-02 1.39409333e-01 8.49302530e-01 5.15105307e-01 -2.25088429e+00 -1.28427160e+00 0.00000000e+00] [ 0.00000000e+00 1.11692905e-01 -8.08897376e-01 -1.70982972e-01 6.25669122e-01 -2.33666730e+00 -1.19564366e+00 -1.83460176e+00 1.19657107e-01 -2.38573045e-01 -1.73667699e-01 0.00000000e+00] [ 0.00000000e+00 8.18711281e-01 8.88880014e-01 -4.68514383e-01 2.49341905e-01 -1.68561172e+00 -3.92265201e-01 4.03383048e-03 -2.77740538e-01 -3.16125035e-01 1.22887647e+00 0.00000000e+00] [ 0.00000000e+00 1.20761847e+00 5.40953040e-01 6.08009636e-01 2.52617925e-01 -2.42852020e+00 1.25240445e+00 9.27508235e-01 -4.44423258e-02 -1.84826827e+00 1.56501484e+00 0.00000000e+00] [ 0.00000000e+00 3.95776212e-01 1.03970730e+00 -2.46947572e-01 1.01699686e+00 -9.92034748e-02 1.26574337e-01 3.41253787e-01 -8.08063030e-01 -6.76466346e-01 9.29908514e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 
0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.42943299e+00 -1.53679371e+00 3.27917099e-01 1.33454919e+00 1.27993155e+00 -1.03248298e+00 -6.21424854e-01 3.02952994e-02 -1.03897357e+00 -1.74782848e+00 0.00000000e+00] [ 0.00000000e+00 1.28024268e+00 1.80208218e+00 6.78466409e-02 -1.68943918e+00 5.04507720e-01 -6.78844631e-01 -2.49516869e+00 1.07283354e+00 -1.83575261e+00 -2.36494020e-01 0.00000000e+00] [ 0.00000000e+00 1.58950901e+00 1.07156134e+00 2.11269307e+00 6.05236351e-01 1.12465072e+00 -1.41640568e+00 -1.47010636e+00 5.49634337e-01 -9.50108171e-02 -1.20942509e+00 0.00000000e+00] [ 0.00000000e+00 -6.59532189e-01 1.93251920e+00 -7.10612655e-01 -5.22487521e-01 1.02787960e+00 -2.11430058e-01 8.51161242e-01 3.23258936e-01 8.27281535e-01 -2.30781364e+00 0.00000000e+00] [ 0.00000000e+00 2.71133393e-01 -2.82369995e+00 -5.67211926e-01 -1.18209565e+00 1.29712033e+00 -3.87679040e-01 -6.33145690e-01 -1.38779402e+00 -2.58977795e+00 1.41439736e+00 0.00000000e+00] [ 0.00000000e+00 3.03279489e-01 9.99968469e-01 1.98318434e+00 -1.24725580e+00 8.25008228e-02 4.74743754e-01 -1.70359358e-01 -1.41581810e+00 1.77647424e+00 7.97479078e-02 0.00000000e+00] [ 0.00000000e+00 3.41802061e-01 1.31167936e+00 -1.58001125e-01 -7.56723762e-01 2.73523378e+00 1.20062125e+00 2.91234684e+00 -6.53870940e-01 -1.27762794e-01 8.63039121e-02 0.00000000e+00] [ 0.00000000e+00 -6.93840683e-01 -3.56174767e-01 5.14679015e-01 -4.53602999e-01 1.50176191e+00 -4.18363571e-01 -2.34014869e-01 1.84346884e-01 7.39562869e-01 -2.36228958e-01 0.00000000e+00] [ 0.00000000e+00 -1.48963010e+00 -4.02385443e-01 -8.35279226e-01 -3.43297757e-02 3.02459550e+00 -2.36390376e+00 -9.55758452e-01 1.29202318e+00 2.87228298e+00 -2.15492368e+00 0.00000000e+00] [ 0.00000000e+00 -1.39381099e+00 -4.26626951e-01 6.69914126e-01 -4.28969443e-01 
1.63179785e-01 4.89324808e-01 -1.02776396e+00 1.55081677e+00 2.00372028e+00 -1.23866177e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.66023171e+00 -2.27489161e+00 -4.14121103e+00 1.08369207e+00 1.23883235e+00 2.50700498e+00 6.98790908e-01 -1.03365672e+00 9.05224144e-01 -2.12246513e+00 0.00000000e+00] [ 0.00000000e+00 1.34071898e+00 -4.68871355e-01 1.43825483e+00 -5.20451069e-01 3.11748433e+00 -7.16305375e-01 -2.59196877e+00 9.77743208e-01 -4.64891577e+00 -1.76209494e-01 0.00000000e+00] [ 0.00000000e+00 3.67626810e+00 1.66643798e+00 3.09692597e+00 2.71384025e+00 -8.21270347e-01 -1.52833545e+00 -1.20863616e+00 7.85741627e-01 -2.48212576e+00 -1.19523978e+00 0.00000000e+00] [ 0.00000000e+00 -4.22079182e+00 -4.80774134e-01 3.64742309e-01 -1.76375583e-01 2.58611751e+00 2.42504582e-01 3.50582314e+00 -1.02431500e+00 9.61970150e-01 -4.58438683e+00 0.00000000e+00] [ 0.00000000e+00 9.64531898e-01 -7.09388316e-01 -1.96724069e+00 -1.51879981e-01 3.67306322e-02 -1.17138898e+00 8.92407954e-01 -3.39717889e+00 -3.86086893e+00 2.32998443e+00 0.00000000e+00] [ 0.00000000e+00 3.28724527e+00 -5.08341074e-01 2.04700446e+00 -4.17129487e-01 2.80053973e-01 1.65643513e+00 1.64723074e+00 -2.87061763e+00 -1.57684636e+00 -3.46844840e+00 0.00000000e+00] [ 0.00000000e+00 2.11626697e+00 1.88075292e+00 -1.30851173e+00 2.79702526e-03 1.70852339e+00 -3.62239748e-01 4.54451227e+00 -1.88848102e+00 -1.38473392e+00 -1.08854127e+00 0.00000000e+00] [ 0.00000000e+00 -2.79649347e-01 1.98058796e+00 -9.76873696e-01 -5.75282931e-01 1.56402934e+00 -3.73585653e+00 -1.79548967e+00 1.74600315e+00 2.62545729e+00 2.54607606e+00 0.00000000e+00] [ 0.00000000e+00 
-1.82386744e+00 1.53408003e+00 9.33657467e-01 2.47478530e-01 1.99909496e+00 -4.18136311e+00 -6.25715852e-01 3.65272021e+00 3.62820554e+00 -8.48435879e-01 0.00000000e+00] [ 0.00000000e+00 -2.47980404e+00 2.11798477e+00 4.81256604e-01 1.15001416e+00 -1.49277794e+00 7.87815869e-01 -3.57753325e+00 2.99364328e+00 5.54613352e+00 -1.93662262e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [3, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_658.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.4250 (2,1,.,.) = 1.7309 (3,1,.,.) = -0.4679 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 0.23412545 1.9162996 -0.48911458 0.14581728 -1.7980108 -2.4500318 2.161103 0.8693265 ] [ 0.8049467 -1.0062155 0.99258655 1.2465346 0.13826743 -2.2264614 -1.392396 -0.16178967] [-2.4750037 -1.0296972 1.6154307 -2.2814143 -1.6513108 -0.97364587 1.9879935 2.4730515 ] [-2.5650513 0.39744073 -2.5802114 -2.1006455 -0.8713777 -1.0048288 -0.02750462 -0.95561725]]]]; ov_res: [[[[ 0.23412545 1.9162996 -0.48911458 0.14581728 -1.7980108 -2.4500318 2.161103 0.8693265 ] [ 0.8049467 -1.0062155 0.99258655 1.2465346 0.13826743 -2.2264614 -1.392396 -0.16178967] [-2.4750037 -1.0296972 1.6154307 -2.2814143 -1.6513108 -0.97364587 1.9879935 2.4730515 ] [-2.5650513 0.39744073 -2.5802114 -2.1006455 -0.8713777 -1.0048288 -0.02750462 -0.95561725]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [3, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_660.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.4109 (2,1,.,.) = 0.01 * 9.1321 (3,1,.,.) = -1.2173 (1,2,.,.) = 0.7682 (2,2,.,.) = 1.0119 (3,2,.,.) = 0.3578 (1,3,.,.) = -0.7142 (2,3,.,.) = 1.3314 (3,3,.,.) = -0.9258 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.35336077 -1.623845 -1.1670908 0.08696204 -1.2007179 0.13615069 1.005855 0.3954729 1.0828449 0.7139768 0. ] [ 0. -0.1774716 -0.4613299 -1.2038366 0.6961924 0.05667878 -0.8016022 -0.603776 1.4995841 1.6101704 0.44105232 0. ] [ 0. 2.019987 1.5097389 -0.72006726 0.9910129 0.5023365 -0.3464634 -1.2625859 -0.63318115 2.0384583 -1.5654253 0. ] [ 0. 1.5857351 0.47857687 0.641122 0.5029413 2.4103022 1.248623 1.2015374 -1.5703355 0.9541975 -0.32132906 0. ] [ 0. 0.52963436 -1.2197232 -0.34069908 -1.519643 -0.8548815 -0.52316076 0.82854825 -2.8332543 -0.6774958 1.428331 0. ] [ 0. 0.33965757 -0.71478164 0.8155306 -0.22177392 0.11441465 0.861262 -0.3408299 0.67556965 -0.0784922 -0.64875966 0. ] [ 0. 
-1.4053109 -0.2600997 -0.32544938 -0.82347333 1.0216647 2.7063353 -2.9954927 0.29777923 -0.50850534 -0.56070954 0. ] [ 0. -2.5745656 -1.2718699 0.5066065 1.1811095 -1.121192 -1.0703181 -0.41824514 -0.30873805 1.9953777 -1.2121491 0. ] [ 0. 1.463261 0.22798285 0.69422203 1.2845671 -0.92031777 0.98461217 1.7775154 -0.06298271 0.62800646 -0.090663 0. ] [ 0. -0.41594496 -0.45942312 1.2474467 -0.19559117 -0.32089758 2.2538729 -1.0028057 -0.4585712 0.04529119 0.41952434 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -2.2390766 3.073684 -0.81890506 0.8903195 1.035596 -1.8455826 2.1408627 -0.44572777 -1.1129905 1.9561759 0. ] [ 0. -0.57007235 -0.21535584 -0.80222416 -1.3438169 0.81529033 2.151689 -0.6623482 -2.2188687 -0.54490113 -1.9851837 0. ] [ 0. 4.4077716 -2.4235954 -0.03912298 1.6249273 5.0016065 1.0002981 0.81904584 -3.7518241 0.86025155 -1.4490958 0. ] [ 0. -0.5735782 2.3604522 2.458056 -0.5217176 2.0314066 -1.2163477 2.7853703 0.09971846 -1.7448959 2.7330906 0. ] [ 0. -1.7812597 -1.3582072 -2.01722 -4.133873 0.36192092 -0.288477 -1.263275 0.9313769 -0.6432612 -1.7807647 0. ] [ 0. -1.4950218 -0.672243 -0.15257868 -1.752942 -0.24314639 -0.6572717 -2.9144845 -3.2986848 -4.028432 1.7198255 0. ] [ 0. 3.4065468 -2.3859458 0.02981682 -2.6390266 -1.1094683 -0.28938147 1.478799 1.1500758 -1.1010736 0.33599755 0. ] [ 0. 0.7808675 0.15684696 0.04056857 0.8855871 -1.8637847 -0.60319257 1.1185179 -0.05515101 1.4575126 -0.06875379 0. ] [ 0. -0.9576712 -0.3654686 0.02642349 0.33885026 3.1759353 2.4386644 0.2151056 1.2007898 -1.5822054 0.6400078 0. ] [ 0. 1.3888291 0.16129276 -0.66716945 -0.31299305 1.5013593 -0.63220346 0.65484184 1.4993899 -2.540559 2.5877326 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 
0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.06248597 -3.5610194 0.21157493 0.0225978 -1.7598032 -1.9459136 0.85097873 0.01531217 0.6689453 -1.8736577 0. ] [ 0. -1.3652368 -4.8030596 1.4881444 1.8564749 2.5897644 1.3467869 -0.1735082 2.3498023 -1.1969101 0.98735255 0. ] [ 0. -1.7061442 1.599302 0.46595028 -0.9621133 0.5739412 1.6364267 3.205731 0.24543075 1.337926 -1.4527986 0. ] [ 0. 3.8945427 -0.87196714 -0.29231748 0.1794465 0.76286393 3.3464797 -1.6882318 0.8761005 0.11655132 -1.4887294 0. ] [ 0. 0.523645 -0.5126848 1.1482499 1.3802382 -0.02644756 2.453969 -1.4518845 -0.9831948 -2.1416361 4.1243734 0. ] [ 0. 0.518667 -1.3217646 3.2669537 1.3493692 -1.0896815 -1.1824837 -0.47448644 0.62799287 -0.66815555 0.9193165 0. ] [ 0. -1.7378159 -0.839422 -0.11287262 3.1600304 -1.2969174 1.4477367 -0.77433765 -0.8409151 0.21051314 1.5719618 0. ] [ 0. -1.3860774 0.40438083 3.4428475 -1.9750537 -1.5308115 -1.2547998 1.6475594 1.8799696 -2.371803 -3.3925185 0. ] [ 0. -0.4747603 -1.403614 -0.39348346 0.13149536 -2.0105374 -0.5178822 0.95223105 -1.8251457 -0.82774657 1.3452168 0. ] [ 0. 0.48704138 1.6713966 0.84022975 -0.44875988 1.2365618 -0.4871429 -0.8815084 -0.5284212 0.9463652 -1.1785462 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]]]; ov_res: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.35336077 -1.623845 -1.1670908 0.08696204 -1.2007179 0.13615069 1.005855 0.3954729 1.0828449 0.7139768 0. ] [ 0. -0.1774716 -0.4613299 -1.2038366 0.6961924 0.05667878 -0.8016022 -0.603776 1.4995841 1.6101704 0.44105232 0. ] [ 0. 2.019987 1.5097389 -0.72006726 0.9910129 0.5023365 -0.3464634 -1.2625859 -0.63318115 2.0384583 -1.5654253 0. ] [ 0. 
1.5857351 0.47857687 0.641122 0.5029413 2.4103022 1.248623 1.2015374 -1.5703355 0.9541975 -0.32132906 0. ] [ 0. 0.52963436 -1.2197232 -0.34069908 -1.519643 -0.8548815 -0.52316076 0.82854825 -2.8332543 -0.6774958 1.428331 0. ] [ 0. 0.33965757 -0.71478164 0.8155306 -0.22177392 0.11441465 0.861262 -0.3408299 0.67556965 -0.0784922 -0.64875966 0. ] [ 0. -1.4053109 -0.2600997 -0.32544938 -0.82347333 1.0216647 2.7063353 -2.9954927 0.29777923 -0.50850534 -0.56070954 0. ] [ 0. -2.5745656 -1.2718699 0.5066065 1.1811095 -1.121192 -1.0703181 -0.41824514 -0.30873805 1.9953777 -1.2121491 0. ] [ 0. 1.463261 0.22798285 0.69422203 1.2845671 -0.92031777 0.98461217 1.7775154 -0.06298271 0.62800646 -0.090663 0. ] [ 0. -0.41594496 -0.45942312 1.2474467 -0.19559117 -0.32089758 2.2538729 -1.0028057 -0.4585712 0.04529119 0.41952434 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -2.2390766 3.073684 -0.81890506 0.8903195 1.035596 -1.8455826 2.1408627 -0.44572777 -1.1129905 1.9561759 0. ] [ 0. -0.57007235 -0.21535584 -0.80222416 -1.3438169 0.81529033 2.151689 -0.6623482 -2.2188687 -0.54490113 -1.9851837 0. ] [ 0. 4.4077716 -2.4235954 -0.03912298 1.6249273 5.0016065 1.0002981 0.81904584 -3.7518241 0.86025155 -1.4490958 0. ] [ 0. -0.5735782 2.3604522 2.458056 -0.5217176 2.0314066 -1.2163477 2.7853703 0.09971846 -1.7448959 2.7330906 0. ] [ 0. -1.7812597 -1.3582072 -2.01722 -4.133873 0.36192092 -0.288477 -1.263275 0.9313769 -0.6432612 -1.7807647 0. ] [ 0. -1.4950218 -0.672243 -0.15257868 -1.752942 -0.24314639 -0.6572717 -2.9144845 -3.2986848 -4.028432 1.7198255 0. ] [ 0. 3.4065468 -2.3859458 0.02981682 -2.6390266 -1.1094683 -0.28938147 1.478799 1.1500758 -1.1010736 0.33599755 0. ] [ 0. 
0.7808675 0.15684696 0.04056857 0.8855871 -1.8637847 -0.60319257 1.1185179 -0.05515101 1.4575126 -0.06875379 0. ] [ 0. -0.9576712 -0.3654686 0.02642349 0.33885026 3.1759353 2.4386644 0.2151056 1.2007898 -1.5822054 0.6400078 0. ] [ 0. 1.3888291 0.16129276 -0.66716945 -0.31299305 1.5013593 -0.63220346 0.65484184 1.4993899 -2.540559 2.5877326 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.06248597 -3.5610194 0.21157493 0.0225978 -1.7598032 -1.9459136 0.85097873 0.01531217 0.6689453 -1.8736577 0. ] [ 0. -1.3652368 -4.8030596 1.4881444 1.8564749 2.5897644 1.3467869 -0.1735082 2.3498023 -1.1969101 0.98735255 0. ] [ 0. -1.7061442 1.599302 0.46595028 -0.9621133 0.5739412 1.6364267 3.205731 0.24543075 1.337926 -1.4527986 0. ] [ 0. 3.8945427 -0.87196714 -0.29231748 0.1794465 0.76286393 3.3464797 -1.6882318 0.8761005 0.11655132 -1.4887294 0. ] [ 0. 0.523645 -0.5126848 1.1482499 1.3802382 -0.02644756 2.453969 -1.4518845 -0.9831948 -2.1416361 4.1243734 0. ] [ 0. 0.518667 -1.3217646 3.2669537 1.3493692 -1.0896815 -1.1824837 -0.47448644 0.62799287 -0.66815555 0.9193165 0. ] [ 0. -1.7378159 -0.839422 -0.11287262 3.1600304 -1.2969174 1.4477367 -0.77433765 -0.8409151 0.21051314 1.5719618 0. ] [ 0. -1.3860774 0.40438083 3.4428475 -1.9750537 -1.5308115 -1.2547998 1.6475594 1.8799696 -2.371803 -3.3925185 0. ] [ 0. -0.4747603 -1.403614 -0.39348346 0.13149536 -2.0105374 -0.5178822 0.95223105 -1.8251457 -0.82774657 1.3452168 0. ] [ 0. 0.48704138 1.6713966 0.84022975 -0.44875988 1.2365618 -0.4871429 -0.8815084 -0.5284212 0.9463652 -1.1785462 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'bias_shape': [1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_662.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.4986 (2,1,.,.) = -0.2073 (3,1,.,.) = 0.5236 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[-0.17873868 0.87208074 2.3115578 1.6993827 -1.2750678 -0.04563376 2.036138 -0.8780993 -1.7727363 0.3230377 ] [ 1.1616656 -0.20071277 -0.02904926 1.7424134 -2.34685 1.6155803 -3.1797583 0.26850763 1.5052137 3.1906862 ] [-0.88074887 -0.8323905 1.264901 3.0612392 -2.8545845 -1.3775889 1.7490205 -1.7239034 -1.4644707 1.517725 ] [-0.2045732 -1.4288402 0.29997241 2.6922748 -0.58206534 -1.0265533 0.630303 -2.6604576 1.047315 0.7496016 ] [ 0.8461972 -1.7565812 -1.1897869 0.47929823 -1.3623204 -0.35541546 -1.6232797 3.4404132 -1.0758592 -0.92336905] [-0.7419528 0.18269022 -0.13704242 -0.39876646 0.75769633 -0.24202663 1.786633 -1.2828245 1.3146492 0.6749147 ] [ 2.06442 -2.5923965 -1.0294735 0.98347604 -0.93615943 1.3925334 -1.4269177 -0.22378458 -3.5909972 1.0177895 ] [-0.83151335 2.4904 1.6751605 0.53911644 2.4120505 -1.2416548 1.2637197 0.860817 1.3726691 0.58635783]]]]; ov_res: [[[[-0.17873868 0.87208074 2.3115578 1.6993827 -1.2750678 -0.04563376 2.036138 -0.8780993 -1.7727363 0.3230377 ] [ 1.1616656 -0.20071277 -0.02904926 1.7424134 -2.34685 
1.6155803 -3.1797583 0.26850763 1.5052137 3.1906862 ] [-0.88074887 -0.8323905 1.264901 3.0612392 -2.8545845 -1.3775889 1.7490205 -1.7239034 -1.4644707 1.517725 ] [-0.2045732 -1.4288402 0.29997241 2.6922748 -0.58206534 -1.0265533 0.630303 -2.6604576 1.047315 0.7496016 ] [ 0.8461972 -1.7565812 -1.1897869 0.47929823 -1.3623204 -0.35541546 -1.6232797 3.4404132 -1.0758592 -0.92336905] [-0.7419528 0.18269022 -0.13704242 -0.39876646 0.75769633 -0.24202663 1.786633 -1.2828245 1.3146492 0.6749147 ] [ 2.06442 -2.5923965 -1.0294735 0.98347604 -0.93615943 1.3925334 -1.4269177 -0.22378458 -3.5909972 1.0177895 ] [-0.83151335 2.4904 1.6751605 0.53911644 2.4120505 -1.2416548 1.2637197 0.860817 1.3726691 0.58635783]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [1, 1], 'pads': [0, 1], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_664.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.3665 (2,1,.,.) = 0.01 * 6.0399 (3,1,.,.) = 0.3929 (1,2,.,.) = 0.4637 (2,2,.,.) = 0.01 * -4.8420 (3,2,.,.) = 0.3212 (1,3,.,.) = 1.4328 (2,3,.,.) = -0.7579 (3,3,.,.) = 0.2769 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 0. -0.63228077 0.7620462 2.3164027 -2.7120512 -1.4215832 0.91813034 -0.3419867 0.72182375 2.097276 -2.3037226 0. ] [ 0. 0.13922337 1.0258002 -1.2007489 0.42433208 -0.3518106 1.6148144 -2.2049286 -0.47662282 1.986352 0.7487855 0. ] [ 0. -3.0569532 3.297346 0.73738146 0.23238298 -0.6108391 1.8420618 2.4978015 -0.1350221 1.282175 1.8713301 0. ] [ 0. 1.6688697 -1.2141542 2.0719564 -1.7773323 -1.8175023 1.3788089 1.4490521 3.5366619 -2.4246132 -0.03452189 0. ] [ 0. -0.73708975 0.02701491 0.28422418 -0.11274043 -2.1110659 2.935036 -1.1187271 -0.93481237 0.68831134 -0.42590338 0. ] [ 0. -1.0013994 0.8940431 3.4651155 -1.6135731 -1.073419 -1.4435999 0.39943007 1.6424364 1.0782061 -1.6390699 0. ] [ 0. -1.5015526 -4.163917 1.2422222 1.4990567 -0.21649958 2.1796904 1.9855924 2.1160605 -2.25286 1.1581151 0. ] [ 0. 
0.20123973 -0.3212697 -1.937634 2.4429553 -0.74940455 -1.5560906 -0.07409532 0.29780495 1.2466317 -0.8834238 0. ] [ 0. -0.9824329 0.2854949 -0.29527083 0.6078029 1.8220543 -0.9002825 0.00515061 -1.1218853 3.099463 0.03527286 0. ] [ 0. -2.3917706 0.09177104 -1.6547135 1.9923433 -1.6350955 -1.3046796 0.56687015 -0.95871395 0.09198841 0.2375148 0. ]] [[ 0. 0.04475018 -0.31675276 -0.9548848 1.0540986 0.8375816 -0.42785317 0.52278936 -0.0808197 -0.8740974 1.0514085 0. ] [ 0. 0.17529742 -0.5460531 0.22793782 0.01307828 0.02977107 -1.0892447 1.1097709 0.38812006 -1.4027655 -0.1302682 0. ] [ 0. 1.1535994 -1.6079799 -0.4570617 0.1867406 0.32903206 -0.55741864 -1.3191794 -0.4414821 -0.45376563 -1.024869 0. ] [ 0. -1.0613028 0.5577004 -0.7930275 0.8468809 1.0505645 -0.9429207 -0.70549536 -1.5057427 1.3492597 0.01978191 0. ] [ 0. 0.25838873 -0.1930202 0.03326742 -0.05521427 0.85288787 -1.2511494 0.4758873 0.48639128 -0.36301947 0.14102529 0. ] [ 0. 0.33793455 -0.53461343 -1.4737594 0.8778973 0.9089709 0.7166965 0.4940382 -1.0686957 -0.5510495 1.0789878 0. ] [ 0. 0.5156919 2.0924675 -0.60067713 -0.61739904 0.26984453 -1.115299 -1.0354784 -0.77051985 0.55088055 -0.519741 0. ] [ 0. -0.4275569 0.02052168 1.2817181 -0.8845293 0.23209433 0.79320616 0.07084183 -0.2962412 -0.59034 0.45340267 0. ] [ 0. 0.7076461 0.22954625 0.13943265 -0.588896 -1.0435367 0.13941833 0.1171247 0.24431208 -1.5774596 -0.01571487 0. ] [ 0. 1.3403045 0.21523006 0.56890625 -0.7694161 1.040067 0.29146847 -0.37567675 0.32896808 0.12966894 0.09213541 0. ]] [[ 0. 0.22430982 0.01548914 -0.6637674 -0.9314018 0.04510571 -0.5507604 -0.56998336 -0.43260556 0.6700929 -0.21957651 0. ] [ 0. -1.1375066 0.6302172 -0.05021866 0.6227157 0.39990544 0.7903234 -0.05563187 -0.11432151 0.27862185 0.04307531 0. ] [ 0. -0.05476227 0.5020457 0.8735428 -1.1690228 0.6505781 -0.08300793 -0.4734273 -0.27989843 0.31736597 0.5344388 0. ] [ 0. 
1.3064805 -0.43588588 0.24238981 -0.6014635 0.6185525 -0.24975765 -0.0077513 0.4574698 -1.2897308 -1.1302391 0. ] [ 0. 0.1866825 -0.15247338 -0.53405946 -0.14850785 -0.4395785 0.6595071 -0.1734731 -0.63444906 -0.94890314 0.053209 0. ] [ 0. -0.19234772 -0.09386185 0.16708727 -0.37641093 0.33531392 0.09098289 -0.6169644 -0.04674625 0.19462894 -0.9489916 0. ] [ 0. -0.48687026 -1.4399153 0.53863716 -0.44875622 0.804203 -0.36923608 0.38552874 -0.6733171 0.24966188 0.43913782 0. ] [ 0. 0.5766209 -0.2008557 0.5098954 0.30209088 -0.20662951 -0.2934163 0.52369505 -0.42093825 0.15798205 -0.5118814 0. ] [ 0. 0.505735 0.29446113 0.42311633 0.24697727 0.97453594 0.52418345 -0.09392403 0.7412592 0.7587196 0.02109715 0. ] [ 0. -0.9081499 0.76457685 -0.46988568 -0.9081996 -0.6694152 -0.53389895 -0.68430007 0.19371623 0.03863174 0.79468966 0. ]]]]; ov_res: [[[[ 0. -0.63228077 0.7620462 2.3164027 -2.7120512 -1.4215832 0.91813034 -0.3419867 0.72182375 2.097276 -2.3037226 0. ] [ 0. 0.13922337 1.0258002 -1.2007489 0.42433208 -0.3518106 1.6148144 -2.2049286 -0.47662282 1.986352 0.7487855 0. ] [ 0. -3.0569532 3.297346 0.73738146 0.23238298 -0.6108391 1.8420618 2.4978015 -0.1350221 1.282175 1.8713301 0. ] [ 0. 1.6688697 -1.2141542 2.0719564 -1.7773323 -1.8175023 1.3788089 1.4490521 3.5366619 -2.4246132 -0.03452189 0. ] [ 0. -0.73708975 0.02701491 0.28422418 -0.11274043 -2.1110659 2.935036 -1.1187271 -0.93481237 0.68831134 -0.42590338 0. ] [ 0. -1.0013994 0.8940431 3.4651155 -1.6135731 -1.073419 -1.4435999 0.39943007 1.6424364 1.0782061 -1.6390699 0. ] [ 0. -1.5015526 -4.163917 1.2422222 1.4990567 -0.21649958 2.1796904 1.9855924 2.1160605 -2.25286 1.1581151 0. ] [ 0. 0.20123973 -0.3212697 -1.937634 2.4429553 -0.74940455 -1.5560906 -0.07409532 0.29780495 1.2466317 -0.8834238 0. ] [ 0. -0.9824329 0.2854949 -0.29527083 0.6078029 1.8220543 -0.9002825 0.00515061 -1.1218853 3.099463 0.03527286 0. ] [ 0. 
-2.3917706 0.09177104 -1.6547135 1.9923433 -1.6350955 -1.3046796 0.56687015 -0.95871395 0.09198841 0.2375148 0. ]] [[ 0. 0.04475018 -0.31675276 -0.9548848 1.0540986 0.8375816 -0.42785317 0.52278936 -0.0808197 -0.8740974 1.0514085 0. ] [ 0. 0.17529742 -0.5460531 0.22793782 0.01307828 0.02977107 -1.0892447 1.1097709 0.38812006 -1.4027655 -0.1302682 0. ] [ 0. 1.1535994 -1.6079799 -0.4570617 0.1867406 0.32903206 -0.55741864 -1.3191794 -0.4414821 -0.45376563 -1.024869 0. ] [ 0. -1.0613028 0.5577004 -0.7930275 0.8468809 1.0505645 -0.9429207 -0.70549536 -1.5057427 1.3492597 0.01978191 0. ] [ 0. 0.25838873 -0.1930202 0.03326742 -0.05521427 0.85288787 -1.2511494 0.4758873 0.48639128 -0.36301947 0.14102529 0. ] [ 0. 0.33793455 -0.53461343 -1.4737594 0.8778973 0.9089709 0.7166965 0.4940382 -1.0686957 -0.5510495 1.0789878 0. ] [ 0. 0.5156919 2.0924675 -0.60067713 -0.61739904 0.26984453 -1.115299 -1.0354784 -0.77051985 0.55088055 -0.519741 0. ] [ 0. -0.4275569 0.02052168 1.2817181 -0.8845293 0.23209433 0.79320616 0.07084183 -0.2962412 -0.59034 0.45340267 0. ] [ 0. 0.7076461 0.22954625 0.13943265 -0.588896 -1.0435367 0.13941833 0.1171247 0.24431208 -1.5774596 -0.01571487 0. ] [ 0. 1.3403045 0.21523006 0.56890625 -0.7694161 1.040067 0.29146847 -0.37567675 0.32896808 0.12966894 0.09213541 0. ]] [[ 0. 0.22430982 0.01548914 -0.6637674 -0.9314018 0.04510571 -0.5507604 -0.56998336 -0.43260556 0.6700929 -0.21957651 0. ] [ 0. -1.1375066 0.6302172 -0.05021866 0.6227157 0.39990544 0.7903234 -0.05563187 -0.11432151 0.27862185 0.04307531 0. ] [ 0. -0.05476227 0.5020457 0.8735428 -1.1690228 0.6505781 -0.08300793 -0.4734273 -0.27989843 0.31736597 0.5344388 0. ] [ 0. 1.3064805 -0.43588588 0.24238981 -0.6014635 0.6185525 -0.24975765 -0.0077513 0.4574698 -1.2897308 -1.1302391 0. ] [ 0. 0.1866825 -0.15247338 -0.53405946 -0.14850785 -0.4395785 0.6595071 -0.1734731 -0.63444906 -0.94890314 0.053209 0. ] [ 0. 
-0.19234772 -0.09386185 0.16708727 -0.37641093 0.33531392 0.09098289 -0.6169644 -0.04674625 0.19462894 -0.9489916 0. ] [ 0. -0.48687026 -1.4399153 0.53863716 -0.44875622 0.804203 -0.36923608 0.38552874 -0.6733171 0.24966188 0.43913782 0. ] [ 0. 0.5766209 -0.2008557 0.5098954 0.30209088 -0.20662951 -0.2934163 0.52369505 -0.42093825 0.15798205 -0.5118814 0. ] [ 0. 0.505735 0.29446113 0.42311633 0.24697727 0.97453594 0.52418345 -0.09392403 0.7412592 0.7587196 0.02109715 0. ] [ 0. -0.9081499 0.76457685 -0.46988568 -0.9081996 -0.6694152 -0.53389895 -0.68430007 0.19371623 0.03863174 0.79468966 0. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_666.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.4395 (2,1,.,.) = -0.9126 (3,1,.,.) = 0.3539 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 2.0524907 -3.5104601 1.302126 -0.54883564 1.8206128 -2.3365228 0.05499646 -0.93296343 -2.1039238 -1.4344552 ] [-1.7001227 0.7844339 -0.23768038 -0.11565257 -1.7146025 -3.6658418 -0.2687302 -0.35649395 -2.315862 0.8668199 ] [-1.3060765 -1.7700299 2.6090758 -0.0523166 -0.6416734 2.2162266 -0.68469954 1.796978 -1.760207 1.6706855 ] [-0.60407555 -1.5735685 -1.3248177 0.0416075 -0.47540575 -0.9643801 -3.83576 -0.6575696 -0.37244967 1.5902117 ] [ 0.7877394 0.41619754 -1.73027 -0.49156103 0.19129887 3.0338404 -0.19468747 -2.8043344 0.64831847 1.9420764 ] [-0.2298466 0.14720567 -0.9879061 -0.64076084 -0.5791627 1.3643284 -0.40746945 -0.20774528 -0.3812325 -0.42174122] [-3.0273259 0.7825766 0.5055474 0.44951367 0.01082834 0.5493312 1.699089 -1.5448442 1.0503138 -1.022519 ] [-1.0704429 0.58745825 0.8725077 0.46950266 -1.6204581 -1.2063764 0.11116233 -1.0409024 -1.2633214 -3.4637237 ]] [[ 0.18039191 -1.7393073 -0.96315897 -0.49968398 0.4171438 2.1056807 1.0799313 -2.3614998 0.22779448 -0.8209611 ] [ 0.79373723 0.48657408 1.6068623 -1.6253221 
-1.2217793 0.6134392 -0.4668408 1.2283028 0.17199552 0.54387903] [ 0.46699345 0.37370965 0.09714209 1.1592964 1.0460823 -0.63764715 -0.03352249 0.346232 0.49969405 -1.1785911 ] [ 1.2366936 -0.09599515 -0.4601027 0.13431478 0.19324036 0.27300832 0.7092904 -0.18076693 0.38786995 0.8932356 ] [-0.06682169 0.05994075 0.2998184 -0.23057078 0.57290274 -2.161395 0.577107 -0.9280177 0.33937332 2.2012632 ] [ 0.5916145 -0.94351965 -0.3485307 1.3773049 0.43875298 1.9044802 1.8899695 1.0422206 0.3864166 -0.27407038] [ 0.06811593 1.2602388 0.22719711 0.21955156 1.5782188 0.7530409 -0.24145107 -0.26806873 -0.6146291 1.1254075 ] [ 0.79498935 -0.42278796 0.49202606 -1.7809589 -0.5345287 -2.0945778 -1.0704969 1.3740292 -0.04886706 0.99119276]] [[-0.11275548 -0.21840999 -0.26778832 -0.16660365 0.4091145 0.14630304 -0.46062025 -0.17129283 0.18959583 0.24905868] [-0.18910453 -0.4481577 -0.24892606 0.5705262 -0.18485339 0.13120413 0.5095622 0.41652283 -0.37806112 -0.03334962] [ 0.06101386 -0.3050173 -0.07904672 -0.4315206 -0.17925343 0.11690082 0.29928285 0.15527348 -0.09969004 -0.24397014] [-0.09575098 -0.651502 0.11464722 0.21368769 0.1765341 -0.32002205 -0.12971482 0.7226766 0.22709706 -0.12264664] [ 0.47756425 -0.16086832 -0.13709345 0.07007359 -0.02860992 -0.93552375 -0.06980861 0.1595339 -0.12573673 -0.10141303] [ 0.343597 -0.19283381 -0.18329626 -0.1829466 0.45048383 -0.07764208 -0.10203336 -0.19308515 0.39962387 -0.15826023] [-0.33131015 -0.2081438 -0.43047237 -0.4707809 -0.7810009 0.13563532 0.6186023 -0.7391361 -0.20019755 -0.48133102] [ 0.25769114 -0.11569329 0.11148817 0.04584056 0.21260075 -0.09210883 -0.56883836 0.23034729 0.3535949 -0.13735199]]]]; ov_res: [[[[ 2.0524907 -3.5104601 1.302126 -0.54883564 1.8206128 -2.3365228 0.05499646 -0.93296343 -2.1039238 -1.4344552 ] [-1.7001227 0.7844339 -0.23768038 -0.11565257 -1.7146025 -3.6658418 -0.2687302 -0.35649395 -2.315862 0.8668199 ] [-1.3060765 -1.7700299 2.6090758 -0.0523166 -0.6416734 2.2162266 -0.68469954 1.796978 
-1.760207 1.6706855 ] [-0.60407555 -1.5735685 -1.3248177 0.0416075 -0.47540575 -0.9643801 -3.83576 -0.6575696 -0.37244967 1.5902117 ] [ 0.7877394 0.41619754 -1.73027 -0.49156103 0.19129887 3.0338404 -0.19468747 -2.8043344 0.64831847 1.9420764 ] [-0.2298466 0.14720567 -0.9879061 -0.64076084 -0.5791627 1.3643284 -0.40746945 -0.20774528 -0.3812325 -0.42174122] [-3.0273259 0.7825766 0.5055474 0.44951367 0.01082834 0.5493312 1.699089 -1.5448442 1.0503138 -1.022519 ] [-1.0704429 0.58745825 0.8725077 0.46950266 -1.6204581 -1.2063764 0.11116233 -1.0409024 -1.2633214 -3.4637237 ]] [[ 0.18039191 -1.7393073 -0.96315897 -0.49968398 0.4171438 2.1056807 1.0799313 -2.3614998 0.22779448 -0.8209611 ] [ 0.79373723 0.48657408 1.6068623 -1.6253221 -1.2217793 0.6134392 -0.4668408 1.2283028 0.17199552 0.54387903] [ 0.46699345 0.37370965 0.09714209 1.1592964 1.0460823 -0.63764715 -0.03352249 0.346232 0.49969405 -1.1785911 ] [ 1.2366936 -0.09599515 -0.4601027 0.13431478 0.19324036 0.27300832 0.7092904 -0.18076693 0.38786995 0.8932356 ] [-0.06682169 0.05994075 0.2998184 -0.23057078 0.57290274 -2.161395 0.577107 -0.9280177 0.33937332 2.2012632 ] [ 0.5916145 -0.94351965 -0.3485307 1.3773049 0.43875298 1.9044802 1.8899695 1.0422206 0.3864166 -0.27407038] [ 0.06811593 1.2602388 0.22719711 0.21955156 1.5782188 0.7530409 -0.24145107 -0.26806873 -0.6146291 1.1254075 ] [ 0.79498935 -0.42278796 0.49202606 -1.7809589 -0.5345287 -2.0945778 -1.0704969 1.3740292 -0.04886706 0.99119276]] [[-0.11275548 -0.21840999 -0.26778832 -0.16660365 0.4091145 0.14630304 -0.46062025 -0.17129283 0.18959583 0.24905868] [-0.18910453 -0.4481577 -0.24892606 0.5705262 -0.18485339 0.13120413 0.5095622 0.41652283 -0.37806112 -0.03334962] [ 0.06101386 -0.3050173 -0.07904672 -0.4315206 -0.17925343 0.11690082 0.29928285 0.15527348 -0.09969004 -0.24397014] [-0.09575098 -0.651502 0.11464722 0.21368769 0.1765341 -0.32002205 -0.12971482 0.7226766 0.22709706 -0.12264664] [ 0.47756425 -0.16086832 -0.13709345 0.07007359 -0.02860992 
-0.93552375 -0.06980861 0.1595339 -0.12573673 -0.10141303] [ 0.343597 -0.19283381 -0.18329626 -0.1829466 0.45048383 -0.07764208 -0.10203336 -0.19308515 0.39962387 -0.15826023] [-0.33131015 -0.2081438 -0.43047237 -0.4707809 -0.7810009 0.13563532 0.6186023 -0.7391361 -0.20019755 -0.48133102] [ 0.25769114 -0.11569329 0.11148817 0.04584056 0.21260075 -0.09210883 -0.56883836 0.23034729 0.3535949 -0.13735199]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 1], 'dilations': [1, 1], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_668.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.0437 (2,1,.,.) = 0.8065 (3,1,.,.) = 0.2717 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 0. 0.45746648 0.20598365 1.4881537 -0.8703886 1.0118086 0.7634106 1.464859 -0.6847337 -0.61586297 1.2542973 0. ] [ 0. -1.1810629 0.4074048 -0.06146713 -0.38240603 0.61500937 -0.05027242 -0.8319415 1.1200951 -0.27835783 -1.1573731 0. ] [ 0. -0.94298005 0.76478463 -1.0062968 0.14714411 1.3127323 -0.06760669 -1.5747135 1.322702 -1.0819188 -0.8418187 0. ] [ 0. -1.4120944 0.56510067 -0.87645566 0.5274114 -0.3944974 -0.09664612 -0.6613808 0.33675689 -1.2654083 -1.2255508 0. ] [ 0. -0.10991719 -0.40869182 -0.8034193 -1.8516606 -0.7877444 -0.3100594 2.2082336 1.1197598 -0.19501318 0.7840755 0. ] [ 0. 0.3644931 -0.48974708 1.0700744 0.09898657 1.9233047 0.4866104 0.8665037 0.47135693 -0.46849295 0.26179764 0. ] [ 0. 0.14368057 -0.29332277 0.21000317 -1.5482244 0.20416537 1.2992097 -0.58386123 -0.18300155 -1.2931497 0.7239801 0. ] [ 0. -2.221391 -0.5165367 -0.18198976 0.554231 -0.0826643 -0.51580065 0.6364982 1.2970715 -0.02721568 -1.37454 0. ] [ 0. 
-1.9027234 -0.13519958 0.10878564 0.8892418 0.6872876 1.9091969 0.02440841 -1.0858428 1.0499347 -2.2349718 0. ] [ 0. -0.8526767 -1.7845788 -0.3614098 0.80401284 -0.01743722 1.6879432 0.9438562 0.5073246 -0.08579761 1.8729327 0. ]] [[ 0. -1.4002157 0.16437948 -1.5946125 0.26315132 -0.7992012 1.6388817 -0.05902012 0.65088934 0.80098003 0.1786918 0. ] [ 0. 0.53879255 -0.07123488 2.0122943 -0.340593 -0.764183 0.7444176 -0.7474939 0.42107397 -0.2659053 0.8490067 0. ] [ 0. -0.20245889 -1.2399964 0.08339358 -0.28450122 0.04346687 0.112969 0.37815708 0.8979451 -0.18870384 1.5989671 0. ] [ 0. -1.2764944 -0.6302489 -0.40030932 0.7088355 -0.4357738 -0.6964698 -1.2360944 0.14029267 -0.22805873 1.3830445 0. ] [ 0. 0.98652774 0.23895967 -0.5817571 -1.1538591 -0.87894666 -0.84355307 2.0419972 -0.2324511 -0.93683326 -1.009426 0. ] [ 0. 0.37297752 0.45495182 1.168548 1.1663538 0.12215419 -0.7133311 0.42078158 0.29226825 0.26306987 0.04121348 0. ] [ 0. -1.3712158 -0.50886345 0.6513938 1.4725077 -0.89256525 1.1383566 0.8593861 -0.24047476 -0.13642186 -0.31052008 0. ] [ 0. 0.0302314 0.52705353 -0.51339436 -1.0903449 -0.6456612 -0.47831655 0.9625481 0.39834487 0.32436764 -1.0587852 0. ] [ 0. -0.68434244 -0.13791895 0.43408728 -1.2133577 -0.24562915 -0.00685459 0.41005945 0.9000353 0.7129644 -0.73686594 0. ] [ 0. -0.16448739 1.2553496 -0.22236425 -0.813085 -1.025055 0.6803024 0.1264247 -0.23275357 0.40961424 0.8858481 0. ]] [[ 0. -0.14064291 0.20926143 0.08736808 -0.18990666 -0.08561962 -0.13167572 0.09046283 0.35124785 -0.3388207 -0.469269 0. ] [ 0. 0.16832574 0.09776726 -0.13651964 0.03079256 0.44407305 0.06752551 -0.9117811 -0.42860195 -0.5684872 -0.32246223 0. ] [ 0. 0.3274452 0.5513551 0.7056814 -0.4718445 0.36993033 0.25769177 0.01889784 0.43417525 -0.20044668 0.01397241 0. ] [ 0. -0.13385609 -0.13415352 -0.42468733 0.06291966 0.16681525 0.09809238 -0.42325434 0.12357424 0.15082793 0.26219445 0. ] [ 0. 
0.558068 -0.00244289 0.62544894 0.30293754 0.2365947 -0.23954926 -0.5279667 0.19412765 -0.1191909 -0.25463483 0. ] [ 0. -0.02635941 0.3011986 0.35665497 0.3617722 0.4315928 0.39034885 -0.03018121 0.62907505 -0.17356442 0.00934553 0. ] [ 0. -0.2157527 0.03589783 0.37086648 -0.6448747 -0.19937132 0.36482617 -0.58715284 -0.33839926 0.09812187 0.12019491 0. ] [ 0. 0.05573096 -0.3383874 -0.43175277 0.05708633 0.01844675 0.32819074 -0.01615257 -0.02956247 0.07140748 -0.01355158 0. ] [ 0. -0.1265906 0.19046494 -0.19663474 -0.12730783 0.1614988 -0.04797295 -0.1976078 -0.12581462 0.18200627 0.18116663 0. ] [ 0. -0.20541565 -0.07835245 -0.42422718 -0.10349977 0.58707577 -0.3147668 0.00438191 -0.5012173 -0.33260098 0.16673893 0. ]]]]; ov_res: [[[[ 0. 0.45746648 0.20598365 1.4881537 -0.8703886 1.0118086 0.7634106 1.464859 -0.6847337 -0.61586297 1.2542973 0. ] [ 0. -1.1810629 0.4074048 -0.06146713 -0.38240603 0.61500937 -0.05027242 -0.8319415 1.1200951 -0.27835783 -1.1573731 0. ] [ 0. -0.94298005 0.76478463 -1.0062968 0.14714411 1.3127323 -0.06760669 -1.5747135 1.322702 -1.0819188 -0.8418187 0. ] [ 0. -1.4120944 0.56510067 -0.87645566 0.5274114 -0.3944974 -0.09664612 -0.6613808 0.33675689 -1.2654083 -1.2255508 0. ] [ 0. -0.10991719 -0.40869182 -0.8034193 -1.8516606 -0.7877444 -0.3100594 2.2082336 1.1197598 -0.19501318 0.7840755 0. ] [ 0. 0.3644931 -0.48974708 1.0700744 0.09898657 1.9233047 0.4866104 0.8665037 0.47135693 -0.46849295 0.26179764 0. ] [ 0. 0.14368057 -0.29332277 0.21000317 -1.5482244 0.20416537 1.2992097 -0.58386123 -0.18300155 -1.2931497 0.7239801 0. ] [ 0. -2.221391 -0.5165367 -0.18198976 0.554231 -0.0826643 -0.51580065 0.6364982 1.2970715 -0.02721568 -1.37454 0. ] [ 0. -1.9027234 -0.13519958 0.10878564 0.8892418 0.6872876 1.9091969 0.02440841 -1.0858428 1.0499347 -2.2349718 0. ] [ 0. -0.8526767 -1.7845788 -0.3614098 0.80401284 -0.01743722 1.6879432 0.9438562 0.5073246 -0.08579761 1.8729327 0. ]] [[ 0. 
-1.4002157 0.16437948 -1.5946125 0.26315132 -0.7992012 1.6388817 -0.05902012 0.65088934 0.80098003 0.1786918 0. ] [ 0. 0.53879255 -0.07123488 2.0122943 -0.340593 -0.764183 0.7444176 -0.7474939 0.42107397 -0.2659053 0.8490067 0. ] [ 0. -0.20245889 -1.2399964 0.08339358 -0.28450122 0.04346687 0.112969 0.37815708 0.8979451 -0.18870384 1.5989671 0. ] [ 0. -1.2764944 -0.6302489 -0.40030932 0.7088355 -0.4357738 -0.6964698 -1.2360944 0.14029267 -0.22805873 1.3830445 0. ] [ 0. 0.98652774 0.23895967 -0.5817571 -1.1538591 -0.87894666 -0.84355307 2.0419972 -0.2324511 -0.93683326 -1.009426 0. ] [ 0. 0.37297752 0.45495182 1.168548 1.1663538 0.12215419 -0.7133311 0.42078158 0.29226825 0.26306987 0.04121348 0. ] [ 0. -1.3712158 -0.50886345 0.6513938 1.4725077 -0.89256525 1.1383566 0.8593861 -0.24047476 -0.13642186 -0.31052008 0. ] [ 0. 0.0302314 0.52705353 -0.51339436 -1.0903449 -0.6456612 -0.47831655 0.9625481 0.39834487 0.32436764 -1.0587852 0. ] [ 0. -0.68434244 -0.13791895 0.43408728 -1.2133577 -0.24562915 -0.00685459 0.41005945 0.9000353 0.7129644 -0.73686594 0. ] [ 0. -0.16448739 1.2553496 -0.22236425 -0.813085 -1.025055 0.6803024 0.1264247 -0.23275357 0.40961424 0.8858481 0. ]] [[ 0. -0.14064291 0.20926143 0.08736808 -0.18990666 -0.08561962 -0.13167572 0.09046283 0.35124785 -0.3388207 -0.469269 0. ] [ 0. 0.16832574 0.09776726 -0.13651964 0.03079256 0.44407305 0.06752551 -0.9117811 -0.42860195 -0.5684872 -0.32246223 0. ] [ 0. 0.3274452 0.5513551 0.7056814 -0.4718445 0.36993033 0.25769177 0.01889784 0.43417525 -0.20044668 0.01397241 0. ] [ 0. -0.13385609 -0.13415352 -0.42468733 0.06291966 0.16681525 0.09809238 -0.42325434 0.12357424 0.15082793 0.26219445 0. ] [ 0. 0.558068 -0.00244289 0.62544894 0.30293754 0.2365947 -0.23954926 -0.5279667 0.19412765 -0.1191909 -0.25463483 0. ] [ 0. -0.02635941 0.3011986 0.35665497 0.3617722 0.4315928 0.39034885 -0.03018121 0.62907505 -0.17356442 0.00934553 0. ] [ 0. 
-0.2157527 0.03589783 0.37086648 -0.6448747 -0.19937132 0.36482617 -0.58715284 -0.33839926 0.09812187 0.12019491 0. ] [ 0. 0.05573096 -0.3383874 -0.43175277 0.05708633 0.01844675 0.32819074 -0.01615257 -0.02956247 0.07140748 -0.01355158 0. ] [ 0. -0.1265906 0.19046494 -0.19663474 -0.12730783 0.1614988 -0.04797295 -0.1976078 -0.12581462 0.18200627 0.18116663 0. ] [ 0. -0.20541565 -0.07835245 -0.42422718 -0.10349977 0.58707577 -0.3147668 0.00438191 -0.5012173 -0.33260098 0.16673893 0. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [1, 0], 'dilations': [2, 2], 'groups': 3, 'output_padding': [0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_670.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.4871 (2,1,.,.) = 1.1613 (3,1,.,.) = -2.2859 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%10) fw_re: [[[[ 1.00823355e+00 -7.19038486e-01 -2.63276517e-01 2.27708071e-01 3.62968206e-01 -3.02727986e-02 6.78249002e-02 1.65062904e-01 1.06812763e+00 -1.87815249e-01] [-3.18099946e-01 -4.58817154e-01 5.72030187e-01 1.02931368e+00 -4.20121461e-01 4.68349367e-01 6.33977279e-02 5.79155207e-01 5.19899487e-01 -7.91307569e-01] [ 4.11331117e-01 -4.80143100e-01 5.59937775e-01 2.22002506e-01 -4.90216076e-01 1.90809537e-02 -8.28662217e-01 5.14199257e-01 -2.95303047e-01 3.83653998e-01] [-2.83630133e-01 -4.57067251e-01 -2.30469946e-02 -5.14263771e-02 4.12545383e-01 1.16619420e+00 -1.05022386e-01 8.13971400e-01 3.70630682e-01 -6.71666265e-02] [ 6.12461984e-01 -2.98808157e-01 -1.87640116e-01 -8.89527202e-01 -1.23214133e-01 -1.29266128e-01 8.79874825e-02 -3.02675009e-01 6.39964759e-01 -3.97048384e-01] [ 5.13378978e-01 5.28385699e-01 -1.14873774e-01 -1.18301623e-01 -9.94074404e-01 4.03867424e-01 1.58681378e-01 -5.18411577e-01 -2.58843631e-01 3.57049614e-01] [-1.28975660e-01 1.74403042e-01 1.08822234e-01 
-1.40414432e-01 5.87381959e-01 5.86289942e-01 -2.44293198e-01 -6.67821988e-02 7.77613342e-01 2.01792866e-01] [ 8.93220544e-01 -9.13516819e-01 -2.23522991e-01 -4.78612274e-01 6.53647363e-01 2.95895547e-01 3.27171952e-01 2.08487511e-01 9.48478281e-02 -1.35157019e-01]] [[ 3.66677821e-01 -6.90740108e-01 1.64835405e+00 -8.72256398e-01 -7.95036137e-01 1.91494596e+00 9.38383520e-01 1.95760112e-02 9.31091905e-01 -2.50529736e-01] [-3.00659612e-02 3.04618001e-01 5.23985445e-01 2.17375088e+00 -1.57734179e+00 -2.82050586e+00 -6.76203966e-01 -2.85149789e+00 -5.62933981e-01 1.97606587e+00] [-9.07010492e-03 -2.55242538e+00 2.94003248e-01 2.68192935e+00 -7.54497111e-01 1.12974966e+00 -7.47746110e-01 -6.75635934e-01 -1.73943031e+00 3.77448034e+00] [-1.81617528e-01 -1.54334199e+00 5.54189026e-01 4.48968261e-01 -2.33692145e+00 -1.16595936e+00 -2.22666383e+00 -1.02986193e+00 -3.74936789e-01 2.28620028e+00] [-5.10907054e-01 7.13361502e-01 -1.98485658e-01 -1.09335005e+00 -6.64542913e-01 -1.07381664e-01 6.11831069e-01 3.76615524e-01 4.85288575e-02 -1.98660743e+00] [-6.80150539e-02 -1.58350721e-01 1.75822496e-01 7.87391067e-02 7.59849072e-01 -2.21249193e-01 4.95395273e-01 -1.26085687e+00 1.63606599e-01 -4.18109179e-01] [ 6.03569448e-01 -2.06095770e-01 -1.34247720e+00 6.13253474e-01 4.41580832e-01 1.85626125e+00 -9.22260523e-01 -1.94738138e+00 2.56826639e+00 -1.68947506e+00] [-2.91850984e-01 2.02692196e-01 4.96075720e-01 -1.68375075e+00 1.66826999e+00 -2.32504845e+00 -4.46029931e-01 -2.03051463e-01 8.29712212e-01 2.41387159e-01]] [[ 1.00413883e+00 -4.58412886e+00 1.19610238e+00 3.43395519e+00 1.52047539e+00 7.99498260e-01 -5.28278255e+00 1.65342584e-01 3.29336691e+00 6.75012589e-01] [ 1.45105362e+00 -3.94277364e-01 -3.18850875e+00 -7.11378306e-02 5.16411400e+00 -4.31762218e-01 -5.89760244e-01 6.11949384e-01 -1.72761846e+00 1.18194091e+00] [ 2.70228887e+00 2.37739134e+00 -2.53967226e-01 5.79541826e+00 3.94188547e+00 -3.13502908e+00 4.07041423e-03 2.43106917e-01 2.63972378e+00 
-2.83802152e+00] [ 1.28059268e+00 1.73185194e+00 -9.74338531e-01 3.04054832e+00 -1.28359175e+00 -1.50136161e+00 2.47253299e-01 -3.32013702e+00 -2.74221706e+00 -2.05359292e+00] [ 2.44053388e+00 4.03939635e-01 -1.90943110e+00 -5.00161827e-01 1.22214949e+00 3.14480066e+00 4.20040429e-01 7.06267834e-01 1.63352752e+00 -2.65201545e+00] [-7.32669473e-01 1.20351124e+00 1.77375734e+00 -3.19267821e+00 3.71881694e-01 -5.43741131e+00 5.66202521e-01 6.52312875e-01 4.00960684e+00 1.21244252e+00] [-8.82049263e-01 -2.41953802e+00 -3.96894860e+00 3.28341633e-01 6.37631953e-01 2.76824862e-01 -2.43815809e-01 2.68600798e+00 -2.21333194e+00 4.02673435e+00] [-1.53709972e+00 4.87813663e+00 2.02447557e+00 -8.32864285e-01 -2.51747799e+00 5.56944609e-01 -2.15195465e+00 -9.88584280e-01 1.35905719e+00 2.49821916e-01]]]]; ov_res: [[[[ 1.00823355e+00 -7.19038486e-01 -2.63276517e-01 2.27708071e-01 3.62968206e-01 -3.02727986e-02 6.78249002e-02 1.65062904e-01 1.06812763e+00 -1.87815249e-01] [-3.18099946e-01 -4.58817154e-01 5.72030187e-01 1.02931368e+00 -4.20121461e-01 4.68349367e-01 6.33977279e-02 5.79155207e-01 5.19899487e-01 -7.91307569e-01] [ 4.11331117e-01 -4.80143100e-01 5.59937775e-01 2.22002506e-01 -4.90216076e-01 1.90809537e-02 -8.28662217e-01 5.14199257e-01 -2.95303047e-01 3.83653998e-01] [-2.83630133e-01 -4.57067251e-01 -2.30469946e-02 -5.14263771e-02 4.12545383e-01 1.16619420e+00 -1.05022386e-01 8.13971400e-01 3.70630682e-01 -6.71666265e-02] [ 6.12461984e-01 -2.98808157e-01 -1.87640116e-01 -8.89527202e-01 -1.23214133e-01 -1.29266128e-01 8.79874825e-02 -3.02675009e-01 6.39964759e-01 -3.97048384e-01] [ 5.13378978e-01 5.28385699e-01 -1.14873774e-01 -1.18301623e-01 -9.94074404e-01 4.03867424e-01 1.58681378e-01 -5.18411577e-01 -2.58843631e-01 3.57049614e-01] [-1.28975660e-01 1.74403042e-01 1.08822234e-01 -1.40414432e-01 5.87381959e-01 5.86289942e-01 -2.44293198e-01 -6.67821988e-02 7.77613342e-01 2.01792866e-01] [ 8.93220544e-01 -9.13516819e-01 -2.23522991e-01 -4.78612274e-01 6.53647363e-01 
2.95895547e-01 3.27171952e-01 2.08487511e-01 9.48478281e-02 -1.35157019e-01]] [[ 3.66677821e-01 -6.90740108e-01 1.64835405e+00 -8.72256398e-01 -7.95036137e-01 1.91494596e+00 9.38383520e-01 1.95760112e-02 9.31091905e-01 -2.50529736e-01] [-3.00659612e-02 3.04618001e-01 5.23985445e-01 2.17375088e+00 -1.57734179e+00 -2.82050586e+00 -6.76203966e-01 -2.85149789e+00 -5.62933981e-01 1.97606587e+00] [-9.07010492e-03 -2.55242538e+00 2.94003248e-01 2.68192935e+00 -7.54497111e-01 1.12974966e+00 -7.47746110e-01 -6.75635934e-01 -1.73943031e+00 3.77448034e+00] [-1.81617528e-01 -1.54334199e+00 5.54189026e-01 4.48968261e-01 -2.33692145e+00 -1.16595936e+00 -2.22666383e+00 -1.02986193e+00 -3.74936789e-01 2.28620028e+00] [-5.10907054e-01 7.13361502e-01 -1.98485658e-01 -1.09335005e+00 -6.64542913e-01 -1.07381664e-01 6.11831069e-01 3.76615524e-01 4.85288575e-02 -1.98660743e+00] [-6.80150539e-02 -1.58350721e-01 1.75822496e-01 7.87391067e-02 7.59849072e-01 -2.21249193e-01 4.95395273e-01 -1.26085687e+00 1.63606599e-01 -4.18109179e-01] [ 6.03569448e-01 -2.06095770e-01 -1.34247720e+00 6.13253474e-01 4.41580832e-01 1.85626125e+00 -9.22260523e-01 -1.94738138e+00 2.56826639e+00 -1.68947506e+00] [-2.91850984e-01 2.02692196e-01 4.96075720e-01 -1.68375075e+00 1.66826999e+00 -2.32504845e+00 -4.46029931e-01 -2.03051463e-01 8.29712212e-01 2.41387159e-01]] [[ 1.00413883e+00 -4.58412886e+00 1.19610238e+00 3.43395519e+00 1.52047539e+00 7.99498260e-01 -5.28278255e+00 1.65342584e-01 3.29336691e+00 6.75012589e-01] [ 1.45105362e+00 -3.94277364e-01 -3.18850875e+00 -7.11378306e-02 5.16411400e+00 -4.31762218e-01 -5.89760244e-01 6.11949384e-01 -1.72761846e+00 1.18194091e+00] [ 2.70228887e+00 2.37739134e+00 -2.53967226e-01 5.79541826e+00 3.94188547e+00 -3.13502908e+00 4.07041423e-03 2.43106917e-01 2.63972378e+00 -2.83802152e+00] [ 1.28059268e+00 1.73185194e+00 -9.74338531e-01 3.04054832e+00 -1.28359175e+00 -1.50136161e+00 2.47253299e-01 -3.32013702e+00 -2.74221706e+00 -2.05359292e+00] [ 2.44053388e+00 
4.03939635e-01 -1.90943110e+00 -5.00161827e-01 1.22214949e+00 3.14480066e+00 4.20040429e-01 7.06267834e-01 1.63352752e+00 -2.65201545e+00] [-7.32669473e-01 1.20351124e+00 1.77375734e+00 -3.19267821e+00 3.71881694e-01 -5.43741131e+00 5.66202521e-01 6.52312875e-01 4.00960684e+00 1.21244252e+00] [-8.82049263e-01 -2.41953802e+00 -3.96894860e+00 3.28341633e-01 6.37631953e-01 2.76824862e-01 -2.43815809e-01 2.68600798e+00 -2.21333194e+00 4.02673435e+00] [-1.53709972e+00 4.87813663e+00 2.02447557e+00 -8.32864285e-01 -2.51747799e+00 5.56944609e-01 -2.15195465e+00 -9.88584280e-01 1.35905719e+00 2.49821916e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': [0, 0], 'dilations': [2, 2], 'groups': 3, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_672.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.3950 (2,1,.,.) = -1.3640 (3,1,.,.) = 0.8124 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[-6.03840649e-01 -7.29003072e-01 4.10371348e-02 -3.68094504e-01 -2.94964671e-01 6.70815647e-01 5.89039698e-02 -9.84398201e-02 -4.22745973e-01 -8.50989297e-02] [ 6.34803772e-01 2.18406662e-01 2.47353509e-01 1.19333935e-03 4.64130014e-01 1.35834154e-03 5.62847912e-01 1.22860476e-01 -7.31998086e-02 -1.16705537e+00] [-3.18486780e-01 1.77409261e-01 9.48773026e-02 7.07216933e-02 -5.16323792e-03 -9.40935612e-02 3.16071957e-01 -1.43198714e-01 5.67559123e-01 -3.89924794e-01] [-6.01533771e-01 -2.07162693e-01 -2.88965613e-01 1.42379254e-01 -6.47614151e-03 -2.39827827e-01 9.33123052e-01 2.56673127e-01 -4.53857958e-01 5.96768670e-02] [-2.97947288e-01 -1.46560699e-01 -4.25735056e-01 -4.90475930e-02 5.20117819e-01 -2.91479677e-01 8.82678851e-02 1.31738469e-01 4.24937904e-01 -2.37652674e-01] [-4.79129165e-01 7.78781056e-01 -4.77237672e-01 -4.04799789e-01 -1.66534945e-01 -4.84206527e-02 -5.68468511e-01 1.32475600e-01 1.05270900e-01 -6.63112625e-02] [ 2.67103553e-01 3.31248552e-01 3.37136537e-01 1.14301719e-01 3.31860095e-01 -3.64291400e-01 4.18556809e-01 
1.26018822e+00 -1.72333613e-01 2.14754596e-01] [-8.69108830e-03 2.92564221e-02 9.29614902e-02 3.25788468e-01 3.02910477e-01 3.21096867e-01 4.26586211e-01 6.16332471e-01 -9.92045224e-01 -2.59507805e-01] [ 2.32657462e-01 -1.27403289e-01 3.07876021e-01 -8.64911005e-02 -5.02816498e-01 2.81769812e-01 -1.09973870e-01 3.73534769e-01 -3.34303975e-01 -5.64978719e-01] [-8.61172795e-01 2.26916716e-01 5.30690402e-02 -9.38026384e-02 1.24691658e-01 4.33197111e-01 -1.64839715e-01 3.08349371e-01 -1.87369496e-01 2.33445433e-03]] [[-6.45202637e-01 2.23949224e-01 1.07889652e+00 -2.56746197e+00 -4.16058153e-01 -1.97323096e+00 2.29698920e+00 -1.07156801e+00 2.88536954e+00 -2.65663385e-01] [ 3.03080916e+00 1.38927853e+00 -1.22956622e+00 -7.81206042e-02 1.38785690e-01 4.05351162e-01 -5.84532917e-01 -1.36161876e+00 1.16914034e-01 -2.10067034e+00] [ 2.20707989e+00 1.95313036e+00 -2.31481481e+00 2.29592681e+00 -7.35194504e-01 3.48833084e-01 9.64347303e-01 -3.97602580e-02 -1.06937861e+00 -1.12331831e+00] [ 2.82154584e+00 1.51719105e+00 3.31469703e+00 7.83457577e-01 -1.30454227e-01 -9.55915689e-01 8.61102343e-02 1.13286912e+00 -3.46802592e-01 1.10514963e+00] [-3.78957200e+00 9.06023234e-02 2.80039966e-01 -1.13808858e+00 2.63623023e+00 1.59881330e+00 -8.87371957e-01 -1.54630077e+00 -1.22388291e+00 -9.01257813e-01] [ 2.38091186e-01 -1.36699986e+00 7.62411892e-01 1.65969837e+00 -2.55304480e+00 2.84143314e-02 2.98966378e-01 1.06030691e+00 -1.91890931e+00 -5.38002610e-01] [ 1.44055057e+00 -8.58556596e-05 -7.17558637e-02 -8.05632234e-01 7.98397288e-02 -1.67989564e+00 7.31957316e-01 6.14787452e-02 -9.79759812e-01 7.43445456e-01] [ 2.55331755e-01 6.81307971e-01 1.59322485e-01 -9.29943144e-01 6.56303346e-01 2.15406203e+00 4.56279486e-01 5.84749207e-02 -1.12585914e+00 -1.25356495e+00] [-1.61343420e+00 -7.68845156e-02 4.90340173e-01 -1.54303646e+00 3.86203438e-01 2.08910179e+00 -1.20447075e+00 3.08783948e-01 -4.65432763e-01 -3.17614108e-01] [ 2.77064532e-01 2.87652373e-01 -1.06315267e+00 1.42908263e+00 
1.51303315e+00 -1.90193272e+00 -1.37745464e+00 -8.90773058e-01 -8.93453538e-01 -1.28373826e+00]] [[-2.08139032e-01 1.66352713e+00 5.32570601e-01 -1.47374615e-01 -1.67948961e+00 7.47650921e-01 8.72076213e-01 -4.15989190e-01 5.11810541e-01 -1.15084040e+00] [ 6.60036027e-01 2.12828493e+00 -5.37633061e-01 3.67555618e-01 4.22847360e-01 -5.08790672e-01 -4.61408645e-01 2.07008854e-01 -1.42066464e-01 -6.28594220e-01] [ 5.65782070e-01 -1.01286793e+00 -2.08010674e-02 1.43982815e-02 -2.90784866e-01 -1.23454654e+00 -8.05016041e-01 -4.00527686e-01 -1.50760189e-01 -3.42417397e-02] [ 3.97724599e-01 1.57708347e+00 2.27509797e-01 -2.91799843e-01 1.00984597e+00 9.48876888e-03 -4.13630128e-01 -1.70392588e-01 -8.13648582e-01 -7.02169418e-01] [-6.03156388e-01 1.04815984e+00 7.83824980e-01 7.97084391e-01 -1.38768458e+00 -1.41206753e+00 -2.26039097e-01 -1.03697681e+00 5.14863193e-01 -9.88698006e-01] [ 1.15749002e+00 1.49349272e+00 3.16910982e+00 3.87208723e-02 -4.53301936e-01 1.94892824e-01 8.47134531e-01 5.03115237e-01 -1.14137757e+00 -6.63592041e-01] [ 5.27743220e-01 9.85346019e-01 -1.58410871e+00 4.25992340e-01 3.48406620e-02 -2.36235887e-01 1.90718099e-01 -4.62643027e-01 -1.15333593e+00 8.89200449e-01] [-9.70823586e-01 9.68079031e-01 1.67045325e-01 2.92690277e-01 -9.22814727e-01 -8.47956479e-01 1.33583117e+00 6.04666293e-01 -9.77660835e-01 5.92290998e-01] [ 8.32126200e-01 -6.77701414e-01 4.61443871e-01 -4.43645507e-01 2.43313551e-01 -1.29030079e-01 -1.49242914e+00 9.97827768e-01 2.32124910e-01 -4.69700217e-01] [-1.69939965e-01 1.88901827e-01 -7.98597813e-01 -7.40390182e-01 -7.57683098e-01 -1.06315839e+00 -8.66362810e-01 -1.67935237e-01 -9.13940966e-02 3.85329306e-01]]]]; ov_res: [[[[-6.03840649e-01 -7.29003072e-01 4.10371348e-02 -3.68094504e-01 -2.94964671e-01 6.70815647e-01 5.89039698e-02 -9.84398201e-02 -4.22745973e-01 -8.50989297e-02] [ 6.34803772e-01 2.18406662e-01 2.47353509e-01 1.19333935e-03 4.64130014e-01 1.35834154e-03 5.62847912e-01 1.22860476e-01 -7.31998086e-02 
-1.16705537e+00] [-3.18486780e-01 1.77409261e-01 9.48773026e-02 7.07216933e-02 -5.16323792e-03 -9.40935612e-02 3.16071957e-01 -1.43198714e-01 5.67559123e-01 -3.89924794e-01] [-6.01533771e-01 -2.07162693e-01 -2.88965613e-01 1.42379254e-01 -6.47614151e-03 -2.39827827e-01 9.33123052e-01 2.56673127e-01 -4.53857958e-01 5.96768670e-02] [-2.97947288e-01 -1.46560699e-01 -4.25735056e-01 -4.90475930e-02 5.20117819e-01 -2.91479677e-01 8.82678851e-02 1.31738469e-01 4.24937904e-01 -2.37652674e-01] [-4.79129165e-01 7.78781056e-01 -4.77237672e-01 -4.04799789e-01 -1.66534945e-01 -4.84206527e-02 -5.68468511e-01 1.32475600e-01 1.05270900e-01 -6.63112625e-02] [ 2.67103553e-01 3.31248552e-01 3.37136537e-01 1.14301719e-01 3.31860095e-01 -3.64291400e-01 4.18556809e-01 1.26018822e+00 -1.72333613e-01 2.14754596e-01] [-8.69108830e-03 2.92564221e-02 9.29614902e-02 3.25788468e-01 3.02910477e-01 3.21096867e-01 4.26586211e-01 6.16332471e-01 -9.92045224e-01 -2.59507805e-01] [ 2.32657462e-01 -1.27403289e-01 3.07876021e-01 -8.64911005e-02 -5.02816498e-01 2.81769812e-01 -1.09973870e-01 3.73534769e-01 -3.34303975e-01 -5.64978719e-01] [-8.61172795e-01 2.26916716e-01 5.30690402e-02 -9.38026384e-02 1.24691658e-01 4.33197111e-01 -1.64839715e-01 3.08349371e-01 -1.87369496e-01 2.33445433e-03]] [[-6.45202637e-01 2.23949224e-01 1.07889652e+00 -2.56746197e+00 -4.16058153e-01 -1.97323096e+00 2.29698920e+00 -1.07156801e+00 2.88536954e+00 -2.65663385e-01] [ 3.03080916e+00 1.38927853e+00 -1.22956622e+00 -7.81206042e-02 1.38785690e-01 4.05351162e-01 -5.84532917e-01 -1.36161876e+00 1.16914034e-01 -2.10067034e+00] [ 2.20707989e+00 1.95313036e+00 -2.31481481e+00 2.29592681e+00 -7.35194504e-01 3.48833084e-01 9.64347303e-01 -3.97602580e-02 -1.06937861e+00 -1.12331831e+00] [ 2.82154584e+00 1.51719105e+00 3.31469703e+00 7.83457577e-01 -1.30454227e-01 -9.55915689e-01 8.61102343e-02 1.13286912e+00 -3.46802592e-01 1.10514963e+00] [-3.78957200e+00 9.06023234e-02 2.80039966e-01 -1.13808858e+00 2.63623023e+00 1.59881330e+00 
-8.87371957e-01 -1.54630077e+00 -1.22388291e+00 -9.01257813e-01] [ 2.38091186e-01 -1.36699986e+00 7.62411892e-01 1.65969837e+00 -2.55304480e+00 2.84143314e-02 2.98966378e-01 1.06030691e+00 -1.91890931e+00 -5.38002610e-01] [ 1.44055057e+00 -8.58556596e-05 -7.17558637e-02 -8.05632234e-01 7.98397288e-02 -1.67989564e+00 7.31957316e-01 6.14787452e-02 -9.79759812e-01 7.43445456e-01] [ 2.55331755e-01 6.81307971e-01 1.59322485e-01 -9.29943144e-01 6.56303346e-01 2.15406203e+00 4.56279486e-01 5.84749207e-02 -1.12585914e+00 -1.25356495e+00] [-1.61343420e+00 -7.68845156e-02 4.90340173e-01 -1.54303646e+00 3.86203438e-01 2.08910179e+00 -1.20447075e+00 3.08783948e-01 -4.65432763e-01 -3.17614108e-01] [ 2.77064532e-01 2.87652373e-01 -1.06315267e+00 1.42908263e+00 1.51303315e+00 -1.90193272e+00 -1.37745464e+00 -8.90773058e-01 -8.93453538e-01 -1.28373826e+00]] [[-2.08139032e-01 1.66352713e+00 5.32570601e-01 -1.47374615e-01 -1.67948961e+00 7.47650921e-01 8.72076213e-01 -4.15989190e-01 5.11810541e-01 -1.15084040e+00] [ 6.60036027e-01 2.12828493e+00 -5.37633061e-01 3.67555618e-01 4.22847360e-01 -5.08790672e-01 -4.61408645e-01 2.07008854e-01 -1.42066464e-01 -6.28594220e-01] [ 5.65782070e-01 -1.01286793e+00 -2.08010674e-02 1.43982815e-02 -2.90784866e-01 -1.23454654e+00 -8.05016041e-01 -4.00527686e-01 -1.50760189e-01 -3.42417397e-02] [ 3.97724599e-01 1.57708347e+00 2.27509797e-01 -2.91799843e-01 1.00984597e+00 9.48876888e-03 -4.13630128e-01 -1.70392588e-01 -8.13648582e-01 -7.02169418e-01] [-6.03156388e-01 1.04815984e+00 7.83824980e-01 7.97084391e-01 -1.38768458e+00 -1.41206753e+00 -2.26039097e-01 -1.03697681e+00 5.14863193e-01 -9.88698006e-01] [ 1.15749002e+00 1.49349272e+00 3.16910982e+00 3.87208723e-02 -4.53301936e-01 1.94892824e-01 8.47134531e-01 5.03115237e-01 -1.14137757e+00 -6.63592041e-01] [ 5.27743220e-01 9.85346019e-01 -1.58410871e+00 4.25992340e-01 3.48406620e-02 -2.36235887e-01 1.90718099e-01 -4.62643027e-01 -1.15333593e+00 8.89200449e-01] [-9.70823586e-01 9.68079031e-01 
1.67045325e-01 2.92690277e-01 -9.22814727e-01 -8.47956479e-01 1.33583117e+00 6.04666293e-01 -9.77660835e-01 5.92290998e-01] [ 8.32126200e-01 -6.77701414e-01 4.61443871e-01 -4.43645507e-01 2.43313551e-01 -1.29030079e-01 -1.49242914e+00 9.97827768e-01 2.32124910e-01 -4.69700217e-01] [-1.69939965e-01 1.88901827e-01 -7.98597813e-01 -7.40390182e-01 -7.57683098e-01 -1.06315839e+00 -8.66362810e-01 -1.67935237e-01 -9.13940966e-02 3.85329306e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 1], 'bias_shape': [1], 'pads': [1, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_674.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.1422 (2,1,.,.) = 0.5767 (3,1,.,.) = 2.1379 [ CPUFloatType{3,1,1,1} ]]() %10 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%10) fw_re: [[[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 4.5767352e-01 2.8743167e+00 3.6042035e+00 -4.1697901e-01 9.6492440e-01 1.5092976e+00 -1.8989558e+00 -3.3739679e+00 -1.2543409e+00 2.2259779e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 2.0712923e-02 -4.2469282e+00 1.3016076e+00 -6.2850916e-01 -1.6929523e+00 -1.5047532e+00 -1.4482582e+00 8.7630111e-01 -2.4646460e-03 3.9786382e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 1.3504742e+00 -9.6446383e-01 -6.3627529e-01 -1.4457917e+00 -3.6359353e+00 -3.8647023e-01 9.9430144e-01 -2.0930924e+00 2.3278201e+00 1.0040829e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 
0.0000000e+00 0.0000000e+00] [ 1.4249772e+00 3.6804566e+00 -6.4789879e-01 1.2593488e+00 -3.3362660e+00 -3.6381831e+00 1.7177985e+00 3.6191442e+00 -2.5586808e-01 -1.3017353e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-2.0776210e+00 8.0454713e-01 -3.8752708e-01 7.8749734e-01 -3.6812489e+00 -1.2268320e+00 -9.7229594e-01 1.2571906e-02 1.8065300e+00 -2.1633248e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-3.9727587e-01 -7.2982490e-01 -1.9375234e+00 2.7447429e+00 -5.7426864e-01 -1.0875630e+00 1.7087718e+00 -1.4280653e+00 1.7853010e+00 1.4826247e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-4.2071443e+00 2.0719983e-02 1.6132867e+00 7.5099725e-01 -2.9940624e+00 1.1089245e+00 6.7958057e-02 -2.3235927e+00 8.7340184e-02 4.4911984e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-7.0530766e-01 2.9062822e+00 -6.1922420e-02 2.9075697e-02 -7.8138340e-01 1.9984108e+00 3.1606221e+00 2.6840718e+00 1.4459140e+00 1.8119330e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]]; ov_res: [[[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 4.5767352e-01 2.8743167e+00 3.6042035e+00 -4.1697901e-01 9.6492440e-01 1.5092976e+00 -1.8989558e+00 -3.3739679e+00 -1.2543409e+00 2.2259779e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 2.0712923e-02 -4.2469282e+00 1.3016076e+00 -6.2850916e-01 
-1.6929523e+00 -1.5047532e+00 -1.4482582e+00 8.7630111e-01 -2.4646460e-03 3.9786382e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 1.3504742e+00 -9.6446383e-01 -6.3627529e-01 -1.4457917e+00 -3.6359353e+00 -3.8647023e-01 9.9430144e-01 -2.0930924e+00 2.3278201e+00 1.0040829e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 1.4249772e+00 3.6804566e+00 -6.4789879e-01 1.2593488e+00 -3.3362660e+00 -3.6381831e+00 1.7177985e+00 3.6191442e+00 -2.5586808e-01 -1.3017353e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-2.0776210e+00 8.0454713e-01 -3.8752708e-01 7.8749734e-01 -3.6812489e+00 -1.2268320e+00 -9.7229594e-01 1.2571906e-02 1.8065300e+00 -2.1633248e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-3.9727587e-01 -7.2982490e-01 -1.9375234e+00 2.7447429e+00 -5.7426864e-01 -1.0875630e+00 1.7087718e+00 -1.4280653e+00 1.7853010e+00 1.4826247e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-4.2071443e+00 2.0719983e-02 1.6132867e+00 7.5099725e-01 -2.9940624e+00 1.1089245e+00 6.7958057e-02 -2.3235927e+00 8.7340184e-02 4.4911984e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-7.0530766e-01 2.9062822e+00 -6.1922420e-02 2.9075697e-02 -7.8138340e-01 1.9984108e+00 3.1606221e+00 2.6840718e+00 1.4459140e+00 1.8119330e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]] Accuracy validation 
successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} 0] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_676.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.7105 (2,1,.,.) = -0.3204 (3,1,.,.) = -0.3868 (1,2,.,.) = 0.01 * 9.6610 (2,2,.,.) = -0.9286 (3,2,.,.) = 0.8238 (1,3,.,.) = 1.1374 (2,3,.,.) = -1.0098 (3,3,.,.) = 0.1349 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MEfw_re: [[[[ 0.16322193 -0.9214444 -0.05696816 -0.39860207 1.2400382 0.9632577 -0.11208382 0.33996397 -0.8819446 0.27183858] [-0.4080633 -0.25379652 1.2764573 -2.2846994 0.60774267 -4.201155 -0.5749452 -0.92685264 -2.257899 0.51827866] [ 2.1516407 0.3022912 0.38512686 0.7613185 0.17812213 -2.6454647 0.3746173 0.18078767 0.3730262 1.5022769 ] [-1.1986607 -1.3024818 0.58186454 0.13931149 0.33365506 -0.6714123 -0.32418004 1.3699309 0.8154461 -1.6367841 ] [-1.8752885 -1.4808774 -1.6454201 -0.6148497 2.333492 2.9199908 -0.642524 -1.8267053 -1.040346 1.0303307 ]] [[-0.16315804 0.5788279 -1.7987287 0.41623068 -0.13619046 -0.8952249 0.04269201 -0.5012322 0.85031766 -1.7030083 ] [ 1.096796 0.9365051 -1.9437484 0.8335534 0.543552 2.6933339 1.1332676 0.7736514 3.266047 -1.4213547 ] [-2.4333122 -0.242079 -0.70882 -0.09086279 -1.3141097 
2.6902854 0.6067357 0.4956258 -0.4777175 -2.1354399 ] [ 1.3447788 0.24280728 -1.774191 -1.0494224 0.99702626 1.3799973 -0.7515449 0.38549018 -1.75446 1.0054854 ] [ 3.1121752 1.4816835 1.5186633 1.3330827 0.33493194 -2.1366534 1.9158298 1.03761 -0.5091562 -1.376329 ]] [[-0.03729195 0.31718972 1.817041 0.04770913 -0.7463119 0.17086612 0.18549302 0.45351166 -0.223878 1.5822244 ] [-0.9490618 -0.71367484 1.0303634 0.92639434 -1.0954528 0.5961743 -0.8333946 -0.01308207 -1.3735038 0.9507306 ] [ 0.40754244 -0.00542942 0.35966954 -0.54288375 1.2696079 -0.44244695 -1.0230938 -0.6806744 0.36034852 0.9368201 ] [-0.26507464 0.86128426 1.1659888 0.8346393 -1.4316413 -0.78347313 0.9839129 -1.4207164 1.1085682 0.327887 ] [-1.3499649 -0.3608443 -0.10516845 -1.071008 -2.4175277 -0.49077734 -1.1434128 0.5024933 1.4610167 0.4987939 ]]]]; ov_res: [[[[ 0.16322193 -0.9214444 -0.05696816 -0.39860207 1.2400382 0.9632577 -0.11208382 0.33996397 -0.8819446 0.27183858] [-0.4080633 -0.25379652 1.2764573 -2.2846994 0.60774267 -4.201155 -0.5749452 -0.92685264 -2.257899 0.51827866] [ 2.1516407 0.3022912 0.38512686 0.7613185 0.17812213 -2.6454647 0.3746173 0.18078767 0.3730262 1.5022769 ] [-1.1986607 -1.3024818 0.58186454 0.13931149 0.33365506 -0.6714123 -0.32418004 1.3699309 0.8154461 -1.6367841 ] [-1.8752885 -1.4808774 -1.6454201 -0.6148497 2.333492 2.9199908 -0.642524 -1.8267053 -1.040346 1.0303307 ]] [[-0.16315804 0.5788279 -1.7987287 0.41623068 -0.13619046 -0.8952249 0.04269201 -0.5012322 0.85031766 -1.7030083 ] [ 1.096796 0.9365051 -1.9437484 0.8335534 0.543552 2.6933339 1.1332676 0.7736514 3.266047 -1.4213547 ] [-2.4333122 -0.242079 -0.70882 -0.09086279 -1.3141097 2.6902854 0.6067357 0.4956258 -0.4777175 -2.1354399 ] [ 1.3447788 0.24280728 -1.774191 -1.0494224 0.99702626 1.3799973 -0.7515449 0.38549018 -1.75446 1.0054854 ] [ 3.1121752 1.4816835 1.5186633 1.3330827 0.33493194 -2.1366534 1.9158298 1.03761 -0.5091562 -1.376329 ]] [[-0.03729195 0.31718972 1.817041 0.04770913 -0.7463119 
0.17086612 0.18549302 0.45351166 -0.223878 1.5822244 ] [-0.9490618 -0.71367484 1.0303634 0.92639434 -1.0954528 0.5961743 -0.8333946 -0.01308207 -1.3735038 0.9507306 ] [ 0.40754244 -0.00542942 0.35966954 -0.54288375 1.2696079 -0.44244695 -1.0230938 -0.6806744 0.36034852 0.9368201 ] [-0.26507464 0.86128426 1.1659888 0.8346393 -1.4316413 -0.78347313 0.9839129 -1.4207164 1.1085682 0.327887 ] [-1.3499649 -0.3608443 -0.10516845 -1.071008 -2.4175277 -0.49077734 -1.1434128 0.5024933 1.4610167 0.4987939 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} 0] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_678.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -2.1170 (2,1,.,.) = 0.6736 (3,1,.,.) = 0.7525 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[-4.424037 0. -0.99697804 0. -4.976512 0. 0.957621 0. 1.9527738 0. 0.7281888 0. 2.692259 0. 0.75729835 0. -1.954099 0. 1.7262535 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 1.99763 0. -1.6501528 0. -0.5854749 0. -1.8422757 0. 2.371499 0. -1.74562 0. -0.53789353 0. 2.152014 0. -0.03671638 0. 1.2912418 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.18497011 0. -1.3960053 0. 3.7507613 0. 4.0492573 0. 1.6057838 0. 1.5815525 0. -2.7555313 0. 0.69426614 0. 2.4409556 0. 3.3340373 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-3.3506067 0. -0.93236583 0. -2.5374033 0. 1.9739964 0. -1.4762406 0. 2.0619206 0. -0.08721137 0. -2.0906303 0. 1.2737844 0. -2.1570227 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 2.0616093 0. -0.37851128 0. -3.032146 0. -3.7063746 0. -4.3008347 0. 1.1197933 0. -4.204852 0. -4.033396 0. -4.1123643 0. -2.764445 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.8165826 0. -0.5053562 0. -0.5798723 0. -0.5349606 0. 0.38954145 0. -1.2086827 0. 
-0.33194822 0. 0.45453325 0. -0.05315494 0. 2.5833297 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 3.6076024 0. 1.7992977 0. 2.954169 0. -1.1618725 0. -5.646893 0. 0.7242775 0. 0.8618226 0. -4.2324514 0. 1.2162864 0. -2.8092077 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-2.9547963 0. -4.044218 0. -3.0459673 0. 0.05659312 0. -3.5606823 0. 3.6131244 0. -4.460353 0. -1.0117663 0. -2.2284937 0. 1.681142 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.46938598 0. -2.5047522 0. -0.9939527 0. 2.100443 0. -1.3162031 0. 2.8709512 0. 1.0672511 0. 1.9279586 0. -0.23256932 0. 0.8673171 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.43313903 0. 0.41225928 0. -3.8101263 0. 2.6851337 0. 0.11147517 0. 0.62307626 0. 1.1731234 0. 0.9048967 0. -1.0956733 0. 0.35483587]]]]; ov_res: [[[[-4.424037 0. -0.99697804 0. -4.976512 0. 0.957621 0. 1.9527738 0. 0.7281888 0. 2.692259 0. 0.75729835 0. -1.954099 0. 1.7262535 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 1.99763 0. -1.6501528 0. -0.5854749 0. -1.8422757 0. 2.371499 0. -1.74562 0. -0.53789353 0. 2.152014 0. -0.03671638 0. 1.2912418 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.18497011 0. -1.3960053 0. 3.7507613 0. 4.0492573 0. 1.6057838 0. 1.5815525 0. -2.7555313 0. 0.69426614 0. 2.4409556 0. 3.3340373 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-3.3506067 0. -0.93236583 0. -2.5374033 0. 1.9739964 0. -1.4762406 0. 2.0619206 0. -0.08721137 0. -2.0906303 0. 1.2737844 0. -2.1570227 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 2.0616093 0. -0.37851128 0. -3.032146 0. -3.7063746 0. -4.3008347 0. 1.1197933 0. -4.204852 0. -4.033396 0. -4.1123643 0. -2.764445 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.8165826 0. -0.5053562 0. -0.5798723 0. -0.5349606 0. 0.38954145 0. -1.2086827 0. -0.33194822 0. 0.45453325 0. -0.05315494 0. 2.5833297 ] [ 0. 0. 0. 0. 0. 0. 0. 
0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 3.6076024 0. 1.7992977 0. 2.954169 0. -1.1618725 0. -5.646893 0. 0.7242775 0. 0.8618226 0. -4.2324514 0. 1.2162864 0. -2.8092077 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-2.9547963 0. -4.044218 0. -3.0459673 0. 0.05659312 0. -3.5606823 0. 3.6131244 0. -4.460353 0. -1.0117663 0. -2.2284937 0. 1.681142 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.46938598 0. -2.5047522 0. -0.9939527 0. 2.100443 0. -1.3162031 0. 2.8709512 0. 1.0672511 0. 1.9279586 0. -0.23256932 0. 0.8673171 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.43313903 0. 0.41225928 0. -3.8101263 0. 2.6851337 0. 0.11147517 0. 0.62307626 0. 1.1731234 0. 0.9048967 0. -1.0956733 0. 0.35483587]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 2], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_680.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.3494 (2,1,.,.) = -0.5707 (3,1,.,.) = 0.01 * 6.4791 (1,2,.,.) = -0.2823 (2,2,.,.) = 0.7336 (3,2,.,.) = 0.6315 (1,3,.,.) = -0.4811 (2,3,.,.) = 1.2938 (3,3,.,.) = -0.3382 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 0.47528553 0.43676135 0.5323103 -0.05355794 0.494259 ] [ 0.10142441 0.13594463 -0.5915019 -0.43076497 -0.43144876] [-0.1976056 -0.23479924 0.4143167 0.07136983 0.2940005 ] [-0.8537321 -1.5653045 0.39414653 1.1108023 -0.40775332] [ 0.8027555 0.02837643 1.2917674 -0.766412 0.5436816 ]] [[-1.7646747 -1.4742752 1.7495944 -0.9377192 -3.5062957 ] [-1.9880782 0.91891617 0.86662453 2.3974524 0.66462666] [-0.7337059 0.8849082 -0.48180556 0.83199334 -3.222764 ] [-0.07793783 2.282001 0.43903586 -1.3545798 -1.6241822 ] [-0.6345271 0.5498023 -1.4196521 -1.0203669 0.18692626]] [[ 1.303426 0.44115478 0.03777746 -0.3592442 -0.5060404 ] [-0.0489099 -0.88736224 0.3162597 0.8442553 0.29116687] [-0.52478707 -0.59531695 0.09698046 -0.289668 0.7583491 ] [-0.54906327 -0.76276344 -0.0349392 0.8733183 0.49226478] [-1.3919362 1.158587 0.36536512 -0.7919237 -0.66833407]]]]; ov_res: [[[[ 0.47528553 0.43676135 0.5323103 -0.05355794 0.494259 ] [ 0.10142441 
0.13594463 -0.5915019 -0.43076497 -0.43144876] [-0.1976056 -0.23479924 0.4143167 0.07136983 0.2940005 ] [-0.8537321 -1.5653045 0.39414653 1.1108023 -0.40775332] [ 0.8027555 0.02837643 1.2917674 -0.766412 0.5436816 ]] [[-1.7646747 -1.4742752 1.7495944 -0.9377192 -3.5062957 ] [-1.9880782 0.91891617 0.86662453 2.3974524 0.66462666] [-0.7337059 0.8849082 -0.48180556 0.83199334 -3.222764 ] [-0.07793783 2.282001 0.43903586 -1.3545798 -1.6241822 ] [-0.6345271 0.5498023 -1.4196521 -1.0203669 0.18692626]] [[ 1.303426 0.44115478 0.03777746 -0.3592442 -0.5060404 ] [-0.0489099 -0.88736224 0.3162597 0.8442553 0.29116687] [-0.52478707 -0.59531695 0.09698046 -0.289668 0.7583491 ] [-0.54906327 -0.76276344 -0.0349392 0.8733183 0.49226478] [-1.3919362 1.158587 0.36536512 -0.7919237 -0.66833407]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1], 'strides': [2, 1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': False} 1] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_682.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, strides=[3, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.4906 (2,1,.,.) = -0.9251 (3,1,.,.) = -2.1961 (1,2,.,.) = 2.0259 (2,2,.,.) = 0.8596 (3,2,.,.) = 0.01 * 3.7696 (1,3,.,.) = -0.7267 (2,3,.,.) = -0.2107 (3,3,.,.) = -0.2720 [ CPUFloatType{3,3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[-7.6251900e-01 1.1715911e-03 -4.3402681e+00 1.0414326e+00 1.8187944e+00 1.2924205e+00 -1.6523834e+00 1.8451568e+00 -2.3227484e+00 1.5998199e+00] [-8.6782879e-01 -7.7036828e-01 -1.0659385e+00 2.3645713e+00 -7.8710312e-01 1.4836222e+00 -1.9221771e+00 -1.3813984e+00 1.5160462e-01 -6.9915372e-01] [-1.7322161e+00 3.8589642e+00 -9.6748608e-01 1.3098346e+00 5.6367266e-01 3.3756487e+00 -8.5888064e-01 1.4729670e+00 9.6080184e-01 5.7776413e+00] [ 1.4488338e+00 2.1377680e+00 5.1073879e-01 9.0096331e-01 7.0437118e-03 -3.0299622e-01 2.8022137e-01 1.7872354e+00 -1.5965934e+00 -2.7577028e+00] [-2.3016419e+00 -2.4935017e+00 -2.5691870e-01 -4.4530683e+00 -3.7799730e+00 7.9509348e-01 4.7243282e-01 -1.4653006e+00 9.9412590e-01 7.7833235e-01]] [[-3.1198916e+00 -4.7686577e-01 -3.5279694e+00 -1.1330233e+00 -4.3201808e-02 2.2360358e-01 -9.0464644e-02 -1.0447409e+00 -1.4958528e+00 1.0838020e+00] [ 3.7812051e-01 2.7306873e-01 -1.9128114e+00 
-6.9937062e-01 -6.1560071e-01 8.7808204e-01 -1.7363784e+00 7.7919137e-01 -9.8784697e-01 2.7147999e-01] [-1.2457424e+00 -6.1532266e-02 -9.7552347e-01 -8.1661636e-01 -1.3016682e+00 4.9709353e+00 2.9648341e-02 5.3853822e-01 -7.3495322e-01 2.6794233e+00] [ 1.5077220e+00 2.8847402e-01 1.5943656e+00 -2.4797045e-02 7.6494837e-01 -9.4085139e-01 6.1485499e-01 -3.0342907e-01 -4.8593903e-01 1.6682768e-01] [-1.4488448e+00 -1.0475211e+00 2.3456094e+00 1.6475831e-01 -1.9944466e+00 -1.0337468e+00 1.3561217e+00 -2.7199563e-01 1.9433495e+00 2.8880337e-01]] [[-5.0436950e+00 -5.4074490e-01 -3.5478230e+00 -2.7196190e+00 -1.3144863e+00 4.8032337e-01 4.9983397e-01 -3.4099994e+00 -7.6448691e-01 1.2952222e+00] [ 2.5358471e-01 1.0790972e+00 -2.8827140e+00 -2.8997231e+00 -1.1496319e-01 4.4246322e-01 -1.8777853e+00 2.2742910e+00 -2.0227826e+00 7.4126613e-01] [-1.2360619e+00 -2.5505614e+00 -1.1257643e+00 -2.7437327e+00 -2.8258913e+00 7.5595016e+00 9.3596652e-02 -4.1714850e-01 -1.5395526e+00 8.6575025e-01] [ 2.1036625e+00 -1.5453223e+00 2.8238542e+00 -2.4565892e-01 1.8553201e+00 -1.6333132e+00 9.2456293e-01 -2.1263838e+00 4.1264367e-01 2.6444790e+00] [-1.1682670e+00 -7.3352545e-02 4.7137918e+00 3.0965176e+00 -1.7045417e+00 -3.0559466e+00 2.1552920e+00 9.2485166e-01 3.4637647e+00 -9.2014790e-01]]]]; ov_res: [[[[-7.6251900e-01 1.1715911e-03 -4.3402681e+00 1.0414326e+00 1.8187944e+00 1.2924205e+00 -1.6523834e+00 1.8451568e+00 -2.3227484e+00 1.5998199e+00] [-8.6782879e-01 -7.7036828e-01 -1.0659385e+00 2.3645713e+00 -7.8710312e-01 1.4836222e+00 -1.9221771e+00 -1.3813984e+00 1.5160462e-01 -6.9915372e-01] [-1.7322161e+00 3.8589642e+00 -9.6748608e-01 1.3098346e+00 5.6367266e-01 3.3756487e+00 -8.5888064e-01 1.4729670e+00 9.6080184e-01 5.7776413e+00] [ 1.4488338e+00 2.1377680e+00 5.1073879e-01 9.0096331e-01 7.0437118e-03 -3.0299622e-01 2.8022137e-01 1.7872354e+00 -1.5965934e+00 -2.7577028e+00] [-2.3016419e+00 -2.4935017e+00 -2.5691870e-01 -4.4530683e+00 -3.7799730e+00 7.9509348e-01 4.7243282e-01 
-1.4653006e+00 9.9412590e-01 7.7833235e-01]] [[-3.1198916e+00 -4.7686577e-01 -3.5279694e+00 -1.1330233e+00 -4.3201808e-02 2.2360358e-01 -9.0464644e-02 -1.0447409e+00 -1.4958528e+00 1.0838020e+00] [ 3.7812051e-01 2.7306873e-01 -1.9128114e+00 -6.9937062e-01 -6.1560071e-01 8.7808204e-01 -1.7363784e+00 7.7919137e-01 -9.8784697e-01 2.7147999e-01] [-1.2457424e+00 -6.1532266e-02 -9.7552347e-01 -8.1661636e-01 -1.3016682e+00 4.9709353e+00 2.9648341e-02 5.3853822e-01 -7.3495322e-01 2.6794233e+00] [ 1.5077220e+00 2.8847402e-01 1.5943656e+00 -2.4797045e-02 7.6494837e-01 -9.4085139e-01 6.1485499e-01 -3.0342907e-01 -4.8593903e-01 1.6682768e-01] [-1.4488448e+00 -1.0475211e+00 2.3456094e+00 1.6475831e-01 -1.9944466e+00 -1.0337468e+00 1.3561217e+00 -2.7199563e-01 1.9433495e+00 2.8880337e-01]] [[-5.0436950e+00 -5.4074490e-01 -3.5478230e+00 -2.7196190e+00 -1.3144863e+00 4.8032337e-01 4.9983397e-01 -3.4099994e+00 -7.6448691e-01 1.2952222e+00] [ 2.5358471e-01 1.0790972e+00 -2.8827140e+00 -2.8997231e+00 -1.1496319e-01 4.4246322e-01 -1.8777853e+00 2.2742910e+00 -2.0227826e+00 7.4126613e-01] [-1.2360619e+00 -2.5505614e+00 -1.1257643e+00 -2.7437327e+00 -2.8258913e+00 7.5595016e+00 9.3596652e-02 -4.1714850e-01 -1.5395526e+00 8.6575025e-01] [ 2.1036625e+00 -1.5453223e+00 2.8238542e+00 -2.4565892e-01 1.8553201e+00 -1.6333132e+00 9.2456293e-01 -2.1263838e+00 4.1264367e-01 2.6444790e+00] [-1.1682670e+00 -7.3352545e-02 4.7137918e+00 3.0965176e+00 -1.7045417e+00 -3.0559466e+00 2.1552920e+00 9.2485166e-01 3.4637647e+00 -9.2014790e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [0, 0], 'dilations': [1, 1], 'groups': 1, 'output_padding': [0, 0], 'transposed': True} 1] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_684.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.01 * 7.9724 (2,1,.,.) = 0.2716 (3,1,.,.) = -0.1566 [ CPUFloatType{3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[ 0.52079076 0. -0.20163853 0. -0.43542343 0. 0.7624256 0. -0.5233125 0. 0.15586618 0. 0.01695061 0. -0.1353897 0. -0.23946688 0. 0.01117885] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.3093691 0. -0.06839533 0. -0.15661928 0. 0.01159517 0. -0.4357558 0. 0.29386148 0. -0.11816562 0. -0.6075041 0. 0.10775602 0. 0.04944444] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.26742315 0. -0.02396385 0. -0.1638431 0. 0.49362832 0. -0.22925232 0. 0.40487236 0. -0.0046766 0. 0.4612815 0. 0.23325448 0. -0.09319663] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.10166601 0. -0.23302907 0. -0.27235022 0. -0.2679318 0. 0.07148396 0. -0.7095061 0. 0.49760997 0. -0.25114065 0. -0.03141285 0. 0.8513894 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.18659505 0. -0.4083013 0. 0.7734347 0. -0.5524078 0. 0.20739439 0. 0.21523315 0. -0.35505497 0. -0.38977924 0. 0.01843891 0. -0.13068204] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.06139326 0. -0.5766117 0. -0.1184385 0. 
0.15972482 0. -0.43993515 0. -0.28249353 0. 0.15884723 0. -0.1653519 0. -0.8220966 0. 0.02762798] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.01442475 0. -0.18893455 0. -0.01770894 0. 0.08671967 0. 0.17048088 0. 0.20233361 0. -0.06607748 0. -0.22778931 0. 0.56544703 0. -0.15318507] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.28300756 0. -0.1643033 0. 0.01275862 0. -0.03592987 0. 0.4782479 0. 0.28587613 0. 0.09573939 0. 0.18346655 0. -0.17104203 0. 0.2567375 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.31796235 0. 0.21289846 0. 0.16181403 0. -0.14605731 0. 0.07538124 0. 0.49682194 0. 0.03641859 0. 0.01458218 0. 0.35109663 0. -0.31244373] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.052003 0. 0.25211078 0. 0.07960951 0. 0.43181455 0. -0.1291355 0. -0.19065645 0. 0.00252662 0. 0.35840362 0. -0.14192617 0. -0.11272252]]]]; ov_res: [[[[ 0.52079076 0. -0.20163853 0. -0.43542343 0. 0.7624256 0. -0.5233125 0. 0.15586618 0. 0.01695061 0. -0.1353897 0. -0.23946688 0. 0.01117885] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.3093691 0. -0.06839533 0. -0.15661928 0. 0.01159517 0. -0.4357558 0. 0.29386148 0. -0.11816562 0. -0.6075041 0. 0.10775602 0. 0.04944444] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.26742315 0. -0.02396385 0. -0.1638431 0. 0.49362832 0. -0.22925232 0. 0.40487236 0. -0.0046766 0. 0.4612815 0. 0.23325448 0. -0.09319663] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.10166601 0. -0.23302907 0. -0.27235022 0. -0.2679318 0. 0.07148396 0. -0.7095061 0. 0.49760997 0. -0.25114065 0. -0.03141285 0. 0.8513894 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.18659505 0. -0.4083013 0. 0.7734347 0. -0.5524078 0. 0.20739439 0. 0.21523315 0. -0.35505497 0. -0.38977924 0. 0.01843891 0. -0.13068204] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.06139326 0. -0.5766117 0. -0.1184385 0. 
0.15972482 0. -0.43993515 0. -0.28249353 0. 0.15884723 0. -0.1653519 0. -0.8220966 0. 0.02762798] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [-0.01442475 0. -0.18893455 0. -0.01770894 0. 0.08671967 0. 0.17048088 0. 0.20233361 0. -0.06607748 0. -0.22778931 0. 0.56544703 0. -0.15318507] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.28300756 0. -0.1643033 0. 0.01275862 0. -0.03592987 0. 0.4782479 0. 0.28587613 0. 0.09573939 0. 0.18346655 0. -0.17104203 0. 0.2567375 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.31796235 0. 0.21289846 0. 0.16181403 0. -0.14605731 0. 0.07538124 0. 0.49682194 0. 0.03641859 0. 0.01458218 0. 0.35109663 0. -0.31244373] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0.052003 0. 0.25211078 0. 0.07960951 0. 0.43181455 0. -0.1291355 0. -0.19065645 0. 0.00252662 0. 0.35840362 0. -0.14192617 0. -0.11272252]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution2d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'bias_shape': [1], 'pads': [1, 1], 'dilations': [2, 2], 'groups': 1, 'output_padding': [1, 1], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_686.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 1]]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.2409 (2,1,.,.) = 0.3967 (3,1,.,.) = -0.1275 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.136057 0. -0.50460774 0. -0.16706789 0. -0.50726545 0. -0.14173135 0. -0.24832952 0. 0.40174833 0. -0.6775882 0. -0.16722195] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.15650176 0. 0.36176366 0. 0.3406719 0. -0.689085 0. 0.16483118 0. -0.24664636 0. 0.26591605 0. -0.8544998 0. 0.20651087] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.45201504 0. 0.2304317 0. -0.47370988 0. 0.6365389 0. -0.33797967 0. 0.15518957 0. -0.00961449 0. -0.4750886 0. -0.5367456 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.13503018 0. 0.2149444 0. 0.5947081 0. -0.61377925 0. -0.557406 0. -0.3574713 0. -0.73492277 0. 0.24003552 0. 0.21965653] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.3371537 0. 0.46475947 0. -0.7821573 0. -0.11716036 0. -1.4408017 0. -0.5679279 0. -0.19637904 0. -0.01832069 0. -0.7344161 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.46377394 0. -0.50349027 0. -0.08730527 0. 0.5728105 0. 0.73251295 0. 0.6823917 0. -0.34911433 0. 0.31159174 0. 
0.3625373 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.3357283 0. -0.05658957 0. 0.22495158 0. -0.33626416 0. -0.5746599 0. -0.18402563 0. 0.09563533 0. -0.7900935 0. -0.49150383] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.5147158 0. -0.6390505 0. -0.47228625 0. 1.12536 0. 0.3500432 0. 0.32180178 0. -0.1024104 0. 0.4282136 0. -0.6080053 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.02075227 0. -0.5086681 0. 0.27845144 0. -0.26166677 0. -0.7354475 0. -0.7147403 0. 0.01203312 0. 0.28139123 0. -0.18659362]]]]; ov_res: [[[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.136057 0. -0.50460774 0. -0.16706789 0. -0.50726545 0. -0.14173135 0. -0.24832952 0. 0.40174833 0. -0.6775882 0. -0.16722195] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.15650176 0. 0.36176366 0. 0.3406719 0. -0.689085 0. 0.16483118 0. -0.24664636 0. 0.26591605 0. -0.8544998 0. 0.20651087] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.45201504 0. 0.2304317 0. -0.47370988 0. 0.6365389 0. -0.33797967 0. 0.15518957 0. -0.00961449 0. -0.4750886 0. -0.5367456 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.13503018 0. 0.2149444 0. 0.5947081 0. -0.61377925 0. -0.557406 0. -0.3574713 0. -0.73492277 0. 0.24003552 0. 0.21965653] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.3371537 0. 0.46475947 0. -0.7821573 0. -0.11716036 0. -1.4408017 0. -0.5679279 0. -0.19637904 0. -0.01832069 0. -0.7344161 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.46377394 0. -0.50349027 0. -0.08730527 0. 0.5728105 0. 0.73251295 0. 0.6823917 0. -0.34911433 0. 0.31159174 0. 0.3625373 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0.3357283 0. -0.05658957 0. 0.22495158 0. -0.33626416 0. -0.5746599 0. -0.18402563 0. 0.09563533 0. -0.7900935 0. -0.49150383] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.5147158 0. 
-0.6390505 0. -0.47228625 0. 1.12536 0. 0.3500432 0. 0.32180178 0. -0.1024104 0. 0.4282136 0. -0.6080053 ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. -0.02075227 0. -0.5086681 0. 0.27845144 0. -0.26166677 0. -0.7354475 0. -0.7147403 0. 0.01203312 0. 0.28139123 0. -0.18659362]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 2], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_688.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.7964 -1.6312 -0.4483 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, strides=[6, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 1.0100019 -0.76661366 0.36767015 -1.4863939 1.6750672 1.0666476 1.6592402 2.2718546 -0.79749244 1.9834158 -0.8050813 ] [-2.8751159 -4.3078995 -0.08997965 1.4966896 0.14871955 -4.322876 0.31668723 -3.5482726 -0.8966199 -3.859481 0.47488976] [ 2.0608358 1.3861443 0.09499496 -1.705599 -0.90999347 1.2534884 -4.154481 0.1397686 -0.18203151 1.9465982 -1.1775849 ]]]; ov_res: [[[ 1.0100019 -0.7666134 0.36767012 -1.4863939 1.6750671 1.0666475 1.6592404 2.2718549 -0.79749244 1.9834158 -0.8050813 ] [-2.8751159 -4.3078995 -0.08997965 1.4966896 0.14871967 -4.322876 0.31668723 -3.5482726 -0.8966198 -3.8594809 0.47488976] [ 2.0608358 1.3861442 0.09499496 -1.7055991 -0.90999347 1.2534884 -4.154481 0.13976854 -0.18203157 1.9465982 -1.1775849 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 2], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_690.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.7082 -2.6009 2.5124 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, strides=[6, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[ 2.4415207e-01 -2.6945331e+00 2.4491835e+00 1.9253776e+00 -8.2616794e-01 -3.4462085e-01 -8.4557104e-01 9.8426610e-01 2.2264779e+00] [-4.3891215e-01 -3.3417451e+00 -2.4804602e+00 -2.2347348e+00 -4.0662980e+00 -1.7513691e+00 -2.4466064e+00 -2.9442363e+00 -1.1638992e+00] [ 3.4318759e+00 8.7344580e+00 -4.1620731e-03 7.0625081e+00 3.7980070e+00 1.4551401e-02 4.7260208e+00 5.2401960e-01 8.8090408e-01]]]; ov_res: [[[ 2.4415207e-01 -2.6945331e+00 2.4491835e+00 1.9253776e+00 -8.2616794e-01 -3.4462088e-01 -8.4557104e-01 9.8426598e-01 2.2264779e+00] [-4.3891215e-01 -3.3417451e+00 -2.4804602e+00 -2.2347348e+00 -4.0662980e+00 -1.7513691e+00 -2.4466064e+00 -2.9442363e+00 -1.1638992e+00] [ 3.4318757e+00 8.7344580e+00 -4.1623116e-03 7.0625076e+00 3.7980070e+00 1.4551640e-02 4.7260213e+00 5.2401972e-01 8.8090408e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_692.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.1297 -0.5212 -0.0996 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.8707 (2,.,.) = -0.6760 (3,.,.) = -0.7009 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[-1.5604498 -0.6367279 -0.96565926 0.03358707 -0.28177893 -2.402182 -0.66045725 -0.40755948 -0.5882527 -1.0010538 ] [-1.0966885 -0.7492199 0.6721538 -0.5556569 -0.0738253 -0.52967495 -0.17841753 -0.40557575 -0.5193844 -0.8273339 ] [ 0.8743956 -1.0484008 -0.60315895 -0.8026236 0.39286548 0.00763872 0.47950223 -0.9605341 1.8023694 -0.08000506]]]; ov_res: [[[-1.5604498 -0.6367279 -0.96565926 0.03358707 -0.28177893 -2.402182 -0.66045725 -0.40755948 -0.5882527 -1.0010538 ] [-1.0966885 -0.7492199 0.6721538 -0.5556569 -0.0738253 -0.52967495 -0.17841753 -0.40557575 -0.5193844 -0.8273339 ] [ 0.8743956 -1.0484008 -0.60315895 -0.8026236 0.39286548 0.00763872 0.47950223 -0.9605341 1.8023694 -0.08000506]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_694.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-2.7369 -0.0957 -0.4259 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.9518 (2,.,.) = 0.2533 (3,.,.) = 1.4848 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[-3.3807986 -2.292914 -2.265554 -2.759002 -2.4779925 -4.3036356 -3.6622677 -1.7958761 -2.8958528 -1.6882542 ] [-0.23884098 -0.27801535 0.07499319 -0.30625948 -0.0291483 0.01185219 -0.03145655 -0.24305329 -0.21005201 0.13783383] [-2.373762 -0.14263016 -0.5590617 -1.1660839 -2.7052073 -2.9505002 -0.08969235 0.37288564 -0.55358034 -3.4685123 ]]]; ov_res: [[[-3.3807986 -2.292914 -2.265554 -2.759002 -2.4779925 -4.3036356 -3.6622677 -1.7958761 -2.8958528 -1.6882542 ] [-0.23884098 -0.27801535 0.07499319 -0.30625948 -0.0291483 0.01185219 -0.03145655 -0.24305329 -0.21005201 0.13783383] [-2.373762 -0.14263016 -0.5590617 -1.1660839 -2.7052073 -2.9505002 -0.08969235 0.37288564 -0.55358034 -3.4685123 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 0] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_696.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-2.08041}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 2.1669 (2,.,.) = -0.1214 (3,.,.) = 0.01 * -3.0565 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[-4.1671066 0.21635556 -3.1438267 -2.9534519 -3.3642893 1.6223111 -5.1560183 -1.3957062 ]]]; ov_res: [[[-4.1671066 0.21635556 -3.1438267 -2.9534519 -3.3642893 1.6223111 -5.1560183 -1.3957062 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_698.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.1473 -0.1059 0.2338 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.7314 0.5329 0.4958 (2,.,.) = -0.1938 0.6881 -0.8796 (3,.,.) = -0.5561 -0.5474 -1.2593 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[-1.1473413 -2.982027 -3.324609 -3.1396222 -4.246998 -3.245186 -2.597249 -1.4652735 1.7972522 -3.6466138 -4.288123 -1.1473413 ] [-0.10594387 0.48822045 0.9308216 -0.64652056 1.6845834 -0.44745865 -0.04804748 -0.7364888 -0.944093 -0.08656245 -0.60715353 -0.10594387] [ 0.23378001 0.6053433 2.317598 -0.37232685 1.608266 1.443067 -1.6353086 0.83806366 -2.9504101 1.1836612 2.2794268 0.23378001]]]; ov_res: [[[-1.1473413 -2.982027 -3.324609 -3.1396222 -4.246998 -3.245186 -2.597249 -1.4652735 1.7972522 -3.6466138 -4.288123 -1.1473413 ] [-0.10594387 0.48822045 0.9308216 -0.64652056 1.6845834 -0.44745865 -0.04804748 -0.7364888 -0.944093 -0.08656245 -0.60715353 -0.10594387] [ 0.23378001 0.6053433 2.317598 -0.37232685 1.608266 1.443067 -1.6353086 0.83806366 -2.9504101 1.1836612 2.2794268 0.23378001]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [3], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_700.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.21462}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.6295 (2,.,.) = 0.4035 (3,.,.) = 0.3538 [ CPUFloatType{3,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[-0.95004255 -1.2850239 -0.21602541 -1.6915151 ]]]; ov_res: [[[-0.95004255 -1.2850239 -0.21602541 -1.6915151 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [3], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_702.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.2666 1.6226 -0.5738 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.3716 -0.5120 0.0786 (2,.,.) = -0.9164 -0.4620 -0.1009 (3,.,.) = 2.8665 -1.2406 0.9910 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[-1.2665642 -1.2665642 -1.2665642 -3.028481 -1.0606555 -1.1123933 -1.4309392 -0.5682314 -0.20326066 -0.47935033 -0.63854223 -1.363928 -1.6637341 -1.2665642 -1.2665642 -1.2665642 ] [ 1.6226363 1.6226363 1.6226363 2.0514016 2.3171334 0.99499893 2.9447412 3.104353 0.8080301 0.95045024 1.5772878 2.2012868 0.33696723 1.6226363 1.6226363 1.6226363 ] [-0.57383215 -0.57383215 -0.57383215 -7.47057 -0.38393933 1.030744 -4.067832 -0.947286 5.735759 3.87528 1.4604982 -2.056477 -0.13266891 -0.57383215 -0.57383215 -0.57383215]]]; ov_res: [[[-1.2665642 -1.2665642 -1.2665642 -3.028481 -1.0606555 -1.1123933 -1.4309392 -0.5682314 -0.20326066 -0.47935033 -0.63854223 -1.363928 -1.6637341 -1.2665642 -1.2665642 -1.2665642 ] [ 1.6226363 1.6226363 1.6226363 2.0514016 2.3171334 0.99499893 2.9447412 3.104353 0.8080301 0.95045024 1.5772878 2.2012868 0.33696723 1.6226363 1.6226363 
1.6226363 ] [-0.57383215 -0.57383215 -0.57383215 -7.47057 -0.38393933 1.030744 -4.067832 -0.947286 5.735759 3.87528 1.4604982 -2.056477 -0.13266891 -0.57383215 -0.57383215 -0.57383215]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 1] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_704.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.284073}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.1684 (2,.,.) = 0.7379 (3,.,.) = -0.9733 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 1.6739212 -0.6485734 0.22603017 -0.8979016 -2.7718096 -0.7558998 0.5695642 0.5601061 ]]]; ov_res: [[[ 1.6739212 -0.6485734 0.22603017 -0.8979016 -2.7718096 -0.7558998 0.5695642 0.5601061 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 0] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_706.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.9241 0.3203 -0.9523 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.7897 -0.4134 -1.6526 (2,.,.) = 0.5751 0.8918 -1.1042 (3,.,.) = -1.1286 -0.4065 0.1414 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[-0.989958 0.47676983 2.5085814 5.2706413 1.0802188 4.7165112 1.8774456 2.9508655 -1.5775607 2.104473 ] [-0.17299217 -0.4311409 2.646652 4.1316595 1.6502043 0.63205373 0.48471954 0.3209886 -0.1294876 -1.1038824 ] [ 0.7947824 -2.0190334 -2.8051987 -3.2462354 -0.61860645 -3.1703076 -1.03951 -1.0664735 -0.42530298 -1.1358817 ]]]; ov_res: [[[-0.989958 0.47676983 2.5085814 5.2706413 1.0802188 4.7165112 1.8774456 2.9508655 -1.5775607 2.104473 ] [-0.17299217 -0.4311409 2.646652 4.1316595 1.6502043 0.63205373 0.48471954 0.3209886 -0.1294876 -1.1038824 ] [ 0.7947824 -2.0190334 -2.8051987 -3.2462354 -0.61860645 -3.1703076 -1.03951 -1.0664735 -0.42530298 -1.1358817 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_708.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.5951 0.7390 0.1623 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -1.2901 (2,.,.) = 0.8121 (3,.,.) = -0.1840 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[-0.49990362 0.06318533 0.29997098 -0.5898923 0.56674904 2.123401 -0.4683388 1.777945 ] [ 1.1266971 -0.987123 1.5602651 0.4543969 0.50671005 -0.09803003 1.3978453 0.5034929 ] [ 0.41785705 0.34247062 0.09356118 0.09148332 0.04490993 0.09022375 0.22629009 0.4558617 ]]]; ov_res: [[[-0.49990362 0.06318533 0.29997098 -0.5898923 0.56674904 2.123401 -0.4683388 1.777945 ] [ 1.1266971 -0.987123 1.5602651 0.4543969 0.50671005 -0.09803003 1.3978453 0.5034929 ] [ 0.41785705 0.34247062 0.09356118 0.09148332 0.04490993 0.09022375 0.22629009 0.4558617 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_710.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.3181 -0.9669 -0.0156 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.9184 (2,.,.) = 0.01 * 4.4862 (3,.,.) = 0.5383 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[-1.31806457e+00 -1.36931443e+00 -2.17943072e+00 5.91195107e-01 -2.51747513e+00 -8.85791779e-02 -1.53526616e+00 -1.25585246e+00 -2.96439552e+00 -1.29148841e+00 -1.69859552e+00 -1.31806457e+00] [-9.66945767e-01 -1.09899354e+00 -1.00569999e+00 -1.00722432e+00 -9.63965952e-01 -9.77581918e-01 -9.54445064e-01 -1.05682683e+00 -9.58841681e-01 -9.72331285e-01 -9.64877903e-01 -9.66945767e-01] [-1.55645208e-02 -1.62862301e-01 -1.21873915e-01 2.53035516e-01 -4.17701244e-01 1.51031185e-03 5.58747828e-01 -5.33945739e-01 2.66425818e-01 4.21782792e-01 3.53176743e-01 -1.55645208e-02]]]; ov_res: [[[-1.3180646e+00 -1.3693146e+00 -2.1794307e+00 5.9119517e-01 -2.5174751e+00 -8.8579237e-02 -1.5352662e+00 -1.2558525e+00 -2.9643955e+00 -1.2914884e+00 -1.6985955e+00 -1.3180646e+00] [-9.6694577e-01 -1.0989935e+00 -1.0057000e+00 -1.0072243e+00 -9.6396595e-01 -9.7758192e-01 -9.5444506e-01 -1.0568268e+00 -9.5884168e-01 -9.7233129e-01 -9.6487790e-01 -9.6694577e-01] 
[-1.5564521e-02 -1.6286230e-01 -1.2187391e-01 2.5303555e-01 -4.1770121e-01 1.5103120e-03 5.5874783e-01 -5.3394574e-01 2.6642582e-01 4.2178279e-01 3.5317674e-01 -1.5564521e-02]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [2], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_712.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.1631 0.1391 0.6089 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.01 * 2.9572 (2,.,.) = 0.1723 (3,.,.) = -1.3996 [ CPUFloatType{3,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[-1.1731257 -1.132314 -1.1734091 -1.1404395 -1.2087524 -1.1640502 -1.1432557 -1.1786425 ] [ 0.07023937 0.34981948 0.23197487 0.33286938 0.01146598 0.22354245 0.23674786 0.31271163] [-0.6465818 -1.1765568 1.7025323 1.9371178 -2.0241063 3.3136609 1.6042862 -0.5301098 ]]]; ov_res: [[[-1.1731257 -1.132314 -1.1734091 -1.1404395 -1.2087524 -1.1640502 -1.1432557 -1.1786425 ] [ 0.07023937 0.34981948 0.23197487 0.33286938 0.01146598 0.22354245 0.23674786 0.31271163] [-0.6465818 -1.1765568 1.7025323 1.9371178 -2.0241063 3.3136609 1.6042862 -0.5301098 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [2], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_714.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.3906 1.1893 0.1185 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -2.4850 (2,.,.) = 0.6901 (3,.,.) = -0.1426 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 2.6997468 -1.3925225 2.007581 1.3263527 -1.9062694 -0.28076965 -1.578104 2.45922 1.01814 -2.7443979 ] [ 1.3220934 1.5486563 1.4587008 1.2399497 -0.00795732 1.0116853 0.95726645 1.3330526 0.00897338 -0.42192173] [ 0.03709175 0.06602138 0.19215295 0.02401738 0.14015444 0.4239465 0.46804744 0.18382643 0.21091086 -0.23003942]]]; ov_res: [[[ 2.6997468 -1.3925225 2.007581 1.3263527 -1.9062694 -0.28076965 -1.578104 2.45922 1.01814 -2.7443979 ] [ 1.3220934 1.5486563 1.4587008 1.2399497 -0.00795732 1.0116853 0.95726645 1.3330526 0.00897338 -0.42192173] [ 0.03709175 0.06602138 0.19215295 0.02401738 0.14015444 0.4239465 0.46804744 0.18382643 0.21091086 -0.23003942]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_716.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.735021}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.0141 (2,.,.) = -0.3586 (3,.,.) = 0.01 * 5.2930 [ CPUFloatType{3,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.pads, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[ 0.7350214 -0.5134453 0.7350214 2.7962697 0.7350214 2.1617787 0.7350214 0.3065503 0.7350214 0.3489995 0.7350214 1.7431672 0.7350214 -0.29692107 0.7350214 -0.16479999 0.7350214 ]]]; ov_res: [[[ 0.7350214 -0.5134453 0.7350214 2.7962697 0.7350214 2.1617787 0.7350214 0.3065503 0.7350214 0.3489995 0.7350214 1.7431672 0.7350214 -0.29692107 0.7350214 -0.16479999 0.7350214 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [2], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 0] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_718.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-2.2728 -1.3834 1.1428 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.9488 -1.4163 1.2478 (2,.,.) = 0.9494 -0.8382 0.4537 (3,.,.) = -0.0309 -1.3892 0.5467 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[-2.643148 -1.5674928 -2.0007327 -0.6995946 1.5448389 ] [-1.463884 -0.8498668 -1.5779266 -0.24642587 0.84520316] [ 1.3410785 1.7034523 2.342246 1.3687751 3.1665542 ]]]; ov_res: [[[-2.643148 -1.5674928 -2.0007327 -0.6995946 1.5448389 ] [-1.463884 -0.8498668 -1.5779266 -0.24642587 0.84520316] [ 1.3410785 1.7034523 2.342246 1.3687751 3.1665542 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 0] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_720.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.00628112}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.7789 (2,.,.) = -0.3977 (3,.,.) = 1.2588 [ CPUFloatType{3,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[-0.53922474 0.00628112 -0.14850086 0.00628112 0.69912374 0.00628112 -0.37177804 0.00628112 -0.10285675 0.00628112 -0.27410161 0.00628112 -1.4734522 0.00628112 0.26771396 0.00628112 -0.99524856 0.00628112 -3.3708405 ]]]; ov_res: [[[-0.53922474 0.00628112 -0.14850086 0.00628112 0.69912374 0.00628112 -0.37177804 0.00628112 -0.10285675 0.00628112 -0.27410161 0.00628112 -1.4734522 0.00628112 0.26771396 0.00628112 -0.99524856 0.00628112 -3.3708405 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [2], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 1] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_722.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.3576 0.8778 1.5035 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 2.8320 0.2636 -0.2392 (2,.,.) = -0.4043 -0.8191 1.1239 (3,.,.) = 0.3155 -0.6416 0.5570 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 7.667857 -2.5345595 -0.7401982 -3.473865 2.8657312] [-2.0085225 3.351214 1.4566057 1.0364486 1.1467812] [ 1.1257464 2.5361586 2.143928 1.1097169 2.1309118]]]; ov_res: [[[ 7.667857 -2.5345595 -0.7401982 -3.473865 2.8657312] [-2.0085225 3.351214 1.4566057 1.0364486 1.1467812] [ 1.1257464 2.5361586 2.143928 1.1097169 2.1309118]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 1] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_724.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.0252 -0.5188 0.7649 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.0028 -0.8298 1.2729 (2,.,.) = 0.2387 -0.5605 -0.5245 (3,.,.) = 0.7637 0.9508 -0.2441 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[-1.639757 0.48515224 -0.7810353 -2.3443766 1.3464483 2.3412337 -1.4814726 3.388773 1.7883272 -0.86778474] [-1.0265367 -1.444803 -0.6012676 0.6639206 -0.19498965 -0.18993253 0.11423266 -1.1536965 -0.24035415 0.43748885] [ 0.06211227 2.2332146 1.7100501 0.12520474 0.37651315 -1.4816866 -0.75125897 0.9527703 -1.6237428 3.3074942 ]]]; ov_res: [[[-1.639757 0.48515224 -0.7810353 -2.3443766 1.3464483 2.3412337 -1.4814726 3.388773 1.7883272 -0.86778474] [-1.0265367 -1.444803 -0.6012676 0.6639206 -0.19498965 -0.18993253 0.11423266 -1.1536965 -0.24035415 0.43748885] [ 0.06211227 2.2332146 1.7100501 0.12520474 0.37651315 -1.4816866 -0.75125897 0.9527703 -1.6237428 3.3074942 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 1] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_726.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.0583006}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.1213 (2,.,.) = -0.3034 (3,.,.) = 0.7700 [ CPUFloatType{3,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[-0.00987405 -0.05830058 -1.0846976 -0.05830058 -1.3476038 -0.05830058 -0.2877383 -0.05830058 1.6801175 -0.05830058 0.55340207 -0.05830058 0.21648562 -0.05830058 -0.22528377 -0.05830058 0.2655298 -0.05830058 -1.4701933 ]]]; ov_res: [[[-0.00987405 -0.05830058 -1.0846976 -0.05830058 -1.3476038 -0.05830058 -0.2877383 -0.05830058 1.6801175 -0.05830058 0.55340207 -0.05830058 0.21648562 -0.05830058 -0.22528377 -0.05830058 0.2655298 -0.05830058 -1.4701933 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [1], 'dilations': [2], 'groups': 1, 'output_padding': [1], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_728.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.0924909}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.5973 (2,.,.) = -1.0627 (3,.,.) = 1.2104 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 0.09249085 -0.15325539 0.09249085 3.2156203 0.09249085 -0.44360572 0.09249085 -0.23921257 0.09249085 1.5331421 0.09249085 1.9209752 0.09249085 1.175174 0.09249085 -0.30393836 0.09249085 5.345484 ]]]; ov_res: [[[ 0.09249085 -0.15325539 0.09249085 3.2156203 0.09249085 -0.44360572 0.09249085 -0.23921257 0.09249085 1.5331421 0.09249085 1.9209752 0.09249085 1.175174 0.09249085 -0.30393836 0.09249085 5.345484 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 2], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_730.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, strides=[6, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[-4.493483 -1.3107848 2.0135436 -0.79062414 0.1440854 0.6805093 2.1408923 -6.397982 -1.1944208 2.107365 0.4503133 ] [-0.0562839 -0.42423847 0.9285964 -1.5843374 -2.1598408 2.1036806 -3.558379 0.51367486 0.7066089 0.25286195 0.26858047] [-0.47694233 -1.9719001 -0.1088024 -1.6832445 -0.04432525 0.58048254 1.2899947 -1.9499563 -1.318537 0.08165218 0.84539735]]]; ov_res: [[[-4.493483 -1.3107847 2.0135436 -0.7906242 0.14408533 0.6805093 2.1408923 -6.397982 -1.1944208 2.107365 0.4503133 ] [-0.0562839 -0.42423847 0.92859644 -1.5843372 -2.1598408 2.1036806 -3.558379 0.51367486 0.70660883 0.25286195 0.26858047] [-0.47694233 -1.9719001 -0.1088024 -1.6832445 -0.04432528 0.5804825 1.2899948 -1.9499563 -1.3185371 0.08165216 0.84539735]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 2], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_732.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, strides=[6, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[-1.0051166 -2.2612991 -1.6172494 -1.9067315 -1.7497702 2.1880717 1.3442377 0.73800266 -0.25740433] [-0.29753765 0.39438868 -0.7341647 0.64467764 -3.7797182 0.72978324 1.1248951 0.47447145 1.070449 ] [-0.6133734 -0.2471858 -1.659396 -0.9785625 -3.243633 0.6364899 3.3665333 0.22672945 0.09946355]]]; ov_res: [[[-1.0051167 -2.261299 -1.6172493 -1.9067316 -1.7497702 2.1880717 1.3442376 0.73800266 -0.2574043 ] [-0.29753765 0.39438868 -0.7341647 0.64467776 -3.7797182 0.72978324 1.1248951 0.4744714 1.070449 ] [-0.6133734 -0.24718586 -1.6593962 -0.9785625 -3.243633 0.63649 3.3665333 0.2267294 0.09946355]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_734.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.1856 (2,.,.) = 0.9793 (3,.,.) = 0.9962 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 0.16686781 0.28341275 0.4068214 0.07603738 -0.00782965 0.02644285 -0.06005413 -0.30701116 -0.1341623 -0.23980762] [-1.9590676 0.9296714 -0.07889515 -0.9429703 -0.28056794 0.39526197 0.31141055 1.3760418 -0.53208286 -0.53056395] [ 0.91092527 1.0059482 0.22165756 0.67032933 -1.376353 -1.4929088 0.21733877 -0.47276735 0.02038828 0.22557338]]]; ov_res: [[[ 0.16686781 0.28341275 0.4068214 0.07603738 -0.00782965 0.02644285 -0.06005413 -0.30701116 -0.1341623 -0.23980762] [-1.9590676 0.9296714 -0.07889515 -0.9429703 -0.28056794 0.39526197 0.31141055 1.3760418 -0.53208286 -0.53056395] [ 0.91092527 1.0059482 0.22165756 0.67032933 -1.376353 -1.4929088 0.21733877 -0.47276735 0.02038828 0.22557338]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_736.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.01 * 2.7628 (2,.,.) = 0.2076 (3,.,.) = 0.7267 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[-5.8239447e-03 -4.6940867e-02 -2.2881394e-02 7.2358636e-04 -3.0853098e-02 -6.4745164e-03 8.8610835e-03 7.6015396e-03 -8.1176115e-03 -2.6356686e-02] [ 1.3735311e-01 -4.5888085e-02 1.9527861e-01 -1.7635697e-01 -5.4882672e-02 5.4567598e-04 2.6507691e-02 1.5116942e-01 7.5029403e-02 1.5508440e-01] [-9.4025928e-01 5.5122942e-01 6.8029636e-01 2.0273333e+00 -1.4536686e-01 3.4769318e-01 -1.3338940e-01 7.3810178e-01 4.8567390e-01 -4.3865934e-02]]]; ov_res: [[[-5.8239447e-03 -4.6940867e-02 -2.2881394e-02 7.2358636e-04 -3.0853098e-02 -6.4745164e-03 8.8610835e-03 7.6015396e-03 -8.1176115e-03 -2.6356686e-02] [ 1.3735311e-01 -4.5888085e-02 1.9527861e-01 -1.7635697e-01 -5.4882672e-02 5.4567598e-04 2.6507691e-02 1.5116942e-01 7.5029403e-02 1.5508440e-01] [-9.4025928e-01 5.5122942e-01 6.8029636e-01 2.0273333e+00 -1.4536686e-01 3.4769318e-01 -1.3338940e-01 7.3810178e-01 4.8567390e-01 -4.3865934e-02]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 0] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_738.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.7869 (2,.,.) = 0.4011 (3,.,.) = 0.4202 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 0.93713313 0.8632139 1.6126229 1.3315473 0.25671995 1.5574305 0.02492299 -0.7874755 ]]]; ov_res: [[[ 0.93713313 0.8632139 1.6126229 1.3315473 0.25671995 1.5574305 0.02492299 -0.7874755 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_740.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.8288 0.0891 1.4466 (2,.,.) = 0.8786 0.1037 -0.3377 (3,.,.) = -1.2594 -2.3941 -0.6058 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[ 0. -2.725253 0.2847289 1.2285712 -1.0165027 -1.6686084 3.9099987 -1.264666 1.297277 -0.92101747 -1.6224519 0. ] [ 0. 2.2548761 -0.97066575 -1.4989291 0.40635085 1.6929116 -1.2849294 0.9615125 -0.9758289 0.9831777 1.7241844 0. ] [ 0. -5.2586455 1.3473437 2.7972114 -0.8172248 -2.2355192 -3.0857465 0.95125747 -0.24055476 0.28076908 -3.7671273 0. ]]]; ov_res: [[[ 0. -2.725253 0.2847289 1.2285712 -1.0165027 -1.6686084 3.9099987 -1.264666 1.297277 -0.92101747 -1.6224519 0. ] [ 0. 2.2548761 -0.97066575 -1.4989291 0.40635085 1.6929116 -1.2849294 0.9615125 -0.9758289 0.9831777 1.7241844 0. ] [ 0. -5.2586455 1.3473437 2.7972114 -0.8172248 -2.2355192 -3.0857465 0.95125747 -0.24055476 0.28076908 -3.7671273 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [3], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_742.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.4899 (2,.,.) = -1.1932 (3,.,.) = -1.2937 [ CPUFloatType{3,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[ 1.2189919 -1.3078496 2.6524343 -0.6382438]]]; ov_res: [[[ 1.2189919 -1.3078496 2.6524343 -0.6382438]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [3], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_744.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.2623 -0.1791 0.1891 (2,.,.) = -0.2108 0.4646 0.4020 (3,.,.) = -0.6392 -0.1897 -1.6692 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 0. 0. 0. -0.00334633 -0.06201303 0.292234 -0.23914017 -0.1765379 0.6410335 0.3351268 -0.01989892 0.61933756 0.14722307 0. 0. 0. ] [ 0. 0. 0. 0.29501635 -0.04607008 -0.90079516 0.04922516 -0.73850864 0.03522916 -0.10584592 1.045315 -0.4902648 0.36586437 0. 0. 0. ] [ 0. 0. 0. -0.51069015 0.35824925 0.651456 0.94267917 2.3550186 -2.6959724 -1.170315 -2.0739105 -1.5921568 -1.3419867 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. -0.00334633 -0.06201303 0.292234 -0.23914017 -0.1765379 0.6410335 0.3351268 -0.01989892 0.61933756 0.14722307 0. 0. 0. ] [ 0. 0. 0. 0.29501635 -0.04607008 -0.90079516 0.04922516 -0.73850864 0.03522916 -0.10584592 1.045315 -0.4902648 0.36586437 0. 0. 0. ] [ 0. 0. 0. -0.51069015 0.35824925 0.651456 0.94267917 2.3550186 -2.6959724 -1.170315 -2.0739105 -1.5921568 -1.3419867 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 1] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_746.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.9909 (2,.,.) = -0.4305 (3,.,.) = 0.001 * -7.7814 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 0.80590403 0.61077887 -0.8458625 0.47616956 2.4946992 -2.4232593 1.3579652 -0.67887264]]]; ov_res: [[[ 0.80590403 0.61077887 -0.8458625 0.47616956 2.4946992 -2.4232593 1.3579652 -0.67887264]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 0] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_748.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.7454 0.0099 -0.4693 (2,.,.) = 0.01 * 2.4053 -53.6021 -67.1037 (3,.,.) = 0.4889 -0.5341 -0.9921 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[-0.3890673 0.62256527 0.92796725 1.3495746 -0.9145168 1.8799174 -0.47818974 -0.64368546 -0.4478909 0.01553183] [ 0.365965 -0.7121403 0.5421087 0.24482505 0.14170705 0.5297538 0.36086646 1.209632 -0.12523317 -0.30800268] [ 0.98973715 -1.6894069 0.56369746 -1.0148572 0.74875474 0.04958446 0.9178723 1.9012245 0.3182975 -1.0095153 ]]]; ov_res: [[[-0.3890673 0.62256527 0.92796725 1.3495746 -0.9145168 1.8799174 -0.47818974 -0.64368546 -0.4478909 0.01553183] [ 0.365965 -0.7121403 0.5421087 0.24482505 0.14170705 0.5297538 0.36086646 1.209632 -0.12523317 -0.30800268] [ 0.98973715 -1.6894069 0.56369746 -1.0148572 0.74875474 0.04958446 0.9178723 1.9012245 0.3182975 -1.0095153 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_750.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.3489 (2,.,.) = -0.5609 (3,.,.) = 0.3484 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 0.01222265 -0.30063868 -0.21172439 0.3386126 0.39241964 -0.7498175 0.11237023 0.08689 ] [ 0.1202004 -0.6417945 -0.00278472 -0.26596147 0.23363689 0.06556455 1.0549506 0.04647134] [ 0.14958148 0.4326103 -0.1701161 0.2868321 0.4600044 -0.35456875 0.01422914 -0.40747648]]]; ov_res: [[[ 0.01222265 -0.30063868 -0.21172439 0.3386126 0.39241964 -0.7498175 0.11237023 0.08689 ] [ 0.1202004 -0.6417945 -0.00278472 -0.26596147 0.23363689 0.06556455 1.0549506 0.04647134] [ 0.14958148 0.4326103 -0.1701161 0.2868321 0.4600044 -0.35456875 0.01422914 -0.40747648]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_752.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.3898 (2,.,.) = -0.6780 (3,.,.) = 1.9594 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[ 0. 0.0960154 0.6191441 1.281099 -0.16073702 -0.11885945 0.26148263 0.02829712 0.5510316 0.0723954 0.53417456 0. ] [ 0. 0.07871029 -0.26030818 0.09683836 -1.1130415 -0.14355154 -0.5268323 -0.43644723 -0.60811216 0.02967306 0.27960658 0. ] [ 0. 3.5034556 -1.1183639 1.1870965 1.1387389 1.8546277 0.6342021 1.1561606 -0.433545 4.0556254 -0.5864616 0. ]]]; ov_res: [[[ 0. 0.0960154 0.6191441 1.281099 -0.16073702 -0.11885945 0.26148263 0.02829712 0.5510316 0.0723954 0.53417456 0. ] [ 0. 0.07871029 -0.26030818 0.09683836 -1.1130415 -0.14355154 -0.5268323 -0.43644723 -0.60811216 0.02967306 0.27960658 0. ] [ 0. 3.5034556 -1.1183639 1.1870965 1.1387389 1.8546277 0.6342021 1.1561606 -0.433545 4.0556254 -0.5864616 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [2], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_754.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.5970 (2,.,.) = -0.2000 (3,.,.) = -1.0363 [ CPUFloatType{3,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[ 0.08071904 -0.7918202 -0.80043167 0.7704309 0.11637066 -0.1378769 0.18313543 0.80088806] [ 0.00599241 0.26798338 -0.1014278 0.33977976 -0.14596803 -0.34560436 0.22814898 -0.3115027 ] [-0.23543254 -0.9106379 -0.0210885 1.0346396 0.1696893 -0.06359198 0.00850888 -1.097364 ]]]; ov_res: [[[ 0.08071904 -0.7918202 -0.80043167 0.7704309 0.11637066 -0.1378769 0.18313543 0.80088806] [ 0.00599241 0.26798338 -0.1014278 0.33977976 -0.14596803 -0.34560436 0.22814898 -0.3115027 ] [-0.23543254 -0.9106379 -0.0210885 1.0346396 0.1696893 -0.06359198 0.00850888 -1.097364 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [2], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_756.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.1348 (2,.,.) = -1.7465 (3,.,.) = 0.01 * 1.8297 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 1.17060445e-01 2.98664533e-02 -1.50688887e-01 -1.18160294e-02 -1.55070633e-01 1.04146667e-01 2.18061060e-01 -4.51275170e-01 1.87531665e-01 7.10793724e-03] [-1.58695877e+00 -3.62357259e-01 3.24798751e+00 -2.85277319e+00 -1.88577330e+00 1.53486347e+00 -2.22804213e+00 -7.50011384e-01 -8.26571703e-01 -6.25544861e-02] [-6.37934485e-04 -1.18899690e-02 5.81672508e-03 -3.02645750e-02 1.85089428e-02 1.93023179e-02 6.94712764e-03 5.34638437e-03 -3.65078487e-02 -3.37547772e-02]]]; ov_res: [[[ 1.17060445e-01 2.98664533e-02 -1.50688887e-01 -1.18160294e-02 -1.55070633e-01 1.04146667e-01 2.18061060e-01 -4.51275170e-01 1.87531665e-01 7.10793724e-03] [-1.58695877e+00 -3.62357259e-01 3.24798751e+00 -2.85277319e+00 -1.88577330e+00 1.53486347e+00 -2.22804213e+00 -7.50011384e-01 -8.26571703e-01 -6.25544861e-02] [-6.37934485e-04 -1.18899690e-02 5.81672508e-03 -3.02645750e-02 1.85089428e-02 1.93023179e-02 6.94712764e-03 5.34638437e-03 -3.65078487e-02 -3.37547772e-02]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_758.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.01 * 9.1051 (2,.,.) = -0.4358 (3,.,.) = -1.3428 [ CPUFloatType{3,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.pads, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) SSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with 
schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug 
mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to castfw_re: [[[ 0. 0.9715823 0. -0.5062872 0. -1.4089996 0. -0.16242778 0. 0.69710165 0. 0.5989115 0. 2.4540157 0. -0.08043452 0. ]]]; ov_res: [[[ 0. 0.9715823 0. -0.5062872 0. -1.4089996 0. -0.16242778 0. 0.69710165 0. 0.5989115 0. 2.4540157 0. -0.08043452 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [2], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 0] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_760.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.8105 1.1165 -0.8826 (2,.,.) = -1.0585 0.3050 1.5016 (3,.,.) = 1.3386 0.0175 -0.1084 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[-0.02820947 1.1848524 -0.46022767 -0.46754712 0.9472597 ] [-0.26900616 -1.8795217 0.10663495 -0.14238138 -1.6754705 ] [ 0.10119105 0.3151418 -1.0575373 0.18317364 2.1130252 ]]]; ov_res: [[[-0.02820947 1.1848524 -0.46022767 -0.46754712 0.9472597 ] [-0.26900616 -1.8795217 0.10663495 -0.14238138 -1.6754705 ] [ 0.10119105 0.3151418 -1.0575373 0.18317364 2.1130252 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 0] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_762.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.6506 (2,.,.) = 1.2600 (3,.,.) = -1.1477 [ CPUFloatType{3,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[-3.1317053 0. -1.0526519 0. 0.8542617 0. -3.0978181 0. 3.393139 0. -0.866173 0. -1.0647125 0. 0.10328311 0. -0.2212611 0. 0.7543615 ]]]; ov_res: [[[-3.1317053 0. -1.0526519 0. 0.8542617 0. -3.0978181 0. 3.393139 0. -0.866173 0. -1.0647125 0. 0.10328311 0. -0.2212611 0. 0.7543615 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [2], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 1] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_764.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -2.2136 0.6648 -0.1568 (2,.,.) = -0.5529 -0.5167 -0.8136 (3,.,.) = -0.1262 0.0519 0.4231 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 0.6298849 -3.2369554 2.2805545 1.3438894 -1.9405372 ] [ 1.9466435 -0.38504514 0.10672776 0.7011279 -0.485192 ] [-0.8440413 -0.26617017 0.5417708 0.6277023 0.3770156 ]]]; ov_res: [[[ 0.6298849 -3.2369554 2.2805545 1.3438894 -1.9405372 ] [ 1.9466435 -0.38504514 0.10672776 0.7011279 -0.485192 ] [-0.8440413 -0.26617017 0.5417708 0.6277023 0.3770156 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 1] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_766.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -1.2902 1.2759 1.8108 (2,.,.) = 0.2750 0.3992 -0.8459 (3,.,.) = 0.0944 0.8035 -0.3455 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[-1.6217241 -2.0624814 4.2186127 -2.3882167 4.8123913 -1.3561217 2.3311834 1.5952594 0.03592412 4.3041844 ] [ 0.907201 0.75287503 -1.5690641 0.35539326 -1.8002671 -1.4680413 -1.8541371 -0.08131641 -0.21636963 -1.3459983 ] [ 0.55616486 0.16366395 -0.15992415 -0.36429232 -0.4305727 -1.9861957 -1.3165578 0.3267308 -0.05411041 -0.39260152]]]; ov_res: [[[-1.6217241 -2.0624814 4.2186127 -2.3882167 4.8123913 -1.3561217 2.3311834 1.5952594 0.03592412 4.3041844 ] [ 0.907201 0.75287503 -1.5690641 0.35539326 -1.8002671 -1.4680413 -1.8541371 -0.08131641 -0.21636963 -1.3459983 ] [ 0.55616486 0.16366395 -0.15992415 -0.36429232 -0.4305727 -1.9861957 -1.3165578 0.3267308 -0.05411041 -0.39260152]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 1] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_768.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.2246 (2,.,.) = 0.6749 (3,.,.) = -1.1533 [ CPUFloatType{3,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[ 0.27505642 0. -0.5668135 0. 1.1234454 0. -0.7021998 0. 0.05179681 0. -0.8612418 0. 2.0199816 0. 0.29904208 0. 1.994783 0. -1.9952233 ]]]; ov_res: [[[ 0.27505642 0. -0.5668135 0. 1.1234454 0. -0.7021998 0. 0.05179681 0. -0.8612418 0. 2.0199816 0. 0.29904208 0. 1.994783 0. -1.9952233 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [1], 'dilations': [2], 'groups': 1, 'output_padding': [1], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_770.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.2705 (2,.,.) = -0.1987 (3,.,.) = -0.2632 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[ 0. 3.5143735 0. -0.9815688 0. -1.4750369 0. -0.27661732 0. -0.6326341 0. -0.2801978 0. -0.5424687 0. -0.655387 0. -0.37679267]]]; ov_res: [[[ 0. 3.5143735 0. -0.9815688 0. -1.4750369 0. -0.27661732 0. -0.6326341 0. -0.2801978 0. -0.5424687 0. -0.655387 0. -0.37679267]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 2], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_772.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.5527 0.5030 -1.5132 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, strides=[6, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 0.5716138 3.0895681 -3.2576954 -6.635952 -3.1260953 0.7084652 -3.0061395 -0.9397497 -0.10355622 -0.27603507 0.649998 ] [-1.9700496 5.3992586 7.05151 0.24004251 -3.5011673 3.5692239 -2.9860828 3.2562637 3.5316622 0.32462037 1.1078105 ] [-0.59185565 -3.2696662 -2.0883923 0.23256421 0.49936688 -2.1077023 0.13058078 -1.7348858 -1.6718227 -1.2159797 -1.8617764 ]]]; ov_res: [[[ 0.5716138 3.0895684 -3.2576957 -6.635952 -3.1260953 0.7084652 -3.0061395 -0.9397497 -0.10355628 -0.27603507 0.649998 ] [-1.9700496 5.3992586 7.05151 0.24004275 -3.5011673 3.5692236 -2.9860828 3.2562637 3.5316622 0.3246203 1.1078105 ] [-0.59185565 -3.2696662 -2.0883923 0.23256421 0.49936688 -2.1077023 0.13058078 -1.7348858 -1.6718227 -1.2159797 -1.8617764 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 2], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_774.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.0304 0.5118 0.3617 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, strides=[6, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 0.20423001 -0.30510455 -2.0044167 1.8617384 2.305739 -2.7433534 3.1527243 -1.2144568 -1.0282972 ] [ 0.20857164 1.9829292 6.2432156 -3.2946033 -1.4338 2.963517 -4.472334 2.4956033 0.3709702 ] [ 1.0547445 0.55810636 2.0513139 1.854124 1.8562582 4.2456994 -1.5831499 1.5667585 0.6229964 ]]]; ov_res: [[[ 0.20423001 -0.30510452 -2.0044167 1.8617386 2.305739 -2.7433536 3.1527243 -1.2144568 -1.0282971 ] [ 0.20857164 1.9829292 6.2432156 -3.294603 -1.4338 2.963517 -4.472334 2.4956033 0.37097 ] [ 1.0547445 0.5581064 2.0513139 1.8541238 1.8562582 4.2456994 -1.5831497 1.5667585 0.6229964 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_776.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.5956 0.1335 -0.0499 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.8043 (2,.,.) = 0.01 * 8.2403 (3,.,.) = -1.5899 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 1.2794459 -0.20740795 4.16518 -0.31015623 4.7204647 0.41712976 3.5858097 -0.2477932 -3.090518 -2.1768365 ] [ 0.1504186 0.06045069 0.16230431 0.25470254 0.08206756 0.08795258 0.13017637 0.15350488 0.05688103 0.21300188] [ 1.1496472 -0.05096557 0.48696333 0.8847332 1.0569437 -1.3598237 0.0474503 -1.6470815 -1.2553926 -5.71257 ]]]; ov_res: [[[ 1.2794459 -0.20740795 4.16518 -0.31015623 4.7204647 0.41712976 3.5858097 -0.2477932 -3.090518 -2.1768365 ] [ 0.1504186 0.06045069 0.16230431 0.25470254 0.08206756 0.08795258 0.13017637 0.15350488 0.05688103 0.21300188] [ 1.1496472 -0.05096557 0.48696333 0.8847332 1.0569437 -1.3598237 0.0474503 -1.6470815 -1.2553926 -5.71257 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_778.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.5096 1.2227 -0.0705 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.6750 (2,.,.) = 0.8195 (3,.,.) = -0.9131 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[-1.0442460e-01 -2.9522836e-01 -2.2701354e-01 -5.5651045e-01 -7.8593773e-01 -3.6065951e-01 -4.8864886e-01 -4.7697055e-01 -6.8905550e-01 -1.9053121e+00] [ 1.6604985e+00 5.0176179e-01 1.5925378e+00 8.2996970e-01 2.2364018e+00 1.3087147e+00 1.3143290e+00 5.8204669e-01 -2.5551137e-01 1.6466165e+00] [-3.3717797e-04 -1.6918497e-02 -1.1733626e+00 -9.1640419e-01 -1.4826860e-01 -1.3191895e+00 5.2666646e-01 -7.0909947e-02 3.2135278e-01 8.9789480e-03]]]; ov_res: [[[-1.0442460e-01 -2.9522836e-01 -2.2701354e-01 -5.5651045e-01 -7.8593773e-01 -3.6065951e-01 -4.8864886e-01 -4.7697055e-01 -6.8905550e-01 -1.9053121e+00] [ 1.6604985e+00 5.0176179e-01 1.5925378e+00 8.2996970e-01 2.2364018e+00 1.3087147e+00 1.3143290e+00 5.8204669e-01 -2.5551137e-01 1.6466165e+00] [-3.3717797e-04 -1.6918497e-02 -1.1733626e+00 -9.1640419e-01 -1.4826860e-01 -1.3191895e+00 5.2666646e-01 -7.0909947e-02 3.2135278e-01 8.9789480e-03]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 0] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_780.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.230475}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.8856 (2,.,.) = -1.2395 (3,.,.) = 1.1032 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 0.79204303 -0.39838392 0.1239126 2.646414 -0.22749867 0.51548594 -0.08984175 -1.0252631 ]]]; ov_res: [[[ 0.79204303 -0.39838392 0.1239126 2.646414 -0.22749867 0.51548594 -0.08984175 -1.0252631 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_782.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.6909 -0.1279 -1.2150 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.2718 0.9846 -0.2987 (2,.,.) = -0.2854 -0.7807 0.2527 (3,.,.) = -0.6393 -0.3410 -0.5963 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[-0.690919 -0.42495766 -0.02502882 0.46914464 -0.990178 -1.750773 -1.7993057 -1.5208609 -1.0659646 0.18740553 -1.4798799 -0.690919 ] [-0.12791911 -1.3081654 -0.12908334 -1.1226048 0.69898295 0.36670166 -0.19771 0.8612492 0.2561738 -0.23276708 0.25417584 -0.12791911] [-1.2149948 -3.130097 0.09603918 0.16157866 -0.350824 -1.1198912 -1.979418 -1.2165059 -1.7555614 -0.31829447 -0.22970325 -1.2149948 ]]]; ov_res: [[[-0.690919 -0.42495766 -0.02502882 0.46914464 -0.990178 -1.750773 -1.7993057 -1.5208609 -1.0659646 0.18740553 -1.4798799 -0.690919 ] [-0.12791911 -1.3081654 -0.12908334 -1.1226048 0.69898295 0.36670166 -0.19771 0.8612492 0.2561738 -0.23276708 0.25417584 -0.12791911] [-1.2149948 -3.130097 0.09603918 0.16157866 -0.350824 -1.1198912 -1.979418 -1.2165059 -1.7555614 -0.31829447 -0.22970325 -1.2149948 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [3], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_784.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.896745}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.1380 (2,.,.) = 0.5983 (3,.,.) = 1.1818 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[-0.32972223 2.8547218 0.7689752 2.061527 ]]]; ov_res: [[[-0.32972223 2.8547218 0.7689752 2.061527 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [3], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_786.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 2.2894 0.0577 0.5730 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.9524 -2.0604 -0.8154 (2,.,.) = -1.0949 -0.4992 -0.2087 (3,.,.) = -0.7232 0.2707 -0.4063 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 2.2893593 2.2893593 2.2893593 1.223735 1.5534 -0.94827676 3.7286325 -1.9773223 3.3390112 0.98602843 5.6989975 2.0837102 3.0864644 2.2893593 2.2893593 2.2893593 ] [ 0.05765976 0.05765976 0.05765976 0.53696173 -0.2818905 0.4201885 0.819541 -0.7632479 0.79942656 -1.0143998 -0.26903874 -0.13914144 -0.73296505 0.05765976 0.05765976 0.05765976] [ 0.5729729 0.5729729 0.5729729 0.8475543 0.7225989 0.773321 0.22260377 0.56310534 0.89964736 0.24563661 -0.493003 1.2985322 -0.11308926 0.5729729 0.5729729 0.5729729 ]]]; ov_res: [[[ 2.2893593 2.2893593 2.2893593 1.223735 1.5534 -0.94827676 3.7286325 -1.9773223 3.3390112 0.98602843 5.6989975 2.0837102 3.0864644 2.2893593 2.2893593 2.2893593 ] [ 0.05765976 0.05765976 0.05765976 0.53696173 -0.2818905 0.4201885 0.819541 -0.7632479 0.79942656 -1.0143998 -0.26903874 -0.13914144 -0.73296505 0.05765976 0.05765976 0.05765976] [ 0.5729729 0.5729729 0.5729729 0.8475543 0.7225989 0.773321 0.22260377 
0.56310534 0.89964736 0.24563661 -0.493003 1.2985322 -0.11308926 0.5729729 0.5729729 0.5729729 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 1] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_788.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.806425}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.4136 (2,.,.) = -0.1943 (3,.,.) = 0.5945 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[-1.3728167 -0.45037076 -1.3192143 -0.921018 -1.316318 -1.0944505 -0.24922019 -1.4306319 ]]]; ov_res: [[[-1.3728167 -0.45037076 -1.3192143 -0.921018 -1.316318 -1.0944505 -0.24922019 -1.4306319 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 0] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_790.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.1737 -2.7398 -1.0276 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.4188 -0.4292 0.1273 (2,.,.) = -1.9113 0.2300 2.5638 (3,.,.) = -1.1745 -0.5550 -1.3338 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 1.5355451 0.93266684 0.11937678 1.71831 1.4427111 0.36504036 1.4263464 1.1917567 2.1546683 0.69827396] [-1.7221345 -8.150302 5.7140284 -2.8527431 4.2945595 -2.2779577 -4.9206367 -3.1927724 -3.359186 0.24462032] [-5.102517 -1.0010899 -3.9989688 -2.371344 -0.19964504 0.21211028 -0.2698472 1.246386 -1.9133518 -0.69493496]]]; ov_res: [[[ 1.5355451 0.93266684 0.11937678 1.71831 1.4427111 0.36504036 1.4263464 1.1917567 2.1546683 0.69827396] [-1.7221345 -8.150302 5.7140284 -2.8527431 4.2945595 -2.2779577 -4.9206367 -3.1927724 -3.359186 0.24462032] [-5.102517 -1.0010899 -3.9989688 -2.371344 -0.19964504 0.21211028 -0.2698472 1.246386 -1.9133518 -0.69493496]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_792.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.9209 -1.7843 -1.3642 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.1634 (2,.,.) = 0.7532 (3,.,.) = 1.1887 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[-0.9503128 -1.1797987 -0.94033515 -1.1819986 -0.9762281 -0.6323566 -1.008053 -0.84791166] [-2.5060053 -1.9508617 -1.5064154 -1.394813 -3.3286471 -0.8022585 -1.404695 -2.5541985 ] [-0.57272094 0.05921733 -2.1338995 -0.69895405 -1.8420751 -0.32908595 -1.6937959 -1.6132858 ]]]; ov_res: [[[-0.9503128 -1.1797987 -0.94033515 -1.1819986 -0.9762281 -0.6323566 -1.008053 -0.84791166] [-2.5060053 -1.9508617 -1.5064154 -1.394813 -3.3286471 -0.8022585 -1.404695 -2.5541985 ] [-0.57272094 0.05921733 -2.1338995 -0.69895405 -1.8420751 -0.32908595 -1.6937959 -1.6132858 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_794.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.2867 0.5516 -0.4180 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.5795 (2,.,.) = 0.9747 (3,.,.) = 0.4866 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 0.28668508 0.92550623 2.214299 0.3157142 0.7510444 1.6376985 -2.0538979 -0.26320657 -0.13647217 -0.96848583 -1.8249062 0.28668508] [ 0.55156624 -0.19297016 0.8352653 0.69591606 -0.29620272 1.8932893 -0.55592716 1.1253791 0.8366741 0.29830885 1.9568318 0.55156624] [-0.4180318 -0.40789452 0.35774893 -0.48286015 -0.51679254 -0.27306724 -0.62599164 0.24296051 -0.20947477 -0.34305936 -0.5788431 -0.4180318 ]]]; ov_res: [[[ 0.28668508 0.9255063 2.214299 0.3157142 0.7510444 1.6376985 -2.0538976 -0.26320654 -0.13647217 -0.96848583 -1.8249062 0.28668508] [ 0.55156624 -0.19297014 0.8352653 0.6959161 -0.2962027 1.8932893 -0.5559271 1.1253791 0.8366741 0.29830885 1.9568318 0.55156624] [-0.4180318 -0.40789452 0.35774893 -0.48286012 -0.5167925 -0.27306724 -0.62599164 0.2429605 -0.20947477 -0.34305936 -0.5788431 -0.4180318 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [2], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_796.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.2872 -0.2958 -0.4335 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.3474 (2,.,.) = 0.9013 (3,.,.) = 1.5799 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 0.41255775 0.36259973 0.40775877 0.09530839 0.28289008 0.11576289 0.1505978 0.4361836 ] [-1.1848468 -1.2737814 -0.57007116 -1.3330818 -0.11897686 -0.45728123 -2.4855132 -0.199137 ] [-0.24598241 -0.9185579 -2.8021162 0.269346 -0.9715973 1.0874956 -1.0523441 -0.28601855]]]; ov_res: [[[ 0.41255775 0.36259973 0.40775877 0.09530839 0.28289008 0.11576289 0.1505978 0.4361836 ] [-1.1848468 -1.2737814 -0.57007116 -1.3330818 -0.11897686 -0.45728123 -2.4855132 -0.199137 ] [-0.24598241 -0.9185579 -2.8021162 0.269346 -0.9715973 1.0874956 -1.0523441 -0.28601855]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [2], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_798.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.4361 -1.8456 -1.7545 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.2307 (2,.,.) = -1.7850 (3,.,.) = 0.7233 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 0.16346078 0.8410957 0.21519521 0.5964946 0.517161 0.31982604 0.4724259 0.45356005 0.2739876 0.45331147] [-0.77924067 0.04684415 0.6299012 -0.41316524 -3.1698093 0.4147471 -2.4113123 0.17246826 -2.1933215 -1.6242493 ] [-2.303573 -1.5173088 -1.3105257 -1.7544717 -0.89050525 -1.6097534 -3.2090712 -2.6358857 -0.16406874 -2.8810253 ]]]; ov_res: [[[ 0.16346078 0.8410957 0.21519521 0.5964946 0.517161 0.31982604 0.4724259 0.45356005 0.2739876 0.45331147] [-0.77924067 0.04684415 0.6299012 -0.41316524 -3.1698093 0.4147471 -2.4113123 0.17246826 -2.1933215 -1.6242493 ] [-2.303573 -1.5173088 -1.3105257 -1.7544717 -0.89050525 -1.6097534 -3.2090712 -2.6358857 -0.16406874 -2.8810253 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_800.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.265092}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.3157 (2,.,.) = 0.01 * 2.9382 (3,.,.) = 0.7163 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.pads, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 0.26509193 -0.7307991 0.26509193 -0.18466458 0.26509193 0.8422469 0.26509193 -0.14884737 0.26509193 0.56580025 0.26509193 0.92719615 0.26509193 1.0428183 0.26509193 0.8701556 0.26509193]]]; ov_res: [[[ 0.26509193 -0.7307991 0.26509193 -0.18466458 0.26509193 0.8422469 0.26509193 -0.14884737 0.26509193 0.56580025 0.26509193 0.92719615 0.26509193 1.0428183 0.26509193 0.8701556 0.26509193]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [2], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 0] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_802.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.4163 2.4042 2.3659 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.3376 -0.1274 -1.7525 (2,.,.) = -0.4192 -0.7758 2.0799 (3,.,.) = -1.8782 -0.3676 0.6262 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 2.5499136 0.62681454 3.6739535 0.3826864 0.87064004] [ 1.9985294 1.7591443 -1.1357293 4.1190634 2.5142887 ] [ 5.6361732 3.0071683 -0.21807432 6.5479345 6.4874907 ]]]; ov_res: [[[ 2.5499136 0.62681454 3.6739535 0.3826864 0.87064004] [ 1.9985294 1.7591443 -1.1357293 4.1190634 2.5142887 ] [ 5.6361732 3.0071683 -0.21807432 6.5479345 6.4874907 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 0] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_804.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.03789}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.8296 (2,.,.) = -0.8284 (3,.,.) = -0.9910 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[-2.1673822 -1.0378902 -5.060918 -1.0378902 -6.5143175 -1.0378902 -1.7146273 -1.0378902 6.1176615 -1.0378902 -4.3005238 -1.0378902 -1.7275093 -1.0378902 -0.6086586 -1.0378902 -0.6393765 -1.0378902 -0.99187785]]]; ov_res: [[[-2.1673822 -1.0378902 -5.060918 -1.0378902 -6.5143175 -1.0378902 -1.7146273 -1.0378902 6.1176615 -1.0378902 -4.3005238 -1.0378902 -1.7275093 -1.0378902 -0.6086586 -1.0378902 -0.6393765 -1.0378902 -0.99187785]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [2], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 1] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_806.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.7116 1.0569 1.3342 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -1.6426 0.0226 -0.5083 (2,.,.) = 1.9865 0.1790 -0.1246 (3,.,.) = -0.0406 1.3948 -0.4960 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[-4.0099363 0.3651235 -1.8256364 -1.8327518 -2.592476 ] [ 4.346643 0.9206548 1.4604322 2.399437 3.1113768] [-0.8107346 3.2164493 1.7882652 1.9391847 1.6647322]]]; ov_res: [[[-4.0099363 0.3651235 -1.8256364 -1.8327518 -2.592476 ] [ 4.346643 0.9206548 1.4604322 2.399437 3.1113768] [-0.8107346 3.2164493 1.7882652 1.9391847 1.6647322]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 1] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_808.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.6485 0.0889 0.7898 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.1904 0.6386 0.8549 (2,.,.) = -0.8529 -1.5179 -0.4444 (3,.,.) = 0.1263 0.4879 -0.3916 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[-1.616419 0.5117706 -1.2706506 -3.0945125 -2.084929 -1.11324 -2.468926 -3.347891 -1.5183372 1.5708445 ] [-3.1349456 -2.6468081 -0.9584176 3.802468 2.0988104 0.4784025 0.76034355 0.5496811 -0.77713937 -2.7782624 ] [ 2.1079469 1.0324793 1.289568 -0.08346224 0.15598714 -0.6163043 0.6007143 1.3479728 0.8569596 0.6292268 ]]]; ov_res: [[[-1.616419 0.5117706 -1.2706506 -3.0945125 -2.084929 -1.11324 -2.468926 -3.347891 -1.5183372 1.5708445 ] [-3.1349456 -2.6468081 -0.9584176 3.802468 2.0988104 0.4784025 0.76034355 0.5496811 -0.77713937 -2.7782624 ] [ 2.1079469 1.0324793 1.289568 -0.08346224 0.15598714 -0.6163043 0.6007143 1.3479728 0.8569596 0.6292268 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 1] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_810.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.666404}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -1.2204 (2,.,.) = 0.1577 (3,.,.) = 0.4033 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[-1.248375 0.66640353 1.1742274 0.66640353 0.90416956 0.66640353 -1.4524796 0.66640353 1.9108511 0.66640353 -1.081037 0.66640353 -0.63359106 0.66640353 2.0778086 0.66640353 -0.07169938 0.66640353 1.2781118 ]]]; ov_res: [[[-1.248375 0.66640353 1.1742274 0.66640353 0.90416956 0.66640353 -1.4524796 0.66640353 1.9108511 0.66640353 -1.081037 0.66640353 -0.63359106 0.66640353 2.0778086 0.66640353 -0.07169938 0.66640353 1.2781118 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [1], 'dilations': [2], 'groups': 1, 'output_padding': [1], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_812.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.5624}]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -2.1078 (2,.,.) = 0.01 * -6.3267 (3,.,.) = -0.9209 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 0.5623999 -3.6788416 0.5623999 -0.87185675 0.5623999 -2.3399363 0.5623999 3.1298993 0.5623999 2.764008 0.5623999 0.69099987 0.5623999 -2.750973 0.5623999 3.5909238 0.5623999 2.250023 ]]]; ov_res: [[[ 0.5623999 -3.6788416 0.5623999 -0.87185675 0.5623999 -2.3399363 0.5623999 3.1298993 0.5623999 2.764008 0.5623999 0.69099987 0.5623999 -2.750973 0.5623999 3.5909238 0.5623999 2.250023 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 2], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_814.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, strides=[6, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[-0.748164 -0.9003734 2.4523487 -1.9452451 -1.2952236 0.3587973 0.3028971 0.06958079 -1.5150671 2.3837428 0.15594384] [-0.23751007 1.0782859 1.4566386 -2.4303594 -0.8369502 0.27474296 2.1382973 -0.00783157 1.7099979 1.5519379 -1.5095205 ] [-1.0072362 -0.64861745 1.5042117 -0.77325416 0.06121802 0.5180739 3.3880415 -0.72575754 1.7457082 2.9182372 -1.4281515 ]]]; ov_res: [[[-0.748164 -0.90037346 2.4523487 -1.945245 -1.2952237 0.35879737 0.30289707 0.06958085 -1.5150671 2.3837426 0.15594384] [-0.23751007 1.0782859 1.4566387 -2.4303596 -0.8369501 0.27474296 2.1382973 -0.00783159 1.7099979 1.5519379 -1.5095205 ] [-1.0072362 -0.6486175 1.5042115 -0.7732541 0.06121802 0.518074 3.3880415 -0.72575754 1.7457082 2.9182372 -1.4281515 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 2], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_816.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, strides=[6, 2, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[-0.9217262 -2.0623295 -1.2487903 -2.6675532 1.5219669 -2.5848777 0.8255585 0.8305234 7.413837 ] [-1.1526788 0.99304205 0.8561177 -0.5083318 -0.9131282 -0.7073789 -0.11347353 -2.0925338 -2.6321042 ] [ 1.6612376 1.1042948 1.5938158 1.7385019 1.0305562 1.8368206 -1.5261433 0.6075461 0.16551104]]]; ov_res: [[[-0.92172635 -2.0623295 -1.24879 -2.6675534 1.5219669 -2.5848777 0.82555854 0.8305233 7.4138365 ] [-1.1526788 0.99304205 0.8561178 -0.5083318 -0.9131282 -0.7073789 -0.1134735 -2.0925338 -2.6321042 ] [ 1.6612376 1.1042948 1.5938156 1.7385019 1.0305562 1.8368206 -1.5261433 0.6075461 0.16551077]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_818.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.2305 (2,.,.) = -0.1093 (3,.,.) = 0.01 * 4.0967 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[-0.01331654 0.28666082 0.3109837 0.05448913 -0.04799512 -0.2797714 -0.48340598 0.2705926 -0.02829687 -0.15053836] [ 0.08895204 0.14779904 -0.00438238 -0.03309898 0.06790707 -0.09772924 -0.09524398 0.11477692 0.2573466 -0.06628522] [-0.02971412 0.02995124 -0.07227009 0.01919141 0.04257603 -0.05932211 0.04789295 -0.03789052 0.0343286 0.0045208 ]]]; ov_res: [[[-0.01331654 0.28666082 0.3109837 0.05448913 -0.04799512 -0.2797714 -0.48340598 0.2705926 -0.02829687 -0.15053836] [ 0.08895204 0.14779904 -0.00438238 -0.03309898 0.06790707 -0.09772924 -0.09524398 0.11477692 0.2573466 -0.06628522] [-0.02971412 0.02995124 -0.07227009 0.01919141 0.04257603 -0.05932211 0.04789295 -0.03789052 0.0343286 0.0045208 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_820.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.5501 (2,.,.) = 0.6575 (3,.,.) = 1.6035 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 0.6283289 0.6073788 -1.2070949 0.41861147 -0.21611717 0.5596536 0.58488286 -0.1101265 0.93894655 0.77940696] [-0.1661466 0.5346094 0.32848775 0.3082947 -0.42513847 0.47085783 -0.31678078 0.671186 0.19483958 -0.10819567] [-1.7060326 0.4538224 -2.3709989 -0.3411086 -1.6311743 -0.8597679 0.63296264 0.9998638 -0.10075729 0.9631667 ]]]; ov_res: [[[ 0.6283289 0.6073788 -1.2070949 0.41861147 -0.21611717 0.5596536 0.58488286 -0.1101265 0.93894655 0.77940696] [-0.1661466 0.5346094 0.32848775 0.3082947 -0.42513847 0.47085783 -0.31678078 0.671186 0.19483958 -0.10819567] [-1.7060326 0.4538224 -2.3709989 -0.3411086 -1.6311743 -0.8597679 0.63296264 0.9998638 -0.10075729 0.9631667 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 0] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_822.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.4583 (2,.,.) = 0.6656 (3,.,.) = 0.2125 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 0.2874557 -0.00812734 0.41071376 -0.4857145 -0.29723287 -0.6659417 0.64170694 -0.9898971 ]]]; ov_res: [[[ 0.2874557 -0.00812734 0.41071376 -0.4857145 -0.29723287 -0.6659417 0.64170694 -0.9898971 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_824.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.4942 -1.3837 1.3565 (2,.,.) = 1.7089 2.2803 -1.3025 (3,.,.) = -0.2838 0.6592 2.1729 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 0. -0.77946186 0.02489181 -2.383055 0.9817795 -0.09664508 0.16388409 -0.15226306 -1.1863742 3.1688597 -1.8510787 0. ] [ 0. 2.0089018 0.9552139 5.72554 0.23507372 0.16775824 1.1161437 -1.2350504 2.2029805 -5.42785 1.5069773 0. ] [ 0. 1.5723612 4.607574 2.048974 1.8127407 -0.392951 2.1352792 -3.1257749 -3.916791 0.9390587 -2.561988 0. ]]]; ov_res: [[[ 0. -0.77946186 0.02489181 -2.383055 0.9817795 -0.09664508 0.16388409 -0.15226306 -1.1863742 3.1688597 -1.8510787 0. ] [ 0. 2.0089018 0.9552139 5.72554 0.23507372 0.16775824 1.1161437 -1.2350504 2.2029805 -5.42785 1.5069773 0. ] [ 0. 1.5723612 4.607574 2.048974 1.8127407 -0.392951 2.1352792 -3.1257749 -3.916791 0.9390587 -2.561988 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [3], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_826.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.9002 (2,.,.) = -1.0216 (3,.,.) = -0.2848 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 0.0546631 -2.2516952 -0.6084275 -0.07359987]]]; ov_res: [[[ 0.0546631 -2.2516952 -0.6084275 -0.07359987]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [3], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_828.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.5112 0.2243 0.3797 (2,.,.) = -1.4716 -0.2346 1.0403 (3,.,.) = -0.3995 0.0976 0.3869 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 0. 0. 0. 1.0978427 0.7779458 0.6457812 -0.6560526 0.74721694 0.27373028 -0.19688079 -0.6332191 1.577416 1.0569597 0. 0. 0. ] [ 0. 0. 0. 0.7448914 -2.0690687 -0.06790961 -0.36266539 -3.8091953 1.9772719 0.18250167 -3.2346435 -5.9147706 -1.4619311 0. 0. 0. ] [ 0. 0. 0. 0.45341495 -0.5582294 0.16677193 -0.18556981 -1.1286083 0.47623277 -0.10105651 -1.1246759 -1.5082427 -0.16751887 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. 1.0978427 0.7779458 0.6457812 -0.6560526 0.74721694 0.27373028 -0.19688079 -0.6332191 1.577416 1.0569597 0. 0. 0. ] [ 0. 0. 0. 0.7448914 -2.0690687 -0.06790961 -0.36266539 -3.8091953 1.9772719 0.18250167 -3.2346435 -5.9147706 -1.4619311 0. 0. 0. ] [ 0. 0. 0. 0.45341495 -0.5582294 0.16677193 -0.18556981 -1.1286083 0.47623277 -0.10105651 -1.1246759 -1.5082427 -0.16751887 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 1] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_830.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.4561 (2,.,.) = 0.8063 (3,.,.) = -0.7147 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 1.63235 1.8560321 0.9484366 0.30369884 0.35986212 0.6697847 -2.1920645 0.6419211 ]]]; ov_res: [[[ 1.63235 1.8560321 0.9484366 0.30369884 0.35986212 0.6697847 -2.1920645 0.6419211 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 0] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_832.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.0424 1.0750 -1.0025 (2,.,.) = 1.5745 0.0354 0.8085 (3,.,.) = -1.3927 -0.7472 -0.6293 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[-1.0801773 2.350933 -0.03674144 1.4366022 -1.0116665 -1.2455106 1.768469 -2.2087567 5.823915 1.2766238 ] [ 0.4113217 2.4880853 -0.91846246 1.5647054 -3.1855385 1.6810541 1.2147874 0.4404592 3.295714 0.8817495 ] [-0.39504075 -2.3812587 0.10205439 -1.2176137 2.4976733 -0.6492311 -1.1291004 -0.07846701 -4.645184 -1.1710528 ]]]; ov_res: [[[-1.0801773 2.350933 -0.03674144 1.4366022 -1.0116665 -1.2455106 1.768469 -2.2087567 5.823915 1.2766238 ] [ 0.4113217 2.4880853 -0.91846246 1.5647054 -3.1855385 1.6810541 1.2147874 0.4404592 3.295714 0.8817495 ] [-0.39504075 -2.3812587 0.10205439 -1.2176137 2.4976733 -0.6492311 -1.1291004 -0.07846701 -4.645184 -1.1710528 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_834.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.9775 (2,.,.) = 0.01 * -5.6636 (3,.,.) = 0.5954 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 1.1626501e+00 1.4329395e-01 -3.9551118e-01 -5.2466673e-01 -1.5515158e+00 2.2859980e-01 -4.7972208e-01 6.7334193e-01] [ 5.9707894e-04 1.4013764e-02 -1.1612837e-02 3.4778152e-02 -5.6031000e-02 -1.2579182e-02 3.2853957e-02 8.1150746e-03] [-4.5103306e-01 -2.5763866e-01 -1.0758201e+00 1.6112417e+00 4.8826724e-01 9.2477250e-01 1.2231315e+00 9.0212148e-01]]]; ov_res: [[[ 1.1626501e+00 1.4329395e-01 -3.9551118e-01 -5.2466673e-01 -1.5515158e+00 2.2859980e-01 -4.7972208e-01 6.7334193e-01] [ 5.9707894e-04 1.4013764e-02 -1.1612837e-02 3.4778152e-02 -5.6031000e-02 -1.2579182e-02 3.2853957e-02 8.1150746e-03] [-4.5103306e-01 -2.5763866e-01 -1.0758201e+00 1.6112417e+00 4.8826724e-01 9.2477250e-01 1.2231315e+00 9.0212148e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [1], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_836.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.5863 (2,.,.) = -1.1315 (3,.,.) = 0.01 * 7.8874 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 0. 0.5062357 -0.81709856 -0.16746466 0.76211005 -0.15352534 0.16898085 -0.35053167 0.41098732 -0.4117436 -0.3032752 0. ] [ 0. -0.6247657 0.50709623 1.7224754 -2.2160163 1.2876788 1.5767311 1.0812757 -0.85350275 0.31197953 0.49514717 0. ] [ 0. 0.06903506 0.04413527 0.03069291 -0.07456117 -0.03454493 -0.01353706 -0.11106199 -0.134302 0.03291718 -0.02347573 0. ]]]; ov_res: [[[ 0. 0.5062357 -0.81709856 -0.16746466 0.76211005 -0.15352534 0.16898085 -0.35053167 0.41098732 -0.4117436 -0.3032752 0. ] [ 0. -0.6247657 0.50709623 1.7224754 -2.2160163 1.2876788 1.5767311 1.0812757 -0.85350275 0.31197953 0.49514717 0. ] [ 0. 0.06903506 0.04413527 0.03069291 -0.07456117 -0.03454493 -0.01353706 -0.11106199 -0.134302 0.03291718 -0.02347573 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [1], 'dilations': [2], 'groups': 3, 'output_padding': [0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_838.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -2.1482 (2,.,.) = 0.6083 (3,.,.) = -1.8982 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 0.3544645 -0.7193833 3.8068604 -0.42099184 1.2714353 2.0017984 3.681967 3.821437 ] [-1.1377277 -0.3484378 0.262122 0.08390917 0.41209155 -0.19267727 -0.7050729 -0.30320558] [ 0.70052475 3.6339517 3.891138 -4.8975105 -0.20853612 2.758846 2.2870064 1.8853002 ]]]; ov_res: [[[ 0.3544645 -0.7193833 3.8068604 -0.42099184 1.2714353 2.0017984 3.681967 3.821437 ] [-1.1377277 -0.3484378 0.262122 0.08390917 0.41209155 -0.19267727 -0.7050729 -0.30320558] [ 0.70052475 3.6339517 3.891138 -4.8975105 -0.20853612 2.758846 2.2870064 1.8853002 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': [0], 'dilations': [2], 'groups': 3, 'output_padding': [0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_840.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.7616 (2,.,.) = -0.7349 (3,.,.) = 1.0181 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversiofw_re: [[[ 0.531375 0.5620957 -0.48771727 1.3334615 0.25056458 0.3476873 0.17412047 0.41974244 0.3638665 0.66665685] [-0.89707154 1.130028 -0.5076674 -0.37254784 -0.6976857 -0.23720858 0.5077761 -0.92656696 0.53095454 -0.36867794] [ 1.0681986 -1.1705422 -0.24907045 0.61934024 -0.87554216 -0.45754048 -1.2252036 -0.29830456 0.41998062 0.45096025]]]; ov_res: [[[ 0.531375 0.5620957 -0.48771727 1.3334615 0.25056458 0.3476873 0.17412047 0.41974244 0.3638665 0.66665685] [-0.89707154 1.130028 -0.5076674 -0.37254784 -0.6976857 -0.23720858 0.5077761 -0.92656696 0.53095454 -0.36867794] [ 1.0681986 -1.1705422 -0.24907045 0.61934024 -0.87554216 -0.45754048 -1.2252036 -0.29830456 0.41998062 0.45096025]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [1], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_842.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.9270 (2,.,.) = 0.2832 (3,.,.) = -0.3780 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.pads, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 0. 0.8085458 0. 0.4063062 0. 2.0161297 0. 0.5697478 0. -0.9737138 0. 1.5181501 0. -1.9031473 0. -0.38757923 0. ]]]; ov_res: [[[ 0. 0.8085458 0. 0.4063062 0. 2.0161297 0. 0.5697478 0. -0.9737138 0. 1.5181501 0. -1.9031473 0. -0.38757923 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [2], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 0] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_844.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.7684 -0.1276 -1.7189 (2,.,.) = 1.6038 0.2341 -0.0454 (3,.,.) = 0.1112 0.4025 -0.6284 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 1.4986385 0.1348568 0.485847 0.93876517 1.8558313 ] [ 1.459006 -3.445562 1.8674669 1.4716191 0.32822382] [ 1.3615739 -0.43642315 0.41661212 0.6730931 1.0432477 ]]]; ov_res: [[[ 1.4986385 0.1348568 0.485847 0.93876517 1.8558313 ] [ 1.459006 -3.445562 1.8674669 1.4716191 0.32822382] [ 1.3615739 -0.43642315 0.41661212 0.6730931 1.0432477 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 0] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_846.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -1.0243 (2,.,.) = -2.8430 (3,.,.) = 0.5576 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 1.021165 0. 0.00483768 0. 2.642421 0. 1.0163077 0. -0.2260505 0. -3.7786582 0. 1.8019986 0. 0.42704254 0. 4.028596 0. -0.9902305 ]]]; ov_res: [[[ 1.021165 0. 0.00483768 0. 2.642421 0. 1.0163077 0. -0.2260505 0. -3.7786582 0. 1.8019986 0. 0.42704254 0. 4.028596 0. -0.9902305 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [2], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 1] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_848.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.0502 -0.3958 2.2843 (2,.,.) = 0.2776 1.7620 -0.1729 (3,.,.) = 0.7813 -0.7647 0.6241 [ CPUFloatType{3,3,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 4.680939 -1.551967 4.1040635 -0.38165182 2.1771965 ] [-0.06683002 0.0258911 3.7296472 -3.598025 1.9301062 ] [ 0.55054706 -0.24139567 -0.6560937 -1.2084354 0.42607468]]]; ov_res: [[[ 4.680939 -1.551967 4.1040635 -0.38165182 2.1771965 ] [-0.06683002 0.0258911 3.7296472 -3.598025 1.9301062 ] [ 0.55054706 -0.24139567 -0.6560937 -1.2084354 0.42607468]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1], 'strides': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': False} 1] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_850.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, strides=[3, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -1.7225 0.2271 0.5147 (2,.,.) = 0.3365 2.0742 -1.9892 (3,.,.) = 0.7949 1.1117 0.8223 [ CPUFloatType{3,3,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[-0.33218855 -1.8148023 2.1387887 0.05777408 -0.4450602 -2.4675238 -2.334299 -0.5173879 1.0263417 1.436211 ] [-1.9782847 -0.9907699 6.388343 -3.9607468 1.1340866 -1.057065 1.9790324 -1.3289509 3.628124 -0.27785307] [-0.61561215 1.0847465 0.35481164 -1.6736747 1.8882446 2.5046148 2.9616842 0.9127896 2.1931374 -4.1833425 ]]]; ov_res: [[[-0.33218855 -1.8148023 2.1387887 0.05777408 -0.4450602 -2.4675238 -2.334299 -0.5173879 1.0263417 1.436211 ] [-1.9782847 -0.9907699 6.388343 -3.9607468 1.1340866 -1.057065 1.9790324 -1.3289509 3.628124 -0.27785307] [-0.61561215 1.0847465 0.35481164 -1.6736747 1.8882446 2.5046148 2.9616842 0.9127896 2.1931374 -4.1833425 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [0], 'dilations': [1], 'groups': 1, 'output_padding': [0], 'transposed': True} 1] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_852.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : int[] = prim::Constant[value=[0]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.4879 (2,.,.) = -0.6440 (3,.,.) = -0.8319 [ CPUFloatType{3,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[ 0.9119831 0. -0.68429935 0. 2.147384 0. 1.1965352 0. 0.53518546 0. 0.3725052 0. -1.2205814 0. -1.0715828 0. 0.51747376 0. -0.59099233]]]; ov_res: [[[ 0.9119831 0. -0.68429935 0. 2.147384 0. 1.1965352 0. 0.53518546 0. 0.3725052 0. -1.2205814 0. -1.0715828 0. 0.51747376 0. -0.59099233]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution1d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [2], 'bias_shape': [1], 'pads': [1], 'dilations': [2], 'groups': 1, 'output_padding': [1], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_854.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1]]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.4077 (2,.,.) = -0.7044 (3,.,.) = 0.9100 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[ 0. -0.2723605 0. 1.154091 0. -1.6734555 0. 0.8630796 0. -0.05616911 0. 0.04699923 0. 0.2599911 0. -0.8177062 0. -0.40080857]]]; ov_res: [[[ 0. -0.2723605 0. 1.154091 0. -1.6734555 0. 0.8630796 0. -0.05616911 0. 0.04699923 0. 0.2599911 0. -0.8177062 0. -0.40080857]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 2, 2, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_856.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.1068 -0.8437 1.8815 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, 2, 1, strides=[12, 4, 2, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[-1.16486168e+00 1.95975447e+00 1.75549686e-01 ... 3.23982882e+00 -8.87901723e-01 2.55055976e+00] [ 1.92067194e+00 -3.63207912e+00 -6.42200768e-01 ... -1.17311954e+00 8.75703454e-01 -7.29946375e-01] [ 1.84456205e+00 2.82744312e+00 -1.22506666e+00 ... 8.28254879e-01 -8.30580533e-01 1.17595673e+00] ... [ 2.55808020e+00 2.13703966e+00 2.80489230e+00 ... 2.55771661e+00 -2.82649398e+00 3.03765154e+00] [-1.95917511e+00 -4.18445140e-01 -1.28088498e+00 ... 1.61853170e+00 1.84719026e-01 -1.27089620e+00] [ 3.96974564e-01 -6.57324046e-02 6.55971646e-01 ... -6.66315734e-01 -8.47731411e-01 4.54763830e-01]] [[-5.62918997e+00 2.68719721e+00 -2.82992840e+00 ... 1.95048738e+00 1.31529236e+00 -1.18407190e-01] [-6.80274057e+00 2.33407617e+00 5.44641733e-01 ... 3.02761745e+00 8.36944997e-01 2.28375387e+00] [ 2.03303671e+00 -4.35549259e+00 -6.56993091e-01 ... 4.13482010e-01 5.18986988e+00 8.11510980e-01] ... [-9.89940643e-01 -3.01117945e+00 -3.33957052e+00 ... 
-6.52679253e+00 1.82470322e+00 1.37743282e+00] [ 1.62510085e+00 3.12298477e-01 -4.52093065e-01 ... 1.95532107e+00 -2.00446546e-01 2.79520631e+00] [-1.31803370e+00 3.69028568e-01 -9.85817373e-01 ... -1.04391384e+00 3.18695545e-01 -9.58654940e-01]] [[ 7.08910584e-01 -2.80760813e+00 -1.45173764e+00 ... 6.55145884e-01 3.06199765e+00 1.75962162e+00] [ 1.53693652e+00 -1.77115035e+00 -2.49753809e+00 ... 9.89224613e-01 -3.02352011e-01 -2.82139373e+00] [-3.22700024e+00 -1.78373337e+00 -1.76027942e+00 ... -1.78870702e+00 -1.52302241e+00 -9.31358051e+00] ... [-2.47921050e-02 2.17695522e+00 -2.04855585e+00 ... 4.55000973e+00 -1.35780478e+00 -1.36994290e+00] [ 1.54895902e+00 -1.65439987e+00 -1.17191339e+00 ... 6.58905208e-01 -2.67278337e+00 2.03686666e+00] [-1.46801615e+00 4.45403457e-01 1.15604639e+00 ... -3.15132070e+00 1.65522742e+00 8.64961922e-01]] ... [[ 2.09128809e+00 -1.71001911e+00 -1.78775597e+00 ... 1.40174299e-01 1.75851822e+00 2.06416154e+00] [-3.10361385e+00 -4.74546552e-02 -1.77956557e+00 ... -4.98279095e-01 5.58266592e+00 -1.88735902e-01] [-2.93494081e+00 5.91802299e-01 -1.39236355e+00 ... -1.14779997e+00 6.38270378e-03 -1.15498281e+00] ... [-2.50639486e+00 8.86778891e-01 9.22033787e-02 ... 5.96916389e+00 -1.67272258e+00 2.96376050e-02] [ 5.27508855e-02 -7.07896054e-01 4.77005100e+00 ... 4.44923699e-01 -3.10060072e+00 1.92274070e+00] [ 1.48207426e+00 -2.87692904e+00 1.10484529e+00 ... -4.77245480e-01 9.82026279e-01 -1.14258122e+00]] [[-3.41814947e+00 1.56423867e-01 -1.03315639e+00 ... -2.21909761e+00 1.49954796e+00 2.27410674e+00] [-3.17706317e-01 3.13292146e-01 2.01222420e+00 ... -1.40788227e-01 -6.53785706e-01 2.56896675e-01] [-4.69751740e+00 -3.68220234e+00 -2.32200551e+00 ... -3.25731230e+00 -3.32414865e+00 -1.02657175e+00] ... [-7.35026121e-01 4.83422422e+00 -3.42900515e+00 ... -8.42004716e-01 2.00921965e+00 2.02825308e+00] [ 1.17052102e+00 -5.38515234e+00 5.72148752e+00 ... 
1.18837070e+00 -1.14006591e+00 1.40913582e+00] [-2.35579997e-01 -1.44086456e+00 -3.75619173e-01 ... 9.04550612e-01 -4.90535796e-01 1.86667562e+00]] [[ 2.22809047e-01 3.12344575e+00 1.61529946e+00 ... -1.99353385e+00 1.97806215e+00 -3.33138406e-01] [ 2.98645234e+00 5.77102602e-01 3.48361897e+00 ... 2.97419429e-02 2.32206917e+00 1.60794759e+00] [-1.59830594e+00 1.45083022e+00 1.53284431e+00 ... 4.39400530e+00 7.29901016e-01 8.93280327e-01] ... [-9.58699524e-01 4.80426741e+00 -2.64128542e+00 ... -8.64004076e-01 -5.60667887e-02 -4.70197380e-01] [-1.97965312e+00 1.61919856e+00 -1.09873605e+00 ... 2.95620680e+00 2.22380090e+00 -2.95053124e+00] [ 8.06811988e-01 -1.50686312e+00 7.67561197e-02 ... -1.51698947e-01 -1.33401537e+00 6.33213967e-02]]] [[[-5.14570117e-01 -5.74399710e+00 -1.12444758e-02 ... -6.29427671e+00 -2.11266708e+00 -5.91908026e+00] [-5.17331839e+00 6.70931625e+00 -3.72106314e+00 ... -6.98837519e+00 -6.71725988e+00 -1.04214489e+00] [-1.00614367e+01 -5.73059320e+00 4.43092489e+00 ... -5.69228649e+00 4.01114225e+00 -7.19944239e-01] ... [-6.06196165e+00 -3.53886652e+00 -3.97500801e+00 ... 4.04000807e+00 2.10159421e-01 -3.37538433e+00] [ 1.93713415e+00 -5.66587019e+00 -3.80337143e+00 ... -3.36638069e+00 -2.82918739e+00 2.25883150e+00] [ 1.31018698e+00 -2.11543989e+00 -2.33998108e+00 ... 4.81999159e-01 -1.30393302e+00 2.36324668e-01]] [[ 3.12940931e+00 -2.44220257e+00 4.20858002e+00 ... -2.15843916e+00 -1.31743050e+00 1.05877149e+00] [ 4.10137892e+00 -7.37141943e+00 4.87956882e-01 ... -3.92442703e-01 -1.69857013e+00 -3.33195019e+00] [ 1.45836439e+01 9.91834402e-02 4.86783743e-01 ... -5.37869787e+00 -1.51436749e+01 -1.71221256e+00] ... [ 3.98224735e+00 4.85911608e+00 -7.54694819e-01 ... -1.07906401e+00 8.32207203e-01 -6.99475813e+00] [ 5.51737428e-01 5.73524714e+00 -8.98752332e-01 ... 1.09252672e+01 -6.32007551e+00 -3.30182123e+00] [ 1.41780138e-01 -1.24539578e+00 -1.46092904e+00 ... 
-1.23805523e+00 -4.69065571e+00 -7.10399985e-01]] [[-4.41448689e+00 2.46221972e+00 -7.30072403e+00 ... -3.41078281e+00 -1.00364447e+00 1.59015775e-01] [ 1.97716439e+00 -1.18484151e+00 5.61840534e+00 ... 9.29077816e+00 7.91777372e-02 -3.06839418e+00] [-5.55906439e+00 1.04208755e+01 -3.24733305e+00 ... 2.59448814e+00 1.55638063e+00 4.53677034e+00] ... [-3.99980736e+00 3.84053850e+00 -1.06282816e+01 ... -3.88082981e+00 -1.67293072e+00 7.79800653e+00] [-1.91653121e+00 2.18190432e+00 -4.41015661e-01 ... -8.71059704e+00 -3.67721605e+00 -4.71848536e+00] [-4.47307396e+00 1.24833524e+00 -3.86033714e-01 ... -3.25024033e+00 6.72716498e-02 -3.85772467e+00]] ... [[-7.49085248e-01 -6.52750111e+00 2.44617558e+00 ... -6.20284414e+00 -3.76079512e+00 7.80584860e+00] [-4.49884295e-01 2.45799255e+00 3.92433214e+00 ... -5.38096189e+00 -2.78516102e+00 6.26413465e-01] [ 6.67983675e+00 -9.12495899e+00 -6.68860555e-01 ... 5.30161953e+00 -5.96670294e+00 -2.89335871e+00] ... [-6.16836739e+00 -7.16161871e+00 7.83397627e+00 ... -7.72206903e-01 8.59120965e-01 -5.54122257e+00] [ 4.27480078e+00 3.06209922e-01 3.33863616e-01 ... -1.33696117e+01 8.24823570e+00 -1.38704824e+01] [ 6.61407828e-01 -3.13144302e+00 -1.70520604e+00 ... -2.93274164e+00 2.66180134e+00 -4.94853640e+00]] [[ 1.42972243e+00 -6.42278910e+00 1.58516109e+00 ... -3.25986004e+00 1.98125541e+00 -9.24234509e-01] [ 8.94839883e-01 -3.76688623e+00 4.65027094e-01 ... -1.30542088e+00 -6.45872021e+00 -1.23324275e-01] [ 5.73637056e+00 -7.87196636e+00 2.42636395e+00 ... -8.25974560e+00 7.85124183e-01 -1.28420429e+01] ... [ 6.20776701e+00 -1.29892912e+01 -5.38769722e+00 ... -2.93206549e+00 -5.23698616e+00 3.29839993e+00] [-3.01401711e+00 6.75638437e+00 -5.23836493e-01 ... 6.13560629e+00 -6.20777655e+00 -3.53362370e+00] [-4.70746219e-01 1.94997370e+00 -3.20397043e+00 ... -1.82682276e-01 5.23093820e-01 -5.67995167e+00]] [[-2.14128304e+00 -1.60115564e+00 -2.58552933e+00 ... 
-6.66512609e-01 -4.28527832e+00 -1.13651872e+00] [-2.79081464e-01 -3.46878004e+00 -1.30975604e+00 ... 9.59937811e-01 9.36082363e-01 -6.29448414e-01] [-2.31331038e+00 -5.67758560e-01 -5.01957798e+00 ... -3.30948687e+00 -3.17327595e+00 -1.36737216e+00] ... [-8.49819064e-01 1.45533144e-01 4.39285946e+00 ... -2.88345289e+00 -3.06033945e+00 4.24503088e+00] [-1.11922622e+00 -3.61087179e+00 -1.96317315e+00 ... -1.93394756e+00 1.65887249e+00 -2.50731754e+00] [ 4.33173656e-01 -1.35311532e+00 -1.24758220e+00 ... -1.97181165e-01 -2.39766431e+00 -5.60745537e-01]]] [[[ 2.18367219e+00 3.99744081e+00 1.58830869e+00 ... 4.35539389e+00 2.63528633e+00 3.94401932e+00] [ 1.82952738e+00 -5.86758494e-01 2.47937703e+00 ... 4.67433023e+00 4.31842995e+00 3.57260561e+00] [ 3.09778309e+00 4.03239441e+00 -1.44776952e+00 ... 4.17404938e+00 -2.78263450e-01 1.71991670e+00] ... [ 4.87576294e+00 4.40345764e+00 4.34139442e+00 ... -1.61094940e+00 1.98771811e+00 2.38801646e+00] [-2.70070195e-01 3.30910063e+00 2.00326562e+00 ... 7.14385509e-03 3.16476059e+00 8.21668029e-01] [ 1.16087961e+00 1.56484175e+00 4.66919422e-01 ... 2.27094173e+00 2.68211889e+00 1.13021612e+00]] [[-2.46972561e+00 4.96068239e+00 -2.80117226e+00 ... 1.48383784e+00 3.05708551e+00 3.56425095e+00] [-5.79504251e+00 2.01643848e+00 3.95292282e+00 ... 8.35630608e+00 4.43826151e+00 4.35110664e+00] [ 6.56291008e-01 2.86117435e+00 -3.66253328e+00 ... 2.78941727e+00 4.40743494e+00 4.40194702e+00] ... [ 1.60021901e+00 -1.89521372e+00 -8.00541639e-02 ... -4.01389456e+00 5.10691881e+00 3.39502215e+00] [ 9.30072486e-01 6.57101393e-01 2.75694942e+00 ... 2.58718634e+00 1.76387393e+00 5.16854000e+00] [ 1.44226003e+00 -1.96298003e-01 -1.19983757e+00 ... 1.52040327e+00 2.19322467e+00 1.98698306e+00]] [[ 2.30327511e+00 -9.33138728e-01 1.87229311e+00 ... 2.85309196e-01 2.71407700e+00 4.36236191e+00] [ 3.97591257e+00 5.93667388e-01 2.54193187e+00 ... 3.18030930e+00 2.31534624e+00 -8.07700276e-01] [-6.70994639e-01 2.63933539e-01 -2.93579197e+00 ... 
-4.20936942e-01 2.11518002e+00 -5.19986510e-01] ... [ 3.44421172e+00 -1.74875844e+00 5.21407795e+00 ... 3.93753481e+00 1.08331716e+00 -4.02783513e-01] [ 5.27533770e+00 2.14853144e+00 2.04995394e+00 ... 7.04056883e+00 -5.06987453e-01 1.71634626e+00] [ 3.34981441e-01 1.95932031e+00 1.09884501e+00 ... 1.63452554e+00 1.86713672e+00 2.31095505e+00]] ... [[ 3.37424517e+00 2.43894243e+00 -1.61491048e+00 ... 3.09009647e+00 3.22053695e+00 2.71308541e-01] [-1.70602882e+00 1.20489812e+00 -2.27213669e+00 ... 2.48655915e+00 7.67963028e+00 5.80100298e+00] [ 1.99233699e+00 2.56926680e+00 1.17001545e+00 ... 1.70447993e+00 4.12004280e+00 6.40535593e-01] ... [ 3.55142593e+00 4.61966038e+00 1.51970232e+00 ... 4.20458794e+00 -1.35501969e+00 9.75230813e-01] [-2.42494941e-01 4.83624268e+00 6.81972075e+00 ... 4.96854496e+00 -2.62758064e+00 7.85352325e+00] [ 3.01601219e+00 -7.75221467e-01 4.76914644e+00 ... 3.94453144e+00 -5.84050536e-01 5.06496906e+00]] [[-5.32888293e-01 4.99625874e+00 -1.91443121e+00 ... 3.49675512e+00 1.08486700e+00 -3.54555845e-02] [-1.57435811e+00 3.80897117e+00 3.67666197e+00 ... 1.00731599e+00 2.68432856e+00 5.14210415e+00] [-1.59106612e-01 3.02857161e+00 1.57060969e+00 ... -2.06977606e+00 -3.58182192e-02 4.26978636e+00] ... [ 8.05478334e-01 6.99620247e+00 -9.60164070e-02 ... 3.84308219e-01 5.08225584e+00 3.09151435e+00] [ 7.29126573e-01 2.45251369e+00 5.29892874e+00 ... 2.73630333e+00 1.02785146e+00 7.28295755e+00] [ 2.16427851e+00 -2.48075962e+00 4.75236750e+00 ... 1.14745510e+00 3.20360804e+00 2.82388783e+00]] [[ 1.61931372e+00 5.43346310e+00 1.71734595e+00 ... 1.67246771e+00 1.68195856e+00 1.03421211e+00] [ 4.71670675e+00 4.29856300e+00 6.35399294e+00 ... 1.03365302e+00 6.98279428e+00 3.60264516e+00] [ 1.23791206e+00 3.90627909e+00 3.44657326e+00 ... 4.23137903e+00 1.51707900e+00 6.11347103e+00] ... [ 2.65933251e+00 5.44710493e+00 1.36695993e+00 ... 3.17277145e+00 3.31355667e+00 1.87676728e+00] [ 1.08758974e+00 3.37312222e+00 -1.44765365e+00 ... 
1.71104383e+00 4.58676338e+00 -3.85415912e-01] [ 2.13020253e+00 1.84736633e+00 2.93769526e+00 ... 3.90533447e+00 1.77719474e-01 1.36582065e+00]]]]]; ov_res: [[[[[-1.16486168e+00 1.95975447e+00 1.75549686e-01 ... 3.23982882e+00 -8.87901723e-01 2.55055976e+00] [ 1.92067170e+00 -3.63207912e+00 -6.42200768e-01 ... -1.17311954e+00 8.75703394e-01 -7.29946315e-01] [ 1.84456205e+00 2.82744288e+00 -1.22506666e+00 ... 8.28254819e-01 -8.30580533e-01 1.17595673e+00] ... [ 2.55808020e+00 2.13703966e+00 2.80489230e+00 ... 2.55771661e+00 -2.82649398e+00 3.03765178e+00] [-1.95917487e+00 -4.18445140e-01 -1.28088498e+00 ... 1.61853170e+00 1.84718996e-01 -1.27089620e+00] [ 3.96974564e-01 -6.57324046e-02 6.55971646e-01 ... -6.66315734e-01 -8.47731411e-01 4.54763830e-01]] [[-5.62918997e+00 2.68719697e+00 -2.82992840e+00 ... 1.95048761e+00 1.31529236e+00 -1.18407421e-01] [-6.80274057e+00 2.33407640e+00 5.44641852e-01 ... 3.02761745e+00 8.36944878e-01 2.28375387e+00] [ 2.03303671e+00 -4.35549212e+00 -6.56993210e-01 ... 4.13482070e-01 5.18987036e+00 8.11511159e-01] ... [-9.89940703e-01 -3.01117945e+00 -3.33957052e+00 ... -6.52679205e+00 1.82470322e+00 1.37743282e+00] [ 1.62510085e+00 3.12298477e-01 -4.52092797e-01 ... 1.95532084e+00 -2.00446948e-01 2.79520631e+00] [-1.31803370e+00 3.69028568e-01 -9.85817313e-01 ... -1.04391384e+00 3.18695515e-01 -9.58654940e-01]] [[ 7.08910525e-01 -2.80760813e+00 -1.45173764e+00 ... 6.55145705e-01 3.06199765e+00 1.75962162e+00] [ 1.53693652e+00 -1.77115059e+00 -2.49753809e+00 ... 9.89224494e-01 -3.02352011e-01 -2.82139373e+00] [-3.22700000e+00 -1.78373337e+00 -1.76027918e+00 ... -1.78870702e+00 -1.52302241e+00 -9.31358051e+00] ... [-2.47921795e-02 2.17695522e+00 -2.04855609e+00 ... 4.55000973e+00 -1.35780454e+00 -1.36994267e+00] [ 1.54895926e+00 -1.65439963e+00 -1.17191339e+00 ... 6.58905089e-01 -2.67278361e+00 2.03686643e+00] [-1.46801615e+00 4.45403457e-01 1.15604639e+00 ... -3.15132046e+00 1.65522742e+00 8.64961922e-01]] ... 
[[ 2.09128809e+00 -1.71001911e+00 -1.78775597e+00 ... 1.40174314e-01 1.75851822e+00 2.06416154e+00] [-3.10361385e+00 -4.74549010e-02 -1.77956581e+00 ... -4.98279065e-01 5.58266544e+00 -1.88735932e-01] [-2.93494034e+00 5.91802299e-01 -1.39236355e+00 ... -1.14780021e+00 6.38272613e-03 -1.15498281e+00] ... [-2.50639510e+00 8.86778653e-01 9.22033042e-02 ... 5.96916437e+00 -1.67272258e+00 2.96378732e-02] [ 5.27509600e-02 -7.07896054e-01 4.77005148e+00 ... 4.44923878e-01 -3.10060072e+00 1.92274046e+00] [ 1.48207402e+00 -2.87692904e+00 1.10484529e+00 ... -4.77245450e-01 9.82026279e-01 -1.14258122e+00]] [[-3.41814947e+00 1.56423956e-01 -1.03315639e+00 ... -2.21909761e+00 1.49954796e+00 2.27410674e+00] [-3.17706287e-01 3.13292205e-01 2.01222420e+00 ... -1.40788287e-01 -6.53785706e-01 2.56896734e-01] [-4.69751787e+00 -3.68220210e+00 -2.32200527e+00 ... -3.25731206e+00 -3.32414937e+00 -1.02657175e+00] ... [-7.35026240e-01 4.83422422e+00 -3.42900538e+00 ... -8.42004776e-01 2.00921965e+00 2.02825332e+00] [ 1.17052078e+00 -5.38515234e+00 5.72148800e+00 ... 1.18837047e+00 -1.14006567e+00 1.40913582e+00] [-2.35579982e-01 -1.44086456e+00 -3.75619262e-01 ... 9.04550612e-01 -4.90535825e-01 1.86667562e+00]] [[ 2.22809047e-01 3.12344575e+00 1.61529946e+00 ... -1.99353385e+00 1.97806215e+00 -3.33138406e-01] [ 2.98645234e+00 5.77102602e-01 3.48361897e+00 ... 2.97419727e-02 2.32206917e+00 1.60794759e+00] [-1.59830594e+00 1.45083022e+00 1.53284431e+00 ... 4.39400530e+00 7.29901016e-01 8.93280208e-01] ... [-9.58699524e-01 4.80426741e+00 -2.64128542e+00 ... -8.64004016e-01 -5.60667887e-02 -4.70197380e-01] [-1.97965312e+00 1.61919856e+00 -1.09873593e+00 ... 2.95620680e+00 2.22380090e+00 -2.95053124e+00] [ 8.06811988e-01 -1.50686312e+00 7.67561197e-02 ... -1.51698947e-01 -1.33401537e+00 6.33213967e-02]]] [[[-5.14570117e-01 -5.74399710e+00 -1.12444758e-02 ... -6.29427671e+00 -2.11266708e+00 -5.91908026e+00] [-5.17331839e+00 6.70931625e+00 -3.72106314e+00 ... 
-6.98837519e+00 -6.71725988e+00 -1.04214489e+00] [-1.00614367e+01 -5.73059273e+00 4.43092489e+00 ... -5.69228649e+00 4.01114225e+00 -7.19944239e-01] ... [-6.06196165e+00 -3.53886652e+00 -3.97500801e+00 ... 4.04000807e+00 2.10159421e-01 -3.37538433e+00] [ 1.93713415e+00 -5.66586971e+00 -3.80337143e+00 ... -3.36638069e+00 -2.82918715e+00 2.25883150e+00] [ 1.31018698e+00 -2.11543989e+00 -2.33998108e+00 ... 4.81999159e-01 -1.30393302e+00 2.36324668e-01]] [[ 3.12940931e+00 -2.44220233e+00 4.20858002e+00 ... -2.15843916e+00 -1.31743050e+00 1.05877149e+00] [ 4.10137844e+00 -7.37141943e+00 4.87956643e-01 ... -3.92442405e-01 -1.69857025e+00 -3.33195019e+00] [ 1.45836420e+01 9.91834998e-02 4.86783981e-01 ... -5.37869787e+00 -1.51436749e+01 -1.71221256e+00] ... [ 3.98224688e+00 4.85911512e+00 -7.54694700e-01 ... -1.07906449e+00 8.32207322e-01 -6.99475908e+00] [ 5.51737309e-01 5.73524666e+00 -8.98752689e-01 ... 1.09252672e+01 -6.32007599e+00 -3.30182123e+00] [ 1.41780257e-01 -1.24539590e+00 -1.46092916e+00 ... -1.23805523e+00 -4.69065571e+00 -7.10400045e-01]] [[-4.41448689e+00 2.46221972e+00 -7.30072403e+00 ... -3.41078281e+00 -1.00364447e+00 1.59015775e-01] [ 1.97716439e+00 -1.18484175e+00 5.61840534e+00 ... 9.29077911e+00 7.91777968e-02 -3.06839418e+00] [-5.55906439e+00 1.04208755e+01 -3.24733353e+00 ... 2.59448862e+00 1.55638063e+00 4.53676987e+00] ... [-3.99980736e+00 3.84053850e+00 -1.06282816e+01 ... -3.88082981e+00 -1.67293096e+00 7.79800653e+00] [-1.91653132e+00 2.18190432e+00 -4.41015601e-01 ... -8.71059704e+00 -3.67721605e+00 -4.71848583e+00] [-4.47307444e+00 1.24833524e+00 -3.86033624e-01 ... -3.25024033e+00 6.72716498e-02 -3.85772467e+00]] ... [[-7.49085128e-01 -6.52750063e+00 2.44617558e+00 ... -6.20284414e+00 -3.76079512e+00 7.80584860e+00] [-4.49883908e-01 2.45799255e+00 3.92433262e+00 ... -5.38096094e+00 -2.78516102e+00 6.26413465e-01] [ 6.67983627e+00 -9.12495995e+00 -6.68860376e-01 ... 5.30162048e+00 -5.96670294e+00 -2.89335871e+00] ... 
[-6.16836834e+00 -7.16161871e+00 7.83397722e+00 ... -7.72206903e-01 8.59121084e-01 -5.54122210e+00] [ 4.27480030e+00 3.06209803e-01 3.33863616e-01 ... -1.33696117e+01 8.24823570e+00 -1.38704815e+01] [ 6.61407948e-01 -3.13144302e+00 -1.70520616e+00 ... -2.93274164e+00 2.66180134e+00 -4.94853640e+00]] [[ 1.42972243e+00 -6.42278862e+00 1.58516133e+00 ... -3.25986004e+00 1.98125517e+00 -9.24234569e-01] [ 8.94839883e-01 -3.76688671e+00 4.65026736e-01 ... -1.30542088e+00 -6.45872068e+00 -1.23324156e-01] [ 5.73636961e+00 -7.87196636e+00 2.42636347e+00 ... -8.25974560e+00 7.85124421e-01 -1.28420429e+01] ... [ 6.20776606e+00 -1.29892912e+01 -5.38769722e+00 ... -2.93206596e+00 -5.23698616e+00 3.29839993e+00] [-3.01401663e+00 6.75638342e+00 -5.23836553e-01 ... 6.13560581e+00 -6.20777655e+00 -3.53362370e+00] [-4.70746279e-01 1.94997394e+00 -3.20397043e+00 ... -1.82682455e-01 5.23093820e-01 -5.67995167e+00]] [[-2.14128304e+00 -1.60115564e+00 -2.58552933e+00 ... -6.66512609e-01 -4.28527832e+00 -1.13651872e+00] [-2.79081404e-01 -3.46878004e+00 -1.30975604e+00 ... 9.59937930e-01 9.36082244e-01 -6.29448414e-01] [-2.31331038e+00 -5.67758441e-01 -5.01957798e+00 ... -3.30948687e+00 -3.17327595e+00 -1.36737204e+00] ... [-8.49819064e-01 1.45533204e-01 4.39285946e+00 ... -2.88345289e+00 -3.06033945e+00 4.24503088e+00] [-1.11922622e+00 -3.61087179e+00 -1.96317303e+00 ... -1.93394756e+00 1.65887272e+00 -2.50731754e+00] [ 4.33173656e-01 -1.35311532e+00 -1.24758220e+00 ... -1.97181165e-01 -2.39766431e+00 -5.60745537e-01]]] [[[ 2.18367219e+00 3.99744081e+00 1.58830869e+00 ... 4.35539389e+00 2.63528633e+00 3.94401932e+00] [ 1.82952738e+00 -5.86758494e-01 2.47937703e+00 ... 4.67432976e+00 4.31842995e+00 3.57260561e+00] [ 3.09778285e+00 4.03239489e+00 -1.44776952e+00 ... 4.17404938e+00 -2.78263211e-01 1.71991670e+00] ... [ 4.87576294e+00 4.40345764e+00 4.34139442e+00 ... -1.61094940e+00 1.98771811e+00 2.38801646e+00] [-2.70070195e-01 3.30910063e+00 2.00326562e+00 ... 
7.14385509e-03 3.16476059e+00 8.21668148e-01] [ 1.16087961e+00 1.56484175e+00 4.66919422e-01 ... 2.27094173e+00 2.68211889e+00 1.13021612e+00]] [[-2.46972561e+00 4.96068239e+00 -2.80117178e+00 ... 1.48383796e+00 3.05708551e+00 3.56425095e+00] [-5.79504251e+00 2.01643825e+00 3.95292234e+00 ... 8.35630608e+00 4.43826199e+00 4.35110664e+00] [ 6.56290770e-01 2.86117435e+00 -3.66253328e+00 ... 2.78941727e+00 4.40743446e+00 4.40194702e+00] ... [ 1.60021889e+00 -1.89521444e+00 -8.00541639e-02 ... -4.01389456e+00 5.10691881e+00 3.39502192e+00] [ 9.30072486e-01 6.57101512e-01 2.75694919e+00 ... 2.58718634e+00 1.76387382e+00 5.16854000e+00] [ 1.44226003e+00 -1.96298003e-01 -1.19983757e+00 ... 1.52040327e+00 2.19322467e+00 1.98698306e+00]] [[ 2.30327511e+00 -9.33138967e-01 1.87229323e+00 ... 2.85309076e-01 2.71407700e+00 4.36236143e+00] [ 3.97591257e+00 5.93667388e-01 2.54193211e+00 ... 3.18030930e+00 2.31534600e+00 -8.07700276e-01] [-6.70994401e-01 2.63933659e-01 -2.93579197e+00 ... -4.20936942e-01 2.11517978e+00 -5.19986272e-01] ... [ 3.44421196e+00 -1.74875844e+00 5.21407843e+00 ... 3.93753481e+00 1.08331728e+00 -4.02784228e-01] [ 5.27533770e+00 2.14853168e+00 2.04995394e+00 ... 7.04056883e+00 -5.06987214e-01 1.71634603e+00] [ 3.34981441e-01 1.95932043e+00 1.09884501e+00 ... 1.63452554e+00 1.86713672e+00 2.31095505e+00]] ... [[ 3.37424517e+00 2.43894243e+00 -1.61491024e+00 ... 3.09009647e+00 3.22053695e+00 2.71308541e-01] [-1.70602834e+00 1.20489812e+00 -2.27213717e+00 ... 2.48655891e+00 7.67963028e+00 5.80100298e+00] [ 1.99233675e+00 2.56926680e+00 1.17001534e+00 ... 1.70447981e+00 4.12004280e+00 6.40535593e-01] ... [ 3.55142570e+00 4.61966085e+00 1.51970220e+00 ... 4.20458794e+00 -1.35501969e+00 9.75230813e-01] [-2.42494702e-01 4.83624315e+00 6.81972075e+00 ... 4.96854448e+00 -2.62758064e+00 7.85352325e+00] [ 3.01601219e+00 -7.75221467e-01 4.76914644e+00 ... 3.94453144e+00 -5.84050536e-01 5.06496906e+00]] [[-5.32888293e-01 4.99625874e+00 -1.91443145e+00 ... 
3.49675512e+00 1.08486700e+00 -3.54555845e-02] [-1.57435811e+00 3.80897117e+00 3.67666197e+00 ... 1.00731587e+00 2.68432832e+00 5.14210415e+00] [-1.59106612e-01 3.02857208e+00 1.57060981e+00 ... -2.06977606e+00 -3.58177423e-02 4.26978588e+00] ... [ 8.05478334e-01 6.99620295e+00 -9.60165262e-02 ... 3.84308338e-01 5.08225536e+00 3.09151411e+00] [ 7.29126453e-01 2.45251369e+00 5.29892874e+00 ... 2.73630285e+00 1.02785158e+00 7.28295803e+00] [ 2.16427851e+00 -2.48075962e+00 4.75236750e+00 ... 1.14745510e+00 3.20360804e+00 2.82388783e+00]] [[ 1.61931372e+00 5.43346310e+00 1.71734595e+00 ... 1.67246771e+00 1.68195856e+00 1.03421211e+00] [ 4.71670675e+00 4.29856300e+00 6.35399294e+00 ... 1.03365302e+00 6.98279428e+00 3.60264516e+00] [ 1.23791206e+00 3.90627909e+00 3.44657326e+00 ... 4.23137903e+00 1.51707900e+00 6.11347055e+00] ... [ 2.65933251e+00 5.44710493e+00 1.36695993e+00 ... 3.17277122e+00 3.31355667e+00 1.87676728e+00] [ 1.08758974e+00 3.37312222e+00 -1.44765365e+00 ... 1.71104383e+00 4.58676338e+00 -3.85415912e-01] [ 2.13020253e+00 1.84736633e+00 2.93769526e+00 ... 3.90533447e+00 1.77719474e-01 1.36582065e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 2, 2, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_858.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.9562 -0.3353 0.6505 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, 2, 1, strides=[12, 4, 2, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[[-3.7861814 -0.73842394 2.1401377 ... -2.195821 0.9933983 -1.1498761 ] [-1.7576759 -2.7697353 -6.2176766 ... -0.61733973 -1.7379075 -6.769763 ] [-3.7608712 -0.8522682 -0.3903793 ... -2.9514232 -1.4629031 0.6560706 ] ... [ 4.457705 -5.47894 -5.070668 ... -2.9638128 0.9226161 -3.0650077 ] [ 5.51103 -4.133684 1.3103746 ... -3.0961742 -1.8395166 -1.4387447 ] [-5.699277 -2.2585423 -6.167118 ... 3.1263623 -0.79031026 -3.1511593 ]] [[-0.59322 -0.86943233 -4.559965 ... -4.0884867 -1.0311027 -5.4197435 ] [-2.9758763 -0.8431051 -4.9342923 ... -3.7013364 -6.9083076 -1.5774297 ] [ 1.7670017 -7.0241895 -4.840575 ... -6.3957624 -1.983522 -2.5094948 ] ... [-4.7821646 0.7090539 -4.6005497 ... -0.9217353 -1.5906612 -4.0955205 ] [ 1.6396629 -5.7119446 2.3074093 ... 1.5728394 -3.2674656 0.35515893] [ 1.0669099 -4.213846 -3.34261 ... -0.8760053 -6.0514336 1.4067413 ]] [[ 2.3957868 -7.782257 -3.4836059 ... 0.05206072 -5.5935545 4.532078 ] [-2.5646508 -2.560885 -1.8533597 ... 
-8.417081 -7.9717464 0.695819 ] [-1.2206738 -1.718999 -0.9277357 ... -1.4909816 -2.7580853 -1.5992223 ] ... [-3.5524292 1.320418 -5.6872582 ... -6.2600694 -1.9543521 -2.5006597 ] [-6.4478483 3.8742313 0.9114436 ... -0.63257766 0.05722272 -4.805943 ] [-1.6543777 -4.555456 -3.6118193 ... -2.5025187 -5.362851 0.21429765]] ... [[-5.1843457 -6.1899347 -2.8004415 ... -2.2475448 -4.2896094 -1.7003254 ] [-3.477624 -7.098173 -1.6055212 ... -0.88027453 1.1214565 -0.970211 ] [ 0.21190298 -2.018347 -5.470886 ... 2.2326517 -2.8548157 -1.9910939 ] ... [ 0.40343583 1.5216063 -1.9032624 ... 1.325364 -2.5503507 -3.1874626 ] [-2.7849007 -0.5483736 0.21743214 ... -0.8052732 -0.12040186 -1.2243171 ] [-0.34219265 1.1523858 -1.567641 ... -2.5263438 1.908955 -2.2636664 ]] [[ 1.23657 -1.7443601 -2.360214 ... -0.43035603 -4.275284 -3.9714289 ] [ 2.016343 1.85398 -1.985933 ... 0.18538535 -4.077783 -0.9730195 ] [ 1.7689818 3.2162151 -0.15767515 ... 0.26051128 -5.6717954 0.237445 ] ... [-2.5248914 -4.806305 -3.0838475 ... -4.18235 1.3772925 -0.13811564] [-3.6436892 -7.114528 -7.2523966 ... -4.5625577 -3.6747599 -5.3975606 ] [ 4.2712245 -5.19431 -4.0386543 ... -4.6925354 -0.5904105 -0.6991378 ]] [[ 0.7523087 0.43974555 -2.8326867 ... -3.4241474 1.814284 0.3858863 ] [-5.895745 -0.9785345 -2.833514 ... -1.8449471 -4.3373213 -2.8406003 ] [-5.7747455 -7.1296043 3.1081362 ... -3.42403 -1.4195256 -2.7198539 ] ... [-2.9990978 -1.8609517 4.487154 ... 2.2183018 2.5134497 2.77591 ] [-2.1470406 1.097864 0.9035922 ... -0.7339324 -6.767225 -0.58415544] [-1.5390663 -3.1259522 -0.56280375 ... 1.563918 2.1534982 0.20376551]]] [[[ 0.34649158 0.07169655 -3.1851676 ... -4.9618983 -4.824468 -2.1151335 ] [ 2.6015823 -1.9646497 -1.6262641 ... -7.085723 -4.2892003 7.023801 ] [ 0.41492462 1.2285793 0.993213 ... -2.7892597 -0.14649738 0.02444869] ... [ 0.4533491 2.4703622 -5.7753716 ... 6.657312 -2.5151663 -6.3761773 ] [-5.126672 -4.94943 -1.2131336 ... 
-1.3976686 -3.6377728 -4.130706 ] [-6.878858 -4.5827203 -2.4007714 ... -1.4136298 -0.3676769 0.08414924]] [[ 2.1462955 -2.2881737 3.9116883 ... 4.475226 -1.6121652 2.9547744 ] [-4.4361267 1.5818949 4.098995 ... 2.7060304 -4.405692 -2.8905342 ] [-3.2295134 0.48514295 -0.20827736 ... 5.2686715 1.2388573 -7.965775 ] ... [-1.957674 -1.0920985 -0.57600415 ... -2.4240763 3.9729486 -3.6623569 ] [-0.10914518 6.1130886 5.265101 ... -2.5116138 1.8680444 -3.5852451 ] [-0.1556911 3.0304248 4.7428327 ... -2.7431588 0.9377547 2.339038 ]] [[-2.1076412 9.398719 1.1454184 ... 0.43063825 8.935925 -8.495989 ] [ 3.199047 -2.0516174 -1.7577178 ... 4.4770093 1.7031322 0.75809306] [-0.75796497 -3.3137982 5.041923 ... -0.87886727 2.4641187 -5.6313314 ] ... [ 2.4241478 -0.93716884 -6.720623 ... -2.6835713 2.7089546 -2.8244371 ] [ 2.1474302 -5.427581 -1.0385903 ... 2.1183348 -1.8281281 -0.93530166] [-4.559045 2.2184408 -1.2605186 ... 3.419835 -5.1896086 -0.6305607 ]] ... [[ 4.7434063 3.954965 2.9887958 ... -0.6974555 3.0767741 2.887664 ] [ 1.4765255 0.37754893 -2.7472768 ... -1.9341674 3.731039 -0.25167778] [-3.2436047 0.70892674 -3.0127602 ... 0.03377038 -2.4797256 -9.8004055 ] ... [ 2.479422 2.260609 -3.8813603 ... 4.6822205 -3.7999074 4.5642023 ] [ 1.6766777 -3.4043748 -0.16941515 ... -1.9425371 -4.396598 0.9903777 ] [ 0.01831284 -1.5710442 3.1383996 ... -0.44349927 -8.852837 -1.332134 ]] [[ 8.964002 1.2149646 -3.5660572 ... 0.7352422 1.1361673 4.6340194 ] [-6.4185333 -5.9186926 -4.0068884 ... -6.979161 -6.9585776 -2.8223438 ] [-2.7893367 -3.4139209 -1.8331828 ... -3.50459 -0.8671958 7.357801 ] ... [ 0.07247058 -3.0399828 5.3109865 ... 5.099536 2.68744 -0.18477096] [ 2.9926057 0.78328246 1.125792 ... 0.18376273 -2.5631633 -1.8825698 ] [-7.883274 0.07116342 -0.22405215 ... 7.7667055 5.5119653 -5.4001017 ]] [[-6.310782 -5.81264 -1.6518605 ... 0.07089785 -5.7058315 1.7637231 ] [ 1.8004551 4.781398 -3.1506052 ... -1.0367686 -3.435567 -1.2562704 ] [ 9.131703 6.258735 -2.9525583 ... 
-1.5192645 3.5122821 1.9405613 ] ... [ 1.7863946 2.1954412 4.8035474 ... 2.8554199 -4.8810687 -1.7489054 ] [-5.9291224 -5.149299 -1.7480347 ... -3.4711647 1.9743588 -0.62414575] [-3.439253 -0.42484418 -4.7212586 ... -3.4030313 -0.39483207 -2.1114278 ]]] [[[-3.145982 3.4110773 2.1639729 ... -1.8726664 3.0002997 -2.4713788 ] [-1.6146202 6.332633 0.82052994 ... 0.5978523 0.98708427 5.4158764 ] [ 3.5745606 1.3147457 -1.7959588 ... 4.1923885 -0.2813133 1.8843782 ] ... [11.159612 -0.17193025 -4.687222 ... 7.321456 -5.805949 0.9199629 ] [ 2.5026069 -0.60962003 -2.4087365 ... 8.119709 -4.642959 9.113121 ] [ 0.04110616 -2.8317723 -2.4755764 ... 3.5736861 -1.3411057 9.082112 ]] [[ 0.24272016 0.61477095 0.5340516 ... 0.4492663 0.5379537 5.084464 ] [ 6.4627957 2.0768013 1.5691566 ... -3.4136782 -5.791447 6.3587904 ] [ 0.8407215 1.9044976 -4.0300665 ... 1.8719435 -0.3154652 1.6977441 ] ... [ 1.093864 3.1973975 -3.2050004 ... 7.429135 4.3736567 1.1110754 ] [ 0.7669772 2.3566465 -4.7856655 ... 5.9393706 -0.91646904 6.386288 ] [ 1.9699235 -0.73398453 5.4618425 ... -6.897592 1.6518905 4.1596093 ]] [[ 0.7073848 3.88428 4.666315 ... -3.83497 3.942663 4.707746 ] [ 2.2181556 2.6327624 -0.06032407 ... -1.970176 1.6831505 2.2166374 ] [-1.5118594 5.5946035 8.71348 ... 2.6659577 0.43050462 -5.071025 ] ... [ 6.665579 2.5687485 -6.096491 ... -1.8639479 5.6557956 -6.265675 ] [-1.2683415 7.609112 -3.671968 ... -1.4761341 1.7011209 -3.2657127 ] [-3.8515844 4.701999 4.366387 ... -3.3874779 6.155339 -5.268693 ]] ... [[-3.1719878 -1.3380527 0.21143392 ... -3.4105306 -1.3714206 4.4747825 ] [-3.908215 7.483357 -1.3531172 ... -4.46583 -3.4926987 0.12526608] [-0.16058415 3.459353 1.3506944 ... 3.6318598 3.3046181 -3.489346 ] ... [-2.4375668 -4.343722 0.6129519 ... 4.4806027 1.1138754 2.6459692 ] [-1.1657763 10.108735 -5.1630073 ... 1.8646724 -0.4972257 -2.7536902 ] [-1.8815572 6.753519 3.563735 ... -3.710518 0.5899491 -2.7731447 ]] [[ 4.6838393 9.679125 -1.5407097 ... 
2.6910865 -3.5697608 2.7545636 ] [-0.5181021 7.1290936 0.31083035 ... -3.6949391 -4.7609954 2.232103 ] [ 1.1859007 -0.04514098 1.56766 ... -4.517863 -3.3085165 1.2915092 ] ... [ 0.7967929 1.0307235 -2.4699144 ... -0.08800095 0.9439077 -0.08166784] [ 0.80068904 -2.6378112 -5.7562633 ... 1.7402792 0.5317044 -0.37500805] [-0.02595025 -0.6805187 -0.1354245 ... 1.8970382 -5.369601 -3.1675146 ]] [[ 3.2782166 10.48339 0.43524432 ... -2.2560275 -0.47493404 7.1319623 ] [ 2.2836845 1.4645929 2.7444909 ... -5.0347567 -2.2679849 1.3226738 ] [ 4.1504245 -2.2790003 -6.6463943 ... -2.850156 0.90017486 -1.7677722 ] ... [ 3.3544445 6.7981896 2.8488646 ... 2.4068007 -6.941788 0.12750757] [ 1.4070599 -0.8457677 5.052655 ... 3.772768 -2.1685443 3.999768 ] [-4.4801674 1.9825628 1.8512177 ... -2.089668 -2.3236067 -3.8244896 ]]]]]; ov_res: [[[[[-3.7861812 -0.73842406 2.1401377 ... -2.1958208 0.99339855 -1.149876 ] [-1.7576755 -2.7697353 -6.217676 ... -0.61733985 -1.7379075 -6.7697635 ] [-3.7608712 -0.8522681 -0.3903793 ... -2.951423 -1.4629033 0.6560706 ] ... [ 4.457705 -5.47894 -5.0706687 ... -2.9638128 0.92261636 -3.0650077 ] [ 5.51103 -4.133684 1.3103749 ... -3.0961742 -1.8395166 -1.4387445 ] [-5.699277 -2.2585423 -6.1671185 ... 3.1263628 -0.7903105 -3.1511593 ]] [[-0.59322 -0.8694321 -4.559965 ... -4.0884867 -1.0311027 -5.4197435 ] [-2.9758763 -0.8431052 -4.934293 ... -3.7013364 -6.9083076 -1.5774297 ] [ 1.7670017 -7.024189 -4.840575 ... -6.395763 -1.9835219 -2.5094953 ] ... [-4.7821646 0.7090539 -4.6005497 ... -0.9217354 -1.5906613 -4.0955205 ] [ 1.6396626 -5.7119446 2.3074098 ... 1.5728394 -3.2674656 0.35515916] [ 1.0669101 -4.2138467 -3.34261 ... -0.8760052 -6.051433 1.406741 ]] [[ 2.3957868 -7.7822576 -3.4836059 ... 0.05206072 -5.5935545 4.5320773 ] [-2.5646505 -2.560885 -1.8533595 ... -8.417081 -7.971746 0.6958195 ] [-1.2206738 -1.718999 -0.9277357 ... -1.4909816 -2.7580853 -1.5992223 ] ... [-3.5524294 1.320418 -5.6872582 ... 
-6.2600694 -1.9543521 -2.5006595 ] [-6.4478483 3.8742313 0.9114438 ... -0.63257766 0.05722249 -4.805943 ] [-1.6543778 -4.555456 -3.611819 ... -2.5025187 -5.3628516 0.21429741]] ... [[-5.1843457 -6.1899347 -2.8004413 ... -2.247545 -4.289609 -1.7003254 ] [-3.4776244 -7.0981727 -1.6055212 ... -0.88027465 1.1214565 -0.9702109 ] [ 0.21190298 -2.0183473 -5.4708858 ... 2.2326522 -2.8548157 -1.9910939 ] ... [ 0.40343583 1.5216058 -1.9032624 ... 1.3253635 -2.5503507 -3.1874623 ] [-2.7849007 -0.54837346 0.21743214 ... -0.8052734 -0.12040174 -1.2243171 ] [-0.34219253 1.1523856 -1.5676409 ... -2.5263438 1.9089552 -2.2636664 ]] [[ 1.2365702 -1.7443602 -2.360214 ... -0.4303559 -4.2752843 -3.9714289 ] [ 2.0163436 1.8539797 -1.985933 ... 0.18538535 -4.077783 -0.9730198 ] [ 1.768982 3.2162156 -0.15767491 ... 0.26051128 -5.6717954 0.237445 ] ... [-2.5248911 -4.8063045 -3.0838475 ... -4.18235 1.3772925 -0.13811564] [-3.6436892 -7.1145287 -7.252397 ... -4.5625577 -3.6747599 -5.3975606 ] [ 4.271225 -5.19431 -4.0386543 ... -4.6925354 -0.5904105 -0.6991377 ]] [[ 0.7523087 0.43974555 -2.8326867 ... -3.4241474 1.814284 0.38588583] [-5.8957443 -0.9785344 -2.833514 ... -1.8449471 -4.3373218 -2.8406005 ] [-5.774745 -7.1296043 3.1081362 ... -3.4240303 -1.4195254 -2.7198539 ] ... [-2.9990978 -1.8609519 4.487154 ... 2.2183022 2.5134497 2.77591 ] [-2.1470406 1.097864 0.9035922 ... -0.7339324 -6.767225 -0.5841553 ] [-1.5390663 -3.1259522 -0.56280386 ... 1.5639182 2.1534982 0.20376551]]] [[[ 0.34649158 0.07169661 -3.185167 ... -4.9618983 -4.8244677 -2.1151328 ] [ 2.6015825 -1.9646494 -1.6262641 ... -7.085723 -4.2892003 7.0238004 ] [ 0.41492468 1.2285795 0.99321276 ... -2.7892592 -0.14649732 0.02444869] ... [ 0.45334923 2.4703622 -5.7753716 ... 6.657312 -2.5151663 -6.3761773 ] [-5.126672 -4.9494295 -1.2131338 ... -1.3976686 -3.6377728 -4.130706 ] [-6.878858 -4.5827208 -2.4007716 ... -1.41363 -0.3676769 0.08414912]] [[ 2.1462955 -2.2881737 3.9116888 ... 
4.475226 -1.6121652 2.9547744 ] [-4.4361267 1.5818951 4.0989947 ... 2.7060304 -4.4056926 -2.8905342 ] [-3.2295136 0.485143 -0.20827748 ... 5.2686715 1.2388573 -7.965775 ] ... [-1.9576738 -1.0920985 -0.5760044 ... -2.4240763 3.972949 -3.6623566 ] [-0.10914518 6.1130886 5.265101 ... -2.5116138 1.8680441 -3.5852451 ] [-0.15569128 3.0304248 4.742833 ... -2.7431588 0.93775445 2.3390384 ]] [[-2.1076412 9.398719 1.1454184 ... 0.43063802 8.9359255 -8.495989 ] [ 3.1990473 -2.0516171 -1.7577178 ... 4.47701 1.7031322 0.7580928 ] [-0.75796473 -3.3137984 5.041923 ... -0.8788671 2.4641187 -5.6313305 ] ... [ 2.4241478 -0.93716884 -6.720623 ... -2.6835718 2.7089548 -2.8244376 ] [ 2.1474304 -5.427581 -1.0385902 ... 2.1183345 -1.8281281 -0.9353014 ] [-4.559045 2.2184415 -1.2605183 ... 3.4198349 -5.1896086 -0.63056093]] ... [[ 4.7434053 3.9549656 2.9887953 ... -0.69745547 3.076774 2.887664 ] [ 1.4765258 0.3775488 -2.7472763 ... -1.9341671 3.7310395 -0.25167766] [-3.243605 0.70892674 -3.0127602 ... 0.03377038 -2.4797258 -9.8004055 ] ... [ 2.479422 2.2606087 -3.8813603 ... 4.6822205 -3.7999074 4.5642023 ] [ 1.6766777 -3.4043746 -0.16941516 ... -1.9425371 -4.396598 0.9903776 ] [ 0.01831236 -1.571044 3.1384003 ... -0.44349927 -8.852837 -1.332134 ]] [[ 8.964002 1.2149642 -3.5660572 ... 0.73524266 1.1361675 4.6340203 ] [-6.418533 -5.9186926 -4.0068884 ... -6.9791603 -6.958578 -2.822344 ] [-2.789337 -3.4139209 -1.8331828 ... -3.50459 -0.86719567 7.357801 ] ... [ 0.07247049 -3.039983 5.3109865 ... 5.099536 2.6874402 -0.18477108] [ 2.9926054 0.78328246 1.1257923 ... 0.18376225 -2.5631633 -1.8825698 ] [-7.883274 0.07116354 -0.22405215 ... 7.7667065 5.5119658 -5.400101 ]] [[-6.3107824 -5.81264 -1.6518602 ... 0.07089752 -5.7058315 1.7637231 ] [ 1.8004549 4.781398 -3.1506054 ... -1.0367687 -3.435567 -1.2562704 ] [ 9.131703 6.258735 -2.9525588 ... -1.5192645 3.512282 1.9405613 ] ... [ 1.7863948 2.1954417 4.8035474 ... 2.8554199 -4.8810687 -1.7489052 ] [-5.9291224 -5.1492996 -1.7480347 ... 
-3.471165 1.9743583 -0.6241454 ] [-3.4392538 -0.4248444 -4.7212596 ... -3.4030313 -0.39483213 -2.1114283 ]]] [[[-3.145982 3.4110773 2.163973 ... -1.8726664 3.0002997 -2.471379 ] [-1.6146202 6.332633 0.8205299 ... 0.59785277 0.987084 5.415877 ] [ 3.5745606 1.3147459 -1.7959588 ... 4.192388 -0.28131312 1.8843782 ] ... [11.159613 -0.17193055 -4.6872225 ... 7.3214555 -5.805949 0.9199631 ] [ 2.5026064 -0.60962015 -2.4087365 ... 8.119709 -4.6429586 9.11312 ] [ 0.04110616 -2.831772 -2.4755757 ... 3.573686 -1.3411059 9.082112 ]] [[ 0.24272016 0.61477095 0.5340516 ... 0.44926643 0.53795373 5.084464 ] [ 6.4627957 2.076801 1.5691566 ... -3.4136782 -5.791447 6.3587904 ] [ 0.8407212 1.9044976 -4.0300665 ... 1.8719435 -0.31546497 1.6977439 ] ... [ 1.0938637 3.1973977 -3.2050009 ... 7.429135 4.3736567 1.1110752 ] [ 0.7669772 2.3566465 -4.7856665 ... 5.9393706 -0.91646916 6.3862886 ] [ 1.9699235 -0.73398453 5.4618425 ... -6.897591 1.6518903 4.1596093 ]] [[ 0.7073851 3.8842795 4.666315 ... -3.83497 3.942663 4.707746 ] [ 2.2181559 2.6327627 -0.06032372 ... -1.9701762 1.6831505 2.2166376 ] [-1.5118587 5.5946026 8.71348 ... 2.6659575 0.43050474 -5.0710254 ] ... [ 6.6655784 2.5687485 -6.0964913 ... -1.8639481 5.6557956 -6.265675 ] [-1.2683415 7.6091123 -3.671968 ... -1.4761341 1.7011209 -3.2657123 ] [-3.8515844 4.701999 4.3663864 ... -3.3874779 6.1553392 -5.268693 ]] ... [[-3.1719873 -1.3380527 0.21143422 ... -3.41053 -1.3714209 4.474782 ] [-3.9082146 7.483357 -1.3531172 ... -4.46583 -3.4926987 0.12526608] [-0.16058421 3.459353 1.3506943 ... 3.6318603 3.3046181 -3.4893465 ] ... [-2.4375665 -4.343722 0.6129517 ... 4.4806027 1.1138756 2.6459694 ] [-1.1657763 10.108734 -5.1630073 ... 1.8646724 -0.49722558 -2.7536902 ] [-1.881557 6.7535195 3.563735 ... -3.710518 0.5899492 -2.7731445 ]] [[ 4.6838393 9.679125 -1.5407097 ... 2.6910865 -3.5697603 2.7545636 ] [-0.5181019 7.1290946 0.3108303 ... -3.6949391 -4.760996 2.232103 ] [ 1.1859007 -0.0451411 1.5676594 ... 
-4.5178633 -3.3085165 1.2915096 ] ... [ 0.7967928 1.0307235 -2.4699142 ... -0.08800095 0.9439078 -0.08166772] [ 0.8006891 -2.637812 -5.7562633 ... 1.7402792 0.53170455 -0.37500817] [-0.02595037 -0.6805188 -0.13542438 ... 1.8970382 -5.369601 -3.1675146 ]] [[ 3.2782164 10.48339 0.43524435 ... -2.2560272 -0.47493404 7.1319623 ] [ 2.283685 1.464593 2.7444906 ... -5.0347567 -2.267985 1.3226737 ] [ 4.150425 -2.2790005 -6.646394 ... -2.850156 0.90017474 -1.7677722 ] ... [ 3.3544447 6.7981887 2.848864 ... 2.4068005 -6.941788 0.12750769] [ 1.4070598 -0.8457677 5.052655 ... 3.7727678 -2.1685448 3.999768 ] [-4.480167 1.9825625 1.8512177 ... -2.0896678 -2.323607 -3.8244896 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_860.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.8489 -1.0474 0.2179 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.6810 (2,1,1,.,.) = -1.6048 (3,1,1,.,.) = 0.4029 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[ 1.25891685e+00 1.95653951e+00 1.98588359e+00 ... 1.55537522e+00 1.78404498e+00 3.00107813e+00] [ 9.81506526e-01 1.89060462e+00 2.28161430e+00 ... 3.05258584e+00 1.30505216e+00 1.61105537e+00] [ 1.84756875e+00 1.50888991e+00 1.49568474e+00 ... 1.06601548e+00 1.65329921e+00 2.97947884e+00] ... [ 7.56551385e-01 1.92326009e+00 1.35202825e+00 ... 1.12918890e+00 2.43312335e+00 1.47224319e+00] [ 1.52979553e+00 3.07136464e+00 1.66623497e+00 ... 1.83018339e+00 1.68354356e+00 1.67759407e+00] [ 1.91476035e+00 9.53161001e-01 3.12537956e+00 ... 1.39554977e+00 2.62617040e+00 2.29116344e+00]] [[ 1.56499052e+00 1.56993532e+00 1.53375363e+00 ... 2.20872068e+00 9.96384084e-01 1.24166465e+00] [ 1.75859892e+00 1.79686785e+00 2.97171497e+00 ... 3.55834961e-01 1.44628167e+00 1.52364981e+00] [ 2.25410891e+00 1.46053839e+00 1.81898391e+00 ... 
2.19559479e+00 1.76158559e+00 1.22004819e+00] ... [ 2.89333916e+00 1.09120309e+00 1.60963666e+00 ... 1.69413447e-01 9.82254386e-01 2.23934507e+00] [ 1.71665156e+00 -5.07389665e-01 1.22466731e+00 ... 1.75241041e+00 2.35466886e+00 1.78600025e+00] [ 1.21592426e+00 2.69618011e+00 1.99586320e+00 ... 2.13635182e+00 2.23794007e+00 8.38610888e-01]] [[ 1.45544696e+00 1.63298368e+00 2.03639054e+00 ... 1.87356663e+00 2.03786397e+00 1.68241119e+00] [ 2.34768677e+00 1.87839496e+00 1.08712208e+00 ... 1.49891233e+00 1.76035357e+00 1.87601745e+00] [ 1.24233866e+00 8.76626790e-01 2.13546085e+00 ... 1.41679180e+00 1.83070838e+00 2.39617705e+00] ... [ 2.28267241e+00 1.47688723e+00 1.42396617e+00 ... 1.64582062e+00 1.60523975e+00 2.02078438e+00] [ 2.13443351e+00 1.10769272e+00 2.24900889e+00 ... 1.36424184e+00 2.23356295e+00 4.98826504e-01] [ 1.86560154e+00 1.70577693e+00 1.76136744e+00 ... 2.96451902e+00 9.31547642e-01 2.46616364e+00]] ... [[ 1.58097172e+00 1.90793109e+00 2.10335875e+00 ... 1.24253941e+00 1.80101252e+00 2.46061659e+00] [ 1.98825598e+00 1.74074697e+00 2.02114367e+00 ... 2.26273799e+00 1.42565346e+00 1.76362681e+00] [ 1.91512573e+00 1.69614494e+00 1.96156359e+00 ... 1.01080596e+00 2.12732172e+00 1.41030598e+00] ... [ 2.07628179e+00 1.43507910e+00 1.24069715e+00 ... 1.14545226e+00 1.88053703e+00 1.06854272e+00] [ 1.80538940e+00 3.16373849e+00 4.06160975e+00 ... 3.34287357e+00 2.24037600e+00 1.62715757e+00] [ 2.59068203e+00 1.70729649e+00 2.12126899e+00 ... 3.02318549e+00 8.75127196e-01 1.44308114e+00]] [[ 1.88704741e+00 1.94626486e+00 1.10008907e+00 ... 1.88639188e+00 1.55291712e+00 2.19770813e+00] [ 2.43028450e+00 1.70693839e+00 3.31225824e+00 ... 1.36321092e+00 2.17641544e+00 3.06355834e-01] [ 1.74323297e+00 1.85414004e+00 1.13656974e+00 ... 1.69262350e+00 1.44008315e+00 1.43421602e+00] ... [ 1.84592116e+00 1.49331558e+00 2.44289374e+00 ... 1.47783208e+00 1.23806357e+00 2.48098588e+00] [ 1.90843296e+00 2.56585050e+00 2.66200924e+00 ... 
1.07355499e+00 8.61859143e-01 1.58755732e+00] [ 1.99960780e+00 1.03758538e+00 1.84538424e+00 ... 2.23130918e+00 1.77120161e+00 1.72764611e+00]] [[ 2.05501270e+00 9.87521589e-01 1.58279991e+00 ... 1.98338866e+00 2.21555948e+00 2.13417029e+00] [ 1.99572968e+00 3.48513556e+00 9.79872584e-01 ... 6.22085810e-01 4.75251675e-03 1.27875805e+00] [ 2.10039091e+00 1.19648027e+00 2.85322523e+00 ... 2.22103405e+00 2.02595043e+00 1.70845222e+00] ... [ 1.89758253e+00 2.42891455e+00 1.04459882e+00 ... 1.82246447e+00 2.25994658e+00 1.85044014e+00] [ 8.21009040e-01 2.59912395e+00 2.31239128e+00 ... 2.71850824e+00 1.88633943e+00 8.54207993e-01] [ 1.49922144e+00 6.04425669e-01 2.07253957e+00 ... 2.01444745e+00 1.98703432e+00 1.68210328e+00]]] [[[-1.89274645e+00 -1.34434438e+00 2.27816343e-01 ... -2.36256540e-01 -3.02935195e+00 -3.86978579e+00] [-1.43702793e+00 -2.96185017e+00 1.84425247e+00 ... -2.16958284e-01 -7.51673698e-01 1.03822720e+00] [ 6.02662921e-01 -2.30322742e+00 -9.39988792e-01 ... 5.07598042e-01 -5.77466488e-01 -6.72793269e-01] ... [-1.23451364e+00 -1.67488933e+00 1.47160530e-01 ... -1.98963881e-01 -1.43339312e+00 -8.95589530e-01] [-5.19363523e-01 4.92438912e-01 -2.15198612e+00 ... 2.05763340e+00 -3.32935238e+00 -3.06940031e+00] [-6.65909886e-01 -1.59132552e+00 1.78748250e-01 ... 1.60560071e+00 1.32393682e+00 3.57631207e-01]] [[-5.53330064e-01 -1.90562832e+00 1.04715121e+00 ... 3.85535598e-01 -4.41679120e-01 -2.89483404e+00] [-1.31468594e+00 -1.27190280e+00 -8.46252561e-01 ... -2.38442969e+00 -4.54693973e-01 3.59978437e+00] [ 1.17828786e+00 -2.08978558e+00 5.32487392e-01 ... -3.00053906e+00 -3.64839840e+00 -1.43362379e+00] ... [-1.64266944e+00 7.15780258e-03 3.94136071e-01 ... 6.58522844e-01 -4.75861597e+00 -2.38934135e+00] [-1.70013857e+00 1.46407187e+00 -1.77194977e+00 ... -2.63043165e+00 -1.40398324e+00 1.07998955e+00] [-1.16383243e+00 1.30963290e+00 3.69158149e-01 ... 1.11701488e-02 1.13007426e-01 2.81129003e-01]] [[-4.51377296e+00 -3.83570004e+00 -2.74556947e+00 ... 
-1.75489545e-01 -2.07681060e-01 -1.98464429e+00] [-2.45303774e+00 5.77015877e-02 3.68236661e-01 ... -2.43151367e-01 9.58519340e-01 7.43455052e-01] [-2.26366234e+00 -2.06357121e+00 7.91641712e-01 ... -2.30332565e+00 -4.65656614e+00 -8.91255379e-01] ... [ 1.65201700e+00 -6.28191471e-01 -1.46268702e+00 ... -2.58391476e+00 -1.31459785e+00 -6.29704535e-01] [-3.38382578e+00 -8.69489014e-01 5.32996178e-01 ... -5.28393328e-01 -2.87784910e+00 -1.23799026e+00] [-1.97136259e+00 -1.64107442e+00 8.05955648e-01 ... -2.72985697e-02 -2.76473951e+00 -9.31673050e-01]] ... [[-2.72076035e+00 -3.81451368e+00 1.71061397e-01 ... -2.22479963e+00 -1.84022403e+00 -9.74879742e-01] [-2.31727064e-01 -2.50874043e+00 1.67236412e+00 ... -2.25081182e+00 7.50741959e-02 -2.13347149e+00] [ 2.24428535e-01 8.68260384e-01 -8.99921298e-01 ... -1.85386729e+00 1.07291591e+00 -4.51735079e-01] ... [-2.60747957e+00 1.10773313e+00 1.88503611e+00 ... -1.83037376e+00 -3.45790863e+00 -1.58020663e+00] [-2.67505646e-02 -5.72367191e-01 -1.44710648e+00 ... -1.99189305e+00 -1.43741786e+00 -2.38314533e+00] [-3.65861237e-01 -1.86858559e+00 -1.61948621e+00 ... -3.01612329e+00 -1.15048885e+00 -1.79338145e+00]] [[-2.86502647e+00 -3.71086645e+00 -7.43300319e-01 ... -4.07065916e+00 -3.66544664e-01 -1.84717035e+00] [-1.24169850e+00 -3.20096159e+00 -1.99918318e+00 ... -3.05982685e+00 -2.74528193e+00 -1.57490063e+00] [-3.28415394e+00 -2.78492498e+00 -9.90800858e-02 ... -2.96077013e+00 -2.47553587e-02 4.61546183e-02] ... [-1.01577199e+00 -1.07310104e+00 -4.93167222e-01 ... 9.65551972e-01 -2.36301661e+00 1.37494600e+00] [-1.02366149e-01 -4.14576530e+00 -7.67473221e-01 ... -4.10639954e+00 -2.18897104e+00 -3.25956917e+00] [-5.98678231e-01 -1.91318488e+00 -2.11587191e+00 ... -2.05059826e-01 2.29350471e+00 -1.10605764e+00]] [[ 1.45488489e+00 -1.15511215e+00 -3.14815378e+00 ... -7.20782995e-01 -5.14217234e+00 -2.01468158e+00] [-2.52328873e+00 -2.89808333e-01 1.97206986e+00 ... 
-2.53566504e-02 -7.72586346e+00 -2.11819935e+00] [ 1.36397421e+00 2.85671616e+00 -8.32089067e-01 ... -4.78689849e-01 -2.91891432e+00 -3.09184837e+00] ... [-2.01429033e+00 9.19020057e-01 -4.41329813e+00 ... -4.95284200e-01 -1.70951414e+00 -7.68680573e-01] [-2.15755606e+00 1.36418641e+00 -4.84906852e-01 ... 3.28647614e+00 -3.63047004e-01 7.61014581e-01] [ 2.00174236e+00 1.73262799e+00 -4.74270916e+00 ... 2.78752804e-01 -9.85567331e-01 -1.96430552e+00]]] [[[-7.71976888e-01 4.71328259e-01 9.55587387e-01 ... 3.86954546e-01 -5.63006639e-01 5.39955676e-01] [-2.04467699e-01 1.20088607e-01 -5.08299589e-01 ... 3.69843006e-01 2.69119233e-01 -5.23079932e-01] [ 1.75181627e-01 4.54123169e-02 2.21983567e-01 ... -1.58921435e-01 -1.10850886e-01 3.49283904e-01] ... [-4.32810634e-02 -1.77362785e-01 -6.91077560e-02 ... 1.65225714e-02 5.04213691e-01 5.38107753e-01] [ 3.96842241e-01 1.07999399e-01 -5.08114278e-01 ... 8.34755421e-01 -3.78297031e-01 -5.23044050e-01] [ 3.05563897e-01 2.89178669e-01 -7.26758689e-02 ... 8.92326087e-02 -7.96484798e-02 -1.29906699e-01]] [[-3.82050753e-01 4.32758093e-01 2.11820081e-01 ... -1.12258330e-01 1.51031584e-01 5.21780133e-01] [ 8.47654045e-02 3.21828246e-01 3.08520466e-01 ... 3.68754089e-01 -3.95003974e-01 1.27850726e-01] [ 1.23056960e+00 6.91431999e-01 3.33320647e-01 ... -1.08114228e-01 4.53824431e-01 -3.07700634e-02] ... [-3.77957582e-01 2.41323709e-01 1.59252673e-01 ... 6.81682885e-01 -1.22660711e-01 -4.37930644e-01] [ 3.94117415e-01 3.07586014e-01 1.56738758e-01 ... 4.67492908e-01 4.37486440e-01 9.73569080e-02] [ 8.84274483e-01 8.36103737e-01 -5.71568906e-01 ... 6.34784222e-01 -3.04087043e-01 3.31904709e-01]] [[-3.46766412e-01 6.54784024e-01 4.68548954e-01 ... 2.11574808e-01 2.10567564e-01 -1.59609124e-01] [ 3.07163715e-01 4.89341259e-01 -7.47574866e-03 ... 4.31290150e-01 9.58131552e-01 6.03705704e-01] [-1.40394047e-01 1.20989069e-01 -1.30744770e-01 ... 5.68057954e-01 1.21912408e+00 -2.02835605e-01] ... 
[-2.72476017e-01 -1.87585071e-01 5.20938098e-01 ... 3.82310510e-01 -1.35312989e-01 2.10871354e-01] [ 6.58309460e-01 -3.18512678e-01 1.07144676e-01 ... 2.31823370e-01 -1.84502229e-01 -2.64391452e-02] [-2.86836177e-02 -4.57895696e-01 2.28675574e-01 ... 1.95750207e-01 -1.65265545e-01 1.08849056e-01]] ... [[ 1.25192499e+00 6.08467937e-01 -4.56036747e-01 ... 1.01864688e-01 8.35955918e-01 2.88339853e-02] [ 4.35345590e-01 4.54844296e-01 1.23326704e-01 ... 2.94317096e-01 9.19124782e-02 1.57716036e-01] [ 5.59532642e-01 4.37666655e-01 5.73169708e-01 ... -1.06671751e-02 4.47258234e-01 4.25606489e-01] ... [ 7.19911098e-01 -4.41321433e-01 -5.41561186e-01 ... 4.29392099e-01 1.33395955e-01 1.36525691e-01] [ 5.36535621e-01 2.85259336e-01 7.92252719e-01 ... 6.84975386e-01 6.78394914e-01 1.75077856e-01] [ 1.90867662e-01 -2.18249843e-01 -1.23612806e-01 ... 7.18754053e-01 1.84754610e-01 -1.52645424e-01]] [[ 3.73557270e-01 -3.07337642e-01 -2.19423160e-01 ... 7.81280935e-01 6.12310171e-01 7.40960717e-01] [-5.55934250e-01 4.95469630e-01 -1.84621707e-01 ... 1.22015864e-01 8.40206385e-01 -2.75449574e-01] [ 1.10173665e-01 6.05031013e-01 6.88811183e-01 ... 2.39396304e-01 -2.50506401e-01 2.36297265e-01] ... [ 4.29560900e-01 3.96961242e-01 5.63219905e-01 ... -2.93235719e-01 5.57335496e-01 -3.77841413e-01] [ 4.37271059e-01 5.39189339e-01 1.51492000e-01 ... -3.15401495e-01 1.99245647e-01 -3.09445798e-01] [ 9.12641525e-01 3.30471396e-01 -3.58248889e-01 ... 2.26930529e-02 -4.65833008e-01 1.22711509e-02]] [[ 2.92464435e-01 2.31378078e-01 3.92943740e-01 ... 2.87627846e-01 3.99284244e-01 -2.86707133e-02] [-4.85812902e-01 -1.02952644e-01 2.29390025e-01 ... 5.66492260e-01 3.99963140e-01 6.82051837e-01] [-2.89262354e-01 -1.21092007e-01 1.02637343e-01 ... 4.39393729e-01 5.97300947e-01 -2.01624349e-01] ... [ 1.01934755e+00 -7.35256732e-01 2.78481752e-01 ... 7.92423487e-01 3.53622615e-01 -4.12972391e-01] [ 7.32576370e-01 6.93269670e-01 -1.53679028e-01 ... 
3.40238214e-01 -2.44728133e-01 3.73806953e-01] [-8.55707884e-01 -1.00836024e-01 7.57339954e-01 ... 3.17466438e-01 5.39176166e-01 3.03006023e-01]]]]]; ov_res: [[[[[ 1.25891685e+00 1.95653951e+00 1.98588359e+00 ... 1.55537522e+00 1.78404498e+00 3.00107813e+00] [ 9.81506526e-01 1.89060462e+00 2.28161430e+00 ... 3.05258584e+00 1.30505216e+00 1.61105537e+00] [ 1.84756875e+00 1.50888991e+00 1.49568474e+00 ... 1.06601548e+00 1.65329921e+00 2.97947884e+00] ... [ 7.56551385e-01 1.92326009e+00 1.35202825e+00 ... 1.12918890e+00 2.43312335e+00 1.47224319e+00] [ 1.52979553e+00 3.07136464e+00 1.66623497e+00 ... 1.83018339e+00 1.68354356e+00 1.67759407e+00] [ 1.91476035e+00 9.53161001e-01 3.12537956e+00 ... 1.39554977e+00 2.62617040e+00 2.29116344e+00]] [[ 1.56499052e+00 1.56993532e+00 1.53375363e+00 ... 2.20872068e+00 9.96384084e-01 1.24166465e+00] [ 1.75859892e+00 1.79686785e+00 2.97171497e+00 ... 3.55834961e-01 1.44628167e+00 1.52364981e+00] [ 2.25410891e+00 1.46053839e+00 1.81898391e+00 ... 2.19559479e+00 1.76158559e+00 1.22004819e+00] ... [ 2.89333916e+00 1.09120309e+00 1.60963666e+00 ... 1.69413447e-01 9.82254386e-01 2.23934507e+00] [ 1.71665156e+00 -5.07389665e-01 1.22466731e+00 ... 1.75241041e+00 2.35466886e+00 1.78600025e+00] [ 1.21592426e+00 2.69618011e+00 1.99586320e+00 ... 2.13635182e+00 2.23794007e+00 8.38610888e-01]] [[ 1.45544696e+00 1.63298368e+00 2.03639054e+00 ... 1.87356663e+00 2.03786397e+00 1.68241119e+00] [ 2.34768677e+00 1.87839496e+00 1.08712208e+00 ... 1.49891233e+00 1.76035357e+00 1.87601745e+00] [ 1.24233866e+00 8.76626790e-01 2.13546085e+00 ... 1.41679180e+00 1.83070838e+00 2.39617705e+00] ... [ 2.28267241e+00 1.47688723e+00 1.42396617e+00 ... 1.64582062e+00 1.60523975e+00 2.02078438e+00] [ 2.13443351e+00 1.10769272e+00 2.24900889e+00 ... 1.36424184e+00 2.23356295e+00 4.98826504e-01] [ 1.86560154e+00 1.70577693e+00 1.76136744e+00 ... 2.96451902e+00 9.31547642e-01 2.46616364e+00]] ... [[ 1.58097172e+00 1.90793109e+00 2.10335875e+00 ... 
1.24253941e+00 1.80101252e+00 2.46061659e+00] [ 1.98825598e+00 1.74074697e+00 2.02114367e+00 ... 2.26273799e+00 1.42565346e+00 1.76362681e+00] [ 1.91512573e+00 1.69614494e+00 1.96156359e+00 ... 1.01080596e+00 2.12732172e+00 1.41030598e+00] ... [ 2.07628179e+00 1.43507910e+00 1.24069715e+00 ... 1.14545226e+00 1.88053703e+00 1.06854272e+00] [ 1.80538940e+00 3.16373849e+00 4.06160975e+00 ... 3.34287357e+00 2.24037600e+00 1.62715757e+00] [ 2.59068203e+00 1.70729649e+00 2.12126899e+00 ... 3.02318549e+00 8.75127196e-01 1.44308114e+00]] [[ 1.88704741e+00 1.94626486e+00 1.10008907e+00 ... 1.88639188e+00 1.55291712e+00 2.19770813e+00] [ 2.43028450e+00 1.70693839e+00 3.31225824e+00 ... 1.36321092e+00 2.17641544e+00 3.06355834e-01] [ 1.74323297e+00 1.85414004e+00 1.13656974e+00 ... 1.69262350e+00 1.44008315e+00 1.43421602e+00] ... [ 1.84592116e+00 1.49331558e+00 2.44289374e+00 ... 1.47783208e+00 1.23806357e+00 2.48098588e+00] [ 1.90843296e+00 2.56585050e+00 2.66200924e+00 ... 1.07355499e+00 8.61859143e-01 1.58755732e+00] [ 1.99960780e+00 1.03758538e+00 1.84538424e+00 ... 2.23130918e+00 1.77120161e+00 1.72764611e+00]] [[ 2.05501270e+00 9.87521589e-01 1.58279991e+00 ... 1.98338866e+00 2.21555948e+00 2.13417029e+00] [ 1.99572968e+00 3.48513556e+00 9.79872584e-01 ... 6.22085810e-01 4.75251675e-03 1.27875805e+00] [ 2.10039091e+00 1.19648027e+00 2.85322523e+00 ... 2.22103405e+00 2.02595043e+00 1.70845222e+00] ... [ 1.89758253e+00 2.42891455e+00 1.04459882e+00 ... 1.82246447e+00 2.25994658e+00 1.85044014e+00] [ 8.21009040e-01 2.59912395e+00 2.31239128e+00 ... 2.71850824e+00 1.88633943e+00 8.54207993e-01] [ 1.49922144e+00 6.04425669e-01 2.07253957e+00 ... 2.01444745e+00 1.98703432e+00 1.68210328e+00]]] [[[-1.89274645e+00 -1.34434438e+00 2.27816343e-01 ... -2.36256540e-01 -3.02935195e+00 -3.86978579e+00] [-1.43702793e+00 -2.96185017e+00 1.84425247e+00 ... -2.16958284e-01 -7.51673698e-01 1.03822720e+00] [ 6.02662921e-01 -2.30322742e+00 -9.39988792e-01 ... 
5.07598042e-01 -5.77466488e-01 -6.72793269e-01] ... [-1.23451364e+00 -1.67488933e+00 1.47160530e-01 ... -1.98963881e-01 -1.43339312e+00 -8.95589530e-01] [-5.19363523e-01 4.92438912e-01 -2.15198612e+00 ... 2.05763340e+00 -3.32935238e+00 -3.06940031e+00] [-6.65909886e-01 -1.59132552e+00 1.78748250e-01 ... 1.60560071e+00 1.32393682e+00 3.57631207e-01]] [[-5.53330064e-01 -1.90562832e+00 1.04715121e+00 ... 3.85535598e-01 -4.41679120e-01 -2.89483404e+00] [-1.31468594e+00 -1.27190280e+00 -8.46252561e-01 ... -2.38442969e+00 -4.54693973e-01 3.59978437e+00] [ 1.17828786e+00 -2.08978558e+00 5.32487392e-01 ... -3.00053906e+00 -3.64839840e+00 -1.43362379e+00] ... [-1.64266944e+00 7.15780258e-03 3.94136071e-01 ... 6.58522844e-01 -4.75861597e+00 -2.38934135e+00] [-1.70013857e+00 1.46407187e+00 -1.77194977e+00 ... -2.63043165e+00 -1.40398324e+00 1.07998955e+00] [-1.16383243e+00 1.30963290e+00 3.69158149e-01 ... 1.11701488e-02 1.13007426e-01 2.81129003e-01]] [[-4.51377296e+00 -3.83570004e+00 -2.74556947e+00 ... -1.75489545e-01 -2.07681060e-01 -1.98464429e+00] [-2.45303774e+00 5.77015877e-02 3.68236661e-01 ... -2.43151367e-01 9.58519340e-01 7.43455052e-01] [-2.26366234e+00 -2.06357121e+00 7.91641712e-01 ... -2.30332565e+00 -4.65656614e+00 -8.91255379e-01] ... [ 1.65201700e+00 -6.28191471e-01 -1.46268702e+00 ... -2.58391476e+00 -1.31459785e+00 -6.29704535e-01] [-3.38382578e+00 -8.69489014e-01 5.32996178e-01 ... -5.28393328e-01 -2.87784910e+00 -1.23799026e+00] [-1.97136259e+00 -1.64107442e+00 8.05955648e-01 ... -2.72985697e-02 -2.76473951e+00 -9.31673050e-01]] ... [[-2.72076035e+00 -3.81451368e+00 1.71061397e-01 ... -2.22479963e+00 -1.84022403e+00 -9.74879742e-01] [-2.31727064e-01 -2.50874043e+00 1.67236412e+00 ... -2.25081182e+00 7.50741959e-02 -2.13347149e+00] [ 2.24428535e-01 8.68260384e-01 -8.99921298e-01 ... -1.85386729e+00 1.07291591e+00 -4.51735079e-01] ... [-2.60747957e+00 1.10773313e+00 1.88503611e+00 ... 
-1.83037376e+00 -3.45790863e+00 -1.58020663e+00] [-2.67505646e-02 -5.72367191e-01 -1.44710648e+00 ... -1.99189305e+00 -1.43741786e+00 -2.38314533e+00] [-3.65861237e-01 -1.86858559e+00 -1.61948621e+00 ... -3.01612329e+00 -1.15048885e+00 -1.79338145e+00]] [[-2.86502647e+00 -3.71086645e+00 -7.43300319e-01 ... -4.07065916e+00 -3.66544664e-01 -1.84717035e+00] [-1.24169850e+00 -3.20096159e+00 -1.99918318e+00 ... -3.05982685e+00 -2.74528193e+00 -1.57490063e+00] [-3.28415394e+00 -2.78492498e+00 -9.90800858e-02 ... -2.96077013e+00 -2.47553587e-02 4.61546183e-02] ... [-1.01577199e+00 -1.07310104e+00 -4.93167222e-01 ... 9.65551972e-01 -2.36301661e+00 1.37494600e+00] [-1.02366149e-01 -4.14576530e+00 -7.67473221e-01 ... -4.10639954e+00 -2.18897104e+00 -3.25956917e+00] [-5.98678231e-01 -1.91318488e+00 -2.11587191e+00 ... -2.05059826e-01 2.29350471e+00 -1.10605764e+00]] [[ 1.45488489e+00 -1.15511215e+00 -3.14815378e+00 ... -7.20782995e-01 -5.14217234e+00 -2.01468158e+00] [-2.52328873e+00 -2.89808333e-01 1.97206986e+00 ... -2.53566504e-02 -7.72586346e+00 -2.11819935e+00] [ 1.36397421e+00 2.85671616e+00 -8.32089067e-01 ... -4.78689849e-01 -2.91891432e+00 -3.09184837e+00] ... [-2.01429033e+00 9.19020057e-01 -4.41329813e+00 ... -4.95284200e-01 -1.70951414e+00 -7.68680573e-01] [-2.15755606e+00 1.36418641e+00 -4.84906852e-01 ... 3.28647614e+00 -3.63047004e-01 7.61014581e-01] [ 2.00174236e+00 1.73262799e+00 -4.74270916e+00 ... 2.78752804e-01 -9.85567331e-01 -1.96430552e+00]]] [[[-7.71976888e-01 4.71328259e-01 9.55587387e-01 ... 3.86954546e-01 -5.63006639e-01 5.39955676e-01] [-2.04467699e-01 1.20088607e-01 -5.08299589e-01 ... 3.69843006e-01 2.69119233e-01 -5.23079932e-01] [ 1.75181627e-01 4.54123169e-02 2.21983567e-01 ... -1.58921435e-01 -1.10850886e-01 3.49283904e-01] ... [-4.32810634e-02 -1.77362785e-01 -6.91077560e-02 ... 1.65225714e-02 5.04213691e-01 5.38107753e-01] [ 3.96842241e-01 1.07999399e-01 -5.08114278e-01 ... 
8.34755421e-01 -3.78297031e-01 -5.23044050e-01] [ 3.05563897e-01 2.89178669e-01 -7.26758689e-02 ... 8.92326087e-02 -7.96484798e-02 -1.29906699e-01]] [[-3.82050753e-01 4.32758093e-01 2.11820081e-01 ... -1.12258330e-01 1.51031584e-01 5.21780133e-01] [ 8.47654045e-02 3.21828246e-01 3.08520466e-01 ... 3.68754089e-01 -3.95003974e-01 1.27850726e-01] [ 1.23056960e+00 6.91431999e-01 3.33320647e-01 ... -1.08114228e-01 4.53824431e-01 -3.07700634e-02] ... [-3.77957582e-01 2.41323709e-01 1.59252673e-01 ... 6.81682885e-01 -1.22660711e-01 -4.37930644e-01] [ 3.94117415e-01 3.07586014e-01 1.56738758e-01 ... 4.67492908e-01 4.37486440e-01 9.73569080e-02] [ 8.84274483e-01 8.36103737e-01 -5.71568906e-01 ... 6.34784222e-01 -3.04087043e-01 3.31904709e-01]] [[-3.46766412e-01 6.54784024e-01 4.68548954e-01 ... 2.11574808e-01 2.10567564e-01 -1.59609124e-01] [ 3.07163715e-01 4.89341259e-01 -7.47574866e-03 ... 4.31290150e-01 9.58131552e-01 6.03705704e-01] [-1.40394047e-01 1.20989069e-01 -1.30744770e-01 ... 5.68057954e-01 1.21912408e+00 -2.02835605e-01] ... [-2.72476017e-01 -1.87585071e-01 5.20938098e-01 ... 3.82310510e-01 -1.35312989e-01 2.10871354e-01] [ 6.58309460e-01 -3.18512678e-01 1.07144676e-01 ... 2.31823370e-01 -1.84502229e-01 -2.64391452e-02] [-2.86836177e-02 -4.57895696e-01 2.28675574e-01 ... 1.95750207e-01 -1.65265545e-01 1.08849056e-01]] ... [[ 1.25192499e+00 6.08467937e-01 -4.56036747e-01 ... 1.01864688e-01 8.35955918e-01 2.88339853e-02] [ 4.35345590e-01 4.54844296e-01 1.23326704e-01 ... 2.94317096e-01 9.19124782e-02 1.57716036e-01] [ 5.59532642e-01 4.37666655e-01 5.73169708e-01 ... -1.06671751e-02 4.47258234e-01 4.25606489e-01] ... [ 7.19911098e-01 -4.41321433e-01 -5.41561186e-01 ... 4.29392099e-01 1.33395955e-01 1.36525691e-01] [ 5.36535621e-01 2.85259336e-01 7.92252719e-01 ... 6.84975386e-01 6.78394914e-01 1.75077856e-01] [ 1.90867662e-01 -2.18249843e-01 -1.23612806e-01 ... 7.18754053e-01 1.84754610e-01 -1.52645424e-01]] [[ 3.73557270e-01 -3.07337642e-01 -2.19423160e-01 ... 
7.81280935e-01 6.12310171e-01 7.40960717e-01] [-5.55934250e-01 4.95469630e-01 -1.84621707e-01 ... 1.22015864e-01 8.40206385e-01 -2.75449574e-01] [ 1.10173665e-01 6.05031013e-01 6.88811183e-01 ... 2.39396304e-01 -2.50506401e-01 2.36297265e-01] ... [ 4.29560900e-01 3.96961242e-01 5.63219905e-01 ... -2.93235719e-01 5.57335496e-01 -3.77841413e-01] [ 4.37271059e-01 5.39189339e-01 1.51492000e-01 ... -3.15401495e-01 1.99245647e-01 -3.09445798e-01] [ 9.12641525e-01 3.30471396e-01 -3.58248889e-01 ... 2.26930529e-02 -4.65833008e-01 1.22711509e-02]] [[ 2.92464435e-01 2.31378078e-01 3.92943740e-01 ... 2.87627846e-01 3.99284244e-01 -2.86707133e-02] [-4.85812902e-01 -1.02952644e-01 2.29390025e-01 ... 5.66492260e-01 3.99963140e-01 6.82051837e-01] [-2.89262354e-01 -1.21092007e-01 1.02637343e-01 ... 4.39393729e-01 5.97300947e-01 -2.01624349e-01] ... [ 1.01934755e+00 -7.35256732e-01 2.78481752e-01 ... 7.92423487e-01 3.53622615e-01 -4.12972391e-01] [ 7.32576370e-01 6.93269670e-01 -1.53679028e-01 ... 3.40238214e-01 -2.44728133e-01 3.73806953e-01] [-8.55707884e-01 -1.00836024e-01 7.57339954e-01 ... 3.17466438e-01 5.39176166e-01 3.03006023e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_862.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.5211 -0.1493 1.3893 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.6623 (2,1,1,.,.) = -0.4941 (3,1,1,.,.) = -1.4224 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[[-1.34863496e+00 -1.49779689e+00 -1.36741602e+00 ... -2.62059379e+00 -2.15964890e+00 -2.26154423e+00] [-2.41466451e+00 -1.65874147e+00 -2.28193355e+00 ... -1.48111737e+00 -7.77323127e-01 -1.87138402e+00] [-1.43096733e+00 -7.73435235e-01 -9.79626000e-01 ... -1.06670940e+00 -1.12376320e+00 -2.30447149e+00] ... [-2.15642738e+00 -8.86426568e-01 -7.92904437e-01 ... -1.77406645e+00 -1.37545586e+00 -6.32085204e-01] [-2.84115839e+00 -2.69308043e+00 -9.74452555e-01 ... -1.01762164e+00 -1.05712533e+00 -2.37994313e+00] [-1.32083297e+00 -1.08417380e+00 -1.10967159e+00 ... -1.38096309e+00 -1.79984474e+00 -1.48821723e+00]] [[-2.65490651e-01 -2.40560484e+00 -5.89481115e-01 ... -2.52573848e-01 -1.37955546e+00 -8.91074836e-01] [-1.50553405e+00 -1.03129709e+00 -1.10737002e+00 ... -2.04601979e+00 -9.20120299e-01 -1.12702632e+00] [-1.22331715e+00 -5.57413042e-01 -1.88301492e+00 ... -2.27775383e+00 -5.08391976e-01 -2.18336439e+00] ... 
[-1.83507371e+00 -2.48410845e+00 -8.18021476e-01 ... -1.31169069e+00 -1.26819730e+00 -2.08381891e+00] [-2.21051812e+00 -1.26243877e+00 -6.21392012e-01 ... -1.46139884e+00 -2.44325829e+00 -1.27310407e+00] [-1.14583492e-01 -1.34680820e+00 -1.05950594e+00 ... -2.31759095e+00 -1.44832850e+00 -2.04039073e+00]] [[-1.93842101e+00 -1.61067855e+00 -8.44649017e-01 ... -1.35214543e+00 2.09236145e-01 -1.56592083e+00] [-1.46070874e+00 -8.45112324e-01 -1.12452185e+00 ... -1.19617820e+00 -1.15661597e+00 -2.46155000e+00] [-1.35184038e+00 -9.51431990e-02 -1.69719577e+00 ... -2.52341032e-01 -1.85125256e+00 -7.99959779e-01] ... [-1.83968985e+00 -9.91563618e-01 -1.95758414e+00 ... -2.49887538e+00 -1.83993649e+00 -1.44737351e+00] [-2.34951258e+00 -2.35335088e+00 -1.79128885e+00 ... -1.33738232e+00 -1.78953433e+00 -1.37193513e+00] [-2.13360834e+00 -1.33292449e+00 -2.55797362e+00 ... -2.32307363e+00 -1.08953190e+00 -1.63136625e+00]] ... [[ 6.67722464e-01 -1.39185750e+00 -1.40287519e+00 ... -1.68281567e+00 -1.16337800e+00 -1.55456245e+00] [-1.54661477e+00 -1.91463590e+00 -1.09015727e+00 ... -1.80851161e+00 -1.33835900e+00 -1.11438751e+00] [-8.26344252e-01 -8.58257294e-01 -1.56622672e+00 ... -1.91311014e+00 -1.88736033e+00 -7.90015161e-01] ... [-1.54414523e+00 -1.05921376e+00 -2.85192060e+00 ... -1.90386844e+00 -2.25992799e+00 -2.96060467e+00] [-1.07520723e+00 -1.00956106e+00 -1.10525513e+00 ... -1.53405905e+00 -1.46898007e+00 -1.54042947e+00] [-2.15245032e+00 -7.88168252e-01 -1.90712929e+00 ... -1.21723938e+00 -1.41766989e+00 -9.60109830e-01]] [[-9.99107420e-01 -1.11519921e+00 -2.53641891e+00 ... -1.59617651e+00 -1.86538899e+00 -2.49586964e+00] [-1.41633642e+00 -2.02337337e+00 -5.78334212e-01 ... -1.79252350e+00 -1.65407169e+00 -1.66958117e+00] [-2.96302867e+00 -1.31814682e+00 -1.15832222e+00 ... -1.48303056e+00 -2.09034491e+00 -1.37673926e+00] ... [-1.68090463e+00 -7.44305074e-01 -7.23431587e-01 ... 
-2.84353471e+00 -9.09735441e-01 -2.19293761e+00] [-2.17987251e+00 -2.73414993e+00 -1.31739700e+00 ... -1.10158157e+00 -1.12404776e+00 -1.72764659e+00] [-2.13164997e+00 -1.45869911e+00 -1.18237853e+00 ... -1.72067153e+00 -9.26648855e-01 -1.87998962e+00]] [[-1.63843489e+00 -1.04756308e+00 -1.32524908e+00 ... -2.00557971e+00 -1.77837372e+00 -1.39709485e+00] [-1.41073787e+00 -2.26350856e+00 -9.53317523e-01 ... -1.44432366e+00 -8.64731908e-01 -2.45860958e+00] [-2.98124027e+00 -2.35410905e+00 -1.75958157e+00 ... -7.64243782e-01 -2.17257619e+00 -1.14312208e+00] ... [-7.28678465e-01 -1.80736792e+00 -9.73522007e-01 ... -1.01838779e+00 -1.35918188e+00 -4.71082330e-01] [-1.45240402e+00 -2.22331095e+00 -2.51006150e+00 ... 4.13736105e-02 -1.49386096e+00 -1.98313963e+00] [-2.25880098e+00 -1.73974216e+00 -1.84322977e+00 ... -1.44854665e+00 -1.47873795e+00 -1.88588870e+00]]] [[[-1.16993713e+00 4.04719830e-01 -1.31885982e+00 ... -6.82019413e-01 -8.38359892e-01 2.10783780e-01] [-6.28302574e-01 -5.47941089e-01 -4.64452416e-01 ... -1.02791142e+00 1.41237617e-01 3.23097706e-01] [-6.74074292e-01 3.79326403e-01 -1.86170369e-01 ... 2.02160925e-01 -3.58934283e-01 -6.81843460e-01] ... [ 5.03273785e-01 -3.14739108e-01 -3.06281209e-01 ... -1.86103731e-01 -2.48605117e-01 -6.94611073e-02] [ 3.27249050e-01 -1.17112732e+00 -1.13655996e+00 ... 1.95313990e-01 -1.36710405e-01 -2.15184331e-01] [-3.52673143e-01 -1.40419960e-01 -1.00974274e+00 ... 5.21597415e-02 -5.27451694e-01 -2.18252689e-01]] [[ 1.51791751e-01 2.99678445e-01 -2.02530131e-01 ... 8.74906480e-01 1.50157660e-02 -3.71899188e-01] [ 7.22121537e-01 -2.09080309e-01 1.88999683e-01 ... -2.86207974e-01 7.65483260e-01 -5.11711359e-01] [-8.12733829e-01 2.70175397e-01 -2.64931694e-02 ... -2.91473567e-01 -3.89340699e-01 2.39550799e-01] ... [-4.48900282e-01 -3.95019114e-01 5.79995990e-01 ... -6.85741425e-01 -1.79985374e-01 -9.66834128e-01] [-7.83460140e-02 9.60423768e-01 2.68915892e-01 ... 
-2.32319206e-01 2.66337752e-01 -5.24197459e-01] [ 3.31631452e-02 -6.68864727e-01 -6.52818501e-01 ... -1.87384039e-01 -5.23148298e-01 -1.10575378e+00]] [[-2.28078097e-01 6.47998005e-02 3.76663506e-02 ... -7.61485219e-01 8.91623199e-01 -9.71974969e-01] [-3.46838206e-01 -8.92892629e-02 -5.49111128e-01 ... 4.34189290e-02 -8.92073810e-01 -3.10221314e-01] [-6.38350427e-01 -4.19947535e-01 1.41573668e-01 ... 4.86647606e-01 1.55683547e-01 -6.08140230e-01] ... [-4.41092849e-01 -7.09392130e-01 -3.36736947e-01 ... -2.17582852e-01 7.06940353e-01 -7.46444389e-02] [-1.17042470e+00 -2.72220165e-01 -8.42469156e-01 ... -8.41684461e-01 2.43261576e-01 1.48329407e-01] [-9.36946213e-01 7.80171305e-02 -1.26582608e-01 ... 5.33124804e-02 -8.75831246e-01 7.49694347e-01]] ... [[-6.01030767e-01 -1.59233972e-01 -6.71921492e-01 ... 1.10963941e-01 -2.39590153e-01 -1.36721015e+00] [-4.08830732e-01 -2.26193249e-01 -1.15263104e+00 ... 6.47505522e-01 -2.55195916e-01 -4.62672785e-02] [ 2.95614034e-01 -1.34457159e+00 -5.54393768e-01 ... -3.59654605e-01 5.08978665e-02 -8.16987693e-01] ... [-1.51706904e-01 -1.64671242e-03 -4.77826059e-01 ... 9.66942757e-02 -2.95307606e-01 1.83173537e-01] [ 5.49938083e-01 5.35764754e-01 2.58024365e-01 ... -7.51758933e-01 2.07455069e-01 5.75436532e-01] [-9.56265926e-01 -2.16136709e-01 -1.28338039e-02 ... -6.91419393e-02 -3.85739431e-02 9.33178842e-01]] [[-5.16604483e-01 -1.01614821e+00 -4.47255254e-01 ... -1.00421584e+00 -4.99392539e-01 -4.58841652e-01] [-5.31612992e-01 8.44038904e-01 1.01145482e+00 ... -7.26734102e-03 -3.47924680e-01 -6.97351217e-01] [-3.97869349e-01 -5.02092004e-01 -4.01619613e-01 ... 1.79229379e-02 7.36709416e-01 -7.84057528e-02] ... [-2.47252941e-01 -1.22575246e-01 1.85866535e-01 ... 4.87334132e-02 -1.16145825e+00 -4.47307348e-01] [-3.52102399e-01 -3.67803812e-01 -1.62069261e-01 ... -1.81445137e-01 -9.22950566e-01 -1.07466435e+00] [ 6.93098366e-01 -2.50550896e-01 -5.47040582e-01 ... 
-2.71741718e-01 -8.42157304e-02 1.00206995e+00]] [[ 1.94934309e-01 -4.27424818e-01 -5.26268244e-01 ... 2.79554039e-01 -7.18262553e-01 -5.25130391e-01] [ 6.68434799e-02 1.70960814e-01 4.93920326e-01 ... 1.17613614e-01 -2.25167781e-01 -2.89049149e-01] [-1.84823543e-01 -2.28794515e-02 1.62744313e-01 ... 4.17688429e-01 5.33888698e-01 1.91639394e-01] ... [ 1.72580361e-01 -3.18438053e-01 -9.09799755e-01 ... -1.88566789e-01 -9.08680260e-01 4.82719660e-01] [ 1.28369546e+00 -5.87145925e-01 -3.96705508e-01 ... -5.59648722e-02 -1.87294722e-01 2.35463381e-02] [ 4.25752699e-01 1.61805958e-01 3.47350061e-01 ... 1.77270174e-01 -2.33028710e-01 1.60238981e-01]]] [[[ 6.29047811e-01 9.02037263e-01 2.86454344e+00 ... 3.22973680e+00 2.24032593e+00 -8.74750495e-01] [ 1.08006072e+00 7.22549796e-01 3.13702750e+00 ... 2.75627041e+00 -3.48214269e-01 2.71071053e+00] [-1.13124692e+00 4.45409358e-01 2.01757884e+00 ... 1.63903689e+00 1.38192761e+00 1.61117792e-01] ... [ 2.83083296e+00 2.69648314e+00 7.52240896e-01 ... 3.53466868e-01 1.34049916e+00 2.06107259e+00] [ 2.95809197e+00 3.61104012e-02 9.22901154e-01 ... 1.33557773e+00 1.80297017e+00 3.25279403e+00] [ 3.91580760e-01 1.39569378e+00 2.79459071e+00 ... 7.27772713e-04 3.86129236e+00 9.72028852e-01]] [[ 7.37205684e-01 -5.12719750e-01 -1.01015210e-01 ... 2.29463243e+00 -4.84221816e-01 2.87375450e-01] [ 3.18029833e+00 1.58419359e+00 -5.61494470e-01 ... 3.44360447e+00 3.48939228e+00 6.34486198e-01] [ 2.77447701e+00 2.43669033e+00 1.05946708e+00 ... 1.99509847e+00 9.44904149e-01 9.63887811e-01] ... [ 7.14870334e-01 1.53818822e+00 2.06051016e+00 ... 1.46804118e+00 1.33948755e+00 1.31684124e+00] [ 4.43501806e+00 4.63097620e+00 2.47886324e+00 ... 3.63847113e+00 1.12058425e+00 -9.36652780e-01] [ 1.47063899e+00 7.21262574e-01 3.21975136e+00 ... 3.38437557e-01 1.53418970e+00 2.97841549e-01]] [[ 8.90822411e-02 1.80323589e+00 6.37210965e-01 ... -8.18783641e-01 -6.44228578e-01 -7.08728433e-01] [ 1.34186935e+00 9.62139010e-01 2.43368196e+00 ... 
2.04846716e+00 2.09051299e+00 2.80504155e+00] [ 1.35119915e+00 1.31303310e-01 1.96226490e+00 ... -3.84540558e-02 9.41835165e-01 1.97510123e+00] ... [ 9.69762564e-01 4.16348219e-01 1.44774401e+00 ... 2.21891022e+00 9.39873457e-01 2.43771863e+00] [ 1.47739112e+00 5.13247848e-01 1.35060406e+00 ... 2.68992424e+00 1.21774352e+00 1.80095208e+00] [ 8.84290755e-01 -3.87309670e-01 2.46743417e+00 ... -2.55110264e-01 1.09304368e+00 1.84100258e+00]] ... [[ 6.77953064e-01 -8.83271575e-01 1.82467508e+00 ... 2.51220584e+00 1.41192424e+00 1.28061044e+00] [ 2.51216125e+00 1.46550143e+00 2.38778877e+00 ... 1.38161373e+00 4.04343653e+00 1.04608464e+00] [ 3.51748514e+00 1.32256556e+00 2.21944761e+00 ... 4.05505300e-01 1.16958141e+00 2.86639667e+00] ... [-7.28428006e-01 -2.02852011e+00 2.66275477e+00 ... 7.92855620e-01 1.05834699e+00 1.19800997e+00] [ 2.01124716e+00 9.89259243e-01 3.18156672e+00 ... 6.40517473e-01 -5.70106745e-01 1.14345002e+00] [ 3.58168697e+00 1.55075109e+00 4.13302946e+00 ... 1.81803823e+00 4.10856771e+00 -1.39127016e-01]] [[-1.56920254e+00 2.06595421e+00 1.33582425e+00 ... 3.03829670e+00 -6.18058562e-01 1.60720992e+00] [-2.09753633e-01 9.25622821e-01 4.82049882e-01 ... 2.18580031e+00 4.58731651e-01 3.13513017e+00] [ 2.16876364e+00 7.55405307e-01 2.91710138e-01 ... 6.16881311e-01 1.70981526e-01 7.99706578e-01] ... [ 4.41644335e+00 4.09716988e+00 3.24160933e+00 ... 1.89771104e+00 2.10939431e+00 1.40547454e+00] [ 1.85214961e+00 4.51217353e-01 1.51505685e+00 ... 1.73793077e+00 -1.67672384e+00 2.98316908e+00] [ 2.31637907e+00 2.47045636e-01 1.01093173e-01 ... 1.98519707e+00 1.49553907e+00 2.60613155e+00]] [[ 8.74922454e-01 2.87079811e-02 3.53102160e+00 ... 8.60773563e-01 3.06083536e+00 7.43278980e-01] [ 2.23124027e+00 2.21345687e+00 2.74677849e+00 ... 1.93480098e+00 1.57599664e+00 1.09553218e-01] [-1.24832034e-01 -1.40224457e-01 1.87264895e+00 ... 9.33475494e-01 1.59179509e+00 8.14989030e-01] ... [ 2.01970339e-01 3.68268633e+00 2.22756958e+00 ... 
4.87456656e+00 2.19549656e+00 1.40119410e+00] [-9.60204601e-02 4.16837072e+00 1.04594946e+00 ... 1.73755765e-01 2.66284251e+00 4.95522320e-01] [-6.13642335e-01 9.33425844e-01 6.16250217e-01 ... -3.72507572e-01 -1.71382546e-01 2.02585459e-02]]]]]; ov_res: [[[[[-1.34863496e+00 -1.49779689e+00 -1.36741602e+00 ... -2.62059379e+00 -2.15964890e+00 -2.26154423e+00] [-2.41466451e+00 -1.65874147e+00 -2.28193355e+00 ... -1.48111737e+00 -7.77323127e-01 -1.87138402e+00] [-1.43096733e+00 -7.73435235e-01 -9.79626000e-01 ... -1.06670940e+00 -1.12376320e+00 -2.30447149e+00] ... [-2.15642762e+00 -8.86426568e-01 -7.92904437e-01 ... -1.77406633e+00 -1.37545586e+00 -6.32085204e-01] [-2.84115815e+00 -2.69308043e+00 -9.74452555e-01 ... -1.01762164e+00 -1.05712533e+00 -2.37994313e+00] [-1.32083309e+00 -1.08417380e+00 -1.10967159e+00 ... -1.38096309e+00 -1.79984474e+00 -1.48821723e+00]] [[-2.65490651e-01 -2.40560484e+00 -5.89481115e-01 ... -2.52573848e-01 -1.37955546e+00 -8.91074836e-01] [-1.50553405e+00 -1.03129709e+00 -1.10737002e+00 ... -2.04601979e+00 -9.20120299e-01 -1.12702644e+00] [-1.22331715e+00 -5.57413042e-01 -1.88301492e+00 ... -2.27775383e+00 -5.08391976e-01 -2.18336439e+00] ... [-1.83507359e+00 -2.48410845e+00 -8.18021476e-01 ... -1.31169069e+00 -1.26819730e+00 -2.08381891e+00] [-2.21051812e+00 -1.26243877e+00 -6.21392012e-01 ... -1.46139884e+00 -2.44325829e+00 -1.27310407e+00] [-1.14583515e-01 -1.34680820e+00 -1.05950594e+00 ... -2.31759095e+00 -1.44832850e+00 -2.04039073e+00]] [[-1.93842101e+00 -1.61067855e+00 -8.44649017e-01 ... -1.35214543e+00 2.09236100e-01 -1.56592083e+00] [-1.46070874e+00 -8.45112324e-01 -1.12452185e+00 ... -1.19617820e+00 -1.15661597e+00 -2.46155000e+00] [-1.35184038e+00 -9.51431766e-02 -1.69719577e+00 ... -2.52341032e-01 -1.85125268e+00 -7.99959779e-01] ... [-1.83968985e+00 -9.91563618e-01 -1.95758426e+00 ... -2.49887538e+00 -1.83993649e+00 -1.44737351e+00] [-2.34951234e+00 -2.35335088e+00 -1.79128885e+00 ... 
-1.33738232e+00 -1.78953433e+00 -1.37193513e+00] [-2.13360834e+00 -1.33292449e+00 -2.55797362e+00 ... -2.32307363e+00 -1.08953190e+00 -1.63136625e+00]] ... [[ 6.67722464e-01 -1.39185750e+00 -1.40287519e+00 ... -1.68281567e+00 -1.16337800e+00 -1.55456245e+00] [-1.54661477e+00 -1.91463590e+00 -1.09015727e+00 ... -1.80851161e+00 -1.33835900e+00 -1.11438751e+00] [-8.26344252e-01 -8.58257294e-01 -1.56622672e+00 ... -1.91311014e+00 -1.88736033e+00 -7.90015161e-01] ... [-1.54414523e+00 -1.05921376e+00 -2.85192060e+00 ... -1.90386844e+00 -2.25992799e+00 -2.96060491e+00] [-1.07520723e+00 -1.00956106e+00 -1.10525513e+00 ... -1.53405905e+00 -1.46898007e+00 -1.54042947e+00] [-2.15245032e+00 -7.88168252e-01 -1.90712929e+00 ... -1.21723938e+00 -1.41766989e+00 -9.60109830e-01]] [[-9.99107420e-01 -1.11519921e+00 -2.53641891e+00 ... -1.59617651e+00 -1.86538899e+00 -2.49586964e+00] [-1.41633642e+00 -2.02337337e+00 -5.78334212e-01 ... -1.79252350e+00 -1.65407169e+00 -1.66958117e+00] [-2.96302867e+00 -1.31814682e+00 -1.15832222e+00 ... -1.48303056e+00 -2.09034491e+00 -1.37673926e+00] ... [-1.68090463e+00 -7.44305074e-01 -7.23431587e-01 ... -2.84353471e+00 -9.09735441e-01 -2.19293761e+00] [-2.17987275e+00 -2.73414993e+00 -1.31739700e+00 ... -1.10158157e+00 -1.12404788e+00 -1.72764659e+00] [-2.13164997e+00 -1.45869911e+00 -1.18237853e+00 ... -1.72067153e+00 -9.26648855e-01 -1.87998962e+00]] [[-1.63843489e+00 -1.04756308e+00 -1.32524908e+00 ... -2.00557971e+00 -1.77837372e+00 -1.39709485e+00] [-1.41073787e+00 -2.26350856e+00 -9.53317523e-01 ... -1.44432366e+00 -8.64731908e-01 -2.45860958e+00] [-2.98124027e+00 -2.35410905e+00 -1.75958157e+00 ... -7.64243782e-01 -2.17257619e+00 -1.14312208e+00] ... [-7.28678465e-01 -1.80736792e+00 -9.73522007e-01 ... -1.01838779e+00 -1.35918188e+00 -4.71082330e-01] [-1.45240402e+00 -2.22331095e+00 -2.51006150e+00 ... 4.13735844e-02 -1.49386096e+00 -1.98313963e+00] [-2.25880098e+00 -1.73974216e+00 -1.84322977e+00 ... 
-1.44854665e+00 -1.47873795e+00 -1.88588870e+00]]] [[[-1.16993713e+00 4.04719830e-01 -1.31885982e+00 ... -6.82019413e-01 -8.38359892e-01 2.10783795e-01] [-6.28302574e-01 -5.47941089e-01 -4.64452416e-01 ... -1.02791142e+00 1.41237617e-01 3.23097706e-01] [-6.74074292e-01 3.79326403e-01 -1.86170369e-01 ... 2.02160925e-01 -3.58934253e-01 -6.81843460e-01] ... [ 5.03273785e-01 -3.14739108e-01 -3.06281209e-01 ... -1.86103731e-01 -2.48605117e-01 -6.94611073e-02] [ 3.27249050e-01 -1.17112732e+00 -1.13655996e+00 ... 1.95313990e-01 -1.36710405e-01 -2.15184331e-01] [-3.52673143e-01 -1.40419960e-01 -1.00974262e+00 ... 5.21597378e-02 -5.27451694e-01 -2.18252674e-01]] [[ 1.51791751e-01 2.99678445e-01 -2.02530131e-01 ... 8.74906421e-01 1.50157707e-02 -3.71899188e-01] [ 7.22121537e-01 -2.09080309e-01 1.88999683e-01 ... -2.86207974e-01 7.65483260e-01 -5.11711359e-01] [-8.12733829e-01 2.70175397e-01 -2.64931694e-02 ... -2.91473567e-01 -3.89340699e-01 2.39550814e-01] ... [-4.48900282e-01 -3.95019114e-01 5.79995990e-01 ... -6.85741425e-01 -1.79985374e-01 -9.66834128e-01] [-7.83460140e-02 9.60423768e-01 2.68915892e-01 ... -2.32319206e-01 2.66337752e-01 -5.24197400e-01] [ 3.31631452e-02 -6.68864727e-01 -6.52818501e-01 ... -1.87384039e-01 -5.23148298e-01 -1.10575378e+00]] [[-2.28078097e-01 6.47998005e-02 3.76663469e-02 ... -7.61485219e-01 8.91623259e-01 -9.71974969e-01] [-3.46838206e-01 -8.92892703e-02 -5.49111128e-01 ... 4.34189253e-02 -8.92073810e-01 -3.10221344e-01] [-6.38350427e-01 -4.19947535e-01 1.41573653e-01 ... 4.86647606e-01 1.55683532e-01 -6.08140230e-01] ... [-4.41092849e-01 -7.09392130e-01 -3.36736947e-01 ... -2.17582852e-01 7.06940353e-01 -7.46444389e-02] [-1.17042470e+00 -2.72220165e-01 -8.42469156e-01 ... -8.41684461e-01 2.43261591e-01 1.48329422e-01] [-9.36946213e-01 7.80171305e-02 -1.26582608e-01 ... 5.33124767e-02 -8.75831246e-01 7.49694347e-01]] ... [[-6.01030767e-01 -1.59233972e-01 -6.71921492e-01 ... 
1.10963956e-01 -2.39590153e-01 -1.36721015e+00] [-4.08830732e-01 -2.26193234e-01 -1.15263093e+00 ... 6.47505522e-01 -2.55195916e-01 -4.62672785e-02] [ 2.95614034e-01 -1.34457171e+00 -5.54393768e-01 ... -3.59654635e-01 5.08978702e-02 -8.16987693e-01] ... [-1.51706904e-01 -1.64670683e-03 -4.77826059e-01 ... 9.66942683e-02 -2.95307606e-01 1.83173537e-01] [ 5.49938083e-01 5.35764754e-01 2.58024365e-01 ... -7.51758933e-01 2.07455069e-01 5.75436532e-01] [-9.56265926e-01 -2.16136709e-01 -1.28338113e-02 ... -6.91419393e-02 -3.85739468e-02 9.33178902e-01]] [[-5.16604483e-01 -1.01614821e+00 -4.47255254e-01 ... -1.00421584e+00 -4.99392539e-01 -4.58841652e-01] [-5.31612933e-01 8.44038904e-01 1.01145470e+00 ... -7.26734661e-03 -3.47924680e-01 -6.97351217e-01] [-3.97869378e-01 -5.02092004e-01 -4.01619613e-01 ... 1.79229397e-02 7.36709416e-01 -7.84057528e-02] ... [-2.47252941e-01 -1.22575246e-01 1.85866520e-01 ... 4.87334095e-02 -1.16145825e+00 -4.47307348e-01] [-3.52102429e-01 -3.67803812e-01 -1.62069261e-01 ... -1.81445137e-01 -9.22950566e-01 -1.07466424e+00] [ 6.93098366e-01 -2.50550896e-01 -5.47040641e-01 ... -2.71741718e-01 -8.42157304e-02 1.00206983e+00]] [[ 1.94934309e-01 -4.27424818e-01 -5.26268244e-01 ... 2.79554039e-01 -7.18262553e-01 -5.25130451e-01] [ 6.68434873e-02 1.70960829e-01 4.93920326e-01 ... 1.17613606e-01 -2.25167781e-01 -2.89049149e-01] [-1.84823543e-01 -2.28794441e-02 1.62744328e-01 ... 4.17688400e-01 5.33888698e-01 1.91639394e-01] ... [ 1.72580361e-01 -3.18438083e-01 -9.09799755e-01 ... -1.88566789e-01 -9.08680260e-01 4.82719630e-01] [ 1.28369546e+00 -5.87145925e-01 -3.96705508e-01 ... -5.59648760e-02 -1.87294722e-01 2.35463325e-02] [ 4.25752699e-01 1.61805958e-01 3.47350061e-01 ... 1.77270174e-01 -2.33028725e-01 1.60238966e-01]]] [[[ 6.29047811e-01 9.02037263e-01 2.86454320e+00 ... 3.22973657e+00 2.24032593e+00 -8.74750555e-01] [ 1.08006072e+00 7.22549796e-01 3.13702750e+00 ... 
2.75627041e+00 -3.48214239e-01 2.71071076e+00] [-1.13124692e+00 4.45409358e-01 2.01757884e+00 ... 1.63903689e+00 1.38192761e+00 1.61117762e-01] ... [ 2.83083296e+00 2.69648290e+00 7.52240896e-01 ... 3.53466839e-01 1.34049916e+00 2.06107259e+00] [ 2.95809197e+00 3.61104608e-02 9.22901094e-01 ... 1.33557773e+00 1.80297017e+00 3.25279403e+00] [ 3.91580731e-01 1.39569378e+00 2.79459071e+00 ... 7.27731094e-04 3.86129212e+00 9.72028911e-01]] [[ 7.37205684e-01 -5.12719750e-01 -1.01015262e-01 ... 2.29463243e+00 -4.84221756e-01 2.87375420e-01] [ 3.18029833e+00 1.58419359e+00 -5.61494410e-01 ... 3.44360447e+00 3.48939228e+00 6.34486198e-01] [ 2.77447677e+00 2.43669033e+00 1.05946696e+00 ... 1.99509847e+00 9.44904149e-01 9.63887751e-01] ... [ 7.14870334e-01 1.53818822e+00 2.06051016e+00 ... 1.46804118e+00 1.33948755e+00 1.31684124e+00] [ 4.43501806e+00 4.63097620e+00 2.47886324e+00 ... 3.63847136e+00 1.12058425e+00 -9.36652899e-01] [ 1.47063899e+00 7.21262574e-01 3.21975136e+00 ... 3.38437498e-01 1.53418970e+00 2.97841519e-01]] [[ 8.90822634e-02 1.80323589e+00 6.37210965e-01 ... -8.18783641e-01 -6.44228578e-01 -7.08728433e-01] [ 1.34186935e+00 9.62139010e-01 2.43368196e+00 ... 2.04846716e+00 2.09051299e+00 2.80504155e+00] [ 1.35119915e+00 1.31303266e-01 1.96226490e+00 ... -3.84539999e-02 9.41835105e-01 1.97510123e+00] ... [ 9.69762564e-01 4.16348219e-01 1.44774401e+00 ... 2.21891022e+00 9.39873457e-01 2.43771863e+00] [ 1.47739112e+00 5.13247848e-01 1.35060406e+00 ... 2.68992424e+00 1.21774352e+00 1.80095208e+00] [ 8.84290755e-01 -3.87309670e-01 2.46743417e+00 ... -2.55110204e-01 1.09304368e+00 1.84100258e+00]] ... [[ 6.77953064e-01 -8.83271694e-01 1.82467496e+00 ... 2.51220584e+00 1.41192424e+00 1.28061044e+00] [ 2.51216149e+00 1.46550143e+00 2.38778877e+00 ... 1.38161373e+00 4.04343653e+00 1.04608464e+00] [ 3.51748514e+00 1.32256556e+00 2.21944761e+00 ... 4.05505270e-01 1.16958129e+00 2.86639667e+00] ... [-7.28428125e-01 -2.02852035e+00 2.66275477e+00 ... 
7.92855620e-01 1.05834699e+00 1.19800997e+00] [ 2.01124716e+00 9.89259183e-01 3.18156695e+00 ... 6.40517473e-01 -5.70106745e-01 1.14345002e+00] [ 3.58168697e+00 1.55075109e+00 4.13302946e+00 ... 1.81803823e+00 4.10856771e+00 -1.39127046e-01]] [[-1.56920254e+00 2.06595421e+00 1.33582425e+00 ... 3.03829646e+00 -6.18058503e-01 1.60721004e+00] [-2.09753618e-01 9.25622761e-01 4.82049912e-01 ... 2.18580031e+00 4.58731681e-01 3.13513017e+00] [ 2.16876388e+00 7.55405307e-01 2.91710168e-01 ... 6.16881311e-01 1.70981571e-01 7.99706578e-01] ... [ 4.41644335e+00 4.09716988e+00 3.24160933e+00 ... 1.89771104e+00 2.10939431e+00 1.40547454e+00] [ 1.85214961e+00 4.51217353e-01 1.51505685e+00 ... 1.73793077e+00 -1.67672396e+00 2.98316908e+00] [ 2.31637907e+00 2.47045651e-01 1.01093180e-01 ... 1.98519695e+00 1.49553907e+00 2.60613155e+00]] [[ 8.74922454e-01 2.87079606e-02 3.53102160e+00 ... 8.60773563e-01 3.06083560e+00 7.43278980e-01] [ 2.23124027e+00 2.21345687e+00 2.74677825e+00 ... 1.93480098e+00 1.57599664e+00 1.09553255e-01] [-1.24832094e-01 -1.40224457e-01 1.87264895e+00 ... 9.33475494e-01 1.59179509e+00 8.14989030e-01] ... [ 2.01970294e-01 3.68268657e+00 2.22756958e+00 ... 4.87456656e+00 2.19549632e+00 1.40119410e+00] [-9.60204005e-02 4.16837072e+00 1.04594946e+00 ... 1.73755810e-01 2.66284251e+00 4.95522290e-01] [-6.13642395e-01 9.33425844e-01 6.16250217e-01 ... -3.72507542e-01 -1.71382576e-01 2.02584863e-02]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [1, 1, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_864.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.05212}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.1094 (2,1,1,.,.) = 0.4901 (3,1,1,.,.) = -1.2457 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[ 7.7764654e-01 -4.6073914e-02 1.6815271e+00 -1.2567137e+00 1.2556705e+00 3.2126584e+00 1.5010712e+00 6.7704457e-01] [ 8.0290389e-01 3.9233913e+00 1.4012516e-01 -3.0679929e-01 1.1613258e+00 1.7422800e+00 -2.1121979e-01 3.6431842e+00] [ 1.7540832e+00 -8.4636414e-01 1.6014988e+00 1.4244189e+00 -1.3564819e+00 6.7892838e-01 2.5561571e+00 1.9863966e+00] [ 2.8057559e+00 4.6554685e-02 -8.7624598e-01 2.0286057e+00 5.3777277e-01 1.6410236e+00 2.2053831e+00 1.1680394e+00] [ 3.9619060e+00 7.3218024e-01 2.1610832e+00 2.2667446e+00 6.4933044e-01 1.2746127e+00 2.5865667e+00 1.3037726e+00] [ 3.8303965e-01 1.9158304e+00 -5.0998867e-01 -5.3211188e-01 6.0936946e-01 3.4671450e+00 3.3274193e+00 1.0146999e+00] [ 2.9695179e+00 2.9881120e+00 1.6894518e+00 1.4684718e+00 2.6982532e+00 5.3323239e-01 1.6654739e+00 2.2831788e+00] [ 8.4053457e-02 -9.5285165e-01 -8.3605719e-01 1.3131599e+00 
1.6410072e+00 -5.8685279e-01 8.6465955e-01 2.2110896e+00]] [[ 2.9003818e+00 4.3878942e+00 1.1717333e+00 4.5636892e-03 1.8102845e+00 -7.0376778e-01 6.7753017e-01 1.2540605e+00] [-4.9600923e-01 2.0124722e-01 2.1279113e+00 8.7944180e-01 2.7716881e-01 2.9923949e+00 3.2550430e+00 4.6816432e-01] [ 3.9411235e-01 1.4133356e+00 1.7817301e+00 1.2163956e+00 -9.4074452e-01 1.4670260e+00 2.8303685e+00 -2.5954115e-01] [ 1.8197486e+00 7.8459293e-01 2.0160460e+00 1.4158882e+00 5.7068694e-01 3.8704643e+00 -3.8772976e-01 3.6360326e+00] [ 2.7206945e-01 -2.0184159e-02 1.5776190e+00 -7.0903540e-02 -5.8683228e-01 1.0672938e+00 -5.1194215e-01 2.1075416e-01] [ 1.1761379e+00 1.5822754e+00 3.0437355e+00 4.1698394e+00 -3.4046268e-01 3.0511522e+00 2.8584415e-01 3.2025278e-01] [-1.2837039e+00 -3.5936773e-01 3.5058635e-01 1.3398141e+00 -2.3620641e-01 9.4267023e-01 -1.9074821e-01 1.9579542e+00] [ 1.2439384e+00 9.3700284e-01 2.5540466e+00 -3.7167692e-01 1.3472418e+00 4.8471659e-01 2.2909355e+00 1.9612503e+00]] [[ 1.2122358e+00 3.6336379e+00 2.9720278e+00 3.3017855e+00 2.0191777e+00 1.0514089e+00 -1.5868970e+00 3.2069402e+00] [ 2.6759849e+00 7.3181760e-01 1.3684223e+00 3.2875490e+00 1.7760661e+00 2.2163997e+00 1.8321805e+00 5.9487170e-01] [-1.5508617e+00 1.2059426e+00 -2.0290084e+00 2.7897352e-01 3.5866547e+00 2.2395363e+00 2.7416096e+00 1.0954174e+00] [ 1.9806217e+00 -3.1788814e-01 2.9444212e-01 3.0042281e+00 3.3527188e+00 1.7681592e+00 1.2595073e+00 2.0513314e-01] [-7.3279512e-01 1.6539953e+00 -1.0332958e+00 5.8068371e-01 -1.8138829e+00 -1.0999643e+00 -6.5024495e-01 7.9600441e-01] [ 2.2276533e-01 3.7685723e+00 -1.9009137e-01 1.1247244e+00 -1.8918085e-01 -1.3525761e+00 2.4439363e+00 1.4342418e+00] [ 6.1387330e-01 1.2064739e+00 1.5969417e+00 1.5753587e+00 1.3591934e+00 -6.3722849e-01 3.6785836e+00 8.6220992e-01] [ 2.2773366e+00 5.6066227e-01 1.3795078e+00 1.3870071e+00 1.0035335e+00 -2.5107884e+00 8.5550106e-01 3.5234962e+00]] [[ 1.8106779e+00 2.3240700e+00 2.0254867e+00 1.3612021e+00 
-1.5389117e+00 4.3689054e-01 1.2130908e+00 1.5203915e+00] [-3.2772315e-01 2.7061167e+00 2.2917938e+00 1.1837283e+00 5.0364107e-01 3.6821184e+00 -1.2190415e+00 2.6043212e-01] [-1.5217936e-01 8.9020371e-02 4.1023130e+00 1.0772876e+00 2.0145422e-01 5.7051307e-01 -1.0350682e+00 5.7967669e-01] [ 3.0504708e+00 1.7114961e-01 8.0530947e-01 8.4008032e-01 1.6039066e+00 1.9870985e+00 1.9493229e+00 1.5957683e+00] [-9.3406057e-01 1.0638379e+00 1.8451238e+00 3.8503313e+00 2.9944301e+00 2.6085730e+00 2.4804053e+00 2.6136093e+00] [ 1.3252521e+00 1.1795245e+00 -9.8818195e-01 1.1020621e+00 -1.7540823e+00 3.0087543e+00 -6.5576398e-01 9.1370219e-01] [-7.4067211e-01 -6.6513813e-01 1.1998271e+00 1.4261166e+00 1.4502470e+00 2.0416002e+00 4.1344452e+00 1.5023460e+00] [ 8.2278550e-01 3.7791401e-01 2.6800323e-01 1.1868434e+00 4.0851932e+00 3.1131635e+00 -4.3791544e-01 -1.3246504e+00]] [[ 1.1409054e+00 2.6634669e+00 1.2808871e+00 -1.2439960e+00 2.1870699e+00 1.2193410e+00 1.4079827e+00 2.2824411e+00] [ 9.1459107e-01 1.9001520e+00 2.4880154e+00 4.6252716e-01 8.9632785e-01 -6.5569878e-03 5.3453636e-01 1.6740234e+00] [ 5.8943594e-01 1.2765646e+00 1.8003030e+00 1.5862858e-01 1.0043120e+00 -6.6621816e-01 -1.4838463e+00 3.6034126e+00] [ 6.6854358e-02 -3.0298412e-01 -1.2886697e+00 2.3218770e+00 5.1044327e-01 3.2538152e+00 2.9895453e+00 1.3911265e+00] [ 2.8191266e+00 4.3812221e-01 2.2298408e+00 1.6948057e+00 2.6104467e+00 3.7956053e-01 1.5826690e+00 2.5532621e-01] [ 7.9098827e-01 -1.2275256e+00 -9.5402181e-01 2.3251538e+00 4.8500834e+00 1.1293073e+00 1.1099199e+00 3.3469200e-02] [ 1.8179302e+00 2.2175660e+00 3.9773979e+00 2.5104833e+00 3.1586032e+00 2.2880859e+00 1.7649800e-01 3.1334186e-01] [ 1.7455956e+00 5.9650254e-01 1.5446550e-01 9.4455749e-01 2.3580132e+00 1.8843853e-01 1.0887587e+00 3.3822852e-01]] [[ 8.0834270e-01 1.7590597e+00 1.5121500e+00 5.5326760e-02 1.2870784e+00 1.6718289e+00 5.7664680e-01 1.5026267e+00] [ 6.5253246e-01 1.6009259e-01 8.6994076e-01 -4.5036805e-01 2.4705544e+00 
-1.7418003e-01 -1.0880983e-01 -1.0938386e+00] [ 1.7472559e-01 -7.1724474e-01 -5.5123889e-01 3.5277677e-01 1.0411221e+00 1.7654687e-01 1.6135688e+00 1.2652518e+00] [ 4.1969519e+00 1.5713189e+00 1.1223587e+00 3.7700230e-01 1.0588853e+00 -1.3133503e+00 5.6982529e-01 4.7094291e-01] [ 2.2360764e+00 1.9845803e+00 -4.6200883e-01 -8.3488441e-01 -1.0350794e+00 7.7782595e-01 5.1675439e-02 -1.6370500e+00] [ 2.6438594e-02 -1.1462772e-01 3.9683118e+00 9.7034007e-01 3.9563527e+00 8.6337674e-01 1.0615546e+00 1.0375748e+00] [ 6.3148165e-01 2.5531259e+00 -3.3011162e-01 2.9880147e+00 -2.6571050e+00 1.1556230e+00 1.6637137e+00 1.6819711e+00] [ 3.4162412e+00 2.7065454e+00 2.6277238e-01 1.7154336e-02 3.6328673e-01 -2.8627157e-01 4.6839714e+00 2.0811110e+00]] [[ 2.9686189e+00 -2.6624417e-01 1.2268063e+00 2.5831504e+00 2.2612138e+00 6.5596414e-01 1.3007126e+00 -1.6939248e+00] [ 1.4911265e+00 -2.6580775e-01 3.2355089e+00 2.8576860e+00 7.6538110e-01 4.6059108e-01 1.3567272e+00 1.0867532e+00] [ 2.1784842e-01 2.1796482e+00 -6.7703164e-01 2.6331983e+00 1.6009347e+00 2.6316690e+00 9.0764606e-01 1.8347083e+00] [-2.0153074e+00 -2.0607102e-01 1.7488127e+00 1.5623319e+00 2.5775437e+00 1.3061433e+00 4.0345341e-01 1.5736756e+00] [ 3.2112269e+00 1.4446106e+00 1.8369007e+00 1.7402409e+00 1.8961082e+00 1.4742976e+00 5.7563818e-01 1.3827233e+00] [ 2.6566234e+00 2.3391473e+00 1.3711028e+00 1.1094646e+00 3.3285508e+00 1.7347229e+00 4.7458166e-01 4.0434194e-01] [ 8.7944269e-03 3.8937597e+00 -9.3603611e-02 -9.4736409e-01 4.6421748e-01 -1.3348627e-01 2.1443923e+00 1.0139948e+00] [-3.3913851e-03 1.5703616e+00 4.1152984e-01 2.1344099e+00 1.2566555e-01 1.4858906e+00 1.9379604e+00 -3.8005054e-01]] [[ 6.9708693e-01 -3.6097300e-01 1.2380311e+00 -8.5347295e-01 1.6208851e+00 1.2080165e+00 1.1134106e+00 1.1815650e+00] [ 2.5787618e+00 2.5749478e+00 7.2927737e-01 -6.6386700e-02 9.9481761e-02 7.7421832e-01 6.9539452e-01 -6.5244389e-01] [ 2.7712772e+00 2.3062289e+00 1.6885372e+00 6.4398324e-01 -1.0327619e+00 
1.4809005e+00 2.3082416e+00 3.9229665e+00] [ 1.1780783e+00 7.6113343e-02 1.0933200e+00 1.6076880e+00 1.0449477e+00 2.6256807e+00 2.8602948e+00 8.7420940e-02] [ 8.2575083e-03 1.2191868e+00 7.9286391e-01 1.4065022e+00 2.4430285e+00 1.3067946e+00 2.0236347e+00 1.2686224e+00] [ 1.2409070e+00 2.2434053e+00 3.5681505e+00 6.7721897e-01 2.3849926e+00 -5.5024004e-01 -5.7275116e-01 2.4596176e+00] [-2.3641882e+00 7.4104607e-01 2.1719129e+00 1.8979902e+00 2.2585099e+00 2.2148888e+00 1.9470692e-02 -9.1937947e-01] [-4.5107412e-01 2.6542878e+00 1.3702363e+00 -2.3876524e-01 2.1303039e+00 2.9816108e+00 2.4591045e+00 1.1348763e+00]]]]]; ov_res: [[[[[ 7.7764654e-01 -4.6073914e-02 1.6815271e+00 -1.2567137e+00 1.2556705e+00 3.2126584e+00 1.5010712e+00 6.7704457e-01] [ 8.0290389e-01 3.9233913e+00 1.4012516e-01 -3.0679929e-01 1.1613258e+00 1.7422800e+00 -2.1121979e-01 3.6431842e+00] [ 1.7540832e+00 -8.4636414e-01 1.6014988e+00 1.4244189e+00 -1.3564819e+00 6.7892838e-01 2.5561571e+00 1.9863966e+00] [ 2.8057559e+00 4.6554685e-02 -8.7624598e-01 2.0286057e+00 5.3777277e-01 1.6410236e+00 2.2053831e+00 1.1680394e+00] [ 3.9619060e+00 7.3218024e-01 2.1610832e+00 2.2667446e+00 6.4933044e-01 1.2746127e+00 2.5865667e+00 1.3037726e+00] [ 3.8303965e-01 1.9158304e+00 -5.0998867e-01 -5.3211188e-01 6.0936946e-01 3.4671450e+00 3.3274193e+00 1.0146999e+00] [ 2.9695179e+00 2.9881120e+00 1.6894518e+00 1.4684718e+00 2.6982532e+00 5.3323239e-01 1.6654739e+00 2.2831788e+00] [ 8.4053457e-02 -9.5285165e-01 -8.3605719e-01 1.3131599e+00 1.6410072e+00 -5.8685279e-01 8.6465955e-01 2.2110896e+00]] [[ 2.9003818e+00 4.3878942e+00 1.1717333e+00 4.5636892e-03 1.8102845e+00 -7.0376778e-01 6.7753017e-01 1.2540605e+00] [-4.9600923e-01 2.0124722e-01 2.1279113e+00 8.7944180e-01 2.7716881e-01 2.9923949e+00 3.2550430e+00 4.6816432e-01] [ 3.9411235e-01 1.4133356e+00 1.7817301e+00 1.2163956e+00 -9.4074452e-01 1.4670260e+00 2.8303685e+00 -2.5954115e-01] [ 1.8197486e+00 7.8459293e-01 2.0160460e+00 1.4158882e+00 5.7068694e-01 
3.8704643e+00 -3.8772976e-01 3.6360326e+00] [ 2.7206945e-01 -2.0184159e-02 1.5776190e+00 -7.0903540e-02 -5.8683228e-01 1.0672938e+00 -5.1194215e-01 2.1075416e-01] [ 1.1761379e+00 1.5822754e+00 3.0437355e+00 4.1698394e+00 -3.4046268e-01 3.0511522e+00 2.8584415e-01 3.2025278e-01] [-1.2837039e+00 -3.5936773e-01 3.5058635e-01 1.3398141e+00 -2.3620641e-01 9.4267023e-01 -1.9074821e-01 1.9579542e+00] [ 1.2439384e+00 9.3700284e-01 2.5540466e+00 -3.7167692e-01 1.3472418e+00 4.8471659e-01 2.2909355e+00 1.9612503e+00]] [[ 1.2122358e+00 3.6336379e+00 2.9720278e+00 3.3017855e+00 2.0191777e+00 1.0514089e+00 -1.5868970e+00 3.2069402e+00] [ 2.6759849e+00 7.3181760e-01 1.3684223e+00 3.2875490e+00 1.7760661e+00 2.2163997e+00 1.8321805e+00 5.9487170e-01] [-1.5508617e+00 1.2059426e+00 -2.0290084e+00 2.7897352e-01 3.5866547e+00 2.2395363e+00 2.7416096e+00 1.0954174e+00] [ 1.9806217e+00 -3.1788814e-01 2.9444212e-01 3.0042281e+00 3.3527188e+00 1.7681592e+00 1.2595073e+00 2.0513314e-01] [-7.3279512e-01 1.6539953e+00 -1.0332958e+00 5.8068371e-01 -1.8138829e+00 -1.0999643e+00 -6.5024495e-01 7.9600441e-01] [ 2.2276533e-01 3.7685723e+00 -1.9009137e-01 1.1247244e+00 -1.8918085e-01 -1.3525761e+00 2.4439363e+00 1.4342418e+00] [ 6.1387330e-01 1.2064739e+00 1.5969417e+00 1.5753587e+00 1.3591934e+00 -6.3722849e-01 3.6785836e+00 8.6220992e-01] [ 2.2773366e+00 5.6066227e-01 1.3795078e+00 1.3870071e+00 1.0035335e+00 -2.5107884e+00 8.5550106e-01 3.5234962e+00]] [[ 1.8106779e+00 2.3240700e+00 2.0254867e+00 1.3612021e+00 -1.5389117e+00 4.3689054e-01 1.2130908e+00 1.5203915e+00] [-3.2772315e-01 2.7061167e+00 2.2917938e+00 1.1837283e+00 5.0364107e-01 3.6821184e+00 -1.2190415e+00 2.6043212e-01] [-1.5217936e-01 8.9020371e-02 4.1023130e+00 1.0772876e+00 2.0145422e-01 5.7051307e-01 -1.0350682e+00 5.7967669e-01] [ 3.0504708e+00 1.7114961e-01 8.0530947e-01 8.4008032e-01 1.6039066e+00 1.9870985e+00 1.9493229e+00 1.5957683e+00] [-9.3406057e-01 1.0638379e+00 1.8451238e+00 3.8503313e+00 2.9944301e+00 2.6085730e+00 
2.4804053e+00 2.6136093e+00] [ 1.3252521e+00 1.1795245e+00 -9.8818195e-01 1.1020621e+00 -1.7540823e+00 3.0087543e+00 -6.5576398e-01 9.1370219e-01] [-7.4067211e-01 -6.6513813e-01 1.1998271e+00 1.4261166e+00 1.4502470e+00 2.0416002e+00 4.1344452e+00 1.5023460e+00] [ 8.2278550e-01 3.7791401e-01 2.6800323e-01 1.1868434e+00 4.0851932e+00 3.1131635e+00 -4.3791544e-01 -1.3246504e+00]] [[ 1.1409054e+00 2.6634669e+00 1.2808871e+00 -1.2439960e+00 2.1870699e+00 1.2193410e+00 1.4079827e+00 2.2824411e+00] [ 9.1459107e-01 1.9001520e+00 2.4880154e+00 4.6252716e-01 8.9632785e-01 -6.5569878e-03 5.3453636e-01 1.6740234e+00] [ 5.8943594e-01 1.2765646e+00 1.8003030e+00 1.5862858e-01 1.0043120e+00 -6.6621816e-01 -1.4838463e+00 3.6034126e+00] [ 6.6854358e-02 -3.0298412e-01 -1.2886697e+00 2.3218770e+00 5.1044327e-01 3.2538152e+00 2.9895453e+00 1.3911265e+00] [ 2.8191266e+00 4.3812221e-01 2.2298408e+00 1.6948057e+00 2.6104467e+00 3.7956053e-01 1.5826690e+00 2.5532621e-01] [ 7.9098827e-01 -1.2275256e+00 -9.5402181e-01 2.3251538e+00 4.8500834e+00 1.1293073e+00 1.1099199e+00 3.3469200e-02] [ 1.8179302e+00 2.2175660e+00 3.9773979e+00 2.5104833e+00 3.1586032e+00 2.2880859e+00 1.7649800e-01 3.1334186e-01] [ 1.7455956e+00 5.9650254e-01 1.5446550e-01 9.4455749e-01 2.3580132e+00 1.8843853e-01 1.0887587e+00 3.3822852e-01]] [[ 8.0834270e-01 1.7590597e+00 1.5121500e+00 5.5326760e-02 1.2870784e+00 1.6718289e+00 5.7664680e-01 1.5026267e+00] [ 6.5253246e-01 1.6009259e-01 8.6994076e-01 -4.5036805e-01 2.4705544e+00 -1.7418003e-01 -1.0880983e-01 -1.0938386e+00] [ 1.7472559e-01 -7.1724474e-01 -5.5123889e-01 3.5277677e-01 1.0411221e+00 1.7654687e-01 1.6135688e+00 1.2652518e+00] [ 4.1969519e+00 1.5713189e+00 1.1223587e+00 3.7700230e-01 1.0588853e+00 -1.3133503e+00 5.6982529e-01 4.7094291e-01] [ 2.2360764e+00 1.9845803e+00 -4.6200883e-01 -8.3488441e-01 -1.0350794e+00 7.7782595e-01 5.1675439e-02 -1.6370500e+00] [ 2.6438594e-02 -1.1462772e-01 3.9683118e+00 9.7034007e-01 3.9563527e+00 8.6337674e-01 1.0615546e+00 
1.0375748e+00] [ 6.3148165e-01 2.5531259e+00 -3.3011162e-01 2.9880147e+00 -2.6571050e+00 1.1556230e+00 1.6637137e+00 1.6819711e+00] [ 3.4162412e+00 2.7065454e+00 2.6277238e-01 1.7154336e-02 3.6328673e-01 -2.8627157e-01 4.6839714e+00 2.0811110e+00]] [[ 2.9686189e+00 -2.6624417e-01 1.2268063e+00 2.5831504e+00 2.2612138e+00 6.5596414e-01 1.3007126e+00 -1.6939248e+00] [ 1.4911265e+00 -2.6580775e-01 3.2355089e+00 2.8576860e+00 7.6538110e-01 4.6059108e-01 1.3567272e+00 1.0867532e+00] [ 2.1784842e-01 2.1796482e+00 -6.7703164e-01 2.6331983e+00 1.6009347e+00 2.6316690e+00 9.0764606e-01 1.8347083e+00] [-2.0153074e+00 -2.0607102e-01 1.7488127e+00 1.5623319e+00 2.5775437e+00 1.3061433e+00 4.0345341e-01 1.5736756e+00] [ 3.2112269e+00 1.4446106e+00 1.8369007e+00 1.7402409e+00 1.8961082e+00 1.4742976e+00 5.7563818e-01 1.3827233e+00] [ 2.6566234e+00 2.3391473e+00 1.3711028e+00 1.1094646e+00 3.3285508e+00 1.7347229e+00 4.7458166e-01 4.0434194e-01] [ 8.7944269e-03 3.8937597e+00 -9.3603611e-02 -9.4736409e-01 4.6421748e-01 -1.3348627e-01 2.1443923e+00 1.0139948e+00] [-3.3913851e-03 1.5703616e+00 4.1152984e-01 2.1344099e+00 1.2566555e-01 1.4858906e+00 1.9379604e+00 -3.8005054e-01]] [[ 6.9708693e-01 -3.6097300e-01 1.2380311e+00 -8.5347295e-01 1.6208851e+00 1.2080165e+00 1.1134106e+00 1.1815650e+00] [ 2.5787618e+00 2.5749478e+00 7.2927737e-01 -6.6386700e-02 9.9481761e-02 7.7421832e-01 6.9539452e-01 -6.5244389e-01] [ 2.7712772e+00 2.3062289e+00 1.6885372e+00 6.4398324e-01 -1.0327619e+00 1.4809005e+00 2.3082416e+00 3.9229665e+00] [ 1.1780783e+00 7.6113343e-02 1.0933200e+00 1.6076880e+00 1.0449477e+00 2.6256807e+00 2.8602948e+00 8.7420940e-02] [ 8.2575083e-03 1.2191868e+00 7.9286391e-01 1.4065022e+00 2.4430285e+00 1.3067946e+00 2.0236347e+00 1.2686224e+00] [ 1.2409070e+00 2.2434053e+00 3.5681505e+00 6.7721897e-01 2.3849926e+00 -5.5024004e-01 -5.7275116e-01 2.4596176e+00] [-2.3641882e+00 7.4104607e-01 2.1719129e+00 1.8979902e+00 2.2585099e+00 2.2148888e+00 1.9470692e-02 -9.1937947e-01] 
[-4.5107412e-01 2.6542878e+00 1.3702363e+00 -2.3876524e-01 2.1303039e+00 2.9816108e+00 2.4591045e+00 1.1348763e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 1, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_866.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.1914 -0.9383 0.9775 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.1771 (2,1,1,.,.) = -0.2987 (3,1,1,.,.) = 0.8891 (1,2,1,.,.) = -0.4259 (2,2,1,.,.) = 0.01 * 3.4039 (3,2,1,.,.) = 0.5980 (1,3,1,.,.) = 0.9755 (2,3,1,.,.) = 0.9709 (3,3,1,.,.) = 0.8732 [ CPUFloatType{3,3,1,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] ... [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]] [[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -2.110557 -0.2445972 ... -1.2038215 0.5259205 -1.1914415 ] [-1.1914415 -1.4996899 -2.2772703 ... -2.9739819 -1.2891479 -1.1914415 ] ... [-1.1914415 0.6518545 0.23677301 ... -0.9154839 -0.31815875 -1.1914415 ] [-1.1914415 -2.8628936 -2.0342214 ... 
-1.0549283 -3.2141106 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]] [[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.0836954 0.58888566 ... -1.1116191 -0.22797072 -1.1914415 ] [-1.1914415 -1.9070444 -0.8756852 ... -1.8214612 -2.766931 -1.1914415 ] ... [-1.1914415 -1.1774668 -1.3326846 ... -1.6377032 0.08289373 -1.1914415 ] [-1.1914415 -2.6713471 0.86302996 ... -1.7634295 -1.0417227 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]] ... [[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -2.3229141 -1.1453646 ... -0.2922889 0.10705781 -1.1914415 ] [-1.1914415 -1.1634865 -1.3941518 ... 0.2713884 0.52536345 -1.1914415 ] ... [-1.1914415 -0.31330913 -1.7200179 ... -0.7706653 1.1555045 -1.1914415 ] [-1.1914415 -2.200663 -2.0047064 ... -0.8917654 -1.9885526 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]] [[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.5411081 -0.66282874 ... -2.4243774 -0.21191567 -1.1914415 ] [-1.1914415 -0.20423281 -1.1792762 ... -0.27453303 -0.9346415 -1.1914415 ] ... [-1.1914415 -2.2803867 -0.6466864 ... -1.1499155 -1.7802573 -1.1914415 ] [-1.1914415 -2.2678757 -1.757288 ... -1.3014687 -5.396552 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]] [[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] ... [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]]] [[[-0.9382898 -0.9382898 -0.9382898 ... 
-0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] ... [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]] [[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -2.3780909 -0.1980524 ... -1.3060002 0.7829171 -0.9382898 ] [-0.9382898 -0.9091023 -1.0612458 ... -1.5217309 -0.7286452 -0.9382898 ] ... [-0.9382898 0.586528 0.2750799 ... -0.60634565 -0.5725278 -0.9382898 ] [-0.9382898 -1.5541142 -1.5042634 ... -0.3325109 -2.3540606 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]] [[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -1.3081807 -0.55581474 ... -0.7644867 -0.02237564 -0.9382898 ] [-0.9382898 -0.64078295 -0.47523835 ... -1.8512964 -2.170347 -0.9382898 ] ... [-0.9382898 -1.3397683 -1.2618322 ... -0.6858988 -0.60973793 -0.9382898 ] [-0.9382898 -1.5929651 0.49850923 ... -0.56926453 0.3711576 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]] ... [[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -1.9285026 -0.4598905 ... -0.5007833 0.07258171 -0.9382898 ] [-0.9382898 -1.3704692 -0.6781014 ... 1.0197427 -0.24082059 -0.9382898 ] ... [-0.9382898 -1.1348464 0.14491326 ... 0.00781512 0.78559035 -0.9382898 ] [-0.9382898 -1.0987364 -1.6157842 ... -0.9781478 -1.1968298 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]] [[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -1.0135403 -1.97085 ... -2.4566038 0.3277138 -0.9382898 ] [-0.9382898 -0.16355318 -1.4463835 ... 1.4589558 -1.5035424 -0.9382898 ] ... 
[-0.9382898 -1.6815178 -1.1231959 ... -0.35531723 -0.8853859 -0.9382898 ] [-0.9382898 -1.9978886 -0.9796169 ... -1.7678387 -4.2418385 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]] [[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] ... [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]]] [[[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] ... [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]] [[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.56870687 0.3978545 ... -0.7974733 2.4911761 0.97750795] [ 0.97750795 2.526348 1.1837171 ... 1.9629006 3.7605343 0.97750795] ... [ 0.97750795 3.1066337 2.7242155 ... 1.8837016 2.1803198 0.97750795] [ 0.97750795 -0.9483268 -2.170402 ... 1.3578714 0.80186534 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]] [[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 2.2242055 2.743625 ... 3.0683393 1.4839448 0.97750795] [ 0.97750795 0.77160186 1.4828131 ... 0.6154566 3.0525265 0.97750795] ... [ 0.97750795 0.4338532 2.193231 ... 0.3663085 1.8680501 0.97750795] [ 0.97750795 -0.66655684 2.7300305 ... 0.6591623 1.4892569 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]] ... 
[[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 -0.7057923 1.1004915 ... -0.33160603 -1.4789139 0.97750795] [ 0.97750795 3.5499082 0.27644068 ... 1.9599903 0.02134502 0.97750795] ... [ 0.97750795 2.2454712 -1.1563784 ... -0.43631434 3.902657 0.97750795] [ 0.97750795 0.6101961 0.23814034 ... -0.2249614 0.13914633 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]] [[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 1.3474923 2.7589262 ... -1.3185898 1.6839321 0.97750795] [ 0.97750795 1.4370809 1.9525731 ... 0.54352295 -2.8663282 0.97750795] ... [ 0.97750795 -2.509366 2.3815348 ... 0.6509187 0.56414384 0.97750795] [ 0.97750795 -0.97735345 1.4037826 ... 0.32375544 -3.4082909 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]] [[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] ... [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]]]]]; ov_res: [[[[[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] ... [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]] [[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -2.110557 -0.2445972 ... -1.2038215 0.5259205 -1.1914415 ] [-1.1914415 -1.4996899 -2.2772703 ... -2.9739819 -1.2891479 -1.1914415 ] ... 
[-1.1914415 0.6518545 0.23677301 ... -0.9154839 -0.31815875 -1.1914415 ] [-1.1914415 -2.8628936 -2.0342214 ... -1.0549283 -3.2141106 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]] [[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.0836954 0.58888566 ... -1.1116191 -0.22797072 -1.1914415 ] [-1.1914415 -1.9070444 -0.8756852 ... -1.8214612 -2.766931 -1.1914415 ] ... [-1.1914415 -1.1774668 -1.3326846 ... -1.6377032 0.08289373 -1.1914415 ] [-1.1914415 -2.6713471 0.86302996 ... -1.7634295 -1.0417227 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]] ... [[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -2.3229141 -1.1453646 ... -0.2922889 0.10705781 -1.1914415 ] [-1.1914415 -1.1634865 -1.3941518 ... 0.2713884 0.52536345 -1.1914415 ] ... [-1.1914415 -0.31330913 -1.7200179 ... -0.7706653 1.1555045 -1.1914415 ] [-1.1914415 -2.200663 -2.0047064 ... -0.8917654 -1.9885526 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]] [[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.5411081 -0.66282874 ... -2.4243774 -0.21191567 -1.1914415 ] [-1.1914415 -0.20423281 -1.1792762 ... -0.27453303 -0.9346415 -1.1914415 ] ... [-1.1914415 -2.2803867 -0.6466864 ... -1.1499155 -1.7802573 -1.1914415 ] [-1.1914415 -2.2678757 -1.757288 ... -1.3014687 -5.396552 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ]] [[-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] ... [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... -1.1914415 -1.1914415 -1.1914415 ] [-1.1914415 -1.1914415 -1.1914415 ... 
-1.1914415 -1.1914415 -1.1914415 ]]] [[[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] ... [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]] [[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -2.3780909 -0.1980524 ... -1.3060002 0.7829171 -0.9382898 ] [-0.9382898 -0.9091023 -1.0612458 ... -1.5217309 -0.7286452 -0.9382898 ] ... [-0.9382898 0.586528 0.2750799 ... -0.60634565 -0.5725278 -0.9382898 ] [-0.9382898 -1.5541142 -1.5042634 ... -0.3325109 -2.3540606 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]] [[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -1.3081807 -0.55581474 ... -0.7644867 -0.02237564 -0.9382898 ] [-0.9382898 -0.64078295 -0.47523835 ... -1.8512964 -2.170347 -0.9382898 ] ... [-0.9382898 -1.3397683 -1.2618322 ... -0.6858988 -0.60973793 -0.9382898 ] [-0.9382898 -1.5929651 0.49850923 ... -0.56926453 0.3711576 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]] ... [[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -1.9285026 -0.4598905 ... -0.5007833 0.07258171 -0.9382898 ] [-0.9382898 -1.3704692 -0.6781014 ... 1.0197427 -0.24082059 -0.9382898 ] ... [-0.9382898 -1.1348464 0.14491326 ... 0.00781512 0.78559035 -0.9382898 ] [-0.9382898 -1.0987364 -1.6157842 ... -0.9781478 -1.1968298 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]] [[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -1.0135403 -1.97085 ... 
-2.4566038 0.3277138 -0.9382898 ] [-0.9382898 -0.16355318 -1.4463835 ... 1.4589558 -1.5035424 -0.9382898 ] ... [-0.9382898 -1.6815178 -1.1231959 ... -0.35531723 -0.8853859 -0.9382898 ] [-0.9382898 -1.9978886 -0.9796169 ... -1.7678387 -4.2418385 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]] [[-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] ... [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ] [-0.9382898 -0.9382898 -0.9382898 ... -0.9382898 -0.9382898 -0.9382898 ]]] [[[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] ... [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]] [[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.56870687 0.3978545 ... -0.7974733 2.4911761 0.97750795] [ 0.97750795 2.526348 1.1837171 ... 1.9629006 3.7605343 0.97750795] ... [ 0.97750795 3.1066337 2.7242155 ... 1.8837016 2.1803198 0.97750795] [ 0.97750795 -0.9483268 -2.170402 ... 1.3578714 0.80186534 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]] [[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 2.2242055 2.743625 ... 3.0683393 1.4839448 0.97750795] [ 0.97750795 0.77160186 1.4828131 ... 0.6154566 3.0525265 0.97750795] ... [ 0.97750795 0.4338532 2.193231 ... 0.3663085 1.8680501 0.97750795] [ 0.97750795 -0.66655684 2.7300305 ... 
0.6591623 1.4892569 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]] ... [[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 -0.7057923 1.1004915 ... -0.33160603 -1.4789139 0.97750795] [ 0.97750795 3.5499082 0.27644068 ... 1.9599903 0.02134502 0.97750795] ... [ 0.97750795 2.2454712 -1.1563784 ... -0.43631434 3.902657 0.97750795] [ 0.97750795 0.6101961 0.23814034 ... -0.2249614 0.13914633 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]] [[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 1.3474923 2.7589262 ... -1.3185898 1.6839321 0.97750795] [ 0.97750795 1.4370809 1.9525731 ... 0.54352295 -2.8663282 0.97750795] ... [ 0.97750795 -2.509366 2.3815348 ... 0.6509187 0.56414384 0.97750795] [ 0.97750795 -0.97735345 1.4037826 ... 0.32375544 -3.4082909 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]] [[ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] ... [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795] [ 0.97750795 0.97750795 0.97750795 ... 0.97750795 0.97750795 0.97750795]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [3, 1, 3], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_868.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3, 1, 3]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.134914}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.4777 (2,1,1,.,.) = -0.3531 (3,1,1,.,.) = 0.4490 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[-0.25517115 -0.23402727 1.082563 0.59343684] [ 0.8418927 1.6613421 -1.169064 -0.79703414] [-0.61576366 -0.7478266 -0.7388609 0.39615148] [-0.7363103 -0.27430663 1.0794033 -0.6324639 ] [-1.081384 1.368357 -0.24780104 0.06058921] [ 0.16131787 -0.7457757 -0.7156645 -0.75129247] [ 0.46546042 0.46926278 -1.1548939 0.8067093 ] [ 0.04443899 0.22160624 0.63269943 0.06526852]] [[ 0.13846226 0.7064615 0.98851633 -0.6548336 ] [ 0.19411363 -1.1975564 1.0361053 -0.54752696] [-0.35104203 0.6078589 -0.62866473 -2.3873398 ] [ 1.0367229 -0.3837308 0.15619318 0.3427341 ] [-1.3077298 -0.8403596 -0.9974046 0.46623182] [-0.59204006 -0.8589991 -1.0019588 -0.14978191] [ 0.75715363 1.1839437 -1.9494447 -0.18735756] [ 0.5750323 -0.32123762 -0.13404508 -0.6458849 ]] [[ 1.4893999 -0.4913631 -0.00764662 0.75654805] [ 0.24135964 -0.00309649 
-0.42371678 0.8304961 ] [-0.47114807 -0.5446737 -0.41118813 -0.23875163] [-0.47465008 -0.6085506 0.09855095 1.5854456 ] [-0.5929223 0.14213093 -0.91380596 0.17998828] [-0.54689914 0.6732729 1.5610352 0.67092353] [-0.5223876 -0.7692412 -1.1892287 1.4760382 ] [-0.6138905 -0.17226325 0.08044223 -0.34068137]] [[-0.40259218 -0.91385984 -0.17637846 -1.5341333 ] [-0.04084975 -0.16664267 0.17025717 -0.3552746 ] [-0.23624194 0.1676368 -0.81906104 -1.3855637 ] [ 0.54090905 -0.5686853 -0.00942706 1.1437571 ] [-0.38561738 0.22132446 -0.9478208 -1.0648481 ] [ 0.06318991 -0.02204439 -0.4055627 0.2980191 ] [ 0.13430594 -1.0630069 -1.0887367 -0.22985831] [-0.4640652 1.2977983 -1.4463905 -0.38795018]]]]]; ov_res: [[[[[-0.25517115 -0.23402727 1.082563 0.59343684] [ 0.8418927 1.6613421 -1.169064 -0.79703414] [-0.61576366 -0.7478266 -0.7388609 0.39615148] [-0.7363103 -0.27430663 1.0794033 -0.6324639 ] [-1.081384 1.368357 -0.24780104 0.06058921] [ 0.16131787 -0.7457757 -0.7156645 -0.75129247] [ 0.46546042 0.46926278 -1.1548939 0.8067093 ] [ 0.04443899 0.22160624 0.63269943 0.06526852]] [[ 0.13846226 0.7064615 0.98851633 -0.6548336 ] [ 0.19411363 -1.1975564 1.0361053 -0.54752696] [-0.35104203 0.6078589 -0.62866473 -2.3873398 ] [ 1.0367229 -0.3837308 0.15619318 0.3427341 ] [-1.3077298 -0.8403596 -0.9974046 0.46623182] [-0.59204006 -0.8589991 -1.0019588 -0.14978191] [ 0.75715363 1.1839437 -1.9494447 -0.18735756] [ 0.5750323 -0.32123762 -0.13404508 -0.6458849 ]] [[ 1.4893999 -0.4913631 -0.00764662 0.75654805] [ 0.24135964 -0.00309649 -0.42371678 0.8304961 ] [-0.47114807 -0.5446737 -0.41118813 -0.23875163] [-0.47465008 -0.6085506 0.09855095 1.5854456 ] [-0.5929223 0.14213093 -0.91380596 0.17998828] [-0.54689914 0.6732729 1.5610352 0.67092353] [-0.5223876 -0.7692412 -1.1892287 1.4760382 ] [-0.6138905 -0.17226325 0.08044223 -0.34068137]] [[-0.40259218 -0.91385984 -0.17637846 -1.5341333 ] [-0.04084975 -0.16664267 0.17025717 -0.3552746 ] [-0.23624194 0.1676368 -0.81906104 -1.3855637 ] [ 
0.54090905 -0.5686853 -0.00942706 1.1437571 ] [-0.38561738 0.22132446 -0.9478208 -1.0648481 ] [ 0.06318991 -0.02204439 -0.4055627 0.2980191 ] [ 0.13430594 -1.0630069 -1.0887367 -0.22985831] [-0.4640652 1.2977983 -1.4463905 -0.38795018]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [3, 1, 3], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_870.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3, 1, 3]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.8108 -0.9649 -0.8463 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.8587 (2,1,1,.,.) = 0.4481 (3,1,1,.,.) = -0.5938 (1,2,1,.,.) = -0.1503 (2,2,1,.,.) = 1.7940 (3,2,1,.,.) = -0.3011 (1,3,1,.,.) = 0.5780 (2,3,1,.,.) = 1.3601 (3,3,1,.,.) = -0.4976 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]] [[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 
0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]] [[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]] ... [[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]] [[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]] [[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]]] [[[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... 
-0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]] [[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]] [[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]] ... [[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]] [[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... 
-0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]] [[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]]] [[[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]] [[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]] [[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... 
-0.8463144 -0.8463144 -0.8463144 ]] ... [[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]] [[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]] [[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]]]]]; ov_res: [[[[[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]] [[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 
0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]] [[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]] ... [[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]] [[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]] [[ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] ... [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ] [ 0.8108294 0.8108294 0.8108294 ... 0.8108294 0.8108294 0.8108294 ]]] [[[-0.96490055 -0.96490055 -0.96490055 ... 
-0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]] [[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]] [[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]] ... [[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]] [[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... 
-0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]] [[-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] ... [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055] [-0.96490055 -0.96490055 -0.96490055 ... -0.96490055 -0.96490055 -0.96490055]]] [[[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]] [[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]] [[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... 
-0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]] ... [[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]] [[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]] [[-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] ... [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ] [-0.8463144 -0.8463144 -0.8463144 ... -0.8463144 -0.8463144 -0.8463144 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_872.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.38349}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.01 * 1.9290 (2,1,1,.,.) = -0.4180 (3,1,1,.,.) = 0.9884 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[ 5.43997586e-01 1.34563649e+00 2.70711565e+00 1.70345974e+00 1.01301813e+00 -5.66076159e-01 8.68808031e-01 2.11861706e+00 1.10256279e+00 8.30205679e-02] [-2.21503139e-01 2.16996956e+00 3.84903097e+00 1.03983474e+00 2.01740384e+00 1.71596432e+00 1.66802502e+00 1.01559424e+00 3.16110611e+00 3.07437754e+00] [-1.75548196e-01 5.90493560e-01 -2.71430492e-01 1.52052510e+00 -7.65203238e-02 2.87871170e+00 7.49678612e-02 1.89556813e+00 -8.80234122e-01 1.17427063e+00] [ 2.07978392e+00 7.15585470e-01 6.85552716e-01 7.85731554e-01 1.30626857e+00 2.92259622e+00 2.50143862e+00 1.05945659e+00 1.50239468e-03 3.34712625e-01] [ 4.24613953e-02 -3.12986374e-02 1.28589249e+00 9.12578106e-01 5.42262256e-01 1.75772381e+00 2.09275150e+00 9.69068050e-01 1.33367944e+00 1.34754527e+00] [ 1.10272622e+00 1.71567202e+00 2.65575767e-01 
2.69687557e+00 7.48155355e-01 1.46964371e+00 2.32361460e+00 2.96391821e+00 6.36063159e-01 1.71401739e-01] [ 9.09400940e-01 1.66696286e+00 2.08077693e+00 3.03455067e+00 4.18482113e+00 1.47578514e+00 -2.87186146e-01 8.24648201e-01 1.43675280e+00 8.05708885e-01] [ 2.01073265e+00 3.02395701e-01 2.87695551e+00 9.32769775e-01 -6.59545779e-01 5.62407851e-01 7.96671271e-01 1.91918612e+00 4.83483255e-01 -1.29901528e-01] [-5.11878252e-01 1.37536800e+00 -6.06801152e-01 1.59961998e+00 1.54155374e+00 1.97275710e+00 8.54187489e-01 7.08399296e-01 -1.12414396e+00 3.07087064e-01] [ 6.92705572e-01 8.50482881e-01 1.96417582e+00 -5.19394517e-01 1.77313375e+00 3.99551582e+00 1.41842997e+00 1.69133067e-01 1.54390430e+00 3.18426704e+00]] [[ 1.26970780e+00 1.16373718e+00 1.38322031e+00 5.52031696e-01 -5.88830829e-01 2.32512522e+00 2.70698404e+00 -2.33935714e-01 2.90847015e+00 1.05352736e+00] [ 8.96880627e-01 2.49636126e+00 1.08385289e+00 1.75417984e+00 2.91899729e+00 8.12156022e-01 2.12858343e+00 3.01667356e+00 1.64117455e+00 8.89583468e-01] [ 2.81443596e+00 1.38493776e+00 1.84688210e-01 1.56932425e+00 -3.95623088e-01 -1.01787937e+00 6.88270211e-01 2.28385019e+00 7.58281648e-01 1.13628149e-01] [ 1.91627765e+00 1.10293257e+00 1.25831842e+00 1.82751203e+00 -2.22694993e-01 2.14370704e+00 8.23861957e-01 2.36960888e+00 3.65208030e-01 9.24258232e-01] [ 2.62210822e+00 1.19972825e-01 6.90882444e-01 5.65412223e-01 -5.61360717e-01 2.05632424e+00 4.05525446e+00 2.62071323e+00 -1.32635295e+00 -5.54456711e-02] [ 1.21959984e+00 1.02664793e+00 1.22212696e+00 1.57202148e+00 2.45064926e+00 2.36417723e+00 1.06254375e+00 1.66898966e+00 1.59295189e+00 1.08762574e+00] [ 2.03610373e+00 3.06305599e+00 8.89560103e-01 3.20605111e+00 2.00307417e+00 2.26370907e+00 1.11962259e+00 2.03167057e+00 1.73269475e+00 2.15090847e+00] [ 1.43511772e+00 1.50066483e+00 2.13175678e+00 1.85871065e+00 9.94479835e-01 2.07780194e+00 1.23529088e+00 2.56623220e+00 -5.07474422e-01 2.23843479e+00] [ 9.26351607e-01 2.82271242e+00 
9.18781400e-01 4.70113873e-01 3.19356012e+00 2.47230887e-01 -1.37529457e+00 1.99078345e+00 2.57552481e+00 1.39875698e+00] [ 1.37419438e+00 2.59942675e+00 -8.20933461e-01 2.36903906e+00 1.82970524e+00 1.97901356e+00 8.95239472e-01 1.43852305e+00 1.47161913e+00 1.51323557e+00]] [[ 1.96080828e+00 -7.79820681e-02 3.92333364e+00 2.81386304e+00 1.12079883e+00 4.88938451e-01 6.28818095e-01 -1.40050411e-01 2.36318111e+00 1.90439439e+00] [ 1.16349614e+00 1.24732876e+00 3.92978334e+00 3.02205825e+00 6.64340496e-01 2.99121857e+00 1.92103434e+00 -2.95031071e-01 8.23011398e-01 2.36362433e+00] [ 9.89926338e-01 6.06027782e-01 -7.78293967e-01 2.39445853e+00 2.54009271e+00 1.43959558e+00 1.89052129e+00 2.27695942e+00 2.10401750e+00 4.37338650e-01] [ 1.95528531e+00 7.29262054e-01 1.95744860e+00 1.28175068e+00 -1.32513523e-01 1.40636551e+00 1.46017623e+00 3.99223328e-01 2.63799143e+00 1.17454600e+00] [ 1.22658217e+00 2.06126046e+00 2.17206526e+00 1.94892979e+00 2.86751151e+00 1.82625055e-01 1.55060053e+00 2.29785609e+00 1.27854621e+00 8.87067556e-01] [ 1.94629836e+00 9.82362628e-01 3.13589931e-01 1.72606254e+00 4.15331662e-01 6.56857848e-01 1.30346262e+00 3.11536121e+00 1.82313979e+00 1.91488886e+00] [ 1.61911297e+00 3.01244903e+00 1.18775833e+00 2.57372904e+00 8.51387143e-01 1.59117246e+00 2.02023840e+00 3.28879952e-01 6.00502253e-01 8.22923064e-01] [ 1.30432212e+00 2.39040995e+00 1.80059278e+00 -8.26131105e-02 1.91002560e+00 3.68123198e+00 2.59024096e+00 2.92121768e+00 -8.57800364e-01 1.95299840e+00] [ 2.23674679e+00 7.68538713e-02 1.11522806e+00 2.88926840e+00 -2.03033328e-01 1.17075324e-01 1.42604434e+00 2.27182341e+00 9.78387594e-02 -9.40906167e-01] [ 1.70590091e+00 -2.66353488e-01 2.02079105e+00 1.66537344e+00 -2.54595280e-03 1.26733935e+00 8.85629892e-01 1.06418800e+00 3.35051680e+00 -3.80660772e-01]] [[ 1.18509603e+00 4.29682541e+00 9.38022316e-01 2.15929317e+00 5.58819950e-01 1.87224400e+00 1.08684111e+00 1.04020047e+00 1.14910054e+00 1.82463753e+00] [ 1.96816528e+00 
2.29240322e+00 1.55354321e+00 8.81556869e-01 8.21328163e-01 3.22026610e-01 1.36827171e+00 3.17556286e+00 7.64946818e-01 3.08374214e+00] [ 3.00622368e+00 -5.41436911e-01 1.05450630e+00 2.30776095e+00 1.43058074e+00 1.50978327e+00 2.06770468e+00 2.36132073e+00 8.93389583e-01 1.88729346e+00] [ 2.25423431e+00 8.47030282e-01 1.19963729e+00 1.18954062e+00 2.72807741e+00 1.55315888e+00 2.17650199e+00 2.70910454e+00 4.55060911e+00 2.60473323e+00] [-7.65156627e-01 2.05584002e+00 5.95580459e-01 1.98098183e-01 -5.88530540e-01 1.60924041e+00 1.12560201e+00 6.56300187e-01 1.10107195e+00 -1.87241793e-01] [ 1.17113328e+00 1.85962343e+00 2.36846328e-01 1.48801148e+00 1.31846416e+00 1.82413888e+00 -1.55453086e-01 1.70464873e-01 1.36297822e-01 2.32013917e+00] [ 1.05837440e+00 3.27731800e+00 2.54201984e+00 1.08541024e+00 4.26485491e+00 1.30054569e+00 8.16438079e-01 2.04120064e+00 1.76433086e-01 2.59438729e+00] [ 8.33055496e-01 -6.26718402e-01 2.18282557e+00 -8.85238767e-01 -5.46156287e-01 -1.55172002e+00 9.05736029e-01 -3.74926567e-01 5.62475324e-01 1.43149459e+00] [ 1.97258925e+00 9.05995667e-01 9.52187598e-01 1.77684307e+00 1.01699698e+00 2.23463488e+00 2.38320637e+00 3.82031202e-01 4.70263124e-01 1.46337438e+00] [-6.42262578e-01 3.31909800e+00 1.09123397e+00 1.11800432e-02 6.29222870e-01 3.14267588e+00 1.65196943e+00 2.88032532e-01 1.73868942e+00 2.10662818e+00]] [[ 2.97724938e+00 3.79808784e-01 2.15981364e+00 2.12395740e+00 -3.96121025e-01 9.18297410e-01 2.95661545e+00 1.48013067e+00 1.91982794e+00 4.71130788e-01] [ 1.78024232e+00 1.05898142e+00 1.84188664e+00 2.03542185e+00 6.54297352e-01 2.18834162e+00 2.39407539e+00 1.28723013e+00 2.31797719e+00 1.15263641e+00] [ 7.93294549e-01 -1.25297070e-01 1.53520226e-01 4.63060558e-01 3.45660830e+00 1.53430402e+00 3.48551035e-01 8.37920785e-01 1.76660335e+00 3.50093722e-01] [ 8.82535994e-01 -1.41971111e-02 1.51986015e+00 6.02431655e-01 1.39660561e+00 1.30585194e+00 5.18498063e-01 2.13912058e+00 1.34329677e+00 -5.58730364e-02] [ 
5.88482320e-01 3.12457991e+00 4.53650475e-01 1.55768967e+00 2.75274372e+00 1.73844624e+00 7.17302561e-02 1.07237220e-01 1.22334504e+00 9.90036309e-01] [ 1.99459767e+00 4.35689628e-01 2.57764316e+00 1.07068467e+00 1.13111687e+00 9.74459648e-01 1.69009984e+00 2.48040199e+00 1.14715004e+00 -2.83687353e-01] [ 1.20513535e+00 1.76139104e+00 1.66235638e+00 1.77656806e+00 9.24966216e-01 -1.07236540e+00 3.09490061e+00 9.50965643e-01 6.99545205e-01 2.02792811e+00] [ 1.32444298e+00 3.18789172e+00 3.42142391e+00 -1.11018777e-01 2.69172549e-01 2.29007769e+00 2.92698646e+00 1.99348199e+00 6.66219354e-01 -2.37762308e+00] [ 2.02020240e+00 -4.39596057e-01 2.93792033e+00 6.10214233e-01 -1.65941119e-01 1.95936227e+00 3.60838413e-01 7.07766473e-01 6.78299427e-01 2.38263607e-03] [ 1.80799484e+00 2.47298717e+00 1.30141652e+00 6.47410274e-01 1.80500555e+00 3.21813583e-01 3.07524538e+00 1.45894456e+00 3.72609138e-01 -5.88446856e-02]] [[ 2.13321829e+00 9.64106202e-01 -5.98927021e-01 1.41819882e+00 2.06139302e+00 2.42547369e+00 7.67333090e-01 1.39223611e+00 4.88504052e-01 1.23666620e+00] [ 2.46793413e+00 2.63345003e-01 1.33163762e+00 1.57768762e+00 3.27755141e+00 2.40957856e+00 2.48935890e+00 2.29811096e+00 2.24347901e+00 3.77948046e+00] [ 2.19750214e+00 2.31341887e+00 1.11686444e+00 1.26246929e+00 2.79035568e+00 6.29558563e-01 2.54216433e+00 3.47254395e-01 1.15465868e+00 2.93698072e+00] [ 1.80117726e+00 1.95936298e+00 2.79938149e+00 9.28599596e-01 -4.44978952e-01 1.39730275e+00 4.12822723e-01 1.21164048e+00 -5.15612125e-01 4.77914691e-01] [ 1.95137131e+00 2.88676023e+00 -1.89476848e-01 2.22890377e+00 2.20069885e+00 3.75337887e+00 9.58275795e-02 1.74054766e+00 1.34857070e+00 -1.26863968e+00] [ 2.39876270e+00 2.36714458e+00 2.11603427e+00 1.35216773e+00 1.18652916e+00 2.89610195e+00 3.34287357e+00 9.99736190e-01 3.99910545e+00 1.20131588e+00] [ 1.09634185e+00 9.86390710e-01 1.70924413e+00 1.18840575e-01 2.24469924e+00 -6.75280452e-01 2.57866788e+00 1.87800419e+00 1.22850215e+00 
1.88350260e+00] [ 1.37280953e+00 1.56035089e+00 1.91396499e+00 5.26532650e-01 1.88235641e+00 1.45351839e+00 7.89904952e-01 2.37500787e-01 -1.98826468e+00 2.58402658e+00] [ 1.60006177e+00 4.23308611e-02 1.60994768e-01 8.44116926e-01 2.61575365e+00 -7.23245144e-02 1.85875833e+00 -5.49651384e-02 7.97893763e-01 1.11730528e+00] [ 1.49734211e+00 1.66010439e+00 -1.15673482e+00 7.73759961e-01 2.03485370e+00 2.29322577e+00 2.17252970e+00 2.08200431e+00 2.11832047e-01 3.58068943e+00]] [[ 2.66182494e+00 1.61497390e+00 2.12352610e+00 1.93635988e+00 -2.39142776e-01 8.17414522e-02 1.57455218e+00 1.63828969e-01 2.02247143e+00 1.97108936e+00] [ 2.84842634e+00 1.93292952e+00 2.57707930e+00 9.99719858e-01 -5.16782165e-01 1.23519158e+00 4.36556339e-02 1.86307621e+00 7.42180586e-01 6.00717068e-02] [ 5.94070017e-01 3.31682253e+00 1.88486528e+00 2.37553954e+00 2.09518456e+00 1.90097439e+00 2.50025153e-01 4.17599320e-01 1.97097921e+00 1.21865118e+00] [ 7.02617884e-01 1.74143827e+00 -5.36797047e-02 2.72850919e+00 1.27433586e+00 9.71575379e-01 2.60670519e+00 1.17615175e+00 2.83053875e-01 1.88517869e+00] [ 2.77636099e+00 1.09547842e+00 1.44965947e+00 1.19397223e+00 1.19462788e+00 2.82991624e+00 -4.22003031e-01 1.23543417e+00 8.47450972e-01 1.58556235e+00] [ 8.02561939e-01 2.39508224e+00 5.55049062e-01 7.03048706e-03 -1.15494096e+00 2.53846884e-01 1.48027718e+00 9.01797891e-01 6.62818193e-01 2.37365294e+00] [ 1.66904378e+00 1.14829957e+00 6.37259007e-01 7.78948784e-01 3.50729084e+00 2.01258326e+00 1.29642761e+00 4.02937710e-01 3.69053078e+00 1.79901814e+00] [ 2.73316288e+00 9.84085798e-01 3.62906814e-01 -4.01957273e-01 1.38093722e+00 2.55169940e+00 1.61729300e+00 3.55490685e+00 1.02188659e+00 5.17633915e-01] [ 1.20258129e+00 1.12446952e+00 9.13092971e-01 1.22375333e+00 1.34525001e+00 1.30597699e+00 2.86741734e+00 1.52374530e+00 1.39319909e+00 2.20880508e+00] [ 1.24480987e+00 2.86467791e-01 2.26258564e+00 2.34145427e+00 5.65914273e-01 2.13668489e+00 2.87238169e+00 -2.89212227e-01 
1.55172896e+00 2.92021298e+00]] [[-1.47976744e+00 2.35298944e+00 3.85651588e-02 1.57819879e+00 2.65603924e+00 7.26798832e-01 1.81004488e+00 1.12577510e+00 -3.23392630e-01 2.71901035e+00] [ 6.53004646e-01 1.94204259e+00 2.05796337e+00 1.09343469e+00 2.80486250e+00 1.86401141e+00 3.52596164e-01 8.17319930e-01 1.94850504e+00 2.87462854e+00] [ 7.32701778e-01 -4.38789964e-01 7.04761088e-01 2.50275970e+00 1.54450786e+00 1.99664521e+00 7.01283216e-02 2.17615390e+00 1.78910804e+00 9.90215123e-01] [ 2.49251413e+00 5.94939530e-01 3.01454902e-01 2.39248800e+00 1.80426431e+00 -7.90865541e-01 1.85682750e+00 -1.20308280e-01 1.67750216e+00 -8.52408409e-02] [ 1.26656449e+00 1.18440092e+00 -5.73581934e-01 2.03782105e+00 1.43547273e+00 -7.14551210e-02 1.52800775e+00 1.87547207e+00 2.57304192e-01 3.79662132e+00] [ 1.28767359e+00 1.22840464e+00 2.26192808e+00 2.86635089e+00 9.05721128e-01 1.09684038e+00 2.83136439e+00 8.49011838e-01 1.12914956e+00 2.20421863e+00] [ 2.71472549e+00 1.02001405e+00 2.13555861e+00 2.84132385e+00 -3.93866777e-01 1.82049727e+00 2.03651786e+00 1.83022666e+00 2.37006450e+00 2.52136230e+00] [ 1.70690525e+00 2.21487188e+00 3.44952106e+00 1.26029563e+00 9.27629471e-01 1.18545926e+00 8.09466243e-01 1.55772650e+00 1.36530936e+00 4.63422298e-01] [ 2.01054811e-01 1.67427993e+00 1.33058906e-01 2.88663912e+00 1.69612503e+00 1.00480533e+00 3.91706944e+00 8.99483681e-01 1.59783554e+00 8.17440629e-01] [ 3.12190247e+00 3.43967438e-01 1.97742689e+00 1.35793865e+00 8.23435068e-01 4.70834017e-01 -1.51643038e-01 1.81579351e+00 1.95324290e+00 3.16739559e-01]]]]]; ov_res: [[[[[ 5.43997586e-01 1.34563649e+00 2.70711565e+00 1.70345974e+00 1.01301813e+00 -5.66076159e-01 8.68808031e-01 2.11861706e+00 1.10256279e+00 8.30205679e-02] [-2.21503139e-01 2.16996956e+00 3.84903097e+00 1.03983474e+00 2.01740384e+00 1.71596432e+00 1.66802502e+00 1.01559424e+00 3.16110611e+00 3.07437754e+00] [-1.75548196e-01 5.90493560e-01 -2.71430492e-01 1.52052510e+00 -7.65203238e-02 2.87871170e+00 
7.49678612e-02 1.89556813e+00 -8.80234122e-01 1.17427063e+00] [ 2.07978392e+00 7.15585470e-01 6.85552716e-01 7.85731554e-01 1.30626857e+00 2.92259622e+00 2.50143862e+00 1.05945659e+00 1.50239468e-03 3.34712625e-01] [ 4.24613953e-02 -3.12986374e-02 1.28589249e+00 9.12578106e-01 5.42262256e-01 1.75772381e+00 2.09275150e+00 9.69068050e-01 1.33367944e+00 1.34754527e+00] [ 1.10272622e+00 1.71567202e+00 2.65575767e-01 2.69687557e+00 7.48155355e-01 1.46964371e+00 2.32361460e+00 2.96391821e+00 6.36063159e-01 1.71401739e-01] [ 9.09400940e-01 1.66696286e+00 2.08077693e+00 3.03455067e+00 4.18482113e+00 1.47578514e+00 -2.87186146e-01 8.24648201e-01 1.43675280e+00 8.05708885e-01] [ 2.01073265e+00 3.02395701e-01 2.87695551e+00 9.32769775e-01 -6.59545779e-01 5.62407851e-01 7.96671271e-01 1.91918612e+00 4.83483255e-01 -1.29901528e-01] [-5.11878252e-01 1.37536800e+00 -6.06801152e-01 1.59961998e+00 1.54155374e+00 1.97275710e+00 8.54187489e-01 7.08399296e-01 -1.12414396e+00 3.07087064e-01] [ 6.92705572e-01 8.50482881e-01 1.96417582e+00 -5.19394517e-01 1.77313375e+00 3.99551582e+00 1.41842997e+00 1.69133067e-01 1.54390430e+00 3.18426704e+00]] [[ 1.26970780e+00 1.16373718e+00 1.38322031e+00 5.52031696e-01 -5.88830829e-01 2.32512522e+00 2.70698404e+00 -2.33935714e-01 2.90847015e+00 1.05352736e+00] [ 8.96880627e-01 2.49636126e+00 1.08385289e+00 1.75417984e+00 2.91899729e+00 8.12156022e-01 2.12858343e+00 3.01667356e+00 1.64117455e+00 8.89583468e-01] [ 2.81443596e+00 1.38493776e+00 1.84688210e-01 1.56932425e+00 -3.95623088e-01 -1.01787937e+00 6.88270211e-01 2.28385019e+00 7.58281648e-01 1.13628149e-01] [ 1.91627765e+00 1.10293257e+00 1.25831842e+00 1.82751203e+00 -2.22694993e-01 2.14370704e+00 8.23861957e-01 2.36960888e+00 3.65208030e-01 9.24258232e-01] [ 2.62210822e+00 1.19972825e-01 6.90882444e-01 5.65412223e-01 -5.61360717e-01 2.05632424e+00 4.05525446e+00 2.62071323e+00 -1.32635295e+00 -5.54456711e-02] [ 1.21959984e+00 1.02664793e+00 1.22212696e+00 1.57202148e+00 2.45064926e+00 
2.36417723e+00 1.06254375e+00 1.66898966e+00 1.59295189e+00 1.08762574e+00] [ 2.03610373e+00 3.06305599e+00 8.89560103e-01 3.20605111e+00 2.00307417e+00 2.26370907e+00 1.11962259e+00 2.03167057e+00 1.73269475e+00 2.15090847e+00] [ 1.43511772e+00 1.50066483e+00 2.13175678e+00 1.85871065e+00 9.94479835e-01 2.07780194e+00 1.23529088e+00 2.56623220e+00 -5.07474422e-01 2.23843479e+00] [ 9.26351607e-01 2.82271242e+00 9.18781400e-01 4.70113873e-01 3.19356012e+00 2.47230887e-01 -1.37529457e+00 1.99078345e+00 2.57552481e+00 1.39875698e+00] [ 1.37419438e+00 2.59942675e+00 -8.20933461e-01 2.36903906e+00 1.82970524e+00 1.97901356e+00 8.95239472e-01 1.43852305e+00 1.47161913e+00 1.51323557e+00]] [[ 1.96080828e+00 -7.79820681e-02 3.92333364e+00 2.81386304e+00 1.12079883e+00 4.88938451e-01 6.28818095e-01 -1.40050411e-01 2.36318111e+00 1.90439439e+00] [ 1.16349614e+00 1.24732876e+00 3.92978334e+00 3.02205825e+00 6.64340496e-01 2.99121857e+00 1.92103434e+00 -2.95031071e-01 8.23011398e-01 2.36362433e+00] [ 9.89926338e-01 6.06027782e-01 -7.78293967e-01 2.39445853e+00 2.54009271e+00 1.43959558e+00 1.89052129e+00 2.27695942e+00 2.10401750e+00 4.37338650e-01] [ 1.95528531e+00 7.29262054e-01 1.95744860e+00 1.28175068e+00 -1.32513523e-01 1.40636551e+00 1.46017623e+00 3.99223328e-01 2.63799143e+00 1.17454600e+00] [ 1.22658217e+00 2.06126046e+00 2.17206526e+00 1.94892979e+00 2.86751151e+00 1.82625055e-01 1.55060053e+00 2.29785609e+00 1.27854621e+00 8.87067556e-01] [ 1.94629836e+00 9.82362628e-01 3.13589931e-01 1.72606254e+00 4.15331662e-01 6.56857848e-01 1.30346262e+00 3.11536121e+00 1.82313979e+00 1.91488886e+00] [ 1.61911297e+00 3.01244903e+00 1.18775833e+00 2.57372904e+00 8.51387143e-01 1.59117246e+00 2.02023840e+00 3.28879952e-01 6.00502253e-01 8.22923064e-01] [ 1.30432212e+00 2.39040995e+00 1.80059278e+00 -8.26131105e-02 1.91002560e+00 3.68123198e+00 2.59024096e+00 2.92121768e+00 -8.57800364e-01 1.95299840e+00] [ 2.23674679e+00 7.68538713e-02 1.11522806e+00 2.88926840e+00 
-2.03033328e-01 1.17075324e-01 1.42604434e+00 2.27182341e+00 9.78387594e-02 -9.40906167e-01] [ 1.70590091e+00 -2.66353488e-01 2.02079105e+00 1.66537344e+00 -2.54595280e-03 1.26733935e+00 8.85629892e-01 1.06418800e+00 3.35051680e+00 -3.80660772e-01]] [[ 1.18509603e+00 4.29682541e+00 9.38022316e-01 2.15929317e+00 5.58819950e-01 1.87224400e+00 1.08684111e+00 1.04020047e+00 1.14910054e+00 1.82463753e+00] [ 1.96816528e+00 2.29240322e+00 1.55354321e+00 8.81556869e-01 8.21328163e-01 3.22026610e-01 1.36827171e+00 3.17556286e+00 7.64946818e-01 3.08374214e+00] [ 3.00622368e+00 -5.41436911e-01 1.05450630e+00 2.30776095e+00 1.43058074e+00 1.50978327e+00 2.06770468e+00 2.36132073e+00 8.93389583e-01 1.88729346e+00] [ 2.25423431e+00 8.47030282e-01 1.19963729e+00 1.18954062e+00 2.72807741e+00 1.55315888e+00 2.17650199e+00 2.70910454e+00 4.55060911e+00 2.60473323e+00] [-7.65156627e-01 2.05584002e+00 5.95580459e-01 1.98098183e-01 -5.88530540e-01 1.60924041e+00 1.12560201e+00 6.56300187e-01 1.10107195e+00 -1.87241793e-01] [ 1.17113328e+00 1.85962343e+00 2.36846328e-01 1.48801148e+00 1.31846416e+00 1.82413888e+00 -1.55453086e-01 1.70464873e-01 1.36297822e-01 2.32013917e+00] [ 1.05837440e+00 3.27731800e+00 2.54201984e+00 1.08541024e+00 4.26485491e+00 1.30054569e+00 8.16438079e-01 2.04120064e+00 1.76433086e-01 2.59438729e+00] [ 8.33055496e-01 -6.26718402e-01 2.18282557e+00 -8.85238767e-01 -5.46156287e-01 -1.55172002e+00 9.05736029e-01 -3.74926567e-01 5.62475324e-01 1.43149459e+00] [ 1.97258925e+00 9.05995667e-01 9.52187598e-01 1.77684307e+00 1.01699698e+00 2.23463488e+00 2.38320637e+00 3.82031202e-01 4.70263124e-01 1.46337438e+00] [-6.42262578e-01 3.31909800e+00 1.09123397e+00 1.11800432e-02 6.29222870e-01 3.14267588e+00 1.65196943e+00 2.88032532e-01 1.73868942e+00 2.10662818e+00]] [[ 2.97724938e+00 3.79808784e-01 2.15981364e+00 2.12395740e+00 -3.96121025e-01 9.18297410e-01 2.95661545e+00 1.48013067e+00 1.91982794e+00 4.71130788e-01] [ 1.78024232e+00 1.05898142e+00 1.84188664e+00 
2.03542185e+00 6.54297352e-01 2.18834162e+00 2.39407539e+00 1.28723013e+00 2.31797719e+00 1.15263641e+00] [ 7.93294549e-01 -1.25297070e-01 1.53520226e-01 4.63060558e-01 3.45660830e+00 1.53430402e+00 3.48551035e-01 8.37920785e-01 1.76660335e+00 3.50093722e-01] [ 8.82535994e-01 -1.41971111e-02 1.51986015e+00 6.02431655e-01 1.39660561e+00 1.30585194e+00 5.18498063e-01 2.13912058e+00 1.34329677e+00 -5.58730364e-02] [ 5.88482320e-01 3.12457991e+00 4.53650475e-01 1.55768967e+00 2.75274372e+00 1.73844624e+00 7.17302561e-02 1.07237220e-01 1.22334504e+00 9.90036309e-01] [ 1.99459767e+00 4.35689628e-01 2.57764316e+00 1.07068467e+00 1.13111687e+00 9.74459648e-01 1.69009984e+00 2.48040199e+00 1.14715004e+00 -2.83687353e-01] [ 1.20513535e+00 1.76139104e+00 1.66235638e+00 1.77656806e+00 9.24966216e-01 -1.07236540e+00 3.09490061e+00 9.50965643e-01 6.99545205e-01 2.02792811e+00] [ 1.32444298e+00 3.18789172e+00 3.42142391e+00 -1.11018777e-01 2.69172549e-01 2.29007769e+00 2.92698646e+00 1.99348199e+00 6.66219354e-01 -2.37762308e+00] [ 2.02020240e+00 -4.39596057e-01 2.93792033e+00 6.10214233e-01 -1.65941119e-01 1.95936227e+00 3.60838413e-01 7.07766473e-01 6.78299427e-01 2.38263607e-03] [ 1.80799484e+00 2.47298717e+00 1.30141652e+00 6.47410274e-01 1.80500555e+00 3.21813583e-01 3.07524538e+00 1.45894456e+00 3.72609138e-01 -5.88446856e-02]] [[ 2.13321829e+00 9.64106202e-01 -5.98927021e-01 1.41819882e+00 2.06139302e+00 2.42547369e+00 7.67333090e-01 1.39223611e+00 4.88504052e-01 1.23666620e+00] [ 2.46793413e+00 2.63345003e-01 1.33163762e+00 1.57768762e+00 3.27755141e+00 2.40957856e+00 2.48935890e+00 2.29811096e+00 2.24347901e+00 3.77948046e+00] [ 2.19750214e+00 2.31341887e+00 1.11686444e+00 1.26246929e+00 2.79035568e+00 6.29558563e-01 2.54216433e+00 3.47254395e-01 1.15465868e+00 2.93698072e+00] [ 1.80117726e+00 1.95936298e+00 2.79938149e+00 9.28599596e-01 -4.44978952e-01 1.39730275e+00 4.12822723e-01 1.21164048e+00 -5.15612125e-01 4.77914691e-01] [ 1.95137131e+00 2.88676023e+00 
-1.89476848e-01 2.22890377e+00 2.20069885e+00 3.75337887e+00 9.58275795e-02 1.74054766e+00 1.34857070e+00 -1.26863968e+00] [ 2.39876270e+00 2.36714458e+00 2.11603427e+00 1.35216773e+00 1.18652916e+00 2.89610195e+00 3.34287357e+00 9.99736190e-01 3.99910545e+00 1.20131588e+00] [ 1.09634185e+00 9.86390710e-01 1.70924413e+00 1.18840575e-01 2.24469924e+00 -6.75280452e-01 2.57866788e+00 1.87800419e+00 1.22850215e+00 1.88350260e+00] [ 1.37280953e+00 1.56035089e+00 1.91396499e+00 5.26532650e-01 1.88235641e+00 1.45351839e+00 7.89904952e-01 2.37500787e-01 -1.98826468e+00 2.58402658e+00] [ 1.60006177e+00 4.23308611e-02 1.60994768e-01 8.44116926e-01 2.61575365e+00 -7.23245144e-02 1.85875833e+00 -5.49651384e-02 7.97893763e-01 1.11730528e+00] [ 1.49734211e+00 1.66010439e+00 -1.15673482e+00 7.73759961e-01 2.03485370e+00 2.29322577e+00 2.17252970e+00 2.08200431e+00 2.11832047e-01 3.58068943e+00]] [[ 2.66182494e+00 1.61497390e+00 2.12352610e+00 1.93635988e+00 -2.39142776e-01 8.17414522e-02 1.57455218e+00 1.63828969e-01 2.02247143e+00 1.97108936e+00] [ 2.84842634e+00 1.93292952e+00 2.57707930e+00 9.99719858e-01 -5.16782165e-01 1.23519158e+00 4.36556339e-02 1.86307621e+00 7.42180586e-01 6.00717068e-02] [ 5.94070017e-01 3.31682253e+00 1.88486528e+00 2.37553954e+00 2.09518456e+00 1.90097439e+00 2.50025153e-01 4.17599320e-01 1.97097921e+00 1.21865118e+00] [ 7.02617884e-01 1.74143827e+00 -5.36797047e-02 2.72850919e+00 1.27433586e+00 9.71575379e-01 2.60670519e+00 1.17615175e+00 2.83053875e-01 1.88517869e+00] [ 2.77636099e+00 1.09547842e+00 1.44965947e+00 1.19397223e+00 1.19462788e+00 2.82991624e+00 -4.22003031e-01 1.23543417e+00 8.47450972e-01 1.58556235e+00] [ 8.02561939e-01 2.39508224e+00 5.55049062e-01 7.03048706e-03 -1.15494096e+00 2.53846884e-01 1.48027718e+00 9.01797891e-01 6.62818193e-01 2.37365294e+00] [ 1.66904378e+00 1.14829957e+00 6.37259007e-01 7.78948784e-01 3.50729084e+00 2.01258326e+00 1.29642761e+00 4.02937710e-01 3.69053078e+00 1.79901814e+00] [ 2.73316288e+00 
9.84085798e-01 3.62906814e-01 -4.01957273e-01 1.38093722e+00 2.55169940e+00 1.61729300e+00 3.55490685e+00 1.02188659e+00 5.17633915e-01] [ 1.20258129e+00 1.12446952e+00 9.13092971e-01 1.22375333e+00 1.34525001e+00 1.30597699e+00 2.86741734e+00 1.52374530e+00 1.39319909e+00 2.20880508e+00] [ 1.24480987e+00 2.86467791e-01 2.26258564e+00 2.34145427e+00 5.65914273e-01 2.13668489e+00 2.87238169e+00 -2.89212227e-01 1.55172896e+00 2.92021298e+00]] [[-1.47976744e+00 2.35298944e+00 3.85651588e-02 1.57819879e+00 2.65603924e+00 7.26798832e-01 1.81004488e+00 1.12577510e+00 -3.23392630e-01 2.71901035e+00] [ 6.53004646e-01 1.94204259e+00 2.05796337e+00 1.09343469e+00 2.80486250e+00 1.86401141e+00 3.52596164e-01 8.17319930e-01 1.94850504e+00 2.87462854e+00] [ 7.32701778e-01 -4.38789964e-01 7.04761088e-01 2.50275970e+00 1.54450786e+00 1.99664521e+00 7.01283216e-02 2.17615390e+00 1.78910804e+00 9.90215123e-01] [ 2.49251413e+00 5.94939530e-01 3.01454902e-01 2.39248800e+00 1.80426431e+00 -7.90865541e-01 1.85682750e+00 -1.20308280e-01 1.67750216e+00 -8.52408409e-02] [ 1.26656449e+00 1.18440092e+00 -5.73581934e-01 2.03782105e+00 1.43547273e+00 -7.14551210e-02 1.52800775e+00 1.87547207e+00 2.57304192e-01 3.79662132e+00] [ 1.28767359e+00 1.22840464e+00 2.26192808e+00 2.86635089e+00 9.05721128e-01 1.09684038e+00 2.83136439e+00 8.49011838e-01 1.12914956e+00 2.20421863e+00] [ 2.71472549e+00 1.02001405e+00 2.13555861e+00 2.84132385e+00 -3.93866777e-01 1.82049727e+00 2.03651786e+00 1.83022666e+00 2.37006450e+00 2.52136230e+00] [ 1.70690525e+00 2.21487188e+00 3.44952106e+00 1.26029563e+00 9.27629471e-01 1.18545926e+00 8.09466243e-01 1.55772650e+00 1.36530936e+00 4.63422298e-01] [ 2.01054811e-01 1.67427993e+00 1.33058906e-01 2.88663912e+00 1.69612503e+00 1.00480533e+00 3.91706944e+00 8.99483681e-01 1.59783554e+00 8.17440629e-01] [ 3.12190247e+00 3.43967438e-01 1.97742689e+00 1.35793865e+00 8.23435068e-01 4.70834017e-01 -1.51643038e-01 1.81579351e+00 1.95324290e+00 3.16739559e-01]]]]] Accuracy 
validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 1, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_874.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.7155 1.1710 -2.4456 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.3864 (2,1,1,.,.) = 0.5608 (3,1,1,.,.) = -0.7971 (1,2,1,.,.) = 2.2340 (2,2,1,.,.) = 0.5614 (3,2,1,.,.) = -0.6398 (1,3,1,.,.) = 0.01 * -2.8633 (2,3,1,.,.) = 1.2539 (3,3,1,.,.) = 0.01 * 8.2137 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [ 3.6522827 -0.15336311 -1.1494113 ... -0.12225509 0.33543777 -0.38739446] [-3.7048955 -0.12517995 -1.6546385 ... 2.6058278 1.9980797 -0.1450314 ] ... [-1.1555052 0.345778 -2.4508247 ... 0.29327238 -0.47990292 -2.4365487 ] [ 1.61785 -2.1035948 -3.3465996 ... 2.1707 -0.18409646 -3.4524488 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]] [[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [-0.22962666 -2.2273421 -0.73136 ... 0.49852788 2.3347073 -2.102014 ] [ 2.4383526 -4.9551063 -4.3879232 ... 3.9598303 -0.35771474 -3.518107 ] ... [ 2.0497808 -1.9396204 2.8999953 ... 
-4.3620615 2.7741094 -0.3417191 ] [-2.5954626 2.352687 -4.0034933 ... -0.68473285 -0.7773702 4.816616 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]] [[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [-1.5742404 -2.2300978 -3.0783467 ... -4.4490585 0.1585117 -3.6775289 ] [-1.5367923 1.2293289 3.5477724 ... -0.28054577 -2.1804438 -1.9803758 ] ... [-2.1019268 -2.8157964 0.8971497 ... 0.8881732 -1.1294849 -0.14475429] [ 3.8718023 0.2843793 -0.11334133 ... -0.00765443 1.5617317 -1.4049703 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]] ... [[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [ 3.6817408 -1.6922278 0.11389858 ... 0.83670294 1.5248874 0.1221348 ] [ 3.4193673 1.5404791 -5.5231724 ... -2.3430643 -4.536428 -1.1794997 ] ... [-0.7738597 -0.29603583 0.27259922 ... -0.9461598 -1.8152645 -1.6506374 ] [-1.4502304 -2.7849722 1.6004113 ... 1.8307034 -4.197447 1.6322266 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]] [[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [-1.1929588 0.28452706 -4.5372963 ... -3.8918715 2.1333508 0.79263246] [-2.0986009 1.8358291 -2.6228435 ... -2.1200428 2.5189672 1.6745144 ] ... [-5.9522715 1.1943085 -0.44820312 ... -4.9775767 -1.2498837 0.95073426] [-1.8125281 1.5554572 1.3654858 ... -3.694283 -1.9123644 -2.5415435 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]] [[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [ 2.259138 -3.4559927 1.3994848 ... 1.8144699 -3.652896 -0.5814525 ] [ 2.8473358 0.11682814 2.3767753 ... 0.26829988 -0.30894274 1.4022104 ] ... [-1.4401643 -0.6832086 -2.0150437 ... -2.856327 -2.188881 2.9203649 ] [-0.4177903 0.05312622 -2.015356 ... 1.9031688 -2.5250425 0.700655 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]]] [[[ 1.1710296 1.1710296 1.1710296 ... 
1.1710296 1.1710296 1.1710296 ] [ 2.9332967 0.565685 2.3649802 ... 3.4187503 2.0412064 0.5264803 ] [ 5.0938025 -1.853497 2.144293 ... 0.3760962 1.174622 2.2081146 ] ... [ 2.0240881 1.412227 0.6087446 ... 2.8766966 1.0933937 1.9257426 ] [ 3.575054 2.4099479 1.1751884 ... 1.7156017 1.892617 1.0936859 ] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]] [[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 0.93515325 0.39973497 1.1653771 ... 0.812175 -0.9105334 1.1309892 ] [ 1.321817 1.4078648 0.4339887 ... 0.2676202 0.85053706 2.1206245 ] ... [ 2.0701058 1.1277994 0.03049326 ... -0.7991966 3.2601979 -0.0672307 ] [ 1.8217906 3.3094308 -0.3998984 ... -1.5324297 0.11059642 1.9135792 ] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]] [[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 0.33043706 -0.5722462 -0.6012068 ... -0.97086334 3.419397 -1.9281893 ] [ 2.0387223 -0.7708181 1.9327283 ... 0.5900806 0.69187725 -0.20526338] ... [ 1.8955135 -1.6013961 4.207649 ... -0.05520308 0.1350503 1.4661968 ] [ 2.494532 -0.8154237 4.6434264 ... 1.0109419 2.434088 4.720125 ] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]] ... [[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 0.03814888 1.6988113 1.5946045 ... 2.5881166 0.04960847 1.7966251 ] [ 1.776999 1.5568033 -2.7412007 ... -0.99205065 -1.1908324 2.2989225 ] ... [ 1.6627082 0.09042788 0.30432755 ... 2.0542226 3.0160265 1.7001693 ] [ 1.5251364 -0.9492459 -0.16326213 ... 2.1950026 0.12931168 2.3438628 ] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]] [[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 3.1696079 2.3059616 -2.4890676 ... -2.588308 3.607102 1.870077 ] [ 1.3265024 1.9003744 -0.73355377 ... 1.027206 0.5284773 1.1022093 ] ... [-1.431359 3.3153253 -0.42798698 ... -1.493 0.5313027 3.1271186 ] [ 0.82212573 2.6827712 -0.4770578 ... 
-1.5895803 3.2379801 2.8753886 ] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]] [[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 1.862465 2.152293 3.259771 ... -0.898932 -1.7465308 0.33136868] [ 2.9997115 1.5656137 2.9861567 ... 1.3459029 0.76564014 3.6427743 ] ... [-0.41746294 1.8606155 0.4942068 ... -0.75223863 0.6954143 4.7849016 ] [ 1.091001 1.9068835 1.2366631 ... 3.5655744 1.4906899 0.63956386] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]]] [[[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-2.8302424 -1.7396145 -1.9662094 ... -2.9974985 -2.6458378 -2.912993 ] [-1.9529383 -2.0667384 -0.8738462 ... -2.795956 -2.4194193 -2.2027395 ] ... [-3.4757242 -2.3894536 -2.8259323 ... -3.3162363 -2.9930446 -1.5629852 ] [-3.2279537 -3.4682724 -1.8449569 ... -3.523253 -1.9965663 -1.7411021 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]] [[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-3.5466142 -2.192539 -2.4253423 ... -3.5399685 -3.5008943 -1.1623334 ] [-2.977379 -1.2888864 -2.14503 ... -3.035428 -2.2436943 -1.7529335 ] ... [-3.705768 -2.86243 -4.088171 ... -1.9018857 -3.39781 -3.5879579 ] [-1.4495516 -3.7276607 -1.07218 ... -1.5759225 -2.7728815 -3.993977 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]] [[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-3.64748 -1.6430994 -0.9161662 ... -0.7333418 -2.708974 -1.7628176 ] [-1.2914032 -2.2446897 -4.2053638 ... -2.778206 -2.4667783 -2.188759 ] ... [-1.5002908 -2.4499114 -3.066747 ... -3.110539 -1.0648085 -2.7579393 ] [-4.1087565 -2.3385785 -3.780562 ... -2.9278653 -2.0456097 -2.4474187 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]] ... [[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-3.1963663 -2.6846025 -1.5563409 ... 
-3.7025278 -3.2781363 -3.279642 ] [-2.5313087 -2.7674074 -1.3247794 ... -0.81711733 -1.5962899 -1.93 ] ... [-2.3646429 -2.1160343 -2.3462102 ... -2.4840631 -0.9260117 -1.876831 ] [-1.8642747 -2.380619 -3.9093783 ... -4.8655396 -0.6035892 -1.5284436 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]] [[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-2.3539417 -2.2247066 -0.20812726 ... -1.3737863 -2.7965302 -2.7204552 ] [-3.315349 -3.2629447 -1.3564246 ... -1.6412487 -3.417741 -3.028305 ] ... [-0.3656721 -3.8827572 -1.531791 ... -1.3219306 -1.9966772 -2.912211 ] [-2.262646 -4.0759964 -3.988352 ... -0.9655591 -2.348301 -1.7962232 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]] [[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-2.254851 -1.7814863 -4.947215 ... -3.7235398 -2.1214752 -2.6385157 ] [-4.3528576 -1.6393123 -3.148658 ... -2.5151367 -2.6407115 -3.5539315 ] ... [-1.1780186 -1.1432953 -1.3815292 ... -1.7657824 -2.2085645 -3.8653703 ] [-3.6650562 -2.426237 -1.7173171 ... -3.106511 -0.71988165 -4.0016856 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]]]]]; ov_res: [[[[[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [ 3.6522827 -0.15336311 -1.1494113 ... -0.12225509 0.33543777 -0.38739446] [-3.7048955 -0.12517995 -1.6546385 ... 2.6058278 1.9980797 -0.1450314 ] ... [-1.1555052 0.345778 -2.4508247 ... 0.29327238 -0.47990292 -2.4365487 ] [ 1.61785 -2.1035948 -3.3465996 ... 2.1707 -0.18409646 -3.4524488 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]] [[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [-0.22962666 -2.2273421 -0.73136 ... 0.49852788 2.3347073 -2.102014 ] [ 2.4383526 -4.9551063 -4.3879232 ... 3.9598303 -0.35771474 -3.518107 ] ... [ 2.0497808 -1.9396204 2.8999953 ... -4.3620615 2.7741094 -0.3417191 ] [-2.5954626 2.352687 -4.0034933 ... 
-0.68473285 -0.7773702 4.816616 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]] [[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [-1.5742404 -2.2300978 -3.0783467 ... -4.4490585 0.1585117 -3.6775289 ] [-1.5367923 1.2293289 3.5477724 ... -0.28054577 -2.1804438 -1.9803758 ] ... [-2.1019268 -2.8157964 0.8971497 ... 0.8881732 -1.1294849 -0.14475429] [ 3.8718023 0.2843793 -0.11334133 ... -0.00765443 1.5617317 -1.4049703 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]] ... [[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [ 3.6817408 -1.6922278 0.11389858 ... 0.83670294 1.5248874 0.1221348 ] [ 3.4193673 1.5404791 -5.5231724 ... -2.3430643 -4.536428 -1.1794997 ] ... [-0.7738597 -0.29603583 0.27259922 ... -0.9461598 -1.8152645 -1.6506374 ] [-1.4502304 -2.7849722 1.6004113 ... 1.8307034 -4.197447 1.6322266 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]] [[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [-1.1929588 0.28452706 -4.5372963 ... -3.8918715 2.1333508 0.79263246] [-2.0986009 1.8358291 -2.6228435 ... -2.1200428 2.5189672 1.6745144 ] ... [-5.9522715 1.1943085 -0.44820312 ... -4.9775767 -1.2498837 0.95073426] [-1.8125281 1.5554572 1.3654858 ... -3.694283 -1.9123644 -2.5415435 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]] [[-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ] [ 2.259138 -3.4559927 1.3994848 ... 1.8144699 -3.652896 -0.5814525 ] [ 2.8473358 0.11682814 2.3767753 ... 0.26829988 -0.30894274 1.4022104 ] ... [-1.4401643 -0.6832086 -2.0150437 ... -2.856327 -2.188881 2.9203649 ] [-0.4177903 0.05312622 -2.015356 ... 1.9031688 -2.5250425 0.700655 ] [-0.7155179 -0.7155179 -0.7155179 ... -0.7155179 -0.7155179 -0.7155179 ]]] [[[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 2.9332967 0.565685 2.3649802 ... 
3.4187503 2.0412064 0.5264803 ] [ 5.0938025 -1.853497 2.144293 ... 0.3760962 1.174622 2.2081146 ] ... [ 2.0240881 1.412227 0.6087446 ... 2.8766966 1.0933937 1.9257426 ] [ 3.575054 2.4099479 1.1751884 ... 1.7156017 1.892617 1.0936859 ] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]] [[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 0.93515325 0.39973497 1.1653771 ... 0.812175 -0.9105334 1.1309892 ] [ 1.321817 1.4078648 0.4339887 ... 0.2676202 0.85053706 2.1206245 ] ... [ 2.0701058 1.1277994 0.03049326 ... -0.7991966 3.2601979 -0.0672307 ] [ 1.8217906 3.3094308 -0.3998984 ... -1.5324297 0.11059642 1.9135792 ] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]] [[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 0.33043706 -0.5722462 -0.6012068 ... -0.97086334 3.419397 -1.9281893 ] [ 2.0387223 -0.7708181 1.9327283 ... 0.5900806 0.69187725 -0.20526338] ... [ 1.8955135 -1.6013961 4.207649 ... -0.05520308 0.1350503 1.4661968 ] [ 2.494532 -0.8154237 4.6434264 ... 1.0109419 2.434088 4.720125 ] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]] ... [[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 0.03814888 1.6988113 1.5946045 ... 2.5881166 0.04960847 1.7966251 ] [ 1.776999 1.5568033 -2.7412007 ... -0.99205065 -1.1908324 2.2989225 ] ... [ 1.6627082 0.09042788 0.30432755 ... 2.0542226 3.0160265 1.7001693 ] [ 1.5251364 -0.9492459 -0.16326213 ... 2.1950026 0.12931168 2.3438628 ] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]] [[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 3.1696079 2.3059616 -2.4890676 ... -2.588308 3.607102 1.870077 ] [ 1.3265024 1.9003744 -0.73355377 ... 1.027206 0.5284773 1.1022093 ] ... [-1.431359 3.3153253 -0.42798698 ... -1.493 0.5313027 3.1271186 ] [ 0.82212573 2.6827712 -0.4770578 ... -1.5895803 3.2379801 2.8753886 ] [ 1.1710296 1.1710296 1.1710296 ... 
1.1710296 1.1710296 1.1710296 ]] [[ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ] [ 1.862465 2.152293 3.259771 ... -0.898932 -1.7465308 0.33136868] [ 2.9997115 1.5656137 2.9861567 ... 1.3459029 0.76564014 3.6427743 ] ... [-0.41746294 1.8606155 0.4942068 ... -0.75223863 0.6954143 4.7849016 ] [ 1.091001 1.9068835 1.2366631 ... 3.5655744 1.4906899 0.63956386] [ 1.1710296 1.1710296 1.1710296 ... 1.1710296 1.1710296 1.1710296 ]]] [[[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-2.8302424 -1.7396145 -1.9662094 ... -2.9974985 -2.6458378 -2.912993 ] [-1.9529383 -2.0667384 -0.8738462 ... -2.795956 -2.4194193 -2.2027395 ] ... [-3.4757242 -2.3894536 -2.8259323 ... -3.3162363 -2.9930446 -1.5629852 ] [-3.2279537 -3.4682724 -1.8449569 ... -3.523253 -1.9965663 -1.7411021 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]] [[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-3.5466142 -2.192539 -2.4253423 ... -3.5399685 -3.5008943 -1.1623334 ] [-2.977379 -1.2888864 -2.14503 ... -3.035428 -2.2436943 -1.7529335 ] ... [-3.705768 -2.86243 -4.088171 ... -1.9018857 -3.39781 -3.5879579 ] [-1.4495516 -3.7276607 -1.07218 ... -1.5759225 -2.7728815 -3.993977 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]] [[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-3.64748 -1.6430994 -0.9161662 ... -0.7333418 -2.708974 -1.7628176 ] [-1.2914032 -2.2446897 -4.2053638 ... -2.778206 -2.4667783 -2.188759 ] ... [-1.5002908 -2.4499114 -3.066747 ... -3.110539 -1.0648085 -2.7579393 ] [-4.1087565 -2.3385785 -3.780562 ... -2.9278653 -2.0456097 -2.4474187 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]] ... [[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-3.1963663 -2.6846025 -1.5563409 ... -3.7025278 -3.2781363 -3.279642 ] [-2.5313087 -2.7674074 -1.3247794 ... -0.81711733 -1.5962899 -1.93 ] ... 
[-2.3646429 -2.1160343 -2.3462102 ... -2.4840631 -0.9260117 -1.876831 ] [-1.8642747 -2.380619 -3.9093783 ... -4.8655396 -0.6035892 -1.5284436 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]] [[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-2.3539417 -2.2247066 -0.20812726 ... -1.3737863 -2.7965302 -2.7204552 ] [-3.315349 -3.2629447 -1.3564246 ... -1.6412487 -3.417741 -3.028305 ] ... [-0.3656721 -3.8827572 -1.531791 ... -1.3219306 -1.9966772 -2.912211 ] [-2.262646 -4.0759964 -3.988352 ... -0.9655591 -2.348301 -1.7962232 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]] [[-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ] [-2.254851 -1.7814863 -4.947215 ... -3.7235398 -2.1214752 -2.6385157 ] [-4.3528576 -1.6393123 -3.148658 ... -2.5151367 -2.6407115 -3.5539315 ] ... [-1.1780186 -1.1432953 -1.3815292 ... -1.7657824 -2.2085645 -3.8653703 ] [-3.6650562 -2.426237 -1.7173171 ... -3.106511 -0.71988165 -4.0016856 ] [-2.4455974 -2.4455974 -2.4455974 ... -2.4455974 -2.4455974 -2.4455974 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_876.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.6064 0.0328 0.7097 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.7085 (2,1,1,.,.) = -0.4904 (3,1,1,.,.) = -1.2622 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[ 5.90447307e-01 -5.67338467e-02 -7.34716535e-01 ... -4.78794575e-01 -5.17450571e-01 -1.25468969e+00] [-2.13734794e+00 5.31581044e-01 -1.82392812e+00 ... -3.87913257e-01 -7.05064893e-01 -3.71741116e-01] [-1.05975604e+00 -1.17158222e+00 -1.08853650e+00 ... -8.10481668e-01 -1.15671813e+00 -8.42176914e-01] ... [-1.03736436e+00 -9.28291917e-01 -4.20682877e-01 ... -3.87408674e-01 6.24571443e-02 -1.44551134e+00] [-2.53169918e+00 -2.29730701e+00 -1.51138520e+00 ... -8.51164401e-01 -2.90658236e-01 -4.17971075e-01] [-6.14302874e-01 -2.38971615e+00 -3.63312811e-01 ... -9.84369338e-01 -1.03156388e+00 6.65032029e-01]] [[-6.76940680e-01 -2.18573308e+00 -3.77301931e-01 ... -2.95303375e-01 -8.39353442e-01 -7.11699247e-01] [-1.11723709e+00 -2.68363237e-01 -1.33632624e+00 ... 
-8.54342163e-01 3.35322738e-01 -5.05692959e-01] [-7.27208197e-01 -3.20593804e-01 3.86798501e-01 ... -5.08779883e-01 6.44704103e-02 -9.18168128e-01] ... [-1.61736858e+00 -8.20075929e-01 -4.83548850e-01 ... -7.81305194e-01 -8.29485536e-01 6.14216328e-01] [-7.60918617e-01 -2.24837971e+00 8.87413859e-01 ... -4.63174760e-01 -1.42810547e+00 -1.34478998e+00] [-7.54932165e-02 -5.45542121e-01 4.84222054e-01 ... -4.95417297e-01 -8.03007960e-01 -7.12877035e-01]] [[-4.59916353e-01 -7.40197003e-01 -3.99304211e-01 ... 8.57826471e-02 -2.15849209e+00 -1.20544934e+00] [ 2.33268201e-01 -4.20270503e-01 -1.83034658e+00 ... 7.16717720e-01 2.41077125e-01 -5.72324514e-01] [-8.26982379e-01 3.52751255e-01 -9.08326089e-01 ... -6.73609018e-01 -1.03114641e+00 -4.83212411e-01] ... [-2.48523951e-02 -5.20504773e-01 -6.60302579e-01 ... -4.44531530e-01 -1.00063705e+00 -6.52318060e-01] [-5.76580167e-02 -8.67732048e-01 -1.51991081e+00 ... -1.02960467e-02 -1.20997345e+00 3.90194654e-01] [-1.87617809e-01 -3.19716960e-01 2.70452559e-01 ... -6.64875507e-02 -1.52402067e+00 -7.98481822e-01]] ... [[-5.89121103e-01 -8.14302802e-01 7.21640944e-01 ... 1.93402469e-01 -3.74897480e-01 -8.51761103e-01] [ 9.12300944e-02 -9.64650869e-01 -9.01415110e-01 ... 3.55628610e-01 -5.18866539e-01 -7.00829566e-01] [-7.40543664e-01 3.32167745e-02 -7.21066654e-01 ... -1.86481893e+00 -9.91674185e-01 -4.42755252e-01] ... [-1.19307685e+00 4.79796529e-01 -6.64501309e-01 ... -3.21629375e-01 -4.67494577e-01 6.27569675e-01] [-7.75597513e-01 -2.57860988e-01 -2.94346952e+00 ... -1.75868368e+00 -4.05116081e-02 -5.90998352e-01] [-4.98810083e-01 -1.01401508e-01 -1.55239952e+00 ... -2.85856873e-01 -4.46705818e-02 -1.07808363e+00]] [[-1.44912601e-01 -5.06176472e-01 -8.65561843e-01 ... 4.40778494e-01 -3.92758489e-01 -7.05095232e-01] [-6.19515955e-01 -4.24055606e-01 -7.34772682e-01 ... -2.00067252e-01 -2.65348768e+00 -6.09429955e-01] [ 3.63779128e-01 2.78696597e-01 -3.17750216e-01 ... -1.66703308e+00 -2.25429177e-01 1.51163876e-01] ... 
[-1.84818649e+00 2.72808194e-01 1.85992122e-02 ... 8.86747360e-01 -1.49858463e+00 -1.12037110e+00] [-9.23862338e-01 -1.57178903e+00 -1.56695724e-01 ... -4.72178668e-01 -1.94293106e+00 -2.24191737e+00] [-1.96006298e+00 -1.97251368e+00 8.93164873e-02 ... -1.16779137e+00 -5.66042721e-01 -7.33343005e-01]] [[ 5.02640724e-01 -1.54040903e-01 -3.70267183e-01 ... -7.34718621e-01 -8.01722884e-01 -1.26636457e+00] [-7.39035726e-01 3.75197351e-01 1.77628994e-02 ... -6.82112694e-01 -2.46822119e-01 -4.47181761e-01] [-1.80817926e+00 -1.17177606e+00 -1.21997774e-01 ... -2.07966566e-03 -7.26372719e-01 -4.15506899e-01] ... [-8.29662979e-01 -6.77922606e-01 -9.12967384e-01 ... -6.58155084e-01 -9.83519197e-01 1.84381366e-01] [-1.84441417e-01 -1.52899647e+00 -3.09552610e-01 ... -1.01895213e+00 -1.57905102e-01 -7.67051578e-01] [-2.60512471e-01 -7.37529278e-01 -1.69744217e+00 ... -4.35813487e-01 -6.57408714e-01 -1.19977564e-01]]] [[[ 1.62450403e-01 5.16863286e-01 -2.04897881e-01 ... 7.87326932e-01 -7.05105811e-02 -1.78722456e-01] [-2.13702843e-01 -4.52564597e-01 -1.69770718e-01 ... 7.98798800e-01 -6.60286546e-01 1.05807096e-01] [ 3.63242812e-02 -3.41124058e-01 -3.86294350e-02 ... -5.58937192e-01 -6.33406818e-01 -8.20568874e-02] ... [-3.09155107e-01 1.55007511e-01 -4.60161567e-01 ... -1.69889554e-01 2.72197098e-01 9.81700048e-02] [ 7.32807279e-01 -1.55492201e-01 -1.08211353e-01 ... -2.12753117e-01 -7.40985870e-02 4.84558165e-01] [ 6.21695936e-01 -1.47078827e-01 7.78407305e-02 ... -4.49258983e-01 7.03204513e-01 7.18767583e-01]] [[-6.09758258e-01 4.41537142e-01 -8.92448604e-01 ... 2.85042394e-02 3.34936440e-01 4.30049479e-01] [ 3.61012518e-01 -2.09521398e-01 -4.52209592e-01 ... -6.67084992e-01 8.93620074e-01 3.45740139e-01] [ 9.78467703e-01 3.95505250e-01 6.29926145e-01 ... 5.17688692e-02 -2.29044035e-01 6.81879461e-01] ... [-4.06906426e-01 -3.55174720e-01 1.11788787e-01 ... 4.79397118e-01 5.09443805e-02 2.59916276e-01] [ 5.34349740e-01 -2.03115165e-01 1.51409000e-01 ... 
3.51142526e-01 2.05498889e-01 2.45908573e-01] [ 7.14486003e-01 7.17312992e-01 3.89695346e-01 ... -5.52193880e-01 9.04017091e-02 8.89513642e-02]] [[ 2.33922735e-01 -8.86696503e-02 -3.19383562e-01 ... 4.05053377e-01 6.92063570e-01 1.07924312e-01] [ 9.30388033e-01 2.75453627e-01 -2.08693966e-01 ... -5.39292216e-01 4.14512694e-01 1.05059767e+00] [ 1.77393034e-02 7.18775988e-02 -6.14455819e-01 ... 6.18109584e-01 5.68084978e-02 -2.28277400e-01] ... [ 2.31848463e-01 -8.49998653e-01 -5.68712175e-01 ... 3.69899809e-01 -3.21062386e-01 -5.11033759e-02] [-2.65204251e-01 -6.06623173e-01 1.24232851e-01 ... 3.68296623e-01 8.18208456e-02 -9.81219560e-02] [ 3.05231094e-01 -9.50390160e-01 1.03779219e-01 ... -9.37466919e-01 1.20460540e-01 -5.96010759e-02]] ... [[ 9.94625807e-01 -3.13316464e-01 2.41933838e-02 ... -8.31271470e-01 5.88663220e-01 -1.51013732e+00] [-1.77272364e-01 4.77030933e-01 5.04035950e-01 ... -1.43710636e-02 -2.82721341e-01 8.79394114e-02] [ 6.33237138e-03 -1.04702199e+00 6.13392532e-01 ... 1.80225596e-02 -5.81286848e-02 -6.29944921e-01] ... [-2.93986976e-01 2.52672076e-01 4.49302435e-01 ... -5.38130552e-02 4.31085467e-01 -9.93121326e-01] [ 1.44113362e-01 4.77833390e-01 -5.81344962e-01 ... -5.33652365e-01 4.54925656e-01 -1.31988496e-01] [-1.61714196e-01 -2.11383820e-01 3.44335079e-01 ... -2.08174810e-01 -6.55778870e-02 5.09234488e-01]] [[-4.56532061e-01 4.99882102e-01 -3.55969608e-01 ... -5.53501427e-01 -6.41961634e-01 4.26206112e-01] [-7.08066151e-02 1.40615717e-01 9.37136829e-01 ... -5.49847186e-01 1.52593255e-01 -8.84600043e-01] [ 9.24536586e-02 -2.32671604e-01 -7.72973523e-02 ... -5.73550701e-01 -6.15302473e-04 9.00791287e-02] ... [ 6.99682385e-02 7.01149367e-03 4.10902381e-01 ... 7.65711546e-01 -2.10750908e-01 1.00496568e-01] [-7.42917240e-01 4.04838026e-01 -2.51623511e-01 ... 5.21025956e-01 -6.74211681e-01 -5.13664484e-01] [ 6.97794139e-01 2.49175839e-02 3.86607826e-01 ... 
1.85547486e-01 1.69268370e-01 -1.47842824e-01]] [[-6.61644995e-01 -8.67304146e-01 -1.06161579e-01 ... -3.77817988e-01 -3.67787629e-02 -2.18789980e-01] [-1.44584686e-01 -7.24512994e-01 1.07078906e-02 ... 7.32322872e-01 1.06669683e-02 5.07900238e-01] [ 3.28147948e-01 2.16199942e-02 -3.16309035e-01 ... -8.80980268e-02 -5.05989611e-01 5.15627339e-02] ... [-4.18533206e-01 -2.95638032e-02 7.79711306e-01 ... -3.08832973e-02 3.59918773e-01 6.08999193e-01] [-5.36377728e-01 -6.60229087e-01 -2.76622713e-01 ... -1.20202616e-01 3.59997690e-01 -3.10959578e-01] [ 4.32878174e-02 3.58123600e-01 -4.61758673e-01 ... 4.38949168e-01 -3.23799849e-01 -2.05096677e-01]]] [[[ 2.29563618e+00 -3.75070810e-01 -1.40507662e+00 ... 7.23785460e-01 5.41442752e-01 1.78068912e+00] [ 6.48933053e-02 1.39789760e+00 3.65883589e-01 ... -5.61785817e-01 2.93884373e+00 1.94486439e-01] [ 1.77230263e+00 1.94876885e+00 6.06751919e-01 ... 2.98313975e-01 2.31588745e+00 1.14887810e+00] ... [ 2.36235529e-01 1.87647212e+00 1.39087975e+00 ... 6.69244826e-01 8.22108388e-01 -8.17403913e-01] [ 7.78486252e-01 2.16441393e+00 3.51858348e-01 ... -3.94868374e-01 -2.26531029e-01 -3.09588242e+00] [ 4.11048412e-01 1.82320118e-01 -2.54403353e-01 ... 1.12668395e+00 -2.56647587e-01 -3.05339575e-01]] [[ 1.90955055e+00 2.28047991e+00 1.19619250e-01 ... 4.57723379e-01 4.24216181e-01 3.43831301e+00] [ 1.31788635e+00 6.60958469e-01 7.30744958e-01 ... 1.65343046e+00 1.12790954e+00 -8.29336405e-01] [-1.36056054e+00 1.67509222e+00 5.14089406e-01 ... -1.38729811e-01 1.20878935e+00 -1.67024720e+00] ... [ 2.42446613e+00 -6.09684110e-01 2.97665149e-01 ... 4.03544664e+00 3.15957928e+00 2.64378071e+00] [-3.36696744e-01 2.00257063e+00 8.77051771e-01 ... 2.06287479e+00 -1.05636370e+00 1.66544867e+00] [ 1.07492530e+00 1.54820263e-01 4.52505469e-01 ... 1.05867147e+00 1.28902435e+00 6.61968172e-01]] [[ 1.11337149e+00 -6.57422185e-01 1.28702211e+00 ... 3.94398987e-01 1.33582294e+00 -1.88902795e-01] [-1.42013681e+00 6.43598795e-01 -1.78911555e+00 ... 
6.13261044e-01 3.01414871e+00 -9.86125946e-01] [ 1.46556735e-01 2.06519127e-01 -2.92978644e-01 ... 1.53660989e+00 2.63684243e-01 1.09478343e+00] ... [ 4.78813648e-02 1.21321130e+00 1.17202067e+00 ... -9.96001601e-01 -9.61014748e-01 2.77283907e+00] [ 5.56808233e-01 -2.43086159e-01 -2.80907929e-01 ... 5.92696011e-01 -1.28419638e-01 -1.45924890e+00] [ 3.54823208e+00 9.69892681e-01 -1.15347373e+00 ... 7.78343678e-01 -9.46532011e-01 2.90185165e+00]] ... [[ 1.56657434e+00 1.72483301e+00 5.03467083e-01 ... 1.72915685e+00 2.39098358e+00 1.99948323e+00] [ 2.89108944e+00 3.11816025e+00 6.55612350e-02 ... -1.58134162e+00 2.42034054e+00 2.54238844e+00] [ 4.30581391e-01 -2.33829021e-02 6.99632645e-01 ... 4.57094371e-01 1.12633181e+00 2.88828909e-01] ... [-3.91535759e-01 7.04558253e-01 3.22187662e-01 ... 1.97522700e-01 2.23152828e+00 1.04631162e+00] [ 2.98431873e+00 -9.65409756e-01 7.34646320e-02 ... -2.08222151e-01 3.32869196e+00 1.45431733e+00] [ 1.97096360e+00 -6.95482016e-01 1.49562049e+00 ... 9.83170867e-01 4.36983734e-01 1.14030659e-01]] [[ 1.87918174e+00 -1.97266281e-01 3.37164497e+00 ... 1.13703454e+00 2.34472334e-01 3.08507085e-02] [ 1.38044977e+00 -1.41101205e+00 2.05074966e-01 ... -1.11102760e-01 6.82706714e-01 2.46542841e-01] [ 2.38024330e+00 1.52943087e+00 -1.99852526e+00 ... -8.40001702e-01 2.35757303e+00 8.47341061e-01] ... [ 1.17117202e+00 2.34439325e+00 3.46686411e+00 ... -1.19140196e+00 1.02325594e+00 2.13044405e+00] [-2.27920532e+00 1.21600294e+00 -1.85036910e+00 ... -2.05633879e+00 -1.21320534e+00 -3.34138513e-01] [ 2.61797875e-01 5.59698820e-01 3.41729283e-01 ... 1.18182218e+00 -1.35848272e+00 -6.72620535e-03]] [[ 1.75342882e+00 1.31156778e+00 -5.29704809e-01 ... -1.77405679e+00 3.01124334e-01 -2.38129234e+00] [ 8.55564594e-01 1.26871693e+00 2.57454228e+00 ... 1.55915642e+00 -1.25915682e+00 1.58978176e+00] [ 9.19852853e-01 4.40936029e-01 3.25373840e+00 ... 2.25452948e+00 2.54630947e+00 1.03173208e+00] ... [ 5.91487288e-01 1.18688524e+00 1.19348419e+00 ... 
1.14732778e+00 -3.08702230e-01 5.38298845e-01] [ 2.07159805e+00 -5.11799693e-01 -1.50895202e+00 ... 1.21802032e+00 8.64494920e-01 2.39695740e+00] [-1.94426537e-01 -3.10927153e-01 9.23276305e-01 ... 1.75295615e+00 1.60685849e+00 1.22355294e+00]]]]]; ov_res: [[[[[ 5.90447307e-01 -5.67338467e-02 -7.34716535e-01 ... -4.78794575e-01 -5.17450571e-01 -1.25468969e+00] [-2.13734794e+00 5.31581044e-01 -1.82392812e+00 ... -3.87913257e-01 -7.05064893e-01 -3.71741116e-01] [-1.05975604e+00 -1.17158222e+00 -1.08853650e+00 ... -8.10481668e-01 -1.15671813e+00 -8.42176914e-01] ... [-1.03736436e+00 -9.28291917e-01 -4.20682877e-01 ... -3.87408674e-01 6.24571443e-02 -1.44551134e+00] [-2.53169918e+00 -2.29730701e+00 -1.51138520e+00 ... -8.51164401e-01 -2.90658236e-01 -4.17971075e-01] [-6.14302874e-01 -2.38971615e+00 -3.63312811e-01 ... -9.84369338e-01 -1.03156388e+00 6.65032029e-01]] [[-6.76940680e-01 -2.18573308e+00 -3.77301931e-01 ... -2.95303375e-01 -8.39353442e-01 -7.11699247e-01] [-1.11723709e+00 -2.68363237e-01 -1.33632624e+00 ... -8.54342163e-01 3.35322738e-01 -5.05692959e-01] [-7.27208197e-01 -3.20593804e-01 3.86798501e-01 ... -5.08779883e-01 6.44704103e-02 -9.18168128e-01] ... [-1.61736858e+00 -8.20075929e-01 -4.83548850e-01 ... -7.81305194e-01 -8.29485536e-01 6.14216328e-01] [-7.60918617e-01 -2.24837971e+00 8.87413859e-01 ... -4.63174760e-01 -1.42810547e+00 -1.34478998e+00] [-7.54932165e-02 -5.45542121e-01 4.84222054e-01 ... -4.95417297e-01 -8.03007960e-01 -7.12877035e-01]] [[-4.59916353e-01 -7.40197003e-01 -3.99304211e-01 ... 8.57826471e-02 -2.15849209e+00 -1.20544934e+00] [ 2.33268201e-01 -4.20270503e-01 -1.83034658e+00 ... 7.16717720e-01 2.41077125e-01 -5.72324514e-01] [-8.26982379e-01 3.52751255e-01 -9.08326089e-01 ... -6.73609018e-01 -1.03114641e+00 -4.83212411e-01] ... [-2.48523951e-02 -5.20504773e-01 -6.60302579e-01 ... -4.44531530e-01 -1.00063705e+00 -6.52318060e-01] [-5.76580167e-02 -8.67732048e-01 -1.51991081e+00 ... 
-1.02960467e-02 -1.20997345e+00 3.90194654e-01] [-1.87617809e-01 -3.19716960e-01 2.70452559e-01 ... -6.64875507e-02 -1.52402067e+00 -7.98481822e-01]] ... [[-5.89121103e-01 -8.14302802e-01 7.21640944e-01 ... 1.93402469e-01 -3.74897480e-01 -8.51761103e-01] [ 9.12300944e-02 -9.64650869e-01 -9.01415110e-01 ... 3.55628610e-01 -5.18866539e-01 -7.00829566e-01] [-7.40543664e-01 3.32167745e-02 -7.21066654e-01 ... -1.86481893e+00 -9.91674185e-01 -4.42755252e-01] ... [-1.19307685e+00 4.79796529e-01 -6.64501309e-01 ... -3.21629375e-01 -4.67494577e-01 6.27569675e-01] [-7.75597513e-01 -2.57860988e-01 -2.94346952e+00 ... -1.75868368e+00 -4.05116081e-02 -5.90998352e-01] [-4.98810083e-01 -1.01401508e-01 -1.55239952e+00 ... -2.85856873e-01 -4.46705818e-02 -1.07808363e+00]] [[-1.44912601e-01 -5.06176472e-01 -8.65561843e-01 ... 4.40778494e-01 -3.92758489e-01 -7.05095232e-01] [-6.19515955e-01 -4.24055606e-01 -7.34772682e-01 ... -2.00067252e-01 -2.65348768e+00 -6.09429955e-01] [ 3.63779128e-01 2.78696597e-01 -3.17750216e-01 ... -1.66703308e+00 -2.25429177e-01 1.51163876e-01] ... [-1.84818649e+00 2.72808194e-01 1.85992122e-02 ... 8.86747360e-01 -1.49858463e+00 -1.12037110e+00] [-9.23862338e-01 -1.57178903e+00 -1.56695724e-01 ... -4.72178668e-01 -1.94293106e+00 -2.24191737e+00] [-1.96006298e+00 -1.97251368e+00 8.93164873e-02 ... -1.16779137e+00 -5.66042721e-01 -7.33343005e-01]] [[ 5.02640724e-01 -1.54040903e-01 -3.70267183e-01 ... -7.34718621e-01 -8.01722884e-01 -1.26636457e+00] [-7.39035726e-01 3.75197351e-01 1.77628994e-02 ... -6.82112694e-01 -2.46822119e-01 -4.47181761e-01] [-1.80817926e+00 -1.17177606e+00 -1.21997774e-01 ... -2.07966566e-03 -7.26372719e-01 -4.15506899e-01] ... [-8.29662979e-01 -6.77922606e-01 -9.12967384e-01 ... -6.58155084e-01 -9.83519197e-01 1.84381366e-01] [-1.84441417e-01 -1.52899647e+00 -3.09552610e-01 ... -1.01895213e+00 -1.57905102e-01 -7.67051578e-01] [-2.60512471e-01 -7.37529278e-01 -1.69744217e+00 ... 
-4.35813487e-01 -6.57408714e-01 -1.19977564e-01]]] [[[ 1.62450403e-01 5.16863286e-01 -2.04897881e-01 ... 7.87326932e-01 -7.05105811e-02 -1.78722456e-01] [-2.13702843e-01 -4.52564597e-01 -1.69770718e-01 ... 7.98798800e-01 -6.60286546e-01 1.05807096e-01] [ 3.63242812e-02 -3.41124058e-01 -3.86294350e-02 ... -5.58937192e-01 -6.33406818e-01 -8.20568874e-02] ... [-3.09155107e-01 1.55007511e-01 -4.60161567e-01 ... -1.69889554e-01 2.72197098e-01 9.81700048e-02] [ 7.32807279e-01 -1.55492201e-01 -1.08211353e-01 ... -2.12753117e-01 -7.40985870e-02 4.84558165e-01] [ 6.21695936e-01 -1.47078827e-01 7.78407305e-02 ... -4.49258983e-01 7.03204513e-01 7.18767583e-01]] [[-6.09758258e-01 4.41537142e-01 -8.92448604e-01 ... 2.85042394e-02 3.34936440e-01 4.30049479e-01] [ 3.61012518e-01 -2.09521398e-01 -4.52209592e-01 ... -6.67084992e-01 8.93620074e-01 3.45740139e-01] [ 9.78467703e-01 3.95505250e-01 6.29926145e-01 ... 5.17688692e-02 -2.29044035e-01 6.81879461e-01] ... [-4.06906426e-01 -3.55174720e-01 1.11788787e-01 ... 4.79397118e-01 5.09443805e-02 2.59916276e-01] [ 5.34349740e-01 -2.03115165e-01 1.51409000e-01 ... 3.51142526e-01 2.05498889e-01 2.45908573e-01] [ 7.14486003e-01 7.17312992e-01 3.89695346e-01 ... -5.52193880e-01 9.04017091e-02 8.89513642e-02]] [[ 2.33922735e-01 -8.86696503e-02 -3.19383562e-01 ... 4.05053377e-01 6.92063570e-01 1.07924312e-01] [ 9.30388033e-01 2.75453627e-01 -2.08693966e-01 ... -5.39292216e-01 4.14512694e-01 1.05059767e+00] [ 1.77393034e-02 7.18775988e-02 -6.14455819e-01 ... 6.18109584e-01 5.68084978e-02 -2.28277400e-01] ... [ 2.31848463e-01 -8.49998653e-01 -5.68712175e-01 ... 3.69899809e-01 -3.21062386e-01 -5.11033759e-02] [-2.65204251e-01 -6.06623173e-01 1.24232851e-01 ... 3.68296623e-01 8.18208456e-02 -9.81219560e-02] [ 3.05231094e-01 -9.50390160e-01 1.03779219e-01 ... -9.37466919e-01 1.20460540e-01 -5.96010759e-02]] ... [[ 9.94625807e-01 -3.13316464e-01 2.41933838e-02 ... 
-8.31271470e-01 5.88663220e-01 -1.51013732e+00] [-1.77272364e-01 4.77030933e-01 5.04035950e-01 ... -1.43710636e-02 -2.82721341e-01 8.79394114e-02] [ 6.33237138e-03 -1.04702199e+00 6.13392532e-01 ... 1.80225596e-02 -5.81286848e-02 -6.29944921e-01] ... [-2.93986976e-01 2.52672076e-01 4.49302435e-01 ... -5.38130552e-02 4.31085467e-01 -9.93121326e-01] [ 1.44113362e-01 4.77833390e-01 -5.81344962e-01 ... -5.33652365e-01 4.54925656e-01 -1.31988496e-01] [-1.61714196e-01 -2.11383820e-01 3.44335079e-01 ... -2.08174810e-01 -6.55778870e-02 5.09234488e-01]] [[-4.56532061e-01 4.99882102e-01 -3.55969608e-01 ... -5.53501427e-01 -6.41961634e-01 4.26206112e-01] [-7.08066151e-02 1.40615717e-01 9.37136829e-01 ... -5.49847186e-01 1.52593255e-01 -8.84600043e-01] [ 9.24536586e-02 -2.32671604e-01 -7.72973523e-02 ... -5.73550701e-01 -6.15302473e-04 9.00791287e-02] ... [ 6.99682385e-02 7.01149367e-03 4.10902381e-01 ... 7.65711546e-01 -2.10750908e-01 1.00496568e-01] [-7.42917240e-01 4.04838026e-01 -2.51623511e-01 ... 5.21025956e-01 -6.74211681e-01 -5.13664484e-01] [ 6.97794139e-01 2.49175839e-02 3.86607826e-01 ... 1.85547486e-01 1.69268370e-01 -1.47842824e-01]] [[-6.61644995e-01 -8.67304146e-01 -1.06161579e-01 ... -3.77817988e-01 -3.67787629e-02 -2.18789980e-01] [-1.44584686e-01 -7.24512994e-01 1.07078906e-02 ... 7.32322872e-01 1.06669683e-02 5.07900238e-01] [ 3.28147948e-01 2.16199942e-02 -3.16309035e-01 ... -8.80980268e-02 -5.05989611e-01 5.15627339e-02] ... [-4.18533206e-01 -2.95638032e-02 7.79711306e-01 ... -3.08832973e-02 3.59918773e-01 6.08999193e-01] [-5.36377728e-01 -6.60229087e-01 -2.76622713e-01 ... -1.20202616e-01 3.59997690e-01 -3.10959578e-01] [ 4.32878174e-02 3.58123600e-01 -4.61758673e-01 ... 4.38949168e-01 -3.23799849e-01 -2.05096677e-01]]] [[[ 2.29563618e+00 -3.75070810e-01 -1.40507662e+00 ... 7.23785460e-01 5.41442752e-01 1.78068912e+00] [ 6.48933053e-02 1.39789760e+00 3.65883589e-01 ... 
-5.61785817e-01 2.93884373e+00 1.94486439e-01] [ 1.77230263e+00 1.94876885e+00 6.06751919e-01 ... 2.98313975e-01 2.31588745e+00 1.14887810e+00] ... [ 2.36235529e-01 1.87647212e+00 1.39087975e+00 ... 6.69244826e-01 8.22108388e-01 -8.17403913e-01] [ 7.78486252e-01 2.16441393e+00 3.51858348e-01 ... -3.94868374e-01 -2.26531029e-01 -3.09588242e+00] [ 4.11048412e-01 1.82320118e-01 -2.54403353e-01 ... 1.12668395e+00 -2.56647587e-01 -3.05339575e-01]] [[ 1.90955055e+00 2.28047991e+00 1.19619250e-01 ... 4.57723379e-01 4.24216181e-01 3.43831301e+00] [ 1.31788635e+00 6.60958469e-01 7.30744958e-01 ... 1.65343046e+00 1.12790954e+00 -8.29336405e-01] [-1.36056054e+00 1.67509222e+00 5.14089406e-01 ... -1.38729811e-01 1.20878935e+00 -1.67024720e+00] ... [ 2.42446613e+00 -6.09684110e-01 2.97665149e-01 ... 4.03544664e+00 3.15957928e+00 2.64378071e+00] [-3.36696744e-01 2.00257063e+00 8.77051771e-01 ... 2.06287479e+00 -1.05636370e+00 1.66544867e+00] [ 1.07492530e+00 1.54820263e-01 4.52505469e-01 ... 1.05867147e+00 1.28902435e+00 6.61968172e-01]] [[ 1.11337149e+00 -6.57422185e-01 1.28702211e+00 ... 3.94398987e-01 1.33582294e+00 -1.88902795e-01] [-1.42013681e+00 6.43598795e-01 -1.78911555e+00 ... 6.13261044e-01 3.01414871e+00 -9.86125946e-01] [ 1.46556735e-01 2.06519127e-01 -2.92978644e-01 ... 1.53660989e+00 2.63684243e-01 1.09478343e+00] ... [ 4.78813648e-02 1.21321130e+00 1.17202067e+00 ... -9.96001601e-01 -9.61014748e-01 2.77283907e+00] [ 5.56808233e-01 -2.43086159e-01 -2.80907929e-01 ... 5.92696011e-01 -1.28419638e-01 -1.45924890e+00] [ 3.54823208e+00 9.69892681e-01 -1.15347373e+00 ... 7.78343678e-01 -9.46532011e-01 2.90185165e+00]] ... [[ 1.56657434e+00 1.72483301e+00 5.03467083e-01 ... 1.72915685e+00 2.39098358e+00 1.99948323e+00] [ 2.89108944e+00 3.11816025e+00 6.55612350e-02 ... -1.58134162e+00 2.42034054e+00 2.54238844e+00] [ 4.30581391e-01 -2.33829021e-02 6.99632645e-01 ... 4.57094371e-01 1.12633181e+00 2.88828909e-01] ... [-3.91535759e-01 7.04558253e-01 3.22187662e-01 ... 
1.97522700e-01 2.23152828e+00 1.04631162e+00] [ 2.98431873e+00 -9.65409756e-01 7.34646320e-02 ... -2.08222151e-01 3.32869196e+00 1.45431733e+00] [ 1.97096360e+00 -6.95482016e-01 1.49562049e+00 ... 9.83170867e-01 4.36983734e-01 1.14030659e-01]] [[ 1.87918174e+00 -1.97266281e-01 3.37164497e+00 ... 1.13703454e+00 2.34472334e-01 3.08507085e-02] [ 1.38044977e+00 -1.41101205e+00 2.05074966e-01 ... -1.11102760e-01 6.82706714e-01 2.46542841e-01] [ 2.38024330e+00 1.52943087e+00 -1.99852526e+00 ... -8.40001702e-01 2.35757303e+00 8.47341061e-01] ... [ 1.17117202e+00 2.34439325e+00 3.46686411e+00 ... -1.19140196e+00 1.02325594e+00 2.13044405e+00] [-2.27920532e+00 1.21600294e+00 -1.85036910e+00 ... -2.05633879e+00 -1.21320534e+00 -3.34138513e-01] [ 2.61797875e-01 5.59698820e-01 3.41729283e-01 ... 1.18182218e+00 -1.35848272e+00 -6.72620535e-03]] [[ 1.75342882e+00 1.31156778e+00 -5.29704809e-01 ... -1.77405679e+00 3.01124334e-01 -2.38129234e+00] [ 8.55564594e-01 1.26871693e+00 2.57454228e+00 ... 1.55915642e+00 -1.25915682e+00 1.58978176e+00] [ 9.19852853e-01 4.40936029e-01 3.25373840e+00 ... 2.25452948e+00 2.54630947e+00 1.03173208e+00] ... [ 5.91487288e-01 1.18688524e+00 1.19348419e+00 ... 1.14732778e+00 -3.08702230e-01 5.38298845e-01] [ 2.07159805e+00 -5.11799693e-01 -1.50895202e+00 ... 1.21802032e+00 8.64494920e-01 2.39695740e+00] [-1.94426537e-01 -3.10927153e-01 9.23276305e-01 ... 1.75295615e+00 1.60685849e+00 1.22355294e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 1, 1], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_878.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.6663 0.8809 0.2625 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.1325 (2,1,1,.,.) = -0.3371 (3,1,1,.,.) = -0.3953 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.6980921 -1.5039829 ... -1.5649773 -1.5362592 -1.6662811 ] [-1.6662811 -1.7792202 -1.6629664 ... -1.8580091 -1.5652851 -1.6662811 ] ... [-1.6662811 -1.6713872 -1.5048428 ... -1.7115164 -1.7528512 -1.6662811 ] [-1.6662811 -1.8178155 -1.8730173 ... -1.70438 -1.7261381 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]] [[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.4177157 -1.7251049 ... -1.5800419 -1.6219466 -1.6662811 ] [-1.6662811 -1.6658517 -1.8717743 ... -1.7289423 -1.6886011 -1.6662811 ] ... [-1.6662811 -1.7935638 -1.595108 ... -1.986069 -1.7212054 -1.6662811 ] [-1.6662811 -1.7722201 -1.4202467 ... -1.581013 -1.7139498 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... 
-1.6662811 -1.6662811 -1.6662811 ]] [[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.4546138 -1.6964799 ... -1.9768724 -1.6395745 -1.6662811 ] [-1.6662811 -1.7539002 -1.6896259 ... -1.6203303 -1.9464605 -1.6662811 ] ... [-1.6662811 -1.4560896 -1.7521731 ... -1.6406528 -1.8358539 -1.6662811 ] [-1.6662811 -1.8067603 -1.6511141 ... -1.656973 -1.7659005 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]] ... [[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.5858452 -1.7668188 ... -1.6695843 -1.8633614 -1.6662811 ] [-1.6662811 -1.6326494 -1.733886 ... -1.7101979 -1.7784159 -1.6662811 ] ... [-1.6662811 -1.8887815 -1.6632613 ... -1.6593585 -1.6063613 -1.6662811 ] [-1.6662811 -1.4681976 -1.8572224 ... -1.8976583 -1.7033384 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]] [[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.7039545 -1.7942019 ... -1.7753885 -1.6134429 -1.6662811 ] [-1.6662811 -1.5407029 -1.9886124 ... -1.8470243 -1.6287534 -1.6662811 ] ... [-1.6662811 -1.7599385 -1.8348811 ... -1.3839666 -1.7017987 -1.6662811 ] [-1.6662811 -1.8846828 -1.9665059 ... -1.4685369 -1.691436 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]] [[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.9250561 -1.8656641 ... -1.5657606 -1.275629 -1.6662811 ] [-1.6662811 -1.742924 -1.6082945 ... -1.7144841 -1.6993679 -1.6662811 ] ... [-1.6662811 -1.7733339 -1.8415577 ... -1.5756029 -1.637428 -1.6662811 ] [-1.6662811 -1.56747 -1.5923384 ... -1.8712568 -1.7336977 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]]] [[[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 0.81661326 0.572156 ... 1.0304112 0.44463727 0.88090193] [ 0.88090193 0.97230864 1.3322576 ... 
0.35559684 0.6859089 0.88090193] ... [ 0.88090193 1.0500522 0.9167581 ... 0.30932736 0.43275803 0.88090193] [ 0.88090193 0.9987312 0.89902776 ... 0.75536656 0.8328512 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]] [[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 0.91330767 0.50085247 ... 0.8700781 0.50849766 0.88090193] [ 0.88090193 1.3448907 1.051336 ... 1.1344047 1.6225255 0.88090193] ... [ 0.88090193 1.3613089 0.8823448 ... 1.0759029 0.5149466 0.88090193] [ 0.88090193 0.8989275 0.5071056 ... 1.0235946 0.33939505 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]] [[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 1.060937 1.0590717 ... 0.8418095 1.0556606 0.88090193] [ 0.88090193 0.2686116 0.9786256 ... 0.88624847 1.0384262 0.88090193] ... [ 0.88090193 1.233009 0.67543745 ... 0.5156734 0.3474797 0.88090193] [ 0.88090193 1.1394032 0.8879671 ... 1.761497 1.1448703 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]] ... [[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 1.6569003 0.8110268 ... 0.724162 1.2158096 0.88090193] [ 0.88090193 1.4812462 1.4678152 ... 0.8009762 1.1678724 0.88090193] ... [ 0.88090193 1.0127871 0.39428398 ... 0.07133746 0.79184043 0.88090193] [ 0.88090193 0.8331674 0.9107715 ... 0.95327836 0.08134359 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]] [[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 1.0847181 0.8715904 ... 0.62115896 1.1000602 0.88090193] [ 0.88090193 -0.00923073 1.0447198 ... 1.366096 0.49439365 0.88090193] ... [ 0.88090193 1.5386277 1.6153029 ... 1.4518701 0.37706834 0.88090193] [ 0.88090193 1.5833983 1.080812 ... 1.3620152 0.71455455 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 
0.88090193 0.88090193 0.88090193]] [[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 1.2802306 0.5463299 ... 1.8780892 0.97699815 0.88090193] [ 0.88090193 1.019481 1.5051186 ... 0.96767604 1.1560384 0.88090193] ... [ 0.88090193 1.075311 0.59388685 ... 1.4126413 0.47650108 0.88090193] [ 0.88090193 0.7774895 1.0571104 ... 0.85321796 0.7442906 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]]] [[[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 0.15018636 0.02781816 ... -0.34382805 0.34448713 0.26251826] [ 0.26251826 0.27108693 0.3308964 ... 0.13026404 1.418764 0.26251826] ... [ 0.26251826 0.49111938 0.3529365 ... 0.6396855 0.5395752 0.26251826] [ 0.26251826 0.06409214 0.16497505 ... 0.30559665 0.36743796 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]] [[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 0.52309066 0.9653424 ... 0.5859315 0.56037545 0.26251826] [ 0.26251826 -0.01876533 -0.09599304 ... 0.06439918 0.44560987 0.26251826] ... [ 0.26251826 0.353537 0.72009134 ... 0.8618164 1.4952887 0.26251826] [ 0.26251826 0.08907501 0.18889967 ... -0.5734154 0.7532412 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]] [[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 0.32148832 0.27211386 ... 0.06924552 -0.2234551 0.26251826] [ 0.26251826 0.753253 -0.2214157 ... 0.23271783 0.46990994 0.26251826] ... [ 0.26251826 0.39353603 -0.15461251 ... 0.8554975 0.19939728 0.26251826] [ 0.26251826 0.14743355 0.08564363 ... -0.35845116 0.4286508 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]] ... [[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 0.50678015 0.38439083 ... 0.03095147 0.32982618 0.26251826] [ 0.26251826 1.0190514 0.8414817 ... 
-0.05773288 0.7213663 0.26251826] ... [ 0.26251826 -0.22896692 0.9054315 ... 0.15425643 0.16368374 0.26251826] [ 0.26251826 0.8062775 0.36040783 ... 0.8086729 -0.62264633 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]] [[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 0.6642548 -0.00789651 ... 0.2131768 0.6322675 0.26251826] [ 0.26251826 0.55482805 0.33689362 ... -0.01104781 0.15375456 0.26251826] ... [ 0.26251826 0.14022645 0.29524234 ... 0.46107334 -0.12378085 0.26251826] [ 0.26251826 0.31551278 -0.29385218 ... 0.25233284 0.58704674 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]] [[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 -0.42208478 0.36622334 ... -0.41788343 0.27622107 0.26251826] [ 0.26251826 -0.27610943 0.11927879 ... 0.03101966 0.46260592 0.26251826] ... [ 0.26251826 0.43776155 -0.25418147 ... 1.3052133 0.35119626 0.26251826] [ 0.26251826 0.70137596 0.7351286 ... -0.26658586 0.54531264 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]]]]]; ov_res: [[[[[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.6980921 -1.5039829 ... -1.5649773 -1.5362592 -1.6662811 ] [-1.6662811 -1.7792202 -1.6629664 ... -1.8580091 -1.5652851 -1.6662811 ] ... [-1.6662811 -1.6713872 -1.5048428 ... -1.7115164 -1.7528512 -1.6662811 ] [-1.6662811 -1.8178155 -1.8730173 ... -1.70438 -1.7261381 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]] [[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.4177157 -1.7251049 ... -1.5800419 -1.6219466 -1.6662811 ] [-1.6662811 -1.6658517 -1.8717743 ... -1.7289423 -1.6886011 -1.6662811 ] ... [-1.6662811 -1.7935638 -1.595108 ... -1.986069 -1.7212054 -1.6662811 ] [-1.6662811 -1.7722201 -1.4202467 ... 
-1.581013 -1.7139498 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]] [[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.4546138 -1.6964799 ... -1.9768724 -1.6395745 -1.6662811 ] [-1.6662811 -1.7539002 -1.6896259 ... -1.6203303 -1.9464605 -1.6662811 ] ... [-1.6662811 -1.4560896 -1.7521731 ... -1.6406528 -1.8358539 -1.6662811 ] [-1.6662811 -1.8067603 -1.6511141 ... -1.656973 -1.7659005 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]] ... [[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.5858452 -1.7668186 ... -1.6695843 -1.8633614 -1.6662811 ] [-1.6662811 -1.6326494 -1.733886 ... -1.7101979 -1.7784159 -1.6662811 ] ... [-1.6662811 -1.8887815 -1.6632613 ... -1.6593585 -1.6063613 -1.6662811 ] [-1.6662811 -1.4681976 -1.8572224 ... -1.8976583 -1.7033384 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]] [[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.7039545 -1.794202 ... -1.7753885 -1.6134429 -1.6662811 ] [-1.6662811 -1.5407029 -1.9886124 ... -1.8470243 -1.6287534 -1.6662811 ] ... [-1.6662811 -1.7599385 -1.8348811 ... -1.3839666 -1.7017987 -1.6662811 ] [-1.6662811 -1.8846828 -1.9665059 ... -1.4685369 -1.691436 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]] [[-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ] [-1.6662811 -1.9250561 -1.8656641 ... -1.5657606 -1.275629 -1.6662811 ] [-1.6662811 -1.742924 -1.6082945 ... -1.7144841 -1.6993679 -1.6662811 ] ... [-1.6662811 -1.7733339 -1.8415577 ... -1.5756029 -1.637428 -1.6662811 ] [-1.6662811 -1.56747 -1.5923384 ... -1.8712568 -1.7336978 -1.6662811 ] [-1.6662811 -1.6662811 -1.6662811 ... -1.6662811 -1.6662811 -1.6662811 ]]] [[[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 0.81661326 0.572156 ... 
1.0304112 0.44463727 0.88090193] [ 0.88090193 0.97230864 1.3322576 ... 0.35559687 0.6859089 0.88090193] ... [ 0.88090193 1.050052 0.9167581 ... 0.30932736 0.43275803 0.88090193] [ 0.88090193 0.9987312 0.89902776 ... 0.75536656 0.8328512 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]] [[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 0.91330767 0.5008524 ... 0.8700781 0.50849766 0.88090193] [ 0.88090193 1.3448907 1.051336 ... 1.1344047 1.6225256 0.88090193] ... [ 0.88090193 1.3613089 0.8823448 ... 1.0759029 0.5149465 0.88090193] [ 0.88090193 0.8989275 0.5071056 ... 1.0235946 0.33939505 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]] [[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 1.060937 1.0590717 ... 0.84180945 1.0556606 0.88090193] [ 0.88090193 0.26861158 0.9786256 ... 0.88624847 1.0384262 0.88090193] ... [ 0.88090193 1.233009 0.6754374 ... 0.51567346 0.34747967 0.88090193] [ 0.88090193 1.1394032 0.8879671 ... 1.761497 1.1448703 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]] ... [[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 1.6569003 0.8110268 ... 0.724162 1.2158096 0.88090193] [ 0.88090193 1.4812462 1.467815 ... 0.8009762 1.1678724 0.88090193] ... [ 0.88090193 1.0127871 0.39428398 ... 0.07133748 0.7918405 0.88090193] [ 0.88090193 0.8331674 0.9107715 ... 0.95327836 0.08134361 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]] [[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 1.0847181 0.8715904 ... 0.62115896 1.1000602 0.88090193] [ 0.88090193 -0.0092307 1.0447198 ... 1.366096 0.49439365 0.88090193] ... [ 0.88090193 1.5386277 1.6153029 ... 1.4518701 0.37706837 0.88090193] [ 0.88090193 1.5833982 1.080812 ... 
1.3620152 0.71455455 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]] [[ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193] [ 0.88090193 1.2802306 0.5463299 ... 1.8780892 0.97699815 0.88090193] [ 0.88090193 1.019481 1.5051186 ... 0.96767604 1.1560384 0.88090193] ... [ 0.88090193 1.075311 0.59388685 ... 1.4126412 0.47650108 0.88090193] [ 0.88090193 0.7774895 1.0571104 ... 0.85321796 0.7442906 0.88090193] [ 0.88090193 0.88090193 0.88090193 ... 0.88090193 0.88090193 0.88090193]]] [[[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 0.15018636 0.02781815 ... -0.34382808 0.3444871 0.26251826] [ 0.26251826 0.27108693 0.3308964 ... 0.13026404 1.418764 0.26251826] ... [ 0.26251826 0.49111938 0.3529365 ... 0.6396856 0.5395752 0.26251826] [ 0.26251826 0.06409214 0.16497505 ... 0.30559665 0.36743796 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]] [[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 0.52309066 0.9653424 ... 0.5859315 0.56037545 0.26251826] [ 0.26251826 -0.01876534 -0.09599303 ... 0.06439918 0.44560987 0.26251826] ... [ 0.26251826 0.353537 0.7200913 ... 0.8618164 1.4952886 0.26251826] [ 0.26251826 0.08907501 0.18889965 ... -0.5734154 0.7532412 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]] [[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 0.32148832 0.27211386 ... 0.06924552 -0.2234551 0.26251826] [ 0.26251826 0.753253 -0.22141571 ... 0.23271783 0.46990994 0.26251826] ... [ 0.26251826 0.39353603 -0.1546125 ... 0.8554974 0.19939728 0.26251826] [ 0.26251826 0.14743355 0.08564364 ... -0.35845113 0.42865077 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]] ... [[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 0.5067802 0.38439083 ... 
0.03095147 0.32982618 0.26251826] [ 0.26251826 1.0190514 0.8414817 ... -0.05773287 0.72136635 0.26251826] ... [ 0.26251826 -0.2289669 0.90543145 ... 0.15425643 0.16368374 0.26251826] [ 0.26251826 0.80627745 0.36040783 ... 0.80867296 -0.62264633 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]] [[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 0.6642548 -0.00789651 ... 0.2131768 0.6322675 0.26251826] [ 0.26251826 0.5548281 0.33689365 ... -0.0110478 0.15375456 0.26251826] ... [ 0.26251826 0.14022645 0.29524234 ... 0.46107334 -0.12378085 0.26251826] [ 0.26251826 0.31551278 -0.29385215 ... 0.25233284 0.5870467 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]] [[ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826] [ 0.26251826 -0.42208478 0.36622334 ... -0.41788343 0.27622107 0.26251826] [ 0.26251826 -0.2761094 0.11927878 ... 0.03101965 0.46260592 0.26251826] ... [ 0.26251826 0.43776155 -0.25418144 ... 1.3052133 0.35119626 0.26251826] [ 0.26251826 0.70137596 0.7351286 ... -0.26658586 0.5453126 0.26251826] [ 0.26251826 0.26251826 0.26251826 ... 0.26251826 0.26251826 0.26251826]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 0, 0], 'dilations': [2, 2, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_880.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.2203 -0.9424 -0.3451 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 2.2144 (2,1,1,.,.) = -1.4406 (3,1,1,.,.) = 0.7826 [ CPUFloatType{3,1,1,1,1} ]]() %11 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%11) fw_re: [[[[[ 9.67532098e-01 -2.98662710e+00 9.49503124e-01 ... 2.85164070e+00 9.91184264e-02 1.00873411e+00] [-2.04011965e+00 8.46919596e-01 -7.26775587e-01 ... -2.07201290e+00 1.03589368e+00 -4.35209990e-01] [-5.57524157e+00 -1.05426192e+00 -1.90727782e+00 ... 3.60205865e+00 1.52762055e+00 -1.10175824e+00] ... [-2.94942904e+00 -2.31356695e-01 -2.29585791e+00 ... -7.55192423e+00 -4.53498960e-01 1.00542104e+00] [-2.25694466e+00 -3.15536880e+00 5.18996429e+00 ... -4.23374593e-01 2.38395882e+00 -9.97806013e-01] [-1.76585913e+00 -3.91727829e+00 -2.03600693e+00 ... -7.51254141e-01 9.78189528e-01 -9.71569061e-01]] [[ 2.65218449e+00 3.59615266e-01 -1.59829676e+00 ... 
-9.24102664e-01 -3.64971709e+00 -5.04112482e+00] [-1.81227803e-01 -2.79674172e+00 -1.80960310e+00 ... -1.15166390e+00 9.39948380e-01 2.56808400e+00] [ 1.72857702e+00 -1.71907294e+00 -3.66544068e-01 ... 6.89094114e+00 3.95248151e+00 -3.93541604e-01] ... [-4.99686432e+00 -2.88279533e+00 1.08368888e-01 ... -3.60844636e+00 4.86445546e-01 -2.86194181e+00] [ 1.97765028e+00 -2.99380112e+00 -1.88057077e+00 ... 8.29922974e-01 -1.21140254e+00 1.81729242e-01] [-3.87888265e+00 -1.53438032e-01 -1.07502866e+00 ... -8.43683541e-01 9.76770341e-01 2.65447712e+00]] [[ 6.76585913e-01 9.52353656e-01 -3.71069133e-01 ... 6.72328621e-02 2.08183980e+00 -6.87077999e-01] [-3.14504981e+00 -4.54078674e-01 -1.68662536e+00 ... -8.74882519e-01 2.29336429e+00 -2.47887945e+00] [ 3.67987752e-01 1.26579213e+00 -3.28106022e+00 ... -1.93942451e+00 3.12996197e+00 1.29123259e+00] ... [-8.70932519e-01 3.19161844e+00 5.95695317e-01 ... 3.23335975e-02 -3.49386692e-01 -3.54580021e+00] [ 7.36096203e-01 1.83574688e+00 -1.69577003e+00 ... -3.90143061e+00 1.96190760e-01 -5.09546548e-02] [ 1.01707137e+00 -1.89654863e+00 -2.76312619e-01 ... -2.04780984e+00 -2.31145883e+00 -9.08954144e-01]] ... [[-3.96861017e-01 -1.30849791e+00 -3.88447881e-01 ... -2.26963013e-01 6.74878418e-01 -1.19523704e+00] [-3.49529743e+00 2.29647064e+00 -1.18212700e-02 ... -5.06062865e-01 -3.75329703e-01 -3.49683356e+00] [-1.10180962e+00 -1.66901314e+00 -2.90051651e+00 ... 2.57309866e+00 -5.38252974e+00 -2.19432402e+00] ... [-2.54079318e+00 -1.84778166e+00 -1.93407869e+00 ... 6.14711404e-01 -1.68041098e+00 -2.04225326e+00] [-3.31019068e+00 3.44935000e-01 2.04664969e+00 ... 4.49617803e-01 -9.11541224e-01 -4.05217350e-01] [-3.19701314e-01 2.01751277e-01 -8.53624940e-03 ... -9.90115643e-01 -5.30649948e+00 -2.44869065e+00]] [[-4.53747797e+00 -2.29044676e-01 1.42950881e+00 ... 4.71540022e+00 -1.19094051e-01 -2.76269007e+00] [ 2.13938069e+00 5.34701824e-01 -1.95986664e+00 ... 
-1.87511218e+00 -1.01930058e+00 -7.80982375e-02] [ 2.03309536e+00 -1.72485948e+00 9.33903456e-03 ... -3.65564203e+00 -2.46168518e+00 -4.09429598e+00] ... [ 8.07297170e-01 1.78609526e+00 2.61787271e+00 ... 1.06263089e+00 2.68992972e+00 -2.40767908e+00] [ 1.50441074e+00 1.96822181e-01 2.46283627e+00 ... 1.85549676e+00 1.83860981e+00 -2.54003811e+00] [-1.47229284e-01 2.51232958e+00 -5.57148361e+00 ... -2.06202316e+00 2.06884217e+00 3.40668142e-01]] [[ 1.41677988e+00 -1.80100048e+00 1.89904940e+00 ... -1.43633509e+00 1.40938029e-01 3.63867378e+00] [-5.12587577e-02 4.51493859e-01 -3.28351998e+00 ... 6.79948092e-01 -2.76555562e+00 -3.72143388e+00] [-1.16781390e+00 -3.18469256e-02 1.00339878e+00 ... -4.26230431e-01 2.58511591e+00 -3.43589991e-01] ... [-9.45907831e-01 -1.49212885e+00 -7.91205764e-01 ... 6.32374644e-01 -1.33954644e+00 1.45351446e+00] [ 2.58177161e-01 -2.11981916e+00 -2.90957952e+00 ... 1.18174866e-01 -8.36430132e-01 3.27544975e+00] [-1.25411689e+00 2.51683068e+00 -4.98399198e-01 ... 6.35435939e-01 -2.26241279e+00 -1.50726688e+00]]] [[[ 1.29269290e+00 4.63169098e-01 1.72596693e+00 ... 1.16319823e+00 1.25322819e+00 2.29993248e+00] [ 3.87136221e-01 -3.81357312e+00 -3.09331989e+00 ... -1.46046829e+00 -2.51804256e+00 4.97763753e-02] [-8.50572705e-01 -5.19053578e-01 -2.36375332e+00 ... -2.12032962e+00 1.49186611e-01 -1.54984772e-01] ... [-6.79064274e-01 -1.88268065e+00 -2.65440524e-01 ... 7.32495427e-01 -4.58195686e+00 -4.23500419e-01] [ 1.71580148e+00 -3.28674459e+00 -3.65012884e+00 ... 2.67969728e-01 -2.22428226e+00 -1.83126390e-01] [-9.80424225e-01 -5.27872682e-01 1.13553739e+00 ... -3.46166420e+00 1.99305415e+00 -6.17231727e-02]] [[-3.05220962e+00 -1.07279658e+00 -1.63767588e+00 ... -2.06748724e+00 -3.71264100e-01 1.22403264e-01] [ 6.18453860e-01 -1.12842214e+00 2.71885633e-01 ... 2.00212097e+00 2.52207518e-01 -2.45877218e+00] [-1.77423429e+00 -2.23266101e+00 -3.40077353e+00 ... -3.35753822e+00 -1.08972836e+00 -1.93047833e+00] ... 
[-5.94729185e-02 3.44308972e-01 -1.16001368e+00 ... -1.72638047e+00 -8.17173719e-03 9.27066803e-02] [-2.08054399e+00 -1.55028749e+00 -1.31013846e+00 ... -8.32122087e-01 -9.95397508e-01 6.04388952e-01] [-2.14783478e+00 -1.41916668e+00 4.49615955e-01 ... 2.47383356e-01 -1.30569696e+00 -1.17832935e+00]] [[ 4.01452184e-02 -2.52397871e+00 -1.84991300e-01 ... -3.09495044e+00 -6.36820138e-01 1.13573432e-01] [-1.09375560e+00 -2.04278755e+00 -3.31790257e+00 ... -3.45789409e+00 -2.43131185e+00 -2.62888241e+00] [-1.55335927e+00 -1.69225240e+00 -4.03637469e-01 ... 6.61951542e-01 2.44481182e+00 -6.27731442e-01] ... [-1.86494160e+00 1.38591695e+00 1.37488484e-01 ... 1.05399132e+00 1.14817500e+00 -2.74423027e+00] [-1.92652774e+00 -1.22408795e+00 6.71438694e-01 ... -3.95936847e-01 3.44782829e-01 -1.46054149e+00] [-2.42462873e+00 -4.95225340e-01 -5.46269238e-01 ... -3.06545329e+00 2.41308808e-02 -1.89402390e+00]] ... [[-1.65439212e+00 -4.86127853e+00 -1.55425787e-01 ... 5.72113514e-01 8.04097176e-01 -1.50811779e+00] [-4.91512001e-01 2.90224075e-01 3.27070332e+00 ... -2.49267149e+00 -2.66257191e+00 -4.04341817e-01] [ 1.50783682e+00 -1.24216533e+00 -5.91862082e-01 ... -1.26659870e-01 -3.83653522e+00 -1.01368570e+00] ... [-2.21591616e+00 -9.33390141e-01 1.44246840e+00 ... -1.10387540e+00 7.67457485e-02 -5.40516376e-02] [-1.97081006e+00 -4.59484369e-01 -1.83060288e+00 ... 4.90401864e-01 -1.31269324e+00 -1.20520508e+00] [-4.37859714e-01 -1.80862427e-01 -1.50683880e+00 ... -2.01467133e+00 -1.12482786e+00 -9.52964664e-01]] [[-6.16277337e-01 -3.01043844e+00 2.84583116e+00 ... -8.25928092e-01 -1.72592115e+00 -8.29999030e-01] [-2.01886177e+00 -5.34975111e-01 2.43902206e-04 ... 6.39460325e-01 -1.72254431e+00 -3.00746322e+00] [-9.41385508e-01 -1.47701263e+00 -1.15196812e+00 ... -1.63929105e-01 -2.17381525e+00 -3.52052307e+00] ... [-4.51166248e+00 -2.21466684e+00 -1.04971480e+00 ... -2.19001293e-01 5.89897156e-01 -3.17021990e+00] [ 1.69858217e+00 -8.77675653e-01 -1.73148799e+00 ... 
-1.08042121e-01 2.54507184e-01 -1.11269212e+00] [-2.73774242e+00 -1.14965117e+00 8.74838591e-01 ... -7.02025533e-01 -5.28275132e-01 -1.09877050e+00]] [[-7.04008222e-01 -1.58724046e+00 -7.29481936e-01 ... -1.34079206e+00 -7.89240062e-01 -3.13646078e-01] [-1.59607279e+00 -1.75020480e+00 -2.68796921e-01 ... 4.68473673e-01 -1.52003968e+00 8.04626703e-01] [ 8.93759489e-01 -3.00213122e+00 -7.91854799e-01 ... 8.79917622e-01 -4.00057435e-01 -2.81163597e+00] ... [-2.60984373e+00 1.01293874e+00 2.52313495e-01 ... 6.27720952e-01 -2.45439625e+00 -9.51321125e-02] [ 2.02083588e-01 -3.80280674e-01 -1.53737640e+00 ... 1.87772465e+00 -2.47241545e+00 -1.56727588e+00] [-1.81988728e+00 -2.52942657e+00 2.45683217e+00 ... 6.45961404e-01 -1.71158230e+00 1.27783418e-01]]] [[[-6.80219889e-01 -1.39751446e+00 -1.09372997e+00 ... 7.30625153e-01 3.73024851e-01 -7.52113700e-01] [ 6.98371291e-01 -6.04701400e-01 -8.79749775e-01 ... -3.61597657e-01 -1.67836177e+00 -1.23202848e+00] [-6.46073878e-01 8.57872963e-02 -8.16658258e-01 ... 1.48822606e+00 2.22303897e-01 -7.19545484e-02] ... [ 6.85497999e-01 -1.35706556e+00 -7.43648410e-01 ... -6.12154186e-01 -1.21265650e-01 4.33875889e-01] [ 1.66615278e-01 -3.99916917e-01 -1.62911862e-01 ... -7.35420108e-01 -7.98455954e-01 -6.74987853e-01] [-1.04207385e+00 2.25974590e-01 -1.43216908e-01 ... 2.05548716e+00 -6.04961157e-01 -6.59133077e-01]] [[-1.01059413e+00 3.90656978e-01 -9.28555846e-01 ... 3.44982117e-01 -1.07487488e+00 2.45887011e-01] [-1.52350557e+00 2.03424484e-01 -1.10100889e+00 ... -3.84333789e-01 -5.99278390e-01 6.55687153e-02] [ 1.14880455e+00 -6.27578199e-02 -1.09391546e+00 ... -1.16130447e+00 1.88167483e-01 -1.29211557e+00] ... [-3.24568152e-03 4.16533351e-02 -8.60354781e-01 ... 1.86145574e-01 -9.17788029e-01 3.53048712e-01] [ 6.44965768e-01 7.51415491e-01 -1.90048635e-01 ... 3.84611458e-01 -2.30223089e-01 -2.38715708e-01] [-1.25759447e+00 -1.21898830e+00 9.72624421e-02 ... 
-1.25094402e+00 -1.19030878e-01 -1.22194946e-01]] [[ 1.33987522e+00 3.32484990e-01 -4.84569430e-01 ... 2.61587530e-01 -2.92658120e-01 -1.61362946e+00] [-1.64917529e+00 -9.73517656e-01 -1.80204010e+00 ... -1.30493551e-01 -1.77996147e+00 3.73160630e-01] [-5.07632494e-01 2.05142528e-01 1.60096884e-02 ... -1.19154561e+00 -4.14096087e-01 -8.89355898e-01] ... [ 1.35177553e-01 -1.47164315e-01 8.12204599e-01 ... -2.38175535e+00 -5.26616991e-01 -8.07637215e-01] [ 5.79002142e-01 -4.73436773e-01 -9.98483062e-01 ... -9.31345940e-01 -1.43405390e+00 -9.01558042e-01] [ 2.67344207e-01 -4.71672595e-01 4.93890077e-01 ... -9.46708441e-01 2.89001435e-01 6.00069642e-01]] ... [[-8.52331758e-01 -1.21206796e+00 -2.04174042e-01 ... -7.63193846e-01 5.62007785e-01 -4.47090089e-01] [ 1.99881405e-01 -6.10387683e-01 -3.54401886e-01 ... -1.29167616e-01 -1.03267586e+00 -1.93934649e-01] [-7.69353092e-01 -5.98813772e-01 1.72203183e-02 ... -7.77760446e-02 -1.88148662e-01 -1.23584878e+00] ... [-8.99783731e-01 -7.10776925e-01 -2.89053142e-01 ... 2.48227805e-01 6.41968250e-01 -1.64403990e-01] [-8.90588284e-01 -1.07344568e+00 -2.36441255e-01 ... -6.71507478e-01 1.19635952e+00 5.35314441e-01] [-1.12873435e+00 -1.09946406e+00 -7.50677168e-01 ... 6.26592755e-01 1.64858377e+00 1.21553123e-01]] [[ 8.59039545e-01 -7.12998986e-01 -6.00123286e-01 ... 9.02998090e-01 9.65920091e-01 7.91485190e-01] [-3.72357607e-01 -8.81560445e-01 5.45401096e-01 ... 2.34839171e-01 -1.03981113e+00 -1.41098845e+00] [ 6.39706492e-01 -5.68002999e-01 -7.57695913e-01 ... -7.05015421e-01 -3.50183874e-01 -1.29384446e+00] ... [-3.24055552e-03 -4.42930341e-01 -1.15343094e+00 ... -5.53146005e-02 -6.79668844e-01 1.01421386e-01] [ 2.09265977e-01 -9.79534030e-01 1.86818212e-01 ... -5.80157876e-01 -5.99942446e-01 1.02549040e+00] [ 2.27774531e-01 -8.94498229e-01 2.20424026e-01 ... 2.81460315e-01 -5.53078651e-01 -6.61071658e-01]] [[-2.12420374e-01 -2.94117928e-02 -8.67774844e-01 ... 
-8.10162961e-01 -1.99464664e-01 -5.20269990e-01] [ 5.49218535e-01 7.80711174e-02 -1.97165132e+00 ... -1.07538867e+00 1.79364115e-01 -6.10580921e-01] [-1.02900326e+00 -1.66404831e+00 4.90940064e-01 ... 4.04331595e-01 -6.96766615e-01 -9.02532697e-01] ... [ 3.36815268e-01 -1.78465113e-01 -6.06721520e-01 ... -2.01558858e-01 1.18399352e-01 -6.21393561e-01] [-3.21897119e-01 -2.80795276e-01 5.44374287e-02 ... -1.95740628e+00 -5.43202043e-01 -1.72195935e+00] [-4.84005421e-01 -7.67426372e-01 -1.27320349e+00 ... -4.70122516e-01 -6.14266157e-01 1.41114986e+00]]]]]; ov_res: [[[[[ 9.67532098e-01 -2.98662710e+00 9.49503124e-01 ... 2.85164070e+00 9.91184264e-02 1.00873411e+00] [-2.04011965e+00 8.46919596e-01 -7.26775587e-01 ... -2.07201290e+00 1.03589368e+00 -4.35209990e-01] [-5.57524157e+00 -1.05426192e+00 -1.90727782e+00 ... 3.60205865e+00 1.52762055e+00 -1.10175824e+00] ... [-2.94942904e+00 -2.31356695e-01 -2.29585791e+00 ... -7.55192423e+00 -4.53498960e-01 1.00542104e+00] [-2.25694466e+00 -3.15536880e+00 5.18996429e+00 ... -4.23374593e-01 2.38395882e+00 -9.97806013e-01] [-1.76585913e+00 -3.91727829e+00 -2.03600693e+00 ... -7.51254141e-01 9.78189528e-01 -9.71569061e-01]] [[ 2.65218449e+00 3.59615266e-01 -1.59829676e+00 ... -9.24102664e-01 -3.64971709e+00 -5.04112482e+00] [-1.81227803e-01 -2.79674172e+00 -1.80960310e+00 ... -1.15166390e+00 9.39948380e-01 2.56808400e+00] [ 1.72857702e+00 -1.71907294e+00 -3.66544068e-01 ... 6.89094114e+00 3.95248151e+00 -3.93541604e-01] ... [-4.99686432e+00 -2.88279533e+00 1.08368888e-01 ... -3.60844636e+00 4.86445546e-01 -2.86194181e+00] [ 1.97765028e+00 -2.99380112e+00 -1.88057077e+00 ... 8.29922974e-01 -1.21140254e+00 1.81729242e-01] [-3.87888265e+00 -1.53438032e-01 -1.07502866e+00 ... -8.43683541e-01 9.76770341e-01 2.65447712e+00]] [[ 6.76585913e-01 9.52353656e-01 -3.71069133e-01 ... 6.72328621e-02 2.08183980e+00 -6.87077999e-01] [-3.14504981e+00 -4.54078674e-01 -1.68662536e+00 ... 
-8.74882519e-01 2.29336429e+00 -2.47887945e+00] [ 3.67987752e-01 1.26579213e+00 -3.28106022e+00 ... -1.93942451e+00 3.12996197e+00 1.29123259e+00] ... [-8.70932519e-01 3.19161844e+00 5.95695317e-01 ... 3.23335975e-02 -3.49386692e-01 -3.54580021e+00] [ 7.36096203e-01 1.83574688e+00 -1.69577003e+00 ... -3.90143061e+00 1.96190760e-01 -5.09546548e-02] [ 1.01707137e+00 -1.89654863e+00 -2.76312619e-01 ... -2.04780984e+00 -2.31145883e+00 -9.08954144e-01]] ... [[-3.96861017e-01 -1.30849791e+00 -3.88447881e-01 ... -2.26963013e-01 6.74878418e-01 -1.19523704e+00] [-3.49529743e+00 2.29647064e+00 -1.18212700e-02 ... -5.06062865e-01 -3.75329703e-01 -3.49683356e+00] [-1.10180962e+00 -1.66901314e+00 -2.90051651e+00 ... 2.57309866e+00 -5.38252974e+00 -2.19432402e+00] ... [-2.54079318e+00 -1.84778166e+00 -1.93407869e+00 ... 6.14711404e-01 -1.68041098e+00 -2.04225326e+00] [-3.31019068e+00 3.44935000e-01 2.04664969e+00 ... 4.49617803e-01 -9.11541224e-01 -4.05217350e-01] [-3.19701314e-01 2.01751277e-01 -8.53624940e-03 ... -9.90115643e-01 -5.30649948e+00 -2.44869065e+00]] [[-4.53747797e+00 -2.29044676e-01 1.42950881e+00 ... 4.71540022e+00 -1.19094051e-01 -2.76269007e+00] [ 2.13938069e+00 5.34701824e-01 -1.95986664e+00 ... -1.87511218e+00 -1.01930058e+00 -7.80982375e-02] [ 2.03309536e+00 -1.72485948e+00 9.33903456e-03 ... -3.65564203e+00 -2.46168518e+00 -4.09429598e+00] ... [ 8.07297170e-01 1.78609526e+00 2.61787271e+00 ... 1.06263089e+00 2.68992972e+00 -2.40767908e+00] [ 1.50441074e+00 1.96822181e-01 2.46283627e+00 ... 1.85549676e+00 1.83860981e+00 -2.54003811e+00] [-1.47229284e-01 2.51232958e+00 -5.57148361e+00 ... -2.06202316e+00 2.06884217e+00 3.40668142e-01]] [[ 1.41677988e+00 -1.80100048e+00 1.89904940e+00 ... -1.43633509e+00 1.40938029e-01 3.63867378e+00] [-5.12587577e-02 4.51493859e-01 -3.28351998e+00 ... 6.79948092e-01 -2.76555562e+00 -3.72143388e+00] [-1.16781390e+00 -3.18469256e-02 1.00339878e+00 ... -4.26230431e-01 2.58511591e+00 -3.43589991e-01] ... 
[-9.45907831e-01 -1.49212885e+00 -7.91205764e-01 ... 6.32374644e-01 -1.33954644e+00 1.45351446e+00] [ 2.58177161e-01 -2.11981916e+00 -2.90957952e+00 ... 1.18174866e-01 -8.36430132e-01 3.27544975e+00] [-1.25411689e+00 2.51683068e+00 -4.98399198e-01 ... 6.35435939e-01 -2.26241279e+00 -1.50726688e+00]]] [[[ 1.29269290e+00 4.63169098e-01 1.72596693e+00 ... 1.16319823e+00 1.25322819e+00 2.29993248e+00] [ 3.87136221e-01 -3.81357312e+00 -3.09331989e+00 ... -1.46046829e+00 -2.51804256e+00 4.97763753e-02] [-8.50572705e-01 -5.19053578e-01 -2.36375332e+00 ... -2.12032962e+00 1.49186611e-01 -1.54984772e-01] ... [-6.79064274e-01 -1.88268065e+00 -2.65440524e-01 ... 7.32495427e-01 -4.58195686e+00 -4.23500419e-01] [ 1.71580148e+00 -3.28674459e+00 -3.65012884e+00 ... 2.67969728e-01 -2.22428226e+00 -1.83126390e-01] [-9.80424225e-01 -5.27872682e-01 1.13553739e+00 ... -3.46166420e+00 1.99305415e+00 -6.17231727e-02]] [[-3.05220962e+00 -1.07279658e+00 -1.63767588e+00 ... -2.06748724e+00 -3.71264100e-01 1.22403264e-01] [ 6.18453860e-01 -1.12842214e+00 2.71885633e-01 ... 2.00212097e+00 2.52207518e-01 -2.45877218e+00] [-1.77423429e+00 -2.23266101e+00 -3.40077353e+00 ... -3.35753822e+00 -1.08972836e+00 -1.93047833e+00] ... [-5.94729185e-02 3.44308972e-01 -1.16001368e+00 ... -1.72638047e+00 -8.17173719e-03 9.27066803e-02] [-2.08054399e+00 -1.55028749e+00 -1.31013846e+00 ... -8.32122087e-01 -9.95397508e-01 6.04388952e-01] [-2.14783478e+00 -1.41916668e+00 4.49615955e-01 ... 2.47383356e-01 -1.30569696e+00 -1.17832935e+00]] [[ 4.01452184e-02 -2.52397871e+00 -1.84991300e-01 ... -3.09495044e+00 -6.36820138e-01 1.13573432e-01] [-1.09375560e+00 -2.04278755e+00 -3.31790257e+00 ... -3.45789409e+00 -2.43131185e+00 -2.62888241e+00] [-1.55335927e+00 -1.69225240e+00 -4.03637469e-01 ... 6.61951542e-01 2.44481182e+00 -6.27731442e-01] ... [-1.86494160e+00 1.38591695e+00 1.37488484e-01 ... 1.05399132e+00 1.14817500e+00 -2.74423027e+00] [-1.92652774e+00 -1.22408795e+00 6.71438694e-01 ... 
-3.95936847e-01 3.44782829e-01 -1.46054149e+00] [-2.42462873e+00 -4.95225340e-01 -5.46269238e-01 ... -3.06545329e+00 2.41308808e-02 -1.89402390e+00]] ... [[-1.65439212e+00 -4.86127853e+00 -1.55425787e-01 ... 5.72113514e-01 8.04097176e-01 -1.50811779e+00] [-4.91512001e-01 2.90224075e-01 3.27070332e+00 ... -2.49267149e+00 -2.66257191e+00 -4.04341817e-01] [ 1.50783682e+00 -1.24216533e+00 -5.91862082e-01 ... -1.26659870e-01 -3.83653522e+00 -1.01368570e+00] ... [-2.21591616e+00 -9.33390141e-01 1.44246840e+00 ... -1.10387540e+00 7.67457485e-02 -5.40516376e-02] [-1.97081006e+00 -4.59484369e-01 -1.83060288e+00 ... 4.90401864e-01 -1.31269324e+00 -1.20520508e+00] [-4.37859714e-01 -1.80862427e-01 -1.50683880e+00 ... -2.01467133e+00 -1.12482786e+00 -9.52964664e-01]] [[-6.16277337e-01 -3.01043844e+00 2.84583116e+00 ... -8.25928092e-01 -1.72592115e+00 -8.29999030e-01] [-2.01886177e+00 -5.34975111e-01 2.43902206e-04 ... 6.39460325e-01 -1.72254431e+00 -3.00746322e+00] [-9.41385508e-01 -1.47701263e+00 -1.15196812e+00 ... -1.63929105e-01 -2.17381525e+00 -3.52052307e+00] ... [-4.51166248e+00 -2.21466684e+00 -1.04971480e+00 ... -2.19001293e-01 5.89897156e-01 -3.17021990e+00] [ 1.69858217e+00 -8.77675653e-01 -1.73148799e+00 ... -1.08042121e-01 2.54507184e-01 -1.11269212e+00] [-2.73774242e+00 -1.14965117e+00 8.74838591e-01 ... -7.02025533e-01 -5.28275132e-01 -1.09877050e+00]] [[-7.04008222e-01 -1.58724046e+00 -7.29481936e-01 ... -1.34079206e+00 -7.89240062e-01 -3.13646078e-01] [-1.59607279e+00 -1.75020480e+00 -2.68796921e-01 ... 4.68473673e-01 -1.52003968e+00 8.04626703e-01] [ 8.93759489e-01 -3.00213122e+00 -7.91854799e-01 ... 8.79917622e-01 -4.00057435e-01 -2.81163597e+00] ... [-2.60984373e+00 1.01293874e+00 2.52313495e-01 ... 6.27720952e-01 -2.45439625e+00 -9.51321125e-02] [ 2.02083588e-01 -3.80280674e-01 -1.53737640e+00 ... 1.87772465e+00 -2.47241545e+00 -1.56727588e+00] [-1.81988728e+00 -2.52942657e+00 2.45683217e+00 ... 
6.45961404e-01 -1.71158230e+00 1.27783418e-01]]] [[[-6.80219889e-01 -1.39751446e+00 -1.09372997e+00 ... 7.30625153e-01 3.73024851e-01 -7.52113700e-01] [ 6.98371291e-01 -6.04701400e-01 -8.79749775e-01 ... -3.61597657e-01 -1.67836177e+00 -1.23202848e+00] [-6.46073878e-01 8.57872963e-02 -8.16658258e-01 ... 1.48822606e+00 2.22303897e-01 -7.19545484e-02] ... [ 6.85497999e-01 -1.35706556e+00 -7.43648410e-01 ... -6.12154186e-01 -1.21265650e-01 4.33875889e-01] [ 1.66615278e-01 -3.99916917e-01 -1.62911862e-01 ... -7.35420108e-01 -7.98455954e-01 -6.74987853e-01] [-1.04207385e+00 2.25974590e-01 -1.43216908e-01 ... 2.05548716e+00 -6.04961157e-01 -6.59133077e-01]] [[-1.01059413e+00 3.90656978e-01 -9.28555846e-01 ... 3.44982117e-01 -1.07487488e+00 2.45887011e-01] [-1.52350557e+00 2.03424484e-01 -1.10100889e+00 ... -3.84333789e-01 -5.99278390e-01 6.55687153e-02] [ 1.14880455e+00 -6.27578199e-02 -1.09391546e+00 ... -1.16130447e+00 1.88167483e-01 -1.29211557e+00] ... [-3.24568152e-03 4.16533351e-02 -8.60354781e-01 ... 1.86145574e-01 -9.17788029e-01 3.53048712e-01] [ 6.44965768e-01 7.51415491e-01 -1.90048635e-01 ... 3.84611458e-01 -2.30223089e-01 -2.38715708e-01] [-1.25759447e+00 -1.21898830e+00 9.72624421e-02 ... -1.25094402e+00 -1.19030878e-01 -1.22194946e-01]] [[ 1.33987522e+00 3.32484990e-01 -4.84569430e-01 ... 2.61587530e-01 -2.92658120e-01 -1.61362946e+00] [-1.64917529e+00 -9.73517656e-01 -1.80204010e+00 ... -1.30493551e-01 -1.77996147e+00 3.73160630e-01] [-5.07632494e-01 2.05142528e-01 1.60096884e-02 ... -1.19154561e+00 -4.14096087e-01 -8.89355898e-01] ... [ 1.35177553e-01 -1.47164315e-01 8.12204599e-01 ... -2.38175535e+00 -5.26616991e-01 -8.07637215e-01] [ 5.79002142e-01 -4.73436773e-01 -9.98483062e-01 ... -9.31345940e-01 -1.43405390e+00 -9.01558042e-01] [ 2.67344207e-01 -4.71672595e-01 4.93890077e-01 ... -9.46708441e-01 2.89001435e-01 6.00069642e-01]] ... [[-8.52331758e-01 -1.21206796e+00 -2.04174042e-01 ... 
-7.63193846e-01 5.62007785e-01 -4.47090089e-01] [ 1.99881405e-01 -6.10387683e-01 -3.54401886e-01 ... -1.29167616e-01 -1.03267586e+00 -1.93934649e-01] [-7.69353092e-01 -5.98813772e-01 1.72203183e-02 ... -7.77760446e-02 -1.88148662e-01 -1.23584878e+00] ... [-8.99783731e-01 -7.10776925e-01 -2.89053142e-01 ... 2.48227805e-01 6.41968250e-01 -1.64403990e-01] [-8.90588284e-01 -1.07344568e+00 -2.36441255e-01 ... -6.71507478e-01 1.19635952e+00 5.35314441e-01] [-1.12873435e+00 -1.09946406e+00 -7.50677168e-01 ... 6.26592755e-01 1.64858377e+00 1.21553123e-01]] [[ 8.59039545e-01 -7.12998986e-01 -6.00123286e-01 ... 9.02998090e-01 9.65920091e-01 7.91485190e-01] [-3.72357607e-01 -8.81560445e-01 5.45401096e-01 ... 2.34839171e-01 -1.03981113e+00 -1.41098845e+00] [ 6.39706492e-01 -5.68002999e-01 -7.57695913e-01 ... -7.05015421e-01 -3.50183874e-01 -1.29384446e+00] ... [-3.24055552e-03 -4.42930341e-01 -1.15343094e+00 ... -5.53146005e-02 -6.79668844e-01 1.01421386e-01] [ 2.09265977e-01 -9.79534030e-01 1.86818212e-01 ... -5.80157876e-01 -5.99942446e-01 1.02549040e+00] [ 2.27774531e-01 -8.94498229e-01 2.20424026e-01 ... 2.81460315e-01 -5.53078651e-01 -6.61071658e-01]] [[-2.12420374e-01 -2.94117928e-02 -8.67774844e-01 ... -8.10162961e-01 -1.99464664e-01 -5.20269990e-01] [ 5.49218535e-01 7.80711174e-02 -1.97165132e+00 ... -1.07538867e+00 1.79364115e-01 -6.10580921e-01] [-1.02900326e+00 -1.66404831e+00 4.90940064e-01 ... 4.04331595e-01 -6.96766615e-01 -9.02532697e-01] ... [ 3.36815268e-01 -1.78465113e-01 -6.06721520e-01 ... -2.01558858e-01 1.18399352e-01 -6.21393561e-01] [-3.21897119e-01 -2.80795276e-01 5.44374287e-02 ... -1.95740628e+00 -5.43202043e-01 -1.72195935e+00] [-4.84005421e-01 -7.67426372e-01 -1.27320349e+00 ... -4.70122516e-01 -6.14266157e-01 1.41114986e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [2, 2, 2], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_882.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2, 2, 2]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.9059 -1.3402 0.5339 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.01 * 7.5321 (2,1,1,.,.) = -3.2743 (3,1,1,.,.) = -1.0410 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[ 8.71334612e-01 9.31028962e-01 8.42233717e-01 ... 7.78648019e-01 8.75995874e-01 8.56521845e-01] [ 8.55640709e-01 7.74089217e-01 9.17263508e-01 ... 7.68396914e-01 8.50086033e-01 9.73872542e-01] [ 8.48882139e-01 1.07572770e+00 9.32159185e-01 ... 8.96382570e-01 9.25052762e-01 9.49416757e-01] ... [ 8.55168164e-01 9.53345716e-01 9.42327380e-01 ... 9.01177764e-01 9.32987392e-01 7.79494047e-01] [ 8.58430684e-01 8.55926037e-01 8.59944165e-01 ... 9.80642140e-01 8.33587646e-01 1.04402518e+00] [ 1.03599024e+00 9.33633685e-01 8.28535318e-01 ... 9.40716505e-01 9.59231317e-01 9.93817329e-01]] [[ 8.58633101e-01 9.57530916e-01 9.65800345e-01 ... 9.03395593e-01 8.96371603e-01 8.76138270e-01] [ 9.25243080e-01 7.74771214e-01 9.67315197e-01 ... 9.41284060e-01 8.71925056e-01 9.91729736e-01] [ 9.61118400e-01 7.58564830e-01 7.88696408e-01 ... 
1.01194382e+00 8.29628944e-01 9.18444037e-01] ... [ 9.92483616e-01 8.83627951e-01 9.11927998e-01 ... 9.28810477e-01 9.82913256e-01 9.24229681e-01] [ 9.33217466e-01 8.76944423e-01 8.83273482e-01 ... 8.22605431e-01 7.69025207e-01 8.78269613e-01] [ 8.45048547e-01 9.38295424e-01 9.93185759e-01 ... 9.59266424e-01 8.98648441e-01 8.80281448e-01]] [[ 9.88409281e-01 8.30261528e-01 1.05106187e+00 ... 1.04756224e+00 8.63886833e-01 9.08720613e-01] [ 8.71000946e-01 9.38474536e-01 8.97306263e-01 ... 8.17797363e-01 9.23822343e-01 9.04016674e-01] [ 9.51382101e-01 1.02633619e+00 9.89798784e-01 ... 9.36388791e-01 9.03791964e-01 8.90939772e-01] ... [ 8.96262407e-01 8.67558599e-01 9.07933056e-01 ... 1.01910067e+00 8.71822476e-01 9.31002140e-01] [ 8.07308018e-01 1.04353988e+00 9.89668190e-01 ... 8.63199592e-01 9.60310161e-01 9.00899589e-01] [ 9.32357073e-01 9.47118282e-01 9.44639266e-01 ... 9.77054477e-01 9.28617120e-01 1.05021644e+00]] ... [[ 8.99957061e-01 8.21653128e-01 9.05838013e-01 ... 8.61024439e-01 7.98253417e-01 1.04904354e+00] [ 9.40586507e-01 1.00002551e+00 8.05439711e-01 ... 9.40915644e-01 8.94230723e-01 8.70119274e-01] [ 8.98842573e-01 1.02667499e+00 7.64312685e-01 ... 8.06104660e-01 8.39700460e-01 9.33607519e-01] ... [ 8.46475303e-01 7.73115754e-01 8.68094981e-01 ... 9.56444442e-01 1.00677288e+00 9.34078157e-01] [ 1.00052702e+00 8.63330126e-01 9.81262445e-01 ... 7.83282876e-01 9.76113856e-01 8.58021140e-01] [ 8.03837478e-01 9.07005966e-01 8.69628310e-01 ... 7.89660394e-01 9.79756773e-01 8.76724720e-01]] [[ 9.96446788e-01 7.36965954e-01 7.45493770e-01 ... 1.00632429e+00 1.02882397e+00 8.35688531e-01] [ 9.14612174e-01 8.24499130e-01 9.43233132e-01 ... 8.91958117e-01 8.74211669e-01 9.17216659e-01] [ 8.01791191e-01 9.31713283e-01 9.97861862e-01 ... 8.60653639e-01 9.82238233e-01 8.49341810e-01] ... [ 9.53142285e-01 9.35672283e-01 1.00793695e+00 ... 9.75745320e-01 9.97823834e-01 8.92114937e-01] [ 8.04065466e-01 9.66390491e-01 9.79028344e-01 ... 
7.96541691e-01 1.13080287e+00 8.94266069e-01] [ 7.36577392e-01 8.38021398e-01 8.33142161e-01 ... 1.06163287e+00 8.09995055e-01 8.74682069e-01]] [[ 8.76132071e-01 9.89267707e-01 9.26118851e-01 ... 9.95107889e-01 8.79219413e-01 9.59842026e-01] [ 8.89704406e-01 8.68255794e-01 7.45194316e-01 ... 9.51763690e-01 9.63231444e-01 9.75810885e-01] [ 8.80225122e-01 9.95340526e-01 9.37299371e-01 ... 8.38631749e-01 9.70184147e-01 9.49622154e-01] ... [ 1.01853776e+00 8.49023461e-01 7.93348968e-01 ... 8.38332236e-01 8.00890505e-01 9.47646737e-01] [ 8.07370305e-01 9.26957548e-01 8.84039342e-01 ... 8.21347713e-01 9.71681178e-01 8.65528226e-01] [ 8.33012581e-01 9.19504344e-01 9.56277192e-01 ... 1.07936060e+00 8.00453484e-01 9.22515392e-01]]] [[[-4.19488549e-01 -3.25545073e+00 7.01840448e+00 ... 3.51715803e+00 3.18577766e+00 1.64866388e+00] [-1.11683941e+00 -7.18371570e-01 -5.44433212e+00 ... -5.18242407e+00 -5.56512690e+00 -7.71027625e-01] [ 6.32512331e+00 -3.97739697e+00 -4.36198282e+00 ... 4.63717937e-01 -3.34901190e+00 6.82347417e-01] ... [-1.74205208e+00 -9.01217461e-01 -3.25071049e+00 ... -9.65098095e+00 3.25264835e+00 -7.67714560e-01] [-3.21898508e+00 -8.31033587e-01 7.71993995e-01 ... -4.52138758e+00 2.69489956e+00 -6.23138428e-01] [-5.20293903e+00 -1.96105838e-01 2.06819892e-01 ... -3.70177221e+00 -5.48719454e+00 -3.17678022e+00]] [[-2.66370106e+00 -3.19181752e+00 -2.16983175e+00 ... -4.12553263e+00 6.76231742e-01 -1.66244161e+00] [-4.98913050e-01 -2.20790267e+00 -3.62952423e+00 ... -5.69974136e+00 -4.20471907e-01 -1.29549062e+00] [-1.75095034e+00 1.46815419e-01 -1.01880705e+00 ... -2.03637934e+00 -3.18827486e+00 -6.41656351e+00] ... [-2.83428812e+00 -3.89016151e+00 -6.54106379e-01 ... -4.12494469e+00 -7.37664104e-01 1.34337533e+00] [-4.80275631e+00 -2.60378885e+00 -1.64980364e+00 ... -4.76351833e+00 -1.73593080e+00 -1.40811574e+00] [-2.52260685e-01 -3.76915598e+00 -5.07244444e+00 ... 
1.02434516e-01 1.81438720e+00 9.01840925e-02]] [[ 1.13882458e+00 1.06734550e+00 2.15474963e-01 ... 8.92363787e-02 -1.81473362e+00 -2.21915221e+00] [ 1.97776759e+00 3.00017405e+00 -1.89547133e+00 ... -7.05520213e-01 1.16918242e+00 -5.48044980e-01] [-1.15600471e+01 -4.68366337e+00 -5.06234407e+00 ... -5.35277414e+00 -1.21344256e+00 -5.11791754e+00] ... [-1.42887795e+00 -6.34412432e+00 6.56150341e-01 ... 7.47816920e-01 1.43786681e+00 5.61895847e-01] [-8.34662974e-01 -4.96053171e+00 -2.40812683e+00 ... -6.73600006e+00 -3.35065174e+00 -5.24471569e+00] [-3.46728086e-01 -4.18661499e+00 2.73821926e+00 ... -4.68508434e+00 -8.20897198e+00 3.09573317e+00]] ... [[ 2.19074869e+00 -5.54343939e+00 -3.09547663e+00 ... 1.32572925e+00 -7.72498608e+00 -6.66035771e-01] [ 1.24408543e+00 7.25595355e-01 -1.38224638e+00 ... -9.33125854e-01 -4.89041996e+00 2.25395393e+00] [ 2.00529242e+00 -1.77821410e+00 1.25342524e+00 ... 3.21078730e+00 -1.44819999e+00 1.22607529e+00] ... [ 1.65959680e+00 -2.83803272e+00 1.50224483e+00 ... -2.77946854e+00 -8.20391059e-01 -7.38099635e-01] [-2.61207509e+00 4.31154442e+00 -8.35252702e-01 ... -3.32980108e+00 2.12192249e+00 -3.06921434e+00] [-5.88907337e+00 -8.30505085e+00 -1.99411964e+00 ... -4.04748201e+00 5.47176600e+00 -1.89276183e+00]] [[-3.25433230e+00 -1.04977837e+01 -1.51393580e+00 ... -2.63447475e+00 -1.75761068e+00 -2.36233854e+00] [-7.25441992e-01 -4.76244879e+00 -4.71017981e+00 ... -2.14147711e+00 -2.40439558e+00 -7.61128247e-01] [ 1.05273044e+00 2.96681499e+00 -9.47772861e-01 ... -1.01309071e+01 -3.88793039e+00 1.98457086e+00] ... [ 2.64819956e+00 3.69905758e+00 -1.77555573e+00 ... -3.03251004e+00 -4.68197680e+00 2.39625549e+00] [-2.85006046e+00 -1.39661539e+00 2.34405613e+00 ... -3.02225542e+00 -1.54815102e+00 1.02175939e+00] [-3.85890913e+00 -3.47688389e+00 -9.29333091e-01 ... -7.10611629e+00 1.70882833e+00 2.05848575e-01]] [[ 3.33927345e+00 2.85601997e+00 -3.45360899e+00 ... 
-5.54876661e+00 -2.37645769e+00 1.06398332e+00] [-5.24080753e+00 5.37263584e+00 2.65949368e-01 ... 4.30252314e+00 1.67466390e+00 -1.99333429e+00] [-2.33746243e+00 -9.34478569e+00 -1.92254519e+00 ... -3.41030359e-02 -4.67777443e+00 -7.01319361e+00] ... [-7.51249850e-01 -2.35981560e+00 -2.02433825e+00 ... -3.27330732e+00 -5.46543026e+00 1.75095308e+00] [ 1.95735252e+00 5.02173805e+00 4.57162762e+00 ... 5.75396776e+00 4.69678926e+00 2.01951122e+00] [-4.08445168e+00 1.99485219e+00 -2.13790751e+00 ... -2.48456144e+00 4.12572813e+00 1.98437250e+00]]] [[[-4.61742580e-01 -7.69338012e-02 2.17528582e+00 ... 6.15853429e-01 1.84927881e-01 1.86393094e+00] [ 5.40592492e-01 -8.35750163e-01 -5.58404148e-01 ... -5.18166721e-01 4.41380471e-01 1.40640426e+00] [-4.25901949e-01 -8.11040342e-01 6.38338685e-01 ... 1.92138124e+00 1.16326642e+00 2.70207930e+00] ... [ 1.31571162e+00 -4.11614001e-01 -4.95436490e-01 ... 9.51052129e-01 -8.13388765e-01 6.16567433e-01] [ 1.83558464e-03 -4.09714639e-01 1.09671092e+00 ... -1.69114470e-02 1.05767751e+00 -8.95912588e-01] [ 3.73512387e-01 1.44736314e+00 1.19463110e+00 ... 2.95498371e-01 1.67145729e-02 1.02472687e+00]] [[ 4.83215898e-01 4.27224308e-01 1.21048880e+00 ... 1.89516377e+00 3.48261416e-01 1.05895507e+00] [ 4.98029411e-01 9.17928696e-01 1.36770380e+00 ... 7.13122129e-01 2.06305242e+00 -2.13554668e+00] [-2.43159056e-01 1.66366786e-01 7.31489599e-01 ... -7.30229557e-01 -1.91160512e+00 3.24891925e-01] ... [ 4.23990101e-01 1.83030820e+00 3.85624826e-01 ... 3.74591649e-01 7.50600755e-01 -1.50694585e+00] [ 1.59332538e+00 -9.23592389e-01 1.09454441e+00 ... -8.21476281e-01 -1.83956027e-02 -5.26630461e-01] [ 1.99575877e+00 1.32873535e+00 1.80471802e+00 ... 1.91563439e+00 2.83342719e-01 4.96996313e-01]] [[ 1.67429328e+00 1.62461448e+00 6.98935270e-01 ... 7.65496492e-02 7.17494249e-01 2.05255121e-01] [-5.61325252e-01 6.57831728e-01 1.45864463e+00 ... -5.11584461e-01 1.40470433e+00 -1.15785241e-01] [ 4.74772662e-01 -6.18655384e-01 3.79188746e-01 ... 
5.57423234e-01 5.33657908e-01 3.71947944e-01] ... [ 2.08623314e+00 1.53988576e+00 -2.73269415e-02 ... -6.01376832e-01 1.91018701e-01 -8.45934749e-02] [ 2.77534008e+00 -5.09489715e-01 -1.67956233e+00 ... 1.34887671e+00 1.15160167e-01 6.32394075e-01] [ 1.70012045e+00 1.53199005e+00 1.50221539e+00 ... -1.43820453e+00 1.89125121e-01 1.50686115e-01]] ... [[-1.49208283e+00 -9.73181903e-01 5.32346785e-01 ... 7.83709645e-01 -3.81921709e-01 1.92903590e+00] [ 1.17269099e+00 2.02064681e+00 3.86083126e-02 ... -8.03692937e-02 1.15589809e+00 8.35992575e-01] [-1.07197881e-01 3.55354607e-01 -1.30467939e+00 ... -9.14039433e-01 2.27918983e+00 1.43061686e+00] ... [-9.52215254e-01 -7.44769990e-01 1.28455806e+00 ... 1.20897746e+00 -9.87300873e-02 -4.58718657e-01] [ 6.53889835e-01 5.22641003e-01 -3.60812664e-01 ... -2.33378649e-01 -3.20201576e-01 1.11780429e+00] [ 8.06231320e-01 1.52619958e+00 1.40765524e+00 ... -6.61062896e-01 9.25431132e-01 4.19605792e-01]] [[ 1.43252277e+00 -2.00051761e+00 2.97465026e-01 ... -2.59998620e-01 1.97735190e+00 1.59427166e+00] [ 1.41327178e+00 -1.04552603e+00 1.80133367e+00 ... 2.02998161e-01 -1.03986526e+00 2.23990917e+00] [ 1.95566058e+00 -1.01304603e+00 1.60495591e+00 ... 5.87346733e-01 1.01969016e+00 1.62212157e+00] ... [ 1.00675499e+00 -1.01297545e+00 -2.67601609e-02 ... 5.40694654e-01 4.19784278e-01 6.52343035e-01] [-7.57695615e-01 1.26748109e+00 1.15779877e+00 ... 1.21547997e+00 1.21602499e+00 2.31186199e+00] [-3.09428990e-01 8.58391464e-01 1.26414537e+00 ... -3.68717790e-01 8.08341980e-01 5.85606694e-01]] [[ 1.45311832e+00 4.57353294e-01 5.99984705e-01 ... 1.48328805e+00 -1.02281880e+00 1.42800188e+00] [-1.32855940e+00 3.74952137e-01 1.37707865e+00 ... 1.54076886e+00 -1.11564088e+00 1.85182405e+00] [ 5.92318475e-01 1.73328817e-01 2.53137374e+00 ... 6.66097403e-01 4.92281616e-02 -7.17528760e-01] ... [-1.36256886e+00 1.83500171e-01 -6.88027143e-02 ... 2.83919621e+00 9.79855597e-01 5.89180470e-01] [ 2.25561881e+00 3.96149844e-01 2.25201875e-01 ... 
8.38372409e-01 1.35764122e-01 8.05904865e-01] [-2.82750654e+00 -1.65773213e-01 9.23438072e-01 ... 5.90666831e-02 2.73360038e+00 5.13498247e-01]]]]]; ov_res: [[[[[ 8.7133461e-01 9.3102896e-01 8.4223372e-01 ... 7.7864802e-01 8.7599587e-01 8.5652184e-01] [ 8.5564071e-01 7.7408916e-01 9.1726351e-01 ... 7.6839691e-01 8.5008603e-01 9.7387254e-01] [ 8.4888214e-01 1.0757277e+00 9.3215919e-01 ... 8.9638257e-01 9.2505276e-01 9.4941676e-01] ... [ 8.5516816e-01 9.5334572e-01 9.4232738e-01 ... 9.0117776e-01 9.3298739e-01 7.7949405e-01] [ 8.5843068e-01 8.5592604e-01 8.5994416e-01 ... 9.8064214e-01 8.3358765e-01 1.0440251e+00] [ 1.0359902e+00 9.3363369e-01 8.2853532e-01 ... 9.4071651e-01 9.5923132e-01 9.9381733e-01]] [[ 8.5863310e-01 9.5753092e-01 9.6580034e-01 ... 9.0339559e-01 8.9637160e-01 8.7613827e-01] [ 9.2524308e-01 7.7477121e-01 9.6731514e-01 ... 9.4128406e-01 8.7192506e-01 9.9172980e-01] [ 9.6111840e-01 7.5856483e-01 7.8869641e-01 ... 1.0119438e+00 8.2962894e-01 9.1844404e-01] ... [ 9.9248362e-01 8.8362795e-01 9.1192800e-01 ... 9.2881048e-01 9.8291326e-01 9.2422968e-01] [ 9.3321747e-01 8.7694442e-01 8.8327348e-01 ... 8.2260543e-01 7.6902521e-01 8.7826961e-01] [ 8.4504855e-01 9.3829542e-01 9.9318576e-01 ... 9.5926636e-01 8.9864844e-01 8.8028145e-01]] [[ 9.8840928e-01 8.3026153e-01 1.0510619e+00 ... 1.0475622e+00 8.6388683e-01 9.0872061e-01] [ 8.7100095e-01 9.3847454e-01 8.9730626e-01 ... 8.1779736e-01 9.2382234e-01 9.0401667e-01] [ 9.5138210e-01 1.0263362e+00 9.8979878e-01 ... 9.3638879e-01 9.0379196e-01 8.9093977e-01] ... [ 8.9626241e-01 8.6755860e-01 9.0793306e-01 ... 1.0191007e+00 8.7182248e-01 9.3100214e-01] [ 8.0730802e-01 1.0435399e+00 9.8966819e-01 ... 8.6319959e-01 9.6031016e-01 9.0089959e-01] [ 9.3235707e-01 9.4711828e-01 9.4463927e-01 ... 9.7705448e-01 9.2861712e-01 1.0502164e+00]] ... [[ 8.9995706e-01 8.2165313e-01 9.0583801e-01 ... 8.6102444e-01 7.9825348e-01 1.0490435e+00] [ 9.4058651e-01 1.0000255e+00 8.0543965e-01 ... 
9.4091564e-01 8.9423072e-01 8.7011927e-01] [ 8.9884257e-01 1.0266750e+00 7.6431268e-01 ... 8.0610466e-01 8.3970046e-01 9.3360752e-01] ... [ 8.4647530e-01 7.7311569e-01 8.6809498e-01 ... 9.5644444e-01 1.0067729e+00 9.3407816e-01] [ 1.0005270e+00 8.6333013e-01 9.8126245e-01 ... 7.8328288e-01 9.7611386e-01 8.5802114e-01] [ 8.0383748e-01 9.0700597e-01 8.6962831e-01 ... 7.8966039e-01 9.7975677e-01 8.7672472e-01]] [[ 9.9644679e-01 7.3696595e-01 7.4549383e-01 ... 1.0063243e+00 1.0288240e+00 8.3568853e-01] [ 9.1461217e-01 8.2449913e-01 9.4323313e-01 ... 8.9195812e-01 8.7421167e-01 9.1721666e-01] [ 8.0179119e-01 9.3171328e-01 9.9786186e-01 ... 8.6065364e-01 9.8223823e-01 8.4934181e-01] ... [ 9.5314229e-01 9.3567228e-01 1.0079370e+00 ... 9.7574532e-01 9.9782383e-01 8.9211494e-01] [ 8.0406547e-01 9.6639049e-01 9.7902834e-01 ... 7.9654169e-01 1.1308029e+00 8.9426607e-01] [ 7.3657739e-01 8.3802140e-01 8.3314216e-01 ... 1.0616329e+00 8.0999506e-01 8.7468207e-01]] [[ 8.7613207e-01 9.8926771e-01 9.2611885e-01 ... 9.9510789e-01 8.7921941e-01 9.5984203e-01] [ 8.8970441e-01 8.6825579e-01 7.4519426e-01 ... 9.5176369e-01 9.6323144e-01 9.7581089e-01] [ 8.8022512e-01 9.9534053e-01 9.3729937e-01 ... 8.3863175e-01 9.7018415e-01 9.4962215e-01] ... [ 1.0185378e+00 8.4902346e-01 7.9334897e-01 ... 8.3833224e-01 8.0089051e-01 9.4764674e-01] [ 8.0737036e-01 9.2695755e-01 8.8403934e-01 ... 8.2134771e-01 9.7168118e-01 8.6552823e-01] [ 8.3301258e-01 9.1950434e-01 9.5627719e-01 ... 1.0793606e+00 8.0045348e-01 9.2251539e-01]]] [[[-4.1948855e-01 -3.2554505e+00 7.0184045e+00 ... 3.5171580e+00 3.1857774e+00 1.6486639e+00] [-1.1168394e+00 -7.1837157e-01 -5.4443321e+00 ... -5.1824241e+00 -5.5651274e+00 -7.7102762e-01] [ 6.3251233e+00 -3.9773970e+00 -4.3619828e+00 ... 4.6371788e-01 -3.3490121e+00 6.8234742e-01] ... [-1.7420521e+00 -9.0121746e-01 -3.2507105e+00 ... -9.6509809e+00 3.2526484e+00 -7.6771456e-01] [-3.2189851e+00 -8.3103359e-01 7.7199388e-01 ... 
-4.5213876e+00 2.6948993e+00 -6.2313843e-01] [-5.2029390e+00 -1.9610588e-01 2.0681995e-01 ... -3.7017722e+00 -5.4871945e+00 -3.1767802e+00]] [[-2.6637013e+00 -3.1918175e+00 -2.1698318e+00 ... -4.1255326e+00 6.7623168e-01 -1.6624416e+00] [-4.9891305e-01 -2.2079027e+00 -3.6295242e+00 ... -5.6997414e+00 -4.2047191e-01 -1.2954906e+00] [-1.7509503e+00 1.4681548e-01 -1.0188071e+00 ... -2.0363793e+00 -3.1882749e+00 -6.4165635e+00] ... [-2.8342881e+00 -3.8901615e+00 -6.5410638e-01 ... -4.1249447e+00 -7.3766410e-01 1.3433752e+00] [-4.8027563e+00 -2.6037889e+00 -1.6498036e+00 ... -4.7635183e+00 -1.7359308e+00 -1.4081157e+00] [-2.5226071e-01 -3.7691562e+00 -5.0724444e+00 ... 1.0243448e-01 1.8143872e+00 9.0184048e-02]] [[ 1.1388246e+00 1.0673454e+00 2.1547495e-01 ... 8.9236327e-02 -1.8147336e+00 -2.2191522e+00] [ 1.9777677e+00 3.0001738e+00 -1.8954713e+00 ... -7.0552021e-01 1.1691825e+00 -5.4804498e-01] [-1.1560047e+01 -4.6836634e+00 -5.0623441e+00 ... -5.3527741e+00 -1.2134426e+00 -5.1179175e+00] ... [-1.4288779e+00 -6.3441243e+00 6.5615034e-01 ... 7.4781698e-01 1.4378669e+00 5.6189585e-01] [-8.3466297e-01 -4.9605317e+00 -2.4081271e+00 ... -6.7360001e+00 -3.3506517e+00 -5.2447157e+00] [-3.4672809e-01 -4.1866150e+00 2.7382190e+00 ... -4.6850843e+00 -8.2089720e+00 3.0957332e+00]] ... [[ 2.1907489e+00 -5.5434394e+00 -3.0954764e+00 ... 1.3257291e+00 -7.7249866e+00 -6.6603577e-01] [ 1.2440854e+00 7.2559547e-01 -1.3822464e+00 ... -9.3312585e-01 -4.8904200e+00 2.2539539e+00] [ 2.0052922e+00 -1.7782141e+00 1.2534252e+00 ... 3.2107873e+00 -1.4482000e+00 1.2260752e+00] ... [ 1.6595968e+00 -2.8380327e+00 1.5022449e+00 ... -2.7794688e+00 -8.2039106e-01 -7.3809963e-01] [-2.6120751e+00 4.3115444e+00 -8.3525270e-01 ... -3.3298011e+00 2.1219227e+00 -3.0692143e+00] [-5.8890734e+00 -8.3050508e+00 -1.9941196e+00 ... -4.0474820e+00 5.4717655e+00 -1.8927618e+00]] [[-3.2543323e+00 -1.0497784e+01 -1.5139357e+00 ... 
-2.6344748e+00 -1.7576107e+00 -2.3623385e+00] [-7.2544199e-01 -4.7624488e+00 -4.7101798e+00 ... -2.1414771e+00 -2.4043956e+00 -7.6112825e-01] [ 1.0527304e+00 2.9668150e+00 -9.4777280e-01 ... -1.0130907e+01 -3.8879306e+00 1.9845707e+00] ... [ 2.6481998e+00 3.6990576e+00 -1.7755557e+00 ... -3.0325100e+00 -4.6819768e+00 2.3962553e+00] [-2.8500605e+00 -1.3966154e+00 2.3440559e+00 ... -3.0222554e+00 -1.5481510e+00 1.0217593e+00] [-3.8589091e+00 -3.4768836e+00 -9.2933303e-01 ... -7.1061168e+00 1.7088282e+00 2.0584860e-01]] [[ 3.3392732e+00 2.8560197e+00 -3.4536088e+00 ... -5.5487671e+00 -2.3764577e+00 1.0639833e+00] [-5.2408075e+00 5.3726358e+00 2.6594940e-01 ... 4.3025231e+00 1.6746640e+00 -1.9933343e+00] [-2.3374624e+00 -9.3447857e+00 -1.9225452e+00 ... -3.4103017e-02 -4.6777744e+00 -7.0131936e+00] ... [-7.5124985e-01 -2.3598158e+00 -2.0243382e+00 ... -3.2733073e+00 -5.4654303e+00 1.7509531e+00] [ 1.9573526e+00 5.0217376e+00 4.5716276e+00 ... 5.7539678e+00 4.6967893e+00 2.0195112e+00] [-4.0844517e+00 1.9948521e+00 -2.1379075e+00 ... -2.4845614e+00 4.1257281e+00 1.9843725e+00]]] [[[-4.6174261e-01 -7.6933779e-02 2.1752858e+00 ... 6.1585343e-01 1.8492788e-01 1.8639308e+00] [ 5.4059249e-01 -8.3575022e-01 -5.5840415e-01 ... -5.1816666e-01 4.4138047e-01 1.4064043e+00] [-4.2590198e-01 -8.1104034e-01 6.3833869e-01 ... 1.9213811e+00 1.1632664e+00 2.7020793e+00] ... [ 1.3157116e+00 -4.1161403e-01 -4.9543643e-01 ... 9.5105213e-01 -8.1338876e-01 6.1656743e-01] [ 1.8356054e-03 -4.0971464e-01 1.0967109e+00 ... -1.6911419e-02 1.0576775e+00 -8.9591259e-01] [ 3.7351239e-01 1.4473631e+00 1.1946312e+00 ... 2.9549834e-01 1.6714564e-02 1.0247269e+00]] [[ 4.8321590e-01 4.2722431e-01 1.2104888e+00 ... 1.8951638e+00 3.4826145e-01 1.0589551e+00] [ 4.9802941e-01 9.1792864e-01 1.3677038e+00 ... 7.1312213e-01 2.0630524e+00 -2.1355469e+00] [-2.4315906e-01 1.6636679e-01 7.3148960e-01 ... -7.3022962e-01 -1.9116051e+00 3.2489192e-01] ... [ 4.2399010e-01 1.8303082e+00 3.8562483e-01 ... 
3.7459165e-01 7.5060076e-01 -1.5069458e+00] [ 1.5933254e+00 -9.2359239e-01 1.0945444e+00 ... -8.2147628e-01 -1.8395625e-02 -5.2663046e-01] [ 1.9957588e+00 1.3287354e+00 1.8047180e+00 ... 1.9156344e+00 2.8334272e-01 4.9699631e-01]] [[ 1.6742933e+00 1.6246146e+00 6.9893527e-01 ... 7.6549649e-02 7.1749425e-01 2.0525514e-01] [-5.6132525e-01 6.5783173e-01 1.4586446e+00 ... -5.1158452e-01 1.4047043e+00 -1.1578526e-01] [ 4.7477266e-01 -6.1865538e-01 3.7918875e-01 ... 5.5742323e-01 5.3365791e-01 3.7194797e-01] ... [ 2.0862331e+00 1.5398856e+00 -2.7326928e-02 ... -6.0137683e-01 1.9101872e-01 -8.4593460e-02] [ 2.7753401e+00 -5.0948977e-01 -1.6795623e+00 ... 1.3488767e+00 1.1516016e-01 6.3239408e-01] [ 1.7001206e+00 1.5319902e+00 1.5022154e+00 ... -1.4382045e+00 1.8912512e-01 1.5068613e-01]] ... [[-1.4920830e+00 -9.7318184e-01 5.3234679e-01 ... 7.8370959e-01 -3.8192171e-01 1.9290358e+00] [ 1.1726910e+00 2.0206468e+00 3.8608305e-02 ... -8.0369316e-02 1.1558981e+00 8.3599257e-01] [-1.0719791e-01 3.5535461e-01 -1.3046794e+00 ... -9.1403943e-01 2.2791898e+00 1.4306169e+00] ... [-9.5221531e-01 -7.4476999e-01 1.2845581e+00 ... 1.2089773e+00 -9.8730072e-02 -4.5871866e-01] [ 6.5388983e-01 5.2264100e-01 -3.6081269e-01 ... -2.3337865e-01 -3.2020155e-01 1.1178043e+00] [ 8.0623132e-01 1.5261996e+00 1.4076551e+00 ... -6.6106290e-01 9.2543113e-01 4.1960579e-01]] [[ 1.4325227e+00 -2.0005176e+00 2.9746503e-01 ... -2.5999862e-01 1.9773518e+00 1.5942715e+00] [ 1.4132718e+00 -1.0455260e+00 1.8013335e+00 ... 2.0299816e-01 -1.0398654e+00 2.2399092e+00] [ 1.9556606e+00 -1.0130461e+00 1.6049559e+00 ... 5.8734673e-01 1.0196902e+00 1.6221215e+00] ... [ 1.0067550e+00 -1.0129756e+00 -2.6760152e-02 ... 5.4069465e-01 4.1978428e-01 6.5234309e-01] [-7.5769567e-01 1.2674811e+00 1.1577989e+00 ... 1.2154800e+00 1.2160250e+00 2.3118620e+00] [-3.0942899e-01 8.5839146e-01 1.2641454e+00 ... -3.6871779e-01 8.0834198e-01 5.8560669e-01]] [[ 1.4531183e+00 4.5735329e-01 5.9998471e-01 ... 
1.4832880e+00 -1.0228188e+00 1.4280018e+00] [-1.3285595e+00 3.7495211e-01 1.3770787e+00 ... 1.5407690e+00 -1.1156408e+00 1.8518242e+00] [ 5.9231848e-01 1.7332883e-01 2.5313737e+00 ... 6.6609734e-01 4.9228150e-02 -7.1752882e-01] ... [-1.3625689e+00 1.8350017e-01 -6.8802692e-02 ... 2.8391962e+00 9.7985560e-01 5.8918047e-01] [ 2.2556188e+00 3.9614984e-01 2.2520189e-01 ... 8.3837241e-01 1.3576414e-01 8.0590481e-01] [-2.8275065e+00 -1.6577323e-01 9.2343807e-01 ... 5.9066676e-02 2.7336004e+00 5.1349825e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 1, 1], 'bias_shape': [1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_884.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.112401}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.9935 (2,1,1,.,.) = 0.1305 (3,1,1,.,.) = -0.1556 [ CPUFloatType{3,1,1,1,1} ]]() %11 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%11) fw_re: [[[[[ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] ... [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093]] [[ 1.0182133 0.01897627 -0.83214796 ... -1.4032731 1.2493883 0.753247 ] [ 0.98699164 1.0281332 0.09046625 ... 0.8566576 0.52976096 -1.3156073 ] [-0.05765829 1.5584571 0.5136752 ... 1.3837546 -1.1844116 0.84687865] ... [ 0.21076211 -1.2963579 -0.12202031 ... 
-0.80570084 -1.1043491 0.33876246] [-1.4399632 0.8962007 0.28741473 ... 0.07809758 -1.1474212 1.3756613 ] [ 0.26746818 0.36454642 -0.80464214 ... 1.1647084 -0.48306683 1.6230606 ]] [[ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] ... [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093]] ... [[ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] ... [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093]] [[ 0.5089146 0.09506617 -0.08277486 ... -0.48623922 0.08306488 1.30668 ] [-0.51388973 -1.7929986 -0.60839176 ... 0.6713146 0.9201087 -0.35779428] [-0.40816882 -0.16499984 -1.9247984 ... 0.9928586 1.4293566 1.4698032 ] ... [ 0.17690155 0.7146134 0.4135535 ... 2.1134508 -0.6588344 1.9849979 ] [-0.06558206 0.9261155 0.681739 ... -0.44929102 -1.1893731 -1.7981528 ] [ 1.2502799 0.34785542 0.7840805 ... 0.20004472 1.6472653 1.0769659 ]] [[ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] ... [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 
0.11240093 0.11240093 0.11240093]]]]]; ov_res: [[[[[ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] ... [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093]] [[ 1.0182133 0.01897627 -0.83214796 ... -1.4032731 1.2493883 0.753247 ] [ 0.98699164 1.0281332 0.09046625 ... 0.8566576 0.52976096 -1.3156073 ] [-0.05765829 1.5584571 0.5136752 ... 1.3837546 -1.1844116 0.84687865] ... [ 0.21076211 -1.2963579 -0.12202031 ... -0.80570084 -1.1043491 0.33876246] [-1.4399632 0.8962007 0.28741473 ... 0.07809758 -1.1474212 1.3756613 ] [ 0.26746818 0.36454642 -0.80464214 ... 1.1647084 -0.48306683 1.6230606 ]] [[ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] ... [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093]] ... [[ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] ... [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093]] [[ 0.5089146 0.09506617 -0.08277486 ... -0.48623922 0.08306488 1.30668 ] [-0.51388973 -1.7929986 -0.60839176 ... 
0.6713146 0.9201087 -0.35779428] [-0.40816882 -0.16499984 -1.9247984 ... 0.9928586 1.4293566 1.4698032 ] ... [ 0.17690155 0.7146134 0.4135535 ... 2.1134508 -0.6588344 1.9849979 ] [-0.06558206 0.9261155 0.681739 ... -0.44929102 -1.1893731 -1.7981528 ] [ 1.2502799 0.34785542 0.7840805 ... 0.20004472 1.6472653 1.0769659 ]] [[ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] ... [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093] [ 0.11240093 0.11240093 0.11240093 ... 0.11240093 0.11240093 0.11240093]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_886.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.7874 0.2323 -0.1284 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.8111 (2,1,1,.,.) = 1.2135 (3,1,1,.,.) = 0.2122 (1,2,1,.,.) = -0.3043 (2,2,1,.,.) = -1.0693 (3,2,1,.,.) = 0.3400 (1,3,1,.,.) = 0.4232 (2,3,1,.,.) = 1.2535 (3,3,1,.,.) = -0.4226 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[ 1.65135241e+00 1.29133821e+00 1.72502661e+00 ... -3.33117056e+00 -7.43942082e-01 3.39849651e-01] [-1.69890165e+00 -2.55657434e-02 -1.23231959e+00 ... -1.78878856e+00 -5.16136789e+00 -1.22409463e+00] [-1.09543228e+00 -2.44258702e-01 1.30545378e+00 ... -3.29470682e+00 -5.84511995e-01 -2.70009613e+00] ... [-9.75402653e-01 -3.18053532e+00 -8.73112619e-01 ... -2.03562784e+00 -1.79195309e+00 2.85183930e+00] [-2.58082366e+00 -1.67886961e+00 -3.15414762e+00 ... -2.46958065e+00 -2.43127108e+00 2.56707692e+00] [-2.48486578e-01 2.33343339e+00 1.32699943e+00 ... -3.18962002e+00 -4.97434527e-01 -1.43027472e+00]] [[-2.97742128e+00 -3.38510132e+00 -4.97972727e-01 ... 
-1.91236758e+00 1.31322622e-01 -2.26066518e+00] [-4.38951397e+00 1.71895099e+00 -4.56913710e-02 ... -1.43330646e+00 2.84107828e+00 3.68980825e-01] [-3.80763292e+00 -9.41293240e-01 5.21151447e+00 ... -1.11791730e-01 -1.19233656e+00 1.34464002e+00] ... [ 1.23439360e+00 1.69119358e+00 -2.83263731e+00 ... -4.00235558e+00 1.44421399e-01 1.03914082e-01] [-3.36335254e+00 -9.12878275e-01 -2.35710740e+00 ... -5.48470163e+00 -1.42044830e+00 7.30638444e-01] [ 1.20728493e+00 -1.43862188e+00 -2.77531624e+00 ... 1.75610757e+00 3.40469062e-01 -7.58363247e-01]] [[-5.20330858e+00 -4.75179404e-01 2.66904593e+00 ... -3.32282639e+00 -1.71051097e+00 -5.76483250e+00] [-1.69056988e+00 -3.11520958e+00 -3.22134709e+00 ... -3.18429804e+00 -1.49399412e+00 -2.63346553e-01] [ 1.81714916e+00 -3.04207754e+00 -5.66836262e+00 ... -1.60897636e+00 -2.16296744e+00 2.43022490e+00] ... [-2.29971242e+00 1.46070099e+00 -3.21069896e-01 ... -9.55070615e-01 -7.97045827e-01 -4.66113389e-01] [-4.03312969e+00 -1.78529954e+00 -9.68207717e-01 ... -4.03257668e-01 -2.15897512e+00 1.13610983e-01] [ 1.79805493e+00 -3.03718877e+00 -1.79812431e+00 ... -1.53321385e+00 -1.73580897e+00 1.56691670e+00]] [[-2.79817414e+00 -8.32184911e-01 -4.24663830e+00 ... -4.60990572e+00 3.08902681e-01 1.66114950e+00] [ 1.37225151e+00 -2.06703043e+00 -2.45559239e+00 ... 3.62245739e-01 -7.71705270e-01 1.37137055e+00] [ 1.61174726e+00 5.80861568e-02 -1.25420702e+00 ... 9.17706490e-02 -1.12734377e+00 2.71639287e-01] ... [ 1.29116368e+00 -1.14148033e+00 -3.65877843e+00 ... -2.18155861e+00 2.72650099e+00 -1.58602190e+00] [-4.99802542e+00 -2.30110645e+00 -2.03033495e+00 ... -4.83561182e+00 2.68671393e+00 -4.49200064e-01] [-2.19506264e+00 -1.85498095e+00 -1.52934408e+00 ... 7.88706720e-01 -2.60431409e+00 1.48492503e+00]] [[-1.58154607e+00 1.85900354e+00 -1.83218765e+00 ... 2.39491010e+00 -4.36272711e-01 2.55123520e+00] [ 1.35193706e+00 -1.08884025e+00 -7.56577253e-01 ... 
-5.26830316e-01 -2.49853659e+00 -3.55641437e+00] [-3.05321932e+00 -1.01024389e+00 -8.06661725e-01 ... -1.64274693e+00 3.85571539e-01 -6.95751786e-01] ... [-4.81083214e-01 9.58890498e-01 2.23407960e+00 ... 8.70963871e-01 -1.76133454e-01 5.00028908e-01] [ 1.31674767e+00 -1.65949333e+00 2.39283252e+00 ... 1.25120997e+00 7.39285409e-01 -1.84289742e+00] [-1.33045626e+00 -4.34087610e+00 1.02917218e+00 ... -1.99906516e+00 -2.22713947e+00 -2.61085010e+00]]] [[[ 3.10567927e+00 1.00018287e+00 2.59014082e+00 ... -2.70719337e+00 -6.56498790e-01 5.10337830e-01] [-4.91605669e-01 1.68832374e+00 1.29069567e+00 ... -5.89275241e-01 -4.93387556e+00 9.33907032e-01] [-2.80650854e-02 -8.59182596e-01 3.65286040e+00 ... -1.61026871e+00 -1.93460464e+00 -2.51183319e+00] ... [ 8.68403554e-01 -1.90047598e+00 2.05988646e-01 ... 3.69854122e-01 5.99593401e-01 3.38830185e+00] [-3.44643211e+00 -1.56170881e+00 -7.59543419e-01 ... -1.67936134e+00 9.17513669e-03 2.32908821e+00] [ 4.25400883e-02 1.19748855e+00 5.11111403e+00 ... -1.68655813e+00 -8.47142339e-01 3.93902510e-02]] [[-1.14568734e+00 -2.57574677e+00 -1.58109188e+00 ... -1.40170360e+00 -4.14401799e-01 -2.24106848e-01] [-4.23305655e+00 1.83232558e+00 -5.35408854e-01 ... -4.55646217e-03 3.13859653e+00 2.34783840e+00] [-2.83519316e+00 5.30168414e-03 5.50477934e+00 ... 3.57209563e+00 -1.24262363e-01 8.38118315e-01] ... [-1.13363349e+00 3.58965564e+00 3.53681892e-02 ... -5.23969889e+00 9.80576277e-01 1.60632074e-01] [-2.44463849e+00 2.16265202e+00 -1.14934099e+00 ... -2.83311343e+00 1.33785963e+00 4.32942057e+00] [ 1.87555909e+00 -8.12578559e-01 1.64407825e+00 ... 2.63980818e+00 3.07193971e+00 2.26571113e-01]] [[-4.41514444e+00 -1.00730628e-01 3.58277512e+00 ... -1.39420879e+00 3.54696840e-01 -2.80901718e+00] [-2.28098273e+00 -1.23102355e+00 -2.42682600e+00 ... -2.59573364e+00 1.00360060e+00 -6.97690010e-01] [ 1.24508369e+00 -1.36972523e+00 -4.14446640e+00 ... -1.56844282e+00 -1.97625089e+00 3.59272575e+00] ... 
[-1.72817814e+00 3.09115005e+00 -1.06310964e+00 ... 1.56741834e+00 6.73182845e-01 -1.63721716e+00] [-1.90013933e+00 -1.60134017e+00 4.49099183e-01 ... -1.04665673e+00 -9.89129901e-01 -7.46770501e-01] [ 2.63448167e+00 -3.99298638e-01 -8.64332080e-01 ... -9.38636184e-01 9.43049908e-01 1.81072795e+00]] [[-2.05328178e+00 -1.07581401e+00 -1.09347486e+00 ... -2.27255130e+00 -4.79920536e-01 3.95873618e+00] [ 2.28633714e+00 6.03832603e-01 -5.62561274e-01 ... 2.03643751e+00 1.01792738e-01 4.09247541e+00] [ 4.45931911e+00 1.89632595e+00 9.36389089e-01 ... 2.28583887e-01 -1.76791334e+00 -3.45586449e-01] ... [ 2.31689143e+00 1.73756003e+00 -3.36900091e+00 ... -1.13758266e+00 3.87951136e+00 3.20320666e-01] [-3.65701103e+00 -5.81366897e-01 -6.53396487e-01 ... -4.11587858e+00 3.18194866e+00 4.86954510e-01] [-2.26238060e+00 -2.22854316e-02 -1.20330620e+00 ... 1.64969277e+00 -3.69633865e+00 9.51730728e-01]] [[-4.44701880e-01 5.56815434e+00 -1.07038826e-01 ... 3.16703010e+00 -4.39385682e-01 4.80567098e-01] [ 4.12417233e-01 2.76931196e-01 1.47210568e-01 ... 5.43087304e-01 -2.44228029e+00 -2.77678061e+00] [-1.00429606e+00 1.31120610e+00 -1.11351502e+00 ... 2.49324545e-01 -7.82705784e-01 1.06465292e+00] ... [ 1.30680025e+00 2.39799118e+00 3.48349118e+00 ... 8.34645867e-01 -5.97840667e-01 3.11622787e+00] [ 1.77444768e+00 -4.89962250e-01 5.10455191e-01 ... 1.11582077e+00 1.88175726e+00 -1.63271308e+00] [-2.36094832e+00 -2.53776312e+00 3.84357858e+00 ... -9.98336673e-01 -1.74884164e+00 -2.64997935e+00]]] [[[-4.54651624e-01 3.78953636e-01 -2.01336414e-01 ... 2.07369804e-01 2.91070759e-01 2.12910265e-01] [-1.91516876e-01 -5.22377014e-01 -8.23126078e-01 ... -1.75390214e-01 3.57615203e-01 -6.02736473e-01] [-1.47606567e-01 6.13839746e-01 -8.01305056e-01 ... -3.08412015e-01 9.01898265e-01 3.99379015e-01] ... [-4.80852276e-01 -1.82164162e-01 -1.04391076e-01 ... -7.75999784e-01 -7.22481370e-01 -5.01884818e-02] [ 8.13664854e-01 3.01853269e-01 -6.63643420e-01 ... 
5.46255857e-02 -7.90245712e-01 3.25502396e-01] [ 1.53401345e-01 7.35523999e-01 -1.44522226e+00 ... -2.36825958e-01 5.19103050e-01 -3.46423805e-01]] [[-4.62198317e-01 1.23328477e-01 8.49597096e-01 ... 1.14767566e-01 5.59898198e-01 -5.05861461e-01] [ 3.95756721e-01 1.62801862e-01 5.58588564e-01 ... -2.88061321e-01 3.14126909e-02 -6.32618666e-01] [ 1.53531134e-03 -1.03238977e-01 -6.77761883e-02 ... -1.42995429e+00 -1.28389999e-01 4.77953076e-01] ... [ 1.33925307e+00 -6.49183691e-01 -8.66744757e-01 ... 1.05991304e+00 -9.23024341e-02 2.35581905e-01] [ 7.96165913e-02 -1.06848955e+00 -1.33191124e-01 ... -6.49245858e-01 -9.15089607e-01 -1.37945497e+00] [-1.17763653e-02 9.25110430e-02 -1.67235601e+00 ... -1.58167511e-01 -1.00190163e+00 -8.67774040e-02]] [[ 1.41557246e-01 1.41309589e-01 -1.97171897e-01 ... -4.57416743e-01 -5.96315086e-01 -8.58853281e-01] [ 6.71996236e-01 -3.88895929e-01 3.21846306e-02 ... 1.44105434e-01 -7.88862228e-01 5.86240590e-01] [ 5.04063606e-01 -3.32166791e-01 -1.81888491e-01 ... 3.07274163e-01 3.58905196e-01 -2.89438337e-01] ... [ 1.35125548e-01 -5.10445595e-01 6.11523032e-01 ... -7.97199786e-01 -3.60964537e-01 8.33104908e-01] [-5.23418605e-01 2.31360137e-01 -3.44766855e-01 ... 6.19246006e-01 -1.01184100e-01 7.50580728e-01] [-1.82531536e-01 -8.21446419e-01 -7.46544302e-02 ... 1.13469288e-01 -8.37448001e-01 1.07753992e-01]] [[ 7.24192709e-02 4.26871419e-01 -9.83730316e-01 ... -6.45535111e-01 6.96989000e-01 -8.07752669e-01] [-1.15046822e-01 -8.70431364e-01 -4.57596719e-01 ... -4.70561624e-01 -5.87760210e-02 -1.00483370e+00] [-1.09892559e+00 -5.43274879e-01 -6.41696572e-01 ... 2.01516032e-01 6.48746073e-01 5.39985478e-01] ... [-2.45143458e-01 -9.68735456e-01 2.99488813e-01 ... -1.17145717e-01 -3.26859236e-01 -5.31639338e-01] [-1.17912039e-01 -4.79446381e-01 -2.55195796e-01 ... 1.24951363e-01 -2.96950862e-02 -7.16921389e-02] [ 3.53049219e-01 -4.81071681e-01 2.09177107e-01 ... 
-6.08978420e-02 8.70096684e-01 4.18468893e-01]] [[-1.92419022e-01 -1.46614254e+00 -2.99543023e-01 ... -1.05470665e-01 4.13444221e-01 1.19460273e+00] [ 7.49685585e-01 -3.01603138e-01 -6.11123815e-02 ... -1.06850691e-01 3.29664677e-01 5.54872304e-02] [-4.88563269e-01 -7.92983651e-01 4.92423356e-01 ... -4.76486266e-01 8.56863976e-01 -5.15247285e-01] ... [-4.83262539e-01 -3.60893637e-01 -3.83371592e-01 ... 3.13310981e-01 5.61390936e-01 -9.26602364e-01] [ 5.20018190e-02 -1.01549841e-01 1.11798143e+00 ... 2.97020584e-01 -2.56879926e-01 2.61328161e-01] [ 8.38946044e-01 -4.23497826e-01 -1.04585814e+00 ... -1.07639156e-01 1.60754383e-01 4.23094869e-01]]]]]; ov_res: [[[[[ 1.65135241e+00 1.29133821e+00 1.72502661e+00 ... -3.33117056e+00 -7.43942082e-01 3.39849651e-01] [-1.69890165e+00 -2.55657434e-02 -1.23231959e+00 ... -1.78878856e+00 -5.16136789e+00 -1.22409463e+00] [-1.09543228e+00 -2.44258702e-01 1.30545378e+00 ... -3.29470682e+00 -5.84511995e-01 -2.70009613e+00] ... [-9.75402653e-01 -3.18053532e+00 -8.73112619e-01 ... -2.03562784e+00 -1.79195309e+00 2.85183930e+00] [-2.58082366e+00 -1.67886961e+00 -3.15414762e+00 ... -2.46958065e+00 -2.43127108e+00 2.56707692e+00] [-2.48486578e-01 2.33343339e+00 1.32699943e+00 ... -3.18962002e+00 -4.97434527e-01 -1.43027472e+00]] [[-2.97742128e+00 -3.38510132e+00 -4.97972727e-01 ... -1.91236758e+00 1.31322622e-01 -2.26066518e+00] [-4.38951397e+00 1.71895099e+00 -4.56913710e-02 ... -1.43330646e+00 2.84107828e+00 3.68980825e-01] [-3.80763292e+00 -9.41293240e-01 5.21151447e+00 ... -1.11791730e-01 -1.19233656e+00 1.34464002e+00] ... [ 1.23439360e+00 1.69119358e+00 -2.83263731e+00 ... -4.00235558e+00 1.44421399e-01 1.03914082e-01] [-3.36335254e+00 -9.12878275e-01 -2.35710740e+00 ... -5.48470163e+00 -1.42044830e+00 7.30638444e-01] [ 1.20728493e+00 -1.43862188e+00 -2.77531624e+00 ... 1.75610757e+00 3.40469062e-01 -7.58363247e-01]] [[-5.20330858e+00 -4.75179404e-01 2.66904593e+00 ... 
-3.32282639e+00 -1.71051097e+00 -5.76483250e+00] [-1.69056988e+00 -3.11520958e+00 -3.22134709e+00 ... -3.18429804e+00 -1.49399412e+00 -2.63346553e-01] [ 1.81714916e+00 -3.04207754e+00 -5.66836262e+00 ... -1.60897636e+00 -2.16296744e+00 2.43022490e+00] ... [-2.29971242e+00 1.46070099e+00 -3.21069896e-01 ... -9.55070615e-01 -7.97045827e-01 -4.66113389e-01] [-4.03312969e+00 -1.78529954e+00 -9.68207717e-01 ... -4.03257668e-01 -2.15897512e+00 1.13610983e-01] [ 1.79805493e+00 -3.03718877e+00 -1.79812431e+00 ... -1.53321385e+00 -1.73580897e+00 1.56691670e+00]] [[-2.79817414e+00 -8.32184911e-01 -4.24663830e+00 ... -4.60990572e+00 3.08902681e-01 1.66114950e+00] [ 1.37225151e+00 -2.06703043e+00 -2.45559239e+00 ... 3.62245739e-01 -7.71705270e-01 1.37137055e+00] [ 1.61174726e+00 5.80861568e-02 -1.25420702e+00 ... 9.17706490e-02 -1.12734377e+00 2.71639287e-01] ... [ 1.29116368e+00 -1.14148033e+00 -3.65877843e+00 ... -2.18155861e+00 2.72650099e+00 -1.58602190e+00] [-4.99802542e+00 -2.30110645e+00 -2.03033495e+00 ... -4.83561182e+00 2.68671393e+00 -4.49200064e-01] [-2.19506264e+00 -1.85498095e+00 -1.52934408e+00 ... 7.88706720e-01 -2.60431409e+00 1.48492503e+00]] [[-1.58154607e+00 1.85900354e+00 -1.83218765e+00 ... 2.39491010e+00 -4.36272711e-01 2.55123520e+00] [ 1.35193706e+00 -1.08884025e+00 -7.56577253e-01 ... -5.26830316e-01 -2.49853659e+00 -3.55641437e+00] [-3.05321932e+00 -1.01024389e+00 -8.06661725e-01 ... -1.64274693e+00 3.85571539e-01 -6.95751786e-01] ... [-4.81083214e-01 9.58890498e-01 2.23407960e+00 ... 8.70963871e-01 -1.76133454e-01 5.00028908e-01] [ 1.31674767e+00 -1.65949333e+00 2.39283252e+00 ... 1.25120997e+00 7.39285409e-01 -1.84289742e+00] [-1.33045626e+00 -4.34087610e+00 1.02917218e+00 ... -1.99906516e+00 -2.22713947e+00 -2.61085010e+00]]] [[[ 3.10567927e+00 1.00018287e+00 2.59014082e+00 ... -2.70719337e+00 -6.56498790e-01 5.10337830e-01] [-4.91605669e-01 1.68832374e+00 1.29069567e+00 ... 
-5.89275241e-01 -4.93387556e+00 9.33907032e-01] [-2.80650854e-02 -8.59182596e-01 3.65286040e+00 ... -1.61026871e+00 -1.93460464e+00 -2.51183319e+00] ... [ 8.68403554e-01 -1.90047598e+00 2.05988646e-01 ... 3.69854122e-01 5.99593401e-01 3.38830185e+00] [-3.44643211e+00 -1.56170881e+00 -7.59543419e-01 ... -1.67936134e+00 9.17513669e-03 2.32908821e+00] [ 4.25400883e-02 1.19748855e+00 5.11111403e+00 ... -1.68655813e+00 -8.47142339e-01 3.93902510e-02]] [[-1.14568734e+00 -2.57574677e+00 -1.58109188e+00 ... -1.40170360e+00 -4.14401799e-01 -2.24106848e-01] [-4.23305655e+00 1.83232558e+00 -5.35408854e-01 ... -4.55646217e-03 3.13859653e+00 2.34783840e+00] [-2.83519316e+00 5.30168414e-03 5.50477934e+00 ... 3.57209563e+00 -1.24262363e-01 8.38118315e-01] ... [-1.13363349e+00 3.58965564e+00 3.53681892e-02 ... -5.23969889e+00 9.80576277e-01 1.60632074e-01] [-2.44463849e+00 2.16265202e+00 -1.14934099e+00 ... -2.83311343e+00 1.33785963e+00 4.32942057e+00] [ 1.87555909e+00 -8.12578559e-01 1.64407825e+00 ... 2.63980818e+00 3.07193971e+00 2.26571113e-01]] [[-4.41514444e+00 -1.00730628e-01 3.58277512e+00 ... -1.39420879e+00 3.54696840e-01 -2.80901718e+00] [-2.28098273e+00 -1.23102355e+00 -2.42682600e+00 ... -2.59573364e+00 1.00360060e+00 -6.97690010e-01] [ 1.24508369e+00 -1.36972523e+00 -4.14446640e+00 ... -1.56844282e+00 -1.97625089e+00 3.59272575e+00] ... [-1.72817814e+00 3.09115005e+00 -1.06310964e+00 ... 1.56741834e+00 6.73182845e-01 -1.63721716e+00] [-1.90013933e+00 -1.60134017e+00 4.49099183e-01 ... -1.04665673e+00 -9.89129901e-01 -7.46770501e-01] [ 2.63448167e+00 -3.99298638e-01 -8.64332080e-01 ... -9.38636184e-01 9.43049908e-01 1.81072795e+00]] [[-2.05328178e+00 -1.07581401e+00 -1.09347486e+00 ... -2.27255130e+00 -4.79920536e-01 3.95873618e+00] [ 2.28633714e+00 6.03832603e-01 -5.62561274e-01 ... 2.03643751e+00 1.01792738e-01 4.09247541e+00] [ 4.45931911e+00 1.89632595e+00 9.36389089e-01 ... 2.28583887e-01 -1.76791334e+00 -3.45586449e-01] ... 
[ 2.31689143e+00 1.73756003e+00 -3.36900091e+00 ... -1.13758266e+00 3.87951136e+00 3.20320666e-01] [-3.65701103e+00 -5.81366897e-01 -6.53396487e-01 ... -4.11587858e+00 3.18194866e+00 4.86954510e-01] [-2.26238060e+00 -2.22854316e-02 -1.20330620e+00 ... 1.64969277e+00 -3.69633865e+00 9.51730728e-01]] [[-4.44701880e-01 5.56815434e+00 -1.07038826e-01 ... 3.16703010e+00 -4.39385682e-01 4.80567098e-01] [ 4.12417233e-01 2.76931196e-01 1.47210568e-01 ... 5.43087304e-01 -2.44228029e+00 -2.77678061e+00] [-1.00429606e+00 1.31120610e+00 -1.11351502e+00 ... 2.49324545e-01 -7.82705784e-01 1.06465292e+00] ... [ 1.30680025e+00 2.39799118e+00 3.48349118e+00 ... 8.34645867e-01 -5.97840667e-01 3.11622787e+00] [ 1.77444768e+00 -4.89962250e-01 5.10455191e-01 ... 1.11582077e+00 1.88175726e+00 -1.63271308e+00] [-2.36094832e+00 -2.53776312e+00 3.84357858e+00 ... -9.98336673e-01 -1.74884164e+00 -2.64997935e+00]]] [[[-4.54651624e-01 3.78953636e-01 -2.01336414e-01 ... 2.07369804e-01 2.91070759e-01 2.12910265e-01] [-1.91516876e-01 -5.22377014e-01 -8.23126078e-01 ... -1.75390214e-01 3.57615203e-01 -6.02736473e-01] [-1.47606567e-01 6.13839746e-01 -8.01305056e-01 ... -3.08412015e-01 9.01898265e-01 3.99379015e-01] ... [-4.80852276e-01 -1.82164162e-01 -1.04391076e-01 ... -7.75999784e-01 -7.22481370e-01 -5.01884818e-02] [ 8.13664854e-01 3.01853269e-01 -6.63643420e-01 ... 5.46255857e-02 -7.90245712e-01 3.25502396e-01] [ 1.53401345e-01 7.35523999e-01 -1.44522226e+00 ... -2.36825958e-01 5.19103050e-01 -3.46423805e-01]] [[-4.62198317e-01 1.23328477e-01 8.49597096e-01 ... 1.14767566e-01 5.59898198e-01 -5.05861461e-01] [ 3.95756721e-01 1.62801862e-01 5.58588564e-01 ... -2.88061321e-01 3.14126909e-02 -6.32618666e-01] [ 1.53531134e-03 -1.03238977e-01 -6.77761883e-02 ... -1.42995429e+00 -1.28389999e-01 4.77953076e-01] ... [ 1.33925307e+00 -6.49183691e-01 -8.66744757e-01 ... 1.05991304e+00 -9.23024341e-02 2.35581905e-01] [ 7.96165913e-02 -1.06848955e+00 -1.33191124e-01 ... 
-6.49245858e-01 -9.15089607e-01 -1.37945497e+00] [-1.17763653e-02 9.25110430e-02 -1.67235601e+00 ... -1.58167511e-01 -1.00190163e+00 -8.67774040e-02]] [[ 1.41557246e-01 1.41309589e-01 -1.97171897e-01 ... -4.57416743e-01 -5.96315086e-01 -8.58853281e-01] [ 6.71996236e-01 -3.88895929e-01 3.21846306e-02 ... 1.44105434e-01 -7.88862228e-01 5.86240590e-01] [ 5.04063606e-01 -3.32166791e-01 -1.81888491e-01 ... 3.07274163e-01 3.58905196e-01 -2.89438337e-01] ... [ 1.35125548e-01 -5.10445595e-01 6.11523032e-01 ... -7.97199786e-01 -3.60964537e-01 8.33104908e-01] [-5.23418605e-01 2.31360137e-01 -3.44766855e-01 ... 6.19246006e-01 -1.01184100e-01 7.50580728e-01] [-1.82531536e-01 -8.21446419e-01 -7.46544302e-02 ... 1.13469288e-01 -8.37448001e-01 1.07753992e-01]] [[ 7.24192709e-02 4.26871419e-01 -9.83730316e-01 ... -6.45535111e-01 6.96989000e-01 -8.07752669e-01] [-1.15046822e-01 -8.70431364e-01 -4.57596719e-01 ... -4.70561624e-01 -5.87760210e-02 -1.00483370e+00] [-1.09892559e+00 -5.43274879e-01 -6.41696572e-01 ... 2.01516032e-01 6.48746073e-01 5.39985478e-01] ... [-2.45143458e-01 -9.68735456e-01 2.99488813e-01 ... -1.17145717e-01 -3.26859236e-01 -5.31639338e-01] [-1.17912039e-01 -4.79446381e-01 -2.55195796e-01 ... 1.24951363e-01 -2.96950862e-02 -7.16921389e-02] [ 3.53049219e-01 -4.81071681e-01 2.09177107e-01 ... -6.08978420e-02 8.70096684e-01 4.18468893e-01]] [[-1.92419022e-01 -1.46614254e+00 -2.99543023e-01 ... -1.05470665e-01 4.13444221e-01 1.19460273e+00] [ 7.49685585e-01 -3.01603138e-01 -6.11123815e-02 ... -1.06850691e-01 3.29664677e-01 5.54872304e-02] [-4.88563269e-01 -7.92983651e-01 4.92423356e-01 ... -4.76486266e-01 8.56863976e-01 -5.15247285e-01] ... [-4.83262539e-01 -3.60893637e-01 -3.83371592e-01 ... 3.13310981e-01 5.61390936e-01 -9.26602364e-01] [ 5.20018190e-02 -1.01549841e-01 1.11798143e+00 ... 2.97020584e-01 -2.56879926e-01 2.61328161e-01] [ 8.38946044e-01 -4.23497826e-01 -1.04585814e+00 ... 
-1.07639156e-01 1.60754383e-01 4.23094869e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} 0] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_888.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.66623}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.5132 (2,1,1,.,.) = 1.8592 (3,1,1,.,.) = 0.5275 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[ 2.1402133 -1.6662283 -0.7038541 ... 1.444741 -1.6662283 -5.5698214 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-0.80965984 -1.6662283 0.17944193 ... -2.6854253 -1.6662283 -2.7953763 ] ... [-2.8324623 -1.6662283 -2.8767323 ... -6.787063 -1.6662283 1.0056179 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-4.1080036 -1.6662283 -1.5642617 ... 0.6826649 -1.6662283 -1.4090708 ]] [[-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] ... [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... 
-1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ]] [[-1.2924633 -1.6662283 -0.4799893 ... -2.9063654 -1.6662283 -0.16110551] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-0.1165098 -1.6662283 -0.4222659 ... 1.8822262 -1.6662283 -2.1573744 ] ... [-0.14558375 -1.6662283 -3.7392507 ... -1.629286 -1.6662283 -2.0224776 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-2.1600566 -1.6662283 -2.0112994 ... -1.4924757 -1.6662283 -1.7405214 ]] ... [[-2.071136 -1.6662283 -1.9720528 ... 1.0713665 -1.6662283 -1.0433786 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-2.8821313 -1.6662283 -0.44623435 ... -0.35195673 -1.6662283 -2.3276286 ] ... [-1.7966163 -1.6662283 -0.5972463 ... 0.89678025 -1.6662283 -3.6184435 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.8014946 -1.6662283 -4.421563 ... 1.2951472 -1.6662283 1.3203351 ]] [[-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] ... [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ]] [[-0.872684 -1.6662283 -1.545181 ... -3.0447946 -1.6662283 -0.9326939 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.5346652 -1.6662283 -1.4275904 ... -3.1115928 -1.6662283 -2.5766633 ] ... [-3.9692638 -1.6662283 -2.9036007 ... -4.1175666 -1.6662283 -4.4437904 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-0.51564837 -1.6662283 -1.2839485 ... -5.2278585 -1.6662283 0.04284453]]]]]; ov_res: [[[[[ 2.1402133 -1.6662283 -0.7038541 ... 
1.444741 -1.6662283 -5.5698214 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-0.80965984 -1.6662283 0.17944193 ... -2.6854253 -1.6662283 -2.7953763 ] ... [-2.8324623 -1.6662283 -2.8767323 ... -6.787063 -1.6662283 1.0056179 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-4.1080036 -1.6662283 -1.5642617 ... 0.6826649 -1.6662283 -1.4090708 ]] [[-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] ... [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ]] [[-1.2924633 -1.6662283 -0.4799893 ... -2.9063654 -1.6662283 -0.16110551] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-0.1165098 -1.6662283 -0.4222659 ... 1.8822262 -1.6662283 -2.1573744 ] ... [-0.14558375 -1.6662283 -3.7392507 ... -1.629286 -1.6662283 -2.0224776 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-2.1600566 -1.6662283 -2.0112994 ... -1.4924757 -1.6662283 -1.7405214 ]] ... [[-2.071136 -1.6662283 -1.9720528 ... 1.0713665 -1.6662283 -1.0433786 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-2.8821313 -1.6662283 -0.44623435 ... -0.35195673 -1.6662283 -2.3276286 ] ... [-1.7966163 -1.6662283 -0.5972463 ... 0.89678025 -1.6662283 -3.6184435 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.8014946 -1.6662283 -4.421563 ... 1.2951472 -1.6662283 1.3203351 ]] [[-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] ... [-1.6662283 -1.6662283 -1.6662283 ... 
-1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ]] [[-0.872684 -1.6662283 -1.545181 ... -3.0447946 -1.6662283 -0.9326939 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-1.5346652 -1.6662283 -1.4275904 ... -3.1115928 -1.6662283 -2.5766633 ] ... [-3.9692638 -1.6662283 -2.9036007 ... -4.1175666 -1.6662283 -4.4437904 ] [-1.6662283 -1.6662283 -1.6662283 ... -1.6662283 -1.6662283 -1.6662283 ] [-0.51564837 -1.6662283 -1.2839485 ... -5.2278585 -1.6662283 0.04284453]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 2, 2], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_890.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.3206 -2.1646 0.4168 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.7448 (2,1,1,.,.) = -0.4631 (3,1,1,.,.) = -1.8412 (1,2,1,.,.) = -0.7051 (2,2,1,.,.) = -1.2844 (3,2,1,.,.) = 1.9720 (1,3,1,.,.) = 1.0432 (2,3,1,.,.) = 0.6272 (3,3,1,.,.) = -0.7472 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[-1.82630038e+00 -1.52479625e+00 -2.94357324e+00 -3.50909042e+00 -1.86806309e+00] [-2.06279802e+00 -2.81279302e+00 -2.58838844e+00 -3.23195672e+00 -2.54678011e+00] [-8.66908729e-01 9.87839699e-03 -9.50119615e-01 -3.53291810e-01 -2.80349731e+00] [-1.19850969e+00 -2.53482342e+00 -1.19077408e+00 9.74896550e-01 -1.29946876e+00] [-4.38119590e-01 -2.85311794e+00 -7.11286664e-01 -1.18674648e+00 -9.31717277e-01]] [[ 6.49338365e-01 6.49623990e-01 3.24144244e-01 -1.42084455e+00 -3.31734037e+00] [ 1.02492082e+00 7.56600738e-01 -2.30709410e+00 -9.31684256e-01 -3.18681526e+00] [-2.74156666e+00 3.03726792e-01 -3.69773722e+00 -4.49909019e+00 -1.87839794e+00] [-1.18632674e+00 -1.41268599e+00 -4.25676346e-01 -3.39804602e+00 -3.28184366e-02] 
[-2.05506086e+00 -1.05703652e+00 4.78251576e-01 -4.50968742e-01 -4.18841481e-01]] [[-2.84081125e+00 -2.97851491e+00 -2.01760006e+00 -2.59489727e+00 -2.54831839e+00] [ 8.32977295e-02 9.06489730e-01 -9.83054698e-01 -3.46674204e+00 -1.44770873e+00] [ 6.35372162e-01 -1.71470213e+00 -1.74145699e-02 -3.00496125e+00 -2.87422037e+00] [-5.80694139e-01 -9.88644361e-02 -1.42140388e+00 2.61290193e-01 -9.50138032e-01] [-5.57959139e-01 -1.08723402e-01 5.52647114e-01 -3.80244315e-01 -7.68668234e-01]] [[-5.42514265e-01 -2.78893042e+00 1.98021412e-01 1.13968241e+00 -3.44532490e+00] [ 1.59994686e+00 -3.79068661e+00 -1.65997839e+00 -1.78273690e+00 -2.80054092e-01] [-6.13416851e-01 7.83615947e-01 -1.51244557e+00 -1.23754358e+00 1.46446407e+00] [-1.69810498e+00 -2.52308941e+00 1.20913184e+00 -1.04230046e-01 1.49336326e+00] [-1.02289152e+00 5.40516019e-01 -1.21903658e-01 1.22248805e+00 2.77996182e-01]] [[-1.64657176e+00 -2.23885798e+00 -2.28857279e-01 -2.14866495e+00 -1.19200957e+00] [-2.46244287e+00 1.51334131e+00 -1.96724510e+00 -3.32326937e+00 -2.57613182e+00] [-2.83683157e+00 -1.88762343e+00 -2.09078074e+00 -4.17169690e-01 -5.51159918e-01] [-1.66741455e+00 -1.68612742e+00 -1.22535932e+00 -1.32833767e+00 -1.20504332e+00] [-1.07438183e+00 -7.70003200e-01 -3.88298416e+00 7.14331865e-02 -3.25348186e+00]]] [[[-1.87687993e+00 -2.44367123e-01 -6.51075840e-01 -5.09159184e+00 -2.33357620e+00] [-1.12447739e-01 -2.10683084e+00 -2.25051761e+00 -3.70456600e+00 -2.66277122e+00] [-7.86471963e-01 -2.40783978e+00 -2.41217017e+00 -3.93575335e+00 -2.61899042e+00] [ 4.26556826e-01 -2.73162866e+00 -1.14327013e+00 1.74404097e+00 -3.14431167e+00] [-3.12245607e+00 -3.67399693e+00 -2.35813475e+00 -8.65820885e-01 -3.96452093e+00]] [[-1.53331733e+00 -2.30325675e+00 1.54848099e-01 -2.15419936e+00 -4.95985603e+00] [-2.74893379e+00 3.63498449e-01 -5.06931210e+00 -2.19032264e+00 -2.88241124e+00] [-3.59515190e+00 -2.05322862e-01 -4.01140308e+00 -4.97858810e+00 -2.83024883e+00] [-2.91323709e+00 -1.87010074e+00 
-1.73047245e+00 -3.55140853e+00 -2.36945724e+00] [-4.67938852e+00 -3.03088260e+00 -5.11548162e-01 -1.62156427e+00 -4.36614037e+00]] [[-3.82487011e+00 -4.01506138e+00 -1.44905114e+00 -4.70211124e+00 -3.15866089e+00] [-3.29560566e+00 -2.62585592e+00 -3.35910487e+00 -3.27298093e+00 -2.14556956e+00] [-1.11626053e+00 -2.14829159e+00 -1.00781202e+00 -2.85711479e+00 -3.06788349e+00] [-3.04875493e+00 -2.50272179e+00 -2.34546614e+00 -3.28186846e+00 -8.56698036e-01] [-2.34122801e+00 -2.06451249e+00 -8.49760652e-01 -1.40917647e+00 -7.77447462e-01]] [[-2.35048985e+00 -3.77602983e+00 -8.78713250e-01 2.06373215e-01 -6.93439865e+00] [ 5.15768290e-01 -4.12037277e+00 -3.67690897e+00 -2.96762919e+00 -7.86767244e-01] [-5.31411171e-03 -2.17899299e+00 -3.35602713e+00 -6.61398053e-01 1.81598544e+00] [-2.57866001e+00 -2.77184701e+00 -5.16797185e-01 -1.87137592e+00 4.85517502e-01] [-2.11808062e+00 -5.81884384e-01 -1.06201935e+00 -8.69811893e-01 -3.10551000e+00]] [[-1.68545794e+00 -3.55774927e+00 -2.80438352e+00 1.53042316e-01 -2.70869207e+00] [-1.22719395e+00 -1.97672081e+00 -2.51319289e+00 -3.36716986e+00 -3.71179533e+00] [-4.60331917e+00 -3.22900772e+00 -1.76630795e+00 -5.88910580e-02 -2.77910066e+00] [-3.09488535e+00 -6.25461221e-01 -1.45326209e+00 -1.14488137e+00 -1.34197474e+00] [-2.23680806e+00 -1.67331195e+00 -1.34475780e+00 -8.32509279e-01 -2.53208351e+00]]] [[[-4.49138284e-01 1.30095065e+00 5.81639147e+00 5.20610952e+00 6.25833333e-01] [ 7.24899054e-01 3.96487808e+00 4.42958641e+00 4.70530605e+00 2.32820129e+00] [ 1.36329865e+00 -2.44824827e-01 -6.22608364e-02 -1.76541698e+00 1.60475016e+00] [-1.43935382e+00 3.75360680e+00 -8.03795338e-01 -5.67451572e+00 4.00685215e+00] [-1.70611107e+00 2.50667286e+00 -1.45667815e+00 -2.03076458e+00 7.60443926e-01]] [[-2.61454964e+00 -4.43673277e+00 -2.28408575e+00 5.12702525e-01 2.95663786e+00] [-4.34327316e+00 -3.94163418e+00 1.22503924e+00 -2.91904211e-01 3.68121243e+00] [ 3.96955061e+00 -2.26510525e+00 4.45301056e+00 6.83867359e+00 
2.33723462e-01] [ 1.37355852e+00 1.52498698e+00 -3.30198717e+00 5.41519403e+00 -2.13127995e+00] [ 2.36587358e+00 -1.51558042e-01 -5.26108217e+00 6.56065702e-01 -9.96587753e-01]] [[ 1.26623464e+00 5.01209927e+00 2.37546921e+00 1.90779030e+00 3.04455042e+00] [-1.50793016e-01 -2.43973589e+00 9.84480977e-02 6.34748888e+00 -1.05457771e+00] [-4.16874886e+00 6.29672527e-01 9.46214020e-01 1.87379706e+00 5.19681931e+00] [ 3.92618597e-01 -4.69768286e+00 3.11443615e+00 -2.06148005e+00 -1.56897426e+00] [ 6.46973968e-01 2.70833254e-01 -1.94217551e+00 -2.46503496e+00 -5.07472098e-01]] [[-1.41174138e-01 2.88102961e+00 -1.16884172e+00 -5.78145695e+00 5.09453964e+00] [-2.97623301e+00 5.57594204e+00 1.78137422e+00 1.21845663e+00 -1.78725135e+00] [-9.39871550e-01 -5.16583204e+00 -6.82255387e-01 2.75239992e+00 -3.86713457e+00] [ 1.41332126e+00 3.88158941e+00 -4.17817974e+00 -2.45605326e+00 -5.32070446e+00] [ 1.46259093e+00 -2.84738064e+00 -1.27093041e+00 -5.03899050e+00 -3.60146570e+00]] [[ 2.07957411e+00 3.81988478e+00 -3.11722517e+00 1.70183289e+00 2.96304131e+00] [ 1.04295421e+00 -6.94955397e+00 2.17333364e+00 4.68282604e+00 2.75372314e+00] [ 1.27594256e+00 7.70697236e-01 4.73788887e-01 1.23999381e+00 4.51503754e-01] [ 1.17972946e+00 7.10075259e-01 -8.36279631e-01 3.95716459e-01 -1.24831438e+00] [ 6.84752107e-01 -1.51727617e+00 3.16275358e+00 -6.48395777e-01 4.28333855e+00]]]]]; ov_res: [[[[[-1.82630038e+00 -1.52479625e+00 -2.94357324e+00 -3.50909042e+00 -1.86806309e+00] [-2.06279802e+00 -2.81279302e+00 -2.58838844e+00 -3.23195672e+00 -2.54678011e+00] [-8.66908729e-01 9.87839699e-03 -9.50119615e-01 -3.53291810e-01 -2.80349731e+00] [-1.19850969e+00 -2.53482342e+00 -1.19077408e+00 9.74896550e-01 -1.29946876e+00] [-4.38119590e-01 -2.85311794e+00 -7.11286664e-01 -1.18674648e+00 -9.31717277e-01]] [[ 6.49338365e-01 6.49623990e-01 3.24144244e-01 -1.42084455e+00 -3.31734037e+00] [ 1.02492082e+00 7.56600738e-01 -2.30709410e+00 -9.31684256e-01 -3.18681526e+00] [-2.74156666e+00 3.03726792e-01 
-3.69773722e+00 -4.49909019e+00 -1.87839794e+00] [-1.18632674e+00 -1.41268599e+00 -4.25676346e-01 -3.39804602e+00 -3.28184366e-02] [-2.05506086e+00 -1.05703652e+00 4.78251576e-01 -4.50968742e-01 -4.18841481e-01]] [[-2.84081125e+00 -2.97851491e+00 -2.01760006e+00 -2.59489727e+00 -2.54831839e+00] [ 8.32977295e-02 9.06489730e-01 -9.83054698e-01 -3.46674204e+00 -1.44770873e+00] [ 6.35372162e-01 -1.71470213e+00 -1.74145699e-02 -3.00496125e+00 -2.87422037e+00] [-5.80694139e-01 -9.88644361e-02 -1.42140388e+00 2.61290193e-01 -9.50138032e-01] [-5.57959139e-01 -1.08723402e-01 5.52647114e-01 -3.80244315e-01 -7.68668234e-01]] [[-5.42514265e-01 -2.78893042e+00 1.98021412e-01 1.13968241e+00 -3.44532490e+00] [ 1.59994686e+00 -3.79068661e+00 -1.65997839e+00 -1.78273690e+00 -2.80054092e-01] [-6.13416851e-01 7.83615947e-01 -1.51244557e+00 -1.23754358e+00 1.46446407e+00] [-1.69810498e+00 -2.52308941e+00 1.20913184e+00 -1.04230046e-01 1.49336326e+00] [-1.02289152e+00 5.40516019e-01 -1.21903658e-01 1.22248805e+00 2.77996182e-01]] [[-1.64657176e+00 -2.23885798e+00 -2.28857279e-01 -2.14866495e+00 -1.19200957e+00] [-2.46244287e+00 1.51334131e+00 -1.96724510e+00 -3.32326937e+00 -2.57613182e+00] [-2.83683157e+00 -1.88762343e+00 -2.09078074e+00 -4.17169690e-01 -5.51159918e-01] [-1.66741455e+00 -1.68612742e+00 -1.22535932e+00 -1.32833767e+00 -1.20504332e+00] [-1.07438183e+00 -7.70003200e-01 -3.88298416e+00 7.14331865e-02 -3.25348186e+00]]] [[[-1.87687993e+00 -2.44367123e-01 -6.51075840e-01 -5.09159184e+00 -2.33357620e+00] [-1.12447739e-01 -2.10683084e+00 -2.25051761e+00 -3.70456600e+00 -2.66277122e+00] [-7.86471963e-01 -2.40783978e+00 -2.41217017e+00 -3.93575335e+00 -2.61899042e+00] [ 4.26556826e-01 -2.73162866e+00 -1.14327013e+00 1.74404097e+00 -3.14431167e+00] [-3.12245607e+00 -3.67399693e+00 -2.35813475e+00 -8.65820885e-01 -3.96452093e+00]] [[-1.53331733e+00 -2.30325675e+00 1.54848099e-01 -2.15419936e+00 -4.95985603e+00] [-2.74893379e+00 3.63498449e-01 -5.06931210e+00 -2.19032264e+00 
-2.88241124e+00] [-3.59515190e+00 -2.05322862e-01 -4.01140308e+00 -4.97858810e+00 -2.83024883e+00] [-2.91323709e+00 -1.87010074e+00 -1.73047245e+00 -3.55140853e+00 -2.36945724e+00] [-4.67938852e+00 -3.03088260e+00 -5.11548162e-01 -1.62156427e+00 -4.36614037e+00]] [[-3.82487011e+00 -4.01506138e+00 -1.44905114e+00 -4.70211124e+00 -3.15866089e+00] [-3.29560566e+00 -2.62585592e+00 -3.35910487e+00 -3.27298093e+00 -2.14556956e+00] [-1.11626053e+00 -2.14829159e+00 -1.00781202e+00 -2.85711479e+00 -3.06788349e+00] [-3.04875493e+00 -2.50272179e+00 -2.34546614e+00 -3.28186846e+00 -8.56698036e-01] [-2.34122801e+00 -2.06451249e+00 -8.49760652e-01 -1.40917647e+00 -7.77447462e-01]] [[-2.35048985e+00 -3.77602983e+00 -8.78713250e-01 2.06373215e-01 -6.93439865e+00] [ 5.15768290e-01 -4.12037277e+00 -3.67690897e+00 -2.96762919e+00 -7.86767244e-01] [-5.31411171e-03 -2.17899299e+00 -3.35602713e+00 -6.61398053e-01 1.81598544e+00] [-2.57866001e+00 -2.77184701e+00 -5.16797185e-01 -1.87137592e+00 4.85517502e-01] [-2.11808062e+00 -5.81884384e-01 -1.06201935e+00 -8.69811893e-01 -3.10551000e+00]] [[-1.68545794e+00 -3.55774927e+00 -2.80438352e+00 1.53042316e-01 -2.70869207e+00] [-1.22719395e+00 -1.97672081e+00 -2.51319289e+00 -3.36716986e+00 -3.71179533e+00] [-4.60331917e+00 -3.22900772e+00 -1.76630795e+00 -5.88910580e-02 -2.77910066e+00] [-3.09488535e+00 -6.25461221e-01 -1.45326209e+00 -1.14488137e+00 -1.34197474e+00] [-2.23680806e+00 -1.67331195e+00 -1.34475780e+00 -8.32509279e-01 -2.53208351e+00]]] [[[-4.49138284e-01 1.30095065e+00 5.81639147e+00 5.20610952e+00 6.25833333e-01] [ 7.24899054e-01 3.96487808e+00 4.42958641e+00 4.70530605e+00 2.32820129e+00] [ 1.36329865e+00 -2.44824827e-01 -6.22608364e-02 -1.76541698e+00 1.60475016e+00] [-1.43935382e+00 3.75360680e+00 -8.03795338e-01 -5.67451572e+00 4.00685215e+00] [-1.70611107e+00 2.50667286e+00 -1.45667815e+00 -2.03076458e+00 7.60443926e-01]] [[-2.61454964e+00 -4.43673277e+00 -2.28408575e+00 5.12702525e-01 2.95663786e+00] [-4.34327316e+00 
-3.94163418e+00 1.22503924e+00 -2.91904211e-01 3.68121243e+00] [ 3.96955061e+00 -2.26510525e+00 4.45301056e+00 6.83867359e+00 2.33723462e-01] [ 1.37355852e+00 1.52498698e+00 -3.30198717e+00 5.41519403e+00 -2.13127995e+00] [ 2.36587358e+00 -1.51558042e-01 -5.26108217e+00 6.56065702e-01 -9.96587753e-01]] [[ 1.26623464e+00 5.01209927e+00 2.37546921e+00 1.90779030e+00 3.04455042e+00] [-1.50793016e-01 -2.43973589e+00 9.84480977e-02 6.34748888e+00 -1.05457771e+00] [-4.16874886e+00 6.29672527e-01 9.46214020e-01 1.87379706e+00 5.19681931e+00] [ 3.92618597e-01 -4.69768286e+00 3.11443615e+00 -2.06148005e+00 -1.56897426e+00] [ 6.46973968e-01 2.70833254e-01 -1.94217551e+00 -2.46503496e+00 -5.07472098e-01]] [[-1.41174138e-01 2.88102961e+00 -1.16884172e+00 -5.78145695e+00 5.09453964e+00] [-2.97623301e+00 5.57594204e+00 1.78137422e+00 1.21845663e+00 -1.78725135e+00] [-9.39871550e-01 -5.16583204e+00 -6.82255387e-01 2.75239992e+00 -3.86713457e+00] [ 1.41332126e+00 3.88158941e+00 -4.17817974e+00 -2.45605326e+00 -5.32070446e+00] [ 1.46259093e+00 -2.84738064e+00 -1.27093041e+00 -5.03899050e+00 -3.60146570e+00]] [[ 2.07957411e+00 3.81988478e+00 -3.11722517e+00 1.70183289e+00 2.96304131e+00] [ 1.04295421e+00 -6.94955397e+00 2.17333364e+00 4.68282604e+00 2.75372314e+00] [ 1.27594256e+00 7.70697236e-01 4.73788887e-01 1.23999381e+00 4.51503754e-01] [ 1.17972946e+00 7.10075259e-01 -8.36279631e-01 3.95716459e-01 -1.24831438e+00] [ 6.84752107e-01 -1.51727617e+00 3.16275358e+00 -6.48395777e-01 4.28333855e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 1, 1], 'pads': [0, 0, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_892.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 1]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.1033 -0.4297 -1.5270 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.1954 (2,1,1,.,.) = 0.6068 (3,1,1,.,.) = 1.0751 (1,2,1,.,.) = 2.0845 (2,2,1,.,.) = 0.3139 (3,2,1,.,.) = 0.7306 (1,3,1,.,.) = 0.5179 (2,3,1,.,.) = 1.1613 (3,3,1,.,.) = 0.4432 [ CPUFloatType{3,3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[ 0.10333444 1.0165606 -3.4728615 ... -5.8384113 2.9797437 0.10333444] [ 0.10333444 -0.8111611 -1.0518489 ... -3.1624374 3.8833826 0.10333444] [ 0.10333444 -1.9011142 1.0561426 ... -0.63772964 -0.15248932 0.10333444] ... [ 0.10333444 1.131737 4.7270036 ... -0.01132475 -2.3891962 0.10333444] [ 0.10333444 1.6198881 3.6312132 ... -0.76009244 0.13810205 0.10333444] [ 0.10333444 1.5646253 1.0193409 ... 2.445154 0.5364987 0.10333444]] [[ 0.10333444 -3.2017288 -0.53504187 ... -0.4805054 -3.36502 0.10333444] [ 0.10333444 1.0411589 0.88656205 ... 0.64656824 6.0966544 0.10333444] [ 0.10333444 -2.831418 -0.03460018 ... -1.2658541 0.47816616 0.10333444] ... 
[ 0.10333444 0.45153522 -1.185643 ... 0.99038917 2.4864187 0.10333444] [ 0.10333444 0.5515673 0.52734697 ... 0.4800467 1.4314332 0.10333444] [ 0.10333444 0.7716286 0.77737486 ... -2.8864875 -3.6534185 0.10333444]] [[ 0.10333444 2.985113 -0.08418451 ... -4.2492876 3.3459663 0.10333444] [ 0.10333444 -3.2374012 1.2760417 ... -0.1224806 0.75170594 0.10333444] [ 0.10333444 1.1068267 -0.21574126 ... 2.110329 2.7360256 0.10333444] ... [ 0.10333444 3.5528574 2.5197835 ... 2.7166348 1.6006694 0.10333444] [ 0.10333444 -0.14978759 1.558699 ... 1.6708223 2.1809845 0.10333444] [ 0.10333444 0.45174026 2.6433864 ... -0.13642995 0.6711615 0.10333444]] [[ 0.10333444 -1.9982488 -2.8433552 ... 1.0913838 -2.886905 0.10333444] [ 0.10333444 3.845825 0.9766811 ... -0.07250506 -1.5219419 0.10333444] [ 0.10333444 -0.7981831 1.2417132 ... 4.1840043 -1.4128097 0.10333444] ... [ 0.10333444 0.8986232 1.1104217 ... 0.531368 1.6608139 0.10333444] [ 0.10333444 0.3049598 0.76393914 ... 1.4208584 -1.2677988 0.10333444] [ 0.10333444 1.5470934 2.8634882 ... -0.06313699 1.5720544 0.10333444]] [[ 0.10333444 -0.70005834 3.4990184 ... 0.4106027 -0.21788083 0.10333444] [ 0.10333444 0.35171342 1.0596652 ... -3.0475879 1.0088981 0.10333444] [ 0.10333444 -2.3803542 2.9151416 ... 0.2990712 -0.285307 0.10333444] ... [ 0.10333444 -0.24345748 2.0953906 ... 3.7264805 5.1989474 0.10333444] [ 0.10333444 1.5859954 -1.9308677 ... 0.8278721 2.5414379 0.10333444] [ 0.10333444 0.6332985 0.94603133 ... -1.7543049 -2.590681 0.10333444]]] [[[-0.42965767 -1.037133 -1.9265584 ... -2.5993202 1.9131498 -0.42965767] [-0.42965767 -3.2012084 -1.7031232 ... -3.0381722 -0.4007458 -0.42965767] [-0.42965767 -2.3935292 0.02173308 ... -0.45881408 -1.5633008 -0.42965767] ... [-0.42965767 3.0478113 1.7141623 ... -2.1622727 -2.0768561 -0.42965767] [-0.42965767 -0.15413961 -0.7131861 ... -1.4116863 0.04592869 -0.42965767] [-0.42965767 -0.87563705 -0.49739018 ... 0.35611227 -1.3164475 -0.42965767]] [[-0.42965767 -2.3830473 0.436857 ... 
-1.3375894 0.36067787 -0.42965767] [-0.42965767 -2.309793 -0.21216899 ... -0.30715755 1.5484515 -0.42965767] [-0.42965767 -1.1316744 -0.02517137 ... -2.6202598 1.133178 -0.42965767] ... [-0.42965767 1.6278374 -0.07213834 ... 0.96757865 -0.8106327 -0.42965767] [-0.42965767 0.6373457 -0.67793953 ... -1.4883429 -0.40811613 -0.42965767] [-0.42965767 1.5784292 1.3507364 ... -0.57802916 -1.4636989 -0.42965767]] [[-0.42965767 0.12565807 -0.57447064 ... -1.1807333 1.1905192 -0.42965767] [-0.42965767 -0.23023146 -0.39634502 ... 0.05249763 -0.02550453 -0.42965767] [-0.42965767 -0.947232 -0.9723232 ... -0.5765275 1.4559026 -0.42965767] ... [-0.42965767 -1.0750161 -0.7975699 ... 1.4080397 0.12647304 -0.42965767] [-0.42965767 -1.2676111 -0.47272825 ... 2.0831792 1.2912631 -0.42965767] [-0.42965767 -0.6789963 2.1709096 ... -3.2037334 0.70690525 -0.42965767]] [[-0.42965767 -2.081699 -1.2017075 ... 0.14996448 -4.2350335 -0.42965767] [-0.42965767 -1.6982934 1.291685 ... -1.20191 -0.6204492 -0.42965767] [-0.42965767 -2.1325502 -1.3738421 ... -2.0089347 -0.19652626 -0.42965767] ... [-0.42965767 -0.25986892 -0.62018937 ... 0.16234657 0.30302748 -0.42965767] [-0.42965767 -1.6501809 1.33192 ... -0.48821276 -2.2724907 -0.42965767] [-0.42965767 0.33567688 1.4325475 ... -0.5718181 1.871567 -0.42965767]] [[-0.42965767 -0.03312981 -1.0160588 ... 0.74231815 -0.6145626 -0.42965767] [-0.42965767 0.0077104 0.3721027 ... -3.6546082 -0.9788637 -0.42965767] [-0.42965767 -0.8763804 0.11816522 ... 1.1990882 -0.03473455 -0.42965767] ... [-0.42965767 0.04316628 1.5820701 ... 2.9054217 1.510584 -0.42965767] [-0.42965767 0.5527278 -2.3193243 ... 1.4905611 0.75180185 -0.42965767] [-0.42965767 -0.64450306 -0.32864603 ... -1.4628435 -2.2360842 -0.42965767]]] [[[-1.526995 0.21805286 -3.5495853 ... -3.0247715 1.1944802 -1.526995 ] [-1.526995 -2.4935913 -1.7281933 ... -2.6227818 -0.6845906 -1.526995 ] [-1.526995 -3.2918978 -1.8232272 ... -1.2496445 -3.1347644 -1.526995 ] ... [-1.526995 1.7523513 0.5154834 ... 
-1.3020062 -2.837358 -1.526995 ] [-1.526995 -0.2692002 -1.1874086 ... -1.6636896 -1.7858591 -1.526995 ] [-1.526995 -1.7099804 -1.4031935 ... -1.0874084 -2.5138183 -1.526995 ]] [[-1.526995 -2.8422022 -2.2583852 ... -2.3257766 -2.4883096 -1.526995 ] [-1.526995 -0.9190861 -0.9660964 ... -0.59404725 1.6605344 -1.526995 ] [-1.526995 -2.675387 -2.9122314 ... -2.5172963 -0.9820635 -1.526995 ] ... [-1.526995 -0.5117837 -2.39322 ... -0.12070167 -0.8469653 -1.526995 ] [-1.526995 -1.9580996 -1.2759757 ... -3.0471973 -0.7010377 -1.526995 ] [-1.526995 -0.38624334 0.7194319 ... -2.845531 -2.8624043 -1.526995 ]] [[-1.526995 -0.8749224 -0.8731099 ... -2.993801 -0.21669316 -1.526995 ] [-1.526995 -1.8073796 -1.940743 ... -2.877688 -0.08123708 -1.526995 ] [-1.526995 -2.0064588 -2.7290845 ... -2.2849956 0.14281034 -1.526995 ] ... [-1.526995 -1.8284702 -1.5824294 ... 0.9193499 0.23644352 -1.526995 ] [-1.526995 -1.9524813 -1.2626584 ... -0.0198195 0.57556796 -1.526995 ] [-1.526995 -0.49597538 1.609482 ... -1.971086 -1.7521572 -1.526995 ]] [[-1.526995 -2.7823586 -1.6667672 ... -1.0944793 -4.809583 -1.526995 ] [-1.526995 -1.4066046 0.6894109 ... -1.0755652 -1.5474828 -1.526995 ] [-1.526995 -2.8106906 -2.3039882 ... -1.283514 -1.7707859 -1.526995 ] ... [-1.526995 -1.9314932 -1.5065187 ... -0.89046925 -1.8470666 -1.526995 ] [-1.526995 -2.9592834 1.1499987 ... -0.7472295 -3.6481829 -1.526995 ] [-1.526995 -0.6596877 0.983161 ... -1.5056424 0.46657133 -1.526995 ]] [[-1.526995 -1.0451775 -0.45616782 ... -0.33058155 -2.02541 -1.526995 ] [-1.526995 -2.1915607 -0.09469318 ... -5.0790772 -1.3649056 -1.526995 ] [-1.526995 -1.8419394 -0.35489738 ... -0.22613895 -0.5427873 -1.526995 ] ... [-1.526995 -2.4128528 0.49599576 ... 1.527219 -0.6570105 -1.526995 ] [-1.526995 -2.1804986 -4.0211067 ... -0.15509951 0.14708388 -1.526995 ] [-1.526995 -2.528133 -2.9652596 ... -2.1538105 -3.2595563 -1.526995 ]]]]]; ov_res: [[[[[ 0.10333444 1.0165606 -3.4728615 ... 
-5.8384113 2.9797437 0.10333444] [ 0.10333444 -0.8111611 -1.0518489 ... -3.1624374 3.8833826 0.10333444] [ 0.10333444 -1.9011142 1.0561426 ... -0.63772964 -0.15248932 0.10333444] ... [ 0.10333444 1.131737 4.7270036 ... -0.01132475 -2.3891962 0.10333444] [ 0.10333444 1.6198881 3.6312132 ... -0.76009244 0.13810205 0.10333444] [ 0.10333444 1.5646253 1.0193409 ... 2.445154 0.5364987 0.10333444]] [[ 0.10333444 -3.2017288 -0.53504187 ... -0.4805054 -3.36502 0.10333444] [ 0.10333444 1.0411589 0.88656205 ... 0.64656824 6.0966544 0.10333444] [ 0.10333444 -2.831418 -0.03460018 ... -1.2658541 0.47816616 0.10333444] ... [ 0.10333444 0.45153522 -1.185643 ... 0.99038917 2.4864187 0.10333444] [ 0.10333444 0.5515673 0.52734697 ... 0.4800467 1.4314332 0.10333444] [ 0.10333444 0.7716286 0.77737486 ... -2.8864875 -3.6534185 0.10333444]] [[ 0.10333444 2.985113 -0.08418451 ... -4.2492876 3.3459663 0.10333444] [ 0.10333444 -3.2374012 1.2760417 ... -0.1224806 0.75170594 0.10333444] [ 0.10333444 1.1068267 -0.21574126 ... 2.110329 2.7360256 0.10333444] ... [ 0.10333444 3.5528574 2.5197835 ... 2.7166348 1.6006694 0.10333444] [ 0.10333444 -0.14978759 1.558699 ... 1.6708223 2.1809845 0.10333444] [ 0.10333444 0.45174026 2.6433864 ... -0.13642995 0.6711615 0.10333444]] [[ 0.10333444 -1.9982488 -2.8433552 ... 1.0913838 -2.886905 0.10333444] [ 0.10333444 3.845825 0.9766811 ... -0.07250506 -1.5219419 0.10333444] [ 0.10333444 -0.7981831 1.2417132 ... 4.1840043 -1.4128097 0.10333444] ... [ 0.10333444 0.8986232 1.1104217 ... 0.531368 1.6608139 0.10333444] [ 0.10333444 0.3049598 0.76393914 ... 1.4208584 -1.2677988 0.10333444] [ 0.10333444 1.5470934 2.8634882 ... -0.06313699 1.5720544 0.10333444]] [[ 0.10333444 -0.70005834 3.4990184 ... 0.4106027 -0.21788083 0.10333444] [ 0.10333444 0.35171342 1.0596652 ... -3.0475879 1.0088981 0.10333444] [ 0.10333444 -2.3803542 2.9151416 ... 0.2990712 -0.285307 0.10333444] ... [ 0.10333444 -0.24345748 2.0953906 ... 
3.7264805 5.1989474 0.10333444] [ 0.10333444 1.5859954 -1.9308677 ... 0.8278721 2.5414379 0.10333444] [ 0.10333444 0.6332985 0.94603133 ... -1.7543049 -2.590681 0.10333444]]] [[[-0.42965767 -1.037133 -1.9265584 ... -2.5993202 1.9131498 -0.42965767] [-0.42965767 -3.2012084 -1.7031232 ... -3.0381722 -0.4007458 -0.42965767] [-0.42965767 -2.3935292 0.02173308 ... -0.45881408 -1.5633008 -0.42965767] ... [-0.42965767 3.0478113 1.7141623 ... -2.1622727 -2.0768561 -0.42965767] [-0.42965767 -0.15413961 -0.7131861 ... -1.4116863 0.04592869 -0.42965767] [-0.42965767 -0.87563705 -0.49739018 ... 0.35611227 -1.3164475 -0.42965767]] [[-0.42965767 -2.3830473 0.436857 ... -1.3375894 0.36067787 -0.42965767] [-0.42965767 -2.309793 -0.21216899 ... -0.30715755 1.5484515 -0.42965767] [-0.42965767 -1.1316744 -0.02517137 ... -2.6202598 1.133178 -0.42965767] ... [-0.42965767 1.6278374 -0.07213834 ... 0.96757865 -0.8106327 -0.42965767] [-0.42965767 0.6373457 -0.67793953 ... -1.4883429 -0.40811613 -0.42965767] [-0.42965767 1.5784292 1.3507364 ... -0.57802916 -1.4636989 -0.42965767]] [[-0.42965767 0.12565807 -0.57447064 ... -1.1807333 1.1905192 -0.42965767] [-0.42965767 -0.23023146 -0.39634502 ... 0.05249763 -0.02550453 -0.42965767] [-0.42965767 -0.947232 -0.9723232 ... -0.5765275 1.4559026 -0.42965767] ... [-0.42965767 -1.0750161 -0.7975699 ... 1.4080397 0.12647304 -0.42965767] [-0.42965767 -1.2676111 -0.47272825 ... 2.0831792 1.2912631 -0.42965767] [-0.42965767 -0.6789963 2.1709096 ... -3.2037334 0.70690525 -0.42965767]] [[-0.42965767 -2.081699 -1.2017075 ... 0.14996448 -4.2350335 -0.42965767] [-0.42965767 -1.6982934 1.291685 ... -1.20191 -0.6204492 -0.42965767] [-0.42965767 -2.1325502 -1.3738421 ... -2.0089347 -0.19652626 -0.42965767] ... [-0.42965767 -0.25986892 -0.62018937 ... 0.16234657 0.30302748 -0.42965767] [-0.42965767 -1.6501809 1.33192 ... -0.48821276 -2.2724907 -0.42965767] [-0.42965767 0.33567688 1.4325475 ... 
-0.5718181 1.871567 -0.42965767]] [[-0.42965767 -0.03312981 -1.0160588 ... 0.74231815 -0.6145626 -0.42965767] [-0.42965767 0.0077104 0.3721027 ... -3.6546082 -0.9788637 -0.42965767] [-0.42965767 -0.8763804 0.11816522 ... 1.1990882 -0.03473455 -0.42965767] ... [-0.42965767 0.04316628 1.5820701 ... 2.9054217 1.510584 -0.42965767] [-0.42965767 0.5527278 -2.3193243 ... 1.4905611 0.75180185 -0.42965767] [-0.42965767 -0.64450306 -0.32864603 ... -1.4628435 -2.2360842 -0.42965767]]] [[[-1.526995 0.21805286 -3.5495853 ... -3.0247715 1.1944802 -1.526995 ] [-1.526995 -2.4935913 -1.7281933 ... -2.6227818 -0.6845906 -1.526995 ] [-1.526995 -3.2918978 -1.8232272 ... -1.2496445 -3.1347644 -1.526995 ] ... [-1.526995 1.7523513 0.5154834 ... -1.3020062 -2.837358 -1.526995 ] [-1.526995 -0.2692002 -1.1874086 ... -1.6636896 -1.7858591 -1.526995 ] [-1.526995 -1.7099804 -1.4031935 ... -1.0874084 -2.5138183 -1.526995 ]] [[-1.526995 -2.8422022 -2.2583852 ... -2.3257766 -2.4883096 -1.526995 ] [-1.526995 -0.9190861 -0.9660964 ... -0.59404725 1.6605344 -1.526995 ] [-1.526995 -2.675387 -2.9122314 ... -2.5172963 -0.9820635 -1.526995 ] ... [-1.526995 -0.5117837 -2.39322 ... -0.12070167 -0.8469653 -1.526995 ] [-1.526995 -1.9580996 -1.2759757 ... -3.0471973 -0.7010377 -1.526995 ] [-1.526995 -0.38624334 0.7194319 ... -2.845531 -2.8624043 -1.526995 ]] [[-1.526995 -0.8749224 -0.8731099 ... -2.993801 -0.21669316 -1.526995 ] [-1.526995 -1.8073796 -1.940743 ... -2.877688 -0.08123708 -1.526995 ] [-1.526995 -2.0064588 -2.7290845 ... -2.2849956 0.14281034 -1.526995 ] ... [-1.526995 -1.8284702 -1.5824294 ... 0.9193499 0.23644352 -1.526995 ] [-1.526995 -1.9524813 -1.2626584 ... -0.0198195 0.57556796 -1.526995 ] [-1.526995 -0.49597538 1.609482 ... -1.971086 -1.7521572 -1.526995 ]] [[-1.526995 -2.7823586 -1.6667672 ... -1.0944793 -4.809583 -1.526995 ] [-1.526995 -1.4066046 0.6894109 ... -1.0755652 -1.5474828 -1.526995 ] [-1.526995 -2.8106906 -2.3039882 ... -1.283514 -1.7707859 -1.526995 ] ... 
[-1.526995 -1.9314932 -1.5065187 ... -0.89046925 -1.8470666 -1.526995 ] [-1.526995 -2.9592834 1.1499987 ... -0.7472295 -3.6481829 -1.526995 ] [-1.526995 -0.6596877 0.983161 ... -1.5056424 0.46657133 -1.526995 ]] [[-1.526995 -1.0451775 -0.45616782 ... -0.33058155 -2.02541 -1.526995 ] [-1.526995 -2.1915607 -0.09469318 ... -5.0790772 -1.3649056 -1.526995 ] [-1.526995 -1.8419394 -0.35489738 ... -0.22613895 -0.5427873 -1.526995 ] ... [-1.526995 -2.4128528 0.49599576 ... 1.527219 -0.6570105 -1.526995 ] [-1.526995 -2.1804986 -4.0211067 ... -0.15509951 0.14708388 -1.526995 ] [-1.526995 -2.528133 -2.9652596 ... -2.1538105 -3.2595563 -1.526995 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} 1] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_894.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.604137}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.2579 (2,1,1,.,.) = 0.2906 (3,1,1,.,.) = -0.1832 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[ 2.1118822e+00 6.0413694e-01 -6.0675323e-01 ... -1.5209216e-01 6.0413694e-01 9.3561077e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 7.9030025e-01 6.0413694e-01 1.6511850e+00 ... -4.3852854e-01 6.0413694e-01 1.9619294e+00] ... [-1.1673070e+00 6.0413694e-01 -4.8708034e-01 ... -4.1355693e-01 6.0413694e-01 -9.7832561e-02] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 1.6618543e+00 6.0413694e-01 -1.1653618e+00 ... 1.0583441e+00 6.0413694e-01 2.3540458e-01]] [[ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] ... 
[ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01]] [[ 7.3586667e-01 6.0413694e-01 4.8436373e-01 ... 5.2996308e-01 6.0413694e-01 -6.1043811e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 1.5872054e+00 6.0413694e-01 6.3349497e-01 ... 1.0222086e+00 6.0413694e-01 2.4072289e-01] ... [-5.0721133e-01 6.0413694e-01 3.0334744e-01 ... 1.4525549e+00 6.0413694e-01 -9.8262978e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 1.3608563e+00 6.0413694e-01 -2.6929379e-04 ... 3.0275600e+00 6.0413694e-01 7.9493558e-01]] ... [[ 1.8815387e+00 6.0413694e-01 3.0772981e-01 ... 2.3207569e+00 6.0413694e-01 1.2503637e+00] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [-1.0902975e+00 6.0413694e-01 2.6829574e+00 ... 5.6684774e-01 6.0413694e-01 1.3483063e+00] ... [ 6.3739884e-01 6.0413694e-01 1.9143786e+00 ... 1.5665176e+00 6.0413694e-01 -4.7742331e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [-9.5428157e-01 6.0413694e-01 -9.3885374e-01 ... -1.4347899e+00 6.0413694e-01 -7.0509708e-01]] [[ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] ... [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01]] [[-1.6997762e+00 6.0413694e-01 -2.5010917e+00 ... 
2.7161515e+00 6.0413694e-01 -1.7412331e+00] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [-1.2642416e+00 6.0413694e-01 -1.4911022e+00 ... -6.0358703e-02 6.0413694e-01 1.8895323e+00] ... [ 1.7920945e+00 6.0413694e-01 -6.4494240e-01 ... 1.5394027e+00 6.0413694e-01 2.8614673e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 1.8271732e-01 6.0413694e-01 -9.9695778e-01 ... -7.6749337e-01 6.0413694e-01 -2.5230503e-01]]]]]; ov_res: [[[[[ 2.1118822e+00 6.0413694e-01 -6.0675323e-01 ... -1.5209216e-01 6.0413694e-01 9.3561077e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 7.9030025e-01 6.0413694e-01 1.6511850e+00 ... -4.3852854e-01 6.0413694e-01 1.9619294e+00] ... [-1.1673070e+00 6.0413694e-01 -4.8708034e-01 ... -4.1355693e-01 6.0413694e-01 -9.7832561e-02] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 1.6618543e+00 6.0413694e-01 -1.1653618e+00 ... 1.0583441e+00 6.0413694e-01 2.3540458e-01]] [[ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] ... [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01]] [[ 7.3586667e-01 6.0413694e-01 4.8436373e-01 ... 5.2996308e-01 6.0413694e-01 -6.1043811e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 1.5872054e+00 6.0413694e-01 6.3349497e-01 ... 1.0222086e+00 6.0413694e-01 2.4072289e-01] ... [-5.0721133e-01 6.0413694e-01 3.0334744e-01 ... 
1.4525549e+00 6.0413694e-01 -9.8262978e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 1.3608563e+00 6.0413694e-01 -2.6929379e-04 ... 3.0275600e+00 6.0413694e-01 7.9493558e-01]] ... [[ 1.8815387e+00 6.0413694e-01 3.0772981e-01 ... 2.3207569e+00 6.0413694e-01 1.2503637e+00] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [-1.0902975e+00 6.0413694e-01 2.6829574e+00 ... 5.6684774e-01 6.0413694e-01 1.3483063e+00] ... [ 6.3739884e-01 6.0413694e-01 1.9143786e+00 ... 1.5665176e+00 6.0413694e-01 -4.7742331e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [-9.5428157e-01 6.0413694e-01 -9.3885374e-01 ... -1.4347899e+00 6.0413694e-01 -7.0509708e-01]] [[ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] ... [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01]] [[-1.6997762e+00 6.0413694e-01 -2.5010917e+00 ... 2.7161515e+00 6.0413694e-01 -1.7412331e+00] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [-1.2642416e+00 6.0413694e-01 -1.4911022e+00 ... -6.0358703e-02 6.0413694e-01 1.8895323e+00] ... [ 1.7920945e+00 6.0413694e-01 -6.4494240e-01 ... 1.5394027e+00 6.0413694e-01 2.8614673e-01] [ 6.0413694e-01 6.0413694e-01 6.0413694e-01 ... 6.0413694e-01 6.0413694e-01 6.0413694e-01] [ 1.8271732e-01 6.0413694e-01 -9.9695778e-01 ... -7.6749337e-01 6.0413694e-01 -2.5230503e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [1, 1, 1], 'dilations': [2, 2, 2], 'groups': 1, 'output_padding': [1, 1, 1], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_896.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 1, 1]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.32664}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.2263 (2,1,1,.,.) = -1.2071 (3,1,1,.,.) = 0.4802 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ]] [[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -2.5903447 -1.326644 ... 2.06753 -1.326644 -1.1116914 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 0.04857576 -1.326644 ... 0.8173218 -1.326644 -2.9625053 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 0.01678181 -1.326644 ... -2.1033437 -1.326644 -1.2964742 ]] [[-1.326644 -1.326644 -1.326644 ... 
-1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ]] ... [[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.7782929 -1.326644 ... 3.2464085 -1.326644 -3.1706266 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 1.3349216 -1.326644 ... 0.7950835 -1.326644 0.03977597] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -4.2333374 -1.326644 ... 1.8643656 -1.326644 -3.3158722 ]] [[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ]] [[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 0.47331977 -1.326644 ... 2.6455958 -1.326644 -4.4055595 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 1.9717023 -1.326644 ... -1.9778955 -1.326644 -0.84637594] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -0.4102912 -1.326644 ... 2.4150069 -1.326644 -1.838582 ]]]]]; ov_res: [[[[[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... 
-1.326644 -1.326644 -1.326644 ]] [[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -2.5903447 -1.326644 ... 2.06753 -1.326644 -1.1116914 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 0.04857576 -1.326644 ... 0.8173218 -1.326644 -2.9625053 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 0.01678181 -1.326644 ... -2.1033437 -1.326644 -1.2964742 ]] [[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ]] ... [[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.7782929 -1.326644 ... 3.2464085 -1.326644 -3.1706266 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 1.3349216 -1.326644 ... 0.7950835 -1.326644 0.03977597] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -4.2333374 -1.326644 ... 1.8643656 -1.326644 -3.3158722 ]] [[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ]] [[-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] [-1.326644 0.47331977 -1.326644 ... 2.6455958 -1.326644 -4.4055595 ] [-1.326644 -1.326644 -1.326644 ... -1.326644 -1.326644 -1.326644 ] ... [-1.326644 1.9717023 -1.326644 ... -1.9778955 -1.326644 -0.84637594] [-1.326644 -1.326644 -1.326644 ... 
-1.326644 -1.326644 -1.326644 ] [-1.326644 -0.4102912 -1.326644 ... 2.4150069 -1.326644 -1.838582 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 2, 2, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_898.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, 2, 1, strides=[12, 4, 2, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[ 1.17858365e-01 -4.81179386e-01 -4.67822030e-02 ... 7.77069271e-01 -4.77218896e-01 5.50208211e-01] [ 5.30461848e-01 -1.69273555e-01 -1.09262049e+00 ... 2.20646739e+00 -6.22719646e-01 2.68254828e+00] [ 1.11773515e+00 -2.90243506e-01 -9.45536435e-01 ... -1.88669991e+00 3.38906169e-01 4.43383574e-01] ... [-3.42210829e-01 -1.00867581e+00 3.56215358e-01 ... 2.01780868e+00 1.44475865e+00 1.51334226e-01] [-1.45087409e+00 -3.11604023e+00 1.95927119e+00 ... 2.65752435e+00 1.10225952e+00 1.12398791e+00] [-7.01606154e-01 4.93190706e-01 8.12551796e-01 ... 1.24476755e+00 -1.73787564e-01 6.61256850e-01]] [[ 1.38543916e+00 1.47012198e+00 -1.19231856e+00 ... -1.39949799e-01 1.60361850e+00 -1.08602536e+00] [-1.69888854e-01 -2.69138575e+00 -1.43930531e+00 ... -1.86345935e+00 -7.72312260e+00 -3.03348136e+00] [ 6.30319405e+00 -1.31540775e+00 -1.34959221e-01 ... -1.54035091e-01 -2.67176771e+00 2.93307185e-01] ... [ 1.40603352e+00 1.61915553e+00 -2.60166192e+00 ... 
-4.64600182e+00 -4.58180046e+00 3.88760042e+00] [ 2.00933409e+00 -2.05697012e+00 1.88068211e+00 ... -4.24910545e+00 2.18794346e+00 3.93843412e+00] [-3.05395842e+00 2.53866220e+00 -2.86359477e+00 ... 3.09988165e+00 4.17812395e+00 -2.92131090e+00]] [[-3.95514321e+00 -4.14739251e-01 3.49485326e+00 ... -3.07365203e+00 -8.98903728e-01 -8.42827916e-01] [-3.47016335e+00 7.09520769e+00 -1.48654103e+00 ... 3.24973732e-01 -3.50557256e+00 2.89624786e+00] [ 7.70499945e-01 4.15577054e-01 7.86869144e+00 ... -1.22724199e+00 7.65213132e-01 9.22816634e-01] ... [ 8.60633612e-01 -4.27710533e-01 -5.42078543e+00 ... 3.19648075e+00 -4.27101135e+00 -4.68745184e+00] [ 5.64658308e+00 -3.95985413e+00 -1.77618086e+00 ... -3.79719734e-02 -1.12267475e+01 -1.94021225e+00] [ 3.98787165e+00 -3.14057469e+00 2.63001609e+00 ... 1.73002577e+00 -3.68893385e-01 -1.92976892e-01]] ... [[ 8.52194071e-01 -1.20420277e-01 -9.79137778e-01 ... -2.19868302e+00 5.10123253e-01 4.61317444e+00] [ 9.34090137e-01 -5.15983677e+00 1.76228416e+00 ... -4.19289255e+00 -5.34754276e-01 8.89638662e-02] [-2.25002837e+00 -2.74425030e-01 8.93918991e-01 ... 1.73520005e+00 1.87520456e+00 2.48784280e+00] ... [-6.18280768e-01 1.14694238e+00 -1.62220240e-01 ... -8.44141185e-01 3.53814983e+00 3.29126167e+00] [ 1.79572761e+00 4.75926638e-01 9.22255576e-01 ... -1.29251337e+00 -7.00545979e+00 1.83425200e+00] [-1.63037443e+00 1.67559528e+00 -5.19997120e+00 ... -4.88586998e+00 4.83886898e-01 -9.38027799e-01]] [[-4.74626398e+00 -9.41717505e-01 -2.92371798e+00 ... 6.52460754e-02 3.04048109e+00 -2.82787180e+00] [-1.16534674e+00 8.32140446e-01 -4.81327820e+00 ... 2.08222568e-02 5.03751326e+00 2.41246486e+00] [-9.84353423e-01 -3.40995836e+00 -3.92596483e+00 ... -6.72047973e-01 -4.47679520e+00 -5.62557364e+00] ... [-2.74776340e+00 4.37424755e+00 1.70362592e+00 ... 1.55900657e+00 2.89737844e+00 -6.19426441e+00] [ 1.78976941e+00 -1.68559289e+00 3.40843487e+00 ... 
2.03043413e+00 -5.96539259e-01 -1.20326304e+00] [-6.67121887e+00 -2.15269279e+00 -3.13242078e-01 ... -2.34598732e+00 5.13126945e+00 1.42445564e-02]] [[ 1.14881837e+00 -3.95380020e+00 1.87692463e+00 ... 2.66444981e-01 -7.57667601e-01 5.09748101e-01] [ 4.08528900e+00 6.30186844e+00 -4.09547758e+00 ... -4.95785999e+00 -1.42822528e+00 1.30422044e+00] [-1.76014876e+00 9.27046537e-01 1.20301783e+00 ... 4.63987398e+00 -2.84871650e+00 4.14909887e+00] ... [-1.06030607e+00 -2.34050894e+00 2.50511217e+00 ... 1.32704389e+00 -4.11944437e+00 8.77843440e-01] [-4.27799797e+00 -1.74099684e+00 -4.31523228e+00 ... 1.53552365e+00 1.00860262e+00 1.72832537e+00] [-2.60804510e+00 -1.41101766e+00 2.28919315e+00 ... -2.06912875e+00 3.32624984e+00 -1.23181045e+00]]] [[[-9.95909870e-01 1.30608883e-02 1.28988922e+00 ... -2.82850838e+00 -2.71372050e-01 -2.01702952e+00] [-1.43554592e+00 1.70101017e-01 -6.56077623e-01 ... 9.59823668e-01 1.47347522e+00 2.18282938e+00] [ 2.77155471e+00 -1.70645928e+00 -1.46558797e+00 ... -8.99852157e-01 -7.65290618e-01 -1.57996035e+00] ... [ 1.43742359e+00 1.53235006e+00 -2.91708589e-01 ... -4.78199631e-01 1.14828384e+00 -7.67142892e-01] [-2.50329280e+00 -1.33433044e+00 -2.38679433e+00 ... 2.24554151e-01 -7.13926554e-03 -2.13722134e+00] [ 5.12701929e-01 7.83368886e-01 -1.30926687e-02 ... 4.70989287e-01 1.63376793e-01 1.33644092e+00]] [[ 2.20339060e+00 1.87199485e+00 -1.52713001e+00 ... 2.64850903e+00 3.94072008e+00 3.38691425e+00] [-5.40493679e+00 1.38514209e+00 1.36169374e+00 ... -1.79992461e+00 -3.72113228e+00 -3.14837885e+00] [ 4.37912273e+00 -2.89246321e-01 1.08656085e+00 ... -4.36102676e+00 -2.32142878e+00 2.20001125e+00] ... [ 9.45371807e-01 1.83049512e+00 -3.13285494e+00 ... -1.08322453e+00 -2.76512456e+00 -2.51772571e+00] [ 3.59816456e+00 -3.02706385e+00 2.97026181e+00 ... -4.86942482e+00 -4.73186159e+00 4.86952066e+00] [ 9.18809712e-01 1.24084866e+00 -1.60723472e+00 ... 
1.43047416e+00 6.55559659e-01 1.50987756e+00]] [[-1.56616485e+00 -2.52029562e+00 4.49356461e+00 ... -1.80330193e+00 2.57364202e+00 -3.61219001e+00] [-4.35684204e+00 3.83952618e+00 -9.47454691e-01 ... -3.20282698e-01 -2.28916693e+00 4.23672318e-01] [-3.01518738e-02 -3.52171564e+00 9.58227730e+00 ... -3.84632802e+00 -4.22554255e-01 1.62432957e+00] ... [-4.23535681e+00 1.22810352e+00 -1.44681263e+00 ... 7.62780666e-01 3.24207306e+00 6.03333235e-01] [ 6.00442648e-01 8.38527083e-01 -2.97857976e+00 ... -1.40205729e+00 -8.63836098e+00 -1.04471564e+00] [ 7.53973424e-01 -2.21794772e+00 -1.72834635e+00 ... 1.80564451e+00 -4.42891419e-01 2.84096766e+00]] ... [[-3.80998731e-01 1.92146671e+00 -2.74731207e+00 ... -1.28579962e+00 4.27570105e-01 2.52979589e+00] [-2.71432352e+00 -5.48297405e+00 -3.73738551e+00 ... -5.29890203e+00 -2.15031719e+00 -4.43848276e+00] [-1.29422832e+00 2.45895028e+00 -6.83228731e-01 ... -1.88797450e+00 7.18717289e+00 9.20349717e-01] ... [ 5.34544110e-01 -4.95820999e-01 -5.75060463e+00 ... -6.43898678e+00 6.26260853e+00 1.68487787e+00] [-2.15279508e+00 -1.34287274e+00 1.91545725e-01 ... 1.14511502e+00 2.03821969e+00 1.53994143e+00] [ 1.21350038e+00 -1.05078435e+00 -2.78952122e-01 ... -2.31030369e+00 1.70142007e+00 -6.53022885e-01]] [[-1.07670426e-01 4.29000407e-01 3.00658047e-01 ... -1.42493784e-01 5.22890627e-01 -4.92867327e+00] [-1.82306063e+00 -2.60970521e+00 -1.07562041e+00 ... 1.07316494e+00 6.07278204e+00 2.42463374e+00] [ 1.58548450e+00 7.79475719e-02 -6.30131865e+00 ... 4.21714401e+00 1.43588364e+00 -2.12248278e+00] ... [-5.19598198e+00 3.88381553e+00 -3.64485335e+00 ... 5.84603667e-01 2.84700203e+00 -3.30457449e+00] [ 4.60041094e+00 1.71076632e+00 5.47795630e+00 ... 3.92629743e+00 -5.28868961e+00 -3.52565861e+00] [-3.22730064e+00 -2.18893433e+00 3.76234472e-01 ... 1.81899548e+00 3.53606248e+00 -6.09884024e-01]] [[-1.30579245e+00 -4.35983706e+00 3.06803656e+00 ... 
3.58581924e+00 -1.74527869e-01 2.88656783e+00] [ 2.33197546e+00 3.52871132e+00 -3.32712501e-01 ... -4.92770147e+00 -6.06075704e-01 1.68728304e+00] [-2.99723595e-01 4.39990807e+00 -7.72579074e-01 ... 3.98597860e+00 -1.98027825e+00 7.02076149e+00] ... [ 3.50909853e+00 4.00985122e-01 7.08188057e-01 ... 7.52917588e-01 -5.41731656e-01 2.54807305e+00] [ 8.63847375e-01 -7.47833610e-01 -2.62915850e+00 ... 2.82648110e+00 -2.93811512e+00 2.78264427e+00] [-2.29993653e+00 -1.19661605e+00 7.08995163e-02 ... 5.20069063e-01 3.00262690e+00 -3.71618830e-02]]] [[[ 9.45975065e-01 -1.19651556e-01 2.69643813e-01 ... 2.94277501e+00 1.32165539e+00 8.33353639e-01] [-1.28893244e+00 1.49787986e+00 2.75793493e-01 ... 1.59671211e+00 9.35421109e-01 6.23292148e-01] [ 8.22039962e-01 -6.14546120e-01 -9.19223547e-01 ... -2.95618105e+00 -1.49239123e+00 1.50972843e+00] ... [-6.69961989e-01 3.30437392e-01 -4.77365553e-02 ... 6.93242669e-01 6.54887676e-01 -6.30583048e-01] [ 1.16492569e+00 -2.25940371e+00 2.15348363e+00 ... -9.95376229e-01 -1.13770235e+00 9.84696746e-01] [-3.52023542e-01 5.71056604e-01 -2.74637133e-01 ... 6.90368056e-01 2.61674315e-01 3.46678525e-01]] [[ 2.66103125e+00 -1.92748451e+00 -2.24784803e+00 ... 2.51835299e+00 3.19416940e-01 -4.92228866e-01] [ 5.47705412e-01 2.57141066e+00 -1.83510876e+00 ... -1.96665883e+00 -1.84756160e+00 -2.10802031e+00] [-1.23060417e+00 -7.27822876e+00 1.76435637e+00 ... -4.12493181e+00 -6.40729427e+00 -5.85880876e-01] ... [-3.50958157e+00 -8.97248089e-01 -2.34350228e+00 ... -2.24745393e+00 2.26198339e+00 -5.37354946e-01] [ 1.84128451e+00 -1.78900555e-01 -2.18781328e+00 ... -4.22377157e+00 -4.40105867e+00 5.93815088e-01] [ 1.62397194e+00 1.20606661e+00 -7.49037147e-01 ... 2.79826641e-01 1.10056782e+00 1.79886866e+00]] [[-8.46679449e-01 -8.54794621e-01 2.61524379e-01 ... 2.45588094e-01 -1.89956808e+00 -4.68865156e-01] [-3.19614840e+00 8.27980280e-01 2.73523331e+00 ... 
-4.46213722e+00 -3.43717313e+00 -8.24282467e-01] [ 7.03825951e-01 1.25350118e-01 1.94264114e-01 ... -4.24157083e-01 -5.42309284e-01 -1.16911709e+00] ... [-1.07565367e+00 8.72014403e-01 -5.55561364e-01 ... 1.50518417e-01 5.09772396e+00 7.76622057e-01] [ 1.63267636e+00 -1.66747260e+00 -9.32681799e-01 ... 1.72545254e+00 -4.96361303e+00 -3.54722214e+00] [ 1.84775162e+00 -2.03485894e+00 -1.71404767e+00 ... 1.21419156e+00 -2.75911546e+00 2.23412323e+00]] ... [[ 1.17442060e+00 1.83109951e+00 2.24024820e+00 ... 1.29857802e+00 -1.24335051e+00 -1.25360584e+00] [-2.76070762e+00 -2.66730642e+00 -1.62133741e+00 ... -2.89289165e+00 4.73834181e+00 3.75510597e+00] [-2.78072953e-01 1.93415844e+00 -1.16551101e+00 ... -4.48294687e+00 4.11884165e+00 -7.76528001e-01] ... [ 5.48897386e-01 1.50705552e+00 -3.33075523e+00 ... -1.78265858e+00 8.95568132e-02 3.58295023e-01] [ 6.14425182e-01 -6.03390276e-01 -2.69696474e+00 ... -2.01980782e+00 -4.80509520e-01 -1.91630912e+00] [ 1.48990893e+00 -1.03078532e+00 2.02630341e-01 ... -3.24212861e+00 1.81480789e+00 6.18125916e-01]] [[-1.77487016e+00 -1.75334203e+00 2.73210716e+00 ... 3.65733743e+00 6.63835406e-02 2.00051117e+00] [-5.72146893e+00 -1.63267899e+00 -1.53442240e+00 ... -1.83470058e+00 4.35225248e+00 -1.41914105e+00] [ 1.55367148e+00 1.06938076e+00 -3.32559991e+00 ... 4.63477993e+00 2.50231934e+00 1.13163948e+00] ... [ 3.32159519e-01 5.98264122e+00 -9.66613412e-01 ... -3.34106445e-01 1.87595284e+00 -2.68984365e+00] [-1.00391650e+00 2.50726080e+00 9.90424395e-01 ... 3.17063951e+00 -3.25345111e+00 -4.12856102e+00] [-1.19097471e+00 -2.40958166e+00 2.21422100e+00 ... 3.07998061e+00 2.20467329e+00 -6.23551369e-01]] [[-7.73690522e-01 3.21715683e-01 4.36993062e-01 ... 2.61131436e-01 1.42657965e-01 1.08891571e+00] [ 1.10532296e+00 -1.38541460e+00 2.45141864e-01 ... -2.49110460e+00 3.35958004e-02 1.84300378e-01] [ 1.71115303e+00 2.17338037e+00 -2.43553162e+00 ... -1.48814797e-01 -2.38712144e+00 6.11359692e+00] ... 
[ 1.35347903e-01 -6.53994620e-01 -1.46505392e+00 ... -1.28251344e-01 1.44443774e+00 2.66405058e+00] [ 2.75558889e-01 -2.21304297e+00 3.51585329e-01 ... 3.15149188e-01 -2.53515959e+00 1.33084881e+00] [-1.41354346e+00 -6.11712933e-01 -1.06965375e+00 ... 1.91414165e+00 2.93790960e+00 9.64138508e-01]]]]]; ov_res: [[[[[ 1.17858365e-01 -4.81179386e-01 -4.67822030e-02 ... 7.77069271e-01 -4.77218896e-01 5.50208211e-01] [ 5.30461848e-01 -1.69273540e-01 -1.09262061e+00 ... 2.20646739e+00 -6.22719646e-01 2.68254828e+00] [ 1.11773515e+00 -2.90243566e-01 -9.45536494e-01 ... -1.88669991e+00 3.38906139e-01 4.43383545e-01] ... [-3.42210799e-01 -1.00867581e+00 3.56215268e-01 ... 2.01780868e+00 1.44475865e+00 1.51334211e-01] [-1.45087409e+00 -3.11604023e+00 1.95927131e+00 ... 2.65752459e+00 1.10225952e+00 1.12398779e+00] [-7.01606154e-01 4.93190706e-01 8.12551796e-01 ... 1.24476755e+00 -1.73787564e-01 6.61256850e-01]] [[ 1.38543916e+00 1.47012198e+00 -1.19231844e+00 ... -1.39949754e-01 1.60361850e+00 -1.08602536e+00] [-1.69888750e-01 -2.69138551e+00 -1.43930531e+00 ... -1.86345923e+00 -7.72312307e+00 -3.03348160e+00] [ 6.30319357e+00 -1.31540751e+00 -1.34959146e-01 ... -1.54035032e-01 -2.67176771e+00 2.93307275e-01] ... [ 1.40603340e+00 1.61915529e+00 -2.60166192e+00 ... -4.64600182e+00 -4.58180046e+00 3.88760066e+00] [ 2.00933385e+00 -2.05697036e+00 1.88068211e+00 ... -4.24910593e+00 2.18794298e+00 3.93843412e+00] [-3.05395865e+00 2.53866243e+00 -2.86359477e+00 ... 3.09988165e+00 4.17812395e+00 -2.92131090e+00]] [[-3.95514321e+00 -4.14739221e-01 3.49485350e+00 ... -3.07365179e+00 -8.98903787e-01 -8.42827916e-01] [-3.47016335e+00 7.09520769e+00 -1.48654115e+00 ... 3.24973732e-01 -3.50557256e+00 2.89624763e+00] [ 7.70500004e-01 4.15577203e-01 7.86869097e+00 ... -1.22724211e+00 7.65213013e-01 9.22816515e-01] ... [ 8.60633790e-01 -4.27710891e-01 -5.42078543e+00 ... 3.19648099e+00 -4.27101088e+00 -4.68745232e+00] [ 5.64658308e+00 -3.95985389e+00 -1.77618098e+00 ... 
-3.79721299e-02 -1.12267485e+01 -1.94021261e+00] [ 3.98787165e+00 -3.14057469e+00 2.63001609e+00 ... 1.73002577e+00 -3.68893355e-01 -1.92976952e-01]] ... [[ 8.52194071e-01 -1.20420299e-01 -9.79137719e-01 ... -2.19868302e+00 5.10123193e-01 4.61317444e+00] [ 9.34090018e-01 -5.15983677e+00 1.76228416e+00 ... -4.19289160e+00 -5.34754455e-01 8.89641419e-02] [-2.25002837e+00 -2.74425089e-01 8.93918991e-01 ... 1.73519993e+00 1.87520480e+00 2.48784280e+00] ... [-6.18280768e-01 1.14694262e+00 -1.62220433e-01 ... -8.44141424e-01 3.53814936e+00 3.29126143e+00] [ 1.79572773e+00 4.75926727e-01 9.22255695e-01 ... -1.29251349e+00 -7.00545931e+00 1.83425188e+00] [-1.63037467e+00 1.67559528e+00 -5.19997120e+00 ... -4.88586998e+00 4.83886898e-01 -9.38027859e-01]] [[-4.74626398e+00 -9.41717446e-01 -2.92371774e+00 ... 6.52460679e-02 3.04048133e+00 -2.82787156e+00] [-1.16534674e+00 8.32140207e-01 -4.81327868e+00 ... 2.08224226e-02 5.03751230e+00 2.41246510e+00] [-9.84353364e-01 -3.40995836e+00 -3.92596507e+00 ... -6.72047973e-01 -4.47679520e+00 -5.62557364e+00] ... [-2.74776387e+00 4.37424803e+00 1.70362604e+00 ... 1.55900633e+00 2.89737868e+00 -6.19426489e+00] [ 1.78976929e+00 -1.68559301e+00 3.40843511e+00 ... 2.03043365e+00 -5.96539319e-01 -1.20326304e+00] [-6.67121887e+00 -2.15269303e+00 -3.13242078e-01 ... -2.34598732e+00 5.13126945e+00 1.42445005e-02]] [[ 1.14881837e+00 -3.95380020e+00 1.87692463e+00 ... 2.66444981e-01 -7.57667601e-01 5.09748101e-01] [ 4.08528900e+00 6.30186844e+00 -4.09547758e+00 ... -4.95785999e+00 -1.42822528e+00 1.30422032e+00] [-1.76014864e+00 9.27046657e-01 1.20301783e+00 ... 4.63987398e+00 -2.84871650e+00 4.14909887e+00] ... [-1.06030619e+00 -2.34050918e+00 2.50511241e+00 ... 1.32704377e+00 -4.11944437e+00 8.77843440e-01] [-4.27799749e+00 -1.74099696e+00 -4.31523275e+00 ... 1.53552365e+00 1.00860262e+00 1.72832537e+00] [-2.60804510e+00 -1.41101766e+00 2.28919315e+00 ... 
-2.06912875e+00 3.32624984e+00 -1.23181045e+00]]] [[[-9.95909870e-01 1.30608883e-02 1.28988922e+00 ... -2.82850838e+00 -2.71372050e-01 -2.01702952e+00] [-1.43554580e+00 1.70101017e-01 -6.56077623e-01 ... 9.59823668e-01 1.47347534e+00 2.18282938e+00] [ 2.77155471e+00 -1.70645940e+00 -1.46558797e+00 ... -8.99852157e-01 -7.65290618e-01 -1.57996035e+00] ... [ 1.43742359e+00 1.53234994e+00 -2.91708618e-01 ... -4.78199631e-01 1.14828384e+00 -7.67142892e-01] [-2.50329280e+00 -1.33433044e+00 -2.38679433e+00 ... 2.24554136e-01 -7.13927671e-03 -2.13722134e+00] [ 5.12701929e-01 7.83368886e-01 -1.30926687e-02 ... 4.70989287e-01 1.63376793e-01 1.33644092e+00]] [[ 2.20339036e+00 1.87199473e+00 -1.52713001e+00 ... 2.64850879e+00 3.94072008e+00 3.38691425e+00] [-5.40493727e+00 1.38514221e+00 1.36169374e+00 ... -1.79992497e+00 -3.72113252e+00 -3.14837909e+00] [ 4.37912273e+00 -2.89245933e-01 1.08656085e+00 ... -4.36102676e+00 -2.32142854e+00 2.20001149e+00] ... [ 9.45371747e-01 1.83049524e+00 -3.13285470e+00 ... -1.08322442e+00 -2.76512456e+00 -2.51772571e+00] [ 3.59816456e+00 -3.02706361e+00 2.97026181e+00 ... -4.86942530e+00 -4.73186159e+00 4.86952066e+00] [ 9.18809652e-01 1.24084866e+00 -1.60723472e+00 ... 1.43047416e+00 6.55559659e-01 1.50987756e+00]] [[-1.56616473e+00 -2.52029562e+00 4.49356508e+00 ... -1.80330181e+00 2.57364178e+00 -3.61219001e+00] [-4.35684204e+00 3.83952665e+00 -9.47453976e-01 ... -3.20282847e-01 -2.28916693e+00 4.23672348e-01] [-3.01517863e-02 -3.52171612e+00 9.58227634e+00 ... -3.84632778e+00 -4.22554195e-01 1.62432945e+00] ... [-4.23535681e+00 1.22810352e+00 -1.44681251e+00 ... 7.62780726e-01 3.24207354e+00 6.03333294e-01] [ 6.00442588e-01 8.38527322e-01 -2.97857976e+00 ... -1.40205765e+00 -8.63836193e+00 -1.04471564e+00] [ 7.53973365e-01 -2.21794772e+00 -1.72834635e+00 ... 1.80564451e+00 -4.42891449e-01 2.84096766e+00]] ... [[-3.80998731e-01 1.92146671e+00 -2.74731207e+00 ... 
-1.28579950e+00 4.27570045e-01 2.52979612e+00] [-2.71432352e+00 -5.48297358e+00 -3.73738551e+00 ... -5.29890251e+00 -2.15031695e+00 -4.43848276e+00] [-1.29422808e+00 2.45895028e+00 -6.83228970e-01 ... -1.88797438e+00 7.18717289e+00 9.20349956e-01] ... [ 5.34544170e-01 -4.95820940e-01 -5.75060463e+00 ... -6.43898726e+00 6.26260853e+00 1.68487775e+00] [-2.15279531e+00 -1.34287274e+00 1.91545904e-01 ... 1.14511514e+00 2.03821993e+00 1.53994143e+00] [ 1.21350026e+00 -1.05078435e+00 -2.78952181e-01 ... -2.31030369e+00 1.70141995e+00 -6.53022885e-01]] [[-1.07670359e-01 4.29000378e-01 3.00658047e-01 ... -1.42493770e-01 5.22890627e-01 -4.92867279e+00] [-1.82306051e+00 -2.60970497e+00 -1.07562041e+00 ... 1.07316494e+00 6.07278109e+00 2.42463350e+00] [ 1.58548439e+00 7.79475346e-02 -6.30131817e+00 ... 4.21714354e+00 1.43588305e+00 -2.12248302e+00] ... [-5.19598198e+00 3.88381553e+00 -3.64485335e+00 ... 5.84603608e-01 2.84700179e+00 -3.30457473e+00] [ 4.60041094e+00 1.71076632e+00 5.47795630e+00 ... 3.92629743e+00 -5.28868961e+00 -3.52565861e+00] [-3.22730041e+00 -2.18893433e+00 3.76234472e-01 ... 1.81899536e+00 3.53606248e+00 -6.09884024e-01]] [[-1.30579245e+00 -4.35983706e+00 3.06803656e+00 ... 3.58581924e+00 -1.74527869e-01 2.88656783e+00] [ 2.33197546e+00 3.52871132e+00 -3.32712561e-01 ... -4.92770147e+00 -6.06075644e-01 1.68728292e+00] [-2.99723625e-01 4.39990759e+00 -7.72578955e-01 ... 3.98597860e+00 -1.98027837e+00 7.02076149e+00] ... [ 3.50909829e+00 4.00985181e-01 7.08188057e-01 ... 7.52917647e-01 -5.41731656e-01 2.54807281e+00] [ 8.63847315e-01 -7.47833669e-01 -2.62915850e+00 ... 2.82648110e+00 -2.93811560e+00 2.78264403e+00] [-2.29993653e+00 -1.19661605e+00 7.08995163e-02 ... 5.20069063e-01 3.00262690e+00 -3.71618830e-02]]] [[[ 9.45975065e-01 -1.19651556e-01 2.69643813e-01 ... 2.94277501e+00 1.32165539e+00 8.33353639e-01] [-1.28893244e+00 1.49787986e+00 2.75793463e-01 ... 
1.59671211e+00 9.35421109e-01 6.23292089e-01] [ 8.22039962e-01 -6.14546180e-01 -9.19223547e-01 ... -2.95618105e+00 -1.49239123e+00 1.50972855e+00] ... [-6.69961989e-01 3.30437422e-01 -4.77365442e-02 ... 6.93242729e-01 6.54887617e-01 -6.30583107e-01] [ 1.16492569e+00 -2.25940347e+00 2.15348363e+00 ... -9.95376229e-01 -1.13770235e+00 9.84696746e-01] [-3.52023542e-01 5.71056604e-01 -2.74637133e-01 ... 6.90368056e-01 2.61674315e-01 3.46678525e-01]] [[ 2.66103125e+00 -1.92748451e+00 -2.24784803e+00 ... 2.51835299e+00 3.19416970e-01 -4.92228895e-01] [ 5.47705412e-01 2.57141066e+00 -1.83510876e+00 ... -1.96665871e+00 -1.84756136e+00 -2.10802031e+00] [-1.23060405e+00 -7.27822828e+00 1.76435649e+00 ... -4.12493181e+00 -6.40729380e+00 -5.85880816e-01] ... [-3.50958133e+00 -8.97247970e-01 -2.34350228e+00 ... -2.24745369e+00 2.26198363e+00 -5.37354887e-01] [ 1.84128451e+00 -1.78900555e-01 -2.18781352e+00 ... -4.22377157e+00 -4.40105867e+00 5.93815088e-01] [ 1.62397206e+00 1.20606661e+00 -7.49037206e-01 ... 2.79826671e-01 1.10056794e+00 1.79886854e+00]] [[-8.46679449e-01 -8.54794681e-01 2.61524379e-01 ... 2.45588109e-01 -1.89956808e+00 -4.68865067e-01] [-3.19614792e+00 8.27980220e-01 2.73523331e+00 ... -4.46213675e+00 -3.43717313e+00 -8.24282348e-01] [ 7.03825891e-01 1.25349954e-01 1.94264054e-01 ... -4.24156994e-01 -5.42309284e-01 -1.16911709e+00] ... [-1.07565355e+00 8.72014403e-01 -5.55561364e-01 ... 1.50518417e-01 5.09772396e+00 7.76621997e-01] [ 1.63267648e+00 -1.66747272e+00 -9.32681978e-01 ... 1.72545242e+00 -4.96361303e+00 -3.54722166e+00] [ 1.84775162e+00 -2.03485918e+00 -1.71404779e+00 ... 1.21419156e+00 -2.75911546e+00 2.23412323e+00]] ... [[ 1.17442060e+00 1.83109951e+00 2.24024820e+00 ... 1.29857814e+00 -1.24335063e+00 -1.25360584e+00] [-2.76070762e+00 -2.66730666e+00 -1.62133765e+00 ... -2.89289188e+00 4.73834229e+00 3.75510573e+00] [-2.78072834e-01 1.93415844e+00 -1.16551089e+00 ... -4.48294640e+00 4.11884165e+00 -7.76528060e-01] ... 
[ 5.48897445e-01 1.50705552e+00 -3.33075523e+00 ... -1.78265846e+00 8.95568803e-02 3.58295023e-01] [ 6.14425063e-01 -6.03390276e-01 -2.69696474e+00 ... -2.01980782e+00 -4.80509579e-01 -1.91630912e+00] [ 1.48990905e+00 -1.03078532e+00 2.02630430e-01 ... -3.24212861e+00 1.81480801e+00 6.18125916e-01]] [[-1.77487016e+00 -1.75334203e+00 2.73210716e+00 ... 3.65733743e+00 6.63835630e-02 2.00051117e+00] [-5.72146845e+00 -1.63267887e+00 -1.53442252e+00 ... -1.83470058e+00 4.35225201e+00 -1.41914117e+00] [ 1.55367136e+00 1.06938088e+00 -3.32560015e+00 ... 4.63477993e+00 2.50231934e+00 1.13163948e+00] ... [ 3.32159638e-01 5.98264074e+00 -9.66613352e-01 ... -3.34106475e-01 1.87595260e+00 -2.68984342e+00] [-1.00391662e+00 2.50726056e+00 9.90424335e-01 ... 3.17063928e+00 -3.25345135e+00 -4.12856150e+00] [-1.19097483e+00 -2.40958166e+00 2.21422100e+00 ... 3.07998061e+00 2.20467305e+00 -6.23551369e-01]] [[-7.73690522e-01 3.21715683e-01 4.36993062e-01 ... 2.61131436e-01 1.42657965e-01 1.08891571e+00] [ 1.10532296e+00 -1.38541424e+00 2.45141834e-01 ... -2.49110484e+00 3.35958116e-02 1.84300378e-01] [ 1.71115291e+00 2.17338037e+00 -2.43553162e+00 ... -1.48814768e-01 -2.38712168e+00 6.11359644e+00] ... [ 1.35347962e-01 -6.53994501e-01 -1.46505392e+00 ... -1.28251359e-01 1.44443762e+00 2.66405058e+00] [ 2.75558770e-01 -2.21304321e+00 3.51585358e-01 ... 3.15149188e-01 -2.53515959e+00 1.33084881e+00] [-1.41354346e+00 -6.11712933e-01 -1.06965375e+00 ... 1.91414165e+00 2.93790960e+00 9.64138508e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 2, 2, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_900.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, 2, 1, strides=[12, 4, 2, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[[-2.6516142e+00 -4.3301096e+00 1.3684772e+00 ... -3.5879016e+00 -1.0727316e+00 -3.9003322e+00] [ 4.0153241e+00 -1.1006821e+00 1.9219669e+00 ... 4.7306113e+00 -1.2414588e+00 2.1784887e+00] [-3.4820957e+00 4.2530417e+00 7.8972852e-01 ... -1.0050437e+00 7.8246337e-01 -1.3845506e+00] ... [ 2.1745837e+00 2.6426442e+00 3.2779086e+00 ... 5.8143315e+00 1.8450327e-01 1.3146909e+00] [-8.9231938e-01 -1.2797886e+00 1.3260236e+00 ... 3.1141908e+00 -1.7083523e-01 -3.7267935e+00] [ 2.1200399e+00 -3.9075849e+00 7.0703119e-01 ... 4.5365682e-01 -1.5751981e+00 2.0423437e-02]] [[ 3.0714593e+00 -5.8808455e+00 -1.7189759e-01 ... 1.7976873e+00 -3.2342305e+00 3.2654505e+00] [ 2.4749994e+00 -9.6527839e-01 -2.5808448e-01 ... 2.9661900e-01 -1.9517598e+00 6.1392915e-01] [ 1.3612903e+00 -3.0834665e+00 -5.5458260e-01 ... -3.6158392e+00 -1.7492037e+00 -7.4239963e-01] ... [ 3.0130837e+00 -4.9750967e+00 1.4607599e+00 ... -1.7822421e+00 7.6576807e-02 2.0235765e+00] [-6.9694489e-01 -1.9774791e+00 2.0132847e+00 ... 1.2104145e+00 -3.2066712e+00 -1.1496425e+00] [ 4.9462795e-01 -2.1804476e+00 -1.7269208e+00 ... 
-2.0390744e+00 1.8786001e+00 4.7263384e-01]] [[-2.0948939e+00 -5.0919032e+00 2.4126337e+00 ... 2.3072379e+00 -3.3691127e+00 -1.8500386e+00] [ 3.3785577e+00 6.0720863e+00 3.0880079e+00 ... -3.6744864e+00 -2.0996629e-01 1.5587236e+00] [-4.9425235e+00 -4.0914676e-01 -1.7991523e+00 ... -1.4522237e+00 -4.6641798e+00 -1.9091551e+00] ... [ 2.1348612e+00 2.5034881e+00 3.4869399e+00 ... -5.7468057e+00 -2.6546187e+00 -4.2204165e+00] [-2.8224728e+00 -1.2830976e+00 -5.0448484e+00 ... 2.3002388e+00 3.8292589e+00 -8.1317317e-01] [-1.2311931e+00 2.7721543e+00 -1.1044065e+00 ... -3.1338055e+00 2.2010174e+00 2.6179454e+00]] ... [[ 2.9629107e+00 6.0744019e+00 -2.3797531e+00 ... -1.3269422e+00 -4.2328548e+00 -5.5918312e+00] [-2.0797389e+00 1.6684841e+00 -1.4775790e+00 ... 3.4425232e+00 -2.1616952e+00 3.5913270e+00] [ 6.8618006e-01 -1.3203927e+00 5.6860787e-01 ... 1.7701457e-01 3.2237198e+00 -3.5778162e+00] ... [ 6.3862699e-01 -1.4888335e+00 1.9621742e+00 ... 5.1301283e-01 -4.4555988e+00 4.0822282e+00] [ 1.9378358e+00 -3.4743586e+00 2.3900216e+00 ... -4.0644064e+00 -3.1138361e+00 1.6118364e+00] [ 2.7125747e+00 1.9294994e+00 -1.5399361e+00 ... 8.1851238e-01 3.7458763e+00 3.7285864e+00]] [[-6.4854922e+00 -1.1522797e+00 -1.0474364e-01 ... 4.4714570e+00 1.0116181e+00 -9.9752039e-01] [ 4.0279561e-01 9.9726778e-01 -1.6476197e+00 ... -4.6378624e-01 -3.6801245e+00 8.0479360e-01] [ 1.9263428e+00 -1.4490936e+00 -1.1649594e+00 ... -2.5879135e+00 1.2052089e+00 2.9211206e+00] ... [ 6.4968266e+00 -6.9779763e+00 -1.5369529e+00 ... -3.6055112e-01 -2.3837891e+00 -2.5414081e+00] [-1.3153559e+00 1.9209510e+00 -1.9211046e+00 ... 2.7902287e-01 -1.7714039e+00 1.5563499e+00] [ 4.2676134e+00 -2.7952003e-01 -3.1701989e+00 ... 3.0531108e+00 4.1774197e+00 -2.4845462e+00]] [[ 1.4284300e+00 5.5150442e+00 -2.4587074e-01 ... -2.0104349e+00 2.9047887e+00 3.6365564e+00] [-1.2389705e+00 -6.5467113e-01 -3.0397193e+00 ... -1.0828775e+00 -5.4570694e+00 3.5375130e+00] [-1.7057338e+00 1.8409890e+00 5.1420659e-01 ... 
-2.6593086e-01 3.5136979e+00 -1.4971503e+00] ... [-1.9387422e-02 7.7559367e-02 -6.8330067e-01 ... -2.5313375e+00 -4.7718067e+00 1.5047874e+00] [ 2.4929028e-03 -2.9764087e+00 -3.3281546e+00 ... 1.5011376e+00 4.5417166e+00 5.0284715e+00] [ 1.5974736e+00 2.4346387e+00 -2.9625022e+00 ... -1.0418018e+00 3.4945643e-01 -2.8454692e+00]]] [[[ 8.0964222e+00 -6.8154526e+00 -9.1706878e-01 ... 4.6041597e-02 9.1318406e-02 -1.8277967e+00] [ 1.6372455e+00 3.4533165e+00 -3.3197808e+00 ... 2.1586707e-01 -2.9207089e+00 2.9491997e+00] [ 2.4647582e+00 -1.0131117e+00 3.3526258e+00 ... -2.5018520e+00 -3.7316475e+00 -1.0581647e+00] ... [ 1.5738076e+00 -5.1726403e+00 2.1291766e+00 ... 6.2886739e+00 1.0426061e+00 -2.0701606e+00] [ 8.5548484e-01 1.2457706e+00 1.2375274e+00 ... -9.3974739e-01 7.3561144e-01 2.2896399e+00] [ 2.5744901e+00 -2.2157714e+00 4.3911180e+00 ... -2.6280103e+00 -5.3522577e+00 1.5284144e+00]] [[-2.9681685e+00 -1.2962667e+00 4.4380579e+00 ... -2.4143214e+00 3.2601088e-01 3.3195169e+00] [ 1.2590432e+00 -9.4111073e-01 -2.2771156e+00 ... 6.4103155e+00 1.7084919e+00 -2.6648254e+00] [-5.5577798e+00 3.6903121e+00 -1.3353071e+00 ... -1.1140268e+00 -3.5880404e+00 -5.3433138e-01] ... [ 2.8984962e+00 2.3498654e+00 1.9938200e+00 ... -1.8264591e+00 -6.4368802e-01 1.5522915e+00] [ 1.9401031e+00 1.0621350e-01 1.2093948e+00 ... 4.8757276e-01 -3.4837747e+00 7.9805893e-01] [-5.3861032e+00 -9.4680119e-01 -3.2566907e+00 ... 3.8111553e-01 4.7163353e+00 2.5396907e-01]] [[-6.7374414e-01 -3.6660593e+00 -1.5452993e-01 ... -1.4514193e-01 1.9832240e+00 -6.6193290e-02] [-1.8922762e+00 4.6105671e+00 -2.3144453e+00 ... -2.6732943e+00 -5.0244288e+00 -1.7689706e+00] [ 1.0983764e-01 -4.1003323e+00 -9.3746936e-01 ... 1.0878834e+00 1.0321031e+01 9.6360624e-01] ... [-9.1063166e+00 -2.6842916e+00 2.2455177e+00 ... -1.2765653e+00 -9.2385542e-01 5.6023021e+00] [-2.5594530e+00 -2.7744997e+00 8.9142650e-01 ... -2.5979230e-02 1.0689040e+00 9.3929458e-01] [ 3.3105912e+00 -5.7824879e+00 3.1580265e+00 ... 
-6.5250117e-01 -1.7023814e+00 4.2955484e+00]] ... [[-6.8563781e+00 1.6598293e-01 2.9921765e+00 ... 1.0937661e+00 -8.0495453e+00 -3.5261240e+00] [ 3.3967510e-01 -5.4078693e+00 -1.3583943e-01 ... 3.9047377e+00 3.5033484e+00 -5.9820896e-01] [-6.6556678e+00 -2.3799496e+00 -5.0589776e-01 ... -1.0662925e-01 1.1259644e+00 -4.0866213e+00] ... [ 5.1499133e+00 -7.6383309e+00 -2.8132432e+00 ... -1.6137865e+00 -3.1912851e+00 -7.7778769e-01] [ 5.7420840e+00 3.0172255e+00 -4.7432804e+00 ... 1.4022943e+00 -2.2479000e+00 -1.7015628e+00] [-2.5719931e+00 2.6412175e+00 -2.8100533e+00 ... 4.6223550e+00 2.5165110e+00 -3.5667722e+00]] [[ 3.4543068e+00 3.4268870e+00 -2.3488586e+00 ... 6.9062281e+00 5.0208515e-01 2.5365136e+00] [-1.7226788e+00 1.2522483e+00 3.1046474e-01 ... -3.7833135e+00 -1.0711510e+00 7.8215108e+00] [-2.0970725e-01 3.0261016e+00 8.4434948e+00 ... -3.9558291e+00 2.8845778e+00 3.7709665e+00] ... [ 2.1900151e+00 2.2882085e+00 -2.4950187e+00 ... 2.5770533e+00 -6.4434366e+00 8.8290030e-01] [-5.4226108e+00 4.0175954e-01 -1.4501618e+00 ... -2.9885988e+00 4.2708139e+00 1.0050281e+00] [ 1.8900257e+00 1.7671771e+00 -1.8814787e+00 ... -3.4034069e+00 1.4524986e+00 -1.6760749e+00]] [[ 6.7017668e-01 1.8565274e+00 -5.7864299e+00 ... -1.9292885e+00 -2.1466284e+00 5.9159279e+00] [ 3.0916705e+00 -2.6131055e+00 -2.6076753e+00 ... 1.0455501e+00 -3.0077312e+00 -3.2420259e+00] [ 3.0815580e+00 -6.2519264e+00 2.0937564e+00 ... 3.4718725e-01 5.8735800e+00 -5.0872059e+00] ... [-8.0640936e-01 -1.7217903e+00 3.1011274e+00 ... -5.2147923e+00 2.6079807e+00 2.8856319e-01] [-1.3697003e+00 -4.2340622e+00 2.5437289e-01 ... 4.2744455e-01 -1.1742786e+00 1.3976170e+00] [-6.4666986e+00 3.6558146e+00 -1.9758986e+00 ... 3.5769410e+00 1.4224169e+00 3.8296206e+00]]] [[[-4.6225518e-01 -4.6693168e+00 3.4389739e+00 ... -5.7661762e+00 -8.0671877e-01 -3.4917731e+00] [-1.0373266e+00 2.8835043e-01 -2.5909393e+00 ... 3.0008390e+00 -4.1206164e+00 1.6734008e+00] [-6.6973025e-01 1.3364815e+00 4.3641424e+00 ... 
-5.6434317e+00 -3.7530596e+00 1.7589287e-01] ... [-1.3789973e+00 -2.2299151e+00 -2.8890738e-01 ... 1.9901549e+00 5.7071102e-01 -5.2414384e+00] [ 1.2044688e+00 1.8904012e-01 2.5882638e+00 ... 2.5817373e+00 1.4482269e+00 1.8412584e+00] [ 3.5252535e-01 -8.4799260e-01 1.9035517e+00 ... 2.6131563e+00 -5.3976688e+00 2.5916467e+00]] [[ 4.1630311e+00 -4.5106921e+00 3.5754964e+00 ... -1.4222878e+00 -9.3931854e-02 4.5520496e+00] [ 4.6895475e+00 -3.7023926e+00 -4.2556953e+00 ... 7.7998252e+00 -9.6244192e-01 -1.6582483e+00] [-2.4036918e+00 -1.9834870e-01 -2.0739315e+00 ... -4.7923484e+00 -3.5665269e+00 2.0911148e+00] ... [ 2.7947612e+00 -5.0469905e-01 2.9684370e+00 ... 3.1039829e+00 -1.5444260e+00 -1.8886955e+00] [ 2.9237847e+00 -5.0873203e+00 2.4623201e+00 ... -3.1840377e+00 -8.3406305e-01 -1.4338418e+00] [-1.4510707e+00 -3.1859167e+00 -2.8601246e+00 ... -1.8867643e+00 -2.4107759e-01 3.6839986e+00]] [[-2.1804879e+00 -7.1033621e+00 4.5160580e+00 ... 2.1796148e+00 1.9738984e+00 3.7693655e+00] [-2.2890624e-01 1.0715485e+00 -2.5681627e+00 ... 1.5037907e+00 -6.7734523e+00 -4.0117488e+00] [ 1.8807939e+00 1.8010077e-01 3.3370705e+00 ... -1.1226629e+00 6.2425132e+00 1.4868309e+00] ... [-2.3470213e-01 -5.4863501e-01 1.2183298e+00 ... -3.4939799e+00 -2.6969113e+00 1.5695157e+00] [-3.3042212e+00 1.2693778e+00 2.7553305e-01 ... 1.4268221e+00 1.0458632e+00 -2.1359131e+00] [ 3.8766924e-01 -3.3789847e+00 -6.5885097e-01 ... -1.4809543e+00 4.0145898e+00 1.3232985e+00]] ... [[-3.8378227e+00 4.2379246e+00 2.5235400e+00 ... -5.3921576e+00 -5.4501643e+00 -3.9528675e+00] [ 2.6437304e+00 4.2836833e+00 -2.2856240e+00 ... 2.2112248e+00 2.0446937e+00 -2.4597392e+00] [-5.1082072e+00 -1.6370429e+00 -1.0223440e+00 ... 5.5080738e+00 -2.4382713e+00 -4.0979142e+00] ... [-2.9715137e+00 -3.3450651e+00 -3.4778926e+00 ... -6.8320793e-01 8.6731702e-01 3.1084106e+00] [ 4.0645518e+00 6.9837475e-01 1.8756413e+00 ... -2.5022137e+00 -3.0063457e+00 1.6859572e+00] [-1.2773187e-01 3.0111132e+00 6.8722516e-01 ... 
5.9374332e-01 3.9393394e-03 2.9369612e+00]] [[-1.0796475e+00 3.0360336e+00 -5.5769262e+00 ... 6.2865572e+00 -2.5053859e+00 -2.3598425e+00] [-7.7478561e+00 -3.6413972e+00 2.9617527e-01 ... -5.7913013e+00 2.6030290e+00 3.7734749e+00] [ 2.2008626e+00 1.6880519e+00 6.8779749e-01 ... -3.5825667e+00 -9.1587484e-02 1.1179976e+00] ... [ 8.2915182e+00 -4.8593836e+00 -1.0932676e+00 ... 3.4880590e+00 -9.3499403e+00 1.9281340e+00] [-4.2162385e+00 -8.5606515e-01 -4.0800123e+00 ... -2.4224988e-01 5.5188432e+00 -3.4455328e+00] [ 3.9474862e+00 4.3200083e+00 1.0846868e+00 ... 2.1904054e+00 3.1557610e+00 -6.9781262e-01]] [[-4.7113568e-01 3.0179508e+00 -2.1351578e+00 ... 2.3670418e+00 5.6466460e-01 4.7969885e+00] [ 1.9543089e+00 3.6430349e+00 1.2680680e-01 ... -4.3294716e+00 1.5372638e-01 3.4405744e+00] [-6.2967724e-01 -7.6403731e-01 3.8951373e+00 ... -5.3700500e+00 2.7164354e+00 -1.6040168e+00] ... [ 2.8561456e+00 -3.2006984e+00 -3.9167234e-01 ... -2.1851304e+00 -2.4316540e+00 -1.3825432e+00] [ 1.5813402e+00 -2.7866881e+00 -2.7098644e+00 ... 4.3237710e-01 -5.4047745e-01 3.5714669e+00] [-8.1213939e-01 1.7599576e+00 -4.8400869e+00 ... 1.1640502e+00 1.7362036e+00 1.7232905e-01]]]]]; ov_res: [[[[[-2.65161419e+00 -4.33010960e+00 1.36847723e+00 ... -3.58790159e+00 -1.07273161e+00 -3.90033221e+00] [ 4.01532412e+00 -1.10068226e+00 1.92196691e+00 ... 4.73061085e+00 -1.24145901e+00 2.17848873e+00] [-3.48209548e+00 4.25304174e+00 7.89728522e-01 ... -1.00504398e+00 7.82463610e-01 -1.38455057e+00] ... [ 2.17458344e+00 2.64264417e+00 3.27790809e+00 ... 5.81433201e+00 1.84503347e-01 1.31469083e+00] [-8.92319381e-01 -1.27978837e+00 1.32602358e+00 ... 3.11419129e+00 -1.70835108e-01 -3.72679353e+00] [ 2.12004018e+00 -3.90758491e+00 7.07031190e-01 ... 4.53656822e-01 -1.57519782e+00 2.04234961e-02]] [[ 3.07145929e+00 -5.88084602e+00 -1.71897590e-01 ... 1.79768729e+00 -3.23423052e+00 3.26545048e+00] [ 2.47499967e+00 -9.65278447e-01 -2.58084595e-01 ... 
2.96619028e-01 -1.95175982e+00 6.13929272e-01] [ 1.36129034e+00 -3.08346653e+00 -5.54582715e-01 ... -3.61583924e+00 -1.74920392e+00 -7.42399633e-01] ... [ 3.01308370e+00 -4.97509670e+00 1.46075988e+00 ... -1.78224242e+00 7.65767917e-02 2.02357650e+00] [-6.96944773e-01 -1.97747934e+00 2.01328444e+00 ... 1.21041453e+00 -3.20667124e+00 -1.14964247e+00] [ 4.94627833e-01 -2.18044758e+00 -1.72692084e+00 ... -2.03907466e+00 1.87860000e+00 4.72633958e-01]] [[-2.09489369e+00 -5.09190369e+00 2.41263342e+00 ... 2.30723786e+00 -3.36911249e+00 -1.85003889e+00] [ 3.37855768e+00 6.07208586e+00 3.08800817e+00 ... -3.67448640e+00 -2.09966287e-01 1.55872357e+00] [-4.94252396e+00 -4.09146875e-01 -1.79915202e+00 ... -1.45222366e+00 -4.66417980e+00 -1.90915525e+00] ... [ 2.13486147e+00 2.50348806e+00 3.48693967e+00 ... -5.74680567e+00 -2.65461874e+00 -4.22041655e+00] [-2.82247281e+00 -1.28309774e+00 -5.04484844e+00 ... 2.30023885e+00 3.82925892e+00 -8.13173234e-01] [-1.23119307e+00 2.77215433e+00 -1.10440660e+00 ... -3.13380527e+00 2.20101762e+00 2.61794543e+00]] ... [[ 2.96291065e+00 6.07440186e+00 -2.37975311e+00 ... -1.32694221e+00 -4.23285484e+00 -5.59183121e+00] [-2.07973886e+00 1.66848409e+00 -1.47757900e+00 ... 3.44252276e+00 -2.16169524e+00 3.59132695e+00] [ 6.86179996e-01 -1.32039273e+00 5.68607926e-01 ... 1.77014574e-01 3.22371984e+00 -3.57781625e+00] ... [ 6.38626933e-01 -1.48883355e+00 1.96217418e+00 ... 5.13012886e-01 -4.45559883e+00 4.08222818e+00] [ 1.93783557e+00 -3.47435856e+00 2.39002156e+00 ... -4.06440639e+00 -3.11383605e+00 1.61183667e+00] [ 2.71257472e+00 1.92949939e+00 -1.53993607e+00 ... 8.18512380e-01 3.74587631e+00 3.72858644e+00]] [[-6.48549223e+00 -1.15227985e+00 -1.04743637e-01 ... 4.47145700e+00 1.01161826e+00 -9.97520566e-01] [ 4.02795494e-01 9.97267902e-01 -1.64761984e+00 ... -4.63786334e-01 -3.68012476e+00 8.04793715e-01] [ 1.92634273e+00 -1.44909382e+00 -1.16495955e+00 ... -2.58791327e+00 1.20520878e+00 2.92112064e+00] ... 
[ 6.49682617e+00 -6.97797585e+00 -1.53695261e+00 ... -3.60551059e-01 -2.38378906e+00 -2.54140806e+00] [-1.31535602e+00 1.92095125e+00 -1.92110455e+00 ... 2.79022872e-01 -1.77140403e+00 1.55634987e+00] [ 4.26761389e+00 -2.79520035e-01 -3.17019892e+00 ... 3.05311108e+00 4.17741966e+00 -2.48454642e+00]] [[ 1.42842984e+00 5.51504469e+00 -2.45870739e-01 ... -2.01043510e+00 2.90478897e+00 3.63655686e+00] [-1.23897052e+00 -6.54671133e-01 -3.03971934e+00 ... -1.08287752e+00 -5.45706987e+00 3.53751326e+00] [-1.70573378e+00 1.84098911e+00 5.14207065e-01 ... -2.65930980e-01 3.51369786e+00 -1.49715030e+00] ... [-1.93873625e-02 7.75593668e-02 -6.83300912e-01 ... -2.53133750e+00 -4.77180672e+00 1.50478745e+00] [ 2.49314122e-03 -2.97640872e+00 -3.32815456e+00 ... 1.50113773e+00 4.54171658e+00 5.02847147e+00] [ 1.59747350e+00 2.43463874e+00 -2.96250248e+00 ... -1.04180169e+00 3.49456519e-01 -2.84546947e+00]]] [[[ 8.09642220e+00 -6.81545305e+00 -9.17068720e-01 ... 4.60417159e-02 9.13184062e-02 -1.82779670e+00] [ 1.63724554e+00 3.45331645e+00 -3.31978059e+00 ... 2.15866834e-01 -2.92070889e+00 2.94919944e+00] [ 2.46475816e+00 -1.01311183e+00 3.35262561e+00 ... -2.50185180e+00 -3.73164749e+00 -1.05816472e+00] ... [ 1.57380748e+00 -5.17264032e+00 2.12917662e+00 ... 6.28867388e+00 1.04260612e+00 -2.07016063e+00] [ 8.55484843e-01 1.24577081e+00 1.23752737e+00 ... -9.39747274e-01 7.35611498e-01 2.28963995e+00] [ 2.57449055e+00 -2.21577144e+00 4.39111853e+00 ... -2.62801027e+00 -5.35225725e+00 1.52841449e+00]] [[-2.96816826e+00 -1.29626667e+00 4.43805790e+00 ... -2.41432166e+00 3.26011002e-01 3.31951737e+00] [ 1.25904334e+00 -9.41110730e-01 -2.27711606e+00 ... 6.41031599e+00 1.70849204e+00 -2.66482520e+00] [-5.55777979e+00 3.69031215e+00 -1.33530724e+00 ... -1.11402667e+00 -3.58804059e+00 -5.34331322e-01] ... [ 2.89849615e+00 2.34986544e+00 1.99381995e+00 ... -1.82645893e+00 -6.43687963e-01 1.55229151e+00] [ 1.94010305e+00 1.06213443e-01 1.20939493e+00 ... 
4.87573117e-01 -3.48377419e+00 7.98058748e-01] [-5.38610315e+00 -9.46800947e-01 -3.25669074e+00 ... 3.81115645e-01 4.71633530e+00 2.53968924e-01]] [[-6.73744142e-01 -3.66605902e+00 -1.54529810e-01 ... -1.45141751e-01 1.98322380e+00 -6.61930963e-02] [-1.89227629e+00 4.61056662e+00 -2.31444550e+00 ... -2.67329454e+00 -5.02442837e+00 -1.76897061e+00] [ 1.09837934e-01 -4.10033274e+00 -9.37469423e-01 ... 1.08788347e+00 1.03210297e+01 9.63606119e-01] ... [-9.10631657e+00 -2.68429160e+00 2.24551797e+00 ... -1.27656484e+00 -9.23855186e-01 5.60230207e+00] [-2.55945325e+00 -2.77449989e+00 8.91426563e-01 ... -2.59792600e-02 1.06890392e+00 9.39294636e-01] [ 3.31059122e+00 -5.78248835e+00 3.15802646e+00 ... -6.52501166e-01 -1.70238161e+00 4.29554844e+00]] ... [[-6.85637808e+00 1.65982932e-01 2.99217629e+00 ... 1.09376609e+00 -8.04954529e+00 -3.52612352e+00] [ 3.39675158e-01 -5.40786982e+00 -1.35839790e-01 ... 3.90473771e+00 3.50334859e+00 -5.98208904e-01] [-6.65566730e+00 -2.37994933e+00 -5.05897880e-01 ... -1.06629014e-01 1.12596464e+00 -4.08662081e+00] ... [ 5.14991379e+00 -7.63833094e+00 -2.81324315e+00 ... -1.61378646e+00 -3.19128513e+00 -7.77787805e-01] [ 5.74208403e+00 3.01722550e+00 -4.74328041e+00 ... 1.40229428e+00 -2.24790025e+00 -1.70156276e+00] [-2.57199311e+00 2.64121771e+00 -2.81005335e+00 ... 4.62235498e+00 2.51651096e+00 -3.56677222e+00]] [[ 3.45430684e+00 3.42688680e+00 -2.34885836e+00 ... 6.90622854e+00 5.02085030e-01 2.53651404e+00] [-1.72267878e+00 1.25224829e+00 3.10464859e-01 ... -3.78331399e+00 -1.07115102e+00 7.82151031e+00] [-2.09707215e-01 3.02610183e+00 8.44349480e+00 ... -3.95582914e+00 2.88457751e+00 3.77096629e+00] ... [ 2.19001508e+00 2.28820801e+00 -2.49501872e+00 ... 2.57705307e+00 -6.44343710e+00 8.82900238e-01] [-5.42261076e+00 4.01759624e-01 -1.45016170e+00 ... -2.98859835e+00 4.27081442e+00 1.00502813e+00] [ 1.89002573e+00 1.76717699e+00 -1.88147855e+00 ... 
-3.40340686e+00 1.45249879e+00 -1.67607462e+00]] [[ 6.70176446e-01 1.85652697e+00 -5.78643036e+00 ... -1.92928839e+00 -2.14662838e+00 5.91592789e+00] [ 3.09167027e+00 -2.61310554e+00 -2.60767531e+00 ... 1.04555011e+00 -3.00773120e+00 -3.24202561e+00] [ 3.08155823e+00 -6.25192547e+00 2.09375668e+00 ... 3.47186655e-01 5.87358046e+00 -5.08720541e+00] ... [-8.06409478e-01 -1.72179031e+00 3.10112786e+00 ... -5.21479273e+00 2.60798073e+00 2.88563013e-01] [-1.36970055e+00 -4.23406219e+00 2.54372805e-01 ... 4.27444637e-01 -1.17427874e+00 1.39761698e+00] [-6.46669817e+00 3.65581465e+00 -1.97589862e+00 ... 3.57694077e+00 1.42241669e+00 3.82962060e+00]]] [[[-4.62255061e-01 -4.66931725e+00 3.43897438e+00 ... -5.76617575e+00 -8.06718767e-01 -3.49177313e+00] [-1.03732646e+00 2.88350403e-01 -2.59093928e+00 ... 3.00083923e+00 -4.12061644e+00 1.67340076e+00] [-6.69730365e-01 1.33648145e+00 4.36414242e+00 ... -5.64343166e+00 -3.75305963e+00 1.75892815e-01] ... [-1.37899733e+00 -2.22991538e+00 -2.88907260e-01 ... 1.99015486e+00 5.70710897e-01 -5.24143934e+00] [ 1.20446885e+00 1.89040065e-01 2.58826423e+00 ... 2.58173704e+00 1.44822681e+00 1.84125829e+00] [ 3.52525204e-01 -8.47992599e-01 1.90355170e+00 ... 2.61315608e+00 -5.39766836e+00 2.59164691e+00]] [[ 4.16303062e+00 -4.51069164e+00 3.57549644e+00 ... -1.42228770e+00 -9.39318463e-02 4.55204964e+00] [ 4.68954706e+00 -3.70239282e+00 -4.25569630e+00 ... 7.79982519e+00 -9.62441921e-01 -1.65824783e+00] [-2.40369177e+00 -1.98348641e-01 -2.07393169e+00 ... -4.79234743e+00 -3.56652689e+00 2.09111500e+00] ... [ 2.79476118e+00 -5.04699051e-01 2.96843719e+00 ... 3.10398269e+00 -1.54442585e+00 -1.88869572e+00] [ 2.92378473e+00 -5.08732033e+00 2.46232033e+00 ... -3.18403769e+00 -8.34063053e-01 -1.43384182e+00] [-1.45107055e+00 -3.18591642e+00 -2.86012459e+00 ... -1.88676441e+00 -2.41077483e-01 3.68399906e+00]] [[-2.18048811e+00 -7.10336256e+00 4.51605701e+00 ... 
2.17961454e+00 1.97389853e+00 3.76936555e+00] [-2.28906408e-01 1.07154846e+00 -2.56816292e+00 ... 1.50379097e+00 -6.77345228e+00 -4.01174879e+00] [ 1.88079393e+00 1.80100739e-01 3.33707023e+00 ... -1.12266278e+00 6.24251366e+00 1.48683131e+00] ... [-2.34702244e-01 -5.48634946e-01 1.21833014e+00 ... -3.49398065e+00 -2.69691133e+00 1.56951571e+00] [-3.30422115e+00 1.26937771e+00 2.75533050e-01 ... 1.42682219e+00 1.04586303e+00 -2.13591313e+00] [ 3.87669176e-01 -3.37898445e+00 -6.58850849e-01 ... -1.48095441e+00 4.01458979e+00 1.32329822e+00]] ... [[-3.83782244e+00 4.23792410e+00 2.52354002e+00 ... -5.39215755e+00 -5.45016527e+00 -3.95286703e+00] [ 2.64373064e+00 4.28368330e+00 -2.28562403e+00 ... 2.21122503e+00 2.04469371e+00 -2.45973921e+00] [-5.10820723e+00 -1.63704276e+00 -1.02234387e+00 ... 5.50807381e+00 -2.43827152e+00 -4.09791422e+00] ... [-2.97151399e+00 -3.34506536e+00 -3.47789192e+00 ... -6.83208168e-01 8.67317080e-01 3.10841060e+00] [ 4.06455183e+00 6.98374629e-01 1.87564135e+00 ... -2.50221372e+00 -3.00634599e+00 1.68595719e+00] [-1.27731830e-01 3.01111317e+00 6.87225044e-01 ... 5.93743384e-01 3.93927982e-03 2.93696117e+00]] [[-1.07964706e+00 3.03603363e+00 -5.57692623e+00 ... 6.28655720e+00 -2.50538540e+00 -2.35984230e+00] [-7.74785566e+00 -3.64139748e+00 2.96175361e-01 ... -5.79130173e+00 2.60302877e+00 3.77347493e+00] [ 2.20086265e+00 1.68805182e+00 6.87797368e-01 ... -3.58256721e+00 -9.15875584e-02 1.11799765e+00] ... [ 8.29151726e+00 -4.85938311e+00 -1.09326756e+00 ... 3.48805928e+00 -9.34994125e+00 1.92813385e+00] [-4.21623850e+00 -8.56065214e-01 -4.08001280e+00 ... -2.42249817e-01 5.51884317e+00 -3.44553185e+00] [ 3.94748569e+00 4.32000828e+00 1.08468652e+00 ... 2.19040537e+00 3.15576100e+00 -6.97812557e-01]] [[-4.71135557e-01 3.01795077e+00 -2.13515782e+00 ... 2.36704159e+00 5.64664543e-01 4.79698849e+00] [ 1.95430875e+00 3.64303541e+00 1.26806661e-01 ... 
-4.32947159e+00 1.53726295e-01 3.44057441e+00] [-6.29677117e-01 -7.64037430e-01 3.89513707e+00 ... -5.37004995e+00 2.71643543e+00 -1.60401690e+00] ... [ 2.85614538e+00 -3.20069838e+00 -3.91672373e-01 ... -2.18513083e+00 -2.43165398e+00 -1.38254333e+00] [ 1.58134019e+00 -2.78668809e+00 -2.70986438e+00 ... 4.32377040e-01 -5.40477455e-01 3.57146740e+00] [-8.12139332e-01 1.75995767e+00 -4.84008694e+00 ... 1.16404998e+00 1.73620343e+00 1.72329023e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_902.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.7177 (2,1,1,.,.) = -1.2755 (3,1,1,.,.) = -0.6522 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[-1.53062031e-01 1.58317909e-02 1.56429410e-01 ... 5.99880796e-03 -1.01914741e-01 -8.34853128e-02] [ 1.68369138e+00 -8.35016012e-01 -1.94938600e+00 ... -7.58680031e-02 -1.51023734e+00 -1.04580855e+00] [-7.35100210e-01 -8.30538332e-01 -2.11169526e-01 ... 8.85628909e-02 6.08428001e-01 1.87041730e-01] ... [-3.28313977e-01 1.65890977e-01 -2.69477814e-01 ... -7.76814520e-01 9.63003710e-02 -7.69505426e-02] [ 4.90091667e-02 3.58722031e-01 -2.02892685e+00 ... 1.51472235e+00 -3.63577962e-01 5.03182337e-02] [ 8.68473351e-01 -7.12631106e-01 7.68689692e-01 ... 2.51427978e-01 6.76425517e-01 2.47478187e-01]] [[-1.85305044e-01 -3.79020512e-01 -1.27965724e+00 ... 1.01526670e-01 8.86826932e-01 5.35843432e-01] [-6.14403963e-01 -3.94746453e-01 1.64688453e-01 ... 1.24703646e+00 -2.87718624e-01 1.74626899e+00] [-3.24848086e-01 2.18612775e-01 9.47311670e-02 ... -6.12419367e-01 -5.43745220e-01 5.10322452e-01] ... [ 5.86114645e-01 -8.93992722e-01 1.05223012e+00 ... 
1.13525331e+00 3.82289902e-04 -1.79995582e-01] [-5.96201479e-01 7.21127121e-03 -7.95548186e-02 ... -3.07626635e-01 8.81130636e-01 1.68775707e-01] [ 1.16458163e-01 -8.20997506e-02 3.38975132e-01 ... -4.25391734e-01 -7.72285879e-01 2.75277138e-01]] [[ 5.81636786e-01 -9.14519608e-01 2.74687052e-01 ... -2.54829884e-01 8.70307744e-01 -1.35122299e-01] [-8.27286541e-02 -4.78449136e-01 -3.01426053e-01 ... -4.90594566e-01 2.01065391e-02 -5.61607599e-01] [ 1.25465500e+00 -1.34234533e-01 -8.72224987e-01 ... 3.56000602e-01 -4.45626378e-01 7.69487470e-02] ... [-2.99068481e-01 -3.57563883e-01 -6.65243387e-01 ... 1.11281621e+00 -6.12892807e-01 1.23084463e-01] [-7.28180110e-01 -1.60135806e-01 7.45128214e-01 ... -8.78427088e-01 -9.20090735e-01 -9.59094226e-01] [-8.14482570e-01 1.06826758e+00 2.99548090e-01 ... -1.33595597e-02 -2.44015098e-01 5.68526924e-01]] ... [[ 2.84624815e-01 -1.70923918e-01 -1.39542532e+00 ... 2.86841821e-02 -9.92990315e-01 7.21617877e-01] [ 1.23335890e-01 -2.52231419e-01 1.37471929e-01 ... -8.90452385e-01 -1.27199328e+00 -1.14306495e-01] [-1.09888077e+00 -7.75471330e-01 -4.66555238e-01 ... 1.15284705e+00 6.43775463e-01 -2.07388783e+00] ... [ 1.99962795e-01 -1.03248656e+00 -1.44246531e+00 ... 9.06323083e-03 3.08616698e-01 -2.40999535e-01] [-1.15343380e+00 -9.47006047e-03 -6.41593277e-01 ... 5.16010523e-01 2.14536786e-01 -7.37483263e-01] [ 5.38234189e-02 -3.23746055e-01 -5.06312370e-01 ... 4.49206442e-01 9.80330527e-01 2.36441672e-01]] [[ 9.71758664e-01 4.88385037e-02 -7.53446817e-01 ... 2.98851877e-01 2.74953157e-01 8.48501682e-01] [-5.94329059e-01 -2.79873699e-01 -5.94767630e-01 ... -3.95502329e-01 4.08736140e-01 -7.01990664e-01] [ 1.22876957e-01 5.26999593e-01 1.45163119e+00 ... -4.18279886e-01 1.27438679e-01 2.39890367e-01] ... [-4.38177019e-01 -3.60078841e-01 -8.78932357e-01 ... -1.08823299e+00 4.18880969e-01 7.23790765e-01] [-1.73836425e-02 4.55525428e-01 -6.49276733e-01 ... 
-3.18518639e-01 -9.63719845e-01 -3.90950263e-01] [ 1.63279772e-01 1.93696833e+00 -1.25779259e+00 ... 3.79584491e-01 -1.14852130e+00 2.80875832e-01]] [[-2.64409214e-01 1.27735376e-01 9.89402533e-01 ... 5.72332978e-01 -2.19566703e-01 2.69688725e-01] [ 6.32774413e-01 5.44807196e-01 2.54699916e-01 ... 1.06645536e+00 -1.12841380e+00 -1.41206634e+00] [-1.18353784e+00 8.74715805e-01 9.40983415e-01 ... -1.18610427e-01 -1.37234044e+00 1.25229263e+00] ... [ 7.40885854e-01 4.15402018e-02 -7.58776605e-01 ... -6.81472242e-01 -2.22972155e-01 5.12173653e-01] [-3.81598502e-01 5.13570547e-01 -3.75107646e-01 ... -4.77368124e-02 5.71920156e-01 -6.65732503e-01] [-6.67476892e-01 9.73606467e-01 1.10859478e+00 ... -8.34002435e-01 2.65303791e-01 -6.32365882e-01]]] [[[-1.11465967e+00 -7.45366156e-01 1.41956067e+00 ... 1.22968698e+00 -1.37644184e+00 9.02504846e-02] [ 1.55656946e+00 1.77072227e+00 1.06674457e+00 ... 1.30894005e+00 -6.71188056e-01 -1.97490895e+00] [-3.14252526e-01 2.27490932e-01 -1.21054798e-01 ... -6.27080083e-01 2.44243884e+00 1.11908376e-01] ... [-1.62217283e+00 -2.22634840e+00 1.90732443e+00 ... 1.48038670e-01 1.66925275e+00 5.44400930e-01] [ 7.38453150e-01 6.19433463e-01 7.56737769e-01 ... -2.45180011e-01 -3.16219330e+00 8.32493454e-02] [ 2.13165903e+00 -3.49072784e-01 8.38903368e-01 ... 4.04552519e-01 -6.07731938e-01 7.06298277e-02]] [[ 9.93928373e-01 1.34964287e+00 -2.07968757e-01 ... 9.05230045e-01 6.18416607e-01 -1.92999649e+00] [-5.26482224e-01 -3.64136279e-01 -6.00947142e-01 ... 1.57972574e+00 -1.69077086e+00 1.33048534e+00] [-9.96401131e-01 1.07727572e-02 3.93076599e-01 ... -1.04662323e+00 1.53694725e+00 8.46309662e-01] ... [ 6.16531253e-01 7.21889377e-01 1.17687881e+00 ... -7.12060213e-01 9.82194617e-02 1.64246511e+00] [ 4.52134669e-01 1.61795449e-02 5.00618041e-01 ... -3.32759321e-01 4.43861187e-01 3.87036167e-02] [-4.59398270e-01 -1.43574989e+00 1.01329529e+00 ... 
-1.84506810e+00 2.67337821e-02 -6.50108516e-01]] [[-1.20400083e+00 -1.18603110e-01 -1.61492968e+00 ... -4.85996485e-01 -4.13283825e-01 -4.20507848e-01] [ 1.25861871e+00 7.60595500e-01 8.76084492e-02 ... -3.63834083e-01 -2.09483910e+00 5.73013425e-01] [-1.05669928e+00 -1.20365083e+00 -4.59129721e-01 ... 2.12365627e+00 -2.85769272e+00 -4.25732553e-01] ... [ 1.97446787e+00 1.33249924e-01 -1.40990806e+00 ... -7.55662024e-01 -7.83808053e-01 2.30981946e+00] [-1.59108901e+00 2.49273443e+00 9.50837791e-01 ... -1.15495780e-03 1.31875539e+00 2.03970957e+00] [ 9.38372672e-01 -1.77835608e+00 -3.85898381e-01 ... -1.42981172e+00 1.12904072e+00 -1.76323235e-01]] ... [[ 1.48769045e+00 -3.02217269e+00 -1.27247954e-02 ... 7.10890412e-01 -1.23452745e-01 2.16546923e-01] [ 1.23985219e+00 -1.14506996e+00 2.19842583e-01 ... 2.14476728e+00 8.63653898e-01 1.85058677e+00] [ 2.77979517e+00 3.62937525e-02 1.44188237e+00 ... -1.02649558e+00 -2.75528073e-01 1.08045908e-02] ... [-1.20180273e+00 -1.28805041e+00 5.40724218e-01 ... 1.38834521e-01 -1.05173302e+00 -5.47998309e-01] [ 9.27287877e-01 -8.83804142e-01 -1.27450538e+00 ... -4.30509150e-01 -1.39771903e+00 1.40043235e+00] [-6.93569779e-01 -1.31535244e+00 -2.82085919e+00 ... 2.57698345e+00 3.38343680e-01 -1.29760313e+00]] [[ 1.55645752e+00 2.31559202e-01 1.52336195e-01 ... 7.35002398e-01 1.71130288e+00 -9.69983041e-01] [-1.86612725e-01 -2.04577637e+00 4.89372373e-01 ... 4.92941916e-01 -1.31500375e+00 -1.92815483e-01] [-1.18953419e+00 -8.81697237e-01 -7.73492396e-01 ... 4.11193341e-01 1.37312353e+00 7.18526125e-01] ... [-4.07418966e-01 -9.24095213e-01 3.23191375e-01 ... 8.09842825e-01 -2.09559059e+00 -3.80261362e-01] [ 5.70128918e-01 -3.09125423e-01 -1.16059631e-02 ... 5.26461840e-01 1.61286145e-02 -2.77598286e+00] [ 2.67322928e-01 8.96987796e-01 -2.11075950e+00 ... -5.13011813e-02 3.19727004e-01 3.22219759e-01]] [[ 1.35190642e+00 2.30480403e-01 9.30089429e-02 ... 
1.24161899e+00 -1.20338142e-01 1.27173591e+00] [-4.91796464e-01 5.79502583e-01 2.13093591e+00 ... 1.47450268e-01 -4.41169858e-01 -1.54827392e+00] [ 8.19092810e-01 1.35271525e+00 -1.28970012e-01 ... 9.32775158e-03 8.98895919e-01 -1.54178584e+00] ... [-8.30139041e-01 6.02692544e-01 -8.84728968e-01 ... -2.55071974e+00 -1.28117180e+00 -4.43952590e-01] [ 1.34887576e+00 3.38432789e-01 -1.16437542e+00 ... 1.57072890e+00 -5.00582494e-02 -2.83140033e-01] [-1.20714641e+00 7.62146235e-01 -8.34530056e-01 ... 1.25835729e+00 -2.15224102e-01 -1.50722766e+00]]] [[[ 1.12085605e+00 4.62704688e-01 -5.23403645e-01 ... -7.00118601e-01 5.95840514e-01 -5.99838555e-01] [ 1.03641677e+00 5.21676958e-01 4.89400119e-01 ... -8.03405941e-01 1.10814035e-01 -5.75440042e-02] [ 6.54426157e-01 5.20815194e-01 -1.13016039e-01 ... 4.20461148e-02 7.51910567e-01 5.05566478e-01] ... [ 4.12648283e-02 3.37118149e-01 -4.35773224e-01 ... -3.84863377e-01 -4.36576784e-01 -4.23110843e-01] [-6.01145804e-01 6.04537368e-01 -1.79151297e-01 ... 5.75326443e-01 4.02182490e-01 -1.77474156e-01] [-8.54833364e-01 1.10076316e-01 6.27474844e-01 ... 6.64565742e-01 -7.80814111e-01 5.78252912e-01]] [[-1.26282051e-01 1.01184607e-01 -1.08270094e-01 ... -6.79350972e-01 1.84036523e-01 3.40067655e-01] [-4.88521665e-01 -1.64089948e-01 4.67093498e-01 ... 5.08190729e-02 4.11095053e-01 1.74390003e-01] [ 1.84459150e-01 2.03893349e-01 7.22966552e-01 ... 8.07888567e-01 4.24798727e-01 -3.44280809e-01] ... [-5.96384048e-01 5.97880781e-01 -1.60836726e-01 ... 5.35691679e-01 1.48618326e-01 3.86011988e-01] [ 1.89872220e-01 -1.54333997e+00 -1.41304478e-01 ... 7.42682159e-01 -5.12775593e-02 -3.46120417e-01] [-1.25610459e+00 1.09224297e-01 1.66064158e-01 ... 4.05903608e-01 -2.03670546e-01 -3.00417155e-01]] [[ 6.39477193e-01 -1.24602944e-01 8.87072623e-01 ... 2.95480013e-01 -6.25774920e-01 1.56424865e-01] [ 2.56532311e-01 -6.34708405e-01 -4.43990678e-01 ... 
1.16421473e+00 9.35715377e-01 6.80620372e-02] [-3.66196692e-01 9.13315058e-01 1.44719601e+00 ... -9.05777276e-01 9.44279879e-02 2.35297248e-01] ... [-5.67216039e-01 -7.06585824e-01 1.26531109e-01 ... -5.72752476e-01 1.13141894e-01 -2.21412495e-01] [-1.01398520e-01 -4.07880783e-01 -6.54542267e-01 ... 4.39464629e-01 2.68325031e-01 1.36064804e+00] [-9.20751750e-01 4.62710947e-01 -1.59022078e-01 ... 9.71721485e-02 -5.78603864e-01 -4.55323696e-01]] ... [[-9.12173152e-01 5.45995533e-01 -5.02788842e-01 ... -1.00928295e+00 -1.00789058e+00 3.70357215e-01] [-9.81619596e-01 -6.35439277e-01 1.91471010e-01 ... -5.94571352e-01 7.01818764e-01 -2.21632034e-01] [-2.63287336e-01 7.63213873e-01 -1.99599177e-01 ... 7.90977836e-01 5.45871317e-01 -1.92285955e-01] ... [-3.79903883e-01 6.13493383e-01 5.79178870e-01 ... -2.55101889e-01 6.58530593e-01 -2.88404316e-01] [-5.71619928e-01 -4.19687957e-01 -2.14096963e-01 ... 2.74062365e-01 -1.10870600e+00 8.09993222e-02] [ 5.38139045e-01 -2.04999462e-01 -6.42352700e-01 ... 6.91368401e-01 -1.29112971e+00 4.54659671e-01]] [[-5.88104665e-01 -3.54789793e-01 -4.96028095e-01 ... -5.77940702e-01 -6.58785045e-01 -2.30491176e-01] [ 3.21739972e-01 -4.64235365e-01 -5.32378137e-01 ... 4.18693870e-01 -6.40090048e-01 -4.51011032e-01] [-5.41298762e-02 -1.13169793e-02 2.97691375e-01 ... 3.72889996e-01 -7.79043376e-01 1.81591064e-01] ... [ 1.50490844e+00 -1.68962032e-01 5.66652119e-01 ... -8.47221792e-01 -1.05181110e+00 7.46058449e-02] [-3.58669311e-02 5.56252122e-01 2.69867063e-01 ... -7.08055049e-02 -7.58126304e-02 -4.41712171e-01] [ 3.11496675e-01 8.17058265e-01 5.03254652e-01 ... -2.81012774e-01 -3.05782288e-01 7.44775832e-01]] [[-1.38161510e-01 1.36122093e-01 -7.43190289e-01 ... -5.06781507e-03 -2.69384921e-01 -9.27915752e-01] [-8.68254006e-01 9.36060011e-01 1.69360757e-01 ... 3.46346438e-01 5.40851533e-01 -1.23750877e+00] [-8.82460117e-01 -2.57935792e-01 -4.13022041e-01 ... -5.71295857e-01 3.87398094e-01 -2.91430295e-01] ... 
[-1.63561928e+00 8.08238164e-02 -1.09615314e+00 ... 2.87715435e-01 -2.52100796e-01 -3.81788984e-02] [-3.43509138e-01 -9.44463015e-01 -4.84728664e-01 ... 1.30100417e+00 6.03700161e-01 -9.21082735e-01] [ 1.98593751e-01 -2.35544771e-01 3.89909968e-02 ... -1.25454867e+00 -2.00527925e-02 -4.23452407e-01]]]]]; ov_res: [[[[[-1.53062031e-01 1.58317909e-02 1.56429410e-01 ... 5.99880796e-03 -1.01914741e-01 -8.34853128e-02] [ 1.68369138e+00 -8.35016012e-01 -1.94938600e+00 ... -7.58680031e-02 -1.51023734e+00 -1.04580855e+00] [-7.35100210e-01 -8.30538332e-01 -2.11169526e-01 ... 8.85628909e-02 6.08428001e-01 1.87041730e-01] ... [-3.28313977e-01 1.65890977e-01 -2.69477814e-01 ... -7.76814520e-01 9.63003710e-02 -7.69505426e-02] [ 4.90091667e-02 3.58722031e-01 -2.02892685e+00 ... 1.51472235e+00 -3.63577962e-01 5.03182337e-02] [ 8.68473351e-01 -7.12631106e-01 7.68689692e-01 ... 2.51427978e-01 6.76425517e-01 2.47478187e-01]] [[-1.85305044e-01 -3.79020512e-01 -1.27965724e+00 ... 1.01526670e-01 8.86826932e-01 5.35843432e-01] [-6.14403963e-01 -3.94746453e-01 1.64688453e-01 ... 1.24703646e+00 -2.87718624e-01 1.74626899e+00] [-3.24848086e-01 2.18612775e-01 9.47311670e-02 ... -6.12419367e-01 -5.43745220e-01 5.10322452e-01] ... [ 5.86114645e-01 -8.93992722e-01 1.05223012e+00 ... 1.13525331e+00 3.82289902e-04 -1.79995582e-01] [-5.96201479e-01 7.21127121e-03 -7.95548186e-02 ... -3.07626635e-01 8.81130636e-01 1.68775707e-01] [ 1.16458163e-01 -8.20997506e-02 3.38975132e-01 ... -4.25391734e-01 -7.72285879e-01 2.75277138e-01]] [[ 5.81636786e-01 -9.14519608e-01 2.74687052e-01 ... -2.54829884e-01 8.70307744e-01 -1.35122299e-01] [-8.27286541e-02 -4.78449136e-01 -3.01426053e-01 ... -4.90594566e-01 2.01065391e-02 -5.61607599e-01] [ 1.25465500e+00 -1.34234533e-01 -8.72224987e-01 ... 3.56000602e-01 -4.45626378e-01 7.69487470e-02] ... [-2.99068481e-01 -3.57563883e-01 -6.65243387e-01 ... 1.11281621e+00 -6.12892807e-01 1.23084463e-01] [-7.28180110e-01 -1.60135806e-01 7.45128214e-01 ... 
-8.78427088e-01 -9.20090735e-01 -9.59094226e-01] [-8.14482570e-01 1.06826758e+00 2.99548090e-01 ... -1.33595597e-02 -2.44015098e-01 5.68526924e-01]] ... [[ 2.84624815e-01 -1.70923918e-01 -1.39542532e+00 ... 2.86841821e-02 -9.92990315e-01 7.21617877e-01] [ 1.23335890e-01 -2.52231419e-01 1.37471929e-01 ... -8.90452385e-01 -1.27199328e+00 -1.14306495e-01] [-1.09888077e+00 -7.75471330e-01 -4.66555238e-01 ... 1.15284705e+00 6.43775463e-01 -2.07388783e+00] ... [ 1.99962795e-01 -1.03248656e+00 -1.44246531e+00 ... 9.06323083e-03 3.08616698e-01 -2.40999535e-01] [-1.15343380e+00 -9.47006047e-03 -6.41593277e-01 ... 5.16010523e-01 2.14536786e-01 -7.37483263e-01] [ 5.38234189e-02 -3.23746055e-01 -5.06312370e-01 ... 4.49206442e-01 9.80330527e-01 2.36441672e-01]] [[ 9.71758664e-01 4.88385037e-02 -7.53446817e-01 ... 2.98851877e-01 2.74953157e-01 8.48501682e-01] [-5.94329059e-01 -2.79873699e-01 -5.94767630e-01 ... -3.95502329e-01 4.08736140e-01 -7.01990664e-01] [ 1.22876957e-01 5.26999593e-01 1.45163119e+00 ... -4.18279886e-01 1.27438679e-01 2.39890367e-01] ... [-4.38177019e-01 -3.60078841e-01 -8.78932357e-01 ... -1.08823299e+00 4.18880969e-01 7.23790765e-01] [-1.73836425e-02 4.55525428e-01 -6.49276733e-01 ... -3.18518639e-01 -9.63719845e-01 -3.90950263e-01] [ 1.63279772e-01 1.93696833e+00 -1.25779259e+00 ... 3.79584491e-01 -1.14852130e+00 2.80875832e-01]] [[-2.64409214e-01 1.27735376e-01 9.89402533e-01 ... 5.72332978e-01 -2.19566703e-01 2.69688725e-01] [ 6.32774413e-01 5.44807196e-01 2.54699916e-01 ... 1.06645536e+00 -1.12841380e+00 -1.41206634e+00] [-1.18353784e+00 8.74715805e-01 9.40983415e-01 ... -1.18610427e-01 -1.37234044e+00 1.25229263e+00] ... [ 7.40885854e-01 4.15402018e-02 -7.58776605e-01 ... -6.81472242e-01 -2.22972155e-01 5.12173653e-01] [-3.81598502e-01 5.13570547e-01 -3.75107646e-01 ... -4.77368124e-02 5.71920156e-01 -6.65732503e-01] [-6.67476892e-01 9.73606467e-01 1.10859478e+00 ... 
-8.34002435e-01 2.65303791e-01 -6.32365882e-01]]] [[[-1.11465967e+00 -7.45366156e-01 1.41956067e+00 ... 1.22968698e+00 -1.37644184e+00 9.02504846e-02] [ 1.55656946e+00 1.77072227e+00 1.06674457e+00 ... 1.30894005e+00 -6.71188056e-01 -1.97490895e+00] [-3.14252526e-01 2.27490932e-01 -1.21054798e-01 ... -6.27080083e-01 2.44243884e+00 1.11908376e-01] ... [-1.62217283e+00 -2.22634840e+00 1.90732443e+00 ... 1.48038670e-01 1.66925275e+00 5.44400930e-01] [ 7.38453150e-01 6.19433463e-01 7.56737769e-01 ... -2.45180011e-01 -3.16219330e+00 8.32493454e-02] [ 2.13165903e+00 -3.49072784e-01 8.38903368e-01 ... 4.04552519e-01 -6.07731938e-01 7.06298277e-02]] [[ 9.93928373e-01 1.34964287e+00 -2.07968757e-01 ... 9.05230045e-01 6.18416607e-01 -1.92999649e+00] [-5.26482224e-01 -3.64136279e-01 -6.00947142e-01 ... 1.57972574e+00 -1.69077086e+00 1.33048534e+00] [-9.96401131e-01 1.07727572e-02 3.93076599e-01 ... -1.04662323e+00 1.53694725e+00 8.46309662e-01] ... [ 6.16531253e-01 7.21889377e-01 1.17687881e+00 ... -7.12060213e-01 9.82194617e-02 1.64246511e+00] [ 4.52134669e-01 1.61795449e-02 5.00618041e-01 ... -3.32759321e-01 4.43861187e-01 3.87036167e-02] [-4.59398270e-01 -1.43574989e+00 1.01329529e+00 ... -1.84506810e+00 2.67337821e-02 -6.50108516e-01]] [[-1.20400083e+00 -1.18603110e-01 -1.61492968e+00 ... -4.85996485e-01 -4.13283825e-01 -4.20507848e-01] [ 1.25861871e+00 7.60595500e-01 8.76084492e-02 ... -3.63834083e-01 -2.09483910e+00 5.73013425e-01] [-1.05669928e+00 -1.20365083e+00 -4.59129721e-01 ... 2.12365627e+00 -2.85769272e+00 -4.25732553e-01] ... [ 1.97446787e+00 1.33249924e-01 -1.40990806e+00 ... -7.55662024e-01 -7.83808053e-01 2.30981946e+00] [-1.59108901e+00 2.49273443e+00 9.50837791e-01 ... -1.15495780e-03 1.31875539e+00 2.03970957e+00] [ 9.38372672e-01 -1.77835608e+00 -3.85898381e-01 ... -1.42981172e+00 1.12904072e+00 -1.76323235e-01]] ... [[ 1.48769045e+00 -3.02217269e+00 -1.27247954e-02 ... 
7.10890412e-01 -1.23452745e-01 2.16546923e-01] [ 1.23985219e+00 -1.14506996e+00 2.19842583e-01 ... 2.14476728e+00 8.63653898e-01 1.85058677e+00] [ 2.77979517e+00 3.62937525e-02 1.44188237e+00 ... -1.02649558e+00 -2.75528073e-01 1.08045908e-02] ... [-1.20180273e+00 -1.28805041e+00 5.40724218e-01 ... 1.38834521e-01 -1.05173302e+00 -5.47998309e-01] [ 9.27287877e-01 -8.83804142e-01 -1.27450538e+00 ... -4.30509150e-01 -1.39771903e+00 1.40043235e+00] [-6.93569779e-01 -1.31535244e+00 -2.82085919e+00 ... 2.57698345e+00 3.38343680e-01 -1.29760313e+00]] [[ 1.55645752e+00 2.31559202e-01 1.52336195e-01 ... 7.35002398e-01 1.71130288e+00 -9.69983041e-01] [-1.86612725e-01 -2.04577637e+00 4.89372373e-01 ... 4.92941916e-01 -1.31500375e+00 -1.92815483e-01] [-1.18953419e+00 -8.81697237e-01 -7.73492396e-01 ... 4.11193341e-01 1.37312353e+00 7.18526125e-01] ... [-4.07418966e-01 -9.24095213e-01 3.23191375e-01 ... 8.09842825e-01 -2.09559059e+00 -3.80261362e-01] [ 5.70128918e-01 -3.09125423e-01 -1.16059631e-02 ... 5.26461840e-01 1.61286145e-02 -2.77598286e+00] [ 2.67322928e-01 8.96987796e-01 -2.11075950e+00 ... -5.13011813e-02 3.19727004e-01 3.22219759e-01]] [[ 1.35190642e+00 2.30480403e-01 9.30089429e-02 ... 1.24161899e+00 -1.20338142e-01 1.27173591e+00] [-4.91796464e-01 5.79502583e-01 2.13093591e+00 ... 1.47450268e-01 -4.41169858e-01 -1.54827392e+00] [ 8.19092810e-01 1.35271525e+00 -1.28970012e-01 ... 9.32775158e-03 8.98895919e-01 -1.54178584e+00] ... [-8.30139041e-01 6.02692544e-01 -8.84728968e-01 ... -2.55071974e+00 -1.28117180e+00 -4.43952590e-01] [ 1.34887576e+00 3.38432789e-01 -1.16437542e+00 ... 1.57072890e+00 -5.00582494e-02 -2.83140033e-01] [-1.20714641e+00 7.62146235e-01 -8.34530056e-01 ... 1.25835729e+00 -2.15224102e-01 -1.50722766e+00]]] [[[ 1.12085605e+00 4.62704688e-01 -5.23403645e-01 ... -7.00118601e-01 5.95840514e-01 -5.99838555e-01] [ 1.03641677e+00 5.21676958e-01 4.89400119e-01 ... 
-8.03405941e-01 1.10814035e-01 -5.75440042e-02] [ 6.54426157e-01 5.20815194e-01 -1.13016039e-01 ... 4.20461148e-02 7.51910567e-01 5.05566478e-01] ... [ 4.12648283e-02 3.37118149e-01 -4.35773224e-01 ... -3.84863377e-01 -4.36576784e-01 -4.23110843e-01] [-6.01145804e-01 6.04537368e-01 -1.79151297e-01 ... 5.75326443e-01 4.02182490e-01 -1.77474156e-01] [-8.54833364e-01 1.10076316e-01 6.27474844e-01 ... 6.64565742e-01 -7.80814111e-01 5.78252912e-01]] [[-1.26282051e-01 1.01184607e-01 -1.08270094e-01 ... -6.79350972e-01 1.84036523e-01 3.40067655e-01] [-4.88521665e-01 -1.64089948e-01 4.67093498e-01 ... 5.08190729e-02 4.11095053e-01 1.74390003e-01] [ 1.84459150e-01 2.03893349e-01 7.22966552e-01 ... 8.07888567e-01 4.24798727e-01 -3.44280809e-01] ... [-5.96384048e-01 5.97880781e-01 -1.60836726e-01 ... 5.35691679e-01 1.48618326e-01 3.86011988e-01] [ 1.89872220e-01 -1.54333997e+00 -1.41304478e-01 ... 7.42682159e-01 -5.12775593e-02 -3.46120417e-01] [-1.25610459e+00 1.09224297e-01 1.66064158e-01 ... 4.05903608e-01 -2.03670546e-01 -3.00417155e-01]] [[ 6.39477193e-01 -1.24602944e-01 8.87072623e-01 ... 2.95480013e-01 -6.25774920e-01 1.56424865e-01] [ 2.56532311e-01 -6.34708405e-01 -4.43990678e-01 ... 1.16421473e+00 9.35715377e-01 6.80620372e-02] [-3.66196692e-01 9.13315058e-01 1.44719601e+00 ... -9.05777276e-01 9.44279879e-02 2.35297248e-01] ... [-5.67216039e-01 -7.06585824e-01 1.26531109e-01 ... -5.72752476e-01 1.13141894e-01 -2.21412495e-01] [-1.01398520e-01 -4.07880783e-01 -6.54542267e-01 ... 4.39464629e-01 2.68325031e-01 1.36064804e+00] [-9.20751750e-01 4.62710947e-01 -1.59022078e-01 ... 9.71721485e-02 -5.78603864e-01 -4.55323696e-01]] ... [[-9.12173152e-01 5.45995533e-01 -5.02788842e-01 ... -1.00928295e+00 -1.00789058e+00 3.70357215e-01] [-9.81619596e-01 -6.35439277e-01 1.91471010e-01 ... -5.94571352e-01 7.01818764e-01 -2.21632034e-01] [-2.63287336e-01 7.63213873e-01 -1.99599177e-01 ... 7.90977836e-01 5.45871317e-01 -1.92285955e-01] ... 
[-3.79903883e-01 6.13493383e-01 5.79178870e-01 ... -2.55101889e-01 6.58530593e-01 -2.88404316e-01] [-5.71619928e-01 -4.19687957e-01 -2.14096963e-01 ... 2.74062365e-01 -1.10870600e+00 8.09993222e-02] [ 5.38139045e-01 -2.04999462e-01 -6.42352700e-01 ... 6.91368401e-01 -1.29112971e+00 4.54659671e-01]] [[-5.88104665e-01 -3.54789793e-01 -4.96028095e-01 ... -5.77940702e-01 -6.58785045e-01 -2.30491176e-01] [ 3.21739972e-01 -4.64235365e-01 -5.32378137e-01 ... 4.18693870e-01 -6.40090048e-01 -4.51011032e-01] [-5.41298762e-02 -1.13169793e-02 2.97691375e-01 ... 3.72889996e-01 -7.79043376e-01 1.81591064e-01] ... [ 1.50490844e+00 -1.68962032e-01 5.66652119e-01 ... -8.47221792e-01 -1.05181110e+00 7.46058449e-02] [-3.58669311e-02 5.56252122e-01 2.69867063e-01 ... -7.08055049e-02 -7.58126304e-02 -4.41712171e-01] [ 3.11496675e-01 8.17058265e-01 5.03254652e-01 ... -2.81012774e-01 -3.05782288e-01 7.44775832e-01]] [[-1.38161510e-01 1.36122093e-01 -7.43190289e-01 ... -5.06781507e-03 -2.69384921e-01 -9.27915752e-01] [-8.68254006e-01 9.36060011e-01 1.69360757e-01 ... 3.46346438e-01 5.40851533e-01 -1.23750877e+00] [-8.82460117e-01 -2.57935792e-01 -4.13022041e-01 ... -5.71295857e-01 3.87398094e-01 -2.91430295e-01] ... [-1.63561928e+00 8.08238164e-02 -1.09615314e+00 ... 2.87715435e-01 -2.52100796e-01 -3.81788984e-02] [-3.43509138e-01 -9.44463015e-01 -4.84728664e-01 ... 1.30100417e+00 6.03700161e-01 -9.21082735e-01] [ 1.98593751e-01 -2.35544771e-01 3.89909968e-02 ... -1.25454867e+00 -2.00527925e-02 -4.23452407e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_904.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.6834 (2,1,1,.,.) = -0.6048 (3,1,1,.,.) = -1.2246 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[[-8.35360959e-02 -1.09544718e+00 -1.31558612e-01 ... -9.55187082e-01 9.89167809e-01 6.15758419e-01] [-1.48442698e+00 -9.11651134e-01 -6.11121356e-01 ... 1.13106990e+00 -4.91107374e-01 -2.03232199e-01] [ 9.38818678e-02 -5.49476027e-01 -2.57884443e-01 ... 5.68081260e-01 5.12291133e-01 8.14008892e-01] ... [-3.10371220e-01 -4.84239876e-01 -9.72485960e-01 ... 1.35701358e+00 -7.29031920e-01 1.06060040e+00] [ 5.68775594e-01 -3.39438736e-01 -1.25378275e+00 ... 7.89584160e-01 -1.01903844e+00 6.76867604e-01] [ 5.14630377e-01 8.47223401e-02 4.45192397e-01 ... 2.15270445e-02 -1.46624118e-01 -2.01704919e-01]] [[-1.49750912e+00 9.54487249e-02 -6.62604332e-01 ... 2.02924633e+00 8.15315366e-01 -1.22946147e-02] [-5.51310182e-01 6.33377850e-01 -1.74458015e+00 ... 1.11268066e-01 -3.15208733e-01 5.15113473e-01] [-1.33494115e+00 7.88329184e-01 -8.04582953e-01 ... 3.89781296e-01 5.91602862e-01 -3.14458609e-01] ... [ 3.97824824e-01 -1.01128504e-01 -6.37244821e-01 ... 
-1.17314589e+00 2.18378499e-01 3.46411437e-01] [ 7.02847838e-01 -6.33969530e-02 -1.10562181e+00 ... -3.92821044e-01 5.50121725e-01 -2.19222918e-01] [-4.06215250e-01 -3.13111782e-01 3.21560144e-01 ... 2.46554449e-01 3.31890583e-01 -8.38951766e-01]] [[-3.36589336e-01 1.62358414e-02 -1.07310688e+00 ... -8.36738408e-01 2.46148869e-01 -9.23737049e-01] [-7.60522127e-01 1.33566058e+00 9.96915281e-01 ... 6.13133311e-01 3.56904477e-01 -1.08087385e+00] [-1.25105614e-02 5.91236889e-01 7.92162538e-01 ... 6.90946460e-01 6.71339691e-01 -2.15724885e-01] ... [-4.68819827e-01 -3.85479443e-02 1.79889590e-01 ... -5.71372986e-01 6.92040920e-01 -1.09150028e+00] [ 9.24953938e-01 7.20405877e-01 3.35011721e-01 ... -2.31187075e-01 -1.90904409e-01 6.89440548e-01] [-1.40521324e+00 6.27579570e-01 7.52997875e-01 ... -5.49847782e-01 1.30645502e+00 -2.27671027e-01]] ... [[-4.43258196e-01 4.03950930e-01 -7.95864105e-01 ... -3.88532519e-01 -1.00913942e+00 5.48244655e-01] [-6.67117834e-02 -1.11053744e-02 7.09466696e-01 ... 4.32895541e-01 -2.24690184e-01 4.92015302e-01] [-4.71188307e-01 3.50725859e-01 -6.84899807e-01 ... -3.03280830e-01 8.58549476e-02 -1.13510954e+00] ... [-8.68432939e-01 9.44631159e-01 -5.94579354e-02 ... -1.04583286e-01 1.48133337e-01 -5.81844091e-01] [-4.42814201e-01 3.46044809e-01 6.47388339e-01 ... -2.99390286e-01 -4.95087177e-01 9.54722106e-01] [ 8.64966094e-01 -1.02628803e+00 -8.39780450e-01 ... 3.88048589e-01 -5.88019788e-01 2.03241065e-01]] [[-6.75368786e-01 2.11715841e+00 8.44548285e-01 ... -1.89390734e-01 -6.31287575e-01 3.50469559e-01] [ 1.01203620e+00 1.72995076e-01 -1.06651925e-01 ... -6.81720197e-01 1.27720430e-01 -5.91553748e-01] [-7.01494098e-01 7.43475616e-01 -6.37425065e-01 ... 1.61681056e-01 -6.98834300e-01 9.11365509e-01] ... [-6.91377461e-01 -1.06277847e+00 -3.10459770e-02 ... 8.81099284e-01 -6.56152189e-01 -1.81012630e-01] [ 6.25623643e-01 4.56761152e-01 -3.69156301e-01 ... 
-6.43175781e-01 6.33178428e-02 -3.34070981e-01] [ 1.48795474e+00 -7.80797899e-02 1.59216487e+00 ... -5.46561837e-01 -5.69144666e-01 1.35266638e+00]] [[ 5.91566205e-01 6.76132679e-01 1.74070895e-01 ... -2.51685834e+00 -8.57699394e-01 3.04054171e-01] [-1.19780540e+00 -7.19274163e-01 -1.89920962e-01 ... 1.15889004e-02 -2.37103142e-02 -4.36281711e-01] [-4.87316668e-01 4.93633360e-01 1.53309271e-01 ... -1.18725076e-01 -4.38493758e-01 -3.41534704e-01] ... [-1.75501749e-01 -4.02439862e-01 3.85181725e-01 ... -2.28466451e-01 -4.82135713e-01 1.53570354e-01] [ 1.11163533e+00 -1.92180946e-01 3.43811065e-01 ... 3.56226474e-01 -4.75920141e-01 -1.41099751e+00] [ 1.25938341e-01 3.64999384e-01 -4.28092450e-01 ... 1.13284147e+00 6.77326381e-01 -1.80288285e-01]]] [[[-4.43354160e-01 -1.24482298e+00 -8.48702192e-01 ... 4.93686423e-02 7.62076318e-01 -3.09209377e-01] [ 7.65928745e-01 -1.14928877e+00 7.42386222e-01 ... -1.81298703e-01 8.01030099e-02 9.12323594e-01] [-6.51857629e-02 -2.52493501e-01 6.37833476e-01 ... 9.62221622e-01 -3.29662152e-02 -1.08895987e-01] ... [-2.57059783e-01 9.82739627e-02 -1.13580540e-01 ... 7.46270120e-01 -8.55811179e-01 -1.18613899e+00] [-8.96717161e-02 4.58476484e-01 2.84306884e-01 ... 5.62611103e-01 8.99612457e-02 2.41966411e-01] [ 1.19595870e-01 -5.10138571e-02 -1.14545727e+00 ... 5.14107108e-01 -1.01617813e+00 4.12918210e-01]] [[ 1.37232780e-01 5.05232513e-01 6.53577149e-01 ... 4.37550008e-01 -2.60664105e-01 -3.57389659e-01] [-3.39354187e-01 1.51644766e-01 -3.92866522e-01 ... -7.41760358e-02 8.38317275e-01 -1.28924921e-01] [-4.24620330e-01 2.84428298e-01 -2.72370517e-01 ... 6.05262756e-01 7.95590520e-01 1.16782415e+00] ... [ 7.23377839e-02 -1.11974791e-01 8.85609686e-01 ... -7.15966046e-01 5.35829484e-01 3.62518728e-01] [-4.63661015e-01 4.10204172e-01 -1.66869730e-01 ... -4.73687381e-01 -3.45573038e-01 -3.96932542e-01] [ 1.81207091e-01 5.17964423e-01 -2.96847194e-01 ... 
-2.47623384e-01 8.74783620e-02 2.52014697e-01]] [[-7.55145431e-01 -5.29977739e-01 3.65762152e-02 ... -6.13794744e-01 -8.90346766e-01 1.33442771e+00] [-2.97685534e-01 -1.91969544e-01 -5.20180881e-01 ... 5.31415761e-01 4.10922498e-01 -1.14845788e+00] [-3.49748105e-01 1.34670734e-01 3.92118871e-01 ... -1.75692588e-01 -4.59689766e-01 -2.64515340e-01] ... [-2.28272840e-01 -9.37715769e-02 1.68152526e-01 ... -1.01718569e+00 9.25668597e-01 5.35284579e-01] [-8.91582489e-01 -1.52082890e-01 -9.43590641e-01 ... 9.47797775e-01 -5.52970171e-01 1.23750582e-01] [ 8.78890634e-01 4.54014421e-01 -1.03081159e-01 ... 6.17344439e-01 3.29868942e-01 -6.60968602e-01]] ... [[-4.78140563e-01 3.23253393e-01 5.58438122e-01 ... 2.80790359e-01 -8.20539966e-02 -2.41147861e-01] [ 1.59776163e+00 1.49026997e-02 -2.71768659e-01 ... -1.76713720e-01 1.42248437e-01 -6.08253837e-01] [ 7.22260118e-01 -1.05823672e+00 -3.93345580e-02 ... 2.01183662e-01 5.82317309e-03 -1.48983079e-03] ... [ 2.16527492e-01 5.24151683e-01 -5.64526677e-01 ... -9.88813639e-01 -5.27498603e-01 1.67037964e+00] [-3.24264169e-01 3.61721843e-01 3.62639427e-01 ... -3.23921323e-01 6.57275558e-01 -2.77571157e-02] [-4.79999930e-01 -2.26360142e-01 -1.70647979e-01 ... 1.08045317e-01 -1.28103483e+00 3.60934854e-01]] [[-1.11878443e+00 8.30559790e-01 -7.34020829e-01 ... 3.55352372e-01 1.83297038e-01 -1.80314213e-01] [-3.54953349e-01 5.74458241e-01 9.37654912e-01 ... -4.37070936e-01 8.71910810e-01 -2.71150649e-01] [ 4.51785892e-01 -2.52800167e-01 -9.78091240e-01 ... -7.19662964e-01 -9.84903157e-01 7.76869655e-01] ... [ 4.62862700e-01 1.12915778e+00 6.12008154e-01 ... -2.93582249e-02 4.94210333e-01 -3.76627088e-01] [-7.36447498e-02 1.86354488e-01 6.65692845e-03 ... -7.42468417e-01 3.38219441e-02 5.12249351e-01] [ 2.38419205e-01 7.96456158e-01 -6.79738373e-02 ... -1.32206976e-01 -1.51659548e-01 -2.79449016e-01]] [[ 2.78062671e-01 2.22676858e-01 -6.35641456e-01 ... 
1.02745020e+00 -2.21112609e-01 -8.55790615e-01] [ 5.63903511e-01 -4.36198205e-01 7.30519712e-01 ... -5.40029764e-01 -8.98168206e-01 9.96740401e-01] [ 7.36705065e-01 5.11764288e-01 7.02696562e-01 ... -7.01285601e-02 -5.33094890e-02 -3.96782845e-01] ... [ 1.14625537e+00 -4.49620456e-01 9.02048171e-01 ... 7.24428296e-01 1.37320697e+00 3.79490018e-01] [ 8.01665783e-02 -5.00501394e-01 2.84319222e-01 ... 2.40936335e-02 -8.92850339e-01 -1.03824830e+00] [ 1.73701808e-01 2.87101716e-01 -2.11533234e-01 ... 7.73665428e-01 8.07117391e-03 -2.47344270e-01]]] [[[-9.20154095e-01 -2.69453526e+00 3.38627577e-01 ... 1.62552997e-01 1.46267080e+00 8.87411177e-01] [-7.68895924e-01 1.69308171e-01 5.22924483e-01 ... -6.41324937e-01 2.81363636e-01 5.81927001e-01] [-2.07975786e-02 2.79797578e+00 -9.00157213e-01 ... 3.06896329e-01 4.37441111e-01 -1.83554900e+00] ... [ 1.10812175e+00 2.09908471e-01 -6.17941841e-04 ... -3.04829925e-01 2.53041796e-02 1.26851439e-01] [ 2.89852083e-01 -8.13733399e-01 -1.38362205e+00 ... 1.10439837e+00 2.22928867e-01 -6.03658184e-02] [-1.49663305e+00 1.19935632e+00 9.97072086e-02 ... -1.56909394e+00 1.02159810e+00 1.12882292e+00]] [[ 7.96173692e-01 1.33881783e-02 -2.76139170e-01 ... -9.70372200e-01 2.61012363e+00 1.20086324e+00] [-1.54629040e+00 1.30131042e+00 -7.85016194e-02 ... 2.10386181e+00 -1.27489638e+00 3.36216211e-01] [ 1.71023652e-01 -1.21860467e-01 -9.75199580e-01 ... -5.51263630e-01 1.30834866e+00 6.82281017e-01] ... [ 2.35617065e+00 1.57659799e-01 -1.29119322e-01 ... -6.33582115e-01 -9.80954170e-01 6.50667071e-01] [-1.00492013e+00 -5.14862001e-01 -2.00481820e+00 ... 1.39219448e-01 -6.10267162e-01 -4.58943397e-01] [ 1.77824712e+00 -2.25664705e-01 -6.88922703e-01 ... -2.29414964e+00 3.95735323e-01 3.00452352e-01]] [[ 8.96050751e-01 -8.10394526e-01 1.44082379e+00 ... 2.84172177e-01 -4.04704213e-01 2.46994948e+00] [ 7.61665225e-01 5.07007957e-01 -1.28472045e-01 ... 
1.80973196e+00 1.06248327e-01 4.14542586e-01] [ 1.65926874e-01 -8.82877186e-02 -1.34051275e+00 ... 2.04430223e-01 -1.33675039e+00 -1.00664973e-01] ... [-1.58804998e-01 -5.29956341e-01 3.34630936e-01 ... -1.09768343e+00 9.09189939e-01 -6.34497702e-01] [ 1.00651276e+00 1.30713093e+00 -2.31839705e+00 ... -3.33308369e-01 -5.74479759e-01 -4.73202199e-01] [-2.65102834e-01 -6.89847291e-01 -7.34062374e-01 ... 2.56406951e+00 -1.68935955e-01 -1.39786804e+00]] ... [[-1.30788410e+00 1.66597295e+00 -5.01755893e-01 ... -6.80976510e-01 3.31195998e+00 -3.35702226e-02] [-2.12291384e+00 -1.77147508e+00 1.04911542e+00 ... -4.91617322e-01 -1.40454781e+00 -1.54666197e+00] [-2.65379524e+00 -2.55011439e-01 -2.64195681e-01 ... -8.42431247e-01 5.93228996e-01 -4.98974949e-01] ... [-6.00140214e-01 3.14513110e-02 1.05124390e+00 ... 2.49306634e-01 -8.37083757e-01 6.54752195e-01] [ 7.54673898e-01 -6.63030922e-01 3.71227473e-01 ... -7.46067762e-01 -1.06732750e+00 -1.07200217e+00] [-8.38858485e-01 -1.42186081e+00 3.96626711e-01 ... -2.71908402e-01 -1.14708805e+00 8.49790812e-01]] [[-2.57455325e+00 8.95982087e-01 -7.10383952e-01 ... -4.08452541e-01 -2.64639449e+00 1.13810730e+00] [ 6.02774210e-02 -1.55394420e-01 1.33474243e+00 ... -6.31489098e-01 5.25288701e-01 3.29189628e-01] [ 1.27751672e+00 -1.86347508e+00 2.06044570e-01 ... 9.01448250e-01 -2.98058361e-01 1.29576638e-01] ... [-3.18140656e-01 9.67519283e-01 -3.78799558e+00 ... 7.25144386e-01 -1.73078370e+00 4.51183289e-01] [ 1.37245810e+00 1.78498304e+00 -1.91334224e+00 ... 1.69997156e+00 2.03437018e+00 -1.39873111e+00] [-1.47734892e+00 -2.28575561e-02 4.17147338e-01 ... 1.42460740e+00 9.56643283e-01 -2.74770856e-01]] [[-8.98070395e-01 -4.54710841e-01 -1.09644139e+00 ... 2.09240675e-01 -4.09004778e-01 -1.37377763e+00] [-5.07011354e-01 -1.03226614e+00 -1.88780713e+00 ... 5.74068666e-01 1.94634604e+00 6.89761579e-01] [ 6.63399756e-01 -1.18930078e+00 3.54812384e-01 ... -1.68251288e+00 1.72575021e+00 1.94085872e+00] ... 
[-1.83227211e-01 -1.42512047e+00 -1.02958930e+00 ... 2.42967829e-01 2.33356094e+00 -7.99596757e-02] [ 3.68044555e-01 3.53812814e-01 1.94924510e+00 ... 4.93586361e-02 1.12254465e+00 1.18728602e+00] [ 2.85294461e+00 1.68057513e+00 1.06891108e+00 ... -1.81011736e+00 -7.47522235e-01 -5.39557457e-01]]]]]; ov_res: [[[[[-8.35360959e-02 -1.09544718e+00 -1.31558612e-01 ... -9.55187082e-01 9.89167809e-01 6.15758419e-01] [-1.48442698e+00 -9.11651134e-01 -6.11121356e-01 ... 1.13106990e+00 -4.91107374e-01 -2.03232199e-01] [ 9.38818678e-02 -5.49476027e-01 -2.57884443e-01 ... 5.68081260e-01 5.12291133e-01 8.14008892e-01] ... [-3.10371220e-01 -4.84239876e-01 -9.72485960e-01 ... 1.35701358e+00 -7.29031920e-01 1.06060040e+00] [ 5.68775594e-01 -3.39438736e-01 -1.25378275e+00 ... 7.89584160e-01 -1.01903844e+00 6.76867604e-01] [ 5.14630377e-01 8.47223401e-02 4.45192397e-01 ... 2.15270445e-02 -1.46624118e-01 -2.01704919e-01]] [[-1.49750912e+00 9.54487249e-02 -6.62604332e-01 ... 2.02924633e+00 8.15315366e-01 -1.22946147e-02] [-5.51310182e-01 6.33377850e-01 -1.74458015e+00 ... 1.11268066e-01 -3.15208733e-01 5.15113473e-01] [-1.33494115e+00 7.88329184e-01 -8.04582953e-01 ... 3.89781296e-01 5.91602862e-01 -3.14458609e-01] ... [ 3.97824824e-01 -1.01128504e-01 -6.37244821e-01 ... -1.17314589e+00 2.18378499e-01 3.46411437e-01] [ 7.02847838e-01 -6.33969530e-02 -1.10562181e+00 ... -3.92821044e-01 5.50121725e-01 -2.19222918e-01] [-4.06215250e-01 -3.13111782e-01 3.21560144e-01 ... 2.46554449e-01 3.31890583e-01 -8.38951766e-01]] [[-3.36589336e-01 1.62358414e-02 -1.07310688e+00 ... -8.36738408e-01 2.46148869e-01 -9.23737049e-01] [-7.60522127e-01 1.33566058e+00 9.96915281e-01 ... 6.13133311e-01 3.56904477e-01 -1.08087385e+00] [-1.25105614e-02 5.91236889e-01 7.92162538e-01 ... 6.90946460e-01 6.71339691e-01 -2.15724885e-01] ... [-4.68819827e-01 -3.85479443e-02 1.79889590e-01 ... -5.71372986e-01 6.92040920e-01 -1.09150028e+00] [ 9.24953938e-01 7.20405877e-01 3.35011721e-01 ... 
-2.31187075e-01 -1.90904409e-01 6.89440548e-01] [-1.40521324e+00 6.27579570e-01 7.52997875e-01 ... -5.49847782e-01 1.30645502e+00 -2.27671027e-01]] ... [[-4.43258196e-01 4.03950930e-01 -7.95864105e-01 ... -3.88532519e-01 -1.00913942e+00 5.48244655e-01] [-6.67117834e-02 -1.11053744e-02 7.09466696e-01 ... 4.32895541e-01 -2.24690184e-01 4.92015302e-01] [-4.71188307e-01 3.50725859e-01 -6.84899807e-01 ... -3.03280830e-01 8.58549476e-02 -1.13510954e+00] ... [-8.68432939e-01 9.44631159e-01 -5.94579354e-02 ... -1.04583286e-01 1.48133337e-01 -5.81844091e-01] [-4.42814201e-01 3.46044809e-01 6.47388339e-01 ... -2.99390286e-01 -4.95087177e-01 9.54722106e-01] [ 8.64966094e-01 -1.02628803e+00 -8.39780450e-01 ... 3.88048589e-01 -5.88019788e-01 2.03241065e-01]] [[-6.75368786e-01 2.11715841e+00 8.44548285e-01 ... -1.89390734e-01 -6.31287575e-01 3.50469559e-01] [ 1.01203620e+00 1.72995076e-01 -1.06651925e-01 ... -6.81720197e-01 1.27720430e-01 -5.91553748e-01] [-7.01494098e-01 7.43475616e-01 -6.37425065e-01 ... 1.61681056e-01 -6.98834300e-01 9.11365509e-01] ... [-6.91377461e-01 -1.06277847e+00 -3.10459770e-02 ... 8.81099284e-01 -6.56152189e-01 -1.81012630e-01] [ 6.25623643e-01 4.56761152e-01 -3.69156301e-01 ... -6.43175781e-01 6.33178428e-02 -3.34070981e-01] [ 1.48795474e+00 -7.80797899e-02 1.59216487e+00 ... -5.46561837e-01 -5.69144666e-01 1.35266638e+00]] [[ 5.91566205e-01 6.76132679e-01 1.74070895e-01 ... -2.51685834e+00 -8.57699394e-01 3.04054171e-01] [-1.19780540e+00 -7.19274163e-01 -1.89920962e-01 ... 1.15889004e-02 -2.37103142e-02 -4.36281711e-01] [-4.87316668e-01 4.93633360e-01 1.53309271e-01 ... -1.18725076e-01 -4.38493758e-01 -3.41534704e-01] ... [-1.75501749e-01 -4.02439862e-01 3.85181725e-01 ... -2.28466451e-01 -4.82135713e-01 1.53570354e-01] [ 1.11163533e+00 -1.92180946e-01 3.43811065e-01 ... 3.56226474e-01 -4.75920141e-01 -1.41099751e+00] [ 1.25938341e-01 3.64999384e-01 -4.28092450e-01 ... 
1.13284147e+00 6.77326381e-01 -1.80288285e-01]]] [[[-4.43354160e-01 -1.24482298e+00 -8.48702192e-01 ... 4.93686423e-02 7.62076318e-01 -3.09209377e-01] [ 7.65928745e-01 -1.14928877e+00 7.42386222e-01 ... -1.81298703e-01 8.01030099e-02 9.12323594e-01] [-6.51857629e-02 -2.52493501e-01 6.37833476e-01 ... 9.62221622e-01 -3.29662152e-02 -1.08895987e-01] ... [-2.57059783e-01 9.82739627e-02 -1.13580540e-01 ... 7.46270120e-01 -8.55811179e-01 -1.18613899e+00] [-8.96717161e-02 4.58476484e-01 2.84306884e-01 ... 5.62611103e-01 8.99612457e-02 2.41966411e-01] [ 1.19595870e-01 -5.10138571e-02 -1.14545727e+00 ... 5.14107108e-01 -1.01617813e+00 4.12918210e-01]] [[ 1.37232780e-01 5.05232513e-01 6.53577149e-01 ... 4.37550008e-01 -2.60664105e-01 -3.57389659e-01] [-3.39354187e-01 1.51644766e-01 -3.92866522e-01 ... -7.41760358e-02 8.38317275e-01 -1.28924921e-01] [-4.24620330e-01 2.84428298e-01 -2.72370517e-01 ... 6.05262756e-01 7.95590520e-01 1.16782415e+00] ... [ 7.23377839e-02 -1.11974791e-01 8.85609686e-01 ... -7.15966046e-01 5.35829484e-01 3.62518728e-01] [-4.63661015e-01 4.10204172e-01 -1.66869730e-01 ... -4.73687381e-01 -3.45573038e-01 -3.96932542e-01] [ 1.81207091e-01 5.17964423e-01 -2.96847194e-01 ... -2.47623384e-01 8.74783620e-02 2.52014697e-01]] [[-7.55145431e-01 -5.29977739e-01 3.65762152e-02 ... -6.13794744e-01 -8.90346766e-01 1.33442771e+00] [-2.97685534e-01 -1.91969544e-01 -5.20180881e-01 ... 5.31415761e-01 4.10922498e-01 -1.14845788e+00] [-3.49748105e-01 1.34670734e-01 3.92118871e-01 ... -1.75692588e-01 -4.59689766e-01 -2.64515340e-01] ... [-2.28272840e-01 -9.37715769e-02 1.68152526e-01 ... -1.01718569e+00 9.25668597e-01 5.35284579e-01] [-8.91582489e-01 -1.52082890e-01 -9.43590641e-01 ... 9.47797775e-01 -5.52970171e-01 1.23750582e-01] [ 8.78890634e-01 4.54014421e-01 -1.03081159e-01 ... 6.17344439e-01 3.29868942e-01 -6.60968602e-01]] ... [[-4.78140563e-01 3.23253393e-01 5.58438122e-01 ... 
2.80790359e-01 -8.20539966e-02 -2.41147861e-01] [ 1.59776163e+00 1.49026997e-02 -2.71768659e-01 ... -1.76713720e-01 1.42248437e-01 -6.08253837e-01] [ 7.22260118e-01 -1.05823672e+00 -3.93345580e-02 ... 2.01183662e-01 5.82317309e-03 -1.48983079e-03] ... [ 2.16527492e-01 5.24151683e-01 -5.64526677e-01 ... -9.88813639e-01 -5.27498603e-01 1.67037964e+00] [-3.24264169e-01 3.61721843e-01 3.62639427e-01 ... -3.23921323e-01 6.57275558e-01 -2.77571157e-02] [-4.79999930e-01 -2.26360142e-01 -1.70647979e-01 ... 1.08045317e-01 -1.28103483e+00 3.60934854e-01]] [[-1.11878443e+00 8.30559790e-01 -7.34020829e-01 ... 3.55352372e-01 1.83297038e-01 -1.80314213e-01] [-3.54953349e-01 5.74458241e-01 9.37654912e-01 ... -4.37070936e-01 8.71910810e-01 -2.71150649e-01] [ 4.51785892e-01 -2.52800167e-01 -9.78091240e-01 ... -7.19662964e-01 -9.84903157e-01 7.76869655e-01] ... [ 4.62862700e-01 1.12915778e+00 6.12008154e-01 ... -2.93582249e-02 4.94210333e-01 -3.76627088e-01] [-7.36447498e-02 1.86354488e-01 6.65692845e-03 ... -7.42468417e-01 3.38219441e-02 5.12249351e-01] [ 2.38419205e-01 7.96456158e-01 -6.79738373e-02 ... -1.32206976e-01 -1.51659548e-01 -2.79449016e-01]] [[ 2.78062671e-01 2.22676858e-01 -6.35641456e-01 ... 1.02745020e+00 -2.21112609e-01 -8.55790615e-01] [ 5.63903511e-01 -4.36198205e-01 7.30519712e-01 ... -5.40029764e-01 -8.98168206e-01 9.96740401e-01] [ 7.36705065e-01 5.11764288e-01 7.02696562e-01 ... -7.01285601e-02 -5.33094890e-02 -3.96782845e-01] ... [ 1.14625537e+00 -4.49620456e-01 9.02048171e-01 ... 7.24428296e-01 1.37320697e+00 3.79490018e-01] [ 8.01665783e-02 -5.00501394e-01 2.84319222e-01 ... 2.40936335e-02 -8.92850339e-01 -1.03824830e+00] [ 1.73701808e-01 2.87101716e-01 -2.11533234e-01 ... 7.73665428e-01 8.07117391e-03 -2.47344270e-01]]] [[[-9.20154095e-01 -2.69453526e+00 3.38627577e-01 ... 1.62552997e-01 1.46267080e+00 8.87411177e-01] [-7.68895924e-01 1.69308171e-01 5.22924483e-01 ... 
-6.41324937e-01 2.81363636e-01 5.81927001e-01] [-2.07975786e-02 2.79797578e+00 -9.00157213e-01 ... 3.06896329e-01 4.37441111e-01 -1.83554900e+00] ... [ 1.10812175e+00 2.09908471e-01 -6.17941841e-04 ... -3.04829925e-01 2.53041796e-02 1.26851439e-01] [ 2.89852083e-01 -8.13733399e-01 -1.38362205e+00 ... 1.10439837e+00 2.22928867e-01 -6.03658184e-02] [-1.49663305e+00 1.19935632e+00 9.97072086e-02 ... -1.56909394e+00 1.02159810e+00 1.12882292e+00]] [[ 7.96173692e-01 1.33881783e-02 -2.76139170e-01 ... -9.70372200e-01 2.61012363e+00 1.20086324e+00] [-1.54629040e+00 1.30131042e+00 -7.85016194e-02 ... 2.10386181e+00 -1.27489638e+00 3.36216211e-01] [ 1.71023652e-01 -1.21860467e-01 -9.75199580e-01 ... -5.51263630e-01 1.30834866e+00 6.82281017e-01] ... [ 2.35617065e+00 1.57659799e-01 -1.29119322e-01 ... -6.33582115e-01 -9.80954170e-01 6.50667071e-01] [-1.00492013e+00 -5.14862001e-01 -2.00481820e+00 ... 1.39219448e-01 -6.10267162e-01 -4.58943397e-01] [ 1.77824712e+00 -2.25664705e-01 -6.88922703e-01 ... -2.29414964e+00 3.95735323e-01 3.00452352e-01]] [[ 8.96050751e-01 -8.10394526e-01 1.44082379e+00 ... 2.84172177e-01 -4.04704213e-01 2.46994948e+00] [ 7.61665225e-01 5.07007957e-01 -1.28472045e-01 ... 1.80973196e+00 1.06248327e-01 4.14542586e-01] [ 1.65926874e-01 -8.82877186e-02 -1.34051275e+00 ... 2.04430223e-01 -1.33675039e+00 -1.00664973e-01] ... [-1.58804998e-01 -5.29956341e-01 3.34630936e-01 ... -1.09768343e+00 9.09189939e-01 -6.34497702e-01] [ 1.00651276e+00 1.30713093e+00 -2.31839705e+00 ... -3.33308369e-01 -5.74479759e-01 -4.73202199e-01] [-2.65102834e-01 -6.89847291e-01 -7.34062374e-01 ... 2.56406951e+00 -1.68935955e-01 -1.39786804e+00]] ... [[-1.30788410e+00 1.66597295e+00 -5.01755893e-01 ... -6.80976510e-01 3.31195998e+00 -3.35702226e-02] [-2.12291384e+00 -1.77147508e+00 1.04911542e+00 ... -4.91617322e-01 -1.40454781e+00 -1.54666197e+00] [-2.65379524e+00 -2.55011439e-01 -2.64195681e-01 ... -8.42431247e-01 5.93228996e-01 -4.98974949e-01] ... 
[-6.00140214e-01 3.14513110e-02 1.05124390e+00 ... 2.49306634e-01 -8.37083757e-01 6.54752195e-01] [ 7.54673898e-01 -6.63030922e-01 3.71227473e-01 ... -7.46067762e-01 -1.06732750e+00 -1.07200217e+00] [-8.38858485e-01 -1.42186081e+00 3.96626711e-01 ... -2.71908402e-01 -1.14708805e+00 8.49790812e-01]] [[-2.57455325e+00 8.95982087e-01 -7.10383952e-01 ... -4.08452541e-01 -2.64639449e+00 1.13810730e+00] [ 6.02774210e-02 -1.55394420e-01 1.33474243e+00 ... -6.31489098e-01 5.25288701e-01 3.29189628e-01] [ 1.27751672e+00 -1.86347508e+00 2.06044570e-01 ... 9.01448250e-01 -2.98058361e-01 1.29576638e-01] ... [-3.18140656e-01 9.67519283e-01 -3.78799558e+00 ... 7.25144386e-01 -1.73078370e+00 4.51183289e-01] [ 1.37245810e+00 1.78498304e+00 -1.91334224e+00 ... 1.69997156e+00 2.03437018e+00 -1.39873111e+00] [-1.47734892e+00 -2.28575561e-02 4.17147338e-01 ... 1.42460740e+00 9.56643283e-01 -2.74770856e-01]] [[-8.98070395e-01 -4.54710841e-01 -1.09644139e+00 ... 2.09240675e-01 -4.09004778e-01 -1.37377763e+00] [-5.07011354e-01 -1.03226614e+00 -1.88780713e+00 ... 5.74068666e-01 1.94634604e+00 6.89761579e-01] [ 6.63399756e-01 -1.18930078e+00 3.54812384e-01 ... -1.68251288e+00 1.72575021e+00 1.94085872e+00] ... [-1.83227211e-01 -1.42512047e+00 -1.02958930e+00 ... 2.42967829e-01 2.33356094e+00 -7.99596757e-02] [ 3.68044555e-01 3.53812814e-01 1.94924510e+00 ... 4.93586361e-02 1.12254465e+00 1.18728602e+00] [ 2.85294461e+00 1.68057513e+00 1.06891108e+00 ... -1.81011736e+00 -7.47522235e-01 -5.39557457e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [1, 1, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_906.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.0777 (2,1,1,.,.) = 0.9203 (3,1,1,.,.) = -1.0542 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[-9.78492737e-01 -6.87300026e-01 -2.67645335e+00 1.64293230e+00 -2.39402160e-01 -4.56451513e-02 1.38739061e+00 -1.41258925e-01] [-2.84702718e-01 2.18169403e+00 1.12655520e-01 -2.88178116e-01 -2.23041868e+00 3.33900595e+00 -5.66859879e-02 -2.98282528e+00] [-1.22625756e+00 9.96622622e-01 2.23360240e-01 5.25868595e-01 8.78744185e-01 -3.59677649e+00 3.45676482e-01 -3.26119989e-01] [ 1.33118499e-02 9.15483117e-01 -4.06670570e+00 -3.65729153e-01 5.61948270e-02 8.72191906e-01 5.04102707e-01 2.04911423e+00] [ 2.63048679e-01 -1.61945403e+00 -2.30548531e-01 -1.80984592e+00 -3.70279476e-02 5.93851030e-01 -2.59841204e+00 -3.29106927e-01] [-2.20781970e+00 2.28148520e-01 -3.56259823e-01 1.72969091e+00 -1.03657329e+00 -1.85106015e+00 2.37531424e+00 2.23665333e+00] [ 1.99810576e+00 1.59816360e+00 1.42197311e+00 1.47197270e+00 -2.03876162e+00 5.41258872e-01 2.26109672e+00 -2.35548139e+00] [ 1.69526577e-01 1.88722456e+00 -3.46373177e+00 
2.85492206e+00 -1.72091174e+00 -2.65986252e+00 6.54436350e-02 -1.28854430e+00]] [[-4.65590090e-01 -2.92473960e+00 8.91381681e-01 1.65349162e+00 2.77348995e+00 -5.51852733e-02 -2.40903544e+00 2.71663857e+00] [ 5.67171216e-01 1.78949907e-01 4.58327346e-02 4.76786375e-01 -1.05323839e+00 -2.27979875e+00 1.80244803e-01 1.41596591e+00] [-6.66381776e-01 -2.23055553e+00 -2.36653268e-01 -2.25604922e-02 -6.66710913e-01 -2.04171991e+00 3.18401098e+00 -1.53075051e+00] [ 3.85938811e+00 -2.08176112e+00 -1.86710373e-01 1.25228262e+00 -1.02633679e+00 1.68038177e+00 3.21790338e-01 -2.36483335e+00] [ 1.20122516e+00 -9.40807521e-01 -7.64638782e-01 8.87886137e-02 -3.33926177e+00 8.13324332e-01 -2.68856144e+00 1.43881655e+00] [ 1.22295928e+00 -6.55556982e-03 2.05217862e+00 2.27573848e+00 6.62280738e-01 -9.43003356e-01 3.39184165e+00 -3.66452193e+00] [-1.24090481e+00 2.99249721e+00 -2.16239476e+00 4.36356640e+00 8.93496752e-01 3.26999068e+00 1.30533242e+00 -5.23318686e-02] [ 9.28491652e-01 4.25534278e-01 -8.20119441e-01 -3.82692933e+00 1.13262928e+00 8.50476027e-01 8.71781111e-01 6.90460443e-01]] [[ 1.84345579e+00 5.83128095e-01 3.22828740e-01 2.47172856e+00 -2.97470957e-01 1.53400183e-01 -1.52913630e+00 3.40751123e+00] [-8.04260850e-01 -2.90027785e+00 1.19353163e+00 1.83256173e+00 3.35064554e+00 -1.69489458e-01 1.98396170e+00 -4.92194319e+00] [-5.87112941e-02 1.13413204e-03 -8.29308927e-01 8.92439246e-01 1.37817383e+00 5.53037167e-01 1.23624277e+00 6.86203599e-01] [-4.50166821e-01 -7.36731812e-02 4.87352610e-01 3.88109922e-01 -4.95175086e-02 -8.16813529e-01 1.58032918e+00 -2.05182850e-01] [-1.23976052e+00 2.55664778e+00 7.30877876e-01 2.17820144e+00 2.03443241e+00 7.89748549e-01 -2.77545005e-01 -1.88523197e+00] [ 8.13659072e-01 1.66988790e+00 -2.49116048e-01 -1.96792865e+00 -2.11627388e+00 -9.34509397e-01 2.28846714e-01 -1.11243486e+00] [ 3.46889567e+00 -1.54344395e-01 -5.46647310e-01 1.21178830e+00 7.49590755e-01 -1.60547769e+00 -3.31481075e+00 -1.60698581e+00] [ 1.56185055e+00 
-1.28325121e-02 -1.70315051e+00 3.65822501e-02 8.28304946e-01 2.72423059e-01 1.65151143e+00 1.80681944e-02]] [[-3.87389898e-01 -4.52143097e+00 -1.48752972e-01 2.50448495e-01 -3.55073929e-01 1.20933843e+00 -9.43392396e-01 -8.73863101e-02] [-4.21661556e-01 1.89047563e+00 2.25028729e+00 -1.88012648e+00 -4.70376730e-01 1.43590540e-01 1.42058790e-01 1.73723137e+00] [-9.84360039e-01 -3.91638219e-01 1.63651454e+00 7.62743115e-01 -1.78765166e+00 -1.44702661e+00 7.21761703e-01 -2.86611104e+00] [ 1.63759601e+00 2.38732561e-01 5.02573252e-01 2.69035995e-02 -4.49632198e-01 -2.42551589e+00 1.15183055e+00 -8.53391811e-02] [-1.94819772e+00 2.85677743e+00 6.42728567e-01 5.46165824e-01 -1.58028591e+00 3.78232896e-01 -9.05074924e-03 2.34013462e+00] [ 1.31501079e+00 3.84137535e+00 1.29986107e+00 -2.41731048e-01 -1.72523701e+00 -3.31675744e+00 9.13230002e-01 3.23394346e+00] [-1.14386761e+00 -9.60136712e-01 -7.76867986e-01 3.39789838e-01 1.21275568e+00 1.21727598e+00 3.71144861e-01 -1.91346920e+00] [-3.78076315e-01 -3.98210198e-01 -1.16268802e+00 -6.21080756e-01 -2.46800208e+00 -2.01351786e+00 2.14450598e+00 6.86945975e-01]] [[-1.13552964e+00 1.56496739e+00 -3.31710070e-01 2.08439732e+00 2.61926103e+00 4.94179189e-01 -1.02732384e+00 -1.91128403e-01] [ 2.42448390e-01 -5.61524510e-01 -2.53810239e+00 -1.01601708e+00 4.35476720e-01 3.91263103e+00 3.77855062e-01 -3.21140200e-01] [-3.23591971e+00 -3.75099325e+00 -3.51113588e-01 -2.74807358e+00 -9.13392603e-01 1.06060266e-01 -9.86374557e-01 -7.34336948e+00] [-1.94719124e+00 -1.11207247e+00 -1.32939899e+00 -1.19017124e+00 1.13991559e+00 -2.19560742e+00 7.35862315e-01 -1.15369475e+00] [-1.82935250e+00 -1.53843248e+00 -1.34842038e+00 1.48233747e+00 2.62710524e+00 2.22049665e+00 8.21465552e-01 -6.04840279e-01] [ 1.25383878e+00 1.70986757e-01 1.22999239e+00 -4.24895734e-02 -8.15767288e-01 -5.45756400e-01 -8.95872831e-01 -5.20828128e-01] [-3.71686363e+00 -5.26809978e+00 -1.60307968e+00 -5.35011709e-01 2.47201189e-01 -2.42027082e-02 1.37624681e+00 
-2.10307503e+00] [-2.64994884e+00 7.68516421e-01 1.83160377e+00 7.76947916e-01 -1.07566750e+00 1.10167956e+00 -2.19571257e+00 8.88429940e-01]] [[ 3.40810508e-01 1.39789915e+00 5.39730847e-01 1.20552087e+00 -1.42779303e+00 -3.81793761e+00 -2.13922429e+00 -1.59613073e+00] [-1.20651104e-01 1.65026322e-01 6.69996291e-02 1.70747563e-02 -4.41813111e-01 -3.83954191e+00 2.93283653e+00 1.81346044e-01] [-1.82943344e+00 -6.00522816e-01 8.32531512e-01 2.75867438e+00 -6.03501439e-01 -2.96858406e+00 -3.45338434e-01 -1.12822604e+00] [-3.47541404e+00 -2.35755201e-02 1.62704360e+00 -3.61546087e+00 -1.14496395e-01 -1.17168653e+00 -4.23857689e-01 2.07412314e+00] [-6.41388059e-01 -3.86220455e+00 1.23873673e-01 1.95574677e+00 1.58455718e+00 7.14309394e-01 4.17971611e+00 -2.17526793e+00] [-1.66548276e+00 1.43811166e+00 2.02234054e+00 -6.24941647e-01 8.20497349e-02 1.51936531e+00 1.52253962e+00 1.13461864e+00] [ 3.05645514e+00 -1.79531670e+00 -3.41996336e+00 -1.92671227e+00 -8.87629449e-01 2.62358367e-01 -2.10726237e+00 6.36560857e-01] [-2.78166711e-01 9.81971383e-01 -1.37484789e+00 4.59031612e-01 6.94565594e-01 2.33106399e+00 -1.09193146e+00 -1.10120654e+00]] [[ 1.57454860e+00 -2.48747253e+00 1.78001881e+00 2.57766575e-01 -3.13206244e+00 1.16877770e+00 -1.21355271e+00 -2.05504751e+00] [-3.35643339e+00 -2.63205719e+00 -5.04622984e+00 1.75971282e+00 3.65347296e-01 1.03231740e+00 5.06013918e+00 1.07341552e+00] [ 1.35140216e+00 1.01818621e+00 -1.60865533e+00 7.38165200e-01 -1.74774075e+00 3.62345695e-01 1.53633928e+00 3.10915470e+00] [-2.86057305e+00 -1.76741767e+00 2.47491908e+00 4.89509761e-01 -3.41528147e-01 1.13267553e+00 -1.79170656e+00 1.60699308e+00] [ 5.55055022e-01 -3.44828558e+00 -2.34557700e+00 5.96516895e+00 1.22494650e+00 5.33406019e+00 7.60835290e-01 -3.63312870e-01] [-5.62584639e-01 -2.66548777e+00 -2.98256111e+00 -3.51435959e-01 -1.02385306e+00 2.50951838e+00 3.83583158e-01 -1.57804632e+00] [ 1.52162659e+00 -2.02450180e+00 -6.96063042e-01 -2.08689228e-01 -1.15179265e+00 
-1.72526622e+00 4.72223371e-01 -3.45046282e+00] [ 6.35298908e-01 1.24354267e+00 3.54297304e+00 -3.81496549e+00 -7.50340998e-01 3.78409266e+00 -6.02795899e-01 2.07308388e+00]] [[ 2.45279744e-02 7.36794770e-01 3.52743506e-01 -1.24199212e-01 -2.40107134e-01 -7.14205682e-01 -7.19848424e-02 -4.31208193e-01] [ 1.11103117e+00 1.81745231e+00 -1.66206920e+00 -6.94732298e-04 -2.15776896e+00 -1.24238634e+00 -3.59452105e+00 6.87866986e-01] [-1.45079255e+00 1.24293828e+00 -1.65657663e+00 1.77987099e+00 1.62974000e+00 7.49688029e-01 -1.93357038e+00 -2.44179457e-01] [-8.11453760e-01 2.26390257e-01 1.44459021e+00 -1.39968669e+00 -1.17627203e+00 -1.23116112e+00 2.18054748e+00 2.97759676e+00] [-2.45171356e+00 -9.89412308e-01 1.72914433e+00 1.74760258e+00 3.69837451e+00 -2.72028542e+00 6.43238366e-01 -8.26225758e-01] [-8.58898997e-01 -3.80520654e+00 8.59801650e-01 4.92828377e-02 1.57516491e+00 3.95751745e-01 3.05188990e+00 -1.51434803e+00] [-1.54124582e+00 1.45798790e+00 1.02004230e+00 -1.43514824e+00 1.35023034e+00 2.57854271e+00 -1.03688657e+00 -5.63710392e-01] [-9.73229468e-01 2.50511503e+00 -8.86868358e-01 1.49082854e-01 -1.20704329e+00 -1.13360465e+00 -1.36590648e+00 7.02975094e-01]]]]]; ov_res: [[[[[-9.78492737e-01 -6.87300026e-01 -2.67645335e+00 1.64293230e+00 -2.39402160e-01 -4.56451513e-02 1.38739061e+00 -1.41258925e-01] [-2.84702718e-01 2.18169403e+00 1.12655520e-01 -2.88178116e-01 -2.23041868e+00 3.33900595e+00 -5.66859879e-02 -2.98282528e+00] [-1.22625756e+00 9.96622622e-01 2.23360240e-01 5.25868595e-01 8.78744185e-01 -3.59677649e+00 3.45676482e-01 -3.26119989e-01] [ 1.33118499e-02 9.15483117e-01 -4.06670570e+00 -3.65729153e-01 5.61948270e-02 8.72191906e-01 5.04102707e-01 2.04911423e+00] [ 2.63048679e-01 -1.61945403e+00 -2.30548531e-01 -1.80984592e+00 -3.70279476e-02 5.93851030e-01 -2.59841204e+00 -3.29106927e-01] [-2.20781970e+00 2.28148520e-01 -3.56259823e-01 1.72969091e+00 -1.03657329e+00 -1.85106015e+00 2.37531424e+00 2.23665333e+00] [ 1.99810576e+00 1.59816360e+00 
1.42197311e+00 1.47197270e+00 -2.03876162e+00 5.41258872e-01 2.26109672e+00 -2.35548139e+00] [ 1.69526577e-01 1.88722456e+00 -3.46373177e+00 2.85492206e+00 -1.72091174e+00 -2.65986252e+00 6.54436350e-02 -1.28854430e+00]] [[-4.65590090e-01 -2.92473960e+00 8.91381681e-01 1.65349162e+00 2.77348995e+00 -5.51852733e-02 -2.40903544e+00 2.71663857e+00] [ 5.67171216e-01 1.78949907e-01 4.58327346e-02 4.76786375e-01 -1.05323839e+00 -2.27979875e+00 1.80244803e-01 1.41596591e+00] [-6.66381776e-01 -2.23055553e+00 -2.36653268e-01 -2.25604922e-02 -6.66710913e-01 -2.04171991e+00 3.18401098e+00 -1.53075051e+00] [ 3.85938811e+00 -2.08176112e+00 -1.86710373e-01 1.25228262e+00 -1.02633679e+00 1.68038177e+00 3.21790338e-01 -2.36483335e+00] [ 1.20122516e+00 -9.40807521e-01 -7.64638782e-01 8.87886137e-02 -3.33926177e+00 8.13324332e-01 -2.68856144e+00 1.43881655e+00] [ 1.22295928e+00 -6.55556982e-03 2.05217862e+00 2.27573848e+00 6.62280738e-01 -9.43003356e-01 3.39184165e+00 -3.66452193e+00] [-1.24090481e+00 2.99249721e+00 -2.16239476e+00 4.36356640e+00 8.93496752e-01 3.26999068e+00 1.30533242e+00 -5.23318686e-02] [ 9.28491652e-01 4.25534278e-01 -8.20119441e-01 -3.82692933e+00 1.13262928e+00 8.50476027e-01 8.71781111e-01 6.90460443e-01]] [[ 1.84345579e+00 5.83128095e-01 3.22828740e-01 2.47172856e+00 -2.97470957e-01 1.53400183e-01 -1.52913630e+00 3.40751123e+00] [-8.04260850e-01 -2.90027785e+00 1.19353163e+00 1.83256173e+00 3.35064554e+00 -1.69489458e-01 1.98396170e+00 -4.92194319e+00] [-5.87112941e-02 1.13413204e-03 -8.29308927e-01 8.92439246e-01 1.37817383e+00 5.53037167e-01 1.23624277e+00 6.86203599e-01] [-4.50166821e-01 -7.36731812e-02 4.87352610e-01 3.88109922e-01 -4.95175086e-02 -8.16813529e-01 1.58032918e+00 -2.05182850e-01] [-1.23976052e+00 2.55664778e+00 7.30877876e-01 2.17820144e+00 2.03443241e+00 7.89748549e-01 -2.77545005e-01 -1.88523197e+00] [ 8.13659072e-01 1.66988790e+00 -2.49116048e-01 -1.96792865e+00 -2.11627388e+00 -9.34509397e-01 2.28846714e-01 -1.11243486e+00] [ 
3.46889567e+00 -1.54344395e-01 -5.46647310e-01 1.21178830e+00 7.49590755e-01 -1.60547769e+00 -3.31481075e+00 -1.60698581e+00] [ 1.56185055e+00 -1.28325121e-02 -1.70315051e+00 3.65822501e-02 8.28304946e-01 2.72423059e-01 1.65151143e+00 1.80681944e-02]] [[-3.87389898e-01 -4.52143097e+00 -1.48752972e-01 2.50448495e-01 -3.55073929e-01 1.20933843e+00 -9.43392396e-01 -8.73863101e-02] [-4.21661556e-01 1.89047563e+00 2.25028729e+00 -1.88012648e+00 -4.70376730e-01 1.43590540e-01 1.42058790e-01 1.73723137e+00] [-9.84360039e-01 -3.91638219e-01 1.63651454e+00 7.62743115e-01 -1.78765166e+00 -1.44702661e+00 7.21761703e-01 -2.86611104e+00] [ 1.63759601e+00 2.38732561e-01 5.02573252e-01 2.69035995e-02 -4.49632198e-01 -2.42551589e+00 1.15183055e+00 -8.53391811e-02] [-1.94819772e+00 2.85677743e+00 6.42728567e-01 5.46165824e-01 -1.58028591e+00 3.78232896e-01 -9.05074924e-03 2.34013462e+00] [ 1.31501079e+00 3.84137535e+00 1.29986107e+00 -2.41731048e-01 -1.72523701e+00 -3.31675744e+00 9.13230002e-01 3.23394346e+00] [-1.14386761e+00 -9.60136712e-01 -7.76867986e-01 3.39789838e-01 1.21275568e+00 1.21727598e+00 3.71144861e-01 -1.91346920e+00] [-3.78076315e-01 -3.98210198e-01 -1.16268802e+00 -6.21080756e-01 -2.46800208e+00 -2.01351786e+00 2.14450598e+00 6.86945975e-01]] [[-1.13552964e+00 1.56496739e+00 -3.31710070e-01 2.08439732e+00 2.61926103e+00 4.94179189e-01 -1.02732384e+00 -1.91128403e-01] [ 2.42448390e-01 -5.61524510e-01 -2.53810239e+00 -1.01601708e+00 4.35476720e-01 3.91263103e+00 3.77855062e-01 -3.21140200e-01] [-3.23591971e+00 -3.75099325e+00 -3.51113588e-01 -2.74807358e+00 -9.13392603e-01 1.06060266e-01 -9.86374557e-01 -7.34336948e+00] [-1.94719124e+00 -1.11207247e+00 -1.32939899e+00 -1.19017124e+00 1.13991559e+00 -2.19560742e+00 7.35862315e-01 -1.15369475e+00] [-1.82935250e+00 -1.53843248e+00 -1.34842038e+00 1.48233747e+00 2.62710524e+00 2.22049665e+00 8.21465552e-01 -6.04840279e-01] [ 1.25383878e+00 1.70986757e-01 1.22999239e+00 -4.24895734e-02 -8.15767288e-01 -5.45756400e-01 
-8.95872831e-01 -5.20828128e-01] [-3.71686363e+00 -5.26809978e+00 -1.60307968e+00 -5.35011709e-01 2.47201189e-01 -2.42027082e-02 1.37624681e+00 -2.10307503e+00] [-2.64994884e+00 7.68516421e-01 1.83160377e+00 7.76947916e-01 -1.07566750e+00 1.10167956e+00 -2.19571257e+00 8.88429940e-01]] [[ 3.40810508e-01 1.39789915e+00 5.39730847e-01 1.20552087e+00 -1.42779303e+00 -3.81793761e+00 -2.13922429e+00 -1.59613073e+00] [-1.20651104e-01 1.65026322e-01 6.69996291e-02 1.70747563e-02 -4.41813111e-01 -3.83954191e+00 2.93283653e+00 1.81346044e-01] [-1.82943344e+00 -6.00522816e-01 8.32531512e-01 2.75867438e+00 -6.03501439e-01 -2.96858406e+00 -3.45338434e-01 -1.12822604e+00] [-3.47541404e+00 -2.35755201e-02 1.62704360e+00 -3.61546087e+00 -1.14496395e-01 -1.17168653e+00 -4.23857689e-01 2.07412314e+00] [-6.41388059e-01 -3.86220455e+00 1.23873673e-01 1.95574677e+00 1.58455718e+00 7.14309394e-01 4.17971611e+00 -2.17526793e+00] [-1.66548276e+00 1.43811166e+00 2.02234054e+00 -6.24941647e-01 8.20497349e-02 1.51936531e+00 1.52253962e+00 1.13461864e+00] [ 3.05645514e+00 -1.79531670e+00 -3.41996336e+00 -1.92671227e+00 -8.87629449e-01 2.62358367e-01 -2.10726237e+00 6.36560857e-01] [-2.78166711e-01 9.81971383e-01 -1.37484789e+00 4.59031612e-01 6.94565594e-01 2.33106399e+00 -1.09193146e+00 -1.10120654e+00]] [[ 1.57454860e+00 -2.48747253e+00 1.78001881e+00 2.57766575e-01 -3.13206244e+00 1.16877770e+00 -1.21355271e+00 -2.05504751e+00] [-3.35643339e+00 -2.63205719e+00 -5.04622984e+00 1.75971282e+00 3.65347296e-01 1.03231740e+00 5.06013918e+00 1.07341552e+00] [ 1.35140216e+00 1.01818621e+00 -1.60865533e+00 7.38165200e-01 -1.74774075e+00 3.62345695e-01 1.53633928e+00 3.10915470e+00] [-2.86057305e+00 -1.76741767e+00 2.47491908e+00 4.89509761e-01 -3.41528147e-01 1.13267553e+00 -1.79170656e+00 1.60699308e+00] [ 5.55055022e-01 -3.44828558e+00 -2.34557700e+00 5.96516895e+00 1.22494650e+00 5.33406019e+00 7.60835290e-01 -3.63312870e-01] [-5.62584639e-01 -2.66548777e+00 -2.98256111e+00 -3.51435959e-01 
-1.02385306e+00 2.50951838e+00 3.83583158e-01 -1.57804632e+00] [ 1.52162659e+00 -2.02450180e+00 -6.96063042e-01 -2.08689228e-01 -1.15179265e+00 -1.72526622e+00 4.72223371e-01 -3.45046282e+00] [ 6.35298908e-01 1.24354267e+00 3.54297304e+00 -3.81496549e+00 -7.50340998e-01 3.78409266e+00 -6.02795899e-01 2.07308388e+00]] [[ 2.45279744e-02 7.36794770e-01 3.52743506e-01 -1.24199212e-01 -2.40107134e-01 -7.14205682e-01 -7.19848424e-02 -4.31208193e-01] [ 1.11103117e+00 1.81745231e+00 -1.66206920e+00 -6.94732298e-04 -2.15776896e+00 -1.24238634e+00 -3.59452105e+00 6.87866986e-01] [-1.45079255e+00 1.24293828e+00 -1.65657663e+00 1.77987099e+00 1.62974000e+00 7.49688029e-01 -1.93357038e+00 -2.44179457e-01] [-8.11453760e-01 2.26390257e-01 1.44459021e+00 -1.39968669e+00 -1.17627203e+00 -1.23116112e+00 2.18054748e+00 2.97759676e+00] [-2.45171356e+00 -9.89412308e-01 1.72914433e+00 1.74760258e+00 3.69837451e+00 -2.72028542e+00 6.43238366e-01 -8.26225758e-01] [-8.58898997e-01 -3.80520654e+00 8.59801650e-01 4.92828377e-02 1.57516491e+00 3.95751745e-01 3.05188990e+00 -1.51434803e+00] [-1.54124582e+00 1.45798790e+00 1.02004230e+00 -1.43514824e+00 1.35023034e+00 2.57854271e+00 -1.03688657e+00 -5.63710392e-01] [-9.73229468e-01 2.50511503e+00 -8.86868358e-01 1.49082854e-01 -1.20704329e+00 -1.13360465e+00 -1.36590648e+00 7.02975094e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 1, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_908.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.4984 (2,1,1,.,.) = 2.2978 (3,1,1,.,.) = 0.8085 (1,2,1,.,.) = 1.9882 (2,2,1,.,.) = 0.01 * -3.4830 (3,2,1,.,.) = -0.9665 (1,3,1,.,.) = -1.1578 (2,3,1,.,.) = 0.9579 (3,3,1,.,.) = 1.1908 [ CPUFloatType{3,3,1,1,1} ]]() %8 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%8) fw_re: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 5.025253 -2.4419734 ... 1.3944832 -1.680111 0. ] [ 0. 1.7713405 -4.4400506 ... -1.1641746 -1.8317038 0. ] ... [ 0. 2.2515337 -0.87502426 ... -2.3559403 5.2502103 0. ] [ 0. 1.9079918 0.1261814 ... 1.0520841 -0.53963655 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.085662 6.768356 ... -3.3543174 -1.9530324 0. ] [ 0. 2.9056263 -0.880599 ... 0.6992572 -3.262848 0. ] ... [ 0. -2.459616 -6.8127275 ... -4.9656315 -3.9819539 0. ] [ 0. 3.9210823 0.56984514 ... -1.2658739 -1.0100871 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -5.1757226 1.4076337 ... 1.9325354 -1.8195008 0. ] [ 0. 1.138834 5.1020775 ... -0.13567984 -1.0240884 0. ] ... 
[ 0. 2.879526 -2.2743018 ... -1.2853012 3.504027 0. ] [ 0. 1.3253835 -0.9701155 ... 1.0364082 -1.7827315 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 3.6286101 1.5284191 ... 0.17516406 4.0994 0. ] [ 0. 3.8032274 -2.1921546 ... 1.62908 -3.8590791 0. ] ... [ 0. 3.9617045 4.6117997 ... -0.34222275 0.19702077 0. ] [ 0. 0.90647876 -0.5061157 ... -1.7186338 -2.078112 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -5.605594 -1.4489601 ... -4.934941 2.1825929 0. ] [ 0. -0.8417364 3.835253 ... 2.994169 2.1710076 0. ] ... [ 0. -0.6402961 2.3711903 ... 0.07212793 -2.5700338 0. ] [ 0. -1.2627243 -2.3683553 ... 1.8197908 1.602411 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.0409207 -4.7692375 ... 1.8315268 0.852544 0. ] [ 0. 1.7218829 -0.12253701 ... -2.021826 4.4373345 0. ] ... [ 0. 1.3678694 7.4724364 ... 0.1706778 2.3620856 0. ] [ 0. -4.253355 -2.0234356 ... 1.4819279 -0.08898301 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 5.1288414 0.45406458 ... -0.2691452 -1.1320477 0. ] [ 0. -2.3441603 1.3444153 ... -1.5209514 -1.3230102 0. ] ... [ 0. -0.1135477 -0.11476496 ... 3.0610104 -4.1004996 0. ] [ 0. -1.5095274 -2.2812386 ... -2.0876677 3.0935895 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.7878008 -1.1617203 ... -0.92519027 -4.255997 0. ] [ 0. -2.2054708 3.3004556 ... -0.61596483 4.920357 0. ] ... [ 0. -3.1512032 -1.9232534 ... -0.10364415 -1.0516723 0. ] [ 0. 0.98823637 0.40915602 ... 0.5566974 2.0463705 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 
0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -3.0765965 0.7524112 ... -0.05277037 1.3777347 0. ] [ 0. -1.0705142 2.3870363 ... 1.2133998 0.5518187 0. ] ... [ 0. -1.027488 0.603763 ... 1.2094079 -2.5593903 0. ] [ 0. 0.09569488 0.24101558 ... 0.51112413 1.7273406 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.2260451 -3.7540114 ... 0.8274494 1.4774774 0. ] [ 0. -1.7345157 0.26413172 ... -0.9212826 1.8870233 0. ] ... [ 0. 2.3698325 4.1420174 ... 2.9392962 2.27272 0. ] [ 0. -2.483372 -1.1692878 ... 0.55765504 -0.03949919 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 3.613957 -0.9518621 ... -1.0144823 0.5681666 0. ] [ 0. -1.0041025 -2.8720036 ... -0.5656647 0.40688434 0. ] ... [ 0. -2.230806 1.9910523 ... 0.75441015 -2.112752 0. ] [ 0. -0.86911047 1.3061635 ... -0.4786678 1.4054482 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.2462664 -0.8584057 ... 0.19004023 -1.8170319 0. ] [ 0. -2.4685788 1.7872516 ... -0.4300391 2.1761842 0. ] ... [ 0. -2.4839804 -1.7333959 ... -0.39020336 0.1325255 0. ] [ 0. -0.57423747 -0.2948969 ... 1.022952 1.1333412 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]]]; ov_res: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 5.025253 -2.4419734 ... 1.3944832 -1.680111 0. ] [ 0. 1.7713405 -4.4400506 ... -1.1641746 -1.8317038 0. ] ... [ 0. 2.2515337 -0.87502426 ... -2.3559403 5.2502103 0. ] [ 0. 1.9079918 0.1261814 ... 1.0520841 -0.53963655 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 
-2.085662 6.768356 ... -3.3543174 -1.9530324 0. ] [ 0. 2.9056263 -0.880599 ... 0.6992572 -3.262848 0. ] ... [ 0. -2.459616 -6.8127275 ... -4.9656315 -3.9819539 0. ] [ 0. 3.9210823 0.56984514 ... -1.2658739 -1.0100871 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -5.1757226 1.4076337 ... 1.9325354 -1.8195008 0. ] [ 0. 1.138834 5.1020775 ... -0.13567984 -1.0240884 0. ] ... [ 0. 2.879526 -2.2743018 ... -1.2853012 3.504027 0. ] [ 0. 1.3253835 -0.9701155 ... 1.0364082 -1.7827315 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 3.6286101 1.5284191 ... 0.17516406 4.0994 0. ] [ 0. 3.8032274 -2.1921546 ... 1.62908 -3.8590791 0. ] ... [ 0. 3.9617045 4.6117997 ... -0.34222275 0.19702077 0. ] [ 0. 0.90647876 -0.5061157 ... -1.7186338 -2.078112 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -5.605594 -1.4489601 ... -4.934941 2.1825929 0. ] [ 0. -0.8417364 3.835253 ... 2.994169 2.1710076 0. ] ... [ 0. -0.6402961 2.3711903 ... 0.07212793 -2.5700338 0. ] [ 0. -1.2627243 -2.3683553 ... 1.8197908 1.602411 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.0409207 -4.7692375 ... 1.8315268 0.852544 0. ] [ 0. 1.7218829 -0.12253701 ... -2.021826 4.4373345 0. ] ... [ 0. 1.3678694 7.4724364 ... 0.1706778 2.3620856 0. ] [ 0. -4.253355 -2.0234356 ... 1.4819279 -0.08898301 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 5.1288414 0.45406458 ... -0.2691452 -1.1320477 0. ] [ 0. -2.3441603 1.3444153 ... -1.5209514 -1.3230102 0. ] ... [ 0. -0.1135477 -0.11476496 ... 3.0610104 -4.1004996 0. ] [ 0. -1.5095274 -2.2812386 ... -2.0876677 3.0935895 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. 
... 0. 0. 0. ] [ 0. -1.7878008 -1.1617203 ... -0.92519027 -4.255997 0. ] [ 0. -2.2054708 3.3004556 ... -0.61596483 4.920357 0. ] ... [ 0. -3.1512032 -1.9232534 ... -0.10364415 -1.0516723 0. ] [ 0. 0.98823637 0.40915602 ... 0.5566974 2.0463705 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -3.0765965 0.7524112 ... -0.05277037 1.3777347 0. ] [ 0. -1.0705142 2.3870363 ... 1.2133998 0.5518187 0. ] ... [ 0. -1.027488 0.603763 ... 1.2094079 -2.5593903 0. ] [ 0. 0.09569488 0.24101558 ... 0.51112413 1.7273406 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.2260451 -3.7540114 ... 0.8274494 1.4774774 0. ] [ 0. -1.7345157 0.26413172 ... -0.9212826 1.8870233 0. ] ... [ 0. 2.3698325 4.1420174 ... 2.9392962 2.27272 0. ] [ 0. -2.483372 -1.1692878 ... 0.55765504 -0.03949919 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 3.613957 -0.9518621 ... -1.0144823 0.5681666 0. ] [ 0. -1.0041025 -2.8720036 ... -0.5656647 0.40688434 0. ] ... [ 0. -2.230806 1.9910523 ... 0.75441015 -2.112752 0. ] [ 0. -0.86911047 1.3061635 ... -0.4786678 1.4054482 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.2462664 -0.8584057 ... 0.19004023 -1.8170319 0. ] [ 0. -2.4685788 1.7872516 ... -0.4300391 2.1761842 0. ] ... [ 0. -2.4839804 -1.7333959 ... -0.39020336 0.1325255 0. ] [ 0. -0.57423747 -0.2948969 ... 1.022952 1.1333412 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [3, 1, 3], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_910.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3, 1, 3]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.2496 (2,1,1,.,.) = -0.2900 (3,1,1,.,.) = -0.6032 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[ 0.96349555 1.1670686 -0.22070585 0.08406665] [ 1.242167 -0.5651405 -0.6699094 1.6595069 ] [ 1.0680642 -0.2763708 -2.0246992 0.7303653 ] [-0.6954039 1.6698686 -1.227969 -0.32251763] [ 0.34236327 -2.5082889 0.8661761 0.88001513] [ 1.6770974 -1.3930385 -1.0920507 2.560437 ] [ 0.20187595 -0.4678251 -0.9897491 -0.65850234] [-2.4162455 0.417898 -0.46741298 1.8622141 ]] [[-2.5017467 1.0592753 0.05876662 -2.2903612 ] [-0.6240149 0.85856706 0.42348677 -2.1404178 ] [-0.6710632 -0.55194 0.7380159 -2.8102436 ] [-2.7655303 0.3385198 -0.45788577 1.8722421 ] [ 0.50957733 0.20321077 3.1400752 -0.58082265] [ 1.0245175 0.4279416 1.0717158 1.4012697 ] [ 2.087136 2.2274573 -1.2392755 1.2026532 ] [-0.45898345 0.18296337 -0.04788545 0.52800405]] [[-0.16099723 1.9712114 -1.6089543 -2.0664008 ] [-0.85048175 1.0624827 1.9003886 1.4902945 ] [ 2.1880076 0.1803578 0.70641583 -2.9170587 ] [ 3.0640018 
2.558217 0.72407854 -1.0966966 ] [ 1.6499203 -1.6048837 1.6972796 -0.38565367] [-0.5544629 1.5665188 0.02096349 1.6154361 ] [ 0.3805543 -1.2611403 -2.0100641 -0.35554156] [-0.63055813 -2.1491892 1.5015316 -2.3058727 ]] [[ 0.06541573 0.928426 -2.0374079 -0.46967795] [-0.19245565 -1.1369094 -0.33023423 -1.4973905 ] [-0.4924171 1.588823 -1.1173285 -1.1862242 ] [-1.9194813 -2.0146596 1.4578011 0.12734757] [-1.4696654 -0.5077183 1.9072989 -1.4205949 ] [ 0.09399753 0.8149158 0.4478232 -1.2378308 ] [ 0.820354 -2.9322672 -0.84456396 0.3233559 ] [-0.11662529 0.00473025 1.0897151 -0.2904321 ]]]]]; ov_res: [[[[[ 0.96349555 1.1670686 -0.22070585 0.08406665] [ 1.242167 -0.5651405 -0.6699094 1.6595069 ] [ 1.0680642 -0.2763708 -2.0246992 0.7303653 ] [-0.6954039 1.6698686 -1.227969 -0.32251763] [ 0.34236327 -2.5082889 0.8661761 0.88001513] [ 1.6770974 -1.3930385 -1.0920507 2.560437 ] [ 0.20187595 -0.4678251 -0.9897491 -0.65850234] [-2.4162455 0.417898 -0.46741298 1.8622141 ]] [[-2.5017467 1.0592753 0.05876662 -2.2903612 ] [-0.6240149 0.85856706 0.42348677 -2.1404178 ] [-0.6710632 -0.55194 0.7380159 -2.8102436 ] [-2.7655303 0.3385198 -0.45788577 1.8722421 ] [ 0.50957733 0.20321077 3.1400752 -0.58082265] [ 1.0245175 0.4279416 1.0717158 1.4012697 ] [ 2.087136 2.2274573 -1.2392755 1.2026532 ] [-0.45898345 0.18296337 -0.04788545 0.52800405]] [[-0.16099723 1.9712114 -1.6089543 -2.0664008 ] [-0.85048175 1.0624827 1.9003886 1.4902945 ] [ 2.1880076 0.1803578 0.70641583 -2.9170587 ] [ 3.0640018 2.558217 0.72407854 -1.0966966 ] [ 1.6499203 -1.6048837 1.6972796 -0.38565367] [-0.5544629 1.5665188 0.02096349 1.6154361 ] [ 0.3805543 -1.2611403 -2.0100641 -0.35554156] [-0.63055813 -2.1491892 1.5015316 -2.3058727 ]] [[ 0.06541573 0.928426 -2.0374079 -0.46967795] [-0.19245565 -1.1369094 -0.33023423 -1.4973905 ] [-0.4924171 1.588823 -1.1173285 -1.1862242 ] [-1.9194813 -2.0146596 1.4578011 0.12734757] [-1.4696654 -0.5077183 1.9072989 -1.4205949 ] [ 0.09399753 0.8149158 0.4478232 -1.2378308 ] [ 
0.820354 -2.9322672 -0.84456396 0.3233559 ] [-0.11662529 0.00473025 1.0897151 -0.2904321 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [3, 1, 3], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_912.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3, 1, 3]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.5994 (2,1,1,.,.) = -0.3769 (3,1,1,.,.) = 0.01 * 4.9372 (1,2,1,.,.) = -0.1809 (2,2,1,.,.) = 0.01 * -3.6918 (3,2,1,.,.) = 0.2088 (1,3,1,.,.) = -1.6460 (2,3,1,.,.) = -0.6528 (3,3,1,.,.) = -1.0665 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. 
... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 
0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 
0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_914.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.1226 (2,1,1,.,.) = 0.8078 (3,1,1,.,.) = 0.2960 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[-8.24096382e-01 -2.54954958e+00 -2.22099495e+00 -1.68521845e+00 -4.48679894e-01 -1.63584924e+00 9.76666808e-02 1.77285039e+00 1.40688396e+00 -1.20530844e-01] [-2.20888972e+00 7.06445694e-01 2.24661708e+00 -8.17734361e-01 1.52112532e+00 -1.49974659e-01 -1.35043478e+00 -1.64935827e+00 -1.84150422e+00 2.10667300e+00] [ 3.51892412e-01 -1.17006910e+00 -2.51874852e+00 -3.40829402e-01 -1.87747169e+00 1.11903489e+00 1.68637908e+00 -2.65039015e+00 1.27648771e+00 1.27567351e+00] [-4.75651741e-01 2.77332020e+00 -3.23792756e-01 -2.75718427e+00 3.64288270e-01 -1.55083513e+00 5.25988817e-01 2.00789785e+00 -1.17090702e+00 -1.40015900e+00] [-9.60539579e-01 -1.02546513e+00 5.13406731e-02 1.86176908e+00 3.13048625e+00 -3.28258276e-01 -2.61753201e+00 -6.69655859e-01 -1.47236919e+00 -1.07415128e+00] [ 4.92764384e-01 5.27243555e-01 1.80535960e+00 -1.08089459e+00 9.72534359e-01 -3.87211949e-01 
-8.28826964e-01 -2.06749487e+00 2.27894783e+00 1.29396808e+00] [-1.22135019e+00 1.09257019e+00 -1.24598074e+00 -1.09827662e+00 1.15983284e+00 7.09729254e-01 -1.11255741e+00 -1.47966015e+00 1.32367387e-01 2.88215613e+00] [-8.04137588e-01 3.51049721e-01 7.72525668e-02 1.11002445e+00 1.91645935e-01 1.00621819e+00 -1.26802814e+00 -3.32821131e-01 3.11134905e-01 2.83951640e-01] [-5.50957024e-01 -2.70032120e+00 1.80554748e+00 -2.01225758e+00 -1.06333637e+00 -5.09582460e-01 6.25199199e-01 8.02674651e-01 -1.14492729e-01 -1.33098102e+00] [ 5.38378358e-01 2.16588378e+00 1.63622808e+00 8.61799717e-01 -1.33125246e+00 -7.72812683e-03 -1.11783302e+00 -1.17903423e+00 9.29917276e-01 1.42016065e+00]] [[-9.64531600e-01 -1.29207408e+00 -1.74850273e+00 2.17033195e+00 -1.39883566e+00 1.56398869e+00 1.79975259e+00 -2.49210191e+00 1.97383070e+00 7.39248335e-01] [-1.73440146e+00 -7.79259503e-01 -5.11512935e-01 2.47635484e+00 4.01310802e-01 -2.58615327e+00 2.51849860e-01 5.44716418e-01 7.00747082e-03 1.62880230e+00] [ 4.53157097e-01 -1.14510393e+00 -2.53985167e+00 -8.46142709e-01 2.87517428e-01 -8.58354270e-01 -1.66358531e+00 5.92031442e-02 -9.54171360e-01 -9.44171026e-02] [ 5.73876739e-01 1.87547672e+00 4.09466662e-02 -2.96896338e+00 -1.09626162e+00 -2.66666222e+00 -4.40259337e-01 -1.43090427e+00 3.50427479e-01 3.62358832e+00] [ 2.47421646e+00 1.02977264e+00 -1.06313109e+00 1.05242336e+00 -1.12122452e+00 7.44869053e-01 -2.00418830e+00 2.20468402e+00 -1.52113426e+00 1.33193660e+00] [-2.10387659e+00 2.79229140e+00 -9.88999009e-01 -1.39976394e+00 -9.44649041e-01 -7.86573410e-01 5.29369116e-01 -2.12635398e+00 -1.55978644e+00 6.54472530e-01] [-7.90811479e-01 1.45686936e+00 1.96742088e-01 1.33595026e+00 -8.42764080e-01 5.98761559e-01 -3.20253432e-01 6.01524711e-01 7.23231733e-01 7.84037173e-01] [ 1.77756083e+00 7.71643579e-01 -9.29947197e-01 1.44449425e+00 -1.86171412e+00 -5.58576047e-01 3.50662321e-01 2.13352442e+00 3.59974980e-01 2.34080553e+00] [ 5.77722192e-01 3.91597480e-01 8.46124530e-01 
4.87065583e-01 -3.28837824e+00 -3.85124892e-01 6.77852094e-01 1.77458763e+00 -1.32674897e+00 7.17711329e-01] [ 3.82409513e-01 -2.66043019e+00 2.23091078e+00 -2.09468317e+00 -5.20484328e-01 -1.22957730e+00 1.55958378e+00 7.29080796e-01 -4.81869131e-01 -7.18771935e-01]] [[ 5.14432847e-01 -3.95166469e+00 -6.93225861e-01 -3.27943593e-01 -6.84936106e-01 9.57094371e-01 -2.07141805e+00 -1.88178837e+00 1.27195686e-01 9.62763429e-01] [-9.39972997e-01 2.52250314e+00 -5.02106786e-01 1.55360699e+00 3.34704369e-01 -1.44283056e+00 -6.77913487e-01 -1.12023883e-01 -1.93692899e+00 5.53355277e-01] [-8.10234427e-01 2.31341457e+00 -2.02710223e+00 -4.45143819e-01 -9.61699843e-01 9.84546319e-02 -5.34648955e-01 6.85253263e-01 -3.59181571e+00 -4.87931212e-03] [-1.11269534e+00 2.75371885e+00 7.14333534e-01 -2.29177618e+00 -2.11584353e+00 7.40145564e-01 9.05127525e-01 1.32164013e+00 -8.51308346e-01 5.09801209e-02] [ 5.76095223e-01 -5.77714324e-01 -6.59522355e-01 1.03950477e+00 -1.82908630e+00 1.70250088e-01 -4.75369811e-01 2.76280139e-02 2.39461589e+00 -5.38406491e-01] [ 6.99952900e-01 2.09929061e+00 -6.02142751e-01 1.07863116e+00 -7.47009754e-01 -1.06485224e+00 8.81012022e-01 -2.00508857e+00 -4.44049984e-01 -9.52269912e-01] [ 5.18643148e-02 1.60777104e+00 5.60437322e-01 -1.98333561e+00 1.47206533e+00 -2.39752984e+00 -9.31088448e-01 1.91444993e+00 -1.83852768e+00 2.20073089e-01] [ 1.74216047e-01 8.02571058e-01 1.00641561e+00 -6.88960731e-01 8.30715448e-02 -3.11934888e-01 -6.52959406e-01 1.15411615e+00 -1.64801145e+00 6.76349699e-01] [-2.41006255e-01 2.39969540e+00 -7.53920436e-01 1.79320049e+00 -1.69362748e+00 1.23029006e+00 5.32186925e-01 1.14986384e-02 4.98045802e-01 -8.58307719e-01] [-1.80251789e+00 -1.07948983e+00 3.89170021e-01 3.32775712e-01 8.79186392e-01 5.13209939e-01 -1.65471685e+00 -1.51718163e+00 1.10833311e+00 4.99528289e-01]] [[ 3.03271323e-01 7.12548256e-01 -1.06366789e+00 -2.17369819e+00 -4.93721873e-01 8.63843560e-01 2.34697148e-01 -3.49327707e+00 3.83112848e-01 
1.09116328e+00] [-1.50050342e+00 -7.98059762e-01 -8.24088454e-01 2.52632856e+00 1.79354346e+00 -5.14928363e-02 1.47871470e+00 4.08913881e-01 6.35564566e-01 -1.22370076e+00] [-9.94371772e-01 -1.26799393e+00 -4.84313458e-01 1.50976312e+00 1.56733748e-02 -5.26341140e-01 -7.20412061e-02 -2.89074373e+00 1.63390672e+00 1.33448958e+00] [ 1.64606020e-01 -2.16074729e+00 -1.07754540e+00 -1.07759702e+00 2.33287156e-01 -2.66492292e-02 6.98998094e-01 7.18533844e-02 -1.91606358e-01 6.37801945e-01] [-2.07689738e+00 4.93243843e-01 9.69632447e-01 -7.02855229e-01 5.18185437e-01 2.39999965e-01 4.07024920e-01 4.82027352e-01 3.40849221e-01 -7.61354923e-01] [ 2.04906225e-01 -5.82010806e-01 -3.98979932e-01 1.93512475e+00 2.24461627e+00 -1.06029415e+00 1.48179814e-01 -5.26762187e-01 1.20675838e+00 -1.82125747e+00] [-2.00088763e+00 -9.18447793e-01 4.23743501e-02 1.39978611e+00 -1.25254595e+00 -1.49258777e-01 -2.59697819e+00 -8.41294289e-01 4.66259271e-01 1.08277760e-01] [ 7.54425049e-01 8.81013691e-01 -1.04529321e+00 -4.59997088e-01 -2.11718529e-02 -3.86334252e+00 -4.21028435e-01 2.00572491e+00 -3.80880199e-02 -3.07339448e-02] [ 7.06391752e-01 4.44392890e-01 -1.38842535e+00 -2.66775250e+00 1.77720892e+00 3.26985073e+00 3.81003380e-01 -1.12618661e+00 8.32956314e-01 4.17032051e+00] [-1.89245892e+00 9.09935474e-01 -3.39513588e+00 -2.06581807e+00 4.98991132e-01 2.23875713e+00 -5.62698960e-01 -1.91131616e+00 9.40914333e-01 -3.67284715e-02]] [[-1.90694654e+00 7.28556693e-01 1.31136715e-01 -9.44792449e-01 5.10607898e-01 -8.43257308e-02 -6.26447052e-02 -2.92880297e-01 2.72496104e-01 3.81223291e-01] [ 1.41627657e+00 1.28821123e+00 2.28924942e+00 -6.34484172e-01 2.27706599e+00 -1.93513405e+00 4.52198774e-01 -8.18330720e-02 3.07674384e+00 -8.36632177e-02] [-1.54903364e+00 -1.25494003e+00 1.52926159e+00 3.38795018e+00 3.00685811e+00 2.32998347e+00 1.65063396e-01 1.31321669e-01 -1.49226296e+00 4.80199605e-01] [ 2.17192203e-01 -1.26466990e+00 -6.15245700e-01 -1.02540874e+00 -1.62103391e+00 
-1.10626841e+00 1.10374250e-01 -1.26803219e+00 2.34397101e+00 -2.14434648e+00] [ 1.94255483e+00 -7.75114179e-01 -1.66212821e+00 -2.13391209e+00 -2.40343285e+00 1.06258595e+00 1.19880661e-02 -2.84049630e-01 -9.47936356e-01 -1.93133748e+00] [ 6.08488798e-01 -7.76315153e-01 3.05697441e-01 -5.88080704e-01 -2.84068298e+00 1.34695530e+00 -1.66180956e+00 -1.51159620e+00 -3.29913187e+00 2.08284831e+00] [ 2.52289802e-01 1.09987867e+00 1.16001248e+00 -1.57010651e+00 2.63581848e+00 1.94304681e+00 -1.60523999e+00 -8.72603118e-01 1.38527691e+00 6.37212634e-01] [-8.97634625e-01 1.00156760e+00 1.29983330e+00 5.33188522e-01 -6.00969419e-02 1.71436286e+00 5.56491971e-01 -1.23382962e+00 1.74784875e+00 -1.46071494e+00] [-3.53917456e+00 8.73211622e-01 7.98134089e-01 6.36968374e-01 -5.40907502e-01 1.00800395e+00 -9.40640688e-01 -8.21503550e-02 -1.03048074e+00 2.32626987e+00] [ 6.74698651e-01 -1.32348943e+00 5.06324470e-01 -1.37720597e+00 5.21581650e-01 2.20882869e+00 -9.61296260e-01 -1.72326958e+00 4.45847921e-02 6.75564468e-01]] [[-1.10799909e+00 -1.10491443e+00 8.04688334e-01 4.94864613e-01 -2.10416126e+00 -7.46613324e-01 1.50511348e+00 -1.55169070e-01 -2.11197639e+00 -1.67545772e+00] [-4.64213103e-01 -1.53489721e+00 -6.68280050e-02 9.99139249e-01 9.04689878e-02 1.32315648e+00 1.94332945e+00 2.30488157e+00 -1.05017412e+00 5.70966005e-01] [ 1.67045867e+00 1.41767991e+00 4.43595976e-01 4.96420026e-01 3.62298250e+00 -3.14717007e+00 -1.15953159e+00 -1.29387426e+00 4.57930177e-01 -4.49173599e-01] [-1.17484236e+00 3.33744466e-01 1.20783293e+00 6.03539608e-02 6.92968130e-01 -1.07323432e+00 2.71129274e+00 1.07229304e+00 2.91852206e-01 -1.43850148e+00] [-1.89133048e+00 -2.20492649e+00 -3.06414723e-01 -4.87546176e-01 -1.41538322e+00 7.62524605e-01 -3.06507736e-03 -4.71263766e-01 1.03149843e+00 1.44283903e+00] [ 1.25346470e+00 1.34791243e+00 -5.01634777e-01 -1.08340168e+00 -1.01672375e+00 -3.27780986e+00 1.41665554e+00 2.20758200e+00 -6.55885339e-01 2.72950077e+00] [ 1.40120602e+00 
-1.71306515e+00 8.31310987e-01 2.30942488e+00 -2.02855825e+00 9.03726697e-01 -7.71370754e-02 -6.88743055e-01 -1.00957930e+00 -1.33536613e+00] [ 3.29313374e+00 -3.84618640e-01 -8.92451584e-01 -4.81227279e-01 -2.00511646e+00 -1.57530367e+00 1.66051662e+00 -9.44021165e-01 1.55333042e+00 -8.84082198e-01] [-4.17711467e-01 3.60261053e-01 2.60720062e+00 -1.12365842e+00 1.65454650e+00 -2.96288848e-01 2.07839355e-01 -6.49258435e-01 3.66936834e-03 -1.84981179e+00] [ 6.54296875e-01 -1.07158613e+00 7.29272366e-01 -7.83991277e-01 2.30743027e+00 1.80065799e+00 5.04044533e-01 1.60688794e+00 5.36200643e-01 1.08131504e+00]] [[-8.04002583e-01 6.14476562e-01 -1.25551558e+00 1.03390825e+00 -6.72895432e-01 1.94338453e+00 -1.23875892e+00 -5.58478057e-01 2.06605840e+00 2.03667060e-01] [-2.01454687e+00 3.46108103e+00 2.41667342e+00 2.32123399e+00 -3.70389867e+00 2.15615347e-01 -3.26614332e+00 3.17221260e+00 4.87415463e-01 9.10572350e-01] [ 1.76816106e+00 -1.16240263e+00 7.73776472e-02 1.75288633e-01 -2.48256087e+00 1.44235992e+00 -2.96654153e+00 -2.73725152e-01 5.05260885e-01 3.40079665e-02] [-3.32375258e-01 -1.28891206e+00 -1.60575855e+00 6.72218204e-01 -5.48519135e-01 9.16826427e-01 -3.46596301e-01 7.24856183e-02 1.76553023e+00 -1.26140642e+00] [-1.43923044e+00 3.27707529e-01 -1.84906352e+00 -2.15207905e-01 -2.80736470e+00 8.31080258e-01 -7.35069036e-01 2.97021776e-01 3.75101447e-01 4.54937786e-01] [-6.90974817e-02 -1.17989469e+00 -8.56983840e-01 -1.03861415e+00 -3.56012225e+00 -1.28069293e+00 -2.78863966e-01 1.09246457e+00 7.11654067e-01 -1.58127159e-01] [-1.67739308e+00 -2.60701966e+00 2.82092333e+00 1.30600870e-01 8.09110880e-01 1.61045277e+00 -3.80206943e-01 -1.91123533e+00 2.21021986e+00 -1.55157149e+00] [ 9.34218824e-01 -2.16271833e-01 5.08793175e-01 8.03092957e-01 2.29041910e+00 -1.31489444e+00 2.85964274e+00 -4.13634658e-01 -2.55190063e+00 -4.82059032e-01] [ 1.23814714e+00 1.34612405e+00 9.00119960e-01 6.79909050e-01 -1.05695403e+00 -2.48618269e+00 7.46525750e-02 1.40750480e+00 
-9.65452671e-01 -8.26123059e-01] [ 1.76358032e+00 1.30356812e+00 1.68301833e+00 -9.94541824e-01 5.59959650e-01 2.26594150e-01 3.24361563e-01 7.06298053e-01 9.37368929e-01 -7.43213832e-01]] [[ 9.74758744e-01 1.97524536e+00 -5.30622229e-02 -1.82021964e+00 -5.39855719e-01 6.50416911e-01 3.00797725e+00 5.71964145e-01 -2.14324817e-01 -1.56059813e+00] [-8.94474328e-01 -1.71513033e+00 -1.22841001e+00 -8.09170365e-01 2.69657493e+00 -1.70105410e+00 1.79378259e+00 6.65734291e-01 1.63924968e+00 -1.37532055e+00] [ 3.73192847e-01 1.13045895e+00 8.32207620e-01 -1.11395669e+00 -1.28980291e+00 1.99034739e+00 1.30071259e+00 -4.44140762e-01 3.57336760e-01 -1.45638132e+00] [-5.02180159e-01 2.61791492e+00 1.71272850e+00 5.27610004e-01 -1.15297246e+00 -2.14914393e+00 -3.48099202e-01 2.41068721e-01 1.45103812e+00 1.11684215e+00] [ 1.72899354e+00 -1.45165288e+00 -4.76409076e-03 -3.07762176e-01 -9.64929521e-01 2.54905891e+00 -2.03930569e+00 3.26469123e-01 1.41819751e+00 -1.85696745e+00] [ 6.99619234e-01 2.90598750e+00 -1.33235538e+00 2.68788457e-01 9.14937913e-01 -4.60509330e-01 8.67341101e-01 -1.92258167e+00 -1.35745275e+00 5.74931264e-01] [ 9.30326045e-01 1.77153587e+00 1.64542198e-01 -3.32835585e-01 6.09930813e-01 -6.04636133e-01 -1.86162388e+00 -4.01414692e-01 -2.29500741e-01 1.31920183e+00] [ 2.12032938e+00 -2.16162586e+00 2.60442162e+00 9.41479981e-01 4.57508385e-01 -2.74337620e-01 -6.68287694e-01 -2.35814810e-01 -8.63556683e-01 1.32660043e+00] [-1.00075781e+00 -4.90452617e-01 2.59839845e+00 7.82236636e-01 1.66879761e+00 -1.10238135e+00 -3.15232307e-01 -1.01291388e-01 -8.86277616e-01 1.42248714e+00] [ 1.77490008e+00 1.41630089e+00 7.93802023e-01 2.16829944e+00 2.05327487e+00 -4.75764185e-01 1.20646024e+00 1.06107664e+00 -2.46636009e+00 -3.55208933e-01]]]]]; ov_res: [[[[[-8.24096382e-01 -2.54954958e+00 -2.22099495e+00 -1.68521845e+00 -4.48679894e-01 -1.63584924e+00 9.76666808e-02 1.77285039e+00 1.40688396e+00 -1.20530844e-01] [-2.20888972e+00 7.06445694e-01 2.24661708e+00 
-8.17734361e-01 1.52112532e+00 -1.49974659e-01 -1.35043478e+00 -1.64935827e+00 -1.84150422e+00 2.10667300e+00] [ 3.51892412e-01 -1.17006910e+00 -2.51874852e+00 -3.40829402e-01 -1.87747169e+00 1.11903489e+00 1.68637908e+00 -2.65039015e+00 1.27648771e+00 1.27567351e+00] [-4.75651741e-01 2.77332020e+00 -3.23792756e-01 -2.75718427e+00 3.64288270e-01 -1.55083513e+00 5.25988817e-01 2.00789785e+00 -1.17090702e+00 -1.40015900e+00] [-9.60539579e-01 -1.02546513e+00 5.13406731e-02 1.86176908e+00 3.13048625e+00 -3.28258276e-01 -2.61753201e+00 -6.69655859e-01 -1.47236919e+00 -1.07415128e+00] [ 4.92764384e-01 5.27243555e-01 1.80535960e+00 -1.08089459e+00 9.72534359e-01 -3.87211949e-01 -8.28826964e-01 -2.06749487e+00 2.27894783e+00 1.29396808e+00] [-1.22135019e+00 1.09257019e+00 -1.24598074e+00 -1.09827662e+00 1.15983284e+00 7.09729254e-01 -1.11255741e+00 -1.47966015e+00 1.32367387e-01 2.88215613e+00] [-8.04137588e-01 3.51049721e-01 7.72525668e-02 1.11002445e+00 1.91645935e-01 1.00621819e+00 -1.26802814e+00 -3.32821131e-01 3.11134905e-01 2.83951640e-01] [-5.50957024e-01 -2.70032120e+00 1.80554748e+00 -2.01225758e+00 -1.06333637e+00 -5.09582460e-01 6.25199199e-01 8.02674651e-01 -1.14492729e-01 -1.33098102e+00] [ 5.38378358e-01 2.16588378e+00 1.63622808e+00 8.61799717e-01 -1.33125246e+00 -7.72812683e-03 -1.11783302e+00 -1.17903423e+00 9.29917276e-01 1.42016065e+00]] [[-9.64531600e-01 -1.29207408e+00 -1.74850273e+00 2.17033195e+00 -1.39883566e+00 1.56398869e+00 1.79975259e+00 -2.49210191e+00 1.97383070e+00 7.39248335e-01] [-1.73440146e+00 -7.79259503e-01 -5.11512935e-01 2.47635484e+00 4.01310802e-01 -2.58615327e+00 2.51849860e-01 5.44716418e-01 7.00747082e-03 1.62880230e+00] [ 4.53157097e-01 -1.14510393e+00 -2.53985167e+00 -8.46142709e-01 2.87517428e-01 -8.58354270e-01 -1.66358531e+00 5.92031442e-02 -9.54171360e-01 -9.44171026e-02] [ 5.73876739e-01 1.87547672e+00 4.09466662e-02 -2.96896338e+00 -1.09626162e+00 -2.66666222e+00 -4.40259337e-01 -1.43090427e+00 3.50427479e-01 
3.62358832e+00] [ 2.47421646e+00 1.02977264e+00 -1.06313109e+00 1.05242336e+00 -1.12122452e+00 7.44869053e-01 -2.00418830e+00 2.20468402e+00 -1.52113426e+00 1.33193660e+00] [-2.10387659e+00 2.79229140e+00 -9.88999009e-01 -1.39976394e+00 -9.44649041e-01 -7.86573410e-01 5.29369116e-01 -2.12635398e+00 -1.55978644e+00 6.54472530e-01] [-7.90811479e-01 1.45686936e+00 1.96742088e-01 1.33595026e+00 -8.42764080e-01 5.98761559e-01 -3.20253432e-01 6.01524711e-01 7.23231733e-01 7.84037173e-01] [ 1.77756083e+00 7.71643579e-01 -9.29947197e-01 1.44449425e+00 -1.86171412e+00 -5.58576047e-01 3.50662321e-01 2.13352442e+00 3.59974980e-01 2.34080553e+00] [ 5.77722192e-01 3.91597480e-01 8.46124530e-01 4.87065583e-01 -3.28837824e+00 -3.85124892e-01 6.77852094e-01 1.77458763e+00 -1.32674897e+00 7.17711329e-01] [ 3.82409513e-01 -2.66043019e+00 2.23091078e+00 -2.09468317e+00 -5.20484328e-01 -1.22957730e+00 1.55958378e+00 7.29080796e-01 -4.81869131e-01 -7.18771935e-01]] [[ 5.14432847e-01 -3.95166469e+00 -6.93225861e-01 -3.27943593e-01 -6.84936106e-01 9.57094371e-01 -2.07141805e+00 -1.88178837e+00 1.27195686e-01 9.62763429e-01] [-9.39972997e-01 2.52250314e+00 -5.02106786e-01 1.55360699e+00 3.34704369e-01 -1.44283056e+00 -6.77913487e-01 -1.12023883e-01 -1.93692899e+00 5.53355277e-01] [-8.10234427e-01 2.31341457e+00 -2.02710223e+00 -4.45143819e-01 -9.61699843e-01 9.84546319e-02 -5.34648955e-01 6.85253263e-01 -3.59181571e+00 -4.87931212e-03] [-1.11269534e+00 2.75371885e+00 7.14333534e-01 -2.29177618e+00 -2.11584353e+00 7.40145564e-01 9.05127525e-01 1.32164013e+00 -8.51308346e-01 5.09801209e-02] [ 5.76095223e-01 -5.77714324e-01 -6.59522355e-01 1.03950477e+00 -1.82908630e+00 1.70250088e-01 -4.75369811e-01 2.76280139e-02 2.39461589e+00 -5.38406491e-01] [ 6.99952900e-01 2.09929061e+00 -6.02142751e-01 1.07863116e+00 -7.47009754e-01 -1.06485224e+00 8.81012022e-01 -2.00508857e+00 -4.44049984e-01 -9.52269912e-01] [ 5.18643148e-02 1.60777104e+00 5.60437322e-01 -1.98333561e+00 1.47206533e+00 
-2.39752984e+00 -9.31088448e-01 1.91444993e+00 -1.83852768e+00 2.20073089e-01] [ 1.74216047e-01 8.02571058e-01 1.00641561e+00 -6.88960731e-01 8.30715448e-02 -3.11934888e-01 -6.52959406e-01 1.15411615e+00 -1.64801145e+00 6.76349699e-01] [-2.41006255e-01 2.39969540e+00 -7.53920436e-01 1.79320049e+00 -1.69362748e+00 1.23029006e+00 5.32186925e-01 1.14986384e-02 4.98045802e-01 -8.58307719e-01] [-1.80251789e+00 -1.07948983e+00 3.89170021e-01 3.32775712e-01 8.79186392e-01 5.13209939e-01 -1.65471685e+00 -1.51718163e+00 1.10833311e+00 4.99528289e-01]] [[ 3.03271323e-01 7.12548256e-01 -1.06366789e+00 -2.17369819e+00 -4.93721873e-01 8.63843560e-01 2.34697148e-01 -3.49327707e+00 3.83112848e-01 1.09116328e+00] [-1.50050342e+00 -7.98059762e-01 -8.24088454e-01 2.52632856e+00 1.79354346e+00 -5.14928363e-02 1.47871470e+00 4.08913881e-01 6.35564566e-01 -1.22370076e+00] [-9.94371772e-01 -1.26799393e+00 -4.84313458e-01 1.50976312e+00 1.56733748e-02 -5.26341140e-01 -7.20412061e-02 -2.89074373e+00 1.63390672e+00 1.33448958e+00] [ 1.64606020e-01 -2.16074729e+00 -1.07754540e+00 -1.07759702e+00 2.33287156e-01 -2.66492292e-02 6.98998094e-01 7.18533844e-02 -1.91606358e-01 6.37801945e-01] [-2.07689738e+00 4.93243843e-01 9.69632447e-01 -7.02855229e-01 5.18185437e-01 2.39999965e-01 4.07024920e-01 4.82027352e-01 3.40849221e-01 -7.61354923e-01] [ 2.04906225e-01 -5.82010806e-01 -3.98979932e-01 1.93512475e+00 2.24461627e+00 -1.06029415e+00 1.48179814e-01 -5.26762187e-01 1.20675838e+00 -1.82125747e+00] [-2.00088763e+00 -9.18447793e-01 4.23743501e-02 1.39978611e+00 -1.25254595e+00 -1.49258777e-01 -2.59697819e+00 -8.41294289e-01 4.66259271e-01 1.08277760e-01] [ 7.54425049e-01 8.81013691e-01 -1.04529321e+00 -4.59997088e-01 -2.11718529e-02 -3.86334252e+00 -4.21028435e-01 2.00572491e+00 -3.80880199e-02 -3.07339448e-02] [ 7.06391752e-01 4.44392890e-01 -1.38842535e+00 -2.66775250e+00 1.77720892e+00 3.26985073e+00 3.81003380e-01 -1.12618661e+00 8.32956314e-01 4.17032051e+00] [-1.89245892e+00 9.09935474e-01 
-3.39513588e+00 -2.06581807e+00 4.98991132e-01 2.23875713e+00 -5.62698960e-01 -1.91131616e+00 9.40914333e-01 -3.67284715e-02]] [[-1.90694654e+00 7.28556693e-01 1.31136715e-01 -9.44792449e-01 5.10607898e-01 -8.43257308e-02 -6.26447052e-02 -2.92880297e-01 2.72496104e-01 3.81223291e-01] [ 1.41627657e+00 1.28821123e+00 2.28924942e+00 -6.34484172e-01 2.27706599e+00 -1.93513405e+00 4.52198774e-01 -8.18330720e-02 3.07674384e+00 -8.36632177e-02] [-1.54903364e+00 -1.25494003e+00 1.52926159e+00 3.38795018e+00 3.00685811e+00 2.32998347e+00 1.65063396e-01 1.31321669e-01 -1.49226296e+00 4.80199605e-01] [ 2.17192203e-01 -1.26466990e+00 -6.15245700e-01 -1.02540874e+00 -1.62103391e+00 -1.10626841e+00 1.10374250e-01 -1.26803219e+00 2.34397101e+00 -2.14434648e+00] [ 1.94255483e+00 -7.75114179e-01 -1.66212821e+00 -2.13391209e+00 -2.40343285e+00 1.06258595e+00 1.19880661e-02 -2.84049630e-01 -9.47936356e-01 -1.93133748e+00] [ 6.08488798e-01 -7.76315153e-01 3.05697441e-01 -5.88080704e-01 -2.84068298e+00 1.34695530e+00 -1.66180956e+00 -1.51159620e+00 -3.29913187e+00 2.08284831e+00] [ 2.52289802e-01 1.09987867e+00 1.16001248e+00 -1.57010651e+00 2.63581848e+00 1.94304681e+00 -1.60523999e+00 -8.72603118e-01 1.38527691e+00 6.37212634e-01] [-8.97634625e-01 1.00156760e+00 1.29983330e+00 5.33188522e-01 -6.00969419e-02 1.71436286e+00 5.56491971e-01 -1.23382962e+00 1.74784875e+00 -1.46071494e+00] [-3.53917456e+00 8.73211622e-01 7.98134089e-01 6.36968374e-01 -5.40907502e-01 1.00800395e+00 -9.40640688e-01 -8.21503550e-02 -1.03048074e+00 2.32626987e+00] [ 6.74698651e-01 -1.32348943e+00 5.06324470e-01 -1.37720597e+00 5.21581650e-01 2.20882869e+00 -9.61296260e-01 -1.72326958e+00 4.45847921e-02 6.75564468e-01]] [[-1.10799909e+00 -1.10491443e+00 8.04688334e-01 4.94864613e-01 -2.10416126e+00 -7.46613324e-01 1.50511348e+00 -1.55169070e-01 -2.11197639e+00 -1.67545772e+00] [-4.64213103e-01 -1.53489721e+00 -6.68280050e-02 9.99139249e-01 9.04689878e-02 1.32315648e+00 1.94332945e+00 2.30488157e+00 
-1.05017412e+00 5.70966005e-01] [ 1.67045867e+00 1.41767991e+00 4.43595976e-01 4.96420026e-01 3.62298250e+00 -3.14717007e+00 -1.15953159e+00 -1.29387426e+00 4.57930177e-01 -4.49173599e-01] [-1.17484236e+00 3.33744466e-01 1.20783293e+00 6.03539608e-02 6.92968130e-01 -1.07323432e+00 2.71129274e+00 1.07229304e+00 2.91852206e-01 -1.43850148e+00] [-1.89133048e+00 -2.20492649e+00 -3.06414723e-01 -4.87546176e-01 -1.41538322e+00 7.62524605e-01 -3.06507736e-03 -4.71263766e-01 1.03149843e+00 1.44283903e+00] [ 1.25346470e+00 1.34791243e+00 -5.01634777e-01 -1.08340168e+00 -1.01672375e+00 -3.27780986e+00 1.41665554e+00 2.20758200e+00 -6.55885339e-01 2.72950077e+00] [ 1.40120602e+00 -1.71306515e+00 8.31310987e-01 2.30942488e+00 -2.02855825e+00 9.03726697e-01 -7.71370754e-02 -6.88743055e-01 -1.00957930e+00 -1.33536613e+00] [ 3.29313374e+00 -3.84618640e-01 -8.92451584e-01 -4.81227279e-01 -2.00511646e+00 -1.57530367e+00 1.66051662e+00 -9.44021165e-01 1.55333042e+00 -8.84082198e-01] [-4.17711467e-01 3.60261053e-01 2.60720062e+00 -1.12365842e+00 1.65454650e+00 -2.96288848e-01 2.07839355e-01 -6.49258435e-01 3.66936834e-03 -1.84981179e+00] [ 6.54296875e-01 -1.07158613e+00 7.29272366e-01 -7.83991277e-01 2.30743027e+00 1.80065799e+00 5.04044533e-01 1.60688794e+00 5.36200643e-01 1.08131504e+00]] [[-8.04002583e-01 6.14476562e-01 -1.25551558e+00 1.03390825e+00 -6.72895432e-01 1.94338453e+00 -1.23875892e+00 -5.58478057e-01 2.06605840e+00 2.03667060e-01] [-2.01454687e+00 3.46108103e+00 2.41667342e+00 2.32123399e+00 -3.70389867e+00 2.15615347e-01 -3.26614332e+00 3.17221260e+00 4.87415463e-01 9.10572350e-01] [ 1.76816106e+00 -1.16240263e+00 7.73776472e-02 1.75288633e-01 -2.48256087e+00 1.44235992e+00 -2.96654153e+00 -2.73725152e-01 5.05260885e-01 3.40079665e-02] [-3.32375258e-01 -1.28891206e+00 -1.60575855e+00 6.72218204e-01 -5.48519135e-01 9.16826427e-01 -3.46596301e-01 7.24856183e-02 1.76553023e+00 -1.26140642e+00] [-1.43923044e+00 3.27707529e-01 -1.84906352e+00 -2.15207905e-01 
-2.80736470e+00 8.31080258e-01 -7.35069036e-01 2.97021776e-01 3.75101447e-01 4.54937786e-01] [-6.90974817e-02 -1.17989469e+00 -8.56983840e-01 -1.03861415e+00 -3.56012225e+00 -1.28069293e+00 -2.78863966e-01 1.09246457e+00 7.11654067e-01 -1.58127159e-01] [-1.67739308e+00 -2.60701966e+00 2.82092333e+00 1.30600870e-01 8.09110880e-01 1.61045277e+00 -3.80206943e-01 -1.91123533e+00 2.21021986e+00 -1.55157149e+00] [ 9.34218824e-01 -2.16271833e-01 5.08793175e-01 8.03092957e-01 2.29041910e+00 -1.31489444e+00 2.85964274e+00 -4.13634658e-01 -2.55190063e+00 -4.82059032e-01] [ 1.23814714e+00 1.34612405e+00 9.00119960e-01 6.79909050e-01 -1.05695403e+00 -2.48618269e+00 7.46525750e-02 1.40750480e+00 -9.65452671e-01 -8.26123059e-01] [ 1.76358032e+00 1.30356812e+00 1.68301833e+00 -9.94541824e-01 5.59959650e-01 2.26594150e-01 3.24361563e-01 7.06298053e-01 9.37368929e-01 -7.43213832e-01]] [[ 9.74758744e-01 1.97524536e+00 -5.30622229e-02 -1.82021964e+00 -5.39855719e-01 6.50416911e-01 3.00797725e+00 5.71964145e-01 -2.14324817e-01 -1.56059813e+00] [-8.94474328e-01 -1.71513033e+00 -1.22841001e+00 -8.09170365e-01 2.69657493e+00 -1.70105410e+00 1.79378259e+00 6.65734291e-01 1.63924968e+00 -1.37532055e+00] [ 3.73192847e-01 1.13045895e+00 8.32207620e-01 -1.11395669e+00 -1.28980291e+00 1.99034739e+00 1.30071259e+00 -4.44140762e-01 3.57336760e-01 -1.45638132e+00] [-5.02180159e-01 2.61791492e+00 1.71272850e+00 5.27610004e-01 -1.15297246e+00 -2.14914393e+00 -3.48099202e-01 2.41068721e-01 1.45103812e+00 1.11684215e+00] [ 1.72899354e+00 -1.45165288e+00 -4.76409076e-03 -3.07762176e-01 -9.64929521e-01 2.54905891e+00 -2.03930569e+00 3.26469123e-01 1.41819751e+00 -1.85696745e+00] [ 6.99619234e-01 2.90598750e+00 -1.33235538e+00 2.68788457e-01 9.14937913e-01 -4.60509330e-01 8.67341101e-01 -1.92258167e+00 -1.35745275e+00 5.74931264e-01] [ 9.30326045e-01 1.77153587e+00 1.64542198e-01 -3.32835585e-01 6.09930813e-01 -6.04636133e-01 -1.86162388e+00 -4.01414692e-01 -2.29500741e-01 1.31920183e+00] [ 
2.12032938e+00 -2.16162586e+00 2.60442162e+00 9.41479981e-01 4.57508385e-01 -2.74337620e-01 -6.68287694e-01 -2.35814810e-01 -8.63556683e-01 1.32660043e+00] [-1.00075781e+00 -4.90452617e-01 2.59839845e+00 7.82236636e-01 1.66879761e+00 -1.10238135e+00 -3.15232307e-01 -1.01291388e-01 -8.86277616e-01 1.42248714e+00] [ 1.77490008e+00 1.41630089e+00 7.93802023e-01 2.16829944e+00 2.05327487e+00 -4.75764185e-01 1.20646024e+00 1.06107664e+00 -2.46636009e+00 -3.55208933e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 1, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_916.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.1652 (2,1,1,.,.) = 0.4910 (3,1,1,.,.) = 0.5094 (1,2,1,.,.) = -0.3614 (2,2,1,.,.) = 0.0001 * 4.9497 (3,2,1,.,.) = 0.4258 (1,3,1,.,.) = 0.3215 (2,3,1,.,.) = 1.0286 (3,3,1,.,.) = 1.1304 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [-0.8213233 0.76411617 0.47491837 ... 0.05929625 -0.07728352 0.04102718] [ 0.38573802 0.03913755 -0.45796663 ... -0.07924233 -0.60125357 -0.05090134] ... [-0.31480664 0.8753429 0.06997065 ... 0.5735726 0.6942908 0.15649143] [-0.56035525 0.28980452 -0.4407964 ... -0.12108279 -0.4099648 -0.01874043] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-0.2692166 0.12263107 0.15534209 ... 0.596656 -0.18367335 -1.1108742 ] [-0.4624378 0.7776881 0.2661266 ... 0.2677241 -0.25971916 -0.36912203] ... [ 0.3423925 0.4435032 0.8799335 ... -0.27135393 -0.1330443 -0.17066209] [ 0.10589708 0.03279484 0.7593855 ... -0.6940509 0.09961501 -0.9291231 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.07517561 0.06480819 0.13253717 ... 
0.03721193 0.27737898 0.48972154] [-0.97646415 -0.57832414 -0.07905649 ... -0.56059337 0.10612484 -0.06027628] ... [-0.4016845 -0.5913345 0.36384287 ... 0.14649905 -0.42547786 -1.0273557 ] [-0.6330997 0.19617747 0.16232033 ... -0.3430211 0.572764 0.04945805] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.0932193 1.4527067 -0.27627608 ... 0.48619035 -0.44438863 -0.02835392] [-0.46434537 0.18393037 -0.08921589 ... 0.7639285 -1.0396305 0.52877283] ... [ 0.3655982 -0.2242698 -0.3247903 ... 0.21734159 -0.85124683 -0.10396023] [-0.2525503 -0.41079208 -0.30335033 ... -0.7013914 -0.89413136 0.36630276] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-0.45444876 -0.58917785 0.31839395 ... 0.5501267 -0.9813728 0.74034667] [-0.74045926 0.29198432 0.12633635 ... -0.29899028 0.16518256 0.35647872] ... [-0.8763807 -0.21373706 -0.46077964 ... -0.14814663 0.13688438 -0.54804933] [ 0.94223297 0.21862757 -1.4070559 ... -0.5815098 0.21518856 -0.4602546 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-0.4699336 0.3448707 -0.40391785 ... 0.35236663 -0.08978821 0.24055886] [ 0.5232327 -0.39820546 0.30702004 ... 0.42830127 0.18604341 -0.19190544] ... [-1.0623648 0.6594892 0.10305125 ... -0.5745458 0.20905745 0.47321397] [-0.22288303 0.60115373 0.21861947 ... -0.28819183 -0.49404892 -1.1913259 ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [-1.4633913 0.9118736 -0.12151574 ... 1.7064735 0.6535465 0.9739029 ] [ 0.50943893 0.3037974 -1.0325639 ... 0.30721313 0.45985162 0.5256344 ] ... [-0.3991848 0.6756856 0.7864565 ... 2.0467117 3.180469 -0.56742334] [-1.2729342 0.630528 -0.6816716 ... -0.48900628 -0.5039479 -0.3922339 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.02673496 0.81952864 -0.67798984 ... 2.6524892 -1.6315188 -1.7233633 ] [-1.3468425 0.57022786 -0.47231403 ... 1.2119275 -1.177864 0.14778872] ... [-1.927277 0.43038502 1.7267904 ... 2.025887 0.8724565 -1.0078954 ] [-1.1958905 1.7511444 -1.22427 ... 
1.9361439 0.0949003 -0.9591412 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-0.89540285 -1.1037691 -0.34010693 ... -1.4928747 -1.2495607 0.21048012] [-1.008122 -0.14721593 -0.29538414 ... -0.7875532 0.9007972 0.24011491] ... [-1.4464014 -1.0241925 1.2618159 ... -1.4640607 -0.10289839 -0.11823521] [-0.5505867 1.5801101 0.6768775 ... -0.4830137 2.381727 0.46468228] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.5446665 1.1404188 -0.5454459 ... -0.37795654 -1.2630433 0.01063778] [-0.61141646 0.0291807 0.74259007 ... -0.9674055 -2.5070398 0.10920208] ... [ 0.92750674 1.4425147 1.1022202 ... 0.2613216 -2.4972706 1.3168355 ] [-0.4820598 -0.03541962 -0.21634579 ... 0.09315679 -0.66293114 -0.72664547] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-1.9299028 -0.16997851 -1.3085258 ... -0.8374327 -1.3198192 -0.24600571] [-1.4581933 0.56592757 0.9791373 ... 1.8628532 1.0751759 -0.07005841] ... [-0.8337481 0.03462959 0.5839596 ... -0.940846 0.67254645 -0.42687532] [ 3.7094529 1.1286256 0.3550881 ... -0.11592367 -0.6322858 0.4064866 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.07448329 -0.12867638 1.4953773 ... 0.28306714 0.64962006 0.64689213] [ 0.3228528 -0.36195958 0.16064888 ... -0.79437023 -1.4182258 -0.8550327 ] ... [-1.2977643 1.0281578 -1.4543189 ... 0.61853814 -0.06896252 -0.30787295] [-0.28820398 0.5945489 -0.5821058 ... 0.8238958 2.1551237 -1.7281665 ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [-1.1885706 0.8612895 -0.43360534 ... 2.4503458 0.5512706 1.1274198 ] [ 0.3963785 0.31823164 -1.1654942 ... 0.39660993 1.2783047 1.2299359 ] ... [-0.5845659 0.655 0.31898937 ... 2.267402 3.501388 -0.41545728] [-1.0221508 0.7090054 -0.33881155 ... -0.44751447 -0.34166184 -0.55487716] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-0.280731 1.1526852 -0.688242 ... 2.827134 -2.0957298 -1.4371506 ] [-1.2640808 0.01780097 -0.92395276 ... 1.4203231 -1.6025195 0.36891574] ... [-2.6848433 0.34202373 1.500539 ... 
2.938748 1.4094093 -0.94342977] [-2.0223527 2.3363485 -2.1191816 ... 2.9631944 -0.5400285 -0.5115365 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-1.5799038 -1.5006351 -0.22218056 ... -2.2229428 -1.8858675 -0.19846532] [-0.22141333 0.45201147 -0.19785552 ... -0.5464233 0.88442844 0.16100405] ... [-1.4936175 -0.54623187 0.71980983 ... -1.9765303 0.09571113 0.26677498] [-0.48018304 1.6949998 0.3683715 ... -0.51614887 2.3278725 0.64992064] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.4440662 0.9072568 -0.35776836 ... -1.2879567 -0.93737924 -0.4133367 ] [-0.10962775 -0.20760787 1.144433 ... -1.7673478 -1.9300838 -0.23427577] ... [ 0.86451787 2.0419416 1.9930946 ... 0.3981283 -2.4564955 1.6178124 ] [-0.66767955 -0.21380903 0.13607287 ... 0.09376638 -0.4975656 -1.2604733 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-1.864979 0.34027043 -2.0052252 ... -1.9838278 -1.0358793 -0.85132325] [-1.2118095 0.27757457 0.8536674 ... 2.6657217 1.3999908 0.25400206] ... [-0.511845 0.12649232 1.2415853 ... -1.509274 0.44621253 0.26483238] [ 3.4962423 1.2807187 1.4271356 ... 0.7056135 -0.86356914 0.7161814 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.20785508 -0.42517042 1.9041935 ... 0.17746131 0.26369894 0.9391638 ] [-0.12892647 -0.24295846 -0.06578119 ... -1.3251526 -2.0422795 -1.0483977 ] ... [-0.794205 1.1199272 -1.3202723 ... 0.7119887 -0.22913326 -0.33337575] [-0.44270205 0.25368467 -0.47754028 ... 1.4148886 2.5738277 -1.3637978 ] [ 0. 0. 0. ... 0. 0. 0. ]]]]]; ov_res: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [-0.8213233 0.76411617 0.47491837 ... 0.05929625 -0.07728352 0.04102718] [ 0.38573802 0.03913755 -0.45796663 ... -0.07924233 -0.60125357 -0.05090134] ... [-0.31480664 0.8753429 0.06997065 ... 0.5735726 0.6942908 0.15649143] [-0.56035525 0.28980452 -0.4407964 ... -0.12108279 -0.4099648 -0.01874043] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-0.2692166 0.12263107 0.15534209 ... 
0.596656 -0.18367335 -1.1108742 ] [-0.4624378 0.7776881 0.2661266 ... 0.2677241 -0.25971916 -0.36912203] ... [ 0.3423925 0.4435032 0.8799335 ... -0.27135393 -0.1330443 -0.17066209] [ 0.10589708 0.03279484 0.7593855 ... -0.6940509 0.09961501 -0.9291231 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.07517561 0.06480819 0.13253717 ... 0.03721193 0.27737898 0.48972154] [-0.97646415 -0.57832414 -0.07905649 ... -0.56059337 0.10612484 -0.06027628] ... [-0.4016845 -0.5913345 0.36384287 ... 0.14649905 -0.42547786 -1.0273557 ] [-0.6330997 0.19617747 0.16232033 ... -0.3430211 0.572764 0.04945805] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.0932193 1.4527067 -0.27627608 ... 0.48619035 -0.44438863 -0.02835392] [-0.46434537 0.18393037 -0.08921589 ... 0.7639285 -1.0396305 0.52877283] ... [ 0.3655982 -0.2242698 -0.3247903 ... 0.21734159 -0.85124683 -0.10396023] [-0.2525503 -0.41079208 -0.30335033 ... -0.7013914 -0.89413136 0.36630276] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-0.45444876 -0.58917785 0.31839395 ... 0.5501267 -0.9813728 0.74034667] [-0.74045926 0.29198432 0.12633635 ... -0.29899028 0.16518256 0.35647872] ... [-0.8763807 -0.21373706 -0.46077964 ... -0.14814663 0.13688438 -0.54804933] [ 0.94223297 0.21862757 -1.4070559 ... -0.5815098 0.21518856 -0.4602546 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-0.4699336 0.3448707 -0.40391785 ... 0.35236663 -0.08978821 0.24055886] [ 0.5232327 -0.39820546 0.30702004 ... 0.42830127 0.18604341 -0.19190544] ... [-1.0623648 0.6594892 0.10305125 ... -0.5745458 0.20905745 0.47321397] [-0.22288303 0.60115373 0.21861947 ... -0.28819183 -0.49404892 -1.1913259 ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [-1.4633913 0.9118736 -0.12151574 ... 1.7064735 0.6535465 0.9739029 ] [ 0.50943893 0.3037974 -1.0325639 ... 0.30721313 0.45985162 0.5256344 ] ... [-0.3991848 0.6756856 0.7864565 ... 2.0467117 3.180469 -0.56742334] [-1.2729342 0.630528 -0.6816716 ... 
-0.48900628 -0.5039479 -0.3922339 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.02673496 0.81952864 -0.67798984 ... 2.6524892 -1.6315188 -1.7233633 ] [-1.3468425 0.57022786 -0.47231403 ... 1.2119275 -1.177864 0.14778872] ... [-1.927277 0.43038502 1.7267904 ... 2.025887 0.8724565 -1.0078954 ] [-1.1958905 1.7511444 -1.22427 ... 1.9361439 0.0949003 -0.9591412 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-0.89540285 -1.1037691 -0.34010693 ... -1.4928747 -1.2495607 0.21048012] [-1.008122 -0.14721593 -0.29538414 ... -0.7875532 0.9007972 0.24011491] ... [-1.4464014 -1.0241925 1.2618159 ... -1.4640607 -0.10289839 -0.11823521] [-0.5505867 1.5801101 0.6768775 ... -0.4830137 2.381727 0.46468228] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.5446665 1.1404188 -0.5454459 ... -0.37795654 -1.2630433 0.01063778] [-0.61141646 0.0291807 0.74259007 ... -0.9674055 -2.5070398 0.10920208] ... [ 0.92750674 1.4425147 1.1022202 ... 0.2613216 -2.4972706 1.3168355 ] [-0.4820598 -0.03541962 -0.21634579 ... 0.09315679 -0.66293114 -0.72664547] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-1.9299028 -0.16997851 -1.3085258 ... -0.8374327 -1.3198192 -0.24600571] [-1.4581933 0.56592757 0.9791373 ... 1.8628532 1.0751759 -0.07005841] ... [-0.8337481 0.03462959 0.5839596 ... -0.940846 0.67254645 -0.42687532] [ 3.7094529 1.1286256 0.3550881 ... -0.11592367 -0.6322858 0.4064866 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.07448329 -0.12867638 1.4953773 ... 0.28306714 0.64962006 0.64689213] [ 0.3228528 -0.36195958 0.16064888 ... -0.79437023 -1.4182258 -0.8550327 ] ... [-1.2977643 1.0281578 -1.4543189 ... 0.61853814 -0.06896252 -0.30787295] [-0.28820398 0.5945489 -0.5821058 ... 0.8238958 2.1551237 -1.7281665 ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [-1.1885706 0.8612895 -0.43360534 ... 2.4503458 0.5512706 1.1274198 ] [ 0.3963785 0.31823164 -1.1654942 ... 0.39660993 1.2783047 1.2299359 ] ... [-0.5845659 0.655 0.31898937 ... 
2.267402 3.501388 -0.41545728] [-1.0221508 0.7090054 -0.33881155 ... -0.44751447 -0.34166184 -0.55487716] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-0.280731 1.1526852 -0.688242 ... 2.827134 -2.0957298 -1.4371506 ] [-1.2640808 0.01780097 -0.92395276 ... 1.4203231 -1.6025195 0.36891574] ... [-2.6848433 0.34202373 1.500539 ... 2.938748 1.4094093 -0.94342977] [-2.0223527 2.3363485 -2.1191816 ... 2.9631944 -0.5400285 -0.5115365 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-1.5799038 -1.5006351 -0.22218056 ... -2.2229428 -1.8858675 -0.19846532] [-0.22141333 0.45201147 -0.19785552 ... -0.5464233 0.88442844 0.16100405] ... [-1.4936175 -0.54623187 0.71980983 ... -1.9765303 0.09571113 0.26677498] [-0.48018304 1.6949998 0.3683715 ... -0.51614887 2.3278725 0.64992064] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.4440662 0.9072568 -0.35776836 ... -1.2879567 -0.93737924 -0.4133367 ] [-0.10962775 -0.20760787 1.144433 ... -1.7673478 -1.9300838 -0.23427577] ... [ 0.86451787 2.0419416 1.9930946 ... 0.3981283 -2.4564955 1.6178124 ] [-0.66767955 -0.21380903 0.13607287 ... 0.09376638 -0.4975656 -1.2604733 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [-1.864979 0.34027043 -2.0052252 ... -1.9838278 -1.0358793 -0.85132325] [-1.2118095 0.27757457 0.8536674 ... 2.6657217 1.3999908 0.25400206] ... [-0.511845 0.12649232 1.2415853 ... -1.509274 0.44621253 0.26483238] [ 3.4962423 1.2807187 1.4271356 ... 0.7056135 -0.86356914 0.7161814 ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0.20785508 -0.42517042 1.9041935 ... 0.17746131 0.26369894 0.9391638 ] [-0.12892647 -0.24295846 -0.06578119 ... -1.3251526 -2.0422795 -1.0483977 ] ... [-0.794205 1.1199272 -1.3202723 ... 0.7119887 -0.22913326 -0.33337575] [-0.44270205 0.25368467 -0.47754028 ... 1.4148886 2.5738277 -1.3637978 ] [ 0. 0. 0. ... 0. 0. 0. ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_918.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.3499 (2,1,1,.,.) = 0.8211 (3,1,1,.,.) = 1.4762 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[ 2.54397154e-01 -3.79970372e-02 -2.58203715e-01 ... -5.36072738e-02 -5.94089031e-01 3.16812843e-01] [-1.25034034e-01 -2.39331141e-01 4.93847616e-02 ... -2.02754632e-01 -3.99243861e-01 -3.69683176e-01] [ 1.72669768e-01 -7.59652555e-01 4.49063838e-01 ... -6.31100833e-01 -5.14138639e-01 4.89363149e-02] ... [-3.99746865e-01 -4.18584377e-01 1.66478574e-01 ... -2.76011616e-01 -3.56163859e-01 -2.51995306e-02] [-2.01429829e-01 -7.87892714e-02 3.60353768e-01 ... 4.24074009e-02 -3.05445064e-02 -2.18064204e-01] [-1.96069241e-01 4.12614495e-01 8.61690119e-02 ... 3.71900201e-02 1.00747012e-01 1.52967528e-01]] [[ 2.40906999e-01 2.01707636e-03 3.80218118e-01 ... -2.28240520e-01 -7.31589377e-01 3.19959909e-01] [-4.03405949e-02 -1.78791761e-01 -1.80314537e-02 ... 5.01583576e-01 4.15213376e-01 9.84836817e-02] [-3.84018689e-01 1.49883991e-02 -4.26982017e-03 ... 
-1.92427114e-01 3.10251355e-01 2.92272627e-01] ... [-1.68924525e-01 -1.29940510e-01 -2.15617493e-01 ... 3.07121396e-01 9.29481685e-02 4.56327379e-01] [-7.86614269e-02 -3.66013736e-01 -1.32687222e-02 ... -2.70997435e-01 3.94997329e-01 -2.67908454e-01] [-1.39422730e-01 2.71853656e-01 -4.08017308e-01 ... 2.84909960e-02 -2.47045770e-01 -2.00918391e-01]] [[ 4.00902003e-01 4.86632250e-02 8.87123123e-02 ... 2.35491931e-01 -8.60712901e-02 -4.36477631e-01] [ 1.31339818e-01 5.05318820e-01 1.94051519e-01 ... 3.86353165e-01 -5.17718643e-02 -3.54835913e-02] [ 3.32142800e-01 -7.08144605e-01 -3.25219423e-01 ... -7.50016212e-01 -1.78073108e-01 1.25151366e-01] ... [ 7.20876992e-01 1.46146014e-01 2.17460707e-01 ... -3.93255562e-01 -5.16953111e-01 3.82688522e-01] [-5.34633398e-01 3.52848560e-01 5.26029095e-02 ... 3.14573705e-01 2.36496076e-01 5.05521476e-01] [ 5.74054658e-01 2.52462834e-01 -3.08387149e-02 ... 3.21688037e-03 1.60037279e-01 4.95334029e-01]] ... [[-3.61077279e-01 1.24960124e-01 -1.24313524e-02 ... 4.34496552e-01 7.61204064e-01 7.45704845e-02] [-3.10022205e-01 2.25499883e-01 -2.25295871e-01 ... -1.25596784e-02 6.02243602e-01 -3.94731313e-02] [-6.51700944e-02 4.03718650e-01 3.39784235e-01 ... 1.90030828e-01 1.46238536e-01 3.18256885e-01] ... [-3.30136359e-01 -5.15914321e-01 3.51746768e-01 ... 1.93093196e-01 1.74272776e-01 -5.01987457e-01] [ 6.37865007e-01 -1.78555652e-01 -2.88225770e-01 ... -2.56901830e-01 -8.13442841e-02 1.48380101e-01] [-4.00218815e-01 1.98392168e-01 7.99476504e-01 ... -2.13696763e-01 2.71091968e-01 1.36379361e-01]] [[-2.55008876e-01 -4.42687720e-01 1.68534786e-01 ... 4.07928109e-01 -4.31876570e-01 -3.49341959e-01] [ 4.41162698e-02 1.75537542e-01 7.90186878e-03 ... -4.68450546e-01 -3.24690640e-01 -2.52177626e-01] [ 2.98113048e-01 3.02719101e-02 -7.23942369e-02 ... -5.48931837e-01 3.21528107e-01 4.71446067e-01] ... [ 7.43721962e-01 2.50500739e-01 -8.24045897e-01 ... 
-3.48976880e-01 1.05392486e-01 1.84402261e-02] [-6.80435419e-01 -5.42924583e-01 1.93107381e-01 ... -5.02875984e-01 4.86373335e-01 -1.43923700e-01] [ 2.26396546e-01 4.52684134e-01 2.72636771e-01 ... 3.07897866e-01 1.59553051e-01 4.89669032e-02]] [[-1.48623630e-01 4.93328273e-02 7.82295823e-01 ... -2.62267608e-02 -2.23829374e-01 2.62582093e-01] [-1.30862445e-01 4.03461814e-01 -1.71734199e-01 ... 3.21678013e-01 -1.09536000e-01 7.72845075e-02] [-4.46051180e-01 1.14530645e-01 -4.63274688e-01 ... -4.52087611e-01 -2.03179698e-02 1.16584208e-02] ... [ 4.15998250e-01 -3.56838405e-01 -7.42200017e-01 ... -7.74687648e-01 -9.44757223e-01 9.14678872e-01] [-1.10610269e-01 5.93278110e-01 -1.99978828e-01 ... 1.86908945e-01 -2.32264549e-01 1.11034745e-02] [-1.52107567e-01 -6.32618904e-01 2.28747576e-02 ... -1.03988484e-01 6.01689279e-01 -8.41733441e-02]]] [[[-2.27510999e-03 1.44277728e+00 -1.11018896e+00 ... -2.39166588e-01 -6.84700727e-01 8.06480050e-01] [-4.74613339e-01 5.53776622e-01 7.23865628e-01 ... -1.06676209e+00 -2.84907877e-01 6.81607962e-01] [-1.58901885e-01 -6.21734321e-01 -6.27829313e-01 ... -5.50120429e-04 7.29782820e-01 1.09796393e+00] ... [ 9.82479274e-01 -2.74143100e-01 -1.33018017e+00 ... -3.03228050e-02 1.00193238e+00 5.41759133e-01] [-3.81832361e-01 -1.09194899e+00 -1.39754343e+00 ... 6.86921835e-01 1.13238919e+00 5.57566404e-01] [-3.53655607e-01 1.46258637e-01 -6.69144332e-01 ... -4.42405343e-01 5.39653957e-01 4.40330029e-01]] [[ 2.17176020e-01 -5.53859234e-01 -9.73352119e-02 ... -1.20817697e+00 8.15301836e-02 -3.79170328e-01] [-1.07026410e+00 7.99581826e-01 3.44189167e-01 ... -4.97766107e-01 -1.95048943e-01 -1.09330142e+00] [-1.89887345e+00 8.47734213e-01 6.15452111e-01 ... 2.62449980e-01 8.12182426e-01 9.92617533e-02] ... [ 6.05473936e-01 -7.20759392e-01 1.81652486e+00 ... -7.98003435e-01 2.39396274e-01 3.60270977e-01] [ 1.39368474e+00 -3.95793498e-01 7.84595847e-01 ... 
-6.38804138e-01 -2.96504200e-01 6.31995678e-01] [-3.46830010e-01 3.05331320e-01 -4.79830563e-01 ... 8.56609523e-01 2.55788535e-01 2.13867262e-01]] [[ 3.93812716e-01 4.22927022e-01 3.90852183e-01 ... -1.76465201e+00 6.96812391e-01 6.69339180e-01] [-3.92160863e-02 -7.44031310e-01 2.79282033e-01 ... -3.02248597e-01 3.36213797e-01 -2.73222029e-01] [ 8.63304555e-01 1.69903851e+00 -3.47310156e-01 ... 6.94712520e-01 8.91643465e-02 -2.28211567e-01] ... [-4.56490926e-02 -1.67661265e-01 1.19858587e+00 ... -9.15507019e-01 -6.02899671e-01 -3.57314013e-02] [-5.66445887e-01 9.51256335e-01 -6.35488391e-01 ... -1.25734851e-01 -4.06957299e-01 1.39708126e+00] [ 5.44832110e-01 7.98148930e-01 1.02100277e+00 ... -1.33067703e+00 -3.12122643e-01 1.45500124e-01]] ... [[-7.49882102e-01 4.90601361e-01 4.45114374e-01 ... 6.39995277e-01 -1.50320148e+00 4.26139683e-02] [ 7.17682168e-02 4.37286198e-01 -1.53047943e+00 ... -4.37472910e-01 1.94498986e-01 4.33660865e-01] [-4.63916123e-01 -6.76635683e-01 1.34507072e+00 ... 2.55067736e-01 -1.01361668e+00 -1.16602921e+00] ... [ 4.43570137e-01 -7.99907446e-01 -4.40760583e-01 ... -1.08106506e+00 2.11531073e-01 2.08134994e-01] [-4.39486414e-01 -1.96803808e+00 -1.12417376e+00 ... -6.72573566e-01 2.95478612e-01 9.32046056e-01] [ 2.03047585e+00 -1.10597801e+00 2.21807331e-01 ... 1.22141108e-01 8.43771815e-01 -2.02801544e-02]] [[-7.48609662e-01 6.21864915e-01 -1.80703735e+00 ... -1.30686820e-01 -3.88790369e-02 2.57836193e-01] [-2.88257539e-01 -8.42182115e-02 1.70145094e+00 ... 7.24817887e-02 5.56084692e-01 4.54280615e-01] [-2.12428927e-01 1.55877757e+00 -3.37593347e-01 ... -1.00303970e-01 4.68336225e-01 -3.65613908e-01] ... [ 4.08877790e-01 -1.17359310e-01 6.78735748e-02 ... 1.57922411e+00 1.05510032e+00 -1.57031107e+00] [-7.82263875e-01 -1.82204998e+00 9.01483059e-01 ... 7.51175940e-01 -3.77882123e-01 6.88868344e-01] [-1.83486953e-01 -1.42859221e+00 -2.55572796e-01 ... 
-6.45539641e-01 -6.39461502e-02 -8.44211951e-02]] [[ 6.20984375e-01 2.88984060e-01 6.95281565e-01 ... -1.45449163e-02 1.17048526e+00 4.26221669e-01] [-1.63255185e-01 2.63433635e-01 -1.28705609e+00 ... -1.01657581e+00 -9.30801213e-01 -1.27647960e+00] [ 8.49149674e-02 -1.14400542e+00 2.51909140e-02 ... 5.90500593e-01 -1.87623084e+00 1.14655271e-01] ... [ 5.11485457e-01 -1.13345340e-01 -4.81225252e-02 ... 1.37026325e-01 -8.69404554e-01 -1.43795565e-01] [-7.25859344e-01 -8.57191443e-01 1.10155451e+00 ... 1.91147514e-02 -2.64257312e-01 6.57203376e-01] [-6.62004113e-01 3.89533460e-01 -5.03079653e-01 ... -4.85980719e-01 -4.02233660e-01 -1.80663496e-01]]] [[[ 2.32014346e+00 1.38921905e+00 -1.17446637e+00 ... -1.85321224e+00 -1.26433039e+00 2.03369558e-01] [ 1.30753291e+00 -1.15146971e+00 7.66586065e-01 ... -7.94172287e-02 -7.27152050e-01 4.98751163e-01] [ 9.39217396e-03 1.94608140e+00 2.82182753e-01 ... 1.10222161e+00 -1.84995139e+00 1.29145890e-01] ... [-4.25996810e-01 -9.12925720e-01 -5.84165514e-01 ... 8.28743398e-01 1.74147856e+00 -7.18597114e-01] [ 1.13496065e+00 -3.13410014e-02 -2.67455995e-01 ... -3.47777188e-01 -7.23953426e-01 3.33878189e-01] [ 2.39214921e+00 4.21557367e-01 -2.80433536e-01 ... -4.29212749e-01 4.54106688e-01 -1.03036530e-01]] [[ 3.55071354e+00 3.21774840e+00 -6.14131913e-02 ... 2.17457768e-02 -2.42340922e+00 -3.19556803e-01] [ 3.22478652e+00 -1.15089262e+00 -2.58487988e+00 ... 2.22296536e-01 -2.63277149e+00 -6.55047596e-02] [ 1.67495334e+00 -4.82188165e-01 1.58564281e+00 ... -9.02993381e-01 3.41097653e-01 1.43662798e+00] ... [ 3.46461320e+00 1.39829338e+00 3.59220743e-01 ... -1.17188942e+00 8.97394836e-01 -2.01552582e+00] [ 1.59335101e+00 1.29400086e+00 4.29139948e+00 ... -5.51612973e-01 1.31896746e+00 7.76375949e-01] [ 3.64677131e-01 -3.81167293e-01 1.49653256e+00 ... 3.06258583e+00 2.05565190e+00 1.46645761e+00]] [[ 1.86242712e+00 1.43744294e-02 -2.81042624e+00 ... 
-2.35966325e-01 8.56141388e-01 -7.27646172e-01] [ 6.55598581e-01 1.78258932e+00 -8.60794485e-01 ... 3.50970954e-01 1.06299353e+00 -2.09405422e-01] [-1.86685538e+00 2.78282833e+00 4.23673332e-01 ... 6.64356425e-02 2.02252460e+00 4.39340174e-01] ... [-8.65123689e-01 -5.69471657e-01 -3.22742629e+00 ... -1.10569382e+00 1.00991845e+00 -1.54018092e+00] [ 1.92387795e+00 5.73690593e-01 1.38104177e+00 ... 8.92624438e-01 -2.00525451e+00 -8.20066854e-02] [ 1.72319394e-02 1.36405253e+00 -2.86556304e-01 ... -7.65089571e-01 2.35437825e-01 2.52769560e-01]] ... [[ 1.14376628e+00 -1.95646858e+00 -9.71802831e-01 ... -1.77016228e-01 -3.46003056e-01 -3.30181050e+00] [ 1.25171769e+00 -6.05178237e-01 -1.72466838e+00 ... -6.68451726e-01 -6.17337823e-01 -1.00476027e+00] [-3.34252566e-01 -7.51369178e-01 -1.61989176e+00 ... 1.13793433e+00 -1.07178652e+00 -7.82040298e-01] ... [-2.21732795e-01 7.06143975e-01 2.25097466e+00 ... 6.58141375e-01 -1.43811539e-01 1.53182656e-01] [ 2.21336126e+00 -1.65788221e+00 -4.11864662e+00 ... 5.62705755e-01 -1.41852713e+00 3.82572556e+00] [ 1.69892776e+00 3.87547106e-01 2.97975755e+00 ... 1.79095519e+00 1.94623619e-01 -8.49077225e-01]] [[-2.45809150e+00 1.05229628e+00 -4.29034799e-01 ... 2.70521927e+00 1.06440771e+00 -2.88315225e+00] [ 1.87531435e+00 3.63591522e-01 -1.93490529e+00 ... -1.28849834e-01 -3.23381233e+00 -2.06064796e+00] [-5.04218400e-01 3.11148190e+00 -3.45846683e-01 ... 2.33424142e-01 -8.11549485e-01 -6.71174526e-01] ... [ 4.39203888e-01 -1.32803929e+00 -6.66900933e-01 ... 6.75914645e-01 7.98095763e-02 2.37822747e+00] [-1.19965196e+00 -6.50515407e-02 -2.22307175e-01 ... 2.72544622e-01 1.08954453e+00 8.97007167e-01] [ 9.84557942e-02 1.78630099e-01 4.59286720e-01 ... -1.45422530e+00 1.23463225e+00 6.28100634e-01]] [[-3.27212989e-01 -1.92427993e+00 -2.37349081e+00 ... 1.07490313e+00 -1.69134438e+00 3.33452964e+00] [ 3.70555878e-01 -1.29428506e+00 -7.51532495e-01 ... 
1.23358655e+00 -5.85953355e-01 -1.19142091e+00] [-6.93546236e-02 -1.97520312e-02 -1.93336964e-01 ... 3.53281593e+00 -1.57694507e+00 -1.98712599e+00] ... [-1.67317128e+00 -2.62745571e+00 2.46446514e+00 ... 4.60782468e-01 -9.67706382e-01 -2.84545571e-01] [ 1.25011697e-01 2.21357822e+00 -1.44761860e+00 ... -4.61398631e-01 -3.29847479e+00 7.48131037e-01] [ 1.34333372e+00 7.49783516e-01 6.32733583e-01 ... -3.69118422e-01 9.66062248e-01 -8.20515603e-02]]]]]; ov_res: [[[[[ 2.54397154e-01 -3.79970372e-02 -2.58203715e-01 ... -5.36072738e-02 -5.94089031e-01 3.16812843e-01] [-1.25034034e-01 -2.39331141e-01 4.93847616e-02 ... -2.02754632e-01 -3.99243861e-01 -3.69683176e-01] [ 1.72669768e-01 -7.59652555e-01 4.49063838e-01 ... -6.31100833e-01 -5.14138639e-01 4.89363149e-02] ... [-3.99746865e-01 -4.18584377e-01 1.66478574e-01 ... -2.76011616e-01 -3.56163859e-01 -2.51995306e-02] [-2.01429829e-01 -7.87892714e-02 3.60353768e-01 ... 4.24074009e-02 -3.05445064e-02 -2.18064204e-01] [-1.96069241e-01 4.12614495e-01 8.61690119e-02 ... 3.71900201e-02 1.00747012e-01 1.52967528e-01]] [[ 2.40906999e-01 2.01707636e-03 3.80218118e-01 ... -2.28240520e-01 -7.31589377e-01 3.19959909e-01] [-4.03405949e-02 -1.78791761e-01 -1.80314537e-02 ... 5.01583576e-01 4.15213376e-01 9.84836817e-02] [-3.84018689e-01 1.49883991e-02 -4.26982017e-03 ... -1.92427114e-01 3.10251355e-01 2.92272627e-01] ... [-1.68924525e-01 -1.29940510e-01 -2.15617493e-01 ... 3.07121396e-01 9.29481685e-02 4.56327379e-01] [-7.86614269e-02 -3.66013736e-01 -1.32687222e-02 ... -2.70997435e-01 3.94997329e-01 -2.67908454e-01] [-1.39422730e-01 2.71853656e-01 -4.08017308e-01 ... 2.84909960e-02 -2.47045770e-01 -2.00918391e-01]] [[ 4.00902003e-01 4.86632250e-02 8.87123123e-02 ... 2.35491931e-01 -8.60712901e-02 -4.36477631e-01] [ 1.31339818e-01 5.05318820e-01 1.94051519e-01 ... 3.86353165e-01 -5.17718643e-02 -3.54835913e-02] [ 3.32142800e-01 -7.08144605e-01 -3.25219423e-01 ... -7.50016212e-01 -1.78073108e-01 1.25151366e-01] ... 
[ 7.20876992e-01 1.46146014e-01 2.17460707e-01 ... -3.93255562e-01 -5.16953111e-01 3.82688522e-01] [-5.34633398e-01 3.52848560e-01 5.26029095e-02 ... 3.14573705e-01 2.36496076e-01 5.05521476e-01] [ 5.74054658e-01 2.52462834e-01 -3.08387149e-02 ... 3.21688037e-03 1.60037279e-01 4.95334029e-01]] ... [[-3.61077279e-01 1.24960124e-01 -1.24313524e-02 ... 4.34496552e-01 7.61204064e-01 7.45704845e-02] [-3.10022205e-01 2.25499883e-01 -2.25295871e-01 ... -1.25596784e-02 6.02243602e-01 -3.94731313e-02] [-6.51700944e-02 4.03718650e-01 3.39784235e-01 ... 1.90030828e-01 1.46238536e-01 3.18256885e-01] ... [-3.30136359e-01 -5.15914321e-01 3.51746768e-01 ... 1.93093196e-01 1.74272776e-01 -5.01987457e-01] [ 6.37865007e-01 -1.78555652e-01 -2.88225770e-01 ... -2.56901830e-01 -8.13442841e-02 1.48380101e-01] [-4.00218815e-01 1.98392168e-01 7.99476504e-01 ... -2.13696763e-01 2.71091968e-01 1.36379361e-01]] [[-2.55008876e-01 -4.42687720e-01 1.68534786e-01 ... 4.07928109e-01 -4.31876570e-01 -3.49341959e-01] [ 4.41162698e-02 1.75537542e-01 7.90186878e-03 ... -4.68450546e-01 -3.24690640e-01 -2.52177626e-01] [ 2.98113048e-01 3.02719101e-02 -7.23942369e-02 ... -5.48931837e-01 3.21528107e-01 4.71446067e-01] ... [ 7.43721962e-01 2.50500739e-01 -8.24045897e-01 ... -3.48976880e-01 1.05392486e-01 1.84402261e-02] [-6.80435419e-01 -5.42924583e-01 1.93107381e-01 ... -5.02875984e-01 4.86373335e-01 -1.43923700e-01] [ 2.26396546e-01 4.52684134e-01 2.72636771e-01 ... 3.07897866e-01 1.59553051e-01 4.89669032e-02]] [[-1.48623630e-01 4.93328273e-02 7.82295823e-01 ... -2.62267608e-02 -2.23829374e-01 2.62582093e-01] [-1.30862445e-01 4.03461814e-01 -1.71734199e-01 ... 3.21678013e-01 -1.09536000e-01 7.72845075e-02] [-4.46051180e-01 1.14530645e-01 -4.63274688e-01 ... -4.52087611e-01 -2.03179698e-02 1.16584208e-02] ... [ 4.15998250e-01 -3.56838405e-01 -7.42200017e-01 ... -7.74687648e-01 -9.44757223e-01 9.14678872e-01] [-1.10610269e-01 5.93278110e-01 -1.99978828e-01 ... 
1.86908945e-01 -2.32264549e-01 1.11034745e-02] [-1.52107567e-01 -6.32618904e-01 2.28747576e-02 ... -1.03988484e-01 6.01689279e-01 -8.41733441e-02]]] [[[-2.27510999e-03 1.44277728e+00 -1.11018896e+00 ... -2.39166588e-01 -6.84700727e-01 8.06480050e-01] [-4.74613339e-01 5.53776622e-01 7.23865628e-01 ... -1.06676209e+00 -2.84907877e-01 6.81607962e-01] [-1.58901885e-01 -6.21734321e-01 -6.27829313e-01 ... -5.50120429e-04 7.29782820e-01 1.09796393e+00] ... [ 9.82479274e-01 -2.74143100e-01 -1.33018017e+00 ... -3.03228050e-02 1.00193238e+00 5.41759133e-01] [-3.81832361e-01 -1.09194899e+00 -1.39754343e+00 ... 6.86921835e-01 1.13238919e+00 5.57566404e-01] [-3.53655607e-01 1.46258637e-01 -6.69144332e-01 ... -4.42405343e-01 5.39653957e-01 4.40330029e-01]] [[ 2.17176020e-01 -5.53859234e-01 -9.73352119e-02 ... -1.20817697e+00 8.15301836e-02 -3.79170328e-01] [-1.07026410e+00 7.99581826e-01 3.44189167e-01 ... -4.97766107e-01 -1.95048943e-01 -1.09330142e+00] [-1.89887345e+00 8.47734213e-01 6.15452111e-01 ... 2.62449980e-01 8.12182426e-01 9.92617533e-02] ... [ 6.05473936e-01 -7.20759392e-01 1.81652486e+00 ... -7.98003435e-01 2.39396274e-01 3.60270977e-01] [ 1.39368474e+00 -3.95793498e-01 7.84595847e-01 ... -6.38804138e-01 -2.96504200e-01 6.31995678e-01] [-3.46830010e-01 3.05331320e-01 -4.79830563e-01 ... 8.56609523e-01 2.55788535e-01 2.13867262e-01]] [[ 3.93812716e-01 4.22927022e-01 3.90852183e-01 ... -1.76465201e+00 6.96812391e-01 6.69339180e-01] [-3.92160863e-02 -7.44031310e-01 2.79282033e-01 ... -3.02248597e-01 3.36213797e-01 -2.73222029e-01] [ 8.63304555e-01 1.69903851e+00 -3.47310156e-01 ... 6.94712520e-01 8.91643465e-02 -2.28211567e-01] ... [-4.56490926e-02 -1.67661265e-01 1.19858587e+00 ... -9.15507019e-01 -6.02899671e-01 -3.57314013e-02] [-5.66445887e-01 9.51256335e-01 -6.35488391e-01 ... -1.25734851e-01 -4.06957299e-01 1.39708126e+00] [ 5.44832110e-01 7.98148930e-01 1.02100277e+00 ... -1.33067703e+00 -3.12122643e-01 1.45500124e-01]] ... 
[[-7.49882102e-01 4.90601361e-01 4.45114374e-01 ... 6.39995277e-01 -1.50320148e+00 4.26139683e-02] [ 7.17682168e-02 4.37286198e-01 -1.53047943e+00 ... -4.37472910e-01 1.94498986e-01 4.33660865e-01] [-4.63916123e-01 -6.76635683e-01 1.34507072e+00 ... 2.55067736e-01 -1.01361668e+00 -1.16602921e+00] ... [ 4.43570137e-01 -7.99907446e-01 -4.40760583e-01 ... -1.08106506e+00 2.11531073e-01 2.08134994e-01] [-4.39486414e-01 -1.96803808e+00 -1.12417376e+00 ... -6.72573566e-01 2.95478612e-01 9.32046056e-01] [ 2.03047585e+00 -1.10597801e+00 2.21807331e-01 ... 1.22141108e-01 8.43771815e-01 -2.02801544e-02]] [[-7.48609662e-01 6.21864915e-01 -1.80703735e+00 ... -1.30686820e-01 -3.88790369e-02 2.57836193e-01] [-2.88257539e-01 -8.42182115e-02 1.70145094e+00 ... 7.24817887e-02 5.56084692e-01 4.54280615e-01] [-2.12428927e-01 1.55877757e+00 -3.37593347e-01 ... -1.00303970e-01 4.68336225e-01 -3.65613908e-01] ... [ 4.08877790e-01 -1.17359310e-01 6.78735748e-02 ... 1.57922411e+00 1.05510032e+00 -1.57031107e+00] [-7.82263875e-01 -1.82204998e+00 9.01483059e-01 ... 7.51175940e-01 -3.77882123e-01 6.88868344e-01] [-1.83486953e-01 -1.42859221e+00 -2.55572796e-01 ... -6.45539641e-01 -6.39461502e-02 -8.44211951e-02]] [[ 6.20984375e-01 2.88984060e-01 6.95281565e-01 ... -1.45449163e-02 1.17048526e+00 4.26221669e-01] [-1.63255185e-01 2.63433635e-01 -1.28705609e+00 ... -1.01657581e+00 -9.30801213e-01 -1.27647960e+00] [ 8.49149674e-02 -1.14400542e+00 2.51909140e-02 ... 5.90500593e-01 -1.87623084e+00 1.14655271e-01] ... [ 5.11485457e-01 -1.13345340e-01 -4.81225252e-02 ... 1.37026325e-01 -8.69404554e-01 -1.43795565e-01] [-7.25859344e-01 -8.57191443e-01 1.10155451e+00 ... 1.91147514e-02 -2.64257312e-01 6.57203376e-01] [-6.62004113e-01 3.89533460e-01 -5.03079653e-01 ... -4.85980719e-01 -4.02233660e-01 -1.80663496e-01]]] [[[ 2.32014346e+00 1.38921905e+00 -1.17446637e+00 ... -1.85321224e+00 -1.26433039e+00 2.03369558e-01] [ 1.30753291e+00 -1.15146971e+00 7.66586065e-01 ... 
-7.94172287e-02 -7.27152050e-01 4.98751163e-01] [ 9.39217396e-03 1.94608140e+00 2.82182753e-01 ... 1.10222161e+00 -1.84995139e+00 1.29145890e-01] ... [-4.25996810e-01 -9.12925720e-01 -5.84165514e-01 ... 8.28743398e-01 1.74147856e+00 -7.18597114e-01] [ 1.13496065e+00 -3.13410014e-02 -2.67455995e-01 ... -3.47777188e-01 -7.23953426e-01 3.33878189e-01] [ 2.39214921e+00 4.21557367e-01 -2.80433536e-01 ... -4.29212749e-01 4.54106688e-01 -1.03036530e-01]] [[ 3.55071354e+00 3.21774840e+00 -6.14131913e-02 ... 2.17457768e-02 -2.42340922e+00 -3.19556803e-01] [ 3.22478652e+00 -1.15089262e+00 -2.58487988e+00 ... 2.22296536e-01 -2.63277149e+00 -6.55047596e-02] [ 1.67495334e+00 -4.82188165e-01 1.58564281e+00 ... -9.02993381e-01 3.41097653e-01 1.43662798e+00] ... [ 3.46461320e+00 1.39829338e+00 3.59220743e-01 ... -1.17188942e+00 8.97394836e-01 -2.01552582e+00] [ 1.59335101e+00 1.29400086e+00 4.29139948e+00 ... -5.51612973e-01 1.31896746e+00 7.76375949e-01] [ 3.64677131e-01 -3.81167293e-01 1.49653256e+00 ... 3.06258583e+00 2.05565190e+00 1.46645761e+00]] [[ 1.86242712e+00 1.43744294e-02 -2.81042624e+00 ... -2.35966325e-01 8.56141388e-01 -7.27646172e-01] [ 6.55598581e-01 1.78258932e+00 -8.60794485e-01 ... 3.50970954e-01 1.06299353e+00 -2.09405422e-01] [-1.86685538e+00 2.78282833e+00 4.23673332e-01 ... 6.64356425e-02 2.02252460e+00 4.39340174e-01] ... [-8.65123689e-01 -5.69471657e-01 -3.22742629e+00 ... -1.10569382e+00 1.00991845e+00 -1.54018092e+00] [ 1.92387795e+00 5.73690593e-01 1.38104177e+00 ... 8.92624438e-01 -2.00525451e+00 -8.20066854e-02] [ 1.72319394e-02 1.36405253e+00 -2.86556304e-01 ... -7.65089571e-01 2.35437825e-01 2.52769560e-01]] ... [[ 1.14376628e+00 -1.95646858e+00 -9.71802831e-01 ... -1.77016228e-01 -3.46003056e-01 -3.30181050e+00] [ 1.25171769e+00 -6.05178237e-01 -1.72466838e+00 ... -6.68451726e-01 -6.17337823e-01 -1.00476027e+00] [-3.34252566e-01 -7.51369178e-01 -1.61989176e+00 ... 1.13793433e+00 -1.07178652e+00 -7.82040298e-01] ... 
[-2.21732795e-01 7.06143975e-01 2.25097466e+00 ... 6.58141375e-01 -1.43811539e-01 1.53182656e-01] [ 2.21336126e+00 -1.65788221e+00 -4.11864662e+00 ... 5.62705755e-01 -1.41852713e+00 3.82572556e+00] [ 1.69892776e+00 3.87547106e-01 2.97975755e+00 ... 1.79095519e+00 1.94623619e-01 -8.49077225e-01]] [[-2.45809150e+00 1.05229628e+00 -4.29034799e-01 ... 2.70521927e+00 1.06440771e+00 -2.88315225e+00] [ 1.87531435e+00 3.63591522e-01 -1.93490529e+00 ... -1.28849834e-01 -3.23381233e+00 -2.06064796e+00] [-5.04218400e-01 3.11148190e+00 -3.45846683e-01 ... 2.33424142e-01 -8.11549485e-01 -6.71174526e-01] ... [ 4.39203888e-01 -1.32803929e+00 -6.66900933e-01 ... 6.75914645e-01 7.98095763e-02 2.37822747e+00] [-1.19965196e+00 -6.50515407e-02 -2.22307175e-01 ... 2.72544622e-01 1.08954453e+00 8.97007167e-01] [ 9.84557942e-02 1.78630099e-01 4.59286720e-01 ... -1.45422530e+00 1.23463225e+00 6.28100634e-01]] [[-3.27212989e-01 -1.92427993e+00 -2.37349081e+00 ... 1.07490313e+00 -1.69134438e+00 3.33452964e+00] [ 3.70555878e-01 -1.29428506e+00 -7.51532495e-01 ... 1.23358655e+00 -5.85953355e-01 -1.19142091e+00] [-6.93546236e-02 -1.97520312e-02 -1.93336964e-01 ... 3.53281593e+00 -1.57694507e+00 -1.98712599e+00] ... [-1.67317128e+00 -2.62745571e+00 2.46446514e+00 ... 4.60782468e-01 -9.67706382e-01 -2.84545571e-01] [ 1.25011697e-01 2.21357822e+00 -1.44761860e+00 ... -4.61398631e-01 -3.29847479e+00 7.48131037e-01] [ 1.34333372e+00 7.49783516e-01 6.32733583e-01 ... -3.69118422e-01 9.66062248e-01 -8.20515603e-02]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 1, 1], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_920.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.2433 (2,1,1,.,.) = -1.3575 (3,1,1,.,.) = -0.3986 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) n of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES 
or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): 
Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compilfw_re: [[[[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.58088756e+00 2.66480520e-02 ... -4.16459590e-01 -8.26288581e-01 0.00000000e+00] [ 0.00000000e+00 2.08793426e+00 -5.92433631e-01 ... -7.55085289e-01 -3.03699762e-01 0.00000000e+00] ... [ 0.00000000e+00 -2.11943603e+00 9.87911880e-01 ... 4.02636856e-01 -2.70905685e+00 0.00000000e+00] [ 0.00000000e+00 6.95525482e-02 1.40918326e+00 ... -6.52664244e-01 -5.70700347e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.05304027e-01 3.69609892e-01 ... -1.70407057e+00 1.06584489e+00 0.00000000e+00] [ 0.00000000e+00 -1.75146925e+00 -7.68578887e-01 ... 
-6.76329553e-01 1.46096730e+00 0.00000000e+00] ... [ 0.00000000e+00 2.56747633e-01 1.22288632e+00 ... -2.41946840e+00 -1.23588002e+00 0.00000000e+00] [ 0.00000000e+00 -1.64447272e+00 1.15525687e+00 ... 7.84620363e-03 -2.62887120e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.82018363e-01 8.07088196e-01 ... 4.53315347e-01 -3.65782559e-01 0.00000000e+00] [ 0.00000000e+00 1.50064743e+00 -1.44951433e-01 ... 1.98942199e-01 -1.11365604e+00 0.00000000e+00] ... [ 0.00000000e+00 -4.43910211e-01 1.06761241e+00 ... -4.30649936e-01 7.40665793e-01 0.00000000e+00] [ 0.00000000e+00 9.88078713e-01 -9.73339677e-02 ... 2.19385311e-01 -1.85632980e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.00752807e+00 8.36687863e-01 ... 1.75444257e+00 -1.14351869e+00 0.00000000e+00] [ 0.00000000e+00 -5.08576870e-01 6.74331546e-01 ... 2.30861282e+00 1.47527084e-01 0.00000000e+00] ... [ 0.00000000e+00 -1.60512996e+00 -3.79424721e-01 ... 2.02898455e+00 1.09953690e+00 0.00000000e+00] [ 0.00000000e+00 1.27031744e-01 -7.07657456e-01 ... 2.19038868e+00 9.44900393e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -9.24401045e-01 -1.02003765e+00 ... -2.42539930e+00 3.33323538e-01 0.00000000e+00] [ 0.00000000e+00 1.75491031e-02 -1.08773184e+00 ... 2.17915569e-02 -2.08357072e+00 0.00000000e+00] ... [ 0.00000000e+00 2.10274768e+00 3.07954460e-01 ... 5.11657715e-01 4.05482483e+00 0.00000000e+00] [ 0.00000000e+00 -2.37733102e+00 5.77997029e-01 ... 
1.86794829e-02 9.21144664e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.54135466e+00 -1.65666774e-01 ... 4.75707054e-01 3.51073146e-01 0.00000000e+00] [ 0.00000000e+00 -2.69310164e+00 -9.11460519e-01 ... 7.68010139e-01 1.83869421e+00 0.00000000e+00] ... [ 0.00000000e+00 2.00142890e-01 -2.69064128e-01 ... 2.25360608e+00 -2.08496261e+00 0.00000000e+00] [ 0.00000000e+00 -3.25918019e-01 -1.06642246e+00 ... -1.79734266e+00 7.46220112e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.29131949e+00 2.19879818e+00 ... 2.24145561e-01 -1.64907885e+00 0.00000000e+00] [ 0.00000000e+00 1.42243195e+00 -6.59180880e-01 ... 3.39475656e+00 2.01705217e+00 0.00000000e+00] ... [ 0.00000000e+00 2.02094030e+00 -1.79186627e-01 ... 2.36970246e-01 -9.16398168e-01 0.00000000e+00] [ 0.00000000e+00 -9.16677594e-01 -1.92799374e-01 ... -8.85094464e-01 1.10033059e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -8.00539017e-01 1.15472662e+00 ... -6.00478172e-01 1.10239434e+00 0.00000000e+00] [ 0.00000000e+00 1.47495437e+00 -7.79851899e-02 ... 9.23625827e-01 2.74759459e+00 0.00000000e+00] ... [ 0.00000000e+00 1.46289158e+00 5.54660857e-01 ... 3.91598880e-01 1.98571965e-01 0.00000000e+00] [ 0.00000000e+00 -9.99500513e-01 2.80471087e-01 ... 1.60808337e+00 -3.11139543e-02 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 
0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.51867688e-01 -3.68111990e-02 ... -8.20184350e-01 1.32534933e+00 0.00000000e+00] [ 0.00000000e+00 -6.36177957e-01 -7.31999516e-01 ... 9.25806522e-01 1.99694228e+00 0.00000000e+00] ... [ 0.00000000e+00 2.31731042e-01 6.55757904e-01 ... 1.61068809e+00 -9.97654676e-01 0.00000000e+00] [ 0.00000000e+00 7.94418633e-01 9.59440053e-01 ... -3.31839681e-01 9.29767132e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.26680183e+00 3.15333277e-01 ... -9.09708917e-01 -2.33206719e-01 0.00000000e+00] [ 0.00000000e+00 -1.32841155e-01 -8.80464911e-02 ... -1.52642012e+00 2.20527887e-01 0.00000000e+00] ... [ 0.00000000e+00 -7.49061406e-02 -1.22336066e+00 ... -1.28348780e+00 8.85165393e-01 0.00000000e+00] [ 0.00000000e+00 -5.47939777e-01 1.94658291e+00 ... -6.83943748e-01 -5.16851366e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -3.04515779e-01 -2.85665798e+00 ... -1.94962966e+00 -4.19251025e-01 0.00000000e+00] [ 0.00000000e+00 2.35187665e-01 7.32801318e-01 ... 1.44534683e+00 1.24836290e+00 0.00000000e+00] ... [ 0.00000000e+00 -5.92984781e-02 3.65540534e-01 ... 1.62720788e+00 -1.42520177e+00 0.00000000e+00] [ 0.00000000e+00 1.22281957e+00 -9.59480464e-01 ... -2.86400467e-01 -2.60597229e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.20921591e-01 2.23137641e+00 ... -6.42118573e-01 7.86331356e-01 0.00000000e+00] [ 0.00000000e+00 -9.70573604e-01 -2.07246494e+00 ... 
-2.76452088e+00 8.03890646e-01 0.00000000e+00] ... [ 0.00000000e+00 -6.79471910e-01 -3.22532624e-01 ... -9.01119947e-01 2.91604370e-01 0.00000000e+00] [ 0.00000000e+00 -5.11105023e-02 8.17874432e-01 ... 1.73904192e+00 1.70720249e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.11885065e-01 4.28040236e-01 ... 3.99990529e-01 4.00811166e-01 0.00000000e+00] [ 0.00000000e+00 -4.46438700e-01 -3.42729628e-01 ... 1.47920236e-01 2.47938573e-01 0.00000000e+00] ... [ 0.00000000e+00 -1.96010843e-01 1.08612075e-01 ... 6.40018135e-02 -2.85214216e-01 0.00000000e+00] [ 0.00000000e+00 -3.31100285e-01 -5.13834596e-01 ... -4.17646706e-01 5.18306136e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.42187953e-01 4.60048348e-01 ... -6.24520719e-01 6.67816281e-01 0.00000000e+00] [ 0.00000000e+00 -8.16062570e-01 3.82960349e-01 ... 8.19868818e-02 -5.32681458e-02 0.00000000e+00] ... [ 0.00000000e+00 -4.81495321e-01 4.21137810e-02 ... 1.17972769e-01 2.57700533e-01 0.00000000e+00] [ 0.00000000e+00 -3.83709639e-01 5.45143969e-02 ... 3.10208976e-01 -5.48438132e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.49990517e-01 8.61733109e-02 ... -1.39750643e-02 2.05394253e-01 0.00000000e+00] [ 0.00000000e+00 1.04234695e-01 1.15621462e-01 ... 1.33764878e-01 -2.82786608e-01 0.00000000e+00] ... [ 0.00000000e+00 -2.10312650e-01 -2.56462842e-01 ... 5.86334229e-01 3.61002237e-01 0.00000000e+00] [ 0.00000000e+00 -9.90303516e-01 4.00580138e-01 ... 
8.23727667e-01 4.45074558e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.94369435e-01 -5.17620472e-04 ... -5.23769222e-02 1.36923358e-01 0.00000000e+00] [ 0.00000000e+00 -3.26148659e-01 3.59353632e-01 ... 5.27874194e-02 1.93563655e-01 0.00000000e+00] ... [ 0.00000000e+00 2.41398811e-01 -4.99475002e-01 ... -4.17470545e-01 -1.89909905e-01 0.00000000e+00] [ 0.00000000e+00 4.61811125e-02 -3.70890021e-01 ... 5.51688552e-01 2.63860792e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.39464876e-02 1.54631808e-01 ... -1.74224392e-01 -1.88936189e-01 0.00000000e+00] [ 0.00000000e+00 2.70800721e-02 -5.64237177e-01 ... -1.17487058e-01 1.42406691e-02 0.00000000e+00] ... [ 0.00000000e+00 -9.64111626e-01 2.79686421e-01 ... 8.89193192e-02 -2.16291800e-01 0.00000000e+00] [ 0.00000000e+00 1.21988499e+00 -1.16860472e-01 ... 3.27675015e-01 -4.31043565e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.85364199e-02 4.04733270e-01 ... -1.63755089e-01 2.37693280e-01 0.00000000e+00] [ 0.00000000e+00 -7.32075810e-01 -2.94952821e-02 ... 9.94575739e-01 -2.73306608e-01 0.00000000e+00] ... [ 0.00000000e+00 5.69221497e-01 9.06077862e-01 ... -6.84188008e-01 -5.00221908e-01 0.00000000e+00] [ 0.00000000e+00 1.43261686e-01 2.23494366e-01 ... -1.13742001e-01 5.53775728e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 
0.00000000e+00 0.00000000e+00 0.00000000e+00]]]]]; ov_res: [[[[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.58088756e+00 2.66480520e-02 ... -4.16459590e-01 -8.26288581e-01 0.00000000e+00] [ 0.00000000e+00 2.08793426e+00 -5.92433631e-01 ... -7.55085289e-01 -3.03699762e-01 0.00000000e+00] ... [ 0.00000000e+00 -2.11943603e+00 9.87911880e-01 ... 4.02636856e-01 -2.70905685e+00 0.00000000e+00] [ 0.00000000e+00 6.95525482e-02 1.40918326e+00 ... -6.52664244e-01 -5.70700347e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.05304027e-01 3.69609892e-01 ... -1.70407057e+00 1.06584489e+00 0.00000000e+00] [ 0.00000000e+00 -1.75146925e+00 -7.68578887e-01 ... -6.76329553e-01 1.46096730e+00 0.00000000e+00] ... [ 0.00000000e+00 2.56747633e-01 1.22288632e+00 ... -2.41946840e+00 -1.23588002e+00 0.00000000e+00] [ 0.00000000e+00 -1.64447272e+00 1.15525687e+00 ... 7.84620363e-03 -2.62887120e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.82018363e-01 8.07088196e-01 ... 4.53315347e-01 -3.65782559e-01 0.00000000e+00] [ 0.00000000e+00 1.50064743e+00 -1.44951433e-01 ... 1.98942199e-01 -1.11365604e+00 0.00000000e+00] ... [ 0.00000000e+00 -4.43910211e-01 1.06761241e+00 ... -4.30649936e-01 7.40665793e-01 0.00000000e+00] [ 0.00000000e+00 9.88078713e-01 -9.73339677e-02 ... 2.19385311e-01 -1.85632980e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 
0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.00752807e+00 8.36687863e-01 ... 1.75444257e+00 -1.14351869e+00 0.00000000e+00] [ 0.00000000e+00 -5.08576870e-01 6.74331546e-01 ... 2.30861282e+00 1.47527084e-01 0.00000000e+00] ... [ 0.00000000e+00 -1.60512996e+00 -3.79424721e-01 ... 2.02898455e+00 1.09953690e+00 0.00000000e+00] [ 0.00000000e+00 1.27031744e-01 -7.07657456e-01 ... 2.19038868e+00 9.44900393e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -9.24401045e-01 -1.02003765e+00 ... -2.42539930e+00 3.33323538e-01 0.00000000e+00] [ 0.00000000e+00 1.75491031e-02 -1.08773184e+00 ... 2.17915569e-02 -2.08357072e+00 0.00000000e+00] ... [ 0.00000000e+00 2.10274768e+00 3.07954460e-01 ... 5.11657715e-01 4.05482483e+00 0.00000000e+00] [ 0.00000000e+00 -2.37733102e+00 5.77997029e-01 ... 1.86794829e-02 9.21144664e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.54135466e+00 -1.65666774e-01 ... 4.75707054e-01 3.51073146e-01 0.00000000e+00] [ 0.00000000e+00 -2.69310164e+00 -9.11460519e-01 ... 7.68010139e-01 1.83869421e+00 0.00000000e+00] ... [ 0.00000000e+00 2.00142890e-01 -2.69064128e-01 ... 2.25360608e+00 -2.08496261e+00 0.00000000e+00] [ 0.00000000e+00 -3.25918019e-01 -1.06642246e+00 ... -1.79734266e+00 7.46220112e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.29131949e+00 2.19879818e+00 ... 2.24145561e-01 -1.64907885e+00 0.00000000e+00] [ 0.00000000e+00 1.42243195e+00 -6.59180880e-01 ... 
3.39475656e+00 2.01705217e+00 0.00000000e+00] ... [ 0.00000000e+00 2.02094030e+00 -1.79186627e-01 ... 2.36970246e-01 -9.16398168e-01 0.00000000e+00] [ 0.00000000e+00 -9.16677594e-01 -1.92799374e-01 ... -8.85094464e-01 1.10033059e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -8.00539017e-01 1.15472662e+00 ... -6.00478172e-01 1.10239434e+00 0.00000000e+00] [ 0.00000000e+00 1.47495437e+00 -7.79851899e-02 ... 9.23625827e-01 2.74759459e+00 0.00000000e+00] ... [ 0.00000000e+00 1.46289158e+00 5.54660857e-01 ... 3.91598880e-01 1.98571965e-01 0.00000000e+00] [ 0.00000000e+00 -9.99500513e-01 2.80471087e-01 ... 1.60808337e+00 -3.11139543e-02 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.51867688e-01 -3.68111990e-02 ... -8.20184350e-01 1.32534933e+00 0.00000000e+00] [ 0.00000000e+00 -6.36177957e-01 -7.31999516e-01 ... 9.25806522e-01 1.99694228e+00 0.00000000e+00] ... [ 0.00000000e+00 2.31731042e-01 6.55757904e-01 ... 1.61068809e+00 -9.97654676e-01 0.00000000e+00] [ 0.00000000e+00 7.94418633e-01 9.59440053e-01 ... -3.31839681e-01 9.29767132e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.26680183e+00 3.15333277e-01 ... -9.09708917e-01 -2.33206719e-01 0.00000000e+00] [ 0.00000000e+00 -1.32841155e-01 -8.80464911e-02 ... -1.52642012e+00 2.20527887e-01 0.00000000e+00] ... [ 0.00000000e+00 -7.49061406e-02 -1.22336066e+00 ... -1.28348780e+00 8.85165393e-01 0.00000000e+00] [ 0.00000000e+00 -5.47939777e-01 1.94658291e+00 ... 
-6.83943748e-01 -5.16851366e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -3.04515779e-01 -2.85665798e+00 ... -1.94962966e+00 -4.19251025e-01 0.00000000e+00] [ 0.00000000e+00 2.35187665e-01 7.32801318e-01 ... 1.44534683e+00 1.24836290e+00 0.00000000e+00] ... [ 0.00000000e+00 -5.92984781e-02 3.65540534e-01 ... 1.62720788e+00 -1.42520177e+00 0.00000000e+00] [ 0.00000000e+00 1.22281957e+00 -9.59480464e-01 ... -2.86400467e-01 -2.60597229e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.20921591e-01 2.23137641e+00 ... -6.42118573e-01 7.86331356e-01 0.00000000e+00] [ 0.00000000e+00 -9.70573604e-01 -2.07246494e+00 ... -2.76452088e+00 8.03890646e-01 0.00000000e+00] ... [ 0.00000000e+00 -6.79471910e-01 -3.22532624e-01 ... -9.01119947e-01 2.91604370e-01 0.00000000e+00] [ 0.00000000e+00 -5.11105023e-02 8.17874432e-01 ... 1.73904192e+00 1.70720249e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.11885065e-01 4.28040236e-01 ... 3.99990529e-01 4.00811166e-01 0.00000000e+00] [ 0.00000000e+00 -4.46438700e-01 -3.42729628e-01 ... 1.47920236e-01 2.47938573e-01 0.00000000e+00] ... [ 0.00000000e+00 -1.96010843e-01 1.08612075e-01 ... 6.40018135e-02 -2.85214216e-01 0.00000000e+00] [ 0.00000000e+00 -3.31100285e-01 -5.13834596e-01 ... -4.17646706e-01 5.18306136e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 
0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.42187953e-01 4.60048348e-01 ... -6.24520719e-01 6.67816281e-01 0.00000000e+00] [ 0.00000000e+00 -8.16062570e-01 3.82960349e-01 ... 8.19868818e-02 -5.32681458e-02 0.00000000e+00] ... [ 0.00000000e+00 -4.81495321e-01 4.21137810e-02 ... 1.17972769e-01 2.57700533e-01 0.00000000e+00] [ 0.00000000e+00 -3.83709639e-01 5.45143969e-02 ... 3.10208976e-01 -5.48438132e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.49990517e-01 8.61733109e-02 ... -1.39750643e-02 2.05394253e-01 0.00000000e+00] [ 0.00000000e+00 1.04234695e-01 1.15621462e-01 ... 1.33764878e-01 -2.82786608e-01 0.00000000e+00] ... [ 0.00000000e+00 -2.10312650e-01 -2.56462842e-01 ... 5.86334229e-01 3.61002237e-01 0.00000000e+00] [ 0.00000000e+00 -9.90303516e-01 4.00580138e-01 ... 8.23727667e-01 4.45074558e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.94369435e-01 -5.17620472e-04 ... -5.23769222e-02 1.36923358e-01 0.00000000e+00] [ 0.00000000e+00 -3.26148659e-01 3.59353632e-01 ... 5.27874194e-02 1.93563655e-01 0.00000000e+00] ... [ 0.00000000e+00 2.41398811e-01 -4.99475002e-01 ... -4.17470545e-01 -1.89909905e-01 0.00000000e+00] [ 0.00000000e+00 4.61811125e-02 -3.70890021e-01 ... 5.51688552e-01 2.63860792e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.39464876e-02 1.54631808e-01 ... -1.74224392e-01 -1.88936189e-01 0.00000000e+00] [ 0.00000000e+00 2.70800721e-02 -5.64237177e-01 ... 
-1.17487058e-01 1.42406691e-02 0.00000000e+00] ... [ 0.00000000e+00 -9.64111626e-01 2.79686421e-01 ... 8.89193192e-02 -2.16291800e-01 0.00000000e+00] [ 0.00000000e+00 1.21988499e+00 -1.16860472e-01 ... 3.27675015e-01 -4.31043565e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.85364199e-02 4.04733270e-01 ... -1.63755089e-01 2.37693280e-01 0.00000000e+00] [ 0.00000000e+00 -7.32075810e-01 -2.94952821e-02 ... 9.94575739e-01 -2.73306608e-01 0.00000000e+00] ... [ 0.00000000e+00 5.69221497e-01 9.06077862e-01 ... -6.84188008e-01 -5.00221908e-01 0.00000000e+00] [ 0.00000000e+00 1.43261686e-01 2.23494366e-01 ... -1.13742001e-01 5.53775728e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 0, 0], 'dilations': [2, 2, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_922.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.2241 (2,1,1,.,.) = 1.0768 (3,1,1,.,.) = -0.9572 [ CPUFloatType{3,1,1,1,1} ]]() %11 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%11) fw_re: [[[[[ 3.73267114e-01 -1.12457298e-01 1.14092194e-01 ... -3.49326611e-01 7.37164693e-04 1.59933284e-01] [-2.22802367e-02 -3.38907957e-01 -1.73341289e-01 ... 3.66709590e-01 3.01151812e-01 3.25820595e-01] [ 1.56140968e-01 -1.41910061e-01 1.03700101e-01 ... 1.77821800e-01 9.12117958e-02 6.60555214e-02] ... [ 2.88245618e-01 -1.42417908e-01 1.27119586e-01 ... -7.47762173e-02 -2.22903088e-01 -1.92018688e-01] [ 7.50758424e-02 -1.20359518e-01 -8.95398334e-02 ... -1.26320750e-01 -1.43622518e-01 -3.55674326e-02] [ 4.25766222e-02 -1.58418208e-01 -2.33955886e-02 ... 1.34368718e-01 -1.43983364e-01 -2.16093007e-02]] [[-2.34764218e-02 1.87555566e-01 -1.18939839e-01 ... 1.44572079e-01 3.26162219e-01 8.52584764e-02] [-7.75464401e-02 -5.50532266e-02 1.35092482e-01 ... 
-3.79392385e-01 -3.33859801e-01 8.33723247e-02] [-1.19721256e-01 -5.63373007e-02 1.05518796e-01 ... 2.28401255e-02 1.37662962e-01 -6.84784278e-02] ... [ 4.54269201e-01 -2.93893367e-01 -1.37981474e-01 ... 8.76603276e-02 -1.65936798e-01 2.16524098e-02] [-3.77518982e-01 2.04386979e-01 -2.52814859e-01 ... -2.30731279e-01 -1.70699865e-01 3.48408282e-01] [ 7.30066299e-02 1.19153127e-01 -4.99679893e-01 ... 1.30268723e-01 4.05340120e-02 -1.74935862e-01]] [[ 5.77132940e-01 -3.56877148e-01 -4.37394917e-01 ... 1.01754054e-01 2.52393223e-02 -3.79058152e-01] [-3.22267681e-01 3.82172838e-02 -6.47278056e-02 ... 3.72297987e-02 1.34993777e-01 -2.49747679e-01] [-7.76639208e-02 1.14171080e-01 -3.23829539e-02 ... -5.05216539e-01 3.29356045e-02 2.38882303e-02] ... [-1.56232372e-01 2.14782096e-02 6.70337453e-02 ... -2.65351653e-01 -1.66176111e-01 1.38005346e-01] [-2.50132252e-02 6.36446327e-02 -9.43731219e-02 ... -9.39616188e-03 -1.15331337e-01 2.89768815e-01] [ 1.35753721e-01 -2.72983819e-01 1.10880710e-01 ... -1.01684429e-01 -6.37245774e-02 -9.63476151e-02]] ... [[ 1.37965187e-01 1.80236742e-01 -1.66228473e-01 ... -1.87287718e-01 2.83262283e-01 -2.09834948e-02] [ 3.11765522e-02 -4.06072199e-01 -9.77973118e-02 ... -1.71581224e-01 1.02411680e-01 -2.70598650e-01] [-2.73213506e-01 -1.95575476e-01 4.98701781e-01 ... -1.53320273e-02 3.35985571e-01 9.52289179e-02] ... [-4.01579231e-01 2.40558311e-01 -5.29294200e-02 ... -1.93684027e-01 -1.67163443e-02 7.73061067e-02] [ 2.56693102e-02 -3.23729187e-01 -7.00009940e-03 ... 1.27053047e-02 -2.58207977e-01 -5.99096119e-02] [ 1.00311220e-01 -2.57769555e-01 1.19478963e-01 ... 1.38259321e-01 -1.29807547e-01 -3.28116983e-01]] [[-1.77806035e-01 5.84704131e-02 2.64748633e-01 ... -3.04262847e-01 -7.46726692e-02 3.76505256e-01] [-6.93920404e-02 2.27368057e-01 4.63837199e-02 ... -2.27933705e-01 1.21003844e-01 2.49943867e-01] [ 1.56210899e-01 -1.96978107e-01 7.57068172e-02 ... 2.24299416e-01 -5.65088987e-02 -4.52935807e-02] ... 
[ 2.73245394e-01 -3.89903039e-02 1.72177047e-01 ... -2.65653670e-01 6.46864027e-02 4.93415631e-02] [ 1.21738717e-01 -6.46853596e-02 -2.67758429e-01 ... 1.06208131e-01 -6.08576983e-02 3.01249027e-02] [-1.98601820e-02 -2.40771119e-02 1.08769313e-01 ... 4.53904942e-02 -6.91244304e-02 -1.52587235e-01]] [[ 2.20405594e-01 -3.18557084e-01 3.32269967e-01 ... 2.87162364e-01 2.82624543e-01 3.32855433e-01] [-1.25916824e-01 9.99481753e-02 -2.01475382e-01 ... -4.06682417e-02 -1.02813028e-01 2.18908235e-01] [ 2.45513797e-01 5.68310311e-03 1.26188025e-01 ... -1.42215192e-01 1.20838620e-01 -1.85456108e-02] ... [ 1.41887352e-01 6.58984706e-02 -3.62930633e-02 ... -3.26587290e-01 1.06370613e-01 -1.38989510e-02] [ 6.63288474e-01 -6.00602515e-02 1.11676618e-01 ... -4.20953445e-02 4.87470664e-02 4.51443076e-01] [ 1.59371808e-01 1.61100533e-02 8.41005147e-02 ... 5.33685200e-02 -1.99852705e-01 5.70327193e-02]]] [[[ 4.87205297e-01 -5.98859608e-01 1.36741865e+00 ... -1.84894657e+00 -4.87445652e-01 -4.68733937e-01] [ 7.15833664e-01 9.50898409e-01 1.29646516e+00 ... -1.15696239e+00 -7.92253494e-01 1.52597964e+00] [-3.90975356e-01 -1.11549020e+00 -1.57381415e+00 ... -1.34201741e+00 1.28084922e+00 3.30065161e-01] ... [-5.67121625e-01 -1.78033803e-02 -4.20419037e-01 ... -1.41310120e+00 1.35347521e+00 -3.29624176e-01] [-6.48191988e-01 2.65308283e-02 1.56968999e+00 ... -4.11515534e-01 2.68137550e+00 8.16538274e-01] [ 2.31738299e-01 -2.75164396e-01 -9.34483349e-01 ... 2.90074557e-01 -4.38996911e-01 -1.10847771e+00]] [[-2.03699064e+00 7.12337375e-01 -3.97465289e-01 ... -1.09908175e+00 -1.43745136e+00 4.71512884e-01] [ 6.19040251e-01 -6.63187146e-01 -6.86949253e-01 ... 9.14631903e-01 -1.45255315e+00 -1.82621479e-01] [ 5.86533785e-01 -1.87763476e+00 1.48008299e+00 ... -1.57041359e+00 -1.95917994e-01 -1.07627594e+00] ... [-1.69538474e+00 2.88968658e+00 1.05837429e+00 ... 5.99170327e-01 -1.48949564e+00 2.50515246e+00] [ 8.57077420e-01 5.80907941e-01 -1.09335864e+00 ... 
-6.43132702e-02 -1.04487389e-02 -2.92902052e-01] [ 4.49855775e-01 7.73679852e-01 -1.70117056e+00 ... 8.13676953e-01 -1.32248628e+00 -1.09757014e-01]] [[ 1.02809072e+00 -4.20330733e-01 -6.83421716e-02 ... -6.68071806e-01 1.98124123e+00 -5.09537876e-01] [-1.72089458e-01 -6.37032628e-01 -4.86859441e-01 ... -1.23891258e+00 -5.02069414e-01 1.10033000e+00] [ 2.19574642e+00 9.23960865e-01 -1.41483355e+00 ... 1.29718423e-01 -8.34581375e-01 -3.55285406e-01] ... [-3.20220768e-01 -1.33176291e+00 1.26360381e+00 ... 4.66359138e-01 -6.02576911e-01 1.25394654e+00] [ 1.42319620e+00 7.56094754e-01 -2.78029829e-01 ... -4.32690352e-01 1.22119479e-01 1.14080918e+00] [-7.34829366e-01 -3.41526209e-03 -8.40378225e-01 ... -1.88872349e+00 1.29078889e+00 7.81236887e-01]] ... [[ 1.71737559e-02 1.38292646e+00 -5.11290669e-01 ... -1.42864794e-01 1.61133587e+00 -6.63869917e-01] [-1.70215145e-01 1.02884066e+00 1.30805755e+00 ... 4.95993316e-01 2.19686985e+00 2.68875718e-01] [-3.65937561e-01 -2.31057644e+00 4.19945091e-01 ... -1.52651697e-01 -7.66969085e-01 3.34098309e-01] ... [-3.40532809e-01 -1.21234678e-01 -1.58260897e-01 ... 8.95506322e-01 5.94472170e-01 9.70971465e-01] [-8.69272172e-01 1.53237224e+00 -5.15780523e-02 ... -5.60721643e-02 -4.34577733e-01 -1.71450329e+00] [-5.99089026e-01 -1.13468982e-01 5.23354709e-01 ... 1.18011653e+00 -7.83363521e-01 1.87556684e+00]] [[ 3.37035209e-02 9.11824822e-01 6.62954077e-02 ... 2.35850859e+00 7.28806973e-01 8.89477074e-01] [ 1.87417138e+00 1.64307261e+00 3.28216434e-01 ... -2.03063846e+00 -3.06453854e-02 -4.77208942e-01] [-9.53966498e-01 6.96905077e-01 -1.21828163e+00 ... 2.46901721e-01 -8.06853175e-01 -1.94989860e+00] ... [-1.29519057e+00 -1.35561788e+00 9.21227276e-01 ... 7.08294928e-01 7.52083719e-01 9.78578720e-03] [-2.66568154e-01 -4.12928820e-01 7.41882384e-01 ... 1.06861651e+00 -1.70110667e+00 -1.84367061e+00] [ 2.06632423e+00 -1.69946048e-02 1.96566379e+00 ... 
-9.64349031e-01 -1.19499624e+00 6.10589862e-01]] [[ 9.27069783e-02 -2.71933079e-01 1.20636272e+00 ... 5.99925339e-01 9.48502302e-01 3.04424524e-01] [ 6.26376629e-01 -1.14834559e+00 -2.30060959e+00 ... -1.20244622e+00 4.18181777e-01 5.16636491e-01] [-1.00004807e-01 -1.74243462e+00 -3.50469351e-01 ... -1.24505544e+00 -8.53471816e-01 2.34571815e+00] ... [ 4.71306294e-01 -9.82926965e-01 -5.54326892e-01 ... 1.41430646e-01 1.54197395e+00 1.14683557e+00] [-8.92275512e-01 -5.89616060e-01 8.15471828e-01 ... 2.97391355e-01 -4.86783266e-01 -4.07825142e-01] [ 6.65456831e-01 1.14903021e+00 -8.43117237e-01 ... -1.47496390e+00 -1.77205288e+00 1.12155117e-01]]] [[[ 9.12467062e-01 -1.29643440e+00 4.85249102e-01 ... 1.18392539e+00 7.07935274e-01 1.03634584e+00] [ 1.40635923e-01 1.91558802e+00 3.41037691e-01 ... 2.35405612e+00 1.55845094e+00 1.09099925e+00] [-2.95622088e-02 -9.84707177e-01 3.54973376e-02 ... -5.61726153e-01 1.72723643e-02 2.27251709e-01] ... [-9.57658112e-01 -1.11977315e+00 1.54681116e-01 ... -3.92255157e-01 1.19563192e-01 8.30184817e-01] [ 7.46112049e-01 -4.11399633e-01 7.81340778e-01 ... -5.92536151e-01 2.42193127e+00 2.35462260e+00] [-1.18715309e-01 -3.23935747e-01 2.11941862e+00 ... 1.06176579e+00 1.88088547e-02 2.35078543e-01]] [[-1.30608153e+00 6.47714674e-01 9.97206450e-01 ... -1.76998943e-01 -6.45292401e-01 1.48906589e+00] [ 1.55389929e+00 -4.81407434e-01 -7.00225353e-01 ... 1.14959610e+00 3.74437332e-01 9.19644296e-01] [ 1.00528586e+00 -7.57730484e-01 1.08251345e+00 ... 2.06275240e-01 6.43040061e-01 -1.64887226e+00] ... [-3.47783804e-01 -1.70148730e-01 -1.65670419e+00 ... -7.03389719e-02 9.83669341e-01 -4.12567884e-01] [-6.29124582e-01 7.24781871e-01 9.07112360e-01 ... -6.11789227e-01 -3.45674962e-01 5.88840663e-01] [-2.23536801e+00 6.98576570e-01 -9.77596581e-01 ... -8.08620751e-01 -9.40236270e-01 -1.34058669e-01]] [[ 2.05186510e+00 -2.15009809e+00 -8.71821642e-01 ... 
1.94948506e+00 -1.00803959e+00 -1.26579189e+00] [ 7.85152078e-01 -7.49638528e-02 -8.45047474e-01 ... -1.40382624e+00 2.59759184e-02 4.74511117e-01] [-1.99613705e-01 7.62784064e-01 -5.00372946e-01 ... -3.80047083e-01 1.02855787e-01 4.63816792e-01] ... [ 5.92780532e-03 -3.55907083e-01 9.48162824e-02 ... -1.36240816e+00 -5.14925718e-02 -6.91856384e-01] [-3.33695769e-01 8.73467028e-01 1.22158086e+00 ... -1.55123222e+00 3.87715101e-01 -1.25113988e+00] [ 4.07000601e-01 -1.48274148e+00 -2.77097255e-01 ... -1.33410025e+00 -5.79697430e-01 -2.50967324e-01]] ... [[ 1.40958011e-01 7.62664795e-01 9.17429984e-01 ... 9.44361210e-01 1.11196673e+00 1.25120446e-01] [ 1.45585084e+00 1.47875869e+00 -3.62867117e-01 ... 1.91657972e+00 -1.06068254e+00 5.05856574e-01] [-2.86620945e-01 -2.99682319e-02 2.09262967e+00 ... -5.18915117e-01 2.86607504e-01 -1.69409484e-01] ... [ 8.80192667e-02 6.39734805e-01 -5.58465719e-01 ... 4.64912266e-01 2.11746454e+00 -7.74497807e-01] [ 3.15629631e-01 2.23650634e-01 -2.50366354e+00 ... -2.50093174e+00 1.00693733e-01 -1.54933608e+00] [-8.82463574e-01 -2.69876897e-01 1.36970818e-01 ... -9.57835257e-01 -8.32785964e-01 2.06023708e-01]] [[ 1.94201112e+00 1.64310026e+00 1.34082794e+00 ... 3.18741560e-01 1.35682917e+00 1.47702515e+00] [ 8.82715881e-01 1.41307497e+00 1.37659073e+00 ... 2.14158583e+00 -4.64534968e-01 -6.86857045e-01] [-3.48566741e-01 6.66245520e-01 4.92601573e-01 ... 6.83150887e-01 -7.29527354e-01 -9.52729523e-01] ... [-2.39481911e-01 -8.40684950e-01 2.99418068e+00 ... 4.85054374e-01 -8.89009416e-01 6.06734097e-01] [-1.83610529e-01 -6.87217116e-01 4.82173204e-01 ... -1.85734808e+00 5.72883785e-01 1.57704145e-01] [ 9.23532993e-04 2.12681127e+00 -6.12163544e-01 ... -1.01342332e+00 7.27427006e-01 -9.74918962e-01]] [[-1.01220179e+00 1.55301780e-01 -3.49814608e-03 ... -1.13706812e-02 3.06129217e-01 -1.43725485e-01] [ 1.17777741e+00 -1.29442549e+00 -1.42705843e-01 ... 
1.26184261e+00 -3.31060827e-01 -4.77488562e-02] [-4.27111238e-01 3.61669064e-02 7.32012331e-01 ... -8.88374686e-01 9.26310956e-01 -2.01123095e+00] ... [ 1.59117326e-01 6.19588435e-01 4.17964906e-01 ... 2.02596951e+00 1.63849607e-01 -4.37832892e-01] [-3.71501327e-01 -2.02064201e-01 1.27482736e+00 ... -1.30490577e+00 1.03110826e+00 -1.19762707e+00] [ 1.22547850e-01 -6.86540365e-01 -6.34176612e-01 ... 1.73866010e+00 -7.35413969e-01 -1.01770058e-01]]]]]; ov_res: [[[[[ 3.73267114e-01 -1.12457298e-01 1.14092194e-01 ... -3.49326611e-01 7.37164693e-04 1.59933284e-01] [-2.22802367e-02 -3.38907957e-01 -1.73341289e-01 ... 3.66709590e-01 3.01151812e-01 3.25820595e-01] [ 1.56140968e-01 -1.41910061e-01 1.03700101e-01 ... 1.77821800e-01 9.12117958e-02 6.60555214e-02] ... [ 2.88245618e-01 -1.42417908e-01 1.27119586e-01 ... -7.47762173e-02 -2.22903088e-01 -1.92018688e-01] [ 7.50758424e-02 -1.20359518e-01 -8.95398334e-02 ... -1.26320750e-01 -1.43622518e-01 -3.55674326e-02] [ 4.25766222e-02 -1.58418208e-01 -2.33955886e-02 ... 1.34368718e-01 -1.43983364e-01 -2.16093007e-02]] [[-2.34764218e-02 1.87555566e-01 -1.18939839e-01 ... 1.44572079e-01 3.26162219e-01 8.52584764e-02] [-7.75464401e-02 -5.50532266e-02 1.35092482e-01 ... -3.79392385e-01 -3.33859801e-01 8.33723247e-02] [-1.19721256e-01 -5.63373007e-02 1.05518796e-01 ... 2.28401255e-02 1.37662962e-01 -6.84784278e-02] ... [ 4.54269201e-01 -2.93893367e-01 -1.37981474e-01 ... 8.76603276e-02 -1.65936798e-01 2.16524098e-02] [-3.77518982e-01 2.04386979e-01 -2.52814859e-01 ... -2.30731279e-01 -1.70699865e-01 3.48408282e-01] [ 7.30066299e-02 1.19153127e-01 -4.99679893e-01 ... 1.30268723e-01 4.05340120e-02 -1.74935862e-01]] [[ 5.77132940e-01 -3.56877148e-01 -4.37394917e-01 ... 1.01754054e-01 2.52393223e-02 -3.79058152e-01] [-3.22267681e-01 3.82172838e-02 -6.47278056e-02 ... 3.72297987e-02 1.34993777e-01 -2.49747679e-01] [-7.76639208e-02 1.14171080e-01 -3.23829539e-02 ... -5.05216539e-01 3.29356045e-02 2.38882303e-02] ... 
[-1.56232372e-01 2.14782096e-02 6.70337453e-02 ... -2.65351653e-01 -1.66176111e-01 1.38005346e-01] [-2.50132252e-02 6.36446327e-02 -9.43731219e-02 ... -9.39616188e-03 -1.15331337e-01 2.89768815e-01] [ 1.35753721e-01 -2.72983819e-01 1.10880710e-01 ... -1.01684429e-01 -6.37245774e-02 -9.63476151e-02]] ... [[ 1.37965187e-01 1.80236742e-01 -1.66228473e-01 ... -1.87287718e-01 2.83262283e-01 -2.09834948e-02] [ 3.11765522e-02 -4.06072199e-01 -9.77973118e-02 ... -1.71581224e-01 1.02411680e-01 -2.70598650e-01] [-2.73213506e-01 -1.95575476e-01 4.98701781e-01 ... -1.53320273e-02 3.35985571e-01 9.52289179e-02] ... [-4.01579231e-01 2.40558311e-01 -5.29294200e-02 ... -1.93684027e-01 -1.67163443e-02 7.73061067e-02] [ 2.56693102e-02 -3.23729187e-01 -7.00009940e-03 ... 1.27053047e-02 -2.58207977e-01 -5.99096119e-02] [ 1.00311220e-01 -2.57769555e-01 1.19478963e-01 ... 1.38259321e-01 -1.29807547e-01 -3.28116983e-01]] [[-1.77806035e-01 5.84704131e-02 2.64748633e-01 ... -3.04262847e-01 -7.46726692e-02 3.76505256e-01] [-6.93920404e-02 2.27368057e-01 4.63837199e-02 ... -2.27933705e-01 1.21003844e-01 2.49943867e-01] [ 1.56210899e-01 -1.96978107e-01 7.57068172e-02 ... 2.24299416e-01 -5.65088987e-02 -4.52935807e-02] ... [ 2.73245394e-01 -3.89903039e-02 1.72177047e-01 ... -2.65653670e-01 6.46864027e-02 4.93415631e-02] [ 1.21738717e-01 -6.46853596e-02 -2.67758429e-01 ... 1.06208131e-01 -6.08576983e-02 3.01249027e-02] [-1.98601820e-02 -2.40771119e-02 1.08769313e-01 ... 4.53904942e-02 -6.91244304e-02 -1.52587235e-01]] [[ 2.20405594e-01 -3.18557084e-01 3.32269967e-01 ... 2.87162364e-01 2.82624543e-01 3.32855433e-01] [-1.25916824e-01 9.99481753e-02 -2.01475382e-01 ... -4.06682417e-02 -1.02813028e-01 2.18908235e-01] [ 2.45513797e-01 5.68310311e-03 1.26188025e-01 ... -1.42215192e-01 1.20838620e-01 -1.85456108e-02] ... [ 1.41887352e-01 6.58984706e-02 -3.62930633e-02 ... -3.26587290e-01 1.06370613e-01 -1.38989510e-02] [ 6.63288474e-01 -6.00602515e-02 1.11676618e-01 ... 
-4.20953445e-02 4.87470664e-02 4.51443076e-01] [ 1.59371808e-01 1.61100533e-02 8.41005147e-02 ... 5.33685200e-02 -1.99852705e-01 5.70327193e-02]]] [[[ 4.87205297e-01 -5.98859608e-01 1.36741865e+00 ... -1.84894657e+00 -4.87445652e-01 -4.68733937e-01] [ 7.15833664e-01 9.50898409e-01 1.29646516e+00 ... -1.15696239e+00 -7.92253494e-01 1.52597964e+00] [-3.90975356e-01 -1.11549020e+00 -1.57381415e+00 ... -1.34201741e+00 1.28084922e+00 3.30065161e-01] ... [-5.67121625e-01 -1.78033803e-02 -4.20419037e-01 ... -1.41310120e+00 1.35347521e+00 -3.29624176e-01] [-6.48191988e-01 2.65308283e-02 1.56968999e+00 ... -4.11515534e-01 2.68137550e+00 8.16538274e-01] [ 2.31738299e-01 -2.75164396e-01 -9.34483349e-01 ... 2.90074557e-01 -4.38996911e-01 -1.10847771e+00]] [[-2.03699064e+00 7.12337375e-01 -3.97465289e-01 ... -1.09908175e+00 -1.43745136e+00 4.71512884e-01] [ 6.19040251e-01 -6.63187146e-01 -6.86949253e-01 ... 9.14631903e-01 -1.45255315e+00 -1.82621479e-01] [ 5.86533785e-01 -1.87763476e+00 1.48008299e+00 ... -1.57041359e+00 -1.95917994e-01 -1.07627594e+00] ... [-1.69538474e+00 2.88968658e+00 1.05837429e+00 ... 5.99170327e-01 -1.48949564e+00 2.50515246e+00] [ 8.57077420e-01 5.80907941e-01 -1.09335864e+00 ... -6.43132702e-02 -1.04487389e-02 -2.92902052e-01] [ 4.49855775e-01 7.73679852e-01 -1.70117056e+00 ... 8.13676953e-01 -1.32248628e+00 -1.09757014e-01]] [[ 1.02809072e+00 -4.20330733e-01 -6.83421716e-02 ... -6.68071806e-01 1.98124123e+00 -5.09537876e-01] [-1.72089458e-01 -6.37032628e-01 -4.86859441e-01 ... -1.23891258e+00 -5.02069414e-01 1.10033000e+00] [ 2.19574642e+00 9.23960865e-01 -1.41483355e+00 ... 1.29718423e-01 -8.34581375e-01 -3.55285406e-01] ... [-3.20220768e-01 -1.33176291e+00 1.26360381e+00 ... 4.66359138e-01 -6.02576911e-01 1.25394654e+00] [ 1.42319620e+00 7.56094754e-01 -2.78029829e-01 ... -4.32690352e-01 1.22119479e-01 1.14080918e+00] [-7.34829366e-01 -3.41526209e-03 -8.40378225e-01 ... -1.88872349e+00 1.29078889e+00 7.81236887e-01]] ... 
[[ 1.71737559e-02 1.38292646e+00 -5.11290669e-01 ... -1.42864794e-01 1.61133587e+00 -6.63869917e-01] [-1.70215145e-01 1.02884066e+00 1.30805755e+00 ... 4.95993316e-01 2.19686985e+00 2.68875718e-01] [-3.65937561e-01 -2.31057644e+00 4.19945091e-01 ... -1.52651697e-01 -7.66969085e-01 3.34098309e-01] ... [-3.40532809e-01 -1.21234678e-01 -1.58260897e-01 ... 8.95506322e-01 5.94472170e-01 9.70971465e-01] [-8.69272172e-01 1.53237224e+00 -5.15780523e-02 ... -5.60721643e-02 -4.34577733e-01 -1.71450329e+00] [-5.99089026e-01 -1.13468982e-01 5.23354709e-01 ... 1.18011653e+00 -7.83363521e-01 1.87556684e+00]] [[ 3.37035209e-02 9.11824822e-01 6.62954077e-02 ... 2.35850859e+00 7.28806973e-01 8.89477074e-01] [ 1.87417138e+00 1.64307261e+00 3.28216434e-01 ... -2.03063846e+00 -3.06453854e-02 -4.77208942e-01] [-9.53966498e-01 6.96905077e-01 -1.21828163e+00 ... 2.46901721e-01 -8.06853175e-01 -1.94989860e+00] ... [-1.29519057e+00 -1.35561788e+00 9.21227276e-01 ... 7.08294928e-01 7.52083719e-01 9.78578720e-03] [-2.66568154e-01 -4.12928820e-01 7.41882384e-01 ... 1.06861651e+00 -1.70110667e+00 -1.84367061e+00] [ 2.06632423e+00 -1.69946048e-02 1.96566379e+00 ... -9.64349031e-01 -1.19499624e+00 6.10589862e-01]] [[ 9.27069783e-02 -2.71933079e-01 1.20636272e+00 ... 5.99925339e-01 9.48502302e-01 3.04424524e-01] [ 6.26376629e-01 -1.14834559e+00 -2.30060959e+00 ... -1.20244622e+00 4.18181777e-01 5.16636491e-01] [-1.00004807e-01 -1.74243462e+00 -3.50469351e-01 ... -1.24505544e+00 -8.53471816e-01 2.34571815e+00] ... [ 4.71306294e-01 -9.82926965e-01 -5.54326892e-01 ... 1.41430646e-01 1.54197395e+00 1.14683557e+00] [-8.92275512e-01 -5.89616060e-01 8.15471828e-01 ... 2.97391355e-01 -4.86783266e-01 -4.07825142e-01] [ 6.65456831e-01 1.14903021e+00 -8.43117237e-01 ... -1.47496390e+00 -1.77205288e+00 1.12155117e-01]]] [[[ 9.12467062e-01 -1.29643440e+00 4.85249102e-01 ... 1.18392539e+00 7.07935274e-01 1.03634584e+00] [ 1.40635923e-01 1.91558802e+00 3.41037691e-01 ... 
2.35405612e+00 1.55845094e+00 1.09099925e+00] [-2.95622088e-02 -9.84707177e-01 3.54973376e-02 ... -5.61726153e-01 1.72723643e-02 2.27251709e-01] ... [-9.57658112e-01 -1.11977315e+00 1.54681116e-01 ... -3.92255157e-01 1.19563192e-01 8.30184817e-01] [ 7.46112049e-01 -4.11399633e-01 7.81340778e-01 ... -5.92536151e-01 2.42193127e+00 2.35462260e+00] [-1.18715309e-01 -3.23935747e-01 2.11941862e+00 ... 1.06176579e+00 1.88088547e-02 2.35078543e-01]] [[-1.30608153e+00 6.47714674e-01 9.97206450e-01 ... -1.76998943e-01 -6.45292401e-01 1.48906589e+00] [ 1.55389929e+00 -4.81407434e-01 -7.00225353e-01 ... 1.14959610e+00 3.74437332e-01 9.19644296e-01] [ 1.00528586e+00 -7.57730484e-01 1.08251345e+00 ... 2.06275240e-01 6.43040061e-01 -1.64887226e+00] ... [-3.47783804e-01 -1.70148730e-01 -1.65670419e+00 ... -7.03389719e-02 9.83669341e-01 -4.12567884e-01] [-6.29124582e-01 7.24781871e-01 9.07112360e-01 ... -6.11789227e-01 -3.45674962e-01 5.88840663e-01] [-2.23536801e+00 6.98576570e-01 -9.77596581e-01 ... -8.08620751e-01 -9.40236270e-01 -1.34058669e-01]] [[ 2.05186510e+00 -2.15009809e+00 -8.71821642e-01 ... 1.94948506e+00 -1.00803959e+00 -1.26579189e+00] [ 7.85152078e-01 -7.49638528e-02 -8.45047474e-01 ... -1.40382624e+00 2.59759184e-02 4.74511117e-01] [-1.99613705e-01 7.62784064e-01 -5.00372946e-01 ... -3.80047083e-01 1.02855787e-01 4.63816792e-01] ... [ 5.92780532e-03 -3.55907083e-01 9.48162824e-02 ... -1.36240816e+00 -5.14925718e-02 -6.91856384e-01] [-3.33695769e-01 8.73467028e-01 1.22158086e+00 ... -1.55123222e+00 3.87715101e-01 -1.25113988e+00] [ 4.07000601e-01 -1.48274148e+00 -2.77097255e-01 ... -1.33410025e+00 -5.79697430e-01 -2.50967324e-01]] ... [[ 1.40958011e-01 7.62664795e-01 9.17429984e-01 ... 9.44361210e-01 1.11196673e+00 1.25120446e-01] [ 1.45585084e+00 1.47875869e+00 -3.62867117e-01 ... 1.91657972e+00 -1.06068254e+00 5.05856574e-01] [-2.86620945e-01 -2.99682319e-02 2.09262967e+00 ... -5.18915117e-01 2.86607504e-01 -1.69409484e-01] ... 
[ 8.80192667e-02 6.39734805e-01 -5.58465719e-01 ... 4.64912266e-01 2.11746454e+00 -7.74497807e-01] [ 3.15629631e-01 2.23650634e-01 -2.50366354e+00 ... -2.50093174e+00 1.00693733e-01 -1.54933608e+00] [-8.82463574e-01 -2.69876897e-01 1.36970818e-01 ... -9.57835257e-01 -8.32785964e-01 2.06023708e-01]] [[ 1.94201112e+00 1.64310026e+00 1.34082794e+00 ... 3.18741560e-01 1.35682917e+00 1.47702515e+00] [ 8.82715881e-01 1.41307497e+00 1.37659073e+00 ... 2.14158583e+00 -4.64534968e-01 -6.86857045e-01] [-3.48566741e-01 6.66245520e-01 4.92601573e-01 ... 6.83150887e-01 -7.29527354e-01 -9.52729523e-01] ... [-2.39481911e-01 -8.40684950e-01 2.99418068e+00 ... 4.85054374e-01 -8.89009416e-01 6.06734097e-01] [-1.83610529e-01 -6.87217116e-01 4.82173204e-01 ... -1.85734808e+00 5.72883785e-01 1.57704145e-01] [ 9.23532993e-04 2.12681127e+00 -6.12163544e-01 ... -1.01342332e+00 7.27427006e-01 -9.74918962e-01]] [[-1.01220179e+00 1.55301780e-01 -3.49814608e-03 ... -1.13706812e-02 3.06129217e-01 -1.43725485e-01] [ 1.17777741e+00 -1.29442549e+00 -1.42705843e-01 ... 1.26184261e+00 -3.31060827e-01 -4.77488562e-02] [-4.27111238e-01 3.61669064e-02 7.32012331e-01 ... -8.88374686e-01 9.26310956e-01 -2.01123095e+00] ... [ 1.59117326e-01 6.19588435e-01 4.17964906e-01 ... 2.02596951e+00 1.63849607e-01 -4.37832892e-01] [-3.71501327e-01 -2.02064201e-01 1.27482736e+00 ... -1.30490577e+00 1.03110826e+00 -1.19762707e+00] [ 1.22547850e-01 -6.86540365e-01 -6.34176612e-01 ... 1.73866010e+00 -7.35413969e-01 -1.01770058e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [2, 2, 2], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_924.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2, 2, 2]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.3653 (2,1,1,.,.) = -0.1674 (3,1,1,.,.) = -0.2806 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[ 3.62412405e+00 1.05039251e+00 -1.66077375e+00 ... 5.84177732e-01 2.41997808e-01 6.67321026e-01] [ 6.33128732e-02 -1.39426517e+00 1.22723210e+00 ... 3.23346630e-02 -1.27090871e+00 -2.18246840e-02] [-2.14976358e+00 1.37457490e+00 1.82425737e+00 ... -1.14352353e-01 -2.79146361e+00 1.43361092e-01] ... [-9.77719605e-01 -2.13296843e+00 5.86055040e-01 ... 2.70950079e+00 8.88788283e-01 2.51482534e+00] [ 2.20446920e+00 -1.65311646e+00 -3.59370410e-02 ... -2.74344635e+00 6.56705439e-01 1.05436540e+00] [ 1.64005661e+00 -2.19310665e+00 1.53387105e+00 ... 4.15756732e-01 -1.66641581e+00 -2.79473424e+00]] [[-1.78787029e+00 -5.01533151e-01 -3.35565376e+00 ... -1.50869036e+00 9.47350264e-01 -4.36994940e-01] [-6.55425489e-01 2.02274346e+00 1.51960090e-01 ... 3.10664535e-01 2.60180384e-01 -1.20439994e+00] [ 1.04353502e-01 -2.44925404e+00 -1.24230373e+00 ... 2.34067827e-01 1.51241422e+00 7.75938928e-01] ... [ 6.09931231e-01 -1.87454283e+00 -8.11970055e-01 ... 
6.95384681e-01 -1.62604243e-01 -2.06934619e+00] [-1.06820822e-01 6.85751513e-02 -2.82439065e+00 ... -1.61408931e-01 4.20201987e-01 1.32463515e-01] [-1.20983434e+00 2.00711474e-01 1.01379657e+00 ... -2.87654221e-01 1.48587930e+00 -8.41343820e-01]] [[-4.79396045e-01 1.20001590e+00 -1.51556444e+00 ... 4.29379314e-01 -5.45401216e-01 1.04441237e+00] [ 7.69122005e-01 -2.69417405e-01 -2.13580155e+00 ... -5.76911569e-01 -1.53703654e+00 -1.45796001e+00] [ 6.73423171e-01 -3.74032199e-01 -2.16627553e-01 ... 7.65017688e-01 1.09708011e+00 1.82253134e+00] ... [-6.75177157e-01 -1.82623434e+00 5.63510787e-03 ... 1.43348467e+00 7.43337393e-01 2.27994132e+00] [-3.68396223e-01 5.09446442e-01 4.71537262e-01 ... -7.39233136e-01 -6.21237099e-01 1.19370651e+00] [-1.98660326e+00 -2.31523037e+00 5.60325682e-01 ... -1.17275620e+00 -8.15700352e-01 -2.76095772e+00]] ... [[ 1.53321970e+00 -1.45063698e+00 -4.61313814e-01 ... 1.85426033e+00 7.86931038e-01 4.39504206e-01] [-3.59515518e-01 -8.07617426e-01 7.25237608e-01 ... 1.40098363e-01 2.83391547e+00 5.30889094e-01] [ 1.69011816e-01 -3.95425893e-02 -3.13753039e-01 ... 3.44456673e-01 -1.81329584e+00 1.66821563e+00] ... [ 1.85330117e+00 -3.99193227e-01 -1.67970002e+00 ... 1.61320901e+00 5.81185043e-01 8.18115845e-02] [-2.95067430e+00 4.29312825e-01 1.27015364e+00 ... 2.56875575e-01 -2.12621164e+00 -1.09676786e-01] [-9.73952770e-01 -6.45009518e-01 1.22924423e+00 ... 2.59078457e-03 2.18868360e-01 1.05590057e+00]] [[-3.82762663e-02 1.91542673e+00 -1.71411467e+00 ... 1.45424402e+00 -1.59373951e+00 1.37659073e+00] [-5.89911640e-01 9.46765423e-01 9.52494800e-01 ... 1.04165423e+00 1.36826307e-01 -1.63066471e+00] [-4.94641006e-01 5.50902843e-01 2.07660985e+00 ... -2.82971263e+00 -1.23559141e+00 -1.18337488e+00] ... [ 2.01785135e+00 1.97059608e+00 -7.35647231e-02 ... -1.63925111e+00 5.62260330e-01 -3.29128742e-01] [-1.38895440e+00 -1.29805577e+00 -5.22994220e-01 ... 
-1.59861398e+00 1.76258579e-01 1.04574716e+00] [ 7.20605195e-01 3.58009070e-01 7.64176011e-01 ... 1.37106156e+00 -1.93002617e+00 1.83049634e-01]] [[ 2.22604752e+00 -1.15303266e+00 -9.22778487e-01 ... -2.39525819e+00 -3.15784514e-01 -1.50576401e+00] [-2.16043264e-01 -1.23157728e+00 2.57475317e-01 ... -1.57117379e+00 9.97237742e-01 -1.61335487e-02] [ 2.44914961e+00 1.14073181e+00 -6.09808505e-01 ... 4.18409795e-01 9.89403188e-01 -2.07450175e+00] ... [-1.32542551e+00 4.25486982e-01 9.71454620e-01 ... 1.36133361e+00 1.72174799e+00 1.16139933e-01] [ 8.77377912e-02 1.47425771e+00 -3.27077180e-01 ... -1.75247288e+00 3.93402934e-01 2.94567108e+00] [-1.40441382e+00 -1.89169496e-01 1.59776735e+00 ... -4.74692672e-01 1.46972942e+00 -1.84448385e+00]]] [[[ 5.84718771e-02 1.80564821e-01 9.92179513e-02 ... -9.30021703e-02 -1.59897149e-01 -1.46841779e-01] [ 6.87027499e-02 -1.95504978e-01 -1.66471750e-01 ... 7.06769824e-02 -2.68751055e-01 -1.36532128e-01] [ 4.59867626e-01 2.51597047e-01 1.01965390e-01 ... -1.23568185e-01 2.19986573e-01 -1.32653579e-01] ... [-2.28313893e-01 -8.07839409e-02 -3.06106389e-01 ... -6.87452629e-02 3.99873243e-05 -2.09014401e-01] [ 8.58484507e-02 1.15560740e-02 8.56876820e-02 ... 2.52591640e-01 1.18110828e-01 5.60099073e-02] [-1.23739734e-01 -3.48518670e-01 4.36096907e-01 ... -2.99623869e-02 -1.08291171e-01 3.17409664e-01]] [[ 3.01133073e-03 2.78845161e-01 -1.59438431e-01 ... -9.05590951e-02 -1.42496392e-01 7.27099255e-02] [ 1.48422509e-01 2.21616149e-01 -4.32395577e-01 ... 1.29510492e-01 -1.00358106e-01 -7.41546825e-02] [-7.80813396e-02 8.38947073e-02 -9.99870300e-02 ... -1.06405877e-01 2.81898558e-01 5.02362736e-02] ... [ 9.18882638e-02 -1.21814683e-01 -2.38039225e-01 ... -9.99986678e-02 8.83533582e-02 -2.65369229e-02] [-1.80612177e-01 3.28999013e-02 -1.15335315e-01 ... 3.34048234e-02 -9.67091769e-02 9.60387439e-02] [ 2.07097083e-01 1.68112621e-01 -2.87486389e-02 ... 
-8.61784518e-02 2.45632961e-01 -2.20646322e-01]] [[ 9.72795561e-02 5.47110066e-02 4.35458450e-03 ... 1.98467970e-02 -4.71016541e-02 -2.87450582e-01] [ 1.96776554e-01 1.43269241e-01 -3.85448635e-01 ... 9.35276672e-02 2.33166441e-01 1.12190396e-01] [-6.02332316e-02 -1.50897250e-01 -1.54469997e-01 ... -7.07668066e-02 -5.59717650e-03 -1.62498653e-01] ... [-1.28934681e-01 -7.61313438e-02 1.28316656e-01 ... 2.46270999e-01 -1.81317225e-01 3.05810511e-01] [ 2.03776747e-01 1.54957563e-01 -2.57831335e-01 ... 6.09912574e-02 2.39989251e-01 -2.85926253e-01] [ 2.99555920e-02 -2.09360849e-02 -3.51121998e-03 ... 1.32579103e-01 -1.56961083e-01 7.55903171e-03]] ... [[ 1.91295862e-01 2.07399428e-01 -1.18663490e-01 ... -1.96159017e-05 6.20866716e-02 3.97808366e-02] [ 1.00100763e-01 7.60732070e-02 3.32705081e-02 ... -3.63458902e-01 8.22806731e-04 -3.76603246e-01] [-2.43731171e-01 4.22570944e-01 4.78994250e-02 ... -1.45603850e-01 6.02323096e-03 1.41694382e-01] ... [-3.42806093e-02 3.33693326e-01 2.97410763e-04 ... 1.83113784e-01 -1.50048524e-01 1.81212369e-02] [-3.60386640e-01 2.13385463e-01 8.96980837e-02 ... 1.48392115e-02 2.61391457e-02 -5.87500110e-02] [ 1.88304372e-02 4.50793616e-02 -1.54959008e-01 ... 8.89114439e-02 -1.36097774e-01 -6.58896565e-02]] [[-1.18406624e-01 7.08925128e-02 3.39781344e-01 ... -3.80267911e-02 -4.63342341e-03 6.43948000e-03] [ 2.28246108e-01 1.22479573e-01 4.15442437e-02 ... -1.67776734e-01 1.02546670e-01 -1.06127255e-01] [ 2.37564072e-02 -4.55024004e-01 2.35329960e-02 ... 1.18520789e-01 2.14345574e-01 6.75112978e-02] ... [-1.15075424e-01 6.40555993e-02 8.14000964e-02 ... 1.45928264e-02 -9.44066718e-02 1.70722663e-01] [ 1.27805486e-01 -1.59156129e-01 -1.00712471e-01 ... -1.84769079e-01 3.56511056e-01 -1.21123560e-01] [ 2.26535961e-01 1.88053235e-01 9.17486474e-02 ... -2.22823381e-01 -1.55794069e-01 1.20179333e-01]] [[-1.07973807e-01 -2.27705222e-02 -1.09546632e-01 ... 
4.57843505e-02 6.54509589e-02 -1.12330928e-01] [-1.53820187e-01 3.72413725e-01 2.91120201e-01 ... -2.23019749e-01 -3.27899098e-01 5.46143688e-02] [-2.18710899e-02 3.84936109e-03 4.40842658e-03 ... -1.09259591e-01 -1.83812752e-02 -3.58047038e-01] ... [ 1.70578197e-01 -1.11728802e-01 2.99207810e-02 ... 1.81943431e-01 -2.66085248e-02 1.25170648e-02] [ 1.03320852e-01 4.80632819e-02 3.04793328e-01 ... -2.21934468e-02 -1.20028406e-01 2.66262084e-01] [ 1.58975776e-02 -2.59294420e-01 4.55021486e-02 ... -3.21097851e-01 -1.64453872e-02 -4.29091901e-01]]] [[[-1.22538239e-01 -2.14013204e-01 -3.69478434e-01 ... -7.02846870e-02 -1.28559366e-01 -2.76213616e-01] [ 1.00583903e-01 -1.65407266e-02 -4.51811939e-01 ... -2.53540695e-01 6.60862997e-02 -2.54986703e-01] [-5.98736256e-02 -7.28845969e-03 -4.38395828e-01 ... 5.59264064e-01 3.15685004e-01 -1.64282471e-01] ... [-2.26082027e-01 2.70957321e-01 -1.87870711e-01 ... -1.14162685e-03 -2.48999238e-01 -1.04536124e-01] [ 6.93352297e-02 2.46455774e-01 -4.11635220e-01 ... -1.95422564e-02 -2.93376803e-01 3.17420810e-01] [-1.95732519e-01 3.69961420e-03 -1.47227019e-01 ... -1.26686469e-01 3.20033878e-01 3.45652580e-01]] [[-3.25616211e-01 -5.52305691e-02 -1.19178295e-01 ... 2.85640687e-01 5.69465935e-01 1.78580776e-01] [-5.33556759e-01 1.63239047e-01 -1.79313734e-01 ... 6.33145988e-01 -1.74564391e-01 2.04947874e-01] [ 2.02163205e-01 -4.43659395e-01 -9.01362300e-02 ... -1.39939219e-01 -3.16846743e-02 -3.57105702e-01] ... [-3.32357809e-02 -4.15968299e-01 -1.20526589e-02 ... 1.67526186e-01 2.79649347e-01 -1.76151320e-01] [-3.13714474e-01 -1.10765606e-01 5.59248328e-01 ... 6.49157390e-02 5.14359698e-02 -1.76730081e-01] [ 5.58104455e-01 6.63639545e-01 1.77478686e-01 ... -6.76881522e-02 -1.79721117e-01 4.87317294e-01]] [[-3.03354442e-01 9.14386958e-02 -2.21311271e-01 ... 1.81572571e-01 2.20442101e-01 -1.45004943e-01] [-1.39638066e-01 -4.13385600e-01 7.71643221e-02 ... 
1.76953599e-01 -1.80329457e-01 4.46754813e-01] [-3.14238518e-02 3.24145406e-01 3.49804536e-02 ... -3.57041717e-01 1.61612868e-01 -2.32808083e-01] ... [ 8.38435069e-02 -4.15131480e-01 1.01582989e-01 ... -1.05867296e-01 -1.37504488e-01 -8.75607282e-02] [ 9.59339291e-02 -2.53369123e-01 -4.32632044e-02 ... 2.34581262e-01 1.46662012e-01 -2.09076613e-01] [-2.59029604e-02 -4.12498534e-01 -8.74003991e-02 ... -9.83693004e-02 3.84630829e-01 5.62909842e-01]] ... [[ 1.11992404e-01 2.51585692e-01 4.96092081e-01 ... -2.08384871e-01 4.04814243e-01 1.73784018e-01] [ 1.70169845e-01 -1.72761694e-01 -8.44610035e-02 ... -2.72410288e-02 1.47586793e-01 -1.96715612e-02] [ 3.20254236e-01 -1.73460506e-02 -1.83924228e-01 ... -1.17462426e-02 -3.13720405e-02 4.78549004e-02] ... [ 5.14103711e-01 3.51372510e-02 1.61123484e-01 ... -1.30334258e-01 -1.90261424e-01 5.85944772e-01] [ 4.81735677e-01 -2.27399230e-01 5.55812895e-01 ... 9.69523117e-02 9.90490541e-02 -2.33881757e-01] [-4.20346335e-02 1.45105675e-01 -3.79031718e-01 ... -2.05615968e-01 3.57835203e-01 -2.13510856e-01]] [[ 3.00583750e-01 5.72431922e-01 1.22999124e-01 ... -2.19144255e-01 4.60154444e-01 3.43531340e-01] [ 1.29598215e-01 8.76696631e-02 -2.22673431e-01 ... 3.31896767e-02 1.89942524e-01 4.02291417e-01] [ 4.99048233e-01 3.16029638e-01 3.15081686e-01 ... 5.55949211e-01 -9.22364667e-02 -1.48443162e-01] ... [ 7.82047659e-02 -1.33468896e-01 -1.44903824e-01 ... 2.91348517e-01 2.38495022e-01 1.88328147e-01] [-1.03153311e-01 -3.34604144e-01 2.01851159e-01 ... 1.39234230e-01 2.77842909e-01 -2.14089602e-01] [ 1.01749502e-01 3.15104097e-01 -4.90294546e-01 ... -4.34026897e-01 2.54129976e-01 1.18893735e-01]] [[-1.85597241e-01 2.05360785e-01 -2.66172439e-01 ... -3.06895792e-01 3.83205146e-01 3.35793942e-01] [ 1.40021190e-01 -6.32443428e-01 1.53035536e-01 ... 1.75628275e-01 5.14504671e-01 2.93811243e-02] [-7.86039457e-02 -2.45381951e-01 -5.54697681e-03 ... -5.29598892e-02 2.10294083e-01 -1.07845761e-01] ... 
[ 3.15331817e-01 1.31201386e-01 2.60192037e-01 ... -5.16559422e-01 6.48946762e-02 -4.04849142e-01] [ 3.81732613e-01 -1.32960021e-01 1.48826644e-01 ... 5.97297214e-02 1.50882080e-01 8.45497176e-02] [ 5.58367930e-02 -1.92878366e-01 -3.65441501e-01 ... -4.06014711e-01 -4.10183007e-03 5.25624335e-01]]]]]; ov_res: [[[[[ 3.62412405e+00 1.05039251e+00 -1.66077375e+00 ... 5.84177732e-01 2.41997808e-01 6.67321026e-01] [ 6.33128732e-02 -1.39426517e+00 1.22723210e+00 ... 3.23346630e-02 -1.27090871e+00 -2.18246840e-02] [-2.14976358e+00 1.37457490e+00 1.82425737e+00 ... -1.14352353e-01 -2.79146361e+00 1.43361092e-01] ... [-9.77719605e-01 -2.13296843e+00 5.86055040e-01 ... 2.70950079e+00 8.88788283e-01 2.51482534e+00] [ 2.20446920e+00 -1.65311646e+00 -3.59370410e-02 ... -2.74344635e+00 6.56705439e-01 1.05436540e+00] [ 1.64005661e+00 -2.19310665e+00 1.53387105e+00 ... 4.15756732e-01 -1.66641581e+00 -2.79473424e+00]] [[-1.78787029e+00 -5.01533151e-01 -3.35565376e+00 ... -1.50869036e+00 9.47350264e-01 -4.36994940e-01] [-6.55425489e-01 2.02274346e+00 1.51960090e-01 ... 3.10664535e-01 2.60180384e-01 -1.20439994e+00] [ 1.04353502e-01 -2.44925404e+00 -1.24230373e+00 ... 2.34067827e-01 1.51241422e+00 7.75938928e-01] ... [ 6.09931231e-01 -1.87454283e+00 -8.11970055e-01 ... 6.95384681e-01 -1.62604243e-01 -2.06934619e+00] [-1.06820822e-01 6.85751513e-02 -2.82439065e+00 ... -1.61408931e-01 4.20201987e-01 1.32463515e-01] [-1.20983434e+00 2.00711474e-01 1.01379657e+00 ... -2.87654221e-01 1.48587930e+00 -8.41343820e-01]] [[-4.79396045e-01 1.20001590e+00 -1.51556444e+00 ... 4.29379314e-01 -5.45401216e-01 1.04441237e+00] [ 7.69122005e-01 -2.69417405e-01 -2.13580155e+00 ... -5.76911569e-01 -1.53703654e+00 -1.45796001e+00] [ 6.73423171e-01 -3.74032199e-01 -2.16627553e-01 ... 7.65017688e-01 1.09708011e+00 1.82253134e+00] ... [-6.75177157e-01 -1.82623434e+00 5.63510787e-03 ... 1.43348467e+00 7.43337393e-01 2.27994132e+00] [-3.68396223e-01 5.09446442e-01 4.71537262e-01 ... 
-7.39233136e-01 -6.21237099e-01 1.19370651e+00] [-1.98660326e+00 -2.31523037e+00 5.60325682e-01 ... -1.17275620e+00 -8.15700352e-01 -2.76095772e+00]] ... [[ 1.53321970e+00 -1.45063698e+00 -4.61313814e-01 ... 1.85426033e+00 7.86931038e-01 4.39504206e-01] [-3.59515518e-01 -8.07617426e-01 7.25237608e-01 ... 1.40098363e-01 2.83391547e+00 5.30889094e-01] [ 1.69011816e-01 -3.95425893e-02 -3.13753039e-01 ... 3.44456673e-01 -1.81329584e+00 1.66821563e+00] ... [ 1.85330117e+00 -3.99193227e-01 -1.67970002e+00 ... 1.61320901e+00 5.81185043e-01 8.18115845e-02] [-2.95067430e+00 4.29312825e-01 1.27015364e+00 ... 2.56875575e-01 -2.12621164e+00 -1.09676786e-01] [-9.73952770e-01 -6.45009518e-01 1.22924423e+00 ... 2.59078457e-03 2.18868360e-01 1.05590057e+00]] [[-3.82762663e-02 1.91542673e+00 -1.71411467e+00 ... 1.45424402e+00 -1.59373951e+00 1.37659073e+00] [-5.89911640e-01 9.46765423e-01 9.52494800e-01 ... 1.04165423e+00 1.36826307e-01 -1.63066471e+00] [-4.94641006e-01 5.50902843e-01 2.07660985e+00 ... -2.82971263e+00 -1.23559141e+00 -1.18337488e+00] ... [ 2.01785135e+00 1.97059608e+00 -7.35647231e-02 ... -1.63925111e+00 5.62260330e-01 -3.29128742e-01] [-1.38895440e+00 -1.29805577e+00 -5.22994220e-01 ... -1.59861398e+00 1.76258579e-01 1.04574716e+00] [ 7.20605195e-01 3.58009070e-01 7.64176011e-01 ... 1.37106156e+00 -1.93002617e+00 1.83049634e-01]] [[ 2.22604752e+00 -1.15303266e+00 -9.22778487e-01 ... -2.39525819e+00 -3.15784514e-01 -1.50576401e+00] [-2.16043264e-01 -1.23157728e+00 2.57475317e-01 ... -1.57117379e+00 9.97237742e-01 -1.61335487e-02] [ 2.44914961e+00 1.14073181e+00 -6.09808505e-01 ... 4.18409795e-01 9.89403188e-01 -2.07450175e+00] ... [-1.32542551e+00 4.25486982e-01 9.71454620e-01 ... 1.36133361e+00 1.72174799e+00 1.16139933e-01] [ 8.77377912e-02 1.47425771e+00 -3.27077180e-01 ... -1.75247288e+00 3.93402934e-01 2.94567108e+00] [-1.40441382e+00 -1.89169496e-01 1.59776735e+00 ... 
-4.74692672e-01 1.46972942e+00 -1.84448385e+00]]] [[[ 5.84718771e-02 1.80564821e-01 9.92179513e-02 ... -9.30021703e-02 -1.59897149e-01 -1.46841779e-01] [ 6.87027499e-02 -1.95504978e-01 -1.66471750e-01 ... 7.06769824e-02 -2.68751055e-01 -1.36532128e-01] [ 4.59867626e-01 2.51597047e-01 1.01965390e-01 ... -1.23568185e-01 2.19986573e-01 -1.32653579e-01] ... [-2.28313893e-01 -8.07839409e-02 -3.06106389e-01 ... -6.87452629e-02 3.99873243e-05 -2.09014401e-01] [ 8.58484507e-02 1.15560740e-02 8.56876820e-02 ... 2.52591640e-01 1.18110828e-01 5.60099073e-02] [-1.23739734e-01 -3.48518670e-01 4.36096907e-01 ... -2.99623869e-02 -1.08291171e-01 3.17409664e-01]] [[ 3.01133073e-03 2.78845161e-01 -1.59438431e-01 ... -9.05590951e-02 -1.42496392e-01 7.27099255e-02] [ 1.48422509e-01 2.21616149e-01 -4.32395577e-01 ... 1.29510492e-01 -1.00358106e-01 -7.41546825e-02] [-7.80813396e-02 8.38947073e-02 -9.99870300e-02 ... -1.06405877e-01 2.81898558e-01 5.02362736e-02] ... [ 9.18882638e-02 -1.21814683e-01 -2.38039225e-01 ... -9.99986678e-02 8.83533582e-02 -2.65369229e-02] [-1.80612177e-01 3.28999013e-02 -1.15335315e-01 ... 3.34048234e-02 -9.67091769e-02 9.60387439e-02] [ 2.07097083e-01 1.68112621e-01 -2.87486389e-02 ... -8.61784518e-02 2.45632961e-01 -2.20646322e-01]] [[ 9.72795561e-02 5.47110066e-02 4.35458450e-03 ... 1.98467970e-02 -4.71016541e-02 -2.87450582e-01] [ 1.96776554e-01 1.43269241e-01 -3.85448635e-01 ... 9.35276672e-02 2.33166441e-01 1.12190396e-01] [-6.02332316e-02 -1.50897250e-01 -1.54469997e-01 ... -7.07668066e-02 -5.59717650e-03 -1.62498653e-01] ... [-1.28934681e-01 -7.61313438e-02 1.28316656e-01 ... 2.46270999e-01 -1.81317225e-01 3.05810511e-01] [ 2.03776747e-01 1.54957563e-01 -2.57831335e-01 ... 6.09912574e-02 2.39989251e-01 -2.85926253e-01] [ 2.99555920e-02 -2.09360849e-02 -3.51121998e-03 ... 1.32579103e-01 -1.56961083e-01 7.55903171e-03]] ... [[ 1.91295862e-01 2.07399428e-01 -1.18663490e-01 ... 
-1.96159017e-05 6.20866716e-02 3.97808366e-02] [ 1.00100763e-01 7.60732070e-02 3.32705081e-02 ... -3.63458902e-01 8.22806731e-04 -3.76603246e-01] [-2.43731171e-01 4.22570944e-01 4.78994250e-02 ... -1.45603850e-01 6.02323096e-03 1.41694382e-01] ... [-3.42806093e-02 3.33693326e-01 2.97410763e-04 ... 1.83113784e-01 -1.50048524e-01 1.81212369e-02] [-3.60386640e-01 2.13385463e-01 8.96980837e-02 ... 1.48392115e-02 2.61391457e-02 -5.87500110e-02] [ 1.88304372e-02 4.50793616e-02 -1.54959008e-01 ... 8.89114439e-02 -1.36097774e-01 -6.58896565e-02]] [[-1.18406624e-01 7.08925128e-02 3.39781344e-01 ... -3.80267911e-02 -4.63342341e-03 6.43948000e-03] [ 2.28246108e-01 1.22479573e-01 4.15442437e-02 ... -1.67776734e-01 1.02546670e-01 -1.06127255e-01] [ 2.37564072e-02 -4.55024004e-01 2.35329960e-02 ... 1.18520789e-01 2.14345574e-01 6.75112978e-02] ... [-1.15075424e-01 6.40555993e-02 8.14000964e-02 ... 1.45928264e-02 -9.44066718e-02 1.70722663e-01] [ 1.27805486e-01 -1.59156129e-01 -1.00712471e-01 ... -1.84769079e-01 3.56511056e-01 -1.21123560e-01] [ 2.26535961e-01 1.88053235e-01 9.17486474e-02 ... -2.22823381e-01 -1.55794069e-01 1.20179333e-01]] [[-1.07973807e-01 -2.27705222e-02 -1.09546632e-01 ... 4.57843505e-02 6.54509589e-02 -1.12330928e-01] [-1.53820187e-01 3.72413725e-01 2.91120201e-01 ... -2.23019749e-01 -3.27899098e-01 5.46143688e-02] [-2.18710899e-02 3.84936109e-03 4.40842658e-03 ... -1.09259591e-01 -1.83812752e-02 -3.58047038e-01] ... [ 1.70578197e-01 -1.11728802e-01 2.99207810e-02 ... 1.81943431e-01 -2.66085248e-02 1.25170648e-02] [ 1.03320852e-01 4.80632819e-02 3.04793328e-01 ... -2.21934468e-02 -1.20028406e-01 2.66262084e-01] [ 1.58975776e-02 -2.59294420e-01 4.55021486e-02 ... -3.21097851e-01 -1.64453872e-02 -4.29091901e-01]]] [[[-1.22538239e-01 -2.14013204e-01 -3.69478434e-01 ... -7.02846870e-02 -1.28559366e-01 -2.76213616e-01] [ 1.00583903e-01 -1.65407266e-02 -4.51811939e-01 ... 
-2.53540695e-01 6.60862997e-02 -2.54986703e-01] [-5.98736256e-02 -7.28845969e-03 -4.38395828e-01 ... 5.59264064e-01 3.15685004e-01 -1.64282471e-01] ... [-2.26082027e-01 2.70957321e-01 -1.87870711e-01 ... -1.14162685e-03 -2.48999238e-01 -1.04536124e-01] [ 6.93352297e-02 2.46455774e-01 -4.11635220e-01 ... -1.95422564e-02 -2.93376803e-01 3.17420810e-01] [-1.95732519e-01 3.69961420e-03 -1.47227019e-01 ... -1.26686469e-01 3.20033878e-01 3.45652580e-01]] [[-3.25616211e-01 -5.52305691e-02 -1.19178295e-01 ... 2.85640687e-01 5.69465935e-01 1.78580776e-01] [-5.33556759e-01 1.63239047e-01 -1.79313734e-01 ... 6.33145988e-01 -1.74564391e-01 2.04947874e-01] [ 2.02163205e-01 -4.43659395e-01 -9.01362300e-02 ... -1.39939219e-01 -3.16846743e-02 -3.57105702e-01] ... [-3.32357809e-02 -4.15968299e-01 -1.20526589e-02 ... 1.67526186e-01 2.79649347e-01 -1.76151320e-01] [-3.13714474e-01 -1.10765606e-01 5.59248328e-01 ... 6.49157390e-02 5.14359698e-02 -1.76730081e-01] [ 5.58104455e-01 6.63639545e-01 1.77478686e-01 ... -6.76881522e-02 -1.79721117e-01 4.87317294e-01]] [[-3.03354442e-01 9.14386958e-02 -2.21311271e-01 ... 1.81572571e-01 2.20442101e-01 -1.45004943e-01] [-1.39638066e-01 -4.13385600e-01 7.71643221e-02 ... 1.76953599e-01 -1.80329457e-01 4.46754813e-01] [-3.14238518e-02 3.24145406e-01 3.49804536e-02 ... -3.57041717e-01 1.61612868e-01 -2.32808083e-01] ... [ 8.38435069e-02 -4.15131480e-01 1.01582989e-01 ... -1.05867296e-01 -1.37504488e-01 -8.75607282e-02] [ 9.59339291e-02 -2.53369123e-01 -4.32632044e-02 ... 2.34581262e-01 1.46662012e-01 -2.09076613e-01] [-2.59029604e-02 -4.12498534e-01 -8.74003991e-02 ... -9.83693004e-02 3.84630829e-01 5.62909842e-01]] ... [[ 1.11992404e-01 2.51585692e-01 4.96092081e-01 ... -2.08384871e-01 4.04814243e-01 1.73784018e-01] [ 1.70169845e-01 -1.72761694e-01 -8.44610035e-02 ... -2.72410288e-02 1.47586793e-01 -1.96715612e-02] [ 3.20254236e-01 -1.73460506e-02 -1.83924228e-01 ... -1.17462426e-02 -3.13720405e-02 4.78549004e-02] ... 
[ 5.14103711e-01 3.51372510e-02 1.61123484e-01 ... -1.30334258e-01 -1.90261424e-01 5.85944772e-01] [ 4.81735677e-01 -2.27399230e-01 5.55812895e-01 ... 9.69523117e-02 9.90490541e-02 -2.33881757e-01] [-4.20346335e-02 1.45105675e-01 -3.79031718e-01 ... -2.05615968e-01 3.57835203e-01 -2.13510856e-01]] [[ 3.00583750e-01 5.72431922e-01 1.22999124e-01 ... -2.19144255e-01 4.60154444e-01 3.43531340e-01] [ 1.29598215e-01 8.76696631e-02 -2.22673431e-01 ... 3.31896767e-02 1.89942524e-01 4.02291417e-01] [ 4.99048233e-01 3.16029638e-01 3.15081686e-01 ... 5.55949211e-01 -9.22364667e-02 -1.48443162e-01] ... [ 7.82047659e-02 -1.33468896e-01 -1.44903824e-01 ... 2.91348517e-01 2.38495022e-01 1.88328147e-01] [-1.03153311e-01 -3.34604144e-01 2.01851159e-01 ... 1.39234230e-01 2.77842909e-01 -2.14089602e-01] [ 1.01749502e-01 3.15104097e-01 -4.90294546e-01 ... -4.34026897e-01 2.54129976e-01 1.18893735e-01]] [[-1.85597241e-01 2.05360785e-01 -2.66172439e-01 ... -3.06895792e-01 3.83205146e-01 3.35793942e-01] [ 1.40021190e-01 -6.32443428e-01 1.53035536e-01 ... 1.75628275e-01 5.14504671e-01 2.93811243e-02] [-7.86039457e-02 -2.45381951e-01 -5.54697681e-03 ... -5.29598892e-02 2.10294083e-01 -1.07845761e-01] ... [ 3.15331817e-01 1.31201386e-01 2.60192037e-01 ... -5.16559422e-01 6.48946762e-02 -4.04849142e-01] [ 3.81732613e-01 -1.32960021e-01 1.48826644e-01 ... 5.97297214e-02 1.50882080e-01 8.45497176e-02] [ 5.58367930e-02 -1.92878366e-01 -3.65441501e-01 ... -4.06014711e-01 -4.10183007e-03 5.25624335e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 1, 1], 'bias_shape': [1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_926.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.3323 (2,1,1,.,.) = 1.5403 (3,1,1,.,.) = -1.3860 [ CPUFloatType{3,1,1,1,1} ]]() %11 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%11) fw_re: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-2.5107982 1.6380702 -0.37048072 ... 4.705942 1.5880954 4.144036 ] [-2.5706317 0.4523579 -1.7788332 ... -0.3322006 -2.8326132 0.46937484] [ 0.15759458 3.6539145 0.92522514 ... -0.5064584 0.3755544 1.5984886 ] ... [-2.6947858 1.6554008 2.7784305 ... -0.14678971 -4.593977 -4.313774 ] [-0.83954805 1.5731715 0.9273533 ... 2.3370569 -2.1164787 0.9005791 ] [ 1.9658885 1.8765513 -1.4782624 ... 1.17223 0.08578255 -0.08878588]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 
0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-0.75468147 0.8276685 -1.1553642 ... 1.5182737 -1.7692709 -0.6891434 ] [ 1.4820236 -0.7684915 2.2108612 ... -1.61388 0.5492928 -1.7918421 ] [ 2.8886034 0.6338084 0.20236345 ... 3.4267097 1.0176816 -0.1801296 ] ... [ 0.7395654 0.79132926 2.7264423 ... 0.8275983 -1.0240661 -3.3097134 ] [-1.7794516 -4.988656 -0.8416724 ... 1.8252685 0.6695745 -0.675378 ] [ 4.4548736 -1.1057085 2.5019772 ... -1.2841426 -1.8471305 -1.1510131 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]]]; ov_res: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-2.5107982 1.6380702 -0.37048072 ... 4.705942 1.5880954 4.144036 ] [-2.5706317 0.4523579 -1.7788332 ... -0.3322006 -2.8326132 0.46937484] [ 0.15759458 3.6539145 0.92522514 ... -0.5064584 0.3755544 1.5984886 ] ... [-2.6947858 1.6554008 2.7784305 ... -0.14678971 -4.593977 -4.313774 ] [-0.83954805 1.5731715 0.9273533 ... 2.3370569 -2.1164787 0.9005791 ] [ 1.9658885 1.8765513 -1.4782624 ... 1.17223 0.08578255 -0.08878588]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-0.75468147 0.8276685 -1.1553642 ... 1.5182737 -1.7692709 -0.6891434 ] [ 1.4820236 -0.7684915 2.2108612 ... -1.61388 0.5492928 -1.7918421 ] [ 2.8886034 0.6338084 0.20236345 ... 3.4267097 1.0176816 -0.1801296 ] ... [ 0.7395654 0.79132926 2.7264423 ... 0.8275983 -1.0240661 -3.3097134 ] [-1.7794516 -4.988656 -0.8416724 ... 
1.8252685 0.6695745 -0.675378 ] [ 4.4548736 -1.1057085 2.5019772 ... -1.2841426 -1.8471305 -1.1510131 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_928.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.01 * 8.0916 (2,1,1,.,.) = -0.7590 (3,1,1,.,.) = -0.1470 (1,2,1,.,.) = -0.7710 (2,2,1,.,.) = 1.1974 (3,2,1,.,.) = 1.0654 (1,3,1,.,.) = -0.7427 (2,3,1,.,.) = 0.01 * -5.7579 (3,3,1,.,.) = 0.4167 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[-1.09523304e-01 7.41640389e-01 6.79822803e-01 ... -1.27409720e+00 -6.01120889e-01 -9.89734113e-01] [-6.93297684e-01 3.09606969e-01 1.16303913e-01 ... 9.05367911e-01 -7.02157617e-01 -1.63532150e+00] [ 5.75789690e-01 -1.35213375e+00 1.20759988e+00 ... -8.53945315e-01 6.71342090e-02 -2.53717512e-01] ... [ 1.14150298e+00 -2.64779747e-01 -7.87062466e-01 ... 2.58428907e+00 -9.08492446e-01 9.55191433e-01] [ 9.59964573e-01 9.69154239e-01 -1.78774208e-01 ... 2.06181884e+00 8.01845729e-01 2.00730726e-01] [ 4.35907915e-02 5.34190178e-01 -8.07547987e-01 ... 1.04590189e+00 -1.45633733e+00 -5.12863219e-01]] [[-8.65505189e-02 -1.09249794e+00 -4.88212883e-01 ... -2.74347949e+00 2.97009259e-01 -1.51951993e+00] [ 8.86732697e-01 2.47968012e-03 1.19062877e+00 ... 
-5.31708539e-01 -1.62810639e-01 -1.28027236e+00] [-9.19064164e-01 8.82806778e-01 9.29910913e-02 ... -3.59990388e-01 -3.77345711e-01 -7.12318003e-01] ... [ 1.66771173e+00 5.46451211e-01 -1.25206900e+00 ... -9.64966357e-01 -1.99065065e+00 2.04173088e+00] [-1.33007479e+00 -1.73956192e+00 -3.96926045e-01 ... 3.46729964e-01 9.94393229e-01 -3.25604439e-01] [-1.29168546e+00 1.39059946e-02 -3.86432678e-01 ... -1.33524394e+00 -2.83942819e+00 1.89810431e+00]] [[ 3.60071748e-01 1.24509744e-01 1.48792517e+00 ... 1.72567677e+00 3.87311518e-01 1.01330765e-01] [ 2.17185068e+00 7.54482687e-01 -4.83548850e-01 ... -8.21224093e-01 8.81611854e-02 1.92845345e+00] [ 9.33419049e-01 1.87447274e+00 1.11541748e+00 ... 4.36958492e-01 1.33537650e-01 1.29775000e+00] ... [ 2.68904448e-01 1.79440475e+00 1.76753962e+00 ... 2.20071650e+00 1.17789125e+00 1.02565956e+00] [ 6.71703339e-01 4.38084662e-01 1.51546860e+00 ... 6.64969608e-02 -6.62431121e-01 -1.86778831e+00] [ 1.06752229e+00 -1.82008147e-01 -1.51566844e-02 ... -5.79278648e-01 1.06250584e+00 7.25466847e-01]] [[ 2.91921526e-01 7.16288447e-01 -5.84745526e-01 ... 1.95488051e-01 -5.25796354e-01 -7.54655659e-01] [-2.22346298e-02 -1.19951987e+00 2.12480521e+00 ... -7.11240053e-01 -3.98158044e-01 -1.01469803e+00] [ 4.56855267e-01 1.67888868e+00 -7.56296337e-01 ... 9.17051852e-01 1.58628047e+00 1.42197859e+00] ... [ 2.60120966e-02 4.60645348e-01 -2.52770901e-01 ... 8.35694075e-01 6.03061736e-01 -9.67955828e-01] [-7.48725891e-01 -1.84423018e+00 -5.88814735e-01 ... 9.03714001e-01 -2.16216993e+00 -1.16536915e+00] [ 2.40198874e+00 4.82783377e-01 -2.96458602e-01 ... -3.00824970e-01 6.03684485e-02 -1.25602639e+00]] [[ 1.22460639e+00 -1.70838952e+00 8.40912163e-02 ... 8.90476942e-01 -9.87824738e-01 -5.59939146e-01] [-3.87890130e-01 -1.17735338e+00 -1.58911407e+00 ... 5.32942414e-02 7.64632046e-01 2.82128066e-01] [-3.92770141e-01 2.03692098e-03 1.04860151e+00 ... 3.89366478e-01 3.66731063e-02 1.01762366e+00] ... 
[ 1.59756374e+00 -3.06028366e-01 -1.55380070e-01 ... -1.36792094e-01 1.30042648e+00 1.09104133e+00] [ 7.65053451e-01 1.25102103e+00 8.34755525e-02 ... -9.33623314e-01 1.82991207e+00 8.64439666e-01] [ 4.77334470e-01 -6.01859502e-02 -1.59580564e+00 ... -8.21390331e-01 -2.85121985e-02 1.21023691e+00]]] [[[ 2.34696960e+00 -3.06416243e-01 -8.07192028e-02 ... 1.36115003e+00 4.02418882e-01 2.19869089e+00] [-1.95032731e-01 -1.14137995e+00 7.62519985e-02 ... 7.24355102e-01 -7.34984636e-01 1.49350536e+00] [-1.17820847e+00 5.32489419e-01 -2.09589696e+00 ... 1.41987789e+00 -6.83985949e-02 1.45001756e-02] ... [-5.90034068e-01 -1.26383555e+00 -1.60611712e-03 ... -4.60949707e+00 1.56065643e+00 -9.12828982e-01] [-9.49947417e-01 -2.21842122e+00 8.73953402e-02 ... -2.22624588e+00 -2.30743751e-01 -1.10941768e+00] [ 7.73673296e-01 1.22604370e+00 -1.72346458e-01 ... 9.61311813e-03 5.69956541e-01 2.13773417e+00]] [[ 9.52586532e-03 -1.70310378e+00 -9.99241918e-02 ... 2.85841584e+00 -1.35499203e+00 7.02470124e-01] [ 6.21302128e-01 1.02036428e+00 -5.51634550e-01 ... -3.21238749e-02 5.19225836e-01 2.56647646e-01] [ 8.27766061e-01 -7.07903802e-01 1.20266248e-02 ... 7.43561506e-01 7.17124999e-01 -2.11041480e-01] ... [-1.71411061e+00 3.93566340e-01 1.16976678e+00 ... 2.93434918e-01 7.71708369e-01 -1.82215965e+00] [ 4.58636105e-01 2.95581889e+00 6.02500141e-01 ... -1.57993168e-01 -9.19415772e-01 6.07437730e-01] [ 2.50986433e+00 9.31484818e-01 1.66194830e-02 ... 2.31072688e+00 2.30902934e+00 -7.77664423e-01]] [[-3.09565991e-01 1.99011731e+00 -1.69964659e+00 ... 8.18267465e-02 5.66929102e-01 -1.97224170e-01] [-5.22347152e-01 6.62472427e-01 1.59096897e+00 ... -5.54255843e-01 -8.69187176e-01 -2.40389442e+00] [ 1.12653279e+00 -1.48512387e+00 -1.75667763e+00 ... 9.27983463e-01 5.03746033e-01 -1.46631551e+00] ... [-4.16993618e-01 -2.01198959e+00 -1.77194071e+00 ... -1.93512774e+00 -1.82483256e+00 -1.23922992e+00] [ 1.97120547e+00 5.61225951e-01 -2.04567099e+00 ... 
1.75030506e+00 -2.73186594e-01 2.03996968e+00] [-9.80382740e-01 2.92125988e+00 9.47004974e-01 ... -2.27761433e-01 -2.53577685e+00 1.09256232e+00]] [[ 1.01647151e+00 -7.23971903e-01 1.79439616e+00 ... 6.26602173e-01 1.39377344e+00 4.24229592e-01] [ 8.77689481e-01 1.16645074e+00 -4.69269603e-01 ... 5.35828173e-02 7.08858490e-01 1.12496471e+00] [ 5.84332049e-01 -2.38428712e+00 -2.39533618e-01 ... 3.12768191e-01 -2.51604915e+00 -5.97238302e-01] ... [ 2.82924861e-01 -9.92734849e-01 -7.31094897e-01 ... -1.14384234e+00 -8.52635086e-01 -8.79826769e-02] [ 1.30415809e+00 2.50029755e+00 1.79774597e-01 ... -2.43614480e-01 -1.63395345e-01 -1.53756738e-01] [-2.32644841e-01 1.19819157e-01 2.44560051e+00 ... 2.16007739e-01 -1.00625014e+00 1.62299907e+00]] [[-2.78441954e+00 1.00909770e+00 9.67792988e-01 ... -7.40169466e-01 1.06732833e+00 8.01530182e-01] [ 1.65520179e+00 8.34234774e-01 -4.58148010e-02 ... 9.02213752e-02 4.91346449e-01 -6.59981608e-01] [-8.46790493e-01 3.00889045e-01 -1.93568811e-01 ... -5.44471622e-01 -1.53452682e+00 -9.49773550e-01] ... [-8.42057467e-01 2.60370111e+00 -1.15560305e+00 ... -9.21487033e-01 -1.95712709e+00 -6.29182220e-01] [-7.07417309e-01 -2.72961617e+00 1.17519844e+00 ... 1.32614076e+00 -8.15888762e-01 -2.91532606e-01] [ 1.21021748e-01 1.73219338e-01 2.73931551e+00 ... 7.79924393e-01 -2.06069974e-03 -2.18145895e+00]]] [[[ 9.60993528e-01 -8.99872839e-01 -3.32522392e-01 ... 1.20839739e+00 7.89341986e-01 1.50380421e+00] [ 2.73765236e-01 -1.59175903e-01 -3.24100882e-01 ... -2.09987670e-01 2.41046865e-02 1.68830228e+00] [-8.41506302e-01 1.02182651e+00 -1.93284273e+00 ... 1.01019573e+00 1.99053381e-02 4.27729875e-01] ... [-6.13239050e-01 -5.99301338e-01 4.30163383e-01 ... -3.57231617e+00 1.31134367e+00 -1.07462084e+00] [-1.38645387e+00 -1.85380840e+00 8.13335106e-02 ... -2.08237553e+00 -5.43097615e-01 -3.60654861e-01] [-5.34965657e-02 1.73985958e-01 5.76451421e-01 ... 
-6.87036872e-01 1.25057840e+00 1.43220961e+00]] [[ 6.46650255e-01 2.61105955e-01 3.68437499e-01 ... 2.98445582e+00 -7.18169451e-01 1.22043002e+00] [-6.74879432e-01 2.18276858e-01 -1.36533344e+00 ... 3.18198085e-01 -3.21701914e-01 1.07160628e+00] [ 9.24365580e-01 -2.79594362e-01 1.20403014e-01 ... 2.08838314e-01 6.51617706e-01 3.00899327e-01] ... [-1.92351389e+00 -6.04818985e-02 1.09254599e+00 ... 8.35396826e-01 1.67260087e+00 -2.34829330e+00] [ 8.08402777e-01 2.26995707e+00 1.96245864e-01 ... 2.80446671e-02 -4.93188858e-01 4.07299221e-01] [ 1.46301150e+00 2.95774937e-01 4.21794355e-01 ... 1.84869790e+00 2.73369026e+00 -1.52401686e+00]] [[-5.95431253e-02 2.17235580e-01 -1.99665987e+00 ... -1.20431972e+00 -1.29919767e-01 1.73860162e-01] [-1.26009619e+00 -2.36448452e-01 1.21313655e+00 ... 3.40740949e-01 -2.71443397e-01 -2.48892832e+00] [-2.92597473e-01 -1.48268497e+00 -1.68462825e+00 ... -1.45115942e-01 -1.43780783e-01 -1.60645974e+00] ... [-3.21842134e-01 -2.04937482e+00 -2.39231014e+00 ... -2.04026628e+00 -1.05513096e+00 -1.01149440e+00] [ 3.57139379e-01 -4.38564681e-02 -1.99985147e+00 ... 5.13210535e-01 1.57798767e-01 1.65514708e+00] [-1.08279026e+00 1.12692201e+00 1.68583497e-01 ... -6.74425513e-02 -1.70272970e+00 -4.36569422e-01]] [[ 2.77631968e-01 -9.65225160e-01 1.01572287e+00 ... 2.88481653e-01 9.60158765e-01 6.34530902e-01] [ 4.54835534e-01 1.45237887e+00 -1.66995382e+00 ... 7.50836670e-01 6.28459692e-01 1.08207703e+00] [-2.64313310e-01 -1.89945734e+00 1.96949214e-01 ... -2.36364588e-01 -1.65138173e+00 -7.57797301e-01] ... [-3.43426578e-02 -7.81235099e-01 -8.56541246e-02 ... -1.04208350e+00 -6.05921984e-01 5.60390651e-01] [ 1.17497575e+00 2.14560723e+00 5.22365391e-01 ... -3.95574749e-01 1.29062068e+00 7.81754673e-01] [-1.70916677e+00 -3.92321348e-01 1.09317553e+00 ... 2.81356931e-01 -3.69055986e-01 1.36266959e+00]] [[-1.57747495e+00 1.64874709e+00 5.19263327e-01 ... 
-8.16524506e-01 1.03656507e+00 7.38394856e-01] [ 9.27526057e-01 1.19368899e+00 1.00812984e+00 ... -1.52869388e-01 -3.42029274e-01 -2.19430923e-01] [-1.20342439e-02 1.43250853e-01 -6.52021110e-01 ... -6.23051763e-01 -2.96960652e-01 -5.65857649e-01] ... [-1.51794672e+00 6.83259785e-01 -3.72085482e-01 ... -7.31597424e-01 -1.32846642e+00 -1.01995850e+00] [-7.86058605e-01 -2.14300060e+00 5.35313368e-01 ... 7.92515457e-01 -1.48550296e+00 -8.48460197e-01] [ 1.94886327e-02 6.15887165e-01 2.27939272e+00 ... 7.50584841e-01 3.00502330e-01 -1.55962980e+00]]]]]; ov_res: [[[[[-1.09523304e-01 7.41640389e-01 6.79822803e-01 ... -1.27409720e+00 -6.01120889e-01 -9.89734113e-01] [-6.93297684e-01 3.09606969e-01 1.16303913e-01 ... 9.05367911e-01 -7.02157617e-01 -1.63532150e+00] [ 5.75789690e-01 -1.35213375e+00 1.20759988e+00 ... -8.53945315e-01 6.71342090e-02 -2.53717512e-01] ... [ 1.14150298e+00 -2.64779747e-01 -7.87062466e-01 ... 2.58428907e+00 -9.08492446e-01 9.55191433e-01] [ 9.59964573e-01 9.69154239e-01 -1.78774208e-01 ... 2.06181884e+00 8.01845729e-01 2.00730726e-01] [ 4.35907915e-02 5.34190178e-01 -8.07547987e-01 ... 1.04590189e+00 -1.45633733e+00 -5.12863219e-01]] [[-8.65505189e-02 -1.09249794e+00 -4.88212883e-01 ... -2.74347949e+00 2.97009259e-01 -1.51951993e+00] [ 8.86732697e-01 2.47968012e-03 1.19062877e+00 ... -5.31708539e-01 -1.62810639e-01 -1.28027236e+00] [-9.19064164e-01 8.82806778e-01 9.29910913e-02 ... -3.59990388e-01 -3.77345711e-01 -7.12318003e-01] ... [ 1.66771173e+00 5.46451211e-01 -1.25206900e+00 ... -9.64966357e-01 -1.99065065e+00 2.04173088e+00] [-1.33007479e+00 -1.73956192e+00 -3.96926045e-01 ... 3.46729964e-01 9.94393229e-01 -3.25604439e-01] [-1.29168546e+00 1.39059946e-02 -3.86432678e-01 ... -1.33524394e+00 -2.83942819e+00 1.89810431e+00]] [[ 3.60071748e-01 1.24509744e-01 1.48792517e+00 ... 1.72567677e+00 3.87311518e-01 1.01330765e-01] [ 2.17185068e+00 7.54482687e-01 -4.83548850e-01 ... 
-8.21224093e-01 8.81611854e-02 1.92845345e+00] [ 9.33419049e-01 1.87447274e+00 1.11541748e+00 ... 4.36958492e-01 1.33537650e-01 1.29775000e+00] ... [ 2.68904448e-01 1.79440475e+00 1.76753962e+00 ... 2.20071650e+00 1.17789125e+00 1.02565956e+00] [ 6.71703339e-01 4.38084662e-01 1.51546860e+00 ... 6.64969608e-02 -6.62431121e-01 -1.86778831e+00] [ 1.06752229e+00 -1.82008147e-01 -1.51566844e-02 ... -5.79278648e-01 1.06250584e+00 7.25466847e-01]] [[ 2.91921526e-01 7.16288447e-01 -5.84745526e-01 ... 1.95488051e-01 -5.25796354e-01 -7.54655659e-01] [-2.22346298e-02 -1.19951987e+00 2.12480521e+00 ... -7.11240053e-01 -3.98158044e-01 -1.01469803e+00] [ 4.56855267e-01 1.67888868e+00 -7.56296337e-01 ... 9.17051852e-01 1.58628047e+00 1.42197859e+00] ... [ 2.60120966e-02 4.60645348e-01 -2.52770901e-01 ... 8.35694075e-01 6.03061736e-01 -9.67955828e-01] [-7.48725891e-01 -1.84423018e+00 -5.88814735e-01 ... 9.03714001e-01 -2.16216993e+00 -1.16536915e+00] [ 2.40198874e+00 4.82783377e-01 -2.96458602e-01 ... -3.00824970e-01 6.03684485e-02 -1.25602639e+00]] [[ 1.22460639e+00 -1.70838952e+00 8.40912163e-02 ... 8.90476942e-01 -9.87824738e-01 -5.59939146e-01] [-3.87890130e-01 -1.17735338e+00 -1.58911407e+00 ... 5.32942414e-02 7.64632046e-01 2.82128066e-01] [-3.92770141e-01 2.03692098e-03 1.04860151e+00 ... 3.89366478e-01 3.66731063e-02 1.01762366e+00] ... [ 1.59756374e+00 -3.06028366e-01 -1.55380070e-01 ... -1.36792094e-01 1.30042648e+00 1.09104133e+00] [ 7.65053451e-01 1.25102103e+00 8.34755525e-02 ... -9.33623314e-01 1.82991207e+00 8.64439666e-01] [ 4.77334470e-01 -6.01859502e-02 -1.59580564e+00 ... -8.21390331e-01 -2.85121985e-02 1.21023691e+00]]] [[[ 2.34696960e+00 -3.06416243e-01 -8.07192028e-02 ... 1.36115003e+00 4.02418882e-01 2.19869089e+00] [-1.95032731e-01 -1.14137995e+00 7.62519985e-02 ... 7.24355102e-01 -7.34984636e-01 1.49350536e+00] [-1.17820847e+00 5.32489419e-01 -2.09589696e+00 ... 1.41987789e+00 -6.83985949e-02 1.45001756e-02] ... 
[-5.90034068e-01 -1.26383555e+00 -1.60611712e-03 ... -4.60949707e+00 1.56065643e+00 -9.12828982e-01] [-9.49947417e-01 -2.21842122e+00 8.73953402e-02 ... -2.22624588e+00 -2.30743751e-01 -1.10941768e+00] [ 7.73673296e-01 1.22604370e+00 -1.72346458e-01 ... 9.61311813e-03 5.69956541e-01 2.13773417e+00]] [[ 9.52586532e-03 -1.70310378e+00 -9.99241918e-02 ... 2.85841584e+00 -1.35499203e+00 7.02470124e-01] [ 6.21302128e-01 1.02036428e+00 -5.51634550e-01 ... -3.21238749e-02 5.19225836e-01 2.56647646e-01] [ 8.27766061e-01 -7.07903802e-01 1.20266248e-02 ... 7.43561506e-01 7.17124999e-01 -2.11041480e-01] ... [-1.71411061e+00 3.93566340e-01 1.16976678e+00 ... 2.93434918e-01 7.71708369e-01 -1.82215965e+00] [ 4.58636105e-01 2.95581889e+00 6.02500141e-01 ... -1.57993168e-01 -9.19415772e-01 6.07437730e-01] [ 2.50986433e+00 9.31484818e-01 1.66194830e-02 ... 2.31072688e+00 2.30902934e+00 -7.77664423e-01]] [[-3.09565991e-01 1.99011731e+00 -1.69964659e+00 ... 8.18267465e-02 5.66929102e-01 -1.97224170e-01] [-5.22347152e-01 6.62472427e-01 1.59096897e+00 ... -5.54255843e-01 -8.69187176e-01 -2.40389442e+00] [ 1.12653279e+00 -1.48512387e+00 -1.75667763e+00 ... 9.27983463e-01 5.03746033e-01 -1.46631551e+00] ... [-4.16993618e-01 -2.01198959e+00 -1.77194071e+00 ... -1.93512774e+00 -1.82483256e+00 -1.23922992e+00] [ 1.97120547e+00 5.61225951e-01 -2.04567099e+00 ... 1.75030506e+00 -2.73186594e-01 2.03996968e+00] [-9.80382740e-01 2.92125988e+00 9.47004974e-01 ... -2.27761433e-01 -2.53577685e+00 1.09256232e+00]] [[ 1.01647151e+00 -7.23971903e-01 1.79439616e+00 ... 6.26602173e-01 1.39377344e+00 4.24229592e-01] [ 8.77689481e-01 1.16645074e+00 -4.69269603e-01 ... 5.35828173e-02 7.08858490e-01 1.12496471e+00] [ 5.84332049e-01 -2.38428712e+00 -2.39533618e-01 ... 3.12768191e-01 -2.51604915e+00 -5.97238302e-01] ... [ 2.82924861e-01 -9.92734849e-01 -7.31094897e-01 ... -1.14384234e+00 -8.52635086e-01 -8.79826769e-02] [ 1.30415809e+00 2.50029755e+00 1.79774597e-01 ... 
-2.43614480e-01 -1.63395345e-01 -1.53756738e-01] [-2.32644841e-01 1.19819157e-01 2.44560051e+00 ... 2.16007739e-01 -1.00625014e+00 1.62299907e+00]] [[-2.78441954e+00 1.00909770e+00 9.67792988e-01 ... -7.40169466e-01 1.06732833e+00 8.01530182e-01] [ 1.65520179e+00 8.34234774e-01 -4.58148010e-02 ... 9.02213752e-02 4.91346449e-01 -6.59981608e-01] [-8.46790493e-01 3.00889045e-01 -1.93568811e-01 ... -5.44471622e-01 -1.53452682e+00 -9.49773550e-01] ... [-8.42057467e-01 2.60370111e+00 -1.15560305e+00 ... -9.21487033e-01 -1.95712709e+00 -6.29182220e-01] [-7.07417309e-01 -2.72961617e+00 1.17519844e+00 ... 1.32614076e+00 -8.15888762e-01 -2.91532606e-01] [ 1.21021748e-01 1.73219338e-01 2.73931551e+00 ... 7.79924393e-01 -2.06069974e-03 -2.18145895e+00]]] [[[ 9.60993528e-01 -8.99872839e-01 -3.32522392e-01 ... 1.20839739e+00 7.89341986e-01 1.50380421e+00] [ 2.73765236e-01 -1.59175903e-01 -3.24100882e-01 ... -2.09987670e-01 2.41046865e-02 1.68830228e+00] [-8.41506302e-01 1.02182651e+00 -1.93284273e+00 ... 1.01019573e+00 1.99053381e-02 4.27729875e-01] ... [-6.13239050e-01 -5.99301338e-01 4.30163383e-01 ... -3.57231617e+00 1.31134367e+00 -1.07462084e+00] [-1.38645387e+00 -1.85380840e+00 8.13335106e-02 ... -2.08237553e+00 -5.43097615e-01 -3.60654861e-01] [-5.34965657e-02 1.73985958e-01 5.76451421e-01 ... -6.87036872e-01 1.25057840e+00 1.43220961e+00]] [[ 6.46650255e-01 2.61105955e-01 3.68437499e-01 ... 2.98445582e+00 -7.18169451e-01 1.22043002e+00] [-6.74879432e-01 2.18276858e-01 -1.36533344e+00 ... 3.18198085e-01 -3.21701914e-01 1.07160628e+00] [ 9.24365580e-01 -2.79594362e-01 1.20403014e-01 ... 2.08838314e-01 6.51617706e-01 3.00899327e-01] ... [-1.92351389e+00 -6.04818985e-02 1.09254599e+00 ... 8.35396826e-01 1.67260087e+00 -2.34829330e+00] [ 8.08402777e-01 2.26995707e+00 1.96245864e-01 ... 2.80446671e-02 -4.93188858e-01 4.07299221e-01] [ 1.46301150e+00 2.95774937e-01 4.21794355e-01 ... 
1.84869790e+00 2.73369026e+00 -1.52401686e+00]] [[-5.95431253e-02 2.17235580e-01 -1.99665987e+00 ... -1.20431972e+00 -1.29919767e-01 1.73860162e-01] [-1.26009619e+00 -2.36448452e-01 1.21313655e+00 ... 3.40740949e-01 -2.71443397e-01 -2.48892832e+00] [-2.92597473e-01 -1.48268497e+00 -1.68462825e+00 ... -1.45115942e-01 -1.43780783e-01 -1.60645974e+00] ... [-3.21842134e-01 -2.04937482e+00 -2.39231014e+00 ... -2.04026628e+00 -1.05513096e+00 -1.01149440e+00] [ 3.57139379e-01 -4.38564681e-02 -1.99985147e+00 ... 5.13210535e-01 1.57798767e-01 1.65514708e+00] [-1.08279026e+00 1.12692201e+00 1.68583497e-01 ... -6.74425513e-02 -1.70272970e+00 -4.36569422e-01]] [[ 2.77631968e-01 -9.65225160e-01 1.01572287e+00 ... 2.88481653e-01 9.60158765e-01 6.34530902e-01] [ 4.54835534e-01 1.45237887e+00 -1.66995382e+00 ... 7.50836670e-01 6.28459692e-01 1.08207703e+00] [-2.64313310e-01 -1.89945734e+00 1.96949214e-01 ... -2.36364588e-01 -1.65138173e+00 -7.57797301e-01] ... [-3.43426578e-02 -7.81235099e-01 -8.56541246e-02 ... -1.04208350e+00 -6.05921984e-01 5.60390651e-01] [ 1.17497575e+00 2.14560723e+00 5.22365391e-01 ... -3.95574749e-01 1.29062068e+00 7.81754673e-01] [-1.70916677e+00 -3.92321348e-01 1.09317553e+00 ... 2.81356931e-01 -3.69055986e-01 1.36266959e+00]] [[-1.57747495e+00 1.64874709e+00 5.19263327e-01 ... -8.16524506e-01 1.03656507e+00 7.38394856e-01] [ 9.27526057e-01 1.19368899e+00 1.00812984e+00 ... -1.52869388e-01 -3.42029274e-01 -2.19430923e-01] [-1.20342439e-02 1.43250853e-01 -6.52021110e-01 ... -6.23051763e-01 -2.96960652e-01 -5.65857649e-01] ... [-1.51794672e+00 6.83259785e-01 -3.72085482e-01 ... -7.31597424e-01 -1.32846642e+00 -1.01995850e+00] [-7.86058605e-01 -2.14300060e+00 5.35313368e-01 ... 7.92515457e-01 -1.48550296e+00 -8.48460197e-01] [ 1.94886327e-02 6.15887165e-01 2.27939272e+00 ... 7.50584841e-01 3.00502330e-01 -1.55962980e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} 0] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_930.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.3653 (2,1,1,.,.) = 0.001 * -2.1628 (3,1,1,.,.) = 1.1432 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[ 1.4810998 0. 1.9991238 ... -0.58698 0. 0.5441099 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.2931943 0. 1.4320701 ... 1.7311468 0. -0.6370641 ] ... [ 1.131531 0. 1.6366051 ... -0.91328603 0. 0.57856894] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.2101306 0. -1.4107734 ... 0.69019276 0. 0.5256164 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-0.1739117 0. -2.4273639 ... -0.83572775 0. -2.1482491 ] [ 0. 0. 0. ... 0. 0. 0. ] [-1.0609692 0. 0.9548266 ... -0.6807142 0. 0.28282377] ... [-1.7841485 0. -1.0749468 ... 0.25125355 0. 1.3825381 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.0304472 0. -0.6630697 ... -0.54700685 0. 2.310412 ]] ... [[-0.68547475 0. 0.6764502 ... -0.11672218 0. -2.3911376 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.6021961 0. -0.68293357 ... -0.41793242 0. 1.463069 ] ... [ 1.3354623 0. 
0.27179152 ... -0.99513584 0. 1.007866 ] [ 0. 0. 0. ... 0. 0. 0. ] [-2.0629263 0. -0.5863576 ... 0.21757539 0. -0.05518465]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-0.825355 0. 0.30480176 ... 0.77556664 0. 1.0231119 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.9401258 0. 1.5342432 ... 1.1831386 0. -0.92521113] ... [-1.095099 0. 1.4404516 ... 0.8275145 0. 0.05485604] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.2954916 0. -1.2505298 ... 0.39521772 0. 1.4377688 ]]]]]; ov_res: [[[[[ 1.4810998 0. 1.9991238 ... -0.58698 0. 0.5441099 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.2931943 0. 1.4320701 ... 1.7311468 0. -0.6370641 ] ... [ 1.131531 0. 1.6366051 ... -0.91328603 0. 0.57856894] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.2101306 0. -1.4107734 ... 0.69019276 0. 0.5256164 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-0.1739117 0. -2.4273639 ... -0.83572775 0. -2.1482491 ] [ 0. 0. 0. ... 0. 0. 0. ] [-1.0609692 0. 0.9548266 ... -0.6807142 0. 0.28282377] ... [-1.7841485 0. -1.0749468 ... 0.25125355 0. 1.3825381 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.0304472 0. -0.6630697 ... -0.54700685 0. 2.310412 ]] ... [[-0.68547475 0. 0.6764502 ... -0.11672218 0. -2.3911376 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.6021961 0. -0.68293357 ... -0.41793242 0. 1.463069 ] ... [ 1.3354623 0. 0.27179152 ... -0.99513584 0. 1.007866 ] [ 0. 0. 0. ... 0. 0. 0. ] [-2.0629263 0. -0.5863576 ... 0.21757539 0. -0.05518465]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-0.825355 0. 0.30480176 ... 0.77556664 0. 1.0231119 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.9401258 0. 1.5342432 ... 1.1831386 0. -0.92521113] ... [-1.095099 0. 1.4404516 ... 0.8275145 0. 0.05485604] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.2954916 0. 
-1.2505298 ... 0.39521772 0. 1.4377688 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 2, 2], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_932.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.7836 (2,1,1,.,.) = 1.9926 (3,1,1,.,.) = 0.9282 (1,2,1,.,.) = -1.4365 (2,2,1,.,.) = 0.2334 (3,2,1,.,.) = 0.01 * 1.9567 (1,3,1,.,.) = 1.3948 (2,3,1,.,.) = 0.01 * 1.2507 (3,3,1,.,.) = -2.0117 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[-1.2197301e+00 7.3614436e-01 -1.1627654e+00 -2.0217607e+00 1.4740433e+00] [-1.9565518e+00 2.5090394e+00 2.4390287e+00 -2.7703610e-01 2.4950857e+00] [ 4.5492787e+00 1.3031622e+00 5.8430928e-01 8.7441111e-01 -7.2614676e-01] [-1.4465666e+00 -3.2253990e-01 5.1928699e-01 4.4762740e+00 -9.2693514e-01] [-9.4391346e-01 3.2354219e+00 -7.1179670e-01 -9.4970459e-01 -1.9983964e+00]] [[-5.8970523e-01 3.1969359e+00 1.9924344e+00 4.2681427e+00 -1.8711773e+00] [-9.5675516e-01 4.3528670e-01 2.8804049e-01 -1.0073961e+00 -1.5361938e+00] [-9.5478076e-01 1.0781238e+00 1.6372104e+00 1.4257023e+00 -1.1773226e+00] [-1.5208412e+00 -1.2256769e+00 5.6878503e-02 1.6862720e+00 -2.5432234e+00] [ 9.4945967e-02 1.8211225e+00 -9.5457911e-01 -7.0367587e-01 -4.8634571e-01]] [[-6.3959008e-01 -1.2124979e+00 -3.1620154e+00 3.1968284e+00 4.5226259e+00] [ 
1.0068481e-01 -2.9240251e+00 -1.5256828e+00 2.5950134e-01 -2.9532764e+00] [ 3.6713901e+00 4.5348051e-01 -1.5888492e+00 1.1155676e-02 -1.7816112e+00] [-6.7505056e-01 3.2357210e-01 -4.7247190e+00 -4.9490333e+00 1.9336685e+00] [-5.2211819e+00 4.9057007e+00 1.7496370e+00 -1.7942526e+00 -7.2553676e-01]] [[-2.0749636e+00 3.6135674e-01 -3.5826485e+00 1.8725176e+00 3.0053964e+00] [ 1.7241611e+00 -2.8442460e-01 -5.5551195e-01 -2.5644689e+00 -1.2022921e+00] [-2.2751970e+00 1.8716027e+00 -4.2905688e-01 -2.7390428e+00 8.4505880e-01] [-1.3791573e+00 -1.9661391e+00 -4.8259254e+00 -4.0296444e-01 1.5607963e+00] [ 1.5230241e+00 -2.1594943e-01 -8.2172877e-01 1.7040867e+00 1.7762412e+00]] [[-6.3508880e-01 9.6946490e-01 -1.0944467e+00 -3.6078982e+00 -6.3861203e-01] [-1.7424241e-01 -1.3208995e+00 1.0282730e+00 4.4257692e-03 7.9946417e-01] [ 6.0687631e-02 1.3976704e-01 -6.2467885e-01 4.2958817e+00 1.2903945e+00] [ 7.9391092e-02 9.2721224e-01 1.2979960e+00 -2.2263707e-01 -3.2211683e+00] [-6.2778389e-01 -2.1579876e+00 -2.2715445e+00 -4.8919684e-01 5.2070701e-01]]] [[[ 2.0807614e+00 -1.3387731e+00 3.8801582e+00 -5.1287746e-01 -1.2484065e+00] [-5.0298768e-01 5.7225579e-01 -3.0049570e+00 1.3006978e+00 6.5960479e-01] [ 1.5698278e-01 3.7089160e-01 -1.7752974e+00 1.1770976e+00 1.5144492e+00] [ 3.4139440e+00 -2.8498793e+00 -2.1229603e+00 2.1916239e+00 2.9909822e-01] [ 9.9017882e-01 1.2837174e+00 7.4018312e-01 -3.6593750e-01 -1.3391112e+00]] [[-1.3616402e+00 5.0471514e-01 -2.6678016e+00 2.3456810e+00 -1.0258532e+00] [ 3.8588569e+00 -8.2310039e-01 2.5410507e+00 -1.7477505e+00 8.3949673e-01] [-1.8862736e-01 -1.6436677e+00 2.7916616e-01 -6.4024514e-01 8.0436319e-01] [-3.3016381e+00 1.5215137e+00 1.8756042e+00 2.8997309e+00 -2.2470152e+00] [-1.0343839e+00 -1.9732263e+00 2.7336717e-01 9.8324555e-01 2.3166864e+00]] [[ 1.8042693e+00 -4.0082045e+00 -2.3393660e+00 1.6136724e-01 9.6964203e-02] [-3.1003323e+00 -1.2029926e+00 1.6019684e+00 -2.4831507e-01 -3.0998061e+00] [ 1.6072705e+00 4.3354836e+00 
1.8905721e+00 1.6131446e-01 -1.1283268e+00] [ 2.5375695e+00 1.6068670e+00 1.1604203e+00 -1.7425506e+00 2.6618621e-01] [ 8.8915735e-01 1.5701482e+00 8.2974994e-01 1.1434216e+00 8.4256470e-01]] [[ 1.1452307e+00 -1.7187186e+00 -2.8219392e+00 1.0850017e+00 3.0330069e+00] [ 4.0510044e-01 1.1266038e+00 2.6932454e+00 -1.7208321e-01 4.9812344e-01] [-1.5927333e+00 3.3661234e+00 2.0979875e-01 2.2989056e+00 1.1205587e+00] [-5.1725185e-01 -2.3978500e+00 -3.1228871e+00 -3.5818901e+00 2.1274445e+00] [-3.3621147e-01 5.5976498e-01 3.9674377e-01 1.2950716e+00 2.3985758e+00]] [[-2.9109755e+00 -7.8768879e-01 8.4175438e-01 1.5045086e+00 -3.5142229e+00] [-2.6968377e+00 -2.6320326e+00 -3.2970828e-01 -1.3439660e+00 -1.5267813e+00] [ 2.6310718e-01 -2.1339006e+00 -1.4453711e-01 2.2129240e+00 -1.7039086e+00] [-8.8326961e-01 -4.5344296e+00 8.5100621e-01 2.1256430e+00 -2.8499041e+00] [ 1.9296662e+00 -2.4713268e+00 -8.2051522e-01 -1.2535825e+00 -1.4970303e+00]]] [[[ 3.6090133e+00 -2.0923452e+00 2.5579712e+00 1.9413208e+00 -1.8385129e+00] [-1.9415892e+00 -8.8255870e-01 -4.8844981e+00 2.4401867e+00 -7.6491547e-01] [-2.5068612e+00 -5.3961670e-01 -3.3940570e+00 1.1659664e+00 6.5219730e-01] [ 2.0609138e+00 -4.5209721e-01 -1.9957299e+00 -5.3619957e+00 -1.3408814e-01] [ 4.4355025e+00 3.8044938e-01 -2.7872400e+00 -1.5933961e+00 1.0298058e+00]] [[-9.4604649e-02 -2.3365808e+00 -2.5796764e+00 -2.6323559e+00 5.1050156e-01] [ 4.8775855e-02 -1.2616212e+00 1.4963235e+00 -8.8049811e-01 -3.5302046e-01] [-1.3486575e-01 -3.6845992e+00 3.7039638e-01 -1.4370999e+00 8.7430614e-01] [-3.1307592e+00 2.9673612e+00 1.9131117e+00 -8.9452493e-01 -1.9785912e+00] [-1.0549151e-01 -1.7908063e+00 1.5017073e+00 -9.9975747e-01 9.4083899e-01]] [[-3.0382299e-01 -2.2180178e+00 -1.9373053e+00 -3.7114301e-01 -2.7046118e+00] [-3.5206583e+00 3.3578124e+00 4.2405119e+00 -9.2633700e-01 7.7251363e-01] [-2.2377446e+00 3.7848272e+00 1.5659324e+00 7.0771247e-01 -9.1418588e-01] [ 7.0822768e-02 8.6304003e-01 5.9513456e-01 7.0493877e-01 
-1.5636305e+00] [ 6.7448697e+00 -1.0012969e+00 2.0474465e+00 2.4760165e+00 1.8495336e+00]] [[ 1.5634857e+00 -2.8299880e+00 1.4970605e+00 -9.8393220e-01 2.2411423e+00] [ 1.6766067e+00 1.0745523e+00 1.1858357e+00 5.9343678e-01 -6.7855930e-01] [-8.4720975e-01 1.3414435e+00 1.2008967e+00 1.4254763e+00 9.4615901e-01] [ 1.3469023e+00 8.6196357e-01 -1.3291410e-01 -4.7211242e+00 -1.6566180e-01] [ 9.8128152e-01 -1.8682624e+00 -5.2974761e-01 -3.5275695e+00 1.2897007e+00]] [[-5.0322795e-01 -2.8811262e+00 -1.8198717e-01 4.3825359e+00 1.5293379e+00] [ 1.1752242e-01 1.1218099e-01 3.2022686e+00 -7.3072934e-01 8.6098653e-01] [ 1.7291746e+00 1.5004992e-01 3.3828519e-02 -7.4820572e-01 3.1439355e-01] [ 8.1930029e-01 -3.5905359e+00 1.4172468e+00 3.8353353e+00 1.0983165e-01] [ 2.1730845e+00 4.6327606e-01 1.0827856e+00 -3.1881871e+00 -1.4847460e+00]]]]]; ov_res: [[[[[-1.2197301e+00 7.3614436e-01 -1.1627654e+00 -2.0217607e+00 1.4740433e+00] [-1.9565518e+00 2.5090394e+00 2.4390287e+00 -2.7703610e-01 2.4950857e+00] [ 4.5492787e+00 1.3031622e+00 5.8430928e-01 8.7441111e-01 -7.2614676e-01] [-1.4465666e+00 -3.2253990e-01 5.1928699e-01 4.4762740e+00 -9.2693514e-01] [-9.4391346e-01 3.2354219e+00 -7.1179670e-01 -9.4970459e-01 -1.9983964e+00]] [[-5.8970523e-01 3.1969359e+00 1.9924344e+00 4.2681427e+00 -1.8711773e+00] [-9.5675516e-01 4.3528670e-01 2.8804049e-01 -1.0073961e+00 -1.5361938e+00] [-9.5478076e-01 1.0781238e+00 1.6372104e+00 1.4257023e+00 -1.1773226e+00] [-1.5208412e+00 -1.2256769e+00 5.6878503e-02 1.6862720e+00 -2.5432234e+00] [ 9.4945967e-02 1.8211225e+00 -9.5457911e-01 -7.0367587e-01 -4.8634571e-01]] [[-6.3959008e-01 -1.2124979e+00 -3.1620154e+00 3.1968284e+00 4.5226259e+00] [ 1.0068481e-01 -2.9240251e+00 -1.5256828e+00 2.5950134e-01 -2.9532764e+00] [ 3.6713901e+00 4.5348051e-01 -1.5888492e+00 1.1155676e-02 -1.7816112e+00] [-6.7505056e-01 3.2357210e-01 -4.7247190e+00 -4.9490333e+00 1.9336685e+00] [-5.2211819e+00 4.9057007e+00 1.7496370e+00 -1.7942526e+00 -7.2553676e-01]] 
[[-2.0749636e+00 3.6135674e-01 -3.5826485e+00 1.8725176e+00 3.0053964e+00] [ 1.7241611e+00 -2.8442460e-01 -5.5551195e-01 -2.5644689e+00 -1.2022921e+00] [-2.2751970e+00 1.8716027e+00 -4.2905688e-01 -2.7390428e+00 8.4505880e-01] [-1.3791573e+00 -1.9661391e+00 -4.8259254e+00 -4.0296444e-01 1.5607963e+00] [ 1.5230241e+00 -2.1594943e-01 -8.2172877e-01 1.7040867e+00 1.7762412e+00]] [[-6.3508880e-01 9.6946490e-01 -1.0944467e+00 -3.6078982e+00 -6.3861203e-01] [-1.7424241e-01 -1.3208995e+00 1.0282730e+00 4.4257692e-03 7.9946417e-01] [ 6.0687631e-02 1.3976704e-01 -6.2467885e-01 4.2958817e+00 1.2903945e+00] [ 7.9391092e-02 9.2721224e-01 1.2979960e+00 -2.2263707e-01 -3.2211683e+00] [-6.2778389e-01 -2.1579876e+00 -2.2715445e+00 -4.8919684e-01 5.2070701e-01]]] [[[ 2.0807614e+00 -1.3387731e+00 3.8801582e+00 -5.1287746e-01 -1.2484065e+00] [-5.0298768e-01 5.7225579e-01 -3.0049570e+00 1.3006978e+00 6.5960479e-01] [ 1.5698278e-01 3.7089160e-01 -1.7752974e+00 1.1770976e+00 1.5144492e+00] [ 3.4139440e+00 -2.8498793e+00 -2.1229603e+00 2.1916239e+00 2.9909822e-01] [ 9.9017882e-01 1.2837174e+00 7.4018312e-01 -3.6593750e-01 -1.3391112e+00]] [[-1.3616402e+00 5.0471514e-01 -2.6678016e+00 2.3456810e+00 -1.0258532e+00] [ 3.8588569e+00 -8.2310039e-01 2.5410507e+00 -1.7477505e+00 8.3949673e-01] [-1.8862736e-01 -1.6436677e+00 2.7916616e-01 -6.4024514e-01 8.0436319e-01] [-3.3016381e+00 1.5215137e+00 1.8756042e+00 2.8997309e+00 -2.2470152e+00] [-1.0343839e+00 -1.9732263e+00 2.7336717e-01 9.8324555e-01 2.3166864e+00]] [[ 1.8042693e+00 -4.0082045e+00 -2.3393660e+00 1.6136724e-01 9.6964203e-02] [-3.1003323e+00 -1.2029926e+00 1.6019684e+00 -2.4831507e-01 -3.0998061e+00] [ 1.6072705e+00 4.3354836e+00 1.8905721e+00 1.6131446e-01 -1.1283268e+00] [ 2.5375695e+00 1.6068670e+00 1.1604203e+00 -1.7425506e+00 2.6618621e-01] [ 8.8915735e-01 1.5701482e+00 8.2974994e-01 1.1434216e+00 8.4256470e-01]] [[ 1.1452307e+00 -1.7187186e+00 -2.8219392e+00 1.0850017e+00 3.0330069e+00] [ 4.0510044e-01 1.1266038e+00 
2.6932454e+00 -1.7208321e-01 4.9812344e-01] [-1.5927333e+00 3.3661234e+00 2.0979875e-01 2.2989056e+00 1.1205587e+00] [-5.1725185e-01 -2.3978500e+00 -3.1228871e+00 -3.5818901e+00 2.1274445e+00] [-3.3621147e-01 5.5976498e-01 3.9674377e-01 1.2950716e+00 2.3985758e+00]] [[-2.9109755e+00 -7.8768879e-01 8.4175438e-01 1.5045086e+00 -3.5142229e+00] [-2.6968377e+00 -2.6320326e+00 -3.2970828e-01 -1.3439660e+00 -1.5267813e+00] [ 2.6310718e-01 -2.1339006e+00 -1.4453711e-01 2.2129240e+00 -1.7039086e+00] [-8.8326961e-01 -4.5344296e+00 8.5100621e-01 2.1256430e+00 -2.8499041e+00] [ 1.9296662e+00 -2.4713268e+00 -8.2051522e-01 -1.2535825e+00 -1.4970303e+00]]] [[[ 3.6090133e+00 -2.0923452e+00 2.5579712e+00 1.9413208e+00 -1.8385129e+00] [-1.9415892e+00 -8.8255870e-01 -4.8844981e+00 2.4401867e+00 -7.6491547e-01] [-2.5068612e+00 -5.3961670e-01 -3.3940570e+00 1.1659664e+00 6.5219730e-01] [ 2.0609138e+00 -4.5209721e-01 -1.9957299e+00 -5.3619957e+00 -1.3408814e-01] [ 4.4355025e+00 3.8044938e-01 -2.7872400e+00 -1.5933961e+00 1.0298058e+00]] [[-9.4604649e-02 -2.3365808e+00 -2.5796764e+00 -2.6323559e+00 5.1050156e-01] [ 4.8775855e-02 -1.2616212e+00 1.4963235e+00 -8.8049811e-01 -3.5302046e-01] [-1.3486575e-01 -3.6845992e+00 3.7039638e-01 -1.4370999e+00 8.7430614e-01] [-3.1307592e+00 2.9673612e+00 1.9131117e+00 -8.9452493e-01 -1.9785912e+00] [-1.0549151e-01 -1.7908063e+00 1.5017073e+00 -9.9975747e-01 9.4083899e-01]] [[-3.0382299e-01 -2.2180178e+00 -1.9373053e+00 -3.7114301e-01 -2.7046118e+00] [-3.5206583e+00 3.3578124e+00 4.2405119e+00 -9.2633700e-01 7.7251363e-01] [-2.2377446e+00 3.7848272e+00 1.5659324e+00 7.0771247e-01 -9.1418588e-01] [ 7.0822768e-02 8.6304003e-01 5.9513456e-01 7.0493877e-01 -1.5636305e+00] [ 6.7448697e+00 -1.0012969e+00 2.0474465e+00 2.4760165e+00 1.8495336e+00]] [[ 1.5634857e+00 -2.8299880e+00 1.4970605e+00 -9.8393220e-01 2.2411423e+00] [ 1.6766067e+00 1.0745523e+00 1.1858357e+00 5.9343678e-01 -6.7855930e-01] [-8.4720975e-01 1.3414435e+00 1.2008967e+00 1.4254763e+00 
9.4615901e-01] [ 1.3469023e+00 8.6196357e-01 -1.3291410e-01 -4.7211242e+00 -1.6566180e-01] [ 9.8128152e-01 -1.8682624e+00 -5.2974761e-01 -3.5275695e+00 1.2897007e+00]] [[-5.0322795e-01 -2.8811262e+00 -1.8198717e-01 4.3825359e+00 1.5293379e+00] [ 1.1752242e-01 1.1218099e-01 3.2022686e+00 -7.3072934e-01 8.6098653e-01] [ 1.7291746e+00 1.5004992e-01 3.3828519e-02 -7.4820572e-01 3.1439355e-01] [ 8.1930029e-01 -3.5905359e+00 1.4172468e+00 3.8353353e+00 1.0983165e-01] [ 2.1730845e+00 4.6327606e-01 1.0827856e+00 -3.1881871e+00 -1.4847460e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 1, 1], 'pads': [0, 0, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_934.aten__convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 1]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.1836 (2,1,1,.,.) = -0.5969 (3,1,1,.,.) = -0.8392 (1,2,1,.,.) = 0.01 * -5.0913 (2,2,1,.,.) = 0.9765 (3,2,1,.,.) = 0.4679 (1,3,1,.,.) = 0.3806 (2,3,1,.,.) = -2.1222 (3,3,1,.,.) = 0.1660 [ CPUFloatType{3,3,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups, %self.transposed, %self.transposed, %self.transposed, %self.transposed) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[ 0.00000000e+00 6.07677579e-01 -4.98035461e-01 ... 3.83503251e-02 4.19921316e-02 0.00000000e+00] [ 0.00000000e+00 -3.21919113e-01 -2.39517033e-01 ... 1.37461758e+00 -1.92281738e-01 0.00000000e+00] [ 0.00000000e+00 1.00990796e+00 -3.45414072e-01 ... -4.69799116e-02 -1.12949125e-01 0.00000000e+00] ... [ 0.00000000e+00 -5.56823611e-01 -7.22501457e-01 ... 4.91833478e-01 5.88528514e-01 0.00000000e+00] [ 0.00000000e+00 2.45435461e-02 -2.31548503e-01 ... -4.07919616e-01 -3.51473093e-01 0.00000000e+00] [ 0.00000000e+00 -7.62419105e-02 9.23397064e-01 ... 6.62008449e-02 -5.35099357e-02 0.00000000e+00]] [[ 0.00000000e+00 7.75723606e-02 9.21344087e-02 ... -8.93764913e-01 1.11455284e-01 0.00000000e+00] [ 0.00000000e+00 4.19285595e-02 -1.94640040e-01 ... 
-5.75754881e-01 -8.14692080e-01 0.00000000e+00] [ 0.00000000e+00 2.38989294e-01 1.35515198e-01 ... 3.92767370e-01 -8.95074531e-02 0.00000000e+00] ... [ 0.00000000e+00 1.24334760e-01 1.96556777e-01 ... 7.86401518e-03 2.15838343e-01 0.00000000e+00] [ 0.00000000e+00 -1.53252289e-01 -4.93834704e-01 ... -6.56956155e-03 -2.25910872e-01 0.00000000e+00] [ 0.00000000e+00 1.38595715e-01 7.10183501e-01 ... 8.02283436e-02 -3.49665970e-01 0.00000000e+00]] [[ 0.00000000e+00 -3.03798225e-02 -5.08500874e-01 ... 5.13120234e-01 -7.56996155e-01 0.00000000e+00] [ 0.00000000e+00 2.07596943e-01 -3.03137571e-01 ... -4.55567628e-01 3.75746161e-01 0.00000000e+00] [ 0.00000000e+00 6.29108906e-01 8.32929313e-02 ... -3.80754352e-01 -1.92620248e-01 0.00000000e+00] ... [ 0.00000000e+00 3.05684328e-01 -1.21845375e-03 ... 1.87315822e-01 3.23275000e-01 0.00000000e+00] [ 0.00000000e+00 2.66783703e-02 -3.72568190e-01 ... -1.90498769e-01 3.42266518e-03 0.00000000e+00] [ 0.00000000e+00 5.45189023e-01 -4.63719405e-02 ... -1.65366661e-02 -1.14213571e-01 0.00000000e+00]] [[ 0.00000000e+00 -1.66506656e-02 4.74887379e-02 ... 2.79833794e-01 4.88813877e-01 0.00000000e+00] [ 0.00000000e+00 6.37545049e-01 -2.42971063e-01 ... 3.29489082e-01 4.99786250e-02 0.00000000e+00] [ 0.00000000e+00 -1.55193031e-01 3.36663097e-01 ... 6.75642848e-01 -4.44658279e-01 0.00000000e+00] ... [ 0.00000000e+00 -5.99927247e-01 -4.80919719e-01 ... -7.53770649e-01 -4.35909182e-01 0.00000000e+00] [ 0.00000000e+00 -5.53571641e-01 3.24663311e-01 ... 8.63920674e-02 1.77414734e-02 0.00000000e+00] [ 0.00000000e+00 6.54108524e-02 1.38112098e-01 ... 2.35701650e-01 8.51091504e-01 0.00000000e+00]] [[ 0.00000000e+00 2.69985765e-01 -1.59955025e-01 ... -1.39005827e-02 -4.94284123e-01 0.00000000e+00] [ 0.00000000e+00 -4.65972543e-01 1.92538559e-01 ... -3.01856279e-01 -7.09425032e-01 0.00000000e+00] [ 0.00000000e+00 -8.57834578e-01 -1.85156241e-01 ... 7.87725627e-01 2.71536410e-01 0.00000000e+00] ... 
[ 0.00000000e+00 -7.38868773e-01 -8.58589530e-01 ... -3.99793267e-01 2.29476929e-01 0.00000000e+00] [ 0.00000000e+00 -4.46592957e-01 -2.84167707e-01 ... 6.18836991e-02 1.78463966e-01 0.00000000e+00] [ 0.00000000e+00 -2.04761654e-01 -1.70442954e-01 ... -1.38044963e-02 -5.75175583e-02 0.00000000e+00]]] [[[ 0.00000000e+00 -4.98208380e+00 2.14026737e+00 ... 3.49255912e-02 -2.05888525e-02 0.00000000e+00] [ 0.00000000e+00 1.63994765e+00 1.86248100e+00 ... -8.41103840e+00 2.08656621e+00 0.00000000e+00] [ 0.00000000e+00 -4.43680811e+00 9.65239823e-01 ... 5.37520409e-01 9.61926877e-01 0.00000000e+00] ... [ 0.00000000e+00 2.84582806e+00 4.98960304e+00 ... -3.71545839e+00 -2.86082911e+00 0.00000000e+00] [ 0.00000000e+00 -2.32694462e-01 8.66732478e-01 ... 2.35156822e+00 1.80212045e+00 0.00000000e+00] [ 0.00000000e+00 8.04846823e-01 -4.31953955e+00 ... -2.13937545e+00 3.96107137e-02 0.00000000e+00]] [[ 0.00000000e+00 4.28442895e-01 -1.41205347e+00 ... 5.44440842e+00 -1.61077309e+00 0.00000000e+00] [ 0.00000000e+00 5.36430717e-01 8.60367239e-01 ... 3.02033496e+00 5.12853479e+00 0.00000000e+00] [ 0.00000000e+00 -6.49230719e-01 -6.50146008e-01 ... -2.02252603e+00 1.73129153e+00 0.00000000e+00] ... [ 0.00000000e+00 -2.67697394e-01 -4.64645386e-01 ... -1.79456139e+00 -3.19464564e+00 0.00000000e+00] [ 0.00000000e+00 1.14238667e+00 2.06563401e+00 ... 1.70292303e-01 1.21140814e+00 0.00000000e+00] [ 0.00000000e+00 -7.99253523e-01 -3.49179912e+00 ... 1.34143174e+00 1.81312370e+00 0.00000000e+00]] [[ 0.00000000e+00 6.20456755e-01 2.54794645e+00 ... -2.21578121e+00 2.69074345e+00 0.00000000e+00] [ 0.00000000e+00 -2.22029614e+00 2.39758611e+00 ... 1.40167236e+00 -2.23261547e+00 0.00000000e+00] [ 0.00000000e+00 -3.92777729e+00 8.17202151e-01 ... 2.02360702e+00 1.70688081e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.22589588e+00 2.30524633e-02 ... -4.65005338e-01 -1.44664502e+00 0.00000000e+00] [ 0.00000000e+00 4.81703550e-01 2.78980803e+00 ... 
1.09857202e+00 -1.23366439e+00 0.00000000e+00] [ 0.00000000e+00 -2.02829266e+00 4.65658218e-01 ... 8.62107351e-02 -1.04005858e-01 0.00000000e+00]] [[ 0.00000000e+00 -7.51359701e-01 -7.13073134e-01 ... -1.24163342e+00 -2.25682402e+00 0.00000000e+00] [ 0.00000000e+00 -3.87960315e+00 1.67196548e+00 ... -3.38709641e+00 -3.86505574e-02 0.00000000e+00] [ 0.00000000e+00 1.13166165e+00 -2.76702332e+00 ... -4.97874784e+00 2.91040993e+00 0.00000000e+00] ... [ 0.00000000e+00 3.42832112e+00 3.01345205e+00 ... 3.95759988e+00 2.51492405e+00 0.00000000e+00] [ 0.00000000e+00 2.91961575e+00 1.08550571e-01 ... 1.70733005e-01 -9.06610906e-01 0.00000000e+00] [ 0.00000000e+00 -8.48357260e-01 -9.81083095e-01 ... -8.84312689e-01 -5.23479939e+00 0.00000000e+00]] [[ 0.00000000e+00 -5.97009242e-01 1.37020302e+00 ... -7.30704665e-01 2.69591093e+00 0.00000000e+00] [ 0.00000000e+00 2.85501385e+00 -5.13875306e-01 ... 1.52854013e+00 3.44424033e+00 0.00000000e+00] [ 0.00000000e+00 5.79638815e+00 1.05426753e+00 ... -4.02059317e+00 -1.53603506e+00 0.00000000e+00] ... [ 0.00000000e+00 5.23142147e+00 4.12926912e+00 ... 1.31019366e+00 2.42213801e-01 0.00000000e+00] [ 0.00000000e+00 1.64612556e+00 1.33373666e+00 ... -2.62750477e-01 -3.12225163e-01 0.00000000e+00] [ 0.00000000e+00 1.56216574e+00 6.68234766e-01 ... -4.20698039e-02 1.72155225e+00 0.00000000e+00]]] [[[ 0.00000000e+00 -1.01800382e+00 -4.79209721e-01 ... -1.58318627e+00 6.74981356e-01 0.00000000e+00] [ 0.00000000e+00 -2.69403398e-01 4.59899724e-01 ... 5.44476688e-01 1.02974129e+00 0.00000000e+00] [ 0.00000000e+00 -1.86966196e-01 -1.16764829e-02 ... 7.33130932e-01 4.06396270e-01 0.00000000e+00] ... [ 0.00000000e+00 -3.25806737e-01 9.91551638e-01 ... -4.46260691e-01 -5.78251302e-01 0.00000000e+00] [ 0.00000000e+00 1.39607036e+00 -7.95527324e-02 ... 1.60856128e+00 1.65135920e+00 0.00000000e+00] [ 0.00000000e+00 -2.90804714e-01 -2.35478067e+00 ... 1.48085976e+00 4.97422367e-01 0.00000000e+00]] [[ 0.00000000e+00 -2.15728104e-01 3.80885959e-01 ... 
4.67560560e-01 1.79220349e-01 0.00000000e+00] [ 0.00000000e+00 1.16152108e+00 -1.28677332e+00 ... 4.51930225e-01 -7.10751116e-01 0.00000000e+00] [ 0.00000000e+00 -8.24979365e-01 3.50046813e-01 ... 7.80233145e-01 1.66048849e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.13519466e+00 -5.97514538e-03 ... -1.04983997e+00 -2.21707511e+00 0.00000000e+00] [ 0.00000000e+00 1.10454857e+00 4.29035276e-01 ... -4.04764861e-01 -1.14370859e+00 0.00000000e+00] [ 0.00000000e+00 2.34419727e+00 5.18599391e-01 ... -1.14357817e+00 -9.86464098e-02 0.00000000e+00]] [[ 0.00000000e+00 -2.61587501e-01 -1.57325238e-01 ... -1.43281984e+00 -1.01526177e+00 0.00000000e+00] [ 0.00000000e+00 6.00420952e-01 -1.56150341e-01 ... -1.56557524e+00 -1.12638682e-01 0.00000000e+00] [ 0.00000000e+00 -6.28199577e-01 -1.21024239e+00 ... 1.63952184e+00 6.74372911e-01 0.00000000e+00] ... [ 0.00000000e+00 6.67355582e-03 -4.34632421e-01 ... -2.30999589e+00 -1.01827629e-01 0.00000000e+00] [ 0.00000000e+00 -1.57200113e-01 8.65302026e-01 ... 2.61526918e+00 4.62141559e-02 0.00000000e+00] [ 0.00000000e+00 -8.25352430e-01 -7.83067405e-01 ... 2.13156557e+00 -1.70434558e+00 0.00000000e+00]] [[ 0.00000000e+00 1.19568360e+00 6.20738685e-01 ... 1.34481168e+00 8.85111392e-01 0.00000000e+00] [ 0.00000000e+00 -4.21271682e-01 1.92694890e+00 ... -1.46536618e-01 1.75511575e+00 0.00000000e+00] [ 0.00000000e+00 -3.00132483e-01 -6.31322682e-01 ... 4.50261347e-02 1.36098936e-01 0.00000000e+00] ... [ 0.00000000e+00 1.69670951e+00 -1.07949130e-01 ... 6.29314125e-01 1.55830193e+00 0.00000000e+00] [ 0.00000000e+00 -1.87566829e+00 1.36299956e+00 ... -1.76031148e+00 4.28286612e-01 0.00000000e+00] [ 0.00000000e+00 8.89738500e-01 -3.16144764e-01 ... -5.01201570e-01 -1.76670086e+00 0.00000000e+00]] [[ 0.00000000e+00 -1.40441358e-01 1.35257208e+00 ... -1.09723642e-01 4.95778263e-01 0.00000000e+00] [ 0.00000000e+00 -1.19346702e+00 4.71004277e-01 ... 
9.58093643e-01 -1.09255493e+00 0.00000000e+00] [ 0.00000000e+00 -4.36573327e-01 -3.19605410e-01 ... -8.42588663e-01 -1.82059869e-01 0.00000000e+00] ... [ 0.00000000e+00 -9.19005275e-02 1.61083579e+00 ... 1.22603782e-01 2.24125385e+00 0.00000000e+00] [ 0.00000000e+00 4.03567314e-01 1.60283089e+00 ... -5.36880255e-01 1.33336020e+00 0.00000000e+00] [ 0.00000000e+00 1.57063043e+00 1.08221419e-01 ... 2.66826093e-01 -1.11045790e+00 0.00000000e+00]]]]]; ov_res: [[[[[ 0.00000000e+00 6.07677579e-01 -4.98035461e-01 ... 3.83503251e-02 4.19921316e-02 0.00000000e+00] [ 0.00000000e+00 -3.21919113e-01 -2.39517033e-01 ... 1.37461758e+00 -1.92281738e-01 0.00000000e+00] [ 0.00000000e+00 1.00990796e+00 -3.45414072e-01 ... -4.69799116e-02 -1.12949125e-01 0.00000000e+00] ... [ 0.00000000e+00 -5.56823611e-01 -7.22501457e-01 ... 4.91833478e-01 5.88528514e-01 0.00000000e+00] [ 0.00000000e+00 2.45435461e-02 -2.31548503e-01 ... -4.07919616e-01 -3.51473093e-01 0.00000000e+00] [ 0.00000000e+00 -7.62419105e-02 9.23397064e-01 ... 6.62008449e-02 -5.35099357e-02 0.00000000e+00]] [[ 0.00000000e+00 7.75723606e-02 9.21344087e-02 ... -8.93764913e-01 1.11455284e-01 0.00000000e+00] [ 0.00000000e+00 4.19285595e-02 -1.94640040e-01 ... -5.75754881e-01 -8.14692080e-01 0.00000000e+00] [ 0.00000000e+00 2.38989294e-01 1.35515198e-01 ... 3.92767370e-01 -8.95074531e-02 0.00000000e+00] ... [ 0.00000000e+00 1.24334760e-01 1.96556777e-01 ... 7.86401518e-03 2.15838343e-01 0.00000000e+00] [ 0.00000000e+00 -1.53252289e-01 -4.93834704e-01 ... -6.56956155e-03 -2.25910872e-01 0.00000000e+00] [ 0.00000000e+00 1.38595715e-01 7.10183501e-01 ... 8.02283436e-02 -3.49665970e-01 0.00000000e+00]] [[ 0.00000000e+00 -3.03798225e-02 -5.08500874e-01 ... 5.13120234e-01 -7.56996155e-01 0.00000000e+00] [ 0.00000000e+00 2.07596943e-01 -3.03137571e-01 ... -4.55567628e-01 3.75746161e-01 0.00000000e+00] [ 0.00000000e+00 6.29108906e-01 8.32929313e-02 ... -3.80754352e-01 -1.92620248e-01 0.00000000e+00] ... 
[ 0.00000000e+00 3.05684328e-01 -1.21845375e-03 ... 1.87315822e-01 3.23275000e-01 0.00000000e+00] [ 0.00000000e+00 2.66783703e-02 -3.72568190e-01 ... -1.90498769e-01 3.42266518e-03 0.00000000e+00] [ 0.00000000e+00 5.45189023e-01 -4.63719405e-02 ... -1.65366661e-02 -1.14213571e-01 0.00000000e+00]] [[ 0.00000000e+00 -1.66506656e-02 4.74887379e-02 ... 2.79833794e-01 4.88813877e-01 0.00000000e+00] [ 0.00000000e+00 6.37545049e-01 -2.42971063e-01 ... 3.29489082e-01 4.99786250e-02 0.00000000e+00] [ 0.00000000e+00 -1.55193031e-01 3.36663097e-01 ... 6.75642848e-01 -4.44658279e-01 0.00000000e+00] ... [ 0.00000000e+00 -5.99927247e-01 -4.80919719e-01 ... -7.53770649e-01 -4.35909182e-01 0.00000000e+00] [ 0.00000000e+00 -5.53571641e-01 3.24663311e-01 ... 8.63920674e-02 1.77414734e-02 0.00000000e+00] [ 0.00000000e+00 6.54108524e-02 1.38112098e-01 ... 2.35701650e-01 8.51091504e-01 0.00000000e+00]] [[ 0.00000000e+00 2.69985765e-01 -1.59955025e-01 ... -1.39005827e-02 -4.94284123e-01 0.00000000e+00] [ 0.00000000e+00 -4.65972543e-01 1.92538559e-01 ... -3.01856279e-01 -7.09425032e-01 0.00000000e+00] [ 0.00000000e+00 -8.57834578e-01 -1.85156241e-01 ... 7.87725627e-01 2.71536410e-01 0.00000000e+00] ... [ 0.00000000e+00 -7.38868773e-01 -8.58589530e-01 ... -3.99793267e-01 2.29476929e-01 0.00000000e+00] [ 0.00000000e+00 -4.46592957e-01 -2.84167707e-01 ... 6.18836991e-02 1.78463966e-01 0.00000000e+00] [ 0.00000000e+00 -2.04761654e-01 -1.70442954e-01 ... -1.38044963e-02 -5.75175583e-02 0.00000000e+00]]] [[[ 0.00000000e+00 -4.98208380e+00 2.14026737e+00 ... 3.49255912e-02 -2.05888525e-02 0.00000000e+00] [ 0.00000000e+00 1.63994765e+00 1.86248100e+00 ... -8.41103840e+00 2.08656621e+00 0.00000000e+00] [ 0.00000000e+00 -4.43680811e+00 9.65239823e-01 ... 5.37520409e-01 9.61926877e-01 0.00000000e+00] ... [ 0.00000000e+00 2.84582806e+00 4.98960304e+00 ... -3.71545839e+00 -2.86082911e+00 0.00000000e+00] [ 0.00000000e+00 -2.32694462e-01 8.66732478e-01 ... 
2.35156822e+00 1.80212045e+00 0.00000000e+00] [ 0.00000000e+00 8.04846823e-01 -4.31953955e+00 ... -2.13937545e+00 3.96107137e-02 0.00000000e+00]] [[ 0.00000000e+00 4.28442895e-01 -1.41205347e+00 ... 5.44440842e+00 -1.61077309e+00 0.00000000e+00] [ 0.00000000e+00 5.36430717e-01 8.60367239e-01 ... 3.02033496e+00 5.12853479e+00 0.00000000e+00] [ 0.00000000e+00 -6.49230719e-01 -6.50146008e-01 ... -2.02252603e+00 1.73129153e+00 0.00000000e+00] ... [ 0.00000000e+00 -2.67697394e-01 -4.64645386e-01 ... -1.79456139e+00 -3.19464564e+00 0.00000000e+00] [ 0.00000000e+00 1.14238667e+00 2.06563401e+00 ... 1.70292303e-01 1.21140814e+00 0.00000000e+00] [ 0.00000000e+00 -7.99253523e-01 -3.49179912e+00 ... 1.34143174e+00 1.81312370e+00 0.00000000e+00]] [[ 0.00000000e+00 6.20456755e-01 2.54794645e+00 ... -2.21578121e+00 2.69074345e+00 0.00000000e+00] [ 0.00000000e+00 -2.22029614e+00 2.39758611e+00 ... 1.40167236e+00 -2.23261547e+00 0.00000000e+00] [ 0.00000000e+00 -3.92777729e+00 8.17202151e-01 ... 2.02360702e+00 1.70688081e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.22589588e+00 2.30524633e-02 ... -4.65005338e-01 -1.44664502e+00 0.00000000e+00] [ 0.00000000e+00 4.81703550e-01 2.78980803e+00 ... 1.09857202e+00 -1.23366439e+00 0.00000000e+00] [ 0.00000000e+00 -2.02829266e+00 4.65658218e-01 ... 8.62107351e-02 -1.04005858e-01 0.00000000e+00]] [[ 0.00000000e+00 -7.51359701e-01 -7.13073134e-01 ... -1.24163342e+00 -2.25682402e+00 0.00000000e+00] [ 0.00000000e+00 -3.87960315e+00 1.67196548e+00 ... -3.38709641e+00 -3.86505574e-02 0.00000000e+00] [ 0.00000000e+00 1.13166165e+00 -2.76702332e+00 ... -4.97874784e+00 2.91040993e+00 0.00000000e+00] ... [ 0.00000000e+00 3.42832112e+00 3.01345205e+00 ... 3.95759988e+00 2.51492405e+00 0.00000000e+00] [ 0.00000000e+00 2.91961575e+00 1.08550571e-01 ... 1.70733005e-01 -9.06610906e-01 0.00000000e+00] [ 0.00000000e+00 -8.48357260e-01 -9.81083095e-01 ... -8.84312689e-01 -5.23479939e+00 0.00000000e+00]] [[ 0.00000000e+00 -5.97009242e-01 1.37020302e+00 ... 
-7.30704665e-01 2.69591093e+00 0.00000000e+00] [ 0.00000000e+00 2.85501385e+00 -5.13875306e-01 ... 1.52854013e+00 3.44424033e+00 0.00000000e+00] [ 0.00000000e+00 5.79638815e+00 1.05426753e+00 ... -4.02059317e+00 -1.53603506e+00 0.00000000e+00] ... [ 0.00000000e+00 5.23142147e+00 4.12926912e+00 ... 1.31019366e+00 2.42213801e-01 0.00000000e+00] [ 0.00000000e+00 1.64612556e+00 1.33373666e+00 ... -2.62750477e-01 -3.12225163e-01 0.00000000e+00] [ 0.00000000e+00 1.56216574e+00 6.68234766e-01 ... -4.20698039e-02 1.72155225e+00 0.00000000e+00]]] [[[ 0.00000000e+00 -1.01800382e+00 -4.79209721e-01 ... -1.58318627e+00 6.74981356e-01 0.00000000e+00] [ 0.00000000e+00 -2.69403398e-01 4.59899724e-01 ... 5.44476688e-01 1.02974129e+00 0.00000000e+00] [ 0.00000000e+00 -1.86966196e-01 -1.16764829e-02 ... 7.33130932e-01 4.06396270e-01 0.00000000e+00] ... [ 0.00000000e+00 -3.25806737e-01 9.91551638e-01 ... -4.46260691e-01 -5.78251302e-01 0.00000000e+00] [ 0.00000000e+00 1.39607036e+00 -7.95527324e-02 ... 1.60856128e+00 1.65135920e+00 0.00000000e+00] [ 0.00000000e+00 -2.90804714e-01 -2.35478067e+00 ... 1.48085976e+00 4.97422367e-01 0.00000000e+00]] [[ 0.00000000e+00 -2.15728104e-01 3.80885959e-01 ... 4.67560560e-01 1.79220349e-01 0.00000000e+00] [ 0.00000000e+00 1.16152108e+00 -1.28677332e+00 ... 4.51930225e-01 -7.10751116e-01 0.00000000e+00] [ 0.00000000e+00 -8.24979365e-01 3.50046813e-01 ... 7.80233145e-01 1.66048849e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.13519466e+00 -5.97514538e-03 ... -1.04983997e+00 -2.21707511e+00 0.00000000e+00] [ 0.00000000e+00 1.10454857e+00 4.29035276e-01 ... -4.04764861e-01 -1.14370859e+00 0.00000000e+00] [ 0.00000000e+00 2.34419727e+00 5.18599391e-01 ... -1.14357817e+00 -9.86464098e-02 0.00000000e+00]] [[ 0.00000000e+00 -2.61587501e-01 -1.57325238e-01 ... -1.43281984e+00 -1.01526177e+00 0.00000000e+00] [ 0.00000000e+00 6.00420952e-01 -1.56150341e-01 ... -1.56557524e+00 -1.12638682e-01 0.00000000e+00] [ 0.00000000e+00 -6.28199577e-01 -1.21024239e+00 ... 
1.63952184e+00 6.74372911e-01 0.00000000e+00] ... [ 0.00000000e+00 6.67355582e-03 -4.34632421e-01 ... -2.30999589e+00 -1.01827629e-01 0.00000000e+00] [ 0.00000000e+00 -1.57200113e-01 8.65302026e-01 ... 2.61526918e+00 4.62141559e-02 0.00000000e+00] [ 0.00000000e+00 -8.25352430e-01 -7.83067405e-01 ... 2.13156557e+00 -1.70434558e+00 0.00000000e+00]] [[ 0.00000000e+00 1.19568360e+00 6.20738685e-01 ... 1.34481168e+00 8.85111392e-01 0.00000000e+00] [ 0.00000000e+00 -4.21271682e-01 1.92694890e+00 ... -1.46536618e-01 1.75511575e+00 0.00000000e+00] [ 0.00000000e+00 -3.00132483e-01 -6.31322682e-01 ... 4.50261347e-02 1.36098936e-01 0.00000000e+00] ... [ 0.00000000e+00 1.69670951e+00 -1.07949130e-01 ... 6.29314125e-01 1.55830193e+00 0.00000000e+00] [ 0.00000000e+00 -1.87566829e+00 1.36299956e+00 ... -1.76031148e+00 4.28286612e-01 0.00000000e+00] [ 0.00000000e+00 8.89738500e-01 -3.16144764e-01 ... -5.01201570e-01 -1.76670086e+00 0.00000000e+00]] [[ 0.00000000e+00 -1.40441358e-01 1.35257208e+00 ... -1.09723642e-01 4.95778263e-01 0.00000000e+00] [ 0.00000000e+00 -1.19346702e+00 4.71004277e-01 ... 9.58093643e-01 -1.09255493e+00 0.00000000e+00] [ 0.00000000e+00 -4.36573327e-01 -3.19605410e-01 ... -8.42588663e-01 -1.82059869e-01 0.00000000e+00] ... [ 0.00000000e+00 -9.19005275e-02 1.61083579e+00 ... 1.22603782e-01 2.24125385e+00 0.00000000e+00] [ 0.00000000e+00 4.03567314e-01 1.60283089e+00 ... -5.36880255e-01 1.33336020e+00 0.00000000e+00] [ 0.00000000e+00 1.57063043e+00 1.08221419e-01 ... 2.66826093e-01 -1.11045790e+00 0.00000000e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} 1] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_936.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.9676 (2,1,1,.,.) = -1.1708 (3,1,1,.,.) = 2.0589 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%10) fw_re: [[[[[-1.0809574 0. 1.4641362 ... -1.473025 0. 2.6927612 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 4.0768476 0. -1.7084867 ... -1.4336716 0. 1.0646124 ] ... [ 4.1418176 0. -0.03324941 ... 0.2751263 0. -0.03581057] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.4201602 0. 2.1662457 ... -5.0104723 0. 0.65255314]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-2.8200285 0. -1.3304021 ... -1.024673 0. -0.6284717 ] [ 0. 0. 0. ... 0. 0. 0. ] [-1.3152019 0. 0.54398227 ... -3.2263944 0. 2.227578 ] ... [-4.580754 0. 2.6868446 ... 5.042747 0. 0.90516317] [ 0. 0. 0. ... 0. 0. 0. ] [ 2.1558158 0. -4.9238186 ... 2.5554504 0. -0.40669417]] ... [[ 3.032094 0. -0.49646157 ... -6.0613346 0. -2.5558374 ] [ 0. 0. 0. ... 0. 0. 0. ] [-3.0757473 0. 1.2387769 ... 1.4979607 0. 1.5251838 ] ... [ 4.245283 0. -2.6186757 ... 
-1.5738157 0. 2.4856317 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.42882594 0. -0.59211093 ... 2.9590952 0. -0.81017077]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-4.648563 0. -3.7174582 ... 1.0416825 0. 2.85007 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.8536421 0. -0.550329 ... 3.7605526 0. 2.5013025 ] ... [ 0.98902386 0. 1.6237185 ... 3.1094303 0. 0.03915061] [ 0. 0. 0. ... 0. 0. 0. ] [-0.4861708 0. -1.418027 ... -1.3102846 0. -2.431224 ]]]]]; ov_res: [[[[[-1.0809574 0. 1.4641362 ... -1.473025 0. 2.6927612 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 4.0768476 0. -1.7084867 ... -1.4336716 0. 1.0646124 ] ... [ 4.1418176 0. -0.03324941 ... 0.2751263 0. -0.03581057] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.4201602 0. 2.1662457 ... -5.0104723 0. 0.65255314]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-2.8200285 0. -1.3304021 ... -1.024673 0. -0.6284717 ] [ 0. 0. 0. ... 0. 0. 0. ] [-1.3152019 0. 0.54398227 ... -3.2263944 0. 2.227578 ] ... [-4.580754 0. 2.6868446 ... 5.042747 0. 0.90516317] [ 0. 0. 0. ... 0. 0. 0. ] [ 2.1558158 0. -4.9238186 ... 2.5554504 0. -0.40669417]] ... [[ 3.032094 0. -0.49646157 ... -6.0613346 0. -2.5558374 ] [ 0. 0. 0. ... 0. 0. 0. ] [-3.0757473 0. 1.2387769 ... 1.4979607 0. 1.5251838 ] ... [ 4.245283 0. -2.6186757 ... -1.5738157 0. 2.4856317 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.42882594 0. -0.59211093 ... 2.9590952 0. -0.81017077]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-4.648563 0. -3.7174582 ... 1.0416825 0. 2.85007 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.8536421 0. -0.550329 ... 3.7605526 0. 2.5013025 ] ... [ 0.98902386 0. 1.6237185 ... 3.1094303 0. 0.03915061] [ 0. 0. 0. ... 0. 0. 0. ] [-0.4861708 0. -1.418027 ... -1.3102846 0. 
-2.431224 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:True - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [1, 1, 1], 'dilations': [2, 2, 2], 'groups': 1, 'output_padding': [1, 1, 1], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_938.aten__convolution, %x.1 : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:167:139 %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 1, 1]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -2.2319 (2,1,1,.,.) = -0.2925 (3,1,1,.,.) = 1.2110 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::_convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:166:23 return (%9) fw_re: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.5106635 0. ... 2.2174098 0. -3.9751678 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. -4.5150347 0. ... -0.63009757 0. -2.1587553 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -3.2261212 0. ... 0.9395769 0. -0.7362898 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.261034 0. ... 2.4516037 0. 0.39406297] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. -5.6629477 0. ... 4.370538 0. -3.6646469 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -3.5801954 0. ... 1.3900416 0. -0.47283772]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 
0. 0. 0. ] [ 0. 0.43878916 0. ... -2.4738426 0. 0.20138504] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 1.9737924 0. ... 3.9087424 0. -2.606629 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.9139061 0. ... 1.196918 0. 1.2747757 ]]]]]; ov_res: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.5106635 0. ... 2.2174098 0. -3.9751678 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. -4.5150347 0. ... -0.63009757 0. -2.1587553 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -3.2261212 0. ... 0.9395769 0. -0.7362898 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.261034 0. ... 2.4516037 0. 0.39406297] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. -5.6629477 0. ... 4.370538 0. -3.6646469 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -3.5801954 0. ... 1.3900416 0. -0.47283772]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.43878916 0. ... -2.4738426 0. 0.20138504] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 1.9737924 0. ... 3.9087424 0. -2.606629 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.9139061 0. ... 1.196918 0. 1.2747757 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 2, 2, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_940.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.2517 -1.0973 0.2409 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, 2, 1, strides=[12, 4, 2, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[ 1.86013126e+00 -1.50934851e+00 5.05293131e-01 ... -1.97249964e-01 -3.45604920e+00 -6.67136908e-03] [-9.40700531e-01 -1.44499004e+00 -2.02032542e+00 ... -3.16211581e-02 1.65285921e+00 4.30228382e-01] [ 2.71630406e+00 4.40557927e-01 2.01383781e+00 ... 2.29227209e+00 7.18442798e-01 -1.02436173e+00] ... [ 8.39127421e-01 -1.29551792e+00 -2.57442832e+00 ... 7.06403852e-02 -1.67280567e+00 -3.06530762e+00] [-9.44598436e-01 4.63434905e-01 6.13890409e-01 ... 5.53898931e-01 1.09409928e+00 -4.24730539e+00] [-1.31523466e+00 -1.45972168e+00 -4.07614529e-01 ... -9.65204954e-01 -5.43520868e-01 2.22182012e+00]] [[ 1.56623030e+00 -1.83615196e+00 8.86293769e-01 ... 4.58520263e-01 9.38942134e-02 -4.68537807e-01] [-4.13273907e+00 -5.55026770e-01 -3.13738489e+00 ... -4.46727943e+00 -3.87645870e-01 1.68355560e+00] [-1.46059978e+00 -1.18407404e+00 3.78914022e+00 ... 6.55305099e+00 -5.34609509e+00 -2.20199871e+00] ... [ 2.58621931e+00 -1.02820516e+00 -6.69262600e+00 ... 1.81631112e+00 4.12403631e+00 -6.88898373e+00] [-7.89609075e-01 4.74963951e+00 7.81133533e-01 ... 
1.34211135e+00 2.88721728e+00 5.20749378e+00] [-2.74192542e-01 -5.50937891e-01 -1.54752815e+00 ... -2.28477859e+00 -1.38997853e+00 1.41835785e+00]] [[ 5.97055674e-01 1.11116648e+00 -2.86921382e-01 ... 2.58123493e+00 -2.08195043e+00 -2.60071850e+00] [ 3.65669751e+00 -2.96297383e+00 2.40605593e+00 ... -6.50808048e+00 -2.03668332e+00 1.47960651e+00] [-1.49384189e+00 -3.15936518e+00 2.25322533e+00 ... 4.50001925e-01 -9.32638347e-02 -2.30632037e-01] ... [-2.07197714e+00 -4.17323065e+00 -1.67056620e+00 ... -1.77812147e+00 4.28835869e+00 5.73316574e-01] [-2.83688247e-01 -2.01792270e-01 5.38354158e+00 ... 3.91782284e+00 -3.39757442e+00 -1.38264227e+00] [ 3.84485751e-01 9.21312809e-01 -3.46514285e-02 ... -2.96136713e+00 -2.42968845e+00 4.18733686e-01]] ... [[ 2.22923183e+00 3.09772754e+00 -1.97910142e+00 ... -2.03363919e+00 -5.32658398e-02 -1.90918338e+00] [-1.24162102e+00 -6.09151936e+00 -2.80062294e+00 ... -5.56342077e+00 -4.64322090e+00 1.53528368e+00] [ 3.80642700e+00 -1.13630271e+00 3.36417437e+00 ... -1.36740887e+00 9.63271260e-01 -2.21410942e+00] ... [ 7.28593779e+00 5.73434305e+00 -2.13942575e+00 ... 2.40528679e+00 -1.43466508e+00 3.68992496e+00] [-2.81419277e+00 3.52712125e-01 -1.91839063e+00 ... 1.08666253e+00 -1.94536769e+00 -2.81958699e+00] [ 1.19745779e+00 -4.39670950e-01 2.47515583e+00 ... 6.30677223e-01 -2.96130985e-01 3.12239504e+00]] [[-2.74943662e+00 -3.64906740e+00 -1.99258518e+00 ... 4.11683178e+00 -1.93858647e+00 1.21937943e+00] [-1.61521888e+00 5.43317199e-01 1.61194348e+00 ... 1.10800672e+00 -9.25537109e-01 4.31169367e+00] [-3.58605337e+00 3.61091822e-01 4.83817101e+00 ... -1.43259537e+00 1.78805208e+00 -2.74837208e+00] ... [-4.99825478e+00 -1.14517295e+00 -1.14585471e+00 ... -1.39468277e+00 -4.09381151e+00 5.75734663e+00] [ 1.20065367e+00 -7.18019605e-01 -7.97857285e-01 ... 1.85412097e+00 3.94013137e-01 -2.40976405e+00] [ 1.90092623e-01 1.87083769e+00 2.60901952e+00 ... 
-1.22022593e+00 4.63187784e-01 1.17982984e+00]] [[ 1.10331380e+00 9.14408267e-02 -1.76257014e+00 ... -4.24828339e+00 2.41962743e+00 9.30416584e-01] [-4.59028721e+00 -8.24533224e-01 5.93692064e-01 ... 4.29300261e+00 -1.28976858e+00 -1.89554024e+00] [ 1.26503611e+00 3.94160557e+00 -1.10325396e+00 ... 9.53261614e-01 3.59949255e+00 -2.46317565e-01] ... [-5.18311381e-01 -5.64910412e-01 -2.96666116e-01 ... 1.58490825e+00 -3.29437351e+00 -4.00533825e-02] [-2.71647871e-01 -2.17736673e+00 3.48671961e+00 ... 8.56298804e-01 2.26629329e+00 -2.60270268e-01] [-6.42977357e-01 2.20814258e-01 6.72930121e-01 ... -7.86228538e-01 -2.53912300e-01 -5.88473558e-01]]] [[[-1.94478393e-01 -3.11481762e+00 -9.90876794e-01 ... -1.45608938e+00 -1.96186781e+00 -4.55061615e-01] [-3.69199181e+00 -5.18826783e-01 -4.06531477e+00 ... 5.40041924e-01 9.26526308e-01 1.24133706e-01] [-1.14191294e-01 5.22241473e-01 1.34517407e+00 ... 1.89980960e+00 -6.03385687e-01 -1.21376073e+00] ... [ 2.98519254e-01 -1.64504457e+00 -4.82608318e+00 ... -6.97048485e-01 1.21132135e+00 -4.51011658e+00] [-3.48540187e+00 2.10092378e+00 -4.15147781e-01 ... -3.05103302e-01 3.24436784e-01 -5.96790314e-01] [-1.97570276e+00 -3.36869550e+00 -1.15163374e+00 ... -2.34322691e+00 -1.15473557e+00 4.34240055e+00]] [[ 1.53232288e+00 -2.24752569e+00 -3.61546099e-01 ... -2.82154799e-01 -5.81837177e+00 -1.73026156e+00] [-1.39216602e+00 1.02900505e-01 -1.65276861e+00 ... -6.19797993e+00 3.77137065e+00 -9.32850838e-01] [ 3.02880597e+00 -3.74674821e+00 3.38370156e+00 ... 1.97507453e+00 -6.34928823e-01 -5.27615738e+00] ... [ 4.15151596e+00 -3.47750974e+00 -1.80523181e+00 ... -2.42446136e+00 -1.29165888e+00 -6.22710323e+00] [-4.73551273e+00 2.73120785e+00 4.12478065e+00 ... -3.10909176e+00 -2.37278223e-01 -4.05526161e-02] [ 9.85635757e-01 -1.97364962e+00 -2.27890038e+00 ... -4.88290310e+00 -1.90958440e+00 -2.50811863e+00]] [[ 1.40239024e+00 -1.51684046e-01 -1.77888656e+00 ... 
1.04342651e+00 -2.36056709e+00 -1.79793358e+00] [-1.33209360e+00 -2.09204030e+00 2.44441485e+00 ... -5.55730438e+00 -1.70362186e+00 3.31005454e+00] [-7.08998394e+00 1.03528953e+00 -2.70456886e+00 ... 5.60899925e+00 -2.82151031e+00 -1.03659832e+00] ... [-4.16218853e+00 -5.86079979e+00 -3.31843114e+00 ... -1.07145405e+00 4.52887154e+00 -8.25164795e-01] [-5.09733856e-01 -2.42713952e+00 1.98166776e+00 ... 3.82208705e+00 -1.82929635e+00 2.60137248e+00] [-7.00295925e-01 -9.79662657e-01 -3.28792930e-01 ... -6.90577126e+00 -1.73582792e+00 8.90768290e-01]] ... [[-1.18451536e+00 2.54636216e+00 -1.38686204e+00 ... 1.64729285e+00 -7.93289661e-01 -3.88347483e+00] [ 9.17567015e-01 -3.20462155e+00 4.58103657e-01 ... -2.70757842e+00 -5.16506195e+00 1.33369017e+00] [-1.75534511e+00 -2.41019487e+00 -6.52855158e-01 ... 1.62339211e-01 8.54066491e-01 -5.39753437e+00] ... [ 7.38577008e-01 1.43676043e-01 4.09593868e+00 ... -1.32239509e+00 -4.37782097e+00 2.44854569e+00] [-3.80895305e+00 -3.19688249e+00 6.59803867e-01 ... -3.52363658e+00 3.97339034e+00 -7.57813358e+00] [ 1.84623313e+00 -6.06209755e-01 2.91184592e+00 ... 6.98166013e-01 -5.98855197e-01 3.82713294e+00]] [[-2.30511427e-01 -1.83322346e+00 -4.67041969e+00 ... -1.84560835e+00 4.11942601e-01 -3.70272470e+00] [-3.39159513e+00 8.04175496e-01 1.53980327e+00 ... -2.50412989e+00 -3.66999125e+00 -4.88545895e-01] [-1.76298618e-02 5.21780396e+00 -9.24849510e-02 ... -3.61840463e+00 3.78820014e+00 -3.21299171e+00] ... [-2.95034218e+00 1.84659958e-02 -1.34524477e+00 ... 1.30167007e-01 -5.82147980e+00 3.53005385e+00] [ 4.61601496e-01 -2.07850790e+00 -7.11135447e-01 ... -2.61546755e+00 4.17755890e+00 -6.69758320e+00] [-1.68730688e+00 4.29720163e-01 3.30334902e-01 ... -1.83072877e+00 -9.41112101e-01 -4.38692570e-01]] [[-6.70467615e-02 -3.90979433e+00 -3.86106038e+00 ... -1.84094489e-01 -2.53767633e+00 -1.53666282e+00] [-9.96163964e-01 -3.57698369e+00 1.21826530e-01 ... 
2.48769212e+00 -3.65142536e+00 -2.35778928e+00] [-1.89834762e+00 2.51344657e+00 -1.47455812e+00 ... -5.79924297e+00 9.55617666e-01 -9.88026261e-01] ... [-5.34669399e-01 2.66354537e+00 3.01692724e-01 ... 1.20623422e+00 -1.33094859e+00 9.96007919e-02] [ 1.62973523e-01 -5.32630682e-02 -4.40471268e+00 ... -2.00352240e+00 1.53952360e+00 -2.50724506e+00] [-1.49541092e+00 -1.74949968e+00 -1.98179901e+00 ... -1.01610970e+00 -2.96789336e+00 -1.41670990e+00]]] [[[-1.15370095e+00 1.43493354e+00 -8.20474625e-01 ... 5.94307899e-01 1.47527659e+00 1.61675274e-01] [ 6.92898512e-01 -8.19349647e-01 -9.63892817e-01 ... 8.22365522e-01 -7.14856744e-01 1.31887889e+00] [ 9.72347856e-01 7.83180833e-01 -1.55328894e+00 ... 2.35628343e+00 1.88818610e+00 2.60542059e+00] ... [-1.20697486e+00 2.87451935e+00 -2.70654321e+00 ... 7.52727747e-01 1.72150230e+00 1.68838191e+00] [ 2.41567755e+00 -1.16304922e+00 1.16137314e+00 ... 3.85388196e-01 -3.64159733e-01 1.84277630e+00] [-2.30985618e+00 -1.34455895e+00 9.24684107e-02 ... -3.44891518e-01 -3.29872757e-01 -1.33146107e+00]] [[ 2.00797498e-01 -2.83121061e+00 1.09021574e-01 ... -2.27974057e+00 2.14324999e+00 1.71643865e+00] [ 4.38227081e+00 -3.47931433e+00 2.96686125e+00 ... 1.85297680e+00 -4.16832399e+00 -2.65190005e-03] [-2.48317719e-02 -5.39433193e+00 1.97159481e+00 ... -1.69110489e+00 9.66896296e-01 8.03161502e-01] ... [ 4.04770517e+00 -9.72298980e-02 3.79191208e+00 ... 3.98119807e-01 4.26383823e-01 5.85568666e-01] [ 8.79274607e-01 2.33681887e-01 -1.12742186e-04 ... -3.65541369e-01 -3.39017177e+00 -6.37679148e+00] [-7.42810965e-03 9.89579320e-01 -2.22015166e+00 ... 1.24138033e+00 7.68150926e-01 -1.62678576e+00]] [[ 2.82671601e-01 -4.25548106e-01 1.38126373e+00 ... 2.07210854e-01 -3.70393848e+00 -3.03534538e-01] [ 3.02578521e+00 4.98879403e-01 -1.26741815e+00 ... 2.60126257e+00 -1.36559224e+00 -1.37256980e-02] [ 2.00506306e+00 1.38318312e+00 6.21331573e-01 ... -4.85828400e+00 1.22478157e-01 2.36930299e+00] ... 
[ 7.64374733e-01 6.61064029e-01 3.92359042e+00 ... 5.77957201e+00 -1.86201954e+00 4.44244832e-01] [-1.80654788e+00 -1.26293993e+00 -3.97861385e+00 ... 3.44966745e+00 -1.45433378e+00 2.61630559e+00] [ 1.02370179e+00 -1.98766804e+00 3.17735612e-01 ... 3.99071074e+00 -6.26753867e-02 4.83452654e+00]] ... [[ 3.49050224e-01 3.74068230e-01 8.29431176e-01 ... 1.24852598e+00 -4.92603689e-01 5.88387012e-01] [ 3.23680639e+00 4.38186407e+00 1.97180343e+00 ... 2.51361752e+00 -1.19355106e+00 -4.65042257e+00] [ 1.16891634e+00 -2.34039497e+00 -2.94805080e-01 ... -2.15220928e+00 -8.06538105e-01 -5.40723896e+00] ... [ 4.25640488e+00 3.56900311e+00 -7.93105841e-01 ... -1.36155200e+00 5.41808653e+00 -3.21017218e+00] [ 3.87900400e+00 6.01505995e+00 2.72310615e+00 ... -7.44835258e-01 -1.04121834e-01 7.47999787e-01] [-8.10768247e-01 2.17875314e+00 1.17303157e+00 ... -4.54809666e+00 1.68665695e+00 -2.43903184e+00]] [[-5.41647911e-01 3.02814484e+00 1.59793818e+00 ... 2.37480688e+00 9.69792902e-02 8.91308069e-01] [-1.50429654e+00 1.15862918e+00 -7.37690449e-01 ... -3.54816771e+00 2.10952473e+00 -3.03217459e+00] [ 1.20303631e+00 -3.49452710e+00 6.68551803e-01 ... 2.13016343e+00 2.16458052e-01 2.88930917e+00] ... [ 7.51026750e-01 9.79829431e-01 -9.55353856e-01 ... 6.10079527e+00 -2.44289303e+00 -6.90752268e-01] [-1.59974313e+00 -1.70944548e+00 3.69293308e+00 ... -1.37020254e+00 -5.64042759e+00 4.85862303e+00] [ 6.40711486e-02 9.32884216e-01 -2.57960033e+00 ... -1.78800917e+00 1.05118662e-01 -2.63836670e+00]] [[-6.84237480e-01 5.47101974e-01 -1.36169708e+00 ... -7.38101125e-01 -3.46229970e-02 8.00834894e-01] [ 2.40837073e+00 -4.85262012e+00 -3.59739923e+00 ... 3.20264339e+00 -4.92420942e-01 -1.21986675e+00] [-1.02226347e-01 -1.99253750e+00 -8.97651017e-02 ... 3.55456066e+00 -5.50675750e-01 -1.60438085e+00] ... [ 2.01837927e-01 -3.54708880e-01 2.94058299e+00 ... 1.14882648e+00 -1.85497427e+00 2.56987834e+00] [-1.62301958e-02 2.96064520e+00 2.47888923e-01 ... 
-7.28967428e-01 7.85673857e-01 2.30104828e+00] [ 1.14027452e+00 5.72874367e-01 -1.29974020e+00 ... 4.28793430e-01 7.41269946e-01 7.16494083e-01]]]]]; ov_res: [[[[[ 1.86013126e+00 -1.50934851e+00 5.05293131e-01 ... -1.97249964e-01 -3.45604920e+00 -6.67136908e-03] [-9.40700531e-01 -1.44499016e+00 -2.02032542e+00 ... -3.16211581e-02 1.65285921e+00 4.30228382e-01] [ 2.71630430e+00 4.40557867e-01 2.01383781e+00 ... 2.29227209e+00 7.18442678e-01 -1.02436173e+00] ... [ 8.39127421e-01 -1.29551804e+00 -2.57442832e+00 ... 7.06403852e-02 -1.67280567e+00 -3.06530762e+00] [-9.44598436e-01 4.63434845e-01 6.13890409e-01 ... 5.53898931e-01 1.09409928e+00 -4.24730539e+00] [-1.31523466e+00 -1.45972168e+00 -4.07614529e-01 ... -9.65204954e-01 -5.43520868e-01 2.22182012e+00]] [[ 1.56623030e+00 -1.83615208e+00 8.86293769e-01 ... 4.58520263e-01 9.38941240e-02 -4.68537778e-01] [-4.13273954e+00 -5.55026531e-01 -3.13738465e+00 ... -4.46727896e+00 -3.87646139e-01 1.68355572e+00] [-1.46059990e+00 -1.18407393e+00 3.78913975e+00 ... 6.55305052e+00 -5.34609461e+00 -2.20199871e+00] ... [ 2.58621907e+00 -1.02820516e+00 -6.69262648e+00 ... 1.81631136e+00 4.12403631e+00 -6.88898373e+00] [-7.89609075e-01 4.74963951e+00 7.81133533e-01 ... 1.34211135e+00 2.88721728e+00 5.20749378e+00] [-2.74192542e-01 -5.50937891e-01 -1.54752827e+00 ... -2.28477859e+00 -1.38997841e+00 1.41835785e+00]] [[ 5.97055674e-01 1.11116636e+00 -2.86921442e-01 ... 2.58123493e+00 -2.08195019e+00 -2.60071850e+00] [ 3.65669727e+00 -2.96297407e+00 2.40605617e+00 ... -6.50808048e+00 -2.03668332e+00 1.47960651e+00] [-1.49384165e+00 -3.15936542e+00 2.25322533e+00 ... 4.50001746e-01 -9.32637900e-02 -2.30631918e-01] ... [-2.07197738e+00 -4.17323112e+00 -1.67056620e+00 ... -1.77812183e+00 4.28835821e+00 5.73316455e-01] [-2.83688217e-01 -2.01792479e-01 5.38354158e+00 ... 3.91782284e+00 -3.39757442e+00 -1.38264251e+00] [ 3.84485811e-01 9.21312928e-01 -3.46514434e-02 ... -2.96136713e+00 -2.42968822e+00 4.18733686e-01]] ... 
[[ 2.22923207e+00 3.09772754e+00 -1.97910166e+00 ... -2.03363943e+00 -5.32658100e-02 -1.90918362e+00] [-1.24162102e+00 -6.09151983e+00 -2.80062318e+00 ... -5.56342173e+00 -4.64322042e+00 1.53528357e+00] [ 3.80642748e+00 -1.13630247e+00 3.36417484e+00 ... -1.36740887e+00 9.63271260e-01 -2.21410942e+00] ... [ 7.28593779e+00 5.73434305e+00 -2.13942575e+00 ... 2.40528679e+00 -1.43466496e+00 3.68992543e+00] [-2.81419277e+00 3.52712363e-01 -1.91839051e+00 ... 1.08666253e+00 -1.94536781e+00 -2.81958699e+00] [ 1.19745767e+00 -4.39670861e-01 2.47515583e+00 ... 6.30677223e-01 -2.96130955e-01 3.12239504e+00]] [[-2.74943662e+00 -3.64906764e+00 -1.99258530e+00 ... 4.11683226e+00 -1.93858635e+00 1.21937943e+00] [-1.61521876e+00 5.43317318e-01 1.61194348e+00 ... 1.10800660e+00 -9.25536990e-01 4.31169367e+00] [-3.58605337e+00 3.61091822e-01 4.83817101e+00 ... -1.43259561e+00 1.78805232e+00 -2.74837232e+00] ... [-4.99825478e+00 -1.14517295e+00 -1.14585483e+00 ... -1.39468277e+00 -4.09381151e+00 5.75734663e+00] [ 1.20065379e+00 -7.18019605e-01 -7.97857165e-01 ... 1.85412097e+00 3.94013077e-01 -2.40976405e+00] [ 1.90092623e-01 1.87083769e+00 2.60901952e+00 ... -1.22022593e+00 4.63187724e-01 1.17982984e+00]] [[ 1.10331380e+00 9.14408267e-02 -1.76257014e+00 ... -4.24828339e+00 2.41962743e+00 9.30416584e-01] [-4.59028721e+00 -8.24533343e-01 5.93692064e-01 ... 4.29300261e+00 -1.28976870e+00 -1.89554036e+00] [ 1.26503611e+00 3.94160557e+00 -1.10325396e+00 ... 9.53261733e-01 3.59949279e+00 -2.46317565e-01] ... [-5.18311322e-01 -5.64910352e-01 -2.96666086e-01 ... 1.58490837e+00 -3.29437351e+00 -4.00533825e-02] [-2.71647900e-01 -2.17736650e+00 3.48671961e+00 ... 8.56298804e-01 2.26629353e+00 -2.60270268e-01] [-6.42977357e-01 2.20814258e-01 6.72930121e-01 ... -7.86228538e-01 -2.53912300e-01 -5.88473558e-01]]] [[[-1.94478393e-01 -3.11481762e+00 -9.90876794e-01 ... -1.45608938e+00 -1.96186781e+00 -4.55061615e-01] [-3.69199181e+00 -5.18826842e-01 -4.06531477e+00 ... 
5.40041924e-01 9.26526546e-01 1.24133706e-01] [-1.14191294e-01 5.22241473e-01 1.34517407e+00 ... 1.89980960e+00 -6.03385687e-01 -1.21376073e+00] ... [ 2.98519254e-01 -1.64504445e+00 -4.82608318e+00 ... -6.97048485e-01 1.21132135e+00 -4.51011658e+00] [-3.48540187e+00 2.10092354e+00 -4.15147722e-01 ... -3.05103302e-01 3.24436784e-01 -5.96790135e-01] [-1.97570276e+00 -3.36869550e+00 -1.15163374e+00 ... -2.34322691e+00 -1.15473557e+00 4.34240055e+00]] [[ 1.53232288e+00 -2.24752569e+00 -3.61546159e-01 ... -2.82154858e-01 -5.81837177e+00 -1.73026156e+00] [-1.39216590e+00 1.02900624e-01 -1.65276861e+00 ... -6.19797993e+00 3.77137065e+00 -9.32850778e-01] [ 3.02880597e+00 -3.74674797e+00 3.38370204e+00 ... 1.97507453e+00 -6.34928703e-01 -5.27615738e+00] ... [ 4.15151596e+00 -3.47750974e+00 -1.80523181e+00 ... -2.42446136e+00 -1.29165912e+00 -6.22710228e+00] [-4.73551273e+00 2.73120785e+00 4.12478065e+00 ... -3.10909152e+00 -2.37278044e-01 -4.05527353e-02] [ 9.85635757e-01 -1.97364950e+00 -2.27890038e+00 ... -4.88290310e+00 -1.90958428e+00 -2.50811887e+00]] [[ 1.40239024e+00 -1.51683986e-01 -1.77888656e+00 ... 1.04342651e+00 -2.36056709e+00 -1.79793358e+00] [-1.33209360e+00 -2.09204006e+00 2.44441462e+00 ... -5.55730438e+00 -1.70362186e+00 3.31005454e+00] [-7.08998394e+00 1.03528953e+00 -2.70456839e+00 ... 5.60899925e+00 -2.82151031e+00 -1.03659832e+00] ... [-4.16218853e+00 -5.86079979e+00 -3.31843090e+00 ... -1.07145405e+00 4.52887154e+00 -8.25164855e-01] [-5.09733856e-01 -2.42713952e+00 1.98166800e+00 ... 3.82208610e+00 -1.82929635e+00 2.60137248e+00] [-7.00295925e-01 -9.79662657e-01 -3.28792930e-01 ... -6.90577126e+00 -1.73582792e+00 8.90768290e-01]] ... [[-1.18451548e+00 2.54636216e+00 -1.38686204e+00 ... 1.64729309e+00 -7.93289542e-01 -3.88347483e+00] [ 9.17567015e-01 -3.20462179e+00 4.58103538e-01 ... -2.70757818e+00 -5.16506290e+00 1.33368993e+00] [-1.75534523e+00 -2.41019487e+00 -6.52855277e-01 ... 1.62339211e-01 8.54066372e-01 -5.39753437e+00] ... 
[ 7.38576889e-01 1.43676162e-01 4.09593868e+00 ... -1.32239497e+00 -4.37782097e+00 2.44854569e+00] [-3.80895305e+00 -3.19688272e+00 6.59804225e-01 ... -3.52363658e+00 3.97339082e+00 -7.57813358e+00] [ 1.84623313e+00 -6.06209755e-01 2.91184592e+00 ... 6.98166013e-01 -5.98855317e-01 3.82713342e+00]] [[-2.30511427e-01 -1.83322346e+00 -4.67041969e+00 ... -1.84560847e+00 4.11942601e-01 -3.70272446e+00] [-3.39159513e+00 8.04175377e-01 1.53980327e+00 ... -2.50412989e+00 -3.66999149e+00 -4.88545954e-01] [-1.76299810e-02 5.21780396e+00 -9.24850702e-02 ... -3.61840439e+00 3.78819966e+00 -3.21299148e+00] ... [-2.95034170e+00 1.84662342e-02 -1.34524477e+00 ... 1.30167127e-01 -5.82147980e+00 3.53005385e+00] [ 4.61601377e-01 -2.07850790e+00 -7.11135507e-01 ... -2.61546755e+00 4.17755795e+00 -6.69758320e+00] [-1.68730688e+00 4.29720163e-01 3.30334902e-01 ... -1.83072877e+00 -9.41112101e-01 -4.38692629e-01]] [[-6.70467615e-02 -3.90979433e+00 -3.86106038e+00 ... -1.84094489e-01 -2.53767633e+00 -1.53666282e+00] [-9.96163964e-01 -3.57698369e+00 1.21826530e-01 ... 2.48769212e+00 -3.65142512e+00 -2.35778952e+00] [-1.89834762e+00 2.51344633e+00 -1.47455812e+00 ... -5.79924297e+00 9.55617666e-01 -9.88026321e-01] ... [-5.34669459e-01 2.66354537e+00 3.01692724e-01 ... 1.20623446e+00 -1.33094859e+00 9.96006727e-02] [ 1.62973642e-01 -5.32630682e-02 -4.40471268e+00 ... -2.00352240e+00 1.53952360e+00 -2.50724506e+00] [-1.49541092e+00 -1.74949968e+00 -1.98179901e+00 ... -1.01610970e+00 -2.96789336e+00 -1.41670990e+00]]] [[[-1.15370095e+00 1.43493354e+00 -8.20474625e-01 ... 5.94307899e-01 1.47527659e+00 1.61675274e-01] [ 6.92898512e-01 -8.19349527e-01 -9.63892817e-01 ... 8.22365522e-01 -7.14856744e-01 1.31887877e+00] [ 9.72347856e-01 7.83180833e-01 -1.55328882e+00 ... 2.35628343e+00 1.88818610e+00 2.60542059e+00] ... [-1.20697486e+00 2.87451911e+00 -2.70654321e+00 ... 7.52727747e-01 1.72150230e+00 1.68838203e+00] [ 2.41567731e+00 -1.16304934e+00 1.16137314e+00 ... 
3.85388196e-01 -3.64159793e-01 1.84277618e+00] [-2.30985618e+00 -1.34455895e+00 9.24684107e-02 ... -3.44891518e-01 -3.29872757e-01 -1.33146107e+00]] [[ 2.00797468e-01 -2.83121061e+00 1.09021544e-01 ... -2.27974033e+00 2.14324999e+00 1.71643865e+00] [ 4.38227081e+00 -3.47931409e+00 2.96686125e+00 ... 1.85297704e+00 -4.16832399e+00 -2.65184045e-03] [-2.48317719e-02 -5.39433241e+00 1.97159469e+00 ... -1.69110513e+00 9.66896296e-01 8.03161502e-01] ... [ 4.04770517e+00 -9.72298980e-02 3.79191208e+00 ... 3.98119807e-01 4.26383942e-01 5.85568666e-01] [ 8.79274607e-01 2.33682305e-01 -1.12712383e-04 ... -3.65541369e-01 -3.39017177e+00 -6.37679100e+00] [-7.42805004e-03 9.89579439e-01 -2.22015166e+00 ... 1.24138033e+00 7.68150926e-01 -1.62678564e+00]] [[ 2.82671571e-01 -4.25548106e-01 1.38126373e+00 ... 2.07210854e-01 -3.70393848e+00 -3.03534478e-01] [ 3.02578521e+00 4.98879492e-01 -1.26741803e+00 ... 2.60126257e+00 -1.36559236e+00 -1.37256980e-02] [ 2.00506306e+00 1.38318300e+00 6.21331453e-01 ... -4.85828352e+00 1.22478113e-01 2.36930299e+00] ... [ 7.64374495e-01 6.61064029e-01 3.92359066e+00 ... 5.77957201e+00 -1.86201954e+00 4.44245070e-01] [-1.80654788e+00 -1.26293993e+00 -3.97861385e+00 ... 3.44966722e+00 -1.45433378e+00 2.61630583e+00] [ 1.02370179e+00 -1.98766780e+00 3.17735612e-01 ... 3.99071074e+00 -6.26753271e-02 4.83452654e+00]] ... [[ 3.49050224e-01 3.74068230e-01 8.29431176e-01 ... 1.24852598e+00 -4.92603630e-01 5.88386953e-01] [ 3.23680639e+00 4.38186359e+00 1.97180355e+00 ... 2.51361752e+00 -1.19355106e+00 -4.65042305e+00] [ 1.16891634e+00 -2.34039521e+00 -2.94805020e-01 ... -2.15220928e+00 -8.06538224e-01 -5.40723848e+00] ... [ 4.25640488e+00 3.56900358e+00 -7.93105721e-01 ... -1.36155200e+00 5.41808605e+00 -3.21017170e+00] [ 3.87900400e+00 6.01505995e+00 2.72310615e+00 ... -7.44835258e-01 -1.04121923e-01 7.47999907e-01] [-8.10768247e-01 2.17875314e+00 1.17303157e+00 ... 
-4.54809666e+00 1.68665695e+00 -2.43903184e+00]] [[-5.41647911e-01 3.02814484e+00 1.59793818e+00 ... 2.37480688e+00 9.69792902e-02 8.91307950e-01] [-1.50429642e+00 1.15862906e+00 -7.37690568e-01 ... -3.54816771e+00 2.10952473e+00 -3.03217459e+00] [ 1.20303643e+00 -3.49452686e+00 6.68551683e-01 ... 2.13016319e+00 2.16457948e-01 2.88930917e+00] ... [ 7.51026630e-01 9.79829431e-01 -9.55353856e-01 ... 6.10079575e+00 -2.44289303e+00 -6.90752149e-01] [-1.59974325e+00 -1.70944548e+00 3.69293356e+00 ... -1.37020242e+00 -5.64042807e+00 4.85862303e+00] [ 6.40711784e-02 9.32884336e-01 -2.57960033e+00 ... -1.78800893e+00 1.05118632e-01 -2.63836670e+00]] [[-6.84237480e-01 5.47101974e-01 -1.36169708e+00 ... -7.38101125e-01 -3.46229970e-02 8.00834894e-01] [ 2.40837073e+00 -4.85262012e+00 -3.59739923e+00 ... 3.20264363e+00 -4.92420942e-01 -1.21986687e+00] [-1.02226347e-01 -1.99253774e+00 -8.97651017e-02 ... 3.55456066e+00 -5.50675869e-01 -1.60438085e+00] ... [ 2.01837957e-01 -3.54708880e-01 2.94058299e+00 ... 1.14882648e+00 -1.85497427e+00 2.56987834e+00] [-1.62301958e-02 2.96064520e+00 2.47888863e-01 ... -7.28967428e-01 7.85673857e-01 2.30104828e+00] [ 1.14027452e+00 5.72874367e-01 -1.29974020e+00 ... 4.28793430e-01 7.41269946e-01 7.16494083e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 2, 2, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_942.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.0098 0.2260 0.2021 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 2, 2, 1, strides=[12, 4, 2, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[-8.3505821e-01 -8.4916867e-02 -5.5687819e+00 ... -4.2795599e-01 3.1006932e+00 3.3388944e+00] [-2.4666959e-01 4.4331613e+00 -6.6037998e+00 ... 4.5809035e+00 4.9073684e-01 -1.2137514e+00] [ 2.8570976e+00 -1.6628516e+00 5.5248399e+00 ... 4.9692569e+00 4.8763137e+00 -4.0611110e+00] ... [-1.8407413e+00 -9.6712351e-01 -2.8852401e+00 ... -4.8984137e+00 -1.0111970e+00 -3.9855125e+00] [ 2.3495178e+00 -2.1714606e+00 -6.0533226e-01 ... -2.6109765e+00 -3.3467968e+00 -3.7655180e+00] [-1.4803143e+00 -1.8006715e+00 -3.3922613e+00 ... 1.9800061e+00 -1.0095682e+00 -2.9063318e+00]] [[-5.1716475e+00 1.3806800e+00 -1.5921214e+00 ... 1.1549153e+00 -1.4558097e+00 7.4057589e+00] [ 3.2981603e+00 6.0422921e-01 -3.8171501e+00 ... -3.4891512e+00 -2.0269680e+00 2.0814745e+00] [-4.6016293e+00 -1.1782672e+00 6.0842161e+00 ... 4.4626851e+00 -5.9656157e+00 -1.8101473e+00] ... [-6.3858604e+00 8.1197059e-01 -2.0241258e+00 ... 4.2908421e+00 3.4564431e+00 3.2691286e+00] [-2.8987997e+00 -3.7062337e+00 3.9754863e+00 ... -5.5203056e-01 -2.2509756e+00 -2.1984453e+00] [-5.4894388e-01 9.6877921e-01 1.0607495e+01 ... 
1.0907743e+00 -4.6316071e+00 -1.5905313e-01]] [[-7.8996525e+00 7.0585871e-01 2.2658103e+00 ... 1.2208385e+00 -2.1040914e+00 -3.2225531e-01] [ 1.0623890e+00 -1.9098622e+00 -2.5149620e+00 ... -1.2084436e+00 -3.2770658e-01 -1.2536042e+00] [ 4.3357134e+00 1.3879797e+00 3.7831566e+00 ... 1.7716098e-01 -6.2093115e+00 -5.0194516e+00] ... [-1.2178164e+00 1.7268940e+00 -1.3560719e+00 ... -2.8309441e+00 3.5083477e+00 -4.1887054e+00] [-2.4900670e+00 -4.7512751e+00 -3.4982860e+00 ... -2.9928505e+00 4.6386909e+00 -3.3442585e+00] [ 1.6666538e+00 -2.3673437e+00 -3.9407018e-01 ... 2.3469627e+00 2.3248918e-01 6.7077184e-01]] ... [[ 1.2481238e+00 1.7132224e+00 -7.3087549e-01 ... 2.4731021e+00 1.1877229e+00 -1.3992304e+00] [-6.0190594e-01 4.8433032e+00 1.6386101e+00 ... -1.9560138e+00 1.4707054e+00 -2.4081538e+00] [-3.1306781e-02 9.6910167e-01 6.9331205e-01 ... 4.9460258e+00 -2.9792533e+00 8.7459755e-01] ... [ 5.8155651e+00 2.2302864e+00 4.9456501e+00 ... 6.3170438e+00 -6.0156951e+00 7.6232731e-01] [-1.3340179e+00 2.4152042e-01 2.9951551e+00 ... 1.8065500e+00 -1.9767482e+00 -2.3394041e+00] [ 8.0790281e-01 2.4803619e+00 -2.7512586e-01 ... -5.8232188e+00 5.9117217e+00 9.0806758e-01]] [[ 4.7660670e+00 -3.0323470e+00 -2.0642445e+00 ... 4.2668867e+00 -2.3842292e+00 2.4751306e+00] [ 9.6438277e-01 2.6231307e-01 -2.3223424e+00 ... -7.3786426e-01 -3.3518994e+00 -3.2318335e+00] [ 3.0642638e+00 1.0308784e+00 2.1390779e+00 ... -3.5702150e+00 4.1004739e+00 2.7241242e+00] ... [ 1.6483151e+00 4.7134269e-02 8.6249277e-02 ... -8.8205385e+00 -3.4949237e-01 -3.2976440e-01] [ 5.7879043e+00 -2.5837586e+00 5.4731340e+00 ... 7.1985412e+00 -2.8494337e+00 -4.2065473e+00] [-8.7175865e+00 -2.4298034e+00 -1.9503504e+00 ... -1.6229582e-01 5.9819002e+00 -1.9989785e+00]] [[-1.1792222e+00 -2.5814278e+00 9.2052374e+00 ... 2.0534534e+00 -6.5529716e-01 -7.7803025e+00] [-3.5843761e+00 1.8716762e+00 -2.3109944e+00 ... 3.7495794e+00 -1.7908227e+00 -4.4554858e+00] [-4.4787836e+00 -1.7518777e+00 1.2104331e+00 ... 
1.6495492e+00 -5.4992208e+00 8.7969303e+00] ... [-3.9943883e-01 2.6318362e+00 -4.1821036e+00 ... -4.7388620e+00 -6.0833950e+00 -2.3428385e+00] [ 7.8024001e+00 1.9898921e+00 2.1165595e+00 ... 8.3713841e-01 -4.9319654e+00 -1.6171126e-02] [ 3.9011390e+00 5.2345014e-01 3.5795289e-01 ... -3.7067180e+00 9.6611750e-01 -4.6418152e+00]]] [[[ 1.1468478e+00 3.8341346e+00 3.8569219e+00 ... -1.2873964e+00 -3.3328792e-01 6.9858332e+00] [-5.7525578e+00 1.9872364e+00 1.3774621e+00 ... -2.1654382e+00 5.6932688e+00 -1.0662589e+01] [-7.1770024e-01 1.7278100e+00 1.3360828e+00 ... -7.2604191e-01 4.0830717e+00 2.8618112e-01] ... [ 1.2070086e+00 -3.7245822e+00 3.6454046e+00 ... 2.2858028e+00 -2.8138003e-01 -3.1484499e+00] [-1.6143684e+00 3.4004459e+00 5.8955288e+00 ... 1.0713087e+00 1.8408562e+00 3.0591943e+00] [-6.0134137e-01 2.3118446e+00 -1.9464879e+00 ... -3.1443408e+00 2.2920535e+00 9.5714915e-01]] [[-4.9325671e+00 -4.7999206e+00 -2.8235337e-01 ... 4.1384706e+00 9.9911728e+00 1.1858706e+00] [ 2.4448619e+00 -1.3845023e+00 3.2717257e+00 ... 4.5341187e+00 1.8355422e+00 -7.7572894e-01] [ 2.8905194e+00 5.9820948e+00 -5.3878450e+00 ... -2.7014914e+00 -5.0938568e+00 8.7859452e-01] ... [-2.3917129e+00 -4.1492801e+00 6.5289359e+00 ... -6.8400645e-01 7.6304440e+00 4.3412523e+00] [ 9.0933335e-01 -3.7413040e-01 -2.9153696e-01 ... 3.6752088e+00 -4.0679846e+00 -3.2597089e+00] [ 3.0054412e+00 -2.2659364e+00 1.4439557e+00 ... 3.4403963e+00 8.6613359e+00 2.5561926e-01]] [[-1.7424432e+00 1.3842633e+00 2.1106948e-01 ... 6.7179351e+00 -7.1213841e+00 -9.1216242e-01] [-4.6825166e+00 3.7643766e+00 3.0884240e+00 ... -8.1221473e-01 2.1292865e+00 -1.1865656e+00] [-3.8615141e+00 1.6860971e-01 6.8278468e-01 ... 7.5522532e+00 3.6504269e-01 7.8714740e-01] ... [ 3.5654011e+00 -5.8332562e-01 4.0232861e-01 ... 1.9572324e-01 -5.7270603e+00 -4.3689084e+00] [-1.1532229e+00 5.3833876e+00 -3.6055839e+00 ... -3.6127951e+00 5.4767494e+00 -2.6186237e+00] [-1.3869303e-01 4.9858780e+00 -3.9455228e+00 ... 
-1.1066570e+00 -4.6350069e+00 6.6247516e+00]] ... [[-2.3967602e+00 1.2420601e+00 -2.6546993e+00 ... 2.2932348e+00 -7.5194740e+00 -5.0990731e-03] [ 1.5419906e+00 -5.5130041e-01 5.5716866e-01 ... 1.0591471e+00 -6.2657893e-01 -2.3898516e+00] [ 3.4009080e+00 5.0071287e-01 3.5316679e+00 ... 6.4967513e+00 -5.2100401e+00 -1.9099262e+00] ... [ 4.2287631e+00 5.3305650e+00 3.1742496e+00 ... 3.0450583e+00 -2.6515508e+00 6.2181821e+00] [ 2.3731494e+00 -1.0155733e+00 2.0170221e+00 ... -2.1126363e-01 7.6868236e-01 1.8094455e+00] [ 3.2908549e+00 -4.7260925e-01 2.7239163e+00 ... 1.6393548e+00 -1.6588703e+00 1.0040324e+00]] [[ 1.4623802e+00 -1.2491177e+00 3.2541990e+00 ... -1.7048655e+00 -2.3412597e+00 1.5033077e+00] [-1.4467754e+00 -2.5507696e+00 -7.8839171e-01 ... -8.2923374e+00 4.6394114e+00 9.6328754e+00] [ 4.2545514e+00 5.6305366e+00 4.0535188e+00 ... -2.8996134e+00 9.6032085e+00 1.0884833e+00] ... [-1.7034180e+00 -2.3620553e+00 -3.6472874e+00 ... 4.1296673e+00 6.9690881e+00 1.8337302e+00] [-2.8616354e-01 6.0888368e-01 2.4903185e+00 ... -2.5233326e+00 -3.8246965e+00 2.3489497e+00] [ 4.6894550e+00 -3.2075813e+00 -3.3029494e+00 ... -6.7396080e-01 4.2788978e+00 9.4947875e-01]] [[ 3.4088347e+00 3.0390105e+00 3.6608400e+00 ... 3.0346584e+00 -2.7331283e+00 9.3385973e+00] [-1.2112927e+00 -8.7389412e+00 2.5843980e+00 ... -7.8275770e-02 -3.4862497e+00 3.6357462e-01] [ 1.4087428e-01 -4.0213904e+00 -1.3275764e+00 ... 5.1955419e+00 2.7201812e+00 -2.2350826e+00] ... [ 2.9823272e+00 -1.2840346e+00 1.1699901e+00 ... 8.2214069e-01 6.0158930e+00 3.8061378e+00] [-9.6992099e-01 8.0983000e+00 1.2287549e+00 ... -9.4213736e-01 2.5138776e+00 2.1887931e-01] [-3.6470046e+00 -3.5718021e+00 2.4253609e+00 ... 4.3608465e+00 1.8659573e+00 1.7997122e+00]]] [[[ 3.3602304e+00 -6.5551949e+00 -4.0229267e-01 ... 2.7720046e-01 -3.4994564e+00 5.3228106e+00] [-1.0923375e+00 3.3500926e+00 -9.4420046e-01 ... 3.1535149e+00 -7.0317369e+00 5.1083241e+00] [-2.0863209e+00 4.4667974e+00 -8.6917067e+00 ... 
-1.5970873e+00 -3.1451995e+00 1.0782186e+01] ... [ 2.1287603e+00 -2.8170824e+00 -4.8678988e-01 ... 3.1564298e+00 1.0639138e+00 -1.5929725e+00] [ 1.3330663e+00 1.8759319e+00 3.4570343e+00 ... 2.1635902e+00 -4.0802889e+00 7.4081853e-02] [-1.5364448e+00 -4.6375060e-01 -2.5390913e+00 ... 2.2560203e+00 -4.3919349e+00 2.6201594e+00]] [[-2.2902341e+00 9.3354559e-01 -5.9619794e+00 ... -1.8182282e+00 -3.2211921e+00 1.8532476e+00] [ 3.8451357e+00 -4.5452499e+00 -1.5104102e+00 ... -2.4670103e+00 1.5866148e-01 -1.0456583e+00] [-6.3130808e+00 9.1746569e-01 -9.5305115e-01 ... 3.5036592e+00 6.4161211e-01 8.7060046e-01] ... [-2.7110512e+00 -3.6645088e+00 -2.6869993e+00 ... -6.0486641e+00 -3.0465262e+00 -4.6562344e-01] [-1.1977656e-01 4.4762192e+00 2.4998875e+00 ... 2.4267685e+00 -6.2862287e+00 -3.4004998e+00] [ 1.8725729e+00 5.9990034e+00 -1.8556347e+00 ... 1.9635420e+00 -2.3189902e+00 2.7997367e+00]] [[ 1.8146025e+00 1.6934206e+00 -1.5257045e+00 ... 1.4581805e+00 -3.5297413e+00 9.1352539e+00] [-2.2739697e+00 -2.9296496e+00 3.3171501e+00 ... -3.2618539e+00 1.1080381e+00 3.2814980e+00] [-2.3952303e+00 -5.8583221e+00 3.3145866e+00 ... -3.0174234e+00 7.4097067e-01 3.2812598e-01] ... [ 1.0324861e+00 -2.4656382e+00 -7.4588609e+00 ... 1.4042926e+00 -5.2881322e+00 1.0602034e+00] [-3.1260808e+00 -6.6238151e+00 1.4484228e+00 ... 1.1704509e+00 -5.9904375e+00 -3.7557012e-01] [ 1.1074432e+00 1.1137562e+00 6.2175817e+00 ... 5.1696825e+00 -8.1012573e+00 -4.0049181e+00]] ... [[ 1.3793437e+00 -3.6076374e+00 9.4096529e-01 ... -2.2971961e+00 5.8538904e+00 3.7363043e+00] [-1.1832801e+00 -4.2472758e+00 -1.1461322e+00 ... -5.3340912e+00 1.7162580e+00 2.5702808e+00] [ 1.2464935e+00 -4.4759893e-01 -5.7223349e+00 ... 5.2919116e+00 4.6764288e+00 1.6673344e+00] ... [ 2.5607541e+00 3.4277403e+00 -5.9087491e+00 ... 3.4171784e+00 -2.3343155e+00 2.7342017e+00] [-7.3216753e+00 5.6698265e+00 -6.4143724e+00 ... 2.5379045e+00 3.4969695e+00 2.5927646e+00] [-4.0309281e+00 -1.7342080e-01 -2.2824087e+00 ... 
1.2243098e+00 6.3334155e+00 1.9857365e+00]] [[-1.0774566e+00 2.4842839e+00 1.5953268e+00 ... -1.4122550e+00 3.4551342e+00 -2.5912688e+00] [-3.1773293e+00 -2.7011876e+00 2.4841497e+00 ... -7.0822343e-02 -4.7999215e-01 2.7327161e+00] [-5.3732343e+00 -7.6288086e-01 -6.3049191e-01 ... -2.8436083e-01 -1.4154161e+00 -4.5487409e+00] ... [-4.2592921e+00 5.3846806e-01 1.6565293e+00 ... 3.8081300e-01 4.2519689e+00 1.7928666e+00] [-1.3258736e+00 2.9710078e+00 5.5964932e+00 ... 3.4158268e+00 -3.2843328e+00 -1.4018432e+00] [-4.7238374e+00 8.2869463e+00 -4.0580707e+00 ... -1.5762691e-01 -1.6903754e+00 -1.0939000e+00]] [[-7.2068697e-01 2.4526830e-01 2.6511112e-01 ... 5.9696827e+00 1.1978580e+00 -2.3335381e+00] [-5.1170319e-01 3.9788620e+00 1.9451402e+00 ... 1.1359953e+00 -4.5271111e+00 -2.0447383e+00] [ 3.9003704e+00 1.3825492e+00 4.9150686e+00 ... -1.5400826e+00 2.7408998e+00 2.8193357e+00] ... [-1.0219972e-01 1.9358779e+00 1.2501146e+00 ... 1.1104785e+00 -6.1981859e+00 2.5019963e+00] [ 3.6626449e+00 4.1612644e+00 -1.3833519e+00 ... 1.6497302e-01 2.1758413e+00 1.9288279e+00] [ 2.9564540e+00 -1.8646605e+00 -7.2688228e-01 ... 2.5650680e+00 -3.7519226e+00 -2.5433247e+00]]]]]; ov_res: [[[[[-8.35057974e-01 -8.49167481e-02 -5.56878185e+00 ... -4.27956045e-01 3.10069346e+00 3.33889413e+00] [-2.46669471e-01 4.43316126e+00 -6.60380030e+00 ... 4.58090353e+00 4.90736783e-01 -1.21375144e+00] [ 2.85709786e+00 -1.66285157e+00 5.52484035e+00 ... 4.96925735e+00 4.87631369e+00 -4.06111050e+00] ... [-1.84074116e+00 -9.67123508e-01 -2.88524032e+00 ... -4.89841366e+00 -1.01119709e+00 -3.98551297e+00] [ 2.34951758e+00 -2.17146111e+00 -6.05332255e-01 ... -2.61097646e+00 -3.34679675e+00 -3.76551747e+00] [-1.48031378e+00 -1.80067146e+00 -3.39226127e+00 ... 1.98000610e+00 -1.00956810e+00 -2.90633202e+00]] [[-5.17164803e+00 1.38067997e+00 -1.59212124e+00 ... 1.15491545e+00 -1.45580971e+00 7.40575886e+00] [ 3.29816031e+00 6.04229212e-01 -3.81715012e+00 ... 
-3.48915124e+00 -2.02696800e+00 2.08147430e+00] [-4.60162878e+00 -1.17826724e+00 6.08421612e+00 ... 4.46268463e+00 -5.96561623e+00 -1.81014740e+00] ... [-6.38586044e+00 8.11971068e-01 -2.02412558e+00 ... 4.29084206e+00 3.45644307e+00 3.26912832e+00] [-2.89879942e+00 -3.70623374e+00 3.97548652e+00 ... -5.52030563e-01 -2.25097561e+00 -2.19844532e+00] [-5.48943877e-01 9.68779206e-01 1.06074953e+01 ... 1.09077430e+00 -4.63160706e+00 -1.59052894e-01]] [[-7.89965248e+00 7.05858469e-01 2.26581073e+00 ... 1.22083843e+00 -2.10409141e+00 -3.22255224e-01] [ 1.06238925e+00 -1.90986204e+00 -2.51496196e+00 ... -1.20844364e+00 -3.27706337e-01 -1.25360394e+00] [ 4.33571339e+00 1.38797998e+00 3.78315687e+00 ... 1.77160904e-01 -6.20931149e+00 -5.01945162e+00] ... [-1.21781611e+00 1.72689402e+00 -1.35607195e+00 ... -2.83094382e+00 3.50834775e+00 -4.18870544e+00] [-2.49006724e+00 -4.75127506e+00 -3.49828649e+00 ... -2.99285054e+00 4.63869095e+00 -3.34425855e+00] [ 1.66665351e+00 -2.36734366e+00 -3.94070119e-01 ... 2.34696269e+00 2.32489273e-01 6.70771956e-01]] ... [[ 1.24812365e+00 1.71322227e+00 -7.30875492e-01 ... 2.47310209e+00 1.18772292e+00 -1.39923024e+00] [-6.01905942e-01 4.84330320e+00 1.63861012e+00 ... -1.95601380e+00 1.47070587e+00 -2.40815377e+00] [-3.13066617e-02 9.69101787e-01 6.93311572e-01 ... 4.94602585e+00 -2.97925329e+00 8.74597073e-01] ... [ 5.81556511e+00 2.23028636e+00 4.94565010e+00 ... 6.31704426e+00 -6.01569510e+00 7.62327075e-01] [-1.33401763e+00 2.41520420e-01 2.99515557e+00 ... 1.80655003e+00 -1.97674823e+00 -2.33940434e+00] [ 8.07902694e-01 2.48036194e+00 -2.75126040e-01 ... -5.82321930e+00 5.91172171e+00 9.08067346e-01]] [[ 4.76606655e+00 -3.03234673e+00 -2.06424451e+00 ... 4.26688671e+00 -2.38422918e+00 2.47513103e+00] [ 9.64382768e-01 2.62313306e-01 -2.32234287e+00 ... -7.37864256e-01 -3.35189939e+00 -3.23183322e+00] [ 3.06426382e+00 1.03087854e+00 2.13907886e+00 ... -3.57021475e+00 4.10047436e+00 2.72412443e+00] ... 
[ 1.64831519e+00 4.71340306e-02 8.62493962e-02 ... -8.82053757e+00 -3.49492252e-01 -3.29764396e-01] [ 5.78790426e+00 -2.58375859e+00 5.47313404e+00 ... 7.19854164e+00 -2.84943366e+00 -4.20654726e+00] [-8.71758652e+00 -2.42980313e+00 -1.95035040e+00 ... -1.62295818e-01 5.98190022e+00 -1.99897873e+00]] [[-1.17922223e+00 -2.58142781e+00 9.20523834e+00 ... 2.05345345e+00 -6.55297041e-01 -7.78030205e+00] [-3.58437562e+00 1.87167645e+00 -2.31099463e+00 ... 3.74957967e+00 -1.79082274e+00 -4.45548582e+00] [-4.47878361e+00 -1.75187767e+00 1.21043265e+00 ... 1.64954948e+00 -5.49922085e+00 8.79692936e+00] ... [-3.99438828e-01 2.63183618e+00 -4.18210411e+00 ... -4.73886204e+00 -6.08339453e+00 -2.34283853e+00] [ 7.80240059e+00 1.98989236e+00 2.11655951e+00 ... 8.37138295e-01 -4.93196535e+00 -1.61710065e-02] [ 3.90113902e+00 5.23450136e-01 3.57953012e-01 ... -3.70671797e+00 9.66117501e-01 -4.64181519e+00]]] [[[ 1.14684784e+00 3.83413482e+00 3.85692191e+00 ... -1.28739631e+00 -3.33287627e-01 6.98583412e+00] [-5.75255728e+00 1.98723638e+00 1.37746215e+00 ... -2.16543818e+00 5.69326878e+00 -1.06625900e+01] [-7.17700481e-01 1.72781014e+00 1.33608258e+00 ... -7.26041794e-01 4.08307171e+00 2.86181569e-01] ... [ 1.20700848e+00 -3.72458220e+00 3.64540410e+00 ... 2.28580284e+00 -2.81380028e-01 -3.14844990e+00] [-1.61436856e+00 3.40044618e+00 5.89552879e+00 ... 1.07130885e+00 1.84085596e+00 3.05919456e+00] [-6.01341605e-01 2.31184459e+00 -1.94648790e+00 ... -3.14434028e+00 2.29205394e+00 9.57149029e-01]] [[-4.93256760e+00 -4.79992056e+00 -2.82353312e-01 ... 4.13847065e+00 9.99117374e+00 1.18587053e+00] [ 2.44486165e+00 -1.38450229e+00 3.27172565e+00 ... 4.53411818e+00 1.83554232e+00 -7.75728822e-01] [ 2.89051938e+00 5.98209429e+00 -5.38784456e+00 ... -2.70149159e+00 -5.09385681e+00 8.78594398e-01] ... [-2.39171314e+00 -4.14928007e+00 6.52893591e+00 ... -6.84006214e-01 7.63044405e+00 4.34125185e+00] [ 9.09333348e-01 -3.74130458e-01 -2.91537017e-01 ... 
3.67520881e+00 -4.06798458e+00 -3.25970888e+00] [ 3.00544143e+00 -2.26593637e+00 1.44395566e+00 ... 3.44039631e+00 8.66133595e+00 2.55619377e-01]] [[-1.74244344e+00 1.38426340e+00 2.11069643e-01 ... 6.71793461e+00 -7.12138367e+00 -9.12162423e-01] [-4.68251657e+00 3.76437712e+00 3.08842397e+00 ... -8.12214851e-01 2.12928677e+00 -1.18656564e+00] [-3.86151457e+00 1.68609709e-01 6.82784855e-01 ... 7.55225277e+00 3.65042686e-01 7.87147403e-01] ... [ 3.56540132e+00 -5.83325744e-01 4.02328968e-01 ... 1.95723221e-01 -5.72705984e+00 -4.36890841e+00] [-1.15322292e+00 5.38338757e+00 -3.60558367e+00 ... -3.61279535e+00 5.47674894e+00 -2.61862350e+00] [-1.38692945e-01 4.98587847e+00 -3.94552326e+00 ... -1.10665691e+00 -4.63500643e+00 6.62475157e+00]] ... [[-2.39676046e+00 1.24206007e+00 -2.65469933e+00 ... 2.29323483e+00 -7.51947308e+00 -5.09896874e-03] [ 1.54199040e+00 -5.51300287e-01 5.57168722e-01 ... 1.05914688e+00 -6.26578927e-01 -2.38985157e+00] [ 3.40090823e+00 5.00712931e-01 3.53166771e+00 ... 6.49675083e+00 -5.21004009e+00 -1.90992641e+00] ... [ 4.22876310e+00 5.33056498e+00 3.17424965e+00 ... 3.04505873e+00 -2.65155101e+00 6.21818256e+00] [ 2.37314939e+00 -1.01557314e+00 2.01702213e+00 ... -2.11263746e-01 7.68682361e-01 1.80944550e+00] [ 3.29085517e+00 -4.72608954e-01 2.72391629e+00 ... 1.63935518e+00 -1.65887022e+00 1.00403249e+00]] [[ 1.46238005e+00 -1.24911773e+00 3.25419903e+00 ... -1.70486546e+00 -2.34125972e+00 1.50330770e+00] [-1.44677520e+00 -2.55076957e+00 -7.88391709e-01 ... -8.29233837e+00 4.63941145e+00 9.63287544e+00] [ 4.25455189e+00 5.63053656e+00 4.05351877e+00 ... -2.89961338e+00 9.60320854e+00 1.08848310e+00] ... [-1.70341778e+00 -2.36205530e+00 -3.64728737e+00 ... 4.12966728e+00 6.96908808e+00 1.83373034e+00] [-2.86163419e-01 6.08883619e-01 2.49031854e+00 ... -2.52333283e+00 -3.82469654e+00 2.34894967e+00] [ 4.68945503e+00 -3.20758152e+00 -3.30294943e+00 ... 
-6.73960686e-01 4.27889776e+00 9.49478745e-01]] [[ 3.40883470e+00 3.03901029e+00 3.66083956e+00 ... 3.03465819e+00 -2.73312950e+00 9.33859825e+00] [-1.21129298e+00 -8.73894024e+00 2.58439803e+00 ... -7.82757998e-02 -3.48624969e+00 3.63574386e-01] [ 1.40874475e-01 -4.02138996e+00 -1.32757664e+00 ... 5.19554234e+00 2.72018075e+00 -2.23508239e+00] ... [ 2.98232698e+00 -1.28403485e+00 1.16999018e+00 ... 8.22140455e-01 6.01589251e+00 3.80613804e+00] [-9.69920874e-01 8.09829998e+00 1.22875464e+00 ... -9.42137361e-01 2.51387763e+00 2.18879372e-01] [-3.64700508e+00 -3.57180142e+00 2.42536092e+00 ... 4.36084604e+00 1.86595714e+00 1.79971206e+00]]] [[[ 3.36023068e+00 -6.55519438e+00 -4.02292669e-01 ... 2.77200460e-01 -3.49945688e+00 5.32281065e+00] [-1.09233761e+00 3.35009265e+00 -9.44200456e-01 ... 3.15351510e+00 -7.03173685e+00 5.10832357e+00] [-2.08632064e+00 4.46679688e+00 -8.69170666e+00 ... -1.59708714e+00 -3.14519978e+00 1.07821865e+01] ... [ 2.12876034e+00 -2.81708193e+00 -4.86789763e-01 ... 3.15642953e+00 1.06391382e+00 -1.59297252e+00] [ 1.33306658e+00 1.87593186e+00 3.45703435e+00 ... 2.16359019e+00 -4.08028936e+00 7.40818232e-02] [-1.53644466e+00 -4.63750362e-01 -2.53909111e+00 ... 2.25602031e+00 -4.39193535e+00 2.62015939e+00]] [[-2.29023409e+00 9.33545589e-01 -5.96197987e+00 ... -1.81822824e+00 -3.22119188e+00 1.85324776e+00] [ 3.84513593e+00 -4.54524994e+00 -1.51040995e+00 ... -2.46701050e+00 1.58661455e-01 -1.04565847e+00] [-6.31308031e+00 9.17465627e-01 -9.53051150e-01 ... 3.50365973e+00 6.41612053e-01 8.70600462e-01] ... [-2.71105099e+00 -3.66450930e+00 -2.68699932e+00 ... -6.04866362e+00 -3.04652643e+00 -4.65623260e-01] [-1.19776621e-01 4.47621918e+00 2.49988747e+00 ... 2.42676854e+00 -6.28622866e+00 -3.40050006e+00] [ 1.87257278e+00 5.99900341e+00 -1.85563469e+00 ... 1.96354210e+00 -2.31899023e+00 2.79973698e+00]] [[ 1.81460261e+00 1.69342077e+00 -1.52570462e+00 ... 
1.45818055e+00 -3.52974129e+00 9.13525295e+00] [-2.27397013e+00 -2.92964935e+00 3.31715035e+00 ... -3.26185417e+00 1.10803807e+00 3.28149819e+00] [-2.39523053e+00 -5.85832310e+00 3.31458640e+00 ... -3.01742315e+00 7.40970731e-01 3.28125954e-01] ... [ 1.03248596e+00 -2.46563816e+00 -7.45886040e+00 ... 1.40429282e+00 -5.28813219e+00 1.06020331e+00] [-3.12608075e+00 -6.62381554e+00 1.44842267e+00 ... 1.17045081e+00 -5.99043798e+00 -3.75570059e-01] [ 1.10744286e+00 1.11375618e+00 6.21758223e+00 ... 5.16968155e+00 -8.10125732e+00 -4.00491810e+00]] ... [[ 1.37934351e+00 -3.60763741e+00 9.40965235e-01 ... -2.29719639e+00 5.85389042e+00 3.73630452e+00] [-1.18328023e+00 -4.24727583e+00 -1.14613247e+00 ... -5.33409166e+00 1.71625793e+00 2.57028103e+00] [ 1.24649358e+00 -4.47598994e-01 -5.72233486e+00 ... 5.29191113e+00 4.67642879e+00 1.66733444e+00] ... [ 2.56075382e+00 3.42774010e+00 -5.90874910e+00 ... 3.41717863e+00 -2.33431554e+00 2.73420143e+00] [-7.32167530e+00 5.66982651e+00 -6.41437244e+00 ... 2.53790474e+00 3.49696970e+00 2.59276485e+00] [-4.03092766e+00 -1.73420802e-01 -2.28240871e+00 ... 1.22430968e+00 6.33341551e+00 1.98573649e+00]] [[-1.07745659e+00 2.48428416e+00 1.59532678e+00 ... -1.41225493e+00 3.45513463e+00 -2.59126830e+00] [-3.17732906e+00 -2.70118785e+00 2.48414946e+00 ... -7.08225816e-02 -4.79992151e-01 2.73271608e+00] [-5.37323475e+00 -7.62880802e-01 -6.30491912e-01 ... -2.84360707e-01 -1.41541648e+00 -4.54874086e+00] ... [-4.25929213e+00 5.38468003e-01 1.65652931e+00 ... 3.80813003e-01 4.25196886e+00 1.79286659e+00] [-1.32587349e+00 2.97100759e+00 5.59649372e+00 ... 3.41582656e+00 -3.28433275e+00 -1.40184331e+00] [-4.72383738e+00 8.28694630e+00 -4.05807018e+00 ... -1.57626793e-01 -1.69037545e+00 -1.09390020e+00]] [[-7.20686853e-01 2.45268226e-01 2.65111178e-01 ... 5.96968222e+00 1.19785821e+00 -2.33353853e+00] [-5.11703193e-01 3.97886181e+00 1.94514048e+00 ... 
1.13599503e+00 -4.52711058e+00 -2.04473829e+00] [ 3.90037036e+00 1.38254905e+00 4.91506815e+00 ... -1.54008257e+00 2.74089980e+00 2.81933570e+00] ... [-1.02199838e-01 1.93587744e+00 1.25011456e+00 ... 1.11047840e+00 -6.19818592e+00 2.50199628e+00] [ 3.66264462e+00 4.16126442e+00 -1.38335192e+00 ... 1.64973035e-01 2.17584109e+00 1.92882776e+00] [ 2.95645404e+00 -1.86466074e+00 -7.26882219e-01 ... 2.56506824e+00 -3.75192237e+00 -2.54332447e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_944.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.0486 0.4606 -1.2486 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.4837 (2,1,1,.,.) = 0.01 * -7.7083 (3,1,1,.,.) = -1.0358 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[ 1.2577405 1.4492676 0.966554 ... 0.6801015 1.4350556 0.8240104 ] [ 0.48248273 0.6957102 1.1954955 ... 0.7647625 1.4801962 0.7720783 ] [ 0.78154826 1.6771281 0.6052047 ... 0.41464734 0.53463143 1.5097936 ] ... [ 1.2114776 1.1780787 1.0563196 ... 1.1591338 1.1274209 0.7808914 ] [ 1.1967787 0.22626835 1.3922675 ... 0.84816396 1.5672071 0.8753809 ] [ 1.6230667 -0.22139192 1.8985832 ... 1.684192 1.4503536 1.0057927 ]] [[ 0.54499 0.79149926 1.0216244 ... 1.5039115 1.565653 1.1791453 ] [ 1.3437029 1.3489282 0.8371485 ... 1.7488482 -0.08145356 -0.17755747] [ 1.1854066 1.2629925 0.8988714 ... 1.5483967 0.3525229 1.1417483 ] ... [ 0.25465167 0.99586844 1.0451726 ... 0.7452361 1.37466 1.1355139 ] [ 0.8356616 1.4565219 0.90065104 ... 0.5334146 1.5803893 0.523806 ] [ 1.5275207 0.8585091 0.45398086 ... 0.59940684 1.6543813 1.8209163 ]] [[ 0.48184723 0.8628652 1.0740727 ... 0.33935338 0.38350058 0.17637914] [ 1.1292416 1.3630295 0.87636685 ... 
0.87756926 0.7818909 0.52776504] [ 1.1128203 1.4566889 0.80055445 ... 1.3334078 1.0830334 0.7259344 ] ... [ 0.8570918 1.5238853 0.70583403 ... 0.6335967 0.97220784 1.4808495 ] [ 1.1667219 1.2418392 1.6027142 ... 1.9416133 -0.08546972 1.499519 ] [ 1.0132822 0.3826117 1.3895538 ... 1.1298857 1.5081333 1.6342344 ]] ... [[ 1.4660864 1.2429754 1.4250488 ... 0.8461974 1.1647077 1.4911357 ] [ 1.2454662 1.5042052 1.2710114 ... 0.96773726 0.8989666 0.3270738 ] [ 1.6534669 0.75600886 0.97017694 ... 1.3307445 1.1843472 1.2913975 ] ... [ 0.8528639 0.8660532 1.0271264 ... 1.7097101 1.4238203 1.412112 ] [ 1.4121029 0.93372905 0.79162526 ... 1.1956211 0.9963694 0.2914428 ] [ 1.601414 0.89233375 1.1479919 ... 0.554479 1.010599 1.42858 ]] [[ 0.6654595 1.7613635 1.2995248 ... 1.4845428 1.0846444 0.9881011 ] [ 1.4586664 1.5172585 0.39480257 ... 1.4156797 1.7602811 1.6914322 ] [ 1.3029059 0.30211592 1.7280222 ... 1.6006083 0.9195148 1.5427451 ] ... [ 1.806761 1.2044426 0.4269029 ... 0.7920666 0.85370576 1.755367 ] [ 1.2588282 0.7834405 1.1822437 ... 1.1218793 1.4788028 1.4186714 ] [ 0.953322 1.0335183 0.72188914 ... 0.81815815 1.9808793 0.590402 ]] [[ 0.46141398 1.2220576 0.821264 ... 1.7509011 0.8781558 1.0616838 ] [ 0.7269778 0.3455274 0.6634237 ... 1.1114197 0.6726006 1.7709641 ] [ 0.6275697 0.9482337 1.3940563 ... 0.6772013 1.1683582 1.3714831 ] ... [ 1.1022589 0.7839978 0.34851134 ... 1.2492863 0.77182853 1.319187 ] [ 0.3727908 1.1878105 1.8886642 ... 0.4602664 1.203496 0.9825048 ] [ 1.9077532 -0.7738397 0.9422095 ... 1.2558398 1.335453 0.5415733 ]]] [[[ 0.57636875 0.42135584 0.28334785 ... 0.526597 0.4778011 0.45046747] [ 0.44630125 0.4762008 0.5117436 ... 0.413252 0.43296054 0.30430788] [ 0.42696226 0.43559813 0.46479306 ... 0.4724846 0.42924723 0.5019203 ] ... [ 0.40148982 0.50163674 0.5187945 ... 0.4638959 0.40930563 0.4364009 ] [ 0.5912392 0.39768308 0.4830938 ... 0.4102939 0.48960787 0.282548 ] [ 0.44222635 0.52613556 0.37885362 ... 
0.5671391 0.50948733 0.59752905]] [[ 0.59648013 0.46199584 0.46864855 ... 0.5369628 0.3390319 0.49473456] [ 0.3879994 0.52196276 0.42951578 ... 0.5323551 0.42638832 0.53803563] [ 0.40921652 0.5606525 0.46613866 ... 0.46012226 0.43712816 0.5446175 ] ... [ 0.44202736 0.3895976 0.47061047 ... 0.40004155 0.5907403 0.47858033] [ 0.48163462 0.43552655 0.442399 ... 0.4339489 0.5203217 0.54963756] [ 0.47231328 0.50524145 0.32374138 ... 0.53741 0.3638966 0.42563993]] [[ 0.5533067 0.45126614 0.37727615 ... 0.40176588 0.5262119 0.5268433 ] [ 0.3827854 0.44405594 0.38036767 ... 0.5496805 0.49351004 0.42766505] [ 0.4968018 0.4980953 0.5618521 ... 0.36978096 0.43283623 0.52699196] ... [ 0.4069599 0.37807226 0.45334715 ... 0.62622714 0.5574154 0.5386221 ] [ 0.5756511 0.50947386 0.5927563 ... 0.63408965 0.48144057 0.47037268] [ 0.57037246 0.5753915 0.5835413 ... 0.46811053 0.45323935 0.59505844]] ... [[ 0.35202745 0.50402904 0.4634498 ... 0.38766295 0.41897437 0.4678986 ] [ 0.47170356 0.4801865 0.33832693 ... 0.5185386 0.505303 0.34275088] [ 0.53628176 0.3518864 0.4172768 ... 0.45711744 0.44977742 0.45464697] ... [ 0.46420833 0.3968832 0.53890795 ... 0.43391433 0.5985746 0.47855833] [ 0.47805738 0.39459446 0.46300468 ... 0.43328702 0.39673674 0.378353 ] [ 0.43282232 0.5085316 0.45954835 ... 0.5533857 0.48340148 0.33344907]] [[ 0.47014952 0.58484536 0.36563784 ... 0.364981 0.5561542 0.42616534] [ 0.47008023 0.5271977 0.5564053 ... 0.5691913 0.44705468 0.46527824] [ 0.44951147 0.58391887 0.40555435 ... 0.48555347 0.48510376 0.44489604] ... [ 0.39589703 0.47931373 0.46959656 ... 0.5447604 0.5044297 0.4996769 ] [ 0.3257724 0.6085076 0.38906503 ... 0.45354724 0.3937832 0.46211526] [ 0.5512911 0.47026885 0.4269181 ... 0.4577367 0.4312832 0.42097276]] [[ 0.38136113 0.60504425 0.38271222 ... 0.31859452 0.38735485 0.5438899 ] [ 0.20390591 0.49293137 0.4621454 ... 0.55706733 0.47052622 0.48848835] [ 0.39458662 0.4281964 0.5332496 ... 0.42718935 0.43351868 0.3287735 ] ... 
[ 0.48360115 0.4767717 0.37772614 ... 0.485085 0.46132177 0.50243914] [ 0.54288346 0.49072006 0.53133935 ... 0.47153714 0.44202504 0.60880125] [ 0.44399846 0.49897194 0.3940897 ... 0.50656694 0.33769062 0.3571508 ]]] [[[-0.49928445 -1.2499856 -0.9280646 ... -0.4040379 -1.3455877 0.08472395] [-0.3164189 0.05111611 -1.4175414 ... -0.514928 -0.8573611 1.1335877 ] [-2.1214385 -2.5537486 -0.91095483 ... -0.48902518 -1.569043 -3.3126822 ] ... [-1.326687 -0.26162124 -1.7397614 ... -2.4409711 -1.9779 -1.9652722 ] [-1.7535309 -0.20227885 0.39815664 ... -0.87359625 0.06955516 -1.9919119 ] [-0.20264208 -0.46925598 -1.7026955 ... 0.37763894 -1.0379124 -0.8812083 ]] [[-1.9222127 -1.2240471 -1.5786831 ... -1.8731978 -1.1919364 -1.9330467 ] [-0.8482877 -0.6732634 -0.76347506 ... -0.78838277 -0.5697377 -0.81106555] [-1.2033991 -0.15690756 -2.2298875 ... -1.5705845 -1.9523762 0.01840878] ... [-1.4553615 -1.6131761 -0.59971285 ... -0.1882776 -1.6431625 0.63617134] [-0.8384495 -3.139434 -0.72321224 ... -1.0205145 -0.3929364 -2.2720003 ] [-2.0616553 -0.5957963 -0.9049258 ... -1.9508317 -0.75285864 0.8610457 ]] [[-2.394939 -0.93760383 0.19331288 ... -0.49651533 -1.8249459 -0.40588707] [-2.064229 -2.3221693 -1.2563032 ... 0.18965244 0.03402507 -1.3665828 ] [-1.6177207 -1.3048927 -0.32966167 ... -0.97418904 -2.779408 -0.21288276] ... [-1.1361923 -0.1329037 -0.75860834 ... -0.9608843 -0.57008976 -1.0809919 ] [-2.008361 -0.73111445 -2.4571252 ... -0.8473367 -2.1322227 -0.32413638] [ 0.092489 -2.0095701 -0.9555545 ... -2.346354 -0.04532075 -0.24502444]] ... [[-1.3684576 -1.4586006 -0.0769881 ... -1.4236683 0.37268603 -1.8032479 ] [-0.12476134 -1.7968509 -2.7846963 ... -0.62546057 -3.822348 -3.0972817 ] [-1.5479817 -2.1609025 -0.3240685 ... 0.2361505 -0.3320127 -3.4388676 ] ... [-1.9697926 -0.9771267 -1.9169271 ... -0.8521252 -0.8784378 -1.3636698 ] [-2.1083298 -1.5779419 -1.3159362 ... -1.4993135 -1.8004766 -3.4522004 ] [-0.29812294 -0.9826032 -2.9798803 ... 
-3.151847 -2.4596162 -1.4873711 ]] [[-0.7081746 -1.5308661 -0.36165076 ... -1.3987403 -1.2656788 0.4145987 ] [-0.7639563 -1.4097853 -0.12857413 ... -0.9843633 -1.2860498 -2.5661497 ] [-1.4594443 -3.9156075 -2.7585583 ... -1.4214585 -0.480887 -1.0837324 ] ... [ 0.01382315 0.18434393 -0.09357405 ... -1.597917 -1.128297 -1.7042458 ] [-1.0591707 -2.3569345 -1.0293068 ... -1.0810133 -3.3020515 -1.1755373 ] [-0.4226784 -0.57919806 -0.3442793 ... -2.342999 -0.8060739 -1.9787717 ]] [[-1.1140727 0.540437 -2.2014036 ... -2.5517802 -0.16520107 -1.9323354 ] [-1.7058271 -1.3961773 -2.3588705 ... -0.7926618 -3.0446749 -1.634181 ] [ 0.02986264 -0.6528927 -0.810568 ... -2.6075537 -1.7657409 -0.12324429] ... [-1.0078573 -1.7512207 -2.0563364 ... -1.0580591 -1.3645713 -1.2789255 ] [-1.6685398 -1.4897267 -0.79355335 ... -0.6408203 -1.147849 -1.055767 ] [-0.6267686 -0.60699207 -0.94624555 ... -0.8176421 -0.5065849 0.01046336]]]]]; ov_res: [[[[[ 1.2577405 1.4492676 0.966554 ... 0.6801015 1.4350556 0.8240104 ] [ 0.48248273 0.6957102 1.1954955 ... 0.7647625 1.4801962 0.7720783 ] [ 0.78154826 1.6771281 0.6052047 ... 0.41464734 0.53463143 1.5097936 ] ... [ 1.2114776 1.1780787 1.0563196 ... 1.1591338 1.1274209 0.7808914 ] [ 1.1967787 0.22626835 1.3922675 ... 0.84816396 1.5672071 0.8753809 ] [ 1.6230667 -0.22139192 1.8985832 ... 1.684192 1.4503536 1.0057927 ]] [[ 0.54499 0.79149926 1.0216244 ... 1.5039115 1.565653 1.1791453 ] [ 1.3437029 1.3489282 0.8371485 ... 1.7488482 -0.08145356 -0.17755747] [ 1.1854066 1.2629925 0.8988714 ... 1.5483967 0.3525229 1.1417483 ] ... [ 0.25465167 0.99586844 1.0451726 ... 0.7452361 1.37466 1.1355139 ] [ 0.8356616 1.4565219 0.90065104 ... 0.5334146 1.5803893 0.523806 ] [ 1.5275207 0.8585091 0.45398086 ... 0.59940684 1.6543813 1.8209163 ]] [[ 0.48184723 0.8628652 1.0740727 ... 0.33935338 0.38350058 0.17637914] [ 1.1292416 1.3630295 0.87636685 ... 0.87756926 0.7818909 0.52776504] [ 1.1128203 1.4566889 0.80055445 ... 1.3334078 1.0830334 0.7259344 ] ... 
[ 0.8570918 1.5238853 0.70583403 ... 0.6335967 0.97220784 1.4808495 ] [ 1.1667219 1.2418392 1.6027142 ... 1.9416133 -0.08546972 1.499519 ] [ 1.0132822 0.3826117 1.3895538 ... 1.1298857 1.5081333 1.6342344 ]] ... [[ 1.4660864 1.2429754 1.4250488 ... 0.8461974 1.1647077 1.4911357 ] [ 1.2454662 1.5042052 1.2710114 ... 0.96773726 0.8989666 0.3270738 ] [ 1.6534669 0.75600886 0.97017694 ... 1.3307445 1.1843472 1.2913975 ] ... [ 0.8528639 0.8660532 1.0271264 ... 1.7097101 1.4238203 1.412112 ] [ 1.4121029 0.93372905 0.79162526 ... 1.1956211 0.9963694 0.2914428 ] [ 1.601414 0.89233375 1.1479919 ... 0.554479 1.010599 1.42858 ]] [[ 0.6654595 1.7613635 1.2995248 ... 1.4845428 1.0846444 0.9881011 ] [ 1.4586664 1.5172585 0.39480257 ... 1.4156797 1.7602811 1.6914322 ] [ 1.3029059 0.30211592 1.7280222 ... 1.6006083 0.9195148 1.5427451 ] ... [ 1.806761 1.2044426 0.4269029 ... 0.7920666 0.85370576 1.755367 ] [ 1.2588282 0.7834405 1.1822437 ... 1.1218793 1.4788028 1.4186714 ] [ 0.953322 1.0335183 0.72188914 ... 0.81815815 1.9808793 0.590402 ]] [[ 0.46141398 1.2220576 0.821264 ... 1.7509011 0.8781558 1.0616838 ] [ 0.7269778 0.3455274 0.6634237 ... 1.1114197 0.6726006 1.7709641 ] [ 0.6275697 0.9482337 1.3940563 ... 0.6772013 1.1683582 1.3714831 ] ... [ 1.1022589 0.7839978 0.34851134 ... 1.2492863 0.77182853 1.319187 ] [ 0.3727908 1.1878105 1.8886642 ... 0.4602664 1.203496 0.9825048 ] [ 1.9077532 -0.7738397 0.9422095 ... 1.2558398 1.335453 0.5415733 ]]] [[[ 0.57636875 0.42135584 0.28334785 ... 0.526597 0.4778011 0.45046747] [ 0.44630125 0.4762008 0.5117436 ... 0.413252 0.43296054 0.30430788] [ 0.42696226 0.43559813 0.46479306 ... 0.4724846 0.42924723 0.5019203 ] ... [ 0.40148982 0.50163674 0.5187945 ... 0.4638959 0.40930563 0.4364009 ] [ 0.5912392 0.39768308 0.4830938 ... 0.4102939 0.48960787 0.282548 ] [ 0.44222635 0.52613556 0.37885362 ... 0.5671391 0.50948733 0.59752905]] [[ 0.59648013 0.46199584 0.46864855 ... 0.5369628 0.3390319 0.49473456] [ 0.3879994 0.52196276 0.42951578 ... 
0.5323551 0.42638832 0.53803563] [ 0.40921652 0.5606525 0.46613866 ... 0.46012226 0.43712816 0.5446175 ] ... [ 0.44202736 0.3895976 0.47061047 ... 0.40004155 0.5907403 0.47858033] [ 0.48163462 0.43552655 0.442399 ... 0.4339489 0.5203217 0.54963756] [ 0.47231328 0.50524145 0.32374138 ... 0.53741 0.3638966 0.42563993]] [[ 0.5533067 0.45126614 0.37727615 ... 0.40176588 0.5262119 0.5268433 ] [ 0.3827854 0.44405594 0.38036767 ... 0.5496805 0.49351004 0.42766505] [ 0.4968018 0.4980953 0.5618521 ... 0.36978096 0.43283623 0.52699196] ... [ 0.4069599 0.37807226 0.45334715 ... 0.62622714 0.5574154 0.5386221 ] [ 0.5756511 0.50947386 0.5927563 ... 0.63408965 0.48144057 0.47037268] [ 0.57037246 0.5753915 0.5835413 ... 0.46811053 0.45323935 0.59505844]] ... [[ 0.35202745 0.50402904 0.4634498 ... 0.38766295 0.41897437 0.4678986 ] [ 0.47170356 0.4801865 0.33832693 ... 0.5185386 0.505303 0.34275088] [ 0.53628176 0.3518864 0.4172768 ... 0.45711744 0.44977742 0.45464697] ... [ 0.46420833 0.3968832 0.53890795 ... 0.43391433 0.5985746 0.47855833] [ 0.47805738 0.39459446 0.46300468 ... 0.43328702 0.39673674 0.378353 ] [ 0.43282232 0.5085316 0.45954835 ... 0.5533857 0.48340148 0.33344907]] [[ 0.47014952 0.58484536 0.36563784 ... 0.364981 0.5561542 0.42616534] [ 0.47008023 0.5271977 0.5564053 ... 0.5691913 0.44705468 0.46527824] [ 0.44951147 0.58391887 0.40555435 ... 0.48555347 0.48510376 0.44489604] ... [ 0.39589703 0.47931373 0.46959656 ... 0.5447604 0.5044297 0.4996769 ] [ 0.3257724 0.6085076 0.38906503 ... 0.45354724 0.3937832 0.46211526] [ 0.5512911 0.47026885 0.4269181 ... 0.4577367 0.4312832 0.42097276]] [[ 0.38136113 0.60504425 0.38271222 ... 0.31859452 0.38735485 0.5438899 ] [ 0.20390591 0.49293137 0.4621454 ... 0.55706733 0.47052622 0.48848835] [ 0.39458662 0.4281964 0.5332496 ... 0.42718935 0.43351868 0.3287735 ] ... [ 0.48360115 0.4767717 0.37772614 ... 0.485085 0.46132177 0.50243914] [ 0.54288346 0.49072006 0.53133935 ... 
0.47153714 0.44202504 0.60880125] [ 0.44399846 0.49897194 0.3940897 ... 0.50656694 0.33769062 0.3571508 ]]] [[[-0.49928445 -1.2499856 -0.9280646 ... -0.4040379 -1.3455877 0.08472395] [-0.3164189 0.05111611 -1.4175414 ... -0.514928 -0.8573611 1.1335877 ] [-2.1214385 -2.5537486 -0.91095483 ... -0.48902518 -1.569043 -3.3126822 ] ... [-1.326687 -0.26162124 -1.7397614 ... -2.4409711 -1.9779 -1.9652722 ] [-1.7535309 -0.20227885 0.39815664 ... -0.87359625 0.06955516 -1.9919119 ] [-0.20264208 -0.46925598 -1.7026955 ... 0.37763894 -1.0379124 -0.8812083 ]] [[-1.9222127 -1.2240471 -1.5786831 ... -1.8731978 -1.1919364 -1.9330467 ] [-0.8482877 -0.6732634 -0.76347506 ... -0.78838277 -0.5697377 -0.81106555] [-1.2033991 -0.15690756 -2.2298875 ... -1.5705845 -1.9523762 0.01840878] ... [-1.4553615 -1.6131761 -0.59971285 ... -0.1882776 -1.6431625 0.63617134] [-0.8384495 -3.139434 -0.72321224 ... -1.0205145 -0.3929364 -2.2720003 ] [-2.0616553 -0.5957963 -0.9049258 ... -1.9508317 -0.75285864 0.8610457 ]] [[-2.394939 -0.93760383 0.19331288 ... -0.49651533 -1.8249459 -0.40588707] [-2.064229 -2.3221693 -1.2563032 ... 0.18965244 0.03402507 -1.3665828 ] [-1.6177207 -1.3048927 -0.32966167 ... -0.97418904 -2.779408 -0.21288276] ... [-1.1361923 -0.1329037 -0.75860834 ... -0.9608843 -0.57008976 -1.0809919 ] [-2.008361 -0.73111445 -2.4571252 ... -0.8473367 -2.1322227 -0.32413638] [ 0.092489 -2.0095701 -0.9555545 ... -2.346354 -0.04532075 -0.24502444]] ... [[-1.3684576 -1.4586006 -0.0769881 ... -1.4236683 0.37268603 -1.8032479 ] [-0.12476134 -1.7968509 -2.7846963 ... -0.62546057 -3.822348 -3.0972817 ] [-1.5479817 -2.1609025 -0.3240685 ... 0.2361505 -0.3320127 -3.4388676 ] ... [-1.9697926 -0.9771267 -1.9169271 ... -0.8521252 -0.8784378 -1.3636698 ] [-2.1083298 -1.5779419 -1.3159362 ... -1.4993135 -1.8004766 -3.4522004 ] [-0.29812294 -0.9826032 -2.9798803 ... -3.151847 -2.4596162 -1.4873711 ]] [[-0.7081746 -1.5308661 -0.36165076 ... 
-1.3987403 -1.2656788 0.4145987 ] [-0.7639563 -1.4097853 -0.12857413 ... -0.9843633 -1.2860498 -2.5661497 ] [-1.4594443 -3.9156075 -2.7585583 ... -1.4214585 -0.480887 -1.0837324 ] ... [ 0.01382315 0.18434393 -0.09357405 ... -1.597917 -1.128297 -1.7042458 ] [-1.0591707 -2.3569345 -1.0293068 ... -1.0810133 -3.3020515 -1.1755373 ] [-0.4226784 -0.57919806 -0.3442793 ... -2.342999 -0.8060739 -1.9787717 ]] [[-1.1140727 0.540437 -2.2014036 ... -2.5517802 -0.16520107 -1.9323354 ] [-1.7058271 -1.3961773 -2.3588705 ... -0.7926618 -3.0446749 -1.634181 ] [ 0.02986264 -0.6528927 -0.810568 ... -2.6075537 -1.7657409 -0.12324429] ... [-1.0078573 -1.7512207 -2.0563364 ... -1.0580591 -1.3645713 -1.2789255 ] [-1.6685398 -1.4897267 -0.79355335 ... -0.6408203 -1.147849 -1.055767 ] [-0.6267686 -0.60699207 -0.94624555 ... -0.8176421 -0.5065849 0.01046336]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_946.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.8478 -0.6785 0.0703 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.9411 (2,1,1,.,.) = 1.6944 (3,1,1,.,.) = 0.3800 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[-5.53551793e-01 -7.75206149e-01 -2.72750092e+00 ... 1.10141635e-02 -7.09746957e-01 -5.02537906e-01] [-8.56902480e-01 -1.30071259e+00 1.84867072e+00 ... -1.57528186e+00 -1.30167079e+00 -3.31244588e-01] [-1.08902335e+00 -1.36885786e+00 -1.35765529e+00 ... 4.22600627e-01 1.15987182e+00 -4.97771412e-01] ... [-1.10144067e+00 -5.25798976e-01 -9.20492470e-01 ... -7.60427535e-01 -1.26357889e+00 -2.14600301e+00] [-8.45427334e-01 6.71422482e-01 9.01526213e-03 ... -1.87469244e+00 -1.28021526e+00 -6.29146934e-01] [ 4.91094828e-01 5.07202864e-01 -7.64737070e-01 ... -1.08987689e-01 -7.73114979e-01 -9.53704119e-01]] [[-2.57272720e-02 -2.60370445e+00 -1.28400540e+00 ... -2.47747540e-01 -1.40716910e+00 -9.37791288e-01] [-9.21383977e-01 -1.36108947e+00 3.24821472e-02 ... -2.27270794e+00 -5.64168453e-01 -1.33368254e+00] [-1.71739554e+00 -3.34389985e-01 -1.66610646e+00 ... -1.44291735e+00 -1.42444003e+00 1.65063739e-02] ... [-1.10633421e+00 -1.64350712e+00 8.00294280e-01 ... 
-1.92372346e+00 -1.50649953e+00 -6.82872593e-01] [-2.85238147e+00 -1.07435656e+00 2.07176924e-01 ... -1.93747997e+00 -2.60918999e+00 5.16375780e-01] [-4.59577829e-01 -3.39712024e-01 -1.98567092e-01 ... -1.54388189e+00 -3.04835737e-01 -1.18968821e+00]] [[-7.56949246e-01 -1.39970183e-01 -1.57338595e+00 ... -5.94514251e-01 -1.40087771e+00 -1.69709921e+00] [-1.84738183e+00 4.64807391e-01 -1.61202967e-01 ... -3.83419991e-02 5.10578275e-01 -7.71549225e-01] [ 2.61582732e-01 -2.53402996e+00 -2.49171436e-01 ... -1.03798366e+00 -1.64150381e+00 -1.62702298e+00] ... [-1.53263128e+00 -3.27122808e-01 -1.53464377e+00 ... -1.19874024e+00 -1.00406861e+00 -5.73716223e-01] [-1.11279511e+00 -1.94594979e-01 -2.56174803e-03 ... -1.17964137e+00 -2.87066936e-01 3.68821025e-02] [ 7.31237173e-01 1.98611498e-01 -1.77458334e+00 ... 2.70577431e-01 -1.37026465e+00 -2.46538639e-01]] ... [[ 9.29502487e-01 -8.07586014e-01 -1.69355989e+00 ... 4.55840588e-01 -1.62755847e-02 -6.46777391e-01] [ 9.03925538e-01 1.86350751e+00 -1.74976206e+00 ... 7.81606436e-01 -4.30903822e-01 -1.34861469e+00] [-6.53216243e-01 -1.77635467e+00 2.31871843e-01 ... -2.67236042e+00 4.50720906e-01 -1.35796535e+00] ... [ 2.08979845e-03 -3.26015890e-01 2.49446034e-01 ... -1.15022719e+00 -1.17171907e+00 -2.93360734e+00] [-5.12767196e-01 -1.04548740e+00 -1.29931986e+00 ... -5.67637563e-01 1.50260925e+00 -1.50205004e+00] [-2.21143579e+00 4.18341756e-02 -2.10166407e+00 ... 9.02518749e-01 -1.32536626e+00 -3.75616252e-01]] [[-2.18440580e+00 -1.32479751e+00 2.49562860e-01 ... -2.51737928e+00 -6.57502294e-01 -6.44326091e-01] [-2.77730346e-01 -4.85849380e-03 -1.71182668e+00 ... 7.03497171e-01 -9.73064542e-01 -5.85270345e-01] [-1.25797570e-01 -1.03331828e+00 -1.89002728e+00 ... -4.13713336e-01 -5.19278526e-01 -2.82076180e-01] ... [-1.10220242e+00 -1.34570277e+00 -2.20974207e+00 ... 5.82465887e-01 6.98568463e-01 -1.25344157e+00] [-9.56661701e-02 3.96537304e-01 -2.85900354e-01 ... 
-9.38622057e-01 -1.47802782e+00 1.58544779e-02] [-9.18026567e-01 -8.26688886e-01 -2.24115086e+00 ... -4.78712887e-01 -1.35183978e+00 4.54689264e-02]] [[-1.47380543e+00 -1.48418796e+00 2.51596212e-01 ... -1.54743361e+00 -1.60077405e+00 -3.83765697e-01] [ 3.01402211e-01 -1.78228283e+00 -1.19772840e+00 ... -8.18460345e-01 -4.29996103e-01 7.64551044e-01] [ 4.05279636e-01 -6.45376444e-01 -1.34078085e-01 ... 1.20588470e+00 -1.05592763e+00 -6.56261086e-01] ... [-1.05056500e+00 -1.83134913e+00 2.28111506e-01 ... -2.04726863e+00 3.05366874e-01 -1.11239815e+00] [-6.89331293e-01 -1.05170393e+00 -1.85548699e+00 ... -8.32390487e-01 -2.06925535e+00 1.13718975e+00] [-1.46454108e+00 -7.40560710e-01 -2.58075190e+00 ... -2.97567546e-01 -2.19984472e-01 -4.99257177e-01]]] [[[-9.87365246e-01 -2.43589115e+00 1.95771837e+00 ... -7.10784256e-01 -4.96560335e-01 -1.14389515e+00] [-8.97322357e-01 -6.70917690e-01 -1.61469197e+00 ... 6.50112450e-01 -7.16647267e-01 -3.94271255e-01] [ 7.27853119e-01 -9.66133237e-01 -4.74840546e+00 ... -1.40925670e+00 9.06095326e-01 7.44807661e-01] ... [-1.35323632e+00 -1.49415493e+00 1.97910595e+00 ... 3.24006367e+00 5.94622910e-01 -1.04351342e+00] [ 8.83658707e-01 -3.61130452e+00 -1.81044745e+00 ... 5.55608213e-01 1.89108014e-01 -1.87289625e-01] [-2.94560289e+00 -3.57884109e-01 6.73880577e-02 ... 7.77573764e-01 -7.43116021e-01 1.06993365e+00]] [[-1.09229410e+00 -3.50723296e-01 -5.57304621e-01 ... 3.03160608e-01 5.70261896e-01 -1.61359644e+00] [ 2.31838512e+00 -2.92376065e+00 2.95962691e-02 ... 8.35981667e-01 -9.27689731e-01 -1.05922043e+00] [ 1.52949047e+00 -4.23334002e-01 4.97765243e-01 ... 7.47813642e-01 -4.35425699e-01 -2.57708883e+00] ... [-3.35148454e-01 -5.07575572e-01 2.85342336e-01 ... -6.69730783e-01 1.54422939e-01 -2.39966130e+00] [-2.13711095e+00 -3.75392580e+00 -2.69582629e+00 ... -8.29065681e-01 -9.53209043e-01 -6.30942464e-01] [ 1.57214284e+00 -5.48117447e+00 -1.44564509e-01 ... 
1.30519986e-01 1.05091190e+00 -3.28548503e+00]] [[ 2.53077030e-01 -1.80053711e+00 -4.63211507e-01 ... 1.12151837e+00 -1.59490705e+00 -2.11145163e+00] [-3.03745246e+00 -1.54860210e+00 -9.51457620e-01 ... 2.90092528e-01 1.47431540e+00 -5.46300077e+00] [ 1.94693923e+00 -9.38020468e-01 -2.92070240e-01 ... 9.70852315e-01 7.72666156e-01 -1.91268563e+00] ... [-1.99672651e+00 1.07659507e+00 6.79102540e-02 ... 5.01798987e-02 -2.08904839e+00 -4.04883909e+00] [-1.57536328e+00 -2.44522810e-01 -1.48322129e+00 ... -1.09574294e+00 1.30734038e+00 -2.06895757e+00] [-1.62481952e+00 -6.28206611e-01 -9.49923754e-01 ... -3.42313290e+00 -1.73374224e+00 -1.64310169e+00]] ... [[-1.19686484e+00 -6.06502831e-01 -6.65507889e+00 ... -6.80448949e-01 -2.20705462e+00 1.35082543e-01] [ 3.35623908e+00 -6.01540506e-01 -2.24769449e+00 ... -6.75384820e-01 -3.12522173e-01 -1.04282045e+00] [ 2.53967237e+00 -1.21193206e+00 -1.51402378e+00 ... -3.30181074e+00 1.49065757e+00 -1.06770957e+00] ... [-1.75319648e+00 1.07476413e-01 1.26162243e+00 ... 9.14937079e-01 4.12151992e-01 8.52451324e-02] [-9.40979719e-01 -9.78011250e-01 -1.62253571e+00 ... -2.55907863e-01 -7.76519656e-01 -2.81639385e+00] [-6.24358416e-01 6.22775853e-01 -1.76844692e+00 ... -4.80586201e-01 7.20957577e-01 -1.10311270e+00]] [[ 1.53694415e+00 4.82495368e-01 -2.07532078e-01 ... 3.75631404e+00 -1.81171751e+00 -2.30033803e+00] [ 2.51826215e+00 3.50225508e-01 -3.11359096e+00 ... -2.96195722e+00 -5.65328777e-01 -2.57247138e+00] [-6.13244176e-02 -7.80651987e-01 1.12810230e+00 ... 1.86717582e+00 1.02053261e+00 2.45132875e+00] ... [ 2.27147877e-01 -2.63225079e-01 5.94603479e-01 ... 2.65242720e+00 -1.04520273e+00 1.19620919e+00] [ 2.99510658e-01 1.18440723e+00 -2.77649498e+00 ... -2.76005888e+00 6.37412071e-04 4.00150359e-01] [ 1.41140580e-01 4.39971387e-01 5.43950856e-01 ... 1.75354481e-01 -5.42919517e-01 -2.73391771e+00]] [[ 6.01304114e-01 1.12374663e-01 3.09355855e-01 ... 
4.82034385e-01 -4.41557616e-01 -6.80283010e-01] [-3.32842946e+00 -2.65823007e-02 -3.42052412e+00 ... 3.34108412e-01 8.49374592e-01 -3.03290486e-02] [ 9.20754492e-01 -1.34316051e+00 -2.46976137e+00 ... -5.34837127e-01 4.30081546e-01 -1.40955937e+00] ... [-2.49464893e+00 -1.56197810e+00 4.98863041e-01 ... -2.41487145e+00 -1.58710885e+00 -5.83953810e+00] [-1.94371879e-01 2.20067906e+00 -3.42452693e+00 ... -2.07298827e+00 2.69307435e-01 -1.05341518e+00] [ 1.22054982e+00 -2.45673847e+00 -2.22845125e+00 ... -3.88951397e+00 5.93432724e-01 -2.08166909e+00]]] [[[ 4.23245400e-01 -6.59503222e-01 2.52658963e-01 ... 1.69390351e-01 5.01386642e-01 -1.14839017e+00] [ 3.94526690e-01 2.63716541e-02 -2.98362225e-02 ... -1.97949022e-01 1.87402666e-02 3.06194648e-02] [-1.94493830e-01 3.25562268e-01 2.22934663e-01 ... 4.72024791e-02 -5.93861789e-02 -1.03692919e-01] ... [-1.54962540e-01 -7.34702200e-02 5.93736291e-01 ... 7.92068481e-01 -2.08992630e-01 -3.27036887e-01] [ 1.90758377e-01 4.30528820e-02 1.65791474e-02 ... -4.24554944e-01 1.40340596e-01 1.61144525e-01] [-5.13099909e-01 -4.59269911e-01 2.53444761e-01 ... -7.75080681e-01 -8.44189823e-02 3.02954227e-01]] [[-2.49350876e-01 -1.10895824e+00 -3.83912832e-01 ... -2.94366568e-01 7.70915821e-02 -2.07891703e-01] [ 5.14771223e-01 9.87349153e-02 1.67238578e-01 ... 5.39167106e-01 1.67415351e-01 -1.74790859e-01] [ 5.56946635e-01 -2.15302318e-01 2.83864647e-01 ... -3.21333468e-01 2.80479312e-01 -2.22663060e-02] ... [ 8.53129327e-02 2.58991480e-01 2.95495808e-01 ... 1.73374504e-01 -1.79076090e-01 -3.02606612e-01] [ 2.12810889e-01 3.80721658e-01 8.84870291e-01 ... 5.06388783e-01 3.43502730e-01 4.70534116e-01] [ 7.39822626e-01 7.77080208e-02 1.29467234e-01 ... -5.43617606e-01 -2.98862457e-01 4.72598612e-01]] [[ 1.75165623e-01 2.20337138e-01 1.28428251e-01 ... -1.08841360e-02 2.96697974e-01 -9.28517580e-02] [ 2.22274542e-01 3.54540437e-01 -2.58410573e-01 ... 
4.72906470e-01 -6.67615294e-01 -3.44309986e-01] [ 3.47741246e-01 1.10978439e-01 -3.33457977e-01 ... 4.73951101e-01 -2.09389538e-01 3.81708443e-02] ... [-4.79684681e-01 6.61646485e-01 1.81000590e-01 ... -2.01466590e-01 9.62985829e-02 2.19365239e-01] [ 4.34423387e-01 5.51274717e-01 5.61153531e-01 ... -1.16282016e-01 -8.57651383e-02 1.17353179e-01] [ 5.30566163e-02 7.68290609e-02 7.01262951e-01 ... 4.62623030e-01 2.96824634e-01 9.26558256e-01]] ... [[-2.07404345e-02 1.52710482e-01 1.35282099e-01 ... -2.17249840e-01 -2.57229120e-01 5.04008889e-01] [-6.01960897e-01 -5.05240440e-01 7.09688663e-01 ... 3.08462679e-01 8.42283294e-02 2.13757813e-01] [-3.12986434e-01 -3.05132002e-01 6.06883168e-01 ... 3.31300199e-01 -1.59158438e-01 2.32436240e-01] ... [ 4.85108048e-01 3.11834067e-02 2.43900999e-01 ... 8.76111984e-01 -2.25944251e-01 2.64291465e-01] [ 2.93391973e-01 -4.51808304e-01 2.13788539e-01 ... -9.67786908e-02 5.19654512e-01 4.71196771e-02] [ 5.57683706e-01 1.17300965e-01 5.03802180e-01 ... 1.01789892e+00 -1.15857315e+00 5.01098156e-01]] [[ 3.27618182e-01 -1.16952389e-01 7.11760879e-01 ... 7.19472766e-01 5.54508790e-02 -7.15956807e-01] [-6.76299334e-01 1.79222792e-01 6.67492300e-02 ... -4.63982433e-01 3.35754454e-01 1.23322196e-02] [-2.73515806e-02 5.31609505e-02 2.74982333e-01 ... 6.95805907e-01 3.50094020e-01 1.52319223e-01] ... [-3.30262363e-01 8.70428920e-01 -1.17429465e-01 ... -9.73052084e-02 -3.00507635e-01 2.03287795e-01] [ 3.06137390e-02 1.49011075e-01 6.09076500e-01 ... 2.68884540e-01 -6.98210448e-02 -2.34977275e-01] [ 2.14709267e-01 2.09042624e-01 9.31735560e-02 ... 3.65820974e-01 -2.64156878e-01 -7.29749918e-01]] [[-2.74116993e-01 -5.35703897e-01 -5.83670735e-01 ... 1.64529026e-01 1.99515089e-01 7.67422915e-01] [-3.05725455e-01 -2.25210309e-01 4.17109668e-01 ... 2.97393203e-01 4.90351111e-01 3.27390999e-01] [-1.27655193e-01 3.46941352e-01 -2.43670374e-01 ... -3.57223302e-03 -1.87708855e-01 2.06762984e-01] ... [-7.80757189e-01 -5.59432954e-02 -1.21810496e-01 ... 
2.47987583e-02 4.12692219e-01 -2.36289889e-01] [ 1.31902620e-01 2.20407411e-01 -3.11885983e-01 ... -1.19648114e-01 -7.23347962e-02 3.00149262e-01] [ 9.76197347e-02 -6.03087425e-01 3.72561634e-01 ... 4.61328059e-01 2.31060252e-01 -1.39741158e+00]]]]]; ov_res: [[[[[-5.53551853e-01 -7.75206149e-01 -2.72750092e+00 ... 1.10141821e-02 -7.09746897e-01 -5.02537906e-01] [-8.56902480e-01 -1.30071259e+00 1.84867072e+00 ... -1.57528186e+00 -1.30167091e+00 -3.31244558e-01] [-1.08902335e+00 -1.36885786e+00 -1.35765529e+00 ... 4.22600687e-01 1.15987194e+00 -4.97771412e-01] ... [-1.10144055e+00 -5.25798976e-01 -9.20492470e-01 ... -7.60427535e-01 -1.26357889e+00 -2.14600301e+00] [-8.45427334e-01 6.71422482e-01 9.01525747e-03 ... -1.87469244e+00 -1.28021526e+00 -6.29146934e-01] [ 4.91094857e-01 5.07202923e-01 -7.64737070e-01 ... -1.08987711e-01 -7.73114979e-01 -9.53704119e-01]] [[-2.57272851e-02 -2.60370421e+00 -1.28400552e+00 ... -2.47747540e-01 -1.40716898e+00 -9.37791288e-01] [-9.21383917e-01 -1.36108947e+00 3.24821621e-02 ... -2.27270770e+00 -5.64168513e-01 -1.33368254e+00] [-1.71739566e+00 -3.34389985e-01 -1.66610646e+00 ... -1.44291735e+00 -1.42444003e+00 1.65063590e-02] ... [-1.10633409e+00 -1.64350712e+00 8.00294280e-01 ... -1.92372346e+00 -1.50649953e+00 -6.82872593e-01] [-2.85238147e+00 -1.07435656e+00 2.07176983e-01 ... -1.93747997e+00 -2.60918999e+00 5.16375721e-01] [-4.59577829e-01 -3.39712054e-01 -1.98567107e-01 ... -1.54388201e+00 -3.04835737e-01 -1.18968809e+00]] [[-7.56949246e-01 -1.39970183e-01 -1.57338595e+00 ... -5.94514251e-01 -1.40087771e+00 -1.69709921e+00] [-1.84738171e+00 4.64807332e-01 -1.61202967e-01 ... -3.83419879e-02 5.10578275e-01 -7.71549225e-01] [ 2.61582702e-01 -2.53402996e+00 -2.49171421e-01 ... -1.03798366e+00 -1.64150381e+00 -1.62702310e+00] ... [-1.53263128e+00 -3.27122808e-01 -1.53464377e+00 ... -1.19874024e+00 -1.00406861e+00 -5.73716223e-01] [-1.11279523e+00 -1.94594994e-01 -2.56175757e-03 ... 
-1.17964137e+00 -2.87066966e-01 3.68821323e-02] [ 7.31237173e-01 1.98611498e-01 -1.77458334e+00 ... 2.70577401e-01 -1.37026465e+00 -2.46538669e-01]] ... [[ 9.29502428e-01 -8.07586014e-01 -1.69356000e+00 ... 4.55840588e-01 -1.62755698e-02 -6.46777391e-01] [ 9.03925478e-01 1.86350751e+00 -1.74976206e+00 ... 7.81606495e-01 -4.30903822e-01 -1.34861469e+00] [-6.53216243e-01 -1.77635467e+00 2.31871828e-01 ... -2.67236042e+00 4.50720906e-01 -1.35796535e+00] ... [ 2.08977959e-03 -3.26015890e-01 2.49446020e-01 ... -1.15022719e+00 -1.17171907e+00 -2.93360734e+00] [-5.12767255e-01 -1.04548740e+00 -1.29931986e+00 ... -5.67637563e-01 1.50260937e+00 -1.50205004e+00] [-2.21143579e+00 4.18341495e-02 -2.10166407e+00 ... 9.02518749e-01 -1.32536626e+00 -3.75616252e-01]] [[-2.18440604e+00 -1.32479751e+00 2.49562904e-01 ... -2.51737905e+00 -6.57502294e-01 -6.44326091e-01] [-2.77730346e-01 -4.85847099e-03 -1.71182668e+00 ... 7.03497112e-01 -9.73064601e-01 -5.85270345e-01] [-1.25797600e-01 -1.03331828e+00 -1.89002728e+00 ... -4.13713336e-01 -5.19278526e-01 -2.82076180e-01] ... [-1.10220230e+00 -1.34570277e+00 -2.20974207e+00 ... 5.82465827e-01 6.98568523e-01 -1.25344157e+00] [-9.56661478e-02 3.96537304e-01 -2.85900384e-01 ... -9.38622057e-01 -1.47802782e+00 1.58544667e-02] [-9.18026567e-01 -8.26688886e-01 -2.24115086e+00 ... -4.78712887e-01 -1.35183966e+00 4.54689190e-02]] [[-1.47380543e+00 -1.48418796e+00 2.51596212e-01 ... -1.54743361e+00 -1.60077405e+00 -3.83765697e-01] [ 3.01402241e-01 -1.78228295e+00 -1.19772840e+00 ... -8.18460345e-01 -4.29996103e-01 7.64551044e-01] [ 4.05279577e-01 -6.45376444e-01 -1.34078071e-01 ... 1.20588470e+00 -1.05592763e+00 -6.56261086e-01] ... [-1.05056500e+00 -1.83134925e+00 2.28111476e-01 ... -2.04726863e+00 3.05366874e-01 -1.11239815e+00] [-6.89331293e-01 -1.05170393e+00 -1.85548699e+00 ... -8.32390487e-01 -2.06925511e+00 1.13718975e+00] [-1.46454108e+00 -7.40560710e-01 -2.58075190e+00 ... 
-2.97567517e-01 -2.19984487e-01 -4.99257177e-01]]] [[[-9.87365246e-01 -2.43589115e+00 1.95771837e+00 ... -7.10784256e-01 -4.96560335e-01 -1.14389527e+00] [-8.97322357e-01 -6.70917690e-01 -1.61469209e+00 ... 6.50112450e-01 -7.16647267e-01 -3.94271255e-01] [ 7.27853179e-01 -9.66133237e-01 -4.74840546e+00 ... -1.40925658e+00 9.06095326e-01 7.44807720e-01] ... [-1.35323632e+00 -1.49415493e+00 1.97910607e+00 ... 3.24006367e+00 5.94622970e-01 -1.04351342e+00] [ 8.83658767e-01 -3.61130452e+00 -1.81044757e+00 ... 5.55608213e-01 1.89108014e-01 -1.87289625e-01] [-2.94560289e+00 -3.57884109e-01 6.73880279e-02 ... 7.77573764e-01 -7.43116021e-01 1.06993353e+00]] [[-1.09229410e+00 -3.50723296e-01 -5.57304621e-01 ... 3.03160608e-01 5.70261896e-01 -1.61359632e+00] [ 2.31838512e+00 -2.92376065e+00 2.95962747e-02 ... 8.35981667e-01 -9.27689731e-01 -1.05922043e+00] [ 1.52949047e+00 -4.23334002e-01 4.97765303e-01 ... 7.47813702e-01 -4.35425699e-01 -2.57708883e+00] ... [-3.35148454e-01 -5.07575572e-01 2.85342366e-01 ... -6.69730783e-01 1.54422939e-01 -2.39966130e+00] [-2.13711095e+00 -3.75392556e+00 -2.69582629e+00 ... -8.29065681e-01 -9.53209043e-01 -6.30942464e-01] [ 1.57214296e+00 -5.48117447e+00 -1.44564524e-01 ... 1.30520001e-01 1.05091202e+00 -3.28548503e+00]] [[ 2.53077030e-01 -1.80053723e+00 -4.63211507e-01 ... 1.12151837e+00 -1.59490705e+00 -2.11145163e+00] [-3.03745246e+00 -1.54860210e+00 -9.51457679e-01 ... 2.90092498e-01 1.47431540e+00 -5.46300077e+00] [ 1.94693935e+00 -9.38020468e-01 -2.92070240e-01 ... 9.70852315e-01 7.72666156e-01 -1.91268551e+00] ... [-1.99672651e+00 1.07659507e+00 6.79102689e-02 ... 5.01798876e-02 -2.08904839e+00 -4.04883909e+00] [-1.57536328e+00 -2.44522810e-01 -1.48322129e+00 ... -1.09574306e+00 1.30734050e+00 -2.06895757e+00] [-1.62481952e+00 -6.28206611e-01 -9.49923754e-01 ... -3.42313266e+00 -1.73374236e+00 -1.64310169e+00]] ... [[-1.19686472e+00 -6.06502831e-01 -6.65507889e+00 ... 
-6.80448949e-01 -2.20705462e+00 1.35082558e-01] [ 3.35623884e+00 -6.01540506e-01 -2.24769449e+00 ... -6.75384820e-01 -3.12522173e-01 -1.04282045e+00] [ 2.53967237e+00 -1.21193206e+00 -1.51402378e+00 ... -3.30181074e+00 1.49065769e+00 -1.06770957e+00] ... [-1.75319660e+00 1.07476436e-01 1.26162231e+00 ... 9.14937019e-01 4.12151963e-01 8.52451101e-02] [-9.40979719e-01 -9.78011250e-01 -1.62253571e+00 ... -2.55907863e-01 -7.76519656e-01 -2.81639361e+00] [-6.24358416e-01 6.22775912e-01 -1.76844692e+00 ... -4.80586201e-01 7.20957637e-01 -1.10311258e+00]] [[ 1.53694415e+00 4.82495308e-01 -2.07532093e-01 ... 3.75631404e+00 -1.81171751e+00 -2.30033803e+00] [ 2.51826215e+00 3.50225568e-01 -3.11359096e+00 ... -2.96195722e+00 -5.65328777e-01 -2.57247138e+00] [-6.13244176e-02 -7.80651987e-01 1.12810218e+00 ... 1.86717594e+00 1.02053261e+00 2.45132875e+00] ... [ 2.27147862e-01 -2.63225079e-01 5.94603479e-01 ... 2.65242743e+00 -1.04520261e+00 1.19620919e+00] [ 2.99510688e-01 1.18440723e+00 -2.77649474e+00 ... -2.76005888e+00 6.37393096e-04 4.00150359e-01] [ 1.41140580e-01 4.39971358e-01 5.43950796e-01 ... 1.75354466e-01 -5.42919457e-01 -2.73391771e+00]] [[ 6.01304114e-01 1.12374693e-01 3.09355855e-01 ... 4.82034385e-01 -4.41557616e-01 -6.80283010e-01] [-3.32842946e+00 -2.65822727e-02 -3.42052412e+00 ... 3.34108442e-01 8.49374652e-01 -3.03290449e-02] [ 9.20754492e-01 -1.34316051e+00 -2.46976137e+00 ... -5.34837127e-01 4.30081517e-01 -1.40955937e+00] ... [-2.49464893e+00 -1.56197822e+00 4.98862982e-01 ... -2.41487145e+00 -1.58710885e+00 -5.83953857e+00] [-1.94371879e-01 2.20067906e+00 -3.42452693e+00 ... -2.07298827e+00 2.69307464e-01 -1.05341518e+00] [ 1.22054970e+00 -2.45673847e+00 -2.22845125e+00 ... -3.88951397e+00 5.93432724e-01 -2.08166909e+00]]] [[[ 4.23245400e-01 -6.59503281e-01 2.52658933e-01 ... 1.69390351e-01 5.01386642e-01 -1.14839017e+00] [ 3.94526690e-01 2.63716560e-02 -2.98362225e-02 ... 
-1.97949022e-01 1.87402647e-02 3.06194648e-02] [-1.94493845e-01 3.25562268e-01 2.22934663e-01 ... 4.72024791e-02 -5.93861751e-02 -1.03692919e-01] ... [-1.54962540e-01 -7.34702274e-02 5.93736291e-01 ... 7.92068541e-01 -2.08992630e-01 -3.27036887e-01] [ 1.90758362e-01 4.30528820e-02 1.65791474e-02 ... -4.24554944e-01 1.40340611e-01 1.61144525e-01] [-5.13099968e-01 -4.59269911e-01 2.53444761e-01 ... -7.75080740e-01 -8.44189823e-02 3.02954227e-01]] [[-2.49350891e-01 -1.10895824e+00 -3.83912832e-01 ... -2.94366568e-01 7.70915821e-02 -2.07891703e-01] [ 5.14771223e-01 9.87349153e-02 1.67238578e-01 ... 5.39167106e-01 1.67415351e-01 -1.74790859e-01] [ 5.56946635e-01 -2.15302318e-01 2.83864647e-01 ... -3.21333468e-01 2.80479312e-01 -2.22663060e-02] ... [ 8.53129327e-02 2.58991480e-01 2.95495808e-01 ... 1.73374504e-01 -1.79076090e-01 -3.02606612e-01] [ 2.12810889e-01 3.80721658e-01 8.84870350e-01 ... 5.06388783e-01 3.43502730e-01 4.70534116e-01] [ 7.39822626e-01 7.77080208e-02 1.29467234e-01 ... -5.43617547e-01 -2.98862457e-01 4.72598612e-01]] [[ 1.75165609e-01 2.20337138e-01 1.28428251e-01 ... -1.08841332e-02 2.96697974e-01 -9.28517506e-02] [ 2.22274542e-01 3.54540437e-01 -2.58410573e-01 ... 4.72906470e-01 -6.67615235e-01 -3.44309986e-01] [ 3.47741246e-01 1.10978439e-01 -3.33457977e-01 ... 4.73951101e-01 -2.09389538e-01 3.81708443e-02] ... [-4.79684681e-01 6.61646485e-01 1.81000590e-01 ... -2.01466590e-01 9.62985829e-02 2.19365239e-01] [ 4.34423387e-01 5.51274717e-01 5.61153531e-01 ... -1.16282023e-01 -8.57651308e-02 1.17353179e-01] [ 5.30566163e-02 7.68290609e-02 7.01262951e-01 ... 4.62623030e-01 2.96824664e-01 9.26558316e-01]] ... [[-2.07404364e-02 1.52710482e-01 1.35282099e-01 ... -2.17249855e-01 -2.57229120e-01 5.04008889e-01] [-6.01960897e-01 -5.05240440e-01 7.09688663e-01 ... 3.08462679e-01 8.42283294e-02 2.13757813e-01] [-3.12986434e-01 -3.05132002e-01 6.06883228e-01 ... 3.31300199e-01 -1.59158438e-01 2.32436240e-01] ... 
[ 4.85108048e-01 3.11834067e-02 2.43900999e-01 ... 8.76111984e-01 -2.25944236e-01 2.64291465e-01] [ 2.93391973e-01 -4.51808333e-01 2.13788539e-01 ... -9.67786908e-02 5.19654512e-01 4.71196771e-02] [ 5.57683766e-01 1.17300965e-01 5.03802180e-01 ... 1.01789892e+00 -1.15857315e+00 5.01098156e-01]] [[ 3.27618182e-01 -1.16952397e-01 7.11760819e-01 ... 7.19472706e-01 5.54508790e-02 -7.15956867e-01] [-6.76299393e-01 1.79222792e-01 6.67492300e-02 ... -4.63982433e-01 3.35754454e-01 1.23322187e-02] [-2.73515824e-02 5.31609468e-02 2.74982303e-01 ... 6.95805907e-01 3.50094020e-01 1.52319223e-01] ... [-3.30262363e-01 8.70428860e-01 -1.17429465e-01 ... -9.73052010e-02 -3.00507635e-01 2.03287795e-01] [ 3.06137390e-02 1.49011090e-01 6.09076440e-01 ... 2.68884540e-01 -6.98210374e-02 -2.34977275e-01] [ 2.14709267e-01 2.09042624e-01 9.31735560e-02 ... 3.65820974e-01 -2.64156878e-01 -7.29749858e-01]] [[-2.74116993e-01 -5.35703838e-01 -5.83670795e-01 ... 1.64529040e-01 1.99515089e-01 7.67422915e-01] [-3.05725455e-01 -2.25210294e-01 4.17109668e-01 ... 2.97393203e-01 4.90351111e-01 3.27390999e-01] [-1.27655193e-01 3.46941352e-01 -2.43670389e-01 ... -3.57223232e-03 -1.87708855e-01 2.06762984e-01] ... [-7.80757189e-01 -5.59432954e-02 -1.21810496e-01 ... 2.47987583e-02 4.12692219e-01 -2.36289904e-01] [ 1.31902620e-01 2.20407411e-01 -3.11885983e-01 ... -1.19648114e-01 -7.23347962e-02 3.00149262e-01] [ 9.76197347e-02 -6.03087485e-01 3.72561634e-01 ... 4.61328059e-01 2.31060252e-01 -1.39741158e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [1, 1, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_948.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.287598}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.6034 (2,1,1,.,.) = 2.4575 (3,1,1,.,.) = 0.5905 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[ 5.3768322e-02 -3.0128236e+00 -3.0044503e+00 -2.7580729e-01 3.8022146e-02 -2.0600326e+00 -3.0931506e-01 2.5984907e+00] [-2.0827711e+00 -2.2103727e+00 1.2111701e-01 -1.7496327e+00 -2.2115748e+00 2.5731585e+00 -1.4448282e+00 -8.2927103e+00] [ 2.5185230e+00 8.1624830e-01 5.1463675e+00 -1.0905519e+00 5.4666328e+00 1.6528111e+00 1.8189907e-01 -1.8123008e+00] [-8.2844758e-01 5.0841200e-01 5.1277626e-01 2.6280928e+00 -3.7990286e+00 1.8410097e+00 1.6491051e+00 5.5331922e-01] [ 2.2868104e+00 -3.2711306e-01 -3.3756316e-02 9.1291869e-01 -1.2276769e+00 -3.0171525e+00 -1.3103297e-01 3.7761374e+00] [ 7.6544368e-01 -1.8777841e+00 4.7156649e+00 -5.0787330e-03 2.7691188e+00 1.7161628e+00 -1.6238753e+00 2.6226928e+00] [ 5.4471540e+00 5.7203436e+00 1.4325444e+00 3.3232803e+00 -2.2168994e+00 1.2760152e+00 1.8828830e-01 -2.9178991e+00] [ 5.2417150e+00 -3.7296741e+00 -2.4772844e+00 -3.1329741e+00 -1.8603634e+00 -3.4536827e+00 2.0920482e+00 2.3994210e+00]] [[ 1.7919815e+00 -5.5925827e+00 1.8736498e+00 -2.3103893e+00 3.8116550e+00 
-6.2951458e-01 -2.8580444e+00 5.5447068e+00] [ 8.2490559e+00 3.1791718e+00 -5.1923954e-01 -2.3005517e+00 -2.0347829e+00 5.3695526e+00 1.5371052e+00 1.9919673e-01] [ 3.5345230e+00 1.1651169e+00 3.7004342e+00 -2.7407131e+00 -8.0024409e-01 -7.8062505e-02 -3.1098049e+00 4.3030071e-01] [ 5.9531802e-01 -2.6399784e+00 -8.7121487e-01 5.3589139e+00 1.4091026e+00 -2.8106633e-01 -8.1149507e-01 2.5852375e+00] [ 2.3035729e+00 -3.7890937e+00 1.4386247e+00 -2.7838669e+00 -1.4659081e+00 -5.5927577e+00 1.6986492e-01 -1.2518427e-01] [ 4.1730018e+00 -1.6622572e+00 -9.9109066e-01 6.4326410e+00 -9.2062342e-01 -8.3719778e-01 8.6985695e-01 -4.1138120e+00] [-6.5166864e+00 1.8778318e-01 1.7244676e+00 1.4187141e-01 -8.0342519e-01 1.5385454e+00 3.5802410e+00 2.0921063e+00] [ 4.1272936e+00 8.5737407e-01 2.6518941e-02 -4.6227145e+00 3.1541481e+00 -1.0676829e+00 2.0157015e+00 -7.4745264e+00]] [[ 1.5527030e+00 4.2631559e+00 -5.4733605e+00 -9.9370015e-01 1.4680578e+00 -7.1406841e-01 2.6293330e+00 4.8856821e+00] [ 4.7501624e-01 1.4079932e+00 -2.8825221e+00 -3.4194987e+00 -5.1347089e-01 1.4522936e+00 -2.2385964e+00 5.2975416e-01] [-1.8306276e+00 6.8852763e+00 2.8850455e+00 4.2707543e+00 -2.7724440e+00 -1.1011919e+00 2.6419017e+00 5.0425100e+00] [-2.7060463e+00 2.5490632e+00 4.9416101e-01 1.8507922e+00 -4.3759828e+00 1.7738588e+00 -3.2337778e+00 -4.7384653e+00] [ 1.9983063e+00 3.9623754e+00 -2.7378132e+00 2.5531757e+00 -1.7711879e+00 -5.4803939e+00 -7.0772445e-01 2.2333890e-01] [ 4.4761100e+00 -2.3987642e-01 4.9422812e+00 2.5007207e+00 -4.8171477e+00 2.9707608e+00 -3.1690475e-01 -9.0565431e-01] [-3.1085572e-01 -1.6363895e+00 6.8146741e-01 -6.3502946e+00 1.9036763e+00 4.5403891e+00 1.5836682e+00 -3.1566182e-01] [ 8.3517587e-01 -2.8691441e-02 -5.2406588e+00 -1.2369270e+00 -1.2052459e+00 1.3150930e+00 6.0712111e-01 1.3766818e-01]] [[ 3.5657754e+00 2.9509823e+00 -2.9148259e+00 -1.3018566e+00 -3.5014272e+00 3.1416457e+00 8.2355666e-01 1.7442617e+00] [-2.0762518e-01 5.7614498e+00 -2.7592757e+00 
-6.0867329e+00 8.6545267e+00 1.9477521e+00 1.6344851e-01 3.4706298e-01] [-1.2104168e+00 -5.5069976e+00 1.6116997e+00 2.3656645e+00 5.3317618e+00 1.4740919e-01 1.2341621e+00 -1.3090631e+00] [-4.7187415e-01 1.6054373e+00 2.6899202e+00 1.2067592e+00 2.1521726e+00 1.9801235e+00 -3.7913373e-01 3.8901093e+00] [-2.8069682e+00 -3.7570283e-01 5.4080218e-01 3.6964695e+00 1.5551679e+00 2.7383673e+00 -2.9988797e+00 1.4796133e-01] [ 2.3541031e+00 5.7292497e-01 -4.8618822e+00 -4.3168025e+00 -3.1952417e+00 4.0648937e-02 -2.8674419e+00 2.5758719e+00] [ 1.0470545e+00 2.7618752e+00 3.0303111e+00 6.5809011e-02 3.9105147e-01 -4.1186363e-02 8.1828251e+00 1.5163440e+00] [-1.4370402e+00 8.9454949e-01 5.3719740e+00 9.1811156e-01 1.3630178e+00 4.4081402e+00 2.9980514e+00 8.3815432e+00]] [[ 9.2132521e-01 4.7695026e+00 8.0947483e-01 1.8465285e+00 3.2990868e+00 2.1413534e+00 4.1598301e+00 -4.2539120e+00] [ 3.1137443e+00 -1.9214205e+00 1.4575810e+00 1.3091171e+00 -2.1384029e+00 2.6988690e+00 -7.0238888e-02 -2.8080556e-01] [ 3.2273855e+00 4.5142074e+00 2.9241824e+00 -4.0474563e+00 3.0574903e+00 2.9058931e+00 1.1772537e+01 3.3562922e+00] [-8.6540759e-01 2.6518149e+00 -1.6633409e-01 -6.1341805e+00 -1.5206496e+00 2.1291869e+00 -6.9850862e-01 2.1088486e+00] [ 1.6681577e+00 6.3769650e+00 1.6765949e+00 4.8835306e+00 3.0208912e+00 -4.8007593e-01 -4.5583072e-01 3.2975835e-01] [ 3.3288968e-01 -9.4490707e-02 6.3427019e-01 -1.9644223e+00 2.5377848e+00 8.1538856e-03 -4.2391968e+00 -4.9649119e-02] [ 4.1631973e-01 -8.0352461e-01 2.9554896e+00 3.4064195e+00 1.4002589e+00 3.9765067e+00 -2.9549399e-01 6.9851226e-01] [ 2.3744359e+00 -3.7212617e+00 1.3113087e+00 3.6578436e+00 -2.4354305e+00 -2.5079443e+00 -6.4531674e+00 7.9991269e-01]] [[-1.1048429e+00 -1.4971764e+00 2.7172315e+00 -1.7947186e+00 -1.4283817e+00 -5.1472836e+00 1.7942458e+00 5.2464914e+00] [ 1.2580810e+00 -1.4022774e+00 -1.3107003e+00 -3.9665773e+00 5.6617543e-02 -5.7924271e-01 -2.1213377e+00 9.5137177e+00] [ 4.3843989e+00 -5.9659748e+00 
-1.5722773e+00 -5.8501782e+00 5.0571737e+00 -9.4323194e-01 -2.9166682e+00 2.3404582e+00] [-5.7534623e-01 -2.3826768e+00 -3.9971774e+00 9.9177742e-01 9.4789445e-01 3.3238463e+00 3.2761568e-01 1.6292996e+00] [-6.5564191e-01 4.7431598e+00 -1.0334270e+00 4.8441586e+00 -5.6360307e+00 8.7166798e-01 2.1947317e+00 2.3211854e+00] [-2.7927051e+00 3.0421031e+00 1.0299792e+00 5.1695447e+00 1.4423069e+00 -3.9426200e+00 -1.3470362e+00 1.1525825e+00] [-2.4003923e+00 -3.4800389e-01 -1.8826712e+00 -5.6089640e-01 2.5807285e+00 1.5522887e+00 -4.3982606e+00 -7.3758459e-01] [-3.0825377e+00 3.9463863e+00 3.9576290e+00 -1.6319842e+00 -2.8977253e+00 1.3036022e+00 -6.0891342e+00 1.3282517e+00]] [[ 3.3756034e+00 -1.0082083e+00 -1.5602268e+00 3.6796544e+00 -1.4880133e+00 2.5625072e+00 -1.2042617e+00 4.4783192e+00] [ 9.1509306e-01 -4.6295600e+00 3.6283870e+00 1.3792512e+00 7.1280944e-01 -4.7330351e+00 1.3473120e+00 8.4318519e-01] [-1.7901346e-01 -6.8109310e-01 -1.5012336e+00 -3.4176717e+00 -1.6706522e+00 -2.6142664e+00 6.6439235e-01 -3.8933396e-02] [ 1.1719964e+00 6.7689185e+00 -1.1636848e+00 -5.1457005e+00 1.6788565e+00 3.4004426e-01 9.8644841e-01 -3.4565940e+00] [ 1.9421084e+00 4.6198149e+00 3.3584273e+00 4.6369948e+00 -2.4115269e+00 -2.3490264e+00 1.2134408e+00 -3.2962341e+00] [-2.5687840e+00 3.4177661e-01 -2.3791161e+00 2.9112782e+00 2.8923390e+00 -6.3298903e+00 5.2938933e+00 -3.8064344e+00] [ 6.3621300e-01 -1.7183403e+00 1.6775236e+00 -4.5368123e+00 -2.8993366e+00 2.3571804e+00 4.7437663e+00 3.1197207e+00] [ 4.0188313e+00 -3.7003553e+00 -1.4174724e+00 1.6248784e+00 6.5064340e+00 3.5051091e+00 5.7020621e+00 -2.8687627e+00]] [[ 2.2243963e-01 -1.7584366e+00 1.8996632e+00 5.5618703e-01 2.1981833e+00 2.7907345e+00 -2.1300395e+00 -3.1876857e+00] [-3.2574642e+00 2.5080383e-01 1.5235742e+00 -4.9901196e-01 6.0617948e-01 -2.8575749e+00 2.0464427e+00 -2.0717096e+00] [ 5.7962489e+00 -2.6116371e-03 2.0336938e+00 -3.7984507e+00 2.4001610e+00 -2.6220417e+00 1.9135976e+00 4.4925933e+00] [-2.9444442e+00 
2.6613777e+00 6.7764521e-01 -8.9063108e-02 4.8887193e-01 -8.4041584e-01 -1.7261258e+00 -7.4090695e-01] [-1.0121113e+00 7.9761410e-01 3.4187775e+00 -5.6996322e+00 -5.5791173e+00 -2.6698296e+00 4.6991067e+00 3.4766023e+00] [-1.8945569e+00 -2.0339572e-01 5.1781449e+00 6.8906245e+00 4.2684355e+00 -1.9616166e+00 3.1566610e+00 -3.9597981e+00] [-6.1056888e-01 3.0355530e+00 2.8707438e+00 1.3722954e+00 -1.9091295e+00 4.2615519e+00 3.7335558e+00 3.2255440e+00] [-3.4077242e-01 -2.9062793e+00 1.9993424e-01 -3.1009579e+00 2.0294693e+00 7.5756699e-01 -8.3917201e-01 1.2203106e+00]]]]]; ov_res: [[[[[ 5.3768322e-02 -3.0128236e+00 -3.0044503e+00 -2.7580729e-01 3.8022146e-02 -2.0600326e+00 -3.0931506e-01 2.5984907e+00] [-2.0827711e+00 -2.2103727e+00 1.2111701e-01 -1.7496327e+00 -2.2115748e+00 2.5731585e+00 -1.4448282e+00 -8.2927103e+00] [ 2.5185230e+00 8.1624830e-01 5.1463675e+00 -1.0905519e+00 5.4666328e+00 1.6528111e+00 1.8189907e-01 -1.8123008e+00] [-8.2844758e-01 5.0841200e-01 5.1277626e-01 2.6280928e+00 -3.7990286e+00 1.8410097e+00 1.6491051e+00 5.5331922e-01] [ 2.2868104e+00 -3.2711306e-01 -3.3756316e-02 9.1291869e-01 -1.2276769e+00 -3.0171525e+00 -1.3103297e-01 3.7761374e+00] [ 7.6544368e-01 -1.8777841e+00 4.7156649e+00 -5.0787330e-03 2.7691188e+00 1.7161628e+00 -1.6238753e+00 2.6226928e+00] [ 5.4471540e+00 5.7203436e+00 1.4325444e+00 3.3232803e+00 -2.2168994e+00 1.2760152e+00 1.8828830e-01 -2.9178991e+00] [ 5.2417150e+00 -3.7296741e+00 -2.4772844e+00 -3.1329741e+00 -1.8603634e+00 -3.4536827e+00 2.0920482e+00 2.3994210e+00]] [[ 1.7919815e+00 -5.5925827e+00 1.8736498e+00 -2.3103893e+00 3.8116550e+00 -6.2951458e-01 -2.8580444e+00 5.5447068e+00] [ 8.2490559e+00 3.1791718e+00 -5.1923954e-01 -2.3005517e+00 -2.0347829e+00 5.3695526e+00 1.5371052e+00 1.9919673e-01] [ 3.5345230e+00 1.1651169e+00 3.7004342e+00 -2.7407131e+00 -8.0024409e-01 -7.8062505e-02 -3.1098049e+00 4.3030071e-01] [ 5.9531802e-01 -2.6399784e+00 -8.7121487e-01 5.3589139e+00 1.4091026e+00 -2.8106633e-01 -8.1149507e-01 
2.5852375e+00] [ 2.3035729e+00 -3.7890937e+00 1.4386247e+00 -2.7838669e+00 -1.4659081e+00 -5.5927577e+00 1.6986492e-01 -1.2518427e-01] [ 4.1730018e+00 -1.6622572e+00 -9.9109066e-01 6.4326410e+00 -9.2062342e-01 -8.3719778e-01 8.6985695e-01 -4.1138120e+00] [-6.5166864e+00 1.8778318e-01 1.7244676e+00 1.4187141e-01 -8.0342519e-01 1.5385454e+00 3.5802410e+00 2.0921063e+00] [ 4.1272936e+00 8.5737407e-01 2.6518941e-02 -4.6227145e+00 3.1541481e+00 -1.0676829e+00 2.0157015e+00 -7.4745264e+00]] [[ 1.5527030e+00 4.2631559e+00 -5.4733605e+00 -9.9370015e-01 1.4680578e+00 -7.1406841e-01 2.6293330e+00 4.8856821e+00] [ 4.7501624e-01 1.4079932e+00 -2.8825221e+00 -3.4194987e+00 -5.1347089e-01 1.4522936e+00 -2.2385964e+00 5.2975416e-01] [-1.8306276e+00 6.8852763e+00 2.8850455e+00 4.2707543e+00 -2.7724440e+00 -1.1011919e+00 2.6419017e+00 5.0425100e+00] [-2.7060463e+00 2.5490632e+00 4.9416101e-01 1.8507922e+00 -4.3759828e+00 1.7738588e+00 -3.2337778e+00 -4.7384653e+00] [ 1.9983063e+00 3.9623754e+00 -2.7378132e+00 2.5531757e+00 -1.7711879e+00 -5.4803939e+00 -7.0772445e-01 2.2333890e-01] [ 4.4761100e+00 -2.3987642e-01 4.9422812e+00 2.5007207e+00 -4.8171477e+00 2.9707608e+00 -3.1690475e-01 -9.0565431e-01] [-3.1085572e-01 -1.6363895e+00 6.8146741e-01 -6.3502946e+00 1.9036763e+00 4.5403891e+00 1.5836682e+00 -3.1566182e-01] [ 8.3517587e-01 -2.8691441e-02 -5.2406588e+00 -1.2369270e+00 -1.2052459e+00 1.3150930e+00 6.0712111e-01 1.3766818e-01]] [[ 3.5657754e+00 2.9509823e+00 -2.9148259e+00 -1.3018566e+00 -3.5014272e+00 3.1416457e+00 8.2355666e-01 1.7442617e+00] [-2.0762518e-01 5.7614498e+00 -2.7592757e+00 -6.0867329e+00 8.6545267e+00 1.9477521e+00 1.6344851e-01 3.4706298e-01] [-1.2104168e+00 -5.5069976e+00 1.6116997e+00 2.3656645e+00 5.3317618e+00 1.4740919e-01 1.2341621e+00 -1.3090631e+00] [-4.7187415e-01 1.6054373e+00 2.6899202e+00 1.2067592e+00 2.1521726e+00 1.9801235e+00 -3.7913373e-01 3.8901093e+00] [-2.8069682e+00 -3.7570283e-01 5.4080218e-01 3.6964695e+00 1.5551679e+00 2.7383673e+00 
-2.9988797e+00 1.4796133e-01] [ 2.3541031e+00 5.7292497e-01 -4.8618822e+00 -4.3168025e+00 -3.1952417e+00 4.0648937e-02 -2.8674419e+00 2.5758719e+00] [ 1.0470545e+00 2.7618752e+00 3.0303111e+00 6.5809011e-02 3.9105147e-01 -4.1186363e-02 8.1828251e+00 1.5163440e+00] [-1.4370402e+00 8.9454949e-01 5.3719740e+00 9.1811156e-01 1.3630178e+00 4.4081402e+00 2.9980514e+00 8.3815432e+00]] [[ 9.2132521e-01 4.7695026e+00 8.0947483e-01 1.8465285e+00 3.2990868e+00 2.1413534e+00 4.1598301e+00 -4.2539120e+00] [ 3.1137443e+00 -1.9214205e+00 1.4575810e+00 1.3091171e+00 -2.1384029e+00 2.6988690e+00 -7.0238888e-02 -2.8080556e-01] [ 3.2273855e+00 4.5142074e+00 2.9241824e+00 -4.0474563e+00 3.0574903e+00 2.9058931e+00 1.1772537e+01 3.3562922e+00] [-8.6540759e-01 2.6518149e+00 -1.6633409e-01 -6.1341805e+00 -1.5206496e+00 2.1291869e+00 -6.9850862e-01 2.1088486e+00] [ 1.6681577e+00 6.3769650e+00 1.6765949e+00 4.8835306e+00 3.0208912e+00 -4.8007593e-01 -4.5583072e-01 3.2975835e-01] [ 3.3288968e-01 -9.4490707e-02 6.3427019e-01 -1.9644223e+00 2.5377848e+00 8.1538856e-03 -4.2391968e+00 -4.9649119e-02] [ 4.1631973e-01 -8.0352461e-01 2.9554896e+00 3.4064195e+00 1.4002589e+00 3.9765067e+00 -2.9549399e-01 6.9851226e-01] [ 2.3744359e+00 -3.7212617e+00 1.3113087e+00 3.6578436e+00 -2.4354305e+00 -2.5079443e+00 -6.4531674e+00 7.9991269e-01]] [[-1.1048429e+00 -1.4971764e+00 2.7172315e+00 -1.7947186e+00 -1.4283817e+00 -5.1472836e+00 1.7942458e+00 5.2464914e+00] [ 1.2580810e+00 -1.4022774e+00 -1.3107003e+00 -3.9665773e+00 5.6617543e-02 -5.7924271e-01 -2.1213377e+00 9.5137177e+00] [ 4.3843989e+00 -5.9659748e+00 -1.5722773e+00 -5.8501782e+00 5.0571737e+00 -9.4323194e-01 -2.9166682e+00 2.3404582e+00] [-5.7534623e-01 -2.3826768e+00 -3.9971774e+00 9.9177742e-01 9.4789445e-01 3.3238463e+00 3.2761568e-01 1.6292996e+00] [-6.5564191e-01 4.7431598e+00 -1.0334270e+00 4.8441586e+00 -5.6360307e+00 8.7166798e-01 2.1947317e+00 2.3211854e+00] [-2.7927051e+00 3.0421031e+00 1.0299792e+00 5.1695447e+00 1.4423069e+00 
-3.9426200e+00 -1.3470362e+00 1.1525825e+00] [-2.4003923e+00 -3.4800389e-01 -1.8826712e+00 -5.6089640e-01 2.5807285e+00 1.5522887e+00 -4.3982606e+00 -7.3758459e-01] [-3.0825377e+00 3.9463863e+00 3.9576290e+00 -1.6319842e+00 -2.8977253e+00 1.3036022e+00 -6.0891342e+00 1.3282517e+00]] [[ 3.3756034e+00 -1.0082083e+00 -1.5602268e+00 3.6796544e+00 -1.4880133e+00 2.5625072e+00 -1.2042617e+00 4.4783192e+00] [ 9.1509306e-01 -4.6295600e+00 3.6283870e+00 1.3792512e+00 7.1280944e-01 -4.7330351e+00 1.3473120e+00 8.4318519e-01] [-1.7901346e-01 -6.8109310e-01 -1.5012336e+00 -3.4176717e+00 -1.6706522e+00 -2.6142664e+00 6.6439235e-01 -3.8933396e-02] [ 1.1719964e+00 6.7689185e+00 -1.1636848e+00 -5.1457005e+00 1.6788565e+00 3.4004426e-01 9.8644841e-01 -3.4565940e+00] [ 1.9421084e+00 4.6198149e+00 3.3584273e+00 4.6369948e+00 -2.4115269e+00 -2.3490264e+00 1.2134408e+00 -3.2962341e+00] [-2.5687840e+00 3.4177661e-01 -2.3791161e+00 2.9112782e+00 2.8923390e+00 -6.3298903e+00 5.2938933e+00 -3.8064344e+00] [ 6.3621300e-01 -1.7183403e+00 1.6775236e+00 -4.5368123e+00 -2.8993366e+00 2.3571804e+00 4.7437663e+00 3.1197207e+00] [ 4.0188313e+00 -3.7003553e+00 -1.4174724e+00 1.6248784e+00 6.5064340e+00 3.5051091e+00 5.7020621e+00 -2.8687627e+00]] [[ 2.2243963e-01 -1.7584366e+00 1.8996632e+00 5.5618703e-01 2.1981833e+00 2.7907345e+00 -2.1300395e+00 -3.1876857e+00] [-3.2574642e+00 2.5080383e-01 1.5235742e+00 -4.9901196e-01 6.0617948e-01 -2.8575749e+00 2.0464427e+00 -2.0717096e+00] [ 5.7962489e+00 -2.6116371e-03 2.0336938e+00 -3.7984507e+00 2.4001610e+00 -2.6220417e+00 1.9135976e+00 4.4925933e+00] [-2.9444442e+00 2.6613777e+00 6.7764521e-01 -8.9063108e-02 4.8887193e-01 -8.4041584e-01 -1.7261258e+00 -7.4090695e-01] [-1.0121113e+00 7.9761410e-01 3.4187775e+00 -5.6996322e+00 -5.5791173e+00 -2.6698296e+00 4.6991067e+00 3.4766023e+00] [-1.8945569e+00 -2.0339572e-01 5.1781449e+00 6.8906245e+00 4.2684355e+00 -1.9616166e+00 3.1566610e+00 -3.9597981e+00] [-6.1056888e-01 3.0355530e+00 2.8707438e+00 
1.3722954e+00 -1.9091295e+00 4.2615519e+00 3.7335558e+00 3.2255440e+00] [-3.4077242e-01 -2.9062793e+00 1.9993424e-01 -3.1009579e+00 2.0294693e+00 7.5756699e-01 -8.3917201e-01 1.2203106e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 1, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_950.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.2445 0.5433 -0.8680 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.1146 (2,1,1,.,.) = 1.0112 (3,1,1,.,.) = -1.1122 (1,2,1,.,.) = -0.6498 (2,2,1,.,.) = -0.5488 (3,2,1,.,.) = -2.1601 (1,3,1,.,.) = 0.7540 (2,3,1,.,.) = 0.2928 (3,3,1,.,.) = 1.6287 [ CPUFloatType{3,3,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] ... [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]] [[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -3.5225358e+00 -8.9631331e-01 ... 4.3823874e-01 1.6054877e+00 -2.4446948e-01] [-2.4446948e-01 -2.2611544e-01 -5.1717812e-01 ... 
-1.6416097e+00 -4.5520607e-01 -2.4446948e-01] ... [-2.4446948e-01 -5.2317345e-01 -4.7204156e+00 ... 7.3470658e-01 -6.7762548e-01 -2.4446948e-01] [-2.4446948e-01 1.4324920e+00 8.6904472e-01 ... -4.9686319e-01 -1.3383234e+00 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]] [[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 5.8594155e-01 7.6599520e-01 ... -3.2532454e-01 1.0244064e+00 -2.4446948e-01] [-2.4446948e-01 2.7105849e+00 -2.4041853e+00 ... 1.6200534e+00 1.3579679e+00 -2.4446948e-01] ... [-2.4446948e-01 -5.8995724e-02 -2.1012900e+00 ... -7.9092902e-01 -1.4360608e+00 -2.4446948e-01] [-2.4446948e-01 -4.0212774e-01 -2.8231546e-01 ... -5.5723339e-03 -1.3210766e+00 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]] ... [[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.1861839e+00 -2.4842887e-01 ... 4.2775679e-01 5.3341490e-01 -2.4446948e-01] [-2.4446948e-01 -5.4775780e-01 2.7713305e-01 ... -1.4569471e+00 -6.1768520e-01 -2.4446948e-01] ... [-2.4446948e-01 2.6094347e-01 -4.3279302e-01 ... -1.1924634e+00 -6.5655023e-01 -2.4446948e-01] [-2.4446948e-01 -7.2041923e-01 2.1626029e+00 ... -1.4880933e+00 3.0521100e+00 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]] [[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -1.5586265e+00 -2.0977671e+00 ... 1.1064526e+00 -4.4003263e+00 -2.4446948e-01] [-2.4446948e-01 4.9458379e-01 5.0492185e-01 ... 2.3453541e+00 1.9670516e+00 -2.4446948e-01] ... [-2.4446948e-01 1.6433035e+00 -9.7477779e-02 ... -8.3945698e-01 -1.3888735e+00 -2.4446948e-01] [-2.4446948e-01 -9.8903602e-01 1.6539042e+00 ... 
-1.0870200e+00 1.2004968e+00 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]] [[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] ... [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]]] [[[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] ... [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]] [[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 -1.0294425e+00 1.1997259e+00 ... 4.4947624e-02 1.1965096e+00 5.4332978e-01] [ 5.4332978e-01 5.1284552e-02 -6.4631718e-01 ... 1.4194779e+00 -5.8047825e-01 5.4332978e-01] ... [ 5.4332978e-01 2.4552825e-01 7.5896800e-01 ... 1.5477602e+00 -4.3071473e-01 5.4332978e-01] [ 5.4332978e-01 1.3190148e+00 -1.2766466e+00 ... 7.6184380e-01 -3.8839322e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]] [[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 2.5702509e-01 -7.3598486e-01 ... 
1.1676425e-01 -1.0422187e+00 5.4332978e-01] [ 5.4332978e-01 3.9412203e-01 1.6143968e+00 ... 3.1860486e-01 -2.5717390e-01 5.4332978e-01] ... [ 5.4332978e-01 1.4349024e+00 -4.0019745e-01 ... -5.8300096e-01 1.4261326e+00 5.4332978e-01] [ 5.4332978e-01 7.3211843e-01 5.7102084e-02 ... -4.6366197e-01 2.3136456e+00 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]] ... [[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 -1.4310539e-02 9.5969892e-01 ... 4.0566760e-01 -2.2249749e+00 5.4332978e-01] [ 5.4332978e-01 4.3804884e-01 1.4112712e+00 ... 1.9264498e+00 6.9249904e-01 5.4332978e-01] ... [ 5.4332978e-01 1.3772765e+00 -1.7271872e+00 ... 8.5674822e-01 9.1040969e-01 5.4332978e-01] [ 5.4332978e-01 6.1422759e-01 1.8700063e-02 ... 2.6690273e+00 1.3204587e+00 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]] [[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 1.1455140e+00 -1.0918422e+00 ... -7.1137947e-01 1.8657293e+00 5.4332978e-01] [ 5.4332978e-01 1.2867811e-01 1.1216760e+00 ... -4.7028023e-01 5.4496396e-01 5.4332978e-01] ... [ 5.4332978e-01 -3.8120311e-01 -1.6242164e-01 ... 2.0920084e+00 1.8238633e+00 5.4332978e-01] [ 5.4332978e-01 -6.1746603e-01 7.1400106e-01 ... 2.4983742e+00 1.1478593e+00 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]] [[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] ... [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 
5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]]] [[[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] ... [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]] [[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -9.2478275e+00 -1.9492927e+00 ... 5.8160013e-01 3.7447541e+00 -8.6799592e-01] [-8.6799592e-01 -1.5616198e+00 -2.8804228e+00 ... -2.3517368e+00 -2.1992016e+00 -8.6799592e-01] ... [-8.6799592e-01 -2.1946852e+00 -9.9960127e+00 ... 2.2295656e+00 -3.4240341e+00 -8.6799592e-01] [-8.6799592e-01 2.8327475e+00 -6.9663125e-01 ... -1.8882601e+00 -4.0025344e+00 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]] [[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 9.0167016e-01 -7.3928738e-01 ... -1.5959982e+00 -1.5515268e-02 -8.6799592e-01] [-8.6799592e-01 5.0230479e+00 -4.3280306e+00 ... 2.7417893e+00 1.1770387e+00 -8.6799592e-01] ... [-8.6799592e-01 1.2058561e+00 -5.8957767e+00 ... -3.7369678e+00 -2.1158669e+00 -8.6799592e-01] [-8.6799592e-01 -6.9535941e-01 -1.5909774e+00 ... -1.3398283e+00 -9.8055029e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]] ... [[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... 
-8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -5.5299249e+00 -3.6951917e-01 ... 5.4811960e-01 -2.6988261e+00 -8.6799592e-01] [-8.6799592e-01 -1.7933717e+00 7.0396405e-01 ... -1.7899848e+00 -1.7594323e+00 -8.6799592e-01] ... [-8.6799592e-01 1.1110840e+00 -4.0840955e+00 ... -2.4270477e+00 -1.2236564e+00 -8.6799592e-01] [-8.6799592e-01 -1.4904364e+00 3.8699858e+00 ... -1.0615598e+00 6.4988933e+00 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]] [[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -2.9368863e+00 -6.1622210e+00 ... 3.6301452e-01 -7.5720820e+00 -8.6799592e-01] [-8.6799592e-01 1.5056711e-01 1.4596255e+00 ... 3.6820314e+00 3.9788115e+00 -8.6799592e-01] ... [-8.6799592e-01 2.0753579e+00 -1.7544148e+00 ... 1.8653315e-01 -1.6033013e+00 -8.6799592e-01] [-8.6799592e-01 -3.6898770e+00 3.0254259e+00 ... -1.3615513e-01 2.3354568e+00 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]] [[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] ... [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]]]]]; ov_res: [[[[[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] ... 
[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]] [[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -3.5225358e+00 -8.9631331e-01 ... 4.3823874e-01 1.6054877e+00 -2.4446948e-01] [-2.4446948e-01 -2.2611544e-01 -5.1717812e-01 ... -1.6416097e+00 -4.5520607e-01 -2.4446948e-01] ... [-2.4446948e-01 -5.2317345e-01 -4.7204156e+00 ... 7.3470658e-01 -6.7762548e-01 -2.4446948e-01] [-2.4446948e-01 1.4324920e+00 8.6904472e-01 ... -4.9686319e-01 -1.3383234e+00 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]] [[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 5.8594155e-01 7.6599520e-01 ... -3.2532454e-01 1.0244064e+00 -2.4446948e-01] [-2.4446948e-01 2.7105849e+00 -2.4041853e+00 ... 1.6200534e+00 1.3579679e+00 -2.4446948e-01] ... [-2.4446948e-01 -5.8995724e-02 -2.1012900e+00 ... -7.9092902e-01 -1.4360608e+00 -2.4446948e-01] [-2.4446948e-01 -4.0212774e-01 -2.8231546e-01 ... -5.5723339e-03 -1.3210766e+00 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]] ... [[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.1861839e+00 -2.4842887e-01 ... 4.2775679e-01 5.3341490e-01 -2.4446948e-01] [-2.4446948e-01 -5.4775780e-01 2.7713305e-01 ... -1.4569471e+00 -6.1768520e-01 -2.4446948e-01] ... [-2.4446948e-01 2.6094347e-01 -4.3279302e-01 ... -1.1924634e+00 -6.5655023e-01 -2.4446948e-01] [-2.4446948e-01 -7.2041923e-01 2.1626029e+00 ... -1.4880933e+00 3.0521100e+00 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... 
-2.4446948e-01 -2.4446948e-01 -2.4446948e-01]] [[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -1.5586265e+00 -2.0977671e+00 ... 1.1064526e+00 -4.4003263e+00 -2.4446948e-01] [-2.4446948e-01 4.9458379e-01 5.0492185e-01 ... 2.3453541e+00 1.9670516e+00 -2.4446948e-01] ... [-2.4446948e-01 1.6433035e+00 -9.7477779e-02 ... -8.3945698e-01 -1.3888735e+00 -2.4446948e-01] [-2.4446948e-01 -9.8903602e-01 1.6539042e+00 ... -1.0870200e+00 1.2004968e+00 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]] [[-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] ... [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01] [-2.4446948e-01 -2.4446948e-01 -2.4446948e-01 ... -2.4446948e-01 -2.4446948e-01 -2.4446948e-01]]] [[[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] ... [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]] [[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 -1.0294425e+00 1.1997259e+00 ... 4.4947624e-02 1.1965096e+00 5.4332978e-01] [ 5.4332978e-01 5.1284552e-02 -6.4631718e-01 ... 
1.4194779e+00 -5.8047825e-01 5.4332978e-01] ... [ 5.4332978e-01 2.4552825e-01 7.5896800e-01 ... 1.5477602e+00 -4.3071473e-01 5.4332978e-01] [ 5.4332978e-01 1.3190148e+00 -1.2766466e+00 ... 7.6184380e-01 -3.8839322e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]] [[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 2.5702509e-01 -7.3598486e-01 ... 1.1676425e-01 -1.0422187e+00 5.4332978e-01] [ 5.4332978e-01 3.9412203e-01 1.6143968e+00 ... 3.1860486e-01 -2.5717390e-01 5.4332978e-01] ... [ 5.4332978e-01 1.4349024e+00 -4.0019745e-01 ... -5.8300096e-01 1.4261326e+00 5.4332978e-01] [ 5.4332978e-01 7.3211843e-01 5.7102084e-02 ... -4.6366197e-01 2.3136456e+00 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]] ... [[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 -1.4310539e-02 9.5969892e-01 ... 4.0566760e-01 -2.2249749e+00 5.4332978e-01] [ 5.4332978e-01 4.3804884e-01 1.4112712e+00 ... 1.9264498e+00 6.9249904e-01 5.4332978e-01] ... [ 5.4332978e-01 1.3772765e+00 -1.7271872e+00 ... 8.5674822e-01 9.1040969e-01 5.4332978e-01] [ 5.4332978e-01 6.1422759e-01 1.8700063e-02 ... 2.6690273e+00 1.3204587e+00 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]] [[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 1.1455140e+00 -1.0918422e+00 ... -7.1137947e-01 1.8657293e+00 5.4332978e-01] [ 5.4332978e-01 1.2867811e-01 1.1216760e+00 ... -4.7028023e-01 5.4496396e-01 5.4332978e-01] ... [ 5.4332978e-01 -3.8120311e-01 -1.6242164e-01 ... 2.0920084e+00 1.8238633e+00 5.4332978e-01] [ 5.4332978e-01 -6.1746603e-01 7.1400106e-01 ... 2.4983742e+00 1.1478593e+00 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 
5.4332978e-01 5.4332978e-01 5.4332978e-01]] [[ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] ... [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01] [ 5.4332978e-01 5.4332978e-01 5.4332978e-01 ... 5.4332978e-01 5.4332978e-01 5.4332978e-01]]] [[[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] ... [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]] [[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -9.2478275e+00 -1.9492927e+00 ... 5.8160013e-01 3.7447541e+00 -8.6799592e-01] [-8.6799592e-01 -1.5616198e+00 -2.8804228e+00 ... -2.3517368e+00 -2.1992016e+00 -8.6799592e-01] ... [-8.6799592e-01 -2.1946852e+00 -9.9960127e+00 ... 2.2295656e+00 -3.4240341e+00 -8.6799592e-01] [-8.6799592e-01 2.8327475e+00 -6.9663125e-01 ... -1.8882601e+00 -4.0025344e+00 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]] [[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 9.0167016e-01 -7.3928738e-01 ... -1.5959982e+00 -1.5515268e-02 -8.6799592e-01] [-8.6799592e-01 5.0230479e+00 -4.3280306e+00 ... 
2.7417893e+00 1.1770387e+00 -8.6799592e-01] ... [-8.6799592e-01 1.2058561e+00 -5.8957767e+00 ... -3.7369678e+00 -2.1158669e+00 -8.6799592e-01] [-8.6799592e-01 -6.9535941e-01 -1.5909774e+00 ... -1.3398283e+00 -9.8055029e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]] ... [[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -5.5299249e+00 -3.6951917e-01 ... 5.4811960e-01 -2.6988261e+00 -8.6799592e-01] [-8.6799592e-01 -1.7933717e+00 7.0396405e-01 ... -1.7899848e+00 -1.7594323e+00 -8.6799592e-01] ... [-8.6799592e-01 1.1110840e+00 -4.0840955e+00 ... -2.4270477e+00 -1.2236564e+00 -8.6799592e-01] [-8.6799592e-01 -1.4904364e+00 3.8699858e+00 ... -1.0615598e+00 6.4988933e+00 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]] [[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -2.9368863e+00 -6.1622210e+00 ... 3.6301452e-01 -7.5720820e+00 -8.6799592e-01] [-8.6799592e-01 1.5056711e-01 1.4596255e+00 ... 3.6820314e+00 3.9788115e+00 -8.6799592e-01] ... [-8.6799592e-01 2.0753579e+00 -1.7544148e+00 ... 1.8653315e-01 -1.6033013e+00 -8.6799592e-01] [-8.6799592e-01 -3.6898770e+00 3.0254259e+00 ... -1.3615513e-01 2.3354568e+00 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]] [[-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] ... [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... 
-8.6799592e-01 -8.6799592e-01 -8.6799592e-01] [-8.6799592e-01 -8.6799592e-01 -8.6799592e-01 ... -8.6799592e-01 -8.6799592e-01 -8.6799592e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [3, 1, 3], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_952.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3, 1, 3]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.536524}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.2278 (2,1,1,.,.) = 0.3891 (3,1,1,.,.) = 0.01 * 5.3341 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[-0.54954624 -0.47023904 -0.2734487 -1.0558972 ] [-0.16186634 -0.87726194 -0.89194167 -0.5712919 ] [-1.2348108 -0.7313633 -0.9714359 -0.14324915] [-0.1362553 -0.83210695 -0.3126274 -0.48684242] [-1.0048927 -0.5846844 0.09235811 -0.4097011 ] [-0.51354235 -0.8680656 -1.1455431 -0.5617783 ] [-0.36897013 -0.2984246 -0.77431655 -0.31639287] [-1.1804647 -0.60639274 -0.58560604 -0.6330674 ]] [[-0.10883951 -0.64239925 -0.27937657 -0.59354293] [-0.0598968 -0.90172446 -1.2656813 -0.24793568] [-1.0355138 -0.5680964 -0.6934533 0.15487146] [-0.3329339 -0.8561135 0.19602609 -0.14721778] [-1.2315028 -0.92664814 -0.32702172 0.2520061 ] [-0.26122388 -0.28407013 0.19923449 -0.40257895] [-0.53919476 -0.3265087 -0.8185253 -0.7048122 ] [ 0.06886208 -1.3020182 -1.4264357 0.3410663 ]] [[-1.0432429 -0.38639504 -0.14727443 -0.4696626 ] [-1.1165948 0.1730982 -1.1206412 -0.3671439 ] [-0.8564834 -0.3209582 -0.65001035 -1.3966746 ] [ 0.09640771 -0.0268423 -1.364497 -0.999869 ] 
[-0.6828493 -0.29630515 -0.28762782 -0.67001766] [-0.7273525 -0.39134276 0.43889105 -0.22973719] [ 0.29471135 0.08039027 0.0340178 -0.87654483] [ 0.34915584 0.08407629 -0.06384134 -0.7847209 ]] [[-0.9271172 -0.2949718 0.05276 -1.3944457 ] [-0.42565066 -0.6630093 -1.411182 -0.02378434] [-0.43848178 -0.36087668 -1.4226718 -0.64076734] [-0.7661969 0.70955575 0.05479163 -0.7391506 ] [-0.85899895 -0.8724216 -0.04419035 -1.0765892 ] [-0.98875296 -0.82329774 0.63371956 -0.2726079 ] [ 0.18718725 -0.16605899 -0.3347589 -0.7135259 ] [-0.35385156 -0.24825892 0.04183847 -0.07382402]]]]]; ov_res: [[[[[-0.54954624 -0.47023904 -0.2734487 -1.0558972 ] [-0.16186634 -0.87726194 -0.89194167 -0.5712919 ] [-1.2348108 -0.7313633 -0.9714359 -0.14324915] [-0.1362553 -0.83210695 -0.3126274 -0.48684242] [-1.0048927 -0.5846844 0.09235811 -0.4097011 ] [-0.51354235 -0.8680656 -1.1455431 -0.5617783 ] [-0.36897013 -0.2984246 -0.77431655 -0.31639287] [-1.1804647 -0.60639274 -0.58560604 -0.6330674 ]] [[-0.10883951 -0.64239925 -0.27937657 -0.59354293] [-0.0598968 -0.90172446 -1.2656813 -0.24793568] [-1.0355138 -0.5680964 -0.6934533 0.15487146] [-0.3329339 -0.8561135 0.19602609 -0.14721778] [-1.2315028 -0.92664814 -0.32702172 0.2520061 ] [-0.26122388 -0.28407013 0.19923449 -0.40257895] [-0.53919476 -0.3265087 -0.8185253 -0.7048122 ] [ 0.06886208 -1.3020182 -1.4264357 0.3410663 ]] [[-1.0432429 -0.38639504 -0.14727443 -0.4696626 ] [-1.1165948 0.1730982 -1.1206412 -0.3671439 ] [-0.8564834 -0.3209582 -0.65001035 -1.3966746 ] [ 0.09640771 -0.0268423 -1.364497 -0.999869 ] [-0.6828493 -0.29630515 -0.28762782 -0.67001766] [-0.7273525 -0.39134276 0.43889105 -0.22973719] [ 0.29471135 0.08039027 0.0340178 -0.87654483] [ 0.34915584 0.08407629 -0.06384134 -0.7847209 ]] [[-0.9271172 -0.2949718 0.05276 -1.3944457 ] [-0.42565066 -0.6630093 -1.411182 -0.02378434] [-0.43848178 -0.36087668 -1.4226718 -0.64076734] [-0.7661969 0.70955575 0.05479163 -0.7391506 ] [-0.85899895 -0.8724216 -0.04419035 -1.0765892 ] 
[-0.98875296 -0.82329774 0.63371956 -0.2726079 ] [ 0.18718725 -0.16605899 -0.3347589 -0.7135259 ] [-0.35385156 -0.24825892 0.04183847 -0.07382402]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [3, 1, 3], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_954.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3, 1, 3]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.7052 -1.3589 -1.0093 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.001 * 2.1806 (2,1,1,.,.) = 0.9607 (3,1,1,.,.) = -1.5329 (1,2,1,.,.) = -1.3683 (2,2,1,.,.) = 0.9093 (3,2,1,.,.) = 0.7250 (1,3,1,.,.) = 0.3268 (2,3,1,.,.) = 1.5864 (3,3,1,.,.) = -1.6441 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]] [[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 
0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]] [[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]] ... [[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]] [[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]] [[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]]] [[[-1.3589159 -1.3589159 -1.3589159 ... 
-1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]] [[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]] [[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]] ... [[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]] [[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... 
[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]] [[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]]] [[[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]] [[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]] [[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... 
-1.0092758 -1.0092758 -1.0092758 ]] ... [[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]] [[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]] [[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]]]]]; ov_res: [[[[[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]] [[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 
0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]] [[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]] ... [[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]] [[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]] [[ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] ... [ 0.70516473 0.70516473 0.70516473 ... 
0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473] [ 0.70516473 0.70516473 0.70516473 ... 0.70516473 0.70516473 0.70516473]]] [[[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]] [[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]] [[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]] ... [[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... 
-1.3589159 -1.3589159 -1.3589159 ]] [[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]] [[-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] ... [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ] [-1.3589159 -1.3589159 -1.3589159 ... -1.3589159 -1.3589159 -1.3589159 ]]] [[[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]] [[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]] [[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... 
-1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]] ... [[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]] [[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]] [[-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] ... [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ] [-1.0092758 -1.0092758 -1.0092758 ... -1.0092758 -1.0092758 -1.0092758 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_956.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.609588}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.6760 (2,1,1,.,.) = -1.5521 (3,1,1,.,.) = 1.0199 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 2.81880140e+00 2.78686571e+00 -1.03350925e+00 -6.31318390e-01 -2.97893524e+00 2.57464617e-01 -2.49438357e+00 -1.30114698e+00 -4.61942136e-01 3.45582509e+00] [ 7.25291789e-01 6.90330148e-01 9.59054232e-02 -8.85948360e-01 2.22485495e+00 1.59413838e+00 -9.04667437e-01 1.45731187e+00 4.74417591e+00 2.43803692e+00] [-1.55332160e+00 -1.39328432e+00 1.32442272e+00 -1.26231074e+00 1.14131451e+00 -1.63144231e+00 -2.79289675e+00 -4.07758999e+00 2.06577611e+00 -1.95462704e-01] [-2.60964918e+00 -1.58494759e+00 5.66159058e+00 5.61538219e-01 -2.72679543e+00 -2.44528818e+00 1.26585758e+00 -4.22990263e-01 -3.86717534e+00 -1.66365576e+00] [-4.72097349e+00 1.83874488e+00 -1.61509490e+00 -1.42489529e+00 -3.57183874e-01 6.52791619e-01 3.33339185e-01 -3.87959599e-01 2.25434756e+00 -8.85487497e-01] [ 3.96098566e+00 -1.46131158e+00 3.60065961e+00 2.38174939e+00 2.81914353e+00 4.61358607e-01 -1.98281503e+00 1.34543383e+00 1.18618011e+00 8.89534533e-01] [ 7.39822626e-01 
4.44255304e+00 1.84168696e-01 4.99921232e-01 -3.80090475e-01 2.52428484e+00 -1.97808695e+00 -6.96538031e-01 1.91866589e+00 -1.46217465e-01] [ 9.28976655e-01 1.67641830e+00 1.24663472e+00 -3.15537047e+00 1.76507682e-01 3.62785864e+00 -2.80433321e+00 5.62746584e-01 -5.27767849e+00 2.87126136e+00] [ 2.99356729e-01 2.57593483e-01 -2.95875621e+00 -1.02653790e+00 -2.09868050e+00 -1.34003162e-03 1.19373345e+00 2.05137753e+00 -3.50095391e+00 1.53827047e+00] [-4.45028973e+00 2.50812626e+00 -3.45133138e+00 1.93317604e+00 2.29399681e+00 2.17641473e+00 2.65390182e+00 1.90369070e-01 3.05644178e+00 -1.37572503e+00]] [[ 6.21617079e+00 -1.00402522e+00 2.70180130e+00 3.38826323e+00 1.84252405e+00 1.28100121e+00 5.15145779e-01 1.32728648e+00 1.98016095e+00 1.44381499e+00] [ 1.13922441e+00 3.36870265e+00 1.58629715e+00 -1.02382207e+00 -1.27575374e+00 7.06736565e-01 6.36260927e-01 2.98586941e+00 -4.91749644e-02 4.44757998e-01] [ 2.33302641e+00 3.09269953e+00 1.52114952e+00 -3.76448417e+00 1.81782198e+00 1.85411263e+00 1.71126390e+00 3.46231639e-01 -1.52994061e+00 3.04132175e+00] [ 2.84809709e+00 7.63495564e-02 2.02760911e+00 3.34930730e+00 2.41243631e-01 1.16643286e+00 3.52394176e+00 -1.55249333e+00 -6.46027029e-01 1.90172887e+00] [ 6.52243257e-01 1.03069127e+00 3.03367162e+00 4.35495901e+00 8.74834895e-01 1.50039673e+00 4.04998016e+00 -7.16498315e-01 1.49830294e+00 4.20832485e-01] [-5.01506865e-01 1.81308842e+00 3.52156967e-01 2.37899113e+00 -5.50204337e-01 3.55342007e+00 3.15070033e+00 -3.22797918e+00 1.45266891e+00 1.56808424e+00] [ 2.65652204e+00 2.47797042e-01 3.35085064e-01 3.14515233e-02 2.21584797e+00 -1.28644419e+00 9.45631862e-01 -3.48151326e+00 3.24034953e+00 7.33177125e-01] [ 5.35143256e-01 8.72998476e-01 -2.21289635e-01 2.31362605e+00 2.99950385e+00 6.24162138e-01 2.35207963e+00 -5.25597155e-01 -3.99910951e+00 -2.21442723e+00] [ 1.91515684e+00 5.35553122e+00 1.03437543e+00 5.28180182e-01 -1.01958823e+00 -3.62464309e+00 -3.45831275e+00 -3.62605035e-01 1.04211390e-01 
-2.54080105e+00] [-1.02107310e+00 -2.53306270e+00 2.00656652e+00 -3.89432192e-01 -1.64097548e-01 -1.07092452e+00 -1.23379064e+00 -2.40057707e-02 1.95080101e-01 -1.72098351e+00]] [[ 2.78815150e+00 6.72766209e-01 2.60679245e+00 -2.14154720e+00 1.10972977e+00 3.14215994e+00 5.53379238e-01 -2.46403074e+00 -1.81085587e+00 -4.34028208e-01] [ 1.60843062e+00 -2.15086007e+00 -3.23654771e+00 -5.81967890e-01 1.57465792e+00 4.61209106e+00 -3.52715564e+00 2.08595610e+00 -6.14852011e-01 4.43839133e-01] [ 3.64581060e+00 2.63446069e+00 -1.42437840e+00 7.89804399e-01 1.31376779e+00 -1.79473138e+00 1.00701690e+00 -8.85880589e-02 -3.35124969e-01 9.02370930e-01] [-1.54961324e+00 8.32190514e-01 -1.99374032e+00 -3.14692080e-01 -3.74589181e+00 8.22351515e-01 7.59193182e-01 -1.11606455e+00 -1.17507267e+00 -2.80474043e+00] [ 2.65574425e-01 1.60811520e+00 4.22168016e-01 -4.04417574e-01 2.03575897e+00 -6.29975021e-01 3.67481995e+00 -1.52120447e+00 5.11601925e-01 -1.19422174e+00] [-2.12083340e+00 2.09423184e+00 3.34810805e+00 -4.64460671e-01 -1.08445585e-01 1.66487598e+00 -6.38568938e-01 -9.63376939e-01 -2.34694386e+00 7.28681028e-01] [-2.48593926e+00 -2.69260645e+00 2.08565295e-01 -8.73907268e-01 1.47080731e+00 1.75498581e+00 2.72217304e-01 3.35455632e+00 1.04129434e+00 1.61602545e+00] [-1.16732025e+00 -4.98312664e+00 4.67689276e-01 6.42348707e-01 -1.56110620e+00 -1.66369367e+00 2.45033050e+00 -6.11629307e-01 -1.01769066e+00 -3.71645427e+00] [-5.75816214e-01 -1.06685042e-01 3.13770294e-01 1.25978500e-01 -8.54555964e-02 -1.99456096e+00 -2.79421449e-01 3.98020673e+00 1.33632874e+00 -1.28854442e+00] [-5.26690066e-01 2.86022353e+00 -1.25190377e+00 -3.78286302e-01 6.11039996e-01 3.35477686e+00 3.73232651e+00 -7.60503352e-01 -1.31623077e+00 1.43117309e+00]] [[ 1.93230438e+00 -2.94842720e-01 2.93135548e+00 2.35749841e+00 1.41512108e+00 -2.14636445e-01 3.45832396e+00 1.47437561e+00 -8.22438419e-01 9.85588670e-01] [ 1.36284590e+00 2.38964081e+00 -1.03899908e+00 2.21778536e+00 -2.67630911e+00 
1.73975396e+00 1.82181692e+00 5.44675231e-01 8.65743041e-01 1.92746902e+00] [ 7.32772887e-01 1.87878698e-01 -7.31489062e-02 9.09855425e-01 3.56461716e+00 5.19973636e-01 -3.45740318e-02 4.25459909e+00 -1.68094933e-01 -4.60117626e+00] [-1.33022189e-01 -2.37511659e+00 5.22078323e+00 -1.22170806e+00 3.13832688e+00 3.92448306e+00 1.76671171e+00 8.48646164e-01 4.98335004e-01 -1.13377595e+00] [ 2.82629776e+00 1.57872748e+00 -6.16387308e-01 -5.55021942e-01 1.47519970e+00 1.76291674e-01 -7.17148364e-01 1.87712479e+00 -2.59728527e+00 8.50000739e-01] [ 1.61700082e+00 3.72896600e+00 1.87977552e+00 5.05116999e-01 -3.30213714e+00 2.66994596e+00 3.11253595e+00 -8.06544363e-01 2.52897811e+00 -7.60007799e-01] [-6.31576777e-02 7.04763889e-01 1.40070033e+00 -1.11010218e+00 1.29231262e+00 6.71676636e-01 1.54204464e+00 4.22998905e+00 5.28607368e-01 4.76967752e-01] [-1.24062657e+00 3.72546458e+00 2.74802113e+00 2.36463046e+00 -1.89113951e+00 1.13869452e+00 4.05700397e+00 3.55474472e+00 1.26018453e+00 -2.85545397e+00] [ 9.70217705e-01 -1.28745317e-01 1.18956161e+00 3.00977540e+00 1.49744987e+00 6.57789350e-01 3.20034528e+00 -1.41527128e+00 7.63228238e-01 1.20156908e+00] [ 9.05951798e-01 2.05543470e+00 3.60298800e+00 3.42315483e+00 1.43359256e+00 4.87495375e+00 2.74006176e+00 5.97137392e-01 2.82257128e+00 2.22664213e+00]] [[ 4.06532669e+00 -1.57056546e+00 -8.93912971e-01 1.58292741e-01 8.61447215e-01 7.87977338e-01 -2.95731008e-01 2.34776044e+00 -1.78245378e+00 1.13082290e+00] [ 5.91165304e-01 3.20750862e-01 9.89188075e-01 4.20193815e+00 1.37546897e+00 1.63767171e+00 5.56860268e-01 -5.37570894e-01 3.66017723e+00 -7.26853192e-01] [ 9.83463168e-01 -2.23980165e+00 1.65935969e+00 8.43185425e-01 1.67548716e-01 2.84922957e-01 5.10505974e-01 -1.37727571e+00 2.14612579e+00 9.61387515e-01] [-1.46226764e+00 8.71135950e-01 1.35480857e+00 8.02462876e-01 -1.41042805e+00 -2.68483162e-03 3.03509831e-01 -2.61476135e+00 3.65326571e+00 3.25224972e+00] [ 3.35400701e-01 1.89157605e+00 6.75079942e-01 
2.78425097e+00 1.24977124e+00 -1.63139534e+00 6.18952751e-01 -2.73638844e-01 2.69165397e-01 -1.40902781e+00] [-9.54492271e-01 3.48774493e-01 2.97782230e+00 -2.80939484e+00 1.35551929e+00 -1.39491558e+00 1.82791442e-01 9.64262962e-01 -9.05750692e-01 1.34120739e+00] [-1.55726886e+00 1.06628168e+00 2.31037664e+00 1.69964600e+00 -1.76952410e+00 2.84864140e+00 1.34193826e+00 -1.80729413e+00 5.05411029e-02 -2.07826138e+00] [ 2.57273507e+00 2.22602248e+00 5.38747728e-01 3.11037469e+00 1.04104090e+00 -6.05716884e-01 2.28650379e+00 1.09967494e+00 -1.30304241e+00 4.73017073e+00] [ 7.12597191e-01 -3.17042089e+00 2.55486560e+00 3.86612916e+00 -1.57190180e+00 1.82150006e+00 2.82010627e+00 -1.25940442e+00 7.05145454e+00 -1.64216971e+00] [-1.87519503e+00 -1.96117997e-01 1.82972360e+00 1.14704633e+00 1.57699847e+00 -1.73607230e+00 3.09023547e+00 1.25357425e+00 -9.88857090e-01 -1.31894827e-01]] [[ 4.21124649e+00 -4.25684392e-01 -6.84761703e-01 9.56406951e-01 3.81782269e+00 -1.08541346e+00 1.73019028e+00 -1.47308898e+00 4.77648354e+00 2.49577665e+00] [-2.51354074e+00 -1.68612099e+00 3.96819949e+00 1.58213139e+00 -2.08789766e-01 1.05580533e+00 6.46736920e-01 2.80410320e-01 1.14071739e+00 -2.40873551e+00] [ 3.70393968e+00 2.18304181e+00 1.26045853e-01 2.77172041e+00 2.37529397e+00 -2.00346327e+00 8.15785885e-01 -3.13061213e+00 3.05868816e+00 -2.55802774e+00] [-7.43246138e-01 -1.81606960e+00 3.38055801e+00 -6.86098516e-01 2.04548478e+00 2.41533542e+00 1.92483854e+00 2.40390158e+00 -1.10905743e+00 -3.97068572e+00] [-2.34003484e-01 1.35815501e+00 1.02118182e+00 3.38027239e+00 -6.99197352e-01 -1.30132580e+00 -5.15353739e-01 2.61909866e+00 -1.69886708e+00 1.79909492e+00] [ 3.99605393e-01 -3.19888973e+00 4.65981483e+00 -1.57133269e+00 2.52589011e+00 -7.90789545e-01 2.16774511e+00 -1.51351047e+00 1.37333465e+00 -3.50800753e-01] [ 2.99790549e+00 1.73126668e-01 -8.11817706e-01 -1.19014049e+00 2.19037366e+00 1.83586335e+00 -5.31399846e-02 1.68331432e+00 6.97166920e-02 -3.08792758e+00] [ 
5.92566073e-01 1.94667292e+00 -3.15935421e+00 -1.86402297e+00 1.15686059e-02 9.79733467e-02 5.98185253e+00 2.23651528e+00 1.78208947e+00 1.03550792e+00] [-6.29850328e-01 -2.62402654e+00 3.35698748e+00 1.27675146e-01 2.12783337e+00 -1.64466619e+00 -1.09111452e+00 -7.41249740e-01 1.36729598e-01 -1.69108629e+00] [ 1.07217729e-01 7.71433830e-01 4.32382202e+00 -1.46105647e+00 5.05760145e+00 1.84822798e+00 8.24239194e-01 2.27089143e+00 -1.00130844e+00 6.50648594e-01]] [[-2.60252929e+00 1.78625083e+00 2.35133481e+00 8.83300424e-01 -1.01586437e+00 -6.14167452e-02 1.88302684e+00 1.51104951e+00 -4.27727163e-01 1.20425344e+00] [-1.00590634e+00 -5.40777445e-02 -1.97945237e-01 -1.02519393e+00 -2.06516671e+00 1.32321405e+00 -3.22007477e-01 5.83595562e+00 -1.38371921e+00 3.60870099e+00] [ 2.66128153e-01 -2.46533704e+00 2.97301507e+00 1.42134786e+00 -2.69033003e+00 1.35660791e+00 -2.58088052e-01 -1.01477313e+00 -3.56499457e+00 2.66809750e+00] [ 5.38892567e-01 -8.59144330e-02 9.23874259e-01 -1.15747571e+00 1.39773583e+00 -1.51980519e-02 7.38427758e-01 2.05436802e+00 1.40615690e+00 6.99691534e-01] [ 7.46222496e-01 -1.53135657e-02 8.22148085e-01 5.94133198e-01 1.22005332e+00 2.66486096e+00 1.78737783e+00 4.17448640e-01 4.55715835e-01 2.78895831e+00] [ 1.29331589e+00 2.49941558e-01 2.10087568e-01 4.23995304e+00 -1.30406439e-01 -2.74323285e-01 2.52745581e+00 -2.69047594e+00 2.16418910e+00 9.70722258e-01] [ 1.47289395e+00 2.40694880e+00 3.84933114e-01 -1.54673934e-01 1.30845499e+00 5.15709877e-01 -4.31894922e+00 -2.75534511e+00 2.47170424e+00 -1.33925748e+00] [-2.24856591e+00 2.53072596e+00 -3.00178230e-01 -2.68981600e+00 -8.49984705e-01 2.15383887e+00 -1.04577422e+00 2.40313816e+00 2.54179430e+00 1.29295659e+00] [ 4.56459284e-01 6.63787544e-01 -1.12947583e-01 3.90736055e+00 -1.05715334e-01 -2.41490769e+00 2.02216029e+00 1.52794933e+00 -2.78159451e+00 2.06475019e+00] [-1.46204209e+00 3.16789031e+00 -2.07029533e+00 -3.05636704e-01 -3.09447598e+00 2.81874108e+00 9.96552229e-01 
-2.01795173e+00 -4.13194609e+00 -2.15823627e+00]] [[ 9.71555054e-01 -4.56353605e-01 -2.90240765e-01 1.57186127e+00 -1.06639218e+00 1.64001107e+00 -3.34804416e-01 -1.06469011e+00 -1.47198343e+00 3.59649277e+00] [-1.43063664e-01 -1.12947607e+00 -8.18181634e-02 2.57318735e+00 2.42193580e-01 2.40320468e+00 -1.91952562e+00 -8.62450302e-01 2.27396846e+00 3.85072684e+00] [ 2.76757193e+00 -2.36520767e+00 -1.47285151e+00 -3.75374138e-01 3.88358355e-01 -8.21981609e-01 -7.58084834e-01 -2.84505868e+00 6.41258359e-02 2.55187893e+00] [ 3.64718020e-01 6.13430858e-01 4.04221416e-01 9.99842763e-01 1.19762337e+00 1.09743106e+00 -2.17133403e+00 -1.74284959e+00 5.76300621e-01 -4.21868503e-01] [ 1.22154140e+00 2.02747416e+00 2.67580533e+00 -1.36286497e+00 -1.43210340e+00 1.65672946e+00 2.45822334e+00 -1.99148476e-01 -2.47615981e+00 -1.08326018e-01] [-3.06731367e+00 3.73758483e+00 6.00663126e-01 1.63451743e+00 2.00050020e+00 1.40325356e+00 -2.40999031e+00 4.67343950e+00 3.56040478e-01 -1.71222520e+00] [-2.81309724e+00 3.24810028e+00 -6.70463860e-01 3.60418940e+00 6.22915983e+00 2.74995518e+00 1.58514071e+00 3.83731723e+00 1.83132100e+00 1.50780708e-01] [-7.32970893e-01 2.51930666e+00 7.06344843e-01 5.97859478e+00 -9.74963307e-02 1.73831630e+00 1.33638930e+00 3.93761969e+00 3.18958139e+00 -1.46940053e-01] [ 4.56198502e+00 -2.97584414e-01 2.30829787e+00 1.57527328e+00 1.37224627e+00 1.10265732e-01 1.12775266e+00 9.75237310e-01 3.71820390e-01 -3.86357188e+00] [ 4.17087746e+00 3.62591505e+00 1.01342368e+00 9.41438675e-01 2.22254127e-01 1.15634322e+00 4.98772055e-01 6.08156204e-01 2.65863013e+00 1.23209977e+00]]]]]; ov_res: [[[[[ 2.81880140e+00 2.78686571e+00 -1.03350925e+00 -6.31318390e-01 -2.97893524e+00 2.57464617e-01 -2.49438357e+00 -1.30114698e+00 -4.61942136e-01 3.45582509e+00] [ 7.25291789e-01 6.90330148e-01 9.59054232e-02 -8.85948360e-01 2.22485495e+00 1.59413838e+00 -9.04667437e-01 1.45731187e+00 4.74417591e+00 2.43803692e+00] [-1.55332160e+00 -1.39328432e+00 1.32442272e+00 
-1.26231074e+00 1.14131451e+00 -1.63144231e+00 -2.79289675e+00 -4.07758999e+00 2.06577611e+00 -1.95462704e-01] [-2.60964918e+00 -1.58494759e+00 5.66159058e+00 5.61538219e-01 -2.72679543e+00 -2.44528818e+00 1.26585758e+00 -4.22990263e-01 -3.86717534e+00 -1.66365576e+00] [-4.72097349e+00 1.83874488e+00 -1.61509490e+00 -1.42489529e+00 -3.57183874e-01 6.52791619e-01 3.33339185e-01 -3.87959599e-01 2.25434756e+00 -8.85487497e-01] [ 3.96098566e+00 -1.46131158e+00 3.60065961e+00 2.38174939e+00 2.81914353e+00 4.61358607e-01 -1.98281503e+00 1.34543383e+00 1.18618011e+00 8.89534533e-01] [ 7.39822626e-01 4.44255304e+00 1.84168696e-01 4.99921232e-01 -3.80090475e-01 2.52428484e+00 -1.97808695e+00 -6.96538031e-01 1.91866589e+00 -1.46217465e-01] [ 9.28976655e-01 1.67641830e+00 1.24663472e+00 -3.15537047e+00 1.76507682e-01 3.62785864e+00 -2.80433321e+00 5.62746584e-01 -5.27767849e+00 2.87126136e+00] [ 2.99356729e-01 2.57593483e-01 -2.95875621e+00 -1.02653790e+00 -2.09868050e+00 -1.34003162e-03 1.19373345e+00 2.05137753e+00 -3.50095391e+00 1.53827047e+00] [-4.45028973e+00 2.50812626e+00 -3.45133138e+00 1.93317604e+00 2.29399681e+00 2.17641473e+00 2.65390182e+00 1.90369070e-01 3.05644178e+00 -1.37572503e+00]] [[ 6.21617079e+00 -1.00402522e+00 2.70180130e+00 3.38826323e+00 1.84252405e+00 1.28100121e+00 5.15145779e-01 1.32728648e+00 1.98016095e+00 1.44381499e+00] [ 1.13922441e+00 3.36870265e+00 1.58629715e+00 -1.02382207e+00 -1.27575374e+00 7.06736565e-01 6.36260927e-01 2.98586941e+00 -4.91749644e-02 4.44757998e-01] [ 2.33302641e+00 3.09269953e+00 1.52114952e+00 -3.76448417e+00 1.81782198e+00 1.85411263e+00 1.71126390e+00 3.46231639e-01 -1.52994061e+00 3.04132175e+00] [ 2.84809709e+00 7.63495564e-02 2.02760911e+00 3.34930730e+00 2.41243631e-01 1.16643286e+00 3.52394176e+00 -1.55249333e+00 -6.46027029e-01 1.90172887e+00] [ 6.52243257e-01 1.03069127e+00 3.03367162e+00 4.35495901e+00 8.74834895e-01 1.50039673e+00 4.04998016e+00 -7.16498315e-01 1.49830294e+00 4.20832485e-01] 
[-5.01506865e-01 1.81308842e+00 3.52156967e-01 2.37899113e+00 -5.50204337e-01 3.55342007e+00 3.15070033e+00 -3.22797918e+00 1.45266891e+00 1.56808424e+00] [ 2.65652204e+00 2.47797042e-01 3.35085064e-01 3.14515233e-02 2.21584797e+00 -1.28644419e+00 9.45631862e-01 -3.48151326e+00 3.24034953e+00 7.33177125e-01] [ 5.35143256e-01 8.72998476e-01 -2.21289635e-01 2.31362605e+00 2.99950385e+00 6.24162138e-01 2.35207963e+00 -5.25597155e-01 -3.99910951e+00 -2.21442723e+00] [ 1.91515684e+00 5.35553122e+00 1.03437543e+00 5.28180182e-01 -1.01958823e+00 -3.62464309e+00 -3.45831275e+00 -3.62605035e-01 1.04211390e-01 -2.54080105e+00] [-1.02107310e+00 -2.53306270e+00 2.00656652e+00 -3.89432192e-01 -1.64097548e-01 -1.07092452e+00 -1.23379064e+00 -2.40057707e-02 1.95080101e-01 -1.72098351e+00]] [[ 2.78815150e+00 6.72766209e-01 2.60679245e+00 -2.14154720e+00 1.10972977e+00 3.14215994e+00 5.53379238e-01 -2.46403074e+00 -1.81085587e+00 -4.34028208e-01] [ 1.60843062e+00 -2.15086007e+00 -3.23654771e+00 -5.81967890e-01 1.57465792e+00 4.61209106e+00 -3.52715564e+00 2.08595610e+00 -6.14852011e-01 4.43839133e-01] [ 3.64581060e+00 2.63446069e+00 -1.42437840e+00 7.89804399e-01 1.31376779e+00 -1.79473138e+00 1.00701690e+00 -8.85880589e-02 -3.35124969e-01 9.02370930e-01] [-1.54961324e+00 8.32190514e-01 -1.99374032e+00 -3.14692080e-01 -3.74589181e+00 8.22351515e-01 7.59193182e-01 -1.11606455e+00 -1.17507267e+00 -2.80474043e+00] [ 2.65574425e-01 1.60811520e+00 4.22168016e-01 -4.04417574e-01 2.03575897e+00 -6.29975021e-01 3.67481995e+00 -1.52120447e+00 5.11601925e-01 -1.19422174e+00] [-2.12083340e+00 2.09423184e+00 3.34810805e+00 -4.64460671e-01 -1.08445585e-01 1.66487598e+00 -6.38568938e-01 -9.63376939e-01 -2.34694386e+00 7.28681028e-01] [-2.48593926e+00 -2.69260645e+00 2.08565295e-01 -8.73907268e-01 1.47080731e+00 1.75498581e+00 2.72217304e-01 3.35455632e+00 1.04129434e+00 1.61602545e+00] [-1.16732025e+00 -4.98312664e+00 4.67689276e-01 6.42348707e-01 -1.56110620e+00 -1.66369367e+00 2.45033050e+00 
-6.11629307e-01 -1.01769066e+00 -3.71645427e+00] [-5.75816214e-01 -1.06685042e-01 3.13770294e-01 1.25978500e-01 -8.54555964e-02 -1.99456096e+00 -2.79421449e-01 3.98020673e+00 1.33632874e+00 -1.28854442e+00] [-5.26690066e-01 2.86022353e+00 -1.25190377e+00 -3.78286302e-01 6.11039996e-01 3.35477686e+00 3.73232651e+00 -7.60503352e-01 -1.31623077e+00 1.43117309e+00]] [[ 1.93230438e+00 -2.94842720e-01 2.93135548e+00 2.35749841e+00 1.41512108e+00 -2.14636445e-01 3.45832396e+00 1.47437561e+00 -8.22438419e-01 9.85588670e-01] [ 1.36284590e+00 2.38964081e+00 -1.03899908e+00 2.21778536e+00 -2.67630911e+00 1.73975396e+00 1.82181692e+00 5.44675231e-01 8.65743041e-01 1.92746902e+00] [ 7.32772887e-01 1.87878698e-01 -7.31489062e-02 9.09855425e-01 3.56461716e+00 5.19973636e-01 -3.45740318e-02 4.25459909e+00 -1.68094933e-01 -4.60117626e+00] [-1.33022189e-01 -2.37511659e+00 5.22078323e+00 -1.22170806e+00 3.13832688e+00 3.92448306e+00 1.76671171e+00 8.48646164e-01 4.98335004e-01 -1.13377595e+00] [ 2.82629776e+00 1.57872748e+00 -6.16387308e-01 -5.55021942e-01 1.47519970e+00 1.76291674e-01 -7.17148364e-01 1.87712479e+00 -2.59728527e+00 8.50000739e-01] [ 1.61700082e+00 3.72896600e+00 1.87977552e+00 5.05116999e-01 -3.30213714e+00 2.66994596e+00 3.11253595e+00 -8.06544363e-01 2.52897811e+00 -7.60007799e-01] [-6.31576777e-02 7.04763889e-01 1.40070033e+00 -1.11010218e+00 1.29231262e+00 6.71676636e-01 1.54204464e+00 4.22998905e+00 5.28607368e-01 4.76967752e-01] [-1.24062657e+00 3.72546458e+00 2.74802113e+00 2.36463046e+00 -1.89113951e+00 1.13869452e+00 4.05700397e+00 3.55474472e+00 1.26018453e+00 -2.85545397e+00] [ 9.70217705e-01 -1.28745317e-01 1.18956161e+00 3.00977540e+00 1.49744987e+00 6.57789350e-01 3.20034528e+00 -1.41527128e+00 7.63228238e-01 1.20156908e+00] [ 9.05951798e-01 2.05543470e+00 3.60298800e+00 3.42315483e+00 1.43359256e+00 4.87495375e+00 2.74006176e+00 5.97137392e-01 2.82257128e+00 2.22664213e+00]] [[ 4.06532669e+00 -1.57056546e+00 -8.93912971e-01 1.58292741e-01 
8.61447215e-01 7.87977338e-01 -2.95731008e-01 2.34776044e+00 -1.78245378e+00 1.13082290e+00] [ 5.91165304e-01 3.20750862e-01 9.89188075e-01 4.20193815e+00 1.37546897e+00 1.63767171e+00 5.56860268e-01 -5.37570894e-01 3.66017723e+00 -7.26853192e-01] [ 9.83463168e-01 -2.23980165e+00 1.65935969e+00 8.43185425e-01 1.67548716e-01 2.84922957e-01 5.10505974e-01 -1.37727571e+00 2.14612579e+00 9.61387515e-01] [-1.46226764e+00 8.71135950e-01 1.35480857e+00 8.02462876e-01 -1.41042805e+00 -2.68483162e-03 3.03509831e-01 -2.61476135e+00 3.65326571e+00 3.25224972e+00] [ 3.35400701e-01 1.89157605e+00 6.75079942e-01 2.78425097e+00 1.24977124e+00 -1.63139534e+00 6.18952751e-01 -2.73638844e-01 2.69165397e-01 -1.40902781e+00] [-9.54492271e-01 3.48774493e-01 2.97782230e+00 -2.80939484e+00 1.35551929e+00 -1.39491558e+00 1.82791442e-01 9.64262962e-01 -9.05750692e-01 1.34120739e+00] [-1.55726886e+00 1.06628168e+00 2.31037664e+00 1.69964600e+00 -1.76952410e+00 2.84864140e+00 1.34193826e+00 -1.80729413e+00 5.05411029e-02 -2.07826138e+00] [ 2.57273507e+00 2.22602248e+00 5.38747728e-01 3.11037469e+00 1.04104090e+00 -6.05716884e-01 2.28650379e+00 1.09967494e+00 -1.30304241e+00 4.73017073e+00] [ 7.12597191e-01 -3.17042089e+00 2.55486560e+00 3.86612916e+00 -1.57190180e+00 1.82150006e+00 2.82010627e+00 -1.25940442e+00 7.05145454e+00 -1.64216971e+00] [-1.87519503e+00 -1.96117997e-01 1.82972360e+00 1.14704633e+00 1.57699847e+00 -1.73607230e+00 3.09023547e+00 1.25357425e+00 -9.88857090e-01 -1.31894827e-01]] [[ 4.21124649e+00 -4.25684392e-01 -6.84761703e-01 9.56406951e-01 3.81782269e+00 -1.08541346e+00 1.73019028e+00 -1.47308898e+00 4.77648354e+00 2.49577665e+00] [-2.51354074e+00 -1.68612099e+00 3.96819949e+00 1.58213139e+00 -2.08789766e-01 1.05580533e+00 6.46736920e-01 2.80410320e-01 1.14071739e+00 -2.40873551e+00] [ 3.70393968e+00 2.18304181e+00 1.26045853e-01 2.77172041e+00 2.37529397e+00 -2.00346327e+00 8.15785885e-01 -3.13061213e+00 3.05868816e+00 -2.55802774e+00] [-7.43246138e-01 -1.81606960e+00 
3.38055801e+00 -6.86098516e-01 2.04548478e+00 2.41533542e+00 1.92483854e+00 2.40390158e+00 -1.10905743e+00 -3.97068572e+00] [-2.34003484e-01 1.35815501e+00 1.02118182e+00 3.38027239e+00 -6.99197352e-01 -1.30132580e+00 -5.15353739e-01 2.61909866e+00 -1.69886708e+00 1.79909492e+00] [ 3.99605393e-01 -3.19888973e+00 4.65981483e+00 -1.57133269e+00 2.52589011e+00 -7.90789545e-01 2.16774511e+00 -1.51351047e+00 1.37333465e+00 -3.50800753e-01] [ 2.99790549e+00 1.73126668e-01 -8.11817706e-01 -1.19014049e+00 2.19037366e+00 1.83586335e+00 -5.31399846e-02 1.68331432e+00 6.97166920e-02 -3.08792758e+00] [ 5.92566073e-01 1.94667292e+00 -3.15935421e+00 -1.86402297e+00 1.15686059e-02 9.79733467e-02 5.98185253e+00 2.23651528e+00 1.78208947e+00 1.03550792e+00] [-6.29850328e-01 -2.62402654e+00 3.35698748e+00 1.27675146e-01 2.12783337e+00 -1.64466619e+00 -1.09111452e+00 -7.41249740e-01 1.36729598e-01 -1.69108629e+00] [ 1.07217729e-01 7.71433830e-01 4.32382202e+00 -1.46105647e+00 5.05760145e+00 1.84822798e+00 8.24239194e-01 2.27089143e+00 -1.00130844e+00 6.50648594e-01]] [[-2.60252929e+00 1.78625083e+00 2.35133481e+00 8.83300424e-01 -1.01586437e+00 -6.14167452e-02 1.88302684e+00 1.51104951e+00 -4.27727163e-01 1.20425344e+00] [-1.00590634e+00 -5.40777445e-02 -1.97945237e-01 -1.02519393e+00 -2.06516671e+00 1.32321405e+00 -3.22007477e-01 5.83595562e+00 -1.38371921e+00 3.60870099e+00] [ 2.66128153e-01 -2.46533704e+00 2.97301507e+00 1.42134786e+00 -2.69033003e+00 1.35660791e+00 -2.58088052e-01 -1.01477313e+00 -3.56499457e+00 2.66809750e+00] [ 5.38892567e-01 -8.59144330e-02 9.23874259e-01 -1.15747571e+00 1.39773583e+00 -1.51980519e-02 7.38427758e-01 2.05436802e+00 1.40615690e+00 6.99691534e-01] [ 7.46222496e-01 -1.53135657e-02 8.22148085e-01 5.94133198e-01 1.22005332e+00 2.66486096e+00 1.78737783e+00 4.17448640e-01 4.55715835e-01 2.78895831e+00] [ 1.29331589e+00 2.49941558e-01 2.10087568e-01 4.23995304e+00 -1.30406439e-01 -2.74323285e-01 2.52745581e+00 -2.69047594e+00 2.16418910e+00 
9.70722258e-01] [ 1.47289395e+00 2.40694880e+00 3.84933114e-01 -1.54673934e-01 1.30845499e+00 5.15709877e-01 -4.31894922e+00 -2.75534511e+00 2.47170424e+00 -1.33925748e+00] [-2.24856591e+00 2.53072596e+00 -3.00178230e-01 -2.68981600e+00 -8.49984705e-01 2.15383887e+00 -1.04577422e+00 2.40313816e+00 2.54179430e+00 1.29295659e+00] [ 4.56459284e-01 6.63787544e-01 -1.12947583e-01 3.90736055e+00 -1.05715334e-01 -2.41490769e+00 2.02216029e+00 1.52794933e+00 -2.78159451e+00 2.06475019e+00] [-1.46204209e+00 3.16789031e+00 -2.07029533e+00 -3.05636704e-01 -3.09447598e+00 2.81874108e+00 9.96552229e-01 -2.01795173e+00 -4.13194609e+00 -2.15823627e+00]] [[ 9.71555054e-01 -4.56353605e-01 -2.90240765e-01 1.57186127e+00 -1.06639218e+00 1.64001107e+00 -3.34804416e-01 -1.06469011e+00 -1.47198343e+00 3.59649277e+00] [-1.43063664e-01 -1.12947607e+00 -8.18181634e-02 2.57318735e+00 2.42193580e-01 2.40320468e+00 -1.91952562e+00 -8.62450302e-01 2.27396846e+00 3.85072684e+00] [ 2.76757193e+00 -2.36520767e+00 -1.47285151e+00 -3.75374138e-01 3.88358355e-01 -8.21981609e-01 -7.58084834e-01 -2.84505868e+00 6.41258359e-02 2.55187893e+00] [ 3.64718020e-01 6.13430858e-01 4.04221416e-01 9.99842763e-01 1.19762337e+00 1.09743106e+00 -2.17133403e+00 -1.74284959e+00 5.76300621e-01 -4.21868503e-01] [ 1.22154140e+00 2.02747416e+00 2.67580533e+00 -1.36286497e+00 -1.43210340e+00 1.65672946e+00 2.45822334e+00 -1.99148476e-01 -2.47615981e+00 -1.08326018e-01] [-3.06731367e+00 3.73758483e+00 6.00663126e-01 1.63451743e+00 2.00050020e+00 1.40325356e+00 -2.40999031e+00 4.67343950e+00 3.56040478e-01 -1.71222520e+00] [-2.81309724e+00 3.24810028e+00 -6.70463860e-01 3.60418940e+00 6.22915983e+00 2.74995518e+00 1.58514071e+00 3.83731723e+00 1.83132100e+00 1.50780708e-01] [-7.32970893e-01 2.51930666e+00 7.06344843e-01 5.97859478e+00 -9.74963307e-02 1.73831630e+00 1.33638930e+00 3.93761969e+00 3.18958139e+00 -1.46940053e-01] [ 4.56198502e+00 -2.97584414e-01 2.30829787e+00 1.57527328e+00 1.37224627e+00 1.10265732e-01 
1.12775266e+00 9.75237310e-01 3.71820390e-01 -3.86357188e+00] [ 4.17087746e+00 3.62591505e+00 1.01342368e+00 9.41438675e-01 2.22254127e-01 1.15634322e+00 4.98772055e-01 6.08156204e-01 2.65863013e+00 1.23209977e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 1, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_958.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.8206 -0.2552 -0.1425 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.2646 (2,1,1,.,.) = 0.2854 (3,1,1,.,.) = 0.6375 (1,2,1,.,.) = 0.2280 (2,2,1,.,.) = -1.8948 (3,2,1,.,.) = 1.2235 (1,3,1,.,.) = 0.8647 (2,3,1,.,.) = -1.5788 (3,3,1,.,.) = 0.3631 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [-2.92791843e+00 -2.80009365e+00 1.54280400e+00 ... -1.83336651e+00 -8.39250565e-01 1.12932062e+00] [ 2.41343117e+00 3.16209435e-01 -1.21140373e+00 ... 3.03629303e+00 1.73404312e+00 5.08477569e-01] ... [ 2.14616442e+00 -1.24980056e+00 -8.18786025e-01 ... 5.56195307e+00 3.23372412e+00 1.97521067e+00] [ 2.46327353e+00 6.02865219e-01 2.00722885e+00 ... 4.08537388e+00 2.96537781e+00 5.16546774e+00] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]] [[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [ 1.29096532e+00 3.28975248e+00 1.40462852e+00 ... 
1.28357720e+00 2.10197020e+00 3.19002211e-01] [ 1.29631698e+00 -6.73696876e-01 -2.25617075e+00 ... -4.80177402e-01 -7.97092915e-04 -9.16770101e-01] ... [ 1.28056169e+00 3.32573748e+00 1.72680688e+00 ... -1.29483688e+00 4.83082563e-01 3.52826929e+00] [ 6.60626888e-01 -8.57089877e-01 1.06271088e+00 ... 2.16659546e+00 3.77119160e+00 -4.76941252e+00] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]] [[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [-5.15700817e-01 -5.69635153e-01 5.31541228e-01 ... 1.68468976e+00 -8.00499320e-02 7.06638575e-01] [ 2.57324743e+00 1.60527945e-01 -2.25879812e+00 ... -7.17636108e-01 3.51595783e+00 2.87329245e+00] ... [ 2.25318956e+00 -8.12312722e-01 2.70753026e-01 ... 1.21929753e+00 -2.29665613e+00 4.25206512e-01] [ 2.21655750e+00 6.11647487e-01 1.30602551e+00 ... 3.03222084e+00 1.19968224e+00 1.04100597e+00] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]] ... [[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [ 1.48610282e+00 1.93336284e+00 -1.68181860e+00 ... 3.47328138e+00 1.09162343e+00 1.24736249e-01] [ 1.41708899e+00 -4.70229387e-01 -1.23247015e+00 ... 2.97242260e+00 -3.90303493e-01 -1.34980285e+00] ... [ 2.22809839e+00 -7.62929440e-01 2.14570999e+00 ... 8.53684425e-01 5.96958399e-02 1.20888829e-01] [-9.95282769e-01 -2.01383352e+00 1.13598120e+00 ... 2.33107042e+00 -1.88537323e+00 2.11719370e+00] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]] [[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [ 2.43705368e+00 2.14350152e+00 -1.75764740e+00 ... 2.85176659e+00 -3.64163041e-01 -1.15609765e-02] [ 3.38298607e+00 1.23334789e+00 -2.15912867e+00 ... 6.32698238e-01 5.83478332e-01 1.69724667e+00] ... [ 2.47502828e+00 -8.97954106e-01 3.75374222e+00 ... 
2.53829002e+00 9.34148908e-01 -1.49064672e+00] [-1.79618120e-01 3.40864277e+00 -1.88919961e+00 ... -2.44525552e-01 2.64160335e-01 -6.82171822e-01] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]] [[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [ 2.96219540e+00 3.60844040e+00 6.19248152e-02 ... 6.09356940e-01 -9.41277742e-02 -2.95559406e-01] [-6.84299111e-01 1.62058258e+00 2.33320379e+00 ... 2.30347490e+00 1.47334409e+00 -6.79057717e-01] ... [ 7.39059210e-01 2.33694494e-01 2.24575710e+00 ... 1.99348450e+00 2.38278687e-01 -4.56491947e-01] [-2.03691387e+00 -1.27852643e+00 -1.16251194e+00 ... -2.96742439e-01 1.39714026e+00 1.81811953e+00] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]]] [[[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [ 5.93398905e+00 6.74613833e-01 -1.90843225e+00 ... 3.47006917e+00 1.94014430e+00 -1.31661534e+00] [-4.83091402e+00 -1.01444221e+00 1.63759089e+00 ... 6.13394320e-01 -2.79344964e+00 -1.62852621e+00] ... [ 5.14843702e-01 2.33914042e+00 1.14676499e+00 ... -7.01992369e+00 2.37120223e+00 -7.62162030e-01] [-4.95480919e+00 8.69310439e-01 -2.45303535e+00 ... -3.18174219e+00 -1.80204535e+00 -5.12052917e+00] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]] [[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [ 3.12629867e+00 -4.31254864e+00 1.90366626e+00 ... -1.26436800e-01 -2.61645168e-01 -1.67418885e+00] [-2.37222528e+00 1.39348817e+00 1.27362537e+00 ... 2.28264308e+00 -2.34998798e+00 4.34015131e+00] ... [-1.45119429e+00 -3.42707086e+00 8.32959473e-01 ... 1.84153938e+00 -6.11182392e-01 -4.59709167e+00] [-1.80480051e+00 1.26938963e+00 1.73775625e+00 ... 
-1.67774343e+00 -1.12323904e+00 3.79352927e+00] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]] [[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [ 1.52904916e+00 -1.24961305e+00 2.14177036e+00 ... -3.18078089e+00 -2.33432084e-01 -9.92157578e-01] [-9.82967496e-01 1.82276964e+00 3.49950767e+00 ... -8.89049470e-02 -3.62429523e+00 -5.71931648e+00] ... [ 5.51728070e-01 2.84615755e+00 1.84393978e+00 ... 1.00906992e+00 2.18413424e+00 2.13389301e+00] [-3.18557882e+00 9.20670688e-01 -5.44040346e+00 ... -4.57171726e+00 -1.69234514e+00 7.26247907e-01] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]] ... [[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [-2.32933903e+00 -2.81715465e+00 1.20356774e+00 ... -1.75775576e+00 -3.42144752e+00 2.29131532e+00] [-1.34127092e+00 7.03045666e-01 -1.44127893e+00 ... 1.19712543e+00 -4.05812836e+00 -1.77330089e+00] ... [ 3.22884989e+00 1.43821430e+00 2.68046618e+00 ... -3.09070563e+00 6.41235828e-01 -1.74719667e+00] [-4.85731214e-02 4.59882164e+00 -5.95738649e-01 ... -5.13620090e+00 4.23794127e+00 -4.38709259e+00] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]] [[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [-3.07415152e+00 -3.84623981e+00 1.72750425e+00 ... -1.68097901e+00 1.28173184e+00 4.91524518e-01] [-1.25662589e+00 -2.45153022e+00 1.62982225e+00 ... 1.37748766e+00 -4.54866171e+00 -2.99913764e-01] ... [ 1.54746938e+00 -3.04815722e+00 -3.60414386e+00 ... -5.62290907e+00 -7.70640731e-01 2.38377643e+00] [-3.22257161e-01 -2.01981306e+00 3.11858439e+00 ... 6.01004660e-01 -6.98059797e-01 3.22622895e+00] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... 
-2.55153477e-01 -2.55153477e-01 -2.55153477e-01]] [[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [-5.02590322e+00 -3.63608623e+00 6.16492867e-01 ... -6.96925819e-01 4.08101380e-01 1.73614168e+00] [-2.57223797e+00 -4.63076115e+00 -1.65917802e+00 ... -4.20237482e-01 1.49270558e+00 3.59388900e+00] ... [ 2.28538901e-01 1.16829085e+00 -2.55313492e+00 ... -1.87931806e-02 5.88940680e-02 2.36035442e+00] [ 2.73453689e+00 4.95526743e+00 9.28738296e-01 ... 7.72631168e-03 -4.03525925e+00 7.18740165e-01] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]]] [[[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [-1.48632896e+00 1.59619606e+00 3.74171019e-01 ... -8.11168134e-01 -3.17261934e-01 1.38668716e-02] [ 1.88149202e+00 7.49608457e-01 1.20746002e-01 ... -2.48759437e+00 1.19998562e+00 1.19348443e+00] ... [-1.63088596e+00 -1.79117218e-01 1.97961628e-02 ... 9.94312227e-01 -3.70176315e+00 -7.27437258e-01] [ 1.87941682e+00 -8.54944050e-01 3.05243969e-01 ... -4.48700368e-01 -8.36870849e-01 8.75017643e-02] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]] [[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [-2.71310377e+00 1.01352215e+00 -2.12757659e+00 ... -5.71216047e-01 -7.98587143e-01 1.47283268e+00] [ 1.22947598e+00 -1.26472890e-01 9.90542352e-01 ... -1.04001677e+00 1.81574988e+00 -1.78923666e+00] ... [ 4.20587003e-01 4.06960487e-01 -1.28820789e+00 ... -8.71574059e-02 4.54196155e-01 9.36383545e-01] [ 1.06530511e+00 -1.92241013e-01 -1.64327133e+00 ... -1.84650198e-02 -1.55673909e+00 9.73493159e-01] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]] [[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... 
-1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [-1.80613622e-01 1.33815289e+00 -1.74556601e+00 ... 1.30626762e+00 4.51572776e-01 3.90290082e-01] [-9.19385493e-01 -1.16188848e+00 -3.85748029e-01 ... 7.71530926e-01 3.11432302e-01 1.90215552e+00] ... [-1.58955812e+00 -1.09447598e+00 -1.14562166e+00 ... -1.14010775e+00 4.61246848e-01 -1.65233004e+00] [ 9.12624538e-01 -7.27970779e-01 3.10578632e+00 ... 1.10308516e+00 6.35631680e-01 -9.97797370e-01] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]] ... [[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [ 8.56685817e-01 1.00871181e+00 7.34645784e-01 ... -1.07520103e+00 1.76548350e+00 -1.35817993e+00] [ 3.27332020e-01 1.52163729e-01 2.25489402e+00 ... -2.64290094e+00 3.23973823e+00 2.40323448e+00] ... [-3.47734523e+00 -5.01731277e-01 -3.14839435e+00 ... 1.77640772e+00 4.81700152e-02 1.39009690e+00] [ 1.04981911e+00 -1.34273422e+00 -1.72280982e-01 ... 2.18730140e+00 -1.40245652e+00 1.90560472e+00] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]] [[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [ 4.34754193e-01 1.16759896e+00 3.59761357e-01 ... -7.46892571e-01 -3.29834402e-01 -2.58302510e-01] [-1.27615643e+00 8.04307580e-01 6.75627708e-01 ... -1.14177930e+00 3.05636144e+00 -8.72671545e-01] ... [-2.52361369e+00 3.11501122e+00 2.36593410e-01 ... 2.42564583e+00 1.17822394e-01 3.22599411e-02] [ 6.34220183e-01 -7.39736974e-01 -7.55816519e-01 ... -3.38184237e-02 7.40945160e-01 -1.39447188e+00] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]] [[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [ 1.39134896e+00 3.61484647e-01 -2.61958212e-01 ... 
1.50820509e-01 3.67512047e-01 -6.99618638e-01] [ 2.66301012e+00 2.18482900e+00 -3.32509279e-01 ... -1.09909976e+00 -1.83658743e+00 -1.57185650e+00] ... [-4.17699933e-01 -6.22772515e-01 5.74372590e-01 ... -1.03243911e+00 -3.80091146e-02 -1.06145942e+00] [-3.67892593e-01 -2.37942290e+00 4.82176542e-01 ... 4.11522150e-01 2.30833888e+00 -1.59421241e+00] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]]]]]; ov_res: [[[[[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [-2.92791843e+00 -2.80009365e+00 1.54280400e+00 ... -1.83336651e+00 -8.39250565e-01 1.12932062e+00] [ 2.41343117e+00 3.16209435e-01 -1.21140373e+00 ... 3.03629303e+00 1.73404312e+00 5.08477569e-01] ... [ 2.14616442e+00 -1.24980056e+00 -8.18786025e-01 ... 5.56195307e+00 3.23372412e+00 1.97521067e+00] [ 2.46327353e+00 6.02865219e-01 2.00722885e+00 ... 4.08537388e+00 2.96537781e+00 5.16546774e+00] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]] [[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [ 1.29096532e+00 3.28975248e+00 1.40462852e+00 ... 1.28357720e+00 2.10197020e+00 3.19002211e-01] [ 1.29631698e+00 -6.73696876e-01 -2.25617075e+00 ... -4.80177402e-01 -7.97092915e-04 -9.16770101e-01] ... [ 1.28056169e+00 3.32573748e+00 1.72680688e+00 ... -1.29483688e+00 4.83082563e-01 3.52826929e+00] [ 6.60626888e-01 -8.57089877e-01 1.06271088e+00 ... 2.16659546e+00 3.77119160e+00 -4.76941252e+00] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]] [[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [-5.15700817e-01 -5.69635153e-01 5.31541228e-01 ... 1.68468976e+00 -8.00499320e-02 7.06638575e-01] [ 2.57324743e+00 1.60527945e-01 -2.25879812e+00 ... -7.17636108e-01 3.51595783e+00 2.87329245e+00] ... 
[ 2.25318956e+00 -8.12312722e-01 2.70753026e-01 ... 1.21929753e+00 -2.29665613e+00 4.25206512e-01] [ 2.21655750e+00 6.11647487e-01 1.30602551e+00 ... 3.03222084e+00 1.19968224e+00 1.04100597e+00] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]] ... [[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [ 1.48610282e+00 1.93336284e+00 -1.68181860e+00 ... 3.47328138e+00 1.09162343e+00 1.24736249e-01] [ 1.41708899e+00 -4.70229387e-01 -1.23247015e+00 ... 2.97242260e+00 -3.90303493e-01 -1.34980285e+00] ... [ 2.22809839e+00 -7.62929440e-01 2.14570999e+00 ... 8.53684425e-01 5.96958399e-02 1.20888829e-01] [-9.95282769e-01 -2.01383352e+00 1.13598120e+00 ... 2.33107042e+00 -1.88537323e+00 2.11719370e+00] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]] [[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [ 2.43705368e+00 2.14350152e+00 -1.75764740e+00 ... 2.85176659e+00 -3.64163041e-01 -1.15609765e-02] [ 3.38298607e+00 1.23334789e+00 -2.15912867e+00 ... 6.32698238e-01 5.83478332e-01 1.69724667e+00] ... [ 2.47502828e+00 -8.97954106e-01 3.75374222e+00 ... 2.53829002e+00 9.34148908e-01 -1.49064672e+00] [-1.79618120e-01 3.40864277e+00 -1.88919961e+00 ... -2.44525552e-01 2.64160335e-01 -6.82171822e-01] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]] [[ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01] [ 2.96219540e+00 3.60844040e+00 6.19248152e-02 ... 6.09356940e-01 -9.41277742e-02 -2.95559406e-01] [-6.84299111e-01 1.62058258e+00 2.33320379e+00 ... 2.30347490e+00 1.47334409e+00 -6.79057717e-01] ... [ 7.39059210e-01 2.33694494e-01 2.24575710e+00 ... 1.99348450e+00 2.38278687e-01 -4.56491947e-01] [-2.03691387e+00 -1.27852643e+00 -1.16251194e+00 ... 
-2.96742439e-01 1.39714026e+00 1.81811953e+00] [ 8.20611596e-01 8.20611596e-01 8.20611596e-01 ... 8.20611596e-01 8.20611596e-01 8.20611596e-01]]] [[[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [ 5.93398905e+00 6.74613833e-01 -1.90843225e+00 ... 3.47006917e+00 1.94014430e+00 -1.31661534e+00] [-4.83091402e+00 -1.01444221e+00 1.63759089e+00 ... 6.13394320e-01 -2.79344964e+00 -1.62852621e+00] ... [ 5.14843702e-01 2.33914042e+00 1.14676499e+00 ... -7.01992369e+00 2.37120223e+00 -7.62162030e-01] [-4.95480919e+00 8.69310439e-01 -2.45303535e+00 ... -3.18174219e+00 -1.80204535e+00 -5.12052917e+00] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]] [[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [ 3.12629867e+00 -4.31254864e+00 1.90366626e+00 ... -1.26436800e-01 -2.61645168e-01 -1.67418885e+00] [-2.37222528e+00 1.39348817e+00 1.27362537e+00 ... 2.28264308e+00 -2.34998798e+00 4.34015131e+00] ... [-1.45119429e+00 -3.42707086e+00 8.32959473e-01 ... 1.84153938e+00 -6.11182392e-01 -4.59709167e+00] [-1.80480051e+00 1.26938963e+00 1.73775625e+00 ... -1.67774343e+00 -1.12323904e+00 3.79352927e+00] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]] [[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [ 1.52904916e+00 -1.24961305e+00 2.14177036e+00 ... -3.18078089e+00 -2.33432084e-01 -9.92157578e-01] [-9.82967496e-01 1.82276964e+00 3.49950767e+00 ... -8.89049470e-02 -3.62429523e+00 -5.71931648e+00] ... [ 5.51728070e-01 2.84615755e+00 1.84393978e+00 ... 1.00906992e+00 2.18413424e+00 2.13389301e+00] [-3.18557882e+00 9.20670688e-01 -5.44040346e+00 ... -4.57171726e+00 -1.69234514e+00 7.26247907e-01] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]] ... 
[[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [-2.32933903e+00 -2.81715465e+00 1.20356774e+00 ... -1.75775576e+00 -3.42144752e+00 2.29131532e+00] [-1.34127092e+00 7.03045666e-01 -1.44127893e+00 ... 1.19712543e+00 -4.05812836e+00 -1.77330089e+00] ... [ 3.22884989e+00 1.43821430e+00 2.68046618e+00 ... -3.09070563e+00 6.41235828e-01 -1.74719667e+00] [-4.85731214e-02 4.59882164e+00 -5.95738649e-01 ... -5.13620090e+00 4.23794127e+00 -4.38709259e+00] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]] [[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [-3.07415152e+00 -3.84623981e+00 1.72750425e+00 ... -1.68097901e+00 1.28173184e+00 4.91524518e-01] [-1.25662589e+00 -2.45153022e+00 1.62982225e+00 ... 1.37748766e+00 -4.54866171e+00 -2.99913764e-01] ... [ 1.54746938e+00 -3.04815722e+00 -3.60414386e+00 ... -5.62290907e+00 -7.70640731e-01 2.38377643e+00] [-3.22257161e-01 -2.01981306e+00 3.11858439e+00 ... 6.01004660e-01 -6.98059797e-01 3.22622895e+00] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]] [[-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01] [-5.02590322e+00 -3.63608623e+00 6.16492867e-01 ... -6.96925819e-01 4.08101380e-01 1.73614168e+00] [-2.57223797e+00 -4.63076115e+00 -1.65917802e+00 ... -4.20237482e-01 1.49270558e+00 3.59388900e+00] ... [ 2.28538901e-01 1.16829085e+00 -2.55313492e+00 ... -1.87931806e-02 5.88940680e-02 2.36035442e+00] [ 2.73453689e+00 4.95526743e+00 9.28738296e-01 ... 7.72631168e-03 -4.03525925e+00 7.18740165e-01] [-2.55153477e-01 -2.55153477e-01 -2.55153477e-01 ... -2.55153477e-01 -2.55153477e-01 -2.55153477e-01]]] [[[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [-1.48632896e+00 1.59619606e+00 3.74171019e-01 ... 
-8.11168134e-01 -3.17261934e-01 1.38668716e-02] [ 1.88149202e+00 7.49608457e-01 1.20746002e-01 ... -2.48759437e+00 1.19998562e+00 1.19348443e+00] ... [-1.63088596e+00 -1.79117218e-01 1.97961628e-02 ... 9.94312227e-01 -3.70176315e+00 -7.27437258e-01] [ 1.87941682e+00 -8.54944050e-01 3.05243969e-01 ... -4.48700368e-01 -8.36870849e-01 8.75017643e-02] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]] [[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [-2.71310377e+00 1.01352215e+00 -2.12757659e+00 ... -5.71216047e-01 -7.98587143e-01 1.47283268e+00] [ 1.22947598e+00 -1.26472890e-01 9.90542352e-01 ... -1.04001677e+00 1.81574988e+00 -1.78923666e+00] ... [ 4.20587003e-01 4.06960487e-01 -1.28820789e+00 ... -8.71574059e-02 4.54196155e-01 9.36383545e-01] [ 1.06530511e+00 -1.92241013e-01 -1.64327133e+00 ... -1.84650198e-02 -1.55673909e+00 9.73493159e-01] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]] [[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [-1.80613622e-01 1.33815289e+00 -1.74556601e+00 ... 1.30626762e+00 4.51572776e-01 3.90290082e-01] [-9.19385493e-01 -1.16188848e+00 -3.85748029e-01 ... 7.71530926e-01 3.11432302e-01 1.90215552e+00] ... [-1.58955812e+00 -1.09447598e+00 -1.14562166e+00 ... -1.14010775e+00 4.61246848e-01 -1.65233004e+00] [ 9.12624538e-01 -7.27970779e-01 3.10578632e+00 ... 1.10308516e+00 6.35631680e-01 -9.97797370e-01] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]] ... [[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [ 8.56685817e-01 1.00871181e+00 7.34645784e-01 ... -1.07520103e+00 1.76548350e+00 -1.35817993e+00] [ 3.27332020e-01 1.52163729e-01 2.25489402e+00 ... -2.64290094e+00 3.23973823e+00 2.40323448e+00] ... 
[-3.47734523e+00 -5.01731277e-01 -3.14839435e+00 ... 1.77640772e+00 4.81700152e-02 1.39009690e+00] [ 1.04981911e+00 -1.34273422e+00 -1.72280982e-01 ... 2.18730140e+00 -1.40245652e+00 1.90560472e+00] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]] [[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [ 4.34754193e-01 1.16759896e+00 3.59761357e-01 ... -7.46892571e-01 -3.29834402e-01 -2.58302510e-01] [-1.27615643e+00 8.04307580e-01 6.75627708e-01 ... -1.14177930e+00 3.05636144e+00 -8.72671545e-01] ... [-2.52361369e+00 3.11501122e+00 2.36593410e-01 ... 2.42564583e+00 1.17822394e-01 3.22599411e-02] [ 6.34220183e-01 -7.39736974e-01 -7.55816519e-01 ... -3.38184237e-02 7.40945160e-01 -1.39447188e+00] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]] [[-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01] [ 1.39134896e+00 3.61484647e-01 -2.61958212e-01 ... 1.50820509e-01 3.67512047e-01 -6.99618638e-01] [ 2.66301012e+00 2.18482900e+00 -3.32509279e-01 ... -1.09909976e+00 -1.83658743e+00 -1.57185650e+00] ... [-4.17699933e-01 -6.22772515e-01 5.74372590e-01 ... -1.03243911e+00 -3.80091146e-02 -1.06145942e+00] [-3.67892593e-01 -2.37942290e+00 4.82176542e-01 ... 4.11522150e-01 2.30833888e+00 -1.59421241e+00] [-1.42452076e-01 -1.42452076e-01 -1.42452076e-01 ... -1.42452076e-01 -1.42452076e-01 -1.42452076e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_960.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.7643 1.3799 0.2135 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.2522 (2,1,1,.,.) = 0.7066 (3,1,1,.,.) = 1.9680 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 0.8059145 0.8581577 0.8616549 ... 0.92933494 0.3730989 0.49719167] [ 0.7764978 0.8691451 0.7183995 ... 0.90992224 0.50379086 0.48111188] [ 0.7683988 0.6084186 0.55608785 ... 1.1378381 0.77197236 0.9671886 ] ... [ 0.79902184 0.95404226 1.0435319 ... 1.0785289 0.65842843 0.4951297 ] [ 1.1319902 0.6654943 1.1700214 ... 0.7904399 0.647961 1.2205449 ] [-0.02243948 1.1031816 0.9802902 ... 0.6285314 0.8377434 0.69384485]] [[ 0.721739 1.1214157 1.4690197 ... 0.9642429 0.24054891 0.5604802 ] [ 1.0408832 0.7403606 0.6628803 ... 0.6600999 0.9032265 1.0863405 ] [ 0.4048886 0.67283 0.08725005 ... 0.6278255 1.1513693 0.49541184] ... [ 0.28124332 0.5373306 0.9537966 ... 0.5772033 1.1104035 1.029099 ] [ 0.9726026 0.9462481 1.0102324 ... 0.77182496 0.5764779 0.6066591 ] [ 0.86989295 0.49391246 0.7415444 ... 1.1655561 0.55243415 1.1520895 ]] [[ 0.9284562 0.4164368 0.41810414 ... 
0.6047625 0.82946193 0.4560421 ] [ 0.8405815 0.7077676 1.0800782 ... 0.2922127 0.64566386 0.89554715] [ 0.85105443 0.7573736 0.31324247 ... 0.8299173 1.1246887 1.1414996 ] ... [ 0.95420945 0.7853238 0.85755175 ... 1.033823 1.050623 0.7115462 ] [ 0.76198673 0.6962469 1.0429076 ... 0.8168715 0.73821455 0.8350922 ] [ 0.9921055 1.0151322 0.6180471 ... 0.70474327 0.8736682 0.7614856 ]] ... [[ 1.079751 0.6773894 0.7990759 ... 1.1922654 0.85482633 0.9004704 ] [ 1.1669374 0.96240425 0.3818197 ... 1.0888884 0.5995459 0.6593632 ] [ 0.83335716 0.60744345 0.3423653 ... 0.24544477 1.4674405 0.5455128 ] ... [ 0.67890257 0.7234865 0.6251906 ... 0.4859251 0.4051286 1.0747185 ] [ 0.57646406 0.6512026 0.9542117 ... 0.76368487 0.46455967 0.9384957 ] [ 0.74467295 1.1884295 1.0142157 ... 1.0990041 0.67639685 1.0985655 ]] [[ 0.86379945 1.0083582 0.9939424 ... 0.5692008 1.1280142 0.80196136] [ 0.59817445 0.60690075 0.85909164 ... 0.9062175 0.835609 0.6888987 ] [ 0.8337892 0.8668068 0.7112773 ... 0.6005394 0.379683 0.83753777] ... [ 0.9470067 1.1005927 0.9024443 ... 0.64869887 0.91216516 1.1235459 ] [ 0.5403262 0.8562797 0.77860266 ... 0.9644275 1.4531169 0.61751306] [ 0.7527765 0.73111445 1.2703238 ... 0.72387904 0.9340072 1.2269258 ]] [[ 0.66485715 0.7429582 0.613856 ... 0.87134236 1.3365763 1.0246502 ] [ 1.1566974 0.37913522 0.9248654 ... 0.5507611 0.92506 0.9021785 ] [ 0.53582525 0.9907959 1.1982871 ... 0.80247635 0.9837475 0.6850573 ] ... [ 0.6528043 0.6580963 0.8935573 ... 0.7164005 0.9789203 0.45711565] [ 0.692383 0.8355433 0.76473904 ... 0.29506248 0.48320785 0.6501014 ] [ 0.40266746 1.0864296 0.83264935 ... 0.8665624 0.60954463 0.92982143]]] [[[ 1.3640536 1.9728161 2.3439865 ... 1.9666193 1.5668724 2.3191504 ] [ 1.2187529 1.1687751 1.2346185 ... 1.5395912 1.2043984 0.251189 ] [ 1.5007702 3.0930514 1.5376847 ... 2.0304887 1.8069314 1.2900437 ] ... [ 1.0964357 1.4854654 1.6931297 ... 1.6610851 1.7816837 1.2627265 ] [ 1.7540499 1.9718077 1.0072767 ... 
0.5530893 1.7765954 1.4638083 ] [ 0.1991849 2.080161 0.87044 ... 2.4715195 1.76466 1.4263837 ]] [[ 1.8775759 2.3810115 1.6392581 ... 2.0020998 -0.06410372 1.9847867 ] [ 1.2808927 1.5939863 0.8606965 ... 1.0405911 1.760619 1.0151953 ] [ 1.4818716 1.2220187 1.639729 ... 1.6806698 -0.18420374 1.7204034 ] ... [ 1.43003 1.1348455 1.7977512 ... 0.69244593 0.7893446 1.501508 ] [ 2.0741222 2.9278777 0.67289066 ... 0.83947855 0.4407714 1.2524527 ] [ 1.6432014 0.5196497 1.3192106 ... 2.7669232 2.1383405 1.579217 ]] [[ 0.83889395 0.963778 2.2289836 ... 1.5556436 2.0998766 1.4566817 ] [ 1.6179668 0.18973625 1.938013 ... -0.07281399 1.5362377 1.1753443 ] [ 1.7455231 2.29707 1.8660961 ... 1.2673157 1.051067 1.861392 ] ... [ 2.3711429 1.5784938 0.91805243 ... 0.9835302 0.8432749 1.3430808 ] [ 1.2956009 1.3157054 0.5651843 ... 1.7549417 0.6708782 1.6066669 ] [ 1.1526034 1.1633446 1.2839218 ... 2.1515045 0.94802225 2.0445204 ]] ... [[ 1.4513775 1.3654063 2.1007211 ... 1.6588278 1.2319202 1.8526088 ] [ 1.1787925 0.9483334 -0.22236502 ... 1.7736108 2.5582366 1.2706511 ] [ 0.9012172 1.1249466 1.1400805 ... 1.6605248 1.4738908 0.64467895] ... [ 0.5029026 1.4900612 0.95403993 ... 1.2824392 1.1177847 0.02169847] [ 1.3007336 1.7828165 2.3144865 ... 1.0828878 -0.5176543 1.6089224 ] [ 0.62747073 2.13232 2.5936718 ... -0.04382801 1.8058591 0.76965284]] [[ 1.9977086 1.801785 1.2888803 ... 0.94848025 0.24910975 1.1151483 ] [ 1.1319232 2.2367396 2.2243807 ... 1.4327662 2.5280132 1.8615162 ] [ 1.5329775 1.4839058 0.7523669 ... 1.3322848 2.6525803 1.6073929 ] ... [ 1.6869476 0.6874348 1.336363 ... 2.3272665 1.4876558 2.1368442 ] [ 1.4579982 0.7812674 1.2860649 ... 2.4128156 1.4148344 0.95291007] [ 1.4301174 3.5133557 0.6868188 ... 1.559159 2.367858 1.7936977 ]] [[ 0.24950743 1.4347292 2.9149742 ... 1.6558318 0.86100394 1.2227532 ] [ 1.6034951 0.08780265 2.5079415 ... 1.2960907 1.3208444 0.49378246] [ 2.0560775 0.9461795 0.89510965 ... 1.5349162 0.59134185 1.3086202 ] ... 
[ 0.7165059 1.1597711 2.0540113 ... 1.9089761 0.36375618 0.6216927 ] [ 2.4073453 1.8255014 2.4657292 ... 1.6969734 0.9569481 0.79320455] [ 1.1911831 2.373402 1.5007002 ... 1.905096 1.5575511 1.4562931 ]]] [[[ 2.1350193 -0.52424383 -0.52502626 ... -0.62540907 1.5223393 -1.9036462 ] [ 0.95298374 -2.3742843 1.2833444 ... 1.621863 0.22159676 3.5141857 ] [ 1.3868709 0.49976385 1.0308721 ... -2.6696084 -1.4408922 0.1915305 ] ... [ 1.0684752 1.331138 -1.1448979 ... 2.6836684 -0.6011026 -0.5955556 ] [ 2.0247626 1.6504573 -3.4479825 ... 0.2069636 -3.2353413 -2.823192 ] [-4.321213 1.6624572 0.58444834 ... -1.9894667 0.5758709 0.49376675]] [[ 4.024741 0.48313072 0.18847956 ... -1.3559495 -1.3074746 3.4574676 ] [-1.7434974 -4.781385 1.9098291 ... -0.8575281 -2.1535866 -0.840062 ] [-0.29679644 0.93220466 -4.056655 ... 0.8307675 -1.8074172 -2.294017 ] ... [ 2.1079571 0.41970223 2.7002842 ... 3.6899433 0.8606595 0.7654275 ] [ 2.219809 -0.34555835 -2.2159524 ... 0.6754465 0.3985864 -0.9065976 ] [-2.5810795 4.20112 2.162086 ... -1.948998 2.7415915 0.05225006]] [[ 4.061448 0.7379591 0.10742327 ... 0.9427986 -1.0645769 2.6259658 ] [-0.44946837 -2.2629225 -0.4238726 ... -1.1772531 -1.86029 0.25083655] [ 0.66134477 -0.56509995 -1.509292 ... 2.1239295 1.144763 2.7950027 ] ... [ 1.0002156 0.37501484 -0.2571483 ... -0.17294168 -0.9168465 -0.2581496 ] [-0.03336315 -0.7051195 -3.1691809 ... 0.16526552 1.6585382 -3.0037794 ] [-0.8698529 -1.2049359 -3.47226 ... -0.5069012 2.684542 1.3870739 ]] ... [[ 0.22279939 -0.9158968 -1.0592246 ... -0.94983435 -0.8147049 -1.2747854 ] [-2.3401325 -1.2726218 1.8959649 ... 2.6151783 0.7126522 6.192548 ] [ 0.86694115 3.2480936 1.9517139 ... -2.4102054 0.7210972 3.9346735 ] ... [ 1.7914517 -2.0429409 0.40017664 ... 0.4839458 -0.09322178 -0.36194223] [ 0.210387 0.18487257 3.3971105 ... 0.4331996 1.374383 -0.61632246] [ 1.0015702 -1.4333725 3.12322 ... -0.74306834 4.4846077 0.9239156 ]] [[ 0.54146343 4.9594326 -1.1093241 ... 
1.7394828 -2.4363813 1.2202601 ] [ 2.981694 0.24266447 -2.7637641 ... -0.6787212 0.59094685 -1.7315725 ] [ 2.9524262 2.8477488 1.1408923 ... -0.40335602 -0.14443675 1.7416399 ] ... [-1.9003971 -2.1328516 -2.5403426 ... 4.7203956 -1.3251591 3.5817165 ] [-1.4874415 4.117216 -1.6866642 ... 1.4209946 0.6165445 -2.2464795 ] [-0.6499679 0.10580824 0.8536914 ... 0.6822082 -0.30703944 -0.63771266]] [[-2.358116 1.4870216 -2.39947 ... -1.0590633 2.5476027 -2.7066712 ] [ 1.1889853 -2.5406675 1.350081 ... 0.8538245 1.2835413 3.196006 ] [ 0.43897343 -1.8879871 -2.4819372 ... -2.5980074 -0.08377782 -1.3839072 ] ... [ 2.489173 1.0875719 -2.0905766 ... -0.27886766 -4.0974565 -2.7552814 ] [-1.8452353 0.6344941 1.0567794 ... -2.5685847 -1.0824755 -1.6736344 ] [ 4.013385 0.14750057 3.087635 ... -1.2192986 0.04364876 -4.7516193 ]]]]]; ov_res: [[[[[ 0.8059145 0.8581577 0.8616549 ... 0.92933494 0.3730989 0.49719167] [ 0.7764978 0.8691451 0.7183995 ... 0.90992224 0.50379086 0.48111188] [ 0.7683988 0.6084186 0.55608785 ... 1.1378381 0.77197236 0.9671886 ] ... [ 0.79902184 0.95404226 1.0435319 ... 1.0785289 0.65842843 0.4951297 ] [ 1.1319902 0.6654943 1.1700214 ... 0.7904399 0.647961 1.2205449 ] [-0.02243948 1.1031816 0.9802902 ... 0.6285314 0.8377434 0.69384485]] [[ 0.721739 1.1214157 1.4690197 ... 0.9642429 0.24054891 0.5604802 ] [ 1.0408832 0.7403606 0.6628803 ... 0.6600999 0.9032265 1.0863405 ] [ 0.4048886 0.67283 0.08725005 ... 0.6278255 1.1513693 0.49541184] ... [ 0.28124332 0.5373306 0.9537966 ... 0.5772033 1.1104035 1.029099 ] [ 0.9726026 0.9462481 1.0102324 ... 0.77182496 0.5764779 0.6066591 ] [ 0.86989295 0.49391246 0.7415444 ... 1.1655561 0.55243415 1.1520895 ]] [[ 0.9284562 0.4164368 0.41810414 ... 0.6047625 0.82946193 0.4560421 ] [ 0.8405815 0.7077676 1.0800782 ... 0.2922127 0.64566386 0.89554715] [ 0.85105443 0.7573736 0.31324247 ... 0.8299173 1.1246887 1.1414996 ] ... [ 0.95420945 0.7853238 0.85755175 ... 1.033823 1.050623 0.7115462 ] [ 0.76198673 0.6962469 1.0429076 ... 
0.8168715 0.73821455 0.8350922 ] [ 0.9921055 1.0151322 0.6180471 ... 0.70474327 0.8736682 0.7614856 ]] ... [[ 1.079751 0.6773894 0.7990759 ... 1.1922654 0.85482633 0.9004704 ] [ 1.1669374 0.96240425 0.3818197 ... 1.0888884 0.5995459 0.6593632 ] [ 0.83335716 0.60744345 0.3423653 ... 0.24544477 1.4674405 0.5455128 ] ... [ 0.67890257 0.7234865 0.6251906 ... 0.4859251 0.4051286 1.0747185 ] [ 0.57646406 0.6512026 0.9542117 ... 0.76368487 0.46455967 0.9384957 ] [ 0.74467295 1.1884295 1.0142157 ... 1.0990041 0.67639685 1.0985655 ]] [[ 0.86379945 1.0083582 0.9939424 ... 0.5692008 1.1280142 0.80196136] [ 0.59817445 0.60690075 0.85909164 ... 0.9062175 0.835609 0.6888987 ] [ 0.8337892 0.8668068 0.7112773 ... 0.6005394 0.379683 0.83753777] ... [ 0.9470067 1.1005927 0.9024443 ... 0.64869887 0.91216516 1.1235459 ] [ 0.5403262 0.8562797 0.77860266 ... 0.9644275 1.4531169 0.61751306] [ 0.7527765 0.73111445 1.2703238 ... 0.72387904 0.9340072 1.2269258 ]] [[ 0.66485715 0.7429582 0.613856 ... 0.87134236 1.3365763 1.0246502 ] [ 1.1566974 0.37913522 0.9248654 ... 0.5507611 0.92506 0.9021785 ] [ 0.53582525 0.9907959 1.1982871 ... 0.80247635 0.9837475 0.6850573 ] ... [ 0.6528043 0.6580963 0.8935573 ... 0.7164005 0.9789203 0.45711565] [ 0.692383 0.8355433 0.76473904 ... 0.29506248 0.48320785 0.6501014 ] [ 0.40266746 1.0864296 0.83264935 ... 0.8665624 0.60954463 0.92982143]]] [[[ 1.3640536 1.9728161 2.3439865 ... 1.9666193 1.5668724 2.3191504 ] [ 1.2187529 1.1687751 1.2346185 ... 1.5395912 1.2043984 0.251189 ] [ 1.5007702 3.0930514 1.5376847 ... 2.0304887 1.8069314 1.2900437 ] ... [ 1.0964357 1.4854654 1.6931297 ... 1.6610851 1.7816837 1.2627265 ] [ 1.7540499 1.9718077 1.0072767 ... 0.5530893 1.7765954 1.4638083 ] [ 0.1991849 2.080161 0.87044 ... 2.4715195 1.76466 1.4263837 ]] [[ 1.8775759 2.3810115 1.6392581 ... 2.0020998 -0.06410372 1.9847867 ] [ 1.2808927 1.5939863 0.8606965 ... 1.0405911 1.760619 1.0151953 ] [ 1.4818716 1.2220187 1.639729 ... 1.6806698 -0.18420374 1.7204034 ] ... 
[ 1.43003 1.1348455 1.7977512 ... 0.69244593 0.7893446 1.501508 ] [ 2.0741222 2.9278777 0.67289066 ... 0.83947855 0.4407714 1.2524527 ] [ 1.6432014 0.5196497 1.3192106 ... 2.7669232 2.1383405 1.579217 ]] [[ 0.83889395 0.963778 2.2289836 ... 1.5556436 2.0998766 1.4566817 ] [ 1.6179668 0.18973625 1.938013 ... -0.07281399 1.5362377 1.1753443 ] [ 1.7455231 2.29707 1.8660961 ... 1.2673157 1.051067 1.861392 ] ... [ 2.3711429 1.5784938 0.91805243 ... 0.9835302 0.8432749 1.3430808 ] [ 1.2956009 1.3157054 0.5651843 ... 1.7549417 0.6708782 1.6066669 ] [ 1.1526034 1.1633446 1.2839218 ... 2.1515045 0.94802225 2.0445204 ]] ... [[ 1.4513775 1.3654063 2.1007211 ... 1.6588278 1.2319202 1.8526088 ] [ 1.1787925 0.9483334 -0.22236502 ... 1.7736108 2.5582366 1.2706511 ] [ 0.9012172 1.1249466 1.1400805 ... 1.6605248 1.4738908 0.64467895] ... [ 0.5029026 1.4900612 0.95403993 ... 1.2824392 1.1177847 0.02169847] [ 1.3007336 1.7828165 2.3144865 ... 1.0828878 -0.5176543 1.6089224 ] [ 0.62747073 2.13232 2.5936718 ... -0.04382801 1.8058591 0.76965284]] [[ 1.9977086 1.801785 1.2888803 ... 0.94848025 0.24910975 1.1151483 ] [ 1.1319232 2.2367396 2.2243807 ... 1.4327662 2.5280132 1.8615162 ] [ 1.5329775 1.4839058 0.7523669 ... 1.3322848 2.6525803 1.6073929 ] ... [ 1.6869476 0.6874348 1.336363 ... 2.3272665 1.4876558 2.1368442 ] [ 1.4579982 0.7812674 1.2860649 ... 2.4128156 1.4148344 0.95291007] [ 1.4301174 3.5133557 0.6868188 ... 1.559159 2.367858 1.7936977 ]] [[ 0.24950743 1.4347292 2.9149742 ... 1.6558318 0.86100394 1.2227532 ] [ 1.6034951 0.08780265 2.5079415 ... 1.2960907 1.3208444 0.49378246] [ 2.0560775 0.9461795 0.89510965 ... 1.5349162 0.59134185 1.3086202 ] ... [ 0.7165059 1.1597711 2.0540113 ... 1.9089761 0.36375618 0.6216927 ] [ 2.4073453 1.8255014 2.4657292 ... 1.6969734 0.9569481 0.79320455] [ 1.1911831 2.373402 1.5007002 ... 1.905096 1.5575511 1.4562931 ]]] [[[ 2.1350193 -0.52424383 -0.52502626 ... -0.62540907 1.5223393 -1.9036462 ] [ 0.95298374 -2.3742843 1.2833444 ... 
1.621863 0.22159676 3.5141857 ] [ 1.3868709 0.49976385 1.0308721 ... -2.6696084 -1.4408922 0.1915305 ] ... [ 1.0684752 1.331138 -1.1448979 ... 2.6836684 -0.6011026 -0.5955556 ] [ 2.0247626 1.6504573 -3.4479825 ... 0.2069636 -3.2353413 -2.823192 ] [-4.321213 1.6624572 0.58444834 ... -1.9894667 0.5758709 0.49376675]] [[ 4.024741 0.48313072 0.18847956 ... -1.3559495 -1.3074746 3.4574676 ] [-1.7434974 -4.781385 1.9098291 ... -0.8575281 -2.1535866 -0.840062 ] [-0.29679644 0.93220466 -4.056655 ... 0.8307675 -1.8074172 -2.294017 ] ... [ 2.1079571 0.41970223 2.7002842 ... 3.6899433 0.8606595 0.7654275 ] [ 2.219809 -0.34555835 -2.2159524 ... 0.6754465 0.3985864 -0.9065976 ] [-2.5810795 4.20112 2.162086 ... -1.948998 2.7415915 0.05225006]] [[ 4.061448 0.7379591 0.10742327 ... 0.9427986 -1.0645769 2.6259658 ] [-0.44946837 -2.2629225 -0.4238726 ... -1.1772531 -1.86029 0.25083655] [ 0.66134477 -0.56509995 -1.509292 ... 2.1239295 1.144763 2.7950027 ] ... [ 1.0002156 0.37501484 -0.2571483 ... -0.17294168 -0.9168465 -0.2581496 ] [-0.03336315 -0.7051195 -3.1691809 ... 0.16526552 1.6585382 -3.0037794 ] [-0.8698529 -1.2049359 -3.47226 ... -0.5069012 2.684542 1.3870739 ]] ... [[ 0.22279939 -0.9158968 -1.0592246 ... -0.94983435 -0.8147049 -1.2747854 ] [-2.3401325 -1.2726218 1.8959649 ... 2.6151783 0.7126522 6.192548 ] [ 0.86694115 3.2480936 1.9517139 ... -2.4102054 0.7210972 3.9346735 ] ... [ 1.7914517 -2.0429409 0.40017664 ... 0.4839458 -0.09322178 -0.36194223] [ 0.210387 0.18487257 3.3971105 ... 0.4331996 1.374383 -0.61632246] [ 1.0015702 -1.4333725 3.12322 ... -0.74306834 4.4846077 0.9239156 ]] [[ 0.54146343 4.9594326 -1.1093241 ... 1.7394828 -2.4363813 1.2202601 ] [ 2.981694 0.24266447 -2.7637641 ... -0.6787212 0.59094685 -1.7315725 ] [ 2.9524262 2.8477488 1.1408923 ... -0.40335602 -0.14443675 1.7416399 ] ... [-1.9003971 -2.1328516 -2.5403426 ... 4.7203956 -1.3251591 3.5817165 ] [-1.4874415 4.117216 -1.6866642 ... 
1.4209946 0.6165445 -2.2464795 ] [-0.6499679 0.10580824 0.8536914 ... 0.6822082 -0.30703944 -0.63771266]] [[-2.358116 1.4870216 -2.39947 ... -1.0590633 2.5476027 -2.7066712 ] [ 1.1889853 -2.5406675 1.350081 ... 0.8538245 1.2835413 3.196006 ] [ 0.43897343 -1.8879871 -2.4819372 ... -2.5980074 -0.08377782 -1.3839072 ] ... [ 2.489173 1.0875719 -2.0905766 ... -0.27886766 -4.0974565 -2.7552814 ] [-1.8452353 0.6344941 1.0567794 ... -2.5685847 -1.0824755 -1.6736344 ] [ 4.013385 0.14750057 3.087635 ... -1.2192986 0.04364876 -4.7516193 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 1, 1], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_962.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.1156 -1.0470 0.9972 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.6736 (2,1,1,.,.) = -1.5135 (3,1,1,.,.) = -0.3459 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 3.27853501e-01 -1.59349561e-01 ... 8.04681778e-02 -1.45296246e-01 1.15588784e-01] [ 1.15588784e-01 4.90350395e-01 5.02954364e-01 ... 9.23688829e-01 -1.44244105e-01 1.15588784e-01] ... [ 1.15588784e-01 1.56067491e+00 4.90564048e-01 ... 3.04329336e-01 -1.63487887e+00 1.15588784e-01] [ 1.15588784e-01 -9.07585740e-01 -3.77826691e-02 ... 2.57467926e-01 5.06604910e-01 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]] [[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 -7.40677536e-01 -1.26474351e-02 ... 1.13331452e-01 5.11919141e-01 1.15588784e-01] [ 1.15588784e-01 -5.25749087e-01 1.08292770e+00 ... 3.02547693e-01 2.64407218e-01 1.15588784e-01] ... 
[ 1.15588784e-01 4.25560772e-02 -9.34373736e-02 ... 8.84919405e-01 6.85293853e-01 1.15588784e-01] [ 1.15588784e-01 1.00513005e+00 6.98056042e-01 ... 7.90173590e-01 1.19730747e+00 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]] [[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 -8.04608822e-01 3.93352777e-01 ... -2.89542615e-01 -1.22702527e+00 1.15588784e-01] [ 1.15588784e-01 -1.02428049e-01 8.79904449e-01 ... 2.73889452e-02 2.08065063e-01 1.15588784e-01] ... [ 1.15588784e-01 7.98180848e-02 -4.71292555e-01 ... -4.99217153e-01 -5.07415593e-01 1.15588784e-01] [ 1.15588784e-01 6.43926263e-01 1.79756057e+00 ... -1.88999087e-01 2.54079580e-01 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]] ... [[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 -2.02837169e-01 -1.77144617e-01 ... -4.12240475e-02 -1.19026124e-01 1.15588784e-01] [ 1.15588784e-01 -1.74881160e-01 3.33303571e-01 ... 7.22481847e-01 -4.54516530e-01 1.15588784e-01] ... [ 1.15588784e-01 1.26918823e-01 5.25434375e-01 ... 1.53533116e-01 -5.41270673e-01 1.15588784e-01] [ 1.15588784e-01 2.62432724e-01 -6.93077207e-01 ... 2.05588937e-01 6.15489841e-01 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]] [[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 -1.10951459e+00 6.15629733e-01 ... 1.16470158e+00 -4.20438588e-01 1.15588784e-01] [ 1.15588784e-01 3.32314730e-01 1.04694796e+00 ... 4.99776274e-01 -3.35046411e-01 1.15588784e-01] ... [ 1.15588784e-01 -1.85351849e-01 -2.96583921e-01 ... 4.08376575e-01 -6.93958342e-01 1.15588784e-01] [ 1.15588784e-01 -4.26524520e-01 1.19487262e+00 ... 
7.63515711e-01 -1.69034660e-01 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]] [[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 5.24430692e-01 -4.99040931e-02 ... -9.29483891e-01 4.71193761e-01 1.15588784e-01] [ 1.15588784e-01 1.01814449e-01 -5.39143324e-01 ... 1.53424457e-01 -9.21897173e-01 1.15588784e-01] ... [ 1.15588784e-01 9.63898420e-01 1.37723601e+00 ... 2.32526302e-01 -9.28398132e-01 1.15588784e-01] [ 1.15588784e-01 5.16867936e-01 5.29347360e-01 ... -1.23321247e+00 -6.18819833e-01 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]]] [[[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 -2.01710224e+00 2.91764736e-03 ... -3.57051373e+00 1.36001301e+00 -1.04698515e+00] [-1.04698515e+00 -2.35781956e+00 -9.65864658e-02 ... -2.51804447e+00 -1.36707830e+00 -1.04698515e+00] ... [-1.04698515e+00 -1.31229925e+00 -1.56179953e+00 ... 2.70304060e+00 -2.87426424e+00 -1.04698515e+00] [-1.04698515e+00 -1.90022767e+00 -8.93655539e-01 ... -2.00757265e+00 4.01871681e-01 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]] [[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 1.09134245e+00 1.04063296e+00 ... -5.20269573e-01 -2.17797852e+00 -1.04698515e+00] [-1.04698515e+00 2.59766579e-01 -5.24684012e-01 ... 1.69658995e+00 -1.71384144e+00 -1.04698515e+00] ... [-1.04698515e+00 4.68795419e-01 -4.65597928e-01 ... 2.07699347e+00 5.08036494e-01 -1.04698515e+00] [-1.04698515e+00 -2.89506650e+00 -9.11730707e-01 ... -1.16343009e+00 1.63911533e+00 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... 
-1.04698515e+00 -1.04698515e+00 -1.04698515e+00]] [[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 -1.20733476e+00 -3.85454440e+00 ... -1.16444612e+00 3.51469946e+00 -1.04698515e+00] [-1.04698515e+00 -1.60186875e+00 -3.59019017e+00 ... 3.41249466e-01 3.34529638e-01 -1.04698515e+00] ... [-1.04698515e+00 -1.43397951e+00 -1.57516801e+00 ... -9.13796306e-01 -9.74601924e-01 -1.04698515e+00] [-1.04698515e+00 -3.12437248e+00 -1.86562657e+00 ... 1.46149659e+00 -1.84735203e+00 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]] ... [[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 2.46347666e-01 -1.05514574e+00 ... -2.24386668e+00 -1.93508852e+00 -1.04698515e+00] [-1.04698515e+00 -1.44246173e+00 4.81928229e-01 ... 1.58558249e-01 -8.49980235e-01 -1.04698515e+00] ... [-1.04698515e+00 -2.04616618e+00 4.47549820e-02 ... -2.01158667e+00 -3.75131297e+00 -1.04698515e+00] [-1.04698515e+00 1.70741487e+00 3.06404829e-01 ... -1.86387706e+00 -1.99207449e+00 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]] [[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 -4.70842004e-01 -8.17897320e-01 ... -1.43914914e+00 1.32549500e+00 -1.04698515e+00] [-1.04698515e+00 -2.34417009e+00 -2.17186141e+00 ... -1.30027485e+00 -2.35225534e+00 -1.04698515e+00] ... [-1.04698515e+00 -7.48957098e-01 -2.03465390e+00 ... -3.30511642e+00 -1.17364407e+00 -1.04698515e+00] [-1.04698515e+00 9.09307718e-01 5.74062228e-01 ... -3.63481236e+00 -3.36425328e+00 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]] [[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... 
-1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 -1.85301256e+00 1.05826044e+00 ... -2.89294243e-01 -2.25241971e+00 -1.04698515e+00] [-1.04698515e+00 -1.42571115e+00 -5.51396370e-01 ... 1.38720274e+00 -3.22991610e+00 -1.04698515e+00] ... [-1.04698515e+00 1.98425198e+00 -2.19903135e+00 ... -5.34138501e-01 -1.26907396e+00 -1.04698515e+00] [-1.04698515e+00 -8.05895090e-01 -1.11255324e+00 ... -1.34413588e+00 2.77120304e+00 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]]] [[[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 1.08554804e+00 2.58059323e-01 ... 6.45338535e-01 2.44805634e-01 9.97217476e-01] [ 9.97217476e-01 8.70580316e-01 1.09584486e+00 ... 1.12943959e+00 1.11292744e+00 9.97217476e-01] ... [ 9.97217476e-01 6.84014440e-01 1.13963389e+00 ... 1.29800522e+00 1.14902890e+00 9.97217476e-01] [ 9.97217476e-01 1.03720427e+00 7.80857086e-01 ... 1.43333626e+00 1.56609511e+00 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]] [[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 1.10281777e+00 8.51848125e-01 ... 1.74694777e-01 9.38803494e-01 9.97217476e-01] [ 9.97217476e-01 1.30956018e+00 9.24463809e-01 ... 8.88178587e-01 1.63043976e+00 9.97217476e-01] ... [ 9.97217476e-01 6.36769891e-01 5.25691688e-01 ... 7.91682720e-01 9.61314380e-01 9.97217476e-01] [ 9.97217476e-01 7.79001117e-01 1.23089135e+00 ... 1.54626930e+00 9.44117188e-01 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]] [[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 1.75865841e+00 6.10534847e-01 ... 5.73638797e-01 1.11649418e+00 9.97217476e-01] [ 9.97217476e-01 1.02379429e+00 1.42139030e+00 ... 
2.76528656e-01 1.07330108e+00 9.97217476e-01] ... [ 9.97217476e-01 1.57651138e+00 3.44335198e-01 ... 1.55140209e+00 9.03666914e-01 9.97217476e-01] [ 9.97217476e-01 1.34914374e+00 1.58517861e+00 ... 9.86756921e-01 7.73926258e-01 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]] ... [[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 1.90128803e+00 5.27144670e-01 ... 9.62174833e-01 3.14848959e-01 9.97217476e-01] [ 9.97217476e-01 1.10385716e+00 5.72455049e-01 ... 1.03994954e+00 1.00509417e+00 9.97217476e-01] ... [ 9.97217476e-01 9.03320432e-01 1.55814254e+00 ... 7.30087876e-01 3.86615515e-01 9.97217476e-01] [ 9.97217476e-01 9.81003702e-01 1.36622214e+00 ... 1.69160223e+00 3.99575353e-01 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]] [[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 1.28610349e+00 1.00864375e+00 ... 7.44782567e-01 3.86996806e-01 9.97217476e-01] [ 9.97217476e-01 6.51872575e-01 6.85965598e-01 ... 1.30984497e+00 9.98467565e-01 9.97217476e-01] ... [ 9.97217476e-01 1.08035254e+00 1.60178423e+00 ... 7.62215436e-01 8.34190607e-01 9.97217476e-01] [ 9.97217476e-01 7.49539495e-01 1.31975257e+00 ... 1.29164028e+00 1.13714206e+00 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]] [[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 8.05583477e-01 1.04912305e+00 ... 1.51916540e+00 1.50486922e+00 9.97217476e-01] [ 9.97217476e-01 2.66046882e-01 7.94344306e-01 ... 1.06859827e+00 7.59015560e-01 9.97217476e-01] ... [ 9.97217476e-01 9.20656919e-01 6.72609985e-01 ... 1.10929000e+00 5.47159493e-01 9.97217476e-01] [ 9.97217476e-01 5.89189410e-01 1.33701146e+00 ... 
3.86964023e-01 1.02961493e+00 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]]]]]; ov_res: [[[[[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 3.27853531e-01 -1.59349546e-01 ... 8.04681778e-02 -1.45296246e-01 1.15588784e-01] [ 1.15588784e-01 4.90350395e-01 5.02954423e-01 ... 9.23688829e-01 -1.44244090e-01 1.15588784e-01] ... [ 1.15588784e-01 1.56067491e+00 4.90564048e-01 ... 3.04329365e-01 -1.63487887e+00 1.15588784e-01] [ 1.15588784e-01 -9.07585740e-01 -3.77826728e-02 ... 2.57467926e-01 5.06604970e-01 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]] [[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 -7.40677536e-01 -1.26474285e-02 ... 1.13331452e-01 5.11919200e-01 1.15588784e-01] [ 1.15588784e-01 -5.25749087e-01 1.08292770e+00 ... 3.02547693e-01 2.64407218e-01 1.15588784e-01] ... [ 1.15588784e-01 4.25560772e-02 -9.34373662e-02 ... 8.84919405e-01 6.85293853e-01 1.15588784e-01] [ 1.15588784e-01 1.00513017e+00 6.98056042e-01 ... 7.90173590e-01 1.19730747e+00 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]] [[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 -8.04608822e-01 3.93352777e-01 ... -2.89542615e-01 -1.22702527e+00 1.15588784e-01] [ 1.15588784e-01 -1.02428049e-01 8.79904449e-01 ... 2.73889489e-02 2.08065063e-01 1.15588784e-01] ... [ 1.15588784e-01 7.98180774e-02 -4.71292585e-01 ... -4.99217123e-01 -5.07415593e-01 1.15588784e-01] [ 1.15588784e-01 6.43926263e-01 1.79756057e+00 ... -1.88999102e-01 2.54079580e-01 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]] ... [[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 
1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 -2.02837184e-01 -1.77144617e-01 ... -4.12240475e-02 -1.19026124e-01 1.15588784e-01] [ 1.15588784e-01 -1.74881175e-01 3.33303541e-01 ... 7.22481847e-01 -4.54516500e-01 1.15588784e-01] ... [ 1.15588784e-01 1.26918823e-01 5.25434315e-01 ... 1.53533116e-01 -5.41270673e-01 1.15588784e-01] [ 1.15588784e-01 2.62432724e-01 -6.93077207e-01 ... 2.05588937e-01 6.15489841e-01 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]] [[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 -1.10951459e+00 6.15629733e-01 ... 1.16470158e+00 -4.20438588e-01 1.15588784e-01] [ 1.15588784e-01 3.32314730e-01 1.04694796e+00 ... 4.99776274e-01 -3.35046411e-01 1.15588784e-01] ... [ 1.15588784e-01 -1.85351834e-01 -2.96583921e-01 ... 4.08376575e-01 -6.93958342e-01 1.15588784e-01] [ 1.15588784e-01 -4.26524550e-01 1.19487262e+00 ... 7.63515711e-01 -1.69034660e-01 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]] [[ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01] [ 1.15588784e-01 5.24430692e-01 -4.99040931e-02 ... -9.29483891e-01 4.71193761e-01 1.15588784e-01] [ 1.15588784e-01 1.01814449e-01 -5.39143324e-01 ... 1.53424457e-01 -9.21897233e-01 1.15588784e-01] ... [ 1.15588784e-01 9.63898420e-01 1.37723601e+00 ... 2.32526317e-01 -9.28398192e-01 1.15588784e-01] [ 1.15588784e-01 5.16867936e-01 5.29347360e-01 ... -1.23321247e+00 -6.18819833e-01 1.15588784e-01] [ 1.15588784e-01 1.15588784e-01 1.15588784e-01 ... 1.15588784e-01 1.15588784e-01 1.15588784e-01]]] [[[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 -2.01710224e+00 2.91763549e-03 ... -3.57051373e+00 1.36001289e+00 -1.04698515e+00] [-1.04698515e+00 -2.35781980e+00 -9.65864360e-02 ... 
-2.51804423e+00 -1.36707830e+00 -1.04698515e+00] ... [-1.04698515e+00 -1.31229925e+00 -1.56179941e+00 ... 2.70304060e+00 -2.87426424e+00 -1.04698515e+00] [-1.04698515e+00 -1.90022767e+00 -8.93655598e-01 ... -2.00757265e+00 4.01871681e-01 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]] [[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 1.09134257e+00 1.04063284e+00 ... -5.20269573e-01 -2.17797875e+00 -1.04698515e+00] [-1.04698515e+00 2.59766549e-01 -5.24684012e-01 ... 1.69658995e+00 -1.71384144e+00 -1.04698515e+00] ... [-1.04698515e+00 4.68795449e-01 -4.65597898e-01 ... 2.07699347e+00 5.08036494e-01 -1.04698515e+00] [-1.04698515e+00 -2.89506650e+00 -9.11730707e-01 ... -1.16343009e+00 1.63911545e+00 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]] [[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 -1.20733476e+00 -3.85454440e+00 ... -1.16444612e+00 3.51469922e+00 -1.04698515e+00] [-1.04698515e+00 -1.60186875e+00 -3.59019017e+00 ... 3.41249436e-01 3.34529698e-01 -1.04698515e+00] ... [-1.04698515e+00 -1.43397951e+00 -1.57516801e+00 ... -9.13796306e-01 -9.74601924e-01 -1.04698515e+00] [-1.04698515e+00 -3.12437248e+00 -1.86562645e+00 ... 1.46149671e+00 -1.84735203e+00 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]] ... [[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 2.46347621e-01 -1.05514574e+00 ... -2.24386668e+00 -1.93508852e+00 -1.04698515e+00] [-1.04698515e+00 -1.44246173e+00 4.81928229e-01 ... 1.58558235e-01 -8.49980235e-01 -1.04698515e+00] ... [-1.04698515e+00 -2.04616618e+00 4.47550379e-02 ... 
-2.01158667e+00 -3.75131297e+00 -1.04698515e+00] [-1.04698515e+00 1.70741487e+00 3.06404769e-01 ... -1.86387718e+00 -1.99207449e+00 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]] [[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 -4.70842004e-01 -8.17897320e-01 ... -1.43914914e+00 1.32549489e+00 -1.04698515e+00] [-1.04698515e+00 -2.34417033e+00 -2.17186141e+00 ... -1.30027473e+00 -2.35225558e+00 -1.04698515e+00] ... [-1.04698515e+00 -7.48957098e-01 -2.03465390e+00 ... -3.30511642e+00 -1.17364407e+00 -1.04698515e+00] [-1.04698515e+00 9.09307778e-01 5.74062228e-01 ... -3.63481236e+00 -3.36425328e+00 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]] [[-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00] [-1.04698515e+00 -1.85301244e+00 1.05826044e+00 ... -2.89294213e-01 -2.25241971e+00 -1.04698515e+00] [-1.04698515e+00 -1.42571104e+00 -5.51396430e-01 ... 1.38720262e+00 -3.22991610e+00 -1.04698515e+00] ... [-1.04698515e+00 1.98425198e+00 -2.19903135e+00 ... -5.34138501e-01 -1.26907396e+00 -1.04698515e+00] [-1.04698515e+00 -8.05895090e-01 -1.11255324e+00 ... -1.34413588e+00 2.77120304e+00 -1.04698515e+00] [-1.04698515e+00 -1.04698515e+00 -1.04698515e+00 ... -1.04698515e+00 -1.04698515e+00 -1.04698515e+00]]] [[[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 1.08554804e+00 2.58059323e-01 ... 6.45338595e-01 2.44805604e-01 9.97217476e-01] [ 9.97217476e-01 8.70580256e-01 1.09584486e+00 ... 1.12943959e+00 1.11292744e+00 9.97217476e-01] ... [ 9.97217476e-01 6.84014380e-01 1.13963389e+00 ... 1.29800522e+00 1.14902890e+00 9.97217476e-01] [ 9.97217476e-01 1.03720427e+00 7.80857086e-01 ... 
1.43333614e+00 1.56609511e+00 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]] [[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 1.10281777e+00 8.51848125e-01 ... 1.74694777e-01 9.38803494e-01 9.97217476e-01] [ 9.97217476e-01 1.30956018e+00 9.24463809e-01 ... 8.88178587e-01 1.63043976e+00 9.97217476e-01] ... [ 9.97217476e-01 6.36769891e-01 5.25691688e-01 ... 7.91682780e-01 9.61314380e-01 9.97217476e-01] [ 9.97217476e-01 7.79001117e-01 1.23089135e+00 ... 1.54626930e+00 9.44117188e-01 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]] [[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 1.75865841e+00 6.10534847e-01 ... 5.73638797e-01 1.11649418e+00 9.97217476e-01] [ 9.97217476e-01 1.02379429e+00 1.42139018e+00 ... 2.76528656e-01 1.07330108e+00 9.97217476e-01] ... [ 9.97217476e-01 1.57651138e+00 3.44335228e-01 ... 1.55140209e+00 9.03666914e-01 9.97217476e-01] [ 9.97217476e-01 1.34914374e+00 1.58517861e+00 ... 9.86756921e-01 7.73926258e-01 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]] ... [[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 1.90128803e+00 5.27144670e-01 ... 9.62174833e-01 3.14848989e-01 9.97217476e-01] [ 9.97217476e-01 1.10385716e+00 5.72455108e-01 ... 1.03994954e+00 1.00509417e+00 9.97217476e-01] ... [ 9.97217476e-01 9.03320432e-01 1.55814254e+00 ... 7.30087876e-01 3.86615515e-01 9.97217476e-01] [ 9.97217476e-01 9.81003702e-01 1.36622214e+00 ... 1.69160223e+00 3.99575353e-01 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]] [[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 
9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 1.28610349e+00 1.00864375e+00 ... 7.44782567e-01 3.86996835e-01 9.97217476e-01] [ 9.97217476e-01 6.51872575e-01 6.85965598e-01 ... 1.30984497e+00 9.98467565e-01 9.97217476e-01] ... [ 9.97217476e-01 1.08035254e+00 1.60178423e+00 ... 7.62215436e-01 8.34190607e-01 9.97217476e-01] [ 9.97217476e-01 7.49539495e-01 1.31975257e+00 ... 1.29164028e+00 1.13714206e+00 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]] [[ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01] [ 9.97217476e-01 8.05583477e-01 1.04912317e+00 ... 1.51916540e+00 1.50486922e+00 9.97217476e-01] [ 9.97217476e-01 2.66046911e-01 7.94344306e-01 ... 1.06859827e+00 7.59015560e-01 9.97217476e-01] ... [ 9.97217476e-01 9.20656919e-01 6.72609985e-01 ... 1.10929000e+00 5.47159493e-01 9.97217476e-01] [ 9.97217476e-01 5.89189470e-01 1.33701146e+00 ... 3.86964053e-01 1.02961493e+00 9.97217476e-01] [ 9.97217476e-01 9.97217476e-01 9.97217476e-01 ... 9.97217476e-01 9.97217476e-01 9.97217476e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 0, 0], 'dilations': [2, 2, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_964.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.3605 -0.2014 0.6170 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.6207 (2,1,1,.,.) = -1.3458 (3,1,1,.,.) = 0.2342 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%10) fw_re: [[[[[ 2.24304080e-01 -7.98018813e-01 5.55161238e-02 ... -1.45507073e+00 -5.75508773e-02 9.91475284e-02] [ 1.46923900e-01 -1.10727191e-01 6.77148998e-02 ... 2.37253010e-01 -1.33638009e-01 -6.04715586e-01] [-1.33518851e+00 3.01671386e-01 1.22563452e-01 ... 3.46181393e-01 2.92128623e-01 7.55278409e-01] ... [-1.50830746e+00 1.07424229e-01 2.12362528e-01 ... -1.25366271e+00 -4.32231009e-01 3.88041615e-01] [-8.26045275e-01 -9.73901629e-01 -7.55674958e-01 ... 4.37779605e-01 8.92189682e-01 5.24460018e-01] [ 8.12807381e-01 -2.18869284e-01 -1.30161834e+00 ... -5.01603425e-01 3.77690315e-01 4.67469633e-01]] [[-1.03231788e+00 -2.54883975e-01 -6.66374207e-01 ... 1.51744485e-01 -3.80528331e-01 -9.66520011e-01] [ 4.91214991e-01 -3.10789526e-01 -6.44496202e-01 ... -1.69904634e-01 -9.56791282e-01 -5.91251612e-01] [-1.00692117e+00 -5.05975544e-01 2.55796373e-01 ... 
3.93784046e-03 -1.87956557e-01 2.34708488e-02] ... [-6.64070666e-01 -2.13431418e-02 -4.71446812e-02 ... 2.88306773e-01 -2.31192410e-02 7.79230595e-02] [-6.24734223e-01 -8.93350184e-01 9.57181752e-01 ... 7.50509918e-01 -1.07434630e+00 -3.39821577e-01] [ 1.05202645e-01 5.31953990e-01 -9.00509596e-01 ... -1.35596681e+00 -5.84782362e-01 -6.21674359e-01]] [[-1.05218315e+00 -1.09028828e+00 -1.15292323e+00 ... 6.81896269e-01 -2.40048707e-01 -4.89344895e-02] [-6.92683697e-01 -4.67948586e-01 7.37493813e-01 ... -1.03733110e+00 -7.89088249e-01 -6.11755550e-02] [-2.28954554e-01 -5.91427207e-01 -1.17886558e-01 ... -4.55329627e-01 -1.19190276e+00 -7.93167353e-01] ... [ 1.73238039e-01 -8.04484606e-01 -9.42738593e-01 ... 2.98136413e-01 -5.96811891e-01 -1.88359573e-01] [-2.76264697e-01 2.71529138e-01 2.15510190e-01 ... -3.89673591e-01 3.65902781e-02 -1.99343547e-01] [-9.21236873e-01 -1.35243344e+00 -6.21696413e-01 ... -4.62427825e-01 5.97240031e-01 -6.48701310e-01]] ... [[-4.96370763e-01 -1.06027150e+00 -2.71082699e-01 ... -7.83537865e-01 -1.77303529e+00 1.65945292e-03] [-1.17444968e+00 -3.95119011e-01 -5.80878854e-02 ... -3.70284259e-01 -6.99868262e-01 -4.05379951e-01] [ 1.37557715e-01 -5.12698293e-02 -9.96112823e-03 ... -2.75101721e-01 -1.65035343e+00 -2.32882991e-01] ... [-3.87486875e-01 -6.22966707e-01 -2.98257589e-01 ... -5.65715730e-02 9.45207477e-03 -2.31993407e-01] [-2.25647950e+00 -2.19606221e-01 -1.54836845e+00 ... 1.57020211e-01 -5.41126728e-01 9.92676318e-01] [ 2.20376670e-01 8.88765752e-02 3.64802837e-01 ... -2.13641316e-01 -1.51129150e+00 -1.12054932e+00]] [[ 3.17431986e-01 -1.25480556e+00 -7.09370792e-01 ... -3.41915190e-01 -4.26827341e-01 -1.23833847e+00] [ 4.51320887e-01 -3.53296995e-01 7.14777648e-01 ... -5.92809677e-01 -5.73355317e-01 -1.87330201e-01] [-6.76269174e-01 -1.32822379e-01 9.88484204e-01 ... -8.79855335e-01 4.66478169e-02 -5.79079747e-01] ... [ 4.64005113e-01 -1.56891525e-01 1.79417074e-01 ... 
-5.31493962e-01 -7.48866439e-01 -9.00525451e-01] [-1.99348426e+00 3.97412956e-01 8.81663263e-01 ... 5.34475744e-01 2.30765164e-01 -9.71127152e-01] [-1.10348582e+00 -1.37631083e+00 -4.33788121e-01 ... -6.52845442e-01 -4.56902087e-01 -8.01635683e-01]] [[-2.31396094e-01 -3.11445177e-01 -2.26337567e-01 ... 5.28721511e-01 1.77761674e-01 -7.50330091e-02] [-1.98074722e+00 -6.86993957e-01 -3.75282884e-01 ... -1.75801325e+00 -2.50668168e-01 4.45700586e-02] [-5.93568623e-01 -1.15217316e+00 -1.15844381e+00 ... -1.75705361e+00 -9.87818360e-01 -2.42947966e-01] ... [-5.57004213e-01 -1.92574397e-01 -2.36618891e-01 ... -4.21759486e-03 3.64537895e-01 -4.93713081e-01] [-1.67365134e-01 7.37435877e-01 -1.32553935e-01 ... -1.01566625e+00 -1.23376459e-01 2.40187466e-01] [-6.44050837e-01 -3.77464443e-01 -7.83237755e-01 ... 4.96013761e-01 -5.23339748e-01 3.03398311e-01]]] [[[-2.37611723e+00 2.00858164e+00 1.00020081e-01 ... -4.66700047e-02 2.52994013e+00 -1.03351903e+00] [-7.16152012e-01 -1.08666444e+00 8.84579182e-01 ... -6.72068238e-01 -1.88762021e+00 5.06948531e-01] [-1.00991404e+00 -1.94256783e+00 3.07150900e-01 ... -1.71660471e+00 -1.71351790e+00 -2.76648521e-01] ... [ 5.76154470e-01 -1.43915176e+00 3.04225862e-01 ... -2.74863291e+00 -1.60466707e+00 -6.28167152e-01] [ 8.41430783e-01 -3.05868268e-01 7.20597565e-01 ... 6.79248571e-02 -1.30872285e+00 -1.88119066e+00] [ 2.85077751e-01 -4.91584510e-01 9.91781950e-02 ... -2.42702818e+00 9.68413591e-01 4.41465914e-01]] [[-1.52704012e+00 -4.43272829e-01 -4.00373638e-01 ... 2.45559722e-01 1.29174221e+00 -6.30850494e-01] [-1.61797929e+00 -2.88049555e+00 -3.24455237e+00 ... -1.46544778e+00 3.99807036e-01 -1.78012717e+00] [-5.47269404e-01 -1.62543321e+00 -1.79248899e-02 ... -1.11857200e+00 -6.13039553e-01 -1.54034007e+00] ... [-7.70773649e-01 -2.04835463e+00 -1.25529242e+00 ... -4.56070185e-01 2.46857595e+00 -2.73081017e+00] [-4.38067883e-01 2.03887343e+00 5.50141513e-01 ... 
5.96296489e-01 -1.35438800e+00 -6.95308208e-01] [ 1.71127021e+00 1.15174258e+00 2.27230936e-01 ... -1.15611196e+00 -3.19602561e+00 9.39399600e-01]] [[ 1.13219810e+00 1.04633784e+00 -1.02838051e+00 ... -1.59666383e+00 1.19738877e-01 1.24353856e-01] [ 5.46927929e-01 -6.78297758e-01 1.24690026e-01 ... 3.06970179e-01 1.24917650e+00 1.35225058e+00] [-2.80824929e-01 -2.53517061e-01 -1.75401473e+00 ... 9.00803208e-02 -1.60237694e+00 1.76324475e+00] ... [ 3.92535627e-01 1.08654642e+00 6.77209139e-01 ... 4.01707113e-01 -1.64357275e-01 -7.04743862e-02] [-8.04301426e-02 1.83953190e+00 -5.58726013e-01 ... 2.67028522e+00 -1.04485607e+00 -6.78225875e-01] [ 2.58651042e+00 2.27614969e-01 -1.13410391e-01 ... -6.62256241e-01 4.80834067e-01 -2.16109705e+00]] ... [[ 1.50937879e+00 2.02997279e+00 1.93425953e-01 ... -1.26612484e+00 -1.86058685e-01 -1.53459281e-01] [-1.28238678e+00 3.58015656e-01 1.60230267e+00 ... -2.71256471e+00 -4.10930812e-01 -9.74167228e-01] [-1.08226693e+00 -1.13136888e+00 -7.68184006e-01 ... 6.47365630e-01 8.34833741e-01 1.19127297e+00] ... [-1.55598152e+00 -4.43487555e-01 -1.99125648e+00 ... -1.33581710e+00 2.96002245e+00 4.79753673e-01] [-1.16697061e+00 -1.59685838e+00 -3.34796786e-01 ... 9.96094823e-01 -1.73185372e+00 -1.82170331e+00] [ 2.28691518e-01 -7.26849854e-01 -9.88392711e-01 ... -8.82965684e-01 -5.30216992e-02 2.65855575e+00]] [[-4.67394829e-01 3.98268342e-01 5.61568260e-01 ... 2.91550815e-01 -1.75953829e+00 -3.05906892e-01] [ 7.20388770e-01 -1.97407103e+00 3.78812730e-01 ... 4.02498305e-01 -1.91964781e+00 -1.37615657e+00] [-1.23763144e+00 3.17409444e+00 1.08121502e+00 ... -2.37340498e+00 1.78634048e-01 -1.02924204e+00] ... [-1.11302756e-01 -1.46113336e+00 9.65704203e-01 ... 1.43955004e+00 -3.58383030e-01 -3.05432528e-01] [ 1.16625357e+00 -2.20996213e+00 -4.67978746e-01 ... -8.39306712e-01 4.08556283e-01 -1.64050317e+00] [ 5.23177266e-01 1.17724574e+00 -1.69364369e+00 ... 
1.60396194e+00 1.62275839e+00 -9.34935451e-01]] [[ 4.91169691e-01 -7.84136415e-01 9.90492821e-01 ... -1.77931142e+00 8.21002483e-01 1.65050173e+00] [-4.40549761e-01 1.92454964e-01 -1.43637732e-01 ... 1.66468811e+00 6.21475339e-01 -6.04764968e-02] [-5.27042508e-01 1.82964396e+00 -1.80446446e+00 ... 2.19354391e+00 9.68467355e-01 -4.61490065e-01] ... [-8.63113642e-01 5.95806360e-01 2.49409795e+00 ... -7.40652144e-01 -2.86385834e-01 -1.63142049e+00] [-5.81736028e-01 -4.34413940e-01 -8.87556314e-01 ... 1.12359929e+00 -2.41908669e-01 -1.01377988e+00] [-3.31824327e+00 -4.57128882e-03 -1.13895500e+00 ... -2.30492806e+00 -2.21673816e-01 -3.50913405e-01]]] [[[ 6.02679670e-01 8.10526371e-01 6.08906269e-01 ... 3.33181262e-01 7.26828516e-01 9.28584576e-01] [ 6.04251444e-01 5.68554282e-01 7.15329587e-01 ... 8.57733607e-01 3.84836376e-01 6.26368761e-01] [ 8.08884740e-01 2.85942167e-01 2.41503626e-01 ... 3.10947448e-01 2.14758694e-01 1.25456762e+00] ... [ 6.54897034e-01 9.78813171e-02 1.12082183e-01 ... 4.50797677e-01 5.02066851e-01 7.22995818e-01] [ 6.03033900e-01 3.75875771e-01 3.50480258e-01 ... 3.98016691e-01 3.41589779e-01 9.71326053e-01] [ 6.45350575e-01 6.08975291e-01 8.41728270e-01 ... 2.58106560e-01 8.92964244e-01 5.61544657e-01]] [[ 5.94408810e-01 5.81432641e-01 5.85212588e-01 ... 4.86502647e-01 7.06480563e-01 5.01397133e-01] [ 5.77908933e-01 7.36246765e-01 7.48661041e-01 ... 5.32408059e-01 1.08525920e+00 5.48982739e-01] [ 5.90263546e-01 9.67417598e-01 5.89161456e-01 ... 3.33573997e-01 4.87322271e-01 2.79591650e-01] ... [ 8.77640009e-01 4.87757802e-01 4.82256114e-01 ... 2.97750235e-01 5.54990590e-01 7.33494639e-01] [ 7.48502791e-01 6.43399358e-01 5.48757732e-01 ... 1.98681891e-01 4.78987485e-01 1.10833132e+00] [ 5.39481580e-01 5.07129848e-01 5.48889935e-01 ... 8.82892251e-01 4.07521576e-01 9.86700654e-01]] [[ 4.39917147e-01 5.87910414e-01 5.19560993e-01 ... 5.84320247e-01 6.09358251e-01 4.50804740e-01] [ 8.08722436e-01 9.82204080e-01 4.57794994e-01 ... 
5.37313461e-01 6.75159156e-01 6.35121644e-01] [ 4.72523332e-01 8.74975204e-01 6.89904213e-01 ... 8.12193930e-01 2.40003139e-01 4.93483305e-01] ... [ 5.85357547e-01 8.30360889e-01 3.43198091e-01 ... 3.38015944e-01 5.38470626e-01 1.17183924e+00] [ 2.63943672e-01 6.84731424e-01 4.77242708e-01 ... 6.49494171e-01 4.92439270e-01 7.78426170e-01] [ 6.03810787e-01 3.88186395e-01 7.69719005e-01 ... 1.06123424e+00 5.52675426e-01 1.07310545e+00]] ... [[ 7.44110107e-01 1.65739268e-01 4.84383106e-02 ... 5.87342203e-01 1.87005252e-01 5.87216794e-01] [ 8.32114816e-01 6.21435344e-01 8.90933037e-01 ... 1.00226676e+00 5.87383628e-01 8.44882250e-01] [ 6.94451094e-01 4.94802982e-01 7.67275214e-01 ... 5.47147393e-01 9.79887962e-01 3.96403521e-01] ... [ 8.73748183e-01 1.62861437e-01 7.96107769e-01 ... 2.48100579e-01 6.20995224e-01 4.55359221e-01] [ 3.41067404e-01 4.72359627e-01 4.44529325e-01 ... 5.06425738e-01 6.62948310e-01 7.13600397e-01] [ 1.60409570e-01 3.36586386e-01 1.22188962e+00 ... 6.32125735e-01 2.32767880e-01 9.47459280e-01]] [[ 4.27173436e-01 7.62726665e-02 3.76825213e-01 ... 9.92562950e-01 -1.97392702e-03 6.58682048e-01] [ 2.81349778e-01 5.17731547e-01 7.90392995e-01 ... 9.88745570e-01 5.53058147e-01 9.51033294e-01] [ 5.77270150e-01 5.09890914e-01 4.95917976e-01 ... 3.31728131e-01 2.76220024e-01 8.18791151e-01] ... [ 2.10268110e-01 3.94305289e-01 6.84868097e-01 ... 5.37749887e-01 1.79951817e-01 6.54960096e-01] [ 3.09848607e-01 7.51597583e-01 6.09628975e-01 ... 4.90154266e-01 4.40705538e-01 5.65246344e-01] [ 5.60937941e-01 9.13701594e-01 8.67738247e-01 ... 8.87709737e-01 7.76316524e-01 6.70292735e-01]] [[ 2.96650648e-01 5.17909169e-01 8.43931258e-01 ... 6.14240825e-01 5.69550693e-01 6.07621074e-01] [ 4.02661383e-01 9.45839047e-01 9.28323627e-01 ... 2.05021143e-01 8.75535131e-01 4.84989047e-01] [ 5.26536644e-01 8.24655294e-01 6.63737178e-01 ... 2.55585790e-01 1.15159154e+00 7.07798362e-01] ... [ 1.52831674e-02 7.77184010e-01 4.31843638e-01 ... 
6.56655014e-01 7.02896833e-01 5.65141261e-01] [ 7.33371675e-01 7.27347076e-01 6.93169475e-01 ... 4.37209845e-01 5.04861116e-01 5.67116022e-01] [ 7.74085760e-01 6.05847239e-01 9.45959568e-01 ... 5.83472729e-01 3.37054640e-01 8.98354113e-01]]]]]; ov_res: [[[[[ 2.24304080e-01 -7.98018813e-01 5.55161238e-02 ... -1.45507073e+00 -5.75508773e-02 9.91475284e-02] [ 1.46923900e-01 -1.10727191e-01 6.77148998e-02 ... 2.37253010e-01 -1.33638009e-01 -6.04715586e-01] [-1.33518851e+00 3.01671386e-01 1.22563452e-01 ... 3.46181393e-01 2.92128623e-01 7.55278409e-01] ... [-1.50830746e+00 1.07424229e-01 2.12362528e-01 ... -1.25366271e+00 -4.32231009e-01 3.88041615e-01] [-8.26045275e-01 -9.73901629e-01 -7.55674958e-01 ... 4.37779605e-01 8.92189682e-01 5.24460018e-01] [ 8.12807381e-01 -2.18869284e-01 -1.30161834e+00 ... -5.01603425e-01 3.77690315e-01 4.67469633e-01]] [[-1.03231788e+00 -2.54883975e-01 -6.66374207e-01 ... 1.51744485e-01 -3.80528331e-01 -9.66520011e-01] [ 4.91214991e-01 -3.10789526e-01 -6.44496202e-01 ... -1.69904634e-01 -9.56791282e-01 -5.91251612e-01] [-1.00692117e+00 -5.05975544e-01 2.55796373e-01 ... 3.93784046e-03 -1.87956557e-01 2.34708488e-02] ... [-6.64070666e-01 -2.13431418e-02 -4.71446812e-02 ... 2.88306773e-01 -2.31192410e-02 7.79230595e-02] [-6.24734223e-01 -8.93350184e-01 9.57181752e-01 ... 7.50509918e-01 -1.07434630e+00 -3.39821577e-01] [ 1.05202645e-01 5.31953990e-01 -9.00509596e-01 ... -1.35596681e+00 -5.84782362e-01 -6.21674359e-01]] [[-1.05218315e+00 -1.09028828e+00 -1.15292323e+00 ... 6.81896269e-01 -2.40048707e-01 -4.89344895e-02] [-6.92683697e-01 -4.67948586e-01 7.37493813e-01 ... -1.03733110e+00 -7.89088249e-01 -6.11755550e-02] [-2.28954554e-01 -5.91427207e-01 -1.17886558e-01 ... -4.55329627e-01 -1.19190276e+00 -7.93167353e-01] ... [ 1.73238039e-01 -8.04484606e-01 -9.42738593e-01 ... 2.98136413e-01 -5.96811891e-01 -1.88359573e-01] [-2.76264697e-01 2.71529138e-01 2.15510190e-01 ... 
-3.89673591e-01 3.65902781e-02 -1.99343547e-01] [-9.21236873e-01 -1.35243344e+00 -6.21696413e-01 ... -4.62427825e-01 5.97240031e-01 -6.48701310e-01]] ... [[-4.96370763e-01 -1.06027150e+00 -2.71082699e-01 ... -7.83537865e-01 -1.77303529e+00 1.65945292e-03] [-1.17444968e+00 -3.95119011e-01 -5.80878854e-02 ... -3.70284259e-01 -6.99868262e-01 -4.05379951e-01] [ 1.37557715e-01 -5.12698293e-02 -9.96112823e-03 ... -2.75101721e-01 -1.65035343e+00 -2.32882991e-01] ... [-3.87486875e-01 -6.22966707e-01 -2.98257589e-01 ... -5.65715730e-02 9.45207477e-03 -2.31993407e-01] [-2.25647950e+00 -2.19606221e-01 -1.54836845e+00 ... 1.57020211e-01 -5.41126728e-01 9.92676318e-01] [ 2.20376670e-01 8.88765752e-02 3.64802837e-01 ... -2.13641316e-01 -1.51129150e+00 -1.12054932e+00]] [[ 3.17431986e-01 -1.25480556e+00 -7.09370792e-01 ... -3.41915190e-01 -4.26827341e-01 -1.23833847e+00] [ 4.51320887e-01 -3.53296995e-01 7.14777648e-01 ... -5.92809677e-01 -5.73355317e-01 -1.87330201e-01] [-6.76269174e-01 -1.32822379e-01 9.88484204e-01 ... -8.79855335e-01 4.66478169e-02 -5.79079747e-01] ... [ 4.64005113e-01 -1.56891525e-01 1.79417074e-01 ... -5.31493962e-01 -7.48866439e-01 -9.00525451e-01] [-1.99348426e+00 3.97412956e-01 8.81663263e-01 ... 5.34475744e-01 2.30765164e-01 -9.71127152e-01] [-1.10348582e+00 -1.37631083e+00 -4.33788121e-01 ... -6.52845442e-01 -4.56902087e-01 -8.01635683e-01]] [[-2.31396094e-01 -3.11445177e-01 -2.26337567e-01 ... 5.28721511e-01 1.77761674e-01 -7.50330091e-02] [-1.98074722e+00 -6.86993957e-01 -3.75282884e-01 ... -1.75801325e+00 -2.50668168e-01 4.45700586e-02] [-5.93568623e-01 -1.15217316e+00 -1.15844381e+00 ... -1.75705361e+00 -9.87818360e-01 -2.42947966e-01] ... [-5.57004213e-01 -1.92574397e-01 -2.36618891e-01 ... -4.21759486e-03 3.64537895e-01 -4.93713081e-01] [-1.67365134e-01 7.37435877e-01 -1.32553935e-01 ... -1.01566625e+00 -1.23376459e-01 2.40187466e-01] [-6.44050837e-01 -3.77464443e-01 -7.83237755e-01 ... 
4.96013761e-01 -5.23339748e-01 3.03398311e-01]]] [[[-2.37611723e+00 2.00858164e+00 1.00020081e-01 ... -4.66700047e-02 2.52994013e+00 -1.03351903e+00] [-7.16152012e-01 -1.08666444e+00 8.84579182e-01 ... -6.72068238e-01 -1.88762021e+00 5.06948531e-01] [-1.00991404e+00 -1.94256783e+00 3.07150900e-01 ... -1.71660471e+00 -1.71351790e+00 -2.76648521e-01] ... [ 5.76154470e-01 -1.43915176e+00 3.04225862e-01 ... -2.74863291e+00 -1.60466707e+00 -6.28167152e-01] [ 8.41430783e-01 -3.05868268e-01 7.20597565e-01 ... 6.79248571e-02 -1.30872285e+00 -1.88119066e+00] [ 2.85077751e-01 -4.91584510e-01 9.91781950e-02 ... -2.42702818e+00 9.68413591e-01 4.41465914e-01]] [[-1.52704012e+00 -4.43272829e-01 -4.00373638e-01 ... 2.45559722e-01 1.29174221e+00 -6.30850494e-01] [-1.61797929e+00 -2.88049555e+00 -3.24455237e+00 ... -1.46544778e+00 3.99807036e-01 -1.78012717e+00] [-5.47269404e-01 -1.62543321e+00 -1.79248899e-02 ... -1.11857200e+00 -6.13039553e-01 -1.54034007e+00] ... [-7.70773649e-01 -2.04835463e+00 -1.25529242e+00 ... -4.56070185e-01 2.46857595e+00 -2.73081017e+00] [-4.38067883e-01 2.03887343e+00 5.50141513e-01 ... 5.96296489e-01 -1.35438800e+00 -6.95308208e-01] [ 1.71127021e+00 1.15174258e+00 2.27230936e-01 ... -1.15611196e+00 -3.19602561e+00 9.39399600e-01]] [[ 1.13219810e+00 1.04633784e+00 -1.02838051e+00 ... -1.59666383e+00 1.19738877e-01 1.24353856e-01] [ 5.46927929e-01 -6.78297758e-01 1.24690026e-01 ... 3.06970179e-01 1.24917650e+00 1.35225058e+00] [-2.80824929e-01 -2.53517061e-01 -1.75401473e+00 ... 9.00803208e-02 -1.60237694e+00 1.76324475e+00] ... [ 3.92535627e-01 1.08654642e+00 6.77209139e-01 ... 4.01707113e-01 -1.64357275e-01 -7.04743862e-02] [-8.04301426e-02 1.83953190e+00 -5.58726013e-01 ... 2.67028522e+00 -1.04485607e+00 -6.78225875e-01] [ 2.58651042e+00 2.27614969e-01 -1.13410391e-01 ... -6.62256241e-01 4.80834067e-01 -2.16109705e+00]] ... [[ 1.50937879e+00 2.02997279e+00 1.93425953e-01 ... 
-1.26612484e+00 -1.86058685e-01 -1.53459281e-01] [-1.28238678e+00 3.58015656e-01 1.60230267e+00 ... -2.71256471e+00 -4.10930812e-01 -9.74167228e-01] [-1.08226693e+00 -1.13136888e+00 -7.68184006e-01 ... 6.47365630e-01 8.34833741e-01 1.19127297e+00] ... [-1.55598152e+00 -4.43487555e-01 -1.99125648e+00 ... -1.33581710e+00 2.96002245e+00 4.79753673e-01] [-1.16697061e+00 -1.59685838e+00 -3.34796786e-01 ... 9.96094823e-01 -1.73185372e+00 -1.82170331e+00] [ 2.28691518e-01 -7.26849854e-01 -9.88392711e-01 ... -8.82965684e-01 -5.30216992e-02 2.65855575e+00]] [[-4.67394829e-01 3.98268342e-01 5.61568260e-01 ... 2.91550815e-01 -1.75953829e+00 -3.05906892e-01] [ 7.20388770e-01 -1.97407103e+00 3.78812730e-01 ... 4.02498305e-01 -1.91964781e+00 -1.37615657e+00] [-1.23763144e+00 3.17409444e+00 1.08121502e+00 ... -2.37340498e+00 1.78634048e-01 -1.02924204e+00] ... [-1.11302756e-01 -1.46113336e+00 9.65704203e-01 ... 1.43955004e+00 -3.58383030e-01 -3.05432528e-01] [ 1.16625357e+00 -2.20996213e+00 -4.67978746e-01 ... -8.39306712e-01 4.08556283e-01 -1.64050317e+00] [ 5.23177266e-01 1.17724574e+00 -1.69364369e+00 ... 1.60396194e+00 1.62275839e+00 -9.34935451e-01]] [[ 4.91169691e-01 -7.84136415e-01 9.90492821e-01 ... -1.77931142e+00 8.21002483e-01 1.65050173e+00] [-4.40549761e-01 1.92454964e-01 -1.43637732e-01 ... 1.66468811e+00 6.21475339e-01 -6.04764968e-02] [-5.27042508e-01 1.82964396e+00 -1.80446446e+00 ... 2.19354391e+00 9.68467355e-01 -4.61490065e-01] ... [-8.63113642e-01 5.95806360e-01 2.49409795e+00 ... -7.40652144e-01 -2.86385834e-01 -1.63142049e+00] [-5.81736028e-01 -4.34413940e-01 -8.87556314e-01 ... 1.12359929e+00 -2.41908669e-01 -1.01377988e+00] [-3.31824327e+00 -4.57128882e-03 -1.13895500e+00 ... -2.30492806e+00 -2.21673816e-01 -3.50913405e-01]]] [[[ 6.02679670e-01 8.10526371e-01 6.08906269e-01 ... 3.33181262e-01 7.26828516e-01 9.28584576e-01] [ 6.04251444e-01 5.68554282e-01 7.15329587e-01 ... 
8.57733607e-01 3.84836376e-01 6.26368761e-01] [ 8.08884740e-01 2.85942167e-01 2.41503626e-01 ... 3.10947448e-01 2.14758694e-01 1.25456762e+00] ... [ 6.54897034e-01 9.78813171e-02 1.12082183e-01 ... 4.50797677e-01 5.02066851e-01 7.22995818e-01] [ 6.03033900e-01 3.75875771e-01 3.50480258e-01 ... 3.98016691e-01 3.41589779e-01 9.71326053e-01] [ 6.45350575e-01 6.08975291e-01 8.41728270e-01 ... 2.58106560e-01 8.92964244e-01 5.61544657e-01]] [[ 5.94408810e-01 5.81432641e-01 5.85212588e-01 ... 4.86502647e-01 7.06480563e-01 5.01397133e-01] [ 5.77908933e-01 7.36246765e-01 7.48661041e-01 ... 5.32408059e-01 1.08525920e+00 5.48982739e-01] [ 5.90263546e-01 9.67417598e-01 5.89161456e-01 ... 3.33573997e-01 4.87322271e-01 2.79591650e-01] ... [ 8.77640009e-01 4.87757802e-01 4.82256114e-01 ... 2.97750235e-01 5.54990590e-01 7.33494639e-01] [ 7.48502791e-01 6.43399358e-01 5.48757732e-01 ... 1.98681891e-01 4.78987485e-01 1.10833132e+00] [ 5.39481580e-01 5.07129848e-01 5.48889935e-01 ... 8.82892251e-01 4.07521576e-01 9.86700654e-01]] [[ 4.39917147e-01 5.87910414e-01 5.19560993e-01 ... 5.84320247e-01 6.09358251e-01 4.50804740e-01] [ 8.08722436e-01 9.82204080e-01 4.57794994e-01 ... 5.37313461e-01 6.75159156e-01 6.35121644e-01] [ 4.72523332e-01 8.74975204e-01 6.89904213e-01 ... 8.12193930e-01 2.40003139e-01 4.93483305e-01] ... [ 5.85357547e-01 8.30360889e-01 3.43198091e-01 ... 3.38015944e-01 5.38470626e-01 1.17183924e+00] [ 2.63943672e-01 6.84731424e-01 4.77242708e-01 ... 6.49494171e-01 4.92439270e-01 7.78426170e-01] [ 6.03810787e-01 3.88186395e-01 7.69719005e-01 ... 1.06123424e+00 5.52675426e-01 1.07310545e+00]] ... [[ 7.44110107e-01 1.65739268e-01 4.84383106e-02 ... 5.87342203e-01 1.87005252e-01 5.87216794e-01] [ 8.32114816e-01 6.21435344e-01 8.90933037e-01 ... 1.00226676e+00 5.87383628e-01 8.44882250e-01] [ 6.94451094e-01 4.94802982e-01 7.67275214e-01 ... 5.47147393e-01 9.79887962e-01 3.96403521e-01] ... [ 8.73748183e-01 1.62861437e-01 7.96107769e-01 ... 
2.48100579e-01 6.20995224e-01 4.55359221e-01] [ 3.41067404e-01 4.72359627e-01 4.44529325e-01 ... 5.06425738e-01 6.62948310e-01 7.13600397e-01] [ 1.60409570e-01 3.36586386e-01 1.22188962e+00 ... 6.32125735e-01 2.32767880e-01 9.47459280e-01]] [[ 4.27173436e-01 7.62726665e-02 3.76825213e-01 ... 9.92562950e-01 -1.97392702e-03 6.58682048e-01] [ 2.81349778e-01 5.17731547e-01 7.90392995e-01 ... 9.88745570e-01 5.53058147e-01 9.51033294e-01] [ 5.77270150e-01 5.09890914e-01 4.95917976e-01 ... 3.31728131e-01 2.76220024e-01 8.18791151e-01] ... [ 2.10268110e-01 3.94305289e-01 6.84868097e-01 ... 5.37749887e-01 1.79951817e-01 6.54960096e-01] [ 3.09848607e-01 7.51597583e-01 6.09628975e-01 ... 4.90154266e-01 4.40705538e-01 5.65246344e-01] [ 5.60937941e-01 9.13701594e-01 8.67738247e-01 ... 8.87709737e-01 7.76316524e-01 6.70292735e-01]] [[ 2.96650648e-01 5.17909169e-01 8.43931258e-01 ... 6.14240825e-01 5.69550693e-01 6.07621074e-01] [ 4.02661383e-01 9.45839047e-01 9.28323627e-01 ... 2.05021143e-01 8.75535131e-01 4.84989047e-01] [ 5.26536644e-01 8.24655294e-01 6.63737178e-01 ... 2.55585790e-01 1.15159154e+00 7.07798362e-01] ... [ 1.52831674e-02 7.77184010e-01 4.31843638e-01 ... 6.56655014e-01 7.02896833e-01 5.65141261e-01] [ 7.33371675e-01 7.27347076e-01 6.93169475e-01 ... 4.37209845e-01 5.04861116e-01 5.67116022e-01] [ 7.74085760e-01 6.05847239e-01 9.45959568e-01 ... 5.83472729e-01 3.37054640e-01 8.98354113e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [2, 2, 2], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_966.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2, 2, 2]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.1535 -0.1837 -0.3584 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.1596 (2,1,1,.,.) = 0.1297 (3,1,1,.,.) = -0.3206 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[-2.87316918e-01 -3.95633653e-02 -8.59279782e-02 ... -3.53216261e-01 -2.31427550e-01 -3.70740145e-01] [-1.27034530e-01 -3.03857386e-01 -1.74096406e-01 ... -4.01321530e-01 -1.24705970e-01 -4.38642949e-02] [ 1.35389179e-01 -3.72271121e-01 -2.65109062e-01 ... -2.26645052e-01 4.54846472e-02 -3.00142348e-01] ... [ 3.00970823e-02 -1.82992652e-01 -1.59821615e-01 ... -1.60193652e-01 -1.16379276e-01 8.59222859e-02] [-3.61922160e-02 -3.25035959e-01 -2.81025767e-02 ... -3.37185144e-01 -3.03731501e-01 -1.86050832e-02] [-3.59052658e-01 -3.23805869e-01 -4.52325717e-02 ... -1.39491558e-02 -3.67657065e-01 -6.44037127e-02]] [[-3.36364210e-02 -1.11969262e-01 -1.54447719e-01 ... -3.20572555e-01 5.12763709e-02 -1.91021860e-01] [-1.40502900e-01 -9.63985920e-02 -7.85305575e-02 ... 1.94573849e-02 -2.14516222e-02 -7.26017952e-02] [-3.95249784e-01 -2.93585658e-02 -3.81076097e-01 ... 1.23891205e-01 -3.48904207e-02 -1.02150150e-01] ... 
[-7.90496469e-02 -2.14884698e-01 -2.05274507e-01 ... -4.14837271e-01 -1.07088871e-01 3.87078375e-02] [-1.45702064e-03 -5.62616289e-02 -1.93969771e-01 ... -1.20713681e-01 -2.81452537e-01 -1.99975222e-01] [-2.09450200e-01 -3.07408601e-01 -1.03213243e-01 ... -1.51478186e-01 3.01226526e-02 -2.32481688e-01]] [[ 1.63269401e-01 1.33869648e-02 -3.19024026e-01 ... -1.44898474e-01 -4.36289608e-03 -3.27583134e-01] [-8.68264437e-02 -2.35893160e-01 -1.20352313e-01 ... -3.68211567e-01 -3.65407318e-01 -2.49133736e-01] [-3.81770194e-01 1.46642417e-01 -3.78678560e-01 ... -3.33045363e-01 -1.07418001e-02 5.90123236e-03] ... [-4.76241529e-01 -1.57420710e-01 -1.82671472e-01 ... -2.20466882e-01 1.19143337e-01 -1.78123027e-01] [-2.50701219e-01 3.97854596e-02 -7.06272051e-02 ... 1.21229142e-02 4.07618433e-02 -4.02306736e-01] [-3.62425238e-01 5.95273077e-03 -2.49358013e-01 ... -2.00235218e-01 -2.70205855e-01 -3.92520458e-01]] ... [[-3.75572443e-01 -1.21237122e-01 1.03320628e-02 ... -4.13516730e-01 -2.19319761e-02 -2.74982840e-01] [-1.67598352e-01 -5.73024452e-01 -1.40619189e-01 ... -3.26631308e-01 -2.65743583e-02 -2.69508511e-01] [ 4.56546247e-03 -4.74520981e-01 1.83685929e-01 ... -4.35384154e-01 -4.40724015e-01 -1.62922949e-01] ... [-1.42367184e-01 -2.29109034e-01 -3.39351118e-01 ... -1.98935226e-01 -2.12875217e-01 9.43226218e-02] [-2.53253341e-01 -4.09904048e-02 -8.22551548e-02 ... -1.28669038e-01 -1.60680458e-01 -1.28937662e-02] [-2.09391624e-01 -1.89397246e-01 5.81133962e-02 ... -9.42347869e-02 -3.27176750e-02 -2.31942415e-01]] [[-5.62644303e-02 -2.40986526e-01 -1.43693179e-01 ... -1.70324981e-01 -4.48602960e-02 -9.03929099e-02] [-2.72996336e-01 -2.01816887e-01 -8.93111303e-02 ... -3.44350427e-01 -1.41765773e-01 -1.52716577e-01] [-4.86819863e-01 -5.52434623e-02 -1.89793751e-01 ... -1.86383665e-01 -2.86794424e-01 -1.54063120e-01] ... [-1.37495860e-01 -3.31436694e-01 7.18789697e-02 ... -2.11968645e-01 5.57674170e-02 -2.28899524e-01] [-3.91185105e-01 -1.20562822e-01 -4.23976779e-01 ... 
-3.01245511e-01 -8.42388570e-02 -2.19248295e-01] [-2.14059860e-01 -4.41488445e-01 -4.74109977e-01 ... -3.40020061e-02 -1.47153974e-01 -2.27887392e-01]] [[-3.87801170e-01 -4.23734099e-01 -1.29242510e-01 ... -2.28570223e-01 -1.18784517e-01 -2.93784812e-02] [-2.56843388e-01 -2.18745202e-01 -4.22525078e-01 ... -1.12438791e-01 -3.47554207e-01 -1.38487697e-01] [-1.17836677e-01 -6.50557518e-01 2.11848468e-01 ... -4.72668767e-01 -1.41506165e-01 -2.36939311e-01] ... [-4.45149034e-01 -1.43966973e-02 3.29053104e-02 ... 9.27283615e-02 -3.10362667e-01 -1.40776515e-01] [-3.64759713e-02 -2.29486823e-01 -4.40980941e-01 ... -1.17226526e-01 -1.96898729e-02 2.61224002e-01] [ 1.28915817e-01 1.58687085e-01 4.81505096e-02 ... -2.26527110e-01 2.41009355e-01 -2.49118924e-01]]] [[[-1.41410619e-01 -3.70137036e-01 -2.89343059e-01 ... -1.79122165e-01 -1.66045919e-01 -1.60062492e-01] [-3.11174154e-01 -2.85452843e-01 -1.31316543e-01 ... -3.12417269e-01 -2.14002505e-01 -1.33986294e-01] [ 3.75181586e-02 -2.08299428e-01 -1.32279336e-01 ... -1.58386260e-01 -1.33456558e-01 -3.56450230e-02] ... [-2.60663718e-01 -1.16483472e-01 -2.44577289e-01 ... -1.64623663e-01 1.13491043e-01 -1.78827643e-01] [-2.32103914e-01 -1.43543035e-02 -2.76655525e-01 ... -1.92286238e-01 -1.18226036e-01 -2.02955842e-01] [ 1.31870165e-01 -3.92743051e-02 -1.51916310e-01 ... -3.04256827e-02 -4.10005778e-01 -3.63770336e-01]] [[-1.54372990e-01 -1.66031599e-01 -2.94444084e-01 ... -7.32725486e-02 -3.60250294e-01 -1.83389515e-01] [-3.52562904e-01 -2.70228088e-01 -1.93876654e-01 ... -1.36795521e-01 5.27913719e-02 -1.54914171e-01] [ 3.59174609e-03 -3.51226091e-01 -3.04967910e-01 ... -3.82388532e-02 -1.18429177e-01 -6.55247495e-02] ... [-9.68336537e-02 -1.19422160e-01 -1.67299807e-02 ... -1.78826094e-01 -1.37219653e-01 -2.93168157e-01] [-1.26972020e-01 -1.52354062e-01 -1.68186069e-01 ... -2.95257807e-01 -1.81472033e-01 1.09468251e-02] [-2.26424366e-01 -7.87797421e-02 -4.29183841e-01 ... 
-4.35802758e-01 -2.28726476e-01 -1.98690951e-01]] [[-1.72627687e-01 -3.02164733e-01 -3.84462774e-01 ... -4.63950634e-03 -2.87970960e-01 -1.33351207e-01] [ 1.25371769e-01 -3.58000308e-01 -5.51136434e-02 ... -2.17989787e-01 6.41102344e-02 1.52071103e-01] [-2.35563114e-01 -3.58850807e-02 -3.01946938e-01 ... -1.42599970e-01 -2.04677582e-01 -9.69064012e-02] ... [-3.85747880e-01 -1.75574854e-01 -3.05350661e-01 ... -1.91058263e-01 -3.36656988e-01 -1.56034082e-01] [-2.28444368e-01 -1.54409543e-01 -1.24923289e-01 ... -2.77897328e-01 -2.78914392e-01 -2.22084150e-01] [-1.68225169e-03 -2.27942958e-01 -2.78758973e-01 ... -4.72304225e-02 5.84348738e-02 -3.41106892e-01]] ... [[-5.30731678e-02 -1.46927297e-01 1.09669641e-01 ... -3.65364701e-02 -3.37040007e-01 -2.39647180e-01] [-2.86630809e-01 -9.43397880e-02 -3.44230950e-01 ... -1.14639111e-01 -3.76542985e-01 -8.03431422e-02] [-1.58760592e-01 -1.54307857e-01 -1.44805163e-01 ... -9.09085497e-02 -2.84504116e-01 -2.49795333e-01] ... [-2.35305920e-01 -3.96848142e-01 -2.47983038e-01 ... -2.14100718e-01 -1.89103320e-01 -1.07276022e-01] [-2.08779931e-01 -2.32569724e-02 -1.69129595e-01 ... -1.96545765e-01 -1.39051408e-01 -9.61179286e-02] [-1.62541837e-01 -5.19014895e-03 -1.84639841e-01 ... -2.36413836e-01 -1.89398631e-01 -1.90129310e-01]] [[-2.61374772e-01 -1.80579409e-01 -8.81496817e-02 ... -1.64279267e-01 -8.09411332e-02 -3.26053590e-01] [-3.56607676e-01 -2.76432246e-01 -2.82994956e-01 ... -2.58606404e-01 -2.54374534e-01 -1.68079376e-01] [-1.34731844e-01 -2.14457005e-01 -2.90792704e-01 ... 1.22796789e-01 -2.62125939e-01 -1.35960311e-01] ... [-2.19094247e-01 -1.13468513e-01 -1.56978279e-01 ... -3.29091638e-01 -2.52954841e-01 -5.89963794e-02] [-2.69495487e-01 -3.29841822e-02 2.91183591e-02 ... -4.73925173e-02 -2.34350175e-01 -4.71750796e-01] [ 7.18011409e-02 -4.17488396e-01 -1.44065559e-01 ... -1.21735349e-01 -1.44580454e-01 -1.52746975e-01]] [[-2.44213223e-01 5.07532060e-02 -2.62446821e-01 ... 
-1.84369728e-01 -2.17920601e-01 -3.16919029e-01] [-2.08823621e-01 6.93159252e-02 -3.71527374e-01 ... -3.42869818e-01 -3.40069711e-01 -9.63513777e-02] [-4.16961551e-01 3.15923244e-02 -3.06961298e-01 ... -3.53734553e-01 -1.02832764e-01 -5.39968073e-01] ... [-2.76939213e-01 -5.17998189e-02 -2.60714054e-01 ... -9.97014791e-02 -1.49333879e-01 -8.11016560e-02] [-4.86724794e-01 -1.97790682e-01 -2.13322908e-01 ... -1.41844183e-01 -6.18380308e-03 -2.41835207e-01] [-1.02993757e-01 -1.90746337e-01 -1.26211628e-01 ... -3.78130734e-01 -3.84064734e-01 -5.97414598e-02]]] [[[-2.45902702e-01 -5.06667256e-01 2.09485143e-01 ... -4.20077056e-01 -8.71911407e-01 -1.90073043e-01] [-7.44428396e-01 -2.57061839e-01 2.36148983e-01 ... -6.18503630e-01 -2.62717009e-01 -1.69444606e-01] [ 1.54618531e-01 -2.54947960e-01 4.15761203e-01 ... 6.49445951e-02 -8.91713262e-01 -2.44218230e-01] ... [-2.06360236e-01 -1.34897292e-01 -2.83678114e-01 ... -1.06647384e+00 -8.22198033e-01 -4.66292411e-01] [ 2.66677767e-01 -4.25873756e-01 -6.30641162e-01 ... -2.00919434e-01 -8.97179604e-01 -1.40901685e-01] [-8.90706062e-01 -1.49210215e-01 -1.53134987e-01 ... 2.05800265e-01 -1.33453190e-01 -9.10108328e-01]] [[-4.85690892e-01 -1.39205649e-01 -6.04726255e-01 ... -3.48433375e-01 -3.87335181e-01 -3.13446850e-01] [-5.86227417e-01 -4.70131636e-01 -7.85605311e-01 ... -8.64874423e-02 -6.50638342e-01 -4.06679183e-01] [-5.45517564e-01 -5.75005352e-01 -2.85941839e-01 ... -3.86716574e-01 -9.00617003e-01 -5.39654970e-01] ... [-6.53115749e-01 -2.18767613e-01 -6.10573292e-01 ... -5.41176081e-01 -5.19046664e-01 -6.68859303e-01] [-6.76511526e-01 -4.47009802e-01 -1.49604946e-01 ... -1.64658487e-01 -1.13114774e+00 -1.22383296e-01] [-4.55231249e-01 -9.76798534e-02 -1.67229965e-01 ... -7.15321839e-01 -6.75682306e-01 -2.40509495e-01]] [[-4.32558358e-01 -2.02907473e-01 -5.89027107e-01 ... -4.26451802e-01 -4.83658612e-02 -2.02033818e-01] [-1.87461108e-01 -2.16434941e-01 -8.50526214e-01 ... 
-2.62438655e-01 -3.79998386e-01 -1.21170282e-03] [ 3.30455601e-02 -1.57149553e-01 3.59549910e-01 ... -6.30841255e-02 -3.49728853e-01 -7.29687929e-01] ... [-6.20845735e-01 -2.71750748e-01 -2.16170371e-01 ... -1.38091803e-01 -9.10333037e-01 -9.38845277e-02] [-5.19210458e-01 -3.05597126e-01 -4.43406701e-02 ... -4.75047797e-01 2.60160297e-01 -5.10985374e-01] [-6.40359998e-01 -5.15536904e-01 -7.97230124e-01 ... 1.39542520e-02 -6.45482540e-03 -6.36635661e-01]] ... [[-5.40707648e-01 -4.36246604e-01 -5.33043742e-02 ... -3.78153324e-01 4.10433859e-01 -1.99414760e-01] [-5.06076694e-01 -8.07004869e-02 -8.58294785e-01 ... -1.16486415e-01 -7.73912907e-01 -5.00440478e-01] [-1.13715434e+00 -1.95950180e-01 -4.69394326e-02 ... -7.22995520e-01 -6.13699555e-01 -6.70736551e-01] ... [-4.44987595e-01 -7.84791410e-02 -2.03541279e-01 ... -8.76011848e-01 -9.47979927e-01 -4.84725267e-01] [-2.99421966e-01 -6.06005490e-02 -2.27044821e-01 ... 1.41029388e-01 -5.54824531e-01 -1.01559258e+00] [-3.03708851e-01 -1.08055800e-01 -9.91143942e-01 ... -4.37107205e-01 -1.48248374e-02 -3.53058875e-01]] [[ 1.40590221e-01 -3.11905354e-01 -4.71498013e-01 ... -7.23562837e-01 -5.64235687e-01 -4.87123668e-01] [-6.81434393e-01 -5.55198431e-01 -4.04477030e-01 ... -8.11039507e-01 -5.99731445e-01 -1.71993524e-01] [-2.28287280e-01 -3.40352803e-01 -5.73158622e-01 ... -5.94178975e-01 -1.14483520e-01 -1.74387231e-01] ... [-2.79955834e-01 -1.99096859e-01 -5.79381883e-01 ... -2.56619096e-01 -4.10051078e-01 -5.83629847e-01] [-3.19056392e-01 -1.79158732e-01 -6.29448891e-02 ... -4.82237816e-01 -3.54920357e-01 4.12322581e-02] [-1.26399374e+00 -3.76817107e-01 -8.81887674e-01 ... -6.08473182e-01 3.76760095e-01 -3.13585103e-01]] [[-2.24782914e-01 2.26146877e-02 -6.37080133e-01 ... 7.81977773e-02 5.43157160e-02 -6.85724616e-01] [-6.34910882e-01 -4.95549947e-01 -5.25052667e-01 ... -3.69221240e-01 -7.68963516e-01 -1.43495843e-01] [-4.92037594e-01 -1.36794195e-01 -2.21412346e-01 ... 
-2.03275129e-01 -6.61638021e-01 -3.18913668e-01] ... [ 1.59043401e-01 -3.93284768e-01 -7.91073680e-01 ... -4.11037356e-01 -1.63305372e-01 -3.48360837e-01] [-8.07855844e-01 -1.03418386e+00 2.29616731e-01 ... -2.59342104e-01 6.72673285e-02 -3.24361414e-01] [ 2.23874718e-01 -3.90712887e-01 -1.60194755e-01 ... -7.56520271e-01 -9.01559591e-02 2.39455909e-01]]]]]; ov_res: [[[[[-2.87316918e-01 -3.95633616e-02 -8.59279782e-02 ... -3.53216261e-01 -2.31427550e-01 -3.70740145e-01] [-1.27034530e-01 -3.03857386e-01 -1.74096406e-01 ... -4.01321530e-01 -1.24705970e-01 -4.38642949e-02] [ 1.35389179e-01 -3.72271150e-01 -2.65109062e-01 ... -2.26645052e-01 4.54846472e-02 -3.00142348e-01] ... [ 3.00970804e-02 -1.82992652e-01 -1.59821615e-01 ... -1.60193652e-01 -1.16379276e-01 8.59222859e-02] [-3.61922160e-02 -3.25035959e-01 -2.81025693e-02 ... -3.37185144e-01 -3.03731501e-01 -1.86050776e-02] [-3.59052628e-01 -3.23805869e-01 -4.52325717e-02 ... -1.39491549e-02 -3.67657065e-01 -6.44037127e-02]] [[-3.36364247e-02 -1.11969262e-01 -1.54447719e-01 ... -3.20572555e-01 5.12763746e-02 -1.91021860e-01] [-1.40502900e-01 -9.63985920e-02 -7.85305575e-02 ... 1.94573794e-02 -2.14516260e-02 -7.26017952e-02] [-3.95249784e-01 -2.93585639e-02 -3.81076068e-01 ... 1.23891197e-01 -3.48904207e-02 -1.02150150e-01] ... [-7.90496469e-02 -2.14884683e-01 -2.05274507e-01 ... -4.14837271e-01 -1.07088871e-01 3.87078337e-02] [-1.45701668e-03 -5.62616289e-02 -1.93969771e-01 ... -1.20713688e-01 -2.81452537e-01 -1.99975237e-01] [-2.09450200e-01 -3.07408601e-01 -1.03213243e-01 ... -1.51478186e-01 3.01226564e-02 -2.32481688e-01]] [[ 1.63269401e-01 1.33869685e-02 -3.19024026e-01 ... -1.44898474e-01 -4.36289934e-03 -3.27583104e-01] [-8.68264437e-02 -2.35893145e-01 -1.20352313e-01 ... -3.68211567e-01 -3.65407318e-01 -2.49133736e-01] [-3.81770194e-01 1.46642417e-01 -3.78678560e-01 ... -3.33045363e-01 -1.07418019e-02 5.90123283e-03] ... [-4.76241529e-01 -1.57420710e-01 -1.82671472e-01 ... 
-2.20466882e-01 1.19143337e-01 -1.78123027e-01] [-2.50701219e-01 3.97854559e-02 -7.06272051e-02 ... 1.21229161e-02 4.07618433e-02 -4.02306765e-01] [-3.62425238e-01 5.95272612e-03 -2.49358013e-01 ... -2.00235218e-01 -2.70205855e-01 -3.92520458e-01]] ... [[-3.75572443e-01 -1.21237122e-01 1.03320591e-02 ... -4.13516730e-01 -2.19319798e-02 -2.74982840e-01] [-1.67598352e-01 -5.73024452e-01 -1.40619189e-01 ... -3.26631278e-01 -2.65743639e-02 -2.69508511e-01] [ 4.56546852e-03 -4.74520981e-01 1.83685929e-01 ... -4.35384154e-01 -4.40724015e-01 -1.62922949e-01] ... [-1.42367199e-01 -2.29109034e-01 -3.39351147e-01 ... -1.98935226e-01 -2.12875217e-01 9.43226144e-02] [-2.53253341e-01 -4.09904011e-02 -8.22551548e-02 ... -1.28669038e-01 -1.60680458e-01 -1.28937662e-02] [-2.09391624e-01 -1.89397246e-01 5.81133924e-02 ... -9.42347869e-02 -3.27176750e-02 -2.31942415e-01]] [[-5.62644266e-02 -2.40986526e-01 -1.43693179e-01 ... -1.70324981e-01 -4.48602922e-02 -9.03929099e-02] [-2.72996336e-01 -2.01816887e-01 -8.93111303e-02 ... -3.44350427e-01 -1.41765773e-01 -1.52716577e-01] [-4.86819863e-01 -5.52434623e-02 -1.89793751e-01 ... -1.86383665e-01 -2.86794424e-01 -1.54063120e-01] ... [-1.37495860e-01 -3.31436664e-01 7.18789622e-02 ... -2.11968645e-01 5.57674170e-02 -2.28899524e-01] [-3.91185105e-01 -1.20562822e-01 -4.23976779e-01 ... -3.01245481e-01 -8.42388570e-02 -2.19248295e-01] [-2.14059860e-01 -4.41488445e-01 -4.74109977e-01 ... -3.40020023e-02 -1.47153974e-01 -2.27887392e-01]] [[-3.87801141e-01 -4.23734099e-01 -1.29242495e-01 ... -2.28570223e-01 -1.18784517e-01 -2.93784775e-02] [-2.56843388e-01 -2.18745202e-01 -4.22525078e-01 ... -1.12438791e-01 -3.47554207e-01 -1.38487697e-01] [-1.17836677e-01 -6.50557518e-01 2.11848468e-01 ... -4.72668767e-01 -1.41506165e-01 -2.36939296e-01] ... [-4.45149034e-01 -1.43966926e-02 3.29053141e-02 ... 9.27283689e-02 -3.10362667e-01 -1.40776500e-01] [-3.64759713e-02 -2.29486823e-01 -4.40980941e-01 ... 
-1.17226526e-01 -1.96898747e-02 2.61224002e-01] [ 1.28915817e-01 1.58687085e-01 4.81505021e-02 ... -2.26527110e-01 2.41009340e-01 -2.49118909e-01]]] [[[-1.41410604e-01 -3.70137036e-01 -2.89343029e-01 ... -1.79122165e-01 -1.66045919e-01 -1.60062492e-01] [-3.11174154e-01 -2.85452843e-01 -1.31316543e-01 ... -3.12417269e-01 -2.14002505e-01 -1.33986294e-01] [ 3.75181586e-02 -2.08299428e-01 -1.32279336e-01 ... -1.58386260e-01 -1.33456543e-01 -3.56450230e-02] ... [-2.60663718e-01 -1.16483472e-01 -2.44577289e-01 ... -1.64623663e-01 1.13491036e-01 -1.78827643e-01] [-2.32103914e-01 -1.43543044e-02 -2.76655525e-01 ... -1.92286238e-01 -1.18226036e-01 -2.02955842e-01] [ 1.31870180e-01 -3.92743051e-02 -1.51916310e-01 ... -3.04256864e-02 -4.10005778e-01 -3.63770336e-01]] [[-1.54372990e-01 -1.66031599e-01 -2.94444084e-01 ... -7.32725486e-02 -3.60250264e-01 -1.83389515e-01] [-3.52562904e-01 -2.70228088e-01 -1.93876654e-01 ... -1.36795521e-01 5.27913719e-02 -1.54914171e-01] [ 3.59175331e-03 -3.51226091e-01 -3.04967910e-01 ... -3.82388532e-02 -1.18429177e-01 -6.55247495e-02] ... [-9.68336537e-02 -1.19422160e-01 -1.67299751e-02 ... -1.78826094e-01 -1.37219653e-01 -2.93168157e-01] [-1.26972035e-01 -1.52354062e-01 -1.68186069e-01 ... -2.95257807e-01 -1.81472033e-01 1.09468196e-02] [-2.26424366e-01 -7.87797421e-02 -4.29183811e-01 ... -4.35802758e-01 -2.28726476e-01 -1.98690936e-01]] [[-1.72627687e-01 -3.02164733e-01 -3.84462774e-01 ... -4.63950960e-03 -2.87970960e-01 -1.33351207e-01] [ 1.25371754e-01 -3.58000308e-01 -5.51136434e-02 ... -2.17989787e-01 6.41102344e-02 1.52071103e-01] [-2.35563114e-01 -3.58850770e-02 -3.01946908e-01 ... -1.42599970e-01 -2.04677582e-01 -9.69064012e-02] ... [-3.85747880e-01 -1.75574854e-01 -3.05350661e-01 ... -1.91058263e-01 -3.36656988e-01 -1.56034082e-01] [-2.28444353e-01 -1.54409543e-01 -1.24923289e-01 ... -2.77897328e-01 -2.78914362e-01 -2.22084150e-01] [-1.68224843e-03 -2.27942958e-01 -2.78758973e-01 ... 
-4.72304299e-02 5.84348701e-02 -3.41106892e-01]] ... [[-5.30731715e-02 -1.46927297e-01 1.09669626e-01 ... -3.65364663e-02 -3.37040037e-01 -2.39647180e-01] [-2.86630809e-01 -9.43397880e-02 -3.44230920e-01 ... -1.14639111e-01 -3.76542985e-01 -8.03431422e-02] [-1.58760592e-01 -1.54307857e-01 -1.44805163e-01 ... -9.09085497e-02 -2.84504116e-01 -2.49795333e-01] ... [-2.35305920e-01 -3.96848142e-01 -2.47983038e-01 ... -2.14100718e-01 -1.89103320e-01 -1.07276022e-01] [-2.08779931e-01 -2.32569668e-02 -1.69129595e-01 ... -1.96545765e-01 -1.39051408e-01 -9.61179286e-02] [-1.62541837e-01 -5.19014150e-03 -1.84639841e-01 ... -2.36413836e-01 -1.89398631e-01 -1.90129310e-01]] [[-2.61374772e-01 -1.80579409e-01 -8.81496817e-02 ... -1.64279267e-01 -8.09411332e-02 -3.26053590e-01] [-3.56607676e-01 -2.76432246e-01 -2.82994956e-01 ... -2.58606404e-01 -2.54374534e-01 -1.68079376e-01] [-1.34731844e-01 -2.14457005e-01 -2.90792704e-01 ... 1.22796781e-01 -2.62125939e-01 -1.35960311e-01] ... [-2.19094232e-01 -1.13468513e-01 -1.56978279e-01 ... -3.29091638e-01 -2.52954841e-01 -5.89963831e-02] [-2.69495517e-01 -3.29841785e-02 2.91183628e-02 ... -4.73925136e-02 -2.34350190e-01 -4.71750826e-01] [ 7.18011335e-02 -4.17488426e-01 -1.44065559e-01 ... -1.21735349e-01 -1.44580454e-01 -1.52746990e-01]] [[-2.44213223e-01 5.07532023e-02 -2.62446821e-01 ... -1.84369728e-01 -2.17920601e-01 -3.16919029e-01] [-2.08823621e-01 6.93159327e-02 -3.71527404e-01 ... -3.42869818e-01 -3.40069711e-01 -9.63513777e-02] [-4.16961521e-01 3.15923169e-02 -3.06961298e-01 ... -3.53734553e-01 -1.02832764e-01 -5.39968073e-01] ... [-2.76939213e-01 -5.17998151e-02 -2.60714054e-01 ... -9.97014791e-02 -1.49333879e-01 -8.11016560e-02] [-4.86724824e-01 -1.97790682e-01 -2.13322908e-01 ... -1.41844183e-01 -6.18379703e-03 -2.41835222e-01] [-1.02993757e-01 -1.90746337e-01 -1.26211628e-01 ... -3.78130734e-01 -3.84064734e-01 -5.97414561e-02]]] [[[-2.45902702e-01 -5.06667256e-01 2.09485158e-01 ... 
-4.20077056e-01 -8.71911407e-01 -1.90073043e-01] [-7.44428396e-01 -2.57061839e-01 2.36148968e-01 ... -6.18503630e-01 -2.62716979e-01 -1.69444606e-01] [ 1.54618561e-01 -2.54947960e-01 4.15761203e-01 ... 6.49445876e-02 -8.91713262e-01 -2.44218245e-01] ... [-2.06360236e-01 -1.34897292e-01 -2.83678114e-01 ... -1.06647384e+00 -8.22197974e-01 -4.66292411e-01] [ 2.66677767e-01 -4.25873756e-01 -6.30641162e-01 ... -2.00919434e-01 -8.97179604e-01 -1.40901685e-01] [-8.90706122e-01 -1.49210215e-01 -1.53134987e-01 ... 2.05800265e-01 -1.33453190e-01 -9.10108268e-01]] [[-4.85690922e-01 -1.39205649e-01 -6.04726255e-01 ... -3.48433375e-01 -3.87335181e-01 -3.13446850e-01] [-5.86227417e-01 -4.70131636e-01 -7.85605252e-01 ... -8.64874423e-02 -6.50638342e-01 -4.06679183e-01] [-5.45517564e-01 -5.75005352e-01 -2.85941839e-01 ... -3.86716574e-01 -9.00617063e-01 -5.39655030e-01] ... [-6.53115749e-01 -2.18767613e-01 -6.10573292e-01 ... -5.41176021e-01 -5.19046724e-01 -6.68859303e-01] [-6.76511467e-01 -4.47009802e-01 -1.49604946e-01 ... -1.64658487e-01 -1.13114774e+00 -1.22383296e-01] [-4.55231220e-01 -9.76798460e-02 -1.67229965e-01 ... -7.15321839e-01 -6.75682306e-01 -2.40509495e-01]] [[-4.32558328e-01 -2.02907473e-01 -5.89027107e-01 ... -4.26451802e-01 -4.83658537e-02 -2.02033818e-01] [-1.87461108e-01 -2.16434941e-01 -8.50526214e-01 ... -2.62438655e-01 -3.79998386e-01 -1.21169141e-03] [ 3.30455564e-02 -1.57149553e-01 3.59549880e-01 ... -6.30841181e-02 -3.49728853e-01 -7.29687929e-01] ... [-6.20845735e-01 -2.71750748e-01 -2.16170371e-01 ... -1.38091803e-01 -9.10333037e-01 -9.38845351e-02] [-5.19210458e-01 -3.05597126e-01 -4.43406701e-02 ... -4.75047797e-01 2.60160297e-01 -5.10985434e-01] [-6.40359998e-01 -5.15536904e-01 -7.97230065e-01 ... 1.39542483e-02 -6.45482587e-03 -6.36635661e-01]] ... [[-5.40707648e-01 -4.36246604e-01 -5.33043854e-02 ... -3.78153324e-01 4.10433888e-01 -1.99414760e-01] [-5.06076694e-01 -8.07004794e-02 -8.58294785e-01 ... 
-1.16486415e-01 -7.73912966e-01 -5.00440478e-01] [-1.13715434e+00 -1.95950180e-01 -4.69394438e-02 ... -7.22995460e-01 -6.13699555e-01 -6.70736492e-01] ... [-4.44987595e-01 -7.84791484e-02 -2.03541279e-01 ... -8.76011848e-01 -9.47979867e-01 -4.84725267e-01] [-2.99421966e-01 -6.06005602e-02 -2.27044821e-01 ... 1.41029403e-01 -5.54824531e-01 -1.01559258e+00] [-3.03708822e-01 -1.08055808e-01 -9.91143942e-01 ... -4.37107235e-01 -1.48248486e-02 -3.53058875e-01]] [[ 1.40590206e-01 -3.11905354e-01 -4.71498013e-01 ... -7.23562837e-01 -5.64235687e-01 -4.87123668e-01] [-6.81434393e-01 -5.55198431e-01 -4.04477030e-01 ... -8.11039507e-01 -5.99731445e-01 -1.71993524e-01] [-2.28287280e-01 -3.40352803e-01 -5.73158622e-01 ... -5.94178975e-01 -1.14483513e-01 -1.74387231e-01] ... [-2.79955834e-01 -1.99096859e-01 -5.79381883e-01 ... -2.56619066e-01 -4.10051078e-01 -5.83629847e-01] [-3.19056392e-01 -1.79158732e-01 -6.29449040e-02 ... -4.82237816e-01 -3.54920357e-01 4.12322655e-02] [-1.26399374e+00 -3.76817107e-01 -8.81887734e-01 ... -6.08473241e-01 3.76760125e-01 -3.13585103e-01]] [[-2.24782914e-01 2.26146970e-02 -6.37080133e-01 ... 7.81977698e-02 5.43157011e-02 -6.85724616e-01] [-6.34910882e-01 -4.95549947e-01 -5.25052667e-01 ... -3.69221240e-01 -7.68963516e-01 -1.43495843e-01] [-4.92037565e-01 -1.36794195e-01 -2.21412346e-01 ... -2.03275129e-01 -6.61638021e-01 -3.18913668e-01] ... [ 1.59043387e-01 -3.93284768e-01 -7.91073620e-01 ... -4.11037356e-01 -1.63305372e-01 -3.48360837e-01] [-8.07855904e-01 -1.03418386e+00 2.29616731e-01 ... -2.59342104e-01 6.72673434e-02 -3.24361414e-01] [ 2.23874703e-01 -3.90712887e-01 -1.60194755e-01 ... -7.56520212e-01 -9.01559517e-02 2.39455923e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 1, 1], 'bias_shape': [1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_968.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.158847}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.7503 (2,1,1,.,.) = -0.8202 (3,1,1,.,.) = 0.2213 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%10) fw_re: [[[[[-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] ... [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01]] [[-1.23107588e+00 4.78446245e+00 -1.21905887e+00 ... 3.33432674e-01 -5.36977649e-01 2.30332899e+00] [-2.05947542e+00 -1.74086130e+00 7.97022939e-01 ... 4.52288294e+00 -2.18896389e+00 1.63614064e-01] [ 1.56418478e+00 1.06248164e+00 2.12695050e+00 ... 
-1.26704514e-01 9.87381339e-01 -3.88992965e-01] ... [ 2.84229457e-01 2.39502358e+00 -1.22035217e+00 ... -7.21531391e-01 2.46407485e+00 1.78143382e-03] [ 1.33976758e+00 -4.86597270e-01 3.52033758e+00 ... 1.14121646e-01 -3.15483832e+00 1.62631822e+00] [-5.57619452e-01 3.07538319e+00 -1.60593140e+00 ... -3.37205887e+00 4.48592949e+00 -3.14769840e+00]] [[-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] ... [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01]] ... [[-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] ... [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01]] [[ 3.82257253e-01 -4.23555565e+00 -2.65853906e+00 ... -6.44366145e-01 4.76807863e-01 -1.63230956e+00] [ 2.68628502e+00 1.22407532e+00 1.78166962e+00 ... 1.53459537e+00 2.73610353e-01 3.17851543e+00] [-3.25748491e+00 2.45256996e+00 -1.41958404e+00 ... 2.41805482e+00 -1.68568254e+00 -7.14210272e-01] ... [-1.41871810e+00 2.69058537e+00 8.79698396e-01 ... 
-2.13908219e+00 -3.45158386e+00 -1.07957733e+00] [-1.71412742e+00 2.18353796e+00 -9.38048005e-01 ... -4.13056898e+00 1.57498133e+00 -5.37545204e-01] [ 2.64127302e+00 -1.71001482e+00 -2.43949115e-01 ... -2.06285834e+00 -2.13416362e+00 1.01240766e+00]] [[-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] ... [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01]]]]]; ov_res: [[[[[-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] ... [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01]] [[-1.23107588e+00 4.78446245e+00 -1.21905887e+00 ... 3.33432674e-01 -5.36977649e-01 2.30332899e+00] [-2.05947542e+00 -1.74086130e+00 7.97022939e-01 ... 4.52288294e+00 -2.18896389e+00 1.63614064e-01] [ 1.56418478e+00 1.06248164e+00 2.12695050e+00 ... -1.26704514e-01 9.87381339e-01 -3.88992965e-01] ... [ 2.84229457e-01 2.39502358e+00 -1.22035217e+00 ... -7.21531391e-01 2.46407485e+00 1.78143382e-03] [ 1.33976758e+00 -4.86597270e-01 3.52033758e+00 ... 
1.14121646e-01 -3.15483832e+00 1.62631822e+00] [-5.57619452e-01 3.07538319e+00 -1.60593140e+00 ... -3.37205887e+00 4.48592949e+00 -3.14769840e+00]] [[-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] ... [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01]] ... [[-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] ... [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01]] [[ 3.82257253e-01 -4.23555565e+00 -2.65853906e+00 ... -6.44366145e-01 4.76807863e-01 -1.63230956e+00] [ 2.68628502e+00 1.22407532e+00 1.78166962e+00 ... 1.53459537e+00 2.73610353e-01 3.17851543e+00] [-3.25748491e+00 2.45256996e+00 -1.41958404e+00 ... 2.41805482e+00 -1.68568254e+00 -7.14210272e-01] ... [-1.41871810e+00 2.69058537e+00 8.79698396e-01 ... -2.13908219e+00 -3.45158386e+00 -1.07957733e+00] [-1.71412742e+00 2.18353796e+00 -9.38048005e-01 ... -4.13056898e+00 1.57498133e+00 -5.37545204e-01] [ 2.64127302e+00 -1.71001482e+00 -2.43949115e-01 ... 
-2.06285834e+00 -2.13416362e+00 1.01240766e+00]] [[-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] ... [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01] [-1.58847123e-01 -1.58847123e-01 -1.58847123e-01 ... -1.58847123e-01 -1.58847123e-01 -1.58847123e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_970.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.5575 -0.7819 -0.2527 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.2456 (2,1,1,.,.) = 0.1097 (3,1,1,.,.) = 1.2138 (1,2,1,.,.) = -0.9934 (2,2,1,.,.) = 0.1191 (3,2,1,.,.) = 0.9988 (1,3,1,.,.) = 0.3710 (2,3,1,.,.) = -1.2108 (3,3,1,.,.) = 0.4301 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 3.84361982e-01 1.31299996e+00 -1.14699614e+00 ... 2.14828205e+00 3.32784414e-01 -7.45532513e-02] [-1.52583539e-01 2.50129938e-01 1.17899120e+00 ... 1.35944188e+00 1.44010216e-01 2.00501680e+00] [ 1.74825042e-01 2.14929849e-01 -1.14852536e+00 ... -1.04967606e+00 -1.14358640e+00 2.78038383e+00] ... [ 2.21488476e+00 9.49596584e-01 2.51983404e+00 ... 1.51657772e+00 -1.26897752e-01 8.58595014e-01] [-4.25759792e-01 7.69467652e-02 1.23012877e+00 ... -9.36652541e-01 -6.47225380e-01 -1.74741054e+00] [ 4.10331786e-01 1.00992668e+00 1.66192424e+00 ... 1.23533058e+00 1.46575069e+00 -2.92046487e-01]] [[-6.78482294e-01 1.12707686e+00 2.61752152e+00 ... -4.13417816e-02 -8.95256042e-01 1.19249380e+00] [ 1.92712438e+00 5.54135621e-01 1.36225724e+00 ... 
-7.10783243e-01 -1.35178006e+00 -2.96193719e-01] [ 1.34341121e+00 -2.10427642e-02 -7.46026158e-01 ... 1.39571953e+00 -4.43689823e-01 1.22917509e+00] ... [-8.72606277e-01 8.04584742e-01 -1.37366712e+00 ... -1.88493490e+00 3.33724916e-01 -5.92303872e-02] [ 8.34987164e-01 1.51695406e+00 -2.04882145e-01 ... 7.99021959e-01 -2.97266960e-01 9.11793232e-01] [ 1.83882982e-01 3.24345279e+00 2.27488637e-01 ... -1.61057186e+00 1.02286935e+00 5.91276586e-01]] [[ 1.94395804e+00 2.59244174e-01 9.06020999e-01 ... 3.25857162e-01 4.15294886e-01 1.61498439e+00] [-7.04813600e-02 -6.11706614e-01 -1.80626893e+00 ... -3.32108617e-01 -8.73137474e-01 1.02296710e+00] [ 5.61884820e-01 2.44155598e+00 1.61707139e+00 ... 1.09469867e+00 1.41948867e+00 6.12501383e-01] ... [ 2.12284416e-01 1.63837254e+00 2.01191759e+00 ... -2.41347730e-01 1.69716209e-01 -1.12878025e+00] [ 1.89601195e+00 2.39544320e+00 1.14340413e+00 ... 7.55316079e-01 1.31475949e+00 6.28168404e-01] [ 9.23881173e-01 9.61276829e-01 3.82154965e+00 ... 1.22469079e+00 3.27907801e+00 -1.59835517e-01]] [[ 8.10200810e-01 6.91146195e-01 2.45362520e+00 ... 2.02804863e-01 2.49412465e+00 -1.65740299e+00] [ 2.10537624e+00 -3.85657549e-02 2.96705365e-01 ... -2.88230085e+00 1.64927471e+00 1.18793523e+00] [-8.32325816e-02 1.97044873e+00 2.49298662e-01 ... 8.44908595e-01 6.98524177e-01 1.72937655e+00] ... [ 1.23956168e+00 1.40535325e-01 -1.17003143e+00 ... 1.42237782e+00 1.55493021e-02 9.19294059e-01] [ 1.31298316e+00 1.32464695e+00 -6.53126478e-01 ... 2.19714928e+00 -3.55783165e-01 -8.76654387e-02] [ 1.95692748e-01 7.21863389e-01 1.52128172e+00 ... 1.97047246e+00 2.42312938e-01 4.80182946e-01]] [[-1.49024487e-01 6.44903302e-01 4.93815303e-01 ... 4.27668273e-01 9.99201894e-01 1.00773305e-01] [-3.30144823e-01 5.05049467e-01 -3.28070045e-01 ... 1.29890752e+00 1.52598131e+00 -3.23587656e-03] [ 1.79529989e+00 2.26639509e+00 1.65692008e+00 ... -1.13125205e+00 7.38523006e-01 -1.16377759e+00] ... [ 1.65238154e+00 1.55869794e+00 -9.11051273e-01 ... 
-3.70418966e-01 2.40927547e-01 -4.06389236e-01] [ 4.87578511e-02 -1.41068387e+00 7.09559262e-01 ... -7.50453711e-01 -5.87180972e-01 -1.06445062e+00] [ 2.00851488e+00 -1.24568534e+00 1.40579605e+00 ... 1.79667330e+00 1.14735603e+00 -1.07903242e-01]]] [[[ 1.99620783e-01 -1.42819571e+00 -1.51268327e+00 ... -3.36069345e+00 -1.20479405e+00 -1.70409632e+00] [-4.94579166e-01 -3.93772572e-01 -2.74397349e+00 ... -1.47132659e+00 3.60179305e-01 -1.11085534e+00] [-3.86855960e-01 -4.86557096e-01 -7.73469210e-01 ... -5.35187721e-02 -7.29496717e-01 -1.18796551e+00] ... [-3.25646967e-01 -2.06658125e+00 1.27740145e-01 ... -3.16263497e-01 1.35479629e-01 -1.42330551e+00] [-2.45662391e-01 1.17856085e+00 -2.02881670e+00 ... 9.19862390e-02 3.38436961e-01 4.19915676e-01] [-1.44709086e+00 -9.14970756e-01 -2.80464792e+00 ... 2.80902267e-01 8.18747282e-01 -3.85002494e-01]] [[ 5.76908946e-01 2.88518429e-01 -2.30111146e+00 ... -1.37003303e+00 7.48422742e-01 -1.76793790e+00] [-2.72757602e+00 1.72542155e-01 -3.89294624e+00 ... -2.45370150e-01 1.31132305e+00 -1.90979838e+00] [-1.62736893e+00 -9.06352758e-01 -1.30885887e+00 ... -1.62982345e-01 -2.65540659e-01 -9.51743782e-01] ... [-2.42827368e+00 -1.42755246e+00 1.19193995e+00 ... 2.14508486e+00 8.50152612e-01 4.60042357e-02] [-5.42568266e-01 -1.78034282e+00 -1.69270778e+00 ... -7.13278890e-01 -3.14636469e+00 -3.15297318e+00] [-9.26571727e-01 -2.31311798e+00 -2.42969275e+00 ... -4.86053199e-01 -2.06076431e+00 4.52891350e-01]] [[-4.02444005e-02 -6.03335440e-01 -3.34476042e+00 ... -1.05039382e+00 -2.62533605e-01 -3.76742363e+00] [-2.05273056e+00 -6.24930918e-01 1.35366380e+00 ... 7.54868746e-01 -1.53364992e+00 -1.61460352e+00] [ 4.40674305e-01 -2.27603817e+00 -3.81056786e+00 ... -3.00276756e+00 -1.24379027e+00 -3.26302558e-01] ... [ 1.26732838e+00 -1.36343312e+00 -9.74652767e-01 ... 3.89253259e-01 -1.04415548e+00 1.22710216e+00] [-1.04507315e+00 -7.33131528e-01 3.96751761e-01 ... 
-2.53808165e+00 5.70322871e-02 -5.53864598e-01] [-7.96593368e-01 -2.23314428e+00 -2.10024238e+00 ... -1.38421249e+00 -2.11060953e+00 -3.34632814e-01]] [[-8.85878921e-01 -5.57498813e-01 -2.74067068e+00 ... -1.14579856e+00 -2.99636316e+00 3.95798326e-01] [-1.53212929e+00 1.56505501e+00 -1.47034931e+00 ... -2.11502719e+00 -1.95285094e+00 8.44722271e-01] [-1.44139922e+00 -3.68975306e+00 -1.09106994e+00 ... -3.40112329e-01 -1.79019141e+00 -2.10491490e+00] ... [-3.33828735e+00 1.24605405e+00 1.00350440e-01 ... -2.15319681e+00 -3.07790130e-01 -1.73945487e-01] [-9.58922446e-01 5.52725077e-01 -1.15248680e+00 ... -1.91861069e+00 1.43256724e-01 1.42469251e+00] [-2.94079161e+00 1.11858368e-01 -1.25661385e+00 ... -1.46557307e+00 -2.12991953e+00 -2.07006407e+00]] [[ 1.11663139e+00 -1.90595686e+00 -1.54658067e+00 ... -1.32424581e+00 -3.25088835e+00 2.47931957e-01] [-1.58224034e+00 -1.40176582e+00 3.86812449e-01 ... -2.04055214e+00 4.88194227e-01 -1.40168583e+00] [ 8.61892700e-01 -2.97765732e+00 9.92201090e-01 ... 4.16904211e-01 5.57113767e-01 4.24442649e-01] ... [-1.14159834e+00 -1.51982200e+00 2.18856859e+00 ... -1.28762376e+00 6.54416680e-02 -3.03860098e-01] [-1.37938392e+00 -2.52945721e-01 -8.05755079e-01 ... 1.96184933e-01 -1.34775925e+00 -1.53911972e+00] [-1.74621105e+00 6.96089983e-01 -2.09772801e+00 ... -3.26394176e+00 -7.01897323e-01 -5.46553493e-01]]] [[[-6.87409163e-01 3.79038244e-01 2.88657141e+00 ... -8.24448824e-01 4.00013775e-01 1.19424999e-01] [-6.57434583e-01 -1.66304541e+00 -1.32717907e-01 ... -2.65603352e+00 3.31832260e-01 -2.12732911e+00] [-2.58127856e+00 -1.37757123e+00 2.63292599e+00 ... 5.73323965e-01 3.16605377e+00 -3.17616510e+00] ... [-3.00540209e+00 5.54195881e-01 -3.00986123e+00 ... -1.52605069e+00 -1.46517467e+00 -2.44179785e-01] [ 5.38315058e-01 1.34802282e-01 2.82167584e-01 ... 1.88298416e+00 8.84935737e-01 1.36736238e+00] [ 7.44824767e-01 -9.60302353e-01 -1.63726306e+00 ... 
-1.35513186e+00 -2.78595567e+00 -3.73008609e-01]] [[-1.55313241e+00 -1.25190008e+00 -1.66259408e+00 ... -4.81155813e-02 -3.05593610e-01 -2.11752504e-02] [-5.31738460e-01 -2.00136244e-01 -3.84328961e-02 ... 1.51364899e+00 6.56104326e-01 2.22378874e+00] [-2.13805509e+00 1.44577909e+00 -5.31617820e-01 ... -1.13852990e+00 7.66074419e-01 -7.72893548e-01] ... [ 2.34687686e+00 -4.61094946e-01 8.18980098e-01 ... 2.28846097e+00 -5.08059621e-01 1.25392318e+00] [-3.49081695e-01 -2.38585210e+00 5.72798014e-01 ... -1.05089188e+00 2.39339781e+00 1.68194342e+00] [-2.95449346e-02 -3.20506525e+00 3.20614964e-01 ... 2.45767164e+00 -1.36216342e-01 -1.38298380e+00]] [[-2.53130627e+00 -7.29185641e-01 1.38347435e+00 ... 1.57513022e+00 4.43944007e-01 1.31521535e+00] [ 5.33465505e-01 1.07213211e+00 3.98321778e-01 ... -7.35970855e-01 2.35305619e+00 3.72054368e-01] [-3.47917199e+00 -1.55541301e+00 4.92240101e-01 ... -2.03233361e-02 -2.08975339e+00 -1.84980720e-01] ... [-1.93329442e+00 -2.18332338e+00 -1.77815545e+00 ... 1.42590368e+00 1.23389351e+00 -4.52148199e-01] [-2.45025039e+00 -3.66186810e+00 -2.29509756e-01 ... 1.14461803e+00 -1.95264435e+00 -1.38550448e+00] [ 7.58269072e-01 9.89444971e-01 -4.06449413e+00 ... -1.02350688e+00 -3.31753635e+00 -1.17176324e-01]] [[-1.74809957e+00 3.68810385e-01 -2.59312320e+00 ... -5.79389185e-02 -1.99147415e+00 2.78847575e+00] [-1.61338425e+00 -2.05515981e+00 -3.44877779e-01 ... 3.12729144e+00 4.10362810e-01 -1.40710700e+00] [ 1.73794007e+00 -8.57628226e-01 -3.81865501e-01 ... -4.89664763e-01 8.53758574e-01 -3.22995329e+00] ... [ 1.76775622e+00 -1.03933132e+00 7.32252955e-01 ... -4.14816469e-01 -4.41064954e-01 -3.23076487e-01] [-5.92392564e-01 -3.68169260e+00 3.79795623e+00 ... -1.33096492e+00 3.99381906e-01 -1.78290820e+00] [ 1.02697277e+00 -1.73819077e+00 -6.72072172e-04 ... -3.64516044e+00 1.81735545e-01 2.90854120e+00]] [[ 3.42764586e-01 2.32352948e+00 1.05499327e+00 ... 
7.51222074e-02 -8.78515363e-01 8.58120441e-01] [ 2.06650376e+00 -8.59535933e-01 3.48302513e-01 ... 2.32484967e-01 -9.54629660e-01 -1.37253308e+00] [-1.61849010e+00 -1.62597644e+00 -2.15445399e+00 ... 2.13982201e+00 -1.98263919e+00 5.34105003e-02] ... [-1.03839695e+00 8.47521305e-01 -8.17932487e-01 ... 3.05459768e-01 -8.02687407e-01 1.46959996e+00] [ 2.45790541e-01 2.17370796e+00 -7.00292826e-01 ... -6.64270401e-01 1.38417506e+00 3.83859181e+00] [-2.11713457e+00 -1.04510558e+00 -6.98139548e-01 ... -1.65742683e+00 9.96446371e-01 1.77418709e+00]]]]]; ov_res: [[[[[ 3.84361982e-01 1.31299996e+00 -1.14699614e+00 ... 2.14828205e+00 3.32784414e-01 -7.45532513e-02] [-1.52583539e-01 2.50129938e-01 1.17899120e+00 ... 1.35944188e+00 1.44010216e-01 2.00501680e+00] [ 1.74825042e-01 2.14929849e-01 -1.14852536e+00 ... -1.04967606e+00 -1.14358640e+00 2.78038383e+00] ... [ 2.21488476e+00 9.49596584e-01 2.51983404e+00 ... 1.51657772e+00 -1.26897752e-01 8.58595014e-01] [-4.25759792e-01 7.69467652e-02 1.23012877e+00 ... -9.36652541e-01 -6.47225380e-01 -1.74741054e+00] [ 4.10331786e-01 1.00992668e+00 1.66192424e+00 ... 1.23533058e+00 1.46575069e+00 -2.92046487e-01]] [[-6.78482294e-01 1.12707686e+00 2.61752152e+00 ... -4.13417816e-02 -8.95256042e-01 1.19249380e+00] [ 1.92712438e+00 5.54135621e-01 1.36225724e+00 ... -7.10783243e-01 -1.35178006e+00 -2.96193719e-01] [ 1.34341121e+00 -2.10427642e-02 -7.46026158e-01 ... 1.39571953e+00 -4.43689823e-01 1.22917509e+00] ... [-8.72606277e-01 8.04584742e-01 -1.37366712e+00 ... -1.88493490e+00 3.33724916e-01 -5.92303872e-02] [ 8.34987164e-01 1.51695406e+00 -2.04882145e-01 ... 7.99021959e-01 -2.97266960e-01 9.11793232e-01] [ 1.83882982e-01 3.24345279e+00 2.27488637e-01 ... -1.61057186e+00 1.02286935e+00 5.91276586e-01]] [[ 1.94395804e+00 2.59244174e-01 9.06020999e-01 ... 3.25857162e-01 4.15294886e-01 1.61498439e+00] [-7.04813600e-02 -6.11706614e-01 -1.80626893e+00 ... 
-3.32108617e-01 -8.73137474e-01 1.02296710e+00] [ 5.61884820e-01 2.44155598e+00 1.61707139e+00 ... 1.09469867e+00 1.41948867e+00 6.12501383e-01] ... [ 2.12284416e-01 1.63837254e+00 2.01191759e+00 ... -2.41347730e-01 1.69716209e-01 -1.12878025e+00] [ 1.89601195e+00 2.39544320e+00 1.14340413e+00 ... 7.55316079e-01 1.31475949e+00 6.28168404e-01] [ 9.23881173e-01 9.61276829e-01 3.82154965e+00 ... 1.22469079e+00 3.27907801e+00 -1.59835517e-01]] [[ 8.10200810e-01 6.91146195e-01 2.45362520e+00 ... 2.02804863e-01 2.49412465e+00 -1.65740299e+00] [ 2.10537624e+00 -3.85657549e-02 2.96705365e-01 ... -2.88230085e+00 1.64927471e+00 1.18793523e+00] [-8.32325816e-02 1.97044873e+00 2.49298662e-01 ... 8.44908595e-01 6.98524177e-01 1.72937655e+00] ... [ 1.23956168e+00 1.40535325e-01 -1.17003143e+00 ... 1.42237782e+00 1.55493021e-02 9.19294059e-01] [ 1.31298316e+00 1.32464695e+00 -6.53126478e-01 ... 2.19714928e+00 -3.55783165e-01 -8.76654387e-02] [ 1.95692748e-01 7.21863389e-01 1.52128172e+00 ... 1.97047246e+00 2.42312938e-01 4.80182946e-01]] [[-1.49024487e-01 6.44903302e-01 4.93815303e-01 ... 4.27668273e-01 9.99201894e-01 1.00773305e-01] [-3.30144823e-01 5.05049467e-01 -3.28070045e-01 ... 1.29890752e+00 1.52598131e+00 -3.23587656e-03] [ 1.79529989e+00 2.26639509e+00 1.65692008e+00 ... -1.13125205e+00 7.38523006e-01 -1.16377759e+00] ... [ 1.65238154e+00 1.55869794e+00 -9.11051273e-01 ... -3.70418966e-01 2.40927547e-01 -4.06389236e-01] [ 4.87578511e-02 -1.41068387e+00 7.09559262e-01 ... -7.50453711e-01 -5.87180972e-01 -1.06445062e+00] [ 2.00851488e+00 -1.24568534e+00 1.40579605e+00 ... 1.79667330e+00 1.14735603e+00 -1.07903242e-01]]] [[[ 1.99620783e-01 -1.42819571e+00 -1.51268327e+00 ... -3.36069345e+00 -1.20479405e+00 -1.70409632e+00] [-4.94579166e-01 -3.93772572e-01 -2.74397349e+00 ... -1.47132659e+00 3.60179305e-01 -1.11085534e+00] [-3.86855960e-01 -4.86557096e-01 -7.73469210e-01 ... -5.35187721e-02 -7.29496717e-01 -1.18796551e+00] ... 
[-3.25646967e-01 -2.06658125e+00 1.27740145e-01 ... -3.16263497e-01 1.35479629e-01 -1.42330551e+00] [-2.45662391e-01 1.17856085e+00 -2.02881670e+00 ... 9.19862390e-02 3.38436961e-01 4.19915676e-01] [-1.44709086e+00 -9.14970756e-01 -2.80464792e+00 ... 2.80902267e-01 8.18747282e-01 -3.85002494e-01]] [[ 5.76908946e-01 2.88518429e-01 -2.30111146e+00 ... -1.37003303e+00 7.48422742e-01 -1.76793790e+00] [-2.72757602e+00 1.72542155e-01 -3.89294624e+00 ... -2.45370150e-01 1.31132305e+00 -1.90979838e+00] [-1.62736893e+00 -9.06352758e-01 -1.30885887e+00 ... -1.62982345e-01 -2.65540659e-01 -9.51743782e-01] ... [-2.42827368e+00 -1.42755246e+00 1.19193995e+00 ... 2.14508486e+00 8.50152612e-01 4.60042357e-02] [-5.42568266e-01 -1.78034282e+00 -1.69270778e+00 ... -7.13278890e-01 -3.14636469e+00 -3.15297318e+00] [-9.26571727e-01 -2.31311798e+00 -2.42969275e+00 ... -4.86053199e-01 -2.06076431e+00 4.52891350e-01]] [[-4.02444005e-02 -6.03335440e-01 -3.34476042e+00 ... -1.05039382e+00 -2.62533605e-01 -3.76742363e+00] [-2.05273056e+00 -6.24930918e-01 1.35366380e+00 ... 7.54868746e-01 -1.53364992e+00 -1.61460352e+00] [ 4.40674305e-01 -2.27603817e+00 -3.81056786e+00 ... -3.00276756e+00 -1.24379027e+00 -3.26302558e-01] ... [ 1.26732838e+00 -1.36343312e+00 -9.74652767e-01 ... 3.89253259e-01 -1.04415548e+00 1.22710216e+00] [-1.04507315e+00 -7.33131528e-01 3.96751761e-01 ... -2.53808165e+00 5.70322871e-02 -5.53864598e-01] [-7.96593368e-01 -2.23314428e+00 -2.10024238e+00 ... -1.38421249e+00 -2.11060953e+00 -3.34632814e-01]] [[-8.85878921e-01 -5.57498813e-01 -2.74067068e+00 ... -1.14579856e+00 -2.99636316e+00 3.95798326e-01] [-1.53212929e+00 1.56505501e+00 -1.47034931e+00 ... -2.11502719e+00 -1.95285094e+00 8.44722271e-01] [-1.44139922e+00 -3.68975306e+00 -1.09106994e+00 ... -3.40112329e-01 -1.79019141e+00 -2.10491490e+00] ... [-3.33828735e+00 1.24605405e+00 1.00350440e-01 ... -2.15319681e+00 -3.07790130e-01 -1.73945487e-01] [-9.58922446e-01 5.52725077e-01 -1.15248680e+00 ... 
-1.91861069e+00 1.43256724e-01 1.42469251e+00] [-2.94079161e+00 1.11858368e-01 -1.25661385e+00 ... -1.46557307e+00 -2.12991953e+00 -2.07006407e+00]] [[ 1.11663139e+00 -1.90595686e+00 -1.54658067e+00 ... -1.32424581e+00 -3.25088835e+00 2.47931957e-01] [-1.58224034e+00 -1.40176582e+00 3.86812449e-01 ... -2.04055214e+00 4.88194227e-01 -1.40168583e+00] [ 8.61892700e-01 -2.97765732e+00 9.92201090e-01 ... 4.16904211e-01 5.57113767e-01 4.24442649e-01] ... [-1.14159834e+00 -1.51982200e+00 2.18856859e+00 ... -1.28762376e+00 6.54416680e-02 -3.03860098e-01] [-1.37938392e+00 -2.52945721e-01 -8.05755079e-01 ... 1.96184933e-01 -1.34775925e+00 -1.53911972e+00] [-1.74621105e+00 6.96089983e-01 -2.09772801e+00 ... -3.26394176e+00 -7.01897323e-01 -5.46553493e-01]]] [[[-6.87409163e-01 3.79038244e-01 2.88657141e+00 ... -8.24448824e-01 4.00013775e-01 1.19424999e-01] [-6.57434583e-01 -1.66304541e+00 -1.32717907e-01 ... -2.65603352e+00 3.31832260e-01 -2.12732911e+00] [-2.58127856e+00 -1.37757123e+00 2.63292599e+00 ... 5.73323965e-01 3.16605377e+00 -3.17616510e+00] ... [-3.00540209e+00 5.54195881e-01 -3.00986123e+00 ... -1.52605069e+00 -1.46517467e+00 -2.44179785e-01] [ 5.38315058e-01 1.34802282e-01 2.82167584e-01 ... 1.88298416e+00 8.84935737e-01 1.36736238e+00] [ 7.44824767e-01 -9.60302353e-01 -1.63726306e+00 ... -1.35513186e+00 -2.78595567e+00 -3.73008609e-01]] [[-1.55313241e+00 -1.25190008e+00 -1.66259408e+00 ... -4.81155813e-02 -3.05593610e-01 -2.11752504e-02] [-5.31738460e-01 -2.00136244e-01 -3.84328961e-02 ... 1.51364899e+00 6.56104326e-01 2.22378874e+00] [-2.13805509e+00 1.44577909e+00 -5.31617820e-01 ... -1.13852990e+00 7.66074419e-01 -7.72893548e-01] ... [ 2.34687686e+00 -4.61094946e-01 8.18980098e-01 ... 2.28846097e+00 -5.08059621e-01 1.25392318e+00] [-3.49081695e-01 -2.38585210e+00 5.72798014e-01 ... -1.05089188e+00 2.39339781e+00 1.68194342e+00] [-2.95449346e-02 -3.20506525e+00 3.20614964e-01 ... 
2.45767164e+00 -1.36216342e-01 -1.38298380e+00]] [[-2.53130627e+00 -7.29185641e-01 1.38347435e+00 ... 1.57513022e+00 4.43944007e-01 1.31521535e+00] [ 5.33465505e-01 1.07213211e+00 3.98321778e-01 ... -7.35970855e-01 2.35305619e+00 3.72054368e-01] [-3.47917199e+00 -1.55541301e+00 4.92240101e-01 ... -2.03233361e-02 -2.08975339e+00 -1.84980720e-01] ... [-1.93329442e+00 -2.18332338e+00 -1.77815545e+00 ... 1.42590368e+00 1.23389351e+00 -4.52148199e-01] [-2.45025039e+00 -3.66186810e+00 -2.29509756e-01 ... 1.14461803e+00 -1.95264435e+00 -1.38550448e+00] [ 7.58269072e-01 9.89444971e-01 -4.06449413e+00 ... -1.02350688e+00 -3.31753635e+00 -1.17176324e-01]] [[-1.74809957e+00 3.68810385e-01 -2.59312320e+00 ... -5.79389185e-02 -1.99147415e+00 2.78847575e+00] [-1.61338425e+00 -2.05515981e+00 -3.44877779e-01 ... 3.12729144e+00 4.10362810e-01 -1.40710700e+00] [ 1.73794007e+00 -8.57628226e-01 -3.81865501e-01 ... -4.89664763e-01 8.53758574e-01 -3.22995329e+00] ... [ 1.76775622e+00 -1.03933132e+00 7.32252955e-01 ... -4.14816469e-01 -4.41064954e-01 -3.23076487e-01] [-5.92392564e-01 -3.68169260e+00 3.79795623e+00 ... -1.33096492e+00 3.99381906e-01 -1.78290820e+00] [ 1.02697277e+00 -1.73819077e+00 -6.72072172e-04 ... -3.64516044e+00 1.81735545e-01 2.90854120e+00]] [[ 3.42764586e-01 2.32352948e+00 1.05499327e+00 ... 7.51222074e-02 -8.78515363e-01 8.58120441e-01] [ 2.06650376e+00 -8.59535933e-01 3.48302513e-01 ... 2.32484967e-01 -9.54629660e-01 -1.37253308e+00] [-1.61849010e+00 -1.62597644e+00 -2.15445399e+00 ... 2.13982201e+00 -1.98263919e+00 5.34105003e-02] ... [-1.03839695e+00 8.47521305e-01 -8.17932487e-01 ... 3.05459768e-01 -8.02687407e-01 1.46959996e+00] [ 2.45790541e-01 2.17370796e+00 -7.00292826e-01 ... -6.64270401e-01 1.38417506e+00 3.83859181e+00] [-2.11713457e+00 -1.04510558e+00 -6.98139548e-01 ... -1.65742683e+00 9.96446371e-01 1.77418709e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} 0] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_972.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.32929}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.01 * -8.2302 (2,1,1,.,.) = 1.4717 (3,1,1,.,.) = 1.5990 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[-1.0772129 -1.3292859 -6.2052164 ... -2.3269591 -1.3292859 0.7440758 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-0.02032876 -1.3292859 3.9238107 ... -2.6429672 -1.3292859 -3.606884 ] ... [ 0.69932294 -1.3292859 -1.1287339 ... 0.7924762 -1.3292859 -3.7265506 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-6.0002594 -1.3292859 0.7512417 ... 1.4512296 -1.3292859 -0.6920156 ]] [[-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] ... [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ]] [[-3.1448436 -1.3292859 3.5927193 ... 
-2.7186913 -1.3292859 1.1289804 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3344891 -1.3292859 0.9038098 ... -2.8411279 -1.3292859 -7.157734 ] ... [-1.2711631 -1.3292859 -3.9840434 ... -1.5147074 -1.3292859 -0.94107604] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-2.128591 -1.3292859 -0.5123243 ... 1.2639909 -1.3292859 1.297189 ]] ... [[-4.3708735 -1.3292859 -1.2786666 ... 1.9443874 -1.3292859 -2.2750883 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-0.11574495 -1.3292859 -4.724045 ... 0.43010843 -1.3292859 -0.5985517 ] ... [-1.6742218 -1.3292859 -2.93637 ... -2.5669806 -1.3292859 1.8821237 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-2.5038772 -1.3292859 -3.8283825 ... -1.7258394 -1.3292859 0.22030294]] [[-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] ... [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ]] [[-2.209001 -1.3292859 -4.497629 ... -4.406885 -1.3292859 -1.2747855 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [ 1.8419237 -1.3292859 1.2963433 ... -1.6858262 -1.3292859 1.0999644 ] ... [ 3.721532 -1.3292859 -4.270134 ... -1.5754824 -1.3292859 -0.01635909] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-3.5277839 -1.3292859 -4.139304 ... 0.8216021 -1.3292859 -2.4857032 ]]]]]; ov_res: [[[[[-1.0772129 -1.3292859 -6.2052164 ... -2.3269591 -1.3292859 0.7440758 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-0.02032876 -1.3292859 3.9238107 ... -2.6429672 -1.3292859 -3.606884 ] ... [ 0.69932294 -1.3292859 -1.1287339 ... 
0.7924762 -1.3292859 -3.7265506 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-6.0002594 -1.3292859 0.7512417 ... 1.4512296 -1.3292859 -0.6920156 ]] [[-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] ... [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ]] [[-3.1448436 -1.3292859 3.5927193 ... -2.7186913 -1.3292859 1.1289804 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3344891 -1.3292859 0.9038098 ... -2.8411279 -1.3292859 -7.157734 ] ... [-1.2711631 -1.3292859 -3.9840434 ... -1.5147074 -1.3292859 -0.94107604] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-2.128591 -1.3292859 -0.5123243 ... 1.2639909 -1.3292859 1.297189 ]] ... [[-4.3708735 -1.3292859 -1.2786666 ... 1.9443874 -1.3292859 -2.2750883 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-0.11574495 -1.3292859 -4.724045 ... 0.43010843 -1.3292859 -0.5985517 ] ... [-1.6742218 -1.3292859 -2.93637 ... -2.5669806 -1.3292859 1.8821237 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-2.5038772 -1.3292859 -3.8283825 ... -1.7258394 -1.3292859 0.22030294]] [[-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] ... [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ]] [[-2.209001 -1.3292859 -4.497629 ... 
-4.406885 -1.3292859 -1.2747855 ] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [ 1.8419237 -1.3292859 1.2963433 ... -1.6858262 -1.3292859 1.0999644 ] ... [ 3.721532 -1.3292859 -4.270134 ... -1.5754824 -1.3292859 -0.01635909] [-1.3292859 -1.3292859 -1.3292859 ... -1.3292859 -1.3292859 -1.3292859 ] [-3.5277839 -1.3292859 -4.139304 ... 0.8216021 -1.3292859 -2.4857032 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 2, 2], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.06 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_974.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.8741 0.9398 0.1408 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.5297 (2,1,1,.,.) = -0.7408 (3,1,1,.,.) = -0.3984 (1,2,1,.,.) = 1.5869 (2,2,1,.,.) = -1.7086 (3,2,1,.,.) = -1.0848 (1,3,1,.,.) = 0.01 * 7.3202 (2,3,1,.,.) = 0.8344 (3,3,1,.,.) = -0.9615 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[-1.69716549e+00 6.03425503e-01 8.68709683e-02 1.03563547e+00 3.77397919e+00] [ 3.05758667e+00 8.27224612e-01 1.66011810e+00 -9.41533864e-01 4.29048634e+00] [ 3.13425303e+00 1.87549615e+00 -1.48398328e+00 -1.41662419e-01 1.73878169e+00] [-3.11224878e-01 -2.46900499e-01 1.71423411e+00 2.62220562e-01 1.08573985e+00] [ 2.14779902e+00 3.33140039e+00 5.09484291e+00 1.56002378e+00 1.28064024e+00]] [[ 1.28290641e+00 3.35060239e-01 6.99321508e-01 3.23923659e+00 -2.67312944e-01] [ 1.39005566e+00 3.50790930e+00 8.50214899e-01 2.22869778e+00 1.23230910e+00] [-2.94884658e+00 2.34248018e+00 1.09322226e+00 -2.01307535e-02 2.42621017e+00] [ 1.51914883e+00 1.81957984e+00 -3.06022584e-01 3.73342371e+00 1.28499126e+00] [ 1.88185811e+00 2.84060407e+00 -4.88923728e-01 3.84644747e-01 -3.53277183e+00]] [[ 8.19428384e-01 
-1.50804329e+00 1.73848438e+00 3.44231796e+00 2.07127500e+00] [ 3.91027045e+00 9.78820264e-01 3.38735580e-01 4.88219082e-01 1.51843143e+00] [ 3.41082335e+00 -1.05232596e-02 -2.47749090e-02 -2.70030916e-01 -3.10676241e+00] [-4.54721153e-01 4.00066614e+00 -1.63383484e-02 1.09492445e+00 -4.76815164e-01] [ 8.81350160e-01 -1.81486917e+00 3.85027671e+00 2.45853376e+00 2.97114444e+00]] [[ 4.05335617e+00 1.57220840e+00 1.39452958e+00 8.59295845e-01 6.69609189e-01] [ 9.78006244e-01 -3.85023057e-01 -1.21807837e+00 -1.75262594e+00 2.79697967e+00] [-9.48490918e-01 1.73200059e+00 8.03398967e-01 -6.92578733e-01 3.26101136e+00] [ 2.92227960e+00 -2.23738027e+00 1.91569662e+00 3.51137710e+00 2.15419745e+00] [ 6.95738792e-02 1.58922565e+00 -3.48982155e-01 -1.37626100e+00 1.45091844e+00]] [[-1.36094511e-01 -1.40507960e+00 2.49489260e+00 1.00427508e-01 -8.98604810e-01] [-1.22490406e+00 1.68020797e+00 2.94106293e+00 7.28335381e-01 4.55886507e+00] [-1.23758578e+00 1.78543615e+00 9.06362951e-01 -1.19501233e+00 3.75718445e-01] [ 2.99850774e+00 -6.17092907e-01 -8.33074749e-01 -9.20491755e-01 1.71404541e+00] [ 2.90044022e+00 2.25103951e+00 1.36402762e+00 2.22053218e+00 7.49148250e-01]]] [[[ 5.76937103e+00 1.95133793e+00 -4.41095829e-02 5.99763513e-01 -2.23996210e+00] [-2.88708639e+00 -2.25138521e+00 -2.82163906e+00 2.63925409e+00 -1.16660583e+00] [-1.65007341e+00 1.41189158e-01 3.77007198e+00 3.01578999e+00 -4.06294227e-01] [ 3.44294596e+00 2.09254789e+00 7.23909140e-02 3.65303421e+00 2.68059826e+00] [ 9.08195972e-04 -1.67545474e+00 -2.24372053e+00 -7.05408812e-01 1.23734260e+00]] [[ 1.44573736e+00 1.67300177e+00 2.16448545e-01 -1.66965044e+00 9.99337912e-01] [ 2.95766211e+00 -3.51617098e-01 1.74229670e+00 -1.81272209e+00 -3.96021485e-01] [ 3.68795681e+00 8.85932803e-01 1.30163527e+00 2.40388942e+00 1.00605595e+00] [ 1.99964106e+00 1.88514900e+00 7.10076451e-01 -4.03269243e+00 -1.78616762e-01] [ 2.27109218e+00 7.57846236e-02 1.48172808e+00 2.81724548e+00 1.84860253e+00]] [[ 1.54506063e+00 
2.84097862e+00 -8.10596585e-01 -1.59242022e+00 -2.96040773e-01] [-5.57155848e+00 2.67409992e+00 1.50625885e+00 5.14133883e+00 -4.02223825e-01] [-7.04492807e-01 4.54264307e+00 2.10671139e+00 4.62578917e+00 4.16688728e+00] [ 1.12752438e-01 1.42569900e-01 -2.28252530e-01 -3.69457579e+00 3.85629749e+00] [ 1.96280181e+00 3.73626375e+00 -2.85842752e+00 -3.98889828e+00 -3.48718882e-01]] [[-3.55265570e+00 3.40321600e-01 3.06532860e-01 1.50243831e+00 -8.41266513e-01] [-5.63441753e-01 1.96960270e-01 3.41880703e+00 5.93538761e+00 5.81964850e-02] [ 2.38439035e+00 4.55808520e-01 1.11943007e+00 4.43435907e+00 -1.40549791e+00] [-9.63927031e-01 2.16619253e+00 8.66791368e-01 1.78272820e+00 -1.70400226e+00] [ 1.76007080e+00 8.66275430e-01 1.16968727e+00 3.62449551e+00 3.62806797e+00]] [[-2.22636580e-01 1.93269444e+00 -8.65820408e-01 2.21520114e+00 1.36018276e+00] [ 2.97564030e+00 2.95994043e+00 -1.59585583e+00 1.64593911e+00 -2.41799307e+00] [ 3.01750374e+00 -5.65313935e-01 -2.55894899e-01 3.12790728e+00 1.98605132e+00] [-9.70177293e-01 4.19579792e+00 3.97888041e+00 4.41698790e+00 -3.09260273e+00] [ 1.18430138e-01 -2.45808887e+00 -1.93286777e-01 8.36714864e-01 4.52941924e-01]]] [[[ 1.14701688e+00 1.95768416e+00 4.92914855e-01 -6.07694268e-01 3.89919162e-01] [-1.37563825e+00 3.76493740e+00 -1.27433324e+00 1.84654367e+00 -1.61734152e+00] [-2.60814524e+00 -1.10426927e+00 2.92697453e+00 3.44067335e-01 1.21769166e+00] [ 1.52733386e+00 9.01214004e-01 -1.50569439e-01 -5.27163148e-01 1.73610210e+00] [ 1.04701829e+00 -1.49751198e+00 -1.17898893e+00 -1.63745296e+00 -7.51978993e-01]] [[-1.19236279e+00 5.04889905e-01 3.46160620e-01 -2.61470699e+00 2.05533838e+00] [ 1.45974505e+00 -2.24390912e+00 9.37529325e-01 -2.72400403e+00 3.04144430e+00] [ 1.98069692e+00 -1.94697499e+00 3.83605838e+00 2.33758903e+00 2.99852431e-01] [ 2.27886096e-01 -8.37117434e-01 2.74277472e+00 -1.67252457e+00 -3.75860482e-02] [-3.48560119e+00 -8.31031084e-01 8.60597372e-01 2.32187796e+00 2.51210523e+00]] [[-1.40664709e+00 
1.36643732e+00 -9.84497786e-01 -1.66611147e+00 -1.65175796e+00] [-6.97022796e-01 1.21210063e+00 -4.50439751e-03 1.21478832e+00 -1.76774192e+00] [-1.75361013e+00 1.66614950e+00 3.42740953e-01 5.33962190e-01 1.54230106e+00] [ 1.09559393e+00 -6.31227374e-01 -1.65443182e-01 1.81713820e+00 -2.78949261e-01] [ 1.00406416e-01 2.48409486e+00 -5.57045341e-01 -4.52365488e-01 6.36892080e-01]] [[-2.61862969e+00 4.79987264e-01 -6.20916247e-01 -4.88052666e-02 -4.42191929e-01] [ 1.38057411e+00 6.96957111e-04 1.25820208e+00 2.46110010e+00 -1.23768413e+00] [ 1.30509627e+00 -1.25723469e+00 4.51342940e-01 1.32922387e+00 -1.56647098e+00] [-5.30800074e-02 4.05232859e+00 -2.16675115e+00 -4.16816139e+00 3.11646938e-01] [-6.91030502e-01 3.06867778e-01 -1.50609040e+00 2.33185649e+00 1.58301449e+00]] [[ 2.34396130e-01 8.40611935e-01 -2.51473993e-01 1.13480294e+00 -1.54081142e+00] [-8.22214246e-01 -2.48835534e-01 -1.50100321e-01 -2.17471838e+00 -3.08452296e+00] [-3.36006999e-01 1.61752665e+00 -5.44848442e-01 -3.12666506e-01 1.45176888e+00] [-8.97838831e-01 -5.03331542e-01 1.46102142e+00 -3.65781635e-01 -1.56145203e+00] [-2.16222554e-01 1.99567109e-01 -1.84850216e+00 -2.27930903e+00 -3.70847136e-01]]]]]; ov_res: [[[[[-1.69716549e+00 6.03425503e-01 8.68709683e-02 1.03563547e+00 3.77397919e+00] [ 3.05758667e+00 8.27224612e-01 1.66011810e+00 -9.41533864e-01 4.29048634e+00] [ 3.13425303e+00 1.87549615e+00 -1.48398328e+00 -1.41662419e-01 1.73878169e+00] [-3.11224878e-01 -2.46900499e-01 1.71423411e+00 2.62220562e-01 1.08573985e+00] [ 2.14779902e+00 3.33140039e+00 5.09484291e+00 1.56002378e+00 1.28064024e+00]] [[ 1.28290641e+00 3.35060239e-01 6.99321508e-01 3.23923659e+00 -2.67312944e-01] [ 1.39005566e+00 3.50790930e+00 8.50214899e-01 2.22869778e+00 1.23230910e+00] [-2.94884658e+00 2.34248018e+00 1.09322226e+00 -2.01307535e-02 2.42621017e+00] [ 1.51914883e+00 1.81957984e+00 -3.06022584e-01 3.73342371e+00 1.28499126e+00] [ 1.88185811e+00 2.84060407e+00 -4.88923728e-01 3.84644747e-01 -3.53277183e+00]] 
[[ 8.19428384e-01 -1.50804329e+00 1.73848438e+00 3.44231796e+00 2.07127500e+00] [ 3.91027045e+00 9.78820264e-01 3.38735580e-01 4.88219082e-01 1.51843143e+00] [ 3.41082335e+00 -1.05232596e-02 -2.47749090e-02 -2.70030916e-01 -3.10676241e+00] [-4.54721153e-01 4.00066614e+00 -1.63383484e-02 1.09492445e+00 -4.76815164e-01] [ 8.81350160e-01 -1.81486917e+00 3.85027671e+00 2.45853376e+00 2.97114444e+00]] [[ 4.05335617e+00 1.57220840e+00 1.39452958e+00 8.59295845e-01 6.69609189e-01] [ 9.78006244e-01 -3.85023057e-01 -1.21807837e+00 -1.75262594e+00 2.79697967e+00] [-9.48490918e-01 1.73200059e+00 8.03398967e-01 -6.92578733e-01 3.26101136e+00] [ 2.92227960e+00 -2.23738027e+00 1.91569662e+00 3.51137710e+00 2.15419745e+00] [ 6.95738792e-02 1.58922565e+00 -3.48982155e-01 -1.37626100e+00 1.45091844e+00]] [[-1.36094511e-01 -1.40507960e+00 2.49489260e+00 1.00427508e-01 -8.98604810e-01] [-1.22490406e+00 1.68020797e+00 2.94106293e+00 7.28335381e-01 4.55886507e+00] [-1.23758578e+00 1.78543615e+00 9.06362951e-01 -1.19501233e+00 3.75718445e-01] [ 2.99850774e+00 -6.17092907e-01 -8.33074749e-01 -9.20491755e-01 1.71404541e+00] [ 2.90044022e+00 2.25103951e+00 1.36402762e+00 2.22053218e+00 7.49148250e-01]]] [[[ 5.76937103e+00 1.95133793e+00 -4.41095829e-02 5.99763513e-01 -2.23996210e+00] [-2.88708639e+00 -2.25138521e+00 -2.82163906e+00 2.63925409e+00 -1.16660583e+00] [-1.65007341e+00 1.41189158e-01 3.77007198e+00 3.01578999e+00 -4.06294227e-01] [ 3.44294596e+00 2.09254789e+00 7.23909140e-02 3.65303421e+00 2.68059826e+00] [ 9.08195972e-04 -1.67545474e+00 -2.24372053e+00 -7.05408812e-01 1.23734260e+00]] [[ 1.44573736e+00 1.67300177e+00 2.16448545e-01 -1.66965044e+00 9.99337912e-01] [ 2.95766211e+00 -3.51617098e-01 1.74229670e+00 -1.81272209e+00 -3.96021485e-01] [ 3.68795681e+00 8.85932803e-01 1.30163527e+00 2.40388942e+00 1.00605595e+00] [ 1.99964106e+00 1.88514900e+00 7.10076451e-01 -4.03269243e+00 -1.78616762e-01] [ 2.27109218e+00 7.57846236e-02 1.48172808e+00 2.81724548e+00 1.84860253e+00]] 
[[ 1.54506063e+00 2.84097862e+00 -8.10596585e-01 -1.59242022e+00 -2.96040773e-01] [-5.57155848e+00 2.67409992e+00 1.50625885e+00 5.14133883e+00 -4.02223825e-01] [-7.04492807e-01 4.54264307e+00 2.10671139e+00 4.62578917e+00 4.16688728e+00] [ 1.12752438e-01 1.42569900e-01 -2.28252530e-01 -3.69457579e+00 3.85629749e+00] [ 1.96280181e+00 3.73626375e+00 -2.85842752e+00 -3.98889828e+00 -3.48718882e-01]] [[-3.55265570e+00 3.40321600e-01 3.06532860e-01 1.50243831e+00 -8.41266513e-01] [-5.63441753e-01 1.96960270e-01 3.41880703e+00 5.93538761e+00 5.81964850e-02] [ 2.38439035e+00 4.55808520e-01 1.11943007e+00 4.43435907e+00 -1.40549791e+00] [-9.63927031e-01 2.16619253e+00 8.66791368e-01 1.78272820e+00 -1.70400226e+00] [ 1.76007080e+00 8.66275430e-01 1.16968727e+00 3.62449551e+00 3.62806797e+00]] [[-2.22636580e-01 1.93269444e+00 -8.65820408e-01 2.21520114e+00 1.36018276e+00] [ 2.97564030e+00 2.95994043e+00 -1.59585583e+00 1.64593911e+00 -2.41799307e+00] [ 3.01750374e+00 -5.65313935e-01 -2.55894899e-01 3.12790728e+00 1.98605132e+00] [-9.70177293e-01 4.19579792e+00 3.97888041e+00 4.41698790e+00 -3.09260273e+00] [ 1.18430138e-01 -2.45808887e+00 -1.93286777e-01 8.36714864e-01 4.52941924e-01]]] [[[ 1.14701688e+00 1.95768416e+00 4.92914855e-01 -6.07694268e-01 3.89919162e-01] [-1.37563825e+00 3.76493740e+00 -1.27433324e+00 1.84654367e+00 -1.61734152e+00] [-2.60814524e+00 -1.10426927e+00 2.92697453e+00 3.44067335e-01 1.21769166e+00] [ 1.52733386e+00 9.01214004e-01 -1.50569439e-01 -5.27163148e-01 1.73610210e+00] [ 1.04701829e+00 -1.49751198e+00 -1.17898893e+00 -1.63745296e+00 -7.51978993e-01]] [[-1.19236279e+00 5.04889905e-01 3.46160620e-01 -2.61470699e+00 2.05533838e+00] [ 1.45974505e+00 -2.24390912e+00 9.37529325e-01 -2.72400403e+00 3.04144430e+00] [ 1.98069692e+00 -1.94697499e+00 3.83605838e+00 2.33758903e+00 2.99852431e-01] [ 2.27886096e-01 -8.37117434e-01 2.74277472e+00 -1.67252457e+00 -3.75860482e-02] [-3.48560119e+00 -8.31031084e-01 8.60597372e-01 2.32187796e+00 2.51210523e+00]] 
[[-1.40664709e+00 1.36643732e+00 -9.84497786e-01 -1.66611147e+00 -1.65175796e+00] [-6.97022796e-01 1.21210063e+00 -4.50439751e-03 1.21478832e+00 -1.76774192e+00] [-1.75361013e+00 1.66614950e+00 3.42740953e-01 5.33962190e-01 1.54230106e+00] [ 1.09559393e+00 -6.31227374e-01 -1.65443182e-01 1.81713820e+00 -2.78949261e-01] [ 1.00406416e-01 2.48409486e+00 -5.57045341e-01 -4.52365488e-01 6.36892080e-01]] [[-2.61862969e+00 4.79987264e-01 -6.20916247e-01 -4.88052666e-02 -4.42191929e-01] [ 1.38057411e+00 6.96957111e-04 1.25820208e+00 2.46110010e+00 -1.23768413e+00] [ 1.30509627e+00 -1.25723469e+00 4.51342940e-01 1.32922387e+00 -1.56647098e+00] [-5.30800074e-02 4.05232859e+00 -2.16675115e+00 -4.16816139e+00 3.11646938e-01] [-6.91030502e-01 3.06867778e-01 -1.50609040e+00 2.33185649e+00 1.58301449e+00]] [[ 2.34396130e-01 8.40611935e-01 -2.51473993e-01 1.13480294e+00 -1.54081142e+00] [-8.22214246e-01 -2.48835534e-01 -1.50100321e-01 -2.17471838e+00 -3.08452296e+00] [-3.36006999e-01 1.61752665e+00 -5.44848442e-01 -3.12666506e-01 1.45176888e+00] [-8.97838831e-01 -5.03331542e-01 1.46102142e+00 -3.65781635e-01 -1.56145203e+00] [-2.16222554e-01 1.99567109e-01 -1.84850216e+00 -2.27930903e+00 -3.70847136e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 1, 1], 'pads': [0, 0, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_976.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 1]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.2125 -0.8420 0.2697 [ CPUFloatType{3} ]]() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.1907 (2,1,1,.,.) = 0.7435 (3,1,1,.,.) = 1.3389 (1,2,1,.,.) = -0.4154 (2,2,1,.,.) = 2.5226 (3,2,1,.,.) = 0.5224 (1,3,1,.,.) = -0.6290 (2,3,1,.,.) = 1.1365 (3,3,1,.,.) = 0.1603 [ CPUFloatType{3,3,1,1,1} ]]() %10 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%10) fw_re: [[[[[ 0.21249352 -0.21941324 0.5569558 ... -0.91407514 -0.34377426 0.21249352] [ 0.21249352 -0.2514581 0.28445733 ... 0.40342763 0.56757325 0.21249352] [ 0.21249352 -0.5429621 0.9301483 ... -0.27216774 0.26386705 0.21249352] ... [ 0.21249352 -0.4471262 -0.18811397 ... 0.9115232 0.49574733 0.21249352] [ 0.21249352 0.38710922 1.1794574 ... -0.83850026 -0.4811024 0.21249352] [ 0.21249352 0.59456414 0.14480856 ... -0.42933536 -0.6578901 0.21249352]] [[ 0.21249352 0.88020706 1.1410058 ... -0.9602337 -0.36886978 0.21249352] [ 0.21249352 -0.79275787 0.4985785 ... -0.8484994 0.78655976 0.21249352] [ 0.21249352 -0.81169486 -0.031927 ... 0.26280493 0.49892867 0.21249352] ... [ 0.21249352 -0.06337635 1.1482246 ... 
-0.5850442 -0.22283451 0.21249352] [ 0.21249352 -0.7227093 0.38963145 ... -0.42657864 0.7389109 0.21249352] [ 0.21249352 -1.1136402 1.3169658 ... 1.6863755 0.05316572 0.21249352]] [[ 0.21249352 -0.14205174 0.7464607 ... -0.17101516 -0.66389054 0.21249352] [ 0.21249352 -0.93495667 -0.7665057 ... 1.0033724 0.09816619 0.21249352] [ 0.21249352 0.89205724 0.5284907 ... -1.6134742 0.99510217 0.21249352] ... [ 0.21249352 1.2460858 0.58218724 ... 0.05984779 0.31681946 0.21249352] [ 0.21249352 1.1216972 1.0155885 ... 0.12124316 0.05174552 0.21249352] [ 0.21249352 -0.48712838 0.13510874 ... 0.527267 -0.44185764 0.21249352]] [[ 0.21249352 1.692332 0.72875637 ... -0.3316431 -0.842046 0.21249352] [ 0.21249352 0.74292123 0.06628519 ... -0.46369076 1.0332007 0.21249352] [ 0.21249352 1.849741 0.9931528 ... -0.24577193 1.9089507 0.21249352] ... [ 0.21249352 0.18095185 1.3763934 ... 0.80625534 -0.43152863 0.21249352] [ 0.21249352 0.14329584 -1.4602715 ... -0.97493947 -0.23311998 0.21249352] [ 0.21249352 -0.6583017 0.13422343 ... 0.9433658 -0.01344556 0.21249352]] [[ 0.21249352 1.2073393 0.23397669 ... -0.7270159 0.32987472 0.21249352] [ 0.21249352 1.3936594 -0.8023939 ... -0.19594105 1.0751783 0.21249352] [ 0.21249352 0.10795981 -0.12118085 ... 0.3678288 0.10445637 0.21249352] ... [ 0.21249352 -0.15095125 1.6760088 ... 0.01386851 -0.16679822 0.21249352] [ 0.21249352 -0.19768612 1.3089557 ... 0.23577893 -0.30700994 0.21249352] [ 0.21249352 0.72110337 0.13633016 ... 0.6843352 -0.28901172 0.21249352]]] [[[-0.8419662 0.38061506 -1.445138 ... 0.82474357 -1.1456513 -0.8419662 ] [-0.8419662 1.1715047 -1.1231965 ... -0.6911497 -2.424057 -0.8419662 ] [-0.8419662 2.4751632 -4.0914936 ... -1.3981624 -0.8610437 -0.8419662 ] ... [-0.8419662 3.1924076 -1.8028471 ... -2.500945 -1.5896354 -0.8419662 ] [-0.8419662 -2.765585 -4.2791896 ... 1.979353 1.2581513 -0.8419662 ] [-0.8419662 -2.3401434 -2.262491 ... -0.9997817 0.02516294 -0.8419662 ]] [[-0.8419662 -1.101802 -3.528939 ... 
3.6672597 2.5144126 -0.8419662 ] [-0.8419662 2.8463893 -1.6236273 ... 0.69297963 -4.9019136 -0.8419662 ] [-0.8419662 2.2568634 -0.7255535 ... -0.8392375 -0.91784066 -0.8419662 ] ... [-0.8419662 0.59199446 -5.53707 ... 1.2461047 -1.4162414 -0.8419662 ] [-0.8419662 3.5247679 -2.036927 ... -0.07029885 0.09275395 -0.8419662 ] [-0.8419662 1.6130662 -4.042251 ... -3.8609648 1.4621031 -0.8419662 ]] [[-0.8419662 0.21136278 -3.3511584 ... 0.6747102 3.1756892 -0.8419662 ] [-0.8419662 4.2572775 1.8009801 ... -5.657462 -2.04458 -0.8419662 ] [-0.8419662 -3.4587274 -1.0761127 ... 5.799354 -2.764055 -0.8419662 ] ... [-0.8419662 -1.5711858 -0.3498646 ... -0.66254675 0.5393651 -0.8419662 ] [-0.8419662 -6.983708 -0.44149446 ... -1.672128 1.3650248 -0.8419662 ] [-0.8419662 1.870264 -1.1564019 ... -0.34339756 -0.30219412 -0.8419662 ]] [[-0.8419662 -5.0418615 -3.2120159 ... 1.1638477 0.18689817 -0.8419662 ] [-0.8419662 -1.4773278 -0.6541475 ... 4.167783 -5.7489147 -0.8419662 ] [-0.8419662 -7.202835 -4.4085827 ... 0.22408873 -6.3874445 -0.8419662 ] ... [-0.8419662 0.8107572 -4.526613 ... -3.777861 -0.3683438 -0.8419662 ] [-0.8419662 -2.5474758 3.3164272 ... 5.7294364 1.9755099 -0.8419662 ] [-0.8419662 3.3193617 0.0923835 ... -4.1149635 -2.845898 -0.8419662 ]] [[-0.8419662 -5.1658664 1.5964456 ... 1.8061678 0.96586686 -0.8419662 ] [-0.8419662 -4.444094 2.866776 ... 0.7162835 -4.6154437 -0.8419662 ] [-0.8419662 0.0424931 2.2906663 ... -2.1285331 -1.972722 -0.8419662 ] ... [-0.8419662 -1.0643237 -5.248872 ... 1.47733 1.7369437 -0.8419662 ] [-0.8419662 -0.7617203 -2.1327572 ... 0.2716747 1.8457968 -0.8419662 ] [-0.8419662 -2.4993203 0.6686991 ... -4.1511703 4.2565494 -0.8419662 ]]] [[[ 0.26968017 1.2654167 -0.22977641 ... 1.8566548 0.87558913 0.26968017] [ 0.26968017 0.97978437 0.8333564 ... -0.3111308 -0.4206032 0.26968017] [ 0.26968017 2.6006904 -1.844046 ... 0.3739432 -1.1347165 0.26968017] ... [ 0.26968017 2.435846 -1.3888284 ... 
-0.78309786 0.5514067 0.26968017] [ 0.26968017 0.19042934 0.7018968 ... 1.1120139 1.4658576 0.26968017] [ 0.26968017 -0.1013487 -0.05820221 ... 0.9704486 0.57324845 0.26968017]] [[ 0.26968017 -1.8457056 -0.32772508 ... 3.2687519 2.1376367 0.26968017] [ 0.26968017 1.0682299 -0.42435697 ... 1.198724 -2.1159303 0.26968017] [ 0.26968017 2.406004 0.1104034 ... 2.4641767 -0.3456281 0.26968017] ... [ 0.26968017 1.9385873 -1.8559262 ... 1.3587254 -0.6094543 0.26968017] [ 0.26968017 3.2473638 1.7172887 ... -1.077655 -0.1498375 0.26968017] [ 0.26968017 1.7282732 -0.77008 ... -3.3060148 2.2819078 0.26968017]] [[ 0.26968017 0.77056766 -1.4881122 ... -0.65170455 4.192835 0.26968017] [ 0.26968017 2.0924435 -0.716694 ... -0.938604 -0.8770853 0.26968017] [ 0.26968017 -1.2615976 -0.7760664 ... 0.96416664 -0.47391298 0.26968017] ... [ 0.26968017 -3.077347 -0.6712297 ... -1.8088771 0.5246073 0.26968017] [ 0.26968017 -0.02265948 -0.9386581 ... 1.6698827 0.58032227 0.26968017] [ 0.26968017 1.4022343 -1.6001407 ... -0.3434722 0.9047543 0.26968017]] [[ 0.26968017 -0.57638943 -0.6196563 ... 0.40625164 1.3304564 0.26968017] [ 0.26968017 0.44594887 -0.25311223 ... 1.5372806 -1.5146376 0.26968017] [ 0.26968017 -2.8182833 0.9903139 ... 1.0319097 -4.0855923 0.26968017] ... [ 0.26968017 1.4069916 0.05913159 ... -1.6273847 2.2211914 0.26968017] [ 0.26968017 -0.47913203 2.1879063 ... 3.357184 0.8234242 0.26968017] [ 0.26968017 2.2851992 -0.94226515 ... -0.97185445 1.8847516 0.26968017]] [[ 0.26968017 -0.44538948 2.6376178 ... -0.590003 -0.38976982 0.26968017] [ 0.26968017 1.0870651 2.8849108 ... 1.8117688 0.6703589 0.26968017] [ 0.26968017 1.5424618 1.7681469 ... -0.2628512 0.8788692 0.26968017] ... [ 0.26968017 0.12665302 -0.9155997 ... 1.0396272 -1.307869 0.26968017] [ 0.26968017 -0.57163227 -1.4972023 ... 1.1028614 3.088454 0.26968017] [ 0.26968017 -0.24447075 0.01790109 ... -0.23691043 2.5336847 0.26968017]]]]]; ov_res: [[[[[ 0.21249352 -0.21941324 0.5569558 ... 
-0.91407514 -0.34377426 0.21249352] [ 0.21249352 -0.2514581 0.28445733 ... 0.40342763 0.56757325 0.21249352] [ 0.21249352 -0.5429621 0.9301483 ... -0.27216774 0.26386705 0.21249352] ... [ 0.21249352 -0.4471262 -0.18811397 ... 0.9115232 0.49574733 0.21249352] [ 0.21249352 0.38710922 1.1794574 ... -0.83850026 -0.4811024 0.21249352] [ 0.21249352 0.59456414 0.14480856 ... -0.42933536 -0.6578901 0.21249352]] [[ 0.21249352 0.88020706 1.1410058 ... -0.9602337 -0.36886978 0.21249352] [ 0.21249352 -0.79275787 0.4985785 ... -0.8484994 0.78655976 0.21249352] [ 0.21249352 -0.81169486 -0.031927 ... 0.26280493 0.49892867 0.21249352] ... [ 0.21249352 -0.06337635 1.1482246 ... -0.5850442 -0.22283451 0.21249352] [ 0.21249352 -0.7227093 0.38963145 ... -0.42657864 0.7389109 0.21249352] [ 0.21249352 -1.1136402 1.3169658 ... 1.6863755 0.05316572 0.21249352]] [[ 0.21249352 -0.14205174 0.7464607 ... -0.17101516 -0.66389054 0.21249352] [ 0.21249352 -0.93495667 -0.7665057 ... 1.0033724 0.09816619 0.21249352] [ 0.21249352 0.89205724 0.5284907 ... -1.6134742 0.99510217 0.21249352] ... [ 0.21249352 1.2460858 0.58218724 ... 0.05984779 0.31681946 0.21249352] [ 0.21249352 1.1216972 1.0155885 ... 0.12124316 0.05174552 0.21249352] [ 0.21249352 -0.48712838 0.13510874 ... 0.527267 -0.44185764 0.21249352]] [[ 0.21249352 1.692332 0.72875637 ... -0.3316431 -0.842046 0.21249352] [ 0.21249352 0.74292123 0.06628519 ... -0.46369076 1.0332007 0.21249352] [ 0.21249352 1.849741 0.9931528 ... -0.24577193 1.9089507 0.21249352] ... [ 0.21249352 0.18095185 1.3763934 ... 0.80625534 -0.43152863 0.21249352] [ 0.21249352 0.14329584 -1.4602715 ... -0.97493947 -0.23311998 0.21249352] [ 0.21249352 -0.6583017 0.13422343 ... 0.9433658 -0.01344556 0.21249352]] [[ 0.21249352 1.2073393 0.23397669 ... -0.7270159 0.32987472 0.21249352] [ 0.21249352 1.3936594 -0.8023939 ... -0.19594105 1.0751783 0.21249352] [ 0.21249352 0.10795981 -0.12118085 ... 0.3678288 0.10445637 0.21249352] ... [ 0.21249352 -0.15095125 1.6760088 ... 
0.01386851 -0.16679822 0.21249352] [ 0.21249352 -0.19768612 1.3089557 ... 0.23577893 -0.30700994 0.21249352] [ 0.21249352 0.72110337 0.13633016 ... 0.6843352 -0.28901172 0.21249352]]] [[[-0.8419662 0.38061506 -1.445138 ... 0.82474357 -1.1456513 -0.8419662 ] [-0.8419662 1.1715047 -1.1231965 ... -0.6911497 -2.424057 -0.8419662 ] [-0.8419662 2.4751632 -4.0914936 ... -1.3981624 -0.8610437 -0.8419662 ] ... [-0.8419662 3.1924076 -1.8028471 ... -2.500945 -1.5896354 -0.8419662 ] [-0.8419662 -2.765585 -4.2791896 ... 1.979353 1.2581513 -0.8419662 ] [-0.8419662 -2.3401434 -2.262491 ... -0.9997817 0.02516294 -0.8419662 ]] [[-0.8419662 -1.101802 -3.528939 ... 3.6672597 2.5144126 -0.8419662 ] [-0.8419662 2.8463893 -1.6236273 ... 0.69297963 -4.9019136 -0.8419662 ] [-0.8419662 2.2568634 -0.7255535 ... -0.8392375 -0.91784066 -0.8419662 ] ... [-0.8419662 0.59199446 -5.53707 ... 1.2461047 -1.4162414 -0.8419662 ] [-0.8419662 3.5247679 -2.036927 ... -0.07029885 0.09275395 -0.8419662 ] [-0.8419662 1.6130662 -4.042251 ... -3.8609648 1.4621031 -0.8419662 ]] [[-0.8419662 0.21136278 -3.3511584 ... 0.6747102 3.1756892 -0.8419662 ] [-0.8419662 4.2572775 1.8009801 ... -5.657462 -2.04458 -0.8419662 ] [-0.8419662 -3.4587274 -1.0761127 ... 5.799354 -2.764055 -0.8419662 ] ... [-0.8419662 -1.5711858 -0.3498646 ... -0.66254675 0.5393651 -0.8419662 ] [-0.8419662 -6.983708 -0.44149446 ... -1.672128 1.3650248 -0.8419662 ] [-0.8419662 1.870264 -1.1564019 ... -0.34339756 -0.30219412 -0.8419662 ]] [[-0.8419662 -5.0418615 -3.2120159 ... 1.1638477 0.18689817 -0.8419662 ] [-0.8419662 -1.4773278 -0.6541475 ... 4.167783 -5.7489147 -0.8419662 ] [-0.8419662 -7.202835 -4.4085827 ... 0.22408873 -6.3874445 -0.8419662 ] ... [-0.8419662 0.8107572 -4.526613 ... -3.777861 -0.3683438 -0.8419662 ] [-0.8419662 -2.5474758 3.3164272 ... 5.7294364 1.9755099 -0.8419662 ] [-0.8419662 3.3193617 0.0923835 ... -4.1149635 -2.845898 -0.8419662 ]] [[-0.8419662 -5.1658664 1.5964456 ... 
1.8061678 0.96586686 -0.8419662 ] [-0.8419662 -4.444094 2.866776 ... 0.7162835 -4.6154437 -0.8419662 ] [-0.8419662 0.0424931 2.2906663 ... -2.1285331 -1.972722 -0.8419662 ] ... [-0.8419662 -1.0643237 -5.248872 ... 1.47733 1.7369437 -0.8419662 ] [-0.8419662 -0.7617203 -2.1327572 ... 0.2716747 1.8457968 -0.8419662 ] [-0.8419662 -2.4993203 0.6686991 ... -4.1511703 4.2565494 -0.8419662 ]]] [[[ 0.26968017 1.2654167 -0.22977641 ... 1.8566548 0.87558913 0.26968017] [ 0.26968017 0.97978437 0.8333564 ... -0.3111308 -0.4206032 0.26968017] [ 0.26968017 2.6006904 -1.844046 ... 0.3739432 -1.1347165 0.26968017] ... [ 0.26968017 2.435846 -1.3888284 ... -0.78309786 0.5514067 0.26968017] [ 0.26968017 0.19042934 0.7018968 ... 1.1120139 1.4658576 0.26968017] [ 0.26968017 -0.1013487 -0.05820221 ... 0.9704486 0.57324845 0.26968017]] [[ 0.26968017 -1.8457056 -0.32772508 ... 3.2687519 2.1376367 0.26968017] [ 0.26968017 1.0682299 -0.42435697 ... 1.198724 -2.1159303 0.26968017] [ 0.26968017 2.406004 0.1104034 ... 2.4641767 -0.3456281 0.26968017] ... [ 0.26968017 1.9385873 -1.8559262 ... 1.3587254 -0.6094543 0.26968017] [ 0.26968017 3.2473638 1.7172887 ... -1.077655 -0.1498375 0.26968017] [ 0.26968017 1.7282732 -0.77008 ... -3.3060148 2.2819078 0.26968017]] [[ 0.26968017 0.77056766 -1.4881122 ... -0.65170455 4.192835 0.26968017] [ 0.26968017 2.0924435 -0.716694 ... -0.938604 -0.8770853 0.26968017] [ 0.26968017 -1.2615976 -0.7760664 ... 0.96416664 -0.47391298 0.26968017] ... [ 0.26968017 -3.077347 -0.6712297 ... -1.8088771 0.5246073 0.26968017] [ 0.26968017 -0.02265948 -0.9386581 ... 1.6698827 0.58032227 0.26968017] [ 0.26968017 1.4022343 -1.6001407 ... -0.3434722 0.9047543 0.26968017]] [[ 0.26968017 -0.57638943 -0.6196563 ... 0.40625164 1.3304564 0.26968017] [ 0.26968017 0.44594887 -0.25311223 ... 1.5372806 -1.5146376 0.26968017] [ 0.26968017 -2.8182833 0.9903139 ... 1.0319097 -4.0855923 0.26968017] ... [ 0.26968017 1.4069916 0.05913159 ... 
-1.6273847 2.2211914 0.26968017] [ 0.26968017 -0.47913203 2.1879063 ... 3.357184 0.8234242 0.26968017] [ 0.26968017 2.2851992 -0.94226515 ... -0.97185445 1.8847516 0.26968017]] [[ 0.26968017 -0.44538948 2.6376178 ... -0.590003 -0.38976982 0.26968017] [ 0.26968017 1.0870651 2.8849108 ... 1.8117688 0.6703589 0.26968017] [ 0.26968017 1.5424618 1.7681469 ... -0.2628512 0.8788692 0.26968017] ... [ 0.26968017 0.12665302 -0.9155997 ... 1.0396272 -1.307869 0.26968017] [ 0.26968017 -0.57163227 -1.4972023 ... 1.1028614 3.088454 0.26968017] [ 0.26968017 -0.24447075 0.01790109 ... -0.23691043 2.5336847 0.26968017]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} 1] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_978.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.356562}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.9243 (2,1,1,.,.) = -1.1018 (3,1,1,.,.) = -0.7676 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[-2.136981 0.35656217 1.1207842 ... 1.1108775 0.35656217 0.5547596 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-0.19182214 0.35656217 0.94340885 ... 3.2240784 0.35656217 0.37619504] ... [ 1.2332255 0.35656217 -1.1090345 ... 1.4384055 0.35656217 1.7762283 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-0.58733785 0.35656217 1.557368 ... 0.5911166 0.35656217 0.67143255]] [[ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] ... [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217]] [[ 0.2288537 0.35656217 1.8351837 ... 
-3.4323678 0.35656217 2.2278643 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.9045516 0.35656217 -0.72064734 ... -1.3705022 0.35656217 -1.8986135 ] ... [ 0.8842151 0.35656217 4.020628 ... -0.6778115 0.35656217 2.8874512 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 3.0440865 0.35656217 -0.47942552 ... -2.4619777 0.35656217 1.8123379 ]] ... [[-0.28957823 0.35656217 1.8082795 ... 2.3222857 0.35656217 2.242486 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-0.0618667 0.35656217 1.4355814 ... -2.083423 0.35656217 1.8167578 ] ... [ 0.9547144 0.35656217 -1.2876258 ... 2.3824086 0.35656217 1.212835 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-0.82379174 0.35656217 -1.2110356 ... 1.0048106 0.35656217 -1.5360328 ]] [[ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] ... [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217]] [[ 1.5921141 0.35656217 -0.45128223 ... -1.9836016 0.35656217 1.0332111 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-0.35925046 0.35656217 1.1506158 ... -0.72147334 0.35656217 0.668299 ] ... [-0.5682063 0.35656217 1.8114297 ... -1.0615675 0.35656217 -2.5952 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-1.8205681 0.35656217 1.1062162 ... 0.41551536 0.35656217 0.39231783]]]]]; ov_res: [[[[[-2.136981 0.35656217 1.1207842 ... 1.1108775 0.35656217 0.5547596 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-0.19182214 0.35656217 0.94340885 ... 3.2240784 0.35656217 0.37619504] ... [ 1.2332255 0.35656217 -1.1090345 ... 
1.4384055 0.35656217 1.7762283 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-0.58733785 0.35656217 1.557368 ... 0.5911166 0.35656217 0.67143255]] [[ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] ... [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217]] [[ 0.2288537 0.35656217 1.8351837 ... -3.4323678 0.35656217 2.2278643 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.9045516 0.35656217 -0.72064734 ... -1.3705022 0.35656217 -1.8986135 ] ... [ 0.8842151 0.35656217 4.020628 ... -0.6778115 0.35656217 2.8874512 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 3.0440865 0.35656217 -0.47942552 ... -2.4619777 0.35656217 1.8123379 ]] ... [[-0.28957823 0.35656217 1.8082795 ... 2.3222857 0.35656217 2.242486 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-0.0618667 0.35656217 1.4355814 ... -2.083423 0.35656217 1.8167578 ] ... [ 0.9547144 0.35656217 -1.2876258 ... 2.3824086 0.35656217 1.212835 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-0.82379174 0.35656217 -1.2110356 ... 1.0048106 0.35656217 -1.5360328 ]] [[ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] ... [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217]] [[ 1.5921141 0.35656217 -0.45128223 ... 
-1.9836016 0.35656217 1.0332111 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-0.35925046 0.35656217 1.1506158 ... -0.72147334 0.35656217 0.668299 ] ... [-0.5682063 0.35656217 1.8114297 ... -1.0615675 0.35656217 -2.5952 ] [ 0.35656217 0.35656217 0.35656217 ... 0.35656217 0.35656217 0.35656217] [-1.8205681 0.35656217 1.1062162 ... 0.41551536 0.35656217 0.39231783]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [1, 1, 1], 'dilations': [2, 2, 2], 'groups': 1, 'output_padding': [1, 1, 1], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_980.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 1, 1]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.965759}]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.2700 (2,1,1,.,.) = -1.6812 (3,1,1,.,.) = -0.1577 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ]] [[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -2.6518085 -0.9657586 ... 0.6613644 -0.9657586 0.29832047] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 5.7579584 -0.9657586 ... 0.95447856 -0.9657586 2.0937922 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -1.5629314 -0.9657586 ... 1.9373717 -0.9657586 1.3726418 ]] [[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... 
-0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ]] ... [[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -3.3157 -0.9657586 ... -0.67774224 -0.9657586 -0.105259 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 0.9425902 -0.9657586 ... -4.30652 -0.9657586 0.80102485] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -3.166421 -0.9657586 ... 0.5864716 -0.9657586 -0.36476755]] [[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ]] [[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 0.05945688 -0.9657586 ... -2.6748276 -0.9657586 -1.0056522 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 -0.19844353 -0.9657586 ... -1.9458528 -0.9657586 -2.8721712 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -2.1217887 -0.9657586 ... -3.3270686 -0.9657586 -0.4046231 ]]]]]; ov_res: [[[[[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 -0.9657586 -0.9657586 ... 
-0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ]] [[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -2.6518085 -0.9657586 ... 0.6613644 -0.9657586 0.29832047] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 5.7579584 -0.9657586 ... 0.95447856 -0.9657586 2.0937922 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -1.5629314 -0.9657586 ... 1.9373717 -0.9657586 1.3726418 ]] [[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ]] ... [[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -3.3157 -0.9657586 ... -0.67774224 -0.9657586 -0.105259 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 0.9425902 -0.9657586 ... -4.30652 -0.9657586 0.80102485] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -3.166421 -0.9657586 ... 0.5864716 -0.9657586 -0.36476755]] [[-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ]] [[-0.9657586 -0.9657586 -0.9657586 ... 
-0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 0.05945688 -0.9657586 ... -2.6748276 -0.9657586 -1.0056522 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] ... [-0.9657586 -0.19844353 -0.9657586 ... -1.9458528 -0.9657586 -2.8721712 ] [-0.9657586 -0.9657586 -0.9657586 ... -0.9657586 -0.9657586 -0.9657586 ] [-0.9657586 -2.1217887 -0.9657586 ... -3.3270686 -0.9657586 -0.4046231 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 2, 2, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_982.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, 2, 1, strides=[12, 4, 2, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[-1.922836 -1.3674282 -0.24700075 ... 2.701504 -1.146168 -1.9925871 ] [-2.5089662 4.270332 -1.5986091 ... 2.2057843 1.230188 1.7179594 ] [-0.8879955 -4.487578 0.20169117 ... 0.8361307 -2.6419404 -5.1418805 ] ... [-0.49785906 0.19265926 1.0873444 ... 2.7081618 -4.3301806 -0.20633005] [ 3.2761192 0.31609824 -2.034173 ... 0.37185556 -0.41434887 -2.96023 ] [ 0.20382316 2.3820574 -0.99799025 ... -0.9713965 0.27022618 1.5191116 ]] [[ 0.7032315 1.0037627 -4.218412 ... 1.5466905 2.5701013 3.3785298 ] [ 2.019321 -0.5722374 2.5100527 ... -1.7070134 -5.7544065 -0.85201156] [ 2.7101119 3.346553 -4.6138573 ... 0.2786502 2.9659052 2.3745558 ] ... [ 4.3097334 3.142785 3.6602108 ... -4.7304606 -6.850432 2.1232157 ] [-3.9654615 -2.6726034 -1.6091871 ... -1.2260548 -0.36461127 1.239555 ] [ 1.1489707 1.9981788 -0.08185291 ... -1.6319478 -2.6323452 1.3624518 ]] [[-5.450168 1.6359022 3.1898327 ... 0.95019436 -1.0692804 0.08008218] [ 6.5633826 0.91318536 -1.3063973 ... -4.481791 1.1714908 3.0701413 ] [-4.4339514 -0.0354073 2.8649883 ... -0.10474056 6.4192524 2.4623432 ] ... [ 0.19197083 -2.2679508 8.329144 ... -2.2121887 1.0501916 -0.5520797 ] [-3.2437186 -3.8605533 3.5013647 ... 
-0.03863312 -6.4077826 3.1048443 ] [-0.01089919 -2.3365283 1.8375862 ... -0.43946636 -2.1296275 1.85732 ]] ... [[-1.9163406 -0.37211257 -0.0874342 ... -1.6125188 3.2426734 1.7152326 ] [ 2.1359754 -2.604324 1.8424482 ... -7.5738544 -2.5246027 -1.2192392 ] [ 2.1644213 -0.21046686 -1.7022176 ... 7.113252 3.7121735 -2.1327844 ] ... [-5.0702 1.1992183 2.271394 ... -2.3281107 6.9759235 -3.851448 ] [-1.9331396 -7.1400204 -1.4487966 ... 7.715377 0.6947546 -1.9714832 ] [ 0.5285387 -1.101154 1.727555 ... -1.402645 0.08656653 -2.4067037 ]] [[ 2.538242 -5.5669804 4.7264695 ... 0.2546413 -0.62267774 3.7979956 ] [ 2.8225477 5.460346 5.4980273 ... -4.4517574 -2.3181589 -3.136805 ] [-7.2942715 1.0671669 -3.7611637 ... -2.985989 -3.5631816 -0.38977242] ... [ 2.6964035 -7.3555055 -0.55173314 ... -1.2861907 0.5419705 -0.45196807] [-2.1566143 2.4960961 0.45817924 ... 3.8360603 -2.7580554 -4.073941 ] [-1.3238877 -5.23886 -0.460734 ... -2.8687873 -0.29605612 -1.1525887 ]] [[-0.90334916 -1.2448934 0.92091626 ... -4.7492585 0.3264028 0.49991518] [-0.3547567 0.93559086 0.9419753 ... 0.6751267 0.5241359 0.60577786] [-4.832039 0.72768766 -0.09165812 ... 2.4291277 1.5464536 -4.430273 ] ... [ 4.2121024 1.142739 -5.4190784 ... 1.050791 1.7620982 1.1073259 ] [-4.207443 -3.7859228 -1.2361953 ... -5.691618 -1.2424356 -3.4376242 ] [ 3.221185 3.597093 0.49681202 ... -2.03789 -0.44102216 1.1948853 ]]] [[[ 0.354579 0.5185766 0.18087685 ... 2.3454661 -0.11977167 0.72459865] [-2.3638208 1.0591363 0.10653031 ... 5.0598493 -1.3729551 -1.8796463 ] [-4.6380777 -0.0798142 -0.6723973 ... 0.17334282 1.1262985 1.8355381 ] ... [ 1.9551153 2.1862152 1.100348 ... -2.4605184 1.0516827 1.6139314 ] [ 0.8526956 3.6910696 -1.9510596 ... 0.50058544 -2.6198487 -0.09964661] [ 1.0633245 -0.32911503 -2.2235277 ... 1.1941339 -0.04843649 -2.853035 ]] [[-2.1898181 -0.7500707 -1.6824614 ... -0.7498536 -0.22685921 -0.52589583] [-0.6751312 -2.3497338 -2.9766493 ... 
-1.5726702 1.5985864 2.271501 ] [-0.27701533 0.73003775 3.1575904 ... 1.979892 -2.229397 -3.9502583 ] ... [-0.9899022 -3.5581768 -1.0313938 ... -0.71531284 0.41245002 -0.13407546] [ 0.18674296 -1.5688554 3.2931724 ... 0.2420665 -6.857999 -0.4435575 ] [-1.8936489 -1.8986387 -2.6605778 ... -1.0472263 -1.3239264 -2.134604 ]] [[ 0.26727024 -0.82559514 -0.21269453 ... -0.04898948 1.9449456 0.02332768] [-5.8765903 0.6965934 0.987386 ... -1.1813173 -1.5570602 0.7673882 ] [-4.801039 -2.180986 -2.7435436 ... -0.8708918 -1.9635228 1.7548962 ] ... [-1.6304187 3.8517518 -2.5741386 ... 0.8736475 1.6204293 2.79858 ] [-3.5865989 0.39681345 1.2851459 ... -2.4691088 0.5777328 -0.3847409 ] [-3.2804704 -1.6002269 -1.095669 ... -0.7781359 -1.2439133 0.01837254]] ... [[-0.9269719 -1.0470227 2.5672026 ... 3.4496684 2.2079806 1.7373444 ] [-4.771144 -1.651483 3.7703938 ... -2.5399098 4.845497 4.811503 ] [ 0.17728138 -0.16350555 2.712053 ... -1.7415645 -1.5369066 -1.5914993 ] ... [-0.27952772 -2.74292 0.10764682 ... -0.7336397 -0.64449507 0.38696274] [-3.8566856 -4.0225134 2.0939972 ... -1.2675049 1.6884985 -0.73578733] [-1.6020321 -3.8057642 -1.0785072 ... 1.9567399 1.776373 -1.2832854 ]] [[-2.8998494 -1.7193034 0.2017467 ... -3.6418705 1.613312 1.9679148 ] [ 1.0617186 -5.124713 2.9893882 ... -2.148039 1.4312946 0.4231224 ] [ 0.15976238 -1.5694582 4.0291224 ... -5.884842 -4.6032743 0.2163411 ] ... [-0.4618809 -2.8445904 0.40278628 ... 1.0105355 -1.6415035 0.895528 ] [ 1.4551439 -4.3579545 -4.735587 ... 1.5815487 2.7201438 4.26542 ] [-1.9782166 -0.05703628 -1.1283095 ... 4.337256 -0.9023961 -1.1598711 ]] [[ 2.1167374 -0.8264336 -0.07856407 ... 1.5018448 -0.15874806 0.95655966] [ 1.4431448 -0.7250439 3.3879175 ... -2.41278 -2.203529 1.6329825 ] [-1.1577922 1.1245977 0.75242466 ... 0.47149444 -1.3731035 -1.1111623 ] ... [-0.1323238 -0.9799045 -1.2890303 ... 2.2662287 0.05127084 0.5833898 ] [-0.29873356 0.13081706 0.79544234 ... 
2.3409526 2.1366572 1.5214088 ] [-0.40413442 1.0173163 0.17801073 ... 1.4173175 -0.34966987 0.12142988]]] [[[-1.1754745 -0.06403415 1.1766601 ... 0.5621603 0.23022348 -0.02869687] [-3.0519733 -1.6392018 1.8181142 ... 0.42402452 0.7157599 -0.53021204] [-2.866793 1.8878165 0.48636714 ... 1.5714676 1.838846 -0.5437242 ] ... [ 0.13127357 1.2931511 -0.7528336 ... -1.1485394 1.7398982 0.3210015 ] [-1.7187705 -1.4391645 0.12190211 ... 1.3953997 -0.10270089 -1.0226408 ] [-0.07830282 -2.5598407 0.81939805 ... 1.4087733 -0.01727343 -1.8865937 ]] [[ 2.2391038 0.55144906 -0.7067393 ... 3.1467047 0.35334903 2.099918 ] [ 0.15453148 -0.960747 -4.2063246 ... 5.95302 -3.7637358 -4.5376296 ] [-6.6295247 0.07265162 -3.1911879 ... -4.100028 -0.7137008 7.2837877 ] ... [ 0.7358651 3.730218 -1.1491622 ... -4.923261 4.662003 1.1605377 ] [-0.80346525 9.340335 -4.4499607 ... 0.11615777 -3.153941 0.35496277] [ 0.09193528 -1.2488167 -4.4158373 ... 1.4049084 0.99461716 -6.14949 ]] [[-1.5367389 1.1579792 0.8301836 ... -0.93192446 -1.2745203 1.6944959 ] [ 0.5659094 -2.161133 -4.6344585 ... -3.3949153 3.318699 1.3544075 ] [-1.2205082 -1.801383 5.0326023 ... -0.9619707 -0.60674477 -3.2089448 ] ... [ 0.70953465 -9.0015335 -3.3801353 ... -0.91320133 1.5463519 -2.01754 ] [ 6.9844227 0.9622352 2.0755975 ... -2.117006 -7.437097 2.7495997 ] [-2.1607516 -1.9791328 -4.964817 ... -1.9380796 -0.54379046 -0.23866737]] ... [[-1.9115624 -1.1154996 -2.4469552 ... -4.2235613 -1.0523992 -2.2988408 ] [-9.009802 -4.0253706 -4.0885105 ... 3.8974 3.5749097 0.25024176] [-0.4104159 -5.414483 3.50247 ... -0.5896466 0.4370494 5.6571207 ] ... [-1.4206908 -2.9891381 -5.3282266 ... 3.7529752 -2.2039099 -2.2194095 ] [-3.7981212 -4.8527026 3.5967207 ... -0.5755164 4.042459 2.5483308 ] [-3.8044949 -5.300933 -1.5702463 ... -1.6173277 3.0664084 8.076939 ]] [[ 3.8840299 -0.39888722 2.8485613 ... 1.7565652 -0.74741507 -0.6796845 ] [-0.5998206 -0.45644605 -0.46769834 ... 
-2.8639004 5.4914227 6.5828605 ] [ 1.4289443 -0.76069903 6.49612 ... -8.139385 0.9741827 -2.540373 ] ... [ 1.2554353 1.5431101 1.0599601 ... 2.0640597 -3.2736924 -3.088019 ] [-3.8174915 -7.5009074 3.3995612 ... 0.55176425 4.585568 -3.284024 ] [-1.917424 -1.3973129 -0.8476182 ... 5.3564415 1.5186197 -2.3172822 ]] [[-2.6273596 -0.11143137 2.0919921 ... -3.4573932 0.0528899 0.14044353] [ 2.3995385 -3.7870004 1.5478935 ... 1.8240798 1.6581802 2.5893407 ] [-1.1791065 0.43796563 3.5315475 ... -7.6572604 -4.7779484 -3.1550043 ] ... [-1.2131017 -0.3334394 -0.6270791 ... 1.0946321 -5.6742907 0.4187712 ] [ 2.5901575 -8.167447 -5.8771124 ... -0.5722363 3.803567 2.2556849 ] [-1.6637235 3.4216013 0.64266276 ... 5.214694 -1.1943474 0.36952573]]]]]; ov_res: [[[[[-1.922836 -1.3674282 -0.24700075 ... 2.701504 -1.146168 -1.9925871 ] [-2.5089664 4.2703314 -1.5986091 ... 2.205784 1.230188 1.7179595 ] [-0.88799554 -4.487578 0.20169121 ... 0.8361307 -2.6419404 -5.1418805 ] ... [-0.4978591 0.19265923 1.0873444 ... 2.7081618 -4.3301806 -0.20633006] [ 3.2761192 0.31609824 -2.034173 ... 0.3718555 -0.4143489 -2.96023 ] [ 0.20382316 2.3820574 -0.99799025 ... -0.9713965 0.27022618 1.5191116 ]] [[ 0.7032315 1.0037626 -4.2184114 ... 1.5466905 2.5701015 3.37853 ] [ 2.019321 -0.57223743 2.5100522 ... -1.7070134 -5.7544065 -0.8520115 ] [ 2.7101119 3.3465533 -4.6138573 ... 0.27865013 2.9659047 2.3745558 ] ... [ 4.309733 3.1427853 3.660211 ... -4.7304606 -6.850432 2.1232157 ] [-3.965461 -2.6726034 -1.6091871 ... -1.2260545 -0.36461124 1.2395549 ] [ 1.1489707 1.9981788 -0.08185304 ... -1.6319478 -2.6323452 1.3624518 ]] [[-5.450168 1.6359022 3.1898327 ... 0.9501943 -1.0692804 0.08008213] [ 6.5633826 0.9131853 -1.3063972 ... -4.481791 1.171491 3.070141 ] [-4.433951 -0.03540754 2.8649886 ... -0.1047404 6.419253 2.4623432 ] ... [ 0.19197042 -2.2679508 8.329144 ... -2.2121887 1.0501914 -0.55207974] [-3.2437184 -3.860553 3.5013647 ... -0.03863312 -6.4077826 3.1048446 ] [-0.01089917 -2.3365283 1.8375862 ... 
-0.4394664 -2.1296275 1.85732 ]] ... [[-1.9163405 -0.37211263 -0.08743419 ... -1.6125188 3.2426732 1.7152326 ] [ 2.1359754 -2.604324 1.8424485 ... -7.5738535 -2.5246024 -1.2192391 ] [ 2.164421 -0.21046692 -1.7022176 ... 7.113252 3.7121732 -2.1327846 ] ... [-5.0702004 1.199218 2.2713938 ... -2.328111 6.975924 -3.8514485 ] [-1.9331397 -7.140021 -1.4487965 ... 7.7153773 0.6947546 -1.9714831 ] [ 0.5285387 -1.1011539 1.7275549 ... -1.4026451 0.08656652 -2.4067037 ]] [[ 2.538242 -5.56698 4.7264695 ... 0.2546414 -0.62267774 3.7979956 ] [ 2.822548 5.4603457 5.4980273 ... -4.451758 -2.3181586 -3.1368055 ] [-7.2942715 1.0671668 -3.7611635 ... -2.9859889 -3.5631816 -0.38977236] ... [ 2.6964035 -7.3555064 -0.55173314 ... -1.2861909 0.5419701 -0.45196787] [-2.1566143 2.496096 0.4581792 ... 3.8360603 -2.7580554 -4.0739408 ] [-1.3238877 -5.23886 -0.46073395 ... -2.868787 -0.2960561 -1.1525886 ]] [[-0.90334916 -1.2448934 0.92091626 ... -4.7492585 0.3264028 0.49991518] [-0.3547567 0.93559086 0.9419754 ... 0.6751268 0.5241359 0.60577786] [-4.832039 0.7276876 -0.09165802 ... 2.4291277 1.5464536 -4.430273 ] ... [ 4.2121024 1.142739 -5.4190784 ... 1.050791 1.7620982 1.1073259 ] [-4.2074437 -3.7859228 -1.2361953 ... -5.6916175 -1.2424353 -3.4376242 ] [ 3.221185 3.597093 0.49681202 ... -2.03789 -0.44102216 1.1948853 ]]] [[[ 0.354579 0.5185766 0.18087685 ... 2.3454661 -0.11977167 0.72459865] [-2.3638208 1.0591363 0.10653035 ... 5.0598493 -1.3729551 -1.8796464 ] [-4.6380773 -0.07981423 -0.6723973 ... 0.17334273 1.1262987 1.8355381 ] ... [ 1.9551154 2.186215 1.100348 ... -2.4605181 1.0516828 1.6139314 ] [ 0.85269564 3.6910696 -1.9510595 ... 0.5005853 -2.6198487 -0.09964663] [ 1.0633245 -0.32911503 -2.2235277 ... 1.1941339 -0.04843649 -2.853035 ]] [[-2.1898184 -0.7500708 -1.6824613 ... -0.7498536 -0.2268592 -0.52589583] [-0.6751311 -2.3497338 -2.9766486 ... -1.5726702 1.5985864 2.2715008 ] [-0.2770156 0.7300377 3.1575904 ... 1.9798921 -2.229397 -3.9502583 ] ... 
[-0.98990214 -3.558177 -1.0313936 ... -0.7153128 0.41245005 -0.13407542] [ 0.18674302 -1.5688552 3.2931728 ... 0.24206664 -6.857998 -0.4435576 ] [-1.8936489 -1.8986388 -2.660578 ... -1.0472263 -1.3239264 -2.1346042 ]] [[ 0.26727024 -0.825595 -0.2126944 ... -0.04898951 1.9449457 0.02332767] [-5.87659 0.6965934 0.9873859 ... -1.1813173 -1.5570601 0.76738816] [-4.801039 -2.1809857 -2.7435436 ... -0.8708919 -1.9635228 1.7548962 ] ... [-1.6304188 3.851751 -2.5741382 ... 0.8736475 1.6204293 2.7985797 ] [-3.586599 0.39681327 1.2851456 ... -2.4691086 0.57773286 -0.384741 ] [-3.2804701 -1.6002268 -1.0956689 ... -0.7781359 -1.2439133 0.01837257]] ... [[-0.92697173 -1.0470227 2.5672026 ... 3.4496684 2.2079806 1.7373445 ] [-4.7711434 -1.6514833 3.7703938 ... -2.5399098 4.8454967 4.811503 ] [ 0.17728135 -0.16350538 2.712053 ... -1.7415642 -1.5369067 -1.5914992 ] ... [-0.27952775 -2.7429197 0.10764696 ... -0.7336395 -0.64449495 0.3869628 ] [-3.856686 -4.022514 2.0939968 ... -1.2675049 1.6884985 -0.73578745] [-1.6020321 -3.805764 -1.0785071 ... 1.95674 1.776373 -1.2832854 ]] [[-2.8998492 -1.7193034 0.20174664 ... -3.6418703 1.6133119 1.9679148 ] [ 1.0617188 -5.124714 2.9893887 ... -2.1480393 1.4312946 0.42312226] [ 0.15976228 -1.5694584 4.0291224 ... -5.8848424 -4.6032743 0.21634099] ... [-0.46188095 -2.8445902 0.40278628 ... 1.0105356 -1.6415037 0.89552796] [ 1.4551443 -4.357955 -4.735587 ... 1.5815487 2.7201438 4.2654204 ] [-1.9782165 -0.05703628 -1.1283095 ... 4.3372564 -0.9023961 -1.1598711 ]] [[ 2.1167374 -0.8264336 -0.07856407 ... 1.5018448 -0.15874806 0.95655966] [ 1.4431449 -0.7250439 3.3879178 ... -2.4127798 -2.203529 1.6329825 ] [-1.1577922 1.1245977 0.75242466 ... 0.47149453 -1.3731035 -1.1111623 ] ... [-0.13232373 -0.97990453 -1.2890303 ... 2.266229 0.05127074 0.58338976] [-0.29873356 0.1308171 0.79544234 ... 2.3409526 2.1366572 1.5214087 ] [-0.40413442 1.0173163 0.17801073 ... 1.4173175 -0.34966987 0.12142988]]] [[[-1.1754745 -0.06403415 1.1766601 ... 
0.5621603 0.23022348 -0.02869687] [-3.0519733 -1.6392018 1.8181142 ... 0.42402452 0.7157599 -0.53021204] [-2.866793 1.8878164 0.4863671 ... 1.5714675 1.838846 -0.54372424] ... [ 0.13127357 1.2931511 -0.7528336 ... -1.1485394 1.7398982 0.32100144] [-1.7187704 -1.4391645 0.1219021 ... 1.3953997 -0.10270094 -1.0226408 ] [-0.07830282 -2.5598407 0.81939805 ... 1.4087733 -0.01727343 -1.8865937 ]] [[ 2.2391038 0.551449 -0.70673937 ... 3.1467047 0.35334912 2.0999177 ] [ 0.1545314 -0.96074694 -4.2063246 ... 5.9530196 -3.7637362 -4.5376296 ] [-6.6295247 0.07265183 -3.1911876 ... -4.100027 -0.71370095 7.2837873 ] ... [ 0.73586524 3.730218 -1.149162 ... -4.923261 4.6620026 1.1605377 ] [-0.80346507 9.340334 -4.4499607 ... 0.11615778 -3.1539407 0.35496286] [ 0.09193523 -1.2488166 -4.415838 ... 1.4049084 0.9946171 -6.14949 ]] [[-1.5367388 1.1579794 0.8301836 ... -0.9319244 -1.2745204 1.6944959 ] [ 0.56590974 -2.161133 -4.634459 ... -3.3949153 3.318699 1.3544077 ] [-1.2205086 -1.801383 5.032603 ... -0.9619706 -0.6067447 -3.208945 ] ... [ 0.7095346 -9.0015335 -3.3801355 ... -0.91320133 1.546352 -2.0175402 ] [ 6.984423 0.9622352 2.0755973 ... -2.117006 -7.4370975 2.7495997 ] [-2.1607518 -1.9791329 -4.964817 ... -1.9380795 -0.5437905 -0.23866738]] ... [[-1.9115624 -1.1154995 -2.4469552 ... -4.2235613 -1.0523993 -2.2988408 ] [-9.009802 -4.025371 -4.0885105 ... 3.8974 3.5749097 0.25024173] [-0.41041583 -5.414483 3.5024698 ... -0.5896464 0.43704948 5.657121 ] ... [-1.420691 -2.9891381 -5.328227 ... 3.7529755 -2.2039099 -2.2194097 ] [-3.7981212 -4.852702 3.5967205 ... -0.5755166 4.0424585 2.5483308 ] [-3.804495 -5.300933 -1.5702463 ... -1.6173278 3.0664084 8.076939 ]] [[ 3.8840299 -0.39888722 2.8485613 ... 1.7565652 -0.7474151 -0.6796846 ] [-0.5998206 -0.45644608 -0.46769845 ... -2.8639004 5.491422 6.58286 ] [ 1.4289445 -0.7606994 6.49612 ... -8.139385 0.9741828 -2.540373 ] ... [ 1.2554353 1.5431103 1.05996 ... 2.0640593 -3.2736924 -3.0880191 ] [-3.8174913 -7.5009074 3.3995614 ... 
0.5517642 4.585568 -3.284024 ] [-1.917424 -1.3973129 -0.84761804 ... 5.356441 1.5186197 -2.3172824 ]] [[-2.6273596 -0.11143137 2.0919921 ... -3.4573932 0.0528899 0.14044353] [ 2.3995385 -3.787001 1.5478936 ... 1.8240796 1.6581801 2.5893404 ] [-1.1791065 0.43796566 3.531548 ... -7.6572604 -4.7779484 -3.1550043 ] ... [-1.2131016 -0.33343932 -0.6270792 ... 1.0946324 -5.6742907 0.41877124] [ 2.5901573 -8.167447 -5.8771124 ... -0.57223636 3.8035672 2.2556849 ] [-1.6637235 3.4216013 0.64266276 ... 5.214694 -1.1943474 0.36952573]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 2, 2, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_984.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 2, 2, 1, strides=[12, 4, 2, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[-3.9667535e+00 1.9814060e+00 -3.7212734e+00 ... 1.4156942e+00 -2.5850384e+00 -5.8932714e+00] [-4.1602039e-01 -2.7306621e+00 -2.2914863e+00 ... 2.0206881e+00 -3.6730710e-01 6.6158068e-01] [-4.3358064e+00 -1.0201343e+00 -1.0309517e+00 ... 6.6677386e-01 -4.4430823e+00 3.5478253e+00] ... [-4.9629480e-01 3.1006522e+00 -3.2051110e+00 ... 2.4250612e+00 1.8667793e+00 5.8089191e-01] [ 1.4411997e+00 3.7865341e+00 -1.2868115e-01 ... 6.4649501e+00 -5.4449692e+00 4.5986319e+00] [ 7.6591760e-01 1.1139903e-01 -1.6001946e+00 ... -3.1972179e-01 3.2549493e+00 -2.6802881e+00]] [[ 8.4842092e-01 -8.1971467e-01 -1.7642416e+00 ... -1.1402621e-01 3.1698167e+00 5.4571223e+00] [-4.9888849e+00 3.5084093e+00 4.7266216e+00 ... -7.1472412e-01 5.6647830e+00 3.0992274e+00] [-4.5714684e+00 -3.1163824e+00 8.3038149e+00 ... 4.4969101e+00 6.8141848e-01 4.1914219e-01] ... [-6.8762815e-01 2.6460071e+00 4.4597018e-01 ... -1.3395343e+00 1.9537852e+00 2.2337000e+00] [ 4.6154463e-01 1.4928597e+00 -1.8558544e+00 ... 5.5886632e-01 1.6525664e+00 -5.8442247e-01] [ 1.2648960e+00 2.1963978e+00 3.2625630e+00 ... 5.2935033e+00 2.7327068e+00 -6.7339110e+00]] [[ 1.4440440e+00 -1.7535540e+00 -2.0150611e+00 ... 
1.9592217e+00 -1.6926036e+00 1.1321564e-01] [-3.2974019e+00 6.3750839e+00 -3.8587759e+00 ... -6.0636506e+00 -1.2162912e+00 2.2839785e+00] [-1.6996335e+00 6.0833532e-01 -3.2883828e+00 ... -2.7843280e+00 2.6358976e+00 -3.5908055e+00] ... [-1.3529605e+00 5.8178658e+00 -2.4291155e+00 ... -1.7718853e+00 8.3431590e-01 -2.3422301e+00] [-1.8456435e+00 5.0537181e+00 1.2113978e+00 ... -6.5357251e+00 -1.5967877e+00 -4.0136685e+00] [-1.8163760e-01 1.7857851e-02 -3.6877861e+00 ... 1.9820478e+00 6.6319399e+00 -9.0243500e-01]] ... [[ 5.9455705e+00 -3.0269284e+00 -1.3224598e+00 ... 3.6357594e+00 -4.0856627e-01 -9.1441212e+00] [ 8.1049335e-01 4.3649454e+00 2.1673532e+00 ... 9.1062886e-01 -3.2616580e+00 -3.2124320e-01] [ 3.5554349e+00 1.7412975e+00 2.9844444e+00 ... -9.6778429e-01 -1.0829163e+00 -4.5644517e+00] ... [ 5.0698948e+00 -2.7041924e+00 2.6028380e+00 ... 2.5880363e+00 8.0763906e-01 1.3365433e+00] [-2.0859804e+00 -1.8139105e+00 1.9077004e-01 ... 1.1264273e+00 4.7941022e+00 3.0164618e+00] [ 3.6519673e+00 -4.4930482e+00 -8.7914896e-01 ... 9.8597936e-02 -2.0435750e+00 -4.7264614e+00]] [[-1.6156278e+00 -7.7578884e-01 -3.8823547e+00 ... -4.3119302e+00 -4.6067090e+00 -3.7549467e+00] [-1.5424255e+00 1.6358407e-01 -3.2691234e-01 ... 2.2481591e-01 2.3627410e+00 -2.1115456e+00] [ 4.1955366e+00 -4.3135958e+00 -2.0190704e+00 ... 2.0501158e+00 -5.2319064e+00 9.9895817e-01] ... [ 5.7640390e+00 5.9315646e-01 6.1679584e-01 ... 3.7741196e+00 2.4923246e+00 -2.4081790e+00] [-1.0251663e+00 4.6458030e+00 4.4216695e+00 ... 4.3062951e-02 -1.2305671e+00 -5.2100286e+00] [-6.8318236e-01 4.5395017e-02 -2.9682720e+00 ... -2.0084357e+00 3.4238694e+00 5.2180223e-02]] [[ 1.1644383e-02 1.5626584e+00 -1.0324291e+00 ... 1.5323509e+00 2.5534501e+00 4.7558084e+00] [-3.6219287e+00 2.9679033e-01 3.3951896e-01 ... -1.0440840e+00 2.4212049e-01 -4.5795946e+00] [-4.8624678e+00 -6.8747497e+00 -3.8827736e+00 ... -1.2883301e+00 2.8772593e+00 1.1002769e+00] ... [-4.7530761e+00 -9.3848600e+00 -5.9824939e+00 ... 
-2.0772991e+00 3.9069347e+00 -2.3759414e-01] [ 1.9054555e+00 -3.6192298e+00 -5.4704037e+00 ... 1.2274017e+00 3.6763370e+00 -3.8669660e+00] [-1.1362659e-01 9.3696982e-01 1.2931477e+00 ... 5.5588937e+00 3.2534856e-01 -4.9144015e+00]]] [[[ 3.0096159e+00 2.2134600e+00 -1.5074114e+00 ... -3.7770431e+00 8.5373039e+00 2.5347304e+00] [-2.2619965e+00 4.0967493e+00 -8.7317133e-01 ... 1.2076534e+00 4.8850183e+00 2.7608593e+00] [ 1.6648193e-01 -4.2865596e+00 3.8681939e+00 ... 1.8337020e+00 3.7171988e+00 -1.7941495e+00] ... [-1.8247893e+00 -3.5115016e+00 2.2754226e+00 ... -1.0786112e+01 4.8724699e+00 2.5842295e+00] [-5.9975381e+00 -7.4351954e+00 2.4190359e+00 ... -6.7850196e-01 3.2019868e+00 -5.5728960e+00] [ 1.3443229e-01 2.0229549e+00 6.2782159e+00 ... 7.3911853e+00 3.1368840e+00 -6.1090136e+00]] [[ 2.2791371e+00 5.2081671e+00 -4.1767943e-01 ... -1.5663114e+00 -6.0104036e-01 -9.3356266e+00] [-5.3123870e+00 1.8237506e+00 -5.4172072e+00 ... -1.6113533e+00 -6.8109360e+00 1.1105534e+00] [ 7.1443558e+00 -4.4817104e+00 -3.5489500e+00 ... -4.3552909e+00 1.5563616e+00 3.0909712e+00] ... [-2.9336562e+00 5.3049002e+00 -7.8008723e-01 ... -1.0351074e+00 1.1593084e+00 -7.5554943e-01] [ 3.8063977e+00 -5.4634456e-02 -1.3919524e+00 ... 5.1927078e-01 -2.4929280e+00 -3.4933460e+00] [ 2.9387038e+00 1.8869215e+00 -2.1097378e-01 ... -9.7316637e+00 1.0790666e+01 -5.0606217e+00]] [[-3.2733629e+00 -3.9685962e+00 2.3345942e+00 ... 4.6338458e+00 1.2454631e+00 3.5057402e+00] [-2.9135139e+00 -3.6691515e+00 1.0846176e+01 ... 2.3261051e+00 6.2940879e+00 3.8376188e+00] [-2.0667214e+00 -1.5532115e+00 -2.6354120e+00 ... -2.5931561e-01 3.8326530e+00 -1.9659346e+00] ... [ 1.5630877e+00 -7.2197027e+00 2.7188842e+00 ... 2.1833596e+00 3.7840271e+00 7.4484386e+00] [-1.8729607e+00 -3.9728992e+00 1.9374293e+00 ... 3.0151498e+00 5.3453474e+00 -3.8119044e+00] [-6.3058801e+00 4.5194225e+00 4.6139464e+00 ... 8.9727813e-01 -8.7683592e+00 -3.3658628e+00]] ... [[ 6.9525352e+00 -4.6279664e+00 -2.4637105e+00 ... 
-3.9942167e+00 -3.6964324e+00 -7.2271156e+00] [-4.8648133e+00 4.1396084e+00 -3.3388479e+00 ... 2.5159862e+00 -2.0577235e+00 8.1553812e+00] [-4.2073240e+00 -2.3283281e+00 -9.9414148e+00 ... -3.5091457e+00 6.2605679e-01 -3.2697966e+00] ... [-3.7122576e+00 -1.5093462e-02 1.4249529e-01 ... 8.2420082e+00 3.0450351e+00 -1.4676379e+00] [-5.9538800e-01 2.9367542e+00 6.6062391e-01 ... -5.6520300e+00 -6.9271618e-01 -6.7740555e+00] [-2.7322042e-01 4.4911942e+00 -8.2430544e+00 ... -4.7259283e+00 -2.4233119e+00 -3.2199855e+00]] [[-3.4770777e+00 3.9170303e+00 6.1446242e+00 ... -1.8984739e-01 2.2617100e-01 -8.6107022e-01] [-3.8839331e+00 5.6213489e+00 2.6574066e+00 ... 3.9739943e+00 -6.1319022e+00 1.2980039e+00] [ 1.9877136e+00 -5.0377879e+00 -3.4703846e+00 ... -5.9853840e+00 -9.0526873e-01 -4.5822697e+00] ... [ 9.4656515e-01 -3.0879924e+00 -2.0983968e+00 ... 3.5792933e+00 -1.2361771e+00 8.1159389e-01] [ 6.9779748e-01 -6.9206648e+00 -1.1027734e+00 ... -3.8398278e+00 9.0549030e+00 -2.2869341e-01] [ 7.9493623e+00 -1.9262820e+00 6.6665572e-01 ... 1.3762147e+01 -6.1187518e-01 -5.2245444e-01]] [[-1.1471975e-01 -6.7277837e+00 -4.8929152e+00 ... -3.1918864e+00 -1.5588907e+00 -1.6420218e+00] [-4.8604670e+00 6.1207790e+00 7.9267669e+00 ... -6.6140547e+00 -8.2318085e-01 -2.0243409e-01] [ 4.7533903e+00 -1.9675124e+00 -2.0940958e-02 ... -3.4289124e+00 -2.5515935e+00 6.0536571e+00] ... [ 7.9268160e+00 1.0446953e+00 -6.2537951e+00 ... 7.9696460e+00 -2.1515849e+00 -4.3930912e+00] [ 5.3364739e+00 1.5771152e+00 1.1299178e+01 ... -3.1941147e+00 -5.9426813e+00 6.0316408e-01] [-1.1172180e+00 2.7350855e+00 -4.4452014e+00 ... -3.4135509e+00 -2.9998593e+00 7.1578155e+00]]] [[[ 4.5739260e+00 2.6626399e+00 -1.0099223e+00 ... -9.7793716e-01 2.4521186e+00 4.0304485e-01] [-1.8939148e+00 -1.8470006e+00 7.7473660e+00 ... 7.3515183e-01 -1.2876173e+00 1.4044664e+00] [-5.0008172e-01 -4.6559982e+00 2.8575985e+00 ... 1.3685544e+00 -2.1097488e+00 -2.1893322e+00] ... [-4.5759273e+00 3.4763616e-01 1.3546865e+00 ... 
-5.3361220e+00 -4.5420828e+00 7.4190102e+00] [ 6.0845671e+00 4.0753622e+00 -3.8706276e+00 ... 7.9067278e+00 -1.1840221e+00 6.4028525e-01] [ 1.6494381e+00 8.3282214e-01 -8.3909025e+00 ... -5.6381488e-01 -4.3127432e+00 7.1138573e-01]] [[ 1.0213236e+00 -1.9152641e+00 7.3513883e-01 ... 3.8882530e+00 -3.9863987e+00 -7.4927157e-01] [-1.6926923e+00 -7.4999547e+00 -1.0417445e+00 ... 1.8945993e+00 9.7889328e-01 6.2126679e+00] [ 1.3101201e+00 4.1020598e+00 5.0428934e+00 ... -2.0317130e+00 -2.6344850e+00 -5.1341927e-01] ... [ 3.0666617e-01 4.2258186e+00 -8.3139124e+00 ... 2.5944533e+00 -2.5381976e-01 -6.6991634e+00] [ 1.2734194e+00 -1.8506489e+00 6.7382483e+00 ... -1.2997845e+00 9.5267373e-01 2.9040792e+00] [-7.8554106e+00 -3.3011621e-01 4.4576006e+00 ... -1.9560877e+00 -1.2980698e+00 2.4389490e-01]] [[ 1.3232733e+00 4.9888096e+00 4.5912971e+00 ... -3.1115656e+00 -3.7076240e+00 4.1980848e+00] [ 2.3768852e+00 3.2868061e+00 -3.2978406e+00 ... -7.0479250e+00 -7.8533864e-01 -7.1552410e+00] [-1.2643120e+00 2.3379280e-01 4.3998370e-01 ... -7.5851965e-01 -6.5640993e+00 2.1557779e+00] ... [ 2.2387152e+00 1.4501718e+00 6.6627598e+00 ... 6.3897155e-02 3.2501015e-01 7.4360595e+00] [-6.1112313e+00 -2.9461181e+00 -7.1052265e+00 ... -1.9691371e+00 -6.4686036e+00 5.0152934e-01] [ 6.6210737e+00 -3.2724326e+00 -8.6857088e-02 ... 2.6168327e-03 -2.5602975e+00 1.8434358e+00]] ... [[-5.9275155e+00 9.3521242e+00 1.0006571e+00 ... 6.1798632e-01 2.9714507e-01 4.1705618e+00] [-4.0248556e+00 -1.5815159e+00 3.8522861e+00 ... -1.6096886e+00 -2.3467507e+00 -2.0657301e+00] [-1.4382929e-01 -1.2795222e+00 4.2074027e+00 ... 9.4476372e-02 5.1742065e-01 2.8693500e+00] ... [ 4.2261357e+00 2.4199145e+00 1.4018004e+00 ... -2.3296471e+00 1.7240072e+00 3.7318251e-01] [ 3.1347315e+00 7.2976074e+00 -3.3886603e-01 ... 1.2937223e+00 -9.6606045e+00 -6.1434336e+00] [-3.4643810e+00 -6.7577583e-01 9.3656766e-01 ... 3.9050872e+00 8.4066591e+00 5.2048888e+00]] [[ 2.2199626e+00 -6.3096414e+00 -9.9133372e-01 ... 
3.2535362e+00 -1.7806777e+00 -5.1874695e+00] [ 5.5018816e+00 -1.6726116e+00 -4.0280929e+00 ... -1.6938767e+00 7.4742322e+00 4.7599053e+00] [-4.4799337e+00 -3.6709561e+00 1.8853313e+00 ... -1.0095097e+00 3.9566848e+00 -1.8089415e+00] ... [-2.7006447e+00 -1.1356139e+00 -1.8426188e+00 ... 6.7810512e-01 -5.8037934e+00 2.2368741e+00] [-5.3629917e-01 -2.5249481e+00 -3.0762935e+00 ... 6.1349019e-02 6.5578094e+00 -3.7696805e+00] [-3.6427143e-01 -6.1424112e-01 1.9387695e+00 ... -3.0767808e+00 -1.0067671e+01 1.0989844e+00]] [[ 1.9359969e+00 3.3523591e+00 -5.9213896e+00 ... -3.6194112e+00 1.6944940e+00 -2.5916324e+00] [ 1.4468449e+00 -1.3211954e+00 8.8660651e-01 ... 2.1063228e+00 -5.4023352e+00 1.9243910e+00] [ 9.0393038e+00 -3.0268421e+00 -8.9895076e-01 ... 3.9049201e+00 1.5291705e+00 3.7405274e+00] ... [ 5.4110661e+00 5.2571964e+00 -3.3851659e+00 ... 7.9959404e-01 8.7279564e-01 -2.2673519e+00] [-1.5275594e+00 3.7486112e-01 1.0594418e+00 ... -9.5514840e-01 -4.2137661e+00 9.2071009e+00] [-1.1667236e+00 1.6947685e+00 -7.5127330e+00 ... -1.1707506e+00 1.1487546e+00 -3.2968156e+00]]]]]; ov_res: [[[[[-3.96675348e+00 1.98140609e+00 -3.72127342e+00 ... 1.41569412e+00 -2.58503842e+00 -5.89327097e+00] [-4.16020453e-01 -2.73066211e+00 -2.29148626e+00 ... 2.02068806e+00 -3.67307216e-01 6.61580563e-01] [-4.33580637e+00 -1.02013433e+00 -1.03095150e+00 ... 6.66773736e-01 -4.44308233e+00 3.54782510e+00] ... [-4.96294767e-01 3.10065222e+00 -3.20511103e+00 ... 2.42506123e+00 1.86677933e+00 5.80891907e-01] [ 1.44119978e+00 3.78653431e+00 -1.28681064e-01 ... 6.46494913e+00 -5.44496918e+00 4.59863186e+00] [ 7.65917480e-01 1.11398987e-01 -1.60019469e+00 ... -3.19721669e-01 3.25494933e+00 -2.68028831e+00]] [[ 8.48421037e-01 -8.19714606e-01 -1.76424170e+00 ... -1.14026204e-01 3.16981649e+00 5.45712233e+00] [-4.98888445e+00 3.50840926e+00 4.72662163e+00 ... -7.14724183e-01 5.66478300e+00 3.09922791e+00] [-4.57146835e+00 -3.11638236e+00 8.30381489e+00 ... 
4.49691010e+00 6.81418657e-01 4.19141948e-01] ... [-6.87628150e-01 2.64600682e+00 4.45970356e-01 ... -1.33953428e+00 1.95378506e+00 2.23369980e+00] [ 4.61544424e-01 1.49285960e+00 -1.85585451e+00 ... 5.58866560e-01 1.65256643e+00 -5.84422469e-01] [ 1.26489627e+00 2.19639778e+00 3.26256299e+00 ... 5.29350233e+00 2.73270750e+00 -6.73391056e+00]] [[ 1.44404399e+00 -1.75355375e+00 -2.01506114e+00 ... 1.95922184e+00 -1.69260347e+00 1.13215707e-01] [-3.29740214e+00 6.37508440e+00 -3.85877538e+00 ... -6.06365061e+00 -1.21629119e+00 2.28397822e+00] [-1.69963336e+00 6.08335197e-01 -3.28838253e+00 ... -2.78432798e+00 2.63589740e+00 -3.59080529e+00] ... [-1.35296023e+00 5.81786585e+00 -2.42911530e+00 ... -1.77188528e+00 8.34315836e-01 -2.34223008e+00] [-1.84564352e+00 5.05371857e+00 1.21139789e+00 ... -6.53572512e+00 -1.59678745e+00 -4.01366901e+00] [-1.81637436e-01 1.78579893e-02 -3.68778610e+00 ... 1.98204768e+00 6.63193989e+00 -9.02435005e-01]] ... [[ 5.94557047e+00 -3.02692890e+00 -1.32245994e+00 ... 3.63575959e+00 -4.08566028e-01 -9.14412117e+00] [ 8.10493410e-01 4.36494493e+00 2.16735268e+00 ... 9.10628617e-01 -3.26165819e+00 -3.21243346e-01] [ 3.55543494e+00 1.74129748e+00 2.98444438e+00 ... -9.67784226e-01 -1.08291614e+00 -4.56445169e+00] ... [ 5.06989479e+00 -2.70419288e+00 2.60283780e+00 ... 2.58803630e+00 8.07638884e-01 1.33654308e+00] [-2.08598065e+00 -1.81391096e+00 1.90770492e-01 ... 1.12642717e+00 4.79410219e+00 3.01646185e+00] [ 3.65196729e+00 -4.49304724e+00 -8.79148662e-01 ... 9.85981524e-02 -2.04357505e+00 -4.72646093e+00]] [[-1.61562777e+00 -7.75788724e-01 -3.88235450e+00 ... -4.31192923e+00 -4.60670853e+00 -3.75494623e+00] [-1.54242551e+00 1.63584039e-01 -3.26912344e-01 ... 2.24815771e-01 2.36274123e+00 -2.11154556e+00] [ 4.19553661e+00 -4.31359577e+00 -2.01907063e+00 ... 2.05011559e+00 -5.23190641e+00 9.98958111e-01] ... [ 5.76403856e+00 5.93156457e-01 6.16795957e-01 ... 
3.77411985e+00 2.49232459e+00 -2.40817928e+00] [-1.02516592e+00 4.64580297e+00 4.42166948e+00 ... 4.30630259e-02 -1.23056686e+00 -5.21002817e+00] [-6.83182299e-01 4.53948267e-02 -2.96827197e+00 ... -2.00843573e+00 3.42386937e+00 5.21796569e-02]] [[ 1.16444472e-02 1.56265867e+00 -1.03242886e+00 ... 1.53235078e+00 2.55344987e+00 4.75580835e+00] [-3.62192798e+00 2.96790361e-01 3.39518964e-01 ... -1.04408395e+00 2.42120489e-01 -4.57959461e+00] [-4.86246824e+00 -6.87474966e+00 -3.88277364e+00 ... -1.28833008e+00 2.87725902e+00 1.10027683e+00] ... [-4.75307608e+00 -9.38485909e+00 -5.98249435e+00 ... -2.07729912e+00 3.90693474e+00 -2.37594202e-01] [ 1.90545571e+00 -3.61923027e+00 -5.47040367e+00 ... 1.22740173e+00 3.67633700e+00 -3.86696672e+00] [-1.13626532e-01 9.36969876e-01 1.29314768e+00 ... 5.55889416e+00 3.25348526e-01 -4.91440153e+00]]] [[[ 3.00961590e+00 2.21345973e+00 -1.50741160e+00 ... -3.77704310e+00 8.53730392e+00 2.53473043e+00] [-2.26199603e+00 4.09674931e+00 -8.73171449e-01 ... 1.20765364e+00 4.88501835e+00 2.76085925e+00] [ 1.66482046e-01 -4.28656006e+00 3.86819386e+00 ... 1.83370197e+00 3.71719837e+00 -1.79414952e+00] ... [-1.82478905e+00 -3.51150179e+00 2.27542257e+00 ... -1.07861118e+01 4.87246990e+00 2.58422923e+00] [-5.99753809e+00 -7.43519545e+00 2.41903567e+00 ... -6.78501725e-01 3.20198679e+00 -5.57289600e+00] [ 1.34432226e-01 2.02295494e+00 6.27821732e+00 ... 7.39118528e+00 3.13688397e+00 -6.10901403e+00]] [[ 2.27913737e+00 5.20816755e+00 -4.17679429e-01 ... -1.56631148e+00 -6.01040423e-01 -9.33562660e+00] [-5.31238556e+00 1.82375026e+00 -5.41720724e+00 ... -1.61135328e+00 -6.81093597e+00 1.11055386e+00] [ 7.14435577e+00 -4.48171091e+00 -3.54894996e+00 ... -4.35529089e+00 1.55636191e+00 3.09097123e+00] ... [-2.93365598e+00 5.30490065e+00 -7.80087173e-01 ... -1.03510749e+00 1.15930867e+00 -7.55549490e-01] [ 3.80639744e+00 -5.46347536e-02 -1.39195263e+00 ... 
5.19270897e-01 -2.49292850e+00 -3.49334550e+00] [ 2.93870401e+00 1.88692129e+00 -2.10974321e-01 ... -9.73166466e+00 1.07906656e+01 -5.06062222e+00]] [[-3.27336311e+00 -3.96859622e+00 2.33459401e+00 ... 4.63384581e+00 1.24546325e+00 3.50573993e+00] [-2.91351342e+00 -3.66915131e+00 1.08461761e+01 ... 2.32610512e+00 6.29408789e+00 3.83761883e+00] [-2.06672144e+00 -1.55321169e+00 -2.63541198e+00 ... -2.59315133e-01 3.83265281e+00 -1.96593428e+00] ... [ 1.56308758e+00 -7.21970272e+00 2.71888399e+00 ... 2.18335962e+00 3.78402734e+00 7.44843960e+00] [-1.87296081e+00 -3.97289920e+00 1.93742943e+00 ... 3.01514983e+00 5.34534740e+00 -3.81190419e+00] [-6.30588007e+00 4.51942301e+00 4.61394644e+00 ... 8.97278130e-01 -8.76835918e+00 -3.36586285e+00]] ... [[ 6.95253515e+00 -4.62796640e+00 -2.46371078e+00 ... -3.99421668e+00 -3.69643188e+00 -7.22711515e+00] [-4.86481333e+00 4.13960791e+00 -3.33884788e+00 ... 2.51598644e+00 -2.05772305e+00 8.15538120e+00] [-4.20732403e+00 -2.32832766e+00 -9.94141579e+00 ... -3.50914526e+00 6.26056671e-01 -3.26979685e+00] ... [-3.71225786e+00 -1.50934616e-02 1.42495409e-01 ... 8.24200726e+00 3.04503536e+00 -1.46763778e+00] [-5.95388055e-01 2.93675470e+00 6.60624146e-01 ... -5.65203094e+00 -6.92716062e-01 -6.77405548e+00] [-2.73220062e-01 4.49119520e+00 -8.24305439e+00 ... -4.72592783e+00 -2.42331195e+00 -3.21998549e+00]] [[-3.47707748e+00 3.91703010e+00 6.14462423e+00 ... -1.89847633e-01 2.26170883e-01 -8.61070275e-01] [-3.88393307e+00 5.62134838e+00 2.65740657e+00 ... 3.97399378e+00 -6.13190222e+00 1.29800379e+00] [ 1.98771310e+00 -5.03778791e+00 -3.47038412e+00 ... -5.98538399e+00 -9.05268848e-01 -4.58226967e+00] ... [ 9.46564913e-01 -3.08799243e+00 -2.09839678e+00 ... 3.57929325e+00 -1.23617685e+00 8.11593711e-01] [ 6.97797477e-01 -6.92066574e+00 -1.10277331e+00 ... -3.83982825e+00 9.05490303e+00 -2.28693649e-01] [ 7.94936228e+00 -1.92628253e+00 6.66655540e-01 ... 
1.37621469e+01 -6.11874938e-01 -5.22454441e-01]] [[-1.14719659e-01 -6.72778416e+00 -4.89291477e+00 ... -3.19188643e+00 -1.55889082e+00 -1.64202178e+00] [-4.86046743e+00 6.12077904e+00 7.92676640e+00 ... -6.61405468e+00 -8.23181152e-01 -2.02434331e-01] [ 4.75339127e+00 -1.96751201e+00 -2.09410172e-02 ... -3.42891240e+00 -2.55159378e+00 6.05365753e+00] ... [ 7.92681599e+00 1.04469550e+00 -6.25379467e+00 ... 7.96964598e+00 -2.15158463e+00 -4.39309120e+00] [ 5.33647299e+00 1.57711518e+00 1.12991781e+01 ... -3.19411469e+00 -5.94268131e+00 6.03164256e-01] [-1.11721778e+00 2.73508525e+00 -4.44520187e+00 ... -3.41355085e+00 -2.99985886e+00 7.15781546e+00]]] [[[ 4.57392550e+00 2.66264009e+00 -1.00992215e+00 ... -9.77937281e-01 2.45211840e+00 4.03044969e-01] [-1.89391482e+00 -1.84700048e+00 7.74736643e+00 ... 7.35151768e-01 -1.28761733e+00 1.40446639e+00] [-5.00081480e-01 -4.65599871e+00 2.85759807e+00 ... 1.36855435e+00 -2.10974908e+00 -2.18933249e+00] ... [-4.57592773e+00 3.47636223e-01 1.35468638e+00 ... -5.33612204e+00 -4.54208231e+00 7.41901016e+00] [ 6.08456755e+00 4.07536221e+00 -3.87062764e+00 ... 7.90672779e+00 -1.18402171e+00 6.40285254e-01] [ 1.64943802e+00 8.32822263e-01 -8.39090252e+00 ... -5.63814878e-01 -4.31274366e+00 7.11385787e-01]] [[ 1.02132356e+00 -1.91526413e+00 7.35138953e-01 ... 3.88825274e+00 -3.98639870e+00 -7.49271452e-01] [-1.69269216e+00 -7.49995422e+00 -1.04174447e+00 ... 1.89459932e+00 9.78893399e-01 6.21266794e+00] [ 1.31011999e+00 4.10205984e+00 5.04289341e+00 ... -2.03171325e+00 -2.63448501e+00 -5.13419330e-01] ... [ 3.06666076e-01 4.22581911e+00 -8.31391239e+00 ... 2.59445333e+00 -2.53819764e-01 -6.69916391e+00] [ 1.27341914e+00 -1.85064912e+00 6.73824835e+00 ... -1.29978466e+00 9.52673733e-01 2.90407896e+00] [-7.85541010e+00 -3.30116063e-01 4.45760059e+00 ... -1.95608783e+00 -1.29806972e+00 2.43894786e-01]] [[ 1.32327318e+00 4.98881006e+00 4.59129763e+00 ... 
-3.11156559e+00 -3.70762348e+00 4.19808531e+00] [ 2.37688518e+00 3.28680611e+00 -3.29784060e+00 ... -7.04792452e+00 -7.85338640e-01 -7.15524101e+00] [-1.26431203e+00 2.33792797e-01 4.39984053e-01 ... -7.58519471e-01 -6.56409931e+00 2.15577817e+00] ... [ 2.23871493e+00 1.45017195e+00 6.66275978e+00 ... 6.38971552e-02 3.25010270e-01 7.43605900e+00] [-6.11123180e+00 -2.94611764e+00 -7.10522652e+00 ... -1.96913683e+00 -6.46860361e+00 5.01529634e-01] [ 6.62107372e+00 -3.27243257e+00 -8.68569687e-02 ... 2.61693704e-03 -2.56029749e+00 1.84343576e+00]] ... [[-5.92751551e+00 9.35212421e+00 1.00065732e+00 ... 6.17986321e-01 2.97144949e-01 4.17056179e+00] [-4.02485514e+00 -1.58151591e+00 3.85228610e+00 ... -1.60968864e+00 -2.34675026e+00 -2.06572986e+00] [-1.43829167e-01 -1.27952206e+00 4.20740271e+00 ... 9.44763124e-02 5.17420530e-01 2.86935019e+00] ... [ 4.22613573e+00 2.41991425e+00 1.40180039e+00 ... -2.32964683e+00 1.72400749e+00 3.73182476e-01] [ 3.13473153e+00 7.29760695e+00 -3.38865966e-01 ... 1.29372180e+00 -9.66060448e+00 -6.14343405e+00] [-3.46438074e+00 -6.75775826e-01 9.36567783e-01 ... 3.90508699e+00 8.40666008e+00 5.20488930e+00]] [[ 2.21996236e+00 -6.30964136e+00 -9.91333783e-01 ... 3.25353622e+00 -1.78067768e+00 -5.18746901e+00] [ 5.50188160e+00 -1.67261183e+00 -4.02809286e+00 ... -1.69387650e+00 7.47423267e+00 4.75990534e+00] [-4.47993374e+00 -3.67095613e+00 1.88533127e+00 ... -1.00950980e+00 3.95668459e+00 -1.80894196e+00] ... [-2.70064449e+00 -1.13561368e+00 -1.84261882e+00 ... 6.78105116e-01 -5.80379343e+00 2.23687434e+00] [-5.36299050e-01 -2.52494812e+00 -3.07629299e+00 ... 6.13491386e-02 6.55780888e+00 -3.76968002e+00] [-3.64271313e-01 -6.14241242e-01 1.93876970e+00 ... -3.07678103e+00 -1.00676718e+01 1.09898448e+00]] [[ 1.93599677e+00 3.35235882e+00 -5.92138958e+00 ... -3.61941123e+00 1.69449401e+00 -2.59163260e+00] [ 1.44684505e+00 -1.32119548e+00 8.86606514e-01 ... 
2.10632253e+00 -5.40233517e+00 1.92439103e+00] [ 9.03930473e+00 -3.02684259e+00 -8.98950636e-01 ... 3.90492082e+00 1.52917075e+00 3.74052739e+00] ... [ 5.41106653e+00 5.25719643e+00 -3.38516593e+00 ... 7.99593925e-01 8.72795761e-01 -2.26735163e+00] [-1.52755940e+00 3.74861300e-01 1.05944180e+00 ... -9.55148518e-01 -4.21376657e+00 9.20709991e+00] [-1.16672361e+00 1.69476855e+00 -7.51273298e+00 ... -1.17075109e+00 1.14875448e+00 -3.29681563e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_986.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.1529 (2,1,1,.,.) = 2.0699 (3,1,1,.,.) = 1.4487 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[-6.18519336e-02 1.61838472e-01 3.12679231e-01 ... -2.50553519e-01 1.31523190e-02 -9.75768790e-02] [-1.82064295e-01 -2.33472139e-02 -1.65019363e-01 ... -2.65716523e-01 -3.10317099e-01 1.76851273e-01] [ 1.37717143e-01 -1.80558816e-01 2.55637884e-01 ... 1.04160018e-01 1.86985657e-01 5.54093495e-02] ... [-7.14671761e-02 4.41952683e-02 -1.68744788e-01 ... 6.76853284e-02 -6.24481179e-02 8.69343653e-02] [-3.56829204e-02 -3.47304314e-01 1.51678383e-01 ... -2.50196643e-02 1.27061140e-02 3.92903328e-01] [-7.26869777e-02 1.41220719e-01 -1.60073936e-01 ... -1.45577773e-01 1.31791206e-02 -1.50698602e-01]] [[ 1.80176377e-01 1.42090738e-01 1.63762361e-01 ... -1.48864612e-01 -1.19417883e-01 -3.93297104e-03] [ 3.93695831e-02 -9.39824730e-02 2.23538369e-01 ... 1.22006677e-01 -8.93553346e-02 -6.05002530e-02] [ 1.80663824e-01 2.84826398e-01 -3.01723890e-02 ... 1.13638472e-02 -1.27647862e-01 1.70026422e-01] ... [ 1.28403664e-01 7.69242272e-03 -1.97441913e-02 ... 1.47712722e-01 2.30660245e-01 1.97433844e-01] [ 2.74070296e-02 2.08771080e-01 2.33011600e-02 ... 
-3.03914875e-01 -2.92183191e-01 -2.54279763e-01] [ 4.82237386e-03 5.42260781e-02 3.42670023e-01 ... 6.73642009e-02 -7.47509375e-02 1.42634466e-01]] [[-2.28722002e-02 1.53374448e-01 4.51005958e-02 ... 4.90858629e-02 6.08953089e-02 3.14198375e-01] [ 2.96536028e-01 1.89135835e-01 -8.82858858e-02 ... 7.34729692e-02 4.43438925e-02 1.85644533e-02] [-1.00322105e-01 -3.04112863e-02 -9.39675197e-02 ... -1.10379502e-01 -2.87121952e-01 -1.43706441e-01] ... [-6.50509223e-02 1.99788123e-01 -7.29240030e-02 ... -5.77922165e-02 -2.08063006e-01 2.05269381e-01] [-6.73490465e-02 -4.38844636e-02 5.51019013e-02 ... 8.00717548e-02 8.25502723e-02 -3.54276150e-02] [-3.13636303e-01 -2.49693751e-01 -2.75984734e-01 ... -1.78486183e-01 -2.46853247e-01 6.10775240e-02]] ... [[-4.49512005e-02 -2.14539766e-01 1.56403124e-01 ... -1.27208635e-01 -1.94297999e-01 2.58548349e-01] [-5.36916107e-02 7.95768052e-02 -1.85485989e-01 ... -1.76590998e-02 -8.16032514e-02 2.62273848e-02] [-1.77594572e-02 -8.38304684e-02 1.14006668e-01 ... -1.54922068e-01 -1.36463448e-01 1.66268140e-01] ... [-1.22748069e-01 -6.06300123e-02 -1.15566805e-01 ... -4.50495118e-03 -4.92334254e-02 3.31937015e-01] [ 4.72498566e-01 4.80632149e-02 1.99463606e-01 ... -6.87522367e-02 -1.56910747e-01 -1.12029217e-01] [ 3.36323269e-02 2.79945016e-01 1.19863629e-01 ... -7.80142993e-02 1.16717204e-01 2.08406776e-01]] [[ 1.09842315e-01 1.52897090e-02 1.09674931e-01 ... -1.38675854e-01 -5.39015643e-02 1.47087410e-01] [-6.56546280e-02 -1.21841570e-02 -5.47040142e-02 ... -1.80325340e-02 7.69044459e-02 2.01804742e-01] [ 6.14961460e-02 -2.33989656e-01 8.78947750e-02 ... 4.10904037e-03 2.42971912e-01 1.85357146e-02] ... [-3.76008421e-01 1.68993086e-01 -6.81992387e-03 ... -5.18768728e-02 -2.83384770e-01 -1.89898014e-01] [-3.59231383e-02 -4.06326689e-02 1.72722995e-01 ... -3.00510079e-01 -3.79183404e-02 -2.72772104e-01] [-3.08157563e-01 -8.20635855e-02 -1.75450861e-01 ... 
7.97304884e-02 1.09242767e-01 -1.59551427e-01]] [[ 2.08769795e-02 -8.35694522e-02 1.20097265e-01 ... 2.52574503e-01 -3.80569808e-02 -2.55726516e-01] [-1.31397843e-01 -3.43186229e-01 5.39632849e-02 ... -5.75963110e-02 4.62458320e-02 3.50547165e-01] [ 2.46594667e-01 -1.51310349e-02 2.53719557e-02 ... -9.38883051e-02 -7.88257867e-02 1.64847989e-02] ... [ 1.76693112e-01 -6.13602214e-02 -1.14578009e-01 ... -1.92117095e-01 -1.36164308e-01 -2.53195882e-01] [-3.76194343e-02 -1.68058008e-01 7.17897862e-02 ... -1.06944717e-01 -1.05633639e-01 -4.81460951e-02] [ 3.57398242e-02 -6.23432733e-02 -1.06764071e-01 ... -9.50737894e-02 -9.92439911e-02 -3.69100948e-03]]] [[[-9.07269895e-01 -9.52898681e-01 2.75797319e+00 ... -5.63908041e-01 -2.71735936e-01 2.93481201e-02] [ 1.91243982e+00 2.61402273e+00 1.78635383e+00 ... 1.25846398e+00 3.78429852e-02 5.85965812e-01] [-4.95193928e-01 4.02640772e+00 -3.99272203e-01 ... -5.51843286e-01 6.98814332e-01 -2.36728001e+00] ... [ 2.10500264e+00 -4.18844044e-01 -1.47193611e+00 ... -4.25752878e+00 -3.28349590e-01 -4.52027261e-01] [-9.11629319e-01 2.66980863e+00 1.37543321e+00 ... -2.34140420e+00 1.80099475e+00 -5.19792438e-01] [-9.41343188e-01 1.71637487e+00 2.64157987e+00 ... -3.32764053e+00 -2.38706374e+00 3.86810231e+00]] [[-3.02640438e-01 1.83632648e+00 -5.90885460e-01 ... -5.79057097e-01 -1.90306985e+00 6.51784956e-01] [ 1.79395676e+00 -1.52340412e+00 -1.96277475e+00 ... -4.39296156e-01 -1.82733393e+00 -1.23352483e-01] [ 1.79639697e+00 7.74812102e-01 1.97694528e+00 ... -3.79373956e+00 2.02689409e+00 2.16736484e+00] ... [-1.25129493e-02 -4.06851351e-01 3.53544378e+00 ... 3.28954726e-01 2.29052305e+00 -7.33048439e-01] [ 4.03027952e-01 -1.35620165e+00 -1.89542079e+00 ... 1.29344964e+00 6.13213921e+00 3.81194949e+00] [-5.30276820e-02 -1.54383853e-01 8.48599315e-01 ... 1.43788218e+00 1.48676479e+00 1.33071864e+00]] [[-4.68750387e-01 5.05560517e-01 1.83492064e+00 ... 
9.02925849e-01 1.40859151e+00 -5.27948713e+00] [-4.18690234e-01 -4.49845457e+00 -1.61355484e+00 ... -2.24604297e+00 -1.15589857e+00 5.51522732e-01] [ 1.05637813e+00 1.48213177e-03 -1.58400580e-01 ... -1.08015740e+00 -1.40925980e+00 1.92155266e+00] ... [ 5.64051270e-01 4.15994692e+00 -2.40302920e+00 ... 4.52800602e-01 -1.28752160e+00 2.37518001e+00] [ 1.60283244e+00 1.57013938e-01 5.12206674e-01 ... 4.45020533e+00 3.45724016e-01 1.26537752e+00] [ 2.28001857e+00 -2.94062924e+00 -2.17173266e+00 ... 1.80573463e+00 1.95560053e-01 -2.03936458e+00]] ... [[-1.46637225e+00 -1.19588208e+00 1.17833507e+00 ... -9.89469051e-01 7.13022470e-01 -8.76952708e-02] [ 2.57501984e+00 -5.96307993e-01 1.06302893e+00 ... 3.06912780e+00 -3.00670385e+00 -2.40704584e+00] [ 1.00469387e+00 -3.20786142e+00 -1.01477265e+00 ... 1.48851287e+00 1.16726303e+00 1.04197168e+00] ... [-1.70106784e-01 2.04170609e+00 -1.59949660e+00 ... 3.12093520e+00 -6.63596764e-02 -7.77902901e-01] [ 1.90190828e+00 2.83443856e+00 1.96053421e+00 ... -1.19210947e+00 -1.42493546e-01 -2.39711910e-01] [ 4.84038925e+00 1.68887544e+00 -1.02589071e+00 ... -1.93456268e+00 2.34052515e+00 2.08238840e+00]] [[ 5.40410221e-01 6.37335062e-01 -2.47583151e+00 ... 3.96857214e+00 8.84708226e-01 4.38724136e+00] [ 7.14520454e-01 7.13184834e-01 -2.87519360e+00 ... 4.75830507e+00 -2.59801466e-02 2.15148950e+00] [ 1.33679330e+00 4.12136048e-01 -2.56983781e+00 ... 3.56853493e-02 1.98373914e-01 -6.82820261e-01] ... [ 7.91743159e-01 -1.34581244e+00 1.05377495e+00 ... -3.63247228e+00 1.32406175e+00 -3.90648127e-01] [-1.39385641e+00 -2.54621601e+00 -9.48747456e-01 ... 4.02127624e-01 2.08056521e+00 -2.00113702e+00] [ 1.51081991e+00 -6.00138307e-01 -3.37015420e-01 ... 1.92069516e-01 -3.50537992e+00 3.47573328e+00]] [[ 1.89852905e+00 -2.88076818e-01 3.60722327e+00 ... -1.32330418e+00 -2.57970071e+00 3.80901480e+00] [-1.41711915e+00 -1.60956717e+00 3.32607776e-01 ... 
2.51661587e+00 2.21712232e+00 -3.29067969e+00] [ 1.38744938e+00 2.47231913e+00 3.79920983e+00 ... 2.61085518e-02 -5.84625769e+00 2.17052650e+00] ... [-8.75481606e-01 4.10633659e+00 1.60404706e+00 ... 2.28512740e+00 4.74576205e-01 1.77415824e+00] [-4.14533854e+00 1.75182855e+00 1.23338223e+00 ... 2.30356321e-01 3.98996496e+00 -1.72647488e+00] [ 2.39665532e+00 3.43054205e-01 -4.37647216e-02 ... -2.19408774e+00 4.50605124e-01 1.85021996e+00]]] [[[ 1.16870987e+00 3.40583014e+00 8.04269493e-01 ... -1.60412669e+00 -8.75780523e-01 -1.24036098e+00] [-3.14749420e-01 -9.45726871e-01 6.39006555e-01 ... 6.91185668e-02 1.73918927e+00 4.68681008e-01] [ 1.48320031e+00 2.40583920e+00 3.00984681e-01 ... 1.32684326e+00 2.86589527e+00 1.98611188e+00] ... [-1.16325605e+00 -1.60108590e+00 2.64921993e-01 ... 1.39526832e+00 4.92039114e-01 -2.83648896e+00] [ 3.85655355e+00 1.57238615e+00 9.11441445e-01 ... 7.88091063e-01 1.78517830e+00 2.08725438e-01] [-5.89600503e-01 -9.96123612e-01 1.27030301e+00 ... -4.49280649e-01 5.29044330e-01 -3.76113504e-02]] [[ 3.04797935e+00 -9.91641939e-01 2.27968264e-02 ... -3.34774637e+00 2.45651913e+00 -6.84477448e-01] [ 1.96630657e+00 1.39137936e+00 -2.04957891e+00 ... -9.60124612e-01 6.99127078e-01 -3.15027499e+00] [-5.07944040e-02 4.29120868e-01 3.89550614e+00 ... -1.42975819e+00 -5.32569170e-01 1.32297683e+00] ... [ 9.42437887e-01 -3.32102954e-01 -5.57276607e-01 ... 1.15771198e+00 2.44812298e+00 -2.19502592e+00] [ 3.05003822e-01 -2.01130688e-01 -2.38541174e+00 ... -1.15739262e+00 -8.69680285e-01 -1.60388970e+00] [ 2.00153012e-02 2.65375972e-01 1.31893530e-01 ... 6.29941106e-01 1.54091024e+00 -6.18142307e-01]] [[ 1.94606614e+00 -1.07240677e+00 1.65232635e+00 ... 9.22588646e-01 2.17719340e+00 6.53001070e-01] [ 4.17946279e-01 1.48224699e+00 -1.90311134e-01 ... -1.35242510e+00 -5.32784224e-01 4.73097831e-01] [-1.94154465e+00 -1.33821845e-01 -2.18874741e+00 ... -6.06279671e-01 -5.56524098e-01 2.63567209e-01] ... 
[-1.44223547e+00 1.31093895e+00 -9.17500138e-01 ... -1.31775153e+00 1.66677368e+00 7.62124240e-01] [-7.81101942e-01 -1.43084168e+00 -2.83548534e-01 ... 1.06207967e+00 -1.33706665e+00 7.72636592e-01] [ 1.45585907e+00 -2.54012465e+00 2.07018232e+00 ... 2.56981760e-01 -8.30758959e-02 -5.19599736e-01]] ... [[-6.13804936e-01 -2.54366374e+00 1.28285766e+00 ... -1.55369234e+00 -6.30146325e-01 9.30070877e-01] [-1.20622325e+00 -2.18813276e+00 -1.03918946e+00 ... 1.38071811e+00 -7.41944849e-01 -7.65542090e-01] [ 1.33526969e+00 -1.29693615e+00 3.56801540e-01 ... 2.47132063e+00 2.21312687e-01 3.58202130e-01] ... [ 1.83387071e-01 5.05384207e-01 2.00446868e+00 ... 4.51411128e-01 1.09530783e+00 -3.14180583e-01] [ 2.12914214e-01 -7.85295904e-01 1.52931643e+00 ... 3.16298604e+00 7.76353896e-01 9.30179656e-01] [-5.99687278e-01 3.82679589e-02 -2.61184740e+00 ... -1.32926345e+00 1.90674090e+00 1.02894045e-01]] [[-1.83095491e+00 1.46103776e+00 1.51557207e+00 ... -9.64818120e-01 2.83755898e+00 -8.48878443e-01] [-1.29797053e+00 9.86720562e-01 1.07130098e+00 ... 1.92425847e+00 1.80189550e+00 1.50422847e+00] [ 2.76489649e-02 -1.86862206e+00 -3.61413509e-01 ... -1.21638298e+00 2.23883724e+00 -7.52986550e-01] ... [ 1.37026799e+00 8.13131750e-01 8.91166270e-01 ... -1.38362956e+00 1.08419490e+00 -1.90813267e+00] [ 8.41932595e-01 -1.17473733e+00 9.73388314e-01 ... 1.71281433e+00 -2.90795594e-01 -2.83142120e-01] [ 3.94660980e-01 4.09813023e+00 -1.44199336e+00 ... -1.47483063e+00 -8.83825645e-02 1.97121954e+00]] [[ 2.83877730e-01 -3.19662690e+00 -3.36542654e+00 ... -3.19920778e-01 1.09645712e+00 1.45503271e+00] [-3.55951881e+00 6.03249669e-01 6.83748245e-01 ... -1.11152864e+00 7.39841759e-01 -1.07433856e+00] [ 1.73135114e+00 -3.73961329e-01 2.56025147e+00 ... -9.37453136e-02 3.03060031e+00 2.32366085e-01] ... [-2.19148731e+00 -1.86013138e+00 -1.24618256e+00 ... -9.02773559e-01 1.23915064e+00 3.48628998e-01] [ 8.75941992e-01 1.87379837e+00 -1.39493310e+00 ... 
-7.03480124e-01 5.04741728e-01 -1.16867304e-01] [-4.56038475e-01 -6.54871345e-01 8.21402729e-01 ... 1.21725500e+00 6.56388044e-01 -1.15157723e+00]]]]]; ov_res: [[[[[-6.18519336e-02 1.61838472e-01 3.12679231e-01 ... -2.50553519e-01 1.31523190e-02 -9.75768790e-02] [-1.82064295e-01 -2.33472139e-02 -1.65019363e-01 ... -2.65716523e-01 -3.10317099e-01 1.76851273e-01] [ 1.37717143e-01 -1.80558816e-01 2.55637884e-01 ... 1.04160018e-01 1.86985657e-01 5.54093495e-02] ... [-7.14671761e-02 4.41952683e-02 -1.68744788e-01 ... 6.76853284e-02 -6.24481179e-02 8.69343653e-02] [-3.56829204e-02 -3.47304314e-01 1.51678383e-01 ... -2.50196643e-02 1.27061140e-02 3.92903328e-01] [-7.26869777e-02 1.41220719e-01 -1.60073936e-01 ... -1.45577773e-01 1.31791206e-02 -1.50698602e-01]] [[ 1.80176377e-01 1.42090738e-01 1.63762361e-01 ... -1.48864612e-01 -1.19417883e-01 -3.93297104e-03] [ 3.93695831e-02 -9.39824730e-02 2.23538369e-01 ... 1.22006677e-01 -8.93553346e-02 -6.05002530e-02] [ 1.80663824e-01 2.84826398e-01 -3.01723890e-02 ... 1.13638472e-02 -1.27647862e-01 1.70026422e-01] ... [ 1.28403664e-01 7.69242272e-03 -1.97441913e-02 ... 1.47712722e-01 2.30660245e-01 1.97433844e-01] [ 2.74070296e-02 2.08771080e-01 2.33011600e-02 ... -3.03914875e-01 -2.92183191e-01 -2.54279763e-01] [ 4.82237386e-03 5.42260781e-02 3.42670023e-01 ... 6.73642009e-02 -7.47509375e-02 1.42634466e-01]] [[-2.28722002e-02 1.53374448e-01 4.51005958e-02 ... 4.90858629e-02 6.08953089e-02 3.14198375e-01] [ 2.96536028e-01 1.89135835e-01 -8.82858858e-02 ... 7.34729692e-02 4.43438925e-02 1.85644533e-02] [-1.00322105e-01 -3.04112863e-02 -9.39675197e-02 ... -1.10379502e-01 -2.87121952e-01 -1.43706441e-01] ... [-6.50509223e-02 1.99788123e-01 -7.29240030e-02 ... -5.77922165e-02 -2.08063006e-01 2.05269381e-01] [-6.73490465e-02 -4.38844636e-02 5.51019013e-02 ... 8.00717548e-02 8.25502723e-02 -3.54276150e-02] [-3.13636303e-01 -2.49693751e-01 -2.75984734e-01 ... -1.78486183e-01 -2.46853247e-01 6.10775240e-02]] ... 
[[-4.49512005e-02 -2.14539766e-01 1.56403124e-01 ... -1.27208635e-01 -1.94297999e-01 2.58548349e-01] [-5.36916107e-02 7.95768052e-02 -1.85485989e-01 ... -1.76590998e-02 -8.16032514e-02 2.62273848e-02] [-1.77594572e-02 -8.38304684e-02 1.14006668e-01 ... -1.54922068e-01 -1.36463448e-01 1.66268140e-01] ... [-1.22748069e-01 -6.06300123e-02 -1.15566805e-01 ... -4.50495118e-03 -4.92334254e-02 3.31937015e-01] [ 4.72498566e-01 4.80632149e-02 1.99463606e-01 ... -6.87522367e-02 -1.56910747e-01 -1.12029217e-01] [ 3.36323269e-02 2.79945016e-01 1.19863629e-01 ... -7.80142993e-02 1.16717204e-01 2.08406776e-01]] [[ 1.09842315e-01 1.52897090e-02 1.09674931e-01 ... -1.38675854e-01 -5.39015643e-02 1.47087410e-01] [-6.56546280e-02 -1.21841570e-02 -5.47040142e-02 ... -1.80325340e-02 7.69044459e-02 2.01804742e-01] [ 6.14961460e-02 -2.33989656e-01 8.78947750e-02 ... 4.10904037e-03 2.42971912e-01 1.85357146e-02] ... [-3.76008421e-01 1.68993086e-01 -6.81992387e-03 ... -5.18768728e-02 -2.83384770e-01 -1.89898014e-01] [-3.59231383e-02 -4.06326689e-02 1.72722995e-01 ... -3.00510079e-01 -3.79183404e-02 -2.72772104e-01] [-3.08157563e-01 -8.20635855e-02 -1.75450861e-01 ... 7.97304884e-02 1.09242767e-01 -1.59551427e-01]] [[ 2.08769795e-02 -8.35694522e-02 1.20097265e-01 ... 2.52574503e-01 -3.80569808e-02 -2.55726516e-01] [-1.31397843e-01 -3.43186229e-01 5.39632849e-02 ... -5.75963110e-02 4.62458320e-02 3.50547165e-01] [ 2.46594667e-01 -1.51310349e-02 2.53719557e-02 ... -9.38883051e-02 -7.88257867e-02 1.64847989e-02] ... [ 1.76693112e-01 -6.13602214e-02 -1.14578009e-01 ... -1.92117095e-01 -1.36164308e-01 -2.53195882e-01] [-3.76194343e-02 -1.68058008e-01 7.17897862e-02 ... -1.06944717e-01 -1.05633639e-01 -4.81460951e-02] [ 3.57398242e-02 -6.23432733e-02 -1.06764071e-01 ... -9.50737894e-02 -9.92439911e-02 -3.69100948e-03]]] [[[-9.07269895e-01 -9.52898681e-01 2.75797319e+00 ... -5.63908041e-01 -2.71735936e-01 2.93481201e-02] [ 1.91243982e+00 2.61402273e+00 1.78635383e+00 ... 
1.25846398e+00 3.78429852e-02 5.85965812e-01] [-4.95193928e-01 4.02640772e+00 -3.99272203e-01 ... -5.51843286e-01 6.98814332e-01 -2.36728001e+00] ... [ 2.10500264e+00 -4.18844044e-01 -1.47193611e+00 ... -4.25752878e+00 -3.28349590e-01 -4.52027261e-01] [-9.11629319e-01 2.66980863e+00 1.37543321e+00 ... -2.34140420e+00 1.80099475e+00 -5.19792438e-01] [-9.41343188e-01 1.71637487e+00 2.64157987e+00 ... -3.32764053e+00 -2.38706374e+00 3.86810231e+00]] [[-3.02640438e-01 1.83632648e+00 -5.90885460e-01 ... -5.79057097e-01 -1.90306985e+00 6.51784956e-01] [ 1.79395676e+00 -1.52340412e+00 -1.96277475e+00 ... -4.39296156e-01 -1.82733393e+00 -1.23352483e-01] [ 1.79639697e+00 7.74812102e-01 1.97694528e+00 ... -3.79373956e+00 2.02689409e+00 2.16736484e+00] ... [-1.25129493e-02 -4.06851351e-01 3.53544378e+00 ... 3.28954726e-01 2.29052305e+00 -7.33048439e-01] [ 4.03027952e-01 -1.35620165e+00 -1.89542079e+00 ... 1.29344964e+00 6.13213921e+00 3.81194949e+00] [-5.30276820e-02 -1.54383853e-01 8.48599315e-01 ... 1.43788218e+00 1.48676479e+00 1.33071864e+00]] [[-4.68750387e-01 5.05560517e-01 1.83492064e+00 ... 9.02925849e-01 1.40859151e+00 -5.27948713e+00] [-4.18690234e-01 -4.49845457e+00 -1.61355484e+00 ... -2.24604297e+00 -1.15589857e+00 5.51522732e-01] [ 1.05637813e+00 1.48213177e-03 -1.58400580e-01 ... -1.08015740e+00 -1.40925980e+00 1.92155266e+00] ... [ 5.64051270e-01 4.15994692e+00 -2.40302920e+00 ... 4.52800602e-01 -1.28752160e+00 2.37518001e+00] [ 1.60283244e+00 1.57013938e-01 5.12206674e-01 ... 4.45020533e+00 3.45724016e-01 1.26537752e+00] [ 2.28001857e+00 -2.94062924e+00 -2.17173266e+00 ... 1.80573463e+00 1.95560053e-01 -2.03936458e+00]] ... [[-1.46637225e+00 -1.19588208e+00 1.17833507e+00 ... -9.89469051e-01 7.13022470e-01 -8.76952708e-02] [ 2.57501984e+00 -5.96307993e-01 1.06302893e+00 ... 3.06912780e+00 -3.00670385e+00 -2.40704584e+00] [ 1.00469387e+00 -3.20786142e+00 -1.01477265e+00 ... 1.48851287e+00 1.16726303e+00 1.04197168e+00] ... 
[-1.70106784e-01 2.04170609e+00 -1.59949660e+00 ... 3.12093520e+00 -6.63596764e-02 -7.77902901e-01] [ 1.90190828e+00 2.83443856e+00 1.96053421e+00 ... -1.19210947e+00 -1.42493546e-01 -2.39711910e-01] [ 4.84038925e+00 1.68887544e+00 -1.02589071e+00 ... -1.93456268e+00 2.34052515e+00 2.08238840e+00]] [[ 5.40410221e-01 6.37335062e-01 -2.47583151e+00 ... 3.96857214e+00 8.84708226e-01 4.38724136e+00] [ 7.14520454e-01 7.13184834e-01 -2.87519360e+00 ... 4.75830507e+00 -2.59801466e-02 2.15148950e+00] [ 1.33679330e+00 4.12136048e-01 -2.56983781e+00 ... 3.56853493e-02 1.98373914e-01 -6.82820261e-01] ... [ 7.91743159e-01 -1.34581244e+00 1.05377495e+00 ... -3.63247228e+00 1.32406175e+00 -3.90648127e-01] [-1.39385641e+00 -2.54621601e+00 -9.48747456e-01 ... 4.02127624e-01 2.08056521e+00 -2.00113702e+00] [ 1.51081991e+00 -6.00138307e-01 -3.37015420e-01 ... 1.92069516e-01 -3.50537992e+00 3.47573328e+00]] [[ 1.89852905e+00 -2.88076818e-01 3.60722327e+00 ... -1.32330418e+00 -2.57970071e+00 3.80901480e+00] [-1.41711915e+00 -1.60956717e+00 3.32607776e-01 ... 2.51661587e+00 2.21712232e+00 -3.29067969e+00] [ 1.38744938e+00 2.47231913e+00 3.79920983e+00 ... 2.61085518e-02 -5.84625769e+00 2.17052650e+00] ... [-8.75481606e-01 4.10633659e+00 1.60404706e+00 ... 2.28512740e+00 4.74576205e-01 1.77415824e+00] [-4.14533854e+00 1.75182855e+00 1.23338223e+00 ... 2.30356321e-01 3.98996496e+00 -1.72647488e+00] [ 2.39665532e+00 3.43054205e-01 -4.37647216e-02 ... -2.19408774e+00 4.50605124e-01 1.85021996e+00]]] [[[ 1.16870987e+00 3.40583014e+00 8.04269493e-01 ... -1.60412669e+00 -8.75780523e-01 -1.24036098e+00] [-3.14749420e-01 -9.45726871e-01 6.39006555e-01 ... 6.91185668e-02 1.73918927e+00 4.68681008e-01] [ 1.48320031e+00 2.40583920e+00 3.00984681e-01 ... 1.32684326e+00 2.86589527e+00 1.98611188e+00] ... [-1.16325605e+00 -1.60108590e+00 2.64921993e-01 ... 1.39526832e+00 4.92039114e-01 -2.83648896e+00] [ 3.85655355e+00 1.57238615e+00 9.11441445e-01 ... 
7.88091063e-01 1.78517830e+00 2.08725438e-01] [-5.89600503e-01 -9.96123612e-01 1.27030301e+00 ... -4.49280649e-01 5.29044330e-01 -3.76113504e-02]] [[ 3.04797935e+00 -9.91641939e-01 2.27968264e-02 ... -3.34774637e+00 2.45651913e+00 -6.84477448e-01] [ 1.96630657e+00 1.39137936e+00 -2.04957891e+00 ... -9.60124612e-01 6.99127078e-01 -3.15027499e+00] [-5.07944040e-02 4.29120868e-01 3.89550614e+00 ... -1.42975819e+00 -5.32569170e-01 1.32297683e+00] ... [ 9.42437887e-01 -3.32102954e-01 -5.57276607e-01 ... 1.15771198e+00 2.44812298e+00 -2.19502592e+00] [ 3.05003822e-01 -2.01130688e-01 -2.38541174e+00 ... -1.15739262e+00 -8.69680285e-01 -1.60388970e+00] [ 2.00153012e-02 2.65375972e-01 1.31893530e-01 ... 6.29941106e-01 1.54091024e+00 -6.18142307e-01]] [[ 1.94606614e+00 -1.07240677e+00 1.65232635e+00 ... 9.22588646e-01 2.17719340e+00 6.53001070e-01] [ 4.17946279e-01 1.48224699e+00 -1.90311134e-01 ... -1.35242510e+00 -5.32784224e-01 4.73097831e-01] [-1.94154465e+00 -1.33821845e-01 -2.18874741e+00 ... -6.06279671e-01 -5.56524098e-01 2.63567209e-01] ... [-1.44223547e+00 1.31093895e+00 -9.17500138e-01 ... -1.31775153e+00 1.66677368e+00 7.62124240e-01] [-7.81101942e-01 -1.43084168e+00 -2.83548534e-01 ... 1.06207967e+00 -1.33706665e+00 7.72636592e-01] [ 1.45585907e+00 -2.54012465e+00 2.07018232e+00 ... 2.56981760e-01 -8.30758959e-02 -5.19599736e-01]] ... [[-6.13804936e-01 -2.54366374e+00 1.28285766e+00 ... -1.55369234e+00 -6.30146325e-01 9.30070877e-01] [-1.20622325e+00 -2.18813276e+00 -1.03918946e+00 ... 1.38071811e+00 -7.41944849e-01 -7.65542090e-01] [ 1.33526969e+00 -1.29693615e+00 3.56801540e-01 ... 2.47132063e+00 2.21312687e-01 3.58202130e-01] ... [ 1.83387071e-01 5.05384207e-01 2.00446868e+00 ... 4.51411128e-01 1.09530783e+00 -3.14180583e-01] [ 2.12914214e-01 -7.85295904e-01 1.52931643e+00 ... 3.16298604e+00 7.76353896e-01 9.30179656e-01] [-5.99687278e-01 3.82679589e-02 -2.61184740e+00 ... 
-1.32926345e+00 1.90674090e+00 1.02894045e-01]] [[-1.83095491e+00 1.46103776e+00 1.51557207e+00 ... -9.64818120e-01 2.83755898e+00 -8.48878443e-01] [-1.29797053e+00 9.86720562e-01 1.07130098e+00 ... 1.92425847e+00 1.80189550e+00 1.50422847e+00] [ 2.76489649e-02 -1.86862206e+00 -3.61413509e-01 ... -1.21638298e+00 2.23883724e+00 -7.52986550e-01] ... [ 1.37026799e+00 8.13131750e-01 8.91166270e-01 ... -1.38362956e+00 1.08419490e+00 -1.90813267e+00] [ 8.41932595e-01 -1.17473733e+00 9.73388314e-01 ... 1.71281433e+00 -2.90795594e-01 -2.83142120e-01] [ 3.94660980e-01 4.09813023e+00 -1.44199336e+00 ... -1.47483063e+00 -8.83825645e-02 1.97121954e+00]] [[ 2.83877730e-01 -3.19662690e+00 -3.36542654e+00 ... -3.19920778e-01 1.09645712e+00 1.45503271e+00] [-3.55951881e+00 6.03249669e-01 6.83748245e-01 ... -1.11152864e+00 7.39841759e-01 -1.07433856e+00] [ 1.73135114e+00 -3.73961329e-01 2.56025147e+00 ... -9.37453136e-02 3.03060031e+00 2.32366085e-01] ... [-2.19148731e+00 -1.86013138e+00 -1.24618256e+00 ... -9.02773559e-01 1.23915064e+00 3.48628998e-01] [ 8.75941992e-01 1.87379837e+00 -1.39493310e+00 ... -7.03480124e-01 5.04741728e-01 -1.16867304e-01] [-4.56038475e-01 -6.54871345e-01 8.21402729e-01 ... 1.21725500e+00 6.56388044e-01 -1.15157723e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_988.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.2918 (2,1,1,.,.) = 0.1052 (3,1,1,.,.) = -1.6905 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[ 2.07046777e-01 1.54634550e-01 -8.93136412e-02 ... -1.06644727e-01 3.06004822e-01 4.52779502e-01] [-1.09744996e-01 -4.12288606e-01 -2.75902331e-01 ... 1.72984332e-01 5.84743135e-02 -5.77936292e-01] [ 9.87817794e-02 -3.18639427e-01 -3.42298001e-01 ... 2.90723860e-01 2.33017966e-01 -7.69811347e-02] ... [-3.53961676e-01 5.79270050e-02 2.13322341e-01 ... 4.76761550e-01 4.94930804e-01 4.55946594e-01] [ 9.90601331e-02 6.96200803e-02 2.60989785e-01 ... -7.28165805e-02 6.52100295e-02 -5.54185331e-01] [-3.69546451e-02 -2.28133172e-01 1.81230649e-01 ... 3.46720099e-01 -1.07433580e-01 1.46636099e-01]] [[ 4.02260453e-01 1.87046260e-01 1.23477392e-01 ... -1.79781049e-01 -1.08850837e-01 1.16542175e-01] [-6.01050220e-02 -3.21665108e-01 3.85464996e-01 ... -6.25273407e-01 1.24435499e-01 -2.51051366e-01] [-2.55904436e-01 1.11151390e-01 -4.30070050e-02 ... -9.98407602e-02 -2.04632223e-01 1.03691852e+00] ... [ 2.40817100e-01 -2.31958970e-01 -6.82194233e-02 ... -4.79756504e-01 3.22003752e-01 -2.24580035e-01] [ 5.56776077e-02 3.36166531e-01 -2.80152291e-01 ... 
3.16265136e-01 1.46909326e-01 -3.29560816e-01] [-2.36825734e-01 1.54976159e-01 -2.12079640e-02 ... -2.85659153e-02 -1.00604594e-01 2.92478114e-01]] [[-1.23037212e-01 1.94689393e-01 -2.40130737e-01 ... 1.07302710e-01 -5.27230382e-01 -3.24451715e-01] [-3.07333052e-01 -4.25198734e-01 3.28973234e-01 ... 1.41548857e-01 -2.93506265e-01 1.06249139e-01] [-5.10463595e-01 -2.01438993e-01 4.50887889e-01 ... -4.85557541e-02 1.88994870e-01 1.33540705e-01] ... [-4.63851124e-01 -7.99462259e-01 -9.07471627e-02 ... -6.08597815e-01 -4.29602146e-01 2.68705279e-01] [ 8.64086486e-03 -1.56082317e-01 1.08833373e-01 ... 1.46352723e-01 3.46295834e-01 1.77783743e-02] [ 1.48017064e-01 4.76075888e-01 -3.73635620e-01 ... -9.72554758e-02 -1.73273757e-02 2.51974016e-01]] ... [[ 9.57382023e-02 2.50294190e-02 2.13276103e-01 ... -1.05051726e-01 -2.35513113e-02 -4.76495564e-01] [ 1.63661063e-01 -1.32750437e-01 -7.57067725e-02 ... 3.43828291e-01 -7.66361430e-02 3.58629793e-01] [ 1.57083109e-01 4.27894771e-01 -4.48108912e-02 ... -3.41851234e-01 1.24385148e-01 -4.11459982e-01] ... [-2.84175705e-02 -8.23856816e-02 4.96747531e-02 ... -1.22064650e-01 9.17725563e-01 5.13156116e-01] [ 1.45883679e-01 -4.06188458e-01 1.55174984e-02 ... 3.39906774e-02 6.06378764e-02 -1.98737457e-01] [ 3.28198642e-01 4.15361315e-01 -1.36564508e-01 ... -9.23510939e-02 3.12748075e-01 -5.19808173e-01]] [[ 1.13406204e-01 -1.56745553e-01 -5.52985847e-01 ... -7.26292133e-01 7.35496283e-02 7.14335963e-02] [-4.68934961e-02 -2.09252506e-01 2.94652075e-01 ... 1.81139976e-01 1.62018696e-03 4.06295955e-01] [-2.58420050e-01 -7.99957275e-01 -3.14503461e-01 ... -3.78643483e-01 -1.50843300e-02 -1.39144078e-01] ... [-1.75879765e-02 1.20667174e-01 -1.97788790e-01 ... 3.09662312e-01 -6.29545227e-02 1.27941757e-01] [-1.25356287e-01 2.19150722e-01 5.56162536e-01 ... 1.94917113e-01 6.88591957e-01 -2.61179835e-01] [ 3.19310904e-01 -4.28940296e-01 -2.92361051e-01 ... 
-3.68159205e-01 2.18485802e-01 4.67137583e-02]] [[ 3.32593709e-01 -2.27288291e-01 3.39383394e-01 ... -2.17480376e-01 -3.14913064e-01 -4.46271263e-02] [ 7.79164732e-02 -3.05153662e-03 1.46550566e-01 ... -2.37981007e-01 -1.91959932e-01 -2.52902508e-01] [ 3.10896605e-01 -4.03521091e-01 -1.73423082e-01 ... -2.30427399e-01 4.04464245e-01 -9.81615856e-02] ... [ 1.39516667e-01 3.96313101e-01 1.64084032e-01 ... -2.70620525e-01 9.16558877e-02 3.32721144e-01] [ 5.31338573e-01 1.96574435e-01 1.49064913e-01 ... -1.27061503e-03 1.38443321e-01 8.35697874e-02] [ 4.43754271e-02 -3.32503945e-01 3.66499871e-01 ... -3.59745979e-01 -2.33481213e-01 1.62402317e-01]]] [[[ 1.94111373e-02 -5.64109534e-04 1.84407202e-03 ... 7.60519952e-02 -2.89786085e-02 -5.08020865e-03] [-7.38401860e-02 -2.86930427e-02 3.59671600e-02 ... 1.04222819e-01 1.25253096e-01 3.93412337e-02] [-2.91435514e-02 1.70466006e-01 -1.01626702e-01 ... -1.94002554e-01 1.87200814e-01 -1.56351144e-03] ... [-1.23467356e-01 6.21725013e-03 -8.24537799e-02 ... -2.12656423e-01 -8.38295147e-02 -4.94839251e-02] [ 3.32472660e-02 -4.47966438e-03 2.92233434e-02 ... 1.20502792e-01 -5.65020135e-03 -1.88874472e-02] [ 3.40570137e-02 4.57980894e-02 4.87834103e-02 ... -1.19101487e-01 2.31605247e-02 1.16946675e-01]] [[-2.85958126e-02 -1.17644444e-01 9.30741206e-02 ... 9.81089771e-02 -3.54642831e-02 -2.35247780e-02] [-6.91112950e-02 -1.60699725e-01 -8.27496424e-02 ... -9.48704407e-02 2.76503023e-02 4.96439524e-02] [-1.57515839e-01 7.78019950e-02 -6.29159436e-02 ... -4.55915295e-02 -6.27285391e-02 1.75474603e-02] ... [ 3.25166136e-02 -7.05148606e-03 7.49766752e-02 ... 7.54471645e-02 -2.35485539e-01 -6.23031892e-02] [ 1.52598366e-01 -1.53923929e-01 7.67508522e-02 ... -6.64322749e-02 1.11126853e-02 1.33218721e-01] [-2.18499333e-01 2.69769412e-02 3.69221973e-03 ... -1.43946316e-02 -9.52244624e-02 7.92587548e-02]] [[ 8.61960948e-02 -6.33823574e-02 -2.24963985e-02 ... 
-3.21058929e-02 3.06609608e-02 1.57459661e-01] [ 6.25731274e-02 -9.35477540e-02 -1.48709282e-01 ... 6.35258406e-02 5.10484129e-02 5.72384745e-02] [ 7.47294500e-02 -1.17253311e-01 -1.16427705e-01 ... 6.41393363e-02 -1.60030380e-01 4.24639694e-02] ... [ 9.77242589e-02 1.18313758e-02 -5.44146858e-02 ... 3.02339882e-01 2.26721149e-02 7.77113140e-02] [ 1.12643331e-01 1.06924310e-01 3.11159529e-02 ... 2.08886582e-02 -1.00205928e-01 -2.82833949e-02] [-9.11429748e-02 -7.05994666e-02 1.26071289e-01 ... -9.33034122e-02 -6.56380728e-02 1.26995951e-01]] ... [[-4.75061908e-02 -1.63675338e-01 3.23242173e-02 ... 4.59108949e-02 1.36599347e-01 -1.29714057e-01] [ 3.13684940e-02 1.89020578e-02 7.16466084e-03 ... -1.05334014e-01 1.42024632e-03 -1.25957072e-01] [-5.71836233e-02 5.82724176e-02 -7.46597676e-03 ... 9.89426896e-02 2.32240912e-02 1.44008920e-01] ... [ 1.00096144e-01 2.74505038e-02 -4.58830781e-02 ... 1.38949767e-01 5.01865055e-03 -1.20552003e-01] [ 3.10632810e-02 4.27713580e-02 1.13891788e-01 ... 6.31514490e-02 -4.37222235e-02 -1.36950567e-01] [-1.08295968e-02 -3.67165841e-02 4.39643562e-02 ... -6.81527629e-02 -5.56297153e-02 1.96146578e-01]] [[ 6.79250583e-02 -4.64668460e-02 -5.13651185e-02 ... 1.23844698e-01 3.09429988e-02 -9.14824083e-02] [-5.34831360e-02 1.83370903e-01 2.01249957e-01 ... -1.84343860e-01 -7.04791816e-03 2.08348483e-01] [-6.36294261e-02 1.10931113e-01 1.15458161e-01 ... -1.61312462e-03 1.19016618e-01 1.29090928e-04] ... [ 1.77642554e-01 3.64843309e-02 -2.86365231e-03 ... -1.73741937e-01 5.46972416e-02 -1.01681970e-01] [-1.19369403e-01 -1.72855064e-01 -3.26757021e-02 ... 2.88110822e-02 5.04525602e-02 1.58709690e-01] [-2.56154239e-02 1.79842100e-01 -2.60459721e-01 ... -7.82023221e-02 1.92328945e-01 3.19393799e-02]] [[-1.15055703e-02 1.25918075e-01 2.89236400e-02 ... -1.48348019e-01 5.82159357e-03 8.07070136e-02] [ 1.18672922e-01 -1.25285313e-01 2.35121232e-02 ... 
1.29145637e-01 1.49301916e-01 5.64345494e-02] [ 6.60447329e-02 -2.17920244e-02 -2.59714890e-02 ... 7.39720464e-02 1.15159675e-02 5.45761772e-02] ... [ 5.94677478e-02 5.30197136e-02 -4.33450304e-02 ... 9.47731659e-02 1.43823743e-01 -5.75723015e-02] [-9.71915200e-02 7.35102519e-02 -2.52303742e-02 ... -5.32956906e-02 -3.66544351e-02 7.16160834e-02] [-1.86671212e-01 1.39882015e-02 -1.83286697e-01 ... -4.17257473e-02 1.36434734e-01 1.84963435e-01]]] [[[-2.53479052e+00 4.90600157e+00 2.05903077e+00 ... 2.71132719e-02 -3.01878065e-01 3.48207450e+00] [-1.35796499e+00 1.84132886e+00 2.53188825e+00 ... 1.09059012e+00 1.82671085e-01 3.71456854e-02] [-2.77803707e+00 -9.05918539e-01 -7.53725827e-01 ... 1.52587914e+00 -5.86490691e-01 8.33364129e-01] ... [ 2.89867163e+00 -7.07558766e-02 -6.72140121e-01 ... 1.59076834e+00 -6.14372611e-01 -1.27466404e+00] [-2.55829722e-01 5.41856997e-02 1.42619157e+00 ... -1.83009195e+00 8.24164689e-01 5.44737160e-01] [-4.63889122e-01 3.30593288e-01 2.62587714e+00 ... 2.55022478e+00 -2.91860366e+00 -3.04811239e-01]] [[ 2.89664054e+00 1.32864130e+00 8.49268287e-02 ... -5.25510848e-01 1.66588116e+00 8.78940463e-01] [-2.97972083e-01 1.71944106e+00 7.75471091e-01 ... 2.70987582e+00 5.36182165e-01 2.62184441e-01] [ 4.83125985e-01 -1.82362989e-01 -1.49917603e+00 ... 2.46515036e-01 -3.47287029e-01 1.59557533e+00] ... [ 1.93267047e+00 -1.50790644e+00 3.63329816e+00 ... -1.41518319e+00 5.38536370e-01 -5.78104973e-01] [-1.62622833e+00 3.66053581e-01 2.86653966e-01 ... 5.86734831e-01 7.80772567e-01 1.10820305e+00] [-7.93981194e-01 -2.37702203e+00 1.13912189e+00 ... -3.24254744e-02 1.66527152e+00 -1.30632269e+00]] [[-7.86791265e-01 2.51509714e+00 4.50480318e+00 ... -1.36812523e-01 2.88917851e+00 -3.63195562e+00] [ 1.91859722e-01 -3.52584147e+00 1.04950428e+00 ... 2.11590004e+00 5.58414161e-01 1.10089052e-02] [-3.68814468e+00 -1.35007071e+00 -2.04520583e+00 ... -3.30167007e+00 1.81072950e+00 -1.28514731e+00] ... 
[-2.94653559e+00 -9.39613581e-01 -4.81678128e-01 ... 2.58034915e-01 -5.13202846e-01 -1.28414941e+00] [ 2.98999488e-01 -4.00333136e-01 2.05487537e+00 ... -4.73896116e-01 1.80258107e+00 5.58795035e-01] [-9.37290609e-01 1.30275452e+00 2.61310863e+00 ... 2.86039162e+00 -2.60337561e-01 -6.77803516e-01]] ... [[-1.57128453e+00 7.89765298e-01 1.10471022e+00 ... 1.68325818e+00 -2.72414237e-01 -1.18539774e+00] [ 1.30890298e+00 3.30398530e-01 -3.01092339e+00 ... -2.58486830e-02 1.91165245e+00 -3.93194616e-01] [ 1.16982400e+00 -7.29315519e-01 -2.48029304e+00 ... 2.84298825e+00 -2.05596185e+00 1.66291785e+00] ... [ 5.96838057e-01 -1.10800171e+00 5.47933757e-01 ... -1.01230836e+00 -5.26741683e-01 2.74926782e+00] [-3.19184446e+00 3.32043394e-02 -4.34337282e+00 ... 8.27925444e-01 -1.06851041e+00 1.51614916e+00] [-1.46714294e+00 -2.66261005e+00 -1.18159902e+00 ... 8.14728498e-01 5.37670016e-01 -1.44736186e-01]] [[ 1.76833212e-01 -2.75226742e-01 1.30985498e+00 ... 3.13214302e+00 -9.89874661e-01 4.02960443e+00] [-2.76890969e+00 3.55202854e-01 1.86699107e-01 ... -8.42383623e-01 -1.20722330e+00 2.19894123e+00] [ 9.42519069e-01 -1.60168767e+00 -1.90934575e+00 ... 6.19267881e-01 -1.88544703e+00 1.22794664e+00] ... [-1.99593413e+00 -1.20936704e+00 -1.34726429e+00 ... 2.58583277e-01 1.09700537e+00 -1.89136660e+00] [-1.96887136e+00 -4.70122010e-01 9.87248778e-01 ... 1.03974617e+00 -1.85242426e+00 -2.59155321e+00] [-1.15363443e+00 1.00175694e-01 -3.84165853e-01 ... -4.66488861e-02 -5.91133595e-01 -2.56064057e+00]] [[ 1.45295739e+00 -8.27656686e-02 -7.17886031e-01 ... 1.66945148e+00 3.32266116e+00 -1.17616415e+00] [ 5.50173879e-01 3.60250354e-01 -1.23917162e+00 ... 1.76947045e+00 -4.42083359e-01 1.59505987e+00] [-1.55164897e+00 6.66085631e-03 -1.71493471e+00 ... -5.75393260e-01 3.42635214e-01 -1.08631325e+00] ... [-9.97459233e-01 1.40418947e+00 -4.14156646e-01 ... -1.29385495e+00 1.30544221e+00 -1.53613782e+00] [ 1.33116591e+00 4.07397598e-01 -5.55294812e-01 ... 
-1.49804580e+00 -2.54534215e-01 7.79125333e-01] [-8.20120394e-01 3.72328758e-01 1.58826277e-01 ... 2.05173111e+00 -1.10582948e+00 1.71504343e+00]]]]]; ov_res: [[[[[ 2.07046777e-01 1.54634550e-01 -8.93136412e-02 ... -1.06644727e-01 3.06004822e-01 4.52779502e-01] [-1.09744996e-01 -4.12288606e-01 -2.75902331e-01 ... 1.72984332e-01 5.84743135e-02 -5.77936292e-01] [ 9.87817794e-02 -3.18639427e-01 -3.42298001e-01 ... 2.90723860e-01 2.33017966e-01 -7.69811347e-02] ... [-3.53961676e-01 5.79270050e-02 2.13322341e-01 ... 4.76761550e-01 4.94930804e-01 4.55946594e-01] [ 9.90601331e-02 6.96200803e-02 2.60989785e-01 ... -7.28165805e-02 6.52100295e-02 -5.54185331e-01] [-3.69546451e-02 -2.28133172e-01 1.81230649e-01 ... 3.46720099e-01 -1.07433580e-01 1.46636099e-01]] [[ 4.02260453e-01 1.87046260e-01 1.23477392e-01 ... -1.79781049e-01 -1.08850837e-01 1.16542175e-01] [-6.01050220e-02 -3.21665108e-01 3.85464996e-01 ... -6.25273407e-01 1.24435499e-01 -2.51051366e-01] [-2.55904436e-01 1.11151390e-01 -4.30070050e-02 ... -9.98407602e-02 -2.04632223e-01 1.03691852e+00] ... [ 2.40817100e-01 -2.31958970e-01 -6.82194233e-02 ... -4.79756504e-01 3.22003752e-01 -2.24580035e-01] [ 5.56776077e-02 3.36166531e-01 -2.80152291e-01 ... 3.16265136e-01 1.46909326e-01 -3.29560816e-01] [-2.36825734e-01 1.54976159e-01 -2.12079640e-02 ... -2.85659153e-02 -1.00604594e-01 2.92478114e-01]] [[-1.23037212e-01 1.94689393e-01 -2.40130737e-01 ... 1.07302710e-01 -5.27230382e-01 -3.24451715e-01] [-3.07333052e-01 -4.25198734e-01 3.28973234e-01 ... 1.41548857e-01 -2.93506265e-01 1.06249139e-01] [-5.10463595e-01 -2.01438993e-01 4.50887889e-01 ... -4.85557541e-02 1.88994870e-01 1.33540705e-01] ... [-4.63851124e-01 -7.99462259e-01 -9.07471627e-02 ... -6.08597815e-01 -4.29602146e-01 2.68705279e-01] [ 8.64086486e-03 -1.56082317e-01 1.08833373e-01 ... 1.46352723e-01 3.46295834e-01 1.77783743e-02] [ 1.48017064e-01 4.76075888e-01 -3.73635620e-01 ... -9.72554758e-02 -1.73273757e-02 2.51974016e-01]] ... 
[[ 9.57382023e-02 2.50294190e-02 2.13276103e-01 ... -1.05051726e-01 -2.35513113e-02 -4.76495564e-01] [ 1.63661063e-01 -1.32750437e-01 -7.57067725e-02 ... 3.43828291e-01 -7.66361430e-02 3.58629793e-01] [ 1.57083109e-01 4.27894771e-01 -4.48108912e-02 ... -3.41851234e-01 1.24385148e-01 -4.11459982e-01] ... [-2.84175705e-02 -8.23856816e-02 4.96747531e-02 ... -1.22064650e-01 9.17725563e-01 5.13156116e-01] [ 1.45883679e-01 -4.06188458e-01 1.55174984e-02 ... 3.39906774e-02 6.06378764e-02 -1.98737457e-01] [ 3.28198642e-01 4.15361315e-01 -1.36564508e-01 ... -9.23510939e-02 3.12748075e-01 -5.19808173e-01]] [[ 1.13406204e-01 -1.56745553e-01 -5.52985847e-01 ... -7.26292133e-01 7.35496283e-02 7.14335963e-02] [-4.68934961e-02 -2.09252506e-01 2.94652075e-01 ... 1.81139976e-01 1.62018696e-03 4.06295955e-01] [-2.58420050e-01 -7.99957275e-01 -3.14503461e-01 ... -3.78643483e-01 -1.50843300e-02 -1.39144078e-01] ... [-1.75879765e-02 1.20667174e-01 -1.97788790e-01 ... 3.09662312e-01 -6.29545227e-02 1.27941757e-01] [-1.25356287e-01 2.19150722e-01 5.56162536e-01 ... 1.94917113e-01 6.88591957e-01 -2.61179835e-01] [ 3.19310904e-01 -4.28940296e-01 -2.92361051e-01 ... -3.68159205e-01 2.18485802e-01 4.67137583e-02]] [[ 3.32593709e-01 -2.27288291e-01 3.39383394e-01 ... -2.17480376e-01 -3.14913064e-01 -4.46271263e-02] [ 7.79164732e-02 -3.05153662e-03 1.46550566e-01 ... -2.37981007e-01 -1.91959932e-01 -2.52902508e-01] [ 3.10896605e-01 -4.03521091e-01 -1.73423082e-01 ... -2.30427399e-01 4.04464245e-01 -9.81615856e-02] ... [ 1.39516667e-01 3.96313101e-01 1.64084032e-01 ... -2.70620525e-01 9.16558877e-02 3.32721144e-01] [ 5.31338573e-01 1.96574435e-01 1.49064913e-01 ... -1.27061503e-03 1.38443321e-01 8.35697874e-02] [ 4.43754271e-02 -3.32503945e-01 3.66499871e-01 ... -3.59745979e-01 -2.33481213e-01 1.62402317e-01]]] [[[ 1.94111373e-02 -5.64109534e-04 1.84407202e-03 ... 7.60519952e-02 -2.89786085e-02 -5.08020865e-03] [-7.38401860e-02 -2.86930427e-02 3.59671600e-02 ... 
1.04222819e-01 1.25253096e-01 3.93412337e-02] [-2.91435514e-02 1.70466006e-01 -1.01626702e-01 ... -1.94002554e-01 1.87200814e-01 -1.56351144e-03] ... [-1.23467356e-01 6.21725013e-03 -8.24537799e-02 ... -2.12656423e-01 -8.38295147e-02 -4.94839251e-02] [ 3.32472660e-02 -4.47966438e-03 2.92233434e-02 ... 1.20502792e-01 -5.65020135e-03 -1.88874472e-02] [ 3.40570137e-02 4.57980894e-02 4.87834103e-02 ... -1.19101487e-01 2.31605247e-02 1.16946675e-01]] [[-2.85958126e-02 -1.17644444e-01 9.30741206e-02 ... 9.81089771e-02 -3.54642831e-02 -2.35247780e-02] [-6.91112950e-02 -1.60699725e-01 -8.27496424e-02 ... -9.48704407e-02 2.76503023e-02 4.96439524e-02] [-1.57515839e-01 7.78019950e-02 -6.29159436e-02 ... -4.55915295e-02 -6.27285391e-02 1.75474603e-02] ... [ 3.25166136e-02 -7.05148606e-03 7.49766752e-02 ... 7.54471645e-02 -2.35485539e-01 -6.23031892e-02] [ 1.52598366e-01 -1.53923929e-01 7.67508522e-02 ... -6.64322749e-02 1.11126853e-02 1.33218721e-01] [-2.18499333e-01 2.69769412e-02 3.69221973e-03 ... -1.43946316e-02 -9.52244624e-02 7.92587548e-02]] [[ 8.61960948e-02 -6.33823574e-02 -2.24963985e-02 ... -3.21058929e-02 3.06609608e-02 1.57459661e-01] [ 6.25731274e-02 -9.35477540e-02 -1.48709282e-01 ... 6.35258406e-02 5.10484129e-02 5.72384745e-02] [ 7.47294500e-02 -1.17253311e-01 -1.16427705e-01 ... 6.41393363e-02 -1.60030380e-01 4.24639694e-02] ... [ 9.77242589e-02 1.18313758e-02 -5.44146858e-02 ... 3.02339882e-01 2.26721149e-02 7.77113140e-02] [ 1.12643331e-01 1.06924310e-01 3.11159529e-02 ... 2.08886582e-02 -1.00205928e-01 -2.82833949e-02] [-9.11429748e-02 -7.05994666e-02 1.26071289e-01 ... -9.33034122e-02 -6.56380728e-02 1.26995951e-01]] ... [[-4.75061908e-02 -1.63675338e-01 3.23242173e-02 ... 4.59108949e-02 1.36599347e-01 -1.29714057e-01] [ 3.13684940e-02 1.89020578e-02 7.16466084e-03 ... -1.05334014e-01 1.42024632e-03 -1.25957072e-01] [-5.71836233e-02 5.82724176e-02 -7.46597676e-03 ... 9.89426896e-02 2.32240912e-02 1.44008920e-01] ... 
[ 1.00096144e-01 2.74505038e-02 -4.58830781e-02 ... 1.38949767e-01 5.01865055e-03 -1.20552003e-01] [ 3.10632810e-02 4.27713580e-02 1.13891788e-01 ... 6.31514490e-02 -4.37222235e-02 -1.36950567e-01] [-1.08295968e-02 -3.67165841e-02 4.39643562e-02 ... -6.81527629e-02 -5.56297153e-02 1.96146578e-01]] [[ 6.79250583e-02 -4.64668460e-02 -5.13651185e-02 ... 1.23844698e-01 3.09429988e-02 -9.14824083e-02] [-5.34831360e-02 1.83370903e-01 2.01249957e-01 ... -1.84343860e-01 -7.04791816e-03 2.08348483e-01] [-6.36294261e-02 1.10931113e-01 1.15458161e-01 ... -1.61312462e-03 1.19016618e-01 1.29090928e-04] ... [ 1.77642554e-01 3.64843309e-02 -2.86365231e-03 ... -1.73741937e-01 5.46972416e-02 -1.01681970e-01] [-1.19369403e-01 -1.72855064e-01 -3.26757021e-02 ... 2.88110822e-02 5.04525602e-02 1.58709690e-01] [-2.56154239e-02 1.79842100e-01 -2.60459721e-01 ... -7.82023221e-02 1.92328945e-01 3.19393799e-02]] [[-1.15055703e-02 1.25918075e-01 2.89236400e-02 ... -1.48348019e-01 5.82159357e-03 8.07070136e-02] [ 1.18672922e-01 -1.25285313e-01 2.35121232e-02 ... 1.29145637e-01 1.49301916e-01 5.64345494e-02] [ 6.60447329e-02 -2.17920244e-02 -2.59714890e-02 ... 7.39720464e-02 1.15159675e-02 5.45761772e-02] ... [ 5.94677478e-02 5.30197136e-02 -4.33450304e-02 ... 9.47731659e-02 1.43823743e-01 -5.75723015e-02] [-9.71915200e-02 7.35102519e-02 -2.52303742e-02 ... -5.32956906e-02 -3.66544351e-02 7.16160834e-02] [-1.86671212e-01 1.39882015e-02 -1.83286697e-01 ... -4.17257473e-02 1.36434734e-01 1.84963435e-01]]] [[[-2.53479052e+00 4.90600157e+00 2.05903077e+00 ... 2.71132719e-02 -3.01878065e-01 3.48207450e+00] [-1.35796499e+00 1.84132886e+00 2.53188825e+00 ... 1.09059012e+00 1.82671085e-01 3.71456854e-02] [-2.77803707e+00 -9.05918539e-01 -7.53725827e-01 ... 1.52587914e+00 -5.86490691e-01 8.33364129e-01] ... [ 2.89867163e+00 -7.07558766e-02 -6.72140121e-01 ... 1.59076834e+00 -6.14372611e-01 -1.27466404e+00] [-2.55829722e-01 5.41856997e-02 1.42619157e+00 ... 
-1.83009195e+00 8.24164689e-01 5.44737160e-01] [-4.63889122e-01 3.30593288e-01 2.62587714e+00 ... 2.55022478e+00 -2.91860366e+00 -3.04811239e-01]] [[ 2.89664054e+00 1.32864130e+00 8.49268287e-02 ... -5.25510848e-01 1.66588116e+00 8.78940463e-01] [-2.97972083e-01 1.71944106e+00 7.75471091e-01 ... 2.70987582e+00 5.36182165e-01 2.62184441e-01] [ 4.83125985e-01 -1.82362989e-01 -1.49917603e+00 ... 2.46515036e-01 -3.47287029e-01 1.59557533e+00] ... [ 1.93267047e+00 -1.50790644e+00 3.63329816e+00 ... -1.41518319e+00 5.38536370e-01 -5.78104973e-01] [-1.62622833e+00 3.66053581e-01 2.86653966e-01 ... 5.86734831e-01 7.80772567e-01 1.10820305e+00] [-7.93981194e-01 -2.37702203e+00 1.13912189e+00 ... -3.24254744e-02 1.66527152e+00 -1.30632269e+00]] [[-7.86791265e-01 2.51509714e+00 4.50480318e+00 ... -1.36812523e-01 2.88917851e+00 -3.63195562e+00] [ 1.91859722e-01 -3.52584147e+00 1.04950428e+00 ... 2.11590004e+00 5.58414161e-01 1.10089052e-02] [-3.68814468e+00 -1.35007071e+00 -2.04520583e+00 ... -3.30167007e+00 1.81072950e+00 -1.28514731e+00] ... [-2.94653559e+00 -9.39613581e-01 -4.81678128e-01 ... 2.58034915e-01 -5.13202846e-01 -1.28414941e+00] [ 2.98999488e-01 -4.00333136e-01 2.05487537e+00 ... -4.73896116e-01 1.80258107e+00 5.58795035e-01] [-9.37290609e-01 1.30275452e+00 2.61310863e+00 ... 2.86039162e+00 -2.60337561e-01 -6.77803516e-01]] ... [[-1.57128453e+00 7.89765298e-01 1.10471022e+00 ... 1.68325818e+00 -2.72414237e-01 -1.18539774e+00] [ 1.30890298e+00 3.30398530e-01 -3.01092339e+00 ... -2.58486830e-02 1.91165245e+00 -3.93194616e-01] [ 1.16982400e+00 -7.29315519e-01 -2.48029304e+00 ... 2.84298825e+00 -2.05596185e+00 1.66291785e+00] ... [ 5.96838057e-01 -1.10800171e+00 5.47933757e-01 ... -1.01230836e+00 -5.26741683e-01 2.74926782e+00] [-3.19184446e+00 3.32043394e-02 -4.34337282e+00 ... 8.27925444e-01 -1.06851041e+00 1.51614916e+00] [-1.46714294e+00 -2.66261005e+00 -1.18159902e+00 ... 
8.14728498e-01 5.37670016e-01 -1.44736186e-01]] [[ 1.76833212e-01 -2.75226742e-01 1.30985498e+00 ... 3.13214302e+00 -9.89874661e-01 4.02960443e+00] [-2.76890969e+00 3.55202854e-01 1.86699107e-01 ... -8.42383623e-01 -1.20722330e+00 2.19894123e+00] [ 9.42519069e-01 -1.60168767e+00 -1.90934575e+00 ... 6.19267881e-01 -1.88544703e+00 1.22794664e+00] ... [-1.99593413e+00 -1.20936704e+00 -1.34726429e+00 ... 2.58583277e-01 1.09700537e+00 -1.89136660e+00] [-1.96887136e+00 -4.70122010e-01 9.87248778e-01 ... 1.03974617e+00 -1.85242426e+00 -2.59155321e+00] [-1.15363443e+00 1.00175694e-01 -3.84165853e-01 ... -4.66488861e-02 -5.91133595e-01 -2.56064057e+00]] [[ 1.45295739e+00 -8.27656686e-02 -7.17886031e-01 ... 1.66945148e+00 3.32266116e+00 -1.17616415e+00] [ 5.50173879e-01 3.60250354e-01 -1.23917162e+00 ... 1.76947045e+00 -4.42083359e-01 1.59505987e+00] [-1.55164897e+00 6.66085631e-03 -1.71493471e+00 ... -5.75393260e-01 3.42635214e-01 -1.08631325e+00] ... [-9.97459233e-01 1.40418947e+00 -4.14156646e-01 ... -1.29385495e+00 1.30544221e+00 -1.53613782e+00] [ 1.33116591e+00 4.07397598e-01 -5.55294812e-01 ... -1.49804580e+00 -2.54534215e-01 7.79125333e-01] [-8.20120394e-01 3.72328758e-01 1.58826277e-01 ... 2.05173111e+00 -1.10582948e+00 1.71504343e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [1, 1, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_990.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.8805 (2,1,1,.,.) = 0.8701 (3,1,1,.,.) = -0.5727 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[-0.03317142 0.915371 -0.38573426 -1.788001 1.096643 1.0168177 0.19831136 1.2778447 ] [-0.5368799 0.20908357 -1.7826337 -1.740497 0.29506066 0.05385517 1.8436861 0.16381614] [-0.39225134 -0.63253105 2.1780589 1.3510123 0.47770098 1.6608781 0.6117697 -3.0546796 ] [-2.858848 0.819509 -0.80543876 -0.8045546 -0.5423505 -2.4380364 0.88725495 -1.834157 ] [-0.65508044 -1.1996448 -2.9475422 2.2334425 -1.1982539 0.10931724 -0.7376065 1.5522051 ] [-1.8224163 0.32577604 -0.4997488 1.9210407 -0.6898982 -0.4879652 -2.5244005 -1.2095377 ] [-0.3516433 2.1935887 3.4989855 -2.1106918 -1.6822736 0.98355615 -1.162252 0.17525741] [ 0.5895057 0.41318074 -2.508045 -0.55749524 -1.4339267 0.57813877 -1.4637452 0.8513456 ]] [[-0.75345314 1.5398703 0.5732688 -0.21959811 -0.6909234 0.08160591 0.40311298 -1.3496686 ] [-1.0820574 -1.7216996 -1.1664168 0.35272458 0.3764889 0.15033816 -0.92358553 -2.2285464 ] [ 3.2173533 -0.36337563 0.25717187 -0.44769353 -0.64068997 2.7777858 -1.0219177 -1.5276079 ] [-1.6912068 -1.5392482 -0.96413344 -1.7396443 -0.5388023 -0.09218105 0.54256696 0.52394223] [ 
1.3375925 0.5066776 1.9472916 1.500905 1.1589078 -2.4505918 0.30206418 0.57839876] [-2.8146071 -0.03904708 -1.1615245 0.03166322 -0.8149368 -1.7979674 -0.11918925 -1.0840324 ] [-0.64652437 -1.3519421 -1.2829287 -1.5992111 -1.9665154 1.115556 1.1041012 1.3211417 ] [-1.1230459 -0.6902639 1.867448 -0.01104303 1.4662939 0.58684176 -0.7635491 0.14504856]] [[ 0.09391603 1.2159008 -0.09977689 0.8955435 -1.96123 1.3267382 2.0742953 2.031168 ] [ 1.6580826 -1.0444067 1.2150578 -0.28064725 -2.0566115 -0.6946617 -1.9648086 -1.9548659 ] [ 0.5908156 1.3631716 -0.3020593 0.7967746 -0.96034676 -2.3906374 -2.553204 0.65417886] [ 1.4553447 0.4089139 -2.2038233 2.0670893 1.5445515 0.13061213 -1.8979197 0.33574814] [ 0.00979018 -2.2139678 1.6883782 -2.3640928 -0.6633895 -0.72553086 -0.3855708 -1.4727273 ] [ 2.0981417 0.0394815 0.61008185 1.3573601 -0.74905163 -0.23914754 1.232285 -0.10634608] [ 0.54218477 -0.1660427 2.9895623 2.1992471 -0.26689914 1.3764787 -1.4297584 -1.5239377 ] [-2.2793415 -0.16044033 -0.8073575 1.5243648 0.94144267 0.26295295 -2.361062 -0.7180525 ]] [[ 1.1273822 -0.90680605 -0.9017279 -0.6322414 -0.6947631 -0.6265415 -0.67995423 1.3461187 ] [ 2.3348157 2.1279917 -0.5988847 0.22898276 0.20042868 -1.3237983 0.8611685 1.0253799 ] [-1.9243263 1.1531771 -1.1368729 0.80243576 0.90872383 -0.10702308 0.1616822 -1.9168928 ] [ 1.8745977 0.7241509 -2.460259 -0.9139077 -1.1773645 0.99105155 -0.6704938 -0.80566126] [ 0.12309317 0.35114372 -1.5189211 1.5993689 -2.2210996 0.44885343 0.00908625 -0.18092944] [-0.8511644 1.8625256 -0.46751413 0.6038035 1.4647896 2.2217293 -0.5575442 1.754593 ] [-0.9169705 0.47534716 0.6681682 0.61630195 -0.70076466 1.1914573 1.1292495 1.1179612 ] [-0.12820716 2.549999 0.20808724 -0.17219138 -2.176315 1.6722885 -1.4180008 2.073297 ]] [[-0.31149167 1.2404083 2.1881726 -0.29043093 1.3402362 2.6692033 0.11310883 0.3553671 ] [ 0.889184 1.9720429 0.78758043 1.411212 1.0629933 -0.11800307 -0.9968486 0.69778216] [-0.6166087 -2.2911184 -0.5215372 0.54166514 
2.0914948 1.9365281 0.10634537 -1.0142276 ] [-0.34513223 -0.7788588 1.5772027 1.5863945 -0.25981864 -1.4733876 -1.3794264 -2.1940534 ] [-0.79479086 0.15238224 -1.8333255 0.6751236 -0.26828977 0.9283522 0.43004787 -1.2105349 ] [-1.9796864 -1.8875644 0.06193492 -0.58006 0.05320901 -1.5836636 -0.96031785 -0.66616243] [-1.4843158 0.59338063 -1.2990818 2.7745845 0.33692122 1.9183371 -0.03538371 -0.5582109 ] [-1.1937616 -1.0839016 -0.57467633 1.1224754 0.12943919 -1.3833807 -0.9521581 3.1054049 ]] [[-1.74687 -2.0062108 -0.3679901 2.5081177 -0.9808552 -0.2133374 1.7555705 -0.55215573] [-0.49188787 1.0608898 -0.36858678 -0.45983797 -1.4544399 -0.04639005 -0.40659285 0.9009135 ] [ 0.05724944 -0.6910407 -0.60946846 -0.45377982 0.44140112 -0.8262268 -1.7508914 -2.473631 ] [ 0.01485766 3.3164783 -1.391133 -2.9731677 1.1041965 1.5471245 0.33572733 -0.1367917 ] [ 3.1959517 0.71744096 -0.8644549 -2.5522926 0.19278143 1.3101817 -0.34468803 2.0914195 ] [ 0.3365448 1.6606945 1.0195192 0.93576527 1.3715564 3.9196963 -2.198381 -0.23053297] [ 1.9676505 -0.9642154 -1.9222987 2.5276783 0.6923939 0.22060372 0.09825614 -0.34433827] [ 1.5348399 -0.29551455 -1.3311177 -0.42260638 1.6044999 1.0249028 -0.04141596 -1.7084132 ]] [[-0.32843864 0.87666124 -1.5947183 -0.5027312 0.14619772 0.14104474 -1.1725119 0.1834495 ] [ 0.9296372 0.5423516 0.7729058 -1.7030491 -0.567318 -1.7713562 -0.37265512 -0.92622375] [-1.5341219 0.11395371 0.02975585 -0.3083565 -1.034354 -2.351005 -1.4075123 -3.4796636 ] [ 0.19537058 0.45584646 1.9503732 3.2244732 -2.4297163 0.48483288 0.38652205 0.54757875] [-1.3466938 -0.49380577 1.8745762 0.31288585 -1.2291292 0.70270085 2.4724388 -1.1171747 ] [-1.4044774 -1.6689743 -0.03586137 0.12102599 -1.2053695 -1.4660625 1.4157495 0.04886291] [ 3.8754027 -0.5792384 -0.0086737 -1.793264 1.564709 -1.3481203 -0.29680625 -1.6573005 ] [-0.11774301 1.6873615 -2.1161802 -0.6305761 2.3661551 -1.1310441 0.37005457 0.8149764 ]] [[ 1.5954102 0.15920208 1.7485 -1.7150506 -0.43214518 
-0.9800125 -0.59186673 0.11697683] [ 0.36490008 0.75823617 -0.33401257 -1.5593122 -1.2398057 -1.6177372 0.7182002 2.0711203 ] [ 0.05248446 0.3893195 0.6572665 -0.63759136 -1.6987032 -1.1570855 2.3315725 -1.3546355 ] [ 0.6220893 -1.4164172 -2.6497216 -0.40329376 0.894022 -0.09052049 -0.49158776 1.8611206 ] [ 0.61135 -1.5523049 -1.2290136 1.7090219 -0.7999593 0.05633954 -0.34798402 0.03367921] [ 0.9312749 -0.39215624 0.19831514 0.23133686 -0.08304679 1.1119875 0.50776863 -1.256313 ] [ 0.47380742 -0.5677951 -0.14822595 2.3619468 -0.97697407 4.854337 -0.5008078 -0.5394254 ] [ 2.3483348 -1.3146564 -0.5153242 -2.2773817 -0.86775696 0.8107575 -1.0313569 2.0698981 ]]]]]; ov_res: [[[[[-0.03317142 0.915371 -0.38573426 -1.788001 1.096643 1.0168177 0.19831136 1.2778447 ] [-0.5368799 0.20908357 -1.7826337 -1.740497 0.29506066 0.05385517 1.8436861 0.16381614] [-0.39225134 -0.63253105 2.1780589 1.3510123 0.47770098 1.6608781 0.6117697 -3.0546796 ] [-2.858848 0.819509 -0.80543876 -0.8045546 -0.5423505 -2.4380364 0.88725495 -1.834157 ] [-0.65508044 -1.1996448 -2.9475422 2.2334425 -1.1982539 0.10931724 -0.7376065 1.5522051 ] [-1.8224163 0.32577604 -0.4997488 1.9210407 -0.6898982 -0.4879652 -2.5244005 -1.2095377 ] [-0.3516433 2.1935887 3.4989855 -2.1106918 -1.6822736 0.98355615 -1.162252 0.17525741] [ 0.5895057 0.41318074 -2.508045 -0.55749524 -1.4339267 0.57813877 -1.4637452 0.8513456 ]] [[-0.75345314 1.5398703 0.5732688 -0.21959811 -0.6909234 0.08160591 0.40311298 -1.3496686 ] [-1.0820574 -1.7216996 -1.1664168 0.35272458 0.3764889 0.15033816 -0.92358553 -2.2285464 ] [ 3.2173533 -0.36337563 0.25717187 -0.44769353 -0.64068997 2.7777858 -1.0219177 -1.5276079 ] [-1.6912068 -1.5392482 -0.96413344 -1.7396443 -0.5388023 -0.09218105 0.54256696 0.52394223] [ 1.3375925 0.5066776 1.9472916 1.500905 1.1589078 -2.4505918 0.30206418 0.57839876] [-2.8146071 -0.03904708 -1.1615245 0.03166322 -0.8149368 -1.7979674 -0.11918925 -1.0840324 ] [-0.64652437 -1.3519421 -1.2829287 -1.5992111 -1.9665154 
1.115556 1.1041012 1.3211417 ] [-1.1230459 -0.6902639 1.867448 -0.01104303 1.4662939 0.58684176 -0.7635491 0.14504856]] [[ 0.09391603 1.2159008 -0.09977689 0.8955435 -1.96123 1.3267382 2.0742953 2.031168 ] [ 1.6580826 -1.0444067 1.2150578 -0.28064725 -2.0566115 -0.6946617 -1.9648086 -1.9548659 ] [ 0.5908156 1.3631716 -0.3020593 0.7967746 -0.96034676 -2.3906374 -2.553204 0.65417886] [ 1.4553447 0.4089139 -2.2038233 2.0670893 1.5445515 0.13061213 -1.8979197 0.33574814] [ 0.00979018 -2.2139678 1.6883782 -2.3640928 -0.6633895 -0.72553086 -0.3855708 -1.4727273 ] [ 2.0981417 0.0394815 0.61008185 1.3573601 -0.74905163 -0.23914754 1.232285 -0.10634608] [ 0.54218477 -0.1660427 2.9895623 2.1992471 -0.26689914 1.3764787 -1.4297584 -1.5239377 ] [-2.2793415 -0.16044033 -0.8073575 1.5243648 0.94144267 0.26295295 -2.361062 -0.7180525 ]] [[ 1.1273822 -0.90680605 -0.9017279 -0.6322414 -0.6947631 -0.6265415 -0.67995423 1.3461187 ] [ 2.3348157 2.1279917 -0.5988847 0.22898276 0.20042868 -1.3237983 0.8611685 1.0253799 ] [-1.9243263 1.1531771 -1.1368729 0.80243576 0.90872383 -0.10702308 0.1616822 -1.9168928 ] [ 1.8745977 0.7241509 -2.460259 -0.9139077 -1.1773645 0.99105155 -0.6704938 -0.80566126] [ 0.12309317 0.35114372 -1.5189211 1.5993689 -2.2210996 0.44885343 0.00908625 -0.18092944] [-0.8511644 1.8625256 -0.46751413 0.6038035 1.4647896 2.2217293 -0.5575442 1.754593 ] [-0.9169705 0.47534716 0.6681682 0.61630195 -0.70076466 1.1914573 1.1292495 1.1179612 ] [-0.12820716 2.549999 0.20808724 -0.17219138 -2.176315 1.6722885 -1.4180008 2.073297 ]] [[-0.31149167 1.2404083 2.1881726 -0.29043093 1.3402362 2.6692033 0.11310883 0.3553671 ] [ 0.889184 1.9720429 0.78758043 1.411212 1.0629933 -0.11800307 -0.9968486 0.69778216] [-0.6166087 -2.2911184 -0.5215372 0.54166514 2.0914948 1.9365281 0.10634537 -1.0142276 ] [-0.34513223 -0.7788588 1.5772027 1.5863945 -0.25981864 -1.4733876 -1.3794264 -2.1940534 ] [-0.79479086 0.15238224 -1.8333255 0.6751236 -0.26828977 0.9283522 0.43004787 -1.2105349 ] 
[-1.9796864 -1.8875644 0.06193492 -0.58006 0.05320901 -1.5836636 -0.96031785 -0.66616243] [-1.4843158 0.59338063 -1.2990818 2.7745845 0.33692122 1.9183371 -0.03538371 -0.5582109 ] [-1.1937616 -1.0839016 -0.57467633 1.1224754 0.12943919 -1.3833807 -0.9521581 3.1054049 ]] [[-1.74687 -2.0062108 -0.3679901 2.5081177 -0.9808552 -0.2133374 1.7555705 -0.55215573] [-0.49188787 1.0608898 -0.36858678 -0.45983797 -1.4544399 -0.04639005 -0.40659285 0.9009135 ] [ 0.05724944 -0.6910407 -0.60946846 -0.45377982 0.44140112 -0.8262268 -1.7508914 -2.473631 ] [ 0.01485766 3.3164783 -1.391133 -2.9731677 1.1041965 1.5471245 0.33572733 -0.1367917 ] [ 3.1959517 0.71744096 -0.8644549 -2.5522926 0.19278143 1.3101817 -0.34468803 2.0914195 ] [ 0.3365448 1.6606945 1.0195192 0.93576527 1.3715564 3.9196963 -2.198381 -0.23053297] [ 1.9676505 -0.9642154 -1.9222987 2.5276783 0.6923939 0.22060372 0.09825614 -0.34433827] [ 1.5348399 -0.29551455 -1.3311177 -0.42260638 1.6044999 1.0249028 -0.04141596 -1.7084132 ]] [[-0.32843864 0.87666124 -1.5947183 -0.5027312 0.14619772 0.14104474 -1.1725119 0.1834495 ] [ 0.9296372 0.5423516 0.7729058 -1.7030491 -0.567318 -1.7713562 -0.37265512 -0.92622375] [-1.5341219 0.11395371 0.02975585 -0.3083565 -1.034354 -2.351005 -1.4075123 -3.4796636 ] [ 0.19537058 0.45584646 1.9503732 3.2244732 -2.4297163 0.48483288 0.38652205 0.54757875] [-1.3466938 -0.49380577 1.8745762 0.31288585 -1.2291292 0.70270085 2.4724388 -1.1171747 ] [-1.4044774 -1.6689743 -0.03586137 0.12102599 -1.2053695 -1.4660625 1.4157495 0.04886291] [ 3.8754027 -0.5792384 -0.0086737 -1.793264 1.564709 -1.3481203 -0.29680625 -1.6573005 ] [-0.11774301 1.6873615 -2.1161802 -0.6305761 2.3661551 -1.1310441 0.37005457 0.8149764 ]] [[ 1.5954102 0.15920208 1.7485 -1.7150506 -0.43214518 -0.9800125 -0.59186673 0.11697683] [ 0.36490008 0.75823617 -0.33401257 -1.5593122 -1.2398057 -1.6177372 0.7182002 2.0711203 ] [ 0.05248446 0.3893195 0.6572665 -0.63759136 -1.6987032 -1.1570855 2.3315725 -1.3546355 ] [ 0.6220893 
-1.4164172 -2.6497216 -0.40329376 0.894022 -0.09052049 -0.49158776 1.8611206 ] [ 0.61135 -1.5523049 -1.2290136 1.7090219 -0.7999593 0.05633954 -0.34798402 0.03367921] [ 0.9312749 -0.39215624 0.19831514 0.23133686 -0.08304679 1.1119875 0.50776863 -1.256313 ] [ 0.47380742 -0.5677951 -0.14822595 2.3619468 -0.97697407 4.854337 -0.5008078 -0.5394254 ] [ 2.3483348 -1.3146564 -0.5153242 -2.2773817 -0.86775696 0.8107575 -1.0313569 2.0698981 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 1, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_992.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.4495 (2,1,1,.,.) = 1.6022 (3,1,1,.,.) = -0.2573 (1,2,1,.,.) = 0.01 * -3.9479 (2,2,1,.,.) = -0.4297 (3,2,1,.,.) = 0.01 * 2.2308 (1,3,1,.,.) = -0.3168 (2,3,1,.,.) = -0.1875 (3,3,1,.,.) = -0.1542 [ CPUFloatType{3,3,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.strides, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.8644353 0.82197636 ... 0.40553832 -0.6168585 0. ] [ 0. 0.05088005 0.22242557 ... -0.8731904 -0.81351835 0. ] ... [ 0. 1.2844545 -0.52856994 ... 0.21903849 0.25411403 0. ] [ 0. -0.3774052 1.4100637 ... -0.10656687 0.61054885 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.29249135 1.1685779 ... 0.12446842 0.89055395 0. ] [ 0. -0.11505813 -0.87469983 ... -0.3675946 0.20017137 0. ] ... [ 0. 0.6530865 0.75366163 ... 0.13205542 -0.44155282 0. ] [ 0. 0.40037853 -0.767469 ... -0.43526164 -0.56182086 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.0098851 0.34693253 ... 0.076163 -0.02163416 0. ] [ 0. 0.72499 0.08768281 ... 0.18469888 -0.04165241 0. ] ... [ 0. -0.79301137 1.1728954 ... 
-0.3019603 0.09809598 0. ] [ 0. 0.16415946 0.20744586 ... 0.10924737 -0.28905043 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.7282542 -0.02970614 ... -0.27103215 0.14933744 0. ] [ 0. -0.18608756 -0.8563071 ... -0.38719606 0.4813889 0. ] ... [ 0. 0.72714764 -0.98117507 ... 0.24464753 -1.0708107 0. ] [ 0. -0.16338086 0.74171203 ... 0.59214956 -0.6612854 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -3.3881907 -2.9059987 ... -1.3109974 -0.57032835 0. ] [ 0. -0.7533545 0.9688649 ... 2.4061458 1.0863016 0. ] ... [ 0. -2.217691 1.2204498 ... -2.39678 1.4011089 0. ] [ 0. 0.3829849 -2.5718424 ... 2.2244594 -2.404081 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.40140063 -1.7096751 ... -1.7190453 -4.5554733 0. ] [ 0. -0.76508564 1.0417228 ... 0.6189643 1.9875555 0. ] ... [ 0. -1.2153994 -4.096236 ... -0.49816158 1.8966916 0. ] [ 0. 0.5111411 1.5547304 ... -0.69554317 1.6345024 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.28721443 -0.3219261 ... -0.07748396 -0.43377087 0. ] [ 0. 0.3562881 -0.23502526 ... 0.73686117 0.37069556 0. ] ... [ 0. 1.8241276 -3.2785912 ... 2.106742 0.5064992 0. ] [ 0. -0.90241945 -0.9540034 ... -0.50572383 -0.5372591 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.9947376 0.51463497 ... 0.6235851 0.7430547 0. ] [ 0. 0.37202793 1.5670912 ... 0.7634328 0.02295174 0. ] ... [ 0. -4.2032847 1.243629 ... 0.41297364 1.3679446 0. ] [ 0. 1.9382465 -0.716472 ... -3.0066392 1.0323458 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 
0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.49717328 0.50707394 ... 0.24503513 -0.3041191 0. ] [ 0. 0.06663148 0.07528404 ... -0.52780885 -0.41963857 0. ] ... [ 0. 0.69690055 -0.31251794 ... 0.19851369 0.01873501 0. ] [ 0. -0.17221169 0.7296578 ... -0.1699778 0.33793744 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.21699715 0.61863196 ... 0.12440258 0.5867349 0. ] [ 0. -0.0105215 -0.41965434 ... -0.14752206 0.04086864 0. ] ... [ 0. 0.3608917 0.47471315 ... 0.08182139 -0.27001905 0. ] [ 0. 0.14161366 -0.4301636 ... -0.2070855 -0.29530534 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.0320765 0.25161186 ... 0.0321961 0.0295383 0. ] [ 0. 0.29935944 0.03279342 ... 0.04533187 -0.01003717 0. ] ... [ 0. -0.39549974 0.62036306 ... -0.20242256 -0.02888574 0. ] [ 0. 0.07466385 0.20399305 ... 0.08077534 -0.10341299 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.43644357 -0.07241317 ... -0.11921836 0.04532188 0. ] [ 0. -0.106352 -0.42639488 ... -0.19411865 0.2201634 0. ] ... [ 0. 0.4858101 -0.54204696 ... 0.05386824 -0.5777471 0. ] [ 0. -0.12364294 0.33949617 ... 0.43855834 -0.35176983 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]]]; ov_res: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.8644353 0.82197636 ... 0.40553832 -0.6168585 0. ] [ 0. 0.05088005 0.22242557 ... -0.8731904 -0.81351835 0. ] ... [ 0. 1.2844545 -0.52856994 ... 0.21903849 0.25411403 0. ] [ 0. -0.3774052 1.4100637 ... -0.10656687 0.61054885 0. ] [ 0. 0. 0. ... 0. 0. 
0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.29249135 1.1685779 ... 0.12446842 0.89055395 0. ] [ 0. -0.11505813 -0.87469983 ... -0.3675946 0.20017137 0. ] ... [ 0. 0.6530865 0.75366163 ... 0.13205542 -0.44155282 0. ] [ 0. 0.40037853 -0.767469 ... -0.43526164 -0.56182086 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.0098851 0.34693253 ... 0.076163 -0.02163416 0. ] [ 0. 0.72499 0.08768281 ... 0.18469888 -0.04165241 0. ] ... [ 0. -0.79301137 1.1728954 ... -0.3019603 0.09809598 0. ] [ 0. 0.16415946 0.20744586 ... 0.10924737 -0.28905043 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.7282542 -0.02970614 ... -0.27103215 0.14933744 0. ] [ 0. -0.18608756 -0.8563071 ... -0.38719606 0.4813889 0. ] ... [ 0. 0.72714764 -0.98117507 ... 0.24464753 -1.0708107 0. ] [ 0. -0.16338086 0.74171203 ... 0.59214956 -0.6612854 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -3.3881907 -2.9059987 ... -1.3109974 -0.57032835 0. ] [ 0. -0.7533545 0.9688649 ... 2.4061458 1.0863016 0. ] ... [ 0. -2.217691 1.2204498 ... -2.39678 1.4011089 0. ] [ 0. 0.3829849 -2.5718424 ... 2.2244594 -2.404081 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.40140063 -1.7096751 ... -1.7190453 -4.5554733 0. ] [ 0. -0.76508564 1.0417228 ... 0.6189643 1.9875555 0. ] ... [ 0. -1.2153994 -4.096236 ... -0.49816158 1.8966916 0. ] [ 0. 0.5111411 1.5547304 ... -0.69554317 1.6345024 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.28721443 -0.3219261 ... -0.07748396 -0.43377087 0. ] [ 0. 0.3562881 -0.23502526 ... 0.73686117 0.37069556 0. ] ... [ 0. 1.8241276 -3.2785912 ... 2.106742 0.5064992 0. ] [ 0. 
-0.90241945 -0.9540034 ... -0.50572383 -0.5372591 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.9947376 0.51463497 ... 0.6235851 0.7430547 0. ] [ 0. 0.37202793 1.5670912 ... 0.7634328 0.02295174 0. ] ... [ 0. -4.2032847 1.243629 ... 0.41297364 1.3679446 0. ] [ 0. 1.9382465 -0.716472 ... -3.0066392 1.0323458 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.49717328 0.50707394 ... 0.24503513 -0.3041191 0. ] [ 0. 0.06663148 0.07528404 ... -0.52780885 -0.41963857 0. ] ... [ 0. 0.69690055 -0.31251794 ... 0.19851369 0.01873501 0. ] [ 0. -0.17221169 0.7296578 ... -0.1699778 0.33793744 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.21699715 0.61863196 ... 0.12440258 0.5867349 0. ] [ 0. -0.0105215 -0.41965434 ... -0.14752206 0.04086864 0. ] ... [ 0. 0.3608917 0.47471315 ... 0.08182139 -0.27001905 0. ] [ 0. 0.14161366 -0.4301636 ... -0.2070855 -0.29530534 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.0320765 0.25161186 ... 0.0321961 0.0295383 0. ] [ 0. 0.29935944 0.03279342 ... 0.04533187 -0.01003717 0. ] ... [ 0. -0.39549974 0.62036306 ... -0.20242256 -0.02888574 0. ] [ 0. 0.07466385 0.20399305 ... 0.08077534 -0.10341299 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.43644357 -0.07241317 ... -0.11921836 0.04532188 0. ] [ 0. -0.106352 -0.42639488 ... -0.19411865 0.2201634 0. ] ... [ 0. 0.4858101 -0.54204696 ... 0.05386824 -0.5777471 0. ] [ 0. -0.12364294 0.33949617 ... 0.43855834 -0.35176983 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [3, 1, 3], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_994.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[3, 1, 3]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.7566 (2,1,1,.,.) = -0.7219 (3,1,1,.,.) = 0.6153 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[-0.2961525 -0.07959002 0.5115597 1.028505 ] [-2.1138935 -0.96852493 0.6838562 1.1885104 ] [ 0.8490671 -1.7991263 0.7173609 1.2968543 ] [-0.8250056 0.63287246 -0.03254403 0.4274413 ] [ 0.4353276 -0.48141173 -0.22581945 -0.20097929] [-0.67365676 -0.1829598 0.4424058 -0.5940715 ] [-2.2809799 -0.08625183 -0.7858385 -0.43790105] [ 1.74909 0.9286642 1.0525643 -1.9623799 ]] [[-1.015416 2.607362 2.8665087 -0.11410157] [ 1.7984464 0.9389577 -0.59376454 2.2942786 ] [ 0.9875402 0.94292706 -1.1205323 -0.671342 ] [-1.7374235 0.65034294 -1.8875841 2.0833325 ] [ 1.7822893 0.69836086 -1.2217368 -1.5880247 ] [-1.3723382 -1.112709 0.2811917 -0.90948844] [-0.45732182 2.0568728 -0.0194046 1.836822 ] [ 1.178052 -0.47470734 -1.0304068 -1.7451242 ]] [[-0.76422125 -1.36919 0.6128707 0.8011404 ] [-0.82171655 -1.4948967 -0.4257697 -0.27343336] [ 0.39498723 1.3151762 0.81753725 -2.1715004 ] [-0.42526504 -0.1426941 1.9528967 0.37714618] [-0.2539998 1.0701762 -1.0017172 0.91911566] [-1.5936307 0.6643257 0.89698076 -0.5343499 ] [ 2.8516567 
0.8486624 -1.0319728 1.9581634 ] [ 2.19641 -1.6850922 0.9643556 -0.19526571]] [[ 0.34432238 -0.03329761 -0.45813847 0.98868835] [-0.3628866 -1.3331542 -1.2279305 -0.65400755] [ 0.5333225 0.9681241 1.1916634 -2.0933058 ] [ 0.31988055 0.81485146 -0.3122232 -1.2210833 ] [ 0.13851564 0.2994397 0.18581545 0.19381066] [-1.6399668 -0.8265774 -0.28089136 -1.882211 ] [ 0.39135435 1.8768892 -1.029999 -1.6829302 ] [-0.9232661 0.2357185 -0.7536343 -0.2876369 ]]]]]; ov_res: [[[[[-0.2961525 -0.07959002 0.5115597 1.028505 ] [-2.1138935 -0.96852493 0.6838562 1.1885104 ] [ 0.8490671 -1.7991263 0.7173609 1.2968543 ] [-0.8250056 0.63287246 -0.03254403 0.4274413 ] [ 0.4353276 -0.48141173 -0.22581945 -0.20097929] [-0.67365676 -0.1829598 0.4424058 -0.5940715 ] [-2.2809799 -0.08625183 -0.7858385 -0.43790105] [ 1.74909 0.9286642 1.0525643 -1.9623799 ]] [[-1.015416 2.607362 2.8665087 -0.11410157] [ 1.7984464 0.9389577 -0.59376454 2.2942786 ] [ 0.9875402 0.94292706 -1.1205323 -0.671342 ] [-1.7374235 0.65034294 -1.8875841 2.0833325 ] [ 1.7822893 0.69836086 -1.2217368 -1.5880247 ] [-1.3723382 -1.112709 0.2811917 -0.90948844] [-0.45732182 2.0568728 -0.0194046 1.836822 ] [ 1.178052 -0.47470734 -1.0304068 -1.7451242 ]] [[-0.76422125 -1.36919 0.6128707 0.8011404 ] [-0.82171655 -1.4948967 -0.4257697 -0.27343336] [ 0.39498723 1.3151762 0.81753725 -2.1715004 ] [-0.42526504 -0.1426941 1.9528967 0.37714618] [-0.2539998 1.0701762 -1.0017172 0.91911566] [-1.5936307 0.6643257 0.89698076 -0.5343499 ] [ 2.8516567 0.8486624 -1.0319728 1.9581634 ] [ 2.19641 -1.6850922 0.9643556 -0.19526571]] [[ 0.34432238 -0.03329761 -0.45813847 0.98868835] [-0.3628866 -1.3331542 -1.2279305 -0.65400755] [ 0.5333225 0.9681241 1.1916634 -2.0933058 ] [ 0.31988055 0.81485146 -0.3122232 -1.2210833 ] [ 0.13851564 0.2994397 0.18581545 0.19381066] [-1.6399668 -0.8265774 -0.28089136 -1.882211 ] [ 0.39135435 1.8768892 -1.029999 -1.6829302 ] [-0.9232661 0.2357185 -0.7536343 -0.2876369 ]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [3, 1, 3], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_996.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[3, 1, 3]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.3532 (2,1,1,.,.) = 0.3515 (3,1,1,.,.) = -0.4324 (1,2,1,.,.) = 0.01 * 2.3704 (2,2,1,.,.) = -0.9379 (3,2,1,.,.) = -1.1633 (1,3,1,.,.) = -0.8268 (2,3,1,.,.) = 0.6839 (3,3,1,.,.) = 0.3253 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 
0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 
0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 
0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'bias_shape': [1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_998.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.3108 (2,1,1,.,.) = -0.1290 (3,1,1,.,.) = -1.5761 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[-1.84629011e+00 5.77052176e-01 2.25451255e+00 -2.29191288e-01 6.13764040e-02 2.09056211e+00 -4.50460911e+00 -8.24914992e-01 -2.64436269e+00 -1.23543310e+00] [-5.50305128e-01 -8.38430285e-01 1.49772274e+00 6.48287535e-01 -1.87402654e+00 1.01437375e-01 -7.81735361e-01 1.76053500e+00 3.62060666e+00 -7.69068837e-01] [-1.78959608e+00 -2.80918527e+00 3.56480122e-01 3.04637837e+00 1.54746795e+00 -9.84248638e-01 -1.14279404e-01 1.36497843e+00 -3.30869603e+00 1.59259272e+00] [ 8.41881692e-01 -1.26637447e+00 2.43111944e+00 8.31537366e-01 7.77763247e-01 -8.63798559e-01 -1.14850056e+00 -1.64416626e-01 -9.97672044e-03 -1.64756811e+00] [-2.64030743e+00 7.89076388e-01 -2.30643988e+00 2.10326838e+00 1.49461222e+00 -1.27205992e+00 2.49123549e+00 -2.96701360e+00 -1.51727533e+00 1.42307031e+00] [-2.67656112e+00 -1.01856267e+00 -2.01452807e-01 3.36830765e-01 -5.84371686e-01 1.92935097e+00 2.21602106e+00 1.56734252e+00 6.23200774e-01 -1.65797246e+00] [ 2.84646487e+00 -1.20958078e+00 4.34579182e+00 1.50751817e+00 2.20422626e+00 
9.89433646e-01 -1.11947691e+00 2.21932268e+00 3.17152202e-01 -1.23743916e+00] [-1.05101414e-01 2.63489580e+00 2.72720981e+00 1.71769872e-01 -1.81818342e+00 -7.97467649e-01 3.21351743e+00 7.04637706e-01 -1.10861802e+00 2.93964297e-02] [-1.11279440e+00 1.12046385e+00 -7.91410565e-01 -1.21623373e+00 7.42876470e-01 -1.79045415e+00 9.91495848e-01 1.20923984e+00 2.79537626e-02 2.16335797e+00] [ 7.58222699e-01 8.92887354e-01 4.58040506e-01 2.31033683e+00 -3.27201319e+00 2.06641316e+00 4.24423128e-01 3.06006581e-01 -1.53671503e-01 1.87203622e+00]] [[ 1.97495461e+00 -1.60550785e+00 3.29172969e+00 9.02898729e-01 6.65100813e-02 -3.85656625e-01 -8.73957634e-01 -1.73439753e+00 -5.41575253e-02 -1.48752928e-01] [ 2.00893760e+00 5.63019097e-01 1.38936833e-01 4.21524334e+00 3.20042968e+00 -3.34408164e+00 3.07627463e+00 1.54436743e+00 3.32161164e+00 -1.14624906e+00] [ 1.16551943e-01 1.17608905e+00 -7.19234273e-02 -4.07788783e-01 4.39430743e-01 -5.01386356e+00 1.45770288e+00 -2.76176524e+00 -7.69732594e-01 9.48590398e-01] [ 5.70815980e-01 -1.28051913e+00 -4.68004346e-01 -6.63009107e-01 1.95347235e-01 -6.56844556e-01 2.19805145e+00 4.16490614e-01 -1.64155090e+00 -8.50676894e-01] [-2.04866022e-01 -3.57278466e-01 -2.31860590e+00 3.97448927e-01 -6.67358935e-01 -4.15033817e-01 2.54328203e+00 1.21742225e+00 -3.11681581e+00 -1.06279564e+00] [ 5.67974038e-02 -4.15312737e-01 -1.80162740e+00 6.03602707e-01 1.44327998e-01 1.10866308e+00 -3.70071262e-01 2.28061175e+00 1.88683534e+00 1.70988679e+00] [-5.49213886e-01 2.23216042e-01 1.34293222e+00 1.93507290e+00 1.02022898e+00 3.00071383e+00 -4.40232962e-01 -2.64669251e+00 4.00656414e+00 1.15552688e+00] [-1.57746446e+00 5.48691750e-02 6.14273846e-01 -3.66861254e-01 9.19469178e-01 1.21796057e-01 -2.12102942e-02 -2.36125994e+00 -1.07308757e+00 1.53813720e+00] [ 2.52423334e+00 1.35136974e+00 -2.81491041e+00 -5.20373464e-01 -1.75058353e+00 -4.88797545e-01 -2.04050612e+00 -1.40748405e+00 4.00734377e+00 -2.31649375e+00] [-1.12349534e+00 3.97226006e-01 
-5.85540473e-01 2.43786335e+00 1.74197960e+00 -1.81738746e+00 -1.44071388e+00 5.75559974e-01 -3.11185336e+00 1.38598442e+00]] [[-1.08948939e-01 -5.39542973e-01 1.40486932e+00 -1.31081867e+00 -2.91508198e+00 -7.60546386e-01 -8.04094970e-01 -2.82231116e+00 -2.81943941e+00 -6.38532778e-03] [-1.66250443e+00 -7.20647499e-02 3.58819175e+00 1.92474937e+00 2.65079069e+00 1.84262717e+00 -6.31845295e-02 -3.54147863e+00 -5.56900024e-01 -2.54167151e+00] [-8.14509332e-01 -7.74196208e-01 -2.50609255e+00 7.14370310e-01 -2.90907454e+00 2.99006724e+00 -4.56546485e-01 -1.22238159e+00 1.99998081e+00 1.25780666e+00] [-1.83422148e+00 -1.73366046e+00 6.66743577e-01 -1.41163731e+00 2.42543173e+00 2.20859504e+00 2.28241158e+00 -1.06975533e-01 -1.91350126e+00 2.35972703e-01] [ 9.68470633e-01 -4.64882106e-01 -2.29807109e-01 -1.15239829e-01 -3.33532047e+00 -1.53862190e+00 1.13402605e+00 -1.25627863e+00 -5.73318422e-01 2.82914728e-01] [ 2.05537772e+00 -3.44478458e-01 2.76674056e+00 1.14764512e+00 9.07419682e-01 -5.23770392e-01 -7.19482183e-01 -7.49776781e-01 1.06044078e+00 -7.24912882e-01] [ 6.30261898e-01 6.89934790e-02 -9.95890737e-01 8.94793451e-01 -3.61874282e-01 8.58313859e-01 1.64160097e+00 2.82572436e+00 1.58015716e+00 1.82635915e+00] [-1.83913320e-01 -2.22896442e-01 6.77270964e-02 -2.42245126e+00 -2.31260851e-01 1.37322903e+00 -1.35117039e-01 1.09366822e+00 1.00880921e+00 1.16024904e-01] [-3.24672723e+00 -2.22855210e+00 -2.12224245e+00 -1.59038210e+00 -5.88618934e-01 -7.39147186e-01 -1.58260894e+00 -1.28970683e+00 4.48425561e-01 -1.82709742e+00] [ 7.98360229e-01 -3.22457743e+00 -4.48106003e+00 -6.29274189e-01 8.47903490e-01 3.41429114e-01 1.00963986e+00 -2.04782438e+00 -8.30823898e-01 -1.31871819e+00]] [[ 1.12886131e+00 -2.44865060e+00 5.77408314e-01 1.41419458e+00 2.26839647e-01 1.07127941e+00 -8.55825782e-01 2.72339797e+00 3.54133576e-01 7.23992527e-01] [ 1.18415380e+00 2.43346497e-01 -4.00986552e-01 -3.07536864e+00 9.71120477e-01 1.32685626e+00 -1.80917993e-01 -2.98929405e+00 
-6.19063079e-01 1.35417879e+00] [ 6.28514826e-01 -2.13317060e+00 -6.47628725e-01 6.04194641e-01 5.95374763e-01 -1.10851265e-01 4.42343205e-01 1.33387327e+00 4.86762345e-01 6.67850375e-01] [ 2.75804305e+00 1.21880481e-02 3.26689529e+00 9.59278584e-01 -1.04463066e-03 -7.64909208e-01 3.85357857e-01 -7.41018176e-01 3.24739122e+00 -9.30692434e-01] [-9.68659639e-01 1.22098401e-01 1.82280934e+00 -1.84460700e-01 -2.57271498e-01 -2.94709317e-02 -1.44729578e+00 -3.55993271e-01 2.33742905e+00 -7.09899962e-01] [ 3.82419318e-01 2.16795731e+00 1.87211609e+00 4.81353909e-01 -6.88543022e-01 2.14167237e+00 -1.30071926e+00 -3.41907430e+00 -7.27548957e-01 -4.32923508e+00] [-2.08789062e+00 1.19251347e+00 3.05273724e+00 -3.90197545e-01 -6.70833230e-01 -5.67197144e-01 6.56505108e-01 -9.03532267e-01 -5.95497131e-01 5.25077544e-02] [-3.14760447e+00 2.41738820e+00 -1.30311638e-01 -6.61612988e-01 -7.51428902e-01 -1.52059925e+00 -2.29526758e+00 -6.50332510e-01 2.51475525e+00 4.06972915e-01] [-1.08598888e+00 -1.46100831e+00 1.59322298e+00 -8.30475986e-01 5.09076416e-01 1.77128637e+00 -2.24169660e+00 -4.01894987e-01 1.02462792e+00 -4.57539707e-01] [ 4.17232215e-01 2.51761031e+00 -2.85741019e+00 -1.84320438e+00 2.45050001e+00 6.27358854e-01 -1.55316472e+00 3.18300426e-01 -1.17538345e+00 1.65435314e+00]] [[-7.83624426e-02 8.87159705e-01 3.35721672e-01 1.08468711e-01 4.52151477e-01 6.52664363e-01 -2.68651938e+00 2.58784080e+00 -2.44042921e+00 -1.94779307e-01] [ 2.09509659e+00 1.04622328e+00 6.24114096e-01 7.59641349e-01 -6.74564958e-01 5.99245727e-01 1.61454344e+00 -1.14831197e+00 1.42182231e+00 7.94046298e-02] [ 2.06464601e+00 1.30474234e+00 1.36189306e+00 1.93631664e-01 1.79005325e+00 -7.72534966e-01 6.32121861e-01 1.09337473e+00 -1.73717782e-01 -3.03661871e+00] [-1.66537786e+00 4.78281856e-01 -1.88219404e+00 1.42963862e+00 6.00652635e-01 1.22665393e+00 -1.30671954e+00 -1.47691453e+00 3.95663333e+00 -1.52829659e+00] [ 2.43242550e+00 2.21310806e+00 -2.06093431e-01 1.04624534e+00 -1.62831560e-01 
2.02560872e-01 2.00074482e+00 8.88472438e-01 1.15829611e+00 -1.82584560e+00] [-1.63666785e+00 1.93589997e+00 1.87740195e+00 -1.93779719e+00 -3.46305037e+00 6.04268491e-01 4.80900913e-01 -1.78222907e+00 -4.67040390e-01 1.05098522e+00] [ 1.72733799e-01 -2.32460022e+00 6.99315667e-01 1.40120649e+00 -6.96683586e-01 -7.76890218e-01 -9.17753112e-03 -5.40767312e-01 1.00587654e+00 1.25609744e+00] [ 8.09002817e-02 2.86730671e+00 -9.02971983e-01 9.70937669e-01 -2.95501977e-01 -1.92440760e+00 2.07778263e+00 1.88203216e-01 2.10247064e+00 1.55463409e+00] [ 6.18367612e-01 -4.33687091e-01 2.41464424e+00 -6.10695422e-01 1.35265493e+00 9.57072973e-01 4.43215221e-01 -1.19812739e+00 1.44150901e+00 3.89080048e+00] [-2.00227928e+00 3.63252252e-01 -2.37212491e+00 4.72821385e-01 1.71937609e+00 -1.05678998e-01 1.06252110e+00 1.37486875e+00 -1.14843690e+00 -7.07384050e-01]] [[ 1.27824163e+00 -1.02595782e+00 4.41770464e-01 9.52243507e-01 -5.00445366e-01 3.46496016e-01 3.68112653e-01 -2.83925980e-01 3.17094952e-01 -1.65681469e+00] [-6.02862418e-01 9.25889194e-01 8.26401055e-01 5.17390072e-01 -8.83594513e-01 -8.81983757e-01 3.36111355e+00 -2.02427268e+00 2.14280105e+00 -1.98396158e+00] [-7.70684183e-02 -9.06561315e-01 2.31538296e+00 1.40859747e+00 -2.88424730e+00 -8.93450454e-02 -1.41124025e-01 1.21754444e+00 -1.24713624e+00 6.02012634e-01] [-7.17073858e-01 -5.01116961e-02 3.41376573e-01 1.59933221e+00 1.53867245e+00 6.92216456e-01 -8.39826524e-01 -8.85184348e-01 2.98114848e+00 -9.78785157e-01] [ 1.01356578e+00 3.18419242e+00 3.92093599e-01 -1.14042914e+00 -5.84462166e-01 8.97995532e-01 7.59178996e-01 -5.53099823e+00 -4.88214523e-01 2.64261150e+00] [ 1.76148677e+00 -2.51589656e+00 -1.54346311e+00 2.71213472e-01 4.44347084e-01 -3.82425666e-01 7.83528149e-01 -2.34657216e+00 -2.57463980e+00 1.77654362e+00] [ 1.35298431e+00 1.86446977e+00 -7.18019366e-01 -3.43525195e+00 3.27495694e+00 -6.18897557e-01 -2.15017319e+00 -2.97397470e+00 -1.85167909e+00 -8.09942484e-01] [ 2.37432599e+00 -7.94775188e-01 
-1.26594782e+00 2.83732033e+00 2.42926049e+00 -8.25337395e-02 -9.65961933e-01 2.29981971e+00 -8.04863095e-01 3.15033507e+00] [-3.85530770e-01 -1.19560945e+00 1.21875143e+00 -1.25683993e-02 2.46985197e-01 4.64736879e-01 -1.83687913e+00 6.25964999e-01 -5.28494477e-01 1.94096088e+00] [-1.49456963e-01 -1.25090528e+00 1.38062924e-01 -7.61955857e-01 4.32427138e-01 1.74345934e+00 -1.37762094e+00 2.95858335e+00 -1.26857615e+00 1.71053335e-02]] [[ 6.27976835e-01 -1.49568915e+00 7.43106976e-02 -3.18383551e+00 1.71910036e+00 1.20431566e+00 -3.21654749e+00 -3.03501815e-01 3.52286667e-01 1.07657576e+00] [ 9.99996364e-01 1.35519159e+00 -1.40050960e+00 -4.85096604e-01 2.14344192e+00 5.92369437e-01 4.95543063e-01 1.09446108e+00 1.29784214e+00 3.51297557e-01] [-2.58438200e-01 -1.27655303e+00 -5.47859371e-01 -1.70259464e+00 1.20475221e+00 3.34086347e+00 1.35713029e+00 2.53352141e+00 -1.54420280e+00 1.79903650e+00] [-1.87662899e+00 -1.05813158e+00 1.65498093e-01 -2.14408922e+00 9.54035401e-01 -3.45530844e+00 -1.86973119e+00 2.91419387e+00 1.37607932e+00 -1.17474985e+00] [ 1.44889283e+00 -2.29503059e+00 1.32355917e+00 -1.53677356e+00 -8.77072096e-01 -3.22310090e+00 -5.14055550e-01 -2.37991595e+00 1.72733164e+00 2.26046920e+00] [-3.19414258e+00 -3.35237741e+00 9.76746261e-01 -1.62593532e+00 1.91517138e+00 1.38162899e+00 -6.50677145e-01 -2.12095046e+00 1.40136290e+00 -8.35393071e-01] [-3.41544580e+00 -8.58682275e-01 5.31485736e-01 2.56996423e-01 -4.54459280e-01 1.79778087e+00 1.76623392e+00 1.44612744e-01 2.18462944e-01 1.90637207e+00] [ 2.75005913e+00 1.20360613e+00 1.16931999e+00 -2.40962648e+00 -1.08532894e+00 2.83615708e-01 -2.50486255e+00 4.81624275e-01 7.62392402e-01 -7.06171453e-01] [ 1.08167624e+00 1.52990043e+00 8.66308331e-01 2.05311060e+00 1.46062744e+00 1.11375463e+00 -2.36729667e-01 3.33376020e-01 2.98076606e+00 2.56811953e+00] [ 3.06157410e-01 -8.54672313e-01 -1.16592908e+00 -6.34765685e-01 -1.42598641e+00 -5.71771383e-01 -3.04534078e+00 1.92566827e-01 2.06961584e+00 
1.88794124e+00]] [[-7.80022681e-01 1.03983390e+00 -1.98847711e+00 -2.63862562e+00 2.72774863e+00 -5.70717990e-01 2.70076036e+00 1.22749126e+00 1.27612388e+00 -1.21184814e+00] [ 4.37026173e-01 1.41736376e+00 -2.62350702e+00 4.57940221e-01 -1.36003530e+00 -1.91509438e+00 1.85231268e+00 5.38773954e-01 1.66694784e+00 4.06880565e-02] [-1.00039327e+00 1.28567964e-01 -3.20331931e+00 2.53894210e-01 2.87509775e+00 -4.03650582e-01 -2.53158212e+00 5.01301587e-01 -4.11734432e-01 -7.92672634e-01] [-2.03148270e+00 -1.08666658e+00 -3.21271658e-01 -4.38353419e-01 -1.87007505e-02 1.36728144e+00 1.71948951e-02 2.95995927e+00 -1.74082279e+00 3.30686793e-02] [ 3.59706789e-01 2.61080861e+00 -2.50857425e+00 -1.86706349e-01 6.28912508e-01 -1.79643154e+00 2.43964720e+00 1.55163682e+00 -3.06770945e+00 2.61901593e+00] [ 2.84792352e+00 -2.39293838e+00 -1.62263179e+00 1.24431670e+00 -1.84622300e+00 -1.92115569e+00 -7.09687948e-01 1.33446181e+00 3.17296052e+00 2.40997434e+00] [ 5.35740197e-01 5.16614318e-01 1.06450409e-01 -1.12035286e+00 1.04058886e+00 -1.78759301e+00 -6.93495750e-01 -1.06435895e+00 1.80826545e+00 5.08850396e-01] [ 3.22289491e+00 2.05199599e+00 1.74657702e+00 7.35098541e-01 1.45518577e+00 -2.10805106e+00 -8.69995713e-01 -4.37368482e-01 -6.31536126e-01 -1.09727550e+00] [-3.64161444e+00 1.89048517e+00 9.33746934e-01 2.70046544e+00 1.38485491e+00 2.84183919e-01 1.43580723e+00 3.30543481e-02 4.77730811e-01 -2.31744945e-02] [ 2.70677924e+00 -2.18084335e+00 4.49838817e-01 -1.51866579e+00 -2.41821814e+00 -6.19097471e-01 -1.98414952e-01 9.45445120e-01 -1.98272240e+00 7.50616670e-01]]]]]; ov_res: [[[[[-1.84629011e+00 5.77052176e-01 2.25451255e+00 -2.29191288e-01 6.13764040e-02 2.09056211e+00 -4.50460911e+00 -8.24914992e-01 -2.64436269e+00 -1.23543310e+00] [-5.50305128e-01 -8.38430285e-01 1.49772274e+00 6.48287535e-01 -1.87402654e+00 1.01437375e-01 -7.81735361e-01 1.76053500e+00 3.62060666e+00 -7.69068837e-01] [-1.78959608e+00 -2.80918527e+00 3.56480122e-01 3.04637837e+00 1.54746795e+00 
-9.84248638e-01 -1.14279404e-01 1.36497843e+00 -3.30869603e+00 1.59259272e+00] [ 8.41881692e-01 -1.26637447e+00 2.43111944e+00 8.31537366e-01 7.77763247e-01 -8.63798559e-01 -1.14850056e+00 -1.64416626e-01 -9.97672044e-03 -1.64756811e+00] [-2.64030743e+00 7.89076388e-01 -2.30643988e+00 2.10326838e+00 1.49461222e+00 -1.27205992e+00 2.49123549e+00 -2.96701360e+00 -1.51727533e+00 1.42307031e+00] [-2.67656112e+00 -1.01856267e+00 -2.01452807e-01 3.36830765e-01 -5.84371686e-01 1.92935097e+00 2.21602106e+00 1.56734252e+00 6.23200774e-01 -1.65797246e+00] [ 2.84646487e+00 -1.20958078e+00 4.34579182e+00 1.50751817e+00 2.20422626e+00 9.89433646e-01 -1.11947691e+00 2.21932268e+00 3.17152202e-01 -1.23743916e+00] [-1.05101414e-01 2.63489580e+00 2.72720981e+00 1.71769872e-01 -1.81818342e+00 -7.97467649e-01 3.21351743e+00 7.04637706e-01 -1.10861802e+00 2.93964297e-02] [-1.11279440e+00 1.12046385e+00 -7.91410565e-01 -1.21623373e+00 7.42876470e-01 -1.79045415e+00 9.91495848e-01 1.20923984e+00 2.79537626e-02 2.16335797e+00] [ 7.58222699e-01 8.92887354e-01 4.58040506e-01 2.31033683e+00 -3.27201319e+00 2.06641316e+00 4.24423128e-01 3.06006581e-01 -1.53671503e-01 1.87203622e+00]] [[ 1.97495461e+00 -1.60550785e+00 3.29172969e+00 9.02898729e-01 6.65100813e-02 -3.85656625e-01 -8.73957634e-01 -1.73439753e+00 -5.41575253e-02 -1.48752928e-01] [ 2.00893760e+00 5.63019097e-01 1.38936833e-01 4.21524334e+00 3.20042968e+00 -3.34408164e+00 3.07627463e+00 1.54436743e+00 3.32161164e+00 -1.14624906e+00] [ 1.16551943e-01 1.17608905e+00 -7.19234273e-02 -4.07788783e-01 4.39430743e-01 -5.01386356e+00 1.45770288e+00 -2.76176524e+00 -7.69732594e-01 9.48590398e-01] [ 5.70815980e-01 -1.28051913e+00 -4.68004346e-01 -6.63009107e-01 1.95347235e-01 -6.56844556e-01 2.19805145e+00 4.16490614e-01 -1.64155090e+00 -8.50676894e-01] [-2.04866022e-01 -3.57278466e-01 -2.31860590e+00 3.97448927e-01 -6.67358935e-01 -4.15033817e-01 2.54328203e+00 1.21742225e+00 -3.11681581e+00 -1.06279564e+00] [ 5.67974038e-02 -4.15312737e-01 
-1.80162740e+00 6.03602707e-01 1.44327998e-01 1.10866308e+00 -3.70071262e-01 2.28061175e+00 1.88683534e+00 1.70988679e+00] [-5.49213886e-01 2.23216042e-01 1.34293222e+00 1.93507290e+00 1.02022898e+00 3.00071383e+00 -4.40232962e-01 -2.64669251e+00 4.00656414e+00 1.15552688e+00] [-1.57746446e+00 5.48691750e-02 6.14273846e-01 -3.66861254e-01 9.19469178e-01 1.21796057e-01 -2.12102942e-02 -2.36125994e+00 -1.07308757e+00 1.53813720e+00] [ 2.52423334e+00 1.35136974e+00 -2.81491041e+00 -5.20373464e-01 -1.75058353e+00 -4.88797545e-01 -2.04050612e+00 -1.40748405e+00 4.00734377e+00 -2.31649375e+00] [-1.12349534e+00 3.97226006e-01 -5.85540473e-01 2.43786335e+00 1.74197960e+00 -1.81738746e+00 -1.44071388e+00 5.75559974e-01 -3.11185336e+00 1.38598442e+00]] [[-1.08948939e-01 -5.39542973e-01 1.40486932e+00 -1.31081867e+00 -2.91508198e+00 -7.60546386e-01 -8.04094970e-01 -2.82231116e+00 -2.81943941e+00 -6.38532778e-03] [-1.66250443e+00 -7.20647499e-02 3.58819175e+00 1.92474937e+00 2.65079069e+00 1.84262717e+00 -6.31845295e-02 -3.54147863e+00 -5.56900024e-01 -2.54167151e+00] [-8.14509332e-01 -7.74196208e-01 -2.50609255e+00 7.14370310e-01 -2.90907454e+00 2.99006724e+00 -4.56546485e-01 -1.22238159e+00 1.99998081e+00 1.25780666e+00] [-1.83422148e+00 -1.73366046e+00 6.66743577e-01 -1.41163731e+00 2.42543173e+00 2.20859504e+00 2.28241158e+00 -1.06975533e-01 -1.91350126e+00 2.35972703e-01] [ 9.68470633e-01 -4.64882106e-01 -2.29807109e-01 -1.15239829e-01 -3.33532047e+00 -1.53862190e+00 1.13402605e+00 -1.25627863e+00 -5.73318422e-01 2.82914728e-01] [ 2.05537772e+00 -3.44478458e-01 2.76674056e+00 1.14764512e+00 9.07419682e-01 -5.23770392e-01 -7.19482183e-01 -7.49776781e-01 1.06044078e+00 -7.24912882e-01] [ 6.30261898e-01 6.89934790e-02 -9.95890737e-01 8.94793451e-01 -3.61874282e-01 8.58313859e-01 1.64160097e+00 2.82572436e+00 1.58015716e+00 1.82635915e+00] [-1.83913320e-01 -2.22896442e-01 6.77270964e-02 -2.42245126e+00 -2.31260851e-01 1.37322903e+00 -1.35117039e-01 1.09366822e+00 
1.00880921e+00 1.16024904e-01] [-3.24672723e+00 -2.22855210e+00 -2.12224245e+00 -1.59038210e+00 -5.88618934e-01 -7.39147186e-01 -1.58260894e+00 -1.28970683e+00 4.48425561e-01 -1.82709742e+00] [ 7.98360229e-01 -3.22457743e+00 -4.48106003e+00 -6.29274189e-01 8.47903490e-01 3.41429114e-01 1.00963986e+00 -2.04782438e+00 -8.30823898e-01 -1.31871819e+00]] [[ 1.12886131e+00 -2.44865060e+00 5.77408314e-01 1.41419458e+00 2.26839647e-01 1.07127941e+00 -8.55825782e-01 2.72339797e+00 3.54133576e-01 7.23992527e-01] [ 1.18415380e+00 2.43346497e-01 -4.00986552e-01 -3.07536864e+00 9.71120477e-01 1.32685626e+00 -1.80917993e-01 -2.98929405e+00 -6.19063079e-01 1.35417879e+00] [ 6.28514826e-01 -2.13317060e+00 -6.47628725e-01 6.04194641e-01 5.95374763e-01 -1.10851265e-01 4.42343205e-01 1.33387327e+00 4.86762345e-01 6.67850375e-01] [ 2.75804305e+00 1.21880481e-02 3.26689529e+00 9.59278584e-01 -1.04463066e-03 -7.64909208e-01 3.85357857e-01 -7.41018176e-01 3.24739122e+00 -9.30692434e-01] [-9.68659639e-01 1.22098401e-01 1.82280934e+00 -1.84460700e-01 -2.57271498e-01 -2.94709317e-02 -1.44729578e+00 -3.55993271e-01 2.33742905e+00 -7.09899962e-01] [ 3.82419318e-01 2.16795731e+00 1.87211609e+00 4.81353909e-01 -6.88543022e-01 2.14167237e+00 -1.30071926e+00 -3.41907430e+00 -7.27548957e-01 -4.32923508e+00] [-2.08789062e+00 1.19251347e+00 3.05273724e+00 -3.90197545e-01 -6.70833230e-01 -5.67197144e-01 6.56505108e-01 -9.03532267e-01 -5.95497131e-01 5.25077544e-02] [-3.14760447e+00 2.41738820e+00 -1.30311638e-01 -6.61612988e-01 -7.51428902e-01 -1.52059925e+00 -2.29526758e+00 -6.50332510e-01 2.51475525e+00 4.06972915e-01] [-1.08598888e+00 -1.46100831e+00 1.59322298e+00 -8.30475986e-01 5.09076416e-01 1.77128637e+00 -2.24169660e+00 -4.01894987e-01 1.02462792e+00 -4.57539707e-01] [ 4.17232215e-01 2.51761031e+00 -2.85741019e+00 -1.84320438e+00 2.45050001e+00 6.27358854e-01 -1.55316472e+00 3.18300426e-01 -1.17538345e+00 1.65435314e+00]] [[-7.83624426e-02 8.87159705e-01 3.35721672e-01 1.08468711e-01 
4.52151477e-01 6.52664363e-01 -2.68651938e+00 2.58784080e+00 -2.44042921e+00 -1.94779307e-01] [ 2.09509659e+00 1.04622328e+00 6.24114096e-01 7.59641349e-01 -6.74564958e-01 5.99245727e-01 1.61454344e+00 -1.14831197e+00 1.42182231e+00 7.94046298e-02] [ 2.06464601e+00 1.30474234e+00 1.36189306e+00 1.93631664e-01 1.79005325e+00 -7.72534966e-01 6.32121861e-01 1.09337473e+00 -1.73717782e-01 -3.03661871e+00] [-1.66537786e+00 4.78281856e-01 -1.88219404e+00 1.42963862e+00 6.00652635e-01 1.22665393e+00 -1.30671954e+00 -1.47691453e+00 3.95663333e+00 -1.52829659e+00] [ 2.43242550e+00 2.21310806e+00 -2.06093431e-01 1.04624534e+00 -1.62831560e-01 2.02560872e-01 2.00074482e+00 8.88472438e-01 1.15829611e+00 -1.82584560e+00] [-1.63666785e+00 1.93589997e+00 1.87740195e+00 -1.93779719e+00 -3.46305037e+00 6.04268491e-01 4.80900913e-01 -1.78222907e+00 -4.67040390e-01 1.05098522e+00] [ 1.72733799e-01 -2.32460022e+00 6.99315667e-01 1.40120649e+00 -6.96683586e-01 -7.76890218e-01 -9.17753112e-03 -5.40767312e-01 1.00587654e+00 1.25609744e+00] [ 8.09002817e-02 2.86730671e+00 -9.02971983e-01 9.70937669e-01 -2.95501977e-01 -1.92440760e+00 2.07778263e+00 1.88203216e-01 2.10247064e+00 1.55463409e+00] [ 6.18367612e-01 -4.33687091e-01 2.41464424e+00 -6.10695422e-01 1.35265493e+00 9.57072973e-01 4.43215221e-01 -1.19812739e+00 1.44150901e+00 3.89080048e+00] [-2.00227928e+00 3.63252252e-01 -2.37212491e+00 4.72821385e-01 1.71937609e+00 -1.05678998e-01 1.06252110e+00 1.37486875e+00 -1.14843690e+00 -7.07384050e-01]] [[ 1.27824163e+00 -1.02595782e+00 4.41770464e-01 9.52243507e-01 -5.00445366e-01 3.46496016e-01 3.68112653e-01 -2.83925980e-01 3.17094952e-01 -1.65681469e+00] [-6.02862418e-01 9.25889194e-01 8.26401055e-01 5.17390072e-01 -8.83594513e-01 -8.81983757e-01 3.36111355e+00 -2.02427268e+00 2.14280105e+00 -1.98396158e+00] [-7.70684183e-02 -9.06561315e-01 2.31538296e+00 1.40859747e+00 -2.88424730e+00 -8.93450454e-02 -1.41124025e-01 1.21754444e+00 -1.24713624e+00 6.02012634e-01] [-7.17073858e-01 
-5.01116961e-02 3.41376573e-01 1.59933221e+00 1.53867245e+00 6.92216456e-01 -8.39826524e-01 -8.85184348e-01 2.98114848e+00 -9.78785157e-01] [ 1.01356578e+00 3.18419242e+00 3.92093599e-01 -1.14042914e+00 -5.84462166e-01 8.97995532e-01 7.59178996e-01 -5.53099823e+00 -4.88214523e-01 2.64261150e+00] [ 1.76148677e+00 -2.51589656e+00 -1.54346311e+00 2.71213472e-01 4.44347084e-01 -3.82425666e-01 7.83528149e-01 -2.34657216e+00 -2.57463980e+00 1.77654362e+00] [ 1.35298431e+00 1.86446977e+00 -7.18019366e-01 -3.43525195e+00 3.27495694e+00 -6.18897557e-01 -2.15017319e+00 -2.97397470e+00 -1.85167909e+00 -8.09942484e-01] [ 2.37432599e+00 -7.94775188e-01 -1.26594782e+00 2.83732033e+00 2.42926049e+00 -8.25337395e-02 -9.65961933e-01 2.29981971e+00 -8.04863095e-01 3.15033507e+00] [-3.85530770e-01 -1.19560945e+00 1.21875143e+00 -1.25683993e-02 2.46985197e-01 4.64736879e-01 -1.83687913e+00 6.25964999e-01 -5.28494477e-01 1.94096088e+00] [-1.49456963e-01 -1.25090528e+00 1.38062924e-01 -7.61955857e-01 4.32427138e-01 1.74345934e+00 -1.37762094e+00 2.95858335e+00 -1.26857615e+00 1.71053335e-02]] [[ 6.27976835e-01 -1.49568915e+00 7.43106976e-02 -3.18383551e+00 1.71910036e+00 1.20431566e+00 -3.21654749e+00 -3.03501815e-01 3.52286667e-01 1.07657576e+00] [ 9.99996364e-01 1.35519159e+00 -1.40050960e+00 -4.85096604e-01 2.14344192e+00 5.92369437e-01 4.95543063e-01 1.09446108e+00 1.29784214e+00 3.51297557e-01] [-2.58438200e-01 -1.27655303e+00 -5.47859371e-01 -1.70259464e+00 1.20475221e+00 3.34086347e+00 1.35713029e+00 2.53352141e+00 -1.54420280e+00 1.79903650e+00] [-1.87662899e+00 -1.05813158e+00 1.65498093e-01 -2.14408922e+00 9.54035401e-01 -3.45530844e+00 -1.86973119e+00 2.91419387e+00 1.37607932e+00 -1.17474985e+00] [ 1.44889283e+00 -2.29503059e+00 1.32355917e+00 -1.53677356e+00 -8.77072096e-01 -3.22310090e+00 -5.14055550e-01 -2.37991595e+00 1.72733164e+00 2.26046920e+00] [-3.19414258e+00 -3.35237741e+00 9.76746261e-01 -1.62593532e+00 1.91517138e+00 1.38162899e+00 -6.50677145e-01 
-2.12095046e+00 1.40136290e+00 -8.35393071e-01] [-3.41544580e+00 -8.58682275e-01 5.31485736e-01 2.56996423e-01 -4.54459280e-01 1.79778087e+00 1.76623392e+00 1.44612744e-01 2.18462944e-01 1.90637207e+00] [ 2.75005913e+00 1.20360613e+00 1.16931999e+00 -2.40962648e+00 -1.08532894e+00 2.83615708e-01 -2.50486255e+00 4.81624275e-01 7.62392402e-01 -7.06171453e-01] [ 1.08167624e+00 1.52990043e+00 8.66308331e-01 2.05311060e+00 1.46062744e+00 1.11375463e+00 -2.36729667e-01 3.33376020e-01 2.98076606e+00 2.56811953e+00] [ 3.06157410e-01 -8.54672313e-01 -1.16592908e+00 -6.34765685e-01 -1.42598641e+00 -5.71771383e-01 -3.04534078e+00 1.92566827e-01 2.06961584e+00 1.88794124e+00]] [[-7.80022681e-01 1.03983390e+00 -1.98847711e+00 -2.63862562e+00 2.72774863e+00 -5.70717990e-01 2.70076036e+00 1.22749126e+00 1.27612388e+00 -1.21184814e+00] [ 4.37026173e-01 1.41736376e+00 -2.62350702e+00 4.57940221e-01 -1.36003530e+00 -1.91509438e+00 1.85231268e+00 5.38773954e-01 1.66694784e+00 4.06880565e-02] [-1.00039327e+00 1.28567964e-01 -3.20331931e+00 2.53894210e-01 2.87509775e+00 -4.03650582e-01 -2.53158212e+00 5.01301587e-01 -4.11734432e-01 -7.92672634e-01] [-2.03148270e+00 -1.08666658e+00 -3.21271658e-01 -4.38353419e-01 -1.87007505e-02 1.36728144e+00 1.71948951e-02 2.95995927e+00 -1.74082279e+00 3.30686793e-02] [ 3.59706789e-01 2.61080861e+00 -2.50857425e+00 -1.86706349e-01 6.28912508e-01 -1.79643154e+00 2.43964720e+00 1.55163682e+00 -3.06770945e+00 2.61901593e+00] [ 2.84792352e+00 -2.39293838e+00 -1.62263179e+00 1.24431670e+00 -1.84622300e+00 -1.92115569e+00 -7.09687948e-01 1.33446181e+00 3.17296052e+00 2.40997434e+00] [ 5.35740197e-01 5.16614318e-01 1.06450409e-01 -1.12035286e+00 1.04058886e+00 -1.78759301e+00 -6.93495750e-01 -1.06435895e+00 1.80826545e+00 5.08850396e-01] [ 3.22289491e+00 2.05199599e+00 1.74657702e+00 7.35098541e-01 1.45518577e+00 -2.10805106e+00 -8.69995713e-01 -4.37368482e-01 -6.31536126e-01 -1.09727550e+00] [-3.64161444e+00 1.89048517e+00 9.33746934e-01 2.70046544e+00 
1.38485491e+00 2.84183919e-01 1.43580723e+00 3.30543481e-02 4.77730811e-01 -2.31744945e-02] [ 2.70677924e+00 -2.18084335e+00 4.49838817e-01 -1.51866579e+00 -2.41821814e+00 -6.19097471e-01 -1.98414952e-01 9.45445120e-01 -1.98272240e+00 7.50616670e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 1, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1000.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.2742 (2,1,1,.,.) = 2.4653 (3,1,1,.,.) = 0.1074 (1,2,1,.,.) = 1.0551 (2,2,1,.,.) = 1.8652 (3,2,1,.,.) = 0.01 * -6.4973 (1,3,1,.,.) = -0.4739 (2,3,1,.,.) = 1.0223 (3,3,1,.,.) = 0.1352 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-1.73378932e+00 -6.40994012e-01 2.17161798e+00 ... -7.69350111e-01 2.41779208e-01 2.73293406e-01] [ 2.88301492e+00 -4.10775363e-01 -3.50460839e+00 ... 6.18613005e-01 1.83229077e+00 1.47493839e+00] ... [-4.39627886e+00 -2.07551003e+00 -1.48977995e+00 ... 6.48366570e-01 -2.12164760e+00 -1.60316861e+00] [ 1.37622416e+00 8.52917254e-01 4.23533297e+00 ... 1.14340521e-01 7.77152836e-01 -1.63297141e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.25322294e+00 -5.56651175e-01 4.17300344e-01 ... -1.54134488e+00 9.33658779e-01 2.34808803e+00] [-3.53748709e-01 -2.98630804e-01 9.82934058e-01 ... 
-3.28818291e-01 -2.18789673e+00 -9.74748373e-01] ... [ 1.95730460e+00 -3.39973235e+00 -4.74266142e-01 ... -6.51510119e-01 -2.03365743e-01 1.11426222e+00] [ 2.68345666e+00 5.37344694e+00 -2.65163183e+00 ... -2.63013053e+00 3.45595837e-01 -9.86301303e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.15398371e-01 9.07383740e-01 -1.24393785e+00 ... -6.18305445e-01 -9.27605987e-01 2.13850617e+00] [ 9.39235449e-01 1.33982325e+00 -2.27276659e+00 ... -1.07259464e+00 -1.77495360e-01 1.17517853e+00] ... [-2.17879915e+00 -2.88268232e+00 -4.74294245e-01 ... 2.03327322e+00 -1.07515728e+00 -2.83981895e+00] [ 2.94958735e+00 8.37870657e-01 1.45750570e+00 ... -3.97963262e+00 9.23471272e-01 -1.00436258e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-6.67819142e-01 -2.39362144e+00 -2.90470004e-01 ... -2.50368025e-02 -1.40059501e-01 -1.10379720e+00] [ 4.30044460e+00 -1.24266088e+00 2.27579021e+00 ... 1.08893788e+00 2.16107392e+00 -1.62189150e+00] ... [ 1.64458132e+00 -4.50071007e-01 1.97717398e-01 ... 1.89626193e+00 -3.58609766e-01 -2.01380181e+00] [-2.29135394e-01 -1.16490793e+00 2.86490774e+00 ... -7.13932276e-01 -4.58377242e-01 2.84581900e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-5.35337090e-01 3.09649897e+00 -3.48726869e-01 ... 4.32975590e-01 1.88497019e+00 6.63195670e-01] [-1.80840230e+00 -9.69380140e-01 -3.66508663e-01 ... -4.47083998e+00 3.91381288e+00 -8.98410559e-01] ... [-7.02498257e-01 9.15254951e-01 -1.07410647e-01 ... 
1.44030347e-01 1.11871374e+00 1.39157927e+00] [ 1.20096457e+00 7.29722381e-01 -2.53411198e+00 ... -1.98416531e+00 1.15836847e+00 1.61891294e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 3.39766979e+00 -3.65553999e+00 2.44185376e+00 ... -1.07662332e+00 1.07757902e+00 1.80053341e+00] [ 2.57211328e+00 -2.77631736e+00 6.72683835e-01 ... -3.77509022e+00 5.10974944e-01 6.06738269e-01] ... [-1.16809571e+00 -1.06256747e+00 -1.23778486e+00 ... -2.82442260e+00 -1.87706602e+00 -5.16447735e+00] [ 2.46573043e+00 -2.31646329e-01 -1.12212908e+00 ... -2.69744325e+00 -2.76243019e+00 -1.81645060e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.80668545e+00 -1.48558140e+00 2.56565952e+00 ... 3.40912247e+00 -2.42935553e-01 -2.90748382e+00] [ 6.18939734e+00 -7.95589328e-01 -5.28947496e+00 ... -4.48998809e-01 4.15453529e+00 9.37890947e-01] ... [-7.75416183e+00 1.73593327e-01 -3.51419973e+00 ... -4.22767496e+00 -5.99348259e+00 -1.87444711e+00] [ 1.57743335e+00 3.66646647e+00 7.32449627e+00 ... -7.99439311e-01 1.55025423e+00 -1.21315384e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.58479667e+00 -2.45983267e+00 -5.03049672e-01 ... -2.86335254e+00 1.18685865e+00 6.49912834e+00] [-1.43877304e+00 -1.43305302e+00 4.04398060e+00 ... -8.40896785e-01 -2.60978580e+00 -2.76253700e+00] ... [ 2.36274958e+00 -5.56237650e+00 -4.89084673e+00 ... -3.11398357e-01 3.11299372e+00 3.08001733e+00] [ 7.54594469e+00 5.51284456e+00 -6.27287292e+00 ... -2.43679214e+00 1.39839828e+00 4.45624620e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 
0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-3.22092867e+00 3.27344632e+00 -1.60359430e+00 ... -2.60467148e+00 -3.42940831e+00 2.52350044e+00] [ 1.34209573e+00 2.73448801e+00 -6.37195945e-01 ... 6.21700108e-01 3.99191260e-01 1.91206598e+00] ... [-2.51636291e+00 -5.96723413e+00 -1.61219120e+00 ... 4.75580788e+00 1.70119095e+00 -2.32570052e+00] [ 2.05184674e+00 -5.89070261e-01 2.73124027e+00 ... -5.95830631e+00 2.24535728e+00 -2.37076187e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-3.57942033e+00 -1.63491249e+00 2.52944279e+00 ... -1.09684741e+00 1.68734300e+00 -1.87499988e+00] [ 5.50208426e+00 -2.50927228e-02 3.56978965e+00 ... 2.98876238e+00 3.12055063e+00 -3.46723199e-01] ... [ 4.21496868e+00 -1.37434304e+00 8.22945535e-01 ... 1.65669501e+00 3.66555423e-01 -4.33955050e+00] [-1.61401355e+00 -1.85732734e+00 4.40848064e+00 ... -2.07775617e+00 -2.87986112e+00 4.02536726e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-3.74122620e-01 7.70930767e+00 -1.63559961e+00 ... 3.73419833e+00 2.17420506e+00 2.55852628e+00] [-9.69977319e-01 -2.79311252e+00 3.19103360e-01 ... -8.94539452e+00 3.85803342e+00 -2.41632009e+00] ... [-1.92466593e+00 -1.07032943e+00 -2.93640327e+00 ... 2.46768785e+00 2.95794225e+00 3.29404449e+00] [ 5.87429142e+00 3.75816774e+00 -6.20054960e+00 ... -3.18957090e+00 1.88152707e+00 1.23947203e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 6.66237640e+00 -6.49914837e+00 1.40921772e+00 ... 
1.23545182e+00 -2.81113833e-02 1.05004621e+00] [ 5.24357796e+00 -6.01598072e+00 -8.77077997e-01 ... -4.71753454e+00 -3.49126840e+00 2.26776791e+00] ... [ 3.97925854e-01 -4.48634243e+00 8.94301951e-01 ... -3.36914444e+00 -4.29888487e+00 -8.17098236e+00] [ 5.64179564e+00 1.16956365e+00 -1.80026197e+00 ... -4.50508499e+00 -6.94014072e+00 -4.22772360e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.08035782e-01 -1.18950933e-01 -1.94145039e-01 ... 3.89425576e-01 1.53635964e-01 -3.16793799e-01] [ 2.10035533e-01 -7.36650229e-02 8.07664022e-02 ... -8.38760957e-02 8.20149407e-02 -7.56174838e-03] ... [-1.53703779e-01 1.67652115e-01 7.17447773e-02 ... -4.42629158e-01 -2.76356697e-01 -5.97615428e-02] [-1.01113327e-01 2.10314825e-01 2.82583386e-01 ... -2.29687124e-01 -9.58458558e-02 2.10619509e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 7.02393577e-02 -1.59496978e-01 -7.72356987e-02 ... -2.31613144e-02 -7.28029981e-02 1.34774193e-01] [-2.12020770e-01 -4.67277654e-02 1.09349355e-01 ... -4.26474363e-02 2.69074887e-01 9.48026255e-02] ... [-9.65557396e-02 -1.66915759e-01 -3.93027961e-01 ... -2.22081527e-01 3.29343051e-01 1.77291974e-01] [ 4.16280299e-01 -3.29304188e-01 -4.43355814e-02 ... 6.99214637e-02 4.11259443e-01 1.21480316e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.25633413e-01 1.34948716e-01 4.25720848e-02 ... -2.56504029e-01 -1.79296896e-01 -8.45788345e-02] [ 2.34094001e-02 -1.03908584e-01 3.27259719e-01 ... 1.28898665e-01 3.98615785e-02 -6.85917437e-02] ... 
[-1.12588704e-01 -3.26558888e-01 8.22767848e-04 ... 6.10852353e-02 2.52599597e-01 2.20575675e-01] [-2.58947968e-01 -1.49754032e-01 5.40636592e-02 ... 5.83795831e-02 2.44469810e-02 -1.46341607e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.32566819e-01 9.11712050e-02 1.47624895e-01 ... -3.86260822e-02 1.05365239e-01 1.47193342e-01] [-7.80483559e-02 1.89241290e-01 3.03585172e-01 ... 2.74718940e-01 -4.91621159e-02 1.00287423e-01] ... [ 5.01741171e-02 -1.15250172e-02 -4.46914062e-02 ... -9.86081585e-02 -1.09973431e-01 -5.89611381e-02] [-9.38478261e-02 2.25398213e-01 1.49400160e-01 ... -1.39986590e-01 -1.88545465e-01 -7.06078559e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 9.18923989e-02 2.17313722e-01 -1.66325495e-01 ... 1.45129696e-01 -8.84171277e-02 2.36126900e-01] [ 8.77876878e-02 -2.02880859e-01 -4.60765697e-02 ... -2.69240234e-02 -1.53506055e-01 -2.10460052e-01] ... [ 5.44103421e-02 -1.82906732e-01 -1.05900362e-01 ... 1.02563448e-01 5.04154712e-02 5.40106297e-02] [ 3.89003009e-01 2.98536032e-01 -1.87443390e-01 ... 1.58020213e-01 3.87129672e-02 -3.04403473e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 2.01555908e-01 8.31736177e-02 -2.20927060e-01 ... 1.92842916e-01 -1.50831074e-01 -1.29476367e-02] [ 3.00000131e-01 -2.82655567e-01 -9.95701477e-02 ... 2.09910691e-01 -3.29440892e-01 1.42509222e-01] ... [ 5.31548262e-01 -1.49839640e-01 4.96041775e-02 ... 2.13075504e-01 -1.92171678e-01 -2.49639153e-02] [ 1.72923788e-01 1.61670744e-01 -5.04817553e-02 ... 
-3.57529335e-02 -2.79154569e-01 -1.48630599e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]]]]; ov_res: [[[[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-1.73378932e+00 -6.40994012e-01 2.17161798e+00 ... -7.69350111e-01 2.41779208e-01 2.73293406e-01] [ 2.88301492e+00 -4.10775363e-01 -3.50460839e+00 ... 6.18613005e-01 1.83229077e+00 1.47493839e+00] ... [-4.39627886e+00 -2.07551003e+00 -1.48977995e+00 ... 6.48366570e-01 -2.12164760e+00 -1.60316861e+00] [ 1.37622416e+00 8.52917254e-01 4.23533297e+00 ... 1.14340521e-01 7.77152836e-01 -1.63297141e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.25322294e+00 -5.56651175e-01 4.17300344e-01 ... -1.54134488e+00 9.33658779e-01 2.34808803e+00] [-3.53748709e-01 -2.98630804e-01 9.82934058e-01 ... -3.28818291e-01 -2.18789673e+00 -9.74748373e-01] ... [ 1.95730460e+00 -3.39973235e+00 -4.74266142e-01 ... -6.51510119e-01 -2.03365743e-01 1.11426222e+00] [ 2.68345666e+00 5.37344694e+00 -2.65163183e+00 ... -2.63013053e+00 3.45595837e-01 -9.86301303e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.15398371e-01 9.07383740e-01 -1.24393785e+00 ... -6.18305445e-01 -9.27605987e-01 2.13850617e+00] [ 9.39235449e-01 1.33982325e+00 -2.27276659e+00 ... -1.07259464e+00 -1.77495360e-01 1.17517853e+00] ... [-2.17879915e+00 -2.88268232e+00 -4.74294245e-01 ... 2.03327322e+00 -1.07515728e+00 -2.83981895e+00] [ 2.94958735e+00 8.37870657e-01 1.45750570e+00 ... -3.97963262e+00 9.23471272e-01 -1.00436258e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... 
[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-6.67819142e-01 -2.39362144e+00 -2.90470004e-01 ... -2.50368025e-02 -1.40059501e-01 -1.10379720e+00] [ 4.30044460e+00 -1.24266088e+00 2.27579021e+00 ... 1.08893788e+00 2.16107392e+00 -1.62189150e+00] ... [ 1.64458132e+00 -4.50071007e-01 1.97717398e-01 ... 1.89626193e+00 -3.58609766e-01 -2.01380181e+00] [-2.29135394e-01 -1.16490793e+00 2.86490774e+00 ... -7.13932276e-01 -4.58377242e-01 2.84581900e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-5.35337090e-01 3.09649897e+00 -3.48726869e-01 ... 4.32975590e-01 1.88497019e+00 6.63195670e-01] [-1.80840230e+00 -9.69380140e-01 -3.66508663e-01 ... -4.47083998e+00 3.91381288e+00 -8.98410559e-01] ... [-7.02498257e-01 9.15254951e-01 -1.07410647e-01 ... 1.44030347e-01 1.11871374e+00 1.39157927e+00] [ 1.20096457e+00 7.29722381e-01 -2.53411198e+00 ... -1.98416531e+00 1.15836847e+00 1.61891294e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 3.39766979e+00 -3.65553999e+00 2.44185376e+00 ... -1.07662332e+00 1.07757902e+00 1.80053341e+00] [ 2.57211328e+00 -2.77631736e+00 6.72683835e-01 ... -3.77509022e+00 5.10974944e-01 6.06738269e-01] ... [-1.16809571e+00 -1.06256747e+00 -1.23778486e+00 ... -2.82442260e+00 -1.87706602e+00 -5.16447735e+00] [ 2.46573043e+00 -2.31646329e-01 -1.12212908e+00 ... -2.69744325e+00 -2.76243019e+00 -1.81645060e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.80668545e+00 -1.48558140e+00 2.56565952e+00 ... 
3.40912247e+00 -2.42935553e-01 -2.90748382e+00] [ 6.18939734e+00 -7.95589328e-01 -5.28947496e+00 ... -4.48998809e-01 4.15453529e+00 9.37890947e-01] ... [-7.75416183e+00 1.73593327e-01 -3.51419973e+00 ... -4.22767496e+00 -5.99348259e+00 -1.87444711e+00] [ 1.57743335e+00 3.66646647e+00 7.32449627e+00 ... -7.99439311e-01 1.55025423e+00 -1.21315384e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.58479667e+00 -2.45983267e+00 -5.03049672e-01 ... -2.86335254e+00 1.18685865e+00 6.49912834e+00] [-1.43877304e+00 -1.43305302e+00 4.04398060e+00 ... -8.40896785e-01 -2.60978580e+00 -2.76253700e+00] ... [ 2.36274958e+00 -5.56237650e+00 -4.89084673e+00 ... -3.11398357e-01 3.11299372e+00 3.08001733e+00] [ 7.54594469e+00 5.51284456e+00 -6.27287292e+00 ... -2.43679214e+00 1.39839828e+00 4.45624620e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-3.22092867e+00 3.27344632e+00 -1.60359430e+00 ... -2.60467148e+00 -3.42940831e+00 2.52350044e+00] [ 1.34209573e+00 2.73448801e+00 -6.37195945e-01 ... 6.21700108e-01 3.99191260e-01 1.91206598e+00] ... [-2.51636291e+00 -5.96723413e+00 -1.61219120e+00 ... 4.75580788e+00 1.70119095e+00 -2.32570052e+00] [ 2.05184674e+00 -5.89070261e-01 2.73124027e+00 ... -5.95830631e+00 2.24535728e+00 -2.37076187e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-3.57942033e+00 -1.63491249e+00 2.52944279e+00 ... -1.09684741e+00 1.68734300e+00 -1.87499988e+00] [ 5.50208426e+00 -2.50927228e-02 3.56978965e+00 ... 2.98876238e+00 3.12055063e+00 -3.46723199e-01] ... 
[ 4.21496868e+00 -1.37434304e+00 8.22945535e-01 ... 1.65669501e+00 3.66555423e-01 -4.33955050e+00] [-1.61401355e+00 -1.85732734e+00 4.40848064e+00 ... -2.07775617e+00 -2.87986112e+00 4.02536726e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-3.74122620e-01 7.70930767e+00 -1.63559961e+00 ... 3.73419833e+00 2.17420506e+00 2.55852628e+00] [-9.69977319e-01 -2.79311252e+00 3.19103360e-01 ... -8.94539452e+00 3.85803342e+00 -2.41632009e+00] ... [-1.92466593e+00 -1.07032943e+00 -2.93640327e+00 ... 2.46768785e+00 2.95794225e+00 3.29404449e+00] [ 5.87429142e+00 3.75816774e+00 -6.20054960e+00 ... -3.18957090e+00 1.88152707e+00 1.23947203e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 6.66237640e+00 -6.49914837e+00 1.40921772e+00 ... 1.23545182e+00 -2.81113833e-02 1.05004621e+00] [ 5.24357796e+00 -6.01598072e+00 -8.77077997e-01 ... -4.71753454e+00 -3.49126840e+00 2.26776791e+00] ... [ 3.97925854e-01 -4.48634243e+00 8.94301951e-01 ... -3.36914444e+00 -4.29888487e+00 -8.17098236e+00] [ 5.64179564e+00 1.16956365e+00 -1.80026197e+00 ... -4.50508499e+00 -6.94014072e+00 -4.22772360e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.08035782e-01 -1.18950933e-01 -1.94145039e-01 ... 3.89425576e-01 1.53635964e-01 -3.16793799e-01] [ 2.10035533e-01 -7.36650229e-02 8.07664022e-02 ... -8.38760957e-02 8.20149407e-02 -7.56174838e-03] ... [-1.53703779e-01 1.67652115e-01 7.17447773e-02 ... -4.42629158e-01 -2.76356697e-01 -5.97615428e-02] [-1.01113327e-01 2.10314825e-01 2.82583386e-01 ... 
-2.29687124e-01 -9.58458558e-02 2.10619509e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 7.02393577e-02 -1.59496978e-01 -7.72356987e-02 ... -2.31613144e-02 -7.28029981e-02 1.34774193e-01] [-2.12020770e-01 -4.67277654e-02 1.09349355e-01 ... -4.26474363e-02 2.69074887e-01 9.48026255e-02] ... [-9.65557396e-02 -1.66915759e-01 -3.93027961e-01 ... -2.22081527e-01 3.29343051e-01 1.77291974e-01] [ 4.16280299e-01 -3.29304188e-01 -4.43355814e-02 ... 6.99214637e-02 4.11259443e-01 1.21480316e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.25633413e-01 1.34948716e-01 4.25720848e-02 ... -2.56504029e-01 -1.79296896e-01 -8.45788345e-02] [ 2.34094001e-02 -1.03908584e-01 3.27259719e-01 ... 1.28898665e-01 3.98615785e-02 -6.85917437e-02] ... [-1.12588704e-01 -3.26558888e-01 8.22767848e-04 ... 6.10852353e-02 2.52599597e-01 2.20575675e-01] [-2.58947968e-01 -1.49754032e-01 5.40636592e-02 ... 5.83795831e-02 2.44469810e-02 -1.46341607e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.32566819e-01 9.11712050e-02 1.47624895e-01 ... -3.86260822e-02 1.05365239e-01 1.47193342e-01] [-7.80483559e-02 1.89241290e-01 3.03585172e-01 ... 2.74718940e-01 -4.91621159e-02 1.00287423e-01] ... [ 5.01741171e-02 -1.15250172e-02 -4.46914062e-02 ... -9.86081585e-02 -1.09973431e-01 -5.89611381e-02] [-9.38478261e-02 2.25398213e-01 1.49400160e-01 ... -1.39986590e-01 -1.88545465e-01 -7.06078559e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 
0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 9.18923989e-02 2.17313722e-01 -1.66325495e-01 ... 1.45129696e-01 -8.84171277e-02 2.36126900e-01] [ 8.77876878e-02 -2.02880859e-01 -4.60765697e-02 ... -2.69240234e-02 -1.53506055e-01 -2.10460052e-01] ... [ 5.44103421e-02 -1.82906732e-01 -1.05900362e-01 ... 1.02563448e-01 5.04154712e-02 5.40106297e-02] [ 3.89003009e-01 2.98536032e-01 -1.87443390e-01 ... 1.58020213e-01 3.87129672e-02 -3.04403473e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 2.01555908e-01 8.31736177e-02 -2.20927060e-01 ... 1.92842916e-01 -1.50831074e-01 -1.29476367e-02] [ 3.00000131e-01 -2.82655567e-01 -9.95701477e-02 ... 2.09910691e-01 -3.29440892e-01 1.42509222e-01] ... [ 5.31548262e-01 -1.49839640e-01 4.96041775e-02 ... 2.13075504e-01 -1.92171678e-01 -2.49639153e-02] [ 1.72923788e-01 1.61670744e-01 -5.04817553e-02 ... -3.57529335e-02 -2.79154569e-01 -1.48630599e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1002.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.6512 (2,1,1,.,.) = -1.3614 (3,1,1,.,.) = 0.8881 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) e in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or 
compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable 
to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instancefw_re: [[[[[-3.59469801e-01 -7.93683290e-01 -4.58173066e-01 ... -2.04758858e-03 3.17680597e-01 -2.23595589e-01] [-2.68838108e-01 -3.91497701e-01 -3.62352729e-01 ... 7.32428432e-01 -1.83828914e+00 4.83148634e-01] [-9.94111955e-01 -1.32502228e-01 5.07813990e-01 ... -5.56889832e-01 3.88944775e-01 1.92322135e-01] ... [ 1.29935801e+00 -4.07447442e-02 1.61365911e-01 ... -6.93130314e-01 1.01567671e-01 -1.22964576e-01] [-1.20049715e+00 1.57001927e-01 4.14902091e-01 ... 6.00834489e-01 -6.77195787e-01 2.32937157e-01] [ 9.00223553e-01 -3.71433981e-02 -1.35183856e-01 ... -2.57866651e-01 1.19066834e+00 -1.14816442e-01]] [[-4.69016850e-01 -1.29872799e+00 -1.71235383e+00 ... 3.67876738e-02 -5.71383238e-01 3.84844780e-01] [ 3.13218713e-01 -8.61273646e-01 4.23088819e-01 ... 2.09414029e+00 -1.30849051e+00 -5.58763385e-01] [ 4.73545827e-02 -1.07931048e-01 -2.43706718e-01 ... 2.52228349e-01 3.86067092e-01 -2.63641059e-01] ... 
[-1.08458471e+00 -7.06016541e-01 -3.44514459e-01 ... -7.50932753e-01 1.14743066e+00 -5.43365180e-01] [ 4.53522503e-01 -1.04580998e+00 2.16355845e-01 ... 1.46205151e+00 -1.23617508e-01 8.64649594e-01] [ 5.12693584e-01 4.35550474e-02 -3.52594107e-01 ... 6.41581118e-01 6.25728130e-01 1.89270582e-02]] [[ 1.59267798e-01 -2.37924859e-01 -3.26571465e-01 ... -6.69897869e-02 1.52294829e-01 -8.12007129e-01] [ 1.27529764e+00 -6.76160455e-01 -2.74033099e-01 ... -2.05017328e-02 1.09701431e+00 4.43542123e-01] [-7.25300372e-01 -7.80219972e-01 -8.06571126e-01 ... -3.67295772e-01 6.81708634e-01 5.18090092e-02] ... [ 1.28351593e+00 -3.28376800e-01 -2.92998180e-02 ... -4.80646759e-01 -3.68599057e-01 7.47470081e-01] [ 3.32576632e-01 -1.69943675e-01 -9.65682417e-02 ... 1.53600439e-01 6.63388431e-01 1.57269672e-01] [ 3.89898419e-01 1.15878117e+00 5.45328259e-01 ... -8.18005025e-01 -1.52418062e-01 -7.36952722e-02]] ... [[-2.54050434e-01 -6.07667446e-01 -5.82914591e-01 ... 8.36687922e-01 8.37087572e-01 8.07450414e-01] [-1.09742808e+00 -8.05407941e-01 7.04808176e-01 ... 4.81958568e-01 1.05335927e+00 2.48691425e-01] [ 3.14199269e-01 -2.24642336e-01 3.86033326e-01 ... 4.45126444e-01 -5.44542313e-01 -6.00852311e-01] ... [-1.24205661e+00 4.68529612e-01 -4.60085958e-01 ... 3.19628119e-01 6.49380922e-01 -2.40012035e-01] [ 2.74577141e-01 -8.38399053e-01 -2.94427782e-01 ... 1.65912703e-01 -1.63703620e-01 1.77283156e+00] [ 6.09975457e-01 -8.69006634e-01 -1.91394493e-01 ... -2.52067238e-01 -3.34271699e-01 5.51683187e-01]] [[-2.30461150e-01 -4.79671180e-01 -3.30772437e-02 ... -7.19720423e-01 5.93872607e-01 -6.84237778e-01] [-7.12859333e-01 -1.25685513e-01 7.46397793e-01 ... 2.77634263e-01 4.86914665e-01 -3.89984459e-01] [ 1.32236409e+00 6.44134045e-01 -9.13053632e-01 ... 1.26789063e-01 -1.74609637e+00 -2.27396637e-02] ... [-5.35598934e-01 3.63037676e-01 -4.80114400e-01 ... 2.38589004e-01 -1.70415893e-01 -2.31209159e-01] [-5.68340048e-02 -1.83107197e+00 -2.14587554e-01 ... 
7.45907009e-01 -1.01203656e+00 -6.53039873e-01] [ 1.03946519e+00 5.81371605e-01 -7.29833186e-01 ... -1.58225417e-01 1.23474073e+00 -2.18032882e-01]] [[-3.31357002e-01 9.56688881e-01 5.71642160e-01 ... -4.18013602e-01 1.86939812e+00 -7.14590073e-01] [ 1.83490500e-01 1.07054472e+00 2.49784395e-01 ... -1.93599209e-01 1.02616839e-01 2.57225156e-01] [ 8.19792688e-01 2.28998467e-01 -1.94741726e-01 ... -1.47847772e-01 -1.25828969e+00 -6.92721233e-02] ... [-4.98991549e-01 -1.22050665e-01 -7.13144541e-01 ... 8.94551516e-01 -6.13372564e-01 2.45957747e-01] [ 1.60557523e-01 8.47890675e-01 4.11466435e-02 ... 2.41438702e-01 8.90677869e-02 -4.07250881e-01] [ 4.07720596e-01 -6.28068447e-01 -2.57171661e-01 ... 2.25316092e-01 1.07808208e+00 1.32919505e-01]]] [[[ 8.25615525e-01 1.81356180e+00 -8.66076946e-01 ... -9.89050940e-02 1.45218700e-01 -1.20174491e+00] [-5.23962438e-01 1.80820489e+00 1.11708081e+00 ... -8.36570621e-01 4.20578092e-01 2.54613733e+00] [-6.94975793e-01 1.58919859e+00 1.68095529e+00 ... -2.54499125e+00 -6.36069179e-01 1.44331360e+00] ... [-1.40171421e+00 1.65714252e+00 -1.95739925e-01 ... 5.88319242e-01 -9.24150199e-02 -2.73232013e-01] [ 5.53514242e-01 6.39447033e-01 5.52607812e-02 ... -1.44736135e+00 2.80547452e+00 -1.45662010e+00] [ 4.95688260e-01 -9.13025856e-01 -1.01789439e+00 ... -6.97981954e-01 1.67462063e+00 2.92738169e-01]] [[ 1.19668984e+00 7.21937299e-01 1.42704499e+00 ... 1.56402260e-01 -2.16693783e+00 1.99755800e+00] [ 2.62091815e-01 5.88733196e-01 -2.19849363e-01 ... -8.86493266e-01 3.57160449e-01 1.27183092e+00] [-3.11833644e+00 -2.23737669e+00 4.93798524e-01 ... -3.77952121e-02 1.38077104e+00 -4.80741769e-01] ... [-1.43860206e-01 4.43858474e-01 2.32282662e+00 ... 1.38742179e-01 5.81207573e-01 -3.62611473e-01] [-2.31510729e-01 -3.54437113e-01 -2.29033336e-01 ... 4.82865453e-01 1.37592292e+00 -1.87030494e+00] [ 8.18248630e-01 1.24932814e+00 3.62147665e+00 ... 
-9.75662112e-01 -9.22248840e-01 2.67722845e+00]] [[-3.67460698e-01 1.36492717e+00 -1.01616895e+00 ... -6.08770132e-01 -6.02531433e-01 5.93257844e-01] [-6.74505889e-01 8.72868955e-01 -1.29252303e+00 ... -7.93642819e-01 -2.66751766e+00 -1.24426588e-01] [-5.70021689e-01 1.22961116e+00 -1.55840680e-01 ... -1.28096509e+00 1.31134844e+00 4.31161344e-01] ... [-3.44477963e+00 -6.03047788e-01 -1.89816892e+00 ... -1.50416359e-01 -3.98444086e-01 -2.41477728e+00] [ 6.27341330e-01 7.95397639e-01 -7.90393770e-01 ... -7.63399541e-01 1.46502542e+00 -1.94759417e+00] [-3.36428165e-01 -1.60232937e+00 -1.77591249e-01 ... 3.86021256e-01 1.04135072e+00 4.38311547e-01]] ... [[-1.18916437e-01 -3.65837872e-01 -1.29987746e-01 ... 9.38756526e-01 2.06027842e+00 -3.38769507e+00] [ 6.35874689e-01 2.03076530e+00 -2.16217709e+00 ... 3.71316701e-01 -1.18614867e-01 -5.88313341e-02] [-7.96790481e-01 -6.85453773e-01 -3.54261303e+00 ... -4.16919053e-01 1.92076302e+00 3.82162571e-01] ... [-3.47994655e-01 -2.43777744e-02 -1.64690018e+00 ... -5.92212021e-01 -7.41515011e-02 1.23063099e+00] [-6.57767773e-01 6.80462837e-01 -1.63917810e-01 ... -6.70929074e-01 5.50397813e-01 -5.03994226e-01] [-3.94734478e+00 3.28117669e-01 1.66491377e+00 ... 1.54713535e+00 2.97097325e-01 2.03726435e+00]] [[-5.53390861e-01 -1.11214459e+00 7.71475554e-01 ... 2.96657825e+00 1.98688328e+00 -2.00884104e+00] [ 1.33954093e-01 -3.95129383e-01 -1.05606899e-01 ... -5.17512083e-01 -2.03728065e-01 -1.25674317e-02] [-1.11466527e+00 -4.87686723e-01 -1.13286924e+00 ... -3.87338877e-01 3.52274239e-01 -3.07702243e-01] ... [-3.59267622e-01 -9.69477072e-02 5.59513152e-01 ... 1.93292010e+00 -1.35370743e+00 -3.89907993e-02] [ 2.03768396e+00 -1.36172652e-01 -1.40440953e+00 ... -2.07560897e+00 1.94744825e-01 -1.47092545e+00] [-2.66265482e-01 -2.43369770e+00 -3.53390396e-01 ... -2.72652000e-01 4.12058979e-01 1.33954668e+00]] [[-2.52262449e+00 -9.86020863e-02 1.89435613e+00 ... 
1.53804272e-01 7.80662715e-01 8.96219373e-01] [-2.99125403e-01 4.58868474e-01 1.11471581e+00 ... 1.37112093e+00 1.81074750e+00 8.47174108e-01] [-1.46232855e+00 1.00664866e+00 1.45081258e+00 ... 9.58983779e-01 -9.60008383e-01 1.39331961e+00] ... [-2.11399436e+00 6.41068339e-01 3.05708021e-01 ... -1.76465976e+00 -1.03229594e+00 1.01881742e-01] [ 1.32840168e+00 8.53751242e-01 1.92118382e+00 ... 3.62299979e-02 -1.23311698e+00 4.02282858e+00] [-2.52094269e+00 6.27124727e-01 5.41134059e-01 ... 1.15271127e+00 1.94926277e-01 1.43286839e-01]]] [[[ 2.29938582e-01 2.12093377e+00 -7.18584359e-01 ... -1.91118479e-01 7.97745407e-01 -7.26370454e-01] [-1.93720117e-01 7.06631780e-01 -4.48903233e-01 ... -1.16582632e+00 1.34240925e-01 -1.42326280e-01] [-1.66442871e-01 -7.05649927e-02 -2.22926438e-01 ... 1.76510215e+00 4.82371718e-01 -3.52484196e-01] ... [ 2.12708980e-01 -4.46911380e-02 6.62938893e-01 ... 1.06654748e-01 -9.59996641e-01 4.20225620e-01] [-8.59072745e-01 7.64077961e-01 -1.11031294e-01 ... -5.88676035e-01 -4.61347938e-01 -7.57442564e-02] [ 2.89670616e-01 -1.28845334e+00 1.19659913e+00 ... -3.93799871e-01 1.48725939e+00 -5.45624256e-01]] [[-7.39127874e-01 1.37809098e+00 -1.04036689e+00 ... -2.01619434e+00 -1.46898675e+00 -2.04927444e+00] [ 9.71508622e-01 8.16866532e-02 3.54607165e-01 ... 1.01657784e+00 -6.24686852e-02 -4.16098237e-01] [-6.79967821e-01 2.59212732e-01 -1.23887450e-01 ... -9.47945952e-01 8.89490172e-02 -1.66729331e-01] ... [ 1.88087142e+00 -9.87066388e-01 -1.22022450e+00 ... -1.20220459e+00 4.33319569e-01 1.30498797e-01] [-1.52101606e-01 -1.85071945e-01 -3.40592772e-01 ... 9.63850558e-01 -1.02251507e-01 -6.67645752e-01] [ 7.95226693e-01 6.93284929e-01 -4.26819891e-01 ... 4.00376320e-01 7.11297393e-01 2.37224922e-01]] [[-1.13557601e+00 1.03531107e-01 -5.62609315e-01 ... 7.55592704e-01 1.59065640e+00 4.65706468e-01] [-1.12256753e+00 3.19761097e-01 8.28556716e-01 ... 
7.89312840e-01 -4.09988374e-01 7.91986704e-01] [ 2.83128202e-01 -6.04571998e-02 -7.03970373e-01 ... 7.99406290e-01 2.96971817e-02 8.57537314e-02] ... [-2.30442390e-01 4.65911835e-01 1.01752472e+00 ... -4.20730822e-02 -1.01410854e+00 1.38689354e-01] [ 2.13800609e-01 -1.98771402e-01 4.15642150e-02 ... -2.27985578e-03 -5.51742196e-01 -2.16820806e-01] [ 5.01909316e-01 -8.65036845e-01 -2.77141273e-01 ... 5.57169676e-01 -8.04671466e-01 -1.39080322e+00]] ... [[ 4.77367222e-01 7.43384659e-01 6.52290404e-01 ... -4.72574145e-01 1.10505247e+00 -4.51132536e-01] [ 2.42501006e-01 4.04860616e-01 -3.61268073e-01 ... 1.36232030e+00 -9.31523740e-02 5.85098922e-01] [-6.44791961e-01 5.37525952e-01 3.46183211e-01 ... 1.89197227e-01 7.41023004e-01 1.11857355e+00] ... [-3.61923277e-01 -1.00952184e+00 4.83974904e-01 ... -1.08326793e+00 -8.98689151e-01 -8.46857548e-01] [ 9.70049560e-01 7.90402070e-02 3.87940556e-01 ... 6.19331837e-01 -7.47528434e-01 4.41341400e-01] [ 3.97482179e-02 3.48581463e-01 4.12805587e-01 ... -1.04694617e+00 -3.48496065e-02 -9.13475871e-01]] [[-1.21407107e-01 2.17482954e-01 7.66860187e-01 ... -1.43700808e-01 -1.08144373e-01 2.99938202e-01] [ 5.40837109e-01 2.86067933e-01 -6.64420366e-01 ... 8.73623371e-01 -2.78961122e-01 1.27583623e+00] [ 1.50018379e-01 -1.09091926e+00 8.17057490e-01 ... 6.58730686e-01 9.73892450e-01 -9.69095588e-01] ... [-1.41364709e-01 1.52205861e+00 -1.79203141e+00 ... -1.19063878e+00 4.24126267e-01 -3.70764345e-01] [ 1.92799568e+00 -5.18361807e-01 -1.43384129e-01 ... -7.95067549e-01 8.68026316e-01 3.58119577e-01] [ 1.64591420e+00 1.90302086e+00 -9.45921719e-01 ... -4.34096664e-01 3.10969651e-01 -6.88856721e-01]] [[-1.12178791e+00 -1.88130319e+00 -9.93188262e-01 ... 1.10917974e+00 4.03490454e-01 1.31414604e+00] [-6.02916069e-02 -5.17936230e-01 5.79308569e-01 ... 1.23709869e+00 5.63742936e-01 9.47844207e-01] [ 1.34189868e+00 1.33920595e-01 -1.59131825e-01 ... 1.73028481e+00 -9.42028522e-01 1.31747842e+00] ... 
[-4.29459035e-01 1.09147120e+00 -1.79892123e-01 ... 3.27192515e-01 9.38173711e-01 1.51046276e-01] [-1.70739973e+00 -5.95293045e-01 -9.75802064e-01 ... -9.56360757e-01 -8.75406027e-01 1.79768431e+00] [-1.07339180e+00 1.79315710e+00 9.92721498e-01 ... -8.33951414e-01 1.58943379e+00 -8.83327186e-01]]]]]; ov_res: [[[[[-3.59469801e-01 -7.93683290e-01 -4.58173066e-01 ... -2.04758858e-03 3.17680597e-01 -2.23595589e-01] [-2.68838108e-01 -3.91497701e-01 -3.62352729e-01 ... 7.32428432e-01 -1.83828914e+00 4.83148634e-01] [-9.94111955e-01 -1.32502228e-01 5.07813990e-01 ... -5.56889832e-01 3.88944775e-01 1.92322135e-01] ... [ 1.29935801e+00 -4.07447442e-02 1.61365911e-01 ... -6.93130314e-01 1.01567671e-01 -1.22964576e-01] [-1.20049715e+00 1.57001927e-01 4.14902091e-01 ... 6.00834489e-01 -6.77195787e-01 2.32937157e-01] [ 9.00223553e-01 -3.71433981e-02 -1.35183856e-01 ... -2.57866651e-01 1.19066834e+00 -1.14816442e-01]] [[-4.69016850e-01 -1.29872799e+00 -1.71235383e+00 ... 3.67876738e-02 -5.71383238e-01 3.84844780e-01] [ 3.13218713e-01 -8.61273646e-01 4.23088819e-01 ... 2.09414029e+00 -1.30849051e+00 -5.58763385e-01] [ 4.73545827e-02 -1.07931048e-01 -2.43706718e-01 ... 2.52228349e-01 3.86067092e-01 -2.63641059e-01] ... [-1.08458471e+00 -7.06016541e-01 -3.44514459e-01 ... -7.50932753e-01 1.14743066e+00 -5.43365180e-01] [ 4.53522503e-01 -1.04580998e+00 2.16355845e-01 ... 1.46205151e+00 -1.23617508e-01 8.64649594e-01] [ 5.12693584e-01 4.35550474e-02 -3.52594107e-01 ... 6.41581118e-01 6.25728130e-01 1.89270582e-02]] [[ 1.59267798e-01 -2.37924859e-01 -3.26571465e-01 ... -6.69897869e-02 1.52294829e-01 -8.12007129e-01] [ 1.27529764e+00 -6.76160455e-01 -2.74033099e-01 ... -2.05017328e-02 1.09701431e+00 4.43542123e-01] [-7.25300372e-01 -7.80219972e-01 -8.06571126e-01 ... -3.67295772e-01 6.81708634e-01 5.18090092e-02] ... [ 1.28351593e+00 -3.28376800e-01 -2.92998180e-02 ... -4.80646759e-01 -3.68599057e-01 7.47470081e-01] [ 3.32576632e-01 -1.69943675e-01 -9.65682417e-02 ... 
1.53600439e-01 6.63388431e-01 1.57269672e-01] [ 3.89898419e-01 1.15878117e+00 5.45328259e-01 ... -8.18005025e-01 -1.52418062e-01 -7.36952722e-02]] ... [[-2.54050434e-01 -6.07667446e-01 -5.82914591e-01 ... 8.36687922e-01 8.37087572e-01 8.07450414e-01] [-1.09742808e+00 -8.05407941e-01 7.04808176e-01 ... 4.81958568e-01 1.05335927e+00 2.48691425e-01] [ 3.14199269e-01 -2.24642336e-01 3.86033326e-01 ... 4.45126444e-01 -5.44542313e-01 -6.00852311e-01] ... [-1.24205661e+00 4.68529612e-01 -4.60085958e-01 ... 3.19628119e-01 6.49380922e-01 -2.40012035e-01] [ 2.74577141e-01 -8.38399053e-01 -2.94427782e-01 ... 1.65912703e-01 -1.63703620e-01 1.77283156e+00] [ 6.09975457e-01 -8.69006634e-01 -1.91394493e-01 ... -2.52067238e-01 -3.34271699e-01 5.51683187e-01]] [[-2.30461150e-01 -4.79671180e-01 -3.30772437e-02 ... -7.19720423e-01 5.93872607e-01 -6.84237778e-01] [-7.12859333e-01 -1.25685513e-01 7.46397793e-01 ... 2.77634263e-01 4.86914665e-01 -3.89984459e-01] [ 1.32236409e+00 6.44134045e-01 -9.13053632e-01 ... 1.26789063e-01 -1.74609637e+00 -2.27396637e-02] ... [-5.35598934e-01 3.63037676e-01 -4.80114400e-01 ... 2.38589004e-01 -1.70415893e-01 -2.31209159e-01] [-5.68340048e-02 -1.83107197e+00 -2.14587554e-01 ... 7.45907009e-01 -1.01203656e+00 -6.53039873e-01] [ 1.03946519e+00 5.81371605e-01 -7.29833186e-01 ... -1.58225417e-01 1.23474073e+00 -2.18032882e-01]] [[-3.31357002e-01 9.56688881e-01 5.71642160e-01 ... -4.18013602e-01 1.86939812e+00 -7.14590073e-01] [ 1.83490500e-01 1.07054472e+00 2.49784395e-01 ... -1.93599209e-01 1.02616839e-01 2.57225156e-01] [ 8.19792688e-01 2.28998467e-01 -1.94741726e-01 ... -1.47847772e-01 -1.25828969e+00 -6.92721233e-02] ... [-4.98991549e-01 -1.22050665e-01 -7.13144541e-01 ... 8.94551516e-01 -6.13372564e-01 2.45957747e-01] [ 1.60557523e-01 8.47890675e-01 4.11466435e-02 ... 2.41438702e-01 8.90677869e-02 -4.07250881e-01] [ 4.07720596e-01 -6.28068447e-01 -2.57171661e-01 ... 
2.25316092e-01 1.07808208e+00 1.32919505e-01]]] [[[ 8.25615525e-01 1.81356180e+00 -8.66076946e-01 ... -9.89050940e-02 1.45218700e-01 -1.20174491e+00] [-5.23962438e-01 1.80820489e+00 1.11708081e+00 ... -8.36570621e-01 4.20578092e-01 2.54613733e+00] [-6.94975793e-01 1.58919859e+00 1.68095529e+00 ... -2.54499125e+00 -6.36069179e-01 1.44331360e+00] ... [-1.40171421e+00 1.65714252e+00 -1.95739925e-01 ... 5.88319242e-01 -9.24150199e-02 -2.73232013e-01] [ 5.53514242e-01 6.39447033e-01 5.52607812e-02 ... -1.44736135e+00 2.80547452e+00 -1.45662010e+00] [ 4.95688260e-01 -9.13025856e-01 -1.01789439e+00 ... -6.97981954e-01 1.67462063e+00 2.92738169e-01]] [[ 1.19668984e+00 7.21937299e-01 1.42704499e+00 ... 1.56402260e-01 -2.16693783e+00 1.99755800e+00] [ 2.62091815e-01 5.88733196e-01 -2.19849363e-01 ... -8.86493266e-01 3.57160449e-01 1.27183092e+00] [-3.11833644e+00 -2.23737669e+00 4.93798524e-01 ... -3.77952121e-02 1.38077104e+00 -4.80741769e-01] ... [-1.43860206e-01 4.43858474e-01 2.32282662e+00 ... 1.38742179e-01 5.81207573e-01 -3.62611473e-01] [-2.31510729e-01 -3.54437113e-01 -2.29033336e-01 ... 4.82865453e-01 1.37592292e+00 -1.87030494e+00] [ 8.18248630e-01 1.24932814e+00 3.62147665e+00 ... -9.75662112e-01 -9.22248840e-01 2.67722845e+00]] [[-3.67460698e-01 1.36492717e+00 -1.01616895e+00 ... -6.08770132e-01 -6.02531433e-01 5.93257844e-01] [-6.74505889e-01 8.72868955e-01 -1.29252303e+00 ... -7.93642819e-01 -2.66751766e+00 -1.24426588e-01] [-5.70021689e-01 1.22961116e+00 -1.55840680e-01 ... -1.28096509e+00 1.31134844e+00 4.31161344e-01] ... [-3.44477963e+00 -6.03047788e-01 -1.89816892e+00 ... -1.50416359e-01 -3.98444086e-01 -2.41477728e+00] [ 6.27341330e-01 7.95397639e-01 -7.90393770e-01 ... -7.63399541e-01 1.46502542e+00 -1.94759417e+00] [-3.36428165e-01 -1.60232937e+00 -1.77591249e-01 ... 3.86021256e-01 1.04135072e+00 4.38311547e-01]] ... [[-1.18916437e-01 -3.65837872e-01 -1.29987746e-01 ... 
9.38756526e-01 2.06027842e+00 -3.38769507e+00] [ 6.35874689e-01 2.03076530e+00 -2.16217709e+00 ... 3.71316701e-01 -1.18614867e-01 -5.88313341e-02] [-7.96790481e-01 -6.85453773e-01 -3.54261303e+00 ... -4.16919053e-01 1.92076302e+00 3.82162571e-01] ... [-3.47994655e-01 -2.43777744e-02 -1.64690018e+00 ... -5.92212021e-01 -7.41515011e-02 1.23063099e+00] [-6.57767773e-01 6.80462837e-01 -1.63917810e-01 ... -6.70929074e-01 5.50397813e-01 -5.03994226e-01] [-3.94734478e+00 3.28117669e-01 1.66491377e+00 ... 1.54713535e+00 2.97097325e-01 2.03726435e+00]] [[-5.53390861e-01 -1.11214459e+00 7.71475554e-01 ... 2.96657825e+00 1.98688328e+00 -2.00884104e+00] [ 1.33954093e-01 -3.95129383e-01 -1.05606899e-01 ... -5.17512083e-01 -2.03728065e-01 -1.25674317e-02] [-1.11466527e+00 -4.87686723e-01 -1.13286924e+00 ... -3.87338877e-01 3.52274239e-01 -3.07702243e-01] ... [-3.59267622e-01 -9.69477072e-02 5.59513152e-01 ... 1.93292010e+00 -1.35370743e+00 -3.89907993e-02] [ 2.03768396e+00 -1.36172652e-01 -1.40440953e+00 ... -2.07560897e+00 1.94744825e-01 -1.47092545e+00] [-2.66265482e-01 -2.43369770e+00 -3.53390396e-01 ... -2.72652000e-01 4.12058979e-01 1.33954668e+00]] [[-2.52262449e+00 -9.86020863e-02 1.89435613e+00 ... 1.53804272e-01 7.80662715e-01 8.96219373e-01] [-2.99125403e-01 4.58868474e-01 1.11471581e+00 ... 1.37112093e+00 1.81074750e+00 8.47174108e-01] [-1.46232855e+00 1.00664866e+00 1.45081258e+00 ... 9.58983779e-01 -9.60008383e-01 1.39331961e+00] ... [-2.11399436e+00 6.41068339e-01 3.05708021e-01 ... -1.76465976e+00 -1.03229594e+00 1.01881742e-01] [ 1.32840168e+00 8.53751242e-01 1.92118382e+00 ... 3.62299979e-02 -1.23311698e+00 4.02282858e+00] [-2.52094269e+00 6.27124727e-01 5.41134059e-01 ... 1.15271127e+00 1.94926277e-01 1.43286839e-01]]] [[[ 2.29938582e-01 2.12093377e+00 -7.18584359e-01 ... -1.91118479e-01 7.97745407e-01 -7.26370454e-01] [-1.93720117e-01 7.06631780e-01 -4.48903233e-01 ... 
-1.16582632e+00 1.34240925e-01 -1.42326280e-01] [-1.66442871e-01 -7.05649927e-02 -2.22926438e-01 ... 1.76510215e+00 4.82371718e-01 -3.52484196e-01] ... [ 2.12708980e-01 -4.46911380e-02 6.62938893e-01 ... 1.06654748e-01 -9.59996641e-01 4.20225620e-01] [-8.59072745e-01 7.64077961e-01 -1.11031294e-01 ... -5.88676035e-01 -4.61347938e-01 -7.57442564e-02] [ 2.89670616e-01 -1.28845334e+00 1.19659913e+00 ... -3.93799871e-01 1.48725939e+00 -5.45624256e-01]] [[-7.39127874e-01 1.37809098e+00 -1.04036689e+00 ... -2.01619434e+00 -1.46898675e+00 -2.04927444e+00] [ 9.71508622e-01 8.16866532e-02 3.54607165e-01 ... 1.01657784e+00 -6.24686852e-02 -4.16098237e-01] [-6.79967821e-01 2.59212732e-01 -1.23887450e-01 ... -9.47945952e-01 8.89490172e-02 -1.66729331e-01] ... [ 1.88087142e+00 -9.87066388e-01 -1.22022450e+00 ... -1.20220459e+00 4.33319569e-01 1.30498797e-01] [-1.52101606e-01 -1.85071945e-01 -3.40592772e-01 ... 9.63850558e-01 -1.02251507e-01 -6.67645752e-01] [ 7.95226693e-01 6.93284929e-01 -4.26819891e-01 ... 4.00376320e-01 7.11297393e-01 2.37224922e-01]] [[-1.13557601e+00 1.03531107e-01 -5.62609315e-01 ... 7.55592704e-01 1.59065640e+00 4.65706468e-01] [-1.12256753e+00 3.19761097e-01 8.28556716e-01 ... 7.89312840e-01 -4.09988374e-01 7.91986704e-01] [ 2.83128202e-01 -6.04571998e-02 -7.03970373e-01 ... 7.99406290e-01 2.96971817e-02 8.57537314e-02] ... [-2.30442390e-01 4.65911835e-01 1.01752472e+00 ... -4.20730822e-02 -1.01410854e+00 1.38689354e-01] [ 2.13800609e-01 -1.98771402e-01 4.15642150e-02 ... -2.27985578e-03 -5.51742196e-01 -2.16820806e-01] [ 5.01909316e-01 -8.65036845e-01 -2.77141273e-01 ... 5.57169676e-01 -8.04671466e-01 -1.39080322e+00]] ... [[ 4.77367222e-01 7.43384659e-01 6.52290404e-01 ... -4.72574145e-01 1.10505247e+00 -4.51132536e-01] [ 2.42501006e-01 4.04860616e-01 -3.61268073e-01 ... 1.36232030e+00 -9.31523740e-02 5.85098922e-01] [-6.44791961e-01 5.37525952e-01 3.46183211e-01 ... 1.89197227e-01 7.41023004e-01 1.11857355e+00] ... 
[-3.61923277e-01 -1.00952184e+00 4.83974904e-01 ... -1.08326793e+00 -8.98689151e-01 -8.46857548e-01] [ 9.70049560e-01 7.90402070e-02 3.87940556e-01 ... 6.19331837e-01 -7.47528434e-01 4.41341400e-01] [ 3.97482179e-02 3.48581463e-01 4.12805587e-01 ... -1.04694617e+00 -3.48496065e-02 -9.13475871e-01]] [[-1.21407107e-01 2.17482954e-01 7.66860187e-01 ... -1.43700808e-01 -1.08144373e-01 2.99938202e-01] [ 5.40837109e-01 2.86067933e-01 -6.64420366e-01 ... 8.73623371e-01 -2.78961122e-01 1.27583623e+00] [ 1.50018379e-01 -1.09091926e+00 8.17057490e-01 ... 6.58730686e-01 9.73892450e-01 -9.69095588e-01] ... [-1.41364709e-01 1.52205861e+00 -1.79203141e+00 ... -1.19063878e+00 4.24126267e-01 -3.70764345e-01] [ 1.92799568e+00 -5.18361807e-01 -1.43384129e-01 ... -7.95067549e-01 8.68026316e-01 3.58119577e-01] [ 1.64591420e+00 1.90302086e+00 -9.45921719e-01 ... -4.34096664e-01 3.10969651e-01 -6.88856721e-01]] [[-1.12178791e+00 -1.88130319e+00 -9.93188262e-01 ... 1.10917974e+00 4.03490454e-01 1.31414604e+00] [-6.02916069e-02 -5.17936230e-01 5.79308569e-01 ... 1.23709869e+00 5.63742936e-01 9.47844207e-01] [ 1.34189868e+00 1.33920595e-01 -1.59131825e-01 ... 1.73028481e+00 -9.42028522e-01 1.31747842e+00] ... [-4.29459035e-01 1.09147120e+00 -1.79892123e-01 ... 3.27192515e-01 9.38173711e-01 1.51046276e-01] [-1.70739973e+00 -5.95293045e-01 -9.75802064e-01 ... -9.56360757e-01 -8.75406027e-01 1.79768431e+00] [-1.07339180e+00 1.79315710e+00 9.92721498e-01 ... -8.33951414e-01 1.58943379e+00 -8.83327186e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 1, 1], 'dilations': [1, 1, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1004.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.pads : int[] = prim::Constant[value=[0, 1, 1]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.01 * 1.9677 (2,1,1,.,.) = -0.5281 (3,1,1,.,.) = -0.8658 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 3.4210302e-03 2.8139288e-02 ... -8.2135303e-03 2.6384650e-02 0.0000000e+00] [ 0.0000000e+00 -2.7277252e-02 1.4558244e-02 ... -2.8013671e-02 -8.9213224e-03 0.0000000e+00] ... [ 0.0000000e+00 1.7852239e-02 -2.0979310e-03 ... -3.4691103e-02 4.0350193e-03 0.0000000e+00] [ 0.0000000e+00 4.6095900e-02 1.9612294e-02 ... 2.5869417e-03 2.3025987e-03 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 1.8237278e-02 1.1928703e-02 ... 5.3413078e-02 -1.8528599e-02 0.0000000e+00] [ 0.0000000e+00 -1.7192263e-02 2.0235414e-02 ... 2.6738383e-03 1.7016238e-02 0.0000000e+00] ... [ 0.0000000e+00 -1.0544360e-02 -2.0890770e-02 ... -1.6372614e-02 5.8816425e-03 0.0000000e+00] [ 0.0000000e+00 4.6797398e-02 -1.1066653e-02 ... 
2.1696616e-02 -2.8663885e-02 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 2.1601941e-02 -2.2068862e-02 ... 2.2767218e-02 -1.0430363e-03 0.0000000e+00] [ 0.0000000e+00 -3.1851474e-02 -1.3430564e-02 ... 1.1208314e-02 -1.9148784e-02 0.0000000e+00] ... [ 0.0000000e+00 -3.0711552e-02 1.4292120e-02 ... 2.4070460e-02 -1.2074976e-02 0.0000000e+00] [ 0.0000000e+00 -1.0459658e-02 -4.3375213e-03 ... 1.7944120e-02 5.7946495e-03 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 4.0005897e-03 -8.0879973e-03 ... -1.1525988e-02 1.4765149e-02 0.0000000e+00] [ 0.0000000e+00 8.2708020e-03 -1.1731864e-02 ... 6.5699033e-04 -2.6711984e-02 0.0000000e+00] ... [ 0.0000000e+00 1.6311960e-02 -5.0149234e-03 ... 4.9186824e-03 -5.9329979e-03 0.0000000e+00] [ 0.0000000e+00 1.3649773e-03 3.9208871e-03 ... 1.5360676e-02 7.8958459e-03 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -1.3289722e-02 2.4738265e-02 ... 9.5782185e-04 1.5208754e-02 0.0000000e+00] [ 0.0000000e+00 7.3184795e-03 1.8757210e-03 ... 3.2903068e-03 -3.0683581e-02 0.0000000e+00] ... [ 0.0000000e+00 -1.3956260e-02 8.9134853e-03 ... -1.1211864e-02 -3.5292688e-03 0.0000000e+00] [ 0.0000000e+00 -2.1609853e-03 -1.1290754e-02 ... -2.2014027e-02 2.6552614e-03 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 7.7857585e-06 -2.5958033e-02 ... 
-1.0213445e-05 -4.9068876e-02 0.0000000e+00] [ 0.0000000e+00 -3.5967671e-03 -2.1730676e-02 ... -1.2448561e-02 -2.1678607e-03 0.0000000e+00] ... [ 0.0000000e+00 -1.7572055e-02 5.3960537e-03 ... -3.2118328e-02 -2.7967156e-03 0.0000000e+00] [ 0.0000000e+00 1.2844954e-02 -1.6657094e-02 ... -1.9618180e-02 -5.1109563e-04 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 3.7968150e-01 2.4880126e-01 ... 4.7869790e-01 3.1766877e-01 0.0000000e+00] [ 0.0000000e+00 -1.6922201e-01 -3.8375974e-01 ... 2.4571848e-01 -1.9924219e-01 0.0000000e+00] ... [ 0.0000000e+00 3.7838012e-01 1.8409440e-01 ... 1.4990814e-01 2.7300435e-01 0.0000000e+00] [ 0.0000000e+00 -4.6393153e-01 5.9092510e-01 ... -3.2392937e-01 -3.2068625e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -3.7128344e-01 -2.3520815e-01 ... -1.0318377e+00 6.5524179e-01 0.0000000e+00] [ 0.0000000e+00 3.6472261e-01 6.4846480e-01 ... -1.9819558e-01 1.6484140e-01 0.0000000e+00] ... [ 0.0000000e+00 -1.4243947e-01 6.0456544e-01 ... 6.1876082e-01 8.6241975e-02 0.0000000e+00] [ 0.0000000e+00 -5.1887637e-01 4.8522073e-01 ... 9.7460228e-01 -7.6925474e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 3.4794143e-01 -2.3811510e-01 ... 1.0683701e+00 -6.3805842e-01 0.0000000e+00] [ 0.0000000e+00 -5.5791515e-01 -1.7211637e-01 ... -1.3270554e-01 -4.0452448e-01 0.0000000e+00] ... [ 0.0000000e+00 -4.7541142e-01 1.1802976e-01 ... -2.0336424e-01 -5.5699885e-01 0.0000000e+00] [ 0.0000000e+00 1.0278167e+00 3.0936143e-01 ... 
-1.1511653e-01 -7.2868365e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 2.6878646e-01 -2.0415698e-01 ... 5.8718526e-01 6.6766977e-02 0.0000000e+00] [ 0.0000000e+00 -3.0486971e-01 -2.1778427e-01 ... 1.0259461e-01 -3.9571500e-01 0.0000000e+00] ... [ 0.0000000e+00 2.5010794e-01 9.3899441e-01 ... -8.0499607e-01 1.0225321e+00 0.0000000e+00] [ 0.0000000e+00 -3.6919960e-03 -1.4047804e-01 ... 1.0012223e-01 4.2193741e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -1.4614049e+00 -3.1465694e-01 ... 3.6797780e-01 3.4656966e-01 0.0000000e+00] [ 0.0000000e+00 3.1407923e-01 5.0910091e-01 ... 1.0393699e+00 6.6344512e-01 0.0000000e+00] ... [ 0.0000000e+00 2.3373996e-01 2.9546797e-01 ... 1.0075743e+00 5.7490772e-01 0.0000000e+00] [ 0.0000000e+00 6.8750018e-01 4.7534626e-02 ... -7.8237253e-01 -5.6077097e-02 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -7.2381598e-01 4.7682893e-01 ... 6.5590248e-02 -2.0154023e-01 0.0000000e+00] [ 0.0000000e+00 6.5394235e-01 -1.4746901e-01 ... -9.8564303e-01 -2.9338557e-01 0.0000000e+00] ... [ 0.0000000e+00 7.3086774e-01 -3.1484556e-01 ... -2.1156214e-01 3.2027155e-01 0.0000000e+00] [ 0.0000000e+00 9.8509034e-03 2.3578184e-02 ... 6.3899577e-01 5.1109058e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 1.0052309e+00 4.0591273e-01 ... 
-1.2668877e+00 -1.2587374e-01 0.0000000e+00] [ 0.0000000e+00 6.4767498e-01 7.0139647e-01 ... -3.5704830e-01 -6.7348814e-01 0.0000000e+00] ... [ 0.0000000e+00 2.3717608e-01 1.0418903e+00 ... 7.4560755e-01 -3.6476129e-01 0.0000000e+00] [ 0.0000000e+00 -1.3775052e+00 9.7141802e-01 ... 2.0397561e+00 -7.3237664e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 1.8680540e-01 -1.6985494e-01 ... -3.9255327e-01 1.0463545e+00 0.0000000e+00] [ 0.0000000e+00 -2.4724448e-01 -8.7677413e-01 ... 8.4747955e-02 2.1369486e+00 0.0000000e+00] ... [ 0.0000000e+00 3.6774975e-01 -8.5688645e-01 ... 7.9988383e-02 1.2339389e+00 0.0000000e+00] [ 0.0000000e+00 -5.5422115e-01 1.6773539e+00 ... -2.9352692e-01 -1.2403655e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -9.4074696e-01 6.2453419e-01 ... -1.5776097e+00 -4.6975431e-01 0.0000000e+00] [ 0.0000000e+00 -5.5531079e-01 5.6544747e-02 ... 1.8088473e+00 -9.4723128e-02 0.0000000e+00] ... [ 0.0000000e+00 5.2936715e-01 4.1132307e-01 ... 1.4442085e+00 2.9475325e-01 0.0000000e+00] [ 0.0000000e+00 -1.4925730e-01 6.0399193e-01 ... 4.3741348e-01 -9.4539440e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -2.8871363e-01 3.3035278e-01 ... 1.8047805e-01 2.8508508e-01 0.0000000e+00] [ 0.0000000e+00 -5.6127936e-01 3.5991248e-01 ... 1.5518564e-01 -7.5099093e-01 0.0000000e+00] ... [ 0.0000000e+00 7.6906079e-01 -2.1605399e-02 ... 6.1005861e-01 2.4413402e-01 0.0000000e+00] [ 0.0000000e+00 9.1956931e-01 6.9908530e-01 ... 
1.9330148e-01 -4.7660393e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 2.0728198e-01 3.1184465e-01 ... 4.2644776e-02 -8.3795822e-01 0.0000000e+00] [ 0.0000000e+00 -1.5824392e-01 5.5391067e-01 ... -4.3641478e-01 8.2692248e-01 0.0000000e+00] ... [ 0.0000000e+00 1.9234124e-01 1.3078959e+00 ... 1.0055003e+00 1.0804971e+00 0.0000000e+00] [ 0.0000000e+00 -1.9351815e-01 -6.0470011e-02 ... -6.3886684e-01 -4.1226411e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -3.6302444e-01 1.1252494e-01 ... -1.1834843e+00 1.3367213e+00 0.0000000e+00] [ 0.0000000e+00 1.9042014e-01 8.5994518e-01 ... -1.7346660e+00 -1.4418792e+00 0.0000000e+00] ... [ 0.0000000e+00 -2.7397949e-01 2.5681727e+00 ... -6.4386594e-01 -3.1415853e-01 0.0000000e+00] [ 0.0000000e+00 1.8065439e-01 -9.6081406e-02 ... -1.2905267e+00 1.6346376e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]]]; ov_res: [[[[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 3.4210302e-03 2.8139288e-02 ... -8.2135303e-03 2.6384650e-02 0.0000000e+00] [ 0.0000000e+00 -2.7277252e-02 1.4558244e-02 ... -2.8013671e-02 -8.9213224e-03 0.0000000e+00] ... [ 0.0000000e+00 1.7852239e-02 -2.0979310e-03 ... -3.4691103e-02 4.0350193e-03 0.0000000e+00] [ 0.0000000e+00 4.6095900e-02 1.9612294e-02 ... 2.5869417e-03 2.3025987e-03 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 1.8237278e-02 1.1928703e-02 ... 
5.3413078e-02 -1.8528599e-02 0.0000000e+00] [ 0.0000000e+00 -1.7192263e-02 2.0235414e-02 ... 2.6738383e-03 1.7016238e-02 0.0000000e+00] ... [ 0.0000000e+00 -1.0544360e-02 -2.0890770e-02 ... -1.6372614e-02 5.8816425e-03 0.0000000e+00] [ 0.0000000e+00 4.6797398e-02 -1.1066653e-02 ... 2.1696616e-02 -2.8663885e-02 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 2.1601941e-02 -2.2068862e-02 ... 2.2767218e-02 -1.0430363e-03 0.0000000e+00] [ 0.0000000e+00 -3.1851474e-02 -1.3430564e-02 ... 1.1208314e-02 -1.9148784e-02 0.0000000e+00] ... [ 0.0000000e+00 -3.0711552e-02 1.4292120e-02 ... 2.4070460e-02 -1.2074976e-02 0.0000000e+00] [ 0.0000000e+00 -1.0459658e-02 -4.3375213e-03 ... 1.7944120e-02 5.7946495e-03 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 4.0005897e-03 -8.0879973e-03 ... -1.1525988e-02 1.4765149e-02 0.0000000e+00] [ 0.0000000e+00 8.2708020e-03 -1.1731864e-02 ... 6.5699033e-04 -2.6711984e-02 0.0000000e+00] ... [ 0.0000000e+00 1.6311960e-02 -5.0149234e-03 ... 4.9186824e-03 -5.9329979e-03 0.0000000e+00] [ 0.0000000e+00 1.3649773e-03 3.9208871e-03 ... 1.5360676e-02 7.8958459e-03 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -1.3289722e-02 2.4738265e-02 ... 9.5782185e-04 1.5208754e-02 0.0000000e+00] [ 0.0000000e+00 7.3184795e-03 1.8757210e-03 ... 3.2903068e-03 -3.0683581e-02 0.0000000e+00] ... [ 0.0000000e+00 -1.3956260e-02 8.9134853e-03 ... -1.1211864e-02 -3.5292688e-03 0.0000000e+00] [ 0.0000000e+00 -2.1609853e-03 -1.1290754e-02 ... 
-2.2014027e-02 2.6552614e-03 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 7.7857585e-06 -2.5958033e-02 ... -1.0213445e-05 -4.9068876e-02 0.0000000e+00] [ 0.0000000e+00 -3.5967671e-03 -2.1730676e-02 ... -1.2448561e-02 -2.1678607e-03 0.0000000e+00] ... [ 0.0000000e+00 -1.7572055e-02 5.3960537e-03 ... -3.2118328e-02 -2.7967156e-03 0.0000000e+00] [ 0.0000000e+00 1.2844954e-02 -1.6657094e-02 ... -1.9618180e-02 -5.1109563e-04 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 3.7968150e-01 2.4880126e-01 ... 4.7869790e-01 3.1766877e-01 0.0000000e+00] [ 0.0000000e+00 -1.6922201e-01 -3.8375974e-01 ... 2.4571848e-01 -1.9924219e-01 0.0000000e+00] ... [ 0.0000000e+00 3.7838012e-01 1.8409440e-01 ... 1.4990814e-01 2.7300435e-01 0.0000000e+00] [ 0.0000000e+00 -4.6393153e-01 5.9092510e-01 ... -3.2392937e-01 -3.2068625e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -3.7128344e-01 -2.3520815e-01 ... -1.0318377e+00 6.5524179e-01 0.0000000e+00] [ 0.0000000e+00 3.6472261e-01 6.4846480e-01 ... -1.9819558e-01 1.6484140e-01 0.0000000e+00] ... [ 0.0000000e+00 -1.4243947e-01 6.0456544e-01 ... 6.1876082e-01 8.6241975e-02 0.0000000e+00] [ 0.0000000e+00 -5.1887637e-01 4.8522073e-01 ... 9.7460228e-01 -7.6925474e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 3.4794143e-01 -2.3811510e-01 ... 
1.0683701e+00 -6.3805842e-01 0.0000000e+00] [ 0.0000000e+00 -5.5791515e-01 -1.7211637e-01 ... -1.3270554e-01 -4.0452448e-01 0.0000000e+00] ... [ 0.0000000e+00 -4.7541142e-01 1.1802976e-01 ... -2.0336424e-01 -5.5699885e-01 0.0000000e+00] [ 0.0000000e+00 1.0278167e+00 3.0936143e-01 ... -1.1511653e-01 -7.2868365e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 2.6878646e-01 -2.0415698e-01 ... 5.8718526e-01 6.6766977e-02 0.0000000e+00] [ 0.0000000e+00 -3.0486971e-01 -2.1778427e-01 ... 1.0259461e-01 -3.9571500e-01 0.0000000e+00] ... [ 0.0000000e+00 2.5010794e-01 9.3899441e-01 ... -8.0499607e-01 1.0225321e+00 0.0000000e+00] [ 0.0000000e+00 -3.6919960e-03 -1.4047804e-01 ... 1.0012223e-01 4.2193741e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -1.4614049e+00 -3.1465694e-01 ... 3.6797780e-01 3.4656966e-01 0.0000000e+00] [ 0.0000000e+00 3.1407923e-01 5.0910091e-01 ... 1.0393699e+00 6.6344512e-01 0.0000000e+00] ... [ 0.0000000e+00 2.3373996e-01 2.9546797e-01 ... 1.0075743e+00 5.7490772e-01 0.0000000e+00] [ 0.0000000e+00 6.8750018e-01 4.7534626e-02 ... -7.8237253e-01 -5.6077097e-02 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -7.2381598e-01 4.7682893e-01 ... 6.5590248e-02 -2.0154023e-01 0.0000000e+00] [ 0.0000000e+00 6.5394235e-01 -1.4746901e-01 ... -9.8564303e-01 -2.9338557e-01 0.0000000e+00] ... [ 0.0000000e+00 7.3086774e-01 -3.1484556e-01 ... -2.1156214e-01 3.2027155e-01 0.0000000e+00] [ 0.0000000e+00 9.8509034e-03 2.3578184e-02 ... 
6.3899577e-01 5.1109058e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 1.0052309e+00 4.0591273e-01 ... -1.2668877e+00 -1.2587374e-01 0.0000000e+00] [ 0.0000000e+00 6.4767498e-01 7.0139647e-01 ... -3.5704830e-01 -6.7348814e-01 0.0000000e+00] ... [ 0.0000000e+00 2.3717608e-01 1.0418903e+00 ... 7.4560755e-01 -3.6476129e-01 0.0000000e+00] [ 0.0000000e+00 -1.3775052e+00 9.7141802e-01 ... 2.0397561e+00 -7.3237664e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 1.8680540e-01 -1.6985494e-01 ... -3.9255327e-01 1.0463545e+00 0.0000000e+00] [ 0.0000000e+00 -2.4724448e-01 -8.7677413e-01 ... 8.4747955e-02 2.1369486e+00 0.0000000e+00] ... [ 0.0000000e+00 3.6774975e-01 -8.5688645e-01 ... 7.9988383e-02 1.2339389e+00 0.0000000e+00] [ 0.0000000e+00 -5.5422115e-01 1.6773539e+00 ... -2.9352692e-01 -1.2403655e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -9.4074696e-01 6.2453419e-01 ... -1.5776097e+00 -4.6975431e-01 0.0000000e+00] [ 0.0000000e+00 -5.5531079e-01 5.6544747e-02 ... 1.8088473e+00 -9.4723128e-02 0.0000000e+00] ... [ 0.0000000e+00 5.2936715e-01 4.1132307e-01 ... 1.4442085e+00 2.9475325e-01 0.0000000e+00] [ 0.0000000e+00 -1.4925730e-01 6.0399193e-01 ... 4.3741348e-01 -9.4539440e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -2.8871363e-01 3.3035278e-01 ... 
1.8047805e-01 2.8508508e-01 0.0000000e+00] [ 0.0000000e+00 -5.6127936e-01 3.5991248e-01 ... 1.5518564e-01 -7.5099093e-01 0.0000000e+00] ... [ 0.0000000e+00 7.6906079e-01 -2.1605399e-02 ... 6.1005861e-01 2.4413402e-01 0.0000000e+00] [ 0.0000000e+00 9.1956931e-01 6.9908530e-01 ... 1.9330148e-01 -4.7660393e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 2.0728198e-01 3.1184465e-01 ... 4.2644776e-02 -8.3795822e-01 0.0000000e+00] [ 0.0000000e+00 -1.5824392e-01 5.5391067e-01 ... -4.3641478e-01 8.2692248e-01 0.0000000e+00] ... [ 0.0000000e+00 1.9234124e-01 1.3078959e+00 ... 1.0055003e+00 1.0804971e+00 0.0000000e+00] [ 0.0000000e+00 -1.9351815e-01 -6.0470011e-02 ... -6.3886684e-01 -4.1226411e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -3.6302444e-01 1.1252494e-01 ... -1.1834843e+00 1.3367213e+00 0.0000000e+00] [ 0.0000000e+00 1.9042014e-01 8.5994518e-01 ... -1.7346660e+00 -1.4418792e+00 0.0000000e+00] ... [ 0.0000000e+00 -2.7397949e-01 2.5681727e+00 ... -6.4386594e-01 -3.1415853e-01 0.0000000e+00] [ 0.0000000e+00 1.8065439e-01 -9.6081406e-02 ... -1.2905267e+00 1.6346376e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [1, 0, 0], 'dilations': [2, 2, 1], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1006.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.7748 (2,1,1,.,.) = -1.5144 (3,1,1,.,.) = 0.01 * 5.1939 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%10) fw_re: [[[[[-1.29064882e+00 -2.28347850e+00 -4.59412515e-01 ... -3.25463438e+00 1.03407466e+00 -1.62278938e+00] [-1.16638085e-02 9.98595059e-01 1.39216280e+00 ... 1.36192882e+00 -8.01366806e-01 3.19550681e+00] [-1.07617676e+00 2.81950593e+00 -1.25619900e+00 ... 1.00707543e+00 1.32008982e+00 -6.44125402e-01] ... [ 1.51824689e+00 9.07440603e-01 -1.22795260e+00 ... -7.38867104e-01 1.16737187e+00 2.15056229e+00] [-1.94506013e+00 -1.28633845e+00 -1.36678851e+00 ... -8.10313165e-01 5.90976596e-01 -8.34112838e-02] [ 4.03334290e-01 5.45394063e-01 -1.79480684e+00 ... 1.04865658e+00 2.07580328e+00 -1.55735695e+00]] [[ 4.67263758e-01 -3.74282718e+00 3.12997103e+00 ... -2.89616919e+00 4.48498279e-01 -2.85518527e-01] [-1.83473921e+00 -2.89142942e+00 2.80280781e+00 ... 2.48434439e-01 -7.59073734e-01 9.57131684e-01] [ 1.18637466e+00 -1.47598040e+00 -1.35782111e+00 ... -2.16562469e-02 1.57920372e+00 -1.76834440e+00] ... [ 3.02374125e+00 -3.14672542e+00 1.21919882e+00 ... 
-7.26606771e-02 1.56886697e+00 1.69462335e+00] [ 1.39866650e+00 1.82179753e-02 1.89147308e-01 ... 7.90789187e-01 -2.80909944e+00 2.51237321e+00] [-2.67150140e+00 -1.90952992e+00 -5.10582030e-02 ... -3.07853252e-01 1.49883437e+00 1.91895366e-02]] [[-3.85600209e-01 -9.60663199e-01 -8.25282097e-01 ... -1.72016168e+00 -2.17471933e+00 1.20580924e+00] [-1.45841134e+00 -1.18058372e+00 -1.40009820e+00 ... 8.98722351e-01 -2.53675842e+00 -2.35302711e+00] [ 3.15881062e+00 -2.24197054e+00 1.06274271e+00 ... 1.07333159e+00 2.16466284e+00 -2.88169956e+00] ... [-6.84258223e-01 -8.04131269e-01 -3.71174312e+00 ... -5.64145029e-01 1.08366370e+00 -1.14578474e+00] [-6.08356237e-01 1.16360450e+00 1.36304840e-01 ... -4.33074564e-01 7.22941458e-01 1.18384445e+00] [-9.36330259e-01 2.34873557e+00 -2.25198245e+00 ... 9.46776450e-01 -2.85368919e+00 6.24472141e-01]] ... [[-8.65416706e-01 -1.23398852e+00 -4.45454836e-01 ... 1.54654348e+00 1.64703727e+00 2.57600856e+00] [ 2.72013760e+00 4.62805152e-01 -1.64780033e+00 ... 2.65191603e+00 1.81357241e+00 4.54656273e-01] [ 8.64536822e-01 -2.38501692e+00 2.30638623e+00 ... -7.07481682e-01 -1.35948205e+00 1.07577944e+00] ... [-2.22334647e+00 -1.62282079e-01 -2.40613365e+00 ... 1.90208897e-01 -2.32037854e+00 1.20694607e-01] [ 5.15248626e-02 -7.93611765e-01 -1.41933584e+00 ... -2.21471518e-01 7.93801129e-01 9.52769399e-01] [-2.06404638e+00 7.31129944e-01 -7.16655612e-01 ... 1.40995467e+00 -2.84174657e+00 2.93142271e+00]] [[ 3.63421154e+00 6.27695143e-01 7.12209046e-01 ... 1.27711177e+00 6.53849781e-01 1.72047228e-01] [ 3.58136177e+00 1.00921798e+00 6.75608873e-01 ... -1.70662642e+00 -3.63577724e+00 -1.08279765e+00] [-6.32360131e-02 -4.33385223e-02 1.58316112e+00 ... 7.84494519e-01 -1.79313049e-01 -3.36522722e+00] ... [ 2.83957291e+00 8.61505628e-01 2.15039062e+00 ... -3.18700790e+00 -2.19199610e+00 -1.09069705e+00] [ 8.86168361e-01 -1.81701016e+00 -2.50473881e+00 ... 
1.77451456e+00 8.04500654e-02 -8.70555639e-01] [-1.19504619e+00 -3.55053663e-01 1.40838516e+00 ... -1.01682246e+00 -1.91603959e+00 2.23073587e-01]] [[ 1.75680745e+00 1.13347590e+00 2.00795555e+00 ... 2.17615199e+00 -1.93882215e+00 8.68631244e-01] [ 5.94371438e-01 1.64348936e+00 2.12127900e+00 ... 1.28357625e+00 -1.68256986e+00 -6.94101393e-01] [ 6.05588377e-01 1.92987609e+00 5.86283267e-01 ... 2.04672670e+00 7.88940310e-01 1.89677036e+00] ... [ 1.80359125e+00 6.57159448e-01 -3.04262495e+00 ... 2.81811500e+00 9.26338881e-02 1.14654124e+00] [-5.47281563e-01 2.09647608e+00 -3.81941527e-01 ... 1.01715720e+00 -2.19154310e+00 -1.65755188e+00] [ 7.64549911e-01 -1.12018764e+00 -9.23341140e-02 ... 5.75721562e-01 4.47017074e-01 1.01980954e-01]]] [[[ 7.03955650e-01 -1.14179265e+00 1.07442558e+00 ... 2.87314248e+00 1.12174404e+00 -4.42624569e-01] [-1.35202515e+00 -1.03941441e+00 -6.83428347e-01 ... -1.61867738e+00 1.96068928e-01 -1.47295725e+00] [-1.50764501e+00 1.51205802e+00 -2.37160611e+00 ... -1.43804634e+00 7.28485167e-01 -3.07823706e+00] ... [-8.67319465e-01 -2.43282035e-01 1.69859678e-01 ... -9.91338253e-01 -2.41173387e+00 1.10732973e+00] [ 1.79111528e+00 2.06585526e+00 7.73291111e-01 ... 3.43067241e+00 -2.62223423e-01 -1.72887906e-01] [ 1.66575384e+00 -1.81544289e-01 1.25349295e+00 ... 1.13209295e+00 -1.91652250e+00 5.43451011e-01]] [[ 3.19413066e-01 2.48449492e+00 1.34265220e+00 ... -8.76189530e-01 -2.69511491e-02 1.54907596e+00] [ 9.34221804e-01 1.64112115e+00 1.07134461e-01 ... 6.57582760e-01 -2.92790866e+00 1.74062443e+00] [ 5.11944532e-01 1.00687474e-01 -1.35183084e+00 ... -2.08960676e+00 2.36424923e+00 2.28801894e+00] ... [-6.96687162e-01 1.12108672e+00 -2.10915253e-01 ... -1.72718561e+00 -7.78539121e-01 1.05576587e+00] [ 1.60328805e+00 -7.81220853e-01 3.36820769e+00 ... 2.80753970e-01 2.99748182e+00 -2.55198979e+00] [ 8.60724270e-01 2.47341797e-01 5.21594000e+00 ... 
-1.44112980e+00 1.38448143e+00 2.94528437e+00]] [[ 2.31269553e-01 -2.57616669e-01 -2.03876638e+00 ... -8.39912295e-01 -2.14833975e-01 -1.20434308e+00] [-1.69734633e+00 -2.20097804e+00 5.45218945e-01 ... 6.72146678e-01 8.52256358e-01 -6.63268089e-01] [-2.00549579e+00 -3.38791400e-01 -4.04736474e-02 ... 8.04242432e-01 2.45663500e+00 2.11481428e+00] ... [-1.87232411e+00 2.03358269e+00 1.00917006e+00 ... 5.38867533e-01 1.97021616e+00 2.04274607e+00] [-1.60770988e+00 8.32664847e-01 -3.67787480e-01 ... -1.80883777e+00 1.95186818e+00 -1.77201760e+00] [ 3.01887125e-01 -1.52601469e+00 -6.95363224e-01 ... 2.69735622e+00 -1.28586328e+00 8.81245971e-01]] ... [[-2.70706654e-01 6.52926683e-01 2.44641614e+00 ... 2.70417356e+00 1.59248388e+00 -1.41471708e+00] [-7.52980113e-01 3.60184371e-01 1.88245416e+00 ... -8.63027692e-01 -1.61629975e+00 -3.67998183e-01] [-1.85335839e+00 2.70168972e+00 -1.53294981e+00 ... -2.06261635e+00 1.52979052e+00 2.32900113e-01] ... [ 1.89480817e+00 1.82421005e+00 1.09493506e+00 ... 2.94934416e+00 3.00345492e+00 -1.10555840e+00] [-2.69018674e+00 -1.52414846e+00 9.99887884e-01 ... 3.04693055e+00 9.87165630e-01 1.74148953e+00] [-1.13186622e+00 1.82797885e+00 -5.08943498e-01 ... 2.80990094e-01 8.02224040e-01 2.34132886e-01]] [[-9.04075325e-01 2.18754873e-01 -8.47878978e-02 ... 1.13477266e+00 8.87685776e-01 4.99861956e+00] [-1.16093302e+00 6.69850767e-01 -2.46290863e-01 ... 1.21089611e-02 -1.20244372e+00 1.78308141e+00] [ 7.75878251e-01 3.27338636e-01 2.09642577e+00 ... 2.82544231e+00 7.71090388e-02 7.34928310e-01] ... [ 2.01479721e+00 -3.59660476e-01 9.39656496e-01 ... 6.22097194e-01 -3.23843747e-01 -4.71462756e-02] [ 1.53102756e+00 1.57977080e+00 -2.54255033e+00 ... -1.55298543e+00 2.78197861e+00 -3.78069496e+00] [-7.40776360e-01 1.43526304e+00 -1.46220827e+00 ... 1.61356938e+00 -1.28396809e+00 1.61101365e+00]] [[-2.35866952e+00 8.81102920e-01 2.83193731e+00 ... 
-3.58886671e+00 4.48130488e-01 -1.57313216e+00] [-4.32090193e-01 1.95213687e+00 6.86901987e-01 ... 7.01281905e-01 4.77117807e-01 1.56540763e+00] [-2.55059153e-01 -8.85351300e-01 2.76173139e+00 ... 6.53311312e-01 -1.74123609e+00 2.02259564e+00] ... [ 7.57454932e-01 -1.49127579e+00 1.33410633e-01 ... 8.66505980e-01 5.46012342e-01 2.13170385e+00] [-9.98375893e-01 -3.22503042e+00 1.45584452e+00 ... -1.79409862e+00 -2.21225691e+00 7.13531792e-01] [-7.10138381e-02 1.24390066e+00 -7.61731029e-01 ... 2.11426124e-01 1.38887024e+00 2.66935992e+00]]] [[[-9.67330262e-02 -8.62889457e-03 1.18366452e-02 ... -1.14834100e-01 -3.12770121e-02 -1.31113473e-02] [ 1.02787139e-02 3.21114920e-02 -1.07663259e-01 ... -2.52646673e-02 -4.24503125e-02 5.08065410e-02] [-7.69478679e-02 -1.87285561e-02 5.02806231e-02 ... -1.96536761e-02 -3.41149345e-02 -2.27819588e-02] ... [-3.23799029e-02 1.87069893e-01 5.96464016e-02 ... 1.86727718e-02 2.24335697e-02 3.92791443e-02] [ 1.93043184e-02 4.81451377e-02 1.62668582e-02 ... -6.06970005e-02 3.19779888e-02 2.77068336e-02] [ 4.81733456e-02 -3.06229154e-03 9.76288691e-02 ... -6.61544874e-03 -5.23780435e-02 -2.90726237e-02]] [[-1.22302789e-02 -8.94242227e-02 -4.02431972e-02 ... 2.67872084e-02 -1.77677963e-02 1.03911735e-01] [ 8.89316574e-02 4.30829041e-02 -2.44882684e-02 ... -3.42665799e-02 5.37605174e-02 -3.40090096e-02] [ 1.67040545e-02 3.90254483e-02 3.97337712e-02 ... -2.97415685e-02 6.83289915e-02 2.95923594e-02] ... [ 2.59293057e-02 8.65230262e-02 2.51227319e-02 ... -1.24910489e-01 -1.53554063e-02 -2.54319943e-02] [ 5.40053658e-02 -1.08693227e-01 -6.68053925e-02 ... -3.26402783e-02 9.11602676e-02 -1.82260294e-02] [-1.03728794e-01 -9.75895002e-02 -6.25402406e-02 ... 2.14196127e-02 4.80541810e-02 -2.85435729e-02]] [[-5.39296260e-03 -1.12570534e-02 -5.66279367e-02 ... 2.73394529e-02 3.62831168e-02 -1.84486359e-02] [-7.59796947e-02 5.71518987e-02 3.20214406e-02 ... 
1.03392340e-01 3.36296745e-02 8.89233314e-03] [-4.37597930e-03 -2.22759098e-02 2.90046185e-02 ... 6.02973364e-02 -1.75631826e-03 -8.49128421e-03] ... [-1.86149925e-02 -1.60484146e-02 -8.57000798e-03 ... 2.52389163e-03 -2.21376829e-02 7.14565292e-02] [ 8.20055455e-02 -4.38298881e-02 6.67534489e-03 ... 2.64127273e-03 -2.83935573e-02 -1.90521695e-03] [-7.45096942e-03 -4.23176363e-02 5.37794381e-02 ... -9.41145327e-03 7.47719640e-03 2.93759955e-03]] ... [[ 2.44392897e-03 3.80863287e-02 -3.52710336e-02 ... 3.05368863e-02 -1.65281035e-02 -1.02074901e-02] [ 5.44518009e-02 -7.43661001e-02 -1.52761843e-02 ... 4.14377041e-02 -5.85491862e-03 5.40122278e-02] [ 3.84455882e-02 -2.43494976e-02 3.93483303e-02 ... -7.21546710e-02 9.16450024e-02 -3.51136364e-02] ... [-3.24209183e-02 4.84812520e-02 -6.54301792e-02 ... 3.04153766e-02 -2.97235753e-02 -2.47219540e-02] [ 4.31665778e-03 -3.96654978e-02 -1.95750166e-02 ... -2.98860092e-02 4.17515747e-02 -7.04723075e-02] [ 4.13226187e-02 3.40885762e-03 -5.99502819e-04 ... 1.66657921e-02 1.53178498e-02 1.72907058e-02]] [[ 3.87920216e-02 1.49294231e-02 -1.28009031e-03 ... 1.40926456e-02 3.57521512e-02 4.77113649e-02] [-1.33621678e-01 -5.20677771e-03 4.07697540e-03 ... 1.36497453e-01 -2.12741289e-02 1.85700245e-02] [ 5.11527248e-02 -1.09911777e-01 4.30126209e-03 ... 6.66525066e-02 7.18036387e-03 6.20894320e-02] ... [-1.40043488e-02 7.42416158e-02 -6.31919876e-02 ... 2.52725799e-02 6.07672371e-02 4.74250391e-02] [ 4.62615490e-03 9.23782960e-02 2.47264858e-02 ... -2.52309944e-02 8.98899212e-02 3.63173783e-02] [-1.91884842e-02 -9.72555112e-03 7.40960240e-02 ... -3.87328006e-02 1.17180019e-03 3.41995470e-02]] [[-1.99867189e-02 4.65706736e-02 -3.73326391e-02 ... 2.04142313e-02 -2.76568234e-02 -1.08701289e-02] [-7.19584748e-02 -8.10158253e-02 -3.91063206e-02 ... -3.86797707e-03 5.19734109e-03 4.69355062e-02] [ 1.22981139e-01 1.75042264e-02 -6.85604438e-02 ... 2.21125707e-02 -2.91429888e-02 -4.92220372e-03] ... 
[ 6.86931014e-02 3.91271710e-03 -5.49462661e-02 ... 2.48838030e-02 -5.45648411e-02 -3.06377728e-02] [ 1.66220050e-02 -7.78137296e-02 3.27955782e-02 ... -4.55882363e-02 1.91289186e-02 -1.72505323e-02] [-2.81936787e-02 -6.60450161e-02 3.28499451e-02 ... 5.19846901e-02 -3.75247635e-02 1.45772863e-02]]]]]; ov_res: [[[[[-1.29064882e+00 -2.28347850e+00 -4.59412515e-01 ... -3.25463438e+00 1.03407466e+00 -1.62278938e+00] [-1.16638085e-02 9.98595059e-01 1.39216280e+00 ... 1.36192882e+00 -8.01366806e-01 3.19550681e+00] [-1.07617676e+00 2.81950593e+00 -1.25619900e+00 ... 1.00707543e+00 1.32008982e+00 -6.44125402e-01] ... [ 1.51824689e+00 9.07440603e-01 -1.22795260e+00 ... -7.38867104e-01 1.16737187e+00 2.15056229e+00] [-1.94506013e+00 -1.28633845e+00 -1.36678851e+00 ... -8.10313165e-01 5.90976596e-01 -8.34112838e-02] [ 4.03334290e-01 5.45394063e-01 -1.79480684e+00 ... 1.04865658e+00 2.07580328e+00 -1.55735695e+00]] [[ 4.67263758e-01 -3.74282718e+00 3.12997103e+00 ... -2.89616919e+00 4.48498279e-01 -2.85518527e-01] [-1.83473921e+00 -2.89142942e+00 2.80280781e+00 ... 2.48434439e-01 -7.59073734e-01 9.57131684e-01] [ 1.18637466e+00 -1.47598040e+00 -1.35782111e+00 ... -2.16562469e-02 1.57920372e+00 -1.76834440e+00] ... [ 3.02374125e+00 -3.14672542e+00 1.21919882e+00 ... -7.26606771e-02 1.56886697e+00 1.69462335e+00] [ 1.39866650e+00 1.82179753e-02 1.89147308e-01 ... 7.90789187e-01 -2.80909944e+00 2.51237321e+00] [-2.67150140e+00 -1.90952992e+00 -5.10582030e-02 ... -3.07853252e-01 1.49883437e+00 1.91895366e-02]] [[-3.85600209e-01 -9.60663199e-01 -8.25282097e-01 ... -1.72016168e+00 -2.17471933e+00 1.20580924e+00] [-1.45841134e+00 -1.18058372e+00 -1.40009820e+00 ... 8.98722351e-01 -2.53675842e+00 -2.35302711e+00] [ 3.15881062e+00 -2.24197054e+00 1.06274271e+00 ... 1.07333159e+00 2.16466284e+00 -2.88169956e+00] ... [-6.84258223e-01 -8.04131269e-01 -3.71174312e+00 ... -5.64145029e-01 1.08366370e+00 -1.14578474e+00] [-6.08356237e-01 1.16360450e+00 1.36304840e-01 ... 
-4.33074564e-01 7.22941458e-01 1.18384445e+00] [-9.36330259e-01 2.34873557e+00 -2.25198245e+00 ... 9.46776450e-01 -2.85368919e+00 6.24472141e-01]] ... [[-8.65416706e-01 -1.23398852e+00 -4.45454836e-01 ... 1.54654348e+00 1.64703727e+00 2.57600856e+00] [ 2.72013760e+00 4.62805152e-01 -1.64780033e+00 ... 2.65191603e+00 1.81357241e+00 4.54656273e-01] [ 8.64536822e-01 -2.38501692e+00 2.30638623e+00 ... -7.07481682e-01 -1.35948205e+00 1.07577944e+00] ... [-2.22334647e+00 -1.62282079e-01 -2.40613365e+00 ... 1.90208897e-01 -2.32037854e+00 1.20694607e-01] [ 5.15248626e-02 -7.93611765e-01 -1.41933584e+00 ... -2.21471518e-01 7.93801129e-01 9.52769399e-01] [-2.06404638e+00 7.31129944e-01 -7.16655612e-01 ... 1.40995467e+00 -2.84174657e+00 2.93142271e+00]] [[ 3.63421154e+00 6.27695143e-01 7.12209046e-01 ... 1.27711177e+00 6.53849781e-01 1.72047228e-01] [ 3.58136177e+00 1.00921798e+00 6.75608873e-01 ... -1.70662642e+00 -3.63577724e+00 -1.08279765e+00] [-6.32360131e-02 -4.33385223e-02 1.58316112e+00 ... 7.84494519e-01 -1.79313049e-01 -3.36522722e+00] ... [ 2.83957291e+00 8.61505628e-01 2.15039062e+00 ... -3.18700790e+00 -2.19199610e+00 -1.09069705e+00] [ 8.86168361e-01 -1.81701016e+00 -2.50473881e+00 ... 1.77451456e+00 8.04500654e-02 -8.70555639e-01] [-1.19504619e+00 -3.55053663e-01 1.40838516e+00 ... -1.01682246e+00 -1.91603959e+00 2.23073587e-01]] [[ 1.75680745e+00 1.13347590e+00 2.00795555e+00 ... 2.17615199e+00 -1.93882215e+00 8.68631244e-01] [ 5.94371438e-01 1.64348936e+00 2.12127900e+00 ... 1.28357625e+00 -1.68256986e+00 -6.94101393e-01] [ 6.05588377e-01 1.92987609e+00 5.86283267e-01 ... 2.04672670e+00 7.88940310e-01 1.89677036e+00] ... [ 1.80359125e+00 6.57159448e-01 -3.04262495e+00 ... 2.81811500e+00 9.26338881e-02 1.14654124e+00] [-5.47281563e-01 2.09647608e+00 -3.81941527e-01 ... 1.01715720e+00 -2.19154310e+00 -1.65755188e+00] [ 7.64549911e-01 -1.12018764e+00 -9.23341140e-02 ... 
5.75721562e-01 4.47017074e-01 1.01980954e-01]]] [[[ 7.03955650e-01 -1.14179265e+00 1.07442558e+00 ... 2.87314248e+00 1.12174404e+00 -4.42624569e-01] [-1.35202515e+00 -1.03941441e+00 -6.83428347e-01 ... -1.61867738e+00 1.96068928e-01 -1.47295725e+00] [-1.50764501e+00 1.51205802e+00 -2.37160611e+00 ... -1.43804634e+00 7.28485167e-01 -3.07823706e+00] ... [-8.67319465e-01 -2.43282035e-01 1.69859678e-01 ... -9.91338253e-01 -2.41173387e+00 1.10732973e+00] [ 1.79111528e+00 2.06585526e+00 7.73291111e-01 ... 3.43067241e+00 -2.62223423e-01 -1.72887906e-01] [ 1.66575384e+00 -1.81544289e-01 1.25349295e+00 ... 1.13209295e+00 -1.91652250e+00 5.43451011e-01]] [[ 3.19413066e-01 2.48449492e+00 1.34265220e+00 ... -8.76189530e-01 -2.69511491e-02 1.54907596e+00] [ 9.34221804e-01 1.64112115e+00 1.07134461e-01 ... 6.57582760e-01 -2.92790866e+00 1.74062443e+00] [ 5.11944532e-01 1.00687474e-01 -1.35183084e+00 ... -2.08960676e+00 2.36424923e+00 2.28801894e+00] ... [-6.96687162e-01 1.12108672e+00 -2.10915253e-01 ... -1.72718561e+00 -7.78539121e-01 1.05576587e+00] [ 1.60328805e+00 -7.81220853e-01 3.36820769e+00 ... 2.80753970e-01 2.99748182e+00 -2.55198979e+00] [ 8.60724270e-01 2.47341797e-01 5.21594000e+00 ... -1.44112980e+00 1.38448143e+00 2.94528437e+00]] [[ 2.31269553e-01 -2.57616669e-01 -2.03876638e+00 ... -8.39912295e-01 -2.14833975e-01 -1.20434308e+00] [-1.69734633e+00 -2.20097804e+00 5.45218945e-01 ... 6.72146678e-01 8.52256358e-01 -6.63268089e-01] [-2.00549579e+00 -3.38791400e-01 -4.04736474e-02 ... 8.04242432e-01 2.45663500e+00 2.11481428e+00] ... [-1.87232411e+00 2.03358269e+00 1.00917006e+00 ... 5.38867533e-01 1.97021616e+00 2.04274607e+00] [-1.60770988e+00 8.32664847e-01 -3.67787480e-01 ... -1.80883777e+00 1.95186818e+00 -1.77201760e+00] [ 3.01887125e-01 -1.52601469e+00 -6.95363224e-01 ... 2.69735622e+00 -1.28586328e+00 8.81245971e-01]] ... [[-2.70706654e-01 6.52926683e-01 2.44641614e+00 ... 
2.70417356e+00 1.59248388e+00 -1.41471708e+00] [-7.52980113e-01 3.60184371e-01 1.88245416e+00 ... -8.63027692e-01 -1.61629975e+00 -3.67998183e-01] [-1.85335839e+00 2.70168972e+00 -1.53294981e+00 ... -2.06261635e+00 1.52979052e+00 2.32900113e-01] ... [ 1.89480817e+00 1.82421005e+00 1.09493506e+00 ... 2.94934416e+00 3.00345492e+00 -1.10555840e+00] [-2.69018674e+00 -1.52414846e+00 9.99887884e-01 ... 3.04693055e+00 9.87165630e-01 1.74148953e+00] [-1.13186622e+00 1.82797885e+00 -5.08943498e-01 ... 2.80990094e-01 8.02224040e-01 2.34132886e-01]] [[-9.04075325e-01 2.18754873e-01 -8.47878978e-02 ... 1.13477266e+00 8.87685776e-01 4.99861956e+00] [-1.16093302e+00 6.69850767e-01 -2.46290863e-01 ... 1.21089611e-02 -1.20244372e+00 1.78308141e+00] [ 7.75878251e-01 3.27338636e-01 2.09642577e+00 ... 2.82544231e+00 7.71090388e-02 7.34928310e-01] ... [ 2.01479721e+00 -3.59660476e-01 9.39656496e-01 ... 6.22097194e-01 -3.23843747e-01 -4.71462756e-02] [ 1.53102756e+00 1.57977080e+00 -2.54255033e+00 ... -1.55298543e+00 2.78197861e+00 -3.78069496e+00] [-7.40776360e-01 1.43526304e+00 -1.46220827e+00 ... 1.61356938e+00 -1.28396809e+00 1.61101365e+00]] [[-2.35866952e+00 8.81102920e-01 2.83193731e+00 ... -3.58886671e+00 4.48130488e-01 -1.57313216e+00] [-4.32090193e-01 1.95213687e+00 6.86901987e-01 ... 7.01281905e-01 4.77117807e-01 1.56540763e+00] [-2.55059153e-01 -8.85351300e-01 2.76173139e+00 ... 6.53311312e-01 -1.74123609e+00 2.02259564e+00] ... [ 7.57454932e-01 -1.49127579e+00 1.33410633e-01 ... 8.66505980e-01 5.46012342e-01 2.13170385e+00] [-9.98375893e-01 -3.22503042e+00 1.45584452e+00 ... -1.79409862e+00 -2.21225691e+00 7.13531792e-01] [-7.10138381e-02 1.24390066e+00 -7.61731029e-01 ... 2.11426124e-01 1.38887024e+00 2.66935992e+00]]] [[[-9.67330262e-02 -8.62889457e-03 1.18366452e-02 ... -1.14834100e-01 -3.12770121e-02 -1.31113473e-02] [ 1.02787139e-02 3.21114920e-02 -1.07663259e-01 ... -2.52646673e-02 -4.24503125e-02 5.08065410e-02] [-7.69478679e-02 -1.87285561e-02 5.02806231e-02 ... 
-1.96536761e-02 -3.41149345e-02 -2.27819588e-02] ... [-3.23799029e-02 1.87069893e-01 5.96464016e-02 ... 1.86727718e-02 2.24335697e-02 3.92791443e-02] [ 1.93043184e-02 4.81451377e-02 1.62668582e-02 ... -6.06970005e-02 3.19779888e-02 2.77068336e-02] [ 4.81733456e-02 -3.06229154e-03 9.76288691e-02 ... -6.61544874e-03 -5.23780435e-02 -2.90726237e-02]] [[-1.22302789e-02 -8.94242227e-02 -4.02431972e-02 ... 2.67872084e-02 -1.77677963e-02 1.03911735e-01] [ 8.89316574e-02 4.30829041e-02 -2.44882684e-02 ... -3.42665799e-02 5.37605174e-02 -3.40090096e-02] [ 1.67040545e-02 3.90254483e-02 3.97337712e-02 ... -2.97415685e-02 6.83289915e-02 2.95923594e-02] ... [ 2.59293057e-02 8.65230262e-02 2.51227319e-02 ... -1.24910489e-01 -1.53554063e-02 -2.54319943e-02] [ 5.40053658e-02 -1.08693227e-01 -6.68053925e-02 ... -3.26402783e-02 9.11602676e-02 -1.82260294e-02] [-1.03728794e-01 -9.75895002e-02 -6.25402406e-02 ... 2.14196127e-02 4.80541810e-02 -2.85435729e-02]] [[-5.39296260e-03 -1.12570534e-02 -5.66279367e-02 ... 2.73394529e-02 3.62831168e-02 -1.84486359e-02] [-7.59796947e-02 5.71518987e-02 3.20214406e-02 ... 1.03392340e-01 3.36296745e-02 8.89233314e-03] [-4.37597930e-03 -2.22759098e-02 2.90046185e-02 ... 6.02973364e-02 -1.75631826e-03 -8.49128421e-03] ... [-1.86149925e-02 -1.60484146e-02 -8.57000798e-03 ... 2.52389163e-03 -2.21376829e-02 7.14565292e-02] [ 8.20055455e-02 -4.38298881e-02 6.67534489e-03 ... 2.64127273e-03 -2.83935573e-02 -1.90521695e-03] [-7.45096942e-03 -4.23176363e-02 5.37794381e-02 ... -9.41145327e-03 7.47719640e-03 2.93759955e-03]] ... [[ 2.44392897e-03 3.80863287e-02 -3.52710336e-02 ... 3.05368863e-02 -1.65281035e-02 -1.02074901e-02] [ 5.44518009e-02 -7.43661001e-02 -1.52761843e-02 ... 4.14377041e-02 -5.85491862e-03 5.40122278e-02] [ 3.84455882e-02 -2.43494976e-02 3.93483303e-02 ... -7.21546710e-02 9.16450024e-02 -3.51136364e-02] ... [-3.24209183e-02 4.84812520e-02 -6.54301792e-02 ... 
3.04153766e-02 -2.97235753e-02 -2.47219540e-02] [ 4.31665778e-03 -3.96654978e-02 -1.95750166e-02 ... -2.98860092e-02 4.17515747e-02 -7.04723075e-02] [ 4.13226187e-02 3.40885762e-03 -5.99502819e-04 ... 1.66657921e-02 1.53178498e-02 1.72907058e-02]] [[ 3.87920216e-02 1.49294231e-02 -1.28009031e-03 ... 1.40926456e-02 3.57521512e-02 4.77113649e-02] [-1.33621678e-01 -5.20677771e-03 4.07697540e-03 ... 1.36497453e-01 -2.12741289e-02 1.85700245e-02] [ 5.11527248e-02 -1.09911777e-01 4.30126209e-03 ... 6.66525066e-02 7.18036387e-03 6.20894320e-02] ... [-1.40043488e-02 7.42416158e-02 -6.31919876e-02 ... 2.52725799e-02 6.07672371e-02 4.74250391e-02] [ 4.62615490e-03 9.23782960e-02 2.47264858e-02 ... -2.52309944e-02 8.98899212e-02 3.63173783e-02] [-1.91884842e-02 -9.72555112e-03 7.40960240e-02 ... -3.87328006e-02 1.17180019e-03 3.41995470e-02]] [[-1.99867189e-02 4.65706736e-02 -3.73326391e-02 ... 2.04142313e-02 -2.76568234e-02 -1.08701289e-02] [-7.19584748e-02 -8.10158253e-02 -3.91063206e-02 ... -3.86797707e-03 5.19734109e-03 4.69355062e-02] [ 1.22981139e-01 1.75042264e-02 -6.85604438e-02 ... 2.21125707e-02 -2.91429888e-02 -4.92220372e-03] ... [ 6.86931014e-02 3.91271710e-03 -5.49462661e-02 ... 2.48838030e-02 -5.45648411e-02 -3.06377728e-02] [ 1.66220050e-02 -7.78137296e-02 3.27955782e-02 ... -4.55882363e-02 1.91289186e-02 -1.72505323e-02] [-2.81936787e-02 -6.60450161e-02 3.28499451e-02 ... 5.19846901e-02 -3.75247635e-02 1.45772863e-02]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': [0, 0, 0], 'dilations': [2, 2, 2], 'groups': 3, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1008.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[2, 2, 2]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.7307 (2,1,1,.,.) = -0.2926 (3,1,1,.,.) = -1.2238 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 1.25090575e+00 -3.45239806e+00 1.88094127e+00 ... -2.62342644e+00 -1.11350274e+00 -3.75829637e-01] [-1.32491136e+00 7.10186720e-01 -4.57239568e-01 ... 1.61559057e+00 -2.02444077e-01 -6.85930610e-01] [ 1.61205518e+00 9.11397755e-01 -1.00795043e+00 ... 2.04555893e+00 -6.53253078e-01 1.39913487e+00] ... [ 5.77322841e-01 1.55771053e+00 2.56965923e+00 ... 1.39771199e+00 -6.28191769e-01 -6.82554990e-02] [-1.12633681e+00 -1.71421552e+00 -6.68365359e-01 ... 2.67406464e+00 -2.38955975e+00 -2.01909709e+00] [-3.58145928e+00 -2.65244889e+00 -1.18206000e+00 ... -3.57741714e-02 -1.16540170e+00 -4.80276966e+00]] [[ 5.06408870e-01 -1.68422270e+00 9.06456113e-01 ... 1.45385578e-01 8.11207950e-01 -5.95748484e-01] [-3.18189204e-01 -1.17628658e+00 7.13904738e-01 ... -2.21660852e+00 1.80712163e+00 -2.40617380e-01] [ 1.11494958e+00 2.99795508e+00 1.59704602e+00 ... 7.85227656e-01 -2.24684167e+00 -1.74398482e+00] ... [-6.09069057e-02 -3.67075056e-02 1.72205186e+00 ... 
5.53335905e-01 1.67774570e+00 -3.57822403e-02] [-9.71043944e-01 -1.21128380e+00 -2.27061081e+00 ... 2.93404269e+00 1.20379470e-01 1.14128923e+00] [-1.83416152e+00 9.34609324e-02 2.35114002e+00 ... -2.60876751e+00 -2.65015650e+00 2.76780152e+00]] [[ 1.00406671e+00 2.05182135e-01 1.44684160e+00 ... 1.22156394e+00 2.83288527e+00 -2.69655228e+00] [ 2.99922061e+00 -7.14405596e-01 -3.91140163e-01 ... 1.41394496e+00 5.75906324e+00 9.04233634e-01] [-4.60715264e-01 -1.54302642e-01 2.18123293e+00 ... 3.09182143e+00 6.68783963e-01 -1.13116848e+00] ... [ 1.27211027e-02 1.93274677e+00 1.60938287e+00 ... -2.27798119e-01 7.77132511e-01 -8.19271266e-01] [-1.51870251e+00 1.74419177e+00 -1.80975124e-01 ... 2.73467517e+00 -1.64088234e-01 -1.74096680e+00] [-2.10980701e+00 -2.09429026e+00 -1.69718182e+00 ... -1.26546431e+00 -1.34485722e+00 -3.56876373e+00]] ... [[ 4.64030474e-01 1.64592221e-01 -6.99414670e-01 ... -1.59893572e+00 3.02677345e+00 2.41616392e+00] [ 1.38897955e+00 -4.49727595e-01 -9.26410928e-02 ... 6.82004988e-01 -2.56735086e+00 1.44330096e+00] [-1.60245275e+00 3.89840341e+00 -7.15684593e-01 ... 4.22231294e-02 6.16701305e-01 -1.95627451e-01] ... [ 1.05939472e+00 2.22739291e+00 -5.63787341e-01 ... -1.46416199e+00 2.15385342e+00 -2.05042720e+00] [ 1.83954549e+00 -1.50349271e+00 5.04779220e-01 ... 1.28970587e+00 2.20670864e-01 1.13062751e+00] [ 3.81405741e-01 -2.90278643e-01 9.87720490e-01 ... 1.36287892e+00 1.36238635e+00 -1.04645658e+00]] [[-3.64439607e-01 1.44002342e+00 -1.18064657e-01 ... -2.68856287e+00 -1.59563363e+00 4.07161325e-01] [-1.71436578e-01 -1.58349967e+00 1.75067578e-02 ... 1.58543468e+00 -2.69643247e-01 -4.81721580e-01] [-1.60415339e+00 -9.93966579e-01 1.80142581e+00 ... 1.38347046e-02 1.79599369e+00 -1.14799254e-01] ... [ 6.49912119e-01 9.52600896e-01 1.02336478e+00 ... -3.16647649e+00 -1.62745726e+00 5.37378120e+00] [-1.63848412e+00 -1.26288772e+00 2.84059548e+00 ... 
6.20652795e-01 1.37725663e+00 -2.55850816e+00] [-2.21396637e+00 -3.36419851e-01 -1.67294323e+00 ... 8.06434512e-01 -6.27097547e-01 2.45726538e+00]] [[ 2.13058090e+00 -1.68592596e+00 3.05899769e-01 ... -2.61237097e+00 3.09982777e+00 1.00391757e+00] [ 3.33821654e+00 3.70483518e-01 8.27186942e-01 ... 5.22745669e-01 -8.17758799e-01 2.32231617e+00] [-1.32971078e-01 -4.61601448e+00 5.77191234e-01 ... -1.54977369e+00 -1.67629325e+00 -3.75093162e-01] ... [-2.75886238e-01 -1.14250255e+00 1.36655498e+00 ... -1.30961573e+00 -1.32703602e+00 2.02998543e+00] [-2.11875081e+00 2.19531822e+00 1.43920577e+00 ... -1.02984285e+00 2.10817051e+00 3.46726942e+00] [-1.61581075e+00 1.24454081e+00 5.88993371e-01 ... 6.50056243e-01 1.40411973e+00 -2.30453396e+00]]] [[[-1.99527010e-01 -3.43720354e-02 1.31652519e-01 ... 1.24669187e-01 1.02413028e-01 -2.98847824e-01] [-3.46373647e-01 5.30914217e-02 -3.72970194e-01 ... 1.67174160e-01 -2.04458967e-01 3.08056846e-02] [-2.98474003e-02 6.61655143e-02 -5.42826891e-01 ... -1.82001755e-01 6.55892119e-02 -5.37915826e-01] ... [ 7.77305216e-02 5.51206619e-02 7.00900137e-01 ... 2.71233499e-01 3.77125263e-01 1.87899526e-02] [ 3.16223264e-01 -2.78846383e-01 1.67054653e-01 ... -9.98288691e-02 3.16352397e-01 -1.55192260e-02] [ 4.02397305e-01 -4.97604281e-01 1.80464014e-01 ... -3.14673394e-01 -3.53888333e-01 4.26026016e-01]] [[-9.24167261e-02 -5.56603193e-01 -8.13351721e-02 ... 1.87341899e-01 4.47005332e-01 -2.42959738e-01] [-4.53088582e-01 -6.04067624e-01 2.92648165e-03 ... -1.61281321e-02 4.30488139e-01 -1.48459569e-01] [ 4.06915009e-01 1.28059551e-01 -1.72244906e-01 ... 2.77278990e-01 9.54096615e-02 -1.03693582e-01] ... [ 1.41390294e-01 -4.74938303e-01 -1.55237794e-01 ... 1.05544426e-01 -2.49241710e-01 -1.28156051e-01] [-8.07045251e-02 -1.03248999e-01 -5.10266647e-02 ... 1.55679718e-01 6.65359676e-01 4.35978621e-01] [-2.56762028e-01 -4.26727198e-02 -3.70506614e-01 ... 
-2.04013854e-01 -1.57367721e-01 -1.44321308e-01]] [[ 2.02287123e-01 -2.12341040e-01 9.82000455e-02 ... -3.48969363e-02 -6.36902511e-01 2.10670575e-01] [-6.91367447e-01 3.01318944e-01 -3.32011938e-01 ... 2.65831739e-01 2.99983829e-01 -5.52472286e-02] [-3.61937769e-02 1.78946197e-01 1.70717612e-01 ... 4.65378631e-03 7.57278949e-02 -5.21547794e-01] ... [-2.30041519e-01 1.11054800e-01 -8.33186656e-02 ... 5.16502202e-01 1.34847924e-01 1.43483371e-01] [-2.15022027e-01 4.50864613e-01 -3.24094296e-01 ... 6.01407349e-01 -4.01825637e-01 -3.60923618e-01] [-2.11586088e-01 3.63255590e-01 -2.85087675e-01 ... 4.53504741e-01 7.65142068e-02 -8.97813663e-02]] ... [[-2.55485326e-01 -3.23082864e-01 1.65154234e-01 ... -2.07796380e-01 -2.35212177e-01 1.13359220e-01] [ 1.78116605e-01 4.99972999e-02 3.25975835e-01 ... -1.45366073e-01 4.29392219e-01 3.98244411e-01] [ 1.76126435e-01 -2.89867133e-01 -4.76969451e-01 ... 1.06796660e-01 -1.26452520e-01 9.09347385e-02] ... [-4.95829254e-01 -9.88977849e-02 -1.92660809e-01 ... -3.05774391e-01 2.60880738e-01 -1.40365660e-01] [ 2.27320641e-01 -2.54370630e-01 3.09376776e-01 ... -2.57894129e-01 5.72816551e-01 -5.13732255e-01] [-4.73847866e-01 2.55846530e-02 -6.14198484e-02 ... -1.99864596e-01 8.94167185e-01 -1.23079129e-01]] [[-1.96495041e-01 6.35281026e-01 3.10246527e-01 ... -3.06174695e-01 -2.70191073e-01 -1.58884332e-01] [ 3.44280005e-01 1.25476465e-01 -3.95824999e-01 ... 3.51923883e-01 3.14237438e-02 -2.89533865e-02] [ 3.47062200e-01 -7.18515575e-01 8.56845379e-02 ... -3.97488708e-03 -4.17476259e-02 -1.81015044e-01] ... [-2.36587331e-01 1.56243056e-01 -8.07697251e-02 ... -9.74412978e-01 -1.29686072e-01 -1.87757924e-01] [ 1.38987854e-01 -2.81661451e-01 1.32802844e-01 ... -1.45187438e-01 2.14063376e-02 -1.62348285e-01] [-4.69862670e-01 -1.28967449e-01 4.67489399e-02 ... 4.61154759e-01 9.12254527e-02 -2.85187691e-01]] [[-2.13225111e-01 -2.55590468e-03 -2.28453279e-01 ... 
-1.96332559e-01 6.55265525e-02 8.47231522e-02] [-3.90864968e-01 -4.32316437e-02 -1.94144137e-02 ... -2.33515307e-01 4.95122746e-02 2.82449964e-02] [ 2.89555043e-01 9.52425599e-02 4.41881359e-01 ... -3.31142843e-01 -2.29860514e-01 6.54559493e-01] ... [ 1.07339218e-01 4.18070138e-01 4.11162764e-01 ... -2.42488503e-01 2.00250119e-01 -1.19624950e-01] [-1.71397775e-01 -1.33787096e-01 5.71882166e-03 ... 5.26587546e-01 3.41208935e-01 6.65682033e-02] [-2.09318534e-01 -3.38130653e-01 1.50052607e-01 ... -1.09489486e-01 -4.36935604e-01 5.59711099e-01]]] [[[-1.46964204e+00 1.19669533e+00 -3.61488134e-01 ... 5.19540012e-01 6.15840971e-01 -2.05389905e+00] [-1.23246968e+00 -1.72601435e-02 -3.30151176e+00 ... -1.53803363e-01 -1.83968604e+00 -5.32742143e-02] [ 2.15771049e-02 2.66219378e-01 -4.86379534e-01 ... -3.49714160e-02 5.33976734e-01 -5.48135817e-01] ... [ 8.30416322e-01 -8.10426354e-01 -2.30624533e+00 ... 3.70647728e-01 -1.43698680e+00 5.16501725e-01] [-1.87111169e-01 -4.82066780e-01 3.39568794e-01 ... 4.37817276e-01 -2.41958737e+00 8.15437317e-01] [ 2.46343112e+00 -4.78777349e-01 2.57159543e+00 ... -9.70514894e-01 -9.81735468e-01 -3.31759810e-01]] [[-3.19565296e-01 -6.67339504e-01 2.41145818e-03 ... -1.03414249e+00 1.33137798e+00 -1.79036200e-01] [ 2.06730819e+00 2.53118098e-01 -4.38868076e-01 ... -8.22943628e-01 -4.21241522e-01 -8.93705726e-01] [-4.12096500e-01 -2.71665263e+00 -3.44545865e+00 ... 2.93425536e+00 2.55519390e-01 -9.69943032e-02] ... [ 1.70128465e+00 -8.28823924e-01 7.32939720e-01 ... -3.93531680e-01 1.65441108e+00 -2.09905982e+00] [ 4.20557052e-01 -6.45092666e-01 2.98664594e+00 ... -1.59629226e-01 1.30837142e+00 -1.01885974e+00] [-1.15299439e+00 1.35650730e+00 2.74253190e-01 ... 6.22929096e-01 2.18633115e-01 1.95353186e+00]] [[-1.41452897e+00 4.33910578e-01 1.16084032e-01 ... 1.10453236e+00 4.28716600e-01 -1.88811636e+00] [-1.36105669e+00 -1.00154078e+00 -1.71954560e+00 ... 
-4.84511912e-01 -1.94361794e+00 -1.59986567e+00] [-1.33794403e+00 -1.02002478e+00 -9.45655525e-01 ... -2.19216251e+00 1.35455894e+00 -1.12669326e-01] ... [-1.46978664e+00 -1.35065711e+00 -1.52603418e-01 ... -5.10162115e-01 2.02847362e+00 -7.53695816e-02] [-1.04926276e+00 -7.05976933e-02 1.27453706e-03 ... -7.84404227e-04 -5.83291829e-01 -2.40572977e+00] [ 1.32573855e+00 1.39055717e+00 -1.06360599e-01 ... 2.52884537e-01 -8.98125470e-01 2.01407504e+00]] ... [[ 1.25836217e+00 1.66637647e+00 9.54699695e-01 ... 6.37907863e-01 -1.22099660e-01 -4.40560102e-01] [-1.99260795e+00 -1.92850277e-01 -3.98777425e-01 ... -1.61069191e+00 1.34195730e-01 1.05081367e+00] [-2.74174482e-01 3.00593376e-01 2.36869001e+00 ... 1.96915722e+00 1.21478879e+00 -2.82423198e-01] ... [-2.83539623e-01 -1.00473814e-01 -7.98629403e-01 ... 2.04119587e+00 7.96326041e-01 -1.45022905e+00] [-3.27139199e-01 1.11028564e+00 7.25032508e-01 ... 7.01453015e-02 -4.66689914e-01 1.88812792e+00] [ 1.87219286e+00 1.82537472e+00 -3.26811343e-01 ... 1.23663104e+00 -8.18782091e-01 -1.15461922e+00]] [[ 2.18449950e+00 -9.73336935e-01 -7.01366782e-01 ... -4.26269412e-01 -1.22021124e-01 7.39347517e-01] [-2.44749737e+00 -1.50475943e+00 -2.61729932e+00 ... 7.16180503e-01 -3.84604979e+00 -1.77677751e+00] [-2.45406199e+00 -1.77860260e+00 1.74016774e+00 ... 9.10565674e-01 1.72033310e+00 -4.02669013e-01] ... [-8.01684558e-01 -1.82877943e-01 6.80637777e-01 ... -1.27184093e+00 -8.34578574e-01 -5.40141881e-01] [ 8.10411990e-01 -1.02935982e+00 -5.54470360e-01 ... 2.70819139e+00 -4.04635519e-01 1.66930449e+00] [ 1.74345386e+00 6.88065290e-01 2.29175121e-01 ... -1.37724352e+00 5.76697588e-01 -2.09070110e+00]] [[-5.88376939e-01 1.30486107e+00 -9.18063700e-01 ... -2.93678093e+00 1.34508777e+00 6.15152955e-01] [-6.72538280e-01 -1.45572960e+00 -3.06665152e-01 ... -8.06681931e-01 8.58011782e-01 -2.38739237e-01] [-1.47901726e+00 1.21993530e+00 5.14984906e-01 ... 9.61585045e-01 1.38978100e+00 3.77352029e-01] ... 
[-1.10535526e+00 -1.56125534e+00 2.89897132e+00 ... 4.75863703e-02 9.88673806e-01 2.15827376e-01] [ 6.27287507e-01 9.45184052e-01 -4.56552714e-01 ... -7.14213014e-01 1.76579535e-01 2.05554271e+00] [ 1.80074889e-02 -1.54230368e+00 -2.17169309e+00 ... -4.13489914e+00 2.75941879e-01 -1.25153184e+00]]]]]; ov_res: [[[[[ 1.25090575e+00 -3.45239806e+00 1.88094127e+00 ... -2.62342644e+00 -1.11350274e+00 -3.75829637e-01] [-1.32491136e+00 7.10186720e-01 -4.57239568e-01 ... 1.61559057e+00 -2.02444077e-01 -6.85930610e-01] [ 1.61205518e+00 9.11397755e-01 -1.00795043e+00 ... 2.04555893e+00 -6.53253078e-01 1.39913487e+00] ... [ 5.77322841e-01 1.55771053e+00 2.56965923e+00 ... 1.39771199e+00 -6.28191769e-01 -6.82554990e-02] [-1.12633681e+00 -1.71421552e+00 -6.68365359e-01 ... 2.67406464e+00 -2.38955975e+00 -2.01909709e+00] [-3.58145928e+00 -2.65244889e+00 -1.18206000e+00 ... -3.57741714e-02 -1.16540170e+00 -4.80276966e+00]] [[ 5.06408870e-01 -1.68422270e+00 9.06456113e-01 ... 1.45385578e-01 8.11207950e-01 -5.95748484e-01] [-3.18189204e-01 -1.17628658e+00 7.13904738e-01 ... -2.21660852e+00 1.80712163e+00 -2.40617380e-01] [ 1.11494958e+00 2.99795508e+00 1.59704602e+00 ... 7.85227656e-01 -2.24684167e+00 -1.74398482e+00] ... [-6.09069057e-02 -3.67075056e-02 1.72205186e+00 ... 5.53335905e-01 1.67774570e+00 -3.57822403e-02] [-9.71043944e-01 -1.21128380e+00 -2.27061081e+00 ... 2.93404269e+00 1.20379470e-01 1.14128923e+00] [-1.83416152e+00 9.34609324e-02 2.35114002e+00 ... -2.60876751e+00 -2.65015650e+00 2.76780152e+00]] [[ 1.00406671e+00 2.05182135e-01 1.44684160e+00 ... 1.22156394e+00 2.83288527e+00 -2.69655228e+00] [ 2.99922061e+00 -7.14405596e-01 -3.91140163e-01 ... 1.41394496e+00 5.75906324e+00 9.04233634e-01] [-4.60715264e-01 -1.54302642e-01 2.18123293e+00 ... 3.09182143e+00 6.68783963e-01 -1.13116848e+00] ... [ 1.27211027e-02 1.93274677e+00 1.60938287e+00 ... -2.27798119e-01 7.77132511e-01 -8.19271266e-01] [-1.51870251e+00 1.74419177e+00 -1.80975124e-01 ... 
2.73467517e+00 -1.64088234e-01 -1.74096680e+00] [-2.10980701e+00 -2.09429026e+00 -1.69718182e+00 ... -1.26546431e+00 -1.34485722e+00 -3.56876373e+00]] ... [[ 4.64030474e-01 1.64592221e-01 -6.99414670e-01 ... -1.59893572e+00 3.02677345e+00 2.41616392e+00] [ 1.38897955e+00 -4.49727595e-01 -9.26410928e-02 ... 6.82004988e-01 -2.56735086e+00 1.44330096e+00] [-1.60245275e+00 3.89840341e+00 -7.15684593e-01 ... 4.22231294e-02 6.16701305e-01 -1.95627451e-01] ... [ 1.05939472e+00 2.22739291e+00 -5.63787341e-01 ... -1.46416199e+00 2.15385342e+00 -2.05042720e+00] [ 1.83954549e+00 -1.50349271e+00 5.04779220e-01 ... 1.28970587e+00 2.20670864e-01 1.13062751e+00] [ 3.81405741e-01 -2.90278643e-01 9.87720490e-01 ... 1.36287892e+00 1.36238635e+00 -1.04645658e+00]] [[-3.64439607e-01 1.44002342e+00 -1.18064657e-01 ... -2.68856287e+00 -1.59563363e+00 4.07161325e-01] [-1.71436578e-01 -1.58349967e+00 1.75067578e-02 ... 1.58543468e+00 -2.69643247e-01 -4.81721580e-01] [-1.60415339e+00 -9.93966579e-01 1.80142581e+00 ... 1.38347046e-02 1.79599369e+00 -1.14799254e-01] ... [ 6.49912119e-01 9.52600896e-01 1.02336478e+00 ... -3.16647649e+00 -1.62745726e+00 5.37378120e+00] [-1.63848412e+00 -1.26288772e+00 2.84059548e+00 ... 6.20652795e-01 1.37725663e+00 -2.55850816e+00] [-2.21396637e+00 -3.36419851e-01 -1.67294323e+00 ... 8.06434512e-01 -6.27097547e-01 2.45726538e+00]] [[ 2.13058090e+00 -1.68592596e+00 3.05899769e-01 ... -2.61237097e+00 3.09982777e+00 1.00391757e+00] [ 3.33821654e+00 3.70483518e-01 8.27186942e-01 ... 5.22745669e-01 -8.17758799e-01 2.32231617e+00] [-1.32971078e-01 -4.61601448e+00 5.77191234e-01 ... -1.54977369e+00 -1.67629325e+00 -3.75093162e-01] ... [-2.75886238e-01 -1.14250255e+00 1.36655498e+00 ... -1.30961573e+00 -1.32703602e+00 2.02998543e+00] [-2.11875081e+00 2.19531822e+00 1.43920577e+00 ... -1.02984285e+00 2.10817051e+00 3.46726942e+00] [-1.61581075e+00 1.24454081e+00 5.88993371e-01 ... 
6.50056243e-01 1.40411973e+00 -2.30453396e+00]]] [[[-1.99527010e-01 -3.43720354e-02 1.31652519e-01 ... 1.24669187e-01 1.02413028e-01 -2.98847824e-01] [-3.46373647e-01 5.30914217e-02 -3.72970194e-01 ... 1.67174160e-01 -2.04458967e-01 3.08056846e-02] [-2.98474003e-02 6.61655143e-02 -5.42826891e-01 ... -1.82001755e-01 6.55892119e-02 -5.37915826e-01] ... [ 7.77305216e-02 5.51206619e-02 7.00900137e-01 ... 2.71233499e-01 3.77125263e-01 1.87899526e-02] [ 3.16223264e-01 -2.78846383e-01 1.67054653e-01 ... -9.98288691e-02 3.16352397e-01 -1.55192260e-02] [ 4.02397305e-01 -4.97604281e-01 1.80464014e-01 ... -3.14673394e-01 -3.53888333e-01 4.26026016e-01]] [[-9.24167261e-02 -5.56603193e-01 -8.13351721e-02 ... 1.87341899e-01 4.47005332e-01 -2.42959738e-01] [-4.53088582e-01 -6.04067624e-01 2.92648165e-03 ... -1.61281321e-02 4.30488139e-01 -1.48459569e-01] [ 4.06915009e-01 1.28059551e-01 -1.72244906e-01 ... 2.77278990e-01 9.54096615e-02 -1.03693582e-01] ... [ 1.41390294e-01 -4.74938303e-01 -1.55237794e-01 ... 1.05544426e-01 -2.49241710e-01 -1.28156051e-01] [-8.07045251e-02 -1.03248999e-01 -5.10266647e-02 ... 1.55679718e-01 6.65359676e-01 4.35978621e-01] [-2.56762028e-01 -4.26727198e-02 -3.70506614e-01 ... -2.04013854e-01 -1.57367721e-01 -1.44321308e-01]] [[ 2.02287123e-01 -2.12341040e-01 9.82000455e-02 ... -3.48969363e-02 -6.36902511e-01 2.10670575e-01] [-6.91367447e-01 3.01318944e-01 -3.32011938e-01 ... 2.65831739e-01 2.99983829e-01 -5.52472286e-02] [-3.61937769e-02 1.78946197e-01 1.70717612e-01 ... 4.65378631e-03 7.57278949e-02 -5.21547794e-01] ... [-2.30041519e-01 1.11054800e-01 -8.33186656e-02 ... 5.16502202e-01 1.34847924e-01 1.43483371e-01] [-2.15022027e-01 4.50864613e-01 -3.24094296e-01 ... 6.01407349e-01 -4.01825637e-01 -3.60923618e-01] [-2.11586088e-01 3.63255590e-01 -2.85087675e-01 ... 4.53504741e-01 7.65142068e-02 -8.97813663e-02]] ... [[-2.55485326e-01 -3.23082864e-01 1.65154234e-01 ... 
-2.07796380e-01 -2.35212177e-01 1.13359220e-01] [ 1.78116605e-01 4.99972999e-02 3.25975835e-01 ... -1.45366073e-01 4.29392219e-01 3.98244411e-01] [ 1.76126435e-01 -2.89867133e-01 -4.76969451e-01 ... 1.06796660e-01 -1.26452520e-01 9.09347385e-02] ... [-4.95829254e-01 -9.88977849e-02 -1.92660809e-01 ... -3.05774391e-01 2.60880738e-01 -1.40365660e-01] [ 2.27320641e-01 -2.54370630e-01 3.09376776e-01 ... -2.57894129e-01 5.72816551e-01 -5.13732255e-01] [-4.73847866e-01 2.55846530e-02 -6.14198484e-02 ... -1.99864596e-01 8.94167185e-01 -1.23079129e-01]] [[-1.96495041e-01 6.35281026e-01 3.10246527e-01 ... -3.06174695e-01 -2.70191073e-01 -1.58884332e-01] [ 3.44280005e-01 1.25476465e-01 -3.95824999e-01 ... 3.51923883e-01 3.14237438e-02 -2.89533865e-02] [ 3.47062200e-01 -7.18515575e-01 8.56845379e-02 ... -3.97488708e-03 -4.17476259e-02 -1.81015044e-01] ... [-2.36587331e-01 1.56243056e-01 -8.07697251e-02 ... -9.74412978e-01 -1.29686072e-01 -1.87757924e-01] [ 1.38987854e-01 -2.81661451e-01 1.32802844e-01 ... -1.45187438e-01 2.14063376e-02 -1.62348285e-01] [-4.69862670e-01 -1.28967449e-01 4.67489399e-02 ... 4.61154759e-01 9.12254527e-02 -2.85187691e-01]] [[-2.13225111e-01 -2.55590468e-03 -2.28453279e-01 ... -1.96332559e-01 6.55265525e-02 8.47231522e-02] [-3.90864968e-01 -4.32316437e-02 -1.94144137e-02 ... -2.33515307e-01 4.95122746e-02 2.82449964e-02] [ 2.89555043e-01 9.52425599e-02 4.41881359e-01 ... -3.31142843e-01 -2.29860514e-01 6.54559493e-01] ... [ 1.07339218e-01 4.18070138e-01 4.11162764e-01 ... -2.42488503e-01 2.00250119e-01 -1.19624950e-01] [-1.71397775e-01 -1.33787096e-01 5.71882166e-03 ... 5.26587546e-01 3.41208935e-01 6.65682033e-02] [-2.09318534e-01 -3.38130653e-01 1.50052607e-01 ... -1.09489486e-01 -4.36935604e-01 5.59711099e-01]]] [[[-1.46964204e+00 1.19669533e+00 -3.61488134e-01 ... 5.19540012e-01 6.15840971e-01 -2.05389905e+00] [-1.23246968e+00 -1.72601435e-02 -3.30151176e+00 ... 
-1.53803363e-01 -1.83968604e+00 -5.32742143e-02] [ 2.15771049e-02 2.66219378e-01 -4.86379534e-01 ... -3.49714160e-02 5.33976734e-01 -5.48135817e-01] ... [ 8.30416322e-01 -8.10426354e-01 -2.30624533e+00 ... 3.70647728e-01 -1.43698680e+00 5.16501725e-01] [-1.87111169e-01 -4.82066780e-01 3.39568794e-01 ... 4.37817276e-01 -2.41958737e+00 8.15437317e-01] [ 2.46343112e+00 -4.78777349e-01 2.57159543e+00 ... -9.70514894e-01 -9.81735468e-01 -3.31759810e-01]] [[-3.19565296e-01 -6.67339504e-01 2.41145818e-03 ... -1.03414249e+00 1.33137798e+00 -1.79036200e-01] [ 2.06730819e+00 2.53118098e-01 -4.38868076e-01 ... -8.22943628e-01 -4.21241522e-01 -8.93705726e-01] [-4.12096500e-01 -2.71665263e+00 -3.44545865e+00 ... 2.93425536e+00 2.55519390e-01 -9.69943032e-02] ... [ 1.70128465e+00 -8.28823924e-01 7.32939720e-01 ... -3.93531680e-01 1.65441108e+00 -2.09905982e+00] [ 4.20557052e-01 -6.45092666e-01 2.98664594e+00 ... -1.59629226e-01 1.30837142e+00 -1.01885974e+00] [-1.15299439e+00 1.35650730e+00 2.74253190e-01 ... 6.22929096e-01 2.18633115e-01 1.95353186e+00]] [[-1.41452897e+00 4.33910578e-01 1.16084032e-01 ... 1.10453236e+00 4.28716600e-01 -1.88811636e+00] [-1.36105669e+00 -1.00154078e+00 -1.71954560e+00 ... -4.84511912e-01 -1.94361794e+00 -1.59986567e+00] [-1.33794403e+00 -1.02002478e+00 -9.45655525e-01 ... -2.19216251e+00 1.35455894e+00 -1.12669326e-01] ... [-1.46978664e+00 -1.35065711e+00 -1.52603418e-01 ... -5.10162115e-01 2.02847362e+00 -7.53695816e-02] [-1.04926276e+00 -7.05976933e-02 1.27453706e-03 ... -7.84404227e-04 -5.83291829e-01 -2.40572977e+00] [ 1.32573855e+00 1.39055717e+00 -1.06360599e-01 ... 2.52884537e-01 -8.98125470e-01 2.01407504e+00]] ... [[ 1.25836217e+00 1.66637647e+00 9.54699695e-01 ... 6.37907863e-01 -1.22099660e-01 -4.40560102e-01] [-1.99260795e+00 -1.92850277e-01 -3.98777425e-01 ... -1.61069191e+00 1.34195730e-01 1.05081367e+00] [-2.74174482e-01 3.00593376e-01 2.36869001e+00 ... 1.96915722e+00 1.21478879e+00 -2.82423198e-01] ... 
[-2.83539623e-01 -1.00473814e-01 -7.98629403e-01 ... 2.04119587e+00 7.96326041e-01 -1.45022905e+00] [-3.27139199e-01 1.11028564e+00 7.25032508e-01 ... 7.01453015e-02 -4.66689914e-01 1.88812792e+00] [ 1.87219286e+00 1.82537472e+00 -3.26811343e-01 ... 1.23663104e+00 -8.18782091e-01 -1.15461922e+00]] [[ 2.18449950e+00 -9.73336935e-01 -7.01366782e-01 ... -4.26269412e-01 -1.22021124e-01 7.39347517e-01] [-2.44749737e+00 -1.50475943e+00 -2.61729932e+00 ... 7.16180503e-01 -3.84604979e+00 -1.77677751e+00] [-2.45406199e+00 -1.77860260e+00 1.74016774e+00 ... 9.10565674e-01 1.72033310e+00 -4.02669013e-01] ... [-8.01684558e-01 -1.82877943e-01 6.80637777e-01 ... -1.27184093e+00 -8.34578574e-01 -5.40141881e-01] [ 8.10411990e-01 -1.02935982e+00 -5.54470360e-01 ... 2.70819139e+00 -4.04635519e-01 1.66930449e+00] [ 1.74345386e+00 6.88065290e-01 2.29175121e-01 ... -1.37724352e+00 5.76697588e-01 -2.09070110e+00]] [[-5.88376939e-01 1.30486107e+00 -9.18063700e-01 ... -2.93678093e+00 1.34508777e+00 6.15152955e-01] [-6.72538280e-01 -1.45572960e+00 -3.06665152e-01 ... -8.06681931e-01 8.58011782e-01 -2.38739237e-01] [-1.47901726e+00 1.21993530e+00 5.14984906e-01 ... 9.61585045e-01 1.38978100e+00 3.77352029e-01] ... [-1.10535526e+00 -1.56125534e+00 2.89897132e+00 ... 4.75863703e-02 9.88673806e-01 2.15827376e-01] [ 6.27287507e-01 9.45184052e-01 -4.56552714e-01 ... -7.14213014e-01 1.76579535e-01 2.05554271e+00] [ 1.80074889e-02 -1.54230368e+00 -2.17169309e+00 ... -4.13489914e+00 2.75941879e-01 -1.25153184e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 1, 1], 'bias_shape': [1], 'pads': [1, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1010.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[1, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.6173 (2,1,1,.,.) = 0.5619 (3,1,1,.,.) = -0.9843 [ CPUFloatType{3,1,1,1,1} ]]() %10 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%10) fw_re: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 1.3400904 -1.0730318 1.0147448 ... 0.2636985 -2.5386198 -2.5620735 ] [ 1.0596045 -1.1488533 1.0791314 ... -0.7552473 -2.3310182 1.026121 ] [ 1.0750109 0.69527054 -0.23489437 ... -2.103264 1.3799268 -1.2680191 ] ... [-0.7013416 0.0268004 2.302135 ... 0.60701877 2.0889928 0.43691882] [-1.5501381 -0.78713655 -0.04780811 ... 1.3885598 -1.7823173 -0.45159024] [-2.8109183 2.5675368 -1.9782717 ... -2.091314 -0.7332592 -1.3695498 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-1.0259289 -0.53390795 0.6931147 ... 
0.8817581 -0.5889135 0.37981743] [-2.1811986 1.2671942 -2.5710142 ... -0.69315386 -0.01725751 -0.2671184 ] [-0.07038292 1.0071334 -0.50935507 ... 0.1915794 0.571904 0.24910615] ... [ 0.5484286 0.6761725 -0.60060495 ... 1.2456874 0.7462054 -0.10671508] [ 4.502967 0.6867013 2.836767 ... -1.7223301 -1.1597462 -2.603922 ] [-0.37082162 -2.2713368 -0.78104633 ... -2.7244797 -1.5476863 1.8417543 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]]]; ov_res: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 1.3400904 -1.0730318 1.0147448 ... 0.2636985 -2.5386198 -2.5620735 ] [ 1.0596045 -1.1488533 1.0791314 ... -0.7552473 -2.3310182 1.026121 ] [ 1.0750109 0.69527054 -0.23489437 ... -2.103264 1.3799268 -1.2680191 ] ... [-0.7013416 0.0268004 2.302135 ... 0.60701877 2.0889928 0.43691882] [-1.5501381 -0.78713655 -0.04780811 ... 1.3885598 -1.7823173 -0.45159024] [-2.8109183 2.5675368 -1.9782717 ... -2.091314 -0.7332592 -1.3695498 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-1.0259289 -0.53390795 0.6931147 ... 0.8817581 -0.5889135 0.37981743] [-2.1811986 1.2671942 -2.5710142 ... -0.69315386 -0.01725751 -0.2671184 ] [-0.07038292 1.0071334 -0.50935507 ... 0.1915794 0.571904 0.24910615] ... [ 0.5484286 0.6761725 -0.60060495 ... 1.2456874 0.7462054 -0.10671508] [ 4.502967 0.6867013 2.836767 ... -1.7223301 -1.1597462 -2.603922 ] [-0.37082162 -2.2713368 -0.78104633 ... -2.7244797 -1.5476863 1.8417543 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 
0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 1, 1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1012.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.01 * -7.3986 (2,1,1,.,.) = 2.0279 (3,1,1,.,.) = 1.0612 (1,2,1,.,.) = -1.1050 (2,2,1,.,.) = 0.5175 (3,2,1,.,.) = -1.6846 (1,3,1,.,.) = -0.4525 (2,3,1,.,.) = -0.7248 (3,3,1,.,.) = 0.2157 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 2.58957720e+00 1.71236539e+00 6.52948320e-01 ... 2.11463809e+00 -1.65892780e-01 1.03726494e+00] [ 1.02420449e+00 1.26018941e+00 9.71840918e-01 ... -2.76996821e-01 -1.21437800e+00 -1.57563484e+00] [-1.68026435e+00 -1.42315924e+00 7.88345456e-01 ... 2.28970480e+00 5.44526100e-01 -1.10763276e+00] ... [-6.30874455e-01 2.93308854e-01 -2.31088996e-01 ... -2.38491440e+00 1.12579298e+00 7.64711022e-01] [-8.43065977e-01 -4.34550405e-01 -9.18934405e-01 ... -1.23437166e-01 5.20588279e-01 -9.00823295e-01] [ 1.98364556e+00 1.58462420e-01 2.18563652e+00 ... -8.12061191e-01 -1.14767706e+00 6.86049402e-01]] [[ 2.88669586e-01 4.18172032e-01 1.54979765e+00 ... 9.39754069e-01 9.98146087e-02 -1.75927341e+00] [ 2.10459113e-01 2.37398267e-01 4.90081400e-01 ... -8.40116516e-02 -7.74469435e-01 -1.30594182e+00] [-1.33135402e+00 8.70073557e-01 9.24159527e-01 ... -6.96330667e-02 1.65558469e+00 -6.25885308e-01] ... 
[ 1.17026544e+00 7.53550887e-01 6.26914501e-01 ... -1.68121290e+00 -1.50586319e+00 -1.25439966e+00] [ 1.23093438e+00 2.54833388e+00 -8.06980073e-01 ... -1.35212111e+00 -5.93242347e-01 -1.92918861e+00] [ 5.61977863e-01 -1.46047735e+00 -1.16701806e+00 ... -1.27432966e+00 -8.53046298e-01 3.93040150e-01]] [[ 9.64704990e-01 5.91748834e-01 -2.08833361e+00 ... 1.68350303e+00 1.26007020e-01 3.08339179e-01] [ 1.38339615e+00 2.37855256e-01 1.86861467e+00 ... 1.31399548e+00 -1.67221975e+00 4.46084231e-01] [-1.18307841e+00 -7.96207726e-01 -2.35584890e-03 ... 2.26216412e+00 -1.04615104e+00 1.67286992e+00] ... [-2.10916734e+00 1.25649273e+00 -6.85827076e-01 ... -1.94848076e-01 -8.16535175e-01 2.50794435e+00] [ 3.36391836e-01 7.50150234e-02 1.62661815e+00 ... 4.97993112e-01 -8.20363343e-01 2.13028121e+00] [ 1.45785940e+00 -3.23886901e-01 1.81931281e+00 ... 1.29322577e+00 -1.63291469e-02 1.39243174e+00]] [[ 6.65925026e-01 -2.52821326e+00 -2.13179898e+00 ... -2.46273071e-01 2.07071647e-01 -5.80961704e-01] [ 2.13370037e+00 6.14581287e-01 6.40982151e-01 ... 8.39006066e-01 -1.25269496e+00 2.62389183e+00] [ 7.34236956e-01 1.68756938e+00 8.01346779e-01 ... -2.87492275e-01 4.73850489e-01 6.45047605e-01] ... [ 3.92955504e-02 2.30321243e-01 2.03651130e-01 ... 2.25747190e-02 3.87463927e-01 -1.56436312e+00] [-2.55332708e-01 6.95577919e-01 -1.51312876e+00 ... -2.12180465e-01 2.30966568e+00 9.20100391e-01] [-1.22818077e+00 1.70854002e-01 -1.89855409e+00 ... -1.56865299e+00 -1.55841267e+00 9.47027132e-02]] [[-2.52960110e+00 -2.25822031e-02 2.96458989e-01 ... 1.78982306e+00 -1.41474140e+00 1.42012310e+00] [ 3.62109840e-01 -1.24006951e+00 -1.67261100e+00 ... 9.22189653e-01 2.25703299e-01 -7.26173282e-01] [ 9.26846504e-01 2.25705433e+00 1.38946271e+00 ... 1.08930194e+00 -5.16361147e-02 -1.22849524e+00] ... [ 4.66293097e-01 -9.97215331e-01 1.26094353e+00 ... -3.55387866e-01 2.50213016e-02 3.48460704e-01] [-4.86413032e-01 1.38427138e+00 2.00808191e+00 ... 
-2.97372627e+00 -7.13446259e-01 -7.26683855e-01] [-1.10671258e+00 -5.59831560e-01 -6.33730650e-01 ... 5.09136379e-01 1.39811194e+00 9.63844359e-01]]] [[[ 1.81560087e+00 -3.07384133e+00 6.54065371e-01 ... 1.80507255e+00 1.22451890e+00 -9.63284731e-01] [ 1.30423546e-01 7.72237182e-01 -3.38291883e+00 ... 4.56690460e-01 1.39264154e+00 1.51197100e+00] [-3.51976842e-01 -2.36292100e+00 1.83603907e+00 ... -3.59150261e-01 1.47933567e+00 -1.13279417e-01] ... [ 1.72140706e+00 3.17384434e+00 -3.81064922e-01 ... -1.43802786e+00 -3.11185503e+00 4.63329852e-02] [ 2.34769130e+00 1.16898882e+00 -3.30113602e+00 ... 5.03719139e+00 -3.45038867e+00 -1.08605933e+00] [ 1.58701921e+00 3.46370053e+00 7.89138854e-01 ... -1.55635345e+00 -2.20968652e+00 1.55907369e+00]] [[-3.60265732e-01 -4.15527773e+00 -7.21127808e-01 ... -1.39490306e+00 3.46118331e+00 5.60974956e-01] [ 4.37092751e-01 2.77864766e+00 -3.47636521e-01 ... 5.60541630e-01 1.49820173e+00 1.14903804e-02] [ 4.49223787e-01 -2.82276511e+00 2.01079226e+00 ... 2.46656752e+00 -2.93107438e+00 1.46306622e+00] ... [ 1.37706220e-01 2.42046952e+00 -4.01427418e-01 ... 2.39957070e+00 3.30128789e+00 3.77533460e+00] [-1.54169118e+00 -9.23272908e-01 -5.88649988e-01 ... 3.30307531e+00 -1.30303407e+00 1.89540827e+00] [ 1.58871815e-01 1.03409529e-01 2.36344337e+00 ... 1.21841168e+00 -1.03456211e+00 2.49551034e+00]] [[ 1.76739061e+00 2.57361919e-01 -5.74996471e-01 ... 2.70258927e+00 2.03890491e+00 -2.76019454e+00] [-1.02230632e+00 8.48417580e-01 -1.38808823e+00 ... -6.60618901e-01 3.90146494e+00 1.18387246e+00] [ 2.90888047e+00 3.81215119e+00 -6.36896372e-01 ... -2.18222332e+00 -7.88542747e-01 -8.73379469e-01] ... [ 1.74783742e+00 3.60349357e-01 -2.06737208e+00 ... -6.05501592e-01 4.51786518e+00 1.55421591e+00] [-2.68101275e-01 -1.33961821e+00 -7.47864962e-01 ... -8.48013580e-01 -3.36119246e+00 -3.76520562e+00] [ 2.49089718e+00 -3.75055403e-01 -5.94415128e-01 ... 
-1.47900283e+00 -4.79207230e+00 -2.07373798e-01]] [[-7.67835021e-01 2.68751407e+00 -8.71665657e-01 ... 2.25480184e-01 -2.13937306e+00 4.02020979e+00] [ 4.35593367e+00 -2.96167135e-01 -4.59634304e-01 ... -6.63481727e-02 -2.18759131e+00 2.61111021e+00] [ 2.33178449e+00 -3.39929938e-01 2.25461102e+00 ... -5.74624240e-01 -1.85060918e+00 -1.17129254e+00] ... [ 6.11028552e-01 9.13322091e-01 1.98505843e+00 ... -2.06169558e+00 3.72288132e+00 -6.36724770e-01] [-8.91442358e-01 -2.18603447e-01 2.71705675e+00 ... -2.01253906e-01 1.97322339e-01 -1.88081074e+00] [-8.89356434e-01 -8.43466282e-01 -2.32065392e+00 ... 8.53785723e-02 4.00511074e+00 1.40490597e-02]] [[-1.93826035e-01 4.83642183e-02 3.74043179e+00 ... -5.57999563e+00 3.11577857e-01 2.89660382e+00] [-2.39219046e+00 5.95835924e-01 -6.50890493e+00 ... -1.41255474e+00 2.56736994e+00 -1.06520429e-01] [ 3.02719623e-01 -2.09484458e+00 1.10516405e+00 ... 1.52348161e+00 1.60894811e+00 1.78996038e+00] ... [ 6.84617102e-01 -4.78147194e-02 1.17037475e+00 ... 3.65967226e+00 -1.81160903e+00 -3.05288577e+00] [-1.41308141e+00 2.75298476e+00 -3.31343317e+00 ... 8.12606037e-01 4.09099913e+00 -2.35299706e+00] [-1.85688698e+00 -4.12336498e-01 -3.92059374e+00 ... 1.64694405e+00 -5.59366107e-01 -1.01040490e-01]]] [[[ 1.36326647e+00 -1.50495049e-04 1.64642715e+00 ... 3.54477859e+00 1.11104167e+00 2.06782293e+00] [ 1.56543767e+00 2.16667366e+00 -3.86860251e-01 ... -3.25785249e-01 -7.97126651e-01 -9.64064121e-01] [-1.60064745e+00 -3.95537472e+00 2.03332567e+00 ... 4.07360840e+00 -2.74254775e+00 -4.18664843e-01] ... [-1.51066899e+00 1.54719162e+00 8.88215125e-01 ... -4.08296251e+00 4.62638825e-01 5.60815871e-01] [-2.01914239e+00 -2.33247232e+00 -1.87839794e+00 ... 6.68725818e-02 -2.17441893e+00 -1.40586984e+00] [ 2.20148087e+00 2.04147243e+00 5.13234138e+00 ... -1.84177184e+00 -1.55956018e+00 1.03807926e+00]] [[-1.54347467e+00 -8.11427295e-01 3.31343174e+00 ... 
5.93246102e-01 -3.44324380e-01 -1.06514335e+00] [ 7.00556695e-01 2.12742352e+00 1.33909017e-01 ... 1.74400663e+00 4.86407965e-01 -1.07426500e+00] [-3.51031494e+00 -2.85166335e+00 5.91174513e-02 ... 1.56089544e+00 2.56169653e+00 1.37257218e+00] ... [ 2.42609501e+00 -8.51163745e-01 -5.01108885e-01 ... -3.77003998e-01 -2.49186873e+00 -7.93065131e-02] [ 9.66567039e-01 3.38485003e+00 -5.75123787e-01 ... -1.69428480e+00 -1.70049274e+00 -2.48305202e+00] [ 1.20021224e+00 -2.53933758e-01 -9.66970563e-01 ... -1.50475252e+00 1.92487404e-01 8.40519190e-01]] [[ 1.25939190e+00 1.29938745e+00 -2.31361151e+00 ... 3.70419621e+00 9.29140806e-01 -5.34954667e-01] [ 3.92763942e-01 -1.60894200e-01 3.06029582e+00 ... 1.05145514e+00 1.24640393e+00 7.32169211e-01] [-5.01803994e-01 6.75713956e-01 7.32481062e-01 ... 9.68573868e-01 1.32150754e-01 4.19913721e+00] ... [-2.46556807e+00 6.48587406e-01 -3.73064488e-01 ... -3.66028637e-01 -1.34162605e+00 4.28882647e+00] [-3.88924032e-01 2.32383990e+00 9.25356448e-01 ... 1.36478484e+00 -2.31924939e+00 2.47679782e+00] [ 2.60839272e+00 1.42199147e+00 2.58650184e+00 ... -1.38918996e-01 -1.15829229e+00 6.50236964e-01]] [[ 2.99105334e+00 -2.80850840e+00 -3.52229738e+00 ... 2.96356416e+00 -3.23174143e+00 3.32364470e-01] [ 2.84377789e+00 1.03774357e+00 4.80338894e-02 ... 1.32519126e+00 -3.00495410e+00 3.24999237e+00] [ 6.76498115e-01 6.38291016e-02 3.05307460e+00 ... -1.60912621e+00 -3.20717037e-01 1.60603178e+00] ... [ 2.96876997e-01 1.95249915e-01 -1.49260890e+00 ... -3.21793771e+00 2.09857702e+00 -3.94084287e+00] [-2.38721108e+00 1.09966671e+00 1.02940178e+00 ... -2.30758503e-01 3.81177115e+00 -1.16423666e+00] [ 8.43889490e-02 -1.22555757e+00 -3.03534889e+00 ... -1.37741423e+00 8.38019669e-01 -2.22322226e+00]] [[-1.03991008e+00 -9.66355503e-01 2.86584616e+00 ... 1.00928390e+00 -1.26487243e+00 5.85098791e+00] [ 1.85722196e+00 -2.23717046e+00 -4.23636198e+00 ... 
1.12268770e+00 3.61444831e-01 -2.25000119e+00] [ 6.50057435e-01 8.60302895e-02 2.18200016e+00 ... 1.77158606e+00 -4.64547545e-01 -3.66358185e+00] ... [ 1.35966742e+00 -6.71615183e-01 6.39420867e-01 ... 2.18067169e+00 1.16057599e+00 7.97927797e-01] [-2.01330376e+00 4.93020391e+00 1.93773198e+00 ... -3.22896504e+00 1.61311436e+00 -9.38106179e-01] [-1.48798883e+00 -2.00095490e-01 -1.57506275e+00 ... 1.66705549e+00 2.36106038e+00 5.88373423e-01]]]]]; ov_res: [[[[[ 2.58957720e+00 1.71236539e+00 6.52948320e-01 ... 2.11463809e+00 -1.65892780e-01 1.03726494e+00] [ 1.02420449e+00 1.26018941e+00 9.71840918e-01 ... -2.76996821e-01 -1.21437800e+00 -1.57563484e+00] [-1.68026435e+00 -1.42315924e+00 7.88345456e-01 ... 2.28970480e+00 5.44526100e-01 -1.10763276e+00] ... [-6.30874455e-01 2.93308854e-01 -2.31088996e-01 ... -2.38491440e+00 1.12579298e+00 7.64711022e-01] [-8.43065977e-01 -4.34550405e-01 -9.18934405e-01 ... -1.23437166e-01 5.20588279e-01 -9.00823295e-01] [ 1.98364556e+00 1.58462420e-01 2.18563652e+00 ... -8.12061191e-01 -1.14767706e+00 6.86049402e-01]] [[ 2.88669586e-01 4.18172032e-01 1.54979765e+00 ... 9.39754069e-01 9.98146087e-02 -1.75927341e+00] [ 2.10459113e-01 2.37398267e-01 4.90081400e-01 ... -8.40116516e-02 -7.74469435e-01 -1.30594182e+00] [-1.33135402e+00 8.70073557e-01 9.24159527e-01 ... -6.96330667e-02 1.65558469e+00 -6.25885308e-01] ... [ 1.17026544e+00 7.53550887e-01 6.26914501e-01 ... -1.68121290e+00 -1.50586319e+00 -1.25439966e+00] [ 1.23093438e+00 2.54833388e+00 -8.06980073e-01 ... -1.35212111e+00 -5.93242347e-01 -1.92918861e+00] [ 5.61977863e-01 -1.46047735e+00 -1.16701806e+00 ... -1.27432966e+00 -8.53046298e-01 3.93040150e-01]] [[ 9.64704990e-01 5.91748834e-01 -2.08833361e+00 ... 1.68350303e+00 1.26007020e-01 3.08339179e-01] [ 1.38339615e+00 2.37855256e-01 1.86861467e+00 ... 1.31399548e+00 -1.67221975e+00 4.46084231e-01] [-1.18307841e+00 -7.96207726e-01 -2.35584890e-03 ... 2.26216412e+00 -1.04615104e+00 1.67286992e+00] ... 
[-2.10916734e+00 1.25649273e+00 -6.85827076e-01 ... -1.94848076e-01 -8.16535175e-01 2.50794435e+00] [ 3.36391836e-01 7.50150234e-02 1.62661815e+00 ... 4.97993112e-01 -8.20363343e-01 2.13028121e+00] [ 1.45785940e+00 -3.23886901e-01 1.81931281e+00 ... 1.29322577e+00 -1.63291469e-02 1.39243174e+00]] [[ 6.65925026e-01 -2.52821326e+00 -2.13179898e+00 ... -2.46273071e-01 2.07071647e-01 -5.80961704e-01] [ 2.13370037e+00 6.14581287e-01 6.40982151e-01 ... 8.39006066e-01 -1.25269496e+00 2.62389183e+00] [ 7.34236956e-01 1.68756938e+00 8.01346779e-01 ... -2.87492275e-01 4.73850489e-01 6.45047605e-01] ... [ 3.92955504e-02 2.30321243e-01 2.03651130e-01 ... 2.25747190e-02 3.87463927e-01 -1.56436312e+00] [-2.55332708e-01 6.95577919e-01 -1.51312876e+00 ... -2.12180465e-01 2.30966568e+00 9.20100391e-01] [-1.22818077e+00 1.70854002e-01 -1.89855409e+00 ... -1.56865299e+00 -1.55841267e+00 9.47027132e-02]] [[-2.52960110e+00 -2.25822031e-02 2.96458989e-01 ... 1.78982306e+00 -1.41474140e+00 1.42012310e+00] [ 3.62109840e-01 -1.24006951e+00 -1.67261100e+00 ... 9.22189653e-01 2.25703299e-01 -7.26173282e-01] [ 9.26846504e-01 2.25705433e+00 1.38946271e+00 ... 1.08930194e+00 -5.16361147e-02 -1.22849524e+00] ... [ 4.66293097e-01 -9.97215331e-01 1.26094353e+00 ... -3.55387866e-01 2.50213016e-02 3.48460704e-01] [-4.86413032e-01 1.38427138e+00 2.00808191e+00 ... -2.97372627e+00 -7.13446259e-01 -7.26683855e-01] [-1.10671258e+00 -5.59831560e-01 -6.33730650e-01 ... 5.09136379e-01 1.39811194e+00 9.63844359e-01]]] [[[ 1.81560087e+00 -3.07384133e+00 6.54065371e-01 ... 1.80507255e+00 1.22451890e+00 -9.63284731e-01] [ 1.30423546e-01 7.72237182e-01 -3.38291883e+00 ... 4.56690460e-01 1.39264154e+00 1.51197100e+00] [-3.51976842e-01 -2.36292100e+00 1.83603907e+00 ... -3.59150261e-01 1.47933567e+00 -1.13279417e-01] ... [ 1.72140706e+00 3.17384434e+00 -3.81064922e-01 ... -1.43802786e+00 -3.11185503e+00 4.63329852e-02] [ 2.34769130e+00 1.16898882e+00 -3.30113602e+00 ... 
5.03719139e+00 -3.45038867e+00 -1.08605933e+00] [ 1.58701921e+00 3.46370053e+00 7.89138854e-01 ... -1.55635345e+00 -2.20968652e+00 1.55907369e+00]] [[-3.60265732e-01 -4.15527773e+00 -7.21127808e-01 ... -1.39490306e+00 3.46118331e+00 5.60974956e-01] [ 4.37092751e-01 2.77864766e+00 -3.47636521e-01 ... 5.60541630e-01 1.49820173e+00 1.14903804e-02] [ 4.49223787e-01 -2.82276511e+00 2.01079226e+00 ... 2.46656752e+00 -2.93107438e+00 1.46306622e+00] ... [ 1.37706220e-01 2.42046952e+00 -4.01427418e-01 ... 2.39957070e+00 3.30128789e+00 3.77533460e+00] [-1.54169118e+00 -9.23272908e-01 -5.88649988e-01 ... 3.30307531e+00 -1.30303407e+00 1.89540827e+00] [ 1.58871815e-01 1.03409529e-01 2.36344337e+00 ... 1.21841168e+00 -1.03456211e+00 2.49551034e+00]] [[ 1.76739061e+00 2.57361919e-01 -5.74996471e-01 ... 2.70258927e+00 2.03890491e+00 -2.76019454e+00] [-1.02230632e+00 8.48417580e-01 -1.38808823e+00 ... -6.60618901e-01 3.90146494e+00 1.18387246e+00] [ 2.90888047e+00 3.81215119e+00 -6.36896372e-01 ... -2.18222332e+00 -7.88542747e-01 -8.73379469e-01] ... [ 1.74783742e+00 3.60349357e-01 -2.06737208e+00 ... -6.05501592e-01 4.51786518e+00 1.55421591e+00] [-2.68101275e-01 -1.33961821e+00 -7.47864962e-01 ... -8.48013580e-01 -3.36119246e+00 -3.76520562e+00] [ 2.49089718e+00 -3.75055403e-01 -5.94415128e-01 ... -1.47900283e+00 -4.79207230e+00 -2.07373798e-01]] [[-7.67835021e-01 2.68751407e+00 -8.71665657e-01 ... 2.25480184e-01 -2.13937306e+00 4.02020979e+00] [ 4.35593367e+00 -2.96167135e-01 -4.59634304e-01 ... -6.63481727e-02 -2.18759131e+00 2.61111021e+00] [ 2.33178449e+00 -3.39929938e-01 2.25461102e+00 ... -5.74624240e-01 -1.85060918e+00 -1.17129254e+00] ... [ 6.11028552e-01 9.13322091e-01 1.98505843e+00 ... -2.06169558e+00 3.72288132e+00 -6.36724770e-01] [-8.91442358e-01 -2.18603447e-01 2.71705675e+00 ... -2.01253906e-01 1.97322339e-01 -1.88081074e+00] [-8.89356434e-01 -8.43466282e-01 -2.32065392e+00 ... 
8.53785723e-02 4.00511074e+00 1.40490597e-02]] [[-1.93826035e-01 4.83642183e-02 3.74043179e+00 ... -5.57999563e+00 3.11577857e-01 2.89660382e+00] [-2.39219046e+00 5.95835924e-01 -6.50890493e+00 ... -1.41255474e+00 2.56736994e+00 -1.06520429e-01] [ 3.02719623e-01 -2.09484458e+00 1.10516405e+00 ... 1.52348161e+00 1.60894811e+00 1.78996038e+00] ... [ 6.84617102e-01 -4.78147194e-02 1.17037475e+00 ... 3.65967226e+00 -1.81160903e+00 -3.05288577e+00] [-1.41308141e+00 2.75298476e+00 -3.31343317e+00 ... 8.12606037e-01 4.09099913e+00 -2.35299706e+00] [-1.85688698e+00 -4.12336498e-01 -3.92059374e+00 ... 1.64694405e+00 -5.59366107e-01 -1.01040490e-01]]] [[[ 1.36326647e+00 -1.50495049e-04 1.64642715e+00 ... 3.54477859e+00 1.11104167e+00 2.06782293e+00] [ 1.56543767e+00 2.16667366e+00 -3.86860251e-01 ... -3.25785249e-01 -7.97126651e-01 -9.64064121e-01] [-1.60064745e+00 -3.95537472e+00 2.03332567e+00 ... 4.07360840e+00 -2.74254775e+00 -4.18664843e-01] ... [-1.51066899e+00 1.54719162e+00 8.88215125e-01 ... -4.08296251e+00 4.62638825e-01 5.60815871e-01] [-2.01914239e+00 -2.33247232e+00 -1.87839794e+00 ... 6.68725818e-02 -2.17441893e+00 -1.40586984e+00] [ 2.20148087e+00 2.04147243e+00 5.13234138e+00 ... -1.84177184e+00 -1.55956018e+00 1.03807926e+00]] [[-1.54347467e+00 -8.11427295e-01 3.31343174e+00 ... 5.93246102e-01 -3.44324380e-01 -1.06514335e+00] [ 7.00556695e-01 2.12742352e+00 1.33909017e-01 ... 1.74400663e+00 4.86407965e-01 -1.07426500e+00] [-3.51031494e+00 -2.85166335e+00 5.91174513e-02 ... 1.56089544e+00 2.56169653e+00 1.37257218e+00] ... [ 2.42609501e+00 -8.51163745e-01 -5.01108885e-01 ... -3.77003998e-01 -2.49186873e+00 -7.93065131e-02] [ 9.66567039e-01 3.38485003e+00 -5.75123787e-01 ... -1.69428480e+00 -1.70049274e+00 -2.48305202e+00] [ 1.20021224e+00 -2.53933758e-01 -9.66970563e-01 ... -1.50475252e+00 1.92487404e-01 8.40519190e-01]] [[ 1.25939190e+00 1.29938745e+00 -2.31361151e+00 ... 
3.70419621e+00 9.29140806e-01 -5.34954667e-01] [ 3.92763942e-01 -1.60894200e-01 3.06029582e+00 ... 1.05145514e+00 1.24640393e+00 7.32169211e-01] [-5.01803994e-01 6.75713956e-01 7.32481062e-01 ... 9.68573868e-01 1.32150754e-01 4.19913721e+00] ... [-2.46556807e+00 6.48587406e-01 -3.73064488e-01 ... -3.66028637e-01 -1.34162605e+00 4.28882647e+00] [-3.88924032e-01 2.32383990e+00 9.25356448e-01 ... 1.36478484e+00 -2.31924939e+00 2.47679782e+00] [ 2.60839272e+00 1.42199147e+00 2.58650184e+00 ... -1.38918996e-01 -1.15829229e+00 6.50236964e-01]] [[ 2.99105334e+00 -2.80850840e+00 -3.52229738e+00 ... 2.96356416e+00 -3.23174143e+00 3.32364470e-01] [ 2.84377789e+00 1.03774357e+00 4.80338894e-02 ... 1.32519126e+00 -3.00495410e+00 3.24999237e+00] [ 6.76498115e-01 6.38291016e-02 3.05307460e+00 ... -1.60912621e+00 -3.20717037e-01 1.60603178e+00] ... [ 2.96876997e-01 1.95249915e-01 -1.49260890e+00 ... -3.21793771e+00 2.09857702e+00 -3.94084287e+00] [-2.38721108e+00 1.09966671e+00 1.02940178e+00 ... -2.30758503e-01 3.81177115e+00 -1.16423666e+00] [ 8.43889490e-02 -1.22555757e+00 -3.03534889e+00 ... -1.37741423e+00 8.38019669e-01 -2.22322226e+00]] [[-1.03991008e+00 -9.66355503e-01 2.86584616e+00 ... 1.00928390e+00 -1.26487243e+00 5.85098791e+00] [ 1.85722196e+00 -2.23717046e+00 -4.23636198e+00 ... 1.12268770e+00 3.61444831e-01 -2.25000119e+00] [ 6.50057435e-01 8.60302895e-02 2.18200016e+00 ... 1.77158606e+00 -4.64547545e-01 -3.66358185e+00] ... [ 1.35966742e+00 -6.71615183e-01 6.39420867e-01 ... 2.18067169e+00 1.16057599e+00 7.97927797e-01] [-2.01330376e+00 4.93020391e+00 1.93773198e+00 ... -3.22896504e+00 1.61311436e+00 -9.38106179e-01] [-1.48798883e+00 -2.00095490e-01 -1.57506275e+00 ... 1.66705549e+00 2.36106038e+00 5.88373423e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} 0] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1014.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.1665 (2,1,1,.,.) = 0.2605 (3,1,1,.,.) = -0.4708 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[-0.96541685 0. 1.478121 ... 1.5001916 0. -0.49135 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.48919937 0. 2.6191053 ... 0.371917 0. 0.5781815 ] ... [-0.67196727 0. -0.53087676 ... -1.1150653 0. 0.32429343] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.4196469 0. -1.5892092 ... 0.46615002 0. -1.042997 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0.582045 0. -1.7618695 ... -2.2399979 0. -1.3204108 ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.99813145 0. 2.3465545 ... 0.61711556 0. 1.4755105 ] ... [ 0.33754534 0. 1.1823934 ... -0.16564229 0. 0.3607981 ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.36494997 0. -0.32833868 ... -1.6520898 0. -1.758356 ]] ... [[-1.394787 0. 1.3284893 ... -0.9466721 0. 0.23481244] [ 0. 0. 0. ... 0. 0. 0. ] [-0.05513497 0. 1.1691855 ... 0.7096533 0. 1.3751864 ] ... [-0.72579193 0. 0.94044286 ... 1.3820853 0. -1.5463301 ] [ 0. 0. 0. ... 0. 0. 0. ] [-2.6229932 0. -1.9434805 ... 0.9319588 0. 0.2965187 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 
0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 2.1281154 0. 0.11672003 ... -0.5814517 0. -0.96780545] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.3840527 0. -1.4933063 ... 0.73525065 0. -2.0617275 ] ... [-0.68564653 0. 0.10719005 ... -0.820063 0. -0.68245244] [ 0. 0. 0. ... 0. 0. 0. ] [-0.9872975 0. -2.9492214 ... 0.01312101 0. 0.73052484]]]]]; ov_res: [[[[[-0.96541685 0. 1.478121 ... 1.5001916 0. -0.49135 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.48919937 0. 2.6191053 ... 0.371917 0. 0.5781815 ] ... [-0.67196727 0. -0.53087676 ... -1.1150653 0. 0.32429343] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.4196469 0. -1.5892092 ... 0.46615002 0. -1.042997 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0.582045 0. -1.7618695 ... -2.2399979 0. -1.3204108 ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.99813145 0. 2.3465545 ... 0.61711556 0. 1.4755105 ] ... [ 0.33754534 0. 1.1823934 ... -0.16564229 0. 0.3607981 ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.36494997 0. -0.32833868 ... -1.6520898 0. -1.758356 ]] ... [[-1.394787 0. 1.3284893 ... -0.9466721 0. 0.23481244] [ 0. 0. 0. ... 0. 0. 0. ] [-0.05513497 0. 1.1691855 ... 0.7096533 0. 1.3751864 ] ... [-0.72579193 0. 0.94044286 ... 1.3820853 0. -1.5463301 ] [ 0. 0. 0. ... 0. 0. 0. ] [-2.6229932 0. -1.9434805 ... 0.9319588 0. 0.2965187 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 2.1281154 0. 0.11672003 ... -0.5814517 0. -0.96780545] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.3840527 0. -1.4933063 ... 0.73525065 0. -2.0617275 ] ... [-0.68564653 0. 0.10719005 ... -0.820063 0. -0.68245244] [ 0. 0. 0. ... 0. 0. 0. ] [-0.9872975 0. -2.9492214 ... 0.01312101 0. 0.73052484]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 2, 2], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1016.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.9957 (2,1,1,.,.) = 0.5809 (3,1,1,.,.) = -0.4832 (1,2,1,.,.) = -1.0819 (2,2,1,.,.) = 0.1258 (3,2,1,.,.) = -1.2222 (1,3,1,.,.) = 0.8219 (2,3,1,.,.) = 0.3249 (3,3,1,.,.) = 0.7106 [ CPUFloatType{3,3,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 9.1353053e-01 -2.6901848e+00 1.7841444e+00 5.7702327e-01 5.0719726e-01] [-1.2891350e+00 3.0771778e+00 1.9215153e+00 -1.3120345e+00 -3.3776627e+00] [ 9.3450695e-01 1.7936667e+00 -1.2126445e+00 5.1523101e-01 -3.0703611e+00] [-1.3032579e+00 8.8803107e-01 1.5765996e-01 1.6429558e+00 -5.2103168e-01] [-3.9444523e+00 -6.2270647e-01 1.2849981e+00 5.7086086e-01 -5.7399523e-01]] [[-1.1748374e+00 -1.8733518e-01 -4.1397807e-01 4.0629545e-01 2.4085932e+00] [ 4.3988366e-02 -1.2304560e+00 9.4845659e-01 3.4697434e-01 9.4244242e-01] [-4.5351166e-01 2.6415069e+00 4.3473297e-01 1.3524176e+00 5.2132475e-01] [ 5.6474978e-01 1.5193895e-01 1.6812540e+00 5.8522022e-01 -7.6771820e-01] [ 3.8028058e-01 4.0260792e+00 -1.3971033e+00 2.2382369e+00 3.7075892e-01]] [[-2.3108151e+00 7.7668393e-01 4.7527081e-01 3.5336763e-01 -1.2799026e+00] [ 5.9730005e-01 3.4413065e-03 6.4956498e-01 1.0165814e+00 -6.5059048e-01] [ 3.2674141e+00 
5.4034173e-01 -3.9237836e-01 -2.3375294e-01 -2.2077005e+00] [ 4.6752220e-01 1.7176911e+00 -4.7065794e-01 -2.2195077e+00 3.1238633e-01] [ 2.6390736e+00 5.3166920e-01 1.0506392e+00 -5.5014592e-01 1.4424353e+00]] [[ 2.9552201e-02 2.7544634e+00 -1.7248785e+00 -1.3187687e+00 -1.5924355e-01] [ 1.8428320e+00 2.4859320e-01 1.5344149e+00 5.5700511e-02 -8.6010259e-01] [ 1.4863013e+00 2.5139098e+00 1.4540170e+00 -2.5476757e-01 -1.8372730e+00] [-1.1112459e+00 9.6910846e-01 3.0837841e+00 -2.6265070e+00 -1.4429014e+00] [ 1.2485694e+00 1.8665712e+00 -1.7536151e+00 -9.9056309e-01 1.8949176e+00]] [[ 6.1319478e-02 1.1408831e+00 2.9723961e+00 3.7511596e-01 -1.3009927e+00] [-9.3201202e-01 -2.9250295e+00 -1.1548207e+00 -5.7529479e-01 1.0887601e-01] [-2.4447355e+00 -1.0020986e+00 4.9862266e+00 2.6845140e+00 -6.7682713e-01] [ 1.8614802e+00 3.7921840e-01 1.1484895e+00 -1.3054434e+00 -4.6660523e+00] [-1.9645796e+00 -1.2887354e+00 7.1364152e-01 -9.7423047e-01 4.4759858e-02]]] [[[ 2.7102786e-01 -9.5921487e-01 4.3009171e-01 2.6022598e-01 4.8224196e-01] [-7.7216202e-01 6.8187398e-01 7.5468463e-01 -1.3483301e+00 -1.1295794e+00] [ 7.2660506e-02 -2.1763361e-01 -5.1058918e-01 -2.4481888e-01 -3.6237079e-01] [-1.3298326e+00 8.8893515e-01 -9.0447024e-02 5.2983981e-01 -3.1480277e-01] [ 1.8324560e-03 7.4755454e-01 3.0160743e-01 4.7371458e-02 1.3538757e-02]] [[-7.0944643e-01 -1.9599345e-01 -4.1770759e-01 6.6092992e-01 4.2381889e-01] [ 2.5786215e-01 2.4373464e-01 2.4372725e-01 1.8644696e-01 2.1967466e-01] [-1.7817928e-01 4.6695629e-01 6.1769289e-01 3.1141892e-01 2.4187635e-01] [-4.6418411e-01 -7.4076939e-01 4.4901723e-01 8.2119621e-02 -1.0687268e+00] [-4.4244720e-04 1.3665816e+00 -5.2804792e-01 4.8911104e-01 4.2031986e-01]] [[-2.2971831e-01 1.9915386e+00 1.3882397e-01 7.4993837e-01 -4.7121218e-01] [-6.5652215e-01 4.6960613e-01 -4.4676565e-02 7.0974547e-01 -1.0079970e+00] [ 1.2540542e+00 1.7691778e-02 -6.5179741e-01 4.9235097e-01 2.5087166e-01] [ 1.9787166e-01 8.0938274e-01 -5.5640030e-01 -6.9367945e-01 
-2.4784769e-01] [ 1.1161138e+00 8.5259128e-01 -1.1560360e+00 -8.8735068e-01 -6.0820144e-02]] [[ 3.0732363e-01 3.6312667e-01 -1.5414889e-01 -1.0877589e+00 -6.1517596e-02] [-2.9782969e-01 -2.2821602e-01 -4.1951793e-01 -5.3486332e-02 -1.0027953e+00] [ 3.5120821e-01 3.6214867e-01 1.7625930e+00 2.0872648e-01 -1.0462666e+00] [ 3.1261107e-01 5.6764781e-01 6.6219771e-01 -7.5327730e-01 -3.1444207e-01] [-2.4486531e-01 3.1368482e-01 3.0971742e-01 1.3719778e-01 1.3666750e+00]] [[-6.7751402e-01 -3.6529783e-02 7.5088012e-01 -8.0967657e-02 -1.5797736e-01] [-6.4368278e-02 -1.0817341e+00 -9.2797279e-01 -3.6441621e-01 6.6892761e-01] [-9.2760801e-01 -1.4592606e-01 1.0353869e+00 1.0460975e+00 4.5915160e-01] [ 1.6520407e+00 6.1595124e-01 4.9182769e-02 1.3389402e-02 -8.1481642e-01] [-6.2906569e-01 1.7979406e-01 1.1636129e-01 -4.5919287e-01 1.0126820e+00]]] [[[-8.4124975e-02 -2.3821807e+00 1.3728826e+00 -1.0492068e-01 9.2340994e-01] [ 9.8978549e-01 2.0832150e+00 8.2508618e-01 9.8804688e-01 -6.7609572e-01] [ 1.2274675e+00 1.6377660e+00 -9.9932872e-02 1.4296008e+00 -3.0153384e+00] [ 1.7522031e+00 -6.2892026e-01 5.1142377e-01 1.1329165e+00 1.9848692e-01] [-3.8629913e+00 -3.9944227e+00 1.2015352e+00 7.7811420e-02 -5.6115246e-01]] [[-6.8086702e-01 3.2292965e-01 7.1639073e-01 -9.7390014e-01 1.1815339e+00] [ 1.5624936e-01 -1.6575619e+00 6.0570759e-01 9.2190576e-01 -1.9465497e-01] [ 5.2727020e-01 2.0479469e+00 -7.6875752e-01 -9.3458885e-01 7.0707840e-01] [ 1.2821180e+00 1.5109798e+00 1.0361871e+00 1.0103033e+00 1.1685513e+00] [ 5.1553536e-02 1.7988787e+00 -3.6125427e-01 1.6197466e+00 -2.5664330e-01]] [[-8.7276608e-01 -3.9667773e+00 -4.7928932e-01 -1.0469594e+00 9.5368497e-02] [ 2.1167750e+00 -8.0872327e-01 1.4976963e+00 -6.6586953e-01 1.7835323e+00] [ 6.2974632e-01 1.0499951e-01 1.5890089e-01 -8.0558765e-01 -2.3127918e+00] [-4.3471712e-01 1.4159490e+00 1.0540830e+00 -1.0327109e+00 8.9843047e-01] [ 3.7738630e-01 -1.7176055e+00 3.7380743e+00 1.1409265e+00 5.9455997e-01]] [[ 1.3362645e+00 
2.1311123e+00 -4.9629754e-01 8.3233571e-01 7.7336508e-01] [ 2.4244757e+00 7.8840697e-01 3.0971086e+00 1.7106292e-01 1.5755932e+00] [ 1.1675751e+00 2.0083361e+00 -2.1167500e+00 2.7183694e-01 6.9059420e-01] [-1.7028058e+00 3.9069606e-03 1.8885846e+00 -1.3194026e+00 5.3463680e-01] [ 2.1509457e+00 2.1628380e+00 -2.1658263e+00 -7.8249568e-01 -1.0079240e+00]] [[ 1.1661774e+00 9.8955345e-01 9.6137863e-01 -2.8534257e-01 -8.7863117e-01] [-9.1144651e-01 -1.2373480e+00 -6.4997423e-01 4.7699323e-01 -2.0772829e+00] [-7.4682885e-01 -1.3215293e+00 3.1941202e+00 4.2239282e-01 -1.0766304e+00] [-1.6326137e+00 -3.8843098e-01 8.6239606e-01 -1.0044463e+00 -2.4523745e+00] [ 9.5530742e-01 -1.3607438e+00 1.5648896e+00 6.3940711e-02 -1.7394185e+00]]]]]; ov_res: [[[[[ 9.1353053e-01 -2.6901848e+00 1.7841444e+00 5.7702327e-01 5.0719726e-01] [-1.2891350e+00 3.0771778e+00 1.9215153e+00 -1.3120345e+00 -3.3776627e+00] [ 9.3450695e-01 1.7936667e+00 -1.2126445e+00 5.1523101e-01 -3.0703611e+00] [-1.3032579e+00 8.8803107e-01 1.5765996e-01 1.6429558e+00 -5.2103168e-01] [-3.9444523e+00 -6.2270647e-01 1.2849981e+00 5.7086086e-01 -5.7399523e-01]] [[-1.1748374e+00 -1.8733518e-01 -4.1397807e-01 4.0629545e-01 2.4085932e+00] [ 4.3988366e-02 -1.2304560e+00 9.4845659e-01 3.4697434e-01 9.4244242e-01] [-4.5351166e-01 2.6415069e+00 4.3473297e-01 1.3524176e+00 5.2132475e-01] [ 5.6474978e-01 1.5193895e-01 1.6812540e+00 5.8522022e-01 -7.6771820e-01] [ 3.8028058e-01 4.0260792e+00 -1.3971033e+00 2.2382369e+00 3.7075892e-01]] [[-2.3108151e+00 7.7668393e-01 4.7527081e-01 3.5336763e-01 -1.2799026e+00] [ 5.9730005e-01 3.4413065e-03 6.4956498e-01 1.0165814e+00 -6.5059048e-01] [ 3.2674141e+00 5.4034173e-01 -3.9237836e-01 -2.3375294e-01 -2.2077005e+00] [ 4.6752220e-01 1.7176911e+00 -4.7065794e-01 -2.2195077e+00 3.1238633e-01] [ 2.6390736e+00 5.3166920e-01 1.0506392e+00 -5.5014592e-01 1.4424353e+00]] [[ 2.9552201e-02 2.7544634e+00 -1.7248785e+00 -1.3187687e+00 -1.5924355e-01] [ 1.8428320e+00 2.4859320e-01 1.5344149e+00 
5.5700511e-02 -8.6010259e-01] [ 1.4863013e+00 2.5139098e+00 1.4540170e+00 -2.5476757e-01 -1.8372730e+00] [-1.1112459e+00 9.6910846e-01 3.0837841e+00 -2.6265070e+00 -1.4429014e+00] [ 1.2485694e+00 1.8665712e+00 -1.7536151e+00 -9.9056309e-01 1.8949176e+00]] [[ 6.1319478e-02 1.1408831e+00 2.9723961e+00 3.7511596e-01 -1.3009927e+00] [-9.3201202e-01 -2.9250295e+00 -1.1548207e+00 -5.7529479e-01 1.0887601e-01] [-2.4447355e+00 -1.0020986e+00 4.9862266e+00 2.6845140e+00 -6.7682713e-01] [ 1.8614802e+00 3.7921840e-01 1.1484895e+00 -1.3054434e+00 -4.6660523e+00] [-1.9645796e+00 -1.2887354e+00 7.1364152e-01 -9.7423047e-01 4.4759858e-02]]] [[[ 2.7102786e-01 -9.5921487e-01 4.3009171e-01 2.6022598e-01 4.8224196e-01] [-7.7216202e-01 6.8187398e-01 7.5468463e-01 -1.3483301e+00 -1.1295794e+00] [ 7.2660506e-02 -2.1763361e-01 -5.1058918e-01 -2.4481888e-01 -3.6237079e-01] [-1.3298326e+00 8.8893515e-01 -9.0447024e-02 5.2983981e-01 -3.1480277e-01] [ 1.8324560e-03 7.4755454e-01 3.0160743e-01 4.7371458e-02 1.3538757e-02]] [[-7.0944643e-01 -1.9599345e-01 -4.1770759e-01 6.6092992e-01 4.2381889e-01] [ 2.5786215e-01 2.4373464e-01 2.4372725e-01 1.8644696e-01 2.1967466e-01] [-1.7817928e-01 4.6695629e-01 6.1769289e-01 3.1141892e-01 2.4187635e-01] [-4.6418411e-01 -7.4076939e-01 4.4901723e-01 8.2119621e-02 -1.0687268e+00] [-4.4244720e-04 1.3665816e+00 -5.2804792e-01 4.8911104e-01 4.2031986e-01]] [[-2.2971831e-01 1.9915386e+00 1.3882397e-01 7.4993837e-01 -4.7121218e-01] [-6.5652215e-01 4.6960613e-01 -4.4676565e-02 7.0974547e-01 -1.0079970e+00] [ 1.2540542e+00 1.7691778e-02 -6.5179741e-01 4.9235097e-01 2.5087166e-01] [ 1.9787166e-01 8.0938274e-01 -5.5640030e-01 -6.9367945e-01 -2.4784769e-01] [ 1.1161138e+00 8.5259128e-01 -1.1560360e+00 -8.8735068e-01 -6.0820144e-02]] [[ 3.0732363e-01 3.6312667e-01 -1.5414889e-01 -1.0877589e+00 -6.1517596e-02] [-2.9782969e-01 -2.2821602e-01 -4.1951793e-01 -5.3486332e-02 -1.0027953e+00] [ 3.5120821e-01 3.6214867e-01 1.7625930e+00 2.0872648e-01 -1.0462666e+00] [ 
3.1261107e-01 5.6764781e-01 6.6219771e-01 -7.5327730e-01 -3.1444207e-01] [-2.4486531e-01 3.1368482e-01 3.0971742e-01 1.3719778e-01 1.3666750e+00]] [[-6.7751402e-01 -3.6529783e-02 7.5088012e-01 -8.0967657e-02 -1.5797736e-01] [-6.4368278e-02 -1.0817341e+00 -9.2797279e-01 -3.6441621e-01 6.6892761e-01] [-9.2760801e-01 -1.4592606e-01 1.0353869e+00 1.0460975e+00 4.5915160e-01] [ 1.6520407e+00 6.1595124e-01 4.9182769e-02 1.3389402e-02 -8.1481642e-01] [-6.2906569e-01 1.7979406e-01 1.1636129e-01 -4.5919287e-01 1.0126820e+00]]] [[[-8.4124975e-02 -2.3821807e+00 1.3728826e+00 -1.0492068e-01 9.2340994e-01] [ 9.8978549e-01 2.0832150e+00 8.2508618e-01 9.8804688e-01 -6.7609572e-01] [ 1.2274675e+00 1.6377660e+00 -9.9932872e-02 1.4296008e+00 -3.0153384e+00] [ 1.7522031e+00 -6.2892026e-01 5.1142377e-01 1.1329165e+00 1.9848692e-01] [-3.8629913e+00 -3.9944227e+00 1.2015352e+00 7.7811420e-02 -5.6115246e-01]] [[-6.8086702e-01 3.2292965e-01 7.1639073e-01 -9.7390014e-01 1.1815339e+00] [ 1.5624936e-01 -1.6575619e+00 6.0570759e-01 9.2190576e-01 -1.9465497e-01] [ 5.2727020e-01 2.0479469e+00 -7.6875752e-01 -9.3458885e-01 7.0707840e-01] [ 1.2821180e+00 1.5109798e+00 1.0361871e+00 1.0103033e+00 1.1685513e+00] [ 5.1553536e-02 1.7988787e+00 -3.6125427e-01 1.6197466e+00 -2.5664330e-01]] [[-8.7276608e-01 -3.9667773e+00 -4.7928932e-01 -1.0469594e+00 9.5368497e-02] [ 2.1167750e+00 -8.0872327e-01 1.4976963e+00 -6.6586953e-01 1.7835323e+00] [ 6.2974632e-01 1.0499951e-01 1.5890089e-01 -8.0558765e-01 -2.3127918e+00] [-4.3471712e-01 1.4159490e+00 1.0540830e+00 -1.0327109e+00 8.9843047e-01] [ 3.7738630e-01 -1.7176055e+00 3.7380743e+00 1.1409265e+00 5.9455997e-01]] [[ 1.3362645e+00 2.1311123e+00 -4.9629754e-01 8.3233571e-01 7.7336508e-01] [ 2.4244757e+00 7.8840697e-01 3.0971086e+00 1.7106292e-01 1.5755932e+00] [ 1.1675751e+00 2.0083361e+00 -2.1167500e+00 2.7183694e-01 6.9059420e-01] [-1.7028058e+00 3.9069606e-03 1.8885846e+00 -1.3194026e+00 5.3463680e-01] [ 2.1509457e+00 2.1628380e+00 -2.1658263e+00 
-7.8249568e-01 -1.0079240e+00]] [[ 1.1661774e+00 9.8955345e-01 9.6137863e-01 -2.8534257e-01 -8.7863117e-01] [-9.1144651e-01 -1.2373480e+00 -6.4997423e-01 4.7699323e-01 -2.0772829e+00] [-7.4682885e-01 -1.3215293e+00 3.1941202e+00 4.2239282e-01 -1.0766304e+00] [-1.6326137e+00 -3.8843098e-01 8.6239606e-01 -1.0044463e+00 -2.4523745e+00] [ 9.5530742e-01 -1.3607438e+00 1.5648896e+00 6.3940711e-02 -1.7394185e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 3, 1, 1, 1], 'strides': [2, 1, 1], 'pads': [0, 0, 1], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1018.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.output_padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.transposed : bool = prim::Constant[value=0]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 1]]() %self.strides : int[] = prim::Constant[value=[2, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 3, 1, 1, 1, strides=[3, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.3102 (2,1,1,.,.) = 0.2974 (3,1,1,.,.) = 0.8225 (1,2,1,.,.) = -0.4284 (2,2,1,.,.) = 0.01 * -5.4543 (3,2,1,.,.) = -0.9398 (1,3,1,.,.) = 1.3789 (2,3,1,.,.) = -0.9552 (3,3,1,.,.) = -0.3605 [ CPUFloatType{3,3,1,1,1} ]]() %10 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.output_padding, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%10) fw_re: [[[[[ 0.00000000e+00 1.92897654e+00 5.03748238e-01 ... 2.06894207e+00 -2.45043755e+00 0.00000000e+00] [ 0.00000000e+00 2.29537964e-01 -5.37036777e-01 ... 1.19557309e+00 8.66291821e-01 0.00000000e+00] [ 0.00000000e+00 1.61668229e+00 7.90027201e-01 ... 2.54237556e+00 -4.25832367e+00 0.00000000e+00] ... [ 0.00000000e+00 -7.26409197e-01 -1.74004292e+00 ... -1.34240463e-01 1.68045640e-01 0.00000000e+00] [ 0.00000000e+00 -1.50816762e+00 5.37045360e-01 ... -7.96802461e-01 9.83816087e-01 0.00000000e+00] [ 0.00000000e+00 5.87680638e-01 1.26147187e+00 ... -6.14036739e-01 5.93702316e-01 0.00000000e+00]] [[ 0.00000000e+00 2.09569976e-01 2.66313642e-01 ... -3.92797256e+00 -1.40440369e+00 0.00000000e+00] [ 0.00000000e+00 6.37468845e-02 -1.40380180e+00 ... 
-5.42629004e-01 3.16957521e+00 0.00000000e+00] [ 0.00000000e+00 -6.26760483e-01 9.72192466e-01 ... 1.75295269e+00 3.77324790e-01 0.00000000e+00] ... [ 0.00000000e+00 1.14345253e+00 -2.06730413e+00 ... -1.65806878e+00 4.42436308e-01 0.00000000e+00] [ 0.00000000e+00 -9.98683333e-01 2.38378119e+00 ... 1.15832293e+00 -1.46017003e+00 0.00000000e+00] [ 0.00000000e+00 -3.17180276e-01 -4.26917404e-01 ... -5.06728828e-01 3.91733599e+00 0.00000000e+00]] [[ 0.00000000e+00 2.37988877e+00 1.31338465e+00 ... 1.52701712e+00 7.18412280e-01 0.00000000e+00] [ 0.00000000e+00 2.00616550e+00 -2.46477413e+00 ... 2.37288880e+00 6.49962962e-01 0.00000000e+00] [ 0.00000000e+00 -3.57599592e+00 1.18319488e+00 ... -1.19455242e+00 -6.98079094e-02 0.00000000e+00] ... [ 0.00000000e+00 -1.37799442e-01 -1.54036999e-01 ... 1.39952695e+00 -2.28277847e-01 0.00000000e+00] [ 0.00000000e+00 1.65637642e-01 3.15958118e+00 ... 1.29308438e+00 -2.10707736e+00 0.00000000e+00] [ 0.00000000e+00 -4.90494430e-01 -3.41694027e-01 ... 4.21420962e-01 -7.54173100e-01 0.00000000e+00]] [[ 0.00000000e+00 -9.15275037e-01 -2.43371987e+00 ... 7.22178102e-01 -1.13219357e+00 0.00000000e+00] [ 0.00000000e+00 8.38098407e-01 -1.07661021e+00 ... 2.58189392e+00 1.45594203e+00 0.00000000e+00] [ 0.00000000e+00 3.57275875e-03 8.87638032e-01 ... 1.88599646e+00 1.50704980e+00 0.00000000e+00] ... [ 0.00000000e+00 1.51669383e+00 2.40592241e+00 ... -6.22226596e-01 -5.85232735e-01 0.00000000e+00] [ 0.00000000e+00 -9.81719971e-01 -2.06826955e-01 ... 1.89381063e+00 -1.77142382e+00 0.00000000e+00] [ 0.00000000e+00 -1.86172581e+00 -1.02422154e+00 ... 1.10973632e+00 2.16070563e-01 0.00000000e+00]] [[ 0.00000000e+00 9.11552846e-01 -7.21427441e-01 ... -7.40372896e-01 -1.65803099e+00 0.00000000e+00] [ 0.00000000e+00 1.97064114e+00 3.36942151e-02 ... -9.07663703e-02 2.32395124e+00 0.00000000e+00] [ 0.00000000e+00 6.32420003e-01 -5.67543459e+00 ... -7.67666996e-02 5.87942660e-01 0.00000000e+00] ... 
[ 0.00000000e+00 -1.12082160e+00 -1.94225669e+00 ... -4.52718884e-01 -3.61228615e-01 0.00000000e+00] [ 0.00000000e+00 1.85580349e+00 -2.14568353e+00 ... -1.85150647e+00 5.76063633e-01 0.00000000e+00] [ 0.00000000e+00 -2.41331935e+00 1.71481192e+00 ... 2.12984651e-01 5.04958510e-01 0.00000000e+00]]] [[[ 0.00000000e+00 -9.46717978e-01 -6.91611946e-01 ... -1.04828608e+00 1.80310678e+00 0.00000000e+00] [ 0.00000000e+00 1.51747674e-01 5.64754486e-01 ... -1.01611507e+00 -4.75132376e-01 0.00000000e+00] [ 0.00000000e+00 -1.08215380e+00 -3.38762581e-01 ... -1.80809784e+00 2.80039191e+00 0.00000000e+00] ... [ 0.00000000e+00 7.66698062e-01 1.04260290e+00 ... 2.88300246e-01 -4.69257295e-01 0.00000000e+00] [ 0.00000000e+00 1.08176112e-01 -1.10871665e-01 ... 1.59240574e-01 -6.85030103e-01 0.00000000e+00] [ 0.00000000e+00 -7.76684999e-01 -1.40709782e+00 ... 7.26652443e-01 -6.89917922e-01 0.00000000e+00]] [[ 0.00000000e+00 -1.61760226e-01 -4.86106753e-01 ... 2.56816244e+00 7.29574502e-01 0.00000000e+00] [ 0.00000000e+00 -1.84752017e-01 8.43886316e-01 ... 9.14365947e-01 -1.93596160e+00 0.00000000e+00] [ 0.00000000e+00 8.80795121e-01 -6.01067662e-01 ... -1.87291598e+00 -7.42472351e-01 0.00000000e+00] ... [ 0.00000000e+00 -5.55875361e-01 1.39384508e+00 ... 1.72676170e+00 -3.92121434e-01 0.00000000e+00] [ 0.00000000e+00 -5.23232073e-02 -1.92120183e+00 ... -7.71220088e-01 1.00314128e+00 0.00000000e+00] [ 0.00000000e+00 5.87092414e-02 9.72894132e-02 ... 5.63879371e-01 -2.02500415e+00 0.00000000e+00]] [[ 0.00000000e+00 -2.04327822e+00 -1.25847614e+00 ... -7.45314300e-01 -3.73857856e-01 0.00000000e+00] [ 0.00000000e+00 -1.55033648e+00 1.64934039e+00 ... -1.46546686e+00 2.21611217e-01 0.00000000e+00] [ 0.00000000e+00 2.55893207e+00 -5.42615592e-01 ... 6.88223779e-01 4.73368555e-01 0.00000000e+00] ... [ 0.00000000e+00 -6.75599501e-02 -5.85430749e-02 ... -7.88186789e-01 1.24074556e-02 0.00000000e+00] [ 0.00000000e+00 -1.01257622e+00 -1.92655611e+00 ... 
-1.79304218e+00 1.41244853e+00 0.00000000e+00] [ 0.00000000e+00 4.36228305e-01 2.65724659e-01 ... -1.07880008e+00 6.42955005e-01 0.00000000e+00]] [[ 0.00000000e+00 1.14759183e+00 1.54778147e+00 ... -4.98588085e-01 9.04097974e-01 0.00000000e+00] [ 0.00000000e+00 -6.51547909e-01 6.27884090e-01 ... -1.34422743e+00 -1.29178965e+00 0.00000000e+00] [ 0.00000000e+00 3.64466965e-01 -6.33315369e-02 ... -8.70796263e-01 -1.51010072e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.77124953e+00 -8.63663197e-01 ... 1.11517712e-01 -5.62766902e-02 0.00000000e+00] [ 0.00000000e+00 9.03635085e-01 1.10626958e-01 ... -9.87862527e-01 1.45922315e+00 0.00000000e+00] [ 0.00000000e+00 9.87618506e-01 7.56073713e-01 ... -4.02086973e-01 -1.25594912e-02 0.00000000e+00]] [[ 0.00000000e+00 -8.43149543e-01 3.75381172e-01 ... 4.79141504e-01 1.28768742e+00 0.00000000e+00] [ 0.00000000e+00 -1.08067524e+00 -3.45954329e-01 ... 1.99461430e-01 -1.06063592e+00 0.00000000e+00] [ 0.00000000e+00 -8.57554436e-01 3.36640334e+00 ... -9.14061815e-02 -3.07003558e-02 0.00000000e+00] ... [ 0.00000000e+00 1.83895156e-01 8.43013525e-01 ... -2.19877362e-02 4.24788147e-01 0.00000000e+00] [ 0.00000000e+00 -1.15804017e+00 1.59934318e+00 ... 1.33804500e+00 -4.01460916e-01 0.00000000e+00] [ 0.00000000e+00 9.75299060e-01 -1.93815202e-01 ... 5.47732532e-01 -1.19675481e+00 0.00000000e+00]]] [[[ 0.00000000e+00 7.39868283e-01 -1.79550374e+00 ... 2.74491042e-01 1.55797768e+00 0.00000000e+00] [ 0.00000000e+00 1.35425162e+00 6.65805459e-01 ... -5.65172195e-01 1.06910121e+00 0.00000000e+00] [ 0.00000000e+00 -3.10846031e-01 -4.84375693e-02 ... -1.60908318e+00 8.61835957e-01 0.00000000e+00] ... [ 0.00000000e+00 1.03825223e+00 -4.50567603e-01 ... 1.03647661e+00 -1.02747560e+00 0.00000000e+00] [ 0.00000000e+00 -2.46851516e+00 2.83058643e-01 ... -9.75390434e-01 -1.07204902e+00 0.00000000e+00] [ 0.00000000e+00 -1.31274557e+00 -2.30415058e+00 ... 
1.02780128e+00 -1.09701300e+00 0.00000000e+00]] [[ 0.00000000e+00 -3.27125996e-01 -1.70925534e+00 ... 9.35201719e-02 1.06821823e+00 0.00000000e+00] [ 0.00000000e+00 -8.47171605e-01 -6.77011788e-01 ... 1.74690700e+00 3.56743811e-03 0.00000000e+00] [ 0.00000000e+00 2.71873879e+00 4.09773946e-01 ... -3.06413078e+00 -1.68157697e+00 0.00000000e+00] ... [ 0.00000000e+00 6.42402291e-01 9.13862765e-01 ... 2.74045634e+00 -3.83709341e-01 0.00000000e+00] [ 0.00000000e+00 -2.07383585e+00 -2.19688606e+00 ... -3.39319170e-01 -1.92200225e-02 0.00000000e+00] [ 0.00000000e+00 1.97419956e-01 -9.17290866e-01 ... 6.95787907e-01 1.08657622e+00 0.00000000e+00]] [[ 0.00000000e+00 -2.03226233e+00 -1.39349973e+00 ... -9.48991403e-02 8.17479253e-01 0.00000000e+00] [ 0.00000000e+00 -1.12889457e+00 1.94588435e+00 ... 9.76255015e-02 1.62426925e+00 0.00000000e+00] [ 0.00000000e+00 1.72916842e+00 -2.02968568e-02 ... 1.66395932e-01 2.50568533e+00 0.00000000e+00] ... [ 0.00000000e+00 -4.10157472e-01 -2.48909265e-01 ... -2.52091195e-02 -8.38846028e-01 0.00000000e+00] [ 0.00000000e+00 -2.60164523e+00 -7.66798779e-02 ... -3.48034143e+00 7.79503822e-01 0.00000000e+00] [ 0.00000000e+00 9.63359237e-01 -1.37936786e-01 ... -2.67520952e+00 1.19763935e+00 0.00000000e+00]] [[ 0.00000000e+00 1.26181340e+00 5.37435710e-01 ... 7.17402220e-01 7.72830009e-01 0.00000000e+00] [ 0.00000000e+00 1.18105568e-01 -3.93267661e-01 ... 9.38332260e-01 -6.25131249e-01 0.00000000e+00] [ 0.00000000e+00 7.36810267e-01 2.19646454e+00 ... 7.14767575e-01 -2.67329407e+00 0.00000000e+00] ... [ 0.00000000e+00 -2.83797622e+00 2.45368958e+00 ... 2.06604987e-01 -1.44620609e+00 0.00000000e+00] [ 0.00000000e+00 1.24246383e+00 5.54448009e-01 ... 4.95155931e-01 1.84150982e+00 0.00000000e+00] [ 0.00000000e+00 -5.49941361e-01 4.74238276e-01 ... 5.63783050e-01 1.93116561e-01 0.00000000e+00]] [[ 0.00000000e+00 -1.45019233e+00 -1.18158780e-01 ... 1.51670694e-01 1.07299173e+00 0.00000000e+00] [ 0.00000000e+00 5.19327939e-01 -5.09405613e-01 ... 
2.90460020e-01 1.50038421e+00 0.00000000e+00] [ 0.00000000e+00 -1.37355185e+00 4.88620460e-01 ... -1.08978379e+00 3.86236638e-01 0.00000000e+00] ... [ 0.00000000e+00 -2.04398608e+00 -1.28893220e+00 ... -1.05858648e+00 4.15562361e-01 0.00000000e+00] [ 0.00000000e+00 4.55516040e-01 7.40392387e-01 ... 1.00321794e+00 1.47019729e-01 0.00000000e+00] [ 0.00000000e+00 -1.75697601e+00 2.59768128e+00 ... 2.00769711e+00 -3.34516573e+00 0.00000000e+00]]]]]; ov_res: [[[[[ 0.00000000e+00 1.92897654e+00 5.03748238e-01 ... 2.06894207e+00 -2.45043755e+00 0.00000000e+00] [ 0.00000000e+00 2.29537964e-01 -5.37036777e-01 ... 1.19557309e+00 8.66291821e-01 0.00000000e+00] [ 0.00000000e+00 1.61668229e+00 7.90027201e-01 ... 2.54237556e+00 -4.25832367e+00 0.00000000e+00] ... [ 0.00000000e+00 -7.26409197e-01 -1.74004292e+00 ... -1.34240463e-01 1.68045640e-01 0.00000000e+00] [ 0.00000000e+00 -1.50816762e+00 5.37045360e-01 ... -7.96802461e-01 9.83816087e-01 0.00000000e+00] [ 0.00000000e+00 5.87680638e-01 1.26147187e+00 ... -6.14036739e-01 5.93702316e-01 0.00000000e+00]] [[ 0.00000000e+00 2.09569976e-01 2.66313642e-01 ... -3.92797256e+00 -1.40440369e+00 0.00000000e+00] [ 0.00000000e+00 6.37468845e-02 -1.40380180e+00 ... -5.42629004e-01 3.16957521e+00 0.00000000e+00] [ 0.00000000e+00 -6.26760483e-01 9.72192466e-01 ... 1.75295269e+00 3.77324790e-01 0.00000000e+00] ... [ 0.00000000e+00 1.14345253e+00 -2.06730413e+00 ... -1.65806878e+00 4.42436308e-01 0.00000000e+00] [ 0.00000000e+00 -9.98683333e-01 2.38378119e+00 ... 1.15832293e+00 -1.46017003e+00 0.00000000e+00] [ 0.00000000e+00 -3.17180276e-01 -4.26917404e-01 ... -5.06728828e-01 3.91733599e+00 0.00000000e+00]] [[ 0.00000000e+00 2.37988877e+00 1.31338465e+00 ... 1.52701712e+00 7.18412280e-01 0.00000000e+00] [ 0.00000000e+00 2.00616550e+00 -2.46477413e+00 ... 2.37288880e+00 6.49962962e-01 0.00000000e+00] [ 0.00000000e+00 -3.57599592e+00 1.18319488e+00 ... -1.19455242e+00 -6.98079094e-02 0.00000000e+00] ... 
[ 0.00000000e+00 -1.37799442e-01 -1.54036999e-01 ... 1.39952695e+00 -2.28277847e-01 0.00000000e+00] [ 0.00000000e+00 1.65637642e-01 3.15958118e+00 ... 1.29308438e+00 -2.10707736e+00 0.00000000e+00] [ 0.00000000e+00 -4.90494430e-01 -3.41694027e-01 ... 4.21420962e-01 -7.54173100e-01 0.00000000e+00]] [[ 0.00000000e+00 -9.15275037e-01 -2.43371987e+00 ... 7.22178102e-01 -1.13219357e+00 0.00000000e+00] [ 0.00000000e+00 8.38098407e-01 -1.07661021e+00 ... 2.58189392e+00 1.45594203e+00 0.00000000e+00] [ 0.00000000e+00 3.57275875e-03 8.87638032e-01 ... 1.88599646e+00 1.50704980e+00 0.00000000e+00] ... [ 0.00000000e+00 1.51669383e+00 2.40592241e+00 ... -6.22226596e-01 -5.85232735e-01 0.00000000e+00] [ 0.00000000e+00 -9.81719971e-01 -2.06826955e-01 ... 1.89381063e+00 -1.77142382e+00 0.00000000e+00] [ 0.00000000e+00 -1.86172581e+00 -1.02422154e+00 ... 1.10973632e+00 2.16070563e-01 0.00000000e+00]] [[ 0.00000000e+00 9.11552846e-01 -7.21427441e-01 ... -7.40372896e-01 -1.65803099e+00 0.00000000e+00] [ 0.00000000e+00 1.97064114e+00 3.36942151e-02 ... -9.07663703e-02 2.32395124e+00 0.00000000e+00] [ 0.00000000e+00 6.32420003e-01 -5.67543459e+00 ... -7.67666996e-02 5.87942660e-01 0.00000000e+00] ... [ 0.00000000e+00 -1.12082160e+00 -1.94225669e+00 ... -4.52718884e-01 -3.61228615e-01 0.00000000e+00] [ 0.00000000e+00 1.85580349e+00 -2.14568353e+00 ... -1.85150647e+00 5.76063633e-01 0.00000000e+00] [ 0.00000000e+00 -2.41331935e+00 1.71481192e+00 ... 2.12984651e-01 5.04958510e-01 0.00000000e+00]]] [[[ 0.00000000e+00 -9.46717978e-01 -6.91611946e-01 ... -1.04828608e+00 1.80310678e+00 0.00000000e+00] [ 0.00000000e+00 1.51747674e-01 5.64754486e-01 ... -1.01611507e+00 -4.75132376e-01 0.00000000e+00] [ 0.00000000e+00 -1.08215380e+00 -3.38762581e-01 ... -1.80809784e+00 2.80039191e+00 0.00000000e+00] ... [ 0.00000000e+00 7.66698062e-01 1.04260290e+00 ... 2.88300246e-01 -4.69257295e-01 0.00000000e+00] [ 0.00000000e+00 1.08176112e-01 -1.10871665e-01 ... 
1.59240574e-01 -6.85030103e-01 0.00000000e+00] [ 0.00000000e+00 -7.76684999e-01 -1.40709782e+00 ... 7.26652443e-01 -6.89917922e-01 0.00000000e+00]] [[ 0.00000000e+00 -1.61760226e-01 -4.86106753e-01 ... 2.56816244e+00 7.29574502e-01 0.00000000e+00] [ 0.00000000e+00 -1.84752017e-01 8.43886316e-01 ... 9.14365947e-01 -1.93596160e+00 0.00000000e+00] [ 0.00000000e+00 8.80795121e-01 -6.01067662e-01 ... -1.87291598e+00 -7.42472351e-01 0.00000000e+00] ... [ 0.00000000e+00 -5.55875361e-01 1.39384508e+00 ... 1.72676170e+00 -3.92121434e-01 0.00000000e+00] [ 0.00000000e+00 -5.23232073e-02 -1.92120183e+00 ... -7.71220088e-01 1.00314128e+00 0.00000000e+00] [ 0.00000000e+00 5.87092414e-02 9.72894132e-02 ... 5.63879371e-01 -2.02500415e+00 0.00000000e+00]] [[ 0.00000000e+00 -2.04327822e+00 -1.25847614e+00 ... -7.45314300e-01 -3.73857856e-01 0.00000000e+00] [ 0.00000000e+00 -1.55033648e+00 1.64934039e+00 ... -1.46546686e+00 2.21611217e-01 0.00000000e+00] [ 0.00000000e+00 2.55893207e+00 -5.42615592e-01 ... 6.88223779e-01 4.73368555e-01 0.00000000e+00] ... [ 0.00000000e+00 -6.75599501e-02 -5.85430749e-02 ... -7.88186789e-01 1.24074556e-02 0.00000000e+00] [ 0.00000000e+00 -1.01257622e+00 -1.92655611e+00 ... -1.79304218e+00 1.41244853e+00 0.00000000e+00] [ 0.00000000e+00 4.36228305e-01 2.65724659e-01 ... -1.07880008e+00 6.42955005e-01 0.00000000e+00]] [[ 0.00000000e+00 1.14759183e+00 1.54778147e+00 ... -4.98588085e-01 9.04097974e-01 0.00000000e+00] [ 0.00000000e+00 -6.51547909e-01 6.27884090e-01 ... -1.34422743e+00 -1.29178965e+00 0.00000000e+00] [ 0.00000000e+00 3.64466965e-01 -6.33315369e-02 ... -8.70796263e-01 -1.51010072e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.77124953e+00 -8.63663197e-01 ... 1.11517712e-01 -5.62766902e-02 0.00000000e+00] [ 0.00000000e+00 9.03635085e-01 1.10626958e-01 ... -9.87862527e-01 1.45922315e+00 0.00000000e+00] [ 0.00000000e+00 9.87618506e-01 7.56073713e-01 ... 
-4.02086973e-01 -1.25594912e-02 0.00000000e+00]] [[ 0.00000000e+00 -8.43149543e-01 3.75381172e-01 ... 4.79141504e-01 1.28768742e+00 0.00000000e+00] [ 0.00000000e+00 -1.08067524e+00 -3.45954329e-01 ... 1.99461430e-01 -1.06063592e+00 0.00000000e+00] [ 0.00000000e+00 -8.57554436e-01 3.36640334e+00 ... -9.14061815e-02 -3.07003558e-02 0.00000000e+00] ... [ 0.00000000e+00 1.83895156e-01 8.43013525e-01 ... -2.19877362e-02 4.24788147e-01 0.00000000e+00] [ 0.00000000e+00 -1.15804017e+00 1.59934318e+00 ... 1.33804500e+00 -4.01460916e-01 0.00000000e+00] [ 0.00000000e+00 9.75299060e-01 -1.93815202e-01 ... 5.47732532e-01 -1.19675481e+00 0.00000000e+00]]] [[[ 0.00000000e+00 7.39868283e-01 -1.79550374e+00 ... 2.74491042e-01 1.55797768e+00 0.00000000e+00] [ 0.00000000e+00 1.35425162e+00 6.65805459e-01 ... -5.65172195e-01 1.06910121e+00 0.00000000e+00] [ 0.00000000e+00 -3.10846031e-01 -4.84375693e-02 ... -1.60908318e+00 8.61835957e-01 0.00000000e+00] ... [ 0.00000000e+00 1.03825223e+00 -4.50567603e-01 ... 1.03647661e+00 -1.02747560e+00 0.00000000e+00] [ 0.00000000e+00 -2.46851516e+00 2.83058643e-01 ... -9.75390434e-01 -1.07204902e+00 0.00000000e+00] [ 0.00000000e+00 -1.31274557e+00 -2.30415058e+00 ... 1.02780128e+00 -1.09701300e+00 0.00000000e+00]] [[ 0.00000000e+00 -3.27125996e-01 -1.70925534e+00 ... 9.35201719e-02 1.06821823e+00 0.00000000e+00] [ 0.00000000e+00 -8.47171605e-01 -6.77011788e-01 ... 1.74690700e+00 3.56743811e-03 0.00000000e+00] [ 0.00000000e+00 2.71873879e+00 4.09773946e-01 ... -3.06413078e+00 -1.68157697e+00 0.00000000e+00] ... [ 0.00000000e+00 6.42402291e-01 9.13862765e-01 ... 2.74045634e+00 -3.83709341e-01 0.00000000e+00] [ 0.00000000e+00 -2.07383585e+00 -2.19688606e+00 ... -3.39319170e-01 -1.92200225e-02 0.00000000e+00] [ 0.00000000e+00 1.97419956e-01 -9.17290866e-01 ... 6.95787907e-01 1.08657622e+00 0.00000000e+00]] [[ 0.00000000e+00 -2.03226233e+00 -1.39349973e+00 ... 
-9.48991403e-02 8.17479253e-01 0.00000000e+00] [ 0.00000000e+00 -1.12889457e+00 1.94588435e+00 ... 9.76255015e-02 1.62426925e+00 0.00000000e+00] [ 0.00000000e+00 1.72916842e+00 -2.02968568e-02 ... 1.66395932e-01 2.50568533e+00 0.00000000e+00] ... [ 0.00000000e+00 -4.10157472e-01 -2.48909265e-01 ... -2.52091195e-02 -8.38846028e-01 0.00000000e+00] [ 0.00000000e+00 -2.60164523e+00 -7.66798779e-02 ... -3.48034143e+00 7.79503822e-01 0.00000000e+00] [ 0.00000000e+00 9.63359237e-01 -1.37936786e-01 ... -2.67520952e+00 1.19763935e+00 0.00000000e+00]] [[ 0.00000000e+00 1.26181340e+00 5.37435710e-01 ... 7.17402220e-01 7.72830009e-01 0.00000000e+00] [ 0.00000000e+00 1.18105568e-01 -3.93267661e-01 ... 9.38332260e-01 -6.25131249e-01 0.00000000e+00] [ 0.00000000e+00 7.36810267e-01 2.19646454e+00 ... 7.14767575e-01 -2.67329407e+00 0.00000000e+00] ... [ 0.00000000e+00 -2.83797622e+00 2.45368958e+00 ... 2.06604987e-01 -1.44620609e+00 0.00000000e+00] [ 0.00000000e+00 1.24246383e+00 5.54448009e-01 ... 4.95155931e-01 1.84150982e+00 0.00000000e+00] [ 0.00000000e+00 -5.49941361e-01 4.74238276e-01 ... 5.63783050e-01 1.93116561e-01 0.00000000e+00]] [[ 0.00000000e+00 -1.45019233e+00 -1.18158780e-01 ... 1.51670694e-01 1.07299173e+00 0.00000000e+00] [ 0.00000000e+00 5.19327939e-01 -5.09405613e-01 ... 2.90460020e-01 1.50038421e+00 0.00000000e+00] [ 0.00000000e+00 -1.37355185e+00 4.88620460e-01 ... -1.08978379e+00 3.86236638e-01 0.00000000e+00] ... [ 0.00000000e+00 -2.04398608e+00 -1.28893220e+00 ... -1.05858648e+00 4.15562361e-01 0.00000000e+00] [ 0.00000000e+00 4.55516040e-01 7.40392387e-01 ... 1.00321794e+00 1.47019729e-01 0.00000000e+00] [ 0.00000000e+00 -1.75697601e+00 2.59768128e+00 ... 2.00769711e+00 -3.34516573e+00 0.00000000e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [0, 0, 0], 'dilations': [1, 1, 1], 'groups': 1, 'output_padding': [0, 0, 0], 'transposed': True} 1] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1020.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : int[] = prim::Constant[value=[0, 0, 0]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.7235 (2,1,1,.,.) = 0.3664 (3,1,1,.,.) = -0.4082 [ CPUFloatType{3,1,1,1,1} ]]() %9 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%9) fw_re: [[[[[ 1.742876 0. 3.1323593 ... 0.15677789 0. -1.8103265 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.8606741 0. 0.583982 ... -1.5030078 0. 1.2203388 ] ... [ 0.01692031 0. 0.26294065 ... -1.2272882 0. -0.48148516] [ 0. 0. 0. ... 0. 0. 0. ] [-1.8935907 0. 0.6588843 ... 2.8433633 0. 0.20295364]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-1.5092901 0. -0.35092127 ... 2.9153628 0. 2.4567945 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.0278656 0. 0.2472792 ... -0.11160328 0. -0.99549425] ... [ 1.3113683 0. -2.5359828 ... -0.56442046 0. 0.8897643 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.2790325 0. -0.97233534 ... 0.8606339 0. -0.12870693]] ... [[-1.7141504 0. 0.85151017 ... -0.8477549 0. -1.0343802 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.5603162 0. 1.4853591 ... -0.30671203 0. 1.4179116 ] ... [-2.4052732 0. 1.2008532 ... -0.9338674 0. -0.6232554 ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.94220185 0. 2.462473 ... 0.49999416 0. -2.1367316 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 
0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 2.2942536 0. 5.8692174 ... 1.5626177 0. 1.1707977 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.7050904 0. -2.47914 ... -1.2910099 0. -0.2254691 ] ... [-1.7751492 0. 2.0332243 ... -1.0813079 0. 3.4671786 ] [ 0. 0. 0. ... 0. 0. 0. ] [-1.9021289 0. -0.02356471 ... -1.7339145 0. 0.5954328 ]]]]]; ov_res: [[[[[ 1.742876 0. 3.1323593 ... 0.15677789 0. -1.8103265 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.8606741 0. 0.583982 ... -1.5030078 0. 1.2203388 ] ... [ 0.01692031 0. 0.26294065 ... -1.2272882 0. -0.48148516] [ 0. 0. 0. ... 0. 0. 0. ] [-1.8935907 0. 0.6588843 ... 2.8433633 0. 0.20295364]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[-1.5092901 0. -0.35092127 ... 2.9153628 0. 2.4567945 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.0278656 0. 0.2472792 ... -0.11160328 0. -0.99549425] ... [ 1.3113683 0. -2.5359828 ... -0.56442046 0. 0.8897643 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.2790325 0. -0.97233534 ... 0.8606339 0. -0.12870693]] ... [[-1.7141504 0. 0.85151017 ... -0.8477549 0. -1.0343802 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.5603162 0. 1.4853591 ... -0.30671203 0. 1.4179116 ] ... [-2.4052732 0. 1.2008532 ... -0.9338674 0. -0.6232554 ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.94220185 0. 2.462473 ... 0.49999416 0. -2.1367316 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 2.2942536 0. 5.8692174 ... 1.5626177 0. 1.1707977 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.7050904 0. -2.47914 ... -1.2910099 0. -0.2254691 ] ... [-1.7751492 0. 2.0332243 ... -1.0813079 0. 3.4671786 ] [ 0. 0. 0. ... 0. 0. 0. ] [-1.9021289 0. -0.02356471 ... -1.7339145 0. 0.5954328 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution.py::TestConvolution::test_convolution3d[ ie_device:CPU - precision:FP32 - underscore:False - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [2, 2, 2], 'bias_shape': [1], 'pads': [1, 1, 1], 'dilations': [2, 2, 2], 'groups': 1, 'output_padding': [1, 1, 1], 'transposed': True} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution.___torch_mangle_1022.aten_convolution, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.transposed : bool = prim::Constant[value=1]() %self.pads : int[] = prim::Constant[value=[1, 1, 1]]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.4663 (2,1,1,.,.) = 1.2626 (3,1,1,.,.) = 2.2395 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::convolution(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.transposed, %self.pads, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution.py:187:23 return (%8) fw_re: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.97536755 0. ... -0.8956372 0. 1.0696757 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. -1.5707535 0. ... 2.4310637 0. 0.1291278 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 3.2384057 0. ... 0.77284235 0. 3.3047903 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -3.0502892 0. ... 2.42775 0. 0.24885866] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. -0.7968318 0. ... -0.78209126 0. 3.6394916 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.19577444 0. ... 1.2381591 0. -2.9375348 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.2726926 0. ... -0.584097 0. -1.1046387 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. -0.03214129 0. ... -1.0471772 0. 
-4.798639 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.4492205 0. ... -0.5979496 0. 2.2594247 ]]]]]; ov_res: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.97536755 0. ... -0.8956372 0. 1.0696757 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. -1.5707535 0. ... 2.4310637 0. 0.1291278 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 3.2384057 0. ... 0.77284235 0. 3.3047903 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. -3.0502892 0. ... 2.42775 0. 0.24885866] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. -0.7968318 0. ... -0.78209126 0. 3.6394916 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.19577444 0. ... 1.2381591 0. -2.9375348 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.2726926 0. ... -0.584097 0. -1.1046387 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. -0.03214129 0. ... -1.0471772 0. -4.798639 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.4492205 0. ... -0.5979496 0. 2.2594247 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': [1, 1], 'pads': 'same', 'dilations': [1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1023.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.08964}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[ -4.408004 -2.8626785 9.864809 3.411536 -5.8606825 -6.1630425 -5.7111845 -6.4707084 -2.810422 1.8273454 ] [ 3.0916007 3.8773277 6.0929523 -8.762264 3.6905546 7.8104897 -5.6319847 6.165745 7.5906296 -0.5406546 ] [ -3.4165642 -5.9019265 -0.46920964 -1.6727613 -2.1981378 3.7580757 6.0893564 -13.416721 -2.0496953 -0.43663162] [ -9.25641 -16.387222 -7.110726 1.7252287 4.801715 3.4909265 1.5323907 7.524839 -0.04993804 0.8959408 ] [ -2.302378 -3.6035569 3.3439376 4.423674 10.02733 2.6091843 2.762278 -4.876086 -2.77141 1.7598604 ] [ 0.5745382 -5.117176 7.287287 0.30157068 -6.6235147 1.6395293 4.426442 3.9188275 0.6271362 2.1417274 ] [ 1.5170063 -6.668884 1.9475781 -5.7321124 -6.821445 -0.6398249 2.169983 7.8385396 -4.4903755 1.7185805 ] [ 0.42851377 4.786776 -5.800709 -4.347042 -0.60556084 1.7164699 -0.21563478 -6.7166314 2.9223707 2.2701824 ] [ -5.526319 -7.6704884 -0.7542115 -6.35651 -0.08667041 -8.996621 0.2529446 5.1182184 -0.31417173 -5.855486 ] [ -5.1908803 0.7855011 -4.2620068 -2.5556629 0.22688149 -4.1540804 -1.9883245 -2.1558573 -2.8523655 -2.0118442 ]]]]; ov_res: [[[[ -4.408004 -2.862678 9.864808 3.411535 -5.8606834 -6.163042 -5.7111845 -6.470708 -2.8104222 1.8273451 ] [ 3.0916007 
3.8773282 6.092951 -8.762264 3.690554 7.8104897 -5.6319847 6.165744 7.5906296 -0.540655 ] [ -3.416565 -5.901927 -0.46920955 -1.6727607 -2.1981378 3.758076 6.0893564 -13.416722 -2.0496945 -0.43663174] [ -9.25641 -16.387222 -7.1107264 1.7252283 4.801715 3.490928 1.5323906 7.5248394 -0.04993773 0.89594007] [ -2.3023782 -3.6035564 3.3439362 4.4236746 10.02733 2.6091847 2.7622786 -4.876086 -2.7714097 1.7598603 ] [ 0.574538 -5.117176 7.2872877 0.30157065 -6.623514 1.639529 4.426442 3.9188268 0.627136 2.141728 ] [ 1.5170069 -6.6688843 1.9475775 -5.732113 -6.8214455 -0.63982415 2.1699827 7.83854 -4.490376 1.7185805 ] [ 0.42851388 4.7867756 -5.800709 -4.3470416 -0.60555995 1.71647 -0.21563417 -6.71663 2.9223702 2.2701824 ] [ -5.5263186 -7.6704865 -0.75421107 -6.35651 -0.0866704 -8.996621 0.25294495 5.1182184 -0.31417173 -5.855485 ] [ -5.190881 0.785501 -4.2620068 -2.5556626 0.22688174 -4.1540804 -1.9883244 -2.1558576 -2.852365 -2.0118442 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': [1, 1], 'pads': 'valid', 'dilations': [1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1025.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.417831}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[ -4.1289816 9.372839 9.713086 0.685169 7.487055 6.091267 0.38953766 -5.4171457 ] [ 1.5939333 7.5395055 2.5722442 5.3850274 -2.0975702 2.896304 0.2239383 -11.159784 ] [ -1.8052531 5.269354 2.7402625 4.1177726 -0.5624338 2.3241823 -2.9218705 -8.236072 ] [ -1.02907 3.7944539 -1.796548 -0.2203029 0.36733752 -2.2264109 -0.20866312 -1.0803773 ] [ 5.411626 -1.3106415 -0.8068467 -6.4035053 1.9150245 -4.984507 2.8134582 -1.9962796 ] [ -0.2611743 -2.1177216 -6.778019 0.4536745 5.2212763 1.4095029 5.188278 2.5370903 ] [ 0.3629537 -5.3622704 -5.571534 -0.7130186 2.500493 1.3892472 -5.7015543 2.9582405 ] [ -5.709972 7.6760798 -0.21810605 2.168265 3.0536292 0.07725123 -1.9643315 -0.5104259 ]]]]; ov_res: [[[[ -4.1289816 9.372839 9.713086 0.68516886 7.4870553 6.091267 0.38953754 -5.417146 ] [ 1.5939337 7.5395055 2.5722442 5.3850265 -2.09757 2.8963044 0.22393815 -11.159782 ] [ -1.8052528 5.269354 2.7402632 4.1177726 -0.56243277 2.3241823 -2.9218705 -8.236073 ] [ -1.0290703 3.7944539 -1.7965479 -0.22030231 0.3673373 -2.2264106 -0.20866355 -1.0803769 ] [ 5.411626 -1.3106419 -0.8068464 -6.4035053 1.9150249 -4.9845076 2.8134587 -1.99628 ] [ -0.26117447 -2.117721 -6.778019 0.45367453 5.2212768 1.409503 
5.1882772 2.5370905 ] [ 0.36295363 -5.362271 -5.571534 -0.71301866 2.500493 1.3892471 -5.7015533 2.9582403 ] [ -5.7099724 7.6760793 -0.21810493 2.1682656 3.0536292 0.07725105 -1.9643321 -0.51042557]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': [1, 1], 'pads': 'same', 'dilations': [2, 2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1027.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.922446}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ -4.6034164 1.754259 3.0913625 4.183604 -4.207744 -5.9318695 -9.501409 -5.849151 -0.15372975 0.16092727] [-10.179118 2.357965 -1.5017327 9.561914 -4.4439535 5.082945 8.294553 -5.1851306 7.23662 -8.306805 ] [ 0.67227393 0.06410836 -6.69018 -7.6123414 -8.060372 -7.4107413 6.9362006 -6.3597555 2.1064615 -2.5908642 ] [ -4.888758 1.3583165 0.79747975 -3.8545868 1.1877174 5.0302625 1.5081636 -3.9297957 -13.474885 2.4008393 ] [ -6.046813 -0.3893044 1.1980984 0.24996054 -2.1278043 -1.9594743 -4.515482 4.848262 -6.8974924 -4.035826 ] [ 0.4165359 -8.1109705 -3.307085 12.016965 0.15837023 -3.4110098 -7.794852 -11.588931 -3.0269728 -0.19726779] [ -3.3127043 3.4872735 -1.3668171 -8.006674 13.297123 -7.621924 10.279823 -4.6574855 2.382596 -3.7098477 ] [ -7.2149224 -6.5078077 -12.33798 -8.728156 -4.881344 5.175343 -6.8455405 5.208628 1.853674 1.8769726 ] [ -6.3361773 4.691792 -1.1860263 -12.3496895 -3.8739972 -4.4999475 -0.02042675 -4.0880795 -10.007452 -1.9157526 ] [ -6.6767917 3.906 -4.5338564 4.670928 1.6769148 -9.54137 5.4364777 0.42701563 5.797786 -11.819168 ]]]]; ov_res: [[[[ -4.603416 1.7542591 3.0913625 4.183604 -4.207744 -5.9318695 
-9.501409 -5.8491507 -0.1537298 0.16092747] [-10.179117 2.3579648 -1.5017326 9.561914 -4.4439535 5.0829444 8.294554 -5.1851306 7.2366195 -8.306804 ] [ 0.67227393 0.06410819 -6.690181 -7.612343 -8.060372 -7.410742 6.9362 -6.3597565 2.1064615 -2.5908642 ] [ -4.888758 1.3583162 0.79747957 -3.8545864 1.1877177 5.030263 1.5081632 -3.929796 -13.474884 2.4008393 ] [ -6.0468135 -0.38930422 1.1980982 0.24996012 -2.1278038 -1.9594746 -4.515482 4.8482614 -6.8974934 -4.0358257 ] [ 0.4165358 -8.11097 -3.3070846 12.016966 0.15837032 -3.4110096 -7.7948523 -11.588929 -3.0269725 -0.19726807] [ -3.3127046 3.4872732 -1.3668169 -8.006674 13.297124 -7.621924 10.279824 -4.657485 2.382596 -3.7098477 ] [ -7.214923 -6.507808 -12.33798 -8.728156 -4.881344 5.1753426 -6.845541 5.208629 1.8536737 1.876972 ] [ -6.336177 4.6917925 -1.1860266 -12.349688 -3.8739972 -4.499947 -0.02042675 -4.0880795 -10.007451 -1.9157529 ] [ -6.676792 3.9059997 -4.533856 4.6709275 1.6769149 -9.5413685 5.436478 0.42701548 5.797786 -11.819168 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': [1, 1], 'pads': 'valid', 'dilations': [2, 2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1029.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.328054}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 2.1588387 2.3759992 -1.3058703 -0.58829176 2.3150659 0.20659035] [ 7.152252 6.0291367 4.9489274 -2.4135432 4.880938 -2.8007545 ] [-1.4127232 -3.0664208 -3.7138455 -2.2695923 0.82532674 4.9265537 ] [-8.819876 -4.8374867 0.0140127 -2.717295 -5.71413 -2.7687922 ] [ 0.5364204 2.058317 2.4601946 3.1383016 -0.76186293 -1.411454 ] [ 2.2731967 -3.0219367 2.9534502 4.918137 1.4066815 5.3576446 ]]]]; ov_res: [[[[ 2.1588387 2.3759992 -1.30587 -0.5882918 2.3150654 0.20659027] [ 7.1522517 6.0291367 4.948927 -2.4135435 4.8809376 -2.8007545 ] [-1.4127228 -3.0664206 -3.7138457 -2.2695918 0.8253268 4.926554 ] [-8.819876 -4.837486 0.01401275 -2.7172954 -5.714129 -2.768792 ] [ 0.5364201 2.0583167 2.4601948 3.138302 -0.76186293 -1.4114542 ] [ 2.2731967 -3.021937 2.95345 4.9181366 1.4066815 5.3576446 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'same', 'dilations': [1, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1031.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.4852 -0.5760 -0.1812 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.7214 (2,1,.,.) = -1.9099 (3,1,.,.) = -0.6550 [ CPUFloatType{3,1,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[ 2.9153296e-01 -3.9243870e-04 7.2949044e-02 7.5303793e-02 5.2756792e-01 1.1170872e+00 -5.7978851e-01 -1.2452112e+00 2.9270014e-01 9.9969190e-01] [ 7.5320292e-01 6.6872275e-01 2.9509923e-01 5.9137619e-01 -9.1548139e-01 4.2313254e-01 -2.9977149e-01 -4.4243994e-01 1.2436135e+00 2.0323610e+00] [ 2.7014238e-01 -3.5460573e-01 -6.5731180e-01 2.2395122e+00 5.7028818e-01 -4.5355335e-01 2.6183644e-01 3.3473420e-01 1.1157389e+00 9.5591545e-01] [-5.5545479e-01 -3.3512452e-01 1.6701875e+00 -9.5392793e-01 1.9897759e-02 7.8032547e-01 -4.3878344e-01 8.8188171e-01 7.7932853e-01 -2.5698847e-01] [-5.6915408e-01 1.0914867e+00 1.2455913e+00 2.2288018e-01 8.2891810e-01 5.9981060e-01 2.3319821e-01 -6.5497720e-01 -2.5222126e-01 -5.3739834e-01] [-1.7497674e+00 8.5871696e-02 -7.0085400e-01 7.7513158e-01 6.1785752e-01 1.3938662e+00 8.5025102e-01 -1.0962752e-01 8.2353514e-01 2.0022523e+00] [ 3.0633968e-01 1.2927028e+00 1.8877872e+00 1.0162253e+00 1.0784994e+00 1.2349131e+00 1.0269146e+00 -2.5798890e-01 -1.2724187e+00 4.7217038e-01] [ 5.8165061e-01 8.8481426e-01 1.0768809e+00 2.4814335e-01 2.3443110e-01 
9.5689744e-01 1.2913338e+00 -4.9622887e-01 -5.5685263e-02 1.1861149e+00] [ 1.4852319e+00 1.6975191e+00 1.0062262e+00 4.2543170e-01 1.2661504e+00 3.4930341e-02 -3.6682982e-02 1.9523597e-01 2.9106054e-01 9.4010001e-01] [ 1.7645789e+00 -2.7067184e-01 1.5029868e-01 -2.9213038e-01 -1.4742433e-01 -4.0369454e-01 7.6674467e-01 1.7923474e-01 -5.1616764e-01 1.3031033e-01]] [[ 6.5119761e-01 -3.3699713e+00 2.0956163e+00 -2.1525722e+00 4.6327534e-01 -9.0701270e-01 -2.0024269e+00 2.6980376e+00 -1.7654580e+00 3.1424724e-02] [-2.9565766e+00 -1.4954365e+00 -1.5265783e+00 1.2984660e+00 -3.4945462e+00 -5.3530803e+00 6.6699499e-01 -4.8238766e-01 -7.2757965e-01 -1.0760539e+00] [-2.0493557e+00 8.6431152e-01 1.1057500e+00 -6.2010825e-01 1.9803284e-01 -2.4445827e+00 5.1067239e-01 -5.1781111e+00 -5.9148848e-01 9.0980357e-01] [-1.2503411e+00 -2.1590896e+00 8.5348189e-02 -1.8542144e+00 -8.1278348e-01 -1.5651886e-01 -2.1387992e+00 4.1804650e-01 -2.5693829e+00 -2.1717472e+00] [ 5.4651803e-01 -1.2463133e+00 -1.5585084e+00 2.1228585e+00 -2.4256825e+00 -1.9877617e+00 3.5367029e+00 2.0100276e+00 -7.5142109e-01 -1.9347187e+00] [ 3.0466101e+00 -3.0087557e+00 5.5971509e-01 -2.4479787e+00 1.6079368e-01 4.4111347e+00 -1.8084624e+00 -3.1659484e-01 -7.0203948e-01 -2.0412705e+00] [-6.2273973e-01 -1.9318861e+00 5.7797658e-01 1.4279026e-01 2.2763226e+00 -4.2125392e+00 6.9353676e-01 -4.2065620e+00 -1.6359233e+00 -5.5309896e+00] [-2.0198083e-01 6.0178244e-01 -2.2399060e-01 -9.4247365e-01 -4.0595064e+00 -3.3798213e+00 6.8409510e-02 5.8243360e-02 6.9547147e-01 1.6658375e+00] [-1.0791749e+00 4.6261942e-01 -1.7529410e+00 -1.9441195e+00 -1.0412810e+00 -2.4634068e+00 2.2977981e-01 -1.7742823e-01 -1.0169809e+00 -1.1693945e+00] [-8.0643749e-01 -6.6594636e-01 1.3528385e+00 2.6337876e+00 -9.7362882e-01 3.9420099e+00 -2.9038137e-01 1.5569147e+00 -1.2087418e+00 -1.4760062e+00]] [[-5.7510018e-01 -1.5571716e+00 5.0280988e-02 -5.8523543e-02 -8.3338267e-01 3.7428674e-01 -2.0548469e-01 -5.4047060e-01 1.0539130e+00 
-1.3234730e+00] [ 1.5481399e-02 -6.6672373e-01 -3.6211595e-01 -3.2530150e-01 -3.4284067e-01 -9.6793348e-01 -1.0821270e-01 -1.5986963e-01 3.1082636e-01 1.9366788e-02] [ 4.1065630e-01 -3.9263800e-01 -1.6992810e+00 -3.5940027e-01 -6.8668139e-01 -9.0952766e-01 5.9890258e-01 8.0228478e-01 -4.6423909e-01 6.9881076e-01] [-9.0088809e-01 1.3174541e-01 7.0091563e-01 8.5415691e-02 3.9465362e-01 -1.7311618e-01 4.0119141e-01 -5.5403376e-01 9.9644482e-01 1.1825643e+00] [ 7.6520711e-01 1.2997690e-01 -5.2942359e-01 1.0191677e+00 -3.2195944e-02 4.4510373e-01 2.1286282e-01 -3.0799240e-01 -3.5584837e-01 -8.2353097e-01] [ 7.7796543e-01 -1.5623956e+00 6.8415642e-01 6.8042433e-01 -6.0579818e-01 -2.1399158e-01 -8.2921225e-01 -5.5616733e-02 1.7070669e+00 -8.2359469e-01] [-9.0949237e-01 -2.0324308e-01 -5.7062441e-01 2.3824877e-01 -5.7309002e-01 -2.2289033e-01 2.7989832e-01 2.6210022e-01 -3.6425433e-01 5.7975101e-01] [ 5.7645481e-02 -3.4705713e-01 -3.5661995e-01 -6.5792724e-02 5.3814238e-01 -1.2908368e+00 -6.4459801e-01 -8.9329398e-01 -7.2811121e-01 -3.4121603e-01] [ 1.2339956e-01 -3.8488868e-01 -6.9865811e-01 -3.0567974e-01 -3.6888492e-01 -3.2522926e-01 4.4900846e-01 -6.6615701e-01 1.4862987e-01 3.0631208e-01] [-5.5214316e-01 -1.0775793e+00 5.7503921e-01 4.6711275e-01 -6.4565800e-02 3.8153071e-02 3.7039194e-01 -7.4440444e-01 6.6284269e-01 -2.8255391e-01]]]]; ov_res: [[[[ 2.9153296e-01 -3.9243870e-04 7.2949044e-02 7.5303793e-02 5.2756792e-01 1.1170872e+00 -5.7978851e-01 -1.2452112e+00 2.9270014e-01 9.9969190e-01] [ 7.5320292e-01 6.6872275e-01 2.9509923e-01 5.9137619e-01 -9.1548139e-01 4.2313254e-01 -2.9977149e-01 -4.4243994e-01 1.2436135e+00 2.0323610e+00] [ 2.7014238e-01 -3.5460573e-01 -6.5731180e-01 2.2395122e+00 5.7028818e-01 -4.5355335e-01 2.6183644e-01 3.3473420e-01 1.1157389e+00 9.5591545e-01] [-5.5545479e-01 -3.3512452e-01 1.6701875e+00 -9.5392793e-01 1.9897759e-02 7.8032547e-01 -4.3878344e-01 8.8188171e-01 7.7932853e-01 -2.5698847e-01] [-5.6915408e-01 1.0914867e+00 1.2455913e+00 
2.2288018e-01 8.2891810e-01 5.9981060e-01 2.3319821e-01 -6.5497720e-01 -2.5222126e-01 -5.3739834e-01] [-1.7497674e+00 8.5871696e-02 -7.0085400e-01 7.7513158e-01 6.1785752e-01 1.3938662e+00 8.5025102e-01 -1.0962752e-01 8.2353514e-01 2.0022523e+00] [ 3.0633968e-01 1.2927028e+00 1.8877872e+00 1.0162253e+00 1.0784994e+00 1.2349131e+00 1.0269146e+00 -2.5798890e-01 -1.2724187e+00 4.7217038e-01] [ 5.8165061e-01 8.8481426e-01 1.0768809e+00 2.4814335e-01 2.3443110e-01 9.5689744e-01 1.2913338e+00 -4.9622887e-01 -5.5685263e-02 1.1861149e+00] [ 1.4852319e+00 1.6975191e+00 1.0062262e+00 4.2543170e-01 1.2661504e+00 3.4930341e-02 -3.6682982e-02 1.9523597e-01 2.9106054e-01 9.4010001e-01] [ 1.7645789e+00 -2.7067184e-01 1.5029868e-01 -2.9213038e-01 -1.4742433e-01 -4.0369454e-01 7.6674467e-01 1.7923474e-01 -5.1616764e-01 1.3031033e-01]] [[ 6.5119761e-01 -3.3699713e+00 2.0956163e+00 -2.1525722e+00 4.6327534e-01 -9.0701270e-01 -2.0024269e+00 2.6980376e+00 -1.7654580e+00 3.1424724e-02] [-2.9565766e+00 -1.4954365e+00 -1.5265783e+00 1.2984660e+00 -3.4945462e+00 -5.3530803e+00 6.6699499e-01 -4.8238766e-01 -7.2757965e-01 -1.0760539e+00] [-2.0493557e+00 8.6431152e-01 1.1057500e+00 -6.2010825e-01 1.9803284e-01 -2.4445827e+00 5.1067239e-01 -5.1781111e+00 -5.9148848e-01 9.0980357e-01] [-1.2503411e+00 -2.1590896e+00 8.5348189e-02 -1.8542144e+00 -8.1278348e-01 -1.5651886e-01 -2.1387992e+00 4.1804650e-01 -2.5693829e+00 -2.1717472e+00] [ 5.4651803e-01 -1.2463133e+00 -1.5585084e+00 2.1228585e+00 -2.4256825e+00 -1.9877617e+00 3.5367029e+00 2.0100276e+00 -7.5142109e-01 -1.9347187e+00] [ 3.0466101e+00 -3.0087557e+00 5.5971509e-01 -2.4479787e+00 1.6079368e-01 4.4111347e+00 -1.8084624e+00 -3.1659484e-01 -7.0203948e-01 -2.0412705e+00] [-6.2273973e-01 -1.9318861e+00 5.7797658e-01 1.4279026e-01 2.2763226e+00 -4.2125392e+00 6.9353676e-01 -4.2065620e+00 -1.6359233e+00 -5.5309896e+00] [-2.0198083e-01 6.0178244e-01 -2.2399060e-01 -9.4247365e-01 -4.0595064e+00 -3.3798213e+00 6.8409510e-02 5.8243360e-02 
6.9547147e-01 1.6658375e+00] [-1.0791749e+00 4.6261942e-01 -1.7529410e+00 -1.9441195e+00 -1.0412810e+00 -2.4634068e+00 2.2977981e-01 -1.7742823e-01 -1.0169809e+00 -1.1693945e+00] [-8.0643749e-01 -6.6594636e-01 1.3528385e+00 2.6337876e+00 -9.7362882e-01 3.9420099e+00 -2.9038137e-01 1.5569147e+00 -1.2087418e+00 -1.4760062e+00]] [[-5.7510018e-01 -1.5571716e+00 5.0280988e-02 -5.8523543e-02 -8.3338267e-01 3.7428674e-01 -2.0548469e-01 -5.4047060e-01 1.0539130e+00 -1.3234730e+00] [ 1.5481399e-02 -6.6672373e-01 -3.6211595e-01 -3.2530150e-01 -3.4284067e-01 -9.6793348e-01 -1.0821270e-01 -1.5986963e-01 3.1082636e-01 1.9366788e-02] [ 4.1065630e-01 -3.9263800e-01 -1.6992810e+00 -3.5940027e-01 -6.8668139e-01 -9.0952766e-01 5.9890258e-01 8.0228478e-01 -4.6423909e-01 6.9881076e-01] [-9.0088809e-01 1.3174541e-01 7.0091563e-01 8.5415691e-02 3.9465362e-01 -1.7311618e-01 4.0119141e-01 -5.5403376e-01 9.9644482e-01 1.1825643e+00] [ 7.6520711e-01 1.2997690e-01 -5.2942359e-01 1.0191677e+00 -3.2195944e-02 4.4510373e-01 2.1286282e-01 -3.0799240e-01 -3.5584837e-01 -8.2353097e-01] [ 7.7796543e-01 -1.5623956e+00 6.8415642e-01 6.8042433e-01 -6.0579818e-01 -2.1399158e-01 -8.2921225e-01 -5.5616733e-02 1.7070669e+00 -8.2359469e-01] [-9.0949237e-01 -2.0324308e-01 -5.7062441e-01 2.3824877e-01 -5.7309002e-01 -2.2289033e-01 2.7989832e-01 2.6210022e-01 -3.6425433e-01 5.7975101e-01] [ 5.7645481e-02 -3.4705713e-01 -3.5661995e-01 -6.5792724e-02 5.3814238e-01 -1.2908368e+00 -6.4459801e-01 -8.9329398e-01 -7.2811121e-01 -3.4121603e-01] [ 1.2339956e-01 -3.8488868e-01 -6.9865811e-01 -3.0567974e-01 -3.6888492e-01 -3.2522926e-01 4.4900846e-01 -6.6615701e-01 1.4862987e-01 3.0631208e-01] [-5.5214316e-01 -1.0775793e+00 5.7503921e-01 4.6711275e-01 -6.4565800e-02 3.8153071e-02 3.7039194e-01 -7.4440444e-01 6.6284269e-01 -2.8255391e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'valid', 'dilations': [1, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1033.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=0.01 * 3.1990 -31.9272 -128.5293 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.1709 (2,1,.,.) = 0.6817 (3,1,.,.) = 0.4836 [ CPUFloatType{3,1,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[-9.03144628e-02 1.88674971e-01 -6.31392375e-02 4.01065797e-01 9.60418731e-02 -1.46617755e-01 7.88536519e-02 2.50350058e-01 1.62715510e-01 -2.75780559e-01] [-2.13175397e-02 1.17396362e-01 2.53935605e-01 -1.04613574e-02 -5.67355081e-02 -1.23563468e-01 -2.33435437e-01 -3.51933897e-01 6.79690987e-02 -7.79693574e-02] [ 2.96321690e-01 1.74202487e-01 3.98023650e-02 -2.18987405e-01 2.26956129e-01 1.49318740e-01 1.56317249e-01 3.87279958e-01 1.00729741e-01 -4.03987728e-02] [-9.74847302e-02 -2.33693019e-01 -1.83487728e-01 2.74158306e-02 -1.75813705e-01 1.27511188e-01 1.53912080e-03 2.02955499e-01 4.96755749e-01 3.45891356e-01] [ 1.48724273e-01 4.03790444e-01 3.53883021e-02 1.66182369e-01 4.19406444e-02 -2.57844687e-01 2.94335932e-01 3.84593457e-02 -3.70774776e-01 -4.44143675e-02] [ 2.06870511e-02 -2.31610492e-01 1.61321610e-01 3.08939666e-02 1.62972718e-01 -2.30957702e-01 1.51235342e-01 1.68415219e-01 -2.07647786e-01 9.48481634e-02] [ 1.70299143e-03 2.09076077e-01 1.55178696e-01 -6.24129325e-02 2.45039597e-01 -1.64219324e-04 -2.21150666e-01 -9.21646506e-03 1.41414478e-01 
1.02271661e-01] [ 2.88995832e-01 9.15080979e-02 -1.22459017e-01 3.75147425e-02 7.36646503e-02 -7.20317587e-02 -2.50454471e-02 1.39333457e-01 1.15018040e-01 -2.57952437e-02] [ 2.09927008e-01 1.15282156e-01 1.55024782e-01 5.97986281e-02 -2.29813293e-01 -2.50289291e-01 -2.27706969e-01 -5.29011525e-02 8.55957493e-02 9.94487628e-02] [ 2.00900465e-01 -3.20299529e-02 1.54028088e-01 1.60824835e-01 -3.54700871e-02 -2.07357004e-01 -1.62679836e-01 -1.26970261e-01 -8.10713246e-02 -5.66773526e-02]] [[-1.17115402e+00 -8.30201149e-01 -4.08727199e-01 -8.35352004e-01 -1.50382280e+00 -3.17231923e-01 -1.41550824e-01 -1.92803860e+00 -3.57424170e-01 -1.30664694e+00] [ 1.77842811e-01 3.79141271e-02 9.32209671e-01 9.25276577e-02 3.60443801e-01 -1.23821759e+00 1.94559515e-01 -1.07616298e-01 -1.56076014e+00 1.01549196e+00] [ 6.83544755e-01 -7.57130444e-01 -2.75914460e-01 -2.12331980e-01 9.82902586e-01 -8.91041338e-01 -1.04673636e+00 8.48180771e-01 -6.71279311e-01 1.38636664e-01] [ 5.58861375e-01 6.60576522e-01 -5.91132462e-01 2.60787398e-01 1.00092866e-01 -1.07640302e+00 -1.22213922e-03 7.28536189e-01 1.67498261e-01 -5.92214584e-01] [ 2.72478282e-01 1.38416961e-01 -7.15619922e-01 -9.93023217e-01 -7.38451064e-01 2.91574955e-01 -7.11158514e-01 5.02569497e-01 -3.47820878e-01 4.90996897e-01] [-1.17353141e+00 -1.53404087e-01 -4.58606668e-02 -8.12949955e-01 -1.07020307e+00 8.79211947e-02 -1.29693687e-01 1.06162059e+00 -1.47422954e-01 -4.85423386e-01] [-1.91182709e+00 -5.85386217e-01 7.41166115e-01 -1.36565134e-01 -7.34094143e-01 2.16207579e-01 9.09417212e-01 1.08054841e+00 1.01434983e-01 -1.32778540e-01] [-7.96435297e-01 1.67803812e+00 3.94497693e-01 1.08959675e+00 -4.04089838e-01 5.54611802e-01 1.57970101e-01 -2.53845185e-01 1.56768119e+00 2.04813585e-01] [ 2.17155635e-01 -2.46079773e-01 -7.93172047e-02 -9.91591096e-01 4.63024765e-01 5.92694938e-01 4.03754681e-01 -9.49107528e-01 8.57788101e-02 1.03112710e+00] [ 2.89442688e-01 -7.64149964e-01 -2.70818353e-01 -2.05074415e-01 -1.24034619e+00 
-6.78538024e-01 -1.82232559e-01 -2.03422976e+00 -1.86774373e+00 -7.60969818e-01]] [[-1.48536015e+00 -1.59490955e+00 -8.47779751e-01 -1.67103982e+00 -7.72233963e-01 -2.09424353e+00 -1.19572783e+00 -1.08297026e+00 -1.04125893e+00 -1.76808989e+00] [-1.38353336e+00 -7.59667039e-01 -1.90809453e+00 -1.91556776e+00 -1.92402232e+00 -1.37312555e+00 -1.42115390e+00 -1.32211435e+00 -1.09198940e+00 -9.39396918e-01] [-1.68337524e+00 -1.23824155e+00 -2.08959818e+00 -8.34047854e-01 -8.16103399e-01 -1.49109411e+00 -1.06208348e+00 -1.17043734e+00 -1.34895527e+00 -1.45474911e+00] [-1.55845547e+00 -1.38566744e+00 -1.26038563e+00 -1.33645880e+00 -9.97690856e-01 -2.75965071e+00 -6.68332458e-01 -9.47565138e-01 -1.33055830e+00 -7.43689239e-01] [-2.17986655e+00 -9.96462107e-01 -1.02813208e+00 -1.08236063e+00 -1.83566844e+00 -1.22854781e+00 -1.33395886e+00 -1.04732156e+00 -1.83314180e+00 -1.03375709e+00] [-1.42750978e+00 -6.87124610e-01 -8.92139137e-01 -2.29683566e+00 -1.56563127e+00 -1.02005553e+00 -9.36304271e-01 -1.56525159e+00 -1.37751126e+00 -7.74306118e-01] [-1.31051290e+00 -1.76845074e+00 -2.04715466e+00 -1.36559296e+00 -9.56850946e-01 -1.48994899e+00 -3.29148352e-01 -2.14871383e+00 -8.76067638e-01 -3.52357417e-01] [-1.84541419e-01 -9.85534370e-01 -1.00225091e+00 -1.16141808e+00 -1.35050082e+00 -1.02910483e+00 -1.40125442e+00 -1.23364401e+00 -7.32746065e-01 -1.32935262e+00] [-1.11904466e+00 -1.19609690e+00 -2.23908305e+00 -1.63337255e+00 -1.36546433e+00 -2.23971128e+00 -1.15663517e+00 -1.94339168e+00 -1.33530092e+00 -2.17231464e+00] [-1.47273898e+00 -1.33044326e+00 -1.42555594e+00 -2.03873539e+00 -1.17983413e+00 -2.19717979e+00 -1.28065240e+00 -9.34437573e-01 -4.79986489e-01 -9.71994936e-01]]]]; ov_res: [[[[-9.03144628e-02 1.88674971e-01 -6.31392375e-02 4.01065797e-01 9.60418731e-02 -1.46617755e-01 7.88536519e-02 2.50350058e-01 1.62715510e-01 -2.75780559e-01] [-2.13175397e-02 1.17396362e-01 2.53935605e-01 -1.04613574e-02 -5.67355081e-02 -1.23563468e-01 -2.33435437e-01 
-3.51933897e-01 6.79690987e-02 -7.79693574e-02] [ 2.96321690e-01 1.74202487e-01 3.98023650e-02 -2.18987405e-01 2.26956129e-01 1.49318740e-01 1.56317249e-01 3.87279958e-01 1.00729741e-01 -4.03987728e-02] [-9.74847302e-02 -2.33693019e-01 -1.83487728e-01 2.74158306e-02 -1.75813705e-01 1.27511188e-01 1.53912080e-03 2.02955499e-01 4.96755749e-01 3.45891356e-01] [ 1.48724273e-01 4.03790444e-01 3.53883021e-02 1.66182369e-01 4.19406444e-02 -2.57844687e-01 2.94335932e-01 3.84593457e-02 -3.70774776e-01 -4.44143675e-02] [ 2.06870511e-02 -2.31610492e-01 1.61321610e-01 3.08939666e-02 1.62972718e-01 -2.30957702e-01 1.51235342e-01 1.68415219e-01 -2.07647786e-01 9.48481634e-02] [ 1.70299143e-03 2.09076077e-01 1.55178696e-01 -6.24129325e-02 2.45039597e-01 -1.64219324e-04 -2.21150666e-01 -9.21646506e-03 1.41414478e-01 1.02271661e-01] [ 2.88995832e-01 9.15080979e-02 -1.22459017e-01 3.75147425e-02 7.36646503e-02 -7.20317587e-02 -2.50454471e-02 1.39333457e-01 1.15018040e-01 -2.57952437e-02] [ 2.09927008e-01 1.15282156e-01 1.55024782e-01 5.97986281e-02 -2.29813293e-01 -2.50289291e-01 -2.27706969e-01 -5.29011525e-02 8.55957493e-02 9.94487628e-02] [ 2.00900465e-01 -3.20299529e-02 1.54028088e-01 1.60824835e-01 -3.54700871e-02 -2.07357004e-01 -1.62679836e-01 -1.26970261e-01 -8.10713246e-02 -5.66773526e-02]] [[-1.17115402e+00 -8.30201149e-01 -4.08727199e-01 -8.35352004e-01 -1.50382280e+00 -3.17231923e-01 -1.41550824e-01 -1.92803860e+00 -3.57424170e-01 -1.30664694e+00] [ 1.77842811e-01 3.79141271e-02 9.32209671e-01 9.25276577e-02 3.60443801e-01 -1.23821759e+00 1.94559515e-01 -1.07616298e-01 -1.56076014e+00 1.01549196e+00] [ 6.83544755e-01 -7.57130444e-01 -2.75914460e-01 -2.12331980e-01 9.82902586e-01 -8.91041338e-01 -1.04673636e+00 8.48180771e-01 -6.71279311e-01 1.38636664e-01] [ 5.58861375e-01 6.60576522e-01 -5.91132462e-01 2.60787398e-01 1.00092866e-01 -1.07640302e+00 -1.22213922e-03 7.28536189e-01 1.67498261e-01 -5.92214584e-01] [ 2.72478282e-01 1.38416961e-01 -7.15619922e-01 
-9.93023217e-01 -7.38451064e-01 2.91574955e-01 -7.11158514e-01 5.02569497e-01 -3.47820878e-01 4.90996897e-01] [-1.17353141e+00 -1.53404087e-01 -4.58606668e-02 -8.12949955e-01 -1.07020307e+00 8.79211947e-02 -1.29693687e-01 1.06162059e+00 -1.47422954e-01 -4.85423386e-01] [-1.91182709e+00 -5.85386217e-01 7.41166115e-01 -1.36565134e-01 -7.34094143e-01 2.16207579e-01 9.09417212e-01 1.08054841e+00 1.01434983e-01 -1.32778540e-01] [-7.96435297e-01 1.67803812e+00 3.94497693e-01 1.08959675e+00 -4.04089838e-01 5.54611802e-01 1.57970101e-01 -2.53845185e-01 1.56768119e+00 2.04813585e-01] [ 2.17155635e-01 -2.46079773e-01 -7.93172047e-02 -9.91591096e-01 4.63024765e-01 5.92694938e-01 4.03754681e-01 -9.49107528e-01 8.57788101e-02 1.03112710e+00] [ 2.89442688e-01 -7.64149964e-01 -2.70818353e-01 -2.05074415e-01 -1.24034619e+00 -6.78538024e-01 -1.82232559e-01 -2.03422976e+00 -1.86774373e+00 -7.60969818e-01]] [[-1.48536015e+00 -1.59490955e+00 -8.47779751e-01 -1.67103982e+00 -7.72233963e-01 -2.09424353e+00 -1.19572783e+00 -1.08297026e+00 -1.04125893e+00 -1.76808989e+00] [-1.38353336e+00 -7.59667039e-01 -1.90809453e+00 -1.91556776e+00 -1.92402232e+00 -1.37312555e+00 -1.42115390e+00 -1.32211435e+00 -1.09198940e+00 -9.39396918e-01] [-1.68337524e+00 -1.23824155e+00 -2.08959818e+00 -8.34047854e-01 -8.16103399e-01 -1.49109411e+00 -1.06208348e+00 -1.17043734e+00 -1.34895527e+00 -1.45474911e+00] [-1.55845547e+00 -1.38566744e+00 -1.26038563e+00 -1.33645880e+00 -9.97690856e-01 -2.75965071e+00 -6.68332458e-01 -9.47565138e-01 -1.33055830e+00 -7.43689239e-01] [-2.17986655e+00 -9.96462107e-01 -1.02813208e+00 -1.08236063e+00 -1.83566844e+00 -1.22854781e+00 -1.33395886e+00 -1.04732156e+00 -1.83314180e+00 -1.03375709e+00] [-1.42750978e+00 -6.87124610e-01 -8.92139137e-01 -2.29683566e+00 -1.56563127e+00 -1.02005553e+00 -9.36304271e-01 -1.56525159e+00 -1.37751126e+00 -7.74306118e-01] [-1.31051290e+00 -1.76845074e+00 -2.04715466e+00 -1.36559296e+00 -9.56850946e-01 -1.48994899e+00 -3.29148352e-01 
-2.14871383e+00 -8.76067638e-01 -3.52357417e-01] [-1.84541419e-01 -9.85534370e-01 -1.00225091e+00 -1.16141808e+00 -1.35050082e+00 -1.02910483e+00 -1.40125442e+00 -1.23364401e+00 -7.32746065e-01 -1.32935262e+00] [-1.11904466e+00 -1.19609690e+00 -2.23908305e+00 -1.63337255e+00 -1.36546433e+00 -2.23971128e+00 -1.15663517e+00 -1.94339168e+00 -1.33530092e+00 -2.17231464e+00] [-1.47273898e+00 -1.33044326e+00 -1.42555594e+00 -2.03873539e+00 -1.17983413e+00 -2.19717979e+00 -1.28065240e+00 -9.34437573e-01 -4.79986489e-01 -9.71994936e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': [2, 2], 'pads': 'valid', 'dilations': [1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1035.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.44786}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 0.7031823 1.6257728 -2.351391 -12.738618 ] [ -3.45958 -4.106828 -2.240238 5.8506103] [ -3.2203958 -3.1479995 5.76254 5.321079 ] [ -5.3072777 0.6143483 1.1204512 -5.184649 ]]]]; ov_res: [[[[ 0.703182 1.6257732 -2.3513908 -12.738619 ] [ -3.459581 -4.1068287 -2.2402377 5.8506107] [ -3.2203956 -3.1479993 5.76254 5.3210793] [ -5.3072786 0.6143482 1.1204503 -5.1846504]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': [2, 2], 'pads': 'valid', 'dilations': [2, 2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1037.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.364499}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[ 7.05959 -2.2110863 -4.477718 ] [ -2.085908 -1.06254 -1.7613343] [ -9.748293 4.446623 -13.264878 ]]]]; ov_res: [[[[ 7.059591 -2.2110872 -4.477717 ] [ -2.0859075 -1.0625398 -1.7613342] [ -9.748294 4.446623 -13.264879 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3], 'strides': [2, 1], 'pads': 'valid', 'dilations': [1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1039.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.29364}]() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 7.2381864 8.350644 -0.54239374 1.9652562 -7.6392684 6.5461054 -6.8223066 14.921321 ] [-1.1358371 9.587426 3.1614957 3.6404207 7.830261 -0.07938033 6.92167 -9.236364 ] [-0.1424703 -1.90428 9.895478 11.364407 -2.526305 5.146326 -9.607113 -2.506911 ] [ 5.828493 -3.5366433 2.0750558 -0.6312051 -2.682517 1.7430185 -2.2109253 4.860487 ]]]]; ov_res: [[[[ 7.2381883 8.350644 -0.54239285 1.9652562 -7.6392684 6.5461054 -6.822307 14.921322 ] [-1.135837 9.587427 3.1614952 3.6404204 7.830262 -0.07938087 6.9216704 -9.236365 ] [-0.14247036 -1.9042803 9.895478 11.364406 -2.5263057 5.146326 -9.607113 -2.5069108 ] [ 5.828493 -3.536643 2.0750563 -0.63120425 -2.6825166 1.743018 -2.210925 4.860487 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'pads': 'valid', 'dilations': [1, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1041.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.2927 0.5125 0.2223 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.9168 (2,1,.,.) = 1.1034 (3,1,.,.) = 0.8015 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ 
type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of 
op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Cfw_re: [[[[ 1.7081404 1.9586234 0.97986406 3.6337233 1.112854 ] [ 2.4144735 2.1021428 1.6227516 2.3353975 1.8808943 ] [ 2.2307446 0.0563421 1.6816239 1.5003141 1.4607549 ] [ 3.0289016 3.542155 0.13541174 0.42599535 1.3864731 ] [ 2.7745028 1.3738307 2.212925 1.7655702 1.3215667 ]] [[ 0.71602935 -0.2334004 0.7601148 0.31592837 -0.8127363 ] [ 1.643318 1.208873 1.4176502 1.7749035 -1.2365874 ] [-0.6842732 -0.44714868 0.7838623 0.17523071 3.575387 ] [-0.5753195 0.27385 1.814236 0.8873854 -0.78634006] [ 1.3632718 2.0718641 0.00979766 2.161359 0.79200023]] [[ 0.6835795 -0.1319033 -0.6064796 -1.0985619 0.66190237] [ 0.74389166 0.70674646 -0.667102 -0.3540463 0.46924213] [ 1.281568 0.6766597 -0.49658945 1.7978494 0.0747069 ] [-0.32831255 0.8449832 0.5182969 0.77982575 0.90059525] [-0.5452627 0.7589926 0.9300408 -0.62085515 -0.81864905]]]]; ov_res: [[[[ 1.7081404 1.9586234 0.97986406 3.6337233 1.112854 ] [ 2.4144735 2.1021428 1.6227516 
2.3353975 1.8808943 ] [ 2.2307446 0.0563421 1.6816239 1.5003141 1.4607549 ] [ 3.0289016 3.542155 0.13541174 0.42599535 1.3864731 ] [ 2.7745028 1.3738307 2.212925 1.7655702 1.3215667 ]] [[ 0.71602935 -0.2334004 0.7601148 0.31592837 -0.8127363 ] [ 1.643318 1.208873 1.4176502 1.7749035 -1.2365874 ] [-0.6842732 -0.44714868 0.7838623 0.17523071 3.575387 ] [-0.5753195 0.27385 1.814236 0.8873854 -0.78634006] [ 1.3632718 2.0718641 0.00979766 2.161359 0.79200023]] [[ 0.6835795 -0.1319033 -0.6064796 -1.0985619 0.66190237] [ 0.74389166 0.70674646 -0.667102 -0.3540463 0.46924213] [ 1.281568 0.6766597 -0.49658945 1.7978494 0.0747069 ] [-0.32831255 0.8449832 0.5182969 0.77982575 0.90059525] [-0.5452627 0.7589926 0.9300408 -0.62085515 -0.81864905]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'pads': 'valid', 'dilations': [2, 2], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1043.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.2152 -0.2578 -1.3152 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.4454 (2,1,.,.) = -0.1770 (3,1,.,.) = -0.2139 [ CPUFloatType{3,1,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[-0.4988192 -0.4242256 -0.615807 -0.31438085 -1.1165962 ] [ 0.13454404 -0.16944146 0.2426717 0.49236894 -0.05804456] [ 0.18032093 0.12176719 0.42186046 0.25153598 -0.07043032] [-0.16703983 -0.7778844 -0.58415735 0.12084478 -0.02826692] [-0.478513 -0.2771432 0.05727718 -0.87579083 -0.13293402]] [[-0.03988115 -0.42648175 -0.5230434 -0.15385883 -0.17804407] [-0.30913484 -0.34573582 -0.34111786 -0.12203264 -0.11186311] [-0.20732044 -0.2159083 -0.34967732 -0.27248982 -0.28888988] [-0.09530863 -0.16041534 -0.41339454 -0.35167724 0.07163011] [-0.46954948 -0.32834214 -0.43264768 -0.41012627 -0.25205827]] [[-1.4749322 -1.3830254 -0.93253475 -1.4141222 -1.3858839 ] [-1.5802633 -1.0951254 -1.2081033 -1.8815281 -1.1546906 ] [-1.7466155 -1.0163587 -1.1895438 -1.2105302 -0.9127013 ] [-1.2613599 -1.2990611 -1.3216648 -1.0835657 -1.5564622 ] [-1.3396435 -1.3146989 -1.3161596 -1.6917185 -1.0784794 ]]]]; ov_res: [[[[-0.4988192 -0.4242256 -0.615807 -0.31438085 -1.1165962 ] [ 0.13454404 -0.16944146 0.2426717 0.49236894 -0.05804456] [ 0.18032093 0.12176719 0.42186046 0.25153598 -0.07043032] 
[-0.16703983 -0.7778844 -0.58415735 0.12084478 -0.02826692] [-0.478513 -0.2771432 0.05727718 -0.87579083 -0.13293402]] [[-0.03988115 -0.42648175 -0.5230434 -0.15385883 -0.17804407] [-0.30913484 -0.34573582 -0.34111786 -0.12203264 -0.11186311] [-0.20732044 -0.2159083 -0.34967732 -0.27248982 -0.28888988] [-0.09530863 -0.16041534 -0.41339454 -0.35167724 0.07163011] [-0.46954948 -0.32834214 -0.43264768 -0.41012627 -0.25205827]] [[-1.4749322 -1.3830254 -0.93253475 -1.4141222 -1.3858839 ] [-1.5802633 -1.0951254 -1.2081033 -1.8815281 -1.1546906 ] [-1.7466155 -1.0163587 -1.1895438 -1.2105302 -0.9127013 ] [-1.2613599 -1.2990611 -1.3216648 -1.0835657 -1.5564622 ] [-1.3396435 -1.3146989 -1.3161596 -1.6917185 -1.0784794 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 1], 'pads': 'valid', 'dilations': [1, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1045.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.3888 -0.3932 0.2233 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -2.8008 (2,1,.,.) = 0.7104 (3,1,.,.) = 1.0943 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 2.9613914e+00 2.2726736e+00 -3.8194592e+00 8.5117346e-01 -2.7593944e+00 -4.3487139e+00 -8.6651403e-01 9.0707403e-01 5.9932928e+00 2.7674621e-02] [-7.7394992e-01 -1.0406668e+00 -1.3074181e+00 -6.0408888e+00 -2.7834344e+00 -7.0235658e+00 1.1622843e+00 -4.4078417e+00 -2.7087300e+00 -9.0871841e-01] [ 4.3185487e+00 -4.8066211e+00 -2.0203955e+00 -1.5381870e+00 4.0233231e+00 1.9031999e+00 1.1759952e+00 9.7975445e-01 -1.7059184e+00 -9.3426418e-01] [-5.5412489e-01 2.7263932e+00 -4.7081810e-01 -2.5930026e+00 -9.4671436e-02 5.0873733e+00 -5.0610905e+00 7.8339034e-01 -5.1972513e+00 -5.6742544e+00] [ 7.4237734e-01 -1.6927393e-01 5.5465150e+00 3.4829035e+00 -4.0618691e+00 -1.3501283e+00 3.7899027e+00 -2.8864987e+00 -1.6763743e+00 -1.9813948e+00]] [[-1.5145154e+00 1.1620160e-01 -1.3492143e+00 -7.8746450e-01 -3.7451389e-01 -1.2457072e+00 -3.1123742e-01 -1.9992411e+00 9.7481921e-02 -1.2953298e+00] [-1.3893516e+00 -9.8920453e-01 -1.2977087e-01 -4.4751710e-01 5.0270069e-01 -2.5048318e+00 -1.2399420e-01 1.9717236e-01 -1.0962579e+00 
-9.4145632e-01] [-8.8552082e-01 -1.9343724e-02 -3.3269912e-01 -1.4125969e+00 -2.1607528e+00 1.7662913e-01 2.4418145e-02 -5.3211844e-01 -2.9730123e-01 -1.0849295e+00] [-1.4898576e-01 -1.5055014e+00 5.0723523e-01 -5.6987607e-01 9.5330678e-02 3.9255258e-01 -9.6990871e-01 -4.2929837e-01 -9.0773362e-01 -1.4304508e+00] [ 3.1746179e-01 4.9590570e-01 4.3931445e-01 -1.1474042e+00 -7.8883827e-02 -2.0215103e-01 -4.6764329e-01 -3.7921897e-01 -2.4232445e-03 -9.6671408e-01]] [[ 3.7938622e-01 -2.3215751e-01 -6.5065473e-02 4.0162474e-01 -1.0017280e-01 1.6470050e+00 1.6932976e-01 1.0008867e+00 1.7587749e+00 7.6061743e-01] [ 1.9559366e-01 1.5524646e+00 -6.2780634e-02 2.5646050e-02 8.3291620e-01 2.8148797e-01 1.5175656e+00 8.7201571e-01 -4.8801452e-02 3.5851857e-01] [ 8.9865470e-01 -2.0002742e+00 2.4844150e-01 3.4236065e-01 1.6528584e+00 -1.3265387e+00 8.3390521e-03 -1.7807829e+00 2.3163941e+00 1.0462557e+00] [ 7.4685562e-01 -4.9700099e-01 2.0167975e+00 -1.3877599e+00 -3.0650192e-01 -9.1263831e-01 7.2999007e-01 3.7346342e-01 -7.2870719e-01 6.6942585e-01] [-1.4448172e+00 -3.8486397e-01 1.9772549e+00 -1.8641502e+00 6.2588322e-01 3.6312442e+00 -3.5378993e-01 -8.1205028e-01 1.0840385e+00 -2.6489680e+00]]]]; ov_res: [[[[ 2.9613914e+00 2.2726736e+00 -3.8194592e+00 8.5117346e-01 -2.7593944e+00 -4.3487139e+00 -8.6651403e-01 9.0707403e-01 5.9932928e+00 2.7674621e-02] [-7.7394992e-01 -1.0406668e+00 -1.3074181e+00 -6.0408888e+00 -2.7834344e+00 -7.0235658e+00 1.1622843e+00 -4.4078417e+00 -2.7087300e+00 -9.0871841e-01] [ 4.3185487e+00 -4.8066211e+00 -2.0203955e+00 -1.5381870e+00 4.0233231e+00 1.9031999e+00 1.1759952e+00 9.7975445e-01 -1.7059184e+00 -9.3426418e-01] [-5.5412489e-01 2.7263932e+00 -4.7081810e-01 -2.5930026e+00 -9.4671436e-02 5.0873733e+00 -5.0610905e+00 7.8339034e-01 -5.1972513e+00 -5.6742544e+00] [ 7.4237734e-01 -1.6927393e-01 5.5465150e+00 3.4829035e+00 -4.0618691e+00 -1.3501283e+00 3.7899027e+00 -2.8864987e+00 -1.6763743e+00 -1.9813948e+00]] [[-1.5145154e+00 1.1620160e-01 
-1.3492143e+00 -7.8746450e-01 -3.7451389e-01 -1.2457072e+00 -3.1123742e-01 -1.9992411e+00 9.7481921e-02 -1.2953298e+00] [-1.3893516e+00 -9.8920453e-01 -1.2977087e-01 -4.4751710e-01 5.0270069e-01 -2.5048318e+00 -1.2399420e-01 1.9717236e-01 -1.0962579e+00 -9.4145632e-01] [-8.8552082e-01 -1.9343724e-02 -3.3269912e-01 -1.4125969e+00 -2.1607528e+00 1.7662913e-01 2.4418145e-02 -5.3211844e-01 -2.9730123e-01 -1.0849295e+00] [-1.4898576e-01 -1.5055014e+00 5.0723523e-01 -5.6987607e-01 9.5330678e-02 3.9255258e-01 -9.6990871e-01 -4.2929837e-01 -9.0773362e-01 -1.4304508e+00] [ 3.1746179e-01 4.9590570e-01 4.3931445e-01 -1.1474042e+00 -7.8883827e-02 -2.0215103e-01 -4.6764329e-01 -3.7921897e-01 -2.4232445e-03 -9.6671408e-01]] [[ 3.7938622e-01 -2.3215751e-01 -6.5065473e-02 4.0162474e-01 -1.0017280e-01 1.6470050e+00 1.6932976e-01 1.0008867e+00 1.7587749e+00 7.6061743e-01] [ 1.9559366e-01 1.5524646e+00 -6.2780634e-02 2.5646050e-02 8.3291620e-01 2.8148797e-01 1.5175656e+00 8.7201571e-01 -4.8801452e-02 3.5851857e-01] [ 8.9865470e-01 -2.0002742e+00 2.4844150e-01 3.4236065e-01 1.6528584e+00 -1.3265387e+00 8.3390521e-03 -1.7807829e+00 2.3163941e+00 1.0462557e+00] [ 7.4685562e-01 -4.9700099e-01 2.0167975e+00 -1.3877599e+00 -3.0650192e-01 -9.1263831e-01 7.2999007e-01 3.7346342e-01 -7.2870719e-01 6.6942585e-01] [-1.4448172e+00 -3.8486397e-01 1.9772549e+00 -1.8641502e+00 6.2588322e-01 3.6312442e+00 -3.5378993e-01 -8.1205028e-01 1.0840385e+00 -2.6489680e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'same', 'dilations': [2, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1047.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 1]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.9081 1.5749 -1.1439 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.2846 (2,1,.,.) = 0.4585 (3,1,.,.) = -1.0486 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[-1.9516189 -1.8748871 -2.1557422 -1.9437044 -2.0240862 -1.8470889 -1.805448 -1.609175 -2.4587593 -1.623442 ] [-1.5499176 -1.7822655 -1.8257438 -2.0109916 -1.7467823 -1.8179876 -2.1085901 -1.8027872 -1.8530916 -1.8757025 ] [-1.7266904 -1.6444302 -2.1431022 -2.1996233 -1.6988422 -1.5389287 -1.9383855 -1.6389228 -2.176039 -2.1448243 ] [-2.3029783 -1.9252715 -1.9791914 -2.2558873 -2.2499187 -1.9432065 -2.1178906 -2.200544 -2.22065 -2.0199718 ] [-2.2331297 -1.8920428 -1.684115 -1.6353784 -2.530411 -2.4124348 -1.9049083 -1.9817635 -1.689076 -1.7670895 ] [-2.2519252 -1.2836546 -1.4914973 -2.2078686 -2.1396184 -2.0294478 -1.7887762 -1.2630361 -2.1429806 -1.792679 ] [-1.6066053 -1.9626933 -1.5837358 -1.870979 -2.1369503 -1.4729465 -1.7556355 -2.054014 -2.337736 -1.7901596 ] [-1.484293 -2.2652094 -1.8688431 -1.8089422 -2.19877 -2.293814 -2.1540136 -1.6047299 -1.9309922 -2.0832603 ] [-1.6454057 -1.7365066 -2.3392777 -1.7369808 -1.6982774 -1.7677844 -2.4012213 -2.0324986 -2.177115 -1.7404486 ] [-1.6401124 -1.6638827 -1.7026414 
-1.9295484 -1.7580276 -1.6900803 -1.8923897 -1.7442777 -2.1354928 -1.9573176 ]] [[ 1.2891799 1.814055 1.183452 1.5879067 1.4771979 1.5010846 0.9772461 1.4862074 1.6488295 1.4099679 ] [ 2.5744555 1.4164876 1.1324494 2.4145799 1.7360629 1.6015863 1.9402676 1.0660046 2.213063 1.7902964 ] [ 2.4134638 1.3785864 1.3008393 1.008929 1.0071745 1.3506652 1.2984779 1.028079 1.136403 1.2074404 ] [ 1.2222588 1.9934933 1.4883577 1.8969676 1.6150218 1.6027604 1.7632594 1.6366457 0.5231822 1.7506627 ] [ 1.4095864 2.0619233 1.2562498 1.9927818 0.26254553 1.4238625 1.2716982 1.5206767 2.0603216 1.3610964 ] [ 1.9046345 1.0304484 1.3546426 1.9839327 1.1157864 1.3504117 1.4274491 1.3471241 1.5672208 1.4355518 ] [ 2.5736954 1.6918733 1.1096029 2.0681403 2.4966743 2.0673807 1.500471 1.6895449 0.8539476 2.1748147 ] [ 1.0521445 2.0571957 1.7421896 1.3897414 2.275936 1.6472691 1.8370894 1.6075417 1.4481739 0.9004135 ] [ 2.089698 1.4946754 1.7894568 1.3285576 1.1106694 0.53175884 1.6702428 1.7760556 2.0986536 2.271006 ] [ 2.5986392 1.2821989 1.2345129 1.4302571 2.3820941 1.5772781 0.9052115 0.96181023 1.4849813 1.6146069 ]] [[-2.4946787 -1.6425251 -0.1874285 -2.6086493 -2.2150345 0.10080195 -0.6112302 -1.5545505 -0.8785793 -0.24249426] [-1.6346526 -1.6020651 -1.2572893 0.24196896 -0.76003176 -2.5897417 -0.48059756 -0.45175424 -1.8170075 -2.5816698 ] [-1.8631223 -5.0692606 -1.7537278 -1.0912251 -1.154485 1.4170204 -1.4322628 0.4573428 -1.2913442 -1.6651672 ] [-0.31399584 -0.65337676 -2.0034332 -0.513056 -3.3274145 -2.588097 -2.0955632 -1.3481772 -0.746664 0.59745276] [-0.87051255 -1.0249312 0.31408837 -2.5855443 -2.6712184 -2.4840863 -0.3284725 -2.6951175 -3.059017 -0.24590452] [-2.616301 -1.3642083 -2.3690586 -0.4042498 1.5626168 0.07722961 -1.4916153 -2.440919 -1.9397142 -1.0103451 ] [-1.3389566 -3.5132031 -0.1133483 -1.1628265 -1.78918 -0.6527704 -1.4216125 -1.4852095 -1.2783729 -1.4020622 ] [-1.6177309 -1.360348 -0.06682833 -0.52998954 -1.2775337 -1.3549205 -2.001861 -1.0209016 0.62509155 
-0.01772049] [-2.4342709 -0.04753882 0.7040024 -1.4560474 -2.6424725 -0.81225544 -1.7372413 0.13908695 -1.437011 -0.4598434 ] [-0.4760613 -1.4719555 -2.4504972 -1.0680839 -2.2662294 -3.324716 -2.8257577 -2.8070786 -1.853198 -2.9180646 ]]]]; ov_res: [[[[-1.9516189 -1.8748871 -2.1557422 -1.9437044 -2.0240862 -1.8470889 -1.805448 -1.609175 -2.4587593 -1.623442 ] [-1.5499176 -1.7822655 -1.8257438 -2.0109916 -1.7467823 -1.8179876 -2.1085901 -1.8027872 -1.8530916 -1.8757025 ] [-1.7266904 -1.6444302 -2.1431022 -2.1996233 -1.6988422 -1.5389287 -1.9383855 -1.6389228 -2.176039 -2.1448243 ] [-2.3029783 -1.9252715 -1.9791914 -2.2558873 -2.2499187 -1.9432065 -2.1178906 -2.200544 -2.22065 -2.0199718 ] [-2.2331297 -1.8920428 -1.684115 -1.6353784 -2.530411 -2.4124348 -1.9049083 -1.9817635 -1.689076 -1.7670895 ] [-2.2519252 -1.2836546 -1.4914973 -2.2078686 -2.1396184 -2.0294478 -1.7887762 -1.2630361 -2.1429806 -1.792679 ] [-1.6066053 -1.9626933 -1.5837358 -1.870979 -2.1369503 -1.4729465 -1.7556355 -2.054014 -2.337736 -1.7901596 ] [-1.484293 -2.2652094 -1.8688431 -1.8089422 -2.19877 -2.293814 -2.1540136 -1.6047299 -1.9309922 -2.0832603 ] [-1.6454057 -1.7365066 -2.3392777 -1.7369808 -1.6982774 -1.7677844 -2.4012213 -2.0324986 -2.177115 -1.7404486 ] [-1.6401124 -1.6638827 -1.7026414 -1.9295484 -1.7580276 -1.6900803 -1.8923897 -1.7442777 -2.1354928 -1.9573176 ]] [[ 1.2891799 1.814055 1.183452 1.5879067 1.4771979 1.5010846 0.9772461 1.4862074 1.6488295 1.4099679 ] [ 2.5744555 1.4164876 1.1324494 2.4145799 1.7360629 1.6015863 1.9402676 1.0660046 2.213063 1.7902964 ] [ 2.4134638 1.3785864 1.3008393 1.008929 1.0071745 1.3506652 1.2984779 1.028079 1.136403 1.2074404 ] [ 1.2222588 1.9934933 1.4883577 1.8969676 1.6150218 1.6027604 1.7632594 1.6366457 0.5231822 1.7506627 ] [ 1.4095864 2.0619233 1.2562498 1.9927818 0.26254553 1.4238625 1.2716982 1.5206767 2.0603216 1.3610964 ] [ 1.9046345 1.0304484 1.3546426 1.9839327 1.1157864 1.3504117 1.4274491 1.3471241 1.5672208 1.4355518 ] [ 2.5736954 
1.6918733 1.1096029 2.0681403 2.4966743 2.0673807 1.500471 1.6895449 0.8539476 2.1748147 ] [ 1.0521445 2.0571957 1.7421896 1.3897414 2.275936 1.6472691 1.8370894 1.6075417 1.4481739 0.9004135 ] [ 2.089698 1.4946754 1.7894568 1.3285576 1.1106694 0.53175884 1.6702428 1.7760556 2.0986536 2.271006 ] [ 2.5986392 1.2821989 1.2345129 1.4302571 2.3820941 1.5772781 0.9052115 0.96181023 1.4849813 1.6146069 ]] [[-2.4946787 -1.6425251 -0.1874285 -2.6086493 -2.2150345 0.10080195 -0.6112302 -1.5545505 -0.8785793 -0.24249426] [-1.6346526 -1.6020651 -1.2572893 0.24196896 -0.76003176 -2.5897417 -0.48059756 -0.45175424 -1.8170075 -2.5816698 ] [-1.8631223 -5.0692606 -1.7537278 -1.0912251 -1.154485 1.4170204 -1.4322628 0.4573428 -1.2913442 -1.6651672 ] [-0.31399584 -0.65337676 -2.0034332 -0.513056 -3.3274145 -2.588097 -2.0955632 -1.3481772 -0.746664 0.59745276] [-0.87051255 -1.0249312 0.31408837 -2.5855443 -2.6712184 -2.4840863 -0.3284725 -2.6951175 -3.059017 -0.24590452] [-2.616301 -1.3642083 -2.3690586 -0.4042498 1.5626168 0.07722961 -1.4916153 -2.440919 -1.9397142 -1.0103451 ] [-1.3389566 -3.5132031 -0.1133483 -1.1628265 -1.78918 -0.6527704 -1.4216125 -1.4852095 -1.2783729 -1.4020622 ] [-1.6177309 -1.360348 -0.06682833 -0.52998954 -1.2775337 -1.3549205 -2.001861 -1.0209016 0.62509155 -0.01772049] [-2.4342709 -0.04753882 0.7040024 -1.4560474 -2.6424725 -0.81225544 -1.7372413 0.13908695 -1.437011 -0.4598434 ] [-0.4760613 -1.4719555 -2.4504972 -1.0680839 -2.2662294 -3.324716 -2.8257577 -2.8070786 -1.853198 -2.9180646 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'valid', 'dilations': [2, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1049.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.6616 -0.9999 -0.2176 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.7295 (2,1,.,.) = 1.0989 (3,1,.,.) = -2.0639 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 1.56313658e+00 1.85669586e-01 1.05473721e+00 1.40552866e+00 4.06649476e-03 -1.00167763e+00 8.97695184e-01 -1.75364271e-01 9.05775487e-01 1.94871223e+00] [ 8.33345115e-01 1.17470455e+00 6.43962681e-01 1.41665673e+00 1.32367384e+00 1.03562927e+00 9.42983091e-01 3.58083397e-01 4.31286365e-01 1.54613721e+00] [-1.96958154e-01 1.49835289e+00 -3.69054884e-01 -3.93622339e-01 -8.46268423e-03 1.28306401e+00 1.38734031e+00 -2.42636040e-01 2.93051839e-01 6.78952634e-01] [ 1.06318140e+00 8.10624421e-01 -3.45179856e-01 -1.27634418e+00 3.02082926e-01 -8.50019604e-02 -3.93337965e-01 3.15083176e-01 2.01822773e-01 3.45986605e-01] [ 2.52652884e-01 1.74523443e-01 7.87949204e-01 1.22817433e+00 4.51199472e-01 9.84343469e-01 -2.67287977e-02 1.48083115e+00 7.66715944e-01 6.08989716e-01] [ 4.11826409e-02 2.69405752e-01 1.50873184e+00 9.55788732e-01 -7.80474484e-01 1.06299901e+00 8.03905845e-01 7.74599969e-01 1.34301281e+00 -2.93258280e-01] [-1.89799175e-01 -5.26831686e-01 -1.13759196e+00 9.62370157e-01 3.85939367e-02 -6.81562543e-01 
1.30053008e+00 7.20387220e-01 1.59961784e+00 -3.71437930e-02] [ 7.10236967e-01 1.33842015e+00 1.83497921e-01 5.53763986e-01 1.04550648e+00 9.51925874e-01 1.31128430e+00 2.10802221e+00 1.48605093e-01 -1.12540364e-01] [ 1.14868891e+00 1.09300172e+00 9.19894934e-01 9.62154984e-01 8.53865802e-01 8.97460461e-01 -8.19518864e-01 8.06948006e-01 9.47253942e-01 -2.74773538e-01] [ 7.09476769e-01 7.32726276e-01 4.43433374e-01 -1.11195631e-03 2.17889905e+00 4.56297129e-01 -1.50849700e-01 9.98813748e-01 6.50576234e-01 1.84734833e+00]] [[-1.68250418e+00 -1.02541581e-01 -1.24751568e+00 -3.32381785e-01 -1.71106720e+00 -1.14468291e-01 2.13949665e-01 -1.10707057e+00 -7.41403818e-01 7.72514641e-01] [ 1.25636801e-01 7.03538135e-02 -1.92411923e+00 -7.86602616e-01 2.86123931e-01 2.30894375e+00 -1.77691793e+00 2.19294820e-02 -1.09681034e+00 -8.02487016e-01] [-1.06289399e+00 -3.98570323e+00 -3.58512670e-01 -1.36382246e+00 3.28629881e-01 1.93104959e+00 -1.52400517e+00 6.84707403e-01 -2.46254027e-01 -1.77616096e+00] [-3.36456776e-01 -2.12930918e+00 -1.33358407e+00 -1.27926826e+00 -8.76259387e-01 -1.71587753e+00 -6.00192308e-01 -6.33426309e-01 5.25408745e-01 -2.21390152e+00] [-1.77677572e+00 -1.22348452e+00 6.03418708e-01 -1.58242548e+00 5.02408922e-01 -4.23195988e-01 -4.76126403e-01 -7.42027700e-01 -5.77106714e-01 -1.38633633e+00] [-1.25149655e+00 -2.27518964e+00 -2.65600234e-02 -9.34095442e-01 1.21179366e+00 -8.88045251e-01 -6.03027225e-01 -1.80470213e-01 -1.94629455e+00 -1.21130502e+00] [-8.84873986e-01 -9.78630304e-01 3.90587956e-01 -8.73209417e-01 -1.85904062e+00 -2.86699867e+00 -3.28567438e-02 6.80004716e-01 -2.22179914e+00 -1.62552550e-01] [-1.59240103e+00 -1.60691664e-01 -1.51108456e+00 -1.38774562e+00 -1.86837387e+00 3.91175896e-01 8.56047511e-01 -2.31743407e+00 5.49714506e-01 2.33477801e-01] [-1.55948734e+00 -2.37151176e-01 -4.98186022e-01 -2.02908874e+00 -1.35213923e+00 -1.06091034e+00 -3.25023580e+00 -1.45861411e+00 1.05419910e+00 4.89171892e-01] [ 3.50730360e-01 -9.75910425e-01 
-3.69549930e-01 -1.77264130e+00 5.12776256e-01 -2.21206236e+00 -7.10695326e-01 -1.72064519e+00 -7.37683296e-01 -2.26592946e+00]] [[ 1.93282235e+00 -9.34238195e-01 -2.23160982e+00 -1.81725532e-01 -1.84172463e+00 1.17697716e+00 1.76381695e+00 -5.10209942e+00 -5.04947710e+00 -2.85718966e+00] [ 1.73350021e-01 1.90702975e+00 2.93509078e+00 -4.12349850e-01 5.51587343e-01 -5.60022712e-01 9.36109245e-01 -1.69888687e+00 -1.27577138e+00 1.78082180e+00] [ 2.54762983e+00 -1.30577374e+00 3.39973068e+00 2.17360950e+00 1.07840168e+00 -2.55941176e+00 3.59034956e-01 -5.28802109e+00 1.29627383e+00 2.26047039e+00] [-1.89626068e-01 1.84209716e+00 -2.89413571e+00 -3.95008922e+00 -7.69413829e-01 1.00175107e+00 1.17958522e+00 2.06176305e+00 -2.97124743e+00 1.84228921e+00] [ 1.62340927e+00 -2.82516789e+00 -2.65861225e+00 -1.42948174e+00 6.99842989e-01 3.02020788e+00 -2.09324741e+00 2.10342407e+00 -2.89861727e+00 6.91397250e-01] [-7.13853955e-01 -2.54280186e+00 1.37337792e+00 -6.44706845e-01 -2.30228162e+00 -2.02552462e+00 6.59461677e-01 -5.97199798e-01 -1.69710004e+00 4.23092985e+00] [ 2.58066511e+00 2.32192135e+00 -5.05389214e+00 3.32473278e+00 2.18333435e+00 2.30486321e+00 -7.86562061e+00 1.38212633e+00 -1.07960820e+00 4.11551982e-01] [-3.21401072e+00 3.45254374e+00 -1.53197264e-02 7.16709197e-01 -3.09292437e-03 4.81824018e-02 -3.33930349e+00 6.33189380e-01 -1.65173829e-01 -3.16085291e+00] [ 7.24944651e-01 1.98785618e-01 3.30488831e-01 -4.72230196e-01 5.98148286e-01 -1.89660168e+00 6.00219369e-01 4.01569724e-01 -1.53839755e+00 1.93953300e+00] [-2.03038454e+00 -2.37665629e+00 -1.42309129e+00 -5.99005699e+00 1.65992939e+00 -7.36448348e-01 -4.67249483e-01 -3.91829640e-01 -6.88640952e-01 -8.62378001e-01]]]]; ov_res: [[[[ 1.56313658e+00 1.85669586e-01 1.05473721e+00 1.40552866e+00 4.06649476e-03 -1.00167763e+00 8.97695184e-01 -1.75364271e-01 9.05775487e-01 1.94871223e+00] [ 8.33345115e-01 1.17470455e+00 6.43962681e-01 1.41665673e+00 1.32367384e+00 1.03562927e+00 9.42983091e-01 3.58083397e-01 
4.31286365e-01 1.54613721e+00] [-1.96958154e-01 1.49835289e+00 -3.69054884e-01 -3.93622339e-01 -8.46268423e-03 1.28306401e+00 1.38734031e+00 -2.42636040e-01 2.93051839e-01 6.78952634e-01] [ 1.06318140e+00 8.10624421e-01 -3.45179856e-01 -1.27634418e+00 3.02082926e-01 -8.50019604e-02 -3.93337965e-01 3.15083176e-01 2.01822773e-01 3.45986605e-01] [ 2.52652884e-01 1.74523443e-01 7.87949204e-01 1.22817433e+00 4.51199472e-01 9.84343469e-01 -2.67287977e-02 1.48083115e+00 7.66715944e-01 6.08989716e-01] [ 4.11826409e-02 2.69405752e-01 1.50873184e+00 9.55788732e-01 -7.80474484e-01 1.06299901e+00 8.03905845e-01 7.74599969e-01 1.34301281e+00 -2.93258280e-01] [-1.89799175e-01 -5.26831686e-01 -1.13759196e+00 9.62370157e-01 3.85939367e-02 -6.81562543e-01 1.30053008e+00 7.20387220e-01 1.59961784e+00 -3.71437930e-02] [ 7.10236967e-01 1.33842015e+00 1.83497921e-01 5.53763986e-01 1.04550648e+00 9.51925874e-01 1.31128430e+00 2.10802221e+00 1.48605093e-01 -1.12540364e-01] [ 1.14868891e+00 1.09300172e+00 9.19894934e-01 9.62154984e-01 8.53865802e-01 8.97460461e-01 -8.19518864e-01 8.06948006e-01 9.47253942e-01 -2.74773538e-01] [ 7.09476769e-01 7.32726276e-01 4.43433374e-01 -1.11195631e-03 2.17889905e+00 4.56297129e-01 -1.50849700e-01 9.98813748e-01 6.50576234e-01 1.84734833e+00]] [[-1.68250418e+00 -1.02541581e-01 -1.24751568e+00 -3.32381785e-01 -1.71106720e+00 -1.14468291e-01 2.13949665e-01 -1.10707057e+00 -7.41403818e-01 7.72514641e-01] [ 1.25636801e-01 7.03538135e-02 -1.92411923e+00 -7.86602616e-01 2.86123931e-01 2.30894375e+00 -1.77691793e+00 2.19294820e-02 -1.09681034e+00 -8.02487016e-01] [-1.06289399e+00 -3.98570323e+00 -3.58512670e-01 -1.36382246e+00 3.28629881e-01 1.93104959e+00 -1.52400517e+00 6.84707403e-01 -2.46254027e-01 -1.77616096e+00] [-3.36456776e-01 -2.12930918e+00 -1.33358407e+00 -1.27926826e+00 -8.76259387e-01 -1.71587753e+00 -6.00192308e-01 -6.33426309e-01 5.25408745e-01 -2.21390152e+00] [-1.77677572e+00 -1.22348452e+00 6.03418708e-01 -1.58242548e+00 5.02408922e-01 
-4.23195988e-01 -4.76126403e-01 -7.42027700e-01 -5.77106714e-01 -1.38633633e+00] [-1.25149655e+00 -2.27518964e+00 -2.65600234e-02 -9.34095442e-01 1.21179366e+00 -8.88045251e-01 -6.03027225e-01 -1.80470213e-01 -1.94629455e+00 -1.21130502e+00] [-8.84873986e-01 -9.78630304e-01 3.90587956e-01 -8.73209417e-01 -1.85904062e+00 -2.86699867e+00 -3.28567438e-02 6.80004716e-01 -2.22179914e+00 -1.62552550e-01] [-1.59240103e+00 -1.60691664e-01 -1.51108456e+00 -1.38774562e+00 -1.86837387e+00 3.91175896e-01 8.56047511e-01 -2.31743407e+00 5.49714506e-01 2.33477801e-01] [-1.55948734e+00 -2.37151176e-01 -4.98186022e-01 -2.02908874e+00 -1.35213923e+00 -1.06091034e+00 -3.25023580e+00 -1.45861411e+00 1.05419910e+00 4.89171892e-01] [ 3.50730360e-01 -9.75910425e-01 -3.69549930e-01 -1.77264130e+00 5.12776256e-01 -2.21206236e+00 -7.10695326e-01 -1.72064519e+00 -7.37683296e-01 -2.26592946e+00]] [[ 1.93282235e+00 -9.34238195e-01 -2.23160982e+00 -1.81725532e-01 -1.84172463e+00 1.17697716e+00 1.76381695e+00 -5.10209942e+00 -5.04947710e+00 -2.85718966e+00] [ 1.73350021e-01 1.90702975e+00 2.93509078e+00 -4.12349850e-01 5.51587343e-01 -5.60022712e-01 9.36109245e-01 -1.69888687e+00 -1.27577138e+00 1.78082180e+00] [ 2.54762983e+00 -1.30577374e+00 3.39973068e+00 2.17360950e+00 1.07840168e+00 -2.55941176e+00 3.59034956e-01 -5.28802109e+00 1.29627383e+00 2.26047039e+00] [-1.89626068e-01 1.84209716e+00 -2.89413571e+00 -3.95008922e+00 -7.69413829e-01 1.00175107e+00 1.17958522e+00 2.06176305e+00 -2.97124743e+00 1.84228921e+00] [ 1.62340927e+00 -2.82516789e+00 -2.65861225e+00 -1.42948174e+00 6.99842989e-01 3.02020788e+00 -2.09324741e+00 2.10342407e+00 -2.89861727e+00 6.91397250e-01] [-7.13853955e-01 -2.54280186e+00 1.37337792e+00 -6.44706845e-01 -2.30228162e+00 -2.02552462e+00 6.59461677e-01 -5.97199798e-01 -1.69710004e+00 4.23092985e+00] [ 2.58066511e+00 2.32192135e+00 -5.05389214e+00 3.32473278e+00 2.18333435e+00 2.30486321e+00 -7.86562061e+00 1.38212633e+00 -1.07960820e+00 4.11551982e-01] 
[-3.21401072e+00 3.45254374e+00 -1.53197264e-02 7.16709197e-01 -3.09292437e-03 4.81824018e-02 -3.33930349e+00 6.33189380e-01 -1.65173829e-01 -3.16085291e+00] [ 7.24944651e-01 1.98785618e-01 3.30488831e-01 -4.72230196e-01 5.98148286e-01 -1.89660168e+00 6.00219369e-01 4.01569724e-01 -1.53839755e+00 1.93953300e+00] [-2.03038454e+00 -2.37665629e+00 -1.42309129e+00 -5.99005699e+00 1.65992939e+00 -7.36448348e-01 -4.67249483e-01 -3.91829640e-01 -6.88640952e-01 -8.62378001e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'same', 'dilations': [2, 2], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1051.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.9065 0.0879 0.9582 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 2.2492 (2,1,.,.) = -2.1202 (3,1,.,.) = 0.1952 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 1.9233142 4.593846 -1.4328393 4.424625 2.6202977 0.3019999 0.24123515 2.4181845 2.8807166 0.39625758] [ 1.7383057 7.205799 2.326718 4.8250957 3.5020838 3.2918875 1.0940305 -2.605554 -2.6977332 -6.4245443 ] [ 2.0894659 3.2466295 0.2928973 -1.6489105 5.86722 -2.0969281 1.688183 4.217514 3.9046798 0.78216577] [-0.8804524 5.556735 1.9372678 0.6912443 3.1812005 -0.40737972 0.6018191 -0.62690634 3.891671 4.5276046 ] [ 1.141856 2.223696 4.1611547 -0.355334 4.6284785 2.3666503 -0.3947672 2.7036943 -2.5526245 0.90322584] [-0.19021651 3.5530899 -0.64651465 -3.9829707 2.2878678 -0.5950738 3.5925179 0.6046203 4.088224 1.4587216 ] [ 3.163328 3.2216682 0.05015366 1.7099396 1.8165675 1.2214949 3.8251545 4.874106 1.2755383 3.199064 ] [ 3.2936063 1.3605535 3.6294188 -0.8571624 5.7703843 -2.0822918 1.5635579 5.8518925 1.9405521 0.01981593] [ 0.80554175 3.063533 4.207107 4.683492 -2.0181112 2.4891748 2.4640026 0.85891396 1.5464761 3.8663607 ] [ 1.0310686 3.4886053 1.7106863 5.1183953 3.7734332 -0.6738952 -0.05857629 -2.5611129 -0.24625388 
0.3470051 ]] [[-0.9402736 2.682709 1.1314234 -0.79149073 -4.4557548 -1.6338998 -0.5906617 0.55835044 0.7813205 0.86376303] [-2.096323 -4.50431 3.6257818 5.1636324 1.3318278 -2.9415452 1.3757358 -5.129636 3.2918305 1.1421715 ] [-5.001038 1.3726753 1.4960077 1.6717029 -2.7859862 3.1809769 -6.479688 3.1785533 -0.12725832 -0.04981067] [ 3.2921808 2.844036 -4.0500374 0.03531587 0.18753473 0.61637515 -2.279094 1.6060125 -1.2507167 3.9015653 ] [-0.24128918 0.85534847 4.9719973 0.7447335 0.8841859 0.8829139 -1.144304 1.641589 0.3658883 -0.15571223] [ 2.7720737 2.0974677 0.88442445 1.409503 -5.5810094 -1.3240225 1.7349161 0.83830184 2.0064723 -2.3734057 ] [ 1.4582871 2.825591 -0.17800786 -3.314489 -2.143711 -1.9892296 -0.2875697 3.1620495 2.4584014 0.91308755] [-0.10598039 -1.778176 -0.75406635 1.3369913 -2.8368862 -1.6927894 0.57456064 1.2551572 -3.6933572 -1.1040394 ] [ 1.1264975 -0.8980395 -1.2394001 2.3650062 -1.2223291 4.1169925 -0.37748945 -0.948779 2.0317335 3.0890312 ] [ 7.794815 -1.0509038 0.2655996 -0.4929544 -1.790576 3.006978 0.83921784 0.5937056 -1.0796465 1.3895785 ]] [[ 0.9312002 1.2339766 0.6552714 1.0251404 1.2387966 1.2577221 0.78414303 1.1946099 1.1255598 0.71357363] [ 0.7763082 0.95691127 0.9175926 0.9312135 1.2547524 0.6631414 0.7163754 1.2135055 1.2394031 0.9041583 ] [ 1.167679 1.1720349 1.1013179 0.98934746 0.69975907 1.2578923 0.9031735 0.8999818 1.0879673 0.841969 ] [ 0.63554794 0.96855944 0.72044724 0.90441585 1.0874374 0.8296429 1.0237045 0.5876307 0.8620404 0.5921822 ] [ 1.0715244 1.2454052 0.8685041 1.0863485 1.0736567 1.0093918 0.93713707 1.1097872 1.2242322 1.3257912 ] [ 0.6814643 1.2243029 0.72867364 0.83364946 1.3289056 1.2237754 0.8475796 0.8858102 1.3452021 1.0027552 ] [ 1.2295982 0.9625933 0.79740787 0.9372033 0.6920049 0.90317696 0.82089794 1.1375823 1.239148 0.8961318 ] [ 1.22185 1.0943385 0.9116299 0.9849469 0.99330235 0.5666433 0.9118853 1.1350936 0.8267957 0.8590011 ] [ 1.3423364 0.42742565 1.153015 1.0458708 1.0855484 0.96427345 
0.9239251 0.88887185 0.9242613 1.1577334 ] [ 0.81165737 1.1573232 1.0561241 1.0797592 0.89382046 1.0229172 1.0509973 1.0136579 0.65300435 0.88367033]]]]; ov_res: [[[[ 1.9233142 4.593846 -1.4328393 4.424625 2.6202977 0.3019999 0.24123515 2.4181845 2.8807166 0.39625758] [ 1.7383057 7.205799 2.326718 4.8250957 3.5020838 3.2918875 1.0940305 -2.605554 -2.6977332 -6.4245443 ] [ 2.0894659 3.2466295 0.2928973 -1.6489105 5.86722 -2.0969281 1.688183 4.217514 3.9046798 0.78216577] [-0.8804524 5.556735 1.9372678 0.6912443 3.1812005 -0.40737972 0.6018191 -0.62690634 3.891671 4.5276046 ] [ 1.141856 2.223696 4.1611547 -0.355334 4.6284785 2.3666503 -0.3947672 2.7036943 -2.5526245 0.90322584] [-0.19021651 3.5530899 -0.64651465 -3.9829707 2.2878678 -0.5950738 3.5925179 0.6046203 4.088224 1.4587216 ] [ 3.163328 3.2216682 0.05015366 1.7099396 1.8165675 1.2214949 3.8251545 4.874106 1.2755383 3.199064 ] [ 3.2936063 1.3605535 3.6294188 -0.8571624 5.7703843 -2.0822918 1.5635579 5.8518925 1.9405521 0.01981593] [ 0.80554175 3.063533 4.207107 4.683492 -2.0181112 2.4891748 2.4640026 0.85891396 1.5464761 3.8663607 ] [ 1.0310686 3.4886053 1.7106863 5.1183953 3.7734332 -0.6738952 -0.05857629 -2.5611129 -0.24625388 0.3470051 ]] [[-0.9402736 2.682709 1.1314234 -0.79149073 -4.4557548 -1.6338998 -0.5906617 0.55835044 0.7813205 0.86376303] [-2.096323 -4.50431 3.6257818 5.1636324 1.3318278 -2.9415452 1.3757358 -5.129636 3.2918305 1.1421715 ] [-5.001038 1.3726753 1.4960077 1.6717029 -2.7859862 3.1809769 -6.479688 3.1785533 -0.12725832 -0.04981067] [ 3.2921808 2.844036 -4.0500374 0.03531587 0.18753473 0.61637515 -2.279094 1.6060125 -1.2507167 3.9015653 ] [-0.24128918 0.85534847 4.9719973 0.7447335 0.8841859 0.8829139 -1.144304 1.641589 0.3658883 -0.15571223] [ 2.7720737 2.0974677 0.88442445 1.409503 -5.5810094 -1.3240225 1.7349161 0.83830184 2.0064723 -2.3734057 ] [ 1.4582871 2.825591 -0.17800786 -3.314489 -2.143711 -1.9892296 -0.2875697 3.1620495 2.4584014 0.91308755] [-0.10598039 -1.778176 -0.75406635 
1.3369913 -2.8368862 -1.6927894 0.57456064 1.2551572 -3.6933572 -1.1040394 ] [ 1.1264975 -0.8980395 -1.2394001 2.3650062 -1.2223291 4.1169925 -0.37748945 -0.948779 2.0317335 3.0890312 ] [ 7.794815 -1.0509038 0.2655996 -0.4929544 -1.790576 3.006978 0.83921784 0.5937056 -1.0796465 1.3895785 ]] [[ 0.9312002 1.2339766 0.6552714 1.0251404 1.2387966 1.2577221 0.78414303 1.1946099 1.1255598 0.71357363] [ 0.7763082 0.95691127 0.9175926 0.9312135 1.2547524 0.6631414 0.7163754 1.2135055 1.2394031 0.9041583 ] [ 1.167679 1.1720349 1.1013179 0.98934746 0.69975907 1.2578923 0.9031735 0.8999818 1.0879673 0.841969 ] [ 0.63554794 0.96855944 0.72044724 0.90441585 1.0874374 0.8296429 1.0237045 0.5876307 0.8620404 0.5921822 ] [ 1.0715244 1.2454052 0.8685041 1.0863485 1.0736567 1.0093918 0.93713707 1.1097872 1.2242322 1.3257912 ] [ 0.6814643 1.2243029 0.72867364 0.83364946 1.3289056 1.2237754 0.8475796 0.8858102 1.3452021 1.0027552 ] [ 1.2295982 0.9625933 0.79740787 0.9372033 0.6920049 0.90317696 0.82089794 1.1375823 1.239148 0.8961318 ] [ 1.22185 1.0943385 0.9116299 0.9849469 0.99330235 0.5666433 0.9118853 1.1350936 0.8267957 0.8590011 ] [ 1.3423364 0.42742565 1.153015 1.0458708 1.0855484 0.96427345 0.9239251 0.88887185 0.9242613 1.1577334 ] [ 0.81165737 1.1573232 1.0561241 1.0797592 0.89382046 1.0229172 1.0509973 1.0136579 0.65300435 0.88367033]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'valid', 'dilations': [2, 2], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1053.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.7332 0.2569 0.5314 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.2900 (2,1,.,.) = -1.2037 (3,1,.,.) = 0.3448 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 4.86476123e-01 1.22725642e+00 6.84216917e-01 9.13024783e-01 9.51662123e-01 5.56493819e-01 1.16225731e+00 8.23162317e-01 1.09765279e+00 4.31038052e-01] [ 1.22273302e+00 -1.43790320e-01 5.34794986e-01 8.73285756e-02 6.56654775e-01 1.11209512e+00 6.08904004e-01 6.22758627e-01 6.88815832e-01 9.78024304e-01] [ 6.07404470e-01 2.72967443e-02 7.97829747e-01 1.10557532e+00 1.07814765e+00 9.35649097e-01 4.84659970e-01 8.25531185e-01 8.58792961e-01 1.05069375e+00] [ 5.98677874e-01 3.52126509e-01 8.42481971e-01 5.46316683e-01 1.42255640e+00 7.66362071e-01 9.35077786e-01 5.99322200e-01 7.36986399e-01 5.07911623e-01] [ 1.82077557e-01 8.41196239e-01 8.91223788e-01 3.53794903e-01 7.50289559e-01 6.76315963e-01 3.95690680e-01 6.25246346e-01 9.93540287e-01 7.86858320e-01] [ 7.46078610e-01 1.10623169e+00 7.11570680e-01 5.22427201e-01 1.12874472e+00 1.15592110e+00 4.63589251e-01 8.47152591e-01 7.83281744e-01 6.10896230e-01] [ 4.52547282e-01 5.55702329e-01 7.65848517e-01 1.08773923e+00 3.19201529e-01 2.43204534e-01 9.40348148e-01 
5.87602079e-01 9.15746331e-01 6.34246826e-01] [ 3.61162752e-01 7.74765134e-01 7.45261967e-01 9.52667058e-01 7.96504021e-01 8.48636687e-01 4.39591408e-01 3.31106961e-01 8.24278295e-01 6.90194666e-01] [ 7.90229440e-01 5.77487648e-01 1.49459887e+00 9.73094344e-01 4.41064358e-01 1.65502191e+00 1.14560771e+00 1.16508114e+00 5.33483446e-01 1.32082915e+00] [ 6.78503573e-01 6.74534023e-01 9.93062615e-01 8.73230338e-01 7.48107672e-01 7.38507271e-01 2.93662310e-01 6.03897989e-01 7.11179852e-01 6.32347822e-01]] [[ 4.22094941e-01 -1.04361427e+00 -1.59242690e-01 -1.12683403e+00 -1.95179150e-01 4.46126834e-02 2.18039417e+00 -9.26735029e-02 3.06095064e-01 -1.02201208e-01] [ 3.64419490e-01 -1.34073508e+00 1.17484517e-01 -2.27876902e-01 -1.50636923e+00 2.41600990e-01 -5.43698609e-01 2.79595971e-01 2.52761340e+00 1.40804458e+00] [ 1.55011964e+00 1.67649007e+00 4.26413834e-01 1.28762409e-01 -1.28364623e-01 5.92278957e-01 2.15069652e-01 3.34529839e-02 5.30988634e-01 1.90078926e+00] [-1.27194786e+00 2.86262482e-01 -2.51106095e+00 -1.48710728e+00 7.74991989e-01 -1.09176114e-02 1.09150767e-01 2.82636613e-01 -2.65886140e+00 1.11861062e+00] [ 1.34902596e+00 -4.68532175e-01 -1.00213349e+00 -1.58372179e-01 -5.42458057e-01 9.84451175e-01 -3.47709060e-01 2.10130408e-01 -1.02490020e+00 1.01477993e+00] [-1.82307988e-01 -6.96894348e-01 -1.18632324e-01 -3.39962095e-01 -1.91770002e-01 1.78062093e+00 3.12650770e-01 6.53256699e-02 -1.16746080e+00 3.29823941e-01] [ 2.32876778e+00 -4.18247581e-01 7.82820046e-01 -7.42844045e-01 1.99470848e-01 1.51048005e+00 -1.28855944e+00 -1.92211568e+00 1.38705981e+00 -5.86255550e-01] [ 2.21264315e+00 -2.62370080e-01 -6.23111665e-01 -8.50904107e-01 -1.60377502e-01 6.04711175e-01 1.42100692e+00 -2.89178103e-01 3.25227046e+00 -1.61280715e+00] [ 2.87586546e+00 1.05072951e+00 -1.37456942e+00 1.42563033e+00 1.50515270e+00 2.23836017e+00 1.84065253e-01 1.50130495e-01 -2.57165408e+00 4.73344028e-01] [-7.21087381e-02 2.78473282e+00 2.24099731e+00 8.75776887e-01 
-9.53641951e-01 -1.79082692e+00 1.26942313e+00 -7.90380359e-01 -9.48512435e-01 -6.15508795e-01]] [[ 2.96860456e-01 3.08372498e-01 4.09849614e-01 6.53791368e-01 6.92330062e-01 8.38102758e-01 5.36913097e-01 5.20589292e-01 2.69687712e-01 1.03551936e+00] [ 4.07629371e-01 8.01317155e-01 5.08309186e-01 -7.38611668e-02 2.65793920e-01 6.40333235e-01 7.56968737e-01 9.52763259e-01 7.69652426e-02 1.03971374e+00] [ 1.53346151e-01 8.38784814e-01 1.40903756e-01 7.19471455e-01 4.73767787e-01 1.77601814e-01 -9.74340960e-02 -4.78491746e-02 6.76867068e-01 4.17170465e-01] [ 1.08969915e+00 4.46012795e-01 1.00528479e+00 6.96832955e-01 8.60947967e-01 1.04594743e+00 6.80286705e-01 1.32793784e-01 6.18141592e-01 9.37841475e-01] [ 5.88002026e-01 5.40798187e-01 -6.09513223e-01 1.08216393e+00 3.67189944e-01 3.07215035e-01 2.48901695e-01 6.40554965e-01 2.78288692e-01 7.63107121e-01] [ 6.22405946e-01 1.40712038e-01 1.16852567e-01 6.18893683e-01 4.03667241e-01 5.53353429e-01 4.58142340e-01 6.26125634e-01 4.20079798e-01 7.95852304e-01] [-1.15779303e-02 5.24415791e-01 7.68695891e-01 -3.32925990e-02 5.26632845e-01 -1.02724113e-01 2.83603281e-01 4.16396976e-01 1.30423725e-01 8.23450804e-01] [ 6.22639775e-01 -8.86342078e-02 7.89468169e-01 5.60566843e-01 3.40863138e-01 5.50864756e-01 3.75044018e-01 7.31480837e-01 6.42641425e-01 4.56591398e-01] [ 4.16179061e-01 5.70120454e-01 4.96227175e-01 1.30805838e+00 1.10614705e+00 5.83425879e-01 8.00186813e-01 3.27394195e-02 8.57852638e-01 1.07507992e+00] [ 2.44478568e-01 -1.50533497e-01 4.89624351e-01 5.02825379e-01 4.15721208e-01 7.12613463e-01 -6.55681069e-04 6.11321449e-01 2.88010597e-01 7.24537596e-02]]]]; ov_res: [[[[ 4.86476123e-01 1.22725642e+00 6.84216917e-01 9.13024783e-01 9.51662123e-01 5.56493819e-01 1.16225731e+00 8.23162317e-01 1.09765279e+00 4.31038052e-01] [ 1.22273302e+00 -1.43790320e-01 5.34794986e-01 8.73285756e-02 6.56654775e-01 1.11209512e+00 6.08904004e-01 6.22758627e-01 6.88815832e-01 9.78024304e-01] [ 6.07404470e-01 2.72967443e-02 
7.97829747e-01 1.10557532e+00 1.07814765e+00 9.35649097e-01 4.84659970e-01 8.25531185e-01 8.58792961e-01 1.05069375e+00] [ 5.98677874e-01 3.52126509e-01 8.42481971e-01 5.46316683e-01 1.42255640e+00 7.66362071e-01 9.35077786e-01 5.99322200e-01 7.36986399e-01 5.07911623e-01] [ 1.82077557e-01 8.41196239e-01 8.91223788e-01 3.53794903e-01 7.50289559e-01 6.76315963e-01 3.95690680e-01 6.25246346e-01 9.93540287e-01 7.86858320e-01] [ 7.46078610e-01 1.10623169e+00 7.11570680e-01 5.22427201e-01 1.12874472e+00 1.15592110e+00 4.63589251e-01 8.47152591e-01 7.83281744e-01 6.10896230e-01] [ 4.52547282e-01 5.55702329e-01 7.65848517e-01 1.08773923e+00 3.19201529e-01 2.43204534e-01 9.40348148e-01 5.87602079e-01 9.15746331e-01 6.34246826e-01] [ 3.61162752e-01 7.74765134e-01 7.45261967e-01 9.52667058e-01 7.96504021e-01 8.48636687e-01 4.39591408e-01 3.31106961e-01 8.24278295e-01 6.90194666e-01] [ 7.90229440e-01 5.77487648e-01 1.49459887e+00 9.73094344e-01 4.41064358e-01 1.65502191e+00 1.14560771e+00 1.16508114e+00 5.33483446e-01 1.32082915e+00] [ 6.78503573e-01 6.74534023e-01 9.93062615e-01 8.73230338e-01 7.48107672e-01 7.38507271e-01 2.93662310e-01 6.03897989e-01 7.11179852e-01 6.32347822e-01]] [[ 4.22094941e-01 -1.04361427e+00 -1.59242690e-01 -1.12683403e+00 -1.95179150e-01 4.46126834e-02 2.18039417e+00 -9.26735029e-02 3.06095064e-01 -1.02201208e-01] [ 3.64419490e-01 -1.34073508e+00 1.17484517e-01 -2.27876902e-01 -1.50636923e+00 2.41600990e-01 -5.43698609e-01 2.79595971e-01 2.52761340e+00 1.40804458e+00] [ 1.55011964e+00 1.67649007e+00 4.26413834e-01 1.28762409e-01 -1.28364623e-01 5.92278957e-01 2.15069652e-01 3.34529839e-02 5.30988634e-01 1.90078926e+00] [-1.27194786e+00 2.86262482e-01 -2.51106095e+00 -1.48710728e+00 7.74991989e-01 -1.09176114e-02 1.09150767e-01 2.82636613e-01 -2.65886140e+00 1.11861062e+00] [ 1.34902596e+00 -4.68532175e-01 -1.00213349e+00 -1.58372179e-01 -5.42458057e-01 9.84451175e-01 -3.47709060e-01 2.10130408e-01 -1.02490020e+00 1.01477993e+00] [-1.82307988e-01 
-6.96894348e-01 -1.18632324e-01 -3.39962095e-01 -1.91770002e-01 1.78062093e+00 3.12650770e-01 6.53256699e-02 -1.16746080e+00 3.29823941e-01] [ 2.32876778e+00 -4.18247581e-01 7.82820046e-01 -7.42844045e-01 1.99470848e-01 1.51048005e+00 -1.28855944e+00 -1.92211568e+00 1.38705981e+00 -5.86255550e-01] [ 2.21264315e+00 -2.62370080e-01 -6.23111665e-01 -8.50904107e-01 -1.60377502e-01 6.04711175e-01 1.42100692e+00 -2.89178103e-01 3.25227046e+00 -1.61280715e+00] [ 2.87586546e+00 1.05072951e+00 -1.37456942e+00 1.42563033e+00 1.50515270e+00 2.23836017e+00 1.84065253e-01 1.50130495e-01 -2.57165408e+00 4.73344028e-01] [-7.21087381e-02 2.78473282e+00 2.24099731e+00 8.75776887e-01 -9.53641951e-01 -1.79082692e+00 1.26942313e+00 -7.90380359e-01 -9.48512435e-01 -6.15508795e-01]] [[ 2.96860456e-01 3.08372498e-01 4.09849614e-01 6.53791368e-01 6.92330062e-01 8.38102758e-01 5.36913097e-01 5.20589292e-01 2.69687712e-01 1.03551936e+00] [ 4.07629371e-01 8.01317155e-01 5.08309186e-01 -7.38611668e-02 2.65793920e-01 6.40333235e-01 7.56968737e-01 9.52763259e-01 7.69652426e-02 1.03971374e+00] [ 1.53346151e-01 8.38784814e-01 1.40903756e-01 7.19471455e-01 4.73767787e-01 1.77601814e-01 -9.74340960e-02 -4.78491746e-02 6.76867068e-01 4.17170465e-01] [ 1.08969915e+00 4.46012795e-01 1.00528479e+00 6.96832955e-01 8.60947967e-01 1.04594743e+00 6.80286705e-01 1.32793784e-01 6.18141592e-01 9.37841475e-01] [ 5.88002026e-01 5.40798187e-01 -6.09513223e-01 1.08216393e+00 3.67189944e-01 3.07215035e-01 2.48901695e-01 6.40554965e-01 2.78288692e-01 7.63107121e-01] [ 6.22405946e-01 1.40712038e-01 1.16852567e-01 6.18893683e-01 4.03667241e-01 5.53353429e-01 4.58142340e-01 6.26125634e-01 4.20079798e-01 7.95852304e-01] [-1.15779303e-02 5.24415791e-01 7.68695891e-01 -3.32925990e-02 5.26632845e-01 -1.02724113e-01 2.83603281e-01 4.16396976e-01 1.30423725e-01 8.23450804e-01] [ 6.22639775e-01 -8.86342078e-02 7.89468169e-01 5.60566843e-01 3.40863138e-01 5.50864756e-01 3.75044018e-01 7.31480837e-01 6.42641425e-01 
4.56591398e-01] [ 4.16179061e-01 5.70120454e-01 4.96227175e-01 1.30805838e+00 1.10614705e+00 5.83425879e-01 8.00186813e-01 3.27394195e-02 8.57852638e-01 1.07507992e+00] [ 2.44478568e-01 -1.50533497e-01 4.89624351e-01 5.02825379e-01 4.15721208e-01 7.12613463e-01 -6.55681069e-04 6.11321449e-01 2.88010597e-01 7.24537596e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': [1, 1], 'pads': 'same', 'dilations': [1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1055.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[ 0.26778552 2.8555758 -0.30522874 3.2874956 2.5153096 0.6524753 -1.1076009 -3.8565238 2.1107616 4.7565317 ] [ 0.4815965 2.6240463 -5.682421 2.2962635 3.8460293 9.598372 -3.9412405 -13.919657 -2.3512807 -2.4857144 ] [ 2.4581256 -5.36622 -13.344275 7.4308705 10.453 -4.137257 -5.6519017 -4.056506 -5.411113 0.3747221 ] [ -0.4641714 -5.4894266 -1.939761 4.102547 -11.845893 -5.01705 -2.347295 -4.4534125 -10.675036 -1.3172767 ] [ 1.1713681 -9.462657 1.6311274 -4.9708395 3.2106736 1.5437645 2.2812903 -1.4202102 -3.641443 -4.840359 ] [ -0.8381568 -8.346823 7.597745 -3.8330822 -0.83789164 1.0540774 5.014246 2.3013573 -9.886293 4.6743035 ] [ 8.102788 8.150285 2.1417797 -1.6505005 -1.8345518 -0.6203909 3.260672 2.0792634 -10.688479 0.24479629] [ 7.313789 -0.92248917 4.215695 -8.330518 -6.285721 1.978085 -2.3520596 -11.241799 -4.886957 -8.897206 ] [ 5.297274 2.8379505 2.313053 1.9738057 -11.064879 -10.5944605 6.2196903 -5.511531 -8.383923 -6.650249 ] [ -3.3038807 -10.993504 -8.938036 -1.8706609 1.2773895 7.579279 -1.7989403 -5.3118277 -5.864798 -2.276923 ]]]]; ov_res: [[[[ 0.26778552 2.8555758 -0.30522874 3.2874959 2.5153096 0.65247554 -1.1076009 -3.8565238 2.1107614 4.7565312 ] [ 0.48159626 2.6240463 -5.6824217 2.2962635 3.8460283 9.598371 -3.94124 
-13.919657 -2.3512805 -2.485715 ] [ 2.458126 -5.3662186 -13.344275 7.4308705 10.453001 -4.1372566 -5.651902 -4.0565057 -5.4111123 0.37472197] [ -0.46417165 -5.4894266 -1.9397615 4.1025457 -11.845893 -5.0170507 -2.347296 -4.4534125 -10.6750345 -1.3172767 ] [ 1.171368 -9.462657 1.6311277 -4.970839 3.2106738 1.5437645 2.2812908 -1.4202096 -3.6414425 -4.8403587 ] [ -0.8381567 -8.34682 7.5977454 -3.8330822 -0.8378917 1.0540779 5.014246 2.3013573 -9.886293 4.674304 ] [ 8.102788 8.150285 2.14178 -1.6505009 -1.8345516 -0.62039113 3.2606716 2.0792632 -10.688478 0.2447962 ] [ 7.313789 -0.9224894 4.215694 -8.330517 -6.285722 1.9780848 -2.3520596 -11.2418 -4.8869576 -8.897205 ] [ 5.2972736 2.8379505 2.3130527 1.9738059 -11.06488 -10.5944605 6.219691 -5.51153 -8.383923 -6.6502485 ] [ -3.3038812 -10.993503 -8.938036 -1.8706607 1.2773896 7.5792775 -1.7989408 -5.3118277 -5.8647976 -2.2769234 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': [1, 1], 'pads': 'valid', 'dilations': [1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1057.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[ 0.9082435 -8.410052 10.613602 1.2753241 10.163115 0.28615364 5.780318 1.0917127 ] [ -4.8744345 -2.4698775 -1.2534828 7.835161 0.6202787 -1.8434014 4.994428 -3.5674856 ] [ 3.156116 4.6317434 -0.03797104 6.081419 1.7432799 -4.0531564 -4.766114 6.19145 ] [ 4.0689473 1.4736625 9.322583 -6.399955 -1.9967401 0.23274112 -2.3306165 5.606699 ] [ -1.5108812 -4.573642 -1.4037893 -2.2292676 -13.41498 4.6788907 -1.2162105 -3.852177 ] [ 0.47112274 -8.606477 1.815066 4.5046325 2.7197247 -8.405129 7.14396 -9.083778 ] [ -1.2774963 -2.2537196 0.4261101 -5.4525456 -2.4920795 3.7271245 -5.504849 -0.36770964] [ -3.2226453 -6.0879297 2.6099277 6.033598 0.07945905 1.4834205 -7.351095 -7.7572346 ]]]]; ov_res: [[[[ 0.90824336 -8.410053 10.613602 1.2753243 10.1631155 0.28615406 5.7803173 1.0917126 ] [ -4.8744345 -2.4698777 -1.2534823 7.835163 0.6202793 -1.8434016 4.9944286 -3.5674841 ] [ 3.1561158 4.6317444 -0.03797116 6.081419 1.7432802 -4.0531564 -4.766114 6.1914506 ] [ 4.068948 1.4736623 9.322583 -6.3999553 -1.99674 0.23274064 -2.3306162 5.606699 ] [ -1.5108813 -4.5736413 -1.403789 -2.2292678 -13.414981 4.67889 -1.2162101 -3.8521771 ] [ 0.47112286 -8.606477 1.8150665 4.5046334 2.7197242 -8.405129 7.14396 -9.083778 ] [ -1.2774966 -2.253719 0.42611122 -5.4525456 
-2.49208 3.7271237 -5.504849 -0.36770946] [ -3.2226453 -6.0879283 2.6099272 6.033598 0.07945917 1.4834198 -7.351095 -7.757234 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': [1, 1], 'pads': 'same', 'dilations': [2, 2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1059.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 2.4215996 5.6786175 0.09872479 2.364379 -7.3275385 2.5542676 -5.0733576 -0.17046006 0.2351394 -0.80575967] [ 2.3942559 7.1685214 -7.6654677 -0.32485208 -11.987496 0.1187762 -2.4831781 2.2581465 0.5004131 -2.0931368 ] [ 2.401386 -5.9713597 11.732586 -6.4119596 -5.977326 -4.045125 -0.21651444 -6.896268 -0.7593714 -2.9841807 ] [ -3.191876 7.3561544 3.0609276 3.0734758 2.0039458 2.3081543 -2.38766 -0.9784003 3.450682 1.0659778 ] [ -6.677153 -2.5804636 0.24067597 -5.202843 1.0916886 -7.0897155 -1.0636555 1.4988147 0.85421985 -4.411377 ] [ 3.591509 -9.425257 10.478186 -1.3310843 3.7176597 -2.0459428 0.14918958 -10.08083 -1.2640524 0.5202397 ] [ -0.08698779 5.5704474 -0.4407781 6.4789047 -2.9440522 5.0764256 5.8733196 6.87422 -5.287901 4.503554 ] [ -8.000948 -6.486985 5.4906354 -7.999663 8.270099 -1.7994089 -1.565777 -5.3135614 5.807004 2.8934252 ] [ 0.6587911 0.68788886 2.0881093 -0.39377737 -3.684214 1.4711958 0.40289497 2.7201846 2.5350025 4.139426 ] [ 4.515341 3.3085308 -3.107132 0.47818446 1.7138883 6.145276 1.0474436 1.0633419 -2.1663642 0.26058498]]]]; ov_res: [[[[ 2.4215999 5.678617 0.09872467 2.3643787 -7.3275385 2.5542674 -5.0733576 -0.17045982 0.23513949 -0.80575997] [ 2.394256 
7.1685214 -7.6654673 -0.32485238 -11.987497 0.11877596 -2.4831781 2.2581465 0.50041306 -2.093137 ] [ 2.4013858 -5.9713597 11.732587 -6.411959 -5.9773254 -4.045125 -0.21651492 -6.8962684 -0.75937146 -2.9841812 ] [ -3.191876 7.3561544 3.0609272 3.0734754 2.0039454 2.3081546 -2.3876593 -0.97840005 3.4506817 1.0659786 ] [ -6.6771536 -2.580464 0.24067597 -5.202843 1.0916877 -7.0897145 -1.0636555 1.4988147 0.8542202 -4.4113774 ] [ 3.591509 -9.425258 10.478188 -1.3310845 3.717659 -2.0459423 0.14918931 -10.08083 -1.2640524 0.52023906] [ -0.08698779 5.5704474 -0.4407778 6.4789057 -2.9440522 5.0764256 5.8733196 6.87422 -5.2879004 4.503554 ] [ -8.000947 -6.4869857 5.4906363 -7.999665 8.270099 -1.7994089 -1.5657775 -5.313561 5.807004 2.8934252 ] [ 0.65879124 0.68788886 2.088109 -0.3937775 -3.6842136 1.4711959 0.40289533 2.7201848 2.5350027 4.1394258 ] [ 4.515341 3.3085313 -3.1071317 0.4781841 1.7138886 6.1452756 1.0474435 1.0633422 -2.1663642 0.26058486]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': [1, 1], 'pads': 'valid', 'dilations': [2, 2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1061.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 6.7163963 -4.8714933 1.5731604 1.7639717 7.70275 9.797384 ] [-0.50501174 0.8953869 2.1007512 -5.3361545 3.601065 6.487521 ] [-0.97445273 3.919782 -1.9805171 2.9511805 4.0922093 -8.226744 ] [-1.4985944 0.33271688 -4.063944 1.0397989 6.5827837 -1.4070024 ] [ 0.7293709 0.42269692 -2.815238 -0.48237616 -1.9038267 3.4361887 ] [ 2.4199734 -4.938828 -1.3762544 9.634974 -6.0149555 -7.475866 ]]]]; ov_res: [[[[ 6.7163963 -4.871494 1.5731609 1.7639713 7.7027507 9.797384 ] [-0.50501174 0.8953869 2.1007512 -5.336155 3.6010647 6.4875216 ] [-0.97445285 3.919782 -1.9805173 2.9511807 4.092209 -8.226744 ] [-1.4985949 0.332717 -4.063944 1.0397989 6.582784 -1.4070022 ] [ 0.7293716 0.42269716 -2.8152378 -0.48237616 -1.9038267 3.4361887 ] [ 2.4199734 -4.938828 -1.3762542 9.634974 -6.0149555 -7.475866 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'same', 'dilations': [1, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1063.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.01 * 7.1383 (2,1,.,.) = -0.3159 (3,1,.,.) = 3.8965 [ CPUFloatType{3,1,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[-6.24921732e-02 4.24132943e-02 -6.10409118e-02 9.00475234e-02 3.91131528e-02 -5.05620940e-03 2.81436555e-02 -1.06167784e-02 6.79686964e-02 6.58274116e-03] [-7.65626654e-02 8.91480297e-02 1.94021687e-02 2.39202660e-02 -4.13149856e-02 5.87069578e-02 -2.14216597e-02 5.92204444e-02 3.01021198e-03 1.76660810e-02] [ 9.31431726e-02 1.08448759e-01 -3.55457375e-03 -2.32603736e-02 3.01804370e-03 -2.65493197e-03 5.43671772e-02 6.40282556e-02 -2.46763695e-02 -2.61348970e-02] [ 1.11669756e-01 -9.44862440e-02 3.70249078e-02 1.20962009e-01 3.68454382e-02 -2.26403326e-02 1.17344316e-02 4.29450683e-02 -5.21187857e-02 -1.18981488e-01] [-4.45252657e-02 -1.15906104e-01 7.54572228e-02 2.22404916e-02 1.50991250e-02 -1.28915504e-01 8.26421473e-03 -1.21278770e-01 2.08952297e-02 -9.28742997e-03] [ 7.60695785e-02 3.41286957e-02 -4.98692393e-02 3.77026126e-02 8.66843238e-02 -8.78960118e-02 -7.80185461e-02 2.96571329e-02 -2.33778954e-02 2.31761467e-02] [-1.91853009e-02 3.02993674e-02 7.75800422e-02 7.45837986e-02 -5.52800223e-02 3.38701978e-02 -6.03081435e-02 -7.69006610e-02 -3.66824828e-02 6.11623302e-02] [-3.22004706e-02 4.57075872e-02 -1.22929484e-01 5.65900952e-02 -1.39265060e-01 
-1.31113725e-02 -4.99057695e-02 2.43491167e-03 -4.05266508e-02 -2.00522095e-01] [-1.20260730e-01 3.14701088e-02 -5.68070672e-02 1.02368511e-01 5.35005890e-03 1.68123245e-02 6.55822530e-02 -5.94237261e-02 -8.27543586e-02 -8.79469607e-03] [-4.83494960e-02 -2.19869260e-02 -9.43980962e-02 5.26175536e-02 3.37635726e-02 -2.54388414e-02 -1.50308400e-01 -4.22715358e-02 -3.81227247e-02 -3.71650942e-02]] [[-2.06249043e-01 -3.15824747e-01 1.91835567e-01 -1.15239471e-01 6.17308378e-01 1.08801179e-01 3.28693539e-03 6.40251100e-01 4.25637141e-02 1.06741562e-01] [-2.37835217e-02 5.90068936e-01 6.24538772e-03 2.65412122e-01 -3.65628868e-01 2.50334352e-01 2.60104597e-01 -4.77804720e-01 -6.34293497e-01 1.25722513e-01] [-1.69829756e-01 -4.75881785e-01 9.72758979e-02 2.51806676e-01 3.90177101e-01 -6.53725505e-01 2.59593818e-02 -4.23634440e-01 -2.04826608e-01 -1.52518019e-01] [ 3.96606803e-01 -4.02998775e-01 -6.49225488e-02 -6.19124472e-02 7.44199082e-02 -6.69640079e-02 -4.66099173e-01 2.25228250e-01 3.30221236e-01 -2.73101509e-01] [-3.85026157e-01 1.11207493e-01 3.49063247e-01 -2.22900305e-02 -1.10941967e-02 -1.09030791e-01 2.25980416e-01 2.91219279e-02 7.36989021e-01 -1.41105086e-01] [ 2.87588656e-01 5.17344534e-01 3.59506816e-01 -5.87197244e-01 1.43586040e-01 -7.25500733e-02 -9.52760801e-02 1.45947793e-02 6.72018155e-02 2.10896835e-01] [-1.51901320e-01 4.27817166e-01 3.44763607e-01 1.79649472e-01 2.27101147e-01 2.19848230e-01 -2.84360796e-01 -3.95607293e-01 5.19122034e-02 -5.91494255e-02] [-1.45420849e-01 4.82993037e-01 -8.37257877e-02 2.83559918e-01 -3.39163728e-02 2.02411518e-01 1.67732388e-01 -1.20795332e-01 1.60842538e-02 -3.52714688e-01] [-4.86677408e-01 -3.92382950e-01 1.37860505e-02 1.03804015e-01 5.16972661e-01 1.41345143e-01 1.93105891e-01 4.18775082e-01 -1.00663379e-01 -9.03252959e-02] [-4.60642993e-01 2.74281770e-01 -7.11783946e-01 2.54258275e-01 -3.65097314e-01 -5.82897425e-01 2.54147172e-01 6.24546587e-01 1.26983687e-01 -1.69958264e-01]] [[-6.52693939e+00 1.40001762e+00 
-2.21518254e+00 2.30494499e+00 4.99532729e-01 -2.65162086e+00 1.09076655e+00 -3.71366882e+00 1.66251516e+00 -2.86396575e+00] [ 5.14257860e+00 3.23454452e+00 -4.93001413e+00 -4.25385523e+00 -2.54919505e+00 -1.64854372e+00 7.79699683e-02 5.02635479e-01 -3.41100430e+00 -4.15001440e+00] [ 4.29643250e+00 -4.03158522e+00 -5.90840197e+00 -5.50079167e-01 1.43547833e+00 -2.05978441e+00 4.74527657e-01 -7.93815279e+00 1.04974282e+00 -5.64428377e+00] [ 4.75036526e+00 -7.56517506e+00 -2.10838413e+00 1.17215419e+00 -5.17531204e+00 -2.03047323e+00 6.31870556e+00 -3.40719676e+00 -2.21700978e+00 -4.89314049e-01] [ 1.78329360e+00 3.13296890e+00 6.18021631e+00 8.17948282e-01 -4.85495329e+00 7.24451351e+00 1.88234997e+00 1.24603879e+00 -1.97586524e+00 -7.42028141e+00] [ 3.55014658e+00 -2.95990992e+00 1.77949548e-01 -4.79094267e+00 -1.25293989e+01 1.43999529e+00 2.67427182e+00 -7.64647245e-01 -3.43218541e+00 -6.14050627e+00] [-6.26173258e+00 3.29707526e-02 1.97101831e+00 1.59884703e+00 5.09147835e+00 3.47463751e+00 -2.63455439e+00 4.37893057e+00 -2.86030769e+00 7.78598428e-01] [ 3.06704760e+00 -2.18423438e+00 1.13419342e+00 6.28602648e+00 -8.30187619e-01 -1.89524531e+00 5.37061810e-01 -3.42291546e+00 -7.20813870e-01 2.00343013e+00] [ 7.01005697e+00 7.32582951e+00 5.84661126e-01 -3.82341003e+00 1.16115010e+00 8.45265865e-01 1.31114137e+00 8.56661892e+00 1.16778231e+00 1.17204905e+00] [ 7.55751562e+00 5.64599657e+00 1.53557432e+00 5.29721737e+00 -3.41978401e-01 -4.99512345e-01 3.55970240e+00 -5.40814340e-01 -8.99885058e-01 -9.75247681e-01]]]]; ov_res: [[[[-6.24921732e-02 4.24132943e-02 -6.10409118e-02 9.00475234e-02 3.91131528e-02 -5.05620940e-03 2.81436555e-02 -1.06167784e-02 6.79686964e-02 6.58274116e-03] [-7.65626654e-02 8.91480297e-02 1.94021687e-02 2.39202660e-02 -4.13149856e-02 5.87069578e-02 -2.14216597e-02 5.92204444e-02 3.01021198e-03 1.76660810e-02] [ 9.31431726e-02 1.08448759e-01 -3.55457375e-03 -2.32603736e-02 3.01804370e-03 -2.65493197e-03 5.43671772e-02 6.40282556e-02 
-2.46763695e-02 -2.61348970e-02] [ 1.11669756e-01 -9.44862440e-02 3.70249078e-02 1.20962009e-01 3.68454382e-02 -2.26403326e-02 1.17344316e-02 4.29450683e-02 -5.21187857e-02 -1.18981488e-01] [-4.45252657e-02 -1.15906104e-01 7.54572228e-02 2.22404916e-02 1.50991250e-02 -1.28915504e-01 8.26421473e-03 -1.21278770e-01 2.08952297e-02 -9.28742997e-03] [ 7.60695785e-02 3.41286957e-02 -4.98692393e-02 3.77026126e-02 8.66843238e-02 -8.78960118e-02 -7.80185461e-02 2.96571329e-02 -2.33778954e-02 2.31761467e-02] [-1.91853009e-02 3.02993674e-02 7.75800422e-02 7.45837986e-02 -5.52800223e-02 3.38701978e-02 -6.03081435e-02 -7.69006610e-02 -3.66824828e-02 6.11623302e-02] [-3.22004706e-02 4.57075872e-02 -1.22929484e-01 5.65900952e-02 -1.39265060e-01 -1.31113725e-02 -4.99057695e-02 2.43491167e-03 -4.05266508e-02 -2.00522095e-01] [-1.20260730e-01 3.14701088e-02 -5.68070672e-02 1.02368511e-01 5.35005890e-03 1.68123245e-02 6.55822530e-02 -5.94237261e-02 -8.27543586e-02 -8.79469607e-03] [-4.83494960e-02 -2.19869260e-02 -9.43980962e-02 5.26175536e-02 3.37635726e-02 -2.54388414e-02 -1.50308400e-01 -4.22715358e-02 -3.81227247e-02 -3.71650942e-02]] [[-2.06249043e-01 -3.15824747e-01 1.91835567e-01 -1.15239471e-01 6.17308378e-01 1.08801179e-01 3.28693539e-03 6.40251100e-01 4.25637141e-02 1.06741562e-01] [-2.37835217e-02 5.90068936e-01 6.24538772e-03 2.65412122e-01 -3.65628868e-01 2.50334352e-01 2.60104597e-01 -4.77804720e-01 -6.34293497e-01 1.25722513e-01] [-1.69829756e-01 -4.75881785e-01 9.72758979e-02 2.51806676e-01 3.90177101e-01 -6.53725505e-01 2.59593818e-02 -4.23634440e-01 -2.04826608e-01 -1.52518019e-01] [ 3.96606803e-01 -4.02998775e-01 -6.49225488e-02 -6.19124472e-02 7.44199082e-02 -6.69640079e-02 -4.66099173e-01 2.25228250e-01 3.30221236e-01 -2.73101509e-01] [-3.85026157e-01 1.11207493e-01 3.49063247e-01 -2.22900305e-02 -1.10941967e-02 -1.09030791e-01 2.25980416e-01 2.91219279e-02 7.36989021e-01 -1.41105086e-01] [ 2.87588656e-01 5.17344534e-01 3.59506816e-01 -5.87197244e-01 
1.43586040e-01 -7.25500733e-02 -9.52760801e-02 1.45947793e-02 6.72018155e-02 2.10896835e-01] [-1.51901320e-01 4.27817166e-01 3.44763607e-01 1.79649472e-01 2.27101147e-01 2.19848230e-01 -2.84360796e-01 -3.95607293e-01 5.19122034e-02 -5.91494255e-02] [-1.45420849e-01 4.82993037e-01 -8.37257877e-02 2.83559918e-01 -3.39163728e-02 2.02411518e-01 1.67732388e-01 -1.20795332e-01 1.60842538e-02 -3.52714688e-01] [-4.86677408e-01 -3.92382950e-01 1.37860505e-02 1.03804015e-01 5.16972661e-01 1.41345143e-01 1.93105891e-01 4.18775082e-01 -1.00663379e-01 -9.03252959e-02] [-4.60642993e-01 2.74281770e-01 -7.11783946e-01 2.54258275e-01 -3.65097314e-01 -5.82897425e-01 2.54147172e-01 6.24546587e-01 1.26983687e-01 -1.69958264e-01]] [[-6.52693939e+00 1.40001762e+00 -2.21518254e+00 2.30494499e+00 4.99532729e-01 -2.65162086e+00 1.09076655e+00 -3.71366882e+00 1.66251516e+00 -2.86396575e+00] [ 5.14257860e+00 3.23454452e+00 -4.93001413e+00 -4.25385523e+00 -2.54919505e+00 -1.64854372e+00 7.79699683e-02 5.02635479e-01 -3.41100430e+00 -4.15001440e+00] [ 4.29643250e+00 -4.03158522e+00 -5.90840197e+00 -5.50079167e-01 1.43547833e+00 -2.05978441e+00 4.74527657e-01 -7.93815279e+00 1.04974282e+00 -5.64428377e+00] [ 4.75036526e+00 -7.56517506e+00 -2.10838413e+00 1.17215419e+00 -5.17531204e+00 -2.03047323e+00 6.31870556e+00 -3.40719676e+00 -2.21700978e+00 -4.89314049e-01] [ 1.78329360e+00 3.13296890e+00 6.18021631e+00 8.17948282e-01 -4.85495329e+00 7.24451351e+00 1.88234997e+00 1.24603879e+00 -1.97586524e+00 -7.42028141e+00] [ 3.55014658e+00 -2.95990992e+00 1.77949548e-01 -4.79094267e+00 -1.25293989e+01 1.43999529e+00 2.67427182e+00 -7.64647245e-01 -3.43218541e+00 -6.14050627e+00] [-6.26173258e+00 3.29707526e-02 1.97101831e+00 1.59884703e+00 5.09147835e+00 3.47463751e+00 -2.63455439e+00 4.37893057e+00 -2.86030769e+00 7.78598428e-01] [ 3.06704760e+00 -2.18423438e+00 1.13419342e+00 6.28602648e+00 -8.30187619e-01 -1.89524531e+00 5.37061810e-01 -3.42291546e+00 -7.20813870e-01 2.00343013e+00] [ 
7.01005697e+00 7.32582951e+00 5.84661126e-01 -3.82341003e+00 1.16115010e+00 8.45265865e-01 1.31114137e+00 8.56661892e+00 1.16778231e+00 1.17204905e+00] [ 7.55751562e+00 5.64599657e+00 1.53557432e+00 5.29721737e+00 -3.41978401e-01 -4.99512345e-01 3.55970240e+00 -5.40814340e-01 -8.99885058e-01 -9.75247681e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'valid', 'dilations': [1, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1065.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.9148 (2,1,.,.) = -1.9217 (3,1,.,.) = -0.6874 [ CPUFloatType{3,1,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[-2.45079851e+00 2.22715497e+00 2.17800140e+00 -3.02659273e-01 -2.22219266e-02 -6.82669759e-01 -3.74518633e+00 -3.91854763e+00 -3.77863556e-01 8.79929304e-01] [-4.29381800e+00 7.64403820e-01 7.47123659e-01 -8.32315862e-01 -3.00912172e-01 -2.16830254e+00 4.23543406e+00 -1.31846297e+00 8.45797658e-01 1.18626595e+00] [ 1.33741379e+00 4.54266578e-01 8.44011176e-03 -5.82068920e-01 1.21426797e+00 -5.23139238e-01 -1.29899824e+00 -1.56086874e+00 8.16270649e-01 4.42268515e+00] [-1.85562563e+00 -4.62313831e-01 1.31812024e+00 2.40731776e-01 -4.85435843e-01 1.81191134e+00 -1.16823280e+00 9.61607769e-02 -1.74789369e+00 -2.76983547e+00] [ 1.68995440e+00 2.67318845e+00 -6.34571731e-01 -8.94023776e-01 -7.66279578e-01 9.44212615e-01 4.74244213e+00 -2.65912485e+00 -2.77345490e+00 -3.89028460e-01] [ 8.48904252e-01 -1.00934112e+00 -3.10118556e+00 2.10438117e-01 -1.62822807e+00 1.13646379e-02 1.55195624e-01 2.54248047e+00 -1.17383325e+00 1.66854703e+00] [-3.48617458e+00 1.35917735e+00 1.10988283e+00 -5.46743011e+00 -3.21652621e-01 5.83115518e-01 -1.39248955e+00 -1.26720771e-01 -7.76316702e-01 1.68945205e+00] [ 2.20726728e+00 1.25694275e+00 9.21881080e-01 -1.29231656e+00 3.01394109e-02 
-6.00670815e-01 -1.03196239e+00 -2.13448334e+00 -4.36060727e-01 -2.74494982e+00] [ 1.00312555e+00 -1.71916318e+00 3.49257767e-01 9.32173356e-02 1.08851445e+00 3.67100596e-01 2.38019204e+00 4.99461532e-01 -2.08126521e+00 1.34579134e+00] [ 1.68208456e+00 -3.10532141e+00 7.49503314e-01 -2.34252572e+00 3.59701395e+00 8.98813307e-01 -1.19776525e-01 -1.95867926e-01 -1.81697536e+00 3.00683165e+00]] [[ 7.24449301e+00 2.42678761e+00 5.17434552e-02 -4.51746166e-01 2.23384202e-01 2.02200413e+00 -2.50233173e+00 -5.57581782e-01 1.93088388e+00 2.47913098e+00] [ 2.79834479e-01 2.71034813e+00 2.74602443e-01 8.78590107e-01 1.34107694e-01 -1.16946137e+00 -1.54506743e+00 1.01792967e+00 5.87470710e-01 2.10898662e+00] [-9.85709667e-01 1.49532390e+00 -1.57089424e+00 -2.51849699e+00 4.02882785e-01 1.11619747e+00 2.76734859e-01 -1.02192020e+00 -3.79190803e-01 -1.58457130e-01] [-2.07088399e+00 4.54245925e-01 2.58112955e+00 2.59340906e+00 1.21347284e+00 -2.41851711e+00 -2.15967965e+00 9.05125380e-01 -1.27334905e+00 -2.25073743e+00] [ 4.24456477e-01 -5.93608916e-01 2.79125780e-01 -9.38702703e-01 -1.01829231e+00 -1.44590390e+00 4.19413328e-01 -2.30044651e+00 4.83753175e-01 -1.18404649e-01] [-1.59335279e+00 4.08866453e+00 4.94051695e-01 1.57903984e-01 -9.92657244e-01 2.31642485e+00 1.46360660e+00 1.93484592e+00 -9.17883992e-01 -1.12482941e+00] [ 1.15288109e-01 8.06008220e-01 2.45065708e-02 -2.48701501e+00 2.79646444e+00 6.72880650e-01 1.09188163e+00 -4.50010896e-01 1.82682538e+00 -2.00114727e+00] [-9.98526692e-01 -1.41857541e+00 -1.03254294e+00 3.13082623e+00 1.33598375e+00 1.55169988e+00 -2.50821066e+00 1.54207444e+00 4.18614388e+00 -8.99585128e-01] [-2.07795858e+00 9.57052410e-01 -1.92627633e+00 -1.05638647e+00 1.30976808e+00 -9.70498264e-01 1.13005853e+00 9.25077200e-01 5.55789292e-01 -1.03563797e+00] [-2.34976530e+00 -2.42973471e+00 7.06225634e-01 -4.22388077e+00 -1.52979720e+00 3.11575270e+00 -1.24210906e+00 -1.27800965e+00 5.34115374e-01 6.63157463e-01]] [[-4.03476596e-01 -3.73485148e-01 
-1.99748933e+00 6.91820011e-02 1.01877558e+00 7.01275170e-01 3.98563385e-01 5.55903018e-01 3.90226096e-01 -2.25084007e-01] [ 1.86309028e+00 1.22501874e+00 -2.17760324e-01 1.46514699e-01 -1.40171692e-01 4.03917789e-01 3.30048829e-01 -4.82991397e-01 7.11373031e-01 4.64068145e-01] [-3.95713188e-03 -2.85349011e-01 -3.72714460e-01 1.76020592e-01 -1.21342160e-01 -4.77546543e-01 -1.26525342e-01 2.47063041e-01 4.88303453e-01 1.22421145e-01] [ 9.98456776e-01 4.55248356e-01 1.42254949e-01 -7.16884136e-01 -5.60857654e-01 2.46099561e-01 5.93791366e-01 3.63387585e-01 5.07261872e-01 9.73466456e-01] [-3.69804986e-02 -1.39910126e+00 -2.05444694e-01 -2.76297480e-01 1.30846500e-01 6.49551928e-01 -5.15988767e-01 1.88460127e-01 -6.94368601e-01 1.60827547e-01] [ 2.94963419e-01 -4.58848923e-01 2.90702522e-01 -5.87841928e-01 1.65756261e+00 2.80104756e-01 2.13807374e-01 -1.06598926e+00 4.40008819e-01 3.39592665e-01] [ 1.10916071e-01 -8.17512274e-02 1.75364465e-01 3.94262761e-01 1.71160951e-01 1.23850036e+00 7.09055841e-01 4.77323383e-01 -3.09564695e-02 -1.54143155e-01] [ 9.79644120e-01 -9.36286151e-01 -2.43520871e-01 1.14743173e+00 -3.94511461e-01 7.09078193e-01 8.75093877e-01 -2.86798984e-01 7.72162080e-01 -7.24830180e-02] [ 1.09741636e-01 -9.55245197e-02 -8.59012842e-01 -4.96311128e-01 5.67510910e-02 -2.43631080e-01 4.76441085e-01 -3.16931069e-01 1.61378473e-01 -1.11079848e+00] [ 5.37112772e-01 -8.40188920e-01 -3.40100348e-01 -8.44928026e-01 1.00611919e-03 1.25982285e+00 3.72017205e-01 -1.28492922e-01 -3.87145758e-01 -5.53518355e-01]]]]; ov_res: [[[[-2.45079851e+00 2.22715497e+00 2.17800140e+00 -3.02659273e-01 -2.22219266e-02 -6.82669759e-01 -3.74518633e+00 -3.91854763e+00 -3.77863556e-01 8.79929304e-01] [-4.29381800e+00 7.64403820e-01 7.47123659e-01 -8.32315862e-01 -3.00912172e-01 -2.16830254e+00 4.23543406e+00 -1.31846297e+00 8.45797658e-01 1.18626595e+00] [ 1.33741379e+00 4.54266578e-01 8.44011176e-03 -5.82068920e-01 1.21426797e+00 -5.23139238e-01 -1.29899824e+00 -1.56086874e+00 
8.16270649e-01 4.42268515e+00] [-1.85562563e+00 -4.62313831e-01 1.31812024e+00 2.40731776e-01 -4.85435843e-01 1.81191134e+00 -1.16823280e+00 9.61607769e-02 -1.74789369e+00 -2.76983547e+00] [ 1.68995440e+00 2.67318845e+00 -6.34571731e-01 -8.94023776e-01 -7.66279578e-01 9.44212615e-01 4.74244213e+00 -2.65912485e+00 -2.77345490e+00 -3.89028460e-01] [ 8.48904252e-01 -1.00934112e+00 -3.10118556e+00 2.10438117e-01 -1.62822807e+00 1.13646379e-02 1.55195624e-01 2.54248047e+00 -1.17383325e+00 1.66854703e+00] [-3.48617458e+00 1.35917735e+00 1.10988283e+00 -5.46743011e+00 -3.21652621e-01 5.83115518e-01 -1.39248955e+00 -1.26720771e-01 -7.76316702e-01 1.68945205e+00] [ 2.20726728e+00 1.25694275e+00 9.21881080e-01 -1.29231656e+00 3.01394109e-02 -6.00670815e-01 -1.03196239e+00 -2.13448334e+00 -4.36060727e-01 -2.74494982e+00] [ 1.00312555e+00 -1.71916318e+00 3.49257767e-01 9.32173356e-02 1.08851445e+00 3.67100596e-01 2.38019204e+00 4.99461532e-01 -2.08126521e+00 1.34579134e+00] [ 1.68208456e+00 -3.10532141e+00 7.49503314e-01 -2.34252572e+00 3.59701395e+00 8.98813307e-01 -1.19776525e-01 -1.95867926e-01 -1.81697536e+00 3.00683165e+00]] [[ 7.24449301e+00 2.42678761e+00 5.17434552e-02 -4.51746166e-01 2.23384202e-01 2.02200413e+00 -2.50233173e+00 -5.57581782e-01 1.93088388e+00 2.47913098e+00] [ 2.79834479e-01 2.71034813e+00 2.74602443e-01 8.78590107e-01 1.34107694e-01 -1.16946137e+00 -1.54506743e+00 1.01792967e+00 5.87470710e-01 2.10898662e+00] [-9.85709667e-01 1.49532390e+00 -1.57089424e+00 -2.51849699e+00 4.02882785e-01 1.11619747e+00 2.76734859e-01 -1.02192020e+00 -3.79190803e-01 -1.58457130e-01] [-2.07088399e+00 4.54245925e-01 2.58112955e+00 2.59340906e+00 1.21347284e+00 -2.41851711e+00 -2.15967965e+00 9.05125380e-01 -1.27334905e+00 -2.25073743e+00] [ 4.24456477e-01 -5.93608916e-01 2.79125780e-01 -9.38702703e-01 -1.01829231e+00 -1.44590390e+00 4.19413328e-01 -2.30044651e+00 4.83753175e-01 -1.18404649e-01] [-1.59335279e+00 4.08866453e+00 4.94051695e-01 1.57903984e-01 -9.92657244e-01 
2.31642485e+00 1.46360660e+00 1.93484592e+00 -9.17883992e-01 -1.12482941e+00] [ 1.15288109e-01 8.06008220e-01 2.45065708e-02 -2.48701501e+00 2.79646444e+00 6.72880650e-01 1.09188163e+00 -4.50010896e-01 1.82682538e+00 -2.00114727e+00] [-9.98526692e-01 -1.41857541e+00 -1.03254294e+00 3.13082623e+00 1.33598375e+00 1.55169988e+00 -2.50821066e+00 1.54207444e+00 4.18614388e+00 -8.99585128e-01] [-2.07795858e+00 9.57052410e-01 -1.92627633e+00 -1.05638647e+00 1.30976808e+00 -9.70498264e-01 1.13005853e+00 9.25077200e-01 5.55789292e-01 -1.03563797e+00] [-2.34976530e+00 -2.42973471e+00 7.06225634e-01 -4.22388077e+00 -1.52979720e+00 3.11575270e+00 -1.24210906e+00 -1.27800965e+00 5.34115374e-01 6.63157463e-01]] [[-4.03476596e-01 -3.73485148e-01 -1.99748933e+00 6.91820011e-02 1.01877558e+00 7.01275170e-01 3.98563385e-01 5.55903018e-01 3.90226096e-01 -2.25084007e-01] [ 1.86309028e+00 1.22501874e+00 -2.17760324e-01 1.46514699e-01 -1.40171692e-01 4.03917789e-01 3.30048829e-01 -4.82991397e-01 7.11373031e-01 4.64068145e-01] [-3.95713188e-03 -2.85349011e-01 -3.72714460e-01 1.76020592e-01 -1.21342160e-01 -4.77546543e-01 -1.26525342e-01 2.47063041e-01 4.88303453e-01 1.22421145e-01] [ 9.98456776e-01 4.55248356e-01 1.42254949e-01 -7.16884136e-01 -5.60857654e-01 2.46099561e-01 5.93791366e-01 3.63387585e-01 5.07261872e-01 9.73466456e-01] [-3.69804986e-02 -1.39910126e+00 -2.05444694e-01 -2.76297480e-01 1.30846500e-01 6.49551928e-01 -5.15988767e-01 1.88460127e-01 -6.94368601e-01 1.60827547e-01] [ 2.94963419e-01 -4.58848923e-01 2.90702522e-01 -5.87841928e-01 1.65756261e+00 2.80104756e-01 2.13807374e-01 -1.06598926e+00 4.40008819e-01 3.39592665e-01] [ 1.10916071e-01 -8.17512274e-02 1.75364465e-01 3.94262761e-01 1.71160951e-01 1.23850036e+00 7.09055841e-01 4.77323383e-01 -3.09564695e-02 -1.54143155e-01] [ 9.79644120e-01 -9.36286151e-01 -2.43520871e-01 1.14743173e+00 -3.94511461e-01 7.09078193e-01 8.75093877e-01 -2.86798984e-01 7.72162080e-01 -7.24830180e-02] [ 1.09741636e-01 -9.55245197e-02 
-8.59012842e-01 -4.96311128e-01 5.67510910e-02 -2.43631080e-01 4.76441085e-01 -3.16931069e-01 1.61378473e-01 -1.11079848e+00] [ 5.37112772e-01 -8.40188920e-01 -3.40100348e-01 -8.44928026e-01 1.00611919e-03 1.25982285e+00 3.72017205e-01 -1.28492922e-01 -3.87145758e-01 -5.53518355e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': [2, 2], 'pads': 'valid', 'dilations': [1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1067.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) onstant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode fw_re: [[[[ 7.8166633 2.3140044 11.621053 3.4782968 ] [ 1.9145999 -4.7604423 -1.9417634 3.8802686 ] [ -0.6466848 -7.271516 10.479487 -2.9264116 ] [-12.513112 8.4569235 -2.8255212 0.83637756]]]]; ov_res: [[[[ 7.8166637 2.3140042 11.621053 3.4782968] [ 1.9145999 -4.7604423 -1.9417635 3.8802679] [ -0.6466843 -7.2715173 10.479487 -2.9264119] [-12.513112 8.456925 -2.825521 0.8363777]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': [2, 2], 'pads': 'valid', 'dilations': [2, 2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1069.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[-10.022995 0.1734333 -14.496404 ] [ -1.8658106 -1.3094332 1.9336272] [ -3.781589 -3.3983793 -2.070552 ]]]]; ov_res: [[[[-10.022994 0.1734334 -14.496402 ] [ -1.8658102 -1.3094333 1.9336274] [ -3.7815888 -3.398379 -2.0705526]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3], 'strides': [2, 1], 'pads': 'valid', 'dilations': [1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1071.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, strides=[27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ -6.819351 6.19949 3.4780352 1.3736393 -12.368484 -0.538689 5.0113516 0.5784524] [ 3.0635738 7.373793 -5.8203645 -8.947622 3.3597977 -0.8606595 -0.3114192 -5.264503 ] [ -0.9553643 -6.970964 -1.808576 0.8002689 4.7921658 -3.9351082 -4.510731 2.1485598] [ -1.8337563 -1.1486043 -1.1778543 -12.308519 -0.7063096 10.884491 10.15286 1.4078124]]]]; ov_res: [[[[ -6.819351 6.1994896 3.4780357 1.3736392 -12.368484 -0.538689 5.0113525 0.5784526 ] [ 3.063574 7.373794 -5.8203645 -8.947622 3.3597977 -0.86065936 -0.3114196 -5.2645025 ] [ -0.95536405 -6.970963 -1.8085753 0.80026793 4.7921643 -3.9351082 -4.510731 2.1485593 ] [ -1.8337564 -1.1486039 -1.1778541 -12.308519 -0.70631 10.884489 10.152858 1.4078125 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'pads': 'valid', 'dilations': [1, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1073.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.8117 (2,1,.,.) = -0.5981 (3,1,.,.) = 0.2834 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[-1.29602766e+00 2.46962476e+00 7.83168435e-01 -1.00512779e+00 -3.28513265e+00] [ 2.15148234e+00 -3.44715738e+00 -2.54487967e+00 -1.77194238e+00 3.77374125e+00] [-9.28934693e-01 1.69800997e+00 1.80965590e+00 -4.17657900e+00 -1.53879750e+00] [ 1.14406669e+00 -4.19365376e-01 9.01774704e-01 -3.47845650e+00 2.36222506e+00] [ 2.94039249e+00 1.22706068e+00 -6.29637122e-01 1.56312299e+00 -6.12482369e-01]] [[-6.82682991e-01 -8.16509724e-02 -1.47031224e+00 4.27634180e-01 -2.96463221e-01] [ 2.27326050e-01 5.96652031e-01 -8.69900919e-03 3.76253784e-01 4.30227757e-01] [ 8.77304226e-02 -3.34139854e-01 2.87189096e-01 -5.50274312e-01 -3.70869249e-01] [-5.54025233e-01 3.37651163e-01 4.76881564e-01 4.15991604e-01 8.75096396e-02] [ 5.95960140e-01 1.94127128e-01 -8.77955496e-01 2.75885522e-01 7.72696063e-02]] [[-2.41943985e-01 -9.74518955e-02 5.90407290e-02 5.90325706e-02 1.22724459e-01] [-1.00083835e-01 -2.12738857e-01 1.43172532e-01 -2.30664074e-01 2.17918694e-01] [-2.42050603e-01 -2.73894221e-01 4.06534463e-01 -1.30378664e-01 -2.66682863e-01] [-5.90243563e-02 -3.90050441e-01 5.14064170e-02 1.66072398e-02 2.21714540e-03] 
[-1.67296886e-01 -3.89048636e-01 -9.39649493e-02 -1.43437356e-01 8.16287324e-02]]]]; ov_res: [[[[-1.29602766e+00 2.46962476e+00 7.83168435e-01 -1.00512779e+00 -3.28513265e+00] [ 2.15148234e+00 -3.44715738e+00 -2.54487967e+00 -1.77194238e+00 3.77374125e+00] [-9.28934693e-01 1.69800997e+00 1.80965590e+00 -4.17657900e+00 -1.53879750e+00] [ 1.14406669e+00 -4.19365376e-01 9.01774704e-01 -3.47845650e+00 2.36222506e+00] [ 2.94039249e+00 1.22706068e+00 -6.29637122e-01 1.56312299e+00 -6.12482369e-01]] [[-6.82682991e-01 -8.16509724e-02 -1.47031224e+00 4.27634180e-01 -2.96463221e-01] [ 2.27326050e-01 5.96652031e-01 -8.69900919e-03 3.76253784e-01 4.30227757e-01] [ 8.77304226e-02 -3.34139854e-01 2.87189096e-01 -5.50274312e-01 -3.70869249e-01] [-5.54025233e-01 3.37651163e-01 4.76881564e-01 4.15991604e-01 8.75096396e-02] [ 5.95960140e-01 1.94127128e-01 -8.77955496e-01 2.75885522e-01 7.72696063e-02]] [[-2.41943985e-01 -9.74518955e-02 5.90407290e-02 5.90325706e-02 1.22724459e-01] [-1.00083835e-01 -2.12738857e-01 1.43172532e-01 -2.30664074e-01 2.17918694e-01] [-2.42050603e-01 -2.73894221e-01 4.06534463e-01 -1.30378664e-01 -2.66682863e-01] [-5.90243563e-02 -3.90050441e-01 5.14064170e-02 1.66072398e-02 2.21714540e-03] [-1.67296886e-01 -3.89048636e-01 -9.39649493e-02 -1.43437356e-01 8.16287324e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 2], 'pads': 'valid', 'dilations': [2, 2], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1075.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -1.0028 (2,1,.,.) = -1.5133 (3,1,.,.) = 0.6272 [ CPUFloatType{3,1,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[ 0.54877543 -1.0025249 0.35612273 1.2033231 -0.05449979] [-1.6339618 -0.41886622 1.2997382 -1.6519521 1.1474812 ] [ 1.5110266 -0.170925 0.8717368 -1.2872121 -0.7776477 ] [ 1.3225805 -0.5207107 0.28032494 -0.49864 0.22341248] [-0.7904451 1.1174177 -0.9725298 0.5351332 0.8862518 ]] [[ 1.6757619 1.8215971 1.7469337 -1.0930816 0.5121063 ] [-0.31398967 -0.61610335 0.83774155 -1.8956634 -3.7553318 ] [-0.06353363 1.9410214 -1.5750989 -0.37070602 -0.5536731 ] [-1.5932097 0.9227341 0.56941354 -1.1683296 -1.9700497 ] [-2.2678618 -0.25981176 0.824062 -1.4829757 -0.6226729 ]] [[ 0.30408156 -0.38003683 0.29265618 0.8470867 0.16523103] [-0.15802702 0.31943536 0.3614052 -0.33173707 0.07653338] [-0.41940582 0.17621997 0.6319101 -1.2071856 0.05256195] [ 0.3067721 0.3899698 -0.24898602 -0.9152994 0.87632656] [ 1.8768861 1.1268411 0.3291257 -0.01134201 0.44754693]]]]; ov_res: [[[[ 0.54877543 -1.0025249 0.35612273 1.2033231 -0.05449979] [-1.6339618 -0.41886622 1.2997382 -1.6519521 1.1474812 ] [ 1.5110266 -0.170925 0.8717368 -1.2872121 -0.7776477 ] [ 1.3225805 -0.5207107 0.28032494 -0.49864 0.22341248] [-0.7904451 1.1174177 -0.9725298 0.5351332 0.8862518 ]] [[ 1.6757619 1.8215971 1.7469337 
-1.0930816 0.5121063 ] [-0.31398967 -0.61610335 0.83774155 -1.8956634 -3.7553318 ] [-0.06353363 1.9410214 -1.5750989 -0.37070602 -0.5536731 ] [-1.5932097 0.9227341 0.56941354 -1.1683296 -1.9700497 ] [-2.2678618 -0.25981176 0.824062 -1.4829757 -0.6226729 ]] [[ 0.30408156 -0.38003683 0.29265618 0.8470867 0.16523103] [-0.15802702 0.31943536 0.3614052 -0.33173707 0.07653338] [-0.41940582 0.17621997 0.6319101 -1.2071856 0.05256195] [ 0.3067721 0.3899698 -0.24898602 -0.9152994 0.87632656] [ 1.8768861 1.1268411 0.3291257 -0.01134201 0.44754693]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [2, 1], 'pads': 'valid', 'dilations': [1, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1077.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.9079 (2,1,.,.) = 1.2581 (3,1,.,.) = -0.4669 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 2.77212597e-02 -3.83603245e-01 -9.94379044e-01 -9.97552156e-01 4.68925953e-01 -4.90708411e-01 -4.08404060e-02 6.37803435e-01 -1.51165700e+00 -6.29824162e-01] [-7.21795678e-01 -1.87898591e-01 2.47681892e-04 6.92969114e-02 2.98533291e-01 6.56855881e-01 8.43693376e-01 2.93730468e-01 6.77378893e-01 -1.22673285e+00] [ 4.71372634e-01 -1.46819103e+00 -6.03168905e-01 1.77727950e+00 -2.85033375e-01 -1.44866675e-01 -1.16311336e+00 -4.02745962e-01 1.12937832e+00 -1.52097628e-01] [-1.02845505e-01 1.29693449e-01 1.34878290e+00 1.71985543e+00 1.57864499e+00 -1.31241000e+00 -3.77700835e-01 5.29907465e-01 1.90992463e+00 -8.39009225e-01] [-5.61056614e-01 2.08256078e+00 -1.62680954e-01 4.76190001e-01 -2.06760898e-01 -1.36633050e+00 6.00979269e-01 -1.64462805e-01 1.22189891e+00 2.16029473e-02]] [[-1.92207968e+00 -1.64166570e+00 -1.16570902e+00 -8.15539122e-01 1.24560189e+00 -6.93216562e-01 -6.15041971e-01 2.50774503e-01 -1.76079556e-01 -1.21006286e+00] [-2.95908034e-01 -1.41036212e+00 -5.19576192e-01 -2.49588799e+00 -2.09494209e+00 -2.27180099e+00 -1.33084846e+00 -1.30956852e+00 -1.27678454e+00 -6.13866687e-01] [ 9.83874857e-01 
9.77113187e-01 1.22115839e+00 -6.06720410e-02 2.91975474e+00 -3.45940590e+00 7.81740010e-01 1.32936370e+00 -1.40748632e+00 -7.79921114e-02] [ 1.28956628e+00 -1.06128097e+00 2.64766121e+00 6.84468448e-01 -7.26809502e-01 -1.35273546e-01 1.34976268e+00 -1.96674085e+00 4.33253670e+00 3.16938579e-01] [-1.90900058e-01 -2.75623471e-01 -1.28195643e+00 1.49454737e+00 -4.48172122e-01 8.82904112e-01 1.77120721e+00 -1.05760600e-02 -3.28851175e+00 -5.73232830e-01]] [[-9.70164120e-01 3.24517459e-01 6.23419769e-02 -2.57399291e-01 1.21867090e-01 6.71058536e-01 -3.22675794e-01 -3.87442350e-01 -9.12166774e-01 5.11778653e-01] [-2.46678963e-01 6.95871532e-01 -2.21223429e-01 -5.03091872e-01 5.65968037e-01 -7.63467371e-01 2.89682925e-01 3.32682401e-01 -7.72570252e-01 3.13437670e-01] [-3.77839088e-01 -6.88387871e-01 -2.01052144e-01 7.45767832e-01 4.25175251e-03 -8.05571854e-01 1.19614899e-01 -5.25926769e-01 1.66217819e-01 1.84232265e-01] [ 1.14532506e+00 -2.53301114e-01 -2.90637523e-01 5.01150079e-02 -2.82896608e-01 1.81196883e-01 -3.34692329e-01 -1.03472032e-01 2.27054998e-01 -8.09205621e-02] [ 3.02430429e-03 -6.06684759e-02 2.13652402e-01 -5.11602938e-01 5.10333441e-02 -2.04722345e-01 8.79570186e-01 5.23185581e-02 7.62237966e-01 2.73826838e-01]]]]; ov_res: [[[[ 2.77212597e-02 -3.83603245e-01 -9.94379044e-01 -9.97552156e-01 4.68925953e-01 -4.90708411e-01 -4.08404060e-02 6.37803435e-01 -1.51165700e+00 -6.29824162e-01] [-7.21795678e-01 -1.87898591e-01 2.47681892e-04 6.92969114e-02 2.98533291e-01 6.56855881e-01 8.43693376e-01 2.93730468e-01 6.77378893e-01 -1.22673285e+00] [ 4.71372634e-01 -1.46819103e+00 -6.03168905e-01 1.77727950e+00 -2.85033375e-01 -1.44866675e-01 -1.16311336e+00 -4.02745962e-01 1.12937832e+00 -1.52097628e-01] [-1.02845505e-01 1.29693449e-01 1.34878290e+00 1.71985543e+00 1.57864499e+00 -1.31241000e+00 -3.77700835e-01 5.29907465e-01 1.90992463e+00 -8.39009225e-01] [-5.61056614e-01 2.08256078e+00 -1.62680954e-01 4.76190001e-01 -2.06760898e-01 -1.36633050e+00 6.00979269e-01 
-1.64462805e-01 1.22189891e+00 2.16029473e-02]] [[-1.92207968e+00 -1.64166570e+00 -1.16570902e+00 -8.15539122e-01 1.24560189e+00 -6.93216562e-01 -6.15041971e-01 2.50774503e-01 -1.76079556e-01 -1.21006286e+00] [-2.95908034e-01 -1.41036212e+00 -5.19576192e-01 -2.49588799e+00 -2.09494209e+00 -2.27180099e+00 -1.33084846e+00 -1.30956852e+00 -1.27678454e+00 -6.13866687e-01] [ 9.83874857e-01 9.77113187e-01 1.22115839e+00 -6.06720410e-02 2.91975474e+00 -3.45940590e+00 7.81740010e-01 1.32936370e+00 -1.40748632e+00 -7.79921114e-02] [ 1.28956628e+00 -1.06128097e+00 2.64766121e+00 6.84468448e-01 -7.26809502e-01 -1.35273546e-01 1.34976268e+00 -1.96674085e+00 4.33253670e+00 3.16938579e-01] [-1.90900058e-01 -2.75623471e-01 -1.28195643e+00 1.49454737e+00 -4.48172122e-01 8.82904112e-01 1.77120721e+00 -1.05760600e-02 -3.28851175e+00 -5.73232830e-01]] [[-9.70164120e-01 3.24517459e-01 6.23419769e-02 -2.57399291e-01 1.21867090e-01 6.71058536e-01 -3.22675794e-01 -3.87442350e-01 -9.12166774e-01 5.11778653e-01] [-2.46678963e-01 6.95871532e-01 -2.21223429e-01 -5.03091872e-01 5.65968037e-01 -7.63467371e-01 2.89682925e-01 3.32682401e-01 -7.72570252e-01 3.13437670e-01] [-3.77839088e-01 -6.88387871e-01 -2.01052144e-01 7.45767832e-01 4.25175251e-03 -8.05571854e-01 1.19614899e-01 -5.25926769e-01 1.66217819e-01 1.84232265e-01] [ 1.14532506e+00 -2.53301114e-01 -2.90637523e-01 5.01150079e-02 -2.82896608e-01 1.81196883e-01 -3.34692329e-01 -1.03472032e-01 2.27054998e-01 -8.09205621e-02] [ 3.02430429e-03 -6.06684759e-02 2.13652402e-01 -5.11602938e-01 5.10333441e-02 -2.04722345e-01 8.79570186e-01 5.23185581e-02 7.62237966e-01 2.73826838e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'same', 'dilations': [2, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1079.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 1]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.9318 (2,1,.,.) = 0.01 * 2.2375 (3,1,.,.) = 0.2508 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 1.12457228e+00 -1.34955394e+00 2.03086212e-01 -8.19247603e-01 -6.99871004e-01 -1.75066382e-01 -6.90552711e-01 6.17518090e-02 6.34426653e-01 2.62008739e+00] [ 2.03227544e+00 -3.25901330e-01 1.04914081e+00 3.25350106e-01 -5.38634583e-02 -1.01241052e+00 1.06284773e+00 -1.70045257e+00 1.89504409e+00 -3.36506009e-01] [-5.22641718e-01 -7.33889043e-01 -2.11059287e-01 -5.44328570e-01 -4.24298763e-01 -6.24418557e-01 -1.64813066e+00 -1.07235873e+00 -9.19863939e-01 6.03785157e-01] [ 1.01953954e-01 -5.42837344e-02 7.51646578e-01 7.28899598e-01 1.78063822e+00 1.98834634e+00 1.60927749e+00 1.33068573e+00 -5.12237847e-01 -3.02793115e-01] [ 7.98478499e-02 -1.75913006e-01 -4.80684489e-01 -6.76690996e-01 6.08334184e-01 -1.81573117e+00 -5.95202558e-02 -9.96200085e-01 -6.69396341e-01 -4.23884660e-01] [ 6.35065019e-01 -1.12686448e-01 7.77653933e-01 2.82613993e-01 -1.06793761e+00 2.47353047e-01 -2.23971057e+00 4.63770300e-01 3.94542873e-01 -1.27937829e+00] [ 4.01948899e-01 7.20352888e-01 -1.43598866e+00 1.11433959e+00 1.61986440e-01 -1.39646780e+00 -1.82842582e-01 1.21685994e+00 -1.36239421e+00 1.22843337e+00] [-7.76807904e-01 
-8.48514616e-01 -6.40994966e-01 2.00861514e-01 1.76722974e-01 -1.51993632e-01 -8.73028994e-01 -6.44304097e-01 1.75517881e+00 -1.50957632e+00] [ 2.79954225e-01 -2.68117332e+00 -1.68816105e-01 2.28652740e+00 -1.32107437e+00 1.88646436e+00 -2.02349091e+00 -8.78393829e-01 1.18947692e-01 9.37409580e-01] [-1.08090198e+00 3.82246077e-01 5.41237116e-01 8.71863186e-01 1.16426192e-01 -1.29248130e+00 1.03907537e+00 7.14814186e-01 4.11157668e-01 6.36219263e-01]] [[ 2.94670500e-02 1.20456722e-02 -2.52513360e-04 2.42594220e-02 -3.85852344e-02 5.53829072e-04 -3.83106060e-02 -5.59391044e-02 1.34419529e-02 2.44806074e-02] [-8.47739540e-03 -6.70307456e-03 -5.23540145e-03 -2.85114311e-02 -6.85639353e-03 1.63869206e-02 -1.03032375e-02 7.10004801e-03 -4.11043270e-03 -1.24661522e-02] [-1.23447948e-03 2.73588616e-02 1.18457908e-02 1.41919032e-02 -2.35595647e-02 2.55269371e-02 2.09229290e-02 4.37634736e-02 -5.64568080e-02 -2.51990631e-02] [ 2.81996056e-02 1.71867467e-03 -1.73940323e-02 8.37839022e-03 2.57944055e-02 1.39369247e-02 -1.28210650e-03 -1.98725742e-04 -6.75824750e-03 1.48356156e-02] [ 1.12775117e-02 -5.22515411e-03 -9.47286561e-03 7.87718408e-03 -1.67277288e-02 1.35418400e-02 2.98493151e-02 1.59796998e-02 1.90265775e-02 -8.59590434e-03] [ 1.77939925e-02 4.13544737e-02 -1.07346512e-02 -6.47347840e-03 -7.40999961e-03 -1.56424362e-02 -4.76947706e-03 2.79710768e-03 -1.47826187e-02 -3.68398391e-02] [ 1.02269072e-02 2.76071094e-02 3.96799110e-02 2.84370426e-02 2.02192590e-02 -1.11534139e-02 -1.02112512e-03 6.75202231e-04 1.69540918e-03 -1.35394139e-02] [ 5.97408041e-03 -6.39593694e-03 3.93026844e-02 -1.93140358e-02 2.52392702e-02 7.28496598e-05 -1.92819778e-02 5.04604951e-02 5.08762943e-03 3.33403051e-02] [ 3.35433055e-03 1.08270915e-02 -3.32203135e-02 -1.90301836e-02 -1.64466351e-03 -7.94277433e-03 3.02750841e-02 7.82933086e-04 -1.99226034e-03 -1.12280704e-03] [ 2.90230419e-02 -6.27017347e-03 -1.74123701e-02 -2.04293951e-02 1.11697300e-03 -6.80299057e-03 8.27053227e-05 9.72694065e-03 
-4.42717969e-03 -1.32655362e-02]] [[ 2.28205435e-02 2.84715354e-01 -8.49185586e-01 6.38778508e-02 -1.10516749e-01 -3.16239923e-01 1.82822108e-01 -2.51948744e-01 6.08489551e-02 -3.17030162e-01] [ 1.09203592e-01 -1.54988110e-01 1.58616945e-01 4.16084617e-01 -8.67993943e-03 -1.63156837e-01 -1.13347910e-01 -1.30228817e-01 1.80923268e-01 -3.08210939e-01] [ 1.72595248e-01 -3.48421216e-01 -2.26927251e-02 2.14788705e-01 -3.22491169e-01 2.04326451e-01 -1.02216259e-01 -8.04290548e-02 -1.76639214e-01 -1.83629289e-01] [ 3.17704111e-01 -3.82170789e-02 3.39349329e-01 5.28833680e-02 1.63826700e-02 2.74442174e-02 -2.15672642e-01 -1.69518776e-02 -4.47604030e-01 -1.31715432e-01] [-2.01456562e-01 6.09302819e-01 1.99385732e-01 -2.41677076e-01 -3.36176902e-02 1.19623289e-01 3.82658511e-01 -7.03593669e-03 1.78219173e-02 -1.84824467e-01] [ 1.78353921e-01 2.10214317e-01 4.81702298e-01 -3.91467094e-01 1.41174793e-01 1.26582310e-01 -6.56895339e-03 -3.17119777e-01 1.31794065e-01 2.05910653e-01] [ 1.00720711e-01 -1.10625274e-01 -2.49846488e-01 -6.13434196e-01 1.90504536e-01 1.03098057e-01 -9.14615467e-02 1.70541584e-01 1.88231662e-01 -1.68543011e-01] [-1.21195957e-01 6.57748850e-03 3.97644751e-02 2.48849448e-02 1.91740781e-01 2.38138780e-01 5.46655394e-02 2.39903256e-01 1.99259862e-01 2.28863675e-02] [ 2.63342530e-01 -3.35250758e-02 -1.00186735e-01 4.03737724e-02 9.00142491e-02 1.73648074e-01 7.05459863e-02 -2.72623569e-01 2.59485185e-01 -3.08096707e-01] [-4.56566811e-01 1.91004708e-01 3.26658657e-04 1.56918406e-01 -7.58891553e-02 -6.70762882e-02 3.20354581e-01 -2.68089145e-01 -3.14577103e-01 1.46232426e-01]]]]; ov_res: [[[[ 1.12457228e+00 -1.34955394e+00 2.03086212e-01 -8.19247603e-01 -6.99871004e-01 -1.75066382e-01 -6.90552711e-01 6.17518090e-02 6.34426653e-01 2.62008739e+00] [ 2.03227544e+00 -3.25901330e-01 1.04914081e+00 3.25350106e-01 -5.38634583e-02 -1.01241052e+00 1.06284773e+00 -1.70045257e+00 1.89504409e+00 -3.36506009e-01] [-5.22641718e-01 -7.33889043e-01 -2.11059287e-01 
-5.44328570e-01 -4.24298763e-01 -6.24418557e-01 -1.64813066e+00 -1.07235873e+00 -9.19863939e-01 6.03785157e-01] [ 1.01953954e-01 -5.42837344e-02 7.51646578e-01 7.28899598e-01 1.78063822e+00 1.98834634e+00 1.60927749e+00 1.33068573e+00 -5.12237847e-01 -3.02793115e-01] [ 7.98478499e-02 -1.75913006e-01 -4.80684489e-01 -6.76690996e-01 6.08334184e-01 -1.81573117e+00 -5.95202558e-02 -9.96200085e-01 -6.69396341e-01 -4.23884660e-01] [ 6.35065019e-01 -1.12686448e-01 7.77653933e-01 2.82613993e-01 -1.06793761e+00 2.47353047e-01 -2.23971057e+00 4.63770300e-01 3.94542873e-01 -1.27937829e+00] [ 4.01948899e-01 7.20352888e-01 -1.43598866e+00 1.11433959e+00 1.61986440e-01 -1.39646780e+00 -1.82842582e-01 1.21685994e+00 -1.36239421e+00 1.22843337e+00] [-7.76807904e-01 -8.48514616e-01 -6.40994966e-01 2.00861514e-01 1.76722974e-01 -1.51993632e-01 -8.73028994e-01 -6.44304097e-01 1.75517881e+00 -1.50957632e+00] [ 2.79954225e-01 -2.68117332e+00 -1.68816105e-01 2.28652740e+00 -1.32107437e+00 1.88646436e+00 -2.02349091e+00 -8.78393829e-01 1.18947692e-01 9.37409580e-01] [-1.08090198e+00 3.82246077e-01 5.41237116e-01 8.71863186e-01 1.16426192e-01 -1.29248130e+00 1.03907537e+00 7.14814186e-01 4.11157668e-01 6.36219263e-01]] [[ 2.94670500e-02 1.20456722e-02 -2.52513360e-04 2.42594220e-02 -3.85852344e-02 5.53829072e-04 -3.83106060e-02 -5.59391044e-02 1.34419529e-02 2.44806074e-02] [-8.47739540e-03 -6.70307456e-03 -5.23540145e-03 -2.85114311e-02 -6.85639353e-03 1.63869206e-02 -1.03032375e-02 7.10004801e-03 -4.11043270e-03 -1.24661522e-02] [-1.23447948e-03 2.73588616e-02 1.18457908e-02 1.41919032e-02 -2.35595647e-02 2.55269371e-02 2.09229290e-02 4.37634736e-02 -5.64568080e-02 -2.51990631e-02] [ 2.81996056e-02 1.71867467e-03 -1.73940323e-02 8.37839022e-03 2.57944055e-02 1.39369247e-02 -1.28210650e-03 -1.98725742e-04 -6.75824750e-03 1.48356156e-02] [ 1.12775117e-02 -5.22515411e-03 -9.47286561e-03 7.87718408e-03 -1.67277288e-02 1.35418400e-02 2.98493151e-02 1.59796998e-02 1.90265775e-02 
-8.59590434e-03] [ 1.77939925e-02 4.13544737e-02 -1.07346512e-02 -6.47347840e-03 -7.40999961e-03 -1.56424362e-02 -4.76947706e-03 2.79710768e-03 -1.47826187e-02 -3.68398391e-02] [ 1.02269072e-02 2.76071094e-02 3.96799110e-02 2.84370426e-02 2.02192590e-02 -1.11534139e-02 -1.02112512e-03 6.75202231e-04 1.69540918e-03 -1.35394139e-02] [ 5.97408041e-03 -6.39593694e-03 3.93026844e-02 -1.93140358e-02 2.52392702e-02 7.28496598e-05 -1.92819778e-02 5.04604951e-02 5.08762943e-03 3.33403051e-02] [ 3.35433055e-03 1.08270915e-02 -3.32203135e-02 -1.90301836e-02 -1.64466351e-03 -7.94277433e-03 3.02750841e-02 7.82933086e-04 -1.99226034e-03 -1.12280704e-03] [ 2.90230419e-02 -6.27017347e-03 -1.74123701e-02 -2.04293951e-02 1.11697300e-03 -6.80299057e-03 8.27053227e-05 9.72694065e-03 -4.42717969e-03 -1.32655362e-02]] [[ 2.28205435e-02 2.84715354e-01 -8.49185586e-01 6.38778508e-02 -1.10516749e-01 -3.16239923e-01 1.82822108e-01 -2.51948744e-01 6.08489551e-02 -3.17030162e-01] [ 1.09203592e-01 -1.54988110e-01 1.58616945e-01 4.16084617e-01 -8.67993943e-03 -1.63156837e-01 -1.13347910e-01 -1.30228817e-01 1.80923268e-01 -3.08210939e-01] [ 1.72595248e-01 -3.48421216e-01 -2.26927251e-02 2.14788705e-01 -3.22491169e-01 2.04326451e-01 -1.02216259e-01 -8.04290548e-02 -1.76639214e-01 -1.83629289e-01] [ 3.17704111e-01 -3.82170789e-02 3.39349329e-01 5.28833680e-02 1.63826700e-02 2.74442174e-02 -2.15672642e-01 -1.69518776e-02 -4.47604030e-01 -1.31715432e-01] [-2.01456562e-01 6.09302819e-01 1.99385732e-01 -2.41677076e-01 -3.36176902e-02 1.19623289e-01 3.82658511e-01 -7.03593669e-03 1.78219173e-02 -1.84824467e-01] [ 1.78353921e-01 2.10214317e-01 4.81702298e-01 -3.91467094e-01 1.41174793e-01 1.26582310e-01 -6.56895339e-03 -3.17119777e-01 1.31794065e-01 2.05910653e-01] [ 1.00720711e-01 -1.10625274e-01 -2.49846488e-01 -6.13434196e-01 1.90504536e-01 1.03098057e-01 -9.14615467e-02 1.70541584e-01 1.88231662e-01 -1.68543011e-01] [-1.21195957e-01 6.57748850e-03 3.97644751e-02 2.48849448e-02 1.91740781e-01 
2.38138780e-01 5.46655394e-02 2.39903256e-01 1.99259862e-01 2.28863675e-02] [ 2.63342530e-01 -3.35250758e-02 -1.00186735e-01 4.03737724e-02 9.00142491e-02 1.73648074e-01 7.05459863e-02 -2.72623569e-01 2.59485185e-01 -3.08096707e-01] [-4.56566811e-01 1.91004708e-01 3.26658657e-04 1.56918406e-01 -7.58891553e-02 -6.70762882e-02 3.20354581e-01 -2.68089145e-01 -3.14577103e-01 1.46232426e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'valid', 'dilations': [2, 1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1081.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = 0.7490 (2,1,.,.) = 0.8490 (3,1,.,.) = 1.2042 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 4.07155871e-01 -3.29522401e-01 1.06961735e-01 2.10984454e-01 -2.07190529e-01 -6.69431910e-02 -4.54861969e-01 3.34298909e-01 -3.47967476e-01 3.94334465e-01] [-3.85128856e-01 2.17574555e-02 -2.56463617e-01 5.38204610e-01 3.07122231e-01 -4.15043414e-01 4.02990758e-01 -1.09752417e+00 1.50527135e-01 9.87727106e-01] [ 5.90055108e-01 1.51623128e-04 1.36418030e-01 -1.11722505e+00 -5.62262416e-01 -9.29009259e-01 4.97573286e-01 -3.87548715e-01 -4.88073230e-01 -4.02636498e-01] [ 1.03101623e+00 -1.33877158e+00 2.78848350e-01 -3.93422574e-01 -9.05021727e-02 -5.92572894e-03 1.18349206e+00 -1.66911638e+00 1.98229671e-01 -2.45083258e-01] [ 6.50917530e-01 -1.85870278e+00 -2.00046703e-01 1.20498955e+00 -1.74004823e-01 -7.79609680e-01 -9.67302918e-01 -2.76571572e-01 1.30914915e+00 6.97059810e-01] [ 4.22693133e-01 -9.29986477e-01 -1.12974036e+00 4.34713155e-01 -7.35953093e-01 8.80629420e-01 1.68981159e+00 -3.72990310e-01 7.00746179e-02 -6.35462224e-01] [ 2.48522639e-01 4.89807665e-01 -6.59680545e-01 1.75485122e+00 7.59171963e-01 -8.78401101e-02 -4.56865191e-01 -3.43632847e-01 1.02354646e+00 2.66624354e-02] [ 1.07569921e+00 
-7.75893927e-02 8.12186718e-01 -2.18212754e-01 2.20965847e-01 1.06002462e+00 1.14448396e-02 9.63769078e-01 2.19833747e-01 6.11975908e-01] [ 3.56774718e-01 -6.30334079e-01 -1.13879848e+00 3.24667484e-01 -1.48547888e-01 2.00588560e+00 -7.12375820e-01 6.98301733e-01 1.02950025e+00 1.16813087e+00] [-4.40126836e-01 4.72877532e-01 -3.76692891e-01 -3.07283700e-01 3.78493756e-01 1.17736822e-02 5.94317079e-01 -1.59878477e-01 -1.44123182e-01 -2.49329790e-01]] [[ 1.30432129e+00 -4.52548802e-01 1.32099926e+00 2.19752356e-01 -2.03865156e-01 -8.26973259e-01 -5.60573816e-01 -1.20266771e+00 -2.76326299e-01 1.04589391e+00] [ 7.27502346e-01 6.65672064e-01 -1.06987202e+00 -5.73141932e-01 9.89383757e-01 1.57962179e+00 -1.37958288e+00 -5.35841823e-01 3.37406397e-01 -4.39812332e-01] [ 1.00209296e+00 4.27370071e-02 3.71774048e-01 8.14366281e-01 4.75801021e-01 4.65932637e-01 9.69257295e-01 -3.91397595e-01 -7.49037087e-01 1.20918252e-01] [-5.41017115e-01 -8.01788092e-01 1.89389622e+00 -2.13074875e+00 3.56658250e-01 -4.42587674e-01 -1.46810904e-01 1.61742195e-01 -1.16775548e+00 1.20539379e+00] [-1.23690557e+00 -2.79519618e-01 6.39795419e-03 -1.48068154e+00 4.08576339e-01 -4.97477204e-01 2.40614876e-01 4.16900069e-01 8.67555141e-01 7.23380744e-01] [-6.74688399e-01 1.42417741e+00 -4.75982398e-01 -7.23011076e-01 1.73861325e+00 -8.68321002e-01 -1.05892289e+00 -7.56721377e-01 1.63768064e-02 -5.67919791e-01] [ 4.37114418e-01 7.76036382e-01 7.86017239e-01 -2.90209502e-01 7.46909142e-01 7.34883666e-01 -1.35185495e-01 2.28878275e-01 1.76029310e-01 -1.34766903e-02] [ 9.86397207e-01 -1.01682737e-01 -2.98249155e-01 3.01772833e-01 -2.13398069e-01 2.93852776e-01 8.25735867e-01 7.38177359e-01 1.82073414e-01 -5.84407806e-01] [ 9.33044851e-01 -1.19107687e+00 -8.37607861e-01 1.07067621e+00 1.16640794e+00 -1.20505369e+00 1.24269813e-01 1.25767335e-01 -1.20202589e+00 8.48192126e-02] [ 7.69551024e-02 -1.21792817e+00 1.71062676e-03 -7.24610448e-01 -1.29009902e-01 -8.73679399e-01 -5.35788357e-01 -8.94213021e-01 
8.41621697e-01 -1.41289675e+00]] [[ 1.89072394e+00 1.13802111e+00 -1.19676495e+00 1.08592319e+00 -5.55083990e-01 -1.38572180e+00 2.44495228e-01 -2.53434449e-01 -9.73280311e-01 1.21309447e+00] [ 1.05327857e+00 -3.84504288e-01 1.97077656e+00 5.65466583e-01 -3.79842669e-01 1.26620397e-01 2.91638732e-01 -1.97830141e-01 -1.32000506e+00 -1.07376885e+00] [ 5.93486801e-02 -2.13330746e+00 9.31724429e-01 8.84402245e-02 3.79704535e-01 2.49158931e+00 1.90750623e+00 2.45164442e+00 2.72935122e-01 2.96299100e-01] [-3.89210396e-02 -4.74206656e-01 3.68046492e-01 -2.13065219e+00 -2.27312684e-01 -1.66344929e+00 -5.62195778e-01 -1.91557336e+00 8.10393393e-01 1.28281355e+00] [-4.14269447e-01 1.49169719e+00 -8.19117427e-01 8.95047367e-01 8.53012741e-01 4.22591180e-01 -1.02795553e+00 -5.46115749e-02 1.19236577e+00 -2.31049228e+00] [ 1.15966365e-01 5.22175252e-01 -8.35951507e-01 4.20692652e-01 -1.08097278e-01 -5.06096840e-01 7.49318719e-01 8.43419731e-01 -5.38137436e-01 -1.31247818e-01] [ 8.37904513e-01 1.56521106e+00 5.78231335e-01 2.06188178e+00 1.92140594e-01 1.32095873e+00 -1.11271024e+00 2.46944451e+00 6.08412385e-01 -1.48505437e+00] [ 9.30618107e-01 1.93678904e+00 -2.09461093e+00 1.00581384e+00 1.55820251e+00 5.79036213e-02 5.40438354e-01 -1.15005994e+00 1.63314417e-01 4.84191358e-01] [ 1.70685744e+00 1.31297410e+00 -1.07891369e+00 -1.28216445e+00 9.18307006e-01 -2.94550967e+00 8.38106871e-01 -8.52737459e-04 1.04336846e+00 3.19393545e-01] [ 2.39048392e-01 -5.03229976e-01 1.74222016e+00 1.19743061e+00 7.18215108e-02 -2.43116453e-01 -5.68436868e-02 -5.38998246e-01 5.17065585e-01 1.41370761e+00]]]]; ov_res: [[[[ 4.07155871e-01 -3.29522401e-01 1.06961735e-01 2.10984454e-01 -2.07190529e-01 -6.69431910e-02 -4.54861969e-01 3.34298909e-01 -3.47967476e-01 3.94334465e-01] [-3.85128856e-01 2.17574555e-02 -2.56463617e-01 5.38204610e-01 3.07122231e-01 -4.15043414e-01 4.02990758e-01 -1.09752417e+00 1.50527135e-01 9.87727106e-01] [ 5.90055108e-01 1.51623128e-04 1.36418030e-01 -1.11722505e+00 
-5.62262416e-01 -9.29009259e-01 4.97573286e-01 -3.87548715e-01 -4.88073230e-01 -4.02636498e-01] [ 1.03101623e+00 -1.33877158e+00 2.78848350e-01 -3.93422574e-01 -9.05021727e-02 -5.92572894e-03 1.18349206e+00 -1.66911638e+00 1.98229671e-01 -2.45083258e-01] [ 6.50917530e-01 -1.85870278e+00 -2.00046703e-01 1.20498955e+00 -1.74004823e-01 -7.79609680e-01 -9.67302918e-01 -2.76571572e-01 1.30914915e+00 6.97059810e-01] [ 4.22693133e-01 -9.29986477e-01 -1.12974036e+00 4.34713155e-01 -7.35953093e-01 8.80629420e-01 1.68981159e+00 -3.72990310e-01 7.00746179e-02 -6.35462224e-01] [ 2.48522639e-01 4.89807665e-01 -6.59680545e-01 1.75485122e+00 7.59171963e-01 -8.78401101e-02 -4.56865191e-01 -3.43632847e-01 1.02354646e+00 2.66624354e-02] [ 1.07569921e+00 -7.75893927e-02 8.12186718e-01 -2.18212754e-01 2.20965847e-01 1.06002462e+00 1.14448396e-02 9.63769078e-01 2.19833747e-01 6.11975908e-01] [ 3.56774718e-01 -6.30334079e-01 -1.13879848e+00 3.24667484e-01 -1.48547888e-01 2.00588560e+00 -7.12375820e-01 6.98301733e-01 1.02950025e+00 1.16813087e+00] [-4.40126836e-01 4.72877532e-01 -3.76692891e-01 -3.07283700e-01 3.78493756e-01 1.17736822e-02 5.94317079e-01 -1.59878477e-01 -1.44123182e-01 -2.49329790e-01]] [[ 1.30432129e+00 -4.52548802e-01 1.32099926e+00 2.19752356e-01 -2.03865156e-01 -8.26973259e-01 -5.60573816e-01 -1.20266771e+00 -2.76326299e-01 1.04589391e+00] [ 7.27502346e-01 6.65672064e-01 -1.06987202e+00 -5.73141932e-01 9.89383757e-01 1.57962179e+00 -1.37958288e+00 -5.35841823e-01 3.37406397e-01 -4.39812332e-01] [ 1.00209296e+00 4.27370071e-02 3.71774048e-01 8.14366281e-01 4.75801021e-01 4.65932637e-01 9.69257295e-01 -3.91397595e-01 -7.49037087e-01 1.20918252e-01] [-5.41017115e-01 -8.01788092e-01 1.89389622e+00 -2.13074875e+00 3.56658250e-01 -4.42587674e-01 -1.46810904e-01 1.61742195e-01 -1.16775548e+00 1.20539379e+00] [-1.23690557e+00 -2.79519618e-01 6.39795419e-03 -1.48068154e+00 4.08576339e-01 -4.97477204e-01 2.40614876e-01 4.16900069e-01 8.67555141e-01 7.23380744e-01] 
[-6.74688399e-01 1.42417741e+00 -4.75982398e-01 -7.23011076e-01 1.73861325e+00 -8.68321002e-01 -1.05892289e+00 -7.56721377e-01 1.63768064e-02 -5.67919791e-01] [ 4.37114418e-01 7.76036382e-01 7.86017239e-01 -2.90209502e-01 7.46909142e-01 7.34883666e-01 -1.35185495e-01 2.28878275e-01 1.76029310e-01 -1.34766903e-02] [ 9.86397207e-01 -1.01682737e-01 -2.98249155e-01 3.01772833e-01 -2.13398069e-01 2.93852776e-01 8.25735867e-01 7.38177359e-01 1.82073414e-01 -5.84407806e-01] [ 9.33044851e-01 -1.19107687e+00 -8.37607861e-01 1.07067621e+00 1.16640794e+00 -1.20505369e+00 1.24269813e-01 1.25767335e-01 -1.20202589e+00 8.48192126e-02] [ 7.69551024e-02 -1.21792817e+00 1.71062676e-03 -7.24610448e-01 -1.29009902e-01 -8.73679399e-01 -5.35788357e-01 -8.94213021e-01 8.41621697e-01 -1.41289675e+00]] [[ 1.89072394e+00 1.13802111e+00 -1.19676495e+00 1.08592319e+00 -5.55083990e-01 -1.38572180e+00 2.44495228e-01 -2.53434449e-01 -9.73280311e-01 1.21309447e+00] [ 1.05327857e+00 -3.84504288e-01 1.97077656e+00 5.65466583e-01 -3.79842669e-01 1.26620397e-01 2.91638732e-01 -1.97830141e-01 -1.32000506e+00 -1.07376885e+00] [ 5.93486801e-02 -2.13330746e+00 9.31724429e-01 8.84402245e-02 3.79704535e-01 2.49158931e+00 1.90750623e+00 2.45164442e+00 2.72935122e-01 2.96299100e-01] [-3.89210396e-02 -4.74206656e-01 3.68046492e-01 -2.13065219e+00 -2.27312684e-01 -1.66344929e+00 -5.62195778e-01 -1.91557336e+00 8.10393393e-01 1.28281355e+00] [-4.14269447e-01 1.49169719e+00 -8.19117427e-01 8.95047367e-01 8.53012741e-01 4.22591180e-01 -1.02795553e+00 -5.46115749e-02 1.19236577e+00 -2.31049228e+00] [ 1.15966365e-01 5.22175252e-01 -8.35951507e-01 4.20692652e-01 -1.08097278e-01 -5.06096840e-01 7.49318719e-01 8.43419731e-01 -5.38137436e-01 -1.31247818e-01] [ 8.37904513e-01 1.56521106e+00 5.78231335e-01 2.06188178e+00 1.92140594e-01 1.32095873e+00 -1.11271024e+00 2.46944451e+00 6.08412385e-01 -1.48505437e+00] [ 9.30618107e-01 1.93678904e+00 -2.09461093e+00 1.00581384e+00 1.55820251e+00 5.79036213e-02 5.40438354e-01 
-1.15005994e+00 1.63314417e-01 4.84191358e-01] [ 1.70685744e+00 1.31297410e+00 -1.07891369e+00 -1.28216445e+00 9.18307006e-01 -2.94550967e+00 8.38106871e-01 -8.52737459e-04 1.04336846e+00 3.19393545e-01] [ 2.39048392e-01 -5.03229976e-01 1.74222016e+00 1.19743061e+00 7.18215108e-02 -2.43116453e-01 -5.68436868e-02 -5.38998246e-01 5.17065585e-01 1.41370761e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'same', 'dilations': [2, 2], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1083.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.4862 (2,1,.,.) = -0.6196 (3,1,.,.) = 0.4753 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 7.87103027e-02 6.53954923e-01 -5.05105555e-01 3.32728699e-02 1.87645048e-01 -5.20750642e-01 -1.01124711e-01 -5.17678738e-01 9.74378586e-02 -2.82052964e-01] [ 1.22883034e+00 9.13175792e-02 -1.76864281e-01 1.05351344e-01 7.78752327e-01 -2.93750614e-01 6.96343601e-01 -1.07574441e-01 3.79957467e-01 -8.32385838e-01] [ 1.79467693e-01 -2.00508237e-01 1.48297530e-02 -8.49488616e-01 -4.89808351e-01 4.49217528e-01 -4.25675631e-01 -7.98414409e-01 -3.07089329e-01 9.40387398e-02] [-3.21011513e-01 -4.50031638e-01 3.72102827e-01 -1.09593146e-01 2.69597143e-01 2.74719775e-01 -3.11054945e-01 -7.83105433e-01 1.00556600e+00 -5.36897719e-01] [ 3.31006855e-01 -5.48659623e-01 1.32771833e-02 3.34872425e-01 1.57955334e-01 4.91891325e-01 -4.37947437e-02 -2.84502357e-01 2.84986287e-01 -2.94844396e-02] [-4.63660806e-01 -2.28408486e-01 7.02893734e-01 1.05435908e-01 -1.06375575e+00 4.98890579e-02 2.85145760e-01 2.48245656e-01 -1.34458497e-01 5.48931248e-02] [-7.52186358e-01 5.45204997e-01 -1.33772895e-01 -1.14512883e-01 -3.07227689e-04 -1.25922725e-01 7.06875846e-02 9.17251185e-02 -3.32046539e-01 -1.72756404e-01] [-6.34191573e-01 
-9.27909195e-01 -4.31256533e-01 1.44435811e+00 5.88368475e-01 -2.86242396e-01 2.35436484e-01 -9.62487340e-01 -8.54808509e-01 5.68227708e-01] [-1.34002462e-01 8.49021301e-02 -2.76570115e-02 1.06546044e-01 -1.76253513e-01 -1.98252216e-01 2.02383012e-01 -4.87706751e-01 -9.66504931e-01 2.81058222e-01] [-1.05704379e+00 1.61226138e-01 -8.00372884e-02 9.93256986e-01 6.02134645e-01 -1.86177582e-01 1.13255866e-01 5.91555953e-01 -1.38139293e-01 8.61480236e-02]] [[ 7.25528523e-02 -1.26745844e+00 1.87921241e-01 4.55937773e-01 2.42767483e-01 5.96518338e-01 8.01178142e-02 9.91016850e-02 -2.95100957e-01 7.69929513e-02] [-1.02308929e+00 2.58294165e-01 7.61205494e-01 2.23705694e-01 -1.20260373e-01 9.80391026e-01 -3.94903630e-01 4.11997139e-01 -7.07812190e-01 2.06417385e-02] [-4.65898842e-01 4.25763205e-02 -1.82629839e-01 -1.07585073e+00 -2.80979544e-01 4.11185473e-01 1.34824812e-01 1.07885134e+00 -3.61255258e-01 -6.05463505e-01] [ 2.90582150e-01 -3.07772279e-01 3.58774006e-01 -8.28017950e-01 4.96654809e-02 -3.59585166e-01 3.33009571e-01 -1.46052599e+00 1.60632282e-01 1.44538462e-01] [-1.10567546e+00 9.78336781e-02 2.17617586e-01 7.05971122e-01 -4.98887673e-02 -1.11875705e-01 1.38947174e-01 6.20221458e-02 5.22411644e-01 3.98635268e-01] [-5.80581613e-02 -1.63777947e-01 -1.12880540e+00 -4.91699457e-01 6.26814544e-01 3.52237731e-01 -6.55383825e-01 -8.59089792e-02 9.42712903e-01 -5.02135813e-01] [-3.10585171e-01 1.06643331e+00 -4.40218300e-01 -2.00925060e-02 8.91471148e-01 5.21770775e-01 -5.05989373e-01 6.94528759e-01 -2.10489824e-01 1.86854184e-01] [ 1.47584125e-01 -2.93438733e-01 1.05283999e+00 5.51953912e-01 -1.38691261e-01 9.48617235e-02 3.53305787e-01 -7.02051997e-01 6.29376054e-01 4.33048397e-01] [ 1.94601023e+00 -2.49217376e-01 6.25545382e-01 9.96873498e-01 -5.85630164e-02 -4.08448517e-01 -8.39585185e-01 5.26444972e-01 -9.83495638e-02 3.80002081e-01] [-2.29771987e-01 4.93305564e-01 1.20772433e+00 2.79701632e-02 1.96451191e-02 -3.01109523e-01 -1.29258907e+00 2.53766686e-01 
-1.09861946e+00 1.44212711e+00]] [[ 1.39964595e-01 3.18236619e-01 -7.44708836e-01 -5.41941784e-02 2.86500435e-02 -1.56649902e-01 -2.30400965e-01 -1.94306836e-01 -1.22375339e-01 1.04272239e-01] [ 5.59700787e-01 -4.09520000e-01 4.49564047e-02 7.75297999e-01 -7.73411453e-01 -1.65878564e-01 -3.34500819e-01 -6.18284941e-01 5.39159417e-01 5.98414848e-03] [ 9.03526917e-02 6.07753806e-02 -1.83836341e-01 -3.58353645e-01 -9.65146005e-01 -3.11668944e-02 -1.39523476e-01 3.88092637e-01 -4.96979624e-01 2.58163929e-01] [ 2.86599427e-01 -7.96374083e-02 1.00228786e+00 -1.43806830e-01 -1.29843071e-01 4.88506407e-01 -3.63818198e-01 -8.12539682e-02 5.42264938e-01 5.23046315e-01] [-3.93505037e-01 -3.79562467e-01 -5.89563549e-01 -7.71168843e-02 -5.16886652e-01 -1.90354437e-01 -5.32338917e-01 3.16455543e-01 -3.27665627e-01 4.42036726e-02] [ 2.36520782e-01 -3.28889340e-01 6.94760442e-01 -4.85193841e-02 -5.82716838e-02 -4.21532393e-01 1.49199560e-01 1.28129900e-01 -7.41017342e-01 4.44599614e-03] [-4.91508991e-01 -5.56395128e-02 -5.50048649e-02 -4.87670273e-01 3.26311290e-01 -1.18417218e-01 -3.48287225e-01 7.03319907e-01 -1.40756190e-01 -2.65695248e-03] [-4.21539873e-01 -4.48142081e-01 6.25133693e-01 1.51299044e-01 -5.66470802e-01 -1.38582904e-02 3.60165179e-01 -3.68396461e-01 -3.86845618e-01 2.73667186e-01] [-2.71123797e-01 1.54473543e+00 -2.56249815e-01 3.04917872e-01 -3.03157628e-01 7.32466340e-01 8.33579779e-01 -5.02629280e-01 1.92236304e-01 2.21599877e-01] [-2.60069221e-01 -1.53088525e-01 -6.41478717e-01 1.21651702e-01 3.18274707e-01 -3.03597182e-01 -4.20082882e-02 6.44102320e-02 -2.22872421e-02 5.34652352e-01]]]]; ov_res: [[[[ 7.87103027e-02 6.53954923e-01 -5.05105555e-01 3.32728699e-02 1.87645048e-01 -5.20750642e-01 -1.01124711e-01 -5.17678738e-01 9.74378586e-02 -2.82052964e-01] [ 1.22883034e+00 9.13175792e-02 -1.76864281e-01 1.05351344e-01 7.78752327e-01 -2.93750614e-01 6.96343601e-01 -1.07574441e-01 3.79957467e-01 -8.32385838e-01] [ 1.79467693e-01 -2.00508237e-01 1.48297530e-02 
-8.49488616e-01 -4.89808351e-01 4.49217528e-01 -4.25675631e-01 -7.98414409e-01 -3.07089329e-01 9.40387398e-02] [-3.21011513e-01 -4.50031638e-01 3.72102827e-01 -1.09593146e-01 2.69597143e-01 2.74719775e-01 -3.11054945e-01 -7.83105433e-01 1.00556600e+00 -5.36897719e-01] [ 3.31006855e-01 -5.48659623e-01 1.32771833e-02 3.34872425e-01 1.57955334e-01 4.91891325e-01 -4.37947437e-02 -2.84502357e-01 2.84986287e-01 -2.94844396e-02] [-4.63660806e-01 -2.28408486e-01 7.02893734e-01 1.05435908e-01 -1.06375575e+00 4.98890579e-02 2.85145760e-01 2.48245656e-01 -1.34458497e-01 5.48931248e-02] [-7.52186358e-01 5.45204997e-01 -1.33772895e-01 -1.14512883e-01 -3.07227689e-04 -1.25922725e-01 7.06875846e-02 9.17251185e-02 -3.32046539e-01 -1.72756404e-01] [-6.34191573e-01 -9.27909195e-01 -4.31256533e-01 1.44435811e+00 5.88368475e-01 -2.86242396e-01 2.35436484e-01 -9.62487340e-01 -8.54808509e-01 5.68227708e-01] [-1.34002462e-01 8.49021301e-02 -2.76570115e-02 1.06546044e-01 -1.76253513e-01 -1.98252216e-01 2.02383012e-01 -4.87706751e-01 -9.66504931e-01 2.81058222e-01] [-1.05704379e+00 1.61226138e-01 -8.00372884e-02 9.93256986e-01 6.02134645e-01 -1.86177582e-01 1.13255866e-01 5.91555953e-01 -1.38139293e-01 8.61480236e-02]] [[ 7.25528523e-02 -1.26745844e+00 1.87921241e-01 4.55937773e-01 2.42767483e-01 5.96518338e-01 8.01178142e-02 9.91016850e-02 -2.95100957e-01 7.69929513e-02] [-1.02308929e+00 2.58294165e-01 7.61205494e-01 2.23705694e-01 -1.20260373e-01 9.80391026e-01 -3.94903630e-01 4.11997139e-01 -7.07812190e-01 2.06417385e-02] [-4.65898842e-01 4.25763205e-02 -1.82629839e-01 -1.07585073e+00 -2.80979544e-01 4.11185473e-01 1.34824812e-01 1.07885134e+00 -3.61255258e-01 -6.05463505e-01] [ 2.90582150e-01 -3.07772279e-01 3.58774006e-01 -8.28017950e-01 4.96654809e-02 -3.59585166e-01 3.33009571e-01 -1.46052599e+00 1.60632282e-01 1.44538462e-01] [-1.10567546e+00 9.78336781e-02 2.17617586e-01 7.05971122e-01 -4.98887673e-02 -1.11875705e-01 1.38947174e-01 6.20221458e-02 5.22411644e-01 3.98635268e-01] 
[-5.80581613e-02 -1.63777947e-01 -1.12880540e+00 -4.91699457e-01 6.26814544e-01 3.52237731e-01 -6.55383825e-01 -8.59089792e-02 9.42712903e-01 -5.02135813e-01] [-3.10585171e-01 1.06643331e+00 -4.40218300e-01 -2.00925060e-02 8.91471148e-01 5.21770775e-01 -5.05989373e-01 6.94528759e-01 -2.10489824e-01 1.86854184e-01] [ 1.47584125e-01 -2.93438733e-01 1.05283999e+00 5.51953912e-01 -1.38691261e-01 9.48617235e-02 3.53305787e-01 -7.02051997e-01 6.29376054e-01 4.33048397e-01] [ 1.94601023e+00 -2.49217376e-01 6.25545382e-01 9.96873498e-01 -5.85630164e-02 -4.08448517e-01 -8.39585185e-01 5.26444972e-01 -9.83495638e-02 3.80002081e-01] [-2.29771987e-01 4.93305564e-01 1.20772433e+00 2.79701632e-02 1.96451191e-02 -3.01109523e-01 -1.29258907e+00 2.53766686e-01 -1.09861946e+00 1.44212711e+00]] [[ 1.39964595e-01 3.18236619e-01 -7.44708836e-01 -5.41941784e-02 2.86500435e-02 -1.56649902e-01 -2.30400965e-01 -1.94306836e-01 -1.22375339e-01 1.04272239e-01] [ 5.59700787e-01 -4.09520000e-01 4.49564047e-02 7.75297999e-01 -7.73411453e-01 -1.65878564e-01 -3.34500819e-01 -6.18284941e-01 5.39159417e-01 5.98414848e-03] [ 9.03526917e-02 6.07753806e-02 -1.83836341e-01 -3.58353645e-01 -9.65146005e-01 -3.11668944e-02 -1.39523476e-01 3.88092637e-01 -4.96979624e-01 2.58163929e-01] [ 2.86599427e-01 -7.96374083e-02 1.00228786e+00 -1.43806830e-01 -1.29843071e-01 4.88506407e-01 -3.63818198e-01 -8.12539682e-02 5.42264938e-01 5.23046315e-01] [-3.93505037e-01 -3.79562467e-01 -5.89563549e-01 -7.71168843e-02 -5.16886652e-01 -1.90354437e-01 -5.32338917e-01 3.16455543e-01 -3.27665627e-01 4.42036726e-02] [ 2.36520782e-01 -3.28889340e-01 6.94760442e-01 -4.85193841e-02 -5.82716838e-02 -4.21532393e-01 1.49199560e-01 1.28129900e-01 -7.41017342e-01 4.44599614e-03] [-4.91508991e-01 -5.56395128e-02 -5.50048649e-02 -4.87670273e-01 3.26311290e-01 -1.18417218e-01 -3.48287225e-01 7.03319907e-01 -1.40756190e-01 -2.65695248e-03] [-4.21539873e-01 -4.48142081e-01 6.25133693e-01 1.51299044e-01 -5.66470802e-01 -1.38582904e-02 
3.60165179e-01 -3.68396461e-01 -3.86845618e-01 2.73667186e-01] [-2.71123797e-01 1.54473543e+00 -2.56249815e-01 3.04917872e-01 -3.03157628e-01 7.32466340e-01 8.33579779e-01 -5.02629280e-01 1.92236304e-01 2.21599877e-01] [-2.60069221e-01 -1.53088525e-01 -6.41478717e-01 1.21651702e-01 3.18274707e-01 -3.03597182e-01 -4.20082882e-02 6.44102320e-02 -2.22872421e-02 5.34652352e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_2d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1], 'strides': [1, 1], 'pads': 'valid', 'dilations': [2, 2], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1085.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 2]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, strides=[1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,.,.) = -0.4572 (2,1,.,.) = -0.1716 (3,1,.,.) = -0.1361 [ CPUFloatType{3,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[ 0.4071723 0.08473228 0.08153723 0.13693288 0.24403119 -0.38949805 0.2971015 -0.38006312 -0.5641096 0.76502043] [-0.6620679 -0.06208363 0.26405752 -0.37874717 -0.19204487 -0.2144298 -0.43454337 0.15606107 0.55103403 -0.00549213] [ 0.6734972 -0.3223084 -0.1854913 -0.65356845 -0.07327663 -0.38095582 -0.92925155 -0.39507797 0.76601046 0.4439537 ] [ 0.93093854 0.9185451 0.27524492 -0.13942094 0.8305779 -0.19799341 -0.52651125 -0.8480512 -0.64776736 0.45024896] [ 0.00690734 0.10576686 0.36225304 0.25933865 -0.0148957 -0.6351202 -0.1096236 -0.03754548 0.19717945 0.33773127] [-0.46029606 0.32285973 0.24539004 0.11872115 -0.5904572 -0.7140383 0.05630909 -0.15381913 0.16976817 0.26028898] [ 0.49871996 -0.20247653 1.0581503 0.42222014 -0.51154643 -0.3856275 0.27018887 0.65848345 0.34049344 -0.05182462] [-0.54430085 -0.2614085 0.27602735 0.18343104 -0.48797974 -0.4527356 -0.03928417 0.71115255 0.8376872 0.37019613] [-0.33994207 -0.02736313 -0.41157162 0.70324147 0.37829766 -0.28615883 -0.5517747 -0.11024568 0.8109117 0.47977415] [ 0.5073747 0.0891054 -0.19606356 0.13555293 0.41913363 0.4214159 -0.8571822 -0.41373736 
0.16216958 0.5443272 ]] [[ 0.20149897 -0.24655214 -0.07211845 -0.07890946 0.00686478 0.01037966 -0.1834566 -0.26195148 -0.30550128 0.18779536] [ 0.11920491 0.24330835 -0.12703305 -0.0347435 -0.23704864 0.04347035 -0.1642295 -0.13248771 -0.20757617 -0.28276065] [-0.00909386 -0.3874158 0.17310809 -0.18561158 -0.32638797 0.03354591 -0.09478553 0.35312033 -0.04613502 0.08465185] [-0.05932833 0.19206263 -0.15937546 0.16426556 -0.04037324 -0.09340148 -0.28963146 0.06244542 -0.07752077 0.20200425] [-0.05468995 -0.01848621 -0.15174086 0.43140477 0.16724914 -0.01240783 -0.2532383 0.29218304 -0.09482835 0.3523815 ] [ 0.06806035 -0.41751415 -0.17958951 -0.02655308 -0.0788815 -0.17512952 -0.0477952 0.12680623 0.080563 -0.30647957] [ 0.04502035 0.11625984 0.10890257 0.2217837 0.17502858 -0.06040896 0.1774783 -0.04293849 -0.12697496 0.25336987] [ 0.1017703 0.05250373 0.25822854 0.3518227 -0.16264391 0.03831277 -0.12044758 0.04500318 -0.03718257 -0.40574053] [-0.23311205 0.09168616 -0.08144108 0.26703575 -0.13854563 -0.03263031 0.01689233 0.11783884 0.34311172 0.04227354] [ 0.10382741 0.25286856 -0.19549364 -0.17994972 0.21345237 -0.42728204 0.08413272 0.16736813 -0.20230125 0.13942942]] [[-0.04457104 0.11770622 -0.05918684 -0.15256082 0.11808619 0.29012764 -0.01100137 -0.09582892 -0.29825532 -0.12769017] [ 0.17866889 -0.04529528 0.0145859 -0.09042051 -0.07110765 0.04676332 -0.19424002 -0.26587272 0.11300903 0.17591043] [ 0.14050543 0.01391678 0.00531384 0.09550732 0.25900516 -0.00670242 0.0848188 0.08204135 0.20328939 -0.03878418] [ 0.06726379 -0.08735456 0.02488227 0.03135366 -0.10722187 -0.02823791 0.02772984 -0.120239 0.11061597 -0.08467412] [ 0.20860228 -0.22934264 -0.0149647 -0.06697267 0.25307384 0.17604102 0.13246222 -0.0159181 -0.20486908 -0.02583558] [ 0.04901128 0.01589062 0.18866187 0.06242567 -0.06852895 -0.1214762 -0.02759772 0.08750359 -0.06930412 -0.02217064] [-0.04860296 0.04771708 0.15057757 -0.04978533 -0.04115891 -0.11147454 -0.04341222 -0.06574348 -0.0085696 
0.0035899 ] [ 0.06064491 0.17476714 -0.14107142 0.10669643 0.03824786 0.09698291 0.03566962 0.13059887 0.04395753 -0.15908486] [-0.09255794 0.3058333 -0.12933682 0.0699663 -0.00640079 -0.05984067 -0.02916187 0.13648848 0.04132598 0.2354193 ] [ 0.09417513 0.07473141 0.32179683 -0.20821042 0.00512656 0.05391748 0.0211422 0.08449376 -0.08292881 -0.07504198]]]]; ov_res: [[[[ 0.4071723 0.08473228 0.08153723 0.13693288 0.24403119 -0.38949805 0.2971015 -0.38006312 -0.5641096 0.76502043] [-0.6620679 -0.06208363 0.26405752 -0.37874717 -0.19204487 -0.2144298 -0.43454337 0.15606107 0.55103403 -0.00549213] [ 0.6734972 -0.3223084 -0.1854913 -0.65356845 -0.07327663 -0.38095582 -0.92925155 -0.39507797 0.76601046 0.4439537 ] [ 0.93093854 0.9185451 0.27524492 -0.13942094 0.8305779 -0.19799341 -0.52651125 -0.8480512 -0.64776736 0.45024896] [ 0.00690734 0.10576686 0.36225304 0.25933865 -0.0148957 -0.6351202 -0.1096236 -0.03754548 0.19717945 0.33773127] [-0.46029606 0.32285973 0.24539004 0.11872115 -0.5904572 -0.7140383 0.05630909 -0.15381913 0.16976817 0.26028898] [ 0.49871996 -0.20247653 1.0581503 0.42222014 -0.51154643 -0.3856275 0.27018887 0.65848345 0.34049344 -0.05182462] [-0.54430085 -0.2614085 0.27602735 0.18343104 -0.48797974 -0.4527356 -0.03928417 0.71115255 0.8376872 0.37019613] [-0.33994207 -0.02736313 -0.41157162 0.70324147 0.37829766 -0.28615883 -0.5517747 -0.11024568 0.8109117 0.47977415] [ 0.5073747 0.0891054 -0.19606356 0.13555293 0.41913363 0.4214159 -0.8571822 -0.41373736 0.16216958 0.5443272 ]] [[ 0.20149897 -0.24655214 -0.07211845 -0.07890946 0.00686478 0.01037966 -0.1834566 -0.26195148 -0.30550128 0.18779536] [ 0.11920491 0.24330835 -0.12703305 -0.0347435 -0.23704864 0.04347035 -0.1642295 -0.13248771 -0.20757617 -0.28276065] [-0.00909386 -0.3874158 0.17310809 -0.18561158 -0.32638797 0.03354591 -0.09478553 0.35312033 -0.04613502 0.08465185] [-0.05932833 0.19206263 -0.15937546 0.16426556 -0.04037324 -0.09340148 -0.28963146 0.06244542 -0.07752077 0.20200425] 
[-0.05468995 -0.01848621 -0.15174086 0.43140477 0.16724914 -0.01240783 -0.2532383 0.29218304 -0.09482835 0.3523815 ] [ 0.06806035 -0.41751415 -0.17958951 -0.02655308 -0.0788815 -0.17512952 -0.0477952 0.12680623 0.080563 -0.30647957] [ 0.04502035 0.11625984 0.10890257 0.2217837 0.17502858 -0.06040896 0.1774783 -0.04293849 -0.12697496 0.25336987] [ 0.1017703 0.05250373 0.25822854 0.3518227 -0.16264391 0.03831277 -0.12044758 0.04500318 -0.03718257 -0.40574053] [-0.23311205 0.09168616 -0.08144108 0.26703575 -0.13854563 -0.03263031 0.01689233 0.11783884 0.34311172 0.04227354] [ 0.10382741 0.25286856 -0.19549364 -0.17994972 0.21345237 -0.42728204 0.08413272 0.16736813 -0.20230125 0.13942942]] [[-0.04457104 0.11770622 -0.05918684 -0.15256082 0.11808619 0.29012764 -0.01100137 -0.09582892 -0.29825532 -0.12769017] [ 0.17866889 -0.04529528 0.0145859 -0.09042051 -0.07110765 0.04676332 -0.19424002 -0.26587272 0.11300903 0.17591043] [ 0.14050543 0.01391678 0.00531384 0.09550732 0.25900516 -0.00670242 0.0848188 0.08204135 0.20328939 -0.03878418] [ 0.06726379 -0.08735456 0.02488227 0.03135366 -0.10722187 -0.02823791 0.02772984 -0.120239 0.11061597 -0.08467412] [ 0.20860228 -0.22934264 -0.0149647 -0.06697267 0.25307384 0.17604102 0.13246222 -0.0159181 -0.20486908 -0.02583558] [ 0.04901128 0.01589062 0.18866187 0.06242567 -0.06852895 -0.1214762 -0.02759772 0.08750359 -0.06930412 -0.02217064] [-0.04860296 0.04771708 0.15057757 -0.04978533 -0.04115891 -0.11147454 -0.04341222 -0.06574348 -0.0085696 0.0035899 ] [ 0.06064491 0.17476714 -0.14107142 0.10669643 0.03824786 0.09698291 0.03566962 0.13059887 0.04395753 -0.15908486] [-0.09255794 0.3058333 -0.12933682 0.0699663 -0.00640079 -0.05984067 -0.02916187 0.13648848 0.04132598 0.2354193 ] [ 0.09417513 0.07473141 0.32179683 -0.20821042 0.00512656 0.05391748 0.0211422 0.08449376 -0.08292881 -0.07504198]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3], 'strides': [1], 'pads': 'same', 'dilations': [1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1087.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.565699}]() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.4881 1.1645 0.2667 -0.6698 0.0666 1.0593 0.5762 1.3891 0.6959 [ CPUFloatType{1,3,3} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no 
schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#dfw_re: [[[-2.7013266 -2.6596072 3.13586 3.7218628 2.4389157 2.5099137 2.6248925 -1.7687082 -0.7112431 -0.17991124]]]; ov_res: [[[-2.7013268 -2.659607 3.13586 3.7218628 2.438916 2.509914 2.6248922 -1.7687082 -0.7112431 -0.1799112]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3], 'strides': [1], 'pads': 'valid', 'dilations': [1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1089.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.0805}]() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.0780 -0.0010 -0.0884 0.5880 1.0742 0.1437 -0.7674 0.9618 -0.6328 [ CPUFloatType{1,3,3} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[-0.2596878 -3.055072 3.1117756 4.6321254 0.9909574 -0.32079116 2.448129 -3.3721063 ]]]; ov_res: [[[-0.2596879 -3.0550725 3.1117759 4.632126 0.9909571 -0.32079136 2.4481287 -3.3721058 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3], 'strides': [1], 'pads': 'same', 'dilations': [2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1091.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.017609}]() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.7880 0.8940 -0.3568 2.9713 -1.3395 2.0898 -1.9368 -1.0585 -0.8500 [ CPUFloatType{1,3,3} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[ 2.6059575 -1.0009942 -5.759382 -2.365489 0.8730943 -1.369864 3.6368964 -4.138796 0.9759162 1.4258159]]]; ov_res: [[[ 2.6059573 -1.0009943 -5.7593822 -2.3654888 0.87309444 -1.3698641 3.6368966 -4.138796 0.97591627 1.425816 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3], 'strides': [1], 'pads': 'valid', 'dilations': [2], 'groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1093.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.10961}]() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -1.5895 0.6082 -0.5827 -0.3762 0.1765 -0.2375 -0.2195 0.4097 0.6919 [ CPUFloatType{1,3,3} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[-3.4420414 -0.4231136 0.7930545 -0.28874132 -3.3035502 4.237644 ]]]; ov_res: [[[-3.4420414 -0.42311358 0.79305434 -0.28874135 -3.3035502 4.2376437 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': 'same', 'dilations': [1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1095.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.0007 -0.1323 -1.0815 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 2.0753 (2,.,.) = 1.0508 (3,.,.) = -1.3365 [ CPUFloatType{3,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[-2.8573744 0.41764572 -2.375593 0.95032966 -1.4078295 1.1301725 -1.4232904 -1.3342602 -1.1408464 -2.1046948 ] [ 1.1488671 -1.4274162 -1.0299035 0.01807787 -0.36416748 -0.74240774 -0.34261185 0.9211795 -0.9150966 0.58123034] [-1.334726 -0.9416013 -2.0116155 -1.6288706 -0.61306065 -3.0643475 -2.3774276 -0.1169082 -0.87297165 -1.2233385 ]]]; ov_res: [[[-2.8573744 0.41764572 -2.375593 0.95032966 -1.4078295 1.1301725 -1.4232904 -1.3342602 -1.1408464 -2.1046948 ] [ 1.1488671 -1.4274162 -1.0299035 0.01807787 -0.36416748 -0.74240774 -0.34261185 0.9211795 -0.9150966 0.58123034] [-1.334726 -0.9416013 -2.0116155 -1.6288706 -0.61306065 -3.0643475 -2.3774276 -0.1169082 -0.87297165 -1.2233385 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': 'valid', 'dilations': [1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1097.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.8502 -0.3770 0.9397 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.1688 (2,.,.) = -0.6116 (3,.,.) = 2.1304 [ CPUFloatType{3,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[-1.0166278 -0.9817982 -0.7663592 -0.8479326 -0.73116463 -0.95654833 -0.9165278 -1.0121047 -0.85283595 -1.218991 ] [ 0.3082712 0.07432216 -0.70657116 0.07390162 -0.7269295 -0.11778584 -1.1909369 -0.12676531 -0.45041558 -0.9230868 ] [ 4.031963 2.0443108 -1.5674436 5.1285276 0.38137922 -1.9212128 2.327978 -1.6763842 1.9284576 1.1147326 ]]]; ov_res: [[[-1.0166278 -0.9817982 -0.7663592 -0.8479326 -0.73116463 -0.95654833 -0.9165278 -1.0121047 -0.85283595 -1.218991 ] [ 0.3082712 0.07432216 -0.70657116 0.07390162 -0.7269295 -0.11778584 -1.1909369 -0.12676531 -0.45041558 -0.9230868 ] [ 4.031963 2.0443108 -1.5674436 5.1285276 0.38137922 -1.9212128 2.327978 -1.6763842 1.9284576 1.1147326 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3], 'strides': [2], 'pads': 'valid', 'dilations': [1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1099.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.67599}]() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.4479 0.8177 -0.4737 -0.1579 1.1115 0.7642 0.2442 -2.3908 -0.6885 [ CPUFloatType{1,3,3} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[-4.5523305 -2.089174 0.20204051 2.4812245 ]]]; ov_res: [[[-4.5523305 -2.089174 0.20204067 2.481225 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3], 'strides': [2], 'pads': 'valid', 'dilations': [2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1101.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-1.28278}]() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.0385 1.1805 -0.4388 -0.1139 -1.4377 1.1583 1.7090 0.3945 1.2072 [ CPUFloatType{1,3,3} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[-1.8276247 3.793036 -3.453083 ]]]; ov_res: [[[-1.8276246 3.793036 -3.453083 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': 'same', 'dilations': [2], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1103.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 1.5044 2.0136 -0.4977 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.3078 (2,.,.) = 0.1446 (3,.,.) = 0.001 * -3.0888 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[ 4.3324428 2.2946463 2.3346853 -0.43254152 2.1255746 2.160399 3.1805766 1.6523037 1.7765384 0.73496044] [ 2.041863 1.8876585 2.1112301 2.12394 2.2332737 2.093403 2.1043227 2.1694086 1.900104 2.0071583 ] [-0.49434096 -0.50084794 -0.49502954 -0.49436337 -0.4992416 -0.49880195 -0.50183046 -0.49503145 -0.496928 -0.49864605]]]; ov_res: [[[ 4.3324428 2.2946463 2.3346853 -0.43254152 2.1255746 2.160399 3.1805766 1.6523037 1.7765384 0.73496044] [ 2.041863 1.8876585 2.1112301 2.12394 2.2332737 2.093403 2.1043227 2.1694086 1.900104 2.0071583 ] [-0.49434096 -0.50084794 -0.49502954 -0.49436337 -0.4992416 -0.49880195 -0.50183046 -0.49503145 -0.496928 -0.49864605]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': 'valid', 'dilations': [2], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1105.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.7031 1.0498 0.4310 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.2422 (2,.,.) = 0.7148 (3,.,.) = -0.1322 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[-0.33892354 -1.0038782 -0.5209575 -0.40774864 -0.76848435 -1.2866025 -0.43817872 -0.6169162 -0.6851396 -1.3032849 ] [ 0.00767931 1.3748397 1.0815156 0.77830064 0.99662995 0.7692756 1.0008034 1.006022 0.5267501 0.81895715] [ 0.3200584 0.31320456 0.6005633 0.22334398 0.39272967 0.51761293 0.47193146 0.29272342 0.22648965 0.21713077]]]; ov_res: [[[-0.33892354 -1.0038782 -0.5209575 -0.40774864 -0.76848435 -1.2866025 -0.43817872 -0.6169162 -0.6851396 -1.3032849 ] [ 0.00767931 1.3748397 1.0815156 0.77830064 0.99662995 0.7692756 1.0008034 1.006022 0.5267501 0.81895715] [ 0.3200584 0.31320456 0.6005633 0.22334398 0.39272967 0.51761293 0.47193146 0.29272342 0.22648965 0.21713077]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3], 'strides': [1], 'pads': 'same', 'dilations': [1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1107.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.0477 -0.1905 -0.2390 1.7005 -0.9988 0.1755 -1.8259 -0.2676 -0.5172 [ CPUFloatType{1,3,3} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[ 0.00735086 0.818557 0.7756966 1.0602349 -4.8953156 1.5460386 2.7356346 2.7207065 0.1282225 3.7453787 ]]]; ov_res: [[[ 0.00735086 0.818557 0.7756966 1.0602348 -4.895315 1.5460386 2.7356348 2.7207065 0.12822247 3.7453787 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3], 'strides': [1], 'pads': 'valid', 'dilations': [1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1109.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.2207 -1.0033 -1.6939 -0.9388 0.9785 0.3308 -0.1384 -0.1646 -0.8475 [ CPUFloatType{1,3,3} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[-1.2986637 -4.070331 -3.3206534 0.5453119 -1.5293659 -2.8661323 1.4480544 7.507576 ]]]; ov_res: [[[-1.2986636 -4.070331 -3.3206532 0.5453119 -1.5293659 -2.8661325 1.4480543 7.507576 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3], 'strides': [1], 'pads': 'same', 'dilations': [2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1111.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -1.2032 -0.9548 0.3086 0.6820 1.3834 0.0188 -1.6929 -0.1290 0.1797 [ CPUFloatType{1,3,3} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[-2.0376046 2.6533992 1.5381299 4.1721487 -2.368953 3.2659998 -1.6246271 2.770997 1.8974041 5.1132116]]]; ov_res: [[[-2.0376046 2.6533992 1.5381298 4.1721487 -2.3689532 3.2659998 -1.6246274 2.770997 1.8974042 5.113212 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3], 'strides': [1], 'pads': 'valid', 'dilations': [2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1113.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -2.0259 -0.4215 1.9582 -0.2706 -2.0480 -2.1541 1.2112 0.8609 1.0912 [ CPUFloatType{1,3,3} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[-1.5386344 -2.0670624 0.1237435 2.0076547 -1.3445458 1.4581915]]]; ov_res: [[[-1.5386344 -2.0670624 0.12374362 2.0076547 -1.3445455 1.4581916 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': 'same', 'dilations': [1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1115.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -1.7149 (2,.,.) = -2.1344 (3,.,.) = 1.6410 [ CPUFloatType{3,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[-1.2408347 2.099468 -1.2574979 -1.2639538 2.9022853 -0.71014524 -0.79094917 -1.8147587 -2.2484734 0.22789171] [ 0.23409006 -1.5702637 1.7176536 -0.83332825 -3.6281507 0.5836215 4.956859 -0.624769 -5.5000887 -2.4249873 ] [ 0.09893302 1.8921696 -1.8768926 -3.4875517 -0.94059235 -0.9211165 -1.1615999 -0.4708553 0.3852509 1.1499854 ]]]; ov_res: [[[-1.2408347 2.099468 -1.2574979 -1.2639538 2.9022853 -0.71014524 -0.79094917 -1.8147587 -2.2484734 0.22789171] [ 0.23409006 -1.5702637 1.7176536 -0.83332825 -3.6281507 0.5836215 4.956859 -0.624769 -5.5000887 -2.4249873 ] [ 0.09893302 1.8921696 -1.8768926 -3.4875517 -0.94059235 -0.9211165 -1.1615999 -0.4708553 0.3852509 1.1499854 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': 'valid', 'dilations': [1], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1117.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.3872 (2,.,.) = 0.6769 (3,.,.) = -1.1484 [ CPUFloatType{3,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) efine PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to 
cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schfw_re: [[[-0.9328119 0.198032 0.31962833 -0.29629096 -0.36343646 -0.11564321 0.0146219 0.17784223 -0.33725926 0.6513716 ] [ 0.7092599 0.03037353 0.4833148 0.43135378 -0.4747286 1.4148321 0.49056625 0.19788137 0.36555535 -1.0631099 ] [ 2.4844007 -0.9725657 1.4205717 1.4713519 0.62391156 -1.1179309 -0.6811636 0.04667059 0.640485 1.689015 ]]]; ov_res: [[[-0.9328119 0.198032 0.31962833 -0.29629096 -0.36343646 -0.11564321 0.0146219 0.17784223 -0.33725926 0.6513716 ] [ 0.7092599 0.03037353 0.4833148 0.43135378 -0.4747286 1.4148321 0.49056625 0.19788137 0.36555535 -1.0631099 ] [ 2.4844007 -0.9725657 1.4205717 1.4713519 0.62391156 -1.1179309 -0.6811636 0.04667059 0.640485 1.689015 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3], 'strides': [2], 'pads': 'valid', 'dilations': [1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1119.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.1034 -1.2129 0.8873 -0.4899 -1.0319 -0.3186 0.9004 -0.2747 0.8319 [ CPUFloatType{1,3,3} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[-1.9655999 0.33583477 0.36858246 3.1390238 ]]]; ov_res: [[[-1.9656001 0.33583465 0.36858246 3.139024 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3], 'strides': [2], 'pads': 'valid', 'dilations': [2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1121.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, strides=[9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 0.0680 -1.0018 0.9650 -0.7700 -1.1579 -0.2987 -0.1652 0.1141 0.9557 [ CPUFloatType{1,3,3} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[-2.5136392 -1.0526264 -2.0706775]]]; ov_res: [[[-2.513639 -1.0526264 -2.0706773]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': 'same', 'dilations': [2], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1123.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = 1.4943 (2,.,.) = 0.01 * -2.1669 (3,.,.) = 0.3952 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[-1.1209246e+00 -6.8316740e-01 -9.3708521e-01 1.0855053e-01 8.7848127e-01 -5.3312904e-01 -1.9513745e+00 -1.9340769e+00 -2.4640967e-01 -3.6526304e-01] [-1.2273459e-03 -2.2048259e-02 -8.0997945e-04 1.0682498e-02 -1.9954789e-02 1.3473808e-02 1.2673766e-02 1.4046299e-02 -1.2848027e-02 1.3722989e-02] [-2.4362071e-01 5.0598252e-01 -5.7284290e-01 4.1870024e-02 3.8739526e-01 9.1456354e-01 4.2608592e-01 2.4982095e-03 2.6136863e-01 1.9379051e-01]]]; ov_res: [[[-1.1209246e+00 -6.8316740e-01 -9.3708521e-01 1.0855053e-01 8.7848127e-01 -5.3312904e-01 -1.9513745e+00 -1.9340769e+00 -2.4640967e-01 -3.6526304e-01] [-1.2273459e-03 -2.2048259e-02 -8.0997945e-04 1.0682498e-02 -1.9954789e-02 1.3473808e-02 1.2673766e-02 1.4046299e-02 -1.2848027e-02 1.3722989e-02] [-2.4362071e-01 5.0598252e-01 -5.7284290e-01 4.1870024e-02 3.8739526e-01 9.1456354e-01 4.2608592e-01 2.4982095e-03 2.6136863e-01 1.9379051e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_1d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1], 'strides': [1], 'pads': 'valid', 'dilations': [2], 'groups': 3} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1125.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, strides=[1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,.,.) = -0.9446 (2,.,.) = -0.4525 (3,.,.) = -0.8812 [ CPUFloatType{3,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[-0.24741867 0.31185418 -0.5110467 -1.7954221 0.60525614 0.6328386 -0.06479248 -1.070159 -0.08334438 2.0562236 ] [ 0.16431281 0.17629254 0.35927078 -0.11197949 1.0562401 -0.01102882 0.21140213 0.04425862 -0.06169746 0.81634307] [-0.27010718 1.2130882 -0.5704646 0.72462803 0.45897615 -0.5461313 -0.03389997 0.27945492 0.9189822 -0.10894805]]]; ov_res: [[[-0.24741867 0.31185418 -0.5110467 -1.7954221 0.60525614 0.6328386 -0.06479248 -1.070159 -0.08334438 2.0562236 ] [ 0.16431281 0.17629254 0.35927078 -0.11197949 1.0562401 -0.01102882 0.21140213 0.04425862 -0.06169746 0.81634307] [-0.27010718 1.2130882 -0.5704646 0.72462803 0.45897615 -0.5461313 -0.03389997 0.27945492 0.9189822 -0.10894805]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': [1, 1, 1], 'pads': 'same', 'dilations': [1, 1, 1], 'groups': 1} ] | 0.09 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1127.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.380544}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[[ 5.36823845e+00 6.04532099e+00 -4.05976248e+00 4.09191656e+00 7.86615849e+00 -6.02464318e-01 -5.41988039e+00 -4.13944769e+00 9.05521512e-02 -7.03444767e+00] [-1.41010170e+01 -3.94539261e+00 3.14994669e+00 -1.06137383e+00 3.99107337e+00 5.96907616e+00 -9.74751413e-01 -1.35410490e+01 2.99630141e+00 1.32212806e+00] [-1.22313046e+00 -1.23544493e+01 2.23504257e+00 1.23483362e+01 8.28206956e-01 -7.78822041e+00 -3.72657490e+00 5.55269909e+00 6.57779121e+00 -1.00168591e+01] [ 8.46683216e+00 6.82821131e+00 -4.89193058e+00 4.22563505e+00 1.42434001e+00 9.53487015e+00 2.64751339e+00 -1.11634457e+00 5.21223164e+00 5.67387962e+00] [-1.25166750e+00 -2.16267323e+00 -4.31236172e+00 2.15411401e+00 -1.81374991e+00 1.79166431e+01 2.91588759e+00 -6.64408159e+00 -1.26223125e+01 5.41507721e-01] [ 1.07782831e+01 -5.10028458e+00 -9.61616230e+00 -1.73213375e+00 -5.64021230e-01 -1.31665354e+01 4.82791364e-01 8.88333702e+00 7.26216888e+00 7.63983107e+00] [ 1.29762287e+01 1.25257654e+01 -5.02177656e-01 1.20307846e+01 -2.61066508e+00 -7.12049723e+00 -1.78353004e+01 -1.48061447e+01 1.21679096e+01 8.98757267e+00] [ 1.05396293e-01 1.41886234e+01 -5.15338087e+00 -1.02450180e+01 1.01317823e+00 
7.27345514e+00 8.30543709e+00 -9.75141430e+00 7.00992537e+00 7.30155277e+00] [ 8.35839367e+00 2.69398904e+00 3.99663305e+00 -1.18010435e+01 8.27751446e+00 1.58076811e+01 8.70049763e+00 1.89231777e+01 -9.46757317e+00 -5.58944750e+00] [ 5.75342464e+00 -3.55256343e+00 -8.94075966e+00 -4.81840563e+00 -1.19718285e+01 -3.27388096e+00 -4.10200834e-01 2.31794953e+00 5.10027122e+00 -7.64717674e+00]] [[ 2.80015469e+00 -4.03345495e-01 -6.08000326e+00 1.97251880e+00 2.27559795e+01 5.30284882e+00 4.69044536e-01 -4.85478354e+00 1.16215219e+01 -1.45328856e+01] [-8.04147530e+00 8.47456551e+00 6.15550756e+00 -2.77999949e+00 -5.66374540e+00 1.94021778e+01 6.48447275e+00 7.53273368e-01 -2.01374989e+01 -3.49313855e+00] [ 3.09458613e-01 -2.33317161e+00 -1.63130379e+01 1.29592931e+00 -1.76697731e+00 8.05749321e+00 -2.23225856e+00 -2.47467017e+00 -1.20233717e+01 -4.64234066e+00] [ 1.13656816e+01 6.06492281e+00 1.00025377e+01 1.46958962e-01 -1.63298168e+01 2.62484097e+00 2.08271122e+00 4.61625385e+00 8.13358116e+00 -5.47718406e-01] [ 7.33684587e+00 -1.14831114e+01 4.52555370e+00 -1.99341369e+00 8.16724586e+00 -2.38847756e+00 2.56702232e+00 1.11408520e+01 -8.38156033e+00 3.31729674e+00] [ 2.16308880e+00 2.15557075e+00 -2.99509740e+00 -6.70578575e+00 2.81926298e+00 -3.82032490e+00 1.06707697e+01 4.80801773e+00 -1.31586361e+00 6.97718239e+00] [-6.01597595e+00 -2.84934473e+00 -1.56727972e+01 -1.39966002e+01 2.18580437e+00 -9.59975338e+00 1.88566411e+00 1.21766436e+00 1.61671886e+01 -3.41503572e+00] [-7.67609453e+00 -1.58664618e+01 -7.43565083e+00 -1.06079054e+01 8.58978653e+00 9.54537487e+00 -8.72742176e+00 -9.35075283e+00 1.26811123e+00 7.27502060e+00] [-4.38668013e+00 6.22971821e+00 5.89746284e+00 1.28962145e+01 -7.82539177e+00 9.51288128e+00 3.36794925e+00 1.41935911e+01 3.16949606e+00 6.57562351e+00] [ 1.50756159e+01 7.42636013e+00 -5.02790356e+00 2.76585460e+00 -1.10315294e+01 4.34642315e+00 4.71939468e+00 9.09496975e+00 3.87997150e+00 1.59179282e+00]] [[ 6.95360541e-01 -2.33281803e+00 
3.12285900e+00 -6.23555088e+00 1.63800354e+01 6.44048452e+00 1.30496740e+00 -1.41462767e+00 -8.29355335e+00 -6.42704582e+00] [-1.25372705e+01 -1.62596989e+01 6.61201859e+00 2.22640061e+00 8.74469090e+00 -8.97663593e+00 -9.95290661e+00 6.45360613e+00 6.19849920e+00 1.85579796e+01] [ 6.82845056e-01 -1.74653511e+01 3.77920532e+00 1.30642614e+01 5.69368792e+00 2.07614517e+00 5.11899137e+00 4.96924877e+00 -6.20763159e+00 1.00486641e+01] [ 1.08442192e+01 -8.02651644e-01 1.69187107e+01 1.12222214e+01 -2.22836232e+00 8.07445240e+00 -1.21782064e+01 1.66250682e+00 6.33204412e+00 -9.45976973e-01] [ 2.05211210e+00 -6.25913560e-01 -8.06363201e+00 -7.37622917e-01 9.20096684e+00 -3.72367561e-01 9.59208679e+00 8.43388736e-01 -1.51076403e+01 1.93661919e+01] [ 3.33675885e+00 5.65670109e+00 -1.56386223e+01 -3.28542614e+00 5.65059566e+00 4.49971437e+00 2.99717283e+00 -2.90577561e-01 -9.39918232e+00 4.12802839e+00] [-9.31636393e-01 1.52506390e+01 8.42754364e+00 -5.33232594e+00 -9.35949326e-01 -2.80723691e+00 2.36961603e-01 -5.93381310e+00 3.57071757e+00 2.38471007e+00] [ 5.53214359e+00 3.98365664e+00 2.06856155e+00 -1.09374009e-01 6.05434299e-01 -1.02739029e+01 -9.16158772e+00 1.75619149e+00 -9.92439079e+00 3.08309555e-01] [ 1.31110878e+01 1.39508514e+01 -2.43160462e+00 3.46840763e+00 1.40695162e+01 1.05595362e+00 1.38110857e+01 1.75161052e+00 -9.31597233e-01 -4.29673100e+00] [-1.56734009e+01 -1.88172588e+01 1.65893316e+00 -1.41264181e+01 -1.73392830e+01 -4.33088636e+00 1.68419242e+00 -1.92816219e+01 -2.25329132e+01 -8.74298573e+00]] [[-4.03367609e-01 8.83763695e+00 1.04165459e+01 5.93881464e+00 -3.20929956e+00 -1.26909208e+00 4.78458738e+00 -4.67462122e-01 -1.67526169e+01 -1.54882774e+01] [ 1.65138543e+00 1.09491253e+01 -1.51089144e+01 -1.06711426e+01 7.19734383e+00 -5.58847570e+00 -1.82357998e+01 6.51763916e+00 1.00876150e+01 -9.48245525e-02] [-1.45124130e+01 5.87118752e-02 -1.12707510e+01 4.95849037e+00 -1.16064072e+00 1.10579987e+01 -7.10296154e+00 -1.89291668e+01 1.88692417e+01 
4.68381834e+00] [ 1.02893629e+01 -1.38405228e+00 -1.58212671e+01 8.85528564e+00 -9.69996834e+00 -8.64705276e+00 -1.63190007e+00 -9.61125565e+00 -3.57473588e+00 -9.69206333e+00] [-4.37030345e-01 2.26304226e+01 4.80617619e+00 1.13707042e+00 7.80532503e+00 4.83407354e+00 -5.55594683e+00 8.84018421e+00 -4.77366924e+00 -1.73768177e+01] [ 1.26489201e+01 1.29848986e+01 5.99988079e+00 1.15091219e+01 -7.16774702e+00 -3.34433532e+00 -1.51555271e+01 8.84352982e-01 9.99335575e+00 2.45911384e+00] [ 4.68809891e+00 7.22950459e+00 5.76505041e+00 -1.17775476e+00 -1.04903030e+01 1.02365437e+01 3.26599646e+00 -5.66539192e+00 6.92082739e+00 -1.86025906e+00] [-1.23648005e+01 -8.22843456e+00 3.79064083e+00 -1.06090851e+01 1.42272625e+01 -1.58161526e+01 2.79081464e+00 -3.23399997e+00 4.32874823e+00 2.69078588e+00] [ 1.11971035e+01 5.70358419e+00 5.48269081e+00 -1.01322508e+01 -7.93462372e+00 1.69016628e+01 1.32734365e+01 -2.58399372e+01 -2.56520414e+00 3.43621683e+00] [-9.84124541e-02 -1.83312559e+00 3.73839426e+00 1.86144962e+01 -6.12768221e+00 -4.67418575e+00 -8.11763668e+00 -2.79328966e+00 -1.21742592e+01 -5.45029926e+00]] [[ 5.94590127e-01 -1.47699423e+01 -2.69085795e-01 -2.42413425e+00 5.51270437e+00 3.25407028e+00 2.97287345e+00 -8.18217564e+00 3.14949512e+00 -7.50994873e+00] [-1.15453091e+01 -1.50200071e+01 -8.41387939e+00 -4.40163612e+00 -3.91285396e+00 -1.19999781e+01 5.05489397e+00 2.22482224e+01 2.88040257e+00 1.40758381e+01] [ 1.82727375e+01 9.91884804e+00 -4.59373206e-01 -1.51820335e+01 2.85674095e+00 1.52369022e+01 -1.13180342e+01 -1.19164495e+01 -3.65517068e+00 -1.79189682e+00] [ 1.54369555e+01 -4.77361727e+00 2.21096954e+01 2.12982345e+00 -6.26946092e-01 -1.09476252e+01 8.12821960e+00 -3.66173649e+00 -8.98803043e+00 -2.52358294e+00] [ 3.60168290e+00 -6.51812494e-01 1.43490067e+01 7.03036642e+00 1.41037169e+01 1.76723824e+01 -2.36316299e+00 3.98359895e+00 1.12027848e+00 -9.13369942e+00] [-1.41425762e+01 4.39143944e+00 6.73117065e+00 4.46075773e+00 -3.50557280e+00 
6.70821142e+00 -1.39177494e+01 5.17756128e+00 -1.13130198e+01 3.01481342e+00] [-6.68796873e+00 -1.41675692e+01 1.80961514e+01 6.33847857e+00 1.98584957e+01 -5.89782524e+00 -6.17158175e+00 2.81746268e+00 -1.08630962e+01 -3.62820172e+00] [-3.10330033e+00 -3.71201307e-01 1.00363827e+01 -9.66455841e+00 2.20420265e+01 6.34036064e+00 1.87124004e+01 1.07011576e+01 1.86380267e+00 3.94402170e+00] [ 1.09390392e+01 -2.32037401e+00 1.66723309e+01 1.23125696e+01 -8.45523357e+00 4.64591408e+00 -7.79681921e+00 3.96327353e+00 1.13991919e+01 3.73443151e+00] [ 3.64412308e+00 -1.24887323e+00 -4.93415117e-01 5.25189495e+00 -4.75688744e+00 1.41161823e+01 -7.17393017e+00 -6.88225603e+00 -9.22837925e+00 -5.84112453e+00]] [[ 1.04200697e+00 7.80517197e+00 1.06723013e+01 -6.17809772e-01 2.23118424e+00 -6.52172089e+00 -2.37039566e+00 2.89360952e+00 -6.91385412e+00 -1.09355831e+01] [-5.89309406e+00 -1.04522400e+01 7.36921930e+00 -2.91456747e+00 -2.41115093e+01 -3.05081177e+00 -4.21305561e+00 4.53889179e+00 4.54775095e+00 -2.31115294e+00] [ 1.14158611e+01 1.53089886e+01 -1.03359480e+01 -8.23788166e+00 -1.43831873e+01 -8.09735298e+00 -1.20342264e+01 6.54411650e+00 1.23502007e+01 -2.13233185e+00] [ 2.06706977e+00 -1.73641682e+00 7.99502313e-01 -1.41659765e+01 -2.47447662e+01 -9.40513706e+00 -2.84187722e+00 7.86800206e-01 -1.92286655e-01 2.24585705e+01] [ 1.21210089e+01 1.54960155e+01 1.05581492e-01 -1.32867777e+00 -4.28978729e+00 -4.16893148e+00 -8.04468822e+00 1.03579512e+01 -2.55853534e+00 -9.67102909e+00] [ 1.27882826e+00 1.13287890e+00 -8.84613609e+00 3.79076886e+00 -8.32848835e+00 6.04421258e-01 -2.47463012e+00 -1.37135029e+01 6.47694731e+00 1.45077772e+01] [ 1.19439459e+00 1.72533870e+00 -2.53740907e+00 -4.43063879e+00 -5.87003350e-01 4.78023434e+00 -1.02056847e+01 -2.90885592e+00 7.90593863e+00 -1.11873541e+01] [ 1.17657337e+01 -4.14883643e-01 -1.01990623e+01 -1.65322247e+01 -2.20942783e+00 3.42289567e+00 1.46328220e+01 -1.71312904e+01 -5.36921978e+00 1.16264319e+00] [-1.40335674e+01 
1.33046341e+00 2.69199824e+00 -4.28580236e+00 -9.72178650e+00 -1.16631365e+01 7.61669636e+00 -3.57843804e+00 4.47046852e+00 -6.57899976e-01] [-1.14918842e+01 1.60364795e+00 -1.85200844e+01 -4.68438530e+00 6.27461052e+00 2.12679386e-01 -4.35220623e+00 -3.46228790e+00 -2.41345692e+00 1.59601188e+00]] [[ 1.41387534e+00 -2.57023573e+00 -7.37657022e+00 -8.16115665e+00 -5.14128733e+00 -5.30593824e+00 9.65637112e+00 -3.28704739e+00 5.78380786e-02 -5.67205191e-01] [-2.36470175e+00 5.58197308e+00 7.11873627e+00 4.09718180e+00 2.21423936e+00 -6.44684029e+00 -3.54192066e+00 -1.12541323e+01 2.09905262e+01 2.67662258e+01] [ 1.64624286e+00 1.15059633e+01 2.17333183e+01 8.41004467e+00 -2.05628324e+00 -4.02047682e+00 -8.09748363e+00 -7.44982195e+00 -1.17685728e+01 -4.59848213e+00] [ 1.39308004e+01 1.42182970e+01 3.71757865e+00 -1.34503708e+01 -2.80262394e+01 -5.58212471e+00 4.96181822e+00 -1.83044453e+01 -5.73925257e+00 9.40120602e+00] [-3.38542914e+00 1.76561470e+01 2.36855774e+01 1.20310640e+01 -3.53447485e+00 -4.94648027e+00 -1.28049183e+01 7.99587345e+00 -2.99939275e+00 -9.51737785e+00] [ 2.36654949e+00 -1.22094774e+01 -1.39177263e+00 1.62539978e+01 1.69285238e+00 1.25230198e+01 -1.17915239e+01 -2.22021627e+00 4.44018698e+00 -1.89339876e-01] [ 2.07514286e+00 3.08659458e+00 5.01749897e+00 1.16516294e+01 -1.22200155e+01 -3.87994289e+00 -5.92364645e+00 1.32309541e-01 5.50663996e+00 6.08215380e+00] [-1.90048943e+01 7.79315770e-01 3.94024342e-01 6.70821428e+00 -5.10009480e+00 8.80661678e+00 -3.27084136e+00 -5.79154062e+00 -1.73878517e+01 6.79772854e+00] [ 9.03057575e+00 3.21761203e+00 -3.11487794e+00 3.21846581e+00 2.19959807e+00 6.37426233e+00 2.42296624e+00 -3.25836539e+00 6.53152180e+00 3.09233975e+00] [-2.43745375e+00 5.39692116e+00 5.36180019e+00 -7.09314823e+00 1.27992887e+01 -3.03195000e-01 -8.83050728e+00 -6.99467659e-01 1.51978922e+00 -2.30883360e+00]] [[-4.59643781e-01 5.09862328e+00 1.21125564e-01 -2.95032454e+00 1.35633030e+01 1.01587124e+01 -1.09413071e+01 
1.02154703e+01 6.15069246e+00 -1.43244648e+01] [-8.59998465e-01 -2.00184420e-01 -7.47070217e+00 1.68168330e+00 -4.79501581e+00 -3.20754826e-01 -6.15177584e+00 1.26453047e+01 -2.76337671e+00 4.10777330e+00] [ 7.10512209e+00 -5.27535391e+00 5.64623594e+00 3.06699038e+00 -1.41517506e+01 1.77306318e+00 -9.42302942e-01 1.47693901e+01 -3.98008871e+00 2.93024778e+00] [-4.37340307e+00 8.72987461e+00 -7.52415848e+00 -2.16738343e+00 2.46711701e-01 3.12881231e+00 -6.40716743e+00 5.31311464e+00 -9.04235458e+00 -1.53271074e+01] [-3.12676764e+00 8.98696136e+00 9.05569363e+00 -3.80992079e+00 -8.44678879e+00 -8.50984669e+00 9.50085342e-01 -4.58489895e+00 -1.65671196e+01 -1.95853567e+00] [-6.58583999e-01 -7.65878201e+00 -8.92732620e+00 -2.47578983e+01 -2.23292809e+01 -5.77449417e+00 1.62961082e+01 -4.59944868e+00 -1.29942751e+01 -7.00725508e+00] [-3.98533559e+00 8.71526814e+00 5.47677469e+00 4.64206457e+00 1.41240168e+01 -8.14422488e-01 -1.96600807e+00 -5.74879789e+00 1.31045783e+00 -1.12653122e+01] [-1.19244471e+01 -4.84834099e+00 1.19215631e+01 -1.03459806e+01 -8.86952019e+00 -8.95811462e+00 -9.46549320e+00 -1.50033970e+01 -6.33156633e+00 1.10417223e+00] [-3.27138948e+00 -1.32405198e+00 2.16473675e+00 -1.21564960e+01 2.73355465e+01 -1.52188263e+01 -1.03393068e+01 -4.73087263e+00 -1.33157473e+01 -2.66279364e+00] [-5.41610360e-01 1.07335615e+00 -3.12117004e+00 -9.53704453e+00 9.78885651e+00 2.55631471e+00 8.65074062e+00 1.87672162e+00 -1.05947828e+00 9.08391953e-01]] [[ 5.05991399e-01 -2.05616188e+00 -7.43866968e+00 1.38872170e+00 5.04066515e+00 2.05899525e+00 9.27188301e+00 -6.08432293e+00 -3.23141098e+00 -9.30311871e+00] [ 4.38892078e+00 4.60281801e+00 2.79976559e+00 -3.42154551e+00 7.62165213e+00 4.37022257e+00 1.63761463e+01 -1.31398261e+00 1.78893363e+00 6.34685230e+00] [ 1.80924988e+01 -1.06801558e+00 -4.92716599e+00 1.18147888e+01 5.17526531e+00 -2.71578383e+00 9.20065022e+00 -6.93978739e+00 2.19719505e+01 5.64285564e+00] [ 6.38050461e+00 -8.56304264e+00 -1.86005630e+01 
-6.40595293e+00 -4.52212286e+00 4.03296566e+00 1.24473867e+01 -5.92158461e+00 -1.49119616e+00 1.77349246e+00] [-1.66292512e+00 -4.57569790e+00 7.85213375e+00 -2.80149722e+00 -1.52576008e+01 -1.33215666e+01 -1.78093224e+01 -6.78990269e+00 1.78550949e+01 1.35615883e+01] [-5.79218960e+00 -2.53671722e+01 7.03567028e+00 -6.70924711e+00 9.94192314e+00 -3.45141053e+00 3.53350830e+00 -1.45254135e+00 5.32482052e+00 -7.05680227e+00] [ 3.20674729e+00 1.01882191e+01 4.88948822e+00 3.25853705e-01 5.62861156e+00 -3.54614735e+00 1.02205658e+01 5.18150106e-02 1.66378212e+01 -1.19796181e+01] [-1.68262043e+01 -1.49986076e+01 -1.65520535e+01 2.19752288e+00 -2.49716210e+00 1.24695525e-02 1.08797989e+01 9.18946457e+00 -4.98662628e-02 5.53574514e+00] [ 3.87481594e+00 -1.09459658e+01 2.04603362e+00 6.73892069e+00 -1.68029652e+01 -4.48401070e+00 -3.95542812e+00 9.53733921e+00 -3.83028364e+00 -2.13450384e+00] [ 8.16543007e+00 3.94612074e-01 5.20034122e+00 2.44117141e+00 4.74200726e+00 6.19705534e+00 -1.16764212e+01 -4.84710979e+00 6.14361572e+00 -1.13149529e+01]] [[ 1.84987795e+00 -5.36630392e+00 -5.28682947e+00 9.42287827e+00 4.49902058e-01 9.21802044e+00 1.52027488e+00 9.19057846e+00 9.19960022e+00 2.08678484e+00] [-7.56000137e+00 -5.62541485e+00 -1.48047657e+01 -6.45966768e+00 1.44511976e+01 -1.77945781e+00 -1.79497206e+00 8.32743931e+00 7.21812153e+00 1.44671946e+01] [-1.74497736e+00 6.56413364e+00 1.90512600e+01 -4.69860983e+00 -8.47257853e-01 2.26723456e+00 3.03102684e+00 1.52821293e+01 3.53585911e+00 8.85222244e+00] [ 1.58817852e+00 6.37920666e+00 -4.93056679e+00 4.37525702e+00 9.29526043e+00 1.01375732e+01 3.09435678e+00 3.40466261e+00 -6.98375416e+00 3.55190992e+00] [ 9.91174698e+00 1.84748764e+01 -1.35617437e+01 -1.01046896e+01 2.30630779e+00 -3.24580383e+00 -1.05805531e+01 4.84425068e+00 -5.46917439e+00 1.10986018e+00] [-8.50174618e+00 -3.49832964e+00 -2.31360435e+00 -1.74256349e+00 -5.17185783e+00 4.22944450e+00 4.18063831e+00 -5.81572056e+00 -1.17293692e+01 4.91370869e+00] [ 
2.85747862e+00 2.67796874e+00 -6.29005814e+00 7.16286469e+00 -9.42690372e-01 1.20927687e+01 -2.32534027e+00 8.00966835e+00 -1.28278685e+00 -4.12868595e+00] [-1.11900258e+00 -2.47858787e+00 -5.31137800e+00 5.81823254e+00 -3.37838268e+00 1.82052803e+01 6.93483067e+00 -3.53120804e-01 -6.16725779e+00 -7.52487659e+00] [ 8.09690285e+00 8.59125614e+00 -6.07138038e-01 -2.46746898e+00 1.69963121e-01 -5.48580647e+00 5.86017323e+00 6.10399842e-01 -1.21264286e+01 -8.80547714e+00] [ 8.66171074e+00 8.78632736e+00 5.08632231e+00 3.73310733e+00 3.25363231e+00 -5.72187722e-01 6.29591036e+00 4.95686245e+00 -5.81602573e+00 -2.90654254e+00]]]]]; ov_res: [[[[[ 5.36823750e+00 6.04532003e+00 -4.05976200e+00 4.09191608e+00 7.86615849e+00 -6.02464914e-01 -5.41988039e+00 -4.13944769e+00 9.05513763e-02 -7.03444672e+00] [-1.41010180e+01 -3.94539332e+00 3.14994621e+00 -1.06137168e+00 3.99107385e+00 5.96907520e+00 -9.74752426e-01 -1.35410471e+01 2.99630022e+00 1.32212770e+00] [-1.22313046e+00 -1.23544512e+01 2.23504233e+00 1.23483381e+01 8.28207016e-01 -7.78822041e+00 -3.72657418e+00 5.55269861e+00 6.57779121e+00 -1.00168600e+01] [ 8.46683121e+00 6.82821131e+00 -4.89193058e+00 4.22563601e+00 1.42434084e+00 9.53487110e+00 2.64751291e+00 -1.11634624e+00 5.21223116e+00 5.67387962e+00] [-1.25166738e+00 -2.16267180e+00 -4.31236219e+00 2.15411425e+00 -1.81375003e+00 1.79166431e+01 2.91588783e+00 -6.64408112e+00 -1.26223125e+01 5.41507721e-01] [ 1.07782812e+01 -5.10028601e+00 -9.61616325e+00 -1.73213410e+00 -5.64022183e-01 -1.31665325e+01 4.82792020e-01 8.88333511e+00 7.26217031e+00 7.63983059e+00] [ 1.29762287e+01 1.25257635e+01 -5.02177715e-01 1.20307846e+01 -2.61066508e+00 -7.12049770e+00 -1.78353024e+01 -1.48061428e+01 1.21679096e+01 8.98757172e+00] [ 1.05396658e-01 1.41886282e+01 -5.15338087e+00 -1.02450161e+01 1.01317859e+00 7.27345419e+00 8.30543709e+00 -9.75141239e+00 7.00992489e+00 7.30155373e+00] [ 8.35839558e+00 2.69398713e+00 3.99663210e+00 -1.18010445e+01 8.27751350e+00 1.58076830e+01 
8.70049572e+00 1.89231758e+01 -9.46757507e+00 -5.58944988e+00] [ 5.75342512e+00 -3.55256367e+00 -8.94075871e+00 -4.81840372e+00 -1.19718304e+01 -3.27388096e+00 -4.10200089e-01 2.31794977e+00 5.10027075e+00 -7.64717674e+00]] [[ 2.80015397e+00 -4.03345972e-01 -6.08000183e+00 1.97251928e+00 2.27559757e+01 5.30284882e+00 4.69043016e-01 -4.85478401e+00 1.16215219e+01 -1.45328865e+01] [-8.04147530e+00 8.47456932e+00 6.15550756e+00 -2.77999926e+00 -5.66374636e+00 1.94021778e+01 6.48447084e+00 7.53274202e-01 -2.01374950e+01 -3.49313712e+00] [ 3.09458315e-01 -2.33317232e+00 -1.63130379e+01 1.29592907e+00 -1.76697612e+00 8.05749226e+00 -2.23225832e+00 -2.47466993e+00 -1.20233755e+01 -4.64234018e+00] [ 1.13656816e+01 6.06492281e+00 1.00025396e+01 1.46960720e-01 -1.63298111e+01 2.62484121e+00 2.08271217e+00 4.61625481e+00 8.13358021e+00 -5.47719717e-01] [ 7.33684683e+00 -1.14831114e+01 4.52555323e+00 -1.99341345e+00 8.16724491e+00 -2.38847685e+00 2.56701779e+00 1.11408501e+01 -8.38156033e+00 3.31729388e+00] [ 2.16308880e+00 2.15557432e+00 -2.99509573e+00 -6.70579052e+00 2.81926298e+00 -3.82032561e+00 1.06707668e+01 4.80801725e+00 -1.31586289e+00 6.97718334e+00] [-6.01597500e+00 -2.84934425e+00 -1.56728010e+01 -1.39966002e+01 2.18580461e+00 -9.59975243e+00 1.88566256e+00 1.21766436e+00 1.61671925e+01 -3.41503668e+00] [-7.67609692e+00 -1.58664656e+01 -7.43565178e+00 -1.06079063e+01 8.58978367e+00 9.54537582e+00 -8.72742081e+00 -9.35075378e+00 1.26810837e+00 7.27502155e+00] [-4.38668060e+00 6.22971964e+00 5.89746237e+00 1.28962164e+01 -7.82539272e+00 9.51288223e+00 3.36794972e+00 1.41935930e+01 3.16949701e+00 6.57562113e+00] [ 1.50756168e+01 7.42635965e+00 -5.02790308e+00 2.76585340e+00 -1.10315304e+01 4.34642124e+00 4.71939373e+00 9.09496975e+00 3.87997293e+00 1.59179258e+00]] [[ 6.95360839e-01 -2.33281684e+00 3.12285876e+00 -6.23555040e+00 1.63800373e+01 6.44048595e+00 1.30496693e+00 -1.41462791e+00 -8.29355431e+00 -6.42704821e+00] [-1.25372725e+01 -1.62596931e+01 
6.61202002e+00 2.22640085e+00 8.74468994e+00 -8.97663784e+00 -9.95290852e+00 6.45360661e+00 6.19850159e+00 1.85579796e+01] [ 6.82845414e-01 -1.74653530e+01 3.77920723e+00 1.30642624e+01 5.69368601e+00 2.07614517e+00 5.11899137e+00 4.96924686e+00 -6.20763063e+00 1.00486631e+01] [ 1.08442192e+01 -8.02652001e-01 1.69187145e+01 1.12222204e+01 -2.22836709e+00 8.07444859e+00 -1.21782074e+01 1.66250670e+00 6.33204269e+00 -9.45975423e-01] [ 2.05211306e+00 -6.25915885e-01 -8.06363106e+00 -7.37623572e-01 9.20096684e+00 -3.72368485e-01 9.59208775e+00 8.43389153e-01 -1.51076384e+01 1.93661957e+01] [ 3.33675814e+00 5.65670061e+00 -1.56386232e+01 -3.28542638e+00 5.65059328e+00 4.49971724e+00 2.99717236e+00 -2.90577680e-01 -9.39918041e+00 4.12802839e+00] [-9.31637645e-01 1.52506380e+01 8.42754555e+00 -5.33232641e+00 -9.35951710e-01 -2.80723739e+00 2.36960620e-01 -5.93381453e+00 3.57072043e+00 2.38470984e+00] [ 5.53214407e+00 3.98365593e+00 2.06856179e+00 -1.09375179e-01 6.05434000e-01 -1.02739058e+01 -9.16159534e+00 1.75619102e+00 -9.92439079e+00 3.08309913e-01] [ 1.31110868e+01 1.39508505e+01 -2.43160415e+00 3.46840787e+00 1.40695181e+01 1.05595458e+00 1.38110838e+01 1.75161028e+00 -9.31596756e-01 -4.29673147e+00] [-1.56733999e+01 -1.88172607e+01 1.65893352e+00 -1.41264143e+01 -1.73392754e+01 -4.33088446e+00 1.68419170e+00 -1.92816181e+01 -2.25329132e+01 -8.74298573e+00]] [[-4.03367549e-01 8.83763599e+00 1.04165449e+01 5.93881369e+00 -3.20929933e+00 -1.26909208e+00 4.78458881e+00 -4.67461973e-01 -1.67526169e+01 -1.54882784e+01] [ 1.65138614e+00 1.09491272e+01 -1.51089106e+01 -1.06711416e+01 7.19734430e+00 -5.58847666e+00 -1.82357979e+01 6.51763964e+00 1.00876160e+01 -9.48246121e-02] [-1.45124130e+01 5.87119758e-02 -1.12707491e+01 4.95848989e+00 -1.16064155e+00 1.10579987e+01 -7.10296106e+00 -1.89291649e+01 1.88692436e+01 4.68381691e+00] [ 1.02893667e+01 -1.38405120e+00 -1.58212652e+01 8.85528564e+00 -9.69996738e+00 -8.64705372e+00 -1.63190031e+00 -9.61125469e+00 -3.57473469e+00 
-9.69206047e+00] [-4.37030107e-01 2.26304264e+01 4.80617762e+00 1.13707018e+00 7.80532742e+00 4.83407545e+00 -5.55594873e+00 8.84018230e+00 -4.77366734e+00 -1.73768215e+01] [ 1.26489210e+01 1.29848986e+01 5.99988079e+00 1.15091238e+01 -7.16774797e+00 -3.34433270e+00 -1.51555262e+01 8.84353638e-01 9.99335670e+00 2.45911384e+00] [ 4.68809843e+00 7.22950411e+00 5.76504993e+00 -1.17775834e+00 -1.04903078e+01 1.02365437e+01 3.26599574e+00 -5.66539240e+00 6.92082739e+00 -1.86025882e+00] [-1.23648033e+01 -8.22843552e+00 3.79064107e+00 -1.06090851e+01 1.42272663e+01 -1.58161535e+01 2.79081440e+00 -3.23399973e+00 4.32874918e+00 2.69078541e+00] [ 1.11971035e+01 5.70358515e+00 5.48269176e+00 -1.01322508e+01 -7.93462467e+00 1.69016571e+01 1.32734356e+01 -2.58399391e+01 -2.56520414e+00 3.43621778e+00] [-9.84117687e-02 -1.83312607e+00 3.73839474e+00 1.86144943e+01 -6.12768269e+00 -4.67418575e+00 -8.11763859e+00 -2.79328990e+00 -1.21742611e+01 -5.45029974e+00]] [[ 5.94590187e-01 -1.47699423e+01 -2.69086212e-01 -2.42413568e+00 5.51270151e+00 3.25407028e+00 2.97287369e+00 -8.18217754e+00 3.14949489e+00 -7.50994730e+00] [-1.15453062e+01 -1.50200071e+01 -8.41388130e+00 -4.40163612e+00 -3.91285300e+00 -1.19999800e+01 5.05489254e+00 2.22482262e+01 2.88040328e+00 1.40758381e+01] [ 1.82727413e+01 9.91884804e+00 -4.59372014e-01 -1.51820335e+01 2.85674167e+00 1.52369022e+01 -1.13180332e+01 -1.19164543e+01 -3.65517068e+00 -1.79189658e+00] [ 1.54369535e+01 -4.77361870e+00 2.21096935e+01 2.12982011e+00 -6.26946092e-01 -1.09476252e+01 8.12821960e+00 -3.66173434e+00 -8.98802948e+00 -2.52358365e+00] [ 3.60168362e+00 -6.51813269e-01 1.43490057e+01 7.03036594e+00 1.41037130e+01 1.76723785e+01 -2.36316371e+00 3.98359823e+00 1.12027740e+00 -9.13370037e+00] [-1.41425734e+01 4.39143944e+00 6.73116732e+00 4.46075773e+00 -3.50557160e+00 6.70821095e+00 -1.39177504e+01 5.17756081e+00 -1.13130159e+01 3.01481557e+00] [-6.68796921e+00 -1.41675692e+01 1.80961533e+01 6.33847761e+00 1.98584976e+01 
-5.89782524e+00 -6.17158365e+00 2.81746197e+00 -1.08630915e+01 -3.62820268e+00] [-3.10330057e+00 -3.71200889e-01 1.00363855e+01 -9.66456127e+00 2.20420265e+01 6.34036064e+00 1.87124004e+01 1.07011566e+01 1.86380589e+00 3.94402170e+00] [ 1.09390383e+01 -2.32037282e+00 1.66723385e+01 1.23125677e+01 -8.45523262e+00 4.64591312e+00 -7.79681873e+00 3.96327496e+00 1.13991928e+01 3.73443174e+00] [ 3.64412236e+00 -1.24887240e+00 -4.93414909e-01 5.25189590e+00 -4.75688505e+00 1.41161823e+01 -7.17393160e+00 -6.88225508e+00 -9.22838116e+00 -5.84112358e+00]] [[ 1.04200661e+00 7.80517387e+00 1.06722994e+01 -6.17810488e-01 2.23118448e+00 -6.52172184e+00 -2.37039590e+00 2.89361024e+00 -6.91385269e+00 -1.09355841e+01] [-5.89309502e+00 -1.04522381e+01 7.36922026e+00 -2.91456914e+00 -2.41115055e+01 -3.05081224e+00 -4.21305561e+00 4.53889227e+00 4.54774714e+00 -2.31115150e+00] [ 1.14158630e+01 1.53089876e+01 -1.03359499e+01 -8.23788357e+00 -1.43831882e+01 -8.09735775e+00 -1.20342274e+01 6.54411554e+00 1.23502016e+01 -2.13233304e+00] [ 2.06706905e+00 -1.73641562e+00 7.99502969e-01 -1.41659756e+01 -2.47447643e+01 -9.40513802e+00 -2.84187627e+00 7.86799312e-01 -1.92285925e-01 2.24585724e+01] [ 1.21210089e+01 1.54960155e+01 1.05583251e-01 -1.32868040e+00 -4.28978920e+00 -4.16893196e+00 -8.04468822e+00 1.03579502e+01 -2.55853748e+00 -9.67102909e+00] [ 1.27882814e+00 1.13287711e+00 -8.84613895e+00 3.79076982e+00 -8.32849026e+00 6.04420006e-01 -2.47462869e+00 -1.37135057e+01 6.47694874e+00 1.45077782e+01] [ 1.19439530e+00 1.72534060e+00 -2.53740978e+00 -4.43063974e+00 -5.87002873e-01 4.78023481e+00 -1.02056856e+01 -2.90885568e+00 7.90593958e+00 -1.11873560e+01] [ 1.17657347e+01 -4.14883286e-01 -1.01990643e+01 -1.65322189e+01 -2.20942855e+00 3.42289615e+00 1.46328211e+01 -1.71312904e+01 -5.36922121e+00 1.16264272e+00] [-1.40335684e+01 1.33046269e+00 2.69199896e+00 -4.28580332e+00 -9.72179031e+00 -1.16631374e+01 7.61669827e+00 -3.57843828e+00 4.47046804e+00 -6.57900929e-01] [-1.14918842e+01 
1.60364664e+00 -1.85200825e+01 -4.68438435e+00 6.27461052e+00 2.12679043e-01 -4.35220575e+00 -3.46228814e+00 -2.41345644e+00 1.59601176e+00]] [[ 1.41387570e+00 -2.57023621e+00 -7.37657070e+00 -8.16115665e+00 -5.14128780e+00 -5.30593920e+00 9.65636826e+00 -3.28704715e+00 5.78376949e-02 -5.67205906e-01] [-2.36470175e+00 5.58197212e+00 7.11873484e+00 4.09718180e+00 2.21424103e+00 -6.44683981e+00 -3.54192138e+00 -1.12541304e+01 2.09905243e+01 2.67662239e+01] [ 1.64624214e+00 1.15059652e+01 2.17333221e+01 8.41004467e+00 -2.05628300e+00 -4.02047348e+00 -8.09748650e+00 -7.44982195e+00 -1.17685776e+01 -4.59848309e+00] [ 1.39308004e+01 1.42182980e+01 3.71757865e+00 -1.34503679e+01 -2.80262337e+01 -5.58212471e+00 4.96181917e+00 -1.83044415e+01 -5.73925352e+00 9.40120506e+00] [-3.38542962e+00 1.76561432e+01 2.36855793e+01 1.20310650e+01 -3.53447604e+00 -4.94648123e+00 -1.28049202e+01 7.99587250e+00 -2.99939251e+00 -9.51737785e+00] [ 2.36654806e+00 -1.22094784e+01 -1.39177120e+00 1.62540035e+01 1.69285154e+00 1.25230246e+01 -1.17915239e+01 -2.22021842e+00 4.44018793e+00 -1.89340144e-01] [ 2.07514215e+00 3.08659554e+00 5.01749706e+00 1.16516275e+01 -1.22200165e+01 -3.87994313e+00 -5.92364645e+00 1.32309258e-01 5.50663900e+00 6.08215475e+00] [-1.90048923e+01 7.79317379e-01 3.94023269e-01 6.70821047e+00 -5.10009813e+00 8.80661488e+00 -3.27084208e+00 -5.79154158e+00 -1.73878498e+01 6.79772711e+00] [ 9.03057575e+00 3.21761060e+00 -3.11487937e+00 3.21846509e+00 2.19959641e+00 6.37426376e+00 2.42296696e+00 -3.25836682e+00 6.53152227e+00 3.09233904e+00] [-2.43745399e+00 5.39692116e+00 5.36180210e+00 -7.09314823e+00 1.27992887e+01 -3.03196102e-01 -8.83050632e+00 -6.99468851e-01 1.51978981e+00 -2.30883288e+00]] [[-4.59644049e-01 5.09862280e+00 1.21126324e-01 -2.95032501e+00 1.35633030e+01 1.01587095e+01 -1.09413061e+01 1.02154713e+01 6.15069246e+00 -1.43244667e+01] [-8.59998345e-01 -2.00184911e-01 -7.47069931e+00 1.68168604e+00 -4.79501677e+00 -3.20754260e-01 -6.15177584e+00 
1.26453047e+01 -2.76337647e+00 4.10777473e+00] [ 7.10512257e+00 -5.27535534e+00 5.64623308e+00 3.06699085e+00 -1.41517487e+01 1.77306306e+00 -9.42304730e-01 1.47693920e+01 -3.98008943e+00 2.93024921e+00] [-4.37340212e+00 8.72987175e+00 -7.52416229e+00 -2.16738319e+00 2.46712357e-01 3.12881160e+00 -6.40716934e+00 5.31311417e+00 -9.04235458e+00 -1.53271055e+01] [-3.12676835e+00 8.98696232e+00 9.05569649e+00 -3.80992150e+00 -8.44678974e+00 -8.50984669e+00 9.50083375e-01 -4.58489943e+00 -1.65671196e+01 -1.95853639e+00] [-6.58583641e-01 -7.65878201e+00 -8.92732430e+00 -2.47579021e+01 -2.23292847e+01 -5.77449226e+00 1.62961082e+01 -4.59944820e+00 -1.29942780e+01 -7.00725508e+00] [-3.98533511e+00 8.71526814e+00 5.47677326e+00 4.64206648e+00 1.41240187e+01 -8.14422369e-01 -1.96600890e+00 -5.74879742e+00 1.31045699e+00 -1.12653131e+01] [-1.19244442e+01 -4.84834242e+00 1.19215631e+01 -1.03459826e+01 -8.86952019e+00 -8.95811653e+00 -9.46549225e+00 -1.50033951e+01 -6.33156872e+00 1.10417056e+00] [-3.27138734e+00 -1.32405150e+00 2.16473436e+00 -1.21564960e+01 2.73355503e+01 -1.52188320e+01 -1.03393097e+01 -4.73087215e+00 -1.33157444e+01 -2.66279316e+00] [-5.41610479e-01 1.07335496e+00 -3.12116838e+00 -9.53704548e+00 9.78885937e+00 2.55631399e+00 8.65073967e+00 1.87672317e+00 -1.05947804e+00 9.08391476e-01]] [[ 5.05991876e-01 -2.05616164e+00 -7.43867111e+00 1.38872206e+00 5.04066515e+00 2.05899620e+00 9.27188396e+00 -6.08432245e+00 -3.23141026e+00 -9.30311871e+00] [ 4.38892078e+00 4.60281801e+00 2.79976487e+00 -3.42154574e+00 7.62165260e+00 4.37022209e+00 1.63761463e+01 -1.31398213e+00 1.78893399e+00 6.34685278e+00] [ 1.80924969e+01 -1.06801546e+00 -4.92716742e+00 1.18147860e+01 5.17526484e+00 -2.71578336e+00 9.20064831e+00 -6.93978405e+00 2.19719524e+01 5.64285564e+00] [ 6.38050365e+00 -8.56304455e+00 -1.86005611e+01 -6.40594959e+00 -4.52212381e+00 4.03296614e+00 1.24473867e+01 -5.92158318e+00 -1.49119580e+00 1.77349365e+00] [-1.66292548e+00 -4.57569838e+00 7.85213137e+00 
-2.80149865e+00 -1.52576027e+01 -1.33215675e+01 -1.78093204e+01 -6.78990412e+00 1.78550949e+01 1.35615873e+01] [-5.79218864e+00 -2.53671665e+01 7.03567219e+00 -6.70924616e+00 9.94192505e+00 -3.45140934e+00 3.53350854e+00 -1.45254111e+00 5.32482100e+00 -7.05680370e+00] [ 3.20674610e+00 1.01882191e+01 4.88948822e+00 3.25854868e-01 5.62861061e+00 -3.54614377e+00 1.02205667e+01 5.18137813e-02 1.66378174e+01 -1.19796171e+01] [-1.68262043e+01 -1.49986086e+01 -1.65520477e+01 2.19752097e+00 -2.49716258e+00 1.24705434e-02 1.08797979e+01 9.18946648e+00 -4.98684347e-02 5.53574276e+00] [ 3.87481570e+00 -1.09459658e+01 2.04603410e+00 6.73892355e+00 -1.68029652e+01 -4.48401070e+00 -3.95542693e+00 9.53733826e+00 -3.83028293e+00 -2.13450289e+00] [ 8.16543102e+00 3.94612879e-01 5.20034122e+00 2.44117022e+00 4.74200726e+00 6.19705629e+00 -1.16764212e+01 -4.84711075e+00 6.14361429e+00 -1.13149538e+01]] [[ 1.84987748e+00 -5.36630535e+00 -5.28682947e+00 9.42288017e+00 4.49902505e-01 9.21801853e+00 1.52027440e+00 9.19057751e+00 9.19959927e+00 2.08678484e+00] [-7.56000090e+00 -5.62541580e+00 -1.48047657e+01 -6.45966911e+00 1.44511967e+01 -1.77945781e+00 -1.79497266e+00 8.32743931e+00 7.21812105e+00 1.44671955e+01] [-1.74497652e+00 6.56413317e+00 1.90512638e+01 -4.69861126e+00 -8.47257614e-01 2.26723409e+00 3.03102565e+00 1.52821264e+01 3.53585839e+00 8.85222149e+00] [ 1.58817852e+00 6.37920523e+00 -4.93056822e+00 4.37525797e+00 9.29525948e+00 1.01375723e+01 3.09435630e+00 3.40466523e+00 -6.98375416e+00 3.55190992e+00] [ 9.91174412e+00 1.84748764e+01 -1.35617418e+01 -1.01046896e+01 2.30630875e+00 -3.24580431e+00 -1.05805550e+01 4.84425163e+00 -5.46917439e+00 1.10986054e+00] [-8.50174427e+00 -3.49833083e+00 -2.31360316e+00 -1.74256635e+00 -5.17185831e+00 4.22944355e+00 4.18063784e+00 -5.81571913e+00 -1.17293682e+01 4.91370773e+00] [ 2.85747957e+00 2.67796850e+00 -6.29005814e+00 7.16286325e+00 -9.42690492e-01 1.20927687e+01 -2.32534099e+00 8.00966930e+00 -1.28278768e+00 -4.12868690e+00] 
[-1.11900306e+00 -2.47858763e+00 -5.31137943e+00 5.81822968e+00 -3.37838340e+00 1.82052784e+01 6.93483114e+00 -3.53120357e-01 -6.16725922e+00 -7.52487707e+00] [ 8.09690380e+00 8.59125805e+00 -6.07137442e-01 -2.46746874e+00 1.69964030e-01 -5.48580599e+00 5.86017132e+00 6.10399723e-01 -1.21264267e+01 -8.80547714e+00] [ 8.66170979e+00 8.78632736e+00 5.08632040e+00 3.73310733e+00 3.25363159e+00 -5.72187066e-01 6.29591131e+00 4.95686245e+00 -5.81602669e+00 -2.90654230e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': [1, 1, 1], 'pads': 'valid', 'dilations': [1, 1, 1], 'groups': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1129.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={1.08491}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[[ 6.13342190e+00 -1.41518908e+01 -1.02742491e+01 -1.14443350e+01 5.43930471e-01 8.11705303e+00 -7.48561621e+00 1.48837605e+01] [ 3.82510638e+00 1.23799658e+01 -5.33731890e+00 -1.23173761e+01 -4.60427094e+00 9.64281750e+00 8.91601562e+00 1.29240446e+01] [ 5.31900597e+00 7.42659140e+00 7.31437635e+00 9.01603794e+00 3.44691992e-01 -1.07325277e+01 3.56467557e+00 1.88510513e+01] [ 1.18992662e+01 1.61117134e+01 3.23522985e-01 -2.42824578e+00 -7.48732901e+00 -1.13601131e+01 9.77800560e+00 5.39331293e+00] [-2.33463311e+00 2.65105081e+00 6.60115719e+00 1.09253759e+01 4.45978880e+00 7.10184240e+00 6.13551426e+00 6.75910282e+00] [ 1.17867908e+01 -2.74383694e-01 -7.33139420e+00 -2.77468419e+00 -1.33188705e+01 1.26722231e+01 -1.00103545e+00 3.23117590e+00] [-4.55975103e+00 1.85813880e+00 -6.94580603e+00 -8.11008573e-01 2.72525287e+00 1.91403008e+01 7.41453469e-01 2.36244507e+01] [ 6.12822175e-01 -2.14924860e+00 7.78291416e+00 3.45703650e+00 2.63579994e-01 -1.40711384e+01 4.82787800e+00 -5.48586750e+00]] [[-6.21212339e+00 2.23551583e+00 -1.68959808e+01 4.09407949e+00 5.46951866e+00 1.10374031e+01 -6.07572842e+00 2.45505738e+00] [-8.10328674e+00 -3.66276026e+00 3.53844786e+00 
-7.95747232e+00 9.12293148e+00 6.58706784e-01 7.96757174e+00 -1.96472216e+00] [ 7.34374094e+00 2.84024596e+00 -1.80490589e+00 -2.93864310e-01 -1.38419354e+00 -5.64751959e+00 6.72821617e+00 -5.05931318e-01] [-7.49858809e+00 3.73644614e+00 -8.04126549e+00 -1.28500986e+00 1.86074686e+00 -6.75722742e+00 -2.55894399e+00 -2.40078473e+00] [-1.50062428e+01 -3.90044391e-01 -1.17523823e+01 6.36291933e+00 -3.29341006e+00 1.33514462e+01 1.90828419e+00 2.27952433e+00] [ 4.12774611e+00 -1.20123758e+01 -9.60489035e-01 1.45783072e+01 3.39989233e+00 -1.38952601e+00 -2.56849861e+01 6.21350193e+00] [ 8.87721920e+00 -1.08891888e+01 -4.37841749e+00 3.59316730e+00 -7.56311369e+00 -1.90432405e+00 1.68457947e+01 8.98796976e-01] [ 2.24952912e+00 -5.18902111e+00 6.88567972e+00 1.33755016e+01 5.70869827e+00 -9.64100266e+00 1.55748739e+01 4.60359430e+00]] [[ 1.31901674e+01 2.00017548e+00 -4.67263572e-02 2.81586576e+00 -1.10964167e+00 8.60254478e+00 5.98937559e+00 3.19375348e+00] [-1.23345242e+01 -8.78664684e+00 -6.26617479e+00 -2.88119555e+00 1.33799791e+01 -4.51545668e+00 -7.20244265e+00 -4.53871727e+00] [-2.10146785e+00 1.09015417e+00 1.26263344e+00 -4.74666452e+00 5.96119642e+00 1.68082312e-01 2.96011257e+00 -8.78189278e+00] [-7.66272879e+00 3.35174513e+00 4.63282299e+00 1.22908134e+01 -1.89613044e+00 2.74989009e+00 -5.42632341e+00 6.70276117e+00] [-2.96102107e-01 9.23608303e+00 1.51258242e+00 -8.60381699e+00 1.45251834e+00 -1.93223357e-01 -2.41052551e+01 2.56715813e+01] [-5.87339830e+00 -7.40727997e+00 1.59856949e+01 -3.54427624e+00 -4.98831415e+00 -3.18650556e+00 1.01795921e+01 -4.67954302e+00] [-2.46551800e+00 -6.33646393e+00 1.11114798e+01 -2.58917046e+00 1.00598660e+01 3.50550145e-01 -1.10527220e+01 -3.07867050e+00] [ 3.32822037e+00 8.06735325e+00 -3.40713906e+00 -5.74922752e+00 9.28744525e-02 9.02167797e+00 -1.43105400e+00 9.86894321e+00]] [[-1.24145603e+01 1.21022491e+01 -1.22647679e+00 1.85460072e+01 2.09985971e+00 -2.16267556e-01 -1.20746155e+01 1.40852773e+00] [ 1.18759346e+01 
-4.03039360e+00 1.87107906e+01 5.55055952e+00 5.26652193e+00 -1.74649847e+00 1.81388867e+00 -1.78826898e-02] [-3.94340873e-01 3.66193056e+00 9.67910957e+00 -7.04521418e+00 -6.70941257e+00 1.81720734e+01 -7.28307247e+00 6.20567846e+00] [ 4.66791439e+00 8.97682667e+00 4.35933977e-01 7.79008198e+00 4.12960196e+00 -4.18635511e+00 4.51371527e+00 2.54808121e+01] [ 1.16967163e+01 5.60623741e+00 1.44029837e+01 -3.88519812e+00 2.89673716e-01 1.06384287e+01 -3.57829452e+00 3.13791156e+00] [ 1.17478287e+00 4.40343094e+00 1.51721311e+00 3.18746161e+00 1.37846553e+00 1.20666723e+01 1.43809617e-02 7.93909597e+00] [ 1.43773389e+00 7.37535477e-01 -1.01518571e+00 -6.37270832e+00 2.05935631e+01 -1.08258333e+01 4.02117872e+00 8.13663292e+00] [ 4.04384422e+00 -1.48907824e+01 1.45695925e+00 1.12335682e+00 -2.02021675e+01 6.47194922e-01 -6.70152903e+00 2.02040362e+00]] [[-8.18528843e+00 7.56635571e+00 5.80098963e+00 -1.73907459e+00 5.75477839e+00 -2.96104765e+00 -9.36641693e+00 4.90961981e+00] [ 4.20758104e+00 1.40603333e+01 1.31126986e+01 1.41968358e+00 -2.29848981e+00 8.30041027e+00 2.76606536e+00 1.34354782e+01] [-4.47518682e+00 6.78469181e+00 -8.29642391e+00 -1.05118790e+01 -8.18952656e+00 -2.09613895e+00 -6.06001091e+00 -6.96555555e-01] [ 2.01455078e+01 1.58891714e+00 1.50490975e+00 -7.49075508e+00 1.76313174e+00 -1.99670875e+00 -1.90271163e+00 -1.65929089e+01] [ 9.00717926e+00 -1.18930244e+01 -6.81629276e+00 8.39700317e+00 1.81016016e+00 1.19722853e+01 8.34271431e+00 1.82015193e+00] [-8.73253167e-01 5.39958417e-01 -1.66903381e+01 -3.11898923e+00 -1.07306662e+01 4.31598091e+00 1.97988358e+01 -1.28429041e+01] [ 5.65204334e+00 -2.10003048e-01 8.42643547e+00 1.42248750e+00 -8.24021530e+00 -9.30347347e+00 1.64459741e+00 -6.74635291e-01] [-8.34873676e+00 1.33848963e+01 -8.64928216e-02 3.48238897e+00 -4.85294914e+00 -5.62230539e+00 -1.54627352e+01 6.18681192e+00]] [[ 7.65184402e+00 -6.97072458e+00 8.50102615e+00 -1.05226974e+01 1.35710745e+01 -2.12832475e+00 -5.43572330e+00 
-7.34035492e+00] [-7.58374071e+00 8.77299881e+00 -7.16167402e+00 7.51196051e+00 1.89343758e+01 -4.95598698e+00 -1.07042491e+00 -8.75525665e+00] [ 6.30843258e+00 -6.28282881e+00 5.27644205e+00 7.15294170e+00 1.68325977e+01 -2.94552922e+00 1.84782181e+01 -1.49529066e+01] [-9.50585938e+00 1.91948450e+00 1.55410802e+00 6.34645271e+00 -1.22021227e+01 6.92574406e+00 2.64772081e+00 7.24086475e+00] [ 7.65970230e+00 1.18912735e+01 -1.42939281e+01 1.05516350e+00 3.81801319e+00 -1.79969931e+00 -2.32348037e+00 4.21047974e+00] [ 6.83894055e-03 -3.94925499e+00 2.33605909e+00 -1.23492746e+01 1.23375273e+01 1.00206935e+00 -6.94811296e+00 1.56402979e+01] [ 9.13685989e+00 4.06758022e+00 -1.13768101e+01 1.22375050e+01 -1.04250431e+01 3.66459465e+00 6.96936324e-02 -6.94132662e+00] [ 8.18113136e+00 -6.46935081e+00 6.95332098e+00 -1.21239452e+01 1.35321398e+01 3.87097573e+00 -8.03919506e+00 1.44573698e+01]] [[-7.52479935e+00 -7.36355707e-02 -1.44483023e+01 4.79214239e+00 -1.20278006e+01 5.55343723e+00 9.90004063e+00 -7.51265430e+00] [ 1.59809532e+01 -1.84264164e+01 -7.95243025e+00 3.81511760e+00 -1.14047260e+01 3.94072860e-01 9.10417843e+00 2.06216526e+00] [ 4.77959299e+00 1.23321414e+00 3.50286460e+00 1.58161783e+00 -5.81180668e+00 1.25936806e+00 1.28351822e-01 -1.61781120e+01] [ 1.35555344e+01 -5.61257029e+00 1.68767700e+01 -6.67506790e+00 -6.55338573e+00 -1.86203432e+00 2.88802838e+00 -4.05049419e+00] [-4.96804380e+00 -1.75035310e+00 5.80961168e-01 2.50975460e-01 -2.33135629e+00 6.08547688e+00 -5.80003405e+00 2.97392035e+00] [ 6.32081413e+00 1.02789640e+00 -9.35873699e+00 9.56555557e+00 1.09230065e+00 2.54689579e+01 -7.53403139e+00 5.29862499e+00] [-3.67517853e+00 1.17272625e+01 -1.69358528e+00 -1.50453987e+01 2.38645577e+00 1.33057463e+00 6.94977236e+00 7.74482250e+00] [-4.47449398e+00 -1.34555473e+01 1.33668928e+01 7.06214571e+00 -1.13652887e+01 1.71631260e+01 4.04816151e+00 8.76364899e+00]] [[ 4.30231428e+00 -7.85976744e+00 3.40743899e+00 1.95777493e+01 -1.03109245e+01 
-3.11001492e+00 -5.47834873e+00 -7.16401005e+00] [-1.67733002e+00 -2.59447169e+00 -4.28628111e+00 -7.58938408e+00 -2.86783576e+00 4.30650383e-01 1.30661860e+01 1.50178041e+01] [-3.24441016e-01 6.82286119e+00 9.35958862e+00 -7.01120186e+00 6.55041742e+00 -1.03462305e+01 1.09133892e+01 3.39820838e+00] [ 8.82804298e+00 -9.47124672e+00 3.75636768e+00 2.18574834e+00 1.40187502e+01 -6.53299761e+00 1.42014389e+01 -4.87215090e+00] [ 1.12542415e+00 -7.70723724e+00 1.91564980e+01 -1.08054709e+00 -6.81792545e+00 -1.08026733e+01 -3.74052882e+00 1.84154758e+01] [ 1.33270562e-01 8.04429913e+00 -2.67250347e+01 -6.18874025e+00 1.16342688e+01 -1.29393711e+01 -1.81843388e+00 -1.34479392e+00] [ 4.21456528e+00 -1.85139155e+00 -3.31848359e+00 1.22155514e+01 2.03363037e+00 -1.25360880e+01 4.33242178e+00 8.58131123e+00] [ 5.38231945e+00 1.92422545e+00 8.05524349e+00 8.45967770e+00 2.52169251e+00 2.76998687e+00 -1.25425749e+01 -7.22808456e+00]]]]]; ov_res: [[[[[ 6.13341999e+00 -1.41518908e+01 -1.02742510e+01 -1.14443378e+01 5.43931663e-01 8.11705780e+00 -7.48561192e+00 1.48837614e+01] [ 3.82510567e+00 1.23799658e+01 -5.33731651e+00 -1.23173733e+01 -4.60427189e+00 9.64281750e+00 8.91601372e+00 1.29240437e+01] [ 5.31900597e+00 7.42658806e+00 7.31437778e+00 9.01603794e+00 3.44691575e-01 -1.07325277e+01 3.56467485e+00 1.88510494e+01] [ 1.18992682e+01 1.61117191e+01 3.23520899e-01 -2.42824602e+00 -7.48732853e+00 -1.13601122e+01 9.77800655e+00 5.39331436e+00] [-2.33463216e+00 2.65105104e+00 6.60115433e+00 1.09253750e+01 4.45978737e+00 7.10184097e+00 6.13551140e+00 6.75910473e+00] [ 1.17867899e+01 -2.74384618e-01 -7.33139420e+00 -2.77468395e+00 -1.33188715e+01 1.26722250e+01 -1.00103426e+00 3.23117638e+00] [-4.55975151e+00 1.85813618e+00 -6.94580650e+00 -8.11008215e-01 2.72525167e+00 1.91402988e+01 7.41454482e-01 2.36244469e+01] [ 6.12821102e-01 -2.14924812e+00 7.78291130e+00 3.45703912e+00 2.63581097e-01 -1.40711346e+01 4.82787800e+00 -5.48586845e+00]] [[-6.21212292e+00 2.23551631e+00 
-1.68959846e+01 4.09407902e+00 5.46951866e+00 1.10374031e+01 -6.07572746e+00 2.45505786e+00] [-8.10328293e+00 -3.66276002e+00 3.53844833e+00 -7.95747185e+00 9.12293243e+00 6.58706665e-01 7.96757317e+00 -1.96472406e+00] [ 7.34374142e+00 2.84025002e+00 -1.80490661e+00 -2.93863773e-01 -1.38419175e+00 -5.64751816e+00 6.72821712e+00 -5.05929947e-01] [-7.49858475e+00 3.73644853e+00 -8.04126930e+00 -1.28500986e+00 1.86074603e+00 -6.75722599e+00 -2.55894399e+00 -2.40078282e+00] [-1.50062437e+01 -3.90044093e-01 -1.17523861e+01 6.36291981e+00 -3.29341054e+00 1.33514500e+01 1.90828454e+00 2.27952385e+00] [ 4.12774658e+00 -1.20123777e+01 -9.60488081e-01 1.45783100e+01 3.39989138e+00 -1.38952470e+00 -2.56849937e+01 6.21350288e+00] [ 8.87722015e+00 -1.08891888e+01 -4.37841606e+00 3.59316635e+00 -7.56311512e+00 -1.90432382e+00 1.68457947e+01 8.98795962e-01] [ 2.24953103e+00 -5.18902016e+00 6.88568306e+00 1.33755026e+01 5.70869732e+00 -9.64100266e+00 1.55748730e+01 4.60359573e+00]] [[ 1.31901693e+01 2.00017595e+00 -4.67278957e-02 2.81586599e+00 -1.10964108e+00 8.60254478e+00 5.98937607e+00 3.19375348e+00] [-1.23345232e+01 -8.78664589e+00 -6.26617432e+00 -2.88119507e+00 1.33799763e+01 -4.51545429e+00 -7.20244217e+00 -4.53871822e+00] [-2.10146785e+00 1.09015703e+00 1.26263332e+00 -4.74666786e+00 5.96119595e+00 1.68082774e-01 2.96011376e+00 -8.78189087e+00] [-7.66272736e+00 3.35174274e+00 4.63282013e+00 1.22908144e+01 -1.89613008e+00 2.74989080e+00 -5.42632294e+00 6.70276165e+00] [-2.96101093e-01 9.23608303e+00 1.51258266e+00 -8.60381603e+00 1.45251846e+00 -1.93224549e-01 -2.41052513e+01 2.56715775e+01] [-5.87339687e+00 -7.40727806e+00 1.59856939e+01 -3.54427505e+00 -4.98831081e+00 -3.18650556e+00 1.01795902e+01 -4.67954350e+00] [-2.46551943e+00 -6.33646202e+00 1.11114788e+01 -2.58917046e+00 1.00598669e+01 3.50551069e-01 -1.10527210e+01 -3.07867360e+00] [ 3.32822013e+00 8.06735039e+00 -3.40713954e+00 -5.74922657e+00 9.28744078e-02 9.02167988e+00 -1.43105340e+00 9.86894131e+00]] 
[[-1.24145584e+01 1.21022530e+01 -1.22647762e+00 1.85460033e+01 2.09985971e+00 -2.16266990e-01 -1.20746174e+01 1.40852726e+00] [ 1.18759336e+01 -4.03039265e+00 1.87107849e+01 5.55056572e+00 5.26652145e+00 -1.74649882e+00 1.81388879e+00 -1.78821087e-02] [-3.94341469e-01 3.66193128e+00 9.67910671e+00 -7.04521465e+00 -6.70940971e+00 1.81720734e+01 -7.28307152e+00 6.20567799e+00] [ 4.66791487e+00 8.97682476e+00 4.35935855e-01 7.79008007e+00 4.12959814e+00 -4.18635368e+00 4.51371670e+00 2.54808121e+01] [ 1.16967144e+01 5.60623646e+00 1.44029837e+01 -3.88519931e+00 2.89674520e-01 1.06384249e+01 -3.57829499e+00 3.13791060e+00] [ 1.17478347e+00 4.40343094e+00 1.51721191e+00 3.18746042e+00 1.37846673e+00 1.20666704e+01 1.43802166e-02 7.93909645e+00] [ 1.43773437e+00 7.37534225e-01 -1.01518369e+00 -6.37270832e+00 2.05935669e+01 -1.08258305e+01 4.02117682e+00 8.13663292e+00] [ 4.04384327e+00 -1.48907833e+01 1.45695853e+00 1.12335670e+00 -2.02021675e+01 6.47195458e-01 -6.70153046e+00 2.02040386e+00]] [[-8.18528843e+00 7.56635380e+00 5.80098820e+00 -1.73907518e+00 5.75477791e+00 -2.96104884e+00 -9.36641693e+00 4.90962219e+00] [ 4.20758057e+00 1.40603323e+01 1.31126947e+01 1.41968322e+00 -2.29848933e+00 8.30040932e+00 2.76606274e+00 1.34354763e+01] [-4.47518730e+00 6.78468990e+00 -8.29642391e+00 -1.05118790e+01 -8.18952847e+00 -2.09613943e+00 -6.06001091e+00 -6.96555734e-01] [ 2.01455059e+01 1.58891654e+00 1.50491178e+00 -7.49075413e+00 1.76313388e+00 -1.99670982e+00 -1.90271425e+00 -1.65929050e+01] [ 9.00717926e+00 -1.18930264e+01 -6.81629181e+00 8.39700222e+00 1.81015968e+00 1.19722862e+01 8.34271240e+00 1.82015204e+00] [-8.73253465e-01 5.39958775e-01 -1.66903381e+01 -3.11898875e+00 -1.07306671e+01 4.31597900e+00 1.97988319e+01 -1.28429070e+01] [ 5.65204144e+00 -2.10003495e-01 8.42643642e+00 1.42248774e+00 -8.24021530e+00 -9.30347347e+00 1.64459729e+00 -6.74635172e-01] [-8.34873390e+00 1.33848972e+01 -8.64943266e-02 3.48238897e+00 -4.85295200e+00 -5.62230587e+00 
-1.54627371e+01 6.18681145e+00]] [[ 7.65184307e+00 -6.97072411e+00 8.50102806e+00 -1.05226974e+01 1.35710726e+01 -2.12832642e+00 -5.43572426e+00 -7.34035110e+00] [-7.58374023e+00 8.77300167e+00 -7.16167450e+00 7.51196289e+00 1.89343719e+01 -4.95598602e+00 -1.07042599e+00 -8.75525475e+00] [ 6.30843258e+00 -6.28282738e+00 5.27644062e+00 7.15294266e+00 1.68325958e+01 -2.94553018e+00 1.84782219e+01 -1.49529123e+01] [-9.50586033e+00 1.91948485e+00 1.55410695e+00 6.34645271e+00 -1.22021236e+01 6.92574406e+00 2.64772177e+00 7.24086475e+00] [ 7.65970516e+00 1.18912706e+01 -1.42939301e+01 1.05516338e+00 3.81801391e+00 -1.79969931e+00 -2.32347894e+00 4.21048069e+00] [ 6.83784485e-03 -3.94925523e+00 2.33606052e+00 -1.23492727e+01 1.23375273e+01 1.00206852e+00 -6.94811344e+00 1.56403017e+01] [ 9.13685989e+00 4.06758022e+00 -1.13768120e+01 1.22375021e+01 -1.04250431e+01 3.66459703e+00 6.96936846e-02 -6.94132614e+00] [ 8.18112946e+00 -6.46935177e+00 6.95332241e+00 -1.21239405e+01 1.35321388e+01 3.87097478e+00 -8.03919506e+00 1.44573708e+01]] [[-7.52480030e+00 -7.36358166e-02 -1.44483070e+01 4.79214191e+00 -1.20278006e+01 5.55343628e+00 9.90004063e+00 -7.51265335e+00] [ 1.59809523e+01 -1.84264126e+01 -7.95243168e+00 3.81511664e+00 -1.14047260e+01 3.94070506e-01 9.10417747e+00 2.06216860e+00] [ 4.77959347e+00 1.23321438e+00 3.50286317e+00 1.58161795e+00 -5.81180668e+00 1.25936818e+00 1.28351808e-01 -1.61781139e+01] [ 1.35555334e+01 -5.61257362e+00 1.68767700e+01 -6.67506504e+00 -6.55338669e+00 -1.86203456e+00 2.88802862e+00 -4.05049324e+00] [-4.96804333e+00 -1.75035191e+00 5.80961525e-01 2.50975847e-01 -2.33135772e+00 6.08547592e+00 -5.80003452e+00 2.97392130e+00] [ 6.32081509e+00 1.02789748e+00 -9.35873413e+00 9.56555557e+00 1.09230161e+00 2.54689598e+01 -7.53403187e+00 5.29862404e+00] [-3.67517781e+00 1.17272644e+01 -1.69358158e+00 -1.50453978e+01 2.38645601e+00 1.33057511e+00 6.94977379e+00 7.74482346e+00] [-4.47449398e+00 -1.34555492e+01 1.33668938e+01 7.06214428e+00 
-1.13652887e+01 1.71631222e+01 4.04816198e+00 8.76364803e+00]] [[ 4.30231380e+00 -7.85976791e+00 3.40743780e+00 1.95777531e+01 -1.03109283e+01 -3.11001658e+00 -5.47834873e+00 -7.16400814e+00] [-1.67733026e+00 -2.59446931e+00 -4.28628063e+00 -7.58938408e+00 -2.86783695e+00 4.30652201e-01 1.30661860e+01 1.50178041e+01] [-3.24442387e-01 6.82285881e+00 9.35958767e+00 -7.01120281e+00 6.55041885e+00 -1.03462315e+01 1.09133883e+01 3.39820814e+00] [ 8.82804203e+00 -9.47124577e+00 3.75636554e+00 2.18574977e+00 1.40187511e+01 -6.53299904e+00 1.42014389e+01 -4.87215042e+00] [ 1.12542725e+00 -7.70723724e+00 1.91564903e+01 -1.08054638e+00 -6.81792736e+00 -1.08026724e+01 -3.74052644e+00 1.84154816e+01] [ 1.33272946e-01 8.04429817e+00 -2.67250385e+01 -6.18874073e+00 1.16342669e+01 -1.29393759e+01 -1.81843448e+00 -1.34479260e+00] [ 4.21456623e+00 -1.85139036e+00 -3.31848359e+00 1.22155533e+01 2.03363109e+00 -1.25360870e+01 4.33242321e+00 8.58131027e+00] [ 5.38231945e+00 1.92422593e+00 8.05524635e+00 8.45968056e+00 2.52169275e+00 2.76998615e+00 -1.25425749e+01 -7.22808075e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': 'same', 'dilations': [1, 1, 1], 'groups': 3} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1131.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.9334 0.0298 0.0118 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.1188 (2,1,1,.,.) = -0.3290 (3,1,1,.,.) = -1.5707 [ CPUFloatType{3,1,1,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[[ 1.11582077e+00 9.66975927e-01 8.20247650e-01 ... 7.56445289e-01 1.04892683e+00 8.53887916e-01] [ 8.43855262e-01 7.12219596e-01 1.04804540e+00 ... 7.36853957e-01 8.53448451e-01 8.55714917e-01] [ 1.01445031e+00 8.30750287e-01 9.28074658e-01 ... 4.79592055e-01 8.21260154e-01 7.37317562e-01] ... [ 9.38840568e-01 9.31496382e-01 8.75502408e-01 ... 7.78919101e-01 9.12061989e-01 7.77552605e-01] [ 7.31338084e-01 1.02699745e+00 7.53095210e-01 ... 7.64584184e-01 8.88916969e-01 1.09127581e+00] [ 9.57659900e-01 8.35028470e-01 9.64906275e-01 ... 8.83684874e-01 1.09369266e+00 8.55744958e-01]] [[ 1.01001894e+00 9.65801060e-01 1.02173817e+00 ... 1.25924253e+00 8.99781942e-01 1.01337028e+00] [ 7.89634347e-01 8.21508884e-01 9.22807097e-01 ... 8.69695842e-01 1.07051873e+00 9.13956642e-01] [ 1.06942105e+00 1.04493117e+00 8.98782790e-01 ... 8.99324775e-01 8.67046177e-01 1.06774032e+00] ... [ 1.04756546e+00 8.73249888e-01 9.03727591e-01 ... 7.67690659e-01 8.51627469e-01 8.49805832e-01] [ 9.72333848e-01 8.58348191e-01 8.93512368e-01 ... 
8.64722848e-01 9.85806644e-01 1.03507614e+00] [ 1.23803520e+00 9.47920084e-01 7.19476700e-01 ... 9.07565117e-01 8.05574000e-01 8.98866594e-01]] [[ 9.55202997e-01 9.76690829e-01 9.55374062e-01 ... 9.03993607e-01 1.17680490e+00 1.09344590e+00] [ 9.71431375e-01 1.05277085e+00 1.06619334e+00 ... 8.54604125e-01 9.76494610e-01 1.00988972e+00] [ 8.29485536e-01 1.09420156e+00 9.15049970e-01 ... 8.34769189e-01 8.00001562e-01 1.17906559e+00] ... [ 1.08198583e+00 1.08225727e+00 1.05417895e+00 ... 9.07709658e-01 8.55000973e-01 1.03504395e+00] [ 7.93921113e-01 9.96562243e-01 1.05226290e+00 ... 9.07797694e-01 8.80159378e-01 8.76272738e-01] [ 8.85734797e-01 9.43049610e-01 7.84412026e-01 ... 8.38057280e-01 7.78770149e-01 1.05269694e+00]] ... [[ 6.53228104e-01 8.24966073e-01 9.93554711e-01 ... 9.84899819e-01 9.39407229e-01 1.05801868e+00] [ 9.46274161e-01 9.98037398e-01 9.35766041e-01 ... 7.65082359e-01 9.19838190e-01 1.02214456e+00] [ 9.27643597e-01 1.03599620e+00 6.93253517e-01 ... 9.75823343e-01 8.77395630e-01 1.01362121e+00] ... [ 1.07514429e+00 9.73075569e-01 1.11853361e+00 ... 1.12478316e+00 1.04865932e+00 1.01643598e+00] [ 9.74287450e-01 8.22766125e-01 8.15622509e-01 ... 8.22778344e-01 8.09590757e-01 9.79680181e-01] [ 8.78654242e-01 1.00058925e+00 9.62460041e-01 ... 9.61168647e-01 8.48718882e-01 1.07379150e+00]] [[ 1.08346176e+00 7.90131450e-01 9.84375715e-01 ... 9.25816655e-01 9.70433056e-01 9.94026124e-01] [ 8.21089387e-01 1.01585615e+00 9.73787308e-01 ... 9.77356613e-01 1.00969243e+00 1.04268074e+00] [ 6.85479522e-01 1.14722991e+00 8.98350596e-01 ... 1.03886402e+00 9.10711825e-01 1.07294619e+00] ... [ 9.35152888e-01 1.05149305e+00 1.01938534e+00 ... 1.04939711e+00 8.91300976e-01 8.04378808e-01] [ 9.79023457e-01 6.99018478e-01 8.86318147e-01 ... 1.07902646e+00 9.04327035e-01 9.43764091e-01] [ 8.06782603e-01 8.33701491e-01 7.37712979e-01 ... 1.02064025e+00 1.01900494e+00 9.75226104e-01]] [[ 8.46093595e-01 9.45521116e-01 7.71975100e-01 ... 
9.91351485e-01 9.01837707e-01 1.09342504e+00] [ 1.14495015e+00 9.13695991e-01 1.20333648e+00 ... 1.13338292e+00 9.60912943e-01 9.33705628e-01] [ 1.06292653e+00 1.07141411e+00 1.00637102e+00 ... 9.28932309e-01 1.11891329e+00 9.93673265e-01] ... [ 8.87940347e-01 1.01211250e+00 9.15943384e-01 ... 1.01855528e+00 8.73694181e-01 9.81953800e-01] [ 8.39780152e-01 9.59221780e-01 9.06849921e-01 ... 9.29318368e-01 8.68621707e-01 8.75271559e-01] [ 9.34842765e-01 8.00770998e-01 8.19704950e-01 ... 1.04335046e+00 9.28309321e-01 8.56863976e-01]]] [[[-9.44802016e-02 -7.96349719e-03 -1.73027351e-01 ... 2.75305808e-02 -2.72557646e-01 2.42875014e-02] [ 1.88903995e-02 -9.98668820e-02 2.76548326e-01 ... 2.73867518e-01 5.35958290e-01 1.09282970e-01] [-3.50331753e-01 -1.76160336e-02 4.49527174e-01 ... 1.17926553e-01 -3.12531739e-01 1.05913132e-01] ... [ 5.15819341e-02 -5.09145856e-01 -1.65657401e-01 ... -4.31249589e-01 5.27155340e-01 -6.10931873e-01] [ 4.74903077e-01 -2.18387038e-01 9.61478800e-02 ... 1.12609491e-01 1.52874082e-01 -1.69695243e-01] [-2.85346657e-01 -2.91087389e-01 -4.73360531e-02 ... 2.14926451e-01 4.33155328e-01 1.17472000e-03]] [[ 3.15758348e-01 2.59337369e-02 -5.09332776e-01 ... -2.81416625e-02 2.73023367e-01 -1.63391829e-01] [-5.26613556e-02 -1.57452151e-01 -1.09140486e-01 ... 5.40355071e-02 6.66088238e-02 -1.59272492e-01] [-3.65101933e-01 -2.10556373e-01 -4.75570560e-03 ... 9.48150903e-02 5.11796832e-01 2.77952254e-01] ... [ 5.38222015e-01 1.61804199e-01 -1.14326820e-01 ... 2.46791378e-01 3.90308589e-01 2.80504644e-01] [-1.86077565e-01 6.01346731e-01 -6.06027067e-01 ... 3.17776650e-01 4.47254963e-02 -2.67081946e-01] [-9.83916968e-02 -1.82563156e-01 -2.50339881e-02 ... 3.47180784e-01 -2.45780982e-02 -2.00109243e-01]] [[-4.45973039e-01 1.56603009e-01 -3.19341838e-01 ... 6.91192150e-02 1.16745785e-01 1.65522113e-01] [ 6.76969230e-01 -2.70388931e-01 -7.27252901e-01 ... -9.14609432e-02 3.75782013e-01 -1.19779289e-01] [ 1.25110358e-01 -5.67159832e-01 2.78200418e-01 ... 
-4.34691280e-01 6.76959872e-01 2.43839517e-01] ... [-3.18908960e-01 -1.29357904e-01 5.73683202e-01 ... 2.64235623e-02 1.58967122e-01 -4.62740362e-01] [ 3.62530500e-01 5.12632072e-01 -2.64796078e-01 ... 4.10674453e-01 3.18593532e-01 6.59303486e-01] [ 7.00032592e-01 6.76932871e-01 2.37001508e-01 ... -2.34408677e-02 3.90653849e-01 9.13497061e-02]] ... [[ 2.81680614e-01 8.15919191e-02 -1.12918988e-01 ... 1.31966799e-01 -2.75694251e-01 6.18740618e-01] [-3.15235525e-01 -1.07162639e-01 -3.68795209e-02 ... -1.48370206e-01 9.48086232e-02 6.43315077e-01] [-1.45749107e-01 1.87231153e-01 7.77025819e-02 ... -2.25748062e-01 -4.54253733e-01 3.50881815e-01] ... [-2.46398807e-01 -4.74866144e-02 7.18998760e-02 ... 4.90031868e-01 1.65923268e-01 6.56419620e-03] [ 3.09544235e-01 3.48654181e-01 8.39074552e-02 ... -9.58452970e-02 -4.63346899e-01 1.34293288e-01] [ 1.24504730e-01 2.35598207e-01 9.20015872e-02 ... 5.18649340e-01 4.16831285e-01 1.52935252e-01]] [[ 2.55041927e-01 4.96314317e-02 -5.17907858e-01 ... -2.64622748e-01 9.37087275e-03 5.80584586e-01] [-1.34881169e-01 -1.54015109e-01 1.74354076e-01 ... -2.84991413e-01 1.14329979e-01 2.58065671e-01] [ 1.07309029e-01 6.63964629e-01 -6.74346536e-02 ... 5.91137707e-01 1.87421188e-01 2.76758492e-01] ... [ 4.27628338e-01 5.87711513e-01 -1.85288489e-01 ... -1.80698380e-01 2.33060747e-01 -1.00716814e-01] [-9.36477482e-02 2.15136230e-01 7.51411915e-03 ... 4.20048684e-02 1.35178044e-01 -6.22008801e-01] [ 2.23097324e-01 6.81284487e-01 3.35814469e-02 ... 1.16363719e-01 -9.77979004e-02 -1.46042958e-01]] [[ 1.81312218e-01 8.19057047e-01 2.58145958e-01 ... -2.07053497e-01 4.96784896e-02 1.93467110e-01] [-2.92092681e-01 -5.21247089e-01 -6.53628632e-03 ... -1.38914108e-01 -6.12227201e-01 3.60314935e-01] [ 1.00748479e-01 1.91202462e-01 1.80690721e-01 ... 2.75874257e-01 1.16829306e-01 3.82108614e-04] ... [ 1.72254920e-01 -2.96328634e-01 -3.85735743e-02 ... 
4.30628896e-01 -6.67131990e-02 2.46385276e-01] [ 9.10739005e-02 9.38891321e-02 -1.86610907e-01 ... -1.26241982e-01 3.04638803e-01 -4.49339718e-01] [-1.24543369e-01 3.59319121e-01 2.14030683e-01 ... 6.29788935e-02 3.91663939e-01 1.19721577e-01]]] [[[ 2.22455335e+00 -2.22107029e+00 -8.14220011e-02 ... -2.00149393e+00 1.50033370e-01 7.20595777e-01] [ 1.45524406e+00 -1.19935945e-01 7.18576670e-01 ... -1.50656879e+00 -2.44697285e+00 -1.51538467e+00] [ 9.33316469e-01 7.00626493e-01 7.85311103e-01 ... 3.36476636e+00 4.03572321e+00 5.75509310e-01] ... [ 1.19378781e+00 -8.39047253e-01 1.48905199e-02 ... -5.97650647e-01 -2.92766213e-01 1.93566537e+00] [ 9.89739776e-01 7.57737815e-01 -1.67308724e+00 ... -2.04370141e+00 -5.86064160e-01 -5.25412261e-01] [-8.86947155e-01 1.06613970e+00 -1.21477449e+00 ... -2.00196099e+00 2.00206447e+00 2.28953838e-01]] [[-2.77573884e-01 -1.46017623e+00 -3.07158142e-01 ... -4.03969622e+00 1.12034583e+00 -1.45082533e+00] [ 1.63194215e+00 -3.31022429e+00 7.38874972e-01 ... -1.69498670e+00 -2.72288251e+00 -2.19381571e+00] [-1.81219077e+00 -2.47240499e-01 -1.41903973e+00 ... -1.66534334e-01 -2.23834205e+00 1.05396664e+00] ... [ 6.13245904e-01 1.41083527e+00 -1.37363896e-01 ... -3.87666821e-01 -1.87046885e+00 -3.26354718e+00] [ 1.94603705e+00 -1.55846202e+00 -9.13441420e-01 ... 6.85415685e-01 1.13249743e+00 -9.30694878e-01] [-5.61978698e-01 -1.61339080e+00 -9.07490253e-01 ... 1.50979495e+00 3.05741596e+00 2.10241020e-01]] [[-5.05773962e-01 1.39975810e+00 -6.96046710e-01 ... 7.19661653e-01 1.45907670e-01 -8.42628062e-01] [-3.58583927e-01 -2.13955715e-02 8.04830074e-01 ... 3.03292274e+00 -3.74405026e-01 -2.92017245e+00] [-2.24015951e+00 3.10648531e-01 -1.60195041e+00 ... 8.49526703e-01 -3.38799596e+00 2.26133823e+00] ... [ 3.30872238e-01 -2.52393574e-01 2.16689855e-01 ... 5.22324443e-01 1.23323631e+00 1.35688174e+00] [ 8.09586108e-01 5.12381971e-01 6.62036479e-01 ... 
2.51770520e+00 -7.94178724e-01 3.38041687e+00] [ 2.89683533e+00 4.02000308e-01 2.06289196e+00 ... 1.27219105e+00 -1.42089772e+00 1.42578697e+00]] ... [[ 8.93780172e-01 8.39165747e-01 1.75609565e+00 ... -2.53610522e-01 1.52772021e+00 1.16233444e+00] [ 2.05312634e+00 9.06513214e-01 -5.35932243e-01 ... 1.55930805e+00 -2.86970282e+00 -6.60897017e-01] [ 9.48097050e-01 -3.22400093e-01 7.62378931e-01 ... 2.68317968e-01 2.45090771e+00 1.51267517e+00] ... [ 1.18855171e-01 2.92539895e-01 9.39179584e-02 ... -2.44304276e+00 4.07401741e-01 -3.00866437e+00] [-3.50218081e+00 -3.57202625e+00 -2.01022601e+00 ... 8.12545121e-01 -2.00566220e+00 -1.61134994e+00] [ 1.54609704e+00 -1.14182234e+00 -8.52299273e-01 ... 1.07642794e+00 1.94395626e+00 -1.07697225e+00]] [[-2.68838584e-01 1.32690716e+00 2.50825238e+00 ... 3.81511331e+00 1.93134725e+00 3.97654444e-01] [-3.71778399e-01 -1.77725101e+00 -8.21671367e-01 ... -1.70796585e+00 -8.73394534e-02 -4.31105904e-02] [ 5.96491396e-01 1.16615033e+00 1.38764083e+00 ... 9.38795567e-01 -3.30891818e-01 8.46067965e-01] ... [-3.79972667e-01 2.13716045e-01 -5.44327572e-02 ... -2.60171843e+00 2.13178471e-01 2.20054999e-01] [ 5.69233239e-01 1.85288405e+00 -6.94244087e-01 ... -1.87922525e+00 9.53729451e-01 1.94528842e+00] [-1.06929505e+00 -1.05227804e+00 2.21734524e+00 ... -1.56255186e-01 -2.59531331e+00 1.02993178e+00]] [[ 2.07911468e+00 -1.59910440e+00 7.80402720e-01 ... -5.36675692e-01 -2.44925693e-01 1.71422172e+00] [-2.11605000e+00 -1.46519274e-01 -2.23603964e+00 ... 5.06538749e-01 -4.59968835e-01 -1.24393427e+00] [ 7.66952485e-02 -8.56430948e-01 -3.82089210e+00 ... 1.62424254e+00 2.51154947e+00 4.84217256e-01] ... [ 2.66857952e-01 9.60245848e-01 -6.28077209e-01 ... 1.53982401e-01 -1.39421487e+00 1.77596915e+00] [-1.35160315e+00 3.51309466e+00 -1.79928970e+00 ... 6.72067821e-01 2.03917906e-01 -9.58603799e-01] [ 1.35043478e+00 -1.16787326e+00 -1.15907049e+00 ... 
2.25240755e+00 6.05779827e-01 2.51815826e-01]]]]]; ov_res: [[[[[ 1.11582077e+00 9.66975927e-01 8.20247650e-01 ... 7.56445229e-01 1.04892683e+00 8.53887975e-01] [ 8.43855262e-01 7.12219596e-01 1.04804540e+00 ... 7.36853957e-01 8.53448451e-01 8.55714917e-01] [ 1.01445031e+00 8.30750287e-01 9.28074658e-01 ... 4.79592055e-01 8.21260154e-01 7.37317562e-01] ... [ 9.38840568e-01 9.31496382e-01 8.75502408e-01 ... 7.78919101e-01 9.12061989e-01 7.77552605e-01] [ 7.31338084e-01 1.02699745e+00 7.53095210e-01 ... 7.64584184e-01 8.88916969e-01 1.09127581e+00] [ 9.57659900e-01 8.35028470e-01 9.64906275e-01 ... 8.83684874e-01 1.09369266e+00 8.55744958e-01]] [[ 1.01001894e+00 9.65801060e-01 1.02173817e+00 ... 1.25924253e+00 8.99781942e-01 1.01337028e+00] [ 7.89634347e-01 8.21508944e-01 9.22807097e-01 ... 8.69695842e-01 1.07051861e+00 9.13956642e-01] [ 1.06942105e+00 1.04493117e+00 8.98782790e-01 ... 8.99324775e-01 8.67046177e-01 1.06774032e+00] ... [ 1.04756546e+00 8.73249888e-01 9.03727591e-01 ... 7.67690659e-01 8.51627469e-01 8.49805832e-01] [ 9.72333848e-01 8.58348191e-01 8.93512368e-01 ... 8.64722848e-01 9.85806644e-01 1.03507614e+00] [ 1.23803520e+00 9.47920144e-01 7.19476700e-01 ... 9.07565117e-01 8.05574000e-01 8.98866594e-01]] [[ 9.55202997e-01 9.76690829e-01 9.55374062e-01 ... 9.03993607e-01 1.17680490e+00 1.09344590e+00] [ 9.71431375e-01 1.05277085e+00 1.06619334e+00 ... 8.54604125e-01 9.76494610e-01 1.00988972e+00] [ 8.29485536e-01 1.09420156e+00 9.15049970e-01 ... 8.34769189e-01 8.00001562e-01 1.17906559e+00] ... [ 1.08198583e+00 1.08225727e+00 1.05417895e+00 ... 9.07709658e-01 8.55000973e-01 1.03504395e+00] [ 7.93921113e-01 9.96562243e-01 1.05226290e+00 ... 9.07797694e-01 8.80159378e-01 8.76272738e-01] [ 8.85734797e-01 9.43049610e-01 7.84412026e-01 ... 8.38057280e-01 7.78770149e-01 1.05269694e+00]] ... [[ 6.53228104e-01 8.24966133e-01 9.93554652e-01 ... 9.84899819e-01 9.39407229e-01 1.05801868e+00] [ 9.46274161e-01 9.98037398e-01 9.35766041e-01 ... 
7.65082359e-01 9.19838190e-01 1.02214444e+00] [ 9.27643597e-01 1.03599620e+00 6.93253517e-01 ... 9.75823343e-01 8.77395630e-01 1.01362121e+00] ... [ 1.07514429e+00 9.73075569e-01 1.11853361e+00 ... 1.12478316e+00 1.04865932e+00 1.01643598e+00] [ 9.74287450e-01 8.22766125e-01 8.15622509e-01 ... 8.22778344e-01 8.09590757e-01 9.79680181e-01] [ 8.78654242e-01 1.00058925e+00 9.62460041e-01 ... 9.61168647e-01 8.48718882e-01 1.07379150e+00]] [[ 1.08346176e+00 7.90131450e-01 9.84375715e-01 ... 9.25816655e-01 9.70433056e-01 9.94026124e-01] [ 8.21089387e-01 1.01585615e+00 9.73787308e-01 ... 9.77356613e-01 1.00969243e+00 1.04268074e+00] [ 6.85479522e-01 1.14722991e+00 8.98350656e-01 ... 1.03886402e+00 9.10711825e-01 1.07294619e+00] ... [ 9.35152888e-01 1.05149305e+00 1.01938534e+00 ... 1.04939711e+00 8.91300976e-01 8.04378808e-01] [ 9.79023457e-01 6.99018478e-01 8.86318147e-01 ... 1.07902646e+00 9.04327035e-01 9.43764091e-01] [ 8.06782603e-01 8.33701491e-01 7.37712979e-01 ... 1.02064025e+00 1.01900494e+00 9.75226104e-01]] [[ 8.46093595e-01 9.45521116e-01 7.71975100e-01 ... 9.91351485e-01 9.01837707e-01 1.09342504e+00] [ 1.14495015e+00 9.13695991e-01 1.20333648e+00 ... 1.13338292e+00 9.60912943e-01 9.33705628e-01] [ 1.06292653e+00 1.07141411e+00 1.00637102e+00 ... 9.28932309e-01 1.11891329e+00 9.93673265e-01] ... [ 8.87940347e-01 1.01211250e+00 9.15943384e-01 ... 1.01855528e+00 8.73694181e-01 9.81953800e-01] [ 8.39780152e-01 9.59221780e-01 9.06849921e-01 ... 9.29318368e-01 8.68621707e-01 8.75271559e-01] [ 9.34842765e-01 8.00770998e-01 8.19704950e-01 ... 1.04335046e+00 9.28309321e-01 8.56863976e-01]]] [[[-9.44802016e-02 -7.96349719e-03 -1.73027351e-01 ... 2.75305808e-02 -2.72557646e-01 2.42875014e-02] [ 1.88903995e-02 -9.98668820e-02 2.76548356e-01 ... 2.73867518e-01 5.35958350e-01 1.09282978e-01] [-3.50331753e-01 -1.76160336e-02 4.49527174e-01 ... 1.17926560e-01 -3.12531739e-01 1.05913132e-01] ... [ 5.15819341e-02 -5.09145856e-01 -1.65657416e-01 ... 
-4.31249619e-01 5.27155340e-01 -6.10931873e-01] [ 4.74903077e-01 -2.18387038e-01 9.61478800e-02 ... 1.12609498e-01 1.52874082e-01 -1.69695243e-01] [-2.85346657e-01 -2.91087419e-01 -4.73360531e-02 ... 2.14926451e-01 4.33155328e-01 1.17471942e-03]] [[ 3.15758348e-01 2.59337369e-02 -5.09332716e-01 ... -2.81416643e-02 2.73023367e-01 -1.63391829e-01] [-5.26613556e-02 -1.57452151e-01 -1.09140486e-01 ... 5.40355071e-02 6.66088238e-02 -1.59272492e-01] [-3.65101933e-01 -2.10556373e-01 -4.75570699e-03 ... 9.48150828e-02 5.11796832e-01 2.77952254e-01] ... [ 5.38222075e-01 1.61804199e-01 -1.14326820e-01 ... 2.46791378e-01 3.90308589e-01 2.80504644e-01] [-1.86077565e-01 6.01346791e-01 -6.06027067e-01 ... 3.17776650e-01 4.47254963e-02 -2.67081946e-01] [-9.83917043e-02 -1.82563156e-01 -2.50339862e-02 ... 3.47180784e-01 -2.45781001e-02 -2.00109243e-01]] [[-4.45973039e-01 1.56602994e-01 -3.19341838e-01 ... 6.91192150e-02 1.16745785e-01 1.65522113e-01] [ 6.76969230e-01 -2.70388931e-01 -7.27252841e-01 ... -9.14609507e-02 3.75782013e-01 -1.19779289e-01] [ 1.25110358e-01 -5.67159832e-01 2.78200418e-01 ... -4.34691280e-01 6.76959932e-01 2.43839517e-01] ... [-3.18908960e-01 -1.29357919e-01 5.73683202e-01 ... 2.64235642e-02 1.58967122e-01 -4.62740362e-01] [ 3.62530500e-01 5.12632072e-01 -2.64796078e-01 ... 4.10674453e-01 3.18593532e-01 6.59303546e-01] [ 7.00032592e-01 6.76932871e-01 2.37001508e-01 ... -2.34408695e-02 3.90653849e-01 9.13496986e-02]] ... [[ 2.81680614e-01 8.15919191e-02 -1.12918988e-01 ... 1.31966799e-01 -2.75694251e-01 6.18740678e-01] [-3.15235525e-01 -1.07162647e-01 -3.68795209e-02 ... -1.48370206e-01 9.48086232e-02 6.43315136e-01] [-1.45749107e-01 1.87231153e-01 7.77025893e-02 ... -2.25748047e-01 -4.54253733e-01 3.50881815e-01] ... [-2.46398807e-01 -4.74866144e-02 7.18998760e-02 ... 4.90031868e-01 1.65923268e-01 6.56419620e-03] [ 3.09544235e-01 3.48654181e-01 8.39074478e-02 ... 
-9.58453044e-02 -4.63346899e-01 1.34293288e-01] [ 1.24504730e-01 2.35598207e-01 9.20015872e-02 ... 5.18649340e-01 4.16831285e-01 1.52935252e-01]] [[ 2.55041927e-01 4.96314317e-02 -5.17907858e-01 ... -2.64622748e-01 9.37087275e-03 5.80584586e-01] [-1.34881169e-01 -1.54015109e-01 1.74354061e-01 ... -2.84991443e-01 1.14329971e-01 2.58065671e-01] [ 1.07309021e-01 6.63964629e-01 -6.74346462e-02 ... 5.91137767e-01 1.87421188e-01 2.76758492e-01] ... [ 4.27628338e-01 5.87711573e-01 -1.85288489e-01 ... -1.80698395e-01 2.33060747e-01 -1.00716822e-01] [-9.36477482e-02 2.15136230e-01 7.51411868e-03 ... 4.20048684e-02 1.35178044e-01 -6.22008741e-01] [ 2.23097324e-01 6.81284547e-01 3.35814469e-02 ... 1.16363712e-01 -9.77979004e-02 -1.46042958e-01]] [[ 1.81312203e-01 8.19057107e-01 2.58145988e-01 ... -2.07053497e-01 4.96784896e-02 1.93467110e-01] [-2.92092681e-01 -5.21247089e-01 -6.53628586e-03 ... -1.38914123e-01 -6.12227142e-01 3.60314935e-01] [ 1.00748472e-01 1.91202447e-01 1.80690721e-01 ... 2.75874257e-01 1.16829306e-01 3.82107915e-04] ... [ 1.72254905e-01 -2.96328634e-01 -3.85735780e-02 ... 4.30628896e-01 -6.67131990e-02 2.46385276e-01] [ 9.10738930e-02 9.38891321e-02 -1.86610922e-01 ... -1.26241982e-01 3.04638803e-01 -4.49339718e-01] [-1.24543369e-01 3.59319121e-01 2.14030668e-01 ... 6.29788935e-02 3.91663939e-01 1.19721569e-01]]] [[[ 2.22455359e+00 -2.22107029e+00 -8.14220011e-02 ... -2.00149393e+00 1.50033370e-01 7.20595777e-01] [ 1.45524406e+00 -1.19935945e-01 7.18576670e-01 ... -1.50656879e+00 -2.44697285e+00 -1.51538467e+00] [ 9.33316410e-01 7.00626493e-01 7.85311103e-01 ... 3.36476660e+00 4.03572321e+00 5.75509250e-01] ... [ 1.19378781e+00 -8.39047253e-01 1.48905199e-02 ... -5.97650707e-01 -2.92766184e-01 1.93566537e+00] [ 9.89739716e-01 7.57737756e-01 -1.67308724e+00 ... -2.04370141e+00 -5.86064160e-01 -5.25412261e-01] [-8.86947155e-01 1.06613982e+00 -1.21477437e+00 ... 
-2.00196099e+00 2.00206447e+00 2.28953838e-01]] [[-2.77573884e-01 -1.46017611e+00 -3.07158113e-01 ... -4.03969622e+00 1.12034595e+00 -1.45082533e+00] [ 1.63194215e+00 -3.31022429e+00 7.38874912e-01 ... -1.69498670e+00 -2.72288251e+00 -2.19381571e+00] [-1.81219065e+00 -2.47240499e-01 -1.41903961e+00 ... -1.66534334e-01 -2.23834205e+00 1.05396676e+00] ... [ 6.13245904e-01 1.41083527e+00 -1.37363896e-01 ... -3.87666821e-01 -1.87046885e+00 -3.26354718e+00] [ 1.94603705e+00 -1.55846190e+00 -9.13441420e-01 ... 6.85415685e-01 1.13249743e+00 -9.30694878e-01] [-5.61978698e-01 -1.61339068e+00 -9.07490253e-01 ... 1.50979495e+00 3.05741596e+00 2.10241020e-01]] [[-5.05774021e-01 1.39975822e+00 -6.96046710e-01 ... 7.19661653e-01 1.45907670e-01 -8.42628121e-01] [-3.58583897e-01 -2.13955715e-02 8.04830074e-01 ... 3.03292274e+00 -3.74404997e-01 -2.92017245e+00] [-2.24015951e+00 3.10648561e-01 -1.60195041e+00 ... 8.49526703e-01 -3.38799596e+00 2.26133823e+00] ... [ 3.30872267e-01 -2.52393544e-01 2.16689840e-01 ... 5.22324443e-01 1.23323631e+00 1.35688174e+00] [ 8.09586108e-01 5.12381971e-01 6.62036479e-01 ... 2.51770520e+00 -7.94178784e-01 3.38041687e+00] [ 2.89683533e+00 4.02000308e-01 2.06289196e+00 ... 1.27219105e+00 -1.42089772e+00 1.42578697e+00]] ... [[ 8.93780172e-01 8.39165747e-01 1.75609565e+00 ... -2.53610492e-01 1.52772021e+00 1.16233456e+00] [ 2.05312657e+00 9.06513214e-01 -5.35932243e-01 ... 1.55930817e+00 -2.86970282e+00 -6.60897017e-01] [ 9.48097050e-01 -3.22400093e-01 7.62378931e-01 ... 2.68317997e-01 2.45090795e+00 1.51267517e+00] ... [ 1.18855171e-01 2.92539924e-01 9.39179584e-02 ... -2.44304276e+00 4.07401770e-01 -3.00866413e+00] [-3.50218081e+00 -3.57202625e+00 -2.01022601e+00 ... 8.12545121e-01 -2.00566196e+00 -1.61134982e+00] [ 1.54609716e+00 -1.14182222e+00 -8.52299273e-01 ... 1.07642794e+00 1.94395626e+00 -1.07697213e+00]] [[-2.68838584e-01 1.32690728e+00 2.50825238e+00 ... 
3.81511331e+00 1.93134737e+00 3.97654444e-01] [-3.71778369e-01 -1.77725089e+00 -8.21671367e-01 ... -1.70796585e+00 -8.73394534e-02 -4.31105904e-02] [ 5.96491337e-01 1.16615033e+00 1.38764095e+00 ... 9.38795567e-01 -3.30891818e-01 8.46067965e-01] ... [-3.79972637e-01 2.13716045e-01 -5.44327609e-02 ... -2.60171843e+00 2.13178471e-01 2.20054999e-01] [ 5.69233239e-01 1.85288405e+00 -6.94244146e-01 ... -1.87922525e+00 9.53729391e-01 1.94528842e+00] [-1.06929493e+00 -1.05227804e+00 2.21734524e+00 ... -1.56255186e-01 -2.59531331e+00 1.02993178e+00]] [[ 2.07911491e+00 -1.59910440e+00 7.80402720e-01 ... -5.36675692e-01 -2.44925678e-01 1.71422172e+00] [-2.11604977e+00 -1.46519274e-01 -2.23603964e+00 ... 5.06538749e-01 -4.59968835e-01 -1.24393415e+00] [ 7.66952485e-02 -8.56430948e-01 -3.82089186e+00 ... 1.62424254e+00 2.51154947e+00 4.84217256e-01] ... [ 2.66857952e-01 9.60245848e-01 -6.28077209e-01 ... 1.53982401e-01 -1.39421487e+00 1.77596927e+00] [-1.35160315e+00 3.51309466e+00 -1.79928970e+00 ... 6.72067761e-01 2.03917906e-01 -9.58603799e-01] [ 1.35043478e+00 -1.16787326e+00 -1.15907049e+00 ... 2.25240755e+00 6.05779827e-01 2.51815826e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': 'valid', 'dilations': [1, 1, 1], 'groups': 3} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1133.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.0093 0.2770 0.5126 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 0.4043 (2,1,1,.,.) = 0.5145 (3,1,1,.,.) = 1.5867 [ CPUFloatType{3,1,1,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[[-8.66331816e-01 -2.01596767e-01 -5.32840490e-01 ... -1.00488675e+00 -4.98260319e-01 6.15645170e-01] [ 6.39318764e-01 -3.01299810e-01 1.87646568e-01 ... -5.50148726e-01 -2.84142822e-01 -2.52709627e-01] [-8.92186984e-02 -4.65223908e-01 3.73876393e-02 ... -6.74895644e-01 -1.82692222e-02 -5.70256114e-01] ... [-3.39678023e-03 -1.20198401e-02 1.48411483e-01 ... 2.61340708e-01 -4.02924508e-01 2.22746447e-01] [-2.33006895e-01 5.74944019e-01 4.78625059e-01 ... 2.29962558e-01 5.40302753e-01 -9.19552147e-01] [-1.23980597e-01 -1.19139403e-01 -9.38798785e-02 ... 2.68306732e-01 1.73223123e-01 6.00947618e-01]] [[-2.21330859e-02 6.69728100e-01 -1.78462371e-01 ... -1.57228917e-01 -8.65284875e-02 -7.97365308e-01] [ 7.20790178e-02 4.77451950e-01 -5.94500005e-02 ... 1.33317217e-01 -9.75535065e-02 -9.34365243e-02] [-1.75931737e-01 1.57089189e-01 -1.14888921e-01 ... -2.12464914e-01 -7.24627897e-02 -2.10620105e-01] ... [ 8.08963537e-01 -3.97673428e-01 -3.74789447e-01 ... 1.16178356e-02 -1.90377221e-01 6.19445443e-01] [ 2.22928911e-01 6.60411000e-01 4.88308743e-02 ... 
-4.35569316e-01 4.60482627e-01 2.81422168e-01] [-4.08675298e-02 1.52644888e-01 -3.79788846e-01 ... 3.32944423e-01 -1.35470942e-01 4.87669736e-01]] [[ 6.91379249e-01 4.50747997e-01 9.14101601e-01 ... -5.58104455e-01 -2.48277672e-02 -1.19099736e-01] [-1.36013227e-02 -9.40941367e-03 1.40202925e-01 ... 2.84906238e-01 -1.89784229e-01 1.96249515e-01] [ 6.25804424e-01 -3.17142844e-01 -3.67321670e-01 ... -3.26007098e-01 7.68895686e-01 -2.98057497e-01] ... [-6.24013484e-01 -7.14952469e-01 -3.43527794e-01 ... 3.68002892e-01 -3.08645338e-01 1.32600084e-01] [-1.17754890e-02 5.23705408e-02 3.06002885e-01 ... 4.59133208e-01 1.03254989e-01 2.28332728e-01] [-1.86326519e-01 -1.95961043e-01 -5.00948727e-03 ... 5.33263087e-01 -2.41927296e-01 -5.89061618e-01]] ... [[ 1.67521462e-01 -3.43873322e-01 3.01771313e-01 ... 5.64340968e-03 -4.63680148e-01 -6.76205456e-01] [ 3.52587104e-01 -2.55499601e-01 -6.07099175e-01 ... 4.17491645e-01 -2.22745389e-01 -1.79773569e-03] [-5.99294603e-01 -4.51236159e-01 -9.71639380e-02 ... -2.01341093e-01 3.97179425e-01 2.68329591e-01] ... [-2.29921699e-01 1.03459561e+00 -2.23118752e-01 ... 8.19410533e-02 9.29223299e-02 2.76021838e-01] [-1.95788994e-01 -4.24434431e-02 5.02275527e-02 ... 1.15869975e+00 2.55519360e-01 -4.09377933e-01] [ 2.76204258e-01 -6.70027062e-02 -6.13355279e-01 ... -4.05065507e-01 -2.13995606e-01 -4.52608019e-02]] [[ 6.33740518e-03 7.82419443e-01 3.28384250e-01 ... -5.71197793e-02 3.76063645e-01 6.52422756e-02] [ 1.08361468e-01 2.71200895e-01 5.75718701e-01 ... 7.75642097e-02 1.58107616e-02 2.03352809e-01] [-7.08502114e-01 -2.91336834e-01 1.11682117e-01 ... -9.21361864e-01 4.75713521e-01 1.76597834e-02] ... [ 2.99138203e-02 -5.83283842e-01 3.06690903e-03 ... 2.32592553e-01 5.34977257e-01 6.12421811e-01] [ 3.34309667e-01 3.71587276e-01 -6.76942766e-01 ... -5.07896125e-01 1.75907195e-01 3.92876297e-01] [ 6.59794629e-01 1.38708279e-01 5.63956238e-02 ... 
-3.64244699e-01 -6.32614076e-01 -4.17927295e-01]] [[ 4.88911062e-01 -1.57236323e-01 4.20466185e-01 ... 4.50954735e-01 -1.50556758e-01 -7.17270315e-01] [-2.48570889e-01 -2.96183974e-01 -3.41258824e-01 ... 1.19051039e-01 4.56372917e-01 1.58085749e-01] [-3.85484874e-01 -7.89725129e-03 8.87184918e-01 ... -2.21345395e-01 -1.33399367e-01 -1.96730867e-01] ... [ 7.08109915e-01 -3.09535451e-02 -2.32491009e-02 ... -2.41245925e-01 -9.46114678e-03 -4.98406924e-02] [-4.26393867e-01 -4.49261993e-01 7.30498061e-02 ... 4.36552703e-01 3.48610841e-02 1.01817787e-01] [-1.09810710e-01 1.86535120e-01 -7.05068707e-02 ... -1.04515338e+00 3.03158134e-01 3.08051080e-01]]] [[[ 1.11967778e+00 4.35495019e-01 -2.90702343e-01 ... 4.66340333e-01 -8.42328429e-01 -6.18201792e-01] [ 2.40300223e-01 3.05675596e-01 9.23519850e-01 ... 4.60235983e-01 -1.33561820e-01 1.09580517e+00] [ 3.08243155e-01 1.02118492e+00 5.36908865e-01 ... 9.51314569e-01 4.62122738e-01 7.22054601e-01] ... [ 7.32151628e-01 4.73552644e-01 5.03421366e-01 ... -3.84930730e-01 1.79691046e-01 7.33023703e-01] [ 4.43086743e-01 3.81357014e-01 1.17223501e-01 ... 5.84379077e-01 1.46962059e+00 4.57391977e-01] [ 6.01649523e-01 2.63796449e-02 -5.20133018e-01 ... 8.84486139e-01 8.04158688e-01 -7.99323440e-01]] [[ 1.04332829e+00 1.74212158e-01 7.90389001e-01 ... -1.67512268e-01 1.78724289e-01 4.82103586e-01] [ 2.18375921e-01 1.02553248e+00 -1.96455836e-01 ... 3.47038954e-02 2.26625174e-01 -3.15436423e-01] [ 1.09390616e-02 -7.66322017e-02 5.94877124e-01 ... 4.40345585e-01 6.26821041e-01 -2.28557587e-02] ... [-2.36336887e-01 4.23769593e-01 -5.34594655e-01 ... 5.36458373e-01 2.32213855e-01 2.14598700e-01] [-8.03065777e-01 7.04419374e-01 3.85082483e-01 ... -4.19012368e-01 1.14232406e-01 1.48677975e-01] [-2.48282611e-01 -3.14724982e-01 5.47422767e-02 ... 2.75870889e-01 2.12517843e-01 3.56330395e-01]] [[-1.91379279e-01 4.89880681e-01 3.49267185e-01 ... 4.96057123e-02 3.11793655e-01 3.75284582e-01] [ 4.13064241e-01 7.21633852e-01 4.74263310e-01 ... 
4.43131328e-02 2.88854390e-01 -6.65349305e-01] [-3.49619329e-01 4.85119700e-01 1.62038887e+00 ... 3.24504405e-01 -1.24054968e-01 5.25308013e-01] ... [ 8.56593609e-01 4.14562821e-01 6.46938801e-01 ... 5.64634204e-01 -3.06186855e-01 -5.93308330e-01] [ 6.54106289e-02 8.59905005e-01 8.93625081e-01 ... 8.22790265e-01 5.28908730e-01 -7.93186069e-01] [ 2.56079733e-01 6.30486608e-02 1.26655602e+00 ... 2.76112676e-01 4.22692984e-01 5.57862401e-01]] ... [[-2.84719467e-03 2.24129498e-01 -3.85996222e-01 ... -9.31459665e-03 1.39870733e-01 -4.86913025e-02] [ 5.87859511e-01 2.56779373e-01 1.45920873e-01 ... 6.86857104e-01 3.49333256e-01 -1.05459321e+00] [ 9.38258767e-02 -1.37237835e+00 5.19313872e-01 ... 5.52686900e-02 6.11312628e-01 4.55096275e-01] ... [-2.97916234e-01 -2.45517850e-01 2.27850720e-01 ... 1.03492737e-02 9.72720861e-01 -1.24496222e-03] [ 3.29293668e-01 -2.80873716e-01 8.86989355e-01 ... 2.87025124e-01 7.17606843e-01 2.51551718e-01] [ 8.95905435e-01 -3.46223712e-02 4.01229024e-01 ... 2.53192186e-01 -3.68577957e-01 -9.35058296e-02]] [[ 4.32622164e-01 7.07805216e-01 4.50517476e-01 ... 5.83458781e-01 1.84245080e-01 1.81096032e-01] [-3.88764501e-01 7.15463400e-01 5.91938853e-01 ... 4.30757105e-01 4.24879730e-01 -5.39506257e-01] [-7.01095641e-01 1.63536906e-01 4.07168746e-01 ... -3.45112920e-01 -2.08694845e-01 -2.87002265e-01] ... [ 1.00328371e-01 2.35908896e-01 4.78020906e-01 ... 6.97375417e-01 -6.28248572e-01 -4.49951053e-01] [ 4.66593653e-02 1.14111245e-01 7.20335364e-01 ... -5.48281848e-01 -3.14737082e-01 1.46269754e-01] [-5.58705509e-01 6.13620639e-01 1.10096705e+00 ... -3.03423166e-01 -3.16645980e-01 -4.71816897e-01]] [[ 9.59143639e-02 5.87738395e-01 -6.70468211e-02 ... 5.64916670e-01 5.81984103e-01 4.28876042e-01] [ 1.45922947e+00 3.02148223e-01 -5.98967969e-02 ... -2.21808612e-01 7.74726272e-01 2.51000017e-01] [ 7.54575729e-01 2.49125406e-01 9.74195898e-01 ... 7.50436038e-02 3.39082569e-01 -2.20579773e-01] ... [ 1.07940912e-01 8.02462697e-01 3.81029010e-01 ... 
2.62580365e-01 8.18407297e-01 1.09072417e-01] [ 7.24334657e-01 1.83396325e-01 7.46925920e-02 ... 5.83962381e-01 -1.41218960e-01 1.18060634e-01] [ 9.11168277e-01 -8.04649711e-01 -7.95957923e-01 ... 9.56901163e-02 5.65980673e-01 -5.02305567e-01]]] [[[ 2.56551194e+00 2.18170667e+00 2.36109233e+00 ... 1.17046189e+00 1.54953456e+00 -1.85730648e+00] [-1.00555134e+00 2.01881909e+00 -1.81607151e+00 ... 1.03709102e-01 -1.77869129e+00 -1.44814944e+00] [ 1.65481925e+00 2.23481941e+00 -6.02396190e-01 ... -1.70007420e+00 1.78855515e+00 2.76177108e-01] ... [ 2.31357646e+00 9.88864541e-01 -5.21674156e-02 ... -4.53562737e-01 1.42387295e+00 5.49016535e-01] [-2.36628580e+00 1.57422841e-01 -8.38844478e-01 ... 1.47245526e-02 4.51058388e+00 -3.82024407e-01] [-1.81704164e-01 2.88460422e+00 -9.12486374e-01 ... 2.08464861e+00 -3.87117505e-01 -1.01897144e+00]] [[ 1.52582192e+00 -1.85856175e+00 -2.12475955e-01 ... -2.31042910e+00 -2.17598987e+00 7.14598894e-01] [ 1.14981556e+00 -3.43965888e-01 9.43440497e-01 ... 2.21415162e+00 -1.72295427e+00 2.99877954e+00] [-2.20189953e+00 9.02106225e-01 -5.28871119e-01 ... -1.06357408e+00 3.19560027e+00 1.38978899e-01] ... [ 1.22133851e+00 3.64292860e-02 -2.05221009e+00 ... 5.52393556e-01 3.29923534e+00 2.21599841e+00] [ 1.05112791e+00 1.80115652e+00 5.81024218e+00 ... 3.71083498e+00 4.84436226e+00 5.63983738e-01] [ 1.50962567e+00 -2.27868974e-01 -5.76112092e-01 ... -1.10167956e+00 1.94603014e+00 -1.19162750e+00]] [[ 1.36958027e+00 2.31125736e+00 1.76041341e+00 ... -2.83873200e-01 -1.28242254e+00 6.69700205e-01] [-7.77804911e-01 1.95117927e+00 1.37805343e-01 ... -1.22318268e+00 -2.88601923e+00 1.31065488e+00] [ 5.93822765e+00 -4.48034525e-01 2.06371403e+00 ... -8.59201968e-01 1.55223012e+00 -6.63533807e-02] ... [-1.50756526e+00 3.91078281e+00 4.79824096e-01 ... 2.74129725e+00 -9.79768336e-01 -7.59297431e-01] [-8.40956151e-01 -7.83995807e-01 -8.55500281e-01 ... 
2.62766272e-01 2.77839565e+00 1.85157633e+00] [-1.87950754e+00 -2.85480976e-01 2.22540677e-01 ... -6.70376003e-01 2.16084433e+00 1.81429291e+00]] ... [[ 6.58489287e-01 2.29151607e-01 2.10498118e+00 ... 1.35328209e+00 4.74736899e-01 1.49568629e+00] [ 1.93936276e+00 2.94150424e+00 7.44209349e-01 ... 1.10194349e+00 1.27082539e+00 3.19221497e+00] [ 1.28629899e+00 5.20689678e+00 1.13826561e+00 ... 1.25956154e+00 2.45759416e+00 2.46178222e+00] ... [ 3.70052814e+00 7.93873250e-01 5.80746174e-01 ... 8.58214974e-01 -1.41139102e+00 9.02437866e-02] [ 3.45621228e+00 -1.66657829e+00 5.07114768e-01 ... -1.53175163e+00 -2.39840794e+00 -1.64600205e+00] [-6.75791323e-01 2.73884535e-02 6.43767238e-01 ... -3.02118540e+00 2.29343486e+00 1.14421105e+00]] [[ 1.14446640e+00 -1.04846501e+00 -6.48784935e-01 ... -8.58847201e-01 6.46236718e-01 -1.47523665e+00] [-2.45733762e+00 8.63560200e-01 1.35232806e+00 ... -2.21246624e+00 -1.27914858e+00 5.45425117e-01] [-2.13071346e+00 8.37408185e-01 -1.96456933e+00 ... 1.15922451e+00 -1.03573656e+00 -1.58629346e+00] ... [-6.05154574e-01 3.06528831e+00 1.53122807e+00 ... -8.36690843e-01 1.20883298e+00 2.05879092e+00] [-2.40438700e-01 -2.60638666e+00 7.95707941e-01 ... 2.11954355e+00 3.39931041e-01 1.43725693e+00] [-1.54209328e+00 4.31069899e+00 6.68147981e-01 ... 5.39366186e-01 1.60777497e+00 3.67629826e-02]] [[-1.76052749e-01 1.57286572e+00 -1.26969314e+00 ... 2.10451484e-01 3.34881568e+00 1.00692070e+00] [ 1.42418456e+00 -3.52881730e-01 1.30508816e+00 ... 1.90268707e+00 -1.07995176e+00 9.87425864e-01] [ 8.30005288e-01 1.22173440e+00 -1.15578628e+00 ... 2.76026011e+00 2.17713833e+00 3.59414124e+00] ... [-3.60999823e-01 -8.45601618e-01 4.44064051e-01 ... 1.19172573e+00 1.09171534e+00 4.35593319e+00] [ 1.26154423e+00 1.01653659e+00 1.33185971e+00 ... 1.06989789e+00 7.43194461e-01 3.52916336e+00] [ 1.37491632e+00 7.21721172e-01 4.01569307e-02 ... 
1.76039720e+00 -6.87541783e-01 4.00108784e-01]]]]]; ov_res: [[[[[-8.66331816e-01 -2.01596767e-01 -5.32840490e-01 ... -1.00488675e+00 -4.98260319e-01 6.15645170e-01] [ 6.39318764e-01 -3.01299810e-01 1.87646568e-01 ... -5.50148726e-01 -2.84142822e-01 -2.52709627e-01] [-8.92186984e-02 -4.65223908e-01 3.73876393e-02 ... -6.74895644e-01 -1.82692241e-02 -5.70256114e-01] ... [-3.39677976e-03 -1.20198410e-02 1.48411468e-01 ... 2.61340708e-01 -4.02924508e-01 2.22746447e-01] [-2.33006895e-01 5.74944019e-01 4.78625059e-01 ... 2.29962558e-01 5.40302753e-01 -9.19552147e-01] [-1.23980597e-01 -1.19139396e-01 -9.38798785e-02 ... 2.68306702e-01 1.73223123e-01 6.00947618e-01]] [[-2.21330859e-02 6.69728100e-01 -1.78462371e-01 ... -1.57228917e-01 -8.65284875e-02 -7.97365308e-01] [ 7.20790178e-02 4.77451950e-01 -5.94500005e-02 ... 1.33317217e-01 -9.75535139e-02 -9.34365243e-02] [-1.75931737e-01 1.57089174e-01 -1.14888921e-01 ... -2.12464914e-01 -7.24627897e-02 -2.10620105e-01] ... [ 8.08963537e-01 -3.97673428e-01 -3.74789447e-01 ... 1.16178356e-02 -1.90377221e-01 6.19445443e-01] [ 2.22928911e-01 6.60411000e-01 4.88308743e-02 ... -4.35569316e-01 4.60482627e-01 2.81422168e-01] [-4.08675298e-02 1.52644888e-01 -3.79788846e-01 ... 3.32944423e-01 -1.35470942e-01 4.87669736e-01]] [[ 6.91379249e-01 4.50747997e-01 9.14101601e-01 ... -5.58104455e-01 -2.48277672e-02 -1.19099736e-01] [-1.36013227e-02 -9.40941274e-03 1.40202925e-01 ... 2.84906238e-01 -1.89784244e-01 1.96249515e-01] [ 6.25804424e-01 -3.17142844e-01 -3.67321670e-01 ... -3.26007098e-01 7.68895686e-01 -2.98057497e-01] ... [-6.24013484e-01 -7.14952469e-01 -3.43527794e-01 ... 3.68002892e-01 -3.08645338e-01 1.32600084e-01] [-1.17754899e-02 5.23705371e-02 3.06002885e-01 ... 4.59133208e-01 1.03254981e-01 2.28332713e-01] [-1.86326519e-01 -1.95961043e-01 -5.00948774e-03 ... 5.33263087e-01 -2.41927311e-01 -5.89061618e-01]] ... [[ 1.67521462e-01 -3.43873322e-01 3.01771313e-01 ... 
5.64340921e-03 -4.63680148e-01 -6.76205456e-01] [ 3.52587104e-01 -2.55499601e-01 -6.07099235e-01 ... 4.17491645e-01 -2.22745389e-01 -1.79773592e-03] [-5.99294603e-01 -4.51236159e-01 -9.71639380e-02 ... -2.01341093e-01 3.97179425e-01 2.68329591e-01] ... [-2.29921699e-01 1.03459561e+00 -2.23118752e-01 ... 8.19410533e-02 9.29223225e-02 2.76021838e-01] [-1.95788994e-01 -4.24434468e-02 5.02275527e-02 ... 1.15869975e+00 2.55519360e-01 -4.09377933e-01] [ 2.76204258e-01 -6.70027062e-02 -6.13355279e-01 ... -4.05065507e-01 -2.13995606e-01 -4.52608056e-02]] [[ 6.33740565e-03 7.82419443e-01 3.28384250e-01 ... -5.71197830e-02 3.76063645e-01 6.52422756e-02] [ 1.08361468e-01 2.71200895e-01 5.75718701e-01 ... 7.75642022e-02 1.58107635e-02 2.03352809e-01] [-7.08502114e-01 -2.91336834e-01 1.11682117e-01 ... -9.21361864e-01 4.75713521e-01 1.76597815e-02] ... [ 2.99138185e-02 -5.83283842e-01 3.06690927e-03 ... 2.32592553e-01 5.34977257e-01 6.12421811e-01] [ 3.34309667e-01 3.71587276e-01 -6.76942766e-01 ... -5.07896125e-01 1.75907195e-01 3.92876297e-01] [ 6.59794629e-01 1.38708279e-01 5.63956238e-02 ... -3.64244699e-01 -6.32614076e-01 -4.17927295e-01]] [[ 4.88911062e-01 -1.57236323e-01 4.20466185e-01 ... 4.50954735e-01 -1.50556758e-01 -7.17270315e-01] [-2.48570874e-01 -2.96183974e-01 -3.41258824e-01 ... 1.19051039e-01 4.56372917e-01 1.58085749e-01] [-3.85484874e-01 -7.89725129e-03 8.87184918e-01 ... -2.21345395e-01 -1.33399367e-01 -1.96730867e-01] ... [ 7.08109915e-01 -3.09535451e-02 -2.32491028e-02 ... -2.41245925e-01 -9.46114678e-03 -4.98406924e-02] [-4.26393867e-01 -4.49261993e-01 7.30498061e-02 ... 4.36552703e-01 3.48610841e-02 1.01817787e-01] [-1.09810710e-01 1.86535120e-01 -7.05068707e-02 ... -1.04515338e+00 3.03158134e-01 3.08051080e-01]]] [[[ 1.11967778e+00 4.35495019e-01 -2.90702343e-01 ... 4.66340333e-01 -8.42328429e-01 -6.18201792e-01] [ 2.40300223e-01 3.05675596e-01 9.23519850e-01 ... 
4.60235983e-01 -1.33561820e-01 1.09580505e+00] [ 3.08243155e-01 1.02118492e+00 5.36908865e-01 ... 9.51314569e-01 4.62122768e-01 7.22054660e-01] ... [ 7.32151568e-01 4.73552644e-01 5.03421366e-01 ... -3.84930730e-01 1.79691046e-01 7.33023703e-01] [ 4.43086743e-01 3.81357014e-01 1.17223509e-01 ... 5.84379077e-01 1.46962059e+00 4.57391977e-01] [ 6.01649463e-01 2.63796560e-02 -5.20133018e-01 ... 8.84486139e-01 8.04158688e-01 -7.99323380e-01]] [[ 1.04332829e+00 1.74212158e-01 7.90389001e-01 ... -1.67512283e-01 1.78724289e-01 4.82103616e-01] [ 2.18375921e-01 1.02553248e+00 -1.96455836e-01 ... 3.47038992e-02 2.26625174e-01 -3.15436423e-01] [ 1.09390626e-02 -7.66321942e-02 5.94877183e-01 ... 4.40345585e-01 6.26821101e-01 -2.28557643e-02] ... [-2.36336887e-01 4.23769593e-01 -5.34594655e-01 ... 5.36458433e-01 2.32213870e-01 2.14598700e-01] [-8.03065836e-01 7.04419315e-01 3.85082483e-01 ... -4.19012368e-01 1.14232406e-01 1.48677975e-01] [-2.48282596e-01 -3.14724982e-01 5.47422767e-02 ... 2.75870889e-01 2.12517843e-01 3.56330395e-01]] [[-1.91379279e-01 4.89880681e-01 3.49267185e-01 ... 4.96057160e-02 3.11793655e-01 3.75284582e-01] [ 4.13064271e-01 7.21633852e-01 4.74263340e-01 ... 4.43131328e-02 2.88854390e-01 -6.65349305e-01] [-3.49619329e-01 4.85119700e-01 1.62038887e+00 ... 3.24504405e-01 -1.24054968e-01 5.25308013e-01] ... [ 8.56593609e-01 4.14562821e-01 6.46938860e-01 ... 5.64634204e-01 -3.06186825e-01 -5.93308330e-01] [ 6.54106289e-02 8.59905005e-01 8.93625081e-01 ... 8.22790265e-01 5.28908730e-01 -7.93186009e-01] [ 2.56079733e-01 6.30486608e-02 1.26655614e+00 ... 2.76112676e-01 4.22692984e-01 5.57862401e-01]] ... [[-2.84720864e-03 2.24129498e-01 -3.85996222e-01 ... -9.31460131e-03 1.39870733e-01 -4.86912988e-02] [ 5.87859511e-01 2.56779373e-01 1.45920873e-01 ... 6.86857164e-01 3.49333256e-01 -1.05459321e+00] [ 9.38258767e-02 -1.37237835e+00 5.19313872e-01 ... 5.52686974e-02 6.11312628e-01 4.55096275e-01] ... [-2.97916263e-01 -2.45517865e-01 2.27850720e-01 ... 
1.03492755e-02 9.72720861e-01 -1.24497258e-03] [ 3.29293668e-01 -2.80873716e-01 8.86989355e-01 ... 2.87025124e-01 7.17606843e-01 2.51551718e-01] [ 8.95905435e-01 -3.46223824e-02 4.01229024e-01 ... 2.53192186e-01 -3.68577957e-01 -9.35058296e-02]] [[ 4.32622164e-01 7.07805216e-01 4.50517476e-01 ... 5.83458781e-01 1.84245095e-01 1.81096032e-01] [-3.88764501e-01 7.15463340e-01 5.91938853e-01 ... 4.30757105e-01 4.24879760e-01 -5.39506257e-01] [-7.01095641e-01 1.63536906e-01 4.07168776e-01 ... -3.45112920e-01 -2.08694860e-01 -2.87002265e-01] ... [ 1.00328363e-01 2.35908896e-01 4.78020936e-01 ... 6.97375357e-01 -6.28248572e-01 -4.49951053e-01] [ 4.66593727e-02 1.14111245e-01 7.20335364e-01 ... -5.48281848e-01 -3.14737082e-01 1.46269754e-01] [-5.58705509e-01 6.13620698e-01 1.10096705e+00 ... -3.03423136e-01 -3.16646010e-01 -4.71816868e-01]] [[ 9.59143564e-02 5.87738454e-01 -6.70468360e-02 ... 5.64916670e-01 5.81984103e-01 4.28876042e-01] [ 1.45922947e+00 3.02148223e-01 -5.98968007e-02 ... -2.21808612e-01 7.74726272e-01 2.51000017e-01] [ 7.54575729e-01 2.49125406e-01 9.74195898e-01 ... 7.50435963e-02 3.39082569e-01 -2.20579788e-01] ... [ 1.07940912e-01 8.02462697e-01 3.81029010e-01 ... 2.62580365e-01 8.18407297e-01 1.09072417e-01] [ 7.24334657e-01 1.83396325e-01 7.46925846e-02 ... 5.83962381e-01 -1.41218975e-01 1.18060626e-01] [ 9.11168277e-01 -8.04649711e-01 -7.95957923e-01 ... 9.56901163e-02 5.65980673e-01 -5.02305567e-01]]] [[[ 2.56551194e+00 2.18170667e+00 2.36109233e+00 ... 1.17046189e+00 1.54953444e+00 -1.85730648e+00] [-1.00555122e+00 2.01881909e+00 -1.81607139e+00 ... 1.03709102e-01 -1.77869117e+00 -1.44814944e+00] [ 1.65481925e+00 2.23481941e+00 -6.02396190e-01 ... -1.70007408e+00 1.78855503e+00 2.76177108e-01] ... [ 2.31357646e+00 9.88864541e-01 -5.21673970e-02 ... -4.53562707e-01 1.42387295e+00 5.49016535e-01] [-2.36628580e+00 1.57422855e-01 -8.38844478e-01 ... 1.47245638e-02 4.51058388e+00 -3.82024437e-01] [-1.81704164e-01 2.88460422e+00 -9.12486434e-01 ... 
2.08464861e+00 -3.87117505e-01 -1.01897156e+00]] [[ 1.52582181e+00 -1.85856175e+00 -2.12475955e-01 ... -2.31042886e+00 -2.17598987e+00 7.14598894e-01] [ 1.14981568e+00 -3.43965888e-01 9.43440497e-01 ... 2.21415162e+00 -1.72295427e+00 2.99877954e+00] [-2.20189953e+00 9.02106225e-01 -5.28871119e-01 ... -1.06357408e+00 3.19560051e+00 1.38978899e-01] ... [ 1.22133851e+00 3.64292823e-02 -2.05221009e+00 ... 5.52393556e-01 3.29923534e+00 2.21599841e+00] [ 1.05112803e+00 1.80115652e+00 5.81024218e+00 ... 3.71083498e+00 4.84436226e+00 5.63983738e-01] [ 1.50962567e+00 -2.27868944e-01 -5.76112032e-01 ... -1.10167956e+00 1.94603002e+00 -1.19162750e+00]] [[ 1.36958039e+00 2.31125736e+00 1.76041341e+00 ... -2.83873171e-01 -1.28242266e+00 6.69700205e-01] [-7.77804852e-01 1.95117927e+00 1.37805343e-01 ... -1.22318268e+00 -2.88601923e+00 1.31065488e+00] [ 5.93822765e+00 -4.48034525e-01 2.06371403e+00 ... -8.59201968e-01 1.55223024e+00 -6.63533509e-02] ... [-1.50756514e+00 3.91078305e+00 4.79824096e-01 ... 2.74129725e+00 -9.79768276e-01 -7.59297431e-01] [-8.40956211e-01 -7.83995807e-01 -8.55500281e-01 ... 2.62766272e-01 2.77839589e+00 1.85157621e+00] [-1.87950742e+00 -2.85480976e-01 2.22540662e-01 ... -6.70376003e-01 2.16084433e+00 1.81429291e+00]] ... [[ 6.58489287e-01 2.29151607e-01 2.10498118e+00 ... 1.35328209e+00 4.74736899e-01 1.49568629e+00] [ 1.93936288e+00 2.94150424e+00 7.44209349e-01 ... 1.10194349e+00 1.27082527e+00 3.19221497e+00] [ 1.28629911e+00 5.20689678e+00 1.13826549e+00 ... 1.25956154e+00 2.45759416e+00 2.46178222e+00] ... [ 3.70052814e+00 7.93873250e-01 5.80746114e-01 ... 8.58214974e-01 -1.41139102e+00 9.02437717e-02] [ 3.45621228e+00 -1.66657829e+00 5.07114768e-01 ... -1.53175151e+00 -2.39840794e+00 -1.64600205e+00] [-6.75791264e-01 2.73884460e-02 6.43767238e-01 ... -3.02118540e+00 2.29343486e+00 1.14421093e+00]] [[ 1.14446628e+00 -1.04846513e+00 -6.48784995e-01 ... 
-8.58847201e-01 6.46236718e-01 -1.47523677e+00] [-2.45733762e+00 8.63560259e-01 1.35232806e+00 ... -2.21246600e+00 -1.27914858e+00 5.45425117e-01] [-2.13071346e+00 8.37408125e-01 -1.96456933e+00 ... 1.15922451e+00 -1.03573644e+00 -1.58629334e+00] ... [-6.05154634e-01 3.06528831e+00 1.53122807e+00 ... -8.36690843e-01 1.20883310e+00 2.05879092e+00] [-2.40438685e-01 -2.60638642e+00 7.95707881e-01 ... 2.11954355e+00 3.39931041e-01 1.43725693e+00] [-1.54209316e+00 4.31069899e+00 6.68147981e-01 ... 5.39366186e-01 1.60777497e+00 3.67629975e-02]] [[-1.76052764e-01 1.57286572e+00 -1.26969314e+00 ... 2.10451484e-01 3.34881568e+00 1.00692070e+00] [ 1.42418456e+00 -3.52881730e-01 1.30508816e+00 ... 1.90268719e+00 -1.07995176e+00 9.87425864e-01] [ 8.30005288e-01 1.22173440e+00 -1.15578640e+00 ... 2.76026011e+00 2.17713833e+00 3.59414124e+00] ... [-3.60999823e-01 -8.45601618e-01 4.44064051e-01 ... 1.19172561e+00 1.09171546e+00 4.35593319e+00] [ 1.26154411e+00 1.01653659e+00 1.33185971e+00 ... 1.06989777e+00 7.43194461e-01 3.52916336e+00] [ 1.37491632e+00 7.21721172e-01 4.01569307e-02 ... 1.76039732e+00 -6.87541783e-01 4.00108784e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': [2, 2, 1], 'pads': 'valid', 'dilations': [1, 1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1135.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2, 1]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.484311}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[[ 6.600746 3.543324 -3.0021842 1.2218262 3.6103256 3.272322 -5.636719 -4.7349443 ] [-11.054987 -12.247942 -2.7405899 -2.0431073 14.208693 3.6828506 -1.1456045 13.531871 ] [ -8.5415535 -9.208241 0.24291526 -28.24598 1.3273278 7.3022904 12.397524 -3.920017 ] [ 1.0029932 15.690036 -10.958319 -4.1921353 14.457415 16.77304 -22.189747 2.053182 ]] [[ -4.009871 -10.781505 16.909918 -8.732653 4.424955 19.984278 8.879063 6.3514977 ] [ -2.5397346 1.6566279 -15.020053 -5.0277824 0.06768756 0.9372773 4.8027687 5.1964307 ] [ -7.5689354 -2.1855412 3.5442588 -15.73698 -17.10011 -3.1350899 -3.8069153 -3.0103424 ] [ 0.2663286 -7.0805864 1.5174218 -0.35188046 2.3783734 0.09208656 4.9348884 0.18888757]] [[ 5.944068 -9.82682 -2.2396717 3.212579 -0.9335232 -3.38334 -2.0578587 11.502058 ] [ 8.830102 -4.863942 -6.6008887 7.429468 10.147659 7.742633 3.7487583 16.994768 ] [-16.068981 13.27316 13.3544855 -8.038965 -15.572469 3.4114075 9.287346 -17.663761 ] [ 8.080671 -17.64474 -20.68248 13.706991 -2.5957165 -1.5144529 9.579573 5.7382755 ]] [[-16.447895 0.61951447 -4.7913494 9.339925 10.431965 -8.177457 -1.7315915 -1.4326663 ] [ 18.166946 
15.075152 3.9720643 -0.30892378 2.4917934 -2.0209126 -11.048706 1.3676912 ] [ 8.926453 8.167392 14.294787 -4.878683 8.245535 3.7579541 11.037072 11.175373 ] [ 11.741962 -11.1624565 -22.874775 12.861515 -2.331975 10.224474 2.875141 -13.890609 ]]]]]; ov_res: [[[[[ 6.6007466 3.543324 -3.0021832 1.2218263 3.6103265 3.2723196 -5.636718 -4.734945 ] [-11.054988 -12.247943 -2.7405908 -2.0431101 14.2086935 3.682852 -1.1456048 13.531874 ] [ -8.5415535 -9.2082405 0.24291511 -28.245972 1.3273269 7.3022885 12.397521 -3.9200168 ] [ 1.0029945 15.690034 -10.958318 -4.19214 14.457419 16.77304 -22.18975 2.0531836 ]] [[ -4.00987 -10.781508 16.909916 -8.732651 4.424955 19.98428 8.879064 6.351499 ] [ -2.5397356 1.6566278 -15.020052 -5.0277824 0.0676868 0.9372764 4.8027678 5.196432 ] [ -7.568935 -2.185542 3.5442576 -15.736977 -17.100111 -3.1350918 -3.8069162 -3.0103407 ] [ 0.2663281 -7.080589 1.5174218 -0.35188076 2.378372 0.09208587 4.93489 0.18888688]] [[ 5.9440694 -9.8268175 -2.239671 3.21258 -0.93352365 -3.38334 -2.0578573 11.502052 ] [ 8.830098 -4.8639426 -6.600889 7.4294705 10.147659 7.7426324 3.7487578 16.994764 ] [-16.068981 13.273159 13.354487 -8.038966 -15.572475 3.4114068 9.287346 -17.663763 ] [ 8.08067 -17.64474 -20.682484 13.70699 -2.5957184 -1.5144521 9.57957 5.7382746 ]] [[-16.447891 0.6195135 -4.791349 9.339923 10.431965 -8.177451 -1.7315942 -1.4326655 ] [ 18.166945 15.075156 3.9720652 -0.30892244 2.4917936 -2.02091 -11.048708 1.3676914 ] [ 8.926454 8.167393 14.294788 -4.8786826 8.245531 3.7579503 11.037075 11.17537 ] [ 11.741964 -11.162455 -22.87477 12.861518 -2.3319752 10.224472 2.8751407 -13.890608 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': [2, 2, 2], 'pads': 'valid', 'dilations': [1, 1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1137.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={0.497402}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[[ 9.592277 2.499948 -4.389157 6.5149856] [ -5.6142864 -4.4599056 9.922337 1.2527138] [ 2.6594696 -11.421094 12.480655 6.695778 ] [-12.592071 6.1641965 -4.515815 -4.1922655]] [[ 13.956762 1.9655368 9.098564 1.2106371] [-14.643401 12.386505 5.4803348 1.8280847] [ -4.7405014 -0.9600566 4.4651856 1.7365199] [ -0.5986274 -15.353958 5.8927846 -9.598957 ]] [[ -5.4609013 -10.76 -4.48148 -9.956247 ] [ 0.9002857 -1.6034802 2.83634 5.298455 ] [ -8.589813 -9.302103 14.668485 7.433088 ] [-11.078006 -1.4980539 2.4268243 0.6249631]] [[ -9.628248 3.8137565 -0.1496667 -3.5279753] [ 12.734416 2.817895 -0.8848063 10.5297575] [ 4.622322 0.7889275 -12.6351 -4.2104006] [ 5.430731 18.346733 -13.181775 -4.065311 ]]]]]; ov_res: [[[[[ 9.59228 2.4999487 -4.3891563 6.514987 ] [ -5.6142864 -4.4599047 9.922338 1.2527133 ] [ 2.65947 -11.421093 12.480654 6.6957774 ] [-12.5920725 6.164195 -4.515816 -4.192267 ]] [[ 13.956759 1.9655366 9.098565 1.2106373 ] [-14.643402 12.386509 5.4803343 1.8280835 ] [ -4.740502 -0.96005553 4.465185 1.7365193 ] [ -0.5986263 -15.353955 5.8927855 -9.598959 ]] [[ -5.4609013 -10.76 -4.481481 -9.956246 ] [ 0.90028733 
-1.6034794 2.8363395 5.298454 ] [ -8.589813 -9.3021 14.668484 7.4330854 ] [-11.078007 -1.4980545 2.4268246 0.6249633 ]] [[ -9.628247 3.8137558 -0.14966619 -3.5279753 ] [ 12.734413 2.8178947 -0.88480586 10.529759 ] [ 4.622323 0.7889265 -12.635103 -4.2104 ] [ 5.4307313 18.346735 -13.181775 -4.06531 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': [2, 2, 2], 'pads': 'valid', 'dilations': [2, 2, 2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1139.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : Float(1, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value={-0.641517}]() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[[ -8.582158 -4.320401 5.9230943] [ -3.7197104 -7.406358 3.9711077] [ 5.5689807 -4.9778557 -17.334707 ]] [[ 7.8444877 6.8174186 -1.5830244] [ 7.136565 -2.1754262 -7.9360347] [ 4.1751294 -7.181864 3.9386947]] [[-12.959929 1.2664531 6.774807 ] [ 7.5895505 2.4033391 -4.8418345] [-10.563996 -9.073652 -3.73732 ]]]]]; ov_res: [[[[[ -8.58216 -4.3203993 5.9230967] [ -3.719711 -7.406356 3.9711072] [ 5.568984 -4.9778566 -17.33471 ]] [[ 7.8444843 6.817418 -1.5830251] [ 7.136565 -2.175425 -7.936036 ] [ 4.175129 -7.181859 3.9386947]] [[-12.959933 1.2664534 6.774808 ] [ 7.589551 2.4033403 -4.8418303] [-10.563996 -9.07365 -3.7373223]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': 'same', 'dilations': [2, 1, 2], 'groups': 3} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1141.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 1, 2]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.3241 0.3736 0.2549 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = 1.5339 (2,1,1,.,.) = -1.3883 (3,1,1,.,.) = -1.0566 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[[ 2.06420779e-01 -2.01051188e+00 -4.71369553e+00 ... -1.44680834e+00 -2.91030979e+00 -1.71500814e+00] [-1.07400250e+00 -1.43487084e+00 -2.85120726e+00 ... -1.02990139e+00 -2.15515614e+00 -1.18915915e-01] [-2.11515117e+00 1.13650954e+00 1.73040628e-01 ... -7.28784561e-01 1.11699879e+00 -5.00919533e+00] ... [-9.54035342e-01 -4.00701046e+00 -3.58553743e+00 ... -9.59906936e-01 -1.30914783e+00 -2.69444823e+00] [-3.76715660e-01 -7.79900968e-01 -2.92911506e+00 ... -1.19822049e+00 -3.33454037e+00 -1.51947880e+00] [-1.27057874e+00 -1.27345049e+00 -6.08950710e+00 ... 8.17112565e-01 -4.16961288e+00 -1.46326530e+00]] [[-8.37414622e-01 -1.02362335e+00 5.04371166e-01 ... -7.86953330e-01 -5.78776300e-01 -9.58825350e-02] [-4.21416426e+00 -3.73808146e-02 -2.32052350e+00 ... -4.65627098e+00 8.30377936e-01 -2.67353368e+00] [-1.04260302e+00 -2.92551661e+00 2.74810314e-01 ... -1.72154546e+00 4.64196682e-01 -2.16164231e+00] ... [-2.52536750e+00 -2.07626200e+00 -9.28745151e-01 ... 
-6.78623259e-01 -2.40059853e+00 -2.53078604e+00] [-9.16561186e-01 -8.53178382e-01 -3.23097658e+00 ... -2.48621416e+00 3.59341025e-01 -1.77681708e+00] [-3.27020001e+00 -2.67804074e+00 -7.43174076e-01 ... -2.32414603e-01 -1.43111062e+00 2.24749374e+00]] [[-4.40319920e+00 -3.25318384e+00 -2.02481890e+00 ... -2.04295826e+00 5.33062696e-01 -1.00177431e+00] [-1.57728434e+00 -1.10463333e+00 -1.38311481e+00 ... -5.56013942e-01 -1.52580404e+00 -3.23863506e+00] [-4.47014570e-01 -2.44758368e+00 7.95265079e-01 ... -1.63268852e+00 1.74647212e-01 -1.29411423e+00] ... [-9.89676237e-01 1.44401491e+00 -1.41491556e+00 ... -2.78657913e+00 -1.93762016e+00 -5.37134886e-01] [-1.39414310e+00 -3.91690826e+00 2.51145601e-01 ... -3.59162045e+00 -4.11381543e-01 -4.00969172e+00] [-1.21978700e+00 2.23231316e-03 1.73586071e+00 ... -1.37508512e+00 -2.13627386e+00 -1.62969089e+00]] ... [[-2.45490074e+00 -3.18389416e+00 -7.09424973e-01 ... -3.51429999e-01 -1.62071514e+00 -2.90334463e+00] [-2.60404873e+00 1.02895391e+00 3.05126667e+00 ... -2.46516156e+00 -2.16189861e-01 -1.93368948e+00] [-1.77240705e+00 -9.22896743e-01 -1.85009205e+00 ... -2.59697199e+00 -1.19184160e+00 -4.38140297e+00] ... [-3.08310628e-01 -3.52020121e+00 -2.29793668e+00 ... -1.42707491e+00 3.18807364e-02 -2.78469276e+00] [-1.67322707e+00 -1.20182025e+00 -2.44905186e+00 ... 4.09242749e-01 -7.98178077e-01 4.17646527e-01] [-1.21051526e+00 -4.38704205e+00 2.21323204e+00 ... 1.31401908e+00 -2.49410319e+00 -2.74904871e+00]] [[-1.66885734e+00 -2.39549160e+00 -2.85541391e+00 ... 2.74914026e-01 -3.25535798e+00 -3.38655591e-01] [-1.33239245e+00 1.24544537e+00 -1.33261621e+00 ... -2.00770521e+00 -5.85659444e-01 -2.20292664e+00] [-2.18346715e-01 -5.34251118e+00 -1.75745559e+00 ... 1.76367772e+00 1.67249644e+00 -2.90278721e+00] ... [-1.97063589e+00 -2.12377357e+00 -1.11506796e+00 ... -3.69752741e+00 -2.50511932e+00 -4.47816420e+00] [-1.17870367e+00 3.83620143e-01 -5.50573170e-01 ... 
-3.45444202e+00 -3.01699519e+00 -3.88329744e+00] [-9.97679114e-01 -3.05755234e+00 -1.81864953e+00 ... -2.25319576e+00 -2.49736214e+00 4.76870894e-01]] [[-3.26734447e+00 -4.07063007e-01 9.76887345e-01 ... -2.95582151e+00 7.16159940e-01 4.02666450e-01] [ 2.06493139e+00 1.52092016e+00 1.13572121e-01 ... -2.44299626e+00 5.00247955e-01 -3.19267559e+00] [ 9.49284673e-01 -5.56123018e+00 1.74708235e+00 ... 1.28272903e+00 -2.33137321e+00 -2.08071709e-01] ... [ 7.88182020e-02 -5.10081244e+00 -2.85722017e+00 ... 1.26971591e+00 -3.06197023e+00 -1.09984553e+00] [-5.01976371e-01 -2.16153383e+00 -6.73632979e-01 ... -1.09890640e+00 -1.32905400e+00 1.05681419e-02] [ 2.97317266e-01 -5.43910801e-01 1.71748400e-02 ... -4.14319277e+00 1.50314510e+00 -4.12465572e+00]]] [[[-6.35288894e-01 4.39082712e-01 3.40345883e+00 ... 1.14794874e+00 6.10608459e-02 -2.97993565e+00] [ 7.52537251e-01 6.47632003e-01 -1.21422857e-01 ... 1.52391529e+00 -5.43206155e-01 -1.04139590e+00] [ 1.84054208e+00 -7.93523610e-01 3.86033177e+00 ... 1.54251957e+00 -2.84286022e-01 8.91114175e-02] ... [ 1.61810040e+00 1.64091134e+00 2.61971974e+00 ... -8.82620215e-02 -1.90359855e+00 5.72027266e-01] [ 1.41167665e+00 1.04687142e+00 -8.22014153e-01 ... -1.90048361e+00 -6.56574368e-02 -1.97281027e+00] [-1.86364722e+00 -2.09842873e+00 1.93992615e+00 ... 6.89956903e-01 2.69565368e+00 1.19914269e+00]] [[-1.47115970e+00 1.47175050e+00 9.19005036e-01 ... 1.65675163e+00 -1.25274634e+00 1.74215540e-01] [-8.75621736e-01 -6.62090242e-01 2.33677506e+00 ... 4.63293701e-01 4.46772873e-02 6.59699321e-01] [ 1.47846413e+00 2.35913205e+00 2.10778666e+00 ... -4.77914512e-01 -1.43801236e+00 8.96980703e-01] ... [ 2.10221946e-01 6.28930211e-01 1.38426852e+00 ... 7.99749374e-01 1.80428100e+00 1.08092397e-01] [-1.99716878e+00 2.62729824e-02 4.15014565e-01 ... 2.90898395e+00 3.28132033e+00 2.08697319e-01] [-2.57235229e-01 1.43015981e+00 1.50120044e+00 ... 
-1.36698651e+00 -2.09050798e+00 -1.21433616e+00]] [[ 4.70064402e-01 -1.41767144e-01 1.12572026e+00 ... 1.09535491e+00 -4.17051435e-01 1.40019274e+00] [-1.83353961e-01 -2.20490098e+00 2.32389733e-01 ... -9.03211653e-01 3.35144579e-01 3.18205118e+00] [ 4.10716867e+00 -5.19094408e-01 5.07386684e-01 ... 8.17592621e-01 2.00789523e+00 1.81921458e+00] ... [-1.83882570e+00 2.50745678e+00 3.95651388e+00 ... 1.52489305e+00 -8.57820690e-01 2.19572425e+00] [-8.29570591e-01 1.71813035e+00 -2.24882770e+00 ... 1.08176613e+00 -7.86868513e-01 2.03065848e+00] [ 1.29746485e+00 4.98786300e-01 1.52809286e+00 ... -8.25242221e-01 -7.07045734e-01 -1.15551472e-01]] ... [[ 1.33944893e+00 1.06742191e+00 3.57778788e-01 ... -9.51681197e-01 1.24874544e+00 -3.12193584e+00] [ 7.56514192e-01 6.83180571e-01 -9.62909400e-01 ... 3.37413573e+00 1.81008720e+00 -8.87680233e-01] [ 4.73412842e-01 7.29308903e-01 -7.89091766e-01 ... 6.02188349e-01 6.35843158e-01 -1.68058562e+00] ... [ 3.75171155e-01 7.33583689e-01 2.60119510e+00 ... -1.11874461e+00 2.45838836e-01 2.55248338e-01] [ 1.42636108e+00 1.94127035e+00 -9.98845518e-01 ... 1.13305545e+00 3.60843992e+00 2.22320724e+00] [-1.36222768e+00 -9.12137568e-01 -2.36654687e+00 ... 1.20494461e+00 -2.40138113e-01 2.15354204e+00]] [[-4.83417094e-01 2.19293642e+00 -1.99404836e-01 ... 2.80402780e-01 -2.48545051e-01 -8.31498563e-01] [ 1.98624706e+00 -1.15146899e+00 4.86502379e-01 ... -4.13719773e-01 -1.13076735e+00 1.24606028e-01] [ 2.18469071e+00 1.22652304e+00 1.49682093e+00 ... 1.11908293e+00 6.75975978e-02 -6.53794587e-01] ... [-2.46361589e+00 1.56749201e+00 1.43738866e+00 ... 7.78554976e-02 3.45553875e-01 -3.89247513e+00] [ 9.52258110e-01 -3.36596787e-01 7.35419929e-01 ... -1.13132143e+00 -5.72231829e-01 6.83064461e-01] [-3.25587964e+00 4.51906800e-01 -4.36585009e-01 ... -4.85309422e-01 1.04736280e+00 1.40845466e+00]] [[-1.89955401e+00 -3.74936581e+00 -1.02130938e+00 ... 
-4.29661453e-01 2.37420630e+00 2.23649931e+00] [ 8.68489981e-01 -1.37909961e+00 -1.39573765e+00 ... 2.55922019e-01 1.49265170e+00 1.87192106e+00] [-8.37859213e-01 1.02272534e+00 -1.36620331e+00 ... -1.56126094e+00 -1.20882368e+00 -1.82538390e+00] ... [ 1.95002466e-01 -4.06037569e-02 -2.69645572e-01 ... 1.80103135e+00 -1.41489363e+00 -9.25734937e-01] [-8.85054648e-01 -8.90790284e-01 -1.39252925e+00 ... -2.25285292e-01 1.20706987e+00 -1.46054912e+00] [-4.61679041e-01 7.87853062e-01 6.16075397e-01 ... 2.39089894e+00 7.69502163e-01 2.44374990e+00]]] [[[-1.68128586e+00 1.07820809e-01 1.54401851e+00 ... 2.06146836e-01 3.28955501e-02 1.40902781e+00] [-2.53313398e+00 1.31846046e+00 9.67190385e-01 ... 6.61974967e-01 7.96427608e-01 1.30489886e+00] [-4.78705257e-01 -2.79324323e-01 -3.94734174e-01 ... 1.70938444e+00 9.07674432e-01 -1.10350060e+00] ... [-5.12855172e-01 7.66346812e-01 -1.25212622e+00 ... -1.51702023e+00 -2.34205067e-01 1.19939077e+00] [ 2.81908154e-01 8.55743170e-01 -1.03584278e+00 ... -1.28587937e+00 3.76389086e-01 6.11523867e-01] [-3.23846936e-02 -9.65932727e-01 2.90855139e-01 ... -9.51194167e-01 1.19199562e+00 -1.26357388e+00]] [[ 1.81487739e+00 -1.46733522e-02 3.28373045e-01 ... -8.32673550e-01 6.31727815e-01 9.70097065e-01] [ 2.09506536e+00 1.31916010e+00 1.43860805e+00 ... 2.46747231e+00 -1.91431344e-01 1.04070318e+00] [ 8.27945173e-02 3.31602502e+00 6.53769910e-01 ... -4.13113266e-01 -1.05831718e+00 9.79358315e-01] ... [ 9.20145512e-01 5.39222240e-01 -4.97821063e-01 ... -1.61540776e-01 6.36889577e-01 5.05154252e-01] [ 2.49807668e+00 -3.10395151e-01 7.12117553e-01 ... 1.64379048e+00 -1.05577517e+00 3.92680287e-01] [-6.87524974e-02 1.61984015e+00 3.16309988e-01 ... -5.86207032e-01 -9.54663396e-01 2.05598593e+00]] [[ 2.08612490e+00 -2.48973101e-01 -1.24748075e+00 ... 2.24978352e+00 -1.46609032e+00 1.97774303e+00] [ 1.95058775e+00 2.00063205e+00 9.23650384e-01 ... 
1.62701166e+00 4.73838300e-01 -1.96703315e+00] [-1.00734830e-01 -1.09647304e-01 8.54128122e-01 ... -8.56890559e-01 9.18908477e-01 7.05306590e-01] ... [ 1.00956202e+00 -2.35906863e+00 -1.88161445e+00 ... 6.63000822e-01 1.99004322e-01 -2.86343604e-01] [-9.03078675e-01 -5.33924222e-01 5.45000494e-01 ... -2.63100117e-01 -1.28801346e+00 -1.11018300e+00] [ 2.18827873e-01 2.07897320e-01 -2.95516759e-01 ... 6.73561215e-01 9.78301048e-01 1.50287843e+00]] ... [[-8.65256488e-02 3.26526940e-01 -1.38358086e-01 ... 1.13713041e-01 -3.91753227e-01 -1.15575552e-01] [ 9.64801788e-01 1.56849706e+00 1.78313041e+00 ... -7.58138657e-01 -1.34379458e+00 -1.44866133e+00] [ 2.85517484e-01 1.53903759e+00 -1.92893434e+00 ... -2.92466670e-01 -6.61197424e-01 1.60133851e+00] ... [-7.19087005e-01 -1.36563814e+00 1.08061945e+00 ... 1.24959207e+00 6.26618266e-01 1.80861843e+00] [-7.54897952e-01 -1.75025940e-01 -5.54856956e-02 ... 8.49437833e-01 2.34580234e-01 5.85263133e-01] [ 9.34272856e-02 5.73747396e-01 7.52363384e-01 ... 7.28838682e-01 -5.44364572e-01 6.11275882e-02]] [[ 1.14982212e+00 1.87736541e-01 2.02319086e-01 ... 6.38952851e-01 -9.53113735e-02 -1.04222882e+00] [-2.70016164e-01 2.26226258e+00 -3.82032096e-02 ... -1.30828559e+00 -2.63973188e+00 -6.88367724e-01] [ 1.57642052e-01 -1.73107553e+00 4.04525131e-01 ... -2.92052060e-01 2.51698828e+00 -3.11082512e-01] ... [-6.52944684e-01 7.24367261e-01 5.23734987e-02 ... 7.39447057e-01 2.11335659e-01 9.34171915e-01] [-7.77443051e-02 8.31132770e-01 -3.03554565e-01 ... 5.30247509e-01 1.83251953e+00 1.63325265e-01] [-1.17197782e-01 -1.01466286e+00 -2.27515757e-01 ... -2.91051656e-01 -5.41933179e-01 2.02674106e-01]] [[-4.64753658e-01 5.17938733e-01 8.87927294e-01 ... 3.56319129e-01 -4.68625098e-01 -9.68387485e-01] [ 1.87123418e+00 -1.03264081e+00 5.22299230e-01 ... -1.38952780e+00 1.39281905e+00 -1.14327383e+00] [-2.66341567e-02 1.74038196e+00 1.99244440e-01 ... 3.29436630e-01 1.49521559e-01 7.63210058e-01] ... 
[ 5.47589898e-01 -4.00962323e-01 -7.49136090e-01 ... -2.42948174e-01 -9.10911679e-01 7.63329148e-01] [-4.50598001e-02 1.55488491e-01 1.49460661e+00 ... 1.02679765e+00 -1.04267120e+00 2.17123175e+00] [-8.51680040e-02 7.64751911e-01 -7.58586526e-02 ... 9.88738835e-02 9.76540089e-01 1.96221876e+00]]]]]; ov_res: [[[[[ 2.06420824e-01 -2.01051211e+00 -4.71369553e+00 ... -1.44680834e+00 -2.91030979e+00 -1.71500814e+00] [-1.07400262e+00 -1.43487084e+00 -2.85120726e+00 ... -1.02990139e+00 -2.15515614e+00 -1.18915945e-01] [-2.11515117e+00 1.13650942e+00 1.73040599e-01 ... -7.28784561e-01 1.11699879e+00 -5.00919533e+00] ... [-9.54035342e-01 -4.00701046e+00 -3.58553720e+00 ... -9.59906936e-01 -1.30914783e+00 -2.69444823e+00] [-3.76715660e-01 -7.79900968e-01 -2.92911506e+00 ... -1.19822049e+00 -3.33454013e+00 -1.51947892e+00] [-1.27057874e+00 -1.27345049e+00 -6.08950710e+00 ... 8.17112565e-01 -4.16961288e+00 -1.46326530e+00]] [[-8.37414622e-01 -1.02362335e+00 5.04371226e-01 ... -7.86953330e-01 -5.78776300e-01 -9.58824903e-02] [-4.21416426e+00 -3.73807698e-02 -2.32052350e+00 ... -4.65627098e+00 8.30377877e-01 -2.67353368e+00] [-1.04260302e+00 -2.92551661e+00 2.74810344e-01 ... -1.72154546e+00 4.64196682e-01 -2.16164231e+00] ... [-2.52536750e+00 -2.07626200e+00 -9.28745151e-01 ... -6.78623259e-01 -2.40059853e+00 -2.53078604e+00] [-9.16561186e-01 -8.53178322e-01 -3.23097658e+00 ... -2.48621440e+00 3.59340966e-01 -1.77681696e+00] [-3.27020001e+00 -2.67804074e+00 -7.43174076e-01 ... -2.32414663e-01 -1.43111062e+00 2.24749398e+00]] [[-4.40319920e+00 -3.25318384e+00 -2.02481866e+00 ... -2.04295850e+00 5.33062696e-01 -1.00177419e+00] [-1.57728434e+00 -1.10463333e+00 -1.38311481e+00 ... -5.56013942e-01 -1.52580404e+00 -3.23863506e+00] [-4.47014570e-01 -2.44758368e+00 7.95265198e-01 ... -1.63268852e+00 1.74647182e-01 -1.29411423e+00] ... [-9.89676237e-01 1.44401491e+00 -1.41491556e+00 ... -2.78657913e+00 -1.93762016e+00 -5.37134886e-01] [-1.39414310e+00 -3.91690826e+00 2.51145571e-01 ... 
-3.59162045e+00 -4.11381572e-01 -4.00969172e+00] [-1.21978700e+00 2.23230361e-03 1.73586082e+00 ... -1.37508512e+00 -2.13627386e+00 -1.62969089e+00]] ... [[-2.45490050e+00 -3.18389416e+00 -7.09424973e-01 ... -3.51429969e-01 -1.62071514e+00 -2.90334463e+00] [-2.60404873e+00 1.02895391e+00 3.05126667e+00 ... -2.46516156e+00 -2.16189876e-01 -1.93368948e+00] [-1.77240705e+00 -9.22896802e-01 -1.85009205e+00 ... -2.59697199e+00 -1.19184160e+00 -4.38140297e+00] ... [-3.08310628e-01 -3.52020144e+00 -2.29793668e+00 ... -1.42707491e+00 3.18806916e-02 -2.78469253e+00] [-1.67322695e+00 -1.20182025e+00 -2.44905186e+00 ... 4.09242749e-01 -7.98178077e-01 4.17646468e-01] [-1.21051526e+00 -4.38704205e+00 2.21323204e+00 ... 1.31401908e+00 -2.49410319e+00 -2.74904871e+00]] [[-1.66885734e+00 -2.39549160e+00 -2.85541415e+00 ... 2.74913996e-01 -3.25535798e+00 -3.38655591e-01] [-1.33239245e+00 1.24544549e+00 -1.33261621e+00 ... -2.00770521e+00 -5.85659444e-01 -2.20292664e+00] [-2.18346760e-01 -5.34251118e+00 -1.75745559e+00 ... 1.76367772e+00 1.67249656e+00 -2.90278721e+00] ... [-1.97063589e+00 -2.12377357e+00 -1.11506796e+00 ... -3.69752741e+00 -2.50511932e+00 -4.47816420e+00] [-1.17870367e+00 3.83620143e-01 -5.50573170e-01 ... -3.45444202e+00 -3.01699519e+00 -3.88329744e+00] [-9.97679114e-01 -3.05755258e+00 -1.81864965e+00 ... -2.25319576e+00 -2.49736214e+00 4.76870835e-01]] [[-3.26734447e+00 -4.07063037e-01 9.76887465e-01 ... -2.95582151e+00 7.16159880e-01 4.02666479e-01] [ 2.06493163e+00 1.52092016e+00 1.13572113e-01 ... -2.44299626e+00 5.00247955e-01 -3.19267559e+00] [ 9.49284673e-01 -5.56123018e+00 1.74708235e+00 ... 1.28272903e+00 -2.33137321e+00 -2.08071664e-01] ... [ 7.88181871e-02 -5.10081244e+00 -2.85722017e+00 ... 1.26971579e+00 -3.06197023e+00 -1.09984553e+00] [-5.01976371e-01 -2.16153359e+00 -6.73632979e-01 ... -1.09890640e+00 -1.32905400e+00 1.05681932e-02] [ 2.97317237e-01 -5.43910801e-01 1.71748064e-02 ... 
-4.14319277e+00 1.50314510e+00 -4.12465572e+00]]] [[[-6.35288954e-01 4.39082712e-01 3.40345883e+00 ... 1.14794874e+00 6.10608459e-02 -2.97993565e+00] [ 7.52537251e-01 6.47632062e-01 -1.21422850e-01 ... 1.52391541e+00 -5.43206155e-01 -1.04139578e+00] [ 1.84054208e+00 -7.93523550e-01 3.86033154e+00 ... 1.54251957e+00 -2.84286052e-01 8.91114175e-02] ... [ 1.61810029e+00 1.64091134e+00 2.61971974e+00 ... -8.82620364e-02 -1.90359867e+00 5.72027266e-01] [ 1.41167665e+00 1.04687142e+00 -8.22014213e-01 ... -1.90048361e+00 -6.56574294e-02 -1.97281039e+00] [-1.86364722e+00 -2.09842873e+00 1.93992603e+00 ... 6.89956844e-01 2.69565368e+00 1.19914269e+00]] [[-1.47115958e+00 1.47175062e+00 9.19005036e-01 ... 1.65675163e+00 -1.25274634e+00 1.74215540e-01] [-8.75621736e-01 -6.62090182e-01 2.33677506e+00 ... 4.63293701e-01 4.46772799e-02 6.59699321e-01] [ 1.47846413e+00 2.35913205e+00 2.10778666e+00 ... -4.77914512e-01 -1.43801236e+00 8.96980703e-01] ... [ 2.10221946e-01 6.28930271e-01 1.38426852e+00 ... 7.99749434e-01 1.80428088e+00 1.08092397e-01] [-1.99716890e+00 2.62729712e-02 4.15014565e-01 ... 2.90898395e+00 3.28132033e+00 2.08697319e-01] [-2.57235199e-01 1.43015981e+00 1.50120032e+00 ... -1.36698639e+00 -2.09050822e+00 -1.21433616e+00]] [[ 4.70064431e-01 -1.41767144e-01 1.12572026e+00 ... 1.09535491e+00 -4.17051405e-01 1.40019286e+00] [-1.83353946e-01 -2.20490122e+00 2.32389733e-01 ... -9.03211594e-01 3.35144579e-01 3.18205094e+00] [ 4.10716915e+00 -5.19094408e-01 5.07386684e-01 ... 8.17592680e-01 2.00789523e+00 1.81921470e+00] ... [-1.83882582e+00 2.50745678e+00 3.95651388e+00 ... 1.52489305e+00 -8.57820690e-01 2.19572425e+00] [-8.29570651e-01 1.71813035e+00 -2.24882793e+00 ... 1.08176613e+00 -7.86868513e-01 2.03065848e+00] [ 1.29746485e+00 4.98786300e-01 1.52809274e+00 ... -8.25242221e-01 -7.07045674e-01 -1.15551479e-01]] ... [[ 1.33944905e+00 1.06742179e+00 3.57778788e-01 ... 
-9.51681256e-01 1.24874532e+00 -3.12193608e+00] [ 7.56514251e-01 6.83180571e-01 -9.62909400e-01 ... 3.37413573e+00 1.81008720e+00 -8.87680292e-01] [ 4.73412842e-01 7.29308903e-01 -7.89091706e-01 ... 6.02188349e-01 6.35843158e-01 -1.68058574e+00] ... [ 3.75171155e-01 7.33583689e-01 2.60119486e+00 ... -1.11874461e+00 2.45838836e-01 2.55248338e-01] [ 1.42636120e+00 1.94127047e+00 -9.98845518e-01 ... 1.13305545e+00 3.60843992e+00 2.22320724e+00] [-1.36222756e+00 -9.12137508e-01 -2.36654687e+00 ... 1.20494461e+00 -2.40138128e-01 2.15354204e+00]] [[-4.83417064e-01 2.19293642e+00 -1.99404821e-01 ... 2.80402780e-01 -2.48545036e-01 -8.31498563e-01] [ 1.98624706e+00 -1.15146899e+00 4.86502379e-01 ... -4.13719773e-01 -1.13076723e+00 1.24606028e-01] [ 2.18469071e+00 1.22652304e+00 1.49682105e+00 ... 1.11908293e+00 6.75975978e-02 -6.53794527e-01] ... [-2.46361613e+00 1.56749213e+00 1.43738866e+00 ... 7.78554976e-02 3.45553875e-01 -3.89247513e+00] [ 9.52258110e-01 -3.36596757e-01 7.35419929e-01 ... -1.13132143e+00 -5.72231829e-01 6.83064461e-01] [-3.25587964e+00 4.51906800e-01 -4.36585039e-01 ... -4.85309422e-01 1.04736292e+00 1.40845466e+00]] [[-1.89955413e+00 -3.74936581e+00 -1.02130949e+00 ... -4.29661483e-01 2.37420630e+00 2.23649931e+00] [ 8.68489921e-01 -1.37909961e+00 -1.39573753e+00 ... 2.55921990e-01 1.49265170e+00 1.87192118e+00] [-8.37859154e-01 1.02272522e+00 -1.36620331e+00 ... -1.56126094e+00 -1.20882380e+00 -1.82538402e+00] ... [ 1.95002466e-01 -4.06037606e-02 -2.69645602e-01 ... 1.80103123e+00 -1.41489363e+00 -9.25734937e-01] [-8.85054648e-01 -8.90790284e-01 -1.39252925e+00 ... -2.25285292e-01 1.20706975e+00 -1.46054912e+00] [-4.61679041e-01 7.87853062e-01 6.16075456e-01 ... 2.39089894e+00 7.69502163e-01 2.44374990e+00]]] [[[-1.68128586e+00 1.07820801e-01 1.54401839e+00 ... 2.06146821e-01 3.28955576e-02 1.40902781e+00] [-2.53313398e+00 1.31846046e+00 9.67190385e-01 ... 
6.61974967e-01 7.96427608e-01 1.30489886e+00] [-4.78705227e-01 -2.79324323e-01 -3.94734144e-01 ... 1.70938432e+00 9.07674491e-01 -1.10350072e+00] ... [-5.12855172e-01 7.66346812e-01 -1.25212622e+00 ... -1.51702023e+00 -2.34205052e-01 1.19939077e+00] [ 2.81908154e-01 8.55743170e-01 -1.03584278e+00 ... -1.28587937e+00 3.76389086e-01 6.11523867e-01] [-3.23846936e-02 -9.65932727e-01 2.90855139e-01 ... -9.51194167e-01 1.19199562e+00 -1.26357388e+00]] [[ 1.81487739e+00 -1.46733597e-02 3.28373045e-01 ... -8.32673550e-01 6.31727815e-01 9.70097005e-01] [ 2.09506536e+00 1.31916010e+00 1.43860805e+00 ... 2.46747231e+00 -1.91431329e-01 1.04070318e+00] [ 8.27945173e-02 3.31602502e+00 6.53769910e-01 ... -4.13113296e-01 -1.05831718e+00 9.79358256e-01] ... [ 9.20145571e-01 5.39222240e-01 -4.97821093e-01 ... -1.61540762e-01 6.36889577e-01 5.05154252e-01] [ 2.49807668e+00 -3.10395151e-01 7.12117612e-01 ... 1.64379048e+00 -1.05577517e+00 3.92680258e-01] [-6.87524825e-02 1.61984015e+00 3.16309988e-01 ... -5.86206973e-01 -9.54663396e-01 2.05598593e+00]] [[ 2.08612490e+00 -2.48973101e-01 -1.24748075e+00 ... 2.24978352e+00 -1.46609032e+00 1.97774303e+00] [ 1.95058775e+00 2.00063205e+00 9.23650384e-01 ... 1.62701166e+00 4.73838300e-01 -1.96703315e+00] [-1.00734830e-01 -1.09647289e-01 8.54128063e-01 ... -8.56890559e-01 9.18908477e-01 7.05306590e-01] ... [ 1.00956202e+00 -2.35906863e+00 -1.88161457e+00 ... 6.63000822e-01 1.99004322e-01 -2.86343575e-01] [-9.03078675e-01 -5.33924222e-01 5.45000494e-01 ... -2.63100117e-01 -1.28801346e+00 -1.11018300e+00] [ 2.18827873e-01 2.07897320e-01 -2.95516759e-01 ... 6.73561215e-01 9.78301108e-01 1.50287831e+00]] ... [[-8.65256637e-02 3.26526940e-01 -1.38358071e-01 ... 1.13713048e-01 -3.91753227e-01 -1.15575559e-01] [ 9.64801729e-01 1.56849706e+00 1.78313041e+00 ... -7.58138716e-01 -1.34379458e+00 -1.44866145e+00] [ 2.85517484e-01 1.53903759e+00 -1.92893445e+00 ... -2.92466700e-01 -6.61197484e-01 1.60133839e+00] ... 
[-7.19087005e-01 -1.36563826e+00 1.08061945e+00 ... 1.24959207e+00 6.26618266e-01 1.80861843e+00] [-7.54897952e-01 -1.75025925e-01 -5.54856956e-02 ... 8.49437773e-01 2.34580234e-01 5.85263133e-01] [ 9.34272781e-02 5.73747456e-01 7.52363384e-01 ... 7.28838682e-01 -5.44364572e-01 6.11275919e-02]] [[ 1.14982212e+00 1.87736526e-01 2.02319086e-01 ... 6.38952851e-01 -9.53113735e-02 -1.04222894e+00] [-2.70016134e-01 2.26226234e+00 -3.82032171e-02 ... -1.30828559e+00 -2.63973188e+00 -6.88367665e-01] [ 1.57642052e-01 -1.73107553e+00 4.04525131e-01 ... -2.92052060e-01 2.51698804e+00 -3.11082512e-01] ... [-6.52944624e-01 7.24367261e-01 5.23734950e-02 ... 7.39447057e-01 2.11335659e-01 9.34171915e-01] [-7.77442977e-02 8.31132770e-01 -3.03554565e-01 ... 5.30247509e-01 1.83251941e+00 1.63325265e-01] [-1.17197767e-01 -1.01466286e+00 -2.27515757e-01 ... -2.91051626e-01 -5.41933119e-01 2.02674106e-01]] [[-4.64753658e-01 5.17938733e-01 8.87927353e-01 ... 3.56319129e-01 -4.68625069e-01 -9.68387544e-01] [ 1.87123418e+00 -1.03264081e+00 5.22299230e-01 ... -1.38952792e+00 1.39281893e+00 -1.14327383e+00] [-2.66341530e-02 1.74038196e+00 1.99244440e-01 ... 3.29436630e-01 1.49521574e-01 7.63209999e-01] ... [ 5.47589898e-01 -4.00962323e-01 -7.49136090e-01 ... -2.42948160e-01 -9.10911739e-01 7.63329148e-01] [-4.50597927e-02 1.55488491e-01 1.49460661e+00 ... 1.02679765e+00 -1.04267120e+00 2.17123199e+00] [-8.51679966e-02 7.64751852e-01 -7.58586451e-02 ... 9.88738835e-02 9.76540089e-01 1.96221864e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:True - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': 'valid', 'dilations': [2, 1, 2], 'groups': 3} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1143.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 1, 2]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : Float(3, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.0159 -0.5702 1.7325 [ CPUFloatType{3} ]]() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.5092 (2,1,1,.,.) = 1.1111 (3,1,1,.,.) = -0.5969 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[[-1.7949369 0.6260039 0.7299018 ... 1.129341 0.6414738 0.06665911] [-0.10323544 1.1221548 0.6260639 ... 0.86325413 -1.6730216 -1.9367783 ] [ 0.34976152 0.58439386 -0.33842102 ... -2.0677173 0.07114715 2.9942086 ] ... [ 1.5406209 -1.5555803 -0.52242476 ... -0.51544744 -2.44472 -0.6703823 ] [ 0.23703764 0.594786 -0.94698876 ... -0.598596 -0.40680397 -0.8131779 ] [ 3.000027 2.891218 1.654489 ... 0.9583334 1.6852916 -2.4007714 ]] [[ 2.86423 0.9118961 2.23245 ... 0.7911008 1.0535876 -1.1570072 ] [-1.0334443 0.6285525 -2.6461487 ... -1.9621853 -0.33188435 2.0730665 ] [ 0.26422226 -0.1191819 0.2945332 ... -0.24945949 0.66076344 -1.6449146 ] ... [ 0.6809964 -1.1026443 -0.5333609 ... -1.6464539 -1.4117991 -1.6818141 ] [ 0.43635958 -2.5146804 0.03087946 ... -0.06154486 2.364456 -0.17090666] [-2.4369292 -0.7693327 -0.8769297 ... 0.50460094 -1.6540245 -2.794051 ]] [[-0.48941693 1.0471385 -2.047919 ... -0.04561664 1.6430972 -0.8448601 ] [ 0.17075871 -0.6728889 -0.6372948 ... 
-2.490401 -1.1510395 -1.2865335 ] [-0.2751564 -0.5418193 0.17770073 ... -0.99916637 1.4164442 -0.7130871 ] ... [-1.9230803 1.4498523 -3.2940116 ... -1.186675 1.6195563 -1.724098 ] [-1.6260563 -1.0477488 -0.07696556 ... 0.44098735 -0.97980654 2.1466067 ] [ 0.7416592 -0.16841742 -1.8135943 ... -0.37423632 -0.04456642 0.08215058]] ... [[-0.3624931 0.0978952 -1.2226956 ... 3.683126 0.9637923 4.2373104 ] [-1.5185899 0.4339922 -1.162253 ... 0.7252159 0.19926241 0.8381684 ] [-1.5236074 1.1079522 0.23099262 ... 0.12552497 -0.43861064 1.6945231 ] ... [ 1.6514399 0.75844926 -1.1984779 ... 1.7494345 -0.30442536 -1.2970954 ] [ 0.5990233 0.38829643 1.1037729 ... 2.797183 -0.38472655 -2.8540952 ] [-0.64982533 2.084012 0.19810721 ... -2.4791996 0.97617596 2.0489225 ]] [[ 3.615869 0.09210512 0.67137516 ... 0.51752514 -1.364868 0.64898914] [ 1.9154935 -1.2570729 -0.8771733 ... -2.403189 -0.47530615 -1.2746471 ] [ 0.14579289 2.4129987 0.6441283 ... 2.195172 0.4068256 -0.8262298 ] ... [-0.9278129 2.454176 0.78705513 ... -0.85698754 -0.03078121 -0.04052171] [ 0.7153988 0.51502466 0.10734032 ... 2.7273831 -3.061286 -0.7804061 ] [ 0.3039068 -1.6331328 0.06828688 ... -1.8879356 1.1188905 -0.08426115]] [[ 0.5875494 1.7412255 -0.6728531 ... -0.72479236 0.90733534 -0.23030171] [ 0.17460144 2.1757834 -0.18068737 ... -1.0965114 -1.197465 3.2094905 ] [-2.7251282 -2.0812714 -0.40585348 ... -3.234259 1.6939646 -0.24307583] ... [-0.95969963 0.77871925 -1.634029 ... 1.1335443 -2.4198358 -0.36408815] [-1.5936863 -1.1901518 0.639297 ... 0.97049695 -0.842302 -0.07404692] [ 1.4924558 -0.57890475 0.24564146 ... 0.16506691 -1.7269168 -1.8956517 ]]] [[[-2.3169322 0.38551086 -1.221929 ... 1.257669 0.05969954 -2.1143134 ] [ 0.330274 0.65820646 -0.9851176 ... -0.13571855 -0.06448632 -0.742259 ] [ 1.0036029 -1.0050864 -1.6036063 ... 0.9329388 -1.7593743 -0.9808339 ] ... [-0.791997 -0.8422768 -0.6035723 ... -1.660235 -0.8690134 -1.604253 ] [-1.3734261 0.34174526 0.0098424 ... 
-0.7186328 1.4564457 1.2723217 ] [-0.8626535 -1.1793096 0.30707014 ... -1.9266647 -2.1184318 0.96691513]] [[-1.3946726 -1.3269162 -2.1098757 ... -1.6377913 -0.69129145 -0.32594907] [ 0.15743613 0.5085975 0.9114864 ... -1.0401882 -0.21770936 -0.764915 ] [-0.8853832 -0.30212367 0.6466558 ... -1.2070017 0.75257266 2.2101119 ] ... [-2.234551 0.8916346 -1.1734203 ... 0.4462502 -1.7088569 -1.2883384 ] [ 0.3144341 -1.6732162 -1.1402054 ... 0.20601976 -1.5946655 -1.7326905 ] [-0.12541282 0.7288699 -0.74042696 ... -1.6783901 1.3318518 -1.8736573 ]] [[ 1.5038767 -0.8276831 -0.39208645 ... -0.52187264 -0.1607554 -0.79235446] [-0.28173178 -1.5484257 -0.37952408 ... 1.3708326 -0.62946045 -0.5892005 ] [-0.37921566 0.29199934 -2.3541179 ... -3.0262523 1.7937992 0.22690213] ... [-1.4180125 -1.4437063 -2.7579107 ... -0.77153206 -1.8944913 0.8260939 ] [ 2.2034965 1.5647304 -1.4037786 ... -0.56791896 -1.8314785 -1.8644854 ] [-1.5109065 -1.644116 0.54310274 ... 0.41031134 0.30953097 0.93867576]] ... [[ 0.747187 -0.23304322 0.6844028 ... -0.27555746 0.5314729 -1.683115 ] [-1.0088229 -0.45333347 0.5346581 ... -1.2252802 -1.4116434 -1.6586946 ] [-0.33029842 -1.1697168 -1.6035745 ... -1.6074865 -0.94216704 0.7278136 ] ... [-0.3345648 -2.2510104 0.8175683 ... -1.5747297 -1.4576597 -0.23274338] [-1.2249653 -0.21452072 -1.6969998 ... 1.0692008 -1.6546772 -0.97747934] [-1.9010409 0.38674408 -2.2208266 ... 0.06887883 -2.2281456 -0.08210802]] [[-2.187415 -1.7022415 0.3382743 ... -0.2044715 1.4471614 -0.35073414] [ 0.15087432 -0.22664306 0.24543941 ... 0.10083598 0.14647573 -0.5773635 ] [-0.53858775 -0.9132901 -1.2816436 ... 0.33137667 1.5050223 -1.447989 ] ... [-2.5187645 -1.9680809 -0.9390235 ... -0.4210316 -1.2474517 -0.3878801 ] [-0.00625348 -0.8932452 -1.8495252 ... -1.8130742 -1.3578286 -2.1957254 ] [-1.7550441 -1.250073 -0.8020125 ... -1.4799886 0.36832887 -0.16564965]] [[-0.7285876 -0.9566276 -2.0688143 ... -2.0052276 -0.9631852 -2.1761117 ] [-0.43738967 -1.4177387 -0.66918254 ... 
-1.1662194 -0.08176562 -0.14280263] [-0.6471272 -0.8358772 -1.8278756 ... -1.2045887 -0.3225897 1.1411998 ] ... [ 0.51397824 -0.8268924 1.3955758 ... -0.7099316 1.8558621 0.4670186 ] [ 0.22991693 0.9763688 -1.3959982 ... 0.03613418 0.46386337 -1.3177693 ] [-0.3989975 0.31959945 -1.6078011 ... -0.16324517 0.05985665 -0.0438379 ]]] [[[ 0.68828785 1.9502665 0.7879442 ... 2.056684 1.7512058 1.8437005 ] [ 1.582671 1.7506281 1.4587711 ... 1.5727516 1.9704548 1.8296907 ] [ 2.285747 2.448176 2.6039386 ... 2.014952 1.3077508 1.6705108 ] ... [ 1.8395162 1.5120555 1.4093521 ... 2.4899888 2.131661 2.2473812 ] [ 2.5369296 1.3391904 2.5446618 ... 0.96776205 1.1076607 1.736516 ] [ 1.9565252 2.8249898 0.6505619 ... 2.2526221 1.9503841 1.8986362 ]] [[ 1.5952446 2.332613 1.2391424 ... 1.3244188 2.1796222 1.4875124 ] [ 1.4633545 0.94226104 2.563573 ... 1.999833 1.6921107 1.1410017 ] [ 1.8475926 1.4818996 1.5161328 ... 1.8333371 1.4928403 2.2339191 ] ... [ 2.0774827 1.6144646 0.8522896 ... 1.9449098 1.8881745 0.78445923] [ 1.4612434 3.3237185 2.0024996 ... 2.2463791 0.9463748 2.412186 ] [ 1.9445375 1.0984788 1.0102797 ... 2.458708 0.4673072 1.7490705 ]] [[ 1.5778952 2.1189144 0.9793179 ... 2.2016783 1.3201585 1.5704169 ] [ 1.6923985 1.9938467 1.1167777 ... 1.6539505 2.6840937 1.5237834 ] [ 1.688436 1.6341969 1.0497417 ... 0.92727506 1.333119 2.562757 ] ... [ 1.9438665 0.7252915 1.1210644 ... 1.640153 1.8760693 1.266298 ] [ 0.9171481 2.455586 1.2507505 ... 0.9001891 1.633659 0.6687653 ] [ 0.9816728 2.7769055 1.2241336 ... 3.8232307 2.6061769 1.9636642 ]] ... [[ 1.4586253 1.8337046 1.9798896 ... 1.1244473 2.16475 1.9332747 ] [ 1.2662274 1.3060901 2.3772333 ... 1.8727658 2.0752983 1.1463366 ] [ 2.1844273 1.2292264 1.4756334 ... 2.01856 1.519515 0.6893126 ] ... [ 1.2348789 2.7653852 1.7850958 ... 1.9170209 0.95501626 0.8296107 ] [ 2.8085637 1.7423382 1.6936744 ... 1.6772705 1.5956874 1.4181573 ] [ 0.8145808 2.850737 1.7913885 ... 
2.1318288 1.586301 1.8356109 ]] [[ 1.8659159 1.9553674 1.7606597 ... 2.611534 2.0885224 2.4573245 ] [ 1.3995928 2.1387613 2.2248616 ... 2.152504 1.7653854 1.6089772 ] [ 1.7931905 2.5470984 2.107224 ... 1.4486215 1.7038894 2.0774605 ] ... [ 1.8259611 1.5901911 2.3416677 ... 1.305389 1.6127928 0.8223152 ] [ 0.89043176 1.7892973 2.4557824 ... 1.3272978 2.6541007 2.0197437 ] [ 2.2750773 1.0192537 1.7335243 ... 2.8976812 1.5026764 1.9728497 ]] [[ 1.1644685 2.4239457 2.3576617 ... 2.70808 1.9501048 1.8701926 ] [ 1.5474193 2.1615057 2.3194497 ... 1.0159075 1.2666546 1.2602196 ] [ 1.4899323 2.2684155 1.4382138 ... 1.4053055 1.9333686 3.0753212 ] ... [ 1.052181 2.9523962 1.8395289 ... 2.560134 2.026153 1.2843146 ] [ 1.6888999 1.1088552 1.1155198 ... 2.1499057 2.1569443 1.0742416 ] [ 2.5106354 1.6719615 1.8537825 ... 2.1781185 2.9133582 2.731684 ]]]]]; ov_res: [[[[[-1.794937 0.6260039 0.72990173 ... 1.129341 0.6414738 0.06665911] [-0.10323544 1.1221548 0.6260638 ... 0.8632541 -1.6730216 -1.9367784 ] [ 0.34976152 0.58439386 -0.33842102 ... -2.0677173 0.07114715 2.9942088 ] ... [ 1.5406209 -1.5555803 -0.52242476 ... -0.51544744 -2.4447198 -0.6703823 ] [ 0.23703763 0.594786 -0.94698876 ... -0.59859604 -0.40680394 -0.8131779 ] [ 3.000027 2.8912182 1.654489 ... 0.9583333 1.6852916 -2.4007714 ]] [[ 2.8642302 0.91189605 2.23245 ... 0.7911008 1.0535876 -1.1570072 ] [-1.0334443 0.6285525 -2.6461487 ... -1.9621853 -0.33188435 2.0730665 ] [ 0.26422226 -0.11918191 0.29453322 ... -0.24945949 0.66076344 -1.6449146 ] ... [ 0.68099636 -1.1026443 -0.5333609 ... -1.6464539 -1.4117991 -1.6818141 ] [ 0.4363596 -2.5146801 0.03087946 ... -0.06154485 2.364456 -0.17090666] [-2.4369292 -0.7693327 -0.87692976 ... 0.50460094 -1.6540245 -2.794051 ]] [[-0.48941693 1.0471385 -2.0479188 ... -0.04561664 1.6430972 -0.8448601 ] [ 0.17075871 -0.6728889 -0.6372948 ... -2.4904008 -1.1510395 -1.2865335 ] [-0.27515638 -0.5418193 0.17770073 ... -0.99916637 1.4164442 -0.7130871 ] ... 
[-1.9230804 1.4498523 -3.2940116 ... -1.1866751 1.6195563 -1.724098 ] [-1.6260563 -1.0477488 -0.07696556 ... 0.44098735 -0.97980654 2.146607 ] [ 0.7416592 -0.16841742 -1.8135945 ... -0.37423632 -0.04456642 0.08215057]] ... [[-0.3624931 0.0978952 -1.2226956 ... 3.683126 0.9637923 4.2373104 ] [-1.5185899 0.43399224 -1.1622531 ... 0.7252159 0.19926241 0.8381684 ] [-1.5236074 1.1079522 0.23099262 ... 0.12552497 -0.43861064 1.6945231 ] ... [ 1.6514399 0.75844926 -1.1984779 ... 1.7494345 -0.30442536 -1.2970954 ] [ 0.5990233 0.38829643 1.1037728 ... 2.7971833 -0.38472655 -2.854095 ] [-0.6498254 2.0840123 0.19810721 ... -2.4791996 0.97617596 2.0489225 ]] [[ 3.615869 0.09210512 0.6713751 ... 0.5175251 -1.364868 0.64898914] [ 1.9154935 -1.2570729 -0.87717336 ... -2.4031887 -0.47530612 -1.2746471 ] [ 0.14579289 2.4129987 0.6441283 ... 2.1951723 0.4068256 -0.8262298 ] ... [-0.9278129 2.454176 0.78705513 ... -0.85698754 -0.03078121 -0.04052171] [ 0.7153988 0.5150246 0.10734032 ... 2.7273831 -3.061286 -0.7804062 ] [ 0.30390683 -1.6331328 0.06828687 ... -1.8879356 1.1188905 -0.08426115]] [[ 0.5875493 1.7412255 -0.6728531 ... -0.72479236 0.9073353 -0.23030171] [ 0.17460144 2.1757834 -0.18068737 ... -1.0965115 -1.197465 3.2094908 ] [-2.725128 -2.0812714 -0.40585345 ... -3.2342587 1.6939646 -0.24307583] ... [-0.95969963 0.77871925 -1.634029 ... 1.1335443 -2.4198356 -0.36408812] [-1.5936863 -1.1901518 0.639297 ... 0.97049695 -0.842302 -0.07404692] [ 1.4924557 -0.57890475 0.24564146 ... 0.16506691 -1.7269168 -1.8956517 ]]] [[[-2.3169324 0.38551086 -1.221929 ... 1.257669 0.05969957 -2.1143134 ] [ 0.330274 0.65820646 -0.9851176 ... -0.13571857 -0.06448631 -0.742259 ] [ 1.0036029 -1.0050864 -1.6036063 ... 0.9329388 -1.7593743 -0.9808339 ] ... [-0.791997 -0.8422769 -0.6035723 ... -1.660235 -0.8690134 -1.604253 ] [-1.3734261 0.34174526 0.00984241 ... -0.7186328 1.4564458 1.2723217 ] [-0.8626535 -1.1793095 0.30707014 ... 
-1.9266647 -2.1184318 0.9669151 ]] [[-1.3946726 -1.3269162 -2.109876 ... -1.6377913 -0.69129145 -0.32594904] [ 0.15743612 0.50859743 0.91148645 ... -1.0401882 -0.21770938 -0.76491505] [-0.8853832 -0.30212367 0.64665586 ... -1.2070018 0.7525726 2.2101119 ] ... [-2.234551 0.89163464 -1.1734203 ... 0.44625017 -1.7088569 -1.2883384 ] [ 0.3144341 -1.6732162 -1.1402055 ... 0.20601977 -1.5946655 -1.7326905 ] [-0.12541282 0.7288699 -0.74042696 ... -1.6783901 1.3318518 -1.8736573 ]] [[ 1.5038767 -0.8276831 -0.39208648 ... -0.52187264 -0.1607554 -0.79235446] [-0.28173178 -1.5484257 -0.37952408 ... 1.3708326 -0.62946045 -0.5892005 ] [-0.37921566 0.29199937 -2.3541176 ... -3.0262523 1.7937992 0.22690216] ... [-1.4180125 -1.4437064 -2.7579107 ... -0.7715321 -1.8944913 0.826094 ] [ 2.2034965 1.5647304 -1.4037786 ... -0.56791896 -1.8314785 -1.8644854 ] [-1.5109065 -1.644116 0.54310274 ... 0.41031134 0.30953097 0.9386757 ]] ... [[ 0.747187 -0.23304322 0.6844028 ... -0.27555746 0.5314729 -1.683115 ] [-1.0088229 -0.45333347 0.5346581 ... -1.2252802 -1.4116434 -1.6586946 ] [-0.33029842 -1.1697168 -1.6035745 ... -1.6074865 -0.9421671 0.7278136 ] ... [-0.3345648 -2.2510104 0.81756836 ... -1.5747297 -1.4576597 -0.23274337] [-1.2249655 -0.21452072 -1.6969998 ... 1.0692008 -1.6546772 -0.9774793 ] [-1.9010409 0.38674408 -2.2208266 ... 0.06887882 -2.2281456 -0.08210801]] [[-2.187415 -1.7022415 0.33827427 ... -0.2044715 1.4471613 -0.35073414] [ 0.1508743 -0.22664306 0.24543941 ... 0.10083595 0.14647573 -0.57736355] [-0.53858775 -0.9132901 -1.2816436 ... 0.33137667 1.5050223 -1.447989 ] ... [-2.5187643 -1.9680809 -0.93902344 ... -0.42103156 -1.2474517 -0.3878801 ] [-0.00625351 -0.8932452 -1.8495252 ... -1.8130742 -1.3578285 -2.1957254 ] [-1.7550441 -1.250073 -0.8020125 ... -1.4799887 0.3683289 -0.16564967]] [[-0.7285876 -0.9566276 -2.0688143 ... -2.0052276 -0.96318525 -2.1761117 ] [-0.43738967 -1.4177387 -0.66918254 ... -1.1662194 -0.08176561 -0.14280263] [-0.6471272 -0.8358772 -1.8278756 ... 
-1.2045887 -0.3225897 1.1411998 ] ... [ 0.5139783 -0.8268924 1.3955758 ... -0.7099316 1.8558621 0.46701857] [ 0.22991696 0.9763688 -1.3959982 ... 0.03613418 0.46386337 -1.3177693 ] [-0.39899752 0.31959945 -1.6078011 ... -0.16324519 0.05985666 -0.04383788]]] [[[ 0.6882879 1.9502665 0.7879442 ... 2.056684 1.7512058 1.8437005 ] [ 1.582671 1.7506281 1.4587711 ... 1.5727516 1.9704548 1.8296907 ] [ 2.285747 2.4481757 2.6039386 ... 2.014952 1.3077508 1.6705108 ] ... [ 1.8395162 1.5120555 1.4093521 ... 2.4899888 2.131661 2.2473814 ] [ 2.5369296 1.3391904 2.5446618 ... 0.96776205 1.1076607 1.736516 ] [ 1.9565252 2.8249898 0.650562 ... 2.252622 1.9503841 1.8986362 ]] [[ 1.5952446 2.332613 1.2391424 ... 1.3244188 2.1796222 1.4875124 ] [ 1.4633545 0.94226104 2.5635726 ... 1.999833 1.6921107 1.1410018 ] [ 1.8475926 1.4818996 1.5161328 ... 1.8333371 1.4928403 2.2339191 ] ... [ 2.0774827 1.6144646 0.8522896 ... 1.9449098 1.8881744 0.78445923] [ 1.4612434 3.3237185 2.0024998 ... 2.2463791 0.9463748 2.412186 ] [ 1.9445375 1.0984789 1.0102797 ... 2.458708 0.46730718 1.7490705 ]] [[ 1.5778952 2.1189144 0.9793179 ... 2.2016783 1.3201585 1.5704169 ] [ 1.6923985 1.9938467 1.1167777 ... 1.6539505 2.6840937 1.5237834 ] [ 1.688436 1.6341969 1.0497417 ... 0.92727506 1.333119 2.562757 ] ... [ 1.9438664 0.72529155 1.1210645 ... 1.640153 1.8760693 1.2662979 ] [ 0.9171481 2.455586 1.2507507 ... 0.9001891 1.633659 0.6687653 ] [ 0.9816728 2.7769053 1.2241336 ... 3.8232307 2.6061769 1.9636642 ]] ... [[ 1.4586253 1.8337046 1.9798896 ... 1.1244473 2.16475 1.9332747 ] [ 1.2662274 1.3060902 2.3772333 ... 1.8727658 2.0752983 1.1463367 ] [ 2.1844273 1.2292265 1.4756335 ... 2.01856 1.5195149 0.6893126 ] ... [ 1.2348789 2.765385 1.7850958 ... 1.9170209 0.95501626 0.8296107 ] [ 2.8085637 1.7423382 1.6936744 ... 1.6772705 1.5956874 1.4181573 ] [ 0.8145808 2.850737 1.7913885 ... 2.1318288 1.586301 1.8356109 ]] [[ 1.8659159 1.9553674 1.7606597 ... 
2.611534 2.0885227 2.4573245 ] [ 1.3995928 2.1387613 2.2248619 ... 2.152504 1.7653853 1.6089772 ] [ 1.7931905 2.5470984 2.107224 ... 1.4486216 1.7038894 2.0774605 ] ... [ 1.8259611 1.5901911 2.3416677 ... 1.305389 1.6127928 0.8223152 ] [ 0.89043176 1.7892973 2.4557824 ... 1.3272978 2.6541007 2.0197437 ] [ 2.2750776 1.0192537 1.7335243 ... 2.8976812 1.5026764 1.9728497 ]] [[ 1.1644685 2.4239457 2.3576617 ... 2.70808 1.9501048 1.8701926 ] [ 1.5474193 2.1615057 2.3194497 ... 1.0159075 1.2666546 1.2602197 ] [ 1.4899323 2.2684155 1.4382137 ... 1.4053055 1.9333686 3.0753214 ] ... [ 1.052181 2.9523962 1.8395289 ... 2.560134 2.026153 1.2843146 ] [ 1.6888999 1.1088552 1.1155198 ... 2.1499057 2.1569443 1.0742415 ] [ 2.5106354 1.6719615 1.8537825 ... 2.1781185 2.9133582 2.731684 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': [1, 1, 1], 'pads': 'same', 'dilations': [1, 1, 1], 'groups': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1145.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) ema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no 
schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Excfw_re: [[[[[-6.05333614e+00 -5.90219641e+00 -2.91067195e+00 -1.33376284e+01 3.67093593e-01 -6.10936165e+00 8.48047638e+00 -1.40535223e+00 1.80500102e+00 -3.54332209e+00] [ 1.13037863e+01 1.90066922e+00 -1.68545052e-01 1.75598276e+00 1.48466730e+00 -8.09512711e+00 -8.46300507e+00 -5.41735649e+00 5.65607786e+00 6.89996195e+00] [-1.72448421e+00 -5.19002485e+00 1.17580533e+00 -2.66545868e+00 -4.03128767e+00 3.29995584e+00 -3.79529810e+00 -3.07402998e-01 -1.31580791e+01 2.43633366e+00] [-1.24905896e+00 5.87041330e+00 5.18814981e-01 1.36180849e+01 1.78229070e+00 1.02238715e+00 -2.40892076e+00 1.35842383e+00 1.04487152e+01 4.78572083e+00] [-5.47167921e+00 -1.72968459e+00 1.27234869e+01 -3.31302071e+00 8.39621639e+00 1.89573243e-01 1.94604540e+00 -8.03034115e+00 -8.35474300e+00 -4.76411438e+00] [-9.17079449e+00 4.06023169e+00 -7.75670576e+00 6.40906048e+00 6.03115559e+00 1.12171459e+01 1.29928780e+01 -8.92817211e+00 5.12980795e+00 -5.28536797e-01] [ 2.62228298e+00 -7.57709169e+00 -2.65969396e-01 -7.91002607e+00 -1.06105843e+01 7.71304226e+00 -1.26826878e+01 7.94422150e+00 -2.07223177e+00 -5.93663025e+00] [-1.90280962e+00 4.54126167e+00 
-5.06063282e-01 -1.29460871e-01 1.30319500e+01 2.66116351e-01 9.94253159e+00 -1.34589882e+01 1.29861414e+00 -7.08012581e-01] [-2.45727271e-01 1.10394406e+00 -2.29208484e-01 -6.89070368e+00 5.38641930e+00 5.70343161e+00 2.39153886e+00 -1.30894983e+00 -4.77292538e+00 -2.75081539e+00] [-5.09080315e+00 -5.78815579e-01 -7.88275194e+00 3.69687223e+00 2.67753696e+00 1.98927724e+00 -3.92807913e+00 1.11828089e-01 -1.42219543e-01 -2.11655807e+00]] [[ 3.49482059e+00 6.68912381e-02 4.57153463e+00 -4.46440458e+00 6.82941866e+00 -2.86540699e+00 2.53093290e+00 -1.97401059e+00 -5.35261345e+00 8.44986737e-02] [-1.41455259e+01 -3.23989868e+00 -1.60629880e+00 5.91616631e+00 -3.28750014e+00 6.88692033e-01 7.04337549e+00 1.54595203e+01 -4.66227674e+00 1.81498003e+00] [-2.67795444e+00 1.85320067e+00 -1.11848621e+01 -4.03039837e+00 -6.59696054e+00 3.76757169e+00 1.95701046e+01 3.71866655e+00 -4.73442841e+00 5.52532673e+00] [-3.82518721e+00 2.44944668e+00 -1.71565590e+01 7.72384882e+00 1.71752262e+01 -4.72601509e+00 1.18228378e+01 -1.30549660e+01 -1.06247072e+01 -7.19095755e+00] [ 1.26283903e+01 5.19109774e+00 5.69328403e+00 -4.75293493e+00 -3.37038684e+00 -6.26049137e+00 9.75418949e+00 1.34272346e+01 2.89517632e+01 -1.03334017e+01] [ 1.64995134e+00 -7.90092230e+00 6.23973560e+00 -1.22993207e+00 8.60307407e+00 4.03175640e+00 -7.21651936e+00 -2.66121459e+00 -4.07962084e+00 3.47300029e+00] [-5.98884010e+00 -5.26223660e+00 -6.42391253e+00 -1.09585133e+01 -1.36637640e+01 4.14624959e-02 -7.56353712e+00 8.60398674e+00 5.05628300e+00 -4.52630329e+00] [-1.21330476e+00 3.32941592e-01 2.27523017e+00 2.57014215e-01 -1.37090051e+00 -6.08094358e+00 -2.35850191e+00 -2.23442242e-01 6.58820248e+00 5.74292374e+00] [ 1.41309619e+00 1.26624870e+01 -3.76122284e+00 8.19064331e+00 -3.51465440e+00 -4.42160320e+00 8.43479061e+00 -4.08341599e+00 -1.01957912e+01 -5.64645338e+00] [ 2.00112700e+00 3.64955902e+00 2.11582565e+00 6.61088824e-02 -1.66440475e+00 3.76550388e+00 -7.26537848e+00 5.51879835e+00 
1.12171822e+01 -5.04271746e-01]] [[-6.38818204e-01 -7.05351686e+00 -1.37738839e-01 -4.92046976e+00 -5.53472614e+00 -6.46238756e+00 -7.37893534e+00 -6.07029295e+00 2.30940032e+00 -2.91372716e-01] [ 6.78749275e+00 4.36314058e+00 8.79388809e+00 8.52478600e+00 5.69729614e+00 3.81266522e+00 1.07246866e+01 -1.34889436e+00 7.41089678e+00 1.14062905e+00] [ 5.57645988e+00 5.11357784e+00 4.83243316e-01 9.66771889e+00 7.61410713e+00 -9.11711311e+00 -1.45729475e+01 1.13878107e+01 4.19613028e+00 -9.04427767e-01] [-7.71442938e+00 -2.06360888e+00 -7.57598114e+00 -1.81765258e+00 -1.06387777e+01 -1.98950875e+00 8.56388092e+00 -1.42279739e+01 1.18557205e+01 -1.76331081e+01] [-3.98772144e+00 -1.88306034e+00 5.42554951e+00 -3.17335844e+00 -5.39706612e+00 7.49788475e+00 6.35527325e+00 -1.05723667e+01 6.36625147e+00 -9.44146156e+00] [-4.68425846e+00 -1.26217585e+01 1.29134283e+01 1.39578428e+01 1.04018230e+01 -1.33477964e+01 5.61005211e+00 2.53469086e+00 2.52850914e+00 5.61819553e+00] [-7.54771614e+00 8.29571247e-01 -6.11877012e+00 4.56676912e+00 1.29303827e+01 5.94721365e+00 -1.84876335e+00 -3.64393234e+00 -7.23911285e+00 1.14058161e+00] [-2.71174312e+00 1.07194519e+01 -1.05999498e+01 5.06771755e+00 -4.10185194e+00 6.77475595e+00 8.73940349e-01 5.07130337e+00 8.42057323e+00 -1.03583574e+01] [ 8.11062455e-01 1.19443903e+01 6.13507080e+00 6.28129196e+00 1.52343082e+01 1.14361887e+01 1.25406337e+00 -8.60936356e+00 1.72836232e+00 1.19207182e+01] [ 3.89506865e+00 -3.41506910e+00 3.82562590e+00 4.84780884e+00 1.55919492e+00 -8.93813515e+00 -2.11505127e+00 3.38553041e-01 1.80080175e+00 -1.45124936e+00]] [[ 6.92869902e-01 5.21287727e+00 -7.93290520e+00 -1.22631235e+01 -8.10861111e+00 4.54447842e+00 -1.62952633e+01 5.45991421e+00 -5.69040537e+00 4.08247471e+00] [-1.38657570e+01 1.03167009e+00 -3.16122460e+00 -1.25579047e+00 2.99130702e+00 1.23179734e+00 8.40440178e+00 -1.24400835e+01 1.07829310e-01 -2.35164642e-01] [-6.60132766e-01 3.30597806e+00 -1.67558384e+01 -5.70834827e+00 -4.11513138e+00 
3.84115505e+00 3.17148566e+00 9.33601475e+00 -6.63011980e+00 -1.11455832e+01] [ 1.42636671e+01 -3.97864580e+00 5.69228983e+00 4.77263355e+00 1.25858192e+01 -5.09284496e+00 -5.57504797e+00 -1.02074842e+01 1.48140395e+00 1.30801067e+01] [ 7.59883213e+00 2.45304203e+00 -9.35015774e+00 1.80862408e+01 1.65208566e+00 -1.50422316e+01 2.49985957e+00 4.48160410e+00 -1.12785244e+01 -4.39381790e+00] [ 3.78587222e+00 -8.16360474e+00 -5.12743378e+00 5.10205775e-02 -4.41126728e+00 6.77115917e+00 -1.72357578e+01 -2.29558206e+00 -1.29506559e+01 8.12951183e+00] [ 6.37398839e-01 1.39185057e+01 6.53388929e+00 6.38230896e+00 1.72579753e+00 -5.99403191e+00 7.31981611e+00 -1.04748507e+01 1.58746928e-01 -1.31062758e+00] [ 7.23991966e+00 -6.03749657e+00 2.70292330e+00 -1.33238678e+01 9.36424923e+00 9.05796719e+00 1.43033063e+00 4.66761065e+00 -5.16178513e+00 4.90171814e+00] [-1.00869255e+01 3.06813669e+00 -1.41659304e-01 -1.82726746e+01 1.57112980e+00 -1.24147282e+01 1.12905204e+00 -6.01863527e+00 3.64547920e+00 -7.02613544e+00] [ 5.07253599e+00 4.69776249e+00 1.42889690e+00 7.46007633e+00 9.59031677e+00 2.99883294e+00 5.07827616e+00 6.35254264e-01 -2.25947762e+00 1.87664318e+00]] [[ 2.71395469e+00 2.75350881e+00 -1.25912132e+01 -5.40996885e+00 -1.09292340e+00 1.13403196e+01 7.40659046e+00 1.04097271e+01 -3.31658173e+00 -1.13477793e+01] [ 8.28359032e+00 3.41738343e+00 2.61311417e+01 -1.09005795e+01 -4.41813439e-01 -1.35699272e+01 -6.30102348e+00 -1.08585443e+01 1.06645594e+01 4.36346531e+00] [ 2.25088462e-01 -7.11466122e+00 -3.46632385e+00 4.14151955e+00 7.06465912e+00 -2.04832220e+00 1.54190149e+01 9.08269691e+00 1.80396244e-01 1.99209213e+00] [ 6.91541243e+00 8.03209400e+00 -2.89756155e+00 -1.98925571e+01 -3.60546875e+00 6.41737843e+00 1.00463057e+01 3.79952478e+00 -5.53280294e-01 -6.94986439e+00] [-1.00629053e+01 1.10334663e+01 7.19422579e+00 4.56957436e+00 -4.02922678e+00 1.70685962e-01 8.02400112e+00 -2.35624695e+00 5.01859474e+00 -2.24150205e+00] [ 3.27753282e+00 7.50025654e+00 
-1.09916172e+01 -2.96873975e+00 4.94736385e+00 8.33186913e+00 4.25241804e+00 1.91467988e+00 -4.15062761e+00 -1.04917736e+01] [-9.52618885e+00 -8.77187157e+00 6.35977316e+00 -6.40859318e+00 -1.71451797e+01 6.92842531e+00 5.56834459e+00 5.28878355e+00 -1.31493416e-02 -1.60969937e+00] [-1.25819778e+01 -2.20309138e+00 -1.64222705e+00 -6.95959854e+00 1.22471571e+01 -4.46311426e+00 -4.19118214e+00 -1.20406904e+01 -1.15923939e+01 -1.20071971e+00] [ 5.38571548e+00 -1.14317741e+01 -5.66519642e+00 7.57168674e+00 -2.84424663e+00 -9.55049324e+00 -4.26318550e+00 1.02844658e+01 -8.74759293e+00 7.02413464e+00] [ 1.31533718e+00 7.67685509e+00 -5.04740477e-01 -7.42735815e+00 -9.09914315e-01 1.17952240e+00 -4.95911598e+00 8.91461015e-01 1.57761824e+00 1.51831317e+00]] [[ 3.77302194e+00 -1.18775396e+01 3.63494158e+00 1.04298048e+01 1.48934736e+01 4.30047703e+00 1.35193393e-01 3.24891686e+00 7.07779264e+00 5.01884079e+00] [ 1.23777456e+01 -4.37141848e+00 4.47775459e+00 -1.13719320e+01 -1.03408766e+01 7.78839350e+00 3.69799089e+00 -3.93323451e-01 -2.59224987e+00 1.66217601e+00] [ 1.43981528e+00 -3.23811388e+00 1.64378452e+01 -6.45896339e+00 1.70300713e+01 -1.58100977e+01 -1.65436859e+01 -1.03299398e+01 -1.13933811e+01 -4.38694179e-01] [-4.82703477e-01 2.10459018e+00 -8.17077637e+00 9.59805012e+00 5.79020977e-01 6.17591381e+00 -5.37109375e+00 1.85847545e+00 3.86332107e+00 -2.49754667e+00] [ 8.94117355e-01 -3.81924534e+00 4.31871128e+00 6.58910418e+00 -8.99124908e+00 -1.34062471e+01 9.97877121e+00 1.24786663e+00 -4.57639790e+00 -3.56504381e-01] [ 4.01977825e+00 -2.72953272e-01 -5.45657778e+00 2.59432626e+00 -2.78788137e+00 -9.63401139e-01 -9.98328209e+00 -2.60903001e+00 1.00096464e+00 1.27188969e+01] [ 2.45165658e+00 2.41683435e+00 4.35343361e+00 -1.17364550e+01 3.27561641e+00 -1.97748375e+01 5.50849819e+00 2.78070641e+00 9.30328274e+00 -5.11160803e+00] [ 6.56404972e-01 -7.19771147e+00 3.76361489e+00 -1.92654002e+00 6.93029070e+00 1.85038948e+01 -1.11073327e+00 2.49683285e+00 
-1.15505540e+00 -7.83016682e-01] [ 2.66344953e+00 4.28139687e+00 3.30455989e-01 -3.25337291e+00 -1.39962378e+01 8.53400946e-01 5.69661260e-01 -5.59873533e+00 4.06663799e+00 9.59904861e+00] [-6.03279829e+00 6.07271576e+00 -4.74038124e+00 1.32536769e-01 8.78482246e+00 1.59260726e+00 -9.12569714e+00 -3.54080915e+00 1.43374252e+00 -6.10012054e+00]] [[-3.46296453e+00 -4.44325686e+00 -2.01058602e+00 1.03525853e+00 5.82993555e+00 -8.42866135e+00 9.16382694e+00 -6.86250389e-01 1.16527042e+01 5.13493896e-01] [ 7.79755592e+00 -6.75236750e+00 -1.07833123e+00 3.03773642e+00 -1.63057113e+00 -1.22347097e+01 8.37785053e+00 -9.08020306e+00 -7.72467470e+00 2.44097090e+00] [ 4.92215492e-02 -2.72349215e+00 -7.08052635e+00 -8.27347469e+00 3.23310447e+00 -2.35402298e+00 6.31677723e+00 2.27559586e+01 1.16973658e+01 -1.43998432e+00] [-3.05909514e+00 3.21166348e+00 -9.28603458e+00 8.41643333e+00 -7.79587209e-01 2.20259801e-01 -8.91732597e+00 -4.49210739e+00 -1.60298336e+00 5.92048311e+00] [-7.17430210e+00 2.25077415e+00 2.25790024e-01 -5.44161797e+00 9.40581417e+00 1.78788071e+01 1.01552591e+01 1.91314101e+00 -4.67258167e+00 -2.70243645e+00] [ 6.11293697e+00 1.18056393e+00 1.14034212e+00 2.40327001e+00 -6.04992104e+00 -1.06612682e+01 5.77351761e+00 3.75600964e-01 3.35074353e+00 7.84267426e+00] [-8.96114445e+00 -9.53475475e+00 -9.05503750e+00 -1.26230919e+00 -6.15831089e+00 1.12118340e+01 -8.82663727e-01 -1.31880369e+01 -5.42669582e+00 3.93915033e+00] [ 1.24837904e+01 1.55383492e+00 2.15344238e+01 8.95941830e+00 3.59276009e+00 -1.42263203e+01 -7.54521656e+00 -5.54864359e+00 2.17791510e+00 -5.46418428e+00] [ 9.80632287e-03 -6.15540218e+00 -4.12104368e+00 -5.12198782e+00 -1.20077295e+01 1.35878477e+01 2.17008615e+00 2.53622985e+00 3.40210009e+00 -2.66177893e+00] [-8.39588284e-01 -4.31593895e+00 -4.15663052e+00 1.29515469e-01 1.84209657e+00 -6.31663799e+00 2.16408491e+00 2.94588995e+00 -9.90767002e-01 4.33295679e+00]] [[-4.10728264e+00 -3.15649199e+00 1.13860929e+00 -2.80240631e+00 
7.25810623e+00 3.19509125e+00 -9.81011152e-01 5.79381704e+00 1.43804588e+01 5.96371949e-01] [-4.67842817e+00 8.17242527e+00 -1.68947220e+01 6.46087217e+00 4.80991840e+00 -6.06733179e+00 2.91342187e+00 -3.28748608e+00 -2.44141603e+00 1.36089969e+00] [-1.43860006e+01 -6.78435755e+00 6.23165178e+00 1.08784962e+01 -1.95150447e+00 1.47299633e+01 -2.12834120e+00 1.62483387e+01 -9.20332527e+00 1.02517605e-01] [-5.23458338e+00 1.15542450e+01 1.41810007e+01 -1.29574919e+01 -1.30517912e+01 5.17747211e+00 -1.68000240e+01 -4.51944351e+00 -4.33629322e+00 1.41267633e+00] [-7.19377184e+00 5.04021168e+00 -1.35063620e+01 8.25974655e+00 -3.42597532e+00 -1.25937328e+01 1.08997192e+01 -1.23607540e+01 1.34708033e+01 5.70701122e+00] [-6.50800180e+00 2.08618011e+01 4.00385857e+00 4.46376419e+00 -6.14778137e+00 1.80823541e+00 -1.14477072e+01 1.22849607e+01 7.65036964e+00 -4.06511402e+00] [-1.22687378e+01 -1.69405384e+01 1.96654916e+00 -4.12962556e-01 1.09121151e+01 4.59636778e-01 7.90811300e+00 3.17003989e+00 1.08060904e+01 -1.51731277e+00] [ 8.23079777e+00 6.37346554e+00 5.39221954e+00 -3.72509575e+00 1.82700062e+00 5.72284269e+00 7.92752552e+00 2.52813196e+00 5.82519007e+00 -2.36722612e+00] [-1.01604118e+01 -6.84714973e-01 7.73204708e+00 -1.10986681e+01 1.04709530e+01 -8.56639862e+00 -9.14899158e+00 -1.71167488e+01 1.92316949e-01 2.19322729e+00] [ 9.05955911e-01 -1.71475267e+00 -3.26566911e+00 1.54989758e+01 -7.37672567e+00 5.82074642e-01 4.22819519e+00 3.37838221e+00 4.13311481e+00 4.26742029e+00]] [[ 1.52980912e+00 -3.98838401e+00 3.85022473e+00 5.80231905e+00 -3.94130135e+00 1.37860179e+00 9.02614117e+00 -2.51168156e+00 4.85020494e+00 -7.04021454e-02] [-6.20282054e-01 -1.97177804e+00 3.68507576e+00 3.24183226e+00 -5.34900141e+00 1.17640769e+00 -7.07697678e+00 9.51969528e+00 3.62743998e+00 1.83087087e+00] [-3.54554510e+00 -1.00354652e+01 -2.70188212e+00 6.32020330e+00 -2.49906611e+00 -3.52394199e+00 -8.60784626e+00 -1.11055279e+01 -1.72453666e+00 7.68421531e-01] [-1.50167155e+00 
1.16271877e+01 1.22659266e+00 1.21500647e+00 1.99869025e+00 -3.83035350e+00 1.26170187e+01 2.06253862e+00 3.13160515e+00 6.69106293e+00] [ 8.51094127e-01 -9.72360420e+00 -1.02580118e+01 8.75712967e+00 5.78152752e+00 -2.15036988e+00 -2.01795616e+01 -5.40394878e+00 -1.28781853e+01 -4.64522028e+00] [ 1.08946552e+01 6.92591429e+00 5.12022495e+00 -7.94058418e+00 -1.67297971e+00 5.81443691e+00 1.72228642e+01 -1.06001940e+01 -2.21066022e+00 1.86263561e+01] [-2.30786991e+00 -4.15460253e+00 3.48766923e+00 9.98269749e+00 1.69675579e+01 -6.73785925e+00 -4.72793484e+00 6.11959314e+00 -1.73472614e+01 -2.04705191e+00] [-2.81578112e+00 2.41110826e+00 5.77675819e+00 -1.58464508e+01 2.44449258e+00 -1.12101784e+01 -1.31266270e+01 -1.04270267e+01 2.86946833e-01 2.71381831e+00] [ 1.82666707e+00 -1.64604111e+01 -1.20021462e-01 1.48514614e+01 -1.23162518e+01 1.45938110e+01 6.09407902e-01 2.17829752e+00 -1.38642836e+01 -4.42109251e+00] [-1.16251659e+01 -3.10869813e+00 -8.38902760e+00 -2.55778480e+00 -4.59054661e+00 -5.02398396e+00 -2.52206206e-01 -3.26075792e-01 -6.43323517e+00 -3.98776102e+00]] [[ 3.88968444e+00 -3.66778255e+00 2.73108673e+00 2.17690468e+00 1.96257675e+00 -2.90547228e+00 6.24429226e-01 -3.94032025e+00 -1.26469660e+01 -6.51525259e+00] [-8.25980663e+00 1.94400253e+01 -7.16274357e+00 7.12304163e+00 3.51154476e-01 -2.11275387e+00 4.74969292e+00 6.57070160e+00 1.30794239e+01 5.42494345e+00] [-6.10387802e-01 -5.13492870e+00 -5.93591976e+00 -1.47914183e+00 -1.59392953e-01 -2.85905862e+00 -8.14618301e+00 -1.05593596e+01 -1.01790495e+01 -3.85761690e+00] [ 8.68597507e+00 -2.86206341e+00 3.43648744e+00 3.70903850e-01 -1.09379940e+01 1.04233284e+01 -5.20933676e+00 -3.24250793e+00 -1.00764027e+01 -8.40538144e-02] [-5.81645250e+00 -2.93746662e+00 -2.09709120e+00 -4.84982061e+00 -1.10236692e+01 5.92400265e+00 4.00366783e+00 1.09722071e+01 6.00541019e+00 6.86290550e+00] [-1.39774835e+00 4.54841900e+00 -4.13189030e+00 -1.25257149e+01 5.26093149e+00 -3.68897533e+00 -1.87884502e+01 
-3.51463771e+00 -4.51144123e+00 -1.29323215e+01] [ 2.04591393e+00 2.35212708e+00 3.95963764e+00 -8.54240179e-01 -6.92203188e+00 -2.66066313e-01 4.79098558e+00 8.59623790e-01 7.65353537e+00 -2.02480626e+00] [ 2.77072835e+00 -1.71790695e+01 -1.94142270e+00 8.63753796e+00 -2.28312254e-01 -1.67525315e+00 -1.89003551e+00 -9.57723260e-01 1.40952091e+01 -1.85222363e+00] [-1.04438591e+00 1.73047600e+01 -4.74796534e+00 8.39894867e+00 -2.41638303e-01 -1.09467545e+01 -3.80557656e+00 6.27045155e-01 -1.05047340e+01 1.76984310e-01] [-2.81629777e+00 -1.10136104e+00 3.50772381e+00 -5.39899588e+00 7.52374935e+00 9.47301197e+00 3.71599126e+00 5.67116678e-01 8.49976826e+00 4.18241453e+00]]]]]; ov_res: [[[[[-6.05333662e+00 -5.90219593e+00 -2.91067147e+00 -1.33376303e+01 3.67092639e-01 -6.10936165e+00 8.48047638e+00 -1.40535212e+00 1.80500114e+00 -3.54332232e+00] [ 1.13037853e+01 1.90066993e+00 -1.68545052e-01 1.75598323e+00 1.48466730e+00 -8.09512615e+00 -8.46300316e+00 -5.41735554e+00 5.65607834e+00 6.89996243e+00] [-1.72448492e+00 -5.19002533e+00 1.17580557e+00 -2.66545820e+00 -4.03128767e+00 3.29995632e+00 -3.79529810e+00 -3.07403475e-01 -1.31580791e+01 2.43633389e+00] [-1.24905920e+00 5.87041235e+00 5.18814743e-01 1.36180820e+01 1.78229082e+00 1.02238643e+00 -2.40892172e+00 1.35842359e+00 1.04487162e+01 4.78572035e+00] [-5.47167778e+00 -1.72968435e+00 1.27234869e+01 -3.31302118e+00 8.39621830e+00 1.89573407e-01 1.94604456e+00 -8.03033924e+00 -8.35474300e+00 -4.76411438e+00] [-9.17079353e+00 4.06023169e+00 -7.75670624e+00 6.40905952e+00 6.03115463e+00 1.12171459e+01 1.29928770e+01 -8.92817211e+00 5.12980890e+00 -5.28536677e-01] [ 2.62228274e+00 -7.57709265e+00 -2.65970111e-01 -7.91002607e+00 -1.06105824e+01 7.71304035e+00 -1.26826897e+01 7.94421959e+00 -2.07223082e+00 -5.93662977e+00] [-1.90280986e+00 4.54126215e+00 -5.06063759e-01 -1.29460633e-01 1.30319500e+01 2.66115636e-01 9.94252968e+00 -1.34589872e+01 1.29861486e+00 -7.08012640e-01] [-2.45726794e-01 1.10394454e+00 
-2.29210392e-01 -6.89070225e+00 5.38641882e+00 5.70343113e+00 2.39153910e+00 -1.30894959e+00 -4.77292538e+00 -2.75081563e+00] [-5.09080315e+00 -5.78815401e-01 -7.88275051e+00 3.69687176e+00 2.67753649e+00 1.98927748e+00 -3.92807984e+00 1.11828230e-01 -1.42219171e-01 -2.11655831e+00]] [[ 3.49482012e+00 6.68919384e-02 4.57153463e+00 -4.46440458e+00 6.82941914e+00 -2.86540723e+00 2.53093362e+00 -1.97401011e+00 -5.35261297e+00 8.44985917e-02] [-1.41455278e+01 -3.23989844e+00 -1.60629964e+00 5.91616678e+00 -3.28750038e+00 6.88692093e-01 7.04337168e+00 1.54595232e+01 -4.66227627e+00 1.81498015e+00] [-2.67795515e+00 1.85320115e+00 -1.11848612e+01 -4.03039742e+00 -6.59696054e+00 3.76757073e+00 1.95701046e+01 3.71866417e+00 -4.73442841e+00 5.52532482e+00] [-3.82518673e+00 2.44944501e+00 -1.71565628e+01 7.72384882e+00 1.71752243e+01 -4.72601557e+00 1.18228416e+01 -1.30549698e+01 -1.06247053e+01 -7.19095898e+00] [ 1.26283913e+01 5.19109821e+00 5.69328547e+00 -4.75293493e+00 -3.37038732e+00 -6.26048994e+00 9.75418758e+00 1.34272327e+01 2.89517593e+01 -1.03334036e+01] [ 1.64995086e+00 -7.90092134e+00 6.23973513e+00 -1.22993135e+00 8.60307598e+00 4.03175545e+00 -7.21651888e+00 -2.66121435e+00 -4.07961988e+00 3.47300053e+00] [-5.98884106e+00 -5.26223612e+00 -6.42391157e+00 -1.09585075e+01 -1.36637640e+01 4.14609462e-02 -7.56353474e+00 8.60398769e+00 5.05628395e+00 -4.52630472e+00] [-1.21330476e+00 3.32939804e-01 2.27523065e+00 2.57013500e-01 -1.37089956e+00 -6.08094645e+00 -2.35850453e+00 -2.23440096e-01 6.58820152e+00 5.74292517e+00] [ 1.41309571e+00 1.26624851e+01 -3.76122236e+00 8.19064236e+00 -3.51465464e+00 -4.42160463e+00 8.43478966e+00 -4.08341455e+00 -1.01957932e+01 -5.64645195e+00] [ 2.00112653e+00 3.64955854e+00 2.11582494e+00 6.61096424e-02 -1.66440499e+00 3.76550436e+00 -7.26538086e+00 5.51879883e+00 1.12171841e+01 -5.04271686e-01]] [[-6.38819039e-01 -7.05351496e+00 -1.37738839e-01 -4.92047024e+00 -5.53472710e+00 -6.46238708e+00 -7.37893438e+00 -6.07029247e+00 
2.30940104e+00 -2.91372389e-01] [ 6.78749275e+00 4.36314058e+00 8.79388714e+00 8.52478504e+00 5.69729805e+00 3.81266379e+00 1.07246895e+01 -1.34889293e+00 7.41089916e+00 1.14063072e+00] [ 5.57646084e+00 5.11358070e+00 4.83244747e-01 9.66771507e+00 7.61410666e+00 -9.11710835e+00 -1.45729475e+01 1.13878117e+01 4.19613409e+00 -9.04427469e-01] [-7.71442986e+00 -2.06361032e+00 -7.57597876e+00 -1.81765294e+00 -1.06387768e+01 -1.98950875e+00 8.56388187e+00 -1.42279749e+01 1.18557243e+01 -1.76331081e+01] [-3.98772049e+00 -1.88306010e+00 5.42554951e+00 -3.17335796e+00 -5.39706659e+00 7.49788618e+00 6.35527468e+00 -1.05723677e+01 6.36625004e+00 -9.44146156e+00] [-4.68425941e+00 -1.26217556e+01 1.29134321e+01 1.39578428e+01 1.04018259e+01 -1.33477964e+01 5.61005211e+00 2.53469062e+00 2.52850842e+00 5.61819553e+00] [-7.54771519e+00 8.29570413e-01 -6.11877203e+00 4.56676817e+00 1.29303827e+01 5.94721413e+00 -1.84876478e+00 -3.64393306e+00 -7.23911381e+00 1.14058125e+00] [-2.71174288e+00 1.07194538e+01 -1.05999489e+01 5.06771469e+00 -4.10185337e+00 6.77475643e+00 8.73939395e-01 5.07130194e+00 8.42057323e+00 -1.03583574e+01] [ 8.11062574e-01 1.19443893e+01 6.13507128e+00 6.28129148e+00 1.52343073e+01 1.14361858e+01 1.25406432e+00 -8.60936451e+00 1.72836232e+00 1.19207191e+01] [ 3.89506769e+00 -3.41506934e+00 3.82562637e+00 4.84781027e+00 1.55919588e+00 -8.93813515e+00 -2.11504984e+00 3.38552654e-01 1.80080187e+00 -1.45124948e+00]] [[ 6.92869842e-01 5.21287870e+00 -7.93290710e+00 -1.22631235e+01 -8.10861111e+00 4.54447889e+00 -1.62952633e+01 5.45991468e+00 -5.69040537e+00 4.08247614e+00] [-1.38657589e+01 1.03166974e+00 -3.16122532e+00 -1.25579000e+00 2.99130797e+00 1.23179710e+00 8.40439987e+00 -1.24400845e+01 1.07828833e-01 -2.35164180e-01] [-6.60132289e-01 3.30597687e+00 -1.67558365e+01 -5.70834732e+00 -4.11513138e+00 3.84115505e+00 3.17148519e+00 9.33601379e+00 -6.63012028e+00 -1.11455832e+01] [ 1.42636662e+01 -3.97864342e+00 5.69229031e+00 4.77263403e+00 1.25858192e+01 
-5.09284592e+00 -5.57504988e+00 -1.02074823e+01 1.48140514e+00 1.30801058e+01] [ 7.59883404e+00 2.45304298e+00 -9.35015869e+00 1.80862408e+01 1.65208638e+00 -1.50422297e+01 2.49985862e+00 4.48160362e+00 -1.12785244e+01 -4.39381742e+00] [ 3.78587222e+00 -8.16360474e+00 -5.12743473e+00 5.10188863e-02 -4.41126823e+00 6.77116013e+00 -1.72357578e+01 -2.29558134e+00 -1.29506578e+01 8.12951183e+00] [ 6.37398720e-01 1.39185085e+01 6.53388977e+00 6.38231230e+00 1.72579837e+00 -5.99403095e+00 7.31981707e+00 -1.04748516e+01 1.58748835e-01 -1.31062734e+00] [ 7.23991966e+00 -6.03749657e+00 2.70292377e+00 -1.33238678e+01 9.36424828e+00 9.05796528e+00 1.43032968e+00 4.66760969e+00 -5.16178656e+00 4.90171909e+00] [-1.00869265e+01 3.06813622e+00 -1.41660020e-01 -1.82726707e+01 1.57112944e+00 -1.24147282e+01 1.12905395e+00 -6.01863527e+00 3.64547992e+00 -7.02613544e+00] [ 5.07253599e+00 4.69776201e+00 1.42889619e+00 7.46007633e+00 9.59031677e+00 2.99883294e+00 5.07827663e+00 6.35255039e-01 -2.25947833e+00 1.87664378e+00]] [[ 2.71395469e+00 2.75350904e+00 -1.25912142e+01 -5.40996885e+00 -1.09292293e+00 1.13403187e+01 7.40658998e+00 1.04097261e+01 -3.31658101e+00 -1.13477793e+01] [ 8.28359127e+00 3.41738391e+00 2.61311417e+01 -1.09005785e+01 -4.41813797e-01 -1.35699272e+01 -6.30102444e+00 -1.08585472e+01 1.06645584e+01 4.36346292e+00] [ 2.25088224e-01 -7.11466026e+00 -3.46632195e+00 4.14152050e+00 7.06465960e+00 -2.04832029e+00 1.54190121e+01 9.08270073e+00 1.80397436e-01 1.99209273e+00] [ 6.91541290e+00 8.03209496e+00 -2.89756179e+00 -1.98925648e+01 -3.60546875e+00 6.41737938e+00 1.00463047e+01 3.79952407e+00 -5.53279340e-01 -6.94986486e+00] [-1.00629053e+01 1.10334663e+01 7.19422579e+00 4.56957340e+00 -4.02922869e+00 1.70685202e-01 8.02400017e+00 -2.35624695e+00 5.01859426e+00 -2.24150300e+00] [ 3.27753186e+00 7.50025463e+00 -1.09916162e+01 -2.96874094e+00 4.94736528e+00 8.33186817e+00 4.25241709e+00 1.91467977e+00 -4.15062666e+00 -1.04917736e+01] [-9.52618790e+00 -8.77187157e+00 
6.35977459e+00 -6.40859127e+00 -1.71451817e+01 6.92842531e+00 5.56834507e+00 5.28878260e+00 -1.31486263e-02 -1.60969937e+00] [-1.25819769e+01 -2.20309401e+00 -1.64222646e+00 -6.95959854e+00 1.22471561e+01 -4.46311283e+00 -4.19118261e+00 -1.20406866e+01 -1.15923958e+01 -1.20071912e+00] [ 5.38571596e+00 -1.14317741e+01 -5.66520119e+00 7.57168436e+00 -2.84424806e+00 -9.55049419e+00 -4.26318693e+00 1.02844648e+01 -8.74759197e+00 7.02413559e+00] [ 1.31533659e+00 7.67685366e+00 -5.04740477e-01 -7.42735767e+00 -9.09914076e-01 1.17952263e+00 -4.95911360e+00 8.91461372e-01 1.57761753e+00 1.51831162e+00]] [[ 3.77302265e+00 -1.18775368e+01 3.63494205e+00 1.04298048e+01 1.48934755e+01 4.30047607e+00 1.35194346e-01 3.24891710e+00 7.07779312e+00 5.01883984e+00] [ 1.23777447e+01 -4.37142086e+00 4.47775459e+00 -1.13719292e+01 -1.03408766e+01 7.78839397e+00 3.69799066e+00 -3.93323153e-01 -2.59225035e+00 1.66217577e+00] [ 1.43981504e+00 -3.23811364e+00 1.64378471e+01 -6.45896387e+00 1.70300732e+01 -1.58100948e+01 -1.65436859e+01 -1.03299389e+01 -1.13933802e+01 -4.38694090e-01] [-4.82702047e-01 2.10459161e+00 -8.17077446e+00 9.59804726e+00 5.79020262e-01 6.17591286e+00 -5.37109470e+00 1.85847449e+00 3.86331964e+00 -2.49754739e+00] [ 8.94117415e-01 -3.81924725e+00 4.31871223e+00 6.58910322e+00 -8.99124908e+00 -1.34062481e+01 9.97877121e+00 1.24786615e+00 -4.57639694e+00 -3.56504053e-01] [ 4.01977873e+00 -2.72951871e-01 -5.45657682e+00 2.59432626e+00 -2.78788352e+00 -9.63400900e-01 -9.98327923e+00 -2.60903239e+00 1.00096416e+00 1.27188978e+01] [ 2.45165586e+00 2.41683340e+00 4.35343361e+00 -1.17364531e+01 3.27561617e+00 -1.97748394e+01 5.50849533e+00 2.78070688e+00 9.30328465e+00 -5.11160707e+00] [ 6.56404734e-01 -7.19771051e+00 3.76361442e+00 -1.92653930e+00 6.93029070e+00 1.85038929e+01 -1.11073351e+00 2.49683309e+00 -1.15505362e+00 -7.83017457e-01] [ 2.66344953e+00 4.28139687e+00 3.30455989e-01 -3.25337315e+00 -1.39962339e+01 8.53401303e-01 5.69661140e-01 -5.59873629e+00 
4.06663942e+00 9.59904766e+00] [-6.03279877e+00 6.07271671e+00 -4.74038315e+00 1.32536024e-01 8.78482437e+00 1.59260690e+00 -9.12569523e+00 -3.54080987e+00 1.43374240e+00 -6.10012197e+00]] [[-3.46296406e+00 -4.44325638e+00 -2.01058626e+00 1.03525710e+00 5.82993650e+00 -8.42866135e+00 9.16382599e+00 -6.86250389e-01 1.16527023e+01 5.13493836e-01] [ 7.79755545e+00 -6.75236893e+00 -1.07832980e+00 3.03773642e+00 -1.63057232e+00 -1.22347069e+01 8.37784672e+00 -9.08020210e+00 -7.72467661e+00 2.44097185e+00] [ 4.92194928e-02 -2.72349215e+00 -7.08052540e+00 -8.27347183e+00 3.23310351e+00 -2.35402369e+00 6.31677818e+00 2.27559528e+01 1.16973629e+01 -1.43998361e+00] [-3.05909562e+00 3.21166611e+00 -9.28603268e+00 8.41643429e+00 -7.79586256e-01 2.20260873e-01 -8.91732788e+00 -4.49210739e+00 -1.60298288e+00 5.92048311e+00] [-7.17430210e+00 2.25077343e+00 2.25790262e-01 -5.44161844e+00 9.40581608e+00 1.78788052e+01 1.01552601e+01 1.91314030e+00 -4.67258024e+00 -2.70243859e+00] [ 6.11293793e+00 1.18056250e+00 1.14034295e+00 2.40326786e+00 -6.04992008e+00 -1.06612673e+01 5.77351665e+00 3.75601083e-01 3.35074401e+00 7.84267330e+00] [-8.96114540e+00 -9.53475761e+00 -9.05503559e+00 -1.26231074e+00 -6.15830994e+00 1.12118378e+01 -8.82662773e-01 -1.31880369e+01 -5.42669630e+00 3.93914986e+00] [ 1.24837914e+01 1.55383575e+00 2.15344238e+01 8.95941830e+00 3.59276032e+00 -1.42263203e+01 -7.54521465e+00 -5.54864454e+00 2.17791462e+00 -5.46418667e+00] [ 9.80524998e-03 -6.15540171e+00 -4.12104273e+00 -5.12199163e+00 -1.20077305e+01 1.35878477e+01 2.17008352e+00 2.53622866e+00 3.40209937e+00 -2.66177845e+00] [-8.39588583e-01 -4.31593990e+00 -4.15663004e+00 1.29513055e-01 1.84209633e+00 -6.31663799e+00 2.16408420e+00 2.94588971e+00 -9.90767419e-01 4.33295584e+00]] [[-4.10728264e+00 -3.15649271e+00 1.13860953e+00 -2.80240583e+00 7.25810766e+00 3.19509149e+00 -9.81011033e-01 5.79381800e+00 1.43804569e+01 5.96372128e-01] [-4.67842817e+00 8.17242527e+00 -1.68947239e+01 6.46087408e+00 
4.80991745e+00 -6.06733370e+00 2.91342115e+00 -3.28748608e+00 -2.44141603e+00 1.36089945e+00] [-1.43860044e+01 -6.78435564e+00 6.23165369e+00 1.08785000e+01 -1.95150352e+00 1.47299595e+01 -2.12834191e+00 1.62483368e+01 -9.20332813e+00 1.02518931e-01] [-5.23458242e+00 1.15542459e+01 1.41810045e+01 -1.29574909e+01 -1.30517893e+01 5.17747307e+00 -1.68000259e+01 -4.51944399e+00 -4.33629274e+00 1.41267657e+00] [-7.19377422e+00 5.04021358e+00 -1.35063601e+01 8.25974655e+00 -3.42597389e+00 -1.25937347e+01 1.08997192e+01 -1.23607521e+01 1.34708042e+01 5.70701170e+00] [-6.50800180e+00 2.08618031e+01 4.00385809e+00 4.46376324e+00 -6.14778137e+00 1.80823636e+00 -1.14477091e+01 1.22849579e+01 7.65037155e+00 -4.06511450e+00] [-1.22687387e+01 -1.69405403e+01 1.96654928e+00 -4.12963033e-01 1.09121132e+01 4.59638447e-01 7.90811014e+00 3.17003894e+00 1.08060894e+01 -1.51731348e+00] [ 8.23079586e+00 6.37346458e+00 5.39221907e+00 -3.72509432e+00 1.82700098e+00 5.72284269e+00 7.92752647e+00 2.52813268e+00 5.82518911e+00 -2.36722612e+00] [-1.01604137e+01 -6.84715211e-01 7.73204565e+00 -1.10986671e+01 1.04709539e+01 -8.56640148e+00 -9.14898872e+00 -1.71167488e+01 1.92317724e-01 2.19322491e+00] [ 9.05955672e-01 -1.71475279e+00 -3.26567078e+00 1.54989777e+01 -7.37672329e+00 5.82074881e-01 4.22819567e+00 3.37838173e+00 4.13311434e+00 4.26741982e+00]] [[ 1.52980959e+00 -3.98838115e+00 3.85022473e+00 5.80231810e+00 -3.94130087e+00 1.37860167e+00 9.02614212e+00 -2.51168180e+00 4.85020590e+00 -7.04019666e-02] [-6.20281577e-01 -1.97177792e+00 3.68507600e+00 3.24183130e+00 -5.34900236e+00 1.17640781e+00 -7.07697678e+00 9.51969814e+00 3.62743998e+00 1.83087075e+00] [-3.54554558e+00 -1.00354633e+01 -2.70188212e+00 6.32020330e+00 -2.49906540e+00 -3.52394199e+00 -8.60784531e+00 -1.11055260e+01 -1.72453737e+00 7.68421710e-01] [-1.50167191e+00 1.16271868e+01 1.22659552e+00 1.21500695e+00 1.99869049e+00 -3.83035493e+00 1.26170177e+01 2.06253862e+00 3.13160467e+00 6.69106293e+00] [ 8.51093411e-01 
-9.72360325e+00 -1.02580099e+01 8.75712585e+00 5.78152609e+00 -2.15037036e+00 -2.01795635e+01 -5.40394831e+00 -1.28781853e+01 -4.64521980e+00] [ 1.08946524e+01 6.92591810e+00 5.12022352e+00 -7.94058323e+00 -1.67297900e+00 5.81443596e+00 1.72228603e+01 -1.06001930e+01 -2.21066046e+00 1.86263542e+01] [-2.30786991e+00 -4.15460300e+00 3.48766947e+00 9.98269558e+00 1.69675560e+01 -6.73785925e+00 -4.72793579e+00 6.11959171e+00 -1.73472614e+01 -2.04705191e+00] [-2.81578135e+00 2.41110873e+00 5.77675724e+00 -1.58464489e+01 2.44449329e+00 -1.12101803e+01 -1.31266260e+01 -1.04270277e+01 2.86947072e-01 2.71381903e+00] [ 1.82666624e+00 -1.64604130e+01 -1.20021045e-01 1.48514643e+01 -1.23162537e+01 1.45938101e+01 6.09406471e-01 2.17830110e+00 -1.38642836e+01 -4.42109203e+00] [-1.16251640e+01 -3.10869837e+00 -8.38902760e+00 -2.55778551e+00 -4.59054565e+00 -5.02398491e+00 -2.52206028e-01 -3.26076537e-01 -6.43323517e+00 -3.98776078e+00]] [[ 3.88968468e+00 -3.66778255e+00 2.73108697e+00 2.17690444e+00 1.96257734e+00 -2.90547204e+00 6.24429345e-01 -3.94032049e+00 -1.26469688e+01 -6.51525259e+00] [-8.25980568e+00 1.94400215e+01 -7.16274261e+00 7.12304068e+00 3.51154119e-01 -2.11275387e+00 4.74969530e+00 6.57070160e+00 1.30794230e+01 5.42494392e+00] [-6.10387385e-01 -5.13492727e+00 -5.93592072e+00 -1.47914183e+00 -1.59393504e-01 -2.85905838e+00 -8.14618111e+00 -1.05593605e+01 -1.01790504e+01 -3.85761714e+00] [ 8.68597507e+00 -2.86206388e+00 3.43648648e+00 3.70902508e-01 -1.09379959e+01 1.04233274e+01 -5.20933628e+00 -3.24250913e+00 -1.00764008e+01 -8.40535909e-02] [-5.81645203e+00 -2.93746781e+00 -2.09709024e+00 -4.84982014e+00 -1.10236673e+01 5.92400312e+00 4.00366783e+00 1.09722052e+01 6.00541210e+00 6.86290598e+00] [-1.39774835e+00 4.54841852e+00 -4.13189268e+00 -1.25257158e+01 5.26093435e+00 -3.68897486e+00 -1.87884521e+01 -3.51463699e+00 -4.51144171e+00 -1.29323235e+01] [ 2.04591441e+00 2.35212636e+00 3.95963812e+00 -8.54239941e-01 -6.92203283e+00 -2.66067028e-01 4.79098606e+00 
8.59624147e-01 7.65353441e+00 -2.02480674e+00] [ 2.77072787e+00 -1.71790695e+01 -1.94142258e+00 8.63753986e+00 -2.28312612e-01 -1.67525268e+00 -1.89003408e+00 -9.57722902e-01 1.40952091e+01 -1.85222411e+00] [-1.04438686e+00 1.73047600e+01 -4.74796486e+00 8.39894772e+00 -2.41637960e-01 -1.09467564e+01 -3.80557680e+00 6.27045095e-01 -1.05047340e+01 1.76984206e-01] [-2.81629801e+00 -1.10136092e+00 3.50772405e+00 -5.39899588e+00 7.52374983e+00 9.47301006e+00 3.71599102e+00 5.67117333e-01 8.49976730e+00 4.18241405e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': [1, 1, 1], 'pads': 'valid', 'dilations': [1, 1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1147.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[[-8.44024754e+00 7.79150438e+00 -6.93663216e+00 1.72136288e+01 -2.22767258e+00 -1.43812835e+00 4.87048244e+00 -1.88848610e+01] [-5.28153849e+00 -8.15570068e+00 1.52863235e+01 -1.23536444e+01 2.16411552e+01 1.09108763e+01 9.24984455e+00 1.39248097e+00] [ 1.06628504e+01 6.91879809e-01 4.39638472e+00 5.48783445e+00 5.99591589e+00 1.61075840e+01 -1.25651312e+01 -2.94417262e+00] [ 3.17582488e+00 -8.29239368e+00 7.20612240e+00 8.72570097e-01 -6.73039007e+00 -1.33186646e+01 6.77352726e-01 9.62460577e-01] [-3.76954532e+00 1.99154103e+00 -1.70618649e+01 -2.69575834e-01 -6.11102521e-01 -4.94338840e-01 2.99678326e+00 1.06172915e+01] [ 2.39562213e-01 -1.55411243e+01 4.64109325e+00 4.85771847e+00 9.55536604e-01 8.73824120e+00 7.85007000e-01 1.57663841e+01] [-1.75928807e+00 2.42875910e+00 -2.75459290e+00 -5.92540598e+00 4.80091715e+00 7.08653355e+00 7.43257761e-01 -7.25958395e+00] [-1.70708251e+00 -1.59968128e+01 2.33614945e+00 -2.06164074e+01 -3.09226203e+00 1.06786966e+00 -3.54567361e+00 -9.31626987e+00]] [[-1.36802733e+00 -4.53717804e+00 -9.67394733e+00 3.66042233e+00 -1.47046204e+01 2.78106618e+00 -3.17184410e+01 4.13740873e+00] [ 1.36981096e+01 -1.33934317e+01 -3.01955414e+00 -1.09055758e+01 -2.18740630e+00 3.78333068e+00 -4.92193842e+00 
4.41041803e+00] [ 7.29041815e+00 7.08095789e+00 -8.80805302e+00 7.70628572e-01 2.17152405e+01 -7.25957584e+00 1.02182484e+00 -1.33549547e+01] [ 1.00510097e+00 -9.68364429e+00 3.54890847e+00 6.14570236e+00 -2.88108677e-01 -6.14315891e+00 2.02072501e+00 -3.34595609e+00] [-4.22660446e+00 -1.47330391e+00 -4.67646313e+00 9.50358677e+00 -7.85740185e+00 5.48737240e+00 1.41718459e+00 3.16318107e+00] [ 5.28692491e-02 -4.96606874e+00 -5.77077448e-01 -3.28017497e+00 -8.14081287e+00 5.82396078e+00 -8.04061985e+00 -6.07457352e+00] [ 5.94243240e+00 -4.72463655e+00 -3.18592906e+00 -1.59504461e+01 -2.27132511e+00 1.25840845e+01 1.44151363e+01 9.68757629e+00] [ 3.40614676e+00 -5.13914108e+00 -3.95646000e+00 1.15514469e+01 -4.85153675e+00 -9.74199295e+00 2.41522408e+00 -6.85832977e+00]] [[-7.14685965e+00 -1.02493989e+00 8.31958950e-01 -1.18211870e+01 2.19206548e+00 7.54600334e+00 2.71391654e+00 -1.02302337e+00] [-2.24334335e+00 1.45554752e+01 -1.65385952e+01 -1.58529463e+01 6.29573154e+00 7.16784120e-01 6.89590263e+00 4.91199303e+00] [ 1.50537891e+01 5.35565281e+00 -1.44491253e+01 6.74807453e+00 -5.82937241e+00 -1.37852454e+00 1.24972095e+01 4.83806992e+00] [ 1.09860725e+01 -1.08244410e+01 9.62722683e+00 -6.19990379e-03 -1.47416115e+00 1.28552735e+00 -1.32067785e+01 1.00776606e+01] [ 1.28271759e+00 3.77328300e+00 -1.19905500e+01 8.37645435e+00 1.83488417e+00 4.21114492e+00 -1.73674703e+00 1.05197277e+01] [-3.97354937e+00 6.64650536e+00 -4.94265509e+00 -2.12711835e+00 -1.56372428e+00 6.43511868e+00 5.74981976e+00 -5.44620705e+00] [-5.64471006e+00 -2.42691731e+00 -3.59102607e+00 9.51060104e+00 -6.47050524e+00 5.11950874e+00 -6.43495512e+00 -3.53499055e+00] [-5.06272912e-02 -1.59618607e+01 8.98046589e+00 -9.82030106e+00 3.68550658e+00 -1.29744816e+01 -1.33248246e+00 9.15126801e+00]] [[-5.53947020e+00 -1.73279381e+00 -1.04925594e+01 7.72806168e+00 1.79684055e+00 8.31044292e+00 1.34888668e+01 -1.61893330e+01] [ 2.05757833e+00 -1.04618635e+01 -8.57266331e+00 -8.13165283e+00 8.39085484e+00 
-7.20153213e-01 6.01906157e+00 4.61752802e-01] [ 7.26738405e+00 -9.65502930e+00 7.93346977e+00 -4.71788836e+00 7.64077950e+00 -1.20776606e+01 -1.12081738e+01 -4.83861017e+00] [-2.36490798e+00 -1.54158306e+01 -4.10246229e+00 2.10543442e+01 -1.51372981e+00 1.02695980e+01 -1.28250635e+00 -8.80651772e-01] [-1.32731047e+01 -9.24707699e+00 2.35579872e+00 3.25273085e+00 -9.03011990e+00 1.62436733e+01 8.28508663e+00 -1.16702709e+01] [ 3.12604642e+00 1.59499490e+00 1.05067892e+01 -8.80103397e+00 1.77534029e-01 -1.14284360e+00 -6.43952560e+00 -1.96630096e+01] [ 5.90061331e+00 7.11689901e+00 5.10854578e+00 7.08873987e+00 -1.35345268e+00 -1.58780634e+00 1.46055012e+01 6.11290598e+00] [ 1.55396109e+01 1.42205009e+01 7.45260382e+00 -5.68974555e-01 1.41084547e+01 -1.24411325e+01 1.90081463e+01 2.13586879e+00]] [[ 4.75667095e+00 -2.37604065e+01 -1.08495271e+00 -7.24122906e+00 5.50511456e+00 1.02204618e+01 -5.60636342e-01 -9.99174500e+00] [ 1.05296526e+01 6.97197008e+00 -1.29216642e+01 1.22991192e+00 7.28213167e+00 -1.31273546e+01 3.05145288e+00 -1.15045941e+00] [ 7.40244961e+00 -1.38210077e+01 5.22993946e+00 1.54255781e+01 -5.29692936e+00 -5.03548086e-01 4.40879583e+00 8.28177643e+00] [-3.42618966e+00 -8.96626532e-01 8.91028214e+00 6.07197523e+00 -9.78481579e+00 9.59177780e+00 1.14085732e+01 -3.06531954e+00] [ 2.85834980e+00 4.07288933e+00 -8.35954666e+00 -4.87972069e+00 3.31418586e+00 -3.86676550e+00 3.53560996e+00 -2.32564373e+01] [ 1.49315987e+01 1.57509365e+01 -1.64223251e+01 2.55468011e+00 -1.25766897e+01 2.85728145e+00 -4.18165594e-01 1.02572575e+01] [-1.55152912e+01 3.36634707e+00 -5.54106474e+00 -1.23854084e+01 5.53340054e+00 6.04925442e+00 -8.76165676e+00 5.84122467e+00] [-3.72233224e+00 6.20365477e+00 -1.67053425e+00 1.08139963e+01 -1.86988182e+01 -1.68623047e+01 -1.47086143e+01 -6.37904310e+00]] [[-1.26139889e+01 -1.62426338e+01 -7.06493425e+00 1.25069797e+00 8.55585670e+00 -6.09165001e+00 9.88197231e+00 -4.38901663e+00] [-9.19695663e+00 5.66475439e+00 -4.36289263e+00 
1.23856044e+00 9.91406059e+00 2.94840646e+00 -8.70590496e+00 -3.73682308e+00] [-4.02434206e+00 4.08258867e+00 7.41048157e-01 -5.47921133e+00 -8.37452292e-01 -1.14410055e+00 -3.10347033e+00 -2.22443581e+00] [ 1.01557696e+00 1.00441637e+01 -1.51720028e+01 -4.46436977e+00 -3.75815868e+00 5.51056147e+00 5.24208260e+00 -1.12257195e+01] [ 3.10683227e+00 1.91350079e+00 5.76114511e+00 -9.73032570e+00 6.72054827e-01 -5.46067286e+00 1.00568848e+01 -8.08491325e+00] [ 1.00620449e+00 -1.32283115e+01 2.30724392e+01 -6.80369759e+00 2.19225836e+00 1.79790366e+00 1.00907164e+01 -8.20284843e+00] [ 2.55886722e+00 2.94244218e+00 4.14003515e+00 1.71782589e+01 4.28967762e+00 -1.47569408e+01 -3.22423410e+00 2.52706695e+00] [ 1.47966595e+01 -8.01734066e+00 6.92432451e+00 1.79484570e+00 1.44236994e+01 -2.71388340e+00 3.59456062e+00 9.01028061e+00]] [[ 4.29198265e+00 -1.29743643e+01 -6.70474863e+00 -6.25911951e+00 5.17102098e+00 -9.41668093e-01 -1.02427709e+00 -1.05933990e+01] [-1.22661743e+01 -6.18541336e+00 1.42206392e+01 2.05956101e+00 1.25937490e+01 -7.73831177e+00 -1.37046146e+01 7.59144974e+00] [ 2.74430156e+00 5.14970303e+00 -6.68626165e+00 -8.89139557e+00 1.29757290e+01 -7.08722067e+00 1.20238495e+01 -7.05276728e+00] [ 1.01541209e+00 -1.36404300e+00 -1.77759819e+01 -1.07661247e+01 7.80936384e+00 1.86402988e+01 -2.82274747e+00 -4.58393145e+00] [-9.50846863e+00 5.00364862e-02 -4.22982883e+00 3.33936542e-01 3.27865529e+00 5.10824251e+00 5.70552588e+00 8.52701545e-01] [-1.35030622e+01 4.78814983e+00 3.03625679e+00 4.69826460e+00 1.68104160e+00 -9.35047436e+00 1.77642870e+00 5.40871763e+00] [-1.42025077e+00 -4.54701281e+00 -7.59397507e-01 1.18207273e+01 6.11590528e+00 6.57775116e+00 -8.52455902e+00 8.64351749e+00] [ 6.05061817e+00 -2.15294857e+01 7.22794294e-01 4.78647518e+00 -8.78734875e+00 7.65816212e+00 1.19219913e+01 1.71822662e+01]] [[ 4.48441696e+00 2.23217297e+00 2.11357880e+00 -2.83050227e+00 -8.88085175e+00 -7.16090775e+00 6.92268038e+00 1.88338745e+00] [ 8.85155392e+00 
2.68961763e+00 -1.79218082e+01 -1.33514255e-01 7.82179773e-01 -1.04627047e+01 -1.26049671e+01 2.07572060e+01] [ 4.06327200e+00 6.89162254e+00 -1.25809126e+01 9.88111401e+00 9.51093864e+00 -5.84874964e+00 -4.49310827e+00 -7.04976988e+00] [-4.98976421e+00 1.02496982e+00 -3.23224950e+00 4.36711836e+00 -1.05180597e+01 -5.94881487e+00 -8.82319927e+00 4.02158171e-01] [ 2.36426950e+00 4.32727909e+00 -5.93343544e+00 -7.40601254e+00 6.30219650e+00 -6.02861071e+00 4.10152578e+00 -4.18783522e+00] [-3.88447428e+00 -5.09719181e+00 -7.94528627e+00 8.41861820e+00 -9.90446091e+00 1.19713271e+00 -7.06286001e+00 2.96694785e-01] [ 4.35611963e+00 1.98465979e+00 6.34326398e-01 4.62984657e+00 3.13517642e+00 -8.00594807e+00 8.36198235e+00 -3.53644514e+00] [-3.32861090e+00 7.44044447e+00 2.79111934e+00 1.55504789e+01 -2.64111090e+00 -4.24212790e+00 8.01610947e+00 -3.87703490e+00]]]]]; ov_res: [[[[[-8.44024658e+00 7.79150248e+00 -6.93663120e+00 1.72136307e+01 -2.22767305e+00 -1.43812811e+00 4.87048197e+00 -1.88848648e+01] [-5.28153992e+00 -8.15570164e+00 1.52863235e+01 -1.23536425e+01 2.16411552e+01 1.09108772e+01 9.24984646e+00 1.39248025e+00] [ 1.06628485e+01 6.91878140e-01 4.39638472e+00 5.48783350e+00 5.99591303e+00 1.61075859e+01 -1.25651331e+01 -2.94417214e+00] [ 3.17582536e+00 -8.29239559e+00 7.20612049e+00 8.72569144e-01 -6.73038960e+00 -1.33186684e+01 6.77351594e-01 9.62460339e-01] [-3.76954675e+00 1.99154055e+00 -1.70618591e+01 -2.69574702e-01 -6.11103475e-01 -4.94339317e-01 2.99678421e+00 1.06172934e+01] [ 2.39563167e-01 -1.55411234e+01 4.64109325e+00 4.85771656e+00 9.55535412e-01 8.73824120e+00 7.85008550e-01 1.57663803e+01] [-1.75928950e+00 2.42875814e+00 -2.75459337e+00 -5.92540550e+00 4.80091619e+00 7.08653450e+00 7.43257940e-01 -7.25958538e+00] [-1.70708013e+00 -1.59968081e+01 2.33615065e+00 -2.06164055e+01 -3.09226131e+00 1.06786871e+00 -3.54567361e+00 -9.31627178e+00]] [[-1.36802876e+00 -4.53717709e+00 -9.67394638e+00 3.66042280e+00 -1.47046194e+01 2.78106666e+00 
-3.17184296e+01 4.13740826e+00] [ 1.36981134e+01 -1.33934326e+01 -3.01955462e+00 -1.09055786e+01 -2.18740535e+00 3.78333163e+00 -4.92193890e+00 4.41041756e+00] [ 7.29041767e+00 7.08095694e+00 -8.80805206e+00 7.70626783e-01 2.17152386e+01 -7.25957489e+00 1.02182293e+00 -1.33549528e+01] [ 1.00510204e+00 -9.68364239e+00 3.54890800e+00 6.14570141e+00 -2.88110226e-01 -6.14315939e+00 2.02072477e+00 -3.34595585e+00] [-4.22660589e+00 -1.47330320e+00 -4.67646456e+00 9.50358772e+00 -7.85740185e+00 5.48737288e+00 1.41718519e+00 3.16318154e+00] [ 5.28688915e-02 -4.96606874e+00 -5.77076495e-01 -3.28017545e+00 -8.14081383e+00 5.82395983e+00 -8.04061699e+00 -6.07457209e+00] [ 5.94243050e+00 -4.72463703e+00 -3.18592787e+00 -1.59504480e+01 -2.27132320e+00 1.25840807e+01 1.44151382e+01 9.68757534e+00] [ 3.40614581e+00 -5.13914204e+00 -3.95646191e+00 1.15514469e+01 -4.85153770e+00 -9.74199200e+00 2.41522288e+00 -6.85832977e+00]] [[-7.14685917e+00 -1.02494061e+00 8.31959903e-01 -1.18211870e+01 2.19206476e+00 7.54600382e+00 2.71391606e+00 -1.02302420e+00] [-2.24334311e+00 1.45554743e+01 -1.65385971e+01 -1.58529482e+01 6.29573250e+00 7.16784120e-01 6.89590073e+00 4.91199446e+00] [ 1.50537910e+01 5.35565329e+00 -1.44491272e+01 6.74807215e+00 -5.82937241e+00 -1.37852418e+00 1.24972134e+01 4.83807182e+00] [ 1.09860735e+01 -1.08244429e+01 9.62722683e+00 -6.19942695e-03 -1.47416127e+00 1.28552759e+00 -1.32067785e+01 1.00776596e+01] [ 1.28271782e+00 3.77328086e+00 -1.19905519e+01 8.37645340e+00 1.83488536e+00 4.21114302e+00 -1.73674655e+00 1.05197306e+01] [-3.97354889e+00 6.64650393e+00 -4.94265413e+00 -2.12711787e+00 -1.56372654e+00 6.43512058e+00 5.74982166e+00 -5.44620705e+00] [-5.64470911e+00 -2.42691636e+00 -3.59102559e+00 9.51059914e+00 -6.47050095e+00 5.11950970e+00 -6.43495464e+00 -3.53499055e+00] [-5.06262779e-02 -1.59618587e+01 8.98046589e+00 -9.82030201e+00 3.68550658e+00 -1.29744816e+01 -1.33248389e+00 9.15126991e+00]] [[-5.53946972e+00 -1.73279417e+00 -1.04925585e+01 
7.72806215e+00 1.79683816e+00 8.31044388e+00 1.34888687e+01 -1.61893349e+01] [ 2.05758071e+00 -1.04618664e+01 -8.57266808e+00 -8.13165379e+00 8.39085674e+00 -7.20151305e-01 6.01906061e+00 4.61752325e-01] [ 7.26738501e+00 -9.65502930e+00 7.93347168e+00 -4.71788883e+00 7.64077950e+00 -1.20776625e+01 -1.12081747e+01 -4.83861113e+00] [-2.36490870e+00 -1.54158297e+01 -4.10246372e+00 2.10543442e+01 -1.51372886e+00 1.02695980e+01 -1.28250456e+00 -8.80652487e-01] [-1.32731047e+01 -9.24707890e+00 2.35579753e+00 3.25273037e+00 -9.03011799e+00 1.62436733e+01 8.28508663e+00 -1.16702738e+01] [ 3.12604713e+00 1.59499514e+00 1.05067883e+01 -8.80103111e+00 1.77536175e-01 -1.14284384e+00 -6.43952513e+00 -1.96630096e+01] [ 5.90061426e+00 7.11689997e+00 5.10854721e+00 7.08873892e+00 -1.35345292e+00 -1.58780718e+00 1.46055031e+01 6.11290550e+00] [ 1.55396128e+01 1.42205019e+01 7.45260286e+00 -5.68973601e-01 1.41084576e+01 -1.24411335e+01 1.90081482e+01 2.13586879e+00]] [[ 4.75667143e+00 -2.37604065e+01 -1.08495402e+00 -7.24123096e+00 5.50511599e+00 1.02204657e+01 -5.60634613e-01 -9.99174500e+00] [ 1.05296526e+01 6.97197104e+00 -1.29216642e+01 1.22991025e+00 7.28213167e+00 -1.31273565e+01 3.05145311e+00 -1.15045941e+00] [ 7.40245008e+00 -1.38210087e+01 5.22993898e+00 1.54255762e+01 -5.29693079e+00 -5.03548563e-01 4.40879583e+00 8.28177452e+00] [-3.42619014e+00 -8.96627069e-01 8.91028118e+00 6.07197666e+00 -9.78481483e+00 9.59177780e+00 1.14085722e+01 -3.06531811e+00] [ 2.85835123e+00 4.07289028e+00 -8.35954666e+00 -4.87972021e+00 3.31418681e+00 -3.86676502e+00 3.53560948e+00 -2.32564297e+01] [ 1.49315987e+01 1.57509384e+01 -1.64223213e+01 2.55467772e+00 -1.25766888e+01 2.85728145e+00 -4.18165475e-01 1.02572594e+01] [-1.55152912e+01 3.36634445e+00 -5.54106569e+00 -1.23854074e+01 5.53340054e+00 6.04925489e+00 -8.76165676e+00 5.84122467e+00] [-3.72233272e+00 6.20365524e+00 -1.67053592e+00 1.08140001e+01 -1.86988163e+01 -1.68623047e+01 -1.47086105e+01 -6.37904167e+00]] [[-1.26139898e+01 
-1.62426338e+01 -7.06493378e+00 1.25069749e+00 8.55585575e+00 -6.09164858e+00 9.88197517e+00 -4.38901711e+00] [-9.19695759e+00 5.66475487e+00 -4.36289310e+00 1.23856091e+00 9.91405964e+00 2.94840646e+00 -8.70590496e+00 -3.73682451e+00] [-4.02434063e+00 4.08258820e+00 7.41049111e-01 -5.47921133e+00 -8.37453008e-01 -1.14409983e+00 -3.10347080e+00 -2.22443962e+00] [ 1.01557767e+00 1.00441628e+01 -1.51719999e+01 -4.46437216e+00 -3.75816035e+00 5.51055861e+00 5.24208164e+00 -1.12257195e+01] [ 3.10683179e+00 1.91349888e+00 5.76114416e+00 -9.73032379e+00 6.72055304e-01 -5.46066999e+00 1.00568838e+01 -8.08491516e+00] [ 1.00620496e+00 -1.32283106e+01 2.30724411e+01 -6.80369759e+00 2.19225860e+00 1.79790378e+00 1.00907164e+01 -8.20284748e+00] [ 2.55886292e+00 2.94244027e+00 4.14003754e+00 1.71782570e+01 4.28967667e+00 -1.47569408e+01 -3.22423363e+00 2.52706838e+00] [ 1.47966633e+01 -8.01733971e+00 6.92432642e+00 1.79484701e+00 1.44236984e+01 -2.71388483e+00 3.59456015e+00 9.01027966e+00]] [[ 4.29198170e+00 -1.29743576e+01 -6.70475531e+00 -6.25911951e+00 5.17102051e+00 -9.41669226e-01 -1.02427661e+00 -1.05934029e+01] [-1.22661762e+01 -6.18541288e+00 1.42206392e+01 2.05955935e+00 1.25937481e+01 -7.73831224e+00 -1.37046165e+01 7.59145069e+00] [ 2.74430108e+00 5.14970255e+00 -6.68626118e+00 -8.89139366e+00 1.29757271e+01 -7.08722258e+00 1.20238476e+01 -7.05276775e+00] [ 1.01541221e+00 -1.36404324e+00 -1.77759781e+01 -1.07661247e+01 7.80936289e+00 1.86402988e+01 -2.82274771e+00 -4.58393335e+00] [-9.50846672e+00 5.00364862e-02 -4.22983027e+00 3.33936065e-01 3.27865553e+00 5.10824537e+00 5.70552492e+00 8.52703094e-01] [-1.35030622e+01 4.78815031e+00 3.03625870e+00 4.69826603e+00 1.68104160e+00 -9.35047340e+00 1.77642965e+00 5.40871954e+00] [-1.42025173e+00 -4.54701138e+00 -7.59396911e-01 1.18207273e+01 6.11590290e+00 6.57775068e+00 -8.52455807e+00 8.64351749e+00] [ 6.05061722e+00 -2.15294876e+01 7.22794294e-01 4.78647375e+00 -8.78734970e+00 7.65816402e+00 1.19219942e+01 
1.71822681e+01]] [[ 4.48441648e+00 2.23217225e+00 2.11357689e+00 -2.83050323e+00 -8.88086033e+00 -7.16090727e+00 6.92268133e+00 1.88338697e+00] [ 8.85155487e+00 2.68961763e+00 -1.79218121e+01 -1.33514136e-01 7.82179475e-01 -1.04627028e+01 -1.26049690e+01 2.07572060e+01] [ 4.06327295e+00 6.89162445e+00 -1.25809145e+01 9.88111305e+00 9.51093769e+00 -5.84874964e+00 -4.49310875e+00 -7.04976797e+00] [-4.98976374e+00 1.02496982e+00 -3.23224902e+00 4.36711931e+00 -1.05180635e+01 -5.94881487e+00 -8.82319832e+00 4.02158648e-01] [ 2.36426854e+00 4.32727909e+00 -5.93343449e+00 -7.40601206e+00 6.30219650e+00 -6.02861071e+00 4.10152388e+00 -4.18783426e+00] [-3.88447380e+00 -5.09719133e+00 -7.94528627e+00 8.41861820e+00 -9.90446186e+00 1.19713533e+00 -7.06286001e+00 2.96693355e-01] [ 4.35612011e+00 1.98465908e+00 6.34326398e-01 4.62984514e+00 3.13517618e+00 -8.00594902e+00 8.36198139e+00 -3.53644657e+00] [-3.32861066e+00 7.44044447e+00 2.79111600e+00 1.55504789e+01 -2.64110875e+00 -4.24212885e+00 8.01610851e+00 -3.87703514e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': 'same', 'dilations': [1, 1, 1], 'groups': 3} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1149.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.0421 (2,1,1,.,.) = 0.01 * -6.1942 (3,1,1,.,.) = 0.7165 [ CPUFloatType{3,1,1,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[[-5.67054510e-01 1.66725492e+00 3.53841841e-01 ... 2.05133826e-01 -5.76194763e-01 6.77842319e-01] [ 8.45157862e-01 2.51558218e-02 -5.08120716e-01 ... -8.40003014e-01 -1.09869623e+00 -6.03446305e-01] [-1.09269774e+00 4.33052368e-02 9.40864325e-01 ... -5.24173677e-01 -1.60565212e-01 -1.25210798e+00] ... [-7.40131587e-02 -1.60202122e+00 -7.80724704e-01 ... 3.57244968e-01 1.14651036e+00 7.87331641e-01] [ 2.09763741e+00 -4.14793700e-01 -9.76253986e-01 ... -3.93206686e-01 1.87439665e-01 -3.49531502e-01] [-4.60931033e-01 7.57306099e-01 2.80917436e-01 ... -1.67798042e+00 1.96926892e-01 1.34931756e-02]] [[ 1.09656525e+00 -1.57916677e+00 -1.94193006e-01 ... 1.00607908e+00 -1.24594879e+00 8.17276418e-01] [-5.55880487e-01 7.27988005e-01 3.51699263e-01 ... -1.52262318e+00 9.25043225e-01 1.26687574e+00] [-1.23342358e-01 -6.39441490e-01 1.58586824e+00 ... -6.05795562e-01 -1.72957408e+00 4.84928906e-01] ... [ 8.96769941e-01 4.23948258e-01 1.15017450e+00 ... 2.38355851e+00 4.71846789e-01 2.28865832e-01] [ 2.01191735e+00 7.97574580e-01 1.59829402e+00 ... 1.93050790e+00 -1.14092141e-01 -8.52302685e-02] [-6.15698397e-01 -1.68875203e-01 -1.05591014e-01 ... 
-1.33187532e+00 -2.37148666e+00 -8.72031510e-01]] [[ 2.71534845e-02 -1.33254454e-01 3.08949798e-01 ... 8.40940118e-01 2.08501741e-01 2.80316234e-01] [-2.25450203e-01 1.56171489e+00 4.66001272e-01 ... 3.71308267e-01 -2.86618590e-01 5.76004803e-01] [-3.78205866e-01 -6.25458807e-02 5.13570309e-01 ... -1.13314437e-02 1.21340477e+00 6.17387176e-01] ... [ 2.76130736e-01 1.03276753e+00 -1.53566658e+00 ... 2.05030799e+00 -4.89496514e-02 3.51642519e-01] [ 1.25564680e-01 1.69266391e+00 1.55911148e+00 ... -1.73713529e+00 2.59743422e-01 -4.58468854e-01] [ 1.11799920e+00 1.36338878e+00 4.27268744e-01 ... 2.09069028e-01 -4.02704090e-01 -1.53182530e+00]] ... [[-3.74676496e-01 -3.80522698e-01 6.24187171e-01 ... -1.12155628e+00 -4.04645085e-01 3.80456865e-01] [-7.04502881e-01 5.62071085e-01 9.51108396e-01 ... 1.05589211e+00 -5.36793172e-01 8.65741372e-01] [-1.10621011e+00 1.04762204e-01 6.82884514e-01 ... -1.31203544e+00 5.83753467e-01 1.49174377e-01] ... [ 5.29899523e-02 -2.39304170e-01 -1.43668443e-01 ... -1.41370904e+00 8.25020075e-01 4.64985579e-01] [-7.67332539e-02 -5.32928228e-01 9.42157388e-01 ... -1.10565376e+00 1.68691802e+00 1.34496117e+00] [ 6.59595847e-01 -6.10041022e-01 1.48077801e-01 ... -7.56894231e-01 9.16802958e-02 6.04949355e-01]] [[ 2.12764412e-01 4.02009308e-01 -2.05859661e+00 ... 3.14600539e+00 8.25115323e-01 -3.55636850e-02] [-4.44348037e-01 -4.48568583e-01 7.53235891e-02 ... -1.87772036e-01 1.32732701e+00 -8.43990564e-01] [-4.71080653e-02 2.84762859e-01 -6.81378484e-01 ... 2.73097301e+00 -3.04940432e-01 9.82117534e-01] ... [-6.89329281e-02 -6.42684877e-01 2.38439828e-01 ... -1.92023352e-01 5.40414751e-01 1.75882566e+00] [-2.87378691e-02 -1.58667946e+00 -1.54065281e-01 ... 4.43516523e-01 6.16510332e-01 -5.57661831e-01] [ 9.47441280e-01 1.04888391e+00 -4.44376975e-01 ... -1.17585087e+00 4.81656462e-01 -5.51430702e-01]] [[-6.46949708e-01 -8.48407865e-01 -1.01385677e+00 ... 
-8.72380972e-01 1.20217693e+00 1.08711493e+00] [-3.61755379e-02 -1.08704293e+00 5.58850408e-01 ... 2.21631661e-01 -1.06117809e+00 -9.56025302e-01] [-2.35492349e-01 8.82690251e-01 1.20826766e-01 ... -1.32780373e-01 3.38424295e-01 1.73194185e-01] ... [ 8.56207609e-01 -8.76559988e-02 -9.94771004e-01 ... -6.86590016e-01 -7.38402233e-02 7.05119222e-02] [-1.50760996e+00 -7.02923059e-01 -1.15353074e-02 ... 1.85051882e+00 2.67022681e+00 1.38784540e+00] [ 5.64165190e-02 1.59209538e+00 1.08693779e+00 ... -1.50952530e+00 -9.36932564e-01 2.83534050e-01]]] [[[ 9.26204473e-02 4.39018123e-02 9.69350897e-03 ... -8.23472962e-02 3.09063680e-02 -3.58093642e-02] [-1.76613387e-02 -8.91328529e-02 -3.31916735e-02 ... 2.43966561e-02 -1.30860331e-02 -1.74376070e-02] [-1.44694513e-02 5.63837923e-02 5.31394146e-02 ... -6.00650124e-02 2.46680174e-02 -4.49287258e-02] ... [ 4.56980914e-02 -2.19727773e-02 -1.55907094e-01 ... 5.15543995e-03 1.27531543e-01 2.95442622e-02] [-9.88590345e-02 -2.92668566e-02 -6.11071587e-02 ... 3.04969940e-02 -2.61630286e-02 -4.96586822e-02] [ 1.16866723e-01 -1.24682086e-02 -1.49820615e-02 ... 1.63661949e-02 5.87341897e-02 -2.37348285e-02]] [[ 3.04368604e-02 -4.54008430e-02 7.56744342e-03 ... -4.27367389e-02 -5.69703430e-02 -5.21216281e-02] [ 1.54584972e-02 3.40328552e-02 9.88873933e-03 ... -7.79129192e-02 8.17836374e-02 -1.10789975e-02] [ 3.92120369e-02 -5.55649176e-02 1.95419509e-02 ... -6.30203187e-02 -1.06425442e-01 5.43106496e-02] ... [-1.06407329e-02 -1.13935232e-01 9.20731798e-02 ... 2.03427505e-02 -4.89456989e-02 -8.54161829e-02] [-3.62904416e-03 8.98571238e-02 8.59702975e-02 ... 9.47000906e-02 -2.96230856e-02 1.62617527e-02] [ 1.77411232e-02 2.22629048e-02 1.99718233e-02 ... -2.69501135e-02 -4.24055569e-02 1.62714161e-03]] [[-7.34069273e-02 3.70027199e-02 -6.90905526e-02 ... -1.31556079e-01 5.11948839e-02 1.06057324e-01] [ 2.44790632e-02 1.82546191e-02 9.72630009e-02 ... 
-5.42785926e-03 3.62687372e-02 -5.97992912e-02] [-1.03332259e-01 -9.03331675e-03 1.84432026e-02 ... 1.01564027e-01 -4.03734930e-02 -1.00147516e-01] ... [-7.32817873e-02 -5.30639254e-02 -2.80737337e-02 ... -2.14242190e-02 -8.42920095e-02 -9.71069112e-02] [ 5.26091270e-02 7.66774788e-02 -7.55181685e-02 ... -7.51135871e-02 -3.54677700e-02 -2.79208627e-02] [ 2.91688070e-02 -2.68416014e-02 -8.41023459e-04 ... -5.75752333e-02 5.12460172e-02 2.68269014e-02]] ... [[ 5.85023966e-03 -2.78429091e-02 -6.39546383e-03 ... -1.05588265e-01 -3.65795717e-02 -2.05295980e-02] [ 8.20309818e-02 -1.07398540e-01 4.25475389e-02 ... -3.82872485e-02 -3.03792451e-02 2.68110856e-02] [ 8.62625148e-03 4.39170003e-02 5.21728918e-02 ... 3.30536366e-02 -3.35958339e-02 1.83013484e-01] ... [-8.27567931e-03 -1.99754001e-03 -3.49592157e-02 ... -3.32508832e-02 -3.62275429e-02 8.53343382e-02] [-2.60396861e-02 -2.03170143e-02 7.82870948e-02 ... -3.75107303e-02 -2.77751666e-02 -8.22516754e-02] [-3.60392593e-02 1.44464197e-02 -5.19330949e-02 ... -1.04112327e-01 -4.64900024e-02 4.93888594e-02]] [[ 2.21518185e-02 4.25879657e-02 -1.45729317e-03 ... 6.31795898e-02 4.79295813e-02 1.15367062e-01] [-6.30839467e-02 -4.63846363e-02 5.06612733e-02 ... -2.50299722e-02 2.62049828e-02 -1.16316741e-02] [ 5.11862598e-02 -4.16276092e-03 -6.68291524e-02 ... -1.56206667e-01 6.99779950e-03 -4.34687063e-02] ... [ 7.24595459e-03 -1.44748392e-05 -2.05791611e-02 ... -5.38374344e-03 -1.14899520e-02 -2.21791826e-02] [ 2.19593216e-02 2.98097730e-02 -1.57012008e-02 ... 3.46094705e-02 -1.39426962e-01 1.75311100e-02] [-1.29933357e-01 -3.48103978e-02 9.14780647e-02 ... -6.17491789e-02 1.53263986e-01 7.55308867e-02]] [[ 2.70579141e-02 1.84972733e-02 2.92671788e-02 ... 3.46166790e-02 -1.60219949e-02 5.90914376e-02] [ 1.20204389e-02 1.31367698e-01 7.86921456e-02 ... -6.28743768e-02 -1.92204416e-02 1.80452019e-01] [ 8.02319273e-02 -1.90080851e-02 5.15965968e-02 ... 4.11378592e-02 -2.31196787e-02 -5.34031875e-02] ... 
[-9.68849137e-02 5.12819476e-02 -9.03921947e-02 ... -4.53966632e-02 -1.47180587e-01 -5.19418232e-02] [-3.54292952e-02 3.93831953e-02 -3.49468514e-02 ... -7.69611448e-02 -6.98662028e-02 -5.56697138e-02] [ 4.11413610e-02 6.21868894e-02 -3.79134389e-03 ... -1.40005145e-02 -3.00923474e-02 -2.15224363e-02]]] [[[-5.09906232e-01 3.24482709e-01 -4.96395648e-01 ... -8.55576932e-01 9.03435051e-02 1.05835986e+00] [-5.91136813e-01 9.78366613e-01 -6.00696027e-01 ... 3.85790691e-02 6.13839030e-01 -9.51681256e-01] [ 1.06918907e+00 -1.28244385e-01 -8.69081736e-01 ... 1.68549657e-01 1.62518704e+00 2.27542496e+00] ... [-4.84044760e-01 9.36795324e-02 5.55334032e-01 ... -9.50787142e-02 9.08902943e-01 -9.17360783e-01] [-9.59450364e-01 1.17771733e+00 -9.87978041e-01 ... 2.00022483e+00 -6.90776527e-01 1.12695359e-01] [ 1.48673463e+00 -4.99571294e-01 -6.97557151e-01 ... 2.12124452e-01 1.56808615e+00 6.03240728e-01]] [[-1.19836740e-01 -3.74231547e-01 8.53431821e-02 ... -3.68976355e-01 -1.42613217e-01 -3.10389608e-01] [ 9.34032559e-01 1.14374124e-02 -5.63420653e-01 ... 5.51618077e-02 7.03714252e-01 -3.37016195e-01] [ 1.15265167e+00 3.36745232e-01 -2.23845784e-02 ... -2.95952737e-01 3.67606580e-01 -1.33391544e-01] ... [-5.14995873e-01 -1.19772531e-01 -8.20849895e-01 ... -5.01826823e-01 -8.91154110e-01 -3.88468236e-01] [-3.20680439e-01 4.91143286e-01 -9.00178015e-01 ... 7.83838212e-01 4.20463711e-01 -8.98417771e-01] [-6.66822076e-01 5.97786844e-01 4.65725094e-01 ... 1.22719073e+00 3.18089537e-02 -2.28973895e-01]] [[ 8.72869730e-01 -1.19287670e+00 -2.55629152e-01 ... -1.57384777e+00 -8.76105070e-01 -3.61190945e-01] [ 4.03348356e-01 3.91716838e-01 1.13624656e+00 ... -1.35310936e+00 4.75569993e-01 8.87088656e-01] [-1.18991160e+00 4.23846811e-01 4.36113477e-01 ... 2.26592064e-01 1.17748290e-01 1.28088224e+00] ... [-4.89392370e-01 -1.47736037e+00 -5.44107974e-01 ... -6.84584618e-01 1.11540772e-01 6.52893484e-01] [-8.65231395e-01 4.33031768e-01 1.16319704e+00 ... 
3.57743144e-01 3.29577863e-01 -5.43073304e-02] [-9.37508702e-01 -2.07745880e-01 5.61881065e-01 ... 3.81887518e-02 -2.87142247e-01 -1.51390463e-01]] ... [[ 1.14123181e-01 -2.46319205e-01 -1.55265677e+00 ... 3.42430681e-01 3.95431966e-01 9.69800591e-01] [-1.94215322e+00 -1.12721562e-01 -2.36472040e-01 ... 1.66717160e+00 2.24473453e+00 -1.38234329e+00] [-1.02994435e-01 2.38859713e-01 4.06305164e-01 ... 9.80928019e-02 1.98106393e-01 -3.56181078e-02] ... [-6.32701397e-01 1.19990326e-01 8.15798044e-01 ... 4.44966257e-01 1.05677450e+00 -1.11391962e+00] [-1.06760585e+00 1.44253814e+00 -5.36324084e-01 ... -1.91020381e+00 4.81154740e-01 -8.48433822e-02] [ 5.45483708e-01 3.11009943e-01 -7.61495292e-01 ... 1.27579713e+00 -2.03647524e-01 1.15856931e-01]] [[ 1.34081268e+00 6.10053241e-01 -4.99231964e-01 ... 6.29953563e-01 8.90677348e-02 6.07141256e-01] [-4.40355211e-01 6.37777984e-01 -3.04457426e-01 ... 8.12224269e-01 -1.97331071e-01 1.63262725e-01] [ 3.29271168e-01 -1.20051789e+00 -1.22661805e+00 ... -4.36978519e-01 7.60865092e-01 9.01427329e-01] ... [-1.14446533e+00 -1.07149851e+00 -9.83211994e-01 ... 1.19050793e-01 -5.09559870e-01 -3.14907320e-02] [ 1.07927895e+00 7.68926442e-01 2.07738459e-01 ... -1.63461840e+00 -4.08484876e-01 8.27046156e-01] [ 5.92841923e-01 -3.81001562e-01 -2.74420440e-01 ... -1.40082383e+00 -4.32895303e-01 5.54907501e-01]] [[ 4.22058225e-01 1.12125337e+00 -2.45457381e-01 ... 7.09549963e-01 -3.62936229e-01 2.78596967e-01] [ 6.76648796e-01 -4.13078487e-01 4.78447378e-01 ... -1.19384363e-01 4.81120437e-01 7.89009809e-01] [ 3.91960353e-01 -9.43853140e-01 -1.75604418e-01 ... -5.50604582e-01 -4.46180493e-01 1.29593596e-01] ... [-4.25448298e-01 9.80821669e-01 2.84006596e-01 ... 1.27957091e-01 8.73998165e-01 1.33501697e+00] [-7.86302984e-03 -1.88625976e-01 -8.74092638e-01 ... -4.62031186e-01 1.10083614e-02 -1.28256273e+00] [ 6.22669995e-01 1.56275547e+00 -7.17745483e-01 ... 
9.03473318e-01 1.01640500e-01 -5.28077900e-01]]]]]; ov_res: [[[[[-5.67054510e-01 1.66725492e+00 3.53841841e-01 ... 2.05133826e-01 -5.76194763e-01 6.77842319e-01] [ 8.45157862e-01 2.51558218e-02 -5.08120716e-01 ... -8.40003014e-01 -1.09869623e+00 -6.03446305e-01] [-1.09269774e+00 4.33052368e-02 9.40864325e-01 ... -5.24173677e-01 -1.60565212e-01 -1.25210798e+00] ... [-7.40131587e-02 -1.60202122e+00 -7.80724704e-01 ... 3.57244968e-01 1.14651036e+00 7.87331641e-01] [ 2.09763741e+00 -4.14793700e-01 -9.76253986e-01 ... -3.93206686e-01 1.87439665e-01 -3.49531502e-01] [-4.60931033e-01 7.57306099e-01 2.80917436e-01 ... -1.67798042e+00 1.96926892e-01 1.34931756e-02]] [[ 1.09656525e+00 -1.57916677e+00 -1.94193006e-01 ... 1.00607908e+00 -1.24594879e+00 8.17276418e-01] [-5.55880487e-01 7.27988005e-01 3.51699263e-01 ... -1.52262318e+00 9.25043225e-01 1.26687574e+00] [-1.23342358e-01 -6.39441490e-01 1.58586824e+00 ... -6.05795562e-01 -1.72957408e+00 4.84928906e-01] ... [ 8.96769941e-01 4.23948258e-01 1.15017450e+00 ... 2.38355851e+00 4.71846789e-01 2.28865832e-01] [ 2.01191735e+00 7.97574580e-01 1.59829402e+00 ... 1.93050790e+00 -1.14092141e-01 -8.52302685e-02] [-6.15698397e-01 -1.68875203e-01 -1.05591014e-01 ... -1.33187532e+00 -2.37148666e+00 -8.72031510e-01]] [[ 2.71534845e-02 -1.33254454e-01 3.08949798e-01 ... 8.40940118e-01 2.08501741e-01 2.80316234e-01] [-2.25450203e-01 1.56171489e+00 4.66001272e-01 ... 3.71308267e-01 -2.86618590e-01 5.76004803e-01] [-3.78205866e-01 -6.25458807e-02 5.13570309e-01 ... -1.13314437e-02 1.21340477e+00 6.17387176e-01] ... [ 2.76130736e-01 1.03276753e+00 -1.53566658e+00 ... 2.05030799e+00 -4.89496514e-02 3.51642519e-01] [ 1.25564680e-01 1.69266391e+00 1.55911148e+00 ... -1.73713529e+00 2.59743422e-01 -4.58468854e-01] [ 1.11799920e+00 1.36338878e+00 4.27268744e-01 ... 2.09069028e-01 -4.02704090e-01 -1.53182530e+00]] ... [[-3.74676496e-01 -3.80522698e-01 6.24187171e-01 ... 
-1.12155628e+00 -4.04645085e-01 3.80456865e-01] [-7.04502881e-01 5.62071085e-01 9.51108396e-01 ... 1.05589211e+00 -5.36793172e-01 8.65741372e-01] [-1.10621011e+00 1.04762204e-01 6.82884514e-01 ... -1.31203544e+00 5.83753467e-01 1.49174377e-01] ... [ 5.29899523e-02 -2.39304170e-01 -1.43668443e-01 ... -1.41370904e+00 8.25020075e-01 4.64985579e-01] [-7.67332539e-02 -5.32928228e-01 9.42157388e-01 ... -1.10565376e+00 1.68691802e+00 1.34496117e+00] [ 6.59595847e-01 -6.10041022e-01 1.48077801e-01 ... -7.56894231e-01 9.16802958e-02 6.04949355e-01]] [[ 2.12764412e-01 4.02009308e-01 -2.05859661e+00 ... 3.14600539e+00 8.25115323e-01 -3.55636850e-02] [-4.44348037e-01 -4.48568583e-01 7.53235891e-02 ... -1.87772036e-01 1.32732701e+00 -8.43990564e-01] [-4.71080653e-02 2.84762859e-01 -6.81378484e-01 ... 2.73097301e+00 -3.04940432e-01 9.82117534e-01] ... [-6.89329281e-02 -6.42684877e-01 2.38439828e-01 ... -1.92023352e-01 5.40414751e-01 1.75882566e+00] [-2.87378691e-02 -1.58667946e+00 -1.54065281e-01 ... 4.43516523e-01 6.16510332e-01 -5.57661831e-01] [ 9.47441280e-01 1.04888391e+00 -4.44376975e-01 ... -1.17585087e+00 4.81656462e-01 -5.51430702e-01]] [[-6.46949708e-01 -8.48407865e-01 -1.01385677e+00 ... -8.72380972e-01 1.20217693e+00 1.08711493e+00] [-3.61755379e-02 -1.08704293e+00 5.58850408e-01 ... 2.21631661e-01 -1.06117809e+00 -9.56025302e-01] [-2.35492349e-01 8.82690251e-01 1.20826766e-01 ... -1.32780373e-01 3.38424295e-01 1.73194185e-01] ... [ 8.56207609e-01 -8.76559988e-02 -9.94771004e-01 ... -6.86590016e-01 -7.38402233e-02 7.05119222e-02] [-1.50760996e+00 -7.02923059e-01 -1.15353074e-02 ... 1.85051882e+00 2.67022681e+00 1.38784540e+00] [ 5.64165190e-02 1.59209538e+00 1.08693779e+00 ... -1.50952530e+00 -9.36932564e-01 2.83534050e-01]]] [[[ 9.26204473e-02 4.39018123e-02 9.69350897e-03 ... -8.23472962e-02 3.09063680e-02 -3.58093642e-02] [-1.76613387e-02 -8.91328529e-02 -3.31916735e-02 ... 
2.43966561e-02 -1.30860331e-02 -1.74376070e-02] [-1.44694513e-02 5.63837923e-02 5.31394146e-02 ... -6.00650124e-02 2.46680174e-02 -4.49287258e-02] ... [ 4.56980914e-02 -2.19727773e-02 -1.55907094e-01 ... 5.15543995e-03 1.27531543e-01 2.95442622e-02] [-9.88590345e-02 -2.92668566e-02 -6.11071587e-02 ... 3.04969940e-02 -2.61630286e-02 -4.96586822e-02] [ 1.16866723e-01 -1.24682086e-02 -1.49820615e-02 ... 1.63661949e-02 5.87341897e-02 -2.37348285e-02]] [[ 3.04368604e-02 -4.54008430e-02 7.56744342e-03 ... -4.27367389e-02 -5.69703430e-02 -5.21216281e-02] [ 1.54584972e-02 3.40328552e-02 9.88873933e-03 ... -7.79129192e-02 8.17836374e-02 -1.10789975e-02] [ 3.92120369e-02 -5.55649176e-02 1.95419509e-02 ... -6.30203187e-02 -1.06425442e-01 5.43106496e-02] ... [-1.06407329e-02 -1.13935232e-01 9.20731798e-02 ... 2.03427505e-02 -4.89456989e-02 -8.54161829e-02] [-3.62904416e-03 8.98571238e-02 8.59702975e-02 ... 9.47000906e-02 -2.96230856e-02 1.62617527e-02] [ 1.77411232e-02 2.22629048e-02 1.99718233e-02 ... -2.69501135e-02 -4.24055569e-02 1.62714161e-03]] [[-7.34069273e-02 3.70027199e-02 -6.90905526e-02 ... -1.31556079e-01 5.11948839e-02 1.06057324e-01] [ 2.44790632e-02 1.82546191e-02 9.72630009e-02 ... -5.42785926e-03 3.62687372e-02 -5.97992912e-02] [-1.03332259e-01 -9.03331675e-03 1.84432026e-02 ... 1.01564027e-01 -4.03734930e-02 -1.00147516e-01] ... [-7.32817873e-02 -5.30639254e-02 -2.80737337e-02 ... -2.14242190e-02 -8.42920095e-02 -9.71069112e-02] [ 5.26091270e-02 7.66774788e-02 -7.55181685e-02 ... -7.51135871e-02 -3.54677700e-02 -2.79208627e-02] [ 2.91688070e-02 -2.68416014e-02 -8.41023459e-04 ... -5.75752333e-02 5.12460172e-02 2.68269014e-02]] ... [[ 5.85023966e-03 -2.78429091e-02 -6.39546383e-03 ... -1.05588265e-01 -3.65795717e-02 -2.05295980e-02] [ 8.20309818e-02 -1.07398540e-01 4.25475389e-02 ... -3.82872485e-02 -3.03792451e-02 2.68110856e-02] [ 8.62625148e-03 4.39170003e-02 5.21728918e-02 ... 3.30536366e-02 -3.35958339e-02 1.83013484e-01] ... 
[-8.27567931e-03 -1.99754001e-03 -3.49592157e-02 ... -3.32508832e-02 -3.62275429e-02 8.53343382e-02] [-2.60396861e-02 -2.03170143e-02 7.82870948e-02 ... -3.75107303e-02 -2.77751666e-02 -8.22516754e-02] [-3.60392593e-02 1.44464197e-02 -5.19330949e-02 ... -1.04112327e-01 -4.64900024e-02 4.93888594e-02]] [[ 2.21518185e-02 4.25879657e-02 -1.45729317e-03 ... 6.31795898e-02 4.79295813e-02 1.15367062e-01] [-6.30839467e-02 -4.63846363e-02 5.06612733e-02 ... -2.50299722e-02 2.62049828e-02 -1.16316741e-02] [ 5.11862598e-02 -4.16276092e-03 -6.68291524e-02 ... -1.56206667e-01 6.99779950e-03 -4.34687063e-02] ... [ 7.24595459e-03 -1.44748392e-05 -2.05791611e-02 ... -5.38374344e-03 -1.14899520e-02 -2.21791826e-02] [ 2.19593216e-02 2.98097730e-02 -1.57012008e-02 ... 3.46094705e-02 -1.39426962e-01 1.75311100e-02] [-1.29933357e-01 -3.48103978e-02 9.14780647e-02 ... -6.17491789e-02 1.53263986e-01 7.55308867e-02]] [[ 2.70579141e-02 1.84972733e-02 2.92671788e-02 ... 3.46166790e-02 -1.60219949e-02 5.90914376e-02] [ 1.20204389e-02 1.31367698e-01 7.86921456e-02 ... -6.28743768e-02 -1.92204416e-02 1.80452019e-01] [ 8.02319273e-02 -1.90080851e-02 5.15965968e-02 ... 4.11378592e-02 -2.31196787e-02 -5.34031875e-02] ... [-9.68849137e-02 5.12819476e-02 -9.03921947e-02 ... -4.53966632e-02 -1.47180587e-01 -5.19418232e-02] [-3.54292952e-02 3.93831953e-02 -3.49468514e-02 ... -7.69611448e-02 -6.98662028e-02 -5.56697138e-02] [ 4.11413610e-02 6.21868894e-02 -3.79134389e-03 ... -1.40005145e-02 -3.00923474e-02 -2.15224363e-02]]] [[[-5.09906232e-01 3.24482709e-01 -4.96395648e-01 ... -8.55576932e-01 9.03435051e-02 1.05835986e+00] [-5.91136813e-01 9.78366613e-01 -6.00696027e-01 ... 3.85790691e-02 6.13839030e-01 -9.51681256e-01] [ 1.06918907e+00 -1.28244385e-01 -8.69081736e-01 ... 1.68549657e-01 1.62518704e+00 2.27542496e+00] ... [-4.84044760e-01 9.36795324e-02 5.55334032e-01 ... -9.50787142e-02 9.08902943e-01 -9.17360783e-01] [-9.59450364e-01 1.17771733e+00 -9.87978041e-01 ... 
2.00022483e+00 -6.90776527e-01 1.12695359e-01] [ 1.48673463e+00 -4.99571294e-01 -6.97557151e-01 ... 2.12124452e-01 1.56808615e+00 6.03240728e-01]] [[-1.19836740e-01 -3.74231547e-01 8.53431821e-02 ... -3.68976355e-01 -1.42613217e-01 -3.10389608e-01] [ 9.34032559e-01 1.14374124e-02 -5.63420653e-01 ... 5.51618077e-02 7.03714252e-01 -3.37016195e-01] [ 1.15265167e+00 3.36745232e-01 -2.23845784e-02 ... -2.95952737e-01 3.67606580e-01 -1.33391544e-01] ... [-5.14995873e-01 -1.19772531e-01 -8.20849895e-01 ... -5.01826823e-01 -8.91154110e-01 -3.88468236e-01] [-3.20680439e-01 4.91143286e-01 -9.00178015e-01 ... 7.83838212e-01 4.20463711e-01 -8.98417771e-01] [-6.66822076e-01 5.97786844e-01 4.65725094e-01 ... 1.22719073e+00 3.18089537e-02 -2.28973895e-01]] [[ 8.72869730e-01 -1.19287670e+00 -2.55629152e-01 ... -1.57384777e+00 -8.76105070e-01 -3.61190945e-01] [ 4.03348356e-01 3.91716838e-01 1.13624656e+00 ... -1.35310936e+00 4.75569993e-01 8.87088656e-01] [-1.18991160e+00 4.23846811e-01 4.36113477e-01 ... 2.26592064e-01 1.17748290e-01 1.28088224e+00] ... [-4.89392370e-01 -1.47736037e+00 -5.44107974e-01 ... -6.84584618e-01 1.11540772e-01 6.52893484e-01] [-8.65231395e-01 4.33031768e-01 1.16319704e+00 ... 3.57743144e-01 3.29577863e-01 -5.43073304e-02] [-9.37508702e-01 -2.07745880e-01 5.61881065e-01 ... 3.81887518e-02 -2.87142247e-01 -1.51390463e-01]] ... [[ 1.14123181e-01 -2.46319205e-01 -1.55265677e+00 ... 3.42430681e-01 3.95431966e-01 9.69800591e-01] [-1.94215322e+00 -1.12721562e-01 -2.36472040e-01 ... 1.66717160e+00 2.24473453e+00 -1.38234329e+00] [-1.02994435e-01 2.38859713e-01 4.06305164e-01 ... 9.80928019e-02 1.98106393e-01 -3.56181078e-02] ... [-6.32701397e-01 1.19990326e-01 8.15798044e-01 ... 4.44966257e-01 1.05677450e+00 -1.11391962e+00] [-1.06760585e+00 1.44253814e+00 -5.36324084e-01 ... -1.91020381e+00 4.81154740e-01 -8.48433822e-02] [ 5.45483708e-01 3.11009943e-01 -7.61495292e-01 ... 
1.27579713e+00 -2.03647524e-01 1.15856931e-01]] [[ 1.34081268e+00 6.10053241e-01 -4.99231964e-01 ... 6.29953563e-01 8.90677348e-02 6.07141256e-01] [-4.40355211e-01 6.37777984e-01 -3.04457426e-01 ... 8.12224269e-01 -1.97331071e-01 1.63262725e-01] [ 3.29271168e-01 -1.20051789e+00 -1.22661805e+00 ... -4.36978519e-01 7.60865092e-01 9.01427329e-01] ... [-1.14446533e+00 -1.07149851e+00 -9.83211994e-01 ... 1.19050793e-01 -5.09559870e-01 -3.14907320e-02] [ 1.07927895e+00 7.68926442e-01 2.07738459e-01 ... -1.63461840e+00 -4.08484876e-01 8.27046156e-01] [ 5.92841923e-01 -3.81001562e-01 -2.74420440e-01 ... -1.40082383e+00 -4.32895303e-01 5.54907501e-01]] [[ 4.22058225e-01 1.12125337e+00 -2.45457381e-01 ... 7.09549963e-01 -3.62936229e-01 2.78596967e-01] [ 6.76648796e-01 -4.13078487e-01 4.78447378e-01 ... -1.19384363e-01 4.81120437e-01 7.89009809e-01] [ 3.91960353e-01 -9.43853140e-01 -1.75604418e-01 ... -5.50604582e-01 -4.46180493e-01 1.29593596e-01] ... [-4.25448298e-01 9.80821669e-01 2.84006596e-01 ... 1.27957091e-01 8.73998165e-01 1.33501697e+00] [-7.86302984e-03 -1.88625976e-01 -8.74092638e-01 ... -4.62031186e-01 1.10083614e-02 -1.28256273e+00] [ 6.22669995e-01 1.56275547e+00 -7.17745483e-01 ... 9.03473318e-01 1.01640500e-01 -5.28077900e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': 'valid', 'dilations': [1, 1, 1], 'groups': 3} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1151.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.2252 (2,1,1,.,.) = -0.6414 (3,1,1,.,.) = 0.1335 [ CPUFloatType{3,1,1,1,1} ]]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[[-2.46704713e-01 -1.96535081e-01 4.73109484e-02 ... 1.50884539e-01 1.70951471e-01 3.17560673e-01] [ 1.29072234e-01 -2.31247425e-01 -2.84611166e-01 ... -5.30886184e-03 -4.25415099e-01 -4.30237502e-02] [ 6.84540808e-01 1.57357380e-01 2.97120810e-01 ... -4.10280943e-01 -4.62633222e-01 2.97486961e-01] ... [-5.95775060e-02 -3.83219868e-02 -3.99125516e-02 ... 8.39184746e-02 2.46383712e-01 1.59239814e-01] [-1.54392406e-01 -1.52384400e-01 8.92234147e-02 ... 2.38022372e-01 -2.50878364e-01 9.86548439e-02] [-4.10938382e-01 6.25380054e-02 7.27175251e-02 ... -3.14070992e-02 2.98074484e-02 1.46899804e-01]] [[ 2.87449181e-01 1.03734687e-01 3.07450652e-01 ... -1.42829731e-01 9.85330120e-02 3.77659202e-02] [ 1.25987470e-01 -1.38022661e-01 -2.33589590e-01 ... 4.20434266e-01 -1.08184970e-04 -8.51933807e-02] [ 1.71567023e-01 -7.29249865e-02 -1.74193352e-01 ... 1.23859279e-01 -2.51696169e-01 1.78161860e-01] ... [ 6.02137782e-02 3.17439318e-01 6.93125725e-02 ... -6.25989884e-02 -2.04544067e-01 -6.65744841e-02] [-8.56542960e-02 1.65695861e-01 2.42454275e-01 ... -1.14275068e-01 -2.63152458e-02 -3.71975243e-01] [ 3.27841580e-01 7.77809620e-02 1.79853499e-01 ... 
1.44506529e-01 2.31000975e-01 -3.76025140e-01]] [[ 2.54513592e-01 9.59704965e-02 2.72997350e-01 ... -8.12880024e-02 3.15659285e-01 1.39203697e-01] [ 2.19522297e-01 -1.18379809e-01 9.10939462e-03 ... 9.43785906e-02 2.56247483e-02 1.28119841e-01] [ 8.50130245e-03 -1.25248641e-01 -1.80721641e-01 ... -9.10853446e-02 4.54123318e-02 -2.95924127e-01] ... [-3.43995392e-01 -3.15359386e-04 -3.08911443e-01 ... 4.42944676e-01 -1.80125892e-01 4.32879180e-01] [-3.37882698e-01 -1.07229307e-01 -1.03323236e-01 ... 6.05027489e-02 -1.77370042e-01 -4.02282834e-01] [-2.27642059e-02 1.28852084e-01 3.98081183e-01 ... 4.96083451e-03 -5.29067330e-02 -1.19652428e-01]] ... [[-7.01365108e-03 -8.47829953e-02 -1.60216719e-01 ... 7.69813478e-01 -8.93694460e-02 -1.18402176e-01] [ 2.64947087e-01 -1.25538697e-02 -3.35608780e-01 ... -4.76637110e-02 3.41247857e-01 -3.54028190e-03] [ 2.45900348e-01 3.39552313e-01 -2.39202306e-02 ... 1.15832694e-01 1.58692449e-01 1.85958087e-01] ... [ 1.58860460e-01 -1.13739036e-01 5.27311638e-02 ... -3.12812418e-01 6.51513338e-02 2.14947656e-01] [-3.50175314e-02 -3.99687022e-01 6.84346035e-02 ... 1.91483870e-01 -3.30359936e-02 -8.27822611e-02] [-1.55220836e-01 4.27620649e-01 -8.02993681e-03 ... -4.72216815e-01 -1.03078336e-01 -1.19645543e-01]] [[ 3.00987273e-01 1.95052534e-01 -1.20815281e-02 ... 1.02000505e-01 -7.10218176e-02 -5.22685908e-02] [-7.94859156e-02 2.58188576e-01 7.59356143e-03 ... 1.65602639e-01 2.37023264e-01 -3.59255403e-01] [-8.79288018e-02 1.11596078e-01 -1.13834869e-02 ... -2.16815889e-01 -1.86975915e-02 -4.57406372e-01] ... [ 2.58776098e-01 -1.04079522e-01 2.90187180e-01 ... -1.87666893e-01 -1.46066695e-01 -1.63544193e-01] [-1.95487425e-01 4.39373791e-01 -1.75077274e-01 ... 9.14861634e-02 -8.22696164e-02 1.70560241e-01] [-1.85589314e-01 2.98880011e-01 -1.54021397e-01 ... 1.99654289e-02 8.90254602e-02 1.15581483e-01]] [[ 2.22437337e-01 3.04081291e-01 -2.65900850e-01 ... 
-1.62900910e-01 3.01495820e-01 2.99190313e-01] [ 5.44749498e-02 -1.58590049e-01 4.06223536e-01 ... -9.10093039e-02 7.92148933e-02 5.85743971e-02] [-4.23891693e-02 -1.92172647e-01 1.09732032e-01 ... 8.86995159e-03 9.58198681e-02 6.52296841e-02] ... [ 4.06682730e-01 4.56829578e-01 -3.61444622e-01 ... -4.65658717e-02 4.92432415e-01 3.65259916e-01] [-1.97018012e-01 7.73115084e-02 7.35629536e-03 ... 1.38836592e-01 3.12657326e-01 -3.17798048e-01] [-1.94798470e-01 4.43321913e-02 -4.30580378e-01 ... 1.51288122e-01 -1.39935866e-01 -2.37852678e-01]]] [[[ 3.69474590e-01 -2.97069281e-01 3.05191934e-01 ... -3.81325990e-01 -1.22081149e+00 6.25869393e-01] [ 1.54799461e-01 5.76131642e-01 -4.99145001e-01 ... -4.22998339e-01 -1.85102195e-01 4.95722950e-01] [-4.50669080e-01 2.91250855e-01 -8.56696665e-01 ... 7.30334878e-01 -5.78213573e-01 -3.22586507e-01] ... [-8.40389669e-01 -1.83927047e+00 1.58267654e-02 ... 1.22728145e+00 5.27423441e-01 -4.85580504e-01] [-4.29159880e-01 9.02571023e-01 3.18517506e-01 ... 4.34179664e-01 -9.23934281e-02 5.70605338e-01] [ 5.07176578e-01 -1.06193051e-01 3.62791181e-01 ... -6.48393571e-01 -3.98302406e-01 -8.02780390e-02]] [[ 5.47581017e-01 -2.33172804e-01 4.41878349e-01 ... -2.41919920e-01 7.20358491e-01 3.07650626e-01] [ 1.01276129e-01 -1.70481116e-01 -2.71478981e-01 ... 1.52760282e-01 1.93526417e-01 3.73754382e-01] [ 4.26720917e-01 3.27291340e-01 1.18532553e-01 ... 9.90429401e-01 1.46515036e+00 -1.80634224e+00] ... [-2.94679612e-01 5.06576538e-01 7.23053873e-01 ... 9.99636352e-01 7.26782065e-03 8.22471023e-01] [ 1.19015503e+00 -5.44685498e-02 -3.81434202e-01 ... 2.74217486e-01 -7.90142119e-01 6.60667419e-01] [-6.16075337e-01 -1.29314137e+00 -1.42255977e-01 ... -9.79179889e-02 -2.95785606e-01 6.39682174e-01]] [[-3.77840906e-01 -4.47589368e-01 -5.27454853e-01 ... 3.17796737e-01 7.77034909e-02 -4.50267404e-01] [ 6.66082263e-01 -1.67261928e-01 7.82961547e-02 ... 
4.61464912e-01 6.83728039e-01 1.07437320e-01] [ 9.68782604e-02 3.91902447e-01 -1.43335998e-01 ... -5.77413201e-01 5.02304018e-01 -6.52699053e-01] ... [ 7.06115127e-01 1.45063743e-01 1.27607870e+00 ... -2.68228590e-01 7.96995997e-01 -6.79278135e-01] [ 4.96309578e-01 7.94296324e-01 -4.81212765e-01 ... 3.12191546e-01 -1.87766135e-01 -8.61564815e-01] [ 3.96934837e-01 -4.39811975e-01 -1.47017574e+00 ... 5.12042046e-01 6.44742310e-01 -1.23267615e+00]] ... [[ 1.90088227e-01 -6.91017628e-01 2.52251536e-01 ... -2.29465142e-02 -4.39877927e-01 5.90913832e-01] [ 2.67097980e-01 1.60761726e+00 3.84705395e-01 ... 1.28427207e+00 2.33835131e-01 2.39167109e-01] [-9.47760344e-01 -1.87239528e-01 -3.39988828e-01 ... 6.57329798e-01 -8.03724527e-01 -7.47469425e-01] ... [ 4.37013954e-01 -3.95900786e-01 -4.20854241e-01 ... 6.69920027e-01 1.85611919e-01 -1.66927218e-01] [-1.41228274e-01 -7.94515789e-01 3.73906612e-01 ... 6.12033606e-01 7.83981264e-01 -6.16791487e-01] [-1.47079915e-01 4.90375282e-03 -2.60182798e-01 ... 3.75001729e-01 8.30591977e-01 -1.54438084e-02]] [[-1.01982975e+00 3.39926146e-02 8.58378351e-01 ... -1.80201709e+00 -1.00095046e+00 5.50662726e-02] [ 2.69189328e-01 1.67290390e-01 -8.84904116e-02 ... -1.71787620e-01 8.51123929e-01 4.52684432e-01] [ 4.34996188e-02 -7.48039842e-01 2.20337585e-01 ... 1.40053466e-01 -1.35613218e-01 1.25774765e+00] ... [ 4.95161355e-01 -1.64783180e+00 5.76686144e-01 ... 4.12908256e-01 1.58615157e-01 -1.58519611e-01] [ 4.85009968e-01 4.60094899e-01 5.85761726e-01 ... 1.65325478e-01 -3.64044219e-01 5.91435969e-01] [ 6.47125363e-01 -4.08077717e-01 -4.20295179e-01 ... 1.16404593e-01 -1.92657053e-01 5.60561717e-01]] [[ 4.42441218e-02 6.98009431e-01 -2.23790437e-01 ... 7.23974705e-01 -5.42401791e-01 -8.49283814e-01] [ 1.30076662e-01 -2.28947774e-01 1.19653344e+00 ... 4.68130350e-01 -1.02609761e-01 -2.24882334e-01] [ 1.20796251e+00 3.20136458e-01 -9.69060436e-02 ... -4.43942428e-01 3.95135850e-01 -2.49306798e-01] ... 
[ 1.21710114e-01 -1.27669975e-01 -3.30566466e-01 ... -4.97223735e-02 -1.16573310e+00 1.39607334e+00] [ 1.08614659e+00 1.23125005e+00 -2.48282224e-01 ... -6.00890398e-01 -1.07931316e-01 -4.46876168e-01] [-4.79854941e-01 9.47132766e-01 -2.19242737e-01 ... -6.44503385e-02 1.10804379e+00 -2.00861230e-01]]] [[[ 3.45082395e-02 -3.69183086e-02 1.73676964e-02 ... -2.21406490e-01 4.25356068e-02 1.77293923e-02] [ 5.79273067e-02 -1.44007847e-01 8.44415948e-02 ... -1.61650687e-01 -2.03991443e-01 7.88436979e-02] [-7.38777891e-02 2.56350189e-02 5.74963577e-02 ... -9.41728055e-02 9.90354940e-02 1.15672618e-01] ... [-1.92413047e-01 5.08326478e-02 1.42462049e-02 ... -8.79609138e-02 2.16995664e-02 -1.23661742e-01] [ 1.05074205e-01 3.07123214e-02 7.14172376e-03 ... -1.56869605e-01 -1.32825807e-01 -1.34731784e-01] [-1.39743343e-01 8.49216282e-02 1.99758917e-01 ... -5.35614155e-02 -5.21484390e-02 1.57526746e-01]] [[-3.27882543e-02 -2.45713428e-01 1.53233498e-01 ... -1.46166727e-01 1.20507918e-01 -3.92856784e-02] [ 1.71557948e-01 2.59298414e-01 -1.69057027e-02 ... -2.14798287e-01 -2.36572936e-01 1.21680491e-01] [ 5.01501523e-02 1.34975955e-01 6.90761488e-03 ... 1.91196345e-03 2.97885071e-02 5.75762689e-02] ... [-3.11755538e-02 -1.00571275e-01 -1.30938485e-01 ... -6.94015101e-02 -1.01292267e-01 2.37848684e-01] [ 1.73024610e-02 -6.18217811e-02 1.98394269e-01 ... 8.86930525e-02 1.50179684e-01 -1.35578081e-01] [ 1.06383629e-01 7.46548325e-02 1.31430730e-01 ... -5.80767132e-02 4.44453880e-02 -3.29055011e-01]] [[-8.56663287e-02 1.36928737e-01 -1.69255450e-01 ... 5.25081493e-02 -1.10824637e-01 2.58444726e-01] [ 8.85310918e-02 1.14038408e-01 2.79434565e-02 ... 8.22946206e-02 -1.00329228e-01 4.50845920e-02] [-8.13367292e-02 -1.51237443e-01 -1.13353811e-01 ... 2.94083238e-01 -1.50481179e-01 1.67536840e-01] ... [ 2.97218300e-02 -7.02943131e-02 -7.91633204e-02 ... 2.11535990e-01 -6.59232363e-02 3.52209061e-02] [-1.18934475e-01 6.39733896e-02 -3.67716908e-01 ... 
-2.63189990e-02 -1.21225074e-01 -8.15312862e-02] [ 1.02951251e-01 1.64217338e-01 2.86389768e-01 ... -3.41693461e-02 2.12594748e-01 -1.18313231e-01]] ... [[ 2.39259005e-02 1.38194442e-01 -3.13630030e-02 ... 1.12480335e-01 -1.13449454e-01 -3.30141224e-02] [ 2.69117147e-01 3.11398655e-01 2.28611842e-01 ... -2.80060798e-01 1.05875533e-03 5.21333218e-02] [-4.81631756e-02 1.63266122e-01 -1.10723712e-01 ... 7.20352009e-02 1.54469788e-01 1.74072981e-01] ... [ 1.71076074e-01 4.27484177e-02 -1.17541291e-01 ... -9.29421484e-02 1.06242776e-01 -9.42229778e-02] [ 9.38095748e-02 -8.56623277e-02 8.90652016e-02 ... -9.44202691e-02 7.46174774e-04 1.05795577e-01] [-4.46290933e-02 1.23874001e-01 -6.76745176e-02 ... 7.62136430e-02 4.83550131e-02 1.39701724e-01]] [[ 1.27156511e-01 5.13366945e-02 -4.17757854e-02 ... -4.99247238e-02 5.23906648e-02 1.08710162e-01] [-1.41661301e-01 -5.11699878e-02 -5.61652659e-03 ... -9.53892767e-02 2.27687526e-02 -1.57963470e-01] [ 4.89934012e-02 2.00042590e-01 -7.33989552e-02 ... -3.56478244e-02 -7.28389695e-02 -1.02069555e-02] ... [-8.93342867e-03 7.82107785e-02 -2.70834621e-02 ... 1.45798624e-01 -1.25504076e-01 2.53679365e-01] [ 2.61648536e-01 8.56241584e-02 -1.99692711e-01 ... 1.50604740e-01 -2.92465597e-01 -1.25367239e-01] [-6.54297918e-02 -8.31398275e-03 -7.69480914e-02 ... -4.86008078e-01 9.53746587e-02 2.49329302e-02]] [[ 3.87856811e-02 3.18378896e-01 1.99786857e-01 ... 1.51403397e-01 -1.12528041e-01 1.66843414e-01] [-1.01756066e-01 1.30918249e-01 -5.16722091e-02 ... -3.08889095e-02 -1.16800517e-01 2.16306701e-01] [ 9.08289254e-02 -7.06630796e-02 4.90218736e-02 ... 1.55998513e-01 -1.12401336e-01 -1.68099910e-01] ... [ 8.05206448e-02 1.28351480e-01 -1.76674724e-01 ... 6.55276887e-03 1.55070707e-01 3.79390419e-02] [-6.34055212e-02 -2.34554950e-02 -8.01805500e-03 ... -1.72463998e-01 -2.76296549e-02 -5.15814424e-02] [-3.13689113e-02 -4.98336507e-03 -2.33831033e-01 ... 
6.36736295e-05 -7.38699809e-02 3.86312082e-02]]]]]; ov_res: [[[[[-2.46704713e-01 -1.96535081e-01 4.73109484e-02 ... 1.50884539e-01 1.70951471e-01 3.17560673e-01] [ 1.29072234e-01 -2.31247425e-01 -2.84611166e-01 ... -5.30886184e-03 -4.25415099e-01 -4.30237502e-02] [ 6.84540808e-01 1.57357380e-01 2.97120810e-01 ... -4.10280943e-01 -4.62633222e-01 2.97486961e-01] ... [-5.95775060e-02 -3.83219868e-02 -3.99125516e-02 ... 8.39184746e-02 2.46383712e-01 1.59239814e-01] [-1.54392406e-01 -1.52384400e-01 8.92234147e-02 ... 2.38022372e-01 -2.50878364e-01 9.86548439e-02] [-4.10938382e-01 6.25380054e-02 7.27175251e-02 ... -3.14070992e-02 2.98074484e-02 1.46899804e-01]] [[ 2.87449181e-01 1.03734687e-01 3.07450652e-01 ... -1.42829731e-01 9.85330120e-02 3.77659202e-02] [ 1.25987470e-01 -1.38022661e-01 -2.33589590e-01 ... 4.20434266e-01 -1.08184970e-04 -8.51933807e-02] [ 1.71567023e-01 -7.29249865e-02 -1.74193352e-01 ... 1.23859279e-01 -2.51696169e-01 1.78161860e-01] ... [ 6.02137782e-02 3.17439318e-01 6.93125725e-02 ... -6.25989884e-02 -2.04544067e-01 -6.65744841e-02] [-8.56542960e-02 1.65695861e-01 2.42454275e-01 ... -1.14275068e-01 -2.63152458e-02 -3.71975243e-01] [ 3.27841580e-01 7.77809620e-02 1.79853499e-01 ... 1.44506529e-01 2.31000975e-01 -3.76025140e-01]] [[ 2.54513592e-01 9.59704965e-02 2.72997350e-01 ... -8.12880024e-02 3.15659285e-01 1.39203697e-01] [ 2.19522297e-01 -1.18379809e-01 9.10939462e-03 ... 9.43785906e-02 2.56247483e-02 1.28119841e-01] [ 8.50130245e-03 -1.25248641e-01 -1.80721641e-01 ... -9.10853446e-02 4.54123318e-02 -2.95924127e-01] ... [-3.43995392e-01 -3.15359386e-04 -3.08911443e-01 ... 4.42944676e-01 -1.80125892e-01 4.32879180e-01] [-3.37882698e-01 -1.07229307e-01 -1.03323236e-01 ... 6.05027489e-02 -1.77370042e-01 -4.02282834e-01] [-2.27642059e-02 1.28852084e-01 3.98081183e-01 ... 4.96083451e-03 -5.29067330e-02 -1.19652428e-01]] ... [[-7.01365108e-03 -8.47829953e-02 -1.60216719e-01 ... 
7.69813478e-01 -8.93694460e-02 -1.18402176e-01] [ 2.64947087e-01 -1.25538697e-02 -3.35608780e-01 ... -4.76637110e-02 3.41247857e-01 -3.54028190e-03] [ 2.45900348e-01 3.39552313e-01 -2.39202306e-02 ... 1.15832694e-01 1.58692449e-01 1.85958087e-01] ... [ 1.58860460e-01 -1.13739036e-01 5.27311638e-02 ... -3.12812418e-01 6.51513338e-02 2.14947656e-01] [-3.50175314e-02 -3.99687022e-01 6.84346035e-02 ... 1.91483870e-01 -3.30359936e-02 -8.27822611e-02] [-1.55220836e-01 4.27620649e-01 -8.02993681e-03 ... -4.72216815e-01 -1.03078336e-01 -1.19645543e-01]] [[ 3.00987273e-01 1.95052534e-01 -1.20815281e-02 ... 1.02000505e-01 -7.10218176e-02 -5.22685908e-02] [-7.94859156e-02 2.58188576e-01 7.59356143e-03 ... 1.65602639e-01 2.37023264e-01 -3.59255403e-01] [-8.79288018e-02 1.11596078e-01 -1.13834869e-02 ... -2.16815889e-01 -1.86975915e-02 -4.57406372e-01] ... [ 2.58776098e-01 -1.04079522e-01 2.90187180e-01 ... -1.87666893e-01 -1.46066695e-01 -1.63544193e-01] [-1.95487425e-01 4.39373791e-01 -1.75077274e-01 ... 9.14861634e-02 -8.22696164e-02 1.70560241e-01] [-1.85589314e-01 2.98880011e-01 -1.54021397e-01 ... 1.99654289e-02 8.90254602e-02 1.15581483e-01]] [[ 2.22437337e-01 3.04081291e-01 -2.65900850e-01 ... -1.62900910e-01 3.01495820e-01 2.99190313e-01] [ 5.44749498e-02 -1.58590049e-01 4.06223536e-01 ... -9.10093039e-02 7.92148933e-02 5.85743971e-02] [-4.23891693e-02 -1.92172647e-01 1.09732032e-01 ... 8.86995159e-03 9.58198681e-02 6.52296841e-02] ... [ 4.06682730e-01 4.56829578e-01 -3.61444622e-01 ... -4.65658717e-02 4.92432415e-01 3.65259916e-01] [-1.97018012e-01 7.73115084e-02 7.35629536e-03 ... 1.38836592e-01 3.12657326e-01 -3.17798048e-01] [-1.94798470e-01 4.43321913e-02 -4.30580378e-01 ... 1.51288122e-01 -1.39935866e-01 -2.37852678e-01]]] [[[ 3.69474590e-01 -2.97069281e-01 3.05191934e-01 ... -3.81325990e-01 -1.22081149e+00 6.25869393e-01] [ 1.54799461e-01 5.76131642e-01 -4.99145001e-01 ... 
-4.22998339e-01 -1.85102195e-01 4.95722950e-01] [-4.50669080e-01 2.91250855e-01 -8.56696665e-01 ... 7.30334878e-01 -5.78213573e-01 -3.22586507e-01] ... [-8.40389669e-01 -1.83927047e+00 1.58267654e-02 ... 1.22728145e+00 5.27423441e-01 -4.85580504e-01] [-4.29159880e-01 9.02571023e-01 3.18517506e-01 ... 4.34179664e-01 -9.23934281e-02 5.70605338e-01] [ 5.07176578e-01 -1.06193051e-01 3.62791181e-01 ... -6.48393571e-01 -3.98302406e-01 -8.02780390e-02]] [[ 5.47581017e-01 -2.33172804e-01 4.41878349e-01 ... -2.41919920e-01 7.20358491e-01 3.07650626e-01] [ 1.01276129e-01 -1.70481116e-01 -2.71478981e-01 ... 1.52760282e-01 1.93526417e-01 3.73754382e-01] [ 4.26720917e-01 3.27291340e-01 1.18532553e-01 ... 9.90429401e-01 1.46515036e+00 -1.80634224e+00] ... [-2.94679612e-01 5.06576538e-01 7.23053873e-01 ... 9.99636352e-01 7.26782065e-03 8.22471023e-01] [ 1.19015503e+00 -5.44685498e-02 -3.81434202e-01 ... 2.74217486e-01 -7.90142119e-01 6.60667419e-01] [-6.16075337e-01 -1.29314137e+00 -1.42255977e-01 ... -9.79179889e-02 -2.95785606e-01 6.39682174e-01]] [[-3.77840906e-01 -4.47589368e-01 -5.27454853e-01 ... 3.17796737e-01 7.77034909e-02 -4.50267404e-01] [ 6.66082263e-01 -1.67261928e-01 7.82961547e-02 ... 4.61464912e-01 6.83728039e-01 1.07437320e-01] [ 9.68782604e-02 3.91902447e-01 -1.43335998e-01 ... -5.77413201e-01 5.02304018e-01 -6.52699053e-01] ... [ 7.06115127e-01 1.45063743e-01 1.27607870e+00 ... -2.68228590e-01 7.96995997e-01 -6.79278135e-01] [ 4.96309578e-01 7.94296324e-01 -4.81212765e-01 ... 3.12191546e-01 -1.87766135e-01 -8.61564815e-01] [ 3.96934837e-01 -4.39811975e-01 -1.47017574e+00 ... 5.12042046e-01 6.44742310e-01 -1.23267615e+00]] ... [[ 1.90088227e-01 -6.91017628e-01 2.52251536e-01 ... -2.29465142e-02 -4.39877927e-01 5.90913832e-01] [ 2.67097980e-01 1.60761726e+00 3.84705395e-01 ... 1.28427207e+00 2.33835131e-01 2.39167109e-01] [-9.47760344e-01 -1.87239528e-01 -3.39988828e-01 ... 6.57329798e-01 -8.03724527e-01 -7.47469425e-01] ... 
[ 4.37013954e-01 -3.95900786e-01 -4.20854241e-01 ... 6.69920027e-01 1.85611919e-01 -1.66927218e-01] [-1.41228274e-01 -7.94515789e-01 3.73906612e-01 ... 6.12033606e-01 7.83981264e-01 -6.16791487e-01] [-1.47079915e-01 4.90375282e-03 -2.60182798e-01 ... 3.75001729e-01 8.30591977e-01 -1.54438084e-02]] [[-1.01982975e+00 3.39926146e-02 8.58378351e-01 ... -1.80201709e+00 -1.00095046e+00 5.50662726e-02] [ 2.69189328e-01 1.67290390e-01 -8.84904116e-02 ... -1.71787620e-01 8.51123929e-01 4.52684432e-01] [ 4.34996188e-02 -7.48039842e-01 2.20337585e-01 ... 1.40053466e-01 -1.35613218e-01 1.25774765e+00] ... [ 4.95161355e-01 -1.64783180e+00 5.76686144e-01 ... 4.12908256e-01 1.58615157e-01 -1.58519611e-01] [ 4.85009968e-01 4.60094899e-01 5.85761726e-01 ... 1.65325478e-01 -3.64044219e-01 5.91435969e-01] [ 6.47125363e-01 -4.08077717e-01 -4.20295179e-01 ... 1.16404593e-01 -1.92657053e-01 5.60561717e-01]] [[ 4.42441218e-02 6.98009431e-01 -2.23790437e-01 ... 7.23974705e-01 -5.42401791e-01 -8.49283814e-01] [ 1.30076662e-01 -2.28947774e-01 1.19653344e+00 ... 4.68130350e-01 -1.02609761e-01 -2.24882334e-01] [ 1.20796251e+00 3.20136458e-01 -9.69060436e-02 ... -4.43942428e-01 3.95135850e-01 -2.49306798e-01] ... [ 1.21710114e-01 -1.27669975e-01 -3.30566466e-01 ... -4.97223735e-02 -1.16573310e+00 1.39607334e+00] [ 1.08614659e+00 1.23125005e+00 -2.48282224e-01 ... -6.00890398e-01 -1.07931316e-01 -4.46876168e-01] [-4.79854941e-01 9.47132766e-01 -2.19242737e-01 ... -6.44503385e-02 1.10804379e+00 -2.00861230e-01]]] [[[ 3.45082395e-02 -3.69183086e-02 1.73676964e-02 ... -2.21406490e-01 4.25356068e-02 1.77293923e-02] [ 5.79273067e-02 -1.44007847e-01 8.44415948e-02 ... -1.61650687e-01 -2.03991443e-01 7.88436979e-02] [-7.38777891e-02 2.56350189e-02 5.74963577e-02 ... -9.41728055e-02 9.90354940e-02 1.15672618e-01] ... [-1.92413047e-01 5.08326478e-02 1.42462049e-02 ... -8.79609138e-02 2.16995664e-02 -1.23661742e-01] [ 1.05074205e-01 3.07123214e-02 7.14172376e-03 ... 
-1.56869605e-01 -1.32825807e-01 -1.34731784e-01] [-1.39743343e-01 8.49216282e-02 1.99758917e-01 ... -5.35614155e-02 -5.21484390e-02 1.57526746e-01]] [[-3.27882543e-02 -2.45713428e-01 1.53233498e-01 ... -1.46166727e-01 1.20507918e-01 -3.92856784e-02] [ 1.71557948e-01 2.59298414e-01 -1.69057027e-02 ... -2.14798287e-01 -2.36572936e-01 1.21680491e-01] [ 5.01501523e-02 1.34975955e-01 6.90761488e-03 ... 1.91196345e-03 2.97885071e-02 5.75762689e-02] ... [-3.11755538e-02 -1.00571275e-01 -1.30938485e-01 ... -6.94015101e-02 -1.01292267e-01 2.37848684e-01] [ 1.73024610e-02 -6.18217811e-02 1.98394269e-01 ... 8.86930525e-02 1.50179684e-01 -1.35578081e-01] [ 1.06383629e-01 7.46548325e-02 1.31430730e-01 ... -5.80767132e-02 4.44453880e-02 -3.29055011e-01]] [[-8.56663287e-02 1.36928737e-01 -1.69255450e-01 ... 5.25081493e-02 -1.10824637e-01 2.58444726e-01] [ 8.85310918e-02 1.14038408e-01 2.79434565e-02 ... 8.22946206e-02 -1.00329228e-01 4.50845920e-02] [-8.13367292e-02 -1.51237443e-01 -1.13353811e-01 ... 2.94083238e-01 -1.50481179e-01 1.67536840e-01] ... [ 2.97218300e-02 -7.02943131e-02 -7.91633204e-02 ... 2.11535990e-01 -6.59232363e-02 3.52209061e-02] [-1.18934475e-01 6.39733896e-02 -3.67716908e-01 ... -2.63189990e-02 -1.21225074e-01 -8.15312862e-02] [ 1.02951251e-01 1.64217338e-01 2.86389768e-01 ... -3.41693461e-02 2.12594748e-01 -1.18313231e-01]] ... [[ 2.39259005e-02 1.38194442e-01 -3.13630030e-02 ... 1.12480335e-01 -1.13449454e-01 -3.30141224e-02] [ 2.69117147e-01 3.11398655e-01 2.28611842e-01 ... -2.80060798e-01 1.05875533e-03 5.21333218e-02] [-4.81631756e-02 1.63266122e-01 -1.10723712e-01 ... 7.20352009e-02 1.54469788e-01 1.74072981e-01] ... [ 1.71076074e-01 4.27484177e-02 -1.17541291e-01 ... -9.29421484e-02 1.06242776e-01 -9.42229778e-02] [ 9.38095748e-02 -8.56623277e-02 8.90652016e-02 ... -9.44202691e-02 7.46174774e-04 1.05795577e-01] [-4.46290933e-02 1.23874001e-01 -6.76745176e-02 ... 
7.62136430e-02 4.83550131e-02 1.39701724e-01]] [[ 1.27156511e-01 5.13366945e-02 -4.17757854e-02 ... -4.99247238e-02 5.23906648e-02 1.08710162e-01] [-1.41661301e-01 -5.11699878e-02 -5.61652659e-03 ... -9.53892767e-02 2.27687526e-02 -1.57963470e-01] [ 4.89934012e-02 2.00042590e-01 -7.33989552e-02 ... -3.56478244e-02 -7.28389695e-02 -1.02069555e-02] ... [-8.93342867e-03 7.82107785e-02 -2.70834621e-02 ... 1.45798624e-01 -1.25504076e-01 2.53679365e-01] [ 2.61648536e-01 8.56241584e-02 -1.99692711e-01 ... 1.50604740e-01 -2.92465597e-01 -1.25367239e-01] [-6.54297918e-02 -8.31398275e-03 -7.69480914e-02 ... -4.86008078e-01 9.53746587e-02 2.49329302e-02]] [[ 3.87856811e-02 3.18378896e-01 1.99786857e-01 ... 1.51403397e-01 -1.12528041e-01 1.66843414e-01] [-1.01756066e-01 1.30918249e-01 -5.16722091e-02 ... -3.08889095e-02 -1.16800517e-01 2.16306701e-01] [ 9.08289254e-02 -7.06630796e-02 4.90218736e-02 ... 1.55998513e-01 -1.12401336e-01 -1.68099910e-01] ... [ 8.05206448e-02 1.28351480e-01 -1.76674724e-01 ... 6.55276887e-03 1.55070707e-01 3.79390419e-02] [-6.34055212e-02 -2.34554950e-02 -8.01805500e-03 ... -1.72463998e-01 -2.76296549e-02 -5.15814424e-02] [-3.13689113e-02 -4.98336507e-03 -2.33831033e-01 ... 6.36736295e-05 -7.38699809e-02 3.86312082e-02]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': [2, 2, 1], 'pads': 'valid', 'dilations': [1, 1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1153.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[[ 9.441217 -6.3649096 -20.892836 3.9077344 -17.446285 9.139368 -5.6142845 16.693249 ] [-14.437657 10.489196 -10.569782 17.157402 -17.265842 -3.0124397 10.055299 -9.814334 ] [ -4.599207 0.14496425 -0.51596075 -1.4590797 -0.4555822 15.504405 -1.9499485 7.7902517 ] [ 6.130486 0.97939825 -8.048911 1.3319565 7.354209 -21.528442 -0.34420243 -10.173611 ]] [[ 17.20657 -10.444 4.2848063 13.935418 -10.005537 -9.04276 11.1736765 -8.898718 ] [ 14.838109 -7.782191 -13.986053 -8.289464 2.9478693 8.882055 -2.015631 6.055865 ] [ 4.5892587 -6.862891 -4.5376034 -0.29491675 12.515197 -7.1647043 -3.849537 15.156322 ] [ -5.5597115 -9.827706 7.1074 1.082586 -4.291501 0.08946435 -3.1804025 -7.811488 ]] [[ 0.73818374 -7.6590858 3.5700195 -7.167631 -3.4947078 16.477045 2.8657417 8.252123 ] [ 22.76482 5.7975206 12.010617 10.249871 -11.481921 14.266253 3.9627364 -0.08565281] [ 12.288812 -5.7211504 4.4480157 1.8654215 -6.365772 5.3733263 2.5193605 8.50866 ] [-23.065483 -1.4991326 14.434592 -1.6998315 6.6864953 12.045591 -8.962875 -3.7799215 ]] [[ 12.229792 -3.3990467 -9.46248 -13.971669 11.728712 -6.360514 -7.6403294 -2.5830293 ] [ 15.637965 5.9850254 4.2621346 8.216748 7.419461 8.602411 -13.268846 
5.813876 ] [ -4.143248 -12.471962 3.344618 0.94788224 -4.757082 -5.9660316 3.030128 0.3377695 ] [ -7.1264253 13.208599 -19.8392 9.3140335 -3.037507 -3.98753 0.6724305 -16.104744 ]]]]]; ov_res: [[[[[ 9.441214 -6.364911 -20.892832 3.907734 -17.446285 9.139368 -5.614285 16.693247 ] [-14.437659 10.489195 -10.569783 17.157406 -17.265848 -3.0124402 10.0553 -9.814333 ] [ -4.59921 0.14496282 -0.51596063 -1.4590796 -0.45558244 15.504406 -1.9499471 7.790252 ] [ 6.130486 0.97939795 -8.048912 1.331955 7.35421 -21.52844 -0.34420314 -10.173611 ]] [[ 17.206573 -10.444006 4.284807 13.935418 -10.005537 -9.042759 11.173679 -8.898717 ] [ 14.838105 -7.78219 -13.986054 -8.289463 2.947867 8.882057 -2.015633 6.0558653 ] [ 4.589259 -6.8628917 -4.5376034 -0.29491842 12.515197 -7.1647024 -3.8495347 15.156321 ] [ -5.5597124 -9.827707 7.107397 1.0825864 -4.2915015 0.08946531 -3.1804037 -7.81149 ]] [[ 0.7381842 -7.6590896 3.5700188 -7.16763 -3.4947083 16.477049 2.8657415 8.252123 ] [ 22.764814 5.7975197 12.010615 10.249872 -11.481918 14.266254 3.9627373 -0.08565305] [ 12.2888155 -5.72115 4.4480143 1.8654209 -6.3657756 5.373326 2.5193605 8.508663 ] [-23.06548 -1.4991331 14.434592 -1.699832 6.686495 12.045592 -8.962874 -3.7799263 ]] [[ 12.229794 -3.3990474 -9.46248 -13.97167 11.728712 -6.3605146 -7.6403284 -2.5830297 ] [ 15.637966 5.985027 4.2621336 8.21675 7.41946 8.602412 -13.268849 5.813879 ] [ -4.143249 -12.471962 3.3446178 0.9478825 -4.757082 -5.9660277 3.0301292 0.33777022] [ -7.126425 13.208596 -19.839201 9.314033 -3.0375066 -3.9875283 0.67243147 -16.104742 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': [2, 2, 2], 'pads': 'valid', 'dilations': [1, 1, 1], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1155.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.dilations : int[] = prim::Constant[value=[1, 1, 1]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[[ 7.764699 -10.169013 -16.758419 -8.199017 ] [ -1.4898711 15.235872 0.39937943 4.222118 ] [ 6.545791 4.541597 -19.586924 9.174021 ] [ 3.7885683 -9.039869 12.374664 -6.5330086 ]] [[ -0.88140696 -5.1634407 4.771131 -1.6078807 ] [-13.304615 7.521934 6.3059525 6.84944 ] [ 2.7359834 1.84274 4.215438 -20.957708 ] [ 5.6809373 -1.8989738 7.381242 15.713421 ]] [[-10.238544 0.25839087 -3.9849057 -10.579303 ] [ -3.1438565 1.6084249 2.0749907 2.4729211 ] [-12.109162 2.101958 -9.128576 2.285176 ] [ -6.8310275 2.376358 5.4492917 -6.82901 ]] [[ -4.4524035 -0.1830202 4.6631246 5.770938 ] [ 15.992884 -12.020291 -21.990461 -11.34126 ] [ -0.4017245 1.3645135 6.514224 -12.039926 ] [ -8.22131 -9.292634 1.250231 5.4270616 ]]]]]; ov_res: [[[[[ 7.7646985 -10.169014 -16.758419 -8.1990185 ] [ -1.4898695 15.23587 0.3993795 4.2221174 ] [ 6.5457926 4.5415974 -19.586922 9.174023 ] [ 3.7885685 -9.03987 12.374662 -6.5330067 ]] [[ -0.8814069 -5.1634398 4.771132 -1.6078812 ] [-13.304615 7.5219355 6.3059525 6.8494387 ] [ 2.7359846 1.8427404 4.2154374 -20.957705 ] [ 5.680936 -1.8989745 7.381242 15.713423 ]] [[-10.238545 0.25839454 -3.9849057 -10.579303 ] [ -3.143857 1.6084242 2.074992 2.4729216 ] [-12.10916 
2.1019578 -9.128579 2.285177 ] [ -6.831026 2.3763578 5.4492927 -6.8290105 ]] [[ -4.4524045 -0.18302113 4.663123 5.7709355 ] [ 15.992886 -12.020288 -21.990456 -11.34126 ] [ -0.40172523 1.3645152 6.514224 -12.039923 ] [ -8.221307 -9.292638 1.2502314 5.427063 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [1, 3, 3, 3, 3], 'strides': [2, 2, 2], 'pads': 'valid', 'dilations': [2, 2, 2], 'groups': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1157.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=1]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[2, 2, 2]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(1, 3, 3, 3, 3, strides=[81, 27, 9, 3, 1], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %7 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.strides, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%7) fw_re: [[[[[ -3.02748 2.3707242 -4.643452 ] [ 2.2248662 -6.0901947 -7.8929257] [ 2.7234473 -7.57404 7.924538 ]] [[ 4.021915 15.651516 16.26476 ] [ 13.9664955 5.2912335 -1.9495878] [ -2.5919414 3.671333 1.5375599]] [[ -4.651499 -7.5359097 -13.404657 ] [-14.010613 -3.2322605 -3.0067253] [ -5.274795 -7.49153 5.328835 ]]]]]; ov_res: [[[[[ -3.0274816 2.3707268 -4.6434546] [ 2.2248647 -6.0901957 -7.8929257] [ 2.7234478 -7.574039 7.924539 ]] [[ 4.021914 15.651518 16.264755 ] [ 13.966491 5.291232 -1.9495873] [ -2.5919406 3.6713328 1.5375615]] [[ -4.6515017 -7.535912 -13.404659 ] [-14.010613 -3.2322621 -3.0067265] [ -5.2747955 -7.4915314 5.3288355]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': 'same', 'dilations': [2, 1, 2], 'groups': 3} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1159.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 1, 2]]() %self.pads : str = prim::Constant[value="same"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -1.1168 (2,1,1,.,.) = -1.2204 (3,1,1,.,.) = 0.4406 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[[-3.84951085e-01 1.31037903e+00 3.71752828e-01 ... 1.31112349e+00 2.03847814e+00 8.88951719e-01] [ 2.93551588e+00 2.06846714e+00 -1.48806989e+00 ... 8.51069629e-01 2.60630339e-01 1.22420959e-01] [-1.79608107e-01 5.82099557e-01 1.97644866e+00 ... 2.21437603e-01 3.30598235e+00 -4.48469341e-01] ... [-5.95598280e-01 5.65814078e-01 -1.08276784e+00 ... 4.86198366e-02 -4.40731011e-02 -3.06617498e-01] [-4.30311501e-01 2.98131394e+00 8.89783084e-01 ... 5.05368650e-01 1.31299949e+00 -1.87597990e+00] [ 1.80497456e+00 4.72336918e-01 3.13248858e-02 ... -5.92381740e-03 2.31294227e+00 1.94445288e+00]] [[-4.38073277e-01 2.23887712e-01 1.88603520e+00 ... 7.97598362e-01 9.76381958e-01 1.10736735e-01] [-2.62596726e-01 -1.76370561e+00 3.20679158e-01 ... -6.62056327e-01 -1.61682332e+00 9.47218955e-01] [-1.20995152e+00 7.30128229e-01 -4.99089718e-01 ... 1.54018551e-01 1.27460110e+00 -1.93712485e+00] ... [ 2.29811132e-01 1.83216071e+00 -2.83223420e-01 ... -3.76937598e-01 1.51634049e+00 2.27439001e-01] [ 4.10541892e-01 -6.67216539e-01 2.20393634e+00 ... 
-1.84762549e+00 1.88039470e+00 1.39251983e+00] [ 2.91109711e-01 2.19183564e+00 -1.52072740e+00 ... 9.06951904e-01 -1.96384251e+00 3.38662624e-01]] [[-1.22062337e+00 -8.31309438e-01 -1.41824067e+00 ... 1.35548174e+00 2.42656857e-01 -1.00814176e+00] [ 1.39216468e-01 1.40048131e-01 -1.24867842e-01 ... 2.01554894e+00 1.03176033e+00 8.21429312e-01] [ 4.61346477e-01 -9.39763129e-01 9.72765461e-02 ... -2.27083698e-01 1.97061867e-01 4.42184299e-01] ... [-4.81293350e-02 -3.72194678e-01 -1.31817210e+00 ... 6.30653650e-02 1.72755313e+00 -7.97303438e-01] [-9.14607167e-01 -2.70386600e+00 1.54505625e-01 ... -7.81434715e-01 4.11808759e-01 1.46758401e+00] [ 1.57036138e+00 4.28191811e-01 -6.35740221e-01 ... 1.46154380e+00 -7.48875439e-02 -1.18661320e+00]] ... [[-8.88674974e-01 -3.86543065e-01 2.44041324e-01 ... -2.15562820e+00 1.28508663e+00 -5.84283531e-01] [-2.00423896e-01 -1.13733780e+00 -1.98198915e+00 ... -1.24260736e+00 1.19108462e+00 -9.97947216e-01] [-9.10489082e-01 1.91803467e+00 -2.91829199e-01 ... -8.00868750e-01 8.37299109e-01 1.58837414e+00] ... [ 1.69901574e+00 -1.68969944e-01 6.11143947e-01 ... 9.19941217e-02 2.83203065e-01 3.65658826e-03] [-7.85080791e-02 2.56025165e-01 -3.55445117e-01 ... -1.81008339e+00 -3.96733314e-01 6.63457096e-01] [-3.29933912e-01 -1.10756612e+00 3.28382850e-01 ... 2.62836456e+00 -1.46810740e-01 1.06276497e-01]] [[-1.24797869e+00 1.21062696e+00 1.52410471e+00 ... 1.33512187e+00 1.33170217e-01 -3.95936608e-01] [-4.83122557e-01 7.97110558e-01 1.08012640e+00 ... -4.78753716e-01 1.20776296e+00 -1.61438718e-01] [ 2.09378409e+00 -2.67124116e-01 -4.95852530e-01 ... 2.02820212e-01 1.34746730e+00 6.96327627e-01] ... [ 2.29920268e+00 1.27101350e+00 1.00186217e+00 ... 5.55216102e-03 1.65476871e+00 -1.22976017e+00] [-1.62154162e+00 -2.37487406e-01 -1.25544345e+00 ... 4.71953899e-01 1.56861091e+00 9.53083158e-01] [ 2.13186219e-01 -3.05449694e-01 6.66062951e-01 ... 
-1.61572731e+00 7.25945473e-01 3.26698542e-01]] [[ 5.65857112e-01 5.63166201e-01 3.50734115e-01 ... 1.46825895e-01 9.43259776e-01 -1.64835203e+00] [-4.29715961e-01 8.38413060e-01 -6.78331435e-01 ... -4.86118883e-01 9.22306404e-02 1.51314008e+00] [ 1.20073891e+00 -1.25431836e+00 6.19057894e-01 ... 6.26779854e-01 -2.28957748e+00 4.85076606e-02] ... [ 5.80575347e-01 1.15337677e-01 -3.64509434e-01 ... 4.60276365e-01 -8.65880609e-01 -1.33088470e+00] [-1.65238559e+00 -7.86012411e-01 -5.14460266e-01 ... 1.21078646e+00 -7.32127845e-01 2.60818809e-01] [-1.54406476e+00 1.14349079e+00 1.47105968e+00 ... 1.42398274e+00 1.26156795e+00 -1.91155136e+00]]] [[[-1.01008868e+00 5.34205914e-01 3.15829702e-02 ... 5.78805625e-01 -5.41879773e-01 -2.48163843e+00] [ 2.10487628e+00 1.47480094e+00 -5.13624072e-01 ... -9.49245512e-01 -1.29455304e+00 -2.10875068e-02] [-8.05356503e-01 -2.50651270e-01 1.52895784e+00 ... -3.58368754e-01 -2.12419242e-01 -7.76742280e-01] ... [-2.33063355e-01 -2.03460515e-01 -8.40188712e-02 ... 7.91646540e-01 -4.03738081e-01 -1.91774523e+00] [ 2.51485562e+00 1.30541131e-01 -7.33498394e-01 ... 6.61121368e-01 9.27581489e-01 -4.56651539e-01] [ 1.26773143e+00 -2.05129528e+00 3.75868529e-01 ... 1.36266783e-01 1.77196905e-01 6.30488396e-01]] [[-3.51066798e-01 -1.88070044e-01 -3.08788657e-01 ... 3.93960685e-01 2.12946400e-01 -2.62168503e+00] [ 7.25099146e-01 2.94247448e-01 8.64191055e-02 ... -2.67131031e-01 6.29494607e-01 1.90723622e+00] [-3.76114339e-01 1.23067284e+00 -8.55430305e-01 ... 7.54285514e-01 3.98361832e-01 1.24161923e+00] ... [ 1.42096448e+00 1.49809897e+00 7.30672598e-01 ... 1.38612199e+00 1.24572933e+00 -4.77077156e-01] [-1.89197052e+00 -1.21015474e-01 1.30086923e+00 ... 1.83918417e+00 -3.44000220e+00 1.95685542e+00] [-1.98575354e+00 -1.05351850e-01 -8.92683566e-01 ... -8.01226869e-02 -1.00169432e+00 2.18674994e+00]] [[ 1.90913987e+00 -7.14059711e-01 9.44590449e-01 ... 
-3.32675576e-01 -6.12960272e-02 -2.97377419e+00] [-3.57410163e-01 9.08683181e-01 -2.45726871e+00 ... 4.67057705e-01 2.17985106e+00 -7.65608847e-01] [-2.06961417e+00 1.71206862e-01 7.61261106e-01 ... -2.84069920e+00 7.38036156e-01 -7.89736032e-01] ... [ 3.12799406e+00 -5.34116924e-01 -1.19290352e+00 ... 3.38583857e-01 1.15992069e+00 -3.97410952e-02] [ 7.69355800e-03 -1.11523330e+00 -1.93790519e+00 ... -1.74629843e+00 -2.12861881e-01 -1.76903689e+00] [-8.25033247e-01 3.12811065e+00 -1.09406340e+00 ... -6.30747736e-01 -6.77171409e-01 2.11172128e+00]] ... [[ 4.30879146e-02 -1.62445724e+00 -8.84068906e-01 ... -4.51506287e-01 -2.58172894e+00 -2.15669680e+00] [-1.59029174e+00 1.39879966e+00 6.47406399e-01 ... 1.00927413e+00 -1.40856218e+00 1.02577507e+00] [-4.72374976e-01 -5.84760010e-01 3.17985028e-01 ... -1.75637171e-01 1.60470679e-01 9.82248187e-01] ... [ 1.05935287e+00 1.17563212e+00 7.77003348e-01 ... -1.35285759e+00 -2.02789450e+00 -7.43995547e-01] [-1.04238188e+00 1.25701320e+00 -3.89163047e-02 ... -9.95312870e-01 -8.56258810e-01 3.94717097e-01] [-7.74845421e-01 1.17459573e-01 -6.41575933e-01 ... -5.11250973e-01 1.13019288e+00 -2.68258715e+00]] [[-3.45122457e-01 -3.50075692e-01 -1.16158020e+00 ... 9.71835077e-01 -1.15678763e+00 1.78643942e+00] [ 1.95609480e-01 -5.09522617e-01 -1.00600056e-01 ... 8.72210979e-01 -1.45435011e+00 -1.11202884e+00] [-6.76917911e-01 -1.45665586e+00 1.03502311e-01 ... -5.61857283e-01 -1.24824703e+00 -6.87798560e-01] ... [ 2.14574128e-01 -7.74028003e-02 -1.47482574e-01 ... 1.34035900e-01 -7.15570271e-01 7.22111702e-01] [-3.48824978e-01 2.70713925e-01 6.95411682e-01 ... 5.22587895e-01 -1.46950161e+00 -4.39177424e-01] [ 1.04016848e-01 -1.55917239e+00 1.48266423e+00 ... 1.34857130e+00 4.76485677e-02 2.64640403e+00]] [[-5.35438716e-01 9.98370767e-01 -7.22637951e-01 ... -1.77571669e-01 -4.44850147e-01 -2.67687410e-01] [-1.91453183e+00 9.26172972e-01 -1.86514884e-01 ... 
-2.65766382e+00 -5.65763414e-01 5.45089424e-01] [-1.03747261e+00 -2.14226451e-03 -2.23748851e+00 ... 1.91105455e-01 9.15764153e-01 5.49177349e-01] ... [-1.52511227e+00 2.44124150e+00 -2.35298023e-01 ... 4.93391335e-01 8.06693554e-01 1.22221880e-01] [ 1.05078566e+00 1.36943507e+00 1.02816582e+00 ... 6.70631826e-01 -8.52083981e-01 2.23632288e+00] [ 7.45021224e-01 1.62992299e+00 1.06560278e+00 ... -1.72211421e+00 4.74356294e-01 -1.22080362e+00]]] [[[-6.46400154e-02 2.68644989e-01 2.98506111e-01 ... 1.28696293e-01 -3.31295818e-01 5.56007922e-01] [ 5.04629612e-01 -3.75459120e-02 -3.52284266e-03 ... 1.35623842e-01 7.98686221e-02 3.60285014e-01] [-1.76014990e-01 1.77318498e-01 2.50775158e-01 ... -2.77833045e-01 8.03268030e-02 3.82889420e-01] ... [ 6.29429936e-01 -3.92995298e-01 1.72948427e-02 ... -5.58272064e-01 -6.46739423e-01 3.25034499e-01] [-5.60918570e-01 6.58063591e-01 -4.08434689e-01 ... 4.83858943e-01 -1.31135046e-01 -9.59321380e-01] [ 2.14169398e-01 -5.83281755e-01 -3.15178990e-01 ... -6.00047469e-01 -1.56473458e-01 4.40917373e-01]] [[ 4.43757772e-01 -6.12981856e-01 -6.43111765e-01 ... -1.17067762e-01 -6.69668838e-02 -1.02253869e-01] [-1.52709829e-02 9.02773619e-01 4.59690168e-02 ... 2.22380355e-01 -3.24391097e-01 1.64414555e-01] [ 8.53419676e-02 -1.56042874e-01 -2.50323832e-01 ... 4.98160452e-01 -5.78057766e-01 2.44881555e-01] ... [-1.00574601e+00 -2.23731399e-01 2.01691762e-01 ... -9.25499797e-01 -2.67861307e-01 -8.96107629e-02] [-5.46715856e-01 8.33218619e-02 -1.00149596e+00 ... -3.58497143e-01 6.35863781e-01 3.24958920e-01] [-6.68790564e-02 -5.47582991e-02 -7.88803279e-01 ... 3.41153890e-01 3.20952594e-01 -4.61796075e-01]] [[-1.57451674e-01 -2.22720012e-01 4.25795585e-01 ... 1.81324497e-01 4.19837326e-01 -7.38606825e-02] [ 9.37343478e-01 9.04352143e-02 3.21132131e-02 ... 1.67383492e-01 -2.35854387e-02 -3.55056643e-01] [-4.52619612e-01 -5.57876378e-03 2.43030086e-01 ... -4.02402192e-01 -2.08543062e-01 -2.75803715e-01] ... 
[-5.13554066e-02 3.16458315e-01 1.41364736e-02 ... 7.01998889e-01 2.88150728e-01 5.37366867e-01] [ 1.20108292e-01 -3.71339113e-01 1.20004860e-03 ... 3.59174341e-01 5.13715930e-02 3.40771168e-01] [-1.64637819e-01 -1.87056884e-01 1.72440732e+00 ... -2.23191887e-01 1.30480099e+00 -2.23077133e-01]] ... [[ 2.05985770e-01 -2.14803815e-01 3.11886638e-01 ... 7.42260873e-01 9.97137368e-01 7.02999771e-01] [ 1.07820824e-01 5.18326998e-01 -2.00605154e-01 ... 5.05866647e-01 -2.83812165e-01 1.09202251e-01] [-1.93445489e-01 4.49769795e-01 1.25742570e-01 ... -4.81263489e-01 6.61174357e-01 -2.40307525e-01] ... [-3.45847785e-01 1.09575927e+00 8.99151385e-01 ... 3.25661272e-01 5.05668700e-01 4.86238062e-01] [ 3.23149234e-01 -7.78177202e-01 -2.79881597e-01 ... -8.54847789e-01 -6.36243299e-02 -4.02229041e-01] [-8.16188604e-02 -3.29868108e-01 3.91771197e-01 ... 9.29099992e-02 -1.96680948e-01 1.04524064e+00]] [[-6.15572110e-02 2.79420018e-01 6.39025122e-03 ... -1.73439234e-01 -8.67391169e-01 -1.05030572e+00] [ 4.28751677e-01 -8.76858711e-01 2.30310053e-01 ... 2.96618879e-01 -4.89085943e-01 -5.36453366e-01] [-4.37910482e-02 3.75640720e-01 -1.83537170e-01 ... -1.98200941e-01 3.36875081e-01 5.93988001e-02] ... [-7.46643484e-01 -4.51564014e-01 -4.46205676e-01 ... 6.06052697e-01 6.42651543e-02 5.38035214e-01] [ 3.98053557e-01 1.90652832e-01 6.51695073e-01 ... 6.47852989e-03 -6.01110280e-01 4.15999115e-01] [-6.78288937e-01 -1.49808626e-03 9.70563769e-01 ... 7.23964155e-01 -4.97753434e-02 7.62754083e-01]] [[-2.01182395e-01 -2.83605933e-01 -4.31659877e-01 ... 4.13079083e-01 -1.04438886e-01 7.04295278e-01] [-5.08213900e-02 2.82909811e-01 2.97170818e-01 ... -2.58765042e-01 4.65918928e-01 2.67460346e-01] [-3.43032748e-01 3.64173025e-01 6.80719733e-01 ... -4.15156186e-01 -2.03045279e-01 -9.49730933e-01] ... [ 3.48472714e-01 -5.23427069e-01 5.56843460e-01 ... 4.72002834e-01 2.06264313e-02 -3.67733866e-01] [ 7.03338757e-02 3.22734922e-01 4.15338606e-01 ... 
-1.93835244e-01 1.09195411e-01 -1.84333995e-01] [ 6.90778732e-01 -3.86482626e-01 2.82713830e-01 ... -4.74094778e-01 2.40333125e-01 -1.06611408e-01]]]]]; ov_res: [[[[[-3.84951085e-01 1.31037903e+00 3.71752828e-01 ... 1.31112349e+00 2.03847814e+00 8.88951719e-01] [ 2.93551588e+00 2.06846714e+00 -1.48806989e+00 ... 8.51069629e-01 2.60630339e-01 1.22420959e-01] [-1.79608107e-01 5.82099557e-01 1.97644866e+00 ... 2.21437603e-01 3.30598235e+00 -4.48469341e-01] ... [-5.95598280e-01 5.65814078e-01 -1.08276784e+00 ... 4.86198366e-02 -4.40731011e-02 -3.06617498e-01] [-4.30311501e-01 2.98131394e+00 8.89783084e-01 ... 5.05368650e-01 1.31299949e+00 -1.87597990e+00] [ 1.80497456e+00 4.72336918e-01 3.13248858e-02 ... -5.92381740e-03 2.31294227e+00 1.94445288e+00]] [[-4.38073277e-01 2.23887712e-01 1.88603520e+00 ... 7.97598362e-01 9.76381958e-01 1.10736735e-01] [-2.62596726e-01 -1.76370561e+00 3.20679158e-01 ... -6.62056327e-01 -1.61682332e+00 9.47218955e-01] [-1.20995152e+00 7.30128229e-01 -4.99089718e-01 ... 1.54018551e-01 1.27460110e+00 -1.93712485e+00] ... [ 2.29811132e-01 1.83216071e+00 -2.83223420e-01 ... -3.76937598e-01 1.51634049e+00 2.27439001e-01] [ 4.10541892e-01 -6.67216539e-01 2.20393634e+00 ... -1.84762549e+00 1.88039470e+00 1.39251983e+00] [ 2.91109711e-01 2.19183564e+00 -1.52072740e+00 ... 9.06951904e-01 -1.96384251e+00 3.38662624e-01]] [[-1.22062337e+00 -8.31309438e-01 -1.41824067e+00 ... 1.35548174e+00 2.42656857e-01 -1.00814176e+00] [ 1.39216468e-01 1.40048131e-01 -1.24867842e-01 ... 2.01554894e+00 1.03176033e+00 8.21429312e-01] [ 4.61346477e-01 -9.39763129e-01 9.72765461e-02 ... -2.27083698e-01 1.97061867e-01 4.42184299e-01] ... [-4.81293350e-02 -3.72194678e-01 -1.31817210e+00 ... 6.30653650e-02 1.72755313e+00 -7.97303438e-01] [-9.14607167e-01 -2.70386600e+00 1.54505625e-01 ... -7.81434715e-01 4.11808759e-01 1.46758401e+00] [ 1.57036138e+00 4.28191811e-01 -6.35740221e-01 ... 1.46154380e+00 -7.48875439e-02 -1.18661320e+00]] ... 
[[-8.88674974e-01 -3.86543065e-01 2.44041324e-01 ... -2.15562820e+00 1.28508663e+00 -5.84283531e-01] [-2.00423896e-01 -1.13733780e+00 -1.98198915e+00 ... -1.24260736e+00 1.19108462e+00 -9.97947216e-01] [-9.10489082e-01 1.91803467e+00 -2.91829199e-01 ... -8.00868750e-01 8.37299109e-01 1.58837414e+00] ... [ 1.69901574e+00 -1.68969944e-01 6.11143947e-01 ... 9.19941217e-02 2.83203065e-01 3.65658826e-03] [-7.85080791e-02 2.56025165e-01 -3.55445117e-01 ... -1.81008339e+00 -3.96733314e-01 6.63457096e-01] [-3.29933912e-01 -1.10756612e+00 3.28382850e-01 ... 2.62836456e+00 -1.46810740e-01 1.06276497e-01]] [[-1.24797869e+00 1.21062696e+00 1.52410471e+00 ... 1.33512187e+00 1.33170217e-01 -3.95936608e-01] [-4.83122557e-01 7.97110558e-01 1.08012640e+00 ... -4.78753716e-01 1.20776296e+00 -1.61438718e-01] [ 2.09378409e+00 -2.67124116e-01 -4.95852530e-01 ... 2.02820212e-01 1.34746730e+00 6.96327627e-01] ... [ 2.29920268e+00 1.27101350e+00 1.00186217e+00 ... 5.55216102e-03 1.65476871e+00 -1.22976017e+00] [-1.62154162e+00 -2.37487406e-01 -1.25544345e+00 ... 4.71953899e-01 1.56861091e+00 9.53083158e-01] [ 2.13186219e-01 -3.05449694e-01 6.66062951e-01 ... -1.61572731e+00 7.25945473e-01 3.26698542e-01]] [[ 5.65857112e-01 5.63166201e-01 3.50734115e-01 ... 1.46825895e-01 9.43259776e-01 -1.64835203e+00] [-4.29715961e-01 8.38413060e-01 -6.78331435e-01 ... -4.86118883e-01 9.22306404e-02 1.51314008e+00] [ 1.20073891e+00 -1.25431836e+00 6.19057894e-01 ... 6.26779854e-01 -2.28957748e+00 4.85076606e-02] ... [ 5.80575347e-01 1.15337677e-01 -3.64509434e-01 ... 4.60276365e-01 -8.65880609e-01 -1.33088470e+00] [-1.65238559e+00 -7.86012411e-01 -5.14460266e-01 ... 1.21078646e+00 -7.32127845e-01 2.60818809e-01] [-1.54406476e+00 1.14349079e+00 1.47105968e+00 ... 1.42398274e+00 1.26156795e+00 -1.91155136e+00]]] [[[-1.01008868e+00 5.34205914e-01 3.15829702e-02 ... 5.78805625e-01 -5.41879773e-01 -2.48163843e+00] [ 2.10487628e+00 1.47480094e+00 -5.13624072e-01 ... 
-9.49245512e-01 -1.29455304e+00 -2.10875068e-02] [-8.05356503e-01 -2.50651270e-01 1.52895784e+00 ... -3.58368754e-01 -2.12419242e-01 -7.76742280e-01] ... [-2.33063355e-01 -2.03460515e-01 -8.40188712e-02 ... 7.91646540e-01 -4.03738081e-01 -1.91774523e+00] [ 2.51485562e+00 1.30541131e-01 -7.33498394e-01 ... 6.61121368e-01 9.27581489e-01 -4.56651539e-01] [ 1.26773143e+00 -2.05129528e+00 3.75868529e-01 ... 1.36266783e-01 1.77196905e-01 6.30488396e-01]] [[-3.51066798e-01 -1.88070044e-01 -3.08788657e-01 ... 3.93960685e-01 2.12946400e-01 -2.62168503e+00] [ 7.25099146e-01 2.94247448e-01 8.64191055e-02 ... -2.67131031e-01 6.29494607e-01 1.90723622e+00] [-3.76114339e-01 1.23067284e+00 -8.55430305e-01 ... 7.54285514e-01 3.98361832e-01 1.24161923e+00] ... [ 1.42096448e+00 1.49809897e+00 7.30672598e-01 ... 1.38612199e+00 1.24572933e+00 -4.77077156e-01] [-1.89197052e+00 -1.21015474e-01 1.30086923e+00 ... 1.83918417e+00 -3.44000220e+00 1.95685542e+00] [-1.98575354e+00 -1.05351850e-01 -8.92683566e-01 ... -8.01226869e-02 -1.00169432e+00 2.18674994e+00]] [[ 1.90913987e+00 -7.14059711e-01 9.44590449e-01 ... -3.32675576e-01 -6.12960272e-02 -2.97377419e+00] [-3.57410163e-01 9.08683181e-01 -2.45726871e+00 ... 4.67057705e-01 2.17985106e+00 -7.65608847e-01] [-2.06961417e+00 1.71206862e-01 7.61261106e-01 ... -2.84069920e+00 7.38036156e-01 -7.89736032e-01] ... [ 3.12799406e+00 -5.34116924e-01 -1.19290352e+00 ... 3.38583857e-01 1.15992069e+00 -3.97410952e-02] [ 7.69355800e-03 -1.11523330e+00 -1.93790519e+00 ... -1.74629843e+00 -2.12861881e-01 -1.76903689e+00] [-8.25033247e-01 3.12811065e+00 -1.09406340e+00 ... -6.30747736e-01 -6.77171409e-01 2.11172128e+00]] ... [[ 4.30879146e-02 -1.62445724e+00 -8.84068906e-01 ... -4.51506287e-01 -2.58172894e+00 -2.15669680e+00] [-1.59029174e+00 1.39879966e+00 6.47406399e-01 ... 1.00927413e+00 -1.40856218e+00 1.02577507e+00] [-4.72374976e-01 -5.84760010e-01 3.17985028e-01 ... -1.75637171e-01 1.60470679e-01 9.82248187e-01] ... 
[ 1.05935287e+00 1.17563212e+00 7.77003348e-01 ... -1.35285759e+00 -2.02789450e+00 -7.43995547e-01] [-1.04238188e+00 1.25701320e+00 -3.89163047e-02 ... -9.95312870e-01 -8.56258810e-01 3.94717097e-01] [-7.74845421e-01 1.17459573e-01 -6.41575933e-01 ... -5.11250973e-01 1.13019288e+00 -2.68258715e+00]] [[-3.45122457e-01 -3.50075692e-01 -1.16158020e+00 ... 9.71835077e-01 -1.15678763e+00 1.78643942e+00] [ 1.95609480e-01 -5.09522617e-01 -1.00600056e-01 ... 8.72210979e-01 -1.45435011e+00 -1.11202884e+00] [-6.76917911e-01 -1.45665586e+00 1.03502311e-01 ... -5.61857283e-01 -1.24824703e+00 -6.87798560e-01] ... [ 2.14574128e-01 -7.74028003e-02 -1.47482574e-01 ... 1.34035900e-01 -7.15570271e-01 7.22111702e-01] [-3.48824978e-01 2.70713925e-01 6.95411682e-01 ... 5.22587895e-01 -1.46950161e+00 -4.39177424e-01] [ 1.04016848e-01 -1.55917239e+00 1.48266423e+00 ... 1.34857130e+00 4.76485677e-02 2.64640403e+00]] [[-5.35438716e-01 9.98370767e-01 -7.22637951e-01 ... -1.77571669e-01 -4.44850147e-01 -2.67687410e-01] [-1.91453183e+00 9.26172972e-01 -1.86514884e-01 ... -2.65766382e+00 -5.65763414e-01 5.45089424e-01] [-1.03747261e+00 -2.14226451e-03 -2.23748851e+00 ... 1.91105455e-01 9.15764153e-01 5.49177349e-01] ... [-1.52511227e+00 2.44124150e+00 -2.35298023e-01 ... 4.93391335e-01 8.06693554e-01 1.22221880e-01] [ 1.05078566e+00 1.36943507e+00 1.02816582e+00 ... 6.70631826e-01 -8.52083981e-01 2.23632288e+00] [ 7.45021224e-01 1.62992299e+00 1.06560278e+00 ... -1.72211421e+00 4.74356294e-01 -1.22080362e+00]]] [[[-6.46400154e-02 2.68644989e-01 2.98506111e-01 ... 1.28696293e-01 -3.31295818e-01 5.56007922e-01] [ 5.04629612e-01 -3.75459120e-02 -3.52284266e-03 ... 1.35623842e-01 7.98686221e-02 3.60285014e-01] [-1.76014990e-01 1.77318498e-01 2.50775158e-01 ... -2.77833045e-01 8.03268030e-02 3.82889420e-01] ... [ 6.29429936e-01 -3.92995298e-01 1.72948427e-02 ... -5.58272064e-01 -6.46739423e-01 3.25034499e-01] [-5.60918570e-01 6.58063591e-01 -4.08434689e-01 ... 
4.83858943e-01 -1.31135046e-01 -9.59321380e-01] [ 2.14169398e-01 -5.83281755e-01 -3.15178990e-01 ... -6.00047469e-01 -1.56473458e-01 4.40917373e-01]] [[ 4.43757772e-01 -6.12981856e-01 -6.43111765e-01 ... -1.17067762e-01 -6.69668838e-02 -1.02253869e-01] [-1.52709829e-02 9.02773619e-01 4.59690168e-02 ... 2.22380355e-01 -3.24391097e-01 1.64414555e-01] [ 8.53419676e-02 -1.56042874e-01 -2.50323832e-01 ... 4.98160452e-01 -5.78057766e-01 2.44881555e-01] ... [-1.00574601e+00 -2.23731399e-01 2.01691762e-01 ... -9.25499797e-01 -2.67861307e-01 -8.96107629e-02] [-5.46715856e-01 8.33218619e-02 -1.00149596e+00 ... -3.58497143e-01 6.35863781e-01 3.24958920e-01] [-6.68790564e-02 -5.47582991e-02 -7.88803279e-01 ... 3.41153890e-01 3.20952594e-01 -4.61796075e-01]] [[-1.57451674e-01 -2.22720012e-01 4.25795585e-01 ... 1.81324497e-01 4.19837326e-01 -7.38606825e-02] [ 9.37343478e-01 9.04352143e-02 3.21132131e-02 ... 1.67383492e-01 -2.35854387e-02 -3.55056643e-01] [-4.52619612e-01 -5.57876378e-03 2.43030086e-01 ... -4.02402192e-01 -2.08543062e-01 -2.75803715e-01] ... [-5.13554066e-02 3.16458315e-01 1.41364736e-02 ... 7.01998889e-01 2.88150728e-01 5.37366867e-01] [ 1.20108292e-01 -3.71339113e-01 1.20004860e-03 ... 3.59174341e-01 5.13715930e-02 3.40771168e-01] [-1.64637819e-01 -1.87056884e-01 1.72440732e+00 ... -2.23191887e-01 1.30480099e+00 -2.23077133e-01]] ... [[ 2.05985770e-01 -2.14803815e-01 3.11886638e-01 ... 7.42260873e-01 9.97137368e-01 7.02999771e-01] [ 1.07820824e-01 5.18326998e-01 -2.00605154e-01 ... 5.05866647e-01 -2.83812165e-01 1.09202251e-01] [-1.93445489e-01 4.49769795e-01 1.25742570e-01 ... -4.81263489e-01 6.61174357e-01 -2.40307525e-01] ... [-3.45847785e-01 1.09575927e+00 8.99151385e-01 ... 3.25661272e-01 5.05668700e-01 4.86238062e-01] [ 3.23149234e-01 -7.78177202e-01 -2.79881597e-01 ... -8.54847789e-01 -6.36243299e-02 -4.02229041e-01] [-8.16188604e-02 -3.29868108e-01 3.91771197e-01 ... 
9.29099992e-02 -1.96680948e-01 1.04524064e+00]] [[-6.15572110e-02 2.79420018e-01 6.39025122e-03 ... -1.73439234e-01 -8.67391169e-01 -1.05030572e+00] [ 4.28751677e-01 -8.76858711e-01 2.30310053e-01 ... 2.96618879e-01 -4.89085943e-01 -5.36453366e-01] [-4.37910482e-02 3.75640720e-01 -1.83537170e-01 ... -1.98200941e-01 3.36875081e-01 5.93988001e-02] ... [-7.46643484e-01 -4.51564014e-01 -4.46205676e-01 ... 6.06052697e-01 6.42651543e-02 5.38035214e-01] [ 3.98053557e-01 1.90652832e-01 6.51695073e-01 ... 6.47852989e-03 -6.01110280e-01 4.15999115e-01] [-6.78288937e-01 -1.49808626e-03 9.70563769e-01 ... 7.23964155e-01 -4.97753434e-02 7.62754083e-01]] [[-2.01182395e-01 -2.83605933e-01 -4.31659877e-01 ... 4.13079083e-01 -1.04438886e-01 7.04295278e-01] [-5.08213900e-02 2.82909811e-01 2.97170818e-01 ... -2.58765042e-01 4.65918928e-01 2.67460346e-01] [-3.43032748e-01 3.64173025e-01 6.80719733e-01 ... -4.15156186e-01 -2.03045279e-01 -9.49730933e-01] ... [ 3.48472714e-01 -5.23427069e-01 5.56843460e-01 ... 4.72002834e-01 2.06264313e-02 -3.67733866e-01] [ 7.03338757e-02 3.22734922e-01 4.15338606e-01 ... -1.93835244e-01 1.09195411e-01 -1.84333995e-01] [ 6.90778732e-01 -3.86482626e-01 2.82713830e-01 ... -4.74094778e-01 2.40333125e-01 -1.06611408e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_convolution_mode.py::TestConv2D::test_convolution_mode_3d[ ie_device:CPU - precision:FP32 - bias:False - params:{'weights_shape': [3, 1, 1, 1, 1], 'strides': [1, 1, 1], 'pads': 'valid', 'dilations': [2, 1, 2], 'groups': 3} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_convolution_mode.___torch_mangle_1161.aten_convolution_mode, %x.1 : Tensor): %self.groups : int = prim::Constant[value=3]() %self.dilations : int[] = prim::Constant[value=[2, 1, 2]]() %self.pads : str = prim::Constant[value="valid"]() %self.strides : int[] = prim::Constant[value=[1, 1, 1]]() %self.bias : NoneType = prim::Constant() %self.weight : Float(3, 1, 1, 1, 1, strides=[1, 1, 1, 1, 1], requires_grad=0, device=cpu) = prim::Constant[value=(1,1,1,.,.) = -0.6598 (2,1,1,.,.) = 0.2298 (3,1,1,.,.) = -1.0393 [ CPUFloatType{3,1,1,1,1} ]]() %8 : Tensor = aten::_convolution_mode(%x.1, %self.weight, %self.bias, %self.strides, %self.pads, %self.dilations, %self.groups) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_convolution_mode.py:29:23 return (%8) fw_re: [[[[[ 4.10584897e-01 -2.61679232e-01 1.96506664e-01 ... 2.02488631e-01 6.13615930e-01 -2.20533222e-01] [ 2.22371370e-02 -1.54572773e+00 3.69519353e-01 ... 8.59915987e-02 1.01104951e+00 -2.75343120e-01] [-4.15193290e-01 4.23707485e-01 2.08016023e-01 ... -3.02493423e-01 2.29562357e-01 1.09291777e-01] ... [-9.98138070e-01 -7.15328097e-01 -2.91558504e-01 ... -6.91168010e-01 2.14949563e-01 -5.24270907e-02] [ 9.36365366e-01 7.48023152e-01 -3.08487296e-01 ... 3.42856437e-01 -6.10683858e-01 5.26830435e-01] [-4.85929340e-01 5.73864460e-01 4.65220869e-01 ... -7.25721002e-01 -4.37075555e-01 6.84800565e-01]] [[ 5.33948600e-01 7.68108070e-01 -1.22326344e-01 ... 6.98675454e-01 -4.76328358e-02 -2.23940238e-01] [ 3.95428360e-01 -7.94767797e-01 -2.83067495e-01 ... 1.12351930e+00 7.86421895e-01 -2.28217527e-01] [-2.38146231e-01 -1.38059878e+00 6.87347293e-01 ... 5.36524475e-01 5.33074200e-01 1.18088984e+00] ... [ 5.73785722e-01 -1.16457276e-01 2.91493744e-01 ... -9.27027464e-01 1.38792777e+00 7.23784119e-02] [-6.52113706e-02 9.83012542e-02 5.69170892e-01 ... 
-5.17579198e-01 -1.94151193e-01 -3.36677097e-02] [ 5.09645045e-01 9.71614838e-01 1.96487755e-01 ... -9.44104373e-01 2.68372238e-01 -4.41547841e-01]] [[-1.14900209e-01 7.95436740e-01 -1.20086096e-01 ... -4.15680632e-02 -1.90415487e-01 6.99884117e-01] [ 1.54587477e-01 -1.09000754e+00 1.23813069e+00 ... -1.34953424e-01 1.20221961e+00 -9.50244725e-01] [ 1.35038540e-01 1.50734186e-01 5.23622394e-01 ... 3.65953147e-01 -1.02714443e+00 -1.08955348e+00] ... [-1.51225045e-01 2.06842870e-01 1.10354787e-02 ... -1.02083910e+00 -1.55545712e+00 9.69776154e-01] [-8.79311025e-01 1.65170342e-01 -1.59238026e-01 ... -8.37840259e-01 1.24276929e-01 -6.69160366e-01] [ 3.05132538e-01 -8.32560733e-02 -2.23611146e-01 ... -1.32949913e+00 1.07689023e-01 8.78661275e-01]] ... [[-5.24171710e-01 5.64116299e-01 8.08661461e-01 ... 4.42314476e-01 -8.08015466e-01 -6.90896451e-01] [ 1.73256886e+00 -2.41306186e-01 -2.03554571e-01 ... -4.55476753e-02 -4.42594826e-01 -4.23546404e-01] [ 6.11959815e-01 5.02133779e-02 -1.33136138e-01 ... 4.88545075e-02 1.40426010e-01 7.23708933e-03] ... [ 2.02122293e-02 7.46532798e-01 -3.78679723e-01 ... -7.47826040e-01 6.75953209e-01 -7.77191460e-01] [ 3.98368090e-01 4.74940866e-01 6.18467629e-01 ... -3.04320663e-01 3.43435374e-03 1.81274009e+00] [ 5.94201088e-01 -3.54739010e-01 -1.64902186e+00 ... -5.73141277e-01 4.39678133e-01 1.83173835e-01]] [[ 1.87549349e-02 7.68335581e-01 4.56367522e-01 ... -5.70518911e-01 -2.91807890e-01 -3.21529001e-01] [ 3.12672019e-01 -6.17718458e-01 1.49712581e-02 ... 2.97845542e-01 -6.12742782e-01 1.37441084e-01] [ 1.14967787e+00 -1.12811960e-01 -3.76383990e-01 ... 2.27130532e-01 1.33197263e-01 -2.17141941e-01] ... [ 2.58360114e-02 -4.90004152e-01 -6.13064587e-01 ... 4.40527499e-01 -4.41746473e-01 -2.83232957e-01] [ 6.71112955e-01 1.44639820e-01 -1.39439678e+00 ... -9.68154728e-01 6.69813395e-01 2.12354243e-01] [ 1.56014189e-01 -2.69716233e-02 7.68295705e-01 ... 
-4.04962152e-01 1.16363072e+00 5.81064343e-01]] [[ 5.09946704e-01 5.35468221e-01 -6.25380278e-01 ... -6.70190930e-01 -9.07578766e-01 -2.43380636e-01] [-1.22693397e-01 -3.55051607e-01 -9.12176371e-01 ... -1.23118281e-01 -9.83787179e-01 1.33855373e-01] [-2.98094209e-02 -7.16698647e-01 -7.85796583e-01 ... -2.89333493e-01 2.65529156e-01 -4.41111594e-01] ... [-2.39697993e-02 -8.40438426e-01 8.02707255e-01 ... 9.70839322e-01 1.46048069e-01 1.06443620e+00] [ 3.46326381e-02 9.73388791e-01 -6.63038313e-01 ... 4.69580621e-01 -6.21224165e-01 1.96348578e-01] [ 5.35796642e-01 -1.61568975e+00 -1.54269978e-01 ... 9.83637094e-01 -4.68651891e-01 1.40996003e+00]]] [[[ 3.91413599e-01 1.30647704e-01 -2.29301795e-01 ... -5.00008762e-02 -7.70060048e-02 -2.01628311e-03] [-2.61521518e-01 9.61269289e-02 -1.40317112e-01 ... 2.98960623e-03 -1.47672519e-01 1.69453710e-01] [-2.22237632e-01 -6.63410485e-01 -4.06244427e-01 ... 1.31854070e-02 -6.22065842e-01 -2.22474113e-01] ... [-8.23385045e-02 2.37671852e-01 -5.10784686e-01 ... -4.77844775e-02 -2.96419133e-02 -2.27560356e-01] [ 2.39486769e-01 1.51240662e-01 -6.14788413e-01 ... 2.96308070e-01 -1.90533936e-01 -2.51002192e-01] [ 1.88639954e-01 3.82327735e-02 1.05596684e-01 ... 8.27969890e-03 -4.00370866e-01 3.63717556e-01]] [[ 3.52252334e-01 -1.27855256e-01 -1.06400726e-02 ... -1.61919832e-01 -3.47180605e-01 -8.15805607e-03] [ 4.01325151e-02 1.64769709e-01 -3.62296343e-01 ... 1.34382248e-01 -2.69191295e-01 -3.61604840e-01] [ 7.56567717e-02 -2.25465581e-01 -2.31656194e-01 ... 2.01846864e-02 -4.93405163e-02 7.29442164e-02] ... [ 8.79522860e-02 -6.02397844e-02 -8.04116651e-02 ... 3.39931011e-01 -1.31276578e-01 -1.39903501e-01] [ 1.66260928e-01 -1.49083687e-02 -1.16239354e-01 ... 5.94543144e-02 -2.90579766e-01 -5.51837757e-02] [-1.58689857e-01 -4.84694690e-02 2.86991626e-01 ... -3.51054013e-01 -2.27689788e-01 3.02367598e-01]] [[ 7.33064711e-02 5.52361369e-01 1.29475147e-01 ... 
-1.32236183e-02 8.37187022e-02 -3.04483771e-01] [-1.20305620e-01 4.27011222e-01 -1.03859745e-01 ... -1.00831486e-01 -4.58309561e-01 6.71483129e-02] [-5.14439344e-02 2.76303496e-02 -4.79612827e-01 ... 4.43011701e-01 1.53894827e-01 2.14366391e-01] ... [ 4.99891043e-01 1.07467026e-01 -1.37229189e-01 ... 1.33974478e-01 -2.38724560e-01 -5.81585765e-01] [ 6.66031316e-02 -1.73936859e-01 -5.69920242e-02 ... -4.16334748e-01 -1.88899517e-01 -2.87321448e-01] [-3.09612006e-01 2.17088252e-01 -3.21444511e-01 ... -4.66858268e-01 -6.52839601e-01 1.13573596e-01]] ... [[ 1.69026271e-01 2.94878334e-01 -8.61933604e-02 ... -8.10847357e-02 1.19118221e-01 1.65133372e-01] [-7.52362162e-02 -3.83991282e-03 1.51393205e-01 ... -1.89876817e-02 -4.31057870e-01 1.64224103e-01] [ 1.59768835e-02 1.24261975e-01 1.85141057e-01 ... 2.28708014e-01 5.86605519e-02 3.07460517e-01] ... [-2.35235333e-01 -2.13824123e-01 2.77392179e-01 ... 2.29054585e-01 -2.23311901e-01 1.28196716e-01] [ 3.00294220e-01 -1.02612842e-02 -6.96286440e-01 ... 5.83808541e-01 -1.35914832e-01 1.90892324e-01] [-1.88515648e-01 -1.12039953e-01 1.22529566e-01 ... -4.04801816e-02 3.76351574e-03 -7.08190426e-02]] [[-1.48429468e-01 2.95577347e-01 -1.57979429e-01 ... 1.44793594e-03 6.43396154e-02 2.36952975e-01] [ 1.80235878e-01 -2.74010445e-03 -1.84454042e-02 ... -1.74501806e-01 1.14935882e-01 -1.98273346e-01] [-9.79543552e-02 1.78078786e-01 -3.05283010e-01 ... 1.51060164e-01 1.52766943e-01 -2.54705697e-01] ... [ 3.39571089e-01 5.70176961e-03 -1.61225647e-01 ... 1.02607295e-01 -6.00890741e-02 -2.14115418e-02] [ 1.80493280e-01 -4.77726497e-02 -3.92502576e-01 ... -4.29515652e-02 2.38054425e-01 1.30244032e-01] [-4.41228032e-01 -7.16325045e-02 1.62505265e-02 ... 2.83530235e-01 4.38444406e-01 -3.93683434e-01]] [[ 2.52053700e-03 9.19666588e-02 1.61018282e-01 ... -4.28814411e-01 -1.67198852e-01 6.72932714e-02] [ 5.30024111e-01 -1.99670643e-01 1.48218825e-01 ... 
-5.41500887e-03 -6.47512749e-02 -2.24727556e-01] [ 1.93596166e-02 6.52059764e-02 -4.76487502e-02 ... -9.02743116e-02 -3.88080515e-02 -2.42485087e-02] ... [-1.83368698e-01 3.03885750e-02 -3.90082002e-02 ... -2.91236430e-01 1.31172985e-01 1.10178016e-01] [ 2.76466399e-01 -2.30243832e-01 -2.22199768e-01 ... -4.72265244e-01 2.31849611e-01 -5.53318299e-02] [-1.78235754e-01 -3.16116095e-01 -1.23758085e-01 ... 3.21016192e-01 -3.81323732e-02 -5.62775508e-02]]] [[[ 8.84911537e-01 -1.57972410e-01 9.11005259e-01 ... -4.82866585e-01 1.20916140e+00 1.47691131e+00] [-1.01507115e+00 4.41770069e-02 -1.58644002e-02 ... 4.61413153e-02 -1.59198895e-01 3.95814441e-02] [ 1.53071091e-01 5.95001817e-01 -1.64662862e+00 ... -1.27680826e+00 3.81646901e-01 3.64239514e-01] ... [-6.38078213e-01 -1.40803730e+00 -1.76489758e+00 ... 7.95833230e-01 -9.23283875e-01 -7.60242283e-01] [ 6.65810704e-01 -1.86637402e-01 1.17150033e+00 ... 7.43672490e-01 -2.81552643e-01 -1.13458419e+00] [-1.38962555e+00 -5.75932324e-01 1.00247931e+00 ... 2.60841131e-01 1.12747848e-01 -9.63873386e-01]] [[ 1.30978787e+00 1.01063228e+00 -4.12380159e-01 ... -1.15655375e+00 5.01606762e-01 1.72333205e+00] [-1.07052636e+00 -2.30933025e-01 -6.04441345e-01 ... -2.13249207e+00 -1.92597046e-01 -7.19273567e-01] [ 5.36026239e-01 -6.90491557e-01 2.55663037e-01 ... -9.98394430e-01 5.52726865e-01 4.44342673e-01] ... [-4.06222582e-01 -3.59178871e-01 -1.99785575e-01 ... 1.04845965e+00 5.44349074e-01 -4.40438956e-01] [-1.16387419e-01 1.84267783e+00 -1.24578643e+00 ... -4.90868568e-01 2.87265182e-01 9.57661495e-03] [-1.22422469e+00 -1.09819561e-01 -1.13965487e+00 ... 5.23496568e-01 -6.39610410e-01 -1.25788546e+00]] [[-1.38256237e-01 -6.87385559e-01 5.32805264e-01 ... 6.36606574e-01 8.69930446e-01 8.59929204e-01] [-7.15640366e-01 1.31878614e-01 -5.68737322e-03 ... 1.69902258e-02 8.46318960e-01 1.28119266e+00] [ 3.17868638e+00 -1.48081887e+00 -5.62025368e-01 ... -8.58771384e-01 -4.46791947e-01 -3.63705784e-01] ... 
[ 1.99205428e-01 -1.88853908e+00 1.71855330e+00 ... 2.09086657e-01 -1.55733120e+00 -1.35151935e+00] [ 4.17154580e-01 4.21230830e-02 1.11655772e+00 ... 1.03666484e+00 -1.53703883e-01 9.48237956e-01] [ 1.87218499e+00 -7.59307086e-01 7.06035495e-01 ... -9.06112254e-01 -6.95131794e-02 1.18594539e+00]] ... [[ 1.58145165e+00 -1.17829561e+00 8.91949415e-01 ... 7.38148808e-01 -1.83660090e-01 1.68908787e+00] [-2.64070064e-01 -2.18884662e-01 1.25607562e+00 ... -1.75834095e+00 -1.34661406e-01 -6.64261520e-01] [-7.39865184e-01 -3.39157283e-01 -1.09115899e+00 ... 1.10252190e+00 -2.33684802e+00 -6.86272919e-01] ... [ 7.50620067e-02 -9.34408724e-01 -5.75100929e-02 ... 3.53865981e-01 8.04632664e-01 -1.71855235e+00] [ 3.47614735e-01 5.29233575e-01 4.71489489e-01 ... 3.96910235e-02 3.01036984e-01 1.29354250e+00] [ 7.93733954e-01 -1.93717852e-01 1.15318522e-01 ... -1.35030210e+00 7.21986949e-01 -3.23322237e-01]] [[-2.27815390e+00 -1.37092900e+00 -1.19218063e+00 ... 3.66013646e-01 1.24342823e+00 -2.08004698e-01] [-5.58668733e-01 6.80767745e-02 -5.60417235e-01 ... -7.50696480e-01 -1.88413084e+00 3.89179401e-02] [ 4.38659102e-01 8.98620337e-02 -1.70584130e+00 ... -3.49255055e-01 -7.32013136e-02 -3.99660081e-01] ... [ 1.40636420e+00 -2.46858835e-01 2.14521456e+00 ... 1.41815746e+00 -3.52348536e-01 1.50966275e+00] [ 1.77276218e+00 8.30681920e-01 4.10370380e-01 ... -1.13058090e+00 -8.37336183e-01 7.77545273e-01] [-8.73822987e-01 1.37928277e-01 1.24422991e+00 ... -1.01415575e+00 -9.59606469e-01 -2.62442023e-01]] [[ 2.43488955e+00 -2.26006556e+00 -7.56227732e-01 ... 8.74140084e-01 -4.06720549e-01 -1.46300864e+00] [ 6.94952548e-01 9.92701173e-01 2.27003679e-01 ... 1.08463418e+00 3.50265205e-01 2.17065305e-01] [-6.52748525e-01 -8.46856534e-01 1.01294979e-01 ... 5.36906838e-01 2.97480792e-01 4.59109575e-01] ... [-1.79726899e-01 2.16964865e+00 1.82380170e-01 ... 3.77341956e-01 -5.07276654e-01 1.70112327e-01] [ 1.03883135e+00 6.70636415e-01 1.28090405e+00 ... 
7.43179142e-01 1.14301312e+00 -4.88883369e-02] [-4.55716029e-02 1.11428928e+00 1.87311077e+00 ... -9.03497159e-01 -3.07862937e-01 9.78520930e-01]]]]]; ov_res: [[[[[ 4.10584897e-01 -2.61679232e-01 1.96506664e-01 ... 2.02488631e-01 6.13615930e-01 -2.20533222e-01] [ 2.22371370e-02 -1.54572773e+00 3.69519353e-01 ... 8.59915987e-02 1.01104951e+00 -2.75343120e-01] [-4.15193290e-01 4.23707485e-01 2.08016023e-01 ... -3.02493423e-01 2.29562357e-01 1.09291777e-01] ... [-9.98138070e-01 -7.15328097e-01 -2.91558504e-01 ... -6.91168010e-01 2.14949563e-01 -5.24270907e-02] [ 9.36365366e-01 7.48023152e-01 -3.08487296e-01 ... 3.42856437e-01 -6.10683858e-01 5.26830435e-01] [-4.85929340e-01 5.73864460e-01 4.65220869e-01 ... -7.25721002e-01 -4.37075555e-01 6.84800565e-01]] [[ 5.33948600e-01 7.68108070e-01 -1.22326344e-01 ... 6.98675454e-01 -4.76328358e-02 -2.23940238e-01] [ 3.95428360e-01 -7.94767797e-01 -2.83067495e-01 ... 1.12351930e+00 7.86421895e-01 -2.28217527e-01] [-2.38146231e-01 -1.38059878e+00 6.87347293e-01 ... 5.36524475e-01 5.33074200e-01 1.18088984e+00] ... [ 5.73785722e-01 -1.16457276e-01 2.91493744e-01 ... -9.27027464e-01 1.38792777e+00 7.23784119e-02] [-6.52113706e-02 9.83012542e-02 5.69170892e-01 ... -5.17579198e-01 -1.94151193e-01 -3.36677097e-02] [ 5.09645045e-01 9.71614838e-01 1.96487755e-01 ... -9.44104373e-01 2.68372238e-01 -4.41547841e-01]] [[-1.14900209e-01 7.95436740e-01 -1.20086096e-01 ... -4.15680632e-02 -1.90415487e-01 6.99884117e-01] [ 1.54587477e-01 -1.09000754e+00 1.23813069e+00 ... -1.34953424e-01 1.20221961e+00 -9.50244725e-01] [ 1.35038540e-01 1.50734186e-01 5.23622394e-01 ... 3.65953147e-01 -1.02714443e+00 -1.08955348e+00] ... [-1.51225045e-01 2.06842870e-01 1.10354787e-02 ... -1.02083910e+00 -1.55545712e+00 9.69776154e-01] [-8.79311025e-01 1.65170342e-01 -1.59238026e-01 ... -8.37840259e-01 1.24276929e-01 -6.69160366e-01] [ 3.05132538e-01 -8.32560733e-02 -2.23611146e-01 ... -1.32949913e+00 1.07689023e-01 8.78661275e-01]] ... 
[[-5.24171710e-01 5.64116299e-01 8.08661461e-01 ... 4.42314476e-01 -8.08015466e-01 -6.90896451e-01] [ 1.73256886e+00 -2.41306186e-01 -2.03554571e-01 ... -4.55476753e-02 -4.42594826e-01 -4.23546404e-01] [ 6.11959815e-01 5.02133779e-02 -1.33136138e-01 ... 4.88545075e-02 1.40426010e-01 7.23708933e-03] ... [ 2.02122293e-02 7.46532798e-01 -3.78679723e-01 ... -7.47826040e-01 6.75953209e-01 -7.77191460e-01] [ 3.98368090e-01 4.74940866e-01 6.18467629e-01 ... -3.04320663e-01 3.43435374e-03 1.81274009e+00] [ 5.94201088e-01 -3.54739010e-01 -1.64902186e+00 ... -5.73141277e-01 4.39678133e-01 1.83173835e-01]] [[ 1.87549349e-02 7.68335581e-01 4.56367522e-01 ... -5.70518911e-01 -2.91807890e-01 -3.21529001e-01] [ 3.12672019e-01 -6.17718458e-01 1.49712581e-02 ... 2.97845542e-01 -6.12742782e-01 1.37441084e-01] [ 1.14967787e+00 -1.12811960e-01 -3.76383990e-01 ... 2.27130532e-01 1.33197263e-01 -2.17141941e-01] ... [ 2.58360114e-02 -4.90004152e-01 -6.13064587e-01 ... 4.40527499e-01 -4.41746473e-01 -2.83232957e-01] [ 6.71112955e-01 1.44639820e-01 -1.39439678e+00 ... -9.68154728e-01 6.69813395e-01 2.12354243e-01] [ 1.56014189e-01 -2.69716233e-02 7.68295705e-01 ... -4.04962152e-01 1.16363072e+00 5.81064343e-01]] [[ 5.09946704e-01 5.35468221e-01 -6.25380278e-01 ... -6.70190930e-01 -9.07578766e-01 -2.43380636e-01] [-1.22693397e-01 -3.55051607e-01 -9.12176371e-01 ... -1.23118281e-01 -9.83787179e-01 1.33855373e-01] [-2.98094209e-02 -7.16698647e-01 -7.85796583e-01 ... -2.89333493e-01 2.65529156e-01 -4.41111594e-01] ... [-2.39697993e-02 -8.40438426e-01 8.02707255e-01 ... 9.70839322e-01 1.46048069e-01 1.06443620e+00] [ 3.46326381e-02 9.73388791e-01 -6.63038313e-01 ... 4.69580621e-01 -6.21224165e-01 1.96348578e-01] [ 5.35796642e-01 -1.61568975e+00 -1.54269978e-01 ... 9.83637094e-01 -4.68651891e-01 1.40996003e+00]]] [[[ 3.91413599e-01 1.30647704e-01 -2.29301795e-01 ... -5.00008762e-02 -7.70060048e-02 -2.01628311e-03] [-2.61521518e-01 9.61269289e-02 -1.40317112e-01 ... 
2.98960623e-03 -1.47672519e-01 1.69453710e-01] [-2.22237632e-01 -6.63410485e-01 -4.06244427e-01 ... 1.31854070e-02 -6.22065842e-01 -2.22474113e-01] ... [-8.23385045e-02 2.37671852e-01 -5.10784686e-01 ... -4.77844775e-02 -2.96419133e-02 -2.27560356e-01] [ 2.39486769e-01 1.51240662e-01 -6.14788413e-01 ... 2.96308070e-01 -1.90533936e-01 -2.51002192e-01] [ 1.88639954e-01 3.82327735e-02 1.05596684e-01 ... 8.27969890e-03 -4.00370866e-01 3.63717556e-01]] [[ 3.52252334e-01 -1.27855256e-01 -1.06400726e-02 ... -1.61919832e-01 -3.47180605e-01 -8.15805607e-03] [ 4.01325151e-02 1.64769709e-01 -3.62296343e-01 ... 1.34382248e-01 -2.69191295e-01 -3.61604840e-01] [ 7.56567717e-02 -2.25465581e-01 -2.31656194e-01 ... 2.01846864e-02 -4.93405163e-02 7.29442164e-02] ... [ 8.79522860e-02 -6.02397844e-02 -8.04116651e-02 ... 3.39931011e-01 -1.31276578e-01 -1.39903501e-01] [ 1.66260928e-01 -1.49083687e-02 -1.16239354e-01 ... 5.94543144e-02 -2.90579766e-01 -5.51837757e-02] [-1.58689857e-01 -4.84694690e-02 2.86991626e-01 ... -3.51054013e-01 -2.27689788e-01 3.02367598e-01]] [[ 7.33064711e-02 5.52361369e-01 1.29475147e-01 ... -1.32236183e-02 8.37187022e-02 -3.04483771e-01] [-1.20305620e-01 4.27011222e-01 -1.03859745e-01 ... -1.00831486e-01 -4.58309561e-01 6.71483129e-02] [-5.14439344e-02 2.76303496e-02 -4.79612827e-01 ... 4.43011701e-01 1.53894827e-01 2.14366391e-01] ... [ 4.99891043e-01 1.07467026e-01 -1.37229189e-01 ... 1.33974478e-01 -2.38724560e-01 -5.81585765e-01] [ 6.66031316e-02 -1.73936859e-01 -5.69920242e-02 ... -4.16334748e-01 -1.88899517e-01 -2.87321448e-01] [-3.09612006e-01 2.17088252e-01 -3.21444511e-01 ... -4.66858268e-01 -6.52839601e-01 1.13573596e-01]] ... [[ 1.69026271e-01 2.94878334e-01 -8.61933604e-02 ... -8.10847357e-02 1.19118221e-01 1.65133372e-01] [-7.52362162e-02 -3.83991282e-03 1.51393205e-01 ... -1.89876817e-02 -4.31057870e-01 1.64224103e-01] [ 1.59768835e-02 1.24261975e-01 1.85141057e-01 ... 2.28708014e-01 5.86605519e-02 3.07460517e-01] ... 
[-2.35235333e-01 -2.13824123e-01 2.77392179e-01 ... 2.29054585e-01 -2.23311901e-01 1.28196716e-01] [ 3.00294220e-01 -1.02612842e-02 -6.96286440e-01 ... 5.83808541e-01 -1.35914832e-01 1.90892324e-01] [-1.88515648e-01 -1.12039953e-01 1.22529566e-01 ... -4.04801816e-02 3.76351574e-03 -7.08190426e-02]] [[-1.48429468e-01 2.95577347e-01 -1.57979429e-01 ... 1.44793594e-03 6.43396154e-02 2.36952975e-01] [ 1.80235878e-01 -2.74010445e-03 -1.84454042e-02 ... -1.74501806e-01 1.14935882e-01 -1.98273346e-01] [-9.79543552e-02 1.78078786e-01 -3.05283010e-01 ... 1.51060164e-01 1.52766943e-01 -2.54705697e-01] ... [ 3.39571089e-01 5.70176961e-03 -1.61225647e-01 ... 1.02607295e-01 -6.00890741e-02 -2.14115418e-02] [ 1.80493280e-01 -4.77726497e-02 -3.92502576e-01 ... -4.29515652e-02 2.38054425e-01 1.30244032e-01] [-4.41228032e-01 -7.16325045e-02 1.62505265e-02 ... 2.83530235e-01 4.38444406e-01 -3.93683434e-01]] [[ 2.52053700e-03 9.19666588e-02 1.61018282e-01 ... -4.28814411e-01 -1.67198852e-01 6.72932714e-02] [ 5.30024111e-01 -1.99670643e-01 1.48218825e-01 ... -5.41500887e-03 -6.47512749e-02 -2.24727556e-01] [ 1.93596166e-02 6.52059764e-02 -4.76487502e-02 ... -9.02743116e-02 -3.88080515e-02 -2.42485087e-02] ... [-1.83368698e-01 3.03885750e-02 -3.90082002e-02 ... -2.91236430e-01 1.31172985e-01 1.10178016e-01] [ 2.76466399e-01 -2.30243832e-01 -2.22199768e-01 ... -4.72265244e-01 2.31849611e-01 -5.53318299e-02] [-1.78235754e-01 -3.16116095e-01 -1.23758085e-01 ... 3.21016192e-01 -3.81323732e-02 -5.62775508e-02]]] [[[ 8.84911537e-01 -1.57972410e-01 9.11005259e-01 ... -4.82866585e-01 1.20916140e+00 1.47691131e+00] [-1.01507115e+00 4.41770069e-02 -1.58644002e-02 ... 4.61413153e-02 -1.59198895e-01 3.95814441e-02] [ 1.53071091e-01 5.95001817e-01 -1.64662862e+00 ... -1.27680826e+00 3.81646901e-01 3.64239514e-01] ... [-6.38078213e-01 -1.40803730e+00 -1.76489758e+00 ... 7.95833230e-01 -9.23283875e-01 -7.60242283e-01] [ 6.65810704e-01 -1.86637402e-01 1.17150033e+00 ... 
7.43672490e-01 -2.81552643e-01 -1.13458419e+00] [-1.38962555e+00 -5.75932324e-01 1.00247931e+00 ... 2.60841131e-01 1.12747848e-01 -9.63873386e-01]] [[ 1.30978787e+00 1.01063228e+00 -4.12380159e-01 ... -1.15655375e+00 5.01606762e-01 1.72333205e+00] [-1.07052636e+00 -2.30933025e-01 -6.04441345e-01 ... -2.13249207e+00 -1.92597046e-01 -7.19273567e-01] [ 5.36026239e-01 -6.90491557e-01 2.55663037e-01 ... -9.98394430e-01 5.52726865e-01 4.44342673e-01] ... [-4.06222582e-01 -3.59178871e-01 -1.99785575e-01 ... 1.04845965e+00 5.44349074e-01 -4.40438956e-01] [-1.16387419e-01 1.84267783e+00 -1.24578643e+00 ... -4.90868568e-01 2.87265182e-01 9.57661495e-03] [-1.22422469e+00 -1.09819561e-01 -1.13965487e+00 ... 5.23496568e-01 -6.39610410e-01 -1.25788546e+00]] [[-1.38256237e-01 -6.87385559e-01 5.32805264e-01 ... 6.36606574e-01 8.69930446e-01 8.59929204e-01] [-7.15640366e-01 1.31878614e-01 -5.68737322e-03 ... 1.69902258e-02 8.46318960e-01 1.28119266e+00] [ 3.17868638e+00 -1.48081887e+00 -5.62025368e-01 ... -8.58771384e-01 -4.46791947e-01 -3.63705784e-01] ... [ 1.99205428e-01 -1.88853908e+00 1.71855330e+00 ... 2.09086657e-01 -1.55733120e+00 -1.35151935e+00] [ 4.17154580e-01 4.21230830e-02 1.11655772e+00 ... 1.03666484e+00 -1.53703883e-01 9.48237956e-01] [ 1.87218499e+00 -7.59307086e-01 7.06035495e-01 ... -9.06112254e-01 -6.95131794e-02 1.18594539e+00]] ... [[ 1.58145165e+00 -1.17829561e+00 8.91949415e-01 ... 7.38148808e-01 -1.83660090e-01 1.68908787e+00] [-2.64070064e-01 -2.18884662e-01 1.25607562e+00 ... -1.75834095e+00 -1.34661406e-01 -6.64261520e-01] [-7.39865184e-01 -3.39157283e-01 -1.09115899e+00 ... 1.10252190e+00 -2.33684802e+00 -6.86272919e-01] ... [ 7.50620067e-02 -9.34408724e-01 -5.75100929e-02 ... 3.53865981e-01 8.04632664e-01 -1.71855235e+00] [ 3.47614735e-01 5.29233575e-01 4.71489489e-01 ... 3.96910235e-02 3.01036984e-01 1.29354250e+00] [ 7.93733954e-01 -1.93717852e-01 1.15318522e-01 ... 
-1.35030210e+00 7.21986949e-01 -3.23322237e-01]] [[-2.27815390e+00 -1.37092900e+00 -1.19218063e+00 ... 3.66013646e-01 1.24342823e+00 -2.08004698e-01] [-5.58668733e-01 6.80767745e-02 -5.60417235e-01 ... -7.50696480e-01 -1.88413084e+00 3.89179401e-02] [ 4.38659102e-01 8.98620337e-02 -1.70584130e+00 ... -3.49255055e-01 -7.32013136e-02 -3.99660081e-01] ... [ 1.40636420e+00 -2.46858835e-01 2.14521456e+00 ... 1.41815746e+00 -3.52348536e-01 1.50966275e+00] [ 1.77276218e+00 8.30681920e-01 4.10370380e-01 ... -1.13058090e+00 -8.37336183e-01 7.77545273e-01] [-8.73822987e-01 1.37928277e-01 1.24422991e+00 ... -1.01415575e+00 -9.59606469e-01 -2.62442023e-01]] [[ 2.43488955e+00 -2.26006556e+00 -7.56227732e-01 ... 8.74140084e-01 -4.06720549e-01 -1.46300864e+00] [ 6.94952548e-01 9.92701173e-01 2.27003679e-01 ... 1.08463418e+00 3.50265205e-01 2.17065305e-01] [-6.52748525e-01 -8.46856534e-01 1.01294979e-01 ... 5.36906838e-01 2.97480792e-01 4.59109575e-01] ... [-1.79726899e-01 2.16964865e+00 1.82380170e-01 ... 3.77341956e-01 -5.07276654e-01 1.70112327e-01] [ 1.03883135e+00 6.70636415e-01 1.28090405e+00 ... 7.43179142e-01 1.14301312e+00 -4.88883369e-02] [-4.55716029e-02 1.11428928e+00 1.87311077e+00 ... -9.03497159e-01 -3.07862937e-01 9.78520930e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_cumsum.py::TestCumSum::test_cumsum[ ie_device:CPU - precision:FP32 - axis:0 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_cumsum.___torch_mangle_1162.aten_cumsum, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.axis : int = prim::Constant[value=0]() %4 : Tensor = aten::cumsum(%x.1, %self.axis, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_cumsum.py:22:23 return (%4) fw_re: [[[[-0.19355899 -0.5925803 -1.155251 ... -0.2701589 -0.5286189 -1.043548 ] [ 1.1226704 2.0637364 0.9023996 ... -0.811162 0.05765459 -0.29272237] [ 1.0171378 0.7361811 1.0435379 ... 1.0747836 -0.0972623 -0.8268474 ] ... [ 0.2068193 1.1256858 0.7075288 ... 1.3759451 0.03380796 -0.7755332 ] [ 0.6525484 -0.09952906 -1.1971081 ... 0.7333328 -0.01819696 0.60587686] [ 0.84732544 -0.6065801 -1.4409503 ... -0.83929014 -0.8456723 -0.6830948 ]] [[-2.124289 0.6052781 -0.37178123 ... -0.5376495 -0.88464725 -0.143451 ] [-0.1164374 0.7878938 0.86092794 ... -0.5990807 -0.9094152 -1.4606441 ] [ 1.3601977 -0.6304261 0.3215956 ... 0.793389 -1.0814625 -0.32292524] ... [-1.0253979 -0.7094353 -1.1296186 ... 1.0183325 -0.08133316 -0.23661666] [-1.3668554 0.11857578 -0.53573155 ... 0.4715719 -1.0599728 1.156763 ] [ 0.7073319 0.6586978 0.89849275 ... 1.1402414 -0.2603762 1.0190852 ]] [[ 1.4163157 -0.9335256 1.3990232 ... -0.64577985 -1.7584598 -0.70947635] [-0.5620701 1.5871633 0.18321338 ... -1.1791171 -0.3102751 0.24712354] [-0.70640075 0.3781057 -1.3968637 ... -0.62402254 -0.3311224 0.4621991 ] ... [ 1.8919846 -1.5841537 -0.21148266 ... -0.96983844 -2.1832707 -1.2086965 ] [ 0.03625459 0.25093132 -0.15997092 ... 0.38837597 1.4714826 -2.8815954 ] [ 0.33267364 1.1820596 0.12156031 ... 1.0184354 -0.5567211 1.4045062 ]]]]; ov_res: [[[[-0.19355899 -0.5925803 -1.155251 ... -0.2701589 -0.5286189 -1.043548 ] [ 1.1226704 2.0637364 0.9023996 ... -0.811162 0.05765459 -0.29272237] [ 1.0171378 0.7361811 1.0435379 ... 1.0747836 -0.0972623 -0.8268474 ] ... [ 0.2068193 1.1256858 0.7075288 ... 
1.3759451 0.03380796 -0.7755332 ] [ 0.6525484 -0.09952906 -1.1971081 ... 0.7333328 -0.01819696 0.60587686] [ 0.84732544 -0.6065801 -1.4409503 ... -0.83929014 -0.8456723 -0.6830948 ]] [[-2.124289 0.6052781 -0.37178123 ... -0.5376495 -0.88464725 -0.143451 ] [-0.1164374 0.7878938 0.86092794 ... -0.5990807 -0.9094152 -1.4606441 ] [ 1.3601977 -0.6304261 0.3215956 ... 0.793389 -1.0814625 -0.32292524] ... [-1.0253979 -0.7094353 -1.1296186 ... 1.0183325 -0.08133316 -0.23661666] [-1.3668554 0.11857578 -0.53573155 ... 0.4715719 -1.0599728 1.156763 ] [ 0.7073319 0.6586978 0.89849275 ... 1.1402414 -0.2603762 1.0190852 ]] [[ 1.4163157 -0.9335256 1.3990232 ... -0.64577985 -1.7584598 -0.70947635] [-0.5620701 1.5871633 0.18321338 ... -1.1791171 -0.3102751 0.24712354] [-0.70640075 0.3781057 -1.3968637 ... -0.62402254 -0.3311224 0.4621991 ] ... [ 1.8919846 -1.5841537 -0.21148266 ... -0.96983844 -2.1832707 -1.2086965 ] [ 0.03625459 0.25093132 -0.15997092 ... 0.38837597 1.4714826 -2.8815954 ] [ 0.33267364 1.1820596 0.12156031 ... 1.0184354 -0.5567211 1.4045062 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_cumsum.py::TestCumSum::test_cumsum[ ie_device:CPU - precision:FP32 - axis:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_cumsum.___torch_mangle_1164.aten_cumsum, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.axis : int = prim::Constant[value=1]() %4 : Tensor = aten::cumsum(%x.1, %self.axis, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_cumsum.py:22:23 return (%4) fw_re: [[[[ 0.77311015 1.0218333 -1.7249333 ... 1.3052567 -0.07116438 -0.09811558] [ 1.2361125 0.25909337 0.40183103 ... -1.2489731 -0.54993117 -1.0265754 ] [ 0.9417325 1.5132768 -0.74469787 ... -0.96285003 1.6976746 -2.1283016 ] ... [ 0.44786057 2.0212562 -0.48610336 ... 1.2467252 -0.34384584 -0.6205678 ] [-0.07258883 -0.07639123 0.22852425 ... -0.6669709 0.21392559 0.04049014] [-1.6110791 -0.32338193 -1.1841063 ... -0.06481057 0.311409 -0.33593166]] [[ 0.13369769 0.70060873 -4.8187294 ... 2.6038222 0.7593495 -0.25187993] [ 1.542893 0.6698125 -0.9661468 ... -2.8949938 -0.74619216 -1.6701553 ] [ 1.3914955 3.3007536 -1.4359555 ... -1.0191258 3.352767 -1.7307639 ] ... [ 1.9213592 1.9144492 -0.56909525 ... 0.33342487 -0.26442403 -1.7782072 ] [ 0.82903016 -2.463058 0.34534115 ... -1.9734986 -0.6612196 0.352231 ] [-0.52700615 0.27940586 -0.7977382 ... -0.49143818 0.7687063 -0.14073177]] [[-1.5814345 1.2394316 -5.8733335 ... 2.429152 -0.49728948 2.3219469 ] [ 0.85997415 2.338158 -2.0562043 ... -4.463763 0.4233082 -1.1173506 ] [-0.44196382 2.6664202 -0.69884497 ... -0.8132902 1.9677811 -0.9080695 ] ... [ 2.1241074 1.157113 -0.14589752 ... 0.9515328 1.2908963 -0.1712848 ] [ 0.92367804 -2.6291058 -0.06327686 ... -3.449303 0.42646646 0.37539133] [ 0.35224408 0.39685103 -0.5509237 ... 0.454999 -0.01462793 -2.4793391 ]]]]; ov_res: [[[[ 0.77311015 1.0218333 -1.7249333 ... 1.3052567 -0.07116438 -0.09811558] [ 1.2361125 0.25909337 0.40183103 ... -1.2489731 -0.54993117 -1.0265754 ] [ 0.9417325 1.5132768 -0.74469787 ... -0.96285003 1.6976746 -2.1283016 ] ... [ 0.44786057 2.0212562 -0.48610336 ... 
1.2467252 -0.34384584 -0.6205678 ] [-0.07258883 -0.07639123 0.22852425 ... -0.6669709 0.21392559 0.04049014] [-1.6110791 -0.32338193 -1.1841063 ... -0.06481057 0.311409 -0.33593166]] [[ 0.13369769 0.70060873 -4.8187294 ... 2.6038222 0.7593495 -0.25187993] [ 1.542893 0.6698125 -0.9661468 ... -2.8949938 -0.74619216 -1.6701553 ] [ 1.3914955 3.3007536 -1.4359555 ... -1.0191258 3.352767 -1.7307639 ] ... [ 1.9213592 1.9144492 -0.56909525 ... 0.33342487 -0.26442403 -1.7782072 ] [ 0.82903016 -2.463058 0.34534115 ... -1.9734986 -0.6612196 0.352231 ] [-0.52700615 0.27940586 -0.7977382 ... -0.49143818 0.7687063 -0.14073177]] [[-1.5814345 1.2394316 -5.8733335 ... 2.429152 -0.49728948 2.3219469 ] [ 0.85997415 2.338158 -2.0562043 ... -4.463763 0.4233082 -1.1173505 ] [-0.4419638 2.66642 -0.6988449 ... -0.8132902 1.9677812 -0.90806955] ... [ 2.1241074 1.1571131 -0.14589751 ... 0.9515328 1.2908962 -0.1712848 ] [ 0.92367804 -2.6291058 -0.06327686 ... -3.4493027 0.42646646 0.37539133] [ 0.35224408 0.39685103 -0.5509237 ... 0.454999 -0.01462793 -2.4793391 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_cumsum.py::TestCumSum::test_cumsum[ ie_device:CPU - precision:FP32 - axis:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_cumsum.___torch_mangle_1166.aten_cumsum, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.axis : int = prim::Constant[value=2]() %4 : Tensor = aten::cumsum(%x.1, %self.axis, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_cumsum.py:22:23 return (%4) eption happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_Dfw_re: [[[[ 1.09714770e+00 1.93877959e+00 -1.91135085e+00 ... -1.31741762e-01 4.91327107e-01 6.88052416e-01] [ 4.47568595e-01 1.75270450e+00 -1.74988413e+00 ... 3.89793873e-01 -1.69579601e+00 3.21581197e+00] [-2.78669238e-01 1.44082642e+00 -1.20305216e+00 ... 1.08832419e+00 -2.17534208e+00 3.44555163e+00] ... [ 1.43100185e+01 1.83712757e+00 -1.07133865e+01 ... 2.93840438e-01 -2.57195778e+01 -1.94939251e+01] [ 1.64814358e+01 -1.09521639e+00 -1.20911169e+01 ... -5.99864483e-01 -2.60545673e+01 -2.11638260e+01] [ 1.71644840e+01 -1.62508821e+00 -9.73783684e+00 ... 7.54152179e-01 -2.67217808e+01 -2.22276115e+01]] [[ 3.42629761e-01 4.17997688e-01 2.17992574e-01 ... -1.51066124e+00 -1.60831228e-01 -8.00925255e-01] [ 5.02093315e-01 -6.32941723e-01 -1.18715274e+00 ... -2.35041213e+00 1.56875849e-02 -1.43480134e+00] [ 1.52418518e+00 -5.97364366e-01 -8.34178627e-01 ... -3.37883949e+00 -4.13874954e-01 -3.59650731e+00] ... [ 1.19619341e+01 -1.21626177e+01 -1.98762608e+00 ... 6.66365528e+00 7.60854816e+00 1.17123899e+01] [ 1.16829205e+01 -1.27548895e+01 -1.84311318e+00 ... 7.42823887e+00 6.95035076e+00 1.08474665e+01] [ 1.20800915e+01 -1.19877529e+01 -3.29048514e+00 ... 6.09827137e+00 7.12288284e+00 1.06293392e+01]] [[-2.03928566e+00 5.52062243e-02 -1.95399082e+00 ... 2.81758875e-01 -4.72824633e-01 5.34227848e-01] [-3.09687185e+00 1.85197651e+00 -3.10394955e+00 ... 1.87272167e+00 -2.61882544e-01 5.60156822e-01] [-4.40828705e+00 1.65070701e+00 -3.64431262e+00 ... 
9.62068319e-01 -5.48742592e-01 1.12432010e-01] ... [-5.62548113e+00 -1.06540287e+00 -1.72925282e+01 ... 2.55734406e+01 6.39506960e+00 -4.82983023e-01] [-3.17460537e+00 -1.31160402e+00 -1.61926575e+01 ... 2.49950504e+01 6.39933491e+00 1.27085221e+00] [-2.69893503e+00 -1.47720456e-01 -1.61654320e+01 ... 2.38901978e+01 6.46770334e+00 1.48448968e+00]]]]; ov_res: [[[[ 1.0971477e+00 1.9387796e+00 -1.9113508e+00 ... -1.3174176e-01 4.9132711e-01 6.8805242e-01] [ 4.4756860e-01 1.7527045e+00 -1.7498841e+00 ... 3.8979387e-01 -1.6957960e+00 3.2158120e+00] [-2.7866924e-01 1.4408264e+00 -1.2030520e+00 ... 1.0883242e+00 -2.1753421e+00 3.4455516e+00] ... [ 1.4310018e+01 1.8371224e+00 -1.0713385e+01 ... 2.9383978e-01 -2.5719582e+01 -1.9493923e+01] [ 1.6481436e+01 -1.0952215e+00 -1.2091116e+01 ... -5.9986508e-01 -2.6054571e+01 -2.1163824e+01] [ 1.7164484e+01 -1.6250932e+00 -9.7378359e+00 ... 7.5415158e-01 -2.6721786e+01 -2.2227610e+01]] [[ 3.4262976e-01 4.1799769e-01 2.1799257e-01 ... -1.5106612e+00 -1.6083123e-01 -8.0092525e-01] [ 5.0209332e-01 -6.3294172e-01 -1.1871527e+00 ... -2.3504121e+00 1.5687585e-02 -1.4348013e+00] [ 1.5241852e+00 -5.9736431e-01 -8.3417869e-01 ... -3.3788395e+00 -4.1387495e-01 -3.5965073e+00] ... [ 1.1961935e+01 -1.2162621e+01 -1.9876195e+00 ... 6.6636534e+00 7.6085467e+00 1.1712392e+01] [ 1.1682921e+01 -1.2754892e+01 -1.8431066e+00 ... 7.4282374e+00 6.9503493e+00 1.0847468e+01] [ 1.2080092e+01 -1.1987756e+01 -3.2904787e+00 ... 6.0982699e+00 7.1228814e+00 1.0629341e+01]] [[-2.0392857e+00 5.5206224e-02 -1.9539908e+00 ... 2.8175887e-01 -4.7282463e-01 5.3422785e-01] [-3.0968719e+00 1.8519765e+00 -3.1039495e+00 ... 1.8727217e+00 -2.6188254e-01 5.6015682e-01] [-4.4082870e+00 1.6507069e+00 -3.6443126e+00 ... 9.6206832e-01 -5.4874253e-01 1.1243203e-01] ... [-5.6254797e+00 -1.0654050e+00 -1.7292524e+01 ... 2.5573433e+01 6.3950686e+00 -4.8298454e-01] [-3.1746039e+00 -1.3116062e+00 -1.6192654e+01 ... 
2.4995045e+01 6.3993340e+00 1.2708507e+00] [-2.6989336e+00 -1.4772260e-01 -1.6165428e+01 ... 2.3890192e+01 6.4677024e+00 1.4844881e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_cumsum.py::TestCumSum::test_cumsum[ ie_device:CPU - precision:FP32 - axis:3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_cumsum.___torch_mangle_1168.aten_cumsum, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.axis : int = prim::Constant[value=3]() %4 : Tensor = aten::cumsum(%x.1, %self.axis, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_cumsum.py:22:23 return (%4) fw_re: [[[[ -1.1132233 -1.321107 -2.7182937 ... 17.141602 16.790722 17.259022 ] [ -1.0289252 -1.7332416 -0.71083385 ... 12.066278 13.272629 11.22789 ] [ 0.5797937 -0.78069603 -1.4481959 ... 3.8174083 3.7845852 4.0418134 ] ... [ -0.22587344 1.966017 2.2509315 ... -7.3437853 -7.4624076 -7.4247837 ] [ -0.04626679 0.9730772 1.3117467 ... 5.3319955 5.7547994 6.705416 ] [ 0.16870144 -1.2917134 -2.734342 ... -4.7295184 -5.655341 -3.4895637 ]] [[ 0.3182368 -1.1566927 -1.4874533 ... -12.689021 -13.035893 -12.695709 ] [ 0.14199935 1.0688334 0.54116714 ... 9.1248455 8.095436 6.953335 ] [ 0.10459463 1.2593278 0.16219513 ... -12.587085 -13.124382 -12.995917 ] ... [ -1.3706791 -0.77092314 0.5114784 ... -16.346554 -16.136316 -14.49766 ] [ -0.28178412 0.9714791 1.2775487 ... -7.5747666 -7.5938945 -8.899741 ] [ -1.1006587 0.3160987 1.1078155 ... 17.3624 17.058212 17.14762 ]] [[ -0.6646611 -0.61656505 -1.0389813 ... -17.764507 -18.000471 -16.5047 ] [ 0.61532664 -0.3111108 -1.3429806 ... -9.7352 -10.327283 -9.966263 ] [ -0.6051853 -0.7546346 -2.267319 ... 8.352545 9.032347 9.794482 ] ... [ 1.6679574 0.7768209 1.261844 ... 6.376667 6.057214 6.1830893 ] [ 1.4002612 1.57136 2.5138254 ... -14.694312 -15.3306 -14.726444 ] [ -1.1131815 0.06373179 1.1634053 ... -4.2060146 -3.6652236 -4.4815183 ]]]]; ov_res: [[[[ -1.1132233 -1.321107 -2.7182937 ... 17.141603 16.790724 17.259024 ] [ -1.0289252 -1.7332416 -0.7108338 ... 12.0662775 13.272628 11.227889 ] [ 0.5797937 -0.78069603 -1.4481959 ... 3.8174133 3.7845902 4.0418186 ] ... [ -0.22587344 1.966017 2.2509315 ... 
-7.343787 -7.46241 -7.424786 ] [ -0.04626679 0.9730772 1.3117467 ... 5.331994 5.754798 6.705415 ] [ 0.16870144 -1.2917134 -2.734342 ... -4.729516 -5.655339 -3.4895613 ]] [[ 0.3182368 -1.1566927 -1.4874533 ... -12.689026 -13.035898 -12.695715 ] [ 0.14199935 1.0688334 0.5411671 ... 9.124847 8.095438 6.9533362 ] [ 0.10459463 1.2593278 0.16219509 ... -12.587084 -13.124381 -12.995916 ] ... [ -1.3706791 -0.77092314 0.5114784 ... -16.346552 -16.136314 -14.497657 ] [ -0.28178412 0.9714791 1.2775487 ... -7.574768 -7.5938954 -8.899742 ] [ -1.1006587 0.3160987 1.1078155 ... 17.3624 17.058212 17.14762 ]] [[ -0.6646611 -0.61656505 -1.0389814 ... -17.764511 -18.000475 -16.504704 ] [ 0.61532664 -0.3111108 -1.3429806 ... -9.7352 -10.327283 -9.966263 ] [ -0.6051853 -0.7546346 -2.267319 ... 8.352545 9.032347 9.794482 ] ... [ 1.6679574 0.7768209 1.261844 ... 6.376666 6.0572133 6.183089 ] [ 1.4002612 1.57136 2.5138254 ... -14.694316 -15.330604 -14.726448 ] [ -1.1131815 0.06373179 1.1634053 ... -4.206015 -3.665224 -4.4815187 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_cumsum.py::TestCumSum::test_cumsum[ ie_device:CPU - precision:FP32 - axis:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_cumsum.___torch_mangle_1170.aten_cumsum, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.axis : int = prim::Constant[value=-1]() %4 : Tensor = aten::cumsum(%x.1, %self.axis, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_cumsum.py:22:23 return (%4) fw_re: [[[[-3.77038270e-01 -2.06334710e+00 -1.03775454e+00 ... 1.47845488e+01 1.38250170e+01 1.34941139e+01] [ 1.33276191e-02 -3.65951568e-01 -1.18809831e+00 ... -7.33802319e+00 -8.06791782e+00 -9.36498547e+00] [ 1.48401809e+00 2.66809464e-01 -3.84532154e-01 ... -1.67396412e+01 -1.67124214e+01 -1.78846226e+01] ... [-1.40077364e+00 -9.14983749e-01 1.55114198e+00 ... 1.28773088e+01 1.26165323e+01 1.06008158e+01] [ 1.60586560e+00 1.45693088e+00 8.20287943e-01 ... -5.69900692e-01 -2.58035421e+00 -2.03269506e+00] [ 8.88643861e-02 7.52087295e-01 -3.28428328e-01 ... 1.23920097e+01 1.31539078e+01 1.08371763e+01]] [[ 1.86768079e+00 9.75656331e-01 5.49796641e-01 ... -3.65004373e+00 -5.37970018e+00 -4.74671268e+00] [-9.03655410e-01 -1.70353603e+00 -1.34389317e+00 ... -9.54071903e+00 -1.07906675e+01 -1.04647007e+01] [ 1.27597439e+00 1.38873696e+00 1.18688121e-02 ... 1.26575251e+01 1.38726234e+01 1.39252052e+01] ... [-4.40295756e-01 -1.36663198e+00 -1.54631698e+00 ... 1.07842798e+01 1.01526766e+01 1.00895481e+01] [-7.77662694e-01 -1.12698257e+00 -3.52903068e-01 ... -4.46372896e-01 6.05530202e-01 5.79539657e-01] [-9.40672994e-01 -1.13662887e+00 -6.64018989e-02 ... 2.12973762e+00 2.91334844e+00 1.47491527e+00]] [[-2.43359074e-01 2.91595995e-01 7.79888570e-01 ... 1.51196613e+01 1.51441116e+01 1.51253681e+01] [ 4.58566308e-01 -2.23414600e-02 -4.50277478e-01 ... -1.29274774e+00 -1.56040418e+00 -3.46356332e-01] [-1.07354596e-01 -1.25418198e+00 -2.49235296e+00 ... 3.55685592e+00 4.78801155e+00 3.61545181e+00] ... [-8.09368312e-01 -1.74616313e+00 -6.95262372e-01 ... 
-1.45629339e+01 -1.38205872e+01 -1.31862278e+01] [-3.37893218e-02 4.99310553e-01 2.07074618e+00 ... -1.07262955e+01 -1.15515594e+01 -1.18202124e+01] [-1.62433326e-01 -3.98022309e-02 6.27423227e-02 ... 8.73003578e+00 8.80155087e+00 8.02413368e+00]]]]; ov_res: [[[[-3.77038270e-01 -2.06334710e+00 -1.03775465e+00 ... 1.47845507e+01 1.38250198e+01 1.34941168e+01] [ 1.33276191e-02 -3.65951568e-01 -1.18809831e+00 ... -7.33802319e+00 -8.06791782e+00 -9.36498451e+00] [ 1.48401809e+00 2.66809464e-01 -3.84532154e-01 ... -1.67396412e+01 -1.67124233e+01 -1.78846226e+01] ... [-1.40077364e+00 -9.14983749e-01 1.55114198e+00 ... 1.28773050e+01 1.26165285e+01 1.06008120e+01] [ 1.60586560e+00 1.45693088e+00 8.20287883e-01 ... -5.69902301e-01 -2.58035564e+00 -2.03269672e+00] [ 8.88643861e-02 7.52087295e-01 -3.28428328e-01 ... 1.23920097e+01 1.31539078e+01 1.08371773e+01]] [[ 1.86768079e+00 9.75656331e-01 5.49796641e-01 ... -3.65004396e+00 -5.37970066e+00 -4.74671316e+00] [-9.03655410e-01 -1.70353603e+00 -1.34389317e+00 ... -9.54071999e+00 -1.07906694e+01 -1.04647017e+01] [ 1.27597439e+00 1.38873696e+00 1.18688345e-02 ... 1.26575241e+01 1.38726225e+01 1.39252043e+01] ... [-4.40295756e-01 -1.36663198e+00 -1.54631698e+00 ... 1.07842827e+01 1.01526794e+01 1.00895510e+01] [-7.77662694e-01 -1.12698257e+00 -3.52903068e-01 ... -4.46370959e-01 6.05532169e-01 5.79541564e-01] [-9.40672994e-01 -1.13662887e+00 -6.64018393e-02 ... 2.12974119e+00 2.91335201e+00 1.47491884e+00]] [[-2.43359074e-01 2.91595995e-01 7.79888570e-01 ... 1.51196642e+01 1.51441145e+01 1.51253700e+01] [ 4.58566308e-01 -2.23414600e-02 -4.50277478e-01 ... -1.29274857e+00 -1.56040502e+00 -3.46357107e-01] [-1.07354596e-01 -1.25418198e+00 -2.49235296e+00 ... 3.55685472e+00 4.78801060e+00 3.61545086e+00] ... [-8.09368312e-01 -1.74616313e+00 -6.95262313e-01 ... -1.45629349e+01 -1.38205881e+01 -1.31862288e+01] [-3.37893218e-02 4.99310553e-01 2.07074618e+00 ... 
-1.07262993e+01 -1.15515633e+01 -1.18202162e+01] [-1.62433326e-01 -3.98022309e-02 6.27423227e-02 ... 8.73003674e+00 8.80155182e+00 8.02413559e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_cumsum.py::TestCumSum::test_cumsum[ ie_device:CPU - precision:FP32 - axis:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_cumsum.___torch_mangle_1172.aten_cumsum, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.axis : int = prim::Constant[value=-2]() %4 : Tensor = aten::cumsum(%x.1, %self.axis, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_cumsum.py:22:23 return (%4) fw_re: [[[[ -1.1654441 -0.36989248 -0.7786916 ... -1.3448007 -0.20644495 1.6549003 ] [ -2.2815952 0.7098464 -0.36469832 ... -1.5769144 -1.6480681 2.408393 ] [ -2.3529823 -0.2987938 -0.5007429 ... -1.9270152 -0.76771545 2.3587987 ] ... [ 27.689238 -4.217049 -10.318423 ... -4.7132587 16.367188 -7.956256 ] [ 27.63322 -2.3455637 -10.742367 ... -4.644217 17.161238 -8.364514 ] [ 28.189253 -1.5536822 -9.812266 ... -5.150331 17.160418 -7.0795503 ]] [[ 0.11285148 -2.1543155 0.18073845 ... 0.65359855 0.768049 -0.9304748 ] [ 0.21167132 -1.7045665 -0.3669443 ... 2.0206232 0.6968288 -1.6070427 ] [ 1.2056814 -1.7643895 -0.51303065 ... 2.2232332 1.3069575 -1.8278522 ] ... [ 30.623669 21.104057 -7.958778 ... -2.77881 11.782135 9.163623 ] [ 29.284811 20.832546 -8.510087 ... -1.1889589 10.768969 8.201298 ] [ 29.81962 20.682312 -8.824914 ... -0.61516815 10.972047 8.168918 ]] [[ -0.932523 0.26400608 -0.84128493 ... -0.11419638 -0.38679892 0.83504313] [ -2.2081838 1.3529017 -1.0794125 ... -2.8149507 -0.15675741 -1.680418 ] [ -2.898259 1.942219 -0.45318347 ... -1.8994958 0.37578297 -0.90387887] ... [-25.744553 -8.282784 26.620977 ... 23.03063 -18.174482 -21.298586 ] [-26.113306 -7.289415 25.47887 ... 23.548561 -16.652222 -20.787746 ] [-27.758606 -6.150361 25.76797 ... 23.29877 -15.995627 -20.53831 ]]]]; ov_res: [[[[ -1.1654441 -0.36989248 -0.7786916 ... -1.3448007 -0.20644495 1.6549003 ] [ -2.2815952 0.7098464 -0.36469832 ... -1.5769144 -1.6480681 2.408393 ] [ -2.3529823 -0.2987938 -0.5007429 ... -1.9270152 -0.76771545 2.3587987 ] ... [ 27.68924 -4.2170496 -10.318424 ... 
-4.7132607 16.36719 -7.956249 ] [ 27.633223 -2.345564 -10.742368 ... -4.644219 17.16124 -8.364508 ] [ 28.189255 -1.5536823 -9.812266 ... -5.150333 17.16042 -7.0795436 ]] [[ 0.11285148 -2.1543155 0.18073845 ... 0.65359855 0.768049 -0.9304748 ] [ 0.21167132 -1.7045665 -0.3669443 ... 2.0206232 0.6968288 -1.6070427 ] [ 1.2056814 -1.7643895 -0.51303065 ... 2.2232332 1.3069575 -1.8278522 ] ... [ 30.623669 21.104052 -7.958777 ... -2.77881 11.782139 9.163621 ] [ 29.284811 20.83254 -8.510087 ... -1.1889589 10.768972 8.201296 ] [ 29.819618 20.682306 -8.824913 ... -0.61516815 10.972051 8.168916 ]] [[ -0.932523 0.26400608 -0.84128493 ... -0.11419638 -0.38679892 0.83504313] [ -2.2081838 1.3529017 -1.0794125 ... -2.8149507 -0.15675741 -1.680418 ] [ -2.898259 1.942219 -0.45318347 ... -1.8994957 0.37578297 -0.9038788 ] ... [-25.744549 -8.282785 26.620981 ... 23.030636 -18.174494 -21.29859 ] [-26.113302 -7.289416 25.478874 ... 23.548565 -16.652233 -20.787748 ] [-27.758602 -6.150362 25.767973 ... 23.298775 -15.995639 -20.538311 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_cumsum.py::TestCumSum::test_cumsum[ ie_device:CPU - precision:FP32 - axis:-3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_cumsum.___torch_mangle_1174.aten_cumsum, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.axis : int = prim::Constant[value=-3]() %4 : Tensor = aten::cumsum(%x.1, %self.axis, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_cumsum.py:22:23 return (%4) fw_re: [[[[-0.21737862 -0.5670901 -1.2124772 ... -1.3541305 0.6330615 1.1785525 ] [-0.2207023 0.9774833 0.92352676 ... 0.40565112 0.2057188 0.3422103 ] [-0.97569925 0.20156886 0.4441391 ... 0.47253367 -2.118635 0.16368659] ... [-0.74619555 0.76362216 1.2627544 ... -0.19783677 1.0396144 -0.3097174 ] [-0.69081587 -2.279169 0.38403302 ... 0.5012413 -0.66908604 -1.7677872 ] [ 1.2641833 0.82499874 0.8886342 ... 1.1058588 -2.182005 -1.2040818 ]] [[-0.1854154 -0.20847192 -0.2618478 ... -1.1063659 -0.3529306 1.8399855 ] [ 0.23123108 -0.7822981 2.27805 ... 1.2533501 1.5465168 0.9415295 ] [-0.94872785 -0.4707303 0.20025268 ... 0.48267218 -3.5986714 0.4258501 ] ... [-0.63245755 -0.3501842 0.61117196 ... 1.1650304 1.9995588 -0.4488386 ] [ 0.12261569 -0.06555653 -1.2732277 ... 1.1027956 -0.50809383 -1.9661872 ] [ 0.03407478 1.1495726 -0.872241 ... 2.5874696 -3.3191879 -2.727561 ]] [[ 0.43945363 -1.8470482 -0.56692445 ... -1.8800668 -0.88380957 1.839849 ] [-1.4911643 -1.1261909 3.454603 ... 2.4518342 0.5324227 1.1647336 ] [-1.6100364 -0.02982996 0.68152636 ... -0.12887718 -3.4053617 1.4581274 ] ... [-0.3841482 -1.1734082 1.6129465 ... 1.6319844 1.945547 -0.9706342 ] [ 0.2688656 1.2206875 -0.2566933 ... 1.5991831 0.8390428 -1.5589235 ] [-0.62044805 0.42980963 -0.7598977 ... 1.0717312 -3.9826703 -2.6919615 ]]]]; ov_res: [[[[-0.21737862 -0.5670901 -1.2124772 ... -1.3541305 0.6330615 1.1785525 ] [-0.2207023 0.9774833 0.92352676 ... 0.40565112 0.2057188 0.3422103 ] [-0.97569925 0.20156886 0.4441391 ... 0.47253367 -2.118635 0.16368659] ... [-0.74619555 0.76362216 1.2627544 ... 
-0.19783677 1.0396144 -0.3097174 ] [-0.69081587 -2.279169 0.38403302 ... 0.5012413 -0.66908604 -1.7677872 ] [ 1.2641833 0.82499874 0.8886342 ... 1.1058588 -2.182005 -1.2040818 ]] [[-0.1854154 -0.20847192 -0.2618478 ... -1.1063659 -0.3529306 1.8399855 ] [ 0.23123108 -0.7822981 2.27805 ... 1.2533501 1.5465168 0.9415295 ] [-0.94872785 -0.4707303 0.20025268 ... 0.48267218 -3.5986714 0.4258501 ] ... [-0.63245755 -0.3501842 0.61117196 ... 1.1650304 1.9995588 -0.4488386 ] [ 0.12261569 -0.06555653 -1.2732277 ... 1.1027956 -0.50809383 -1.9661872 ] [ 0.03407478 1.1495726 -0.872241 ... 2.5874696 -3.3191879 -2.727561 ]] [[ 0.43945366 -1.8470482 -0.56692445 ... -1.8800669 -0.88380957 1.839849 ] [-1.4911643 -1.1261909 3.4546032 ... 2.4518342 0.53242266 1.1647336 ] [-1.6100364 -0.02982998 0.68152636 ... -0.1288772 -3.4053617 1.4581273 ] ... [-0.3841482 -1.1734082 1.6129465 ... 1.6319844 1.945547 -0.9706342 ] [ 0.2688656 1.2206875 -0.25669324 ... 1.599183 0.8390428 -1.5589235 ] [-0.62044805 0.4298097 -0.7598977 ... 1.0717312 -3.9826703 -2.6919615 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_cumsum.py::TestCumSum::test_cumsum[ ie_device:CPU - precision:FP32 - axis:-4 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_cumsum.___torch_mangle_1176.aten_cumsum, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.axis : int = prim::Constant[value=-4]() %4 : Tensor = aten::cumsum(%x.1, %self.axis, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_cumsum.py:22:23 return (%4) fw_re: [[[[ 0.36248726 -0.1793867 -1.7947141 ... -0.06908783 0.51895463 2.4491224 ] [-0.05785588 2.5098553 -0.6876116 ... -0.37729016 0.8047046 -1.2878945 ] [-1.9375068 -0.06260327 -0.8555147 ... -2.134853 1.3702575 -0.658236 ] ... [ 1.5997034 0.00652428 -0.2610781 ... 0.25079274 -0.6123847 1.3868475 ] [-0.46839893 0.23123278 0.8976625 ... -0.43538857 -1.015258 -0.28022033] [-0.37782884 -0.44369125 1.208826 ... 1.9604388 -0.1659238 0.32107776]] [[ 0.22878459 2.2267933 2.5129607 ... 0.05154969 -0.5330099 1.589126 ] [-0.37752438 0.32188442 -1.1966518 ... 0.42537248 -1.4190466 1.6454184 ] [-0.06882237 0.4271505 0.4059324 ... -0.34757122 1.2597489 0.8843164 ] ... [ 0.24710183 0.32361943 0.19280313 ... 0.6021314 -0.5395959 -1.4327425 ] [ 0.7872773 1.366362 -1.0007681 ... 1.2270004 0.7743859 -0.7250296 ] [ 0.26918745 0.8062345 -0.03821287 ... 0.65967804 -1.7782105 0.4303328 ]] [[-0.34433934 -0.3029295 -0.99141306 ... -1.430625 -1.282334 0.5417172 ] [-0.83628654 0.8814543 -0.3157641 ... -1.2308264 0.5642106 0.72727793] [ 0.94742644 -1.6822033 -0.88603556 ... -0.69933134 1.4031547 0.928443 ] ... [-0.5043679 -0.6855492 1.1346594 ... 0.42287365 0.23298001 -0.05195556] [-1.1540899 0.90182066 0.4000912 ... -0.69381857 -0.51277304 0.29581544] [-1.6405629 0.32659155 -0.05164969 ... 1.3802593 -0.89019537 -0.47429207]]]]; ov_res: [[[[ 0.36248726 -0.1793867 -1.7947141 ... -0.06908783 0.51895463 2.4491224 ] [-0.05785588 2.5098553 -0.6876116 ... -0.37729016 0.8047046 -1.2878945 ] [-1.9375068 -0.06260327 -0.8555147 ... -2.134853 1.3702575 -0.658236 ] ... [ 1.5997034 0.00652428 -0.2610781 ... 
0.25079274 -0.6123847 1.3868475 ] [-0.46839893 0.23123278 0.8976625 ... -0.43538857 -1.015258 -0.28022033] [-0.37782884 -0.44369125 1.208826 ... 1.9604388 -0.1659238 0.32107776]] [[ 0.22878459 2.2267933 2.5129607 ... 0.05154969 -0.5330099 1.589126 ] [-0.37752438 0.32188442 -1.1966518 ... 0.42537248 -1.4190466 1.6454184 ] [-0.06882237 0.4271505 0.4059324 ... -0.34757122 1.2597489 0.8843164 ] ... [ 0.24710183 0.32361943 0.19280313 ... 0.6021314 -0.5395959 -1.4327425 ] [ 0.7872773 1.366362 -1.0007681 ... 1.2270004 0.7743859 -0.7250296 ] [ 0.26918745 0.8062345 -0.03821287 ... 0.65967804 -1.7782105 0.4303328 ]] [[-0.34433934 -0.3029295 -0.99141306 ... -1.430625 -1.282334 0.5417172 ] [-0.83628654 0.8814543 -0.3157641 ... -1.2308264 0.5642106 0.72727793] [ 0.94742644 -1.6822033 -0.88603556 ... -0.69933134 1.4031547 0.928443 ] ... [-0.5043679 -0.6855492 1.1346594 ... 0.42287365 0.23298001 -0.05195556] [-1.1540899 0.90182066 0.4000912 ... -0.69381857 -0.51277304 0.29581544] [-1.6405629 0.32659155 -0.05164969 ... 1.3802593 -0.89019537 -0.47429207]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1177.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[0.5190511 4.4201145 5.1899524 0.98071027 1.2852257 ] [3.0487585 4.1888413 2.4151309 1.7427168 5.5962687 ] [3.9291143 4.549719 3.5086198 2.0395584 2.8683255 ] [0.9366406 2.2444253 4.564644 2.7156339 1.7434598 ] [5.4284225 1.4304941 3.071928 1.4379983 1.1459876 ]]; ov_res: [[0.5190511 4.4201145 5.1899524 0.98071027 1.2852257 ] [3.0487587 4.1888413 2.4151309 1.7427169 5.5962687 ] [3.9291146 4.549719 3.5086198 2.0395584 2.8683255 ] [0.9366406 2.2444253 4.564644 2.7156339 1.7434598 ] [5.4284225 1.4304942 3.071928 1.4379983 1.1459876 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1179.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[2.9278142 ] [5.353986 ] [3.7453904 ] [4.6922293 ] [4.9925833 ]] [[5.728383 ] [0.62052584] [0.8889454 ] [0.31930214] [6.213388 ]] [[6.975217 ] [7.370888 ] [5.3975444 ] [1.9756566 ] [3.4948967 ]] [[6.9785004 ] [1.7749413 ] [6.3001394 ] [1.0364809 ] [5.1778293 ]] [[4.213921 ] [7.336237 ] [3.812526 ] [3.4407604 ] [4.87926 ]]]; ov_res: [[[2.9278142 ] [5.353986 ] [3.7453904 ] [4.6922297 ] [4.9925833 ]] [[5.728383 ] [0.62052584] [0.8889454 ] [0.31930214] [6.213388 ]] [[6.975217 ] [7.370888 ] [5.3975444 ] [1.9756566 ] [3.4948967 ]] [[6.9785004 ] [1.7749413 ] [6.3001394 ] [1.036481 ] [5.1778293 ]] [[4.213921 ] [7.336237 ] [3.812526 ] [3.4407604 ] [4.87926 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1181.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[[1.939233 1.2767049 2.0089421 2.8143806 0.33123556] [1.463698 1.1127292 0.37331113 0.29984498 0.7897491 ] [1.8898668 1.5279512 1.9890832 0.20241074 2.360894 ] [2.7198436 1.8718216 0.40212828 1.3508549 2.8565376 ] [2.8880277 0.20862228 1.3036999 2.8546228 2.7418473 ]]]]; ov_res: [[[[1.9392328 1.2767048 2.0089421 2.8143806 0.33123556] [1.463698 1.1127292 0.37331113 0.29984495 0.7897491 ] [1.8898668 1.5279512 1.9890832 0.20241073 2.360894 ] [2.7198436 1.8718215 0.40212828 1.3508549 2.8565373 ] [2.8880277 0.20862228 1.3036999 2.8546228 2.7418473 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:None - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1183.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[2.754949 ] [5.0251427 ] [0.93257487] [0.26182708] [5.483197 ]] [[4.4589376 ] [4.6766205 ] [2.2543504 ] [0.7923153 ] [2.5400262 ]] [[2.5300307 ] [1.6324959 ] [0.3127847 ] [1.942082 ] [0.5929371 ]] [[4.47775 ] [5.209153 ] [0.14852467] [0.28546876] [5.535439 ]] [[0.35994297] [4.1713266 ] [1.3299826 ] [0.7494792 ] [2.5712724 ]]]; ov_res: [[[2.754949 ] [5.025142 ] [0.9325748 ] [0.26182708] [5.483197 ]] [[4.4589376 ] [4.6766205 ] [2.2543502 ] [0.7923153 ] [2.5400262 ]] [[2.530031 ] [1.6324958 ] [0.3127847 ] [1.942082 ] [0.5929371 ]] [[4.47775 ] [5.209153 ] [0.14852466] [0.28546876] [5.535439 ]] [[0.359943 ] [4.171326 ] [1.3299826 ] [0.7494792 ] [2.5712724 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1209.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[0. 4. 5. 0. 1.] [3. 4. 2. 1. 5.] [3. 4. 3. 2. 2.] [0. 2. 4. 2. 1.] [5. 1. 3. 1. 1.]]; ov_res: [[0. 4. 5. 0. 1.] [3. 4. 2. 1. 5.] [3. 4. 3. 2. 2.] [0. 2. 4. 2. 1.] [5. 1. 3. 1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1211.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[2.] [5.] [3.] [4.] [4.]] [[5.] [0.] [0.] [0.] [6.]] [[6.] [7.] [5.] [1.] [3.]] [[6.] [1.] [6.] [1.] [5.]] [[4.] [7.] [3.] [3.] [4.]]]; ov_res: [[[2.] [5.] [3.] [4.] [4.]] [[5.] [0.] [0.] [0.] [6.]] [[6.] [7.] [5.] [1.] [3.]] [[6.] [1.] [6.] [1.] [5.]] [[4.] [7.] [3.] [3.] [4.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1213.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[[1. 1. 2. 2. 0.] [1. 1. 0. 0. 0.] [1. 1. 1. 0. 2.] [2. 1. 0. 1. 2.] [2. 0. 1. 2. 2.]]]]; ov_res: [[[[1. 1. 2. 2. 0.] [1. 1. 0. 0. 0.] [1. 1. 1. 0. 2.] [2. 1. 0. 1. 2.] [2. 0. 1. 2. 2.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:floor - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1215.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[2.] [5.] [0.] [0.] [5.]] [[4.] [4.] [2.] [0.] [2.]] [[2.] [1.] [0.] [1.] [0.]] [[4.] [5.] [0.] [0.] [5.]] [[0.] [4.] [1.] [0.] [2.]]]; ov_res: [[[2.] [5.] [0.] [0.] [5.]] [[4.] [4.] [2.] [0.] [2.]] [[2.] [1.] [0.] [1.] [0.]] [[4.] [5.] [0.] [0.] [5.]] [[0.] [4.] [1.] [0.] [2.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[0.90247622 7.68527131 9.02379164 1.70516486 2.2346272 ] [5.300889 7.2831553 4.19919755 3.03006879 9.73025512] [6.83156725 7.91061436 6.10045135 3.54618868 4.98716904] [1.62854083 3.90239147 7.93656487 4.72168371 3.03136055] [9.43842077 2.48720598 5.34117367 2.50025354 1.99253338]] - other_array:[1.73870404] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1241.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[0. 4. 5. 0. 1.] [3. 4. 2. 1. 5.] [3. 4. 3. 2. 2.] [0. 2. 4. 2. 1.] [5. 1. 3. 1. 1.]]; ov_res: [[0. 4. 5. 0. 1.] [3. 4. 2. 1. 5.] [3. 4. 3. 2. 2.] [0. 2. 4. 2. 1.] [5. 1. 3. 1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[[3.90463578] [7.14026347] [4.99498432] [6.25772194] [6.65828409]] [[7.63957252] [0.82755503] [1.18552878] [0.42583255] [8.28639225]] [[9.30239395] [9.83007559] [7.19835468] [2.63480486] [4.66091702]] [[9.3067732 ] [2.3671241 ] [8.40208729] [1.38228733] [6.9053349 ]] [[5.61983315] [9.78386344] [5.08451843] [4.58871909] [6.50715271]]] - other_array:[1.33363507] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1243.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[2.] [5.] [3.] [4.] [4.]] [[5.] [0.] [0.] [0.] [6.]] [[6.] [7.] [5.] [1.] [3.]] [[6.] [1.] [6.] [1.] [5.]] [[4.] [7.] [3.] [3.] [4.]]]; ov_res: [[[2.] [5.] [3.] [4.] [4.]] [[5.] [0.] [0.] [0.] [6.]] [[6.] [7.] [5.] [1.] [3.]] [[6.] [1.] [6.] [1.] [5.]] [[4.] [7.] [3.] [3.] [4.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[[[6.13562094 4.03942058 6.35617663 8.90453805 1.04801022] [4.63105617 3.52061108 1.18113494 0.94869218 2.49872055] [5.97942967 4.83434984 6.29334466 0.64041594 7.46973248] [8.60542914 5.92233553 1.27231074 4.27402709 9.03791957] [9.13755296 0.66006885 4.1248312 9.03186207 8.67504731]]]] - other_array:[3.16394232] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1245.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) : Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to 
C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened fw_re: [[[[1. 1. 2. 2. 0.] [1. 1. 0. 0. 0.] [1. 1. 1. 0. 2.] [2. 1. 0. 1. 2.] [2. 0. 1. 2. 2.]]]]; ov_res: [[[[1. 1. 2. 2. 0.] [1. 1. 0. 0. 0.] [1. 1. 1. 0. 2.] [2. 1. 0. 1. 2.] [2. 0. 1. 2. 2.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - types:(<class 'numpy.float32'>, <class 'numpy.float32'>) - input_array:[[[5.63621207] [7.97586911] [3.83646915] [1.00771288] [9.03086258]] [[9.12231615] [7.42269784] [9.27404938] [3.04944119] [4.18344042]] [[5.17606324] [2.5910854 ] [1.28674806] [7.47463186] [0.97657142]] [[9.160804 ] [8.26793016] [0.61100758] [1.09870419] [9.11690487]] [[0.73638931] [6.62069904] [5.47134331] [2.88457466] [4.23490289]]] - other_array:[[2.04584977] [1.5871927 ] [4.113846 ] [3.8487724 ] [1.6470067 ]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1247.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[[2.] [5.] [0.] [0.] [5.]] [[4.] [4.] [2.] [0.] [2.]] [[2.] [1.] [0.] [1.] [0.]] [[4.] [5.] [0.] [0.] [5.]] [[0.] [4.] [1.] [0.] [2.]]]; ov_res: [[[2.] [5.] [0.] [0.] [5.]] [[4.] [4.] [2.] [0.] [2.]] [[2.] [1.] [0.] [1.] [0.]] [[4.] [5.] [0.] [0.] [5.]] [[0.] [4.] [1.] [0.] [2.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div_pt_spec[ ie_device:CPU - precision:FP32 - rounding_mode:None - input_array:[ 0.762 2.5548 -0.5944 -0.7438 0.9274] - other_array:0.5 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1273.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [ 1.524 5.1096 -1.1888 -1.4876 1.8548]; ov_res: [ 1.524 5.1096 -1.1888 -1.4876 1.8548] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div_pt_spec[ ie_device:CPU - precision:FP32 - rounding_mode:None - input_array:[[-0.3711 -1.9353 -0.4605 -0.2917] [ 0.1815 -1.0111 0.9805 -1.5923] [ 0.1062 1.4581 0.7759 -1.2344] [-0.183 -0.0313 1.1908 -1.4757]] - other_array:[ 0.8032 0.293 -0.8113 -0.2308] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1275.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : NoneType = prim::Constant() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[-0.4620269 -6.605119 0.5676076 1.2638649 ] [ 0.22597112 -3.4508533 -1.2085541 6.899047 ] [ 0.13222112 4.97645 -0.95636636 5.348354 ] [-0.22783864 -0.10682594 -1.4677677 6.3938475 ]]; ov_res: [[-0.4620269 -6.605119 0.5676076 1.2638649 ] [ 0.22597112 -3.4508533 -1.2085541 6.899047 ] [ 0.13222112 4.97645 -0.95636636 5.3483534 ] [-0.22783864 -0.10682593 -1.4677678 6.3938475 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div_pt_spec[ ie_device:CPU - precision:FP32 - rounding_mode:floor - input_array:[ 0.762 2.5548 -0.5944 -0.7438 0.9274] - other_array:0.5 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1277.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [ 1. 5. -2. -2. 1.]; ov_res: [ 1. 5. -2. -2. 1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div_pt_spec[ ie_device:CPU - precision:FP32 - rounding_mode:floor - input_array:[[-0.3711 -1.9353 -0.4605 -0.2917] [ 0.1815 -1.0111 0.9805 -1.5923] [ 0.1062 1.4581 0.7759 -1.2344] [-0.183 -0.0313 1.1908 -1.4757]] - other_array:[ 0.8032 0.293 -0.8113 -0.2308] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1279.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="floor"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [[-1. -7. 0. 1.] [ 0. -4. -2. 6.] [ 0. 4. -1. 5.] [-1. -1. -2. 6.]]; ov_res: [[-1. -7. 0. 1.] [ 0. -4. -2. 6.] [ 0. 4. -1. 5.] [-1. -1. -2. 6.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div_pt_spec[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - input_array:[ 0.762 2.5548 -0.5944 -0.7438 0.9274] - other_array:0.5 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1281.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) fw_re: [ 1. 5. -1. -1. 1.]; ov_res: [ 1. 5. -1. -1. 1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_div.py::TestDiv::test_div_pt_spec[ ie_device:CPU - precision:FP32 - rounding_mode:trunc - input_array:[[-0.3711 -1.9353 -0.4605 -0.2917] [ 0.1815 -1.0111 0.9805 -1.5923] [ 0.1062 1.4581 0.7759 -1.2344] [-0.183 -0.0313 1.1908 -1.4757]] - other_array:[ 0.8032 0.293 -0.8113 -0.2308] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_div.___torch_mangle_1283.aten_div, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %self.rounding_mode : str = prim::Constant[value="trunc"]() %4 : Tensor = aten::div(%input_tensor.1, %other_tensor.1, %self.rounding_mode) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_div.py:23:23 return (%4) during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MEfw_re: [[-0. -6. 0. 1.] [ 0. -3. -1. 6.] [ 0. 4. -0. 5.] [-0. -0. -1. 6.]]; ov_res: [[ 0. -6. 0. 1.] [ 0. -3. -1. 6.] [ 0. 4. 0. 5.] [ 0. 0. -1. 6.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_exp.py::TestExp::test_exp[ ie_device:CPU - precision:FP32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_exp.___torch_mangle_1284.aten_exp, %x.1 : Tensor): %2 : Tensor = aten::exp(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_exp.py:19:23 return (%2) fw_re: [[[[ 0.23408063 2.7434459 1.2726157 ... 2.022489 0.25018042 5.4133 ] [ 0.16826178 1.9899856 1.4113864 ... 0.09970743 1.238014 1.0727981 ] [ 4.9314985 1.4366287 0.5935397 ... 0.6944243 1.4625937 0.18880439] ... [ 2.81268 1.7630012 0.6876852 ... 2.3334708 0.11546229 0.10195107] [ 0.8292453 2.2754705 0.34524593 ... 0.5952514 0.52011895 5.578104 ] [ 2.449755 1.8873887 0.4340939 ... 0.2883431 1.9479473 0.8063945 ]] [[ 1.3852133 1.0068275 1.0187968 ... 0.71227235 0.91010994 0.9569524 ] [ 0.41613838 1.393781 0.3693429 ... 0.6134635 3.5164003 2.23477 ] [ 1.1450024 1.1340036 1.916812 ... 2.5871906 0.9904263 0.5446744 ] ... [ 1.2616389 3.4941921 0.99060553 ... 12.790259 1.3791996 0.44581178] [ 0.63371557 0.6280218 2.2168803 ... 0.38180345 2.6004715 0.43643215] [ 1.2795521 0.30066195 0.98440266 ... 0.7877543 0.5979334 2.9205012 ]] [[ 2.6366084 0.9056418 2.9974077 ... 1.9295115 7.6757164 1.0549616 ] [ 5.556801 0.8776958 0.30290598 ... 2.2900603 0.5044258 0.14628208] [ 2.7793345 2.088173 1.174869 ... 0.11526544 0.42565894 0.7046393 ] ... [ 0.8869417 2.610183 0.3874901 ... 0.63183606 0.7228768 1.304859 ] [ 1.1702033 1.8094192 2.3038967 ... 1.1246796 0.42737126 0.32443237] [ 1.7448971 1.1027032 1.4065878 ... 1.816473 8.545889 1.6487465 ]]]]; ov_res: [[[[ 0.23408063 2.743446 1.2726157 ... 2.022489 0.25018042 5.4133005 ] [ 0.1682618 1.9899856 1.4113863 ... 0.09970743 1.238014 1.072798 ] [ 4.9314985 1.4366287 0.59353966 ... 0.6944243 1.4625938 0.1888044 ] ... [ 2.8126798 1.763001 0.68768525 ... 2.3334706 0.11546228 0.10195107] [ 0.8292452 2.2754703 0.34524596 ... 0.5952513 0.5201189 5.578104 ] [ 2.449755 1.8873887 0.43409386 ... 0.28834307 1.9479473 0.80639446]] [[ 1.3852133 1.0068275 1.0187968 ... 
0.7122723 0.9101099 0.9569524 ] [ 0.41613835 1.393781 0.36934292 ... 0.61346346 3.5164 2.2347698 ] [ 1.1450022 1.1340035 1.916812 ... 2.5871909 0.9904263 0.54467434] ... [ 1.2616389 3.494192 0.99060553 ... 12.790258 1.3791996 0.44581178] [ 0.63371557 0.6280218 2.21688 ... 0.38180348 2.6004717 0.43643212] [ 1.2795522 0.30066192 0.98440266 ... 0.7877543 0.59793335 2.9205015 ]] [[ 2.6366086 0.90564173 2.997408 ... 1.9295114 7.6757164 1.0549616 ] [ 5.556801 0.87769574 0.30290595 ... 2.29006 0.5044258 0.14628208] [ 2.7793348 2.0881727 1.1748688 ... 0.11526543 0.4256589 0.70463926] ... [ 0.8869416 2.6101832 0.3874901 ... 0.63183606 0.72287685 1.3048592 ] [ 1.1702032 1.809419 2.3038967 ... 1.1246794 0.42737123 0.32443237] [ 1.7448969 1.1027032 1.4065878 ... 1.8164729 8.545889 1.6487464 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpand::test_expand[ ie_device:CPU - precision:FP32 - dims:(4, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1285.aten_expand, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[4, 3]]() %3 : bool = prim::Constant[value=0]() %4 : Tensor = aten::expand(%x.1, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:23:23 return (%4) fw_re: [[-1.5343822 1.2470589 -2.3433275] [-1.5343822 1.2470589 -2.3433275] [-1.5343822 1.2470589 -2.3433275] [-1.5343822 1.2470589 -2.3433275]]; ov_res: [[-1.5343822 1.2470589 -2.3433275] [-1.5343822 1.2470589 -2.3433275] [-1.5343822 1.2470589 -2.3433275] [-1.5343822 1.2470589 -2.3433275]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpand::test_expand[ ie_device:CPU - precision:FP32 - dims:(-1, -1) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1287.aten_expand, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1, -1]]() %3 : bool = prim::Constant[value=0]() %4 : Tensor = aten::expand(%x.1, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:23:23 return (%4) fw_re: [[0.13629875 0.12815979 0.7247628 ]]; ov_res: [[0.13629875 0.12815979 0.7247628 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpand::test_expand[ ie_device:CPU - precision:FP32 - dims:(1, 2, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1289.aten_expand, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 3]]() %3 : bool = prim::Constant[value=0]() %4 : Tensor = aten::expand(%x.1, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:23:23 return (%4) fw_re: [[[-0.23278734 1.0079383 0.02722645] [-0.23278734 1.0079383 0.02722645]]]; ov_res: [[[-0.23278734 1.0079383 0.02722645] [-0.23278734 1.0079383 0.02722645]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpand::test_expand[ ie_device:CPU - precision:FP32 - dims:(1, 2, 2, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1291.aten_expand, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 2, 3]]() %3 : bool = prim::Constant[value=0]() %4 : Tensor = aten::expand(%x.1, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:23:23 return (%4) fw_re: [[[[0.67026734 0.0053891 1.4215083 ] [0.67026734 0.0053891 1.4215083 ]] [[0.67026734 0.0053891 1.4215083 ] [0.67026734 0.0053891 1.4215083 ]]]]; ov_res: [[[[0.67026734 0.0053891 1.4215083 ] [0.67026734 0.0053891 1.4215083 ]] [[0.67026734 0.0053891 1.4215083 ] [0.67026734 0.0053891 1.4215083 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpandAs::test_expand[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': [1, 2], 'broadcast_shape': [1, 2]} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1292.aten_expand_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::expand_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:49:23 return (%3) fw_re: [[-1.1863 -0.10829386]]; ov_res: [[-1.1863 -0.10829386]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpandAs::test_expand[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': [1, 2], 'broadcast_shape': [1, 4, 2]} 0] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1294.aten_expand_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::expand_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:49:23 return (%3) fw_re: [[[-0.30404904 -1.019343 ] [-0.30404904 -1.019343 ] [-0.30404904 -1.019343 ] [-0.30404904 -1.019343 ]]]; ov_res: [[[-0.30404904 -1.019343 ] [-0.30404904 -1.019343 ] [-0.30404904 -1.019343 ] [-0.30404904 -1.019343 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpandAs::test_expand[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': [1, 2], 'broadcast_shape': [2, 2]} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1296.aten_expand_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::expand_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:49:23 return (%3) fw_re: [[0.1372855 0.7952777] [0.1372855 0.7952777]]; ov_res: [[0.1372855 0.7952777] [0.1372855 0.7952777]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpandAs::test_expand[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': [1, 2], 'broadcast_shape': [2, 2, 2]} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1298.aten_expand_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::expand_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:49:23 return (%3) fw_re: [[[-1.0305212 -1.3475614] [-1.0305212 -1.3475614]] [[-1.0305212 -1.3475614] [-1.0305212 -1.3475614]]]; ov_res: [[[-1.0305212 -1.3475614] [-1.0305212 -1.3475614]] [[-1.0305212 -1.3475614] [-1.0305212 -1.3475614]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpandAs::test_expand[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': [1, 2], 'broadcast_shape': [1, 4, 2]} 1] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1300.aten_expand_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::expand_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:49:23 return (%3) fw_re: [[[-0.80384535 0.5634995 ] [-0.80384535 0.5634995 ] [-0.80384535 0.5634995 ] [-0.80384535 0.5634995 ]]]; ov_res: [[[-0.80384535 0.5634995 ] [-0.80384535 0.5634995 ] [-0.80384535 0.5634995 ] [-0.80384535 0.5634995 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpandAs::test_expand[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': [1, 2, 3], 'broadcast_shape': [1, 2, 3]} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1302.aten_expand_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::expand_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:49:23 return (%3) fw_re: [[[-0.05380798 -0.8010318 0.4964492 ] [-0.23084044 -0.89962703 0.00271529]]]; ov_res: [[[-0.05380798 -0.8010318 0.4964492 ] [-0.23084044 -0.89962703 0.00271529]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpandAs::test_expand[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': [1, 2, 3], 'broadcast_shape': [1, 4, 2, 3]} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1304.aten_expand_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::expand_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:49:23 return (%3) fw_re: [[[[0.15388428 0.54396397 0.9421791 ] [0.9980933 0.03612002 0.02959938]] [[0.15388428 0.54396397 0.9421791 ] [0.9980933 0.03612002 0.02959938]] [[0.15388428 0.54396397 0.9421791 ] [0.9980933 0.03612002 0.02959938]] [[0.15388428 0.54396397 0.9421791 ] [0.9980933 0.03612002 0.02959938]]]]; ov_res: [[[[0.15388428 0.54396397 0.9421791 ] [0.9980933 0.03612002 0.02959938]] [[0.15388428 0.54396397 0.9421791 ] [0.9980933 0.03612002 0.02959938]] [[0.15388428 0.54396397 0.9421791 ] [0.9980933 0.03612002 0.02959938]] [[0.15388428 0.54396397 0.9421791 ] [0.9980933 0.03612002 0.02959938]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpandAs::test_expand[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': [1, 2, 3, 4], 'broadcast_shape': [1, 2, 3, 4]} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1306.aten_expand_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::expand_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:49:23 return (%3) fw_re: [[[[-0.70847225 -1.0363775 -0.30269384 1.512988 ] [ 1.1873285 -0.8911097 -1.6060777 -1.3253126 ] [-0.43092418 -1.0740772 -0.1090296 0.25054118]] [[ 1.4427484 -0.8464968 -0.53054214 -0.48716015] [ 0.02412279 -1.8943092 -0.47594246 0.49728155] [-1.5432857 -0.95348907 1.2675428 1.4601216 ]]]]; ov_res: [[[[-0.70847225 -1.0363775 -0.30269384 1.512988 ] [ 1.1873285 -0.8911097 -1.6060777 -1.3253126 ] [-0.43092418 -1.0740772 -0.1090296 0.25054118]] [[ 1.4427484 -0.8464968 -0.53054214 -0.48716015] [ 0.02412279 -1.8943092 -0.47594246 0.49728155] [-1.5432857 -0.95348907 1.2675428 1.4601216 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_expand.py::TestExpandAs::test_expand[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': [1, 2, 3, 4], 'broadcast_shape': [1, 4, 2, 3, 4]} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_expand.___torch_mangle_1308.aten_expand_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::expand_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_expand.py:49:23 return (%3) fw_re: [[[[[-0.7628757 -0.9420382 1.5323014 0.41365144] [-0.09810807 1.0471576 0.08372699 0.00936675] [-0.5156482 -1.2667387 -1.3443851 0.9032145 ]] [[-0.09425765 -0.2451079 -0.24304506 -1.6735955 ] [-1.006008 0.07869951 1.3991637 2.01702 ] [ 1.4676739 1.716746 0.49626258 -0.61602956]]] [[[-0.7628757 -0.9420382 1.5323014 0.41365144] [-0.09810807 1.0471576 0.08372699 0.00936675] [-0.5156482 -1.2667387 -1.3443851 0.9032145 ]] [[-0.09425765 -0.2451079 -0.24304506 -1.6735955 ] [-1.006008 0.07869951 1.3991637 2.01702 ] [ 1.4676739 1.716746 0.49626258 -0.61602956]]] [[[-0.7628757 -0.9420382 1.5323014 0.41365144] [-0.09810807 1.0471576 0.08372699 0.00936675] [-0.5156482 -1.2667387 -1.3443851 0.9032145 ]] [[-0.09425765 -0.2451079 -0.24304506 -1.6735955 ] [-1.006008 0.07869951 1.3991637 2.01702 ] [ 1.4676739 1.716746 0.49626258 -0.61602956]]] [[[-0.7628757 -0.9420382 1.5323014 0.41365144] [-0.09810807 1.0471576 0.08372699 0.00936675] [-0.5156482 -1.2667387 -1.3443851 0.9032145 ]] [[-0.09425765 -0.2451079 -0.24304506 -1.6735955 ] [-1.006008 0.07869951 1.3991637 2.01702 ] [ 1.4676739 1.716746 0.49626258 -0.61602956]]]]]; ov_res: [[[[[-0.7628757 -0.9420382 1.5323014 0.41365144] [-0.09810807 1.0471576 0.08372699 0.00936675] [-0.5156482 -1.2667387 -1.3443851 0.9032145 ]] [[-0.09425765 -0.2451079 -0.24304506 -1.6735955 ] [-1.006008 0.07869951 1.3991637 2.01702 ] [ 1.4676739 1.716746 0.49626258 -0.61602956]]] [[[-0.7628757 -0.9420382 1.5323014 0.41365144] [-0.09810807 1.0471576 0.08372699 0.00936675] [-0.5156482 -1.2667387 -1.3443851 0.9032145 ]] [[-0.09425765 -0.2451079 -0.24304506 -1.6735955 ] [-1.006008 0.07869951 1.3991637 2.01702 ] [ 1.4676739 1.716746 0.49626258 
-0.61602956]]] [[[-0.7628757 -0.9420382 1.5323014 0.41365144] [-0.09810807 1.0471576 0.08372699 0.00936675] [-0.5156482 -1.2667387 -1.3443851 0.9032145 ]] [[-0.09425765 -0.2451079 -0.24304506 -1.6735955 ] [-1.006008 0.07869951 1.3991637 2.01702 ] [ 1.4676739 1.716746 0.49626258 -0.61602956]]] [[[-0.7628757 -0.9420382 1.5323014 0.41365144] [-0.09810807 1.0471576 0.08372699 0.00936675] [-0.5156482 -1.2667387 -1.3443851 0.9032145 ]] [[-0.09425765 -0.2451079 -0.24304506 -1.6735955 ] [-1.006008 0.07869951 1.3991637 2.01702 ] [ 1.4676739 1.716746 0.49626258 -0.61602956]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor.py::TestFloor::test_floor[ ie_device:CPU - precision:FP32 - inplace:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor.___torch_mangle_1309.aten_floor, %x.1 : Tensor): %2 : Tensor = aten::floor(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor.py:22:26 %3 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %2) return (%3) fw_re: [[[[ 0.21912988 -1.5067416 0.43343273 ... 0.4192412 -0.6938677 0.30788818] [ 0.51528156 0.11034961 0.79521924 ... -0.05670949 1.2462338 1.0668726 ] [ 1.8348272 1.1813849 -0.2961457 ... 1.5413991 0.12281229 0.95648026] ... [ 0.7498407 1.2583318 -0.45354757 ... -1.8231434 0.26941144 -0.16412702] [ 0.47392377 0.7854522 0.33391887 ... 0.95082796 -0.7305727 -1.1099187 ] [ 0.42275167 0.8187829 -1.3036156 ... -0.8764224 0.77274793 -0.05534838]] [[ 0.08089918 0.5699505 1.0911622 ... 0.63011765 2.3970625 -1.9332563 ] [-0.9870346 0.8867219 -0.49761775 ... -1.6627297 -0.7682753 0.25433525] [ 1.0157237 1.026672 0.2355929 ... 0.02442197 0.7813243 -1.14908 ] ... [ 0.05185184 -0.33982614 1.3231971 ... -0.6136583 0.30845872 1.4655099 ] [ 1.2030613 1.5818276 1.0981175 ... 0.43031964 1.1237018 -0.18374655] [-0.61282504 0.24178739 1.2545 ... 0.5041778 -1.7788292 -0.93642765]] [[ 0.7494734 0.52117866 0.96358234 ... -0.08226675 -1.6427116 -0.46424496] [-0.6439089 1.0417353 -0.4425609 ... -0.97659945 0.54120415 -2.1036992 ] [ 2.4427183 -0.07179745 -1.1828283 ... -0.14074932 -0.23910904 -0.07034183] ... [ 1.8437618 -1.9036885 -0.29436788 ... -0.4698596 -0.42909038 0.14606659] [-1.0816579 0.88343537 0.79075545 ... -0.811704 0.35835886 -0.20527098] [ 1.0417509 0.51890373 0.29721963 ... 0.4892846 1.9272553 0.8026814 ]]]]; ov_res: [[[[ 0.21912988 -1.5067416 0.43343273 ... 0.4192412 -0.6938677 0.30788818] [ 0.51528156 0.11034961 0.79521924 ... -0.05670949 1.2462338 1.0668726 ] [ 1.8348272 1.1813849 -0.2961457 ... 1.5413991 0.12281229 0.95648026] ... [ 0.7498407 1.2583318 -0.45354757 ... 
-1.8231434 0.26941144 -0.16412702] [ 0.47392377 0.7854522 0.33391887 ... 0.95082796 -0.7305727 -1.1099187 ] [ 0.42275167 0.8187829 -1.3036156 ... -0.8764224 0.77274793 -0.05534838]] [[ 0.08089918 0.5699505 1.0911622 ... 0.63011765 2.3970625 -1.9332563 ] [-0.9870346 0.8867219 -0.49761775 ... -1.6627297 -0.7682753 0.25433525] [ 1.0157237 1.026672 0.2355929 ... 0.02442197 0.7813243 -1.14908 ] ... [ 0.05185184 -0.33982614 1.3231971 ... -0.6136583 0.30845872 1.4655099 ] [ 1.2030613 1.5818276 1.0981175 ... 0.43031964 1.1237018 -0.18374655] [-0.61282504 0.24178739 1.2545 ... 0.5041778 -1.7788292 -0.93642765]] [[ 0.7494734 0.52117866 0.96358234 ... -0.08226675 -1.6427116 -0.46424496] [-0.6439089 1.0417353 -0.4425609 ... -0.97659945 0.54120415 -2.1036992 ] [ 2.4427183 -0.07179745 -1.1828283 ... -0.14074932 -0.23910904 -0.07034183] ... [ 1.8437618 -1.9036885 -0.29436788 ... -0.4698596 -0.42909038 0.14606659] [-1.0816579 0.88343537 0.79075545 ... -0.811704 0.35835886 -0.20527098] [ 1.0417509 0.51890373 0.29721963 ... 0.4892846 1.9272553 0.8026814 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[ 0. -2. 0. ... 0. -1. 0.] [ 0. 0. 0. ... -1. 1. 1.] [ 1. 1. -1. ... 1. 0. 0.] ... [ 0. 1. -1. ... -2. 0. -1.] [ 0. 0. 0. ... 0. -1. -2.] [ 0. 0. -2. ... -1. 0. -1.]] [[ 0. 0. 1. ... 0. 2. -2.] [-1. 0. -1. ... -2. -1. 0.] [ 1. 1. 0. ... 0. 0. -2.] ... [ 0. -1. 1. ... -1. 0. 1.] [ 1. 1. 1. ... 0. 1. -1.] [-1. 0. 1. ... 0. -2. -1.]] [[ 0. 0. 0. ... -1. -2. -1.] [-1. 1. -1. ... -1. 0. -3.] [ 2. -1. -2. ... -1. -1. -1.] ... [ 1. -2. -1. ... -1. -1. 0.] [-2. 0. 0. ... -1. 0. -1.] [ 1. 0. 0. ... 0. 1. 0.]]]]; ov_res: [[[[ 0. -2. 0. ... 0. -1. 0.] [ 0. 0. 0. ... -1. 1. 1.] [ 1. 1. -1. ... 1. 0. 0.] ... [ 0. 1. -1. ... -2. 0. -1.] [ 0. 0. 0. ... 0. -1. -2.] [ 0. 0. -2. ... -1. 0. -1.]] [[ 0. 0. 1. ... 0. 2. -2.] [-1. 0. -1. ... -2. -1. 0.] [ 1. 1. 0. ... 0. 0. -2.] ... [ 0. -1. 1. ... -1. 0. 1.] [ 1. 1. 1. ... 0. 1. -1.] [-1. 0. 1. ... 0. -2. 
-1.]] [[ 0. 0. 0. ... -1. -2. -1.] [-1. 1. -1. ... -1. 0. -3.] [ 2. -1. -2. ... -1. -1. -1.] ... [ 1. -2. -1. ... -1. -1. 0.] [-2. 0. 0. ... -1. 0. -1.] [ 1. 0. 0. ... 0. 1. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor.py::TestFloor::test_floor[ ie_device:CPU - precision:FP32 - inplace:True ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor.___torch_mangle_1311.aten_floor, %x.1 : Tensor): %2 : Tensor = aten::floor_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor.py:22:26 %3 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %2) return (%3) fw_re: [[[[ 1. -1. -1. ... 0. -1. 0.] [-1. 0. -2. ... -2. 0. -1.] [ 0. 1. 0. ... 0. 0. -2.] ... [ 0. -1. -1. ... 0. -1. -2.] [-1. 0. -1. ... -3. -1. -2.] [-1. 0. -1. ... -1. 1. 1.]] [[ 0. -2. -1. ... -1. -2. 1.] [ 1. -1. 0. ... 0. -2. 0.] [ 0. 1. 1. ... 0. -1. 0.] ... [-1. -3. -1. ... -1. -1. 1.] [ 3. 0. -2. ... -2. 0. 1.] [-1. -1. 1. ... 1. 0. 1.]] [[-2. 0. 0. ... 0. 0. -2.] [-1. 1. -1. ... -1. -2. -1.] [-1. -2. -1. ... 0. -1. 0.] ... [-2. 1. -1. ... 0. -1. 0.] [-2. -1. -1. ... 0. -1. 1.] [ 0. -2. 1. ... 0. -1. -1.]]]]; ov_res: [[[[ 1. -1. -1. ... 0. -1. 0.] [-1. 0. -2. ... -2. 0. -1.] [ 0. 1. 0. ... 0. 0. -2.] ... [ 0. -1. -1. ... 0. -1. -2.] [-1. 0. -1. ... -3. -1. -2.] [-1. 0. -1. ... -1. 1. 1.]] [[ 0. -2. -1. ... -1. -2. 1.] [ 1. -1. 0. ... 0. -2. 0.] [ 0. 1. 1. ... 0. -1. 0.] ... [-1. -3. -1. ... -1. -1. 1.] [ 3. 0. -2. ... -2. 0. 1.] [-1. -1. 1. ... 1. 0. 1.]] [[-2. 0. 0. ... 0. 0. -2.] [-1. 1. -1. ... -1. -2. -1.] [-1. -2. -1. ... 0. -1. 0.] ... [-2. 1. -1. ... 0. -1. 0.] [-2. -1. -1. ... 0. -1. 1.] [ 0. -2. 1. ... 0. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[ 1. -1. -1. ... 0. -1. 0.] [-1. 0. -2. ... -2. 0. -1.] [ 0. 1. 0. ... 0. 0. -2.] ... [ 0. -1. -1. ... 0. -1. -2.] [-1. 0. -1. ... -3. -1. -2.] [-1. 0. -1. ... -1. 1. 1.]] [[ 0. -2. -1. ... -1. -2. 1.] [ 1. -1. 0. ... 0. -2. 0.] [ 0. 1. 1. ... 0. -1. 0.] ... [-1. -3. -1. ... -1. -1. 1.] [ 3. 0. -2. ... -2. 0. 1.] [-1. -1. 1. ... 1. 0. 1.]] [[-2. 0. 0. ... 0. 0. -2.] [-1. 1. -1. ... -1. -2. -1.] [-1. -2. -1. ... 0. -1. 0.] ... [-2. 1. -1. ... 0. -1. 0.] [-2. -1. -1. ... 0. -1. 1.] [ 0. -2. 1. ... 0. -1. 
-1.]]]]; ov_res: [[[[ 1. -1. -1. ... 0. -1. 0.] [-1. 0. -2. ... -2. 0. -1.] [ 0. 1. 0. ... 0. 0. -2.] ... [ 0. -1. -1. ... 0. -1. -2.] [-1. 0. -1. ... -3. -1. -2.] [-1. 0. -1. ... -1. 1. 1.]] [[ 0. -2. -1. ... -1. -2. 1.] [ 1. -1. 0. ... 0. -2. 0.] [ 0. 1. 1. ... 0. -1. 0.] ... [-1. -3. -1. ... -1. -1. 1.] [ 3. 0. -2. ... -2. 0. 1.] [-1. -1. 1. ... 1. 0. 1.]] [[-2. 0. 0. ... 0. 0. -2.] [-1. 1. -1. ... -1. -2. -1.] [-1. -2. -1. ... 0. -1. 0.] ... [-2. 1. -1. ... 0. -1. 0.] [-2. -1. -1. ... 0. -1. 1.] [ 0. -2. 1. ... 0. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[[0.5]] - input_tensor:[-0.80945957 -0.9414682 1.4969639 -2.3047984 -0.28210458] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1312.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[-2. -2. 2. -5. -1.]]; ov_res: [[-2. -2. 2. -5. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[[0.5]] - input_tensor:[[[-1.4014182 ] [-0.38940692] [ 2.0926871 ] [-0.7249706 ] [ 0.4510613 ]] [[-0.7308124 ] [ 0.77649724] [ 1.600262 ] [-0.95150995] [-0.11358247]] [[-0.69071877] [ 0.13934493] [ 0.04790821] [-0.2819047 ] [ 0.14385499]] [[ 0.92586464] [-1.5850416 ] [-1.7169756 ] [-1.6380394 ] [-1.5305184 ]] [[ 0.6621189 ] [ 1.7097511 ] [ 0.33776814] [-1.3023399 ] [-1.6897037 ]]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1314.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[[-3.] [-1.] [ 4.] [-2.] [ 0.]] [[-2.] [ 1.] [ 3.] [-2.] [-1.]] [[-2.] [ 0.] [ 0.] [-1.] [ 0.]] [[ 1.] [-4.] [-4.] [-4.] [-4.]] [[ 1.] [ 3.] [ 0.] [-3.] [-4.]]]; ov_res: [[[-3.] [-1.] [ 4.] [-2.] [ 0.]] [[-2.] [ 1.] [ 3.] [-2.] [-1.]] [[-2.] [ 0.] [ 0.] [-1.] [ 0.]] [[ 1.] [-4.] [-4.] [-4.] [-4.]] [[ 1.] [ 3.] [ 0.] [-3.] [-4.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[[0.5]] - input_tensor:[[[[-0.6919343 -0.10116539 0.1452594 0.66338193 -0.621069 ] [ 1.1235336 -0.93407345 -1.4803522 1.4928006 -0.22491845] [ 1.3487433 0.35082003 0.20022644 0.0509897 -0.8364672 ] [ 0.04205312 2.0616775 -0.48445994 -1.1341372 -0.26971203] [-1.1967769 0.09163836 -1.8229662 -0.78206956 -0.27780572]]]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1316.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[[[-2. -1. 0. 1. -2.] [ 2. -2. -3. 2. -1.] [ 2. 0. 0. 0. -2.] [ 0. 4. -1. -3. -1.] [-3. 0. -4. -2. -1.]]]]; ov_res: [[[[-2. -1. 0. 1. -2.] [ 2. -2. -3. 2. -1.] [ 2. 0. 0. 0. -2.] [ 0. 4. -1. -3. -1.] [-3. 0. -4. -2. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[ 1.3534938 2.1854753 0.36389795 -1.7454736 -0.04353828] - input_tensor:[-0.80945957 -0.9414682 1.4969639 -2.3047984 -0.28210458] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1318.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [-1. -1. 4. 1. 6.]; ov_res: [-1. -1. 4. 1. 6.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[ 1.3534938 2.1854753 0.36389795 -1.7454736 -0.04353828] - input_tensor:[[[-1.4014182 ] [-0.38940692] [ 2.0926871 ] [-0.7249706 ] [ 0.4510613 ]] [[-0.7308124 ] [ 0.77649724] [ 1.600262 ] [-0.95150995] [-0.11358247]] [[-0.69071877] [ 0.13934493] [ 0.04790821] [-0.2819047 ] [ 0.14385499]] [[ 0.92586464] [-1.5850416 ] [-1.7169756 ] [-1.6380394 ] [-1.5305184 ]] [[ 0.6621189 ] [ 1.7097511 ] [ 0.33776814] [-1.3023399 ] [-1.6897037 ]]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1320.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[[ -2. -1. -4. 0. 32.] [ -1. -1. -2. 0. 8.] [ 1. 0. 5. -2. -49.] [ -1. -1. -2. 0. 16.] [ 0. 0. 1. -1. -11.]] [[ -1. -1. -3. 0. 16.] [ 0. 0. 2. -1. -18.] [ 1. 0. 4. -1. -37.] [ -1. -1. -3. 0. 21.] [ -1. -1. -1. 0. 2.]] [[ -1. -1. -2. 0. 15.] [ 0. 0. 0. -1. -4.] [ 0. 0. 0. -1. -2.] [ -1. -1. -1. 0. 6.] [ 0. 0. 0. -1. -4.]] [[ 0. 0. 2. -1. -22.] [ -2. -1. -5. 0. 36.] [ -2. -1. -5. 0. 39.] [ -2. -1. -5. 0. 37.] [ -2. -1. -5. 0. 35.]] [[ 0. 0. 1. -1. -16.] [ 1. 0. 4. -1. -40.] [ 0. 0. 0. -1. -8.] [ -1. -1. -4. 0. 29.] [ -2. -1. -5. 0. 38.]]]; ov_res: [[[ -2. -1. -4. 0. 32.] [ -1. -1. -2. 0. 8.] [ 1. 0. 5. -2. -49.] [ -1. -1. -2. 0. 16.] [ 0. 0. 1. -1. -11.]] [[ -1. -1. -3. 0. 16.] [ 0. 0. 2. -1. -18.] [ 1. 0. 4. -1. -37.] [ -1. -1. -3. 0. 21.] [ -1. -1. -1. 0. 2.]] [[ -1. -1. -2. 0. 15.] [ 0. 0. 0. -1. -4.] [ 0. 0. 0. -1. -2.] [ -1. -1. -1. 0. 6.] [ 0. 0. 0. -1. -4.]] [[ 0. 0. 2. -1. -22.] [ -2. -1. -5. 0. 36.] [ -2. -1. -5. 0. 39.] [ -2. -1. -5. 0. 37.] [ -2. -1. -5. 0. 35.]] [[ 0. 0. 1. -1. -16.] [ 1. 0. 4. -1. -40.] [ 0. 0. 0. -1. -8.] [ -1. -1. -4. 0. 29.] [ -2. -1. -5. 0. 38.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[ 1.3534938 2.1854753 0.36389795 -1.7454736 -0.04353828] - input_tensor:[[[[-0.6919343 -0.10116539 0.1452594 0.66338193 -0.621069 ] [ 1.1235336 -0.93407345 -1.4803522 1.4928006 -0.22491845] [ 1.3487433 0.35082003 0.20022644 0.0509897 -0.8364672 ] [ 0.04205312 2.0616775 -0.48445994 -1.1341372 -0.26971203] [-1.1967769 0.09163836 -1.8229662 -0.78206956 -0.27780572]]]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1322.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[[[-1. -1. 0. -1. 14.] [ 0. -1. -5. -1. 5.] [ 0. 0. 0. -1. 19.] [ 0. 0. -2. 0. 6.] [-1. 0. -6. 0. 6.]]]]; ov_res: [[[[-1. -1. 0. -1. 14.] [ 0. -1. -5. -1. 5.] [ 0. 0. 0. -1. 19.] [ 0. 0. -2. 0. 6.] [-1. 0. -6. 0. 6.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[[-0.94487333] [ 1.6951487 ] [ 1.6655464 ] [ 0.2071931 ] [ 0.7233429 ]] - input_tensor:[-0.80945957 -0.9414682 1.4969639 -2.3047984 -0.28210458] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1324.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[ 0. 0. -2. 2. 0.] [ -1. -1. 0. -2. -1.] [ -1. -1. 0. -2. -1.] [ -4. -5. 7. -12. -2.] [ -2. -2. 2. -4. -1.]]; ov_res: [[ 0. 0. -2. 2. 0.] [ -1. -1. 0. -2. -1.] [ -1. -1. 0. -2. -1.] [ -4. -5. 7. -12. -2.] [ -2. -2. 2. -4. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[[-0.94487333] [ 1.6951487 ] [ 1.6655464 ] [ 0.2071931 ] [ 0.7233429 ]] - input_tensor:[[[-1.4014182 ] [-0.38940692] [ 2.0926871 ] [-0.7249706 ] [ 0.4510613 ]] [[-0.7308124 ] [ 0.77649724] [ 1.600262 ] [-0.95150995] [-0.11358247]] [[-0.69071877] [ 0.13934493] [ 0.04790821] [-0.2819047 ] [ 0.14385499]] [[ 0.92586464] [-1.5850416 ] [-1.7169756 ] [-1.6380394 ] [-1.5305184 ]] [[ 0.6621189 ] [ 1.7097511 ] [ 0.33776814] [-1.3023399 ] [-1.6897037 ]]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1326.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[[ 1.] [-1.] [ 1.] [-4.] [ 0.]] [[ 0.] [ 0.] [ 0.] [-5.] [-1.]] [[ 0.] [ 0.] [ 0.] [-2.] [ 0.]] [[-1.] [-1.] [-2.] [-8.] [-3.]] [[-1.] [ 1.] [ 0.] [-7.] [-3.]]]; ov_res: [[[ 1.] [-1.] [ 1.] [-4.] [ 0.]] [[ 0.] [ 0.] [ 0.] [-5.] [-1.]] [[ 0.] [ 0.] [ 0.] [-2.] [ 0.]] [[-1.] [-1.] [-2.] [-8.] [-3.]] [[-1.] [ 1.] [ 0.] [-7.] [-3.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[[-0.94487333] [ 1.6951487 ] [ 1.6655464 ] [ 0.2071931 ] [ 0.7233429 ]] - input_tensor:[[[[-0.6919343 -0.10116539 0.1452594 0.66338193 -0.621069 ] [ 1.1235336 -0.93407345 -1.4803522 1.4928006 -0.22491845] [ 1.3487433 0.35082003 0.20022644 0.0509897 -0.8364672 ] [ 0.04205312 2.0616775 -0.48445994 -1.1341372 -0.26971203] [-1.1967769 0.09163836 -1.8229662 -0.78206956 -0.27780572]]]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1328.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[[[ 0. 0. -1. -1. 0.] [ 0. -1. -1. 0. -1.] [ 0. 0. 0. 0. -1.] [ 0. 9. -3. -6. -2.] [-2. 0. -3. -2. -1.]]]]; ov_res: [[[[ 0. 0. -1. -1. 0.] [ 0. -1. -1. 0. -1.] [ 0. 0. 0. 0. -1.] [ 0. 9. -3. -6. -2.] [-2. 0. -3. -2. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[[-0.14532736 1.7973984 -0.9004882 0.8480743 -0.6931458 ]] - input_tensor:[-0.80945957 -0.9414682 1.4969639 -2.3047984 -0.28210458] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1330.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[ 5. -1. -2. -3. 0.]]; ov_res: [[ 5. -1. -2. -3. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[[-0.14532736 1.7973984 -0.9004882 0.8480743 -0.6931458 ]] - input_tensor:[[[-1.4014182 ] [-0.38940692] [ 2.0926871 ] [-0.7249706 ] [ 0.4510613 ]] [[-0.7308124 ] [ 0.77649724] [ 1.600262 ] [-0.95150995] [-0.11358247]] [[-0.69071877] [ 0.13934493] [ 0.04790821] [-0.2819047 ] [ 0.14385499]] [[ 0.92586464] [-1.5850416 ] [-1.7169756 ] [-1.6380394 ] [-1.5305184 ]] [[ 0.6621189 ] [ 1.7097511 ] [ 0.33776814] [-1.3023399 ] [-1.6897037 ]]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1332.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[[ 9. -1. 1. -2. 2.] [ 2. -1. 0. -1. 0.] [-15. 1. -3. 2. -4.] [ 4. -1. 0. -1. 1.] [ -4. 0. -1. 0. -1.]] [[ 5. -1. 0. -1. 1.] [ -6. 0. -1. 0. -2.] [-12. 0. -2. 1. -3.] [ 6. -1. 1. -2. 1.] [ 0. -1. 0. -1. 0.]] [[ 4. -1. 0. -1. 0.] [ -1. 0. -1. 0. -1.] [ -1. 0. -1. 0. -1.] [ 1. -1. 0. -1. 0.] [ -1. 0. -1. 0. -1.]] [[ -7. 0. -2. 1. -2.] [ 10. -1. 1. -2. 2.] [ 11. -1. 1. -3. 2.] [ 11. -1. 1. -2. 2.] [ 10. -1. 1. -2. 2.]] [[ -5. 0. -1. 0. -1.] [-12. 0. -2. 2. -3.] [ -3. 0. -1. 0. -1.] [ 8. -1. 1. -2. 1.] [ 11. -1. 1. -2. 2.]]]; ov_res: [[[ 9. -1. 1. -2. 2.] [ 2. -1. 0. -1. 0.] [-15. 1. -3. 2. -4.] [ 4. -1. 0. -1. 1.] [ -4. 0. -1. 0. -1.]] [[ 5. -1. 0. -1. 1.] [ -6. 0. -1. 0. -2.] [-12. 0. -2. 1. -3.] [ 6. -1. 1. -2. 1.] [ 0. -1. 0. -1. 0.]] [[ 4. -1. 0. -1. 0.] [ -1. 0. -1. 0. -1.] [ -1. 0. -1. 0. -1.] [ 1. -1. 0. -1. 0.] [ -1. 0. -1. 0. -1.]] [[ -7. 0. -2. 1. -2.] [ 10. -1. 1. -2. 2.] [ 11. -1. 1. -3. 2.] [ 11. -1. 1. -2. 2.] [ 10. -1. 1. -2. 2.]] [[ -5. 0. -1. 0. -1.] [-12. 0. -2. 2. -3.] [ -3. 0. -1. 0. -1.] [ 8. -1. 1. -2. 1.] [ 11. -1. 1. -2. 2.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_floor_divide.py::TestFloorDivide::test_floor_divide[ ie_device:CPU - precision:FP32 - other_tensor:[[-0.14532736 1.7973984 -0.9004882 0.8480743 -0.6931458 ]] - input_tensor:[[[[-0.6919343 -0.10116539 0.1452594 0.66338193 -0.621069 ] [ 1.1235336 -0.93407345 -1.4803522 1.4928006 -0.22491845] [ 1.3487433 0.35082003 0.20022644 0.0509897 -0.8364672 ] [ 0.04205312 2.0616775 -0.48445994 -1.1341372 -0.26971203] [-1.1967769 0.09163836 -1.8229662 -0.78206956 -0.27780572]]]] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_floor_divide.___torch_mangle_1334.aten_floor_divide, %input_tensor.1 : Tensor, %other_tensor.1 : Tensor): %3 : Tensor = aten::floor_divide(%input_tensor.1, %other_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_floor_divide.py:22:23 return (%3) fw_re: [[[[ 4. -1. -1. 0. 0.] [ -8. -1. 1. 1. 0.] [-10. 0. -1. 0. 1.] [ -1. 1. 0. -2. 0.] [ 8. 0. 2. -1. 0.]]]]; ov_res: [[[[ 4. -1. -1. 0. 0.] [ -8. -1. 1. 1. 0.] [-10. 0. -1. 0. 1.] [ -1. 1. 0. -2. 0.] [ 8. 0. 2. -1. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1335.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1337.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1339.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1341.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1343.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 
0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 
0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1345.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1347.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1349.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1351.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1353.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 
1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 
1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1355.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1357.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1359.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1361.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1363.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. 
-1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1365.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1367.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) SSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type 
(#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unabfw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1369.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1371.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full[ ie_device:CPU - precision:FP32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1373.aten_full, %x.1 : float): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::full(%self.shape, %x.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:30:23 return (%4) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 
0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 
0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] 
[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1374.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1376.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1378.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1380.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1382.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] 
[0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1384.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1386.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1388.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1390.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1392.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] 
[1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1394.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1396.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1398.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1400.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1402.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 
-1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 
-1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1404.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1406.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) le to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): 
Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened duringfw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1408.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1410.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1412.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] 
[0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1414.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1416.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1418.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1420.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1422.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] 
[0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1424.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1426.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1428.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1430.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1432.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] 
[1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1434.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1436.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1438.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1440.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1442.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 
-1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 
-1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1444.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGESfw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1446.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1448.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1450.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1452.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] 
[0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1454.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1456.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1458.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1460.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1462.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] 
[0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1464.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1466.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1468.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1470.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1472.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] 
[1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1474.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1476.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1478.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1480.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1482.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 
-1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 
-1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1484.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES 
or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): 
Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Pythofw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1486.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1488.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1490.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1492.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] 
[0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1494.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1496.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1498.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1500.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1502.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1504.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1506.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1508.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1510.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1512.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1514.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1516.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1518.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1520.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1522.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. 
-1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] 
[-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. 
-1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1524.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) n instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast 
Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened 
during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of ofw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1526.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1528.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1530.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1532.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 
0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] 
[0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 
0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1534.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1536.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1538.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1540.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1542.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1544.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1546.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1548.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1550.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1552.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1554.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1556.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1558.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1560.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1562.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) p: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in dfw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. 
-1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1564.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1566.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1568.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1570.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1572.aten_full_dtype_with_names, %x.1 : float): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:73 %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:48:23 return (%5) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 
0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] 
[0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 
0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1573.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1575.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1577.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1579.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1581.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] 
[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation 
successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1583.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1585.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1587.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1589.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1591.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] 
[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation 
successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1593.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1595.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1597.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1599.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1601.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) ebug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES 
or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C+fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] 
[-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 
-1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1603.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1605.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1607.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1609.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1611.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] 
[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation 
successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1613.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1615.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1617.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1619.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1621.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] 
[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation 
successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1623.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1625.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1627.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1629.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1631.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] 
[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation 
successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1633.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1635.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1637.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1639.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1641.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) + type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to 
C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constanfw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 
-1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 
-1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1643.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1645.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1647.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1649.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1651.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] 
[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation 
successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1653.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1655.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1657.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1659.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1661.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] 
[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation 
successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1663.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1665.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1667.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1669.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1671.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] 
[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation 
successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1673.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1675.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1677.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1679.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) t with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for defw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1681.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 
-1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 
-1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1683.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1685.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1687.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1689.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1691.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] 
[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation 
successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1693.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1695.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1697.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1699.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1701.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 
0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 
0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1703.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1705.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1707.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1709.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1711.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 
1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 
1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1713.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1715.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1717.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1719.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) tails) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: 
(no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1721.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. 
-1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. 
-1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1723.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1725.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1727.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1729.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1731.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 
0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 
0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1733.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1735.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1737.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1739.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1741.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 
0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 
0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1743.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1745.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1747.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1749.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1751.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 
1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 
1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1753.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1755.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1757.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1759.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1761.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. 
-1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. 
-1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1763.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1765.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1767.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1769.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1771.aten_full_dtype, %x.1 : float): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::full(%self.shape, %x.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:39:23 return (%5) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 
0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 
0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1772.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1774.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1776.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1778.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1780.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1782.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1784.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1786.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1788.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1790.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 
1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1792.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1794.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1796.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1798.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode 
for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python 
instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exceptionfw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1800.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 
-1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] 
[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1802.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1804.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1806.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1808.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1810.aten_full_out_with_names, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %7 : Tensor = aten::full(%self.shape, %x.1, %2, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%7) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1812.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1814.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1816.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1818.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1820.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1822.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1824.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1826.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1828.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1830.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1832.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1834.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1836.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1838.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1840.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] 
[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] 
[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1842.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1844.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1846.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1848.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1850.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1852.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1854.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1856.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1858.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1860.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1862.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1864.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1866.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1868.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1870.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:-1 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1872.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:-1 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1874.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:-1 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1876.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) D_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or 
compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable 
to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unabfw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1878.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1880.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] 
[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] 
[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1882.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0.5 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1884.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0.5 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1886.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1888.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1890.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1892.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0 - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1894.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0 - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1896.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0 - shape:[1, 2, 3, 4] ] | 0.10 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1898.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1900.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] 
[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] 
[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1902.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1904.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1906.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1908.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1910.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1912.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1914.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1916.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) le to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python 
instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened duringfw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1918.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1920.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. 
-1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. 
-1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1922.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1924.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1926.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1928.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1930.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 
0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 
0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1932.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1934.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1936.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1938.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1940.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] 
[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] 
[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1942.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1944.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1946.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1948.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1950.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1952.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1954.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES 
or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGESfw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1956.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1958.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1960.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. 
-1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. 
-1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1962.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1964.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1966.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1968.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1970.aten_full_out_with_names, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:53 %8 : Tensor = aten::full(%self.shape, %x.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:67:23 return (%8) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 
0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 
0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1971.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1973.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1975.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1977.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1979.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 
0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1981.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1983.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1985.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1987.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1989.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1991.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1993.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python 
instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Pythofw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1995.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1997.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_1999.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 
-1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 
-1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2001.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2003.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2005.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2007.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2009.aten_full_out, %x.1 : float): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %7 : Tensor = aten::full(%self.shape, %x.1, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%7) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 
0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2011.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2013.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2015.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2017.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2019.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2021.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2023.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2025.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2027.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2029.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2031.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2033.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) n instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of ofw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2035.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2037.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2039.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 
-1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 
-1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2041.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2043.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2045.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2047.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2049.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2051.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2053.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2055.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2057.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2059.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2061.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2063.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2065.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2067.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2069.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2071.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) p: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): 
Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in dfw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2073.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2075.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2077.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2079.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 
-1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 
-1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2081.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2083.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2085.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2087.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2089.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2091.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2093.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2095.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2097.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2099.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2101.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2103.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2105.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2107.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2109.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2111.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) ebug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened 
during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2113.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2115.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2117.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2119.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] 
[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. 
-1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2121.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2123.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2125.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2127.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2129.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] 
[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 
0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 
0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2131.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2133.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2135.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2137.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2139.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2141.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2143.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2145.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2147.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2149.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2151.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) + type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constanfw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2153.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2155.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2157.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2159.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] 
[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. 
-1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2161.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2163.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2165.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2167.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFull::test_full_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2169.aten_full_out, %x.1 : float): %2 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:66 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:53 %8 : Tensor = aten::full(%self.shape, %x.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:57:23 return (%8) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] 
[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 
0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 
0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2170.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2172.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2174.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2176.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2178.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 
0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2180.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2182.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2184.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2186.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2188.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) t with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for defw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 
1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2190.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2192.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2194.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2196.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2198.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] 
[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2200.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2202.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2204.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2206.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like[ ie_device:CPU - precision:FP32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2208.aten_full_like, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:124:23 return (%4) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] 
[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2209.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2211.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2213.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2215.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2217.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2219.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2221.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2223.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2225.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2227.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) tails) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to 
cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2229.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2231.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2233.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2235.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2237.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 
-1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 
-1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2239.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2241.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2243.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2245.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2247.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2249.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2251.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2253.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2255.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2257.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2259.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2261.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2263.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2265.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2267.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 
1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2269.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2271.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2273.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2275.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2277.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 
-1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 
-1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2279.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2281.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2283.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2285.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2287.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2289.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2291.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2293.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2295.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2297.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2299.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2301.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2303.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2305.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2307.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or 
compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable 
to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exceptionfw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2309.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2311.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2313.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2315.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2317.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 
-1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 
-1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2319.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2321.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2323.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2325.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2327.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2329.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - shape:[1, 2] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2331.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2333.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2335.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2337.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2339.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2341.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2343.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2345.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance 
to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILEfw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2347.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2349.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2351.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2353.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2355.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2357.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. 
-1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2359.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2361.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2363.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2365.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2367.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 
0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 
0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2369.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2371.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2373.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2375.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2377.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2379.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2381.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2383.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2385.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) D_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES 
or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unabfw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2387.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2389.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2391.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2393.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2395.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2397.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. 
-1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2399.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2401.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2403.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2405.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2407.aten_full_like_dtype, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %5 : Tensor = aten::full_like(%input_t.1, %x.1, %self.dtype, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:132:23 return (%5) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 
0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 
0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2408.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2410.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2412.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2414.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2416.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2418.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2420.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2422.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2424.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) le to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of 
op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened duringfw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2426.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2428.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2430.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2432.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2434.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2436.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 
-1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] 
[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2438.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2440.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2442.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2444.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2446.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%self.dtype, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %7 : Tensor = aten::full_like(%input_t.1, %x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%7) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2448.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2450.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2452.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2454.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2456.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] 
[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2458.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2460.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2462.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no 
schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGESfw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2464.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2466.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] 
[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2468.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2470.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2472.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2474.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2476.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] 
[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 
-1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2478.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2480.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2482.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2484.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2486.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] 
[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2488.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2490.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2492.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2494.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2496.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] 
[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2498.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2500.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2502.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened 
during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Pythofw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2504.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2506.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] 
[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2508.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2510.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2512.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2514.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2516.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] 
[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 
-1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2518.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2520.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2522.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2524.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2526.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] 
[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2528.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2530.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2532.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2534.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2536.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] 
[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2538.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2540.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2542.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) n instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of ofw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2544.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2546.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2548.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2550.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2552.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2554.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2556.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] 
[-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2558.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2560.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2562.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2564.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2566.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 
0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] 
[0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2568.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2570.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2572.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2574.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2576.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] 
[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2578.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2580.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) p: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to 
cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in dfw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2582.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2584.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2586.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2588.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2590.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2592.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2594.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2596.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] 
[-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2598.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2600.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2602.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2604.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestFullLike::test_full_like_out[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2606.aten_full_like_out, %input_t.1 : Tensor, %x.1 : float): %3 : int = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:68 %4 : NoneType = prim::Constant() %5 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %7 : Tensor = aten::tensor(%3, %self.dtype, %4, %5) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:55 %8 : Tensor = aten::full_like(%input_t.1, %x.1, %4, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:140:23 return (%8) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 
0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] 
[0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2608.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2610.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2612.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2614.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2616.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2618.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2620.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) ebug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no 
schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C+fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2622.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2624.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2626.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2628.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2630.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2632.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2634.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2636.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] 
[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. 
-1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. 
-1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2638.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2640.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2642.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2644.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full[ ie_device:CPU - precision:FP32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2646.aten_full, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %3, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:200:23 return (%5) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] 
[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 
0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 
0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2647.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2649.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2651.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2653.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2655.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2657.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2659.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) + type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constanfw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2661.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2663.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2665.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2667.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2669.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2671.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2673.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2675.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 
-1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 
-1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2677.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2679.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2681.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2683.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2685.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2687.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2689.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2691.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2693.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2695.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2697.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) t with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to 
cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for defw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2699.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2701.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2703.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2705.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2707.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2709.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2711.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2713.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2715.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 
-1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 
-1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2717.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2719.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2721.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2723.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2725.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2727.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2729.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2731.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2733.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2735.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2737.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) tails) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2739.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2741.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2743.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2745.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2747.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [-1]; ov_res: [-1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2749.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[-1 -1]]; ov_res: [[-1 -1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2751.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[-1 -1 -1] [-1 -1 -1]]]; ov_res: [[[-1 -1 -1] [-1 -1 -1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2753.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]]; ov_res: [[[[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]] [[-1 -1 -1 -1] [-1 -1 -1 -1] [-1 -1 -1 -1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2755.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 
-1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]]; ov_res: [[[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 
-1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]] [[[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]] [[[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]] [[-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1] [-1 -1 -1 -1 -1 -1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2757.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2759.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2761.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2763.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2765.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2767.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2769.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2771.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2773.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2775.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2777.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or 
compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable 
to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2779.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2781.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2783.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2785.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2787.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2789.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2791.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2793.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2795.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. 
-1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. 
-1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2797.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2799.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2801.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2803.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2805.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 
0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 
0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2807.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2809.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2811.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2813.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2815.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2817.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python 
instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exceptionfw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2819.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2821.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2823.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:1 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2825.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2827.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [-1.]; ov_res: [-1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2829.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[-1. -1.]]; ov_res: [[-1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2831.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[-1. -1. -1.] [-1. -1. -1.]]]; ov_res: [[[-1. -1. -1.] [-1. -1. -1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2833.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]]; ov_res: [[[[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]] [[-1. -1. -1. -1.] [-1. -1. -1. -1.] [-1. -1. -1. -1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:-1 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2835.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. 
-1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]]; ov_res: [[[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. 
-1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]] [[[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. 
-1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]] [[[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]] [[-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.] [-1. -1. -1. -1. -1. -1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2837.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [0.5]; ov_res: [0.5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2839.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[0.5 0.5]]; ov_res: [[0.5 0.5]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2841.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]]; ov_res: [[[0.5 0.5 0.5] [0.5 0.5 0.5]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2843.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]]; ov_res: [[[[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewFull::test_new_full_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - value:0.5 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2845.aten_full_with_dtype, %input_tensor.1 : Tensor, %x.1 : float): %3 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %6 : Tensor = aten::new_full(%input_tensor.1, %self.shape, %x.1, %self.dtype, %3, %3, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:209:23 return (%6) fw_re: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 
0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]]; ov_res: [[[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 
0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]] [[[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 
0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]] [[[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]] [[0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5] [0.5 0.5 0.5 0.5 0.5 0.5]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::zeros - shape:(1, 1) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2846.aten_op, %x.1 : Tensor): %2 : NoneType = prim::Constant() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %4 : Tensor = aten::zeros(%shape.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:266:23 return (%4) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::zeros - shape:(1, 2) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2848.aten_op, %x.1 : Tensor): %2 : NoneType = prim::Constant() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %4 : Tensor = aten::zeros(%shape.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:266:23 return (%4) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2850.aten_op, %x.1 : Tensor): %2 : NoneType = prim::Constant() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %4 : Tensor = aten::zeros(%shape.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:266:23 return (%4) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2852.aten_op, %x.1 : Tensor): %2 : NoneType = prim::Constant() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %4 : Tensor = aten::zeros(%shape.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:266:23 return (%4) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2854.aten_op, %x.1 : Tensor): %2 : NoneType = prim::Constant() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %4 : Tensor = aten::zeros(%shape.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:266:23 return (%4) happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILEfw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 
0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 
0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2856.aten_op, %x.1 : Tensor): %2 : NoneType = prim::Constant() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %4 : Tensor = aten::ones(%shape.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:266:23 return (%4) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2858.aten_op, %x.1 : Tensor): %2 : NoneType = prim::Constant() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %4 : Tensor = aten::ones(%shape.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:266:23 return (%4) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2860.aten_op, %x.1 : Tensor): %2 : NoneType = prim::Constant() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %4 : Tensor = aten::ones(%shape.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:266:23 return (%4) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2862.aten_op, %x.1 : Tensor): %2 : NoneType = prim::Constant() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %4 : Tensor = aten::ones(%shape.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:266:23 return (%4) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2864.aten_op, %x.1 : Tensor): %2 : NoneType = prim::Constant() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %4 : Tensor = aten::ones(%shape.1, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:266:23 return (%4) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 
1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2865.aten_op_like, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::zeros_like(%x.1, %2, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:274:23 return (%3) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2867.aten_op_like, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::zeros_like(%x.1, %2, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:274:23 return (%3) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2869.aten_op_like, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::zeros_like(%x.1, %2, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:274:23 return (%3) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2871.aten_op_like, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::zeros_like(%x.1, %2, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:274:23 return (%3) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2873.aten_op_like, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::zeros_like(%x.1, %2, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:274:23 return (%3) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::ones_like - shape:(1, 1) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2875.aten_op_like, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::ones_like(%x.1, %2, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:274:23 return (%3) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::ones_like - shape:(1, 2) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2877.aten_op_like, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::ones_like(%x.1, %2, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:274:23 return (%3) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2879.aten_op_like, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::ones_like(%x.1, %2, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:274:23 return (%3) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2881.aten_op_like, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::ones_like(%x.1, %2, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:274:23 return (%3) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill[ ie_device:CPU - precision:FP32 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2883.aten_op_like, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::ones_like(%x.1, %2, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:274:23 return (%3) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 
1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 
1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2884.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2886.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2888.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2890.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2892.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) D_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unabfw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] 
[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2894.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2896.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2898.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2900.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2902.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] 
[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2904.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2906.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2908.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2910.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2912.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] 
[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 
0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2914.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2916.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2918.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2920.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2922.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] 
[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2924.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2926.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2928.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2930.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2932.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) le to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to 
cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened duringfw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] 
[0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 
0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2934.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2936.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2938.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2940.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2942.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] 
[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2944.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2946.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2948.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2950.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2952.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2954.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2956.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2958.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2960.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2962.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 
1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2964.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2966.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2968.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2970.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGESfw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2972.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2974.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2976.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2978.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2980.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2982.aten_op_dtype_with_names, %x.1 : Tensor): %2 : NoneType = prim::Constant() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:62 %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %2, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:289:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 
1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2983.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2985.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2987.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2989.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2991.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 
0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2993.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2995.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2997.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_2999.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3001.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3003.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3005.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3007.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3009.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant 
with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in 
debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Pythofw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3011.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 
0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3013.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3015.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3017.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3019.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3021.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3023.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3025.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3027.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3029.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3031.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 
0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3033.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3035.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3037.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3039.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3041.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3043.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3045.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3047.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3049.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) n instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to 
C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of ofw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3051.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 
0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] 
[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3053.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3055.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3057.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3059.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3061.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 
1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3063.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3065.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3067.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3069.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3071.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::zeros(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 
0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] 
[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3073.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3075.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3077.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3079.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_dtype[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3081.aten_op_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %5 : Tensor = aten::ones(%shape.1, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:284:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 
1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3082.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3084.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3086.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) p: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: 
(no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in dfw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3088.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3090.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 
0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3092.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3094.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3096.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3098.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int8 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3100.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 
1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 
1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3102.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3104.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3106.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3108.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3110.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 
0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3112.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3114.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3116.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3118.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int32 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3120.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 
1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 
1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3122.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3124.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3126.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) ebug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C+fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3128.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3130.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 
0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3132.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3134.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3136.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3138.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:int64 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3140.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 
1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 
1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3142.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3144.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3146.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3148.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3150.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3152.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3154.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3156.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3158.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float32 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3160.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3162.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3164.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3166.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) + type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constanfw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3168.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3170.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::zeros(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3172.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3174.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3176.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3178.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:True - dtype:float64 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3180.aten_op_out_with_names, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:42 %8 : Tensor = aten::ones(%shape.1, %3, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:318:23 return (%8) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] 
[1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3181.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3183.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3185.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3187.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3189.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3191.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3193.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3195.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3197.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int8 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3199.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3201.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3203.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) t with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast 
Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened 
during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for defw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3205.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3207.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3209.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3211.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3213.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3215.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3217.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int32 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3219.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3221.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3223.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3225.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3227.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3229.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 
0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3231.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3233.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3235.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3237.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:int64 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3239.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 
1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3241.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3243.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) tails) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of 
op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3245.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3247.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3249.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3251.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3253.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3255.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3257.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float32 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3259.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::zeros - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3261.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::zeros - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3263.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::zeros - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3265.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::zeros - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3267.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::zeros - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3269.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::zeros(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::ones - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3271.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::ones - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3273.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::ones - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3275.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::ones - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3277.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_with_out[ ie_device:CPU - precision:FP32 - with_names:False - dtype:float64 - op_type:aten::ones - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3279.aten_op_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:55 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %shape.1 : int[] = aten::size(%x.1) # <string>:13:9 %7 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:42 %8 : Tensor = aten::ones(%shape.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:308:23 return (%8) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3280.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3282.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with 
schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug 
mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3284.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3286.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3288.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 
0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::ones_like - shape:(1, 1) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3290.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::ones_like - shape:(1, 2) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3292.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3294.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3296.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3298.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3300.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3302.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3304.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3306.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3308.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 
0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::ones_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3310.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::ones_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3312.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3314.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3316.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3318.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3320.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3322.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type 
(#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exceptionfw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3324.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3326.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3328.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 
0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::ones_like - shape:(1, 1) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3330.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::ones_like - shape:(1, 2) ] | 0.12 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3332.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3334.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3336.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3338.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3340.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3342.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3344.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3346.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3348.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 
0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] 
[0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::ones_like - shape:(1, 1) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3350.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::ones_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3352.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3354.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3356.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3358.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 
1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 
1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3360.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILEfw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3362.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3364.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3366.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3368.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::zeros_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 
0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] 
[0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::ones_like - shape:(1, 1) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3370.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::ones_like - shape:(1, 2) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3372.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3374.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3376.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3378.aten_op_like_dtype, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %4 : Tensor = aten::ones_like(%x.1, %self.dtype, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:298:23 return (%4) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 
1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 
1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3379.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3381.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3383.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3385.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3387.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::ones_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3389.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::ones_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3391.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3393.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3395.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int8 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3397.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=1]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 
1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3399.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3401.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3403.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3405.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3407.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::ones_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3409.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::ones_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3411.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3413.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3415.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int32 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3417.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=3]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 
1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3419.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[0]]; ov_res: [[0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3421.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3423.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3425.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3427.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 
0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::ones_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3429.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[1]]; ov_res: [[1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::ones_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3431.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3433.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3435.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:int64 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3437.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=4]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 
1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 
1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3439.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) le to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or 
compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable 
to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened duringfw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3441.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3443.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3445.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3447.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::ones_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3449.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::ones_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3451.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3453.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3455.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float32 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3457.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=6]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::zeros_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3459.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[0.]]; ov_res: [[0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::zeros_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3461.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::zeros_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3463.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::zeros_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3465.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::zeros_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3467.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::zeros_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 
0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::ones_like - shape:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3469.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[1.]]; ov_res: [[1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::ones_like - shape:(1, 2) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3471.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::ones_like - shape:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3473.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::ones_like - shape:(1, 2, 3, 4) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3475.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestZerosAndOnes::test_fill_like_with_out[ ie_device:CPU - precision:FP32 - dtype:float64 - op_type:aten::ones_like - shape:(2, 3, 4, 5, 6) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3477.aten_op_like_out, %x.1 : Tensor): %2 : int = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:51 %3 : NoneType = prim::Constant() %4 : bool = prim::Constant[value=0]() %self.dtype : int = prim::Constant[value=7]() %6 : Tensor = aten::tensor(%2, %self.dtype, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:38 %7 : Tensor = aten::ones_like(%x.1, %3, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:327:23 return (%7) conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance 
to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGESfw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 
1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3479.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3481.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3483.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3485.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3487.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 
0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3489.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3491.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3493.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3495.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3497.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 
0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3499.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3501.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3503.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3505.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3507.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 
0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3509.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3511.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3513.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3515.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3517.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: 
(no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Pythofw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 
0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3519.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3521.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3523.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3525.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3527.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 
0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3529.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3531.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3533.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3535.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3537.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:413:23 return (%4) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 
0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3539.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [False]; ov_res: [False] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3541.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[False False]]; ov_res: [[False False]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3543.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[False False False] [False False False]]]; ov_res: [[[False False False] [False False False]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3545.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]]; ov_res: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3547.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False 
False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False 
False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]]; ov_res: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] 
[False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False 
False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3549.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [False]; ov_res: [False] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3551.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[False False]]; ov_res: [[False False]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3553.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[False False False] [False False False]]]; ov_res: [[[False False False] [False False False]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3555.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]]; ov_res: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3557.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) n instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of ofw_re: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False 
False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False 
False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]]; ov_res: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] 
[False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False 
False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3559.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [False]; ov_res: [False] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3561.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[False False]]; ov_res: [[False False]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3563.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[False False False] [False False False]]]; ov_res: [[[False False False] [False False False]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3565.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]]; ov_res: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3567.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False 
False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False 
False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]]; ov_res: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] 
[False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False 
False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3569.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [False]; ov_res: [False] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3571.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[False False]]; ov_res: [[False False]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3573.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[False False False] [False False False]]]; ov_res: [[[False False False] [False False False]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3575.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]]; ov_res: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3577.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False 
False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False 
False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]]; ov_res: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] 
[False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False 
False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3579.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [False]; ov_res: [False] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3581.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[False False]]; ov_res: [[False False]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3583.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[False False False] [False False False]]]; ov_res: [[[False False False] [False False False]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3585.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]]; ov_res: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3587.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False 
False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False 
False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]]; ov_res: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] 
[False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False 
False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3589.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [False]; ov_res: [False] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3591.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[False False]]; ov_res: [[False False]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3593.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[False False False] [False False False]]]; ov_res: [[[False False False] [False False False]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3595.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) p: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): 
Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in dfw_re: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]]; ov_res: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3597.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False 
False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False 
False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]]; ov_res: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] 
[False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False 
False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3599.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [False]; ov_res: [False] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3601.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[False False]]; ov_res: [[False False]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3603.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[False False False] [False False False]]]; ov_res: [[[False False False] [False False False]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3605.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]]; ov_res: [[[[False False False False] [False False False False] [False False False False]] [[False False False False] [False False False False] [False False False False]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3607.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False 
False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False 
False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]]; ov_res: [[[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] 
[False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]] [[[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False 
False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]] [[[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]] [[False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False] [False False False False False False]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3609.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3611.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3613.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3615.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3617.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3619.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3621.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3623.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3625.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3627.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3629.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3631.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3633.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3635.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) ebug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened 
during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C+fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3637.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3639.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3641.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3643.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3645.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3647.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3649.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3651.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3653.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3655.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3657.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3659.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3661.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3663.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3665.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3667.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3669.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3671.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3673.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3675.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) + type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constanfw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3677.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3679.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3681.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3683.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3685.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3687.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3689.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3691.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3693.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3695.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3697.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3699.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3701.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3703.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3705.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3707.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3709.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3711.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3713.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) t with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python 
instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for defw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3715.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3717.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3719.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3721.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3723.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3725.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3727.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3729.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3731.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3733.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3735.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3737.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3739.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3741.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3743.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3745.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3747.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3749.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3751.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3753.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) tails) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES 
or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3755.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3757.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3759.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3761.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3763.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3765.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3767.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3769.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3771.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3773.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3775.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3777.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3779.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3781.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3783.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3785.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3787.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3789.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3791.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3793.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in 
debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast 
Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3795.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3797.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3799.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3801.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3803.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3805.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3807.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3809.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3811.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3813.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3815.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3817.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3819.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3821.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3823.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3825.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3827.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3829.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3831.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3833.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ 
type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of 
op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exceptionfw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3835.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3837.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3839.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3841.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3843.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3845.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3847.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3849.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3851.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3853.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3855.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3857.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3859.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3861.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3863.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3865.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3867.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3869.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3871.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with 
schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug 
mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILEfw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3873.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3875.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3877.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3879.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0]; ov_res: [0] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3881.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0 0]]; ov_res: [[0 0]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3883.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0 0 0] [0 0 0]]]; ov_res: [[[0 0 0] [0 0 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3885.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]]; ov_res: [[[[0 0 0 0] [0 0 0 0] [0 0 0 0]] [[0 0 0 0] [0 0 0 0] [0 0 0 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3887.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]]]; ov_res: [[[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]]] [[[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]]] [[[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0]] [[0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 0 0] [0 0 0 0 
0 0]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3889.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3891.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3893.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3895.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3897.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3899.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3901.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3903.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3905.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3907.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3909.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3911.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) D_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to 
C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unabfw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3913.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3915.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3917.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3919.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3921.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3923.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3925.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3927.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3929.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3931.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3933.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3935.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3937.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3939.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3941.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3943.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3945.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3947.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3949.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3951.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) le to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened duringfw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3953.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3955.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3957.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'bool'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3959.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3961.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3963.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3965.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3967.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3969.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3971.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3973.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3975.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3977.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3979.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3981.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3983.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3985.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3987.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3989.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no 
schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGESfw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3991.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3993.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3995.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3997.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_3999.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4001.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4003.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4005.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4007.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4009.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4011.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4013.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4015.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4017.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4019.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [0.]; ov_res: [0.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4021.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[0. 0.]]; ov_res: [[0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4023.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[0. 0. 0.] [0. 0. 0.]]]; ov_res: [[[0. 0. 0.] [0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4025.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]] [[0. 0. 0. 0.] [0. 0. 0. 0.] [0. 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewZeros::test_new_zeros_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4027.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_zeros(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:422:23 return (%5) fw_re: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 
0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 
0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]] [[[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]] [[[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]] [[0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.] [0. 0. 0. 0. 0. 0.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4029.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): 
Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Pythofw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4031.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4033.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4035.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4037.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4039.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4041.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4043.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4045.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4047.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4049.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4051.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4053.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4055.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4057.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4059.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4061.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4063.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4065.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4067.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 
1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4069.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) n instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened 
during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of ofw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4071.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4073.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4075.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4077.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 
1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4079.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4081.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4083.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4085.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones[ ie_device:CPU - precision:FP32 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4087.aten_full, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %4 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %2, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:472:23 return (%4) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 
1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'bool'> - shape:[1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4089.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [ True]; ov_res: [ True] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4091.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[ True True]]; ov_res: [[ True True]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4093.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[ True True True] [ True True True]]]; ov_res: [[[ True True True] [ True True True]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4095.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]]; ov_res: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4097.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True 
True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ 
True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]]; ov_res: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True 
True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] 
[ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4099.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [ True]; ov_res: [ True] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4101.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[ True True]]; ov_res: [[ True True]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4103.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[ True True True] [ True True True]]]; ov_res: [[[ True True True] [ True True True]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4105.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]]; ov_res: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4107.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) p: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): 
Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in dfw_re: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True 
True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] 
[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]]; ov_res: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True 
True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True 
True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4109.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [ True]; ov_res: [ True] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4111.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[ True True]]; ov_res: [[ True True]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4113.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[ True True True] [ True True True]]]; ov_res: [[[ True True True] [ True True True]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4115.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]]; ov_res: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4117.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True 
True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ 
True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]]; ov_res: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True 
True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] 
[ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4119.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [ True]; ov_res: [ True] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4121.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[ True True]]; ov_res: [[ True True]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4123.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[ True True True] [ True True True]]]; ov_res: [[[ True True True] [ True True True]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4125.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]]; ov_res: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4127.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True 
True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ 
True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]]; ov_res: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True 
True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] 
[ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4129.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [ True]; ov_res: [ True] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4131.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[ True True]]; ov_res: [[ True True]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4133.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[ True True True] [ True True True]]]; ov_res: [[[ True True True] [ True True True]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4135.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]]; ov_res: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4137.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True 
True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ 
True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]]; ov_res: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True 
True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] 
[ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4139.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [ True]; ov_res: [ True] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4141.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[ True True]]; ov_res: [[ True True]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4143.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[ True True True] [ True True True]]]; ov_res: [[[ True True True] [ True True True]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4145.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]]; ov_res: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4147.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) ebug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened 
during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C+fw_re: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True 
True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True 
True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]]; ov_res: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ 
True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True 
True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4149.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [ True]; ov_res: [ True] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4151.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[ True True]]; ov_res: [[ True True]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4153.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[ True True True] [ True True True]]]; ov_res: [[[ True True True] [ True True True]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4155.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]]; ov_res: [[[[ True True True True] [ True True True True] [ True True True True]] [[ True True True True] [ True True True True] [ True True True True]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:bool - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4157.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=11]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True 
True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ 
True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]]; ov_res: [[[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True 
True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]] [[[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]] [[[ True True True True True True] [ True True True True True True] [ True True True True True True] 
[ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]] [[ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True] [ True True True True True True]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4159.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4161.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4163.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4165.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4167.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4169.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4171.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4173.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4175.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4177.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4179.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4181.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4183.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4185.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4187.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) + type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constanfw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 
1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] 
[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4189.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4191.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4193.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4195.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4197.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4199.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4201.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4203.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4205.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4207.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4209.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4211.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4213.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4215.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4217.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4219.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4221.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4223.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4225.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) t with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast 
Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened 
during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for defw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:uint8 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4227.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=0]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4229.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4231.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4233.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4235.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4237.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4239.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4241.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4243.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4245.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4247.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4249.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4251.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4253.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4255.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4257.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4259.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4261.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4263.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4265.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) tails) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of 
op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4267.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4269.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4271.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4273.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4275.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4277.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4279.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4281.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4283.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4285.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4287.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4289.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4291.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4293.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4295.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int8 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4297.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=1]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4299.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4301.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4303.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4305.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in 
debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast 
Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4307.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4309.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4311.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4313.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4315.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4317.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4319.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4321.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4323.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4325.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4327.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4329.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4331.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4333.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4335.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4337.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4339.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4341.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4343.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4345.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to 
C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion 
of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exceptionfw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4347.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4349.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4351.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4353.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4355.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4357.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4359.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4361.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4363.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4365.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int32 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4367.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=3]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4369.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4371.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4373.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4375.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4377.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4379.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4381.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4383.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant 
with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in 
debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILEfw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4385.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4387.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4389.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4391.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4393.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4395.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4397.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4399.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4401.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4403.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4405.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4407.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4409.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4411.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4413.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4415.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4417.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4419.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4421.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4423.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) D_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to 
C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unabfw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4425.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4427.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4429.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1]; ov_res: [1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4431.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1 1]]; ov_res: [[1 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4433.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1 1 1] [1 1 1]]]; ov_res: [[[1 1 1] [1 1 1]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4435.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]]; ov_res: [[[[1 1 1 1] [1 1 1 1] [1 1 1 1]] [[1 1 1 1] [1 1 1 1] [1 1 1 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:int64 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4437.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=4]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]]]; ov_res: [[[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]]] [[[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]]] [[[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1]] [[1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 1] [1 1 1 1 1 
1]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4439.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4441.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4443.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4445.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4447.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4449.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4451.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4453.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4455.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4457.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4459.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4461.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4463.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) le to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened duringfw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4465.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4467.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4469.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4471.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4473.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4475.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4477.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4479.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4481.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4483.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4485.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4487.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4489.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4491.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4493.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4495.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4497.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4499.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4501.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no 
schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for 
details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGESfw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4503.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4505.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float32 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4507.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=6]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'bool'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4509.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'bool'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4511.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'bool'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4513.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'bool'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4515.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'bool'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4517.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.uint8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4519.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4521.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4523.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.uint8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4525.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.uint8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4527.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int8'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4529.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4531.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4533.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int8'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4535.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int8'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4537.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4539.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4541.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Pythofw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4543.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4545.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4547.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4549.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4551.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4553.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4555.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.int64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4557.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float32'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4559.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4561.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4563.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float32'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4565.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float32'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4567.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float64'> - shape:[1] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4569.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [1.]; ov_res: [1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4571.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[1. 1.]]; ov_res: [[1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4573.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float64'> - shape:[1, 2, 3, 4] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4575.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]]; ov_res: [[[[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]] [[1. 1. 1. 1.] [1. 1. 1. 1.] [1. 1. 1. 1.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_full.py::TestNewOnes::test_new_ones_with_dtype[ ie_device:CPU - precision:FP32 - dtype:float64 - input_dtype:<class 'numpy.float64'> - shape:[2, 3, 4, 5, 6] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_full.___torch_mangle_4577.aten_full_with_dtype, %input_tensor.1 : Tensor): %2 : NoneType = prim::Constant() %self.dtype : int = prim::Constant[value=7]() %self.shape : int[] = prim::Constant[value=[2, 3, 4, 5, 6]]() %5 : Tensor = aten::new_ones(%input_tensor.1, %self.shape, %self.dtype, %2, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_full.py:481:23 return (%5) fw_re: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 
1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]]; ov_res: [[[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 
1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]] [[[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]] [[[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]] [[1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 3} - params:{'n_groups': 3} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- 3 graph(%self : __torch__.test_group_norm.___torch_mangle_4578.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.weight : NoneType = prim::Constant() %self.n_groups : int = prim::Constant[value=3]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %5, %self.weight, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%6, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.weight, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) n instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to 
C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion 
of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of ofw_re: [[[-1.62506 -0.3228012 0.1402407 ... -0.72376883 0.25250503 0.124341 ] [-1.1029894 -1.2949824 0.35729346 ... 
-0.36764365 0.21874839 -0.480636 ] [ 0.32211274 0.4954687 0.45432982 ... -0.4695685 0.7256074 0.4076333 ] [-0.89474595 1.4368033 -1.9960102 ... 0.82861924 -1.7149584 -0.20176943] [ 1.042794 -1.6190667 -0.22009401 ... -0.37080595 -0.38773313 -0.3437139 ] [ 0.73785317 0.5700237 -0.2794367 ... -0.7202685 -1.8297735 0.7134766 ]] [[-1.2760402 1.6154963 -1.1666342 ... 1.051929 0.10237995 -0.27787068] [-0.62965626 0.7045587 -1.7748982 ... -0.01550834 0.6719431 -0.34516627] [ 0.5853066 -0.82939935 2.3642695 ... -1.3811215 0.00890727 -0.8733295 ] [-0.65325695 -0.5269014 -0.88207793 ... 0.98202294 -1.1823164 -1.0382675 ] [ 0.3531159 -1.1531063 0.5335452 ... -0.13494982 0.02164876 0.2212873 ] [ 0.18401805 -1.652617 0.18369302 ... 0.1689286 0.8132888 0.85895324]] [[-0.12160185 -0.63133836 -0.35006693 ... -0.7290419 0.7129159 1.4557257 ] [ 0.37794474 0.32716936 -0.14085384 ... 0.28482372 -0.66799635 -0.50898147] [ 0.2689858 -0.29272544 -0.8411914 ... 0.9465494 0.21631609 -0.59929997] [ 0.44941166 -1.6347537 -1.2831092 ... 1.8145165 -1.3783232 -0.9475418 ] [-0.19135284 0.15041974 -0.55420417 ... -0.905113 0.6428673 0.11701661] [ 1.3647225 0.9360701 1.319611 ... -0.9737628 -1.0448059 -2.165256 ]] ... [[ 0.41093627 -0.04608832 -0.9394369 ... -0.0182835 0.20804536 1.5211554 ] [ 2.1618865 -0.70711696 0.6929246 ... -1.6298575 -0.6652346 -1.1488032 ] [ 0.96622694 0.7894843 -0.7711948 ... -0.02290124 0.6468036 0.40551287] [-1.5277618 -1.9643753 -1.1226628 ... 0.6154347 0.31716114 -0.77942944] [ 0.0613985 0.2545183 0.95603263 ... -1.2176702 0.2379979 -1.7960818 ] [-0.7777979 -1.0899546 0.6115765 ... -1.0903668 -1.1951962 1.8882271 ]] [[ 0.36032453 1.5423429 -1.2951051 ... 0.6495119 -0.8191351 -1.4151887 ] [-2.0545237 0.22145753 0.4076852 ... -0.1350982 -0.40027738 -0.7133198 ] [ 0.53883946 -0.27124932 1.7307308 ... -1.6788803 0.50713724 1.3823024 ] [-0.14480431 0.46273696 -0.07090759 ... 1.6196411 -1.2018765 0.15841474] [-0.52472335 -1.122918 -1.5314828 ... 
-0.91003704 -1.6130551 -0.79803586] [ 1.0164199 0.8661213 0.9836887 ... 0.53167117 -0.58090234 1.1354709 ]] [[-0.6848297 -0.68269193 1.0212835 ... 1.1790874 -0.49394992 -1.489292 ] [ 1.7576023 -1.0717223 0.7482912 ... 0.6421339 0.92021817 0.10024536] [-0.30517954 -1.6585186 0.9449552 ... -0.24175641 0.46418917 0.5006072 ] [-1.504468 -0.14720234 0.47350037 ... 0.44060647 0.26858085 0.26902717] [-0.57296705 1.0872573 -0.749321 ... -0.49925014 -0.61371803 0.17200433] [-1.6311612 0.9720236 -0.18476483 ... 0.8043942 2.7422636 0.04088156]]]; ov_res: [[[-1.62506 -0.3228012 0.14024071 ... -0.7237688 0.25250503 0.124341 ] [-1.1029894 -1.2949824 0.35729346 ... -0.36764365 0.21874839 -0.480636 ] [ 0.32211274 0.4954687 0.45432982 ... -0.4695685 0.7256074 0.4076333 ] [-0.89474595 1.4368033 -1.9960102 ... 0.82861924 -1.7149584 -0.20176943] [ 1.042794 -1.6190667 -0.22009398 ... -0.37080592 -0.3877331 -0.34371388] [ 0.73785317 0.5700237 -0.27943668 ... -0.7202685 -1.8297735 0.7134766 ]] [[-1.2760402 1.6154963 -1.1666342 ... 1.051929 0.10237996 -0.27787068] [-0.62965626 0.7045587 -1.7748982 ... -0.01550834 0.6719431 -0.34516627] [ 0.5853066 -0.82939935 2.3642695 ... -1.3811215 0.00890727 -0.8733295 ] [-0.65325695 -0.5269014 -0.88207793 ... 0.98202294 -1.1823164 -1.0382675 ] [ 0.3531159 -1.1531063 0.5335452 ... -0.13494982 0.02164876 0.2212873 ] [ 0.18401805 -1.652617 0.18369302 ... 0.1689286 0.8132888 0.85895324]] [[-0.12160186 -0.6313384 -0.35006696 ... -0.72904193 0.71291596 1.4557258 ] [ 0.37794477 0.3271694 -0.14085385 ... 0.28482375 -0.6679964 -0.50898147] [ 0.2689858 -0.29272544 -0.8411914 ... 0.9465494 0.21631609 -0.59929997] [ 0.44941166 -1.6347537 -1.2831092 ... 1.8145165 -1.3783232 -0.9475418 ] [-0.19135284 0.15041974 -0.55420417 ... -0.905113 0.6428673 0.11701661] [ 1.3647225 0.9360701 1.319611 ... -0.9737628 -1.0448059 -2.165256 ]] ... [[ 0.41093627 -0.04608836 -0.9394371 ... -0.01828353 0.20804536 1.5211556 ] [ 2.1618867 -0.70711714 0.6929246 ... 
-1.6298578 -0.6652347 -1.1488035 ] [ 0.96622694 0.7894843 -0.7711948 ... -0.0229012 0.6468037 0.4055129 ] [-1.5277618 -1.9643753 -1.1226627 ... 0.6154347 0.3171612 -0.77942944] [ 0.06139848 0.25451827 0.95603263 ... -1.2176702 0.23799789 -1.7960819 ] [-0.7777979 -1.0899546 0.6115765 ... -1.0903668 -1.1951962 1.8882271 ]] [[ 0.36032453 1.5423429 -1.2951051 ... 0.64951193 -0.81913507 -1.4151887 ] [-2.0545237 0.22145756 0.4076852 ... -0.13509819 -0.40027738 -0.7133197 ] [ 0.53883934 -0.27124935 1.7307307 ... -1.6788802 0.5071371 1.3823023 ] [-0.14480437 0.46273685 -0.07090765 ... 1.619641 -1.2018764 0.15841465] [-0.5247233 -1.122918 -1.5314827 ... -0.910037 -1.6130551 -0.7980358 ] [ 1.0164199 0.86612135 0.9836887 ... 0.5316712 -0.5809023 1.1354709 ]] [[-0.68482965 -0.6826919 1.0212834 ... 1.1790873 -0.49394986 -1.4892919 ] [ 1.7576021 -1.0717221 0.7482911 ... 0.6421338 0.92021805 0.10024532] [-0.3051795 -1.6585182 0.9449551 ... -0.24175638 0.4641891 0.50060713] [-1.5044677 -0.14720231 0.4735003 ... 0.44060645 0.2685808 0.26902714] [-0.572967 1.0872574 -0.749321 ... -0.49925014 -0.61371803 0.17200437] [-1.6311612 0.9720237 -0.18476479 ... 0.8043943 2.7422636 0.0408816 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 3} - params:{'n_groups': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- 3 graph(%self : __torch__.test_group_norm.___torch_mangle_4580.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %5 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %6 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.weight : NoneType = prim::Constant() %self.n_groups : int = prim::Constant[value=1]() %10 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %11 : int = aten::size(%x.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %12 : int = aten::mul(%10, %11) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int[] = prim::ListConstruct(%12, %self.n_groups) %14 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %15 : int[] = aten::slice(%14, %4, %self.weight, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %16 : int[] = aten::list(%15) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %17 : int[] = aten::add(%13, %16) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%17, %3) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %19 : int = aten::len(%17) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %20 : int = aten::sub(%19, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%20, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %24 : int = aten::add(%i.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %25 : int = aten::__getitem__(%17, %24) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %25) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %27 : bool = aten::eq(%size_prods, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %28 : str = aten::format(%5, %17) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%28, %6) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %29 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.weight, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%29) fw_re: [[[ 0.04420414 0.8167845 -2.4371302 ... 1.4148623 1.1072406 0.39235342] [ 0.37130278 -1.1355562 0.8383322 ... 0.39216912 0.9933818 0.6995796 ] [-1.0241275 1.250658 0.81138504 ... -0.5625275 1.3799994 0.34104946] [ 0.4557823 0.9841139 -2.8159342 ... -0.20451851 0.49486765 -0.33799314] [-1.3208921 1.4188286 1.4698293 ... 0.26837233 -0.3324928 0.48333272] [-1.3573098 0.02741148 -0.6792509 ... 
-0.29539183 0.19686939 -2.1565816 ]] [[-0.53242016 -2.288412 -1.7005388 ... 1.5815853 0.5944149 -1.1855963 ] [-0.09748241 -1.2409654 -0.7776626 ... 0.09331201 0.5343122 1.7100884 ] [-0.20315726 1.4814144 -0.23374882 ... -0.20802507 2.031143 -0.36954084] [ 0.4051233 0.5660774 0.5215089 ... 0.19206528 0.82721925 0.27596897] [-0.85053456 0.59843844 0.43722573 ... -1.0597067 -0.4062158 0.77118 ] [-2.2873325 1.626366 0.70333695 ... 1.1364216 1.0986624 1.281751 ]] [[ 1.297967 -0.57189226 -0.35581782 ... 0.19592838 0.56592834 -0.56708604] [-0.44668996 -1.653023 1.5608892 ... -0.6014911 -1.8866693 0.9018253 ] [ 0.02781066 0.42489573 0.02693452 ... 0.42612445 -0.04461448 -0.501063 ] [ 0.508407 -1.1724769 -0.1469498 ... 1.011417 -0.14110173 0.15905781] [-0.56475484 -1.0709121 -1.729221 ... 0.30016938 0.3374932 0.02109803] [ 0.27515474 1.3778296 1.1707678 ... 1.2946845 -0.95572025 -0.4171436 ]] ... [[-0.36287895 -0.6369293 -1.7533114 ... -0.4962205 -0.8019984 -1.0970153 ] [ 1.2147164 -0.32799408 -0.17321415 ... 1.0067109 2.094967 -1.6584438 ] [-0.39387655 -0.6562917 0.95828915 ... 0.5617383 -0.64170265 -0.32468414] [ 1.0921944 -1.0022675 0.7203767 ... -0.01276755 0.8545033 -0.4377116 ] [-0.2490267 -0.86073357 0.43803355 ... -0.94859135 0.38400897 0.60016435] [ 0.09799445 -0.7529135 0.4779407 ... 1.6641592 -0.98130935 1.9619043 ]] [[ 0.5188959 0.7806213 -0.24114431 ... -1.4725169 0.5833035 0.5216196 ] [-0.43999517 -0.01542486 0.09273856 ... 1.3052578 0.72440106 -1.2111487 ] [ 0.4217542 -0.5455122 0.81435114 ... -1.4839168 0.87138456 1.7652977 ] [-0.581761 -0.7304987 0.53103155 ... 0.81130743 1.3407247 -0.7057912 ] [ 1.3577154 -0.5207078 0.12879598 ... 0.79041654 0.9140555 1.562219 ] [-0.26332113 0.18721536 -0.98816395 ... 0.42846274 -0.8439381 -0.36997357]] [[ 0.7160472 0.758124 0.04309408 ... -1.1512879 -0.80267584 -0.5728624 ] [ 1.3937138 -0.6179413 -0.0390956 ... 0.5257307 -1.80816 0.92604375] [ 2.004374 -0.93251514 -0.35292804 ... 
1.3969539 0.66157377 0.5580213 ] [ 1.0883297 -0.83928657 1.1974202 ... -0.32060412 -0.31010193 0.66306716] [ 1.0960414 -0.9845883 0.755744 ... 1.0481608 -0.20622441 -1.1561737 ] [ 1.1421838 -1.2024769 -0.71207505 ... -1.1922969 0.96169543 0.14523473]]]; ov_res: [[[ 0.04420416 0.8167845 -2.4371302 ... 1.4148624 1.1072406 0.39235345] [ 0.37130278 -1.1355562 0.8383322 ... 0.39216915 0.9933818 0.6995796 ] [-1.0241275 1.250658 0.81138504 ... -0.5625274 1.3799995 0.34104952] [ 0.45578232 0.9841139 -2.8159342 ... -0.20451848 0.49486768 -0.33799312] [-1.320892 1.4188286 1.4698293 ... 0.26837236 -0.33249274 0.48333275] [-1.3573096 0.0274115 -0.6792509 ... -0.2953918 0.19686942 -2.1565816 ]] [[-0.53242016 -2.288412 -1.7005388 ... 1.5815853 0.5944149 -1.1855963 ] [-0.09748241 -1.2409654 -0.7776626 ... 0.09331202 0.5343122 1.7100884 ] [-0.20315726 1.4814144 -0.23374881 ... -0.20802507 2.031143 -0.36954084] [ 0.4051233 0.5660774 0.5215089 ... 0.19206531 0.82721925 0.27596897] [-0.85053456 0.59843844 0.43722573 ... -1.0597067 -0.4062158 0.77118 ] [-2.2873325 1.626366 0.70333695 ... 1.1364216 1.0986624 1.281751 ]] [[ 1.2979671 -0.57189226 -0.3558178 ... 0.19592844 0.56592834 -0.56708604] [-0.44668993 -1.6530229 1.5608894 ... -0.6014911 -1.8866692 0.9018253 ] [ 0.0278107 0.42489576 0.02693456 ... 0.42612448 -0.04461444 -0.5010629 ] [ 0.50840706 -1.1724768 -0.14694975 ... 1.011417 -0.14110169 0.15905786] [-0.56475484 -1.0709121 -1.7292209 ... 0.3001694 0.33749324 0.02109806] [ 0.27515477 1.3778297 1.1707679 ... 1.2946846 -0.95572025 -0.41714358]] ... [[-0.36287892 -0.6369292 -1.7533114 ... -0.49622044 -0.8019984 -1.0970153 ] [ 1.2147164 -0.32799405 -0.17321414 ... 1.0067109 2.094967 -1.6584438 ] [-0.39387652 -0.6562917 0.95828915 ... 0.5617384 -0.64170265 -0.3246841 ] [ 1.0921944 -1.0022675 0.72037673 ... -0.01276754 0.8545033 -0.43771157] [-0.2490267 -0.86073357 0.43803355 ... -0.94859135 0.38400897 0.60016435] [ 0.09799446 -0.7529135 0.4779407 ... 
1.6641592 -0.98130935 1.9619043 ]] [[ 0.5188959 0.7806213 -0.24114434 ... -1.472517 0.5833035 0.5216196 ] [-0.43999517 -0.01542489 0.09273852 ... 1.3052577 0.72440106 -1.2111489 ] [ 0.42175418 -0.5455122 0.81435114 ... -1.4839168 0.87138444 1.7652977 ] [-0.581761 -0.7304987 0.53103155 ... 0.81130743 1.3407247 -0.7057912 ] [ 1.3577152 -0.5207078 0.12879594 ... 0.79041654 0.9140554 1.562219 ] [-0.26332116 0.18721531 -0.988164 ... 0.42846274 -0.8439381 -0.36997363]] [[ 0.7160471 0.75812393 0.04309409 ... -1.1512878 -0.8026758 -0.57286227] [ 1.3937136 -0.6179412 -0.03909558 ... 0.52573067 -1.8081597 0.9260437 ] [ 2.0043738 -0.932515 -0.35292798 ... 1.3969537 0.66157365 0.5580212 ] [ 1.0883296 -0.8392865 1.19742 ... -0.32060403 -0.31010187 0.6630671 ] [ 1.0960413 -0.9845882 0.7557439 ... 1.0481607 -0.20622437 -1.1561736 ] [ 1.1421837 -1.2024767 -0.712075 ... -1.1922967 0.9616953 0.14523472]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 3} - params:{'n_groups': 3, 'eps': 1.0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- 3 graph(%self : __torch__.test_group_norm.___torch_mangle_4582.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.]() %self.weight : NoneType = prim::Constant() %self.n_groups : int = prim::Constant[value=3]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %5, %self.weight, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 
%19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%6, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.weight, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) fw_re: [[[-0.44942385 0.17619756 -0.31492925 ... -1.2141405 -0.43479759 -0.30707964] [ 0.23041783 0.55681175 0.13411847 ... -0.20514749 0.6390344 0.36216304] [ 0.06867787 -1.2324983 0.07238683 ... -0.07775512 0.8619537 0.8319036 ] [-0.2115509 0.38630134 -0.65024066 ... 
1.2459655 0.06295735 -0.25518516] [-0.655473 -1.0499502 -0.03323679 ... 1.0653802 0.01833082 -0.90500695] [ 0.09433024 1.5563006 -0.15883823 ... 0.07909569 0.0441191 -1.1462468 ]] [[-0.31298393 -0.49512827 0.340695 ... -0.8300913 0.44966435 -0.15166134] [-0.61845404 0.18800467 -0.1309931 ... 0.41536745 1.0884736 0.34917298] [-0.7731322 -0.17654686 0.08735369 ... 0.641068 -1.242673 -1.0960152 ] [ 0.7202019 1.357402 -0.39875728 ... -0.19970815 -0.8311465 -0.24884433] [-1.1556169 0.23321646 -0.728121 ... 2.4097621 0.4189943 -0.28157532] [-0.42777252 1.4166528 -0.64695805 ... -0.542819 0.00672407 -0.48090965]] [[-0.4917504 -0.3267189 -0.55819666 ... 1.2691679 0.32617465 -0.54911244] [-1.8065155 1.0493798 0.312837 ... 0.67933863 0.50178874 -0.14053342] [ 0.55909 0.09728182 0.19283833 ... -0.16431746 -0.23281628 -0.879967 ] [-0.06234891 0.6764206 -0.0082887 ... 0.9201755 -0.15281726 -1.0870177 ] [-0.40140766 1.0148457 1.0441039 ... 0.3904632 0.320493 -1.5199906 ] [-0.34162462 -0.06362066 -0.37041256 ... 1.4481308 0.01726688 0.37891284]] ... [[-1.3551852 -1.5159818 -0.23271406 ... 0.6355324 0.06816676 0.8442787 ] [ 0.30176407 -0.10121214 0.7254788 ... -0.9520679 0.2397092 -0.16754827] [ 0.8162387 -0.6642903 0.22495049 ... 0.7046483 -0.43993357 0.23764086] [ 0.21883653 0.74201065 -0.48578608 ... 0.430095 0.72659856 0.02798334] [ 0.24517235 -0.36905146 -0.40500185 ... -0.01232781 -0.64993584 0.30391008] [-0.2858162 -1.3182175 0.17693801 ... 0.6330201 -0.06690264 -0.4524843 ]] [[ 1.1312046 -1.3642389 -0.12522523 ... 0.7010786 0.6027542 -0.0242259 ] [-0.24884741 -0.07446835 0.1626188 ... 0.11014998 -0.6019054 -0.18030606] [-1.4169316 -1.5760543 0.03719207 ... 0.69712305 -0.05460063 -0.08209411] [ 0.5231257 -0.21235053 -0.08473483 ... 0.4679321 0.07522849 -0.00494393] [-1.3114961 -0.5103013 -0.07100023 ... 0.56750673 -0.272848 1.4350041 ] [ 0.44201183 0.80235356 -0.32856703 ... -0.39281803 0.2611722 0.35450843]] [[ 0.07002492 -0.30855218 -0.06875335 ... 
0.33381853 0.7032494 -0.7017541 ] [ 0.742588 0.8200319 -0.73883325 ... -0.4227844 0.32145154 0.16462614] [ 0.23312026 0.46136847 0.1279515 ... -0.89489883 -0.3670861 0.3736647 ] [-0.3690443 0.7365641 0.44816467 ... 1.2876011 -0.48654673 -0.09062447] [ 1.7671486 0.03126444 0.1268585 ... -0.6350566 -0.94865847 -0.16934025] [ 0.29127285 -0.8073715 -0.7023145 ... 0.49354333 0.09943842 -0.08261261]]]; ov_res: [[[-0.4494239 0.17619751 -0.3149293 ... -1.2141407 -0.4347976 -0.30707964] [ 0.23041779 0.55681175 0.13411844 ... -0.20514752 0.63903433 0.36216298] [ 0.06867789 -1.2324984 0.07238685 ... -0.07775512 0.86195374 0.83190364] [-0.21155092 0.38630143 -0.6502407 ... 1.2459656 0.06295737 -0.25518516] [-0.65547293 -1.0499502 -0.03323676 ... 1.0653803 0.01833086 -0.9050069 ] [ 0.09433027 1.5563006 -0.15883818 ... 0.07909573 0.04411915 -1.1462468 ]] [[-0.31298396 -0.4951283 0.34069496 ... -0.8300914 0.44966435 -0.15166137] [-0.61845404 0.18800466 -0.13099311 ... 0.4153674 1.0884736 0.34917298] [-0.7731322 -0.17654684 0.0873537 ... 0.64106804 -1.242673 -1.0960152 ] [ 0.72020197 1.357402 -0.39875728 ... -0.19970812 -0.8311465 -0.2488443 ] [-1.1556169 0.23321648 -0.728121 ... 2.4097621 0.41899434 -0.2815753 ] [-0.4277725 1.4166529 -0.64695805 ... -0.54281896 0.0067241 -0.48090962]] [[-0.4917504 -0.3267189 -0.55819666 ... 1.2691679 0.32617465 -0.54911244] [-1.8065155 1.0493798 0.312837 ... 0.67933863 0.50178874 -0.14053342] [ 0.55908996 0.09728181 0.19283831 ... -0.16431744 -0.23281625 -0.87996686] [-0.0623489 0.67642057 -0.0082887 ... 0.9201755 -0.15281725 -1.0870175 ] [-0.40140763 1.0148456 1.0441039 ... 0.39046314 0.32049298 -1.5199903 ] [-0.3416246 -0.06362066 -0.37041253 ... 1.4481307 0.01726688 0.3789128 ]] ... [[-1.355185 -1.5159817 -0.23271406 ... 0.6355323 0.06816673 0.84427863] [ 0.301764 -0.10121215 0.72547877 ... -0.9520679 0.23970917 -0.16754828] [ 0.81623864 -0.66429025 0.22495048 ... 0.70464826 -0.43993354 0.23764087] [ 0.21883652 0.7420106 -0.48578605 ... 
0.43009496 0.7265985 0.02798334] [ 0.24517235 -0.36905146 -0.40500185 ... -0.01232782 -0.64993584 0.30391008] [-0.2858162 -1.3182176 0.17693801 ... 0.6330201 -0.06690265 -0.45248437]] [[ 1.1312046 -1.3642389 -0.12522523 ... 0.7010786 0.6027542 -0.0242259 ] [-0.24884741 -0.07446835 0.1626188 ... 0.11014998 -0.6019054 -0.18030606] [-1.4169316 -1.5760543 0.03719208 ... 0.6971231 -0.05460062 -0.0820941 ] [ 0.5231257 -0.21235053 -0.08473481 ... 0.4679321 0.0752285 -0.00494392] [-1.3114963 -0.5103013 -0.07100023 ... 0.56750685 -0.272848 1.4350042 ] [ 0.44201192 0.8023536 -0.32856706 ... -0.39281806 0.26117224 0.35450846]] [[ 0.07002494 -0.30855212 -0.06875332 ... 0.33381853 0.7032494 -0.701754 ] [ 0.742588 0.8200318 -0.7388332 ... -0.42278433 0.32145154 0.16462615] [ 0.23312029 0.46136847 0.1279515 ... -0.89489883 -0.36708608 0.3736647 ] [-0.3690443 0.7365641 0.44816467 ... 1.2876011 -0.48654673 -0.09062445] [ 1.7671486 0.03126444 0.1268585 ... -0.6350566 -0.94865847 -0.16934025] [ 0.29127285 -0.8073715 -0.7023145 ... 0.49354333 0.09943842 -0.08261261]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 3} - params:{'n_groups': 3, 'weights_shape': (6,), 'eps': -0.05} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- 3 graph(%self : __torch__.test_group_norm.___torch_mangle_4584.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=-0.050000000000000003]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.5886 0.4225 -1.7811 0.0318 0.4499 -0.2369 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=3]() %12 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, %5, %self.bias, %4) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) p: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4811555 (Squeeze_4811554[0]:i64[], Constant_4811504[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4811555': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4814042 (Squeeze_4814041[0]:i64[], Constant_4813992[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4814042': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4816531 (Squeeze_4816530[0]:i64[], Constant_4816480[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4816531': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constfw_re: [[[-3.46878588e-01 -9.31818962e-01 -3.52835238e-01 ... 6.80595398e-01 -5.80369651e-01 8.41324210e-01] [-4.01457936e-01 -2.35919550e-01 2.10956335e-01 ... 9.85731423e-01 -1.28366962e-01 -1.67074025e-01] [-3.76927465e-01 1.92537999e+00 1.65672198e-01 ... -2.40723848e+00 1.64485276e+00 -2.85342288e+00] [ 1.65166017e-02 -1.07889250e-02 -5.08368015e-02 ... -1.36258258e-02 -3.78280506e-02 2.85445489e-02] [-2.92300493e-01 -2.98255444e-01 9.19738412e-01 ... 
-5.69304824e-01 -1.15854609e+00 -6.03683945e-03] [-1.28702238e-01 -6.20164052e-02 -3.66534710e-01 ... -1.31988674e-01 -3.25380206e-01 -8.76182690e-02]] [[ 4.55599397e-01 -5.69971144e-01 -3.76448423e-01 ... -1.02446389e+00 3.08764070e-01 5.29133618e-01] [ 6.38558447e-01 -4.53888357e-01 4.03295249e-01 ... 5.64021230e-01 -1.86522007e-01 -3.40019733e-01] [ 9.74521220e-01 -4.03360891e+00 1.22325122e+00 ... 1.74759698e+00 -1.96384680e+00 -1.70268655e+00] [ 2.60662790e-02 2.98053119e-02 4.72623557e-02 ... -6.08994684e-04 -1.12276466e-03 7.96225201e-03] [-2.83752441e-01 -3.32485467e-01 8.71291876e-01 ... 2.50744343e-01 1.12211311e+00 -2.86898315e-01] [ 2.04837650e-01 2.32704982e-01 1.12828970e-01 ... 2.04112694e-01 4.33415711e-01 2.74640024e-01]] [[-8.68044674e-01 4.96341348e-01 4.66656566e-01 ... 3.39032978e-01 6.54223442e-01 -2.05503359e-01] [ 6.01940095e-01 -6.66492462e-01 3.43900502e-01 ... 5.58568656e-01 -1.64779902e-01 3.45596433e-01] [ 1.33681774e+00 5.37508070e-01 5.13882160e-01 ... -3.46794558e+00 -1.05891907e+00 1.78765059e-01] [-7.01130703e-02 4.64911275e-02 -2.05917489e-02 ... -1.95396934e-02 -2.83033103e-02 -2.16506105e-02] [ 4.41313297e-01 3.61149102e-01 1.01794034e-01 ... 3.16432029e-01 -4.02045101e-02 -6.05813205e-01] [ 4.79293287e-01 1.18245035e-01 -4.01803516e-02 ... 1.22687638e-01 1.94552332e-01 8.20785016e-02]] ... [[-1.38252392e-01 -6.06559873e-01 -9.68222082e-01 ... -6.11490428e-01 -2.56760746e-01 8.20050001e-01] [ 5.04823864e-01 5.91089845e-01 -2.11684227e-01 ... -4.10396248e-01 9.05748248e-01 1.94236174e-01] [-8.56328309e-01 -2.74402946e-01 -8.75567973e-01 ... 1.68668365e+00 1.45239532e+00 5.07823110e-01] [-5.33323502e-04 7.29274228e-02 -9.97259934e-03 ... 2.43209563e-02 -4.62574288e-02 4.97698188e-02] [-3.67494583e-01 -1.75390780e-01 -2.19211802e-01 ... -1.59674793e-01 6.25326693e-01 -5.80490589e-01] [ 1.68108419e-01 2.51673490e-01 -3.59281041e-02 ... 
-4.26781952e-01 -8.04237649e-02 -2.55687624e-01]] [[-2.08212733e-01 -3.58684331e-01 4.84516531e-01 ... -3.50252241e-01 7.47248471e-01 2.76647717e-01] [ 1.57446548e-01 3.21994424e-02 1.84348717e-01 ... 3.35817367e-01 -1.00885965e-01 -1.05258322e+00] [ 2.14382768e+00 8.63519847e-01 -9.16565418e-01 ... -2.75016451e+00 -5.10934114e-01 -1.18791997e+00] [-1.90949775e-02 1.22895529e-02 6.21573627e-02 ... -9.25100874e-03 -5.68737425e-02 -6.78942055e-02] [-2.04535112e-01 -4.12565589e-01 -6.78144515e-01 ... -3.38212222e-01 -7.42167234e-01 1.39653057e-01] [-2.61220455e-01 3.87487352e-01 -3.12467426e-01 ... 1.96694195e-01 2.07577646e-01 -3.37333918e-01]] [[ 8.94401193e-01 -6.23598933e-01 -3.71390551e-01 ... -1.17944680e-01 6.72097862e-01 8.05710912e-01] [-1.14871435e-01 -4.99759018e-01 -3.10917079e-01 ... 1.66164577e-01 -3.85546774e-01 -3.54409188e-01] [ 1.74444354e+00 -4.67229366e-01 2.79658008e+00 ... -2.32353762e-01 -1.17045844e+00 5.72007477e-01] [ 5.97066283e-02 4.80854418e-03 2.94836853e-02 ... -3.77313383e-02 1.59835897e-03 -2.53883339e-02] [ 9.25053656e-01 -1.00030363e+00 3.96908283e-01 ... 3.89367372e-01 7.65785202e-02 -2.15947881e-01] [-2.34459832e-01 -2.50317693e-01 -1.42552927e-02 ... 1.59695625e-01 2.44258091e-01 -1.19891509e-01]]]; ov_res: [[[-3.46878588e-01 -9.31819022e-01 -3.52835238e-01 ... 6.80595398e-01 -5.80369651e-01 8.41324151e-01] [-4.01457995e-01 -2.35919580e-01 2.10956305e-01 ... 9.85731304e-01 -1.28366977e-01 -1.67074054e-01] [-3.76927465e-01 1.92538023e+00 1.65672243e-01 ... -2.40723896e+00 1.64485300e+00 -2.85342336e+00] [ 1.65166017e-02 -1.07889278e-02 -5.08368053e-02 ... -1.36258276e-02 -3.78280580e-02 2.85445545e-02] [-2.92300403e-01 -2.98255384e-01 9.19738233e-01 ... -5.69304705e-01 -1.15854585e+00 -6.03683665e-03] [-1.28702208e-01 -6.20163865e-02 -3.66534650e-01 ... -1.31988645e-01 -3.25380117e-01 -8.76182467e-02]] [[ 4.55599427e-01 -5.69971144e-01 -3.76448423e-01 ... 
-1.02446389e+00 3.08764100e-01 5.29133677e-01] [ 6.38558388e-01 -4.53888357e-01 4.03295249e-01 ... 5.64021230e-01 -1.86522007e-01 -3.40019703e-01] [ 9.74521160e-01 -4.03360891e+00 1.22325122e+00 ... 1.74759686e+00 -1.96384680e+00 -1.70268655e+00] [ 2.60662828e-02 2.98053138e-02 4.72623631e-02 ... -6.08994043e-04 -1.12276408e-03 7.96225388e-03] [-2.83752471e-01 -3.32485497e-01 8.71291816e-01 ... 2.50744283e-01 1.12211299e+00 -2.86898345e-01] [ 2.04837635e-01 2.32704967e-01 1.12828970e-01 ... 2.04112679e-01 4.33415651e-01 2.74640024e-01]] [[-8.68044615e-01 4.96341377e-01 4.66656566e-01 ... 3.39033008e-01 6.54223502e-01 -2.05503359e-01] [ 6.01940155e-01 -6.66492462e-01 3.43900532e-01 ... 5.58568656e-01 -1.64779902e-01 3.45596462e-01] [ 1.33681774e+00 5.37508070e-01 5.13882160e-01 ... -3.46794605e+00 -1.05891919e+00 1.78765059e-01] [-7.01130778e-02 4.64911349e-02 -2.05917489e-02 ... -1.95396952e-02 -2.83033121e-02 -2.16506124e-02] [ 4.41313297e-01 3.61149132e-01 1.01794019e-01 ... 3.16431999e-01 -4.02045250e-02 -6.05813205e-01] [ 4.79293257e-01 1.18245043e-01 -4.01803479e-02 ... 1.22687638e-01 1.94552347e-01 8.20785090e-02]] ... [[-1.38252437e-01 -6.06559932e-01 -9.68222201e-01 ... -6.11490488e-01 -2.56760806e-01 8.20050120e-01] [ 5.04823923e-01 5.91089904e-01 -2.11684272e-01 ... -4.10396308e-01 9.05748367e-01 1.94236189e-01] [-8.56328309e-01 -2.74402946e-01 -8.75567913e-01 ... 1.68668354e+00 1.45239520e+00 5.07823110e-01] [-5.33323677e-04 7.29274154e-02 -9.97259840e-03 ... 2.43209545e-02 -4.62574288e-02 4.97698188e-02] [-3.67494613e-01 -1.75390795e-01 -2.19211802e-01 ... -1.59674793e-01 6.25326633e-01 -5.80490589e-01] [ 1.68108419e-01 2.51673490e-01 -3.59281003e-02 ... -4.26781923e-01 -8.04237649e-02 -2.55687624e-01]] [[-2.08212718e-01 -3.58684301e-01 4.84516561e-01 ... -3.50252241e-01 7.47248530e-01 2.76647747e-01] [ 1.57446578e-01 3.21994536e-02 1.84348747e-01 ... 3.35817367e-01 -1.00885965e-01 -1.05258322e+00] [ 2.14382768e+00 8.63519728e-01 -9.16565418e-01 ... 
-2.75016451e+00 -5.10934174e-01 -1.18791997e+00] [-1.90949738e-02 1.22895529e-02 6.21573552e-02 ... -9.25100688e-03 -5.68737388e-02 -6.78942055e-02] [-2.04535127e-01 -4.12565649e-01 -6.78144634e-01 ... -3.38212252e-01 -7.42167354e-01 1.39653072e-01] [-2.61220485e-01 3.87487411e-01 -3.12467456e-01 ... 1.96694225e-01 2.07577676e-01 -3.37333947e-01]] [[ 8.94401073e-01 -6.23598933e-01 -3.71390551e-01 ... -1.17944695e-01 6.72097862e-01 8.05710852e-01] [-1.14871450e-01 -4.99759048e-01 -3.10917109e-01 ... 1.66164577e-01 -3.85546803e-01 -3.54409188e-01] [ 1.74444342e+00 -4.67229366e-01 2.79657960e+00 ... -2.32353762e-01 -1.17045844e+00 5.72007418e-01] [ 5.97066209e-02 4.80854372e-03 2.94836815e-02 ... -3.77313346e-02 1.59835897e-03 -2.53883321e-02] [ 9.25053656e-01 -1.00030375e+00 3.96908313e-01 ... 3.89367372e-01 7.65785351e-02 -2.15947852e-01] [-2.34459832e-01 -2.50317693e-01 -1.42552992e-02 ... 1.59695610e-01 2.44258091e-01 -1.19891524e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 3} - params:{'n_groups': 3, 'weights_shape': (6,)} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- 3 graph(%self : __torch__.test_group_norm.___torch_mangle_4586.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.1596 -1.2182 0.6793 -0.1472 0.9630 -1.2837 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=3]() %12 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, %5, %self.bias, %4) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) fw_re: [[[-1.51887193e-01 1.37648895e-01 1.31766945e-01 ... 
-1.45852357e-01 -8.70576575e-02 -1.66147724e-01] [-8.46709490e-01 -1.22922935e-01 -9.81612563e-01 ... 4.92438704e-01 1.19739473e+00 2.54552269e+00] [-7.00966194e-02 8.82155776e-01 -3.57899457e-01 ... 1.25338602e+00 -1.43517599e-01 -7.11432159e-01] [-3.33002985e-01 -1.63717970e-01 -6.25596493e-02 ... -1.04922809e-01 6.32369667e-02 1.21645793e-01] [-5.72536647e-01 1.09433079e+00 5.14790177e-01 ... -8.65770042e-01 7.14908957e-01 6.22665465e-01] [ 4.99543667e-01 9.52899814e-01 -1.91394582e-01 ... -2.94356322e+00 3.21278989e-01 -6.19845867e-01]] [[ 9.12607387e-02 -1.78474352e-01 1.38523862e-01 ... 1.34037640e-02 -1.88184574e-01 2.96191990e-01] [-1.91865647e+00 -1.34492628e-02 3.61827075e-01 ... -8.26298833e-01 -8.06007266e-01 1.34838021e+00] [ 1.27142251e-01 -6.04974627e-01 -1.99946668e-02 ... -4.34502631e-01 4.02246326e-01 4.40363407e-01] [-1.76832944e-01 1.92808405e-01 -1.98447466e-01 ... -9.50302705e-02 -2.72542208e-01 6.11321554e-02] [-1.18980157e+00 6.83949888e-02 1.67896569e+00 ... -1.07957983e+00 1.52549446e+00 3.03352922e-01] [ 2.22347927e+00 -1.15256771e-01 1.27410316e+00 ... -7.40206003e-01 1.08677030e+00 5.73868513e-01]] [[-1.49548426e-01 -2.30344608e-02 -6.74053580e-02 ... 2.91434191e-02 1.50951430e-01 2.59102166e-01] [ 1.95320323e-01 -1.17567360e+00 -1.91708058e-01 ... -6.15456164e-01 -2.83194089e+00 -1.83062088e+00] [-9.19815063e-01 -9.05852556e-01 -2.45721743e-01 ... -5.75789928e-01 4.08625185e-01 -2.03692794e-01] [-4.72849177e-04 -8.42533484e-02 2.80095607e-01 ... -3.76201980e-02 -3.87491137e-02 -8.58891010e-02] [-7.46437848e-01 6.44560635e-01 -1.50733578e+00 ... 9.59880650e-01 -7.45859593e-02 1.73637047e-01] [-2.09676337e+00 2.14301395e+00 -6.82373822e-01 ... 2.00516701e+00 6.82836831e-01 -1.19595778e+00]] ... [[-1.65552124e-01 2.55660266e-01 -3.02988797e-01 ... -1.96053356e-01 1.55454844e-01 -1.72984019e-01] [ 8.11632156e-01 -1.80229127e+00 -1.03719079e+00 ... 
1.18773711e+00 -6.93045974e-01 1.63995981e+00] [-1.77471042e+00 4.50779736e-01 2.21163556e-01 ... -9.72685814e-01 3.40474620e-02 -4.23002928e-01] [-1.36110827e-01 8.48086625e-02 -1.62966058e-01 ... -1.95488036e-01 -2.83926167e-02 -1.50203109e-01] [-1.28355336e+00 -1.66841239e-01 1.43763758e-02 ... 9.48414147e-01 1.08181620e+00 1.58517897e+00] [ 6.55211985e-01 1.36255407e+00 -9.19068515e-01 ... 1.06587458e+00 8.66135657e-01 -2.54284143e+00]] [[ 3.37199122e-01 2.37326488e-01 -1.59129024e-01 ... -5.95306680e-02 -1.30704030e-01 2.27472410e-01] [-6.83869302e-01 -5.43242157e-01 -3.07557493e-01 ... -6.72915459e-01 3.94408070e-02 -3.55817646e-01] [-1.18257630e+00 -6.25155807e-01 2.37385154e-01 ... -1.32696724e+00 7.94253469e-01 8.24377477e-01] [ 1.09325834e-01 1.07401557e-01 -1.11071438e-01 ... 1.23701662e-01 1.46931652e-02 4.06754799e-02] [ 6.03227317e-01 5.64247787e-01 1.67595059e-01 ... -1.21534789e+00 -2.39540428e-01 -5.16154706e-01] [ 2.13082016e-01 1.67564559e+00 -9.71089363e-01 ... -1.58058143e+00 -4.19019431e-01 2.17961502e+00]] [[ 1.94758460e-01 -1.05407506e-01 1.11915968e-01 ... -9.47823673e-02 9.97720063e-02 -1.13450088e-01] [ 1.34261306e-02 -4.61341679e-01 9.33737278e-01 ... -4.80285019e-01 -6.70350373e-01 3.01612878e+00] [ 1.01889968e+00 7.94142857e-02 -5.43302417e-01 ... 1.24002767e+00 -4.15318180e-03 1.13002443e+00] [-1.77113384e-01 7.21917152e-02 2.23450005e-01 ... 1.20100386e-01 -3.16342153e-02 -6.08571768e-02] [-3.63325387e-01 7.83096194e-01 1.00986934e+00 ... -4.61096406e-01 3.54848921e-01 -4.67739910e-01] [-1.59867132e+00 -2.04404473e+00 -6.38843417e-01 ... 1.31199622e+00 2.21366549e+00 5.74437976e-01]]]; ov_res: [[[-1.51887208e-01 1.37648910e-01 1.31766945e-01 ... -1.45852372e-01 -8.70576575e-02 -1.66147724e-01] [-8.46709490e-01 -1.22922920e-01 -9.81612504e-01 ... 4.92438704e-01 1.19739473e+00 2.54552269e+00] [-7.00966269e-02 8.82155716e-01 -3.57899457e-01 ... 
1.25338614e+00 -1.43517613e-01 -7.11432219e-01] [-3.33002985e-01 -1.63717985e-01 -6.25596493e-02 ... -1.04922816e-01 6.32369667e-02 1.21645793e-01] [-5.72536588e-01 1.09433091e+00 5.14790237e-01 ... -8.65770042e-01 7.14909077e-01 6.22665465e-01] [ 4.99543667e-01 9.52899814e-01 -1.91394567e-01 ... -2.94356322e+00 3.21279019e-01 -6.19845867e-01]] [[ 9.12607312e-02 -1.78474352e-01 1.38523862e-01 ... 1.34037612e-02 -1.88184574e-01 2.96191990e-01] [-1.91865647e+00 -1.34492833e-02 3.61827046e-01 ... -8.26298833e-01 -8.06007266e-01 1.34838021e+00] [ 1.27142236e-01 -6.04974627e-01 -1.99946836e-02 ... -4.34502661e-01 4.02246296e-01 4.40363377e-01] [-1.76832929e-01 1.92808405e-01 -1.98447496e-01 ... -9.50302705e-02 -2.72542208e-01 6.11321628e-02] [-1.18980145e+00 6.83950260e-02 1.67896581e+00 ... -1.07957971e+00 1.52549446e+00 3.03352952e-01] [ 2.22347903e+00 -1.15256839e-01 1.27410316e+00 ... -7.40206063e-01 1.08677030e+00 5.73868454e-01]] [[-1.49548426e-01 -2.30344608e-02 -6.74053580e-02 ... 2.91434191e-02 1.50951415e-01 2.59102166e-01] [ 1.95320338e-01 -1.17567348e+00 -1.91708043e-01 ... -6.15456104e-01 -2.83194065e+00 -1.83062077e+00] [-9.19815123e-01 -9.05852556e-01 -2.45721757e-01 ... -5.75789928e-01 4.08625215e-01 -2.03692809e-01] [-4.72847838e-04 -8.42533484e-02 2.80095607e-01 ... -3.76201943e-02 -3.87491137e-02 -8.58891085e-02] [-7.46437788e-01 6.44560695e-01 -1.50733578e+00 ... 9.59880710e-01 -7.45859295e-02 1.73637077e-01] [-2.09676361e+00 2.14301395e+00 -6.82373881e-01 ... 2.00516701e+00 6.82836831e-01 -1.19595790e+00]] ... [[-1.65552139e-01 2.55660295e-01 -3.02988827e-01 ... -1.96053371e-01 1.55454859e-01 -1.72984019e-01] [ 8.11632276e-01 -1.80229139e+00 -1.03719091e+00 ... 1.18773723e+00 -6.93045974e-01 1.63995981e+00] [-1.77471066e+00 4.50779796e-01 2.21163571e-01 ... -9.72685874e-01 3.40474546e-02 -4.23002988e-01] [-1.36110827e-01 8.48086774e-02 -1.62966073e-01 ... 
-1.95488051e-01 -2.83926148e-02 -1.50203109e-01] [-1.28355336e+00 -1.66841269e-01 1.43763563e-02 ... 9.48414087e-01 1.08181608e+00 1.58517897e+00] [ 6.55211926e-01 1.36255407e+00 -9.19068515e-01 ... 1.06587458e+00 8.66135657e-01 -2.54284120e+00]] [[ 3.37199152e-01 2.37326503e-01 -1.59129038e-01 ... -5.95306717e-02 -1.30704030e-01 2.27472410e-01] [-6.83869302e-01 -5.43242157e-01 -3.07557493e-01 ... -6.72915399e-01 3.94408032e-02 -3.55817646e-01] [-1.18257654e+00 -6.25155866e-01 2.37385198e-01 ... -1.32696748e+00 7.94253588e-01 8.24377596e-01] [ 1.09325841e-01 1.07401565e-01 -1.11071445e-01 ... 1.23701677e-01 1.46931671e-02 4.06754799e-02] [ 6.03227377e-01 5.64247847e-01 1.67595088e-01 ... -1.21534789e+00 -2.39540428e-01 -5.16154706e-01] [ 2.13081971e-01 1.67564559e+00 -9.71089423e-01 ... -1.58058155e+00 -4.19019490e-01 2.17961502e+00]] [[ 1.94758460e-01 -1.05407514e-01 1.11915968e-01 ... -9.47823599e-02 9.97720137e-02 -1.13450103e-01] [ 1.34261455e-02 -4.61341679e-01 9.33737338e-01 ... -4.80284989e-01 -6.70350313e-01 3.01612854e+00] [ 1.01889968e+00 7.94142783e-02 -5.43302417e-01 ... 1.24002767e+00 -4.15318459e-03 1.13002443e+00] [-1.77113399e-01 7.21917152e-02 2.23450005e-01 ... 1.20100394e-01 -3.16342153e-02 -6.08571805e-02] [-3.63325387e-01 7.83096135e-01 1.00986934e+00 ... -4.61096436e-01 3.54848951e-01 -4.67739940e-01] [-1.59867120e+00 -2.04404473e+00 -6.38843417e-01 ... 1.31199634e+00 2.21366549e+00 5.74437976e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 3} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': True} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- 3 graph(%self : __torch__.test_group_norm.___torch_mangle_4588.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : NoneType = prim::Constant() %4 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %5 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.bias : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.5360 -0.9178 -2.2379 0.5458 -0.8705 -0.8744 [ CPUFloatType{6} ]]() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.8070 0.1287 -0.6789 -0.5044 -1.4871 -2.1722 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, 
%self.n_groups, %3, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) ant with schema: (no schema): Unable to cast Python instance to 
C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4819021 (Squeeze_4819020[0]:i64[], Constant_4818969[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4819021': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4821819 (Squeeze_4821818[0]:i64[], Constant_4821767[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4821819': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4824617 (Squeeze_4824616[0]:i64[], Constant_4824571[0]:i32[]) -> (dynamic[..fw_re: [[[-0.20267828 0.4428228 -1.9884084 ... 0.15212785 -0.72253525 -0.12924297] [-0.81607926 -0.80827445 -0.8079917 ... -1.1882296 -0.8534812 -0.78162736] [-1.6444609 -2.5003045 -2.2535605 ... -1.4881109 -1.9990289 -2.6282134 ] [ 1.341061 1.0487407 -0.3798587 ... 0.41761908 0.7091427 0.07125207] [-0.8412256 -1.2074302 -0.12011618 ... 2.034046 -0.50958174 -2.1949854 ] [-1.8072418 1.0640119 -1.2565732 ... -2.459456 -2.1269102 -6.2302814 ]] [[ 0.3483701 -1.418545 -0.7458392 ... -0.3643289 -1.444187 -0.46133882] [-0.70232785 -0.8547611 -0.97311777 ... -0.94486636 -0.6746496 -0.82587934] [-2.728135 -1.8188703 -2.6310382 ... -2.7981231 -1.8228769 -2.3862274 ] [ 0.7186465 0.12308312 0.12404223 ... 
0.6886446 0.6443397 -0.07104436] [-1.742818 1.4573219 -0.95595664 ... -0.5930403 -0.53645223 0.14544684] [ 0.01198222 2.3007216 0.02949468 ... -2.2406783 -0.85119146 -1.8542666 ]] [[-1.8941405 0.09107731 -0.09417111 ... 0.28352287 -1.3803452 -1.6333451 ] [-0.950879 -0.97461027 -1.0204664 ... -0.8002726 -1.1381869 -0.70729846] [-1.9504504 -3.6007576 -2.72606 ... -2.6167176 -1.2557676 -2.7809997 ] [ 0.5240593 1.3472724 1.411199 ... 0.10985793 -0.7948631 0.25383076] [-1.2825074 -0.42468894 -1.7638636 ... -0.17080192 -2.3854146 -1.369308 ] [ 1.6423614 -2.155239 -3.8298864 ... 1.2540572 -2.929763 -2.568176 ]] ... [[-1.3880553 -0.7923465 -0.3263108 ... -0.39912227 -0.29841247 -0.4063817 ] [-1.0114471 -1.0249573 -0.77315825 ... -1.1097203 -1.0532367 -0.95551294] [-1.3153389 -1.8830024 -2.3546228 ... -2.3849304 -2.4682567 -3.5432658 ] [ 0.4285991 0.32595325 0.03698947 ... 0.70647925 1.3341317 -0.31759602] [-0.28457648 -4.799173 0.2233698 ... 0.34474784 -1.9296107 -0.10740693] [-3.0615594 -0.0353256 0.20450221 ... -0.3867588 -2.9526725 -3.636939 ]] [[-1.5244149 -1.108358 -0.27440658 ... -0.9463746 1.412613 -0.6089586 ] [-0.8572399 -0.8959834 -0.997982 ... -1.0183959 -0.9794073 -0.83426857] [-1.8515007 -2.207298 -2.9447584 ... -2.2036512 -2.3899972 -3.2066436 ] [ 0.27708638 0.28574792 0.5858521 ... 0.44347462 1.4969217 0.44456354] [ 0.35763723 -2.6040754 -2.2965455 ... -1.3482398 -0.10735867 0.68563956] [-0.64609665 -0.01812967 -2.6293771 ... -4.498091 0.54745543 -2.744948 ]] [[-0.49075043 -1.5126141 -1.1728661 ... -1.5547197 0.62521064 0.41866624] [-1.1370265 -1.1885607 -0.8718223 ... -0.9671135 -0.9212326 -0.88390625] [-2.5276604 -2.1741252 -1.1621796 ... -2.5899982 -1.8040752 -2.3366265 ] [-0.04689658 0.46779865 1.3011098 ... 0.23724811 0.53360045 0.8736636 ] [ 0.88842136 -0.5192923 -1.5671636 ... -0.99301404 -2.805458 0.5859301 ] [-1.0236648 -1.3355443 2.9248397 ... -0.9750988 -3.424322 -3.7749734 ]]]; ov_res: [[[-0.20267832 0.44282284 -1.9884084 ... 
0.15212779 -0.72253525 -0.129243 ] [-0.81607926 -0.80827445 -0.8079917 ... -1.1882294 -0.8534812 -0.78162736] [-1.6444612 -2.5003045 -2.2535605 ... -1.4881111 -1.999029 -2.6282134 ] [ 1.341061 1.0487407 -0.37985864 ... 0.41761908 0.7091427 0.0712521 ] [-0.84122556 -1.2074301 -0.12011608 ... 2.0340462 -0.5095817 -2.1949854 ] [-1.8072418 1.0640119 -1.2565732 ... -2.459456 -2.1269102 -6.2302814 ]] [[ 0.34837013 -1.418545 -0.7458392 ... -0.36432886 -1.444187 -0.4613388 ] [-0.7023278 -0.85476106 -0.97311777 ... -0.9448663 -0.67464954 -0.82587934] [-2.728135 -1.8188704 -2.6310382 ... -2.7981231 -1.822877 -2.3862274 ] [ 0.71864647 0.12308311 0.1240422 ... 0.6886446 0.6443397 -0.07104436] [-1.7428178 1.4573218 -0.95595664 ... -0.5930402 -0.53645223 0.14544682] [ 0.01198226 2.3007216 0.02949473 ... -2.2406783 -0.8511914 -1.8542665 ]] [[-1.8941404 0.09107725 -0.09417119 ... 0.28352275 -1.3803451 -1.6333449 ] [-0.9508789 -0.97461027 -1.0204664 ... -0.8002726 -1.1381868 -0.70729846] [-1.9504504 -3.6007576 -2.72606 ... -2.6167176 -1.2557676 -2.7809994 ] [ 0.5240593 1.3472724 1.411199 ... 0.10985789 -0.79486316 0.25383073] [-1.2825073 -0.4246889 -1.7638636 ... -0.1708019 -2.3854146 -1.369308 ] [ 1.6423615 -2.155239 -3.8298864 ... 1.2540573 -2.9297628 -2.568176 ]] ... [[-1.3880553 -0.7923465 -0.3263108 ... -0.39912224 -0.29841247 -0.40638167] [-1.0114471 -1.0249573 -0.77315825 ... -1.1097203 -1.0532367 -0.95551294] [-1.3153389 -1.8830024 -2.354623 ... -2.3849304 -2.4682567 -3.543266 ] [ 0.4285991 0.32595327 0.03698943 ... 0.7064792 1.3341317 -0.317596 ] [-0.2845765 -4.7991734 0.22336985 ... 0.3447478 -1.9296108 -0.10740688] [-3.0615594 -0.03532564 0.2045022 ... -0.3867589 -2.9526725 -3.636939 ]] [[-1.5244149 -1.1083581 -0.2744066 ... -0.9463746 1.4126129 -0.60895866] [-0.8572399 -0.8959834 -0.997982 ... -1.0183959 -0.9794073 -0.83426857] [-1.8515007 -2.207298 -2.9447584 ... -2.2036512 -2.389997 -3.2066436 ] [ 0.27708638 0.2857479 0.5858521 ... 
0.4434746 1.4969217 0.4445635 ] [ 0.35763723 -2.6040757 -2.2965457 ... -1.3482399 -0.10735875 0.68563956] [-0.64609677 -0.01812979 -2.6293771 ... -4.4980907 0.54745525 -2.744948 ]] [[-0.4907505 -1.5126141 -1.1728661 ... -1.5547197 0.62521064 0.41866624] [-1.1370265 -1.1885607 -0.8718223 ... -0.9671135 -0.9212326 -0.88390625] [-2.5276604 -2.174125 -1.1621794 ... -2.5899982 -1.8040751 -2.3366265 ] [-0.04689663 0.46779865 1.30111 ... 0.23724806 0.53360045 0.87366366] [ 0.8884218 -0.51929224 -1.5671636 ... -0.993014 -2.8054583 0.5859303 ] [-1.0236648 -1.3355445 2.92484 ... -0.9750989 -3.4243221 -3.7749734 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 3} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': False} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- 3 graph(%self : __torch__.test_group_norm.___torch_mangle_4590.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %6 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.7732 0.5094 -2.5304 -0.0648 -0.0984 -0.6696 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %self.n_groups, %self.bias, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%5, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %6) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) fw_re: [[[-5.62413633e-01 5.22791088e-01 4.76122826e-01 ... 5.57304919e-01 2.02457711e-01 8.82204413e-01] [ 9.52998996e-01 1.32245171e+00 -2.27828309e-01 ... 
-5.20898998e-01 5.89801431e-01 -5.69626749e-01] [ 9.54496443e-01 -9.40113008e-01 -1.97124798e-02 ... -1.83848426e-01 -1.08406305e+00 -2.37989616e+00] [ 5.07886335e-02 2.34398525e-02 -5.64672872e-02 ... -6.46186247e-02 1.81515403e-02 2.71519106e-02] [ 3.89233455e-02 2.22245287e-02 8.56068283e-02 ... -1.54385909e-01 8.91911015e-02 -8.60352591e-02] [ 1.88634142e-01 1.98848188e-01 8.26733559e-02 ... -1.17339182e+00 1.55944431e+00 7.65074074e-01]] [[ 1.05749106e+00 6.47440493e-01 -2.34424099e-02 ... -4.83093083e-01 -3.77211034e-01 1.39639294e+00] [ 3.09534743e-02 3.06832969e-01 -4.85836059e-01 ... -3.76579553e-01 -4.57349390e-01 5.48113823e-01] [-1.20048487e+00 -6.72279596e-01 1.48396969e+00 ... -4.05313778e+00 4.98481840e-01 2.29370165e+00] [ 1.94205716e-02 3.69779840e-02 9.97037888e-02 ... 6.85825720e-02 4.90014628e-02 -1.07353054e-01] [-1.37179762e-01 9.52601284e-02 -5.67625277e-02 ... -2.51464099e-01 -1.82739630e-01 -1.21709026e-01] [ 6.36778891e-01 -4.46386844e-01 -3.65263194e-01 ... 6.97524726e-01 -4.27736402e-01 1.11711025e-01]] [[ 8.70294034e-01 2.45470740e-02 -1.07632828e+00 ... -4.99358833e-01 1.37738073e+00 -8.10965896e-01] [-5.09188890e-01 8.88618529e-02 -3.44139785e-01 ... 2.47629374e-01 2.11710945e-01 -7.46462166e-01] [ 2.75871921e+00 -1.37617433e+00 -2.32846475e+00 ... 2.51894683e-01 3.95514536e+00 2.92157245e+00] [ 1.96434073e-02 -7.23735895e-03 -1.35643005e-01 ... 4.82218377e-02 -2.78278225e-04 1.40803054e-01] [-4.11947668e-02 5.30149415e-02 1.45047903e-01 ... 2.32344419e-02 1.31925091e-01 6.75257668e-02] [ 5.22740960e-01 -1.82305753e-01 -1.34328818e+00 ... -4.83167470e-02 7.47753441e-01 1.52708292e-01]] ... [[-4.61765416e-02 -3.31908852e-01 -1.25894165e+00 ... 2.72373229e-01 8.61735761e-01 1.14024258e+00] [-4.51788932e-01 1.88887492e-01 -3.86416674e-01 ... 2.77091622e-01 8.43894556e-02 -3.87726724e-01] [ 3.09305263e+00 -2.97135568e+00 4.15411520e+00 ... 8.50977242e-01 -4.97371197e+00 -7.45262206e-01] [ 7.17361420e-02 1.07185975e-01 4.40379828e-02 ... 
2.53574853e-03 7.95629919e-02 9.90077853e-03] [-1.74872860e-01 5.61722182e-02 1.23351000e-01 ... 8.84548351e-02 3.64656895e-02 -4.23485078e-02] [-1.10760045e+00 5.73942900e-01 -1.97045907e-01 ... -4.02482033e-01 -9.26242530e-01 -2.27743797e-02]] [[-1.10303557e+00 6.05390549e-01 -9.09696400e-01 ... 3.97400618e-01 -1.91079482e-01 -5.92437863e-01] [-2.71273524e-01 -1.25706151e-01 8.85184631e-02 ... 3.76189232e-01 1.27508953e-01 2.01874882e-01] [ 9.15042162e-02 -4.52490854e+00 -3.16865826e+00 ... 2.19902873e+00 3.28465056e+00 -2.36888218e+00] [-5.61054647e-02 -8.96036699e-02 -1.27823409e-02 ... 1.18259462e-02 5.87345101e-02 1.37294447e-02] [ 9.01865363e-02 2.45080031e-02 1.69028014e-01 ... -1.51272304e-02 1.58067122e-01 -2.11454973e-01] [-4.71455663e-01 -1.07653069e+00 3.03237885e-01 ... -1.19858354e-01 -4.98764902e-01 4.08873886e-01]] [[-1.74251962e+00 1.13386428e+00 -2.84708172e-01 ... 8.50046426e-02 -1.26833296e+00 -2.57911950e-01] [ 3.26999351e-02 3.27156335e-01 -5.75230122e-01 ... 5.39155602e-01 2.53622055e-01 4.34811205e-01] [ 2.81353498e+00 -7.64383823e-02 -1.86808538e+00 ... -3.03525209e+00 6.27689004e-01 4.33342123e+00] [-6.70263171e-02 -9.02657062e-02 4.67708297e-02 ... -4.43892628e-02 1.52204800e-02 7.00308979e-02] [ 8.00384358e-02 -5.85571676e-02 7.90601000e-02 ... -1.47347853e-01 1.24814874e-02 9.02583897e-02] [-6.33741558e-01 4.75318104e-01 7.96672583e-01 ... 1.52563012e+00 1.02147305e+00 -5.69481432e-01]]]; ov_res: [[[-5.62413633e-01 5.22791088e-01 4.76122797e-01 ... 5.57304859e-01 2.02457696e-01 8.82204413e-01] [ 9.52999055e-01 1.32245159e+00 -2.27828294e-01 ... -5.20898938e-01 5.89801431e-01 -5.69626689e-01] [ 9.54496443e-01 -9.40113008e-01 -1.97125431e-02 ... -1.83848485e-01 -1.08406305e+00 -2.37989640e+00] [ 5.07886335e-02 2.34398525e-02 -5.64672872e-02 ... -6.46186247e-02 1.81515422e-02 2.71519106e-02] [ 3.89233455e-02 2.22245287e-02 8.56068283e-02 ... -1.54385909e-01 8.91911015e-02 -8.60352591e-02] [ 1.88634157e-01 1.98848203e-01 8.26733634e-02 ... 
-1.17339182e+00 1.55944431e+00 7.65074134e-01]] [[ 1.05749083e+00 6.47440434e-01 -2.34424211e-02 ... -4.83093053e-01 -3.77211034e-01 1.39639282e+00] [ 3.09534743e-02 3.06832939e-01 -4.85835999e-01 ... -3.76579523e-01 -4.57349330e-01 5.48113763e-01] [-1.20048475e+00 -6.72279477e-01 1.48396945e+00 ... -4.05313778e+00 4.98481750e-01 2.29370141e+00] [ 1.94205660e-02 3.69779766e-02 9.97037739e-02 ... 6.85825646e-02 4.90014516e-02 -1.07353039e-01] [-1.37179732e-01 9.52601060e-02 -5.67625165e-02 ... -2.51464039e-01 -1.82739586e-01 -1.21709004e-01] [ 6.36778831e-01 -4.46386784e-01 -3.65263104e-01 ... 6.97524548e-01 -4.27736372e-01 1.11710973e-01]] [[ 8.70293915e-01 2.45470647e-02 -1.07632816e+00 ... -4.99358803e-01 1.37738049e+00 -8.10965836e-01] [-5.09188831e-01 8.88618454e-02 -3.44139755e-01 ... 2.47629359e-01 2.11710930e-01 -7.46462047e-01] [ 2.75871873e+00 -1.37617421e+00 -2.32846451e+00 ... 2.51894653e-01 3.95514488e+00 2.92157197e+00] [ 1.96434073e-02 -7.23735942e-03 -1.35643020e-01 ... 4.82218415e-02 -2.78278836e-04 1.40803054e-01] [-4.11947668e-02 5.30149415e-02 1.45047903e-01 ... 2.32344419e-02 1.31925076e-01 6.75257668e-02] [ 5.22740960e-01 -1.82305753e-01 -1.34328806e+00 ... -4.83167507e-02 7.47753501e-01 1.52708292e-01]] ... [[-4.61765341e-02 -3.31908822e-01 -1.25894165e+00 ... 2.72373229e-01 8.61735761e-01 1.14024258e+00] [-4.51788962e-01 1.88887507e-01 -3.86416733e-01 ... 2.77091622e-01 8.43894631e-02 -3.87726754e-01] [ 3.09305263e+00 -2.97135592e+00 4.15411520e+00 ... 8.50977242e-01 -4.97371197e+00 -7.45262206e-01] [ 7.17361420e-02 1.07185982e-01 4.40379865e-02 ... 2.53574899e-03 7.95629993e-02 9.90078039e-03] [-1.74872860e-01 5.61722182e-02 1.23351008e-01 ... 8.84548351e-02 3.64656895e-02 -4.23485115e-02] [-1.10760045e+00 5.73942900e-01 -1.97045892e-01 ... -4.02482003e-01 -9.26242471e-01 -2.27743741e-02]] [[-1.10303557e+00 6.05390489e-01 -9.09696400e-01 ... 3.97400588e-01 -1.91079497e-01 -5.92437923e-01] [-2.71273494e-01 -1.25706136e-01 8.85184780e-02 ... 
3.76189262e-01 1.27508953e-01 2.01874897e-01] [ 9.15041566e-02 -4.52490854e+00 -3.16865802e+00 ... 2.19902849e+00 3.28465056e+00 -2.36888218e+00] [-5.61054535e-02 -8.96036550e-02 -1.27823390e-02 ... 1.18259452e-02 5.87345026e-02 1.37294428e-02] [ 9.01865289e-02 2.45080031e-02 1.69027999e-01 ... -1.51272286e-02 1.58067122e-01 -2.11454958e-01] [-4.71455604e-01 -1.07653058e+00 3.03237885e-01 ... -1.19858347e-01 -4.98764902e-01 4.08873826e-01]] [[-1.74251962e+00 1.13386428e+00 -2.84708142e-01 ... 8.50046501e-02 -1.26833296e+00 -2.57911921e-01] [ 3.26999277e-02 3.27156305e-01 -5.75230062e-01 ... 5.39155543e-01 2.53622025e-01 4.34811175e-01] [ 2.81353498e+00 -7.64383376e-02 -1.86808538e+00 ... -3.03525233e+00 6.27689064e-01 4.33342075e+00] [-6.70263171e-02 -9.02656913e-02 4.67708297e-02 ... -4.43892591e-02 1.52204810e-02 7.00309053e-02] [ 8.00384358e-02 -5.85571602e-02 7.90601000e-02 ... -1.47347838e-01 1.24814902e-02 9.02583897e-02] [-6.33741558e-01 4.75318134e-01 7.96672702e-01 ... 1.52563024e+00 1.02147317e+00 -5.69481373e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 3} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': True, 'eps': 0.0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- 3 graph(%self : __torch__.test_group_norm.___torch_mangle_4592.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : NoneType = prim::Constant() %4 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %5 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=0.]() %self.bias : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.3849 0.0057 -0.7531 -0.8508 -1.6041 -0.1014 [ CPUFloatType{6} ]]() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.5180 0.5662 0.7075 -1.2058 -0.0777 1.9806 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, %self.n_groups, %3, %5) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) fw_re: [[[-7.29680717e-01 3.31250399e-01 2.74106532e-01 ... 
-4.60683852e-01 -1.13941634e+00 1.01567030e-01] [ 7.06175089e-01 7.75793970e-01 -6.42250836e-01 ... 6.62549794e-01 3.94637853e-01 -3.63795370e-01] [-5.00283360e-01 -1.27655923e+00 -1.29963768e+00 ... -8.30794513e-01 -7.20395744e-01 -7.65137553e-01] [ 4.06683832e-01 -2.94393277e+00 9.21894312e-01 ... -1.06681466e+00 5.16900361e-01 1.22955590e-01] [-1.63676155e+00 -1.66028273e+00 -1.58596528e+00 ... -1.50081778e+00 -1.74289763e+00 -1.55038273e+00] [ 7.94830918e-01 2.21281910e+00 1.77583665e-01 ... -1.22953467e-01 -1.25081408e+00 2.39601469e+00]] [[-8.21376070e-02 -7.14763820e-01 1.71220258e-01 ... -5.32422841e-01 -1.65085658e-01 -4.44582058e-03] [-1.56252101e-01 -3.83365005e-01 -9.23688531e-01 ... -6.31569637e-05 -6.30141318e-01 8.35246146e-02] [-1.60931480e+00 -1.32483578e+00 -1.01273453e+00 ... 2.64347438e-02 -1.11164320e+00 8.46515149e-02] [-1.38030839e+00 2.24768102e-01 -2.06771851e+00 ... -8.03468645e-01 8.37702751e-01 -1.70315230e+00] [-1.52364373e+00 -1.55826497e+00 -1.55016267e+00 ... -1.61567295e+00 -1.53717959e+00 -1.53136802e+00] [ 2.86000228e+00 1.24509716e+00 -1.20764816e+00 ... 8.80435646e-01 2.60443068e+00 8.81411552e-01]] [[-9.62477624e-01 -2.99180329e-01 -3.39229941e-01 ... -2.47294635e-01 -7.04851389e-01 -8.73163402e-01] [-2.46650949e-01 1.03525138e+00 5.32230616e-01 ... -1.02353759e-01 -1.59075379e-01 1.56326964e-01] [-1.76970327e+00 2.01149330e-01 -3.68014246e-01 ... -2.93907672e-01 -1.40306270e+00 -5.21147072e-01] [-9.10066187e-01 1.57710540e+00 8.78856838e-01 ... -8.59551013e-01 -7.80063808e-01 -9.31613386e-01] [-1.64847851e+00 -1.71959615e+00 -1.54423881e+00 ... -1.59922099e+00 -1.39491248e+00 -1.72646630e+00] [ 9.84084234e-02 4.20731366e-01 -2.75008059e+00 ... -4.22729820e-01 -2.74796009e-01 2.40920997e+00]] ... [[ 9.96988639e-02 -1.03649199e+00 -1.04602027e+00 ... -4.17235583e-01 -9.58137512e-01 -7.91207373e-01] [ 5.68565667e-01 7.34738588e-01 1.46372527e-01 ... 
-5.62764525e-01 2.94373065e-01 -2.54276186e-01] [-1.44009864e+00 -4.58099008e-01 5.02394497e-01 ... -1.39285612e+00 -8.07084799e-01 -5.25513589e-01] [ 2.15401605e-01 3.25758904e-01 3.13061059e-01 ... -1.06791818e+00 8.45718682e-01 -1.03267157e+00] [-1.53309727e+00 -1.56970537e+00 -1.64675415e+00 ... -1.58359993e+00 -1.58794332e+00 -1.59621549e+00] [-2.81771612e+00 -1.28863811e+00 1.43875992e+00 ... -4.03103065e+00 -1.38570201e+00 -3.02895576e-01]] [[-3.87696028e-01 -2.40029201e-01 -1.03025639e+00 ... -1.33812070e-01 -9.84165013e-01 1.46027049e-02] [ 1.26950157e+00 1.50182724e-01 -4.74887669e-01 ... -1.40254781e-01 -5.04582047e-01 4.56486613e-01] [ 2.54446208e-01 -1.12116086e+00 -1.17807102e+00 ... -1.75630641e+00 -1.86203316e-01 1.97083145e-01] [-2.20132780e+00 -1.38766789e+00 -2.30514026e+00 ... -1.76018834e+00 -1.71248114e+00 -2.09552932e+00] [-1.49119043e+00 -1.62628961e+00 -1.62653172e+00 ... -1.53204536e+00 -1.55341864e+00 -1.52471125e+00] [-3.97987336e-01 2.23674130e+00 -3.40286270e-02 ... -3.23801011e-01 -1.31663775e+00 1.14005849e-01]] [[-5.07092059e-01 1.04781963e-01 -1.15288734e-01 ... -6.28647581e-02 -9.97204244e-01 -6.31459117e-01] [ 1.84498981e-01 -3.18490684e-01 -4.10902888e-01 ... 5.82467198e-01 3.35350692e-01 -5.17304063e-01] [-8.52211475e-01 -1.07534885e+00 -8.62134635e-01 ... -3.68820459e-01 -1.39976394e+00 -4.73560721e-01] [-1.75207758e+00 6.42440915e-01 -8.27622354e-01 ... -2.20802927e+00 -1.26693702e+00 -2.38999176e+00] [-1.55776405e+00 -1.69289541e+00 -1.63980317e+00 ... -1.73141026e+00 -1.63086987e+00 -1.63691962e+00] [-1.94817483e+00 1.13290226e+00 -2.75370574e+00 ... 2.16754526e-01 3.31309342e+00 8.47388029e-01]]]; ov_res: [[[-7.29680657e-01 3.31250280e-01 2.74106383e-01 ... -4.60683852e-01 -1.13941622e+00 1.01566941e-01] [ 7.06174970e-01 7.75793850e-01 -6.42250776e-01 ... 6.62549675e-01 3.94637793e-01 -3.63795340e-01] [-5.00283420e-01 -1.27655911e+00 -1.29963756e+00 ... 
-8.30794573e-01 -7.20395744e-01 -7.65137553e-01] [ 4.06683862e-01 -2.94393277e+00 9.21894372e-01 ... -1.06681466e+00 5.16900420e-01 1.22955598e-01] [-1.63676155e+00 -1.66028273e+00 -1.58596528e+00 ... -1.50081778e+00 -1.74289763e+00 -1.55038273e+00] [ 7.94830918e-01 2.21281910e+00 1.77583709e-01 ... -1.22953467e-01 -1.25081420e+00 2.39601469e+00]] [[-8.21376368e-02 -7.14763820e-01 1.71220243e-01 ... -5.32422900e-01 -1.65085688e-01 -4.44585364e-03] [-1.56252116e-01 -3.83365005e-01 -9.23688591e-01 ... -6.31694202e-05 -6.30141377e-01 8.35246071e-02] [-1.60931468e+00 -1.32483578e+00 -1.01273441e+00 ... 2.64347550e-02 -1.11164320e+00 8.46514776e-02] [-1.38030839e+00 2.24768206e-01 -2.06771851e+00 ... -8.03468645e-01 8.37702751e-01 -1.70315242e+00] [-1.52364385e+00 -1.55826497e+00 -1.55016267e+00 ... -1.61567307e+00 -1.53717971e+00 -1.53136802e+00] [ 2.86000252e+00 1.24509728e+00 -1.20764828e+00 ... 8.80435705e-01 2.60443115e+00 8.81411612e-01]] [[-9.62477565e-01 -2.99180329e-01 -3.39229912e-01 ... -2.47294635e-01 -7.04851389e-01 -8.73163402e-01] [-2.46650934e-01 1.03525138e+00 5.32230616e-01 ... -1.02353752e-01 -1.59075364e-01 1.56326979e-01] [-1.76970327e+00 2.01149344e-01 -3.68014246e-01 ... -2.93907702e-01 -1.40306270e+00 -5.21147132e-01] [-9.10066187e-01 1.57710540e+00 8.78856719e-01 ... -8.59551013e-01 -7.80063808e-01 -9.31613386e-01] [-1.64847851e+00 -1.71959615e+00 -1.54423881e+00 ... -1.59922099e+00 -1.39491248e+00 -1.72646630e+00] [ 9.84084606e-02 4.20731395e-01 -2.75008059e+00 ... -4.22729760e-01 -2.74795920e-01 2.40920997e+00]] ... [[ 9.96987969e-02 -1.03649187e+00 -1.04602015e+00 ... -4.17235583e-01 -9.58137453e-01 -7.91207254e-01] [ 5.68565607e-01 7.34738588e-01 1.46372512e-01 ... -5.62764466e-01 2.94373065e-01 -2.54276156e-01] [-1.44009852e+00 -4.58099037e-01 5.02394378e-01 ... -1.39285600e+00 -8.07084799e-01 -5.25513589e-01] [ 2.15401381e-01 3.25758696e-01 3.13060850e-01 ... 
-1.06791818e+00 8.45718384e-01 -1.03267145e+00] [-1.53309727e+00 -1.56970537e+00 -1.64675415e+00 ... -1.58359993e+00 -1.58794332e+00 -1.59621549e+00] [-2.81771588e+00 -1.28863800e+00 1.43875980e+00 ... -4.03103018e+00 -1.38570189e+00 -3.02895546e-01]] [[-3.87696028e-01 -2.40029216e-01 -1.03025639e+00 ... -1.33812055e-01 -9.84165072e-01 1.46026891e-02] [ 1.26950157e+00 1.50182724e-01 -4.74887669e-01 ... -1.40254781e-01 -5.04582107e-01 4.56486613e-01] [ 2.54446179e-01 -1.12116086e+00 -1.17807090e+00 ... -1.75630641e+00 -1.86203346e-01 1.97083145e-01] [-2.20132780e+00 -1.38766778e+00 -2.30514002e+00 ... -1.76018822e+00 -1.71248102e+00 -2.09552908e+00] [-1.49119043e+00 -1.62628961e+00 -1.62653160e+00 ... -1.53204525e+00 -1.55341852e+00 -1.52471125e+00] [-3.97987306e-01 2.23674083e+00 -3.40286419e-02 ... -3.23800981e-01 -1.31663764e+00 1.14005812e-01]] [[-5.07092059e-01 1.04781918e-01 -1.15288764e-01 ... -6.28648028e-02 -9.97204185e-01 -6.31459117e-01] [ 1.84498936e-01 -3.18490684e-01 -4.10902888e-01 ... 5.82467079e-01 3.35350662e-01 -5.17304063e-01] [-8.52211475e-01 -1.07534885e+00 -8.62134695e-01 ... -3.68820548e-01 -1.39976394e+00 -4.73560810e-01] [-1.75207758e+00 6.42440856e-01 -8.27622414e-01 ... -2.20802927e+00 -1.26693714e+00 -2.38999176e+00] [-1.55776417e+00 -1.69289553e+00 -1.63980329e+00 ... -1.73141026e+00 -1.63086998e+00 -1.63691962e+00] [-1.94817483e+00 1.13290226e+00 -2.75370574e+00 ... 2.16754541e-01 3.31309366e+00 8.47388029e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 3} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': False, 'eps': 0.0001} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- 3 graph(%self : __torch__.test_group_norm.___torch_mangle_4594.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %6 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=0.0001]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.5555 0.3727 0.6223 0.5514 0.3029 0.6489 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %self.n_groups, %self.bias, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%5, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %6) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) .])' with friendly_name 'Subtract_4824617': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4827557 (Squeeze_4827556[0]:i64[], Constant_4827511[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4827557': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4830355 (Squeeze_4830354[0]:i64[], Constant_4830309[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4830355': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_efw_re: [[[ 1.06995487e+00 7.83533037e-01 1.09092951e+00 ... -1.25017750e+00 5.47562361e-01 -4.40095328e-02] [ 5.77989295e-02 1.10802047e-01 3.05633783e-01 ... -1.60289362e-01 -4.96073477e-02 8.72924104e-02] [ 4.07043725e-01 -1.01159918e+00 2.26422716e-02 ... 9.73237872e-01 -4.69273090e-01 4.62712884e-01] [-6.67465210e-01 5.39313793e-01 -2.40030438e-01 ... -6.09998226e-01 -4.61585909e-01 -3.73867638e-02] [ 9.91084695e-01 -2.27184981e-01 -1.70781299e-01 ... 1.61053855e-02 -1.59987643e-01 -2.15085119e-01] [-6.46474838e-01 1.70216918e-01 1.30565539e-01 ... -2.36715063e-01 6.52005911e-01 2.06420228e-01]] [[-1.12945902e+00 -5.97226322e-01 4.57838655e-01 ... -2.94714361e-01 -4.69092220e-01 -1.01148927e+00] [ 9.59083736e-02 -4.28049415e-01 3.83663207e-01 ... -4.73618925e-01 -5.91021061e-01 -4.51889485e-01] [-6.85166001e-01 -6.97519720e-01 -4.88824733e-02 ... 
4.78134602e-01 8.15128721e-03 1.23739190e-01] [ 6.51535153e-01 1.15367305e+00 -3.43764633e-01 ... 3.99885893e-01 -3.28985900e-01 6.83745384e-01] [ 7.09413961e-02 -6.97331071e-01 -1.12432629e-01 ... 1.55240238e-01 -3.49282652e-01 -1.50017083e-01] [-2.17328787e-01 1.46821648e-01 -2.46868879e-01 ... 6.81269884e-01 -7.91189909e-01 -5.30344069e-01]] [[ 1.09724438e+00 -2.06005067e-01 6.91323459e-01 ... -2.39237979e-01 3.77583593e-01 -1.10975005e-01] [ 2.27512509e-01 -5.45882918e-02 -5.83580792e-01 ... 8.58526826e-01 1.00992143e-01 -3.98976915e-02] [-9.39076185e-01 6.24299467e-01 -1.02693439e-01 ... 1.97723389e-01 -9.84591842e-02 8.09595346e-01] [-1.78967118e-01 -5.24387717e-01 -1.47585467e-01 ... -8.70457888e-01 -5.28874755e-01 -1.18344404e-01] [-1.89230680e-01 -3.97018045e-01 -2.58239180e-01 ... -3.78752202e-02 1.09309934e-01 -1.20468937e-01] [ 7.35146642e-01 6.26881123e-01 1.28678584e+00 ... -1.57816574e-01 4.68842357e-01 -7.11394370e-01]] ... [[ 2.53053784e-01 1.99107736e-01 -6.08406842e-01 ... -1.61975503e-01 -3.98881435e-01 -2.41422728e-01] [-6.28076419e-02 5.01486242e-01 1.62566584e-02 ... -3.92912209e-01 -5.09180188e-01 -9.48572233e-02] [-7.65278816e-01 -4.28908527e-01 5.23726523e-01 ... -1.29080638e-01 1.29231095e+00 5.77396631e-01] [ 2.94222236e-01 5.69715798e-01 -9.42607820e-02 ... 3.09493124e-01 -3.93071800e-01 -4.56268370e-01] [ 3.07841122e-01 -1.54133752e-01 1.30859628e-01 ... -2.67154915e-04 -4.00587112e-01 -4.00084376e-01] [-2.17754364e-01 -7.53274798e-01 -8.46966922e-01 ... -1.19080894e-01 2.14885414e-01 1.11280036e+00]] [[-6.39411628e-01 -1.00778472e+00 -4.89242941e-01 ... 1.47597864e-02 -2.36154661e-01 3.47390831e-01] [ 3.30240548e-01 4.01763022e-02 -3.93317729e-01 ... 3.57770771e-01 -7.63349116e-01 1.28065616e-01] [ 8.46146226e-01 -3.28806758e-01 6.87889516e-01 ... 6.00503564e-01 -7.41583407e-01 -9.52432156e-01] [ 2.36164927e-01 -1.43793568e-01 9.18264806e-01 ... 
5.15028119e-01 4.15184051e-01 8.07022527e-02] [ 4.25781101e-01 1.53262988e-01 -4.70288545e-01 ... -2.89004117e-01 -1.07639626e-01 -6.81052208e-02] [ 1.42543744e-02 -3.70935500e-01 -6.35493577e-01 ... 3.98742884e-01 9.95244563e-01 -3.28684151e-01]] [[-6.18932664e-01 1.13097951e-01 3.09220552e-01 ... -1.34693831e-01 -2.17356533e-01 5.99905476e-02] [-4.15310776e-03 -1.19873129e-01 -4.68005121e-01 ... 6.31502569e-01 -4.96262848e-01 1.44815624e-01] [-3.87524307e-01 1.97144430e-02 -2.97150791e-01 ... 6.43647969e-01 -1.86258659e-01 -7.11212277e-01] [-3.54180932e-01 1.41509727e-01 -8.87073055e-02 ... 4.68434721e-01 2.43453294e-01 -3.36916476e-01] [-6.46945015e-02 6.59984410e-01 -3.91749829e-01 ... -4.47982326e-02 -2.84403920e-01 -5.53390443e-01] [-5.06020524e-02 -7.41203547e-01 -7.05315590e-01 ... 1.32716405e+00 5.45543015e-01 1.26367971e-01]]]; ov_res: [[[ 1.06995499e+00 7.83533096e-01 1.09092951e+00 ... -1.25017750e+00 5.47562420e-01 -4.40095142e-02] [ 5.77989183e-02 1.10802040e-01 3.05633783e-01 ... -1.60289392e-01 -4.96073663e-02 8.72923955e-02] [ 4.07043695e-01 -1.01159918e+00 2.26422455e-02 ... 9.73237872e-01 -4.69273120e-01 4.62712884e-01] [-6.67465270e-01 5.39313793e-01 -2.40030438e-01 ... -6.09998226e-01 -4.61585939e-01 -3.73867601e-02] [ 9.91084695e-01 -2.27184981e-01 -1.70781299e-01 ... 1.61053855e-02 -1.59987628e-01 -2.15085104e-01] [-6.46474898e-01 1.70216918e-01 1.30565554e-01 ... -2.36715078e-01 6.52005970e-01 2.06420228e-01]] [[-1.12945890e+00 -5.97226322e-01 4.57838565e-01 ... -2.94714361e-01 -4.69092190e-01 -1.01148927e+00] [ 9.59083661e-02 -4.28049356e-01 3.83663207e-01 ... -4.73618865e-01 -5.91021001e-01 -4.51889455e-01] [-6.85165942e-01 -6.97519660e-01 -4.88824472e-02 ... 4.78134573e-01 8.15130863e-03 1.23739205e-01] [ 6.51535094e-01 1.15367293e+00 -3.43764633e-01 ... 3.99885893e-01 -3.28985929e-01 6.83745384e-01] [ 7.09413961e-02 -6.97331011e-01 -1.12432629e-01 ... 
1.55240238e-01 -3.49282652e-01 -1.50017083e-01] [-2.17328787e-01 1.46821633e-01 -2.46868879e-01 ... 6.81269884e-01 -7.91189909e-01 -5.30344069e-01]] [[ 1.09724426e+00 -2.06005037e-01 6.91323459e-01 ... -2.39237934e-01 3.77583563e-01 -1.10974982e-01] [ 2.27512494e-01 -5.45882955e-02 -5.83580732e-01 ... 8.58526647e-01 1.00992128e-01 -3.98976989e-02] [-9.39076126e-01 6.24299407e-01 -1.02693446e-01 ... 1.97723359e-01 -9.84591916e-02 8.09595287e-01] [-1.78967133e-01 -5.24387717e-01 -1.47585452e-01 ... -8.70457888e-01 -5.28874755e-01 -1.18344404e-01] [-1.89230695e-01 -3.97018045e-01 -2.58239180e-01 ... -3.78752239e-02 1.09309927e-01 -1.20468952e-01] [ 7.35146701e-01 6.26881123e-01 1.28678584e+00 ... -1.57816589e-01 4.68842387e-01 -7.11394429e-01]] ... [[ 2.53053784e-01 1.99107736e-01 -6.08406901e-01 ... -1.61975503e-01 -3.98881435e-01 -2.41422728e-01] [-6.28076345e-02 5.01486301e-01 1.62566621e-02 ... -3.92912209e-01 -5.09180129e-01 -9.48572233e-02] [-7.65278757e-01 -4.28908527e-01 5.23726523e-01 ... -1.29080623e-01 1.29231095e+00 5.77396631e-01] [ 2.94222176e-01 5.69715679e-01 -9.42608565e-02 ... 3.09493065e-01 -3.93071860e-01 -4.56268430e-01] [ 3.07841092e-01 -1.54133797e-01 1.30859599e-01 ... -2.67193565e-04 -4.00587171e-01 -4.00084406e-01] [-2.17754424e-01 -7.53274858e-01 -8.46967041e-01 ... -1.19080968e-01 2.14885339e-01 1.11280024e+00]] [[-6.39411509e-01 -1.00778461e+00 -4.89242852e-01 ... 1.47597659e-02 -2.36154631e-01 3.47390771e-01] [ 3.30240488e-01 4.01763022e-02 -3.93317699e-01 ... 3.57770741e-01 -7.63349056e-01 1.28065616e-01] [ 8.46146166e-01 -3.28806758e-01 6.87889457e-01 ... 6.00503504e-01 -7.41583347e-01 -9.52432036e-01] [ 2.36164942e-01 -1.43793568e-01 9.18264866e-01 ... 5.15028179e-01 4.15184081e-01 8.07022750e-02] [ 4.25781131e-01 1.53263003e-01 -4.70288545e-01 ... -2.89004117e-01 -1.07639618e-01 -6.81052208e-02] [ 1.42543893e-02 -3.70935470e-01 -6.35493577e-01 ... 
3.98742944e-01 9.95244622e-01 -3.28684121e-01]] [[-6.18932724e-01 1.13097936e-01 3.09220552e-01 ... -1.34693846e-01 -2.17356548e-01 5.99905290e-02] [-4.15309845e-03 -1.19873114e-01 -4.68005121e-01 ... 6.31502628e-01 -4.96262819e-01 1.44815639e-01] [-3.87524337e-01 1.97144561e-02 -2.97150791e-01 ... 6.43647969e-01 -1.86258659e-01 -7.11212218e-01] [-3.54180932e-01 1.41509682e-01 -8.87073353e-02 ... 4.68434662e-01 2.43453264e-01 -3.36916506e-01] [-6.46945164e-02 6.59984350e-01 -3.91749829e-01 ... -4.47982475e-02 -2.84403950e-01 -5.53390443e-01] [-5.06020896e-02 -7.41203547e-01 -7.05315590e-01 ... 1.32716393e+00 5.45543015e-01 1.26367927e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 4} - params:{'n_groups': 3} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- 4 graph(%self : __torch__.test_group_norm.___torch_mangle_4596.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.weight : NoneType = prim::Constant() %self.n_groups : int = prim::Constant[value=3]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %5, %self.weight, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%6, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.weight, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) fw_re: [[[[-6.07496142e-01 7.44553924e-01 -4.93952513e-01 ... -6.42431259e-01 -5.04176557e-01 1.12712181e+00] [ 4.96969581e-01 -5.16786039e-01 -1.48856175e+00 ... 
8.75306785e-01 -5.09265661e-01 1.52477428e-01] [ 1.25300717e+00 -1.36138296e+00 -4.49931443e-01 ... 4.50821221e-01 -4.32311565e-01 -1.21876347e+00] ... [-1.66640490e-01 1.69363427e+00 1.59087956e+00 ... -3.16928357e-01 -9.11916792e-01 -3.96689475e-01] [-7.05328137e-02 -5.70169151e-01 1.79215455e+00 ... 1.13165462e+00 -7.99456716e-01 -2.23747656e-01] [-1.93359005e+00 -4.27709401e-01 -3.37177008e-01 ... 1.01813428e-01 3.95442158e-01 -2.91181773e-01]] [[-6.67525351e-01 3.97923201e-01 -9.32073891e-01 ... 9.56510067e-01 -4.85441349e-02 -1.85955554e-01] [ 1.48907506e+00 1.33359420e+00 -9.53108013e-01 ... 5.48573315e-01 1.51613939e+00 4.39261258e-01] [-2.13025379e+00 1.13798881e+00 -1.61380649e+00 ... -1.92173398e+00 1.08127081e+00 -7.67997146e-01] ... [ 1.65927899e+00 5.88012338e-01 -1.50872648e+00 ... 7.50951827e-01 -5.43085217e-01 -1.55928206e+00] [-7.29897499e-01 -8.77472460e-01 -1.14102289e-01 ... 3.03525981e-02 5.85888326e-01 -7.19616175e-01] [-4.38638449e-01 -4.34031308e-01 3.16757470e-01 ... -1.97782075e+00 -8.84838641e-01 2.83539265e-01]] [[ 7.04528034e-01 -6.58287466e-01 3.80925119e-01 ... -2.80225128e-01 7.60069251e-01 4.38593805e-01] [-6.99701548e-01 -6.07214212e-01 -5.39742351e-01 ... -4.21612203e-01 4.44302797e-01 1.40644383e+00] [-3.97843540e-01 -1.25934803e+00 -8.57948780e-01 ... -2.67564923e-01 8.47564161e-01 -1.18430662e+00] ... [-1.51254237e-01 2.04912639e+00 -6.33420825e-01 ... -9.17511344e-01 4.28537041e-01 3.40524256e-01] [-1.37675285e+00 -8.56539831e-02 -1.80799052e-01 ... 4.92848068e-01 -1.36851087e-01 -3.59414011e-01] [ 1.41267681e+00 -8.86567116e-01 -4.20597732e-01 ... -1.52658939e+00 -2.11449698e-01 7.39010632e-01]] [[-1.25864697e+00 -6.03080571e-01 -7.64142498e-02 ... 1.28699350e+00 1.48128128e+00 6.56693101e-01] [-1.79092467e-01 -1.74513984e+00 -1.35062265e+00 ... 4.75725979e-01 1.25659004e-01 -1.52002841e-01] [ 9.80749547e-01 2.78549552e-01 -8.92120779e-01 ... 7.10651100e-01 1.38062790e-01 -5.46154320e-01] ... 
[-3.52662444e-01 3.57528269e-01 8.46279442e-01 ... -1.61188555e+00 5.51061332e-01 -4.58678722e-01] [ 3.39794219e-01 8.82311687e-02 1.63886875e-01 ... 7.58740902e-02 -6.39652789e-01 -3.57750356e-01] [ 8.70192111e-01 8.95315707e-01 4.27348584e-01 ... -1.64403689e+00 9.07088339e-01 1.02146411e+00]] [[ 1.80030060e+00 -1.33100009e+00 1.52064526e+00 ... -1.86315107e+00 -5.79516232e-01 7.50803709e-01] [-2.38250804e+00 2.46429279e-01 1.47909296e+00 ... -1.06015730e+00 7.68919408e-01 9.52243388e-01] [ 2.82147557e-01 -7.40537465e-01 1.03380942e+00 ... 1.05221856e+00 -1.00664544e+00 1.66080907e-01] ... [ 2.46543717e+00 -1.27921581e+00 8.80311251e-01 ... -4.67154346e-02 -5.56123078e-01 4.20568407e-01] [-1.32093942e+00 -1.30919576e+00 -5.76699436e-01 ... -1.01515390e-01 -7.32981265e-01 5.90097368e-01] [-4.88239288e-01 8.18930387e-01 3.02062303e-01 ... 1.52191365e+00 -2.01714486e-01 -3.65527004e-01]] [[ 1.36709774e+00 8.97041634e-02 6.41068071e-02 ... -6.83972657e-01 -6.77900612e-01 -1.95023584e+00] [-4.65286732e-01 -2.15895995e-01 -9.62514877e-01 ... 8.84654298e-02 -1.25786746e+00 2.26438176e-04] [ 5.09275198e-01 -7.11887300e-01 -4.28461522e-01 ... -7.60579765e-01 -2.69770026e-01 -6.40370011e-01] ... [-2.50909001e-01 1.41357410e+00 -1.10994327e+00 ... 5.81833243e-01 -2.96463132e-01 -5.97184062e-01] [ 8.06029975e-01 1.49937654e+00 -3.93457055e-01 ... 2.01544344e-01 1.27440453e+00 -2.29617834e-01] [ 8.60630333e-01 -1.03403199e+00 -9.12734866e-02 ... 8.42689276e-01 1.03860235e+00 -1.04027176e+00]]] [[[-6.30220234e-01 -4.31123316e-01 -5.79957247e-01 ... 7.75064304e-02 -6.17749751e-01 -1.40544808e+00] [-2.29126859e+00 4.51002449e-01 7.18122244e-01 ... 4.94451731e-01 -8.29293370e-01 -5.35167217e-01] [ 2.83326238e-01 1.19338751e+00 7.98671186e-01 ... -5.64932108e-01 2.86186129e-01 -3.72922271e-01] ... [-8.26088250e-01 1.33124352e+00 -8.42255175e-01 ... 7.35496640e-01 -9.93567526e-01 -1.06539464e+00] [ 1.38516653e+00 1.41332352e+00 -4.37929258e-02 ... 
4.48530912e-01 -1.66504121e+00 1.41472265e-01] [-3.34861487e-01 2.67176533e+00 -2.41970226e-01 ... 1.47695804e+00 1.65381029e-01 -1.57399309e+00]] [[-2.61781037e-01 1.38025939e-01 -9.50605273e-01 ... -2.13444448e+00 -7.89475858e-01 5.29625714e-01] [-1.28171813e+00 7.10400879e-01 -6.04409814e-01 ... -7.05372810e-01 4.44760025e-01 -1.30374515e+00] [-5.21916866e-01 -3.06349128e-01 -1.42575711e-01 ... 1.89740336e+00 1.30019999e+00 -1.61301568e-01] ... [ 9.47250426e-01 -1.11222076e+00 -1.27449915e-01 ... 1.77859385e-02 9.30707455e-01 -2.75060356e-01] [ 3.95854384e-01 -1.12570763e+00 -1.64633229e-01 ... 1.04462385e+00 -3.13736260e-01 6.32564306e-01] [-5.50841153e-01 -1.01472127e+00 2.35405251e-01 ... -6.71248376e-01 -1.67524874e-01 -6.18993759e-01]] [[ 4.53714490e-01 -6.35865510e-01 -6.39540493e-01 ... 1.04046047e+00 9.73391533e-01 9.54772055e-01] [-3.17912102e-01 -2.06695151e+00 1.70880079e+00 ... 4.76150721e-01 -1.22349977e+00 1.02084601e+00] [ 2.61601663e+00 -4.59265411e-02 1.27122676e+00 ... -5.94034672e-01 2.02612150e-02 2.52702296e-01] ... [-2.27977705e+00 2.43377924e+00 -3.45352829e-01 ... 1.51146695e-01 -1.04782850e-01 6.83668017e-01] [-1.01106250e+00 -4.65138644e-01 -2.39952922e+00 ... 1.98421192e+00 1.28548133e+00 -1.45306915e-01] [-1.52316004e-01 1.16311145e+00 1.25718728e-01 ... 8.31271470e-01 1.06771982e+00 -1.05214584e+00]] [[-1.06113359e-01 -1.09050477e+00 -1.03453887e+00 ... 6.96327686e-01 -1.38288999e+00 -4.63938236e-01] [ 1.15140307e+00 2.65729761e+00 4.82770115e-01 ... 1.16606966e-01 1.24079359e+00 -3.28304153e-03] [ 9.68358219e-01 -7.30615377e-01 5.04206061e-01 ... 1.44804168e+00 -4.79647636e-01 -1.30750072e+00] ... [-9.09757018e-02 -4.21674885e-02 -1.39597684e-01 ... -2.18748212e+00 -2.25781751e+00 2.61391103e-01] [-4.58173722e-01 -1.47883683e-01 1.52668941e+00 ... 1.71373701e+00 1.08396053e+00 7.39337206e-01] [ 6.62022606e-02 -1.11862743e+00 -7.16818094e-01 ... 
-6.13375425e-01 -9.14278269e-01 7.69739032e-01]] [[-4.11385357e-01 -7.93473184e-01 3.77811700e-01 ... -1.31347442e+00 -1.45484030e-01 1.42795718e+00] [ 7.94530272e-01 1.07508099e+00 -3.36803079e-01 ... -8.55274916e-01 -3.36596891e-02 -1.47772861e+00] [-7.44530499e-01 1.98053789e+00 1.85221565e+00 ... 5.39360404e-01 -1.50246680e+00 6.89107001e-01] ... [ 2.81347722e-01 6.94456920e-02 2.08837241e-01 ... -5.94113946e-01 4.04659986e-01 -9.03994739e-01] [-2.45631725e-01 -5.64113557e-01 -1.25526452e+00 ... -5.41102409e-01 1.04378831e+00 1.00731976e-01] [ 7.90369391e-01 2.50914335e-01 4.21565384e-01 ... 4.94697332e-01 1.09071597e-01 1.19214308e+00]] [[ 1.14424773e-01 3.06725837e-02 -1.63972521e+00 ... 8.22064698e-01 -1.70252562e+00 -1.17100930e+00] [ 5.99145830e-01 1.30503416e-01 1.87291667e-01 ... -4.49515015e-01 1.08074188e+00 2.19728422e+00] [-1.07745004e+00 -6.24345914e-02 1.52670062e+00 ... 3.04535627e-01 3.29225034e-01 -4.43636239e-01] ... [-2.41470292e-01 2.34035969e+00 1.54618084e-01 ... -1.06786954e+00 -1.20711970e+00 -9.42700148e-01] [-2.03315303e-01 -5.00266790e-01 5.12945056e-01 ... 5.59708059e-01 2.44417697e-01 -9.52735245e-01] [-1.26883888e+00 -9.04842198e-01 -7.90204227e-01 ... -7.89678171e-02 9.58438031e-03 -1.36915803e+00]]] [[[ 2.48333359e+00 4.41282749e-01 -1.30289540e-01 ... 8.87378216e-01 7.80664623e-01 -1.75341487e+00] [-7.03440130e-01 -1.22710586e+00 8.83465588e-01 ... -1.55335319e+00 -9.24776029e-03 1.93633914e-01] [-1.64028347e-01 -5.62791049e-01 1.79274964e+00 ... -2.65028238e-01 7.60572255e-01 -1.44732034e+00] ... [ 7.71119893e-01 4.47927386e-01 -9.90005016e-01 ... -6.94459736e-01 -1.87526250e+00 -4.56081450e-01] [-1.77042174e+00 -1.19987711e-01 -3.32478397e-02 ... -1.56872439e+00 5.98601438e-03 -7.97004640e-01] [ 8.26534867e-01 6.41416982e-02 -1.12890756e+00 ... 2.60319680e-01 -2.68923610e-01 8.93934071e-01]] [[-7.01823950e-01 1.01275623e+00 -1.28705367e-01 ... 
1.25021327e+00 6.51335180e-01 1.20379329e+00] [-1.22195899e+00 -9.36100259e-02 2.83990681e-01 ... 5.84923267e-01 -1.45569634e+00 2.97873348e-01] [-9.11798775e-01 -9.58633542e-01 1.02002047e-01 ... 1.47767401e+00 2.01430392e+00 1.15886867e+00] ... [-9.40436542e-01 1.40193045e+00 3.79307032e-01 ... 6.37975752e-01 8.30306590e-01 1.03922524e-01] [ 3.19166243e-01 -9.78795230e-01 4.62920934e-01 ... 1.50929406e-01 5.63349247e-01 -1.23950221e-01] [ 1.44765484e+00 -1.64187402e-01 -8.79424334e-01 ... -1.31503892e+00 7.76868701e-01 -4.79603171e-01]] [[-7.01448023e-01 3.91345531e-01 -1.26179159e-02 ... -2.90282667e-01 -1.34368345e-01 7.50465989e-02] [ 4.55309451e-03 4.90418077e-01 1.70561886e+00 ... 3.56422186e-01 -5.11448085e-01 -8.61200869e-01] [-1.55535233e+00 2.32891703e+00 -2.00969768e+00 ... -2.17230126e-01 -1.14235365e+00 -1.21649170e+00] ... [-2.65028834e-01 3.30110162e-01 1.58876920e+00 ... -1.04156041e+00 -2.67143607e-01 -1.21301544e+00] [-1.61701083e-01 5.29435396e-01 -9.32508767e-01 ... 1.86484098e+00 1.04019813e-01 2.21004510e+00] [-1.82300973e+00 -5.58408856e-01 -1.54213655e+00 ... -3.19098905e-02 8.31991255e-01 -2.21916938e+00]] [[ 1.24098137e-01 -1.39318332e-02 -1.08585656e+00 ... -7.51959443e-01 4.95775282e-01 3.85191381e-01] [-2.28083596e-01 5.69860101e-01 4.91944700e-01 ... -1.91971266e+00 1.28030598e+00 1.14928746e+00] [-1.36279523e-01 2.55682141e-01 -6.88557625e-01 ... -8.16097379e-01 3.00175279e-01 -8.25176835e-02] ... [ 1.91630316e+00 -1.52726829e+00 -2.05374826e-02 ... -6.24524504e-02 -5.32064199e-01 -9.02843624e-02] [-1.67360187e-01 -6.79968476e-01 -1.42205387e-01 ... 1.76651195e-01 5.64894319e-01 7.03363776e-01] [ 7.94057190e-01 -8.25912237e-01 -9.32808340e-01 ... 1.32814825e-01 1.35841227e+00 -7.36679971e-01]] [[-4.55961317e-01 7.84167647e-01 -8.32846940e-01 ... 3.69740248e-01 2.38824701e+00 -1.29110301e+00] [-1.55942589e-01 8.57637078e-02 -7.26139545e-03 ... 
7.26355076e-01 1.27136266e+00 8.92881602e-02] [ 1.17477548e+00 -1.87961996e-01 -3.66004080e-01 ... -5.96545219e-01 -6.16321921e-01 -9.70817208e-01] ... [ 9.01796043e-01 -1.02838910e+00 -1.88947797e-01 ... 2.41519094e-01 -8.67481411e-01 -2.17857933e+00] [ 9.40679312e-01 -7.34018922e-01 -2.21158719e+00 ... -1.10867858e+00 -2.80775964e-01 -6.14174426e-01] [-2.70755887e-01 -4.23063666e-01 -2.08512768e-01 ... -1.11735487e+00 6.21155381e-01 -1.19721040e-01]] [[ 9.11489248e-01 -5.52943468e-01 3.09381318e+00 ... -1.16899705e+00 -1.02691770e+00 -1.46739334e-01] [ 8.48428369e-01 -3.18395764e-01 -8.66139412e-01 ... -1.56199098e+00 1.22989260e-01 -1.65684402e+00] [-7.67369688e-01 -4.43228006e-01 -1.12115633e+00 ... -1.06692836e-01 1.26319930e-01 1.59637082e+00] ... [ 8.47369373e-01 -1.04065466e+00 -1.34539616e+00 ... 4.28104371e-01 -7.24896431e-01 -2.99109817e-01] [ 1.07950699e+00 -1.00574799e-01 7.56987333e-01 ... -1.01053035e+00 1.66059983e+00 6.39105976e-01] [ 4.62231040e-01 5.74496627e-01 -2.41088383e-02 ... -2.52161551e+00 -7.51970232e-01 5.78874469e-01]]] ... [[[-2.24070430e+00 9.22896802e-01 4.00288124e-03 ... 1.37561023e-01 7.19615757e-01 -1.54180670e+00] [ 5.32969177e-01 5.65392315e-01 -5.58840215e-01 ... 1.34929621e+00 1.53684616e-01 9.05793786e-01] [ 4.56608146e-01 -5.15756667e-01 1.70636475e+00 ... 2.03005552e-01 8.67330015e-01 9.02058840e-01] ... [ 7.77523994e-01 6.36622310e-02 -1.36326575e+00 ... -2.90571719e-01 5.29557884e-01 -1.00313580e+00] [-1.55365869e-01 -2.27942646e-01 -2.64987993e+00 ... -2.00334072e+00 -1.49024796e+00 -6.20092392e-01] [ 1.82689202e+00 -1.06480801e+00 1.89130142e-01 ... -9.38439488e-01 -6.70767188e-01 8.82037461e-01]] [[ 8.83616626e-01 -5.27729213e-01 -3.32953811e-01 ... -1.36430371e+00 -4.72180903e-01 1.73139679e+00] [ 8.63139987e-01 4.35938220e-03 -9.95417774e-01 ... -6.69438764e-02 2.27250889e-01 2.55715179e+00] [ 1.03622556e+00 -2.41680264e-01 -4.05767739e-01 ... 2.54971653e-01 -5.88152230e-01 5.09507775e-01] ... 
[ 3.30805570e-01 2.87469000e-01 1.24823737e+00 ... -1.36488831e+00 9.50053036e-02 1.35989404e+00] [ 2.85714817e+00 9.37385976e-01 -1.28274783e-01 ... -4.01754588e-01 -5.71177542e-01 2.73694545e-01] [ 5.64003944e-01 1.70492530e+00 -7.61966109e-01 ... -5.00897467e-01 4.58595343e-02 -1.63944662e+00]] [[-1.25813854e+00 9.42327857e-01 -6.08042851e-02 ... 4.27085578e-01 5.68748891e-01 -1.24958821e-01] [ 2.67781472e+00 -7.14335561e-01 -3.84924024e-01 ... -6.42838299e-01 -2.52843833e+00 -1.70531428e+00] [-4.30451989e-01 -2.76478171e-01 6.11928627e-02 ... -1.31537163e+00 1.69001698e+00 1.56169617e+00] ... [-1.05880702e+00 6.17767751e-01 1.41527069e+00 ... 1.38252795e+00 6.10002339e-01 1.54619849e+00] [-6.44519806e-01 -4.29573208e-01 7.74526060e-01 ... -8.88690501e-02 3.51864785e-01 1.45150614e+00] [ 4.08895582e-01 9.17239785e-01 3.85699064e-01 ... 7.83419460e-02 1.28430939e+00 9.14861619e-01]] [[-1.20160294e+00 -1.87852129e-01 -1.37696519e-01 ... 1.24546731e+00 -1.09974957e+00 -5.25948763e-01] [-5.42844713e-01 -7.43334651e-01 3.59119117e-01 ... 9.37158585e-01 7.10017681e-01 -2.03959966e+00] [-2.33009577e-01 -6.66364968e-01 -3.92138273e-01 ... -1.14070129e+00 1.42970845e-01 6.67402804e-01] ... [-1.20053244e+00 -5.30622780e-01 1.98493734e-01 ... -7.57516444e-01 -2.30513498e-01 -5.88622034e-01] [ 6.73948348e-01 1.82330728e+00 8.80364120e-01 ... 9.55628097e-01 -3.95291984e-01 1.14143121e+00] [ 3.68611038e-01 -1.15977919e+00 1.52640358e-01 ... 6.05893135e-01 -2.00257003e-01 -5.39178729e-01]] [[ 9.43836093e-01 1.48710346e+00 -9.99433458e-01 ... -7.89558411e-01 9.41248357e-01 1.03725064e+00] [-8.83965552e-01 6.26632571e-01 5.86893976e-01 ... -5.86233556e-01 1.00156319e+00 -3.57306510e-01] [ 3.34273070e-01 -1.47970736e+00 2.89066732e-02 ... 9.59913153e-03 6.96489334e-01 1.21642935e+00] ... [ 1.09595358e+00 -7.13098526e-01 1.26782310e+00 ... 6.73181772e-01 1.22208703e+00 -1.26816690e+00] [ 6.10201657e-01 -1.13345957e+00 -6.55505955e-01 ... 
-5.38043380e-02 -2.66366696e+00 4.95122522e-01] [ 1.59781075e+00 -6.96349815e-02 8.42617691e-01 ... -2.59377092e-01 8.31429511e-02 4.77102846e-01]] [[-7.37857521e-01 1.94741245e-02 1.64552796e+00 ... 1.18406296e+00 9.41225737e-02 -3.71392101e-01] [-4.85511810e-01 -6.62509501e-01 1.10065198e+00 ... -2.13377643e+00 -7.61618137e-01 -9.23816442e-01] [-7.55278528e-01 -9.65279162e-01 1.22983909e+00 ... 5.50442576e-01 -2.31192484e-01 1.20620370e-01] ... [-2.23385036e-01 3.01518321e+00 -3.50931659e-02 ... 7.99048722e-01 1.19152829e-01 -1.90505278e+00] [-5.79187632e-01 5.45314670e-01 5.99944949e-01 ... -1.22599475e-01 -2.86227643e-01 -7.06752896e-01] [ 1.26271769e-01 2.07175899e+00 -9.78836417e-01 ... -4.13310319e-01 9.02023911e-02 -2.71501005e-01]]] [[[-2.41458356e-01 -1.75621462e+00 2.07665876e-01 ... -5.60939431e-01 -1.11988854e+00 9.10430193e-01] [ 5.08465230e-01 5.62029898e-01 1.26082802e+00 ... -3.56602430e-01 2.99706489e-01 -1.68000832e-01] [-1.55674636e+00 -2.74289441e+00 -5.47688365e-01 ... -3.95138562e-01 -3.62923712e-01 -6.02652967e-01] ... [ 3.63516770e-02 -1.77111328e-01 9.30857301e-01 ... 1.34795034e+00 3.39363426e-01 1.41253674e+00] [ 6.48989901e-03 1.33773565e+00 -1.72946465e+00 ... -2.62402780e-02 2.22240353e+00 -1.44704628e+00] [ 5.56392550e-01 -2.11247146e-01 -2.88616508e-01 ... 1.04770744e+00 -2.56200969e-01 1.09268308e+00]] [[-1.20807219e+00 1.72998822e+00 -1.13393962e+00 ... 1.17202416e-01 3.02508742e-01 5.10120690e-01] [ 3.79541069e-01 1.18916798e+00 -2.01724172e+00 ... -6.95221007e-01 -9.28344369e-01 -5.69488943e-01] [-1.46144402e+00 -4.42274481e-01 1.71287537e+00 ... -8.34857404e-01 -7.76059926e-01 1.09552431e+00] ... [-1.22993588e+00 -1.28881887e-01 -7.89662004e-01 ... 6.72749162e-01 2.93427408e-01 3.09438199e-01] [ 2.03560740e-01 6.23604357e-01 1.13348842e-01 ... -9.65647340e-01 1.15636170e+00 -4.32649612e-01] [ 1.40368783e+00 9.80129182e-01 -5.01533188e-02 ... 
1.64930308e+00 1.89881235e-01 -1.06622827e+00]] [[-1.71560168e-01 -4.68862325e-01 -1.15023613e+00 ... -6.80003315e-02 -7.69693479e-02 -5.99048674e-01] [ 3.72892708e-01 8.99834111e-02 6.95608631e-02 ... -5.14292836e-01 1.29105461e+00 1.88482583e+00] [-1.99044645e-01 1.54664230e+00 8.48839641e-01 ... 8.91860485e-01 1.34310627e+00 9.79807436e-01] ... [ 2.10226446e-01 7.43304372e-01 1.56587586e-01 ... 7.01523125e-01 8.85446131e-01 1.08094811e+00] [ 1.97667792e-01 1.60761118e+00 4.08040076e-01 ... -4.04046088e-01 -8.19450200e-01 2.51607925e-01] [ 1.04202521e+00 -4.13877338e-01 6.48568094e-01 ... -2.92775345e+00 1.39715707e+00 1.36958301e-01]] [[-1.16923165e+00 1.42861664e+00 -1.53381896e+00 ... 3.79282415e-01 9.23397243e-01 -2.63248563e+00] [ 5.78190088e-01 -9.05812025e-01 2.36813992e-01 ... -2.77771577e-02 -9.74730015e-01 3.89503598e-01] [-3.11731696e-01 -2.27902746e+00 1.93098843e+00 ... -1.51389062e+00 1.74028844e-01 1.63974619e+00] ... [ 1.05695879e+00 1.31664264e+00 -9.29458160e-03 ... 9.81596559e-02 -1.96537375e-01 -1.42616081e+00] [-3.12511027e-01 1.92732304e-01 8.30515027e-01 ... -5.92765450e-01 -1.36926651e-01 5.59680939e-01] [-5.66681147e-01 1.14876831e+00 -1.24093688e+00 ... -2.87279993e-01 -3.55683446e-01 -3.72683406e-02]] [[-7.02042699e-01 -1.45111299e+00 -4.63366151e-01 ... 1.71538234e+00 1.47632349e+00 9.76030350e-01] [ 3.52264196e-01 -1.04668033e+00 2.70136744e-01 ... -3.42351973e-01 7.23002672e-01 -3.76620412e-01] [-1.22152126e+00 1.60122168e+00 1.26223254e+00 ... -3.18384618e-01 -1.13027322e+00 6.04172111e-01] ... [ 1.15635201e-01 4.50942725e-01 -2.10365701e+00 ... -1.28467631e+00 5.14174104e-01 1.49259675e+00] [ 5.76957107e-01 1.11009359e+00 8.05089772e-01 ... -1.78945124e+00 -1.35980392e+00 1.44451737e+00] [-2.41189629e-01 2.55226111e+00 1.60605633e+00 ... -2.50277191e-01 9.92128789e-01 2.39446715e-01]] [[ 7.12345064e-01 2.19105586e-01 -4.58656996e-01 ... 
-1.71662904e-02 -1.11927199e+00 1.68408787e+00] [-1.08255577e+00 -1.24333128e-01 -2.00609326e+00 ... -3.94268721e-01 2.91517973e-01 -1.09524369e+00] [ 2.52159208e-01 -1.95779538e+00 -9.25360441e-01 ... -1.81879127e+00 -1.14409223e-01 -9.52416420e-01] ... [-1.25003016e+00 -8.52418244e-02 -1.48664594e-01 ... 1.18842208e+00 1.27489281e+00 -1.41912949e+00] [-2.55371213e-01 1.20445478e+00 -6.01242967e-02 ... 8.89521420e-01 -1.55026078e-01 -8.31379667e-02] [ 5.86152136e-01 -7.34595433e-02 3.21834415e-01 ... 2.67069966e-01 -8.21388423e-01 1.19719005e+00]]] [[[ 3.93334091e-01 -8.21282983e-01 1.18442446e-01 ... -2.00751856e-01 3.39769185e-01 -4.25353080e-01] [-1.42495200e-01 2.71699056e-02 -1.48290768e-01 ... 5.90767443e-01 2.20269656e+00 -1.50315571e+00] [-6.45041466e-02 -1.21768415e+00 1.16819584e+00 ... -8.91949594e-01 -6.21396601e-01 3.47095966e-01] ... [-2.78598934e-01 -5.75402379e-01 7.87344277e-01 ... 6.52639747e-01 -9.40583050e-01 -8.00423265e-01] [-6.47420168e-01 1.13563728e+00 1.20890774e-01 ... 3.05296034e-01 2.79881179e-01 -4.31861877e-01] [-7.68420160e-01 -2.52154827e-01 1.72412550e+00 ... -3.94970775e-01 2.62550282e+00 -1.82521260e+00]] [[ 2.84533471e-01 1.48434341e+00 3.29925984e-01 ... -8.35540175e-01 -3.40699494e-01 -6.36055827e-01] [-4.03972626e-01 5.60871005e-01 -3.10513794e-01 ... 2.32576561e+00 -5.16346693e-01 3.12471986e-01] [ 5.60479522e-01 -1.04976618e+00 -8.79661500e-01 ... 4.10267889e-01 6.06906414e-02 -3.91801804e-01] ... [ 1.18283641e+00 1.43302828e-01 1.54237378e+00 ... 8.02728832e-01 6.38918757e-01 -8.36009204e-01] [ 3.51565587e-03 4.45923954e-01 2.44834572e-01 ... -1.26887634e-01 -1.21626890e+00 1.48700857e+00] [-1.21665724e-01 3.32767665e-01 -1.05630815e+00 ... -6.85575485e-01 1.53746843e-01 -9.28264201e-01]] [[-1.60115778e+00 1.12662089e+00 -1.62750900e-01 ... -7.79681325e-01 7.56751537e-01 1.74646652e+00] [-9.28789973e-01 8.17303002e-01 -4.24619287e-01 ... 
1.52547145e+00 1.14833701e+00 -6.36904418e-01] [-6.66038930e-01 5.61099172e-01 2.40939111e-01 ... -1.56696111e-01 1.35016334e+00 5.42500377e-01] ... [-1.09569478e+00 5.59528184e-04 -4.47276145e-01 ... -2.02422962e-01 -1.30875993e+00 3.82926315e-01] [-8.67722094e-01 -3.86582553e-01 8.18177342e-01 ... 5.29415846e-01 7.43848622e-01 3.81118417e-01] [-6.69124901e-01 -7.89420605e-01 -9.67166960e-01 ... 1.64872134e+00 -2.39284709e-01 -3.70899469e-01]] [[ 2.51109540e-01 -6.80175245e-01 -1.19311929e+00 ... -1.76286623e-02 9.52333272e-01 -1.37762606e+00] [-2.99556822e-01 1.12747526e+00 -5.67289516e-02 ... 6.94008529e-01 -1.08712804e+00 7.87546158e-01] [-1.29853174e-01 7.57851303e-01 7.35054672e-01 ... 2.35979223e+00 8.77640963e-01 -6.02424324e-01] ... [-1.11303222e+00 4.37100440e-01 6.22941911e-01 ... 1.71444571e+00 5.93905628e-01 -1.28862309e+00] [ 6.49208188e-01 7.54522562e-01 1.51692414e+00 ... 4.01654541e-01 1.07504690e+00 -9.78881493e-02] [-1.00003994e+00 -9.75504100e-01 -7.30698168e-01 ... 7.61486351e-01 2.23910600e-01 -1.37225759e+00]] [[-1.80643797e+00 3.22939277e-01 5.28453648e-01 ... -1.04355609e+00 -9.94718909e-01 1.03082430e+00] [ 1.43902743e+00 1.61450997e-01 1.56833553e+00 ... 8.42260897e-01 -2.56209642e-01 4.70299155e-01] [-1.40789354e+00 1.85891092e-01 8.44637454e-01 ... 1.31328619e+00 2.76370168e-01 1.23998880e+00] ... [ 1.06273019e+00 -1.17194310e-01 6.13032579e-01 ... -4.77115989e-01 -4.28852558e-01 -1.83626994e-01] [ 6.54819310e-01 5.86876273e-01 -5.30235827e-01 ... 8.76252711e-01 3.95535454e-02 1.99498582e+00] [-1.84426951e+00 -2.54350352e+00 7.55467951e-01 ... 9.64926302e-01 2.02385616e+00 -1.45032322e+00]] [[-7.05829030e-03 6.92198813e-01 -1.67682338e+00 ... 1.04101586e+00 -2.48718619e-01 8.93607140e-01] [-1.80837270e-02 4.08797562e-01 -1.08462203e+00 ... -1.23574293e+00 8.49512994e-01 -4.69876379e-01] [ 2.12417260e-01 8.15689981e-01 4.69595402e-01 ... -4.45052981e-01 6.45951271e-01 -2.32411146e+00] ... 
[-8.82583201e-01 1.06245041e+00 -1.17562377e+00 ... 3.69942486e-01 1.09748042e+00 3.53278846e-01] [ 2.02645397e+00 -3.13003995e-02 -1.42506456e+00 ... -5.62128127e-01 7.71381199e-01 1.21217883e+00] [-1.31024942e-01 4.25236911e-01 8.57453495e-02 ... 1.02941239e+00 8.03928852e-01 4.56930161e-01]]]]; ov_res: [[[[-6.07496142e-01 7.44553924e-01 -4.93952513e-01 ... -6.42431259e-01 -5.04176557e-01 1.12712181e+00] [ 4.96969581e-01 -5.16786039e-01 -1.48856175e+00 ... 8.75306785e-01 -5.09265661e-01 1.52477428e-01] [ 1.25300717e+00 -1.36138296e+00 -4.49931443e-01 ... 4.50821221e-01 -4.32311565e-01 -1.21876359e+00] ... [-1.66640505e-01 1.69363427e+00 1.59087944e+00 ... -3.16928357e-01 -9.11916792e-01 -3.96689475e-01] [-7.05328211e-02 -5.70169151e-01 1.79215443e+00 ... 1.13165462e+00 -7.99456716e-01 -2.23747656e-01] [-1.93359005e+00 -4.27709401e-01 -3.37177008e-01 ... 1.01813421e-01 3.95442158e-01 -2.91181773e-01]] [[-6.67525351e-01 3.97923201e-01 -9.32073891e-01 ... 9.56510067e-01 -4.85441424e-02 -1.85955554e-01] [ 1.48907506e+00 1.33359420e+00 -9.53108132e-01 ... 5.48573315e-01 1.51613939e+00 4.39261258e-01] [-2.13025379e+00 1.13798881e+00 -1.61380649e+00 ... -1.92173398e+00 1.08127081e+00 -7.67997146e-01] ... [ 1.65927887e+00 5.88012338e-01 -1.50872660e+00 ... 7.50951827e-01 -5.43085217e-01 -1.55928218e+00] [-7.29897499e-01 -8.77472460e-01 -1.14102289e-01 ... 3.03525906e-02 5.85888326e-01 -7.19616175e-01] [-4.38638449e-01 -4.34031308e-01 3.16757470e-01 ... -1.97782075e+00 -8.84838641e-01 2.83539265e-01]] [[ 7.04528093e-01 -6.58287585e-01 3.80925179e-01 ... -2.80225158e-01 7.60069370e-01 4.38593864e-01] [-6.99701607e-01 -6.07214272e-01 -5.39742410e-01 ... -4.21612233e-01 4.44302857e-01 1.40644395e+00] [-3.97843599e-01 -1.25934815e+00 -8.57948840e-01 ... -2.67564952e-01 8.47564280e-01 -1.18430674e+00] ... [-1.51254267e-01 2.04912663e+00 -6.33420885e-01 ... -9.17511404e-01 4.28537101e-01 3.40524286e-01] [-1.37675297e+00 -8.56540054e-02 -1.80799097e-01 ... 
4.92848128e-01 -1.36851117e-01 -3.59414071e-01] [ 1.41267705e+00 -8.86567235e-01 -4.20597792e-01 ... -1.52658963e+00 -2.11449742e-01 7.39010692e-01]] [[-1.25864720e+00 -6.03080630e-01 -7.64142647e-02 ... 1.28699362e+00 1.48128152e+00 6.56693220e-01] [-1.79092512e-01 -1.74513996e+00 -1.35062277e+00 ... 4.75726038e-01 1.25659019e-01 -1.52002871e-01] [ 9.80749667e-01 2.78549582e-01 -8.92120898e-01 ... 7.10651219e-01 1.38062790e-01 -5.46154380e-01] ... [-3.52662474e-01 3.57528299e-01 8.46279562e-01 ... -1.61188567e+00 5.51061392e-01 -4.58678782e-01] [ 3.39794248e-01 8.82311761e-02 1.63886875e-01 ... 7.58740976e-02 -6.39652848e-01 -3.57750386e-01] [ 8.70192230e-01 8.95315826e-01 4.27348644e-01 ... -1.64403713e+00 9.07088459e-01 1.02146423e+00]] [[ 1.80030060e+00 -1.33100009e+00 1.52064526e+00 ... -1.86315107e+00 -5.79516232e-01 7.50803709e-01] [-2.38250780e+00 2.46429279e-01 1.47909296e+00 ... -1.06015730e+00 7.68919408e-01 9.52243388e-01] [ 2.82147557e-01 -7.40537465e-01 1.03380942e+00 ... 1.05221856e+00 -1.00664544e+00 1.66080907e-01] ... [ 2.46543741e+00 -1.27921581e+00 8.80311251e-01 ... -4.67154272e-02 -5.56123018e-01 4.20568407e-01] [-1.32093942e+00 -1.30919576e+00 -5.76699436e-01 ... -1.01515383e-01 -7.32981265e-01 5.90097368e-01] [-4.88239288e-01 8.18930387e-01 3.02062303e-01 ... 1.52191365e+00 -2.01714486e-01 -3.65526974e-01]] [[ 1.36709774e+00 8.97041708e-02 6.41068146e-02 ... -6.83972657e-01 -6.77900612e-01 -1.95023584e+00] [-4.65286732e-01 -2.15895981e-01 -9.62514877e-01 ... 8.84654373e-02 -1.25786746e+00 2.26445743e-04] [ 5.09275198e-01 -7.11887300e-01 -4.28461522e-01 ... -7.60579765e-01 -2.69770026e-01 -6.40370011e-01] ... [-2.50909001e-01 1.41357410e+00 -1.10994327e+00 ... 5.81833243e-01 -2.96463132e-01 -5.97184062e-01] [ 8.06029975e-01 1.49937654e+00 -3.93457055e-01 ... 2.01544344e-01 1.27440453e+00 -2.29617834e-01] [ 8.60630333e-01 -1.03403199e+00 -9.12734792e-02 ... 
8.42689276e-01 1.03860235e+00 -1.04027176e+00]]] [[[-6.30220234e-01 -4.31123286e-01 -5.79957247e-01 ... 7.75064379e-02 -6.17749751e-01 -1.40544808e+00] [-2.29126859e+00 4.51002479e-01 7.18122244e-01 ... 4.94451761e-01 -8.29293370e-01 -5.35167217e-01] [ 2.83326268e-01 1.19338751e+00 7.98671186e-01 ... -5.64932108e-01 2.86186159e-01 -3.72922242e-01] ... [-8.26088250e-01 1.33124352e+00 -8.42255175e-01 ... 7.35496640e-01 -9.93567526e-01 -1.06539464e+00] [ 1.38516653e+00 1.41332352e+00 -4.37929220e-02 ... 4.48530942e-01 -1.66504121e+00 1.41472265e-01] [-3.34861457e-01 2.67176533e+00 -2.41970226e-01 ... 1.47695804e+00 1.65381029e-01 -1.57399309e+00]] [[-2.61781007e-01 1.38025939e-01 -9.50605273e-01 ... -2.13444448e+00 -7.89475858e-01 5.29625714e-01] [-1.28171813e+00 7.10400879e-01 -6.04409814e-01 ... -7.05372810e-01 4.44760054e-01 -1.30374515e+00] [-5.21916866e-01 -3.06349099e-01 -1.42575711e-01 ... 1.89740336e+00 1.30019999e+00 -1.61301568e-01] ... [ 9.47250426e-01 -1.11222076e+00 -1.27449915e-01 ... 1.77859422e-02 9.30707455e-01 -2.75060326e-01] [ 3.95854414e-01 -1.12570763e+00 -1.64633229e-01 ... 1.04462385e+00 -3.13736230e-01 6.32564306e-01] [-5.50841153e-01 -1.01472127e+00 2.35405251e-01 ... -6.71248376e-01 -1.67524874e-01 -6.18993759e-01]] [[ 4.53714550e-01 -6.35865629e-01 -6.39540613e-01 ... 1.04046059e+00 9.73391652e-01 9.54772115e-01] [-3.17912132e-01 -2.06695175e+00 1.70880103e+00 ... 4.76150751e-01 -1.22349989e+00 1.02084601e+00] [ 2.61601710e+00 -4.59265597e-02 1.27122688e+00 ... -5.94034791e-01 2.02612057e-02 2.52702326e-01] ... [-2.27977753e+00 2.43377948e+00 -3.45352858e-01 ... 1.51146695e-01 -1.04782879e-01 6.83668137e-01] [-1.01106262e+00 -4.65138704e-01 -2.39952946e+00 ... 1.98421216e+00 1.28548157e+00 -1.45306945e-01] [-1.52316034e-01 1.16311157e+00 1.25718728e-01 ... 8.31271470e-01 1.06771994e+00 -1.05214596e+00]] [[-1.06113382e-01 -1.09050500e+00 -1.03453898e+00 ... 
6.96327746e-01 -1.38289011e+00 -4.63938296e-01] [ 1.15140331e+00 2.65729809e+00 4.82770175e-01 ... 1.16606966e-01 1.24079370e+00 -3.28305340e-03] [ 9.68358338e-01 -7.30615556e-01 5.04206121e-01 ... 1.44804180e+00 -4.79647696e-01 -1.30750084e+00] ... [-9.09757316e-02 -4.21675071e-02 -1.39597729e-01 ... -2.18748236e+00 -2.25781775e+00 2.61391133e-01] [-4.58173782e-01 -1.47883713e-01 1.52668965e+00 ... 1.71373725e+00 1.08396065e+00 7.39337325e-01] [ 6.62022531e-02 -1.11862755e+00 -7.16818273e-01 ... -6.13375545e-01 -9.14278328e-01 7.69739032e-01]] [[-4.11385357e-01 -7.93473184e-01 3.77811700e-01 ... -1.31347442e+00 -1.45484030e-01 1.42795718e+00] [ 7.94530272e-01 1.07508099e+00 -3.36803079e-01 ... -8.55274916e-01 -3.36596817e-02 -1.47772861e+00] [-7.44530499e-01 1.98053789e+00 1.85221565e+00 ... 5.39360404e-01 -1.50246680e+00 6.89107001e-01] ... [ 2.81347722e-01 6.94457069e-02 2.08837256e-01 ... -5.94113946e-01 4.04659986e-01 -9.03994739e-01] [-2.45631710e-01 -5.64113557e-01 -1.25526452e+00 ... -5.41102409e-01 1.04378831e+00 1.00731991e-01] [ 7.90369391e-01 2.50914335e-01 4.21565384e-01 ... 4.94697332e-01 1.09071612e-01 1.19214308e+00]] [[ 1.14424787e-01 3.06725930e-02 -1.63972521e+00 ... 8.22064698e-01 -1.70252562e+00 -1.17100930e+00] [ 5.99145830e-01 1.30503431e-01 1.87291682e-01 ... -4.49515015e-01 1.08074188e+00 2.19728422e+00] [-1.07745004e+00 -6.24345802e-02 1.52670062e+00 ... 3.04535627e-01 3.29225034e-01 -4.43636239e-01] ... [-2.41470277e-01 2.34035969e+00 1.54618099e-01 ... -1.06786954e+00 -1.20711970e+00 -9.42700148e-01] [-2.03315288e-01 -5.00266790e-01 5.12945056e-01 ... 5.59708059e-01 2.44417712e-01 -9.52735245e-01] [-1.26883888e+00 -9.04842198e-01 -7.90204227e-01 ... -7.89678171e-02 9.58438776e-03 -1.36915803e+00]]] [[[ 2.48333359e+00 4.41282779e-01 -1.30289525e-01 ... 8.87378216e-01 7.80664623e-01 -1.75341487e+00] [-7.03440130e-01 -1.22710586e+00 8.83465588e-01 ... 
-1.55335319e+00 -9.24774539e-03 1.93633929e-01] [-1.64028332e-01 -5.62791049e-01 1.79274964e+00 ... -2.65028208e-01 7.60572255e-01 -1.44732034e+00] ... [ 7.71119893e-01 4.47927415e-01 -9.90005016e-01 ... -6.94459736e-01 -1.87526250e+00 -4.56081420e-01] [-1.77042174e+00 -1.19987696e-01 -3.32478248e-02 ... -1.56872439e+00 5.98602928e-03 -7.97004640e-01] [ 8.26534867e-01 6.41417131e-02 -1.12890756e+00 ... 2.60319710e-01 -2.68923610e-01 8.93934071e-01]] [[-7.01823950e-01 1.01275623e+00 -1.28705353e-01 ... 1.25021327e+00 6.51335180e-01 1.20379329e+00] [-1.22195899e+00 -9.36100110e-02 2.83990681e-01 ... 5.84923267e-01 -1.45569634e+00 2.97873348e-01] [-9.11798775e-01 -9.58633542e-01 1.02002062e-01 ... 1.47767401e+00 2.01430392e+00 1.15886867e+00] ... [-9.40436542e-01 1.40193045e+00 3.79307032e-01 ... 6.37975752e-01 8.30306590e-01 1.03922538e-01] [ 3.19166273e-01 -9.78795230e-01 4.62920934e-01 ... 1.50929421e-01 5.63349247e-01 -1.23950206e-01] [ 1.44765484e+00 -1.64187387e-01 -8.79424334e-01 ... -1.31503892e+00 7.76868701e-01 -4.79603171e-01]] [[-7.01448023e-01 3.91345501e-01 -1.26179308e-02 ... -2.90282696e-01 -1.34368345e-01 7.50465840e-02] [ 4.55308007e-03 4.90418047e-01 1.70561886e+00 ... 3.56422156e-01 -5.11448085e-01 -8.61200869e-01] [-1.55535233e+00 2.32891703e+00 -2.00969768e+00 ... -2.17230156e-01 -1.14235365e+00 -1.21649170e+00] ... [-2.65028864e-01 3.30110133e-01 1.58876920e+00 ... -1.04156041e+00 -2.67143637e-01 -1.21301544e+00] [-1.61701113e-01 5.29435396e-01 -9.32508767e-01 ... 1.86484098e+00 1.04019798e-01 2.21004510e+00] [-1.82300973e+00 -5.58408856e-01 -1.54213655e+00 ... -3.19099054e-02 8.31991255e-01 -2.21916938e+00]] [[ 1.24098122e-01 -1.39318481e-02 -1.08585656e+00 ... -7.51959443e-01 4.95775282e-01 3.85191351e-01] [-2.28083625e-01 5.69860101e-01 4.91944671e-01 ... -1.91971266e+00 1.28030598e+00 1.14928746e+00] [-1.36279553e-01 2.55682111e-01 -6.88557625e-01 ... -8.16097379e-01 3.00175279e-01 -8.25176984e-02] ... 
[ 1.91630316e+00 -1.52726829e+00 -2.05374975e-02 ... -6.24524653e-02 -5.32064199e-01 -9.02843773e-02] [-1.67360216e-01 -6.79968476e-01 -1.42205417e-01 ... 1.76651195e-01 5.64894319e-01 7.03363776e-01] [ 7.94057190e-01 -8.25912237e-01 -9.32808340e-01 ... 1.32814825e-01 1.35841227e+00 -7.36679971e-01]] [[-4.55961406e-01 7.84167767e-01 -8.32847059e-01 ... 3.69740278e-01 2.38824725e+00 -1.29110324e+00] [-1.55942619e-01 8.57637152e-02 -7.26140384e-03 ... 7.26355195e-01 1.27136278e+00 8.92881602e-02] [ 1.17477560e+00 -1.87962025e-01 -3.66004109e-01 ... -5.96545279e-01 -6.16321981e-01 -9.70817327e-01] ... [ 9.01796162e-01 -1.02838922e+00 -1.88947842e-01 ... 2.41519108e-01 -8.67481530e-01 -2.17857957e+00] [ 9.40679431e-01 -7.34018981e-01 -2.21158743e+00 ... -1.10867870e+00 -2.80776024e-01 -6.14174485e-01] [-2.70755917e-01 -4.23063725e-01 -2.08512813e-01 ... -1.11735499e+00 6.21155441e-01 -1.19721062e-01]] [[ 9.11489308e-01 -5.52943528e-01 3.09381342e+00 ... -1.16899717e+00 -1.02691782e+00 -1.46739349e-01] [ 8.48428488e-01 -3.18395793e-01 -8.66139472e-01 ... -1.56199110e+00 1.22989267e-01 -1.65684426e+00] [-7.67369807e-01 -4.43228066e-01 -1.12115645e+00 ... -1.06692858e-01 1.26319930e-01 1.59637094e+00] ... [ 8.47369492e-01 -1.04065490e+00 -1.34539628e+00 ... 4.28104401e-01 -7.24896491e-01 -2.99109876e-01] [ 1.07950711e+00 -1.00574821e-01 7.56987453e-01 ... -1.01053047e+00 1.66060007e+00 6.39106095e-01] [ 4.62231100e-01 5.74496686e-01 -2.41088495e-02 ... -2.52161598e+00 -7.51970351e-01 5.78874528e-01]]] ... [[[-2.24070430e+00 9.22896802e-01 4.00288124e-03 ... 1.37561023e-01 7.19615757e-01 -1.54180670e+00] [ 5.32969177e-01 5.65392315e-01 -5.58840215e-01 ... 1.34929621e+00 1.53684616e-01 9.05793786e-01] [ 4.56608146e-01 -5.15756667e-01 1.70636475e+00 ... 2.03005552e-01 8.67330015e-01 9.02058840e-01] ... [ 7.77523994e-01 6.36622310e-02 -1.36326575e+00 ... -2.90571719e-01 5.29557884e-01 -1.00313580e+00] [-1.55365869e-01 -2.27942646e-01 -2.64987993e+00 ... 
-2.00334072e+00 -1.49024796e+00 -6.20092392e-01] [ 1.82689202e+00 -1.06480801e+00 1.89130142e-01 ... -9.38439488e-01 -6.70767188e-01 8.82037461e-01]] [[ 8.83616626e-01 -5.27729213e-01 -3.32953811e-01 ... -1.36430371e+00 -4.72180903e-01 1.73139679e+00] [ 8.63139987e-01 4.35938220e-03 -9.95417774e-01 ... -6.69438764e-02 2.27250889e-01 2.55715179e+00] [ 1.03622556e+00 -2.41680264e-01 -4.05767739e-01 ... 2.54971653e-01 -5.88152230e-01 5.09507775e-01] ... [ 3.30805570e-01 2.87469000e-01 1.24823737e+00 ... -1.36488831e+00 9.50053036e-02 1.35989404e+00] [ 2.85714817e+00 9.37385976e-01 -1.28274783e-01 ... -4.01754588e-01 -5.71177542e-01 2.73694545e-01] [ 5.64003944e-01 1.70492530e+00 -7.61966109e-01 ... -5.00897467e-01 4.58595343e-02 -1.63944662e+00]] [[-1.25813854e+00 9.42327857e-01 -6.08042777e-02 ... 4.27085578e-01 5.68748891e-01 -1.24958806e-01] [ 2.67781472e+00 -7.14335561e-01 -3.84924024e-01 ... -6.42838299e-01 -2.52843833e+00 -1.70531428e+00] [-4.30451989e-01 -2.76478171e-01 6.11928701e-02 ... -1.31537163e+00 1.69001698e+00 1.56169617e+00] ... [-1.05880702e+00 6.17767751e-01 1.41527069e+00 ... 1.38252795e+00 6.10002339e-01 1.54619849e+00] [-6.44519806e-01 -4.29573208e-01 7.74526060e-01 ... -8.88690427e-02 3.51864785e-01 1.45150614e+00] [ 4.08895582e-01 9.17239785e-01 3.85699064e-01 ... 7.83419535e-02 1.28430939e+00 9.14861619e-01]] [[-1.20160294e+00 -1.87852114e-01 -1.37696519e-01 ... 1.24546731e+00 -1.09974957e+00 -5.25948763e-01] [-5.42844713e-01 -7.43334651e-01 3.59119117e-01 ... 9.37158585e-01 7.10017681e-01 -2.03959966e+00] [-2.33009562e-01 -6.66364968e-01 -3.92138273e-01 ... -1.14070129e+00 1.42970860e-01 6.67402804e-01] ... [-1.20053244e+00 -5.30622780e-01 1.98493749e-01 ... -7.57516444e-01 -2.30513483e-01 -5.88622034e-01] [ 6.73948348e-01 1.82330728e+00 8.80364120e-01 ... 9.55628097e-01 -3.95291984e-01 1.14143121e+00] [ 3.68611038e-01 -1.15977919e+00 1.52640373e-01 ... 
6.05893135e-01 -2.00256988e-01 -5.39178729e-01]] [[ 9.43836093e-01 1.48710346e+00 -9.99433458e-01 ... -7.89558411e-01 9.41248357e-01 1.03725064e+00] [-8.83965552e-01 6.26632571e-01 5.86893976e-01 ... -5.86233556e-01 1.00156319e+00 -3.57306510e-01] [ 3.34273070e-01 -1.47970736e+00 2.89066732e-02 ... 9.59913153e-03 6.96489334e-01 1.21642935e+00] ... [ 1.09595358e+00 -7.13098526e-01 1.26782310e+00 ... 6.73181772e-01 1.22208703e+00 -1.26816690e+00] [ 6.10201657e-01 -1.13345957e+00 -6.55505955e-01 ... -5.38043380e-02 -2.66366696e+00 4.95122522e-01] [ 1.59781075e+00 -6.96349815e-02 8.42617691e-01 ... -2.59377092e-01 8.31429511e-02 4.77102846e-01]] [[-7.37857521e-01 1.94741245e-02 1.64552796e+00 ... 1.18406296e+00 9.41225737e-02 -3.71392101e-01] [-4.85511810e-01 -6.62509501e-01 1.10065198e+00 ... -2.13377643e+00 -7.61618137e-01 -9.23816442e-01] [-7.55278528e-01 -9.65279162e-01 1.22983909e+00 ... 5.50442576e-01 -2.31192484e-01 1.20620370e-01] ... [-2.23385036e-01 3.01518321e+00 -3.50931659e-02 ... 7.99048722e-01 1.19152829e-01 -1.90505278e+00] [-5.79187632e-01 5.45314670e-01 5.99944949e-01 ... -1.22599475e-01 -2.86227643e-01 -7.06752896e-01] [ 1.26271769e-01 2.07175899e+00 -9.78836417e-01 ... -4.13310319e-01 9.02023911e-02 -2.71501005e-01]]] [[[-2.41458371e-01 -1.75621474e+00 2.07665890e-01 ... -5.60939491e-01 -1.11988866e+00 9.10430253e-01] [ 5.08465290e-01 5.62029898e-01 1.26082802e+00 ... -3.56602460e-01 2.99706519e-01 -1.68000847e-01] [-1.55674648e+00 -2.74289465e+00 -5.47688365e-01 ... -3.95138592e-01 -3.62923741e-01 -6.02652967e-01] ... [ 3.63516770e-02 -1.77111343e-01 9.30857360e-01 ... 1.34795046e+00 3.39363456e-01 1.41253686e+00] [ 6.48989948e-03 1.33773577e+00 -1.72946477e+00 ... -2.62402799e-02 2.22240353e+00 -1.44704640e+00] [ 5.56392550e-01 -2.11247161e-01 -2.88616538e-01 ... 1.04770756e+00 -2.56200999e-01 1.09268308e+00]] [[-1.20807219e+00 1.72998834e+00 -1.13393974e+00 ... 
1.17202424e-01 3.02508742e-01 5.10120690e-01] [ 3.79541099e-01 1.18916810e+00 -2.01724172e+00 ... -6.95221066e-01 -9.28344429e-01 -5.69489002e-01] [-1.46144414e+00 -4.42274511e-01 1.71287549e+00 ... -8.34857464e-01 -7.76059926e-01 1.09552443e+00] ... [-1.22993588e+00 -1.28881887e-01 -7.89662004e-01 ... 6.72749221e-01 2.93427438e-01 3.09438229e-01] [ 2.03560755e-01 6.23604417e-01 1.13348849e-01 ... -9.65647399e-01 1.15636170e+00 -4.32649612e-01] [ 1.40368795e+00 9.80129242e-01 -5.01533225e-02 ... 1.64930320e+00 1.89881250e-01 -1.06622827e+00]] [[-1.71560183e-01 -4.68862325e-01 -1.15023613e+00 ... -6.80003315e-02 -7.69693553e-02 -5.99048674e-01] [ 3.72892708e-01 8.99834037e-02 6.95608631e-02 ... -5.14292836e-01 1.29105461e+00 1.88482583e+00] [-1.99044660e-01 1.54664230e+00 8.48839641e-01 ... 8.91860485e-01 1.34310627e+00 9.79807436e-01] ... [ 2.10226446e-01 7.43304372e-01 1.56587586e-01 ... 7.01523125e-01 8.85446131e-01 1.08094811e+00] [ 1.97667778e-01 1.60761118e+00 4.08040076e-01 ... -4.04046088e-01 -8.19450200e-01 2.51607925e-01] [ 1.04202521e+00 -4.13877338e-01 6.48568094e-01 ... -2.92775345e+00 1.39715707e+00 1.36958301e-01]] [[-1.16923165e+00 1.42861664e+00 -1.53381896e+00 ... 3.79282415e-01 9.23397243e-01 -2.63248563e+00] [ 5.78190088e-01 -9.05812025e-01 2.36813977e-01 ... -2.77771614e-02 -9.74730015e-01 3.89503598e-01] [-3.11731696e-01 -2.27902746e+00 1.93098843e+00 ... -1.51389062e+00 1.74028844e-01 1.63974619e+00] ... [ 1.05695879e+00 1.31664264e+00 -9.29458532e-03 ... 9.81596485e-02 -1.96537375e-01 -1.42616081e+00] [-3.12511027e-01 1.92732304e-01 8.30515027e-01 ... -5.92765450e-01 -1.36926651e-01 5.59680939e-01] [-5.66681147e-01 1.14876831e+00 -1.24093688e+00 ... -2.87279993e-01 -3.55683446e-01 -3.72683443e-02]] [[-7.02042699e-01 -1.45111299e+00 -4.63366121e-01 ... 1.71538234e+00 1.47632349e+00 9.76030350e-01] [ 3.52264225e-01 -1.04668033e+00 2.70136744e-01 ... 
-3.42351943e-01 7.23002672e-01 -3.76620382e-01] [-1.22152126e+00 1.60122168e+00 1.26223254e+00 ... -3.18384618e-01 -1.13027322e+00 6.04172111e-01] ... [ 1.15635209e-01 4.50942725e-01 -2.10365701e+00 ... -1.28467631e+00 5.14174104e-01 1.49259675e+00] [ 5.76957107e-01 1.11009359e+00 8.05089772e-01 ... -1.78945124e+00 -1.35980392e+00 1.44451737e+00] [-2.41189614e-01 2.55226111e+00 1.60605633e+00 ... -2.50277162e-01 9.92128789e-01 2.39446715e-01]] [[ 7.12345064e-01 2.19105586e-01 -4.58656967e-01 ... -1.71662811e-02 -1.11927199e+00 1.68408787e+00] [-1.08255577e+00 -1.24333113e-01 -2.00609326e+00 ... -3.94268721e-01 2.91517973e-01 -1.09524369e+00] [ 2.52159208e-01 -1.95779538e+00 -9.25360441e-01 ... -1.81879127e+00 -1.14409216e-01 -9.52416420e-01] ... [-1.25003016e+00 -8.52418169e-02 -1.48664579e-01 ... 1.18842208e+00 1.27489281e+00 -1.41912949e+00] [-2.55371183e-01 1.20445478e+00 -6.01242892e-02 ... 8.89521420e-01 -1.55026078e-01 -8.31379592e-02] [ 5.86152136e-01 -7.34595358e-02 3.21834445e-01 ... 2.67069995e-01 -8.21388423e-01 1.19719005e+00]]] [[[ 3.93334091e-01 -8.21282983e-01 1.18442461e-01 ... -2.00751856e-01 3.39769185e-01 -4.25353050e-01] [-1.42495185e-01 2.71699224e-02 -1.48290753e-01 ... 5.90767443e-01 2.20269656e+00 -1.50315571e+00] [-6.45041242e-02 -1.21768415e+00 1.16819584e+00 ... -8.91949594e-01 -6.21396601e-01 3.47095966e-01] ... [-2.78598905e-01 -5.75402379e-01 7.87344277e-01 ... 6.52639747e-01 -9.40583050e-01 -8.00423265e-01] [-6.47420049e-01 1.13563728e+00 1.20890789e-01 ... 3.05296034e-01 2.79881209e-01 -4.31861848e-01] [-7.68420160e-01 -2.52154797e-01 1.72412550e+00 ... -3.94970745e-01 2.62550282e+00 -1.82521260e+00]] [[ 2.84533471e-01 1.48434341e+00 3.29926044e-01 ... -8.35540175e-01 -3.40699494e-01 -6.36055708e-01] [-4.03972626e-01 5.60871005e-01 -3.10513794e-01 ... 2.32576561e+00 -5.16346693e-01 3.12471986e-01] [ 5.60479522e-01 -1.04976618e+00 -8.79661500e-01 ... 4.10267919e-01 6.06906600e-02 -3.91801804e-01] ... 
[ 1.18283641e+00 1.43302843e-01 1.54237378e+00 ... 8.02728832e-01 6.38918757e-01 -8.36009204e-01] [ 3.51567264e-03 4.45923954e-01 2.44834602e-01 ... -1.26887619e-01 -1.21626890e+00 1.48700857e+00] [-1.21665709e-01 3.32767665e-01 -1.05630815e+00 ... -6.85575485e-01 1.53746858e-01 -9.28264201e-01]] [[-1.60115778e+00 1.12662089e+00 -1.62750900e-01 ... -7.79681325e-01 7.56751597e-01 1.74646652e+00] [-9.28789914e-01 8.17303002e-01 -4.24619287e-01 ... 1.52547145e+00 1.14833701e+00 -6.36904418e-01] [-6.66038871e-01 5.61099172e-01 2.40939111e-01 ... -1.56696111e-01 1.35016334e+00 5.42500436e-01] ... [-1.09569478e+00 5.59535343e-04 -4.47276145e-01 ... -2.02422962e-01 -1.30875993e+00 3.82926315e-01] [-8.67722034e-01 -3.86582553e-01 8.18177402e-01 ... 5.29415846e-01 7.43848622e-01 3.81118417e-01] [-6.69124901e-01 -7.89420545e-01 -9.67166960e-01 ... 1.64872134e+00 -2.39284694e-01 -3.70899469e-01]] [[ 2.51109540e-01 -6.80175245e-01 -1.19311929e+00 ... -1.76286548e-02 9.52333331e-01 -1.37762606e+00] [-2.99556822e-01 1.12747526e+00 -5.67289442e-02 ... 6.94008529e-01 -1.08712804e+00 7.87546158e-01] [-1.29853159e-01 7.57851362e-01 7.35054731e-01 ... 2.35979223e+00 8.77640963e-01 -6.02424264e-01] ... [-1.11303222e+00 4.37100440e-01 6.22941971e-01 ... 1.71444571e+00 5.93905628e-01 -1.28862309e+00] [ 6.49208188e-01 7.54522622e-01 1.51692414e+00 ... 4.01654541e-01 1.07504690e+00 -9.78881419e-02] [-1.00003994e+00 -9.75504100e-01 -7.30698168e-01 ... 7.61486351e-01 2.23910600e-01 -1.37225759e+00]] [[-1.80643797e+00 3.22939277e-01 5.28453648e-01 ... -1.04355609e+00 -9.94718909e-01 1.03082430e+00] [ 1.43902743e+00 1.61450997e-01 1.56833553e+00 ... 8.42260897e-01 -2.56209642e-01 4.70299155e-01] [-1.40789354e+00 1.85891092e-01 8.44637454e-01 ... 1.31328619e+00 2.76370168e-01 1.23998880e+00] ... [ 1.06273019e+00 -1.17194310e-01 6.13032579e-01 ... -4.77115989e-01 -4.28852558e-01 -1.83626994e-01] [ 6.54819310e-01 5.86876273e-01 -5.30235827e-01 ... 
8.76252711e-01 3.95535454e-02 1.99498582e+00] [-1.84426951e+00 -2.54350352e+00 7.55467951e-01 ... 9.64926302e-01 2.02385616e+00 -1.45032322e+00]] [[-7.05829030e-03 6.92198813e-01 -1.67682338e+00 ... 1.04101586e+00 -2.48718619e-01 8.93607140e-01] [-1.80837270e-02 4.08797562e-01 -1.08462203e+00 ... -1.23574293e+00 8.49512994e-01 -4.69876379e-01] [ 2.12417260e-01 8.15689981e-01 4.69595402e-01 ... -4.45052981e-01 6.45951271e-01 -2.32411146e+00] ... [-8.82583201e-01 1.06245041e+00 -1.17562377e+00 ... 3.69942486e-01 1.09748042e+00 3.53278846e-01] [ 2.02645397e+00 -3.13003995e-02 -1.42506456e+00 ... -5.62128127e-01 7.71381199e-01 1.21217883e+00] [-1.31024942e-01 4.25236911e-01 8.57453495e-02 ... 1.02941239e+00 8.03928852e-01 4.56930161e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 4} - params:{'n_groups': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- 4 graph(%self : __torch__.test_group_norm.___torch_mangle_4598.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %5 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %6 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.weight : NoneType = prim::Constant() %self.n_groups : int = prim::Constant[value=1]() %10 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %11 : int = aten::size(%x.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %12 : int = aten::mul(%10, %11) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int[] = prim::ListConstruct(%12, %self.n_groups) %14 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %15 : int[] = aten::slice(%14, %4, %self.weight, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %16 : int[] = aten::list(%15) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %17 : int[] = aten::add(%13, %16) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%17, %3) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %19 : int = aten::len(%17) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %20 : int = aten::sub(%19, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%20, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %24 : int = aten::add(%i.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %25 : int = aten::__getitem__(%17, %24) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %25) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %27 : bool = aten::eq(%size_prods, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %28 : str = aten::format(%5, %17) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%28, %6) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %29 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.weight, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%29) fw_re: [[[[-7.47022569e-01 1.66065860e+00 -1.47218680e+00 ... 1.20598912e+00 4.48176444e-01 8.67222369e-01] [ 3.33831936e-01 6.86105192e-01 -9.13659155e-01 ... 5.50927997e-01 1.10987675e+00 -1.69095814e+00] [-8.98229003e-01 4.58223432e-01 -1.58977926e+00 ... -1.18696523e+00 -1.41859579e+00 6.44723415e-01] ... [-9.71065164e-01 1.64878905e+00 -5.80001473e-01 ... 
5.06752670e-01 8.08261037e-01 -5.45744061e-01] [-1.81481099e+00 -1.04691768e+00 -1.00772643e+00 ... 1.50810158e+00 -8.09326589e-01 -1.14715719e+00] [ 1.09729898e+00 1.06768227e+00 4.91571516e-01 ... -1.29104614e+00 1.17644024e+00 -4.07452345e-01]] [[-3.26905072e-01 -1.18096638e+00 -1.20920278e-01 ... 4.43626940e-01 1.02855498e-02 3.22916627e-01] [ 6.08502984e-01 1.26355922e+00 1.22483528e+00 ... -2.16198012e-01 -2.23291206e+00 1.77158165e+00] [-8.87141287e-01 2.86027431e+00 1.90307987e+00 ... 6.47642076e-01 -1.68742847e+00 4.68132585e-01] ... [-7.88051903e-01 1.57757390e+00 1.80619431e+00 ... 1.24180026e-01 -3.79381105e-02 3.43037307e-01] [-8.82480741e-02 5.94846785e-01 9.36584175e-01 ... 6.94146156e-01 1.28065240e+00 -1.30205464e+00] [-5.33517301e-02 1.61498380e+00 1.22069322e-01 ... -7.44915247e-01 2.69363546e+00 -2.01682544e+00]] [[ 7.52918601e-01 -1.18822181e+00 1.20434403e+00 ... -4.20860916e-01 -4.45093393e-01 1.40069440e-01] [ 1.23667264e+00 7.74060249e-01 -3.97337288e-01 ... 1.34848850e-02 -1.18357158e+00 -1.58626795e+00] [ 1.87288240e-01 -1.01163661e+00 -8.79201055e-01 ... -1.91102409e+00 6.90164387e-01 -1.19149065e+00] ... [-1.91232526e+00 -1.54828489e+00 8.48476738e-02 ... 3.92530292e-01 6.50107980e-01 -2.20377427e-02] [ 3.98098588e-01 -7.15560257e-01 -6.22010291e-01 ... 3.00640076e-01 -2.53132224e-01 -3.03813517e-01] [-6.63983881e-01 -1.72844434e+00 1.82332909e+00 ... -2.49230695e+00 6.41799450e-01 -2.60457563e+00]] [[-1.03226793e+00 -1.25696242e-01 7.68863082e-01 ... 1.48262846e+00 2.10610461e+00 -6.44782722e-01] [ 2.65808012e-02 5.47999144e-01 8.21068645e-01 ... 9.34619248e-01 1.96728528e-01 -6.31193638e-01] [-7.86518395e-01 -2.09743112e-01 -9.22520012e-02 ... -7.70382702e-01 1.19501829e+00 -2.47220099e-01] ... [ 4.46156949e-01 4.22859490e-01 -3.21536750e-01 ... 7.42051780e-01 -1.67724475e-01 -2.80174017e-01] [-1.68563709e-01 4.15310621e-01 -1.48466873e+00 ... 
-5.03152370e-01 3.63476127e-01 1.66837418e+00] [-3.82574290e-01 -5.84470987e-01 1.00202966e+00 ... -4.18314546e-01 1.40122965e-01 -4.83138323e-01]] [[-9.03433084e-01 5.28229773e-01 9.30018574e-02 ... 1.10510981e+00 1.38054445e-01 -1.25022185e+00] [-1.16883600e+00 1.08465827e+00 -3.09696961e-02 ... -1.86674035e+00 -1.77671921e+00 -1.21089196e+00] [ 8.20465267e-01 6.94029510e-01 1.36678302e+00 ... -5.28720379e-01 7.81577468e-01 -1.62591457e-01] ... [-1.04934061e+00 1.07239135e-01 -2.86642224e-01 ... -5.70282787e-02 -1.28154933e+00 -1.49519300e+00] [ 3.89698684e-01 3.17639053e-01 4.77756709e-01 ... -6.35892451e-01 -1.27033651e+00 6.62679732e-01] [-3.70991468e-01 -1.59275472e+00 1.07403433e+00 ... 1.32492518e+00 -1.49123549e+00 -6.74905479e-01]] [[-4.50604200e-01 1.04695164e-01 -4.54607338e-01 ... 1.10759318e+00 -1.62484989e-01 -7.34351218e-01] [ 4.17199850e-01 -1.35908961e+00 -1.20786691e+00 ... -7.00236410e-02 1.33277965e+00 4.64554757e-01] [ 4.93291259e-01 7.32933760e-01 2.24605560e+00 ... 1.37804329e+00 -9.22023416e-01 1.33676529e+00] ... [ 5.50901257e-02 6.28043413e-01 3.78615022e-01 ... 2.35184455e+00 -1.35087132e+00 -5.21922410e-01] [-6.32524490e-01 4.90931600e-01 7.03265488e-01 ... 1.35673642e+00 -1.64264794e-02 -5.11695027e-01] [ 9.90640402e-01 2.70888567e-01 -3.61588925e-01 ... 6.88791990e-01 3.76996666e-01 -6.67757809e-01]]] [[[ 5.43739855e-01 -8.18269849e-01 -2.25507081e-01 ... 7.38205552e-01 1.54462993e+00 6.37320042e-01] [-1.36973631e+00 1.68335664e+00 -1.68029690e+00 ... -5.14749587e-01 3.24885428e-01 -3.15500200e-01] [ 9.55082476e-01 -1.22881830e+00 2.70146227e+00 ... -1.31093562e+00 6.64016679e-02 -1.95453799e+00] ... [ 3.09566760e+00 8.03143978e-01 1.57569420e+00 ... 2.44700983e-01 -3.28301907e-01 -1.96423814e-01] [-5.61860383e-01 3.68315965e-01 -4.64476883e-01 ... 7.92892694e-01 -2.77163982e-01 4.38059032e-01] [-9.94996130e-01 6.68079257e-02 -7.62581706e-01 ... 
3.59108627e-01 -6.03821933e-01 3.40742558e-01]] [[ 2.93152779e-01 5.44541299e-01 6.64159179e-01 ... 9.36210930e-01 -3.69451344e-01 -3.90369624e-01] [ 9.45880353e-01 -1.34029076e-01 1.07009101e+00 ... 1.42640054e+00 7.78994739e-01 -1.55093515e+00] [-4.30136770e-01 1.56972313e+00 -4.42009211e-01 ... 2.84040666e+00 2.42656022e-02 -2.92056259e-02] ... [ 7.55678177e-01 1.37064505e+00 5.54152846e-01 ... 5.92300117e-01 -2.41303712e-01 8.77801418e-01] [-8.13696861e-01 5.80902874e-01 8.20063651e-01 ... 1.38729525e+00 4.52107698e-01 2.51752555e-01] [-4.24916968e-02 -8.62566531e-01 -3.03401053e-01 ... 8.52771223e-01 1.17018843e+00 1.18528342e+00]] [[-1.11005163e+00 8.42755556e-01 2.96947926e-01 ... 1.05718935e+00 2.30209756e+00 -9.42798495e-01] [ 1.02750707e+00 -8.82816970e-01 -1.01517951e+00 ... -8.34168494e-01 7.96627700e-01 5.54606974e-01] [-2.65607871e-02 -4.88447845e-01 -1.55252075e+00 ... 3.90199453e-01 -4.76255327e-01 -1.22989431e-01] ... [-6.11414254e-01 1.60813904e+00 -2.26678109e+00 ... -8.96600425e-01 6.13999367e-01 -1.22786570e+00] [-2.34005108e-01 -1.84323204e+00 6.27477050e-01 ... -1.27444398e+00 -4.22041751e-02 -8.75140011e-01] [-3.32872123e-01 -3.03976625e-01 -7.09744573e-01 ... -3.89249086e-01 -9.34020102e-01 5.48051119e-01]] [[ 1.24221051e+00 -1.10198998e+00 2.26678804e-01 ... 8.08705807e-01 -8.23525637e-02 3.67657304e-01] [-7.00133145e-01 -2.35063767e+00 -4.14867550e-01 ... 2.75383472e-01 -5.65652430e-01 6.38366878e-01] [ 3.87821645e-01 1.24338198e+00 -2.47188553e-01 ... -2.34131026e+00 1.06754459e-01 -4.59612548e-01] ... [ 4.86371964e-01 -8.06467235e-01 -9.14571881e-01 ... 1.20188403e+00 -1.70539463e+00 -1.32755756e+00] [ 9.12560076e-02 -9.59554315e-01 1.27094936e+00 ... 8.64541829e-01 -1.06096923e+00 -3.33827019e-01] [-1.71242923e-01 1.07964218e+00 1.35391103e-02 ... -4.11278099e-01 6.38165832e-01 -5.18164299e-02]] [[-8.47602904e-01 -1.22916496e+00 5.10456145e-01 ... 
-1.25539172e+00 4.21638131e-01 -1.60465956e-01] [-3.78838062e-01 -4.09913123e-01 -1.71315342e-01 ... 1.32878709e+00 5.16753376e-01 2.03064010e-01] [ 1.47925043e+00 1.61939836e+00 1.46900797e+00 ... 5.70939600e-01 -3.66503417e-01 -1.20564282e+00] ... [ 4.51667726e-01 -9.43859518e-02 3.50293994e-01 ... -1.09248590e+00 -7.09604919e-01 2.01051474e-01] [-1.56791961e+00 -2.26180959e+00 5.91359377e-01 ... 1.21814214e-01 -3.00697118e-01 -1.00911701e+00] [-4.27573062e-02 8.72059226e-01 -1.35299110e+00 ... -1.36408687e+00 1.56549489e+00 1.79600537e+00]] [[ 1.08418798e+00 -1.12625092e-01 -2.25345477e-01 ... -2.38596976e-01 -3.85079026e-01 1.88712680e+00] [ 5.42507052e-01 -7.17317224e-01 -2.30586886e-01 ... 1.02690554e+00 -1.58128440e-02 -4.35983181e-01] [-3.02379757e-01 4.11130011e-01 -1.21670175e+00 ... 5.09793043e-01 4.02995914e-01 -1.30812478e+00] ... [ 9.88995507e-02 8.34336340e-01 2.14250660e+00 ... 2.51663804e-01 -1.87282670e+00 2.13852957e-01] [ 1.80109704e+00 7.03039527e-01 4.65485513e-01 ... 3.93938690e-01 -1.30683273e-01 -1.55999029e+00] [ 2.68731058e-01 -8.89741004e-01 1.02634788e+00 ... -8.72214973e-01 3.22427869e-01 6.34069741e-01]]] [[[ 6.15031838e-01 1.50611258e+00 -2.14437791e-03 ... 1.00717723e+00 2.52036780e-01 6.67551517e-01] [ 2.72492200e-01 -4.29103315e-01 1.60467100e+00 ... 1.67666066e+00 9.16903019e-01 -2.04606816e-01] [-1.67229772e+00 -5.45317233e-02 -1.21497416e+00 ... 1.56536365e+00 8.32337737e-01 3.93925726e-01] ... [-5.31893373e-01 -3.58021230e-01 -8.41489673e-01 ... 9.38701779e-02 -5.39856493e-01 -1.05120444e+00] [ 7.51040518e-01 9.13946033e-01 -3.94529432e-01 ... 9.16387439e-01 7.59082735e-01 3.05959195e-01] [ 1.77440321e+00 -1.10898292e+00 5.54636002e-01 ... -1.12230730e+00 -6.36340022e-01 1.89857352e+00]] [[ 1.27017629e+00 7.34933317e-01 -4.45318252e-01 ... -6.48082495e-01 -6.06957614e-01 9.72429395e-01] [-2.51829457e+00 -1.05254972e+00 5.44601142e-01 ... 
1.59759974e+00 8.04834068e-01 -1.20340312e+00] [-1.56268346e+00 3.98917377e-01 -1.26105678e+00 ... -9.00707483e-01 1.78502783e-01 1.40619242e+00] ... [-1.99088204e+00 7.57582724e-01 -1.19440711e+00 ... -1.10826147e+00 -1.13482460e-01 -1.41712916e+00] [ 6.29854441e-01 3.29279780e-01 6.28722012e-01 ... 2.42058587e+00 -2.82128770e-02 -1.13979137e+00] [-2.27454782e-01 1.70489013e+00 -1.07478344e+00 ... 1.99221149e-01 3.57197374e-01 -1.47906923e+00]] [[ 7.79107869e-01 -2.96843071e-02 -5.03252625e-01 ... -3.69549751e-01 1.11706460e+00 1.17399442e+00] [ 3.49069506e-01 -5.30578122e-02 -1.87331271e+00 ... 8.07761014e-01 -2.90214539e-01 8.29199374e-01] [ 6.89719379e-01 -8.59435558e-01 7.42245257e-01 ... 1.89721513e+00 -3.05136234e-01 8.31548989e-01] ... [ 5.47564387e-01 -7.30302274e-01 2.16792083e+00 ... -2.07665384e-01 -4.71954107e-01 1.31538463e+00] [ 1.27039862e+00 1.32874632e+00 -5.84368348e-01 ... -2.04964566e+00 -1.17553532e+00 -1.65283054e-01] [ 1.39096454e-02 1.11333680e+00 -9.52542305e-01 ... 5.02550043e-02 -3.67605358e-01 1.16042638e+00]] [[-2.57562280e+00 1.62113905e+00 2.23072365e-01 ... 2.80013472e-01 8.20039809e-01 -1.66701341e+00] [-1.54356524e-01 -8.02366659e-02 -1.40620577e+00 ... 3.98196489e-01 -8.45758736e-01 -3.81152183e-01] [ 1.83010995e-01 1.90419960e+00 7.44955599e-01 ... -4.74369347e-01 1.28646195e+00 -2.43893728e-01] ... [ 7.84791946e-01 -8.91956329e-01 -6.24448121e-01 ... 1.62363037e-01 6.99647248e-01 4.07227010e-01] [-9.12515163e-01 -3.40219826e-01 1.01565230e+00 ... 2.40921497e+00 3.25863361e-01 6.63720548e-01] [ 6.04642928e-01 3.53955716e-01 -3.42404914e+00 ... -3.02778900e-01 -1.08379805e+00 -1.37066805e+00]] [[-2.15712503e-01 1.71223767e-02 -1.57532144e+00 ... 3.66924435e-01 6.89266026e-01 -1.74479449e+00] [-6.66498005e-01 1.30098248e+00 -9.26986098e-01 ... 7.69195974e-01 -2.41168573e-01 6.69129938e-02] [ 4.50495221e-02 3.85937333e-01 -3.50944638e-01 ... -3.25811625e-01 -1.06997395e+00 1.95988119e+00] ... 
[ 2.07416987e+00 -1.75090981e+00 -1.03541207e+00 ... 1.03574550e+00 1.51543427e+00 1.29548168e+00] [ 5.78051567e-01 8.46529938e-03 -2.43838616e-02 ... 5.04699469e-01 1.26654208e+00 5.21619432e-02] [ 1.25369191e+00 -8.00812721e-01 -9.23845589e-01 ... -9.01484549e-01 7.69793868e-01 1.39457166e-01]] [[ 7.92064428e-01 1.12115037e+00 2.84189075e-01 ... -2.17228420e-02 -1.20264518e+00 -7.77303398e-01] [ 5.50196588e-01 3.52634370e-01 8.13802361e-01 ... -6.87766373e-01 -1.90884280e+00 -2.93211728e-01] [ 2.56737500e-01 -1.49735725e+00 -7.03506529e-01 ... 2.28475571e+00 3.63750964e-01 -5.82939208e-01] ... [ 1.03300607e+00 -1.75805259e+00 -2.88071811e-01 ... -9.91833150e-01 -7.73904264e-01 -1.23637474e+00] [-1.86036718e+00 -1.32518172e+00 -1.35500872e+00 ... -1.31138659e+00 7.46904761e-02 -3.96621048e-01] [ 1.09899414e+00 -1.21624494e+00 5.11981785e-01 ... -6.21057391e-01 -9.68295813e-01 5.68605423e-01]]] ... [[[-8.43383133e-01 5.45552671e-01 2.89663412e-02 ... 8.69070172e-01 -1.09460413e+00 2.07051337e-01] [ 1.24306619e+00 7.15654016e-01 1.32770050e+00 ... 7.14626133e-01 2.30356917e-01 1.21590102e+00] [ 1.17285883e+00 -1.56249475e+00 -1.73802227e-01 ... -4.14450467e-01 1.05874503e+00 -6.09355867e-01] ... [ 1.07046127e+00 -2.14641616e-01 -9.15323421e-02 ... -1.15663908e-01 5.95669687e-01 7.12011635e-01] [-7.97202468e-01 1.64764553e-01 2.79694736e-01 ... 1.08938944e+00 -1.60119265e-01 -1.06542218e+00] [ 1.73122060e+00 7.43405640e-01 -1.28407693e+00 ... -2.35977203e-01 8.53193939e-01 1.34461626e-01]] [[-1.77548873e+00 8.22161213e-02 -1.19794890e-01 ... -5.61878502e-01 2.75773853e-01 -1.16289258e+00] [ 2.56071866e-01 -8.10378313e-01 9.42462981e-01 ... -7.66358614e-01 1.59096634e+00 -5.45072794e-01] [-1.48874581e+00 -1.24690485e+00 -1.47676304e-01 ... 1.30637765e+00 -4.11045611e-01 -2.64917165e-01] ... [-1.38357842e+00 1.16389811e+00 6.29663229e-01 ... -2.70228349e-02 -2.05569196e+00 1.73852369e-01] [-1.22377670e+00 -2.36775801e-01 1.41492260e+00 ... 
1.20226234e-01 3.97462040e-01 -1.08679682e-01] [-1.75103652e+00 6.77569509e-01 4.94887203e-01 ... -6.82472527e-01 -3.69669259e-01 4.57596593e-02]] [[ 5.70589364e-01 -8.28021765e-01 -7.69666612e-01 ... -1.35969710e+00 -1.03376710e+00 1.98761493e-01] [ 1.44975539e-02 4.76949215e-02 2.34757447e+00 ... 1.50723553e+00 -8.59518468e-01 -2.03342829e-02] [ 1.40113401e+00 -9.27466571e-01 -7.08400249e-01 ... 7.37278104e-01 1.73717558e-01 1.28335655e+00] ... [ 1.02783513e+00 -1.20733827e-01 -1.46268070e+00 ... -2.34952140e+00 -1.05360651e+00 -9.73344564e-01] [-4.42404300e-01 -2.90954471e-01 1.10009718e+00 ... -2.72797561e+00 -4.29841220e-01 -3.13919723e-01] [-2.00709924e-01 1.11335909e+00 -1.00501347e+00 ... -7.36525595e-01 9.09455359e-01 -9.25665140e-01]] [[ 2.73359370e+00 2.08181873e-01 -4.04268950e-02 ... -5.19633710e-01 -1.50172460e+00 6.23678803e-01] [-1.06939387e+00 1.02470911e+00 1.09074116e-01 ... -1.48853576e+00 1.23704278e+00 1.46142173e+00] [-1.01098895e+00 -1.76647913e+00 2.98978627e-01 ... 5.74752808e-01 -6.51790977e-01 -8.00641596e-01] ... [ 8.04406285e-01 -3.75776619e-01 -6.83956325e-01 ... 1.75331450e+00 2.10896921e+00 6.75373524e-03] [-5.09930015e-01 4.02201116e-01 -1.39598042e-01 ... 1.09361136e+00 -2.64988613e+00 5.43591321e-01] [ 5.10336101e-01 5.30992866e-01 2.24462271e+00 ... 2.11598292e-01 -3.72131318e-01 -9.91365686e-02]] [[-5.11797667e-01 3.94179881e-01 8.15077871e-03 ... -1.31448135e-01 4.54231650e-01 -4.82120246e-01] [ 5.92685938e-01 -4.77564305e-01 9.60720982e-03 ... -5.41108847e-01 4.04917896e-02 7.37837076e-01] [ 1.25181687e+00 3.02904129e-01 3.80341798e-01 ... -4.00743306e-01 -3.38393152e-01 8.11906159e-01] ... [-7.07473040e-01 2.29521804e-02 1.62887678e-01 ... -7.08348632e-01 -1.53173363e+00 -4.37663421e-02] [-5.66426098e-01 -1.86889917e-01 4.36607003e-01 ... -6.86004639e-01 -2.66615540e-01 1.98684022e-01] [ 6.26180708e-01 -5.88813066e-01 7.16188073e-01 ... 
1.32143867e+00 1.24160302e+00 9.43613410e-01]] [[-6.44203424e-01 -9.02561903e-01 1.08187151e+00 ... 2.08347988e+00 -8.80237281e-01 6.08828127e-01] [ 2.21902084e+00 -6.36068225e-01 3.61954033e-01 ... -3.10298622e-01 -6.98916852e-01 1.14815488e-01] [ 3.97024006e-01 -2.08119440e+00 2.35188648e-01 ... 1.21211267e+00 -3.37142497e-01 8.97629023e-01] ... [ 1.41667172e-01 -5.76134920e-01 1.44587374e+00 ... -7.70560801e-01 -1.03696215e+00 -9.58389223e-01] [ 2.87207007e-01 -1.33545136e+00 2.52403712e+00 ... 2.23990607e+00 -1.28717554e+00 -9.38507915e-01] [-9.56222296e-01 1.71336234e+00 7.41339386e-01 ... -4.25897300e-01 1.79539585e+00 3.21589440e-01]]] [[[ 4.09697853e-02 7.27723897e-01 -6.29183948e-01 ... 2.08355546e+00 5.77003956e-01 1.22317374e+00] [-9.45787728e-01 3.55037749e-02 -1.02669722e-03 ... 7.73114741e-01 -6.60166264e-01 5.72380245e-01] [-3.87956232e-01 1.84065962e+00 7.73836911e-01 ... -7.66472742e-02 2.48044521e-01 2.39382073e-01] ... [-1.72592044e+00 1.20357835e+00 4.52410907e-01 ... -5.70209622e-01 8.75383854e-01 -3.74793005e+00] [ 1.45037413e-01 9.36258972e-01 5.57283640e-01 ... -1.11154580e+00 -6.85967445e-01 2.56539583e-01] [ 2.82489508e-01 -1.10145815e-01 1.61156046e+00 ... 7.10040331e-01 -5.20146549e-01 -1.72508824e+00]] [[-7.13259637e-01 1.18743634e+00 -6.75989091e-01 ... -3.90428960e-01 -1.29621918e-03 -3.30155164e-01] [ 5.22351027e-01 8.81326437e-01 -7.62020111e-01 ... 1.50500393e+00 2.19931066e-01 1.79768682e-01] [ 1.25708675e+00 1.60440832e-01 1.39081752e+00 ... 3.58777046e-01 4.04001564e-01 9.69312489e-01] ... [ 2.28321910e-01 8.52390289e-01 2.16954127e-01 ... 3.29109401e-01 1.19733560e+00 -9.48939025e-01] [ 3.85629892e-01 1.49883592e+00 3.15684199e-01 ... -4.11727548e-01 -2.76236981e-01 -1.07374573e+00] [ 3.45738083e-01 8.54072273e-02 9.41030085e-01 ... 7.21572697e-01 -7.52850771e-01 1.99419999e+00]] [[-1.52581751e+00 -7.59488404e-01 9.97132719e-01 ... 
-9.01188493e-01 4.11942333e-01 -2.53867686e-01] [ 2.61022663e+00 -6.93593323e-02 5.76756358e-01 ... 1.26672894e-01 1.29349804e+00 1.02446318e-01] [ 1.20061302e+00 1.89773858e-01 5.30131698e-01 ... 2.54828286e+00 1.17329967e+00 3.19059044e-01] ... [-4.43569064e-01 -1.35378897e+00 5.67425072e-01 ... -1.25200033e+00 -6.34147584e-01 -1.34041929e+00] [-5.28697550e-01 -1.23722088e+00 2.81608611e-01 ... 1.47447622e+00 -2.03309596e-01 -1.96245992e+00] [-1.33599028e-01 3.60689871e-02 2.14941934e-01 ... -1.09965491e+00 -8.81643295e-01 -8.73817503e-01]] [[-8.60245585e-01 -5.21323800e-01 -5.91858268e-01 ... -7.91204751e-01 3.17843825e-01 -7.27105916e-01] [ 1.59191442e+00 -9.45939779e-01 1.17687285e+00 ... -1.07127166e+00 1.53917241e+00 -4.60870653e-01] [-1.73951471e+00 3.42948101e-02 -6.26868336e-03 ... -8.72315645e-01 -1.23410249e+00 -2.11703870e-02] ... [ 7.13767931e-02 -3.82626578e-02 2.53195500e+00 ... 1.58277646e-01 -1.41331255e+00 1.12478696e-01] [-5.00695705e-02 8.82714152e-01 -2.38929605e+00 ... -2.30507684e+00 -1.40452003e+00 1.25277591e+00] [-4.47678894e-01 -2.21529022e-01 -1.62894559e+00 ... 9.79099348e-02 -3.85912776e-01 -1.11046636e+00]] [[ 1.28996164e-01 -9.23263431e-01 -3.98013413e-01 ... -6.81402862e-01 1.01628736e-01 -1.92135707e-01] [ 2.08317208e+00 -1.60313833e+00 -1.15533113e+00 ... 4.55457956e-01 -9.29536462e-01 -5.33788884e-03] [-1.42372239e+00 -3.58609468e-01 1.14364016e+00 ... -4.77332979e-01 1.04135108e+00 -6.17138982e-01] ... [-4.90080059e-01 1.05827540e-01 -3.27071619e+00 ... 5.18496871e-01 -2.52692610e-01 6.06369615e-01] [ 3.75595421e-01 6.22450650e-01 1.93639243e+00 ... -1.14731145e+00 -9.76667926e-02 -2.32735157e+00] [ 4.04391855e-01 -3.25858474e-01 -3.39299768e-01 ... -5.38659170e-02 -8.11866581e-01 2.07976267e-01]] [[-9.75396261e-02 -7.07365155e-01 -3.34505141e-01 ... 5.73245026e-02 7.94672728e-01 -2.17785254e-01] [-8.68041158e-01 -1.21749699e+00 1.18281758e+00 ... 
-7.37078190e-01 7.06636012e-01 8.25575739e-02] [-4.71124351e-02 -9.52640712e-01 9.64721084e-01 ... -1.82572448e+00 1.35425925e+00 8.02625120e-01] ... [-9.51111197e-01 1.54138774e-01 -3.60804796e-01 ... 1.35150456e+00 1.05865049e+00 -2.69846380e-01] [ 1.45685393e-02 -3.35452482e-02 4.28789645e-01 ... -8.61987174e-01 5.55367947e-01 7.79686451e-01] [ 5.53660512e-01 -4.08540457e-01 -6.33731112e-02 ... 1.38013971e+00 5.23480535e-01 5.06981194e-01]]] [[[-1.02804458e+00 -7.82210588e-01 6.96618497e-01 ... -6.35893881e-01 3.15803289e-02 3.14268917e-01] [-1.13439000e+00 -4.81130034e-01 1.40639246e+00 ... 1.07191300e+00 -1.96809262e-01 1.57909226e+00] [ 1.14235497e+00 -5.68630934e-01 9.93735790e-01 ... 9.45281208e-01 -1.38804078e+00 -3.18577290e-01] ... [-1.01049876e+00 7.67962217e-01 1.36935139e+00 ... -5.91935776e-02 8.86202872e-01 -7.62958646e-01] [-3.75574529e-01 4.01168197e-01 -1.55715525e-01 ... 1.06490374e+00 6.11299157e-01 -3.61127079e-01] [ 8.03377926e-01 -1.67246193e-01 6.47434652e-01 ... -1.68143535e+00 2.53655136e-01 -2.44655818e-01]] [[-1.12144029e+00 2.72076511e+00 4.82218981e-01 ... 4.39391226e-01 1.40917075e+00 -1.60198379e+00] [-1.78437400e+00 -2.08784103e+00 -6.87254906e-01 ... -4.97272313e-01 -1.22831345e+00 7.53479600e-01] [-1.55325913e+00 -5.36260545e-01 -7.85676062e-01 ... -3.17184299e-01 1.21965694e+00 -4.71090734e-01] ... [-1.52786553e+00 -1.40178335e+00 1.27591205e+00 ... -1.04048304e-01 -1.81157994e+00 -3.48438382e-01] [-6.92144513e-01 -3.81207883e-01 8.23429525e-01 ... 1.06326711e+00 6.56793177e-01 -1.18393064e+00] [ 1.07451916e+00 -5.52157462e-01 -2.07941771e+00 ... -6.72699034e-01 -6.64261162e-01 -7.61167049e-01]] [[ 1.45961094e+00 -4.68504578e-01 4.88833994e-01 ... 3.04083496e-01 5.47819436e-01 -1.83075026e-01] [-2.22449467e-01 -9.63560045e-01 1.18021429e+00 ... 1.39044833e+00 -6.78856134e-01 9.63575959e-01] [ 2.12074065e+00 8.73531759e-01 -3.27722609e-01 ... -7.28868008e-01 3.13470095e-01 -1.04783140e-01] ... 
[ 8.83146286e-01 -6.26046136e-02 1.26014993e-01 ... -8.85336757e-01 4.51480776e-01 -1.23087394e+00] [-2.99694031e-01 -1.00167720e-02 3.52938384e-01 ... -1.18605331e-01 -2.29463053e+00 -2.33562636e+00] [-6.94016993e-01 7.09673166e-01 1.70923090e+00 ... 2.36036852e-01 -3.26246470e-01 1.49066165e-01]] [[ 5.73863089e-01 1.50668967e+00 6.11266077e-01 ... -3.84747386e-01 -1.33855000e-01 -2.34772295e-01] [ 1.99033543e-01 -4.40017655e-02 -3.15767862e-02 ... 7.88868248e-01 -2.10280761e-01 -2.80888736e-01] [ 1.87000084e+00 3.41744602e-01 1.35236311e+00 ... 1.61818361e+00 -3.71834993e-01 -2.43761033e-01] ... [-1.43517518e+00 -1.38579321e+00 8.16511929e-01 ... -5.31101450e-02 4.02775884e-01 1.91495731e-01] [ 2.04778481e+00 4.70763445e-01 1.08793773e-01 ... -4.17857707e-01 1.94585085e+00 5.06055474e-01] [-3.39607954e-01 -4.16661873e-02 1.71127343e+00 ... 1.15081921e-01 3.34342152e-01 1.29024577e+00]] [[-1.53782797e+00 9.77958798e-01 7.73826361e-01 ... -8.31587374e-01 1.41829216e+00 6.03712797e-01] [ 2.12402210e-01 -7.97038853e-01 -7.53054917e-02 ... 1.86375737e-01 9.48523462e-01 -1.92110157e+00] [ 2.62961596e-01 7.67518163e-01 -4.20182914e-01 ... 1.29888725e+00 1.11971045e+00 3.27777863e-01] ... [-1.77415383e+00 -1.01826358e+00 -5.19777894e-01 ... 4.24864620e-01 -5.15767336e-01 1.23282766e+00] [ 1.52549458e+00 4.82261211e-01 -1.71675995e-01 ... 2.95344055e-01 -3.19126457e-01 -1.50083810e-01] [ 1.72260240e-01 8.35558176e-01 -2.16551208e+00 ... 5.67609072e-01 -1.71274555e+00 2.06430626e+00]] [[-1.02155900e+00 2.32806608e-01 1.11692202e+00 ... -1.37065005e+00 4.93614748e-02 1.72617018e-01] [-1.18435711e-01 -1.64600581e-01 -2.64419913e-01 ... -3.59802842e-01 -1.60642123e+00 5.18503487e-01] [ 2.02156290e-01 -8.46878827e-01 3.12254071e-01 ... -2.20360804e+00 1.13148749e+00 -1.15398097e+00] ... [-3.92192692e-01 2.37134725e-01 7.03583419e-01 ... 8.88997912e-01 -2.64215082e-01 7.38654852e-01] [ 2.19431496e+00 -9.83508229e-02 3.07994753e-01 ... 
1.63901234e+00 -8.06951821e-01 -1.54141712e+00] [ 1.12064373e+00 -3.71023923e-01 8.77669096e-01 ... 5.09034336e-01 3.77397627e-01 -1.33542210e-01]]]]; ov_res: [[[[-7.47022569e-01 1.66065860e+00 -1.47218680e+00 ... 1.20598912e+00 4.48176444e-01 8.67222369e-01] [ 3.33831936e-01 6.86105192e-01 -9.13659155e-01 ... 5.50927997e-01 1.10987675e+00 -1.69095814e+00] [-8.98229003e-01 4.58223432e-01 -1.58977926e+00 ... -1.18696523e+00 -1.41859579e+00 6.44723415e-01] ... [-9.71065164e-01 1.64878905e+00 -5.80001473e-01 ... 5.06752670e-01 8.08261037e-01 -5.45744061e-01] [-1.81481099e+00 -1.04691768e+00 -1.00772643e+00 ... 1.50810158e+00 -8.09326589e-01 -1.14715719e+00] [ 1.09729898e+00 1.06768227e+00 4.91571516e-01 ... -1.29104614e+00 1.17644024e+00 -4.07452345e-01]] [[-3.26905072e-01 -1.18096638e+00 -1.20920278e-01 ... 4.43626940e-01 1.02855498e-02 3.22916627e-01] [ 6.08502984e-01 1.26355922e+00 1.22483528e+00 ... -2.16198012e-01 -2.23291206e+00 1.77158165e+00] [-8.87141287e-01 2.86027431e+00 1.90307987e+00 ... 6.47642076e-01 -1.68742847e+00 4.68132585e-01] ... [-7.88051903e-01 1.57757390e+00 1.80619431e+00 ... 1.24180026e-01 -3.79381105e-02 3.43037307e-01] [-8.82480741e-02 5.94846785e-01 9.36584175e-01 ... 6.94146156e-01 1.28065240e+00 -1.30205464e+00] [-5.33517301e-02 1.61498380e+00 1.22069322e-01 ... -7.44915247e-01 2.69363546e+00 -2.01682544e+00]] [[ 7.52918601e-01 -1.18822181e+00 1.20434403e+00 ... -4.20860916e-01 -4.45093393e-01 1.40069440e-01] [ 1.23667264e+00 7.74060249e-01 -3.97337288e-01 ... 1.34848850e-02 -1.18357158e+00 -1.58626795e+00] [ 1.87288240e-01 -1.01163661e+00 -8.79201055e-01 ... -1.91102409e+00 6.90164387e-01 -1.19149065e+00] ... [-1.91232526e+00 -1.54828489e+00 8.48476738e-02 ... 3.92530292e-01 6.50107980e-01 -2.20377427e-02] [ 3.98098588e-01 -7.15560257e-01 -6.22010291e-01 ... 3.00640076e-01 -2.53132224e-01 -3.03813517e-01] [-6.63983881e-01 -1.72844434e+00 1.82332909e+00 ... 
-2.49230695e+00 6.41799450e-01 -2.60457563e+00]] [[-1.03226793e+00 -1.25696242e-01 7.68863082e-01 ... 1.48262846e+00 2.10610461e+00 -6.44782722e-01] [ 2.65808012e-02 5.47999144e-01 8.21068645e-01 ... 9.34619248e-01 1.96728528e-01 -6.31193638e-01] [-7.86518395e-01 -2.09743112e-01 -9.22520012e-02 ... -7.70382702e-01 1.19501829e+00 -2.47220099e-01] ... [ 4.46156949e-01 4.22859490e-01 -3.21536750e-01 ... 7.42051780e-01 -1.67724475e-01 -2.80174017e-01] [-1.68563709e-01 4.15310621e-01 -1.48466873e+00 ... -5.03152370e-01 3.63476127e-01 1.66837418e+00] [-3.82574290e-01 -5.84470987e-01 1.00202966e+00 ... -4.18314546e-01 1.40122965e-01 -4.83138323e-01]] [[-9.03433084e-01 5.28229773e-01 9.30018574e-02 ... 1.10510981e+00 1.38054445e-01 -1.25022185e+00] [-1.16883600e+00 1.08465827e+00 -3.09696961e-02 ... -1.86674035e+00 -1.77671921e+00 -1.21089196e+00] [ 8.20465267e-01 6.94029510e-01 1.36678302e+00 ... -5.28720379e-01 7.81577468e-01 -1.62591457e-01] ... [-1.04934061e+00 1.07239135e-01 -2.86642224e-01 ... -5.70282787e-02 -1.28154933e+00 -1.49519300e+00] [ 3.89698684e-01 3.17639053e-01 4.77756709e-01 ... -6.35892451e-01 -1.27033651e+00 6.62679732e-01] [-3.70991468e-01 -1.59275472e+00 1.07403433e+00 ... 1.32492518e+00 -1.49123549e+00 -6.74905479e-01]] [[-4.50604200e-01 1.04695164e-01 -4.54607338e-01 ... 1.10759318e+00 -1.62484989e-01 -7.34351218e-01] [ 4.17199850e-01 -1.35908961e+00 -1.20786691e+00 ... -7.00236410e-02 1.33277965e+00 4.64554757e-01] [ 4.93291259e-01 7.32933760e-01 2.24605560e+00 ... 1.37804329e+00 -9.22023416e-01 1.33676529e+00] ... [ 5.50901257e-02 6.28043413e-01 3.78615022e-01 ... 2.35184455e+00 -1.35087132e+00 -5.21922410e-01] [-6.32524490e-01 4.90931600e-01 7.03265488e-01 ... 1.35673642e+00 -1.64264794e-02 -5.11695027e-01] [ 9.90640402e-01 2.70888567e-01 -3.61588925e-01 ... 6.88791990e-01 3.76996666e-01 -6.67757809e-01]]] [[[ 5.43739915e-01 -8.18269849e-01 -2.25507081e-01 ... 
7.38205552e-01 1.54462993e+00 6.37320101e-01] [-1.36973631e+00 1.68335664e+00 -1.68029690e+00 ... -5.14749587e-01 3.24885428e-01 -3.15500200e-01] [ 9.55082476e-01 -1.22881830e+00 2.70146227e+00 ... -1.31093562e+00 6.64016753e-02 -1.95453799e+00] ... [ 3.09566760e+00 8.03144038e-01 1.57569420e+00 ... 2.44700983e-01 -3.28301907e-01 -1.96423814e-01] [-5.61860383e-01 3.68315965e-01 -4.64476883e-01 ... 7.92892754e-01 -2.77163982e-01 4.38059032e-01] [-9.94996130e-01 6.68079332e-02 -7.62581706e-01 ... 3.59108627e-01 -6.03821874e-01 3.40742558e-01]] [[ 2.93152779e-01 5.44541299e-01 6.64159179e-01 ... 9.36210930e-01 -3.69451344e-01 -3.90369624e-01] [ 9.45880353e-01 -1.34029061e-01 1.07009101e+00 ... 1.42640054e+00 7.78994739e-01 -1.55093515e+00] [-4.30136770e-01 1.56972313e+00 -4.42009211e-01 ... 2.84040666e+00 2.42656097e-02 -2.92056184e-02] ... [ 7.55678236e-01 1.37064505e+00 5.54152846e-01 ... 5.92300117e-01 -2.41303712e-01 8.77801418e-01] [-8.13696861e-01 5.80902874e-01 8.20063710e-01 ... 1.38729525e+00 4.52107698e-01 2.51752555e-01] [-4.24916893e-02 -8.62566471e-01 -3.03401053e-01 ... 8.52771282e-01 1.17018843e+00 1.18528342e+00]] [[-1.11005163e+00 8.42755616e-01 2.96947926e-01 ... 1.05718935e+00 2.30209756e+00 -9.42798436e-01] [ 1.02750707e+00 -8.82816970e-01 -1.01517951e+00 ... -8.34168434e-01 7.96627700e-01 5.54606974e-01] [-2.65607797e-02 -4.88447845e-01 -1.55252075e+00 ... 3.90199453e-01 -4.76255327e-01 -1.22989431e-01] ... [-6.11414254e-01 1.60813904e+00 -2.26678109e+00 ... -8.96600425e-01 6.13999426e-01 -1.22786570e+00] [-2.34005108e-01 -1.84323204e+00 6.27477050e-01 ... -1.27444398e+00 -4.22041677e-02 -8.75140011e-01] [-3.32872123e-01 -3.03976625e-01 -7.09744513e-01 ... -3.89249086e-01 -9.34020102e-01 5.48051119e-01]] [[ 1.24221051e+00 -1.10198998e+00 2.26678804e-01 ... 8.08705866e-01 -8.23525563e-02 3.67657304e-01] [-7.00133145e-01 -2.35063767e+00 -4.14867550e-01 ... 
2.75383472e-01 -5.65652430e-01 6.38366878e-01] [ 3.87821645e-01 1.24338198e+00 -2.47188538e-01 ... -2.34131026e+00 1.06754467e-01 -4.59612548e-01] ... [ 4.86371964e-01 -8.06467175e-01 -9.14571822e-01 ... 1.20188403e+00 -1.70539463e+00 -1.32755756e+00] [ 9.12560150e-02 -9.59554255e-01 1.27094936e+00 ... 8.64541888e-01 -1.06096923e+00 -3.33827019e-01] [-1.71242923e-01 1.07964218e+00 1.35391168e-02 ... -4.11278099e-01 6.38165832e-01 -5.18164225e-02]] [[-8.47602844e-01 -1.22916496e+00 5.10456145e-01 ... -1.25539172e+00 4.21638131e-01 -1.60465941e-01] [-3.78838062e-01 -4.09913123e-01 -1.71315342e-01 ... 1.32878709e+00 5.16753376e-01 2.03064010e-01] [ 1.47925043e+00 1.61939836e+00 1.46900797e+00 ... 5.70939600e-01 -3.66503417e-01 -1.20564282e+00] ... [ 4.51667726e-01 -9.43859518e-02 3.50293994e-01 ... -1.09248590e+00 -7.09604919e-01 2.01051474e-01] [-1.56791961e+00 -2.26180959e+00 5.91359437e-01 ... 1.21814221e-01 -3.00697118e-01 -1.00911701e+00] [-4.27572988e-02 8.72059226e-01 -1.35299110e+00 ... -1.36408687e+00 1.56549489e+00 1.79600537e+00]] [[ 1.08418798e+00 -1.12625085e-01 -2.25345477e-01 ... -2.38596961e-01 -3.85079026e-01 1.88712680e+00] [ 5.42507112e-01 -7.17317164e-01 -2.30586872e-01 ... 1.02690554e+00 -1.58128366e-02 -4.35983181e-01] [-3.02379757e-01 4.11130011e-01 -1.21670175e+00 ... 5.09793043e-01 4.02995914e-01 -1.30812478e+00] ... [ 9.88995582e-02 8.34336400e-01 2.14250660e+00 ... 2.51663804e-01 -1.87282670e+00 2.13852957e-01] [ 1.80109704e+00 7.03039527e-01 4.65485513e-01 ... 3.93938690e-01 -1.30683258e-01 -1.55999029e+00] [ 2.68731058e-01 -8.89740944e-01 1.02634788e+00 ... -8.72214913e-01 3.22427869e-01 6.34069800e-01]]] [[[ 6.15031838e-01 1.50611258e+00 -2.14436324e-03 ... 1.00717723e+00 2.52036780e-01 6.67551517e-01] [ 2.72492230e-01 -4.29103285e-01 1.60467100e+00 ... 1.67666066e+00 9.16903019e-01 -2.04606801e-01] [-1.67229772e+00 -5.45317084e-02 -1.21497416e+00 ... 1.56536365e+00 8.32337737e-01 3.93925726e-01] ... 
[-5.31893373e-01 -3.58021200e-01 -8.41489673e-01 ... 9.38701928e-02 -5.39856493e-01 -1.05120444e+00] [ 7.51040518e-01 9.13946033e-01 -3.94529432e-01 ... 9.16387439e-01 7.59082735e-01 3.05959225e-01] [ 1.77440321e+00 -1.10898292e+00 5.54636002e-01 ... -1.12230730e+00 -6.36340022e-01 1.89857352e+00]] [[ 1.27017629e+00 7.34933317e-01 -4.45318222e-01 ... -6.48082495e-01 -6.06957614e-01 9.72429395e-01] [-2.51829457e+00 -1.05254972e+00 5.44601142e-01 ... 1.59759974e+00 8.04834068e-01 -1.20340312e+00] [-1.56268346e+00 3.98917377e-01 -1.26105678e+00 ... -9.00707483e-01 1.78502798e-01 1.40619242e+00] ... [-1.99088180e+00 7.57582724e-01 -1.19440711e+00 ... -1.10826147e+00 -1.13482445e-01 -1.41712916e+00] [ 6.29854441e-01 3.29279780e-01 6.28722012e-01 ... 2.42058587e+00 -2.82128621e-02 -1.13979137e+00] [-2.27454767e-01 1.70489013e+00 -1.07478344e+00 ... 1.99221164e-01 3.57197404e-01 -1.47906923e+00]] [[ 7.79107869e-01 -2.96842922e-02 -5.03252625e-01 ... -3.69549721e-01 1.11706460e+00 1.17399442e+00] [ 3.49069506e-01 -5.30577973e-02 -1.87331271e+00 ... 8.07761014e-01 -2.90214509e-01 8.29199374e-01] [ 6.89719379e-01 -8.59435558e-01 7.42245257e-01 ... 1.89721513e+00 -3.05136204e-01 8.31548989e-01] ... [ 5.47564387e-01 -7.30302274e-01 2.16792083e+00 ... -2.07665369e-01 -4.71954077e-01 1.31538463e+00] [ 1.27039862e+00 1.32874632e+00 -5.84368348e-01 ... -2.04964566e+00 -1.17553532e+00 -1.65283054e-01] [ 1.39096603e-02 1.11333680e+00 -9.52542305e-01 ... 5.02550155e-02 -3.67605329e-01 1.16042638e+00]] [[-2.57562280e+00 1.62113905e+00 2.23072380e-01 ... 2.80013502e-01 8.20039809e-01 -1.66701341e+00] [-1.54356524e-01 -8.02366510e-02 -1.40620577e+00 ... 3.98196489e-01 -8.45758736e-01 -3.81152153e-01] [ 1.83011010e-01 1.90419960e+00 7.44955599e-01 ... -4.74369317e-01 1.28646195e+00 -2.43893713e-01] ... [ 7.84791946e-01 -8.91956329e-01 -6.24448121e-01 ... 1.62363052e-01 6.99647248e-01 4.07227039e-01] [-9.12515163e-01 -3.40219826e-01 1.01565230e+00 ... 
2.40921497e+00 3.25863391e-01 6.63720548e-01] [ 6.04642928e-01 3.53955716e-01 -3.42404914e+00 ... -3.02778900e-01 -1.08379805e+00 -1.37066805e+00]] [[-2.15712488e-01 1.71223916e-02 -1.57532144e+00 ... 3.66924465e-01 6.89266026e-01 -1.74479449e+00] [-6.66498005e-01 1.30098248e+00 -9.26986098e-01 ... 7.69195974e-01 -2.41168559e-01 6.69130087e-02] [ 4.50495370e-02 3.85937363e-01 -3.50944608e-01 ... -3.25811625e-01 -1.06997395e+00 1.95988119e+00] ... [ 2.07416987e+00 -1.75090981e+00 -1.03541207e+00 ... 1.03574550e+00 1.51543427e+00 1.29548168e+00] [ 5.78051567e-01 8.46531428e-03 -2.43838467e-02 ... 5.04699469e-01 1.26654208e+00 5.21619581e-02] [ 1.25369191e+00 -8.00812721e-01 -9.23845589e-01 ... -9.01484549e-01 7.69793868e-01 1.39457181e-01]] [[ 7.92064428e-01 1.12115037e+00 2.84189075e-01 ... -2.17228271e-02 -1.20264518e+00 -7.77303398e-01] [ 5.50196588e-01 3.52634370e-01 8.13802361e-01 ... -6.87766373e-01 -1.90884280e+00 -2.93211699e-01] [ 2.56737500e-01 -1.49735725e+00 -7.03506529e-01 ... 2.28475571e+00 3.63750964e-01 -5.82939208e-01] ... [ 1.03300607e+00 -1.75805259e+00 -2.88071811e-01 ... -9.91833150e-01 -7.73904264e-01 -1.23637474e+00] [-1.86036718e+00 -1.32518172e+00 -1.35500872e+00 ... -1.31138659e+00 7.46904910e-02 -3.96621019e-01] [ 1.09899414e+00 -1.21624494e+00 5.11981785e-01 ... -6.21057391e-01 -9.68295813e-01 5.68605423e-01]]] ... [[[-8.43383133e-01 5.45552731e-01 2.89663523e-02 ... 8.69070172e-01 -1.09460413e+00 2.07051352e-01] [ 1.24306619e+00 7.15654016e-01 1.32770050e+00 ... 7.14626133e-01 2.30356932e-01 1.21590102e+00] [ 1.17285883e+00 -1.56249475e+00 -1.73802212e-01 ... -4.14450467e-01 1.05874503e+00 -6.09355807e-01] ... [ 1.07046127e+00 -2.14641601e-01 -9.15323272e-02 ... -1.15663894e-01 5.95669746e-01 7.12011755e-01] [-7.97202408e-01 1.64764568e-01 2.79694736e-01 ... 1.08938944e+00 -1.60119250e-01 -1.06542218e+00] [ 1.73122060e+00 7.43405640e-01 -1.28407693e+00 ... 
-2.35977188e-01 8.53193998e-01 1.34461641e-01]] [[-1.77548873e+00 8.22161287e-02 -1.19794883e-01 ... -5.61878443e-01 2.75773853e-01 -1.16289258e+00] [ 2.56071866e-01 -8.10378313e-01 9.42463040e-01 ... -7.66358554e-01 1.59096634e+00 -5.45072794e-01] [-1.48874581e+00 -1.24690485e+00 -1.47676289e-01 ... 1.30637765e+00 -4.11045611e-01 -2.64917165e-01] ... [-1.38357842e+00 1.16389811e+00 6.29663229e-01 ... -2.70228237e-02 -2.05569196e+00 1.73852384e-01] [-1.22377670e+00 -2.36775786e-01 1.41492260e+00 ... 1.20226249e-01 3.97462040e-01 -1.08679675e-01] [-1.75103652e+00 6.77569568e-01 4.94887203e-01 ... -6.82472467e-01 -3.69669259e-01 4.57596704e-02]] [[ 5.70589423e-01 -8.28021765e-01 -7.69666553e-01 ... -1.35969710e+00 -1.03376710e+00 1.98761508e-01] [ 1.44975651e-02 4.76949327e-02 2.34757447e+00 ... 1.50723553e+00 -8.59518468e-01 -2.03342717e-02] [ 1.40113401e+00 -9.27466512e-01 -7.08400130e-01 ... 7.37278104e-01 1.73717573e-01 1.28335655e+00] ... [ 1.02783513e+00 -1.20733820e-01 -1.46268070e+00 ... -2.34952140e+00 -1.05360651e+00 -9.73344564e-01] [-4.42404300e-01 -2.90954471e-01 1.10009718e+00 ... -2.72797561e+00 -4.29841220e-01 -3.13919723e-01] [-2.00709909e-01 1.11335909e+00 -1.00501335e+00 ... -7.36525536e-01 9.09455359e-01 -9.25665140e-01]] [[ 2.73359370e+00 2.08181888e-01 -4.04268838e-02 ... -5.19633710e-01 -1.50172460e+00 6.23678803e-01] [-1.06939387e+00 1.02470911e+00 1.09074131e-01 ... -1.48853576e+00 1.23704278e+00 1.46142173e+00] [-1.01098895e+00 -1.76647913e+00 2.98978627e-01 ... 5.74752808e-01 -6.51790917e-01 -8.00641537e-01] ... [ 8.04406285e-01 -3.75776619e-01 -6.83956265e-01 ... 1.75331450e+00 2.10896921e+00 6.75374642e-03] [-5.09929955e-01 4.02201116e-01 -1.39598027e-01 ... 1.09361136e+00 -2.64988613e+00 5.43591380e-01] [ 5.10336101e-01 5.30992866e-01 2.24462271e+00 ... 2.11598307e-01 -3.72131318e-01 -9.91365612e-02]] [[-5.11797667e-01 3.94179881e-01 8.15078989e-03 ... 
-1.31448120e-01 4.54231650e-01 -4.82120246e-01] [ 5.92685997e-01 -4.77564305e-01 9.60722100e-03 ... -5.41108847e-01 4.04918008e-02 7.37837136e-01] [ 1.25181687e+00 3.02904129e-01 3.80341798e-01 ... -4.00743306e-01 -3.38393152e-01 8.11906219e-01] ... [-7.07472980e-01 2.29521915e-02 1.62887692e-01 ... -7.08348572e-01 -1.53173363e+00 -4.37663309e-02] [-5.66426098e-01 -1.86889902e-01 4.36607003e-01 ... -6.86004639e-01 -2.66615540e-01 1.98684037e-01] [ 6.26180708e-01 -5.88813066e-01 7.16188133e-01 ... 1.32143867e+00 1.24160302e+00 9.43613410e-01]] [[-6.44203424e-01 -9.02561843e-01 1.08187151e+00 ... 2.08347988e+00 -8.80237222e-01 6.08828127e-01] [ 2.21902084e+00 -6.36068165e-01 3.61954033e-01 ... -3.10298622e-01 -6.98916793e-01 1.14815503e-01] [ 3.97024006e-01 -2.08119440e+00 2.35188663e-01 ... 1.21211267e+00 -3.37142497e-01 8.97629082e-01] ... [ 1.41667187e-01 -5.76134861e-01 1.44587374e+00 ... -7.70560801e-01 -1.03696215e+00 -9.58389163e-01] [ 2.87207007e-01 -1.33545136e+00 2.52403712e+00 ... 2.23990607e+00 -1.28717554e+00 -9.38507855e-01] [-9.56222296e-01 1.71336234e+00 7.41339386e-01 ... -4.25897300e-01 1.79539585e+00 3.21589440e-01]]] [[[ 4.09697890e-02 7.27723897e-01 -6.29183948e-01 ... 2.08355546e+00 5.77003956e-01 1.22317374e+00] [-9.45787728e-01 3.55037786e-02 -1.02669478e-03 ... 7.73114741e-01 -6.60166264e-01 5.72380245e-01] [-3.87956232e-01 1.84065962e+00 7.73836911e-01 ... -7.66472742e-02 2.48044521e-01 2.39382073e-01] ... [-1.72592044e+00 1.20357835e+00 4.52410907e-01 ... -5.70209622e-01 8.75383854e-01 -3.74793005e+00] [ 1.45037413e-01 9.36258972e-01 5.57283640e-01 ... -1.11154580e+00 -6.85967445e-01 2.56539583e-01] [ 2.82489508e-01 -1.10145815e-01 1.61156046e+00 ... 7.10040331e-01 -5.20146549e-01 -1.72508824e+00]] [[-7.13259637e-01 1.18743634e+00 -6.75989091e-01 ... -3.90428960e-01 -1.29621674e-03 -3.30155164e-01] [ 5.22351027e-01 8.81326437e-01 -7.62020111e-01 ... 
1.50500393e+00 2.19931066e-01 1.79768682e-01] [ 1.25708675e+00 1.60440832e-01 1.39081752e+00 ... 3.58777046e-01 4.04001564e-01 9.69312489e-01] ... [ 2.28321910e-01 8.52390289e-01 2.16954127e-01 ... 3.29109401e-01 1.19733560e+00 -9.48939025e-01] [ 3.85629892e-01 1.49883592e+00 3.15684199e-01 ... -4.11727548e-01 -2.76236981e-01 -1.07374573e+00] [ 3.45738083e-01 8.54072273e-02 9.41030085e-01 ... 7.21572697e-01 -7.52850771e-01 1.99419999e+00]] [[-1.52581751e+00 -7.59488404e-01 9.97132719e-01 ... -9.01188493e-01 4.11942333e-01 -2.53867686e-01] [ 2.61022663e+00 -6.93593323e-02 5.76756358e-01 ... 1.26672894e-01 1.29349804e+00 1.02446318e-01] [ 1.20061302e+00 1.89773858e-01 5.30131698e-01 ... 2.54828286e+00 1.17329967e+00 3.19059044e-01] ... [-4.43569064e-01 -1.35378897e+00 5.67425072e-01 ... -1.25200033e+00 -6.34147584e-01 -1.34041929e+00] [-5.28697550e-01 -1.23722088e+00 2.81608611e-01 ... 1.47447622e+00 -2.03309596e-01 -1.96245992e+00] [-1.33599028e-01 3.60689908e-02 2.14941934e-01 ... -1.09965491e+00 -8.81643295e-01 -8.73817503e-01]] [[-8.60245585e-01 -5.21323800e-01 -5.91858268e-01 ... -7.91204751e-01 3.17843825e-01 -7.27105916e-01] [ 1.59191442e+00 -9.45939779e-01 1.17687285e+00 ... -1.07127166e+00 1.53917241e+00 -4.60870653e-01] [-1.73951471e+00 3.42948139e-02 -6.26868056e-03 ... -8.72315645e-01 -1.23410249e+00 -2.11703852e-02] ... [ 7.13767931e-02 -3.82626541e-02 2.53195500e+00 ... 1.58277646e-01 -1.41331255e+00 1.12478696e-01] [-5.00695668e-02 8.82714152e-01 -2.38929605e+00 ... -2.30507684e+00 -1.40452003e+00 1.25277591e+00] [-4.47678894e-01 -2.21529022e-01 -1.62894559e+00 ... 9.79099348e-02 -3.85912776e-01 -1.11046636e+00]] [[ 1.28996164e-01 -9.23263431e-01 -3.98013413e-01 ... -6.81402862e-01 1.01628736e-01 -1.92135707e-01] [ 2.08317208e+00 -1.60313833e+00 -1.15533113e+00 ... 4.55457956e-01 -9.29536462e-01 -5.33788651e-03] [-1.42372239e+00 -3.58609468e-01 1.14364016e+00 ... -4.77332979e-01 1.04135108e+00 -6.17138982e-01] ... 
[-4.90080059e-01 1.05827540e-01 -3.27071619e+00 ... 5.18496871e-01 -2.52692610e-01 6.06369615e-01] [ 3.75595421e-01 6.22450650e-01 1.93639243e+00 ... -1.14731145e+00 -9.76667926e-02 -2.32735157e+00] [ 4.04391855e-01 -3.25858474e-01 -3.39299768e-01 ... -5.38659133e-02 -8.11866581e-01 2.07976267e-01]] [[-9.75396261e-02 -7.07365155e-01 -3.34505141e-01 ... 5.73245063e-02 7.94672728e-01 -2.17785254e-01] [-8.68041158e-01 -1.21749699e+00 1.18281758e+00 ... -7.37078190e-01 7.06636012e-01 8.25575739e-02] [-4.71124314e-02 -9.52640712e-01 9.64721084e-01 ... -1.82572448e+00 1.35425925e+00 8.02625120e-01] ... [-9.51111197e-01 1.54138774e-01 -3.60804796e-01 ... 1.35150456e+00 1.05865049e+00 -2.69846380e-01] [ 1.45685412e-02 -3.35452445e-02 4.28789645e-01 ... -8.61987174e-01 5.55367947e-01 7.79686451e-01] [ 5.53660512e-01 -4.08540457e-01 -6.33731112e-02 ... 1.38013971e+00 5.23480535e-01 5.06981194e-01]]] [[[-1.02804458e+00 -7.82210588e-01 6.96618497e-01 ... -6.35893881e-01 3.15803327e-02 3.14268917e-01] [-1.13439000e+00 -4.81130034e-01 1.40639246e+00 ... 1.07191300e+00 -1.96809247e-01 1.57909226e+00] [ 1.14235497e+00 -5.68630934e-01 9.93735790e-01 ... 9.45281208e-01 -1.38804078e+00 -3.18577290e-01] ... [-1.01049876e+00 7.67962217e-01 1.36935139e+00 ... -5.91935739e-02 8.86202872e-01 -7.62958646e-01] [-3.75574529e-01 4.01168197e-01 -1.55715525e-01 ... 1.06490374e+00 6.11299157e-01 -3.61127079e-01] [ 8.03377926e-01 -1.67246178e-01 6.47434652e-01 ... -1.68143535e+00 2.53655165e-01 -2.44655818e-01]] [[-1.12144029e+00 2.72076511e+00 4.82218981e-01 ... 4.39391226e-01 1.40917075e+00 -1.60198379e+00] [-1.78437400e+00 -2.08784103e+00 -6.87254906e-01 ... -4.97272313e-01 -1.22831345e+00 7.53479600e-01] [-1.55325913e+00 -5.36260545e-01 -7.85676062e-01 ... -3.17184299e-01 1.21965694e+00 -4.71090734e-01] ... [-1.52786553e+00 -1.40178335e+00 1.27591205e+00 ... -1.04048297e-01 -1.81157994e+00 -3.48438382e-01] [-6.92144513e-01 -3.81207883e-01 8.23429525e-01 ... 
1.06326711e+00 6.56793177e-01 -1.18393064e+00] [ 1.07451916e+00 -5.52157462e-01 -2.07941771e+00 ... -6.72699034e-01 -6.64261162e-01 -7.61167049e-01]] [[ 1.45961094e+00 -4.68504578e-01 4.88833994e-01 ... 3.04083496e-01 5.47819436e-01 -1.83075011e-01] [-2.22449467e-01 -9.63560045e-01 1.18021429e+00 ... 1.39044833e+00 -6.78856134e-01 9.63575959e-01] [ 2.12074065e+00 8.73531759e-01 -3.27722609e-01 ... -7.28868008e-01 3.13470095e-01 -1.04783133e-01] ... [ 8.83146286e-01 -6.26046136e-02 1.26014993e-01 ... -8.85336757e-01 4.51480776e-01 -1.23087394e+00] [-2.99694031e-01 -1.00167682e-02 3.52938384e-01 ... -1.18605323e-01 -2.29463053e+00 -2.33562636e+00] [-6.94016993e-01 7.09673166e-01 1.70923090e+00 ... 2.36036852e-01 -3.26246470e-01 1.49066180e-01]] [[ 5.73863089e-01 1.50668967e+00 6.11266077e-01 ... -3.84747386e-01 -1.33854985e-01 -2.34772295e-01] [ 1.99033543e-01 -4.40017618e-02 -3.15767825e-02 ... 7.88868248e-01 -2.10280761e-01 -2.80888736e-01] [ 1.87000084e+00 3.41744602e-01 1.35236311e+00 ... 1.61818361e+00 -3.71834993e-01 -2.43761033e-01] ... [-1.43517518e+00 -1.38579321e+00 8.16511929e-01 ... -5.31101413e-02 4.02775884e-01 1.91495746e-01] [ 2.04778481e+00 4.70763445e-01 1.08793773e-01 ... -4.17857707e-01 1.94585085e+00 5.06055474e-01] [-3.39607954e-01 -4.16661836e-02 1.71127343e+00 ... 1.15081921e-01 3.34342152e-01 1.29024577e+00]] [[-1.53782797e+00 9.77958798e-01 7.73826361e-01 ... -8.31587374e-01 1.41829216e+00 6.03712797e-01] [ 2.12402210e-01 -7.97038853e-01 -7.53054917e-02 ... 1.86375752e-01 9.48523462e-01 -1.92110157e+00] [ 2.62961596e-01 7.67518163e-01 -4.20182914e-01 ... 1.29888725e+00 1.11971045e+00 3.27777863e-01] ... [-1.77415383e+00 -1.01826358e+00 -5.19777894e-01 ... 4.24864620e-01 -5.15767336e-01 1.23282766e+00] [ 1.52549458e+00 4.82261211e-01 -1.71675980e-01 ... 2.95344055e-01 -3.19126457e-01 -1.50083795e-01] [ 1.72260255e-01 8.35558176e-01 -2.16551208e+00 ... 
5.67609072e-01 -1.71274555e+00 2.06430626e+00]] [[-1.02155900e+00 2.32806623e-01 1.11692202e+00 ... -1.37065005e+00 4.93614785e-02 1.72617033e-01] [-1.18435703e-01 -1.64600566e-01 -2.64419913e-01 ... -3.59802842e-01 -1.60642123e+00 5.18503487e-01] [ 2.02156305e-01 -8.46878827e-01 3.12254071e-01 ... -2.20360804e+00 1.13148749e+00 -1.15398097e+00] ... [-3.92192692e-01 2.37134725e-01 7.03583419e-01 ... 8.88997912e-01 -2.64215082e-01 7.38654852e-01] [ 2.19431496e+00 -9.83508155e-02 3.07994753e-01 ... 1.63901234e+00 -8.06951821e-01 -1.54141712e+00] [ 1.12064373e+00 -3.71023923e-01 8.77669096e-01 ... 5.09034336e-01 3.77397627e-01 -1.33542210e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 4} - params:{'n_groups': 3, 'eps': 1.0} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- 4 graph(%self : __torch__.test_group_norm.___torch_mangle_4600.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.]() %self.weight : NoneType = prim::Constant() %self.n_groups : int = prim::Constant[value=3]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %5, %self.weight, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 
%19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%6, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.weight, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) lement_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4833295 (Squeeze_4833294[0]:i64[], Constant_4833249[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4833295': Arguments do not have the same element type (arg0 element type: 
i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4836092 (Squeeze_4836091[0]:i64[], Constant_4836041[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4836092': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4838579 (Squeeze_4838578[0]:i64[], Constant_4838529[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4838579': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputsfw_re: [[[[ 5.58792949e-01 -3.65527831e-02 6.59737408e-01 ... -1.33785099e-01 7.88705051e-01 1.24809349e+00] [ 5.86700499e-01 -8.27935696e-01 -1.59949228e-01 ... 1.21813178e+00 -6.88431144e-01 -2.09119990e-01] [-2.18100452e+00 2.07695508e+00 -3.89057726e-01 ... -9.70488727e-01 -1.18753040e+00 7.88735926e-01] ... [ 8.72770846e-02 -8.56626332e-01 9.83422577e-01 ... -5.89089930e-01 8.12381744e-01 -3.31445277e-01] [ 7.79528141e-01 -3.92840147e-01 -6.89092755e-01 ... -4.77582902e-01 -4.96783376e-01 3.62820536e-01] [ 7.45702863e-01 1.25124216e-01 -2.34528393e-01 ... -5.82137227e-01 -1.55129564e+00 4.09344107e-01]] [[-7.37468898e-01 -3.52294743e-01 3.35293263e-01 ... -4.49066371e-01 -1.28883672e+00 1.85120553e-01] [ 1.34521872e-01 1.13825373e-01 -2.08466798e-01 ... -2.95489043e-01 4.30681497e-01 -6.22156203e-01] [-4.36986797e-02 3.88517112e-01 1.35714972e+00 ... 6.94203079e-01 4.93890554e-01 -1.37764561e+00] ... [-5.02853692e-01 -6.75871432e-01 4.09127660e-02 ... 1.48064148e+00 4.63248789e-01 -8.23119164e-01] [ 1.29297042e+00 -2.26352319e-01 1.77466333e+00 ... 
-1.34679878e+00 -5.26962757e-01 -7.33609423e-02] [ 3.22575092e-01 -5.22369087e-01 6.61633790e-01 ... 8.78251910e-01 -1.48789868e-01 -7.73032963e-01]] [[ 5.27985930e-01 6.27144054e-02 3.10307622e-01 ... -3.30187321e-01 1.40560615e+00 4.23553377e-01] [-1.04281104e+00 2.29003191e-01 -3.68448868e-02 ... 6.63793206e-01 -7.31374800e-01 8.32354248e-01] [-1.78778812e-01 -1.46017468e+00 1.07056379e-01 ... -4.60837364e-01 3.61959040e-01 1.36240553e-02] ... [ 4.41173434e-01 3.30920011e-01 2.79580683e-01 ... -3.49676132e-01 4.39904004e-01 7.78380215e-01] [-3.89230102e-01 1.76466811e+00 -1.33340806e-02 ... 2.77689964e-01 -7.82808065e-01 6.88673913e-01] [ 1.12957084e+00 -3.84556711e-01 2.91577488e-01 ... 1.15161538e+00 -9.88345802e-01 1.28243339e+00]] [[ 1.73335326e+00 -3.24015468e-01 9.32891548e-01 ... -1.79909623e+00 -7.11885929e-01 -3.43050480e-01] [ 1.45820248e+00 -2.16705471e-01 -9.05542821e-02 ... -1.77756831e-01 -5.23899853e-01 -5.83890259e-01] [ 3.32584567e-02 6.55764937e-01 1.61901808e+00 ... -2.38419846e-01 2.87165314e-01 3.66440453e-02] ... [-6.91091835e-01 1.26610145e-01 -4.30252373e-01 ... 5.84849477e-01 -7.81990409e-01 -3.59334610e-02] [ 1.44837189e+00 3.07620376e-01 8.99519920e-01 ... 9.84875858e-01 -4.09983099e-01 1.06800370e-01] [ 9.75220799e-01 -5.93902767e-01 -1.16335821e+00 ... -5.58513463e-01 4.15829599e-01 -3.69138300e-01]] [[ 6.35786206e-02 -5.99080682e-01 -6.21889830e-01 ... 9.47249383e-02 5.83878696e-01 -9.66137648e-01] [-1.39183909e-01 -3.35723251e-01 3.17641139e-01 ... 7.33675480e-01 2.95470417e-01 -1.36430794e-02] [ 3.56456973e-02 -5.67749381e-01 -3.27703923e-01 ... 1.42305702e-01 4.51203316e-01 8.92947078e-01] ... [-1.30960429e+00 5.24626255e-01 1.12827802e+00 ... -1.46726042e-01 2.12230593e-01 7.70810246e-01] [ 9.31066036e-01 -2.15505380e-02 9.64352608e-01 ... -6.34364843e-01 9.40226376e-01 4.57938641e-01] [-7.00723052e-01 -2.12886542e-01 -9.75185037e-01 ... 
7.91693032e-02 -5.62958539e-01 -3.37331533e-01]] [[-1.71819478e-01 3.85207087e-01 7.38579810e-01 ... -7.20867157e-01 -1.10551512e+00 2.38949701e-01] [ 4.86238807e-01 8.51517677e-01 -3.19452971e-01 ... -1.04509592e+00 2.85731971e-01 -8.86405408e-01] [ 4.42459792e-01 -2.72701889e-01 -7.03114510e-01 ... -4.03187662e-01 -3.00226986e-01 6.11497998e-01] ... [ 1.02603756e-01 -2.94073701e-01 -1.01314449e+00 ... -1.10568225e-01 -1.18982005e+00 -7.27617204e-01] [ 7.40577400e-01 -1.58221588e-01 -4.64205861e-01 ... -1.36232400e+00 -1.93103522e-01 -1.30643249e+00] [ 1.41446605e-01 2.17165455e-01 2.71027803e-01 ... 1.05338134e-01 5.45678020e-01 3.68316948e-01]]] [[[-7.00328350e-01 -5.40522695e-01 1.40211821e+00 ... -1.20243883e+00 8.83984625e-01 4.16035652e-01] [ 7.24923611e-01 -1.76717389e+00 3.70525420e-02 ... -8.89881030e-02 5.84717035e-01 3.25422257e-01] [ 1.60104662e-01 7.78033733e-01 -2.09679246e-01 ... -8.12060609e-02 6.80530146e-02 1.91209406e-01] ... [-5.47586501e-01 4.67252553e-01 -9.24839616e-01 ... 3.81387621e-01 -4.44719732e-01 7.69609749e-01] [ 3.52068618e-02 1.44396186e-01 -7.16999471e-01 ... -5.35135388e-01 -7.07915425e-01 5.60338378e-01] [-8.20922911e-01 -7.90127397e-01 2.43667468e-01 ... 2.02001736e-01 1.87922418e+00 8.04325759e-01]] [[-3.00397962e-01 1.97823808e-01 3.37376922e-01 ... 1.10373914e+00 -1.69056520e-01 5.33559203e-01] [ 1.17349100e+00 2.29946956e-01 -7.90798783e-01 ... 5.53903580e-01 -6.16077423e-01 -6.77601218e-01] [-1.16640769e-01 6.92963779e-01 5.76193869e-01 ... -1.58573389e-01 6.08623074e-03 -8.99179757e-01] ... [ 2.78697550e-01 -1.44706559e+00 -2.96898425e-01 ... -5.08268103e-02 7.74471760e-01 -9.87901390e-02] [-1.05063391e+00 1.00507832e+00 1.32180288e-01 ... -6.18068799e-02 -2.83098251e-01 -8.27241167e-02] [ 1.03930974e+00 -9.52690840e-01 -2.38515884e-01 ... 1.09095776e+00 -8.10927451e-01 6.78549945e-01]] [[-6.22075677e-01 4.70203340e-01 -6.23347998e-01 ... 
3.78690749e-01 1.20555460e-01 -5.54353893e-01] [-1.53864765e+00 -7.84213901e-01 -7.73037272e-03 ... -1.38078988e+00 -3.82585377e-01 1.50184739e+00] [ 2.88002223e-01 2.91176707e-01 5.38216352e-01 ... 7.00649738e-01 -4.11775291e-01 -5.81481397e-01] ... [ 4.20913815e-01 1.20665169e+00 -1.06793153e+00 ... -4.97235835e-01 -3.17991197e-01 -1.45128801e-01] [ 7.90075779e-01 -5.27454197e-01 -9.16049480e-01 ... -1.39921319e+00 7.15658903e-01 1.05517435e+00] [ 1.86326608e-01 2.90259331e-01 -1.61096722e-01 ... -5.79898395e-02 1.35423005e+00 4.50399637e-01]] [[-1.25155553e-01 1.44200444e-01 4.59509522e-01 ... 6.32436350e-02 8.32002759e-01 -2.56986797e-01] [-5.60897470e-01 2.45139688e-01 1.97629881e+00 ... 1.42974377e-01 -4.36909765e-01 -3.79844099e-01] [-4.97886151e-01 4.27618951e-01 2.19595551e-01 ... -9.11462188e-01 6.92074776e-01 -7.60537863e-01] ... [-5.60075939e-01 -7.72015393e-01 -8.98663476e-02 ... 2.41497636e-01 -3.15030701e-02 -8.19346428e-01] [-4.80162680e-01 3.26463103e-01 1.39139795e+00 ... 7.32071579e-01 6.55306756e-01 -1.74863076e+00] [ 1.56113446e-01 3.04497063e-01 -2.49029681e-01 ... 5.46839952e-01 1.57989907e+00 -7.49828637e-01]] [[ 1.11431575e+00 6.87888205e-01 2.27970496e-01 ... -2.77536601e-01 -1.05578518e-02 7.97797590e-02] [ 3.41563433e-01 5.56936525e-02 1.00126660e+00 ... -6.85917318e-01 -1.42140543e+00 2.20153272e-01] [-3.53693604e-01 -6.09014332e-01 1.08746767e-01 ... -6.39089882e-01 -3.49353701e-01 -1.76675916e-02] ... [-4.10856247e-01 5.05082428e-01 3.69658887e-01 ... 7.43861794e-01 -3.18785042e-01 -7.71897316e-01] [-3.71387362e-01 -5.60453832e-01 6.47354841e-01 ... 1.63479045e-01 9.99429405e-01 1.01305908e-02] [-2.93804020e-01 1.19632292e+00 9.10782456e-01 ... 2.36214072e-01 5.30604482e-01 2.49819264e-01]] [[ 4.81532693e-01 -7.50523627e-01 -3.99385929e-01 ... -5.53251624e-01 -7.30025947e-01 4.93238926e-01] [ 3.37270588e-01 5.44719398e-01 -2.49683231e-01 ... 
-1.69304574e+00 3.78163457e-01 2.46087819e-01] [ 6.96117356e-02 -4.10837114e-01 9.86570492e-02 ... 1.07488620e+00 1.40562624e-01 -1.17102973e-01] ... [ 1.41870308e+00 2.22831443e-01 -2.05461338e-01 ... -5.91296732e-01 -1.89336911e-01 -9.58418325e-02] [-1.19525468e+00 -1.29495943e-02 7.89410591e-01 ... -4.70787644e-01 -7.90021062e-01 -2.23279983e-01] [ 6.58161521e-01 -3.68521847e-02 -1.88270971e-01 ... -5.83506286e-01 7.96072721e-01 -1.37463009e+00]]] [[[-6.78433850e-02 -1.13115907e-01 6.23436093e-01 ... -8.42426300e-01 -2.01016385e-02 6.55666366e-02] [-2.55042702e-01 -3.95209461e-01 -2.76347607e-01 ... -2.23852769e-01 7.98166513e-01 -2.23875493e-01] [ 5.11911392e-01 -4.85506386e-01 -1.97507441e-01 ... 1.71588528e+00 -1.08786352e-01 1.01483130e+00] ... [-5.39328873e-01 9.15587306e-01 1.01326048e+00 ... -1.00957894e+00 3.85783851e-01 -3.07261050e-02] [-5.15971601e-01 3.54431003e-01 -1.32730693e-01 ... 2.40379229e-01 6.99823737e-01 4.82984811e-01] [-8.43160152e-02 -1.41680419e+00 1.08738387e+00 ... 6.67905092e-01 -3.53755981e-01 2.35557005e-01]] [[-4.11036551e-01 1.17444366e-01 3.86793882e-01 ... 9.83693302e-01 -1.15918994e+00 -5.30549586e-01] [ 6.06035590e-01 1.21160232e-01 -4.45738077e-01 ... 2.45090351e-01 6.62715912e-01 3.80396456e-01] [ 8.22210193e-01 6.92854404e-01 6.00849330e-01 ... -2.66122073e-01 -9.37699676e-01 5.18995821e-01] ... [-9.20997143e-01 -2.36588731e-01 -9.41351593e-01 ... 7.02677846e-01 9.62165058e-01 3.00067157e-01] [-1.66872039e-01 -1.52551103e+00 -6.63370311e-01 ... -7.58244097e-01 3.00784148e-02 -2.21797545e-02] [-1.26128554e+00 9.30320561e-01 2.05277824e+00 ... 2.47052372e-01 9.63993967e-01 7.26654470e-01]] [[-5.42792678e-02 1.26042032e+00 1.17646050e+00 ... 3.27234775e-01 1.10965610e+00 6.07574284e-01] [-5.09837091e-01 -1.93280354e-01 -1.87950265e-02 ... 1.01159441e+00 -1.29680783e-01 3.45959328e-02] [-5.14339626e-01 -7.27291107e-02 4.24496680e-01 ... 2.44026542e-01 1.36823416e-01 -1.51371288e+00] ... 
[ 6.42987847e-01 -3.50236982e-01 -2.72359014e-01 ... -9.66241062e-01 -1.96108654e-01 6.94313765e-01] [ 8.01607251e-01 -3.02374810e-01 1.79342583e-01 ... 6.78967714e-01 2.69159049e-01 -7.14974105e-01] [-4.31869745e-01 7.13200197e-02 1.27804625e+00 ... -4.43947613e-01 -1.67564988e+00 -6.05703175e-01]] [[ 9.46991324e-01 -1.28223360e+00 -2.85925299e-01 ... -1.46653354e-01 -9.88476425e-02 5.26490770e-02] [-9.80907619e-01 1.02894628e+00 7.20968068e-01 ... -4.73782271e-02 -9.21693325e-01 -1.94252178e-01] [-9.26508665e-01 -1.89800143e-01 -3.75876784e-01 ... -1.05891848e+00 -3.87659550e-01 -1.07979871e-01] ... [-9.22670305e-01 -6.59385026e-01 2.06693653e-02 ... 3.07703286e-01 -4.11489338e-01 -5.38771808e-01] [ 7.66913518e-02 -6.28674388e-01 1.49608418e-01 ... -5.52569702e-02 8.14789534e-01 2.38641217e-01] [-1.91007748e-01 -2.68366337e-01 -5.29901326e-01 ... 7.42785513e-01 3.18150610e-01 9.62556899e-01]] [[-3.76783758e-01 2.35551983e-01 -8.04420263e-02 ... 1.14769928e-01 5.63425362e-01 1.04847550e+00] [-5.05932689e-01 1.91180959e-01 9.67064023e-01 ... -5.59405088e-01 1.42946744e+00 -8.85979235e-01] [-9.72349346e-01 2.66579866e-01 -9.85669121e-02 ... -1.94713678e-02 -6.77616224e-02 5.32372631e-02] ... [-2.98381269e-01 1.15074468e+00 -4.56333786e-01 ... 1.29754588e-01 -7.68426001e-01 -1.45074576e-01] [-2.65333086e-01 8.46886873e-01 5.85937440e-01 ... -5.87527275e-01 8.37947667e-01 2.36946076e-01] [ 1.29446574e-02 -3.65453601e-01 7.46247619e-02 ... -6.82541788e-01 2.91312128e-01 -5.45640647e-01]] [[ 4.80311185e-01 4.93877292e-01 -1.61229804e-01 ... -1.97945967e-01 -3.51819634e-01 -6.50663435e-01] [-9.16749656e-01 -2.98726320e-01 -9.51824561e-02 ... 1.09249413e+00 2.07886279e-01 8.55899930e-01] [-1.62632391e-02 8.86489749e-01 -8.78053725e-01 ... -6.31314754e-01 -1.16238749e+00 5.20337403e-01] ... [-1.03391416e-01 -1.26172698e+00 -1.72127768e-01 ... -4.17714894e-01 1.19830108e+00 7.27866054e-01] [ 1.81821096e+00 8.34196329e-01 -1.60667285e-01 ... 
-1.51586264e-01 -3.09076428e-01 2.16667593e-01] [-2.24484667e-01 -9.51120377e-01 -3.71638268e-01 ... -1.06559229e+00 2.59901702e-01 5.08200586e-01]]] ... [[[-5.91760390e-02 5.35744071e-01 -6.66150093e-01 ... 4.73787576e-01 6.44320905e-01 -2.05517501e-01] [ 9.35535580e-02 -6.24150515e-01 5.14869690e-01 ... -6.91028833e-01 -8.45238566e-01 -3.48636001e-01] [ 1.63798130e+00 -3.36606860e-01 4.08548862e-01 ... 3.05489693e-02 -3.38138878e-01 -1.94871083e-01] ... [-3.24217111e-01 -2.00510907e+00 1.19935918e+00 ... -7.22757075e-03 3.25346500e-01 7.02966332e-01] [ 2.01735780e-01 -3.31502646e-01 -6.75379276e-01 ... 1.31238270e+00 2.19336022e-02 2.15987667e-01] [ 1.27422786e+00 -1.03545439e+00 -1.60071266e+00 ... 5.45574486e-01 -8.54279220e-01 -5.29473484e-01]] [[ 7.01197207e-01 -8.42499077e-01 -3.51983100e-01 ... 1.90058842e-01 -9.20236856e-03 1.16771305e+00] [-2.38173157e-01 -5.79894006e-01 1.48013353e-01 ... 1.10663436e-01 1.03946589e-01 1.23892248e+00] [ 6.02903426e-01 -2.40994349e-01 1.44797885e+00 ... -5.86178154e-02 -2.50835776e-01 -5.26774764e-01] ... [-2.03948587e-01 5.03191769e-01 -1.74350953e+00 ... 7.76892304e-01 6.71908796e-01 -7.97637999e-01] [-4.84682769e-01 -7.14057148e-01 -3.04405063e-01 ... 1.34891599e-01 -5.35240114e-01 -9.22582597e-02] [-1.00272581e-01 1.65354490e-01 5.06135404e-01 ... 4.68705088e-01 -7.69703507e-01 2.50612833e-02]] [[ 4.02386308e-01 -3.49682510e-01 -8.15796077e-01 ... 5.80658972e-01 -3.49812359e-01 -1.39121389e+00] [ 7.25278258e-01 -2.94044107e-01 2.63695598e-01 ... -4.66740549e-01 6.43554032e-01 9.08224098e-03] [ 4.69319522e-01 -6.17670640e-02 -2.43195727e-01 ... -8.37063849e-01 3.84794712e-01 -5.66252708e-01] ... [-3.70481968e-01 2.51415402e-01 3.33371133e-01 ... 2.01444894e-01 -2.59055614e-01 1.49086273e+00] [-9.34146821e-01 -1.45923495e-01 1.12670735e-01 ... 9.27482426e-01 7.71814346e-01 6.81833386e-01] [ 1.47024965e+00 -3.06211770e-01 2.58633852e-01 ... 
1.68476331e+00 1.70912862e-01 1.56165883e-01]] [[ 1.65360737e+00 6.03637695e-01 9.64515865e-01 ... 1.52880609e-01 2.21681848e-01 5.81087291e-01] [-6.98553622e-01 3.24388951e-01 -4.15047705e-01 ... 5.52829444e-01 -9.05273736e-01 -1.44426182e-01] [-4.14603084e-01 -8.56423080e-02 -1.57583937e-01 ... -1.50083709e+00 4.21029985e-01 -7.04873860e-01] ... [ 3.93975466e-01 5.89768231e-01 8.53216052e-01 ... -1.09809697e+00 6.97838962e-01 1.13761187e+00] [-4.92130071e-01 -1.36370194e+00 -3.71447086e-01 ... 6.82289243e-01 8.40392485e-02 6.54571533e-01] [ 5.78930438e-01 -1.50736058e+00 4.67967451e-01 ... -1.23556602e+00 6.26852274e-01 -2.25485742e-01]] [[-2.02678040e-01 5.08030891e-01 -7.30985999e-01 ... 1.28562659e-01 -3.46179843e-01 9.03698325e-01] [-8.63114536e-01 2.79589295e-01 1.38693810e-01 ... -9.48507965e-01 -2.53147721e-01 6.21908545e-01] [-3.71404469e-01 -6.78660274e-01 -1.89660057e-01 ... -1.25611767e-01 -2.59729922e-01 -9.23669696e-01] ... [-8.12365174e-01 1.64026654e+00 1.08588922e+00 ... -6.09178066e-01 -8.86213593e-03 1.81734309e-01] [ 9.30364653e-02 4.11004573e-01 4.84565705e-01 ... 2.79486418e-01 2.60692179e-01 -8.47774327e-01] [-4.86503206e-02 7.19063461e-01 -3.62459958e-01 ... 1.33381522e+00 5.88469148e-01 6.99876249e-01]] [[ 1.85349357e+00 -1.16734993e+00 -5.61921418e-01 ... -3.67712259e-01 5.71112275e-01 -2.18204677e-01] [ 2.39721155e+00 6.71365142e-01 -1.84713340e+00 ... 1.27196789e+00 4.12461817e-01 2.64784068e-01] [-7.58309543e-01 1.03867555e+00 1.61135182e-01 ... -7.15583980e-01 -2.96157867e-01 6.95186138e-01] ... [-1.62590548e-01 2.16586456e-01 2.91431636e-01 ... 1.16299659e-01 -1.78949726e+00 -1.06482649e+00] [-6.04173839e-02 3.03557925e-02 4.83974099e-01 ... -1.26632607e+00 -4.81503606e-01 2.34500498e-01] [-1.11787009e+00 -5.21921515e-01 -1.05438125e+00 ... 7.58421957e-01 7.63431489e-01 -1.55171111e-01]]] [[[ 8.39790523e-01 -1.20318145e-01 5.45149386e-01 ... 
6.92370951e-01 8.45047593e-01 -9.02507722e-01] [-7.04238340e-02 -5.25301248e-02 -7.50549734e-01 ... 1.06651223e+00 -6.29677296e-01 4.09356445e-01] [ 4.20697927e-01 -6.61711931e-01 -6.93833947e-01 ... -1.32041788e-02 3.10279906e-01 -1.21897995e+00] ... [-6.37706459e-01 9.95504081e-01 -8.85328464e-03 ... -7.90878594e-01 2.21068799e-01 1.13928747e+00] [-6.67882204e-01 4.45106685e-01 -7.87220716e-01 ... 6.44935146e-02 2.99043894e-01 -6.93069935e-01] [-7.71859944e-01 6.35135531e-01 1.96914807e-01 ... 8.26559484e-01 6.11899495e-01 -1.66554332e-01]] [[ 8.86006057e-01 8.00035477e-01 -3.96931581e-02 ... 9.76777434e-01 8.61979544e-01 -5.17968118e-01] [-3.91313471e-02 -7.65132964e-01 -7.66286254e-01 ... 6.47317410e-01 1.09400797e+00 5.52692294e-01] [ 2.12938428e-01 -6.28313184e-01 -1.68178499e+00 ... -8.08282554e-01 4.63891774e-01 -1.31642580e+00] ... [-2.87824571e-01 -9.33696032e-01 -6.84830725e-01 ... -6.86888516e-01 -1.76413983e-01 1.29225981e+00] [-1.02022755e+00 -5.91318727e-01 8.16569388e-01 ... 9.37403738e-01 -1.04583728e+00 -3.96351725e-01] [ 1.17405510e+00 2.72688121e-01 -1.53253567e+00 ... -3.25096995e-01 3.89622509e-01 -4.08651412e-01]] [[-8.60227764e-01 -2.15089276e-01 3.82984996e-01 ... -1.41133261e+00 -5.39216340e-01 -2.01030135e-01] [ 3.96022171e-01 -1.11393511e+00 1.27183175e+00 ... -1.22201431e+00 -1.22438121e+00 5.22569343e-02] [ 5.38088322e-01 -1.06331277e+00 -9.70882773e-02 ... 8.18090498e-01 -8.54649603e-01 -1.02767122e+00] ... [ 1.95083797e-01 1.93224454e+00 1.17854357e+00 ... -6.76306009e-01 -3.06760132e-01 -1.18574524e+00] [ 6.69418499e-02 7.60171831e-01 -6.82732761e-01 ... 2.23652516e-02 -1.90496638e-01 1.47646892e+00] [ 8.06668997e-01 -2.85302877e-01 -7.32564270e-01 ... -5.51169991e-01 -1.14146151e-01 9.86497700e-02]] [[-1.68579030e+00 -1.70025420e+00 6.48097042e-03 ... 1.31410623e+00 -2.81757474e-01 6.81395710e-01] [ 2.84497082e-01 5.76054037e-01 3.98220032e-01 ... 
5.19828379e-01 2.95799941e-01 6.78573489e-01] [ 5.39496303e-01 -3.35995525e-01 -9.39865708e-02 ... -2.53895558e-02 -1.66154921e+00 -1.23192132e+00] ... [ 2.58416981e-01 -6.52447343e-01 2.64554262e-01 ... -1.31344393e-01 7.57747173e-01 -5.57110965e-01] [-5.80965951e-02 8.51163268e-01 6.41205966e-01 ... 8.54858533e-02 4.66009915e-01 8.36809993e-01] [ 1.89673841e-01 -2.70031467e-02 -7.30624735e-01 ... 1.75201714e+00 2.00291023e-01 1.96414754e-01]] [[-8.52320313e-01 -5.06289124e-01 2.71068007e-01 ... 5.70865810e-01 1.23183298e+00 2.72065163e-01] [-6.42688632e-01 2.40947366e-01 -2.02729642e-01 ... -6.97970986e-01 5.84625602e-01 -1.29545355e+00] [-5.45448549e-02 -2.70070527e-02 8.51162612e-01 ... 5.59113443e-01 -6.24708354e-01 -5.95188260e-01] ... [ 1.34688377e-01 8.92513037e-01 5.94824813e-02 ... -5.19316733e-01 -2.75800973e-01 7.48488829e-02] [-3.50925654e-01 -2.16544181e-01 -4.75447565e-01 ... 7.09139049e-01 -3.53493869e-01 -2.43420303e-01] [-1.02857113e-01 1.53904498e+00 5.61590016e-01 ... -4.24361616e-01 1.11992061e+00 1.07637298e+00]] [[ 1.55426455e+00 5.77952325e-01 -3.13415378e-01 ... -3.77818942e-01 -1.45551026e-01 -7.42904425e-01] [-1.37150824e-01 -2.55088508e-01 5.17721355e-01 ... 6.02268338e-01 2.38344416e-01 -1.02506220e+00] [-1.53151476e+00 1.42112747e-01 7.03812778e-01 ... 1.76837444e-02 1.07756644e-01 -7.44433999e-01] ... [-4.13306743e-01 5.39088726e-01 -1.59468472e-01 ... 3.47408891e-01 -1.47187114e+00 -4.59729791e-01] [-9.75849628e-02 -2.00979352e-01 1.12870209e-01 ... 1.00363481e+00 -1.36253446e-01 -3.72586176e-02] [ 9.16970491e-01 -7.27690160e-02 1.60428381e+00 ... -8.68640184e-01 5.08682549e-01 3.85168195e-01]]] [[[ 3.76040220e-01 -3.93980145e-01 5.89990854e-01 ... 1.54438838e-01 5.57577074e-01 4.85645887e-03] [-2.60350049e-01 9.50158656e-01 5.83549023e-01 ... 6.64148554e-02 -5.31926341e-02 -1.22878206e+00] [-1.09475337e-01 -3.34342271e-01 -1.42969370e+00 ... -7.43671775e-01 -9.29552972e-01 -3.18157256e-01] ... 
[-9.64829028e-02 8.43969703e-01 -8.78223896e-01 ... -6.49552122e-02 -7.84377575e-01 2.13189982e-02] [-1.84786017e-03 -4.03181463e-01 -3.09313864e-01 ... 1.82977077e-02 -9.65487063e-02 7.20689833e-01] [ 1.72942847e-01 -4.25914317e-01 1.06655574e+00 ... 3.78150642e-02 -3.31384957e-01 -1.65039793e-01]] [[ 1.03092062e+00 5.07860072e-02 -1.27348113e+00 ... 6.90989733e-01 -4.95206237e-01 -4.36616957e-01] [ 4.60612416e-01 -1.31083280e-01 -3.12494487e-01 ... -8.94525766e-01 -6.27788186e-01 -5.32558799e-01] [ 1.27801526e+00 1.71647418e+00 8.85402739e-01 ... 5.91789901e-01 -3.01099718e-01 7.30017245e-01] ... [ 6.79241657e-01 3.01894218e-01 7.13041246e-01 ... -7.66619384e-01 1.35253453e+00 -6.65173829e-01] [-2.59000599e-01 6.30001962e-01 2.16518417e-01 ... 1.05131054e+00 -5.85914068e-02 1.12284529e+00] [-7.70124018e-01 -1.67737246e+00 5.03589928e-01 ... 8.42430234e-01 9.44391251e-01 -9.74932671e-01]] [[-1.86960131e-01 1.06914055e+00 -1.37761009e+00 ... -1.08901787e+00 1.20984882e-01 8.32989931e-01] [ 6.58914685e-01 5.98456144e-01 2.79629469e-01 ... -8.44483674e-01 -6.61426365e-01 1.58906788e-01] [-1.23213246e-01 2.19566733e-01 -1.17949152e+00 ... 4.32140172e-01 1.52130234e+00 4.64187264e-01] ... [-8.89747083e-01 -1.55310476e+00 1.38701308e+00 ... -2.02703738e+00 -8.54627252e-01 1.32585216e+00] [ 9.52865779e-01 4.74868506e-01 2.96099503e-02 ... 6.46992028e-01 -1.00116804e-01 3.64218473e-01] [-3.64262834e-02 1.45788360e-02 9.84262526e-01 ... -6.29305422e-01 -3.73204529e-01 4.31618124e-01]] [[ 4.61611040e-02 -7.28315532e-01 3.58270258e-01 ... -2.83180535e-01 -2.60605291e-02 6.92830920e-01] [-6.43637478e-01 -1.43531299e+00 5.07616282e-01 ... -2.66523436e-02 -1.36908090e+00 -5.42944968e-01] [ 4.23887014e-01 5.06262183e-01 4.64206368e-01 ... 9.06384766e-01 4.70605254e-01 2.78208882e-01] ... [-3.54649872e-01 1.28157943e-01 1.97901458e-01 ... 8.55141699e-01 -3.02804774e-03 -4.15388465e-01] [-5.06225169e-01 9.19536889e-01 2.44103119e-01 ... 
1.38163936e+00 -2.13035289e-03 7.60316491e-01] [ 3.70910287e-01 2.57118702e-01 -1.03468955e+00 ... -3.16281050e-01 2.74154067e-01 -1.47971790e-03]] [[-9.88082409e-01 -8.40267301e-01 2.65441854e-02 ... -2.77163446e-01 8.16141665e-01 3.50273162e-01] [ 4.25784700e-02 1.27669311e+00 -3.44593138e-01 ... -9.51120481e-02 -6.95406139e-01 -4.90137964e-01] [ 6.55582368e-01 -6.49750888e-01 -1.37548238e-01 ... 5.34743428e-01 4.93308693e-01 -6.87657893e-01] ... [-1.30529642e+00 5.44968843e-01 7.99827874e-01 ... -1.45127010e+00 -4.25904602e-01 -4.95293252e-02] [ 9.10477489e-02 3.44691098e-01 -1.47256628e-01 ... -3.80386531e-01 1.09352641e-01 6.99534491e-02] [-4.29555923e-01 8.71251374e-02 -5.06955147e-01 ... -8.12891603e-01 -1.33815646e+00 -1.05454373e+00]] [[ 3.58145684e-01 -3.34152997e-01 -3.62531021e-02 ... -2.82173213e-02 -1.25785995e+00 -4.70821708e-01] [ 2.75634397e-02 1.58930063e-01 5.68900168e-01 ... -2.81680264e-02 -8.81930709e-01 6.51051462e-01] [-5.72768211e-01 1.28905642e+00 -5.36568284e-01 ... 3.73899713e-02 1.22781014e+00 3.82357389e-01] ... [ 8.92783225e-01 5.04540540e-02 2.28467539e-01 ... 8.00314769e-02 -2.23021269e-01 1.29774764e-01] [ 5.31025469e-01 -1.42725945e+00 -1.18085388e-02 ... -1.44076693e+00 5.62920272e-01 1.01493394e+00] [ 7.08149314e-01 7.58150637e-01 1.15499504e-01 ... 5.71669698e-01 8.51456355e-03 -8.99691731e-02]]]]; ov_res: [[[[ 5.58792949e-01 -3.65527757e-02 6.59737408e-01 ... -1.33785084e-01 7.88705051e-01 1.24809349e+00] [ 5.86700499e-01 -8.27935696e-01 -1.59949228e-01 ... 1.21813178e+00 -6.88431144e-01 -2.09119961e-01] [-2.18100452e+00 2.07695508e+00 -3.89057726e-01 ... -9.70488727e-01 -1.18753040e+00 7.88735926e-01] ... [ 8.72770995e-02 -8.56626332e-01 9.83422577e-01 ... -5.89089930e-01 8.12381744e-01 -3.31445277e-01] [ 7.79528141e-01 -3.92840147e-01 -6.89092755e-01 ... -4.77582902e-01 -4.96783376e-01 3.62820536e-01] [ 7.45702863e-01 1.25124231e-01 -2.34528378e-01 ... 
-5.82137227e-01 -1.55129564e+00 4.09344107e-01]] [[-7.37468898e-01 -3.52294743e-01 3.35293293e-01 ... -4.49066371e-01 -1.28883672e+00 1.85120583e-01] [ 1.34521887e-01 1.13825388e-01 -2.08466783e-01 ... -2.95489043e-01 4.30681497e-01 -6.22156203e-01] [-4.36986722e-02 3.88517112e-01 1.35714972e+00 ... 6.94203079e-01 4.93890554e-01 -1.37764561e+00] ... [-5.02853692e-01 -6.75871432e-01 4.09127735e-02 ... 1.48064148e+00 4.63248789e-01 -8.23119164e-01] [ 1.29297042e+00 -2.26352304e-01 1.77466333e+00 ... -1.34679878e+00 -5.26962757e-01 -7.33609349e-02] [ 3.22575092e-01 -5.22369087e-01 6.61633790e-01 ... 8.78251910e-01 -1.48789853e-01 -7.73032963e-01]] [[ 5.27985930e-01 6.27144128e-02 3.10307652e-01 ... -3.30187291e-01 1.40560615e+00 4.23553377e-01] [-1.04281104e+00 2.29003206e-01 -3.68448831e-02 ... 6.63793206e-01 -7.31374800e-01 8.32354248e-01] [-1.78778812e-01 -1.46017468e+00 1.07056387e-01 ... -4.60837364e-01 3.61959040e-01 1.36240600e-02] ... [ 4.41173434e-01 3.30920011e-01 2.79580712e-01 ... -3.49676132e-01 4.39904004e-01 7.78380215e-01] [-3.89230102e-01 1.76466811e+00 -1.33340759e-02 ... 2.77689964e-01 -7.82808065e-01 6.88673913e-01] [ 1.12957084e+00 -3.84556711e-01 2.91577518e-01 ... 1.15161538e+00 -9.88345802e-01 1.28243339e+00]] [[ 1.73335326e+00 -3.24015439e-01 9.32891548e-01 ... -1.79909623e+00 -7.11885929e-01 -3.43050480e-01] [ 1.45820248e+00 -2.16705471e-01 -9.05542821e-02 ... -1.77756831e-01 -5.23899853e-01 -5.83890259e-01] [ 3.32584642e-02 6.55764937e-01 1.61901808e+00 ... -2.38419831e-01 2.87165344e-01 3.66440490e-02] ... [-6.91091835e-01 1.26610160e-01 -4.30252373e-01 ... 5.84849477e-01 -7.81990409e-01 -3.59334573e-02] [ 1.44837189e+00 3.07620376e-01 8.99519920e-01 ... 9.84875858e-01 -4.09983099e-01 1.06800370e-01] [ 9.75220799e-01 -5.93902767e-01 -1.16335821e+00 ... -5.58513463e-01 4.15829599e-01 -3.69138300e-01]] [[ 6.35786206e-02 -5.99080682e-01 -6.21889830e-01 ... 
9.47249383e-02 5.83878696e-01 -9.66137648e-01] [-1.39183909e-01 -3.35723221e-01 3.17641139e-01 ... 7.33675480e-01 2.95470446e-01 -1.36430729e-02] [ 3.56457010e-02 -5.67749381e-01 -3.27703893e-01 ... 1.42305702e-01 4.51203316e-01 8.92947078e-01] ... [-1.30960429e+00 5.24626255e-01 1.12827802e+00 ... -1.46726042e-01 2.12230608e-01 7.70810246e-01] [ 9.31066036e-01 -2.15505306e-02 9.64352608e-01 ... -6.34364843e-01 9.40226376e-01 4.57938641e-01] [-7.00723052e-01 -2.12886512e-01 -9.75185037e-01 ... 7.91693106e-02 -5.62958539e-01 -3.37331504e-01]] [[-1.71819478e-01 3.85207087e-01 7.38579810e-01 ... -7.20867157e-01 -1.10551512e+00 2.38949716e-01] [ 4.86238807e-01 8.51517677e-01 -3.19452941e-01 ... -1.04509592e+00 2.85732001e-01 -8.86405408e-01] [ 4.42459792e-01 -2.72701859e-01 -7.03114510e-01 ... -4.03187662e-01 -3.00226986e-01 6.11497998e-01] ... [ 1.02603756e-01 -2.94073671e-01 -1.01314449e+00 ... -1.10568225e-01 -1.18982005e+00 -7.27617204e-01] [ 7.40577400e-01 -1.58221588e-01 -4.64205861e-01 ... -1.36232400e+00 -1.93103492e-01 -1.30643249e+00] [ 1.41446605e-01 2.17165470e-01 2.71027833e-01 ... 1.05338134e-01 5.45678020e-01 3.68316948e-01]]] [[[-7.00328350e-01 -5.40522695e-01 1.40211821e+00 ... -1.20243883e+00 8.83984625e-01 4.16035652e-01] [ 7.24923611e-01 -1.76717389e+00 3.70525382e-02 ... -8.89881030e-02 5.84717035e-01 3.25422257e-01] [ 1.60104662e-01 7.78033733e-01 -2.09679246e-01 ... -8.12060609e-02 6.80530146e-02 1.91209406e-01] ... [-5.47586501e-01 4.67252553e-01 -9.24839616e-01 ... 3.81387621e-01 -4.44719732e-01 7.69609749e-01] [ 3.52068581e-02 1.44396186e-01 -7.16999471e-01 ... -5.35135388e-01 -7.07915425e-01 5.60338378e-01] [-8.20922911e-01 -7.90127397e-01 2.43667468e-01 ... 2.02001736e-01 1.87922418e+00 8.04325759e-01]] [[-3.00397962e-01 1.97823808e-01 3.37376922e-01 ... 1.10373914e+00 -1.69056520e-01 5.33559203e-01] [ 1.17349100e+00 2.29946956e-01 -7.90798783e-01 ... 
5.53903580e-01 -6.16077423e-01 -6.77601218e-01] [-1.16640769e-01 6.92963779e-01 5.76193869e-01 ... -1.58573389e-01 6.08622981e-03 -8.99179757e-01] ... [ 2.78697550e-01 -1.44706559e+00 -2.96898425e-01 ... -5.08268103e-02 7.74471760e-01 -9.87901390e-02] [-1.05063391e+00 1.00507832e+00 1.32180288e-01 ... -6.18068799e-02 -2.83098251e-01 -8.27241167e-02] [ 1.03930974e+00 -9.52690840e-01 -2.38515884e-01 ... 1.09095776e+00 -8.10927451e-01 6.78549945e-01]] [[-6.22075677e-01 4.70203340e-01 -6.23347998e-01 ... 3.78690749e-01 1.20555460e-01 -5.54353893e-01] [-1.53864765e+00 -7.84213901e-01 -7.73037970e-03 ... -1.38078988e+00 -3.82585377e-01 1.50184739e+00] [ 2.88002223e-01 2.91176677e-01 5.38216352e-01 ... 7.00649738e-01 -4.11775291e-01 -5.81481397e-01] ... [ 4.20913815e-01 1.20665169e+00 -1.06793153e+00 ... -4.97235835e-01 -3.17991227e-01 -1.45128801e-01] [ 7.90075779e-01 -5.27454197e-01 -9.16049480e-01 ... -1.39921319e+00 7.15658903e-01 1.05517435e+00] [ 1.86326578e-01 2.90259331e-01 -1.61096722e-01 ... -5.79898432e-02 1.35423005e+00 4.50399637e-01]] [[-1.25155553e-01 1.44200444e-01 4.59509522e-01 ... 6.32436275e-02 8.32002759e-01 -2.56986827e-01] [-5.60897470e-01 2.45139673e-01 1.97629881e+00 ... 1.42974377e-01 -4.36909765e-01 -3.79844099e-01] [-4.97886151e-01 4.27618951e-01 2.19595522e-01 ... -9.11462188e-01 6.92074776e-01 -7.60537863e-01] ... [-5.60075939e-01 -7.72015393e-01 -8.98663476e-02 ... 2.41497606e-01 -3.15030776e-02 -8.19346428e-01] [-4.80162680e-01 3.26463073e-01 1.39139795e+00 ... 7.32071579e-01 6.55306756e-01 -1.74863076e+00] [ 1.56113446e-01 3.04497033e-01 -2.49029711e-01 ... 5.46839952e-01 1.57989907e+00 -7.49828637e-01]] [[ 1.11431575e+00 6.87888205e-01 2.27970496e-01 ... -2.77536601e-01 -1.05578518e-02 7.97797590e-02] [ 3.41563433e-01 5.56936525e-02 1.00126660e+00 ... -6.85917318e-01 -1.42140543e+00 2.20153272e-01] [-3.53693604e-01 -6.09014332e-01 1.08746767e-01 ... -6.39089882e-01 -3.49353701e-01 -1.76675916e-02] ... 
[-4.10856247e-01 5.05082428e-01 3.69658887e-01 ... 7.43861794e-01 -3.18785042e-01 -7.71897316e-01] [-3.71387362e-01 -5.60453832e-01 6.47354841e-01 ... 1.63479045e-01 9.99429405e-01 1.01305908e-02] [-2.93804020e-01 1.19632292e+00 9.10782456e-01 ... 2.36214072e-01 5.30604482e-01 2.49819264e-01]] [[ 4.81532693e-01 -7.50523627e-01 -3.99385929e-01 ... -5.53251624e-01 -7.30025947e-01 4.93238926e-01] [ 3.37270588e-01 5.44719398e-01 -2.49683231e-01 ... -1.69304574e+00 3.78163457e-01 2.46087819e-01] [ 6.96117356e-02 -4.10837114e-01 9.86570492e-02 ... 1.07488620e+00 1.40562624e-01 -1.17102973e-01] ... [ 1.41870308e+00 2.22831443e-01 -2.05461338e-01 ... -5.91296732e-01 -1.89336911e-01 -9.58418325e-02] [-1.19525468e+00 -1.29495943e-02 7.89410591e-01 ... -4.70787644e-01 -7.90021062e-01 -2.23279983e-01] [ 6.58161521e-01 -3.68521847e-02 -1.88270971e-01 ... -5.83506286e-01 7.96072721e-01 -1.37463009e+00]]] [[[-6.78433925e-02 -1.13115907e-01 6.23436093e-01 ... -8.42426300e-01 -2.01016404e-02 6.55666366e-02] [-2.55042702e-01 -3.95209461e-01 -2.76347607e-01 ... -2.23852769e-01 7.98166513e-01 -2.23875493e-01] [ 5.11911392e-01 -4.85506386e-01 -1.97507441e-01 ... 1.71588528e+00 -1.08786352e-01 1.01483130e+00] ... [-5.39328873e-01 9.15587306e-01 1.01326048e+00 ... -1.00957894e+00 3.85783851e-01 -3.07261050e-02] [-5.15971601e-01 3.54431003e-01 -1.32730693e-01 ... 2.40379229e-01 6.99823737e-01 4.82984811e-01] [-8.43160152e-02 -1.41680419e+00 1.08738387e+00 ... 6.67905092e-01 -3.53755981e-01 2.35557005e-01]] [[-4.11036551e-01 1.17444366e-01 3.86793882e-01 ... 9.83693302e-01 -1.15918994e+00 -5.30549586e-01] [ 6.06035590e-01 1.21160232e-01 -4.45738077e-01 ... 2.45090351e-01 6.62715912e-01 3.80396456e-01] [ 8.22210193e-01 6.92854404e-01 6.00849330e-01 ... -2.66122073e-01 -9.37699676e-01 5.18995821e-01] ... [-9.20997143e-01 -2.36588731e-01 -9.41351593e-01 ... 7.02677846e-01 9.62165058e-01 3.00067157e-01] [-1.66872039e-01 -1.52551103e+00 -6.63370311e-01 ... 
-7.58244097e-01 3.00784148e-02 -2.21797563e-02] [-1.26128554e+00 9.30320561e-01 2.05277824e+00 ... 2.47052372e-01 9.63993967e-01 7.26654470e-01]] [[-5.42792566e-02 1.26042032e+00 1.17646050e+00 ... 3.27234775e-01 1.10965610e+00 6.07574284e-01] [-5.09837091e-01 -1.93280354e-01 -1.87950172e-02 ... 1.01159441e+00 -1.29680768e-01 3.45959403e-02] [-5.14339626e-01 -7.27291033e-02 4.24496680e-01 ... 2.44026542e-01 1.36823431e-01 -1.51371288e+00] ... [ 6.42987847e-01 -3.50236982e-01 -2.72359014e-01 ... -9.66241062e-01 -1.96108654e-01 6.94313765e-01] [ 8.01607251e-01 -3.02374810e-01 1.79342598e-01 ... 6.78967714e-01 2.69159049e-01 -7.14974105e-01] [-4.31869686e-01 7.13200271e-02 1.27804625e+00 ... -4.43947583e-01 -1.67564988e+00 -6.05703175e-01]] [[ 9.46991324e-01 -1.28223360e+00 -2.85925299e-01 ... -1.46653339e-01 -9.88476276e-02 5.26490845e-02] [-9.80907619e-01 1.02894628e+00 7.20968068e-01 ... -4.73782197e-02 -9.21693325e-01 -1.94252178e-01] [-9.26508665e-01 -1.89800143e-01 -3.75876784e-01 ... -1.05891848e+00 -3.87659550e-01 -1.07979856e-01] ... [-9.22670305e-01 -6.59384966e-01 2.06693728e-02 ... 3.07703286e-01 -4.11489278e-01 -5.38771808e-01] [ 7.66913593e-02 -6.28674388e-01 1.49608433e-01 ... -5.52569591e-02 8.14789534e-01 2.38641217e-01] [-1.91007748e-01 -2.68366337e-01 -5.29901326e-01 ... 7.42785513e-01 3.18150610e-01 9.62556899e-01]] [[-3.76783758e-01 2.35551983e-01 -8.04420337e-02 ... 1.14769913e-01 5.63425362e-01 1.04847550e+00] [-5.05932689e-01 1.91180959e-01 9.67064023e-01 ... -5.59405088e-01 1.42946744e+00 -8.85979235e-01] [-9.72349346e-01 2.66579866e-01 -9.85669270e-02 ... -1.94713715e-02 -6.77616298e-02 5.32372594e-02] ... [-2.98381269e-01 1.15074468e+00 -4.56333786e-01 ... 1.29754588e-01 -7.68426001e-01 -1.45074576e-01] [-2.65333086e-01 8.46886873e-01 5.85937440e-01 ... -5.87527275e-01 8.37947667e-01 2.36946076e-01] [ 1.29446546e-02 -3.65453601e-01 7.46247545e-02 ... 
-6.82541788e-01 2.91312128e-01 -5.45640647e-01]] [[ 4.80311185e-01 4.93877292e-01 -1.61229804e-01 ... -1.97945967e-01 -3.51819634e-01 -6.50663435e-01] [-9.16749656e-01 -2.98726320e-01 -9.51824561e-02 ... 1.09249413e+00 2.07886279e-01 8.55899930e-01] [-1.62632409e-02 8.86489749e-01 -8.78053725e-01 ... -6.31314754e-01 -1.16238749e+00 5.20337403e-01] ... [-1.03391424e-01 -1.26172698e+00 -1.72127768e-01 ... -4.17714894e-01 1.19830108e+00 7.27866054e-01] [ 1.81821096e+00 8.34196329e-01 -1.60667285e-01 ... -1.51586264e-01 -3.09076428e-01 2.16667593e-01] [-2.24484667e-01 -9.51120377e-01 -3.71638268e-01 ... -1.06559229e+00 2.59901702e-01 5.08200586e-01]]] ... [[[-5.91760352e-02 5.35744071e-01 -6.66150093e-01 ... 4.73787576e-01 6.44320905e-01 -2.05517501e-01] [ 9.35535654e-02 -6.24150515e-01 5.14869690e-01 ... -6.91028833e-01 -8.45238566e-01 -3.48636001e-01] [ 1.63798130e+00 -3.36606860e-01 4.08548862e-01 ... 3.05489786e-02 -3.38138878e-01 -1.94871083e-01] ... [-3.24217111e-01 -2.00510907e+00 1.19935918e+00 ... -7.22756423e-03 3.25346500e-01 7.02966332e-01] [ 2.01735780e-01 -3.31502646e-01 -6.75379276e-01 ... 1.31238270e+00 2.19336096e-02 2.15987667e-01] [ 1.27422786e+00 -1.03545439e+00 -1.60071266e+00 ... 5.45574486e-01 -8.54279220e-01 -5.29473484e-01]] [[ 7.01197207e-01 -8.42499077e-01 -3.51983100e-01 ... 1.90058842e-01 -9.20236204e-03 1.16771305e+00] [-2.38173157e-01 -5.79894006e-01 1.48013353e-01 ... 1.10663444e-01 1.03946596e-01 1.23892248e+00] [ 6.02903426e-01 -2.40994349e-01 1.44797885e+00 ... -5.86178117e-02 -2.50835776e-01 -5.26774764e-01] ... [-2.03948587e-01 5.03191769e-01 -1.74350953e+00 ... 7.76892304e-01 6.71908796e-01 -7.97637999e-01] [-4.84682769e-01 -7.14057148e-01 -3.04405063e-01 ... 1.34891614e-01 -5.35240114e-01 -9.22582522e-02] [-1.00272574e-01 1.65354490e-01 5.06135404e-01 ... 4.68705088e-01 -7.69703507e-01 2.50612907e-02]] [[ 4.02386308e-01 -3.49682510e-01 -8.15796077e-01 ... 
5.80658972e-01 -3.49812359e-01 -1.39121389e+00] [ 7.25278258e-01 -2.94044107e-01 2.63695568e-01 ... -4.66740549e-01 6.43554032e-01 9.08223074e-03] [ 4.69319522e-01 -6.17670752e-02 -2.43195757e-01 ... -8.37063849e-01 3.84794712e-01 -5.66252708e-01] ... [-3.70481968e-01 2.51415402e-01 3.33371133e-01 ... 2.01444894e-01 -2.59055644e-01 1.49086273e+00] [-9.34146821e-01 -1.45923510e-01 1.12670735e-01 ... 9.27482426e-01 7.71814346e-01 6.81833386e-01] [ 1.47024965e+00 -3.06211799e-01 2.58633822e-01 ... 1.68476331e+00 1.70912862e-01 1.56165883e-01]] [[ 1.65360737e+00 6.03637695e-01 9.64515865e-01 ... 1.52880609e-01 2.21681833e-01 5.81087291e-01] [-6.98553622e-01 3.24388921e-01 -4.15047705e-01 ... 5.52829444e-01 -9.05273736e-01 -1.44426197e-01] [-4.14603084e-01 -8.56423229e-02 -1.57583952e-01 ... -1.50083709e+00 4.21029985e-01 -7.04873860e-01] ... [ 3.93975466e-01 5.89768231e-01 8.53216052e-01 ... -1.09809697e+00 6.97838962e-01 1.13761187e+00] [-4.92130071e-01 -1.36370194e+00 -3.71447086e-01 ... 6.82289243e-01 8.40392411e-02 6.54571533e-01] [ 5.78930438e-01 -1.50736058e+00 4.67967451e-01 ... -1.23556602e+00 6.26852274e-01 -2.25485772e-01]] [[-2.02678040e-01 5.08030891e-01 -7.30985999e-01 ... 1.28562659e-01 -3.46179843e-01 9.03698325e-01] [-8.63114536e-01 2.79589295e-01 1.38693810e-01 ... -9.48507965e-01 -2.53147721e-01 6.21908545e-01] [-3.71404469e-01 -6.78660274e-01 -1.89660057e-01 ... -1.25611767e-01 -2.59729922e-01 -9.23669696e-01] ... [-8.12365174e-01 1.64026678e+00 1.08588922e+00 ... -6.09178066e-01 -8.86213034e-03 1.81734309e-01] [ 9.30364653e-02 4.11004573e-01 4.84565705e-01 ... 2.79486418e-01 2.60692179e-01 -8.47774327e-01] [-4.86503206e-02 7.19063461e-01 -3.62459958e-01 ... 1.33381522e+00 5.88469148e-01 6.99876249e-01]] [[ 1.85349381e+00 -1.16734993e+00 -5.61921418e-01 ... -3.67712259e-01 5.71112275e-01 -2.18204677e-01] [ 2.39721179e+00 6.71365142e-01 -1.84713316e+00 ... 1.27196789e+00 4.12461817e-01 2.64784068e-01] [-7.58309543e-01 1.03867555e+00 1.61135182e-01 ... 
-7.15583980e-01 -2.96157867e-01 6.95186138e-01] ... [-1.62590548e-01 2.16586456e-01 2.91431636e-01 ... 1.16299659e-01 -1.78949702e+00 -1.06482649e+00] [-6.04173839e-02 3.03557981e-02 4.83974099e-01 ... -1.26632607e+00 -4.81503606e-01 2.34500498e-01] [-1.11787009e+00 -5.21921515e-01 -1.05438125e+00 ... 7.58421957e-01 7.63431489e-01 -1.55171111e-01]]] [[[ 8.39790523e-01 -1.20318145e-01 5.45149386e-01 ... 6.92370951e-01 8.45047593e-01 -9.02507722e-01] [-7.04238340e-02 -5.25301322e-02 -7.50549734e-01 ... 1.06651223e+00 -6.29677296e-01 4.09356445e-01] [ 4.20697927e-01 -6.61711931e-01 -6.93833947e-01 ... -1.32041806e-02 3.10279906e-01 -1.21897995e+00] ... [-6.37706459e-01 9.95504081e-01 -8.85328557e-03 ... -7.90878594e-01 2.21068799e-01 1.13928747e+00] [-6.67882204e-01 4.45106685e-01 -7.87220716e-01 ... 6.44935146e-02 2.99043894e-01 -6.93069935e-01] [-7.71859944e-01 6.35135531e-01 1.96914807e-01 ... 8.26559484e-01 6.11899495e-01 -1.66554332e-01]] [[ 8.86006057e-01 8.00035477e-01 -3.96931618e-02 ... 9.76777434e-01 8.61979544e-01 -5.17968118e-01] [-3.91313508e-02 -7.65132964e-01 -7.66286254e-01 ... 6.47317410e-01 1.09400797e+00 5.52692294e-01] [ 2.12938428e-01 -6.28313184e-01 -1.68178499e+00 ... -8.08282554e-01 4.63891774e-01 -1.31642580e+00] ... [-2.87824571e-01 -9.33696032e-01 -6.84830725e-01 ... -6.86888516e-01 -1.76413983e-01 1.29225981e+00] [-1.02022755e+00 -5.91318727e-01 8.16569388e-01 ... 9.37403738e-01 -1.04583728e+00 -3.96351725e-01] [ 1.17405510e+00 2.72688121e-01 -1.53253567e+00 ... -3.25096995e-01 3.89622509e-01 -4.08651412e-01]] [[-8.60227704e-01 -2.15089262e-01 3.82984966e-01 ... -1.41133249e+00 -5.39216280e-01 -2.01030090e-01] [ 3.96022141e-01 -1.11393499e+00 1.27183163e+00 ... -1.22201419e+00 -1.22438121e+00 5.22569381e-02] [ 5.38088262e-01 -1.06331265e+00 -9.70882550e-02 ... 8.18090439e-01 -8.54649544e-01 -1.02767110e+00] ... [ 1.95083782e-01 1.93224442e+00 1.17854345e+00 ... 
-6.76305950e-01 -3.06760073e-01 -1.18574512e+00] [ 6.69418573e-02 7.60171771e-01 -6.82732701e-01 ... 2.23652571e-02 -1.90496624e-01 1.47646880e+00] [ 8.06668937e-01 -2.85302877e-01 -7.32564211e-01 ... -5.51169991e-01 -1.14146136e-01 9.86497700e-02]] [[-1.68579018e+00 -1.70025408e+00 6.48097787e-03 ... 1.31410611e+00 -2.81757444e-01 6.81395650e-01] [ 2.84497082e-01 5.76054037e-01 3.98220003e-01 ... 5.19828320e-01 2.95799941e-01 6.78573430e-01] [ 5.39496243e-01 -3.35995466e-01 -9.39865634e-02 ... -2.53895447e-02 -1.66154909e+00 -1.23192132e+00] ... [ 2.58416981e-01 -6.52447283e-01 2.64554232e-01 ... -1.31344363e-01 7.57747114e-01 -5.57110906e-01] [-5.80965839e-02 8.51163149e-01 6.41205907e-01 ... 8.54858533e-02 4.66009885e-01 8.36809933e-01] [ 1.89673826e-01 -2.70031374e-02 -7.30624676e-01 ... 1.75201690e+00 2.00290993e-01 1.96414739e-01]] [[-8.52320313e-01 -5.06289124e-01 2.71068007e-01 ... 5.70865810e-01 1.23183298e+00 2.72065163e-01] [-6.42688632e-01 2.40947366e-01 -2.02729642e-01 ... -6.97970986e-01 5.84625602e-01 -1.29545355e+00] [-5.45448512e-02 -2.70070471e-02 8.51162612e-01 ... 5.59113443e-01 -6.24708354e-01 -5.95188260e-01] ... [ 1.34688377e-01 8.92513037e-01 5.94824851e-02 ... -5.19316733e-01 -2.75800973e-01 7.48488903e-02] [-3.50925654e-01 -2.16544181e-01 -4.75447565e-01 ... 7.09139049e-01 -3.53493869e-01 -2.43420303e-01] [-1.02857105e-01 1.53904498e+00 5.61590016e-01 ... -4.24361616e-01 1.11992061e+00 1.07637298e+00]] [[ 1.55426455e+00 5.77952325e-01 -3.13415378e-01 ... -3.77818942e-01 -1.45551011e-01 -7.42904425e-01] [-1.37150824e-01 -2.55088508e-01 5.17721355e-01 ... 6.02268338e-01 2.38344416e-01 -1.02506220e+00] [-1.53151476e+00 1.42112762e-01 7.03812778e-01 ... 1.76837500e-02 1.07756652e-01 -7.44433999e-01] ... [-4.13306743e-01 5.39088726e-01 -1.59468472e-01 ... 3.47408891e-01 -1.47187114e+00 -4.59729791e-01] [-9.75849554e-02 -2.00979352e-01 1.12870216e-01 ... 
1.00363481e+00 -1.36253431e-01 -3.72586176e-02] [ 9.16970491e-01 -7.27690086e-02 1.60428381e+00 ... -8.68640184e-01 5.08682549e-01 3.85168195e-01]]] [[[ 3.76040190e-01 -3.93980116e-01 5.89990795e-01 ... 1.54438823e-01 5.57577014e-01 4.85645095e-03] [-2.60350019e-01 9.50158536e-01 5.83549023e-01 ... 6.64148405e-02 -5.31926416e-02 -1.22878206e+00] [-1.09475330e-01 -3.34342241e-01 -1.42969358e+00 ... -7.43671775e-01 -9.29552972e-01 -3.18157226e-01] ... [-9.64828953e-02 8.43969584e-01 -8.78223896e-01 ... -6.49552122e-02 -7.84377575e-01 2.13189889e-02] [-1.84786739e-03 -4.03181434e-01 -3.09313834e-01 ... 1.82976983e-02 -9.65486988e-02 7.20689654e-01] [ 1.72942832e-01 -4.25914288e-01 1.06655562e+00 ... 3.78150530e-02 -3.31384927e-01 -1.65039778e-01]] [[ 1.03092051e+00 5.07859997e-02 -1.27348101e+00 ... 6.90989673e-01 -4.95206207e-01 -4.36616898e-01] [ 4.60612386e-01 -1.31083265e-01 -3.12494457e-01 ... -8.94525766e-01 -6.27788126e-01 -5.32558739e-01] [ 1.27801502e+00 1.71647406e+00 8.85402620e-01 ... 5.91789842e-01 -3.01099688e-01 7.30017126e-01] ... [ 6.79241598e-01 3.01894188e-01 7.13041067e-01 ... -7.66619444e-01 1.35253441e+00 -6.65173769e-01] [-2.59000570e-01 6.30001903e-01 2.16518402e-01 ... 1.05131042e+00 -5.85914068e-02 1.12284505e+00] [-7.70124078e-01 -1.67737234e+00 5.03589869e-01 ... 8.42430115e-01 9.44391072e-01 -9.74932671e-01]] [[-1.86960131e-01 1.06914055e+00 -1.37761009e+00 ... -1.08901787e+00 1.20984882e-01 8.32989931e-01] [ 6.58914685e-01 5.98456144e-01 2.79629469e-01 ... -8.44483674e-01 -6.61426365e-01 1.58906788e-01] [-1.23213246e-01 2.19566733e-01 -1.17949152e+00 ... 4.32140172e-01 1.52130234e+00 4.64187264e-01] ... [-8.89747083e-01 -1.55310476e+00 1.38701308e+00 ... -2.02703738e+00 -8.54627252e-01 1.32585216e+00] [ 9.52865779e-01 4.74868506e-01 2.96099503e-02 ... 6.46992028e-01 -1.00116804e-01 3.64218473e-01] [-3.64262834e-02 1.45788360e-02 9.84262526e-01 ... 
-6.29305422e-01 -3.73204529e-01 4.31618124e-01]] [[ 4.61611040e-02 -7.28315532e-01 3.58270258e-01 ... -2.83180535e-01 -2.60605291e-02 6.92830920e-01] [-6.43637478e-01 -1.43531299e+00 5.07616282e-01 ... -2.66523436e-02 -1.36908090e+00 -5.42944968e-01] [ 4.23887014e-01 5.06262183e-01 4.64206368e-01 ... 9.06384766e-01 4.70605254e-01 2.78208882e-01] ... [-3.54649872e-01 1.28157943e-01 1.97901458e-01 ... 8.55141699e-01 -3.02804774e-03 -4.15388465e-01] [-5.06225169e-01 9.19536889e-01 2.44103119e-01 ... 1.38163936e+00 -2.13035289e-03 7.60316491e-01] [ 3.70910287e-01 2.57118702e-01 -1.03468955e+00 ... -3.16281050e-01 2.74154067e-01 -1.47971790e-03]] [[-9.88082409e-01 -8.40267301e-01 2.65441854e-02 ... -2.77163446e-01 8.16141665e-01 3.50273162e-01] [ 4.25784700e-02 1.27669311e+00 -3.44593138e-01 ... -9.51120481e-02 -6.95406139e-01 -4.90137964e-01] [ 6.55582368e-01 -6.49750888e-01 -1.37548238e-01 ... 5.34743428e-01 4.93308693e-01 -6.87657893e-01] ... [-1.30529642e+00 5.44968843e-01 7.99827874e-01 ... -1.45127010e+00 -4.25904602e-01 -4.95293252e-02] [ 9.10477489e-02 3.44691098e-01 -1.47256628e-01 ... -3.80386531e-01 1.09352641e-01 6.99534491e-02] [-4.29555923e-01 8.71251374e-02 -5.06955147e-01 ... -8.12891603e-01 -1.33815646e+00 -1.05454373e+00]] [[ 3.58145684e-01 -3.34152997e-01 -3.62531021e-02 ... -2.82173213e-02 -1.25785995e+00 -4.70821708e-01] [ 2.75634397e-02 1.58930063e-01 5.68900168e-01 ... -2.81680264e-02 -8.81930709e-01 6.51051462e-01] [-5.72768211e-01 1.28905642e+00 -5.36568284e-01 ... 3.73899713e-02 1.22781014e+00 3.82357389e-01] ... [ 8.92783225e-01 5.04540540e-02 2.28467539e-01 ... 8.00314769e-02 -2.23021269e-01 1.29774764e-01] [ 5.31025469e-01 -1.42725945e+00 -1.18085388e-02 ... -1.44076693e+00 5.62920272e-01 1.01493394e+00] [ 7.08149314e-01 7.58150637e-01 1.15499504e-01 ... 5.71669698e-01 8.51456355e-03 -8.99691731e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 4} - params:{'n_groups': 3, 'weights_shape': (6,), 'eps': -0.05} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- 4 graph(%self : __torch__.test_group_norm.___torch_mangle_4602.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=-0.050000000000000003]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.5516 1.0805 -0.2738 0.1403 -0.0676 0.0889 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=3]() %12 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, %5, %self.bias, %4) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) fw_re: [[[[ 3.11506301e-01 -4.02600020e-01 2.02164426e-01 ... 
-1.14136696e+00 -8.36095810e-01 -3.45890820e-01] [-2.01468617e-01 -8.68467808e-01 4.88867730e-01 ... 6.67144284e-02 -2.27538899e-01 3.19043517e-01] [-3.57115567e-02 -1.19789159e+00 3.49276923e-02 ... -3.63494337e-01 1.48826063e-01 1.27741963e-01] ... [ 4.91209358e-01 -6.21194914e-02 7.75863409e-01 ... 1.29276916e-01 1.05759430e+00 -1.10718191e+00] [ 3.38120669e-01 1.77617595e-01 -2.84482360e-01 ... -3.45836699e-01 6.27292693e-01 3.65881443e-01] [-1.13814078e-01 -4.30641085e-01 6.23858869e-01 ... 2.93869257e-01 -6.64956689e-01 -1.34778723e-01]] [[ 6.59407556e-01 -8.48211408e-01 1.51666075e-01 ... 3.61875296e-01 5.59694231e-01 -1.27555773e-01] [ 4.74483460e-01 6.12090409e-01 -1.01814955e-01 ... 9.11710635e-02 -9.23654437e-01 -1.13847864e+00] [ 2.52410650e-01 -8.09843719e-01 1.30770731e+00 ... -1.00658405e+00 2.00535655e+00 1.86187899e+00] ... [-1.26123488e+00 -1.04942727e+00 6.51014507e-01 ... 4.23190683e-01 -2.78237253e-01 2.27777705e-01] [-4.50582922e-01 -9.99088109e-01 -1.01967347e+00 ... -1.15458630e-01 1.62694931e+00 6.09866381e-01] [-6.60418421e-02 -3.78553629e-01 -5.12965024e-01 ... -5.60479425e-02 -1.22923279e+00 1.79376975e-01]] [[-1.07044227e-01 -8.41611326e-02 -3.79266620e-01 ... -1.81128547e-01 -5.71556762e-02 2.51439124e-01] [ 1.34227112e-01 1.54664665e-01 2.88701922e-01 ... -8.18773985e-01 -2.60423064e-01 -2.62406647e-01] [-5.02588935e-02 -3.31918806e-01 2.58114338e-01 ... 1.98969007e-01 2.21442170e-02 1.22844595e-02] ... [ 7.58564249e-02 -1.63905010e-01 -6.19171597e-02 ... -5.96283190e-02 -3.46791059e-01 1.31087735e-01] [-3.27730626e-01 8.91565755e-02 1.41721800e-01 ... 3.16863626e-01 1.12113333e+00 3.07537973e-01] [-1.52748391e-01 -4.86561954e-01 3.02038133e-01 ... -4.79584515e-01 3.66693288e-02 -1.71696469e-01]] [[ 5.79504110e-02 -1.48235122e-02 2.44186074e-01 ... 6.75328001e-02 1.78019404e-01 8.38066414e-02] [-8.80363062e-02 -2.09976867e-01 -9.58865359e-02 ... 
8.25812891e-02 -4.16776203e-02 7.22205117e-02] [-1.48698404e-01 -1.14527538e-01 2.16612294e-02 ... -6.16206191e-02 -1.23061635e-01 -1.40622929e-01] ... [ 4.47895341e-02 9.41352695e-02 -2.10103896e-02 ... -2.49864668e-01 -6.09863410e-03 -1.10447910e-02] [ 4.25332747e-02 1.06048025e-01 1.41754434e-01 ... 8.28325748e-02 1.16552815e-01 -6.45839944e-02] [ 1.66335881e-01 -4.42780145e-02 -7.71524534e-02 ... 1.77470326e-01 1.82193324e-01 -2.14913055e-01]] [[ 2.16893554e-02 -7.51153007e-02 1.15608960e-01 ... 3.37624201e-03 -5.17992675e-03 -1.00706555e-01] [ 2.36401986e-02 1.30110756e-01 6.84624491e-03 ... 4.66807634e-02 2.98853759e-02 4.97599272e-03] [ 6.88208789e-02 -3.66327576e-02 -9.06346515e-02 ... 1.07984580e-01 3.08087356e-02 3.16632316e-02] ... [ 9.01097804e-02 1.80816669e-02 1.14797419e-02 ... -7.42016779e-03 -5.77359721e-02 -3.79196107e-02] [-5.08100959e-03 9.64584425e-02 3.68461758e-02 ... -4.44110483e-03 -1.15971416e-01 5.35202138e-02] [-5.29045612e-02 -2.11051442e-02 1.10735878e-01 ... 5.46812601e-02 4.20486405e-02 9.81170908e-02]] [[-3.04237325e-02 4.25422899e-02 -7.81028271e-02 ... -2.77497414e-02 7.89909735e-02 5.64556103e-03] [-8.42633098e-02 8.92074183e-02 -1.87272932e-02 ... 1.80713758e-02 1.30780609e-02 1.79588929e-01] [ 7.39768744e-02 -3.68539095e-02 1.50748596e-01 ... -1.95950698e-02 -7.88622499e-02 8.96877646e-02] ... [-9.15380940e-02 6.34610429e-02 -1.11338988e-01 ... 3.97871993e-02 -1.00512818e-01 1.23516940e-01] [ 9.14326087e-02 1.37341842e-01 -7.54410177e-02 ... 4.40737531e-02 1.95419684e-01 1.35088749e-02] [ 7.81969428e-02 1.16409231e-02 1.60448238e-01 ... -2.87156761e-01 2.08241865e-01 4.26079445e-02]]] [[[-5.52773952e-01 -3.15036207e-01 1.48697823e-01 ... 4.92012620e-01 4.06256288e-01 4.46495920e-01] [-5.72456181e-01 3.04739535e-01 1.28650740e-01 ... -3.77855092e-01 5.57070076e-01 9.14028227e-01] [-5.47105074e-01 -5.48108220e-01 -5.50871015e-01 ... -4.08799380e-01 1.75440572e-02 7.70640135e-01] ... 
[ 5.95915020e-01 1.14992929e+00 4.73640412e-02 ... 9.22605693e-01 7.61699796e-01 -6.13174975e-01] [ 3.30651999e-01 4.50681776e-01 -1.99583359e-03 ... -3.39994669e-01 -1.10622931e+00 -1.41096354e-01] [ 6.85426354e-01 -3.39979261e-01 2.27998510e-01 ... -4.25613701e-01 1.33226717e+00 1.16282058e+00]] [[-8.76048923e-01 -9.43783224e-01 8.04393053e-01 ... 1.51238680e+00 1.51144874e+00 3.41436356e-01] [-8.28657687e-01 1.99982449e-01 1.68078089e+00 ... 1.08542240e+00 -1.26669657e+00 -3.66371512e-01] [ 2.42962033e-01 1.39203000e+00 3.91660661e-01 ... 5.64021826e-01 -1.73916721e+00 -1.70851424e-01] ... [-1.79063165e+00 -4.42088634e-01 -2.87873328e-01 ... 6.29776791e-02 3.78767401e-01 1.40906179e+00] [ 2.03197189e-02 -2.27944896e-01 -7.59514451e-01 ... 6.74144089e-01 -8.75097096e-01 6.50257051e-01] [ 2.57617712e-01 1.36292768e+00 -4.20594245e-01 ... 7.94952661e-02 2.12267414e-01 1.60842597e+00]] [[ 1.45164290e-02 -1.02528691e-01 -2.38031596e-01 ... -3.85682017e-01 2.63851732e-01 1.55038446e-01] [-9.83473137e-02 3.23451459e-01 -1.67792523e-03 ... 3.24647963e-01 -3.65178496e-01 -4.24305379e-01] [-1.83594599e-01 1.21802300e-01 -2.67343938e-01 ... -2.32572109e-01 -3.96425501e-02 1.53383344e-01] ... [ 3.60100232e-02 -7.29017779e-02 -1.09963931e-01 ... 3.94050479e-01 -3.42437118e-01 2.84196772e-02] [-2.85995483e-01 2.86006033e-02 -6.73463792e-02 ... -4.53243375e-01 -2.14470416e-01 1.45423740e-01] [-3.41345891e-02 -4.23030145e-02 -1.07734151e-01 ... -3.61057699e-01 -1.69160724e-01 -2.77339637e-01]] [[ 1.69936970e-01 -1.16803430e-01 -2.61377454e-01 ... -1.02875777e-01 5.55813648e-02 1.93614259e-01] [ 1.63408220e-02 -1.56878889e-01 -5.83521724e-02 ... -1.60550943e-03 9.42155644e-02 -3.42118256e-02] [-9.12219957e-02 2.13310674e-01 -1.13008082e-01 ... 1.59105301e-01 6.39847666e-02 -3.17132249e-02] ... [ 2.02523381e-01 2.41605416e-02 2.25748092e-01 ... -1.23145610e-01 -2.12659920e-03 1.14917494e-01] [ 1.85622588e-01 5.60219735e-02 1.80949941e-02 ... 
5.33022825e-03 -5.97151481e-02 -1.81662500e-01] [-3.71810526e-01 1.30069807e-01 3.63954231e-02 ... -2.88918763e-01 2.73270696e-01 5.16465977e-02]] [[-2.23585181e-02 -1.94291994e-02 2.14831643e-02 ... -7.77980760e-02 -1.57088682e-03 1.26618594e-01] [-2.78908759e-02 4.44824286e-02 1.04286447e-01 ... 2.64593377e-03 -2.75510084e-02 -3.54348421e-02] [ 3.46076116e-02 5.38583025e-02 3.89762372e-02 ... -1.17426485e-01 1.44219529e-02 -5.46664260e-02] ... [ 4.23896946e-02 1.97186545e-02 2.17333995e-02 ... -1.97083764e-02 4.31660824e-02 5.03373845e-03] [ 1.85259769e-03 8.85250792e-02 3.15294191e-02 ... 5.96081577e-02 1.32471040e-01 -9.96117592e-02] [-4.15228680e-02 4.89232391e-02 8.22503641e-02 ... 5.41291498e-02 -1.37121394e-01 1.74804609e-02]] [[ 3.01426463e-02 -7.23383669e-03 1.88742220e-01 ... -1.18561707e-01 -7.72670582e-02 1.61556542e-01] [-1.36524183e-03 -6.45777062e-02 -2.60712113e-02 ... 2.65145183e-01 5.45669012e-02 2.80410759e-02] [ 3.91595066e-02 1.02411978e-01 8.14935714e-02 ... 9.71998647e-02 -1.52729541e-01 -1.19941674e-01] ... [ 3.01031955e-02 -6.00922368e-02 1.33542955e-01 ... 9.64923352e-02 1.64636612e-01 1.05687946e-01] [-2.06229508e-01 -7.43860602e-02 -4.90189157e-02 ... -8.79392251e-02 1.04032859e-01 -1.60274748e-02] [ 6.18763193e-02 9.81676877e-02 8.57161507e-02 ... 7.74035230e-02 -1.59072950e-01 -3.44177298e-02]]] [[[-2.16077849e-01 9.13388550e-01 8.21344912e-01 ... 8.00956905e-01 -5.88661253e-01 4.05793935e-01] [-1.20912266e+00 -5.36603153e-01 2.15054348e-01 ... 6.91495687e-02 6.69589281e-01 1.08274734e+00] [ 1.08390272e-01 2.36428648e-01 2.34340757e-01 ... -4.85693425e-01 -4.62544680e-01 1.65954992e-01] ... [ 1.70763776e-01 -4.04998720e-01 -5.23678027e-02 ... -1.12128901e+00 -1.37623787e+00 2.76152343e-01] [-1.99139059e-01 7.86637664e-01 3.89922261e-02 ... 1.02216041e+00 8.50439727e-01 1.17560178e-01] [ 7.71781266e-01 -1.31768808e-01 -1.30313730e+00 ... 
-8.07064235e-01 -2.39740219e-03 5.66353500e-01]] [[ 8.29910457e-01 6.23787045e-01 2.33668184e+00 ... -7.28927970e-01 -7.82001793e-01 7.92206049e-01] [-2.16264868e+00 -7.14639783e-01 -9.51408222e-02 ... -1.63991362e-01 1.33828664e+00 8.32965672e-01] [-7.18765438e-01 1.31522906e+00 5.63703775e-01 ... -2.28996277e+00 8.69794562e-02 1.03161323e+00] ... [-7.86092281e-01 8.77197504e-01 1.35102272e+00 ... 8.21379721e-01 -3.88495117e-01 1.76202309e+00] [ 1.06231317e-01 8.07373345e-01 -2.46929675e-01 ... 7.52601564e-01 -1.89708933e-01 -3.65379155e-01] [ 1.02032578e+00 -3.57053667e-01 -1.65858161e+00 ... -1.50148880e+00 -2.70249248e-01 -1.47628203e-01]] [[-2.73334384e-01 -1.04525685e-01 -2.03440771e-01 ... -2.92512417e-01 1.86724737e-01 -1.40765794e-02] [ 2.66369432e-01 -5.73407374e-02 -2.52329350e-01 ... -5.08242808e-02 2.29288086e-01 -1.88091863e-02] [-1.80921033e-01 -2.08816111e-01 -9.13093835e-02 ... 2.31112719e-01 1.96054250e-01 7.99876526e-02] ... [ 3.98060888e-01 3.03668648e-01 5.47150299e-02 ... -7.68186599e-02 3.27494830e-01 -1.32051244e-01] [ 2.15806380e-01 5.15067875e-02 -1.49112895e-01 ... 6.56000748e-02 1.05596095e-01 2.19003800e-02] [-1.78132534e-01 9.48348790e-02 4.20098871e-01 ... -2.45741308e-01 -1.18235379e-01 -5.00716805e-01]] [[-4.83517125e-02 -6.59699319e-03 -1.71375230e-01 ... 3.65244336e-02 -4.45305444e-02 -4.24851514e-02] [-3.03285033e-01 -7.65936747e-02 2.34057680e-02 ... -2.87991092e-02 -9.74556059e-02 -5.20290509e-02] [ 1.97637692e-01 6.94108158e-02 1.65407225e-01 ... -1.60304606e-01 1.48217663e-01 -2.89909542e-02] ... [ 1.51089102e-01 4.97898366e-03 7.17762411e-02 ... -3.11869401e-02 -7.13138133e-02 -2.37271786e-01] [-6.68777972e-02 9.40289050e-02 1.25396401e-01 ... 2.13186126e-02 -9.09639746e-02 -8.57230201e-02] [ 1.23342142e-01 4.10572365e-02 1.03786802e-02 ... 9.25204381e-02 -1.63281783e-01 2.13944793e-01]] [[ 2.75064688e-02 -1.95912614e-01 1.20290248e-02 ... 
-4.48234938e-02 -1.38129033e-02 -9.55366902e-03] [-4.30981144e-02 7.15217963e-02 -1.27462540e-02 ... 2.00353982e-03 2.06974167e-02 -6.03667237e-02] [-4.87099998e-02 -1.21088549e-01 -4.51905318e-02 ... -7.56434128e-02 1.57066109e-03 -8.42732266e-02] ... [ 9.45277438e-02 1.67458523e-02 -6.27811402e-02 ... 2.36855615e-02 -3.14790793e-02 -7.23512918e-02] [ 7.90238157e-02 6.75935820e-02 5.51798418e-02 ... -1.45658534e-02 -3.05035505e-02 -6.59780856e-03] [ 3.10664177e-02 7.81872571e-02 -1.67048480e-02 ... 1.12390900e-02 -9.12090577e-03 4.14252244e-02]] [[-2.70234849e-02 7.80173838e-02 -9.30768698e-02 ... 9.08582285e-02 -5.14037348e-02 4.80676033e-02] [ 1.40738845e-01 5.93622029e-02 9.47345793e-02 ... 2.10851096e-02 5.67236822e-03 7.20424205e-02] [-1.75362043e-02 2.20900401e-02 2.24421881e-02 ... -5.59933633e-02 -1.34287491e-01 6.86084107e-02] ... [-3.75758894e-02 -4.32885773e-02 -1.58699304e-01 ... -1.65673152e-01 1.39633089e-01 8.45305026e-02] [ 1.99917629e-02 -1.07346915e-01 1.26522928e-01 ... -6.86906874e-02 6.95834979e-02 -7.98804779e-03] [-7.14866519e-02 -5.50248511e-02 -5.75700738e-02 ... 7.47137070e-02 -1.76298078e-02 -6.44020289e-02]]] ... [[[-2.23763082e-02 -1.38403863e-01 6.23601913e-01 ... 5.29174805e-01 -3.55077773e-01 1.16098320e+00] [ 9.74852502e-01 5.24396300e-01 -2.15535853e-02 ... 1.21858038e-01 1.34750113e-01 -1.16355014e+00] [ 9.36726868e-01 -2.63221651e-01 -4.18368101e-01 ... 8.65958154e-01 4.73808497e-01 3.25392365e-01] ... [-1.83601096e-01 -5.41116707e-02 3.33024263e-01 ... 7.10693359e-01 -3.08435917e-01 -7.17190027e-01] [-1.10215947e-01 -1.32777226e+00 4.29999620e-01 ... 7.36411035e-01 -3.51267517e-01 -1.63916337e+00] [ 4.79770303e-01 9.27682221e-02 4.32451546e-01 ... 2.02645570e-01 -5.61010063e-01 -7.34644890e-01]] [[-1.90322638e-01 5.78276217e-01 -6.98652267e-01 ... -1.71416664e+00 -1.81349576e+00 5.06311953e-01] [ 9.35689092e-01 -1.09578407e+00 -3.68196756e-01 ... 
-6.84417307e-01 -1.11784148e+00 1.32303023e+00] [-3.99881937e-02 2.30023932e+00 -5.73833048e-01 ... -2.64874753e-02 3.84495020e-01 2.05806419e-01] ... [ 1.70016676e-01 2.41564441e+00 -1.38489270e+00 ... -1.60677910e+00 -1.17053974e+00 -1.32549560e+00] [ 1.89363599e-01 1.11373627e+00 -3.65602732e-01 ... -2.51051354e+00 -4.19667363e-01 8.17204416e-01] [ 6.15559593e-02 4.68594939e-01 -3.94687712e-01 ... -9.20896947e-01 -2.98245549e-01 -6.84083462e-01]] [[ 1.19843492e-02 2.56700013e-02 1.47013530e-01 ... -1.37538210e-01 -2.10777164e-01 4.72119570e-01] [-5.53871728e-02 -4.74829748e-02 2.91996300e-01 ... 9.26024169e-02 -2.23154485e-01 -4.72338498e-01] [ 2.55085286e-02 2.45529920e-01 5.52715249e-02 ... -1.64423678e-02 -1.79707095e-01 -2.08373129e-01] ... [-4.41790074e-02 1.24060765e-01 -1.49122404e-03 ... 3.70554239e-01 1.57947361e-01 -2.08221287e-01] [ 2.62689311e-02 -2.82245934e-01 -1.34677619e-01 ... 2.26307496e-01 -7.91757166e-01 2.27670707e-02] [ 4.72669691e-01 1.30733147e-01 6.07860744e-01 ... 4.40707654e-01 -5.82636714e-01 -1.87612221e-01]] [[-1.10330075e-01 -1.10468566e-02 -4.78031635e-02 ... -1.30077899e-01 -3.29826809e-02 1.16412165e-02] [ 1.03280976e-01 4.68281843e-02 -1.77423045e-01 ... 1.33889332e-01 -1.96288049e-01 4.28305566e-03] [-6.01729676e-02 1.79900870e-01 2.73602813e-01 ... -1.41763166e-02 -6.84288889e-02 -6.68592006e-02] ... [-7.59467781e-02 -2.38037407e-02 1.03793666e-01 ... 4.49107029e-02 1.60496086e-01 1.59609810e-01] [-2.03561008e-01 -4.61389162e-02 -6.46425933e-02 ... 1.33769527e-01 3.63514461e-02 7.23408982e-02] [ 1.89670268e-02 1.83919489e-01 3.14322077e-02 ... 4.37589325e-02 -2.67228186e-02 -1.44470349e-01]] [[-3.34075429e-02 7.76324719e-02 9.29818749e-02 ... -8.30947533e-02 -5.96810915e-02 8.03133845e-03] [-1.12585865e-01 -5.40660694e-02 -1.58628840e-02 ... -1.68801043e-02 -7.53068924e-02 -2.30155215e-01] [-3.91653460e-03 -3.34219448e-02 -4.49827947e-02 ... -5.72500527e-02 1.70952398e-02 -5.49033470e-02] ... 
[ 2.54674107e-02 1.02663130e-01 -1.17423698e-01 ... 8.03734288e-02 4.18527201e-02 3.25798132e-02] [ 5.26801534e-02 6.48939833e-02 -8.24445933e-02 ... 1.11338370e-01 3.56413834e-02 -1.33449495e-01] [ 5.66767231e-02 -6.57036901e-02 -1.04083866e-02 ... -4.86779362e-02 -1.99746434e-02 -1.05952419e-01]] [[-9.03627649e-02 -1.18414439e-01 -2.07459256e-01 ... 3.65367979e-02 -9.52006727e-02 9.92932990e-02] [-1.65941492e-01 2.22655339e-03 1.92162103e-03 ... 1.21103153e-01 1.78647768e-02 2.83402652e-02] [ 1.43135175e-01 3.02857552e-02 8.91994387e-02 ... -5.63134588e-02 7.73206428e-02 -8.38286653e-02] ... [ 4.77787033e-02 1.53298918e-02 -1.02357559e-01 ... -2.76787784e-02 -7.86046162e-02 5.96097559e-02] [ 7.30141113e-03 2.48608384e-02 -3.88186015e-02 ... -3.81697677e-02 -8.22553560e-02 -5.42299561e-02] [-7.58251771e-02 -1.07225463e-01 -9.72517058e-02 ... -1.46173537e-01 -1.33505315e-01 4.54034880e-02]]] [[[-9.47230577e-01 7.59604514e-01 -3.09360832e-01 ... 1.56108034e+00 -1.86186001e-01 -5.43083787e-01] [ 2.84763217e-01 1.05471504e+00 2.62460828e-01 ... 1.26998043e+00 5.28584719e-01 -2.95245349e-02] [-1.22512899e-01 3.23780507e-01 -9.61436331e-01 ... -1.38274327e-01 4.87574965e-01 1.14623225e+00] ... [-2.77728766e-01 8.44049975e-02 4.66371849e-02 ... -6.32174388e-02 -6.56308651e-01 4.10350442e-01] [ 1.26353517e-01 -2.54174322e-01 5.10485530e-01 ... -9.15445209e-01 -8.27227652e-01 2.76238799e-01] [ 6.44794583e-01 -2.74146050e-01 6.49514854e-01 ... -2.83447117e-01 -3.59389156e-01 5.48476160e-01]] [[ 1.06467389e-01 5.42038262e-01 4.73456681e-01 ... 4.75676596e-01 -2.49097720e-01 -1.20178089e-01] [-7.23759413e-01 8.51211905e-01 6.57046914e-01 ... -6.12359583e-01 -2.00621724e-01 -2.26434425e-01] [ 6.25850737e-01 -1.45998871e+00 -1.51277339e+00 ... 4.68998581e-01 1.99072492e+00 -1.19401261e-01] ... [ 2.53440762e+00 1.23672938e+00 9.61901546e-01 ... 7.65157819e-01 8.51649344e-01 -9.22584772e-01] [ 8.46456409e-01 7.48258650e-01 -1.91225447e-02 ... 
-7.88280070e-01 4.08169717e-01 2.09148693e+00] [ 9.37167108e-01 1.25864483e-02 2.45894742e+00 ... -7.07311392e-01 -8.87975216e-01 3.17400932e-01]] [[-3.68238419e-01 1.45930812e-01 2.97184765e-01 ... 1.90389901e-01 3.79109681e-01 1.70528576e-01] [-3.03222537e-01 -2.61114538e-01 3.15196425e-01 ... 1.24610730e-01 -6.74201250e-02 9.84361619e-02] [-5.05269170e-01 -1.22071374e-02 -4.09509718e-01 ... 2.62596682e-02 -3.39118838e-01 1.40668705e-01] ... [-1.81532502e-01 3.12231898e-01 3.78701240e-01 ... 1.19510196e-01 -3.85868430e-01 3.12743592e-03] [ 3.69816124e-01 -4.92762417e-01 -3.55886072e-01 ... 9.05847326e-02 1.06071398e-01 -2.29530260e-01] [ 2.26165339e-01 1.39419079e-01 3.34681362e-01 ... 1.35167778e-01 -2.55277544e-01 -3.94407302e-01]] [[ 1.80416808e-01 -7.71449208e-02 2.56309479e-01 ... 7.29195327e-02 1.04195729e-01 1.40115712e-02] [-1.04635321e-01 1.09942123e-01 -1.94770191e-02 ... -3.41249704e-01 -2.03617930e-01 -1.14973478e-01] [ 9.01707634e-02 8.60274881e-02 4.10364158e-02 ... 1.14645146e-01 -2.35551402e-01 -3.88679653e-02] ... [ 9.44507048e-02 -1.15128808e-01 -3.20687443e-02 ... 5.41018471e-02 -4.64501195e-02 -8.35061595e-02] [-6.67756796e-02 -2.93950047e-02 6.66953772e-02 ... -1.09665379e-01 1.38808906e-01 -4.46653329e-02] [ 2.39050966e-02 1.63414627e-02 7.18735233e-02 ... 9.53973681e-02 9.13558453e-02 5.15437722e-02]] [[-8.85044113e-02 -7.82523945e-04 2.51056869e-02 ... -6.25663549e-02 2.08822228e-02 1.50227584e-02] [-9.00024399e-02 -8.32037777e-02 -3.20166051e-02 ... 3.78200188e-02 -2.26035565e-02 5.60176522e-02] [ 7.68435821e-02 -1.20242588e-01 -7.37793837e-03 ... 1.72341183e-01 4.46559563e-02 -5.21712601e-02] ... [-7.08989054e-03 1.10277971e-02 -1.75086863e-03 ... 4.35967669e-02 1.38814986e-01 -1.96835026e-01] [ 5.31464480e-02 -5.13099506e-03 -1.60859823e-02 ... -6.22119531e-02 1.06638044e-01 -2.40286477e-02] [ 1.43583268e-01 -8.88823792e-02 -1.53706791e-02 ... 
-4.64570038e-02 -4.68157120e-02 5.73995663e-03]] [[-2.25850232e-02 2.04273892e-04 1.07703209e-02 ... -1.44533902e-01 -1.77580744e-01 6.34101406e-02] [-3.18278410e-02 7.02543706e-02 6.86574960e-03 ... -1.17018804e-01 -6.06370158e-02 7.99367726e-02] [-2.02029981e-02 -8.26657191e-02 -7.47375004e-03 ... 6.36110380e-02 -3.02229077e-02 -1.94073049e-03] ... [ 1.46274537e-01 -5.17413393e-02 1.05997279e-01 ... 4.06170972e-02 -2.69566067e-02 8.62087756e-02] [ 1.78256601e-01 -1.30275255e-02 -1.13502175e-01 ... 6.05411865e-02 4.41018827e-02 -2.34535187e-02] [ 8.01214650e-02 1.44343488e-02 2.53939000e-03 ... -9.48833823e-02 -4.01838012e-02 -2.47060597e-01]]] [[[-4.88796651e-01 1.30236304e+00 6.65731192e-01 ... -3.15498769e-01 5.34444973e-02 5.40259294e-02] [ 6.53726459e-02 7.53076151e-02 4.56755787e-01 ... 6.55440390e-01 2.97501385e-02 -9.86878693e-01] [ 1.89247891e-01 1.52448401e-01 -2.34005898e-01 ... -3.46312463e-01 -1.38761365e+00 4.33331639e-01] ... [ 1.09066296e+00 -3.66636753e-01 -1.59011826e-01 ... 5.73171377e-02 1.34720296e-01 -7.22327292e-01] [ 3.10213923e-01 -3.76599021e-02 -2.73727506e-01 ... -2.57135093e-01 5.91470778e-01 5.10957122e-01] [ 2.90705979e-01 -2.76721716e-01 1.76931202e+00 ... 3.18521440e-01 -5.67452550e-01 -2.00460032e-02]] [[-5.21606803e-01 -1.10716808e+00 4.29252565e-01 ... 8.53788182e-02 9.71756577e-01 -1.30544019e+00] [ 4.56318170e-01 -5.17937601e-01 3.27230722e-01 ... -8.25300336e-01 -2.25310951e-01 2.14875889e+00] [-6.00183383e-02 3.31395835e-01 1.31237179e-01 ... 6.03346705e-01 1.52071258e-02 5.73333204e-01] ... [-1.46232188e+00 -1.63256258e-01 -2.17739558e+00 ... -3.56307596e-01 -1.14060426e+00 2.15061140e+00] [ 1.08610010e+00 1.42380035e+00 7.93549120e-01 ... -2.92121112e-01 4.07989293e-01 -1.66542375e+00] [-1.19566992e-01 -2.14057669e-01 1.39956522e+00 ... 1.83012545e-01 1.67801782e-01 1.35823333e+00]] [[ 2.14708030e-01 4.55810428e-02 1.39339790e-01 ... 
4.79418337e-01 -1.44819230e-01 9.35575739e-02] [-4.42505032e-01 -5.12203667e-03 5.76690376e-01 ... 5.85633039e-01 -1.58817604e-01 -2.26639271e-01] [ 3.90858240e-02 1.92631245e-01 -2.85987049e-01 ... 1.31795198e-01 2.09348157e-01 6.64627180e-02] ... [ 1.04823545e-01 -5.95328547e-02 4.88111302e-02 ... 1.82034701e-01 -3.10758222e-02 -1.97439536e-01] [-1.13702767e-01 2.73273140e-01 -4.59418297e-01 ... -3.21314037e-01 8.06503892e-02 2.30701581e-01] [ 5.47995493e-02 4.87642772e-02 -1.00780189e-01 ... 2.78391093e-01 -3.94162275e-02 1.31589442e-01]] [[-3.04847378e-02 1.11023821e-01 8.52536485e-02 ... 8.00606795e-03 6.43120287e-03 7.44250715e-02] [-1.79876104e-01 -4.44205850e-02 2.97230273e-03 ... 2.21924916e-01 -1.16066840e-04 2.52925027e-02] [ 7.27836639e-02 -5.63343801e-02 -4.43670107e-03 ... 1.66132957e-01 1.55078545e-01 3.00899632e-02] ... [-5.65830730e-02 5.80175817e-02 9.25483331e-02 ... -1.00498714e-01 -1.83391199e-01 2.85010099e-01] [-2.72137135e-01 3.35525900e-01 4.81797941e-03 ... 2.64067948e-01 8.90098792e-03 2.30074331e-01] [ 8.59575942e-02 -2.28083599e-03 6.63085729e-02 ... -4.91246406e-04 -1.57577798e-01 3.84120457e-02]] [[-1.23291925e-01 -1.07195489e-01 6.61799014e-02 ... -5.41070439e-02 5.80019492e-04 4.26764078e-02] [ 1.56727377e-02 5.79295307e-02 -3.54134962e-02 ... 3.53636779e-02 6.96158111e-02 5.06423377e-02] [ 4.86444030e-03 -7.54053667e-02 -4.16766331e-02 ... 5.57854027e-02 -6.77969353e-03 -1.71751641e-02] ... [ 2.96429154e-02 -4.04190794e-02 -6.37709349e-02 ... 1.08932152e-01 -3.22158299e-02 -9.22653824e-02] [ 1.92996617e-02 -6.03489727e-02 -1.11816516e-02 ... -3.65596712e-02 4.24773321e-02 -2.77361330e-02] [ 4.52775955e-02 -5.24249077e-02 1.22909397e-01 ... 2.96382215e-02 -4.88387719e-02 5.48647642e-02]] [[ 6.58919290e-02 -3.66231613e-02 4.57052924e-02 ... -4.73974682e-02 -6.90170228e-02 2.00470492e-01] [ 4.77973521e-02 4.33160109e-04 3.59137580e-02 ... 
-2.03565750e-02 1.51307821e-01 1.60974801e-01] [ 2.08083428e-02 -6.83971792e-02 9.02917534e-02 ... 8.74704719e-02 1.77342251e-01 7.47435912e-02] ... [-1.96282123e-03 -1.51074916e-01 6.97239563e-02 ... 1.46459062e-02 -1.35689005e-01 -7.33082965e-02] [ 1.11051857e-01 -1.17297368e-02 4.24045809e-02 ... -3.24041769e-02 -2.42146239e-01 -1.56264052e-01] [-5.61434031e-02 -1.44902617e-01 -1.84065532e-02 ... 3.31077762e-02 1.60083786e-01 -1.54586837e-01]]]]; ov_res: [[[[ 3.11506361e-01 -4.02600080e-01 2.02164441e-01 ... -1.14136720e+00 -8.36095929e-01 -3.45890850e-01] [-2.01468647e-01 -8.68467867e-01 4.88867819e-01 ... 6.67144358e-02 -2.27538943e-01 3.19043547e-01] [-3.57115678e-02 -1.19789171e+00 3.49276885e-02 ... -3.63494396e-01 1.48826078e-01 1.27741978e-01] ... [ 4.91209447e-01 -6.21195063e-02 7.75863528e-01 ... 1.29276931e-01 1.05759442e+00 -1.10718203e+00] [ 3.38120729e-01 1.77617595e-01 -2.84482419e-01 ... -3.45836729e-01 6.27292812e-01 3.65881503e-01] [-1.13814101e-01 -4.30641145e-01 6.23858929e-01 ... 2.93869287e-01 -6.64956748e-01 -1.34778753e-01]] [[ 6.59407616e-01 -8.48211527e-01 1.51666090e-01 ... 3.61875355e-01 5.59694290e-01 -1.27555773e-01] [ 4.74483520e-01 6.12090409e-01 -1.01814963e-01 ... 9.11710784e-02 -9.23654497e-01 -1.13847876e+00] [ 2.52410680e-01 -8.09843779e-01 1.30770743e+00 ... -1.00658429e+00 2.00535703e+00 1.86187923e+00] ... [-1.26123500e+00 -1.04942739e+00 6.51014507e-01 ... 4.23190743e-01 -2.78237253e-01 2.27777719e-01] [-4.50582951e-01 -9.99088228e-01 -1.01967359e+00 ... -1.15458637e-01 1.62694943e+00 6.09866440e-01] [-6.60418347e-02 -3.78553629e-01 -5.12965083e-01 ... -5.60479388e-02 -1.22923291e+00 1.79376990e-01]] [[-1.07044235e-01 -8.41611400e-02 -3.79266620e-01 ... -1.81128561e-01 -5.71556762e-02 2.51439124e-01] [ 1.34227112e-01 1.54664665e-01 2.88701922e-01 ... -8.18773985e-01 -2.60423064e-01 -2.62406647e-01] [-5.02588972e-02 -3.31918836e-01 2.58114338e-01 ... 1.98969007e-01 2.21442170e-02 1.22844586e-02] ... 
[ 7.58564249e-02 -1.63905010e-01 -6.19171597e-02 ... -5.96283190e-02 -3.46791029e-01 1.31087735e-01] [-3.27730626e-01 8.91565830e-02 1.41721785e-01 ... 3.16863626e-01 1.12113333e+00 3.07538003e-01] [-1.52748391e-01 -4.86561984e-01 3.02038133e-01 ... -4.79584515e-01 3.66693325e-02 -1.71696469e-01]] [[ 5.79504110e-02 -1.48235140e-02 2.44186088e-01 ... 6.75328076e-02 1.78019419e-01 8.38066414e-02] [-8.80363062e-02 -2.09976867e-01 -9.58865434e-02 ... 8.25812891e-02 -4.16776203e-02 7.22205192e-02] [-1.48698404e-01 -1.14527546e-01 2.16612294e-02 ... -6.16206191e-02 -1.23061635e-01 -1.40622944e-01] ... [ 4.47895378e-02 9.41352695e-02 -2.10103896e-02 ... -2.49864683e-01 -6.09863410e-03 -1.10447910e-02] [ 4.25332785e-02 1.06048033e-01 1.41754448e-01 ... 8.28325823e-02 1.16552822e-01 -6.45839944e-02] [ 1.66335881e-01 -4.42780182e-02 -7.71524608e-02 ... 1.77470341e-01 1.82193339e-01 -2.14913055e-01]] [[ 2.16893535e-02 -7.51152933e-02 1.15608953e-01 ... 3.37624177e-03 -5.17992629e-03 -1.00706547e-01] [ 2.36401986e-02 1.30110741e-01 6.84624445e-03 ... 4.66807596e-02 2.98853740e-02 4.97599272e-03] [ 6.88208714e-02 -3.66327576e-02 -9.06346515e-02 ... 1.07984580e-01 3.08087338e-02 3.16632278e-02] ... [ 9.01097804e-02 1.80816650e-02 1.14797410e-02 ... -7.42016733e-03 -5.77359684e-02 -3.79196107e-02] [-5.08100912e-03 9.64584425e-02 3.68461721e-02 ... -4.44110483e-03 -1.15971401e-01 5.35202101e-02] [-5.29045612e-02 -2.11051423e-02 1.10735871e-01 ... 5.46812564e-02 4.20486368e-02 9.81170908e-02]] [[-3.04237325e-02 4.25422899e-02 -7.81028271e-02 ... -2.77497433e-02 7.89909810e-02 5.64556103e-03] [-8.42633098e-02 8.92074183e-02 -1.87272951e-02 ... 1.80713758e-02 1.30780619e-02 1.79588944e-01] [ 7.39768744e-02 -3.68539132e-02 1.50748596e-01 ... -1.95950698e-02 -7.88622499e-02 8.96877721e-02] ... [-9.15380865e-02 6.34610504e-02 -1.11338995e-01 ... 3.97871993e-02 -1.00512825e-01 1.23516932e-01] [ 9.14326087e-02 1.37341842e-01 -7.54410177e-02 ... 
4.40737568e-02 1.95419684e-01 1.35088749e-02] [ 7.81969503e-02 1.16409231e-02 1.60448238e-01 ... -2.87156761e-01 2.08241865e-01 4.26079482e-02]]] [[[-5.52773952e-01 -3.15036178e-01 1.48697823e-01 ... 4.92012590e-01 4.06256258e-01 4.46495861e-01] [-5.72456121e-01 3.04739505e-01 1.28650725e-01 ... -3.77855062e-01 5.57070017e-01 9.14028108e-01] [-5.47105014e-01 -5.48108160e-01 -5.50870955e-01 ... -4.08799350e-01 1.75440572e-02 7.70640075e-01] ... [ 5.95914960e-01 1.14992917e+00 4.73640375e-02 ... 9.22605634e-01 7.61699736e-01 -6.13174915e-01] [ 3.30651939e-01 4.50681716e-01 -1.99583266e-03 ... -3.39994639e-01 -1.10622919e+00 -1.41096339e-01] [ 6.85426295e-01 -3.39979231e-01 2.27998480e-01 ... -4.25613642e-01 1.33226705e+00 1.16282046e+00]] [[-8.76048803e-01 -9.43783104e-01 8.04392993e-01 ... 1.51238668e+00 1.51144862e+00 3.41436327e-01] [-8.28657627e-01 1.99982420e-01 1.68078065e+00 ... 1.08542240e+00 -1.26669645e+00 -3.66371453e-01] [ 2.42962003e-01 1.39202988e+00 3.91660601e-01 ... 5.64021826e-01 -1.73916709e+00 -1.70851409e-01] ... [-1.79063153e+00 -4.42088604e-01 -2.87873328e-01 ... 6.29776642e-02 3.78767341e-01 1.40906167e+00] [ 2.03197133e-02 -2.27944881e-01 -7.59514332e-01 ... 6.74144030e-01 -8.75096977e-01 6.50256991e-01] [ 2.57617682e-01 1.36292756e+00 -4.20594215e-01 ... 7.94952586e-02 2.12267384e-01 1.60842586e+00]] [[ 1.45164337e-02 -1.02528699e-01 -2.38031626e-01 ... -3.85682046e-01 2.63851762e-01 1.55038461e-01] [-9.83473212e-02 3.23451519e-01 -1.67792256e-03 ... 3.24648023e-01 -3.65178525e-01 -4.24305409e-01] [-1.83594629e-01 1.21802323e-01 -2.67343968e-01 ... -2.32572153e-01 -3.96425538e-02 1.53383359e-01] ... [ 3.60100344e-02 -7.29017779e-02 -1.09963939e-01 ... 3.94050568e-01 -3.42437148e-01 2.84196828e-02] [-2.85995543e-01 2.86006089e-02 -6.73463866e-02 ... -4.53243405e-01 -2.14470446e-01 1.45423755e-01] [-3.41345891e-02 -4.23030183e-02 -1.07734159e-01 ... 
-3.61057729e-01 -1.69160739e-01 -2.77339697e-01]] [[ 1.69936985e-01 -1.16803445e-01 -2.61377513e-01 ... -1.02875791e-01 5.55813685e-02 1.93614289e-01] [ 1.63408220e-02 -1.56878933e-01 -5.83521836e-02 ... -1.60551129e-03 9.42155868e-02 -3.42118330e-02] [-9.12220106e-02 2.13310704e-01 -1.13008097e-01 ... 1.59105316e-01 6.39847741e-02 -3.17132287e-02] ... [ 2.02523395e-01 2.41605435e-02 2.25748122e-01 ... -1.23145625e-01 -2.12660106e-03 1.14917517e-01] [ 1.85622603e-01 5.60219735e-02 1.80949960e-02 ... 5.33022778e-03 -5.97151592e-02 -1.81662545e-01] [-3.71810585e-01 1.30069837e-01 3.63954306e-02 ... -2.88918823e-01 2.73270726e-01 5.16466051e-02]] [[-2.23585162e-02 -1.94291994e-02 2.14831661e-02 ... -7.77980760e-02 -1.57088565e-03 1.26618609e-01] [-2.78908741e-02 4.44824249e-02 1.04286447e-01 ... 2.64593470e-03 -2.75510047e-02 -3.54348384e-02] [ 3.46076116e-02 5.38583025e-02 3.89762372e-02 ... -1.17426485e-01 1.44219529e-02 -5.46664260e-02] ... [ 4.23896909e-02 1.97186545e-02 2.17334032e-02 ... -1.97083745e-02 4.31660786e-02 5.03373938e-03] [ 1.85259886e-03 8.85250866e-02 3.15294191e-02 ... 5.96081577e-02 1.32471040e-01 -9.96117517e-02] [-4.15228680e-02 4.89232354e-02 8.22503641e-02 ... 5.41291460e-02 -1.37121394e-01 1.74804609e-02]] [[ 3.01426463e-02 -7.23383809e-03 1.88742235e-01 ... -1.18561722e-01 -7.72670582e-02 1.61556542e-01] [-1.36524311e-03 -6.45777062e-02 -2.60712150e-02 ... 2.65145183e-01 5.45669049e-02 2.80410759e-02] [ 3.91595066e-02 1.02411985e-01 8.14935789e-02 ... 9.71998721e-02 -1.52729556e-01 -1.19941682e-01] ... [ 3.01031955e-02 -6.00922406e-02 1.33542955e-01 ... 9.64923427e-02 1.64636627e-01 1.05687946e-01] [-2.06229523e-01 -7.43860602e-02 -4.90189195e-02 ... -8.79392326e-02 1.04032852e-01 -1.60274766e-02] [ 6.18763193e-02 9.81676877e-02 8.57161507e-02 ... 7.74035305e-02 -1.59072965e-01 -3.44177336e-02]]] [[[-2.16077834e-01 9.13388550e-01 8.21344852e-01 ... 
8.00956905e-01 -5.88661194e-01 4.05793935e-01] [-1.20912254e+00 -5.36603153e-01 2.15054348e-01 ... 6.91495687e-02 6.69589281e-01 1.08274734e+00] [ 1.08390264e-01 2.36428633e-01 2.34340742e-01 ... -4.85693395e-01 -4.62544680e-01 1.65954977e-01] ... [ 1.70763761e-01 -4.04998690e-01 -5.23678027e-02 ... -1.12128901e+00 -1.37623775e+00 2.76152343e-01] [-1.99139044e-01 7.86637664e-01 3.89922261e-02 ... 1.02216041e+00 8.50439727e-01 1.17560171e-01] [ 7.71781266e-01 -1.31768808e-01 -1.30313730e+00 ... -8.07064235e-01 -2.39740196e-03 5.66353500e-01]] [[ 8.29910517e-01 6.23787105e-01 2.33668184e+00 ... -7.28928030e-01 -7.82001853e-01 7.92206109e-01] [-2.16264892e+00 -7.14639783e-01 -9.51408297e-02 ... -1.63991362e-01 1.33828676e+00 8.32965732e-01] [-7.18765497e-01 1.31522918e+00 5.63703835e-01 ... -2.28996301e+00 8.69794637e-02 1.03161335e+00] ... [-7.86092341e-01 8.77197564e-01 1.35102296e+00 ... 8.21379781e-01 -3.88495117e-01 1.76202309e+00] [ 1.06231332e-01 8.07373405e-01 -2.46929675e-01 ... 7.52601564e-01 -1.89708948e-01 -3.65379184e-01] [ 1.02032578e+00 -3.57053697e-01 -1.65858161e+00 ... -1.50148880e+00 -2.70249248e-01 -1.47628218e-01]] [[-2.73334384e-01 -1.04525678e-01 -2.03440756e-01 ... -2.92512387e-01 1.86724722e-01 -1.40765784e-02] [ 2.66369432e-01 -5.73407300e-02 -2.52329320e-01 ... -5.08242808e-02 2.29288071e-01 -1.88091844e-02] [-1.80921033e-01 -2.08816096e-01 -9.13093761e-02 ... 2.31112704e-01 1.96054250e-01 7.99876451e-02] ... [ 3.98060888e-01 3.03668678e-01 5.47150262e-02 ... -7.68186525e-02 3.27494830e-01 -1.32051244e-01] [ 2.15806365e-01 5.15067875e-02 -1.49112895e-01 ... 6.56000674e-02 1.05596095e-01 2.19003800e-02] [-1.78132519e-01 9.48348716e-02 4.20098871e-01 ... -2.45741308e-01 -1.18235379e-01 -5.00716805e-01]] [[-4.83517125e-02 -6.59699319e-03 -1.71375245e-01 ... 3.65244374e-02 -4.45305444e-02 -4.24851514e-02] [-3.03285062e-01 -7.65936747e-02 2.34057698e-02 ... 
-2.87991092e-02 -9.74556059e-02 -5.20290546e-02] [ 1.97637677e-01 6.94108084e-02 1.65407225e-01 ... -1.60304606e-01 1.48217678e-01 -2.89909542e-02] ... [ 1.51089087e-01 4.97898366e-03 7.17762411e-02 ... -3.11869401e-02 -7.13138133e-02 -2.37271801e-01] [-6.68778047e-02 9.40289125e-02 1.25396401e-01 ... 2.13186163e-02 -9.09639746e-02 -8.57230276e-02] [ 1.23342149e-01 4.10572402e-02 1.03786802e-02 ... 9.25204456e-02 -1.63281813e-01 2.13944808e-01]] [[ 2.75064670e-02 -1.95912600e-01 1.20290238e-02 ... -4.48234901e-02 -1.38129015e-02 -9.55366902e-03] [-4.30981144e-02 7.15217888e-02 -1.27462521e-02 ... 2.00354028e-03 2.06974167e-02 -6.03667200e-02] [-4.87099998e-02 -1.21088542e-01 -4.51905318e-02 ... -7.56434053e-02 1.57066167e-03 -8.42732266e-02] ... [ 9.45277363e-02 1.67458523e-02 -6.27811402e-02 ... 2.36855634e-02 -3.14790793e-02 -7.23512918e-02] [ 7.90238082e-02 6.75935820e-02 5.51798381e-02 ... -1.45658515e-02 -3.05035468e-02 -6.59780717e-03] [ 3.10664177e-02 7.81872496e-02 -1.67048462e-02 ... 1.12390900e-02 -9.12090484e-03 4.14252207e-02]] [[-2.70234831e-02 7.80173913e-02 -9.30768624e-02 ... 9.08582285e-02 -5.14037348e-02 4.80676033e-02] [ 1.40738845e-01 5.93622029e-02 9.47345719e-02 ... 2.10851077e-02 5.67236822e-03 7.20424205e-02] [-1.75362043e-02 2.20900383e-02 2.24421844e-02 ... -5.59933558e-02 -1.34287477e-01 6.86084107e-02] ... [-3.75758894e-02 -4.32885811e-02 -1.58699289e-01 ... -1.65673152e-01 1.39633089e-01 8.45305026e-02] [ 1.99917611e-02 -1.07346907e-01 1.26522914e-01 ... -6.86906800e-02 6.95834979e-02 -7.98804685e-03] [-7.14866519e-02 -5.50248511e-02 -5.75700700e-02 ... 7.47137070e-02 -1.76298060e-02 -6.44020289e-02]]] ... [[[-2.23763082e-02 -1.38403878e-01 6.23601973e-01 ... 5.29174805e-01 -3.55077803e-01 1.16098332e+00] [ 9.74852502e-01 5.24396360e-01 -2.15535834e-02 ... 1.21858038e-01 1.34750113e-01 -1.16355026e+00] [ 9.36726928e-01 -2.63221651e-01 -4.18368101e-01 ... 8.65958154e-01 4.73808497e-01 3.25392365e-01] ... 
[-1.83601096e-01 -5.41116707e-02 3.33024263e-01 ... 7.10693359e-01 -3.08435917e-01 -7.17190027e-01] [-1.10215947e-01 -1.32777214e+00 4.29999650e-01 ... 7.36411095e-01 -3.51267517e-01 -1.63916337e+00] [ 4.79770333e-01 9.27682295e-02 4.32451546e-01 ... 2.02645570e-01 -5.61010063e-01 -7.34644890e-01]] [[-1.90322623e-01 5.78276157e-01 -6.98652208e-01 ... -1.71416664e+00 -1.81349576e+00 5.06311893e-01] [ 9.35689032e-01 -1.09578395e+00 -3.68196726e-01 ... -6.84417307e-01 -1.11784136e+00 1.32303023e+00] [-3.99881974e-02 2.30023909e+00 -5.73833048e-01 ... -2.64874808e-02 3.84495020e-01 2.05806404e-01] ... [ 1.70016646e-01 2.41564417e+00 -1.38489270e+00 ... -1.60677898e+00 -1.17053974e+00 -1.32549560e+00] [ 1.89363569e-01 1.11373627e+00 -3.65602702e-01 ... -2.51051354e+00 -4.19667333e-01 8.17204416e-01] [ 6.15559556e-02 4.68594909e-01 -3.94687682e-01 ... -9.20896888e-01 -2.98245519e-01 -6.84083462e-01]] [[ 1.19843474e-02 2.56700013e-02 1.47013530e-01 ... -1.37538210e-01 -2.10777164e-01 4.72119600e-01] [-5.53871766e-02 -4.74829786e-02 2.91996330e-01 ... 9.26024094e-02 -2.23154485e-01 -4.72338527e-01] [ 2.55085304e-02 2.45529920e-01 5.52715249e-02 ... -1.64423697e-02 -1.79707095e-01 -2.08373129e-01] ... [-4.41790111e-02 1.24060772e-01 -1.49122602e-03 ... 3.70554268e-01 1.57947376e-01 -2.08221301e-01] [ 2.62689311e-02 -2.82245904e-01 -1.34677619e-01 ... 2.26307496e-01 -7.91757166e-01 2.27670688e-02] [ 4.72669721e-01 1.30733147e-01 6.07860804e-01 ... 4.40707684e-01 -5.82636654e-01 -1.87612236e-01]] [[-1.10330075e-01 -1.10468548e-02 -4.78031635e-02 ... -1.30077899e-01 -3.29826809e-02 1.16412174e-02] [ 1.03280976e-01 4.68281843e-02 -1.77423045e-01 ... 1.33889318e-01 -1.96288049e-01 4.28305659e-03] [-6.01729676e-02 1.79900855e-01 2.73602813e-01 ... -1.41763147e-02 -6.84288815e-02 -6.68592006e-02] ... [-7.59467781e-02 -2.38037389e-02 1.03793658e-01 ... 4.49106991e-02 1.60496086e-01 1.59609795e-01] [-2.03561008e-01 -4.61389162e-02 -6.46425933e-02 ... 
1.33769512e-01 3.63514461e-02 7.23408982e-02] [ 1.89670250e-02 1.83919474e-01 3.14322077e-02 ... 4.37589325e-02 -2.67228168e-02 -1.44470364e-01]] [[-3.34075391e-02 7.76324570e-02 9.29818600e-02 ... -8.30947384e-02 -5.96810803e-02 8.03133752e-03] [-1.12585850e-01 -5.40660620e-02 -1.58628821e-02 ... -1.68801025e-02 -7.53068775e-02 -2.30155185e-01] [-3.91653506e-03 -3.34219448e-02 -4.49827872e-02 ... -5.72500490e-02 1.70952361e-02 -5.49033396e-02] ... [ 2.54674070e-02 1.02663122e-01 -1.17423676e-01 ... 8.03734213e-02 4.18527126e-02 3.25798094e-02] [ 5.26801459e-02 6.48939759e-02 -8.24445784e-02 ... 1.11338355e-01 3.56413797e-02 -1.33449480e-01] [ 5.66767119e-02 -6.57036826e-02 -1.04083847e-02 ... -4.86779287e-02 -1.99746415e-02 -1.05952404e-01]] [[-9.03627574e-02 -1.18414432e-01 -2.07459241e-01 ... 3.65367942e-02 -9.52006727e-02 9.92932841e-02] [-1.65941492e-01 2.22655386e-03 1.92162138e-03 ... 1.21103145e-01 1.78647749e-02 2.83402652e-02] [ 1.43135160e-01 3.02857552e-02 8.91994387e-02 ... -5.63134514e-02 7.73206428e-02 -8.38286579e-02] ... [ 4.77786995e-02 1.53298909e-02 -1.02357559e-01 ... -2.76787765e-02 -7.86046088e-02 5.96097521e-02] [ 7.30141066e-03 2.48608384e-02 -3.88185978e-02 ... -3.81697640e-02 -8.22553486e-02 -5.42299524e-02] [-7.58251771e-02 -1.07225463e-01 -9.72517058e-02 ... -1.46173522e-01 -1.33505315e-01 4.54034843e-02]]] [[[-9.47230577e-01 7.59604454e-01 -3.09360832e-01 ... 1.56108034e+00 -1.86186001e-01 -5.43083787e-01] [ 2.84763247e-01 1.05471504e+00 2.62460828e-01 ... 1.26998043e+00 5.28584778e-01 -2.95245368e-02] [-1.22512899e-01 3.23780537e-01 -9.61436331e-01 ... -1.38274327e-01 4.87574995e-01 1.14623225e+00] ... [-2.77728766e-01 8.44049975e-02 4.66371849e-02 ... -6.32174462e-02 -6.56308651e-01 4.10350442e-01] [ 1.26353517e-01 -2.54174322e-01 5.10485530e-01 ... -9.15445209e-01 -8.27227712e-01 2.76238769e-01] [ 6.44794524e-01 -2.74146050e-01 6.49514854e-01 ... 
-2.83447146e-01 -3.59389156e-01 5.48476160e-01]] [[ 1.06467396e-01 5.42038262e-01 4.73456681e-01 ... 4.75676626e-01 -2.49097720e-01 -1.20178089e-01] [-7.23759472e-01 8.51211965e-01 6.57046974e-01 ... -6.12359583e-01 -2.00621724e-01 -2.26434425e-01] [ 6.25850797e-01 -1.45998871e+00 -1.51277328e+00 ... 4.68998611e-01 1.99072504e+00 -1.19401261e-01] ... [ 2.53440785e+00 1.23672950e+00 9.61901605e-01 ... 7.65157878e-01 8.51649344e-01 -9.22584832e-01] [ 8.46456409e-01 7.48258650e-01 -1.91225410e-02 ... -7.88280070e-01 4.08169746e-01 2.09148717e+00] [ 9.37167168e-01 1.25864530e-02 2.45894766e+00 ... -7.07311451e-01 -8.87975276e-01 3.17400932e-01]] [[-3.68238449e-01 1.45930827e-01 2.97184825e-01 ... 1.90389916e-01 3.79109681e-01 1.70528591e-01] [-3.03222567e-01 -2.61114568e-01 3.15196455e-01 ... 1.24610737e-01 -6.74201325e-02 9.84361693e-02] [-5.05269229e-01 -1.22071411e-02 -4.09509748e-01 ... 2.62596700e-02 -3.39118868e-01 1.40668720e-01] ... [-1.81532517e-01 3.12231928e-01 3.78701299e-01 ... 1.19510211e-01 -3.85868460e-01 3.12743452e-03] [ 3.69816154e-01 -4.92762476e-01 -3.55886132e-01 ... 9.05847400e-02 1.06071413e-01 -2.29530275e-01] [ 2.26165354e-01 1.39419094e-01 3.34681392e-01 ... 1.35167792e-01 -2.55277544e-01 -3.94407362e-01]] [[ 1.80416837e-01 -7.71449283e-02 2.56309509e-01 ... 7.29195401e-02 1.04195751e-01 1.40115740e-02] [-1.04635336e-01 1.09942138e-01 -1.94770209e-02 ... -3.41249734e-01 -2.03617960e-01 -1.14973493e-01] [ 9.01707783e-02 8.60274956e-02 4.10364233e-02 ... 1.14645161e-01 -2.35551432e-01 -3.88679691e-02] ... [ 9.44507122e-02 -1.15128823e-01 -3.20687480e-02 ... 5.41018546e-02 -4.64501269e-02 -8.35061744e-02] [-6.67756870e-02 -2.93950085e-02 6.66953847e-02 ... -1.09665401e-01 1.38808921e-01 -4.46653366e-02] [ 2.39051003e-02 1.63414646e-02 7.18735382e-02 ... 9.53973904e-02 9.13558528e-02 5.15437797e-02]] [[-8.85044113e-02 -7.82523595e-04 2.51056887e-02 ... 
-6.25663549e-02 2.08822247e-02 1.50227584e-02] [-9.00024399e-02 -8.32037777e-02 -3.20166089e-02 ... 3.78200188e-02 -2.26035565e-02 5.60176559e-02] [ 7.68435895e-02 -1.20242588e-01 -7.37793837e-03 ... 1.72341183e-01 4.46559601e-02 -5.21712601e-02] ... [-7.08989007e-03 1.10277981e-02 -1.75086828e-03 ... 4.35967706e-02 1.38814986e-01 -1.96835026e-01] [ 5.31464480e-02 -5.13099460e-03 -1.60859842e-02 ... -6.22119531e-02 1.06638052e-01 -2.40286477e-02] [ 1.43583268e-01 -8.88823792e-02 -1.53706800e-02 ... -4.64570038e-02 -4.68157120e-02 5.73995709e-03]] [[-2.25850251e-02 2.04273500e-04 1.07703209e-02 ... -1.44533917e-01 -1.77580759e-01 6.34101406e-02] [-3.18278447e-02 7.02543706e-02 6.86575007e-03 ... -1.17018819e-01 -6.06370158e-02 7.99367726e-02] [-2.02029981e-02 -8.26657191e-02 -7.47375004e-03 ... 6.36110380e-02 -3.02229077e-02 -1.94073084e-03] ... [ 1.46274537e-01 -5.17413393e-02 1.05997279e-01 ... 4.06170972e-02 -2.69566085e-02 8.62087756e-02] [ 1.78256616e-01 -1.30275264e-02 -1.13502182e-01 ... 6.05411865e-02 4.41018827e-02 -2.34535187e-02] [ 8.01214576e-02 1.44343488e-02 2.53938977e-03 ... -9.48833898e-02 -4.01838012e-02 -2.47060597e-01]]] [[[-4.88796681e-01 1.30236316e+00 6.65731311e-01 ... -3.15498829e-01 5.34444936e-02 5.40259294e-02] [ 6.53726459e-02 7.53076151e-02 4.56755817e-01 ... 6.55440450e-01 2.97501367e-02 -9.86878753e-01] [ 1.89247891e-01 1.52448401e-01 -2.34005913e-01 ... -3.46312493e-01 -1.38761377e+00 4.33331668e-01] ... [ 1.09066308e+00 -3.66636753e-01 -1.59011841e-01 ... 5.73171377e-02 1.34720311e-01 -7.22327352e-01] [ 3.10213953e-01 -3.76599133e-02 -2.73727536e-01 ... -2.57135123e-01 5.91470838e-01 5.10957122e-01] [ 2.90705979e-01 -2.76721776e-01 1.76931214e+00 ... 3.18521440e-01 -5.67452610e-01 -2.00460106e-02]] [[-5.21606803e-01 -1.10716820e+00 4.29252595e-01 ... 8.53788406e-02 9.71756637e-01 -1.30544031e+00] [ 4.56318200e-01 -5.17937601e-01 3.27230752e-01 ... 
-8.25300395e-01 -2.25310951e-01 2.14875913e+00] [-6.00183345e-02 3.31395894e-01 1.31237194e-01 ... 6.03346705e-01 1.52071360e-02 5.73333263e-01] ... [-1.46232200e+00 -1.63256273e-01 -2.17739582e+00 ... -3.56307626e-01 -1.14060438e+00 2.15061140e+00] [ 1.08610010e+00 1.42380035e+00 7.93549180e-01 ... -2.92121142e-01 4.07989323e-01 -1.66542387e+00] [-1.19566984e-01 -2.14057669e-01 1.39956534e+00 ... 1.83012575e-01 1.67801782e-01 1.35823333e+00]] [[ 2.14708030e-01 4.55810428e-02 1.39339790e-01 ... 4.79418337e-01 -1.44819230e-01 9.35575739e-02] [-4.42505062e-01 -5.12203854e-03 5.76690376e-01 ... 5.85633039e-01 -1.58817619e-01 -2.26639271e-01] [ 3.90858203e-02 1.92631260e-01 -2.85987049e-01 ... 1.31795198e-01 2.09348142e-01 6.64627180e-02] ... [ 1.04823545e-01 -5.95328584e-02 4.88111302e-02 ... 1.82034716e-01 -3.10758241e-02 -1.97439536e-01] [-1.13702774e-01 2.73273140e-01 -4.59418327e-01 ... -3.21314037e-01 8.06503892e-02 2.30701566e-01] [ 5.47995493e-02 4.87642735e-02 -1.00780196e-01 ... 2.78391093e-01 -3.94162312e-02 1.31589442e-01]] [[-3.04847378e-02 1.11023828e-01 8.52536410e-02 ... 8.00606981e-03 6.43120380e-03 7.44250715e-02] [-1.79876104e-01 -4.44205850e-02 2.97230389e-03 ... 2.21924931e-01 -1.16065683e-04 2.52925046e-02] [ 7.27836713e-02 -5.63343801e-02 -4.43669967e-03 ... 1.66132972e-01 1.55078560e-01 3.00899651e-02] ... [-5.65830693e-02 5.80175854e-02 9.25483331e-02 ... -1.00498706e-01 -1.83391199e-01 2.85010099e-01] [-2.72137135e-01 3.35525900e-01 4.81798081e-03 ... 2.64067948e-01 8.90098885e-03 2.30074346e-01] [ 8.59575942e-02 -2.28083506e-03 6.63085729e-02 ... -4.91245301e-04 -1.57577798e-01 3.84120494e-02]] [[-1.23291917e-01 -1.07195482e-01 6.61798939e-02 ... -5.41070439e-02 5.80019027e-04 4.26764078e-02] [ 1.56727359e-02 5.79295270e-02 -3.54134925e-02 ... 3.53636779e-02 6.96158037e-02 5.06423302e-02] [ 4.86443937e-03 -7.54053667e-02 -4.16766256e-02 ... 5.57853952e-02 -6.77969353e-03 -1.71751622e-02] ... 
[ 2.96429116e-02 -4.04190756e-02 -6.37709275e-02 ... 1.08932137e-01 -3.22158299e-02 -9.22653750e-02] [ 1.92996599e-02 -6.03489690e-02 -1.11816498e-02 ... -3.65596674e-02 4.24773283e-02 -2.77361292e-02] [ 4.52775918e-02 -5.24249002e-02 1.22909382e-01 ... 2.96382196e-02 -4.88387682e-02 5.48647568e-02]] [[ 6.58919215e-02 -3.66231576e-02 4.57052886e-02 ... -4.73974645e-02 -6.90170154e-02 2.00470477e-01] [ 4.77973446e-02 4.33160574e-04 3.59137580e-02 ... -2.03565732e-02 1.51307806e-01 1.60974771e-01] [ 2.08083410e-02 -6.83971718e-02 9.02917385e-02 ... 8.74704644e-02 1.77342221e-01 7.47435763e-02] ... [-1.96282053e-03 -1.51074901e-01 6.97239488e-02 ... 1.46459052e-02 -1.35688990e-01 -7.33082891e-02] [ 1.11051850e-01 -1.17297349e-02 4.24045734e-02 ... -3.24041732e-02 -2.42146209e-01 -1.56264037e-01] [-5.61433919e-02 -1.44902602e-01 -1.84065495e-02 ... 3.31077762e-02 1.60083771e-01 -1.54586822e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 4} - params:{'n_groups': 3, 'weights_shape': (6,)} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- 4 graph(%self : __torch__.test_group_norm.___torch_mangle_4604.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.6586 -0.2861 -0.7616 0.5177 0.7432 -0.0732 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=3]() %12 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, %5, %self.bias, %4) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) fw_re: [[[[-4.12632734e-01 1.40550363e+00 6.22657612e-02 ... 
5.64252660e-02 -2.08353877e-01 -8.96453559e-02] [ 1.69548941e+00 -4.57477719e-01 -6.59943521e-02 ... 6.72182627e-03 1.12224773e-01 -1.28789306e+00] [-4.28365588e-01 5.83944499e-01 -1.43374872e+00 ... -3.47336680e-02 -2.90767640e-01 -4.38433737e-01] ... [ 3.21749240e-01 2.57941514e-01 -9.40588593e-01 ... -4.35111344e-01 3.38420719e-01 1.69390464e+00] [ 6.95922911e-01 -3.29741478e-01 -8.83719862e-01 ... 3.20747867e-02 -9.51590419e-01 -1.08754611e+00] [-6.67701364e-02 3.77493024e-01 -2.40673587e-01 ... 2.47648671e-01 -4.97768968e-02 -9.63278711e-01]] [[-1.63556561e-01 -2.24000245e-01 -2.72275984e-01 ... 3.41319181e-02 -3.37302476e-01 2.60529667e-01] [-1.62539691e-01 1.40570387e-01 7.63194785e-02 ... 2.02452868e-01 -9.52066258e-02 -3.94096076e-01] [-6.25741780e-01 -1.86228007e-01 -4.69588861e-02 ... 4.15981427e-04 4.90653403e-02 1.41317546e-01] ... [ 9.91966426e-02 -4.40366656e-01 1.81777999e-01 ... -2.82948434e-01 -1.84862196e-01 -1.51914284e-01] [ 1.91984832e-01 1.43493801e-01 -6.83497414e-02 ... 1.70117423e-01 1.23950452e-01 8.48659873e-03] [ 4.06757176e-01 -1.48798317e-01 -3.54027629e-01 ... -3.44855525e-03 -2.97534633e-02 -5.37797296e-03]] [[ 2.57228494e-01 7.83253968e-01 1.03633177e+00 ... 6.40723944e-01 -1.15269661e+00 6.47679865e-01] [ 4.79891926e-01 6.57619238e-02 2.50019789e-01 ... -3.01847249e-01 6.16560221e-01 3.09495389e-01] [ 5.23397565e-01 -6.22551322e-01 1.87627263e-02 ... 5.75214565e-01 -9.58882749e-01 -2.56961346e-01] ... [ 5.68377495e-01 9.22591984e-01 -8.02408397e-01 ... -1.87443778e-01 5.13456017e-02 1.02619600e+00] [-1.33210027e+00 5.10504544e-01 1.92195594e+00 ... -5.04135549e-01 3.19367856e-01 -3.14637244e-01] [ 2.78830200e-01 -3.82918835e-01 4.79368031e-01 ... -1.23180069e-01 2.15542361e-01 2.06206754e-01]] [[ 1.26372620e-01 -5.61571419e-01 -4.67437804e-01 ... 1.95493683e-01 1.19078837e-01 5.99424303e-01] [ 2.19459876e-01 9.18749869e-02 3.78131658e-01 ... 
7.28657067e-01 1.16193285e-02 4.08268839e-01] [-7.09862113e-01 3.40787843e-02 -6.76713943e-01 ... 1.14632212e-01 -1.91167891e-02 -9.06833231e-01] ... [-5.12749314e-01 7.55062997e-02 -4.79656816e-01 ... 7.53515027e-03 5.62836714e-02 6.80465549e-02] [ 5.53739607e-01 -5.32655120e-01 -1.70513138e-01 ... 7.69340158e-01 1.03239059e+00 4.04637098e-01] [-3.58920246e-01 -9.21317697e-01 -5.19434154e-01 ... 8.34150136e-01 3.56603086e-01 -1.27632067e-01]] [[ 1.29480317e-01 -8.71518627e-02 -2.15186858e+00 ... -5.29581942e-02 -2.72810370e-01 4.90236163e-01] [-1.66378307e+00 -1.44166756e+00 9.06838119e-01 ... 4.12457343e-03 -3.16175409e-02 1.43423820e+00] [-1.06464922e+00 9.04051721e-01 -2.19148435e-02 ... -4.94066089e-01 5.81199713e-02 8.02771926e-01] ... [-1.26743257e+00 -2.00958893e-01 4.83152628e-01 ... -2.18866855e-01 4.77806814e-02 -8.04400682e-01] [-1.17798276e-01 3.62922102e-01 6.18148625e-01 ... 6.60624385e-01 -4.26774025e-02 -6.74744487e-01] [ 1.60951003e-01 1.48072124e+00 1.30459204e-01 ... 7.46462107e-01 5.93804121e-01 -5.26661813e-01]] [[-7.53797144e-02 -1.26288563e-03 1.20768599e-01 ... 4.46979776e-02 -5.86680807e-02 -1.42609039e-02] [-2.25711800e-03 3.43010835e-02 1.52977481e-01 ... -2.69760117e-02 -6.60312176e-02 -2.35926211e-02] [ 4.75272425e-02 8.81236270e-02 1.17475614e-01 ... -9.70617607e-02 -3.75226769e-03 2.01614969e-03] ... [ 1.24689825e-01 4.71529998e-02 -4.50830534e-02 ... 2.63721216e-02 -1.18837291e-02 -1.32935748e-01] [-5.19910902e-02 -5.69913164e-02 2.66430285e-02 ... 5.06230183e-02 1.54869817e-02 1.94098353e-02] [-9.75719467e-02 -6.13243366e-03 -1.17433388e-02 ... -3.12136300e-03 -4.29834239e-02 -2.65674647e-02]]] [[[-6.35721028e-01 -4.87455189e-01 7.39899755e-01 ... -9.17700231e-01 -3.82022470e-01 -5.01966953e-01] [ 3.63934398e-01 -3.78494978e-01 -1.01313758e+00 ... -2.32526839e-01 6.58865273e-01 2.64735818e-01] [-3.09791297e-01 1.03504872e+00 6.65009469e-02 ... 6.59777880e-01 6.19914770e-01 -4.30470705e-01] ... 
[ 7.82449782e-01 6.97270155e-01 8.82584751e-01 ... 1.40006505e-02 -6.61771297e-01 6.23396575e-01] [-6.48742020e-01 -5.20056129e-01 -9.33182001e-01 ... 5.76295257e-01 5.65195620e-01 -8.09928119e-01] [-7.33792424e-01 3.36355686e-01 -7.17117637e-02 ... 1.88693851e-02 -2.21659154e-01 8.37118864e-01]] [[-4.11555171e-01 -1.41234443e-01 1.43635660e-01 ... 6.95265591e-01 -9.94609296e-02 1.30057812e-01] [ 2.52977181e-02 1.57742217e-01 -2.21888367e-02 ... -2.46607214e-01 3.49936515e-01 -3.74088198e-01] [ 8.58125240e-02 -1.91043422e-01 1.85772300e-01 ... 1.54889151e-01 -2.44816065e-01 -2.96222419e-01] ... [ 2.75851376e-02 -5.03390908e-01 5.23963690e-01 ... 1.27997354e-01 -3.87967736e-01 1.87551424e-01] [ 2.88240254e-01 4.64303307e-02 -1.01494409e-01 ... 7.12697059e-02 2.13399879e-03 -1.48514241e-01] [ 7.82443136e-02 -1.97861731e-01 1.50871187e-01 ... 1.76184550e-01 -2.26102367e-01 -6.12469614e-02]] [[ 3.71650815e-01 -1.58015227e+00 4.34818715e-01 ... -1.94810614e-01 -6.06731117e-01 -1.18221521e+00] [ 1.20115948e+00 1.56394470e+00 -1.88300803e-01 ... 4.34183151e-01 9.72452581e-01 5.37788868e-01] [-4.72931445e-01 -9.38273430e-01 -1.02540016e+00 ... 1.06044912e+00 -6.83088720e-01 5.72635412e-01] ... [-1.05625880e+00 1.30959427e+00 1.28335044e-01 ... 5.42494476e-01 9.08379674e-01 -3.14036995e-01] [-1.80121392e-01 -1.23987579e+00 1.14289153e+00 ... -7.25208282e-01 1.73801112e+00 5.19859552e-01] [-7.47214496e-01 -3.23718697e-01 -8.62406135e-01 ... -2.01013640e-01 7.27302805e-02 7.39687443e-01]] [[-5.42653978e-01 -1.63134158e-01 -1.25384584e-01 ... -3.94165814e-02 3.97232562e-01 9.00236845e-01] [ 3.54247093e-01 2.06134543e-01 4.75741744e-01 ... 3.43754202e-01 -4.48399447e-02 4.08845633e-01] [-7.76084140e-02 4.39470381e-01 1.82980493e-01 ... -7.71326363e-01 -1.48488611e-01 -4.49466296e-02] ... [ 5.06539285e-01 1.13842440e+00 7.94900507e-02 ... -5.22903800e-01 -2.92615920e-01 -6.81270421e-01] [ 2.71121502e-01 -4.33235019e-01 3.96025814e-02 ... 
-4.19854134e-01 -6.26220226e-01 9.14889798e-02] [ 5.45129955e-01 -4.51386452e-01 -1.25873816e+00 ... -9.38171506e-01 -4.90201145e-01 -9.08028483e-01]] [[-3.33745241e-01 1.09493542e+00 -3.21437687e-01 ... -6.85075760e-01 8.70137036e-01 1.46757409e-01] [-3.94805849e-01 -1.19221652e+00 2.25203544e-01 ... -6.78957462e-01 1.04231906e+00 1.44751453e+00] [ 8.98851752e-02 1.29415631e+00 1.33970425e-01 ... 1.03916955e+00 -1.20829904e+00 -2.00996339e-01] ... [ 1.97402641e-01 7.53766060e-01 -6.76477849e-02 ... 8.87622535e-01 -1.05113596e-01 -1.70269680e+00] [ 3.49008411e-01 5.07600233e-02 -1.99414298e-01 ... -8.53953242e-01 7.65515566e-01 3.40993643e-01] [-5.96075170e-02 1.44602939e-01 1.41733384e+00 ... -9.79623139e-01 -9.89909887e-01 -1.00359094e+00]] [[-3.59627940e-02 -3.20355594e-02 2.75128465e-02 ... 4.99241464e-02 5.35533316e-02 5.80091290e-02] [ 2.53535230e-02 -1.02726951e-01 -8.30532610e-02 ... -1.08397834e-01 -8.32773000e-03 -3.44960615e-02] [ 1.88621450e-02 1.88506432e-02 -1.70582067e-02 ... 2.38576978e-02 1.91949941e-02 -9.40326080e-02] ... [-9.93680432e-02 -8.03512707e-02 -1.01142585e-01 ... 1.47682894e-02 1.47971854e-01 9.84311923e-02] [-6.07278943e-02 1.99724697e-02 1.14376500e-01 ... 3.92801464e-02 7.34385252e-02 1.19419403e-01] [-7.80880973e-02 3.77225503e-02 -6.65642694e-02 ... -2.38844305e-02 1.73840308e-05 -6.15496039e-02]]] [[[-1.01420641e+00 2.57568896e-01 -6.93072081e-01 ... -3.48751813e-01 2.89940447e-01 -4.17878568e-01] [ 3.40087861e-01 5.51692367e-01 -6.50012851e-01 ... 6.39592946e-01 -8.11896920e-01 4.00468439e-01] [ 9.32912946e-01 -3.24480295e-01 -1.09306052e-01 ... 3.24979395e-01 -3.64298493e-01 3.52121383e-01] ... [-1.00393045e+00 -9.32589650e-01 6.47509217e-01 ... -6.67961985e-02 -1.37346223e-01 -4.39891338e-01] [-4.56505358e-01 2.02319011e-01 -7.95163393e-01 ... 3.10702741e-01 1.84750843e+00 -4.30089414e-01] [ 6.53218389e-01 3.67654204e-01 -1.77773476e-01 ... 
-2.23677784e-01 -8.05145979e-01 -6.94575071e-01]] [[ 1.46572441e-01 4.56443608e-01 4.10164028e-01 ... -1.11925997e-01 5.07242046e-02 2.87861884e-01] [ 2.62501985e-01 2.56727666e-01 3.49669367e-01 ... -1.94573730e-01 -7.34806880e-02 2.68658638e-01] [ 3.65482152e-01 -1.23118833e-01 -3.63275468e-01 ... -1.17688283e-01 -3.33257496e-01 2.36304075e-01] ... [-2.82657027e-01 1.03872180e-01 -1.91627499e-02 ... -2.54408658e-01 -3.18610102e-01 -8.09754059e-02] [-1.98697492e-01 -6.49821982e-02 -3.77610028e-01 ... -1.20578714e-01 -2.70499468e-01 1.13264561e-01] [ 1.41761377e-01 -2.11810052e-01 1.46570742e-01 ... 3.27150822e-01 1.44984141e-01 4.18841131e-02]] [[-4.25378025e-01 -8.44556034e-01 1.90844268e-01 ... 1.80473566e-01 -7.29227364e-01 6.66148484e-01] [ 2.54553378e-01 -3.20101500e-01 -5.97077727e-01 ... 1.66096759e+00 6.66208148e-01 -1.78000584e-01] [ 4.17874396e-01 1.11599994e+00 -1.50755525e+00 ... -1.10984959e-01 4.94219549e-02 1.23115218e+00] ... [ 6.52606487e-01 7.93152869e-01 -5.78231690e-03 ... -1.09491050e+00 -1.01346505e+00 3.06835324e-01] [ 4.47510511e-01 -7.10382283e-01 -9.43831205e-01 ... 4.27323952e-02 3.14416260e-01 3.34607035e-01] [-3.60065937e-01 1.58700749e-01 7.71940947e-01 ... 1.27382159e+00 2.46328302e-02 -1.16172314e+00]] [[-7.06590891e-01 9.80999529e-01 4.57449526e-01 ... -1.58952102e-02 -4.43644881e-01 -9.06638056e-02] [ 6.29228950e-01 9.86856341e-01 9.68795493e-02 ... -2.32528433e-01 6.15122616e-01 -2.32136875e-01] [ 3.42921019e-01 5.18275082e-01 6.18941002e-02 ... -8.75550151e-01 6.24107838e-01 -4.92276311e-01] ... [-1.19847882e+00 4.96258587e-01 -4.60800081e-02 ... 4.78902787e-01 5.53784847e-01 2.26943530e-02] [ 6.07232392e-01 -6.76361561e-01 4.00172085e-01 ... -4.69580114e-01 9.97566357e-02 -4.14609343e-01] [ 4.03988510e-01 -1.31191865e-01 2.82663375e-01 ... 7.31992200e-02 4.01957005e-01 -3.10185224e-01]] [[-3.36536288e-01 -6.62129745e-02 6.75772011e-01 ... 
1.19048274e+00 -1.45388842e+00 -3.29284668e-01] [ 2.99690723e-01 -5.76356836e-02 -1.82672575e-01 ... 4.92763788e-01 -8.46172333e-01 -4.91939753e-01] [-1.05775619e+00 6.80557266e-02 -8.14912915e-01 ... 6.04578972e-01 -9.78957534e-01 -3.36794168e-01] ... [ 9.77274895e-01 3.96822989e-01 -6.30352318e-01 ... -9.73879248e-02 -4.89513814e-01 6.54779673e-01] [-1.02034867e+00 5.86487710e-01 8.07605982e-01 ... -3.38554084e-01 -1.83751261e+00 4.95978236e-01] [ 1.00972846e-01 2.94322819e-01 9.30228531e-02 ... 2.94923216e-01 3.41669559e-01 1.49919853e-01]] [[-1.02640586e-02 4.64209057e-02 -5.55042215e-02 ... -7.05826432e-02 -6.26942515e-02 1.45308524e-02] [-5.57640418e-02 -4.81593534e-02 4.70549874e-02 ... -3.68770696e-02 -7.12524280e-02 8.78990665e-02] [ 3.02682910e-02 3.89537178e-02 6.65141791e-02 ... -9.91890505e-02 -6.04960788e-03 1.86889395e-02] ... [ 4.47164886e-02 -9.76804197e-02 3.56453583e-02 ... 2.64881663e-02 8.57062787e-02 -8.87085199e-02] [ 1.06074601e-01 -2.32112687e-02 1.04772508e-01 ... -4.26187925e-03 -3.96833271e-02 4.17989455e-02] [ 3.62137929e-02 -3.39536071e-02 7.04710260e-02 ... 4.54063416e-02 -2.75456868e-02 -4.45371773e-03]]] ... [[[-1.20051131e-01 -5.30358791e-01 5.68277299e-01 ... 6.19094968e-01 -5.50280511e-01 7.82266676e-01] [ 6.77273154e-01 -9.42540228e-01 -5.43555200e-01 ... 3.07611972e-01 -9.43486392e-01 3.47189963e-01] [ 1.20910215e+00 -5.03481738e-02 6.33754790e-01 ... -8.58137429e-01 2.60738999e-01 3.88140559e-01] ... [ 4.34900746e-02 8.31767559e-01 2.89686233e-01 ... -7.31671274e-01 3.56540263e-01 4.26136106e-01] [-7.62802064e-01 -2.73971856e-01 1.91857770e-01 ... -8.99116695e-01 -3.22755277e-01 5.40745378e-01] [ 5.06883204e-01 7.91264713e-01 -4.41169322e-01 ... -9.04251456e-01 -4.10524994e-01 -6.72906518e-01]] [[-8.31863225e-01 4.04603127e-03 -9.54021439e-02 ... -6.06343560e-02 5.62912261e-04 2.23438546e-01] [-3.56439918e-01 5.85600957e-02 -2.74896950e-01 ... 
-6.32171988e-01 4.58456486e-01 -8.90616607e-03] [-1.39039159e-01 1.02806874e-01 -3.26936878e-02 ... 4.58152384e-01 -3.31128299e-01 -4.08257186e-01] ... [-2.97918797e-01 -1.50100589e-01 -1.43453255e-01 ... -1.61580861e-01 -9.62239578e-02 -4.77298051e-01] [-3.61043960e-01 7.25893080e-02 -1.22055069e-01 ... 2.93259710e-01 -2.83115149e-01 8.87669101e-02] [ 2.60979295e-01 1.42030358e-01 5.63206933e-02 ... 9.43620762e-05 -1.74503610e-01 2.64235288e-01]] [[-3.43667686e-01 -1.41633034e+00 -3.68558407e-01 ... 7.32003868e-01 -6.62977546e-02 6.31005824e-01] [ 3.37759882e-01 -1.86497033e+00 -7.24214435e-01 ... 2.34386474e-02 -8.75028908e-01 6.87144399e-01] [-5.92253983e-01 1.33385742e+00 -4.87610042e-01 ... -7.27009714e-01 -7.34925866e-01 -1.04426779e-01] ... [ 1.00008571e+00 -5.36008142e-02 3.82476360e-01 ... 8.57231855e-01 -1.31315792e+00 -5.48630238e-01] [-2.93663889e-01 -7.17033863e-01 -2.05281377e-01 ... 5.63824899e-04 2.11323634e-01 1.85858443e-01] [ 2.33819485e-01 -4.42851782e-01 -9.87064719e-01 ... -8.38557839e-01 5.65332651e-01 -4.43436146e-01]] [[ 5.42862564e-02 -4.08315152e-01 -6.82625651e-01 ... 3.65838587e-01 6.21708870e-01 6.10516965e-01] [-4.23581928e-01 3.98376703e-01 -6.56836271e-01 ... 1.07628435e-01 3.79626870e-01 3.47838223e-01] [-8.60419512e-01 3.48714769e-01 8.84509623e-01 ... -4.80783314e-01 -7.38243461e-01 5.80707371e-01] ... [ 1.82619229e-01 -6.76879704e-01 9.14614618e-01 ... -5.74710131e-01 1.75375655e-01 3.05560589e-01] [ 5.77598155e-01 4.00722861e-01 -4.97590601e-01 ... -1.94400266e-01 6.32038534e-01 2.00020865e-01] [-6.53280199e-01 2.52912045e-01 -1.83627784e-01 ... -5.27112007e-01 -1.47516295e-01 6.96145654e-01]] [[ 3.63231957e-01 -7.32662082e-01 -1.21058261e+00 ... -3.79844040e-01 6.06585480e-02 1.10642183e+00] [ 1.20247833e-01 2.39523754e-01 -8.91062319e-01 ... -6.48677349e-01 1.31380284e+00 -1.23137847e-01] [ 1.29301918e+00 -1.50645003e-01 1.00412965e+00 ... 8.57359946e-01 -4.00182694e-01 -2.54885286e-01] ... 
[-6.52684152e-01 -1.52204007e-01 -6.12512231e-01 ... 2.13271528e-01 -6.13490641e-02 1.04182982e+00] [-5.55081785e-01 1.05510883e-01 5.23483336e-01 ... 6.47520542e-01 -1.55151260e+00 -5.11382163e-01] [ 5.95273077e-01 3.61025125e-01 9.16244626e-01 ... -2.82106400e-01 -1.07308149e-01 4.72570926e-01]] [[-3.80382314e-02 -7.10110590e-02 1.02227163e-02 ... -4.35088389e-03 -3.71768251e-02 -6.75001666e-02] [-1.44488975e-01 2.79332418e-02 -7.72459656e-02 ... 3.66321169e-02 -7.28426278e-02 -3.09991520e-02] [-1.46396486e-02 2.68540196e-02 7.43679777e-02 ... 7.85882026e-02 -4.05033082e-02 1.30013200e-02] ... [-7.19133019e-02 -9.88175049e-02 1.46825433e-01 ... 1.20989550e-02 3.94727886e-02 -3.27953286e-02] [-2.31134649e-02 1.23082995e-02 1.19748741e-01 ... 1.43207721e-02 -8.49993378e-02 6.85530063e-03] [ 1.97697990e-02 -1.31621823e-01 4.74608727e-02 ... 1.40212700e-01 -2.95471381e-02 -3.87772396e-02]]] [[[ 1.51709095e-01 7.79576480e-01 1.14579365e-01 ... -1.35899091e+00 -3.76579285e-01 8.26945901e-01] [-5.20748794e-01 -9.61482048e-01 8.67353320e-01 ... -1.61049575e-01 8.37531269e-01 5.56378841e-01] [-1.01077020e+00 8.77103806e-01 -7.23882318e-01 ... -6.00060880e-01 -4.88960557e-02 -7.88736641e-01] ... [-6.65869653e-01 2.73732364e-01 1.21338284e+00 ... -7.94362783e-01 -2.31200710e-01 -4.62946773e-01] [-7.71881044e-01 7.88518250e-01 -4.68198061e-02 ... -9.90426302e-01 -2.20780224e-01 -6.62040472e-01] [-4.27910626e-01 9.01693642e-01 -2.04935759e-01 ... 5.41585088e-01 2.60486901e-01 1.17524171e+00]] [[ 3.67585123e-01 -1.03282213e-01 1.09564930e-01 ... -1.14220507e-01 4.29013997e-01 -1.33620963e-01] [-7.59082362e-02 -1.79818854e-01 9.39098820e-02 ... 9.44802072e-03 -4.01954651e-01 -1.07069843e-01] [ 1.20491236e-01 -5.27697265e-01 -1.80669665e-01 ... 1.20611507e-02 4.19573724e-01 1.17675448e-03] ... [ 1.68828219e-01 -3.23208898e-01 -4.05687869e-01 ... 2.36410856e-01 3.19986902e-02 -1.85327113e-01] [ 3.90109122e-01 2.49477267e-01 -2.21933231e-01 ... 
-1.62402019e-01 -3.06415409e-01 1.09326066e-02] [ 2.78631926e-01 1.85079873e-02 1.79362401e-01 ... -9.22253206e-02 7.32666031e-02 4.69601713e-02]] [[-9.81055319e-01 -1.91129133e-01 -5.18545032e-01 ... -1.08614072e-01 1.49988592e+00 7.72797227e-01] [ 1.92309365e-01 2.59904623e-01 7.86484599e-01 ... 8.17233562e-01 -3.00966024e-01 4.65990365e-01] [ 1.17728364e+00 1.00655071e-01 -1.32692611e+00 ... 1.16783921e-02 -2.56916016e-01 3.20146763e-04] ... [-1.24505675e+00 -2.48891130e-01 7.24206090e-01 ... 1.29964995e+00 5.65396622e-02 -6.90382898e-01] [-1.18188500e+00 -2.27844492e-02 9.23255682e-01 ... 7.13314891e-01 -5.62210262e-01 2.81252414e-01] [-9.33058262e-01 -9.48472142e-01 2.31617302e-01 ... 4.07438129e-01 3.75423253e-01 -1.24963117e+00]] [[-2.74030566e-01 3.44719648e-01 -4.88946408e-01 ... -6.15213335e-01 -5.92819333e-01 -3.53974253e-01] [ 2.44629309e-01 -2.27369219e-01 -9.11416888e-01 ... 1.04968941e+00 -4.26206768e-01 -9.73263025e-01] [ 1.38727888e-01 -5.27507290e-02 -4.05102998e-01 ... -6.13935828e-01 -1.13796842e+00 3.45549583e-01] ... [ 5.04272044e-01 1.53037056e-01 5.60212672e-01 ... 2.92032391e-01 -3.77494060e-02 1.63548410e-01] [-3.65272850e-01 1.38245195e-01 7.17361748e-01 ... -2.15614289e-01 -3.01429451e-01 4.37828153e-01] [ 6.61232352e-01 1.76928088e-01 -1.25993192e-01 ... 4.24290329e-01 5.37672900e-02 3.60548526e-01]] [[ 2.02476993e-01 -9.56350744e-01 -3.11622530e-01 ... -1.56621747e-02 1.03777654e-01 -3.80389661e-01] [-4.90655638e-02 1.23700090e-01 5.93787991e-02 ... -4.39091474e-01 -7.57580340e-01 -3.79996151e-01] [ 5.57274461e-01 5.49839079e-01 -4.67922628e-01 ... -1.36940992e-02 1.43152744e-01 9.70044553e-01] ... [-6.00189567e-01 -7.16038167e-01 7.36922860e-01 ... 2.02116266e-01 -1.38999259e+00 1.77039608e-01] [ 3.97648364e-01 -1.86982676e-01 -1.15282750e+00 ... 2.51377940e-01 1.03565860e+00 2.04044312e-01] [ 6.66935891e-02 6.71028912e-01 -6.12649500e-01 ... 
1.09084320e+00 2.63804775e-02 1.94053158e-01]] [[ 8.79359394e-02 1.38016984e-01 3.96672487e-02 ... 1.18440650e-02 -1.32200107e-01 1.14463530e-01] [ 8.93473178e-02 8.40043183e-03 3.99754569e-02 ... -1.25473728e-02 2.19615661e-02 -1.15902655e-01] [ 7.83678610e-03 -8.44399557e-02 5.75277663e-04 ... 4.67277095e-02 6.97802082e-02 2.95757763e-02] ... [-5.50018251e-02 -6.84801862e-02 2.18270142e-02 ... -5.40793687e-02 6.28695041e-02 -3.02890074e-02] [-3.30184959e-02 -6.78595155e-02 1.22601902e-02 ... -8.30265135e-02 6.82309419e-02 2.04797834e-01] [-9.97823253e-02 -2.32892148e-02 -2.06578858e-02 ... 2.21800003e-02 -6.41749054e-02 1.32656887e-01]]] [[[-6.53044581e-01 -3.22940685e-02 -9.37020853e-02 ... -6.55765235e-01 6.08542681e-01 4.47570443e-01] [-3.60977650e-01 9.00974512e-01 3.78687918e-01 ... -5.67152083e-01 -2.46929258e-01 4.74755973e-01] [ 1.95448827e-02 -5.44310093e-01 1.67459343e-02 ... 8.63407791e-01 -9.57302228e-02 1.29033580e-01] ... [ 4.43265706e-01 1.30983796e-02 -9.02926549e-02 ... 2.51911193e-01 1.21609278e-01 -7.53377795e-01] [-1.19955683e+00 -6.82441425e-03 -2.88058132e-01 ... 1.76185638e-01 -3.30076009e-01 -3.42759609e-01] [ 1.07046151e+00 -1.20413387e+00 -3.13082039e-01 ... 3.48390281e-01 2.78251078e-02 3.75661433e-01]] [[-3.07833940e-01 -1.25763297e-01 2.62536615e-01 ... -1.11595236e-01 -2.21416026e-01 -3.92329305e-01] [-4.65789527e-01 2.38301978e-02 3.82126331e-01 ... 6.17914051e-02 -2.65573084e-01 7.94827119e-02] [-7.74264755e-03 -1.87826201e-01 4.48485941e-01 ... 3.21298838e-01 -2.06471935e-01 6.25894189e-01] ... [-1.64365217e-01 3.33683103e-01 -6.43908754e-02 ... 7.39787519e-02 2.55017340e-01 -7.50460476e-02] [-1.01240061e-01 8.51884633e-02 1.58385754e-01 ... -1.44464269e-01 -2.47462347e-01 3.80418777e-01] [-5.32041527e-02 -8.45680356e-01 -5.00043392e-01 ... -2.16154277e-01 5.44006824e-02 1.87040970e-01]] [[ 1.21698773e+00 5.42370498e-01 -3.49032462e-01 ... 
1.69779912e-01 -8.69242847e-01 6.62324071e-01] [-1.27128291e+00 -7.24991620e-01 -1.53945833e-01 ... -5.11000931e-01 4.84716237e-01 -7.62266040e-01] [ 4.74575281e-01 4.49911594e-01 -4.90876377e-01 ... -4.41008598e-01 -1.17093992e+00 1.16522625e-01] ... [ 2.87567437e-01 9.01327375e-03 9.01281908e-02 ... 5.18817782e-01 -7.89407372e-01 -1.94339722e-01] [ 5.57044923e-01 1.08184540e+00 4.37582552e-01 ... -7.92568445e-01 6.86568975e-01 3.56527358e-01] [-5.50966740e-01 -9.59707201e-02 1.67233363e-01 ... -1.02330185e-01 -7.78856158e-01 -2.67684966e-01]] [[-6.30775988e-01 5.91454864e-01 -2.56716102e-01 ... -4.12734479e-01 6.03225222e-03 -3.91311288e-01] [ 5.32616854e-01 -2.38003671e-01 4.72681634e-02 ... -8.33917797e-01 1.90529659e-01 -2.86678016e-01] [ 3.26376885e-01 9.48744297e-01 1.43776178e-01 ... -7.11645424e-01 -3.56359601e-01 -6.35336518e-01] ... [ 6.14417791e-01 -1.27153471e-01 -5.61765909e-01 ... 1.14971153e-01 -6.71985000e-03 1.89151362e-01] [ 5.99111676e-01 -4.48821753e-01 -3.52905467e-02 ... 2.36149475e-01 5.04273891e-01 4.07377183e-01] [ 5.15382886e-01 -2.33781114e-01 -5.24312794e-01 ... 4.80808228e-01 -1.13289595e+00 6.10214323e-02]] [[-4.38320011e-01 -7.32188284e-01 1.73335806e-01 ... -9.25911590e-02 3.91253054e-01 3.94875616e-01] [-6.07558824e-02 -5.71997166e-01 4.87462610e-01 ... 3.23373824e-01 1.69507097e-02 -3.41340870e-01] [ 4.07399327e-01 9.35802698e-01 -5.97205341e-01 ... 6.46617472e-01 4.02700305e-02 6.85198307e-01] ... [-9.36169773e-02 8.01646769e-01 1.21972454e+00 ... -1.25703847e+00 2.25698799e-02 2.37535030e-01] [ 1.10015070e+00 9.13428664e-01 -4.80645269e-01 ... -1.69710922e+00 -6.33360207e-01 5.23617744e-01] [-1.15535045e+00 -9.54154313e-01 -1.35491759e-01 ... -7.10806131e-01 -6.30293548e-01 2.16303214e-01]] [[-1.90951908e-03 -4.75575924e-02 5.51640466e-02 ... -3.77830230e-02 -1.60480868e-02 -3.64910923e-02] [ 1.61481984e-02 -5.61843067e-02 1.45117631e-02 ... 
3.72100323e-02 -1.62305087e-01 4.55479883e-02] [-1.18391067e-01 3.58318351e-02 4.64623235e-02 ... -1.51040226e-01 1.13394419e-02 6.33287206e-02] ... [-9.50425416e-02 -1.22220470e-02 3.57759488e-03 ... -1.29331902e-01 7.55475461e-02 5.46537386e-03] [ 4.39984165e-02 6.55397028e-02 9.10497978e-02 ... 1.42275363e-01 1.47896931e-02 -6.50557354e-02] [ 1.05415300e-01 -5.90503626e-02 -9.46754031e-03 ... -6.36933073e-02 2.27436852e-02 -4.77279909e-02]]]]; ov_res: [[[[-4.12632763e-01 1.40550375e+00 6.22657724e-02 ... 5.64252734e-02 -2.08353892e-01 -8.96453559e-02] [ 1.69548953e+00 -4.57477748e-01 -6.59943521e-02 ... 6.72183000e-03 1.12224780e-01 -1.28789318e+00] [-4.28365588e-01 5.83944559e-01 -1.43374884e+00 ... -3.47336642e-02 -2.90767670e-01 -4.38433737e-01] ... [ 3.21749270e-01 2.57941544e-01 -9.40588713e-01 ... -4.35111374e-01 3.38420749e-01 1.69390476e+00] [ 6.95923030e-01 -3.29741508e-01 -8.83719981e-01 ... 3.20747942e-02 -9.51590538e-01 -1.08754623e+00] [-6.67701438e-02 3.77493054e-01 -2.40673617e-01 ... 2.47648686e-01 -4.97769006e-02 -9.63278830e-01]] [[-1.63556576e-01 -2.24000260e-01 -2.72276014e-01 ... 3.41319256e-02 -3.37302506e-01 2.60529727e-01] [-1.62539706e-01 1.40570402e-01 7.63194859e-02 ... 2.02452898e-01 -9.52066332e-02 -3.94096136e-01] [-6.25741839e-01 -1.86228022e-01 -4.69588935e-02 ... 4.15984367e-04 4.90653478e-02 1.41317561e-01] ... [ 9.91966501e-02 -4.40366685e-01 1.81778014e-01 ... -2.82948434e-01 -1.84862211e-01 -1.51914284e-01] [ 1.91984862e-01 1.43493816e-01 -6.83497488e-02 ... 1.70117453e-01 1.23950467e-01 8.48660246e-03] [ 4.06757206e-01 -1.48798317e-01 -3.54027689e-01 ... -3.44855268e-03 -2.97534652e-02 -5.37797064e-03]] [[ 2.57228494e-01 7.83254027e-01 1.03633177e+00 ... 6.40723884e-01 -1.15269649e+00 6.47679865e-01] [ 4.79891866e-01 6.57619312e-02 2.50019789e-01 ... -3.01847219e-01 6.16560221e-01 3.09495389e-01] [ 5.23397505e-01 -6.22551262e-01 1.87627319e-02 ... 5.75214505e-01 -9.58882689e-01 -2.56961316e-01] ... 
[ 5.68377435e-01 9.22592044e-01 -8.02408397e-01 ... -1.87443763e-01 5.13456091e-02 1.02619600e+00] [-1.33210015e+00 5.10504544e-01 1.92195570e+00 ... -5.04135549e-01 3.19367856e-01 -3.14637214e-01] [ 2.78830230e-01 -3.82918775e-01 4.79368001e-01 ... -1.23180062e-01 2.15542376e-01 2.06206754e-01]] [[ 1.26372606e-01 -5.61571419e-01 -4.67437804e-01 ... 1.95493683e-01 1.19078822e-01 5.99424243e-01] [ 2.19459862e-01 9.18749794e-02 3.78131658e-01 ... 7.28657007e-01 1.16193220e-02 4.08268809e-01] [-7.09862113e-01 3.40787768e-02 -6.76713943e-01 ... 1.14632212e-01 -1.91167947e-02 -9.06833231e-01] ... [-5.12749314e-01 7.55062923e-02 -4.79656786e-01 ... 7.53514376e-03 5.62836640e-02 6.80465475e-02] [ 5.53739607e-01 -5.32655120e-01 -1.70513153e-01 ... 7.69340098e-01 1.03239048e+00 4.04637098e-01] [-3.58920246e-01 -9.21317756e-01 -5.19434094e-01 ... 8.34150136e-01 3.56603086e-01 -1.27632082e-01]] [[ 1.29480332e-01 -8.71518701e-02 -2.15186858e+00 ... -5.29581979e-02 -2.72810370e-01 4.90236193e-01] [-1.66378307e+00 -1.44166756e+00 9.06838119e-01 ... 4.12457343e-03 -3.16175409e-02 1.43423820e+00] [-1.06464922e+00 9.04051721e-01 -2.19148453e-02 ... -4.94066119e-01 5.81199713e-02 8.02771926e-01] ... [-1.26743257e+00 -2.00958893e-01 4.83152628e-01 ... -2.18866855e-01 4.77806814e-02 -8.04400682e-01] [-1.17798284e-01 3.62922102e-01 6.18148625e-01 ... 6.60624385e-01 -4.26774062e-02 -6.74744487e-01] [ 1.60951018e-01 1.48072124e+00 1.30459219e-01 ... 7.46462166e-01 5.93804121e-01 -5.26661873e-01]] [[-7.53797144e-02 -1.26288563e-03 1.20768607e-01 ... 4.46979813e-02 -5.86680844e-02 -1.42609049e-02] [-2.25711800e-03 3.43010835e-02 1.52977481e-01 ... -2.69760117e-02 -6.60312250e-02 -2.35926230e-02] [ 4.75272425e-02 8.81236270e-02 1.17475614e-01 ... -9.70617533e-02 -3.75226792e-03 2.01614969e-03] ... [ 1.24689832e-01 4.71529998e-02 -4.50830534e-02 ... 2.63721216e-02 -1.18837291e-02 -1.32935762e-01] [-5.19910902e-02 -5.69913164e-02 2.66430303e-02 ... 
5.06230183e-02 1.54869827e-02 1.94098353e-02] [-9.75719467e-02 -6.13243412e-03 -1.17433397e-02 ... -3.12136300e-03 -4.29834239e-02 -2.65674647e-02]]] [[[-6.35721028e-01 -4.87455159e-01 7.39899755e-01 ... -9.17700291e-01 -3.82022440e-01 -5.01966953e-01] [ 3.63934398e-01 -3.78494978e-01 -1.01313770e+00 ... -2.32526824e-01 6.58865213e-01 2.64735848e-01] [-3.09791267e-01 1.03504860e+00 6.65009543e-02 ... 6.59777820e-01 6.19914770e-01 -4.30470705e-01] ... [ 7.82449722e-01 6.97270155e-01 8.82584691e-01 ... 1.40006607e-02 -6.61771297e-01 6.23396575e-01] [-6.48742020e-01 -5.20056129e-01 -9.33182001e-01 ... 5.76295257e-01 5.65195620e-01 -8.09928119e-01] [-7.33792424e-01 3.36355686e-01 -7.17117563e-02 ... 1.88693944e-02 -2.21659139e-01 8.37118864e-01]] [[-4.11555201e-01 -1.41234443e-01 1.43635660e-01 ... 6.95265591e-01 -9.94609296e-02 1.30057827e-01] [ 2.52977256e-02 1.57742217e-01 -2.21888330e-02 ... -2.46607214e-01 3.49936515e-01 -3.74088198e-01] [ 8.58125314e-02 -1.91043422e-01 1.85772300e-01 ... 1.54889151e-01 -2.44816065e-01 -2.96222419e-01] ... [ 2.75851432e-02 -5.03390908e-01 5.23963690e-01 ... 1.27997369e-01 -3.87967765e-01 1.87551454e-01] [ 2.88240284e-01 4.64303382e-02 -1.01494409e-01 ... 7.12697133e-02 2.13400368e-03 -1.48514241e-01] [ 7.82443210e-02 -1.97861746e-01 1.50871202e-01 ... 1.76184565e-01 -2.26102382e-01 -6.12469576e-02]] [[ 3.71650845e-01 -1.58015263e+00 4.34818745e-01 ... -1.94810659e-01 -6.06731236e-01 -1.18221545e+00] [ 1.20115960e+00 1.56394482e+00 -1.88300833e-01 ... 4.34183151e-01 9.72452641e-01 5.37788928e-01] [-4.72931534e-01 -9.38273549e-01 -1.02540028e+00 ... 1.06044924e+00 -6.83088779e-01 5.72635412e-01] ... [-1.05625904e+00 1.30959439e+00 1.28335044e-01 ... 5.42494535e-01 9.08379734e-01 -3.14037055e-01] [-1.80121437e-01 -1.23987603e+00 1.14289165e+00 ... -7.25208461e-01 1.73801124e+00 5.19859552e-01] [-7.47214615e-01 -3.23718756e-01 -8.62406313e-01 ... 
-2.01013684e-01 7.27302656e-02 7.39687562e-01]] [[-5.42654037e-01 -1.63134158e-01 -1.25384584e-01 ... -3.94165702e-02 3.97232652e-01 9.00236964e-01] [ 3.54247123e-01 2.06134573e-01 4.75741833e-01 ... 3.43754262e-01 -4.48399372e-02 4.08845723e-01] [-7.76084140e-02 4.39470440e-01 1.82980523e-01 ... -7.71326363e-01 -1.48488611e-01 -4.49466258e-02] ... [ 5.06539345e-01 1.13842452e+00 7.94900730e-02 ... -5.22903800e-01 -2.92615920e-01 -6.81270480e-01] [ 2.71121562e-01 -4.33235049e-01 3.96025963e-02 ... -4.19854134e-01 -6.26220286e-01 9.14890021e-02] [ 5.45130014e-01 -4.51386482e-01 -1.25873828e+00 ... -9.38171566e-01 -4.90201175e-01 -9.08028543e-01]] [[-3.33745241e-01 1.09493554e+00 -3.21437687e-01 ... -6.85075760e-01 8.70137036e-01 1.46757409e-01] [-3.94805849e-01 -1.19221652e+00 2.25203559e-01 ... -6.78957462e-01 1.04231906e+00 1.44751465e+00] [ 8.98851752e-02 1.29415631e+00 1.33970439e-01 ... 1.03916955e+00 -1.20829916e+00 -2.00996339e-01] ... [ 1.97402641e-01 7.53766119e-01 -6.76477849e-02 ... 8.87622535e-01 -1.05113596e-01 -1.70269692e+00] [ 3.49008411e-01 5.07600196e-02 -1.99414298e-01 ... -8.53953183e-01 7.65515625e-01 3.40993673e-01] [-5.96075207e-02 1.44602939e-01 1.41733384e+00 ... -9.79623199e-01 -9.89909887e-01 -1.00359094e+00]] [[-3.59627940e-02 -3.20355594e-02 2.75128465e-02 ... 4.99241464e-02 5.35533354e-02 5.80091290e-02] [ 2.53535230e-02 -1.02726951e-01 -8.30532536e-02 ... -1.08397827e-01 -8.32773000e-03 -3.44960615e-02] [ 1.88621469e-02 1.88506432e-02 -1.70582086e-02 ... 2.38576978e-02 1.91949941e-02 -9.40326080e-02] ... [-9.93680432e-02 -8.03512707e-02 -1.01142593e-01 ... 1.47682894e-02 1.47971869e-01 9.84311923e-02] [-6.07278943e-02 1.99724678e-02 1.14376500e-01 ... 3.92801501e-02 7.34385252e-02 1.19419396e-01] [-7.80880898e-02 3.77225541e-02 -6.65642694e-02 ... -2.38844305e-02 1.73844146e-05 -6.15496039e-02]]] [[[-1.01420641e+00 2.57568896e-01 -6.93072021e-01 ... 
-3.48751783e-01 2.89940417e-01 -4.17878538e-01] [ 3.40087861e-01 5.51692367e-01 -6.50012791e-01 ... 6.39592946e-01 -8.11896861e-01 4.00468439e-01] [ 9.32912946e-01 -3.24480265e-01 -1.09306052e-01 ... 3.24979424e-01 -3.64298463e-01 3.52121383e-01] ... [-1.00393045e+00 -9.32589591e-01 6.47509217e-01 ... -6.67961910e-02 -1.37346208e-01 -4.39891338e-01] [-4.56505358e-01 2.02318996e-01 -7.95163333e-01 ... 3.10702711e-01 1.84750831e+00 -4.30089414e-01] [ 6.53218329e-01 3.67654204e-01 -1.77773461e-01 ... -2.23677769e-01 -8.05145860e-01 -6.94575071e-01]] [[ 1.46572456e-01 4.56443608e-01 4.10163999e-01 ... -1.11925997e-01 5.07242046e-02 2.87861913e-01] [ 2.62501985e-01 2.56727695e-01 3.49669367e-01 ... -1.94573730e-01 -7.34806955e-02 2.68658638e-01] [ 3.65482152e-01 -1.23118825e-01 -3.63275468e-01 ... -1.17688283e-01 -3.33257496e-01 2.36304075e-01] ... [-2.82657027e-01 1.03872180e-01 -1.91627499e-02 ... -2.54408658e-01 -3.18610102e-01 -8.09754059e-02] [-1.98697478e-01 -6.49821982e-02 -3.77610028e-01 ... -1.20578706e-01 -2.70499468e-01 1.13264568e-01] [ 1.41761363e-01 -2.11810052e-01 1.46570742e-01 ... 3.27150792e-01 1.44984126e-01 4.18841094e-02]] [[-4.25378084e-01 -8.44556153e-01 1.90844297e-01 ... 1.80473581e-01 -7.29227424e-01 6.66148543e-01] [ 2.54553407e-01 -3.20101589e-01 -5.97077847e-01 ... 1.66096771e+00 6.66208208e-01 -1.78000614e-01] [ 4.17874396e-01 1.11600018e+00 -1.50755548e+00 ... -1.10984981e-01 4.94219549e-02 1.23115230e+00] ... [ 6.52606547e-01 7.93152988e-01 -5.78232203e-03 ... -1.09491062e+00 -1.01346517e+00 3.06835353e-01] [ 4.47510570e-01 -7.10382402e-01 -9.43831384e-01 ... 4.27323990e-02 3.14416260e-01 3.34607065e-01] [-3.60065967e-01 1.58700764e-01 7.71941006e-01 ... 1.27382183e+00 2.46328283e-02 -1.16172326e+00]] [[-7.06590950e-01 9.80999649e-01 4.57449585e-01 ... -1.58952083e-02 -4.43644941e-01 -9.06638131e-02] [ 6.29229069e-01 9.86856461e-01 9.68795568e-02 ... 
-2.32528448e-01 6.15122736e-01 -2.32136905e-01] [ 3.42921078e-01 5.18275142e-01 6.18941113e-02 ... -8.75550210e-01 6.24107897e-01 -4.92276371e-01] ... [-1.19847894e+00 4.96258676e-01 -4.60800081e-02 ... 4.78902876e-01 5.53784966e-01 2.26943623e-02] [ 6.07232511e-01 -6.76361680e-01 4.00172144e-01 ... -4.69580144e-01 9.97566581e-02 -4.14609373e-01] [ 4.03988570e-01 -1.31191865e-01 2.82663405e-01 ... 7.31992349e-02 4.01957065e-01 -3.10185254e-01]] [[-3.36536229e-01 -6.62129596e-02 6.75771952e-01 ... 1.19048250e+00 -1.45388830e+00 -3.29284638e-01] [ 2.99690694e-01 -5.76356724e-02 -1.82672560e-01 ... 4.92763698e-01 -8.46172273e-01 -4.91939664e-01] [-1.05775595e+00 6.80557191e-02 -8.14912736e-01 ... 6.04578912e-01 -9.78957415e-01 -3.36794138e-01] ... [ 9.77274776e-01 3.96822929e-01 -6.30352199e-01 ... -9.73878950e-02 -4.89513725e-01 6.54779553e-01] [-1.02034843e+00 5.86487651e-01 8.07605863e-01 ... -3.38554054e-01 -1.83751237e+00 4.95978147e-01] [ 1.00972839e-01 2.94322759e-01 9.30228457e-02 ... 2.94923156e-01 3.41669500e-01 1.49919823e-01]] [[-1.02640586e-02 4.64208983e-02 -5.55042177e-02 ... -7.05826357e-02 -6.26942441e-02 1.45308506e-02] [-5.57640344e-02 -4.81593497e-02 4.70549800e-02 ... -3.68770659e-02 -7.12524205e-02 8.78990591e-02] [ 3.02682873e-02 3.89537141e-02 6.65141717e-02 ... -9.91890356e-02 -6.04960741e-03 1.86889376e-02] ... [ 4.47164811e-02 -9.76804122e-02 3.56453545e-02 ... 2.64881626e-02 8.57062638e-02 -8.87085050e-02] [ 1.06074587e-01 -2.32112650e-02 1.04772493e-01 ... -4.26187925e-03 -3.96833271e-02 4.17989381e-02] [ 3.62137854e-02 -3.39536034e-02 7.04710111e-02 ... 4.54063341e-02 -2.75456812e-02 -4.45371727e-03]]] ... [[[-1.20051123e-01 -5.30358791e-01 5.68277240e-01 ... 6.19094908e-01 -5.50280511e-01 7.82266676e-01] [ 6.77273154e-01 -9.42540169e-01 -5.43555200e-01 ... 3.07611972e-01 -9.43486392e-01 3.47189933e-01] [ 1.20910215e+00 -5.03481776e-02 6.33754730e-01 ... -8.58137369e-01 2.60738969e-01 3.88140529e-01] ... 
[ 4.34900634e-02 8.31767559e-01 2.89686233e-01 ... -7.31671274e-01 3.56540233e-01 4.26136076e-01] [-7.62802064e-01 -2.73971826e-01 1.91857770e-01 ... -8.99116695e-01 -3.22755277e-01 5.40745318e-01] [ 5.06883204e-01 7.91264653e-01 -4.41169322e-01 ... -9.04251397e-01 -4.10524964e-01 -6.72906518e-01]] [[-8.31863165e-01 4.04602988e-03 -9.54021513e-02 ... -6.06343560e-02 5.62911038e-04 2.23438546e-01] [-3.56439888e-01 5.85600920e-02 -2.74896950e-01 ... -6.32171988e-01 4.58456486e-01 -8.90616793e-03] [-1.39039159e-01 1.02806866e-01 -3.26936916e-02 ... 4.58152384e-01 -3.31128299e-01 -4.08257186e-01] ... [-2.97918797e-01 -1.50100589e-01 -1.43453240e-01 ... -1.61580861e-01 -9.62239504e-02 -4.77298051e-01] [-3.61043960e-01 7.25893006e-02 -1.22055076e-01 ... 2.93259710e-01 -2.83115149e-01 8.87669101e-02] [ 2.60979295e-01 1.42030358e-01 5.63206896e-02 ... 9.43608757e-05 -1.74503610e-01 2.64235258e-01]] [[-3.43667656e-01 -1.41633034e+00 -3.68558407e-01 ... 7.32003808e-01 -6.62977472e-02 6.31005764e-01] [ 3.37759882e-01 -1.86497033e+00 -7.24214435e-01 ... 2.34386548e-02 -8.75028908e-01 6.87144339e-01] [-5.92253983e-01 1.33385742e+00 -4.87610042e-01 ... -7.27009654e-01 -7.34925866e-01 -1.04426764e-01] ... [ 1.00008559e+00 -5.36008067e-02 3.82476330e-01 ... 8.57231855e-01 -1.31315792e+00 -5.48630238e-01] [-2.93663859e-01 -7.17033863e-01 -2.05281347e-01 ... 5.63833804e-04 2.11323649e-01 1.85858458e-01] [ 2.33819500e-01 -4.42851812e-01 -9.87064779e-01 ... -8.38557839e-01 5.65332592e-01 -4.43436146e-01]] [[ 5.42862527e-02 -4.08315182e-01 -6.82625592e-01 ... 3.65838587e-01 6.21708870e-01 6.10516965e-01] [-4.23581928e-01 3.98376733e-01 -6.56836271e-01 ... 1.07628420e-01 3.79626840e-01 3.47838223e-01] [-8.60419512e-01 3.48714769e-01 8.84509563e-01 ... -4.80783314e-01 -7.38243461e-01 5.80707371e-01] ... [ 1.82619214e-01 -6.76879704e-01 9.14614618e-01 ... -5.74710131e-01 1.75375655e-01 3.05560589e-01] [ 5.77598214e-01 4.00722861e-01 -4.97590601e-01 ... 
-1.94400266e-01 6.32038534e-01 2.00020850e-01] [-6.53280139e-01 2.52912045e-01 -1.83627799e-01 ... -5.27112007e-01 -1.47516295e-01 6.96145654e-01]] [[ 3.63231987e-01 -7.32662082e-01 -1.21058273e+00 ... -3.79844010e-01 6.06585592e-02 1.10642183e+00] [ 1.20247833e-01 2.39523768e-01 -8.91062260e-01 ... -6.48677349e-01 1.31380284e+00 -1.23137847e-01] [ 1.29301918e+00 -1.50645018e-01 1.00412965e+00 ... 8.57360005e-01 -4.00182664e-01 -2.54885286e-01] ... [-6.52684152e-01 -1.52204007e-01 -6.12512231e-01 ... 2.13271543e-01 -6.13490604e-02 1.04182982e+00] [-5.55081725e-01 1.05510890e-01 5.23483396e-01 ... 6.47520542e-01 -1.55151272e+00 -5.11382163e-01] [ 5.95273077e-01 3.61025155e-01 9.16244626e-01 ... -2.82106370e-01 -1.07308149e-01 4.72570986e-01]] [[-3.80382352e-02 -7.10110664e-02 1.02227172e-02 ... -4.35088435e-03 -3.71768288e-02 -6.75001666e-02] [-1.44488961e-01 2.79332399e-02 -7.72459656e-02 ... 3.66321169e-02 -7.28426278e-02 -3.09991557e-02] [-1.46396486e-02 2.68540177e-02 7.43679777e-02 ... 7.85882026e-02 -4.05033119e-02 1.30013200e-02] ... [-7.19133019e-02 -9.88175124e-02 1.46825448e-01 ... 1.20989541e-02 3.94727886e-02 -3.27953324e-02] [-2.31134668e-02 1.23082995e-02 1.19748741e-01 ... 1.43207721e-02 -8.49993452e-02 6.85530016e-03] [ 1.97697990e-02 -1.31621838e-01 4.74608764e-02 ... 1.40212715e-01 -2.95471419e-02 -3.87772433e-02]]] [[[ 1.51709095e-01 7.79576540e-01 1.14579372e-01 ... -1.35899091e+00 -3.76579314e-01 8.26946020e-01] [-5.20748794e-01 -9.61481988e-01 8.67353439e-01 ... -1.61049575e-01 8.37531328e-01 5.56378841e-01] [-1.01077020e+00 8.77103865e-01 -7.23882258e-01 ... -6.00060880e-01 -4.88960482e-02 -7.88736641e-01] ... [-6.65869653e-01 2.73732364e-01 1.21338296e+00 ... -7.94362783e-01 -2.31200710e-01 -4.62946773e-01] [-7.71881044e-01 7.88518369e-01 -4.68198024e-02 ... -9.90426242e-01 -2.20780239e-01 -6.62040532e-01] [-4.27910656e-01 9.01693702e-01 -2.04935759e-01 ... 
5.41585147e-01 2.60486901e-01 1.17524183e+00]] [[ 3.67585152e-01 -1.03282213e-01 1.09564930e-01 ... -1.14220507e-01 4.29014027e-01 -1.33620963e-01] [-7.59082362e-02 -1.79818854e-01 9.39098895e-02 ... 9.44802444e-03 -4.01954651e-01 -1.07069850e-01] [ 1.20491236e-01 -5.27697265e-01 -1.80669665e-01 ... 1.20611535e-02 4.19573754e-01 1.17675774e-03] ... [ 1.68828219e-01 -3.23208898e-01 -4.05687898e-01 ... 2.36410871e-01 3.19986939e-02 -1.85327128e-01] [ 3.90109181e-01 2.49477267e-01 -2.21933246e-01 ... -1.62402019e-01 -3.06415379e-01 1.09326104e-02] [ 2.78631926e-01 1.85079910e-02 1.79362416e-01 ... -9.22253281e-02 7.32666031e-02 4.69601750e-02]] [[-9.81055379e-01 -1.91129103e-01 -5.18544972e-01 ... -1.08614072e-01 1.49988592e+00 7.72797227e-01] [ 1.92309380e-01 2.59904653e-01 7.86484540e-01 ... 8.17233503e-01 -3.00966024e-01 4.65990365e-01] [ 1.17728353e+00 1.00655079e-01 -1.32692611e+00 ... 1.16783967e-02 -2.56916016e-01 3.20152030e-04] ... [-1.24505675e+00 -2.48891145e-01 7.24206090e-01 ... 1.29964995e+00 5.65396659e-02 -6.90382838e-01] [-1.18188512e+00 -2.27844436e-02 9.23255682e-01 ... 7.13314950e-01 -5.62210262e-01 2.81252444e-01] [-9.33058262e-01 -9.48472202e-01 2.31617302e-01 ... 4.07438159e-01 3.75423282e-01 -1.24963117e+00]] [[-2.74030566e-01 3.44719619e-01 -4.88946378e-01 ... -6.15213275e-01 -5.92819273e-01 -3.53974253e-01] [ 2.44629279e-01 -2.27369204e-01 -9.11416888e-01 ... 1.04968929e+00 -4.26206768e-01 -9.73262966e-01] [ 1.38727874e-01 -5.27507327e-02 -4.05102998e-01 ... -6.13935828e-01 -1.13796842e+00 3.45549554e-01] ... [ 5.04271984e-01 1.53037041e-01 5.60212672e-01 ... 2.92032361e-01 -3.77494097e-02 1.63548410e-01] [-3.65272880e-01 1.38245180e-01 7.17361748e-01 ... -2.15614274e-01 -3.01429451e-01 4.37828124e-01] [ 6.61232293e-01 1.76928088e-01 -1.25993192e-01 ... 4.24290299e-01 5.37672862e-02 3.60548496e-01]] [[ 2.02476978e-01 -9.56350744e-01 -3.11622530e-01 ... 
-1.56621691e-02 1.03777654e-01 -3.80389601e-01] [-4.90655564e-02 1.23700090e-01 5.93787991e-02 ... -4.39091414e-01 -7.57580340e-01 -3.79996091e-01] [ 5.57274461e-01 5.49839139e-01 -4.67922598e-01 ... -1.36940917e-02 1.43152744e-01 9.70044553e-01] ... [-6.00189507e-01 -7.16038167e-01 7.36922860e-01 ... 2.02116266e-01 -1.38999259e+00 1.77039608e-01] [ 3.97648364e-01 -1.86982676e-01 -1.15282750e+00 ... 2.51377910e-01 1.03565848e+00 2.04044312e-01] [ 6.66935965e-02 6.71028912e-01 -6.12649500e-01 ... 1.09084320e+00 2.63804831e-02 1.94053158e-01]] [[ 8.79359469e-02 1.38016984e-01 3.96672487e-02 ... 1.18440650e-02 -1.32200107e-01 1.14463530e-01] [ 8.93473178e-02 8.40043090e-03 3.99754569e-02 ... -1.25473738e-02 2.19615642e-02 -1.15902655e-01] [ 7.83678517e-03 -8.44399557e-02 5.75277023e-04 ... 4.67277057e-02 6.97802082e-02 2.95757782e-02] ... [-5.50018288e-02 -6.84801862e-02 2.18270142e-02 ... -5.40793724e-02 6.28695041e-02 -3.02890092e-02] [-3.30184959e-02 -6.78595155e-02 1.22601902e-02 ... -8.30265209e-02 6.82309419e-02 2.04797834e-01] [-9.97823328e-02 -2.32892148e-02 -2.06578858e-02 ... 2.21800003e-02 -6.41749054e-02 1.32656887e-01]]] [[[-6.53044581e-01 -3.22940573e-02 -9.37020779e-02 ... -6.55765235e-01 6.08542681e-01 4.47570503e-01] [-3.60977650e-01 9.00974572e-01 3.78687948e-01 ... -5.67152023e-01 -2.46929258e-01 4.74756032e-01] [ 1.95448957e-02 -5.44310093e-01 1.67459454e-02 ... 8.63407850e-01 -9.57302079e-02 1.29033595e-01] ... [ 4.43265766e-01 1.30983917e-02 -9.02926475e-02 ... 2.51911223e-01 1.21609293e-01 -7.53377795e-01] [-1.19955695e+00 -6.82440307e-03 -2.88058132e-01 ... 1.76185668e-01 -3.30076009e-01 -3.42759579e-01] [ 1.07046163e+00 -1.20413387e+00 -3.13082039e-01 ... 3.48390341e-01 2.78251208e-02 3.75661492e-01]] [[-3.07833910e-01 -1.25763282e-01 2.62536615e-01 ... -1.11595228e-01 -2.21416011e-01 -3.92329305e-01] [-4.65789497e-01 2.38302015e-02 3.82126331e-01 ... 
6.17914051e-02 -2.65573055e-01 7.94827193e-02] [-7.74264149e-03 -1.87826172e-01 4.48485941e-01 ... 3.21298808e-01 -2.06471905e-01 6.25894070e-01] ... [-1.64365202e-01 3.33683074e-01 -6.43908605e-02 ... 7.39787593e-02 2.55017340e-01 -7.50460401e-02] [-1.01240061e-01 8.51884633e-02 1.58385769e-01 ... -1.44464254e-01 -2.47462332e-01 3.80418777e-01] [-5.32041453e-02 -8.45680296e-01 -5.00043392e-01 ... -2.16154262e-01 5.44006862e-02 1.87040970e-01]] [[ 1.21698773e+00 5.42370558e-01 -3.49032491e-01 ... 1.69779927e-01 -8.69242907e-01 6.62324131e-01] [-1.27128303e+00 -7.24991679e-01 -1.53945848e-01 ... -5.11000991e-01 4.84716296e-01 -7.62266099e-01] [ 4.74575311e-01 4.49911594e-01 -4.90876436e-01 ... -4.41008657e-01 -1.17093992e+00 1.16522640e-01] ... [ 2.87567466e-01 9.01327934e-03 9.01281983e-02 ... 5.18817782e-01 -7.89407432e-01 -1.94339737e-01] [ 5.57044923e-01 1.08184552e+00 4.37582582e-01 ... -7.92568505e-01 6.86569035e-01 3.56527388e-01] [-5.50966859e-01 -9.59707201e-02 1.67233393e-01 ... -1.02330185e-01 -7.78856277e-01 -2.67684996e-01]] [[-6.30776048e-01 5.91454923e-01 -2.56716132e-01 ... -4.12734509e-01 6.03224896e-03 -3.91311318e-01] [ 5.32616973e-01 -2.38003716e-01 4.72681671e-02 ... -8.33917916e-01 1.90529689e-01 -2.86678046e-01] [ 3.26376945e-01 9.48744476e-01 1.43776193e-01 ... -7.11645544e-01 -3.56359661e-01 -6.35336578e-01] ... [ 6.14417911e-01 -1.27153501e-01 -5.61765969e-01 ... 1.14971176e-01 -6.71985513e-03 1.89151391e-01] [ 5.99111795e-01 -4.48821813e-01 -3.52905542e-02 ... 2.36149505e-01 5.04273951e-01 4.07377243e-01] [ 5.15382946e-01 -2.33781159e-01 -5.24312794e-01 ... 4.80808318e-01 -1.13289607e+00 6.10214360e-02]] [[-4.38320011e-01 -7.32188225e-01 1.73335791e-01 ... -9.25911516e-02 3.91253024e-01 3.94875616e-01] [-6.07558824e-02 -5.71997166e-01 4.87462550e-01 ... 3.23373824e-01 1.69507079e-02 -3.41340840e-01] [ 4.07399297e-01 9.35802698e-01 -5.97205341e-01 ... 6.46617472e-01 4.02700268e-02 6.85198247e-01] ... 
[-9.36169773e-02 8.01646769e-01 1.21972454e+00 ... -1.25703847e+00 2.25698799e-02 2.37535030e-01] [ 1.10015070e+00 9.13428605e-01 -4.80645210e-01 ... -1.69710922e+00 -6.33360147e-01 5.23617744e-01] [-1.15535045e+00 -9.54154313e-01 -1.35491744e-01 ... -7.10806191e-01 -6.30293548e-01 2.16303200e-01]] [[-1.90951896e-03 -4.75575924e-02 5.51640466e-02 ... -3.77830192e-02 -1.60480849e-02 -3.64910886e-02] [ 1.61481965e-02 -5.61843030e-02 1.45117640e-02 ... 3.72100323e-02 -1.62305072e-01 4.55479845e-02] [-1.18391059e-01 3.58318351e-02 4.64623235e-02 ... -1.51040226e-01 1.13394419e-02 6.33287132e-02] ... [-9.50425342e-02 -1.22220460e-02 3.57759511e-03 ... -1.29331902e-01 7.55475536e-02 5.46537433e-03] [ 4.39984128e-02 6.55396953e-02 9.10498053e-02 ... 1.42275363e-01 1.47896940e-02 -6.50557354e-02] [ 1.05415300e-01 -5.90503551e-02 -9.46754031e-03 ... -6.36933073e-02 2.27436852e-02 -4.77279834e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 4} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': True} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- 4 graph(%self : __torch__.test_group_norm.___torch_mangle_4606.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : NoneType = prim::Constant() %4 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %5 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.bias : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.5828 0.2408 0.5011 -0.2189 1.4190 -0.0183 [ CPUFloatType{6} ]]() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.7969 0.7419 -2.3594 0.5851 -0.0550 -0.1079 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, 
%self.n_groups, %3, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) Exception happened during conversion of op: aten::sub with 
schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4841068 (Squeeze_4841067[0]:i64[], Constant_4841017[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4841068': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4843558 (Squeeze_4843557[0]:i64[], Constant_4843506[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4843558': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4846356 (Squeeze_4846355[0]:i64[], Constant_4846304[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4846356': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schefw_re: [[[[-7.76802480e-01 -1.19112742e+00 -6.07810557e-01 ... -9.22174990e-01 -1.55164480e+00 -1.13361347e+00] [-1.85702407e+00 -3.21761084e+00 -2.49298167e+00 ... -1.02185404e+00 -1.44512117e+00 -1.39623392e+00] [-3.12731838e+00 -1.91733503e+00 -1.79249406e+00 ... -1.46631789e+00 -1.03021276e+00 -3.92703265e-01] ... [-3.47092247e+00 -1.40185082e+00 -1.82335770e+00 ... -2.89535546e+00 -1.06978989e+00 -2.11825562e+00] [-2.64810252e+00 -2.25801945e+00 -2.28514338e+00 ... -2.04881024e+00 -2.50668931e+00 -1.88103795e+00] [-3.07049179e+00 -9.52051997e-01 -1.89487517e+00 ... -6.43807292e-01 -2.22779417e+00 -4.23368156e-01]] [[ 1.41612983e+00 1.32184789e-01 8.97325635e-01 ... 1.47656178e+00 3.23634177e-01 3.69610995e-01] [-1.16909027e+00 1.47975206e+00 1.39635825e+00 ... 1.07763541e+00 -2.12050036e-01 -1.49306789e-01] [ 2.31400862e-01 -3.46469074e-01 5.62334001e-01 ... 8.29698503e-01 1.61517709e-01 -1.57354921e-02] ... [ 1.13100219e+00 -1.37424302e+00 4.46681708e-01 ... -4.64009196e-01 1.52773130e+00 -7.10516989e-01] [ 1.31220686e+00 -1.60498008e-01 5.43592155e-01 ... 3.43711674e-02 -7.00749829e-02 4.62134123e-01] [-3.18122357e-01 7.42043078e-01 6.57287478e-01 ... -5.09828985e-01 1.42913330e+00 -1.20377891e-01]] [[ 1.67084262e-01 3.37319827e+00 -9.58457351e-01 ... 
-1.28047585e+00 -9.16887462e-01 -2.58381009e+00] [-2.46645474e+00 -4.82032716e-01 1.18291128e+00 ... 2.98700261e+00 -2.34437704e+00 -1.97800505e+00] [-1.06245363e+00 4.45138597e+00 -1.99495184e+00 ... 6.10437930e-01 1.20238149e+00 -4.22447085e-01] ... [-1.48773110e+00 3.52179575e+00 -9.04680192e-01 ... -3.47459801e-02 4.18053716e-01 -2.20074415e+00] [ 3.94894719e+00 -3.43935907e-01 -3.00621986e+00 ... 5.82273789e-02 -3.33357477e+00 -3.06998992e+00] [ 1.01063788e-01 1.24131966e+00 4.33582067e+00 ... 2.23454475e+00 2.47287318e-01 -2.08899665e+00]] [[ 9.94978845e-01 -2.32304156e-01 -2.23038554e-01 ... -2.11681619e-01 -9.05174375e-01 -2.77845562e-01] [-9.26000103e-02 -2.20285043e-01 -2.45380551e-01 ... -9.19870138e-01 -5.77411726e-02 -6.71536207e-01] [ 1.95952822e-02 -1.36089885e+00 -5.37400782e-01 ... -7.49657094e-01 -5.93057752e-01 -2.83272237e-01] ... [ 2.17054263e-01 -7.66968071e-01 -9.83743593e-02 ... -2.51902729e-01 2.44584411e-01 -1.03076553e+00] [-1.73087180e-01 -1.18434513e+00 1.77669212e-01 ... -1.08119977e+00 3.77455741e-01 -1.06850064e+00] [-1.43489271e-01 -3.93450379e-01 -6.11256421e-01 ... 1.00456464e+00 -3.13267410e-01 -8.36453259e-01]] [[ 1.45323730e+00 1.35136807e+00 1.37715554e+00 ... 1.29225016e+00 1.40838122e+00 1.45278203e+00] [ 1.48976541e+00 1.33028650e+00 1.40532732e+00 ... 1.48924482e+00 1.42726290e+00 1.41155064e+00] [ 1.41041756e+00 1.42275429e+00 1.40154278e+00 ... 1.43728554e+00 1.39950418e+00 1.43790901e+00] ... [ 1.41271806e+00 1.29400396e+00 1.39082289e+00 ... 1.31404877e+00 1.28339469e+00 1.43574309e+00] [ 1.43210018e+00 1.40870631e+00 1.40988183e+00 ... 1.48471606e+00 1.52175117e+00 1.44902921e+00] [ 1.47695458e+00 1.35207593e+00 1.45042479e+00 ... 1.42092574e+00 1.37692583e+00 1.39142954e+00]] [[ 1.14434995e-02 6.93535730e-02 -1.50584340e-01 ... -9.85544771e-02 5.54661378e-02 -8.43447633e-03] [-7.84953311e-02 -2.28295419e-02 -4.66105230e-02 ... 
-4.11967821e-02 -2.32793301e-01 1.42622247e-01] [-1.25419358e-02 2.17064284e-02 -5.83615638e-02 ... -1.90533623e-01 -3.53525728e-02 -4.78444211e-02] ... [-4.27366681e-02 -1.89278394e-01 1.54070351e-02 ... -7.05765560e-02 -3.67126684e-03 -7.45298564e-02] [ 1.29830763e-01 -6.64562806e-02 6.93670586e-02 ... -6.49851039e-02 2.13565394e-01 1.27615958e-01] [ 9.29829255e-02 4.43029739e-02 -8.89905263e-03 ... -1.08446032e-01 4.31773886e-02 2.24603061e-02]]] [[[-1.26517725e+00 -2.28602934e+00 -1.70432246e+00 ... -2.73335052e+00 -1.63522434e+00 -1.25138056e+00] [-2.10923576e+00 -1.70337713e+00 -1.28756511e+00 ... -1.85296977e+00 3.46915603e-01 -1.50150001e+00] [-1.34422040e+00 -1.82527888e+00 -1.29324937e+00 ... -2.10729098e+00 -1.14907897e+00 -1.29346132e+00] ... [-1.10433906e-01 -2.17326546e+00 -3.16067624e+00 ... -1.66208565e+00 -1.39383388e+00 -2.47087145e+00] [ 6.20013118e-01 -1.27062607e+00 -1.66917396e+00 ... -1.67958558e+00 -6.61987305e-01 -9.89255786e-01] [-7.57205963e-01 -1.20350552e+00 -1.13124883e+00 ... -8.59534502e-01 -1.59898472e+00 -1.22120285e+00]] [[ 9.33682263e-01 8.48550320e-01 -1.82955295e-01 ... -1.06233329e-01 1.68850446e+00 1.22489405e+00] [-5.96022010e-01 -2.73435950e-01 2.54637063e-01 ... 1.95584297e-01 1.52342880e+00 5.81722498e-01] [ 4.07808155e-01 -1.01488113e+00 3.37859213e-01 ... 1.10137916e+00 -8.45683515e-02 3.66256237e-01] ... [-4.07354832e-01 1.23743582e+00 -1.87630221e-01 ... -6.44242048e-01 5.27854681e-01 -7.74552226e-01] [ 4.28929180e-02 -1.67264521e+00 1.00490940e+00 ... 3.42363894e-01 3.07733476e-01 2.02389240e-01] [ 1.55393600e+00 4.60526407e-01 -4.43320237e-02 ... 1.95006907e-01 -6.07220948e-01 -9.52060342e-01]] [[-2.33540297e+00 8.45847011e-01 1.18758380e+00 ... 3.10046256e-01 9.36188757e-01 -1.55886734e+00] [-1.09739077e+00 1.25414765e+00 -2.87715197e+00 ... 2.19032511e-01 7.20849216e-01 3.24251622e-01] [-9.13489580e-01 2.57182455e+00 1.87224495e+00 ... -1.37722206e+00 -1.85729122e+00 3.52326989e+00] ... 
[ 2.11918592e+00 2.23011899e+00 3.21155190e-01 ... -5.51531255e-01 -1.30144536e+00 2.82750273e+00] [ 3.78154898e+00 -1.36226094e+00 -4.38797802e-01 ... -2.20255837e-01 4.76184338e-02 3.52751279e+00] [ 2.65516138e+00 3.32244062e+00 2.97202277e+00 ... -9.74356890e-01 -8.13074708e-01 -7.42767453e-01]] [[ 9.31246996e-01 1.92842722e-01 -6.93601251e-01 ... 4.63986039e-01 -5.97637221e-02 -5.74061990e-01] [ 3.91078591e-01 -7.82873034e-01 -1.65313393e-01 ... -3.31450790e-01 4.70785320e-01 -6.58850193e-01] [-6.90390170e-01 -7.27588594e-01 -1.91503334e+00 ... -2.59501904e-01 -1.71402663e-01 8.13681901e-01] ... [ 2.09541842e-01 2.85703659e-01 -5.43604791e-01 ... -5.70942223e-01 2.77772490e-02 7.21553147e-01] [-8.84982347e-01 1.37462527e-01 -1.96763888e-01 ... -6.00588977e-01 8.63865241e-02 4.50931042e-01] [ 6.69905961e-01 -8.43722582e-01 1.10785678e-01 ... 1.68097213e-01 7.62604028e-02 -2.87224799e-01]] [[ 1.38702714e+00 1.37735415e+00 1.45692432e+00 ... 1.46982741e+00 1.41778171e+00 1.43306530e+00] [ 1.46142220e+00 1.40319312e+00 1.43911421e+00 ... 1.47873080e+00 1.43687952e+00 1.48460948e+00] [ 1.37418938e+00 1.40274763e+00 1.31475747e+00 ... 1.49305487e+00 1.40056753e+00 1.41009581e+00] ... [ 1.42498970e+00 1.42464519e+00 1.40281343e+00 ... 1.47703302e+00 1.47528517e+00 1.30516291e+00] [ 1.48186779e+00 1.46019316e+00 1.43083179e+00 ... 1.35476589e+00 1.41625595e+00 1.43497980e+00] [ 1.52832723e+00 1.43699050e+00 1.38287914e+00 ... 1.39710796e+00 1.33949482e+00 1.43410861e+00]] [[-9.39881429e-02 -5.15334718e-02 -2.78740898e-02 ... 4.43411767e-02 7.97814876e-02 8.09604004e-02] [ 8.93458277e-02 -1.05777569e-01 -2.45508011e-02 ... -1.71138465e-01 -4.15642895e-02 1.21742889e-01] [-7.63527676e-02 2.28444174e-01 -3.15122753e-02 ... 4.85185832e-02 -9.72112045e-02 -2.78055500e-02] ... [-1.25525445e-01 -4.95344736e-02 -7.20129833e-02 ... 6.66115135e-02 -2.13747725e-01 -4.26620506e-02] [-6.30799755e-02 -3.74632403e-02 -1.27120942e-01 ... 
8.41553435e-02 1.00288108e-01 -1.47002554e-02] [ 3.74504402e-02 6.73608705e-02 1.47705108e-01 ... 3.90917324e-02 1.06797636e-01 1.50343832e-02]]] [[[-2.04889560e+00 -1.72488070e+00 -1.35708654e+00 ... -1.08404016e+00 -1.63073516e+00 -1.49009609e+00] [-1.46789491e+00 -7.81511605e-01 -1.92873645e+00 ... -1.22670364e+00 -5.76582789e-01 -2.50251913e+00] [-3.18563461e+00 -2.47801757e+00 -2.26553607e+00 ... -1.75728595e+00 -2.84498692e+00 -1.80429339e-01] ... [-1.36202323e+00 -1.62239671e+00 -7.65137911e-01 ... -8.72766003e-02 -8.60679567e-01 -1.80345857e+00] [-1.04498792e+00 -2.15950251e+00 -1.09109282e+00 ... -1.15886879e+00 -2.64988041e+00 -1.44960690e+00] [-1.66293812e+00 -2.51057100e+00 -2.97755241e-01 ... -1.53485048e+00 -2.17936373e+00 -7.66339898e-01]] [[ 1.80004275e+00 3.78197998e-01 2.94043630e-01 ... 6.04117692e-01 1.26775646e+00 -7.42876232e-01] [-9.80901659e-01 -2.07346752e-01 8.00379813e-02 ... 1.91157416e-01 -1.37965274e+00 4.92407143e-01] [ 3.54886711e-01 3.71707231e-01 -1.21869254e+00 ... -8.26795936e-01 2.55342722e-01 4.77401108e-01] ... [ 3.17178249e-01 6.18352115e-01 3.26198608e-01 ... -1.17360353e+00 1.75541878e+00 2.68261135e-01] [-3.64750028e-01 2.96408176e-01 2.18942061e-01 ... 8.20444643e-01 -2.56281048e-01 -3.21317136e-01] [ 4.09914792e-01 3.07115912e-01 -1.09648085e+00 ... 8.13409686e-02 2.91830868e-01 1.14231396e+00]] [[ 4.68578529e+00 2.22603869e+00 3.52361488e+00 ... 3.38356495e+00 1.27302098e+00 7.96892166e-01] [ 3.78853846e+00 8.12309682e-01 3.69787073e+00 ... 3.81004119e+00 -1.10742867e+00 -5.52923083e-01] [-1.04835618e+00 -6.02894902e-01 -4.32713413e+00 ... 2.45221019e+00 3.93272305e+00 -1.96749210e+00] ... [ 4.14063543e-01 3.97125745e+00 -3.10544276e+00 ... -1.30380571e+00 -2.01265380e-01 3.45766854e+00] [ 7.10629582e-01 -2.62522012e-01 -1.34552407e+00 ... -3.15551853e+00 4.64932585e+00 -1.24353662e-01] [ 3.36580753e+00 -4.08060646e+00 2.15381455e+00 ... 
7.65967369e-01 2.30521774e+00 2.11020440e-01]] [[-4.54293460e-01 -2.24472776e-01 -1.73296437e-01 ... 3.07653099e-01 3.94567028e-02 -1.17671895e+00] [-3.55038702e-01 -2.00100243e-01 -4.93532956e-01 ... -2.80411661e-01 -2.51839012e-01 -2.37478018e-01] [-4.81260061e-01 5.21097958e-01 1.23591781e-01 ... 2.42651805e-01 6.32168949e-01 -1.27328682e+00] ... [-4.59340066e-01 3.89558107e-01 -2.38141671e-01 ... -1.75615132e+00 -2.70210028e-01 4.79672998e-01] [-3.82067084e-01 -2.27316767e-01 6.49434388e-01 ... 1.42391667e-01 6.81629330e-02 -6.04251802e-01] [ 1.61312416e-01 -7.19377279e-01 1.58964097e-01 ... -1.06410635e+00 -1.01119792e+00 -7.95984566e-01]] [[ 1.37091982e+00 1.40165412e+00 1.40575063e+00 ... 1.43258715e+00 1.38762093e+00 1.41979444e+00] [ 1.51643980e+00 1.41381311e+00 1.39834297e+00 ... 1.36441791e+00 1.32838869e+00 1.41168678e+00] [ 1.33634579e+00 1.50805199e+00 1.38110340e+00 ... 1.37918305e+00 1.39763808e+00 1.47819936e+00] ... [ 1.34519029e+00 1.39706469e+00 1.46979010e+00 ... 1.34269214e+00 1.46493781e+00 1.38301909e+00] [ 1.30460966e+00 1.29509068e+00 1.32751942e+00 ... 1.49474645e+00 1.47372723e+00 1.42830360e+00] [ 1.49559975e+00 1.42730486e+00 1.34383404e+00 ... 1.37755907e+00 1.32563317e+00 1.39512849e+00]] [[ 1.40590183e-02 5.91639951e-02 -2.24039719e-01 ... -8.56791064e-03 -2.19250768e-02 -2.11745992e-01] [ 3.05340886e-02 -1.38617912e-02 -1.79658204e-01 ... 6.20206110e-02 -4.92378734e-02 3.02162450e-02] [-7.78002739e-02 1.28450975e-01 -2.62804218e-02 ... 2.60738105e-01 9.57634151e-02 8.64079148e-02] ... [ 1.01353899e-01 7.50825256e-02 1.22462384e-01 ... -1.20227262e-01 -1.28034964e-01 8.93839747e-02] [-7.51899853e-02 3.64188515e-02 -4.90919575e-02 ... 1.07278831e-01 -5.82012907e-02 5.51161021e-02] [-2.20447127e-02 -2.13809665e-02 1.50543556e-01 ... 5.44399507e-02 7.96976537e-02 -6.24813847e-02]]] ... [[[-1.42880630e+00 -3.54788113e+00 -2.09902257e-01 ... 
-1.26248872e+00 -2.05870008e+00 -2.18537068e+00] [-1.84590852e+00 -1.37969112e+00 -6.44424200e-01 ... -1.95817161e+00 -1.25109029e+00 -2.18267035e+00] [-1.33997464e+00 -1.48732722e+00 -1.34998167e+00 ... -6.22456551e-01 -2.66118860e+00 -1.17069054e+00] ... [-1.47647190e+00 -4.34166288e+00 -1.35918713e+00 ... -4.05584067e-01 -1.03318012e+00 -1.82580268e+00] [-1.91231656e+00 -1.84696996e+00 -6.44592345e-01 ... -2.20350289e+00 -8.08968067e-01 -2.44802213e+00] [-1.90548289e+00 -9.93242681e-01 -1.80626345e+00 ... -4.41245854e-01 -2.79879975e+00 -1.27254450e+00]] [[ 2.40140963e+00 3.68176222e-01 -6.74704552e-01 ... -7.00460553e-01 4.19078916e-01 -1.67359203e-01] [ 2.76271105e-01 -6.36835158e-01 -3.66501123e-01 ... -1.30834058e-01 -1.82058871e-01 8.58162567e-02] [-7.22702026e-01 -6.45299137e-01 1.48715019e-01 ... -1.74043655e-01 4.39254194e-01 3.57363850e-01] ... [ 8.96007836e-01 2.17924261e+00 8.07667553e-01 ... -5.53950906e-01 2.08464533e-01 1.01081145e+00] [ 1.54053128e+00 -1.55237928e-01 5.69794215e-02 ... -2.47979924e-01 -2.78966010e-01 2.57970154e-01] [-2.92973131e-01 1.22749388e+00 9.85899866e-01 ... 4.94774163e-01 6.72588170e-01 -2.55906492e-01]] [[-2.21430254e+00 -8.49779487e-01 3.01964998e+00 ... -2.15746307e+00 1.66370034e+00 7.30591789e-02] [ 7.01886797e+00 1.42342508e+00 1.02912486e-01 ... -1.39354527e+00 2.39963078e+00 2.89327502e+00] [ 8.24205816e-01 3.60351610e+00 -3.43286729e+00 ... 1.95011485e+00 -3.67457056e+00 1.30417669e+00] ... [ 1.85753357e+00 -3.84989786e+00 2.66254163e+00 ... -2.25868866e-01 4.25059652e+00 4.86117229e-02] [-2.29212141e+00 -3.69009995e+00 -1.19614232e+00 ... 4.32120514e+00 -3.61638337e-01 -2.88896847e+00] [ 1.50297269e-01 2.41140866e+00 5.09178877e+00 ... -2.01250955e-01 2.31042814e+00 2.52799296e+00]] [[ 7.75213987e-02 3.71456474e-01 -1.09717917e+00 ... 1.57649890e-02 -6.01628840e-01 6.95545852e-01] [-7.93759823e-02 -3.49652678e-01 -4.97161299e-01 ... 
4.36145604e-01 9.91320163e-02 3.02096426e-01] [-1.15846646e+00 -3.72607052e-01 3.64669710e-01 ... -5.09348571e-01 4.24791686e-03 -7.79191971e-01] ... [-1.57529569e+00 1.81968465e-01 1.99689567e-01 ... -1.13009524e+00 -6.29966080e-01 -1.09493780e+00] [ 3.24732095e-01 8.40528548e-01 -1.48492050e+00 ... -2.50876307e-01 -6.14033699e-01 -2.75022835e-01] [ 3.69611084e-01 -8.29081237e-01 1.91655313e-03 ... -1.05944967e+00 2.02404201e-01 -5.46668172e-01]] [[ 1.45549822e+00 1.47111595e+00 1.43599975e+00 ... 1.37944317e+00 1.39421749e+00 1.39777029e+00] [ 1.39607763e+00 1.36593866e+00 1.36914492e+00 ... 1.39865029e+00 1.39242995e+00 1.34724855e+00] [ 1.40844798e+00 1.36025429e+00 1.40926528e+00 ... 1.30336130e+00 1.43901563e+00 1.46605182e+00] ... [ 1.44554842e+00 1.49890709e+00 1.40430021e+00 ... 1.39035845e+00 1.35075247e+00 1.43887615e+00] [ 1.35682523e+00 1.37321103e+00 1.44158232e+00 ... 1.34449828e+00 1.46206319e+00 1.45392156e+00] [ 1.42702293e+00 1.42849481e+00 1.37147343e+00 ... 1.45326221e+00 1.43011844e+00 1.28318822e+00]] [[ 8.33086446e-02 -1.54247627e-01 -1.15543775e-01 ... -3.50599065e-02 -2.10116550e-01 1.32127136e-01] [-1.12230312e-02 8.28171521e-02 -1.18886560e-01 ... 1.27030013e-03 -1.45436481e-01 5.36450222e-02] [-1.01334088e-01 -1.22582829e-02 8.07877406e-02 ... -2.53671128e-02 5.27389944e-02 -1.56903863e-02] ... [ 1.00428695e-02 8.62672254e-02 1.58340596e-02 ... 9.74290818e-02 1.93399768e-02 3.68071534e-02] [ 9.20537487e-03 -7.93156400e-02 5.88281080e-02 ... -1.69715509e-01 1.37047946e-01 -2.95906793e-02] [-1.91053063e-01 5.91149181e-02 2.75045745e-02 ... 1.92576110e-01 9.82353687e-02 4.67336066e-02]]] [[[-7.50514090e-01 -1.46777582e+00 -1.78477621e+00 ... -2.17251706e+00 -1.49545288e+00 -1.64420843e+00] [-1.38680100e+00 -1.68897355e+00 -4.53855425e-01 ... -2.53056765e+00 -1.24013305e+00 2.60582507e-01] [-1.93455029e+00 -1.39236593e+00 -2.06762648e+00 ... -2.06658936e+00 -2.13883281e+00 -1.93382382e+00] ... 
[-2.59025908e+00 -3.33609891e+00 -8.40808630e-01 ... -2.24225259e+00 -8.75541389e-01 -2.87804365e+00] [-2.09854364e+00 -2.85585356e+00 -1.52516711e+00 ... -2.20217061e+00 -7.92123616e-01 -2.83867955e+00] [-1.88924456e+00 -1.55453730e+00 -1.53908515e+00 ... -2.80134249e+00 -3.95809531e-01 -1.37449038e+00]] [[ 1.30869567e+00 3.15419585e-01 -8.81986558e-01 ... 8.83328676e-01 1.06887841e+00 -2.20789909e-02] [-3.11439894e-02 -2.74899811e-01 3.83128077e-02 ... 4.93966758e-01 1.17942178e+00 6.51770234e-01] [ 5.54088056e-01 -5.22909760e-01 -6.50105834e-01 ... 5.17524898e-01 -3.35111648e-01 1.20642334e-01] ... [ 5.82551122e-01 -1.59561202e-01 -2.84195453e-01 ... -4.45551932e-01 1.56153053e-01 1.10063434e+00] [ 1.00271034e+00 5.62391579e-01 -1.16987932e+00 ... -7.69751608e-01 1.10727727e+00 -4.32928354e-01] [ 6.90553963e-01 5.89995146e-01 1.02457619e+00 ... 1.27497983e+00 1.53438592e+00 -5.36318198e-02]] [[-4.46018040e-01 -1.29010785e+00 1.66802716e+00 ... 3.59724569e+00 1.10815132e+00 -1.62530887e+00] [ 7.99132466e-01 2.35149527e+00 8.69000912e-01 ... 1.73241460e+00 5.12075901e+00 3.13988686e-01] [-2.32208657e+00 -2.14882135e+00 1.99216175e+00 ... -1.54923275e-01 -3.33998346e+00 -1.50635350e+00] ... [ 3.02430034e+00 1.49733758e+00 2.48130023e-01 ... 2.85091257e+00 1.90913749e+00 5.55037308e+00] [-2.62314558e+00 -2.03540254e+00 -1.03154409e+00 ... 1.82952791e-01 -6.13710344e-01 3.25153637e+00] [ 3.39334536e+00 -3.34259272e+00 -8.65287960e-01 ... -5.43652236e-01 -1.12078273e+00 -1.93342149e+00]] [[ 4.16799068e-01 -2.34408259e-01 6.67682961e-02 ... -1.28627419e+00 2.81176031e-01 -1.13530302e+00] [ 8.27018499e-01 4.30963457e-01 -4.75083292e-01 ... -2.75008321e-01 9.50646818e-01 4.25051093e-01] [-2.89653540e-01 -1.23796201e+00 5.78436673e-01 ... 4.57376912e-02 -6.42636061e-01 -9.01838318e-02] ... [-2.46000290e-01 -5.93724549e-01 -6.83041096e-01 ... -2.36525089e-01 3.46836835e-01 -5.10853156e-03] [ 1.22486484e+00 3.52004217e-03 -1.79216594e-01 ... 
-3.35366905e-01 -9.22179937e-01 4.81336385e-01] [ 3.20011348e-01 -7.10734367e-01 6.50206327e-01 ... -2.03399062e-01 -4.93026286e-01 -5.03498316e-01]] [[ 1.40612829e+00 1.44868040e+00 1.36860359e+00 ... 1.39549375e+00 1.40655077e+00 1.51156473e+00] [ 1.40427947e+00 1.41857982e+00 1.36883020e+00 ... 1.37064791e+00 1.42894304e+00 1.34163189e+00] [ 1.49755526e+00 1.40148616e+00 1.44357479e+00 ... 1.44294453e+00 1.52200615e+00 1.41711926e+00] ... [ 1.38223183e+00 1.44337618e+00 1.35428727e+00 ... 1.47237659e+00 1.53465283e+00 1.39384437e+00] [ 1.38500357e+00 1.31879532e+00 1.44892371e+00 ... 1.36691535e+00 1.40177548e+00 1.47601879e+00] [ 1.37566829e+00 1.41366339e+00 1.52287972e+00 ... 1.39071584e+00 1.42249322e+00 1.31278634e+00]] [[-2.66023097e-04 -7.58573115e-02 -5.48499599e-02 ... -7.60631412e-02 -3.84471193e-02 4.25405353e-02] [-1.06834359e-01 -6.46616071e-02 -6.59633894e-03 ... -8.06504395e-03 1.33175422e-02 2.09297761e-02] [-2.26371810e-01 -1.88608933e-02 -5.43013625e-02 ... 1.88241750e-01 -1.04577683e-01 -8.71579722e-03] ... [ 3.24220471e-02 -2.38289550e-01 1.68465137e-01 ... -1.39546528e-01 -1.60425007e-01 -1.49499267e-01] [-5.57957739e-02 -7.07862526e-02 2.29581788e-01 ... 6.01611584e-02 6.81877658e-02 -1.66767880e-01] [ 1.02258295e-01 -1.30315229e-01 -6.50511384e-02 ... -2.35981010e-02 1.65338323e-01 1.40278667e-01]]] [[[-1.47377920e+00 -1.33857214e+00 -4.00913417e-01 ... -1.11274672e+00 -2.35591912e+00 -2.08863115e+00] [-4.82708216e-01 -1.51454735e+00 -8.25255513e-01 ... -1.64940846e+00 -1.31644511e+00 -2.94676089e+00] [-1.22948480e+00 -6.01320446e-01 -7.65176058e-01 ... -5.23512363e-01 -2.36243868e+00 -1.26111150e+00] ... [-8.49292278e-02 -1.08440864e+00 -6.00213051e-01 ... -3.98701042e-01 -2.12779924e-01 -1.97112274e+00] [-1.55828369e+00 -2.26403928e+00 -1.35485137e+00 ... -1.64241624e+00 -2.95724440e+00 -1.99282837e+00] [ 4.02081199e-02 -1.61120629e+00 -1.58213484e+00 ... 
-6.41560137e-01 -2.90756655e+00 -6.45759702e-01]] [[ 4.99801636e-01 -6.13521814e-01 2.63225764e-01 ... 1.20652504e-02 -5.23521066e-01 -2.19822034e-01] [-9.21685323e-02 8.45391452e-01 3.85714084e-01 ... 6.96677744e-01 4.18288767e-01 -2.97003567e-01] [-3.54484290e-01 9.20486629e-01 6.49732351e-01 ... -5.62529624e-01 9.69094396e-01 -5.99693298e-01] ... [ 6.85655057e-01 4.05172467e-01 1.53115439e+00 ... 7.48390853e-02 -1.08773208e+00 2.24920914e-01] [ 3.21027488e-01 6.62238970e-02 7.44202852e-01 ... -9.16283667e-01 -3.37316692e-02 2.19399646e-01] [ 6.88439786e-01 6.97066709e-02 1.15302777e+00 ... 6.04711235e-01 -7.19137132e-01 -1.12700537e-01]] [[ 3.63301516e+00 2.29069853e+00 -9.16814566e-01 ... -2.28183866e+00 8.56974125e-01 2.11364007e+00] [ 2.77078462e+00 6.55165985e-02 -3.39221454e+00 ... -8.77555788e-01 2.29173350e+00 3.69646025e+00] [-1.81492949e+00 -1.12903309e+00 -5.27241516e+00 ... 2.98601925e-01 8.62796688e+00 3.77732903e-01] ... [ 5.76923192e-01 -2.26788688e+00 -1.50564837e+00 ... -9.33436513e-01 1.04205990e+00 2.99258351e+00] [ 9.74743247e-01 6.75314486e-01 -3.37923431e+00 ... 3.12286282e+00 -1.16593277e+00 5.53627729e+00] [ 1.95150745e+00 -1.88040650e+00 -2.91073704e+00 ... 1.64979732e+00 -3.83002472e+00 3.66805625e+00]] [[ 4.01461795e-02 2.97153115e-01 7.20469713e-01 ... 7.96219707e-01 -2.94942081e-01 -8.09081852e-01] [-1.22076166e+00 -6.28761292e-01 1.85846820e-01 ... -3.23020518e-01 -7.36248791e-01 4.14755046e-02] [-5.31184852e-01 2.24891484e-01 7.86370158e-01 ... -9.63413239e-01 1.83192372e-01 -2.61423364e-02] ... [-1.18258047e+00 -7.41675198e-02 2.15132743e-01 ... -3.67955178e-01 -1.00566244e+00 -3.29553515e-01] [ 2.78878719e-01 2.73304433e-01 3.23131114e-01 ... -9.81932878e-01 -8.12909305e-01 -3.63485634e-01] [-1.15924823e+00 -7.86788464e-01 -1.25964439e+00 ... -5.21534026e-01 -7.34649241e-01 -2.24995643e-01]] [[ 1.47483838e+00 1.42207694e+00 1.47242093e+00 ... 
1.40440416e+00 1.35902011e+00 1.34382963e+00] [ 1.43570399e+00 1.47623169e+00 1.49415362e+00 ... 1.45310366e+00 1.38794875e+00 1.36877966e+00] [ 1.36553621e+00 1.38530600e+00 1.38811171e+00 ... 1.37338567e+00 1.43588793e+00 1.45761287e+00] ... [ 1.33312654e+00 1.34728670e+00 1.33496439e+00 ... 1.49071205e+00 1.36010170e+00 1.40270996e+00] [ 1.48233044e+00 1.44530404e+00 1.38323081e+00 ... 1.40527380e+00 1.37632203e+00 1.43655896e+00] [ 1.50710762e+00 1.41478825e+00 1.47226501e+00 ... 1.49727857e+00 1.42080116e+00 1.46331775e+00]] [[-3.26807722e-02 -1.26311570e-01 1.21656209e-01 ... 2.24005543e-02 -5.08731091e-03 -1.90693326e-02] [-2.10599512e-01 2.76525579e-02 -1.13934480e-01 ... -2.09369451e-01 -1.26607463e-01 2.05344930e-02] [ 1.73090681e-01 6.41313717e-02 1.53807893e-01 ... -1.07276790e-01 1.52479529e-01 -1.17669910e-01] ... [-1.59954920e-01 2.26741079e-02 -1.84067726e-01 ... -2.08698303e-01 -6.57766089e-02 1.16724446e-01] [-1.48065597e-01 -3.60979319e-01 -7.79103711e-02 ... -1.96514666e-01 4.43442948e-02 1.13867328e-01] [-9.40777585e-02 -8.13600868e-02 9.88014787e-02 ... -3.42643447e-02 -3.78369354e-02 1.01952264e-02]]]]; ov_res: [[[[-7.76802421e-01 -1.19112742e+00 -6.07810557e-01 ... -9.22174990e-01 -1.55164480e+00 -1.13361347e+00] [-1.85702407e+00 -3.21761084e+00 -2.49298167e+00 ... -1.02185404e+00 -1.44512117e+00 -1.39623380e+00] [-3.12731838e+00 -1.91733503e+00 -1.79249406e+00 ... -1.46631789e+00 -1.03021276e+00 -3.92703295e-01] ... [-3.47092247e+00 -1.40185082e+00 -1.82335770e+00 ... -2.89535522e+00 -1.06978989e+00 -2.11825562e+00] [-2.64810252e+00 -2.25801969e+00 -2.28514338e+00 ... -2.04881024e+00 -2.50668931e+00 -1.88103795e+00] [-3.07049179e+00 -9.52051938e-01 -1.89487517e+00 ... -6.43807232e-01 -2.22779417e+00 -4.23368126e-01]] [[ 1.41612983e+00 1.32184789e-01 8.97325635e-01 ... 1.47656167e+00 3.23634177e-01 3.69610995e-01] [-1.16909027e+00 1.47975194e+00 1.39635825e+00 ... 
1.07763541e+00 -2.12050021e-01 -1.49306789e-01] [ 2.31400877e-01 -3.46469104e-01 5.62333941e-01 ... 8.29698503e-01 1.61517709e-01 -1.57354940e-02] ... [ 1.13100219e+00 -1.37424290e+00 4.46681738e-01 ... -4.64009225e-01 1.52773130e+00 -7.10516930e-01] [ 1.31220686e+00 -1.60497978e-01 5.43592155e-01 ... 3.43711860e-02 -7.00749755e-02 4.62134123e-01] [-3.18122327e-01 7.42043078e-01 6.57287478e-01 ... -5.09828985e-01 1.42913330e+00 -1.20377876e-01]] [[ 1.67084232e-01 3.37319827e+00 -9.58457470e-01 ... -1.28047597e+00 -9.16887581e-01 -2.58381033e+00] [-2.46645498e+00 -4.82032776e-01 1.18291128e+00 ... 2.98700285e+00 -2.34437752e+00 -1.97800529e+00] [-1.06245375e+00 4.45138597e+00 -1.99495196e+00 ... 6.10437930e-01 1.20238149e+00 -4.22447115e-01] ... [-1.48773122e+00 3.52179575e+00 -9.04680312e-01 ... -3.47460359e-02 4.18053687e-01 -2.20074439e+00] [ 3.94894719e+00 -3.43935966e-01 -3.00622010e+00 ... 5.82273267e-02 -3.33357477e+00 -3.06999016e+00] [ 1.01063758e-01 1.24131966e+00 4.33582067e+00 ... 2.23454475e+00 2.47287273e-01 -2.08899689e+00]] [[ 9.94978786e-01 -2.32304141e-01 -2.23038524e-01 ... -2.11681604e-01 -9.05174255e-01 -2.77845562e-01] [-9.25999880e-02 -2.20285013e-01 -2.45380521e-01 ... -9.19870079e-01 -5.77411428e-02 -6.71536148e-01] [ 1.95953101e-02 -1.36089885e+00 -5.37400782e-01 ... -7.49657094e-01 -5.93057752e-01 -2.83272207e-01] ... [ 2.17054248e-01 -7.66968012e-01 -9.83743370e-02 ... -2.51902699e-01 2.44584426e-01 -1.03076553e+00] [-1.73087150e-01 -1.18434513e+00 1.77669212e-01 ... -1.08119977e+00 3.77455771e-01 -1.06850052e+00] [-1.43489257e-01 -3.93450350e-01 -6.11256361e-01 ... 1.00456464e+00 -3.13267380e-01 -8.36453199e-01]] [[ 1.45323730e+00 1.35136795e+00 1.37715554e+00 ... 1.29225016e+00 1.40838110e+00 1.45278203e+00] [ 1.48976541e+00 1.33028638e+00 1.40532732e+00 ... 1.48924470e+00 1.42726290e+00 1.41155064e+00] [ 1.41041756e+00 1.42275429e+00 1.40154266e+00 ... 1.43728554e+00 1.39950418e+00 1.43790889e+00] ... 
[ 1.41271794e+00 1.29400384e+00 1.39082277e+00 ... 1.31404877e+00 1.28339458e+00 1.43574297e+00] [ 1.43210018e+00 1.40870631e+00 1.40988171e+00 ... 1.48471594e+00 1.52175117e+00 1.44902909e+00] [ 1.47695446e+00 1.35207582e+00 1.45042467e+00 ... 1.42092562e+00 1.37692583e+00 1.39142954e+00]] [[ 1.14434958e-02 6.93535730e-02 -1.50584340e-01 ... -9.85544771e-02 5.54661304e-02 -8.43448006e-03] [-7.84953311e-02 -2.28295457e-02 -4.66105267e-02 ... -4.11967821e-02 -2.32793286e-01 1.42622232e-01] [-1.25419395e-02 2.17064228e-02 -5.83615638e-02 ... -1.90533623e-01 -3.53525728e-02 -4.78444248e-02] ... [-4.27366681e-02 -1.89278394e-01 1.54070305e-02 ... -7.05765560e-02 -3.67127056e-03 -7.45298564e-02] [ 1.29830733e-01 -6.64562881e-02 6.93670511e-02 ... -6.49851114e-02 2.13565409e-01 1.27615944e-01] [ 9.29829106e-02 4.43029739e-02 -8.89905635e-03 ... -1.08446032e-01 4.31773886e-02 2.24603023e-02]]] [[[-1.26517725e+00 -2.28602934e+00 -1.70432246e+00 ... -2.73335052e+00 -1.63522434e+00 -1.25138056e+00] [-2.10923576e+00 -1.70337713e+00 -1.28756511e+00 ... -1.85296977e+00 3.46915722e-01 -1.50149989e+00] [-1.34422040e+00 -1.82527888e+00 -1.29324937e+00 ... -2.10729098e+00 -1.14907897e+00 -1.29346120e+00] ... [-1.10433877e-01 -2.17326522e+00 -3.16067624e+00 ... -1.66208565e+00 -1.39383388e+00 -2.47087145e+00] [ 6.20013177e-01 -1.27062607e+00 -1.66917384e+00 ... -1.67958558e+00 -6.61987305e-01 -9.89255726e-01] [-7.57206023e-01 -1.20350552e+00 -1.13124883e+00 ... -8.59534442e-01 -1.59898472e+00 -1.22120285e+00]] [[ 9.33682263e-01 8.48550320e-01 -1.82955295e-01 ... -1.06233321e-01 1.68850446e+00 1.22489405e+00] [-5.96022069e-01 -2.73435950e-01 2.54637063e-01 ... 1.95584297e-01 1.52342880e+00 5.81722498e-01] [ 4.07808155e-01 -1.01488125e+00 3.37859213e-01 ... 1.10137916e+00 -8.45683441e-02 3.66256237e-01] ... [-4.07354861e-01 1.23743582e+00 -1.87630206e-01 ... -6.44242108e-01 5.27854741e-01 -7.74552226e-01] [ 4.28929180e-02 -1.67264521e+00 1.00490940e+00 ... 
3.42363894e-01 3.07733476e-01 2.02389240e-01] [ 1.55393600e+00 4.60526407e-01 -4.43320200e-02 ... 1.95006907e-01 -6.07221007e-01 -9.52060342e-01]] [[-2.33540297e+00 8.45847011e-01 1.18758368e+00 ... 3.10046226e-01 9.36188698e-01 -1.55886745e+00] [-1.09739077e+00 1.25414753e+00 -2.87715197e+00 ... 2.19032496e-01 7.20849156e-01 3.24251592e-01] [-9.13489640e-01 2.57182431e+00 1.87224483e+00 ... -1.37722218e+00 -1.85729122e+00 3.52326965e+00] ... [ 2.11918592e+00 2.23011899e+00 3.21155161e-01 ... -5.51531315e-01 -1.30144536e+00 2.82750273e+00] [ 3.78154898e+00 -1.36226094e+00 -4.38797832e-01 ... -2.20255926e-01 4.76184078e-02 3.52751255e+00] [ 2.65516138e+00 3.32244062e+00 2.97202277e+00 ... -9.74356890e-01 -8.13074708e-01 -7.42767572e-01]] [[ 9.31246996e-01 1.92842707e-01 -6.93601191e-01 ... 4.63986039e-01 -5.97637147e-02 -5.74061990e-01] [ 3.91078591e-01 -7.82873034e-01 -1.65313408e-01 ... -3.31450790e-01 4.70785320e-01 -6.58850133e-01] [-6.90390110e-01 -7.27588594e-01 -1.91503334e+00 ... -2.59501934e-01 -1.71402678e-01 8.13681901e-01] ... [ 2.09541813e-01 2.85703659e-01 -5.43604791e-01 ... -5.70942223e-01 2.77772434e-02 7.21553087e-01] [-8.84982347e-01 1.37462512e-01 -1.96763903e-01 ... -6.00588977e-01 8.63865092e-02 4.50931042e-01] [ 6.69905961e-01 -8.43722522e-01 1.10785671e-01 ... 1.68097228e-01 7.62604177e-02 -2.87224799e-01]] [[ 1.38702714e+00 1.37735415e+00 1.45692432e+00 ... 1.46982741e+00 1.41778171e+00 1.43306530e+00] [ 1.46142220e+00 1.40319312e+00 1.43911421e+00 ... 1.47873080e+00 1.43687952e+00 1.48460948e+00] [ 1.37418938e+00 1.40274763e+00 1.31475759e+00 ... 1.49305487e+00 1.40056753e+00 1.41009581e+00] ... [ 1.42498970e+00 1.42464519e+00 1.40281343e+00 ... 1.47703314e+00 1.47528517e+00 1.30516291e+00] [ 1.48186779e+00 1.46019316e+00 1.43083179e+00 ... 1.35476589e+00 1.41625595e+00 1.43497980e+00] [ 1.52832723e+00 1.43699050e+00 1.38287926e+00 ... 1.39710796e+00 1.33949482e+00 1.43410861e+00]] [[-9.39881429e-02 -5.15334718e-02 -2.78740879e-02 ... 
4.43411767e-02 7.97814801e-02 8.09604004e-02] [ 8.93458277e-02 -1.05777569e-01 -2.45507993e-02 ... -1.71138465e-01 -4.15642895e-02 1.21742889e-01] [-7.63527676e-02 2.28444174e-01 -3.15122716e-02 ... 4.85185832e-02 -9.72112045e-02 -2.78055482e-02] ... [-1.25525445e-01 -4.95344698e-02 -7.20129833e-02 ... 6.66115135e-02 -2.13747725e-01 -4.26620468e-02] [-6.30799755e-02 -3.74632403e-02 -1.27120957e-01 ... 8.41553435e-02 1.00288101e-01 -1.47002544e-02] [ 3.74504402e-02 6.73608705e-02 1.47705108e-01 ... 3.90917361e-02 1.06797636e-01 1.50343832e-02]]] [[[-2.04889560e+00 -1.72488081e+00 -1.35708666e+00 ... -1.08404016e+00 -1.63073516e+00 -1.49009609e+00] [-1.46789503e+00 -7.81511664e-01 -1.92873645e+00 ... -1.22670364e+00 -5.76582909e-01 -2.50251913e+00] [-3.18563461e+00 -2.47801757e+00 -2.26553607e+00 ... -1.75728595e+00 -2.84498692e+00 -1.80429384e-01] ... [-1.36202323e+00 -1.62239671e+00 -7.65137911e-01 ... -8.72767419e-02 -8.60679626e-01 -1.80345857e+00] [-1.04498792e+00 -2.15950251e+00 -1.09109282e+00 ... -1.15886891e+00 -2.64988041e+00 -1.44960690e+00] [-1.66293812e+00 -2.51057124e+00 -2.97755331e-01 ... -1.53485060e+00 -2.17936373e+00 -7.66339898e-01]] [[ 1.80004263e+00 3.78197998e-01 2.94043601e-01 ... 6.04117692e-01 1.26775646e+00 -7.42876351e-01] [-9.80901718e-01 -2.07346767e-01 8.00379664e-02 ... 1.91157416e-01 -1.37965274e+00 4.92407143e-01] [ 3.54886711e-01 3.71707261e-01 -1.21869266e+00 ... -8.26795995e-01 2.55342692e-01 4.77401108e-01] ... [ 3.17178249e-01 6.18352115e-01 3.26198608e-01 ... -1.17360365e+00 1.75541866e+00 2.68261135e-01] [-3.64750028e-01 2.96408176e-01 2.18942061e-01 ... 8.20444643e-01 -2.56281078e-01 -3.21317106e-01] [ 4.09914792e-01 3.07115883e-01 -1.09648085e+00 ... 8.13409612e-02 2.91830868e-01 1.14231396e+00]] [[ 4.68578529e+00 2.22603846e+00 3.52361488e+00 ... 3.38356519e+00 1.27302086e+00 7.96892166e-01] [ 3.78853869e+00 8.12309682e-01 3.69787073e+00 ... 
3.81004119e+00 -1.10742855e+00 -5.52922964e-01] [-1.04835618e+00 -6.02894783e-01 -4.32713413e+00 ... 2.45221019e+00 3.93272281e+00 -1.96749198e+00] ... [ 4.14063573e-01 3.97125721e+00 -3.10544252e+00 ... -1.30380559e+00 -2.01265335e-01 3.45766854e+00] [ 7.10629582e-01 -2.62521982e-01 -1.34552395e+00 ... -3.15551829e+00 4.64932585e+00 -1.24353595e-01] [ 3.36580753e+00 -4.08060646e+00 2.15381455e+00 ... 7.65967429e-01 2.30521774e+00 2.11020470e-01]] [[-4.54293460e-01 -2.24472776e-01 -1.73296437e-01 ... 3.07653129e-01 3.94566879e-02 -1.17671895e+00] [-3.55038702e-01 -2.00100243e-01 -4.93532926e-01 ... -2.80411661e-01 -2.51839012e-01 -2.37478003e-01] [-4.81260061e-01 5.21097958e-01 1.23591788e-01 ... 2.42651805e-01 6.32168949e-01 -1.27328682e+00] ... [-4.59340066e-01 3.89558107e-01 -2.38141656e-01 ... -1.75615120e+00 -2.70210028e-01 4.79673028e-01] [-3.82067084e-01 -2.27316767e-01 6.49434388e-01 ... 1.42391667e-01 6.81629255e-02 -6.04251742e-01] [ 1.61312416e-01 -7.19377279e-01 1.58964112e-01 ... -1.06410635e+00 -1.01119792e+00 -7.95984507e-01]] [[ 1.37091994e+00 1.40165412e+00 1.40575063e+00 ... 1.43258715e+00 1.38762093e+00 1.41979444e+00] [ 1.51643991e+00 1.41381311e+00 1.39834309e+00 ... 1.36441791e+00 1.32838869e+00 1.41168678e+00] [ 1.33634579e+00 1.50805199e+00 1.38110340e+00 ... 1.37918305e+00 1.39763808e+00 1.47819948e+00] ... [ 1.34519041e+00 1.39706469e+00 1.46979010e+00 ... 1.34269226e+00 1.46493781e+00 1.38301921e+00] [ 1.30460966e+00 1.29509068e+00 1.32751942e+00 ... 1.49474645e+00 1.47372723e+00 1.42830360e+00] [ 1.49559975e+00 1.42730498e+00 1.34383404e+00 ... 1.37755907e+00 1.32563317e+00 1.39512849e+00]] [[ 1.40590183e-02 5.91639914e-02 -2.24039719e-01 ... -8.56791157e-03 -2.19250768e-02 -2.11745992e-01] [ 3.05340905e-02 -1.38617922e-02 -1.79658204e-01 ... 6.20206073e-02 -4.92378697e-02 3.02162450e-02] [-7.78002739e-02 1.28450975e-01 -2.62804218e-02 ... 2.60738105e-01 9.57634151e-02 8.64079148e-02] ... 
[ 1.01353899e-01 7.50825256e-02 1.22462384e-01 ... -1.20227262e-01 -1.28034964e-01 8.93839672e-02] [-7.51899853e-02 3.64188515e-02 -4.90919575e-02 ... 1.07278831e-01 -5.82012907e-02 5.51160984e-02] [-2.20447145e-02 -2.13809665e-02 1.50543556e-01 ... 5.44399507e-02 7.96976537e-02 -6.24813810e-02]]] ... [[[-1.42880630e+00 -3.54788113e+00 -2.09902212e-01 ... -1.26248872e+00 -2.05870008e+00 -2.18537068e+00] [-1.84590852e+00 -1.37969100e+00 -6.44424140e-01 ... -1.95817161e+00 -1.25109029e+00 -2.18267035e+00] [-1.33997464e+00 -1.48732722e+00 -1.34998167e+00 ... -6.22456491e-01 -2.66118860e+00 -1.17069042e+00] ... [-1.47647190e+00 -4.34166288e+00 -1.35918701e+00 ... -4.05584067e-01 -1.03318012e+00 -1.82580268e+00] [-1.91231656e+00 -1.84696996e+00 -6.44592285e-01 ... -2.20350289e+00 -8.08968008e-01 -2.44802189e+00] [-1.90548277e+00 -9.93242562e-01 -1.80626345e+00 ... -4.41245764e-01 -2.79879975e+00 -1.27254450e+00]] [[ 2.40140963e+00 3.68176222e-01 -6.74704671e-01 ... -7.00460613e-01 4.19078916e-01 -1.67359218e-01] [ 2.76271105e-01 -6.36835217e-01 -3.66501153e-01 ... -1.30834058e-01 -1.82058886e-01 8.58162344e-02] [-7.22702026e-01 -6.45299256e-01 1.48715004e-01 ... -1.74043640e-01 4.39254194e-01 3.57363850e-01] ... [ 8.96007836e-01 2.17924261e+00 8.07667613e-01 ... -5.53951025e-01 2.08464518e-01 1.01081145e+00] [ 1.54053128e+00 -1.55237913e-01 5.69793954e-02 ... -2.47979924e-01 -2.78966039e-01 2.57970154e-01] [-2.92973131e-01 1.22749388e+00 9.85899806e-01 ... 4.94774163e-01 6.72588170e-01 -2.55906492e-01]] [[-2.21430206e+00 -8.49779487e-01 3.01964974e+00 ... -2.15746284e+00 1.66370022e+00 7.30592310e-02] [ 7.01886702e+00 1.42342508e+00 1.02912501e-01 ... -1.39354527e+00 2.39963078e+00 2.89327502e+00] [ 8.24205816e-01 3.60351610e+00 -3.43286705e+00 ... 1.95011473e+00 -3.67457032e+00 1.30417669e+00] ... [ 1.85753357e+00 -3.84989738e+00 2.66254163e+00 ... -2.25868806e-01 4.25059652e+00 4.86117601e-02] [-2.29212093e+00 -3.69009972e+00 -1.19614232e+00 ... 
4.32120514e+00 -3.61638248e-01 -2.88896823e+00] [ 1.50297314e-01 2.41140842e+00 5.09178829e+00 ... -2.01250851e-01 2.31042814e+00 2.52799296e+00]] [[ 7.75214061e-02 3.71456444e-01 -1.09717917e+00 ... 1.57649871e-02 -6.01628840e-01 6.95545733e-01] [-7.93759972e-02 -3.49652678e-01 -4.97161329e-01 ... 4.36145544e-01 9.91320163e-02 3.02096397e-01] [-1.15846646e+00 -3.72607052e-01 3.64669681e-01 ... -5.09348571e-01 4.24789637e-03 -7.79191971e-01] ... [-1.57529557e+00 1.81968436e-01 1.99689552e-01 ... -1.13009524e+00 -6.29966080e-01 -1.09493780e+00] [ 3.24732065e-01 8.40528488e-01 -1.48492038e+00 ... -2.50876307e-01 -6.14033699e-01 -2.75022864e-01] [ 3.69611084e-01 -8.29081237e-01 1.91652041e-03 ... -1.05944967e+00 2.02404171e-01 -5.46668172e-01]] [[ 1.45549822e+00 1.47111595e+00 1.43599975e+00 ... 1.37944317e+00 1.39421749e+00 1.39777029e+00] [ 1.39607763e+00 1.36593866e+00 1.36914492e+00 ... 1.39865029e+00 1.39242995e+00 1.34724855e+00] [ 1.40844798e+00 1.36025429e+00 1.40926528e+00 ... 1.30336130e+00 1.43901563e+00 1.46605182e+00] ... [ 1.44554842e+00 1.49890709e+00 1.40430021e+00 ... 1.39035845e+00 1.35075247e+00 1.43887615e+00] [ 1.35682523e+00 1.37321103e+00 1.44158232e+00 ... 1.34449816e+00 1.46206319e+00 1.45392156e+00] [ 1.42702293e+00 1.42849481e+00 1.37147343e+00 ... 1.45326221e+00 1.43011844e+00 1.28318810e+00]] [[ 8.33086446e-02 -1.54247612e-01 -1.15543775e-01 ... -3.50599028e-02 -2.10116535e-01 1.32127151e-01] [-1.12230303e-02 8.28171447e-02 -1.18886553e-01 ... 1.27030129e-03 -1.45436466e-01 5.36450185e-02] [-1.01334088e-01 -1.22582819e-02 8.07877406e-02 ... -2.53671128e-02 5.27389944e-02 -1.56903863e-02] ... [ 1.00428704e-02 8.62672180e-02 1.58340596e-02 ... 9.74290892e-02 1.93399750e-02 3.68071608e-02] [ 9.20537766e-03 -7.93156400e-02 5.88281043e-02 ... -1.69715494e-01 1.37047946e-01 -2.95906793e-02] [-1.91053063e-01 5.91149144e-02 2.75045764e-02 ... 1.92576110e-01 9.82353687e-02 4.67336029e-02]]] [[[-7.50514030e-01 -1.46777594e+00 -1.78477633e+00 ... 
-2.17251730e+00 -1.49545288e+00 -1.64420843e+00] [-1.38680100e+00 -1.68897355e+00 -4.53855515e-01 ... -2.53056788e+00 -1.24013305e+00 2.60582536e-01] [-1.93455029e+00 -1.39236605e+00 -2.06762648e+00 ... -2.06658936e+00 -2.13883281e+00 -1.93382382e+00] ... [-2.59025908e+00 -3.33609915e+00 -8.40808630e-01 ... -2.24225259e+00 -8.75541389e-01 -2.87804365e+00] [-2.09854364e+00 -2.85585380e+00 -1.52516711e+00 ... -2.20217085e+00 -7.92123616e-01 -2.83867955e+00] [-1.88924468e+00 -1.55453730e+00 -1.53908515e+00 ... -2.80134249e+00 -3.95809561e-01 -1.37449050e+00]] [[ 1.30869567e+00 3.15419585e-01 -8.81986558e-01 ... 8.83328736e-01 1.06887829e+00 -2.20789779e-02] [-3.11439764e-02 -2.74899840e-01 3.83128040e-02 ... 4.93966788e-01 1.17942178e+00 6.51770294e-01] [ 5.54088116e-01 -5.22909760e-01 -6.50105894e-01 ... 5.17524958e-01 -3.35111678e-01 1.20642334e-01] ... [ 5.82551122e-01 -1.59561202e-01 -2.84195423e-01 ... -4.45551932e-01 1.56153053e-01 1.10063434e+00] [ 1.00271046e+00 5.62391639e-01 -1.16987944e+00 ... -7.69751668e-01 1.10727727e+00 -4.32928324e-01] [ 6.90553963e-01 5.89995146e-01 1.02457619e+00 ... 1.27497983e+00 1.53438592e+00 -5.36318198e-02]] [[-4.46018010e-01 -1.29010785e+00 1.66802716e+00 ... 3.59724593e+00 1.10815132e+00 -1.62530875e+00] [ 7.99132526e-01 2.35149527e+00 8.69000912e-01 ... 1.73241460e+00 5.12075901e+00 3.13988686e-01] [-2.32208657e+00 -2.14882135e+00 1.99216175e+00 ... -1.54923275e-01 -3.33998322e+00 -1.50635338e+00] ... [ 3.02430058e+00 1.49733746e+00 2.48130053e-01 ... 2.85091257e+00 1.90913761e+00 5.55037260e+00] [-2.62314534e+00 -2.03540230e+00 -1.03154409e+00 ... 1.82952836e-01 -6.13710344e-01 3.25153637e+00] [ 3.39334536e+00 -3.34259248e+00 -8.65287900e-01 ... -5.43652177e-01 -1.12078273e+00 -1.93342149e+00]] [[ 4.16799009e-01 -2.34408244e-01 6.67682663e-02 ... -1.28627408e+00 2.81176001e-01 -1.13530290e+00] [ 8.27018380e-01 4.30963397e-01 -4.75083232e-01 ... 
-2.75008291e-01 9.50646639e-01 4.25051033e-01] [-2.89653510e-01 -1.23796177e+00 5.78436613e-01 ... 4.57376726e-02 -6.42636001e-01 -9.01838467e-02] ... [-2.46000290e-01 -5.93724430e-01 -6.83041036e-01 ... -2.36525074e-01 3.46836776e-01 -5.10854041e-03] [ 1.22486460e+00 3.52003565e-03 -1.79216594e-01 ... -3.35366875e-01 -9.22179818e-01 4.81336325e-01] [ 3.20011288e-01 -7.10734308e-01 6.50206208e-01 ... -2.03399062e-01 -4.93026257e-01 -5.03498256e-01]] [[ 1.40612841e+00 1.44868040e+00 1.36860371e+00 ... 1.39549387e+00 1.40655077e+00 1.51156473e+00] [ 1.40427959e+00 1.41857982e+00 1.36883020e+00 ... 1.37064791e+00 1.42894304e+00 1.34163201e+00] [ 1.49755538e+00 1.40148616e+00 1.44357491e+00 ... 1.44294465e+00 1.52200615e+00 1.41711926e+00] ... [ 1.38223183e+00 1.44337630e+00 1.35428727e+00 ... 1.47237659e+00 1.53465295e+00 1.39384437e+00] [ 1.38500357e+00 1.31879532e+00 1.44892371e+00 ... 1.36691535e+00 1.40177560e+00 1.47601891e+00] [ 1.37566829e+00 1.41366339e+00 1.52287972e+00 ... 1.39071584e+00 1.42249322e+00 1.31278634e+00]] [[-2.66026909e-04 -7.58573040e-02 -5.48499562e-02 ... -7.60631412e-02 -3.84471193e-02 4.25405316e-02] [-1.06834352e-01 -6.46616071e-02 -6.59634126e-03 ... -8.06504674e-03 1.33175366e-02 2.09297687e-02] [-2.26371795e-01 -1.88608952e-02 -5.43013625e-02 ... 1.88241720e-01 -1.04577675e-01 -8.71580094e-03] ... [ 3.24220397e-02 -2.38289520e-01 1.68465108e-01 ... -1.39546514e-01 -1.60424992e-01 -1.49499252e-01] [-5.57957739e-02 -7.07862452e-02 2.29581758e-01 ... 6.01611510e-02 6.81877583e-02 -1.66767865e-01] [ 1.02258280e-01 -1.30315214e-01 -6.50511384e-02 ... -2.35981010e-02 1.65338293e-01 1.40278652e-01]]] [[[-1.47377920e+00 -1.33857214e+00 -4.00913388e-01 ... -1.11274672e+00 -2.35591936e+00 -2.08863115e+00] [-4.82708246e-01 -1.51454735e+00 -8.25255513e-01 ... -1.64940846e+00 -1.31644499e+00 -2.94676089e+00] [-1.22948480e+00 -6.01320505e-01 -7.65176058e-01 ... -5.23512363e-01 -2.36243868e+00 -1.26111150e+00] ... 
[-8.49291086e-02 -1.08440864e+00 -6.00213051e-01 ... -3.98701042e-01 -2.12779924e-01 -1.97112274e+00] [-1.55828369e+00 -2.26403928e+00 -1.35485137e+00 ... -1.64241624e+00 -2.95724463e+00 -1.99282837e+00] [ 4.02081534e-02 -1.61120629e+00 -1.58213484e+00 ... -6.41560197e-01 -2.90756655e+00 -6.45759761e-01]] [[ 4.99801666e-01 -6.13521814e-01 2.63225764e-01 ... 1.20652523e-02 -5.23521185e-01 -2.19822034e-01] [-9.21685249e-02 8.45391512e-01 3.85714084e-01 ... 6.96677804e-01 4.18288767e-01 -2.97003567e-01] [-3.54484290e-01 9.20486629e-01 6.49732351e-01 ... -5.62529683e-01 9.69094396e-01 -5.99693418e-01] ... [ 6.85655057e-01 4.05172467e-01 1.53115451e+00 ... 7.48390853e-02 -1.08773208e+00 2.24920914e-01] [ 3.21027488e-01 6.62238970e-02 7.44202793e-01 ... -9.16283727e-01 -3.37316692e-02 2.19399646e-01] [ 6.88439846e-01 6.97066709e-02 1.15302777e+00 ... 6.04711235e-01 -7.19137192e-01 -1.12700544e-01]] [[ 3.63301539e+00 2.29069877e+00 -9.16814566e-01 ... -2.28183866e+00 8.56974125e-01 2.11364007e+00] [ 2.77078485e+00 6.55165687e-02 -3.39221454e+00 ... -8.77555788e-01 2.29173350e+00 3.69646049e+00] [-1.81492937e+00 -1.12903309e+00 -5.27241516e+00 ... 2.98601896e-01 8.62796688e+00 3.77732903e-01] ... [ 5.76923192e-01 -2.26788688e+00 -1.50564837e+00 ... -9.33436513e-01 1.04205990e+00 2.99258351e+00] [ 9.74743247e-01 6.75314486e-01 -3.37923431e+00 ... 3.12286282e+00 -1.16593289e+00 5.53627729e+00] [ 1.95150745e+00 -1.88040650e+00 -2.91073704e+00 ... 1.64979732e+00 -3.83002472e+00 3.66805649e+00]] [[ 4.01461534e-02 2.97153115e-01 7.20469713e-01 ... 7.96219707e-01 -2.94942081e-01 -8.09081852e-01] [-1.22076154e+00 -6.28761232e-01 1.85846820e-01 ... -3.23020518e-01 -7.36248791e-01 4.14754935e-02] [-5.31184852e-01 2.24891469e-01 7.86370158e-01 ... -9.63413179e-01 1.83192357e-01 -2.61423625e-02] ... [-1.18258047e+00 -7.41675422e-02 2.15132758e-01 ... -3.67955178e-01 -1.00566244e+00 -3.29553485e-01] [ 2.78878748e-01 2.73304403e-01 3.23131114e-01 ... 
-9.81932878e-01 -8.12909305e-01 -3.63485634e-01] [-1.15924823e+00 -7.86788464e-01 -1.25964427e+00 ... -5.21534026e-01 -7.34649241e-01 -2.24995658e-01]] [[ 1.47483838e+00 1.42207694e+00 1.47242093e+00 ... 1.40440416e+00 1.35902011e+00 1.34382963e+00] [ 1.43570399e+00 1.47623181e+00 1.49415362e+00 ... 1.45310378e+00 1.38794875e+00 1.36877966e+00] [ 1.36553621e+00 1.38530612e+00 1.38811171e+00 ... 1.37338567e+00 1.43588793e+00 1.45761287e+00] ... [ 1.33312654e+00 1.34728682e+00 1.33496439e+00 ... 1.49071205e+00 1.36010170e+00 1.40270996e+00] [ 1.48233044e+00 1.44530416e+00 1.38323081e+00 ... 1.40527380e+00 1.37632215e+00 1.43655896e+00] [ 1.50710773e+00 1.41478825e+00 1.47226501e+00 ... 1.49727857e+00 1.42080116e+00 1.46331775e+00]] [[-3.26807722e-02 -1.26311556e-01 1.21656209e-01 ... 2.24005580e-02 -5.08731045e-03 -1.90693308e-02] [-2.10599512e-01 2.76525617e-02 -1.13934480e-01 ... -2.09369451e-01 -1.26607448e-01 2.05344949e-02] [ 1.73090681e-01 6.41313717e-02 1.53807893e-01 ... -1.07276790e-01 1.52479529e-01 -1.17669910e-01] ... [-1.59954920e-01 2.26741098e-02 -1.84067726e-01 ... -2.08698303e-01 -6.57766089e-02 1.16724446e-01] [-1.48065597e-01 -3.60979319e-01 -7.79103711e-02 ... -1.96514666e-01 4.43442911e-02 1.13867320e-01] [-9.40777585e-02 -8.13600868e-02 9.88014713e-02 ... -3.42643447e-02 -3.78369354e-02 1.01952273e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 4} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': False} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- 4 graph(%self : __torch__.test_group_norm.___torch_mangle_4608.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %6 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.6355 -1.1182 -0.3501 0.2705 -0.6378 1.3706 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %self.n_groups, %self.bias, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%5, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %6) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) fw_re: [[[[-2.73566365e-01 2.08757997e+00 -5.47589660e-01 ... -9.43575382e-01 5.97590327e-01 2.18660042e-01] [-2.11658478e-01 9.86332059e-01 8.09134915e-02 ... 
6.73647523e-01 -7.53305554e-01 -1.03687048e+00] [-6.19260311e-01 -1.10465121e+00 -1.86015964e+00 ... 3.18433940e-01 -1.20352238e-01 -1.20436490e+00] ... [-1.66422091e-02 3.33021909e-01 -9.13445532e-01 ... 3.01955491e-01 1.99048653e-01 2.80899912e-01] [ 1.26246095e+00 3.51632982e-01 -2.56697029e-01 ... 2.57479370e-01 -2.66000837e-01 8.53155136e-01] [ 9.82693434e-01 -9.31819439e-01 1.16645351e-01 ... 1.72199503e-01 6.19191453e-02 3.83777474e-03]] [[-3.34673643e-01 -9.73171115e-01 4.26911563e-01 ... -1.74318588e+00 2.93861365e+00 1.61678517e+00] [-1.42245972e+00 -4.46312606e-01 -4.67176616e-01 ... 1.38285112e+00 -1.71334684e+00 -7.74428666e-01] [ 1.33637249e+00 2.40891919e-01 -1.83305287e+00 ... -2.41046643e+00 1.11031878e+00 -1.59577176e-01] ... [ 1.35494196e+00 -5.50780237e-01 -1.66395056e+00 ... -1.36128974e+00 -5.09261072e-01 1.17975843e+00] [-1.37292099e+00 2.58924454e-01 1.68347621e+00 ... 1.43494964e+00 -2.31484818e+00 1.19872427e+00] [ 2.11857772e+00 4.52985644e-01 4.63198453e-01 ... -1.18307936e+00 -6.81369960e-01 1.42804158e+00]] [[-5.23218252e-02 8.74064937e-02 -2.86051899e-01 ... 3.45258147e-01 3.47553313e-01 -4.76578087e-01] [-1.65934488e-01 -4.03673619e-01 1.65576831e-01 ... 1.56089157e-01 -1.76542103e-01 8.61741722e-01] [ 5.13772190e-01 -4.54701036e-01 4.45834666e-01 ... 7.60701075e-02 -7.33999386e-02 -2.60273546e-01] ... [-1.67148262e-01 -3.62114087e-02 -2.26383090e-01 ... 9.80241448e-02 1.26210555e-01 4.95595112e-02] [-1.02667592e-01 8.14059600e-02 -5.08250833e-01 ... -2.06857994e-01 -6.41981602e-01 1.54599100e-01] [ 1.24844119e-01 3.41917068e-01 -1.76583678e-01 ... 3.91574460e-04 1.35555357e-01 4.94794846e-02]] [[ 4.77852635e-02 -1.98229894e-01 -9.47917439e-03 ... -1.04271829e-01 -5.74096560e-01 8.66710618e-02] [ 2.45350096e-02 -2.00601771e-01 -3.70524108e-01 ... 2.44911984e-01 -1.09470755e-01 -7.46784866e-01] [ 3.53444159e-01 -2.22615451e-01 9.04929191e-02 ... -6.23394191e-01 2.26307467e-01 1.55068219e-01] ... 
[ 1.58043116e-01 1.81530610e-01 3.31018716e-01 ... -5.55456698e-01 1.19169280e-01 1.00504443e-01] [ 2.83129007e-01 -1.57999083e-01 9.55888927e-02 ... -4.34047222e-01 -1.27163649e-01 7.08717853e-02] [-2.79243916e-01 2.02382490e-01 -5.63705504e-01 ... -5.11150718e-01 2.55271345e-01 1.18594252e-01]] [[-8.61900747e-01 -5.02893984e-01 6.11358047e-01 ... -4.98630434e-01 3.45057636e-01 -9.16015327e-01] [-1.02970123e-01 -5.38684964e-01 -3.65376949e-01 ... -8.96090388e-01 -3.92320901e-01 6.21612549e-01] [ 3.27466905e-01 -4.34921920e-01 5.40244877e-01 ... -5.61379850e-01 5.19282781e-02 -5.99263012e-01] ... [-1.46450669e-01 -5.79829752e-01 1.05155766e+00 ... 1.17873773e-02 -3.47156793e-01 4.08360958e-01] [ 5.97030103e-01 3.08399469e-01 1.58884391e-01 ... 1.11577070e+00 4.11615491e-01 1.25723004e+00] [ 1.56883821e-01 -1.42481160e+00 -1.23333605e-03 ... 3.89900088e-01 2.70205647e-01 2.56305248e-01]] [[-1.02649987e+00 -2.20740294e+00 3.27412516e-01 ... 1.77117419e+00 -6.20099247e-01 -1.17387198e-01] [-2.25618529e+00 -4.10957821e-02 -1.98344517e+00 ... 1.18604684e+00 1.54235637e+00 -1.30040777e+00] [ 2.99040794e-01 -2.02335572e+00 1.66363513e+00 ... 1.28251553e-01 -1.23927987e+00 -2.38301560e-01] ... [-4.65531081e-01 6.92702472e-01 -1.50565267e+00 ... -1.36376679e-01 9.97513592e-01 1.48573816e+00] [-9.51920807e-01 -9.93697464e-01 1.15625286e+00 ... -1.99447453e+00 -2.14956731e-01 1.80790043e+00] [ 1.38974893e+00 2.13018012e+00 6.13461388e-03 ... -1.11403775e+00 9.59750891e-01 2.57537097e-01]]] [[[ 5.63921690e-01 -2.21715048e-01 -2.88149893e-01 ... 4.56323177e-01 1.60962954e-01 -3.57447594e-01] [ 5.31756401e-01 5.85190713e-01 7.18518734e-01 ... -4.18794006e-01 5.04608333e-01 -3.18321228e-01] [ 6.65634871e-01 1.66529402e-01 1.13727534e+00 ... -1.01384795e+00 -4.77494538e-01 -5.97921729e-01] ... [-3.63202780e-01 6.50776267e-01 -9.35713723e-02 ... -8.65807712e-01 1.85962355e+00 1.13934577e-01] [-2.16952160e-01 8.16851199e-01 6.81427360e-01 ... 
-8.36023033e-01 1.79227218e-01 -1.99516833e-01] [-4.32170630e-01 3.85833651e-01 5.06003618e-01 ... -2.69016534e-01 -6.57700747e-02 3.54801685e-01]] [[ 5.01821280e-01 -1.95359015e+00 -8.57961535e-01 ... 2.71370739e-01 -7.23361135e-01 -4.00219634e-02] [-5.72241724e-01 6.95457339e-01 -5.79909325e-01 ... 1.16639638e+00 2.12696934e+00 -1.63110048e-01] [-3.84487242e-01 9.18880999e-02 -2.30097950e-01 ... 1.22884929e+00 1.23208606e+00 -1.83898842e+00] ... [ 2.04001069e+00 7.23137081e-01 1.83117747e+00 ... -7.59866834e-01 8.36235821e-01 5.88211954e-01] [ 1.10096920e+00 -6.00400448e-01 -1.20210004e+00 ... -3.67893904e-01 -3.00570726e-01 7.19852388e-01] [ 7.41170466e-01 3.24540854e+00 -4.57684696e-02 ... -1.59154296e+00 6.62796259e-01 -1.20557237e+00]] [[-4.89422493e-02 -1.07362770e-01 3.16697419e-01 ... -1.08607687e-01 2.64282584e-01 -4.71794277e-01] [-6.27394915e-01 2.10385937e-02 -1.26683593e-01 ... -2.04167515e-02 2.05442756e-01 1.03649282e+00] [ 5.97479865e-02 -1.66458696e-01 -1.17778584e-01 ... 2.59056956e-01 3.16658944e-01 -5.54193854e-01] ... [-2.61101723e-01 -3.30293775e-01 -2.06905276e-01 ... 1.39239296e-01 9.40188110e-01 -1.10701092e-01] [-2.33289208e-02 -2.53765643e-01 2.66757905e-01 ... -6.08492978e-02 -3.30736302e-02 -3.44049156e-01] [ 4.14763808e-01 3.73793095e-01 -3.85181338e-01 ... 3.33750546e-01 -2.43259326e-01 3.28209639e-01]] [[ 1.49219766e-01 2.34309047e-01 1.59533340e-02 ... -6.73082750e-03 2.85194159e-01 1.96966499e-01] [ 4.41361815e-01 -4.14578170e-02 1.57715663e-01 ... -8.22761431e-02 1.03325270e-01 -1.21333502e-01] [ 4.11653578e-01 8.88601169e-02 3.52011740e-01 ... 1.59605190e-01 2.56073475e-01 -1.05865180e-01] ... [ 7.03493953e-02 3.52575839e-01 1.96252912e-01 ... -7.33083963e-01 2.01939628e-01 -3.88666064e-01] [ 1.02793068e-01 -3.64237010e-01 -2.90848851e-01 ... 1.51615620e-01 1.05505638e-01 -4.22902852e-01] [ 8.44947621e-02 3.23426664e-01 -7.62381181e-02 ... 
1.73488081e-01 1.69479907e-01 -3.99785548e-01]] [[ 5.46875417e-01 1.21523011e+00 9.25577521e-01 ... -2.23522797e-01 -9.34693038e-01 6.04804084e-02] [ 4.06792134e-01 1.03830898e+00 1.52781323e-01 ... -8.14782083e-01 1.04973009e-02 -1.04825866e+00] [-5.52902281e-01 -4.18179333e-01 -1.21410981e-01 ... 6.21827602e-01 3.32414091e-01 8.28510165e-01] ... [ 1.04103208e+00 4.18975860e-01 6.77109063e-02 ... 6.11452937e-01 4.97463524e-01 9.20633376e-01] [ 4.46046233e-01 6.25838399e-01 4.73952264e-01 ... 1.07210577e-01 4.65454429e-01 9.26208496e-01] [-1.10306656e+00 5.00424325e-01 1.02758920e+00 ... 6.66818023e-01 -4.84449923e-01 6.25329256e-01]] [[ 4.35378909e-01 -1.11231780e+00 8.99844050e-01 ... -1.87738180e+00 -1.41939056e+00 -1.95474207e+00] [ 9.78213727e-01 -2.21303672e-01 2.00124121e+00 ... -9.33296084e-01 1.53091109e+00 9.47186947e-01] [ 1.35859036e+00 -2.10541797e+00 2.05889523e-01 ... -1.24970400e+00 3.80540520e-01 -5.24584413e-01] ... [ 4.15367931e-01 -1.16208768e+00 2.23073816e+00 ... -1.13531077e+00 -4.65175569e-01 2.72471333e+00] [ 1.54967725e+00 -4.18486819e-02 -2.44474769e+00 ... 1.23408270e+00 -3.12761664e-01 2.96110344e+00] [ 7.70880759e-01 -1.38206518e+00 -1.55652952e+00 ... 3.74872983e-01 -1.06154241e-01 1.60242856e+00]]] [[[ 6.38662636e-01 -6.23044908e-01 -1.82977274e-01 ... 5.85335672e-01 -1.48577854e-01 4.35426563e-01] [-1.01459885e+00 7.72588611e-01 -5.53083301e-01 ... 2.04570964e-01 -3.85023981e-01 1.80475545e+00] [ 4.45526630e-01 1.78335518e-01 4.70990151e-01 ... -1.35797155e+00 -4.47783083e-01 -1.06943226e+00] ... [-2.36043915e-01 1.66942149e-01 1.97241247e-01 ... -3.40156436e-01 8.59786570e-01 -5.09940423e-02] [-5.36393881e-01 1.59511968e-01 -2.81729698e-01 ... -6.17395163e-01 -3.07698369e-01 -3.94021183e-01] [-3.92530300e-03 3.92219841e-01 -8.87511577e-03 ... -8.26086923e-02 -3.70207191e-01 -6.39986217e-01]] [[-4.27740276e-01 -1.91039944e+00 1.28884423e+00 ... 
2.28821356e-02 9.75991070e-01 1.06877148e+00] [-2.71603316e-01 -9.05744195e-01 -9.46118474e-01 ... 1.04831624e+00 -9.52506900e-01 1.04943526e+00] [-9.28307593e-01 -1.01916468e+00 1.00277638e+00 ... 3.10281825e+00 1.76739469e-01 6.38596296e-01] ... [ 7.42040873e-01 -6.23115182e-01 8.97769272e-01 ... 9.73532736e-01 1.05133951e+00 -1.83024958e-01] [-6.27025217e-02 -9.98975754e-01 -1.19274962e+00 ... 3.92550588e-01 1.45523119e+00 -9.31285918e-01] [-1.23609960e+00 3.53456467e-01 -1.04393768e+00 ... 5.16539931e-01 -1.49539673e+00 -7.78814971e-01]] [[-3.86570357e-02 9.04683694e-02 -3.92051041e-01 ... 5.17508626e-01 2.78815418e-01 7.88725242e-02] [-3.86505090e-02 1.40969709e-01 -2.47181192e-01 ... -2.95745254e-01 -4.73688155e-01 -4.30094451e-01] [-4.94501516e-02 3.53523254e-01 4.69382852e-01 ... -4.65288341e-01 1.20993152e-01 -2.12010235e-01] ... [-6.51360899e-02 -5.01299620e-01 9.85428810e-01 ... -1.37610495e-01 4.75994289e-01 -3.17721516e-01] [-1.99147850e-01 -3.61242294e-02 6.98527694e-03 ... -1.28933594e-01 1.27584249e-01 -6.97488964e-01] [-4.63900417e-01 3.53524357e-01 4.46991473e-02 ... 4.90652204e-01 -3.90416645e-02 -4.33079712e-02]] [[-2.30666939e-02 8.15941274e-01 5.48081517e-01 ... 5.07712713e-04 8.40029642e-02 -4.98051047e-01] [ 1.83661282e-01 -5.55466041e-02 2.05823377e-01 ... -9.75047350e-02 -1.45296022e-01 -2.25360304e-01] [ 4.93694305e-01 6.79529682e-02 8.82147346e-03 ... -1.41491309e-01 3.51721227e-01 -4.08166349e-01] ... [-2.72084773e-01 -3.52838516e-01 -8.52193311e-02 ... 1.12098247e-01 1.05578423e-01 -4.82628375e-01] [ 6.39174879e-02 1.56475842e-01 -2.81919360e-01 ... 1.93574905e-01 -3.61020356e-01 1.26853585e-01] [-1.65846273e-01 2.07266398e-03 -3.01044762e-01 ... -1.95221454e-01 -2.43519768e-01 9.25294638e-01]] [[ 2.85499185e-01 -2.68356293e-01 9.08652604e-01 ... -1.57655671e-01 7.93536663e-01 2.43629798e-01] [ 1.70708507e-01 2.30330139e-01 -1.63042679e-01 ... 
-6.70434237e-01 -2.33201936e-01 -4.04427558e-01] [-2.38815874e-01 -7.55534351e-01 1.25255629e-01 ... -3.03200155e-01 2.97623575e-01 -1.20243877e-01] ... [ 8.72729838e-01 -9.46771633e-03 -8.49619150e-01 ... 5.94715029e-03 2.57626921e-01 -3.70235853e-02] [ 5.56138456e-01 4.55728412e-01 1.93116963e-01 ... 2.57158935e-01 -1.73823059e-01 1.65245223e+00] [-8.77080858e-01 -6.44801557e-01 1.03257693e-01 ... 2.06407398e-01 4.44647402e-01 -3.30433488e-01]] [[ 9.34165195e-02 9.32241559e-01 -4.34725583e-01 ... 5.94196618e-01 -3.69110167e-01 1.34165049e+00] [-1.53696346e+00 -1.62271595e+00 -1.27946031e+00 ... 7.59759605e-01 -9.71535325e-01 1.60008240e+00] [ 6.85074389e-01 9.49220598e-01 -4.79703069e-01 ... 1.33471155e+00 -1.97946393e+00 5.44674039e-01] ... [-1.96760845e+00 -6.61240935e-01 -1.90397370e+00 ... -1.10369706e+00 9.79258120e-01 -6.69648349e-01] [ 4.34534818e-01 -9.28923607e-01 6.04970098e-01 ... 3.81077915e-01 2.13475561e+00 1.31520247e+00] [-9.00297165e-01 6.93641484e-01 -1.55898869e-01 ... 1.68499708e+00 1.60930380e-02 -5.64548731e-01]]] ... [[[ 9.26630870e-02 -2.58803964e-01 -3.81640851e-01 ... 3.71313721e-01 2.42976606e-01 1.08309698e+00] [ 9.97201443e-01 -1.16858684e-01 -3.73547733e-01 ... 5.40647447e-01 -1.04331481e+00 -1.19596314e+00] [ 4.07208681e-01 -5.82835317e-01 5.27023256e-01 ... -1.22485101e+00 -2.91901797e-01 -2.16310874e-01] ... [-1.11143529e+00 -9.17073548e-01 -5.48823595e-01 ... 3.66943747e-01 7.32095599e-01 7.59916306e-01] [ 1.38816029e-01 3.08080584e-01 -6.71336174e-01 ... 5.25467955e-02 -1.06413007e+00 -3.00012559e-01] [-1.08127333e-01 4.17421967e-01 -1.35758489e-01 ... 1.01850951e+00 -2.42964730e-01 1.74283072e-01]] [[-1.07385683e+00 7.92355716e-01 -1.77030122e+00 ... 1.24756455e-01 8.15058827e-01 1.96025038e+00] [ 2.40685865e-01 9.67227995e-01 -1.05201578e+00 ... -1.38785350e+00 2.24382415e-01 1.26169574e+00] [-7.68560171e-01 -4.00396854e-01 -8.59436333e-01 ... 1.00458670e+00 3.98616374e-01 -1.04428256e+00] ... 
[-1.22839105e+00 1.27106810e+00 1.45885396e+00 ... -9.67837512e-01 3.43779296e-01 7.19224870e-01] [-6.59949780e-01 2.68784881e-01 -1.23526716e+00 ... -6.83949664e-02 -2.39064604e-01 9.07074809e-01] [ 1.98824510e-01 1.33317959e+00 -2.24286580e+00 ... 5.82076013e-01 1.32670760e+00 2.32427772e-02]] [[ 5.85248992e-02 4.42714065e-01 -1.59236312e-01 ... -1.02194750e+00 -3.38576913e-01 -2.08563954e-01] [ 5.13848029e-02 3.16056401e-01 4.32008088e-01 ... 2.47981533e-01 4.26761717e-01 -3.15023988e-01] [ 3.96399051e-01 3.82921636e-01 4.34715360e-01 ... -1.22142389e-01 -1.43368123e-02 4.82714504e-01] ... [ 7.95983523e-02 -4.07346860e-02 1.36189505e-01 ... 1.63186356e-01 5.95686808e-02 -8.56136009e-02] [ 3.49799663e-01 2.34559000e-01 -2.43996426e-01 ... 3.99427079e-02 1.34432286e-01 -4.76314247e-01] [ 1.45560622e-01 6.38020158e-01 -1.34229317e-01 ... -4.52283829e-01 -3.83489996e-01 6.71780705e-02]] [[ 3.22608471e-01 -5.34500599e-01 1.09695897e-01 ... 4.90766764e-02 1.63553637e-02 -1.03198685e-01] [ 1.66819572e-01 -2.11865455e-01 2.99425989e-01 ... -3.39846075e-01 -7.02236533e-01 7.79662356e-02] [ 2.41939113e-01 3.57957989e-01 -4.29331422e-01 ... -1.00887321e-01 5.40972412e-01 -2.04389729e-02] ... [ 1.21424615e-01 1.58987045e-01 1.05247550e-01 ... 1.83616593e-01 9.94068980e-02 -1.28329381e-01] [-3.86296600e-01 4.19242293e-01 -2.70971775e-01 ... 1.24134138e-01 1.45162061e-01 -2.88138121e-01] [-7.12971315e-02 -8.92262459e-02 -3.91855657e-01 ... -1.56288251e-01 -1.33020252e-01 4.46711123e-01]] [[-8.62500548e-01 -1.10721692e-01 1.26005101e+00 ... -7.63611674e-01 1.32040307e-01 1.90845624e-01] [-6.96125269e-01 2.34615892e-01 -1.23828098e-01 ... 5.86762667e-01 -3.78809869e-01 3.59144688e-01] [-5.36240995e-01 3.20797771e-01 -5.01835108e-01 ... 4.14913118e-01 7.33147264e-01 2.09939629e-01] ... [ 2.45305374e-01 -9.40434169e-03 -6.18948996e-01 ... -1.20150554e+00 -1.96302354e-01 -3.88129979e-01] [ 2.10847974e-01 -1.09935308e+00 7.39334747e-02 ... 
-6.33441985e-01 -8.48888099e-01 3.95704687e-01] [-5.29663146e-01 4.71959040e-02 -1.13694882e+00 ... -1.10030770e+00 1.65803716e-01 -2.20286340e-01]] [[ 1.51259840e+00 1.01730347e+00 2.22578943e-01 ... -8.14471781e-01 -6.21166050e-01 4.11165744e-01] [-7.39018500e-01 -5.88328958e-01 1.36809754e+00 ... -1.24644339e+00 1.16550434e+00 1.02326155e+00] [ 3.96353984e+00 -1.93609536e+00 -1.13774908e+00 ... 1.71436381e+00 1.48461759e-01 1.18345654e+00] ... [-1.08536258e-02 -1.29394829e+00 -2.36747193e+00 ... 1.61632240e+00 -4.03702319e-01 -3.22985023e-01] [-6.80437982e-02 6.88638270e-01 -5.93243122e-01 ... -1.97206843e+00 8.12564850e-01 -9.70661342e-01] [ 7.48406112e-01 -7.69299626e-01 6.54631913e-01 ... 1.32435203e+00 -1.84465453e-01 4.41925406e-01]]] [[[-8.44096720e-01 6.08217418e-01 1.48822641e+00 ... -8.21867466e-01 -6.83340788e-01 -2.53385812e-01] [ 9.99890804e-01 -4.69103754e-01 -1.00623108e-01 ... 1.07570477e-02 1.16115622e-01 -5.55382133e-01] [ 7.68775940e-01 -2.33374834e-02 1.36242807e-01 ... 5.58973253e-01 -4.08852488e-01 -4.38090891e-01] ... [-1.99786454e-01 6.43068790e-01 -9.76280451e-01 ... 7.13499069e-01 7.29896963e-01 -5.05812764e-02] [ 8.33879888e-01 -6.08703315e-01 7.13993907e-02 ... 9.81910944e-01 4.48184371e-01 3.68654370e-01] [-4.89067912e-01 4.20901835e-01 3.69638592e-01 ... 1.02821195e+00 -5.01025021e-01 -1.35907620e-01]] [[ 1.40783250e-01 -4.82624114e-01 -2.58886743e+00 ... -4.25335795e-01 -3.51922423e-01 -1.93924987e+00] [-1.08400309e+00 1.97212070e-01 -3.94102484e-01 ... -1.48535252e-01 5.32714128e-01 -1.97010672e+00] [ 1.23996770e+00 -1.05516434e+00 4.38055426e-01 ... -2.33553842e-01 -5.57612002e-01 -2.34225774e+00] ... [ 6.56468928e-01 1.10585976e+00 -4.23080862e-01 ... 1.54144371e+00 6.40402257e-01 6.26773238e-01] [ 7.08922029e-01 -4.09484170e-02 1.23762059e+00 ... 4.26642090e-01 1.12376794e-01 2.49683785e+00] [-8.03805888e-02 3.86595249e-01 -4.35704708e-01 ... 
-1.26139903e+00 -4.33248341e-01 -9.09356236e-01]] [[ 1.81840450e-01 -1.70813739e-01 1.54009476e-01 ... -3.71070474e-01 1.82729989e-01 5.85481644e-01] [-4.30081606e-01 7.48690665e-01 1.05382510e-01 ... 8.77889320e-02 -6.42564520e-02 1.66457847e-01] [ 8.41405332e-01 -1.75947219e-01 -4.09847796e-01 ... 1.64705336e-01 7.67986000e-01 -4.48713809e-01] ... [ 5.42682469e-01 -4.24909331e-02 5.00696540e-01 ... -1.40829802e-01 7.01053515e-02 -1.46756485e-01] [ 5.01064122e-01 5.08572042e-01 1.60834119e-01 ... -2.31591433e-01 4.24345843e-02 4.60745186e-01] [-1.04035288e-01 4.65403199e-01 4.15564626e-01 ... -3.39972019e-01 3.42390947e-02 3.11020046e-01]] [[-1.01949163e-01 -2.58011818e-01 -3.23489696e-01 ... -3.63508523e-01 -2.62804389e-01 4.42029208e-01] [-2.93696634e-02 2.09961355e-01 3.25729519e-01 ... 1.75760865e-01 3.74288291e-01 4.35363084e-01] [ 8.87598246e-02 -2.29205582e-02 2.14845866e-01 ... -1.57730371e-01 -3.09812605e-01 -3.89935255e-01] ... [-2.36365169e-01 -5.23684211e-02 -4.99616042e-02 ... -7.15451613e-02 -7.41170794e-02 -1.97430164e-01] [ 4.66514081e-02 2.88792849e-01 -3.17826599e-01 ... -4.36174929e-01 -2.83219576e-01 3.04786116e-01] [ 1.66061133e-01 -1.03892526e-02 5.65876305e-01 ... 6.02539599e-01 -8.01596977e-03 2.88561642e-01]] [[ 4.56682026e-01 -1.55511588e-01 8.50462496e-01 ... -4.79261205e-02 -1.14329684e+00 7.34363258e-01] [-1.22238085e-01 9.74313676e-01 -5.51417708e-01 ... -1.15339041e+00 -6.35114789e-01 -1.39293289e+00] [ 5.26576042e-01 -3.79106820e-01 6.70550525e-01 ... -5.91862679e-01 1.19163983e-01 1.81294233e-01] ... [ 5.34622192e-01 5.36598265e-01 4.43896919e-01 ... 7.44081378e-01 1.58899993e-01 -2.16723263e-01] [-1.24828172e+00 -4.66184616e-01 -1.77579075e-01 ... 2.92615950e-01 2.39857182e-01 -1.70290872e-01] [ 8.44562113e-01 4.15625274e-01 -1.94044471e-01 ... -1.43259957e-01 4.77626413e-01 -9.29164514e-02]] [[ 2.85166454e+00 -1.88505933e-01 1.42873001e+00 ... 
-2.72762120e-01 -2.07234934e-01 -6.05260551e-01] [-1.03828736e-01 -2.19152465e-01 2.02744222e+00 ... -4.58303571e-01 -2.15836465e-01 9.78047192e-01] [ 3.02731800e+00 -7.00643957e-01 -7.55650699e-01 ... 2.01107502e-01 3.29120100e-01 2.76127434e+00] ... [ 1.21366811e+00 2.83288550e+00 7.80127525e-01 ... 5.26646554e-01 -2.50046730e-01 1.05264843e+00] [ 4.98673886e-01 8.55935276e-01 9.15231287e-01 ... 3.20571005e-01 -2.99107820e-01 1.34429753e+00] [-1.67914391e+00 -1.89613736e+00 2.41421652e+00 ... -2.44794667e-01 1.48340479e-01 -3.02479124e+00]]] [[[-1.17212558e+00 3.77139121e-01 -8.94643843e-01 ... -6.05112195e-01 -1.62252545e-01 -4.34078813e-01] [ 1.11758924e+00 4.24061656e-01 5.67713797e-01 ... -7.06611633e-01 4.25248265e-01 1.11789310e+00] [ 9.30582762e-01 3.65151346e-01 -3.59367162e-01 ... -7.31354773e-01 -3.24518323e-01 -1.48302084e-02] ... [-6.98907554e-01 -2.15841159e-01 -4.58960116e-01 ... -1.18550472e-01 3.39808047e-01 2.44389057e-01] [-7.40724131e-02 -6.69122577e-01 1.45463574e+00 ... 1.05906978e-01 2.42002591e-01 -1.08613908e+00] [-2.10621670e-01 -5.02868474e-01 1.77653924e-01 ... 1.20387757e+00 4.31094319e-01 2.46977419e-01]] [[-1.92528117e+00 -9.48611677e-01 6.22288764e-01 ... -6.96327686e-01 1.42218804e+00 1.22075403e+00] [ 6.32852614e-01 1.23557663e+00 8.98529768e-01 ... 1.89877644e-01 -5.11611104e-01 4.14124399e-01] [ 1.70865667e+00 1.74638927e+00 4.14033681e-01 ... -3.09759587e-01 1.51995134e+00 8.23544919e-01] ... [ 1.90399349e+00 1.53416562e+00 -2.01829123e+00 ... 4.91909027e-01 -2.44203258e+00 9.04500782e-01] [-1.29670870e+00 7.02903047e-02 1.13891983e+00 ... 1.92798793e+00 1.05979955e+00 -4.47211832e-01] [-8.52569997e-01 3.46873790e-01 -1.13004431e-01 ... -1.12146771e+00 -2.53986359e-01 8.08922291e-01]] [[-3.80215436e-01 4.14379746e-01 -2.86442697e-01 ... -3.70231301e-01 6.48817793e-02 1.50379062e-01] [-1.04094204e-02 -3.03100143e-03 -2.50479698e-01 ... 
6.30651542e-04 -1.67299867e-01 -8.96715879e-01] [-4.05242592e-01 1.17628261e-01 2.65063405e-01 ... -1.01561897e-01 -1.91224724e-01 1.73966944e-01] ... [-4.05485302e-01 5.23087859e-01 -4.03815731e-02 ... -3.42442870e-01 -1.80850819e-01 1.52084157e-01] [-4.65664715e-01 -1.88865304e-01 4.22711402e-01 ... 1.92405641e-01 -2.21874967e-01 1.13737099e-01] [-1.61365077e-01 2.41134763e-01 5.38737066e-02 ... 1.55816704e-01 -1.23569965e-02 -1.45788297e-01]] [[-6.12884434e-03 8.83646756e-02 4.16966110e-01 ... 1.29954532e-01 7.89716616e-02 4.03209090e-01] [-1.33855075e-01 -2.17624545e-01 -1.82491675e-01 ... -3.06300581e-01 4.39644545e-01 1.82729438e-01] [-4.58506942e-01 6.88491389e-02 2.28367940e-01 ... 3.13270241e-01 -6.00160286e-02 2.29223207e-01] ... [-2.33541399e-01 1.22934999e-02 -9.95224789e-02 ... -1.55305758e-01 2.86760002e-01 6.70679092e-01] [-1.81457058e-01 2.13323340e-01 2.40639132e-02 ... -4.16975981e-03 -1.66156679e-01 -5.76111317e-01] [-2.92418808e-01 -1.93180263e-01 -6.80057183e-02 ... -1.73767865e-01 1.55066386e-01 3.21257949e-01]] [[-2.55800784e-01 6.62265718e-01 -6.10612221e-02 ... 2.47647762e-01 -5.48580289e-02 3.07587117e-01] [-4.72015530e-01 9.11419570e-01 2.57700473e-01 ... 1.27883601e+00 -5.62521279e-01 6.18251979e-01] [ 4.33332890e-01 8.24608862e-01 -2.00079277e-01 ... 6.08876705e-01 6.37882531e-01 -4.61009771e-01] ... [-1.34116709e+00 5.49060464e-01 -1.65217027e-01 ... -4.73868102e-01 9.81632113e-01 -3.93434197e-01] [ 5.14165342e-01 1.79190099e-01 1.26367435e-01 ... 2.14030877e-01 1.80342600e-01 -6.10525072e-01] [-1.70484930e-01 1.70272663e-01 8.50767314e-01 ... -2.16678008e-01 -2.62141258e-01 -2.90395677e-01]] [[ 1.13109612e+00 -9.96218443e-01 -7.30423555e-02 ... 8.22715640e-01 -1.88636172e+00 -9.08459425e-01] [ 2.18917775e+00 2.72777033e+00 -8.08476925e-01 ... 7.03489184e-01 9.69654560e-01 2.12136602e+00] [ 1.30678856e+00 -4.89633054e-01 2.30238748e+00 ... -1.00285873e-01 1.38185358e+00 1.45074701e+00] ... 
[-6.49178147e-01 2.66969502e-01 -2.21017432e+00 ... -1.36743307e+00 7.91130543e-01 -5.09209752e-01] [ 2.43358269e-01 -1.47207761e+00 1.42272389e+00 ... -1.59402609e-01 9.93883848e-01 -6.67653158e-02] [-8.35062265e-01 -2.45513305e-01 -5.07919431e-01 ... 1.37340045e+00 5.41583121e-01 -6.85695112e-02]]]]; ov_res: [[[[-2.73566365e-01 2.08757997e+00 -5.47589660e-01 ... -9.43575382e-01 5.97590327e-01 2.18660012e-01] [-2.11658463e-01 9.86332059e-01 8.09134841e-02 ... 6.73647523e-01 -7.53305554e-01 -1.03687048e+00] [-6.19260311e-01 -1.10465121e+00 -1.86015964e+00 ... 3.18433940e-01 -1.20352246e-01 -1.20436490e+00] ... [-1.66422147e-02 3.33021879e-01 -9.13445532e-01 ... 3.01955462e-01 1.99048653e-01 2.80899942e-01] [ 1.26246095e+00 3.51632953e-01 -2.56697059e-01 ... 2.57479370e-01 -2.66000837e-01 8.53155196e-01] [ 9.82693434e-01 -9.31819439e-01 1.16645351e-01 ... 1.72199473e-01 6.19191378e-02 3.83776869e-03]] [[-3.34673613e-01 -9.73171115e-01 4.26911563e-01 ... -1.74318576e+00 2.93861365e+00 1.61678517e+00] [-1.42245972e+00 -4.46312577e-01 -4.67176557e-01 ... 1.38285112e+00 -1.71334684e+00 -7.74428666e-01] [ 1.33637238e+00 2.40891919e-01 -1.83305287e+00 ... -2.41046667e+00 1.11031878e+00 -1.59577161e-01] ... [ 1.35494196e+00 -5.50780237e-01 -1.66395056e+00 ... -1.36128974e+00 -5.09261072e-01 1.17975843e+00] [-1.37292099e+00 2.58924454e-01 1.68347621e+00 ... 1.43494952e+00 -2.31484818e+00 1.19872427e+00] [ 2.11857772e+00 4.52985644e-01 4.63198423e-01 ... -1.18307936e+00 -6.81369960e-01 1.42804158e+00]] [[-5.23218177e-02 8.74064937e-02 -2.86051899e-01 ... 3.45258176e-01 3.47553313e-01 -4.76578116e-01] [-1.65934503e-01 -4.03673619e-01 1.65576845e-01 ... 1.56089187e-01 -1.76542118e-01 8.61741722e-01] [ 5.13772190e-01 -4.54701036e-01 4.45834696e-01 ... 7.60701150e-02 -7.33999386e-02 -2.60273546e-01] ... [-1.67148262e-01 -3.62114087e-02 -2.26383105e-01 ... 9.80241522e-02 1.26210570e-01 4.95595150e-02] [-1.02667592e-01 8.14059600e-02 -5.08250833e-01 ... 
-2.06857994e-01 -6.41981661e-01 1.54599100e-01] [ 1.24844119e-01 3.41917068e-01 -1.76583678e-01 ... 3.91577691e-04 1.35555357e-01 4.94794883e-02]] [[ 4.77852635e-02 -1.98229894e-01 -9.47917812e-03 ... -1.04271822e-01 -5.74096560e-01 8.66710544e-02] [ 2.45350040e-02 -2.00601771e-01 -3.70524108e-01 ... 2.44911954e-01 -1.09470747e-01 -7.46784866e-01] [ 3.53444129e-01 -2.22615436e-01 9.04929116e-02 ... -6.23394191e-01 2.26307452e-01 1.55068204e-01] ... [ 1.58043101e-01 1.81530580e-01 3.31018686e-01 ... -5.55456638e-01 1.19169280e-01 1.00504436e-01] [ 2.83128947e-01 -1.57999083e-01 9.55888927e-02 ... -4.34047222e-01 -1.27163649e-01 7.08717778e-02] [-2.79243916e-01 2.02382475e-01 -5.63705564e-01 ... -5.11150718e-01 2.55271345e-01 1.18594244e-01]] [[-8.61900747e-01 -5.02893984e-01 6.11358047e-01 ... -4.98630404e-01 3.45057666e-01 -9.16015327e-01] [-1.02970123e-01 -5.38684905e-01 -3.65376890e-01 ... -8.96090329e-01 -3.92320901e-01 6.21612549e-01] [ 3.27466905e-01 -4.34921861e-01 5.40244877e-01 ... -5.61379790e-01 5.19282855e-02 -5.99262953e-01] ... [-1.46450654e-01 -5.79829693e-01 1.05155766e+00 ... 1.17873838e-02 -3.47156763e-01 4.08360988e-01] [ 5.97030103e-01 3.08399469e-01 1.58884391e-01 ... 1.11577070e+00 4.11615491e-01 1.25723004e+00] [ 1.56883821e-01 -1.42481148e+00 -1.23332837e-03 ... 3.89900118e-01 2.70205647e-01 2.56305277e-01]] [[-1.02649987e+00 -2.20740294e+00 3.27412516e-01 ... 1.77117419e+00 -6.20099247e-01 -1.17387220e-01] [-2.25618529e+00 -4.10958007e-02 -1.98344517e+00 ... 1.18604684e+00 1.54235625e+00 -1.30040777e+00] [ 2.99040765e-01 -2.02335572e+00 1.66363502e+00 ... 1.28251523e-01 -1.23927975e+00 -2.38301560e-01] ... [-4.65531081e-01 6.92702353e-01 -1.50565255e+00 ... -1.36376694e-01 9.97513533e-01 1.48573816e+00] [-9.51920807e-01 -9.93697464e-01 1.15625274e+00 ... -1.99447453e+00 -2.14956731e-01 1.80790043e+00] [ 1.38974881e+00 2.13018012e+00 6.13459526e-03 ... 
-1.11403775e+00 9.59750831e-01 2.57537067e-01]]] [[[ 5.63921750e-01 -2.21715048e-01 -2.88149893e-01 ... 4.56323177e-01 1.60962969e-01 -3.57447594e-01] [ 5.31756401e-01 5.85190713e-01 7.18518674e-01 ... -4.18794006e-01 5.04608393e-01 -3.18321198e-01] [ 6.65634871e-01 1.66529417e-01 1.13727534e+00 ... -1.01384795e+00 -4.77494538e-01 -5.97921729e-01] ... [-3.63202780e-01 6.50776327e-01 -9.35713574e-02 ... -8.65807712e-01 1.85962355e+00 1.13934591e-01] [-2.16952160e-01 8.16851258e-01 6.81427300e-01 ... -8.36023033e-01 1.79227218e-01 -1.99516833e-01] [-4.32170600e-01 3.85833651e-01 5.06003618e-01 ... -2.69016534e-01 -6.57700673e-02 3.54801685e-01]] [[ 5.01821280e-01 -1.95359027e+00 -8.57961535e-01 ... 2.71370739e-01 -7.23361135e-01 -4.00219783e-02] [-5.72241724e-01 6.95457339e-01 -5.79909325e-01 ... 1.16639638e+00 2.12696934e+00 -1.63110062e-01] [-3.84487271e-01 9.18880776e-02 -2.30097964e-01 ... 1.22884929e+00 1.23208606e+00 -1.83898842e+00] ... [ 2.04001069e+00 7.23137081e-01 1.83117747e+00 ... -7.59866893e-01 8.36235821e-01 5.88211954e-01] [ 1.10096920e+00 -6.00400448e-01 -1.20210004e+00 ... -3.67893934e-01 -3.00570756e-01 7.19852388e-01] [ 7.41170466e-01 3.24540854e+00 -4.57684845e-02 ... -1.59154296e+00 6.62796199e-01 -1.20557237e+00]] [[-4.89422604e-02 -1.07362777e-01 3.16697419e-01 ... -1.08607687e-01 2.64282584e-01 -4.71794277e-01] [-6.27394915e-01 2.10385881e-02 -1.26683608e-01 ... -2.04167571e-02 2.05442756e-01 1.03649282e+00] [ 5.97479865e-02 -1.66458711e-01 -1.17778584e-01 ... 2.59056956e-01 3.16658974e-01 -5.54193854e-01] ... [-2.61101723e-01 -3.30293804e-01 -2.06905276e-01 ... 1.39239296e-01 9.40188110e-01 -1.10701099e-01] [-2.33289264e-02 -2.53765643e-01 2.66757905e-01 ... -6.08493015e-02 -3.30736339e-02 -3.44049186e-01] [ 4.14763838e-01 3.73793066e-01 -3.85181367e-01 ... 3.33750546e-01 -2.43259341e-01 3.28209639e-01]] [[ 1.49219766e-01 2.34309047e-01 1.59533285e-02 ... 
-6.73083309e-03 2.85194159e-01 1.96966499e-01] [ 4.41361845e-01 -4.14578244e-02 1.57715663e-01 ... -8.22761580e-02 1.03325263e-01 -1.21333517e-01] [ 4.11653608e-01 8.88601094e-02 3.52011740e-01 ... 1.59605190e-01 2.56073475e-01 -1.05865195e-01] ... [ 7.03493878e-02 3.52575868e-01 1.96252912e-01 ... -7.33084023e-01 2.01939613e-01 -3.88666034e-01] [ 1.02793068e-01 -3.64237010e-01 -2.90848881e-01 ... 1.51615605e-01 1.05505630e-01 -4.22902882e-01] [ 8.44947547e-02 3.23426694e-01 -7.62381256e-02 ... 1.73488081e-01 1.69479907e-01 -3.99785548e-01]] [[ 5.46875417e-01 1.21523011e+00 9.25577521e-01 ... -2.23522797e-01 -9.34693038e-01 6.04804233e-02] [ 4.06792164e-01 1.03830898e+00 1.52781337e-01 ... -8.14782143e-01 1.04973130e-02 -1.04825866e+00] [-5.52902281e-01 -4.18179303e-01 -1.21410966e-01 ... 6.21827662e-01 3.32414120e-01 8.28510165e-01] ... [ 1.04103208e+00 4.18975860e-01 6.77109212e-02 ... 6.11452937e-01 4.97463554e-01 9.20633376e-01] [ 4.46046263e-01 6.25838459e-01 4.73952293e-01 ... 1.07210591e-01 4.65454459e-01 9.26208436e-01] [-1.10306656e+00 5.00424325e-01 1.02758920e+00 ... 6.66818023e-01 -4.84449953e-01 6.25329316e-01]] [[ 4.35378850e-01 -1.11231780e+00 8.99843931e-01 ... -1.87738180e+00 -1.41939056e+00 -1.95474207e+00] [ 9.78213668e-01 -2.21303701e-01 2.00124121e+00 ... -9.33296144e-01 1.53091109e+00 9.47186887e-01] [ 1.35859025e+00 -2.10541797e+00 2.05889463e-01 ... -1.24970412e+00 3.80540490e-01 -5.24584413e-01] ... [ 4.15367872e-01 -1.16208768e+00 2.23073816e+00 ... -1.13531089e+00 -4.65175629e-01 2.72471356e+00] [ 1.54967737e+00 -4.18487154e-02 -2.44474769e+00 ... 1.23408270e+00 -3.12761694e-01 2.96110320e+00] [ 7.70880699e-01 -1.38206518e+00 -1.55652964e+00 ... 3.74872953e-01 -1.06154278e-01 1.60242867e+00]]] [[[ 6.38662696e-01 -6.23044908e-01 -1.82977289e-01 ... 5.85335732e-01 -1.48577869e-01 4.35426593e-01] [-1.01459885e+00 7.72588670e-01 -5.53083301e-01 ... 
2.04570964e-01 -3.85024011e-01 1.80475557e+00] [ 4.45526689e-01 1.78335533e-01 4.70990151e-01 ... -1.35797155e+00 -4.47783142e-01 -1.06943238e+00] ... [-2.36043930e-01 1.66942164e-01 1.97241247e-01 ... -3.40156496e-01 8.59786630e-01 -5.09940460e-02] [-5.36393881e-01 1.59511968e-01 -2.81729698e-01 ... -6.17395163e-01 -3.07698369e-01 -3.94021183e-01] [-3.92530626e-03 3.92219871e-01 -8.87511950e-03 ... -8.26086998e-02 -3.70207191e-01 -6.39986217e-01]] [[-4.27740246e-01 -1.91039932e+00 1.28884411e+00 ... 2.28821430e-02 9.75991070e-01 1.06877148e+00] [-2.71603286e-01 -9.05744135e-01 -9.46118414e-01 ... 1.04831612e+00 -9.52506900e-01 1.04943514e+00] [-9.28307533e-01 -1.01916468e+00 1.00277627e+00 ... 3.10281801e+00 1.76739469e-01 6.38596296e-01] ... [ 7.42040873e-01 -6.23115182e-01 8.97769213e-01 ... 9.73532617e-01 1.05133951e+00 -1.83024943e-01] [-6.27025142e-02 -9.98975754e-01 -1.19274950e+00 ... 3.92550588e-01 1.45523119e+00 -9.31285858e-01] [-1.23609960e+00 3.53456467e-01 -1.04393768e+00 ... 5.16539931e-01 -1.49539661e+00 -7.78814971e-01]] [[-3.86570357e-02 9.04683694e-02 -3.92051071e-01 ... 5.17508686e-01 2.78815418e-01 7.88725317e-02] [-3.86505090e-02 1.40969723e-01 -2.47181207e-01 ... -2.95745254e-01 -4.73688185e-01 -4.30094481e-01] [-4.94501479e-02 3.53523254e-01 4.69382852e-01 ... -4.65288401e-01 1.20993160e-01 -2.12010249e-01] ... [-6.51360899e-02 -5.01299620e-01 9.85428810e-01 ... -1.37610495e-01 4.75994289e-01 -3.17721516e-01] [-1.99147880e-01 -3.61242257e-02 6.98528020e-03 ... -1.28933594e-01 1.27584264e-01 -6.97489023e-01] [-4.63900447e-01 3.53524387e-01 4.46991511e-02 ... 4.90652233e-01 -3.90416607e-02 -4.33079712e-02]] [[-2.30666902e-02 8.15941274e-01 5.48081577e-01 ... 5.07714867e-04 8.40029567e-02 -4.98051047e-01] [ 1.83661282e-01 -5.55466004e-02 2.05823392e-01 ... -9.75047350e-02 -1.45296022e-01 -2.25360304e-01] [ 4.93694305e-01 6.79529682e-02 8.82147625e-03 ... -1.41491294e-01 3.51721227e-01 -4.08166349e-01] ... 
[-2.72084773e-01 -3.52838486e-01 -8.52193311e-02 ... 1.12098254e-01 1.05578423e-01 -4.82628375e-01] [ 6.39174953e-02 1.56475857e-01 -2.81919360e-01 ... 1.93574920e-01 -3.61020327e-01 1.26853570e-01] [-1.65846258e-01 2.07266631e-03 -3.01044762e-01 ... -1.95221454e-01 -2.43519753e-01 9.25294697e-01]] [[ 2.85499185e-01 -2.68356293e-01 9.08652604e-01 ... -1.57655671e-01 7.93536663e-01 2.43629813e-01] [ 1.70708522e-01 2.30330139e-01 -1.63042694e-01 ... -6.70434296e-01 -2.33201936e-01 -4.04427588e-01] [-2.38815874e-01 -7.55534410e-01 1.25255615e-01 ... -3.03200155e-01 2.97623605e-01 -1.20243885e-01] ... [ 8.72729838e-01 -9.46772099e-03 -8.49619150e-01 ... 5.94714563e-03 2.57626951e-01 -3.70235927e-02] [ 5.56138456e-01 4.55728382e-01 1.93116978e-01 ... 2.57158935e-01 -1.73823059e-01 1.65245223e+00] [-8.77080917e-01 -6.44801557e-01 1.03257686e-01 ... 2.06407413e-01 4.44647372e-01 -3.30433518e-01]] [[ 9.34165344e-02 9.32241619e-01 -4.34725583e-01 ... 5.94196558e-01 -3.69110137e-01 1.34165049e+00] [-1.53696334e+00 -1.62271595e+00 -1.27946031e+00 ... 7.59759605e-01 -9.71535265e-01 1.60008228e+00] [ 6.85074389e-01 9.49220538e-01 -4.79703039e-01 ... 1.33471143e+00 -1.97946382e+00 5.44673979e-01] ... [-1.96760833e+00 -6.61240876e-01 -1.90397358e+00 ... -1.10369694e+00 9.79258120e-01 -6.69648349e-01] [ 4.34534788e-01 -9.28923547e-01 6.04970098e-01 ... 3.81077886e-01 2.13475561e+00 1.31520236e+00] [-9.00297105e-01 6.93641484e-01 -1.55898854e-01 ... 1.68499696e+00 1.60930492e-02 -5.64548731e-01]]] ... [[[ 9.26630944e-02 -2.58803934e-01 -3.81640851e-01 ... 3.71313721e-01 2.42976621e-01 1.08309698e+00] [ 9.97201443e-01 -1.16858669e-01 -3.73547733e-01 ... 5.40647447e-01 -1.04331481e+00 -1.19596314e+00] [ 4.07208681e-01 -5.82835317e-01 5.27023256e-01 ... -1.22485101e+00 -2.91901767e-01 -2.16310844e-01] ... [-1.11143529e+00 -9.17073488e-01 -5.48823655e-01 ... 3.66943747e-01 7.32095659e-01 7.59916306e-01] [ 1.38816044e-01 3.08080584e-01 -6.71336114e-01 ... 
5.25468066e-02 -1.06413007e+00 -3.00012559e-01] [-1.08127318e-01 4.17421967e-01 -1.35758489e-01 ... 1.01850951e+00 -2.42964715e-01 1.74283102e-01]] [[-1.07385683e+00 7.92355716e-01 -1.77030122e+00 ... 1.24756441e-01 8.15058768e-01 1.96025038e+00] [ 2.40685850e-01 9.67227995e-01 -1.05201578e+00 ... -1.38785362e+00 2.24382386e-01 1.26169562e+00] [-7.68560171e-01 -4.00396854e-01 -8.59436274e-01 ... 1.00458670e+00 3.98616344e-01 -1.04428256e+00] ... [-1.22839117e+00 1.27106810e+00 1.45885384e+00 ... -9.67837512e-01 3.43779325e-01 7.19224870e-01] [-6.59949720e-01 2.68784851e-01 -1.23526728e+00 ... -6.83949813e-02 -2.39064634e-01 9.07074809e-01] [ 1.98824480e-01 1.33317947e+00 -2.24286580e+00 ... 5.82076013e-01 1.32670748e+00 2.32427604e-02]] [[ 5.85248955e-02 4.42714036e-01 -1.59236327e-01 ... -1.02194738e+00 -3.38576913e-01 -2.08563939e-01] [ 5.13847992e-02 3.16056401e-01 4.32008058e-01 ... 2.47981533e-01 4.26761717e-01 -3.15023988e-01] [ 3.96399021e-01 3.82921606e-01 4.34715331e-01 ... -1.22142389e-01 -1.43368179e-02 4.82714444e-01] ... [ 7.95983374e-02 -4.07346897e-02 1.36189505e-01 ... 1.63186342e-01 5.95686734e-02 -8.56136084e-02] [ 3.49799663e-01 2.34559000e-01 -2.43996426e-01 ... 3.99427004e-02 1.34432271e-01 -4.76314247e-01] [ 1.45560607e-01 6.38020098e-01 -1.34229317e-01 ... -4.52283829e-01 -3.83489996e-01 6.71780631e-02]] [[ 3.22608441e-01 -5.34500599e-01 1.09695904e-01 ... 4.90766801e-02 1.63553692e-02 -1.03198677e-01] [ 1.66819587e-01 -2.11865455e-01 2.99425989e-01 ... -3.39846075e-01 -7.02236474e-01 7.79662430e-02] [ 2.41939113e-01 3.57957989e-01 -4.29331452e-01 ... -1.00887321e-01 5.40972412e-01 -2.04389673e-02] ... [ 1.21424623e-01 1.58987045e-01 1.05247550e-01 ... 1.83616608e-01 9.94069055e-02 -1.28329366e-01] [-3.86296600e-01 4.19242293e-01 -2.70971805e-01 ... 1.24134146e-01 1.45162076e-01 -2.88138121e-01] [-7.12971315e-02 -8.92262384e-02 -3.91855687e-01 ... 
-1.56288251e-01 -1.33020252e-01 4.46711093e-01]] [[-8.62500548e-01 -1.10721715e-01 1.26005113e+00 ... -7.63611674e-01 1.32040292e-01 1.90845624e-01] [-6.96125269e-01 2.34615892e-01 -1.23828121e-01 ... 5.86762607e-01 -3.78809869e-01 3.59144688e-01] [-5.36240995e-01 3.20797741e-01 -5.01835108e-01 ... 4.14913118e-01 7.33147264e-01 2.09939614e-01] ... [ 2.45305359e-01 -9.40435566e-03 -6.18949056e-01 ... -1.20150554e+00 -1.96302369e-01 -3.88130009e-01] [ 2.10847959e-01 -1.09935308e+00 7.39334598e-02 ... -6.33442044e-01 -8.48888099e-01 3.95704657e-01] [-5.29663146e-01 4.71958891e-02 -1.13694870e+00 ... -1.10030770e+00 1.65803701e-01 -2.20286354e-01]] [[ 1.51259828e+00 1.01730335e+00 2.22578973e-01 ... -8.14471722e-01 -6.21165991e-01 4.11165744e-01] [-7.39018440e-01 -5.88328958e-01 1.36809754e+00 ... -1.24644339e+00 1.16550434e+00 1.02326155e+00] [ 3.96353984e+00 -1.93609536e+00 -1.13774896e+00 ... 1.71436369e+00 1.48461789e-01 1.18345654e+00] ... [-1.08535970e-02 -1.29394817e+00 -2.36747193e+00 ... 1.61632240e+00 -4.03702259e-01 -3.22984993e-01] [-6.80437684e-02 6.88638270e-01 -5.93243122e-01 ... -1.97206855e+00 8.12564850e-01 -9.70661283e-01] [ 7.48406053e-01 -7.69299567e-01 6.54631913e-01 ... 1.32435203e+00 -1.84465423e-01 4.41925406e-01]]] [[[-8.44096780e-01 6.08217418e-01 1.48822641e+00 ... -8.21867526e-01 -6.83340847e-01 -2.53385812e-01] [ 9.99890804e-01 -4.69103783e-01 -1.00623108e-01 ... 1.07570561e-02 1.16115630e-01 -5.55382133e-01] [ 7.68776000e-01 -2.33374760e-02 1.36242822e-01 ... 5.58973253e-01 -4.08852488e-01 -4.38090891e-01] ... [-1.99786440e-01 6.43068850e-01 -9.76280451e-01 ... 7.13499069e-01 7.29897022e-01 -5.05812727e-02] [ 8.33879948e-01 -6.08703315e-01 7.13993981e-02 ... 9.81911004e-01 4.48184371e-01 3.68654341e-01] [-4.89067942e-01 4.20901835e-01 3.69638592e-01 ... 1.02821195e+00 -5.01025021e-01 -1.35907605e-01]] [[ 1.40783235e-01 -4.82624143e-01 -2.58886743e+00 ... 
-4.25335824e-01 -3.51922452e-01 -1.93924975e+00] [-1.08400309e+00 1.97212070e-01 -3.94102514e-01 ... -1.48535267e-01 5.32714128e-01 -1.97010660e+00] [ 1.23996770e+00 -1.05516446e+00 4.38055396e-01 ... -2.33553857e-01 -5.57612002e-01 -2.34225798e+00] ... [ 6.56468987e-01 1.10585976e+00 -4.23080921e-01 ... 1.54144371e+00 6.40402317e-01 6.26773298e-01] [ 7.08922029e-01 -4.09484357e-02 1.23762059e+00 ... 4.26642060e-01 1.12376772e-01 2.49683785e+00] [-8.03806111e-02 3.86595219e-01 -4.35704738e-01 ... -1.26139891e+00 -4.33248401e-01 -9.09356236e-01]] [[ 1.81840464e-01 -1.70813754e-01 1.54009476e-01 ... -3.71070504e-01 1.82730004e-01 5.85481644e-01] [-4.30081636e-01 7.48690665e-01 1.05382502e-01 ... 8.77889320e-02 -6.42564595e-02 1.66457847e-01] [ 8.41405332e-01 -1.75947234e-01 -4.09847826e-01 ... 1.64705336e-01 7.67986000e-01 -4.48713809e-01] ... [ 5.42682528e-01 -4.24909405e-02 5.00696540e-01 ... -1.40829816e-01 7.01053515e-02 -1.46756500e-01] [ 5.01064122e-01 5.08572042e-01 1.60834119e-01 ... -2.31591433e-01 4.24345769e-02 4.60745186e-01] [-1.04035296e-01 4.65403229e-01 4.15564626e-01 ... -3.39972019e-01 3.42390910e-02 3.11020046e-01]] [[-1.01949155e-01 -2.58011788e-01 -3.23489666e-01 ... -3.63508493e-01 -2.62804389e-01 4.42029208e-01] [-2.93696616e-02 2.09961340e-01 3.25729519e-01 ... 1.75760850e-01 3.74288291e-01 4.35363084e-01] [ 8.87598246e-02 -2.29205582e-02 2.14845866e-01 ... -1.57730386e-01 -3.09812605e-01 -3.89935225e-01] ... [-2.36365169e-01 -5.23684174e-02 -4.99616042e-02 ... -7.15451613e-02 -7.41170794e-02 -1.97430179e-01] [ 4.66514081e-02 2.88792849e-01 -3.17826569e-01 ... -4.36174899e-01 -2.83219546e-01 3.04786116e-01] [ 1.66061118e-01 -1.03892507e-02 5.65876305e-01 ... 6.02539539e-01 -8.01596791e-03 2.88561642e-01]] [[ 4.56682056e-01 -1.55511588e-01 8.50462437e-01 ... -4.79261205e-02 -1.14329684e+00 7.34363198e-01] [-1.22238100e-01 9.74313736e-01 -5.51417708e-01 ... 
-1.15339041e+00 -6.35114789e-01 -1.39293289e+00] [ 5.26576042e-01 -3.79106820e-01 6.70550525e-01 ... -5.91862679e-01 1.19163975e-01 1.81294248e-01] ... [ 5.34622192e-01 5.36598325e-01 4.43896919e-01 ... 7.44081378e-01 1.58899993e-01 -2.16723278e-01] [-1.24828172e+00 -4.66184586e-01 -1.77579060e-01 ... 2.92615950e-01 2.39857197e-01 -1.70290858e-01] [ 8.44562113e-01 4.15625304e-01 -1.94044471e-01 ... -1.43259972e-01 4.77626473e-01 -9.29164514e-02]] [[ 2.85166454e+00 -1.88505918e-01 1.42872989e+00 ... -2.72762090e-01 -2.07234934e-01 -6.05260551e-01] [-1.03828721e-01 -2.19152451e-01 2.02744222e+00 ... -4.58303571e-01 -2.15836450e-01 9.78047073e-01] [ 3.02731776e+00 -7.00643897e-01 -7.55650759e-01 ... 2.01107502e-01 3.29120100e-01 2.76127434e+00] ... [ 1.21366799e+00 2.83288527e+00 7.80127466e-01 ... 5.26646554e-01 -2.50046730e-01 1.05264843e+00] [ 4.98673856e-01 8.55935276e-01 9.15231228e-01 ... 3.20571035e-01 -2.99107820e-01 1.34429753e+00] [-1.67914379e+00 -1.89613724e+00 2.41421628e+00 ... -2.44794652e-01 1.48340479e-01 -3.02479124e+00]]] [[[-1.17212558e+00 3.77139091e-01 -8.94643784e-01 ... -6.05112135e-01 -1.62252530e-01 -4.34078783e-01] [ 1.11758912e+00 4.24061626e-01 5.67713737e-01 ... -7.06611633e-01 4.25248265e-01 1.11789298e+00] [ 9.30582702e-01 3.65151316e-01 -3.59367132e-01 ... -7.31354773e-01 -3.24518263e-01 -1.48301991e-02] ... [-6.98907495e-01 -2.15841144e-01 -4.58960086e-01 ... -1.18550457e-01 3.39808017e-01 2.44389042e-01] [-7.40723982e-02 -6.69122458e-01 1.45463562e+00 ... 1.05906978e-01 2.42002577e-01 -1.08613896e+00] [-2.10621655e-01 -5.02868414e-01 1.77653924e-01 ... 1.20387757e+00 4.31094289e-01 2.46977404e-01]] [[-1.92528105e+00 -9.48611617e-01 6.22288704e-01 ... -6.96327686e-01 1.42218792e+00 1.22075391e+00] [ 6.32852495e-01 1.23557663e+00 8.98529589e-01 ... 1.89877599e-01 -5.11611044e-01 4.14124370e-01] [ 1.70865655e+00 1.74638927e+00 4.14033651e-01 ... -3.09759587e-01 1.51995122e+00 8.23544860e-01] ... 
[ 1.90399337e+00 1.53416562e+00 -2.01829100e+00 ... 4.91908967e-01 -2.44203234e+00 9.04500663e-01] [-1.29670870e+00 7.02902749e-02 1.13891971e+00 ... 1.92798793e+00 1.05979943e+00 -4.47211832e-01] [-8.52569997e-01 3.46873760e-01 -1.13004446e-01 ... -1.12146771e+00 -2.53986359e-01 8.08922172e-01]] [[-3.80215406e-01 4.14379716e-01 -2.86442697e-01 ... -3.70231271e-01 6.48817644e-02 1.50379047e-01] [-1.04094250e-02 -3.03100725e-03 -2.50479668e-01 ... 6.30645372e-04 -1.67299852e-01 -8.96715760e-01] [-4.05242532e-01 1.17628254e-01 2.65063345e-01 ... -1.01561882e-01 -1.91224694e-01 1.73966929e-01] ... [-4.05485243e-01 5.23087800e-01 -4.03815769e-02 ... -3.42442840e-01 -1.80850804e-01 1.52084127e-01] [-4.65664655e-01 -1.88865274e-01 4.22711372e-01 ... 1.92405626e-01 -2.21874952e-01 1.13737069e-01] [-1.61365062e-01 2.41134733e-01 5.38736917e-02 ... 1.55816674e-01 -1.23570003e-02 -1.45788297e-01]] [[-6.12884061e-03 8.83646682e-02 4.16966051e-01 ... 1.29954517e-01 7.89716542e-02 4.03209031e-01] [-1.33855060e-01 -2.17624515e-01 -1.82491645e-01 ... -3.06300521e-01 4.39644486e-01 1.82729423e-01] [-4.58506852e-01 6.88491315e-02 2.28367910e-01 ... 3.13270181e-01 -6.00160174e-02 2.29223192e-01] ... [-2.33541355e-01 1.22935008e-02 -9.95224565e-02 ... -1.55305728e-01 2.86759943e-01 6.70678973e-01] [-1.81457028e-01 2.13323325e-01 2.40639150e-02 ... -4.16975655e-03 -1.66156650e-01 -5.76111257e-01] [-2.92418778e-01 -1.93180218e-01 -6.80057034e-02 ... -1.73767835e-01 1.55066371e-01 3.21257919e-01]] [[-2.55800754e-01 6.62265658e-01 -6.10612258e-02 ... 2.47647718e-01 -5.48580326e-02 3.07587087e-01] [-4.72015530e-01 9.11419451e-01 2.57700443e-01 ... 1.27883589e+00 -5.62521219e-01 6.18251860e-01] [ 4.33332831e-01 8.24608743e-01 -2.00079262e-01 ... 6.08876586e-01 6.37882531e-01 -4.61009741e-01] ... [-1.34116685e+00 5.49060345e-01 -1.65217027e-01 ... -4.73868072e-01 9.81631935e-01 -3.93434137e-01] [ 5.14165282e-01 1.79190069e-01 1.26367405e-01 ... 
2.14030847e-01 1.80342585e-01 -6.10525072e-01] [-1.70484900e-01 1.70272633e-01 8.50767255e-01 ... -2.16677979e-01 -2.62141258e-01 -2.90395677e-01]] [[ 1.13109601e+00 -9.96218264e-01 -7.30423331e-02 ... 8.22715580e-01 -1.88636148e+00 -9.08459246e-01] [ 2.18917727e+00 2.72776985e+00 -8.08476806e-01 ... 7.03489125e-01 9.69654500e-01 2.12136579e+00] [ 1.30678844e+00 -4.89632994e-01 2.30238700e+00 ... -1.00285843e-01 1.38185334e+00 1.45074677e+00] ... [-6.49178088e-01 2.66969472e-01 -2.21017408e+00 ... -1.36743283e+00 7.91130483e-01 -5.09209633e-01] [ 2.43358240e-01 -1.47207749e+00 1.42272365e+00 ... -1.59402579e-01 9.93883848e-01 -6.67652935e-02] [-8.35062087e-01 -2.45513275e-01 -5.07919312e-01 ... 1.37340021e+00 5.41583061e-01 -6.85694888e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 4} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': True, 'eps': 0.0} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- 4 graph(%self : __torch__.test_group_norm.___torch_mangle_4610.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : NoneType = prim::Constant() %4 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %5 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=0.]() %self.bias : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.8127 -1.1413 -0.8992 -2.6230 0.0897 -1.5172 [ CPUFloatType{6} ]]() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.2827 -1.4408 -0.7109 0.0804 -1.1680 1.1594 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, %self.n_groups, %3, %5) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) fw_re: [[[[-1.81183660e+00 -1.73919165e+00 -1.66485965e+00 ... 
-2.18304420e+00 -1.83031380e+00 -2.03803492e+00] [-2.07950687e+00 -1.26102471e+00 -1.67746890e+00 ... -2.27760029e+00 -1.26447248e+00 -1.78657579e+00] [-1.84333074e+00 -2.04593825e+00 -1.76271343e+00 ... -1.95110977e+00 -1.80962551e+00 -1.64726746e+00] ... [-1.74488008e+00 -1.86134255e+00 -1.77730119e+00 ... -1.53522503e+00 -2.25917530e+00 -1.55910027e+00] [-1.80595684e+00 -1.90861750e+00 -2.01366973e+00 ... -1.56616819e+00 -1.81318808e+00 -1.72524023e+00] [-1.23247385e+00 -2.09065294e+00 -1.65013456e+00 ... -1.26398921e+00 -2.05500817e+00 -1.68280029e+00]] [[-1.77438033e+00 -2.44082808e+00 6.37150526e-01 ... -1.49005091e+00 -3.87223864e+00 -2.40118289e+00] [-1.81799412e+00 -2.11542562e-01 1.48445513e-04 ... -2.84189057e+00 -2.17676330e+00 -1.77741122e+00] [-2.41641641e+00 -1.76818645e+00 -6.14315450e-01 ... -3.74813390e+00 1.03527689e+00 -1.18634605e+00] ... [-1.38883603e+00 1.02511966e+00 -1.93166196e+00 ... -1.32137895e+00 -2.50947833e+00 -2.52071762e+00] [-1.61984074e+00 -3.72797275e+00 4.20306832e-01 ... -1.70355999e+00 -3.23094225e+00 -1.07902169e+00] [-1.01685643e+00 1.54504120e+00 1.47509605e-01 ... -8.39513361e-01 -1.29825521e+00 -1.26719904e+00]] [[-3.87051046e-01 3.51495057e-01 -1.84183919e+00 ... -1.02175665e+00 -9.34687257e-01 -1.75259757e+00] [-9.08369482e-01 -4.82103944e-01 -8.44259441e-01 ... 5.62045462e-02 -1.85800231e+00 -1.44341886e+00] [-6.45134509e-01 -2.72023473e-02 -3.87148649e-01 ... -1.44206607e+00 -8.41478229e-01 -1.30373049e+00] ... [-1.01552093e+00 -1.49392354e+00 -2.21599583e-02 ... -5.25243104e-01 8.75285268e-03 -6.97273374e-01] [-2.93251729e+00 -1.47830522e+00 -1.21007776e+00 ... -8.53755891e-01 -1.35152137e+00 -2.04974294e+00] [-1.58523095e+00 -8.12824488e-01 -2.52551079e-01 ... -5.87666869e-01 -1.02129662e+00 9.07510817e-02]] [[-2.57021427e+00 -2.60784936e+00 -2.64864993e+00 ... -2.61062646e+00 -2.55270147e+00 -2.53331041e+00] [-2.58194327e+00 -2.63886476e+00 -2.71675420e+00 ... 
-2.54645181e+00 -2.68975759e+00 -2.68855882e+00] [-2.75221705e+00 -2.60924697e+00 -2.59981418e+00 ... -2.68779421e+00 -2.57736373e+00 -2.61816001e+00] ... [-2.58069324e+00 -2.67594886e+00 -2.67746425e+00 ... -2.53740120e+00 -2.58058286e+00 -2.62913942e+00] [-2.61336422e+00 -2.64774513e+00 -2.50074482e+00 ... -2.57632303e+00 -2.55523443e+00 -2.54700017e+00] [-2.65822721e+00 -2.59056902e+00 -2.55988503e+00 ... -2.66781330e+00 -2.58375025e+00 -2.59381413e+00]] [[-2.69902855e-01 4.00566190e-01 -9.56328750e-01 ... 1.12521753e-01 6.79307640e-01 -1.17104602e+00] [ 7.64775932e-01 -6.46411896e-01 2.73542464e-01 ... -4.87901241e-01 4.23823059e-01 -1.96322310e+00] [ 8.47269893e-01 1.54014146e+00 -2.30342317e+00 ... 8.06870282e-01 9.80014443e-01 -5.50190866e-01] ... [ 7.23099113e-01 7.31457710e-01 1.46047860e-01 ... 6.43823266e-01 7.24327803e-01 -1.80592513e+00] [ 1.69336534e+00 -6.92600310e-01 1.69396865e+00 ... 1.87250435e+00 -1.01115751e+00 -4.13830191e-01] [-1.51969004e+00 1.13675880e+00 1.27710080e+00 ... -1.21913135e+00 -2.00541571e-01 -6.69584334e-01]] [[-5.85091174e-01 -5.47325850e-01 -1.35578918e+00 ... -1.18692029e+00 -4.00379658e+00 -7.15209305e-01] [-2.79463267e+00 -3.43638563e+00 -2.08842778e+00 ... -3.03543836e-01 1.94568396e+00 -1.32478797e+00] [-1.88154674e+00 -9.27809775e-01 -1.18124712e+00 ... 1.10310398e-01 -6.87639415e-01 -8.89587700e-01] ... [-1.79897797e+00 -1.74418223e+00 -2.66147232e+00 ... -2.38226390e+00 -1.46913838e+00 -2.72536755e+00] [-3.54449177e+00 -2.95621586e+00 8.82828653e-01 ... -1.37864459e+00 -1.90067935e+00 -1.28983748e+00] [-1.75693738e+00 -1.71404839e+00 -3.13151240e+00 ... -8.95315781e-02 2.90777028e-01 -1.64683533e+00]]] [[[-2.42763281e+00 -1.90765297e+00 -1.53835654e+00 ... -1.26075411e+00 -1.46505070e+00 -2.25985122e+00] [-1.82546806e+00 -2.01815867e+00 -2.07858109e+00 ... -1.24113417e+00 -1.44886494e+00 -2.26458001e+00] [-1.74672818e+00 -1.67464173e+00 -1.95994580e+00 ... -1.72707570e+00 -1.71391022e+00 -1.55962884e+00] ... 
[-2.09243155e+00 -2.05233121e+00 -2.07297349e+00 ... -1.41601217e+00 -2.29477382e+00 -2.29201388e+00] [-2.15471148e+00 -1.69405890e+00 -1.55874777e+00 ... -1.62552893e+00 -1.69127166e+00 -2.24587727e+00] [-1.99791539e+00 -1.90974772e+00 -1.78546989e+00 ... -2.11001325e+00 -1.92056537e+00 -1.51682925e+00]] [[-3.72630429e+00 -2.21479678e+00 -1.93294510e-01 ... -1.67943895e-01 -2.58538276e-01 -9.15787756e-01] [-2.56104302e+00 3.64418060e-01 -2.63214803e+00 ... 7.46222019e-01 1.88835394e+00 -2.12213039e+00] [-3.01758623e+00 -1.96982741e+00 -9.76569891e-01 ... -1.11745441e+00 9.90761578e-01 -1.41453433e+00] ... [-2.36452579e+00 -1.17849380e-01 -1.81609488e+00 ... -4.14711571e+00 -1.27739146e-01 -3.68772388e+00] [-7.77706742e-01 1.44659233e+00 -1.27952516e+00 ... 1.86802590e+00 -9.25355792e-01 1.22643642e-01] [-2.09934711e+00 3.44607592e-01 -9.69094038e-01 ... -1.59569931e+00 -1.78769088e+00 -5.70665896e-01]] [[-1.53515339e+00 -1.09660876e+00 -8.37938309e-01 ... -4.83877957e-01 -9.91289794e-01 1.15835965e+00] [-1.54252505e+00 -1.70498192e+00 -3.37162584e-01 ... -1.65308073e-01 -1.90904009e+00 -6.49486065e-01] [-1.41876566e+00 2.10177124e-01 -1.17037702e+00 ... -1.30644405e+00 -1.19818175e+00 -2.14024210e+00] ... [-7.60173619e-01 -4.70748037e-01 -8.99597585e-01 ... -1.81858099e+00 -3.68195802e-01 -1.41714931e+00] [-1.13098395e+00 -7.85920024e-03 -6.07480049e-01 ... -1.41185331e+00 -1.58435476e+00 -1.47467852e+00] [ 9.90214765e-01 -2.37053967e+00 -2.30846500e+00 ... -2.99815740e-02 -5.52327573e-01 -5.99827707e-01]] [[-2.56743336e+00 -2.64217138e+00 -2.63698888e+00 ... -2.69521284e+00 -2.61274219e+00 -2.64139748e+00] [-2.65306926e+00 -2.55355358e+00 -2.71128249e+00 ... -2.70225596e+00 -2.66358423e+00 -2.58741546e+00] [-2.48892212e+00 -2.52761173e+00 -2.55717850e+00 ... -2.80055356e+00 -2.65758896e+00 -2.43100095e+00] ... [-2.59787846e+00 -2.66640210e+00 -2.62561989e+00 ... -2.61965871e+00 -2.57436419e+00 -2.59074521e+00] [-2.63183093e+00 -2.61718249e+00 -2.66646171e+00 ... 
-2.68691730e+00 -2.72690392e+00 -2.68021059e+00] [-2.55278230e+00 -2.46380615e+00 -2.55659533e+00 ... -2.58673668e+00 -2.72678709e+00 -2.51587367e+00]] [[-4.75470394e-01 5.74015677e-01 1.67279458e+00 ... -1.06536996e+00 3.20655972e-01 1.72488713e+00] [ 8.68479729e-01 -1.63223267e+00 -1.57102013e+00 ... 1.20728612e+00 -2.76738644e-01 -1.80655193e+00] [-1.30376685e+00 2.03191829e+00 3.53710562e-01 ... 7.21633434e-01 -4.91673142e-01 -4.60212320e-01] ... [ 5.30384362e-01 1.84719563e+00 -4.48508784e-02 ... -3.60470980e-01 3.21432590e-01 1.28311384e+00] [-9.61441040e-01 2.68751502e-01 1.11816847e+00 ... -1.39099807e-01 1.01476061e+00 1.73624587e+00] [ 4.72135037e-01 9.66494381e-01 -6.59198463e-01 ... -4.50812042e-01 1.98920286e+00 1.41766548e+00]] [[ 8.67284715e-01 -2.23318696e+00 -2.88416004e+00 ... -1.43659651e-01 -9.69217896e-01 -3.26247931e+00] [-2.19852638e+00 -1.59182096e+00 -1.74295497e+00 ... -1.53893197e+00 8.27028632e-01 3.73065501e-01] [-1.85414696e+00 -1.91924965e+00 -1.50092375e+00 ... -2.05949521e+00 1.09460938e+00 -5.64500511e-01] ... [-3.83781195e+00 2.23131597e-01 -2.74848056e+00 ... -1.72174215e+00 -1.35973132e+00 -7.13219702e-01] [-8.93190324e-01 -2.27882004e+00 -1.05816138e+00 ... -4.23219681e+00 -3.32999420e+00 -4.14128661e-01] [-2.00720239e+00 -1.43582332e+00 -1.43975759e+00 ... -1.43222642e+00 -2.40409589e+00 -3.55668855e+00]]] [[[-1.83452046e+00 -1.63264763e+00 -1.88472283e+00 ... -2.01246595e+00 -1.58876717e+00 -1.47475672e+00] [-1.47348154e+00 -1.13969100e+00 -1.31543338e+00 ... -1.63550913e+00 -1.93737411e+00 -1.55729496e+00] [-1.92518067e+00 -1.91547823e+00 -1.75226021e+00 ... -2.17465973e+00 -1.71788442e+00 -1.52981627e+00] ... [-1.77451551e+00 -1.87578070e+00 -1.69996119e+00 ... -1.91927814e+00 -1.88670003e+00 -2.23064375e+00] [-1.45240402e+00 -1.92295146e+00 -1.62994266e+00 ... -1.76421797e+00 -2.01740074e+00 -1.92767882e+00] [-1.83334851e+00 -1.77721035e+00 -2.23374057e+00 ... 
-1.59762251e+00 -2.23348045e+00 -2.02389550e+00]] [[-8.99857998e-01 8.93254802e-02 -3.51665653e-02 ... -9.05028403e-01 -8.88175905e-01 -1.25492954e+00] [-1.42165065e+00 -3.03435636e+00 1.57089293e-01 ... -1.54590356e+00 6.12007737e-01 1.25699580e-01] [-1.42417920e+00 -3.20445609e+00 5.51835954e-01 ... -7.36284316e-01 -3.21868682e+00 -6.80525184e-01] ... [-1.72526884e+00 -2.50998807e+00 2.90225089e-01 ... -3.91392499e-01 -9.20096576e-01 1.84475708e+00] [-4.19041157e-01 -2.06988957e-02 -2.38638186e+00 ... -6.31884336e-01 -1.22126997e+00 1.43075848e+00] [ 9.04379904e-01 9.05795097e-01 -3.32947016e+00 ... -3.50529528e+00 -4.38332891e+00 -3.59919429e+00]] [[-1.54545999e+00 2.84038577e-02 -4.78758395e-01 ... -1.00318575e+00 -7.80073702e-01 -1.36114776e+00] [-1.22534013e+00 -9.63402510e-01 -5.76954365e-01 ... -8.66216421e-01 -7.99917340e-01 -2.45786548e-01] [-1.70073354e+00 -3.05866599e-01 -1.20493984e+00 ... -1.96798146e+00 -1.15418673e+00 -4.82885808e-01] ... [-1.18908036e+00 -5.68025291e-01 -2.30428672e+00 ... -9.03898358e-01 -4.81284887e-01 -1.25810122e+00] [-4.22671914e-01 -3.71060103e-01 -5.46282411e-01 ... -2.66722202e+00 -3.00081193e-01 -5.63335001e-01] [-6.18406296e-01 -6.39632344e-01 -1.07246339e+00 ... 1.14436019e-02 -5.87017953e-01 -1.92663574e+00]] [[-2.62102532e+00 -2.59845638e+00 -2.62026048e+00 ... -2.66750646e+00 -2.55685949e+00 -2.50767422e+00] [-2.64798093e+00 -2.51004052e+00 -2.65395308e+00 ... -2.62754416e+00 -2.57642245e+00 -2.63749814e+00] [-2.77602863e+00 -2.54895759e+00 -2.47077012e+00 ... -2.62220573e+00 -2.65946531e+00 -2.58049703e+00] ... [-2.54209781e+00 -2.50066018e+00 -2.68725657e+00 ... -2.64284825e+00 -2.54762673e+00 -2.65180111e+00] [-2.59762955e+00 -2.60706520e+00 -2.60746312e+00 ... -2.55468035e+00 -2.65801835e+00 -2.62259579e+00] [-2.73112345e+00 -2.59422612e+00 -2.43728018e+00 ... -2.55203009e+00 -2.52974200e+00 -2.54965854e+00]] [[-1.69694722e+00 8.20408821e-01 2.19050908e+00 ... 
-4.65544701e-01 7.14291707e-02 1.27527475e+00] [ 1.46963108e+00 -8.81810486e-01 1.50918394e-01 ... 4.03726786e-01 6.53610528e-01 9.28641915e-01] [-3.83257240e-01 -4.77759093e-01 2.38870919e-01 ... 7.08938837e-01 5.50321698e-01 1.49522257e+00] ... [-1.06093764e+00 -5.68999827e-01 9.90823269e-01 ... 2.12393075e-01 1.14993930e+00 2.23593563e-01] [ 6.97710693e-01 -1.79266080e-01 1.07750642e+00 ... -6.37567341e-01 -2.04947925e+00 3.26415807e-01] [-2.09849477e+00 -8.55706751e-01 1.86733818e+00 ... 1.00051865e-01 1.98803127e-01 3.48009735e-01]] [[-2.76903892e+00 -1.66178334e+00 -1.42591119e+00 ... -1.41827345e+00 -2.56304812e+00 -1.28604293e+00] [-3.04907060e+00 -2.01419592e+00 -3.39400649e+00 ... -2.01031804e+00 -9.48737040e-02 -1.37918317e+00] [-2.61662126e+00 -1.05988097e+00 -2.50198770e+00 ... -1.44813764e+00 -1.76894939e+00 -2.99836683e+00] ... [-1.58841574e+00 -1.15727045e-01 -1.62509537e+00 ... 6.00823760e-01 -6.19284451e-01 -2.88438559e+00] [-1.31970847e+00 3.21115330e-02 -1.32500660e+00 ... -1.41033483e+00 -2.42438626e+00 -2.91266036e+00] [-4.41028214e+00 -2.26823831e+00 -1.41150129e+00 ... -2.27369213e+00 -2.35060525e+00 -2.11625814e+00]]] ... [[[-2.26563096e+00 -1.72129834e+00 -1.57066715e+00 ... -1.96050346e+00 -1.49065757e+00 -1.40623951e+00] [-1.94347084e+00 -2.20259714e+00 -2.37284827e+00 ... -2.02899742e+00 -1.76178658e+00 -1.54449904e+00] [-1.83458245e+00 -2.14212084e+00 -1.65349615e+00 ... -2.16237712e+00 -1.65909827e+00 -1.58756065e+00] ... [-1.78070092e+00 -1.45930326e+00 -1.45475698e+00 ... -1.50750899e+00 -2.01173520e+00 -1.44597137e+00] [-1.75800002e+00 -1.95313072e+00 -1.58423150e+00 ... -1.79818797e+00 -1.65342629e+00 -1.66107440e+00] [-1.78027678e+00 -2.13483667e+00 -2.24624157e+00 ... -2.02513146e+00 -1.46190345e+00 -1.34225798e+00]] [[-1.80283332e+00 -3.47503692e-01 -6.59584701e-01 ... -2.00407696e+00 -2.00176668e+00 -7.17069447e-01] [-1.12852380e-01 7.56855011e-01 -6.42369926e-01 ... 
-2.95935130e+00 2.42838457e-01 -1.62486756e+00] [-1.02501345e+00 1.85252690e+00 -1.39600837e+00 ... -1.27840102e+00 -2.08660102e+00 6.66633725e-01] ... [-1.35427940e+00 -3.38108563e+00 -1.33724499e+00 ... -2.62359643e+00 -2.55469942e+00 -8.87419701e-01] [-2.17449117e+00 -1.96571827e+00 1.38593352e+00 ... -2.46912837e+00 -3.27157211e+00 -1.39299214e+00] [-2.09913421e+00 -2.13204646e+00 3.07015747e-01 ... -8.69314075e-01 -4.06727934e+00 -1.46638381e+00]] [[-1.06196783e-01 -8.85143518e-01 -3.91606629e-01 ... -1.13283336e-01 -1.62422681e+00 -1.59183335e+00] [ 1.15422022e+00 -1.44297004e+00 3.06671411e-01 ... -1.29051641e-01 7.40051270e-01 -2.25666553e-01] [-1.02326703e+00 -5.25473118e-01 -4.37931031e-01 ... 1.70655191e-01 -9.99520957e-01 2.79917449e-01] ... [-2.00893736e+00 -2.11531782e+00 -1.63895345e+00 ... -7.94208407e-01 -1.43592310e+00 -7.46540844e-01] [-1.30920136e+00 -1.41856873e+00 -9.21350941e-02 ... -6.97013199e-01 2.20298748e-02 -2.05493593e+00] [-9.25321758e-01 -2.05729342e+00 -2.05414712e-01 ... -3.89230289e-02 -1.59463060e+00 -1.00322568e+00]] [[-2.57780170e+00 -2.70147252e+00 -2.61231971e+00 ... -2.83641100e+00 -2.69807720e+00 -2.69948983e+00] [-2.68605757e+00 -2.53286028e+00 -2.63646078e+00 ... -2.72779369e+00 -2.50755191e+00 -2.56357169e+00] [-2.66909146e+00 -2.65087271e+00 -2.72757363e+00 ... -2.49791145e+00 -2.62203431e+00 -2.64131069e+00] ... [-2.53720593e+00 -2.48687744e+00 -2.67569280e+00 ... -2.59154415e+00 -2.78637552e+00 -2.58655572e+00] [-2.63375282e+00 -2.66609287e+00 -2.52466416e+00 ... -2.62403393e+00 -2.51803327e+00 -2.66976285e+00] [-2.47904229e+00 -2.63113928e+00 -2.61847830e+00 ... -2.55141044e+00 -2.71959567e+00 -2.53255701e+00]] [[ 2.50806499e+00 -1.24690616e+00 1.49381077e+00 ... 2.11511135e+00 -2.89115101e-01 -6.31120503e-02] [ 9.35281396e-01 1.50855517e+00 6.84815764e-01 ... -1.24225426e+00 7.96695828e-01 1.69629943e+00] [-7.41875887e-01 4.48476583e-01 6.83330297e-02 ... 2.67007256e+00 -8.95447314e-01 -5.56235254e-01] ... 
[ 1.45027077e+00 1.00720966e+00 -1.64193463e+00 ... -9.44491148e-01 2.03331995e+00 -1.07274222e+00] [-6.70983791e-01 -9.91900802e-01 7.01686218e-02 ... -1.98188210e+00 -2.71853596e-01 -1.36426461e+00] [-1.06238890e+00 -4.10209924e-01 -1.39359951e+00 ... 1.28506780e+00 -1.09876418e+00 -8.63859773e-01]] [[-1.78930953e-01 -3.70090079e+00 -2.64885640e+00 ... -2.39680004e+00 -9.10264313e-01 -2.71757793e+00] [ 3.09046358e-01 -1.90194845e+00 -2.65585661e+00 ... -2.12058401e+00 -2.45527363e+00 -7.93795764e-01] [-2.90195894e+00 -2.96231675e+00 2.70047486e-01 ... -1.33429873e+00 -3.35661918e-01 -2.37979650e+00] ... [-1.90490380e-01 -1.21445465e+00 1.36403120e+00 ... -1.50321043e+00 -1.49260402e+00 -2.22961473e+00] [-4.72201437e-01 -2.43720746e+00 -1.69151771e+00 ... -2.27164364e+00 -3.83650541e+00 -1.80606902e+00] [-3.35913658e+00 -2.32927895e+00 -1.75779426e+00 ... -9.69181955e-01 -8.69768143e-01 -2.68538547e+00]]] [[[-1.62299418e+00 -1.49398506e+00 -1.62484276e+00 ... -2.04650092e+00 -2.30110645e+00 -2.04559445e+00] [-1.55703580e+00 -1.56917691e+00 -1.98391020e+00 ... -1.66479313e+00 -2.08099055e+00 -1.97079372e+00] [-1.79492784e+00 -1.96933794e+00 -1.69392610e+00 ... -1.99987173e+00 -1.77454793e+00 -1.65107989e+00] ... [-2.07274389e+00 -2.13154745e+00 -2.02587390e+00 ... -1.86593294e+00 -2.01253629e+00 -1.37884402e+00] [-1.93689883e+00 -1.66229248e+00 -1.72889042e+00 ... -1.59372509e+00 -1.84942186e+00 -1.83387387e+00] [-1.49212849e+00 -1.97026837e+00 -2.09662390e+00 ... -1.90748370e+00 -1.84527719e+00 -2.38784456e+00]] [[-1.88710201e+00 -5.70467710e-01 -2.97515535e+00 ... -9.43041563e-01 -3.92358017e+00 -1.21395612e+00] [ 8.08595717e-01 -6.89659655e-01 -3.91206205e-01 ... -2.44041872e+00 -1.22147775e+00 -1.77469921e+00] [-9.36113536e-01 1.06964326e+00 2.64279127e+00 ... 1.25979817e+00 -1.12346470e-01 6.47398531e-01] ... [-2.22531080e+00 1.62730336e+00 -1.22128367e+00 ... -2.37442851e+00 -1.63977587e+00 -1.20589137e+00] [-1.95898771e+00 -3.25556374e+00 -1.17317748e+00 ... 
-7.32209086e-02 -6.32414222e-01 -1.89394283e+00] [-1.63564026e+00 -1.47281241e+00 -4.95328569e+00 ... -1.27068973e+00 8.63077760e-01 -1.74744284e+00]] [[-6.34779096e-01 -1.49214768e+00 -8.13188672e-01 ... -2.25968671e+00 -1.26233137e+00 4.87003773e-01] [-7.54389822e-01 -1.16404053e-02 -2.39653945e+00 ... -1.78863347e-01 -9.51557696e-01 -7.94689596e-01] [-4.84206229e-01 -6.15165174e-01 -1.54201198e+00 ... -1.96163380e+00 -7.22412527e-01 -4.31272030e-01] ... [-8.13053787e-01 -1.66012716e+00 -1.28976583e+00 ... -1.12594712e+00 -3.31655264e-01 -1.17464471e+00] [ 1.80695817e-01 -9.14797708e-02 -1.16635513e+00 ... -6.38260663e-01 -1.09114182e+00 -5.51755250e-01] [-3.92563969e-01 -1.70376778e+00 -9.43486154e-01 ... 1.90243214e-01 -1.35526133e+00 -1.67287040e+00]] [[-2.67532682e+00 -2.60750628e+00 -2.58389521e+00 ... -2.58552194e+00 -2.62023520e+00 -2.69165826e+00] [-2.60650682e+00 -2.66090679e+00 -2.55788708e+00 ... -2.67926788e+00 -2.57186389e+00 -2.63212752e+00] [-2.59069991e+00 -2.58910632e+00 -2.61855912e+00 ... -2.48694038e+00 -2.57387424e+00 -2.72948050e+00] ... [-2.74532580e+00 -2.53477883e+00 -2.61972499e+00 ... -2.61401343e+00 -2.61350131e+00 -2.68435621e+00] [-2.65293956e+00 -2.55576897e+00 -2.64428091e+00 ... -2.53518295e+00 -2.65996575e+00 -2.66855240e+00] [-2.63152289e+00 -2.55025816e+00 -2.52901530e+00 ... -2.67643666e+00 -2.53554630e+00 -2.67470860e+00]] [[-2.13780379e+00 1.80027282e+00 -5.70980906e-01 ... 6.51086032e-01 1.07738948e+00 -8.24706316e-01] [ 1.28821671e+00 -3.58893782e-01 -8.44016194e-01 ... -7.04722345e-01 -1.75435930e-01 -1.27075791e+00] [-1.32995129e+00 -8.98902059e-01 4.56309587e-01 ... -1.69922853e+00 -1.09460509e+00 7.05534458e-01] ... [ 1.29913092e+00 1.05650656e-01 -2.18525752e-01 ... -4.29597080e-01 4.75421607e-01 6.21905208e-01] [-1.60992920e+00 -4.47086632e-01 4.82162148e-01 ... -1.36661589e+00 -3.68197441e-01 -8.13138664e-01] [ 6.09907687e-01 -1.42900810e-01 -4.83611524e-01 ... 
3.26204896e-01 -8.17754388e-01 -1.35455430e+00]] [[ 1.72980177e+00 -2.46866035e+00 -1.22317863e+00 ... -1.09384644e+00 -3.96007442e+00 -1.40342641e+00] [-1.73561382e+00 -1.63144201e-01 -2.22205424e+00 ... -2.69685864e+00 -1.38135397e+00 -2.44198346e+00] [-1.10505092e+00 -1.47846401e+00 -1.55618161e-01 ... -7.28003442e-01 -1.59774911e+00 -8.65096450e-01] ... [-3.23234987e+00 2.31156850e+00 -1.14851964e+00 ... -4.93912935e+00 3.56449276e-01 -1.19521248e+00] [-1.75373816e+00 2.75760353e-01 -1.81788743e+00 ... -8.68640006e-01 1.40866667e-01 -9.33673441e-01] [-2.29604077e+00 -2.10335636e+00 -2.02872276e+00 ... 8.57872188e-01 -1.42859197e+00 1.29324818e+00]]] [[[-2.35478544e+00 -1.71080935e+00 -1.43006575e+00 ... -1.52683473e+00 -1.63170683e+00 -1.22913885e+00] [-1.89352560e+00 -2.10119677e+00 -1.97699118e+00 ... -1.78542948e+00 -2.04516459e+00 -1.55528641e+00] [-1.67101490e+00 -1.82372594e+00 -2.23049569e+00 ... -1.89166224e+00 -1.22886729e+00 -1.91818345e+00] ... [-2.08648086e+00 -2.38219976e+00 -1.55976105e+00 ... -1.50549686e+00 -2.03976011e+00 -1.44339681e+00] [-1.32307553e+00 -2.62727380e+00 -1.48334801e+00 ... -2.63360167e+00 -1.82555866e+00 -1.93373621e+00] [-1.80929518e+00 -2.03835702e+00 -1.78123844e+00 ... -2.22276330e+00 -2.01733422e+00 -1.65602684e+00]] [[-2.43310380e+00 -1.64447546e+00 -2.49041700e+00 ... 4.64240879e-01 -1.07509887e+00 -5.30278385e-01] [-2.05058888e-01 -1.15789914e+00 -2.66629386e+00 ... -1.38941240e+00 1.95910141e-01 -1.55119166e-01] [-1.07706451e+00 2.10366058e+00 -1.33740807e+00 ... -3.82880449e-01 -2.41945958e+00 -5.95585823e-01] ... [-2.68797517e+00 -1.75126147e+00 -2.56905496e-01 ... 3.29302877e-01 -3.00527525e+00 -8.06845009e-01] [-1.68499649e+00 -2.31911945e+00 1.71085119e-01 ... -1.75661147e+00 -3.88447237e+00 -4.02848387e+00] [-9.38075185e-01 -8.61416638e-01 -1.05604982e+00 ... -9.92444873e-01 -1.27142429e+00 -1.10449731e+00]] [[-5.34775198e-01 -1.67450994e-01 -9.08196151e-01 ... 
-1.96653748e+00 7.21680045e-01 -6.22271895e-01] [-1.04925883e+00 -5.03082812e-01 -9.27335978e-01 ... -9.24841523e-01 -1.49040473e+00 -1.13794267e+00] [-8.75898674e-02 -7.34769881e-01 -8.32587361e-01 ... -1.25977123e+00 -1.27835190e+00 -5.55032194e-01] ... [-1.08707893e+00 -1.34038532e+00 -1.29774809e+00 ... -1.57893741e+00 -7.43157089e-01 -1.52128363e+00] [-1.69328058e+00 -1.41005504e+00 -5.78469992e-01 ... -9.07706857e-01 -6.16807163e-01 -2.31895238e-01] [-6.78008735e-01 -5.45185991e-02 -1.36822009e+00 ... -2.20641062e-01 -1.38322592e+00 3.87081891e-01]] [[-2.55653238e+00 -2.62734652e+00 -2.47129321e+00 ... -2.62563205e+00 -2.56619382e+00 -2.66289783e+00] [-2.71371603e+00 -2.65610433e+00 -2.66670370e+00 ... -2.71172571e+00 -2.59190488e+00 -2.60107398e+00] [-2.59701848e+00 -2.74796295e+00 -2.66279507e+00 ... -2.57393885e+00 -2.67640853e+00 -2.67229700e+00] ... [-2.69413161e+00 -2.65862584e+00 -2.76970458e+00 ... -2.75813079e+00 -2.58002377e+00 -2.52739906e+00] [-2.72549701e+00 -2.60902619e+00 -2.51727915e+00 ... -2.71367645e+00 -2.75185227e+00 -2.68775654e+00] [-2.52209496e+00 -2.49452400e+00 -2.54016972e+00 ... -2.70420527e+00 -2.60389924e+00 -2.54389739e+00]] [[-1.47991979e+00 -2.29629800e-01 2.42855415e-01 ... 2.01242566e+00 1.97822642e+00 -1.95629105e-01] [ 7.62476683e-01 -2.63785958e+00 2.39432645e+00 ... -1.60932684e+00 3.98441344e-01 2.17619944e+00] [ 1.01167297e+00 -4.05599833e-01 3.51389498e-01 ... 5.03317654e-01 -9.11390603e-01 -7.74258554e-01] ... [-1.20101020e-01 1.83594155e+00 -3.53670001e-01 ... -1.82133067e+00 2.49645993e-01 -8.65134835e-01] [ 6.98121965e-01 9.21398640e-01 -5.48202157e-01 ... 3.03630978e-01 -7.12952137e-01 -2.02766180e+00] [-2.16760302e+00 1.84447110e+00 -1.74942636e+00 ... 1.49368978e+00 -1.03252566e+00 -1.00858517e-01]] [[-1.40019953e+00 -2.30428171e+00 -1.35068548e+00 ... -1.46925545e+00 -2.50146317e+00 -1.87904036e+00] [-1.40336168e+00 -2.24461031e+00 -2.50217724e+00 ... 
-1.49430543e-01 1.90637696e+00 -4.37240899e-01] [-2.64595962e+00 -8.25723052e-01 -1.05541682e+00 ... -1.63123202e+00 -7.77642131e-01 -1.51681137e+00] ... [-7.28674531e-01 -2.45926356e+00 -2.36236826e-02 ... -1.53951168e+00 -1.85303199e+00 -1.93927705e+00] [-2.16674733e+00 -2.36976504e+00 -1.56735992e+00 ... -1.46297646e+00 -1.38842762e+00 3.51852506e-01] [-2.37855291e+00 2.67038733e-01 -3.15336394e+00 ... -5.65769196e-01 -6.75411701e-01 -3.50452840e-01]]]]; ov_res: [[[[-1.81183660e+00 -1.73919165e+00 -1.66485965e+00 ... -2.18304396e+00 -1.83031380e+00 -2.03803468e+00] [-2.07950664e+00 -1.26102471e+00 -1.67746890e+00 ... -2.27760029e+00 -1.26447248e+00 -1.78657579e+00] [-1.84333062e+00 -2.04593825e+00 -1.76271331e+00 ... -1.95110965e+00 -1.80962551e+00 -1.64726746e+00] ... [-1.74488008e+00 -1.86134255e+00 -1.77730119e+00 ... -1.53522503e+00 -2.25917530e+00 -1.55910027e+00] [-1.80595684e+00 -1.90861750e+00 -2.01366973e+00 ... -1.56616819e+00 -1.81318808e+00 -1.72524011e+00] [-1.23247385e+00 -2.09065294e+00 -1.65013444e+00 ... -1.26398921e+00 -2.05500793e+00 -1.68280029e+00]] [[-1.77438033e+00 -2.44082808e+00 6.37150705e-01 ... -1.49005079e+00 -3.87223864e+00 -2.40118289e+00] [-1.81799412e+00 -2.11542428e-01 1.48530380e-04 ... -2.84189057e+00 -2.17676330e+00 -1.77741122e+00] [-2.41641641e+00 -1.76818645e+00 -6.14315391e-01 ... -3.74813390e+00 1.03527701e+00 -1.18634593e+00] ... [-1.38883591e+00 1.02511978e+00 -1.93166196e+00 ... -1.32137883e+00 -2.50947833e+00 -2.52071762e+00] [-1.61984074e+00 -3.72797275e+00 4.20306891e-01 ... -1.70355988e+00 -3.23094225e+00 -1.07902169e+00] [-1.01685643e+00 1.54504132e+00 1.47509724e-01 ... -8.39513302e-01 -1.29825521e+00 -1.26719892e+00]] [[-3.87050986e-01 3.51495087e-01 -1.84183931e+00 ... -1.02175665e+00 -9.34687257e-01 -1.75259757e+00] [-9.08369482e-01 -4.82103914e-01 -8.44259441e-01 ... 5.62046133e-02 -1.85800231e+00 -1.44341886e+00] [-6.45134509e-01 -2.72022877e-02 -3.87148619e-01 ... 
-1.44206607e+00 -8.41478229e-01 -1.30373049e+00] ... [-1.01552093e+00 -1.49392343e+00 -2.21598875e-02 ... -5.25243104e-01 8.75287224e-03 -6.97273374e-01] [-2.93251753e+00 -1.47830522e+00 -1.21007788e+00 ... -8.53755832e-01 -1.35152137e+00 -2.04974294e+00] [-1.58523095e+00 -8.12824488e-01 -2.52551049e-01 ... -5.87666869e-01 -1.02129662e+00 9.07511339e-02]] [[-2.57021427e+00 -2.60784960e+00 -2.64865017e+00 ... -2.61062670e+00 -2.55270171e+00 -2.53331041e+00] [-2.58194351e+00 -2.63886499e+00 -2.71675444e+00 ... -2.54645181e+00 -2.68975759e+00 -2.68855882e+00] [-2.75221705e+00 -2.60924697e+00 -2.59981441e+00 ... -2.68779445e+00 -2.57736397e+00 -2.61816001e+00] ... [-2.58069348e+00 -2.67594910e+00 -2.67746425e+00 ... -2.53740144e+00 -2.58058310e+00 -2.62913942e+00] [-2.61336446e+00 -2.64774513e+00 -2.50074482e+00 ... -2.57632303e+00 -2.55523443e+00 -2.54700041e+00] [-2.65822744e+00 -2.59056902e+00 -2.55988526e+00 ... -2.66781354e+00 -2.58375049e+00 -2.59381437e+00]] [[-2.69902885e-01 4.00566220e-01 -9.56328809e-01 ... 1.12521738e-01 6.79307699e-01 -1.17104614e+00] [ 7.64776051e-01 -6.46411955e-01 2.73542464e-01 ... -4.87901300e-01 4.23823059e-01 -1.96322322e+00] [ 8.47269952e-01 1.54014170e+00 -2.30342340e+00 ... 8.06870341e-01 9.80014443e-01 -5.50190985e-01] ... [ 7.23099172e-01 7.31457770e-01 1.46047845e-01 ... 6.43823326e-01 7.24327803e-01 -1.80592525e+00] [ 1.69336545e+00 -6.92600369e-01 1.69396889e+00 ... 1.87250447e+00 -1.01115751e+00 -4.13830221e-01] [-1.51969016e+00 1.13675892e+00 1.27710092e+00 ... -1.21913147e+00 -2.00541615e-01 -6.69584334e-01]] [[-5.85091114e-01 -5.47325790e-01 -1.35578918e+00 ... -1.18692029e+00 -4.00379658e+00 -7.15209246e-01] [-2.79463267e+00 -3.43638587e+00 -2.08842778e+00 ... -3.03543627e-01 1.94568443e+00 -1.32478797e+00] [-1.88154685e+00 -9.27809715e-01 -1.18124712e+00 ... 1.10310659e-01 -6.87639296e-01 -8.89587641e-01] ... [-1.79897797e+00 -1.74418235e+00 -2.66147256e+00 ... 
-2.38226414e+00 -1.46913838e+00 -2.72536778e+00] [-3.54449201e+00 -2.95621610e+00 8.82828891e-01 ... -1.37864459e+00 -1.90067935e+00 -1.28983748e+00] [-1.75693750e+00 -1.71404839e+00 -3.13151264e+00 ... -8.95313770e-02 2.90777236e-01 -1.64683533e+00]]] [[[-2.42763281e+00 -1.90765297e+00 -1.53835654e+00 ... -1.26075411e+00 -1.46505070e+00 -2.25985122e+00] [-1.82546806e+00 -2.01815867e+00 -2.07858109e+00 ... -1.24113405e+00 -1.44886482e+00 -2.26458001e+00] [-1.74672818e+00 -1.67464173e+00 -1.95994580e+00 ... -1.72707558e+00 -1.71391022e+00 -1.55962873e+00] ... [-2.09243155e+00 -2.05233121e+00 -2.07297349e+00 ... -1.41601217e+00 -2.29477382e+00 -2.29201388e+00] [-2.15471148e+00 -1.69405890e+00 -1.55874777e+00 ... -1.62552893e+00 -1.69127166e+00 -2.24587727e+00] [-1.99791539e+00 -1.90974772e+00 -1.78546977e+00 ... -2.11001325e+00 -1.92056537e+00 -1.51682925e+00]] [[-3.72630429e+00 -2.21479654e+00 -1.93294480e-01 ... -1.67943910e-01 -2.58538336e-01 -9.15787756e-01] [-2.56104302e+00 3.64417940e-01 -2.63214803e+00 ... 7.46222138e-01 1.88835418e+00 -2.12213039e+00] [-3.01758623e+00 -1.96982741e+00 -9.76569891e-01 ... -1.11745429e+00 9.90761578e-01 -1.41453433e+00] ... [-2.36452579e+00 -1.17849417e-01 -1.81609488e+00 ... -4.14711523e+00 -1.27739161e-01 -3.68772411e+00] [-7.77706742e-01 1.44659221e+00 -1.27952516e+00 ... 1.86802614e+00 -9.25355792e-01 1.22643672e-01] [-2.09934711e+00 3.44607562e-01 -9.69093978e-01 ... -1.59569931e+00 -1.78769088e+00 -5.70665836e-01]] [[-1.53515339e+00 -1.09660876e+00 -8.37938309e-01 ... -4.83877957e-01 -9.91289794e-01 1.15835965e+00] [-1.54252505e+00 -1.70498204e+00 -3.37162614e-01 ... -1.65308118e-01 -1.90904009e+00 -6.49486065e-01] [-1.41876566e+00 2.10177168e-01 -1.17037702e+00 ... -1.30644405e+00 -1.19818175e+00 -2.14024186e+00] ... [-7.60173619e-01 -4.70748037e-01 -8.99597585e-01 ... -1.81858099e+00 -3.68195802e-01 -1.41714931e+00] [-1.13098395e+00 -7.85915647e-03 -6.07480109e-01 ... 
-1.41185331e+00 -1.58435476e+00 -1.47467852e+00] [ 9.90214765e-01 -2.37053967e+00 -2.30846500e+00 ... -2.99816094e-02 -5.52327633e-01 -5.99827707e-01]] [[-2.56743312e+00 -2.64217114e+00 -2.63698864e+00 ... -2.69521284e+00 -2.61274219e+00 -2.64139748e+00] [-2.65306926e+00 -2.55355358e+00 -2.71128249e+00 ... -2.70225573e+00 -2.66358423e+00 -2.58741546e+00] [-2.48892212e+00 -2.52761173e+00 -2.55717826e+00 ... -2.80055356e+00 -2.65758896e+00 -2.43100095e+00] ... [-2.59787846e+00 -2.66640210e+00 -2.62561989e+00 ... -2.61965871e+00 -2.57436419e+00 -2.59074521e+00] [-2.63183093e+00 -2.61718225e+00 -2.66646171e+00 ... -2.68691730e+00 -2.72690392e+00 -2.68021035e+00] [-2.55278206e+00 -2.46380591e+00 -2.55659533e+00 ... -2.58673668e+00 -2.72678709e+00 -2.51587367e+00]] [[-4.75470364e-01 5.74015677e-01 1.67279446e+00 ... -1.06536996e+00 3.20655972e-01 1.72488701e+00] [ 8.68479729e-01 -1.63223279e+00 -1.57102013e+00 ... 1.20728600e+00 -2.76738644e-01 -1.80655193e+00] [-1.30376685e+00 2.03191829e+00 3.53710562e-01 ... 7.21633434e-01 -4.91673112e-01 -4.60212290e-01] ... [ 5.30384362e-01 1.84719563e+00 -4.48508784e-02 ... -3.60470951e-01 3.21432590e-01 1.28311384e+00] [-9.61440980e-01 2.68751502e-01 1.11816847e+00 ... -1.39099792e-01 1.01476061e+00 1.73624587e+00] [ 4.72135037e-01 9.66494381e-01 -6.59198403e-01 ... -4.50812042e-01 1.98920286e+00 1.41766536e+00]] [[ 8.67284894e-01 -2.23318696e+00 -2.88416004e+00 ... -1.43659472e-01 -9.69217896e-01 -3.26247954e+00] [-2.19852638e+00 -1.59182096e+00 -1.74295497e+00 ... -1.53893197e+00 8.27028930e-01 3.73065680e-01] [-1.85414696e+00 -1.91924965e+00 -1.50092375e+00 ... -2.05949521e+00 1.09460938e+00 -5.64500451e-01] ... [-3.83781219e+00 2.23131791e-01 -2.74848056e+00 ... -1.72174215e+00 -1.35973120e+00 -7.13219702e-01] [-8.93190324e-01 -2.27882004e+00 -1.05816126e+00 ... -4.23219728e+00 -3.32999420e+00 -4.14128602e-01] [-2.00720239e+00 -1.43582332e+00 -1.43975759e+00 ... 
-1.43222630e+00 -2.40409589e+00 -3.55668831e+00]]] [[[-1.83452046e+00 -1.63264763e+00 -1.88472295e+00 ... -2.01246595e+00 -1.58876729e+00 -1.47475672e+00] [-1.47348154e+00 -1.13969100e+00 -1.31543350e+00 ... -1.63550925e+00 -1.93737411e+00 -1.55729508e+00] [-1.92518067e+00 -1.91547835e+00 -1.75226021e+00 ... -2.17465973e+00 -1.71788454e+00 -1.52981627e+00] ... [-1.77451551e+00 -1.87578070e+00 -1.69996130e+00 ... -1.91927814e+00 -1.88670003e+00 -2.23064375e+00] [-1.45240414e+00 -1.92295158e+00 -1.62994266e+00 ... -1.76421809e+00 -2.01740098e+00 -1.92767894e+00] [-1.83334851e+00 -1.77721035e+00 -2.23374057e+00 ... -1.59762263e+00 -2.23348045e+00 -2.02389550e+00]] [[-8.99858057e-01 8.93254578e-02 -3.51666585e-02 ... -9.05028462e-01 -8.88175964e-01 -1.25492966e+00] [-1.42165065e+00 -3.03435636e+00 1.57089278e-01 ... -1.54590356e+00 6.12007558e-01 1.25699505e-01] [-1.42417920e+00 -3.20445609e+00 5.51835954e-01 ... -7.36284316e-01 -3.21868682e+00 -6.80525243e-01] ... [-1.72526896e+00 -2.50998807e+00 2.90224969e-01 ... -3.91392529e-01 -9.20096636e-01 1.84475696e+00] [-4.19041246e-01 -2.06989143e-02 -2.38638186e+00 ... -6.31884456e-01 -1.22126997e+00 1.43075836e+00] [ 9.04379845e-01 9.05795097e-01 -3.32947016e+00 ... -3.50529528e+00 -4.38332891e+00 -3.59919405e+00]] [[-1.54545999e+00 2.84038670e-02 -4.78758365e-01 ... -1.00318575e+00 -7.80073643e-01 -1.36114776e+00] [-1.22534013e+00 -9.63402510e-01 -5.76954305e-01 ... -8.66216421e-01 -7.99917340e-01 -2.45786503e-01] [-1.70073366e+00 -3.05866569e-01 -1.20493984e+00 ... -1.96798146e+00 -1.15418673e+00 -4.82885778e-01] ... [-1.18908048e+00 -5.68025291e-01 -2.30428672e+00 ... -9.03898358e-01 -4.81284857e-01 -1.25810122e+00] [-4.22671884e-01 -3.71060073e-01 -5.46282411e-01 ... -2.66722202e+00 -3.00081164e-01 -5.63335001e-01] [-6.18406296e-01 -6.39632344e-01 -1.07246339e+00 ... 1.14436392e-02 -5.87017894e-01 -1.92663574e+00]] [[-2.62102509e+00 -2.59845638e+00 -2.62026048e+00 ... 
-2.66750646e+00 -2.55685949e+00 -2.50767422e+00] [-2.64798093e+00 -2.51004052e+00 -2.65395308e+00 ... -2.62754416e+00 -2.57642245e+00 -2.63749814e+00] [-2.77602863e+00 -2.54895759e+00 -2.47077012e+00 ... -2.62220573e+00 -2.65946531e+00 -2.58049703e+00] ... [-2.54209781e+00 -2.50066018e+00 -2.68725657e+00 ... -2.64284825e+00 -2.54762673e+00 -2.65180087e+00] [-2.59762955e+00 -2.60706496e+00 -2.60746312e+00 ... -2.55468035e+00 -2.65801811e+00 -2.62259579e+00] [-2.73112321e+00 -2.59422612e+00 -2.43728018e+00 ... -2.55203009e+00 -2.52974200e+00 -2.54965854e+00]] [[-1.69694710e+00 8.20408762e-01 2.19050884e+00 ... -4.65544641e-01 7.14291856e-02 1.27527475e+00] [ 1.46963108e+00 -8.81810427e-01 1.50918394e-01 ... 4.03726786e-01 6.53610528e-01 9.28641915e-01] [-3.83257151e-01 -4.77759033e-01 2.38870919e-01 ... 7.08938777e-01 5.50321698e-01 1.49522245e+00] ... [-1.06093752e+00 -5.68999767e-01 9.90823209e-01 ... 2.12393075e-01 1.14993918e+00 2.23593563e-01] [ 6.97710693e-01 -1.79266036e-01 1.07750642e+00 ... -6.37567282e-01 -2.04947925e+00 3.26415777e-01] [-2.09849453e+00 -8.55706632e-01 1.86733806e+00 ... 1.00051872e-01 1.98803127e-01 3.48009735e-01]] [[-2.76903868e+00 -1.66178334e+00 -1.42591119e+00 ... -1.41827357e+00 -2.56304789e+00 -1.28604305e+00] [-3.04907036e+00 -2.01419592e+00 -3.39400625e+00 ... -2.01031804e+00 -9.48739871e-02 -1.37918317e+00] [-2.61662126e+00 -1.05988109e+00 -2.50198770e+00 ... -1.44813764e+00 -1.76894939e+00 -2.99836683e+00] ... [-1.58841574e+00 -1.15727283e-01 -1.62509537e+00 ... 6.00823462e-01 -6.19284630e-01 -2.88438535e+00] [-1.31970859e+00 3.21112834e-02 -1.32500672e+00 ... -1.41033483e+00 -2.42438626e+00 -2.91266012e+00] [-4.41028214e+00 -2.26823807e+00 -1.41150129e+00 ... -2.27369189e+00 -2.35060525e+00 -2.11625814e+00]]] ... [[[-2.26563096e+00 -1.72129846e+00 -1.57066715e+00 ... -1.96050358e+00 -1.49065757e+00 -1.40623951e+00] [-1.94347095e+00 -2.20259714e+00 -2.37284827e+00 ... 
-2.02899742e+00 -1.76178670e+00 -1.54449916e+00] [-1.83458245e+00 -2.14212084e+00 -1.65349615e+00 ... -2.16237712e+00 -1.65909827e+00 -1.58756077e+00] ... [-1.78070092e+00 -1.45930338e+00 -1.45475709e+00 ... -1.50750899e+00 -2.01173520e+00 -1.44597149e+00] [-1.75800002e+00 -1.95313072e+00 -1.58423150e+00 ... -1.79818797e+00 -1.65342629e+00 -1.66107440e+00] [-1.78027678e+00 -2.13483667e+00 -2.24624157e+00 ... -2.02513146e+00 -1.46190345e+00 -1.34225798e+00]] [[-1.80283332e+00 -3.47503752e-01 -6.59584761e-01 ... -2.00407720e+00 -2.00176668e+00 -7.17069447e-01] [-1.12852335e-01 7.56855071e-01 -6.42369986e-01 ... -2.95935154e+00 2.42838427e-01 -1.62486768e+00] [-1.02501345e+00 1.85252666e+00 -1.39600837e+00 ... -1.27840102e+00 -2.08660102e+00 6.66633785e-01] ... [-1.35427940e+00 -3.38108563e+00 -1.33724511e+00 ... -2.62359619e+00 -2.55469942e+00 -8.87419760e-01] [-2.17449117e+00 -1.96571815e+00 1.38593352e+00 ... -2.46912837e+00 -3.27157211e+00 -1.39299226e+00] [-2.09913421e+00 -2.13204646e+00 3.07015687e-01 ... -8.69314134e-01 -4.06727982e+00 -1.46638381e+00]] [[-1.06196754e-01 -8.85143578e-01 -3.91606599e-01 ... -1.13283351e-01 -1.62422681e+00 -1.59183335e+00] [ 1.15422022e+00 -1.44297004e+00 3.06671500e-01 ... -1.29051611e-01 7.40051210e-01 -2.25666523e-01] [-1.02326715e+00 -5.25473058e-01 -4.37931031e-01 ... 1.70655206e-01 -9.99520957e-01 2.79917449e-01] ... [-2.00893736e+00 -2.11531782e+00 -1.63895333e+00 ... -7.94208407e-01 -1.43592310e+00 -7.46540844e-01] [-1.30920136e+00 -1.41856873e+00 -9.21350718e-02 ... -6.97013199e-01 2.20299363e-02 -2.05493593e+00] [-9.25321817e-01 -2.05729342e+00 -2.05414668e-01 ... -3.89230065e-02 -1.59463060e+00 -1.00322568e+00]] [[-2.57780170e+00 -2.70147228e+00 -2.61231947e+00 ... -2.83641100e+00 -2.69807720e+00 -2.69948983e+00] [-2.68605757e+00 -2.53286028e+00 -2.63646078e+00 ... -2.72779369e+00 -2.50755191e+00 -2.56357145e+00] [-2.66909122e+00 -2.65087247e+00 -2.72757363e+00 ... -2.49791145e+00 -2.62203407e+00 -2.64131045e+00] ... 
[-2.53720593e+00 -2.48687744e+00 -2.67569280e+00 ... -2.59154391e+00 -2.78637528e+00 -2.58655572e+00] [-2.63375282e+00 -2.66609287e+00 -2.52466416e+00 ... -2.62403369e+00 -2.51803327e+00 -2.66976285e+00] [-2.47904205e+00 -2.63113904e+00 -2.61847830e+00 ... -2.55141020e+00 -2.71959567e+00 -2.53255677e+00]] [[ 2.50806475e+00 -1.24690616e+00 1.49381077e+00 ... 2.11511135e+00 -2.89115071e-01 -6.31120577e-02] [ 9.35281277e-01 1.50855505e+00 6.84815705e-01 ... -1.24225414e+00 7.96695769e-01 1.69629943e+00] [-7.41875827e-01 4.48476583e-01 6.83330297e-02 ... 2.67007256e+00 -8.95447314e-01 -5.56235254e-01] ... [ 1.45027065e+00 1.00720954e+00 -1.64193451e+00 ... -9.44491148e-01 2.03331995e+00 -1.07274222e+00] [-6.70983791e-01 -9.91900802e-01 7.01686218e-02 ... -1.98188198e+00 -2.71853566e-01 -1.36426449e+00] [-1.06238890e+00 -4.10209894e-01 -1.39359939e+00 ... 1.28506780e+00 -1.09876418e+00 -8.63859773e-01]] [[-1.78930938e-01 -3.70090079e+00 -2.64885640e+00 ... -2.39680004e+00 -9.10264254e-01 -2.71757793e+00] [ 3.09046477e-01 -1.90194845e+00 -2.65585661e+00 ... -2.12058401e+00 -2.45527363e+00 -7.93795705e-01] [-2.90195918e+00 -2.96231675e+00 2.70047635e-01 ... -1.33429861e+00 -3.35661799e-01 -2.37979650e+00] ... [-1.90490320e-01 -1.21445453e+00 1.36403131e+00 ... -1.50321043e+00 -1.49260402e+00 -2.22961473e+00] [-4.72201347e-01 -2.43720746e+00 -1.69151771e+00 ... -2.27164364e+00 -3.83650541e+00 -1.80606902e+00] [-3.35913658e+00 -2.32927895e+00 -1.75779426e+00 ... -9.69181895e-01 -8.69768023e-01 -2.68538547e+00]]] [[[-1.62299418e+00 -1.49398506e+00 -1.62484276e+00 ... -2.04650092e+00 -2.30110645e+00 -2.04559445e+00] [-1.55703592e+00 -1.56917691e+00 -1.98391020e+00 ... -1.66479313e+00 -2.08099079e+00 -1.97079384e+00] [-1.79492784e+00 -1.96933794e+00 -1.69392610e+00 ... -1.99987173e+00 -1.77454805e+00 -1.65107989e+00] ... [-2.07274389e+00 -2.13154769e+00 -2.02587390e+00 ... -1.86593294e+00 -2.01253629e+00 -1.37884402e+00] [-1.93689895e+00 -1.66229248e+00 -1.72889042e+00 ... 
-1.59372509e+00 -1.84942186e+00 -1.83387387e+00] [-1.49212861e+00 -1.97026837e+00 -2.09662390e+00 ... -1.90748370e+00 -1.84527731e+00 -2.38784456e+00]] [[-1.88710201e+00 -5.70467651e-01 -2.97515535e+00 ... -9.43041563e-01 -3.92358017e+00 -1.21395612e+00] [ 8.08595598e-01 -6.89659655e-01 -3.91206264e-01 ... -2.44041872e+00 -1.22147775e+00 -1.77469921e+00] [-9.36113536e-01 1.06964326e+00 2.64279127e+00 ... 1.25979817e+00 -1.12346515e-01 6.47398591e-01] ... [-2.22531080e+00 1.62730336e+00 -1.22128367e+00 ... -2.37442851e+00 -1.63977587e+00 -1.20589125e+00] [-1.95898771e+00 -3.25556397e+00 -1.17317736e+00 ... -7.32208863e-02 -6.32414222e-01 -1.89394295e+00] [-1.63564026e+00 -1.47281241e+00 -4.95328569e+00 ... -1.27068973e+00 8.63077760e-01 -1.74744284e+00]] [[-6.34779096e-01 -1.49214780e+00 -8.13188672e-01 ... -2.25968671e+00 -1.26233137e+00 4.87003773e-01] [-7.54389822e-01 -1.16403643e-02 -2.39653945e+00 ... -1.78863376e-01 -9.51557696e-01 -7.94689596e-01] [-4.84206259e-01 -6.15165174e-01 -1.54201198e+00 ... -1.96163380e+00 -7.22412527e-01 -4.31272000e-01] ... [-8.13053787e-01 -1.66012728e+00 -1.28976583e+00 ... -1.12594712e+00 -3.31655264e-01 -1.17464471e+00] [ 1.80695847e-01 -9.14797708e-02 -1.16635513e+00 ... -6.38260603e-01 -1.09114182e+00 -5.51755250e-01] [-3.92563969e-01 -1.70376790e+00 -9.43486154e-01 ... 1.90243229e-01 -1.35526133e+00 -1.67287040e+00]] [[-2.67532682e+00 -2.60750628e+00 -2.58389521e+00 ... -2.58552194e+00 -2.62023520e+00 -2.69165826e+00] [-2.60650682e+00 -2.66090679e+00 -2.55788732e+00 ... -2.67926812e+00 -2.57186389e+00 -2.63212776e+00] [-2.59069991e+00 -2.58910632e+00 -2.61855912e+00 ... -2.48694038e+00 -2.57387447e+00 -2.72948074e+00] ... [-2.74532580e+00 -2.53477907e+00 -2.61972499e+00 ... -2.61401367e+00 -2.61350155e+00 -2.68435621e+00] [-2.65293956e+00 -2.55576897e+00 -2.64428091e+00 ... -2.53518295e+00 -2.65996575e+00 -2.66855240e+00] [-2.63152289e+00 -2.55025816e+00 -2.52901530e+00 ... 
-2.67643666e+00 -2.53554630e+00 -2.67470860e+00]] [[-2.13780403e+00 1.80027306e+00 -5.70980906e-01 ... 6.51086152e-01 1.07738960e+00 -8.24706376e-01] [ 1.28821683e+00 -3.58893812e-01 -8.44016254e-01 ... -7.04722404e-01 -1.75435960e-01 -1.27075803e+00] [-1.32995141e+00 -8.98902118e-01 4.56309617e-01 ... -1.69922864e+00 -1.09460521e+00 7.05534577e-01] ... [ 1.29913104e+00 1.05650671e-01 -2.18525767e-01 ... -4.29597110e-01 4.75421697e-01 6.21905267e-01] [-1.60992944e+00 -4.47086662e-01 4.82162207e-01 ... -1.36661601e+00 -3.68197471e-01 -8.13138723e-01] [ 6.09907746e-01 -1.42900839e-01 -4.83611554e-01 ... 3.26204926e-01 -8.17754447e-01 -1.35455441e+00]] [[ 1.72980225e+00 -2.46866035e+00 -1.22317863e+00 ... -1.09384644e+00 -3.96007490e+00 -1.40342641e+00] [-1.73561394e+00 -1.63143978e-01 -2.22205448e+00 ... -2.69685888e+00 -1.38135397e+00 -2.44198370e+00] [-1.10505092e+00 -1.47846401e+00 -1.55617997e-01 ... -7.28003323e-01 -1.59774911e+00 -8.65096390e-01] ... [-3.23235011e+00 2.31156898e+00 -1.14851964e+00 ... -4.93913031e+00 3.56449515e-01 -1.19521248e+00] [-1.75373828e+00 2.75760621e-01 -1.81788743e+00 ... -8.68639946e-01 1.40866816e-01 -9.33673441e-01] [-2.29604101e+00 -2.10335660e+00 -2.02872300e+00 ... 8.57872546e-01 -1.42859197e+00 1.29324853e+00]]] [[[-2.35478544e+00 -1.71080947e+00 -1.43006575e+00 ... -1.52683473e+00 -1.63170683e+00 -1.22913885e+00] [-1.89352572e+00 -2.10119677e+00 -1.97699130e+00 ... -1.78542948e+00 -2.04516482e+00 -1.55528641e+00] [-1.67101502e+00 -1.82372594e+00 -2.23049569e+00 ... -1.89166236e+00 -1.22886741e+00 -1.91818345e+00] ... [-2.08648109e+00 -2.38219976e+00 -1.55976117e+00 ... -1.50549698e+00 -2.03976011e+00 -1.44339693e+00] [-1.32307565e+00 -2.62727380e+00 -1.48334813e+00 ... -2.63360167e+00 -1.82555866e+00 -1.93373621e+00] [-1.80929518e+00 -2.03835702e+00 -1.78123856e+00 ... -2.22276354e+00 -2.01733422e+00 -1.65602684e+00]] [[-2.43310380e+00 -1.64447546e+00 -2.49041700e+00 ... 
4.64240938e-01 -1.07509887e+00 -5.30278385e-01] [-2.05058903e-01 -1.15789914e+00 -2.66629410e+00 ... -1.38941240e+00 1.95910126e-01 -1.55119076e-01] [-1.07706451e+00 2.10366106e+00 -1.33740819e+00 ... -3.82880419e-01 -2.41945982e+00 -5.95585823e-01] ... [-2.68797541e+00 -1.75126147e+00 -2.56905466e-01 ... 3.29302937e-01 -3.00527525e+00 -8.06845009e-01] [-1.68499660e+00 -2.31911945e+00 1.71085119e-01 ... -1.75661147e+00 -3.88447261e+00 -4.02848387e+00] [-9.38075185e-01 -8.61416638e-01 -1.05604982e+00 ... -9.92444873e-01 -1.27142429e+00 -1.10449731e+00]] [[-5.34775198e-01 -1.67451039e-01 -9.08196151e-01 ... -1.96653748e+00 7.21680105e-01 -6.22271955e-01] [-1.04925895e+00 -5.03082871e-01 -9.27336037e-01 ... -9.24841583e-01 -1.49040473e+00 -1.13794267e+00] [-8.75899270e-02 -7.34769940e-01 -8.32587361e-01 ... -1.25977123e+00 -1.27835190e+00 -5.55032253e-01] ... [-1.08707893e+00 -1.34038532e+00 -1.29774809e+00 ... -1.57893741e+00 -7.43157148e-01 -1.52128363e+00] [-1.69328058e+00 -1.41005504e+00 -5.78470051e-01 ... -9.07706916e-01 -6.16807222e-01 -2.31895328e-01] [-6.78008795e-01 -5.45186587e-02 -1.36822009e+00 ... -2.20641106e-01 -1.38322592e+00 3.87081802e-01]] [[-2.55653238e+00 -2.62734628e+00 -2.47129321e+00 ... -2.62563205e+00 -2.56619382e+00 -2.66289783e+00] [-2.71371603e+00 -2.65610433e+00 -2.66670370e+00 ... -2.71172571e+00 -2.59190488e+00 -2.60107398e+00] [-2.59701848e+00 -2.74796295e+00 -2.66279507e+00 ... -2.57393861e+00 -2.67640853e+00 -2.67229676e+00] ... [-2.69413137e+00 -2.65862584e+00 -2.76970434e+00 ... -2.75813055e+00 -2.58002377e+00 -2.52739906e+00] [-2.72549701e+00 -2.60902619e+00 -2.51727891e+00 ... -2.71367645e+00 -2.75185227e+00 -2.68775654e+00] [-2.52209496e+00 -2.49452400e+00 -2.54016948e+00 ... -2.70420527e+00 -2.60389924e+00 -2.54389715e+00]] [[-1.47991991e+00 -2.29629815e-01 2.42855385e-01 ... 2.01242566e+00 1.97822630e+00 -1.95629150e-01] [ 7.62476683e-01 -2.63785982e+00 2.39432645e+00 ... 
-1.60932696e+00 3.98441344e-01 2.17619920e+00] [ 1.01167297e+00 -4.05599833e-01 3.51389468e-01 ... 5.03317654e-01 -9.11390662e-01 -7.74258554e-01] ... [-1.20101057e-01 1.83594155e+00 -3.53670031e-01 ... -1.82133079e+00 2.49645963e-01 -8.65134895e-01] [ 6.98121905e-01 9.21398640e-01 -5.48202157e-01 ... 3.03630948e-01 -7.12952197e-01 -2.02766204e+00] [-2.16760325e+00 1.84447098e+00 -1.74942636e+00 ... 1.49368978e+00 -1.03252566e+00 -1.00858547e-01]] [[-1.40019953e+00 -2.30428195e+00 -1.35068548e+00 ... -1.46925545e+00 -2.50146317e+00 -1.87904036e+00] [-1.40336168e+00 -2.24461055e+00 -2.50217724e+00 ... -1.49430394e-01 1.90637708e+00 -4.37240869e-01] [-2.64595962e+00 -8.25723052e-01 -1.05541682e+00 ... -1.63123214e+00 -7.77642071e-01 -1.51681149e+00] ... [-7.28674531e-01 -2.45926380e+00 -2.36235932e-02 ... -1.53951168e+00 -1.85303199e+00 -1.93927705e+00] [-2.16674733e+00 -2.36976528e+00 -1.56735992e+00 ... -1.46297657e+00 -1.38842762e+00 3.51852655e-01] [-2.37855291e+00 2.67038882e-01 -3.15336394e+00 ... -5.65769136e-01 -6.75411761e-01 -3.50452691e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 4} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': False, 'eps': 0.0001} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- 4 graph(%self : __torch__.test_group_norm.___torch_mangle_4612.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %6 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=0.0001]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.1920 0.3677 -0.3291 -2.1850 -0.0535 -0.0591 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %self.n_groups, %self.bias, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%5, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %6) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) ma): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs 
Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4849154 (Squeeze_4849153[0]:i64[], Constant_4849108[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4849154': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4852094 (Squeeze_4852093[0]:i64[], Constant_4852048[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4852094': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4854892 (Squeeze_4854891[0]:i64[], Constant_4854846[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4854892': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to castfw_re: [[[[-1.54732406e-01 1.18683010e-01 -3.54699269e-02 ... -9.76460055e-02 -3.79891768e-02 7.12204278e-02] [-1.86173469e-01 4.27651033e-03 -8.51177350e-02 ... 2.35768393e-01 -2.52746284e-01 3.66555639e-02] [ 3.49706024e-01 -1.19059578e-01 2.56829001e-02 ... -5.71580157e-02 -1.26094416e-01 1.22257039e-01] ... [ 8.72188509e-02 -2.25696117e-01 3.20912153e-02 ... -5.06302118e-01 -1.41211331e-01 -3.52757663e-01] [ 6.24137074e-02 3.51862423e-02 1.49357021e-01 ... 2.79045515e-02 2.20150918e-01 2.66417325e-01] [-4.63175774e-01 -1.43904379e-02 8.21223408e-02 ... -2.97689408e-01 1.06188260e-01 1.11205034e-01]] [[-9.32783544e-01 8.85355845e-02 -4.12276238e-02 ... 4.66426730e-01 -5.22584543e-02 -3.08059677e-02] [ 8.72911438e-02 -6.01670086e-01 7.58520067e-01 ... 3.89706679e-02 1.80124506e-01 -4.28308249e-01] [-3.69628519e-01 1.74301714e-02 2.61616051e-01 ... 4.74174261e-01 -3.92525285e-01 5.14706850e-01] ... [-3.25648725e-01 5.51193655e-01 7.98298955e-01 ... -5.49315989e-01 1.04659605e+00 2.74623930e-01] [-2.68544465e-01 -7.81665087e-01 1.89064667e-01 ... 2.42271170e-01 8.04277539e-01 -1.29221827e-01] [ 8.72818053e-01 7.65731752e-01 -1.22721404e-01 ... -2.98530221e-01 2.76500732e-01 -9.82522443e-02]] [[-1.15975654e-02 -3.42242986e-01 -2.00076222e-01 ... 1.01342008e-01 1.61280096e-01 5.43207705e-01] [ 1.72038436e-01 -4.57075596e-01 -1.16339125e-01 ... -3.02513570e-01 -1.66484639e-02 7.30567947e-02] [-1.61260903e-01 7.11836740e-02 -4.01363909e-01 ... 
1.71478286e-01 -2.36403011e-02 -1.55233890e-01] ... [ 2.05135718e-01 -3.60159092e-02 9.16478112e-02 ... 4.80728209e-01 8.91109928e-02 2.77680278e-01] [ 1.46009466e-02 1.10917814e-01 -2.73912191e-01 ... 1.56878665e-01 -4.55918908e-01 -1.40757680e-01] [-2.45332494e-01 -4.88630563e-01 -1.01155050e-01 ... 3.68956625e-02 4.84012604e-01 1.02640212e-01]] [[-4.17005224e-03 4.11984968e+00 -3.37615037e+00 ... -1.15174735e+00 -2.10525560e+00 1.21923614e+00] [ 2.40587831e+00 -1.44964659e+00 -1.13172150e+00 ... 1.65750480e+00 6.53262258e-01 6.23799324e-01] [ 6.76094055e-01 -4.48309928e-01 2.32732511e+00 ... -5.32614589e-01 -4.80615520e+00 5.59543085e+00] ... [ 2.29034662e+00 5.97925901e-01 -1.34917915e+00 ... 1.54589641e+00 5.73575020e-01 4.57490206e-01] [ 2.86493993e+00 3.68074983e-01 -5.37985973e-02 ... -2.22219968e+00 -2.07457468e-01 2.16189766e+00] [-6.99628115e-01 2.42559925e-01 3.42323470e+00 ... 4.34927797e+00 -1.62618971e+00 5.75223207e-01]] [[ 1.15839988e-01 -1.36955045e-02 2.44133491e-02 ... 8.89922678e-02 9.45146084e-02 2.86397114e-02] [-4.88293841e-02 4.57911976e-02 7.62418360e-02 ... -1.03492431e-01 -1.09311091e-02 -1.18854158e-01] [ 4.96155247e-02 -5.41111862e-04 -1.27161508e-02 ... -8.42326432e-02 -8.92914087e-02 4.46031950e-02] ... [ 8.43519941e-02 6.33650832e-03 -2.89802663e-02 ... 3.39596644e-02 1.67565923e-02 6.90670460e-02] [ 3.72662097e-02 7.99474865e-02 -6.02685800e-03 ... 3.12929638e-02 -4.24484685e-02 -1.32856920e-01] [-7.43127093e-02 1.17729316e-02 -1.75285563e-02 ... -1.91814024e-02 -1.80297717e-02 -8.24498460e-02]] [[-2.62909364e-02 -4.22513932e-02 -1.08566694e-02 ... -6.34760112e-02 3.04160379e-02 1.52064478e-02] [-6.07825778e-02 1.95533894e-02 -2.93779019e-02 ... -3.64203677e-02 -1.60922986e-02 -8.51063803e-02] [-9.94230900e-03 -3.41820642e-02 -8.50423984e-03 ... -9.93375108e-02 7.48335477e-03 -6.18439242e-02] ... [ 3.66704091e-02 -3.98885310e-02 1.12265795e-01 ... 
3.61644477e-02 4.24341895e-02 -6.77447245e-02] [-4.93455529e-02 6.08616360e-02 -2.21978370e-02 ... 1.05248086e-01 1.61455944e-01 -1.64539218e-02] [ 1.20240115e-01 -5.50851822e-02 -5.82066551e-02 ... -5.71187176e-02 4.51262258e-02 1.59834754e-02]]] [[[ 1.32995829e-01 2.95338005e-01 1.32375345e-01 ... 4.97132875e-02 -9.57855210e-02 4.03749943e-01] [ 1.84046388e-01 9.32412371e-02 -6.76719397e-02 ... -4.93210368e-02 1.02740832e-01 6.09183013e-02] [ 1.04307294e-01 -1.52385399e-01 -1.85231790e-01 ... 3.18896532e-01 -5.53589799e-02 -2.93329246e-02] ... [ 1.47289976e-01 1.96970478e-01 5.47146723e-02 ... 6.48794509e-03 -1.84946358e-01 4.33205068e-01] [ 6.57026097e-02 3.12139332e-01 -7.87181482e-02 ... 1.17283568e-01 -2.66843915e-01 2.13081717e-01] [-1.11544713e-01 -6.93894401e-02 -2.01927736e-01 ... -1.11345142e-01 -1.10476434e-01 -2.67757893e-01]] [[ 8.38483647e-02 -1.72397584e-01 -5.88153362e-01 ... 3.89648862e-02 1.85209870e-01 -1.15741968e-01] [ 1.37129739e-01 1.89070433e-01 8.55601206e-02 ... 7.58385360e-02 -1.06909156e-01 -5.32951713e-01] [ 4.31704432e-01 7.18780234e-02 -1.91665724e-01 ... -6.40198663e-02 9.10985619e-02 1.42913699e-01] ... [ 4.87843633e-01 -6.53716087e-01 1.97545767e-01 ... -5.38658679e-01 -4.13652599e-01 4.44479853e-01] [-2.37102926e-01 -6.27673626e-01 -4.72825289e-01 ... 4.60594147e-01 1.38988078e-01 7.17876434e-01] [ 3.54516149e-01 2.58796662e-01 1.16377138e-01 ... 3.91681552e-01 -5.37944198e-01 -7.22908452e-02]] [[ 3.06002468e-01 6.39993232e-04 4.07272696e-01 ... -6.79688931e-01 -1.75628781e-01 -2.72381846e-02] [-3.92754614e-01 1.94149524e-01 -1.37723207e-01 ... 1.80787787e-01 1.61603808e-01 3.87805462e-01] [ 3.68486375e-01 3.73972625e-01 4.41650778e-01 ... 2.23057985e-01 -5.25577903e-01 -5.71305454e-01] ... [ 5.74156761e-01 1.40361249e-01 -8.15859139e-02 ... -5.26539348e-02 -4.21793669e-01 1.11748345e-01] [ 2.25998312e-01 -2.05010772e-01 -4.40247715e-01 ... 
-7.83400595e-01 -3.14616024e-01 2.26681978e-01] [-4.36555833e-01 -4.03981775e-01 -3.20783615e-01 ... 1.11005090e-01 2.10205749e-01 -3.81488986e-02]] [[ 1.84347296e+00 1.53104317e+00 -2.51346254e+00 ... -1.74746144e+00 -3.29837084e+00 -3.33152676e+00] [-8.38754117e-01 -2.40365243e+00 -2.29806352e+00 ... 1.07015216e+00 6.04349494e-01 -3.55308604e+00] [-1.26505566e+00 -4.53408957e+00 -2.69278264e+00 ... -1.44253564e+00 7.65305996e-01 -1.30638969e+00] ... [-8.63819420e-01 1.16854322e+00 1.53896797e+00 ... -1.83850920e+00 -8.67712438e-01 -1.06065524e+00] [ 5.44256508e-01 -2.20285267e-01 6.75656617e-01 ... 2.03739658e-01 -5.97660542e-01 2.46761990e+00] [-2.23124194e+00 1.99769318e+00 2.05065489e+00 ... 2.45820808e+00 -2.69907427e+00 1.64212012e+00]] [[ 9.17274412e-03 1.83660965e-02 7.43963802e-03 ... -6.85550570e-02 -2.36916523e-02 1.16312671e-02] [-4.69675697e-02 -1.39183197e-02 -4.44021299e-02 ... 8.90633091e-02 -1.30871227e-02 6.40882477e-02] [-8.35438967e-02 -3.95087749e-02 6.57103732e-02 ... 3.17318439e-02 4.48942408e-02 -3.25029083e-02] ... [-3.87020186e-02 4.33699265e-02 2.76240166e-02 ... 1.09888194e-02 1.41951069e-01 -5.52268773e-02] [ 5.83029576e-02 4.27234322e-02 -2.35663280e-02 ... 3.04754097e-02 -8.84615537e-03 2.77636433e-03] [-2.04283791e-03 -4.39464301e-02 -8.28681588e-02 ... 1.64730884e-02 -5.67845963e-02 -7.33504519e-02]] [[ 9.48335510e-04 -1.21411666e-01 5.20898774e-02 ... 2.90826075e-02 -2.90398393e-02 -2.53154873e-03] [-6.50368631e-02 6.19771592e-02 3.87612917e-02 ... 8.56015086e-03 6.63813204e-03 -2.57566925e-02] [ 1.55300736e-01 -3.99412848e-02 6.16736226e-02 ... -1.16424143e-01 6.99467510e-02 4.57093120e-02] ... [-4.99573946e-02 8.60962570e-02 3.80324833e-02 ... 6.16708845e-02 7.07068220e-02 4.94375974e-02] [ 4.30738591e-02 -5.71289659e-02 -3.02964170e-02 ... -1.24024712e-01 -5.67375449e-03 1.90249077e-04] [-1.16510242e-01 -1.79779399e-02 -3.08181383e-02 ... 
1.68936625e-02 4.34585437e-02 6.47909045e-02]]] [[[-4.44423258e-02 8.72654915e-02 3.46612602e-01 ... -9.22644418e-03 1.51405081e-01 1.88793257e-01] [-5.73822260e-02 1.05441026e-01 -1.07971907e-01 ... 1.16357408e-01 2.35230133e-01 -9.82413534e-03] [ 1.96818247e-01 -5.78509234e-02 1.46290109e-01 ... -4.00205702e-02 1.31044239e-01 2.74333149e-01] ... [-1.09507203e-01 4.97678697e-01 1.18622817e-02 ... 3.13226283e-01 -3.39536756e-01 1.21962689e-01] [-5.55222854e-02 1.26589999e-01 -1.28853947e-01 ... 2.27005959e-01 6.22953288e-02 1.04469419e-01] [-9.26471129e-02 4.29838002e-02 -1.39807686e-01 ... -4.15106505e-01 2.53912270e-01 1.01852670e-01]] [[-2.57522821e-01 -3.89689505e-01 3.79579097e-01 ... -2.04343542e-01 -2.28992164e-01 2.70274729e-01] [ 7.10841939e-02 -1.72305673e-01 -2.27032527e-01 ... 5.97619772e-01 -2.22451426e-02 -4.96797860e-02] [-6.66362107e-01 -1.94601551e-01 1.63223237e-01 ... -1.99642897e-01 -2.71667451e-01 -7.02051893e-02] ... [-3.19029599e-01 6.58582032e-01 2.75284853e-02 ... -4.12296742e-01 -2.63960928e-01 3.75045657e-01] [-2.15560839e-01 -3.59863341e-01 4.80443463e-02 ... -1.65676270e-02 2.49509111e-01 -1.41337529e-01] [ 5.39138578e-02 9.29088116e-01 -1.34689882e-01 ... -5.75024597e-02 2.82247424e-01 5.82276434e-02]] [[ 6.28141820e-01 -1.06333140e-02 1.08622879e-01 ... 4.44503367e-01 -3.92982304e-01 -4.10715342e-01] [-2.82340199e-01 4.89219546e-01 -3.61424059e-01 ... 3.18036050e-01 1.99957997e-01 2.02218428e-01] [ 1.95038825e-01 -1.59977004e-01 3.05125386e-01 ... -5.60181320e-01 2.05383375e-02 3.21757197e-02] ... [ 3.70230258e-01 -4.58132565e-01 -3.24139714e-01 ... 5.16900063e-01 -3.85165475e-02 -2.29699090e-01] [-4.81381297e-01 -1.92594245e-01 -2.68756878e-02 ... 2.39859462e-01 2.23331317e-01 8.70604962e-02] [-2.71388531e-01 -7.91965723e-02 4.56057429e-01 ... 1.04345374e-01 -2.91829646e-01 -5.14850244e-02]] [[-1.25436723e+00 1.84412301e+00 -1.75981835e-01 ... 
-1.97138834e+00 2.80737829e+00 -4.86056209e-01] [-1.90444756e+00 -2.56003290e-01 2.90226984e+00 ... 6.84933484e-01 2.03697705e+00 1.79680717e+00] [-3.47188413e-02 -5.38375139e-01 4.07305431e+00 ... -1.93181551e+00 3.17445326e+00 1.13166261e+00] ... [ 8.91549766e-01 9.41108525e-01 6.18887365e-01 ... 3.88796270e-01 1.96533728e+00 5.79891242e-02] [-2.95177698e-01 2.38301706e+00 -1.00136399e+00 ... 8.92427564e-02 -2.46681166e+00 -3.40718842e+00] [-3.97191495e-01 1.49040294e+00 1.13999140e+00 ... -3.43674421e+00 -1.53907084e+00 3.80922842e+00]] [[-1.02385841e-01 -2.70854291e-02 3.69923078e-02 ... 1.89755503e-02 2.81607769e-02 -8.63331854e-02] [ 1.89514291e-02 9.74248163e-03 9.93110426e-03 ... -4.64545153e-02 -3.33270840e-02 6.02408797e-02] [-3.06103285e-02 -5.64927608e-02 1.06023170e-01 ... -3.70420739e-02 -1.17387539e-02 -8.23172480e-02] ... [ 8.86247400e-03 5.85194752e-02 4.28119674e-02 ... 1.62771530e-02 -1.94755904e-02 2.75290646e-02] [-2.35798545e-02 3.34553272e-02 -4.49844003e-02 ... -2.17933208e-02 -4.21579704e-02 -7.47526530e-03] [ 8.04377869e-02 1.15952320e-01 5.83327375e-02 ... 4.38446067e-02 -1.45228719e-03 2.18323302e-02]] [[ 5.07886782e-02 2.29495596e-02 -3.66069190e-03 ... -1.26251236e-01 1.73975211e-02 9.95223299e-02] [ 2.65605859e-02 9.31646228e-02 1.00209806e-02 ... 3.44312713e-02 1.31992269e-02 1.62513882e-01] [ 7.21753091e-02 9.17811766e-02 -8.66324082e-02 ... 8.85330141e-02 3.68941552e-03 -1.47228315e-02] ... [ 2.95818299e-02 -1.15194255e-02 -3.65537852e-02 ... 4.30710707e-03 -1.17187258e-02 9.73896012e-02] [-5.84462248e-02 6.89646825e-02 3.49696055e-02 ... -5.45362271e-02 1.75452363e-02 -6.72531575e-02] [ 2.81188507e-02 5.53528070e-02 8.08302760e-02 ... -2.59576365e-02 1.16281070e-01 -4.57965955e-02]]] ... [[[-4.31127734e-02 2.11382076e-01 6.68129176e-02 ... -6.23356439e-02 -1.44351766e-01 3.33173186e-01] [ 2.31399447e-01 -4.08512317e-02 -2.01148074e-02 ... 
1.14125401e-01 2.92323023e-01 -1.50003582e-01] [-3.74906510e-01 2.55514473e-01 -1.78090692e-01 ... 6.14902079e-01 3.06582004e-01 -7.68522993e-02] ... [ 1.77319109e-01 2.00468183e-01 -1.82198629e-01 ... 1.89071923e-01 4.16468643e-03 -7.69413114e-02] [ 7.80999511e-02 3.05756480e-01 1.72622744e-02 ... 1.01451784e-01 3.15249443e-01 -2.32624382e-01] [-6.65317029e-02 4.98357058e-01 -2.35268861e-01 ... -4.38163504e-02 -5.45242168e-02 -1.19851120e-02]] [[ 1.11934736e-01 -6.02334477e-02 8.29348028e-01 ... 2.61048257e-01 -7.35878706e-01 -2.20988438e-01] [ 6.44982219e-01 -1.69921398e-01 -2.64503479e-01 ... -2.13679671e-01 4.98374045e-01 -5.85328937e-01] [-7.92892352e-02 1.60108153e-02 -2.90441215e-01 ... 1.18281908e-01 1.04314573e-01 3.98875266e-01] ... [ 2.99999803e-01 -4.43365574e-01 -1.32037684e-01 ... -3.43411744e-01 -4.06440675e-01 3.11315387e-01] [ 1.21849529e-01 -2.23138146e-02 -1.02749377e-01 ... -1.91863254e-01 -4.19340789e-01 -8.17391098e-01] [-1.95598960e-01 -6.98178768e-01 -1.59198016e-01 ... 2.10209593e-01 2.92261422e-01 2.28176475e-01]] [[-1.13068573e-01 -1.63563430e-01 -4.14365567e-02 ... -4.93605286e-01 -3.99027318e-02 -1.75068855e-01] [-1.50014564e-01 9.23041403e-02 3.92746553e-02 ... 2.43556146e-02 7.91135788e-01 -8.98808166e-02] [ 4.28060740e-01 -2.28880122e-02 -5.50676212e-02 ... 1.26688685e-02 2.10724026e-02 2.70171314e-01] ... [ 1.85552627e-01 -1.75395042e-01 -1.10154882e-01 ... 3.58207136e-01 6.93239048e-02 3.28887731e-01] [ 6.30742252e-01 -2.72717737e-02 -1.19708166e-01 ... -1.01111427e-01 1.88686237e-01 -1.89247251e-01] [-2.81493664e-01 1.39614105e-01 6.88349754e-02 ... 2.99078971e-01 -5.18669486e-01 -4.37500536e-01]] [[ 1.41303420e+00 4.21340853e-01 5.46136796e-01 ... -7.20640838e-01 -2.20388103e+00 3.00813007e+00] [ 2.02215123e+00 3.25935900e-01 1.83034146e+00 ... -2.01907182e+00 3.62718582e+00 -1.04888570e+00] [ 1.65644133e+00 -8.40985358e-01 -2.21116185e+00 ... 9.06271279e-01 4.49074835e-01 9.04175714e-02] ... 
[ 9.26453412e-01 2.48057812e-01 5.84805548e-01 ... -6.52804041e+00 -1.66064525e+00 2.78698206e-01] [ 8.62572372e-01 1.58041811e+00 -1.33231938e+00 ... 3.61729097e+00 -5.67258716e-01 -1.35002720e+00] [-1.63429391e+00 -1.16956151e+00 -1.09570348e+00 ... -1.94988954e+00 -5.35842121e-01 -3.02678764e-01]] [[ 1.11832041e-02 5.37444279e-02 -1.72495916e-02 ... 4.79410887e-02 -2.47962400e-02 4.09114435e-02] [ 7.71255866e-02 -8.10962170e-02 -3.93946655e-02 ... 2.98522525e-02 -3.67093123e-02 -1.59183273e-03] [ 1.30235637e-02 8.90477598e-02 7.36121982e-02 ... 4.96962629e-02 -7.88671896e-02 1.54199833e-02] ... [ 1.90757345e-02 -5.49890287e-02 -4.46056165e-02 ... -2.71002054e-02 4.09293827e-03 -1.28335170e-02] [ 5.56494622e-03 -4.41653095e-03 2.05273870e-02 ... 2.89115030e-02 -4.73490916e-03 5.28064109e-02] [-1.87527929e-02 -1.87923945e-02 -8.63536610e-04 ... 5.22970855e-02 6.14763871e-02 6.16397709e-02]] [[ 2.95439400e-02 -2.19778139e-02 -9.08996724e-03 ... 4.23970371e-02 -1.30104244e-01 5.99771598e-03] [ 7.15321749e-02 -3.21615450e-02 -7.42503479e-02 ... 4.32949066e-02 1.22077785e-01 6.51972601e-04] [ 5.41754588e-02 -1.38687929e-02 -3.63229476e-02 ... -1.05368957e-01 -7.64668733e-02 -8.69987160e-02] ... [-7.49258325e-03 -5.79063818e-02 -1.04135741e-02 ... 1.02328425e-02 1.07275173e-01 -2.07086373e-02] [-5.20554595e-02 -1.05647952e-03 3.14387828e-02 ... 1.30750779e-02 -4.76276688e-02 -9.25755352e-02] [ 3.27298068e-03 1.28875330e-01 4.02276814e-02 ... -1.12867923e-02 -1.68137881e-03 -7.58300826e-04]]] [[[ 1.88848972e-01 -3.50236535e-01 4.23726857e-01 ... -2.45151386e-01 2.21869707e-01 -2.83168573e-02] [ 1.72488406e-01 -1.18510984e-01 -4.21918556e-02 ... -1.29976213e-01 1.46501303e-01 9.66124460e-02] [ 7.23550394e-02 -1.02395937e-02 -1.16572708e-01 ... 7.37894922e-02 2.71104932e-01 -5.70209771e-02] ... [-9.15315282e-03 -1.37604520e-01 2.96470989e-02 ... 1.11942045e-01 1.38140082e-01 -2.15788454e-01] [-1.05711214e-01 5.58762439e-02 1.04893267e-01 ... 
-1.79591015e-01 -1.45575970e-01 3.81004822e-04] [-8.29810426e-02 2.19384372e-01 1.59796610e-01 ... -3.77642624e-02 -1.61595136e-01 -1.85037740e-02]] [[ 3.57431285e-02 -3.12101096e-01 2.35195562e-01 ... 5.94523311e-01 8.65498036e-02 3.83292049e-01] [-5.20842195e-01 3.43080074e-01 3.27776931e-02 ... -5.19673169e-01 -9.21762735e-02 6.23896897e-01] [-1.92608684e-01 8.55214000e-02 2.83451945e-01 ... 1.69797480e-01 2.67176256e-02 3.15964252e-01] ... [ 2.83723652e-01 2.66037494e-01 -1.96457371e-01 ... 7.55925104e-02 4.21193033e-01 -8.05198133e-01] [-2.10163882e-03 3.32830191e-01 -2.98321515e-01 ... 5.64255655e-01 -3.22956026e-01 1.43578306e-01] [-6.02390766e-01 3.38001102e-01 4.37666744e-01 ... -2.18323141e-01 8.22029352e-01 2.08608240e-01]] [[-8.48741829e-01 3.53594095e-01 1.98972180e-01 ... -9.33992192e-02 -4.68056649e-01 1.06521046e+00] [ 5.86990476e-01 3.75922859e-01 1.63112730e-01 ... 8.93150046e-02 1.05098791e-01 -2.03707308e-01] [ 1.06004007e-01 -3.86454388e-02 -1.61579698e-01 ... -2.44647563e-02 -3.00681710e-01 2.88344145e-01] ... [ 3.56590122e-01 1.22296438e-01 8.67450703e-03 ... 5.07267937e-02 -3.07094723e-01 6.58311069e-01] [ 1.03886336e-01 -1.92378923e-01 -5.89241013e-02 ... -9.38691348e-02 -2.40731657e-01 -3.32332551e-01] [ 9.49532241e-02 3.92918319e-01 -3.36597621e-01 ... -4.63329703e-01 -5.07038198e-02 2.86483854e-01]] [[ 2.13533783e+00 -2.61894727e+00 -3.93467736e+00 ... -9.66202617e-01 -1.72625375e+00 -2.78012776e+00] [-4.03181732e-01 6.83336675e-01 1.60709284e-02 ... -3.01764297e+00 1.21258032e+00 -4.95438337e+00] [-2.02573210e-01 5.48190176e-01 -2.43173289e+00 ... -1.53672493e+00 3.09667706e-01 -1.64411366e+00] ... [-7.73914695e-01 2.05301642e-01 1.19754583e-01 ... 5.90409577e-01 -8.02696049e-01 -8.27741981e-01] [ 2.54603720e+00 2.53639221e+00 4.85224867e+00 ... -4.09996319e+00 3.98391575e-01 -1.39770940e-01] [ 2.07194552e-01 -1.72531828e-01 -1.43968236e+00 ... 
2.38814801e-01 3.30088687e+00 -7.56842613e-01]] [[ 1.04311630e-01 -2.08847667e-03 1.59660615e-02 ... -8.08356330e-02 -9.07242373e-02 -1.10546546e-02] [ 4.02983911e-02 -2.42293086e-02 -4.71839197e-02 ... -1.27229596e-05 -6.85051829e-02 2.05256976e-02] [ 5.03199510e-02 4.58061509e-03 -8.16645473e-02 ... -1.07144089e-02 3.23666818e-02 4.31907997e-02] ... [-3.51567031e-03 1.17652155e-01 -1.82564848e-03 ... -1.80512741e-02 4.30491827e-02 4.27611824e-03] [-6.58786595e-02 1.39450699e-01 1.16475038e-02 ... 1.06271068e-02 -8.09719265e-02 1.22545268e-02] [-6.19304776e-02 5.87654412e-02 -4.24143597e-02 ... -8.19891039e-03 -2.65641175e-02 5.17752282e-02]] [[-1.12175882e-01 1.87039934e-02 1.34642813e-02 ... -6.57193735e-02 -3.70920412e-02 2.49528848e-02] [ 6.02079788e-03 -8.79652575e-02 -4.13381914e-03 ... 9.78827197e-03 -8.97011813e-03 -8.36733133e-02] [ 1.53799728e-02 -2.53034662e-02 -2.89007137e-03 ... 2.44167037e-02 9.17655230e-02 -1.03400648e-02] ... [ 2.98820785e-03 -5.26498631e-02 3.16068083e-02 ... -7.84137771e-02 -1.08774267e-02 7.65199810e-02] [ 8.25523213e-02 6.78423122e-02 3.85845266e-02 ... 1.36080861e-01 -8.22592005e-02 -2.18340056e-03] [-2.46274676e-02 -8.01737010e-02 -5.58295213e-02 ... 4.09402251e-02 1.05367646e-01 -1.81996614e-01]]] [[[ 1.06880128e-01 -3.37759167e-01 5.63001096e-01 ... 8.96985829e-02 4.93617877e-02 -8.78302287e-03] [ 1.78933099e-01 -5.22449389e-02 -3.46527696e-02 ... -1.88817978e-01 2.12315828e-01 -3.24013144e-01] [ 3.29948872e-01 3.41653287e-01 -3.29480432e-02 ... 3.36303450e-02 3.84384036e-01 -8.71944800e-02] ... [ 2.60378987e-01 -1.26333013e-01 -5.64595520e-01 ... 7.24782869e-02 1.24039754e-01 2.30068132e-01] [ 2.11363614e-01 -8.20356980e-02 2.59081811e-01 ... 3.10041662e-02 -1.83630154e-01 -4.27738279e-02] [ 1.27706990e-01 2.13183641e-01 -1.65504888e-01 ... 2.29810495e-02 2.66069323e-01 1.32985160e-01]] [[-7.45267153e-01 -1.83616608e-01 1.94178507e-01 ... 
-1.27270386e-01 3.72040987e-01 1.42502144e-01] [-2.46515051e-01 4.58028883e-01 5.70638299e-01 ... 4.62970912e-01 3.60496372e-01 9.21675935e-02] [ 1.59835652e-01 2.99337566e-01 -6.49683118e-01 ... -5.93252063e-01 -1.86376512e-01 8.87181461e-02] ... [ 6.39577925e-01 -5.77790618e-01 6.87804759e-01 ... 2.42145732e-01 3.45049053e-01 -4.00650799e-01] [ 2.07816333e-01 -2.10264072e-01 5.56665100e-02 ... 1.36646833e-02 -4.54953223e-01 3.40961456e-01] [ 4.66219299e-02 -1.25539020e-01 4.56951652e-03 ... -5.24158895e-01 -4.43896264e-01 -8.16200793e-01]] [[-7.23810554e-01 1.99598223e-01 3.45959127e-01 ... 1.03743054e-01 -3.03120822e-01 2.53426284e-01] [-1.40229091e-01 -3.15386653e-01 5.64280339e-02 ... -4.37334925e-02 7.87311256e-01 -3.41380388e-01] [ 3.86880398e-01 2.70148516e-01 -2.40152419e-01 ... -5.00190198e-01 -1.42004445e-01 2.26666301e-01] ... [ 4.61898923e-01 -8.87213051e-02 4.63637739e-01 ... 9.78728458e-02 5.46690047e-01 -1.62980005e-01] [ 2.43263528e-01 -4.78071839e-01 2.45730132e-01 ... -3.19299370e-01 -7.20189214e-02 1.87487781e-01] [ 6.92465454e-02 -1.79360688e-01 -2.33181745e-01 ... -8.13281357e-01 7.90494028e-03 1.40010258e-02]] [[ 5.12994677e-02 2.27715302e+00 -6.26399279e-01 ... -1.69082725e+00 2.26350769e-01 1.43880749e+00] [-3.04793143e+00 1.64282250e+00 5.79191804e-01 ... 7.12747812e-01 1.15148938e+00 9.47983205e-01] [ 1.44904935e+00 -1.16854481e-01 -9.92148742e-02 ... 2.19613481e+00 -1.40003824e+00 -3.41207290e+00] ... [ 1.83982539e+00 1.05527890e+00 1.78245294e+00 ... -2.08180571e+00 -1.65054929e+00 -1.79703641e+00] [-1.89437187e+00 3.62595057e+00 2.49778843e+00 ... 2.15931877e-01 -2.29578900e+00 1.21577322e+00] [-9.60423887e-01 -1.07615674e+00 -1.51109975e-02 ... -1.15445197e+00 3.78999019e+00 1.00845560e-01]] [[-1.26784574e-02 -3.06767710e-02 -6.23297468e-02 ... 2.17567515e-02 -1.59713309e-02 -8.14027116e-02] [ 5.42759150e-02 4.27673757e-02 -1.11358846e-02 ... 
-3.87452617e-02 1.99097861e-02 -2.96636336e-02] [ 5.39356694e-02 5.92226274e-02 -3.39856297e-02 ... -1.75597367e-03 -3.10728420e-03 -3.20697278e-02] ... [ 4.71993946e-02 -5.82639575e-02 -9.67082083e-02 ... -7.71738961e-03 4.18356508e-02 -2.48818211e-02] [ 3.41556454e-03 7.02890307e-02 2.94653210e-03 ... -5.03262877e-02 2.06545014e-02 8.40041786e-03] [ 2.47792099e-02 5.99619374e-02 -8.70190375e-03 ... 3.38877328e-02 -6.86307997e-02 2.76262201e-02]] [[-5.95804863e-02 -2.33694334e-02 3.41881327e-02 ... 1.02174237e-01 2.80269515e-02 -8.06520060e-02] [-5.07772490e-02 2.58682035e-02 -1.66804641e-01 ... -5.63336685e-02 -4.01983745e-02 -4.74275649e-02] [-1.32372424e-01 7.44867399e-02 -4.86330576e-02 ... 2.53093021e-04 -2.85185110e-02 -8.50098487e-03] ... [ 1.67719033e-02 -2.95215342e-02 -5.94393909e-02 ... -8.38267151e-04 -5.15339002e-02 -8.61445144e-02] [-9.38554853e-02 -2.01134682e-02 -7.39451945e-02 ... -2.89095864e-02 3.11736558e-02 8.22315663e-02] [-3.21594700e-02 1.45739941e-02 1.18771568e-03 ... -6.34362102e-02 -6.49212301e-02 -6.30584136e-02]]]]; ov_res: [[[[-1.54732406e-01 1.18683018e-01 -3.54699232e-02 ... -9.76459980e-02 -3.79891805e-02 7.12204278e-02] [-1.86173484e-01 4.27651079e-03 -8.51177350e-02 ... 2.35768378e-01 -2.52746314e-01 3.66555639e-02] [ 3.49705994e-01 -1.19059578e-01 2.56829020e-02 ... -5.71580119e-02 -1.26094416e-01 1.22257046e-01] ... [ 8.72188583e-02 -2.25696146e-01 3.20912153e-02 ... -5.06302118e-01 -1.41211331e-01 -3.52757663e-01] [ 6.24137111e-02 3.51862423e-02 1.49357036e-01 ... 2.79045552e-02 2.20150918e-01 2.66417325e-01] [-4.63175774e-01 -1.43904360e-02 8.21223482e-02 ... -2.97689408e-01 1.06188260e-01 1.11205034e-01]] [[-9.32783484e-01 8.85355845e-02 -4.12276201e-02 ... 4.66426730e-01 -5.22584505e-02 -3.08059659e-02] [ 8.72911513e-02 -6.01670086e-01 7.58520007e-01 ... 3.89706679e-02 1.80124506e-01 -4.28308249e-01] [-3.69628519e-01 1.74301732e-02 2.61616021e-01 ... 4.74174231e-01 -3.92525256e-01 5.14706790e-01] ... 
[-3.25648695e-01 5.51193595e-01 7.98298895e-01 ... -5.49315989e-01 1.04659605e+00 2.74623930e-01] [-2.68544465e-01 -7.81665027e-01 1.89064667e-01 ... 2.42271170e-01 8.04277480e-01 -1.29221812e-01] [ 8.72818112e-01 7.65731633e-01 -1.22721389e-01 ... -2.98530221e-01 2.76500702e-01 -9.82522443e-02]] [[-1.15975663e-02 -3.42242986e-01 -2.00076222e-01 ... 1.01342000e-01 1.61280081e-01 5.43207705e-01] [ 1.72038421e-01 -4.57075566e-01 -1.16339125e-01 ... -3.02513570e-01 -1.66484639e-02 7.30567947e-02] [-1.61260903e-01 7.11836740e-02 -4.01363879e-01 ... 1.71478271e-01 -2.36403029e-02 -1.55233890e-01] ... [ 2.05135718e-01 -3.60159092e-02 9.16478112e-02 ... 4.80728239e-01 8.91109854e-02 2.77680278e-01] [ 1.46009438e-02 1.10917807e-01 -2.73912162e-01 ... 1.56878665e-01 -4.55918878e-01 -1.40757680e-01] [-2.45332494e-01 -4.88630533e-01 -1.01155058e-01 ... 3.68956625e-02 4.84012604e-01 1.02640197e-01]] [[-4.17001918e-03 4.11984968e+00 -3.37614989e+00 ... -1.15174711e+00 -2.10525537e+00 1.21923602e+00] [ 2.40587807e+00 -1.44964647e+00 -1.13172138e+00 ... 1.65750468e+00 6.53262198e-01 6.23799324e-01] [ 6.76094055e-01 -4.48309869e-01 2.32732511e+00 ... -5.32614529e-01 -4.80615473e+00 5.59543037e+00] ... [ 2.29034662e+00 5.97925842e-01 -1.34917903e+00 ... 1.54589641e+00 5.73574960e-01 4.57490206e-01] [ 2.86493993e+00 3.68074983e-01 -5.37985601e-02 ... -2.22219944e+00 -2.07457423e-01 2.16189742e+00] [-6.99628055e-01 2.42559940e-01 3.42323470e+00 ... 4.34927797e+00 -1.62618959e+00 5.75223148e-01]] [[ 1.15839973e-01 -1.36955017e-02 2.44133454e-02 ... 8.89922604e-02 9.45146009e-02 2.86397059e-02] [-4.88293804e-02 4.57911901e-02 7.62418360e-02 ... -1.03492409e-01 -1.09311063e-02 -1.18854135e-01] [ 4.96155173e-02 -5.41111047e-04 -1.27161480e-02 ... -8.42326283e-02 -8.92913938e-02 4.46031913e-02] ... [ 8.43519792e-02 6.33650832e-03 -2.89802607e-02 ... 3.39596607e-02 1.67565905e-02 6.90670386e-02] [ 3.72662060e-02 7.99474791e-02 -6.02685614e-03 ... 
3.12929563e-02 -4.24484611e-02 -1.32856891e-01] [-7.43126944e-02 1.17729316e-02 -1.75285526e-02 ... -1.91814005e-02 -1.80297680e-02 -8.24498311e-02]] [[-2.62909327e-02 -4.22513895e-02 -1.08566685e-02 ... -6.34759963e-02 3.04160360e-02 1.52064469e-02] [-6.07825704e-02 1.95533875e-02 -2.93778982e-02 ... -3.64203677e-02 -1.60922967e-02 -8.51063654e-02] [-9.94230621e-03 -3.41820605e-02 -8.50423798e-03 ... -9.93374959e-02 7.48335477e-03 -6.18439130e-02] ... [ 3.66704017e-02 -3.98885272e-02 1.12265788e-01 ... 3.61644439e-02 4.24341857e-02 -6.77447096e-02] [-4.93455492e-02 6.08616322e-02 -2.21978333e-02 ... 1.05248086e-01 1.61455929e-01 -1.64539181e-02] [ 1.20240107e-01 -5.50851747e-02 -5.82066476e-02 ... -5.71187101e-02 4.51262221e-02 1.59834754e-02]]] [[[ 1.32995829e-01 2.95338005e-01 1.32375330e-01 ... 4.97132875e-02 -9.57855210e-02 4.03749943e-01] [ 1.84046388e-01 9.32412297e-02 -6.76719397e-02 ... -4.93210442e-02 1.02740817e-01 6.09182976e-02] [ 1.04307286e-01 -1.52385384e-01 -1.85231775e-01 ... 3.18896532e-01 -5.53589799e-02 -2.93329246e-02] ... [ 1.47289976e-01 1.96970463e-01 5.47146648e-02 ... 6.48794277e-03 -1.84946358e-01 4.33205068e-01] [ 6.57026023e-02 3.12139302e-01 -7.87181482e-02 ... 1.17283568e-01 -2.66843915e-01 2.13081732e-01] [-1.11544706e-01 -6.93894401e-02 -2.01927736e-01 ... -1.11345127e-01 -1.10476427e-01 -2.67757893e-01]] [[ 8.38483572e-02 -1.72397599e-01 -5.88153362e-01 ... 3.89648825e-02 1.85209855e-01 -1.15741976e-01] [ 1.37129739e-01 1.89070418e-01 8.55601206e-02 ... 7.58385286e-02 -1.06909163e-01 -5.32951713e-01] [ 4.31704402e-01 7.18780160e-02 -1.91665709e-01 ... -6.40198737e-02 9.10985544e-02 1.42913684e-01] ... [ 4.87843573e-01 -6.53716087e-01 1.97545767e-01 ... -5.38658619e-01 -4.13652629e-01 4.44479823e-01] [-2.37102911e-01 -6.27673626e-01 -4.72825319e-01 ... 4.60594118e-01 1.38988063e-01 7.17876434e-01] [ 3.54516119e-01 2.58796662e-01 1.16377130e-01 ... 
3.91681582e-01 -5.37944257e-01 -7.22908527e-02]] [[ 3.06002468e-01 6.39996550e-04 4.07272696e-01 ... -6.79688931e-01 -1.75628766e-01 -2.72381809e-02] [-3.92754614e-01 1.94149524e-01 -1.37723193e-01 ... 1.80787772e-01 1.61603794e-01 3.87805492e-01] [ 3.68486375e-01 3.73972625e-01 4.41650778e-01 ... 2.23057970e-01 -5.25577843e-01 -5.71305454e-01] ... [ 5.74156761e-01 1.40361249e-01 -8.15859064e-02 ... -5.26539236e-02 -4.21793640e-01 1.11748338e-01] [ 2.25998282e-01 -2.05010772e-01 -4.40247685e-01 ... -7.83400536e-01 -3.14616024e-01 2.26681963e-01] [-4.36555803e-01 -4.03981745e-01 -3.20783615e-01 ... 1.11005090e-01 2.10205734e-01 -3.81488949e-02]] [[ 1.84347296e+00 1.53104317e+00 -2.51346254e+00 ... -1.74746144e+00 -3.29837108e+00 -3.33152676e+00] [-8.38754117e-01 -2.40365243e+00 -2.29806352e+00 ... 1.07015228e+00 6.04349554e-01 -3.55308604e+00] [-1.26505578e+00 -4.53408957e+00 -2.69278288e+00 ... -1.44253564e+00 7.65306056e-01 -1.30638981e+00] ... [-8.63819420e-01 1.16854310e+00 1.53896797e+00 ... -1.83850932e+00 -8.67712438e-01 -1.06065512e+00] [ 5.44256568e-01 -2.20285237e-01 6.75656676e-01 ... 2.03739688e-01 -5.97660542e-01 2.46761990e+00] [-2.23124170e+00 1.99769318e+00 2.05065489e+00 ... 2.45820808e+00 -2.69907451e+00 1.64212012e+00]] [[ 9.17274505e-03 1.83660965e-02 7.43963849e-03 ... -6.85550570e-02 -2.36916523e-02 1.16312681e-02] [-4.69675697e-02 -1.39183188e-02 -4.44021262e-02 ... 8.90633091e-02 -1.30871227e-02 6.40882477e-02] [-8.35438892e-02 -3.95087749e-02 6.57103732e-02 ... 3.17318439e-02 4.48942408e-02 -3.25029083e-02] ... [-3.87020186e-02 4.33699265e-02 2.76240166e-02 ... 1.09888194e-02 1.41951069e-01 -5.52268773e-02] [ 5.83029576e-02 4.27234285e-02 -2.35663261e-02 ... 3.04754060e-02 -8.84615444e-03 2.77636503e-03] [-2.04283698e-03 -4.39464301e-02 -8.28681588e-02 ... 1.64730884e-02 -5.67845926e-02 -7.33504519e-02]] [[ 9.48336499e-04 -1.21411659e-01 5.20898774e-02 ... 
2.90826093e-02 -2.90398393e-02 -2.53154757e-03] [-6.50368631e-02 6.19771630e-02 3.87612917e-02 ... 8.56015086e-03 6.63813343e-03 -2.57566907e-02] [ 1.55300751e-01 -3.99412848e-02 6.16736226e-02 ... -1.16424143e-01 6.99467584e-02 4.57093120e-02] ... [-4.99573946e-02 8.60962570e-02 3.80324833e-02 ... 6.16708882e-02 7.07068220e-02 4.94375974e-02] [ 4.30738591e-02 -5.71289696e-02 -3.02964170e-02 ... -1.24024704e-01 -5.67375310e-03 1.90250081e-04] [-1.16510242e-01 -1.79779399e-02 -3.08181383e-02 ... 1.68936644e-02 4.34585437e-02 6.47909045e-02]]] [[[-4.44423221e-02 8.72654915e-02 3.46612602e-01 ... -9.22644045e-03 1.51405081e-01 1.88793257e-01] [-5.73822260e-02 1.05441034e-01 -1.07971907e-01 ... 1.16357416e-01 2.35230133e-01 -9.82413068e-03] [ 1.96818262e-01 -5.78509234e-02 1.46290123e-01 ... -4.00205664e-02 1.31044254e-01 2.74333149e-01] ... [-1.09507203e-01 4.97678727e-01 1.18622864e-02 ... 3.13226283e-01 -3.39536786e-01 1.21962696e-01] [-5.55222817e-02 1.26590014e-01 -1.28853962e-01 ... 2.27005973e-01 6.22953288e-02 1.04469426e-01] [-9.26471204e-02 4.29838076e-02 -1.39807686e-01 ... -4.15106505e-01 2.53912300e-01 1.01852678e-01]] [[-2.57522792e-01 -3.89689505e-01 3.79579127e-01 ... -2.04343528e-01 -2.28992164e-01 2.70274729e-01] [ 7.10842088e-02 -1.72305688e-01 -2.27032527e-01 ... 5.97619772e-01 -2.22451352e-02 -4.96797822e-02] [-6.66362107e-01 -1.94601566e-01 1.63223237e-01 ... -1.99642897e-01 -2.71667451e-01 -7.02051818e-02] ... [-3.19029599e-01 6.58582032e-01 2.75284927e-02 ... -4.12296742e-01 -2.63960928e-01 3.75045687e-01] [-2.15560839e-01 -3.59863341e-01 4.80443537e-02 ... -1.65676195e-02 2.49509111e-01 -1.41337529e-01] [ 5.39138690e-02 9.29088175e-01 -1.34689882e-01 ... -5.75024560e-02 2.82247424e-01 5.82276508e-02]] [[ 6.28141820e-01 -1.06333215e-02 1.08622879e-01 ... 4.44503367e-01 -3.92982334e-01 -4.10715342e-01] [-2.82340199e-01 4.89219546e-01 -3.61424059e-01 ... 
3.18036050e-01 1.99957997e-01 2.02218443e-01] [ 1.95038825e-01 -1.59977019e-01 3.05125386e-01 ... -5.60181320e-01 2.05383301e-02 3.21757123e-02] ... [ 3.70230287e-01 -4.58132565e-01 -3.24139714e-01 ... 5.16900063e-01 -3.85165587e-02 -2.29699105e-01] [-4.81381297e-01 -1.92594260e-01 -2.68756971e-02 ... 2.39859477e-01 2.23331317e-01 8.70604962e-02] [-2.71388531e-01 -7.91965798e-02 4.56057429e-01 ... 1.04345381e-01 -2.91829675e-01 -5.14850356e-02]] [[-1.25436711e+00 1.84412313e+00 -1.75981864e-01 ... -1.97138834e+00 2.80737829e+00 -4.86056238e-01] [-1.90444767e+00 -2.56003320e-01 2.90226984e+00 ... 6.84933543e-01 2.03697729e+00 1.79680729e+00] [-3.47188488e-02 -5.38375139e-01 4.07305431e+00 ... -1.93181551e+00 3.17445326e+00 1.13166273e+00] ... [ 8.91549826e-01 9.41108644e-01 6.18887424e-01 ... 3.88796270e-01 1.96533728e+00 5.79891168e-02] [-2.95177728e-01 2.38301706e+00 -1.00136399e+00 ... 8.92427638e-02 -2.46681166e+00 -3.40718865e+00] [-3.97191525e-01 1.49040294e+00 1.13999152e+00 ... -3.43674469e+00 -1.53907096e+00 3.80922842e+00]] [[-1.02385841e-01 -2.70854272e-02 3.69923078e-02 ... 1.89755503e-02 2.81607769e-02 -8.63331854e-02] [ 1.89514291e-02 9.74248163e-03 9.93110426e-03 ... -4.64545153e-02 -3.33270840e-02 6.02408797e-02] [-3.06103285e-02 -5.64927608e-02 1.06023170e-01 ... -3.70420739e-02 -1.17387539e-02 -8.23172554e-02] ... [ 8.86247400e-03 5.85194677e-02 4.28119674e-02 ... 1.62771530e-02 -1.94755904e-02 2.75290627e-02] [-2.35798527e-02 3.34553309e-02 -4.49844003e-02 ... -2.17933189e-02 -4.21579666e-02 -7.47526530e-03] [ 8.04377794e-02 1.15952328e-01 5.83327301e-02 ... 4.38446067e-02 -1.45228743e-03 2.18323302e-02]] [[ 5.07886782e-02 2.29495578e-02 -3.66069213e-03 ... -1.26251236e-01 1.73975211e-02 9.95223299e-02] [ 2.65605841e-02 9.31646228e-02 1.00209806e-02 ... 3.44312713e-02 1.31992269e-02 1.62513882e-01] [ 7.21753016e-02 9.17811766e-02 -8.66324082e-02 ... 8.85330066e-02 3.68941552e-03 -1.47228306e-02] ... 
[ 2.95818299e-02 -1.15194255e-02 -3.65537815e-02 ... 4.30710660e-03 -1.17187258e-02 9.73895937e-02] [-5.84462285e-02 6.89646751e-02 3.49696055e-02 ... -5.45362271e-02 1.75452381e-02 -6.72531575e-02] [ 2.81188507e-02 5.53528070e-02 8.08302686e-02 ... -2.59576365e-02 1.16281077e-01 -4.57965955e-02]]] ... [[[-4.31127697e-02 2.11382076e-01 6.68129101e-02 ... -6.23356365e-02 -1.44351751e-01 3.33173156e-01] [ 2.31399447e-01 -4.08512242e-02 -2.01148055e-02 ... 1.14125378e-01 2.92322963e-01 -1.50003567e-01] [-3.74906451e-01 2.55514473e-01 -1.78090677e-01 ... 6.14902020e-01 3.06581974e-01 -7.68522918e-02] ... [ 1.77319080e-01 2.00468153e-01 -1.82198599e-01 ... 1.89071909e-01 4.16468596e-03 -7.69413039e-02] [ 7.80999437e-02 3.05756450e-01 1.72622725e-02 ... 1.01451769e-01 3.15249413e-01 -2.32624352e-01] [-6.65316954e-02 4.98356998e-01 -2.35268831e-01 ... -4.38163467e-02 -5.45242131e-02 -1.19851111e-02]] [[ 1.11934729e-01 -6.02334440e-02 8.29348028e-01 ... 2.61048257e-01 -7.35878587e-01 -2.20988423e-01] [ 6.44982219e-01 -1.69921383e-01 -2.64503479e-01 ... -2.13679656e-01 4.98374045e-01 -5.85328877e-01] [-7.92892352e-02 1.60108134e-02 -2.90441185e-01 ... 1.18281886e-01 1.04314551e-01 3.98875237e-01] ... [ 2.99999774e-01 -4.43365514e-01 -1.32037684e-01 ... -3.43411714e-01 -4.06440645e-01 3.11315358e-01] [ 1.21849515e-01 -2.23138127e-02 -1.02749370e-01 ... -1.91863239e-01 -4.19340730e-01 -8.17391038e-01] [-1.95598945e-01 -6.98178709e-01 -1.59198001e-01 ... 2.10209578e-01 2.92261392e-01 2.28176445e-01]] [[-1.13068558e-01 -1.63563401e-01 -4.14365493e-02 ... -4.93605226e-01 -3.99027281e-02 -1.75068840e-01] [-1.50014549e-01 9.23041329e-02 3.92746516e-02 ... 2.43556108e-02 7.91135669e-01 -8.98808017e-02] [ 4.28060681e-01 -2.28880104e-02 -5.50676137e-02 ... 1.26688667e-02 2.10723989e-02 2.70171285e-01] ... [ 1.85552612e-01 -1.75394997e-01 -1.10154860e-01 ... 3.58207077e-01 6.93238974e-02 3.28887701e-01] [ 6.30742133e-01 -2.72717699e-02 -1.19708151e-01 ... 
-1.01111412e-01 1.88686207e-01 -1.89247221e-01] [-2.81493604e-01 1.39614090e-01 6.88349605e-02 ... 2.99078912e-01 -5.18669426e-01 -4.37500477e-01]] [[ 1.41303384e+00 4.21340734e-01 5.46136677e-01 ... -7.20640779e-01 -2.20388079e+00 3.00812960e+00] [ 2.02215099e+00 3.25935811e-01 1.83034110e+00 ... -2.01907182e+00 3.62718511e+00 -1.04888546e+00] [ 1.65644109e+00 -8.40985179e-01 -2.21116161e+00 ... 9.06271100e-01 4.49074775e-01 9.04175416e-02] ... [ 9.26453292e-01 2.48057753e-01 5.84805429e-01 ... -6.52803946e+00 -1.66064501e+00 2.78698117e-01] [ 8.62572193e-01 1.58041775e+00 -1.33231914e+00 ... 3.61729026e+00 -5.67258656e-01 -1.35002708e+00] [-1.63429368e+00 -1.16956139e+00 -1.09570336e+00 ... -1.94988930e+00 -5.35842061e-01 -3.02678764e-01]] [[ 1.11832023e-02 5.37444204e-02 -1.72495898e-02 ... 4.79410850e-02 -2.47962382e-02 4.09114361e-02] [ 7.71255791e-02 -8.10962096e-02 -3.93946618e-02 ... 2.98522469e-02 -3.67093123e-02 -1.59183296e-03] [ 1.30235618e-02 8.90477449e-02 7.36121833e-02 ... 4.96962555e-02 -7.88671747e-02 1.54199824e-02] ... [ 1.90757308e-02 -5.49890213e-02 -4.46056128e-02 ... -2.71002036e-02 4.09293734e-03 -1.28335170e-02] [ 5.56494528e-03 -4.41653049e-03 2.05273852e-02 ... 2.89114993e-02 -4.73490963e-03 5.28064072e-02] [-1.87527910e-02 -1.87923927e-02 -8.63536960e-04 ... 5.22970818e-02 6.14763834e-02 6.16397597e-02]] [[ 2.95439344e-02 -2.19778121e-02 -9.08996537e-03 ... 4.23970297e-02 -1.30104214e-01 5.99771459e-03] [ 7.15321600e-02 -3.21615413e-02 -7.42503330e-02 ... 4.32948992e-02 1.22077771e-01 6.51972136e-04] [ 5.41754514e-02 -1.38687911e-02 -3.63229439e-02 ... -1.05368942e-01 -7.64668584e-02 -8.69987011e-02] ... [-7.49258278e-03 -5.79063743e-02 -1.04135731e-02 ... 1.02328416e-02 1.07275151e-01 -2.07086354e-02] [-5.20554520e-02 -1.05647976e-03 3.14387791e-02 ... 1.30750760e-02 -4.76276614e-02 -9.25755277e-02] [ 3.27297975e-03 1.28875315e-01 4.02276739e-02 ... 
-1.12867914e-02 -1.68137893e-03 -7.58301117e-04]]] [[[ 1.88848987e-01 -3.50236535e-01 4.23726857e-01 ... -2.45151386e-01 2.21869722e-01 -2.83168573e-02] [ 1.72488421e-01 -1.18510991e-01 -4.21918593e-02 ... -1.29976213e-01 1.46501303e-01 9.66124460e-02] [ 7.23550394e-02 -1.02395928e-02 -1.16572715e-01 ... 7.37894997e-02 2.71104962e-01 -5.70209846e-02] ... [-9.15315282e-03 -1.37604520e-01 2.96470989e-02 ... 1.11942045e-01 1.38140082e-01 -2.15788454e-01] [-1.05711222e-01 5.58762439e-02 1.04893275e-01 ... -1.79591015e-01 -1.45575970e-01 3.81005084e-04] [-8.29810426e-02 2.19384387e-01 1.59796610e-01 ... -3.77642624e-02 -1.61595136e-01 -1.85037740e-02]] [[ 3.57431285e-02 -3.12101096e-01 2.35195547e-01 ... 5.94523311e-01 8.65497962e-02 3.83292019e-01] [-5.20842195e-01 3.43080074e-01 3.27776931e-02 ... -5.19673109e-01 -9.21762735e-02 6.23896897e-01] [-1.92608684e-01 8.55213925e-02 2.83451945e-01 ... 1.69797465e-01 2.67176256e-02 3.15964222e-01] ... [ 2.83723652e-01 2.66037464e-01 -1.96457356e-01 ... 7.55925030e-02 4.21193063e-01 -8.05198193e-01] [-2.10163905e-03 3.32830161e-01 -2.98321515e-01 ... 5.64255655e-01 -3.22955996e-01 1.43578306e-01] [-6.02390707e-01 3.38001102e-01 4.37666744e-01 ... -2.18323141e-01 8.22029352e-01 2.08608225e-01]] [[-8.48741829e-01 3.53594095e-01 1.98972180e-01 ... -9.33992192e-02 -4.68056649e-01 1.06521046e+00] [ 5.86990476e-01 3.75922859e-01 1.63112730e-01 ... 8.93150121e-02 1.05098791e-01 -2.03707308e-01] [ 1.06004007e-01 -3.86454388e-02 -1.61579698e-01 ... -2.44647563e-02 -3.00681710e-01 2.88344145e-01] ... [ 3.56590152e-01 1.22296438e-01 8.67450703e-03 ... 5.07267937e-02 -3.07094723e-01 6.58311069e-01] [ 1.03886336e-01 -1.92378923e-01 -5.89241013e-02 ... -9.38691497e-02 -2.40731671e-01 -3.32332551e-01] [ 9.49532315e-02 3.92918289e-01 -3.36597651e-01 ... -4.63329703e-01 -5.07038236e-02 2.86483854e-01]] [[ 2.13533759e+00 -2.61894727e+00 -3.93467760e+00 ... 
-9.66202617e-01 -1.72625375e+00 -2.78012776e+00] [-4.03181732e-01 6.83336675e-01 1.60709098e-02 ... -3.01764297e+00 1.21258020e+00 -4.95438337e+00] [-2.02573225e-01 5.48190117e-01 -2.43173289e+00 ... -1.53672481e+00 3.09667677e-01 -1.64411354e+00] ... [-7.73914695e-01 2.05301613e-01 1.19754560e-01 ... 5.90409458e-01 -8.02696049e-01 -8.27741921e-01] [ 2.54603696e+00 2.53639197e+00 4.85224867e+00 ... -4.09996319e+00 3.98391575e-01 -1.39770955e-01] [ 2.07194537e-01 -1.72531843e-01 -1.43968225e+00 ... 2.38814771e-01 3.30088663e+00 -7.56842613e-01]] [[ 1.04311623e-01 -2.08847690e-03 1.59660596e-02 ... -8.08356330e-02 -9.07242373e-02 -1.10546555e-02] [ 4.02983911e-02 -2.42293123e-02 -4.71839160e-02 ... -1.27233507e-05 -6.85051829e-02 2.05256976e-02] [ 5.03199548e-02 4.58061462e-03 -8.16645548e-02 ... -1.07144099e-02 3.23666856e-02 4.31907997e-02] ... [-3.51567054e-03 1.17652155e-01 -1.82564883e-03 ... -1.80512741e-02 4.30491865e-02 4.27611778e-03] [-6.58786669e-02 1.39450699e-01 1.16475038e-02 ... 1.06271058e-02 -8.09719265e-02 1.22545268e-02] [-6.19304813e-02 5.87654375e-02 -4.24143635e-02 ... -8.19891132e-03 -2.65641175e-02 5.17752282e-02]] [[-1.12175889e-01 1.87039934e-02 1.34642813e-02 ... -6.57193735e-02 -3.70920449e-02 2.49528829e-02] [ 6.02079742e-03 -8.79652649e-02 -4.13381960e-03 ... 9.78827197e-03 -8.97011906e-03 -8.36733207e-02] [ 1.53799728e-02 -2.53034681e-02 -2.89007183e-03 ... 2.44167019e-02 9.17655155e-02 -1.03400657e-02] ... [ 2.98820762e-03 -5.26498631e-02 3.16068120e-02 ... -7.84137845e-02 -1.08774276e-02 7.65199810e-02] [ 8.25523213e-02 6.78423122e-02 3.85845266e-02 ... 1.36080861e-01 -8.22592080e-02 -2.18340126e-03] [-2.46274676e-02 -8.01737010e-02 -5.58295213e-02 ... 4.09402288e-02 1.05367646e-01 -1.81996629e-01]]] [[[ 1.06880121e-01 -3.37759167e-01 5.63001156e-01 ... 8.96985829e-02 4.93617877e-02 -8.78302287e-03] [ 1.78933099e-01 -5.22449352e-02 -3.46527696e-02 ... 
-1.88817993e-01 2.12315813e-01 -3.24013174e-01] [ 3.29948872e-01 3.41653287e-01 -3.29480432e-02 ... 3.36303450e-02 3.84384036e-01 -8.71944726e-02] ... [ 2.60378987e-01 -1.26333013e-01 -5.64595461e-01 ... 7.24782869e-02 1.24039747e-01 2.30068117e-01] [ 2.11363614e-01 -8.20356980e-02 2.59081781e-01 ... 3.10041644e-02 -1.83630168e-01 -4.27738279e-02] [ 1.27706990e-01 2.13183627e-01 -1.65504888e-01 ... 2.29810495e-02 2.66069323e-01 1.32985160e-01]] [[-7.45267034e-01 -1.83616608e-01 1.94178492e-01 ... -1.27270371e-01 3.72040987e-01 1.42502144e-01] [-2.46515051e-01 4.58028853e-01 5.70638299e-01 ... 4.62970883e-01 3.60496342e-01 9.21675861e-02] [ 1.59835666e-01 2.99337566e-01 -6.49683118e-01 ... -5.93252122e-01 -1.86376512e-01 8.87181461e-02] ... [ 6.39577866e-01 -5.77790618e-01 6.87804759e-01 ... 2.42145717e-01 3.45049053e-01 -4.00650829e-01] [ 2.07816303e-01 -2.10264087e-01 5.56665137e-02 ... 1.36646833e-02 -4.54953223e-01 3.40961426e-01] [ 4.66219224e-02 -1.25539005e-01 4.56951652e-03 ... -5.24158835e-01 -4.43896264e-01 -8.16200674e-01]] [[-7.23810554e-01 1.99598238e-01 3.45959157e-01 ... 1.03743061e-01 -3.03120852e-01 2.53426313e-01] [-1.40229091e-01 -3.15386653e-01 5.64280376e-02 ... -4.37334962e-02 7.87311256e-01 -3.41380417e-01] [ 3.86880398e-01 2.70148546e-01 -2.40152419e-01 ... -5.00190198e-01 -1.42004445e-01 2.26666316e-01] ... [ 4.61898953e-01 -8.87213126e-02 4.63637769e-01 ... 9.78728533e-02 5.46690047e-01 -1.62980020e-01] [ 2.43263543e-01 -4.78071839e-01 2.45730162e-01 ... -3.19299370e-01 -7.20189214e-02 1.87487796e-01] [ 6.92465454e-02 -1.79360688e-01 -2.33181760e-01 ... -8.13281357e-01 7.90494028e-03 1.40010267e-02]] [[ 5.12994640e-02 2.27715278e+00 -6.26399159e-01 ... -1.69082701e+00 2.26350754e-01 1.43880713e+00] [-3.04793096e+00 1.64282227e+00 5.79191744e-01 ... 7.12747753e-01 1.15148914e+00 9.47983086e-01] [ 1.44904912e+00 -1.16854459e-01 -9.92148593e-02 ... 2.19613433e+00 -1.40003788e+00 -3.41207242e+00] ... 
[ 1.83982515e+00 1.05527878e+00 1.78245258e+00 ... -2.08180547e+00 -1.65054893e+00 -1.79703617e+00] [-1.89437151e+00 3.62595010e+00 2.49778795e+00 ... 2.15931848e-01 -2.29578876e+00 1.21577299e+00] [-9.60423708e-01 -1.07615662e+00 -1.51109919e-02 ... -1.15445173e+00 3.78998947e+00 1.00845546e-01]] [[-1.26784565e-02 -3.06767654e-02 -6.23297393e-02 ... 2.17567496e-02 -1.59713272e-02 -8.14026967e-02] [ 5.42759150e-02 4.27673720e-02 -1.11358827e-02 ... -3.87452580e-02 1.99097842e-02 -2.96636298e-02] [ 5.39356656e-02 5.92226237e-02 -3.39856222e-02 ... -1.75597332e-03 -3.10728373e-03 -3.20697241e-02] ... [ 4.71993908e-02 -5.82639463e-02 -9.67081934e-02 ... -7.71738868e-03 4.18356471e-02 -2.48818174e-02] [ 3.41556431e-03 7.02890232e-02 2.94653187e-03 ... -5.03262840e-02 2.06544995e-02 8.40041786e-03] [ 2.47792080e-02 5.99619299e-02 -8.70190281e-03 ... 3.38877290e-02 -6.86307847e-02 2.76262183e-02]] [[-5.95804788e-02 -2.33694278e-02 3.41881290e-02 ... 1.02174222e-01 2.80269478e-02 -8.06519911e-02] [-5.07772416e-02 2.58682016e-02 -1.66804612e-01 ... -5.63336574e-02 -4.01983671e-02 -4.74275574e-02] [-1.32372409e-01 7.44867325e-02 -4.86330502e-02 ... 2.53093109e-04 -2.85185073e-02 -8.50098394e-03] ... [ 1.67719033e-02 -2.95215286e-02 -5.94393834e-02 ... -8.38266918e-04 -5.15338928e-02 -8.61445069e-02] [-9.38554779e-02 -2.01134644e-02 -7.39451870e-02 ... -2.89095808e-02 3.11736539e-02 8.22315589e-02] [-3.21594663e-02 1.45739932e-02 1.18771568e-03 ... -6.34362102e-02 -6.49212226e-02 -6.30584061e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 5} - params:{'n_groups': 3} ] | 0.23 | |
|
----------------------------- Captured stdout call ----------------------------- 5 graph(%self : __torch__.test_group_norm.___torch_mangle_4614.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.weight : NoneType = prim::Constant() %self.n_groups : int = prim::Constant[value=3]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %5, %self.weight, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%6, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.weight, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) fw_re: [[[[[-3.07089210e-01 -2.59255856e-01 -3.25307101e-01 ... -6.27340525e-02 -2.07651639e+00 -1.50032389e+00] [-1.05801761e+00 -2.10936949e-01 -2.24286020e-01 ... 
-5.91633677e-01 -1.40935612e+00 -1.07956779e+00] [ 1.00771397e-01 -1.07344890e+00 1.17538738e+00 ... 2.36254156e-01 -4.60840493e-01 7.91387379e-01] ... [ 2.22856700e-01 -1.32433987e+00 1.18470919e+00 ... 6.89991951e-01 -3.26041555e+00 -1.71648145e-01] [ 1.56432819e+00 1.13245058e+00 -1.85560644e-01 ... 1.02490556e+00 -3.02369297e-01 1.04060876e+00] [-1.11943138e+00 -4.73787695e-01 3.94051343e-01 ... -1.39513087e+00 -2.43789625e+00 -1.54389903e-01]] [[-8.80797207e-01 -3.78568470e-01 7.65827596e-02 ... -5.06971657e-01 -2.12717557e+00 -1.69036090e+00] [-2.50188336e-02 -3.47657561e-01 -1.40789795e+00 ... 4.52377051e-01 -2.79145658e-01 -2.54271460e+00] [ 3.13974053e-01 -1.28796309e-01 9.34762836e-01 ... 4.50434327e-01 -2.04562068e+00 -1.10826278e+00] ... [ 1.55866861e+00 6.84336945e-02 1.01729226e+00 ... 6.30788267e-01 3.00555736e-01 -5.79506755e-01] [ 6.26463890e-01 2.99236894e-01 1.41883767e+00 ... 9.55852792e-02 1.18145859e+00 4.25306261e-01] [ 7.13976383e-01 2.63594657e-01 1.25476980e+00 ... 9.50965583e-01 1.30947113e-01 -4.80122387e-01]] [[ 1.33797288e+00 -1.29357791e+00 -1.56572506e-01 ... 5.70261441e-02 8.66988063e-01 8.74234661e-02] [ 4.93035644e-01 -6.66248500e-01 -1.85129178e+00 ... -2.18204275e-01 -7.30261803e-01 1.08436382e+00] [ 1.16192675e+00 1.36946449e-02 5.91841817e-01 ... -1.61810458e-01 -1.59215376e-01 5.68682611e-01] ... [-1.19287741e+00 1.71962607e+00 1.91695309e+00 ... 5.93535244e-01 -2.55025059e-01 -1.39006805e+00] [-1.32313633e+00 -1.79651931e-01 -1.49311638e+00 ... -6.55063450e-01 -1.13908082e-01 5.36647998e-02] [-1.09237647e+00 5.49039721e-01 -1.26509428e+00 ... -1.70860946e-01 1.27820343e-01 -9.58498597e-01]] ... [[-3.88798505e-01 5.75352237e-02 5.35132706e-01 ... 8.42800178e-03 -7.86468208e-01 -1.85882896e-01] [ 1.02221048e+00 1.42363143e+00 2.03923836e-01 ... -1.24354219e+00 3.86297911e-01 -6.80015013e-02] [-1.01715112e+00 -1.24937999e+00 7.29476511e-01 ... 3.03126454e-01 -8.39359090e-02 1.41998768e+00] ... 
[-7.58909523e-01 8.41613352e-01 -1.45671606e+00 ... -6.13141000e-01 -7.10210204e-01 1.94708812e+00] [-4.47124243e-01 -7.37316251e-01 7.77779460e-01 ... 1.54542476e-01 -1.36220622e+00 -8.93044770e-01] [-9.10378098e-01 8.39966536e-01 -5.71134746e-01 ... -5.99764705e-01 8.47688258e-01 -8.01169336e-01]] [[ 3.54428053e-01 2.85623342e-01 -2.27147162e-01 ... -4.01756436e-01 1.40306628e+00 -3.45867798e-02] [-7.04479933e-01 -1.30901501e-01 1.02647340e+00 ... 6.85083330e-01 1.94943726e-01 -2.12068275e-01] [-2.17203528e-01 -2.02264860e-02 8.62609029e-01 ... 1.51631045e+00 -8.62786472e-01 2.14786839e+00] ... [-1.31904995e+00 -1.89473942e-01 5.24290577e-02 ... -3.33434483e-03 3.49488229e-01 3.38432699e-01] [ 5.42281926e-01 -6.26772419e-02 1.94920671e+00 ... 1.24306190e+00 1.21978962e+00 -3.24082553e-01] [ 4.19474185e-01 -5.11646390e-01 -6.38116121e-01 ... -8.32220316e-01 1.51334941e+00 -8.83013844e-01]] [[ 1.03197336e+00 3.54437441e-01 -5.55997550e-01 ... 1.60481167e+00 -1.50063968e+00 -7.97105283e-02] [-1.55838028e-01 -7.42217183e-01 9.19467628e-01 ... -6.11750722e-01 -3.52384537e-01 -3.92852634e-01] [-5.36557257e-01 -4.87532467e-01 3.31239790e-01 ... 1.98494351e+00 -2.04354715e+00 4.42864358e-01] ... [ 7.07954764e-01 -8.93881023e-01 8.44539523e-01 ... 6.66418433e-01 -1.03522098e+00 6.69971883e-01] [ 3.41323614e-01 -1.14581513e+00 1.14650750e+00 ... -1.97078675e-01 -5.31013310e-01 1.74459374e+00] [-1.39424884e+00 -2.72944234e-02 -1.54244792e+00 ... -1.19588292e+00 1.29844499e+00 8.81706893e-01]]] [[[ 6.14342272e-01 -7.90160179e-01 3.69225629e-02 ... 1.34538639e+00 -9.03753713e-02 -5.09685993e-01] [ 1.34016812e+00 -6.84467375e-01 3.04762036e-01 ... 3.94533306e-01 -5.07582366e-01 -9.24267948e-01] [-2.97883719e-01 -3.33789855e-01 -8.20981205e-01 ... 5.40180266e-01 8.28088880e-01 -7.71517932e-01] ... [-2.65783727e-01 -2.25293469e+00 1.29125810e+00 ... 2.53237545e-01 -7.52569318e-01 -1.55328035e+00] [-7.15986550e-01 -1.00245988e+00 4.69650984e-01 ... 
2.67260408e+00 1.41820383e+00 -1.16060781e+00] [ 2.60978532e+00 9.10865486e-01 2.15768162e-02 ... 5.81333697e-01 6.74460292e-01 1.02680862e+00]] [[-1.13203073e+00 -3.13753366e+00 7.09615648e-01 ... 1.09207880e+00 1.29046929e+00 -5.62950552e-01] [ 1.74328592e-02 1.39495838e+00 9.07585680e-01 ... -3.46136898e-01 1.15509617e+00 -7.06732631e-01] [ 1.14327681e+00 5.47937572e-01 -7.66185641e-01 ... -1.86408460e+00 1.55369270e+00 -6.74968362e-02] ... [-2.30001783e+00 9.60135832e-02 9.61013436e-01 ... 4.93192106e-01 2.20839715e+00 1.72441936e+00] [-9.68316793e-01 -1.30181178e-01 6.58837333e-02 ... 3.30394655e-01 -1.67448902e+00 4.37347472e-01] [ 6.60218418e-01 4.26195890e-01 1.30952036e+00 ... 1.09318900e+00 -2.34140188e-01 -7.69219220e-01]] [[-4.22532380e-01 8.91310573e-01 -3.87282372e-01 ... -9.53663066e-02 1.49098718e+00 -7.77668059e-01] [-5.81896484e-01 2.17765167e-01 1.82401031e-01 ... 6.70357719e-02 1.79504633e-01 8.37711215e-01] [-2.36377209e-01 1.61137128e+00 -1.28079414e+00 ... 8.87712613e-02 6.63395286e-01 -1.03629994e+00] ... [ 1.33717275e+00 -2.90598292e-02 6.27681315e-02 ... 1.91365445e+00 4.84088778e-01 1.23684156e+00] [-9.25348997e-01 1.48685002e+00 -1.43975413e+00 ... -2.38197637e+00 2.38032937e-01 -1.40559220e+00] [ 1.77924737e-01 -5.97812533e-01 7.52186537e-01 ... 3.10949117e-01 -6.93197250e-02 1.31370378e+00]] ... [[-7.18110085e-01 5.83360910e-01 5.39371729e-01 ... -4.55409557e-01 -6.23903334e-01 -4.95546728e-01] [-6.00399494e-01 -2.48648107e-01 7.72891939e-01 ... 8.00267935e-01 3.14243525e-01 6.98991299e-01] [ 3.72478515e-01 -8.05463567e-02 1.36527157e+00 ... 1.17763245e+00 1.48518121e+00 -1.03843606e+00] ... [ 8.73792946e-01 -1.34211791e+00 1.28428981e-01 ... 2.15727180e-01 4.52071369e-01 -3.86661440e-01] [-1.57750890e-01 -5.65775335e-01 1.30937564e+00 ... -2.97072470e-01 -2.59541959e-01 2.94948310e-01] [-2.46647522e-01 1.11798382e+00 -8.94181430e-01 ... 
-1.80659914e+00 -1.68048784e-01 -3.84916186e-01]] [[ 4.10791218e-01 3.33364725e-01 1.38922882e+00 ... -8.01067531e-01 -1.63283065e-01 2.20874429e-01] [ 2.98782494e-02 -1.04094014e-01 -1.48569155e+00 ... -2.60088176e-01 -7.42909253e-01 5.75805269e-02] [ 5.88591211e-02 -7.43696690e-01 -1.60141802e+00 ... 1.03367484e+00 1.72180966e-01 -6.53886378e-01] ... [-1.75081062e+00 -1.10514998e-01 -1.49179471e+00 ... -1.62984347e+00 2.74204254e-01 -2.47171551e-01] [ 2.65217751e-01 2.41102517e-01 3.54308903e-01 ... -1.41512549e+00 -1.87014788e-01 3.11306745e-01] [-2.04616070e+00 1.04133236e+00 9.50563967e-01 ... 1.06937416e-01 8.48953664e-01 -7.48532891e-01]] [[-1.30772972e-02 -2.18479371e+00 -3.38213325e-01 ... 9.87423882e-02 -1.14438379e+00 -1.87204432e+00] [-1.57461330e-01 1.87450433e+00 -7.49437928e-01 ... -1.13120747e+00 -5.81278801e-01 4.08862948e-01] [-5.96267223e-01 -1.22538018e+00 -2.20856810e+00 ... 1.93335623e-01 -1.09734595e+00 1.44835854e+00] ... [-3.18222463e-01 8.65045786e-01 -1.35284507e+00 ... 1.49190688e+00 -1.11212206e+00 2.52368659e-01] [ 6.32842854e-02 6.22862339e-01 -8.35639387e-02 ... 3.00482154e-01 2.48622918e+00 6.29610360e-01] [ 2.62637794e-01 6.44985020e-01 -1.24191451e+00 ... -1.08372331e+00 6.56815469e-01 -1.08098909e-01]]] [[[ 2.16169879e-01 2.26476979e+00 1.18063891e+00 ... 5.35811633e-02 -7.42489517e-01 -8.84120345e-01] [ 3.03075820e-01 -1.11201644e+00 -4.33679402e-01 ... -6.74808979e-01 -7.78584659e-01 -1.01792192e+00] [-7.04737365e-01 -1.90597093e+00 -2.16325387e-01 ... -6.25697434e-01 6.96708262e-01 -1.21898508e+00] ... [-1.50301170e+00 6.36290014e-01 7.16120660e-01 ... -2.12028250e-01 -2.11135313e-01 5.24208546e-02] [ 1.57545722e+00 -1.26549375e+00 -2.41472650e+00 ... -3.45966697e-01 3.79082114e-01 1.29878271e+00] [-1.78695753e-01 3.60263079e-01 -3.54386330e-01 ... -8.69291663e-01 2.69175768e-01 1.41555890e-01]] [[ 1.26624882e-01 7.86758006e-01 -1.16891515e+00 ... 
-1.26065242e+00 -6.12585187e-01 2.11869955e+00] [ 7.35693991e-01 -2.93341339e-01 1.83763638e-01 ... -7.30082393e-01 -2.26377100e-01 -1.22209013e+00] [ 2.59889126e-01 1.94255069e-01 5.83401263e-01 ... -7.17897058e-01 -1.25490069e+00 -3.41691822e-01] ... [ 5.24346530e-01 -5.51526070e-01 1.10456228e+00 ... 3.91778231e-01 6.63563609e-01 1.09472573e+00] [ 6.79538697e-02 -1.93612146e+00 -1.69352007e+00 ... 1.05899751e+00 -2.50004768e-01 -1.15368104e+00] [ 1.65737092e-01 7.11474895e-01 1.86572492e+00 ... -9.85017657e-01 6.61007106e-01 -3.69635820e-01]] [[-1.03852093e+00 4.58317369e-01 1.05165446e+00 ... -1.57275391e+00 1.38198614e+00 8.81005645e-01] [-7.83612430e-01 9.93612111e-01 -5.97101450e-01 ... -2.12141800e+00 1.27819443e+00 -1.58994401e+00] [-4.14644718e-01 -1.03009140e+00 -6.21070564e-01 ... -1.00290585e+00 -1.21725023e+00 1.22332990e+00] ... [ 1.80288076e-01 7.57567585e-01 2.64259100e-01 ... -2.18195394e-01 -6.22252941e-01 -4.64130700e-01] [ 6.13133729e-01 -4.46151495e-02 9.76211876e-02 ... 1.38383460e+00 1.33077598e+00 6.79614246e-01] [ 7.86406815e-01 5.75258657e-02 7.84491122e-01 ... 1.03612125e+00 6.00779235e-01 1.38604867e+00]] ... [[ 3.06494981e-01 3.21100652e-01 4.57839996e-01 ... 3.38423282e-01 -2.69340932e-01 1.35718417e+00] [ 5.50400257e-01 -3.92875731e-01 -1.57962775e+00 ... 7.93231726e-01 4.08427238e-01 -1.10533381e+00] [-9.68855798e-01 -1.99385905e+00 3.75534564e-01 ... 5.76188192e-02 -8.00004482e-01 2.20381069e+00] ... [-1.33891714e+00 -2.42205672e-02 -4.72158134e-01 ... -8.01054657e-01 -7.56987408e-02 3.60011280e-01] [ 2.13035035e+00 1.87343165e-01 -2.97028869e-01 ... 9.33709666e-02 -2.09562254e+00 -1.99719071e+00] [ 8.59795511e-01 -8.35254431e-01 5.94946325e-01 ... -3.11078101e-01 4.28419448e-02 7.50982165e-01]] [[-1.03151870e+00 -3.67400140e-01 3.03122461e-01 ... 7.57461727e-01 -2.31130764e-01 7.55016029e-01] [-1.94505289e-01 -1.34229147e+00 -1.03764248e+00 ... 
1.21137130e+00 3.54179919e-01 -4.22159314e-01] [-1.68125880e+00 -7.32558250e-01 -1.07338583e+00 ... -1.00592636e-01 4.13962811e-01 1.02371871e-02] ... [-1.14448118e+00 -1.90112841e+00 4.06904370e-01 ... 8.40153515e-01 -1.38661075e+00 6.83323264e-01] [-1.39001656e+00 1.61432326e-01 1.06997478e+00 ... 1.55165982e+00 6.86120152e-01 -1.25191069e+00] [-9.78643954e-01 -8.01794231e-01 5.93939185e-01 ... -9.93308485e-01 1.17326051e-01 3.06221342e+00]] [[-1.03519356e+00 6.91897094e-01 -1.47110403e+00 ... -6.52702987e-01 -3.90212327e-01 -7.34064162e-01] [-8.26433115e-03 1.06160009e+00 -1.23869586e+00 ... 1.96268260e-01 3.99892658e-01 -1.06149518e+00] [ 4.09318119e-01 -5.13467453e-02 2.31242582e-01 ... 5.88921189e-01 -4.14718479e-01 -4.44744051e-01] ... [-6.56484783e-01 3.39124858e-01 6.26471341e-02 ... -1.80057585e-01 4.74460393e-01 -5.93887627e-01] [-1.72960669e-01 -2.25353622e+00 -5.95147371e-01 ... 1.39883280e+00 8.49712919e-03 6.88334465e-01] [ 2.89517355e+00 6.32841945e-01 1.23773932e-01 ... 1.20198596e+00 -1.38265669e-01 -2.78564066e-01]]] [[[-1.94187939e-01 7.87997022e-02 -1.04800797e+00 ... -1.01826668e+00 -5.89395463e-01 -4.23877575e-02] [-3.87525260e-01 1.10976672e+00 -2.89689273e-01 ... -7.28033483e-02 1.59552407e+00 1.41545916e+00] [ 8.10753584e-01 -2.55458879e+00 -1.44900247e-01 ... 5.00694692e-01 -3.03946853e-01 7.39020184e-02] ... [-1.28957546e+00 1.37821227e-01 2.01149315e-01 ... 5.44988066e-02 3.27425420e-01 -3.52744043e-01] [ 7.32981682e-01 -8.37784529e-01 -3.27751279e-01 ... 5.95416725e-02 1.45530832e+00 5.08156359e-01] [ 1.23301315e+00 7.97252238e-01 -2.30779123e+00 ... 3.85218829e-01 -5.52950144e-01 4.90997016e-01]] [[ 5.87267280e-01 8.67984891e-01 -3.48715693e-01 ... 6.40367866e-01 1.01852512e+00 -5.05150318e-01] [ 2.39580691e-01 -1.24459553e+00 2.51615435e-01 ... -1.26302862e+00 9.79609609e-01 -1.72365665e-01] [-1.06356561e+00 -1.32657969e+00 6.13138318e-01 ... 4.86990184e-01 -3.70532662e-01 -5.01396894e-01] ... 
[-9.12976384e-01 5.70455849e-01 7.58564472e-02 ... -4.93275166e-01 2.61264652e-01 2.22615242e-01] [ 1.30314302e+00 1.32674289e+00 -3.80688816e-01 ... -7.16417074e-01 6.90661132e-01 7.19216391e-02] [-2.13758588e+00 2.35446692e+00 2.28691554e+00 ... 2.42516652e-01 4.57387328e-01 -4.92746904e-02]] [[ 2.64215112e-01 -8.06818232e-02 9.71965015e-01 ... 7.45815754e-01 8.12721625e-02 2.24671006e-01] [ 1.23136044e+00 -1.12454891e+00 1.02714920e+00 ... -8.04973781e-01 -1.46852362e+00 9.41173196e-01] [ 1.60776317e-01 1.44372955e-01 -1.41574085e+00 ... -1.25442594e-01 1.10803103e+00 1.40847981e+00] ... [-1.58900726e+00 -1.27577841e+00 -1.52754977e-01 ... 7.00756192e-01 -1.34147441e+00 1.09217024e+00] [ 1.62386382e+00 1.92482197e+00 -1.37056530e+00 ... -9.87847447e-01 5.62870383e-01 -2.32138470e-01] [-5.75781465e-01 -5.88853836e-01 -7.69538701e-01 ... -5.76304436e-01 6.51819944e-01 -1.15163565e+00]] ... [[ 2.06861228e-01 -8.53369772e-01 9.47082758e-01 ... -7.99268663e-01 3.92340243e-01 4.26192135e-01] [ 1.17506778e+00 3.53620589e-01 -1.37327313e+00 ... -2.49747798e-01 6.72252357e-01 -1.17057585e-03] [-2.16435504e+00 -6.47933662e-01 1.39289916e+00 ... -1.87661994e+00 -1.71042723e-03 1.66972435e+00] ... [-2.93248713e-01 9.20133352e-01 -1.10964322e+00 ... -8.64744365e-01 1.79507732e-01 -8.80966902e-01] [-7.06103563e-01 7.79597759e-01 -5.00137620e-02 ... 1.20967162e+00 -1.37975979e+00 1.31014657e+00] [-1.71437085e-01 -1.41812456e+00 9.35252845e-01 ... 9.85872805e-01 -7.29432583e-01 4.88042891e-01]] [[-1.64186627e-01 -4.62950617e-01 -1.62264049e+00 ... -5.75071499e-02 2.26202309e-01 -3.34280059e-02] [-3.46100479e-01 1.27181208e+00 8.56560767e-01 ... -1.58488020e-01 8.79267097e-01 -9.48230088e-01] [-6.48963153e-02 -1.96463394e+00 1.75036871e+00 ... 1.03988715e-01 -4.66866285e-01 -1.02253270e+00] ... [ 1.60774362e+00 -4.95052606e-01 1.31400436e-01 ... -5.64466417e-01 4.86860037e-01 -4.77397799e-01] [ 3.09067637e-01 -6.31840527e-01 -2.34469429e-01 ... 
-3.03882360e+00 -3.83757502e-01 4.05072302e-01] [-5.15520632e-01 -3.22525918e-01 -5.01965106e-01 ... -1.77492604e-01 -1.11837246e-01 -7.13052392e-01]] [[ 1.20500576e+00 1.48272991e-01 7.93299437e-01 ... 5.76717079e-01 -1.45907372e-01 -5.73347628e-01] [-1.16881776e+00 4.86874878e-01 1.24344671e+00 ... 3.67979258e-01 -1.67779315e+00 -1.43943679e+00] [ 7.81879604e-01 -7.25415573e-02 -2.12909007e+00 ... 1.73501015e+00 -1.11764657e+00 4.38317984e-01] ... [ 3.65567923e-01 -4.25081849e-01 -1.18554735e+00 ... -1.23526144e+00 2.25347781e+00 1.63791704e+00] [ 1.39628983e+00 2.70252287e-01 -5.20383418e-02 ... -1.35575700e+00 -1.97801054e+00 -1.35551059e+00] [ 2.22551417e+00 -1.43935013e+00 -2.49376106e+00 ... -9.17003393e-01 9.25242186e-01 1.12847626e+00]]] [[[ 3.25127214e-01 -1.56809294e+00 -1.31062102e+00 ... -2.06098795e+00 -1.68006033e-01 -1.72482729e+00] [ 2.13619232e-01 1.88689566e+00 1.79324225e-01 ... -1.07927635e-01 -1.11488372e-01 9.43831444e-01] [-1.09785549e-01 1.54420629e-01 -7.58505404e-01 ... -6.68677762e-02 3.21692199e-01 1.57053411e+00] ... [ 1.50517190e+00 7.90704727e-01 6.43151551e-02 ... 5.18887460e-01 6.22952998e-01 -3.24053466e-01] [-4.73257571e-01 -4.95763749e-01 5.25954783e-01 ... 6.48853004e-01 7.61326730e-01 8.94579828e-01] [-7.30752230e-01 1.72862574e-01 -7.16149449e-01 ... -9.32540298e-01 5.76503612e-02 1.95142972e+00]] [[-7.62983799e-01 -1.51899707e+00 1.14069134e-01 ... 1.58741653e-01 9.87820983e-01 -5.62884450e-01] [-4.83251244e-01 -1.00975685e-01 -1.88830411e+00 ... -2.18700027e+00 2.74647981e-01 -5.70335627e-01] [-3.97614032e-01 3.39628488e-01 4.97590452e-01 ... -6.96297944e-01 -7.03629181e-02 1.36156309e+00] ... [-7.84759164e-01 -7.01141298e-01 1.31220162e+00 ... -3.24778914e-01 5.27745008e-01 -2.41152239e+00] [-2.40514946e+00 8.53927255e-01 1.14205182e+00 ... 1.02370727e+00 1.65422838e-02 3.98853362e-01] [ 1.77825391e+00 3.10576588e-01 5.49035907e-01 ... 
7.80740142e-01 -1.49088100e-01 2.91787714e-01]] [[ 1.03068538e-01 -5.19862652e-01 -7.58745253e-01 ... 1.14749801e+00 -1.21443224e+00 -6.41532898e-01] [ 1.32244551e+00 1.36133480e+00 1.06848359e+00 ... 2.85396546e-01 1.09119427e+00 2.75922358e-01] [ 4.29248452e-01 -7.17998385e-01 6.55559778e-01 ... -3.04070413e-01 1.06817794e+00 -3.62584114e-01] ... [ 3.79418194e-01 -4.21558321e-01 -3.75427395e-01 ... 6.23062961e-02 2.70374751e+00 2.30436102e-01] [-1.68596578e+00 2.63222671e+00 -9.86951292e-01 ... 5.72151482e-01 -8.75621855e-01 1.32425117e+00] [ 8.08815956e-01 -4.49048966e-01 -5.89706838e-01 ... 2.36586666e+00 3.37808698e-01 7.08149791e-01]] ... [[-5.18545322e-02 1.24582481e+00 1.93408656e+00 ... 1.80315226e-01 5.90915620e-01 -1.17646335e-02] [-6.34768546e-01 1.44755840e+00 -7.52793729e-01 ... 5.71797013e-01 3.80541205e-01 -2.24619079e+00] [ 6.23306751e-01 -8.39356542e-01 8.60100612e-02 ... -1.08431482e+00 -7.62691796e-02 -9.51440930e-01] ... [-1.53354526e+00 -9.82103586e-01 1.05841231e+00 ... -1.73872769e-01 -3.62487346e-01 4.38041776e-01] [-9.04893160e-01 -7.91227724e-03 -6.45420194e-01 ... 1.37333000e+00 2.03224468e+00 -6.99846685e-01] [ 6.04262250e-03 -6.26958489e-01 -7.29159832e-01 ... 1.45525920e+00 8.13774228e-01 8.62434208e-01]] [[-1.54123142e-01 2.16328591e-01 -8.78649116e-01 ... -9.18302357e-01 -7.78004467e-01 -6.95536971e-01] [ 1.56554127e+00 1.87002242e+00 -1.72093844e+00 ... -9.66672957e-01 9.34756845e-02 1.71475291e+00] [-7.81486928e-01 -5.65168142e-01 -9.34707999e-01 ... 1.38358462e+00 3.09014525e-02 1.41336429e+00] ... [ 3.57269198e-01 3.11002463e-01 -5.50053298e-01 ... -1.29401696e+00 -8.52119148e-01 9.10298169e-01] [-1.24401355e+00 -5.15753925e-01 1.06414282e+00 ... 9.58260119e-01 1.07919133e+00 -5.37384935e-02] [-2.69216932e-02 1.30893126e-01 3.76492381e-01 ... -7.80666769e-01 3.21257085e-01 -1.46672833e+00]] [[-8.78930748e-01 -1.51972604e+00 -1.46908030e-01 ... 
-6.84262812e-01 -1.35216558e+00 1.39121485e+00] [-9.14657831e-01 1.48927331e+00 -5.13245463e-02 ... -5.77352464e-01 -9.81411219e-01 7.33975947e-01] [-1.05802488e+00 4.92528975e-01 -1.63895226e+00 ... 9.39034283e-01 2.25024354e-02 1.12277198e+00] ... [-1.20361912e+00 1.12509501e+00 9.04195011e-01 ... -5.01724064e-01 7.74510950e-02 1.65916204e+00] [ 5.11942387e-01 -1.69469738e+00 -6.68123543e-01 ... -7.14063764e-01 5.21540008e-02 3.35621595e-01] [ 1.09153771e+00 -2.23800465e-01 -9.35014933e-02 ... -8.64335001e-01 4.43409562e-01 -7.00653970e-01]]] [[[-2.85806894e-01 2.70972520e-01 -1.41133404e+00 ... 1.95924938e-02 -4.17528689e-01 1.30792654e+00] [-5.27298629e-01 7.80916095e-01 1.70297116e-01 ... -1.33129088e-02 2.80344356e-02 -3.21931869e-01] [-1.31231821e+00 1.29239172e-01 -2.02151775e+00 ... 2.57549852e-01 -1.50075495e+00 -6.32612824e-01] ... [ 5.47004819e-01 -8.52625743e-02 -9.68376398e-01 ... -1.39347792e-01 -2.27595493e-01 5.73728502e-01] [ 4.83902901e-01 8.70290041e-01 8.93242300e-01 ... 8.57504606e-02 5.74317753e-01 7.96005905e-01] [ 1.64091444e+00 1.35271490e+00 1.97816819e-01 ... 1.14076376e+00 1.10757804e+00 4.94132906e-01]] [[-1.85793594e-01 6.25943601e-01 -3.13546229e-03 ... -2.26279664e+00 -9.49372768e-01 -1.81040752e+00] [ 1.23196155e-01 1.78012753e+00 4.29632276e-01 ... 3.07652447e-02 -7.15600073e-01 6.64282203e-01] [-4.56588894e-01 -6.75247610e-01 1.55090034e-01 ... 8.72037947e-01 -3.65632921e-01 -1.32462108e+00] ... [-2.16331147e-02 -1.05281246e+00 -2.11440444e-01 ... -1.62898111e+00 -1.51078689e+00 1.83041131e+00] [ 3.32316250e-01 -3.73264313e-01 -1.31020713e+00 ... 1.29378307e+00 6.44447058e-02 1.28149307e+00] [-7.95752645e-01 -1.04108691e+00 -7.11993635e-01 ... 4.81317192e-01 2.57886231e-01 -2.90182799e-01]] [[ 9.73249614e-01 9.43157434e-01 -1.23535979e+00 ... 8.75112057e-01 -9.71379280e-02 6.20199680e-01] [-8.07806015e-01 -4.98328686e-01 5.33791959e-01 ... 
-1.23969436e+00 1.08907115e+00 -8.90797675e-02] [ 1.25732267e+00 -2.28432536e+00 -2.44521335e-01 ... -6.40085161e-01 -1.36021927e-01 1.83263719e-01] ... [ 7.43415117e-01 -3.68486255e-01 -7.49667823e-01 ... -5.09314239e-01 1.62529445e+00 4.66538191e-01] [ 7.00279593e-01 -1.84451210e+00 1.04974079e+00 ... -9.68194366e-01 -1.65232825e+00 1.87042153e+00] [ 1.61651298e-01 2.14477205e+00 2.40762576e-01 ... 8.36346328e-01 -2.68840455e-02 -1.54083684e-01]] ... [[-4.29560363e-01 -3.68148565e-01 1.89481342e+00 ... -4.80343789e-01 1.43422437e+00 -8.74848247e-01] [ 3.00808549e-01 1.00488818e+00 3.45850259e-01 ... -8.28907549e-01 4.66326565e-01 1.21914148e+00] [ 1.10068262e+00 -4.57291692e-01 -1.68726814e+00 ... -9.99217093e-01 -2.09790421e+00 -4.53833759e-01] ... [-2.61244607e+00 -1.07362807e+00 -5.94396284e-03 ... -6.95061624e-01 -1.35682270e-01 1.05915911e-01] [-1.43214071e+00 5.52656710e-01 6.27997160e-01 ... -1.01537907e+00 -1.05417490e+00 1.15793180e+00] [-5.76317132e-01 -1.37438262e+00 5.77828169e-01 ... 5.03944159e-01 1.85586870e+00 -7.23448694e-01]] [[ 7.19304383e-01 1.07078099e+00 1.30590117e+00 ... 7.23666906e-01 1.60280240e+00 -1.57662356e+00] [-1.18609035e+00 1.73160322e-02 9.09146011e-01 ... -1.40821242e+00 -6.27368212e-01 1.52028513e+00] [ 1.36721456e+00 3.05465460e-01 -1.55816644e-01 ... 1.59946406e+00 1.05798495e+00 -7.02521682e-01] ... [-1.09224439e+00 2.85137951e-01 -1.51121473e+00 ... 9.58658278e-01 -1.47862395e-03 2.45745540e-01] [ 7.58737683e-01 1.50843012e+00 -6.85238659e-01 ... -2.65179753e-01 3.00349385e-01 -1.18739867e+00] [-1.07341921e+00 8.33113968e-01 5.13739526e-01 ... -1.57814825e+00 6.00713730e-01 1.70695889e+00]] [[-1.01966691e+00 6.44463480e-01 -8.71126473e-01 ... 1.20108175e+00 -1.97976053e+00 -5.23254037e-01] [ 3.48285943e-01 -1.95543244e-01 -4.30573255e-01 ... 7.06603467e-01 -7.31959641e-01 2.32115790e-01] [ 6.55915365e-02 -4.41934764e-01 -5.11836946e-01 ... 1.41926932e+00 3.03533912e-01 5.21254182e-01] ... 
[-4.01293039e-01 5.84229112e-01 2.66839504e-01 ... 6.55836523e-01 1.34575880e+00 -1.02458191e+00] [ 1.32465613e+00 1.00830233e+00 8.01625192e-01 ... 8.26743007e-01 5.49180150e-01 -5.84791839e-01] [ 1.23591900e+00 -4.21519637e-01 -4.29836243e-01 ... 1.20894217e+00 -1.93832424e-02 3.31969947e-01]]]] [[[[-2.26461816e+00 -4.98859644e-01 1.38683963e+00 ... 2.10951924e+00 -6.43102348e-01 -4.06477243e-01] [ 1.41292942e+00 1.22545946e+00 7.58813500e-01 ... 1.58221170e-01 8.07213664e-01 -1.71239579e+00] [ 4.51040685e-01 -5.22726655e-01 -2.77020717e+00 ... 7.58058205e-02 -2.02891088e+00 -2.02824563e-01] ... [-4.86112326e-01 -1.07330287e+00 4.64394456e-03 ... -4.61854339e-02 -3.16916972e-01 -4.05595899e-01] [-1.57097971e+00 3.12157989e-01 2.51703292e-01 ... -6.91173434e-01 -7.85528958e-01 7.12181985e-01] [ 1.66050625e+00 -8.05028200e-01 -2.94575357e+00 ... 1.28046870e-01 1.33105481e+00 -6.92257643e-01]] [[-3.43030721e-01 -1.74506474e+00 -1.97069615e-01 ... -2.43217841e-01 -6.03435099e-01 -1.36740410e+00] [ 1.50868058e+00 3.72746617e-01 1.39333367e+00 ... -1.25767863e+00 -1.41556191e+00 -7.23881796e-02] [ 2.97525078e-01 -1.21184714e-01 2.61142671e-01 ... -4.56218958e-01 -1.07029557e-01 -2.21250147e-01] ... [ 6.99339688e-01 1.56324661e+00 9.46232319e-01 ... -1.04486752e+00 -4.56613839e-01 -1.31496561e+00] [-1.54481971e+00 6.98048532e-01 -5.49602270e-01 ... -7.08848059e-01 -1.30294010e-01 2.35150039e-01] [-2.61261106e-01 2.76021659e-01 5.95300645e-02 ... -6.63691461e-01 -3.01237583e-01 1.36899328e+00]] [[ 6.79937422e-01 6.54273748e-01 5.13639092e-01 ... -1.08258390e+00 -9.33465600e-01 -1.67187667e+00] [-4.42686945e-01 7.32329115e-02 -1.13674486e+00 ... -7.13967919e-01 1.18177965e-01 -5.48614681e-01] [-6.07066333e-01 -1.04723871e+00 -1.59817207e+00 ... -1.08133423e+00 8.19471657e-01 -2.12092310e-01] ... [-1.05492949e-01 4.66310114e-01 -1.33241653e+00 ... -1.70139742e+00 7.37490281e-02 -1.65636241e+00] [-1.79353952e+00 -8.20924938e-02 5.10190487e-01 ... 
1.40174949e+00 7.61266589e-01 -7.15419725e-02] [-2.59610987e+00 -1.80921808e-01 -2.12367868e+00 ... -8.89535099e-02 -3.43532383e-01 4.14516419e-01]] ... [[-4.54911679e-01 2.18613788e-01 4.99420196e-01 ... 3.10468376e-01 -5.63406721e-02 -1.01940095e-01] [-3.17789130e-02 5.13383567e-01 1.60995471e+00 ... 7.91629910e-01 -7.86304995e-02 1.94670939e+00] [-1.71775848e-01 -7.00185537e-01 -8.41852069e-01 ... -4.69320178e-01 -1.56067431e+00 1.46645939e+00] ... [-1.06135082e+00 -1.40493894e+00 2.07620001e+00 ... -1.82963908e+00 -1.26482412e-01 -1.74265355e-01] [ 5.47098637e-01 -1.02230966e+00 3.70029986e-01 ... -2.33562660e+00 7.68483281e-01 1.76295955e-02] [ 7.69930407e-02 -8.35147917e-01 1.81787765e+00 ... 8.69250119e-01 -8.42036724e-01 8.90697062e-01]] [[ 2.98137379e+00 1.25800514e+00 2.27603644e-01 ... -4.24625546e-01 4.88301486e-01 1.13496220e+00] [-1.57070625e+00 1.16765046e+00 -2.25933388e-01 ... 7.38183379e-01 -2.27416232e-02 5.87870600e-03] [ 2.37209067e-01 -1.65968597e-01 -9.81477022e-01 ... 1.05561960e+00 -2.00975090e-01 -6.56832814e-01] ... [-1.35484004e+00 6.23260319e-01 -8.56900513e-01 ... -1.81062424e+00 -1.29580915e-01 1.76986063e+00] [ 4.05821294e-01 -7.78534472e-01 1.61070454e+00 ... 7.72242993e-02 -4.14763957e-01 1.63034666e+00] [-4.53116208e-01 1.38366565e-01 -1.44666541e+00 ... 1.47778738e+00 -4.78931367e-01 -3.70705217e-01]] [[ 4.80687618e-01 -6.33005083e-01 -5.76307356e-01 ... -4.61133540e-01 1.45733774e+00 1.65351719e-01] [-3.54993165e-01 1.56125379e+00 -3.36059064e-01 ... 1.58490539e+00 -2.04988813e+00 -7.07852066e-01] [ 4.44666952e-01 6.05080187e-01 -2.40600333e-01 ... -1.15272355e+00 -9.85568464e-01 1.04829140e-01] ... [ 4.51786220e-01 -1.99978769e+00 -1.37477612e+00 ... 7.83582151e-01 1.55149436e+00 2.81175852e-01] [-4.16349083e-01 -5.54124713e-01 -1.80451810e+00 ... 2.76838094e-01 -6.35530889e-01 -9.93538082e-01] [-1.13007379e+00 -6.84741795e-01 -1.05791700e+00 ... 
1.81487572e+00 -1.43596268e+00 -5.55937886e-01]]] [[[ 6.28199160e-01 1.18428659e+00 -5.40464520e-01 ... -9.57243979e-01 -3.32123429e-01 -1.25900304e+00] [-2.30336761e+00 2.66481090e+00 -7.58509755e-01 ... -3.46661955e-02 2.92909443e-01 -6.13670349e-01] [ 1.56465366e-01 1.69100118e+00 1.24803591e+00 ... -8.86754096e-01 6.34784937e-01 -2.29011744e-01] ... [ 2.14007851e-02 -1.40894222e+00 -1.52404368e-01 ... 8.60494494e-01 -2.71790385e-01 1.37853456e+00] [ 1.82273127e-02 -7.69238114e-01 -9.77354869e-02 ... -6.01464391e-01 3.14388901e-01 2.16505861e+00] [ 1.45046222e+00 -9.72509384e-01 2.28326768e-01 ... -5.57606593e-02 -1.28352320e+00 4.99006748e-01]] [[-9.13788915e-01 -8.06979656e-01 -1.18056810e+00 ... 1.51800919e+00 -2.31668651e-01 1.11112189e+00] [ 4.67270494e-01 1.30288333e-01 1.16478181e+00 ... 5.15047073e-01 1.25872374e+00 -4.30796355e-01] [ 2.58662134e-01 4.83750612e-01 1.68834090e+00 ... 5.54284036e-01 -6.25094950e-01 6.46454632e-01] ... [-1.47432268e+00 -9.67696428e-01 -5.75186253e-01 ... -8.65593791e-01 -7.76904762e-01 -2.80732960e-01] [ 1.38157618e+00 -1.46810877e+00 1.06067955e+00 ... 7.76043653e-01 -2.71077424e-01 6.84550643e-01] [ 6.19492769e-01 5.07582128e-01 7.48891592e-01 ... 3.95492166e-01 1.28493941e+00 -5.54428339e-01]] [[-1.96191895e+00 1.52300382e+00 -7.17518568e-01 ... 5.79010844e-01 -7.82494783e-01 1.15530968e+00] [ 2.10451916e-01 -5.08125186e-01 -8.72141421e-01 ... -1.24040759e+00 -2.90331483e-01 -7.54222870e-01] [ 3.24243724e-01 -1.13477528e+00 8.43723565e-02 ... -2.15977287e+00 -1.28808665e+00 5.48189342e-01] ... [-2.22292659e-03 1.10629213e+00 -1.16571181e-01 ... -4.07227725e-01 5.42013705e-01 6.21292591e-01] [ 4.51136261e-01 -1.48531592e+00 -1.02213037e+00 ... 6.87584952e-02 1.00897086e+00 4.88942981e-01] [ 1.88005224e-01 6.94038451e-01 2.87605256e-01 ... 1.76494598e-01 8.33088577e-01 6.77953362e-01]] ... [[-1.78966308e+00 -1.30264854e+00 -5.42035580e-01 ... 
1.03330719e+00 -4.41020697e-01 -6.09557748e-01] [ 6.32737458e-01 3.15965354e-01 7.75925517e-01 ... 5.02963960e-01 -8.49694729e-01 2.44432226e-01] [ 9.04874384e-01 4.15542573e-01 -5.26423454e-01 ... 7.52917528e-01 -1.92961499e-01 -6.24543965e-01] ... [ 1.18860137e+00 6.93145454e-01 4.46155429e-01 ... -6.46345437e-01 -3.32687944e-02 3.09240729e-01] [ 9.26551282e-01 5.46570599e-01 -1.35047829e+00 ... -6.05017066e-01 1.81415403e+00 -3.91591132e-01] [-1.09175050e+00 5.61214983e-01 4.84490812e-01 ... -1.28369823e-01 -1.04301631e+00 1.17019141e+00]] [[-6.28620744e-01 8.83654177e-01 2.83294111e-01 ... -9.89461958e-01 9.02159870e-01 -3.11805964e-01] [-1.39776874e+00 -2.21101880e-01 -6.05310321e-01 ... 6.30345523e-01 1.11395657e+00 -9.72981930e-01] [-3.82553965e-01 -6.89670816e-02 -2.05593634e+00 ... -5.96037984e-01 7.83040047e-01 9.32283878e-01] ... [-1.36310613e+00 -9.84076262e-01 -3.63380253e-01 ... 7.85581112e-01 8.99026453e-01 -1.04592144e+00] [-2.55789340e-01 1.83868825e-01 -8.85054618e-02 ... 1.14099324e+00 -7.64277458e-01 9.59050179e-01] [ 7.68475890e-01 3.95517141e-01 1.06196359e-01 ... -4.83244538e-01 3.76874804e-01 2.13477588e+00]] [[-1.04356647e+00 4.32402104e-01 1.20687807e+00 ... -2.14355493e+00 -2.13542080e+00 -3.18211094e-02] [-2.67041147e-01 1.12572145e+00 -8.87403011e-01 ... 7.87444711e-01 1.62574744e+00 -3.17423820e-01] [ 1.89317673e-01 1.61341488e-01 6.40219510e-01 ... 1.79025486e-01 1.80677921e-01 1.28078258e+00] ... [ 2.20068884e+00 -1.10201395e+00 1.20794880e+00 ... -1.31661355e+00 5.67619324e-01 4.94710177e-01] [-4.74633992e-01 1.67543304e+00 9.36336637e-01 ... -9.34798270e-02 3.49293351e-02 -6.37576759e-01] [ 9.12651658e-01 -8.36500943e-01 -4.18616861e-01 ... 4.10356164e-01 7.52958357e-01 -2.26415014e+00]]] [[[ 1.82225299e+00 -1.38780251e-01 1.10253739e+00 ... -2.58653387e-02 2.27724403e-01 3.08622301e-01] [-9.52742577e-01 1.02073610e+00 -1.00338626e+00 ... 
9.07983184e-01 2.69125193e-01 -1.71952128e-01] [ 2.18566418e+00 -1.67890000e+00 -8.01782191e-01 ... -4.95614111e-01 -1.62607336e+00 -8.01990807e-01] ... [-1.85337543e+00 3.32406223e-01 -8.21178019e-01 ... 2.05865383e+00 1.30511701e+00 -6.36918902e-01] [-6.83092177e-01 3.61851424e-01 -3.44933838e-01 ... 5.52077174e-01 5.34294426e-01 -5.56852400e-01] [-8.62300992e-01 -5.21965504e-01 -1.51960278e+00 ... 3.15743566e-01 -1.73560572e+00 2.86127906e-02]] [[-4.87430900e-01 -2.10474467e+00 -1.00206339e+00 ... -1.92494065e-01 -6.31885350e-01 -1.42035639e+00] [-1.31548662e-02 -4.67768162e-01 1.09073496e+00 ... -6.49700984e-02 9.96505022e-01 -1.85879302e+00] [ 3.33176972e-03 -4.80023235e-01 -1.96912467e-01 ... 1.17210448e+00 -6.74070716e-02 6.88632727e-02] ... [-1.91865295e-01 -2.11828440e-01 -1.69775474e+00 ... 7.09923804e-01 -1.45320877e-01 -3.13054860e-01] [-1.53117061e+00 -2.00368476e+00 9.33024049e-01 ... -1.90399480e+00 -6.40247583e-01 7.33525753e-01] [ 3.30206573e-01 -1.40073943e+00 9.26906019e-02 ... -2.71265600e-02 1.09912157e-01 -3.39967757e-01]] [[-1.01901628e-01 3.35535616e-01 6.25418723e-01 ... 4.15817052e-01 4.22148734e-01 9.26620185e-01] [-1.20816410e+00 -1.20658827e+00 7.91663945e-01 ... -6.90144122e-01 -1.94175065e-01 3.43744308e-02] [-4.40084279e-01 -3.02768111e-01 -1.20305097e+00 ... 2.25440770e-01 -1.49783063e+00 -2.17619643e-01] ... [-6.79185927e-01 -3.53332460e-01 -1.62799549e+00 ... 3.60331833e-01 5.39746583e-01 2.36337245e-01] [ 2.36424255e+00 2.47140974e-01 -9.90307480e-02 ... -1.77316710e-01 1.99442422e+00 7.29323328e-01] [-1.16552174e+00 2.68859249e-02 -3.94694954e-01 ... 3.59205693e-01 -7.59410679e-01 5.17755985e-01]] ... [[-1.82591105e+00 -1.01222026e+00 7.87963152e-01 ... -1.23556244e+00 9.31193471e-01 -1.74502850e+00] [ 7.12673903e-01 5.37888527e-01 -7.34175146e-01 ... -9.55512583e-01 -1.96149337e+00 2.15089512e+00] [-9.98484015e-01 1.29477322e-01 1.01823294e+00 ... -8.81452858e-01 -2.84130156e-01 2.48505712e-01] ... 
[-3.79732341e-01 5.61239600e-01 6.33366227e-01 ... -6.58103228e-02 -1.12850392e+00 9.24771369e-01] [ 1.36611283e-01 3.89645286e-02 1.14058197e+00 ... -1.79698870e-01 4.94180799e-01 6.10892594e-01] [ 5.97259998e-01 -1.37018275e+00 2.50062495e-01 ... -1.69969842e-01 -3.91535312e-01 -7.00570762e-01]] [[-2.48566896e-01 1.59273803e+00 -8.87142241e-01 ... -1.67655781e-01 1.97009766e+00 -1.90931773e+00] [ 1.65724027e+00 1.44858249e-02 -3.21239918e-01 ... 2.53037167e+00 -1.18062362e-01 -1.34554410e+00] [-5.35692155e-01 -5.37942827e-01 8.48691285e-01 ... 4.38365936e-01 1.41156852e-01 5.17243326e-01] ... [ 8.59254003e-01 -3.94467413e-01 3.95307951e-02 ... 5.59866309e-01 8.84024560e-01 1.46630871e+00] [ 2.46075535e+00 3.90631437e-01 -1.59123468e+00 ... -4.43045050e-01 1.96008325e+00 -1.80075204e+00] [ 4.83837843e-01 -1.96657145e+00 1.64516866e+00 ... -9.34194505e-01 5.05173802e-01 8.01133513e-01]] [[-4.83615935e-01 -1.84667051e+00 -1.30257356e+00 ... 2.71847248e-01 7.42418766e-01 -1.98065825e-02] [-4.72344279e-01 -1.45398641e+00 1.38207698e+00 ... 1.67308062e-01 1.49259245e+00 1.25869727e+00] [ 1.42109454e+00 -3.50510955e-01 -1.11247909e+00 ... -3.48979756e-02 -8.82024169e-01 9.03290689e-01] ... [ 1.95217741e+00 -8.50172579e-01 -1.16252959e+00 ... -1.09596908e+00 5.89598835e-01 1.51003194e+00] [-1.32628393e+00 2.17123413e+00 8.48608077e-01 ... -7.49552369e-01 1.11797571e+00 -4.41234797e-01] [ 1.56523430e+00 1.84941316e+00 7.69629180e-01 ... -1.47768766e-01 1.26062036e+00 -9.07334387e-02]]] [[[-1.00217782e-01 1.06016934e+00 -4.02788758e-01 ... -1.31899178e+00 1.25313151e+00 3.47379833e-01] [-1.22386619e-01 1.39300182e-01 -1.68430701e-01 ... -8.96998644e-01 -5.76135278e-01 -1.50222516e+00] [-2.86143959e-01 -1.71201468e-01 7.11947381e-01 ... -1.82112247e-01 -3.81600648e-01 1.29113233e+00] ... [-9.65648770e-01 5.37727296e-01 6.19013369e-01 ... -2.27053732e-01 6.12663999e-02 -1.80282128e+00] [ 4.19071406e-01 -8.28822732e-01 9.68629122e-01 ... 
-6.70936644e-01 2.53410721e+00 -2.42240310e-01] [ 1.43509853e+00 1.53555930e+00 -1.13098156e+00 ... 2.31359661e-01 -4.96512264e-01 -1.59258616e+00]] [[-2.65589404e+00 3.12829465e-01 -1.25794613e+00 ... -8.86898041e-01 -5.99673569e-01 -1.17754519e-01] [-1.64193344e+00 5.92624724e-01 -2.07264543e+00 ... -8.68625283e-01 7.46235177e-02 -9.26388085e-01] [ 9.78794277e-01 -2.59229690e-01 -4.85993028e-02 ... 1.11538696e+00 8.89060497e-01 -5.16174614e-01] ... [ 1.45011806e+00 5.84238768e-01 1.69723725e+00 ... -1.18808365e+00 4.42234039e-01 6.17241800e-01] [ 8.25763419e-02 1.43171027e-01 3.76513273e-01 ... -1.78786421e+00 8.49470854e-01 3.59179646e-01] [-3.26706082e-01 5.08285999e-01 3.14573318e-01 ... -1.37800145e+00 -1.97642297e-01 2.88989186e-01]] [[-5.31700552e-01 5.20106316e-01 -1.10391402e+00 ... 1.05268466e+00 1.01984990e+00 5.20738006e-01] [-2.73267174e+00 8.73729169e-01 1.94264948e-01 ... -5.82092047e-01 1.69410661e-01 -7.78588474e-01] [-1.15633404e+00 -1.63587379e+00 5.87061308e-02 ... -1.04362011e+00 1.90311658e+00 -1.18108869e+00] ... [ 1.98328161e+00 2.02825737e+00 -2.53997731e+00 ... 1.62094831e-01 5.75090826e-01 1.49085236e+00] [ 4.00420487e-01 -5.43697774e-01 -2.98408806e-01 ... -1.62800562e+00 -1.59120548e+00 3.56704116e-01] [-8.19490433e-01 5.25216460e-01 -1.11884594e+00 ... 7.43102133e-01 2.20851123e-01 4.23900872e-01]] ... [[-1.09284878e+00 5.09097219e-01 -3.70706290e-01 ... 1.52639973e+00 3.89462300e-02 -8.24176133e-01] [-2.41923422e-01 3.85688871e-01 -2.51007110e-01 ... 1.55958140e+00 8.14097524e-01 -1.50631517e-01] [ 2.53304001e-02 -3.62463057e-01 -3.45380306e-01 ... -4.68536824e-01 6.51757538e-01 2.73339987e+00] ... [ 2.94182301e-01 -1.43122005e+00 2.72337198e-01 ... -5.77085465e-02 -1.51199865e+00 -6.64814770e-01] [-6.45356596e-01 2.48259082e-01 4.53665227e-01 ... 1.27086961e+00 -1.42866111e+00 6.40370920e-02] [-5.24554908e-01 -1.37515426e+00 1.24712819e-02 ... 
-2.70146370e-01 7.39092231e-02 -2.48767257e+00]] [[-3.28107715e-01 -3.89144152e-01 -6.65817976e-01 ... -1.00200868e+00 -2.57709169e+00 -6.04860365e-01] [ 9.78652313e-02 5.35726883e-02 -1.24517202e+00 ... 9.77164984e-01 -1.78708935e+00 -1.73901117e+00] [ 1.59819052e-02 2.45853737e-01 -8.83042336e-01 ... 1.12459302e+00 5.58933854e-01 -2.01628283e-01] ... [ 6.42610729e-01 9.21326041e-01 -3.35231215e-01 ... -5.33352375e-01 -8.73661339e-01 -1.52858305e+00] [ 1.73986399e+00 -7.11370289e-01 -4.53933895e-01 ... -1.08934128e+00 8.79999101e-01 -9.80900824e-01] [ 5.32982767e-01 -3.16615909e-01 -1.08946526e+00 ... -1.79020315e-01 1.19677162e+00 5.10076821e-01]] [[-1.87058485e+00 4.24757391e-01 2.55967397e-03 ... 7.93899819e-02 1.12174869e-01 3.16135406e-01] [ 1.28831565e-01 -7.39026189e-01 1.16481435e+00 ... 3.87906224e-01 -2.06010491e-02 3.19578052e-02] [-2.25877333e+00 8.15858006e-01 9.58480895e-01 ... 5.61381698e-01 -3.61462027e-01 -7.32685387e-01] ... [ 3.54252636e-01 -3.89630228e-01 -3.56306970e-01 ... -2.85762176e-02 1.22769915e-01 5.02531350e-01] [ 1.65194714e+00 -1.54157162e+00 7.23173022e-01 ... -2.23020339e+00 -3.85253966e-01 -3.98137212e-01] [ 1.30117178e+00 1.17345726e+00 2.74843669e+00 ... 1.58415303e-01 2.82043666e-01 -1.39260089e+00]]] [[[-3.36766988e-01 -1.10966003e+00 -7.29403436e-01 ... 3.55627775e-01 3.05121616e-02 7.42698237e-02] [ 3.92249942e-01 7.49052227e-01 -1.54199994e+00 ... -8.34977403e-02 1.43518603e+00 -1.43585896e+00] [ 9.16679800e-01 1.55947423e+00 1.12899709e+00 ... 1.61432493e+00 -9.95460212e-01 9.27627206e-01] ... [ 1.50724679e-01 -7.20107317e-01 4.64192629e-01 ... -1.24091280e+00 1.54470730e+00 3.42050165e-01] [-7.21333995e-02 -1.08567357e+00 7.25422978e-01 ... -8.98719370e-01 -5.32744050e-01 -2.76243281e+00] [ 1.48003256e+00 4.66456175e-01 -5.19666672e-01 ... 8.70654464e-01 5.81247866e-01 9.31047440e-01]] [[-1.12476811e-01 9.97874737e-01 8.01597476e-01 ... 
-1.34417665e+00 -4.55884665e-01 -7.67041266e-01] [-1.03447235e+00 1.19192564e+00 1.92689145e+00 ... -1.16390157e+00 6.79719508e-01 -6.02660537e-01] [ 8.70705426e-01 -9.16959196e-02 -3.24028879e-01 ... -1.50320470e+00 9.24942791e-01 2.05337629e-01] ... [-3.08995619e-02 -1.48384169e-01 -1.59214532e+00 ... 8.19195688e-01 7.79933095e-01 2.51461059e-01] [-3.75217676e-01 -5.16828954e-01 -1.97397041e+00 ... 1.37589526e+00 4.49067086e-01 -2.45940804e+00] [ 3.14814836e-01 7.45710135e-01 -1.08467448e+00 ... -7.80577779e-01 2.42615867e+00 8.33409011e-01]] [[ 1.33122706e+00 1.32192254e+00 -7.27825761e-01 ... 4.69285548e-01 -9.35207307e-01 3.68725181e-01] [-2.13779044e+00 1.49256635e+00 3.41138929e-01 ... 1.43801355e+00 1.26657152e+00 1.37713468e+00] [ 1.30973530e+00 -2.00816318e-01 2.40301803e-01 ... -7.06223309e-01 9.88982320e-02 1.14259481e+00] ... [-6.73531368e-02 7.93866217e-01 -9.33380902e-01 ... -4.27635819e-01 -6.32966578e-01 -1.30578089e+00] [ 1.03184593e+00 -3.30985498e+00 -3.68158743e-02 ... -1.43278491e+00 -3.92914534e-01 -4.30914521e-01] [ 8.97779316e-03 -6.89447761e-01 1.40776348e+00 ... -8.88077140e-01 -3.42391759e-01 -1.77229667e+00]] ... [[-1.38898194e+00 -1.14468586e+00 -3.83387238e-01 ... -6.13068521e-01 -9.42479074e-01 9.27191675e-01] [-2.93019235e-01 -6.69954121e-02 -9.81067538e-01 ... -5.54815054e-01 4.86298263e-01 3.28812271e-01] [ 4.21672881e-01 3.38597059e-01 7.76082218e-01 ... 1.71327496e+00 -7.89740145e-01 -6.95607543e-01] ... [-1.71501744e+00 8.41311932e-01 -8.79503429e-01 ... -8.77456605e-01 -1.26404011e+00 -3.48987818e-01] [ 1.15099156e+00 -3.24684620e-01 4.32149023e-01 ... -7.45630413e-02 7.15768874e-01 -1.62347341e+00] [-3.80829394e-01 9.29788873e-03 -1.68044698e+00 ... -1.81711388e+00 -1.14035654e+00 -7.41581142e-01]] [[-3.13073754e-01 1.63005050e-02 -2.18284869e+00 ... -4.89987403e-01 -1.42750072e+00 -9.54963148e-01] [-2.18564653e+00 -5.62404275e-01 -1.10877955e+00 ... 
1.90101594e-01 7.57360458e-01 7.33938396e-01] [-4.51717228e-01 2.19384193e+00 1.50935188e-01 ... -4.59968388e-01 1.06951997e-01 8.34496796e-01] ... [ 1.68760687e-01 -2.84162807e+00 -6.95981681e-02 ... -4.83920574e-02 1.59881496e+00 2.93067545e-01] [-3.54638934e-01 -1.28806448e+00 -1.36489773e+00 ... 9.09758449e-01 -1.44176800e-02 1.60347319e+00] [ 1.08956659e+00 -7.84244895e-01 -4.99229738e-03 ... 1.98709682e-01 -1.67905584e-01 -1.09948647e+00]] [[-7.15992510e-01 -7.31804594e-02 -9.63673055e-01 ... 1.13191891e+00 5.87334394e-01 7.22611845e-01] [-7.23083913e-01 -2.54960513e+00 -1.20159209e+00 ... 1.26946890e+00 7.38198981e-02 2.94078052e-01] [ 1.49351716e+00 4.61576998e-01 1.86738729e-01 ... 5.14227629e-01 5.14276564e-01 1.29531109e+00] ... [-2.23248124e+00 -9.28126454e-01 -8.99381757e-01 ... -5.25783487e-02 2.43903264e-01 -2.98073381e-01] [ 7.27493465e-01 -3.65314484e-01 -8.15952063e-01 ... -1.03360280e-01 -8.21539581e-01 6.14242256e-01] [-5.43053448e-01 2.07782769e+00 1.15995955e+00 ... 1.94235015e+00 8.98796439e-01 1.17995703e+00]]] [[[ 1.01093602e+00 5.75544536e-01 -3.91107619e-01 ... -1.30841374e-01 3.48543406e-01 3.34820002e-01] [-1.23212365e-02 8.73732388e-01 1.22882295e+00 ... 1.49097872e+00 2.02477679e-01 -3.79301459e-01] [ 2.67077327e-01 6.02551520e-01 1.05404878e+00 ... 4.80744898e-01 -6.22859478e-01 -3.46368402e-01] ... [ 1.84921885e+00 -9.12770867e-01 8.64612699e-01 ... -1.00578034e+00 -1.61155856e+00 9.49343204e-01] [ 1.40520263e+00 9.02042449e-01 -4.14958745e-01 ... -5.80425084e-01 1.09362268e+00 -2.53129572e-01] [-1.03345001e+00 -1.63198090e+00 -5.34327865e-01 ... -2.19632936e+00 -6.72054365e-02 2.98983663e-01]] [[ 4.14364874e-01 -1.78568244e-01 -9.74376023e-01 ... -1.46886289e+00 9.27338958e-01 -1.76568970e-01] [-1.57728410e+00 -1.46144927e+00 1.11725879e+00 ... -1.14453457e-01 2.30125189e+00 -5.15219510e-01] [ 3.53826910e-01 1.00845262e-01 1.18362784e+00 ... 1.20668434e-01 -3.45428729e+00 1.32629156e-01] ... 
[ 9.30010617e-01 -1.36561596e+00 7.11228177e-02 ... -1.13333941e+00 4.45266485e-01 -2.25965977e+00] [ 1.59966373e+00 1.08345795e+00 -1.31375045e-01 ... -4.72613312e-02 9.27078009e-01 -1.47065663e+00] [ 1.81219864e+00 -1.22375870e+00 6.07465506e-01 ... 5.34695029e-01 -1.78691089e-01 1.07684612e+00]] [[ 1.35705256e+00 -1.94498193e+00 -6.74507082e-01 ... 7.16410697e-01 3.31064016e-01 1.23686954e-01] [ 1.24353290e+00 -1.12653661e+00 5.42829573e-01 ... -2.04495177e-01 -6.33848608e-01 2.84097522e-01] [ 3.03118706e+00 -8.63337278e-01 5.55707574e-01 ... 5.11542678e-01 2.53080308e-01 2.33594254e-01] ... [ 2.06520095e-01 -8.23810026e-02 -1.04718792e+00 ... 1.60033643e-01 7.27273405e-01 -2.26290774e+00] [ 1.39227855e+00 -1.24172843e+00 -6.44820094e-01 ... 9.07120883e-01 1.05661523e+00 -7.62220085e-01] [ 8.37331176e-01 2.80849282e-02 3.26041970e-03 ... 4.54647899e-01 2.16504097e+00 5.57669759e-01]] ... [[-1.87054539e+00 -1.07297115e-01 -1.45621970e-01 ... 1.01662889e-01 3.52712840e-01 5.31178534e-01] [-4.26484972e-01 3.09570998e-01 -2.30813050e+00 ... -6.43099666e-01 1.64133370e-01 2.73472853e-02] [ 1.94232181e-01 -2.40257546e-01 2.41484210e-01 ... -3.62348586e-01 -6.34920597e-01 -1.47453055e-01] ... [-1.48540330e+00 -8.22501659e-01 1.10388827e+00 ... 9.13404286e-01 -6.96481824e-01 -7.04348803e-01] [ 3.75496686e-01 -1.05963278e+00 -1.62313664e+00 ... 1.49745226e-01 -8.72163415e-01 7.04698265e-01] [-5.28187215e-01 -3.62895995e-01 -1.69629484e-01 ... 1.00001979e+00 -9.48665917e-01 2.87943602e-01]] [[-1.58091748e+00 -1.00980318e+00 -1.85413450e-01 ... 5.33696055e-01 1.34610581e+00 -4.96969253e-01] [-7.42146194e-01 -5.91650009e-01 7.78516710e-01 ... -4.24473226e-01 8.74313235e-01 -1.38025272e+00] [ 4.57679510e-01 1.98265016e-01 -2.54879802e-01 ... 2.71216750e-01 3.46333474e-01 1.68666220e+00] ... [ 3.37983280e-01 -8.95227551e-01 1.54505205e+00 ... -2.75118560e-01 8.46596897e-01 1.08666062e+00] [ 2.08372295e-01 -9.08299744e-01 5.16259670e-02 ... 
-4.70230281e-01 7.94576406e-01 4.75613594e-01] [-5.73864996e-01 2.89518926e-02 -6.20914042e-01 ... -3.07149959e+00 1.62952229e-01 7.35158861e-01]] [[-7.70353198e-01 1.64731479e+00 -4.74549890e-01 ... -1.19478345e+00 1.70692548e-01 -2.02101246e-01] [ 1.04319894e+00 -3.71174179e-02 1.32921767e+00 ... -6.56217277e-01 5.14473021e-01 4.67819154e-01] [-1.49302208e+00 7.62767553e-01 -2.89168090e-01 ... -1.77676067e-01 2.99742520e-01 -4.25811857e-01] ... [-1.04129743e-02 1.91890049e+00 -2.24898696e-01 ... -1.74964238e-02 1.39712706e-01 -1.41517508e+00] [ 1.52617526e+00 4.77431901e-02 -1.63561320e+00 ... -9.20560881e-02 -9.07420158e-01 6.26702368e-01] [ 1.26699209e+00 -9.60134685e-01 5.12679279e-01 ... 1.32786167e+00 -1.04354942e+00 1.11552131e+00]]]] [[[[ 1.28444254e-01 -7.69190311e-01 4.79786210e-02 ... -1.94206059e-01 -1.89798847e-01 -1.57749891e+00] [ 4.49626625e-01 7.16387510e-01 -7.51531124e-02 ... -1.48739612e+00 -1.07755625e+00 -2.91670537e+00] [ 6.67565763e-01 -9.48738694e-01 1.18920100e+00 ... 1.36051178e+00 6.55066252e-01 6.91825628e-01] ... [ 9.50970531e-01 -3.18303108e-01 7.36346126e-01 ... -4.27379608e-01 -2.22559810e+00 1.94899380e-01] [-4.60302949e-01 8.62498939e-01 1.00547159e+00 ... 1.61663294e-02 -3.91928375e-01 -2.12050170e-01] [-1.16972935e+00 -9.83949363e-01 3.13903421e-01 ... 5.83636835e-02 5.85561246e-03 6.19440854e-01]] [[-1.15890920e+00 4.62079607e-02 9.64548826e-01 ... -1.54868579e+00 -2.25547440e-02 1.59500182e-01] [-1.37972486e+00 4.31481063e-01 6.41966045e-01 ... -2.06703041e-02 -5.36904037e-01 8.06042492e-01] [-1.40317452e+00 -1.03109133e+00 4.15878147e-01 ... 5.83341360e-01 -1.05181515e+00 -1.04486263e+00] ... [ 1.96297765e+00 -9.10320461e-01 1.44774270e+00 ... -2.41163611e-01 7.82382488e-03 9.90771353e-01] [ 1.45080876e+00 2.08654785e+00 -7.91699409e-01 ... -4.88910317e-01 -8.69470835e-02 -1.00804079e+00] [ 5.36996007e-01 -5.13185322e-01 2.42859110e-01 ... 
2.61299682e+00 -4.02875543e-01 1.19286573e+00]] [[ 8.30402553e-01 -5.88252485e-01 -6.72806740e-01 ... 1.36752343e+00 1.13951302e+00 -1.77387461e-01] [-1.45960975e+00 1.19762039e+00 8.86821210e-01 ... 3.59513938e-01 5.60747422e-02 -6.26275420e-01] [-9.61182475e-01 -1.53153121e-01 -3.58607680e-01 ... -5.37591815e-01 9.30665016e-01 -1.63465762e+00] ... [-6.20182037e-01 -5.61106727e-02 4.87469703e-01 ... 2.17441380e-01 1.11568427e+00 -2.76606381e-01] [-4.63734061e-01 -4.27530557e-01 8.65402579e-01 ... -8.50720823e-01 -1.55064905e+00 1.33658350e-01] [ 6.74743116e-01 -1.38859032e-02 5.68310022e-01 ... 1.02269173e+00 5.35826445e-01 -2.69071013e-01]] ... [[-1.75939932e-01 6.75716341e-01 -3.93061608e-01 ... -1.45089135e-01 1.24699914e+00 -9.41989064e-01] [-5.71596205e-01 6.31898046e-01 3.58908921e-01 ... -3.59725028e-01 1.25502634e+00 -2.18935102e-01] [ 1.61590666e-01 -2.36403084e+00 5.31573057e-01 ... 7.52095342e-01 -3.06918323e-01 6.37672842e-01] ... [ 1.37906599e+00 -1.41118789e+00 1.04023442e-01 ... -3.63730341e-01 -1.48531592e+00 -1.49398661e+00] [ 1.48134485e-01 4.50390399e-01 1.15896869e+00 ... 1.94475651e+00 -1.00004685e+00 2.95606017e-01] [ 2.09775642e-01 1.57871997e+00 -6.63131118e-01 ... 5.73706865e-01 -5.13798594e-02 1.95047212e+00]] [[ 3.99227232e-01 -4.05042708e-01 3.74857098e-01 ... -6.09872282e-01 -3.90284866e-01 -2.85839159e-02] [ 5.93256474e-01 -3.66187006e-01 1.19859540e+00 ... 8.84443223e-01 7.11760402e-01 -5.90956509e-01] [ 1.92019850e-01 1.16572034e+00 1.27075946e+00 ... -5.44955097e-02 -8.54436904e-02 -2.10004989e-02] ... [-1.32642519e+00 1.14770877e+00 9.25942510e-02 ... -2.66963929e-01 7.88862169e-01 -2.78008640e-01] [-1.91849992e-01 2.87614793e-01 7.69792974e-01 ... 5.33795774e-01 1.74068546e+00 8.48140061e-01] [-2.47648552e-01 2.95105994e-01 -1.91489792e+00 ... -2.17115819e-01 2.03915969e-01 -9.18466821e-02]] [[-1.02825649e-01 -1.56910694e+00 8.93621296e-02 ... 
-1.80999696e-01 1.38871253e-01 -5.94765209e-02] [-1.55080199e-01 2.22913951e-01 -1.39534247e+00 ... 6.80980742e-01 -1.31232429e+00 7.60497302e-02] [ 1.64120585e-01 -7.35048532e-01 1.02112377e+00 ... 1.14290208e-01 -5.78791916e-01 6.93231300e-02] ... [ 1.51455367e+00 6.30307913e-01 5.93157172e-01 ... 1.23435223e+00 1.17793131e+00 -6.76178813e-01] [-1.22720385e+00 1.18980765e+00 2.79842138e-01 ... -5.14917910e-01 -9.60919619e-01 5.05104303e-01] [-2.40642026e-01 -5.78506337e-03 2.25882426e-01 ... -4.96611893e-01 -2.32171100e-02 8.18850517e-01]]] [[[ 1.53005552e+00 -2.92509887e-02 -2.71789461e-01 ... 1.84243426e-01 1.65199608e-01 3.33926529e-01] [ 9.42379832e-01 -3.06069732e-01 1.78809231e-03 ... 2.39033175e+00 -3.15999269e-01 1.08777320e+00] [-1.02761185e+00 -2.17342734e-01 -1.50372639e-01 ... -2.05841079e-01 -3.53237003e-01 5.00090957e-01] ... [-1.65967536e+00 9.54133034e-01 -7.09670663e-01 ... 7.24289000e-01 8.56795907e-01 7.39149153e-01] [ 1.59562483e-01 6.65463686e-01 3.29224914e-01 ... -8.39308202e-01 7.83792377e-01 -5.47714829e-01] [-3.00493240e-01 -4.67367947e-01 4.95047361e-01 ... 2.47705445e-01 -1.03608274e+00 1.59130692e+00]] [[ 1.95702240e-02 -1.68062449e+00 -5.12644768e-01 ... -5.77794552e-01 6.83327258e-01 -5.03898263e-01] [ 2.43938658e-02 -1.97348490e-01 -1.81371188e+00 ... 3.70193012e-02 -4.07481015e-01 -5.26103616e-01] [ 1.69496015e-02 -3.64589000e+00 1.08008397e+00 ... -8.68145645e-01 -1.65736198e+00 -7.59857118e-01] ... [-3.43649209e-01 1.09567595e+00 -6.47608101e-01 ... 1.75338790e-01 8.19799006e-01 8.57486069e-01] [ 1.52453816e+00 -9.54253376e-01 -3.62048239e-01 ... -9.69742179e-01 1.08796525e+00 1.55552059e-01] [-2.40599895e+00 2.70312548e-01 -5.03513813e-01 ... 1.06575735e-01 -6.13796890e-01 8.96719098e-02]] [[-7.99023628e-01 2.08915114e+00 -1.45140839e+00 ... 4.43318933e-01 -4.18895185e-01 6.24718904e-01] [-3.35733950e-01 1.04450881e+00 6.41071141e-01 ... 
5.13728797e-01 -6.80611253e-01 -1.55819619e+00] [ 5.85390806e-01 -1.34543669e+00 -1.55745733e+00 ... 1.54485834e+00 3.51904845e+00 9.07376409e-01] ... [-1.45100331e+00 -6.76420569e-01 -4.93658423e-01 ... 1.08631723e-01 7.57965028e-01 1.04396617e+00] [-5.87788701e-01 -8.74255121e-01 -7.07216740e-01 ... -1.52784956e+00 4.72315401e-01 6.26697242e-01] [ 2.33543113e-01 -9.83573556e-01 2.07315612e+00 ... 1.16637254e+00 -6.94395065e-01 -2.08552647e+00]] ... [[-2.65452862e-01 2.71031952e+00 6.96013272e-01 ... -1.16496718e+00 4.18646246e-01 2.38438681e-01] [ 9.75983620e-01 8.33730325e-02 1.38632989e+00 ... 4.50760216e-01 -2.73933697e+00 -1.38072753e+00] [ 8.12654555e-01 -1.23165512e+00 5.84355772e-01 ... -6.96475863e-01 -3.69081527e-01 1.92867652e-01] ... [ 1.95839733e-01 3.44270885e-01 2.80464441e-01 ... 1.12336051e+00 -1.57653141e+00 -5.73618650e-01] [ 6.82178093e-03 -4.45458025e-01 -4.00049746e-01 ... 1.12069800e-01 4.15579766e-01 -7.78871834e-01] [ 8.86638522e-01 -4.10121381e-02 -1.25621212e+00 ... 4.84671772e-01 -1.93810868e+00 1.81928658e+00]] [[ 1.64018142e+00 -6.31593347e-01 -1.69497579e-01 ... 8.84771705e-01 -9.49831128e-01 -9.61820424e-01] [ 2.82073259e-01 -2.60657817e-01 -1.68025768e+00 ... -1.03114891e+00 -5.51938593e-01 -5.40286720e-01] [ 8.24589193e-01 -1.68659925e+00 7.81393170e-01 ... -2.34129205e-01 -1.30347833e-01 -7.56173909e-01] ... [ 5.01487851e-01 2.18862963e+00 5.82691491e-01 ... 9.35512543e-01 -1.46528512e-01 9.24311042e-01] [ 2.16861337e-01 -1.35419536e+00 -9.96695340e-01 ... -3.07007551e-01 -9.52698439e-02 -3.60476077e-01] [-2.13948274e+00 -1.26516342e+00 1.19568753e+00 ... 1.24970865e+00 3.80804777e-01 -2.36981177e+00]] [[-4.29076254e-01 3.27605963e-01 -7.20227182e-01 ... 1.23367107e+00 -2.76835293e-01 -7.58132815e-01] [ 1.08991370e-01 2.08580047e-01 7.57072151e-01 ... 8.42161179e-01 -1.69469565e-02 1.48877871e+00] [ 1.62514210e-01 -7.36025751e-01 -5.63007891e-01 ... 3.27411920e-01 -1.50518656e-01 4.80148196e-01] ... 
[-1.07938349e+00 -1.59776986e+00 -2.74332142e+00 ... 2.84351206e+00 2.18683824e-01 1.05882859e+00] [-1.51512647e+00 1.41655076e+00 5.74846447e-01 ... -8.13811183e-01 2.47753695e-01 1.54263079e+00] [-1.56233811e+00 -9.58481550e-01 -1.17002571e+00 ... 6.12603873e-02 -5.92480004e-01 -2.92159498e-01]]] [[[ 2.16133523e+00 1.24250747e-01 3.32947373e-01 ... -6.88449442e-01 -1.06889534e+00 5.68488598e-01] [ 7.18161583e-01 2.00747299e+00 5.16361535e-01 ... 9.55750763e-01 1.52209353e+00 -6.41158879e-01] [-8.63170743e-01 1.07353020e+00 5.29545784e-01 ... -5.74765503e-01 5.39808452e-01 -4.10860270e-01] ... [-1.72387511e-01 3.65137726e-01 -5.54466605e-01 ... -6.33462489e-01 -2.17358923e+00 -4.88574475e-01] [ 7.87736118e-01 -5.22152126e-01 1.41452587e+00 ... 2.17544055e+00 1.54827222e-01 -3.61912608e-01] [-7.66987145e-01 5.23322284e-01 -3.38637918e-01 ... -1.53882504e-01 1.33865213e+00 -1.62919676e+00]] [[-6.17489755e-01 -3.58366251e-01 -1.47701967e+00 ... 4.64374989e-01 1.38599932e+00 5.01608491e-01] [ 3.43321502e-01 5.33839881e-01 4.67450589e-01 ... -1.26141262e+00 -1.92957258e+00 -1.50645959e+00] [ 1.52363706e+00 -1.33330123e-02 -9.45887327e-01 ... -1.65561521e+00 2.05410552e+00 -1.49286103e+00] ... [-4.02918607e-01 6.93270639e-02 -4.71456021e-01 ... 1.66675615e+00 5.71420372e-01 2.50111938e-01] [ 1.64820004e+00 1.78337291e-01 6.00407243e-01 ... 5.95651746e-01 -9.11209404e-01 -1.53380787e+00] [ 1.27536193e-01 -2.38375738e-01 -5.92889786e-01 ... 3.44366163e-01 -3.72363716e-01 -4.79284316e-01]] [[ 1.62208900e-01 -6.00539446e-01 -1.09732056e+00 ... -2.71376312e-01 -6.57588840e-01 -1.40452552e+00] [ 1.23268038e-01 1.31515348e+00 9.14601143e-03 ... -1.34187317e+00 9.69134033e-01 -1.45821428e+00] [ 7.85080314e-01 -1.32074559e+00 -3.54498982e-01 ... -2.05665618e-01 -2.72200793e-01 3.26756686e-01] ... [ 1.36770550e-02 4.03947204e-01 -4.26939249e-01 ... -4.78943795e-01 5.95829785e-01 -1.07463574e+00] [-2.19405365e+00 1.51810801e+00 1.83408156e-01 ... 
4.10478055e-01 3.03595066e-01 -9.59445715e-01] [-1.02196622e+00 1.12585282e+00 -3.78540635e-01 ... -4.80629176e-01 1.15265167e+00 2.28506848e-01]] ... [[ 7.05423892e-01 1.10725570e+00 1.53098595e+00 ... 1.28314805e+00 4.64095175e-01 6.29920244e-01] [ 2.40788281e-01 2.00474471e-01 -7.13002563e-01 ... 1.20644677e+00 2.28222355e-01 2.06871510e+00] [ 1.56553614e+00 4.07323092e-01 -6.58487439e-01 ... 6.92930073e-03 -1.04255438e+00 1.64180532e-01] ... [-1.54807007e+00 1.24920082e+00 -2.52553105e-01 ... -9.48371470e-01 1.05552292e+00 1.21143246e+00] [ 1.07806516e+00 -1.58460462e+00 -9.76105213e-01 ... 1.89038038e+00 5.39440036e-01 -9.16578749e-04] [-2.86975987e-02 -9.76545691e-01 5.70043139e-02 ... 6.39134943e-01 1.78055835e+00 -6.91188499e-03]] [[ 1.31892419e+00 1.34332478e-01 1.86390567e+00 ... 5.03923476e-01 3.29957940e-02 1.36392534e+00] [ 1.01545513e-01 2.20711336e-01 -1.63608718e+00 ... -1.74660012e-01 -1.22624829e-01 -1.49390686e+00] [-9.95228067e-02 1.19774270e+00 1.23856103e+00 ... -2.22328216e-01 2.87776351e-01 1.74254215e+00] ... [ 7.55143285e-01 7.30895758e-01 5.89414716e-01 ... -9.54059303e-01 -3.39841366e-01 1.07817435e+00] [-5.20184748e-02 -2.33579874e-01 1.08029366e+00 ... 6.35090351e-01 -6.46758914e-01 -5.40969372e-01] [ 1.38166025e-01 -3.28301579e-01 1.96734214e+00 ... -2.28997254e+00 2.54303098e-01 1.71424359e-01]] [[-1.24209011e+00 8.77959430e-01 2.33094883e+00 ... 1.52971494e+00 4.08690989e-01 -7.65755892e-01] [-2.03996754e+00 -4.37161028e-02 1.19098328e-01 ... -4.18927431e-01 -1.24143505e+00 7.94800699e-01] [-1.96801439e-01 1.33647006e-02 -1.93600464e+00 ... 2.48740271e-01 -1.20719627e-01 -3.50202858e-01] ... [-4.44462061e-01 6.65815592e-01 -8.37635458e-01 ... 7.25054741e-01 -1.46610844e+00 1.02406752e+00] [-1.48372340e+00 4.94159490e-01 -1.24047649e+00 ... 8.84917453e-02 -1.86507195e-01 2.04962611e+00] [ 1.07632983e+00 7.15260088e-01 -3.67420971e-01 ... 
-1.23196669e-01 1.50298610e-01 5.17496645e-01]]] [[[-1.60089761e-01 -2.09203386e+00 1.89538106e-01 ... 2.67003506e-01 -2.13190961e+00 -2.71914220e+00] [ 1.61041260e-01 -1.52808952e+00 -1.74587953e+00 ... 1.21349432e-01 1.82964647e+00 2.49143168e-01] [-7.06790745e-01 3.22525889e-01 4.56292838e-01 ... -7.78493702e-01 -2.44914472e-01 7.94227600e-01] ... [-9.47640777e-01 2.13355765e-01 3.21833491e-01 ... -2.77722120e-01 -9.83932465e-02 -1.81095564e+00] [ 1.43976355e+00 -1.11128581e+00 1.42105317e+00 ... -5.25604129e-01 -8.28269005e-01 -1.17650673e-01] [-5.68654954e-01 -7.10930169e-01 -9.49269831e-01 ... 3.79058838e-01 8.52466345e-01 9.46962774e-01]] [[ 3.97684127e-01 -9.37641442e-01 9.90292549e-01 ... -3.46046209e-01 7.48286009e-01 2.99994975e-01] [ 1.60115755e+00 2.29854301e-01 -1.83204949e+00 ... -2.56362033e+00 1.05555761e+00 -1.32937706e+00] [ 2.15756997e-01 1.66606522e+00 1.01115155e+00 ... 2.27670118e-01 4.98794198e-01 -1.27182141e-01] ... [-1.05288029e-01 1.63471925e+00 -1.21343374e+00 ... -1.49727201e+00 1.06954254e-01 2.37680376e-01] [-2.17788959e+00 -3.02653491e-01 -9.61084366e-01 ... -8.32398236e-01 8.57778609e-01 -2.32268244e-01] [ 4.46048975e-01 2.58994877e-01 5.93840539e-01 ... 1.07869580e-01 -4.15714860e-01 2.01941937e-01]] [[-1.55972290e+00 -2.04525441e-01 -1.08433843e+00 ... -1.96658611e+00 9.48760882e-02 2.99741209e-01] [ 4.40251529e-02 -9.46126357e-02 -8.28027844e-01 ... -1.51419055e+00 -2.40976739e+00 -5.92878222e-01] [-8.18813384e-01 2.80501880e-02 -2.95491189e-01 ... 8.72640729e-01 -8.50213587e-01 6.89649463e-01] ... [-1.40576935e+00 3.24066013e-01 -1.32812333e+00 ... -9.32315469e-01 1.68455318e-02 4.77550440e-02] [-1.37192762e+00 -8.45178068e-01 1.14346111e+00 ... 1.72838497e+00 -4.67190266e-01 -2.31576294e-01] [ 1.71317780e+00 1.14989138e+00 4.38657910e-01 ... 2.50673480e-02 -5.13370395e-01 6.91230953e-01]] ... [[-1.43753827e+00 9.84938443e-02 -4.66584235e-01 ... 
-2.07023883e+00 1.84377480e+00 -1.23292923e+00] [ 1.79352462e+00 -1.96314186e-01 2.19636559e+00 ... -1.62479687e+00 -4.45971847e-01 -1.13583811e-01] [-5.82918286e-01 -6.49664104e-02 -6.05986178e-01 ... -5.77743948e-01 -1.22601867e-01 2.13753080e+00] ... [ 4.45330381e-01 -1.74052298e+00 3.65745962e-01 ... -7.76681781e-01 7.65285313e-01 6.44023180e-01] [-6.27864480e-01 -2.12309670e+00 1.68944120e-01 ... -1.70368016e+00 1.15412939e+00 5.65402091e-01] [ 2.65413493e-01 -5.01462877e-01 -1.18553981e-01 ... -9.31810200e-01 3.81166816e-01 -4.64718670e-01]] [[-1.37272954e-01 1.55962849e+00 -1.33613631e-01 ... 8.70867252e-01 8.84767115e-01 -1.54577315e+00] [-2.49250486e-01 -3.14343184e-01 2.72188690e-02 ... 1.16366589e+00 8.56334344e-03 1.39119124e+00] [-5.43208301e-01 1.27167284e+00 2.08215976e+00 ... -1.16848862e+00 4.51815218e-01 -6.57387555e-01] ... [ 1.31562102e+00 -1.45348454e+00 -4.58730370e-01 ... -1.38646090e+00 4.67384547e-01 -4.59756285e-01] [-3.64218712e-01 1.28710985e+00 2.14397669e-01 ... -1.86147287e-01 5.61677694e-01 7.54340589e-01] [-1.05331635e+00 5.73976874e-01 -6.15262151e-01 ... 8.32772553e-01 7.43878126e-01 -8.85813951e-01]] [[ 3.70757550e-01 8.55840862e-01 -1.33231449e+00 ... -6.66320473e-02 1.22103024e+00 4.41707373e-01] [ 6.25491977e-01 -5.88076055e-01 -6.08992949e-02 ... -7.53410876e-01 -1.26755261e+00 -1.21220440e-01] [-6.93957090e-01 2.76927173e-01 -3.22702259e-01 ... 1.98919094e+00 6.98605597e-01 -1.29313439e-01] ... [ 4.95002091e-01 2.45134413e-01 -5.30989766e-01 ... 5.22788107e-01 9.63037789e-01 4.47828919e-01] [-9.23626184e-01 -1.03334442e-01 -2.84158885e-02 ... -8.91156554e-01 7.24140108e-01 -1.57231867e+00] [ 4.70124841e-01 -8.43237162e-01 -2.49185428e-01 ... 1.53415784e-01 -9.41612422e-01 8.68688583e-01]]] [[[ 1.22847283e+00 1.78052831e+00 -7.00766027e-01 ... -8.42822969e-01 1.56027746e+00 -7.13808119e-01] [-2.17961460e-01 -2.13324666e-01 -2.24575639e+00 ... 
1.49908328e+00 -7.32441485e-01 5.11984348e-01] [ 1.11170053e+00 -1.28315553e-01 2.15581393e+00 ... -6.94315016e-01 1.13326721e-01 -2.54900336e-01] ... [ 6.66699171e-01 -4.66841012e-02 -1.16776538e+00 ... 1.45137084e+00 -5.65129340e-01 -1.70520067e+00] [ 8.00502837e-01 1.28677464e+00 -1.49208814e-01 ... 3.36699933e-01 -6.59655869e-01 -1.64216793e+00] [ 8.66080284e-01 -5.58525383e-01 -6.26595080e-01 ... 1.17982459e+00 -4.88902032e-01 -3.84129658e-02]] [[ 8.52384195e-02 -6.83621645e-01 4.52280007e-02 ... -8.10053706e-01 1.51853406e+00 1.15809226e+00] [-1.88776985e-01 1.92466283e+00 2.93698817e-01 ... 1.09906685e+00 -2.94581950e-01 1.18198121e+00] [ 3.22731018e-01 9.87141728e-01 -6.48714835e-03 ... -1.86620092e+00 -2.93546349e-01 1.40569299e-01] ... [-1.53132391e+00 1.27684653e-01 1.55522060e+00 ... 1.96026370e-01 -1.49763954e+00 4.32295263e-01] [ 1.54475498e+00 -1.64283097e+00 -1.23606038e+00 ... -2.85246998e-01 1.59135818e+00 -2.06740260e-01] [-6.01181984e-01 1.25879574e+00 9.78163302e-01 ... -1.86239362e+00 2.56603718e-01 1.10496618e-01]] [[-3.27619091e-02 -9.30477679e-01 -3.53978798e-02 ... 9.28929269e-01 1.39326596e+00 7.63742268e-01] [ 6.97500527e-01 2.19923568e+00 -1.14627159e+00 ... 4.75089163e-01 1.64242625e-01 -1.22963786e+00] [-1.76529527e+00 -3.31407845e-01 2.81066179e-01 ... -1.78703666e+00 4.58729297e-01 2.71277219e-01] ... [ 2.30922699e-01 7.73906231e-01 1.85774907e-01 ... -2.05291414e+00 5.47257125e-01 1.88770735e+00] [-1.39163032e-01 -7.83589602e-01 9.79762435e-01 ... 7.73683190e-01 -1.04345059e+00 5.71269870e-01] [-8.91739488e-01 -2.08075118e+00 -4.21126217e-01 ... 2.70529777e-01 4.18834716e-01 -8.28963161e-01]] ... [[-1.59357667e+00 9.44777608e-01 2.03176665e+00 ... -1.18935955e+00 2.82164901e-01 -1.08309352e+00] [-5.53282380e-01 -4.11657095e-01 1.11292720e+00 ... 1.34663731e-01 8.82503092e-01 -3.08681995e-01] [-1.32503539e-01 -2.59454548e-01 -7.22906291e-02 ... 2.31557027e-01 1.38046527e+00 -1.49495554e+00] ... 
[ 2.65171587e-01 -3.77731889e-01 -4.93425339e-01 ... 2.51921326e-01 1.49548256e+00 8.18800986e-01] [ 6.93608642e-01 3.56326610e-01 2.53144360e+00 ... 4.21952635e-01 5.75105369e-01 -7.64661312e-01] [ 8.20156872e-01 1.32077944e-03 1.30266535e+00 ... -3.95009726e-01 5.69823146e-01 -1.72622597e+00]] [[ 1.96494281e-01 1.55207109e+00 -9.86040771e-01 ... 3.71250421e-01 3.91471565e-01 -4.15889084e-01] [-1.16910243e+00 -1.56065595e+00 7.76926935e-01 ... 2.31161928e+00 -1.62056029e-01 3.09201032e-02] [ 3.47234786e-01 1.01359773e+00 1.02163625e+00 ... -7.22061098e-02 -2.90656477e-01 -1.62226212e+00] ... [-1.49609005e+00 2.99672872e-01 1.29021153e-01 ... 1.21825129e-01 -5.03950477e-01 1.57113814e+00] [-1.57123721e+00 -8.40250611e-01 7.83206880e-01 ... 1.39987969e+00 -1.94551885e+00 1.11945426e+00] [ 6.02093875e-01 -4.51615274e-01 2.15754628e+00 ... 1.31443155e+00 9.73869205e-01 -1.46383226e+00]] [[ 2.72830844e-01 5.37238009e-02 8.74573231e-01 ... 2.16681674e-01 -6.61270320e-01 6.68220103e-01] [-3.45351517e-01 -7.61896491e-01 -6.51001990e-01 ... 2.36879992e+00 2.89112832e-02 1.05786180e+00] [ 2.60238886e-01 -1.41851497e+00 -4.88561302e-01 ... 1.38102245e+00 5.61255142e-02 3.74333233e-01] ... [ 5.62187195e-01 9.53423381e-01 1.22247720e+00 ... -6.38752937e-01 1.35389018e+00 -2.61672497e-01] [-1.25949830e-01 1.58371699e+00 1.96296364e-01 ... 1.37029767e+00 -1.49323452e+00 -1.26645148e+00] [-6.22489870e-01 -3.13311517e-01 6.08941317e-01 ... -3.08257550e-01 2.27099895e+00 5.96399307e-01]]] [[[ 4.81122166e-01 -3.51135224e-01 1.65776289e+00 ... 1.85252595e+00 -4.46934015e-01 6.05660200e-01] [ 7.55418181e-01 -1.55671433e-01 5.73171675e-01 ... -2.29051664e-01 -2.82720655e-01 -9.02098119e-01] [-1.00110435e+00 -7.34019101e-01 -9.78932858e-01 ... 5.29848337e-01 1.31147116e-01 2.32107949e+00] ... [-9.48157310e-01 -2.48273551e-01 -1.17861938e+00 ... 1.12826765e+00 -3.93737465e-01 -1.16329575e+00] [-2.66427875e-01 8.38619709e-01 -1.03145659e+00 ... 
4.45579559e-01 -4.06488955e-01 -1.07247734e+00] [-4.60016757e-01 1.07283223e+00 -1.91790983e-01 ... 1.07153833e+00 5.56823492e-01 -1.25971544e+00]] [[ 1.77893066e+00 9.29998577e-01 -2.13415694e+00 ... -4.03870851e-01 -3.88038993e-01 -4.89576876e-01] [-2.26325333e-01 1.13098621e+00 -2.84221560e-01 ... -1.20298766e-01 7.09602475e-01 1.48166135e-01] [ 1.26107916e-01 -2.04623985e+00 2.07816884e-01 ... -1.43861604e+00 -9.82544363e-01 6.61993742e-01] ... [-1.95062804e+00 -1.29369783e+00 -5.35464942e-01 ... 5.94430327e-01 8.47498178e-01 -1.18180490e+00] [ 7.64916062e-01 -1.18285632e+00 -5.19017518e-01 ... 1.44586420e+00 -1.31020403e+00 -1.41683793e+00] [-1.28747416e+00 3.77296746e-01 1.99782297e-01 ... 1.87512845e-01 -3.25421512e-01 6.03285074e-01]] [[-5.82424700e-01 -4.01541382e-01 1.19336462e+00 ... 4.54300612e-01 1.70143202e-01 -6.70079350e-01] [-9.42366242e-01 2.33790979e-01 4.51436825e-02 ... 1.95457831e-01 -2.06406808e+00 -1.54851258e+00] [-8.48056734e-01 -2.10884854e-01 -5.38819373e-01 ... 2.16087788e-01 -5.35836577e-01 -1.40438402e+00] ... [-8.91544521e-01 3.39437038e-01 -1.32875168e+00 ... -1.21497190e+00 1.83162004e-01 1.62827957e+00] [-1.36974737e-01 1.85390604e+00 8.51952612e-01 ... -4.46692288e-01 -1.37534261e-01 7.68241286e-01] [ 1.95397269e-02 -9.60203409e-01 1.42306304e+00 ... -2.54573345e+00 -4.69982773e-02 7.10527420e-01]] ... [[-1.86767578e-01 1.35646671e-01 -2.76471066e+00 ... 1.95580229e-01 1.16402090e+00 7.37535596e-01] [ 1.74855387e+00 7.00138390e-01 1.82253993e+00 ... 4.73802894e-01 2.70585001e-01 -2.82376677e-01] [ 1.09742135e-01 -1.29935324e+00 -6.55927539e-01 ... 6.16058767e-01 -8.28547418e-01 3.45156968e-01] ... [ 1.85125142e-01 -1.14396907e-01 -2.28860354e+00 ... -3.74998957e-01 -3.27174813e-01 2.23244262e+00] [-4.29169565e-01 -4.96149838e-01 -2.30057335e+00 ... 1.80430971e-02 4.64026481e-02 -1.68408060e+00] [ 6.46951675e-01 -6.02505207e-02 1.28878939e+00 ... 
-3.06624621e-01 2.71843523e-01 -2.45928131e-02]] [[ 7.39805043e-01 6.09583139e-01 -7.75924623e-01 ... -1.78267920e+00 -9.79103625e-01 9.05951023e-01] [ 9.71837401e-01 -4.25457358e-01 8.77340436e-02 ... -3.32789689e-01 1.72430325e+00 9.37785685e-01] [ 3.44952136e-01 -2.36906552e+00 -2.63953352e+00 ... 1.75126076e-01 -1.54333055e+00 -1.20058107e+00] ... [ 1.27945685e+00 4.05595928e-01 8.76281440e-01 ... -7.75364697e-01 -2.05793366e-01 2.24068546e+00] [-7.94719756e-01 -1.12350082e+00 -3.26918292e+00 ... 1.66545892e+00 -2.84034044e-01 -1.37219739e+00] [-1.48751484e-02 5.63667834e-01 -2.00743780e-01 ... 1.88881791e+00 -4.71032143e-01 -5.49566090e-01]] [[-4.19794321e-01 2.44714901e-01 -2.45107442e-01 ... 5.16120911e-01 1.15495074e+00 -4.01665047e-02] [-7.70017147e-01 1.07461950e-02 6.01973772e-01 ... 4.91056591e-01 -1.53426492e+00 -9.60570216e-01] [ 1.29796529e+00 4.40724224e-01 8.18152368e-01 ... -2.51122236e-01 8.59250009e-01 9.81023073e-01] ... [-1.83891729e-01 -4.72118527e-01 -2.33619675e-01 ... -1.27074867e-01 7.92614996e-01 2.26725176e-01] [ 1.67697549e-01 2.22316813e+00 -3.84913206e-01 ... -1.50216174e+00 5.65848827e-01 1.08078048e-01] [-8.42984617e-01 -1.66358697e+00 -4.88000095e-01 ... 9.59795117e-01 -1.84357393e+00 4.07617897e-01]]]] ... [[[[ 4.36536521e-01 -8.37198198e-01 -1.01501405e+00 ... 6.93288684e-01 -7.14084566e-01 1.41066897e+00] [ 1.56840765e+00 -1.70016372e+00 2.71913916e-01 ... -1.06212962e-02 1.88077107e-01 -7.89635360e-01] [ 4.76585627e-01 6.47408128e-01 -4.70100343e-01 ... 1.83567250e+00 -1.71963954e+00 -1.01676929e+00] ... [-7.34794438e-01 1.48342705e+00 1.41763437e+00 ... 8.14157367e-01 -1.02247727e+00 -2.04503998e-01] [-5.63530803e-01 2.78502703e-01 -5.70041656e-01 ... 2.06796646e-01 -6.98255002e-01 2.37281561e-01] [ 8.03423285e-01 -4.83918756e-01 -1.15676439e+00 ... -4.51365292e-01 -1.70643210e+00 8.62729847e-01]] [[-1.93241000e+00 1.56142819e+00 -5.71213141e-02 ... 
-5.32378435e-01 -2.99577922e-01 -1.75530761e-01] [-2.21049190e-01 6.95880413e-01 2.88551545e+00 ... -1.88489184e-01 1.52373827e+00 4.10892367e-01] [-5.89900196e-01 -2.99772477e+00 8.14984381e-01 ... 1.97455883e-01 2.49758482e+00 -1.05320716e+00] ... [ 1.41407833e-01 -5.11123717e-01 -3.00268054e-01 ... 9.21748221e-01 -2.80765653e-01 1.95636805e-02] [ 1.02819252e+00 2.65346432e+00 5.47617793e-01 ... -1.08335197e+00 -8.90898824e-01 2.66108513e-01] [ 2.59269565e-01 1.77210367e+00 4.45226729e-01 ... 1.94610870e+00 -4.14899513e-02 1.00136364e+00]] [[-1.62224782e+00 7.81960249e-01 -3.31805140e-01 ... -4.51638609e-01 9.42553818e-01 8.78875136e-01] [-6.10819519e-01 9.32095706e-01 -3.66130143e-01 ... 1.88931572e+00 -1.01670825e+00 2.21021935e-01] [ 8.68205354e-02 -8.96271348e-01 1.27755404e+00 ... 3.41718674e-01 -5.14196217e-01 -5.71639001e-01] ... [-2.16360784e+00 -1.71566093e+00 -5.20398855e-01 ... -1.26673549e-01 1.00750756e+00 1.57329428e+00] [-7.04091311e-01 -1.53584528e+00 -9.24965858e-01 ... -1.79906934e-01 1.83836579e+00 -8.33006442e-01] [ 2.03621316e+00 3.86967301e-01 -9.69223753e-02 ... 2.49041036e-01 5.04616022e-01 9.64078248e-01]] ... [[ 1.67889714e+00 -1.02613592e+00 -3.13269645e-01 ... 3.88775349e-01 -1.11189377e+00 1.96647197e-02] [-9.10385191e-01 -1.45195067e-01 4.14306939e-01 ... 1.17777251e-01 -4.77012485e-01 2.59686500e-01] [ 5.22772551e-01 -8.67052019e-01 -5.12180865e-01 ... -1.04943955e+00 7.66958416e-01 -1.06601310e+00] ... [-5.96542299e-01 -7.51134932e-01 -4.85689729e-01 ... -9.86608267e-01 -1.15443432e+00 3.38909209e-01] [-8.72664690e-01 5.11722088e-01 1.17655432e+00 ... -8.95411596e-02 -2.65078187e-01 2.04106390e-01] [-2.81467468e-01 9.78620410e-01 7.82638669e-01 ... 9.25063968e-01 -1.16747066e-01 1.38868535e+00]] [[-1.04533207e+00 -1.27570760e+00 -9.57644284e-02 ... -1.44557250e+00 -1.22612759e-01 -3.26029897e-01] [ 4.50987406e-02 -9.10538256e-01 -4.27439630e-01 ... 
4.17242855e-01 -3.40821356e-01 -3.13199572e-02] [ 2.44902924e-01 8.63539994e-01 9.08304691e-01 ... -1.04191124e+00 -4.37059313e-01 -9.36607063e-01] ... [ 7.61082649e-01 -1.05982363e+00 8.41325581e-01 ... 6.03440106e-01 -6.97172403e-01 -1.48908591e+00] [ 1.96503118e-01 -3.49453896e-01 -6.57097101e-01 ... 5.90412438e-01 3.90591949e-01 3.94048929e-01] [ 9.30728853e-01 9.36879456e-01 -4.56527248e-02 ... 6.92667484e-01 5.80921590e-01 1.02872290e-01]] [[ 5.58308959e-01 -9.81828094e-01 -7.68508017e-01 ... 4.24775667e-03 -1.01079226e+00 2.28566840e-01] [ 1.57044351e-01 -7.62175918e-02 6.02438033e-01 ... 1.51241207e+00 -2.45886058e-01 -3.31346780e-01] [-2.32759342e-01 -7.99262643e-01 6.10696375e-01 ... -1.10400155e-01 -7.06362426e-01 -1.18918955e-01] ... [-7.01854944e-01 -1.77127063e+00 6.31672978e-01 ... 6.72869503e-01 -5.38762212e-01 1.41174591e+00] [-7.68561721e-01 5.93785226e-01 -1.00592506e+00 ... -1.77098846e+00 -7.25627720e-01 5.51743448e-01] [ 8.87860239e-01 -9.55304742e-01 -2.95787334e-01 ... 7.61394322e-01 -9.72974479e-01 -8.55413154e-02]]] [[[-4.02331620e-01 1.04995656e+00 -6.37172341e-01 ... 2.26743389e-02 -4.33735579e-01 9.25613701e-01] [-4.63899434e-01 -8.76577646e-02 -1.84371746e+00 ... 4.92973387e-01 -3.90890986e-01 1.80479264e+00] [-3.32284600e-01 -8.35307956e-01 5.91227353e-01 ... 8.23382020e-01 -7.30859756e-01 3.82324792e-02] ... [-2.21808815e+00 -9.99771535e-01 1.70095015e+00 ... -7.26143837e-01 1.03002536e+00 -2.02360082e+00] [-4.58799690e-01 3.35095197e-01 -1.25877750e+00 ... -9.39618289e-01 5.96639574e-01 7.95726717e-01] [-1.10989273e+00 3.04010659e-01 -2.68283010e-01 ... -1.92991614e-01 1.13856661e+00 4.81329858e-01]] [[ 2.89624125e-01 5.59368253e-01 -1.51423252e+00 ... -1.71807051e-01 3.68200451e-01 -7.90430367e-01] [-1.37770426e+00 2.02449822e+00 -3.97438318e-01 ... -2.01208711e+00 -1.61183167e+00 -1.92114699e+00] [ 6.69038892e-01 4.09880221e-01 -1.76286489e-01 ... 1.54038936e-01 3.71512502e-01 -1.27071369e+00] ... 
[ 7.15347767e-01 -3.03042065e-02 -1.14224482e+00 ... -1.76204813e+00 2.81099749e+00 -1.52801722e-01] [ 3.71006310e-01 1.23736978e+00 3.54473561e-01 ... 1.86237246e-01 -2.75023460e-01 -3.33503634e-01] [ 7.40547776e-01 -2.97248900e-01 1.58762479e+00 ... 1.18566167e+00 4.44228053e-01 -4.54073280e-01]] [[ 1.07333875e+00 1.00218081e+00 -4.57604736e-01 ... 7.55362725e-03 -1.26422215e+00 1.32601357e+00] [ 9.55503643e-01 -7.52798557e-01 1.40474796e+00 ... 4.12548631e-02 6.92317724e-01 1.15685046e+00] [-6.89199626e-01 1.07158089e+00 -8.40928137e-01 ... 6.89978659e-01 2.39451811e-01 6.24954104e-01] ... [ 1.23767769e+00 3.53075564e-01 -5.24956100e-02 ... 1.05225039e+00 -1.43839598e+00 -2.24956751e-01] [-1.77642733e-01 -6.88705146e-02 -9.39250529e-01 ... 1.60287142e+00 1.66918635e-01 -1.54475439e+00] [ 1.03965712e+00 1.05290341e+00 8.04073393e-01 ... -8.63655746e-01 1.49502897e+00 5.85393369e-01]] ... [[ 1.38584507e+00 8.14741790e-01 9.55837816e-02 ... 1.20033002e+00 -1.26625621e+00 -2.32133120e-01] [-1.49845636e+00 -3.19511980e-01 1.25874949e+00 ... -1.24803580e-01 1.77081919e+00 1.15922356e+00] [-6.00439906e-01 8.94468665e-01 -3.64894748e-01 ... -2.21592844e-01 3.45197380e-01 1.04318154e+00] ... [ 8.45039546e-01 -1.67981339e+00 3.08246762e-01 ... -9.68018651e-01 -1.36499286e+00 -1.21497676e-01] [-5.08138061e-01 5.31817853e-01 -4.54546392e-01 ... 2.56243706e-01 2.50549942e-01 2.04214931e+00] [-2.18434262e+00 8.14131796e-01 1.85114831e-01 ... 8.72293711e-01 -3.97206128e-01 -4.89379168e-01]] [[ 8.47885191e-01 -1.42116165e-02 -6.19716585e-01 ... -3.90335023e-01 1.53424263e+00 -2.91529328e-01] [-7.98696220e-01 -6.92532837e-01 -5.87817490e-01 ... -5.40074229e-01 2.70815134e-01 3.83696109e-01] [-5.64595461e-01 -4.94370788e-01 -1.25329089e+00 ... 1.06446251e-01 -2.59216285e+00 1.57451594e+00] ... [-4.53714520e-01 2.82496750e-01 1.08116770e+00 ... -4.05308068e-01 1.22561884e+00 -1.18163407e+00] [-7.14238703e-01 1.58839536e+00 -9.19696689e-01 ... 
1.74101508e+00 -9.45411474e-02 6.91352487e-01] [ 4.29024398e-01 3.68464231e-01 -9.53082263e-01 ... -9.05863583e-01 1.21128809e+00 -6.92231953e-01]] [[-2.01352254e-01 -1.04828215e+00 -6.20517552e-01 ... -5.00632226e-01 -2.12880182e+00 -6.30413890e-01] [ 1.63270235e+00 -1.48045456e+00 -1.47633761e-01 ... 1.73626256e+00 1.02919888e+00 5.98753542e-02] [-5.16836464e-01 -9.77983296e-01 4.09163475e-01 ... 1.67051148e+00 5.74878514e-01 -7.13088989e-01] ... [-4.42725897e-01 7.52309918e-01 -2.17625462e-02 ... 1.35514104e+00 -8.79451558e-02 -5.32915235e-01] [ 1.95919380e-01 9.29511368e-01 5.32479882e-01 ... -1.53741896e+00 -8.04148793e-01 -1.98638380e-01] [-4.76759166e-01 -1.55707633e+00 5.27283669e-01 ... 2.69604325e+00 2.32085609e+00 9.91925538e-01]]] [[[-1.54478014e+00 2.04223609e+00 1.64156958e-01 ... 1.10158527e+00 -1.00446594e+00 4.17813331e-01] [-9.91566896e-01 1.30432582e+00 -4.24085140e-01 ... 4.55589652e-01 -1.57329381e-01 -1.90539503e+00] [ 5.95411301e-01 -5.43706596e-01 6.64277613e-01 ... -1.23584628e+00 -1.33175242e+00 -4.57739621e-01] ... [-2.65523762e-01 7.14970887e-01 7.44239569e-01 ... 9.17311013e-01 -2.08544064e+00 -7.30023921e-01] [-1.10736454e+00 4.89151895e-01 -8.51740062e-01 ... 8.33237529e-01 -5.08557320e-01 2.36574697e+00] [ 1.37921607e+00 -6.64519906e-01 4.85523283e-01 ... 1.02234817e+00 6.57512426e-01 -6.06817782e-01]] [[-1.88691616e-01 6.65802181e-01 -2.77198386e+00 ... 5.38468242e-01 -1.14672804e+00 -1.96910337e-01] [ 2.09718752e+00 -1.54099837e-01 -3.75807941e-01 ... -1.20329416e+00 -1.39488280e+00 -1.15936351e+00] [ 4.24945587e-03 3.26844901e-01 -1.13471770e+00 ... -3.09436247e-02 4.50656325e-01 7.10713983e-01] ... [ 1.98137760e-02 5.82572579e-01 -1.84189141e+00 ... -3.91535580e-01 -1.73462248e+00 1.07012606e+00] [ 1.29593587e+00 7.40815401e-01 2.20209551e+00 ... 3.66995692e-01 1.86479777e-01 -1.13239205e+00] [ 2.37334207e-01 -6.73201561e-01 1.78350639e+00 ... 
3.34541500e-01 -1.90946174e+00 -1.86253917e+00]] [[ 6.18497849e-01 2.28081211e-01 -4.34012711e-01 ... 6.67305052e-01 2.33601868e-01 -2.07736564e+00] [ 1.50119916e-01 1.60256815e+00 1.03125997e-01 ... 1.63472831e+00 1.90791690e+00 6.56704724e-01] [-8.30480933e-01 -2.31189108e+00 -1.26727387e-01 ... 1.53532565e-01 -7.29817808e-01 1.61054349e+00] ... [-7.69143641e-01 4.19251412e-01 -8.48667920e-01 ... -2.17060462e-01 -1.13594496e+00 -1.09472930e+00] [-1.60762823e+00 5.04029058e-02 -4.69524935e-02 ... 2.40505004e+00 6.35415316e-01 1.51927873e-01] [ 3.34030986e-01 -6.01310015e-01 7.91566491e-01 ... -3.07054341e-01 -7.65398085e-01 -2.72207528e-01]] ... [[ 2.07819462e-01 -8.38606358e-01 -7.62836635e-01 ... -7.82372713e-01 -9.95603204e-01 1.74667251e+00] [-1.01345289e+00 5.97277343e-01 -2.62543976e-01 ... 6.28429055e-01 -1.83987820e+00 5.89942396e-01] [ 1.48867309e-01 -2.66954273e-01 7.57624209e-02 ... -2.39297494e-01 -3.90751004e-01 8.40969443e-01] ... [ 2.47266978e-01 2.03161806e-01 1.09959066e-01 ... 7.34730363e-02 4.24076974e-01 1.19667375e+00] [-1.10486317e+00 1.50936759e+00 -1.55964434e-01 ... 1.55511248e+00 4.05391246e-01 6.40719235e-01] [ 1.70870030e+00 -1.62962914e+00 9.68378663e-01 ... 1.01885855e+00 -2.08016109e+00 8.25243175e-01]] [[ 4.04228032e-01 9.96099591e-01 -3.18100631e-01 ... 5.22818685e-01 2.35347007e-03 -7.85791934e-01] [ 2.91524023e-01 -4.06112373e-01 1.04303551e+00 ... 6.00066148e-02 6.02431595e-01 4.12486941e-02] [-1.53451955e+00 5.41222870e-01 -6.53930783e-01 ... -2.29088023e-01 -2.95139879e-01 -4.60310012e-01] ... [-3.32560986e-01 4.12245333e-01 -9.31809723e-01 ... 1.43725467e+00 5.01600802e-01 3.30635905e-01] [-6.40289545e-01 1.30350387e+00 9.30013880e-02 ... -6.96852326e-01 -1.39936700e-01 2.59262472e-01] [-1.67988315e-01 1.05818677e+00 1.16568351e+00 ... 7.07410812e-01 1.64006507e+00 9.61231530e-01]] [[-1.15351844e+00 2.00252104e+00 8.83480191e-01 ... 
1.68991327e-01 1.90792704e+00 1.05734622e+00] [-1.15222484e-01 4.81471717e-02 1.92222074e-01 ... -2.41538620e+00 6.41262531e-01 1.25509012e+00] [-4.53003228e-01 5.74974000e-01 7.95690492e-02 ... 1.42445374e+00 2.13586330e-01 1.10295355e+00] ... [-1.14218819e+00 3.85691911e-01 -6.22450709e-01 ... 4.32173103e-01 9.43209231e-01 9.52467799e-01] [-1.00529313e+00 2.93519467e-01 -5.55280484e-02 ... -4.11363959e-01 3.47680241e-01 -1.60493720e+00] [ 1.60456645e+00 6.57415926e-01 3.56379986e-01 ... 1.55328408e-01 -1.75781441e+00 -1.02631688e+00]]] [[[ 6.07891679e-01 -9.49309289e-01 1.90256447e-01 ... -3.31874758e-01 3.17086309e-01 -1.13056965e-01] [-7.89759398e-01 -4.02239352e-01 -1.36819518e+00 ... 1.74936509e+00 -1.16078067e+00 -3.55061710e-01] [-2.58174253e+00 -2.52519459e-01 -2.63731599e-01 ... -1.35606492e+00 -2.62322873e-01 -2.28317633e-01] ... [-1.68673849e+00 -1.01392806e+00 -2.59445846e-01 ... 1.14076898e-01 5.09947121e-01 -1.42393267e+00] [-1.25324190e+00 4.56018716e-01 -1.30512333e+00 ... -9.43109870e-01 -6.46989495e-02 5.04703283e-01] [-9.39415619e-02 -5.28106987e-01 7.26646841e-01 ... 6.30713115e-03 9.36641991e-01 1.41185510e+00]] [[-6.16395772e-01 -1.15984869e+00 -1.96036443e-01 ... 7.08332717e-01 -7.99190044e-01 -4.19318825e-01] [-2.49859437e-01 1.23911393e+00 -5.71036279e-01 ... 1.32759762e+00 -7.11055696e-01 -1.30318272e+00] [ 1.06314385e+00 -4.26413983e-01 -1.26421720e-01 ... 2.24936676e+00 7.34946787e-01 -1.08071792e+00] ... [ 1.39733827e+00 -6.91328049e-01 2.36430094e-01 ... -1.93505436e-01 1.37370870e-01 -1.00971448e+00] [-1.58173009e-03 -1.06074929e+00 9.74748552e-01 ... -7.48506010e-01 -8.64326954e-01 3.26408672e+00] [ 3.57951783e-02 -8.45820069e-01 -6.11540020e-01 ... 2.41583869e-01 3.67016435e-01 1.37754405e+00]] [[ 1.60509631e-01 -2.02011153e-01 -2.60594785e-01 ... 2.91139930e-01 9.88590240e-01 7.90439487e-01] [-1.16260743e+00 -5.41178524e-01 1.60652483e+00 ... 
-1.01724172e+00 -2.26491880e+00 2.00344682e-01] [ 7.58399427e-01 7.51995683e-01 -4.02884752e-01 ... 7.24845707e-01 4.07599062e-01 -2.72017241e-01] ... [ 2.98145324e-01 -3.47348452e-01 7.88036138e-02 ... -1.16801512e+00 -1.14748788e+00 -3.58615130e-01] [ 1.53758302e-01 -1.77485728e+00 -6.34553432e-01 ... -1.89468879e-02 -5.37753344e-01 -9.34220135e-01] [-6.91091642e-02 -1.11662841e+00 5.24800837e-01 ... 8.64088628e-03 8.04200888e-01 -1.00249600e+00]] ... [[-5.43503761e-01 6.41932249e-01 -8.63288760e-01 ... 1.53121173e+00 -4.70222890e-01 -8.20088923e-01] [ 8.35244536e-01 3.19534332e-01 -5.03882110e-01 ... 5.56820273e-01 1.50571430e+00 -3.81065309e-02] [ 1.11608386e+00 -3.73832494e-01 -1.09818792e+00 ... 1.15979874e+00 -5.58294237e-01 -1.33217406e+00] ... [ 8.39234054e-01 -8.32440019e-01 -1.41540259e-01 ... -4.79340464e-01 6.80374324e-01 -2.07405210e+00] [ 7.77199566e-01 9.30759132e-01 1.37780309e+00 ... 5.49898982e-01 1.22097664e-01 7.98274517e-01] [-9.57283735e-01 3.45186353e-01 4.44869697e-01 ... 2.00810599e+00 -3.64573747e-01 2.28272080e-01]] [[ 6.87927127e-01 8.78230035e-01 3.16090174e-02 ... -6.84444606e-01 2.00926495e+00 -6.37044787e-01] [ 2.33634400e+00 1.46583164e+00 -7.22921908e-01 ... -1.21222293e+00 1.08297415e-01 5.39055467e-01] [-4.89453167e-01 8.62764418e-01 8.17925572e-01 ... -7.36856088e-02 3.67848754e-01 -1.08953767e-01] ... [-6.05002582e-01 1.75111663e+00 -2.06330925e-01 ... -3.69846970e-01 -1.04078984e+00 -1.38328350e+00] [-1.65494323e+00 -1.45912838e+00 9.23679113e-01 ... 1.76860571e+00 1.49118209e+00 7.93815613e-01] [-5.53313792e-01 -5.23975641e-02 8.59010041e-01 ... 3.77298594e-01 -2.52232194e-01 5.24513960e-01]] [[-8.69170666e-01 -1.16701758e+00 -3.80440950e-01 ... 1.16249859e+00 -1.25099316e-01 6.98169291e-01] [-1.06296194e+00 6.23767674e-01 8.49452913e-01 ... -3.89598012e-01 1.07750916e+00 -1.37616944e+00] [ 1.62215137e+00 -2.44970292e-01 -1.34842110e+00 ... -2.26810241e+00 -1.27848196e+00 -1.50641516e-01] ... 
[-2.75580227e-01 3.17433119e-01 1.95756602e+00 ... 1.15713608e+00 1.26511261e-01 -3.69888574e-01] [-6.59513950e-01 8.63177106e-02 2.76472507e-04 ... 9.65489626e-01 1.52354658e+00 4.83033419e-01] [ 2.19356680e+00 -8.61882567e-01 6.68512046e-01 ... -1.01785529e+00 8.96501094e-02 1.60952246e+00]]] [[[ 1.27352595e+00 4.29707766e-01 6.20696664e-01 ... -1.11153233e+00 1.04015112e+00 -7.19335794e-01] [-4.87683803e-01 -4.87816691e-01 1.14358091e+00 ... 1.11485314e+00 1.64882019e-02 -1.02329719e+00] [-6.06649101e-01 -1.40152834e-02 -7.18935251e-01 ... -3.44529539e-01 -1.14325501e-01 7.85879016e-01] ... [-2.16016650e-01 1.55636334e+00 -1.22281218e+00 ... -1.23584831e+00 -9.81701672e-01 6.82976723e-01] [ 2.65420973e-01 4.45245057e-01 -1.17000866e+00 ... -7.07377613e-01 7.34916389e-01 3.36462408e-01] [ 2.50808269e-01 -3.71844232e-01 -6.72442794e-01 ... -1.31716597e+00 5.22610366e-01 4.72524136e-01]] [[ 1.77903399e-01 -1.30083442e+00 1.01476300e+00 ... -1.22160029e+00 7.80318916e-01 5.27292550e-01] [ 1.70028663e+00 8.41143608e-01 -5.13560891e-01 ... 1.22766447e+00 -3.29463035e-01 -1.14199817e-01] [ 5.51103592e-01 7.96869814e-01 9.02001560e-01 ... -4.93022382e-01 -8.85473490e-01 -1.12211215e+00] ... [-5.42125046e-01 -1.85301468e-01 4.32953477e-01 ... -1.44431102e+00 1.36915147e+00 3.23383778e-01] [-1.01169360e+00 -1.05721450e+00 1.24550223e+00 ... 9.10491228e-01 1.16238487e+00 -2.46721196e+00] [ 4.51120228e-01 -2.00378084e+00 -5.75299740e-01 ... -1.22271085e+00 -5.60515262e-02 -3.61697197e-01]] [[-2.33858407e-01 1.04202986e+00 -9.99349594e-01 ... -1.32824764e-01 -1.25626600e+00 1.75482464e+00] [-4.21126127e-01 -8.55265915e-01 -2.22910786e+00 ... 2.36173138e-01 -1.69698489e+00 -3.86288077e-01] [ 1.61054373e+00 -1.17656624e+00 -1.12220323e+00 ... -7.17402935e-01 9.89856899e-01 -8.10425460e-01] ... [-4.31244642e-01 1.78654790e+00 1.36487460e+00 ... 4.72024918e-01 -6.30901098e-01 1.15587151e+00] [ 7.28547215e-01 -1.45741180e-01 1.67934930e+00 ... 
4.84858900e-01 1.81771553e+00 5.89857459e-01] [ 1.07967257e+00 -1.20150197e+00 -9.99696851e-01 ... 3.84525746e-01 -4.10133541e-01 2.87856340e+00]] ... [[ 5.08274317e-01 3.42981607e-01 -1.83727499e-02 ... -5.07860363e-01 -4.66360271e-01 -1.33187151e+00] [ 4.09138501e-01 -5.55985451e-01 -5.94625734e-02 ... -1.41528141e+00 -8.09730053e-01 -1.34586751e-01] [ 2.96716869e-01 -2.56723195e-01 -5.06254852e-01 ... -2.05710983e+00 1.42607892e+00 6.25004098e-02] ... [-9.73514616e-01 -5.64082503e-01 -3.29136737e-02 ... 2.19217181e-01 -9.01869297e-01 3.39036733e-01] [ 9.09499824e-01 -1.49643457e+00 -4.85809594e-01 ... 1.49512076e+00 3.48359644e-01 9.72510576e-01] [ 3.07963967e-01 1.48878765e+00 1.02227759e+00 ... 1.04589176e+00 -2.16834933e-01 -2.05741659e-01]] [[ 1.80304140e-01 -1.08289957e+00 -1.13308871e+00 ... 4.99401540e-01 2.32070535e-01 2.78789788e-01] [ 1.48898602e+00 1.47271347e+00 -1.02710746e-01 ... 9.64886308e-01 9.40608740e-01 9.75910485e-01] [ 1.15947351e-02 6.67697042e-02 2.71810722e-02 ... -9.02033091e-01 -6.27168059e-01 1.00392425e+00] ... [-7.51190960e-01 1.61344051e+00 -2.58568019e-01 ... 2.77870476e-01 -5.83779871e-01 3.48304003e-01] [ 9.16850209e-01 8.02493691e-02 -2.66225791e+00 ... 1.00684285e+00 -1.07220197e+00 -9.43700746e-02] [-1.52753687e+00 -2.70681717e-02 6.23731852e-01 ... -4.14455980e-01 1.50701940e+00 -4.35464948e-01]] [[ 1.40737087e-01 -7.91416287e-01 -5.50027788e-01 ... 3.09507638e-01 -1.27104020e+00 -1.97198033e-01] [-7.71497428e-01 -1.01571834e+00 -8.84695828e-01 ... -4.34747308e-01 -1.91280198e+00 8.79953444e-01] [-2.21727043e-01 -9.76389825e-01 -6.20231628e-01 ... -8.67307007e-01 5.44787467e-01 2.44390416e+00] ... [-4.43562001e-01 -1.16098571e+00 6.50725782e-01 ... -6.71460152e-01 -1.04527712e+00 1.42376387e+00] [-5.32152951e-01 -1.00681257e+00 3.18182856e-01 ... 1.01755428e+00 8.86222064e-01 1.67216912e-01] [-2.91984886e-01 -7.42582977e-01 -1.36284244e+00 ... 
-1.11454630e+00 5.11993408e-01 -1.26836610e+00]]] [[[ 1.12687147e+00 6.87207282e-01 -8.96483839e-01 ... 6.64670229e-01 -1.15655589e+00 1.77938390e+00] [-1.02009094e+00 2.17237383e-01 2.85126269e-01 ... -1.74410045e+00 -1.47319380e-02 -5.98117888e-01] [-5.77866316e-01 6.06611431e-01 -5.66461086e-01 ... 3.47975977e-02 1.29901850e+00 1.57414520e+00] ... [ 1.86369729e+00 -4.94661033e-01 -9.49337304e-01 ... -2.24076331e-01 -3.72102767e-01 2.54930586e-01] [ 1.24895144e+00 -5.30104041e-01 1.59250990e-01 ... -1.98025334e+00 -4.21262771e-01 -6.72455728e-01] [-1.36848521e+00 -4.01370406e-01 -5.20054638e-01 ... 3.72644782e-01 -1.27750739e-01 -1.86242616e+00]] [[-1.44421101e+00 2.84001492e-02 6.52752757e-01 ... -5.16653717e-01 1.19032927e-01 5.46270907e-01] [ 5.07824123e-01 -1.03204355e-01 -3.10246721e-02 ... 4.02903348e-01 9.39060509e-01 1.17875981e+00] [ 2.26436305e+00 -4.12735939e-01 -2.89734781e-01 ... -1.38812542e+00 -7.36768663e-01 -2.03221178e+00] ... [ 7.60074735e-01 -7.65573010e-02 -7.72858441e-01 ... 1.12097216e+00 1.23193368e-01 -1.23585701e+00] [ 4.98964578e-01 1.67334116e+00 1.57526505e+00 ... 8.01941454e-01 1.80011141e+00 1.94640160e-02] [ 4.00777549e-01 1.28700900e+00 8.74737680e-01 ... -2.73559660e-01 1.00312531e+00 -7.56821573e-01]] [[ 1.06249464e+00 -2.56804729e+00 1.46172118e+00 ... 3.60493809e-01 -6.65699422e-01 2.49082112e+00] [-1.22776985e+00 -1.05918944e+00 -2.34529674e-01 ... -1.56107080e+00 1.54577598e-01 9.51158702e-01] [-6.85365081e-01 4.10226107e-01 -2.42408410e-01 ... 1.90463766e-01 -1.61496222e+00 -1.02772284e+00] ... [ 8.42414856e-01 4.82614897e-02 1.06691480e+00 ... 4.66206521e-01 -8.84888694e-02 -7.80635238e-01] [ 5.73272169e-01 2.60854483e-01 -7.16434270e-02 ... -7.58442938e-01 -7.44930133e-02 -4.09713984e-01] [ 1.69447744e+00 -1.18254936e+00 -2.83055872e-01 ... 7.06928909e-01 9.23361599e-01 6.46296859e-01]] ... [[ 1.22723065e-01 1.41602254e+00 1.63859338e-01 ... 
8.94809246e-01 -5.28081238e-01 9.53165367e-02] [-1.94519663e+00 8.72577012e-01 6.62501514e-01 ... -1.18073471e-01 4.84087229e-01 5.68535507e-01] [-1.80509269e+00 -4.07100201e-01 2.04032108e-01 ... 5.55934310e-01 4.15356189e-01 9.20915425e-01] ... [ 8.35988045e-01 -8.17403942e-02 -1.73282373e+00 ... 4.63336974e-01 2.50732064e-01 -1.05040181e+00] [ 2.02303320e-01 1.13922738e-01 7.01340660e-03 ... -8.23151786e-03 -1.01887949e-01 6.37206435e-01] [ 1.42962897e+00 -1.12309575e+00 2.30256230e-01 ... -4.63985175e-01 -1.07224917e+00 -7.86538422e-01]] [[ 1.42474079e+00 -1.29522514e+00 -5.80435276e-01 ... -5.55705249e-01 -8.11176598e-01 2.90274787e+00] [-1.00796652e+00 -2.27088615e-01 1.45538783e+00 ... 6.91090643e-01 -8.33386779e-01 -3.31315994e-01] [-3.62685680e-01 -2.01715302e+00 -5.95409453e-01 ... 5.39618611e-01 1.18537426e+00 -1.39023101e+00] ... [ 1.88802230e+00 -6.02978051e-01 -2.77284414e-01 ... -6.24871135e-01 3.58449101e-01 8.64444375e-01] [-3.00623953e-01 -2.36457542e-01 8.95967305e-01 ... 7.73108482e-01 7.36167312e-01 4.81978327e-01] [ 1.19826806e+00 -3.74185205e-01 8.00408483e-01 ... 3.82182330e-01 -7.70024359e-01 7.89210379e-01]] [[ 7.95765042e-01 -4.58370358e-01 3.09137392e+00 ... -4.29765999e-01 2.71922499e-01 -2.28248667e-02] [-6.63423657e-01 5.50597012e-01 2.95952410e-01 ... 4.71750796e-01 5.89204669e-01 -7.05319166e-01] [-1.82380867e+00 1.41217959e+00 -6.92372620e-01 ... 4.78561014e-01 7.29046822e-01 -1.79761246e-01] ... [ 1.22752404e+00 1.09254193e+00 1.99259484e+00 ... -1.18304086e+00 1.04327989e+00 -1.40448853e-01] [-3.20431530e-01 -1.30331039e+00 -3.68656009e-01 ... -6.13254845e-01 -4.30890441e-01 -3.23401660e-01] [-5.00681818e-01 9.66455519e-01 -4.93214309e-01 ... 3.57832760e-01 9.10970747e-01 -1.49505258e+00]]]] [[[[ 1.17513075e-01 1.41066754e+00 1.50196517e+00 ... -1.04021347e+00 2.60089684e+00 6.22735202e-01] [ 1.00731969e+00 1.05213709e-01 -8.86406660e-01 ... 
-3.69520366e-01 6.88985527e-01 -1.35138047e+00] [-9.78397056e-02 1.04482818e+00 -1.07924521e+00 ... -1.25029206e+00 2.46176982e+00 -1.24039993e-01] ... [ 1.32120466e+00 -2.73742080e-01 -9.92444575e-01 ... 1.40322793e+00 5.49881279e-01 2.39111871e-01] [-3.85482490e-01 4.92348880e-01 -3.21433127e-01 ... -5.73876679e-01 2.55558968e-01 1.09846568e+00] [ 1.18974471e+00 5.53468406e-01 -1.47609282e+00 ... -4.09395903e-01 -1.15336812e+00 1.00844026e+00]] [[-2.25038275e-01 -4.46291417e-01 -2.12982678e+00 ... -6.35189056e-01 2.29619622e+00 8.83603841e-02] [ 1.14486851e-01 -1.11528695e+00 -5.24279296e-01 ... -7.99023807e-01 -1.05434728e+00 6.23672605e-01] [ 1.82095647e-01 -8.30537200e-01 -4.26943064e-01 ... -5.79593778e-01 1.89694881e-01 1.02258682e-01] ... [ 5.67987084e-01 -9.37776625e-01 4.11716253e-01 ... 1.08077121e+00 7.57196248e-01 8.63877609e-02] [ 4.78582740e-01 3.13488275e-01 1.38479257e+00 ... 6.82213783e-01 -4.78179276e-01 7.94287741e-01] [-9.01551962e-01 -1.97796345e+00 -1.60217202e+00 ... 1.76670218e+00 1.11045921e+00 6.75106468e-03]] [[ 8.32149148e-01 -5.68368256e-01 -3.87821734e-01 ... 1.13197291e+00 8.70756507e-02 2.55048013e+00] [ 1.47689116e+00 -1.24916720e+00 -2.01269343e-01 ... -2.89058208e-01 1.21735597e+00 1.54032028e+00] [ 3.26176465e-01 3.02946288e-02 -6.83366895e-01 ... -6.00203216e-01 -7.94520199e-01 2.07710356e-01] ... [ 2.41851926e-01 9.24217224e-01 1.68596244e+00 ... -9.36906934e-01 1.32516921e+00 2.58205724e+00] [ 3.74816120e-01 -7.03275740e-01 1.62077993e-01 ... 3.99123639e-01 -7.63985813e-02 1.11705065e+00] [ 1.03026129e-01 1.51891100e+00 1.53592324e+00 ... -1.25584841e+00 3.11876088e-01 4.30108309e-01]] ... [[-9.93170500e-01 -2.48820912e-02 -1.27202225e+00 ... 6.24835610e-01 -2.11170435e+00 1.08467102e+00] [-7.58477226e-02 -4.09461617e-01 1.10246336e+00 ... 1.95721424e+00 -8.50570858e-01 6.59037232e-01] [-7.60602891e-01 -9.98802111e-02 -7.53375292e-01 ... -2.05503845e+00 8.46911013e-01 -1.03874981e+00] ... 
[-8.40729237e-01 -9.19376984e-02 -7.58174837e-01 ... -1.63340434e-01 3.40562224e-01 9.73033667e-01] [ 1.06661189e+00 -4.20114636e-01 -6.39916778e-01 ... 2.03833848e-01 4.98083085e-02 4.46770430e-01] [ 1.99536908e+00 3.34573358e-01 -2.60129452e+00 ... -1.76783442e+00 1.27760386e+00 4.95572723e-02]] [[ 7.29614675e-01 -1.10202229e+00 -9.58077967e-01 ... -1.60042644e-01 6.00233138e-01 -2.16107655e+00] [ 4.45281208e-01 -3.71262282e-01 -1.70891511e+00 ... 4.52272892e-01 1.09662771e-01 1.02344608e+00] [ 1.12466121e+00 6.10263467e-01 1.71947753e+00 ... 1.14329541e+00 -1.14869714e+00 1.57182187e-01] ... [ 8.94093096e-01 1.28185487e+00 -5.97079992e-01 ... 3.22117299e-01 1.44937015e+00 2.00700641e+00] [-9.19552922e-01 1.45758438e+00 -7.65495002e-01 ... -2.42853022e+00 -1.12986159e+00 1.15575147e+00] [ 3.49548459e-03 -5.69702566e-01 8.20893526e-01 ... -4.05102521e-01 -1.18956864e+00 1.75557882e-02]] [[-6.00081205e-01 4.83334780e-01 -9.75141048e-01 ... -8.71948361e-01 2.84769058e-01 -1.03863752e+00] [-6.26788735e-01 -3.56079906e-01 5.93633577e-02 ... -1.25901902e+00 -1.93054810e-01 -2.87840843e+00] [-3.78796011e-01 6.06886387e-01 5.19396365e-01 ... -9.11621988e-01 -6.53830469e-01 -9.68208387e-02] ... [-2.87433445e-01 -1.46313524e+00 -1.02897322e+00 ... 8.96584868e-01 -3.70899469e-01 3.94293994e-01] [-1.28132355e+00 3.90164614e-01 1.20004928e+00 ... 4.20815349e-01 -9.96913850e-01 -6.64374053e-01] [ 1.12288930e-01 8.56402755e-01 -1.19724429e+00 ... -7.45088279e-01 2.12542266e-01 5.63926846e-02]]] [[[-6.18098140e-01 1.92648864e+00 2.77331084e-01 ... 1.30519044e+00 -4.16733712e-01 1.78262234e+00] [-9.45398733e-02 -6.81571901e-01 -3.22312593e-01 ... 8.79966170e-02 2.63370931e-01 1.34023249e-01] [-4.66465801e-01 3.09154183e-01 -3.40233110e-02 ... 7.05025911e-01 6.34725034e-01 6.43724352e-02] ... [-1.46054721e+00 3.09386998e-01 -2.26618186e-01 ... 3.68063211e-01 1.32414377e+00 -1.47410023e+00] [ 9.86120641e-01 -1.14781129e+00 1.18252122e+00 ... 
1.27278471e+00 1.35753071e+00 1.98097318e-01] [ 1.38413942e+00 3.77031386e-01 -1.52882338e+00 ... -1.07783772e-01 -9.46763337e-01 3.36497068e-01]] [[ 4.10324000e-02 -6.99351430e-02 6.25889897e-02 ... -9.54122961e-01 1.51003671e+00 5.53272307e-01] [-3.02345991e+00 -3.16540778e-01 1.02452230e+00 ... 1.85993344e-01 -4.44195867e-02 1.17955565e-01] [-8.92122030e-01 -1.03439355e+00 1.84590614e+00 ... 2.07666802e+00 1.76632154e+00 -3.71098936e-01] ... [-7.37190619e-02 1.00097585e+00 6.03366673e-01 ... 7.74663687e-01 1.47595656e+00 -6.00518167e-01] [-4.76107188e-02 1.44896495e+00 -1.98925674e+00 ... -3.64038080e-01 7.29895160e-02 -6.16699636e-01] [ 7.10107803e-01 1.26752484e+00 -4.08368349e-01 ... -9.23824683e-03 -8.57757181e-02 2.60131419e-01]] [[ 8.56939629e-02 7.37937093e-01 -5.48409581e-01 ... 3.30139667e-01 1.46318480e-01 -3.28426927e-01] [-3.33332443e+00 5.36322236e-01 -3.86962593e-01 ... 9.43829536e-01 5.72201312e-02 2.45557666e+00] [ 1.75064754e+00 -2.53720021e+00 2.60925502e-01 ... 1.87020040e+00 -1.70567393e+00 -8.71557176e-01] ... [-1.08668900e+00 -1.06265414e+00 -7.54784584e-01 ... 6.90255225e-01 1.64566076e+00 -4.58895922e-01] [-2.17234492e-01 -7.94611871e-01 1.69265878e+00 ... -1.06431234e+00 7.08827153e-02 -7.97081441e-02] [ 8.22594345e-01 6.79059625e-01 1.45795131e+00 ... 4.01783809e-02 -1.30869165e-01 -1.25099766e+00]] ... [[ 4.54608321e-01 -1.22539252e-01 -1.00771976e+00 ... -4.78863895e-01 -1.10755348e+00 1.46487975e+00] [-1.02572061e-01 -1.68168879e+00 -1.98816851e-01 ... 1.38592649e+00 -1.29234016e-01 4.52314526e-01] [-1.86479449e-01 7.72974789e-01 -1.05924296e+00 ... 3.98603678e-01 5.39525688e-01 -7.83611238e-01] ... [-6.33402467e-01 -7.41071224e-01 -2.69202977e-01 ... 9.39201355e-01 -1.48972964e+00 -8.13661397e-01] [ 6.97824419e-01 -7.95281231e-02 6.85292840e-01 ... -6.25703156e-01 -1.11718094e+00 -9.80685711e-01] [ 7.99119294e-01 2.42349601e+00 4.05085117e-01 ... 
-8.80624473e-01 2.14880490e+00 -1.59652495e+00]] [[ 6.13427162e-01 1.21301591e+00 8.51334184e-02 ... 3.30234081e-01 8.22012544e-01 5.67377880e-02] [-2.53555000e-01 7.33844221e-01 -3.28244805e-01 ... -9.06478405e-01 -1.16156101e+00 1.30105519e+00] [-7.29785144e-01 -1.39047432e+00 -6.51578367e-01 ... -2.41276979e+00 1.93774176e+00 -8.07417810e-01] ... [ 1.28288972e+00 1.75670872e-03 1.13557386e+00 ... -1.30733979e+00 -6.26805782e-01 -3.98424804e-01] [ 3.53129476e-01 6.57074690e-01 -7.03412950e-01 ... 3.77789944e-01 1.43867627e-01 9.89235282e-01] [-2.18062341e-01 5.64442158e-01 1.45758343e+00 ... 8.03581715e-01 1.34410977e+00 4.91906554e-01]] [[-1.04918408e+00 -1.39053632e-02 -1.66534793e+00 ... -1.13322866e+00 -4.76390868e-01 -1.90753639e+00] [ 7.22675562e-01 -1.93942451e+00 4.51384604e-01 ... 9.16559696e-01 -2.62209088e-01 -5.63886940e-01] [-7.49260485e-01 -6.04048193e-01 -6.93162799e-01 ... -1.57497287e+00 -7.45330751e-01 -6.39985681e-01] ... [ 1.12674549e-01 -6.74315989e-01 -5.75124621e-01 ... 2.14890286e-01 3.73586565e-02 6.31668746e-01] [ 2.11521417e-01 3.81509990e-01 8.32207441e-01 ... -9.03455496e-01 2.09640336e+00 5.47925234e-01] [ 3.50255907e-01 1.49829233e+00 -8.45133960e-01 ... 2.21566868e+00 1.16626942e+00 -3.13080996e-01]]] [[[-4.13546354e-01 -2.54282594e+00 -1.41294086e+00 ... 2.08894086e+00 -7.82856345e-01 -1.03013016e-01] [-7.57797062e-01 6.36388958e-01 -6.76573738e-02 ... -1.23360848e+00 -7.41540909e-01 -1.84675738e-01] [ 6.51394606e-01 4.66792852e-01 1.77086785e-03 ... -9.35647547e-01 4.00956571e-01 1.01458645e+00] ... [ 3.79905164e-01 9.11904931e-01 -8.49160627e-02 ... -1.09467566e+00 -1.86723226e-03 2.06745565e-01] [-6.47433519e-01 -6.85630560e-01 -4.23128009e-01 ... -1.04357076e+00 1.14591348e+00 4.86828089e-01] [ 1.18055606e+00 9.40320611e-01 1.30253625e+00 ... -3.77313763e-01 -4.77368593e-01 -1.54051757e+00]] [[-1.90672910e+00 -2.62161285e-01 -3.13339472e-01 ... 
8.46991986e-02 1.33093560e+00 -1.09081246e-01] [-2.43116426e+00 -8.13959479e-01 3.11489925e-02 ... -9.21302438e-01 -4.02540386e-01 -1.38695073e+00] [-6.69947788e-02 6.01380467e-02 -8.81335855e-01 ... -1.38455808e-01 -2.06525826e+00 -1.35641015e+00] ... [ 7.42055595e-01 -5.51692069e-01 -7.25184858e-01 ... 1.37948024e+00 1.57242489e+00 -2.64007509e-01] [ 1.39461184e+00 3.06851536e-01 7.92565405e-01 ... 4.22048479e-01 -1.24327493e+00 -8.15688968e-01] [ 1.06972325e+00 -8.02787364e-01 -6.92687452e-01 ... -2.07663703e+00 -7.43084133e-01 -6.13325894e-01]] [[ 6.91504776e-01 -7.68914223e-01 -9.52764213e-01 ... -1.34577322e+00 -3.52249831e-01 -1.22725971e-01] [-8.96950722e-01 1.06030059e+00 1.06540418e+00 ... -1.42165601e+00 -1.61071098e+00 -8.68602633e-01] [ 1.96594059e-01 5.88845134e-01 -2.89915383e-01 ... -1.53479838e+00 7.46230721e-01 -2.49847341e-02] ... [-3.15081269e-01 1.89149350e-01 -1.27997221e-02 ... 7.82944858e-01 -1.09615493e+00 1.38582325e+00] [-2.46458560e-01 1.66208470e+00 -1.46930671e+00 ... -6.13581598e-01 2.89657980e-01 2.20382071e+00] [ 1.29450154e+00 -1.22161138e+00 9.61646438e-01 ... -1.65977871e+00 -5.12098491e-01 1.62759626e+00]] ... [[-6.01031065e-01 -1.20223165e+00 1.34900296e+00 ... 5.01058638e-01 1.47914457e+00 8.20310295e-01] [-7.58110046e-01 1.34292591e+00 1.35500944e+00 ... 1.29359353e+00 8.60278070e-01 1.76162004e+00] [ 2.52076052e-02 -3.53222936e-01 8.12771261e-01 ... 2.41364405e-01 -9.84772384e-01 -1.49409986e+00] ... [ 1.99349582e-01 8.65659118e-01 -6.34744525e-01 ... -1.50469291e+00 -1.59923255e+00 1.92561001e-02] [ 5.24476767e-01 5.66367209e-01 -3.82562757e-01 ... 5.15603833e-03 -4.37798589e-01 -1.28546667e+00] [-2.63090163e-01 -7.42271185e-01 -2.13315740e-01 ... 2.94090003e-01 1.27699709e+00 1.00771344e+00]] [[-9.49498117e-02 -1.41471159e+00 -4.89642531e-01 ... -1.97520331e-01 1.03893638e+00 6.76485777e-01] [-8.67616117e-01 1.52055395e+00 -1.15236640e+00 ... 
5.69418311e-01 1.50009489e+00 -1.00229013e+00] [-1.54195750e+00 1.68078995e+00 1.11527133e+00 ... 1.38143373e+00 5.16012073e-01 -9.07561660e-01] ... [ 5.10723591e-01 1.83548009e+00 1.43571720e-01 ... 1.23243594e+00 7.95223296e-01 -9.50673372e-02] [ 4.66620535e-01 -1.47207046e+00 -1.96560299e+00 ... -2.00386018e-01 -7.02991486e-01 -1.31440550e-01] [-5.93585849e-01 -5.00024594e-02 -7.16972172e-01 ... -4.71768707e-01 1.26911685e-01 -9.18433666e-01]] [[ 5.49838305e-01 5.56703269e-01 1.25358629e+00 ... -2.72721124e+00 -1.40703249e+00 -1.00776410e+00] [-8.04916583e-03 1.49478245e+00 -6.80435777e-01 ... -7.64760315e-01 2.42625728e-01 5.85736692e-01] [ 6.63280308e-01 -7.66491950e-01 1.50870645e+00 ... -7.52989501e-02 -7.50029564e-01 -3.35083842e-01] ... [-1.57097027e-01 1.26830614e+00 2.10992241e+00 ... 1.08554967e-01 8.93642604e-01 -1.22127879e+00] [ 1.19912362e+00 -7.59575665e-01 -1.12613356e+00 ... 8.79197836e-01 -9.94647741e-01 -6.32942498e-01] [ 8.58300254e-02 1.18159652e+00 -4.28751737e-01 ... 5.64415038e-01 -1.24506593e+00 1.46160915e-03]]] [[[ 4.51644063e-01 -2.06046194e-01 -1.99209824e-01 ... -9.65207636e-01 -6.84579462e-02 1.09553826e+00] [-3.45078647e-01 8.75413358e-01 7.31275603e-02 ... 4.64643449e-01 3.17301333e-01 5.93413115e-01] [-7.43833557e-02 -9.17242691e-02 -8.58465791e-01 ... -6.24775767e-01 2.05573988e+00 -1.08409119e+00] ... [-1.55283391e-01 2.18067184e-01 3.64432000e-02 ... 1.62499338e-01 1.02844298e+00 1.03148632e-01] [ 1.17886707e-01 1.34296149e-01 5.27112603e-01 ... 2.28678912e-01 -6.42298236e-02 3.19276989e-01] [-9.33762789e-01 4.70445842e-01 1.10504484e+00 ... -4.19064611e-01 -8.68073385e-03 -6.48139775e-01]] [[ 4.27126646e-01 1.18502729e-01 -5.02516150e-01 ... 1.07927215e+00 -4.88920838e-01 6.01427436e-01] [ 1.23810351e+00 -2.05322957e+00 -4.96715307e-01 ... 2.20025516e+00 5.62214077e-01 -9.10238445e-01] [ 6.35774791e-01 -8.96197021e-01 1.78829503e+00 ... 1.31378889e-01 -2.43857503e+00 2.02713549e-01] ... 
[ 9.31084752e-01 -3.96762908e-01 -2.12132573e-01 ... -5.08969948e-02 5.19665658e-01 -9.12803650e-01] [-2.53944188e-01 -8.29356611e-01 8.16047788e-01 ... 9.15269852e-01 -1.12749469e+00 6.94909811e-01] [-6.93363488e-01 -1.98596865e-01 -8.97916257e-02 ... 8.10463801e-02 -2.39867255e-01 3.98736328e-01]] [[ 2.00552896e-01 -3.24222505e-01 -9.60773408e-01 ... 9.48436260e-02 1.95658013e-01 4.19544160e-01] [ 1.31147814e+00 3.98441255e-01 1.50285661e+00 ... -1.05824101e+00 -1.66828167e+00 -8.25273097e-01] [-6.25564218e-01 -6.19883120e-01 8.70557129e-01 ... 3.66697133e-01 3.85274708e-01 -1.34432578e+00] ... [ 8.61899197e-01 -3.57124269e-01 1.71516404e-01 ... 3.11722845e-01 4.17350322e-01 -7.22472012e-01] [-6.87554240e-01 -1.31584513e+00 -9.91791368e-01 ... 4.84321058e-01 -6.98134601e-01 4.21301544e-01] [ 1.58548489e-01 9.18396831e-01 1.63022578e+00 ... 2.16121578e+00 5.40397763e-01 -8.86150479e-01]] ... [[-1.26505649e+00 -1.70014441e-01 6.17876112e-01 ... -6.59930646e-01 6.41397417e-01 -1.45943552e-01] [ 1.78341544e+00 1.33787870e-01 -1.53307751e-01 ... 4.27753339e-03 -1.74074018e+00 -6.65333092e-01] [-9.98483241e-01 -1.06300950e-01 1.18198884e+00 ... -8.44469786e-01 7.78117001e-01 2.19337374e-01] ... [-1.23574905e-01 -2.23856640e+00 -5.09169638e-01 ... 9.36553419e-01 -4.71859545e-01 -2.83104837e-01] [-1.12061679e+00 1.47241578e-01 -2.07934690e+00 ... 5.33271194e-01 1.82768643e+00 4.85130399e-01] [ 8.94855261e-01 3.43724459e-01 2.91743755e-01 ... 6.98538125e-01 1.01342452e+00 3.48374844e-01]] [[-1.12829413e-02 -1.10478759e+00 -4.18310642e-01 ... 8.66415262e-01 -8.66983593e-01 1.55782795e+00] [-4.56809521e-01 -9.46371019e-01 2.30726480e-01 ... -1.74436137e-01 1.61912203e+00 -1.03514910e+00] [-1.87360227e-01 -4.05683160e-01 3.45271170e-01 ... 1.24956679e+00 -1.51413321e+00 -5.98087311e-01] ... [ 8.58230412e-01 4.62951250e-02 -1.36071694e+00 ... -1.20286189e-01 -1.67091918e+00 -1.95612520e-01] [ 7.36853182e-02 5.96591711e-01 3.24753582e-01 ... 
-1.12066925e+00 1.99305272e+00 -9.81955409e-01] [ 9.15177047e-01 9.18614447e-01 1.77684367e+00 ... -1.11952953e-01 2.04486504e-01 3.14968497e-01]] [[ 1.15329301e+00 1.39047708e-02 -5.36271513e-01 ... 3.69920254e-01 -6.28035545e-01 4.51018423e-01] [ 5.50114632e-01 -8.37155223e-01 1.76615655e+00 ... 1.73450634e-01 -2.10953712e+00 3.60658348e-01] [ 3.45573902e-01 3.17361653e-01 -8.10797513e-02 ... 4.26579654e-01 -6.85868084e-01 4.73123968e-01] ... [-4.47890908e-02 -1.31786466e+00 6.05567455e-01 ... -1.00417614e+00 -1.05684745e+00 -1.29874802e+00] [ 3.57530206e-01 8.08822989e-01 -3.12215716e-01 ... -1.01526886e-01 1.42533779e+00 7.56879330e-01] [-1.61835611e+00 9.98243392e-01 1.62270951e+00 ... 4.76117969e-01 -4.52819377e-01 -1.61004412e+00]]] [[[-4.72747505e-01 -2.42834598e-01 -2.59725273e-01 ... 4.34466064e-01 7.79826105e-01 6.30344391e-01] [-1.78222120e-01 9.81823206e-01 7.89261699e-01 ... -1.67857242e+00 -1.04918502e-01 1.33450067e+00] [ 2.19533992e+00 -1.17951095e+00 2.31251884e+00 ... -1.24389088e+00 -4.35284436e-01 -6.88719869e-01] ... [ 5.08108437e-01 -2.97099829e-01 1.15808284e+00 ... 1.76351643e+00 1.11195850e+00 6.48310259e-02] [-1.83368459e-01 -3.10640007e-01 4.27364498e-01 ... -1.29585409e+00 -1.96297705e-01 -4.19216722e-01] [ 7.14544892e-01 1.87915608e-01 8.19405675e-01 ... 9.30656672e-01 7.65352845e-01 -4.75182891e-01]] [[-1.05864167e+00 1.97436631e+00 1.04569519e+00 ... 6.85806096e-01 3.82526368e-01 2.26472646e-01] [-3.09979796e-01 1.41556036e+00 2.17869449e+00 ... -2.08970353e-01 -1.00506616e+00 -3.70404631e-01] [ 3.59258443e-01 3.80432874e-01 -8.83164167e-01 ... -1.55210257e+00 -9.59164426e-02 1.95402336e+00] ... [-1.35840786e+00 -5.27788520e-01 -9.24743831e-01 ... 4.97387052e-01 -4.76472229e-02 8.51025820e-01] [ 9.02760744e-01 1.21587586e+00 1.19073451e+00 ... -6.69120014e-01 -2.65964001e-01 -1.29087448e+00] [-3.10003906e-01 -1.40367460e+00 2.46176235e-02 ... 
-1.09646201e+00 5.58407046e-02 -1.99145347e-01]] [[ 2.19400287e+00 8.14180374e-01 -5.55804372e-01 ... 7.14214861e-01 -1.53792059e+00 5.26525557e-01] [-1.92920435e-02 3.98903191e-02 1.15400422e+00 ... 6.13631606e-01 -6.82665884e-01 7.00211942e-01] [-1.38305366e-01 -1.82912278e+00 -1.09416437e+00 ... 1.54031122e+00 -5.81897914e-01 4.18668926e-01] ... [-7.82529950e-01 9.50309560e-02 3.63139927e-01 ... 5.16375422e-01 -6.31581187e-01 1.85415208e-01] [-9.51010436e-02 8.52378011e-01 -2.51724291e+00 ... -1.14954948e+00 1.12957025e+00 -6.58558428e-01] [ 4.88582075e-01 3.04366320e-01 -6.03822827e-01 ... 1.13193440e+00 -9.24279094e-01 -4.70319450e-01]] ... [[-1.37106299e+00 -3.51067662e-01 -5.40821552e-01 ... 1.91084787e-01 4.05371785e-01 5.52941918e-01] [-1.69960415e+00 4.24365908e-01 -7.47338235e-02 ... 1.53081441e+00 9.94424343e-01 -2.34936237e-01] [ 1.06481242e+00 3.47132087e-01 4.10484970e-01 ... 2.19436139e-01 2.11042359e-01 -6.11493587e-01] ... [-1.01840460e+00 -2.12693080e-01 -1.21539843e+00 ... 6.41991675e-01 -1.69138086e+00 1.71149874e+00] [-5.28220415e-01 1.17695713e+00 -2.15744227e-02 ... 1.75423360e+00 -1.67962515e+00 5.79135239e-01] [ 1.64666307e+00 1.08961999e+00 1.27530754e+00 ... 1.45507172e-01 -2.87601018e+00 6.30445421e-01]] [[-3.17566842e-01 -6.31241143e-01 -8.64478886e-01 ... -6.67101219e-02 -6.66168332e-01 7.32571781e-01] [-1.22042820e-01 2.72688270e-01 9.71479893e-01 ... 2.44430304e-01 -1.74547628e-01 8.60574126e-01] [-1.39194226e+00 -6.26862705e-01 -2.03957349e-01 ... 1.19311500e+00 -7.38707259e-02 1.34834266e+00] ... [-5.86729288e-01 5.64508140e-01 -2.78917640e-01 ... -9.37336326e-01 -3.50614518e-01 -1.16426682e+00] [-1.24733460e+00 -1.20706894e-01 -6.86604083e-01 ... -1.19210923e+00 4.68208700e-01 -2.44876951e-01] [-6.44539446e-02 7.99447417e-01 8.28865230e-01 ... -8.94231677e-01 -9.04402316e-01 -5.71915865e-01]] [[ 2.00981319e-01 -1.98579013e+00 -6.85945511e-01 ... 
-4.63838041e-01 8.91190469e-01 9.58405793e-01] [-1.16161358e+00 7.99304068e-01 -6.60549641e-01 ... 1.44421831e-01 -4.86995161e-01 -8.18065822e-01] [ 4.87782419e-01 -7.86097467e-01 -1.13864434e+00 ... -6.90437973e-01 1.45343995e+00 1.12243187e+00] ... [-5.69935501e-01 8.24366748e-01 -8.06626260e-01 ... -7.66012132e-01 -1.94050729e-01 1.60101250e-01] [-7.99669325e-03 -8.84472191e-01 4.00195301e-01 ... -4.82207257e-03 -9.54383537e-02 1.30776882e+00] [-1.28480315e-01 1.32752264e+00 3.47241819e-01 ... 3.32704157e-01 -2.05303192e+00 -4.65236455e-01]]] [[[-2.05523074e-01 -6.05379455e-02 -1.94319832e+00 ... -1.54094234e-01 2.21575379e+00 -3.25717866e-01] [-1.04701543e+00 -1.96586072e+00 5.21553993e-01 ... 9.94900525e-01 -6.26597822e-01 3.68015975e-01] [-4.94525254e-01 -1.32859096e-01 1.01733959e+00 ... 5.94672918e-01 -1.32446027e+00 2.22860470e-01] ... [ 3.78037810e-01 6.02127373e-01 -1.08783555e+00 ... 2.08160400e+00 2.07432523e-01 -5.06793261e-01] [ 1.02519132e-01 -1.12091100e+00 1.27862132e+00 ... -3.01714167e-02 6.03924274e-01 3.82217199e-01] [ 3.91173482e-01 -1.04945588e+00 -1.31792456e-01 ... -5.99692523e-01 -6.74735904e-02 1.68799996e-01]] [[-1.17435122e+00 6.49834752e-01 1.11308956e+00 ... -6.09254360e-01 -6.40000880e-01 5.55928648e-01] [-1.11141920e+00 -4.28656414e-02 -6.96589768e-01 ... -6.60743594e-01 -4.98955905e-01 -6.70488775e-01] [ 4.49526101e-01 -5.22864163e-01 2.76463771e+00 ... -1.08279809e-01 -3.36641036e-02 7.00430393e-01] ... [-1.74067962e+00 1.00435412e+00 1.07123360e-01 ... -5.58747530e-01 -5.24594486e-01 5.71505010e-01] [-9.85707939e-01 -6.94100201e-01 1.94731104e+00 ... -5.00969410e-01 -5.47650933e-01 1.11567259e-01] [-3.75594236e-02 -5.69176018e-01 1.32100463e+00 ... 1.46317780e-01 -5.49521483e-02 -1.31615198e+00]] [[ 4.41321790e-01 -5.43535650e-01 -2.99592829e+00 ... -1.67344451e+00 1.15616977e+00 1.31682134e+00] [-7.00695395e-01 1.29977778e-01 6.74552739e-01 ... 
3.70238930e-01 1.84264243e+00 1.22972775e+00] [ 9.29995239e-01 -9.31897342e-01 5.40864468e-01 ... 9.57085013e-01 -1.21268106e+00 8.92431289e-02] ... [ 5.89360483e-02 8.95511508e-01 6.56032503e-01 ... -2.01863885e+00 -3.65339518e-01 -2.90435972e-03] [ 6.43990338e-01 -7.30911255e-01 -6.70782149e-01 ... -1.85374093e+00 4.48890418e-01 1.52484894e+00] [ 4.91712600e-01 -5.25885761e-01 -8.47885907e-02 ... 1.15820217e+00 2.25356221e-01 2.96250165e-01]] ... [[ 6.20155632e-01 -1.06870234e+00 -9.20180798e-01 ... -6.44859314e-01 2.20307454e-01 9.39421654e-01] [ 3.85047078e-01 2.03839600e-01 -2.68761277e-01 ... 8.33484828e-01 8.63603830e-01 -8.67757976e-01] [ 3.14611256e-01 -2.72453845e-01 5.53075790e-01 ... -6.60874069e-01 -3.10845733e-01 3.98145437e-01] ... [ 1.64840436e+00 1.13494897e+00 -1.80998766e+00 ... -7.53437042e-01 -1.54831839e+00 -8.31163406e-01] [-2.35618666e-01 -3.78483534e-01 2.82071888e-01 ... -2.93915957e-01 2.74499369e+00 -1.59860894e-01] [-2.18702525e-01 -1.24698246e+00 -1.37882486e-01 ... -3.57490569e-01 -1.57885730e-01 -1.47809070e-02]] [[-8.57590735e-01 -2.54530883e+00 8.27615380e-01 ... -1.57808617e-01 -9.69617814e-02 -1.90164661e+00] [-1.34069240e+00 2.12723106e-01 9.15736079e-01 ... -8.32216263e-01 7.43306577e-01 -3.58905822e-01] [-1.24778008e+00 7.30070025e-02 -1.78962380e-01 ... 3.12430769e-01 -7.27020383e-01 1.05632317e+00] ... [-3.70776325e-01 -1.10724568e+00 -5.14269829e-01 ... 5.50239623e-01 4.81306046e-01 -4.24647212e-01] [-8.69241476e-01 3.39182198e-01 2.52205253e-01 ... 9.11967084e-02 4.78798240e-01 6.69413924e-01] [-4.70113993e-01 -7.51044229e-02 9.60241914e-01 ... 1.10756266e+00 4.86239314e-01 1.73178629e-03]] [[-4.82642889e-01 -1.21946299e+00 -1.90140438e+00 ... 5.92204332e-01 -1.30503392e+00 -8.05114508e-01] [-1.42937434e+00 1.58393085e-02 -5.95378458e-01 ... 1.39568520e+00 6.10006332e-01 -2.02004409e+00] [-2.51883537e-01 2.10029316e+00 -1.15189433e+00 ... -8.60461354e-01 6.26252234e-01 -1.40541166e-01] ... 
[ 4.90005553e-01 1.69241738e+00 2.81759173e-01 ... 3.20829809e-01 1.13744247e+00 1.97765696e+00] [-1.41396916e+00 5.66883683e-01 -1.15075529e+00 ... 1.13569415e+00 -7.54113019e-01 -3.57292473e-01] [-8.20849001e-01 -6.08477056e-01 -9.50867161e-02 ... 7.52935052e-01 -1.16182506e+00 5.07882774e-01]]]] [[[[-7.55911291e-01 9.88483191e-01 6.08776361e-02 ... 1.68791878e+00 1.74790335e+00 -7.93423712e-01] [-1.26039374e+00 3.60073566e-01 7.80821621e-01 ... 4.50389057e-01 -1.37145802e-01 8.82560372e-01] [-6.53205693e-01 1.14390659e+00 8.17849219e-01 ... 6.17349267e-01 -3.93188111e-02 1.00995958e+00] ... [-4.04616117e-01 -3.72085363e-01 7.50895798e-01 ... -2.21442127e+00 9.25616562e-01 -1.97686777e-02] [-2.36749873e-01 -8.68628263e-01 -7.67396033e-01 ... -6.88347340e-01 8.58897418e-02 -1.04283047e+00] [ 5.18060863e-01 -8.55213404e-01 2.66985178e-01 ... 7.47185707e-01 6.42318502e-02 2.50660837e-01]] [[-1.11642516e+00 -1.99650362e-01 -3.24065983e-01 ... 7.47600913e-01 1.09506929e+00 -5.92280865e-01] [-6.00767553e-01 1.24477100e+00 3.34425300e-01 ... 2.46685314e+00 -6.52201355e-01 -1.25091648e+00] [-1.51857531e+00 -2.68951803e-01 1.44693565e+00 ... -1.38998353e+00 8.10509324e-01 -1.16277230e+00] ... [ 9.41981912e-01 6.14317536e-01 3.46473336e-01 ... 1.18676949e+00 -7.00666709e-03 -1.02333021e+00] [ 1.53754282e+00 -2.17842132e-01 -1.49660993e+00 ... -4.70094502e-01 -4.92688775e-01 -1.39690578e+00] [ 1.28708732e+00 -1.76294208e-01 -5.24202883e-01 ... -1.46861720e+00 -8.10894787e-01 5.94180703e-01]] [[-4.52385783e-01 6.24414861e-01 2.96430767e-01 ... 1.30516822e-02 1.04715490e+00 9.82551873e-01] [ 6.46073163e-01 -8.77819657e-01 -1.37930250e+00 ... 1.84801280e-01 -3.35634202e-01 1.37831256e-01] [ 2.91522115e-01 -1.38515770e-01 9.36912239e-01 ... 1.89021200e-01 3.11863512e-01 -4.56063092e-01] ... [-1.16456962e+00 9.77333486e-01 -5.83249867e-01 ... 1.48021376e+00 -1.24461460e+00 4.70423639e-01] [-7.59913087e-01 3.24084580e-01 -1.17122209e+00 ... 
2.58545130e-01 -1.72474861e-01 -1.09926558e+00] [ 7.13913500e-01 2.83545911e-01 1.96428502e+00 ... -1.39669597e+00 1.65349111e-01 -7.02526867e-02]] ... [[ 1.85635448e+00 -3.47782969e-01 -7.33757466e-02 ... -1.00995278e+00 6.09290004e-01 -1.18478227e+00] [-1.53537536e+00 -4.60035056e-01 -1.84139937e-01 ... 4.88847405e-01 1.58083725e+00 -1.78301618e-01] [ 1.14627731e+00 -1.22003973e+00 5.61268687e-01 ... -1.45127070e+00 -6.82374597e-01 1.57384336e-01] ... [ 7.40323305e-01 -1.59668446e+00 1.31119025e+00 ... -4.95254397e-01 1.97247970e+00 9.63560402e-01] [-1.03688896e+00 4.27406371e-01 4.05896902e-01 ... 1.00882912e+00 7.98481345e-01 -4.68153208e-02] [ 2.76794255e-01 1.44127715e+00 -1.13998616e+00 ... -4.47721511e-01 -3.98024619e-01 -6.03531301e-01]] [[ 3.45059335e-01 1.13194013e+00 -1.28518915e+00 ... -6.15591526e-01 7.39491642e-01 5.06471038e-01] [-1.02168131e+00 1.47129580e-01 -5.04618645e-01 ... 3.40709776e-01 6.57936335e-01 -2.04963759e-02] [-2.76876479e-01 -1.34189332e+00 -1.88647044e+00 ... 1.04338527e+00 -1.20571973e-02 -7.69607723e-01] ... [ 8.47717106e-01 -1.10964584e+00 -5.44977307e-01 ... -1.01763773e+00 -4.99128491e-01 5.26789010e-01] [-9.46656466e-01 1.52276373e-02 -6.50387764e-01 ... 6.70834780e-01 1.17890179e+00 3.02844375e-01] [ 6.34760737e-01 9.26276028e-01 -1.34087527e+00 ... -1.07112750e-02 -1.03522491e+00 9.41143453e-01]] [[-1.42657447e+00 -3.11126947e-01 -1.53099263e+00 ... -1.52292103e-01 -2.18858433e+00 -1.23599148e+00] [-5.38670242e-01 4.60832566e-01 -2.24822521e+00 ... -1.75996792e+00 2.97525048e-01 -3.53569806e-01] [-2.07798351e-02 8.77943635e-01 -1.32758498e+00 ... -2.04471922e+00 -8.25156510e-01 -1.25918043e+00] ... [-1.85614690e-01 -1.16344118e+00 -7.62344241e-01 ... -9.79491055e-01 5.69248259e-01 5.60267389e-01] [ 1.52182853e+00 1.17295015e+00 2.00005960e+00 ... -4.48693484e-02 -1.98370922e+00 2.83270192e+00] [ 1.10939169e+00 -7.30196357e-01 -1.15957379e+00 ... 
-9.46791112e-01 -3.86187695e-02 1.67307985e+00]]] [[[ 1.41862822e+00 -1.56128109e+00 9.73704576e-01 ... 3.93631071e-01 -5.54739773e-01 6.18943095e-01] [-4.25748497e-01 1.13250983e+00 -1.17971623e+00 ... -9.37492669e-01 1.52396679e+00 -9.07339573e-01] [ 3.52407247e-01 1.24624300e+00 -1.60968915e-01 ... -6.61108792e-01 9.86457944e-01 1.15415728e+00] ... [ 1.11962903e+00 1.10077310e+00 1.10614645e+00 ... 8.74489367e-01 7.99384177e-01 -8.48828018e-01] [ 1.33255646e-01 4.62867878e-02 4.55411673e-01 ... 1.75038445e+00 8.87881398e-01 -1.05954969e+00] [ 4.03403312e-01 6.63717747e-01 -6.91618085e-01 ... 1.07664071e-01 1.82394803e+00 1.76351333e+00]] [[ 2.02414608e+00 2.86287355e+00 -8.26494277e-01 ... -2.38237277e-01 1.31414235e+00 1.22162426e+00] [-4.75781271e-04 6.04642391e-01 -2.11445257e-01 ... 3.18438351e-01 1.44423872e-01 6.97629988e-01] [-5.39556205e-01 -5.15681565e-01 5.31101525e-02 ... -4.94958073e-01 1.16482723e+00 -1.43254519e+00] ... [-1.00490701e+00 -1.17949677e+00 -1.37860990e+00 ... -2.85254717e-01 -2.16294193e+00 -1.87411845e+00] [ 7.28324950e-01 -1.65119901e-01 1.85470566e-01 ... 3.43652338e-01 -2.06006765e+00 7.96536267e-01] [ 1.27480233e+00 -7.16605723e-01 2.93486919e-02 ... -1.21165502e+00 2.75700629e-01 -1.71591020e+00]] [[-1.95711923e+00 -1.65553045e+00 7.03549564e-01 ... -1.46755600e+00 -1.46189415e+00 -8.38968810e-03] [ 7.97518075e-01 -4.91392799e-02 -2.80323982e-01 ... 1.04971766e+00 1.00781746e-01 -5.35054356e-02] [ 8.03624332e-01 -1.04296184e+00 -2.65972950e-02 ... 1.47123516e-01 -1.20874858e+00 4.96737182e-01] ... [-6.37946606e-01 9.72930312e-01 8.40828121e-01 ... -5.58809638e-01 -2.14549452e-01 -4.24430460e-01] [-3.32933486e-01 -8.13112080e-01 3.67385477e-01 ... -5.27837396e-01 1.40629008e-01 -2.10979342e+00] [-1.07241683e-01 1.34310615e+00 -1.05232334e+00 ... 1.12422931e+00 -1.07109678e+00 -2.24400237e-01]] ... [[-5.16234040e-01 7.20734477e-01 2.96163499e-01 ... 
-2.49291807e-02 -9.14452314e-01 7.50149310e-01] [-1.25320613e+00 1.52246428e+00 -1.52759147e+00 ... -1.04363441e+00 -6.54054344e-01 1.57739997e+00] [-6.98513389e-01 1.79413235e+00 7.22545743e-01 ... 3.94938260e-01 1.10346234e+00 -8.65122199e-01] ... [-1.01328933e+00 2.46187139e+00 -3.06503683e-01 ... -3.40098858e-01 -8.99072051e-01 1.44630218e+00] [-1.75936747e+00 -5.09715557e-01 -7.81184882e-02 ... -3.34837548e-02 1.43482327e+00 1.74528611e+00] [-2.20571250e-01 -6.99407518e-01 1.03575015e+00 ... -2.19187334e-01 9.08854723e-01 4.49310303e-01]] [[ 1.28547692e+00 -1.63949800e+00 1.07263565e+00 ... -1.41596556e+00 6.71987176e-01 2.86902755e-01] [-6.62169695e-01 -1.11282922e-01 4.05435026e-01 ... 2.13747099e-01 -1.16987276e+00 -5.50012052e-01] [-1.01779258e+00 1.46721208e+00 -4.69565630e-01 ... -3.88382107e-01 -1.50927114e+00 -1.31911874e+00] ... [ 5.92177629e-01 -2.04413012e-01 1.70473102e-02 ... -1.10763705e+00 -1.01411533e+00 2.08308125e+00] [ 8.56681228e-01 2.36295640e-01 -1.02853167e+00 ... -5.84241450e-01 9.78925228e-01 1.47382176e+00] [-4.78600785e-02 -5.30719697e-01 -4.32098269e-01 ... 9.06653047e-01 -1.79102111e+00 9.61764276e-01]] [[ 2.93275565e-02 -5.76252222e-01 -1.74425435e+00 ... -1.12277400e+00 -1.13781071e+00 -2.05743670e-01] [-9.54099417e-01 4.82216626e-01 8.65035713e-01 ... -7.61180222e-01 1.78019285e-01 -3.45129743e-02] [ 6.54819429e-01 6.44400120e-01 5.54017544e-01 ... -3.65523815e-01 -4.84794647e-01 2.34495258e+00] ... [-5.68569481e-01 -1.02413917e+00 -2.08396006e+00 ... -1.91572070e-01 1.01132178e+00 -1.12903170e-01] [ 8.10231447e-01 6.16415858e-01 1.87269494e-01 ... 2.26850420e-01 2.08545303e+00 3.88641894e-01] [ 1.13967788e+00 1.86713204e-01 -1.05982542e+00 ... -1.71064854e+00 -8.83034706e-01 1.68124485e+00]]] [[[-2.12058330e+00 1.11200547e+00 -3.24482232e-01 ... -7.09206700e-01 -2.04629689e-01 2.58350134e-01] [ 8.91509891e-01 -6.24599569e-02 -6.52327538e-01 ... 
1.07473768e-01 1.65716326e+00 -1.24234116e+00] [ 2.28490686e+00 3.78829427e-02 8.22353959e-01 ... -2.31819057e+00 1.03513062e-01 7.14456886e-02] ... [ 2.41586328e-01 -6.01934671e-01 -6.30070984e-01 ... -2.30133489e-01 2.38300085e+00 -8.87164891e-01] [-6.30494833e-01 -7.93637812e-01 -8.32629979e-01 ... 3.39130104e-01 4.56057250e-01 1.27579117e+00] [ 9.47307765e-01 -1.05189848e+00 5.83057523e-01 ... -6.58082724e-01 -8.92179191e-01 1.91672885e+00]] [[ 7.22020686e-01 1.31628466e+00 -1.44342089e+00 ... 1.75859463e+00 -2.53569198e+00 1.61604309e+00] [ 1.22244346e+00 1.32772958e+00 -5.95481634e-01 ... 1.21196710e-01 1.08641648e+00 2.50001729e-01] [ 6.95018232e-01 -1.50094748e+00 -5.18660963e-01 ... 3.79213244e-01 1.02772689e+00 2.66260171e+00] ... [ 5.19665241e-01 6.37040675e-01 1.59476495e+00 ... -8.60606313e-01 -4.15833443e-01 -4.15944785e-01] [-8.89247060e-01 -9.79446471e-01 1.45399439e+00 ... -3.92372087e-02 1.17638312e-01 1.31618470e-01] [-8.83297443e-01 3.09036789e-03 -1.03742158e+00 ... -1.52599180e+00 7.44908214e-01 1.01282191e+00]] [[ 2.51191445e-02 -2.52871633e+00 6.84832573e-01 ... 5.86811900e-01 -4.68941927e-01 5.38745046e-01] [ 1.13103531e-01 7.29031801e-01 -1.40608096e+00 ... -8.83473083e-02 1.68345749e-01 4.02279407e-01] [-9.79708970e-01 -1.40858257e+00 -1.93231523e+00 ... -5.19606292e-01 -8.59270513e-01 1.79727748e-01] ... [ 9.40793872e-01 -5.00289142e-01 -1.70172989e+00 ... -1.30890608e+00 1.77596962e+00 -3.85532320e-01] [-1.11792672e+00 2.44483614e+00 1.78658068e+00 ... -1.34356603e-01 -3.67979258e-01 -1.57093894e+00] [-1.30983961e+00 -2.48302603e+00 -9.60065603e-01 ... 9.70430017e-01 1.56429410e+00 -9.90480781e-01]] ... [[ 5.15790701e-01 1.27528203e+00 1.60432398e+00 ... -1.18737984e+00 1.31376278e+00 7.21591592e-01] [-8.33124816e-01 -8.89859676e-01 -1.42059469e+00 ... -1.51636708e+00 -3.99684876e-01 -9.70898807e-01] [-5.28923035e-01 -1.61522937e+00 6.67679980e-02 ... 4.81095701e-01 -8.09993684e-01 -1.19984400e+00] ... 
[-1.46651983e+00 -2.47581944e-01 7.39924656e-03 ... -3.47122073e-01 -7.91720331e-01 -5.47066867e-01] [ 1.13271904e+00 1.27138221e+00 -2.11043000e+00 ... -5.74386835e-01 -1.28934368e-01 -2.29184484e+00] [ 4.78259146e-01 1.04664135e+00 -1.74437076e-01 ... -1.24512305e-02 -6.61331594e-01 6.58887982e-01]] [[ 7.26028264e-01 -1.37806982e-02 -1.69401670e+00 ... -1.23098500e-01 2.23887399e-01 -5.13321042e-01] [ 6.78991795e-01 -1.14378870e-01 1.01175189e-01 ... -1.69301295e+00 -9.18659449e-01 1.15250044e-01] [ 1.55964804e+00 -4.08732921e-01 9.35982823e-01 ... -1.92584738e-01 -8.80604446e-01 1.17999804e+00] ... [-3.96876574e-01 3.90129954e-01 6.91420436e-01 ... -3.84702049e-02 -3.89746279e-01 -3.15091342e-01] [-2.01005727e-01 6.19720995e-01 1.02675724e+00 ... 1.89489841e+00 -6.92540765e-01 -1.14192434e-01] [ 6.82717085e-01 9.53020394e-01 -9.86561596e-01 ... 1.75670934e+00 -4.71571982e-01 -3.74831736e-01]] [[ 2.24412233e-01 5.55979460e-02 -1.73767376e+00 ... 4.68215197e-02 -5.57284772e-01 8.05203974e-01] [ 1.32000124e+00 -1.12399387e+00 3.03221978e-02 ... -4.38776910e-01 8.80786061e-01 -9.06563520e-01] [-3.08295250e-01 1.01985838e-02 -5.99281132e-01 ... -5.41472256e-01 -8.95632982e-01 -4.85681415e-01] ... [-3.37439156e+00 1.21316664e-01 6.20792508e-01 ... -3.58650535e-01 1.19792199e+00 -1.86436221e-01] [-5.75477600e-01 2.18929505e+00 2.42704347e-01 ... 1.31938958e+00 2.03839183e-01 7.67257661e-02] [ 8.65337372e-01 -4.64892894e-01 7.12492943e-01 ... -3.70217741e-01 3.13634992e-01 2.44475976e-01]]] [[[ 2.02204272e-01 -6.94048941e-01 -1.00844450e-01 ... 1.07493544e+00 8.74486417e-02 -1.38207245e+00] [ 1.20600283e+00 1.81836128e+00 7.16094851e-01 ... -3.47753465e-01 -2.32928085e+00 4.02989805e-01] [ 9.63753819e-01 3.06121141e-01 -3.31714869e-01 ... 1.32202840e+00 4.52585459e-01 -2.21816134e+00] ... [ 2.46107668e-01 -6.94637537e-01 2.33139467e+00 ... -3.19735527e-01 -8.88445139e-01 -5.97679079e-01] [ 1.11689913e+00 5.11577904e-01 4.82637972e-01 ... 
2.76792002e+00 -9.17494535e-01 7.80478477e-01] [-1.70595276e+00 7.75070488e-01 -5.01671493e-01 ... -2.66769767e-01 5.56780040e-01 6.73949838e-01]] [[ 5.71400106e-01 7.89561033e-01 1.43530786e-01 ... 1.26909450e-01 -1.13420928e+00 -6.79945529e-01] [-1.74183702e+00 -7.60641277e-01 1.24232364e+00 ... -1.30740440e+00 -1.65202403e+00 1.12959242e+00] [ 9.88415897e-01 2.67616391e-01 -8.56390417e-01 ... -2.30346751e+00 -8.57199967e-01 8.01820695e-01] ... [ 6.16068125e-01 1.75495195e+00 -8.98051083e-01 ... -5.94619334e-01 3.90740812e-01 1.38777783e-02] [-1.92385840e+00 -1.04223263e+00 1.43188909e-01 ... 2.36231828e+00 2.12629318e-01 1.36758849e-01] [-3.43434811e-01 -1.29369861e-02 -7.61275530e-01 ... 1.06644762e+00 6.52486563e-01 -7.82144070e-01]] [[-5.50201833e-01 -1.97555602e+00 -2.79077220e+00 ... -9.36381519e-01 -1.15637183e+00 -2.80779898e-01] [-2.28728199e+00 -2.13019013e+00 1.26757324e-01 ... -1.09394002e+00 -3.57901126e-01 -2.20883656e+00] [ 1.41569936e+00 1.03521168e+00 -1.76630080e-01 ... -1.00722206e+00 -2.53048569e-01 -6.42531493e-04] ... [ 5.33807203e-02 -7.82061100e-01 -2.32316345e-01 ... 5.95871627e-01 1.52933407e+00 -1.38887525e-01] [ 6.22704923e-01 -1.04534638e+00 -1.39912629e+00 ... -7.83050418e-01 -2.79390484e-01 -2.49765456e-01] [-1.28618866e-01 1.03482580e+00 1.86201081e-01 ... -1.26648271e+00 -9.52254713e-01 8.71725082e-02]] ... [[ 2.48344445e+00 -2.89715409e-01 4.07432050e-01 ... 8.83334577e-01 -8.13156247e-01 1.29351795e+00] [ 3.29897404e-01 -5.94857216e-01 2.46997058e-01 ... 7.17107713e-01 3.88692506e-02 3.34253430e-01] [-1.37862802e-01 5.93918324e-01 2.09611636e-02 ... 4.74969298e-01 -6.57632053e-01 2.14315325e-01] ... [ 3.77125084e-01 8.99145067e-01 -2.15483665e+00 ... -2.79954553e-01 2.29248405e-01 4.98676896e-01] [-5.08976519e-01 -4.74108368e-01 1.22815758e-01 ... 7.28244960e-01 1.03588009e+00 -1.46331859e+00] [-3.65462273e-01 8.67505252e-01 1.69160008e+00 ... 
-1.03000498e+00 -3.95149052e-01 -9.41701472e-01]] [[-3.43982846e-01 -1.36418134e-01 2.54839689e-01 ... -3.77560109e-01 2.13941932e-01 1.02296472e+00] [-4.12094921e-01 -3.91144514e-01 -4.95626271e-01 ... -1.11232722e+00 -2.45254874e+00 6.30059719e-01] [ 5.55650949e-01 -8.92876923e-01 -1.57449031e+00 ... 1.29882753e+00 1.96514320e+00 -2.32832599e+00] ... [ 9.30116773e-01 5.37917972e-01 -2.71141410e+00 ... 2.73335427e-01 -5.98592222e-01 -6.12799823e-01] [ 1.81736588e-01 -1.27753925e+00 -1.55157161e+00 ... -2.06528127e-01 1.82237637e+00 1.63735080e+00] [-1.27595031e+00 -2.61690021e-01 -1.15455127e+00 ... 1.20331697e-01 -2.15299702e+00 -6.88573480e-01]] [[ 9.16472733e-01 -4.70894635e-01 -1.13048875e+00 ... -1.90425277e-01 -3.35619658e-01 1.42498434e-01] [-1.45970893e+00 -1.36310852e+00 9.08245742e-02 ... -5.65723293e-02 -3.19939822e-01 -1.15341771e+00] [-2.53473330e+00 2.57905871e-01 2.06531554e-01 ... -4.17883635e-01 -3.32518309e-01 2.45948538e-01] ... [-7.74593726e-02 2.14714766e+00 -9.54681993e-01 ... -7.74374247e-01 2.80635953e-01 2.57517993e-01] [-1.88361323e+00 -7.57160842e-01 9.27320898e-01 ... 8.43563795e-01 7.08519876e-01 4.00670528e-01] [ 1.15967363e-01 -9.96490344e-02 -2.92121619e-01 ... 9.63857532e-01 3.83671075e-01 1.97871876e+00]]] [[[ 5.76610625e-01 8.52394164e-01 -9.99720275e-01 ... 8.25584769e-01 -9.26875532e-01 9.35801625e-01] [ 3.85124654e-01 1.77702558e+00 2.54101992e-01 ... 5.52028298e-01 -1.80887210e+00 4.23011273e-01] [ 1.12247741e+00 -3.98500115e-02 -5.09653568e-01 ... 1.09744579e-01 4.68806237e-01 6.79738045e-01] ... [ 2.12116623e+00 1.98604476e+00 2.23741198e+00 ... -1.10534713e-01 1.02515924e+00 9.90453839e-01] [-4.99744594e-01 1.74094427e+00 9.74775672e-01 ... -1.59097284e-01 1.30083993e-01 -1.69122204e-01] [-3.95878673e-01 -6.76410913e-01 -4.15815085e-01 ... -4.53311175e-01 1.31238472e+00 2.32552797e-01]] [[ 3.12351622e-02 5.60102344e-01 1.90823197e-01 ... 
1.40742287e-01 8.75782490e-01 -3.35868627e-01] [-1.40081465e+00 2.32852742e-01 1.20564610e-01 ... -1.21708259e-01 -6.97714448e-01 9.37453687e-01] [ 1.97642434e+00 7.40772337e-02 -5.08075058e-01 ... -2.57283390e-01 6.43724263e-01 6.74950778e-01] ... [ 9.06743467e-01 -1.19269013e+00 -2.08262831e-01 ... -2.39279017e-01 1.11229253e+00 -1.08388925e+00] [ 2.66907364e-01 3.38489145e-01 6.04644537e-01 ... -5.10609269e-01 -2.83345997e-01 -4.92323041e-01] [-1.03828895e+00 4.40879822e-01 8.80859017e-01 ... -6.93076909e-01 9.17684853e-01 8.41642737e-01]] [[-5.59332192e-01 1.55252886e+00 -1.76371560e-01 ... -1.13342845e+00 1.60565650e+00 -2.34479591e-01] [-4.48774904e-01 -1.43232441e+00 -1.68096328e+00 ... -5.89735270e-01 5.61402559e-01 -1.35240948e+00] [-9.69612300e-01 -9.84861434e-01 7.60701776e-01 ... -4.17495459e-01 -4.07481045e-01 6.29396915e-01] ... [ 9.98319685e-01 6.89509690e-01 8.84133875e-01 ... -1.35669506e+00 3.27909255e+00 7.43565932e-02] [-1.16153479e+00 -4.40421432e-01 2.14721680e+00 ... 1.33480489e+00 -1.63695347e+00 1.94405615e+00] [-2.25173211e+00 2.12635323e-02 3.88041317e-01 ... 7.65676916e-01 1.02009833e+00 -1.21465969e+00]] ... [[ 2.41693377e-01 -3.42940867e-01 -1.41184175e+00 ... -2.13609442e-01 -1.45189512e+00 3.62537056e-01] [-1.71341226e-01 3.35005492e-01 -1.63877025e-01 ... 1.31804109e+00 -2.35629573e-01 -9.48668182e-01] [-1.12375982e-01 3.29663962e-01 8.63997459e-01 ... 5.37122898e-02 1.87178111e+00 -6.20544195e-01] ... [-6.52623773e-01 -1.07454860e+00 1.22474372e+00 ... -7.66297281e-01 -5.64060986e-01 -1.26272750e+00] [-1.26233891e-01 -1.14761561e-01 4.04801697e-01 ... -5.13547838e-01 8.64722490e-01 1.02692592e+00] [-2.68706620e-01 1.17310834e+00 -1.67121068e-01 ... 4.19958755e-02 5.41456580e-01 -1.79565787e+00]] [[ 1.11370611e+00 -1.40117943e+00 -1.19468987e+00 ... -2.57880181e-01 4.70092624e-01 -4.35959280e-01] [-9.02906358e-02 -5.69617748e-01 -4.30831313e-01 ... 
-1.10321462e+00 6.55395627e-01 9.88946676e-01] [ 1.76970780e+00 -6.97102249e-01 8.63086164e-01 ... 5.84925227e-02 -2.83353359e-01 1.23099411e+00] ... [ 3.74574512e-01 -2.41867274e-01 1.55998683e+00 ... 3.47758643e-02 5.88523567e-01 1.16201782e+00] [ 1.61396205e+00 -1.08069479e+00 -1.92794514e+00 ... -8.47181678e-02 -2.79020238e+00 -1.23382127e+00] [ 1.25978410e+00 -4.26313818e-01 3.17270644e-02 ... -1.02730468e-01 8.39632630e-01 9.60191786e-01]] [[ 8.49337876e-01 5.45499288e-03 -1.18784046e+00 ... 2.93378234e+00 5.41558862e-01 3.48566622e-01] [-1.15133643e+00 3.36090416e-01 -9.45270896e-01 ... 1.12182438e+00 -2.80597657e-01 -9.43497300e-01] [ 1.15739977e+00 -6.90701544e-01 -3.64405662e-01 ... -6.46562755e-01 -1.07107532e+00 4.35259283e-01] ... [-2.13213950e-01 4.73034382e-01 1.80096880e-01 ... -2.16823316e+00 -1.03920567e+00 5.82921088e-01] [ 1.12131917e+00 5.77674448e-01 6.86180830e-01 ... -2.46199235e-01 -2.61566162e-01 1.71811029e-01] [ 4.07493442e-01 2.84399480e-01 5.23699224e-01 ... 2.12967730e+00 -1.97043240e-01 3.48913580e-01]]] [[[ 1.00846243e+00 -1.41780925e+00 1.30153048e+00 ... 1.93110955e+00 -7.81996727e-01 1.61673412e-01] [ 5.29755950e-01 7.14676857e-01 -4.08025868e-02 ... 1.48991555e-01 -1.86912403e-01 5.87061167e-01] [-3.66210610e-01 1.30860582e-01 4.80838001e-01 ... 1.22971094e+00 -1.18108571e+00 8.50877404e-01] ... [-3.74794394e-01 -1.87205553e+00 7.14673162e-01 ... 1.27837420e-01 2.39893627e+00 9.18621182e-01] [-1.71051353e-01 -5.97722292e-01 1.26086366e+00 ... 5.25841236e-01 6.84089541e-01 -1.75595903e+00] [ 2.92859316e-01 2.41771773e-01 -4.44055051e-01 ... -7.19199121e-01 1.36268783e+00 6.74368739e-01]] [[ 1.04639518e+00 5.15516102e-01 -1.19075620e+00 ... 7.85874009e-01 1.06587708e+00 3.41055840e-01] [ 8.85556817e-01 -1.34277022e+00 -5.88386357e-01 ... 2.47896850e-01 7.75573552e-01 3.57861400e-01] [ 6.31826460e-01 -1.78585935e+00 -2.64509588e-01 ... 3.94388199e-01 1.41375768e+00 -1.88789558e+00] ... 
[ 1.76160455e-01 1.20156384e+00 2.23628789e-01 ... -3.01387787e-01 8.73872936e-01 -2.32380360e-01] [ 5.02628863e-01 -2.05275393e+00 -7.65923321e-01 ... 3.44037227e-02 1.00321317e+00 2.44330096e+00] [-1.95185685e+00 2.24801481e-01 1.78037584e+00 ... -5.27947545e-01 -1.30020702e+00 1.50775743e+00]] [[-1.35202229e-01 -7.62783110e-01 -7.12948591e-02 ... -4.35822904e-01 6.49005055e-01 -3.83438230e-01] [-1.22254217e+00 1.33048213e+00 5.92481315e-01 ... 3.35010707e-01 -8.69733095e-02 -3.65217239e-01] [-1.77940816e-01 2.54375875e-01 -2.05790114e+00 ... -4.03067678e-01 1.64670572e-01 -3.50519195e-02] ... [-2.55746055e+00 8.16547036e-01 1.03272855e+00 ... -7.93400705e-01 -1.67575013e-02 1.10323477e+00] [-7.56246567e-01 -2.50432342e-02 -2.05651999e-01 ... 7.17272043e-01 -4.54772443e-01 5.71273386e-01] [-1.72609150e+00 -6.31954610e-01 1.98961332e-01 ... -3.23878467e-01 -3.56003165e-01 -5.22498906e-01]] ... [[-1.21616936e+00 3.08697194e-01 -2.54290283e-01 ... -2.06595445e+00 -6.79750144e-02 2.22996330e+00] [ 9.86555934e-01 7.73361385e-01 8.79905105e-01 ... -3.22870106e-01 1.89981628e-02 4.47109938e-01] [-1.87398100e+00 1.54760265e+00 1.52845904e-01 ... -3.18035603e-01 -5.40146112e-01 4.13900137e-01] ... [ 1.39691818e+00 -9.56852794e-01 9.17776227e-01 ... 2.10531548e-01 -6.48626864e-01 -1.95850790e-01] [-2.59725749e-01 -1.40004063e+00 7.88852721e-02 ... -4.77380753e-01 3.96747410e-01 3.19840837e+00] [ 8.00710022e-02 -1.37295437e+00 2.95746356e-01 ... -2.06772357e-01 -1.95395601e+00 -1.27306807e+00]] [[-3.83291364e-01 5.08342758e-02 3.88289839e-01 ... 7.27782965e-01 1.39699256e+00 -5.95642626e-01] [ 2.06879437e-01 -2.37406567e-01 1.39058065e+00 ... -3.46289389e-02 -5.90234339e-01 4.18593675e-01] [-1.44115782e+00 -5.59418388e-02 7.73092151e-01 ... 5.47911882e-01 4.84271646e-01 -6.00767016e-01] ... [ 9.37986076e-01 -2.41139278e-01 8.66693497e-01 ... 3.54001135e-01 -1.55272830e+00 1.16813493e+00] [ 2.44164541e-01 3.09627056e-01 -4.04609412e-01 ... 
-4.70153034e-01 -5.23191094e-01 6.12215936e-01] [-6.30719781e-01 -1.26632679e+00 -6.20323479e-01 ... -8.89818549e-01 1.82337070e+00 -1.14213407e+00]] [[-4.77219671e-01 1.02977514e+00 -1.70532316e-01 ... 7.68192112e-01 -9.85379755e-01 5.85739911e-01] [ 5.09214580e-01 2.62855381e-01 -7.07397535e-02 ... 6.95985794e-01 -7.11323500e-01 1.43336833e+00] [ 1.33856630e+00 -5.82237661e-01 8.96370530e-01 ... -4.18793589e-01 -3.86366099e-01 -7.85295784e-01] ... [-2.88753957e-01 1.53289568e+00 3.68304104e-01 ... 1.23273417e-01 -6.72924757e-01 -6.89404070e-01] [ 1.24980259e+00 -4.94941801e-01 -3.81793201e-01 ... -1.96193218e+00 -5.39217234e-01 4.83357191e-01] [ 7.42357075e-01 -4.49557155e-01 1.78069162e+00 ... 8.82529259e-01 6.23511195e-01 2.46406108e-01]]]]]; ov_res: [[[[[-3.07089210e-01 -2.59255856e-01 -3.25307101e-01 ... -6.27340525e-02 -2.07651639e+00 -1.50032389e+00] [-1.05801761e+00 -2.10936949e-01 -2.24286020e-01 ... -5.91633677e-01 -1.40935612e+00 -1.07956779e+00] [ 1.00771397e-01 -1.07344890e+00 1.17538738e+00 ... 2.36254156e-01 -4.60840493e-01 7.91387379e-01] ... [ 2.22856700e-01 -1.32433987e+00 1.18470919e+00 ... 6.89991951e-01 -3.26041555e+00 -1.71648145e-01] [ 1.56432819e+00 1.13245058e+00 -1.85560644e-01 ... 1.02490556e+00 -3.02369297e-01 1.04060876e+00] [-1.11943138e+00 -4.73787695e-01 3.94051343e-01 ... -1.39513087e+00 -2.43789625e+00 -1.54389903e-01]] [[-8.80797207e-01 -3.78568470e-01 7.65827596e-02 ... -5.06971657e-01 -2.12717557e+00 -1.69036090e+00] [-2.50188317e-02 -3.47657561e-01 -1.40789795e+00 ... 4.52377051e-01 -2.79145658e-01 -2.54271460e+00] [ 3.13974053e-01 -1.28796309e-01 9.34762836e-01 ... 4.50434327e-01 -2.04562068e+00 -1.10826278e+00] ... [ 1.55866861e+00 6.84336945e-02 1.01729226e+00 ... 6.30788267e-01 3.00555736e-01 -5.79506755e-01] [ 6.26463890e-01 2.99236894e-01 1.41883767e+00 ... 9.55852792e-02 1.18145859e+00 4.25306261e-01] [ 7.13976383e-01 2.63594657e-01 1.25476980e+00 ... 
9.50965583e-01 1.30947113e-01 -4.80122387e-01]] [[ 1.33797288e+00 -1.29357791e+00 -1.56572506e-01 ... 5.70261441e-02 8.66988063e-01 8.74234661e-02] [ 4.93035644e-01 -6.66248500e-01 -1.85129178e+00 ... -2.18204275e-01 -7.30261803e-01 1.08436382e+00] [ 1.16192675e+00 1.36946468e-02 5.91841817e-01 ... -1.61810458e-01 -1.59215376e-01 5.68682611e-01] ... [-1.19287741e+00 1.71962607e+00 1.91695309e+00 ... 5.93535244e-01 -2.55025059e-01 -1.39006805e+00] [-1.32313633e+00 -1.79651931e-01 -1.49311638e+00 ... -6.55063450e-01 -1.13908082e-01 5.36647998e-02] [-1.09237647e+00 5.49039721e-01 -1.26509428e+00 ... -1.70860946e-01 1.27820343e-01 -9.58498597e-01]] ... [[-3.88798505e-01 5.75352237e-02 5.35132706e-01 ... 8.42800364e-03 -7.86468208e-01 -1.85882896e-01] [ 1.02221048e+00 1.42363143e+00 2.03923836e-01 ... -1.24354219e+00 3.86297911e-01 -6.80015013e-02] [-1.01715112e+00 -1.24937999e+00 7.29476511e-01 ... 3.03126454e-01 -8.39359090e-02 1.41998768e+00] ... [-7.58909523e-01 8.41613352e-01 -1.45671606e+00 ... -6.13141000e-01 -7.10210204e-01 1.94708812e+00] [-4.47124243e-01 -7.37316251e-01 7.77779460e-01 ... 1.54542476e-01 -1.36220622e+00 -8.93044770e-01] [-9.10378098e-01 8.39966536e-01 -5.71134746e-01 ... -5.99764705e-01 8.47688258e-01 -8.01169336e-01]] [[ 3.54428053e-01 2.85623342e-01 -2.27147162e-01 ... -4.01756436e-01 1.40306628e+00 -3.45867798e-02] [-7.04479933e-01 -1.30901501e-01 1.02647340e+00 ... 6.85083330e-01 1.94943726e-01 -2.12068275e-01] [-2.17203528e-01 -2.02264842e-02 8.62609029e-01 ... 1.51631045e+00 -8.62786472e-01 2.14786839e+00] ... [-1.31904995e+00 -1.89473942e-01 5.24290577e-02 ... -3.33434390e-03 3.49488229e-01 3.38432699e-01] [ 5.42281926e-01 -6.26772419e-02 1.94920671e+00 ... 1.24306190e+00 1.21978962e+00 -3.24082553e-01] [ 4.19474185e-01 -5.11646390e-01 -6.38116121e-01 ... -8.32220316e-01 1.51334941e+00 -8.83013844e-01]] [[ 1.03197336e+00 3.54437441e-01 -5.55997550e-01 ... 
1.60481167e+00 -1.50063968e+00 -7.97105283e-02] [-1.55838028e-01 -7.42217183e-01 9.19467628e-01 ... -6.11750722e-01 -3.52384537e-01 -3.92852634e-01] [-5.36557257e-01 -4.87532467e-01 3.31239790e-01 ... 1.98494351e+00 -2.04354715e+00 4.42864358e-01] ... [ 7.07954764e-01 -8.93881023e-01 8.44539523e-01 ... 6.66418433e-01 -1.03522098e+00 6.69971883e-01] [ 3.41323614e-01 -1.14581513e+00 1.14650750e+00 ... -1.97078675e-01 -5.31013310e-01 1.74459374e+00] [-1.39424884e+00 -2.72944216e-02 -1.54244792e+00 ... -1.19588292e+00 1.29844499e+00 8.81706893e-01]]] [[[ 6.14342272e-01 -7.90160179e-01 3.69225629e-02 ... 1.34538639e+00 -9.03753713e-02 -5.09685993e-01] [ 1.34016812e+00 -6.84467375e-01 3.04762036e-01 ... 3.94533306e-01 -5.07582366e-01 -9.24267948e-01] [-2.97883719e-01 -3.33789855e-01 -8.20981205e-01 ... 5.40180266e-01 8.28088880e-01 -7.71517932e-01] ... [-2.65783727e-01 -2.25293469e+00 1.29125810e+00 ... 2.53237545e-01 -7.52569318e-01 -1.55328035e+00] [-7.15986550e-01 -1.00245988e+00 4.69650984e-01 ... 2.67260408e+00 1.41820383e+00 -1.16060781e+00] [ 2.60978532e+00 9.10865486e-01 2.15768181e-02 ... 5.81333697e-01 6.74460292e-01 1.02680862e+00]] [[-1.13203073e+00 -3.13753366e+00 7.09615648e-01 ... 1.09207880e+00 1.29046929e+00 -5.62950552e-01] [ 1.74328610e-02 1.39495838e+00 9.07585680e-01 ... -3.46136898e-01 1.15509617e+00 -7.06732631e-01] [ 1.14327681e+00 5.47937572e-01 -7.66185641e-01 ... -1.86408460e+00 1.55369270e+00 -6.74968362e-02] ... [-2.30001783e+00 9.60135832e-02 9.61013436e-01 ... 4.93192106e-01 2.20839715e+00 1.72441936e+00] [-9.68316793e-01 -1.30181178e-01 6.58837333e-02 ... 3.30394655e-01 -1.67448902e+00 4.37347472e-01] [ 6.60218418e-01 4.26195890e-01 1.30952036e+00 ... 1.09318900e+00 -2.34140188e-01 -7.69219220e-01]] [[-4.22532380e-01 8.91310573e-01 -3.87282372e-01 ... -9.53663066e-02 1.49098718e+00 -7.77668059e-01] [-5.81896484e-01 2.17765167e-01 1.82401031e-01 ... 
6.70357719e-02 1.79504633e-01 8.37711215e-01] [-2.36377209e-01 1.61137128e+00 -1.28079414e+00 ... 8.87712613e-02 6.63395286e-01 -1.03629994e+00] ... [ 1.33717275e+00 -2.90598273e-02 6.27681315e-02 ... 1.91365445e+00 4.84088778e-01 1.23684156e+00] [-9.25348997e-01 1.48685002e+00 -1.43975413e+00 ... -2.38197637e+00 2.38032937e-01 -1.40559220e+00] [ 1.77924737e-01 -5.97812533e-01 7.52186537e-01 ... 3.10949117e-01 -6.93197250e-02 1.31370378e+00]] ... [[-7.18110085e-01 5.83360910e-01 5.39371729e-01 ... -4.55409557e-01 -6.23903334e-01 -4.95546728e-01] [-6.00399494e-01 -2.48648107e-01 7.72891939e-01 ... 8.00267935e-01 3.14243525e-01 6.98991299e-01] [ 3.72478515e-01 -8.05463567e-02 1.36527157e+00 ... 1.17763245e+00 1.48518121e+00 -1.03843606e+00] ... [ 8.73792946e-01 -1.34211791e+00 1.28428981e-01 ... 2.15727180e-01 4.52071369e-01 -3.86661440e-01] [-1.57750890e-01 -5.65775335e-01 1.30937564e+00 ... -2.97072470e-01 -2.59541959e-01 2.94948310e-01] [-2.46647522e-01 1.11798382e+00 -8.94181430e-01 ... -1.80659914e+00 -1.68048784e-01 -3.84916186e-01]] [[ 4.10791218e-01 3.33364725e-01 1.38922882e+00 ... -8.01067531e-01 -1.63283065e-01 2.20874429e-01] [ 2.98782513e-02 -1.04094014e-01 -1.48569155e+00 ... -2.60088176e-01 -7.42909253e-01 5.75805269e-02] [ 5.88591211e-02 -7.43696690e-01 -1.60141802e+00 ... 1.03367484e+00 1.72180966e-01 -6.53886378e-01] ... [-1.75081062e+00 -1.10514998e-01 -1.49179471e+00 ... -1.62984347e+00 2.74204254e-01 -2.47171551e-01] [ 2.65217751e-01 2.41102517e-01 3.54308903e-01 ... -1.41512549e+00 -1.87014788e-01 3.11306745e-01] [-2.04616070e+00 1.04133236e+00 9.50563967e-01 ... 1.06937416e-01 8.48953664e-01 -7.48532891e-01]] [[-1.30772954e-02 -2.18479371e+00 -3.38213325e-01 ... 9.87423882e-02 -1.14438379e+00 -1.87204432e+00] [-1.57461330e-01 1.87450433e+00 -7.49437928e-01 ... -1.13120747e+00 -5.81278801e-01 4.08862948e-01] [-5.96267223e-01 -1.22538018e+00 -2.20856810e+00 ... 1.93335623e-01 -1.09734595e+00 1.44835854e+00] ... 
[-3.18222463e-01 8.65045786e-01 -1.35284507e+00 ... 1.49190688e+00 -1.11212206e+00 2.52368659e-01] [ 6.32842854e-02 6.22862339e-01 -8.35639387e-02 ... 3.00482154e-01 2.48622918e+00 6.29610360e-01] [ 2.62637794e-01 6.44985020e-01 -1.24191451e+00 ... -1.08372331e+00 6.56815469e-01 -1.08098909e-01]]] [[[ 2.16169834e-01 2.26476955e+00 1.18063879e+00 ... 5.35811409e-02 -7.42489457e-01 -8.84120286e-01] [ 3.03075790e-01 -1.11201632e+00 -4.33679342e-01 ... -6.74808919e-01 -7.78584599e-01 -1.01792181e+00] [-7.04737306e-01 -1.90597069e+00 -2.16325372e-01 ... -6.25697434e-01 6.96708143e-01 -1.21898496e+00] ... [-1.50301147e+00 6.36289895e-01 7.16120541e-01 ... -2.12028235e-01 -2.11135298e-01 5.24208322e-02] [ 1.57545710e+00 -1.26549351e+00 -2.41472626e+00 ... -3.45966667e-01 3.79082084e-01 1.29878259e+00] [-1.78695738e-01 3.60263050e-01 -3.54386300e-01 ... -8.69291604e-01 2.69175738e-01 1.41555861e-01]] [[ 1.26624852e-01 7.86757886e-01 -1.16891503e+00 ... -1.26065218e+00 -6.12585187e-01 2.11869931e+00] [ 7.35693812e-01 -2.93341279e-01 1.83763593e-01 ... -7.30082393e-01 -2.26377085e-01 -1.22208989e+00] [ 2.59889096e-01 1.94255024e-01 5.83401144e-01 ... -7.17896998e-01 -1.25490057e+00 -3.41691792e-01] ... [ 5.24346411e-01 -5.51526070e-01 1.10456216e+00 ... 3.91778171e-01 6.63563490e-01 1.09472561e+00] [ 6.79538473e-02 -1.93612123e+00 -1.69351983e+00 ... 1.05899739e+00 -2.50004768e-01 -1.15368092e+00] [ 1.65737063e-01 7.11474717e-01 1.86572468e+00 ... -9.85017598e-01 6.61006927e-01 -3.69635791e-01]] [[-1.03852081e+00 4.58317310e-01 1.05165434e+00 ... -1.57275379e+00 1.38198590e+00 8.81005466e-01] [-7.83612430e-01 9.93611932e-01 -5.97101450e-01 ... -2.12141776e+00 1.27819431e+00 -1.58994389e+00] [-4.14644659e-01 -1.03009129e+00 -6.21070504e-01 ... -1.00290573e+00 -1.21725011e+00 1.22332978e+00] ... [ 1.80288047e-01 7.57567406e-01 2.64259040e-01 ... -2.18195379e-01 -6.22252882e-01 -4.64130640e-01] [ 6.13133609e-01 -4.46151681e-02 9.76211578e-02 ... 
1.38383436e+00 1.33077586e+00 6.79614127e-01] [ 7.86406636e-01 5.75258434e-02 7.84490943e-01 ... 1.03612113e+00 6.00779057e-01 1.38604856e+00]] ... [[ 3.06494951e-01 3.21100593e-01 4.57839936e-01 ... 3.38423222e-01 -2.69340903e-01 1.35718393e+00] [ 5.50400138e-01 -3.92875671e-01 -1.57962751e+00 ... 7.93231547e-01 4.08427209e-01 -1.10533369e+00] [-9.68855739e-01 -1.99385881e+00 3.75534505e-01 ... 5.76187968e-02 -8.00004482e-01 2.20381045e+00] ... [-1.33891702e+00 -2.42205821e-02 -4.72158104e-01 ... -8.01054657e-01 -7.56987557e-02 3.60011220e-01] [ 2.13035011e+00 1.87343121e-01 -2.97028840e-01 ... 9.33709294e-02 -2.09562230e+00 -1.99719048e+00] [ 8.59795332e-01 -8.35254371e-01 5.94946146e-01 ... -3.11078072e-01 4.28419225e-02 7.50981987e-01]] [[-1.03151858e+00 -3.67400110e-01 3.03122431e-01 ... 7.57461548e-01 -2.31130749e-01 7.55015910e-01] [-1.94505274e-01 -1.34229136e+00 -1.03764236e+00 ... 1.21137118e+00 3.54179889e-01 -4.22159255e-01] [-1.68125856e+00 -7.32558191e-01 -1.07338572e+00 ... -1.00592650e-01 4.13962752e-01 1.02371676e-02] ... [-1.14448106e+00 -1.90112817e+00 4.06904310e-01 ... 8.40153337e-01 -1.38661063e+00 6.83323145e-01] [-1.39001644e+00 1.61432281e-01 1.06997466e+00 ... 1.55165958e+00 6.86119974e-01 -1.25191057e+00] [-9.78643894e-01 -8.01794171e-01 5.93939066e-01 ... -9.93308425e-01 1.17326014e-01 3.06221294e+00]] [[-1.03519344e+00 6.91896915e-01 -1.47110379e+00 ... -6.52702987e-01 -3.90212268e-01 -7.34064102e-01] [-8.26434884e-03 1.06159997e+00 -1.23869574e+00 ... 1.96268216e-01 3.99892628e-01 -1.06149507e+00] [ 4.09318060e-01 -5.13467602e-02 2.31242537e-01 ... 5.88921070e-01 -4.14718449e-01 -4.44743991e-01] ... [-6.56484783e-01 3.39124799e-01 6.26471117e-02 ... -1.80057585e-01 4.74460334e-01 -5.93887568e-01] [-1.72960669e-01 -2.25353599e+00 -5.95147371e-01 ... 1.39883256e+00 8.49710964e-03 6.88334286e-01] [ 2.89517331e+00 6.32841766e-01 1.23773895e-01 ... 
1.20198584e+00 -1.38265669e-01 -2.78564036e-01]]] [[[-1.94187924e-01 7.87996724e-02 -1.04800785e+00 ... -1.01826656e+00 -5.89395463e-01 -4.23877761e-02] [-3.87525231e-01 1.10976660e+00 -2.89689243e-01 ... -7.28033632e-02 1.59552383e+00 1.41545892e+00] [ 8.10753405e-01 -2.55458832e+00 -1.44900233e-01 ... 5.00694633e-01 -3.03946823e-01 7.39019960e-02] ... [-1.28957534e+00 1.37821198e-01 2.01149285e-01 ... 5.44987842e-02 3.27425361e-01 -3.52743983e-01] [ 7.32981563e-01 -8.37784469e-01 -3.27751249e-01 ... 5.95416427e-02 1.45530820e+00 5.08156300e-01] [ 1.23301291e+00 7.97252119e-01 -2.30779099e+00 ... 3.85218799e-01 -5.52950144e-01 4.90996957e-01]] [[ 5.87267160e-01 8.67984712e-01 -3.48715663e-01 ... 6.40367746e-01 1.01852500e+00 -5.05150318e-01] [ 2.39580646e-01 -1.24459541e+00 2.51615405e-01 ... -1.26302850e+00 9.79609430e-01 -1.72365665e-01] [-1.06356537e+00 -1.32657957e+00 6.13138199e-01 ... 4.86990124e-01 -3.70532632e-01 -5.01396835e-01] ... [-9.12976325e-01 5.70455730e-01 7.58564174e-02 ... -4.93275106e-01 2.61264622e-01 2.22615197e-01] [ 1.30314279e+00 1.32674265e+00 -3.80688757e-01 ... -7.16417074e-01 6.90661013e-01 7.19216093e-02] [-2.13758564e+00 2.35446644e+00 2.28691530e+00 ... 2.42516607e-01 4.57387269e-01 -4.92747091e-02]] [[ 2.64215082e-01 -8.06818381e-02 9.71964836e-01 ... 7.45815575e-01 8.12721327e-02 2.24670961e-01] [ 1.23136032e+00 -1.12454879e+00 1.02714908e+00 ... -8.04973722e-01 -1.46852338e+00 9.41173017e-01] [ 1.60776287e-01 1.44372925e-01 -1.41574061e+00 ... -1.25442594e-01 1.10803092e+00 1.40847957e+00] ... [-1.58900714e+00 -1.27577829e+00 -1.52754977e-01 ... 7.00756073e-01 -1.34147418e+00 1.09217012e+00] [ 1.62386358e+00 1.92482173e+00 -1.37056506e+00 ... -9.87847388e-01 5.62870264e-01 -2.32138455e-01] [-5.75781465e-01 -5.88853836e-01 -7.69538641e-01 ... -5.76304436e-01 6.51819766e-01 -1.15163553e+00]] ... [[ 2.06861198e-01 -8.53369772e-01 9.47082579e-01 ... 
-7.99268603e-01 3.92340213e-01 4.26192075e-01] [ 1.17506766e+00 3.53620529e-01 -1.37327290e+00 ... -2.49747783e-01 6.72252178e-01 -1.17059448e-03] [-2.16435480e+00 -6.47933662e-01 1.39289904e+00 ... -1.87661970e+00 -1.71044585e-03 1.66972411e+00] ... [-2.93248683e-01 9.20133173e-01 -1.10964310e+00 ... -8.64744306e-01 1.79507688e-01 -8.80966842e-01] [-7.06103563e-01 7.79597640e-01 -5.00137769e-02 ... 1.20967150e+00 -1.37975967e+00 1.31014645e+00] [-1.71437070e-01 -1.41812432e+00 9.35252726e-01 ... 9.85872626e-01 -7.29432523e-01 4.88042831e-01]] [[-1.64186612e-01 -4.62950557e-01 -1.62264025e+00 ... -5.75071648e-02 2.26202279e-01 -3.34280245e-02] [-3.46100420e-01 1.27181196e+00 8.56560588e-01 ... -1.58488020e-01 8.79266977e-01 -9.48230028e-01] [-6.48963302e-02 -1.96463370e+00 1.75036848e+00 ... 1.03988677e-01 -4.66866225e-01 -1.02253258e+00] ... [ 1.60774338e+00 -4.95052546e-01 1.31400406e-01 ... -5.64466417e-01 4.86860007e-01 -4.77397740e-01] [ 3.09067577e-01 -6.31840467e-01 -2.34469414e-01 ... -3.03882337e+00 -3.83757472e-01 4.05072272e-01] [-5.15520632e-01 -3.22525859e-01 -5.01965046e-01 ... -1.77492604e-01 -1.11837260e-01 -7.13052392e-01]] [[ 1.20500565e+00 1.48272961e-01 7.93299258e-01 ... 5.76716959e-01 -1.45907372e-01 -5.73347628e-01] [-1.16881764e+00 4.86874819e-01 1.24344659e+00 ... 3.67979199e-01 -1.67779291e+00 -1.43943655e+00] [ 7.81879425e-01 -7.25415722e-02 -2.12908983e+00 ... 1.73501003e+00 -1.11764634e+00 4.38317955e-01] ... [ 3.65567863e-01 -4.25081789e-01 -1.18554711e+00 ... -1.23526120e+00 2.25347733e+00 1.63791680e+00] [ 1.39628971e+00 2.70252228e-01 -5.20383567e-02 ... -1.35575676e+00 -1.97801030e+00 -1.35551047e+00] [ 2.22551394e+00 -1.43935001e+00 -2.49376082e+00 ... -9.17003334e-01 9.25242007e-01 1.12847614e+00]]] [[[ 3.25127155e-01 -1.56809282e+00 -1.31062078e+00 ... -2.06098771e+00 -1.68006018e-01 -1.72482705e+00] [ 2.13619187e-01 1.88689542e+00 1.79324180e-01 ... 
-1.07927628e-01 -1.11488365e-01 9.43831325e-01] [-1.09785542e-01 1.54420614e-01 -7.58505285e-01 ... -6.68677762e-02 3.21692109e-01 1.57053399e+00] ... [ 1.50517166e+00 7.90704608e-01 6.43151477e-02 ... 5.18887401e-01 6.22952938e-01 -3.24053437e-01] [-4.73257542e-01 -4.95763719e-01 5.25954723e-01 ... 6.48852944e-01 7.61326611e-01 8.94579768e-01] [-7.30752110e-01 1.72862545e-01 -7.16149390e-01 ... -9.32540178e-01 5.76503463e-02 1.95142949e+00]] [[-7.62983680e-01 -1.51899695e+00 1.14069112e-01 ... 1.58741608e-01 9.87820864e-01 -5.62884390e-01] [-4.83251214e-01 -1.00975685e-01 -1.88830388e+00 ... -2.18700004e+00 2.74647951e-01 -5.70335567e-01] [-3.97614002e-01 3.39628428e-01 4.97590393e-01 ... -6.96297884e-01 -7.03629181e-02 1.36156297e+00] ... [-7.84759104e-01 -7.01141179e-01 1.31220150e+00 ... -3.24778914e-01 5.27744949e-01 -2.41152215e+00] [-2.40514922e+00 8.53927135e-01 1.14205170e+00 ... 1.02370715e+00 1.65422745e-02 3.98853272e-01] [ 1.77825367e+00 3.10576528e-01 5.49035788e-01 ... 7.80740082e-01 -1.49088070e-01 2.91787654e-01]] [[ 1.03068523e-01 -5.19862592e-01 -7.58745134e-01 ... 1.14749789e+00 -1.21443212e+00 -6.41532838e-01] [ 1.32244527e+00 1.36133456e+00 1.06848347e+00 ... 2.85396487e-01 1.09119415e+00 2.75922298e-01] [ 4.29248393e-01 -7.17998266e-01 6.55559719e-01 ... -3.04070413e-01 1.06817782e+00 -3.62584084e-01] ... [ 3.79418164e-01 -4.21558261e-01 -3.75427336e-01 ... 6.23062886e-02 2.70374703e+00 2.30436072e-01] [-1.68596554e+00 2.63222647e+00 -9.86951172e-01 ... 5.72151423e-01 -8.75621736e-01 1.32425094e+00] [ 8.08815837e-01 -4.49048936e-01 -5.89706779e-01 ... 2.36586642e+00 3.37808669e-01 7.08149731e-01]] ... [[-5.18545359e-02 1.24582469e+00 1.93408632e+00 ... 1.80315182e-01 5.90915561e-01 -1.17646400e-02] [-6.34768486e-01 1.44755816e+00 -7.52793610e-01 ... 5.71796954e-01 3.80541146e-01 -2.24619055e+00] [ 6.23306692e-01 -8.39356482e-01 8.60100389e-02 ... -1.08431458e+00 -7.62691796e-02 -9.51440811e-01] ... 
[-1.53354514e+00 -9.82103467e-01 1.05841219e+00 ... -1.73872739e-01 -3.62487316e-01 4.38041717e-01] [-9.04893041e-01 -7.91228376e-03 -6.45420134e-01 ... 1.37332976e+00 2.03224444e+00 -6.99846625e-01] [ 6.04261411e-03 -6.26958430e-01 -7.29159713e-01 ... 1.45525897e+00 8.13774168e-01 8.62434149e-01]] [[-1.54123127e-01 2.16328546e-01 -8.78648996e-01 ... -9.18302238e-01 -7.78004408e-01 -6.95536911e-01] [ 1.56554115e+00 1.87002218e+00 -1.72093821e+00 ... -9.66672838e-01 9.34756696e-02 1.71475267e+00] [-7.81486809e-01 -5.65168083e-01 -9.34707940e-01 ... 1.38358450e+00 3.09014414e-02 1.41336417e+00] ... [ 3.57269108e-01 3.11002374e-01 -5.50053239e-01 ... -1.29401672e+00 -8.52119029e-01 9.10298049e-01] [-1.24401343e+00 -5.15753865e-01 1.06414270e+00 ... 9.58260000e-01 1.07919121e+00 -5.37384935e-02] [-2.69216970e-02 1.30893096e-01 3.76492321e-01 ... -7.80666649e-01 3.21257025e-01 -1.46672809e+00]] [[-8.78930628e-01 -1.51972592e+00 -1.46908030e-01 ... -6.84262693e-01 -1.35216546e+00 1.39121461e+00] [-9.14657712e-01 1.48927319e+00 -5.13245501e-02 ... -5.77352405e-01 -9.81411099e-01 7.33975828e-01] [-1.05802476e+00 4.92528886e-01 -1.63895214e+00 ... 9.39034164e-01 2.25024242e-02 1.12277186e+00] ... [-1.20361888e+00 1.12509489e+00 9.04194891e-01 ... -5.01724005e-01 7.74510801e-02 1.65916181e+00] [ 5.11942327e-01 -1.69469726e+00 -6.68123424e-01 ... -7.14063644e-01 5.21539897e-02 3.35621536e-01] [ 1.09153759e+00 -2.23800451e-01 -9.35014859e-02 ... -8.64334881e-01 4.43409503e-01 -7.00653851e-01]]] [[[-2.85806894e-01 2.70972490e-01 -1.41133392e+00 ... 1.95924826e-02 -4.17528659e-01 1.30792642e+00] [-5.27298570e-01 7.80916035e-01 1.70297101e-01 ... -1.33129144e-02 2.80344244e-02 -3.21931839e-01] [-1.31231797e+00 1.29239142e-01 -2.02151752e+00 ... 2.57549822e-01 -1.50075471e+00 -6.32612765e-01] ... [ 5.47004759e-01 -8.52625743e-02 -9.68376279e-01 ... -1.39347762e-01 -2.27595478e-01 5.73728442e-01] [ 4.83902842e-01 8.70289922e-01 8.93242240e-01 ... 
8.57504457e-02 5.74317634e-01 7.96005785e-01] [ 1.64091432e+00 1.35271478e+00 1.97816804e-01 ... 1.14076364e+00 1.10757780e+00 4.94132817e-01]] [[-1.85793579e-01 6.25943542e-01 -3.13546928e-03 ... -2.26279640e+00 -9.49372649e-01 -1.81040728e+00] [ 1.23196132e-01 1.78012729e+00 4.29632217e-01 ... 3.07652336e-02 -7.15600014e-01 6.64282084e-01] [-4.56588835e-01 -6.75247550e-01 1.55090004e-01 ... 8.72037828e-01 -3.65632892e-01 -1.32462096e+00] ... [-2.16331203e-02 -1.05281234e+00 -2.11440429e-01 ... -1.62898099e+00 -1.51078665e+00 1.83041108e+00] [ 3.32316190e-01 -3.73264283e-01 -1.31020701e+00 ... 1.29378283e+00 6.44446909e-02 1.28149295e+00] [-7.95752525e-01 -1.04108679e+00 -7.11993515e-01 ... 4.81317103e-01 2.57886201e-01 -2.90182769e-01]] [[ 9.73249495e-01 9.43157315e-01 -1.23535967e+00 ... 8.75111938e-01 -9.71379206e-02 6.20199621e-01] [-8.07805955e-01 -4.98328626e-01 5.33791900e-01 ... -1.23969424e+00 1.08907104e+00 -8.90797675e-02] [ 1.25732255e+00 -2.28432512e+00 -2.44521320e-01 ... -6.40085101e-01 -1.36021912e-01 1.83263689e-01] ... [ 7.43415058e-01 -3.68486196e-01 -7.49667764e-01 ... -5.09314179e-01 1.62529421e+00 4.66538101e-01] [ 7.00279534e-01 -1.84451187e+00 1.04974067e+00 ... -9.68194246e-01 -1.65232801e+00 1.87042141e+00] [ 1.61651284e-01 2.14477181e+00 2.40762532e-01 ... 8.36346209e-01 -2.68840492e-02 -1.54083669e-01]] ... [[-4.29560333e-01 -3.68148535e-01 1.89481318e+00 ... -4.80343759e-01 1.43422425e+00 -8.74848187e-01] [ 3.00808489e-01 1.00488806e+00 3.45850199e-01 ... -8.28907490e-01 4.66326505e-01 1.21914136e+00] [ 1.10068250e+00 -4.57291633e-01 -1.68726790e+00 ... -9.99216974e-01 -2.09790397e+00 -4.53833699e-01] ... [-2.61244583e+00 -1.07362795e+00 -5.94396936e-03 ... -6.95061564e-01 -1.35682255e-01 1.05915889e-01] [-1.43214059e+00 5.52656651e-01 6.27997100e-01 ... -1.01537895e+00 -1.05417478e+00 1.15793169e+00] [-5.76317072e-01 -1.37438238e+00 5.77828109e-01 ... 
5.03944099e-01 1.85586846e+00 -7.23448575e-01]] [[ 7.19304264e-01 1.07078087e+00 1.30590105e+00 ... 7.23666847e-01 1.60280216e+00 -1.57662332e+00] [-1.18609011e+00 1.73160229e-02 9.09145892e-01 ... -1.40821230e+00 -6.27368152e-01 1.52028501e+00] [ 1.36721444e+00 3.05465430e-01 -1.55816644e-01 ... 1.59946382e+00 1.05798483e+00 -7.02521622e-01] ... [-1.09224415e+00 2.85137922e-01 -1.51121449e+00 ... 9.58658159e-01 -1.47863117e-03 2.45745495e-01] [ 7.58737564e-01 1.50843000e+00 -6.85238600e-01 ... -2.65179753e-01 3.00349325e-01 -1.18739855e+00] [-1.07341909e+00 8.33113849e-01 5.13739467e-01 ... -1.57814801e+00 6.00713670e-01 1.70695865e+00]] [[-1.01966679e+00 6.44463420e-01 -8.71126354e-01 ... 1.20108163e+00 -1.97976029e+00 -5.23253977e-01] [ 3.48285884e-01 -1.95543215e-01 -4.30573195e-01 ... 7.06603408e-01 -7.31959522e-01 2.32115775e-01] [ 6.55915141e-02 -4.41934735e-01 -5.11836886e-01 ... 1.41926920e+00 3.03533882e-01 5.21254122e-01] ... [-4.01292980e-01 5.84229052e-01 2.66839474e-01 ... 6.55836463e-01 1.34575868e+00 -1.02458179e+00] [ 1.32465589e+00 1.00830221e+00 8.01625073e-01 ... 8.26742947e-01 5.49180090e-01 -5.84791780e-01] [ 1.23591888e+00 -4.21519578e-01 -4.29836214e-01 ... 1.20894206e+00 -1.93832479e-02 3.31969887e-01]]]] [[[[-2.26461816e+00 -4.98859644e-01 1.38683963e+00 ... 2.10951924e+00 -6.43102348e-01 -4.06477243e-01] [ 1.41292942e+00 1.22545946e+00 7.58813500e-01 ... 1.58221155e-01 8.07213664e-01 -1.71239579e+00] [ 4.51040685e-01 -5.22726655e-01 -2.77020717e+00 ... 7.58058131e-02 -2.02891088e+00 -2.02824563e-01] ... [-4.86112326e-01 -1.07330287e+00 4.64393804e-03 ... -4.61854413e-02 -3.16916972e-01 -4.05595899e-01] [-1.57097971e+00 3.12157989e-01 2.51703292e-01 ... -6.91173434e-01 -7.85528958e-01 7.12181985e-01] [ 1.66050625e+00 -8.05028200e-01 -2.94575357e+00 ... 1.28046870e-01 1.33105481e+00 -6.92257643e-01]] [[-3.43030721e-01 -1.74506474e+00 -1.97069615e-01 ... 
-2.43217841e-01 -6.03435099e-01 -1.36740410e+00] [ 1.50868058e+00 3.72746617e-01 1.39333367e+00 ... -1.25767863e+00 -1.41556191e+00 -7.23881871e-02] [ 2.97525078e-01 -1.21184722e-01 2.61142671e-01 ... -4.56218958e-01 -1.07029565e-01 -2.21250147e-01] ... [ 6.99339688e-01 1.56324661e+00 9.46232319e-01 ... -1.04486752e+00 -4.56613839e-01 -1.31496561e+00] [-1.54481971e+00 6.98048532e-01 -5.49602270e-01 ... -7.08848059e-01 -1.30294010e-01 2.35150039e-01] [-2.61261106e-01 2.76021659e-01 5.95300570e-02 ... -6.63691461e-01 -3.01237583e-01 1.36899328e+00]] [[ 6.79937422e-01 6.54273748e-01 5.13639092e-01 ... -1.08258390e+00 -9.33465600e-01 -1.67187667e+00] [-4.42686945e-01 7.32329041e-02 -1.13674486e+00 ... -7.13967919e-01 1.18177958e-01 -5.48614681e-01] [-6.07066333e-01 -1.04723871e+00 -1.59817207e+00 ... -1.08133423e+00 8.19471657e-01 -2.12092325e-01] ... [-1.05492957e-01 4.66310114e-01 -1.33241653e+00 ... -1.70139742e+00 7.37490207e-02 -1.65636241e+00] [-1.79353952e+00 -8.20924938e-02 5.10190487e-01 ... 1.40174949e+00 7.61266589e-01 -7.15419799e-02] [-2.59610987e+00 -1.80921808e-01 -2.12367868e+00 ... -8.89535174e-02 -3.43532383e-01 4.14516419e-01]] ... [[-4.54911679e-01 2.18613788e-01 4.99420196e-01 ... 3.10468376e-01 -5.63406795e-02 -1.01940103e-01] [-3.17789167e-02 5.13383567e-01 1.60995471e+00 ... 7.91629910e-01 -7.86305070e-02 1.94670939e+00] [-1.71775848e-01 -7.00185537e-01 -8.41852069e-01 ... -4.69320178e-01 -1.56067431e+00 1.46645939e+00] ... [-1.06135082e+00 -1.40493894e+00 2.07620001e+00 ... -1.82963908e+00 -1.26482412e-01 -1.74265355e-01] [ 5.47098637e-01 -1.02230966e+00 3.70029986e-01 ... -2.33562660e+00 7.68483281e-01 1.76295880e-02] [ 7.69930333e-02 -8.35147917e-01 1.81787765e+00 ... 8.69250119e-01 -8.42036724e-01 8.90697062e-01]] [[ 2.98137379e+00 1.25800514e+00 2.27603629e-01 ... -4.24625546e-01 4.88301486e-01 1.13496220e+00] [-1.57070625e+00 1.16765046e+00 -2.25933403e-01 ... 
7.38183379e-01 -2.27416307e-02 5.87869948e-03] [ 2.37209052e-01 -1.65968597e-01 -9.81477022e-01 ... 1.05561960e+00 -2.00975105e-01 -6.56832814e-01] ... [-1.35484004e+00 6.23260319e-01 -8.56900513e-01 ... -1.81062424e+00 -1.29580915e-01 1.76986063e+00] [ 4.05821294e-01 -7.78534472e-01 1.61070454e+00 ... 7.72242919e-02 -4.14763957e-01 1.63034666e+00] [-4.53116208e-01 1.38366550e-01 -1.44666541e+00 ... 1.47778738e+00 -4.78931367e-01 -3.70705217e-01]] [[ 4.80687618e-01 -6.33005083e-01 -5.76307356e-01 ... -4.61133540e-01 1.45733774e+00 1.65351704e-01] [-3.54993165e-01 1.56125379e+00 -3.36059064e-01 ... 1.58490539e+00 -2.04988813e+00 -7.07852066e-01] [ 4.44666952e-01 6.05080187e-01 -2.40600348e-01 ... -1.15272355e+00 -9.85568464e-01 1.04829133e-01] ... [ 4.51786220e-01 -1.99978769e+00 -1.37477612e+00 ... 7.83582151e-01 1.55149436e+00 2.81175852e-01] [-4.16349083e-01 -5.54124713e-01 -1.80451810e+00 ... 2.76838094e-01 -6.35530889e-01 -9.93538082e-01] [-1.13007379e+00 -6.84741795e-01 -1.05791700e+00 ... 1.81487572e+00 -1.43596268e+00 -5.55937886e-01]]] [[[ 6.28199160e-01 1.18428659e+00 -5.40464520e-01 ... -9.57243979e-01 -3.32123429e-01 -1.25900304e+00] [-2.30336761e+00 2.66481090e+00 -7.58509755e-01 ... -3.46662030e-02 2.92909443e-01 -6.13670349e-01] [ 1.56465366e-01 1.69100118e+00 1.24803591e+00 ... -8.86754096e-01 6.34784937e-01 -2.29011744e-01] ... [ 2.14007795e-02 -1.40894222e+00 -1.52404383e-01 ... 8.60494494e-01 -2.71790385e-01 1.37853456e+00] [ 1.82273053e-02 -7.69238114e-01 -9.77354944e-02 ... -6.01464391e-01 3.14388901e-01 2.16505861e+00] [ 1.45046222e+00 -9.72509384e-01 2.28326753e-01 ... -5.57606667e-02 -1.28352320e+00 4.99006748e-01]] [[-9.13788915e-01 -8.06979656e-01 -1.18056810e+00 ... 1.51800919e+00 -2.31668666e-01 1.11112189e+00] [ 4.67270494e-01 1.30288333e-01 1.16478181e+00 ... 5.15047073e-01 1.25872374e+00 -4.30796355e-01] [ 2.58662134e-01 4.83750612e-01 1.68834090e+00 ... 5.54284036e-01 -6.25094950e-01 6.46454632e-01] ... 
[-1.47432268e+00 -9.67696428e-01 -5.75186253e-01 ... -8.65593791e-01 -7.76904762e-01 -2.80732960e-01] [ 1.38157618e+00 -1.46810877e+00 1.06067955e+00 ... 7.76043653e-01 -2.71077424e-01 6.84550643e-01] [ 6.19492769e-01 5.07582128e-01 7.48891592e-01 ... 3.95492166e-01 1.28493941e+00 -5.54428339e-01]] [[-1.96191895e+00 1.52300382e+00 -7.17518568e-01 ... 5.79010844e-01 -7.82494783e-01 1.15530968e+00] [ 2.10451916e-01 -5.08125186e-01 -8.72141421e-01 ... -1.24040759e+00 -2.90331483e-01 -7.54222870e-01] [ 3.24243724e-01 -1.13477528e+00 8.43723491e-02 ... -2.15977287e+00 -1.28808665e+00 5.48189342e-01] ... [-2.22293288e-03 1.10629213e+00 -1.16571188e-01 ... -4.07227725e-01 5.42013705e-01 6.21292591e-01] [ 4.51136261e-01 -1.48531592e+00 -1.02213037e+00 ... 6.87584877e-02 1.00897086e+00 4.88942981e-01] [ 1.88005224e-01 6.94038451e-01 2.87605256e-01 ... 1.76494583e-01 8.33088577e-01 6.77953362e-01]] ... [[-1.78966308e+00 -1.30264854e+00 -5.42035580e-01 ... 1.03330719e+00 -4.41020697e-01 -6.09557748e-01] [ 6.32737458e-01 3.15965354e-01 7.75925517e-01 ... 5.02963960e-01 -8.49694729e-01 2.44432226e-01] [ 9.04874384e-01 4.15542573e-01 -5.26423454e-01 ... 7.52917528e-01 -1.92961514e-01 -6.24543965e-01] ... [ 1.18860137e+00 6.93145454e-01 4.46155429e-01 ... -6.46345437e-01 -3.32688019e-02 3.09240729e-01] [ 9.26551282e-01 5.46570599e-01 -1.35047829e+00 ... -6.05017066e-01 1.81415403e+00 -3.91591132e-01] [-1.09175050e+00 5.61214983e-01 4.84490812e-01 ... -1.28369823e-01 -1.04301631e+00 1.17019141e+00]] [[-6.28620744e-01 8.83654177e-01 2.83294111e-01 ... -9.89461958e-01 9.02159870e-01 -3.11805964e-01] [-1.39776874e+00 -2.21101895e-01 -6.05310321e-01 ... 6.30345523e-01 1.11395657e+00 -9.72981930e-01] [-3.82553965e-01 -6.89670891e-02 -2.05593634e+00 ... -5.96037984e-01 7.83040047e-01 9.32283878e-01] ... [-1.36310613e+00 -9.84076262e-01 -3.63380253e-01 ... 7.85581112e-01 8.99026453e-01 -1.04592144e+00] [-2.55789340e-01 1.83868825e-01 -8.85054693e-02 ... 
1.14099324e+00 -7.64277458e-01 9.59050179e-01] [ 7.68475890e-01 3.95517141e-01 1.06196351e-01 ... -4.83244538e-01 3.76874804e-01 2.13477588e+00]] [[-1.04356647e+00 4.32402104e-01 1.20687807e+00 ... -2.14355493e+00 -2.13542080e+00 -3.18211168e-02] [-2.67041147e-01 1.12572145e+00 -8.87403011e-01 ... 7.87444711e-01 1.62574744e+00 -3.17423820e-01] [ 1.89317659e-01 1.61341473e-01 6.40219510e-01 ... 1.79025486e-01 1.80677906e-01 1.28078258e+00] ... [ 2.20068884e+00 -1.10201395e+00 1.20794880e+00 ... -1.31661355e+00 5.67619324e-01 4.94710177e-01] [-4.74633992e-01 1.67543304e+00 9.36336637e-01 ... -9.34798345e-02 3.49293277e-02 -6.37576759e-01] [ 9.12651658e-01 -8.36500943e-01 -4.18616861e-01 ... 4.10356164e-01 7.52958357e-01 -2.26415014e+00]]] [[[ 1.82225275e+00 -1.38780221e-01 1.10253727e+00 ... -2.58653257e-02 2.27724388e-01 3.08622271e-01] [-9.52742398e-01 1.02073598e+00 -1.00338614e+00 ... 9.07983065e-01 2.69125164e-01 -1.71952099e-01] [ 2.18566394e+00 -1.67889977e+00 -8.01782131e-01 ... -4.95614052e-01 -1.62607312e+00 -8.01990628e-01] ... [-1.85337520e+00 3.32406163e-01 -8.21177840e-01 ... 2.05865335e+00 1.30511689e+00 -6.36918783e-01] [-6.83092117e-01 3.61851394e-01 -3.44933778e-01 ... 5.52077115e-01 5.34294426e-01 -5.56852281e-01] [-8.62300932e-01 -5.21965444e-01 -1.51960254e+00 ... 3.15743536e-01 -1.73560548e+00 2.86127962e-02]] [[-4.87430841e-01 -2.10474443e+00 -1.00206316e+00 ... -1.92494035e-01 -6.31885290e-01 -1.42035627e+00] [-1.31548541e-02 -4.67768103e-01 1.09073484e+00 ... -6.49700761e-02 9.96504903e-01 -1.85879290e+00] [ 3.33177997e-03 -4.80023175e-01 -1.96912423e-01 ... 1.17210436e+00 -6.74070492e-02 6.88632727e-02] ... [-1.91865250e-01 -2.11828396e-01 -1.69775450e+00 ... 7.09923744e-01 -1.45320833e-01 -3.13054830e-01] [-1.53117049e+00 -2.00368452e+00 9.33023989e-01 ... -1.90399456e+00 -6.40247464e-01 7.33525634e-01] [ 3.30206543e-01 -1.40073931e+00 9.26906094e-02 ... 
-2.71265469e-02 1.09912157e-01 -3.39967728e-01]] [[-1.01901598e-01 3.35535586e-01 6.25418723e-01 ... 4.15816993e-01 4.22148675e-01 9.26620066e-01] [-1.20816398e+00 -1.20658815e+00 7.91663945e-01 ... -6.90144062e-01 -1.94175035e-01 3.43744345e-02] [-4.40084219e-01 -3.02768052e-01 -1.20305085e+00 ... 2.25440755e-01 -1.49783051e+00 -2.17619598e-01] ... [-6.79185808e-01 -3.53332430e-01 -1.62799525e+00 ... 3.60331774e-01 5.39746583e-01 2.36337230e-01] [ 2.36424232e+00 2.47140944e-01 -9.90307331e-02 ... -1.77316666e-01 1.99442387e+00 7.29323268e-01] [-1.16552162e+00 2.68859323e-02 -3.94694895e-01 ... 3.59205633e-01 -7.59410501e-01 5.17755926e-01]] ... [[-1.82591081e+00 -1.01222014e+00 7.87963033e-01 ... -1.23556232e+00 9.31193352e-01 -1.74502826e+00] [ 7.12673843e-01 5.37888408e-01 -7.34174967e-01 ... -9.55512404e-01 -1.96149313e+00 2.15089488e+00] [-9.98483896e-01 1.29477322e-01 1.01823282e+00 ... -8.81452739e-01 -2.84130126e-01 2.48505682e-01] ... [-3.79732281e-01 5.61239600e-01 6.33366168e-01 ... -6.58103079e-02 -1.12850368e+00 9.24771249e-01] [ 1.36611268e-01 3.89645323e-02 1.14058185e+00 ... -1.79698840e-01 4.94180769e-01 6.10892534e-01] [ 5.97259939e-01 -1.37018251e+00 2.50062466e-01 ... -1.69969797e-01 -3.91535282e-01 -7.00570583e-01]] [[-2.48566866e-01 1.59273779e+00 -8.87142122e-01 ... -1.67655751e-01 1.97009742e+00 -1.90931749e+00] [ 1.65724003e+00 1.44858342e-02 -3.21239889e-01 ... 2.53037143e+00 -1.18062332e-01 -1.34554386e+00] [-5.35692096e-01 -5.37942708e-01 8.48691165e-01 ... 4.38365877e-01 1.41156837e-01 5.17243266e-01] ... [ 8.59253943e-01 -3.94467384e-01 3.95308025e-02 ... 5.59866309e-01 8.84024501e-01 1.46630859e+00] [ 2.46075511e+00 3.90631407e-01 -1.59123445e+00 ... -4.43044990e-01 1.96008301e+00 -1.80075181e+00] [ 4.83837783e-01 -1.96657121e+00 1.64516854e+00 ... -9.34194326e-01 5.05173802e-01 8.01133454e-01]] [[-4.83615875e-01 -1.84667027e+00 -1.30257332e+00 ... 
2.71847188e-01 7.42418766e-01 -1.98065676e-02] [-4.72344220e-01 -1.45398629e+00 1.38207674e+00 ... 1.67308062e-01 1.49259222e+00 1.25869715e+00] [ 1.42109430e+00 -3.50510925e-01 -1.11247897e+00 ... -3.48979607e-02 -8.82024050e-01 9.03290629e-01] ... [ 1.95217717e+00 -8.50172460e-01 -1.16252947e+00 ... -1.09596896e+00 5.89598835e-01 1.51003170e+00] [-1.32628381e+00 2.17123389e+00 8.48607957e-01 ... -7.49552310e-01 1.11797559e+00 -4.41234738e-01] [ 1.56523418e+00 1.84941292e+00 7.69629121e-01 ... -1.47768736e-01 1.26062024e+00 -9.07334238e-02]]] [[[-1.00217760e-01 1.06016922e+00 -4.02788728e-01 ... -1.31899166e+00 1.25313139e+00 3.47379804e-01] [-1.22386605e-01 1.39300182e-01 -1.68430671e-01 ... -8.96998525e-01 -5.76135218e-01 -1.50222492e+00] [-2.86143929e-01 -1.71201438e-01 7.11947381e-01 ... -1.82112202e-01 -3.81600589e-01 1.29113221e+00] ... [-9.65648651e-01 5.37727296e-01 6.19013369e-01 ... -2.27053687e-01 6.12663999e-02 -1.80282104e+00] [ 4.19071347e-01 -8.28822672e-01 9.68629003e-01 ... -6.70936525e-01 2.53410697e+00 -2.42240280e-01] [ 1.43509841e+00 1.53555906e+00 -1.13098145e+00 ... 2.31359646e-01 -4.96512175e-01 -1.59258604e+00]] [[-2.65589356e+00 3.12829435e-01 -1.25794601e+00 ... -8.86897922e-01 -5.99673390e-01 -1.17754489e-01] [-1.64193320e+00 5.92624664e-01 -2.07264519e+00 ... -8.68625164e-01 7.46235251e-02 -9.26387906e-01] [ 9.78794158e-01 -2.59229660e-01 -4.85992841e-02 ... 1.11538684e+00 8.89060378e-01 -5.16174495e-01] ... [ 1.45011783e+00 5.84238708e-01 1.69723713e+00 ... -1.18808353e+00 4.42233980e-01 6.17241740e-01] [ 8.25763345e-02 1.43171027e-01 3.76513213e-01 ... -1.78786397e+00 8.49470794e-01 3.59179586e-01] [-3.26706052e-01 5.08285940e-01 3.14573288e-01 ... -1.37800133e+00 -1.97642267e-01 2.88989156e-01]] [[-5.31700432e-01 5.20106256e-01 -1.10391390e+00 ... 1.05268455e+00 1.01984978e+00 5.20737946e-01] [-2.73267126e+00 8.73729110e-01 1.94264933e-01 ... 
-5.82091987e-01 1.69410661e-01 -7.78588295e-01] [-1.15633392e+00 -1.63587368e+00 5.87061346e-02 ... -1.04361999e+00 1.90311635e+00 -1.18108857e+00] ... [ 1.98328137e+00 2.02825713e+00 -2.53997707e+00 ... 1.62094831e-01 5.75090766e-01 1.49085224e+00] [ 4.00420427e-01 -5.43697715e-01 -2.98408777e-01 ... -1.62800539e+00 -1.59120536e+00 3.56704056e-01] [-8.19490254e-01 5.25216460e-01 -1.11884570e+00 ... 7.43102014e-01 2.20851108e-01 4.23900843e-01]] ... [[-1.09284866e+00 5.09097219e-01 -3.70706260e-01 ... 1.52639949e+00 3.89462374e-02 -8.24176013e-01] [-2.41923392e-01 3.85688812e-01 -2.51007080e-01 ... 1.55958128e+00 8.14097404e-01 -1.50631487e-01] [ 2.53304075e-02 -3.62462997e-01 -3.45380276e-01 ... -4.68536764e-01 6.51757479e-01 2.73339939e+00] ... [ 2.94182241e-01 -1.43121982e+00 2.72337139e-01 ... -5.77085316e-02 -1.51199841e+00 -6.64814651e-01] [-6.45356476e-01 2.48259053e-01 4.53665167e-01 ... 1.27086937e+00 -1.42866099e+00 6.40370995e-02] [-5.24554849e-01 -1.37515402e+00 1.24712903e-02 ... -2.70146310e-01 7.39092156e-02 -2.48767233e+00]] [[-3.28107655e-01 -3.89144093e-01 -6.65817857e-01 ... -1.00200856e+00 -2.57709122e+00 -6.04860246e-01] [ 9.78652313e-02 5.35726920e-02 -1.24517190e+00 ... 9.77164865e-01 -1.78708911e+00 -1.73901093e+00] [ 1.59819145e-02 2.45853707e-01 -8.83042216e-01 ... 1.12459290e+00 5.58933735e-01 -2.01628238e-01] ... [ 6.42610669e-01 9.21325982e-01 -3.35231155e-01 ... -5.33352315e-01 -8.73661160e-01 -1.52858293e+00] [ 1.73986375e+00 -7.11370111e-01 -4.53933835e-01 ... -1.08934116e+00 8.79999042e-01 -9.80900705e-01] [ 5.32982767e-01 -3.16615850e-01 -1.08946514e+00 ... -1.79020286e-01 1.19677138e+00 5.10076821e-01]] [[-1.87058461e+00 4.24757332e-01 2.55968445e-03 ... 7.93899745e-02 1.12174869e-01 3.16135377e-01] [ 1.28831565e-01 -7.39026129e-01 1.16481423e+00 ... 3.87906194e-01 -2.06010360e-02 3.19578089e-02] [-2.25877309e+00 8.15857887e-01 9.58480775e-01 ... 5.61381638e-01 -3.61461997e-01 -7.32685268e-01] ... 
[ 3.54252577e-01 -3.89630169e-01 -3.56306940e-01 ... -2.85762046e-02 1.22769915e-01 5.02531290e-01] [ 1.65194702e+00 -1.54157138e+00 7.23172903e-01 ... -2.23020291e+00 -3.85253906e-01 -3.98137152e-01] [ 1.30117166e+00 1.17345715e+00 2.74843645e+00 ... 1.58415288e-01 2.82043606e-01 -1.39260077e+00]]] [[[-3.36766958e-01 -1.10965991e+00 -7.29403377e-01 ... 3.55627745e-01 3.05121578e-02 7.42698163e-02] [ 3.92249882e-01 7.49052107e-01 -1.54199982e+00 ... -8.34977254e-02 1.43518591e+00 -1.43585873e+00] [ 9.16679680e-01 1.55947411e+00 1.12899685e+00 ... 1.61432469e+00 -9.95460093e-01 9.27627087e-01] ... [ 1.50724664e-01 -7.20107198e-01 4.64192569e-01 ... -1.24091268e+00 1.54470718e+00 3.42050135e-01] [-7.21333846e-02 -1.08567345e+00 7.25422859e-01 ... -8.98719251e-01 -5.32743990e-01 -2.76243258e+00] [ 1.48003244e+00 4.66456115e-01 -5.19666612e-01 ... 8.70654345e-01 5.81247807e-01 9.31047320e-01]] [[-1.12476796e-01 9.97874618e-01 8.01597357e-01 ... -1.34417653e+00 -4.55884606e-01 -7.67041206e-01] [-1.03447223e+00 1.19192553e+00 1.92689121e+00 ... -1.16390145e+00 6.79719388e-01 -6.02660418e-01] [ 8.70705366e-01 -9.16959122e-02 -3.24028850e-01 ... -1.50320446e+00 9.24942672e-01 2.05337599e-01] ... [-3.08995582e-02 -1.48384154e-01 -1.59214509e+00 ... 8.19195628e-01 7.79932976e-01 2.51461029e-01] [-3.75217617e-01 -5.16828895e-01 -1.97397017e+00 ... 1.37589514e+00 4.49067026e-01 -2.45940757e+00] [ 3.14814806e-01 7.45710015e-01 -1.08467436e+00 ... -7.80577719e-01 2.42615843e+00 8.33408952e-01]] [[ 1.33122694e+00 1.32192242e+00 -7.27825701e-01 ... 4.69285488e-01 -9.35207188e-01 3.68725121e-01] [-2.13779020e+00 1.49256623e+00 3.41138870e-01 ... 1.43801343e+00 1.26657140e+00 1.37713444e+00] [ 1.30973518e+00 -2.00816289e-01 2.40301773e-01 ... -7.06223249e-01 9.88982171e-02 1.14259470e+00] ... [-6.73531294e-02 7.93866098e-01 -9.33380842e-01 ... -4.27635789e-01 -6.32966518e-01 -1.30578077e+00] [ 1.03184581e+00 -3.30985475e+00 -3.68158706e-02 ... 
-1.43278480e+00 -3.92914474e-01 -4.30914462e-01] [ 8.97779223e-03 -6.89447641e-01 1.40776336e+00 ... -8.88077021e-01 -3.42391729e-01 -1.77229655e+00]] ... [[-1.38898182e+00 -1.14468575e+00 -3.83387178e-01 ... -6.13068461e-01 -9.42478955e-01 9.27191556e-01] [-2.93019205e-01 -6.69954047e-02 -9.81067419e-01 ... -5.54814994e-01 4.86298203e-01 3.28812212e-01] [ 4.21672851e-01 3.38597000e-01 7.76082098e-01 ... 1.71327472e+00 -7.89740026e-01 -6.95607424e-01] ... [-1.71501720e+00 8.41311812e-01 -8.79503310e-01 ... -8.77456486e-01 -1.26403999e+00 -3.48987788e-01] [ 1.15099144e+00 -3.24684590e-01 4.32148963e-01 ... -7.45630339e-02 7.15768814e-01 -1.62347317e+00] [-3.80829334e-01 9.29788686e-03 -1.68044674e+00 ... -1.81711364e+00 -1.14035630e+00 -7.41581023e-01]] [[-3.13073725e-01 1.63005032e-02 -2.18284845e+00 ... -4.89987344e-01 -1.42750049e+00 -9.54963088e-01] [-2.18564630e+00 -5.62404215e-01 -1.10877943e+00 ... 1.90101564e-01 7.57360399e-01 7.33938277e-01] [-4.51717168e-01 2.19384146e+00 1.50935173e-01 ... -4.59968328e-01 1.06951989e-01 8.34496677e-01] ... [ 1.68760672e-01 -2.84162760e+00 -6.95981607e-02 ... -4.83920500e-02 1.59881485e+00 2.93067515e-01] [-3.54638904e-01 -1.28806436e+00 -1.36489761e+00 ... 9.09758329e-01 -1.44176781e-02 1.60347295e+00] [ 1.08956647e+00 -7.84244776e-01 -4.99229692e-03 ... 1.98709652e-01 -1.67905569e-01 -1.09948635e+00]] [[-7.15992451e-01 -7.31804520e-02 -9.63672936e-01 ... 1.13191867e+00 5.87334335e-01 7.22611725e-01] [-7.23083854e-01 -2.54960465e+00 -1.20159197e+00 ... 1.26946878e+00 7.38198906e-02 2.94077992e-01] [ 1.49351692e+00 4.61576939e-01 1.86738715e-01 ... 5.14227569e-01 5.14276505e-01 1.29531097e+00] ... [-2.23248100e+00 -9.28126335e-01 -8.99381638e-01 ... -5.25783412e-02 2.43903235e-01 -2.98073351e-01] [ 7.27493346e-01 -3.65314424e-01 -8.15951943e-01 ... -1.03360273e-01 -8.21539462e-01 6.14242136e-01] [-5.43053389e-01 2.07782722e+00 1.15995932e+00 ... 
1.94235003e+00 8.98796320e-01 1.17995691e+00]]] [[[ 1.01093590e+00 5.75544477e-01 -3.91107559e-01 ... -1.30841359e-01 3.48543346e-01 3.34819973e-01] [-1.23212347e-02 8.73732269e-01 1.22882271e+00 ... 1.49097860e+00 2.02477664e-01 -3.79301399e-01] [ 2.67077267e-01 6.02551460e-01 1.05404866e+00 ... 4.80744839e-01 -6.22859418e-01 -3.46368372e-01] ... [ 1.84921861e+00 -9.12770748e-01 8.64612579e-01 ... -1.00578022e+00 -1.61155844e+00 9.49343085e-01] [ 1.40520251e+00 9.02042329e-01 -4.14958686e-01 ... -5.80424964e-01 1.09362257e+00 -2.53129542e-01] [-1.03344989e+00 -1.63198066e+00 -5.34327805e-01 ... -2.19632912e+00 -6.72054291e-02 2.98983604e-01]] [[ 4.14364815e-01 -1.78568214e-01 -9.74375904e-01 ... -1.46886265e+00 9.27338839e-01 -1.76568940e-01] [-1.57728386e+00 -1.46144915e+00 1.11725867e+00 ... -1.14453442e-01 2.30125165e+00 -5.15219450e-01] [ 3.53826851e-01 1.00845255e-01 1.18362761e+00 ... 1.20668419e-01 -3.45428681e+00 1.32629126e-01] ... [ 9.30010498e-01 -1.36561584e+00 7.11228102e-02 ... -1.13333929e+00 4.45266426e-01 -2.25965953e+00] [ 1.59966350e+00 1.08345783e+00 -1.31375030e-01 ... -4.72613275e-02 9.27077889e-01 -1.47065651e+00] [ 1.81219852e+00 -1.22375858e+00 6.07465446e-01 ... 5.34694970e-01 -1.78691074e-01 1.07684600e+00]] [[ 1.35705245e+00 -1.94498181e+00 -6.74507022e-01 ... 7.16410577e-01 3.31063956e-01 1.23686939e-01] [ 1.24353278e+00 -1.12653649e+00 5.42829514e-01 ... -2.04495162e-01 -6.33848548e-01 2.84097493e-01] [ 3.03118682e+00 -8.63337219e-01 5.55707455e-01 ... 5.11542618e-01 2.53080279e-01 2.33594224e-01] ... [ 2.06520081e-01 -8.23809952e-02 -1.04718781e+00 ... 1.60033613e-01 7.27273345e-01 -2.26290727e+00] [ 1.39227831e+00 -1.24172831e+00 -6.44820035e-01 ... 9.07120764e-01 1.05661511e+00 -7.62219965e-01] [ 8.37331057e-01 2.80849244e-02 3.26041947e-03 ... 4.54647839e-01 2.16504073e+00 5.57669699e-01]] ... [[-1.87054527e+00 -1.07297100e-01 -1.45621955e-01 ... 
1.01662882e-01 3.52712810e-01 5.31178474e-01] [-4.26484913e-01 3.09570968e-01 -2.30813026e+00 ... -6.43099606e-01 1.64133355e-01 2.73472816e-02] [ 1.94232151e-01 -2.40257517e-01 2.41484180e-01 ... -3.62348527e-01 -6.34920537e-01 -1.47453040e-01] ... [-1.48540318e+00 -8.22501540e-01 1.10388803e+00 ... 9.13404167e-01 -6.96481764e-01 -7.04348683e-01] [ 3.75496626e-01 -1.05963266e+00 -1.62313640e+00 ... 1.49745211e-01 -8.72163296e-01 7.04698145e-01] [-5.28187156e-01 -3.62895936e-01 -1.69629470e-01 ... 1.00001967e+00 -9.48665798e-01 2.87943542e-01]] [[-1.58091736e+00 -1.00980306e+00 -1.85413420e-01 ... 5.33695996e-01 1.34610558e+00 -4.96969193e-01] [-7.42146134e-01 -5.91649950e-01 7.78516591e-01 ... -4.24473166e-01 8.74313116e-01 -1.38025248e+00] [ 4.57679451e-01 1.98265001e-01 -2.54879773e-01 ... 2.71216720e-01 3.46333444e-01 1.68666196e+00] ... [ 3.37983251e-01 -8.95227432e-01 1.54505181e+00 ... -2.75118530e-01 8.46596777e-01 1.08666050e+00] [ 2.08372265e-01 -9.08299625e-01 5.16259596e-02 ... -4.70230222e-01 7.94576287e-01 4.75613534e-01] [-5.73864877e-01 2.89518889e-02 -6.20913923e-01 ... -3.07149935e+00 1.62952200e-01 7.35158801e-01]] [[-7.70353079e-01 1.64731455e+00 -4.74549830e-01 ... -1.19478321e+00 1.70692533e-01 -2.02101216e-01] [ 1.04319882e+00 -3.71174142e-02 1.32921755e+00 ... -6.56217158e-01 5.14472961e-01 4.67819095e-01] [-1.49302185e+00 7.62767434e-01 -2.89168060e-01 ... -1.77676052e-01 2.99742490e-01 -4.25811827e-01] ... [-1.04129734e-02 1.91890025e+00 -2.24898666e-01 ... -1.74964219e-02 1.39712691e-01 -1.41517496e+00] [ 1.52617502e+00 4.77431826e-02 -1.63561308e+00 ... -9.20560732e-02 -9.07420099e-01 6.26702309e-01] [ 1.26699197e+00 -9.60134566e-01 5.12679219e-01 ... 1.32786155e+00 -1.04354930e+00 1.11552119e+00]]]] [[[[ 1.28444254e-01 -7.69190311e-01 4.79786173e-02 ... -1.94206059e-01 -1.89798847e-01 -1.57749891e+00] [ 4.49626625e-01 7.16387510e-01 -7.51531124e-02 ... 
-1.48739612e+00 -1.07755625e+00 -2.91670537e+00] [ 6.67565763e-01 -9.48738694e-01 1.18920100e+00 ... 1.36051178e+00 6.55066252e-01 6.91825628e-01] ... [ 9.50970531e-01 -3.18303108e-01 7.36346126e-01 ... -4.27379608e-01 -2.22559810e+00 1.94899380e-01] [-4.60302949e-01 8.62498939e-01 1.00547159e+00 ... 1.61663275e-02 -3.91928375e-01 -2.12050170e-01] [-1.16972935e+00 -9.83949363e-01 3.13903421e-01 ... 5.83636798e-02 5.85561153e-03 6.19440854e-01]] [[-1.15890920e+00 4.62079570e-02 9.64548826e-01 ... -1.54868579e+00 -2.25547459e-02 1.59500182e-01] [-1.37972486e+00 4.31481063e-01 6.41966045e-01 ... -2.06703059e-02 -5.36904037e-01 8.06042492e-01] [-1.40317452e+00 -1.03109133e+00 4.15878147e-01 ... 5.83341360e-01 -1.05181515e+00 -1.04486263e+00] ... [ 1.96297765e+00 -9.10320461e-01 1.44774270e+00 ... -2.41163611e-01 7.82382395e-03 9.90771353e-01] [ 1.45080876e+00 2.08654785e+00 -7.91699409e-01 ... -4.88910317e-01 -8.69470835e-02 -1.00804079e+00] [ 5.36996007e-01 -5.13185322e-01 2.42859110e-01 ... 2.61299682e+00 -4.02875543e-01 1.19286573e+00]] [[ 8.30402553e-01 -5.88252485e-01 -6.72806740e-01 ... 1.36752343e+00 1.13951302e+00 -1.77387461e-01] [-1.45960975e+00 1.19762039e+00 8.86821210e-01 ... 3.59513938e-01 5.60747385e-02 -6.26275420e-01] [-9.61182475e-01 -1.53153121e-01 -3.58607680e-01 ... -5.37591815e-01 9.30665016e-01 -1.63465762e+00] ... [-6.20182037e-01 -5.61106727e-02 4.87469703e-01 ... 2.17441380e-01 1.11568427e+00 -2.76606381e-01] [-4.63734061e-01 -4.27530557e-01 8.65402579e-01 ... -8.50720823e-01 -1.55064905e+00 1.33658350e-01] [ 6.74743116e-01 -1.38859041e-02 5.68310022e-01 ... 1.02269173e+00 5.35826445e-01 -2.69071013e-01]] ... [[-1.75939932e-01 6.75716341e-01 -3.93061608e-01 ... -1.45089135e-01 1.24699914e+00 -9.41989064e-01] [-5.71596205e-01 6.31898046e-01 3.58908921e-01 ... -3.59725028e-01 1.25502634e+00 -2.18935102e-01] [ 1.61590666e-01 -2.36403084e+00 5.31573057e-01 ... 7.52095342e-01 -3.06918323e-01 6.37672842e-01] ... 
[ 1.37906599e+00 -1.41118789e+00 1.04023442e-01 ... -3.63730341e-01 -1.48531592e+00 -1.49398661e+00] [ 1.48134485e-01 4.50390399e-01 1.15896869e+00 ... 1.94475651e+00 -1.00004685e+00 2.95606017e-01] [ 2.09775642e-01 1.57871997e+00 -6.63131118e-01 ... 5.73706865e-01 -5.13798632e-02 1.95047212e+00]] [[ 3.99227232e-01 -4.05042708e-01 3.74857098e-01 ... -6.09872282e-01 -3.90284866e-01 -2.85839178e-02] [ 5.93256474e-01 -3.66187006e-01 1.19859540e+00 ... 8.84443223e-01 7.11760402e-01 -5.90956509e-01] [ 1.92019850e-01 1.16572034e+00 1.27075946e+00 ... -5.44955134e-02 -8.54436904e-02 -2.10004989e-02] ... [-1.32642519e+00 1.14770877e+00 9.25942510e-02 ... -2.66963929e-01 7.88862169e-01 -2.78008640e-01] [-1.91849992e-01 2.87614793e-01 7.69792974e-01 ... 5.33795774e-01 1.74068546e+00 8.48140061e-01] [-2.47648552e-01 2.95105994e-01 -1.91489792e+00 ... -2.17115819e-01 2.03915969e-01 -9.18466821e-02]] [[-1.02825649e-01 -1.56910694e+00 8.93621296e-02 ... -1.80999696e-01 1.38871253e-01 -5.94765209e-02] [-1.55080199e-01 2.22913951e-01 -1.39534247e+00 ... 6.80980742e-01 -1.31232429e+00 7.60497302e-02] [ 1.64120585e-01 -7.35048532e-01 1.02112377e+00 ... 1.14290208e-01 -5.78791916e-01 6.93231300e-02] ... [ 1.51455367e+00 6.30307913e-01 5.93157172e-01 ... 1.23435223e+00 1.17793131e+00 -6.76178813e-01] [-1.22720385e+00 1.18980765e+00 2.79842138e-01 ... -5.14917910e-01 -9.60919619e-01 5.05104303e-01] [-2.40642026e-01 -5.78506431e-03 2.25882426e-01 ... -4.96611893e-01 -2.32171100e-02 8.18850517e-01]]] [[[ 1.53005552e+00 -2.92509906e-02 -2.71789461e-01 ... 1.84243426e-01 1.65199608e-01 3.33926529e-01] [ 9.42379832e-01 -3.06069732e-01 1.78809138e-03 ... 2.39033175e+00 -3.15999269e-01 1.08777320e+00] [-1.02761185e+00 -2.17342734e-01 -1.50372639e-01 ... -2.05841079e-01 -3.53237003e-01 5.00090957e-01] ... [-1.65967536e+00 9.54133034e-01 -7.09670663e-01 ... 7.24289000e-01 8.56795907e-01 7.39149153e-01] [ 1.59562483e-01 6.65463686e-01 3.29224914e-01 ... 
-8.39308202e-01 7.83792377e-01 -5.47714829e-01] [-3.00493240e-01 -4.67367947e-01 4.95047361e-01 ... 2.47705445e-01 -1.03608274e+00 1.59130692e+00]] [[ 1.95702240e-02 -1.68062449e+00 -5.12644768e-01 ... -5.77794552e-01 6.83327258e-01 -5.03898263e-01] [ 2.43938658e-02 -1.97348490e-01 -1.81371188e+00 ... 3.70193012e-02 -4.07481015e-01 -5.26103616e-01] [ 1.69496015e-02 -3.64589000e+00 1.08008397e+00 ... -8.68145645e-01 -1.65736198e+00 -7.59857118e-01] ... [-3.43649209e-01 1.09567595e+00 -6.47608101e-01 ... 1.75338790e-01 8.19799006e-01 8.57486069e-01] [ 1.52453816e+00 -9.54253376e-01 -3.62048239e-01 ... -9.69742179e-01 1.08796525e+00 1.55552059e-01] [-2.40599895e+00 2.70312548e-01 -5.03513813e-01 ... 1.06575735e-01 -6.13796890e-01 8.96719098e-02]] [[-7.99023628e-01 2.08915114e+00 -1.45140839e+00 ... 4.43318933e-01 -4.18895185e-01 6.24718904e-01] [-3.35733950e-01 1.04450881e+00 6.41071141e-01 ... 5.13728797e-01 -6.80611253e-01 -1.55819619e+00] [ 5.85390806e-01 -1.34543669e+00 -1.55745733e+00 ... 1.54485834e+00 3.51904845e+00 9.07376409e-01] ... [-1.45100331e+00 -6.76420569e-01 -4.93658423e-01 ... 1.08631723e-01 7.57965028e-01 1.04396617e+00] [-5.87788701e-01 -8.74255121e-01 -7.07216740e-01 ... -1.52784956e+00 4.72315401e-01 6.26697242e-01] [ 2.33543113e-01 -9.83573556e-01 2.07315612e+00 ... 1.16637254e+00 -6.94395065e-01 -2.08552647e+00]] ... [[-2.65452862e-01 2.71031952e+00 6.96013272e-01 ... -1.16496718e+00 4.18646246e-01 2.38438681e-01] [ 9.75983620e-01 8.33730325e-02 1.38632989e+00 ... 4.50760216e-01 -2.73933697e+00 -1.38072753e+00] [ 8.12654555e-01 -1.23165512e+00 5.84355772e-01 ... -6.96475863e-01 -3.69081527e-01 1.92867652e-01] ... [ 1.95839733e-01 3.44270885e-01 2.80464441e-01 ... 1.12336051e+00 -1.57653141e+00 -5.73618650e-01] [ 6.82178000e-03 -4.45458025e-01 -4.00049746e-01 ... 1.12069800e-01 4.15579766e-01 -7.78871834e-01] [ 8.86638522e-01 -4.10121419e-02 -1.25621212e+00 ... 
4.84671772e-01 -1.93810868e+00 1.81928658e+00]] [[ 1.64018142e+00 -6.31593347e-01 -1.69497579e-01 ... 8.84771705e-01 -9.49831128e-01 -9.61820424e-01] [ 2.82073259e-01 -2.60657817e-01 -1.68025768e+00 ... -1.03114891e+00 -5.51938593e-01 -5.40286720e-01] [ 8.24589193e-01 -1.68659925e+00 7.81393170e-01 ... -2.34129205e-01 -1.30347833e-01 -7.56173909e-01] ... [ 5.01487851e-01 2.18862963e+00 5.82691491e-01 ... 9.35512543e-01 -1.46528512e-01 9.24311042e-01] [ 2.16861337e-01 -1.35419536e+00 -9.96695340e-01 ... -3.07007551e-01 -9.52698439e-02 -3.60476077e-01] [-2.13948274e+00 -1.26516342e+00 1.19568753e+00 ... 1.24970865e+00 3.80804777e-01 -2.36981177e+00]] [[-4.29076254e-01 3.27605963e-01 -7.20227182e-01 ... 1.23367107e+00 -2.76835293e-01 -7.58132815e-01] [ 1.08991370e-01 2.08580047e-01 7.57072151e-01 ... 8.42161179e-01 -1.69469584e-02 1.48877871e+00] [ 1.62514210e-01 -7.36025751e-01 -5.63007891e-01 ... 3.27411920e-01 -1.50518656e-01 4.80148196e-01] ... [-1.07938349e+00 -1.59776986e+00 -2.74332142e+00 ... 2.84351206e+00 2.18683824e-01 1.05882859e+00] [-1.51512647e+00 1.41655076e+00 5.74846447e-01 ... -8.13811183e-01 2.47753695e-01 1.54263079e+00] [-1.56233811e+00 -9.58481550e-01 -1.17002571e+00 ... 6.12603836e-02 -5.92480004e-01 -2.92159498e-01]]] [[[ 2.16133523e+00 1.24250732e-01 3.32947373e-01 ... -6.88449442e-01 -1.06889534e+00 5.68488598e-01] [ 7.18161583e-01 2.00747299e+00 5.16361535e-01 ... 9.55750763e-01 1.52209353e+00 -6.41158879e-01] [-8.63170743e-01 1.07353020e+00 5.29545784e-01 ... -5.74765503e-01 5.39808452e-01 -4.10860270e-01] ... [-1.72387525e-01 3.65137726e-01 -5.54466605e-01 ... -6.33462489e-01 -2.17358923e+00 -4.88574475e-01] [ 7.87736118e-01 -5.22152126e-01 1.41452587e+00 ... 2.17544055e+00 1.54827207e-01 -3.61912608e-01] [-7.66987145e-01 5.23322284e-01 -3.38637918e-01 ... -1.53882518e-01 1.33865213e+00 -1.62919676e+00]] [[-6.17489755e-01 -3.58366251e-01 -1.47701967e+00 ... 
4.64374989e-01 1.38599932e+00 5.01608491e-01] [ 3.43321502e-01 5.33839881e-01 4.67450589e-01 ... -1.26141262e+00 -1.92957258e+00 -1.50645959e+00] [ 1.52363706e+00 -1.33330217e-02 -9.45887327e-01 ... -1.65561521e+00 2.05410552e+00 -1.49286103e+00] ... [-4.02918607e-01 6.93270490e-02 -4.71456021e-01 ... 1.66675615e+00 5.71420372e-01 2.50111938e-01] [ 1.64820004e+00 1.78337276e-01 6.00407243e-01 ... 5.95651746e-01 -9.11209404e-01 -1.53380787e+00] [ 1.27536178e-01 -2.38375753e-01 -5.92889786e-01 ... 3.44366163e-01 -3.72363716e-01 -4.79284316e-01]] [[ 1.62208885e-01 -6.00539446e-01 -1.09732056e+00 ... -2.71376312e-01 -6.57588840e-01 -1.40452552e+00] [ 1.23268023e-01 1.31515348e+00 9.14600212e-03 ... -1.34187317e+00 9.69134033e-01 -1.45821428e+00] [ 7.85080314e-01 -1.32074559e+00 -3.54498982e-01 ... -2.05665633e-01 -2.72200793e-01 3.26756686e-01] ... [ 1.36770457e-02 4.03947204e-01 -4.26939249e-01 ... -4.78943795e-01 5.95829785e-01 -1.07463574e+00] [-2.19405365e+00 1.51810801e+00 1.83408141e-01 ... 4.10478055e-01 3.03595066e-01 -9.59445715e-01] [-1.02196622e+00 1.12585282e+00 -3.78540635e-01 ... -4.80629176e-01 1.15265167e+00 2.28506833e-01]] ... [[ 7.05423892e-01 1.10725570e+00 1.53098595e+00 ... 1.28314805e+00 4.64095175e-01 6.29920244e-01] [ 2.40788266e-01 2.00474456e-01 -7.13002563e-01 ... 1.20644677e+00 2.28222340e-01 2.06871510e+00] [ 1.56553614e+00 4.07323092e-01 -6.58487439e-01 ... 6.92929095e-03 -1.04255438e+00 1.64180517e-01] ... [-1.54807007e+00 1.24920082e+00 -2.52553105e-01 ... -9.48371470e-01 1.05552292e+00 1.21143246e+00] [ 1.07806516e+00 -1.58460462e+00 -9.76105213e-01 ... 1.89038038e+00 5.39440036e-01 -9.16588120e-04] [-2.86976080e-02 -9.76545691e-01 5.70043027e-02 ... 6.39134943e-01 1.78055835e+00 -6.91189431e-03]] [[ 1.31892419e+00 1.34332463e-01 1.86390567e+00 ... 5.03923476e-01 3.29957865e-02 1.36392534e+00] [ 1.01545498e-01 2.20711321e-01 -1.63608718e+00 ... 
-1.74660027e-01 -1.22624837e-01 -1.49390686e+00] [-9.95228142e-02 1.19774270e+00 1.23856103e+00 ... -2.22328231e-01 2.87776351e-01 1.74254215e+00] ... [ 7.55143285e-01 7.30895758e-01 5.89414716e-01 ... -9.54059303e-01 -3.39841366e-01 1.07817435e+00] [-5.20184822e-02 -2.33579889e-01 1.08029366e+00 ... 6.35090351e-01 -6.46758914e-01 -5.40969372e-01] [ 1.38166010e-01 -3.28301579e-01 1.96734214e+00 ... -2.28997254e+00 2.54303098e-01 1.71424329e-01]] [[-1.24209011e+00 8.77959430e-01 2.33094883e+00 ... 1.52971494e+00 4.08690989e-01 -7.65755892e-01] [-2.03996754e+00 -4.37161103e-02 1.19098313e-01 ... -4.18927431e-01 -1.24143505e+00 7.94800699e-01] [-1.96801454e-01 1.33646913e-02 -1.93600464e+00 ... 2.48740256e-01 -1.20719634e-01 -3.50202858e-01] ... [-4.44462061e-01 6.65815592e-01 -8.37635458e-01 ... 7.25054741e-01 -1.46610844e+00 1.02406752e+00] [-1.48372340e+00 4.94159490e-01 -1.24047649e+00 ... 8.84917378e-02 -1.86507210e-01 2.04962611e+00] [ 1.07632983e+00 7.15260088e-01 -3.67420971e-01 ... -1.23196676e-01 1.50298595e-01 5.17496645e-01]]] [[[-1.60089776e-01 -2.09203386e+00 1.89538091e-01 ... 2.67003506e-01 -2.13190961e+00 -2.71914220e+00] [ 1.61041245e-01 -1.52808952e+00 -1.74587953e+00 ... 1.21349417e-01 1.82964647e+00 2.49143153e-01] [-7.06790745e-01 3.22525889e-01 4.56292838e-01 ... -7.78493702e-01 -2.44914487e-01 7.94227600e-01] ... [-9.47640777e-01 2.13355750e-01 3.21833491e-01 ... -2.77722120e-01 -9.83932614e-02 -1.81095564e+00] [ 1.43976355e+00 -1.11128581e+00 1.42105317e+00 ... -5.25604129e-01 -8.28269005e-01 -1.17650680e-01] [-5.68654954e-01 -7.10930169e-01 -9.49269831e-01 ... 3.79058838e-01 8.52466345e-01 9.46962774e-01]] [[ 3.97684127e-01 -9.37641442e-01 9.90292549e-01 ... -3.46046209e-01 7.48286009e-01 2.99994975e-01] [ 1.60115755e+00 2.29854286e-01 -1.83204949e+00 ... -2.56362033e+00 1.05555761e+00 -1.32937706e+00] [ 2.15756983e-01 1.66606522e+00 1.01115155e+00 ... 2.27670103e-01 4.98794198e-01 -1.27182141e-01] ... 
[-1.05288044e-01 1.63471925e+00 -1.21343374e+00 ... -1.49727201e+00 1.06954239e-01 2.37680361e-01] [-2.17788959e+00 -3.02653491e-01 -9.61084366e-01 ... -8.32398236e-01 8.57778609e-01 -2.32268259e-01] [ 4.46048975e-01 2.58994877e-01 5.93840539e-01 ... 1.07869565e-01 -4.15714860e-01 2.01941922e-01]] [[-1.55972290e+00 -2.04525456e-01 -1.08433843e+00 ... -1.96658611e+00 9.48760808e-02 2.99741209e-01] [ 4.40251417e-02 -9.46126431e-02 -8.28027844e-01 ... -1.51419055e+00 -2.40976739e+00 -5.92878222e-01] [-8.18813384e-01 2.80501768e-02 -2.95491189e-01 ... 8.72640729e-01 -8.50213587e-01 6.89649463e-01] ... [-1.40576935e+00 3.24066013e-01 -1.32812333e+00 ... -9.32315469e-01 1.68455224e-02 4.77550328e-02] [-1.37192762e+00 -8.45178068e-01 1.14346111e+00 ... 1.72838497e+00 -4.67190266e-01 -2.31576309e-01] [ 1.71317780e+00 1.14989138e+00 4.38657910e-01 ... 2.50673387e-02 -5.13370395e-01 6.91230953e-01]] ... [[-1.43753827e+00 9.84938294e-02 -4.66584235e-01 ... -2.07023883e+00 1.84377480e+00 -1.23292923e+00] [ 1.79352462e+00 -1.96314201e-01 2.19636559e+00 ... -1.62479687e+00 -4.45971847e-01 -1.13583826e-01] [-5.82918286e-01 -6.49664178e-02 -6.05986178e-01 ... -5.77743948e-01 -1.22601882e-01 2.13753080e+00] ... [ 4.45330381e-01 -1.74052298e+00 3.65745962e-01 ... -7.76681781e-01 7.65285313e-01 6.44023180e-01] [-6.27864480e-01 -2.12309670e+00 1.68944106e-01 ... -1.70368016e+00 1.15412939e+00 5.65402091e-01] [ 2.65413493e-01 -5.01462877e-01 -1.18553989e-01 ... -9.31810200e-01 3.81166816e-01 -4.64718670e-01]] [[-1.37272954e-01 1.55962849e+00 -1.33613631e-01 ... 8.70867252e-01 8.84767115e-01 -1.54577315e+00] [-2.49250501e-01 -3.14343184e-01 2.72188596e-02 ... 1.16366589e+00 8.56333412e-03 1.39119124e+00] [-5.43208301e-01 1.27167284e+00 2.08215976e+00 ... -1.16848862e+00 4.51815218e-01 -6.57387555e-01] ... [ 1.31562102e+00 -1.45348454e+00 -4.58730370e-01 ... -1.38646090e+00 4.67384547e-01 -4.59756285e-01] [-3.64218712e-01 1.28710985e+00 2.14397654e-01 ... 
-1.86147302e-01 5.61677694e-01 7.54340589e-01] [-1.05331635e+00 5.73976874e-01 -6.15262151e-01 ... 8.32772553e-01 7.43878126e-01 -8.85813951e-01]] [[ 3.70757550e-01 8.55840862e-01 -1.33231449e+00 ... -6.66320547e-02 1.22103024e+00 4.41707373e-01] [ 6.25491977e-01 -5.88076055e-01 -6.08993061e-02 ... -7.53410876e-01 -1.26755261e+00 -1.21220447e-01] [-6.93957090e-01 2.76927173e-01 -3.22702259e-01 ... 1.98919094e+00 6.98605597e-01 -1.29313439e-01] ... [ 4.95002091e-01 2.45134398e-01 -5.30989766e-01 ... 5.22788107e-01 9.63037789e-01 4.47828919e-01] [-9.23626184e-01 -1.03334457e-01 -2.84158979e-02 ... -8.91156554e-01 7.24140108e-01 -1.57231867e+00] [ 4.70124841e-01 -8.43237162e-01 -2.49185443e-01 ... 1.53415769e-01 -9.41612422e-01 8.68688583e-01]]] [[[ 1.22847259e+00 1.78052807e+00 -7.00765908e-01 ... -8.42822850e-01 1.56027722e+00 -7.13808060e-01] [-2.17961431e-01 -2.13324636e-01 -2.24575615e+00 ... 1.49908316e+00 -7.32441366e-01 5.11984289e-01] [ 1.11170042e+00 -1.28315523e-01 2.15581369e+00 ... -6.94314957e-01 1.13326721e-01 -2.54900277e-01] ... [ 6.66699052e-01 -4.66840900e-02 -1.16776526e+00 ... 1.45137072e+00 -5.65129220e-01 -1.70520043e+00] [ 8.00502717e-01 1.28677452e+00 -1.49208784e-01 ... 3.36699903e-01 -6.59655750e-01 -1.64216781e+00] [ 8.66080165e-01 -5.58525324e-01 -6.26595020e-01 ... 1.17982447e+00 -4.88901973e-01 -3.84129584e-02]] [[ 8.52384120e-02 -6.83621526e-01 4.52280007e-02 ... -8.10053647e-01 1.51853383e+00 1.15809214e+00] [-1.88776970e-01 1.92466259e+00 2.93698788e-01 ... 1.09906673e+00 -2.94581890e-01 1.18198109e+00] [ 3.22731018e-01 9.87141609e-01 -6.48714416e-03 ... -1.86620069e+00 -2.93546259e-01 1.40569285e-01] ... [-1.53132367e+00 1.27684653e-01 1.55522037e+00 ... 1.96026340e-01 -1.49763930e+00 4.32295203e-01] [ 1.54475474e+00 -1.64283073e+00 -1.23606026e+00 ... -2.85246968e-01 1.59135795e+00 -2.06740230e-01] [-6.01181924e-01 1.25879550e+00 9.78163183e-01 ... 
-1.86239338e+00 2.56603688e-01 1.10496603e-01]] [[-3.27619016e-02 -9.30477560e-01 -3.53978723e-02 ... 9.28929150e-01 1.39326572e+00 7.63742149e-01] [ 6.97500467e-01 2.19923544e+00 -1.14627135e+00 ... 4.75089133e-01 1.64242595e-01 -1.22963774e+00] [-1.76529503e+00 -3.31407756e-01 2.81066149e-01 ... -1.78703642e+00 4.58729297e-01 2.71277219e-01] ... [ 2.30922669e-01 7.73906112e-01 1.85774893e-01 ... -2.05291390e+00 5.47257066e-01 1.88770711e+00] [-1.39163017e-01 -7.83589482e-01 9.79762316e-01 ... 7.73683131e-01 -1.04345047e+00 5.71269810e-01] [-8.91739368e-01 -2.08075094e+00 -4.21126157e-01 ... 2.70529747e-01 4.18834656e-01 -8.28963041e-01]] ... [[-1.59357643e+00 9.44777489e-01 2.03176641e+00 ... -1.18935943e+00 2.82164901e-01 -1.08309329e+00] [-5.53282320e-01 -4.11657006e-01 1.11292708e+00 ... 1.34663716e-01 8.82503033e-01 -3.08681965e-01] [-1.32503524e-01 -2.59454519e-01 -7.22906142e-02 ... 2.31556997e-01 1.38046515e+00 -1.49495542e+00] ... [ 2.65171587e-01 -3.77731830e-01 -4.93425280e-01 ... 2.51921296e-01 1.49548244e+00 8.18800926e-01] [ 6.93608582e-01 3.56326610e-01 2.53144336e+00 ... 4.21952605e-01 5.75105309e-01 -7.64661193e-01] [ 8.20156753e-01 1.32078282e-03 1.30266511e+00 ... -3.95009667e-01 5.69823086e-01 -1.72622573e+00]] [[ 1.96494251e-01 1.55207098e+00 -9.86040652e-01 ... 3.71250391e-01 3.91471565e-01 -4.15889025e-01] [-1.16910231e+00 -1.56065571e+00 7.76926816e-01 ... 2.31161880e+00 -1.62056014e-01 3.09201032e-02] [ 3.47234726e-01 1.01359761e+00 1.02163613e+00 ... -7.22061023e-02 -2.90656447e-01 -1.62226188e+00] ... [-1.49608982e+00 2.99672842e-01 1.29021138e-01 ... 1.21825114e-01 -5.03950417e-01 1.57113802e+00] [-1.57123709e+00 -8.40250492e-01 7.83206820e-01 ... 1.39987946e+00 -1.94551861e+00 1.11945403e+00] [ 6.02093816e-01 -4.51615244e-01 2.15754604e+00 ... 1.31443131e+00 9.73869085e-01 -1.46383214e+00]] [[ 2.72830814e-01 5.37237972e-02 8.74573112e-01 ... 
2.16681644e-01 -6.61270201e-01 6.68219984e-01] [-3.45351458e-01 -7.61896431e-01 -6.51001871e-01 ... 2.36879969e+00 2.89112832e-02 1.05786169e+00] [ 2.60238856e-01 -1.41851485e+00 -4.88561243e-01 ... 1.38102233e+00 5.61255105e-02 3.74333203e-01] ... [ 5.62187135e-01 9.53423262e-01 1.22247708e+00 ... -6.38752878e-01 1.35389006e+00 -2.61672437e-01] [-1.25949815e-01 1.58371675e+00 1.96296349e-01 ... 1.37029755e+00 -1.49323428e+00 -1.26645136e+00] [-6.22489810e-01 -3.13311487e-01 6.08941257e-01 ... -3.08257461e-01 2.27099872e+00 5.96399188e-01]]] [[[ 4.81122136e-01 -3.51135194e-01 1.65776277e+00 ... 1.85252571e+00 -4.46933955e-01 6.05660141e-01] [ 7.55418122e-01 -1.55671418e-01 5.73171556e-01 ... -2.29051635e-01 -2.82720625e-01 -9.02098000e-01] [-1.00110424e+00 -7.34019041e-01 -9.78932738e-01 ... 5.29848278e-01 1.31147102e-01 2.32107925e+00] ... [-9.48157191e-01 -2.48273522e-01 -1.17861927e+00 ... 1.12826741e+00 -3.93737376e-01 -1.16329563e+00] [-2.66427845e-01 8.38619590e-01 -1.03145647e+00 ... 4.45579499e-01 -4.06488895e-01 -1.07247722e+00] [-4.60016698e-01 1.07283199e+00 -1.91790953e-01 ... 1.07153821e+00 5.56823432e-01 -1.25971520e+00]] [[ 1.77893043e+00 9.29998517e-01 -2.13415670e+00 ... -4.03870821e-01 -3.88038903e-01 -4.89576817e-01] [-2.26325303e-01 1.13098609e+00 -2.84221500e-01 ... -1.20298751e-01 7.09602416e-01 1.48166120e-01] [ 1.26107901e-01 -2.04623961e+00 2.07816854e-01 ... -1.43861580e+00 -9.82544243e-01 6.61993682e-01] ... [-1.95062780e+00 -1.29369771e+00 -5.35464883e-01 ... 5.94430208e-01 8.47498059e-01 -1.18180478e+00] [ 7.64915943e-01 -1.18285608e+00 -5.19017458e-01 ... 1.44586396e+00 -1.31020379e+00 -1.41683769e+00] [-1.28747404e+00 3.77296716e-01 1.99782267e-01 ... 1.87512830e-01 -3.25421453e-01 6.03285015e-01]] [[-5.82424641e-01 -4.01541322e-01 1.19336450e+00 ... 4.54300553e-01 1.70143187e-01 -6.70079231e-01] [-9.42366123e-01 2.33790949e-01 4.51436788e-02 ... 
1.95457816e-01 -2.06406784e+00 -1.54851234e+00] [-8.48056674e-01 -2.10884824e-01 -5.38819313e-01 ... 2.16087773e-01 -5.35836518e-01 -1.40438390e+00] ... [-8.91544402e-01 3.39437008e-01 -1.32875156e+00 ... -1.21497178e+00 1.83161974e-01 1.62827933e+00] [-1.36974707e-01 1.85390580e+00 8.51952493e-01 ... -4.46692228e-01 -1.37534246e-01 7.68241227e-01] [ 1.95397288e-02 -9.60203290e-01 1.42306292e+00 ... -2.54573298e+00 -4.69982699e-02 7.10527301e-01]] ... [[-1.86767563e-01 1.35646671e-01 -2.76471043e+00 ... 1.95580199e-01 1.16402078e+00 7.37535536e-01] [ 1.74855375e+00 7.00138330e-01 1.82253969e+00 ... 4.73802865e-01 2.70584971e-01 -2.82376617e-01] [ 1.09742127e-01 -1.29935312e+00 -6.55927420e-01 ... 6.16058707e-01 -8.28547299e-01 3.45156938e-01] ... [ 1.85125113e-01 -1.14396900e-01 -2.28860331e+00 ... -3.74998897e-01 -3.27174753e-01 2.23244238e+00] [-4.29169476e-01 -4.96149778e-01 -2.30057311e+00 ... 1.80430990e-02 4.64026481e-02 -1.68408048e+00] [ 6.46951556e-01 -6.02505095e-02 1.28878915e+00 ... -3.06624591e-01 2.71843463e-01 -2.45928057e-02]] [[ 7.39804924e-01 6.09583080e-01 -7.75924504e-01 ... -1.78267896e+00 -9.79103506e-01 9.05950904e-01] [ 9.71837282e-01 -4.25457299e-01 8.77340436e-02 ... -3.32789630e-01 1.72430313e+00 9.37785566e-01] [ 3.44952106e-01 -2.36906528e+00 -2.63953328e+00 ... 1.75126046e-01 -1.54333031e+00 -1.20058095e+00] ... [ 1.27945662e+00 4.05595928e-01 8.76281321e-01 ... -7.75364637e-01 -2.05793336e-01 2.24068522e+00] [-7.94719636e-01 -1.12350070e+00 -3.26918244e+00 ... 1.66545868e+00 -2.84033984e-01 -1.37219727e+00] [-1.48751428e-02 5.63667774e-01 -2.00743765e-01 ... 1.88881767e+00 -4.71032083e-01 -5.49566031e-01]] [[-4.19794232e-01 2.44714871e-01 -2.45107383e-01 ... 5.16120851e-01 1.15495062e+00 -4.01664935e-02] [-7.70017087e-01 1.07461978e-02 6.01973712e-01 ... 4.91056502e-01 -1.53426468e+00 -9.60570097e-01] [ 1.29796505e+00 4.40724194e-01 8.18152249e-01 ... -2.51122177e-01 8.59249890e-01 9.81022954e-01] ... 
[-1.83891699e-01 -4.72118467e-01 -2.33619645e-01 ... -1.27074853e-01 7.92614877e-01 2.26725146e-01] [ 1.67697534e-01 2.22316790e+00 -3.84913176e-01 ... -1.50216150e+00 5.65848768e-01 1.08078033e-01] [-8.42984498e-01 -1.66358685e+00 -4.88000035e-01 ... 9.59794998e-01 -1.84357369e+00 4.07617867e-01]]]] ... [[[[ 4.36536491e-01 -8.37198198e-01 -1.01501405e+00 ... 6.93288684e-01 -7.14084566e-01 1.41066897e+00] [ 1.56840765e+00 -1.70016372e+00 2.71913886e-01 ... -1.06213037e-02 1.88077107e-01 -7.89635360e-01] [ 4.76585627e-01 6.47408128e-01 -4.70100373e-01 ... 1.83567250e+00 -1.71963954e+00 -1.01676929e+00] ... [-7.34794438e-01 1.48342705e+00 1.41763437e+00 ... 8.14157367e-01 -1.02247727e+00 -2.04503998e-01] [-5.63530803e-01 2.78502673e-01 -5.70041656e-01 ... 2.06796646e-01 -6.98255002e-01 2.37281561e-01] [ 8.03423285e-01 -4.83918786e-01 -1.15676439e+00 ... -4.51365322e-01 -1.70643210e+00 8.62729847e-01]] [[-1.93241000e+00 1.56142819e+00 -5.71213216e-02 ... -5.32378435e-01 -2.99577951e-01 -1.75530761e-01] [-2.21049190e-01 6.95880413e-01 2.88551545e+00 ... -1.88489184e-01 1.52373827e+00 4.10892338e-01] [-5.89900196e-01 -2.99772477e+00 8.14984381e-01 ... 1.97455883e-01 2.49758482e+00 -1.05320716e+00] ... [ 1.41407833e-01 -5.11123717e-01 -3.00268084e-01 ... 9.21748221e-01 -2.80765682e-01 1.95636731e-02] [ 1.02819252e+00 2.65346432e+00 5.47617793e-01 ... -1.08335197e+00 -8.90898824e-01 2.66108483e-01] [ 2.59269536e-01 1.77210367e+00 4.45226699e-01 ... 1.94610870e+00 -4.14899588e-02 1.00136364e+00]] [[-1.62224782e+00 7.81960249e-01 -3.31805170e-01 ... -4.51638639e-01 9.42553818e-01 8.78875136e-01] [-6.10819519e-01 9.32095706e-01 -3.66130173e-01 ... 1.88931572e+00 -1.01670825e+00 2.21021935e-01] [ 8.68205205e-02 -8.96271348e-01 1.27755404e+00 ... 3.41718644e-01 -5.14196217e-01 -5.71639001e-01] ... [-2.16360784e+00 -1.71566093e+00 -5.20398855e-01 ... -1.26673549e-01 1.00750756e+00 1.57329428e+00] [-7.04091311e-01 -1.53584528e+00 -9.24965858e-01 ... 
-1.79906934e-01 1.83836579e+00 -8.33006442e-01] [ 2.03621316e+00 3.86967272e-01 -9.69223902e-02 ... 2.49041036e-01 5.04616022e-01 9.64078248e-01]] ... [[ 1.67889714e+00 -1.02613592e+00 -3.13269675e-01 ... 3.88775319e-01 -1.11189377e+00 1.96647123e-02] [-9.10385191e-01 -1.45195067e-01 4.14306909e-01 ... 1.17777251e-01 -4.77012515e-01 2.59686470e-01] [ 5.22772551e-01 -8.67052019e-01 -5.12180865e-01 ... -1.04943955e+00 7.66958416e-01 -1.06601310e+00] ... [-5.96542299e-01 -7.51134932e-01 -4.85689759e-01 ... -9.86608267e-01 -1.15443432e+00 3.38909179e-01] [-8.72664690e-01 5.11722088e-01 1.17655432e+00 ... -8.95411670e-02 -2.65078217e-01 2.04106390e-01] [-2.81467497e-01 9.78620410e-01 7.82638669e-01 ... 9.25063968e-01 -1.16747066e-01 1.38868535e+00]] [[-1.04533207e+00 -1.27570760e+00 -9.57644284e-02 ... -1.44557250e+00 -1.22612759e-01 -3.26029927e-01] [ 4.50987332e-02 -9.10538256e-01 -4.27439660e-01 ... 4.17242825e-01 -3.40821385e-01 -3.13199647e-02] [ 2.44902924e-01 8.63539994e-01 9.08304691e-01 ... -1.04191124e+00 -4.37059343e-01 -9.36607063e-01] ... [ 7.61082649e-01 -1.05982363e+00 8.41325581e-01 ... 6.03440106e-01 -6.97172403e-01 -1.48908591e+00] [ 1.96503118e-01 -3.49453926e-01 -6.57097101e-01 ... 5.90412438e-01 3.90591919e-01 3.94048899e-01] [ 9.30728853e-01 9.36879456e-01 -4.56527323e-02 ... 6.92667484e-01 5.80921590e-01 1.02872275e-01]] [[ 5.58308959e-01 -9.81828094e-01 -7.68508017e-01 ... 4.24774922e-03 -1.01079226e+00 2.28566840e-01] [ 1.57044351e-01 -7.62175918e-02 6.02438033e-01 ... 1.51241207e+00 -2.45886058e-01 -3.31346810e-01] [-2.32759342e-01 -7.99262643e-01 6.10696375e-01 ... -1.10400155e-01 -7.06362426e-01 -1.18918955e-01] ... [-7.01854944e-01 -1.77127063e+00 6.31672978e-01 ... 6.72869503e-01 -5.38762212e-01 1.41174591e+00] [-7.68561721e-01 5.93785226e-01 -1.00592506e+00 ... -1.77098846e+00 -7.25627720e-01 5.51743448e-01] [ 8.87860239e-01 -9.55304742e-01 -2.95787364e-01 ... 
7.61394322e-01 -9.72974479e-01 -8.55413303e-02]]] [[[-4.02331650e-01 1.04995656e+00 -6.37172341e-01 ... 2.26743314e-02 -4.33735609e-01 9.25613701e-01] [-4.63899463e-01 -8.76577646e-02 -1.84371746e+00 ... 4.92973357e-01 -3.90891016e-01 1.80479264e+00] [-3.32284629e-01 -8.35307956e-01 5.91227353e-01 ... 8.23382020e-01 -7.30859756e-01 3.82324718e-02] ... [-2.21808815e+00 -9.99771535e-01 1.70095015e+00 ... -7.26143837e-01 1.03002536e+00 -2.02360082e+00] [-4.58799720e-01 3.35095167e-01 -1.25877750e+00 ... -9.39618289e-01 5.96639574e-01 7.95726717e-01] [-1.10989273e+00 3.04010630e-01 -2.68283039e-01 ... -1.92991614e-01 1.13856661e+00 4.81329829e-01]] [[ 2.89624095e-01 5.59368253e-01 -1.51423252e+00 ... -1.71807051e-01 3.68200421e-01 -7.90430367e-01] [-1.37770426e+00 2.02449822e+00 -3.97438347e-01 ... -2.01208711e+00 -1.61183167e+00 -1.92114699e+00] [ 6.69038892e-01 4.09880191e-01 -1.76286489e-01 ... 1.54038936e-01 3.71512473e-01 -1.27071369e+00] ... [ 7.15347767e-01 -3.03042140e-02 -1.14224482e+00 ... -1.76204813e+00 2.81099749e+00 -1.52801722e-01] [ 3.71006280e-01 1.23736978e+00 3.54473531e-01 ... 1.86237246e-01 -2.75023490e-01 -3.33503664e-01] [ 7.40547776e-01 -2.97248930e-01 1.58762479e+00 ... 1.18566167e+00 4.44228023e-01 -4.54073310e-01]] [[ 1.07333875e+00 1.00218081e+00 -4.57604766e-01 ... 7.55361980e-03 -1.26422215e+00 1.32601357e+00] [ 9.55503643e-01 -7.52798557e-01 1.40474796e+00 ... 4.12548557e-02 6.92317724e-01 1.15685046e+00] [-6.89199626e-01 1.07158089e+00 -8.40928137e-01 ... 6.89978659e-01 2.39451811e-01 6.24954104e-01] ... [ 1.23767769e+00 3.53075534e-01 -5.24956174e-02 ... 1.05225039e+00 -1.43839598e+00 -2.24956751e-01] [-1.77642733e-01 -6.88705146e-02 -9.39250529e-01 ... 1.60287142e+00 1.66918635e-01 -1.54475439e+00] [ 1.03965712e+00 1.05290341e+00 8.04073393e-01 ... -8.63655746e-01 1.49502897e+00 5.85393369e-01]] ... [[ 1.38584507e+00 8.14741790e-01 9.55837667e-02 ... 
1.20033002e+00 -1.26625621e+00 -2.32133120e-01] [-1.49845636e+00 -3.19512010e-01 1.25874949e+00 ... -1.24803580e-01 1.77081919e+00 1.15922356e+00] [-6.00439906e-01 8.94468665e-01 -3.64894778e-01 ... -2.21592844e-01 3.45197350e-01 1.04318154e+00] ... [ 8.45039546e-01 -1.67981339e+00 3.08246732e-01 ... -9.68018651e-01 -1.36499286e+00 -1.21497676e-01] [-5.08138061e-01 5.31817853e-01 -4.54546422e-01 ... 2.56243706e-01 2.50549912e-01 2.04214931e+00] [-2.18434262e+00 8.14131796e-01 1.85114831e-01 ... 8.72293711e-01 -3.97206157e-01 -4.89379197e-01]] [[ 8.47885191e-01 -1.42116239e-02 -6.19716585e-01 ... -3.90335053e-01 1.53424263e+00 -2.91529357e-01] [-7.98696220e-01 -6.92532837e-01 -5.87817490e-01 ... -5.40074229e-01 2.70815104e-01 3.83696079e-01] [-5.64595461e-01 -4.94370818e-01 -1.25329089e+00 ... 1.06446251e-01 -2.59216285e+00 1.57451594e+00] ... [-4.53714550e-01 2.82496721e-01 1.08116770e+00 ... -4.05308098e-01 1.22561884e+00 -1.18163407e+00] [-7.14238703e-01 1.58839536e+00 -9.19696689e-01 ... 1.74101508e+00 -9.45411623e-02 6.91352487e-01] [ 4.29024369e-01 3.68464202e-01 -9.53082263e-01 ... -9.05863583e-01 1.21128809e+00 -6.92231953e-01]] [[-2.01352254e-01 -1.04828215e+00 -6.20517552e-01 ... -5.00632226e-01 -2.12880182e+00 -6.30413890e-01] [ 1.63270235e+00 -1.48045456e+00 -1.47633761e-01 ... 1.73626256e+00 1.02919888e+00 5.98753467e-02] [-5.16836464e-01 -9.77983296e-01 4.09163445e-01 ... 1.67051148e+00 5.74878514e-01 -7.13088989e-01] ... [-4.42725927e-01 7.52309918e-01 -2.17625536e-02 ... 1.35514104e+00 -8.79451558e-02 -5.32915235e-01] [ 1.95919380e-01 9.29511368e-01 5.32479882e-01 ... -1.53741896e+00 -8.04148793e-01 -1.98638380e-01] [-4.76759195e-01 -1.55707633e+00 5.27283669e-01 ... 2.69604325e+00 2.32085609e+00 9.91925538e-01]]] [[[-1.54478014e+00 2.04223609e+00 1.64156958e-01 ... 1.10158527e+00 -1.00446594e+00 4.17813331e-01] [-9.91566896e-01 1.30432582e+00 -4.24085140e-01 ... 
4.55589652e-01 -1.57329381e-01 -1.90539503e+00] [ 5.95411301e-01 -5.43706596e-01 6.64277613e-01 ... -1.23584628e+00 -1.33175242e+00 -4.57739621e-01] ... [-2.65523762e-01 7.14970887e-01 7.44239569e-01 ... 9.17311013e-01 -2.08544064e+00 -7.30023921e-01] [-1.10736454e+00 4.89151895e-01 -8.51740062e-01 ... 8.33237529e-01 -5.08557320e-01 2.36574697e+00] [ 1.37921607e+00 -6.64519906e-01 4.85523283e-01 ... 1.02234817e+00 6.57512426e-01 -6.06817782e-01]] [[-1.88691616e-01 6.65802181e-01 -2.77198386e+00 ... 5.38468242e-01 -1.14672804e+00 -1.96910337e-01] [ 2.09718752e+00 -1.54099837e-01 -3.75807941e-01 ... -1.20329416e+00 -1.39488280e+00 -1.15936351e+00] [ 4.24945774e-03 3.26844901e-01 -1.13471770e+00 ... -3.09436228e-02 4.50656325e-01 7.10713983e-01] ... [ 1.98137779e-02 5.82572579e-01 -1.84189141e+00 ... -3.91535580e-01 -1.73462248e+00 1.07012606e+00] [ 1.29593587e+00 7.40815401e-01 2.20209551e+00 ... 3.66995692e-01 1.86479777e-01 -1.13239205e+00] [ 2.37334207e-01 -6.73201561e-01 1.78350639e+00 ... 3.34541500e-01 -1.90946174e+00 -1.86253917e+00]] [[ 6.18497849e-01 2.28081211e-01 -4.34012711e-01 ... 6.67305052e-01 2.33601868e-01 -2.07736564e+00] [ 1.50119916e-01 1.60256815e+00 1.03125997e-01 ... 1.63472831e+00 1.90791690e+00 6.56704724e-01] [-8.30480933e-01 -2.31189108e+00 -1.26727387e-01 ... 1.53532565e-01 -7.29817808e-01 1.61054349e+00] ... [-7.69143641e-01 4.19251412e-01 -8.48667920e-01 ... -2.17060462e-01 -1.13594496e+00 -1.09472930e+00] [-1.60762823e+00 5.04029095e-02 -4.69524898e-02 ... 2.40505004e+00 6.35415316e-01 1.51927873e-01] [ 3.34030986e-01 -6.01310015e-01 7.91566491e-01 ... -3.07054341e-01 -7.65398085e-01 -2.72207528e-01]] ... [[ 2.07819462e-01 -8.38606358e-01 -7.62836635e-01 ... -7.82372713e-01 -9.95603204e-01 1.74667251e+00] [-1.01345289e+00 5.97277343e-01 -2.62543976e-01 ... 6.28429055e-01 -1.83987820e+00 5.89942396e-01] [ 1.48867309e-01 -2.66954273e-01 7.57624209e-02 ... -2.39297494e-01 -3.90751004e-01 8.40969443e-01] ... 
[ 2.47266978e-01 2.03161806e-01 1.09959066e-01 ... 7.34730363e-02 4.24076974e-01 1.19667375e+00] [-1.10486317e+00 1.50936759e+00 -1.55964434e-01 ... 1.55511248e+00 4.05391246e-01 6.40719235e-01] [ 1.70870030e+00 -1.62962914e+00 9.68378663e-01 ... 1.01885855e+00 -2.08016109e+00 8.25243175e-01]] [[ 4.04228032e-01 9.96099591e-01 -3.18100631e-01 ... 5.22818685e-01 2.35347194e-03 -7.85791934e-01] [ 2.91524023e-01 -4.06112373e-01 1.04303551e+00 ... 6.00066185e-02 6.02431595e-01 4.12486941e-02] [-1.53451955e+00 5.41222870e-01 -6.53930783e-01 ... -2.29088023e-01 -2.95139879e-01 -4.60310012e-01] ... [-3.32560986e-01 4.12245333e-01 -9.31809723e-01 ... 1.43725467e+00 5.01600802e-01 3.30635905e-01] [-6.40289545e-01 1.30350387e+00 9.30013880e-02 ... -6.96852326e-01 -1.39936700e-01 2.59262472e-01] [-1.67988315e-01 1.05818677e+00 1.16568351e+00 ... 7.07410812e-01 1.64006507e+00 9.61231530e-01]] [[-1.15351844e+00 2.00252104e+00 8.83480191e-01 ... 1.68991327e-01 1.90792704e+00 1.05734622e+00] [-1.15222476e-01 4.81471755e-02 1.92222074e-01 ... -2.41538620e+00 6.41262531e-01 1.25509012e+00] [-4.53003228e-01 5.74974000e-01 7.95690566e-02 ... 1.42445374e+00 2.13586330e-01 1.10295355e+00] ... [-1.14218819e+00 3.85691911e-01 -6.22450709e-01 ... 4.32173103e-01 9.43209231e-01 9.52467799e-01] [-1.00529313e+00 2.93519467e-01 -5.55280447e-02 ... -4.11363959e-01 3.47680241e-01 -1.60493720e+00] [ 1.60456645e+00 6.57415926e-01 3.56379986e-01 ... 1.55328408e-01 -1.75781441e+00 -1.02631688e+00]]] [[[ 6.07891679e-01 -9.49309289e-01 1.90256447e-01 ... -3.31874758e-01 3.17086309e-01 -1.13056958e-01] [-7.89759398e-01 -4.02239352e-01 -1.36819518e+00 ... 1.74936509e+00 -1.16078067e+00 -3.55061710e-01] [-2.58174253e+00 -2.52519459e-01 -2.63731599e-01 ... -1.35606492e+00 -2.62322873e-01 -2.28317633e-01] ... [-1.68673849e+00 -1.01392806e+00 -2.59445846e-01 ... 1.14076898e-01 5.09947121e-01 -1.42393267e+00] [-1.25324190e+00 4.56018716e-01 -1.30512333e+00 ... 
-9.43109870e-01 -6.46989420e-02 5.04703283e-01] [-9.39415544e-02 -5.28106987e-01 7.26646841e-01 ... 6.30713301e-03 9.36641991e-01 1.41185510e+00]] [[-6.16395772e-01 -1.15984869e+00 -1.96036443e-01 ... 7.08332717e-01 -7.99190044e-01 -4.19318825e-01] [-2.49859437e-01 1.23911393e+00 -5.71036279e-01 ... 1.32759762e+00 -7.11055696e-01 -1.30318272e+00] [ 1.06314385e+00 -4.26413983e-01 -1.26421720e-01 ... 2.24936676e+00 7.34946787e-01 -1.08071792e+00] ... [ 1.39733827e+00 -6.91328049e-01 2.36430094e-01 ... -1.93505436e-01 1.37370870e-01 -1.00971448e+00] [-1.58172823e-03 -1.06074929e+00 9.74748552e-01 ... -7.48506010e-01 -8.64326954e-01 3.26408672e+00] [ 3.57951820e-02 -8.45820069e-01 -6.11540020e-01 ... 2.41583869e-01 3.67016435e-01 1.37754405e+00]] [[ 1.60509631e-01 -2.02011153e-01 -2.60594785e-01 ... 2.91139930e-01 9.88590240e-01 7.90439487e-01] [-1.16260743e+00 -5.41178524e-01 1.60652483e+00 ... -1.01724172e+00 -2.26491880e+00 2.00344682e-01] [ 7.58399427e-01 7.51995683e-01 -4.02884752e-01 ... 7.24845707e-01 4.07599062e-01 -2.72017241e-01] ... [ 2.98145324e-01 -3.47348452e-01 7.88036138e-02 ... -1.16801512e+00 -1.14748788e+00 -3.58615130e-01] [ 1.53758302e-01 -1.77485728e+00 -6.34553432e-01 ... -1.89468861e-02 -5.37753344e-01 -9.34220135e-01] [-6.91091642e-02 -1.11662841e+00 5.24800837e-01 ... 8.64088815e-03 8.04200888e-01 -1.00249600e+00]] ... [[-5.43503761e-01 6.41932249e-01 -8.63288760e-01 ... 1.53121173e+00 -4.70222890e-01 -8.20088923e-01] [ 8.35244536e-01 3.19534332e-01 -5.03882110e-01 ... 5.56820273e-01 1.50571430e+00 -3.81065309e-02] [ 1.11608386e+00 -3.73832494e-01 -1.09818792e+00 ... 1.15979874e+00 -5.58294237e-01 -1.33217406e+00] ... [ 8.39234054e-01 -8.32440019e-01 -1.41540259e-01 ... -4.79340464e-01 6.80374324e-01 -2.07405210e+00] [ 7.77199566e-01 9.30759132e-01 1.37780309e+00 ... 5.49898982e-01 1.22097671e-01 7.98274517e-01] [-9.57283735e-01 3.45186353e-01 4.44869697e-01 ... 
2.00810599e+00 -3.64573747e-01 2.28272080e-01]] [[ 6.87927127e-01 8.78230035e-01 3.16090174e-02 ... -6.84444606e-01 2.00926495e+00 -6.37044787e-01] [ 2.33634400e+00 1.46583164e+00 -7.22921908e-01 ... -1.21222293e+00 1.08297423e-01 5.39055467e-01] [-4.89453167e-01 8.62764418e-01 8.17925572e-01 ... -7.36856014e-02 3.67848754e-01 -1.08953759e-01] ... [-6.05002582e-01 1.75111663e+00 -2.06330925e-01 ... -3.69846970e-01 -1.04078984e+00 -1.38328350e+00] [-1.65494323e+00 -1.45912838e+00 9.23679113e-01 ... 1.76860571e+00 1.49118209e+00 7.93815613e-01] [-5.53313792e-01 -5.23975603e-02 8.59010041e-01 ... 3.77298594e-01 -2.52232194e-01 5.24513960e-01]] [[-8.69170666e-01 -1.16701758e+00 -3.80440950e-01 ... 1.16249859e+00 -1.25099301e-01 6.98169291e-01] [-1.06296194e+00 6.23767674e-01 8.49452913e-01 ... -3.89598012e-01 1.07750916e+00 -1.37616944e+00] [ 1.62215137e+00 -2.44970292e-01 -1.34842110e+00 ... -2.26810241e+00 -1.27848196e+00 -1.50641516e-01] ... [-2.75580227e-01 3.17433119e-01 1.95756602e+00 ... 1.15713608e+00 1.26511261e-01 -3.69888574e-01] [-6.59513950e-01 8.63177180e-02 2.76474399e-04 ... 9.65489626e-01 1.52354658e+00 4.83033419e-01] [ 2.19356680e+00 -8.61882567e-01 6.68512046e-01 ... -1.01785529e+00 8.96501094e-02 1.60952246e+00]]] [[[ 1.27352595e+00 4.29707795e-01 6.20696664e-01 ... -1.11153233e+00 1.04015112e+00 -7.19335794e-01] [-4.87683773e-01 -4.87816662e-01 1.14358091e+00 ... 1.11485314e+00 1.64882094e-02 -1.02329719e+00] [-6.06649101e-01 -1.40152741e-02 -7.18935251e-01 ... -3.44529510e-01 -1.14325494e-01 7.85879016e-01] ... [-2.16016650e-01 1.55636334e+00 -1.22281218e+00 ... -1.23584831e+00 -9.81701672e-01 6.82976723e-01] [ 2.65421003e-01 4.45245087e-01 -1.17000866e+00 ... -7.07377613e-01 7.34916389e-01 3.36462438e-01] [ 2.50808299e-01 -3.71844202e-01 -6.72442794e-01 ... -1.31716597e+00 5.22610366e-01 4.72524166e-01]] [[ 1.77903399e-01 -1.30083442e+00 1.01476300e+00 ... 
-1.22160029e+00 7.80318916e-01 5.27292550e-01] [ 1.70028663e+00 8.41143608e-01 -5.13560891e-01 ... 1.22766447e+00 -3.29463005e-01 -1.14199810e-01] [ 5.51103592e-01 7.96869814e-01 9.02001560e-01 ... -4.93022352e-01 -8.85473490e-01 -1.12211215e+00] ... [-5.42125046e-01 -1.85301468e-01 4.32953507e-01 ... -1.44431102e+00 1.36915147e+00 3.23383808e-01] [-1.01169360e+00 -1.05721450e+00 1.24550223e+00 ... 9.10491228e-01 1.16238487e+00 -2.46721196e+00] [ 4.51120257e-01 -2.00378084e+00 -5.75299740e-01 ... -1.22271085e+00 -5.60515150e-02 -3.61697167e-01]] [[-2.33858407e-01 1.04202986e+00 -9.99349594e-01 ... -1.32824764e-01 -1.25626600e+00 1.75482464e+00] [-4.21126097e-01 -8.55265915e-01 -2.22910786e+00 ... 2.36173138e-01 -1.69698489e+00 -3.86288047e-01] [ 1.61054373e+00 -1.17656624e+00 -1.12220323e+00 ... -7.17402935e-01 9.89856899e-01 -8.10425460e-01] ... [-4.31244612e-01 1.78654790e+00 1.36487460e+00 ... 4.72024947e-01 -6.30901098e-01 1.15587151e+00] [ 7.28547215e-01 -1.45741180e-01 1.67934930e+00 ... 4.84858930e-01 1.81771553e+00 5.89857459e-01] [ 1.07967257e+00 -1.20150197e+00 -9.99696851e-01 ... 3.84525776e-01 -4.10133511e-01 2.87856340e+00]] ... [[ 5.08274317e-01 3.42981637e-01 -1.83727406e-02 ... -5.07860363e-01 -4.66360241e-01 -1.33187151e+00] [ 4.09138530e-01 -5.55985451e-01 -5.94625622e-02 ... -1.41528141e+00 -8.09730053e-01 -1.34586751e-01] [ 2.96716899e-01 -2.56723166e-01 -5.06254852e-01 ... -2.05710983e+00 1.42607892e+00 6.25004172e-02] ... [-9.73514616e-01 -5.64082503e-01 -3.29136625e-02 ... 2.19217181e-01 -9.01869297e-01 3.39036763e-01] [ 9.09499824e-01 -1.49643457e+00 -4.85809565e-01 ... 1.49512076e+00 3.48359674e-01 9.72510576e-01] [ 3.07963997e-01 1.48878765e+00 1.02227759e+00 ... 1.04589176e+00 -2.16834933e-01 -2.05741659e-01]] [[ 1.80304140e-01 -1.08289957e+00 -1.13308871e+00 ... 4.99401569e-01 2.32070535e-01 2.78789818e-01] [ 1.48898602e+00 1.47271347e+00 -1.02710739e-01 ... 
9.64886308e-01 9.40608740e-01 9.75910485e-01] [ 1.15947444e-02 6.67697117e-02 2.71810815e-02 ... -9.02033091e-01 -6.27168059e-01 1.00392425e+00] ... [-7.51190960e-01 1.61344051e+00 -2.58567989e-01 ... 2.77870506e-01 -5.83779871e-01 3.48304033e-01] [ 9.16850209e-01 8.02493766e-02 -2.66225791e+00 ... 1.00684285e+00 -1.07220197e+00 -9.43700671e-02] [-1.52753687e+00 -2.70681642e-02 6.23731852e-01 ... -4.14455950e-01 1.50701940e+00 -4.35464919e-01]] [[ 1.40737087e-01 -7.91416287e-01 -5.50027788e-01 ... 3.09507668e-01 -1.27104020e+00 -1.97198033e-01] [-7.71497428e-01 -1.01571834e+00 -8.84695828e-01 ... -4.34747279e-01 -1.91280198e+00 8.79953444e-01] [-2.21727043e-01 -9.76389825e-01 -6.20231628e-01 ... -8.67307007e-01 5.44787467e-01 2.44390416e+00] ... [-4.43561971e-01 -1.16098571e+00 6.50725782e-01 ... -6.71460152e-01 -1.04527712e+00 1.42376387e+00] [-5.32152951e-01 -1.00681257e+00 3.18182886e-01 ... 1.01755428e+00 8.86222064e-01 1.67216912e-01] [-2.91984856e-01 -7.42582977e-01 -1.36284244e+00 ... -1.11454630e+00 5.11993408e-01 -1.26836610e+00]]] [[[ 1.12687147e+00 6.87207282e-01 -8.96483839e-01 ... 6.64670229e-01 -1.15655589e+00 1.77938390e+00] [-1.02009094e+00 2.17237383e-01 2.85126299e-01 ... -1.74410045e+00 -1.47319287e-02 -5.98117888e-01] [-5.77866316e-01 6.06611431e-01 -5.66461086e-01 ... 3.47976089e-02 1.29901850e+00 1.57414520e+00] ... [ 1.86369729e+00 -4.94661003e-01 -9.49337304e-01 ... -2.24076331e-01 -3.72102737e-01 2.54930615e-01] [ 1.24895144e+00 -5.30104041e-01 1.59250990e-01 ... -1.98025334e+00 -4.21262741e-01 -6.72455728e-01] [-1.36848521e+00 -4.01370376e-01 -5.20054638e-01 ... 3.72644812e-01 -1.27750739e-01 -1.86242616e+00]] [[-1.44421101e+00 2.84001566e-02 6.52752757e-01 ... -5.16653717e-01 1.19032934e-01 5.46270907e-01] [ 5.07824123e-01 -1.03204347e-01 -3.10246646e-02 ... 4.02903378e-01 9.39060509e-01 1.17875981e+00] [ 2.26436305e+00 -4.12735909e-01 -2.89734751e-01 ... -1.38812542e+00 -7.36768663e-01 -2.03221178e+00] ... 
[ 7.60074735e-01 -7.65572935e-02 -7.72858441e-01 ... 1.12097216e+00 1.23193376e-01 -1.23585701e+00] [ 4.98964608e-01 1.67334116e+00 1.57526505e+00 ... 8.01941454e-01 1.80011141e+00 1.94640234e-02] [ 4.00777578e-01 1.28700900e+00 8.74737680e-01 ... -2.73559630e-01 1.00312531e+00 -7.56821573e-01]] [[ 1.06249464e+00 -2.56804729e+00 1.46172118e+00 ... 3.60493839e-01 -6.65699422e-01 2.49082112e+00] [-1.22776985e+00 -1.05918944e+00 -2.34529674e-01 ... -1.56107080e+00 1.54577598e-01 9.51158702e-01] [-6.85365081e-01 4.10226136e-01 -2.42408410e-01 ... 1.90463766e-01 -1.61496222e+00 -1.02772284e+00] ... [ 8.42414856e-01 4.82615009e-02 1.06691480e+00 ... 4.66206551e-01 -8.84888619e-02 -7.80635238e-01] [ 5.73272169e-01 2.60854512e-01 -7.16434196e-02 ... -7.58442938e-01 -7.44930059e-02 -4.09713954e-01] [ 1.69447744e+00 -1.18254936e+00 -2.83055842e-01 ... 7.06928909e-01 9.23361599e-01 6.46296859e-01]] ... [[ 1.22723073e-01 1.41602254e+00 1.63859338e-01 ... 8.94809246e-01 -5.28081238e-01 9.53165442e-02] [-1.94519663e+00 8.72577012e-01 6.62501514e-01 ... -1.18073463e-01 4.84087259e-01 5.68535507e-01] [-1.80509269e+00 -4.07100171e-01 2.04032108e-01 ... 5.55934310e-01 4.15356219e-01 9.20915425e-01] ... [ 8.35988045e-01 -8.17403868e-02 -1.73282373e+00 ... 4.63337004e-01 2.50732094e-01 -1.05040181e+00] [ 2.02303320e-01 1.13922745e-01 7.01341545e-03 ... -8.23150855e-03 -1.01887941e-01 6.37206435e-01] [ 1.42962897e+00 -1.12309575e+00 2.30256230e-01 ... -4.63985145e-01 -1.07224917e+00 -7.86538422e-01]] [[ 1.42474079e+00 -1.29522514e+00 -5.80435276e-01 ... -5.55705249e-01 -8.11176598e-01 2.90274787e+00] [-1.00796652e+00 -2.27088615e-01 1.45538783e+00 ... 6.91090643e-01 -8.33386779e-01 -3.31315964e-01] [-3.62685651e-01 -2.01715302e+00 -5.95409453e-01 ... 5.39618611e-01 1.18537426e+00 -1.39023101e+00] ... [ 1.88802230e+00 -6.02978051e-01 -2.77284384e-01 ... -6.24871135e-01 3.58449131e-01 8.64444375e-01] [-3.00623924e-01 -2.36457542e-01 8.95967305e-01 ... 
7.73108482e-01 7.36167312e-01 4.81978357e-01] [ 1.19826806e+00 -3.74185175e-01 8.00408483e-01 ... 3.82182360e-01 -7.70024359e-01 7.89210379e-01]] [[ 7.95765042e-01 -4.58370328e-01 3.09137392e+00 ... -4.29765970e-01 2.71922529e-01 -2.28248592e-02] [-6.63423657e-01 5.50597012e-01 2.95952439e-01 ... 4.71750826e-01 5.89204669e-01 -7.05319166e-01] [-1.82380867e+00 1.41217959e+00 -6.92372620e-01 ... 4.78561044e-01 7.29046822e-01 -1.79761246e-01] ... [ 1.22752404e+00 1.09254193e+00 1.99259484e+00 ... -1.18304086e+00 1.04327989e+00 -1.40448853e-01] [-3.20431501e-01 -1.30331039e+00 -3.68655980e-01 ... -6.13254845e-01 -4.30890411e-01 -3.23401630e-01] [-5.00681818e-01 9.66455519e-01 -4.93214279e-01 ... 3.57832789e-01 9.10970747e-01 -1.49505258e+00]]]] [[[[ 1.17513075e-01 1.41066754e+00 1.50196517e+00 ... -1.04021347e+00 2.60089684e+00 6.22735202e-01] [ 1.00731969e+00 1.05213709e-01 -8.86406660e-01 ... -3.69520366e-01 6.88985527e-01 -1.35138047e+00] [-9.78397056e-02 1.04482818e+00 -1.07924521e+00 ... -1.25029206e+00 2.46176982e+00 -1.24039993e-01] ... [ 1.32120466e+00 -2.73742080e-01 -9.92444575e-01 ... 1.40322793e+00 5.49881279e-01 2.39111885e-01] [-3.85482490e-01 4.92348880e-01 -3.21433127e-01 ... -5.73876679e-01 2.55558968e-01 1.09846568e+00] [ 1.18974471e+00 5.53468406e-01 -1.47609282e+00 ... -4.09395903e-01 -1.15336812e+00 1.00844026e+00]] [[-2.25038260e-01 -4.46291417e-01 -2.12982678e+00 ... -6.35189056e-01 2.29619622e+00 8.83603841e-02] [ 1.14486851e-01 -1.11528695e+00 -5.24279296e-01 ... -7.99023807e-01 -1.05434728e+00 6.23672605e-01] [ 1.82095662e-01 -8.30537200e-01 -4.26943064e-01 ... -5.79593778e-01 1.89694896e-01 1.02258682e-01] ... [ 5.67987084e-01 -9.37776625e-01 4.11716253e-01 ... 1.08077121e+00 7.57196248e-01 8.63877609e-02] [ 4.78582740e-01 3.13488275e-01 1.38479257e+00 ... 6.82213783e-01 -4.78179276e-01 7.94287741e-01] [-9.01551962e-01 -1.97796345e+00 -1.60217202e+00 ... 
1.76670218e+00 1.11045921e+00 6.75106794e-03]] [[ 8.32149148e-01 -5.68368256e-01 -3.87821734e-01 ... 1.13197291e+00 8.70756507e-02 2.55048013e+00] [ 1.47689116e+00 -1.24916720e+00 -2.01269329e-01 ... -2.89058208e-01 1.21735597e+00 1.54032028e+00] [ 3.26176465e-01 3.02946307e-02 -6.83366895e-01 ... -6.00203216e-01 -7.94520199e-01 2.07710370e-01] ... [ 2.41851941e-01 9.24217224e-01 1.68596244e+00 ... -9.36906934e-01 1.32516921e+00 2.58205724e+00] [ 3.74816120e-01 -7.03275740e-01 1.62078008e-01 ... 3.99123639e-01 -7.63985813e-02 1.11705065e+00] [ 1.03026129e-01 1.51891100e+00 1.53592324e+00 ... -1.25584841e+00 3.11876088e-01 4.30108309e-01]] ... [[-9.93170500e-01 -2.48820893e-02 -1.27202225e+00 ... 6.24835610e-01 -2.11170435e+00 1.08467102e+00] [-7.58477226e-02 -4.09461617e-01 1.10246336e+00 ... 1.95721424e+00 -8.50570858e-01 6.59037232e-01] [-7.60602891e-01 -9.98802111e-02 -7.53375292e-01 ... -2.05503845e+00 8.46911013e-01 -1.03874981e+00] ... [-8.40729237e-01 -9.19376984e-02 -7.58174837e-01 ... -1.63340420e-01 3.40562224e-01 9.73033667e-01] [ 1.06661189e+00 -4.20114636e-01 -6.39916778e-01 ... 2.03833863e-01 4.98083122e-02 4.46770430e-01] [ 1.99536908e+00 3.34573358e-01 -2.60129452e+00 ... -1.76783442e+00 1.27760386e+00 4.95572761e-02]] [[ 7.29614675e-01 -1.10202229e+00 -9.58077967e-01 ... -1.60042629e-01 6.00233138e-01 -2.16107655e+00] [ 4.45281208e-01 -3.71262282e-01 -1.70891511e+00 ... 4.52272892e-01 1.09662771e-01 1.02344608e+00] [ 1.12466121e+00 6.10263467e-01 1.71947753e+00 ... 1.14329541e+00 -1.14869714e+00 1.57182202e-01] ... [ 8.94093096e-01 1.28185487e+00 -5.97079992e-01 ... 3.22117299e-01 1.44937015e+00 2.00700641e+00] [-9.19552922e-01 1.45758438e+00 -7.65495002e-01 ... -2.42853022e+00 -1.12986159e+00 1.15575147e+00] [ 3.49548785e-03 -5.69702566e-01 8.20893526e-01 ... -4.05102521e-01 -1.18956864e+00 1.75557919e-02]] [[-6.00081205e-01 4.83334780e-01 -9.75141048e-01 ... 
-8.71948361e-01 2.84769058e-01 -1.03863752e+00] [-6.26788735e-01 -3.56079906e-01 5.93633614e-02 ... -1.25901902e+00 -1.93054795e-01 -2.87840843e+00] [-3.78796011e-01 6.06886387e-01 5.19396365e-01 ... -9.11621988e-01 -6.53830469e-01 -9.68208387e-02] ... [-2.87433445e-01 -1.46313524e+00 -1.02897322e+00 ... 8.96584868e-01 -3.70899469e-01 3.94293994e-01] [-1.28132355e+00 3.90164614e-01 1.20004928e+00 ... 4.20815349e-01 -9.96913850e-01 -6.64374053e-01] [ 1.12288930e-01 8.56402755e-01 -1.19724429e+00 ... -7.45088279e-01 2.12542281e-01 5.63926883e-02]]] [[[-6.18098140e-01 1.92648864e+00 2.77331084e-01 ... 1.30519044e+00 -4.16733712e-01 1.78262234e+00] [-9.45398733e-02 -6.81571901e-01 -3.22312593e-01 ... 8.79966170e-02 2.63370931e-01 1.34023264e-01] [-4.66465801e-01 3.09154183e-01 -3.40233073e-02 ... 7.05025911e-01 6.34725034e-01 6.43724427e-02] ... [-1.46054721e+00 3.09386998e-01 -2.26618171e-01 ... 3.68063211e-01 1.32414377e+00 -1.47410023e+00] [ 9.86120641e-01 -1.14781129e+00 1.18252122e+00 ... 1.27278471e+00 1.35753071e+00 1.98097333e-01] [ 1.38413942e+00 3.77031386e-01 -1.52882338e+00 ... -1.07783772e-01 -9.46763337e-01 3.36497068e-01]] [[ 4.10324037e-02 -6.99351430e-02 6.25889972e-02 ... -9.54122961e-01 1.51003671e+00 5.53272307e-01] [-3.02345991e+00 -3.16540778e-01 1.02452230e+00 ... 1.85993358e-01 -4.44195829e-02 1.17955565e-01] [-8.92122030e-01 -1.03439355e+00 1.84590614e+00 ... 2.07666802e+00 1.76632154e+00 -3.71098936e-01] ... [-7.37190619e-02 1.00097585e+00 6.03366673e-01 ... 7.74663687e-01 1.47595656e+00 -6.00518167e-01] [-4.76107150e-02 1.44896495e+00 -1.98925674e+00 ... -3.64038080e-01 7.29895160e-02 -6.16699636e-01] [ 7.10107803e-01 1.26752484e+00 -4.08368349e-01 ... -9.23824310e-03 -8.57757181e-02 2.60131419e-01]] [[ 8.56939629e-02 7.37937093e-01 -5.48409581e-01 ... 3.30139667e-01 1.46318495e-01 -3.28426927e-01] [-3.33332443e+00 5.36322236e-01 -3.86962593e-01 ... 
9.43829536e-01 5.72201349e-02 2.45557666e+00] [ 1.75064754e+00 -2.53720021e+00 2.60925502e-01 ... 1.87020040e+00 -1.70567393e+00 -8.71557176e-01] ... [-1.08668900e+00 -1.06265414e+00 -7.54784584e-01 ... 6.90255225e-01 1.64566076e+00 -4.58895922e-01] [-2.17234477e-01 -7.94611871e-01 1.69265878e+00 ... -1.06431234e+00 7.08827153e-02 -7.97081441e-02] [ 8.22594345e-01 6.79059625e-01 1.45795131e+00 ... 4.01783846e-02 -1.30869150e-01 -1.25099766e+00]] ... [[ 4.54608321e-01 -1.22539252e-01 -1.00771976e+00 ... -4.78863895e-01 -1.10755348e+00 1.46487975e+00] [-1.02572061e-01 -1.68168879e+00 -1.98816836e-01 ... 1.38592649e+00 -1.29234001e-01 4.52314526e-01] [-1.86479434e-01 7.72974789e-01 -1.05924296e+00 ... 3.98603678e-01 5.39525688e-01 -7.83611238e-01] ... [-6.33402467e-01 -7.41071224e-01 -2.69202977e-01 ... 9.39201355e-01 -1.48972964e+00 -8.13661397e-01] [ 6.97824419e-01 -7.95281231e-02 6.85292840e-01 ... -6.25703156e-01 -1.11718094e+00 -9.80685711e-01] [ 7.99119294e-01 2.42349601e+00 4.05085117e-01 ... -8.80624473e-01 2.14880490e+00 -1.59652495e+00]] [[ 6.13427162e-01 1.21301591e+00 8.51334184e-02 ... 3.30234081e-01 8.22012544e-01 5.67377917e-02] [-2.53555000e-01 7.33844221e-01 -3.28244805e-01 ... -9.06478405e-01 -1.16156101e+00 1.30105519e+00] [-7.29785144e-01 -1.39047432e+00 -6.51578367e-01 ... -2.41276979e+00 1.93774176e+00 -8.07417810e-01] ... [ 1.28288972e+00 1.75671198e-03 1.13557386e+00 ... -1.30733979e+00 -6.26805782e-01 -3.98424804e-01] [ 3.53129476e-01 6.57074690e-01 -7.03412950e-01 ... 3.77789944e-01 1.43867642e-01 9.89235282e-01] [-2.18062326e-01 5.64442158e-01 1.45758343e+00 ... 8.03581715e-01 1.34410977e+00 4.91906554e-01]] [[-1.04918408e+00 -1.39053594e-02 -1.66534793e+00 ... -1.13322866e+00 -4.76390868e-01 -1.90753639e+00] [ 7.22675562e-01 -1.93942451e+00 4.51384604e-01 ... 9.16559696e-01 -2.62209088e-01 -5.63886940e-01] [-7.49260485e-01 -6.04048193e-01 -6.93162799e-01 ... -1.57497287e+00 -7.45330751e-01 -6.39985681e-01] ... 
[ 1.12674549e-01 -6.74315989e-01 -5.75124621e-01 ... 2.14890301e-01 3.73586603e-02 6.31668746e-01] [ 2.11521432e-01 3.81509990e-01 8.32207441e-01 ... -9.03455496e-01 2.09640336e+00 5.47925234e-01] [ 3.50255907e-01 1.49829233e+00 -8.45133960e-01 ... 2.21566868e+00 1.16626942e+00 -3.13080996e-01]]] [[[-4.13546324e-01 -2.54282594e+00 -1.41294086e+00 ... 2.08894086e+00 -7.82856345e-01 -1.03013016e-01] [-7.57797062e-01 6.36388958e-01 -6.76573738e-02 ... -1.23360848e+00 -7.41540909e-01 -1.84675738e-01] [ 6.51394606e-01 4.66792852e-01 1.77086878e-03 ... -9.35647547e-01 4.00956571e-01 1.01458645e+00] ... [ 3.79905164e-01 9.11904931e-01 -8.49160627e-02 ... -1.09467566e+00 -1.86723133e-03 2.06745565e-01] [-6.47433519e-01 -6.85630560e-01 -4.23128009e-01 ... -1.04357076e+00 1.14591348e+00 4.86828089e-01] [ 1.18055606e+00 9.40320611e-01 1.30253625e+00 ... -3.77313733e-01 -4.77368563e-01 -1.54051757e+00]] [[-1.90672910e+00 -2.62161285e-01 -3.13339442e-01 ... 8.46991986e-02 1.33093560e+00 -1.09081246e-01] [-2.43116426e+00 -8.13959479e-01 3.11489925e-02 ... -9.21302438e-01 -4.02540356e-01 -1.38695073e+00] [-6.69947788e-02 6.01380467e-02 -8.81335855e-01 ... -1.38455808e-01 -2.06525826e+00 -1.35641015e+00] ... [ 7.42055595e-01 -5.51692069e-01 -7.25184858e-01 ... 1.37948024e+00 1.57242489e+00 -2.64007479e-01] [ 1.39461184e+00 3.06851536e-01 7.92565405e-01 ... 4.22048509e-01 -1.24327493e+00 -8.15688968e-01] [ 1.06972325e+00 -8.02787364e-01 -6.92687452e-01 ... -2.07663703e+00 -7.43084133e-01 -6.13325894e-01]] [[ 6.91504776e-01 -7.68914223e-01 -9.52764213e-01 ... -1.34577322e+00 -3.52249831e-01 -1.22725956e-01] [-8.96950722e-01 1.06030059e+00 1.06540418e+00 ... -1.42165601e+00 -1.61071098e+00 -8.68602633e-01] [ 1.96594059e-01 5.88845134e-01 -2.89915353e-01 ... -1.53479838e+00 7.46230721e-01 -2.49847341e-02] ... [-3.15081239e-01 1.89149350e-01 -1.27997212e-02 ... 7.82944858e-01 -1.09615493e+00 1.38582325e+00] [-2.46458560e-01 1.66208470e+00 -1.46930671e+00 ... 
-6.13581598e-01 2.89657980e-01 2.20382071e+00] [ 1.29450154e+00 -1.22161138e+00 9.61646438e-01 ... -1.65977871e+00 -5.12098491e-01 1.62759626e+00]] ... [[-6.01031065e-01 -1.20223165e+00 1.34900296e+00 ... 5.01058638e-01 1.47914457e+00 8.20310295e-01] [-7.58110046e-01 1.34292591e+00 1.35500944e+00 ... 1.29359353e+00 8.60278070e-01 1.76162004e+00] [ 2.52076052e-02 -3.53222936e-01 8.12771261e-01 ... 2.41364405e-01 -9.84772384e-01 -1.49409986e+00] ... [ 1.99349582e-01 8.65659118e-01 -6.34744525e-01 ... -1.50469291e+00 -1.59923255e+00 1.92561001e-02] [ 5.24476767e-01 5.66367209e-01 -3.82562757e-01 ... 5.15603926e-03 -4.37798589e-01 -1.28546667e+00] [-2.63090134e-01 -7.42271185e-01 -2.13315740e-01 ... 2.94090033e-01 1.27699709e+00 1.00771344e+00]] [[-9.49498117e-02 -1.41471159e+00 -4.89642531e-01 ... -1.97520331e-01 1.03893638e+00 6.76485777e-01] [-8.67616117e-01 1.52055395e+00 -1.15236640e+00 ... 5.69418311e-01 1.50009489e+00 -1.00229013e+00] [-1.54195750e+00 1.68078995e+00 1.11527133e+00 ... 1.38143373e+00 5.16012073e-01 -9.07561660e-01] ... [ 5.10723591e-01 1.83548009e+00 1.43571720e-01 ... 1.23243594e+00 7.95223296e-01 -9.50673372e-02] [ 4.66620564e-01 -1.47207046e+00 -1.96560299e+00 ... -2.00386018e-01 -7.02991486e-01 -1.31440550e-01] [-5.93585849e-01 -5.00024594e-02 -7.16972172e-01 ... -4.71768707e-01 1.26911685e-01 -9.18433666e-01]] [[ 5.49838305e-01 5.56703269e-01 1.25358629e+00 ... -2.72721124e+00 -1.40703249e+00 -1.00776410e+00] [-8.04916583e-03 1.49478245e+00 -6.80435777e-01 ... -7.64760315e-01 2.42625728e-01 5.85736692e-01] [ 6.63280308e-01 -7.66491950e-01 1.50870645e+00 ... -7.52989501e-02 -7.50029564e-01 -3.35083842e-01] ... [-1.57097027e-01 1.26830614e+00 2.10992241e+00 ... 1.08554967e-01 8.93642604e-01 -1.22127879e+00] [ 1.19912362e+00 -7.59575665e-01 -1.12613356e+00 ... 8.79197836e-01 -9.94647741e-01 -6.32942498e-01] [ 8.58300254e-02 1.18159652e+00 -4.28751737e-01 ... 
5.64415038e-01 -1.24506593e+00 1.46160997e-03]]] [[[ 4.51644063e-01 -2.06046194e-01 -1.99209824e-01 ... -9.65207636e-01 -6.84579462e-02 1.09553826e+00] [-3.45078647e-01 8.75413358e-01 7.31275603e-02 ... 4.64643449e-01 3.17301363e-01 5.93413115e-01] [-7.43833557e-02 -9.17242691e-02 -8.58465791e-01 ... -6.24775767e-01 2.05573988e+00 -1.08409119e+00] ... [-1.55283391e-01 2.18067184e-01 3.64432000e-02 ... 1.62499338e-01 1.02844298e+00 1.03148632e-01] [ 1.17886707e-01 1.34296149e-01 5.27112603e-01 ... 2.28678912e-01 -6.42298236e-02 3.19277018e-01] [-9.33762789e-01 4.70445842e-01 1.10504484e+00 ... -4.19064611e-01 -8.68073292e-03 -6.48139775e-01]] [[ 4.27126676e-01 1.18502729e-01 -5.02516150e-01 ... 1.07927215e+00 -4.88920808e-01 6.01427436e-01] [ 1.23810351e+00 -2.05322957e+00 -4.96715307e-01 ... 2.20025516e+00 5.62214077e-01 -9.10238445e-01] [ 6.35774791e-01 -8.96197021e-01 1.78829503e+00 ... 1.31378889e-01 -2.43857503e+00 2.02713549e-01] ... [ 9.31084752e-01 -3.96762908e-01 -2.12132573e-01 ... -5.08969948e-02 5.19665658e-01 -9.12803650e-01] [-2.53944188e-01 -8.29356611e-01 8.16047788e-01 ... 9.15269852e-01 -1.12749469e+00 6.94909811e-01] [-6.93363488e-01 -1.98596865e-01 -8.97916257e-02 ... 8.10463801e-02 -2.39867255e-01 3.98736358e-01]] [[ 2.00552896e-01 -3.24222505e-01 -9.60773408e-01 ... 9.48436260e-02 1.95658013e-01 4.19544190e-01] [ 1.31147814e+00 3.98441285e-01 1.50285661e+00 ... -1.05824101e+00 -1.66828167e+00 -8.25273097e-01] [-6.25564218e-01 -6.19883120e-01 8.70557129e-01 ... 3.66697162e-01 3.85274708e-01 -1.34432578e+00] ... [ 8.61899197e-01 -3.57124269e-01 1.71516404e-01 ... 3.11722845e-01 4.17350322e-01 -7.22472012e-01] [-6.87554240e-01 -1.31584513e+00 -9.91791368e-01 ... 4.84321058e-01 -6.98134601e-01 4.21301544e-01] [ 1.58548489e-01 9.18396831e-01 1.63022578e+00 ... 2.16121578e+00 5.40397763e-01 -8.86150479e-01]] ... [[-1.26505649e+00 -1.70014441e-01 6.17876112e-01 ... 
-6.59930646e-01 6.41397417e-01 -1.45943552e-01] [ 1.78341544e+00 1.33787870e-01 -1.53307751e-01 ... 4.27753432e-03 -1.74074018e+00 -6.65333092e-01] [-9.98483241e-01 -1.06300950e-01 1.18198884e+00 ... -8.44469786e-01 7.78117001e-01 2.19337374e-01] ... [-1.23574890e-01 -2.23856640e+00 -5.09169638e-01 ... 9.36553419e-01 -4.71859515e-01 -2.83104807e-01] [-1.12061679e+00 1.47241578e-01 -2.07934690e+00 ... 5.33271194e-01 1.82768643e+00 4.85130399e-01] [ 8.94855261e-01 3.43724489e-01 2.91743785e-01 ... 6.98538125e-01 1.01342452e+00 3.48374844e-01]] [[-1.12829404e-02 -1.10478759e+00 -4.18310642e-01 ... 8.66415262e-01 -8.66983593e-01 1.55782795e+00] [-4.56809491e-01 -9.46371019e-01 2.30726480e-01 ... -1.74436137e-01 1.61912203e+00 -1.03514910e+00] [-1.87360227e-01 -4.05683160e-01 3.45271170e-01 ... 1.24956679e+00 -1.51413321e+00 -5.98087311e-01] ... [ 8.58230412e-01 4.62951250e-02 -1.36071694e+00 ... -1.20286189e-01 -1.67091918e+00 -1.95612520e-01] [ 7.36853182e-02 5.96591711e-01 3.24753582e-01 ... -1.12066925e+00 1.99305272e+00 -9.81955409e-01] [ 9.15177047e-01 9.18614447e-01 1.77684367e+00 ... -1.11952953e-01 2.04486504e-01 3.14968497e-01]] [[ 1.15329301e+00 1.39047718e-02 -5.36271513e-01 ... 3.69920284e-01 -6.28035545e-01 4.51018423e-01] [ 5.50114632e-01 -8.37155223e-01 1.76615655e+00 ... 1.73450634e-01 -2.10953712e+00 3.60658348e-01] [ 3.45573932e-01 3.17361683e-01 -8.10797513e-02 ... 4.26579654e-01 -6.85868084e-01 4.73123997e-01] ... [-4.47890870e-02 -1.31786466e+00 6.05567455e-01 ... -1.00417614e+00 -1.05684745e+00 -1.29874802e+00] [ 3.57530206e-01 8.08822989e-01 -3.12215686e-01 ... -1.01526886e-01 1.42533779e+00 7.56879330e-01] [-1.61835611e+00 9.98243392e-01 1.62270951e+00 ... 4.76117969e-01 -4.52819377e-01 -1.61004412e+00]]] [[[-4.72747445e-01 -2.42834553e-01 -2.59725243e-01 ... 4.34466004e-01 7.79825985e-01 6.30344331e-01] [-1.78222075e-01 9.81823087e-01 7.89261639e-01 ... 
-1.67857218e+00 -1.04918480e-01 1.33450043e+00] [ 2.19533968e+00 -1.17951071e+00 2.31251860e+00 ... -1.24389064e+00 -4.35284406e-01 -6.88719809e-01] ... [ 5.08108377e-01 -2.97099799e-01 1.15808272e+00 ... 1.76351619e+00 1.11195838e+00 6.48310184e-02] [-1.83368444e-01 -3.10639977e-01 4.27364439e-01 ... -1.29585397e+00 -1.96297675e-01 -4.19216663e-01] [ 7.14544773e-01 1.87915578e-01 8.19405556e-01 ... 9.30656552e-01 7.65352726e-01 -4.75182831e-01]] [[-1.05864155e+00 1.97436607e+00 1.04569507e+00 ... 6.85806036e-01 3.82526338e-01 2.26472646e-01] [-3.09979737e-01 1.41556025e+00 2.17869425e+00 ... -2.08970308e-01 -1.00506604e+00 -3.70404571e-01] [ 3.59258384e-01 3.80432814e-01 -8.83164048e-01 ... -1.55210233e+00 -9.59164277e-02 1.95402312e+00] ... [-1.35840774e+00 -5.27788460e-01 -9.24743712e-01 ... 4.97386992e-01 -4.76472117e-02 8.51025701e-01] [ 9.02760684e-01 1.21587574e+00 1.19073439e+00 ... -6.69119895e-01 -2.65963972e-01 -1.29087436e+00] [-3.10003877e-01 -1.40367448e+00 2.46176254e-02 ... -1.09646189e+00 5.58407009e-02 -1.99145317e-01]] [[ 2.19400263e+00 8.14180315e-01 -5.55804312e-01 ... 7.14214802e-01 -1.53792048e+00 5.26525497e-01] [-1.92920379e-02 3.98903191e-02 1.15400410e+00 ... 6.13631546e-01 -6.82665825e-01 7.00211883e-01] [-1.38305351e-01 -1.82912254e+00 -1.09416425e+00 ... 1.54031098e+00 -5.81897855e-01 4.18668866e-01] ... [-7.82529891e-01 9.50309485e-02 3.63139868e-01 ... 5.16375363e-01 -6.31581068e-01 1.85415208e-01] [-9.51010287e-02 8.52377951e-01 -2.51724267e+00 ... -1.14954925e+00 1.12957013e+00 -6.58558369e-01] [ 4.88582015e-01 3.04366291e-01 -6.03822768e-01 ... 1.13193429e+00 -9.24279034e-01 -4.70319390e-01]] ... [[-1.37106287e+00 -3.51067632e-01 -5.40821493e-01 ... 1.91084787e-01 4.05371755e-01 5.52941859e-01] [-1.69960392e+00 4.24365848e-01 -7.47338161e-02 ... 1.53081429e+00 9.94424224e-01 -2.34936193e-01] [ 1.06481230e+00 3.47132057e-01 4.10484910e-01 ... 2.19436124e-01 2.11042330e-01 -6.11493528e-01] ... 
[-1.01840448e+00 -2.12693065e-01 -1.21539831e+00 ... 6.41991615e-01 -1.69138062e+00 1.71149850e+00] [-5.28220356e-01 1.17695701e+00 -2.15744153e-02 ... 1.75423336e+00 -1.67962492e+00 5.79135180e-01] [ 1.64666283e+00 1.08961987e+00 1.27530742e+00 ... 1.45507157e-01 -2.87600994e+00 6.30445361e-01]] [[-3.17566782e-01 -6.31241024e-01 -8.64478767e-01 ... -6.67101070e-02 -6.66168272e-01 7.32571721e-01] [-1.22042798e-01 2.72688210e-01 9.71479774e-01 ... 2.44430274e-01 -1.74547598e-01 8.60574007e-01] [-1.39194214e+00 -6.26862645e-01 -2.03957319e-01 ... 1.19311488e+00 -7.38707110e-02 1.34834242e+00] ... [-5.86729228e-01 5.64508080e-01 -2.78917611e-01 ... -9.37336206e-01 -3.50614458e-01 -1.16426671e+00] [-1.24733436e+00 -1.20706871e-01 -6.86604023e-01 ... -1.19210911e+00 4.68208641e-01 -2.44876906e-01] [-6.44539371e-02 7.99447298e-01 8.28865170e-01 ... -8.94231617e-01 -9.04402256e-01 -5.71915805e-01]] [[ 2.00981289e-01 -1.98578990e+00 -6.85945392e-01 ... -4.63837981e-01 8.91190350e-01 9.58405674e-01] [-1.16161346e+00 7.99304008e-01 -6.60549581e-01 ... 1.44421831e-01 -4.86995101e-01 -8.18065703e-01] [ 4.87782359e-01 -7.86097348e-01 -1.13864422e+00 ... -6.90437913e-01 1.45343983e+00 1.12243176e+00] ... [-5.69935441e-01 8.24366629e-01 -8.06626141e-01 ... -7.66012073e-01 -1.94050685e-01 1.60101250e-01] [-7.99668860e-03 -8.84472072e-01 4.00195241e-01 ... -4.82206838e-03 -9.54383388e-02 1.30776870e+00] [-1.28480300e-01 1.32752252e+00 3.47241789e-01 ... 3.32704097e-01 -2.05303168e+00 -4.65236425e-01]]] [[[-2.05523044e-01 -6.05379343e-02 -1.94319808e+00 ... -1.54094204e-01 2.21575332e+00 -3.25717837e-01] [-1.04701519e+00 -1.96586049e+00 5.21553934e-01 ... 9.94900405e-01 -6.26597762e-01 3.68015915e-01] [-4.94525194e-01 -1.32859066e-01 1.01733947e+00 ... 5.94672859e-01 -1.32446015e+00 2.22860456e-01] ... [ 3.78037781e-01 6.02127314e-01 -1.08783543e+00 ... 2.08160377e+00 2.07432494e-01 -5.06793201e-01] [ 1.02519125e-01 -1.12091088e+00 1.27862120e+00 ... 
-3.01714092e-02 6.03924215e-01 3.82217139e-01] [ 3.91173422e-01 -1.04945576e+00 -1.31792441e-01 ... -5.99692464e-01 -6.74735755e-02 1.68799981e-01]] [[-1.17435110e+00 6.49834692e-01 1.11308944e+00 ... -6.09254241e-01 -6.40000820e-01 5.55928588e-01] [-1.11141908e+00 -4.28656340e-02 -6.96589708e-01 ... -6.60743535e-01 -4.98955846e-01 -6.70488715e-01] [ 4.49526072e-01 -5.22864103e-01 2.76463747e+00 ... -1.08279787e-01 -3.36640961e-02 7.00430274e-01] ... [-1.74067950e+00 1.00435400e+00 1.07123345e-01 ... -5.58747470e-01 -5.24594426e-01 5.71504951e-01] [-9.85707819e-01 -6.94100142e-01 1.94731081e+00 ... -5.00969350e-01 -5.47650874e-01 1.11567251e-01] [-3.75594161e-02 -5.69175959e-01 1.32100451e+00 ... 1.46317765e-01 -5.49521372e-02 -1.31615174e+00]] [[ 4.41321731e-01 -5.43535590e-01 -2.99592781e+00 ... -1.67344427e+00 1.15616965e+00 1.31682122e+00] [-7.00695276e-01 1.29977778e-01 6.74552619e-01 ... 3.70238900e-01 1.84264219e+00 1.22972763e+00] [ 9.29995120e-01 -9.31897223e-01 5.40864348e-01 ... 9.57084894e-01 -1.21268094e+00 8.92431214e-02] ... [ 5.89360446e-02 8.95511389e-01 6.56032443e-01 ... -2.01863861e+00 -3.65339458e-01 -2.90435553e-03] [ 6.43990278e-01 -7.30911195e-01 -6.70782089e-01 ... -1.85374069e+00 4.48890358e-01 1.52484882e+00] [ 4.91712540e-01 -5.25885701e-01 -8.47885832e-02 ... 1.15820205e+00 2.25356206e-01 2.96250135e-01]] ... [[ 6.20155573e-01 -1.06870222e+00 -9.20180678e-01 ... -6.44859254e-01 2.20307440e-01 9.39421594e-01] [ 3.85047019e-01 2.03839585e-01 -2.68761247e-01 ... 8.33484769e-01 8.63603711e-01 -8.67757857e-01] [ 3.14611226e-01 -2.72453815e-01 5.53075731e-01 ... -6.60874009e-01 -3.10845703e-01 3.98145407e-01] ... [ 1.64840424e+00 1.13494885e+00 -1.80998743e+00 ... -7.53436983e-01 -1.54831827e+00 -8.31163287e-01] [-2.35618621e-01 -3.78483474e-01 2.82071829e-01 ... -2.93915927e-01 2.74499321e+00 -1.59860879e-01] [-2.18702510e-01 -1.24698234e+00 -1.37882441e-01 ... 
-3.57490540e-01 -1.57885686e-01 -1.47809014e-02]] [[-8.57590616e-01 -2.54530859e+00 8.27615261e-01 ... -1.57808587e-01 -9.69617590e-02 -1.90164638e+00] [-1.34069216e+00 2.12723091e-01 9.15735960e-01 ... -8.32216144e-01 7.43306518e-01 -3.58905792e-01] [-1.24777997e+00 7.30070025e-02 -1.78962365e-01 ... 3.12430739e-01 -7.27020323e-01 1.05632305e+00] ... [-3.70776296e-01 -1.10724556e+00 -5.14269769e-01 ... 5.50239563e-01 4.81305987e-01 -4.24647152e-01] [-8.69241357e-01 3.39182168e-01 2.52205223e-01 ... 9.11967084e-02 4.78798181e-01 6.69413865e-01] [-4.70113933e-01 -7.51044080e-02 9.60241795e-01 ... 1.10756254e+00 4.86239254e-01 1.73178979e-03]] [[-4.82642829e-01 -1.21946287e+00 -1.90140414e+00 ... 5.92204213e-01 -1.30503380e+00 -8.05114448e-01] [-1.42937410e+00 1.58393104e-02 -5.95378339e-01 ... 1.39568508e+00 6.10006273e-01 -2.02004385e+00] [-2.51883507e-01 2.10029292e+00 -1.15189421e+00 ... -8.60461235e-01 6.26252174e-01 -1.40541151e-01] ... [ 4.90005493e-01 1.69241714e+00 2.81759143e-01 ... 3.20829779e-01 1.13744235e+00 1.97765672e+00] [-1.41396904e+00 5.66883624e-01 -1.15075517e+00 ... 1.13569403e+00 -7.54112899e-01 -3.57292444e-01] [-8.20848882e-01 -6.08476996e-01 -9.50867087e-02 ... 7.52934992e-01 -1.16182494e+00 5.07882714e-01]]]] [[[[-7.55911291e-01 9.88483191e-01 6.08776398e-02 ... 1.68791890e+00 1.74790347e+00 -7.93423712e-01] [-1.26039362e+00 3.60073566e-01 7.80821621e-01 ... 4.50389057e-01 -1.37145802e-01 8.82560372e-01] [-6.53205693e-01 1.14390671e+00 8.17849219e-01 ... 6.17349267e-01 -3.93188037e-02 1.00995970e+00] ... [-4.04616117e-01 -3.72085363e-01 7.50895798e-01 ... -2.21442127e+00 9.25616562e-01 -1.97686721e-02] [-2.36749873e-01 -8.68628263e-01 -7.67396033e-01 ... -6.88347340e-01 8.58897418e-02 -1.04283047e+00] [ 5.18060863e-01 -8.55213404e-01 2.66985208e-01 ... 7.47185707e-01 6.42318577e-02 2.50660837e-01]] [[-1.11642516e+00 -1.99650362e-01 -3.24065983e-01 ... 
7.47600913e-01 1.09506941e+00 -5.92280865e-01] [-6.00767553e-01 1.24477100e+00 3.34425300e-01 ... 2.46685314e+00 -6.52201355e-01 -1.25091636e+00] [-1.51857519e+00 -2.68951803e-01 1.44693577e+00 ... -1.38998353e+00 8.10509324e-01 -1.16277218e+00] ... [ 9.41981912e-01 6.14317536e-01 3.46473336e-01 ... 1.18676949e+00 -7.00666150e-03 -1.02333021e+00] [ 1.53754282e+00 -2.17842132e-01 -1.49660993e+00 ... -4.70094502e-01 -4.92688775e-01 -1.39690578e+00] [ 1.28708732e+00 -1.76294208e-01 -5.24202883e-01 ... -1.46861708e+00 -8.10894787e-01 5.94180703e-01]] [[-4.52385783e-01 6.24414861e-01 2.96430767e-01 ... 1.30516877e-02 1.04715502e+00 9.82551873e-01] [ 6.46073163e-01 -8.77819657e-01 -1.37930238e+00 ... 1.84801280e-01 -3.35634202e-01 1.37831256e-01] [ 2.91522115e-01 -1.38515770e-01 9.36912239e-01 ... 1.89021200e-01 3.11863512e-01 -4.56063092e-01] ... [-1.16456950e+00 9.77333486e-01 -5.83249867e-01 ... 1.48021376e+00 -1.24461460e+00 4.70423639e-01] [-7.59913087e-01 3.24084580e-01 -1.17122197e+00 ... 2.58545130e-01 -1.72474861e-01 -1.09926558e+00] [ 7.13913500e-01 2.83545911e-01 1.96428502e+00 ... -1.39669597e+00 1.65349111e-01 -7.02526793e-02]] ... [[ 1.85635459e+00 -3.47782969e-01 -7.33757392e-02 ... -1.00995278e+00 6.09290004e-01 -1.18478227e+00] [-1.53537524e+00 -4.60035056e-01 -1.84139937e-01 ... 4.88847405e-01 1.58083737e+00 -1.78301618e-01] [ 1.14627743e+00 -1.22003961e+00 5.61268687e-01 ... -1.45127058e+00 -6.82374597e-01 1.57384336e-01] ... [ 7.40323305e-01 -1.59668446e+00 1.31119025e+00 ... -4.95254397e-01 1.97247970e+00 9.63560402e-01] [-1.03688884e+00 4.27406371e-01 4.05896902e-01 ... 1.00882912e+00 7.98481345e-01 -4.68153134e-02] [ 2.76794255e-01 1.44127715e+00 -1.13998616e+00 ... -4.47721511e-01 -3.98024619e-01 -6.03531301e-01]] [[ 3.45059335e-01 1.13194025e+00 -1.28518915e+00 ... -6.15591526e-01 7.39491642e-01 5.06471097e-01] [-1.02168119e+00 1.47129580e-01 -5.04618645e-01 ... 
3.40709776e-01 6.57936335e-01 -2.04963703e-02] [-2.76876479e-01 -1.34189332e+00 -1.88647044e+00 ... 1.04338539e+00 -1.20571917e-02 -7.69607723e-01] ... [ 8.47717106e-01 -1.10964572e+00 -5.44977307e-01 ... -1.01763773e+00 -4.99128491e-01 5.26789010e-01] [-9.46656466e-01 1.52276428e-02 -6.50387764e-01 ... 6.70834780e-01 1.17890191e+00 3.02844375e-01] [ 6.34760737e-01 9.26276028e-01 -1.34087527e+00 ... -1.07112695e-02 -1.03522491e+00 9.41143453e-01]] [[-1.42657447e+00 -3.11126947e-01 -1.53099263e+00 ... -1.52292103e-01 -2.18858433e+00 -1.23599148e+00] [-5.38670242e-01 4.60832566e-01 -2.24822521e+00 ... -1.75996792e+00 2.97525048e-01 -3.53569806e-01] [-2.07798295e-02 8.77943635e-01 -1.32758498e+00 ... -2.04471922e+00 -8.25156510e-01 -1.25918043e+00] ... [-1.85614690e-01 -1.16344106e+00 -7.62344241e-01 ... -9.79491055e-01 5.69248259e-01 5.60267389e-01] [ 1.52182853e+00 1.17295015e+00 2.00005960e+00 ... -4.48693410e-02 -1.98370910e+00 2.83270192e+00] [ 1.10939181e+00 -7.30196357e-01 -1.15957367e+00 ... -9.46791112e-01 -3.86187620e-02 1.67307985e+00]]] [[[ 1.41862833e+00 -1.56128097e+00 9.73704576e-01 ... 3.93631071e-01 -5.54739773e-01 6.18943095e-01] [-4.25748497e-01 1.13250995e+00 -1.17971623e+00 ... -9.37492669e-01 1.52396691e+00 -9.07339573e-01] [ 3.52407247e-01 1.24624300e+00 -1.60968915e-01 ... -6.61108792e-01 9.86457944e-01 1.15415728e+00] ... [ 1.11962903e+00 1.10077322e+00 1.10614645e+00 ... 8.74489367e-01 7.99384177e-01 -8.48828018e-01] [ 1.33255646e-01 4.62867916e-02 4.55411673e-01 ... 1.75038457e+00 8.87881398e-01 -1.05954957e+00] [ 4.03403312e-01 6.63717747e-01 -6.91618085e-01 ... 1.07664078e-01 1.82394803e+00 1.76351345e+00]] [[ 2.02414608e+00 2.86287355e+00 -8.26494277e-01 ... -2.38237277e-01 1.31414247e+00 1.22162426e+00] [-4.75775654e-04 6.04642391e-01 -2.11445257e-01 ... 3.18438351e-01 1.44423872e-01 6.97629988e-01] [-5.39556205e-01 -5.15681565e-01 5.31101599e-02 ... -4.94958073e-01 1.16482723e+00 -1.43254507e+00] ... 
[-1.00490701e+00 -1.17949665e+00 -1.37860978e+00 ... -2.85254717e-01 -2.16294193e+00 -1.87411833e+00] [ 7.28324950e-01 -1.65119901e-01 1.85470566e-01 ... 3.43652338e-01 -2.06006765e+00 7.96536267e-01] [ 1.27480233e+00 -7.16605723e-01 2.93486975e-02 ... -1.21165502e+00 2.75700629e-01 -1.71591020e+00]] [[-1.95711911e+00 -1.65553033e+00 7.03549564e-01 ... -1.46755588e+00 -1.46189404e+00 -8.38968251e-03] [ 7.97518075e-01 -4.91392724e-02 -2.80323982e-01 ... 1.04971766e+00 1.00781754e-01 -5.35054281e-02] [ 8.03624332e-01 -1.04296172e+00 -2.65972894e-02 ... 1.47123516e-01 -1.20874846e+00 4.96737182e-01] ... [-6.37946606e-01 9.72930312e-01 8.40828121e-01 ... -5.58809638e-01 -2.14549452e-01 -4.24430460e-01] [-3.32933486e-01 -8.13112080e-01 3.67385477e-01 ... -5.27837396e-01 1.40629023e-01 -2.10979342e+00] [-1.07241675e-01 1.34310627e+00 -1.05232334e+00 ... 1.12422931e+00 -1.07109666e+00 -2.24400237e-01]] ... [[-5.16234040e-01 7.20734477e-01 2.96163499e-01 ... -2.49291752e-02 -9.14452314e-01 7.50149310e-01] [-1.25320601e+00 1.52246439e+00 -1.52759135e+00 ... -1.04363441e+00 -6.54054344e-01 1.57739997e+00] [-6.98513389e-01 1.79413247e+00 7.22545743e-01 ... 3.94938260e-01 1.10346234e+00 -8.65122199e-01] ... [-1.01328921e+00 2.46187139e+00 -3.06503683e-01 ... -3.40098858e-01 -8.99072051e-01 1.44630218e+00] [-1.75936747e+00 -5.09715557e-01 -7.81184807e-02 ... -3.34837474e-02 1.43482339e+00 1.74528611e+00] [-2.20571250e-01 -6.99407518e-01 1.03575027e+00 ... -2.19187334e-01 9.08854723e-01 4.49310303e-01]] [[ 1.28547692e+00 -1.63949800e+00 1.07263565e+00 ... -1.41596556e+00 6.71987176e-01 2.86902755e-01] [-6.62169695e-01 -1.11282915e-01 4.05435026e-01 ... 2.13747099e-01 -1.16987276e+00 -5.50012052e-01] [-1.01779246e+00 1.46721220e+00 -4.69565630e-01 ... -3.88382107e-01 -1.50927103e+00 -1.31911862e+00] ... [ 5.92177629e-01 -2.04413012e-01 1.70473177e-02 ... -1.10763693e+00 -1.01411533e+00 2.08308125e+00] [ 8.56681228e-01 2.36295640e-01 -1.02853167e+00 ... 
-5.84241450e-01 9.78925228e-01 1.47382176e+00] [-4.78600711e-02 -5.30719697e-01 -4.32098269e-01 ... 9.06653047e-01 -1.79102099e+00 9.61764276e-01]] [[ 2.93275621e-02 -5.76252222e-01 -1.74425435e+00 ... -1.12277400e+00 -1.13781059e+00 -2.05743670e-01] [-9.54099417e-01 4.82216626e-01 8.65035713e-01 ... -7.61180222e-01 1.78019285e-01 -3.45129669e-02] [ 6.54819429e-01 6.44400120e-01 5.54017544e-01 ... -3.65523815e-01 -4.84794647e-01 2.34495258e+00] ... [-5.68569481e-01 -1.02413905e+00 -2.08396006e+00 ... -1.91572070e-01 1.01132190e+00 -1.12903163e-01] [ 8.10231447e-01 6.16415858e-01 1.87269494e-01 ... 2.26850420e-01 2.08545303e+00 3.88641894e-01] [ 1.13967800e+00 1.86713204e-01 -1.05982542e+00 ... -1.71064854e+00 -8.83034706e-01 1.68124497e+00]]] [[[-2.12058330e+00 1.11200547e+00 -3.24482232e-01 ... -7.09206700e-01 -2.04629689e-01 2.58350134e-01] [ 8.91509891e-01 -6.24599569e-02 -6.52327538e-01 ... 1.07473768e-01 1.65716326e+00 -1.24234116e+00] [ 2.28490686e+00 3.78829427e-02 8.22353959e-01 ... -2.31819057e+00 1.03513062e-01 7.14456886e-02] ... [ 2.41586328e-01 -6.01934671e-01 -6.30070984e-01 ... -2.30133489e-01 2.38300085e+00 -8.87164891e-01] [-6.30494833e-01 -7.93637812e-01 -8.32629979e-01 ... 3.39130104e-01 4.56057250e-01 1.27579117e+00] [ 9.47307765e-01 -1.05189848e+00 5.83057523e-01 ... -6.58082724e-01 -8.92179191e-01 1.91672885e+00]] [[ 7.22020686e-01 1.31628466e+00 -1.44342089e+00 ... 1.75859463e+00 -2.53569198e+00 1.61604309e+00] [ 1.22244346e+00 1.32772958e+00 -5.95481634e-01 ... 1.21196710e-01 1.08641648e+00 2.50001729e-01] [ 6.95018232e-01 -1.50094748e+00 -5.18660963e-01 ... 3.79213244e-01 1.02772689e+00 2.66260171e+00] ... [ 5.19665241e-01 6.37040675e-01 1.59476495e+00 ... -8.60606313e-01 -4.15833443e-01 -4.15944785e-01] [-8.89247060e-01 -9.79446471e-01 1.45399439e+00 ... -3.92372087e-02 1.17638312e-01 1.31618470e-01] [-8.83297443e-01 3.09036789e-03 -1.03742158e+00 ... 
-1.52599180e+00 7.44908214e-01 1.01282191e+00]] [[ 2.51191445e-02 -2.52871633e+00 6.84832573e-01 ... 5.86811900e-01 -4.68941927e-01 5.38745046e-01] [ 1.13103531e-01 7.29031801e-01 -1.40608096e+00 ... -8.83473083e-02 1.68345749e-01 4.02279407e-01] [-9.79708970e-01 -1.40858257e+00 -1.93231523e+00 ... -5.19606292e-01 -8.59270513e-01 1.79727748e-01] ... [ 9.40793872e-01 -5.00289142e-01 -1.70172989e+00 ... -1.30890608e+00 1.77596962e+00 -3.85532320e-01] [-1.11792672e+00 2.44483614e+00 1.78658068e+00 ... -1.34356603e-01 -3.67979258e-01 -1.57093894e+00] [-1.30983961e+00 -2.48302603e+00 -9.60065603e-01 ... 9.70430017e-01 1.56429410e+00 -9.90480781e-01]] ... [[ 5.15790701e-01 1.27528203e+00 1.60432398e+00 ... -1.18737984e+00 1.31376278e+00 7.21591592e-01] [-8.33124816e-01 -8.89859676e-01 -1.42059469e+00 ... -1.51636708e+00 -3.99684876e-01 -9.70898807e-01] [-5.28923035e-01 -1.61522937e+00 6.67679980e-02 ... 4.81095701e-01 -8.09993684e-01 -1.19984400e+00] ... [-1.46651983e+00 -2.47581944e-01 7.39924656e-03 ... -3.47122073e-01 -7.91720331e-01 -5.47066867e-01] [ 1.13271904e+00 1.27138221e+00 -2.11043000e+00 ... -5.74386835e-01 -1.28934368e-01 -2.29184484e+00] [ 4.78259146e-01 1.04664135e+00 -1.74437076e-01 ... -1.24512305e-02 -6.61331594e-01 6.58887982e-01]] [[ 7.26028264e-01 -1.37806982e-02 -1.69401670e+00 ... -1.23098500e-01 2.23887399e-01 -5.13321042e-01] [ 6.78991795e-01 -1.14378870e-01 1.01175189e-01 ... -1.69301295e+00 -9.18659449e-01 1.15250044e-01] [ 1.55964804e+00 -4.08732921e-01 9.35982823e-01 ... -1.92584738e-01 -8.80604446e-01 1.17999804e+00] ... [-3.96876574e-01 3.90129954e-01 6.91420436e-01 ... -3.84702049e-02 -3.89746279e-01 -3.15091342e-01] [-2.01005727e-01 6.19720995e-01 1.02675724e+00 ... 1.89489841e+00 -6.92540765e-01 -1.14192434e-01] [ 6.82717085e-01 9.53020394e-01 -9.86561596e-01 ... 1.75670934e+00 -4.71571982e-01 -3.74831736e-01]] [[ 2.24412233e-01 5.55979460e-02 -1.73767376e+00 ... 
4.68215197e-02 -5.57284772e-01 8.05203974e-01] [ 1.32000124e+00 -1.12399387e+00 3.03221978e-02 ... -4.38776910e-01 8.80786061e-01 -9.06563520e-01] [-3.08295250e-01 1.01985838e-02 -5.99281132e-01 ... -5.41472256e-01 -8.95632982e-01 -4.85681415e-01] ... [-3.37439156e+00 1.21316664e-01 6.20792508e-01 ... -3.58650535e-01 1.19792199e+00 -1.86436221e-01] [-5.75477600e-01 2.18929505e+00 2.42704347e-01 ... 1.31938958e+00 2.03839183e-01 7.67257661e-02] [ 8.65337372e-01 -4.64892894e-01 7.12492943e-01 ... -3.70217741e-01 3.13634992e-01 2.44475976e-01]]] [[[ 2.02204272e-01 -6.94048941e-01 -1.00844450e-01 ... 1.07493544e+00 8.74486417e-02 -1.38207245e+00] [ 1.20600283e+00 1.81836128e+00 7.16094851e-01 ... -3.47753465e-01 -2.32928085e+00 4.02989805e-01] [ 9.63753819e-01 3.06121141e-01 -3.31714869e-01 ... 1.32202840e+00 4.52585459e-01 -2.21816134e+00] ... [ 2.46107668e-01 -6.94637537e-01 2.33139467e+00 ... -3.19735527e-01 -8.88445139e-01 -5.97679079e-01] [ 1.11689913e+00 5.11577904e-01 4.82637972e-01 ... 2.76792002e+00 -9.17494535e-01 7.80478477e-01] [-1.70595276e+00 7.75070488e-01 -5.01671493e-01 ... -2.66769767e-01 5.56780040e-01 6.73949838e-01]] [[ 5.71400106e-01 7.89561033e-01 1.43530786e-01 ... 1.26909450e-01 -1.13420928e+00 -6.79945529e-01] [-1.74183702e+00 -7.60641277e-01 1.24232364e+00 ... -1.30740440e+00 -1.65202403e+00 1.12959242e+00] [ 9.88415897e-01 2.67616391e-01 -8.56390417e-01 ... -2.30346751e+00 -8.57199967e-01 8.01820695e-01] ... [ 6.16068125e-01 1.75495195e+00 -8.98051083e-01 ... -5.94619334e-01 3.90740812e-01 1.38777783e-02] [-1.92385840e+00 -1.04223263e+00 1.43188909e-01 ... 2.36231828e+00 2.12629318e-01 1.36758849e-01] [-3.43434811e-01 -1.29369861e-02 -7.61275530e-01 ... 1.06644762e+00 6.52486563e-01 -7.82144070e-01]] [[-5.50201833e-01 -1.97555602e+00 -2.79077220e+00 ... -9.36381519e-01 -1.15637183e+00 -2.80779898e-01] [-2.28728199e+00 -2.13019013e+00 1.26757324e-01 ... 
-1.09394002e+00 -3.57901126e-01 -2.20883656e+00] [ 1.41569936e+00 1.03521168e+00 -1.76630080e-01 ... -1.00722206e+00 -2.53048569e-01 -6.42531493e-04] ... [ 5.33807203e-02 -7.82061100e-01 -2.32316345e-01 ... 5.95871627e-01 1.52933407e+00 -1.38887525e-01] [ 6.22704923e-01 -1.04534638e+00 -1.39912629e+00 ... -7.83050418e-01 -2.79390484e-01 -2.49765456e-01] [-1.28618866e-01 1.03482580e+00 1.86201081e-01 ... -1.26648271e+00 -9.52254713e-01 8.71725082e-02]] ... [[ 2.48344445e+00 -2.89715409e-01 4.07432050e-01 ... 8.83334577e-01 -8.13156247e-01 1.29351795e+00] [ 3.29897404e-01 -5.94857216e-01 2.46997058e-01 ... 7.17107713e-01 3.88692506e-02 3.34253430e-01] [-1.37862802e-01 5.93918324e-01 2.09611636e-02 ... 4.74969298e-01 -6.57632053e-01 2.14315325e-01] ... [ 3.77125084e-01 8.99145067e-01 -2.15483665e+00 ... -2.79954553e-01 2.29248405e-01 4.98676896e-01] [-5.08976519e-01 -4.74108368e-01 1.22815758e-01 ... 7.28244960e-01 1.03588009e+00 -1.46331859e+00] [-3.65462273e-01 8.67505252e-01 1.69160008e+00 ... -1.03000498e+00 -3.95149052e-01 -9.41701472e-01]] [[-3.43982846e-01 -1.36418134e-01 2.54839689e-01 ... -3.77560109e-01 2.13941932e-01 1.02296472e+00] [-4.12094921e-01 -3.91144514e-01 -4.95626271e-01 ... -1.11232722e+00 -2.45254874e+00 6.30059719e-01] [ 5.55650949e-01 -8.92876923e-01 -1.57449031e+00 ... 1.29882753e+00 1.96514320e+00 -2.32832599e+00] ... [ 9.30116773e-01 5.37917972e-01 -2.71141410e+00 ... 2.73335427e-01 -5.98592222e-01 -6.12799823e-01] [ 1.81736588e-01 -1.27753925e+00 -1.55157161e+00 ... -2.06528127e-01 1.82237637e+00 1.63735080e+00] [-1.27595031e+00 -2.61690021e-01 -1.15455127e+00 ... 1.20331697e-01 -2.15299702e+00 -6.88573480e-01]] [[ 9.16472733e-01 -4.70894635e-01 -1.13048875e+00 ... -1.90425277e-01 -3.35619658e-01 1.42498434e-01] [-1.45970893e+00 -1.36310852e+00 9.08245742e-02 ... -5.65723293e-02 -3.19939822e-01 -1.15341771e+00] [-2.53473330e+00 2.57905871e-01 2.06531554e-01 ... -4.17883635e-01 -3.32518309e-01 2.45948538e-01] ... 
[-7.74593726e-02 2.14714766e+00 -9.54681993e-01 ... -7.74374247e-01 2.80635953e-01 2.57517993e-01] [-1.88361323e+00 -7.57160842e-01 9.27320898e-01 ... 8.43563795e-01 7.08519876e-01 4.00670528e-01] [ 1.15967363e-01 -9.96490344e-02 -2.92121619e-01 ... 9.63857532e-01 3.83671075e-01 1.97871876e+00]]] [[[ 5.76610625e-01 8.52394164e-01 -9.99720275e-01 ... 8.25584769e-01 -9.26875532e-01 9.35801625e-01] [ 3.85124683e-01 1.77702558e+00 2.54101992e-01 ... 5.52028298e-01 -1.80887210e+00 4.23011303e-01] [ 1.12247741e+00 -3.98500077e-02 -5.09653568e-01 ... 1.09744586e-01 4.68806267e-01 6.79738045e-01] ... [ 2.12116623e+00 1.98604476e+00 2.23741198e+00 ... -1.10534713e-01 1.02515924e+00 9.90453839e-01] [-4.99744594e-01 1.74094427e+00 9.74775672e-01 ... -1.59097284e-01 1.30084008e-01 -1.69122204e-01] [-3.95878643e-01 -6.76410913e-01 -4.15815085e-01 ... -4.53311145e-01 1.31238472e+00 2.32552797e-01]] [[ 3.12351659e-02 5.60102344e-01 1.90823197e-01 ... 1.40742287e-01 8.75782490e-01 -3.35868597e-01] [-1.40081465e+00 2.32852742e-01 1.20564617e-01 ... -1.21708259e-01 -6.97714448e-01 9.37453687e-01] [ 1.97642434e+00 7.40772337e-02 -5.08075058e-01 ... -2.57283390e-01 6.43724263e-01 6.74950778e-01] ... [ 9.06743467e-01 -1.19269013e+00 -2.08262831e-01 ... -2.39279017e-01 1.11229253e+00 -1.08388925e+00] [ 2.66907364e-01 3.38489145e-01 6.04644537e-01 ... -5.10609269e-01 -2.83345968e-01 -4.92323041e-01] [-1.03828895e+00 4.40879852e-01 8.80859017e-01 ... -6.93076909e-01 9.17684853e-01 8.41642737e-01]] [[-5.59332192e-01 1.55252886e+00 -1.76371560e-01 ... -1.13342845e+00 1.60565650e+00 -2.34479591e-01] [-4.48774904e-01 -1.43232441e+00 -1.68096328e+00 ... -5.89735270e-01 5.61402559e-01 -1.35240948e+00] [-9.69612300e-01 -9.84861434e-01 7.60701776e-01 ... -4.17495430e-01 -4.07481015e-01 6.29396915e-01] ... [ 9.98319685e-01 6.89509690e-01 8.84133875e-01 ... -1.35669506e+00 3.27909255e+00 7.43566006e-02] [-1.16153479e+00 -4.40421402e-01 2.14721680e+00 ... 
1.33480489e+00 -1.63695347e+00 1.94405615e+00] [-2.25173211e+00 2.12635361e-02 3.88041347e-01 ... 7.65676916e-01 1.02009833e+00 -1.21465969e+00]] ... [[ 2.41693377e-01 -3.42940837e-01 -1.41184175e+00 ... -2.13609442e-01 -1.45189512e+00 3.62537086e-01] [-1.71341226e-01 3.35005522e-01 -1.63877025e-01 ... 1.31804109e+00 -2.35629573e-01 -9.48668182e-01] [-1.12375982e-01 3.29663992e-01 8.63997459e-01 ... 5.37122935e-02 1.87178111e+00 -6.20544195e-01] ... [-6.52623773e-01 -1.07454860e+00 1.22474372e+00 ... -7.66297281e-01 -5.64060986e-01 -1.26272750e+00] [-1.26233891e-01 -1.14761561e-01 4.04801697e-01 ... -5.13547838e-01 8.64722490e-01 1.02692592e+00] [-2.68706620e-01 1.17310834e+00 -1.67121068e-01 ... 4.19958793e-02 5.41456580e-01 -1.79565787e+00]] [[ 1.11370611e+00 -1.40117943e+00 -1.19468987e+00 ... -2.57880181e-01 4.70092624e-01 -4.35959250e-01] [-9.02906358e-02 -5.69617748e-01 -4.30831313e-01 ... -1.10321462e+00 6.55395627e-01 9.88946676e-01] [ 1.76970780e+00 -6.97102249e-01 8.63086164e-01 ... 5.84925264e-02 -2.83353329e-01 1.23099411e+00] ... [ 3.74574512e-01 -2.41867274e-01 1.55998683e+00 ... 3.47758681e-02 5.88523567e-01 1.16201782e+00] [ 1.61396205e+00 -1.08069479e+00 -1.92794514e+00 ... -8.47181603e-02 -2.79020238e+00 -1.23382127e+00] [ 1.25978410e+00 -4.26313788e-01 3.17270681e-02 ... -1.02730468e-01 8.39632630e-01 9.60191786e-01]] [[ 8.49337876e-01 5.45499660e-03 -1.18784046e+00 ... 2.93378234e+00 5.41558862e-01 3.48566651e-01] [-1.15133643e+00 3.36090416e-01 -9.45270896e-01 ... 1.12182438e+00 -2.80597657e-01 -9.43497300e-01] [ 1.15739977e+00 -6.90701544e-01 -3.64405662e-01 ... -6.46562755e-01 -1.07107532e+00 4.35259312e-01] ... [-2.13213950e-01 4.73034382e-01 1.80096880e-01 ... -2.16823316e+00 -1.03920567e+00 5.82921088e-01] [ 1.12131917e+00 5.77674448e-01 6.86180830e-01 ... -2.46199235e-01 -2.61566162e-01 1.71811029e-01] [ 4.07493442e-01 2.84399509e-01 5.23699224e-01 ... 
2.12967730e+00 -1.97043240e-01 3.48913610e-01]]] [[[ 1.00846243e+00 -1.41780925e+00 1.30153048e+00 ... 1.93110955e+00 -7.81996727e-01 1.61673412e-01] [ 5.29755950e-01 7.14676857e-01 -4.08025831e-02 ... 1.48991570e-01 -1.86912403e-01 5.87061167e-01] [-3.66210610e-01 1.30860582e-01 4.80838001e-01 ... 1.22971094e+00 -1.18108571e+00 8.50877404e-01] ... [-3.74794394e-01 -1.87205553e+00 7.14673162e-01 ... 1.27837434e-01 2.39893627e+00 9.18621182e-01] [-1.71051353e-01 -5.97722292e-01 1.26086366e+00 ... 5.25841236e-01 6.84089541e-01 -1.75595903e+00] [ 2.92859346e-01 2.41771773e-01 -4.44055051e-01 ... -7.19199121e-01 1.36268783e+00 6.74368739e-01]] [[ 1.04639518e+00 5.15516162e-01 -1.19075620e+00 ... 7.85874009e-01 1.06587708e+00 3.41055870e-01] [ 8.85556817e-01 -1.34277022e+00 -5.88386357e-01 ... 2.47896850e-01 7.75573552e-01 3.57861400e-01] [ 6.31826460e-01 -1.78585935e+00 -2.64509588e-01 ... 3.94388229e-01 1.41375768e+00 -1.88789558e+00] ... [ 1.76160455e-01 1.20156384e+00 2.23628789e-01 ... -3.01387757e-01 8.73872936e-01 -2.32380360e-01] [ 5.02628922e-01 -2.05275393e+00 -7.65923321e-01 ... 3.44037265e-02 1.00321317e+00 2.44330096e+00] [-1.95185685e+00 2.24801481e-01 1.78037584e+00 ... -5.27947545e-01 -1.30020702e+00 1.50775743e+00]] [[-1.35202229e-01 -7.62783110e-01 -7.12948516e-02 ... -4.35822874e-01 6.49005055e-01 -3.83438200e-01] [-1.22254217e+00 1.33048213e+00 5.92481315e-01 ... 3.35010707e-01 -8.69733095e-02 -3.65217209e-01] [-1.77940816e-01 2.54375875e-01 -2.05790114e+00 ... -4.03067678e-01 1.64670572e-01 -3.50519158e-02] ... [-2.55746055e+00 8.16547036e-01 1.03272855e+00 ... -7.93400705e-01 -1.67574976e-02 1.10323477e+00] [-7.56246567e-01 -2.50432305e-02 -2.05651999e-01 ... 7.17272043e-01 -4.54772413e-01 5.71273386e-01] [-1.72609150e+00 -6.31954610e-01 1.98961332e-01 ... -3.23878467e-01 -3.56003165e-01 -5.22498906e-01]] ... [[-1.21616936e+00 3.08697194e-01 -2.54290283e-01 ... 
-2.06595445e+00 -6.79750070e-02 2.22996330e+00] [ 9.86555934e-01 7.73361385e-01 8.79905105e-01 ... -3.22870106e-01 1.89981665e-02 4.47109967e-01] [-1.87398100e+00 1.54760265e+00 1.52845904e-01 ... -3.18035573e-01 -5.40146112e-01 4.13900167e-01] ... [ 1.39691818e+00 -9.56852794e-01 9.17776227e-01 ... 2.10531548e-01 -6.48626864e-01 -1.95850790e-01] [-2.59725720e-01 -1.40004063e+00 7.88852796e-02 ... -4.77380753e-01 3.96747440e-01 3.19840837e+00] [ 8.00710022e-02 -1.37295437e+00 2.95746386e-01 ... -2.06772357e-01 -1.95395601e+00 -1.27306807e+00]] [[-3.83291334e-01 5.08342832e-02 3.88289839e-01 ... 7.27782965e-01 1.39699256e+00 -5.95642626e-01] [ 2.06879437e-01 -2.37406567e-01 1.39058065e+00 ... -3.46289352e-02 -5.90234339e-01 4.18593705e-01] [-1.44115782e+00 -5.59418350e-02 7.73092151e-01 ... 5.47911882e-01 4.84271646e-01 -6.00767016e-01] ... [ 9.37986076e-01 -2.41139278e-01 8.66693497e-01 ... 3.54001135e-01 -1.55272830e+00 1.16813493e+00] [ 2.44164541e-01 3.09627086e-01 -4.04609412e-01 ... -4.70153034e-01 -5.23191094e-01 6.12215936e-01] [-6.30719781e-01 -1.26632679e+00 -6.20323479e-01 ... -8.89818549e-01 1.82337070e+00 -1.14213407e+00]] [[-4.77219671e-01 1.02977514e+00 -1.70532316e-01 ... 7.68192112e-01 -9.85379755e-01 5.85739911e-01] [ 5.09214640e-01 2.62855381e-01 -7.07397535e-02 ... 6.95985794e-01 -7.11323500e-01 1.43336833e+00] [ 1.33856630e+00 -5.82237661e-01 8.96370530e-01 ... -4.18793559e-01 -3.86366099e-01 -7.85295784e-01] ... [-2.88753927e-01 1.53289568e+00 3.68304133e-01 ... 1.23273425e-01 -6.72924757e-01 -6.89404070e-01] [ 1.24980259e+00 -4.94941801e-01 -3.81793201e-01 ... -1.96193218e+00 -5.39217234e-01 4.83357221e-01] [ 7.42357075e-01 -4.49557126e-01 1.78069162e+00 ... 8.82529259e-01 6.23511195e-01 2.46406108e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 5} - params:{'n_groups': 1} ] | 0.11 | |
|
----------------------------- Captured stdout call ----------------------------- 5 graph(%self : __torch__.test_group_norm.___torch_mangle_4616.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %5 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %6 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.weight : NoneType = prim::Constant() %self.n_groups : int = prim::Constant[value=1]() %10 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %11 : int = aten::size(%x.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %12 : int = aten::mul(%10, %11) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int[] = prim::ListConstruct(%12, %self.n_groups) %14 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %15 : int[] = aten::slice(%14, %4, %self.weight, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %16 : int[] = aten::list(%15) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %17 : int[] = aten::add(%13, %16) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%17, %3) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %19 : int = aten::len(%17) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %20 : int = aten::sub(%19, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%20, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %24 : int = aten::add(%i.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %25 : int = aten::__getitem__(%17, %24) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %25) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %27 : bool = aten::eq(%size_prods, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %28 : str = aten::format(%5, %17) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%28, %6) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %29 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.weight, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%29) fw_re: [[[[[ 2.12173909e-01 -6.53969944e-01 6.06755197e-01 ... 1.85776603e+00 2.45638430e-01 -9.76070523e-01] [ 1.04023278e+00 -8.80849302e-01 -9.66305006e-03 ... 8.73054266e-01 1.45649225e-01 -3.71141434e-01] [ 6.48138702e-01 -4.72052336e-01 6.86412156e-01 ... -1.11692846e+00 3.14393342e-01 -4.31204617e-01] ... [ 8.69596422e-01 -1.30053961e+00 -3.26316446e-01 ... 
-1.14120042e+00 1.22366524e+00 -1.21305358e+00] [-1.92318916e-01 -1.41212195e-01 -1.51492536e+00 ... -2.18843961e+00 -4.26238477e-01 4.08224165e-01] [-1.60450590e+00 -1.29268038e+00 -1.32803190e+00 ... -6.94066584e-01 9.33120102e-02 1.74623072e+00]] [[-1.66059911e+00 2.58430123e-01 4.74726886e-01 ... -1.07288575e+00 2.77530104e-01 4.52837676e-01] [-1.95381150e-01 -1.57514942e+00 -4.10510987e-01 ... -1.97371587e-01 -4.28009242e-01 9.94362891e-01] [ 1.76943541e+00 -7.12736428e-01 -1.33630574e+00 ... 5.96087396e-01 8.30090284e-01 -8.86390924e-01] ... [-3.61817122e-01 -6.59410581e-02 -8.78502607e-01 ... 1.61247206e+00 -1.26312554e+00 4.85355079e-01] [ 1.84466207e+00 -2.11399937e+00 -9.67707157e-01 ... -6.08581960e-01 7.22423553e-01 -5.56877613e-01] [-2.83271790e-01 1.76167822e+00 -1.36796868e+00 ... -1.77667701e+00 -7.79295146e-01 6.22062802e-01]] [[ 5.37337303e-01 -7.64595687e-01 1.70153841e-01 ... 1.35460213e-01 -1.64925528e+00 6.10508740e-01] [ 5.36523163e-01 2.51211858e+00 -1.23258924e+00 ... -2.86938369e-01 3.67188543e-01 2.34202743e-01] [ 1.11306012e+00 1.66542673e+00 1.06238449e+00 ... -6.84512317e-01 7.69596159e-01 8.80802095e-01] ... [-4.70721900e-01 1.40335274e+00 6.26066744e-01 ... -2.65432060e-01 6.89869344e-01 2.94316316e+00] [ 9.31488872e-01 1.35983557e-01 5.43544829e-01 ... -1.75569665e+00 1.15390301e+00 -3.23615879e-01] [-2.85531014e-01 -1.48939788e+00 1.98358452e+00 ... -3.23096961e-01 2.29424453e+00 -3.17618728e-01]] ... [[ 2.78616339e-01 8.51521730e-01 1.10796845e+00 ... -9.94047105e-01 -6.77550077e-01 1.38776422e+00] [ 6.63480282e-01 1.53298989e-01 1.57731915e+00 ... 6.94357278e-03 -8.64132047e-01 -1.34428191e+00] [ 5.66240072e-01 2.78350025e-01 -7.19450712e-01 ... -1.79866970e+00 -7.92168677e-02 -1.34234846e+00] ... [ 2.37463045e+00 5.09724393e-02 2.04862133e-01 ... 8.78217995e-01 -3.72297347e-01 -8.71458650e-02] [ 2.01724887e+00 -1.20816672e+00 2.03505182e+00 ... 
-2.22255039e+00 -1.79664576e+00 -7.99920321e-01] [-2.41587996e+00 -5.87653875e-01 9.08940434e-01 ... 3.17259803e-02 -1.42192561e-02 9.09862697e-01]] [[-1.28378379e+00 -6.64108038e-01 5.21183431e-01 ... -1.26532388e+00 -2.17842888e-02 -2.22082663e+00] [ 2.00860190e+00 5.95981218e-02 2.90110886e-01 ... 4.05581683e-01 -2.08861649e-01 -7.59894252e-02] [ 1.58880424e+00 -2.15637755e+00 -7.46849120e-01 ... -5.23495138e-01 4.24540371e-01 5.47317803e-01] ... [-5.60497880e-01 1.39414084e+00 -1.41505167e-01 ... 4.83785719e-01 8.14530432e-01 -1.00026870e+00] [-4.38310266e-01 -4.16030794e-01 7.99747348e-01 ... -1.43981147e+00 -1.17703867e+00 3.48931998e-02] [ 1.53577304e+00 1.00789106e+00 1.95631528e+00 ... -4.89088207e-01 -1.08527708e+00 -1.51846445e+00]] [[-6.72134876e-01 -8.38088572e-01 1.52483273e+00 ... -1.10487974e+00 -1.01641595e-01 -1.81334627e+00] [ 9.72837150e-01 2.94059664e-01 1.16947579e+00 ... -1.32734001e+00 -5.02526760e-01 -6.79090440e-01] [ 1.50502431e+00 1.02112150e+00 -5.78136416e-03 ... -5.79100430e-01 -1.54068375e+00 -7.74490297e-01] ... [ 1.18404150e+00 7.57049561e-01 -1.07836986e+00 ... -4.14257020e-01 8.23308825e-01 -6.78702354e-01] [-1.34418917e+00 7.84762297e-03 1.53253245e+00 ... 1.91138577e+00 -9.53767121e-01 -2.83361256e-01] [ 2.01379824e+00 5.21126747e-01 9.55291092e-01 ... 9.94403481e-01 3.33568966e-03 -6.63817376e-02]]] [[[-5.54897249e-01 -1.41896188e+00 -1.71796978e-02 ... 7.12718546e-01 7.31274545e-01 -3.71338367e-01] [-2.28045732e-01 1.29973042e+00 -8.54533538e-02 ... 4.18067724e-01 7.55941927e-01 1.55417919e+00] [ 1.91819370e+00 9.54396427e-01 1.56135052e-01 ... 6.76804066e-01 9.08335373e-02 -6.96641505e-02] ... [-2.46380363e-02 -5.77682853e-01 1.26066744e-01 ... -9.89442766e-01 -3.49269301e-01 3.22731465e-01] [-2.41237834e-01 2.31289238e-01 -1.12673342e+00 ... 3.48298311e-01 -8.91626716e-01 -1.21251166e+00] [-4.04237747e-01 -5.23236275e-01 1.36554587e+00 ... 
2.03429794e+00 3.26561183e-01 1.72563720e+00]] [[-2.50914246e-01 1.40354288e+00 9.15227652e-01 ... -3.78108352e-01 -1.11986279e+00 1.23992920e+00] [-9.96889591e-01 9.45008576e-01 1.59084582e+00 ... -8.20249319e-01 3.93674940e-01 1.63032925e+00] [-1.31707954e+00 -1.36189783e+00 6.02070034e-01 ... 1.82496086e-01 2.02367023e-01 -6.48479223e-01] ... [ 1.18374169e+00 -1.18373290e-01 -1.55877605e-01 ... 8.81138980e-01 -7.71250069e-01 -4.44463342e-01] [ 1.98717237e+00 1.40004134e+00 8.03262949e-01 ... 2.07598305e+00 -1.96644926e+00 -9.57964182e-01] [-7.59481668e-01 1.86089408e+00 -2.39247411e-01 ... 1.01318443e+00 -2.50815123e-01 -2.24679466e-02]] [[ 1.21608794e+00 1.11817276e+00 -2.79921353e-01 ... 8.78786385e-01 -7.35326290e-01 5.94434321e-01] [-1.45967829e+00 1.74844754e+00 9.95260954e-01 ... -1.86366153e+00 1.73727834e+00 2.91582227e-01] [-1.99052483e-01 2.23765492e+00 -1.22117960e+00 ... -6.17739297e-02 1.53329575e+00 3.28693986e-01] ... [-4.20380145e-01 -3.73783082e-01 2.17000794e-04 ... 4.39801604e-01 2.35986367e-01 -8.73950779e-01] [ 5.57041466e-01 -7.35465884e-01 -6.56656384e-01 ... 1.16072297e+00 8.61744024e-03 9.54028010e-01] [ 1.30364215e+00 1.10262668e+00 9.69991446e-01 ... 6.19132280e-01 1.30775297e+00 -2.09428787e+00]] ... [[-3.18092972e-01 -1.34613827e-01 -6.21387541e-01 ... 4.20853406e-01 3.29794854e-01 -5.41251183e-01] [-2.10659474e-01 4.46927667e-01 -2.61763901e-01 ... 3.31309624e-02 -6.69641554e-01 6.15874529e-01] [-4.34324890e-01 3.44597071e-01 -1.57488346e+00 ... 2.02022290e+00 8.25998187e-01 8.33520353e-01] ... [ 8.80810976e-01 -2.20430541e+00 -1.72662497e+00 ... -4.98237431e-01 -5.83641469e-01 7.21602857e-01] [-5.21852791e-01 -4.07054186e-01 -2.03738481e-01 ... -2.06217229e-01 8.61290768e-02 -4.68162537e-01] [ 1.47122300e+00 -3.01399771e-02 -1.79674351e+00 ... 5.10253072e-01 -9.03280079e-01 1.56485033e+00]] [[-1.35652125e+00 8.87544394e-01 -4.40005124e-01 ... 
-2.93100923e-01 6.97555006e-01 -5.54899871e-01] [-2.39505816e+00 -2.06316367e-01 -3.06251705e-01 ... -7.72193372e-01 -1.05159128e+00 1.11886382e+00] [ 5.04845381e-01 -2.50931323e-01 -9.12476659e-01 ... 3.43508609e-02 -1.96312940e+00 -1.33234560e+00] ... [ 2.60931570e-02 -8.29961121e-01 1.14692330e+00 ... 8.94430697e-01 9.98450935e-01 7.04389453e-01] [ 5.23144305e-01 9.33558404e-01 -3.67960542e-01 ... -5.36685586e-01 -7.67046273e-01 -9.32475090e-01] [-1.03190196e+00 7.32798278e-01 -1.02392304e+00 ... 1.24149525e+00 -7.40395069e-01 6.01787329e-01]] [[ 9.22805846e-01 -1.12787783e+00 -1.97561085e-01 ... -7.78701723e-01 1.38846052e+00 2.83562660e-01] [-9.18819845e-01 2.56329864e-01 7.92653799e-01 ... -8.69385064e-01 -2.43468833e+00 -3.55843544e-01] [ 6.80479258e-02 7.28205681e-01 -5.08340895e-01 ... -1.20111263e+00 -8.19475770e-01 -6.64620280e-01] ... [-5.79400778e-01 6.67809844e-01 9.60451901e-01 ... -8.49792361e-01 -1.18276596e+00 5.79500973e-01] [ 8.71769667e-01 2.13394475e+00 1.61811933e-01 ... 9.37873185e-01 1.02823079e+00 -8.15275311e-01] [ 4.78867553e-02 -1.58867747e-01 6.41970515e-01 ... -1.23048425e+00 1.24008000e+00 -3.68076146e-01]]] [[[ 1.64934635e+00 -1.57040250e+00 -1.27635324e+00 ... -1.60265997e-01 -1.43798327e+00 4.21153344e-02] [ 5.67983985e-01 2.15069747e+00 2.61551523e+00 ... 4.08209980e-01 -2.65006423e-01 -2.11882964e-01] [ 4.39596444e-01 1.34058583e+00 4.77970093e-01 ... -1.07104075e+00 1.32129520e-01 1.25583577e+00] ... [-1.41759467e+00 1.77673459e+00 1.73006630e+00 ... -9.86726224e-01 8.86495650e-01 -7.20072627e-01] [ 2.67714471e-01 -1.46013343e+00 -4.65987958e-02 ... 4.57910180e-01 -1.62302211e-01 -5.73457897e-01] [-1.09165967e+00 -2.76604563e-01 -4.62700635e-01 ... -1.33153093e+00 -1.18351245e+00 -7.43140876e-01]] [[-4.95679945e-01 -1.06310797e+00 1.91322342e-02 ... 3.18190381e-02 1.10318375e+00 2.05723572e+00] [ 3.10438901e-01 7.77571276e-02 -5.57703972e-01 ... 
-7.47181028e-02 4.54704642e-01 -9.05067444e-01] [ 1.36898792e+00 -1.85353741e-01 1.85488060e-01 ... 3.47129675e-03 -2.57701427e-01 1.42257953e+00] ... [ 6.10128760e-01 -7.12434530e-01 1.21546701e-01 ... 3.24745268e-01 6.66766644e-01 1.06198037e+00] [ 1.02630116e-01 7.61899590e-01 -5.40494286e-02 ... -9.07656789e-01 -7.36946240e-02 4.58400667e-01] [ 9.15739179e-01 4.05097187e-01 -3.49479169e-01 ... 5.72051525e-01 3.16789687e-01 -7.07823217e-01]] [[-8.43550086e-01 -7.61461318e-01 -1.24920420e-01 ... 3.70434880e-01 1.60316005e-01 2.64019191e-01] [-2.82258600e-01 -5.00216931e-02 -5.19251451e-02 ... -1.75199106e-01 4.03905720e-01 2.47042990e+00] [-2.40321353e-01 -5.92893183e-01 -1.90813470e+00 ... 1.37942207e+00 -3.24850857e-01 1.17504728e+00] ... [-6.40842259e-01 7.45095074e-01 -2.40028605e-01 ... 1.65898252e+00 -1.20123100e+00 -2.11279559e+00] [-2.62680531e-01 -1.97616851e+00 -3.77412528e-01 ... -1.46244347e-01 -8.81270587e-01 5.13020277e-01] [-1.32465482e+00 -1.18099391e+00 -8.79175901e-01 ... -1.26711428e+00 -1.10691166e+00 2.17040634e+00]] ... [[ 5.36777973e-01 -3.92170161e-01 3.66003782e-01 ... 6.07424140e-01 5.78560121e-02 3.02064896e+00] [ 2.87236385e-02 3.83073986e-01 -6.91657186e-01 ... 3.12846482e-01 -1.29854664e-01 1.48454404e+00] [ 3.94801766e-01 6.09994471e-01 -8.40489388e-01 ... -9.48334694e-01 -9.15411711e-01 -4.94790584e-01] ... [ 1.17350280e+00 1.32302952e+00 7.22589135e-01 ... 3.68740559e-01 5.89835644e-01 4.79266673e-01] [-2.54007965e-01 -7.21699148e-02 -6.45980299e-01 ... -4.01392549e-01 -1.11411417e+00 -3.51208031e-01] [-2.55349278e-01 -2.41657481e-01 3.90691727e-01 ... -5.28706968e-01 -2.07569152e-01 1.40301621e+00]] [[ 2.45656714e-01 6.98658168e-01 1.00120473e+00 ... 5.40025771e-01 -1.92152417e+00 -1.01857328e+00] [-1.04702246e+00 4.72505391e-01 1.76396415e-01 ... 1.47713661e+00 5.73629618e-01 9.09329504e-02] [ 1.02834575e-01 7.37219274e-01 1.41946113e+00 ... 6.95303679e-01 1.83381557e-01 4.49084073e-01] ... 
[-9.00279701e-01 9.73990917e-01 2.14642930e+00 ... 7.41741732e-02 -8.30714345e-01 2.77313050e-02] [-1.56483257e+00 1.21220863e+00 1.81958094e-01 ... 7.64316320e-01 3.29038687e-02 -8.81738663e-01] [-1.84627068e+00 -9.14689839e-01 5.84256113e-01 ... 1.08765376e+00 -5.87506741e-02 -1.68095899e+00]] [[ 1.14836836e+00 -3.25919718e-01 2.13368833e-01 ... -1.73605800e+00 -4.55452383e-01 -4.00612876e-02] [-1.12053132e+00 -2.71003127e-01 6.18185937e-01 ... -1.30542610e-02 -1.00848091e+00 -5.49105942e-01] [ 7.11667359e-01 -2.12930694e-01 1.80651653e+00 ... -1.85079432e+00 -3.40420976e-02 5.48240721e-01] ... [-1.03443468e+00 7.28003025e-01 2.05354595e+00 ... 6.61939159e-02 -1.06429957e-01 -1.26807582e+00] [ 2.99365342e-01 -2.15555623e-01 2.10640401e-01 ... 1.73638475e+00 1.32576898e-01 -5.07059991e-01] [ 8.02191973e-01 1.39212117e-01 -1.72985941e-01 ... -1.72304535e+00 1.27329886e+00 -7.59142637e-01]]] [[[ 1.72667694e+00 1.44631565e-01 -1.60012865e+00 ... 1.31380403e+00 -2.02942491e+00 1.63066518e+00] [-1.07451701e+00 -7.94017911e-01 6.44913256e-01 ... 2.19200432e-01 6.75414741e-01 1.61652958e+00] [-7.79701531e-01 -7.15490282e-01 9.81000215e-02 ... 9.79150832e-01 7.96074688e-01 -1.32168964e-01] ... [ 1.33121476e-01 6.60855591e-01 2.56895900e-01 ... 1.07394171e+00 1.08949482e+00 -5.64259827e-01] [-1.35266638e+00 -1.96706757e-01 1.06839669e+00 ... -5.55741847e-01 5.80198705e-01 1.92645144e+00] [ 1.01520514e+00 -1.05069733e+00 -5.40698886e-01 ... -1.15386136e-01 -4.95668314e-02 -2.21662834e-01]] [[ 2.66284883e-01 5.67765594e-01 -1.18331957e+00 ... -1.14590049e+00 -1.79732159e-01 -1.24652886e+00] [ 4.21188742e-01 -6.89871967e-01 9.60821867e-01 ... 1.21958148e+00 9.65680897e-01 -1.21153891e+00] [ 2.08688766e-01 -1.85110295e+00 1.43544328e+00 ... -6.55006647e-01 7.26370752e-01 7.89458036e-01] ... [-2.65413642e-01 2.59447861e+00 -8.13747346e-01 ... -1.33880007e+00 8.03624868e-01 -4.56398070e-01] [ 4.05982018e-01 1.31010807e+00 -1.77982962e+00 ... 
-4.38073725e-01 -9.39098477e-01 -1.15016572e-01] [ 1.71047521e+00 -2.87532747e-01 1.80689704e+00 ... 3.94021809e-01 -5.40716529e-01 -5.87356865e-01]] [[ 2.95307815e-01 -6.65454865e-02 -1.49959922e+00 ... 9.87914622e-01 1.06457818e+00 -1.88485309e-01] [-8.77974331e-01 -1.85687673e+00 5.86790740e-01 ... 4.33275364e-02 -1.14936435e+00 -1.90323222e+00] [-8.40421796e-01 -4.26281840e-01 2.15320885e-01 ... 2.28726554e+00 1.14229679e+00 6.82413101e-01] ... [ 7.34755516e-01 6.72109663e-01 -2.43739277e-01 ... -4.93509054e-01 2.57004648e-01 -6.50200069e-01] [ 1.31773043e+00 -9.43212450e-01 -1.32110298e+00 ... 1.94393411e-01 -8.64662468e-01 -7.75687039e-01] [-6.62352145e-01 -1.59155518e-01 6.32434189e-01 ... 7.09542990e-01 1.98097154e-01 2.59431183e-01]] ... [[ 3.71352613e-01 1.73075140e-01 -1.35423636e+00 ... -3.90652359e-01 1.11043662e-01 1.28827643e+00] [ 1.84897923e+00 5.54751098e-01 1.54559720e+00 ... -5.36568046e-01 1.41511464e+00 -1.27156651e+00] [ 4.26100284e-01 -3.06441277e-01 -9.23175991e-01 ... 1.86970085e-01 -2.49525189e+00 -3.50652039e-01] ... [ 6.97803557e-01 -1.51230052e-01 5.64452291e-01 ... -4.99699116e-01 -3.28484148e-01 7.53675222e-01] [-1.19832110e+00 -9.84346628e-01 -1.28459454e+00 ... -1.60015202e+00 -2.60943532e-01 -1.78918922e+00] [-1.32164061e+00 7.59543896e-01 9.95918274e-01 ... 3.64553124e-01 -9.58823621e-01 1.38827920e-01]] [[ 7.57133663e-02 -6.05345309e-01 9.66821909e-02 ... 1.06747460e+00 -1.97084218e-01 -1.34139240e+00] [ 9.50173378e-01 1.83328733e-01 6.54599011e-01 ... -2.41857147e+00 4.15344276e-02 -6.72388136e-01] [ 6.76899731e-01 1.51602805e+00 1.06731999e+00 ... 1.63928545e+00 -5.75223744e-01 9.18733299e-01] ... [ 2.00950646e+00 -8.44257355e-01 -2.71168500e-01 ... -1.42730749e+00 -1.11101282e+00 -1.51354253e-01] [-1.99390042e+00 -1.22524679e+00 1.66584527e+00 ... -8.18642676e-01 -4.40255672e-01 -1.70356017e-02] [-4.82568890e-01 -1.79503834e+00 1.72820296e-02 ... 
1.25381088e+00 -6.22919738e-01 6.00831509e-01]] [[-9.34665918e-01 5.91345765e-02 1.56392646e+00 ... 6.98097289e-01 2.01046601e-01 -9.06523287e-01] [ 3.13403130e+00 -3.34187180e-01 -6.66610181e-01 ... 4.19831425e-01 -1.05640912e+00 1.28499079e+00] [-8.62161160e-01 -4.49739546e-01 1.43902704e-01 ... -6.59152687e-01 1.66528791e-01 1.70098677e-01] ... [ 5.55744529e-01 -3.76087934e-01 9.81168225e-02 ... -7.16528177e-01 -8.41857672e-01 -2.19260707e-01] [-9.19413328e-01 1.65655386e+00 -1.25192606e+00 ... 1.74323547e+00 7.92885065e-01 -1.47070038e+00] [-9.79137361e-01 -1.56948283e-01 -5.55427194e-01 ... 5.75111091e-01 1.18306410e+00 -3.83826077e-01]]] [[[ 3.81122604e-02 1.48694491e+00 -1.16909981e+00 ... 1.32544768e+00 -1.06143606e+00 1.63799718e-01] [ 7.60157287e-01 -1.05560303e+00 4.15704668e-01 ... -8.24026763e-01 7.91228235e-01 3.03908974e-01] [ 3.56626958e-01 -2.29554319e+00 -7.80124843e-01 ... -5.94751120e-01 -3.45420450e-01 -1.07110953e+00] ... [ 5.85273325e-01 -1.84810293e+00 -5.24181783e-01 ... 1.36775538e-01 -4.50325340e-01 1.18995810e+00] [ 2.20754333e-02 -1.32807887e+00 5.03412426e-01 ... 1.00294292e+00 -1.54787517e+00 8.78054738e-01] [-4.97987300e-01 -7.58685946e-01 -3.06670398e-01 ... -2.15262794e+00 9.03365970e-01 1.17842543e+00]] [[-4.41891432e-01 -9.33931470e-01 5.98643124e-01 ... 1.36367641e-02 -1.43793845e+00 3.49362880e-01] [ 1.89421251e-02 -6.15185559e-01 -6.40412688e-01 ... 2.55066603e-01 -9.34433103e-01 -3.37294847e-01] [ 5.75304687e-01 -1.33088005e+00 -1.85730085e-01 ... 5.09989619e-01 7.22633153e-02 -5.00231266e-01] ... [-9.85262096e-01 2.32458305e+00 5.71460724e-01 ... 2.05358505e+00 7.80727744e-01 -1.28535521e+00] [ 1.32982147e+00 -1.16087472e+00 4.00525510e-01 ... -1.75779152e+00 1.54423952e-01 1.06065404e+00] [-9.92031932e-01 -1.61961932e-02 5.08625329e-01 ... -1.16171563e+00 1.01273334e+00 -1.36602879e-01]] [[-1.48122191e+00 3.12026054e-01 -4.29146960e-02 ... 
-1.68473470e+00 -9.34667766e-01 -1.52032459e+00] [-6.01643980e-01 3.18652213e-01 -6.10068381e-01 ... 1.28757799e+00 3.18867624e-01 -1.08793616e+00] [ 6.45364702e-01 8.61403286e-01 2.84707457e-01 ... 1.34380907e-01 -1.27189946e+00 -1.32159007e+00] ... [ 6.67062327e-02 -6.43252611e-01 2.91829586e-01 ... 1.52324069e+00 2.00068331e+00 1.36307463e-01] [-1.09826255e+00 -4.67585802e-01 9.78737116e-01 ... -1.63720381e+00 -4.51033115e-01 -2.22355247e-01] [-1.36047769e+00 1.22232363e-01 -8.99297357e-01 ... -1.17904711e+00 -4.19449270e-01 4.79800552e-02]] ... [[-5.01605690e-01 -3.97291690e-01 -2.09986921e-02 ... 6.28361702e-01 -5.67638814e-01 1.24277973e+00] [ 9.79711294e-01 1.30451024e-01 -3.88576314e-02 ... 5.58903039e-01 3.42232317e-01 2.34316993e+00] [-2.52732396e-01 -8.00612450e-01 -7.26469904e-02 ... 1.11192143e+00 -1.50246274e+00 1.46226272e-01] ... [ 6.19957864e-01 -5.46869263e-02 -2.48152900e+00 ... 1.24449146e+00 -1.19793618e+00 2.57609263e-02] [ 1.95159107e-01 -2.18874767e-01 -2.67090797e-01 ... 1.49088871e+00 1.05881982e-01 6.52779162e-01] [-1.64348495e+00 -1.53528404e+00 -6.36182189e-01 ... 4.23486739e-01 4.60571736e-01 8.23682904e-01]] [[ 1.22910511e+00 -1.34637922e-01 -1.07332158e+00 ... -7.55763054e-02 -1.03145015e+00 -7.57290542e-01] [-2.32846904e+00 -2.70387679e-01 -5.09087741e-01 ... 8.84181321e-01 -8.88112485e-01 -3.96156982e-02] [ 3.18057656e-01 1.23634958e+00 1.10518122e+00 ... 1.36977458e+00 1.56497169e+00 -3.52425911e-02] ... [ 2.52985179e-01 -2.70504504e-01 4.24374223e-01 ... 3.52579206e-01 2.38779813e-01 2.98315197e-01] [ 1.54103053e+00 6.62178338e-01 8.27930808e-01 ... -2.23470360e-01 -8.91904980e-02 5.25610507e-01] [ 6.64671957e-01 2.22896099e-01 -6.52499497e-01 ... -1.19363594e+00 -9.79720771e-01 -6.42055631e-01]] [[ 3.07419270e-01 1.25363266e+00 -2.08710581e-02 ... -7.80680299e-01 -8.13775837e-01 -3.09027970e-01] [ 5.35228670e-01 2.64569730e-01 1.91295886e+00 ... 
-2.18373723e-02 8.65942299e-01 1.52259195e+00] [ 1.01970863e+00 -4.57873583e-01 5.05022109e-01 ... -2.39041179e-01 -1.38070971e-01 -1.44977880e+00] ... [ 1.42907917e-01 -4.14037667e-02 2.26917833e-01 ... -1.33670256e-01 2.09647298e+00 1.24614149e-01] [ 7.22583309e-02 1.88066605e-02 7.41062343e-01 ... 5.60249567e-01 1.83110547e+00 -1.05524373e+00] [ 7.68572867e-01 1.62287965e-01 -5.83031103e-02 ... -2.98636794e-01 -1.30584329e-01 -6.96545303e-01]]] [[[ 4.67891276e-01 2.53667742e-01 -7.91326463e-01 ... 1.23771334e+00 1.48256838e+00 1.19236618e-01] [-5.31509280e-01 -5.57570457e-01 -3.30888450e-01 ... -1.27242017e+00 -1.46647682e-02 -1.34239054e+00] [-4.79110479e-02 1.85803616e+00 1.18459868e+00 ... 4.51368779e-01 -7.94318140e-01 8.80086720e-02] ... [-3.41906339e-01 3.02981943e-01 -1.62028110e+00 ... 3.01435757e-02 9.20178771e-01 1.39374703e-01] [ 2.25101322e-01 6.51079118e-01 -2.43559971e-01 ... -1.11264706e+00 7.20223188e-02 -5.99165857e-01] [ 2.49140695e-01 1.64026046e+00 -3.01913708e-01 ... 1.37658283e-01 9.39968705e-01 -2.23426186e-02]] [[ 1.36509359e+00 8.27966273e-01 -1.18628311e+00 ... 7.02586055e-01 -2.15897512e+00 -1.05383337e+00] [ 2.61843085e-01 -1.80405617e+00 -1.03752315e+00 ... 2.83613324e-01 4.61889923e-01 4.23910677e-01] [-1.93162844e-01 -6.97424233e-01 4.60490175e-02 ... 1.07106006e+00 1.69516385e+00 -1.99626684e-01] ... [-4.42839414e-01 -5.64030111e-01 -3.14370662e-01 ... 8.95859361e-01 1.30321181e+00 7.27909565e-01] [ 8.67041588e-01 -4.03810322e-01 2.32012525e-01 ... -1.05822134e+00 -7.19414473e-01 -9.11260188e-01] [ 2.65718531e-02 -1.21541059e+00 -1.93433627e-01 ... -1.50241625e+00 9.16519165e-01 -4.25154716e-01]] [[ 1.11653991e-01 1.22639179e-01 -3.95466715e-01 ... 1.15063198e-01 -1.13520539e+00 1.04656637e+00] [ 1.62291467e+00 6.22415900e-01 -1.62697840e+00 ... 4.19070125e-01 4.78491813e-01 9.67886150e-01] [-1.44979310e+00 1.81717467e+00 1.16990721e+00 ... 4.13904250e-01 -4.53087360e-01 8.18019032e-01] ... 
[-1.18701708e+00 7.71685302e-01 1.87654781e+00 ... 1.30872741e-01 4.41912740e-01 -2.20293999e+00] [ 1.91672310e-01 -9.99485195e-01 -1.51388955e+00 ... -1.55987501e+00 1.66733396e+00 -4.98768725e-02] [-1.25233757e+00 -1.49496388e+00 -3.02673787e-01 ... 1.02955985e+00 3.83003652e-01 -1.60962248e+00]] ... [[-3.16951752e-01 -3.43004435e-01 1.23360530e-01 ... 6.13937140e-01 -2.31359935e+00 1.04178762e+00] [-5.58538556e-01 -1.22035873e+00 4.65846747e-01 ... -7.50201046e-01 1.03914344e+00 -2.52524585e-01] [-7.82106400e-01 1.28779069e-01 1.35475683e+00 ... -7.77001074e-03 -9.07896459e-01 3.30393076e-01] ... [ 9.19450283e-01 -8.56324077e-01 -7.79940903e-01 ... -1.16437888e+00 -1.43448222e+00 -4.10385281e-01] [-2.47706199e+00 -9.96344447e-01 -8.93604010e-02 ... -2.46514797e-01 -1.60588264e+00 2.29372755e-01] [-1.47188395e-01 -5.35489380e-01 2.04566526e+00 ... -7.88521096e-02 -8.40262473e-01 1.11680102e+00]] [[-9.35119212e-01 3.02895308e-01 -8.44860554e-01 ... -1.56251693e+00 1.61815181e-01 4.75861669e-01] [-5.18706977e-01 -2.56456822e-01 1.31604695e+00 ... 6.24243081e-01 1.76708019e+00 8.78712237e-01] [ 2.58342475e-01 -1.80413353e+00 4.80028510e-01 ... 1.30284324e-01 7.27941915e-02 2.44780891e-02] ... [-1.01518691e+00 -4.09423172e-01 1.65005612e+00 ... 1.12630045e+00 2.20009756e+00 -3.80541503e-01] [-1.46621192e+00 -4.97361481e-01 -1.04098213e+00 ... 3.23476404e-01 3.68390709e-01 -1.37916493e+00] [ 2.93135405e-01 -2.24972606e+00 5.97273648e-01 ... -6.23991370e-01 -8.74275982e-01 1.50696886e+00]] [[ 3.12389493e-01 -1.60474718e+00 6.96624279e-01 ... 3.69214147e-01 -4.56012815e-01 -1.65657091e+00] [ 7.80855238e-01 -7.69584700e-02 -2.36725092e-01 ... -4.95101631e-01 1.75672352e+00 7.10741520e-01] [ 2.65305489e-01 1.00377941e+00 -1.02608669e+00 ... -7.75644407e-02 -1.26738513e+00 3.14744860e-02] ... [ 1.71425998e+00 4.00847971e-01 8.07860434e-01 ... 9.65343297e-01 -5.84024847e-01 -6.01064920e-01] [ 1.66497082e-01 -1.23056605e-01 -6.31631136e-01 ... 
-7.46396780e-02 -9.89916563e-01 -1.51090667e-01] [ 1.62921882e+00 1.58864141e+00 5.46276808e-01 ... 6.31755114e-01 -6.83282435e-01 8.80511582e-01]]]] [[[[ 2.06667572e-01 -2.64679283e-01 7.40906894e-02 ... -1.08359218e-01 1.22295213e+00 3.53482276e-01] [-1.27212882e+00 2.53803372e-01 3.86138588e-01 ... 6.83006793e-02 -1.64995861e+00 5.98887742e-01] [-1.66386950e+00 -1.84129179e+00 -3.79952669e-01 ... 8.19700480e-01 -7.03396618e-01 1.02931798e+00] ... [-1.25665975e+00 -2.55621731e-01 8.33786309e-01 ... -1.48100182e-01 1.44907132e-01 -6.66340709e-01] [-9.29564178e-01 -2.15046024e+00 2.61008477e+00 ... -8.36342216e-01 6.12104475e-01 2.23727718e-01] [ 1.06640272e-01 4.82778281e-01 1.99507916e+00 ... -1.15776467e+00 9.27747607e-01 4.97549772e-01]] [[ 2.20903650e-01 -4.76303816e-01 4.49309424e-02 ... -5.55958569e-01 -1.94013441e+00 3.39935511e-01] [ 1.46472907e+00 -5.25679648e-01 -3.60421762e-02 ... -4.55890179e-01 1.79515779e+00 -2.76254788e-02] [-2.94900358e-01 9.19188797e-01 5.71354926e-01 ... -6.77494407e-02 3.27175975e-01 -3.79404634e-01] ... [ 1.11797735e-01 -1.11803389e+00 -5.01634479e-01 ... -8.22167456e-01 -1.78340897e-01 5.35097897e-01] [ 5.05367927e-02 2.90385246e-01 -9.16887105e-01 ... -4.35469039e-02 -1.51655644e-01 -6.45177782e-01] [-3.92269164e-01 -8.69886726e-02 -1.89119434e+00 ... 5.11262476e-01 7.74610221e-01 2.59762496e-01]] [[-9.87787664e-01 -8.68119299e-01 -7.46949434e-01 ... 1.14694691e+00 1.56697261e+00 2.43734717e-01] [-7.07979262e-01 -4.14106846e-02 -1.23968315e+00 ... -3.08616370e-01 2.53451228e-01 -5.15584528e-01] [ 1.36143291e+00 -1.16790915e+00 7.91117251e-01 ... 2.64534801e-01 -5.72289109e-01 -9.62539092e-02] ... [-1.89303505e+00 1.31529725e+00 -1.32048357e+00 ... -5.18065631e-01 -1.50129580e+00 1.13433313e+00] [ 1.38745874e-01 5.44431627e-01 -4.02754128e-01 ... -1.21077919e+00 1.62368506e-01 -3.75752598e-01] [-2.64788568e-01 -1.01666915e+00 -2.47400686e-01 ... -3.77597481e-01 8.87228072e-01 9.73280549e-01]] ... 
[[-4.20467928e-02 -5.34536362e-01 1.27337372e+00 ... 3.07008058e-01 2.95505226e-02 -6.06682479e-01] [ 2.34652147e-01 -1.01748414e-01 9.72926497e-01 ... -3.96422565e-01 2.25303859e-01 3.80849391e-01] [-3.32422197e-01 5.27995467e-01 1.17878482e-01 ... -7.40965083e-02 1.65520120e+00 1.23927176e+00] ... [ 2.68930960e+00 8.90425205e-01 -2.27455080e-01 ... 5.85949361e-01 -1.94065034e-01 1.07722151e+00] [-4.84489262e-01 -5.10758638e-01 -7.84004390e-01 ... 6.83474541e-01 3.12674189e+00 3.51309329e-01] [ 1.00673747e+00 5.93591750e-01 3.45223159e-01 ... -1.11169919e-01 9.40144002e-01 -8.82448733e-01]] [[-1.48238969e+00 1.41811585e+00 -2.97611952e-01 ... 2.24888250e-01 1.20846391e+00 2.27815914e+00] [-1.79827839e-01 -9.54337358e-01 1.34045672e+00 ... 2.73110062e-01 8.82737517e-01 7.59934425e-01] [ 3.10494810e-01 -6.62325799e-01 -2.14601606e-01 ... 1.70364833e+00 5.91548681e-01 -1.11245656e+00] ... [-2.92742252e+00 -7.34680593e-02 -6.86725855e-01 ... -3.29189944e+00 3.97693783e-01 2.05314830e-01] [-1.98928565e-01 6.40398920e-01 -1.12093651e+00 ... 1.48203123e+00 7.19480574e-01 6.96516275e-01] [-7.39742517e-01 1.54914534e+00 1.13489163e+00 ... -1.06173539e+00 -1.50867212e+00 4.16379154e-01]] [[-1.01947200e+00 4.92970437e-01 1.56747961e+00 ... -1.06440985e+00 -1.13720989e+00 -2.80431122e-01] [-2.18616411e-01 1.37514734e+00 -1.83366716e-01 ... -8.29562843e-01 7.94710338e-01 6.91236332e-02] [-1.12711227e+00 2.89037526e-01 -1.07638586e+00 ... 4.59863603e-01 5.48923537e-02 1.97145092e+00] ... [ 1.55564356e+00 -3.60841900e-02 -9.81260598e-01 ... -1.67231524e+00 7.65640259e-01 2.46081090e+00] [ 5.35948098e-01 1.14848888e+00 -1.09730792e+00 ... 4.77747738e-01 -6.31922543e-01 1.05764441e-01] [ 1.41954494e+00 4.09226626e-01 8.50031137e-01 ... 1.33501256e+00 -9.44722533e-01 -1.21934235e-01]]] [[[ 1.91930503e-01 -5.30634880e-01 1.56869233e-01 ... -2.20593885e-02 -1.16091526e+00 6.43517077e-01] [-1.72866181e-01 3.23942095e-01 -1.64552838e-01 ... 
-7.32903838e-01 -1.35351527e+00 -1.15667903e+00] [-1.13586891e+00 9.11095798e-01 -2.69006252e-01 ... 2.83126688e+00 9.34876621e-01 1.57228351e+00] ... [ 2.13023782e-01 -2.03774348e-01 3.99482459e-01 ... 2.31789660e+00 -6.86217770e-02 -3.62419784e-01] [-2.49759778e-01 7.11620808e-01 -7.48204648e-01 ... -2.31865719e-01 1.03360426e+00 8.74988496e-01] [-4.72308517e-01 2.04586029e+00 -2.04251528e-01 ... -3.41484919e-02 -5.45134246e-01 1.08575201e+00]] [[ 2.59699196e-01 2.44328082e-01 -2.27226764e-01 ... -1.56306779e+00 -3.51039350e-01 3.63389313e-01] [-2.22745204e+00 4.57226694e-01 -1.38414025e+00 ... -2.37754154e+00 -1.21623707e+00 1.69070624e-02] [-3.44576329e-01 1.76030612e+00 1.14407517e-01 ... 5.76067150e-01 -1.07127726e+00 3.31855565e-01] ... [ 8.93264115e-01 -1.57648146e-01 -7.73956895e-01 ... 7.26155996e-01 -1.36878097e+00 -1.08252667e-01] [-3.48937809e-01 8.68124306e-01 1.05661380e+00 ... -1.15922284e+00 7.20709383e-01 1.47784078e+00] [-2.49710083e-01 9.63613153e-01 -9.73578691e-02 ... 1.27712286e+00 7.34303892e-02 1.74693739e+00]] [[ 1.97032785e+00 -9.42686424e-02 1.83801198e+00 ... 1.44099128e+00 -8.73604655e-01 1.02704394e+00] [ 3.29753458e-01 8.14346790e-01 6.69190645e-01 ... -7.11173832e-01 -1.64034617e+00 -1.39646626e+00] [-3.22197795e+00 1.00050378e+00 8.09909225e-01 ... 1.03007287e-01 2.19614163e-01 -7.34515131e-01] ... [-5.53115487e-01 -1.15756178e+00 4.97466177e-01 ... 1.47908032e+00 8.39558363e-01 -1.38116324e+00] [ 1.63014495e+00 1.43977106e+00 6.78577423e-01 ... 2.51508653e-01 -5.01085341e-01 -1.46036029e-01] [-7.17218220e-01 -1.20238483e+00 -7.58029753e-03 ... 7.52518028e-02 -1.26508009e+00 -1.03497291e+00]] ... [[ 2.89778739e-01 1.87351584e+00 -1.14580333e+00 ... 1.05961037e+00 3.35482687e-01 3.31537068e-01] [ 1.16343534e+00 -1.25980163e+00 -9.06836748e-01 ... 7.31496930e-01 -3.29231501e-01 -1.25281775e+00] [-1.12526548e+00 -7.59191513e-01 4.86816466e-01 ... 8.63201141e-01 1.98798394e+00 4.87190008e-01] ... 
[ 1.14656174e+00 7.44345963e-01 1.24650694e-01 ... -1.97371408e-01 -8.45105886e-01 -1.43343520e+00] [ 6.19206503e-02 8.34482193e-01 -1.83819026e-01 ... 9.39463139e-01 -1.17976165e+00 4.74623561e-01] [ 1.18038952e+00 -5.37651181e-02 9.53117311e-01 ... -1.08361268e+00 1.04810560e+00 -5.80354512e-01]] [[ 5.13506114e-01 -5.74424803e-01 2.76889086e-01 ... -1.02277291e+00 -7.22419739e-01 6.10800087e-01] [-1.31057918e+00 -2.01506734e+00 -1.49483734e-03 ... 1.45523059e+00 -1.87625200e-01 1.29098654e-01] [-1.30380586e-01 1.37673676e-01 -1.84585178e+00 ... 1.85541165e+00 -5.63597262e-01 -9.98179972e-01] ... [-4.70720530e-01 -1.08898020e+00 1.03913343e+00 ... -4.35038388e-01 5.09090126e-01 -1.91142941e+00] [ 1.61405313e+00 -9.95262623e-01 4.43878442e-01 ... 1.58299053e+00 -7.70477206e-02 1.40796757e+00] [ 1.70899212e+00 2.31344938e-01 -1.71944058e+00 ... 2.66100973e-01 -6.52722836e-01 -3.97873931e-02]] [[-7.88351178e-01 -1.87278795e+00 -2.28765082e+00 ... -5.86095899e-02 3.98395061e-01 1.05325568e+00] [ 2.23970103e+00 1.33050993e-01 6.58731103e-01 ... 9.38081183e-03 -1.46808624e+00 7.48494923e-01] [ 2.71273792e-01 2.77428478e-01 -1.13428056e+00 ... -7.49900460e-01 7.56279528e-01 9.29027319e-01] ... [-8.01920354e-01 -1.87369204e+00 8.67097795e-01 ... 1.45141232e+00 9.27547395e-01 1.83946824e+00] [ 9.50133353e-02 -9.69399571e-01 -1.86712158e+00 ... 6.93307161e-01 -1.13533676e+00 -5.62055349e-01] [-2.06150308e-01 6.59567118e-02 -2.01866937e+00 ... -8.49107504e-01 -6.90150738e-01 1.88551724e+00]]] [[[ 9.53622997e-01 -1.80390686e-01 7.31876433e-01 ... -6.72995389e-01 2.58609295e-01 8.56374025e-01] [ 2.23226428e-01 2.94660062e-01 -1.05672038e+00 ... -7.12875903e-01 -6.84014916e-01 -3.87120545e-01] [-7.04576254e-01 -9.68466997e-01 1.03227407e-01 ... 5.93725562e-01 -1.15598667e+00 7.90312767e-01] ... [ 7.52403259e-01 -1.94058105e-01 1.38077289e-01 ... -2.19747066e+00 -1.57776058e+00 3.14288318e-01] [ 5.18997386e-02 -1.49131700e-01 1.32970607e+00 ... 
1.66985929e+00 -7.47146904e-01 7.03787744e-01] [ 3.73103410e-01 9.63592604e-02 -2.27628016e+00 ... 1.90739572e-01 -1.15539610e+00 -1.83077085e+00]] [[-4.18658167e-01 -9.44526017e-01 6.44436240e-01 ... -1.92496514e+00 -1.71235991e+00 -6.07771337e-01] [-4.23359483e-01 4.78179604e-01 -7.57127106e-01 ... -6.80064559e-01 -6.16788566e-01 5.20448565e-01] [ 1.86993492e+00 -1.15316617e+00 -3.47279012e-01 ... -3.43504578e-01 7.43005931e-01 1.17203963e+00] ... [-1.22293687e+00 -1.32682502e-01 -6.29219890e-01 ... 8.73293459e-01 9.67995942e-01 -1.40651536e+00] [-8.63266528e-01 2.23952457e-01 -2.88336873e-01 ... 4.68522847e-01 -3.72127563e-01 -7.91044891e-01] [ 1.54144943e+00 4.06220496e-01 1.28616527e-01 ... -1.10680902e+00 1.18888855e+00 -3.64762455e-01]] [[ 2.95600563e-01 7.30619252e-01 -2.54074186e-01 ... 6.54203236e-01 -3.14300627e-01 -5.86457908e-01] [-1.59130538e+00 -1.51702964e+00 4.29751277e-01 ... -1.45093754e-01 -6.27419502e-02 -5.22055745e-01] [-5.90145707e-01 6.79940581e-01 -1.26675355e+00 ... -9.49050605e-01 5.81484020e-01 4.95303243e-01] ... [-5.14364898e-01 -5.34796774e-01 9.16819870e-01 ... 9.39088404e-01 -1.26371729e+00 -1.23688698e-01] [-7.07484961e-01 -4.74097803e-02 -1.10218249e-01 ... -4.91767019e-01 -3.44909787e-01 1.03926802e+00] [-2.22500995e-01 -3.06852162e-02 8.04801285e-01 ... 5.08351386e-01 6.07424736e-01 -2.48190492e-01]] ... [[ 4.01781559e-01 -6.43331468e-01 -1.21078845e-02 ... 7.74384916e-01 -1.28258276e+00 -1.37440884e+00] [-2.68448919e-01 6.05629802e-01 -5.61545253e-01 ... -9.87516105e-01 4.47557047e-02 -3.24054629e-01] [ 2.11109662e+00 6.62069559e-01 -3.41239154e-01 ... -3.24228615e-01 -2.93989271e-01 1.05968213e+00] ... [ 2.11466387e-01 -1.07588232e+00 8.53271604e-01 ... -4.71323490e-01 8.49506617e-01 -8.80894005e-01] [-2.59414768e+00 -4.54543561e-01 -5.38383782e-01 ... -5.97846627e-01 3.87320518e-01 -1.07098830e+00] [-8.60352695e-01 -1.85505772e+00 -7.43128419e-01 ... 
-3.27163428e-01 1.60587990e+00 -3.35177407e-02]] [[-8.18976343e-01 -1.34825933e+00 7.16292739e-01 ... 1.51329863e+00 1.77553773e-01 -1.85084522e+00] [ 2.84820765e-01 2.02608156e+00 -5.03987312e-01 ... 2.07867518e-01 -6.65378273e-02 -7.37760782e-01] [-4.94812906e-01 1.13109910e+00 -6.02062941e-01 ... -3.84175360e-01 -3.71873640e-02 5.06129980e-01] ... [-3.74184906e-01 -1.41674113e+00 -1.81456938e-01 ... -4.67789650e-01 -1.37695408e+00 -9.15603757e-01] [ 1.93144426e-01 9.93545771e-01 -6.49878442e-01 ... -5.08077145e-01 -4.28443789e-01 -4.53709096e-01] [-1.39276409e+00 1.39945164e-01 1.01331556e+00 ... -1.94608796e+00 9.29236710e-01 1.15883744e+00]] [[-8.14822793e-01 -1.17868054e+00 -2.41098359e-01 ... 1.53839365e-02 -1.06769407e+00 -2.19808623e-01] [ 2.57548481e-01 1.12279221e-01 8.99171531e-01 ... -9.65883315e-01 -1.51021039e+00 4.85546649e-01] [-1.04170454e+00 -7.65689090e-02 -1.48165262e+00 ... -8.40455592e-01 -1.27149832e+00 -4.58315074e-01] ... [ 1.14775288e+00 -3.15305501e-01 -8.89545023e-01 ... -4.39223707e-01 -3.69515270e-01 2.49540389e-01] [-4.84721929e-01 -1.02480829e+00 -2.49224171e-01 ... -9.02030945e-01 7.85711765e-01 1.37383914e+00] [-1.74945390e+00 1.26655445e-01 8.01746070e-01 ... 5.24932504e-01 1.67423642e+00 -5.05467236e-01]]] [[[ 1.20763868e-01 8.27019870e-01 9.78700578e-01 ... -2.11677670e+00 3.02351594e-01 -3.67287666e-01] [-3.65977347e-01 -2.90901005e-01 3.28410029e-01 ... 3.21206182e-01 4.59188253e-01 9.91900444e-01] [ 1.09564722e+00 9.05727565e-01 9.20417905e-01 ... 3.78491729e-01 9.35762703e-01 -2.62069553e-01] ... [-8.94387662e-01 1.00849068e+00 -1.29453942e-01 ... -1.12145412e+00 -1.52769828e+00 4.23987418e-01] [-3.63571584e-01 5.25398612e-01 1.15372574e+00 ... 5.83407402e-01 3.07219356e-01 -1.92154694e+00] [-1.52066088e+00 4.51054245e-01 6.72825813e-01 ... 2.10890770e-01 -1.47992277e+00 1.78361267e-01]] [[-6.25315368e-01 1.14751136e+00 -1.01571846e+00 ... 
-6.13015413e-01 5.44305742e-01 8.25439990e-01] [-5.39522946e-01 -4.04984117e-01 -8.90653551e-01 ... -4.21801597e-01 -5.40019572e-01 9.52754080e-01] [ 1.36873221e+00 1.66890129e-01 5.73816121e-01 ... 6.48367345e-01 -2.49245495e-01 1.35673928e+00] ... [ 1.05681407e+00 3.69177938e-01 -1.19803526e-01 ... 8.57699394e-01 8.96856010e-01 -4.49316353e-01] [-6.15041077e-01 -3.25057417e-01 -1.71703935e-01 ... -7.11938500e-01 6.19132876e-01 -7.74370492e-01] [ 2.89905101e-01 -2.11447263e+00 1.05706787e+00 ... 4.96414267e-02 -1.34186876e+00 2.02960062e+00]] [[ 4.83425647e-01 1.93029428e+00 -2.35942200e-01 ... -4.49886203e-01 2.84076333e-01 -5.83725655e-03] [-7.68234611e-01 4.97821957e-01 -1.99997455e-01 ... 1.94471157e+00 -6.99675560e-01 1.13527930e+00] [ 3.14734101e-01 4.58027154e-01 -6.80557549e-01 ... -5.54813921e-01 1.16986442e+00 -7.64942467e-01] ... [ 1.43376455e-01 8.18300188e-01 3.87491971e-01 ... -6.88617229e-01 -1.25725746e+00 -4.16016966e-01] [-9.02495682e-01 -8.16656649e-01 -5.81749916e-01 ... 1.61822617e+00 -1.37695551e-01 -3.14967394e-01] [ 7.83180773e-01 8.85470510e-01 -3.65881830e-01 ... 9.38965753e-02 1.40324998e+00 8.74607265e-01]] ... [[-8.65208089e-01 1.44203007e+00 -8.05992723e-01 ... -1.28516388e+00 3.05902421e-01 -2.22936064e-01] [-3.44154924e-01 -1.57520449e+00 -2.31987555e-02 ... -5.31620145e-01 -1.15754652e+00 3.46713930e-01] [ 3.74497503e-01 -7.21379101e-01 -3.69041234e-01 ... 1.76825535e+00 1.54634789e-01 5.47314644e-01] ... [ 2.61802435e-01 1.81438780e+00 -1.44908845e+00 ... -2.76567250e-01 -8.86132836e-01 -1.14600110e+00] [-3.84081095e-01 4.12902117e-01 9.09493744e-01 ... 4.35967118e-01 -2.90281326e-01 1.40965164e+00] [-5.39392757e-04 1.96982056e-01 -1.82049990e+00 ... -2.51428097e-01 1.10521257e+00 -9.99518454e-01]] [[-1.12486236e-01 1.04214597e+00 6.52067244e-01 ... 8.51716697e-01 8.25984597e-01 -6.93717539e-01] [-8.68814439e-02 -1.66971743e+00 -1.23017490e+00 ... 
1.05283916e+00 5.32503068e-01 1.75066864e+00] [ 5.98351657e-01 -1.09605837e+00 3.72211337e-01 ... 5.09895086e-01 -6.54194474e-01 -1.16475619e-01] ... [ 1.19200516e+00 1.21905565e+00 -1.20421386e+00 ... 1.46329269e-01 1.88017035e+00 -7.54193366e-01] [-6.67666912e-01 -9.58840072e-01 1.11553991e+00 ... -3.61957848e-01 -6.87870562e-01 -8.79406989e-01] [-3.14598978e-01 -2.35739636e+00 8.34421158e-01 ... 5.02961397e-01 -2.65496522e-01 6.85774505e-01]] [[-4.56567615e-01 9.76413429e-01 1.75386393e+00 ... -4.93614435e-01 -1.11113846e+00 -2.55582023e+00] [ 9.82047096e-02 -5.81952810e-01 -1.00044906e-01 ... 3.66731554e-01 -9.22060549e-01 1.33604574e+00] [ 1.85734010e+00 -1.69544899e+00 1.79687008e-01 ... -5.61732650e-01 -1.49392498e+00 -6.35030925e-01] ... [-1.27291369e+00 2.17200413e-01 -1.16607630e+00 ... 1.07814336e+00 4.48553860e-01 -6.86378121e-01] [-1.04314196e+00 3.00773352e-01 9.57079589e-01 ... -3.01811665e-01 8.62680554e-01 -2.66021818e-01] [-9.31853056e-01 1.58123165e-01 4.15208668e-01 ... -9.05573070e-01 -7.93466568e-01 1.29566801e+00]]] [[[-6.02081597e-01 -5.94046712e-01 -1.41732001e+00 ... -1.37965131e+00 1.59616983e+00 -2.38219410e-01] [-7.24376798e-01 -1.33532524e+00 -1.07766771e+00 ... -1.16869581e+00 9.98495281e-01 -8.19873810e-02] [-1.58870173e+00 2.63360411e-01 1.84711432e+00 ... -5.78419752e-02 -5.10970727e-02 6.84966981e-01] ... [-5.48192978e-01 8.32117021e-01 -2.35998482e-01 ... 8.77859354e-01 1.67812622e+00 -4.22065258e-02] [ 7.90827930e-01 -1.47773862e+00 1.20602858e+00 ... 6.24508560e-01 1.26687801e+00 3.31600785e-01] [-1.23999810e+00 1.36081719e+00 9.53021348e-01 ... -5.33783495e-01 2.47355270e+00 3.82425994e-01]] [[-2.93040419e+00 7.54220843e-01 -7.16635883e-01 ... 1.05211623e-01 2.59997755e-01 -2.20736408e+00] [ 1.56170279e-01 5.87006450e-01 -6.64979964e-02 ... 9.42820311e-01 3.49124968e-01 -3.06512445e-01] [ 3.53885382e-01 6.22912347e-01 -3.06488931e-01 ... -1.54870713e+00 -7.13519603e-02 7.31498182e-01] ... 
[ 7.84497336e-02 7.79489458e-01 5.85257053e-01 ... -1.82127595e+00 4.87407148e-01 -1.36021852e+00] [-1.08629370e+00 -7.55476534e-01 -9.10715342e-01 ... -1.48520744e+00 -1.30495155e+00 -1.19015932e+00] [-9.02404487e-01 4.08843905e-01 5.44260681e-01 ... -1.01842368e+00 2.40940857e+00 -3.98833364e-01]] [[ 4.40387905e-01 -8.47291708e-01 -6.60317481e-01 ... 7.62690604e-01 -2.00039715e-01 2.48768598e-01] [ 3.59836459e-01 -9.80687618e-01 6.62862360e-01 ... 1.06594253e+00 1.37882566e+00 -4.25717467e-03] [ 1.13194668e+00 -1.35539817e-02 1.82517505e+00 ... 3.36210221e-01 8.38890314e-01 2.89846253e+00] ... [-2.00326490e+00 -1.36127114e-01 9.84892696e-02 ... 1.22468793e+00 -1.52317989e+00 -1.98749468e-01] [-5.92649400e-01 1.15274870e+00 -1.55087233e+00 ... -3.99755657e-01 1.57649621e-01 1.32520711e+00] [ 5.87299047e-03 1.10603666e+00 4.33470041e-01 ... 1.43049288e+00 7.16164410e-01 6.81770205e-01]] ... [[-1.73734677e+00 1.75181401e+00 8.17178249e-01 ... 2.82941073e-01 8.50992024e-01 5.72614610e-01] [-1.68058884e+00 -1.49483430e+00 8.51757407e-01 ... 7.53060341e-01 -1.77625799e+00 -1.74356115e+00] [ 6.89407468e-01 -7.10558593e-01 1.87200379e+00 ... -4.89293367e-01 -7.74583340e-01 -7.83985928e-02] ... [-1.68468654e-01 1.21850908e-01 -9.35351133e-01 ... 1.81990528e+00 -2.06519771e+00 -7.02612519e-01] [ 2.38435268e+00 -1.66183341e+00 -1.28835678e-01 ... 3.06615740e-01 -8.45475197e-01 -4.59020972e-01] [-2.20653281e-01 1.92458823e-01 1.97820389e+00 ... -1.25856245e+00 -1.14866756e-01 -1.46128023e+00]] [[ 3.84714931e-01 -1.24152064e+00 -4.24875468e-01 ... -1.05226204e-01 2.82658935e-01 8.75565931e-02] [-1.15839042e-01 1.25971571e-01 -1.29227817e+00 ... -1.42939019e+00 -1.35768902e+00 2.45879069e-01] [ 1.83516562e-01 7.41596222e-01 5.13454437e-01 ... 1.45098519e+00 -1.39312577e+00 -5.13722837e-01] ... [-9.51221958e-02 -1.61020100e+00 1.01974976e+00 ... 1.92909741e+00 5.41058421e-01 -1.10970831e+00] [ 4.30475444e-01 1.07926786e+00 -1.17457819e+00 ... 
-1.75107777e+00 -7.38496363e-01 1.60405450e-02] [ 2.98136926e+00 -7.76714683e-01 2.08180055e-01 ... -1.76984692e+00 5.61688421e-03 6.77843750e-01]] [[ 1.02721679e+00 9.33940887e-01 -1.60020888e+00 ... -1.07332337e+00 -1.28438497e+00 -9.42647159e-02] [-2.86072898e+00 -5.24433553e-01 1.23775125e+00 ... 9.67592299e-02 -2.38018170e-01 -1.13143492e+00] [-9.88471389e-01 -1.43009949e+00 -2.77972631e-02 ... 3.74866515e-01 3.39029282e-01 8.54863882e-01] ... [ 1.50050783e+00 -2.59420425e-01 -9.65866208e-01 ... -2.95092374e-01 -1.35842001e+00 6.56699389e-02] [-1.30403411e+00 -1.28483033e+00 -3.77191514e-01 ... 6.83886707e-02 1.20547100e-03 -1.27910876e+00] [ 8.37084949e-01 -8.74578714e-01 9.88755405e-01 ... 1.48315835e+00 -7.64621139e-01 1.51079446e-01]]] [[[ 1.31558506e-02 4.01280612e-01 1.39903575e-02 ... -1.18282247e+00 2.36928314e-01 -1.14098954e+00] [-8.20199624e-02 2.63587441e-02 2.83574909e-01 ... -1.20142043e+00 1.61731589e+00 -1.30864096e+00] [-1.60801673e+00 1.53157365e+00 -1.85404252e-02 ... -6.29537180e-02 7.51082748e-02 -1.04008949e+00] ... [-2.33757243e-01 -2.02116513e+00 -4.57154602e-01 ... -6.40141487e-01 2.08410963e-01 1.61730695e+00] [-1.37016690e+00 5.29959321e-01 1.64672947e+00 ... -2.26976573e-01 -4.60952550e-01 2.30193622e-02] [ 7.32472479e-01 2.17088923e-01 6.02563322e-01 ... 7.46850133e-01 9.12431359e-01 9.42446962e-02]] [[-1.57461929e+00 4.91484523e-01 -7.10497737e-01 ... 3.51874113e-01 -1.00222361e+00 -1.02690712e-01] [-2.09321737e+00 -7.09664635e-03 5.60872734e-01 ... -3.80940408e-01 -6.41477048e-01 -8.93071353e-01] [ 1.88773072e+00 1.70128018e-01 -2.25654729e-02 ... 1.63564134e+00 4.12418872e-01 -9.61271346e-01] ... [ 2.41287664e-01 3.49938758e-02 -1.28659058e+00 ... 1.67176783e+00 -2.67150015e-01 -2.88755447e-02] [-3.81119773e-02 -3.24916214e-01 3.22743148e-01 ... -1.94267184e-01 2.57221878e-01 -1.09031689e+00] [-8.78403962e-01 -2.66375184e-01 1.31601572e+00 ... 
-1.33821428e+00 1.12288058e+00 7.66819477e-01]] [[ 2.65914607e+00 6.59075797e-01 -3.98884982e-01 ... 1.22664467e-01 1.15642452e+00 1.53343871e-01] [ 1.13900125e+00 -4.70025539e-01 -4.00727570e-01 ... 5.08365691e-01 6.44261777e-01 -7.44854569e-01] [ 8.03224564e-01 -1.15140951e+00 -2.86703992e+00 ... -1.58689129e+00 6.51686668e-01 -7.11767316e-01] ... [-6.24876618e-01 1.09430194e+00 1.52733132e-01 ... 5.39156020e-01 9.55669224e-01 -1.44007146e+00] [-6.24575019e-02 -2.05281556e-01 5.57914793e-01 ... -8.75536919e-01 -1.50003648e+00 -1.03100941e-01] [ 6.96090236e-02 -3.88018489e-01 -9.96577963e-02 ... 5.39370894e-01 -1.13210034e+00 6.85560882e-01]] ... [[-1.60902143e+00 -4.68555748e-01 -4.14724708e-01 ... 6.41960800e-02 2.25360179e+00 -6.85790360e-01] [-2.90467888e-01 1.49045587e-01 8.43801022e-01 ... 3.56854230e-01 -1.35236692e+00 7.57872880e-01] [ 6.46277428e-01 -1.84488237e-01 -2.61189401e-01 ... -4.48559761e-01 -1.12263930e+00 -4.96017665e-01] ... [-9.19827163e-01 1.18583226e+00 -2.64050066e-01 ... 1.51019394e+00 4.35720831e-01 -1.28647566e+00] [ 2.44737172e+00 2.14921618e+00 1.57233790e-01 ... -1.05600941e+00 -5.91398239e-01 -8.31767738e-01] [-8.50565314e-01 9.39395607e-01 6.75310791e-01 ... -9.70504403e-01 -1.28626573e+00 -1.16949439e+00]] [[ 1.62513345e-01 6.44627988e-01 -3.55228841e-01 ... 4.42972630e-01 7.31825829e-01 2.14122701e+00] [ 3.86918932e-01 8.09399903e-01 2.19240928e+00 ... 3.10528278e-01 -9.30772349e-02 1.11101663e+00] [ 8.16518903e-01 4.69337285e-01 1.21445584e+00 ... -3.38800192e-01 -9.18025732e-01 -7.13768378e-02] ... [-5.72139382e-01 -1.98056906e-01 -1.37635563e-02 ... 1.48500991e+00 -8.75279546e-01 -2.58552045e-01] [ 8.44075561e-01 2.02640033e+00 -3.05907488e-01 ... 1.32801580e+00 3.21285874e-02 1.49089468e+00] [-9.98900592e-01 -4.07066226e-01 2.21707132e-02 ... -1.68590653e+00 4.32747640e-02 -3.65256816e-01]] [[-1.82579517e+00 -3.02114248e-01 -8.12276423e-01 ... 
4.66650009e-01 -2.10413194e+00 8.48827183e-01] [ 1.01055229e+00 -4.20425802e-01 1.00816655e+00 ... 9.43720758e-01 -3.57220113e-01 -4.65635270e-01] [-1.65543914e-01 -6.97284281e-01 1.29850554e+00 ... 1.43835473e+00 9.46403623e-01 -6.91554487e-01] ... [ 2.19020229e-02 -1.23964226e+00 1.20897591e+00 ... -3.82464767e-01 1.92065299e-01 3.65685582e-01] [-2.99517512e-01 -4.93246853e-01 -1.69416356e+00 ... -1.71526444e+00 1.94774330e-01 1.16504908e+00] [ 1.23940349e+00 -1.46825790e+00 4.70272303e-01 ... 1.40129471e+00 3.97697628e-01 1.65193212e+00]]]] [[[[ 1.23323512e+00 -5.92369795e-01 -3.82493198e-01 ... 1.57969236e+00 -2.38125876e-01 -1.31276739e+00] [-3.57378274e-01 6.97614253e-01 9.52830791e-01 ... 8.76951337e-01 -4.11116898e-01 1.34587383e+00] [-4.88036424e-01 -1.36388099e+00 -1.20908225e+00 ... 7.04106763e-02 1.21434319e+00 -9.43076909e-01] ... [ 9.78540659e-01 -1.02512693e+00 1.20208666e-01 ... -1.94214627e-01 7.18377382e-02 -2.24389836e-01] [-1.74473834e+00 -3.04792561e-02 4.38361615e-01 ... 1.55304432e+00 1.17161751e+00 -2.58954167e+00] [ 5.66697717e-01 3.66875529e-01 9.59797323e-01 ... -9.70815003e-01 -3.06181103e-01 5.89311302e-01]] [[ 3.15551996e-01 6.10315919e-01 -2.39857292e+00 ... -1.12881279e+00 5.85308671e-01 1.39757073e+00] [ 7.58175313e-01 -5.02248049e-01 -3.06897807e+00 ... 6.57462955e-01 -1.13724089e+00 -9.19555306e-01] [-3.41358721e-01 -3.59268457e-01 -1.34351993e+00 ... 9.18600708e-02 8.68714228e-02 -1.02976787e+00] ... [ 4.08145010e-01 1.26874900e+00 4.06572819e-01 ... -1.03783858e+00 8.17284107e-01 -5.33232629e-01] [ 2.04957056e+00 -1.65471733e+00 2.25635719e+00 ... 9.89819348e-01 -7.86351621e-01 -1.19606686e+00] [ 9.60854053e-01 -3.87807339e-01 6.94653094e-01 ... -4.05494660e-01 8.07480156e-01 -2.27209508e-01]] [[ 5.85759640e-01 -5.44750869e-01 -1.28146863e+00 ... 1.83366358e+00 -1.66755831e+00 2.28518531e-01] [ 7.75380313e-01 7.61840045e-01 -1.65965939e+00 ... 
-1.16502762e+00 3.94952655e-01 -3.24094796e+00] [ 5.43846667e-01 -4.08110052e-01 1.74398494e+00 ... 2.11789417e+00 -3.24546881e-02 -6.19434357e-01] ... [ 5.17748535e-01 -2.05390528e-01 7.13772774e-01 ... 1.23120368e+00 -6.31003618e-01 -1.91408053e-01] [-1.36532617e+00 -8.79153728e-01 2.24785471e+00 ... -2.26697135e+00 6.51571989e-01 -9.18032825e-01] [-2.05171514e+00 4.10707504e-01 2.78859305e+00 ... -5.26428856e-02 1.36438906e+00 1.96186528e-01]] ... [[-6.95043623e-01 7.87300706e-01 1.11082661e+00 ... -2.28976533e-02 3.44765961e-01 1.22104637e-01] [-9.29076225e-02 1.53926384e+00 3.06722879e-01 ... -5.29127121e-02 -7.97016323e-02 1.77234292e-01] [ 5.80816746e-01 -1.01856565e+00 -5.59776068e-01 ... -1.34296203e+00 -7.97224343e-01 4.94813234e-01] ... [-1.01449108e+00 1.68803430e+00 1.10684443e+00 ... 5.49471915e-01 -1.08938515e+00 7.87671208e-01] [ 4.55784023e-01 -5.89005232e-01 -4.92163002e-01 ... 6.02514967e-02 7.64396131e-01 5.55210173e-01] [ 5.46140730e-01 -1.00749612e+00 1.01629861e-01 ... -1.96708068e-01 -5.69219515e-02 -7.39310980e-01]] [[-1.12327790e+00 3.23240250e-01 -1.36971593e+00 ... -1.79433382e+00 -6.55200541e-01 9.05515134e-01] [-4.78085011e-01 -9.04835880e-01 -3.40139091e-01 ... 1.95652530e-01 1.34294197e-01 5.54476641e-02] [-1.74069476e+00 3.64239722e-01 -2.15346709e-01 ... -5.45705199e-01 -1.69555962e-01 -5.86713672e-01] ... [-1.90374464e-01 -3.26616406e+00 -7.27030337e-01 ... 1.34652400e+00 7.10754812e-01 -2.02841854e+00] [ 1.97831500e+00 1.34686732e+00 7.07914054e-01 ... -1.07147753e-01 1.06575453e+00 -8.67629528e-01] [ 1.22636735e+00 6.08076751e-01 6.75554037e-01 ... -9.34183449e-02 1.25215483e+00 -4.77159142e-01]] [[-8.71403992e-01 -1.36350179e+00 5.11147559e-01 ... -5.89190304e-01 -4.52949911e-01 -8.29600692e-01] [ 8.22145700e-01 2.43693829e-01 -1.06348872e+00 ... 1.24091089e+00 -3.18931639e-01 7.81398833e-01] [ 3.61252755e-01 3.07243645e-01 -2.53461242e-01 ... -1.92626691e+00 -8.98219228e-01 1.22531474e+00] ... 
[ 3.99703562e-01 -5.70364535e-01 -5.47709525e-01 ... 1.24709499e+00 -1.43715930e+00 -1.53088629e-01] [-9.41608071e-01 -5.36349773e-01 1.66098654e+00 ... 2.50633061e-01 -1.70048928e+00 -9.06331599e-01] [-1.07201099e-01 3.28672320e-01 8.13732445e-01 ... 1.02233458e+00 -1.14653838e+00 -8.43030632e-01]]] [[[-1.99292257e-01 -2.44174019e-01 5.27302980e-01 ... 8.29736590e-01 6.95907101e-02 -2.07615089e+00] [ 1.73912394e+00 -6.11447245e-02 -1.58457741e-01 ... 6.36339664e-01 -1.85358357e+00 1.48410642e+00] [ 2.83758759e-01 -7.68213570e-01 -7.85294414e-01 ... -4.08806443e-01 -3.58740985e-01 1.31747246e-01] ... [ 1.15496266e+00 4.89925086e-01 -6.43652380e-01 ... 1.51456743e-01 1.41802505e-01 5.81032217e-01] [-1.79085433e+00 -2.08211541e-01 -9.55031991e-01 ... -5.10650992e-01 1.40240490e+00 -1.32749185e-01] [-1.58007848e+00 -2.38886863e-01 -9.40107167e-01 ... -8.95949721e-01 -2.07107782e-01 6.60413861e-01]] [[ 1.94848210e-01 -1.23052955e+00 -7.30633795e-01 ... 2.57281303e-01 1.25650191e+00 -1.79818475e+00] [-8.64759326e-01 -1.87927568e+00 3.74456584e-01 ... -1.30349681e-01 1.19263959e+00 -1.00722522e-01] [ 7.34839857e-01 4.81009096e-01 -3.12831640e-01 ... -1.87124237e-01 -1.24904621e+00 -8.53290856e-01] ... [ 4.47807401e-01 1.79635966e+00 1.21335578e+00 ... 7.95196712e-01 2.44477153e+00 4.87428397e-01] [-1.89167142e+00 1.82332408e+00 2.83809286e-02 ... 1.85709059e-01 -2.43125677e-01 2.80836165e-01] [ 4.76822048e-01 -9.43690479e-01 4.38408405e-01 ... -3.76912564e-01 -5.05963087e-01 1.36234117e+00]] [[ 9.06827092e-01 1.46823347e-01 -9.56331491e-01 ... -5.07975280e-01 -1.06165670e-01 1.39818716e+00] [ 1.39629006e+00 -4.31789517e-01 1.36277175e+00 ... -4.67828393e-01 -1.22116365e-01 3.20492089e-01] [ 5.56151092e-01 -1.07760203e+00 8.38512361e-01 ... -6.83139339e-02 -3.62557858e-01 -1.39153123e+00] ... [-1.80738461e+00 -2.00004530e+00 -3.34668458e-01 ... 2.42159769e-01 -2.34327048e-01 -8.30332041e-01] [ 9.86381829e-01 2.83307123e+00 1.78498852e+00 ... 
-1.11301386e+00 1.21628666e+00 1.52603149e-01] [ 8.59207392e-01 2.41804695e+00 9.95200098e-01 ... -2.36511633e-01 1.94675237e-01 4.02861178e-01]] ... [[ 5.26562870e-01 -3.81431997e-01 1.45357847e+00 ... -1.56162068e-01 -6.69771731e-01 3.11314166e-01] [-1.31942317e-01 -5.15506506e-01 -1.92893580e-01 ... -7.92276323e-01 -7.64997184e-01 -9.06961381e-01] [ 6.48517311e-01 -2.20618963e+00 2.13108938e-02 ... -1.31417382e+00 1.26781821e+00 -1.68037724e+00] ... [-6.83493972e-01 -1.58783245e+00 3.25858176e-01 ... 1.41958165e+00 -2.95870781e-01 2.24571705e-01] [-1.83093524e+00 -2.29872733e-01 -1.62534022e+00 ... -1.23344958e-01 8.60444725e-01 -4.92367387e-01] [-1.89675844e+00 -1.03959465e+00 1.86307669e+00 ... -6.35155290e-02 -5.16636848e-01 4.73163813e-01]] [[-1.73132360e+00 3.71608473e-02 -5.25125086e-01 ... 5.64853251e-01 2.41342708e-01 7.63946593e-01] [-1.11741029e-01 1.80669570e+00 9.39510703e-01 ... -2.27151692e-01 8.45427155e-01 -3.95124376e-01] [ 2.18798056e-01 2.85426474e+00 -1.72677207e+00 ... -9.76272300e-02 3.00188512e-01 -2.08920670e+00] ... [-8.27631429e-02 3.13713551e-02 6.65946603e-01 ... 6.55785084e-01 -6.31808877e-01 1.24091434e+00] [ 1.03435969e+00 -4.53088343e-01 5.93459845e-01 ... -6.18472882e-03 -5.83477855e-01 2.37755463e-01] [-6.17238820e-01 -2.75851083e+00 -1.12214375e+00 ... -6.37178540e-01 1.11288273e+00 2.55052149e-01]] [[ 4.92838979e-01 -6.98591590e-01 7.84847558e-01 ... 2.51913995e-01 -3.29211712e-01 -1.63471177e-01] [ 6.21001124e-01 -1.71785164e+00 9.53293741e-01 ... 1.85326612e+00 -1.51103616e-01 3.16508263e-01] [ 3.56601149e-01 -3.38633284e-02 6.20319247e-01 ... -1.10310650e+00 -1.69890249e+00 -1.00483429e+00] ... [ 1.37349653e+00 2.19930280e-02 5.54792941e-01 ... 4.27298620e-02 2.50167847e+00 5.50373256e-01] [-4.71383959e-01 -9.11017179e-01 5.02838254e-01 ... -1.03183639e+00 1.14653075e+00 -2.03875646e-01] [ 7.81262338e-01 -7.56572604e-01 1.44760156e+00 ... 
-4.91196185e-01 3.00148875e-01 -9.48434770e-01]]] [[[-1.04119170e+00 3.20580649e+00 9.52521324e-01 ... 9.11120474e-01 9.44278538e-01 -5.87845266e-01] [-1.34069204e+00 1.98793784e-01 -9.67982471e-01 ... 7.80359447e-01 -4.45636004e-01 -1.25475311e+00] [ 1.72991467e+00 1.15843999e+00 1.16892457e+00 ... -1.63524187e+00 -6.44384682e-01 1.27915943e+00] ... [ 6.19956672e-01 -8.47307026e-01 1.87965202e+00 ... -3.76906663e-01 5.42749166e-01 1.25311410e+00] [ 8.26768219e-01 -1.27555418e+00 3.81657213e-01 ... 1.49923718e+00 -6.49992406e-01 1.37071335e+00] [ 6.68976068e-01 6.92575753e-01 9.24205661e-01 ... 5.59639871e-01 -1.05588508e+00 6.53019026e-02]] [[-9.03311431e-01 8.27974617e-01 -2.19244146e+00 ... -5.79581201e-01 1.32362103e+00 -4.96715039e-01] [-8.85189712e-01 -5.79379439e-01 1.62113202e+00 ... -4.04243648e-01 -1.96340412e-01 -7.50915289e-01] [ 5.81580698e-01 4.59042266e-02 -1.55113757e+00 ... 1.08059704e+00 -1.16074679e-03 -4.45255637e-01] ... [-3.09289813e-01 -9.49557483e-01 1.03845251e+00 ... -3.48792583e-01 -1.40967798e+00 -2.73477197e-01] [-4.36649412e-01 -3.96095037e-01 -7.83810318e-01 ... 1.25730443e+00 -1.74989104e+00 5.93147576e-01] [ 4.05574322e-01 9.60221469e-01 4.39011872e-01 ... 7.34966278e-01 -3.90512496e-01 -2.40003616e-01]] [[-8.46816301e-01 4.61549312e-01 -1.01174557e+00 ... -2.20966980e-01 -1.10270238e+00 -1.32181263e-02] [-3.14041257e-01 -1.40180826e+00 -3.47694576e-01 ... -5.67678325e-02 4.06782001e-01 -7.75528014e-01] [ 1.13789463e+00 -9.62726116e-01 1.31970882e+00 ... -5.25520205e-01 1.56546354e+00 7.67431319e-01] ... [-1.75088573e+00 -8.13360333e-01 -4.81725782e-01 ... 8.42024684e-01 1.53393734e+00 -1.71970975e+00] [-1.21072674e+00 1.92572880e+00 -2.05520940e+00 ... 5.19997835e-01 -1.84094107e+00 -2.03566027e+00] [-6.16168737e-01 1.11507542e-01 2.50321984e-01 ... -1.77267730e+00 1.49444473e+00 -1.58593029e-01]] ... [[ 2.24233568e-01 -1.05283654e+00 -1.12325215e+00 ... 
-1.75294554e+00 -2.24798059e+00 3.38135391e-01] [-5.78029573e-01 1.24806619e+00 -1.23398590e+00 ... 8.79238665e-01 4.11455542e-01 -8.52298319e-01] [ 1.14349174e+00 8.46877754e-01 -2.83826113e-01 ... 2.89198786e-01 1.29274929e+00 -1.02408814e+00] ... [-1.77220654e+00 -8.77820134e-01 5.98758519e-01 ... 1.66585684e+00 1.71664572e+00 -7.78434038e-01] [-9.18538272e-01 -2.86503553e-01 -1.23894602e-01 ... -1.66818607e+00 -9.49083984e-01 6.34241939e-01] [ 2.73534143e-03 1.23105752e+00 -7.66553164e-01 ... -3.31558824e-01 -4.22038734e-01 7.85169423e-01]] [[-1.70507222e-01 4.84787315e-01 7.29420707e-02 ... 2.71066397e-01 5.75823903e-01 -1.03737116e-01] [-1.45861065e+00 -4.04422939e-01 1.34473085e-01 ... 6.22011781e-01 1.07802963e+00 -1.82018518e-01] [ 1.06206679e+00 -2.26808995e-01 1.82099625e-01 ... -5.82803965e-01 2.83344924e-01 2.12890553e+00] ... [ 9.43042517e-01 -9.10798192e-01 -8.54331255e-01 ... -1.37387323e+00 6.72504663e-01 -1.40149117e+00] [ 1.35822403e+00 5.17645299e-01 5.99327326e-01 ... 7.62225986e-01 -1.76312417e-01 -2.95318812e-01] [ 2.19628406e+00 -1.01754737e+00 -3.89888018e-01 ... -4.52889413e-01 -9.85047519e-01 6.22693777e-01]] [[ 2.52115309e-01 -2.04128861e+00 -3.36778075e-01 ... -3.57238382e-01 -6.60629809e-01 1.84601367e+00] [-4.54383552e-01 -4.23836023e-01 1.01674449e+00 ... 7.63773561e-01 -4.99166101e-01 2.91234326e+00] [-1.11351359e+00 -1.69542670e+00 -9.79650974e-01 ... 3.78629625e-01 4.90242809e-01 -4.39599425e-01] ... [-8.97537827e-01 4.98310268e-01 -2.74095833e-02 ... 1.73176870e-01 1.90637755e+00 -1.49531949e+00] [-5.05801380e-01 1.37510908e+00 1.51159072e+00 ... 2.08269969e-01 -3.22863877e-01 5.84220350e-01] [-3.26224744e-01 -1.12695456e+00 -4.61279452e-01 ... -8.97293448e-01 5.84057152e-01 -2.73894876e-01]]] [[[ 8.25672969e-02 1.68310678e+00 9.36542809e-01 ... -8.37416887e-01 8.50504994e-01 1.85378587e+00] [ 3.87880832e-01 -5.55452764e-01 -6.14343226e-01 ... 
-2.03548169e+00 -2.67641902e-01 6.23421431e-01] [ 1.88004982e+00 5.71523070e-01 2.59809327e+00 ... -3.40940177e-01 -1.19576439e-01 3.12010199e-01] ... [-3.48633349e-01 -2.76373386e+00 3.79700929e-01 ... 1.52130246e+00 2.40536124e-01 -1.45502460e+00] [ 5.19213855e-01 8.60076368e-01 1.96033731e-01 ... -2.42146373e+00 -8.12173545e-01 -9.66538548e-01] [-1.70254812e-01 -3.81191105e-01 -4.89145786e-01 ... -1.32867146e+00 -8.79010707e-02 4.08449501e-01]] [[-1.53334713e+00 -1.99106708e-01 -2.83432722e-01 ... 7.00655758e-01 8.08698416e-01 3.51904809e-01] [ 1.38719404e+00 -1.24528043e-01 2.37090141e-01 ... 1.17021632e+00 -3.35550830e-02 1.66016385e-01] [ 8.36102292e-02 1.63325086e-01 1.28392965e-01 ... -1.65840894e-01 5.73568523e-01 1.11107600e+00] ... [ 2.07613158e+00 1.82499923e-02 -1.94331288e+00 ... -1.08056176e+00 6.75805151e-01 1.83208835e+00] [ 1.87859023e+00 -4.63661291e-02 1.06365633e+00 ... 3.61745685e-01 8.38381886e-01 8.74055207e-01] [-1.25409997e+00 -7.62355208e-01 1.01374900e+00 ... -1.38003564e+00 -1.54599205e-01 -1.02444732e+00]] [[-2.35954762e-01 1.81624067e+00 -1.44937980e+00 ... -3.82171988e-01 -1.17936599e+00 5.34940839e-01] [ 7.49502957e-01 -1.82404792e+00 9.74797010e-01 ... -8.64340961e-01 4.28074628e-01 5.01801133e-01] [-1.12465954e+00 -2.50032067e-01 -2.27902937e+00 ... 1.17708959e-01 -6.38418734e-01 9.83063042e-01] ... [-6.22120798e-01 1.96153498e+00 8.52497101e-01 ... -1.17280209e+00 -4.80624735e-01 4.60051715e-01] [ 8.59479368e-01 2.02954058e-02 2.08681718e-01 ... 1.05545902e+00 9.19258058e-01 -5.66499829e-01] [ 9.36640501e-02 1.37197617e-02 -4.23963368e-01 ... -6.00975692e-01 3.02896313e-02 1.38170302e-01]] ... [[-3.13680828e-01 -5.70291936e-01 -6.95022345e-01 ... -1.67165458e+00 3.17069471e-01 8.27540696e-01] [ 1.45326138e-01 1.09369099e-01 -1.30585885e+00 ... -5.50931156e-01 1.32080579e+00 -1.92635749e-02] [ 4.32821326e-02 -3.03161032e-02 -2.42560551e-01 ... -1.47872761e-01 2.04490948e+00 -1.84300733e+00] ... 
[-8.12578678e-01 1.28445292e+00 4.60221589e-01 ... 1.34315193e+00 -2.04022482e-01 -1.58559394e+00] [ 1.55989841e-01 -3.44407588e-01 -2.98917741e-02 ... 1.80889189e+00 -5.61834872e-02 1.19643891e+00] [ 2.41240174e-01 1.84120464e+00 5.77644408e-01 ... 1.50881875e+00 -1.46873486e+00 -1.73828638e+00]] [[-5.01763284e-01 -1.27009439e+00 3.56971145e-01 ... -5.82411647e-01 -2.94435918e-01 1.59395516e+00] [ 7.19871581e-01 5.40608644e-01 -1.61181962e+00 ... 3.55356038e-01 1.78189313e+00 -6.92036152e-01] [-6.95129812e-01 6.16742559e-02 1.67774808e+00 ... -2.89837807e-01 -2.87683606e-01 -2.85936624e-01] ... [-1.03956449e+00 3.30699086e-01 4.99727637e-01 ... 1.31057203e+00 2.32563806e+00 -2.84732074e-01] [-6.33441145e-03 3.66066754e-01 2.18281955e-01 ... 5.15444934e-01 -5.99232793e-01 2.64733434e-01] [-1.88614070e-01 -1.41793776e+00 -1.98302731e-01 ... 5.01456380e-01 4.94673789e-01 4.13170278e-01]] [[-4.33109909e-01 -2.05069203e-02 8.12116206e-01 ... 6.36162996e-01 8.86530429e-02 -1.92825198e-01] [ 1.33895040e+00 9.19704065e-02 6.87900707e-02 ... 1.50202525e+00 8.14985991e-01 1.66620243e+00] [-2.31382832e-01 -1.09768581e+00 9.97317612e-01 ... -1.51716483e+00 -3.63068968e-01 -3.43902797e-01] ... [-1.53664196e+00 -7.28919625e-01 -5.00432968e-01 ... -9.55130160e-01 -5.33877134e-01 -2.57605631e-02] [-1.13452578e+00 -3.15092295e-01 1.74718726e+00 ... 1.17755838e-01 -5.24000764e-01 -1.84759378e+00] [-1.05973494e+00 8.38681340e-01 1.38228416e-01 ... -8.01938236e-01 1.29497838e+00 -2.33601015e-02]]] [[[-1.24116875e-01 2.82647014e+00 -1.79715320e-01 ... -6.57250285e-01 -3.52028638e-01 -5.29193163e-01] [ 5.31513453e-01 -1.82047391e+00 2.64017045e-01 ... -4.17031208e-03 2.75496662e-01 6.20372854e-02] [ 1.17698264e+00 -1.95081800e-01 3.47714067e-01 ... 2.16299504e-01 6.41112030e-01 -4.81896490e-01] ... [-2.13509774e+00 -7.75935411e-01 1.19212782e+00 ... -2.00269356e-01 4.49509323e-01 -1.03953457e+00] [-4.05832320e-01 1.34318185e+00 1.55305517e+00 ... 
-4.14028406e-01 2.65138179e-01 -9.74316418e-01] [ 5.05516171e-01 2.19950056e+00 1.16721249e+00 ... -2.67300874e-01 -9.84815955e-01 1.26386654e+00]] [[-2.65632689e-01 -7.42035627e-01 -3.72997463e-01 ... 2.76972186e-02 1.11501575e+00 -1.49477673e+00] [ 4.38948721e-01 1.03586905e-01 -6.07058227e-01 ... -4.59066480e-01 5.78289449e-01 1.84544468e+00] [-3.05903226e-01 -2.58542538e+00 5.59585214e-01 ... -6.88689709e-01 5.28519630e-01 -1.31372082e+00] ... [-1.24733448e+00 -1.19777000e+00 -2.50864685e-01 ... 5.51269174e-01 -1.68670464e+00 5.46304286e-01] [-4.19547647e-01 6.47293866e-01 -1.38268661e+00 ... 9.61519778e-01 5.98090649e-01 6.55693591e-01] [ 5.05559087e-01 -1.37608600e+00 1.73180148e-01 ... -1.26184940e+00 9.85593796e-01 1.58208847e-01]] [[-4.68838960e-01 2.28742886e+00 1.48969255e-02 ... 1.19924165e-01 -9.45627019e-02 -1.33593440e+00] [-2.92197204e+00 -3.34928334e-01 -4.92065370e-01 ... -2.05360317e+00 1.52509749e+00 -1.32268798e+00] [-7.37448335e-01 7.61305392e-01 -9.96521056e-01 ... 2.08226275e+00 -1.44677907e-01 4.50825185e-01] ... [ 1.33413088e+00 9.86350775e-02 2.13618731e+00 ... 7.20673978e-01 -4.87931669e-01 2.38185334e+00] [ 1.41657695e-01 -1.35390818e-01 -5.37012875e-01 ... 2.13244534e+00 5.84550202e-01 -8.84828046e-02] [-4.98478562e-01 -1.54145885e+00 3.16027284e-01 ... -9.22374606e-01 3.48665446e-01 8.04802403e-02]] ... [[ 9.22358572e-01 -1.38796329e-01 5.76180160e-01 ... 9.67807353e-01 -1.47622907e+00 1.06075418e+00] [ 7.47362003e-02 4.01634634e-01 2.99691141e-01 ... -7.91973889e-01 -1.87439732e-02 4.33059245e-01] [ 2.06224775e+00 5.96792698e-01 -1.41160488e-01 ... -7.19188273e-01 -1.57654786e+00 2.89727986e-01] ... [-1.17701316e+00 -1.49757457e+00 -1.68115422e-01 ... -7.01937020e-01 -1.76297426e+00 2.35593513e-01] [-2.12019777e+00 -7.11837590e-01 -9.98061240e-01 ... 2.20757335e-01 -6.70595706e-01 1.45228967e-01] [-4.08972561e-01 4.71383303e-01 -2.99436331e+00 ... 
-2.96353102e-01 5.80798507e-01 3.14088792e-01]] [[ 1.13884521e+00 -9.06621993e-01 1.22417644e-01 ... 1.91953230e+00 -4.05674167e-02 6.34951949e-01] [ 4.56831038e-01 -9.46309149e-01 -3.06961477e-01 ... -1.22727847e+00 5.76908030e-02 -5.15070558e-01] [-5.55093527e-01 1.37794644e-01 4.58012015e-01 ... 1.24902678e+00 7.86109090e-01 1.95260274e+00] ... [-2.14197230e+00 -3.45706105e-01 -4.16914582e-01 ... 5.80788970e-01 1.68462956e+00 3.45028073e-01] [ 2.32677078e+00 1.02450883e+00 9.54781353e-01 ... -5.48759878e-01 -1.42903054e+00 -3.85897547e-01] [ 5.27863264e-01 -1.98867345e+00 -2.45611280e-01 ... 1.09632790e+00 -1.63478777e-01 2.58767903e-01]] [[-4.33783919e-01 1.25964928e+00 2.92805403e-01 ... -1.55536532e+00 6.81801617e-01 -5.33733845e-01] [ 4.72892761e-01 -9.83745217e-01 -5.76501131e-01 ... -5.94656505e-02 -6.39589429e-01 -2.98113406e-01] [-8.42219070e-02 6.99959457e-01 1.30263638e+00 ... -4.58206087e-01 -2.41228312e-01 -9.27195668e-01] ... [ 8.00883889e-01 6.81703150e-01 7.82039881e-01 ... -1.52125612e-01 -1.82654333e+00 -3.16890925e-01] [-1.88711059e+00 2.49228150e-01 -1.20670128e+00 ... -1.53396234e-01 1.09022307e+00 1.37971854e+00] [-1.16777039e+00 5.52834928e-01 6.77093565e-01 ... -2.85320103e-01 -9.66352284e-01 -4.18940127e-01]]] [[[-8.14168274e-01 -1.50688088e+00 -2.46849403e-01 ... -1.20063412e+00 1.78216010e-01 1.19574524e-01] [-5.11868060e-01 -2.51695782e-01 -5.39613485e-01 ... 6.54019535e-01 4.81167138e-01 -3.46344471e-01] [ 1.57561147e+00 -5.92904806e-01 -4.11899537e-01 ... 3.33355427e+00 -1.05405450e+00 -1.36339264e-02] ... [-1.97976995e+00 9.29478467e-01 3.20688635e-01 ... -2.71644711e-01 5.88916183e-01 1.25213945e+00] [ 1.03271031e+00 -1.35842133e+00 -1.51807340e-02 ... 3.62438746e-02 1.91286385e+00 1.41257241e-01] [ 1.82077801e+00 -6.10528946e-01 -3.04210544e-01 ... -1.21484530e+00 -1.24215674e+00 7.09961057e-01]] [[ 1.14217229e-01 -2.15288967e-01 9.30857003e-01 ... 
-8.56602728e-01 7.06472456e-01 -9.32388783e-01] [-1.06035341e-02 -4.85999286e-01 1.96579918e-01 ... 2.89196700e-01 -6.06337562e-02 -2.21739963e-01] [-3.73295724e-01 -8.16678166e-01 -2.92980611e-01 ... 7.79880956e-02 -4.14910018e-01 1.10100158e-01] ... [ 2.11294413e+00 1.58550847e+00 -8.11345503e-02 ... -2.29020402e-01 -1.60076305e-01 8.44452620e-01] [ 2.02940607e+00 4.42257762e-01 -3.91410321e-01 ... 1.48302078e+00 -1.86132228e+00 5.81569374e-02] [-3.29954863e-01 -7.59531140e-01 7.44316757e-01 ... 1.52333331e+00 1.23458982e+00 -1.60830081e-01]] [[ 1.06691492e+00 -2.20538348e-01 4.70837563e-01 ... 1.49259961e+00 -2.34604791e-01 -3.78947198e-01] [ 3.52039002e-02 7.23970771e-01 5.19726276e-01 ... -7.12679386e-01 -1.34373665e+00 -1.37389362e+00] [-5.69474101e-01 5.20515025e-01 -1.25208545e+00 ... -4.67612535e-01 3.99172127e-01 2.15485454e+00] ... [-1.99749812e-01 3.84550154e-01 1.51545179e+00 ... -1.05164480e+00 -3.05267811e-01 -7.99723327e-01] [-2.56312191e-01 2.27544570e+00 1.84188282e+00 ... 3.07602674e-01 7.56657958e-01 7.36142457e-01] [-6.90336943e-01 8.22424352e-01 -1.88573754e+00 ... 2.45586738e-01 -1.68684110e-01 -4.32212740e-01]] ... [[-2.22001219e+00 -7.13729039e-02 -1.51141495e-01 ... 1.16127551e+00 -2.71401912e-01 9.31363523e-01] [ 1.96595460e-01 -1.62084952e-01 -1.75801432e+00 ... 1.77662939e-01 -1.25923781e-02 -8.60807657e-01] [ 7.24100649e-01 -2.51618832e-01 1.30705309e+00 ... 1.65219021e+00 4.32569198e-02 -8.21910679e-01] ... [ 2.83169603e+00 2.24251717e-01 4.84773785e-01 ... -2.77169049e-02 -1.38566852e+00 1.30955935e+00] [ 7.26527214e-01 -1.11993861e+00 -7.86900640e-01 ... 7.83188641e-01 -2.34520853e-01 -1.51240721e-01] [ 1.32438087e+00 -8.94450903e-01 1.78865540e+00 ... -6.13308072e-01 -4.86299962e-01 2.36188382e-01]] [[-6.39480114e-01 -1.41815472e+00 7.24639654e-01 ... 2.55851984e-01 1.60126102e+00 -4.77635801e-01] [-5.90146065e-01 -9.12667453e-01 -1.43165350e+00 ... 
-3.56548667e-01 -7.16394007e-01 7.22086906e-01] [ 3.24397683e-01 -2.87472963e-01 1.56323946e+00 ... -1.91436410e+00 6.15598977e-01 -4.94901627e-01] ... [ 3.04662347e-01 1.62678123e+00 4.40839753e-02 ... 7.98130035e-01 1.38791370e+00 -2.91127622e-01] [ 1.99637437e+00 1.54408410e-01 -1.63557875e+00 ... 8.80822361e-01 1.55325651e-01 6.58087790e-01] [-2.54527807e-01 -3.46701592e-01 -3.77955317e-01 ... -4.26400661e-01 -1.80084956e+00 1.32083869e+00]] [[ 8.30722675e-02 9.76954401e-01 8.46440375e-01 ... 7.00207174e-01 9.30897951e-01 -6.16636574e-01] [ 8.85097742e-01 1.00361156e+00 -8.38128686e-01 ... 3.54911029e-01 6.33222759e-02 -9.90838826e-01] [-9.88297224e-01 1.07954073e+00 6.88245714e-01 ... 4.33982044e-01 1.40297079e+00 1.54254425e+00] ... [-2.88113445e-01 -3.16107631e-01 3.79420191e-01 ... -2.17032745e-01 1.77754253e-01 4.68302131e-01] [ 5.23018181e-01 -6.84516728e-01 -1.71422994e+00 ... -4.88921016e-01 -6.91384077e-01 -2.11284828e+00] [-6.91985965e-01 -1.32244599e+00 6.51153445e-01 ... -8.26089144e-01 2.31163239e+00 -2.18758249e+00]]]] ... [[[[-2.98022777e-01 -4.37921554e-01 9.91455376e-01 ... -4.34172839e-01 1.30497232e-01 -1.25343025e+00] [ 5.09379983e-01 -1.54562443e-01 1.38641620e+00 ... 6.77553833e-01 -1.47933519e+00 -1.48929882e+00] [-2.93767542e-01 -7.92958498e-01 4.53885138e-01 ... -2.07441783e+00 -7.01238275e-01 -1.23089099e+00] ... [ 5.95566094e-01 -9.36157584e-01 -2.35783505e+00 ... -1.68771076e+00 -2.66819715e-01 -1.10199308e+00] [-1.86823845e+00 9.08959031e-01 -3.49521726e-01 ... 1.00845769e-01 1.54189026e+00 1.14453435e+00] [-1.68781435e+00 1.19495523e+00 -1.20185947e+00 ... -1.78719211e+00 8.62529993e-01 1.00295925e+00]] [[-5.32358348e-01 1.66233051e+00 -1.71463564e-01 ... -5.58763277e-03 -3.30771804e-01 -1.51003492e+00] [ 3.47650766e-01 -1.56760633e+00 -4.61286366e-01 ... -7.61323929e-01 1.06309187e+00 6.97506130e-01] [ 4.50670779e-01 1.28585267e+00 -8.82708549e-01 ... 5.06523669e-01 1.65370867e-01 2.98056994e-02] ... 
[-9.90025222e-01 -1.15113211e+00 -3.49359483e-01 ... 5.48123062e-01 3.51218075e-01 9.46310222e-01] [ 1.51809680e+00 -6.96771502e-01 -4.79740322e-01 ... -2.69467402e+00 7.30077207e-01 1.50064051e+00] [ 1.73587120e+00 -8.80226940e-02 -6.36670113e-01 ... 2.71509081e-01 5.54391146e-01 -4.08929378e-01]] [[-7.84060419e-01 3.16636592e-01 1.41719669e-01 ... -3.93868327e-01 1.04822509e-01 5.92736363e-01] [-1.67268574e+00 -2.33994409e-01 -4.95155513e-01 ... -3.75978261e-01 -2.15091243e-01 -1.04426396e+00] [ 8.96140158e-01 3.50786120e-01 2.16597915e-02 ... 2.78413236e-01 -1.30933702e+00 -3.11081976e-01] ... [-5.13862848e-01 1.47120988e+00 1.18036270e+00 ... -1.86693084e+00 2.91743398e-01 -8.66261423e-01] [ 4.71305102e-01 -4.95586336e-01 -3.91531050e-01 ... -1.22726452e+00 1.55386729e-02 1.19704795e+00] [-6.91106737e-01 -2.53381419e+00 1.50803542e+00 ... 1.25516641e+00 6.94213986e-01 -1.18969142e+00]] ... [[ 9.35591519e-01 -2.07480097e+00 1.11836982e+00 ... -1.28477669e+00 2.00251803e-01 -1.02149332e+00] [-3.90377715e-02 -2.77496517e-01 8.81944239e-01 ... 1.03459573e+00 8.20776701e-01 -6.40210092e-01] [-6.61968827e-01 1.39856920e-01 -2.40411982e-01 ... -1.93251097e+00 -3.02250266e-01 -1.11610442e-02] ... [-2.66046554e-01 1.28087223e+00 1.07049799e+00 ... 9.03105140e-02 3.92597854e-01 3.44882399e-01] [-5.76349974e-01 5.26230514e-01 1.62749326e+00 ... -7.40284324e-01 -9.77647305e-01 4.82813209e-01] [-2.12449241e+00 -8.72712851e-01 1.02011943e+00 ... 3.64555046e-02 -1.28704858e+00 -2.75542796e-01]] [[-5.59053481e-01 -2.00041914e+00 -9.19193447e-01 ... 2.09278560e+00 -1.09223330e+00 -1.51087761e+00] [ 1.42041862e-01 -1.03476569e-01 -6.82449698e-01 ... -1.31160319e+00 -8.41419637e-01 1.21287942e-01] [ 3.93020809e-01 7.35969543e-01 5.43347895e-01 ... 4.29195404e-01 -1.08148649e-01 -4.68469620e-01] ... [-7.69974470e-01 -5.92963755e-01 -2.60750383e-01 ... -1.96082747e+00 -1.16367209e+00 3.50827038e-01] [-1.23777699e+00 4.32817459e-01 -1.36324871e+00 ... 
9.91008699e-01 7.88540006e-01 -5.63372254e-01] [ 6.00132227e-01 9.08828974e-01 1.39338076e+00 ... 1.86217427e-01 -2.44244426e-01 -1.03669810e+00]] [[-4.33671117e-01 -2.40455222e+00 2.50085622e-01 ... -9.62990701e-01 -5.97485065e-01 5.23988247e-01] [ 8.02855313e-01 2.92344600e-01 -1.16521466e+00 ... 1.19854820e+00 9.93564665e-01 -1.74082056e-01] [ 4.17880982e-01 1.42321199e-01 -5.04607745e-02 ... -3.37157756e-01 4.50370193e-01 -3.23434800e-01] ... [ 1.43248892e+00 -8.81983757e-01 -5.17810285e-01 ... -5.65172553e-01 2.16675133e-01 3.60995084e-01] [-3.89824152e-01 -3.03533822e-01 -2.04091847e-01 ... -1.46381009e+00 5.50602734e-01 -9.29794848e-01] [ 1.16866514e-01 1.00532949e+00 -6.58021644e-02 ... 9.69556272e-01 2.23707691e-01 -1.00432944e+00]]] [[[-1.29907656e+00 -8.38365972e-01 -8.71790528e-01 ... -9.66411829e-01 -3.27199429e-01 8.30997407e-01] [ 1.00380445e+00 2.58995271e+00 6.73040569e-01 ... 2.42689535e-01 2.33633232e+00 7.46833324e-01] [ 1.30162382e+00 -6.50072813e-01 1.85230240e-01 ... 1.61857549e-02 -4.58607614e-01 1.39075243e+00] ... [ 8.18875253e-01 1.26966774e+00 2.76705205e-01 ... -1.60977948e+00 -6.79495037e-01 1.13047503e-01] [-1.09949553e+00 -1.95525062e+00 1.17632341e+00 ... -4.29677933e-01 -1.47890463e-01 2.76875329e+00] [ 8.62452745e-01 -1.24924052e+00 -5.63189924e-01 ... -3.27157527e-01 1.17862034e+00 1.09058142e+00]] [[ 8.79135668e-01 1.52063191e-01 8.58872950e-01 ... -1.20327783e+00 5.57758212e-01 -1.42236929e-02] [-1.18401480e+00 3.26201953e-02 -8.42212737e-01 ... -3.73373032e-01 1.19146585e+00 4.14538801e-01] [-1.27066958e+00 -4.95876729e-01 6.82820320e-01 ... -5.83926201e-01 3.49596217e-02 -1.15158129e+00] ... [-1.85377657e-01 4.04922277e-01 -5.35038650e-01 ... -1.04902160e+00 2.73688644e-01 1.85748327e+00] [-1.35468340e+00 1.80679604e-01 3.67538959e-01 ... -7.49618590e-01 1.92774329e-02 2.04210067e+00] [-3.81668746e-01 -7.22765326e-01 -1.25558209e+00 ... 
-3.24473791e-02 -6.91917315e-02 1.07806420e+00]] [[-7.69001484e-01 -1.08087409e+00 -7.57986903e-01 ... 2.18380421e-01 -1.48854971e+00 -4.42063153e-01] [ 9.31606293e-01 1.56083786e+00 1.87296271e+00 ... -6.14856243e-01 6.70526400e-02 -3.38759363e-01] [-3.09740547e-02 4.00249004e-01 4.80894357e-01 ... -1.26360810e+00 6.61580920e-01 -9.01444495e-01] ... [ 1.12704718e+00 -1.15425205e+00 -9.19791341e-01 ... 7.86808968e-01 -6.39697254e-01 9.83101130e-01] [ 2.34882176e-01 -4.56113994e-01 -4.53340374e-02 ... -1.33700359e+00 5.69365978e-01 8.08248520e-01] [ 1.05208468e+00 -1.66018084e-01 7.25627467e-02 ... -1.05405593e+00 7.94016838e-01 -8.02374959e-01]] ... [[-7.58064687e-01 -1.68300009e+00 8.57324064e-01 ... 2.60757148e-01 1.00518131e+00 -1.07697904e+00] [-2.73793668e-01 -1.20196357e-01 -1.16170752e+00 ... -6.54827237e-01 3.16844255e-01 -9.99971867e-01] [ 2.78058028e+00 2.81777382e-01 6.75669193e-01 ... -1.55976951e+00 3.56842399e-01 1.88035285e+00] ... [-5.66086173e-01 -1.74978361e-01 -1.49246514e+00 ... 6.03094578e-01 2.57918388e-01 2.59785682e-01] [ 1.93791699e+00 -2.20127240e-01 -1.89538074e+00 ... -3.42788637e-01 7.66292334e-01 -9.92952943e-01] [ 7.89782166e-01 -3.50428194e-01 -1.57240140e+00 ... -7.72974253e-01 1.04199052e+00 5.10908961e-01]] [[-7.29679644e-01 1.31783354e+00 -1.38246310e+00 ... 1.06721795e+00 5.14504135e-01 -1.24649024e+00] [-4.21040542e-02 -1.59661758e+00 -3.21975023e-01 ... 1.05288291e+00 3.85368079e-01 1.14971113e+00] [-1.29866874e+00 3.56258452e-01 1.12272871e+00 ... 1.44057786e+00 2.05911231e+00 -9.35699999e-01] ... [-8.82039249e-01 -4.60991800e-01 5.06837726e-01 ... 2.84261853e-01 -9.94906247e-01 -1.96395814e-01] [-8.17688823e-01 -1.06654310e+00 -4.32391852e-01 ... 5.06893814e-01 -3.50278527e-01 -8.97899926e-01] [ 8.27033997e-01 -6.86331213e-01 -5.99274457e-01 ... -1.18595326e+00 -1.27541840e-01 -1.22883514e-01]] [[ 1.35418785e+00 -1.42129815e+00 -1.85124373e+00 ... 
-2.22057557e+00 -2.71481037e-01 4.62675607e-03] [-2.02733889e-01 1.75722098e+00 9.84244943e-01 ... -3.29120517e-01 1.02842724e+00 -2.15911150e+00] [-1.60174739e+00 2.50101638e+00 -9.61856186e-01 ... -3.59206259e-01 1.38798857e+00 -2.79285455e+00] ... [ 9.10472691e-01 2.26262603e-02 5.37381768e-01 ... -7.11536944e-01 -1.49128997e+00 -1.55243742e+00] [ 2.54243731e-01 6.54056251e-01 5.94736636e-01 ... 1.33867490e+00 -1.00093436e+00 -5.64532995e-01] [ 1.80450995e-02 -6.64657772e-01 1.24670422e+00 ... 3.55986685e-01 6.34747982e-01 -1.30971742e+00]]] [[[-2.60027504e+00 -3.65656346e-01 -7.04175293e-01 ... 1.06350958e-01 8.74921560e-01 -8.51155102e-01] [-1.12016654e+00 1.83378458e-01 1.62169778e+00 ... -1.70531464e+00 -3.18451524e-01 -1.08638310e+00] [ 6.19016528e-01 -1.71026003e+00 3.08831692e-01 ... 6.59804881e-01 6.99101031e-01 -2.00946426e+00] ... [ 3.56114477e-01 1.64253604e+00 -1.09033477e+00 ... -3.27219814e-01 1.98230791e+00 -4.13952112e-01] [ 2.37551403e+00 6.11293137e-01 8.62183213e-01 ... 1.33435771e-01 7.08774209e-01 7.02696800e-01] [ 9.85159397e-01 8.13335359e-01 4.47707530e-03 ... 8.15284967e-01 -5.20266950e-01 -7.59400547e-01]] [[-3.63985538e-01 6.24140143e-01 -1.92756438e+00 ... 1.26749766e+00 1.07347703e+00 -1.37341037e-01] [-9.36119407e-02 1.51246500e+00 -1.52122951e+00 ... -1.53177476e+00 -1.30002034e+00 1.22684233e-01] [-9.04954314e-01 3.49175423e-01 -8.87836456e-01 ... -9.26574409e-01 -5.98260999e-01 -1.13926160e+00] ... [ 5.37449598e-01 5.68383038e-01 -7.45947883e-02 ... -1.22225201e+00 3.75832707e-01 -2.37843800e+00] [-1.29823580e-01 -6.52635574e-01 -3.07660490e-01 ... 8.57804716e-01 -3.47872555e-01 1.68438280e+00] [-9.47006762e-01 -2.14288160e-01 -2.20970303e-01 ... -4.05128032e-01 7.51355350e-01 -2.71156281e-01]] [[ 6.20243013e-01 1.02543855e+00 9.70465183e-01 ... -2.14146209e+00 -5.79918444e-01 -1.09757051e-01] [-4.85191166e-01 1.91610605e-01 -4.03084189e-01 ... 
9.09267545e-01 1.89881567e-02 -1.22137718e-01] [-1.05512178e+00 -8.47329021e-01 -1.61743045e-01 ... -8.90527666e-01 -1.18376887e+00 -1.29666674e+00] ... [-5.98716319e-01 -6.81104064e-01 -6.86821938e-01 ... -4.20967013e-01 -1.20660794e+00 -3.32129091e-01] [ 5.04696906e-01 -5.62164187e-01 -1.39495182e+00 ... 5.04006743e-01 -1.86943781e+00 -9.17652071e-01] [ 9.47821379e-01 -1.26633847e+00 5.65066457e-01 ... -5.19035399e-01 -4.98170964e-02 1.01364821e-01]] ... [[ 7.39931703e-01 9.77891564e-01 4.16521609e-01 ... -8.37791681e-01 -3.12056929e-01 -2.27786109e-01] [-1.57208335e+00 -1.49035823e+00 4.81687248e-01 ... -4.59301502e-01 4.43849415e-01 -2.17466176e-01] [ 9.16465402e-01 2.02388108e-01 5.12905478e-01 ... 9.13525164e-01 -3.91446412e-01 8.28337610e-01] ... [-1.11615686e-02 1.52110767e+00 -2.03180242e+00 ... -4.02104586e-01 5.02852917e-01 2.48142898e-01] [-1.32168233e+00 7.58467495e-01 1.20576048e+00 ... 3.05984110e-01 -9.11062241e-01 4.50589359e-01] [ 8.37120116e-02 1.30711412e+00 -1.37806606e+00 ... -8.55073333e-01 -1.25982797e+00 6.54147744e-01]] [[-2.04667425e+00 1.90778479e-01 1.31114113e+00 ... -1.86336547e-01 -7.27118492e-01 8.09514403e-01] [-2.60416090e-01 1.03674531e+00 -1.54758513e+00 ... 4.69505280e-01 1.85306966e+00 1.14839129e-01] [ 5.81094384e-01 -5.65849006e-01 1.12157440e+00 ... -7.58479178e-01 4.64571774e-01 1.49207070e-01] ... [-1.34386706e+00 -7.60343552e-01 6.03659928e-01 ... -1.65037668e+00 -4.30349141e-01 3.73811603e-01] [-1.42549857e-01 1.01834428e+00 -1.65097201e+00 ... 7.45337784e-01 -1.55098051e-01 -3.76051486e-01] [ 3.50168020e-01 5.23304641e-01 -3.81865561e-01 ... -1.71545982e-01 1.64486790e+00 -3.81997883e-01]] [[-2.05234146e+00 -2.20760718e-01 1.05946267e+00 ... 1.12304986e+00 -1.02943599e+00 1.53332698e+00] [-3.40166837e-01 -1.23254716e+00 -2.02264500e+00 ... 1.92774069e+00 -9.00575399e-01 -1.48615408e+00] [ 5.58644891e-01 -6.51021659e-01 -3.66321951e-02 ... -2.63233989e-01 1.31470537e+00 -9.40512955e-01] ... 
[-1.75997055e+00 -1.09027684e+00 9.58682775e-01 ... -1.24432313e+00 -8.93096447e-01 -6.13603890e-02] [ 2.25217700e+00 3.03064704e-01 5.23494363e-01 ... 1.53627181e+00 4.72761035e-01 8.30399513e-01] [-1.17303431e+00 7.24310398e-01 -8.49663198e-01 ... -1.89699844e-01 1.30509102e+00 2.48133596e-02]]] [[[ 8.00437629e-01 5.25036097e-01 -4.84626107e-02 ... 2.61361957e-01 -8.40807378e-01 5.29642045e-01] [ 6.00103021e-01 -1.17542028e+00 -5.97584069e-01 ... -2.30992902e-02 -8.71424317e-01 -5.93794703e-01] [ 2.94146478e-01 2.38245782e-02 -9.92293477e-01 ... -1.30008674e+00 -1.70658040e+00 -8.06698203e-01] ... [ 5.58174968e-01 -2.82773376e-01 5.75177789e-01 ... -7.77914345e-01 4.43571478e-01 -7.57125020e-01] [ 1.48358631e+00 4.04104501e-01 9.56130922e-02 ... -6.89992845e-01 -2.78424501e-01 1.04821777e+00] [-3.47021431e-01 6.71098351e-01 1.01170853e-01 ... -6.04485989e-01 3.13349545e-01 2.04115927e-01]] [[-1.51196167e-01 -9.47842777e-01 2.79777944e-01 ... 7.74439275e-01 2.17367694e-01 -3.27466801e-02] [ 3.23472083e-01 -2.44953966e+00 7.87411183e-02 ... 9.34198856e-01 2.10038871e-01 -5.16940057e-01] [-2.40613878e-01 4.21588808e-01 2.75288615e-02 ... 1.45913804e+00 3.95039350e-01 7.83042312e-01] ... [-2.78471500e-01 6.28041148e-01 -9.13837671e-01 ... -6.85514987e-01 -3.40745524e-02 -2.71021366e-01] [-1.77647424e+00 1.31344914e-01 -3.79916131e-01 ... -7.48444796e-01 -6.26288533e-01 4.73471582e-01] [ 2.98136443e-01 1.35503277e-01 5.91538614e-03 ... -7.38971829e-02 3.47754025e+00 -1.40916634e+00]] [[ 8.79931271e-01 8.65192950e-01 -5.95467091e-02 ... 6.13780141e-01 2.44274199e-01 1.99416339e+00] [ 6.06564164e-01 -5.28275490e-01 -7.06879556e-01 ... -4.84971941e-01 -8.91383469e-01 5.05618572e-01] [ 7.92198598e-01 1.74228489e+00 3.02440405e-01 ... 9.91261780e-01 -2.52543092e-01 -1.63449064e-01] ... [ 1.24752426e+00 -2.75536150e-01 1.04424512e+00 ... -8.71739209e-01 3.44581753e-01 1.20611393e+00] [ 8.46463263e-01 1.12729776e+00 -8.03124845e-01 ... 
1.12898342e-01 -3.38771641e-01 1.93662494e-01] [ 1.46258128e+00 1.01583254e+00 2.14924186e-01 ... -4.27199990e-01 -3.88933599e-01 -1.00303328e+00]] ... [[-1.06799531e+00 2.70313954e+00 -1.71777189e-01 ... 7.69911349e-01 -8.05183113e-01 -5.67712225e-02] [-3.86388808e-01 1.08411396e+00 9.69985783e-01 ... -2.52393603e-01 -6.58168137e-01 4.50226009e-01] [ 7.12075114e-01 -3.41078639e-01 -1.34107277e-01 ... -1.01768434e+00 -6.97395802e-01 2.32323885e+00] ... [-1.13750184e+00 -1.26586807e+00 -9.75549400e-01 ... -2.31594183e-02 1.38442910e+00 -9.36693907e-01] [ 4.32753772e-01 -5.83606184e-01 1.25825751e+00 ... 8.32049906e-01 5.30574739e-01 9.56354141e-02] [ 1.03877056e+00 -8.09652865e-01 -2.12335920e+00 ... 1.09020984e+00 8.43875110e-01 -5.23820221e-01]] [[-1.35468936e+00 -1.25766957e+00 -1.27937794e-01 ... -1.43420863e+00 7.50596583e-01 -4.41007793e-01] [-1.73254240e+00 2.73725186e-02 -1.15709209e+00 ... 1.95159316e+00 9.69229102e-01 3.66003811e-02] [ 1.87412488e+00 6.85349643e-01 8.10647249e-01 ... -1.07990468e+00 -7.91019619e-01 -7.15677559e-01] ... [ 1.39980316e+00 -1.10642242e+00 -5.60540557e-01 ... 1.02942407e+00 8.60148251e-01 1.43288627e-01] [ 1.09416652e+00 -1.09416139e+00 2.20966959e+00 ... 3.39603692e-01 1.21665645e+00 3.25883895e-01] [-7.01727867e-01 -8.00629914e-01 6.10486567e-01 ... 2.18260145e+00 3.86856556e-01 2.90736884e-01]] [[-1.71421766e-01 7.38686800e-01 -3.26269686e-01 ... 9.41861451e-01 -8.53098452e-01 -2.45984897e-01] [-1.07436046e-01 6.92368507e-01 2.33444977e+00 ... 1.54963875e+00 -6.72317564e-01 -7.74687052e-01] [-2.85398155e-01 1.41923892e+00 -4.51662034e-01 ... 6.56603873e-01 -1.02886105e+00 -6.86729074e-01] ... [-1.96095973e-01 3.09665650e-01 -3.24034840e-01 ... -1.01555884e+00 -2.47274876e-01 3.76613259e-01] [ 2.53346741e-01 5.52407205e-01 9.84446183e-02 ... -8.44879448e-01 -1.28823793e+00 -8.16190004e-01] [-1.48202133e+00 -3.30662310e-01 1.77384000e-02 ... 
-1.70261174e-01 -6.82551742e-01 4.61606920e-01]]] [[[-9.77768719e-01 -7.79628530e-02 -8.33356306e-02 ... 5.45129001e-01 6.22256137e-02 -1.20544982e+00] [ 2.15597123e-01 3.99776906e-01 -9.48047280e-01 ... -1.76330402e-01 -3.31601948e-01 5.20622671e-01] [-2.68283635e-01 -1.32137525e+00 -9.06073689e-01 ... 1.72378421e+00 2.43955508e-01 -1.33618033e+00] ... [-1.44904482e+00 -8.34957421e-01 8.40586543e-01 ... -9.38709080e-01 5.60631931e-01 -5.80559552e-01] [ 1.86635911e-01 -1.42859191e-01 5.73098540e-01 ... 1.49101451e-01 3.39793921e-01 1.77677858e+00] [ 1.33890605e+00 6.42937243e-01 1.29877639e+00 ... 1.18177104e+00 -3.28600481e-02 -5.99836230e-01]] [[ 7.48792589e-02 3.07571352e-01 -1.62252992e-01 ... 3.28203648e-01 -6.45616174e-01 1.13870156e+00] [ 3.14633906e-01 -2.45066807e-01 -5.89075327e-01 ... -1.14388311e+00 -1.69464898e+00 5.35624027e-01] [ 7.57525325e-01 4.56502289e-01 -1.47808695e+00 ... -4.55217719e-01 -8.09759200e-01 -3.93807739e-02] ... [ 1.27571726e+00 5.66420965e-02 4.07348387e-02 ... 5.18677831e-01 -1.71423350e-02 9.44973946e-01] [-1.29420948e+00 -5.62042952e-01 -5.19358814e-01 ... -1.51345277e+00 2.39994735e-01 -8.10051739e-01] [ 1.56976783e+00 -3.46249104e-01 4.84354384e-02 ... 8.29032779e-01 1.29122883e-01 -8.58694136e-01]] [[-1.15287483e+00 1.74855247e-01 -2.26433903e-01 ... 7.09240973e-01 -1.39526713e+00 1.48504376e+00] [ 2.04147387e+00 6.46725833e-01 3.42096835e-02 ... 9.88503277e-01 -2.96669483e-01 7.34130263e-01] [ 3.08275253e-01 2.33407569e+00 -1.16015654e-02 ... 8.66379619e-01 5.17720997e-01 1.01094401e+00] ... [-2.23230934e+00 7.53706276e-01 6.36275291e-01 ... 6.54808357e-02 -1.22191775e+00 -6.72494948e-01] [-7.71280527e-01 1.95143640e+00 7.39431918e-01 ... -9.82768476e-01 9.50212926e-02 9.04838666e-02] [ 6.63554430e-01 1.55538261e+00 -1.53236404e-01 ... -9.75603759e-01 7.76429355e-01 1.21928918e+00]] ... [[-3.47120404e-01 -2.99846441e-01 -6.94571674e-01 ... 
-8.02172959e-01 -6.24874830e-01 1.74759746e+00] [ 5.27079105e-01 4.21061277e-01 -4.17835146e-01 ... 1.95755446e+00 -2.08588272e-01 -5.91997683e-01] [-1.58328378e+00 1.37547791e+00 2.19442844e+00 ... -9.46928322e-01 1.78156912e-01 2.30815113e-01] ... [-7.00185120e-01 -1.06625579e-01 -8.72405410e-01 ... -6.41748428e-01 1.29957783e+00 -8.63687694e-01] [-1.38626683e+00 6.42633021e-01 1.36803555e+00 ... 2.35967152e-02 2.13106960e-01 1.43428231e-02] [ 1.11089790e+00 -1.52885032e+00 1.93324018e+00 ... -1.49660170e+00 -1.34595811e-01 3.08618098e-01]] [[ 9.07910317e-02 -8.33658874e-03 -5.13959527e-01 ... -7.59984434e-01 -8.33205819e-01 7.26915225e-02] [-1.20338368e+00 -1.56098619e-01 -6.07799888e-01 ... 1.93746960e+00 5.14362454e-01 -5.55454671e-01] [ 3.10115337e-01 3.53746898e-02 -3.04432243e-01 ... 3.71614963e-01 1.00053561e+00 -9.09827888e-01] ... [-5.78682013e-02 -3.17700267e-01 -1.60441589e+00 ... 8.81102204e-01 1.00859749e+00 -7.78702319e-01] [ 1.45411289e+00 1.37792397e+00 -8.26324046e-01 ... -1.21560194e-01 -7.93296039e-01 -2.49390507e+00] [-8.30346942e-02 2.88723767e-01 -5.28561175e-01 ... -1.84394881e-01 -1.78898573e+00 1.30037022e+00]] [[-1.39383867e-01 -5.51375449e-02 -5.89215219e-01 ... 4.23129164e-02 -5.10981262e-01 4.17435884e-01] [-2.63039947e-01 1.17103589e+00 1.30254114e+00 ... 1.51888394e+00 -1.06141591e+00 4.88603085e-01] [-1.58941463e-01 2.37744284e+00 1.26308608e+00 ... 1.18913102e+00 1.78611195e+00 1.51082945e+00] ... [-1.43089104e+00 7.79519677e-01 3.98005217e-01 ... -1.28794730e+00 -2.35020494e+00 -2.33823586e+00] [ 1.81038535e+00 5.09414673e-01 4.43085045e-01 ... 7.80430198e-01 1.24773395e+00 4.23285991e-01] [ 1.27116716e+00 5.63850462e-01 5.03640249e-02 ... 1.43398118e+00 -5.90906501e-01 3.76710624e-01]]] [[[-2.35047545e-02 -5.42704761e-01 2.02147961e+00 ... 7.57715702e-01 -1.89290687e-01 7.43631065e-01] [-9.28776443e-01 1.11178443e-01 -2.31176298e-02 ... 
-4.10123952e-02 1.26368761e+00 1.51136816e+00] [-1.04323995e+00 -4.59913075e-01 2.58559316e-01 ... -3.66556823e-01 1.32031115e-02 -2.57871412e-02] ... [-1.88264310e-01 -9.11137819e-01 8.22786331e-01 ... 3.22415978e-01 -2.87851214e-01 -1.55281627e+00] [-2.22533965e+00 -8.11492205e-01 2.04491496e+00 ... 9.88354743e-01 -2.19335127e+00 -5.15920341e-01] [ 9.52675581e-01 1.32227793e-01 4.34082568e-01 ... -2.80028552e-01 8.73352289e-01 1.23878264e+00]] [[-1.44714460e-01 5.07059455e-01 3.25898111e-01 ... -1.73964798e-01 8.21662188e-01 4.62944627e-01] [-9.85013187e-01 -9.39087808e-01 5.14764249e-01 ... 9.51534927e-01 -1.58769298e+00 -1.16017365e+00] [-3.12431127e-01 -1.06892335e+00 -3.13201696e-01 ... 1.38263774e+00 -2.68862844e+00 2.29014426e-01] ... [ 1.57836902e+00 -1.33141696e+00 5.24289906e-01 ... -7.61605978e-01 -1.06692791e+00 3.84294510e-01] [ 2.40168780e-01 6.57089174e-01 -2.45657086e-01 ... 6.59015700e-02 2.00557616e-03 -4.13870424e-01] [ 3.47789317e-01 1.79946542e-01 3.06170672e-01 ... 4.35041845e-01 5.87109089e-01 -9.95096564e-02]] [[-1.56796336e+00 1.31683886e+00 -4.26486492e-01 ... -1.28134990e+00 -1.37305999e+00 -9.28267002e-01] [-7.04745352e-01 -8.17218363e-01 1.34695828e+00 ... -7.58046508e-01 1.74596596e+00 1.01794255e+00] [ 4.72009867e-01 8.56180131e-01 -4.21279706e-02 ... -9.09325004e-01 -6.24584444e-02 -7.50655472e-01] ... [-7.52612948e-01 1.37434292e+00 7.36444294e-01 ... 1.70139360e+00 -6.40391588e-01 -1.95861626e-02] [ 5.44637501e-01 5.12215197e-01 -2.32753545e-01 ... -1.41858235e-01 -5.49509190e-02 -3.55128050e-01] [ 1.46103096e+00 -7.60770321e-01 -9.04588103e-01 ... 7.09398150e-01 7.43809342e-01 8.45852554e-01]] ... [[ 5.63162148e-01 -6.27650201e-01 -3.56812716e-01 ... 2.96339184e-01 -1.73723423e+00 2.56353587e-01] [ 7.91329682e-01 3.32537256e-02 1.07513852e-01 ... -8.34481195e-02 1.42862394e-01 -1.11201966e+00] [-1.01495489e-01 4.68588144e-01 5.92436552e-01 ... -8.56682360e-02 -2.28097945e-01 1.14442587e+00] ... 
[ 1.12831318e+00 -5.72843850e-01 1.84669292e+00 ... 1.31053722e+00 -9.34779644e-01 -9.17160571e-01] [-2.41616480e-02 6.26867473e-01 -5.27447879e-01 ... -4.97446090e-01 2.70100474e+00 7.88186491e-01] [-2.69189954e-01 -3.22607160e-02 -1.42877340e+00 ... 1.11971283e+00 -9.08179581e-01 -4.31323200e-01]] [[-1.03758466e+00 -6.50868416e-01 1.81473613e-01 ... -4.88055289e-01 6.10314250e-01 1.59439170e+00] [ 7.67194808e-01 -4.60291982e-01 -1.40030175e-01 ... 4.70112860e-01 5.64885318e-01 -1.66554010e+00] [ 9.95675147e-01 5.85566819e-01 -4.43761528e-01 ... 6.45466626e-01 -1.63578379e+00 1.55268538e+00] ... [ 1.38054597e+00 -7.77466521e-02 1.12900853e+00 ... -9.40999016e-02 -2.24127814e-01 9.59949851e-01] [ 1.71565962e+00 8.28114867e-01 3.20538402e-01 ... -1.37278104e+00 1.37853551e+00 1.57772493e+00] [-1.81687370e-01 -2.36757159e+00 6.17390096e-01 ... 7.71330535e-01 1.85928971e-01 1.79002500e+00]] [[-4.24574494e-01 1.78354168e+00 1.18101805e-01 ... -2.77137089e+00 6.26321673e-01 -1.32514572e+00] [ 1.55264294e+00 1.00065231e+00 -3.08142245e-01 ... -1.38898289e+00 8.88456225e-01 1.26951182e+00] [ 2.10358500e+00 -6.55971527e-01 6.80466354e-01 ... -1.37663627e+00 -5.35441756e-01 1.88196492e+00] ... [-1.33993077e+00 -5.38655698e-01 -8.88684541e-02 ... -2.09928370e+00 -6.82016075e-01 1.36601806e+00] [ 8.23677778e-01 -3.54379326e-01 -7.55394638e-01 ... -1.32001984e+00 1.57119715e+00 -3.46263766e-01] [-1.77151132e+00 -1.23556197e+00 -1.19882846e+00 ... -1.21722305e+00 -7.09970951e-01 1.10798359e-01]]]] [[[[ 5.10304496e-02 -1.17188942e+00 -5.05951047e-01 ... -1.29570797e-01 2.26620927e-01 1.08342397e+00] [-1.34385198e-01 2.32448006e+00 7.70157278e-01 ... 1.17125976e+00 -4.72838610e-01 -5.68784893e-01] [ 2.17751098e+00 -1.71304691e+00 4.47307736e-01 ... 7.14793742e-01 8.90373170e-01 -1.12301775e-03] ... [ 1.14842844e+00 1.95512488e-01 7.54491091e-01 ... 1.89592764e-01 -2.18040720e-02 -1.73552191e+00] [-1.75744581e+00 -2.56810188e+00 8.29037607e-01 ... 
-1.30411839e+00 -8.99530888e-01 -1.49974477e+00] [ 8.83812010e-01 3.40120673e-01 -1.25535250e+00 ... 1.35016680e-01 5.02391942e-02 -1.45054638e+00]] [[ 2.11319506e-01 9.65819418e-01 -6.98299408e-01 ... 7.38326788e-01 -1.17583966e+00 6.37149513e-01] [ 1.01193094e+00 9.36032653e-01 -7.94887170e-02 ... 1.30310953e-01 -2.83174187e-01 5.47101974e-01] [-3.32226634e-01 -8.32932591e-01 2.10288048e+00 ... 3.09035093e-01 5.18182158e-01 8.50828826e-01] ... [-5.39365709e-01 -7.87340641e-01 3.21422786e-01 ... -2.70066082e-01 -7.02788770e-01 -8.32269549e-01] [-5.69183528e-01 -7.96778142e-01 -3.30427229e-01 ... 2.94862390e-01 -8.57720912e-01 8.01495075e-01] [-1.20461977e+00 -8.64214122e-01 7.09435463e-01 ... 3.21220064e+00 2.20658207e+00 2.72084832e-01]] [[ 2.56390631e-01 -1.53609052e-01 -1.59481335e+00 ... 1.70024082e-01 -1.72250524e-01 -1.74148381e+00] [-3.56207974e-02 1.47171938e+00 -1.43593395e+00 ... 3.28702301e-01 5.02274096e-01 4.19638939e-02] [ 3.05491388e-01 -5.44941604e-01 1.77120543e+00 ... -1.25778884e-01 2.12931067e-01 -1.21789956e+00] ... [-2.40921721e-01 9.94811058e-01 2.17571783e+00 ... 1.00841790e-01 -9.51620162e-01 6.82138562e-01] [-1.47646606e+00 9.02083755e-01 3.93458784e-01 ... 1.35783160e+00 -6.70377165e-02 6.46039605e-01] [ 2.45421216e-01 -1.35444307e+00 -1.89716887e+00 ... 9.14906144e-01 -9.69018042e-01 -9.96266305e-01]] ... [[-6.01083636e-01 8.78339529e-01 1.93681240e+00 ... -1.21388865e+00 3.74708563e-01 1.45102847e+00] [-1.02718997e+00 -5.72324753e-01 1.53157759e+00 ... -1.25035679e+00 6.51333332e-01 4.46078509e-01] [ 2.23394588e-01 -1.55047202e+00 1.29770470e+00 ... 1.77798247e+00 2.83100426e-01 7.91553557e-01] ... [ 9.45012942e-02 -1.54002756e-01 -6.34337246e-01 ... -8.26404333e-01 1.09224290e-01 7.70764612e-03] [-4.56405699e-01 -8.12246442e-01 3.71490002e-01 ... -2.24411488e-01 -5.83231747e-01 3.91929150e-01] [ 1.39323080e+00 -8.90805066e-01 -1.14094031e+00 ... 
3.14323217e-01 8.20892490e-03 4.84547943e-01]] [[ 4.92902994e-01 -1.21108659e-01 9.58920419e-02 ... -1.72349349e-01 -8.80407929e-01 1.92149246e+00] [ 1.10345885e-01 -2.43456233e-02 2.33047694e-01 ... -4.30961400e-01 4.21965063e-01 6.88259482e-01] [-2.41407871e+00 -1.00145578e+00 -1.26688504e+00 ... 1.02254367e+00 1.07131696e+00 5.22094190e-01] ... [-2.12843418e-01 1.39645731e+00 -2.34677613e-01 ... 2.12667680e+00 6.71097636e-01 2.86892176e+00] [ 1.42674339e+00 1.19153821e+00 6.94956064e-01 ... 2.22300720e+00 3.56457263e-01 -1.19430721e+00] [-6.48975670e-01 -4.27290916e-01 -7.23500371e-01 ... -4.30388391e-01 1.32298723e-01 -5.27345300e-01]] [[ 2.48119816e-01 7.98852980e-01 7.06610680e-01 ... -2.89776951e-01 -1.79913628e+00 -5.56201398e-01] [ 8.59657466e-01 -1.65758491e-01 -4.37232763e-01 ... 2.95133814e-02 -1.62313771e+00 4.75051999e-01] [ 6.26549602e-01 -1.80315876e+00 1.13883722e+00 ... -1.92106855e+00 6.54789269e-01 -5.37348449e-01] ... [-6.36574924e-01 -1.08781874e+00 -1.68502510e+00 ... -2.06108645e-01 -3.72608900e-01 -1.09696233e+00] [-1.03903401e+00 1.42287743e+00 2.32028011e-02 ... -8.64781201e-01 -1.45135212e+00 -8.85721385e-01] [-3.37012172e-01 1.12872636e+00 -4.46239591e-01 ... 7.26883769e-01 -1.10299742e+00 -2.17369819e+00]]] [[[-6.51304781e-01 2.83224463e-01 1.15609908e+00 ... 3.15294683e-01 -5.96822083e-01 -4.29417342e-01] [ 9.14697230e-01 -7.44813204e-01 -7.97436178e-01 ... 3.67876552e-02 -9.34964180e-01 4.09475058e-01] [-1.17301106e+00 6.40033126e-01 -1.27649736e+00 ... -2.36623836e+00 1.27776718e+00 -4.66635972e-01] ... [-3.32251668e-01 7.29708672e-02 4.98140663e-01 ... -5.06591022e-01 2.29875684e+00 -2.66604161e+00] [ 2.55095267e+00 -1.13464367e+00 6.79660439e-01 ... -2.17659980e-01 -2.80457705e-01 9.47552681e-01] [ 3.48576218e-01 -8.27449188e-02 -2.23598003e-01 ... 7.39052773e-01 -5.26744947e-02 -2.06051040e+00]] [[ 1.88887835e-01 -2.70973802e-01 6.09943688e-01 ... 
-1.76639473e+00 -6.67787865e-02 1.54585347e-01] [ 1.41459048e+00 1.94229531e+00 5.37737191e-01 ... -1.75918829e+00 -3.07487369e-01 -3.18525076e-01] [ 1.20179522e+00 -8.81579101e-01 1.06364536e+00 ... -3.99451889e-02 -2.01443091e-01 -4.85639244e-01] ... [-7.73452461e-01 1.95998228e+00 8.97924781e-01 ... 8.44312727e-01 1.53202498e+00 -8.02861154e-01] [ 2.02382946e+00 -7.32421875e-01 1.19988465e+00 ... -2.29342744e-01 2.70453000e+00 -1.59917378e+00] [-1.49486673e+00 -2.64620692e-01 1.58377960e-01 ... 1.34004998e+00 -1.76550448e-01 -1.60233229e-01]] [[-6.32328570e-01 2.52699828e+00 -4.81998861e-01 ... -2.77362776e+00 8.56816113e-01 -2.52536982e-01] [ 9.63567078e-01 -8.27614963e-01 7.25930333e-01 ... 4.06930864e-01 1.49666178e+00 -5.42381585e-01] [-3.68610322e-01 2.59868956e+00 2.56516766e-02 ... -6.75500989e-01 9.96334255e-01 7.33941317e-01] ... [-5.47166526e-01 3.24545532e-01 4.12256092e-01 ... 1.29475617e+00 2.02154636e-01 -8.98055673e-01] [-1.07923359e-01 -1.63239288e+00 -1.09994662e+00 ... -1.36560202e+00 2.77511012e-02 2.04177380e+00] [ 1.45771480e+00 3.67926449e-01 -9.55202758e-01 ... -1.51229072e+00 1.53746688e+00 -2.65676856e-01]] ... [[-1.99571297e-01 5.30568898e-01 -1.41337919e+00 ... -7.64672235e-02 9.03310180e-01 1.96113169e-01] [ 1.51173520e+00 -1.16243243e+00 -1.09989786e+00 ... -1.47422826e+00 1.32999051e+00 -6.98963463e-01] [-9.38338816e-01 -9.05074738e-03 -1.11521757e+00 ... -2.44238687e+00 4.21763033e-01 1.54541826e+00] ... [-2.79266119e+00 -5.29997945e-01 -3.06963235e-01 ... -7.72101283e-02 9.42228556e-01 2.85187602e-01] [ 8.27596009e-01 5.79158485e-01 4.28210050e-01 ... 1.34563899e+00 1.08061087e+00 -5.47848284e-01] [ 2.68521339e-01 1.15483439e+00 4.49803799e-01 ... 6.64329827e-02 -1.59341109e+00 1.98366255e-01]] [[ 2.10008159e-01 -1.64996684e+00 1.27456486e+00 ... -8.35051298e-01 -1.11789715e+00 1.03969789e+00] [-1.85816097e+00 -1.56421155e-01 1.27162504e+00 ... 
-6.12215102e-01 -5.40713191e-01 1.02459192e-01] [-1.09519339e+00 3.03748131e-01 1.05506516e+00 ... -6.07032590e-02 -5.35866916e-01 4.19755012e-01] ... [ 7.14988708e-02 1.91461027e-01 -4.69611019e-01 ... -1.91694602e-01 1.10829163e+00 1.76301196e-01] [-2.15103850e-02 -1.98214519e+00 -4.14362818e-01 ... -1.06668139e+00 -8.79959464e-01 -2.01775819e-01] [-1.83617845e-01 -9.51694191e-01 -1.41568387e+00 ... -7.14036942e-01 7.20410764e-01 -4.79581684e-01]] [[ 1.12141538e+00 7.44460404e-01 9.35607791e-01 ... 3.61093262e-04 -2.35179886e-01 -4.28773224e-01] [ 1.80283034e+00 1.87409794e+00 -1.23610592e+00 ... 7.49695599e-01 -1.01130629e+00 1.44622922e+00] [-3.30996215e-01 2.47884297e+00 -4.36572522e-01 ... -1.38731682e+00 1.54275107e+00 -9.69251275e-01] ... [ 8.71978760e-01 1.09628606e+00 -2.21244621e+00 ... -5.99459291e-01 -4.34037745e-01 -1.74396619e-01] [-1.35995701e-01 5.71462750e-01 -3.85622889e-01 ... 1.58485556e+00 4.35842872e-01 -1.41410100e+00] [ 1.05019617e+00 5.53177111e-02 -1.97370386e+00 ... 1.85937703e-01 3.94698411e-01 7.85120606e-01]]] [[[ 5.85409760e-01 5.40915370e-01 -2.35498846e-01 ... 5.45001566e-01 1.47679001e-01 1.62059975e+00] [ 1.86394930e+00 -5.95545352e-01 -1.52789330e+00 ... 1.08239867e-01 -5.62755112e-03 -1.67766094e-01] [-2.19436359e+00 -1.84422171e+00 6.13333881e-01 ... -3.04783404e-01 1.14626241e+00 -8.72351050e-01] ... [ 6.12180889e-01 -1.39728272e+00 1.42313206e+00 ... -1.99986711e-01 -8.99502411e-02 -5.64875007e-01] [-2.87409216e-01 1.36066794e+00 -5.21139085e-01 ... -5.98187983e-01 -1.00442386e+00 -9.91618454e-01] [ 1.95116436e+00 -1.37216222e+00 -2.11294746e+00 ... 1.65096104e-01 -1.40293896e+00 1.31965563e-01]] [[ 8.42331409e-01 7.16434419e-01 4.50551480e-01 ... 1.33022189e+00 -1.93084586e+00 1.08268905e+00] [-1.92439839e-01 1.38956106e+00 -6.15254641e-01 ... -7.72727072e-01 -1.15010561e-03 -3.87185901e-01] [ 8.08907688e-01 1.33000875e+00 1.16405129e+00 ... 4.88109618e-01 -3.18373293e-01 -5.43524683e-01] ... 
[ 9.07229558e-02 -8.67570579e-01 1.95616230e-01 ... 1.29951134e-01 1.09702504e+00 8.26948062e-02] [ 3.72993410e-01 9.75838840e-01 -8.77439138e-03 ... 1.51137382e-01 -1.81607401e+00 7.70227492e-01] [-2.40863752e+00 -7.17144758e-02 1.42891359e+00 ... -5.55753648e-01 6.52419925e-01 -1.34256029e+00]] [[-1.62321138e+00 1.96106184e+00 5.73335052e-01 ... -1.30650330e+00 -1.63381979e-01 -5.55523932e-01] [-1.40324759e+00 -1.46059799e+00 -1.83149949e-02 ... -1.67264163e-01 3.79305542e-01 -3.50221276e-01] [-4.20464247e-01 1.17354226e+00 6.69868171e-01 ... 1.32696673e-01 -5.78683138e-01 1.23605895e+00] ... [-2.04609776e+00 -1.72006488e+00 -5.60611546e-01 ... 3.44797194e-01 1.89472044e+00 -8.11928213e-01] [-3.64800692e-01 -7.45073259e-01 -4.65743303e-01 ... -8.67133364e-02 -9.01303351e-01 1.67318475e+00] [ 4.60695177e-01 -5.75002372e-01 -1.01301491e+00 ... 1.68020338e-01 4.94613737e-01 9.65670198e-02]] ... [[-1.61054209e-01 -1.80724895e+00 -4.64241624e-01 ... -1.03714263e+00 -1.35132098e+00 -2.06839418e+00] [ 2.93531492e-02 -1.59992766e+00 1.63591051e+00 ... -2.33220100e+00 -8.24843496e-02 -1.13454573e-02] [ 1.42869186e+00 -1.06527776e-01 7.30277836e-01 ... 1.23539388e+00 4.55949605e-01 1.26477373e+00] ... [-6.57714069e-01 -4.47294265e-02 -3.89293092e-03 ... -5.93477130e-01 2.45367214e-01 5.61276257e-01] [-3.19173843e-01 -1.29607165e+00 -2.18036175e-01 ... -3.74800228e-02 1.73352003e+00 2.55899262e+00] [ 1.53192067e+00 5.64440668e-01 1.19625342e+00 ... -2.86080074e+00 -1.09460151e+00 1.13579631e+00]] [[-8.15285146e-02 1.08749121e-01 -8.96129429e-01 ... 3.12698364e-01 2.22038373e-01 1.14600408e+00] [ 1.01629488e-01 3.70001078e-01 -2.30915412e-01 ... 1.41660798e+00 4.48860675e-01 -3.48375082e-01] [-5.44085085e-01 1.19233525e+00 -6.56487584e-01 ... 2.40771666e-01 -8.38946342e-01 5.26163876e-01] ... [-7.93284357e-01 -7.21366048e-01 -9.79975522e-01 ... -8.95450950e-01 1.10111010e+00 -1.13623047e+00] [-1.30308783e+00 2.15227985e+00 1.35749686e+00 ... 
1.93652555e-01 1.80541068e-01 -5.19523323e-01] [ 1.06059229e+00 1.62049389e+00 -1.08911967e+00 ... -7.85243750e-01 1.66393423e+00 2.13997439e-01]] [[-1.82456839e+00 3.53723824e-01 1.35841334e+00 ... -3.93985063e-01 2.37239420e-01 3.83048087e-01] [-2.92710811e-01 6.67735994e-01 4.56791192e-01 ... 3.49939346e-01 -2.99538779e+00 1.16799784e+00] [-5.48224986e-01 2.58066654e+00 7.68309355e-01 ... -7.18403459e-01 1.28971711e-01 1.85158122e+00] ... [ 1.95097756e+00 6.15424395e-01 -1.58712971e+00 ... -7.04671293e-02 -6.60599649e-01 -2.50122577e-01] [-9.35625210e-02 7.77940571e-01 1.33810616e+00 ... -3.23354125e-01 -8.47992778e-01 -1.02620924e+00] [-2.75640577e-01 -2.53348976e-01 -5.46449721e-01 ... -7.44783938e-01 -1.20031106e+00 -1.06650853e+00]]] [[[ 3.48394327e-02 -3.72410476e-01 1.22407973e+00 ... 1.11961198e+00 -3.99762124e-01 1.86870885e+00] [-1.30580544e+00 3.27959120e-01 9.14312720e-01 ... 6.70679152e-01 7.75882423e-01 -1.89762607e-01] [-3.15038301e-02 5.12257218e-02 8.72938037e-01 ... -1.98772587e-02 -1.12087810e+00 -7.54341900e-01] ... [-1.22456563e+00 -1.34807885e+00 -1.49924219e-01 ... 1.10361874e+00 -2.12606981e-01 8.11943829e-01] [ 7.91806400e-01 4.47068177e-02 -8.85892361e-02 ... -1.46869048e-01 -3.00106108e-01 7.37481296e-01] [-5.69763854e-02 -1.37573183e+00 5.88806927e-01 ... -1.39473915e+00 -4.48755413e-01 -5.61350584e-01]] [[-1.30897272e+00 -7.60208309e-01 -2.18434310e+00 ... 4.58885506e-02 3.79354477e-01 1.16613388e+00] [-5.39407670e-01 -1.36723936e+00 8.36695969e-01 ... 1.66161954e+00 -1.54191136e+00 1.31105876e+00] [-1.07359953e-01 1.06368649e+00 2.46262506e-01 ... 3.30664635e-01 -1.12307835e+00 -2.19654918e-01] ... [-6.22105300e-01 -5.22812046e-02 -2.80369043e-01 ... 9.64236915e-01 1.44962299e+00 -6.51076496e-01] [-7.62743711e-01 3.39238167e-01 6.70649886e-01 ... -7.88070381e-01 7.69662201e-01 -8.29351723e-01] [ 7.68495560e-01 -1.61522853e+00 1.98750108e-01 ... 
3.40957731e-01 1.49511874e+00 6.72512770e-01]] [[ 9.31903481e-01 1.38336673e-01 -8.34206760e-01 ... -2.31500670e-01 -1.91965270e+00 -5.84312379e-01] [ 1.31889999e+00 -1.95904791e+00 2.19709530e-01 ... 1.23127294e+00 -2.23957038e+00 9.58907843e-01] [-9.66333449e-01 -7.42967010e-01 -8.59485924e-01 ... 9.28956389e-01 -5.16424179e-01 -1.04216731e+00] ... [ 2.32341576e+00 1.82194567e+00 1.01715505e+00 ... 2.41106972e-01 -7.11867273e-01 8.06856394e-01] [ 1.72504961e+00 -4.07680780e-01 -5.58256328e-01 ... -1.08185840e+00 5.93255103e-01 1.25966680e+00] [ 8.40815976e-02 1.43724620e+00 -1.50290751e+00 ... 3.11783701e-01 -8.69427025e-01 1.17875671e+00]] ... [[-7.14873731e-01 -2.39871049e+00 -2.18258905e+00 ... 2.10058570e+00 7.90230095e-01 -3.21539491e-01] [-1.63254786e+00 -2.64357090e+00 -3.63842219e-01 ... 1.01185715e+00 1.48157468e-02 4.12453443e-01] [ 9.22481596e-01 1.76378489e-01 -4.10136461e-01 ... 1.68224782e-01 -1.69769144e+00 -1.09662306e+00] ... [-2.47012991e-02 5.02005778e-02 8.84875834e-01 ... -1.73130393e+00 -5.41342914e-01 -1.99996322e-01] [ 6.22051954e-01 -1.18686223e+00 -5.95338821e-01 ... -1.16854534e-01 2.78463177e-02 1.82866976e-01] [-1.16941059e+00 2.37573549e-01 4.34925616e-01 ... -1.74477005e+00 -1.48033190e+00 3.08105588e+00]] [[-4.41232055e-01 -7.51869142e-01 1.52080262e+00 ... -1.14013147e+00 -2.59727788e+00 1.65087533e+00] [ 3.47559422e-01 -4.21872377e-01 5.55789053e-01 ... 2.06121111e+00 -1.15747201e+00 1.22650333e-01] [ 8.44579577e-01 1.84118116e+00 -6.52958512e-01 ... 4.32311714e-01 1.99344888e-01 -6.14718199e-01] ... [ 1.25179195e+00 -1.52625120e+00 -1.05705714e+00 ... -1.88545704e+00 9.48536992e-01 3.63338143e-01] [ 1.43574905e+00 -1.07838476e+00 2.00227112e-01 ... 8.85685161e-02 -4.50809635e-02 3.72589827e-01] [ 4.21636939e-01 -1.67120382e-01 -3.83318067e-01 ... -5.72570980e-01 -6.34353280e-01 -1.57795683e-01]] [[-9.97910555e-03 8.24420393e-01 4.20085669e-01 ... 
9.13487494e-01 7.28325620e-02 -6.64110541e-01] [-1.18928397e+00 1.61127079e+00 -1.12513018e+00 ... -1.22166015e-01 -8.93189088e-02 9.07548249e-01] [ 1.09892763e-01 -8.05522203e-01 1.54134488e+00 ... -5.01990803e-02 5.20576358e-01 2.08438993e+00] ... [ 8.50418150e-01 7.65229464e-01 -1.13118255e+00 ... 5.83832804e-03 -9.86087024e-01 4.27977145e-01] [ 3.18351030e-01 1.07155395e+00 2.30348420e+00 ... -1.61754692e+00 -7.69050300e-01 4.06581640e-01] [-1.70083269e-01 1.62441170e+00 8.80011916e-01 ... -4.05873746e-01 -8.07721019e-01 -1.17907369e+00]]] [[[ 5.11702061e-01 -1.05854243e-01 1.09654951e+00 ... 1.97440028e-01 -7.03700840e-01 7.59365618e-01] [-7.12829888e-01 -1.38462031e+00 -5.38535893e-01 ... 1.26638621e-01 -5.07420778e-01 -1.88454080e+00] [-6.27312779e-01 1.05800200e+00 -2.62068248e+00 ... 2.47636184e-01 2.47778103e-01 -1.00682569e+00] ... [-2.39073306e-01 1.02238941e+00 3.75297099e-01 ... -5.09373285e-02 2.71090895e-01 -1.81555629e-01] [ 9.64270830e-01 -2.74654090e-01 -5.91863871e-01 ... 1.39552295e+00 8.24490070e-01 1.10059893e+00] [ 4.55841571e-01 1.87214136e+00 -4.27830845e-01 ... -1.70319474e+00 1.61564255e+00 -5.09547710e-01]] [[-5.77238388e-02 1.76280409e-01 1.49205649e+00 ... -2.95173556e-01 -8.00417721e-01 -1.81067675e-01] [-1.27179682e+00 -1.56986582e+00 7.01485157e-01 ... 2.01331943e-01 1.61079895e+00 1.62165010e+00] [-1.05353796e+00 6.47523880e-01 2.34859824e-01 ... 8.10894430e-01 1.09374961e-02 -1.75511444e+00] ... [ 6.40728295e-01 4.60916042e-01 9.22921300e-01 ... 1.91668940e+00 1.03555298e+00 -9.78934616e-02] [-6.03228927e-01 -1.35508299e+00 1.41936734e-01 ... 3.08904743e+00 2.59833783e-01 -1.66424364e-01] [-1.31638849e+00 -5.89194536e-01 -3.15755159e-01 ... -1.30114973e+00 -5.13599813e-01 1.24053168e+00]] [[-3.70355904e-01 -5.29310293e-02 1.23421586e+00 ... -1.70776665e-01 1.40353251e+00 -1.27708912e+00] [-5.37567258e-01 -1.76962733e-01 -8.61637652e-01 ... 
-9.99141037e-01 1.89012420e+00 1.04644847e+00] [-1.61356553e-02 2.42072865e-01 4.03108329e-01 ... 5.07316351e-01 -6.18547797e-01 1.08107522e-01] ... [ 5.35515249e-01 2.43373930e-01 -1.29317373e-01 ... -2.73280096e+00 5.44037938e-01 -8.27189028e-01] [ 9.67537999e-01 -6.88730717e-01 5.13032377e-01 ... 4.25416023e-01 4.46160525e-01 1.69559467e+00] [-1.08504844e+00 2.17613682e-01 5.62191427e-01 ... -1.65088904e+00 -8.17813337e-01 1.36265957e+00]] ... [[ 3.08221430e-01 1.06779003e+00 -1.88277113e+00 ... -2.89345354e-01 -1.73563707e+00 1.67610693e+00] [ 2.16520071e+00 1.82049584e+00 7.36785293e-01 ... 1.02128349e-01 1.03880906e+00 -5.82405508e-01] [-6.63962066e-01 -1.43638015e+00 -1.09548473e+00 ... 1.26370156e+00 1.26780361e-01 -9.56357777e-01] ... [ 9.42773581e-01 4.94155914e-01 -3.47941101e-01 ... 1.19884622e+00 3.19199890e-01 9.04697895e-01] [ 6.71546757e-01 7.27165103e-01 -1.30771601e+00 ... 6.81360126e-01 -5.84531188e-01 4.60580029e-02] [-1.83972135e-01 1.91297680e-01 4.58926201e-01 ... 3.45467567e-01 7.18524098e-01 -5.41702688e-01]] [[ 4.95671868e-01 2.06295800e+00 2.85839558e-01 ... 6.06042206e-01 1.53167754e-01 4.73855913e-01] [ 2.33903185e-01 -1.08213985e+00 -1.70924389e+00 ... -3.61126274e-01 2.80622661e-01 1.86232284e-01] [-8.45804736e-02 -4.29486841e-01 1.14526701e+00 ... -1.16318476e+00 2.13913202e+00 -7.94024527e-01] ... [-3.57486635e-01 -8.28325927e-01 4.64833260e-01 ... -1.00058722e+00 2.79764563e-01 2.15426826e+00] [-1.41778159e+00 -3.47839266e-01 3.01683068e-01 ... -8.62224817e-01 1.08200349e-01 -4.43628609e-01] [ 1.06939161e+00 5.57623804e-01 8.85903358e-01 ... -6.89424217e-01 6.65470183e-01 8.68103683e-01]] [[ 2.19867897e+00 -7.05495775e-01 -1.12747836e+00 ... 1.54076830e-01 -1.29677296e-01 2.05550241e+00] [ 7.09329903e-01 -1.95160046e-01 1.34411454e+00 ... -8.28461826e-01 3.59471709e-01 5.96444070e-01] [-6.46322072e-01 -3.46402019e-01 2.13080168e+00 ... 8.24996114e-01 -1.47721134e-02 -2.12349367e+00] ... 
[-1.75453290e-01 -3.15599680e-01 -8.65734875e-01 ... 2.17348188e-01 -6.44554317e-01 -1.02047849e+00] [ 1.05446899e+00 2.03912333e-02 -1.90573394e+00 ... -7.50683606e-01 3.48951817e-01 7.87854314e-01] [-7.44449556e-01 -9.88101602e-01 -7.81365991e-01 ... -2.38799691e-01 -1.51476109e+00 -3.94561470e-01]]] [[[ 5.36769927e-01 -5.29076636e-01 2.89892219e-02 ... -4.24215347e-01 -2.32221484e-01 8.57958615e-01] [-9.43783391e-03 -6.21393859e-01 -7.72634566e-01 ... -1.25410986e+00 -5.02737939e-01 -9.83390212e-01] [ 5.13302326e-01 1.40496397e+00 -2.51999289e-01 ... -6.21772170e-01 -1.10084903e+00 -2.66157240e-01] ... [ 2.69670963e-01 4.79976207e-01 -1.09670922e-01 ... 1.25967145e+00 -5.19089043e-01 -2.08914256e+00] [-2.99274176e-02 -1.54365003e+00 -2.43530169e-01 ... 1.11212027e+00 3.62879097e-01 -4.24698651e-01] [ 3.75260234e-01 -7.54906118e-01 3.23345155e-01 ... -2.76848644e-01 1.65773202e-02 -1.27536757e-02]] [[ 4.24601048e-01 1.24263115e-01 1.19744980e+00 ... -6.24434590e-01 2.34802410e-01 6.30853891e-01] [ 1.29856062e+00 -2.21944427e+00 4.17709649e-01 ... 1.10100067e+00 1.65077257e+00 -1.83760118e+00] [ 1.04744554e+00 1.41050720e+00 -2.39855021e-01 ... -7.67271340e-01 2.96360660e+00 6.17322862e-01] ... [-7.42036164e-01 4.16352808e-01 -7.38490701e-01 ... 1.79083258e-01 -1.17671502e+00 -6.17699265e-01] [ 7.12564141e-02 4.77226019e-01 7.63981402e-01 ... -9.37766060e-02 1.42702806e+00 6.69088244e-01] [ 1.64247870e-01 -4.06811833e-01 2.31248140e-02 ... 4.51530367e-01 4.33836281e-01 5.19624293e-01]] [[ 1.66463113e+00 -7.03654945e-01 -5.87346137e-01 ... -1.57612884e+00 1.19827783e+00 4.30866718e-01] [ 1.19669557e+00 -1.97698712e+00 -1.18915367e+00 ... 1.90768659e+00 1.60113680e+00 -1.58373904e+00] [ 7.60306358e-01 -1.01409972e+00 -9.71136928e-01 ... -9.26183835e-02 -1.14296520e+00 1.96788430e+00] ... [ 4.42815185e-01 5.39632499e-01 -9.93585959e-02 ... 7.98913062e-01 1.85886347e+00 5.04300177e-01] [ 3.52966398e-01 7.58546829e-01 -8.79477784e-02 ... 
-2.80747376e-02 -2.35944128e+00 4.16882336e-01] [ 7.06868052e-01 -3.85682046e-01 -2.87807416e-02 ... -1.30719793e+00 -1.30415571e+00 1.95352185e+00]] ... [[ 1.54350865e+00 -2.30557933e-01 -1.04549789e+00 ... -6.81589395e-02 3.01176429e-01 8.43397200e-01] [ 2.68778712e-01 9.73946869e-01 1.84937879e-01 ... -1.14657605e+00 -3.27185303e-01 -2.50752282e+00] [ 4.41435218e-01 -8.19467828e-02 -4.43526536e-01 ... 4.66941088e-01 7.39521027e-01 1.95177865e+00] ... [-8.64472687e-01 2.05723912e-01 1.01820283e-01 ... 1.23305988e+00 5.91746211e-01 6.14663243e-01] [ 2.19906998e+00 -9.82372761e-01 6.31608486e-01 ... 7.13750660e-01 -2.92372853e-01 5.38165987e-01] [-8.70262802e-01 6.93627417e-01 1.17864692e+00 ... 6.30923569e-01 -5.01065217e-02 -6.40845418e-01]] [[ 6.45091236e-01 3.54848534e-01 -4.90869641e-01 ... -1.33637047e+00 -9.00346562e-02 1.28469074e+00] [ 4.26029176e-01 -7.63582587e-01 -2.40161702e-01 ... 1.12478524e-01 -5.09543300e-01 1.57053745e+00] [-2.28583425e-01 1.37331605e+00 -1.77975312e-01 ... -1.50692696e-02 -4.22650486e-01 5.44047415e-01] ... [-1.04266554e-01 -2.42792419e-03 -1.92898348e-01 ... 1.05527294e+00 1.71388102e+00 5.18788159e-01] [ 5.95890403e-01 -6.72036558e-02 -1.35090017e+00 ... 4.75274831e-01 8.39732811e-02 -1.41056538e+00] [-1.12646985e+00 -4.17745322e-01 7.88860440e-01 ... -8.43533099e-01 -2.87685186e-01 1.48827350e+00]] [[ 2.09931314e-01 3.22914332e-01 2.16609463e-01 ... -1.71421969e+00 -5.97336590e-01 -2.78078437e-01] [-7.12160945e-01 -8.18483293e-01 -6.55915856e-01 ... 2.50876737e+00 -1.03185713e+00 9.12325621e-01] [-3.26470435e-01 -1.57785940e+00 8.71113956e-01 ... -1.01714432e+00 -1.94569290e+00 -2.06038266e-01] ... [-1.39008299e-01 8.14380109e-01 -1.39469564e-01 ... -1.08143777e-01 7.84018815e-01 -7.54694417e-02] [ 2.64734006e+00 4.33352411e-01 -2.94168711e-01 ... -9.19128358e-01 -4.90225077e-01 -2.15984726e+00] [ 1.43570125e+00 -3.48431200e-01 -1.91240764e+00 ... 
-1.51165700e+00 1.45936096e+00 -1.89690804e+00]]]] [[[[ 1.28945231e+00 6.40518606e-01 6.75180852e-01 ... -1.41681814e+00 -1.71078014e+00 -7.33489037e-01] [-1.20457685e+00 1.92087200e-02 -1.02622434e-01 ... -1.51259446e+00 5.91593444e-01 -1.34934962e+00] [ 3.77467453e-01 2.18622994e+00 6.91974342e-01 ... -4.42237593e-03 5.77555180e-01 1.39654234e-01] ... [ 1.41859317e+00 8.65943253e-01 1.39423418e+00 ... -8.27933908e-01 -2.27129549e-01 8.99207830e-01] [ 4.37950552e-01 4.15398210e-01 -6.37194097e-01 ... 1.10757375e+00 1.70418292e-01 1.08800244e+00] [ 8.01235914e-01 -9.70570207e-01 -5.75908065e-01 ... -3.17639858e-01 1.20523465e+00 1.36988270e+00]] [[-1.10144198e+00 -3.79909545e-01 3.13864321e-01 ... 2.38786563e-01 2.36242741e-01 1.76349193e-01] [ 1.95718336e+00 -1.30807674e+00 -6.84510767e-01 ... 1.33579507e-01 1.56676412e-01 1.21368003e+00] [-1.81816626e+00 -2.04937279e-01 7.84156099e-02 ... -1.17523706e+00 5.18855304e-02 6.86689854e-01] ... [ 5.33577383e-01 -1.29718375e+00 -4.51248169e-01 ... 7.04661250e-01 -2.34751865e-01 2.57022262e-01] [ 1.11473334e+00 1.41571462e+00 1.08084202e+00 ... 1.23554431e-02 8.71712565e-01 -1.28323829e+00] [ 1.13061929e+00 1.14729393e+00 8.75033319e-01 ... -3.72978777e-01 1.56976655e-01 8.26828361e-01]] [[ 1.03657770e+00 9.61894810e-01 1.16381034e-01 ... 1.37740660e+00 2.01546288e+00 1.07275045e+00] [-8.88713747e-02 -7.97710836e-01 -2.91581690e-01 ... -7.98010767e-01 -1.63318145e+00 2.52432913e-01] [ 9.71647620e-01 1.36551589e-01 -1.09743106e+00 ... 4.10086453e-01 1.22200370e-01 1.56858921e+00] ... [ 4.40892912e-02 8.35599661e-01 -1.49200261e-01 ... -5.01497686e-01 -1.58593512e+00 -3.23122703e-02] [-7.83416986e-01 -1.78958803e-01 1.84637451e+00 ... -1.02160239e+00 -1.41929165e-01 -9.92814243e-01] [ 1.26651800e+00 -1.14730084e+00 9.61129904e-01 ... 1.33327814e-02 -1.76693845e+00 -6.24904931e-01]] ... [[ 4.51607049e-01 -4.54382032e-01 7.32994258e-01 ... 
8.39294016e-01 7.15392083e-02 -1.04899812e+00] [ 1.23403418e+00 -7.30733395e-01 -1.02175629e+00 ... -1.32874548e+00 -1.20843995e+00 -6.39860213e-01] [ 1.94484699e+00 5.91293097e-01 9.40437078e-01 ... -1.41682649e+00 2.75513619e-01 -4.28968132e-01] ... [ 1.56224573e+00 6.64771795e-01 1.07494569e+00 ... -4.01412696e-01 1.06186759e+00 8.61969650e-01] [ 7.76298463e-01 -9.72019196e-01 1.02393878e+00 ... 2.30661571e-01 -1.40774381e+00 -4.38224286e-01] [ 5.39217949e-01 -9.09371912e-01 -2.59736389e-01 ... 7.44883716e-01 -7.29408681e-01 4.70569909e-01]] [[ 5.16110539e-01 -1.06146407e+00 -9.02056336e-01 ... -1.41740167e+00 -9.58559334e-01 -3.54660183e-01] [-5.01398265e-01 -1.03371751e+00 4.54230875e-01 ... -2.50933796e-01 -1.43216765e+00 1.42108834e+00] [ 6.32668674e-01 2.84041822e-01 -4.46338415e-01 ... -1.38027859e+00 -2.00055218e+00 3.07361096e-01] ... [ 1.15979433e+00 3.38425487e-02 -2.43923739e-01 ... -3.01947892e-01 1.85646072e-01 -1.17304504e+00] [ 6.53655708e-01 2.95809776e-01 -1.14361458e-01 ... 2.38321066e-01 1.04877830e+00 3.59685421e-01] [ 4.89373356e-01 1.84727550e-01 -1.39230585e+00 ... -4.60935712e-01 1.14865124e+00 1.36707377e+00]] [[-9.25496638e-01 -4.49632883e-01 -1.61036241e+00 ... -7.33928978e-01 4.02681768e-01 1.09126878e+00] [-4.78602767e-01 1.65974057e+00 4.48695272e-01 ... -1.42885482e+00 -4.99097615e-01 1.72895503e+00] [ 1.03608131e+00 -6.20818973e-01 1.94518316e+00 ... 1.71498096e+00 2.10589081e-01 7.98473954e-01] ... [ 1.87551892e+00 -8.88549507e-01 4.32341337e-01 ... -4.27218169e-01 -7.89700598e-02 -9.10437226e-01] [ 7.85954416e-01 -2.79838830e-01 1.14897370e+00 ... -7.75655389e-01 -8.87210369e-01 -8.42794031e-02] [-1.19140595e-01 4.62880880e-01 -2.21076086e-01 ... 8.97275031e-01 -1.86432898e-01 5.97045958e-01]]] [[[ 7.13418543e-01 -5.24140120e-01 -2.85790622e-01 ... -2.28398132e+00 -4.26159650e-01 3.14065814e-01] [ 8.55724335e-01 -1.05045378e+00 -1.73582566e+00 ... 
5.83153665e-02 1.94721842e+00 1.21325469e+00] [-7.24119008e-01 -8.34955156e-01 -7.95898676e-01 ... 2.16244921e-01 1.25890875e+00 1.12214053e+00] ... [-5.25563776e-01 8.78603935e-01 2.69452810e-01 ... -9.28062856e-01 5.35266995e-01 1.12517476e+00] [ 3.78921568e-01 -1.24203647e-02 5.48137665e-01 ... -6.60764277e-01 -2.34410310e+00 1.23420799e+00] [ 7.04225004e-01 -1.38436988e-01 3.56435865e-01 ... 4.62718487e-01 3.83326203e-01 5.31175137e-01]] [[ 1.60977495e+00 -1.04725361e+00 1.02446914e+00 ... 2.52225548e-01 -1.57188043e-01 -1.32712114e+00] [-2.24090981e+00 -1.73592961e+00 -2.21657246e-01 ... -3.05247605e-01 5.49181283e-01 -2.88233101e-01] [ 1.05275738e+00 1.06582749e+00 2.65675366e-01 ... 9.51319754e-01 7.03823686e-01 4.32416022e-01] ... [-1.88409701e-01 -1.09210110e+00 5.32177925e-01 ... -5.54349005e-01 9.16306853e-01 1.73821747e-01] [ 5.60815513e-01 -9.70668256e-01 -1.99974224e-01 ... -4.96916234e-01 -2.20626406e-02 -6.17066741e-01] [-2.30148807e-01 7.45214522e-01 1.56031728e+00 ... 1.55397642e+00 9.05366421e-01 -1.94087327e+00]] [[ 1.66219151e+00 1.25802040e+00 -5.03707707e-01 ... -1.71684638e-01 4.46742505e-01 1.49694991e+00] [-7.05374718e-01 -6.12285554e-01 -6.90033436e-02 ... -3.52658868e-01 -6.01451397e-01 -1.84149861e-01] [-7.15587735e-01 4.89035904e-01 -1.66756177e+00 ... -2.71037817e-01 -2.24992847e+00 1.63341418e-01] ... [ 4.68730718e-01 -1.99397564e-01 1.34880340e+00 ... -1.65452138e-02 -2.34829299e-02 1.16522121e+00] [ 1.86840081e+00 6.53270662e-01 2.04488087e+00 ... 3.10538203e-01 -1.81138277e+00 6.11992419e-01] [ 1.50718197e-01 -7.39822745e-01 1.20838925e-01 ... -1.62052751e-01 -1.46655858e-01 5.35751641e-01]] ... [[-1.13528383e+00 -1.69242322e+00 1.91365445e+00 ... 5.46165526e-01 8.68454039e-01 1.80062771e+00] [ 1.32822990e-01 -7.68545330e-01 -8.03092480e-01 ... 3.76414657e-02 -4.42871064e-01 -1.20173299e+00] [ 4.10266191e-01 3.47306490e-01 4.00698245e-01 ... -9.92914855e-01 4.77964908e-01 6.52244091e-01] ... 
[ 2.75403827e-01 -1.45427138e-01 -7.93439820e-02 ... 7.78248727e-01 8.33085597e-01 8.92733455e-01] [-1.55314058e-02 2.61369109e-01 -1.17506111e+00 ... -1.20490670e+00 -3.89545709e-01 1.46665072e+00] [ 4.62756425e-01 -3.60929817e-01 -1.48869801e+00 ... -2.50169009e-01 3.44330758e-01 -2.21743155e+00]] [[ 1.29097986e+00 -1.87154576e-01 -5.46885729e-01 ... 1.21440017e+00 7.44897366e-01 -4.75987375e-01] [-1.78506613e+00 -3.08750402e-02 -2.57929355e-01 ... 1.56445158e+00 -3.27457845e-01 -1.59942794e+00] [ 7.36129820e-01 5.51685750e-01 9.71612394e-01 ... -1.40436733e+00 -1.39429307e+00 3.25770885e-01] ... [-8.67977083e-01 -3.00886244e-01 2.07443133e-01 ... -7.61594996e-02 1.00240541e+00 -8.92925799e-01] [-7.16234267e-01 1.40599445e-01 -1.59019351e-01 ... -3.64880919e-01 -4.83350366e-01 5.09132743e-01] [-2.01762900e-01 -1.18316996e+00 1.75474763e+00 ... 1.19993186e+00 -2.72489637e-01 1.33238554e+00]] [[-1.78164649e+00 7.12904036e-01 -1.63885653e-02 ... 4.48183626e-01 7.79581606e-01 1.99659908e+00] [ 2.76660532e-01 -5.88539243e-01 -9.14394498e-01 ... 1.53025436e+00 -6.97033703e-02 -1.79749942e+00] [ 5.48624098e-01 5.80692768e-01 -2.27293298e-01 ... 7.32929707e-01 -6.42199293e-02 -1.30676508e-01] ... [ 2.09652245e-01 6.92524076e-01 -2.68583804e-01 ... -1.02467668e+00 1.39415491e+00 1.77867162e+00] [ 8.15159619e-01 1.40051385e-02 -7.67434776e-01 ... -7.91400611e-01 -7.16686323e-02 -4.96026069e-01] [-7.92607248e-01 -1.62074819e-01 1.57934010e+00 ... -1.15014303e+00 -2.12493920e+00 -5.43586314e-01]]] [[[-6.30402029e-01 -1.19439125e-01 -7.26524770e-01 ... -3.50523919e-01 1.27856588e+00 8.79856408e-01] [-2.18242332e-01 -9.48687553e-01 2.61823446e-01 ... 1.04008541e-01 -2.87891603e+00 1.60544395e-01] [-1.75077140e-01 2.56574422e-01 -1.40134916e-01 ... -1.92925525e+00 -1.32903004e+00 -1.02446043e+00] ... [-5.62102854e-01 -6.33655429e-01 1.48287952e+00 ... -2.11345673e-01 2.05430016e-01 4.25361812e-01] [ 4.64737527e-02 -6.60113757e-03 1.19642889e+00 ... 
-1.23422444e-01 3.91194016e-01 -3.50231677e-01] [-5.10277390e-01 8.40270594e-02 -2.34414637e-01 ... -9.78234529e-01 -1.50541699e+00 -1.57784611e-01]] [[-2.89107502e-01 8.61023068e-01 -1.71291780e+00 ... 5.67603469e-01 1.25921056e-01 -2.30879277e-01] [-3.34935874e-01 -1.17197943e+00 -3.24505508e-01 ... -1.35477006e-01 -1.54527450e+00 2.07553911e+00] [ 1.56953943e+00 1.85878575e-02 6.01825774e-01 ... 1.90932006e-01 1.12017918e+00 -8.45284343e-01] ... [-4.90884632e-01 -5.32631993e-01 -1.57468271e+00 ... 1.00285113e+00 3.97514433e-01 1.81571627e+00] [-1.36059594e+00 3.96263629e-01 -4.56758916e-01 ... 5.47544658e-01 1.74088466e+00 1.70372143e-01] [-2.02798772e+00 -5.19822538e-01 -1.52141333e-01 ... 5.69277942e-01 7.40616918e-01 -9.75231409e-01]] [[-1.84250891e-01 5.79737067e-01 1.20336577e-01 ... 9.08416212e-01 -1.58328688e+00 6.43685937e-01] [ 9.09829676e-01 -9.26182508e-01 8.01240742e-01 ... -9.64584649e-01 -2.61760187e+00 -5.09296358e-01] [-9.74646151e-01 6.78426683e-01 7.36929178e-01 ... -1.02453187e-01 -3.76353174e-01 1.09641123e+00] ... [ 1.17517209e+00 -3.69986773e-01 8.75298858e-01 ... -1.61469066e+00 -8.58396649e-01 -1.48564875e+00] [-5.14218271e-01 -9.95172039e-02 7.60418832e-01 ... -1.76920688e+00 -7.31819570e-01 -1.84127927e-01] [-1.53536582e+00 -1.45166650e-01 -6.72966480e-01 ... 7.86230639e-02 5.37638128e-01 4.65170443e-01]] ... [[-3.93745542e-01 -1.11291252e-01 4.65014160e-01 ... 5.79302430e-01 2.86129206e-01 -5.26307011e-03] [-3.12730819e-01 2.81709284e-01 1.93994105e+00 ... -1.13360071e+00 -6.46963298e-01 -6.60365999e-01] [ 2.61225879e-01 -7.38252878e-01 -1.17864001e+00 ... -4.14602309e-01 4.40665871e-01 2.64972240e-01] ... [ 1.23760462e+00 9.04634655e-01 -2.29040265e-01 ... -2.92025685e-01 -3.18186671e-01 5.02774775e-01] [ 1.64601934e+00 -9.72522378e-01 1.20371902e+00 ... -2.21739233e-01 3.16715407e+00 -2.21880031e+00] [-6.81320846e-01 -1.62566388e+00 -1.21863151e+00 ... 
-4.43559676e-01 1.04185832e+00 1.54821932e-01]] [[-1.57582116e+00 -1.24408972e+00 3.73095907e-02 ... -1.13975644e+00 3.37400526e-01 1.43489373e+00] [ 8.94114196e-01 -9.15318727e-01 -6.26249611e-02 ... -3.54968697e-01 -1.19169390e+00 -8.47329497e-01] [ 1.40070748e+00 -1.54057574e+00 -1.59176672e+00 ... -1.32885385e+00 4.91938367e-02 1.23101664e+00] ... [ 8.18603694e-01 7.56005585e-01 -2.11390764e-01 ... 2.68054700e+00 1.31016386e+00 3.86254430e-01] [ 4.95795578e-01 -2.76116228e+00 -4.87791002e-01 ... 1.10512161e+00 -5.96416593e-02 -1.21017456e+00] [ 1.34347141e+00 -5.22823930e-01 -8.07117045e-01 ... 4.78582084e-01 -1.62114573e+00 -1.40872002e+00]] [[ 1.03055477e+00 -1.46054757e+00 -2.09785059e-01 ... -3.60613227e-01 2.98085183e-01 2.64781165e+00] [ 3.19665164e-01 1.18067420e+00 -1.70670465e-01 ... 8.50761682e-02 1.09111154e+00 1.55703163e+00] [-1.74934968e-01 -9.91317153e-01 -1.61369848e+00 ... -9.96300951e-03 -7.32110083e-01 -1.16621248e-01] ... [-1.48405421e+00 1.06856048e+00 -7.56238401e-01 ... -1.22808647e+00 -7.55939931e-02 -1.62765801e+00] [-5.72768986e-01 -7.86111295e-01 1.09234190e+00 ... 2.28428423e-01 -3.59081060e-01 -9.46944237e-01] [ 8.63422096e-01 1.55701625e+00 2.38499045e-02 ... 1.71412063e+00 8.45993936e-01 7.36386240e-01]]] [[[-5.83111465e-01 1.31837381e-02 -4.55829561e-01 ... -1.21542132e+00 -1.13851249e+00 -1.73587769e-01] [-2.89721936e-01 6.75151169e-01 -3.17716926e-01 ... 1.11529338e+00 -1.43885195e+00 3.04044455e-01] [ 2.35145658e-01 -1.55067182e+00 -1.42864132e+00 ... 2.50479484e+00 -1.43094099e+00 -4.41436648e-01] ... [ 1.02892470e+00 -1.29908121e+00 -1.81613743e+00 ... 8.86071146e-01 9.99191523e-01 -1.43847561e+00] [ 4.69757736e-01 7.05751598e-01 -1.11380005e+00 ... 1.55017242e-01 1.73693359e+00 -4.11342472e-01] [-1.18892956e+00 -1.49672806e+00 -6.62156224e-01 ... -9.04036283e-01 -1.00727789e-02 9.15655792e-01]] [[ 8.16015005e-01 1.27851403e+00 1.41220224e+00 ... 
9.72712040e-01 -7.99290121e-01 1.04700930e-01] [ 1.00826669e+00 -1.20362854e+00 5.01355946e-01 ... 9.75452244e-01 -7.22690113e-03 -2.72958875e+00] [-2.34711036e-01 2.77281851e-01 -9.58651304e-02 ... 3.71171951e-01 -6.89057946e-01 -8.32536280e-01] ... [-8.71657729e-01 -2.03094259e-01 4.73055691e-02 ... -1.77138269e+00 -1.29770923e+00 -1.05286784e-01] [ 1.26717997e+00 -1.07395291e+00 1.27876210e+00 ... 5.91951370e-01 9.63085890e-01 1.74378037e+00] [-1.20035100e+00 -4.40700501e-01 -9.75909606e-02 ... 2.24919295e+00 8.34828198e-01 -1.63845921e+00]] [[-5.07346392e-01 -3.45658004e-01 -4.55844939e-01 ... -1.69487345e+00 -1.86712947e-02 9.08543468e-01] [ 2.85118997e-01 6.03944302e-01 -9.10144150e-01 ... 2.55612350e+00 -5.12163043e-01 -3.67963523e-01] [ 2.17918798e-01 -1.75365722e+00 -1.01120937e+00 ... -8.31374764e-01 3.65878493e-01 6.66193366e-01] ... [-9.95070755e-01 6.52571499e-01 -1.04040578e-01 ... 1.30877483e+00 -1.49819851e-01 -2.69903671e-02] [-5.47945380e-01 1.03402682e-01 9.71472919e-01 ... 3.83546442e-01 1.24260700e+00 -2.23102975e+00] [ 8.49345386e-01 -1.06570566e+00 1.32103062e+00 ... 8.79343569e-01 -1.01330185e+00 5.55606067e-01]] ... [[ 3.92643809e-01 -3.55857015e-02 -4.84988570e-01 ... 6.23351097e-01 -4.56445277e-01 -4.48906928e-01] [ 1.28779972e+00 -1.86449337e+00 -1.78531480e+00 ... 2.26786327e+00 -6.47976041e-01 -8.11068833e-01] [-3.31087023e-01 -5.31334430e-02 -1.88358918e-01 ... -1.66561961e+00 1.03187680e-01 -9.20455575e-01] ... [ 2.10873947e-01 1.00403440e+00 1.59981847e-01 ... 1.41117764e+00 5.59055865e-01 2.91243643e-01] [ 5.54061949e-01 1.22167859e-02 5.35699613e-02 ... 7.38952637e-01 9.07645375e-03 1.23505831e-01] [-2.91216206e-02 -6.09156489e-01 -1.41482937e+00 ... 1.16699576e+00 -6.46159410e-01 -3.08267891e-01]] [[-1.55337584e+00 -4.27883506e-01 8.12432170e-01 ... 1.54045069e+00 -9.18745339e-01 -3.02959502e-01] [ 1.07801211e+00 2.00371099e+00 5.45908809e-01 ... 
-4.59703952e-01 -5.71200967e-01 4.13129628e-02] [ 3.94674502e-02 7.96362162e-01 4.53093737e-01 ... -6.86210036e-01 3.75321835e-01 -1.12978077e+00] ... [-3.52086097e-01 2.89553016e-01 -1.53150260e+00 ... -4.77967083e-01 -7.79249966e-01 5.32548428e-01] [ 2.88227987e+00 3.43062170e-02 1.21127689e+00 ... 5.21198273e-01 1.57222956e-01 -2.52418876e-01] [-1.16326019e-01 1.95279598e+00 -1.80706065e-02 ... 9.72109199e-01 -7.74129391e-01 3.19660187e-01]] [[ 1.83903921e+00 -2.83463776e-01 -1.21899211e+00 ... 1.01657605e+00 8.09463620e-01 7.91785538e-01] [-1.15463686e+00 1.37331998e+00 -4.66042817e-01 ... 1.62810907e-01 1.91997200e-01 -3.62258434e-01] [-1.34903884e+00 -1.30995715e+00 -1.14227593e+00 ... 1.04082787e+00 -3.94682854e-01 4.70711142e-01] ... [ 2.87383109e-01 -5.58218420e-01 2.18936503e-01 ... 1.05262697e-01 -4.62784469e-01 4.63642627e-01] [-8.93520564e-02 -9.68835711e-01 -1.92039680e+00 ... -6.38882697e-01 -4.78375077e-01 -2.08294988e+00] [ 1.16585243e+00 -4.93222140e-02 3.56546879e-01 ... 1.17963529e+00 -7.06666410e-01 -5.66477656e-01]]] [[[ 8.38627368e-02 -1.57940280e+00 4.97350320e-02 ... -3.30936223e-01 -2.30942860e-01 -1.13128209e+00] [-2.98781574e-01 5.06298721e-01 -7.60006234e-02 ... 1.03812051e+00 4.18865532e-01 -1.00532365e+00] [-4.39053208e-01 -1.84162900e-01 -1.09596932e+00 ... 1.45244136e-01 -1.08199728e+00 2.74057705e-02] ... [ 8.10164027e-03 1.05834723e+00 5.91881946e-03 ... -1.20189989e+00 -1.68390465e+00 1.41884756e+00] [ 6.54909462e-02 -1.47430956e+00 -1.49605364e-01 ... -1.82699835e+00 -1.93008542e-01 -5.59565544e-01] [-1.58845270e+00 4.79812622e-01 -8.65610480e-01 ... -5.60581505e-01 1.30509162e+00 5.09125710e-01]] [[-1.16003406e+00 6.58009231e-01 8.61331761e-01 ... -4.15453345e-01 -2.67322135e+00 1.65837443e+00] [-1.54946697e+00 -1.30469084e+00 2.06838459e-01 ... 9.60617792e-03 1.36385882e+00 8.11712503e-01] [ 1.02595365e+00 -1.29413414e+00 1.37042260e+00 ... 2.55767822e-01 -6.22443259e-02 -9.11606997e-02] ... 
[ 1.50775909e-01 5.75480878e-01 -6.30639434e-01 ... -7.20301390e-01 -8.26751590e-01 -2.50929952e+00] [ 1.11403406e+00 -7.54840136e-01 1.59330833e+00 ... 6.26439452e-01 5.04467748e-02 5.99349380e-01] [ 4.13728625e-01 -3.46880853e-01 2.51226425e-01 ... 3.68699461e-01 -2.35478863e-01 -4.27054405e-01]] [[ 5.56647956e-01 7.90398777e-01 8.50284249e-02 ... 6.19705558e-01 -1.98185831e-01 1.39395997e-01] [-9.61173773e-01 4.07987505e-01 1.71372071e-01 ... 8.95923376e-01 -7.95719504e-01 -9.66659263e-02] [ 3.23417008e-01 7.85419822e-01 9.88191292e-02 ... -4.57419008e-01 -1.07777581e-01 3.19978237e-01] ... [-2.69552290e-01 -6.36201978e-01 -5.21980047e-01 ... 8.23835969e-01 -7.39667267e-02 -5.94755113e-01] [-1.04218376e+00 -1.93362787e-01 -1.27426171e+00 ... 1.15055645e+00 -1.55155349e+00 1.36101961e+00] [-2.77735710e-01 3.15968305e-01 -1.14356506e+00 ... 1.84252298e+00 9.65673700e-02 -1.20077598e+00]] ... [[-2.64811311e-02 -8.07101905e-01 4.21676666e-01 ... -8.81951094e-01 4.36863840e-01 1.03766970e-01] [ 3.50801349e-01 1.78366065e-01 -2.43961755e-02 ... 6.73811376e-01 -4.88922536e-01 -1.14154577e+00] [ 1.71046484e+00 5.30221164e-01 -6.85802042e-01 ... 6.65628836e-02 1.78371027e-01 7.01855242e-01] ... [-5.83732128e-01 -1.33131456e+00 -5.50036728e-01 ... -1.67673573e-01 1.63343012e-01 -9.70476270e-01] [-7.28569329e-01 8.38323653e-01 1.39793491e+00 ... 6.09386623e-01 1.00941169e+00 9.93945956e-01] [ 5.13494372e-01 7.98378110e-01 1.41827703e+00 ... 7.28343070e-01 -2.03674150e+00 -1.25913560e+00]] [[ 6.68191433e-01 -6.43875122e-01 1.66964009e-01 ... -6.43702865e-01 3.88570219e-01 1.99366301e-01] [-7.59536505e-01 -1.92674562e-01 6.12842500e-01 ... 7.78997719e-01 2.26889908e-01 -1.06921649e+00] [-1.35113746e-01 -4.98746753e-01 4.31154668e-01 ... -3.79479408e-01 6.92353904e-01 -1.81555629e-01] ... [ 1.11557674e+00 -9.88946021e-01 -6.02923781e-02 ... -1.22180808e+00 3.47607851e-01 -1.62065864e+00] [-1.58148363e-01 7.31096836e-03 -5.21363497e-01 ... 
6.38597727e-01 -1.01641679e+00 4.34787452e-01] [-4.42790896e-01 1.61638811e-01 -5.76209605e-01 ... -1.18158293e+00 -1.09243035e+00 1.62991345e-01]] [[-2.73895949e-01 5.30051053e-01 2.68230885e-01 ... 2.78248310e-01 -9.90949810e-01 -4.13606822e-01] [-1.99017793e-01 6.42195046e-01 -5.52923679e-01 ... 1.64679921e+00 -7.43290424e-01 4.33476925e-01] [ 8.66259038e-01 1.31895185e-01 -7.57846951e-01 ... 3.10327435e+00 -2.14069271e+00 1.19976245e-01] ... [-8.64409447e-01 7.36967698e-02 -1.05960190e+00 ... -1.95438743e+00 1.15141344e+00 4.61751342e-01] [-6.39544249e-01 1.29072392e+00 3.83043796e-01 ... 1.37045771e-01 1.34683096e+00 2.38611007e+00] [ 4.71857339e-01 1.45504415e+00 4.26745862e-01 ... -3.25602107e-02 -2.18026495e+00 6.11884534e-01]]] [[[-2.35006630e-01 -1.50648057e+00 1.04259622e+00 ... 1.67773938e+00 -1.20146406e+00 -3.00291777e-01] [-3.12231958e-01 1.00226378e+00 9.47799325e-01 ... 4.57554311e-01 4.28915620e-01 3.71729195e-01] [ 2.37870693e-01 2.77101189e-01 2.76444435e-01 ... 5.52655876e-01 4.60505992e-01 4.02534157e-01] ... [-1.50755155e+00 -7.42810965e-01 1.81380785e+00 ... 4.70509619e-01 5.25715828e-01 -1.33844420e-01] [ 1.40873170e+00 1.76941395e-01 -2.38408327e+00 ... -1.38006580e+00 1.99911964e+00 1.70699048e+00] [ 1.30868721e+00 -1.18482363e+00 5.33307612e-01 ... -9.10074711e-01 -2.08014295e-01 -1.00633800e+00]] [[ 1.29882491e+00 1.31087869e-01 1.00341845e+00 ... 3.29382904e-02 1.49784815e+00 -8.91154885e-01] [-1.03564847e+00 -2.38562226e-01 -3.95809114e-01 ... -4.94701356e-01 -1.41289279e-01 -1.34196258e+00] [-1.12817454e+00 3.27537060e-01 1.97084177e+00 ... 1.43370605e+00 -3.12629968e-01 -1.41406715e+00] ... [-9.48555410e-01 5.83964646e-01 -5.61111391e-01 ... 4.61064488e-01 -2.04818666e-01 -2.23182023e-01] [-1.76555246e-01 -8.97910118e-01 -1.16494846e+00 ... 4.24676418e-01 -3.78679931e-02 -7.52210379e-01] [-3.37314993e-01 -1.69182634e+00 1.44430354e-01 ... 
-4.78429973e-01 -4.81443405e-01 -4.95335907e-01]] [[-1.66155565e+00 1.82441914e+00 -4.07908350e-01 ... 4.64004695e-01 -1.50410974e+00 3.53620648e-01] [ 6.79291606e-01 -1.29825592e-01 -1.14963865e+00 ... 8.49975407e-01 -2.77795387e-03 5.07159829e-01] [ 9.24976528e-01 5.18087208e-01 -2.51785144e-02 ... -5.89181364e-01 -9.31752995e-02 -8.69834602e-01] ... [-6.66392863e-01 -4.09990281e-01 2.43893409e+00 ... 2.11025882e+00 -8.70050043e-02 9.36525345e-01] [ 1.87948108e-01 -2.95430452e-01 -4.73922908e-01 ... -9.80426013e-01 -2.38142633e+00 9.18721139e-01] [-9.68249142e-01 -5.50251305e-01 -1.72376037e+00 ... -1.64302096e-01 9.88363028e-01 9.69554365e-01]] ... [[-8.98423553e-01 -2.84376621e-01 7.81146824e-01 ... 1.31299114e+00 3.81829441e-01 -4.30960178e-01] [-1.02150607e+00 -9.68298495e-01 -5.83786607e-01 ... -4.28902984e-01 -1.45592618e+00 6.09251499e-01] [-2.57914662e-01 -1.83325917e-01 4.91308659e-01 ... 5.37134945e-01 1.68306124e+00 -1.01367927e+00] ... [ 2.52150238e-01 -2.08413863e+00 2.55280770e-02 ... 1.50292313e+00 4.75158066e-01 -1.23036718e+00] [ 2.94944435e-01 -2.17219055e-01 -1.88944861e-01 ... -1.44992128e-01 -4.63837624e-01 1.81674755e+00] [ 2.71297634e-01 -8.68323684e-01 -7.60822713e-01 ... 1.46770203e+00 -6.80868208e-01 1.52941039e-02]] [[ 4.06589538e-01 -7.85533726e-01 8.88596714e-01 ... 6.96532130e-01 -3.66774023e-01 -1.36038995e+00] [ 1.00349438e+00 5.41792624e-02 -1.46656799e+00 ... 1.72389638e+00 -5.66634238e-01 5.13043463e-01] [ 3.86258036e-01 5.22394359e-01 -4.58338827e-01 ... 5.48441589e-01 3.62908989e-01 -6.34918511e-02] ... [-1.42733669e+00 1.59236595e-01 5.23125947e-01 ... -2.26172715e-01 6.38810843e-02 -8.49854589e-01] [-1.78384209e+00 8.96537721e-01 -9.56047833e-01 ... -1.01702833e+00 -1.24183619e+00 1.73076165e+00] [ 4.93326873e-01 9.81706777e-04 1.73933065e+00 ... 2.27844715e-01 3.92639786e-01 7.38509834e-01]] [[-8.32306206e-01 -9.94243860e-01 1.65434432e+00 ... 
-2.32602879e-01 1.98532557e+00 1.67053342e+00] [ 9.74959970e-01 1.23718810e+00 6.36094034e-01 ... 3.34802032e-01 1.52685916e+00 2.23905611e+00] [ 1.63801444e+00 -4.36831146e-01 1.72041524e-02 ... -1.69534996e-01 1.05643797e+00 1.37863135e+00] ... [-2.09831983e-01 -6.66303188e-02 -9.02799368e-02 ... 2.08678231e-01 -6.45721316e-01 -1.38811052e+00] [ 1.87708890e+00 6.94051504e-01 3.81546855e-01 ... 8.37369323e-01 -1.04333699e+00 3.18498790e-01] [ 5.44886947e-01 6.94495559e-01 -8.07304621e-01 ... 3.89680177e-01 1.11026847e+00 2.75925338e-01]]]]]; ov_res: [[[[[ 2.12173909e-01 -6.53969944e-01 6.06755197e-01 ... 1.85776603e+00 2.45638430e-01 -9.76070523e-01] [ 1.04023278e+00 -8.80849302e-01 -9.66304634e-03 ... 8.73054266e-01 1.45649225e-01 -3.71141404e-01] [ 6.48138702e-01 -4.72052336e-01 6.86412156e-01 ... -1.11692846e+00 3.14393371e-01 -4.31204617e-01] ... [ 8.69596422e-01 -1.30053961e+00 -3.26316416e-01 ... -1.14120042e+00 1.22366524e+00 -1.21305358e+00] [-1.92318916e-01 -1.41212195e-01 -1.51492536e+00 ... -2.18843961e+00 -4.26238447e-01 4.08224165e-01] [-1.60450590e+00 -1.29268038e+00 -1.32803190e+00 ... -6.94066584e-01 9.33120102e-02 1.74623072e+00]] [[-1.66059911e+00 2.58430123e-01 4.74726886e-01 ... -1.07288575e+00 2.77530134e-01 4.52837676e-01] [-1.95381150e-01 -1.57514942e+00 -4.10510957e-01 ... -1.97371587e-01 -4.28009212e-01 9.94362891e-01] [ 1.76943541e+00 -7.12736428e-01 -1.33630574e+00 ... 5.96087396e-01 8.30090284e-01 -8.86390924e-01] ... [-3.61817092e-01 -6.59410581e-02 -8.78502607e-01 ... 1.61247206e+00 -1.26312554e+00 4.85355079e-01] [ 1.84466207e+00 -2.11399937e+00 -9.67707157e-01 ... -6.08581960e-01 7.22423553e-01 -5.56877613e-01] [-2.83271790e-01 1.76167822e+00 -1.36796868e+00 ... -1.77667701e+00 -7.79295146e-01 6.22062802e-01]] [[ 5.37337303e-01 -7.64595687e-01 1.70153841e-01 ... 1.35460213e-01 -1.64925528e+00 6.10508740e-01] [ 5.36523163e-01 2.51211858e+00 -1.23258924e+00 ... 
-2.86938369e-01 3.67188543e-01 2.34202743e-01] [ 1.11306012e+00 1.66542673e+00 1.06238449e+00 ... -6.84512317e-01 7.69596159e-01 8.80802095e-01] ... [-4.70721871e-01 1.40335274e+00 6.26066744e-01 ... -2.65432060e-01 6.89869344e-01 2.94316316e+00] [ 9.31488872e-01 1.35983557e-01 5.43544829e-01 ... -1.75569665e+00 1.15390301e+00 -3.23615849e-01] [-2.85530984e-01 -1.48939788e+00 1.98358452e+00 ... -3.23096961e-01 2.29424453e+00 -3.17618698e-01]] ... [[ 2.78616369e-01 8.51521730e-01 1.10796845e+00 ... -9.94047105e-01 -6.77550077e-01 1.38776422e+00] [ 6.63480282e-01 1.53298989e-01 1.57731915e+00 ... 6.94357650e-03 -8.64132047e-01 -1.34428191e+00] [ 5.66240072e-01 2.78350025e-01 -7.19450712e-01 ... -1.79866970e+00 -7.92168677e-02 -1.34234846e+00] ... [ 2.37463045e+00 5.09724431e-02 2.04862133e-01 ... 8.78217995e-01 -3.72297317e-01 -8.71458650e-02] [ 2.01724887e+00 -1.20816672e+00 2.03505182e+00 ... -2.22255039e+00 -1.79664576e+00 -7.99920321e-01] [-2.41587996e+00 -5.87653875e-01 9.08940434e-01 ... 3.17259841e-02 -1.42192515e-02 9.09862697e-01]] [[-1.28378379e+00 -6.64108038e-01 5.21183431e-01 ... -1.26532388e+00 -2.17842851e-02 -2.22082663e+00] [ 2.00860190e+00 5.95981255e-02 2.90110916e-01 ... 4.05581683e-01 -2.08861649e-01 -7.59894252e-02] [ 1.58880424e+00 -2.15637755e+00 -7.46849120e-01 ... -5.23495138e-01 4.24540401e-01 5.47317803e-01] ... [-5.60497880e-01 1.39414084e+00 -1.41505167e-01 ... 4.83785719e-01 8.14530432e-01 -1.00026870e+00] [-4.38310266e-01 -4.16030765e-01 7.99747348e-01 ... -1.43981147e+00 -1.17703867e+00 3.48932035e-02] [ 1.53577304e+00 1.00789106e+00 1.95631528e+00 ... -4.89088207e-01 -1.08527708e+00 -1.51846445e+00]] [[-6.72134876e-01 -8.38088572e-01 1.52483273e+00 ... -1.10487974e+00 -1.01641588e-01 -1.81334627e+00] [ 9.72837150e-01 2.94059664e-01 1.16947579e+00 ... -1.32734001e+00 -5.02526760e-01 -6.79090440e-01] [ 1.50502431e+00 1.02112150e+00 -5.78136044e-03 ... -5.79100430e-01 -1.54068375e+00 -7.74490297e-01] ... 
[ 1.18404150e+00 7.57049561e-01 -1.07836986e+00 ... -4.14257020e-01 8.23308825e-01 -6.78702354e-01] [-1.34418917e+00 7.84762669e-03 1.53253245e+00 ... 1.91138577e+00 -9.53767121e-01 -2.83361256e-01] [ 2.01379824e+00 5.21126747e-01 9.55291092e-01 ... 9.94403481e-01 3.33569339e-03 -6.63817376e-02]]] [[[-5.54897249e-01 -1.41896188e+00 -1.71796940e-02 ... 7.12718546e-01 7.31274545e-01 -3.71338338e-01] [-2.28045732e-01 1.29973042e+00 -8.54533464e-02 ... 4.18067724e-01 7.55941927e-01 1.55417919e+00] [ 1.91819370e+00 9.54396427e-01 1.56135052e-01 ... 6.76804066e-01 9.08335447e-02 -6.96641505e-02] ... [-2.46380325e-02 -5.77682853e-01 1.26066744e-01 ... -9.89442766e-01 -3.49269301e-01 3.22731465e-01] [-2.41237834e-01 2.31289238e-01 -1.12673342e+00 ... 3.48298341e-01 -8.91626716e-01 -1.21251166e+00] [-4.04237747e-01 -5.23236275e-01 1.36554587e+00 ... 2.03429794e+00 3.26561213e-01 1.72563720e+00]] [[-2.50914246e-01 1.40354288e+00 9.15227652e-01 ... -3.78108352e-01 -1.11986279e+00 1.23992920e+00] [-9.96889591e-01 9.45008576e-01 1.59084582e+00 ... -8.20249319e-01 3.93674940e-01 1.63032925e+00] [-1.31707954e+00 -1.36189783e+00 6.02070034e-01 ... 1.82496086e-01 2.02367023e-01 -6.48479223e-01] ... [ 1.18374169e+00 -1.18373290e-01 -1.55877605e-01 ... 8.81138980e-01 -7.71250069e-01 -4.44463313e-01] [ 1.98717237e+00 1.40004134e+00 8.03262949e-01 ... 2.07598305e+00 -1.96644926e+00 -9.57964182e-01] [-7.59481668e-01 1.86089408e+00 -2.39247411e-01 ... 1.01318443e+00 -2.50815123e-01 -2.24679429e-02]] [[ 1.21608794e+00 1.11817276e+00 -2.79921323e-01 ... 8.78786385e-01 -7.35326290e-01 5.94434321e-01] [-1.45967829e+00 1.74844754e+00 9.95260954e-01 ... -1.86366153e+00 1.73727834e+00 2.91582257e-01] [-1.99052483e-01 2.23765492e+00 -1.22117960e+00 ... -6.17739260e-02 1.53329575e+00 3.28693986e-01] ... [-4.20380145e-01 -3.73783082e-01 2.17004577e-04 ... 4.39801604e-01 2.35986367e-01 -8.73950779e-01] [ 5.57041466e-01 -7.35465884e-01 -6.56656384e-01 ... 
1.16072297e+00 8.61744396e-03 9.54028010e-01] [ 1.30364215e+00 1.10262668e+00 9.69991446e-01 ... 6.19132280e-01 1.30775297e+00 -2.09428787e+00]] ... [[-3.18092942e-01 -1.34613827e-01 -6.21387541e-01 ... 4.20853436e-01 3.29794884e-01 -5.41251183e-01] [-2.10659474e-01 4.46927696e-01 -2.61763871e-01 ... 3.31309661e-02 -6.69641554e-01 6.15874529e-01] [-4.34324861e-01 3.44597101e-01 -1.57488346e+00 ... 2.02022290e+00 8.25998187e-01 8.33520353e-01] ... [ 8.80810976e-01 -2.20430541e+00 -1.72662497e+00 ... -4.98237401e-01 -5.83641469e-01 7.21602857e-01] [-5.21852791e-01 -4.07054186e-01 -2.03738481e-01 ... -2.06217229e-01 8.61290842e-02 -4.68162507e-01] [ 1.47122300e+00 -3.01399734e-02 -1.79674351e+00 ... 5.10253072e-01 -9.03280079e-01 1.56485033e+00]] [[-1.35652125e+00 8.87544394e-01 -4.40005094e-01 ... -2.93100893e-01 6.97555006e-01 -5.54899871e-01] [-2.39505816e+00 -2.06316367e-01 -3.06251705e-01 ... -7.72193372e-01 -1.05159128e+00 1.11886382e+00] [ 5.04845381e-01 -2.50931323e-01 -9.12476659e-01 ... 3.43508646e-02 -1.96312940e+00 -1.33234560e+00] ... [ 2.60931607e-02 -8.29961121e-01 1.14692330e+00 ... 8.94430697e-01 9.98450935e-01 7.04389453e-01] [ 5.23144305e-01 9.33558404e-01 -3.67960513e-01 ... -5.36685586e-01 -7.67046273e-01 -9.32475090e-01] [-1.03190196e+00 7.32798278e-01 -1.02392304e+00 ... 1.24149525e+00 -7.40395069e-01 6.01787329e-01]] [[ 9.22805846e-01 -1.12787783e+00 -1.97561085e-01 ... -7.78701723e-01 1.38846052e+00 2.83562660e-01] [-9.18819845e-01 2.56329864e-01 7.92653799e-01 ... -8.69385064e-01 -2.43468833e+00 -3.55843544e-01] [ 6.80479333e-02 7.28205681e-01 -5.08340895e-01 ... -1.20111263e+00 -8.19475770e-01 -6.64620280e-01] ... [-5.79400778e-01 6.67809844e-01 9.60451901e-01 ... -8.49792361e-01 -1.18276596e+00 5.79500973e-01] [ 8.71769667e-01 2.13394475e+00 1.61811933e-01 ... 9.37873185e-01 1.02823079e+00 -8.15275311e-01] [ 4.78867553e-02 -1.58867747e-01 6.41970515e-01 ... 
-1.23048425e+00 1.24008000e+00 -3.68076146e-01]]] [[[ 1.64934635e+00 -1.57040250e+00 -1.27635324e+00 ... -1.60265997e-01 -1.43798327e+00 4.21153381e-02] [ 5.67983985e-01 2.15069747e+00 2.61551523e+00 ... 4.08209980e-01 -2.65006423e-01 -2.11882964e-01] [ 4.39596444e-01 1.34058583e+00 4.77970093e-01 ... -1.07104075e+00 1.32129520e-01 1.25583577e+00] ... [-1.41759467e+00 1.77673459e+00 1.73006630e+00 ... -9.86726224e-01 8.86495650e-01 -7.20072627e-01] [ 2.67714500e-01 -1.46013343e+00 -4.65987921e-02 ... 4.57910210e-01 -1.62302211e-01 -5.73457897e-01] [-1.09165967e+00 -2.76604563e-01 -4.62700605e-01 ... -1.33153093e+00 -1.18351245e+00 -7.43140876e-01]] [[-4.95679915e-01 -1.06310797e+00 1.91322379e-02 ... 3.18190418e-02 1.10318375e+00 2.05723572e+00] [ 3.10438901e-01 7.77571350e-02 -5.57703972e-01 ... -7.47180954e-02 4.54704642e-01 -9.05067444e-01] [ 1.36898792e+00 -1.85353741e-01 1.85488060e-01 ... 3.47130047e-03 -2.57701427e-01 1.42257953e+00] ... [ 6.10128760e-01 -7.12434530e-01 1.21546708e-01 ... 3.24745297e-01 6.66766644e-01 1.06198037e+00] [ 1.02630116e-01 7.61899590e-01 -5.40494248e-02 ... -9.07656789e-01 -7.36946166e-02 4.58400667e-01] [ 9.15739179e-01 4.05097216e-01 -3.49479169e-01 ... 5.72051525e-01 3.16789687e-01 -7.07823217e-01]] [[-8.43550086e-01 -7.61461318e-01 -1.24920420e-01 ... 3.70434910e-01 1.60316005e-01 2.64019191e-01] [-2.82258600e-01 -5.00216894e-02 -5.19251414e-02 ... -1.75199106e-01 4.03905749e-01 2.47042990e+00] [-2.40321353e-01 -5.92893183e-01 -1.90813470e+00 ... 1.37942207e+00 -3.24850857e-01 1.17504728e+00] ... [-6.40842259e-01 7.45095074e-01 -2.40028605e-01 ... 1.65898252e+00 -1.20123100e+00 -2.11279559e+00] [-2.62680531e-01 -1.97616851e+00 -3.77412528e-01 ... -1.46244347e-01 -8.81270587e-01 5.13020277e-01] [-1.32465482e+00 -1.18099391e+00 -8.79175901e-01 ... -1.26711428e+00 -1.10691166e+00 2.17040634e+00]] ... [[ 5.36777973e-01 -3.92170161e-01 3.66003782e-01 ... 
6.07424140e-01 5.78560159e-02 3.02064896e+00] [ 2.87236422e-02 3.83073986e-01 -6.91657186e-01 ... 3.12846512e-01 -1.29854664e-01 1.48454404e+00] [ 3.94801766e-01 6.09994471e-01 -8.40489388e-01 ... -9.48334694e-01 -9.15411711e-01 -4.94790554e-01] ... [ 1.17350280e+00 1.32302952e+00 7.22589135e-01 ... 3.68740559e-01 5.89835644e-01 4.79266673e-01] [-2.54007936e-01 -7.21699148e-02 -6.45980299e-01 ... -4.01392549e-01 -1.11411417e+00 -3.51207972e-01] [-2.55349278e-01 -2.41657481e-01 3.90691757e-01 ... -5.28706968e-01 -2.07569152e-01 1.40301621e+00]] [[ 2.45656714e-01 6.98658168e-01 1.00120473e+00 ... 5.40025771e-01 -1.92152417e+00 -1.01857328e+00] [-1.04702246e+00 4.72505420e-01 1.76396415e-01 ... 1.47713661e+00 5.73629618e-01 9.09329504e-02] [ 1.02834582e-01 7.37219274e-01 1.41946113e+00 ... 6.95303679e-01 1.83381557e-01 4.49084073e-01] ... [-9.00279701e-01 9.73990917e-01 2.14642930e+00 ... 7.41741806e-02 -8.30714345e-01 2.77313087e-02] [-1.56483257e+00 1.21220863e+00 1.81958094e-01 ... 7.64316320e-01 3.29038724e-02 -8.81738663e-01] [-1.84627068e+00 -9.14689839e-01 5.84256113e-01 ... 1.08765376e+00 -5.87506704e-02 -1.68095899e+00]] [[ 1.14836836e+00 -3.25919718e-01 2.13368833e-01 ... -1.73605800e+00 -4.55452383e-01 -4.00612839e-02] [-1.12053132e+00 -2.71003097e-01 6.18185937e-01 ... -1.30542573e-02 -1.00848091e+00 -5.49105942e-01] [ 7.11667359e-01 -2.12930694e-01 1.80651653e+00 ... -1.85079432e+00 -3.40420939e-02 5.48240721e-01] ... [-1.03443468e+00 7.28003025e-01 2.05354595e+00 ... 6.61939234e-02 -1.06429957e-01 -1.26807582e+00] [ 2.99365342e-01 -2.15555623e-01 2.10640401e-01 ... 1.73638475e+00 1.32576898e-01 -5.07059991e-01] [ 8.02191973e-01 1.39212117e-01 -1.72985941e-01 ... -1.72304535e+00 1.27329886e+00 -7.59142637e-01]]] [[[ 1.72667694e+00 1.44631565e-01 -1.60012865e+00 ... 1.31380403e+00 -2.02942491e+00 1.63066518e+00] [-1.07451701e+00 -7.94017911e-01 6.44913256e-01 ... 
2.19200432e-01 6.75414741e-01 1.61652958e+00] [-7.79701531e-01 -7.15490282e-01 9.81000289e-02 ... 9.79150832e-01 7.96074688e-01 -1.32168964e-01] ... [ 1.33121476e-01 6.60855591e-01 2.56895900e-01 ... 1.07394171e+00 1.08949482e+00 -5.64259827e-01] [-1.35266638e+00 -1.96706757e-01 1.06839669e+00 ... -5.55741847e-01 5.80198705e-01 1.92645144e+00] [ 1.01520514e+00 -1.05069733e+00 -5.40698886e-01 ... -1.15386136e-01 -4.95668277e-02 -2.21662834e-01]] [[ 2.66284913e-01 5.67765594e-01 -1.18331957e+00 ... -1.14590049e+00 -1.79732159e-01 -1.24652886e+00] [ 4.21188772e-01 -6.89871967e-01 9.60821867e-01 ... 1.21958148e+00 9.65680897e-01 -1.21153891e+00] [ 2.08688766e-01 -1.85110295e+00 1.43544328e+00 ... -6.55006647e-01 7.26370752e-01 7.89458036e-01] ... [-2.65413642e-01 2.59447861e+00 -8.13747346e-01 ... -1.33880007e+00 8.03624868e-01 -4.56398040e-01] [ 4.05982047e-01 1.31010807e+00 -1.77982962e+00 ... -4.38073695e-01 -9.39098477e-01 -1.15016572e-01] [ 1.71047521e+00 -2.87532747e-01 1.80689704e+00 ... 3.94021839e-01 -5.40716529e-01 -5.87356865e-01]] [[ 2.95307815e-01 -6.65454865e-02 -1.49959922e+00 ... 9.87914622e-01 1.06457818e+00 -1.88485309e-01] [-8.77974331e-01 -1.85687673e+00 5.86790740e-01 ... 4.33275439e-02 -1.14936435e+00 -1.90323222e+00] [-8.40421796e-01 -4.26281810e-01 2.15320885e-01 ... 2.28726554e+00 1.14229679e+00 6.82413101e-01] ... [ 7.34755516e-01 6.72109663e-01 -2.43739277e-01 ... -4.93509024e-01 2.57004648e-01 -6.50200069e-01] [ 1.31773043e+00 -9.43212450e-01 -1.32110298e+00 ... 1.94393411e-01 -8.64662468e-01 -7.75687039e-01] [-6.62352145e-01 -1.59155518e-01 6.32434189e-01 ... 7.09542990e-01 1.98097154e-01 2.59431183e-01]] ... [[ 3.71352613e-01 1.73075140e-01 -1.35423636e+00 ... -3.90652359e-01 1.11043669e-01 1.28827643e+00] [ 1.84897923e+00 5.54751098e-01 1.54559720e+00 ... -5.36568046e-01 1.41511464e+00 -1.27156651e+00] [ 4.26100284e-01 -3.06441277e-01 -9.23175991e-01 ... 1.86970085e-01 -2.49525189e+00 -3.50652009e-01] ... 
[ 6.97803557e-01 -1.51230052e-01 5.64452291e-01 ... -4.99699086e-01 -3.28484118e-01 7.53675222e-01] [-1.19832110e+00 -9.84346628e-01 -1.28459454e+00 ... -1.60015202e+00 -2.60943532e-01 -1.78918922e+00] [-1.32164061e+00 7.59543896e-01 9.95918274e-01 ... 3.64553154e-01 -9.58823621e-01 1.38827920e-01]] [[ 7.57133737e-02 -6.05345309e-01 9.66821909e-02 ... 1.06747460e+00 -1.97084218e-01 -1.34139240e+00] [ 9.50173378e-01 1.83328733e-01 6.54599011e-01 ... -2.41857147e+00 4.15344276e-02 -6.72388136e-01] [ 6.76899731e-01 1.51602805e+00 1.06731999e+00 ... 1.63928545e+00 -5.75223744e-01 9.18733299e-01] ... [ 2.00950646e+00 -8.44257355e-01 -2.71168500e-01 ... -1.42730749e+00 -1.11101282e+00 -1.51354253e-01] [-1.99390042e+00 -1.22524679e+00 1.66584527e+00 ... -8.18642676e-01 -4.40255642e-01 -1.70355979e-02] [-4.82568860e-01 -1.79503834e+00 1.72820333e-02 ... 1.25381088e+00 -6.22919738e-01 6.00831509e-01]] [[-9.34665918e-01 5.91345802e-02 1.56392646e+00 ... 6.98097289e-01 2.01046601e-01 -9.06523287e-01] [ 3.13403130e+00 -3.34187150e-01 -6.66610181e-01 ... 4.19831425e-01 -1.05640912e+00 1.28499079e+00] [-8.62161160e-01 -4.49739516e-01 1.43902704e-01 ... -6.59152687e-01 1.66528791e-01 1.70098677e-01] ... [ 5.55744529e-01 -3.76087904e-01 9.81168300e-02 ... -7.16528177e-01 -8.41857672e-01 -2.19260707e-01] [-9.19413328e-01 1.65655386e+00 -1.25192606e+00 ... 1.74323547e+00 7.92885065e-01 -1.47070038e+00] [-9.79137361e-01 -1.56948283e-01 -5.55427194e-01 ... 5.75111091e-01 1.18306410e+00 -3.83826047e-01]]] [[[ 3.81122604e-02 1.48694491e+00 -1.16909981e+00 ... 1.32544768e+00 -1.06143606e+00 1.63799718e-01] [ 7.60157287e-01 -1.05560303e+00 4.15704697e-01 ... -8.24026763e-01 7.91228235e-01 3.03909004e-01] [ 3.56626987e-01 -2.29554319e+00 -7.80124843e-01 ... -5.94751120e-01 -3.45420450e-01 -1.07110953e+00] ... [ 5.85273325e-01 -1.84810293e+00 -5.24181783e-01 ... 1.36775553e-01 -4.50325340e-01 1.18995810e+00] [ 2.20754370e-02 -1.32807887e+00 5.03412485e-01 ... 
1.00294292e+00 -1.54787517e+00 8.78054738e-01] [-4.97987270e-01 -7.58685946e-01 -3.06670398e-01 ... -2.15262794e+00 9.03365970e-01 1.17842543e+00]] [[-4.41891432e-01 -9.33931470e-01 5.98643124e-01 ... 1.36367679e-02 -1.43793845e+00 3.49362880e-01] [ 1.89421289e-02 -6.15185559e-01 -6.40412688e-01 ... 2.55066633e-01 -9.34433103e-01 -3.37294817e-01] [ 5.75304687e-01 -1.33088005e+00 -1.85730085e-01 ... 5.09989619e-01 7.22633153e-02 -5.00231206e-01] ... [-9.85262096e-01 2.32458305e+00 5.71460724e-01 ... 2.05358505e+00 7.80727744e-01 -1.28535521e+00] [ 1.32982147e+00 -1.16087472e+00 4.00525510e-01 ... -1.75779152e+00 1.54423952e-01 1.06065404e+00] [-9.92031932e-01 -1.61961894e-02 5.08625329e-01 ... -1.16171563e+00 1.01273334e+00 -1.36602879e-01]] [[-1.48122191e+00 3.12026083e-01 -4.29146923e-02 ... -1.68473470e+00 -9.34667766e-01 -1.52032459e+00] [-6.01643980e-01 3.18652242e-01 -6.10068381e-01 ... 1.28757799e+00 3.18867654e-01 -1.08793616e+00] [ 6.45364702e-01 8.61403286e-01 2.84707457e-01 ... 1.34380907e-01 -1.27189946e+00 -1.32159007e+00] ... [ 6.67062327e-02 -6.43252611e-01 2.91829586e-01 ... 1.52324069e+00 2.00068331e+00 1.36307478e-01] [-1.09826255e+00 -4.67585772e-01 9.78737116e-01 ... -1.63720381e+00 -4.51033115e-01 -2.22355247e-01] [-1.36047769e+00 1.22232370e-01 -8.99297357e-01 ... -1.17904711e+00 -4.19449240e-01 4.79800627e-02]] ... [[-5.01605690e-01 -3.97291690e-01 -2.09986884e-02 ... 6.28361702e-01 -5.67638814e-01 1.24277973e+00] [ 9.79711294e-01 1.30451024e-01 -3.88576277e-02 ... 5.58903039e-01 3.42232347e-01 2.34316993e+00] [-2.52732396e-01 -8.00612450e-01 -7.26469904e-02 ... 1.11192143e+00 -1.50246274e+00 1.46226272e-01] ... [ 6.19957864e-01 -5.46869226e-02 -2.48152900e+00 ... 1.24449146e+00 -1.19793618e+00 2.57609300e-02] [ 1.95159107e-01 -2.18874767e-01 -2.67090768e-01 ... 1.49088871e+00 1.05881989e-01 6.52779162e-01] [-1.64348495e+00 -1.53528404e+00 -6.36182189e-01 ... 
4.23486769e-01 4.60571736e-01 8.23682904e-01]] [[ 1.22910511e+00 -1.34637922e-01 -1.07332158e+00 ... -7.55763054e-02 -1.03145015e+00 -7.57290542e-01] [-2.32846904e+00 -2.70387679e-01 -5.09087741e-01 ... 8.84181321e-01 -8.88112485e-01 -3.96156944e-02] [ 3.18057686e-01 1.23634958e+00 1.10518122e+00 ... 1.36977458e+00 1.56497169e+00 -3.52425873e-02] ... [ 2.52985179e-01 -2.70504504e-01 4.24374223e-01 ... 3.52579206e-01 2.38779813e-01 2.98315197e-01] [ 1.54103053e+00 6.62178338e-01 8.27930808e-01 ... -2.23470360e-01 -8.91904980e-02 5.25610507e-01] [ 6.64671957e-01 2.22896099e-01 -6.52499497e-01 ... -1.19363594e+00 -9.79720771e-01 -6.42055631e-01]] [[ 3.07419270e-01 1.25363266e+00 -2.08710544e-02 ... -7.80680299e-01 -8.13775837e-01 -3.09027970e-01] [ 5.35228670e-01 2.64569730e-01 1.91295886e+00 ... -2.18373686e-02 8.65942299e-01 1.52259195e+00] [ 1.01970863e+00 -4.57873553e-01 5.05022109e-01 ... -2.39041179e-01 -1.38070971e-01 -1.44977880e+00] ... [ 1.42907917e-01 -4.14037630e-02 2.26917833e-01 ... -1.33670256e-01 2.09647298e+00 1.24614157e-01] [ 7.22583383e-02 1.88066643e-02 7.41062343e-01 ... 5.60249567e-01 1.83110547e+00 -1.05524373e+00] [ 7.68572867e-01 1.62287965e-01 -5.83031066e-02 ... -2.98636794e-01 -1.30584329e-01 -6.96545303e-01]]] [[[ 4.67891276e-01 2.53667742e-01 -7.91326463e-01 ... 1.23771334e+00 1.48256838e+00 1.19236626e-01] [-5.31509280e-01 -5.57570457e-01 -3.30888450e-01 ... -1.27242017e+00 -1.46647636e-02 -1.34239054e+00] [-4.79110442e-02 1.85803616e+00 1.18459868e+00 ... 4.51368809e-01 -7.94318140e-01 8.80086794e-02] ... [-3.41906309e-01 3.02981973e-01 -1.62028110e+00 ... 3.01435795e-02 9.20178771e-01 1.39374703e-01] [ 2.25101322e-01 6.51079118e-01 -2.43559971e-01 ... -1.11264706e+00 7.20223263e-02 -5.99165857e-01] [ 2.49140695e-01 1.64026046e+00 -3.01913679e-01 ... 1.37658283e-01 9.39968705e-01 -2.23426148e-02]] [[ 1.36509359e+00 8.27966273e-01 -1.18628311e+00 ... 
7.02586055e-01 -2.15897512e+00 -1.05383337e+00] [ 2.61843115e-01 -1.80405617e+00 -1.03752315e+00 ... 2.83613324e-01 4.61889923e-01 4.23910707e-01] [-1.93162844e-01 -6.97424233e-01 4.60490212e-02 ... 1.07106006e+00 1.69516385e+00 -1.99626684e-01] ... [-4.42839414e-01 -5.64030111e-01 -3.14370662e-01 ... 8.95859361e-01 1.30321181e+00 7.27909565e-01] [ 8.67041588e-01 -4.03810322e-01 2.32012525e-01 ... -1.05822134e+00 -7.19414473e-01 -9.11260188e-01] [ 2.65718568e-02 -1.21541059e+00 -1.93433627e-01 ... -1.50241625e+00 9.16519165e-01 -4.25154716e-01]] [[ 1.11653991e-01 1.22639179e-01 -3.95466685e-01 ... 1.15063198e-01 -1.13520539e+00 1.04656637e+00] [ 1.62291467e+00 6.22415900e-01 -1.62697840e+00 ... 4.19070154e-01 4.78491843e-01 9.67886150e-01] [-1.44979310e+00 1.81717467e+00 1.16990721e+00 ... 4.13904279e-01 -4.53087330e-01 8.18019032e-01] ... [-1.18701708e+00 7.71685302e-01 1.87654781e+00 ... 1.30872741e-01 4.41912770e-01 -2.20293999e+00] [ 1.91672310e-01 -9.99485195e-01 -1.51388955e+00 ... -1.55987501e+00 1.66733396e+00 -4.98768687e-02] [-1.25233757e+00 -1.49496388e+00 -3.02673757e-01 ... 1.02955985e+00 3.83003652e-01 -1.60962248e+00]] ... [[-3.16951722e-01 -3.43004405e-01 1.23360537e-01 ... 6.13937140e-01 -2.31359935e+00 1.04178762e+00] [-5.58538556e-01 -1.22035873e+00 4.65846777e-01 ... -7.50201046e-01 1.03914344e+00 -2.52524585e-01] [-7.82106400e-01 1.28779069e-01 1.35475683e+00 ... -7.77000701e-03 -9.07896459e-01 3.30393106e-01] ... [ 9.19450283e-01 -8.56324077e-01 -7.79940903e-01 ... -1.16437888e+00 -1.43448222e+00 -4.10385251e-01] [-2.47706199e+00 -9.96344447e-01 -8.93603936e-02 ... -2.46514797e-01 -1.60588264e+00 2.29372755e-01] [-1.47188395e-01 -5.35489380e-01 2.04566526e+00 ... -7.88521022e-02 -8.40262473e-01 1.11680102e+00]] [[-9.35119212e-01 3.02895308e-01 -8.44860554e-01 ... -1.56251693e+00 1.61815181e-01 4.75861669e-01] [-5.18706977e-01 -2.56456792e-01 1.31604695e+00 ... 
6.24243081e-01 1.76708019e+00 8.78712237e-01] [ 2.58342475e-01 -1.80413353e+00 4.80028540e-01 ... 1.30284324e-01 7.27941915e-02 2.44780928e-02] ... [-1.01518691e+00 -4.09423143e-01 1.65005612e+00 ... 1.12630045e+00 2.20009756e+00 -3.80541503e-01] [-1.46621192e+00 -4.97361451e-01 -1.04098213e+00 ... 3.23476404e-01 3.68390709e-01 -1.37916493e+00] [ 2.93135434e-01 -2.24972606e+00 5.97273648e-01 ... -6.23991370e-01 -8.74275982e-01 1.50696886e+00]] [[ 3.12389523e-01 -1.60474718e+00 6.96624279e-01 ... 3.69214177e-01 -4.56012785e-01 -1.65657091e+00] [ 7.80855238e-01 -7.69584626e-02 -2.36725092e-01 ... -4.95101601e-01 1.75672352e+00 7.10741520e-01] [ 2.65305489e-01 1.00377941e+00 -1.02608669e+00 ... -7.75644332e-02 -1.26738513e+00 3.14744897e-02] ... [ 1.71425998e+00 4.00847971e-01 8.07860434e-01 ... 9.65343297e-01 -5.84024847e-01 -6.01064920e-01] [ 1.66497082e-01 -1.23056605e-01 -6.31631136e-01 ... -7.46396706e-02 -9.89916563e-01 -1.51090667e-01] [ 1.62921882e+00 1.58864141e+00 5.46276808e-01 ... 6.31755114e-01 -6.83282435e-01 8.80511582e-01]]]] [[[[ 2.06667572e-01 -2.64679283e-01 7.40906894e-02 ... -1.08359218e-01 1.22295201e+00 3.53482276e-01] [-1.27212894e+00 2.53803372e-01 3.86138588e-01 ... 6.83006793e-02 -1.64995873e+00 5.98887742e-01] [-1.66386962e+00 -1.84129190e+00 -3.79952669e-01 ... 8.19700480e-01 -7.03396618e-01 1.02931786e+00] ... [-1.25665987e+00 -2.55621731e-01 8.33786309e-01 ... -1.48100182e-01 1.44907132e-01 -6.66340709e-01] [-9.29564178e-01 -2.15046024e+00 2.61008477e+00 ... -8.36342216e-01 6.12104475e-01 2.23727718e-01] [ 1.06640272e-01 4.82778281e-01 1.99507904e+00 ... -1.15776479e+00 9.27747607e-01 4.97549772e-01]] [[ 2.20903650e-01 -4.76303816e-01 4.49309386e-02 ... -5.55958569e-01 -1.94013453e+00 3.39935511e-01] [ 1.46472895e+00 -5.25679648e-01 -3.60421799e-02 ... -4.55890179e-01 1.79515767e+00 -2.76254844e-02] [-2.94900358e-01 9.19188797e-01 5.71354926e-01 ... -6.77494407e-02 3.27175975e-01 -3.79404634e-01] ... 
[ 1.11797735e-01 -1.11803401e+00 -5.01634479e-01 ... -8.22167456e-01 -1.78340897e-01 5.35097897e-01] [ 5.05367890e-02 2.90385246e-01 -9.16887105e-01 ... -4.35469076e-02 -1.51655644e-01 -6.45177782e-01] [-3.92269164e-01 -8.69886726e-02 -1.89119446e+00 ... 5.11262476e-01 7.74610221e-01 2.59762496e-01]] [[-9.87787664e-01 -8.68119299e-01 -7.46949434e-01 ... 1.14694679e+00 1.56697249e+00 2.43734717e-01] [-7.07979262e-01 -4.14106883e-02 -1.23968327e+00 ... -3.08616370e-01 2.53451228e-01 -5.15584528e-01] [ 1.36143279e+00 -1.16790926e+00 7.91117251e-01 ... 2.64534801e-01 -5.72289109e-01 -9.62539092e-02] ... [-1.89303517e+00 1.31529713e+00 -1.32048368e+00 ... -5.18065631e-01 -1.50129592e+00 1.13433301e+00] [ 1.38745874e-01 5.44431627e-01 -4.02754128e-01 ... -1.21077931e+00 1.62368506e-01 -3.75752598e-01] [-2.64788568e-01 -1.01666915e+00 -2.47400686e-01 ... -3.77597481e-01 8.87228072e-01 9.73280549e-01]] ... [[-4.20467965e-02 -5.34536362e-01 1.27337360e+00 ... 3.07008058e-01 2.95505170e-02 -6.06682479e-01] [ 2.34652147e-01 -1.01748414e-01 9.72926497e-01 ... -3.96422565e-01 2.25303859e-01 3.80849391e-01] [-3.32422197e-01 5.27995467e-01 1.17878482e-01 ... -7.40965083e-02 1.65520108e+00 1.23927164e+00] ... [ 2.68930960e+00 8.90425205e-01 -2.27455080e-01 ... 5.85949361e-01 -1.94065034e-01 1.07722139e+00] [-4.84489262e-01 -5.10758638e-01 -7.84004390e-01 ... 6.83474541e-01 3.12674189e+00 3.51309329e-01] [ 1.00673747e+00 5.93591750e-01 3.45223159e-01 ... -1.11169919e-01 9.40144002e-01 -8.82448733e-01]] [[-1.48238981e+00 1.41811574e+00 -2.97611952e-01 ... 2.24888250e-01 1.20846379e+00 2.27815914e+00] [-1.79827839e-01 -9.54337358e-01 1.34045660e+00 ... 2.73110062e-01 8.82737517e-01 7.59934425e-01] [ 3.10494810e-01 -6.62325799e-01 -2.14601606e-01 ... 1.70364821e+00 5.91548681e-01 -1.11245680e+00] ... [-2.92742252e+00 -7.34680593e-02 -6.86725855e-01 ... -3.29189944e+00 3.97693783e-01 2.05314830e-01] [-1.98928565e-01 6.40398920e-01 -1.12093663e+00 ... 
1.48203111e+00 7.19480574e-01 6.96516275e-01] [-7.39742517e-01 1.54914522e+00 1.13489151e+00 ... -1.06173551e+00 -1.50867224e+00 4.16379154e-01]] [[-1.01947212e+00 4.92970437e-01 1.56747949e+00 ... -1.06440997e+00 -1.13721001e+00 -2.80431122e-01] [-2.18616411e-01 1.37514722e+00 -1.83366716e-01 ... -8.29562843e-01 7.94710338e-01 6.91236332e-02] [-1.12711239e+00 2.89037526e-01 -1.07638597e+00 ... 4.59863603e-01 5.48923500e-02 1.97145081e+00] ... [ 1.55564344e+00 -3.60841937e-02 -9.81260598e-01 ... -1.67231536e+00 7.65640259e-01 2.46081090e+00] [ 5.35948098e-01 1.14848876e+00 -1.09730804e+00 ... 4.77747738e-01 -6.31922543e-01 1.05764441e-01] [ 1.41954482e+00 4.09226626e-01 8.50031137e-01 ... 1.33501244e+00 -9.44722533e-01 -1.21934235e-01]]] [[[ 1.91930503e-01 -5.30634880e-01 1.56869233e-01 ... -2.20593940e-02 -1.16091537e+00 6.43517077e-01] [-1.72866181e-01 3.23942095e-01 -1.64552838e-01 ... -7.32903838e-01 -1.35351539e+00 -1.15667915e+00] [-1.13586903e+00 9.11095798e-01 -2.69006252e-01 ... 2.83126688e+00 9.34876621e-01 1.57228339e+00] ... [ 2.13023782e-01 -2.03774348e-01 3.99482459e-01 ... 2.31789660e+00 -6.86217770e-02 -3.62419784e-01] [-2.49759778e-01 7.11620808e-01 -7.48204648e-01 ... -2.31865719e-01 1.03360415e+00 8.74988496e-01] [-4.72308517e-01 2.04586029e+00 -2.04251528e-01 ... -3.41484956e-02 -5.45134246e-01 1.08575189e+00]] [[ 2.59699196e-01 2.44328082e-01 -2.27226764e-01 ... -1.56306791e+00 -3.51039350e-01 3.63389313e-01] [-2.22745204e+00 4.57226694e-01 -1.38414037e+00 ... -2.37754154e+00 -1.21623719e+00 1.69070568e-02] [-3.44576329e-01 1.76030600e+00 1.14407517e-01 ... 5.76067150e-01 -1.07127738e+00 3.31855565e-01] ... [ 8.93264115e-01 -1.57648146e-01 -7.73956895e-01 ... 7.26155996e-01 -1.36878121e+00 -1.08252667e-01] [-3.48937809e-01 8.68124306e-01 1.05661356e+00 ... -1.15922296e+00 7.20709383e-01 1.47784066e+00] [-2.49710083e-01 9.63613153e-01 -9.73578691e-02 ... 
1.27712274e+00 7.34303892e-02 1.74693727e+00]] [[ 1.97032773e+00 -9.42686424e-02 1.83801186e+00 ... 1.44099116e+00 -8.73604655e-01 1.02704382e+00] [ 3.29753458e-01 8.14346790e-01 6.69190645e-01 ... -7.11173832e-01 -1.64034629e+00 -1.39646637e+00] [-3.22197795e+00 1.00050378e+00 8.09909225e-01 ... 1.03007287e-01 2.19614163e-01 -7.34515131e-01] ... [-5.53115487e-01 -1.15756190e+00 4.97466177e-01 ... 1.47908020e+00 8.39558363e-01 -1.38116336e+00] [ 1.63014483e+00 1.43977094e+00 6.78577423e-01 ... 2.51508653e-01 -5.01085341e-01 -1.46036029e-01] [-7.17218220e-01 -1.20238495e+00 -7.58030219e-03 ... 7.52518028e-02 -1.26508021e+00 -1.03497303e+00]] ... [[ 2.89778739e-01 1.87351573e+00 -1.14580345e+00 ... 1.05961025e+00 3.35482687e-01 3.31537068e-01] [ 1.16343522e+00 -1.25980175e+00 -9.06836748e-01 ... 7.31496930e-01 -3.29231501e-01 -1.25281787e+00] [-1.12526560e+00 -7.59191513e-01 4.86816466e-01 ... 8.63201141e-01 1.98798382e+00 4.87190008e-01] ... [ 1.14656162e+00 7.44345963e-01 1.24650694e-01 ... -1.97371408e-01 -8.45105886e-01 -1.43343532e+00] [ 6.19206466e-02 8.34482193e-01 -1.83819026e-01 ... 9.39463139e-01 -1.17976177e+00 4.74623561e-01] [ 1.18038940e+00 -5.37651218e-02 9.53117311e-01 ... -1.08361280e+00 1.04810548e+00 -5.80354512e-01]] [[ 5.13506114e-01 -5.74424803e-01 2.76889086e-01 ... -1.02277303e+00 -7.22419739e-01 6.10800087e-01] [-1.31057930e+00 -2.01506734e+00 -1.49484223e-03 ... 1.45523047e+00 -1.87625200e-01 1.29098654e-01] [-1.30380586e-01 1.37673676e-01 -1.84585190e+00 ... 1.85541153e+00 -5.63597262e-01 -9.98179972e-01] ... [-4.70720530e-01 -1.08898032e+00 1.03913331e+00 ... -4.35038388e-01 5.09090126e-01 -1.91142952e+00] [ 1.61405301e+00 -9.95262623e-01 4.43878442e-01 ... 1.58299041e+00 -7.70477206e-02 1.40796745e+00] [ 1.70899200e+00 2.31344938e-01 -1.71944070e+00 ... 2.66100973e-01 -6.52722836e-01 -3.97873968e-02]] [[-7.88351178e-01 -1.87278807e+00 -2.28765082e+00 ... 
-5.86095937e-02 3.98395061e-01 1.05325556e+00] [ 2.23970103e+00 1.33050993e-01 6.58731103e-01 ... 9.38080624e-03 -1.46808636e+00 7.48494923e-01] [ 2.71273792e-01 2.77428478e-01 -1.13428068e+00 ... -7.49900460e-01 7.56279528e-01 9.29027319e-01] ... [-8.01920354e-01 -1.87369215e+00 8.67097795e-01 ... 1.45141220e+00 9.27547395e-01 1.83946812e+00] [ 9.50133353e-02 -9.69399571e-01 -1.86712170e+00 ... 6.93307161e-01 -1.13533688e+00 -5.62055349e-01] [-2.06150308e-01 6.59567118e-02 -2.01866937e+00 ... -8.49107504e-01 -6.90150738e-01 1.88551712e+00]]] [[[ 9.53622997e-01 -1.80390686e-01 7.31876433e-01 ... -6.72995389e-01 2.58609295e-01 8.56374025e-01] [ 2.23226428e-01 2.94660062e-01 -1.05672050e+00 ... -7.12875903e-01 -6.84014916e-01 -3.87120545e-01] [-7.04576254e-01 -9.68466997e-01 1.03227407e-01 ... 5.93725562e-01 -1.15598679e+00 7.90312767e-01] ... [ 7.52403259e-01 -1.94058105e-01 1.38077289e-01 ... -2.19747066e+00 -1.57776070e+00 3.14288318e-01] [ 5.18997349e-02 -1.49131700e-01 1.32970595e+00 ... 1.66985917e+00 -7.47146904e-01 7.03787744e-01] [ 3.73103410e-01 9.63592604e-02 -2.27628016e+00 ... 1.90739572e-01 -1.15539622e+00 -1.83077097e+00]] [[-4.18658167e-01 -9.44526017e-01 6.44436240e-01 ... -1.92496526e+00 -1.71236002e+00 -6.07771337e-01] [-4.23359483e-01 4.78179604e-01 -7.57127106e-01 ... -6.80064559e-01 -6.16788566e-01 5.20448565e-01] [ 1.86993480e+00 -1.15316629e+00 -3.47279012e-01 ... -3.43504578e-01 7.43005931e-01 1.17203951e+00] ... [-1.22293699e+00 -1.32682502e-01 -6.29219890e-01 ... 8.73293459e-01 9.67995942e-01 -1.40651548e+00] [-8.63266528e-01 2.23952457e-01 -2.88336873e-01 ... 4.68522847e-01 -3.72127563e-01 -7.91044891e-01] [ 1.54144931e+00 4.06220496e-01 1.28616527e-01 ... -1.10680914e+00 1.18888843e+00 -3.64762455e-01]] [[ 2.95600563e-01 7.30619252e-01 -2.54074186e-01 ... 6.54203236e-01 -3.14300627e-01 -5.86457908e-01] [-1.59130549e+00 -1.51702976e+00 4.29751277e-01 ... 
-1.45093754e-01 -6.27419502e-02 -5.22055745e-01] [-5.90145707e-01 6.79940581e-01 -1.26675367e+00 ... -9.49050605e-01 5.81484020e-01 4.95303243e-01] ... [-5.14364898e-01 -5.34796774e-01 9.16819870e-01 ... 9.39088404e-01 -1.26371741e+00 -1.23688698e-01] [-7.07484961e-01 -4.74097840e-02 -1.10218249e-01 ... -4.91767019e-01 -3.44909787e-01 1.03926790e+00] [-2.22500995e-01 -3.06852218e-02 8.04801285e-01 ... 5.08351386e-01 6.07424736e-01 -2.48190492e-01]] ... [[ 4.01781559e-01 -6.43331468e-01 -1.21078892e-02 ... 7.74384916e-01 -1.28258288e+00 -1.37440896e+00] [-2.68448919e-01 6.05629802e-01 -5.61545253e-01 ... -9.87516105e-01 4.47557010e-02 -3.24054629e-01] [ 2.11109662e+00 6.62069559e-01 -3.41239154e-01 ... -3.24228615e-01 -2.93989271e-01 1.05968201e+00] ... [ 2.11466387e-01 -1.07588243e+00 8.53271604e-01 ... -4.71323490e-01 8.49506617e-01 -8.80894005e-01] [-2.59414768e+00 -4.54543561e-01 -5.38383782e-01 ... -5.97846627e-01 3.87320518e-01 -1.07098842e+00] [-8.60352695e-01 -1.85505784e+00 -7.43128419e-01 ... -3.27163428e-01 1.60587978e+00 -3.35177444e-02]] [[-8.18976343e-01 -1.34825945e+00 7.16292739e-01 ... 1.51329851e+00 1.77553773e-01 -1.85084534e+00] [ 2.84820765e-01 2.02608132e+00 -5.03987312e-01 ... 2.07867518e-01 -6.65378273e-02 -7.37760782e-01] [-4.94812906e-01 1.13109899e+00 -6.02062941e-01 ... -3.84175360e-01 -3.71873714e-02 5.06129980e-01] ... [-3.74184906e-01 -1.41674125e+00 -1.81456938e-01 ... -4.67789650e-01 -1.37695420e+00 -9.15603757e-01] [ 1.93144426e-01 9.93545771e-01 -6.49878442e-01 ... -5.08077145e-01 -4.28443789e-01 -4.53709096e-01] [-1.39276421e+00 1.39945164e-01 1.01331556e+00 ... -1.94608808e+00 9.29236710e-01 1.15883732e+00]] [[-8.14822793e-01 -1.17868066e+00 -2.41098359e-01 ... 1.53839318e-02 -1.06769419e+00 -2.19808623e-01] [ 2.57548481e-01 1.12279221e-01 8.99171531e-01 ... -9.65883315e-01 -1.51021051e+00 4.85546649e-01] [-1.04170465e+00 -7.65689090e-02 -1.48165274e+00 ... -8.40455592e-01 -1.27149844e+00 -4.58315074e-01] ... 
[ 1.14775276e+00 -3.15305501e-01 -8.89545023e-01 ... -4.39223707e-01 -3.69515270e-01 2.49540389e-01] [-4.84721929e-01 -1.02480841e+00 -2.49224171e-01 ... -9.02030945e-01 7.85711765e-01 1.37383902e+00] [-1.74945402e+00 1.26655445e-01 8.01746070e-01 ... 5.24932504e-01 1.67423630e+00 -5.05467236e-01]]] [[[ 1.20763868e-01 8.27019870e-01 9.78700578e-01 ... -2.11677670e+00 3.02351594e-01 -3.67287666e-01] [-3.65977347e-01 -2.90901005e-01 3.28410029e-01 ... 3.21206182e-01 4.59188253e-01 9.91900444e-01] [ 1.09564710e+00 9.05727565e-01 9.20417905e-01 ... 3.78491729e-01 9.35762703e-01 -2.62069553e-01] ... [-8.94387662e-01 1.00849068e+00 -1.29453942e-01 ... -1.12145424e+00 -1.52769840e+00 4.23987418e-01] [-3.63571584e-01 5.25398612e-01 1.15372562e+00 ... 5.83407402e-01 3.07219356e-01 -1.92154706e+00] [-1.52066100e+00 4.51054245e-01 6.72825813e-01 ... 2.10890770e-01 -1.47992289e+00 1.78361267e-01]] [[-6.25315368e-01 1.14751124e+00 -1.01571846e+00 ... -6.13015413e-01 5.44305742e-01 8.25439990e-01] [-5.39522946e-01 -4.04984117e-01 -8.90653551e-01 ... -4.21801597e-01 -5.40019572e-01 9.52754080e-01] [ 1.36873209e+00 1.66890129e-01 5.73816121e-01 ... 6.48367345e-01 -2.49245495e-01 1.35673916e+00] ... [ 1.05681396e+00 3.69177938e-01 -1.19803526e-01 ... 8.57699394e-01 8.96856010e-01 -4.49316353e-01] [-6.15041077e-01 -3.25057417e-01 -1.71703935e-01 ... -7.11938500e-01 6.19132876e-01 -7.74370492e-01] [ 2.89905101e-01 -2.11447263e+00 1.05706775e+00 ... 4.96414229e-02 -1.34186888e+00 2.02960062e+00]] [[ 4.83425647e-01 1.93029416e+00 -2.35942200e-01 ... -4.49886203e-01 2.84076333e-01 -5.83726121e-03] [-7.68234611e-01 4.97821957e-01 -1.99997455e-01 ... 1.94471145e+00 -6.99675560e-01 1.13527918e+00] [ 3.14734101e-01 4.58027154e-01 -6.80557549e-01 ... -5.54813921e-01 1.16986430e+00 -7.64942467e-01] ... [ 1.43376455e-01 8.18300188e-01 3.87491971e-01 ... -6.88617229e-01 -1.25725758e+00 -4.16016966e-01] [-9.02495682e-01 -8.16656649e-01 -5.81749916e-01 ... 
1.61822605e+00 -1.37695551e-01 -3.14967394e-01] [ 7.83180773e-01 8.85470510e-01 -3.65881830e-01 ... 9.38965753e-02 1.40324986e+00 8.74607265e-01]] ... [[-8.65208089e-01 1.44202995e+00 -8.05992723e-01 ... -1.28516400e+00 3.05902421e-01 -2.22936064e-01] [-3.44154924e-01 -1.57520461e+00 -2.31987610e-02 ... -5.31620145e-01 -1.15754664e+00 3.46713930e-01] [ 3.74497503e-01 -7.21379101e-01 -3.69041234e-01 ... 1.76825523e+00 1.54634789e-01 5.47314644e-01] ... [ 2.61802435e-01 1.81438768e+00 -1.44908857e+00 ... -2.76567250e-01 -8.86132836e-01 -1.14600122e+00] [-3.84081095e-01 4.12902117e-01 9.09493744e-01 ... 4.35967118e-01 -2.90281326e-01 1.40965152e+00] [-5.39397588e-04 1.96982056e-01 -1.82050002e+00 ... -2.51428097e-01 1.10521245e+00 -9.99518454e-01]] [[-1.12486236e-01 1.04214585e+00 6.52067244e-01 ... 8.51716697e-01 8.25984597e-01 -6.93717539e-01] [-8.68814439e-02 -1.66971755e+00 -1.23017502e+00 ... 1.05283904e+00 5.32503068e-01 1.75066853e+00] [ 5.98351657e-01 -1.09605849e+00 3.72211337e-01 ... 5.09895086e-01 -6.54194474e-01 -1.16475619e-01] ... [ 1.19200504e+00 1.21905553e+00 -1.20421398e+00 ... 1.46329269e-01 1.88017023e+00 -7.54193366e-01] [-6.67666912e-01 -9.58840072e-01 1.11553979e+00 ... -3.61957848e-01 -6.87870562e-01 -8.79406989e-01] [-3.14598978e-01 -2.35739636e+00 8.34421158e-01 ... 5.02961397e-01 -2.65496522e-01 6.85774505e-01]] [[-4.56567615e-01 9.76413429e-01 1.75386381e+00 ... -4.93614435e-01 -1.11113858e+00 -2.55582023e+00] [ 9.82047096e-02 -5.81952810e-01 -1.00044906e-01 ... 3.66731554e-01 -9.22060549e-01 1.33604562e+00] [ 1.85733998e+00 -1.69544911e+00 1.79687008e-01 ... -5.61732650e-01 -1.49392509e+00 -6.35030925e-01] ... [-1.27291381e+00 2.17200413e-01 -1.16607642e+00 ... 1.07814324e+00 4.48553860e-01 -6.86378121e-01] [-1.04314208e+00 3.00773352e-01 9.57079589e-01 ... -3.01811665e-01 8.62680554e-01 -2.66021818e-01] [-9.31853056e-01 1.58123165e-01 4.15208668e-01 ... 
-9.05573070e-01 -7.93466568e-01 1.29566789e+00]]] [[[-6.02081597e-01 -5.94046712e-01 -1.41732013e+00 ... -1.37965143e+00 1.59616971e+00 -2.38219410e-01] [-7.24376798e-01 -1.33532536e+00 -1.07766783e+00 ... -1.16869593e+00 9.98495281e-01 -8.19873810e-02] [-1.58870184e+00 2.63360411e-01 1.84711421e+00 ... -5.78419790e-02 -5.10970764e-02 6.84966981e-01] ... [-5.48192978e-01 8.32117021e-01 -2.35998482e-01 ... 8.77859354e-01 1.67812610e+00 -4.22065295e-02] [ 7.90827930e-01 -1.47773874e+00 1.20602846e+00 ... 6.24508560e-01 1.26687789e+00 3.31600785e-01] [-1.23999822e+00 1.36081707e+00 9.53021348e-01 ... -5.33783495e-01 2.47355270e+00 3.82425994e-01]] [[-2.93040419e+00 7.54220843e-01 -7.16635883e-01 ... 1.05211623e-01 2.59997755e-01 -2.20736408e+00] [ 1.56170279e-01 5.87006450e-01 -6.64979964e-02 ... 9.42820311e-01 3.49124968e-01 -3.06512445e-01] [ 3.53885382e-01 6.22912347e-01 -3.06488931e-01 ... -1.54870725e+00 -7.13519603e-02 7.31498182e-01] ... [ 7.84497336e-02 7.79489458e-01 5.85257053e-01 ... -1.82127607e+00 4.87407148e-01 -1.36021864e+00] [-1.08629382e+00 -7.55476534e-01 -9.10715342e-01 ... -1.48520756e+00 -1.30495167e+00 -1.19015944e+00] [-9.02404487e-01 4.08843905e-01 5.44260681e-01 ... -1.01842380e+00 2.40940857e+00 -3.98833364e-01]] [[ 4.40387905e-01 -8.47291708e-01 -6.60317481e-01 ... 7.62690604e-01 -2.00039715e-01 2.48768598e-01] [ 3.59836459e-01 -9.80687618e-01 6.62862360e-01 ... 1.06594241e+00 1.37882555e+00 -4.25717933e-03] [ 1.13194656e+00 -1.35539863e-02 1.82517493e+00 ... 3.36210221e-01 8.38890314e-01 2.89846253e+00] ... [-2.00326514e+00 -1.36127114e-01 9.84892696e-02 ... 1.22468781e+00 -1.52318001e+00 -1.98749468e-01] [-5.92649400e-01 1.15274858e+00 -1.55087245e+00 ... -3.99755657e-01 1.57649621e-01 1.32520700e+00] [ 5.87298581e-03 1.10603654e+00 4.33470041e-01 ... 1.43049276e+00 7.16164410e-01 6.81770205e-01]] ... [[-1.73734689e+00 1.75181389e+00 8.17178249e-01 ... 
2.82941073e-01 8.50992024e-01 5.72614610e-01] [-1.68058896e+00 -1.49483454e+00 8.51757407e-01 ... 7.53060341e-01 -1.77625811e+00 -1.74356127e+00] [ 6.89407468e-01 -7.10558593e-01 1.87200367e+00 ... -4.89293367e-01 -7.74583340e-01 -7.83985928e-02] ... [-1.68468654e-01 1.21850908e-01 -9.35351133e-01 ... 1.81990516e+00 -2.06519771e+00 -7.02612519e-01] [ 2.38435268e+00 -1.66183352e+00 -1.28835678e-01 ... 3.06615740e-01 -8.45475197e-01 -4.59020972e-01] [-2.20653281e-01 1.92458823e-01 1.97820377e+00 ... -1.25856256e+00 -1.14866756e-01 -1.46128035e+00]] [[ 3.84714931e-01 -1.24152076e+00 -4.24875468e-01 ... -1.05226204e-01 2.82658935e-01 8.75565931e-02] [-1.15839042e-01 1.25971571e-01 -1.29227829e+00 ... -1.42939031e+00 -1.35768914e+00 2.45879069e-01] [ 1.83516562e-01 7.41596222e-01 5.13454437e-01 ... 1.45098507e+00 -1.39312589e+00 -5.13722837e-01] ... [-9.51221958e-02 -1.61020112e+00 1.01974964e+00 ... 1.92909729e+00 5.41058421e-01 -1.10970843e+00] [ 4.30475444e-01 1.07926774e+00 -1.17457831e+00 ... -1.75107801e+00 -7.38496363e-01 1.60405394e-02] [ 2.98136926e+00 -7.76714683e-01 2.08180055e-01 ... -1.76984704e+00 5.61687956e-03 6.77843750e-01]] [[ 1.02721667e+00 9.33940887e-01 -1.60020900e+00 ... -1.07332349e+00 -1.28438509e+00 -9.42647159e-02] [-2.86072898e+00 -5.24433553e-01 1.23775113e+00 ... 9.67592299e-02 -2.38018170e-01 -1.13143504e+00] [-9.88471389e-01 -1.43009961e+00 -2.77972687e-02 ... 3.74866515e-01 3.39029282e-01 8.54863882e-01] ... [ 1.50050771e+00 -2.59420425e-01 -9.65866208e-01 ... -2.95092374e-01 -1.35842013e+00 6.56699389e-02] [-1.30403423e+00 -1.28483045e+00 -3.77191514e-01 ... 6.83886707e-02 1.20546611e-03 -1.27910888e+00] [ 8.37084949e-01 -8.74578714e-01 9.88755405e-01 ... 1.48315823e+00 -7.64621139e-01 1.51079446e-01]]] [[[ 1.31558459e-02 4.01280612e-01 1.39903510e-02 ... -1.18282259e+00 2.36928314e-01 -1.14098966e+00] [-8.20199624e-02 2.63587385e-02 2.83574909e-01 ... 
-1.20142055e+00 1.61731577e+00 -1.30864108e+00] [-1.60801685e+00 1.53157353e+00 -1.85404308e-02 ... -6.29537180e-02 7.51082748e-02 -1.04008961e+00] ... [-2.33757243e-01 -2.02116513e+00 -4.57154602e-01 ... -6.40141487e-01 2.08410963e-01 1.61730683e+00] [-1.37016702e+00 5.29959321e-01 1.64672935e+00 ... -2.26976573e-01 -4.60952550e-01 2.30193567e-02] [ 7.32472479e-01 2.17088923e-01 6.02563322e-01 ... 7.46850133e-01 9.12431359e-01 9.42446962e-02]] [[-1.57461941e+00 4.91484523e-01 -7.10497737e-01 ... 3.51874113e-01 -1.00222361e+00 -1.02690712e-01] [-2.09321737e+00 -7.09665101e-03 5.60872734e-01 ... -3.80940408e-01 -6.41477048e-01 -8.93071353e-01] [ 1.88773060e+00 1.70128018e-01 -2.25654785e-02 ... 1.63564122e+00 4.12418872e-01 -9.61271346e-01] ... [ 2.41287664e-01 3.49938720e-02 -1.28659070e+00 ... 1.67176771e+00 -2.67150015e-01 -2.88755503e-02] [-3.81119810e-02 -3.24916214e-01 3.22743148e-01 ... -1.94267184e-01 2.57221878e-01 -1.09031701e+00] [-8.78403962e-01 -2.66375184e-01 1.31601560e+00 ... -1.33821440e+00 1.12288046e+00 7.66819477e-01]] [[ 2.65914607e+00 6.59075797e-01 -3.98884982e-01 ... 1.22664467e-01 1.15642440e+00 1.53343871e-01] [ 1.13900113e+00 -4.70025539e-01 -4.00727570e-01 ... 5.08365691e-01 6.44261777e-01 -7.44854569e-01] [ 8.03224564e-01 -1.15140963e+00 -2.86703992e+00 ... -1.58689141e+00 6.51686668e-01 -7.11767316e-01] ... [-6.24876618e-01 1.09430182e+00 1.52733132e-01 ... 5.39156020e-01 9.55669224e-01 -1.44007158e+00] [-6.24575056e-02 -2.05281556e-01 5.57914793e-01 ... -8.75536919e-01 -1.50003660e+00 -1.03100941e-01] [ 6.96090236e-02 -3.88018489e-01 -9.96577963e-02 ... 5.39370894e-01 -1.13210046e+00 6.85560882e-01]] ... [[-1.60902154e+00 -4.68555748e-01 -4.14724708e-01 ... 6.41960800e-02 2.25360179e+00 -6.85790360e-01] [-2.90467888e-01 1.49045587e-01 8.43801022e-01 ... 3.56854230e-01 -1.35236704e+00 7.57872880e-01] [ 6.46277428e-01 -1.84488237e-01 -2.61189401e-01 ... -4.48559761e-01 -1.12263942e+00 -4.96017665e-01] ... 
[-9.19827163e-01 1.18583214e+00 -2.64050066e-01 ... 1.51019371e+00 4.35720831e-01 -1.28647578e+00] [ 2.44737172e+00 2.14921618e+00 1.57233790e-01 ... -1.05600953e+00 -5.91398239e-01 -8.31767738e-01] [-8.50565314e-01 9.39395607e-01 6.75310791e-01 ... -9.70504403e-01 -1.28626585e+00 -1.16949451e+00]] [[ 1.62513345e-01 6.44627988e-01 -3.55228841e-01 ... 4.42972630e-01 7.31825829e-01 2.14122701e+00] [ 3.86918932e-01 8.09399903e-01 2.19240928e+00 ... 3.10528278e-01 -9.30772349e-02 1.11101651e+00] [ 8.16518903e-01 4.69337285e-01 1.21445572e+00 ... -3.38800192e-01 -9.18025732e-01 -7.13768378e-02] ... [-5.72139382e-01 -1.98056906e-01 -1.37635609e-02 ... 1.48500979e+00 -8.75279546e-01 -2.58552045e-01] [ 8.44075561e-01 2.02640033e+00 -3.05907488e-01 ... 1.32801569e+00 3.21285836e-02 1.49089456e+00] [-9.98900592e-01 -4.07066226e-01 2.21707076e-02 ... -1.68590665e+00 4.32747602e-02 -3.65256816e-01]] [[-1.82579529e+00 -3.02114248e-01 -8.12276423e-01 ... 4.66650009e-01 -2.10413194e+00 8.48827183e-01] [ 1.01055229e+00 -4.20425802e-01 1.00816655e+00 ... 9.43720758e-01 -3.57220113e-01 -4.65635270e-01] [-1.65543914e-01 -6.97284281e-01 1.29850543e+00 ... 1.43835461e+00 9.46403623e-01 -6.91554487e-01] ... [ 2.19020173e-02 -1.23964238e+00 1.20897579e+00 ... -3.82464767e-01 1.92065299e-01 3.65685582e-01] [-2.99517512e-01 -4.93246853e-01 -1.69416368e+00 ... -1.71526456e+00 1.94774330e-01 1.16504896e+00] [ 1.23940337e+00 -1.46825802e+00 4.70272303e-01 ... 1.40129459e+00 3.97697628e-01 1.65193200e+00]]]] [[[[ 1.23323500e+00 -5.92369735e-01 -3.82493138e-01 ... 1.57969224e+00 -2.38125846e-01 -1.31276727e+00] [-3.57378215e-01 6.97614133e-01 9.52830672e-01 ... 8.76951277e-01 -4.11116838e-01 1.34587371e+00] [-4.88036364e-01 -1.36388075e+00 -1.20908201e+00 ... 7.04106688e-02 1.21434307e+00 -9.43076789e-01] ... [ 9.78540540e-01 -1.02512681e+00 1.20208651e-01 ... -1.94214597e-01 7.18377307e-02 -2.24389821e-01] [-1.74473810e+00 -3.04792523e-02 4.38361555e-01 ... 
1.55304408e+00 1.17161739e+00 -2.58954144e+00] [ 5.66697657e-01 3.66875499e-01 9.59797204e-01 ... -9.70814884e-01 -3.06181073e-01 5.89311242e-01]] [[ 3.15551937e-01 6.10315859e-01 -2.39857268e+00 ... -1.12881267e+00 5.85308611e-01 1.39757049e+00] [ 7.58175254e-01 -5.02247989e-01 -3.06897783e+00 ... 6.57462895e-01 -1.13724065e+00 -9.19555187e-01] [-3.41358691e-01 -3.59268397e-01 -1.34351969e+00 ... 9.18600559e-02 8.68714079e-02 -1.02976775e+00] ... [ 4.08144951e-01 1.26874888e+00 4.06572789e-01 ... -1.03783846e+00 8.17284048e-01 -5.33232570e-01] [ 2.04957032e+00 -1.65471709e+00 2.25635695e+00 ... 9.89819229e-01 -7.86351502e-01 -1.19606674e+00] [ 9.60853934e-01 -3.87807280e-01 6.94653034e-01 ... -4.05494630e-01 8.07480037e-01 -2.27209479e-01]] [[ 5.85759580e-01 -5.44750810e-01 -1.28146851e+00 ... 1.83366334e+00 -1.66755819e+00 2.28518501e-01] [ 7.75380254e-01 7.61839986e-01 -1.65965927e+00 ... -1.16502750e+00 3.94952595e-01 -3.24094748e+00] [ 5.43846607e-01 -4.08109993e-01 1.74398482e+00 ... 2.11789393e+00 -3.24546844e-02 -6.19434297e-01] ... [ 5.17748475e-01 -2.05390498e-01 7.13772655e-01 ... 1.23120356e+00 -6.31003559e-01 -1.91408023e-01] [-1.36532593e+00 -8.79153609e-01 2.24785447e+00 ... -2.26697111e+00 6.51571929e-01 -9.18032765e-01] [-2.05171490e+00 4.10707444e-01 2.78859282e+00 ... -5.26428781e-02 1.36438894e+00 1.96186498e-01]] ... [[-6.95043564e-01 7.87300587e-01 1.11082637e+00 ... -2.28976496e-02 3.44765931e-01 1.22104622e-01] [-9.29076150e-02 1.53926361e+00 3.06722850e-01 ... -5.29127046e-02 -7.97016248e-02 1.77234277e-01] [ 5.80816686e-01 -1.01856554e+00 -5.59776008e-01 ... -1.34296191e+00 -7.97224283e-01 4.94813174e-01] ... [-1.01449096e+00 1.68803406e+00 1.10684431e+00 ... 5.49471855e-01 -1.08938503e+00 7.87671089e-01] [ 4.55783963e-01 -5.89005172e-01 -4.92162943e-01 ... 6.02514893e-02 7.64396012e-01 5.55210114e-01] [ 5.46140671e-01 -1.00749600e+00 1.01629846e-01 ... 
-1.96708053e-01 -5.69219440e-02 -7.39310920e-01]] [[-1.12327778e+00 3.23240191e-01 -1.36971569e+00 ... -1.79433358e+00 -6.55200422e-01 9.05515015e-01] [-4.78084952e-01 -9.04835820e-01 -3.40139061e-01 ... 1.95652515e-01 1.34294182e-01 5.54476567e-02] [-1.74069464e+00 3.64239663e-01 -2.15346679e-01 ... -5.45705140e-01 -1.69555947e-01 -5.86713612e-01] ... [-1.90374434e-01 -3.26616359e+00 -7.27030218e-01 ... 1.34652388e+00 7.10754752e-01 -2.02841830e+00] [ 1.97831476e+00 1.34686720e+00 7.07913995e-01 ... -1.07147738e-01 1.06575441e+00 -8.67629409e-01] [ 1.22636724e+00 6.08076692e-01 6.75553977e-01 ... -9.34183374e-02 1.25215471e+00 -4.77159083e-01]] [[-8.71403873e-01 -1.36350167e+00 5.11147499e-01 ... -5.89190185e-01 -4.52949852e-01 -8.29600573e-01] [ 8.22145581e-01 2.43693799e-01 -1.06348860e+00 ... 1.24091077e+00 -3.18931609e-01 7.81398773e-01] [ 3.61252725e-01 3.07243615e-01 -2.53461212e-01 ... -1.92626667e+00 -8.98219109e-01 1.22531462e+00] ... [ 3.99703503e-01 -5.70364416e-01 -5.47709465e-01 ... 1.24709487e+00 -1.43715918e+00 -1.53088599e-01] [-9.41607952e-01 -5.36349654e-01 1.66098630e+00 ... 2.50633031e-01 -1.70048904e+00 -9.06331539e-01] [-1.07201084e-01 3.28672260e-01 8.13732386e-01 ... 1.02233446e+00 -1.14653826e+00 -8.43030512e-01]]] [[[-1.99292228e-01 -2.44173989e-01 5.27302921e-01 ... 8.29736471e-01 6.95907027e-02 -2.07615066e+00] [ 1.73912370e+00 -6.11447170e-02 -1.58457726e-01 ... 6.36339545e-01 -1.85358346e+00 1.48410630e+00] [ 2.83758730e-01 -7.68213451e-01 -7.85294294e-01 ... -4.08806384e-01 -3.58740956e-01 1.31747231e-01] ... [ 1.15496254e+00 4.89925027e-01 -6.43652260e-01 ... 1.51456729e-01 1.41802490e-01 5.81032157e-01] [-1.79085410e+00 -2.08211526e-01 -9.55031872e-01 ... -5.10650933e-01 1.40240467e+00 -1.32749170e-01] [-1.58007824e+00 -2.38886848e-01 -9.40107048e-01 ... -8.95949602e-01 -2.07107753e-01 6.60413802e-01]] [[ 1.94848195e-01 -1.23052943e+00 -7.30633676e-01 ... 
2.57281274e-01 1.25650167e+00 -1.79818463e+00] [-8.64759207e-01 -1.87927544e+00 3.74456525e-01 ... -1.30349666e-01 1.19263947e+00 -1.00722507e-01] [ 7.34839797e-01 4.81009036e-01 -3.12831610e-01 ... -1.87124208e-01 -1.24904609e+00 -8.53290796e-01] ... [ 4.47807342e-01 1.79635942e+00 1.21335566e+00 ... 7.95196593e-01 2.44477129e+00 4.87428337e-01] [-1.89167118e+00 1.82332385e+00 2.83809267e-02 ... 1.85709044e-01 -2.43125647e-01 2.80836135e-01] [ 4.76821989e-01 -9.43690360e-01 4.38408375e-01 ... -3.76912534e-01 -5.05963027e-01 1.36234105e+00]] [[ 9.06826973e-01 1.46823332e-01 -9.56331372e-01 ... -5.07975221e-01 -1.06165655e-01 1.39818704e+00] [ 1.39628994e+00 -4.31789458e-01 1.36277163e+00 ... -4.67828333e-01 -1.22116350e-01 3.20492029e-01] [ 5.56151032e-01 -1.07760191e+00 8.38512301e-01 ... -6.83139265e-02 -3.62557799e-01 -1.39153111e+00] ... [-1.80738437e+00 -2.00004506e+00 -3.34668428e-01 ... 2.42159739e-01 -2.34327018e-01 -8.30331922e-01] [ 9.86381710e-01 2.83307076e+00 1.78498828e+00 ... -1.11301374e+00 1.21628642e+00 1.52603135e-01] [ 8.59207273e-01 2.41804671e+00 9.95199978e-01 ... -2.36511603e-01 1.94675207e-01 4.02861148e-01]] ... [[ 5.26562810e-01 -3.81431937e-01 1.45357835e+00 ... -1.56162053e-01 -6.69771671e-01 3.11314136e-01] [-1.31942302e-01 -5.15506446e-01 -1.92893550e-01 ... -7.92276263e-01 -7.64997065e-01 -9.06961262e-01] [ 6.48517251e-01 -2.20618939e+00 2.13108920e-02 ... -1.31417370e+00 1.26781797e+00 -1.68037701e+00] ... [-6.83493912e-01 -1.58783221e+00 3.25858146e-01 ... 1.41958153e+00 -2.95870751e-01 2.24571675e-01] [-1.83093500e+00 -2.29872704e-01 -1.62533998e+00 ... -1.23344943e-01 8.60444665e-01 -4.92367327e-01] [-1.89675820e+00 -1.03959453e+00 1.86307657e+00 ... -6.35155216e-02 -5.16636789e-01 4.73163784e-01]] [[-1.73132336e+00 3.71608399e-02 -5.25125027e-01 ... 5.64853191e-01 2.41342679e-01 7.63946474e-01] [-1.11741014e-01 1.80669546e+00 9.39510584e-01 ... 
-2.27151662e-01 8.45427036e-01 -3.95124316e-01] [ 2.18798026e-01 2.85426450e+00 -1.72677183e+00 ... -9.76272151e-02 3.00188482e-01 -2.08920646e+00] ... [-8.27631280e-02 3.13713513e-02 6.65946543e-01 ... 6.55785024e-01 -6.31808817e-01 1.24091423e+00] [ 1.03435957e+00 -4.53088284e-01 5.93459785e-01 ... -6.18472788e-03 -5.83477795e-01 2.37755433e-01] [-6.17238700e-01 -2.75851035e+00 -1.12214363e+00 ... -6.37178481e-01 1.11288261e+00 2.55052119e-01]] [[ 4.92838919e-01 -6.98591530e-01 7.84847438e-01 ... 2.51913965e-01 -3.29211682e-01 -1.63471162e-01] [ 6.21001005e-01 -1.71785140e+00 9.53293622e-01 ... 1.85326588e+00 -1.51103601e-01 3.16508234e-01] [ 3.56601089e-01 -3.38633247e-02 6.20319188e-01 ... -1.10310638e+00 -1.69890225e+00 -1.00483418e+00] ... [ 1.37349629e+00 2.19930261e-02 5.54792881e-01 ... 4.27298583e-02 2.50167823e+00 5.50373197e-01] [-4.71383899e-01 -9.11017120e-01 5.02838194e-01 ... -1.03183627e+00 1.14653063e+00 -2.03875631e-01] [ 7.81262279e-01 -7.56572485e-01 1.44760144e+00 ... -4.91196126e-01 3.00148845e-01 -9.48434651e-01]]] [[[-1.04119158e+00 3.20580602e+00 9.52521205e-01 ... 9.11120355e-01 9.44278419e-01 -5.87845147e-01] [-1.34069192e+00 1.98793754e-01 -9.67982411e-01 ... 7.80359328e-01 -4.45635945e-01 -1.25475299e+00] [ 1.72991443e+00 1.15843987e+00 1.16892445e+00 ... -1.63524175e+00 -6.44384623e-01 1.27915931e+00] ... [ 6.19956613e-01 -8.47306907e-01 1.87965178e+00 ... -3.76906633e-01 5.42749107e-01 1.25311387e+00] [ 8.26768100e-01 -1.27555406e+00 3.81657153e-01 ... 1.49923694e+00 -6.49992287e-01 1.37071311e+00] [ 6.68976009e-01 6.92575693e-01 9.24205542e-01 ... 5.59639812e-01 -1.05588496e+00 6.53018951e-02]] [[-9.03311312e-01 8.27974558e-01 -2.19244123e+00 ... -5.79581141e-01 1.32362092e+00 -4.96714979e-01] [-8.85189593e-01 -5.79379380e-01 1.62113190e+00 ... -4.04243618e-01 -1.96340397e-01 -7.50915170e-01] [ 5.81580639e-01 4.59042192e-02 -1.55113745e+00 ... 1.08059692e+00 -1.16074667e-03 -4.45255578e-01] ... 
[-3.09289783e-01 -9.49557364e-01 1.03845239e+00 ... -3.48792553e-01 -1.40967774e+00 -2.73477167e-01] [-4.36649352e-01 -3.96095008e-01 -7.83810198e-01 ... 1.25730419e+00 -1.74989092e+00 5.93147516e-01] [ 4.05574262e-01 9.60221350e-01 4.39011842e-01 ... 7.34966159e-01 -3.90512466e-01 -2.40003586e-01]] [[-8.46816182e-01 4.61549252e-01 -1.01174545e+00 ... -2.20966965e-01 -1.10270214e+00 -1.32181244e-02] [-3.14041227e-01 -1.40180802e+00 -3.47694546e-01 ... -5.67678250e-02 4.06781942e-01 -7.75527894e-01] [ 1.13789451e+00 -9.62725997e-01 1.31970870e+00 ... -5.25520146e-01 1.56546342e+00 7.67431259e-01] ... [-1.75088549e+00 -8.13360274e-01 -4.81725723e-01 ... 8.42024565e-01 1.53393722e+00 -1.71970952e+00] [-1.21072650e+00 1.92572856e+00 -2.05520916e+00 ... 5.19997776e-01 -1.84094083e+00 -2.03566003e+00] [-6.16168678e-01 1.11507528e-01 2.50321954e-01 ... -1.77267706e+00 1.49444461e+00 -1.58593014e-01]] ... [[ 2.24233538e-01 -1.05283642e+00 -1.12325203e+00 ... -1.75294530e+00 -2.24798036e+00 3.38135332e-01] [-5.78029513e-01 1.24806607e+00 -1.23398578e+00 ... 8.79238546e-01 4.11455482e-01 -8.52298200e-01] [ 1.14349163e+00 8.46877635e-01 -2.83826083e-01 ... 2.89198756e-01 1.29274917e+00 -1.02408803e+00] ... [-1.77220631e+00 -8.77820015e-01 5.98758459e-01 ... 1.66585660e+00 1.71664548e+00 -7.78433979e-01] [-9.18538153e-01 -2.86503524e-01 -1.23894587e-01 ... -1.66818595e+00 -9.49083865e-01 6.34241879e-01] [ 2.73534120e-03 1.23105741e+00 -7.66553044e-01 ... -3.31558794e-01 -4.22038674e-01 7.85169303e-01]] [[-1.70507193e-01 4.84787256e-01 7.29420632e-02 ... 2.71066368e-01 5.75823843e-01 -1.03737101e-01] [-1.45861053e+00 -4.04422909e-01 1.34473071e-01 ... 6.22011721e-01 1.07802951e+00 -1.82018504e-01] [ 1.06206667e+00 -2.26808965e-01 1.82099611e-01 ... -5.82803905e-01 2.83344895e-01 2.12890530e+00] ... [ 9.43042397e-01 -9.10798073e-01 -8.54331136e-01 ... -1.37387300e+00 6.72504604e-01 -1.40149105e+00] [ 1.35822380e+00 5.17645240e-01 5.99327266e-01 ... 
7.62225926e-01 -1.76312402e-01 -2.95318782e-01] [ 2.19628358e+00 -1.01754725e+00 -3.89887959e-01 ... -4.52889353e-01 -9.85047400e-01 6.22693658e-01]] [[ 2.52115279e-01 -2.04128838e+00 -3.36778015e-01 ... -3.57238352e-01 -6.60629749e-01 1.84601343e+00] [-4.54383492e-01 -4.23835993e-01 1.01674438e+00 ... 7.63773441e-01 -4.99166042e-01 2.91234303e+00] [-1.11351347e+00 -1.69542646e+00 -9.79650855e-01 ... 3.78629565e-01 4.90242749e-01 -4.39599365e-01] ... [-8.97537708e-01 4.98310208e-01 -2.74095796e-02 ... 1.73176840e-01 1.90637732e+00 -1.49531937e+00] [-5.05801320e-01 1.37510896e+00 1.51159060e+00 ... 2.08269939e-01 -3.22863847e-01 5.84220231e-01] [-3.26224685e-01 -1.12695444e+00 -4.61279392e-01 ... -8.97293329e-01 5.84057033e-01 -2.73894846e-01]]] [[[ 8.25672895e-02 1.68310666e+00 9.36542690e-01 ... -8.37416828e-01 8.50504875e-01 1.85378563e+00] [ 3.87880772e-01 -5.55452704e-01 -6.14343166e-01 ... -2.03548145e+00 -2.67641872e-01 6.23421371e-01] [ 1.88004959e+00 5.71522951e-01 2.59809303e+00 ... -3.40940118e-01 -1.19576424e-01 3.12010169e-01] ... [-3.48633289e-01 -2.76373363e+00 3.79700869e-01 ... 1.52130234e+00 2.40536094e-01 -1.45502448e+00] [ 5.19213796e-01 8.60076249e-01 1.96033701e-01 ... -2.42146349e+00 -8.12173486e-01 -9.66538429e-01] [-1.70254782e-01 -3.81191045e-01 -4.89145726e-01 ... -1.32867122e+00 -8.79010558e-02 4.08449471e-01]] [[-1.53334701e+00 -1.99106693e-01 -2.83432692e-01 ... 7.00655639e-01 8.08698356e-01 3.51904750e-01] [ 1.38719392e+00 -1.24528028e-01 2.37090111e-01 ... 1.17021620e+00 -3.35550793e-02 1.66016370e-01] [ 8.36102143e-02 1.63325071e-01 1.28392950e-01 ... -1.65840864e-01 5.73568463e-01 1.11107576e+00] ... [ 2.07613134e+00 1.82499904e-02 -1.94331264e+00 ... -1.08056164e+00 6.75805092e-01 1.83208811e+00] [ 1.87858999e+00 -4.63661216e-02 1.06365621e+00 ... 3.61745626e-01 8.38381767e-01 8.74055088e-01] [-1.25409985e+00 -7.62355089e-01 1.01374888e+00 ... 
-1.38003552e+00 -1.54599175e-01 -1.02444720e+00]] [[-2.35954732e-01 1.81624043e+00 -1.44937956e+00 ... -3.82171959e-01 -1.17936587e+00 5.34940779e-01] [ 7.49502897e-01 -1.82404768e+00 9.74796891e-01 ... -8.64340842e-01 4.28074569e-01 5.01801074e-01] [-1.12465942e+00 -2.50032037e-01 -2.27902913e+00 ... 1.17708944e-01 -6.38418615e-01 9.83062923e-01] ... [-6.22120678e-01 1.96153474e+00 8.52496982e-01 ... -1.17280185e+00 -4.80624676e-01 4.60051656e-01] [ 8.59479249e-01 2.02954039e-02 2.08681703e-01 ... 1.05545890e+00 9.19257939e-01 -5.66499770e-01] [ 9.36640352e-02 1.37197599e-02 -4.23963308e-01 ... -6.00975633e-01 3.02896276e-02 1.38170287e-01]] ... [[-3.13680798e-01 -5.70291877e-01 -6.95022225e-01 ... -1.67165434e+00 3.17069441e-01 8.27540576e-01] [ 1.45326108e-01 1.09369084e-01 -1.30585873e+00 ... -5.50931096e-01 1.32080567e+00 -1.92635730e-02] [ 4.32821289e-02 -3.03160995e-02 -2.42560521e-01 ... -1.47872746e-01 2.04490924e+00 -1.84300709e+00] ... [-8.12578559e-01 1.28445280e+00 4.60221529e-01 ... 1.34315169e+00 -2.04022452e-01 -1.58559370e+00] [ 1.55989826e-01 -3.44407558e-01 -2.98917703e-02 ... 1.80889165e+00 -5.61834797e-02 1.19643879e+00] [ 2.41240144e-01 1.84120440e+00 5.77644348e-01 ... 1.50881863e+00 -1.46873474e+00 -1.73828614e+00]] [[-5.01763225e-01 -1.27009428e+00 3.56971115e-01 ... -5.82411587e-01 -2.94435889e-01 1.59395492e+00] [ 7.19871521e-01 5.40608585e-01 -1.61181951e+00 ... 3.55355978e-01 1.78189290e+00 -6.92036092e-01] [-6.95129752e-01 6.16742484e-02 1.67774796e+00 ... -2.89837778e-01 -2.87683576e-01 -2.85936594e-01] ... [-1.03956437e+00 3.30699056e-01 4.99727577e-01 ... 1.31057191e+00 2.32563782e+00 -2.84732044e-01] [-6.33441098e-03 3.66066724e-01 2.18281940e-01 ... 5.15444875e-01 -5.99232674e-01 2.64733404e-01] [-1.88614041e-01 -1.41793764e+00 -1.98302716e-01 ... 5.01456320e-01 4.94673729e-01 4.13170248e-01]] [[-4.33109850e-01 -2.05069184e-02 8.12116086e-01 ... 
6.36162877e-01 8.86530355e-02 -1.92825168e-01] [ 1.33895028e+00 9.19703916e-02 6.87900633e-02 ... 1.50202513e+00 8.14985871e-01 1.66620219e+00] [-2.31382802e-01 -1.09768569e+00 9.97317493e-01 ... -1.51716459e+00 -3.63068938e-01 -3.43902767e-01] ... [-1.53664172e+00 -7.28919566e-01 -5.00432909e-01 ... -9.55130041e-01 -5.33877075e-01 -2.57605594e-02] [-1.13452566e+00 -3.15092236e-01 1.74718702e+00 ... 1.17755823e-01 -5.24000704e-01 -1.84759355e+00] [-1.05973482e+00 8.38681221e-01 1.38228402e-01 ... -8.01938117e-01 1.29497814e+00 -2.33600996e-02]]] [[[-1.24116860e-01 2.82646990e+00 -1.79715306e-01 ... -6.57250226e-01 -3.52028579e-01 -5.29193103e-01] [ 5.31513393e-01 -1.82047367e+00 2.64017016e-01 ... -4.17031161e-03 2.75496632e-01 6.20372780e-02] [ 1.17698252e+00 -1.95081785e-01 3.47714037e-01 ... 2.16299474e-01 6.41111970e-01 -4.81896430e-01] ... [-2.13509750e+00 -7.75935292e-01 1.19212770e+00 ... -2.00269327e-01 4.49509293e-01 -1.03953445e+00] [-4.05832291e-01 1.34318173e+00 1.55305505e+00 ... -4.14028347e-01 2.65138149e-01 -9.74316299e-01] [ 5.05516112e-01 2.19950032e+00 1.16721237e+00 ... -2.67300844e-01 -9.84815836e-01 1.26386642e+00]] [[-2.65632659e-01 -7.42035508e-01 -3.72997403e-01 ... 2.76972167e-02 1.11501563e+00 -1.49477649e+00] [ 4.38948661e-01 1.03586897e-01 -6.07058108e-01 ... -4.59066421e-01 5.78289330e-01 1.84544444e+00] [-3.05903167e-01 -2.58542490e+00 5.59585154e-01 ... -6.88689649e-01 5.28519571e-01 -1.31372058e+00] ... [-1.24733424e+00 -1.19776976e+00 -2.50864655e-01 ... 5.51269114e-01 -1.68670440e+00 5.46304226e-01] [-4.19547588e-01 6.47293806e-01 -1.38268650e+00 ... 9.61519659e-01 5.98090589e-01 6.55693531e-01] [ 5.05559027e-01 -1.37608588e+00 1.73180133e-01 ... -1.26184928e+00 9.85593677e-01 1.58208832e-01]] [[-4.68838900e-01 2.28742862e+00 1.48969237e-02 ... 1.19924150e-01 -9.45626870e-02 -1.33593428e+00] [-2.92197156e+00 -3.34928274e-01 -4.92065310e-01 ... 
-2.05360293e+00 1.52509737e+00 -1.32268775e+00] [-7.37448215e-01 7.61305273e-01 -9.96520936e-01 ... 2.08226252e+00 -1.44677877e-01 4.50825125e-01] ... [ 1.33413064e+00 9.86350626e-02 2.13618708e+00 ... 7.20673919e-01 -4.87931609e-01 2.38185310e+00] [ 1.41657680e-01 -1.35390803e-01 -5.37012815e-01 ... 2.13244510e+00 5.84550142e-01 -8.84827971e-02] [-4.98478502e-01 -1.54145861e+00 3.16027254e-01 ... -9.22374487e-01 3.48665416e-01 8.04802328e-02]] ... [[ 9.22358453e-01 -1.38796315e-01 5.76180100e-01 ... 9.67807233e-01 -1.47622895e+00 1.06075406e+00] [ 7.47361928e-02 4.01634574e-01 2.99691081e-01 ... -7.91973770e-01 -1.87439695e-02 4.33059186e-01] [ 2.06224751e+00 5.96792638e-01 -1.41160473e-01 ... -7.19188213e-01 -1.57654762e+00 2.89727956e-01] ... [-1.17701292e+00 -1.49757433e+00 -1.68115407e-01 ... -7.01936901e-01 -1.76297414e+00 2.35593483e-01] [-2.12019753e+00 -7.11837530e-01 -9.98061121e-01 ... 2.20757306e-01 -6.70595646e-01 1.45228952e-01] [-4.08972532e-01 4.71383244e-01 -2.99436307e+00 ... -2.96353072e-01 5.80798447e-01 3.14088732e-01]] [[ 1.13884509e+00 -9.06621873e-01 1.22417629e-01 ... 1.91953206e+00 -4.05674130e-02 6.34951890e-01] [ 4.56830978e-01 -9.46309030e-01 -3.06961417e-01 ... -1.22727835e+00 5.76907992e-02 -5.15070498e-01] [-5.55093467e-01 1.37794629e-01 4.58011955e-01 ... 1.24902654e+00 7.86109030e-01 1.95260251e+00] ... [-2.14197206e+00 -3.45706075e-01 -4.16914552e-01 ... 5.80788910e-01 1.68462944e+00 3.45028043e-01] [ 2.32677054e+00 1.02450871e+00 9.54781234e-01 ... -5.48759758e-01 -1.42903042e+00 -3.85897517e-01] [ 5.27863204e-01 -1.98867321e+00 -2.45611250e-01 ... 1.09632778e+00 -1.63478762e-01 2.58767843e-01]] [[-4.33783859e-01 1.25964916e+00 2.92805374e-01 ... -1.55536509e+00 6.81801498e-01 -5.33733785e-01] [ 4.72892702e-01 -9.83745098e-01 -5.76501071e-01 ... -5.94656430e-02 -6.39589310e-01 -2.98113376e-01] [-8.42218995e-02 6.99959338e-01 1.30263627e+00 ... -4.58206058e-01 -2.41228282e-01 -9.27195549e-01] ... 
[ 8.00883830e-01 6.81703031e-01 7.82039762e-01 ... -1.52125582e-01 -1.82654321e+00 -3.16890895e-01] [-1.88711035e+00 2.49228120e-01 -1.20670116e+00 ... -1.53396219e-01 1.09022295e+00 1.37971842e+00] [-1.16777027e+00 5.52834868e-01 6.77093446e-01 ... -2.85320073e-01 -9.66352165e-01 -4.18940097e-01]]] [[[-8.14168215e-01 -1.50688076e+00 -2.46849373e-01 ... -1.20063388e+00 1.78215981e-01 1.19574510e-01] [-5.11868000e-01 -2.51695752e-01 -5.39613426e-01 ... 6.54019415e-01 4.81167078e-01 -3.46344441e-01] [ 1.57561135e+00 -5.92904687e-01 -4.11899507e-01 ... 3.33355379e+00 -1.05405438e+00 -1.36339245e-02] ... [-1.97976971e+00 9.29478347e-01 3.20688605e-01 ... -2.71644652e-01 5.88916063e-01 1.25213933e+00] [ 1.03271019e+00 -1.35842109e+00 -1.51807321e-02 ... 3.62438709e-02 1.91286373e+00 1.41257226e-01] [ 1.82077777e+00 -6.10528886e-01 -3.04210514e-01 ... -1.21484518e+00 -1.24215662e+00 7.09960938e-01]] [[ 1.14217214e-01 -2.15288937e-01 9.30856884e-01 ... -8.56602609e-01 7.06472337e-01 -9.32388663e-01] [-1.06035322e-02 -4.85999227e-01 1.96579903e-01 ... 2.89196670e-01 -6.06337488e-02 -2.21739933e-01] [-3.73295665e-01 -8.16678047e-01 -2.92980582e-01 ... 7.79880807e-02 -4.14909959e-01 1.10100150e-01] ... [ 2.11294389e+00 1.58550823e+00 -8.11345428e-02 ... -2.29020372e-01 -1.60076290e-01 8.44452500e-01] [ 2.02940583e+00 4.42257732e-01 -3.91410291e-01 ... 1.48302054e+00 -1.86132205e+00 5.81569299e-02] [-3.29954833e-01 -7.59531081e-01 7.44316697e-01 ... 1.52333307e+00 1.23458970e+00 -1.60830066e-01]] [[ 1.06691480e+00 -2.20538318e-01 4.70837504e-01 ... 1.49259949e+00 -2.34604761e-01 -3.78947139e-01] [ 3.52038927e-02 7.23970711e-01 5.19726217e-01 ... -7.12679267e-01 -1.34373653e+00 -1.37389350e+00] [-5.69474041e-01 5.20514965e-01 -1.25208533e+00 ... -4.67612475e-01 3.99172097e-01 2.15485430e+00] ... [-1.99749783e-01 3.84550124e-01 1.51545167e+00 ... -1.05164468e+00 -3.05267751e-01 -7.99723208e-01] [-2.56312162e-01 2.27544546e+00 1.84188259e+00 ... 
3.07602614e-01 7.56657839e-01 7.36142337e-01] [-6.90336823e-01 8.22424233e-01 -1.88573730e+00 ... 2.45586708e-01 -1.68684095e-01 -4.32212681e-01]] ... [[-2.22001195e+00 -7.13728964e-02 -1.51141465e-01 ... 1.16127539e+00 -2.71401852e-01 9.31363463e-01] [ 1.96595445e-01 -1.62084937e-01 -1.75801420e+00 ... 1.77662924e-01 -1.25923762e-02 -8.60807538e-01] [ 7.24100590e-01 -2.51618803e-01 1.30705297e+00 ... 1.65218997e+00 4.32569161e-02 -8.21910560e-01] ... [ 2.83169580e+00 2.24251688e-01 4.84773725e-01 ... -2.77169012e-02 -1.38566840e+00 1.30955911e+00] [ 7.26527154e-01 -1.11993849e+00 -7.86900520e-01 ... 7.83188522e-01 -2.34520823e-01 -1.51240706e-01] [ 1.32438064e+00 -8.94450784e-01 1.78865516e+00 ... -6.13308012e-01 -4.86299902e-01 2.36188352e-01]] [[-6.39480054e-01 -1.41815448e+00 7.24639535e-01 ... 2.55851954e-01 1.60126078e+00 -4.77635741e-01] [-5.90146005e-01 -9.12667334e-01 -1.43165338e+00 ... -3.56548637e-01 -7.16393888e-01 7.22086787e-01] [ 3.24397653e-01 -2.87472934e-01 1.56323922e+00 ... -1.91436386e+00 6.15598917e-01 -4.94901568e-01] ... [ 3.04662287e-01 1.62678099e+00 4.40839715e-02 ... 7.98129916e-01 1.38791358e+00 -2.91127592e-01] [ 1.99637413e+00 1.54408395e-01 -1.63557851e+00 ... 8.80822241e-01 1.55325636e-01 6.58087730e-01] [-2.54527777e-01 -3.46701533e-01 -3.77955258e-01 ... -4.26400602e-01 -1.80084932e+00 1.32083857e+00]] [[ 8.30722600e-02 9.76954281e-01 8.46440315e-01 ... 7.00207055e-01 9.30897832e-01 -6.16636515e-01] [ 8.85097623e-01 1.00361145e+00 -8.38128626e-01 ... 3.54911000e-01 6.33222684e-02 -9.90838706e-01] [-9.88297105e-01 1.07954061e+00 6.88245595e-01 ... 4.33981985e-01 1.40297067e+00 1.54254413e+00] ... [-2.88113415e-01 -3.16107571e-01 3.79420131e-01 ... -2.17032731e-01 1.77754238e-01 4.68302071e-01] [ 5.23018122e-01 -6.84516668e-01 -1.71422982e+00 ... -4.88920957e-01 -6.91383958e-01 -2.11284804e+00] [-6.91985905e-01 -1.32244587e+00 6.51153326e-01 ... -8.26089025e-01 2.31163216e+00 -2.18758225e+00]]]] ... 
[[[[-2.98022807e-01 -4.37921584e-01 9.91455495e-01 ... -4.34172869e-01 1.30497247e-01 -1.25343049e+00] [ 5.09379983e-01 -1.54562458e-01 1.38641632e+00 ... 6.77553892e-01 -1.47933531e+00 -1.48929894e+00] [-2.93767571e-01 -7.92958617e-01 4.53885198e-01 ... -2.07441807e+00 -7.01238334e-01 -1.23089123e+00] ... [ 5.95566154e-01 -9.36157763e-01 -2.35783529e+00 ... -1.68771100e+00 -2.66819745e-01 -1.10199320e+00] [-1.86823869e+00 9.08959091e-01 -3.49521786e-01 ... 1.00845784e-01 1.54189038e+00 1.14453447e+00] [-1.68781459e+00 1.19495535e+00 -1.20185959e+00 ... -1.78719234e+00 8.62530112e-01 1.00295937e+00]] [[-5.32358408e-01 1.66233075e+00 -1.71463579e-01 ... -5.58763463e-03 -3.30771863e-01 -1.51003516e+00] [ 3.47650826e-01 -1.56760657e+00 -4.61286426e-01 ... -7.61324108e-01 1.06309199e+00 6.97506189e-01] [ 4.50670838e-01 1.28585291e+00 -8.82708669e-01 ... 5.06523669e-01 1.65370896e-01 2.98057031e-02] ... [-9.90025401e-01 -1.15113235e+00 -3.49359512e-01 ... 5.48123062e-01 3.51218104e-01 9.46310341e-01] [ 1.51809704e+00 -6.96771681e-01 -4.79740381e-01 ... -2.69467425e+00 7.30077267e-01 1.50064075e+00] [ 1.73587143e+00 -8.80227089e-02 -6.36670291e-01 ... 2.71509141e-01 5.54391146e-01 -4.08929437e-01]] [[-7.84060538e-01 3.16636622e-01 1.41719684e-01 ... -3.93868387e-01 1.04822524e-01 5.92736363e-01] [-1.67268598e+00 -2.33994439e-01 -4.95155573e-01 ... -3.75978291e-01 -2.15091273e-01 -1.04426408e+00] [ 8.96140277e-01 3.50786179e-01 2.16597933e-02 ... 2.78413296e-01 -1.30933714e+00 -3.11082006e-01] ... [-5.13862908e-01 1.47121000e+00 1.18036282e+00 ... -1.86693108e+00 2.91743428e-01 -8.66261542e-01] [ 4.71305162e-01 -4.95586395e-01 -3.91531110e-01 ... -1.22726464e+00 1.55386738e-02 1.19704807e+00] [-6.91106856e-01 -2.53381443e+00 1.50803566e+00 ... 1.25516653e+00 6.94214046e-01 -1.18969154e+00]] ... [[ 9.35591519e-01 -2.07480121e+00 1.11836994e+00 ... -1.28477681e+00 2.00251818e-01 -1.02149343e+00] [-3.90377790e-02 -2.77496547e-01 8.81944299e-01 ... 
1.03459585e+00 8.20776761e-01 -6.40210152e-01] [-6.61968946e-01 1.39856935e-01 -2.40412012e-01 ... -1.93251121e+00 -3.02250296e-01 -1.11610470e-02] ... [-2.66046584e-01 1.28087246e+00 1.07049811e+00 ... 9.03105214e-02 3.92597914e-01 3.44882429e-01] [-5.76350033e-01 5.26230514e-01 1.62749350e+00 ... -7.40284443e-01 -9.77647424e-01 4.82813239e-01] [-2.12449265e+00 -8.72712970e-01 1.02011955e+00 ... 3.64555083e-02 -1.28704870e+00 -2.75542825e-01]] [[-5.59053600e-01 -2.00041938e+00 -9.19193566e-01 ... 2.09278584e+00 -1.09223342e+00 -1.51087785e+00] [ 1.42041877e-01 -1.03476584e-01 -6.82449818e-01 ... -1.31160331e+00 -8.41419756e-01 1.21287957e-01] [ 3.93020838e-01 7.35969603e-01 5.43347895e-01 ... 4.29195434e-01 -1.08148664e-01 -4.68469650e-01] ... [-7.69974589e-01 -5.92963934e-01 -2.60750413e-01 ... -1.96082771e+00 -1.16367233e+00 3.50827098e-01] [-1.23777711e+00 4.32817489e-01 -1.36324894e+00 ... 9.91008818e-01 7.88540065e-01 -5.63372314e-01] [ 6.00132227e-01 9.08829033e-01 1.39338100e+00 ... 1.86217442e-01 -2.44244456e-01 -1.03669822e+00]] [[-4.33671176e-01 -2.40455246e+00 2.50085652e-01 ... -9.62990880e-01 -5.97485185e-01 5.23988307e-01] [ 8.02855372e-01 2.92344630e-01 -1.16521478e+00 ... 1.19854844e+00 9.93564785e-01 -1.74082071e-01] [ 4.17881012e-01 1.42321229e-01 -5.04607819e-02 ... -3.37157816e-01 4.50370222e-01 -3.23434830e-01] ... [ 1.43248904e+00 -8.81983876e-01 -5.17810345e-01 ... -5.65172672e-01 2.16675162e-01 3.60995114e-01] [-3.89824182e-01 -3.03533852e-01 -2.04091877e-01 ... -1.46381021e+00 5.50602734e-01 -9.29795027e-01] [ 1.16866529e-01 1.00532961e+00 -6.58021793e-02 ... 9.69556332e-01 2.23707721e-01 -1.00432956e+00]]] [[[-1.29907668e+00 -8.38366091e-01 -8.71790707e-01 ... -9.66412008e-01 -3.27199459e-01 8.30997407e-01] [ 1.00380456e+00 2.58995295e+00 6.73040628e-01 ... 2.42689565e-01 2.33633256e+00 7.46833384e-01] [ 1.30162394e+00 -6.50072932e-01 1.85230255e-01 ... 1.61857568e-02 -4.58607674e-01 1.39075267e+00] ... 
[ 8.18875372e-01 1.26966786e+00 2.76705235e-01 ... -1.60977972e+00 -6.79495096e-01 1.13047510e-01] [-1.09949565e+00 -1.95525086e+00 1.17632353e+00 ... -4.29677963e-01 -1.47890478e-01 2.76875353e+00] [ 8.62452865e-01 -1.24924064e+00 -5.63190043e-01 ... -3.27157587e-01 1.17862058e+00 1.09058154e+00]] [[ 8.79135787e-01 1.52063206e-01 8.58873069e-01 ... -1.20327806e+00 5.57758212e-01 -1.42236957e-02] [-1.18401492e+00 3.26201990e-02 -8.42212796e-01 ... -3.73373091e-01 1.19146597e+00 4.14538860e-01] [-1.27066982e+00 -4.95876789e-01 6.82820439e-01 ... -5.83926260e-01 3.49596255e-02 -1.15158141e+00] ... [-1.85377687e-01 4.04922336e-01 -5.35038710e-01 ... -1.04902172e+00 2.73688674e-01 1.85748351e+00] [-1.35468352e+00 1.80679634e-01 3.67539018e-01 ... -7.49618649e-01 1.92774348e-02 2.04210091e+00] [-3.81668776e-01 -7.22765446e-01 -1.25558233e+00 ... -3.24473865e-02 -6.91917464e-02 1.07806432e+00]] [[-7.69001663e-01 -1.08087420e+00 -7.57987022e-01 ... 2.18380451e-01 -1.48854995e+00 -4.42063212e-01] [ 9.31606352e-01 1.56083810e+00 1.87296295e+00 ... -6.14856362e-01 6.70526475e-02 -3.38759392e-01] [-3.09740603e-02 4.00249064e-01 4.80894417e-01 ... -1.26360834e+00 6.61580980e-01 -9.01444614e-01] ... [ 1.12704730e+00 -1.15425217e+00 -9.19791460e-01 ... 7.86809027e-01 -6.39697313e-01 9.83101189e-01] [ 2.34882206e-01 -4.56114054e-01 -4.53340448e-02 ... -1.33700371e+00 5.69365978e-01 8.08248580e-01] [ 1.05208480e+00 -1.66018113e-01 7.25627616e-02 ... -1.05405605e+00 7.94016898e-01 -8.02375078e-01]] ... [[-7.58064866e-01 -1.68300033e+00 8.57324123e-01 ... 2.60757178e-01 1.00518143e+00 -1.07697916e+00] [-2.73793697e-01 -1.20196372e-01 -1.16170764e+00 ... -6.54827356e-01 3.16844285e-01 -9.99971986e-01] [ 2.78058076e+00 2.81777412e-01 6.75669253e-01 ... -1.55976963e+00 3.56842428e-01 1.88035309e+00] ... [-5.66086233e-01 -1.74978375e-01 -1.49246538e+00 ... 6.03094697e-01 2.57918417e-01 2.59785712e-01] [ 1.93791723e+00 -2.20127270e-01 -1.89538097e+00 ... 
-3.42788696e-01 7.66292393e-01 -9.92953062e-01] [ 7.89782166e-01 -3.50428224e-01 -1.57240164e+00 ... -7.72974372e-01 1.04199064e+00 5.10909021e-01]] [[-7.29679704e-01 1.31783366e+00 -1.38246322e+00 ... 1.06721807e+00 5.14504194e-01 -1.24649036e+00] [-4.21040617e-02 -1.59661770e+00 -3.21975052e-01 ... 1.05288303e+00 3.85368109e-01 1.14971125e+00] [-1.29866898e+00 3.56258482e-01 1.12272882e+00 ... 1.44057810e+00 2.05911255e+00 -9.35700119e-01] ... [-8.82039309e-01 -4.60991830e-01 5.06837785e-01 ... 2.84261882e-01 -9.94906425e-01 -1.96395844e-01] [-8.17688942e-01 -1.06654322e+00 -4.32391912e-01 ... 5.06893873e-01 -3.50278586e-01 -8.97900105e-01] [ 8.27033997e-01 -6.86331332e-01 -5.99274576e-01 ... -1.18595350e+00 -1.27541855e-01 -1.22883528e-01]] [[ 1.35418797e+00 -1.42129827e+00 -1.85124385e+00 ... -2.22057581e+00 -2.71481067e-01 4.62675560e-03] [-2.02733904e-01 1.75722122e+00 9.84245002e-01 ... -3.29120547e-01 1.02842736e+00 -2.15911174e+00] [-1.60174751e+00 2.50101662e+00 -9.61856246e-01 ... -3.59206289e-01 1.38798881e+00 -2.79285479e+00] ... [ 9.10472751e-01 2.26262640e-02 5.37381768e-01 ... -7.11537063e-01 -1.49129009e+00 -1.55243766e+00] [ 2.54243761e-01 6.54056370e-01 5.94736695e-01 ... 1.33867514e+00 -1.00093448e+00 -5.64533114e-01] [ 1.80451013e-02 -6.64657831e-01 1.24670434e+00 ... 3.55986714e-01 6.34747982e-01 -1.30971754e+00]]] [[[-2.60027528e+00 -3.65656406e-01 -7.04175413e-01 ... 1.06350966e-01 8.74921679e-01 -8.51155221e-01] [-1.12016666e+00 1.83378473e-01 1.62169790e+00 ... -1.70531487e+00 -3.18451554e-01 -1.08638322e+00] [ 6.19016588e-01 -1.71026027e+00 3.08831722e-01 ... 6.59804940e-01 6.99101150e-01 -2.00946450e+00] ... [ 3.56114507e-01 1.64253628e+00 -1.09033489e+00 ... -3.27219844e-01 1.98230815e+00 -4.13952172e-01] [ 2.37551427e+00 6.11293137e-01 8.62183332e-01 ... 1.33435786e-01 7.08774269e-01 7.02696919e-01] [ 9.85159516e-01 8.13335359e-01 4.47707530e-03 ... 
8.15285087e-01 -5.20267010e-01 -7.59400606e-01]] [[-3.63985598e-01 6.24140203e-01 -1.92756462e+00 ... 1.26749778e+00 1.07347715e+00 -1.37341052e-01] [-9.36119556e-02 1.51246524e+00 -1.52122962e+00 ... -1.53177488e+00 -1.30002046e+00 1.22684248e-01] [-9.04954433e-01 3.49175453e-01 -8.87836576e-01 ... -9.26574528e-01 -5.98261058e-01 -1.13926172e+00] ... [ 5.37449598e-01 5.68383038e-01 -7.45948032e-02 ... -1.22225213e+00 3.75832766e-01 -2.37843823e+00] [-1.29823595e-01 -6.52635694e-01 -3.07660550e-01 ... 8.57804775e-01 -3.47872585e-01 1.68438292e+00] [-9.47006941e-01 -2.14288175e-01 -2.20970333e-01 ... -4.05128092e-01 7.51355469e-01 -2.71156311e-01]] [[ 6.20243073e-01 1.02543867e+00 9.70465302e-01 ... -2.14146233e+00 -5.79918504e-01 -1.09757066e-01] [-4.85191226e-01 1.91610634e-01 -4.03084248e-01 ... 9.09267604e-01 1.89881567e-02 -1.22137733e-01] [-1.05512190e+00 -8.47329140e-01 -1.61743060e-01 ... -8.90527844e-01 -1.18376899e+00 -1.29666686e+00] ... [-5.98716438e-01 -6.81104124e-01 -6.86822116e-01 ... -4.20967042e-01 -1.20660806e+00 -3.32129151e-01] [ 5.04696965e-01 -5.62164307e-01 -1.39495194e+00 ... 5.04006803e-01 -1.86943805e+00 -9.17652249e-01] [ 9.47821498e-01 -1.26633859e+00 5.65066457e-01 ... -5.19035518e-01 -4.98171002e-02 1.01364829e-01]] ... [[ 7.39931703e-01 9.77891684e-01 4.16521668e-01 ... -8.37791800e-01 -3.12056988e-01 -2.27786139e-01] [-1.57208347e+00 -1.49035847e+00 4.81687307e-01 ... -4.59301561e-01 4.43849474e-01 -2.17466205e-01] [ 9.16465461e-01 2.02388123e-01 5.12905478e-01 ... 9.13525283e-01 -3.91446471e-01 8.28337610e-01] ... [-1.11615714e-02 1.52110791e+00 -2.03180265e+00 ... -4.02104646e-01 5.02852917e-01 2.48142928e-01] [-1.32168245e+00 7.58467495e-01 1.20576072e+00 ... 3.05984139e-01 -9.11062419e-01 4.50589418e-01] [ 8.37120190e-02 1.30711424e+00 -1.37806630e+00 ... -8.55073452e-01 -1.25982809e+00 6.54147804e-01]] [[-2.04667449e+00 1.90778494e-01 1.31114137e+00 ... 
-1.86336577e-01 -7.27118611e-01 8.09514403e-01] [-2.60416120e-01 1.03674543e+00 -1.54758537e+00 ... 4.69505340e-01 1.85306990e+00 1.14839144e-01] [ 5.81094444e-01 -5.65849125e-01 1.12157452e+00 ... -7.58479297e-01 4.64571834e-01 1.49207100e-01] ... [-1.34386718e+00 -7.60343671e-01 6.03659928e-01 ... -1.65037692e+00 -4.30349171e-01 3.73811662e-01] [-1.42549872e-01 1.01834440e+00 -1.65097225e+00 ... 7.45337844e-01 -1.55098081e-01 -3.76051515e-01] [ 3.50168079e-01 5.23304641e-01 -3.81865621e-01 ... -1.71545997e-01 1.64486814e+00 -3.81997943e-01]] [[-2.05234194e+00 -2.20760748e-01 1.05946279e+00 ... 1.12304997e+00 -1.02943611e+00 1.53332722e+00] [-3.40166897e-01 -1.23254728e+00 -2.02264524e+00 ... 1.92774093e+00 -9.00575519e-01 -1.48615432e+00] [ 5.58644950e-01 -6.51021779e-01 -3.66321988e-02 ... -2.63234019e-01 1.31470549e+00 -9.40513134e-01] ... [-1.75997078e+00 -1.09027696e+00 9.58682835e-01 ... -1.24432325e+00 -8.93096507e-01 -6.13603964e-02] [ 2.25217748e+00 3.03064734e-01 5.23494422e-01 ... 1.53627193e+00 4.72761095e-01 8.30399632e-01] [-1.17303443e+00 7.24310458e-01 -8.49663258e-01 ... -1.89699873e-01 1.30509114e+00 2.48133633e-02]]] [[[ 8.00437689e-01 5.25036097e-01 -4.84626181e-02 ... 2.61361986e-01 -8.40807498e-01 5.29642105e-01] [ 6.00103021e-01 -1.17542040e+00 -5.97584128e-01 ... -2.30992958e-02 -8.71424496e-01 -5.93794763e-01] [ 2.94146508e-01 2.38245800e-02 -9.92293596e-01 ... -1.30008686e+00 -1.70658052e+00 -8.06698382e-01] ... [ 5.58175027e-01 -2.82773435e-01 5.75177789e-01 ... -7.77914524e-01 4.43571538e-01 -7.57125139e-01] [ 1.48358655e+00 4.04104531e-01 9.56130996e-02 ... -6.89992905e-01 -2.78424531e-01 1.04821789e+00] [-3.47021490e-01 6.71098411e-01 1.01170860e-01 ... -6.04486108e-01 3.13349605e-01 2.04115957e-01]] [[-1.51196182e-01 -9.47842956e-01 2.79777974e-01 ... 7.74439335e-01 2.17367709e-01 -3.27466838e-02] [ 3.23472112e-01 -2.44953990e+00 7.87411258e-02 ... 
9.34198916e-01 2.10038900e-01 -5.16940117e-01] [-2.40613908e-01 4.21588838e-01 2.75288653e-02 ... 1.45913815e+00 3.95039409e-01 7.83042431e-01] ... [-2.78471529e-01 6.28041267e-01 -9.13837850e-01 ... -6.85515165e-01 -3.40745561e-02 -2.71021396e-01] [-1.77647436e+00 1.31344929e-01 -3.79916161e-01 ... -7.48444855e-01 -6.26288593e-01 4.73471612e-01] [ 2.98136473e-01 1.35503292e-01 5.91538567e-03 ... -7.38971904e-02 3.47754049e+00 -1.40916646e+00]] [[ 8.79931331e-01 8.65193009e-01 -5.95467165e-02 ... 6.13780200e-01 2.44274229e-01 1.99416363e+00] [ 6.06564224e-01 -5.28275549e-01 -7.06879616e-01 ... -4.84972000e-01 -8.91383588e-01 5.05618632e-01] [ 7.92198658e-01 1.74228513e+00 3.02440435e-01 ... 9.91261899e-01 -2.52543151e-01 -1.63449079e-01] ... [ 1.24752438e+00 -2.75536180e-01 1.04424524e+00 ... -8.71739328e-01 3.44581813e-01 1.20611417e+00] [ 8.46463382e-01 1.12729788e+00 -8.03124964e-01 ... 1.12898350e-01 -3.38771671e-01 1.93662509e-01] [ 1.46258152e+00 1.01583266e+00 2.14924216e-01 ... -4.27200019e-01 -3.88933659e-01 -1.00303340e+00]] ... [[-1.06799543e+00 2.70313978e+00 -1.71777204e-01 ... 7.69911408e-01 -8.05183291e-01 -5.67712300e-02] [-3.86388868e-01 1.08411407e+00 9.69985843e-01 ... -2.52393633e-01 -6.58168256e-01 4.50226068e-01] [ 7.12075114e-01 -3.41078669e-01 -1.34107307e-01 ... -1.01768446e+00 -6.97395921e-01 2.32323909e+00] ... [-1.13750196e+00 -1.26586831e+00 -9.75549579e-01 ... -2.31594201e-02 1.38442922e+00 -9.36694086e-01] [ 4.32753831e-01 -5.83606303e-01 1.25825775e+00 ... 8.32049966e-01 5.30574799e-01 9.56354290e-02] [ 1.03877068e+00 -8.09652925e-01 -2.12335944e+00 ... 1.09020996e+00 8.43875229e-01 -5.23820341e-01]] [[-1.35468948e+00 -1.25766969e+00 -1.27937809e-01 ... -1.43420875e+00 7.50596583e-01 -4.41007823e-01] [-1.73254263e+00 2.73725223e-02 -1.15709221e+00 ... 1.95159340e+00 9.69229162e-01 3.66003849e-02] [ 1.87412512e+00 6.85349703e-01 8.10647309e-01 ... -1.07990479e+00 -7.91019738e-01 -7.15677679e-01] ... 
[ 1.39980328e+00 -1.10642254e+00 -5.60540676e-01 ... 1.02942419e+00 8.60148311e-01 1.43288642e-01] [ 1.09416664e+00 -1.09416151e+00 2.20966983e+00 ... 3.39603752e-01 1.21665657e+00 3.25883955e-01] [-7.01728046e-01 -8.00630033e-01 6.10486627e-01 ... 2.18260169e+00 3.86856616e-01 2.90736914e-01]] [[-1.71421781e-01 7.38686860e-01 -3.26269716e-01 ... 9.41861510e-01 -8.53098571e-01 -2.45984927e-01] [-1.07436053e-01 6.92368507e-01 2.33445001e+00 ... 1.54963887e+00 -6.72317624e-01 -7.74687171e-01] [-2.85398185e-01 1.41923904e+00 -4.51662093e-01 ... 6.56603992e-01 -1.02886117e+00 -6.86729252e-01] ... [-1.96095988e-01 3.09665710e-01 -3.24034899e-01 ... -1.01555896e+00 -2.47274905e-01 3.76613289e-01] [ 2.53346771e-01 5.52407265e-01 9.84446332e-02 ... -8.44879627e-01 -1.28823817e+00 -8.16190183e-01] [-1.48202145e+00 -3.30662340e-01 1.77384000e-02 ... -1.70261189e-01 -6.82551861e-01 4.61606950e-01]]] [[[-9.77768898e-01 -7.79628605e-02 -8.33356455e-02 ... 5.45129061e-01 6.22256212e-02 -1.20544994e+00] [ 2.15597153e-01 3.99776936e-01 -9.48047400e-01 ... -1.76330432e-01 -3.31601977e-01 5.20622671e-01] [-2.68283665e-01 -1.32137537e+00 -9.06073868e-01 ... 1.72378433e+00 2.43955538e-01 -1.33618057e+00] ... [-1.44904494e+00 -8.34957600e-01 8.40586603e-01 ... -9.38709199e-01 5.60631931e-01 -5.80559731e-01] [ 1.86635941e-01 -1.42859206e-01 5.73098600e-01 ... 1.49101466e-01 3.39793950e-01 1.77677882e+00] [ 1.33890617e+00 6.42937303e-01 1.29877651e+00 ... 1.18177116e+00 -3.28600518e-02 -5.99836349e-01]] [[ 7.48792663e-02 3.07571381e-01 -1.62253022e-01 ... 3.28203678e-01 -6.45616233e-01 1.13870168e+00] [ 3.14633965e-01 -2.45066836e-01 -5.89075387e-01 ... -1.14388323e+00 -1.69464922e+00 5.35624087e-01] [ 7.57525444e-01 4.56502348e-01 -1.47808719e+00 ... -4.55217779e-01 -8.09759259e-01 -3.93807814e-02] ... [ 1.27571750e+00 5.66421039e-02 4.07348461e-02 ... 5.18677890e-01 -1.71423368e-02 9.44974065e-01] [-1.29420960e+00 -5.62043071e-01 -5.19358873e-01 ... 
-1.51345301e+00 2.39994764e-01 -8.10051918e-01] [ 1.56976807e+00 -3.46249133e-01 4.84354459e-02 ... 8.29032838e-01 1.29122898e-01 -8.58694255e-01]] [[-1.15287495e+00 1.74855262e-01 -2.26433918e-01 ... 7.09241092e-01 -1.39526737e+00 1.48504400e+00] [ 2.04147410e+00 6.46725953e-01 3.42096873e-02 ... 9.88503397e-01 -2.96669513e-01 7.34130323e-01] [ 3.08275282e-01 2.33407593e+00 -1.16015682e-02 ... 8.66379678e-01 5.17721057e-01 1.01094413e+00] ... [-2.23230982e+00 7.53706396e-01 6.36275351e-01 ... 6.54808432e-02 -1.22191787e+00 -6.72495008e-01] [-7.71280706e-01 1.95143664e+00 7.39431977e-01 ... -9.82768655e-01 9.50213075e-02 9.04838815e-02] [ 6.63554490e-01 1.55538285e+00 -1.53236434e-01 ... -9.75603878e-01 7.76429474e-01 1.21928930e+00]] ... [[-3.47120464e-01 -2.99846470e-01 -6.94571733e-01 ... -8.02173018e-01 -6.24874949e-01 1.74759769e+00] [ 5.27079105e-01 4.21061307e-01 -4.17835176e-01 ... 1.95755470e+00 -2.08588287e-01 -5.91997743e-01] [-1.58328402e+00 1.37547815e+00 2.19442868e+00 ... -9.46928382e-01 1.78156942e-01 2.30815127e-01] ... [-7.00185299e-01 -1.06625594e-01 -8.72405529e-01 ... -6.41748607e-01 1.29957795e+00 -8.63687813e-01] [-1.38626695e+00 6.42633080e-01 1.36803579e+00 ... 2.35967189e-02 2.13106990e-01 1.43428240e-02] [ 1.11089802e+00 -1.52885044e+00 1.93324041e+00 ... -1.49660182e+00 -1.34595826e-01 3.08618128e-01]] [[ 9.07910466e-02 -8.33659153e-03 -5.13959587e-01 ... -7.59984612e-01 -8.33205879e-01 7.26915300e-02] [-1.20338392e+00 -1.56098634e-01 -6.07800007e-01 ... 1.93746984e+00 5.14362454e-01 -5.55454731e-01] [ 3.10115367e-01 3.53746936e-02 -3.04432273e-01 ... 3.71614993e-01 1.00053573e+00 -9.09828067e-01] ... [-5.78682087e-02 -3.17700326e-01 -1.60441601e+00 ... 8.81102264e-01 1.00859761e+00 -7.78702438e-01] [ 1.45411301e+00 1.37792420e+00 -8.26324165e-01 ... -1.21560208e-01 -7.93296158e-01 -2.49390531e+00] [-8.30347091e-02 2.88723797e-01 -5.28561294e-01 ... 
-1.84394911e-01 -1.78898585e+00 1.30037034e+00]] [[-1.39383882e-01 -5.51375523e-02 -5.89215338e-01 ... 4.23129201e-02 -5.10981381e-01 4.17435944e-01] [-2.63039976e-01 1.17103612e+00 1.30254126e+00 ... 1.51888406e+00 -1.06141603e+00 4.88603145e-01] [-1.58941478e-01 2.37744308e+00 1.26308632e+00 ... 1.18913114e+00 1.78611219e+00 1.51082969e+00] ... [-1.43089128e+00 7.79519677e-01 3.98005247e-01 ... -1.28794742e+00 -2.35020518e+00 -2.33823609e+00] [ 1.81038558e+00 5.09414673e-01 4.43085104e-01 ... 7.80430317e-01 1.24773407e+00 4.23286051e-01] [ 1.27116740e+00 5.63850462e-01 5.03640287e-02 ... 1.43398130e+00 -5.90906560e-01 3.76710653e-01]]] [[[-2.35047564e-02 -5.42704821e-01 2.02147985e+00 ... 7.57715821e-01 -1.89290702e-01 7.43631065e-01] [-9.28776562e-01 1.11178450e-01 -2.31176335e-02 ... -4.10123989e-02 1.26368785e+00 1.51136827e+00] [-1.04324007e+00 -4.59913135e-01 2.58559346e-01 ... -3.66556853e-01 1.32031124e-02 -2.57871468e-02] ... [-1.88264340e-01 -9.11137998e-01 8.22786450e-01 ... 3.22416008e-01 -2.87851244e-01 -1.55281651e+00] [-2.22533989e+00 -8.11492383e-01 2.04491520e+00 ... 9.88354862e-01 -2.19335151e+00 -5.15920460e-01] [ 9.52675700e-01 1.32227808e-01 4.34082597e-01 ... -2.80028582e-01 8.73352349e-01 1.23878276e+00]] [[-1.44714475e-01 5.07059455e-01 3.25898170e-01 ... -1.73964813e-01 8.21662247e-01 4.62944686e-01] [-9.85013366e-01 -9.39087927e-01 5.14764309e-01 ... 9.51535046e-01 -1.58769321e+00 -1.16017377e+00] [-3.12431157e-01 -1.06892347e+00 -3.13201725e-01 ... 1.38263786e+00 -2.68862867e+00 2.29014456e-01] ... [ 1.57836926e+00 -1.33141708e+00 5.24290025e-01 ... -7.61606157e-01 -1.06692803e+00 3.84294540e-01] [ 2.40168795e-01 6.57089233e-01 -2.45657116e-01 ... 6.59015775e-02 2.00557546e-03 -4.13870484e-01] [ 3.47789377e-01 1.79946557e-01 3.06170702e-01 ... 4.35041904e-01 5.87109089e-01 -9.95096639e-02]] [[-1.56796360e+00 1.31683898e+00 -4.26486552e-01 ... 
-1.28135002e+00 -1.37306023e+00 -9.28267181e-01] [-7.04745412e-01 -8.17218423e-01 1.34695840e+00 ... -7.58046627e-01 1.74596620e+00 1.01794267e+00] [ 4.72009927e-01 8.56180251e-01 -4.21279781e-02 ... -9.09325182e-01 -6.24584518e-02 -7.50655591e-01] ... [-7.52613068e-01 1.37434304e+00 7.36444414e-01 ... 1.70139372e+00 -6.40391707e-01 -1.95861664e-02] [ 5.44637561e-01 5.12215257e-01 -2.32753575e-01 ... -1.41858250e-01 -5.49509265e-02 -3.55128109e-01] [ 1.46103120e+00 -7.60770380e-01 -9.04588223e-01 ... 7.09398150e-01 7.43809402e-01 8.45852554e-01]] ... [[ 5.63162148e-01 -6.27650261e-01 -3.56812745e-01 ... 2.96339214e-01 -1.73723447e+00 2.56353617e-01] [ 7.91329801e-01 3.32537293e-02 1.07513867e-01 ... -8.34481269e-02 1.42862409e-01 -1.11201978e+00] [-1.01495497e-01 4.68588203e-01 5.92436552e-01 ... -8.56682509e-02 -2.28097975e-01 1.14442599e+00] ... [ 1.12831330e+00 -5.72843909e-01 1.84669316e+00 ... 1.31053746e+00 -9.34779763e-01 -9.17160749e-01] [-2.41616499e-02 6.26867533e-01 -5.27447939e-01 ... -4.97446150e-01 2.70100522e+00 7.88186491e-01] [-2.69189984e-01 -3.22607197e-02 -1.42877352e+00 ... 1.11971295e+00 -9.08179700e-01 -4.31323260e-01]] [[-1.03758478e+00 -6.50868475e-01 1.81473628e-01 ... -4.88055348e-01 6.10314250e-01 1.59439182e+00] [ 7.67194867e-01 -4.60292011e-01 -1.40030190e-01 ... 4.70112920e-01 5.64885378e-01 -1.66554034e+00] [ 9.95675266e-01 5.85566878e-01 -4.43761587e-01 ... 6.45466685e-01 -1.63578403e+00 1.55268562e+00] ... [ 1.38054621e+00 -7.77466595e-02 1.12900865e+00 ... -9.40999165e-02 -2.24127844e-01 9.59949970e-01] [ 1.71565986e+00 8.28114927e-01 3.20538431e-01 ... -1.37278128e+00 1.37853575e+00 1.57772517e+00] [-1.81687400e-01 -2.36757183e+00 6.17390156e-01 ... 7.71330655e-01 1.85928985e-01 1.79002523e+00]] [[-4.24574554e-01 1.78354192e+00 1.18101820e-01 ... -2.77137136e+00 6.26321733e-01 -1.32514584e+00] [ 1.55264318e+00 1.00065243e+00 -3.08142275e-01 ... 
-1.38898301e+00 8.88456285e-01 1.26951194e+00] [ 2.10358524e+00 -6.55971587e-01 6.80466413e-01 ... -1.37663639e+00 -5.35441875e-01 1.88196516e+00] ... [-1.33993089e+00 -5.38655818e-01 -8.88684615e-02 ... -2.09928393e+00 -6.82016134e-01 1.36601830e+00] [ 8.23677838e-01 -3.54379356e-01 -7.55394816e-01 ... -1.32001996e+00 1.57119727e+00 -3.46263796e-01] [-1.77151144e+00 -1.23556209e+00 -1.19882858e+00 ... -1.21722317e+00 -7.09971070e-01 1.10798374e-01]]]] [[[[ 5.10304496e-02 -1.17188954e+00 -5.05951107e-01 ... -1.29570827e-01 2.26620942e-01 1.08342409e+00] [-1.34385228e-01 2.32448030e+00 7.70157397e-01 ... 1.17125988e+00 -4.72838670e-01 -5.68784952e-01] [ 2.17751122e+00 -1.71304703e+00 4.47307795e-01 ... 7.14793861e-01 8.90373290e-01 -1.12302206e-03] ... [ 1.14842856e+00 1.95512503e-01 7.54491210e-01 ... 1.89592764e-01 -2.18040776e-02 -1.73552215e+00] [-1.75744593e+00 -2.56810236e+00 8.29037726e-01 ... -1.30411851e+00 -8.99531007e-01 -1.49974501e+00] [ 8.83812070e-01 3.40120733e-01 -1.25535262e+00 ... 1.35016695e-01 5.02391979e-02 -1.45054662e+00]] [[ 2.11319536e-01 9.65819538e-01 -6.98299527e-01 ... 7.38326848e-01 -1.17583990e+00 6.37149632e-01] [ 1.01193106e+00 9.36032772e-01 -7.94887319e-02 ... 1.30310953e-01 -2.83174217e-01 5.47102034e-01] [-3.32226664e-01 -8.32932711e-01 2.10288072e+00 ... 3.09035122e-01 5.18182218e-01 8.50828946e-01] ... [-5.39365768e-01 -7.87340760e-01 3.21422815e-01 ... -2.70066112e-01 -7.02788830e-01 -8.32269609e-01] [-5.69183648e-01 -7.96778262e-01 -3.30427259e-01 ... 2.94862419e-01 -8.57721031e-01 8.01495135e-01] [-1.20461988e+00 -8.64214182e-01 7.09435523e-01 ... 3.21220112e+00 2.20658231e+00 2.72084862e-01]] [[ 2.56390661e-01 -1.53609097e-01 -1.59481359e+00 ... 1.70024112e-01 -1.72250554e-01 -1.74148405e+00] [-3.56208049e-02 1.47171950e+00 -1.43593419e+00 ... 3.28702331e-01 5.02274156e-01 4.19638939e-02] [ 3.05491418e-01 -5.44941664e-01 1.77120554e+00 ... -1.25778899e-01 2.12931067e-01 -1.21789968e+00] ... 
[-2.40921751e-01 9.94811177e-01 2.17571807e+00 ... 1.00841790e-01 -9.51620281e-01 6.82138681e-01] [-1.47646618e+00 9.02083874e-01 3.93458813e-01 ... 1.35783184e+00 -6.70377314e-02 6.46039665e-01] [ 2.45421231e-01 -1.35444319e+00 -1.89716911e+00 ... 9.14906263e-01 -9.69018161e-01 -9.96266425e-01]] ... [[-6.01083755e-01 8.78339589e-01 1.93681264e+00 ... -1.21388876e+00 3.74708593e-01 1.45102870e+00] [-1.02719009e+00 -5.72324872e-01 1.53157783e+00 ... -1.25035691e+00 6.51333392e-01 4.46078539e-01] [ 2.23394603e-01 -1.55047214e+00 1.29770482e+00 ... 1.77798271e+00 2.83100456e-01 7.91553617e-01] ... [ 9.45012942e-02 -1.54002771e-01 -6.34337306e-01 ... -8.26404452e-01 1.09224297e-01 7.70764286e-03] [-4.56405759e-01 -8.12246501e-01 3.71490061e-01 ... -2.24411517e-01 -5.83231807e-01 3.91929179e-01] [ 1.39323103e+00 -8.90805185e-01 -1.14094043e+00 ... 3.14323246e-01 8.20892118e-03 4.84548002e-01]] [[ 4.92903054e-01 -1.21108681e-01 9.58920419e-02 ... -1.72349378e-01 -8.80408049e-01 1.92149270e+00] [ 1.10345893e-01 -2.43456308e-02 2.33047709e-01 ... -4.30961460e-01 4.21965122e-01 6.88259602e-01] [-2.41407895e+00 -1.00145590e+00 -1.26688528e+00 ... 1.02254379e+00 1.07131708e+00 5.22094250e-01] ... [-2.12843448e-01 1.39645743e+00 -2.34677628e-01 ... 2.12667704e+00 6.71097696e-01 2.86892223e+00] [ 1.42674351e+00 1.19153833e+00 6.94956183e-01 ... 2.22300744e+00 3.56457323e-01 -1.19430733e+00] [-6.48975790e-01 -4.27290946e-01 -7.23500431e-01 ... -4.30388451e-01 1.32298738e-01 -5.27345359e-01]] [[ 2.48119831e-01 7.98853099e-01 7.06610739e-01 ... -2.89776981e-01 -1.79913652e+00 -5.56201458e-01] [ 8.59657586e-01 -1.65758505e-01 -4.37232822e-01 ... 2.95133796e-02 -1.62313795e+00 4.75052059e-01] [ 6.26549721e-01 -1.80315900e+00 1.13883734e+00 ... -1.92106879e+00 6.54789329e-01 -5.37348509e-01] ... [-6.36574984e-01 -1.08781886e+00 -1.68502522e+00 ... -2.06108660e-01 -3.72608960e-01 -1.09696245e+00] [-1.03903413e+00 1.42287767e+00 2.32028011e-02 ... 
-8.64781260e-01 -1.45135236e+00 -8.85721505e-01] [-3.37012202e-01 1.12872648e+00 -4.46239650e-01 ... 7.26883888e-01 -1.10299754e+00 -2.17369843e+00]]] [[[-6.51304841e-01 2.83224493e-01 1.15609932e+00 ... 3.15294713e-01 -5.96822143e-01 -4.29417402e-01] [ 9.14697349e-01 -7.44813263e-01 -7.97436237e-01 ... 3.67876552e-02 -9.34964299e-01 4.09475088e-01] [-1.17301118e+00 6.40033185e-01 -1.27649748e+00 ... -2.36623859e+00 1.27776730e+00 -4.66636032e-01] ... [-3.32251698e-01 7.29708746e-02 4.98140723e-01 ... -5.06591082e-01 2.29875708e+00 -2.66604185e+00] [ 2.55095291e+00 -1.13464379e+00 6.79660499e-01 ... -2.17660010e-01 -2.80457735e-01 9.47552800e-01] [ 3.48576248e-01 -8.27449337e-02 -2.23598033e-01 ... 7.39052892e-01 -5.26745059e-02 -2.06051064e+00]] [[ 1.88887835e-01 -2.70973831e-01 6.09943807e-01 ... -1.76639485e+00 -6.67788014e-02 1.54585361e-01] [ 1.41459060e+00 1.94229555e+00 5.37737250e-01 ... -1.75918853e+00 -3.07487398e-01 -3.18525136e-01] [ 1.20179534e+00 -8.81579220e-01 1.06364560e+00 ... -3.99452001e-02 -2.01443106e-01 -4.85639304e-01] ... [-7.73452520e-01 1.95998251e+00 8.97924840e-01 ... 8.44312847e-01 1.53202510e+00 -8.02861273e-01] [ 2.02382970e+00 -7.32421935e-01 1.19988477e+00 ... -2.29342774e-01 2.70453048e+00 -1.59917402e+00] [-1.49486685e+00 -2.64620721e-01 1.58377975e-01 ... 1.34005022e+00 -1.76550463e-01 -1.60233259e-01]] [[-6.32328629e-01 2.52699852e+00 -4.81998920e-01 ... -2.77362800e+00 8.56816232e-01 -2.52537012e-01] [ 9.63567138e-01 -8.27615023e-01 7.25930393e-01 ... 4.06930923e-01 1.49666190e+00 -5.42381644e-01] [-3.68610352e-01 2.59869003e+00 2.56516747e-02 ... -6.75501049e-01 9.96334374e-01 7.33941376e-01] ... [-5.47166646e-01 3.24545562e-01 4.12256151e-01 ... 1.29475629e+00 2.02154651e-01 -8.98055792e-01] [-1.07923381e-01 -1.63239300e+00 -1.09994674e+00 ... -1.36560214e+00 2.77510993e-02 2.04177403e+00] [ 1.45771492e+00 3.67926478e-01 -9.55202878e-01 ... -1.51229095e+00 1.53746700e+00 -2.65676886e-01]] ... 
[[-1.99571341e-01 5.30568957e-01 -1.41337931e+00 ... -7.64672384e-02 9.03310239e-01 1.96113169e-01] [ 1.51173544e+00 -1.16243267e+00 -1.09989798e+00 ... -1.47422838e+00 1.32999063e+00 -6.98963523e-01] [-9.38338935e-01 -9.05075297e-03 -1.11521769e+00 ... -2.44238734e+00 4.21763092e-01 1.54541838e+00] ... [-2.79266143e+00 -5.29998004e-01 -3.06963295e-01 ... -7.72101432e-02 9.42228615e-01 2.85187632e-01] [ 8.27596068e-01 5.79158545e-01 4.28210109e-01 ... 1.34563911e+00 1.08061099e+00 -5.47848344e-01] [ 2.68521369e-01 1.15483451e+00 4.49803859e-01 ... 6.64329827e-02 -1.59341121e+00 1.98366269e-01]] [[ 2.10008189e-01 -1.64996707e+00 1.27456510e+00 ... -8.35051417e-01 -1.11789727e+00 1.03969800e+00] [-1.85816121e+00 -1.56421185e-01 1.27162528e+00 ... -6.12215161e-01 -5.40713251e-01 1.02459200e-01] [-1.09519351e+00 3.03748161e-01 1.05506527e+00 ... -6.07032701e-02 -5.35866976e-01 4.19755071e-01] ... [ 7.14988708e-02 1.91461027e-01 -4.69611079e-01 ... -1.91694632e-01 1.10829175e+00 1.76301211e-01] [-2.15103924e-02 -1.98214543e+00 -4.14362848e-01 ... -1.06668150e+00 -8.79959583e-01 -2.01775849e-01] [-1.83617860e-01 -9.51694310e-01 -1.41568398e+00 ... -7.14037001e-01 7.20410883e-01 -4.79581743e-01]] [[ 1.12141550e+00 7.44460464e-01 9.35607910e-01 ... 3.61089071e-04 -2.35179931e-01 -4.28773284e-01] [ 1.80283058e+00 1.87409818e+00 -1.23610604e+00 ... 7.49695659e-01 -1.01130641e+00 1.44622934e+00] [-3.30996245e-01 2.47884321e+00 -4.36572582e-01 ... -1.38731694e+00 1.54275131e+00 -9.69251394e-01] ... [ 8.71978879e-01 1.09628618e+00 -2.21244645e+00 ... -5.99459350e-01 -4.34037805e-01 -1.74396634e-01] [-1.35995731e-01 5.71462810e-01 -3.85622919e-01 ... 1.58485568e+00 4.35842901e-01 -1.41410112e+00] [ 1.05019629e+00 5.53177148e-02 -1.97370410e+00 ... 1.85937703e-01 3.94698471e-01 7.85120666e-01]]] [[[ 5.85409820e-01 5.40915430e-01 -2.35498890e-01 ... 5.45001626e-01 1.47679031e-01 1.62059999e+00] [ 1.86394942e+00 -5.95545411e-01 -1.52789342e+00 ... 
1.08239874e-01 -5.62755577e-03 -1.67766109e-01] [-2.19436383e+00 -1.84422195e+00 6.13333941e-01 ... -3.04783463e-01 1.14626253e+00 -8.72351110e-01] ... [ 6.12181008e-01 -1.39728284e+00 1.42313218e+00 ... -1.99986741e-01 -8.99502560e-02 -5.64875066e-01] [-2.87409246e-01 1.36066806e+00 -5.21139145e-01 ... -5.98188043e-01 -1.00442398e+00 -9.91618574e-01] [ 1.95116460e+00 -1.37216234e+00 -2.11294770e+00 ... 1.65096119e-01 -1.40293908e+00 1.31965578e-01]] [[ 8.42331529e-01 7.16434538e-01 4.50551540e-01 ... 1.33022213e+00 -1.93084610e+00 1.08268917e+00] [-1.92439884e-01 1.38956130e+00 -6.15254700e-01 ... -7.72727191e-01 -1.15011004e-03 -3.87185961e-01] [ 8.08907747e-01 1.33000898e+00 1.16405141e+00 ... 4.88109678e-01 -3.18373322e-01 -5.43524742e-01] ... [ 9.07229632e-02 -8.67570639e-01 1.95616260e-01 ... 1.29951134e-01 1.09702516e+00 8.26948062e-02] [ 3.72993439e-01 9.75838959e-01 -8.77439696e-03 ... 1.51137382e-01 -1.81607425e+00 7.70227551e-01] [-2.40863776e+00 -7.17144907e-02 1.42891371e+00 ... -5.55753708e-01 6.52419984e-01 -1.34256041e+00]] [[-1.62321162e+00 1.96106207e+00 5.73335111e-01 ... -1.30650342e+00 -1.63381994e-01 -5.55523992e-01] [-1.40324783e+00 -1.46059811e+00 -1.83150005e-02 ... -1.67264193e-01 3.79305571e-01 -3.50221336e-01] [-4.20464277e-01 1.17354238e+00 6.69868290e-01 ... 1.32696688e-01 -5.78683197e-01 1.23605919e+00] ... [-2.04609799e+00 -1.72006512e+00 -5.60611606e-01 ... 3.44797224e-01 1.89472067e+00 -8.11928332e-01] [-3.64800721e-01 -7.45073378e-01 -4.65743363e-01 ... -8.67133588e-02 -9.01303470e-01 1.67318487e+00] [ 4.60695237e-01 -5.75002432e-01 -1.01301503e+00 ... 1.68020338e-01 4.94613796e-01 9.65670273e-02]] ... [[-1.61054239e-01 -1.80724907e+00 -4.64241683e-01 ... -1.03714275e+00 -1.35132122e+00 -2.06839442e+00] [ 2.93531474e-02 -1.59992790e+00 1.63591075e+00 ... -2.33220148e+00 -8.24843645e-02 -1.13454629e-02] [ 1.42869198e+00 -1.06527798e-01 7.30277896e-01 ... 1.23539412e+00 4.55949634e-01 1.26477396e+00] ... 
[-6.57714128e-01 -4.47294340e-02 -3.89293558e-03 ... -5.93477190e-01 2.45367244e-01 5.61276317e-01] [-3.19173872e-01 -1.29607177e+00 -2.18036190e-01 ... -3.74800302e-02 1.73352015e+00 2.55899286e+00] [ 1.53192091e+00 5.64440727e-01 1.19625354e+00 ... -2.86080098e+00 -1.09460163e+00 1.13579643e+00]] [[-8.15285295e-02 1.08749129e-01 -8.96129549e-01 ... 3.12698394e-01 2.22038388e-01 1.14600420e+00] [ 1.01629488e-01 3.70001137e-01 -2.30915442e-01 ... 1.41660821e+00 4.48860735e-01 -3.48375112e-01] [-5.44085145e-01 1.19233537e+00 -6.56487644e-01 ... 2.40771696e-01 -8.38946462e-01 5.26163936e-01] ... [-7.93284476e-01 -7.21366167e-01 -9.79975641e-01 ... -8.95451069e-01 1.10111022e+00 -1.13623071e+00] [-1.30308795e+00 2.15228009e+00 1.35749698e+00 ... 1.93652570e-01 1.80541098e-01 -5.19523382e-01] [ 1.06059241e+00 1.62049401e+00 -1.08911979e+00 ... -7.85243869e-01 1.66393447e+00 2.13997439e-01]] [[-1.82456863e+00 3.53723884e-01 1.35841346e+00 ... -3.93985122e-01 2.37239450e-01 3.83048147e-01] [-2.92710841e-01 6.67736053e-01 4.56791252e-01 ... 3.49939376e-01 -2.99538803e+00 1.16799796e+00] [-5.48225045e-01 2.58066678e+00 7.68309474e-01 ... -7.18403518e-01 1.28971711e-01 1.85158145e+00] ... [ 1.95097780e+00 6.15424454e-01 -1.58712983e+00 ... -7.04671443e-02 -6.60599768e-01 -2.50122607e-01] [-9.35625359e-02 7.77940691e-01 1.33810627e+00 ... -3.23354155e-01 -8.47992897e-01 -1.02620935e+00] [-2.75640607e-01 -2.53349006e-01 -5.46449780e-01 ... -7.44784057e-01 -1.20031118e+00 -1.06650865e+00]]] [[[ 3.48394327e-02 -3.72410536e-01 1.22407985e+00 ... 1.11961210e+00 -3.99762183e-01 1.86870909e+00] [-1.30580568e+00 3.27959180e-01 9.14312840e-01 ... 6.70679271e-01 7.75882483e-01 -1.89762622e-01] [-3.15038376e-02 5.12257256e-02 8.72938156e-01 ... -1.98772643e-02 -1.12087822e+00 -7.54342020e-01] ... [-1.22456574e+00 -1.34807909e+00 -1.49924234e-01 ... 1.10361886e+00 -2.12607011e-01 8.11943889e-01] [ 7.91806459e-01 4.47068214e-02 -8.85892510e-02 ... 
-1.46869078e-01 -3.00106138e-01 7.37481356e-01] [-5.69763929e-02 -1.37573195e+00 5.88806987e-01 ... -1.39473939e+00 -4.48755443e-01 -5.61350644e-01]] [[-1.30897295e+00 -7.60208428e-01 -2.18434358e+00 ... 4.58885543e-02 3.79354537e-01 1.16613400e+00] [-5.39407730e-01 -1.36723948e+00 8.36696088e-01 ... 1.66161978e+00 -1.54191160e+00 1.31105900e+00] [-1.07359968e-01 1.06368661e+00 2.46262506e-01 ... 3.30664694e-01 -1.12307847e+00 -2.19654948e-01] ... [-6.22105360e-01 -5.22812121e-02 -2.80369073e-01 ... 9.64237034e-01 1.44962311e+00 -6.51076555e-01] [-7.62743771e-01 3.39238226e-01 6.70649946e-01 ... -7.88070500e-01 7.69662261e-01 -8.29351842e-01] [ 7.68495619e-01 -1.61522865e+00 1.98750138e-01 ... 3.40957761e-01 1.49511886e+00 6.72512889e-01]] [[ 9.31903601e-01 1.38336673e-01 -8.34206879e-01 ... -2.31500700e-01 -1.91965294e+00 -5.84312439e-01] [ 1.31890023e+00 -1.95904815e+00 2.19709560e-01 ... 1.23127317e+00 -2.23957062e+00 9.58907962e-01] [-9.66333568e-01 -7.42967129e-01 -8.59486043e-01 ... 9.28956509e-01 -5.16424239e-01 -1.04216743e+00] ... [ 2.32341599e+00 1.82194591e+00 1.01715517e+00 ... 2.41106987e-01 -7.11867332e-01 8.06856513e-01] [ 1.72504973e+00 -4.07680809e-01 -5.58256447e-01 ... -1.08185852e+00 5.93255222e-01 1.25966692e+00] [ 8.40815976e-02 1.43724632e+00 -1.50290763e+00 ... 3.11783731e-01 -8.69427145e-01 1.17875695e+00]] ... [[-7.14873791e-01 -2.39871073e+00 -2.18258929e+00 ... 2.10058594e+00 7.90230215e-01 -3.21539521e-01] [-1.63254797e+00 -2.64357114e+00 -3.63842249e-01 ... 1.01185727e+00 1.48157449e-02 4.12453473e-01] [ 9.22481716e-01 1.76378518e-01 -4.10136521e-01 ... 1.68224782e-01 -1.69769156e+00 -1.09662318e+00] ... [-2.47013047e-02 5.02005778e-02 8.84875953e-01 ... -1.73130417e+00 -5.41342974e-01 -1.99996337e-01] [ 6.22052073e-01 -1.18686247e+00 -5.95338881e-01 ... -1.16854556e-01 2.78463159e-02 1.82866991e-01] [-1.16941071e+00 2.37573564e-01 4.34925646e-01 ... 
-1.74477029e+00 -1.48033214e+00 3.08105636e+00]] [[-4.41232115e-01 -7.51869202e-01 1.52080274e+00 ... -1.14013159e+00 -2.59727812e+00 1.65087557e+00] [ 3.47559452e-01 -4.21872437e-01 5.55789113e-01 ... 2.06121135e+00 -1.15747213e+00 1.22650340e-01] [ 8.44579637e-01 1.84118140e+00 -6.52958632e-01 ... 4.32311743e-01 1.99344888e-01 -6.14718318e-01] ... [ 1.25179207e+00 -1.52625144e+00 -1.05705726e+00 ... -1.88545728e+00 9.48537052e-01 3.63338202e-01] [ 1.43574917e+00 -1.07838488e+00 2.00227141e-01 ... 8.85685235e-02 -4.50809710e-02 3.72589886e-01] [ 4.21636999e-01 -1.67120412e-01 -3.83318126e-01 ... -5.72571039e-01 -6.34353340e-01 -1.57795697e-01]] [[-9.97911021e-03 8.24420512e-01 4.20085728e-01 ... 9.13487613e-01 7.28325620e-02 -6.64110601e-01] [-1.18928409e+00 1.61127090e+00 -1.12513030e+00 ... -1.22166038e-01 -8.93189237e-02 9.07548368e-01] [ 1.09892771e-01 -8.05522263e-01 1.54134512e+00 ... -5.01990877e-02 5.20576417e-01 2.08439016e+00] ... [ 8.50418270e-01 7.65229583e-01 -1.13118267e+00 ... 5.83832432e-03 -9.86087143e-01 4.27977204e-01] [ 3.18351060e-01 1.07155406e+00 2.30348444e+00 ... -1.61754715e+00 -7.69050419e-01 4.06581670e-01] [-1.70083284e-01 1.62441194e+00 8.80011976e-01 ... -4.05873805e-01 -8.07721078e-01 -1.17907381e+00]]] [[[ 5.11702120e-01 -1.05854265e-01 1.09654963e+00 ... 1.97440058e-01 -7.03700900e-01 7.59365678e-01] [-7.12829947e-01 -1.38462043e+00 -5.38535953e-01 ... 1.26638636e-01 -5.07420838e-01 -1.88454103e+00] [-6.27312839e-01 1.05800211e+00 -2.62068295e+00 ... 2.47636199e-01 2.47778118e-01 -1.00682580e+00] ... [-2.39073336e-01 1.02238953e+00 3.75297129e-01 ... -5.09373359e-02 2.71090925e-01 -1.81555659e-01] [ 9.64270890e-01 -2.74654120e-01 -5.91863930e-01 ... 1.39552307e+00 8.24490190e-01 1.10059905e+00] [ 4.55841631e-01 1.87214160e+00 -4.27830905e-01 ... -1.70319486e+00 1.61564267e+00 -5.09547770e-01]] [[-5.77238500e-02 1.76280409e-01 1.49205673e+00 ... 
-2.95173585e-01 -8.00417840e-01 -1.81067690e-01] [-1.27179694e+00 -1.56986594e+00 7.01485276e-01 ... 2.01331943e-01 1.61079919e+00 1.62165022e+00] [-1.05353808e+00 6.47523999e-01 2.34859839e-01 ... 8.10894489e-01 1.09374933e-02 -1.75511467e+00] ... [ 6.40728354e-01 4.60916102e-01 9.22921419e-01 ... 1.91668963e+00 1.03555310e+00 -9.78934765e-02] [-6.03228986e-01 -1.35508311e+00 1.41936749e-01 ... 3.08904791e+00 2.59833813e-01 -1.66424379e-01] [-1.31638861e+00 -5.89194596e-01 -3.15755188e-01 ... -1.30114985e+00 -5.13599873e-01 1.24053180e+00]] [[-3.70355964e-01 -5.29310405e-02 1.23421597e+00 ... -1.70776695e-01 1.40353262e+00 -1.27708936e+00] [-5.37567317e-01 -1.76962763e-01 -8.61637771e-01 ... -9.99141157e-01 1.89012444e+00 1.04644859e+00] [-1.61356609e-02 2.42072895e-01 4.03108388e-01 ... 5.07316411e-01 -6.18547857e-01 1.08107530e-01] ... [ 5.35515308e-01 2.43373945e-01 -1.29317388e-01 ... -2.73280144e+00 5.44037998e-01 -8.27189147e-01] [ 9.67538059e-01 -6.88730836e-01 5.13032436e-01 ... 4.25416052e-01 4.46160585e-01 1.69559479e+00] [-1.08504856e+00 2.17613682e-01 5.62191486e-01 ... -1.65088928e+00 -8.17813396e-01 1.36265969e+00]] ... [[ 3.08221459e-01 1.06779015e+00 -1.88277137e+00 ... -2.89345384e-01 -1.73563731e+00 1.67610717e+00] [ 2.16520095e+00 1.82049608e+00 7.36785412e-01 ... 1.02128349e-01 1.03880918e+00 -5.82405567e-01] [-6.63962185e-01 -1.43638027e+00 -1.09548485e+00 ... 1.26370168e+00 1.26780361e-01 -9.56357896e-01] ... [ 9.42773700e-01 4.94155973e-01 -3.47941130e-01 ... 1.19884634e+00 3.19199920e-01 9.04697955e-01] [ 6.71546817e-01 7.27165222e-01 -1.30771625e+00 ... 6.81360245e-01 -5.84531248e-01 4.60580029e-02] [-1.83972165e-01 1.91297680e-01 4.58926260e-01 ... 3.45467627e-01 7.18524158e-01 -5.41702747e-01]] [[ 4.95671928e-01 2.06295824e+00 2.85839587e-01 ... 6.06042266e-01 1.53167769e-01 4.73855972e-01] [ 2.33903199e-01 -1.08213997e+00 -1.70924413e+00 ... 
-3.61126333e-01 2.80622691e-01 1.86232299e-01] [-8.45804885e-02 -4.29486871e-01 1.14526713e+00 ... -1.16318488e+00 2.13913226e+00 -7.94024587e-01] ... [-3.57486665e-01 -8.28326046e-01 4.64833319e-01 ... -1.00058734e+00 2.79764593e-01 2.15426850e+00] [-1.41778171e+00 -3.47839326e-01 3.01683098e-01 ... -8.62224936e-01 1.08200349e-01 -4.43628669e-01] [ 1.06939173e+00 5.57623863e-01 8.85903478e-01 ... -6.89424276e-01 6.65470302e-01 8.68103743e-01]] [[ 2.19867921e+00 -7.05495834e-01 -1.12747848e+00 ... 1.54076830e-01 -1.29677340e-01 2.05550265e+00] [ 7.09329963e-01 -1.95160061e-01 1.34411466e+00 ... -8.28461885e-01 3.59471738e-01 5.96444130e-01] [-6.46322131e-01 -3.46402049e-01 2.13080192e+00 ... 8.24996233e-01 -1.47721199e-02 -2.12349391e+00] ... [-1.75453335e-01 -3.15599740e-01 -8.65734935e-01 ... 2.17348203e-01 -6.44554377e-01 -1.02047873e+00] [ 1.05446911e+00 2.03912333e-02 -1.90573418e+00 ... -7.50683725e-01 3.48951876e-01 7.87854373e-01] [-7.44449615e-01 -9.88101721e-01 -7.81366110e-01 ... -2.38799736e-01 -1.51476121e+00 -3.94561499e-01]]] [[[ 5.36769986e-01 -5.29076755e-01 2.89892219e-02 ... -4.24215376e-01 -2.32221529e-01 8.57958734e-01] [-9.43783950e-03 -6.21393919e-01 -7.72634625e-01 ... -1.25410998e+00 -5.02737999e-01 -9.83390331e-01] [ 5.13302386e-01 1.40496409e+00 -2.51999319e-01 ... -6.21772230e-01 -1.10084915e+00 -2.66157269e-01] ... [ 2.69670993e-01 4.79976267e-01 -1.09670945e-01 ... 1.25967169e+00 -5.19089103e-01 -2.08914280e+00] [-2.99274251e-02 -1.54365015e+00 -2.43530214e-01 ... 1.11212039e+00 3.62879157e-01 -4.24698710e-01] [ 3.75260293e-01 -7.54906237e-01 3.23345184e-01 ... -2.76848674e-01 1.65773183e-02 -1.27536813e-02]] [[ 4.24601108e-01 1.24263123e-01 1.19744992e+00 ... -6.24434650e-01 2.34802425e-01 6.30854011e-01] [ 1.29856074e+00 -2.21944451e+00 4.17709678e-01 ... 1.10100079e+00 1.65077281e+00 -1.83760142e+00] [ 1.04744565e+00 1.41050732e+00 -2.39855036e-01 ... -7.67271459e-01 2.96360707e+00 6.17322922e-01] ... 
[-7.42036223e-01 4.16352868e-01 -7.38490760e-01 ... 1.79083273e-01 -1.17671514e+00 -6.17699325e-01] [ 7.12564141e-02 4.77226079e-01 7.63981462e-01 ... -9.37766284e-02 1.42702818e+00 6.69088364e-01] [ 1.64247885e-01 -4.06811893e-01 2.31248122e-02 ... 4.51530427e-01 4.33836341e-01 5.19624352e-01]] [[ 1.66463137e+00 -7.03655005e-01 -5.87346196e-01 ... -1.57612908e+00 1.19827795e+00 4.30866748e-01] [ 1.19669569e+00 -1.97698736e+00 -1.18915379e+00 ... 1.90768683e+00 1.60113704e+00 -1.58373916e+00] [ 7.60306418e-01 -1.01409984e+00 -9.71137047e-01 ... -9.26183984e-02 -1.14296532e+00 1.96788454e+00] ... [ 4.42815244e-01 5.39632559e-01 -9.93586183e-02 ... 7.98913121e-01 1.85886371e+00 5.04300237e-01] [ 3.52966428e-01 7.58546889e-01 -8.79477933e-02 ... -2.80747451e-02 -2.35944152e+00 4.16882366e-01] [ 7.06868112e-01 -3.85682106e-01 -2.87807491e-02 ... -1.30719805e+00 -1.30415583e+00 1.95352209e+00]] ... [[ 1.54350877e+00 -2.30557963e-01 -1.04549801e+00 ... -6.81589544e-02 3.01176488e-01 8.43397260e-01] [ 2.68778741e-01 9.73946989e-01 1.84937894e-01 ... -1.14657617e+00 -3.27185333e-01 -2.50752306e+00] [ 4.41435248e-01 -8.19467977e-02 -4.43526596e-01 ... 4.66941148e-01 7.39521086e-01 1.95177889e+00] ... [-8.64472747e-01 2.05723926e-01 1.01820290e-01 ... 1.23306000e+00 5.91746271e-01 6.14663303e-01] [ 2.19907022e+00 -9.82372880e-01 6.31608605e-01 ... 7.13750780e-01 -2.92372882e-01 5.38166046e-01] [-8.70262921e-01 6.93627477e-01 1.17864704e+00 ... 6.30923688e-01 -5.01065329e-02 -6.40845537e-01]] [[ 6.45091295e-01 3.54848593e-01 -4.90869701e-01 ... -1.33637059e+00 -9.00346711e-02 1.28469086e+00] [ 4.26029235e-01 -7.63582706e-01 -2.40161747e-01 ... 1.12478524e-01 -5.09543359e-01 1.57053769e+00] [-2.28583455e-01 1.37331617e+00 -1.77975342e-01 ... -1.50692752e-02 -4.22650516e-01 5.44047475e-01] ... [-1.04266576e-01 -2.42792885e-03 -1.92898363e-01 ... 1.05527306e+00 1.71388125e+00 5.18788218e-01] [ 5.95890462e-01 -6.72036707e-02 -1.35090029e+00 ... 
4.75274891e-01 8.39732811e-02 -1.41056550e+00] [-1.12646997e+00 -4.17745382e-01 7.88860559e-01 ... -8.43533218e-01 -2.87685215e-01 1.48827374e+00]] [[ 2.09931329e-01 3.22914362e-01 2.16609478e-01 ... -1.71421993e+00 -5.97336650e-01 -2.78078467e-01] [-7.12161064e-01 -8.18483412e-01 -6.55915916e-01 ... 2.50876784e+00 -1.03185725e+00 9.12325740e-01] [-3.26470494e-01 -1.57785964e+00 8.71114016e-01 ... -1.01714444e+00 -1.94569314e+00 -2.06038296e-01] ... [-1.39008313e-01 8.14380229e-01 -1.39469579e-01 ... -1.08143792e-01 7.84018934e-01 -7.54694566e-02] [ 2.64734030e+00 4.33352470e-01 -2.94168741e-01 ... -9.19128478e-01 -4.90225136e-01 -2.15984750e+00] [ 1.43570137e+00 -3.48431230e-01 -1.91240788e+00 ... -1.51165712e+00 1.45936108e+00 -1.89690828e+00]]]] [[[[ 1.28945243e+00 6.40518665e-01 6.75180912e-01 ... -1.41681826e+00 -1.71078038e+00 -7.33489156e-01] [-1.20457709e+00 1.92087181e-02 -1.02622449e-01 ... -1.51259458e+00 5.91593504e-01 -1.34934986e+00] [ 3.77467483e-01 2.18623018e+00 6.91974461e-01 ... -4.42238059e-03 5.77555239e-01 1.39654249e-01] ... [ 1.41859341e+00 8.65943372e-01 1.39423442e+00 ... -8.27934027e-01 -2.27129579e-01 8.99207950e-01] [ 4.37950581e-01 4.15398240e-01 -6.37194157e-01 ... 1.10757387e+00 1.70418307e-01 1.08800256e+00] [ 8.01235974e-01 -9.70570326e-01 -5.75908184e-01 ... -3.17639917e-01 1.20523477e+00 1.36988282e+00]] [[-1.10144210e+00 -3.79909635e-01 3.13864321e-01 ... 2.38786593e-01 2.36242771e-01 1.76349223e-01] [ 1.95718360e+00 -1.30807686e+00 -6.84510887e-01 ... 1.33579522e-01 1.56676427e-01 1.21368015e+00] [-1.81816649e+00 -2.04937294e-01 7.84156173e-02 ... -1.17523718e+00 5.18855341e-02 6.86689973e-01] ... [ 5.33577442e-01 -1.29718399e+00 -4.51248229e-01 ... 7.04661369e-01 -2.34751895e-01 2.57022262e-01] [ 1.11473346e+00 1.41571474e+00 1.08084214e+00 ... 1.23554412e-02 8.71712685e-01 -1.28323853e+00] [ 1.13061941e+00 1.14729404e+00 8.75033438e-01 ... 
-3.72978866e-01 1.56976685e-01 8.26828420e-01]] [[ 1.03657782e+00 9.61894929e-01 1.16381049e-01 ... 1.37740672e+00 2.01546311e+00 1.07275057e+00] [-8.88713822e-02 -7.97710896e-01 -2.91581750e-01 ... -7.98010886e-01 -1.63318157e+00 2.52432913e-01] [ 9.71647739e-01 1.36551604e-01 -1.09743118e+00 ... 4.10086483e-01 1.22200385e-01 1.56858933e+00] ... [ 4.40892950e-02 8.35599780e-01 -1.49200276e-01 ... -5.01497746e-01 -1.58593524e+00 -3.23122777e-02] [-7.83417106e-01 -1.78958833e-01 1.84637475e+00 ... -1.02160251e+00 -1.41929179e-01 -9.92814362e-01] [ 1.26651812e+00 -1.14730108e+00 9.61130023e-01 ... 1.33327786e-02 -1.76693869e+00 -6.24904990e-01]] ... [[ 4.51607049e-01 -4.54382122e-01 7.32994318e-01 ... 8.39294136e-01 7.15392157e-02 -1.04899824e+00] [ 1.23403430e+00 -7.30733514e-01 -1.02175641e+00 ... -1.32874560e+00 -1.20844007e+00 -6.39860332e-01] [ 1.94484723e+00 5.91293156e-01 9.40437198e-01 ... -1.41682673e+00 2.75513619e-01 -4.28968221e-01] ... [ 1.56224597e+00 6.64771855e-01 1.07494581e+00 ... -4.01412785e-01 1.06186771e+00 8.61969769e-01] [ 7.76298523e-01 -9.72019315e-01 1.02393889e+00 ... 2.30661586e-01 -1.40774405e+00 -4.38224345e-01] [ 5.39218009e-01 -9.09372032e-01 -2.59736419e-01 ... 7.44883835e-01 -7.29408741e-01 4.70569938e-01]] [[ 5.16110599e-01 -1.06146419e+00 -9.02056456e-01 ... -1.41740179e+00 -9.58559453e-01 -3.54660243e-01] [-5.01398325e-01 -1.03371763e+00 4.54230905e-01 ... -2.50933826e-01 -1.43216777e+00 1.42108846e+00] [ 6.32668734e-01 2.84041822e-01 -4.46338505e-01 ... -1.38027871e+00 -2.00055242e+00 3.07361126e-01] ... [ 1.15979457e+00 3.38425487e-02 -2.43923768e-01 ... -3.01947951e-01 1.85646087e-01 -1.17304516e+00] [ 6.53655767e-01 2.95809776e-01 -1.14361472e-01 ... 2.38321096e-01 1.04877841e+00 3.59685421e-01] [ 4.89373386e-01 1.84727564e-01 -1.39230597e+00 ... -4.60935801e-01 1.14865136e+00 1.36707401e+00]] [[-9.25496757e-01 -4.49632972e-01 -1.61036253e+00 ... 
-7.33929098e-01 4.02681798e-01 1.09126890e+00] [-4.78602856e-01 1.65974081e+00 4.48695302e-01 ... -1.42885494e+00 -4.99097705e-01 1.72895515e+00] [ 1.03608143e+00 -6.20819032e-01 1.94518340e+00 ... 1.71498120e+00 2.10589111e-01 7.98474073e-01] ... [ 1.87551916e+00 -8.88549566e-01 4.32341367e-01 ... -4.27218258e-01 -7.89700672e-02 -9.10437346e-01] [ 7.85954535e-01 -2.79838890e-01 1.14897382e+00 ... -7.75655508e-01 -8.87210429e-01 -8.42794105e-02] [-1.19140610e-01 4.62880880e-01 -2.21076116e-01 ... 8.97275150e-01 -1.86432928e-01 5.97046018e-01]]] [[[ 7.13418663e-01 -5.24140179e-01 -2.85790682e-01 ... -2.28398156e+00 -4.26159739e-01 3.14065814e-01] [ 8.55724454e-01 -1.05045390e+00 -1.73582590e+00 ... 5.83153702e-02 1.94721866e+00 1.21325493e+00] [-7.24119127e-01 -8.34955275e-01 -7.95898736e-01 ... 2.16244951e-01 1.25890887e+00 1.12214065e+00] ... [-5.25563836e-01 8.78604054e-01 2.69452810e-01 ... -9.28062916e-01 5.35267055e-01 1.12517488e+00] [ 3.78921598e-01 -1.24203702e-02 5.48137724e-01 ... -6.60764337e-01 -2.34410334e+00 1.23420811e+00] [ 7.04225063e-01 -1.38437018e-01 3.56435865e-01 ... 4.62718517e-01 3.83326232e-01 5.31175196e-01]] [[ 1.60977519e+00 -1.04725373e+00 1.02446926e+00 ... 2.52225548e-01 -1.57188058e-01 -1.32712126e+00] [-2.24091005e+00 -1.73592985e+00 -2.21657276e-01 ... -3.05247694e-01 5.49181342e-01 -2.88233161e-01] [ 1.05275750e+00 1.06582761e+00 2.65675366e-01 ... 9.51319814e-01 7.03823745e-01 4.32416052e-01] ... [-1.88409716e-01 -1.09210122e+00 5.32177985e-01 ... -5.54349065e-01 9.16306973e-01 1.73821762e-01] [ 5.60815573e-01 -9.70668375e-01 -1.99974239e-01 ... -4.96916324e-01 -2.20626462e-02 -6.17066860e-01] [-2.30148837e-01 7.45214641e-01 1.56031740e+00 ... 1.55397654e+00 9.05366480e-01 -1.94087350e+00]] [[ 1.66219175e+00 1.25802064e+00 -5.03707767e-01 ... -1.71684653e-01 4.46742505e-01 1.49695003e+00] [-7.05374777e-01 -6.12285614e-01 -6.90033510e-02 ... 
-3.52658957e-01 -6.01451457e-01 -1.84149876e-01] [-7.15587854e-01 4.89035934e-01 -1.66756201e+00 ... -2.71037877e-01 -2.24992871e+00 1.63341433e-01] ... [ 4.68730748e-01 -1.99397594e-01 1.34880352e+00 ... -1.65452212e-02 -2.34829374e-02 1.16522145e+00] [ 1.86840105e+00 6.53270721e-01 2.04488111e+00 ... 3.10538203e-01 -1.81138301e+00 6.11992538e-01] [ 1.50718212e-01 -7.39822805e-01 1.20838940e-01 ... -1.62052765e-01 -1.46655872e-01 5.35751700e-01]] ... [[-1.13528395e+00 -1.69242346e+00 1.91365469e+00 ... 5.46165586e-01 8.68454158e-01 1.80062795e+00] [ 1.32823005e-01 -7.68545389e-01 -8.03092599e-01 ... 3.76414657e-02 -4.42871153e-01 -1.20173311e+00] [ 4.10266221e-01 3.47306490e-01 4.00698274e-01 ... -9.92914975e-01 4.77964938e-01 6.52244151e-01] ... [ 2.75403827e-01 -1.45427167e-01 -7.93439895e-02 ... 7.78248787e-01 8.33085656e-01 8.92733574e-01] [-1.55314114e-02 2.61369109e-01 -1.17506123e+00 ... -1.20490682e+00 -3.89545768e-01 1.46665096e+00] [ 4.62756455e-01 -3.60929877e-01 -1.48869824e+00 ... -2.50169039e-01 3.44330758e-01 -2.21743178e+00]] [[ 1.29097998e+00 -1.87154606e-01 -5.46885788e-01 ... 1.21440029e+00 7.44897425e-01 -4.75987464e-01] [-1.78506637e+00 -3.08750477e-02 -2.57929385e-01 ... 1.56445181e+00 -3.27457905e-01 -1.59942806e+00] [ 7.36129880e-01 5.51685810e-01 9.71612513e-01 ... -1.40436757e+00 -1.39429319e+00 3.25770915e-01] ... [-8.67977202e-01 -3.00886333e-01 2.07443163e-01 ... -7.61595070e-02 1.00240552e+00 -8.92925918e-01] [-7.16234326e-01 1.40599474e-01 -1.59019366e-01 ... -3.64881009e-01 -4.83350456e-01 5.09132802e-01] [-2.01762930e-01 -1.18317008e+00 1.75474787e+00 ... 1.19993198e+00 -2.72489697e-01 1.33238566e+00]] [[-1.78164661e+00 7.12904155e-01 -1.63885709e-02 ... 4.48183656e-01 7.79581666e-01 1.99659932e+00] [ 2.76660532e-01 -5.88539302e-01 -9.14394617e-01 ... 1.53025448e+00 -6.97033852e-02 -1.79749966e+00] [ 5.48624158e-01 5.80692828e-01 -2.27293313e-01 ... 7.32929826e-01 -6.42199442e-02 -1.30676523e-01] ... 
[ 2.09652275e-01 6.92524195e-01 -2.68583864e-01 ... -1.02467680e+00 1.39415503e+00 1.77867174e+00] [ 8.15159738e-01 1.40051357e-02 -7.67434835e-01 ... -7.91400671e-01 -7.16686472e-02 -4.96026158e-01] [-7.92607307e-01 -1.62074834e-01 1.57934022e+00 ... -1.15014315e+00 -2.12493944e+00 -5.43586433e-01]]] [[[-6.30402148e-01 -1.19439140e-01 -7.26524889e-01 ... -3.50524008e-01 1.27856612e+00 8.79856527e-01] [-2.18242362e-01 -9.48687673e-01 2.61823446e-01 ... 1.04008548e-01 -2.87891650e+00 1.60544425e-01] [-1.75077155e-01 2.56574422e-01 -1.40134931e-01 ... -1.92925549e+00 -1.32903016e+00 -1.02446055e+00] ... [-5.62102914e-01 -6.33655548e-01 1.48287964e+00 ... -2.11345702e-01 2.05430031e-01 4.25361812e-01] [ 4.64737564e-02 -6.60114223e-03 1.19642901e+00 ... -1.23422459e-01 3.91194016e-01 -3.50231737e-01] [-5.10277450e-01 8.40270743e-02 -2.34414667e-01 ... -9.78234649e-01 -1.50541711e+00 -1.57784626e-01]] [[-2.89107561e-01 8.61023128e-01 -1.71291804e+00 ... 5.67603528e-01 1.25921071e-01 -2.30879307e-01] [-3.34935963e-01 -1.17197955e+00 -3.24505597e-01 ... -1.35477021e-01 -1.54527473e+00 2.07553935e+00] [ 1.56953955e+00 1.85878556e-02 6.01825833e-01 ... 1.90932035e-01 1.12017930e+00 -8.45284462e-01] ... [-4.90884721e-01 -5.32632053e-01 -1.57468283e+00 ... 1.00285125e+00 3.97514462e-01 1.81571651e+00] [-1.36059606e+00 3.96263659e-01 -4.56759006e-01 ... 5.47544718e-01 1.74088490e+00 1.70372158e-01] [-2.02798796e+00 -5.19822598e-01 -1.52141348e-01 ... 5.69278002e-01 7.40616977e-01 -9.75231528e-01]] [[-1.84250906e-01 5.79737127e-01 1.20336592e-01 ... 9.08416331e-01 -1.58328712e+00 6.43685997e-01] [ 9.09829795e-01 -9.26182568e-01 8.01240861e-01 ... -9.64584708e-01 -2.61760211e+00 -5.09296417e-01] [-9.74646270e-01 6.78426743e-01 7.36929297e-01 ... -1.02453202e-01 -3.76353264e-01 1.09641135e+00] ... [ 1.17517221e+00 -3.69986862e-01 8.75298977e-01 ... -1.61469090e+00 -8.58396709e-01 -1.48564887e+00] [-5.14218330e-01 -9.95172188e-02 7.60418892e-01 ... 
-1.76920700e+00 -7.31819689e-01 -1.84127942e-01] [-1.53536606e+00 -1.45166665e-01 -6.72966540e-01 ... 7.86230788e-02 5.37638187e-01 4.65170473e-01]] ... [[-3.93745601e-01 -1.11291267e-01 4.65014189e-01 ... 5.79302549e-01 2.86129206e-01 -5.26307430e-03] [-3.12730879e-01 2.81709284e-01 1.93994129e+00 ... -1.13360083e+00 -6.46963418e-01 -6.60366058e-01] [ 2.61225879e-01 -7.38252938e-01 -1.17864013e+00 ... -4.14602399e-01 4.40665871e-01 2.64972240e-01] ... [ 1.23760474e+00 9.04634774e-01 -2.29040295e-01 ... -2.92025745e-01 -3.18186730e-01 5.02774835e-01] [ 1.64601958e+00 -9.72522497e-01 1.20371926e+00 ... -2.21739247e-01 3.16715455e+00 -2.21880054e+00] [-6.81320906e-01 -1.62566411e+00 -1.21863163e+00 ... -4.43559766e-01 1.04185843e+00 1.54821947e-01]] [[-1.57582128e+00 -1.24408996e+00 3.73095907e-02 ... -1.13975656e+00 3.37400526e-01 1.43489385e+00] [ 8.94114256e-01 -9.15318847e-01 -6.26249760e-02 ... -3.54968786e-01 -1.19169402e+00 -8.47329557e-01] [ 1.40070772e+00 -1.54057586e+00 -1.59176695e+00 ... -1.32885396e+00 4.91938405e-02 1.23101687e+00] ... [ 8.18603754e-01 7.56005704e-01 -2.11390778e-01 ... 2.68054724e+00 1.31016397e+00 3.86254430e-01] [ 4.95795608e-01 -2.76116252e+00 -4.87791091e-01 ... 1.10512173e+00 -5.96416704e-02 -1.21017468e+00] [ 1.34347165e+00 -5.22823989e-01 -8.07117105e-01 ... 4.78582114e-01 -1.62114584e+00 -1.40872014e+00]] [[ 1.03055489e+00 -1.46054769e+00 -2.09785074e-01 ... -3.60613286e-01 2.98085183e-01 2.64781213e+00] [ 3.19665164e-01 1.18067431e+00 -1.70670494e-01 ... 8.50761756e-02 1.09111166e+00 1.55703187e+00] [-1.74934983e-01 -9.91317272e-01 -1.61369872e+00 ... -9.96301416e-03 -7.32110202e-01 -1.16621263e-01] ... [-1.48405433e+00 1.06856060e+00 -7.56238520e-01 ... -1.22808659e+00 -7.55940005e-02 -1.62765825e+00] [-5.72769046e-01 -7.86111414e-01 1.09234202e+00 ... 2.28428438e-01 -3.59081149e-01 -9.46944356e-01] [ 8.63422215e-01 1.55701649e+00 2.38499045e-02 ... 
1.71412086e+00 8.45994055e-01 7.36386359e-01]]] [[[-5.83111525e-01 1.31837353e-02 -4.55829650e-01 ... -1.21542144e+00 -1.13851261e+00 -1.73587784e-01] [-2.89721996e-01 6.75151229e-01 -3.17716986e-01 ... 1.11529362e+00 -1.43885207e+00 3.04044485e-01] [ 2.35145688e-01 -1.55067194e+00 -1.42864156e+00 ... 2.50479507e+00 -1.43094110e+00 -4.41436738e-01] ... [ 1.02892482e+00 -1.29908133e+00 -1.81613767e+00 ... 8.86071265e-01 9.99191642e-01 -1.43847585e+00] [ 4.69757766e-01 7.05751657e-01 -1.11380017e+00 ... 1.55017257e-01 1.73693383e+00 -4.11342561e-01] [-1.18892968e+00 -1.49672830e+00 -6.62156284e-01 ... -9.04036343e-01 -1.00727836e-02 9.15655851e-01]] [[ 8.16015065e-01 1.27851427e+00 1.41220236e+00 ... 9.72712159e-01 -7.99290240e-01 1.04700938e-01] [ 1.00826681e+00 -1.20362866e+00 5.01356006e-01 ... 9.75452363e-01 -7.22690579e-03 -2.72958899e+00] [-2.34711066e-01 2.77281851e-01 -9.58651379e-02 ... 3.71171981e-01 -6.89058065e-01 -8.32536399e-01] ... [-8.71657848e-01 -2.03094289e-01 4.73055728e-02 ... -1.77138293e+00 -1.29770947e+00 -1.05286799e-01] [ 1.26718009e+00 -1.07395303e+00 1.27876222e+00 ... 5.91951430e-01 9.63086009e-01 1.74378049e+00] [-1.20035112e+00 -4.40700561e-01 -9.75909680e-02 ... 2.24919319e+00 8.34828317e-01 -1.63845944e+00]] [[-5.07346451e-01 -3.45658094e-01 -4.55845028e-01 ... -1.69487369e+00 -1.86713003e-02 9.08543587e-01] [ 2.85119027e-01 6.03944361e-01 -9.10144210e-01 ... 2.55612373e+00 -5.12163103e-01 -3.67963582e-01] [ 2.17918828e-01 -1.75365746e+00 -1.01120949e+00 ... -8.31374884e-01 3.65878522e-01 6.66193426e-01] ... [-9.95070875e-01 6.52571559e-01 -1.04040585e-01 ... 1.30877495e+00 -1.49819866e-01 -2.69903727e-02] [-5.47945440e-01 1.03402689e-01 9.71473038e-01 ... 3.83546442e-01 1.24260712e+00 -2.23102999e+00] [ 8.49345505e-01 -1.06570578e+00 1.32103074e+00 ... 8.79343629e-01 -1.01330197e+00 5.55606127e-01]] ... [[ 3.92643839e-01 -3.55857126e-02 -4.84988660e-01 ... 
6.23351157e-01 -4.56445336e-01 -4.48907018e-01] [ 1.28779984e+00 -1.86449361e+00 -1.78531504e+00 ... 2.26786375e+00 -6.47976100e-01 -8.11068892e-01] [-3.31087083e-01 -5.31334542e-02 -1.88358933e-01 ... -1.66561985e+00 1.03187695e-01 -9.20455694e-01] ... [ 2.10873961e-01 1.00403452e+00 1.59981862e-01 ... 1.41117787e+00 5.59055984e-01 2.91243672e-01] [ 5.54062009e-01 1.22167831e-02 5.35699613e-02 ... 7.38952696e-01 9.07645095e-03 1.23505846e-01] [-2.91216280e-02 -6.09156609e-01 -1.41482949e+00 ... 1.16699588e+00 -6.46159530e-01 -3.08267951e-01]] [[-1.55337608e+00 -4.27883595e-01 8.12432289e-01 ... 1.54045081e+00 -9.18745399e-01 -3.02959591e-01] [ 1.07801223e+00 2.00371122e+00 5.45908868e-01 ... -4.59704041e-01 -5.71201086e-01 4.13129665e-02] [ 3.94674502e-02 7.96362221e-01 4.53093767e-01 ... -6.86210096e-01 3.75321835e-01 -1.12978089e+00] ... [-3.52086157e-01 2.89553016e-01 -1.53150284e+00 ... -4.77967173e-01 -7.79250085e-01 5.32548487e-01] [ 2.88228011e+00 3.43062170e-02 1.21127701e+00 ... 5.21198332e-01 1.57222971e-01 -2.52418935e-01] [-1.16326034e-01 1.95279622e+00 -1.80706121e-02 ... 9.72109258e-01 -7.74129450e-01 3.19660187e-01]] [[ 1.83903944e+00 -2.83463836e-01 -1.21899223e+00 ... 1.01657617e+00 8.09463739e-01 7.91785598e-01] [-1.15463698e+00 1.37332010e+00 -4.66042906e-01 ... 1.62810937e-01 1.91997230e-01 -3.62258494e-01] [-1.34903896e+00 -1.30995739e+00 -1.14227605e+00 ... 1.04082799e+00 -3.94682944e-01 4.70711172e-01] ... [ 2.87383109e-01 -5.58218479e-01 2.18936533e-01 ... 1.05262712e-01 -4.62784529e-01 4.63642657e-01] [-8.93520638e-02 -9.68835831e-01 -1.92039704e+00 ... -6.38882756e-01 -4.78375167e-01 -2.08295012e+00] [ 1.16585255e+00 -4.93222252e-02 3.56546909e-01 ... 1.17963541e+00 -7.06666470e-01 -5.66477716e-01]]] [[[ 8.38627443e-02 -1.57940304e+00 4.97350357e-02 ... -3.30936283e-01 -2.30942890e-01 -1.13128221e+00] [-2.98781663e-01 5.06298780e-01 -7.60006383e-02 ... 
1.03812063e+00 4.18865532e-01 -1.00532377e+00] [-4.39053297e-01 -1.84162915e-01 -1.09596944e+00 ... 1.45244151e-01 -1.08199739e+00 2.74057705e-02] ... [ 8.10163748e-03 1.05834734e+00 5.91881620e-03 ... -1.20190012e+00 -1.68390489e+00 1.41884768e+00] [ 6.54909536e-02 -1.47430968e+00 -1.49605393e-01 ... -1.82699859e+00 -1.93008572e-01 -5.59565604e-01] [-1.58845282e+00 4.79812652e-01 -8.65610600e-01 ... -5.60581565e-01 1.30509186e+00 5.09125769e-01]] [[-1.16003418e+00 6.58009291e-01 8.61331880e-01 ... -4.15453404e-01 -2.67322183e+00 1.65837467e+00] [-1.54946709e+00 -1.30469108e+00 2.06838474e-01 ... 9.60617512e-03 1.36385906e+00 8.11712623e-01] [ 1.02595377e+00 -1.29413426e+00 1.37042272e+00 ... 2.55767822e-01 -6.22443371e-02 -9.11607072e-02] ... [ 1.50775924e-01 5.75480938e-01 -6.30639493e-01 ... -7.20301449e-01 -8.26751709e-01 -2.50929976e+00] [ 1.11403430e+00 -7.54840255e-01 1.59330857e+00 ... 6.26439512e-01 5.04467785e-02 5.99349439e-01] [ 4.13728625e-01 -3.46880913e-01 2.51226455e-01 ... 3.68699461e-01 -2.35478893e-01 -4.27054495e-01]] [[ 5.56648016e-01 7.90398896e-01 8.50284323e-02 ... 6.19705617e-01 -1.98185861e-01 1.39396012e-01] [-9.61173892e-01 4.07987535e-01 1.71372101e-01 ... 8.95923495e-01 -7.95719624e-01 -9.66659412e-02] [ 3.23417038e-01 7.85419941e-01 9.88191366e-02 ... -4.57419097e-01 -1.07777588e-01 3.19978267e-01] ... [-2.69552350e-01 -6.36202097e-01 -5.21980107e-01 ... 8.23836029e-01 -7.39667341e-02 -5.94755232e-01] [-1.04218388e+00 -1.93362817e-01 -1.27426195e+00 ... 1.15055668e+00 -1.55155373e+00 1.36101985e+00] [-2.77735770e-01 3.15968305e-01 -1.14356518e+00 ... 1.84252322e+00 9.65673849e-02 -1.20077610e+00]] ... [[-2.64811385e-02 -8.07102025e-01 4.21676695e-01 ... -8.81951153e-01 4.36863840e-01 1.03766978e-01] [ 3.50801349e-01 1.78366095e-01 -2.43961830e-02 ... 6.73811436e-01 -4.88922626e-01 -1.14154589e+00] [ 1.71046507e+00 5.30221224e-01 -6.85802102e-01 ... 6.65628910e-02 1.78371042e-01 7.01855302e-01] ... 
[-5.83732188e-01 -1.33131468e+00 -5.50036788e-01 ... -1.67673603e-01 1.63343027e-01 -9.70476389e-01] [-7.28569388e-01 8.38323772e-01 1.39793503e+00 ... 6.09386683e-01 1.00941181e+00 9.93946075e-01] [ 5.13494432e-01 7.98378229e-01 1.41827726e+00 ... 7.28343129e-01 -2.03674173e+00 -1.25913572e+00]] [[ 6.68191552e-01 -6.43875241e-01 1.66964024e-01 ... -6.43702924e-01 3.88570219e-01 1.99366316e-01] [-7.59536624e-01 -1.92674577e-01 6.12842560e-01 ... 7.78997838e-01 2.26889938e-01 -1.06921661e+00] [-1.35113776e-01 -4.98746842e-01 4.31154668e-01 ... -3.79479468e-01 6.92353964e-01 -1.81555644e-01] ... [ 1.11557686e+00 -9.88946140e-01 -6.02923892e-02 ... -1.22180820e+00 3.47607851e-01 -1.62065887e+00] [-1.58148378e-01 7.31096556e-03 -5.21363556e-01 ... 6.38597786e-01 -1.01641691e+00 4.34787482e-01] [-4.42790985e-01 1.61638826e-01 -5.76209664e-01 ... -1.18158317e+00 -1.09243047e+00 1.62991360e-01]] [[-2.73896009e-01 5.30051112e-01 2.68230885e-01 ... 2.78248310e-01 -9.90949929e-01 -4.13606882e-01] [-1.99017823e-01 6.42195165e-01 -5.52923739e-01 ... 1.64679933e+00 -7.43290544e-01 4.33476925e-01] [ 8.66259158e-01 1.31895199e-01 -7.57847011e-01 ... 3.10327458e+00 -2.14069295e+00 1.19976260e-01] ... [-8.64409506e-01 7.36967772e-02 -1.05960202e+00 ... -1.95438766e+00 1.15141368e+00 4.61751372e-01] [-6.39544368e-01 1.29072404e+00 3.83043796e-01 ... 1.37045786e-01 1.34683120e+00 2.38611031e+00] [ 4.71857369e-01 1.45504439e+00 4.26745892e-01 ... -3.25602181e-02 -2.18026519e+00 6.11884594e-01]]] [[[-2.35006660e-01 -1.50648081e+00 1.04259634e+00 ... 1.67773962e+00 -1.20146418e+00 -3.00291836e-01] [-3.12232018e-01 1.00226390e+00 9.47799444e-01 ... 4.57554340e-01 4.28915650e-01 3.71729225e-01] [ 2.37870723e-01 2.77101189e-01 2.76444435e-01 ... 5.52655935e-01 4.60506022e-01 4.02534157e-01] ... [-1.50755167e+00 -7.42811084e-01 1.81380808e+00 ... 4.70509648e-01 5.25715888e-01 -1.33844435e-01] [ 1.40873182e+00 1.76941425e-01 -2.38408351e+00 ... 
-1.38006592e+00 1.99911988e+00 1.70699072e+00] [ 1.30868733e+00 -1.18482375e+00 5.33307672e-01 ... -9.10074830e-01 -2.08014324e-01 -1.00633812e+00]] [[ 1.29882503e+00 1.31087884e-01 1.00341856e+00 ... 3.29382904e-02 1.49784839e+00 -8.91155005e-01] [-1.03564858e+00 -2.38562256e-01 -3.95809174e-01 ... -4.94701445e-01 -1.41289294e-01 -1.34196281e+00] [-1.12817478e+00 3.27537060e-01 1.97084200e+00 ... 1.43370616e+00 -3.12630028e-01 -1.41406727e+00] ... [-9.48555470e-01 5.83964705e-01 -5.61111450e-01 ... 4.61064517e-01 -2.04818696e-01 -2.23182052e-01] [-1.76555261e-01 -8.97910237e-01 -1.16494858e+00 ... 4.24676448e-01 -3.78680006e-02 -7.52210438e-01] [-3.37315083e-01 -1.69182658e+00 1.44430369e-01 ... -4.78430063e-01 -4.81443465e-01 -4.95335996e-01]] [[-1.66155589e+00 1.82441938e+00 -4.07908440e-01 ... 4.64004725e-01 -1.50410986e+00 3.53620678e-01] [ 6.79291666e-01 -1.29825622e-01 -1.14963877e+00 ... 8.49975526e-01 -2.77795782e-03 5.07159889e-01] [ 9.24976647e-01 5.18087268e-01 -2.51785219e-02 ... -5.89181483e-01 -9.31753069e-02 -8.69834661e-01] ... [-6.66392922e-01 -4.09990370e-01 2.43893456e+00 ... 2.11025906e+00 -8.70050117e-02 9.36525464e-01] [ 1.87948123e-01 -2.95430541e-01 -4.73922968e-01 ... -9.80426133e-01 -2.38142657e+00 9.18721259e-01] [-9.68249261e-01 -5.50251365e-01 -1.72376049e+00 ... -1.64302111e-01 9.88363147e-01 9.69554424e-01]] ... [[-8.98423672e-01 -2.84376681e-01 7.81146884e-01 ... 1.31299126e+00 3.81829470e-01 -4.30960268e-01] [-1.02150619e+00 -9.68298614e-01 -5.83786666e-01 ... -4.28903073e-01 -1.45592630e+00 6.09251559e-01] [-2.57914692e-01 -1.83325931e-01 4.91308689e-01 ... 5.37135005e-01 1.68306148e+00 -1.01367939e+00] ... [ 2.52150238e-01 -2.08413887e+00 2.55280752e-02 ... 1.50292337e+00 4.75158095e-01 -1.23036730e+00] [ 2.94944435e-01 -2.17219085e-01 -1.88944876e-01 ... -1.44992143e-01 -4.63837713e-01 1.81674778e+00] [ 2.71297634e-01 -8.68323803e-01 -7.60822833e-01 ... 
1.46770227e+00 -6.80868268e-01 1.52941020e-02]] [[ 4.06589538e-01 -7.85533845e-01 8.88596833e-01 ... 6.96532249e-01 -3.66774082e-01 -1.36039019e+00] [ 1.00349450e+00 5.41792624e-02 -1.46656811e+00 ... 1.72389662e+00 -5.66634297e-01 5.13043523e-01] [ 3.86258066e-01 5.22394478e-01 -4.58338916e-01 ... 5.48441648e-01 3.62909019e-01 -6.34918660e-02] ... [-1.42733681e+00 1.59236625e-01 5.23126006e-01 ... -2.26172745e-01 6.38810918e-02 -8.49854708e-01] [-1.78384233e+00 8.96537781e-01 -9.56047952e-01 ... -1.01702845e+00 -1.24183631e+00 1.73076177e+00] [ 4.93326902e-01 9.81703168e-04 1.73933089e+00 ... 2.27844745e-01 3.92639816e-01 7.38509893e-01]] [[-8.32306266e-01 -9.94243979e-01 1.65434456e+00 ... -2.32602909e-01 1.98532581e+00 1.67053366e+00] [ 9.74960089e-01 1.23718822e+00 6.36094093e-01 ... 3.34802032e-01 1.52685928e+00 2.23905635e+00] [ 1.63801455e+00 -4.36831236e-01 1.72041506e-02 ... -1.69535026e-01 1.05643809e+00 1.37863147e+00] ... [-2.09832013e-01 -6.66303262e-02 -9.02799517e-02 ... 2.08678246e-01 -6.45721436e-01 -1.38811076e+00] [ 1.87708914e+00 6.94051564e-01 3.81546855e-01 ... 8.37369442e-01 -1.04333711e+00 3.18498820e-01] [ 5.44887006e-01 6.94495618e-01 -8.07304740e-01 ... 3.89680177e-01 1.11026859e+00 2.75925338e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 5} - params:{'n_groups': 3, 'eps': 1.0} ] | 0.11 | |
|
----------------------------- Captured stdout call ----------------------------- 5 graph(%self : __torch__.test_group_norm.___torch_mangle_4618.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.]() %self.weight : NoneType = prim::Constant() %self.n_groups : int = prim::Constant[value=3]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %5, %self.weight, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 
%19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%6, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.weight, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with 
OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4857832 (Squeeze_4857831[0]:i64[], Constant_4857786[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4857832': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4860629 (Squeeze_4860628[0]:i64[], Constant_4860578[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4860629': Arguments do not have the same element type (arg0 
element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4863116 (Squeeze_4863115[0]:i64[], Constant_4863066[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4863116': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instanfw_re: [[[[[ 1.54548657e+00 8.76272917e-01 2.54105091e-01 ... 1.07626653e+00 4.75063622e-01 7.55027413e-01] [ 6.14653707e-01 -4.77242321e-01 -2.44188607e-01 ... 
8.82427275e-01 4.63813454e-01 -7.31037617e-01] [-7.21839368e-01 -5.34637094e-01 1.70583576e-01 ... -2.12956333e+00 -6.62822664e-01 -5.17660618e-01] ... [ 5.24551459e-02 -6.32735908e-01 5.18890135e-02 ... 8.68077695e-01 -9.10869002e-01 -6.83205664e-01] [ 2.23811641e-01 3.44029635e-01 -8.70714247e-01 ... -7.33071566e-01 2.18341902e-01 -6.35977626e-01] [ 9.37092066e-01 -4.33397681e-01 -3.45798470e-02 ... -9.21873808e-01 3.82861018e-01 -2.06189156e-01]] [[ 6.26383543e-01 1.29386112e-01 -7.14686036e-01 ... 1.05048847e+00 6.48357645e-02 -9.53055501e-01] [-3.30988437e-01 4.81040776e-01 6.03569388e-01 ... -5.35219371e-01 -7.97759175e-01 2.79920995e-01] [-9.10779014e-02 -1.25844311e-02 3.43172193e-01 ... -2.14689329e-01 9.20878574e-02 8.84673655e-01] ... [-9.56774473e-01 2.43868396e-01 5.04217632e-02 ... -1.09587848e+00 -1.69859290e-01 -5.07827520e-01] [ 3.82212639e-01 -1.44400820e-01 -1.68179810e-01 ... 5.66280723e-01 6.93612754e-01 3.74958575e-01] [ 7.59450674e-01 8.62446189e-01 4.20119584e-01 ... -1.22145879e+00 -1.59024727e+00 7.29338884e-01]] [[-4.29327995e-01 -7.98887908e-01 4.35212910e-01 ... -3.74543846e-01 3.51040602e-01 -2.90795378e-02] [-1.82038689e+00 2.54401386e-01 -3.94107312e-01 ... 1.32456863e+00 1.73773718e+00 -8.98474813e-01] [ 7.91693866e-01 -1.12869255e-01 -2.99515873e-01 ... -5.64175248e-01 -1.12887658e-01 5.30435205e-01] ... [-5.77554286e-01 4.37105566e-01 -8.40597570e-01 ... 5.27924180e-01 5.10483921e-01 3.81582081e-01] [ 5.35950363e-01 -4.14646298e-01 -1.75055221e-01 ... 6.34768605e-01 -6.35925055e-01 -6.30041599e-01] [ 7.19895720e-01 -1.03111613e+00 4.37530130e-01 ... 5.29444665e-02 -1.58106253e-01 -1.00168955e+00]] ... [[ 1.51252568e+00 4.23020661e-01 1.14501989e+00 ... 1.10416365e+00 6.69207424e-02 3.63280773e-01] [ 9.09876049e-01 -2.08768502e-01 -2.12419197e-01 ... 1.30285710e-01 -8.59286487e-01 -6.59377694e-01] [ 7.22760409e-02 -3.36180568e-01 -9.09782887e-01 ... 7.22181380e-01 -2.53283548e+00 4.73996729e-01] ... 
[-3.93346071e-01 1.64946422e-01 -4.59868491e-01 ... -1.22768961e-01 -4.51346755e-01 4.13438708e-01] [-1.12528300e+00 -1.89899236e-01 -4.67312217e-01 ... 6.15246117e-01 4.77555454e-01 5.28165817e-01] [-1.91321075e-02 1.18259750e-01 -4.60713118e-01 ... 3.77526015e-01 4.62906778e-01 -1.67785853e-01]] [[-2.91590504e-02 -2.93466985e-01 -3.41697305e-01 ... 1.15302965e-01 -2.80187994e-01 -2.68438101e-01] [-4.38226849e-01 3.42020959e-01 -5.23915470e-01 ... -3.16893965e-01 -4.58416343e-01 -1.35442352e+00] [ 1.18010476e-01 -1.81263721e+00 -4.63753819e-01 ... 1.27977446e-01 -5.18950820e-01 -1.46103275e+00] ... [-2.71069884e-01 4.97204423e-01 -1.51599824e-01 ... 6.69633523e-02 8.43779668e-02 9.15659428e-01] [-1.40360928e+00 -6.18297398e-01 -8.31633091e-01 ... -2.29503036e-01 8.95125270e-02 -7.81324208e-01] [-1.32262886e-01 -6.46657109e-01 -6.48523122e-02 ... 3.69233727e-01 1.75645813e-01 -2.18059942e-01]] [[ 5.91741264e-01 7.84205854e-01 -2.78031379e-01 ... 2.55485326e-01 3.45608860e-01 -8.36643875e-02] [ 1.10554671e+00 -5.55596650e-01 8.08792531e-01 ... -4.97511804e-01 -2.39738926e-01 5.22772968e-02] [-1.14419743e-01 9.25392509e-01 -5.34100235e-01 ... 2.23591775e-01 -4.25161630e-01 -2.63106287e-01] ... [ 3.86358351e-01 -4.47620809e-01 -6.49558246e-01 ... 1.84826195e-01 -5.88468969e-01 -7.10417628e-01] [-8.59174609e-01 1.93926468e-01 4.71265376e-01 ... 6.22556388e-01 -6.73178136e-02 5.45682609e-01] [-4.46053177e-01 9.36507583e-01 4.84523445e-01 ... -1.80608973e-01 -2.53746688e-01 3.59491676e-01]]] [[[-1.08866096e+00 -4.79448259e-01 3.88695776e-01 ... -3.45741868e-01 2.54677474e-01 -4.79115665e-01] [-7.39258111e-01 3.28392357e-01 -1.52625322e-01 ... -2.73259450e-02 9.99219656e-01 1.14468157e+00] [ 1.41525781e+00 -7.23527074e-01 1.74764848e+00 ... 6.44177556e-01 9.05296683e-01 1.75448731e-01] ... [ 6.25014842e-01 4.53442663e-01 -1.27300751e+00 ... -1.76630348e-01 7.09405184e-01 6.35403275e-01] [ 2.56301165e-01 -5.41356921e-01 -6.92967296e-01 ... 
-3.18113923e-01 -1.54989615e-01 -1.44687399e-01] [ 3.27915579e-01 1.24330199e+00 2.77148157e-01 ... 3.68465215e-01 5.53028882e-01 6.79980665e-02]] [[-1.27050757e+00 -8.96523952e-01 -5.52932680e-01 ... -4.68429208e-01 1.40781009e+00 -6.39240563e-01] [-1.12275267e+00 -8.69373024e-01 -7.32179761e-01 ... -7.00787485e-01 5.18958330e-01 4.20531631e-01] [ 9.77383330e-02 -7.53858089e-01 6.72470093e-01 ... 4.40996796e-01 -4.48936075e-01 -8.44772398e-01] ... [-4.88053262e-02 -2.90435046e-01 4.66869235e-01 ... -4.03090566e-01 6.76676095e-01 6.06089890e-01] [-6.40221715e-01 -1.24561809e-01 2.42066979e-01 ... 8.57263684e-01 -4.82620388e-01 -3.72759700e-01] [-1.40010357e-01 -1.38328588e+00 9.47740495e-01 ... -2.06136972e-01 -3.56658429e-01 7.14150786e-01]] [[-9.48271602e-02 3.45502585e-01 6.44079506e-01 ... -8.82024109e-01 -6.81791186e-01 -4.60943431e-01] [-2.46486902e-01 6.30741119e-01 -3.26436102e-01 ... -2.19973624e-01 -4.38360631e-01 -1.44444549e+00] [ 5.16816825e-02 7.93982208e-01 -1.54217196e+00 ... -5.93168914e-01 -1.93883069e-02 5.68453789e-01] ... [-1.06742275e+00 1.03621446e-01 -1.17618811e+00 ... 3.45438004e-01 -1.10487580e-01 -9.30807114e-01] [-2.86815554e-01 -1.72501311e-01 -1.06168419e-01 ... -5.30335447e-03 -1.35330355e+00 2.86688089e-01] [-2.80215293e-01 2.56466746e-01 1.48418605e+00 ... -9.22163785e-01 9.52760875e-01 -7.14624763e-01]] ... [[-2.90369570e-01 -1.83557844e+00 -4.29153174e-01 ... 8.52529883e-01 -3.05412382e-01 -6.13056004e-01] [ 2.94401973e-01 2.81521101e-02 2.97539949e-01 ... 1.66062248e+00 4.78837311e-01 1.20854068e+00] [-2.19294563e-01 -7.68713951e-02 -4.68974560e-01 ... -7.93895945e-02 -6.57025039e-01 -5.40403314e-02] ... [ 3.15615386e-01 -4.57610190e-01 7.56897569e-01 ... 2.71122783e-01 -4.82249379e-01 -3.40058059e-01] [-8.39072391e-02 -1.97956693e+00 -1.07549644e+00 ... -4.62664157e-01 -1.47034198e-01 -7.04277337e-01] [-1.28304556e-01 6.50678337e-01 -1.88690349e-01 ... 
1.06640041e+00 -5.66344976e-01 2.99678475e-01]] [[ 2.13605732e-01 2.31784508e-01 -5.68567693e-01 ... -4.40362781e-01 3.80610228e-02 -6.51228011e-01] [ 1.06587327e+00 2.54520148e-01 2.10373092e+00 ... -6.56441867e-01 6.39929354e-01 1.83809304e+00] [-1.25211239e+00 1.03081353e-01 -4.54050824e-02 ... -1.05360605e-01 -9.89840776e-02 2.37032846e-01] ... [ 2.76406229e-01 7.10816145e-01 -9.17583585e-01 ... 1.00295651e+00 -4.93840307e-01 2.10285969e-02] [-2.79774051e-02 -3.97658587e-01 7.49023080e-01 ... -5.91540337e-01 3.64708155e-01 -3.72187287e-01] [ 3.42385978e-01 9.66649711e-01 5.31246185e-01 ... -3.33689749e-02 -6.47223175e-01 9.59699154e-01]] [[-1.99264422e-01 -5.11303365e-01 -7.88744569e-01 ... 2.67812759e-01 -1.13917097e-01 -1.17851086e-02] [-4.72745359e-01 -4.05564606e-01 -8.57151747e-01 ... -1.18524504e+00 2.10540816e-02 -9.96889353e-01] [-7.83310175e-01 2.88962185e-01 -1.34378925e-01 ... -6.30554199e-01 5.10231331e-02 9.58074331e-02] ... [ 1.39550996e+00 -1.87931001e+00 -2.87428856e-01 ... 9.47954416e-01 -2.70805746e-01 -3.34430970e-02] [ 2.61665374e-01 3.01477939e-01 -2.88760185e-01 ... 3.26733887e-02 -3.38393241e-01 -3.02406341e-01] [-1.60621300e-01 2.84700781e-01 -8.07285964e-01 ... 9.87052202e-01 1.85553811e-03 8.03987980e-01]]] [[[ 1.43123031e-01 -6.77500188e-01 -6.44624650e-01 ... -6.82547629e-01 -1.01352520e-02 -5.65188602e-02] [ 1.02017629e+00 2.25072637e-01 5.00527740e-01 ... -5.38990200e-01 4.36598361e-02 -1.48390949e-01] [-5.60385406e-01 -4.03749421e-02 -4.79758792e-02 ... 1.78826198e-01 1.38582599e+00 -5.36104858e-01] ... [-2.42461175e-01 5.89097440e-01 3.93759012e-01 ... 1.32136002e-01 1.80328593e-01 1.16365659e+00] [-1.03942705e-02 3.25304084e-02 -5.61401129e-01 ... 2.90381163e-01 4.40132380e-01 3.07072371e-01] [-1.71164393e-01 -2.05365881e-01 1.46883488e+00 ... 4.76061851e-01 1.64999872e-01 7.54457414e-01]] [[ 2.04477042e-01 2.83558190e-01 -3.27304274e-01 ... 
-7.68205762e-01 -1.66551739e-01 2.60405540e-01] [-7.90017769e-02 1.05280721e+00 1.47254601e-01 ... 4.64302450e-01 8.61509264e-01 6.03654146e-01] [-1.05172610e+00 -8.77604604e-01 1.34254143e-01 ... -1.29042968e-01 -3.49782318e-01 -6.10936046e-01] ... [-3.87282610e-01 -4.53609586e-01 4.31246698e-01 ... 6.76046610e-01 -8.57945561e-01 9.42383587e-01] [-5.38503587e-01 4.50980395e-01 -8.09069097e-01 ... -3.31901133e-01 -9.25059259e-01 9.41497162e-02] [-1.25028202e-02 3.99931222e-01 5.19782305e-01 ... -5.88530600e-01 -1.23367798e+00 -1.01096296e+00]] [[-7.60986507e-01 -4.87028122e-01 1.97286487e-01 ... -1.30756870e-01 -8.34723651e-01 -4.73178893e-01] [-1.59914756e+00 -2.44572490e-01 -5.31514406e-01 ... -1.66507870e-01 1.84334409e+00 3.89031917e-01] [ 4.80015427e-01 -6.40446424e-01 1.16246827e-01 ... -1.49169818e-01 6.35721982e-02 4.92272466e-01] ... [ 1.34123814e+00 -4.64335680e-01 1.14840066e+00 ... -6.26208425e-01 -1.93478775e+00 2.06084712e-03] [-6.16643310e-01 1.19841211e-02 -7.94962764e-01 ... 1.24119484e+00 -2.27924660e-01 -7.55932108e-02] [-8.22025120e-01 -2.12210953e-01 -2.80703336e-01 ... -6.28385782e-01 -5.35123110e-01 8.80787298e-02]] ... [[ 8.54336739e-01 -1.28761601e+00 1.21469581e+00 ... 6.09897435e-01 5.85796118e-01 9.40710783e-01] [-1.96067655e+00 4.02979255e-01 3.52336131e-02 ... -3.66955042e-01 -3.84714782e-01 1.05894186e-01] [ 9.54668224e-01 -3.08945149e-01 4.79575023e-02 ... 2.96141714e-01 2.20833793e-01 5.05288184e-01] ... [ 4.76495743e-01 5.42885303e-01 -2.37989709e-01 ... 3.29048812e-01 -3.94052863e-01 4.20557231e-01] [ 1.13560712e+00 8.37185793e-03 4.66464251e-01 ... -4.86813217e-01 -1.12946880e+00 3.05763036e-01] [-5.73703051e-01 5.19652426e-01 -2.76479691e-01 ... -8.68433416e-01 8.40098560e-01 9.06391084e-01]] [[-1.77143681e+00 3.68826985e-01 2.43149444e-01 ... -1.29409754e+00 -7.75975525e-01 -4.47027296e-01] [ 6.42802179e-01 5.51812947e-01 -3.81236762e-01 ... 
1.06636882e+00 -2.56234050e-01 1.42483711e+00] [-5.46455920e-01 1.42640555e-02 3.76985222e-01 ... 6.30842388e-01 -8.26726854e-01 1.36995316e+00] ... [ 1.43399671e-01 -2.35690862e-01 -9.33464527e-01 ... -8.51369575e-02 1.84628218e-01 5.05315542e-01] [-1.31497145e-01 9.56392050e-01 -1.17100215e+00 ... -4.51185614e-01 6.10158145e-01 4.67257313e-02] [ 2.71868855e-02 7.56303668e-02 4.94384795e-01 ... 1.45927799e+00 5.70258617e-01 9.32485700e-01]] [[ 5.00919044e-01 7.71422327e-01 4.05808985e-01 ... 9.27866474e-02 2.91279554e-01 2.99214691e-01] [-3.42418730e-01 -8.42789888e-01 1.90804116e-02 ... 6.03606343e-01 -3.97902161e-01 1.07217598e+00] [-1.26654744e+00 -4.22904134e-01 1.15625281e-02 ... 2.79081464e-01 -6.45264447e-01 1.03438163e+00] ... [ 2.46781498e-01 6.43471107e-02 -5.77458143e-01 ... -7.31751502e-01 -7.62921810e-01 -1.79734632e-01] [ 4.25584674e-01 2.02937201e-01 7.63139069e-01 ... -1.21724069e+00 -2.23849371e-01 -1.07464206e+00] [-9.39741015e-01 -1.01362133e+00 -4.22622979e-01 ... 4.91833597e-01 -1.08488953e+00 4.37762052e-01]]] [[[-2.54368871e-01 -2.20648006e-01 -1.31102502e+00 ... -1.20463264e+00 -7.40856603e-02 6.13704979e-01] [ 4.14835036e-01 5.61843336e-01 3.98942947e-01 ... 8.23525965e-01 -3.82329285e-01 -1.57151771e+00] [ 5.76558888e-01 3.94367546e-01 -9.79635715e-02 ... 1.49283886e+00 3.97562027e-01 -1.31611037e+00] ... [ 2.25991696e-01 -1.82432979e-01 -1.41710627e+00 ... 1.57423675e-01 -7.67655790e-01 1.12708330e+00] [ 2.34894961e-01 -7.38474846e-01 -8.19836557e-02 ... 5.86908996e-01 -3.37999403e-01 1.50427863e-01] [ 6.26569390e-01 -2.63896197e-01 6.03389084e-01 ... 2.80527234e-01 9.70185637e-01 1.48839545e+00]] [[ 9.00025249e-01 1.54137716e-01 3.52450341e-01 ... 1.36039948e+00 1.75467104e-01 -5.39303422e-01] [-2.40676831e-02 6.41192049e-02 -4.98843282e-01 ... -8.94598186e-01 1.27044415e+00 3.83489698e-01] [ 1.16266601e-01 5.13810992e-01 5.79679847e-01 ... 1.02680385e+00 2.97977984e-01 -5.19715548e-01] ... 
[ 7.21079409e-01 -4.02178705e-01 5.37556410e-01 ... 3.58138949e-01 -1.00332603e-01 -3.10910612e-01] [-6.86553717e-01 4.86932844e-01 6.54730380e-01 ... 1.48154008e+00 -8.41849297e-03 -4.14180845e-01] [-4.61396575e-01 4.84759599e-01 7.97526836e-01 ... 5.13760448e-01 6.38511360e-01 1.42495251e+00]] [[ 3.24874163e-01 3.62105787e-01 -4.68054041e-02 ... 5.26170671e-01 -7.52654493e-01 6.05898678e-01] [-4.25775081e-01 -1.02491617e+00 1.10070772e-01 ... 8.66068840e-01 -1.00966424e-01 8.06028247e-01] [-4.04292122e-02 3.95191699e-01 -4.75090533e-01 ... -4.69314843e-01 -4.01337385e-01 5.59211433e-01] ... [-2.14716941e-02 2.04835653e-01 2.69671679e-01 ... 2.43495002e-01 -1.02971303e+00 -4.47509795e-01] [-4.12658542e-01 3.26091424e-02 -8.86422470e-02 ... 5.73434234e-01 -1.63759553e+00 -9.23198104e-01] [ 3.95347506e-01 4.41813946e-01 1.44846290e-01 ... 1.31635678e+00 8.52822721e-01 1.19376451e-01]] ... [[ 7.93681562e-01 8.74002337e-01 -1.09489405e+00 ... 3.68468702e-01 6.34718984e-02 -3.33352201e-02] [ 1.23646462e+00 -1.64584041e-01 -5.76239228e-01 ... 5.99310040e-01 1.20580196e-01 2.80531019e-01] [ 1.19739860e-01 3.06870788e-01 -6.94194913e-01 ... 9.45706144e-02 -5.70913494e-01 -7.47110322e-02] ... [-4.97978210e-01 9.01657939e-01 -1.10262215e+00 ... 1.19738007e+00 -9.59586263e-01 -1.57996237e-01] [-4.03643958e-02 -5.21415591e-01 -1.84408650e-01 ... -9.34289098e-01 -2.21520096e-01 4.66022521e-01] [-7.86503732e-01 1.71963379e-01 7.81150818e-01 ... 5.44343293e-01 -1.27754784e+00 -1.68907475e-02]] [[ 2.47635365e-01 -7.45397806e-01 1.27707601e+00 ... -1.07598877e+00 3.17143351e-01 -8.25483680e-01] [-2.43284568e-01 5.04853902e-03 -3.51895154e-01 ... 9.26245570e-01 4.57382113e-01 3.42200935e-01] [ 6.09319627e-01 -9.96268570e-01 7.23199785e-01 ... 1.09940493e+00 -1.00021803e+00 4.46546264e-02] ... [-3.49232227e-01 -7.81183084e-03 8.47363055e-01 ... -1.00394107e-01 -4.86021161e-01 5.18836796e-01] [ 2.09294051e-01 -1.17559552e-01 5.38270891e-01 ... 
-4.15663064e-01 -6.98992908e-01 -3.43781948e-01] [ 4.82754558e-01 1.88848823e-02 -5.24748206e-01 ... 4.07212555e-01 -4.72820908e-01 4.20875013e-01]] [[-8.09882045e-01 3.18248123e-01 -4.91733283e-01 ... 1.03766412e-01 4.96088088e-01 -5.06076276e-01] [-4.45743382e-01 8.18215728e-01 1.08795428e+00 ... 3.40403736e-01 -5.94729602e-01 3.21771353e-01] [-4.69819903e-01 -8.01114082e-01 6.86318994e-01 ... 6.05331361e-01 1.10922873e+00 1.12200451e+00] ... [-1.06745434e+00 9.30358097e-02 -6.16698973e-02 ... -1.09526122e+00 -1.98204085e-01 -1.27177489e+00] [ 2.22046405e-01 -7.36527503e-01 -1.00731879e-01 ... 8.55392575e-01 -7.77025759e-01 1.02727127e+00] [-1.03157675e+00 -7.45499074e-01 -2.06887460e+00 ... -4.68307674e-01 1.26078761e+00 -5.96320212e-01]]] [[[-6.47962749e-01 -1.54419708e+00 -3.10946345e-01 ... -1.00592911e+00 5.46369851e-01 -3.84924859e-01] [ 4.55418289e-01 4.58632708e-01 2.77512878e-01 ... -3.50742489e-02 1.70704353e+00 -5.72275996e-01] [ 5.36659300e-01 3.93393546e-01 -1.12578392e+00 ... -8.78177762e-01 3.02255630e-01 3.80069554e-01] ... [ 6.47164928e-03 2.41105527e-01 -8.21377397e-01 ... 6.13708198e-01 -1.18050091e-01 -9.27699327e-01] [ 1.67939261e-01 1.21797144e-01 2.81351835e-01 ... 1.51153016e+00 -6.77211702e-01 -3.16269606e-01] [ 3.11422378e-01 8.71402919e-01 -6.21899009e-01 ... 9.78999197e-01 -1.34387314e-01 8.24962974e-01]] [[-3.38336945e-01 1.37238964e-01 -5.17048128e-03 ... -1.25323069e+00 6.32261574e-01 5.11729240e-01] [ 2.53217518e-01 2.55267024e-01 -5.98485827e-01 ... 1.03267646e+00 -4.75131482e-01 -9.07200575e-02] [ 5.83596490e-02 6.60721004e-01 5.16496480e-01 ... -5.16480744e-01 -7.77779162e-01 2.87072927e-01] ... [-7.57656157e-01 -7.30499849e-02 1.47950792e+00 ... -3.28110754e-01 -5.83160937e-01 7.25171685e-01] [-6.56483293e-01 -8.01870748e-02 -6.36589944e-01 ... -2.95017093e-01 -1.39194059e+00 1.11210525e+00] [-9.79799807e-01 -8.08604062e-01 -7.51366615e-01 ... 
-3.52168292e-01 -2.52761036e-01 -4.08440053e-01]] [[-8.10648501e-01 8.97703767e-01 -1.92272455e-01 ... 1.09014191e-01 -7.90470615e-02 9.81105089e-01] [ 7.36725390e-01 -1.04078412e-01 9.82050657e-01 ... -1.05414763e-01 4.57361132e-01 -3.31817508e-01] [ 2.38038436e-01 -3.41817528e-01 -9.22896340e-02 ... 1.57535744e+00 -6.08800709e-01 -4.52039421e-01] ... [-2.92439580e-01 -2.73125693e-02 -9.00100619e-02 ... 7.42597342e-01 -3.41648161e-01 3.67934138e-01] [ 1.15157807e+00 -1.99071676e-01 7.91430831e-01 ... 1.26377478e-01 1.46239495e+00 -1.63188100e-01] [-1.81315750e-01 -4.60489392e-02 7.63134301e-01 ... -6.30807996e-01 5.07917941e-01 -7.83297420e-01]] ... [[ 1.29793063e-01 -8.00899565e-01 -2.18925774e-01 ... -1.71196386e-01 -8.30744326e-01 6.49332166e-01] [-9.97115374e-01 4.75528300e-01 2.66418606e-01 ... -1.07463205e+00 4.15611155e-02 -2.19067946e-01] [-2.62741029e-01 8.55409428e-02 -1.58312190e+00 ... 4.62685585e-01 9.57935333e-01 -4.33640182e-01] ... [ 1.73269004e-01 2.80944914e-01 7.58779764e-01 ... 1.55972272e-01 -9.68467951e-01 1.35528898e+00] [ 3.64032388e-01 -2.67270189e-02 1.13238537e+00 ... -2.24025905e-01 -1.19226754e+00 -1.20817378e-01] [-1.59762695e-01 1.23697090e+00 -1.28226268e+00 ... -3.56677353e-01 -3.34838480e-01 3.64837974e-01]] [[ 4.63014215e-01 -1.03603137e+00 3.48568827e-01 ... -2.09145978e-01 2.00669861e+00 -4.61234331e-01] [-6.15702093e-01 3.91148686e-01 4.86926913e-01 ... 3.34328175e-01 -3.32787424e-01 5.41453958e-01] [ 1.78943932e-01 2.73950368e-01 -1.55389711e-01 ... -7.77186304e-02 -3.38473916e-01 -9.23478961e-01] ... [ 2.89481580e-01 3.06606472e-01 -9.98630464e-01 ... 3.78450043e-02 4.72936630e-01 -3.93773198e-01] [ 4.22523409e-01 1.60244390e-01 -1.22927797e+00 ... -4.35053974e-01 -3.75213593e-01 4.30516247e-03] [ 8.97081137e-01 -1.29638836e-01 1.95467204e-01 ... -4.09409761e-01 -3.67785916e-02 -4.29020077e-01]] [[-1.80678189e-01 -1.22103179e+00 -6.25170767e-01 ... 
6.38002634e-01 6.69556677e-01 -1.87510923e-02] [ 2.11740676e-02 -3.18104208e-01 5.53132534e-01 ... 4.37084228e-01 -7.31768133e-03 -4.90708172e-01] [ 4.46756959e-01 -1.03299630e+00 -2.74223864e-01 ... 4.09252137e-01 2.51787543e-01 -5.06372213e-01] ... [-9.82568681e-01 -5.03962580e-03 -1.24936521e+00 ... 1.37561738e+00 -1.98363587e-01 3.65117848e-01] [ 1.25798512e+00 1.22011352e+00 -7.56413758e-01 ... 3.64037827e-02 1.36112177e+00 -1.16601992e+00] [-2.57479757e-01 1.12595296e+00 -7.09533095e-02 ... 1.17169607e+00 4.02591884e-01 -5.71455002e-01]]] [[[-7.94864833e-01 -4.85211402e-01 6.97500110e-01 ... 6.29186213e-01 3.10390860e-01 1.74365079e+00] [-5.37018478e-01 -8.45882017e-03 -9.32964981e-01 ... -6.20109022e-01 -1.29284430e-02 -6.74227476e-01] [ 4.83641118e-01 5.81362009e-01 4.13393825e-01 ... -5.52828193e-01 2.16375560e-01 2.29060397e-01] ... [-5.94371140e-01 -3.51182252e-01 -1.65931776e-01 ... -1.36894262e+00 8.85932982e-01 1.11728024e+00] [-3.07200611e-01 -6.22391999e-01 -4.55080807e-01 ... 1.58550113e-01 -1.40481329e+00 -8.36744130e-01] [-4.20843810e-01 9.05381981e-03 -4.30835307e-01 ... 1.33034539e+00 -5.12183666e-01 7.99521685e-01]] [[-3.28077286e-01 -7.50413060e-01 -5.87955415e-01 ... -5.73061109e-01 7.07285047e-01 -4.23853189e-01] [ 5.73821701e-02 -4.74109977e-01 -9.14713889e-02 ... 1.15274835e+00 6.52911723e-01 -7.49227345e-01] [ 2.49780640e-01 9.61923838e-01 4.31371450e-01 ... 6.31837010e-01 -3.54547530e-01 6.90863311e-01] ... [-4.86280143e-01 3.64212058e-02 5.29134631e-01 ... 1.94085509e-01 1.10330850e-01 9.68026042e-01] [-9.15865719e-01 -5.52207589e-01 8.26321065e-01 ... -9.38878879e-02 -3.21799725e-01 -1.24901617e+00] [ 1.02682757e+00 6.60964966e-01 2.07546130e-01 ... 4.38916296e-01 2.16603711e-01 5.65066516e-01]] [[ 4.17502612e-01 -1.00588369e+00 2.13484928e-01 ... 2.72675693e-01 -4.69746292e-02 8.17395627e-01] [-3.54403675e-01 -8.87754634e-02 -8.84275377e-01 ... 
3.21842074e-01 -5.19859195e-01 5.28981984e-01] [-8.36003050e-02 1.41181141e-01 -5.34841001e-01 ... 3.96217465e-01 2.56653190e-01 4.49385494e-01] ... [ 3.52017403e-01 4.62258905e-01 4.36356552e-02 ... -6.67900741e-01 7.37365723e-01 3.34368795e-01] [ 1.50019407e+00 1.60423175e-01 -2.79601395e-01 ... -3.43859255e-01 1.22374570e+00 9.18644249e-01] [-1.17842662e+00 9.31984782e-02 -6.60279512e-01 ... -3.97750199e-01 2.89926648e-01 9.54531610e-01]] ... [[ 5.23385882e-01 5.51849604e-01 5.98309994e-01 ... -1.21335633e-01 -4.58131701e-01 1.04040094e-01] [-2.77755409e-01 -3.54348421e-01 -6.95996165e-01 ... 5.96522689e-01 -5.66866994e-01 -9.09981132e-01] [-1.10951476e-01 -4.20930713e-01 5.30460119e-01 ... 2.42086157e-01 -1.63429916e-01 1.03109717e+00] ... [-1.11044817e-01 4.14003491e-01 -7.10178673e-01 ... 2.12930053e-01 8.74186993e-01 1.67790219e-01] [ 1.35733140e+00 -5.12091160e-01 -1.83049726e+00 ... 1.33937895e-01 1.31312773e-01 -5.35358429e-01] [ 7.04233870e-02 5.08064806e-01 -1.92453671e+00 ... -2.25125760e-01 -7.81857789e-01 -7.50936687e-01]] [[-7.24550903e-01 1.67403683e-01 5.29882729e-01 ... -3.09320003e-01 -4.12601121e-02 1.92522109e-01] [-2.67942131e-01 6.65008008e-01 6.54103100e-01 ... 2.26908177e-01 -5.54188967e-01 5.69972321e-02] [-8.65684524e-02 1.27157819e+00 -6.10605478e-02 ... 1.99863911e-01 1.64481148e-01 1.15267408e+00] ... [ 1.10942602e+00 1.10992384e+00 -1.21258283e+00 ... 7.34865069e-01 -3.13900501e-01 3.34946066e-01] [ 1.74331561e-01 5.18593788e-01 -4.81042892e-01 ... 3.15410346e-01 2.64545232e-01 1.09358981e-01] [ 3.66934448e-01 3.68910849e-01 1.14770484e+00 ... -3.27993512e-01 1.23854947e+00 -1.14318609e+00]] [[-1.63824856e+00 7.73508370e-01 -9.81783643e-02 ... -4.06244576e-01 -3.10418904e-01 1.14597809e+00] [-3.01927514e-02 2.89214849e-01 8.35839927e-01 ... -3.41775179e-01 -8.60488832e-01 1.67914614e-01] [ 7.49929309e-01 3.41773897e-01 5.33607543e-01 ... -6.70622513e-02 3.31525534e-01 -1.87567249e-01] ... 
[ 1.63675952e+00 1.20120037e+00 -3.42098415e-01 ... 6.44125938e-01 9.26143602e-02 9.52472329e-01] [-7.01190710e-01 -3.84306818e-01 9.30185914e-01 ... -7.52068222e-01 -1.62093759e-01 2.03217193e-02] [ 1.19512427e+00 1.01553448e-01 -5.46510756e-01 ... -2.66294986e-01 6.67456686e-01 -6.82845414e-01]]]] [[[[ 2.46214181e-01 4.25280690e-01 -9.07754898e-01 ... 4.68011081e-01 8.52599919e-01 4.05470550e-01] [ 6.25374377e-01 -2.44900227e-01 4.30530131e-01 ... 2.72267669e-01 -2.88083777e-03 -5.71332216e-01] [-4.92198765e-01 1.57562584e-01 2.97782809e-01 ... -1.36356175e-01 -1.26201555e-01 -6.77077055e-01] ... [-2.08616570e-01 -9.85047877e-01 -1.01491356e+00 ... 1.22887142e-01 -7.22369075e-01 -1.28085613e+00] [ 1.19389009e+00 -5.82240641e-01 5.63433588e-01 ... -4.29795027e-01 3.69279861e-01 -3.28935474e-01] [ 2.75985658e-01 -2.41942294e-02 1.21964562e+00 ... 1.30633861e-01 -5.51865026e-02 1.18862593e+00]] [[-4.18866545e-01 -3.37559462e-01 9.96658266e-01 ... -6.80165172e-01 6.11938715e-01 1.83358759e-01] [-1.39100409e+00 8.29233348e-01 -5.27556539e-01 ... 4.45111424e-01 1.58179060e-01 -5.92713118e-01] [ 5.17760158e-01 -2.64519393e-01 -4.35612381e-01 ... -1.64542782e+00 -6.58256650e-01 2.94058621e-01] ... [-5.71701646e-01 -4.24564838e-01 -1.51220441e-01 ... 3.10770780e-01 -7.08339930e-01 -1.70976114e+00] [ 7.27346420e-01 -3.29186499e-01 -1.55064988e+00 ... -8.31637800e-01 -2.32199356e-01 -6.48974717e-01] [-1.49056709e+00 -8.02868068e-01 -5.55633187e-01 ... 3.66852939e-01 -8.27413738e-01 9.94867146e-01]] [[ 7.72189051e-02 1.02703345e+00 -5.71153820e-01 ... -2.49066919e-01 -2.21484780e-01 -8.35686743e-01] [ 3.42459172e-01 -8.62118959e-01 -5.02999485e-01 ... 4.74467099e-01 4.71248209e-01 9.00377333e-01] [ 3.80374759e-01 -1.25736818e-01 3.67895275e-01 ... 6.48395300e-01 7.55942939e-03 5.20574391e-01] ... [ 1.39741683e+00 -6.25497818e-01 -1.48503661e+00 ... 2.40629941e-01 1.03921235e+00 -7.66370893e-01] [ 5.78853965e-01 1.52717495e+00 3.07003483e-02 ... 
3.30008060e-01 -5.79700589e-01 6.92820072e-01] [-4.87949997e-01 -1.54117197e-01 7.67583132e-01 ... -1.33676052e+00 2.19741747e-01 6.90920591e-01]] ... [[-5.62652528e-01 -1.47171390e+00 1.20151591e+00 ... 8.50072689e-03 -8.52687716e-01 -2.87224650e-01] [-1.05519235e+00 1.24006367e+00 7.00965643e-01 ... -2.31604886e+00 1.09783232e+00 5.24028122e-01] [ 7.02376813e-02 -2.05442473e-01 -1.61952585e-01 ... 1.41485751e+00 -5.47435522e-01 -6.30652681e-02] ... [ 3.58459763e-02 2.13406056e-01 -3.82017642e-01 ... 1.01475120e+00 8.63453031e-01 -4.54923570e-01] [-8.42712283e-01 3.61125231e-01 2.70290792e-01 ... -1.08013153e+00 -1.73137279e-03 5.06833911e-01] [ 9.05702472e-01 5.69310308e-01 7.54077852e-01 ... -4.44145560e-01 -1.45330060e+00 -5.22985101e-01]] [[-5.30036271e-01 -2.26994693e-01 3.48145634e-01 ... -5.97379208e-01 1.14746702e+00 -1.41874962e-02] [-1.08259082e+00 -2.95596495e-02 -1.14388072e+00 ... -5.32181449e-02 1.15380335e+00 1.83712423e-01] [ 5.35932839e-01 -2.04453245e-01 -9.20917317e-02 ... 3.15003783e-01 -2.28043720e-01 -1.08011043e+00] ... [ 4.12881196e-01 2.51018286e-01 -2.95573533e-01 ... -5.42210519e-01 1.01505315e+00 -1.58038244e-01] [-6.39505684e-01 -5.73257148e-01 1.75297213e+00 ... -9.17991638e-01 9.45130177e-03 -1.09806168e+00] [ 9.13563013e-01 1.17081352e-01 7.45294988e-02 ... 3.40581179e-01 7.04949439e-01 7.92014837e-01]] [[ 2.79236317e-01 2.76755869e-01 -2.32965872e-01 ... -4.06592190e-01 3.93232107e-01 -3.69527608e-01] [-1.21313953e+00 -3.29309583e-01 7.53213644e-01 ... 1.34962499e+00 4.30609852e-01 -4.93337996e-02] [ 1.17841467e-01 -2.49809876e-01 -2.12084815e-01 ... -8.70795667e-01 -8.64866972e-01 2.04211727e-01] ... [-1.46843448e-01 -1.08544183e+00 1.12777211e-01 ... 3.29947025e-01 -6.93307281e-01 2.10252807e-01] [ 3.16419005e-01 1.09735370e+00 2.10507348e-01 ... 4.41895008e-01 3.54841411e-01 -7.66848207e-01] [-4.05669779e-01 -5.95136523e-01 -1.40240148e-01 ... 
1.34720254e+00 7.63942525e-02 8.77368271e-01]]] [[[ 8.99584472e-01 6.08219445e-01 -4.92011696e-01 ... -5.38850129e-01 -2.60903239e-01 -3.47819775e-01] [ 1.22880411e+00 -5.50087750e-01 2.56872684e-01 ... -1.73170775e-01 -1.50693133e-01 8.55686724e-01] [ 5.75534999e-01 8.12653124e-01 8.93745199e-03 ... 7.72103071e-01 6.41618013e-01 -9.50551331e-02] ... [ 2.68395901e-01 -7.36627638e-01 5.90005875e-01 ... -6.18025005e-01 -8.82631093e-02 -2.34450996e-01] [ 5.22634625e-01 2.06599310e-01 5.52738070e-01 ... -5.53094983e-01 2.92815834e-01 -1.38054419e+00] [-2.20055676e+00 1.78167105e-01 -3.30957286e-02 ... -1.79699391e-01 1.95646727e+00 7.03695118e-02]] [[ 4.69647720e-03 -8.36241961e-01 6.82402134e-01 ... 1.54995596e+00 1.03928894e-02 4.46749896e-01] [-6.91164076e-01 9.25087810e-01 1.54288685e+00 ... 3.86801749e-01 2.93547153e-01 1.90774892e-02] [ 5.89872539e-01 -2.16748968e-01 -8.73388648e-01 ... 9.30580795e-02 7.47708321e-01 -1.45373881e-01] ... [-1.17658806e+00 -5.01394033e-01 6.02553248e-01 ... -1.81341335e-01 -1.20976850e-01 -6.63873553e-02] [ 7.38585889e-01 -9.55221593e-01 6.24760449e-01 ... 1.65330768e-01 -4.11858074e-02 6.74107015e-01] [ 7.11126864e-01 -4.82338130e-01 1.36703169e+00 ... -1.51809484e-01 -3.23133528e-01 5.51028132e-01]] [[ 7.26731122e-01 -1.35610962e+00 -2.32246682e-01 ... 3.24679971e-01 5.20567596e-01 1.57067031e-01] [ 1.22572339e+00 2.94481277e-01 1.41971797e-01 ... -8.95833492e-01 4.46704209e-01 1.15329885e+00] [ 1.98316172e-01 1.03821123e+00 1.34589291e+00 ... 4.25538599e-01 -1.17758787e+00 6.18834019e-01] ... [-3.03473771e-01 1.31046548e-01 -7.91837037e-01 ... -2.48296753e-01 -3.91770810e-01 8.59943330e-01] [ 2.35663772e-01 -3.25287849e-01 -8.12513292e-01 ... -8.86725903e-01 2.33582929e-02 -1.80445537e-01] [ 7.44348243e-02 7.87134111e-01 5.75783551e-01 ... 1.31060791e+00 9.91626620e-01 -1.47665727e+00]] ... [[ 4.66610938e-01 9.83887970e-01 1.73079386e-01 ... 
-9.57218945e-01 1.38112390e+00 1.26623797e+00] [ 9.71147120e-01 5.93960106e-01 1.05668612e-01 ... -7.95458555e-01 8.26191962e-01 9.10104156e-01] [ 1.11487985e+00 -3.93948793e-01 -6.82443142e-01 ... -1.62402689e+00 1.05399955e-02 5.66537917e-01] ... [-2.02102169e-01 -6.48899615e-01 -3.00545305e-01 ... 1.22545588e+00 -5.36739707e-01 9.97140229e-01] [-6.18042946e-01 -6.61667228e-01 -3.56552005e-01 ... 7.39306688e-01 -2.86826700e-01 -5.51486135e-01] [ 3.07851046e-01 1.24405348e+00 -1.61417738e-01 ... -8.02201271e-01 1.72929573e+00 4.05390501e-01]] [[-1.21002328e+00 9.66552436e-01 -9.28494692e-01 ... 5.54700553e-01 -2.24613056e-01 9.78108123e-02] [-9.85674024e-01 3.20101470e-01 -1.04562640e+00 ... 4.61705178e-01 1.18097103e+00 -1.00543904e+00] [-2.94832259e-01 1.26110244e+00 6.26751244e-01 ... -3.85718107e-01 -2.13879019e-01 2.12774932e-01] ... [ 1.13362126e-01 -2.99442887e-01 4.47088540e-01 ... -8.39796782e-01 5.64290583e-01 -9.91294459e-02] [-6.92667782e-01 -1.02719915e+00 -1.33515343e-01 ... 1.13687849e+00 4.29948568e-01 -7.01612651e-01] [ 7.75588304e-02 -1.17538297e+00 -1.26717865e-01 ... 4.95231718e-01 5.36261082e-01 -4.64095891e-01]] [[-2.90623516e-01 9.62429419e-02 -1.02010536e+00 ... -9.12573457e-01 -5.66507816e-01 -7.09663033e-02] [ 2.01440886e-01 3.08659822e-01 3.41045707e-02 ... -4.23659474e-01 -7.01152459e-02 9.93500426e-02] [-1.27632380e+00 8.71273220e-01 1.13139045e+00 ... -1.48277915e+00 3.06515455e-01 -2.46613860e-01] ... [ 3.51235159e-02 -2.13027865e-01 2.53885806e-01 ... 1.57537889e+00 2.05427551e+00 6.00158334e-01] [-1.20986891e+00 4.59784746e-01 2.36367539e-01 ... 7.07140386e-01 3.92395914e-01 -1.41063130e+00] [ 5.34140408e-01 5.40333450e-01 -9.48682055e-02 ... -4.82021779e-01 5.00230193e-01 -4.46471781e-01]]] [[[-3.05540264e-01 -2.54620862e+00 -3.84860724e-01 ... -1.16750443e+00 3.70608643e-02 -6.08391345e-01] [-8.01255107e-02 -1.14905670e-01 -4.35687276e-03 ... 
-4.73858684e-01 8.00260067e-01 -4.51770842e-01] [-1.12058210e+00 7.33223408e-02 -2.87472486e-01 ... 5.69579482e-01 -1.59219727e-01 -1.88250303e-01] ... [-5.35854638e-01 1.15193529e-02 8.83864105e-01 ... 5.35743475e-01 -4.36438387e-03 -6.89349353e-01] [-8.13992500e-01 3.27434614e-02 -1.45095611e+00 ... -7.13886201e-01 5.09451509e-01 2.33663648e-01] [ 1.57461211e-01 7.07679033e-01 7.34527335e-02 ... 1.24340785e+00 6.22279286e-01 -2.98356593e-01]] [[ 1.84657431e+00 -2.59291530e-01 9.73499477e-01 ... 1.61863148e+00 5.60160279e-01 3.09840083e-01] [-4.88288224e-01 -5.58652401e-01 -1.28285974e-01 ... 6.96400106e-01 -2.96175778e-02 4.61404562e-01] [ 3.58371884e-01 8.65831733e-01 7.23769128e-01 ... -6.48182750e-01 -3.12849432e-01 -6.24177873e-01] ... [ 9.94551122e-01 5.62098324e-01 -4.92049038e-01 ... 7.87967563e-01 -8.28973353e-01 7.20153749e-01] [ 7.81109512e-01 4.22416240e-01 -1.44190907e-01 ... -2.13374570e-01 4.34066206e-01 -3.37378919e-01] [-1.01096320e+00 7.65244603e-01 4.88960266e-01 ... -8.07694674e-01 1.84134102e+00 -4.21457648e-01]] [[-3.76649022e-01 -7.14888453e-01 -7.64085412e-01 ... -9.59612370e-01 -4.92324769e-01 -3.83690685e-01] [-5.70536315e-01 -1.31605840e+00 -2.53475368e-01 ... 1.02695370e+00 -5.05183399e-01 -2.41399072e-02] [-6.57605469e-01 3.67612600e-01 -4.51322019e-01 ... 1.00884251e-01 -7.66094804e-01 -5.00644505e-01] ... [-1.23371387e+00 -2.36720890e-01 -3.05179864e-01 ... 2.94323862e-01 8.35556865e-01 5.09574533e-01] [-4.16362137e-01 -2.56537169e-01 1.04768610e+00 ... -1.20371580e-01 -4.20297891e-01 1.20687509e+00] [ 7.49170184e-01 1.03502500e+00 4.85351652e-01 ... -2.87903756e-01 -8.25088441e-01 7.52885565e-02]] ... [[-4.89764363e-01 -4.91642803e-01 1.30716860e-01 ... 2.59705096e-01 7.71239996e-01 2.04182193e-01] [ 1.89134192e+00 1.50562122e-01 -2.13151455e-01 ... -6.78663850e-01 2.94663310e-01 -1.25826597e-01] [ 1.30957329e+00 2.09918633e-01 6.00673437e-01 ... -9.56603810e-02 -9.25759077e-02 -3.17308992e-01] ... 
[-4.17356074e-01 6.21403933e-01 2.52255112e-01 ... -6.10493064e-01 -8.09188128e-01 1.04329002e+00] [-2.57970303e-01 -7.43045747e-01 1.04401612e+00 ... 8.11766926e-03 8.46087337e-01 3.55419308e-01] [ 6.30923286e-02 -2.23678604e-01 -2.16406599e-01 ... -5.30479968e-01 -1.19065738e+00 -3.53000402e-01]] [[-6.92435384e-01 -3.71545218e-02 -3.45540494e-01 ... -3.37769866e-01 -9.60486531e-01 -2.53156990e-01] [ 1.51195347e-01 8.56555104e-01 3.82905185e-01 ... 8.30049217e-01 1.02712691e+00 4.00630057e-01] [-1.63109636e+00 2.03090414e-01 -6.28361225e-01 ... 2.39514887e-01 -6.95640817e-02 -6.11139476e-01] ... [-1.09043241e-01 5.83182573e-01 -2.09599696e-02 ... -6.13604188e-01 -2.02518761e-01 1.84966087e-01] [-4.57796007e-01 3.75921071e-01 -2.94228524e-01 ... -9.84737039e-01 2.72234827e-01 9.62532461e-01] [-8.32551897e-01 5.64330459e-01 1.41740322e+00 ... 1.36165157e-01 -3.69022936e-01 -4.85209115e-02]] [[ 1.75175881e+00 -1.29775509e-01 3.29220712e-01 ... -9.86755192e-01 -1.33171856e+00 -5.64227462e-01] [-1.09286356e+00 -8.69342387e-01 8.34794819e-01 ... -2.93604821e-01 -1.26235867e+00 -5.87672710e-01] [ 8.53197515e-01 -1.01827286e-01 -8.47595930e-01 ... 9.52988923e-01 8.66607368e-01 1.02874732e+00] ... [-6.73397541e-01 -4.24645007e-01 6.89034164e-01 ... 3.41693074e-01 -9.33058441e-01 1.18698716e+00] [ 8.15368712e-01 1.01218867e+00 8.64118099e-01 ... -4.22694050e-02 3.60510945e-01 -2.32953504e-01] [ 1.31571603e+00 6.49356365e-01 8.67584199e-02 ... -5.21708071e-01 9.70762849e-01 -5.49051106e-01]]] [[[ 6.13301992e-01 -6.53651237e-01 1.25687587e+00 ... 4.77525115e-01 -2.03063831e-01 7.62822151e-01] [-3.08390874e-02 -8.25172305e-01 -1.11232066e+00 ... -6.13710999e-01 1.16953838e+00 7.03997970e-01] [ 1.39617455e+00 1.51437187e+00 -8.16711634e-02 ... 5.58580101e-01 6.10125780e-01 -3.85095403e-02] ... [ 1.25192630e+00 -8.50498676e-01 2.55392194e-01 ... -4.09923345e-01 -5.45304120e-01 -1.01286221e+00] [-8.91647816e-01 5.95510542e-01 -1.30949140e+00 ... 
3.78991932e-01 -1.68413997e-01 2.99293250e-01] [ 7.50611186e-01 -2.78738141e-01 4.00389016e-01 ... 7.19403446e-01 -4.86468673e-02 5.80733120e-01]] [[-7.25064695e-01 2.49386162e-01 -3.45825642e-01 ... -4.14839536e-01 9.49143589e-01 -1.54685557e-01] [ 2.76737213e-01 2.38974571e-01 -1.38468981e+00 ... -1.09552276e+00 -1.14256479e-01 8.69961202e-01] [-1.00198865e-01 2.84161270e-01 6.06090426e-01 ... 2.74641335e-01 3.69461834e-01 -3.21384311e-01] ... [-1.74762204e-01 -1.19144067e-01 8.93008351e-01 ... 3.15412194e-01 -4.36835550e-02 2.14113206e-01] [ 6.93969190e-01 -1.04708862e+00 -7.58066922e-02 ... -4.78608668e-01 -8.04077983e-01 -2.56588548e-01] [ 4.97449130e-01 4.38457519e-01 7.79988110e-01 ... 3.28287780e-01 5.16536415e-01 -1.46132612e+00]] [[ 6.64971098e-02 3.22390378e-01 2.94996023e-01 ... 3.45342249e-01 4.92720217e-01 -1.10492580e-01] [ 4.49381590e-01 -1.64675593e-01 6.29742205e-01 ... 6.75895929e-01 -5.54348588e-01 -8.37305427e-01] [ 5.29945409e-03 -1.42759752e+00 2.32165549e-02 ... 5.69489226e-02 -1.15721501e-01 1.36925900e+00] ... [ 4.61578131e-01 7.67846584e-01 -8.40674341e-01 ... 2.91108519e-01 3.43333840e-01 2.74070859e-01] [-4.92757499e-01 2.82853633e-01 -9.90186214e-01 ... -1.87656075e-01 -4.24677700e-01 5.84144175e-01] [ 9.28528428e-01 -5.54081440e-01 -1.07820952e+00 ... -3.49785089e-01 -4.04705293e-02 -1.71430483e-01]] ... [[ 2.89553046e-01 1.88185036e-01 5.33001065e-01 ... -1.76613581e+00 -5.30740857e-01 -1.76751584e-01] [-5.49412608e-01 2.97033731e-02 -8.95453334e-01 ... 6.47713542e-01 5.93893588e-01 -1.24956250e+00] [-4.60687876e-01 -6.74500644e-01 -2.50220776e-01 ... -5.41116357e-01 4.08519685e-01 -1.03323638e+00] ... [-3.93811643e-01 6.40158132e-02 -5.68880618e-01 ... 3.37859869e-01 1.04841125e+00 -3.85201156e-01] [ 3.86751592e-02 3.47939469e-02 -1.02584684e+00 ... -1.26555967e+00 -5.39081037e-01 2.82803148e-01] [ 1.95651674e+00 -1.05802548e+00 3.40228230e-01 ... 
-5.33677757e-01 -5.06136358e-01 -9.50574398e-01]] [[ 3.56372863e-01 7.67479479e-01 4.80884701e-01 ... -3.18335265e-01 -6.31563365e-01 1.07837212e+00] [-9.17081356e-01 -3.93710464e-01 -1.42938030e+00 ... -1.06160975e+00 -1.93406940e-01 -2.48770028e-01] [-1.46451461e+00 -3.45650196e-01 -7.15684891e-01 ... 2.00176656e-01 8.64163995e-01 2.97172904e-01] ... [ 4.61972356e-01 7.03031242e-01 4.72412586e-01 ... 1.33586884e-01 -2.36367458e-03 1.28099465e+00] [ 6.79762244e-01 -4.53023374e-01 7.64011204e-01 ... -3.67544889e-02 -1.91454753e-01 -4.34203327e-01] [ 2.63672918e-02 -2.59160638e-01 3.39485079e-01 ... 2.56671816e-01 -1.04336143e+00 1.66728795e-01]] [[-4.27130580e-01 -4.92392302e-01 4.61895585e-01 ... 3.64751905e-01 -1.87688559e-01 -6.90328121e-01] [-1.33738220e-01 -4.92254823e-01 5.92475772e-01 ... -1.23373473e+00 -1.35974777e+00 -5.78163326e-01] [-3.02591980e-01 3.38479847e-01 -2.84604818e-01 ... 3.10499221e-01 7.34350264e-01 -2.08359301e-01] ... [ 1.45313609e+00 -1.03624511e+00 3.59763831e-01 ... 2.81504691e-01 -9.54248726e-01 6.33865416e-01] [ 9.23659384e-01 6.27237022e-01 1.07875228e+00 ... 1.33429837e+00 -1.31465986e-01 2.30147099e+00] [-9.82654870e-01 -1.21830560e-01 1.24417281e+00 ... -6.83556676e-01 3.41774583e-01 -1.27150461e-01]]] [[[-1.04002631e+00 -1.34014964e-01 3.77286583e-01 ... 1.20801532e+00 1.30624795e+00 8.26904953e-01] [-1.96041420e-01 -1.33965030e-01 -3.07418674e-01 ... -3.66155714e-01 -9.53875840e-01 -2.58860230e-01] [ 4.96126205e-01 -5.04376769e-01 -6.09639883e-01 ... -3.70946556e-01 8.29687774e-01 5.32243013e-01] ... [-8.07957828e-01 -1.28969002e+00 5.13509989e-01 ... 2.77964264e-01 -4.02314544e-01 5.11085331e-01] [-5.17515503e-02 1.82837233e-01 -8.43068361e-01 ... 1.73807704e+00 -4.90184695e-01 1.97664350e-01] [ 3.57391715e-01 -1.10412352e-01 1.62732556e-01 ... 5.32206357e-01 -1.42488107e-01 -5.81553340e-01]] [[-1.48862258e-01 -2.07394809e-01 2.58391142e-01 ... 
-6.12691820e-01 4.20498371e-01 -2.51922756e-01] [ 2.29577914e-01 -4.04457510e-01 1.18065635e-02 ... 1.33303618e+00 -5.08367062e-01 -1.35547006e+00] [ 1.35398245e+00 6.30868793e-01 -1.87872529e-01 ... -9.73041475e-01 -2.30540425e-01 -2.25713775e-01] ... [-7.16552794e-01 2.12281182e-01 7.76081607e-02 ... 2.19687209e-01 2.11067125e-01 -9.22922015e-01] [-1.83295816e-01 -1.13338821e-01 2.01104283e-01 ... 6.19941771e-01 -1.73776284e-01 1.64611060e-02] [ 1.09120297e+00 3.30105603e-01 1.90660477e-01 ... -6.41209900e-01 -6.26498461e-01 2.02160046e-01]] [[ 7.02369571e-01 9.05996095e-03 -3.10931019e-02 ... 8.87828134e-03 -1.59644991e-01 8.93544376e-01] [-7.10955739e-01 7.23730743e-01 1.31458327e-01 ... -1.10185981e+00 1.13807157e-01 6.94824994e-01] [-1.00252199e+00 -3.81497443e-01 -9.62560952e-01 ... 1.28452986e-01 2.64054954e-01 1.25551254e-01] ... [ 7.42912054e-01 -3.46449688e-02 -2.76927918e-01 ... 6.27836406e-01 -8.82984847e-02 -1.35888040e+00] [ 3.45905453e-01 -2.30833650e-01 -6.75812811e-02 ... 6.35569394e-01 -6.10348463e-01 -6.51283860e-01] [ 1.35771668e+00 1.39080480e-01 -2.32945710e-01 ... 7.93645382e-01 5.14054120e-01 2.22462565e-01]] ... [[ 3.40585738e-01 8.02334249e-01 4.50260997e-01 ... 9.16627143e-03 -2.28925556e-01 3.23467284e-01] [ 2.81291544e-01 -1.04427767e+00 -3.95032287e-01 ... -7.94442356e-01 5.00839829e-01 -5.22760935e-02] [ 3.20801944e-01 5.58943272e-01 4.14009303e-01 ... -6.76773906e-01 -7.18977526e-02 5.04582763e-01] ... [-1.27228725e+00 2.80156076e-01 -9.57722902e-01 ... 4.26098734e-01 -7.18837559e-01 2.69825280e-01] [ 1.53530931e+00 -2.37684935e-01 -4.14322019e-01 ... -7.78836608e-01 1.24320865e+00 -7.19708920e-01] [ 3.39936763e-01 -8.83503437e-01 8.00056398e-01 ... -1.27379492e-01 -2.06773818e-01 -5.90322502e-02]] [[-7.75556624e-01 2.69731134e-01 -8.32134247e-01 ... 1.43744633e-01 -1.02305663e+00 -4.21685278e-02] [ 2.22142965e-01 1.08651888e+00 1.01122193e-01 ... 
-4.49237019e-01 -2.17323348e-01 4.89334255e-01] [ 2.12087378e-01 5.58635771e-01 1.00004566e+00 ... 1.25244331e+00 -2.99746424e-01 1.38396055e-01] ... [ 4.00108516e-01 -2.12530553e-01 4.84751225e-01 ... 1.35573775e-01 6.87395334e-01 -5.71769834e-01] [ 6.22001767e-01 9.75425601e-01 5.17786503e-01 ... 1.39874876e+00 1.51058778e-01 -6.79613292e-01] [ 5.04902184e-01 -5.60658157e-01 -3.37824315e-01 ... 3.06982785e-01 1.82328284e-01 8.27034831e-01]] [[ 5.67302108e-01 -1.36124027e+00 -6.65185511e-01 ... 9.72064674e-01 -7.82674015e-01 -7.88156986e-02] [ 2.59809166e-01 1.94736481e-01 -4.86330956e-01 ... 6.52734101e-01 1.36385113e-01 4.35143858e-01] [ 2.11934477e-01 -1.33910477e+00 7.91618645e-01 ... -9.76305187e-01 2.90919065e-01 -3.08654398e-01] ... [ 2.74764057e-02 -5.92981763e-02 -1.27754319e+00 ... 1.65428638e-01 -1.16794276e+00 -7.06535131e-02] [ 5.02137952e-02 -7.63181269e-01 1.18461013e+00 ... -7.05803335e-01 3.30823511e-01 -8.99757862e-01] [ 3.03821921e-01 -3.35926205e-01 -1.28697944e+00 ... -9.44780111e-01 -1.15852928e+00 5.04419148e-01]]] [[[ 6.81339979e-01 2.53671378e-01 -1.21775612e-01 ... -5.40179431e-01 4.40992385e-01 -2.52195537e-01] [ 2.71449000e-01 8.98482740e-01 -5.23650587e-01 ... -3.42057496e-01 1.14826775e+00 2.01499462e-01] [-2.00540259e-01 -3.04385275e-01 -6.81119859e-01 ... 7.06394553e-01 -3.58809024e-01 5.52669227e-01] ... [ 4.73292500e-01 6.35185957e-01 -3.30118269e-01 ... -3.93780887e-01 3.20613563e-01 -3.66503328e-01] [-1.35101092e+00 -3.02955564e-02 -3.66235882e-01 ... -2.08321378e-01 -1.11958519e-01 -1.41924739e-01] [-7.67288685e-01 1.23598981e+00 1.23615511e-01 ... 3.29085797e-01 3.01855356e-01 4.14396793e-01]] [[-6.77678108e-01 -1.36428547e+00 -1.13693666e+00 ... -1.10328741e-01 5.75214207e-01 1.83748031e+00] [-3.68271619e-02 -8.85406658e-02 -5.37703216e-01 ... -2.08487540e-01 -6.39648259e-01 1.19317222e+00] [ 7.61286139e-01 3.75168562e-01 3.87631595e-01 ... -4.46299821e-01 -1.01536071e+00 -1.59659731e+00] ... 
[ 7.36444771e-01 1.71695244e+00 1.88127086e-01 ... -5.12018561e-01 2.65963227e-01 5.19273043e-01] [ 4.36307818e-01 2.00155348e-01 -7.46575296e-01 ... -6.35258138e-01 3.14816624e-01 -6.75642341e-02] [ 1.00143087e+00 2.83488512e-01 2.33088374e-01 ... 3.32082659e-01 -6.19686007e-01 -3.73626262e-01]] [[-4.15185452e-01 -5.59197843e-01 5.18002331e-01 ... 6.06132567e-01 4.95622277e-01 8.03150117e-01] [ 1.09419596e+00 4.07534987e-02 8.74012947e-01 ... 4.35524344e-01 -1.93320885e-01 2.55047649e-01] [ 8.31819996e-02 1.15090334e+00 -8.37539971e-01 ... -4.74847198e-01 -2.89178252e-01 1.23876953e+00] ... [ 3.26875359e-01 2.09173292e-01 5.09277701e-01 ... 8.31181586e-01 1.48313522e-01 -3.27276170e-01] [-8.21425796e-01 4.88974035e-01 -3.32413197e-01 ... -1.03283954e+00 1.12910569e+00 6.30431354e-01] [-3.57473820e-01 -6.12175226e-01 1.01781023e+00 ... -1.21897765e-01 1.18593514e+00 -6.71827495e-01]] ... [[-6.41337335e-01 9.74054541e-03 6.61556646e-02 ... 1.26312745e+00 -7.21867204e-01 7.07258821e-01] [-7.93005943e-01 -3.35384727e-01 2.82067478e-01 ... 1.22658506e-01 1.03432751e+00 -6.35792434e-01] [ 4.64282572e-01 -1.13400674e+00 -5.66822827e-01 ... -5.99891186e-01 -2.31302604e-01 1.56575692e+00] ... [-1.91224992e+00 -2.09056437e-02 -7.68533051e-01 ... 4.21844065e-01 -7.14618027e-01 -7.78729141e-01] [-2.43123502e-01 2.83660084e-01 -2.43940413e-01 ... 5.00473738e-01 4.20084059e-01 4.86098409e-01] [-3.78010049e-02 -1.46525502e+00 -7.20780253e-01 ... 2.83362538e-01 3.33109319e-01 -1.95013836e-01]] [[-4.29350108e-01 -1.75552532e-01 4.82543409e-02 ... 7.23156333e-01 -8.49944472e-01 5.16637862e-01] [ 7.58886337e-01 1.01622112e-01 -1.65412843e+00 ... 4.44542356e-02 2.20999479e-01 -1.59207606e+00] [-7.57083416e-01 6.63302422e-01 4.79452521e-01 ... 4.78843004e-01 8.62360239e-01 5.35324335e-01] ... [-7.95975253e-02 5.43914974e-01 -1.92697212e-01 ... -9.85551119e-01 -1.20476878e+00 4.06707153e-02] [-1.36612296e+00 8.00535679e-01 1.23892713e+00 ... 
8.11514914e-01 -5.89272559e-01 1.17732179e+00] [-1.20493904e-01 6.78916872e-01 9.63539854e-02 ... -1.31731987e-01 1.37981009e+00 -6.09967411e-01]] [[ 1.35301426e-01 -1.20508611e+00 -6.57162130e-01 ... -7.49704540e-01 1.19964577e-01 -8.02170575e-01] [-5.45701444e-01 -1.04749632e+00 -6.27688691e-02 ... 6.64622197e-03 6.56745490e-03 -2.68959284e-01] [-3.82434785e-01 -8.00249040e-01 1.18337095e-01 ... 2.06516474e-01 6.45419359e-01 3.71185780e-01] ... [ 9.20409620e-01 6.48150265e-01 2.09037483e-01 ... -4.65369642e-01 3.77016932e-01 3.13685954e-01] [-2.46163115e-01 -9.69350561e-02 7.48268008e-01 ... -1.70945808e-01 -4.01056111e-01 1.20856524e+00] [-4.90936190e-01 -4.47355986e-01 2.07181543e-01 ... 2.11675689e-01 8.37388635e-01 4.22674529e-02]]]] [[[[ 7.01073036e-02 4.37279373e-01 -1.43628180e-01 ... -2.70032704e-01 4.57555145e-01 -3.18252414e-01] [-8.79490733e-01 -2.04021573e-01 1.33538866e+00 ... 9.47166309e-02 -8.33035946e-01 7.10474551e-02] [-6.24811530e-01 2.97629565e-01 -7.87566841e-01 ... 3.40066999e-01 2.72662520e-01 2.61713952e-01] ... [-2.64160246e-01 6.77833259e-01 3.00878137e-01 ... -4.19893801e-01 -8.60333502e-01 1.27560508e+00] [-6.69449151e-01 -4.42262888e-01 -5.06635547e-01 ... -2.51024455e-01 7.27741003e-01 -1.42107284e+00] [-2.66606897e-01 1.69788554e-01 -4.51956272e-01 ... 7.76290819e-02 1.42208889e-01 1.04088509e+00]] [[-2.09347844e-01 4.90370125e-01 -5.36152244e-01 ... -1.66397560e-02 -5.91871381e-01 5.74396193e-01] [-1.28760791e+00 -1.97242856e+00 2.64416486e-01 ... 4.50619049e-02 2.32788950e-01 -8.55698884e-02] [-1.08413212e-02 1.20663047e-01 -3.98034275e-01 ... 2.11432144e-01 -5.58826685e-01 1.92342684e-01] ... [-2.38985986e-01 7.14937627e-01 -1.40236631e-01 ... 1.91581115e-01 1.49119425e+00 -8.45394611e-01] [-2.27255851e-01 3.72070849e-01 6.27294183e-01 ... 6.54479802e-01 4.30414200e-01 -6.77485704e-01] [-5.48875630e-01 5.00957221e-02 6.10916615e-01 ... 
-3.70693356e-01 -5.61029434e-01 -1.69731483e-01]] [[-7.83850372e-01 -2.91292369e-01 5.07225692e-01 ... -2.73740083e-01 -8.11511159e-01 1.16352534e+00] [ 6.88864708e-01 -1.53731138e-01 2.23934233e-01 ... 9.46660787e-02 -7.49586463e-01 7.41230622e-02] [ 2.59355396e-01 2.10844502e-02 -3.18183422e-01 ... -1.44968122e-01 -1.04370618e+00 -5.29539883e-01] ... [-8.96116495e-01 -3.49486858e-01 7.55800307e-02 ... -9.42243040e-01 1.35873005e-01 -7.23207444e-02] [ 2.95125425e-01 -6.83301985e-01 1.37452304e-01 ... -1.15923929e+00 3.16310644e-01 -1.64602920e-01] [ 4.73374099e-01 -4.06721905e-02 -3.73559743e-01 ... -9.92156148e-01 -1.20707703e+00 -3.53536427e-01]] ... [[ 9.29435045e-02 6.18205182e-02 6.13005102e-01 ... -1.36772633e+00 -7.83586383e-01 -4.48217690e-01] [-1.36958516e+00 -4.72207159e-01 -2.60452002e-01 ... -1.57980561e-01 -4.54273820e-01 -1.87412485e-01] [ 2.93589264e-01 -7.84571588e-01 4.08401310e-01 ... -1.13774562e+00 -2.64614969e-01 2.08764002e-01] ... [-1.90699384e-01 4.46768701e-01 -4.62915182e-01 ... -8.47026765e-01 1.00833702e+00 9.35302079e-01] [ 4.89427119e-01 -7.49676049e-01 -6.43809736e-01 ... -7.71885753e-01 6.78642765e-02 4.65387672e-01] [-8.81770492e-01 1.12124681e+00 7.46571198e-02 ... -1.91089439e+00 -8.53821337e-01 -2.04547927e-01]] [[-3.53590727e-01 -4.85189945e-01 -1.47236079e-01 ... -9.43051636e-01 2.52997547e-01 5.03722876e-02] [ 1.83847621e-02 4.39184129e-01 -2.89517403e-01 ... -2.79553145e-01 5.24665415e-01 1.32530105e+00] [-2.01504207e+00 -1.08764696e+00 -6.21044815e-01 ... -6.48806274e-01 8.86111975e-01 -9.25634027e-01] ... [ 1.39811110e+00 -4.41503108e-01 2.35862106e-01 ... 3.79452020e-01 -7.81854331e-01 -6.13855600e-01] [-4.02331129e-02 -5.04570246e-01 -6.19304597e-01 ... 2.64535427e-01 8.99139345e-01 6.55436888e-02] [ 3.76420259e-01 -1.32933009e+00 3.36337388e-01 ... 3.76437977e-02 -6.43669665e-01 -5.79308532e-02]] [[ 1.46635607e-01 -1.71739131e-01 3.48886073e-01 ... 
-8.14314842e-01 1.01968896e+00 3.54214400e-01] [ 1.50502467e+00 -1.27000165e+00 -4.71845381e-02 ... 1.10909176e+00 4.17254478e-01 -1.94589809e-01] [-1.26241767e+00 4.55383211e-01 -1.56430411e+00 ... -7.74100840e-01 1.94098473e-01 7.75584221e-01] ... [-3.61208647e-01 8.31235766e-01 1.31314981e+00 ... -2.44927645e-01 -3.85487348e-01 1.76698282e-01] [ 3.86350155e-02 5.16113281e-01 -2.69746095e-01 ... 7.76594222e-01 -2.65127629e-01 -4.03621614e-01] [-2.50732541e-01 6.73590228e-02 9.61006999e-01 ... 2.66393553e-02 -7.47193933e-01 -8.34729612e-01]]] [[[ 6.20929182e-01 7.26467371e-01 1.27597427e+00 ... 5.97220302e-01 4.90451396e-01 4.23783541e-01] [-9.03117284e-02 -6.50113449e-03 -1.98060632e-01 ... -1.29295334e-01 1.32659817e+00 -1.05877995e+00] [-7.54975140e-01 -4.19042915e-01 -4.67547446e-01 ... 5.54348171e-01 8.78895819e-01 1.58333921e+00] ... [ 4.49752867e-01 6.49330974e-01 -1.69393599e-01 ... -2.34507006e-02 -3.49216282e-01 -1.94174096e-01] [-4.99523401e-01 8.18398893e-01 4.27880138e-01 ... -3.07407347e-03 9.38094854e-01 -2.28456354e+00] [ 3.26440394e-01 -9.50576007e-01 -5.94830453e-01 ... 4.30352747e-01 4.49486643e-01 3.95722508e-01]] [[-1.07403910e+00 -4.19123858e-01 4.35222745e-01 ... -9.50354517e-01 -6.63924694e-01 -4.77467537e-01] [-2.37666309e-01 5.48687518e-01 8.63277316e-01 ... -2.26561427e-02 -1.06444329e-01 2.50588715e-01] [ 5.59672952e-01 -3.00866459e-02 4.45297956e-01 ... -4.87491339e-02 9.33662057e-01 1.74227023e+00] ... [-1.55097276e-01 1.45818174e+00 5.23974419e-01 ... -9.69032884e-01 6.28151655e-01 3.38628024e-01] [-2.53116697e-01 -5.87662518e-01 -5.02440572e-01 ... -6.34393692e-01 -1.66644767e-01 2.58717611e-02] [-5.87179780e-01 2.19751149e-01 -8.53386104e-01 ... -1.22827150e-01 1.97654683e-02 -1.57980227e+00]] [[-1.89283803e-01 2.57298648e-01 3.99191350e-01 ... -8.30647722e-02 6.30867898e-01 2.86667868e-02] [ 5.19594140e-02 -8.72197807e-01 1.02017283e-01 ... 
-1.26364887e-01 -1.15029645e+00 3.12284559e-01] [-1.73296943e-01 2.41523325e-01 1.14453089e+00 ... 9.70408976e-01 -1.71962273e+00 6.22430623e-01] ... [-4.95658576e-01 -6.53140366e-01 9.66164470e-01 ... 2.06023023e-01 -1.18011928e+00 9.56211746e-01] [ 7.42612839e-01 6.67270184e-01 -5.09112358e-01 ... 6.29481822e-02 4.88524660e-02 3.59054208e-01] [ 2.49367490e-01 7.05894947e-01 9.86531377e-02 ... -1.11481977e+00 7.26765692e-01 -1.52828693e-01]] ... [[ 4.21140134e-01 1.01756263e+00 -5.45247614e-01 ... 6.79819584e-01 -1.28943920e+00 1.02349848e-01] [ 1.06786561e+00 -3.89474481e-02 1.56008631e-01 ... -9.47654724e-01 -6.59822524e-01 -8.14900100e-01] [-9.91729617e-01 4.67636883e-01 -6.11547709e-01 ... -4.65824664e-01 -3.42646986e-01 -7.65927434e-01] ... [ 4.42427844e-01 8.28354239e-01 1.12297006e-01 ... 7.51239121e-01 -1.51589715e+00 -1.19097185e+00] [-7.21598923e-01 4.48005974e-01 4.25341338e-01 ... -4.62051213e-01 2.19356298e-01 -1.65769458e-01] [-5.83566666e-01 2.40190193e-01 8.92577291e-01 ... 3.65448415e-01 -1.04786205e+00 1.05503047e+00]] [[-7.77899921e-01 6.61369443e-01 1.85870975e-01 ... 4.63362217e-01 -4.08074051e-01 1.16088346e-01] [ 2.94473708e-01 1.52902997e+00 3.01349133e-01 ... -5.22242725e-01 1.42552555e-01 -7.29335368e-01] [-1.45861328e-01 -5.49884915e-01 6.58889115e-02 ... 1.37284267e+00 8.81238282e-02 -1.12739372e+00] ... [-2.63540089e-01 -4.17903692e-01 1.53849736e-01 ... -3.07632983e-01 6.71354115e-01 -1.80826396e-01] [-8.19383487e-02 6.87687248e-02 1.06421947e+00 ... -3.62035125e-01 -3.43456328e-01 9.87978339e-01] [ 3.73902887e-01 -6.82192445e-01 -1.19865203e+00 ... -6.15821742e-02 5.91582768e-02 4.19921577e-01]] [[-7.37155795e-01 6.46084070e-01 -1.14280319e+00 ... 5.36886454e-01 -1.35888040e+00 1.63164750e-01] [ 7.06323564e-01 1.49008855e-02 -7.10145891e-01 ... 4.17604715e-01 -1.33126453e-01 -9.33623195e-01] [ 7.27312803e-01 1.58179033e+00 4.03113663e-01 ... -1.69760680e+00 -1.47865367e+00 3.29432413e-02] ... 
[ 2.33732581e-01 -1.64683297e-01 -7.49127209e-01 ... 9.39685404e-01 1.44764423e+00 5.92311382e-01] [ 3.79021466e-01 8.87935400e-01 1.96514344e+00 ... -7.76330471e-01 1.70653135e-01 4.85246271e-01] [ 1.06316261e-01 4.57825176e-02 -8.09934214e-02 ... -9.61794853e-01 -4.98914510e-01 8.93695414e-01]]] [[[ 1.83170319e-01 -6.11827791e-01 -4.37744707e-01 ... -5.26909292e-01 -1.25676049e-02 -5.09086959e-02] [-7.31008887e-01 -4.28700566e-01 -3.38655740e-01 ... -8.61000776e-01 4.86178994e-01 5.99376440e-01] [ 7.44051337e-01 -1.24910080e+00 3.90975446e-01 ... 6.76461160e-01 1.29331678e-01 -5.30795455e-01] ... [-1.16328025e+00 1.07305133e+00 1.30248022e+00 ... 3.48396599e-01 -9.10752892e-01 -1.94473132e-01] [-2.57669955e-01 1.05300176e+00 -1.06483907e-01 ... 8.91046971e-02 -7.88129643e-02 5.49446344e-01] [ 8.00253093e-01 1.40724373e+00 -5.92271030e-01 ... 2.42126256e-01 -1.06599987e-01 4.64373857e-01]] [[ 2.18267053e-01 -2.63590157e-01 1.39454293e+00 ... -2.63578206e-01 -1.12521097e-01 6.00328624e-01] [-8.16300631e-01 4.24107999e-01 -2.55918443e-01 ... -1.12526023e+00 -4.94993031e-01 -1.12727249e+00] [-4.62652802e-01 1.00155115e+00 2.95399159e-01 ... -7.44109392e-01 -1.30778551e+00 4.83900070e-01] ... [-3.86439376e-02 -1.84070647e-01 1.10419643e+00 ... -1.02009869e+00 -1.28230679e+00 -3.53129834e-01] [ 1.61786115e+00 -7.08158672e-01 -1.32385123e+00 ... -2.91531384e-01 -4.07281667e-01 5.39327025e-01] [-1.47676632e-01 -9.75636959e-01 1.75009310e+00 ... -5.36061704e-01 -5.79976797e-01 5.81038117e-01]] [[-2.34448522e-01 6.51025891e-01 -1.11271584e+00 ... 8.95395517e-01 7.75422513e-01 -3.58090490e-01] [-4.75680172e-01 6.08791053e-01 -3.36779296e-01 ... 4.96878177e-01 -1.22879744e+00 -7.53138125e-01] [ 4.87957925e-01 -2.42504090e-01 1.49235463e+00 ... -7.08466530e-01 3.01675737e-01 1.12942278e-01] ... [ 9.09188271e-01 1.74528491e-02 -7.38982484e-02 ... -4.68633264e-01 -1.80162191e-01 -7.20004857e-01] [ 3.08943897e-01 1.64507186e+00 1.44722804e-01 ... 
-1.28642261e+00 -6.20679915e-01 4.13499385e-01] [-2.97366381e-01 -8.09107006e-01 -2.09280595e-01 ... -1.25649921e-03 7.49066651e-01 -8.67704898e-02]] ... [[-8.91837254e-02 1.40643203e+00 -9.74885106e-01 ... 3.53401788e-02 2.58640081e-01 3.59922677e-01] [ 6.95834577e-01 7.91856647e-01 -7.38741696e-01 ... -4.24213797e-01 1.13637459e+00 -4.44548994e-01] [-5.34720004e-01 -4.73711967e-01 -1.29678190e+00 ... 1.30647838e+00 -1.23409903e+00 -6.33451641e-01] ... [ 1.64664185e+00 6.13731980e-01 2.95911819e-01 ... -2.09125757e-01 -5.53925395e-01 -7.14189559e-02] [ 2.15234756e-01 1.54306367e-01 7.84200072e-01 ... -6.53425336e-01 -1.28514290e-01 -9.60301980e-02] [-2.70232260e-01 -1.34880614e+00 -4.66298103e-01 ... 1.05028784e+00 1.78291708e-01 -2.52496123e-01]] [[ 3.13336611e-01 -1.26762199e+00 4.81185466e-01 ... 3.48363072e-01 5.07746756e-01 -8.00356448e-01] [ 6.61305547e-01 -9.03806388e-01 2.09190905e-01 ... 9.12108600e-01 3.23872030e-01 8.76453519e-01] [ 1.66371703e-01 4.97119933e-01 -1.28372610e+00 ... 8.90325159e-02 -4.79694724e-01 2.78708851e-03] ... [-1.12352335e+00 -3.86705041e-01 -5.51028013e-01 ... 1.01965773e+00 4.66289312e-01 -6.04799330e-01] [ 2.56267965e-01 -6.05246842e-01 -9.33955073e-01 ... -5.16138911e-01 -9.08671618e-01 9.13685322e-01] [-7.13396549e-01 7.58640826e-01 -7.53921270e-02 ... -1.19782686e+00 -9.81042385e-01 5.91411293e-01]] [[ 2.26951092e-01 1.17942333e+00 1.77215308e-01 ... -2.41398439e-01 -5.48307419e-01 8.18903625e-01] [ 8.12619984e-01 -3.20253432e-01 5.87097518e-02 ... -5.09820461e-01 -9.44655955e-01 5.54516137e-01] [ 2.38331151e+00 5.57181358e-01 3.54506783e-02 ... -2.47510970e-01 -3.79082672e-02 -7.65925169e-01] ... [-7.41693974e-01 9.29051280e-01 1.34325635e+00 ... 2.56463680e-02 3.04786712e-01 1.12916517e+00] [ 3.53490293e-01 3.33966732e-01 -3.69766504e-01 ... 1.03779590e+00 -3.09677333e-01 -6.01155311e-02] [ 1.20082438e-01 -5.95010102e-01 2.31417939e-01 ... 
-9.31952834e-01 -2.93249398e-01 1.00639331e+00]]] [[[-2.58520365e-01 3.23776543e-01 -3.58747512e-01 ... 5.96830361e-02 -8.32409084e-01 7.43119180e-01] [-2.90923536e-01 4.64291751e-01 1.42991528e-01 ... 6.14034355e-01 4.35070008e-01 8.85929018e-02] [ 2.98036681e-03 6.74505889e-01 -5.48799708e-02 ... -9.04759765e-02 3.83085221e-01 7.08332181e-01] ... [-6.75115595e-03 -2.93870032e-01 -1.70006409e-01 ... -7.31702626e-01 -7.88703859e-01 -2.97740381e-02] [ 2.62928915e+00 5.14702797e-01 -1.78574294e-01 ... 1.73052475e-01 -8.43301117e-02 4.88807142e-01] [-1.50996399e+00 2.81067431e-01 7.29324937e-01 ... 1.29474843e+00 -9.76419866e-01 1.10798404e-01]] [[-2.85286248e-01 1.19993722e+00 5.52707434e-01 ... 9.99974534e-02 -1.23150194e+00 -1.35272789e+00] [ 4.68692183e-01 -8.03509176e-01 8.72216821e-01 ... 9.52374876e-01 -1.04255497e+00 -6.70659304e-01] [ 1.08872795e+00 7.68754184e-01 -8.31290007e-01 ... 7.24313259e-01 -3.18305165e-01 8.54284108e-01] ... [-1.38584232e+00 -4.64551568e-01 1.89738214e-01 ... -1.04202676e+00 3.08465630e-01 7.70641148e-01] [ 7.47321606e-01 1.85311511e-01 -4.23573077e-01 ... -2.18534365e-01 1.05118358e+00 -1.22687232e+00] [ 1.83688506e-01 1.10954738e+00 6.87721446e-02 ... 1.74764848e+00 -9.81653452e-01 1.10262060e+00]] [[-4.77314115e-01 7.46325105e-02 8.97340834e-01 ... -1.58769703e+00 -1.29121700e-02 -1.31822631e-01] [-4.43946391e-01 -5.95624149e-01 -2.08265007e-01 ... 5.89346647e-01 -6.68656826e-02 -1.03862643e-01] [ 4.85022694e-01 -1.26509264e-01 -4.26950425e-01 ... -6.50424778e-01 -5.78185678e-01 -3.58009279e-01] ... [-8.56929868e-02 4.63985890e-01 4.26014513e-01 ... -5.07245839e-01 3.24119925e-01 -1.23095381e+00] [ 2.40549922e-01 4.77243692e-01 1.68706208e-01 ... 7.77635515e-01 -2.63647377e-01 -3.74582976e-01] [-2.78217047e-01 -1.60661757e+00 -1.41082144e+00 ... 1.12051696e-01 1.15091205e+00 5.55724092e-02]] ... [[-3.28151762e-01 -4.12611991e-01 -4.22788501e-01 ... 
3.11811268e-01 -5.79034626e-01 -5.55775940e-01] [ 8.03726017e-01 -8.91886711e-01 -9.84539568e-01 ... -6.54067814e-01 -6.53397918e-01 -1.04203022e+00] [ 1.18709064e+00 3.38046908e-01 6.93316579e-01 ... 1.82718009e-01 -8.99514139e-01 -1.07634766e-02] ... [-1.02954531e+00 -1.63910687e-01 -5.60312688e-01 ... 1.01742230e-01 3.69178623e-01 -1.25834620e+00] [ 8.30603838e-01 5.68124205e-02 -2.92032827e-02 ... -1.15729511e-01 -2.59413365e-02 -2.46986389e-01] [-5.92914298e-02 2.89035857e-01 -1.67515010e-01 ... 1.34403944e+00 -6.58874214e-01 -4.34048682e-01]] [[-2.40873992e-01 -6.17022812e-01 -4.21688765e-01 ... -3.58823001e-01 -3.12751502e-01 -1.11302808e-02] [ 1.23598611e+00 3.91616046e-01 1.91502318e-01 ... -6.83657110e-01 6.04657650e-01 1.01936996e+00] [-1.71682462e-01 4.09230411e-01 -2.91944146e-02 ... 3.35497297e-02 -3.69696915e-01 1.02149928e+00] ... [ 1.28691271e-01 -2.78611984e-02 -6.48949742e-01 ... 5.81478357e-01 -5.66975772e-01 9.35852349e-01] [-1.21784434e-01 -8.05546820e-01 4.65825737e-01 ... -6.44613862e-01 -2.85062432e-01 -7.72162855e-01] [-9.96432722e-01 6.32839262e-01 1.20197468e-01 ... -7.47606099e-01 -1.26784194e+00 2.26116553e-01]] [[-1.05809295e+00 -4.44236547e-01 8.03239703e-01 ... -1.90194398e-01 -3.04454267e-01 -5.63108444e-01] [-5.91358185e-01 1.13819647e+00 -1.87105477e-01 ... -8.65151584e-01 -5.09039640e-01 1.27796936e+00] [ 7.40769088e-01 -2.40894228e-01 8.90111864e-01 ... 2.52105147e-01 -5.16890250e-02 8.09240118e-02] ... [-1.14983869e+00 1.88497439e-01 2.43840262e-01 ... -2.72770941e-01 -5.82474589e-01 5.48069477e-01] [ 4.89810994e-03 -8.92555296e-01 -5.35776794e-01 ... -1.20296581e-02 6.35813296e-01 3.78647447e-01] [-6.75052166e-01 6.11137927e-01 -1.41282094e+00 ... -9.40656543e-01 -3.81000400e-01 4.03372020e-01]]] [[[ 5.86060703e-01 -3.21647733e-01 1.94281489e-01 ... 9.35256109e-02 -6.29116535e-01 -8.90775099e-02] [ 1.64870977e-01 2.79090945e-02 2.16791600e-01 ... 
-8.84653747e-01 6.92089722e-02 9.23215300e-02] [-9.92465913e-01 -1.59564495e-01 -9.77263898e-02 ... -5.20729125e-01 3.11042994e-01 -4.71059501e-01] ... [-1.02800447e-02 1.33421791e+00 -1.01970088e+00 ... -1.06254578e+00 1.14805245e+00 2.37946063e-01] [ 4.33163971e-01 2.23919049e-01 8.38029206e-01 ... 3.90899628e-01 -7.23225951e-01 -4.17232394e-01] [-9.47321117e-01 -4.51300710e-01 -7.64419854e-01 ... 4.55715328e-01 -5.29759109e-01 -3.76396447e-01]] [[ 3.88403147e-01 8.67901087e-01 2.96646476e-01 ... -2.05650911e-01 -2.33726755e-01 1.65100419e-03] [-4.97733533e-01 8.43695283e-01 -3.21155697e-01 ... 1.04168463e+00 7.12807477e-01 -1.43155470e-01] [-4.46609616e-01 7.65738249e-01 8.43245447e-01 ... 6.94525003e-01 -1.22668588e+00 9.38320577e-01] ... [-7.44211257e-01 1.06125939e+00 -5.65283954e-01 ... -3.94374877e-01 -8.74151647e-01 -9.18594301e-01] [-1.48011342e-01 2.20172405e-01 -6.37888551e-01 ... -4.17897403e-02 -1.09508073e+00 -1.86269894e-01] [ 9.15501237e-01 4.69464540e-01 -3.24905306e-01 ... 1.40771270e+00 3.63137305e-01 -2.64345199e-01]] [[ 7.21438751e-02 -3.83719131e-02 -1.34185493e+00 ... 9.88265797e-02 -3.70417565e-01 5.74431904e-02] [ 1.64663959e-02 -3.64755988e-01 3.74767572e-01 ... 7.91551292e-01 -2.54452765e-01 -4.56594169e-01] [-6.92308962e-01 -4.08566177e-01 7.74984360e-02 ... -1.04323663e-01 6.54196203e-01 -7.60335743e-01] ... [-9.40721095e-01 -6.29965544e-01 -5.50657451e-01 ... 7.91415215e-01 1.05358422e+00 8.31242144e-01] [-4.92853999e-01 -2.02672835e-03 -1.30278599e+00 ... -8.18714082e-01 -6.17295623e-01 -1.07412505e+00] [ 8.96575674e-02 -6.22976005e-01 7.60209709e-02 ... 4.42855626e-01 -1.32158983e+00 -8.26321185e-01]] ... [[-8.89099762e-02 -1.59997690e+00 -1.08712669e-02 ... -3.34947914e-01 7.60288537e-01 3.24296445e-01] [ 1.11440480e+00 5.25902271e-01 -9.61616397e-01 ... 9.35389936e-01 1.09097183e+00 5.16324401e-01] [ 4.93318111e-01 2.96507683e-03 1.40091375e-01 ... 5.22319913e-01 2.41658092e-01 -3.02106500e-01] ... 
[-3.75166237e-01 -1.46210730e-01 -1.61689091e-02 ... -3.07852536e-01 -2.11626261e-01 9.51993167e-01] [-1.21703899e+00 -1.37076235e+00 2.81207860e-01 ... 3.68320972e-01 -2.65403599e-01 -1.02047479e+00] [ 1.09961689e+00 -1.77146047e-01 -5.04087031e-01 ... 4.05131102e-01 1.16347384e+00 -2.55843401e-01]] [[-1.04839623e+00 1.05951205e-01 -4.43454534e-02 ... 5.50688744e-01 4.75871027e-01 9.95839119e-01] [-5.45693696e-01 7.11129189e-01 1.21705472e+00 ... -4.17742848e-01 8.13349783e-01 1.99994057e-01] [-2.21401468e-01 -1.13041222e+00 4.81574267e-01 ... 5.11883378e-01 6.73588336e-01 -7.85608172e-01] ... [-1.04345989e+00 -6.76414669e-01 3.09418172e-01 ... -9.50359583e-01 1.19098628e+00 -5.74451923e-01] [ 1.75544068e-01 2.93790072e-01 2.18870908e-01 ... -3.73420149e-01 1.11345255e+00 -7.07469940e-01] [-8.74058843e-01 -3.01307380e-01 3.96578074e-01 ... -1.31579101e-01 -8.79095674e-01 -1.02628875e+00]] [[-4.04246897e-02 6.53944135e-01 -3.38454336e-01 ... -4.63891566e-01 6.09435499e-01 2.34135300e-01] [ 7.95630515e-01 8.95690203e-01 -7.07903922e-01 ... -3.57252061e-01 1.70682222e-01 -6.44951999e-01] [-2.59857744e-01 2.14922562e-01 2.00245142e-01 ... -3.47040564e-01 -1.20876670e+00 -1.07740033e+00] ... [-9.90996361e-01 8.78040433e-01 1.21485484e+00 ... 3.84852916e-01 2.73902774e-01 -5.05629063e-01] [-2.07759571e+00 1.95472494e-01 1.52764827e-01 ... 2.91495502e-01 -3.81312430e-01 5.10938823e-01] [-1.41560602e+00 -2.64340937e-01 1.01496190e-01 ... -1.10574031e+00 7.55357504e-01 -1.01702780e-01]]] [[[-4.53792401e-02 3.76711011e-01 8.73200476e-01 ... 9.26274538e-01 6.51021123e-01 5.17607868e-01] [-3.61306638e-01 -5.30073762e-01 -4.96511042e-01 ... 1.45367220e-01 5.46350181e-01 -6.80029273e-01] [ 5.44306040e-01 -6.52808905e-01 1.35446593e-01 ... 4.78209436e-01 9.04274359e-02 -2.33220924e-02] ... [ 1.38583291e+00 -8.14744234e-01 -4.56730634e-01 ... -8.11437741e-02 -2.10755968e+00 -1.12247133e+00] [-1.14246130e+00 -1.13416266e+00 -5.34009695e-01 ... 
7.81092465e-01 -9.78312254e-01 5.11482120e-01] [ 4.59025383e-01 6.55694259e-03 -1.25245678e+00 ... -5.69080234e-01 -4.25561547e-01 -2.40434542e-01]] [[ 3.54512215e-01 -3.79990190e-01 1.38353799e-02 ... -4.83896017e-01 7.29841948e-01 -1.70403302e-01] [ 2.30089769e-01 -3.36315602e-01 -5.77788532e-01 ... 9.43258762e-01 -9.11581695e-01 -4.84068990e-02] [ 8.19110990e-01 1.23438787e+00 -6.78877890e-01 ... -7.47342646e-01 2.92186201e-01 2.03519017e-02] ... [ 1.77422076e-01 2.16269791e-01 -3.80733997e-01 ... -6.28583848e-01 9.31460038e-02 9.80271995e-01] [-1.90172315e-01 2.25464195e-01 -9.32766438e-01 ... -9.38270032e-01 2.43012547e-01 1.09996998e+00] [ 4.55087796e-02 -1.42072129e+00 5.33945560e-01 ... -4.45510238e-01 -2.33137548e-01 1.77938962e+00]] [[-8.78409445e-01 -9.69593823e-01 9.32239413e-01 ... 6.03668988e-01 -3.89569044e-01 3.34302306e-01] [-2.49105975e-01 -2.58434266e-01 -5.84618032e-01 ... 5.15502095e-01 -3.35969001e-01 6.42419159e-01] [ 9.38315213e-01 1.13253452e-01 4.71023202e-01 ... -1.82640803e+00 -1.85270786e-01 -1.62813783e-01] ... [-6.48017824e-01 2.55404621e-01 -2.15931818e-01 ... 7.19071150e-01 -2.36380458e-01 2.70593107e-01] [ 3.69190186e-01 1.11332524e+00 -5.10223031e-01 ... 6.07525222e-02 -9.38323796e-01 -6.53935611e-01] [-1.30307838e-01 -4.92284030e-01 -4.29969668e-01 ... 7.29334831e-01 5.10830581e-01 -6.82351112e-01]] ... [[-5.01705557e-02 -4.86075193e-01 4.25392725e-02 ... -1.31779149e-01 6.42262161e-01 2.66313583e-01] [ 5.10440826e-01 -2.42505565e-01 -8.74516845e-01 ... 5.92728734e-01 -1.39936125e+00 5.80016732e-01] [ 1.68965071e-01 -8.17860603e-01 9.42923605e-01 ... -1.65670943e+00 3.91960926e-02 1.56053805e+00] ... [ 4.20590132e-01 6.08023047e-01 -3.17464352e-01 ... 5.91163218e-01 5.49486041e-01 1.35424495e+00] [ 6.35710835e-01 -1.31155109e+00 1.03309715e+00 ... -3.09400439e-01 7.59633422e-01 1.03463328e+00] [-1.71088719e+00 -6.23312831e-01 -3.78135949e-01 ... 
7.87648141e-01 -1.13937449e+00 -2.94504464e-01]] [[ 2.31631607e-01 -1.64664590e+00 -1.51451200e-01 ... 1.23639531e-01 -6.40002131e-01 -4.97914672e-01] [-1.33936942e+00 7.17158258e-01 7.57506192e-02 ... -4.99550492e-01 4.02706712e-01 -1.07976353e+00] [ 1.21170640e-01 6.35145843e-01 -3.25916678e-01 ... 1.21311200e+00 1.43030033e-01 -3.17151666e-01] ... [ 9.79729831e-01 -4.24138814e-01 -3.10199469e-01 ... -6.96823537e-01 7.60974109e-01 6.75538898e-01] [ 3.26853655e-02 1.69385865e-01 -2.22774014e-01 ... 7.27094710e-01 -7.88103998e-01 -1.73091143e-01] [-6.84546530e-01 4.02311459e-02 9.89295915e-03 ... 7.05332935e-01 -5.33720076e-01 -5.08473396e-01]] [[-4.82777208e-01 -4.87019643e-02 -5.31491339e-01 ... -4.50694114e-01 5.12167215e-01 -6.97588861e-01] [-2.60371119e-01 6.32338524e-01 1.20459425e+00 ... -3.82463843e-01 3.42792034e-01 8.20144057e-01] [-1.46797621e+00 5.34434676e-01 1.98429778e-01 ... -6.16019011e-01 -3.24863553e-01 3.32874954e-01] ... [ 9.85315740e-01 -1.10133588e+00 -1.22917879e+00 ... 2.40577951e-01 7.45473981e-01 1.15058255e+00] [-5.56531847e-01 -1.61538705e-01 3.20151418e-01 ... -1.23334050e+00 -2.29010448e-01 1.79129386e+00] [-9.98303950e-01 1.44089317e+00 -5.35812080e-01 ... -4.16065529e-02 -4.65552151e-01 7.68641949e-01]]]] ... [[[[-2.70835549e-01 6.54733062e-01 1.61078942e+00 ... -4.81868356e-01 -1.49008751e-01 2.10221455e-01] [ 1.45313278e-01 -8.99570346e-01 -2.71493018e-01 ... 6.44978642e-01 -4.00953472e-01 9.53023806e-02] [-2.35608131e-01 1.67852655e-01 6.51469469e-01 ... -5.67017913e-01 -5.74764907e-01 4.38686401e-01] ... [-3.83643538e-01 -7.97567964e-01 -8.00008923e-02 ... 1.05948653e-02 -1.00168981e-01 8.38749766e-01] [-2.88797826e-01 -4.40015107e-01 1.75582111e-01 ... -1.67595327e-01 9.03154910e-03 1.31218636e+00] [ 4.23586041e-01 1.72383830e-01 8.47988963e-01 ... 7.31450617e-02 1.76348031e+00 8.19037795e-01]] [[-9.89806652e-01 1.18875504e-01 3.96953374e-01 ... 
6.33027852e-01 4.04082566e-01 2.30790842e-02] [-2.68615216e-01 6.78490937e-01 5.57568669e-01 ... 5.83717108e-01 1.29913986e-01 -1.08986545e+00] [-5.24235010e-01 1.37090594e-01 7.10593581e-01 ... -1.18987989e-02 1.45225659e-01 -4.51854974e-01] ... [-1.01481187e+00 5.31506181e-01 -4.79285449e-01 ... 7.02121019e-01 5.69539845e-01 -1.40031290e+00] [ 8.64321649e-01 1.85875249e+00 -6.11648373e-02 ... 2.25990325e-01 5.45069396e-01 1.06335592e+00] [-5.27823746e-01 5.75742006e-01 1.22386754e+00 ... -1.09264147e+00 6.61414191e-02 1.18377876e+00]] [[-5.78655362e-01 -6.85785353e-01 -9.09847021e-01 ... -1.98173583e+00 -3.86634767e-01 7.12383449e-01] [-9.63618696e-01 5.27579002e-02 -2.50146449e-01 ... 9.26076174e-01 -7.12529123e-01 1.29118383e-01] [ 3.49237621e-01 -2.78557241e-02 2.95076519e-01 ... -8.85621086e-03 -5.88089526e-01 5.67734063e-01] ... [-4.24733281e-01 4.39542308e-02 -4.13252503e-01 ... 7.89252460e-01 -1.50132999e-02 7.11500645e-01] [ 1.44204545e+00 1.76884961e+00 8.66299570e-02 ... 3.45135599e-01 1.00909144e-01 9.45780754e-01] [-6.77186906e-01 -9.79546979e-02 -4.51499760e-01 ... 7.31796503e-01 -8.00208926e-01 6.08548522e-01]] ... [[ 5.08243978e-01 9.60186481e-01 -7.97622025e-01 ... 4.91873235e-01 1.15616150e-01 7.78113455e-02] [ 1.97935045e-01 -4.25544888e-01 -5.74209630e-01 ... 4.43017662e-01 4.14963186e-01 -3.72863263e-01] [ 1.29909086e+00 5.49421310e-01 7.91072905e-01 ... -4.65548426e-01 5.31707644e-01 -3.69420052e-01] ... [-2.62421131e-01 5.66270351e-01 5.67432284e-01 ... -6.98744059e-01 -9.23466742e-01 -9.28712860e-02] [-1.07234538e+00 8.45746934e-01 5.41041017e-01 ... -4.26587939e-01 1.46858394e-01 7.06306219e-01] [-3.22579622e-01 7.50401676e-01 3.42770368e-01 ... 1.09709613e-01 5.94793260e-01 -5.49463093e-01]] [[ 8.60526562e-01 6.53595507e-01 -8.93383741e-01 ... 5.10324359e-01 2.43803740e-01 -7.46184811e-02] [ 8.73907581e-02 1.59156606e-01 -3.88670653e-01 ... -3.84219706e-01 -1.18048616e-01 4.36044306e-01] [-7.00618699e-02 1.50284588e-01 6.68326735e-01 ... 
6.68406487e-01 4.83748704e-01 -1.16919160e+00] ... [-4.58910584e-01 9.38441336e-01 3.23622555e-01 ... -4.41176951e-01 -1.18551791e+00 -1.14839576e-01] [-7.31538653e-01 -5.66703618e-01 -6.38948604e-02 ... 9.94720161e-02 -6.88401699e-01 -2.54066706e-01] [-6.24935180e-02 -7.63083518e-01 3.88589084e-01 ... -1.31078064e+00 -1.12852179e-01 -7.03454435e-01]] [[ 1.22520876e+00 -8.72385561e-01 2.97650993e-02 ... 8.95124435e-01 1.86386541e-01 -4.20302451e-01] [-3.32075804e-01 -1.19587116e-01 3.81910235e-01 ... -1.39893138e+00 4.82405603e-01 -2.59897202e-01] [-5.04625849e-02 -7.50544369e-01 -1.04602528e+00 ... 2.01130472e-02 -2.03737378e-01 -8.77592623e-01] ... [-5.05368471e-01 7.47969925e-01 -7.71520659e-02 ... 1.99833751e-01 3.43456239e-01 -6.45268202e-01] [-7.87633419e-01 5.32474399e-01 -7.48445272e-01 ... -2.49073672e+00 -8.04856837e-01 5.39693177e-01] [ 5.75353026e-01 8.54734033e-02 3.30151141e-01 ... 8.72860670e-01 1.80166751e-01 8.16913784e-01]]] [[[-9.12408888e-01 -8.04439187e-01 -6.15271151e-01 ... 1.24274820e-01 -5.29128432e-01 3.99363905e-01] [ 2.99457759e-01 -6.18002474e-01 -7.05111861e-01 ... 1.22637257e-01 1.55295432e+00 3.28288555e-01] [-8.22675508e-03 -8.80553722e-01 -1.36389911e-01 ... -1.24196582e-01 -1.08570778e+00 -9.90044415e-01] ... [-1.11721659e+00 -2.29638278e-01 -9.83514845e-01 ... -4.25132006e-01 -7.35257268e-01 -3.11351508e-01] [-5.21290839e-01 4.22600240e-01 7.02268720e-01 ... -5.84805071e-01 -1.03665876e+00 3.20728838e-01] [ 1.08953416e+00 -3.33097786e-01 3.21522623e-01 ... -4.23668981e-01 -1.41360566e-01 3.95261526e-01]] [[-7.82270581e-02 5.26272535e-01 -1.29527271e+00 ... -6.25700295e-01 -3.92562747e-01 -6.69321120e-01] [-3.40085566e-01 -1.26623166e+00 2.66122580e-01 ... 3.29542130e-01 -1.00180006e+00 -5.27932823e-01] [-1.23280764e-01 -2.32099116e-01 9.81950879e-01 ... -1.44885492e+00 -2.88269341e-01 1.18140948e+00] ... [ 1.72109514e-01 -3.23575974e-01 1.09544516e-01 ... 
-7.14832544e-01 1.81750581e-02 -9.12317693e-01] [-5.47754765e-01 -1.11391008e+00 -1.37938827e-01 ... 2.22131148e-01 -3.58216703e-01 3.04446161e-01] [ 7.04201519e-01 -2.15757251e+00 -7.45332897e-01 ... 1.58966362e-01 1.02017537e-01 -5.69613278e-01]] [[-5.11951268e-01 -5.11274077e-02 -2.15857163e-01 ... 1.00085564e-01 5.40727079e-01 7.31713414e-01] [-3.91689688e-02 7.65462160e-01 -9.53241736e-02 ... 3.84094447e-01 -7.69152939e-01 -5.31101406e-01] [-3.84904951e-01 -4.60191481e-02 3.62331793e-02 ... 3.04223955e-01 -1.20556641e+00 -1.86370775e-01] ... [-5.50208874e-02 -1.91441095e+00 -8.72598588e-01 ... -5.09135485e-01 8.29301357e-01 -9.66059148e-01] [ 3.91100459e-02 -4.14865136e-01 -6.05953813e-01 ... -5.44324338e-01 -1.17393464e-01 3.51448096e-02] [-2.52466977e-01 5.02222739e-02 -1.61528215e-01 ... 4.96524125e-01 -7.41542578e-01 6.58664346e-01]] ... [[ 8.86880234e-02 -5.64343929e-01 -7.39639282e-01 ... 5.75690687e-01 8.40268433e-01 -1.42272919e-01] [-1.15299881e-01 -1.16973534e-01 1.37163532e+00 ... -9.47578430e-01 -5.43400764e-01 1.10079372e+00] [ 4.31585729e-01 -1.43242970e-01 1.24530666e-01 ... -5.01442075e-01 1.59123495e-01 1.05391204e+00] ... [ 7.15685338e-02 -1.13129652e+00 5.23041710e-02 ... 5.35616159e-01 7.16998801e-02 -2.26447329e-01] [-8.36407542e-01 -3.53904277e-01 -3.03256154e-01 ... 5.73631346e-01 -9.87861395e-01 -2.00714856e-01] [ 1.14142787e+00 9.98458982e-01 -9.07677770e-01 ... 5.30963540e-01 -5.72342612e-02 -4.02913749e-01]] [[ 1.15405273e+00 2.21811369e-01 -8.94884944e-01 ... 2.51886338e-01 1.84954488e+00 1.17146604e-01] [-1.65428922e-01 3.54250193e-01 -1.14478715e-01 ... 6.20967627e-01 -1.66942418e-01 -1.43485689e+00] [ 1.23974480e-01 -4.70779568e-01 3.80482376e-01 ... 1.16249180e+00 -6.42293334e-01 7.25237906e-01] ... [ 5.00114977e-01 2.94591308e-01 1.58797801e+00 ... -5.68605900e-01 8.42114687e-01 1.38568997e-01] [ 6.84066787e-02 -2.94664860e-01 -3.74517977e-01 ... 
-2.45626003e-01 -1.26381680e-01 6.33494318e-01] [-1.01718116e+00 5.79140075e-02 -4.08650428e-01 ... -5.72524250e-01 -7.43870139e-01 -8.87686491e-01]] [[ 1.18688774e+00 -6.71992242e-01 5.23780942e-01 ... 5.06753445e-01 8.98507610e-02 1.12491560e+00] [ 1.07587524e-01 -3.47033024e-01 -6.97345197e-01 ... -1.08319473e+00 -4.02242504e-02 7.59551227e-01] [ 1.90741271e-01 9.17530119e-01 -8.62756670e-01 ... -2.13528007e-01 -2.22299293e-01 1.57679290e-01] ... [ 9.75783944e-01 4.04611528e-01 3.94256487e-02 ... -7.26756334e-01 6.42059982e-01 -8.17020416e-01] [-4.27005589e-01 -8.69923472e-01 4.82326299e-01 ... -6.09160185e-01 -6.08012676e-01 -8.53389055e-02] [ 5.77845991e-01 -1.13273934e-01 4.61703718e-01 ... -9.64225769e-01 3.67660522e-01 -4.73323822e-01]]] [[[ 8.00159097e-01 -1.34438174e-02 1.35720706e+00 ... 2.99574256e-01 -2.69372523e-01 4.06717956e-02] [-1.40936661e+00 7.86190271e-01 -1.33045161e+00 ... -9.59583700e-01 1.04926668e-01 -1.12271917e+00] [-6.45921886e-01 8.20406750e-02 2.22044721e-01 ... 1.55409738e-01 -1.03121877e+00 1.71080366e-01] ... [ 4.84156430e-01 2.68497050e-01 3.79968792e-01 ... -5.83072066e-01 1.50217995e-01 -7.66987443e-01] [ 4.65415008e-02 1.05023444e-01 2.31279239e-01 ... -7.51368701e-01 -5.13976395e-01 -1.79937422e-01] [-4.73766059e-01 1.21279025e+00 -7.04268366e-02 ... -2.80752748e-01 6.24228954e-01 1.57703087e-01]] [[-1.81016743e-01 -1.37093326e-03 -1.89310163e-01 ... -3.25338878e-02 -4.87968981e-01 3.39158267e-01] [ 3.34465206e-01 4.68711317e-01 -1.23634195e+00 ... 5.58252931e-01 -1.78604767e-01 -6.30677044e-01] [-5.94613314e-01 -8.57800663e-01 4.20864820e-01 ... -8.49853277e-01 -3.70432287e-01 -3.26386631e-01] ... [-8.36440861e-01 3.61866206e-01 -3.29064697e-01 ... 1.34754434e-01 -1.02816594e+00 -2.36187518e-01] [-2.38361597e-01 9.65499401e-01 -8.67279232e-01 ... 8.39430392e-01 1.26635611e-01 3.07617307e-01] [-1.58287793e-01 -3.72456044e-01 -3.88338715e-01 ... 
7.53448009e-01 -6.98906243e-01 3.61252129e-01]] [[ 8.69410872e-01 7.44114593e-02 9.12430361e-02 ... -7.59701207e-02 -1.80399895e-01 -2.48430803e-01] [-3.60142469e-01 -7.42546082e-01 6.56413913e-01 ... 7.91760385e-01 -3.58346343e-01 -8.60338449e-01] [-7.16650337e-02 -4.44414228e-01 -2.73797661e-02 ... -5.70362568e-01 -1.30314863e+00 -1.12684393e+00] ... [ 1.97550684e-01 -3.04651260e-01 1.89699337e-01 ... -1.12085879e+00 -5.56532383e-01 -6.39601529e-01] [-9.11390185e-01 7.88005888e-02 -2.73630977e-01 ... -9.12919581e-01 4.39712286e-01 -2.83133030e-01] [-5.78133404e-01 4.66584474e-01 9.92647111e-01 ... -8.90264273e-01 -1.44465518e+00 -5.22236168e-01]] ... [[ 8.82661462e-01 2.13448119e+00 -4.50855047e-01 ... 5.54579683e-02 -1.09084809e+00 5.47224522e-01] [-1.24299157e+00 2.48017216e+00 1.07436609e+00 ... 5.36607981e-01 -8.81748915e-01 -8.23899567e-01] [ 1.05215192e+00 -7.55531192e-01 2.60823667e-02 ... 7.78704211e-02 2.21928790e-01 -5.53457737e-01] ... [ 8.54507864e-01 -5.81598170e-02 -2.92715669e-01 ... -8.28341916e-02 9.25471559e-02 -4.68198746e-01] [-1.11771412e-01 3.05288225e-01 -4.36798185e-01 ... -9.60464180e-01 7.98449397e-01 -8.25133383e-01] [ 9.25353169e-01 -2.71540463e-01 4.85417485e-01 ... 1.57593459e-01 -2.37506583e-01 4.85661983e-01]] [[-2.10410669e-01 -9.28897202e-01 9.02477279e-02 ... 9.86744165e-01 6.53120995e-01 -3.32076460e-01] [ 8.96113753e-01 7.52022684e-01 3.29863727e-01 ... -1.28757834e+00 4.56317425e-01 -7.87215531e-01] [-6.40687346e-01 -7.56526366e-02 1.04579365e+00 ... 9.90479946e-01 -2.14909339e+00 8.04249048e-01] ... [ 4.80862200e-01 -1.05441344e+00 2.18387812e-01 ... -3.64221931e-02 1.00056565e+00 3.50780457e-01] [ 5.69687068e-01 1.27136338e+00 -9.01621133e-02 ... -1.04096341e+00 -1.12054765e-01 -7.17751861e-01] [-5.26403725e-01 9.01153207e-01 1.32145333e+00 ... -3.71890336e-01 2.03412819e+00 6.18300915e-01]] [[ 1.66383827e+00 4.80898678e-01 2.04093009e-01 ... 
-1.18838751e+00 -2.73592114e-01 7.31626868e-01] [-1.98201880e-01 5.97432554e-01 -8.07484150e-01 ... 1.82150924e+00 -5.15447974e-01 -2.25925297e-01] [ 5.14935493e-01 -7.95393810e-02 4.63605642e-01 ... -3.95625412e-01 1.22762179e+00 9.17322338e-01] ... [-5.98434091e-01 -4.34894204e-01 5.67775130e-01 ... -1.80145968e-02 9.49238002e-01 -1.23012757e+00] [ 1.21123981e+00 -3.64591151e-01 1.08313942e+00 ... 1.46016967e+00 1.36402249e+00 2.41665766e-01] [-2.20903888e-01 -3.69926631e-01 -2.10869133e-01 ... -8.46262813e-01 -8.96679580e-01 -4.35849965e-01]]] [[[ 3.29820871e-01 6.58328891e-01 -6.10131323e-01 ... -5.28584778e-01 1.20525110e+00 5.02521574e-01] [ 9.23983872e-01 9.26001966e-02 -1.73337147e-01 ... -5.08595333e-02 -8.10259223e-01 2.72393264e-02] [ 5.51512957e-01 4.17182118e-01 -1.27455398e-01 ... -3.65057886e-02 1.49333298e+00 -8.30237687e-01] ... [-9.25543666e-01 -5.85424423e-01 -1.57795966e-01 ... 5.34742355e-01 -1.38472068e+00 -2.50328988e-01] [-4.92672265e-01 -1.23541224e+00 7.70832837e-01 ... -7.94219851e-01 -6.05671644e-01 -2.13951349e-01] [-2.04833195e-01 9.24406409e-01 -6.03904724e-01 ... 1.55188298e+00 1.25798404e+00 6.79117218e-02]] [[-8.25161934e-01 -2.10660076e+00 3.31430212e-02 ... -5.48850536e-01 6.75134286e-02 -4.28311646e-01] [ 5.12209475e-01 8.78391027e-01 -1.06471467e+00 ... -7.27219641e-01 5.64769983e-01 2.85281867e-01] [-1.14897799e+00 1.84154272e-01 1.74878705e+00 ... -1.24039102e+00 6.42430305e-01 4.30191487e-01] ... [ 4.87384886e-01 -1.04567277e+00 4.21810746e-01 ... -6.65560588e-02 -4.52791840e-01 2.01172139e-02] [-2.94870168e-01 -1.25655365e+00 1.60213923e+00 ... 2.94289500e-01 4.01630163e-01 1.29234004e+00] [ 4.54004169e-01 -2.11143076e-01 -1.37941110e+00 ... -8.54367137e-01 1.70170426e-01 1.24014640e+00]] [[-2.01572210e-01 2.92499959e-01 7.06482291e-01 ... 1.79207191e-01 -6.66442037e-01 6.84326664e-02] [-4.85868871e-01 -5.41364193e-01 6.72812462e-01 ... 
4.73746657e-01 -4.15290684e-01 3.52306843e-01] [ 1.07341635e+00 8.39455649e-02 -2.56218076e-01 ... 9.00958836e-01 6.34822696e-02 -2.94837445e-01] ... [-5.45911372e-01 -6.90364242e-01 9.11567926e-01 ... 5.83995104e-01 3.35407764e-01 1.22706580e+00] [-2.84969866e-01 3.15994501e-01 -2.56712347e-01 ... -2.25147784e-01 3.38744909e-01 2.84755796e-01] [-1.41009942e-01 4.14737076e-01 2.50555903e-01 ... -1.03242248e-01 7.82061815e-01 1.16185415e+00]] ... [[-2.81410307e-01 -4.08987910e-01 -8.15208852e-01 ... 1.21175694e+00 5.60047269e-01 -1.17132843e+00] [ 4.01156068e-01 -1.52839482e+00 -9.43428516e-01 ... -8.88955474e-01 1.86862528e+00 5.10694325e-01] [ 5.09854555e-01 5.25571704e-01 5.57653904e-01 ... 3.92914951e-01 -5.74691951e-01 -8.25800598e-02] ... [ 7.25684524e-01 -3.03134322e-01 7.36875907e-02 ... 6.71942532e-01 -7.14556575e-02 -4.12844606e-02] [-1.33811927e+00 -5.48526943e-01 8.36315844e-03 ... -4.22199637e-01 5.52497625e-01 2.23150373e-01] [-1.15894306e+00 2.99853772e-01 6.96715176e-01 ... 2.37555392e-02 2.69916475e-01 -4.75956678e-01]] [[-3.21977645e-01 -3.27554792e-01 7.11255431e-01 ... -1.49958646e+00 -9.42302823e-01 5.31485558e-01] [-7.84590188e-03 -3.25673610e-01 -7.37782419e-01 ... -3.31949294e-02 5.95495887e-02 3.08107883e-01] [-8.77248347e-02 2.49570996e-01 -1.41742241e+00 ... 5.08695066e-01 2.12125078e-01 -3.24386746e-01] ... [ 9.16136265e-01 -3.04773241e-01 1.82904005e-01 ... 5.45205772e-01 9.49439704e-02 1.44711328e+00] [-2.98759013e-01 4.62316841e-01 1.90922722e-01 ... 6.75161242e-01 -7.31792688e-01 7.29258060e-01] [ 3.72673124e-01 -6.58603549e-01 6.55205309e-01 ... -1.11915672e+00 6.43138468e-01 5.60781896e-01]] [[ 1.10101509e+00 -2.90252566e-01 6.49207950e-01 ... 1.84543550e-01 8.77369761e-01 5.50024509e-02] [-4.54196513e-01 2.96279013e-01 -3.79342198e-01 ... -4.90620434e-01 1.18982518e+00 1.31998765e+00] [ 9.62858260e-01 -5.42008698e-01 -4.03119892e-01 ... 4.89396751e-01 6.76817477e-01 -4.26424593e-01] ... 
[-3.40589523e-01 -3.72085199e-02 -9.42481160e-02 ... 3.04612577e-01 -1.19807506e+00 -4.14971083e-01] [ 8.99105668e-01 3.03663075e-01 1.37210593e-01 ... -3.50128800e-01 -1.23947275e+00 -1.57140005e+00] [-4.31919664e-01 -7.89225519e-01 -2.43196040e-01 ... 7.67295063e-01 -7.59900451e-01 1.04000294e+00]]] [[[-4.85757798e-01 1.30493820e+00 1.07825387e+00 ... -7.40873218e-02 3.51895630e-01 -1.41376173e+00] [ 3.93560350e-01 -1.81375399e-01 -7.59947523e-02 ... -9.69108641e-01 -5.50306678e-01 -7.87425816e-01] [-4.68417525e-01 2.40483612e-01 7.78205454e-01 ... 4.65991408e-01 9.08604860e-01 7.18763947e-01] ... [ 3.27320874e-01 8.29094499e-02 1.16703302e-01 ... 2.31334027e-02 -7.09478974e-01 -3.37852150e-01] [-6.64077818e-01 -1.52021098e+00 3.11413288e-01 ... -9.46327209e-01 1.22917280e-01 7.18671381e-01] [-9.28149819e-01 7.20297024e-02 6.76130831e-01 ... 1.59651026e-01 -9.96583700e-02 9.75197554e-01]] [[-8.11046064e-01 6.46267474e-01 -1.89470828e-01 ... -1.57191622e+00 -5.35166323e-01 -1.23210341e-01] [ 1.09320557e+00 6.42178893e-01 7.34786749e-01 ... -5.29985309e-01 1.28368235e+00 -5.64869642e-01] [ 3.76410961e-01 7.66913772e-01 -3.56724560e-02 ... -6.68030620e-01 -6.84251428e-01 -5.50993681e-01] ... [-8.95892501e-01 -7.79683769e-01 -7.71337509e-01 ... 8.03720236e-01 5.21161795e-01 -5.08937240e-01] [-6.70715809e-01 6.86793625e-02 5.96884131e-01 ... -1.78819627e-01 -1.15072572e+00 -1.18760431e+00] [ 1.00210571e+00 2.21447852e-02 -4.23735917e-01 ... 6.10911250e-01 4.80449766e-01 1.49499416e-01]] [[-3.78346235e-01 4.61897522e-01 1.85238779e-01 ... -1.07273781e+00 -3.68903011e-01 -3.94593447e-01] [ 2.41322175e-01 -8.49663794e-01 -7.45591000e-02 ... -2.78317928e-01 4.70736474e-02 -5.67403138e-01] [-7.16155991e-02 8.20239604e-01 -8.82647261e-02 ... -1.67693853e-01 -1.28995764e+00 -3.71380925e-01] ... [-1.86451167e-01 -4.78183985e-01 1.79496080e-01 ... -5.44911362e-02 -4.53157008e-01 1.09054983e+00] [-1.11205924e+00 -7.81404912e-01 -1.08412892e-01 ... 
3.92096460e-01 1.07858551e+00 -8.40291321e-01] [ 2.35196069e-01 -6.77382648e-01 -1.03734232e-01 ... 2.51932144e-01 8.43257904e-01 -1.19553484e-01]] ... [[ 3.27068686e-01 2.75854349e-01 -1.15509994e-01 ... -5.83331466e-01 -3.79359663e-01 -5.89546025e-01] [ 2.55752861e-01 -1.04031849e+00 1.05322087e+00 ... -1.24690628e+00 -5.97068593e-02 8.45095277e-01] [ 2.65892595e-01 -4.02167380e-01 1.79016575e-01 ... 3.41993570e-01 1.27099857e-01 3.91160280e-01] ... [-1.07738256e+00 1.89160734e-01 1.35714030e+00 ... -2.71721900e-01 7.56469518e-02 -1.87830061e-01] [-3.30893844e-01 -2.27431446e-01 2.90924460e-01 ... -1.41000763e-01 6.16689265e-01 -7.54314184e-01] [ 2.94852138e-01 -2.95473397e-01 6.08056605e-01 ... 5.91136754e-01 -6.45402789e-01 -2.40590855e-01]] [[-2.59033412e-01 3.61122489e-01 7.37904534e-02 ... 5.49043179e-01 -1.20382142e+00 1.59509909e+00] [ 6.76066339e-01 8.13469827e-01 2.73445189e-01 ... 6.81948900e-01 -1.02815628e+00 7.41403461e-01] [ 8.27924669e-01 7.93904662e-01 1.49980143e-01 ... -7.58638740e-01 8.16908956e-01 9.22152460e-01] ... [-9.96244788e-01 1.12026525e+00 -2.50752777e-01 ... 7.78915346e-01 5.84602594e-01 -1.24962735e+00] [-1.85821280e-01 -5.81872702e-01 1.17232716e+00 ... 2.56841391e-01 -2.57159416e-02 1.61846399e+00] [ 4.30804193e-01 -4.92475539e-01 -3.23796906e-02 ... -7.73257732e-01 -2.68915385e-01 3.99719216e-02]] [[-6.63450122e-01 -7.76658416e-01 6.11045837e-01 ... 1.37995934e+00 -1.46315008e-01 1.53799234e-02] [-1.57792187e+00 7.24037766e-01 9.44792867e-01 ... 7.47159839e-01 -5.97169280e-01 -9.27300036e-01] [ 7.69217134e-01 3.56551945e-01 8.79199624e-01 ... -8.28989670e-02 4.12196487e-01 2.01050490e-01] ... [-1.29031849e+00 -5.80548167e-01 4.37760681e-01 ... 4.18247402e-01 1.59710991e+00 -3.15393567e-01] [-5.96948922e-01 -7.56629705e-01 -1.72270522e-01 ... -1.05564344e+00 2.03849271e-01 -4.14340258e-01] [-8.73149276e-01 -9.41804111e-01 -1.03963208e+00 ... 
-2.68475175e-01 -5.05048275e-01 1.05394281e-01]]] [[[-6.52809367e-02 -6.12257838e-01 2.22441912e+00 ... -4.11093347e-02 3.66051197e-01 -1.02083707e+00] [ 1.44873428e+00 1.00448728e+00 1.31028569e+00 ... 1.08193481e+00 1.03979445e+00 1.11725879e+00] [-3.49533528e-01 3.28470498e-01 7.53423154e-01 ... 2.31162146e-01 8.28419209e-01 -2.21696049e-01] ... [ 1.35835087e+00 4.24268432e-02 2.06963450e-01 ... -6.11714303e-01 -8.76690567e-01 -3.99347842e-01] [ 2.04726309e-01 -9.54669058e-01 1.68850291e+00 ... -5.34385085e-01 -3.83586317e-01 7.83713043e-01] [ 2.25865304e-01 8.04606140e-01 -4.15909171e-01 ... 5.65731764e-01 2.73397326e-01 -4.44366217e-01]] [[-3.74556988e-01 6.86740220e-01 -1.98371217e-01 ... 3.47148597e-01 -6.08046949e-01 2.24059924e-01] [-8.27668533e-02 8.93643439e-01 1.54626143e+00 ... -4.87234086e-01 2.21981123e-01 4.39054161e-01] [-1.02739644e+00 1.37303278e-01 1.08254540e+00 ... -4.44758207e-01 1.42295256e-01 3.68090421e-01] ... [ 4.60447818e-01 -3.77617091e-01 -1.61690223e+00 ... -7.54993558e-01 -4.30901907e-02 -9.28813279e-01] [ 2.40219086e-01 -8.15508664e-01 -1.02661514e+00 ... -7.41490304e-01 2.30335355e-01 -7.61147618e-01] [-8.16925228e-01 1.01076329e+00 3.09590369e-01 ... -4.72834080e-01 4.55124229e-01 5.56937814e-01]] [[ 7.57485867e-01 -5.71373403e-01 8.13462794e-01 ... -7.21674711e-02 -1.55344534e+00 -1.92008317e-01] [-1.70440271e-01 -4.37385082e-01 -2.76548862e-01 ... -4.11814377e-02 -8.84692907e-01 -2.31583387e-01] [-2.74510056e-01 1.40112829e+00 -1.44536212e-01 ... 6.05884016e-01 -3.72025549e-01 -3.63909036e-01] ... [-2.65720375e-02 2.45969631e-02 1.55259117e-01 ... -1.13984443e-01 -7.75140524e-02 1.29973125e+00] [-4.52287719e-02 -1.12862194e+00 -9.84018683e-01 ... 5.83905637e-01 -9.85282008e-03 5.18894017e-01] [-6.46157742e-01 5.46970107e-02 5.87990105e-01 ... 4.32744503e-01 4.75250274e-01 8.75465035e-01]] ... [[ 4.63086009e-01 3.25006276e-01 -6.48775697e-02 ... 
-1.12282410e-01 -5.52161038e-01 -9.42425370e-01] [ 1.21214283e+00 -6.87302649e-01 1.75526381e-01 ... -1.10485144e-01 3.30541372e-01 -3.89161050e-01] [-5.10087192e-01 5.15894592e-01 3.43573064e-01 ... -3.44193608e-01 -2.48715430e-01 1.05090491e-01] ... [-3.21608186e-01 1.99176818e-01 3.77863497e-01 ... 1.17192447e+00 -5.22577643e-01 -6.46333992e-02] [ 2.33492032e-01 1.05160010e+00 -5.95695138e-01 ... -1.76063105e-01 3.16746175e-01 -7.35899433e-02] [ 3.47214252e-01 -5.63885570e-01 -1.10950625e+00 ... -1.56407297e-01 4.66747761e-01 7.56038249e-01]] [[ 1.19184363e+00 6.68171287e-01 -9.23371792e-01 ... 7.14449227e-01 8.99558008e-01 -1.34725884e-01] [ 8.97560775e-01 -5.13527751e-01 1.21273577e+00 ... -1.21398279e-02 -1.96891874e-01 5.37678063e-01] [-1.41565549e+00 1.38133373e-02 9.08670008e-01 ... 1.50011134e+00 -9.07824874e-01 6.09561026e-01] ... [ 1.90218627e-01 5.46415687e-01 -1.23458195e+00 ... -5.96581809e-02 5.27215660e-01 1.81557226e+00] [ 2.05109209e-01 -5.54703236e-01 -2.49768570e-01 ... -1.44359994e+00 4.01770383e-01 -1.16714978e+00] [ 8.44673753e-01 3.78560692e-01 6.09581053e-01 ... 1.76359728e-01 -5.04181921e-01 -9.50185478e-01]] [[ 4.90531683e-01 -5.75232089e-01 -8.37762594e-01 ... -1.75189126e+00 -8.42032790e-01 4.80231613e-01] [-1.11436224e+00 -2.37217605e-01 3.45287055e-01 ... 1.75134186e-02 5.93651652e-01 -4.16479588e-01] [-7.04183131e-02 -6.34220421e-01 -7.00805545e-01 ... -1.44938707e+00 2.90974140e-01 -5.71139693e-01] ... [ 4.12436426e-01 -1.79108456e-01 9.40835774e-01 ... 1.33055925e-01 1.35894448e-01 -4.63987626e-02] [-6.59304380e-01 1.29389083e+00 -8.53489280e-01 ... -2.22236171e-01 -2.82893389e-01 1.27510667e+00] [ 6.00675941e-02 -2.08668053e-01 9.58234131e-01 ... 5.46341121e-01 6.18582904e-01 -7.11963296e-01]]]] [[[[ 1.71806049e+00 -7.04805553e-01 6.50813460e-01 ... 1.36086911e-01 -8.61348748e-01 -6.04141951e-01] [-1.33267319e+00 -1.17900455e+00 -1.98273864e-02 ... 
-5.41576862e-01 4.17020693e-02 5.23332179e-01] [-5.85374951e-01 1.40473533e+00 7.79575050e-01 ... 2.26903066e-01 -1.71032250e-01 -5.91574848e-01] ... [-7.69972265e-01 8.59176338e-01 -2.48769268e-01 ... -8.59602392e-01 1.20805967e+00 -3.36864114e-01] [ 1.05920470e+00 -4.18508314e-02 3.01199436e-01 ... 4.38051701e-01 1.78133631e+00 -6.19027801e-02] [ 1.32095528e+00 -9.49528396e-01 -1.28720790e-01 ... -3.47931176e-01 -4.09350991e-01 -1.48408294e+00]] [[-4.52172756e-02 -2.87423640e-01 -2.16152716e+00 ... -5.02525866e-01 3.91134679e-01 -1.57696116e+00] [ 1.15772676e+00 -1.89259380e-01 6.81818962e-01 ... -3.35930228e-01 -4.50179756e-01 7.54949868e-01] [-4.25879210e-01 -3.84988785e-01 -3.22987914e-01 ... 3.16370070e-01 2.44657114e-01 8.60610187e-01] ... [-1.68933845e+00 2.06976026e-01 1.00341678e-01 ... 6.97220787e-02 -7.82996535e-01 -7.32402653e-02] [ 3.43484253e-01 -2.28263110e-01 2.13225812e-01 ... 7.90905431e-02 -3.06286484e-01 9.18982089e-01] [ 2.60552824e-01 5.23110807e-01 -7.09307134e-01 ... -4.81093794e-01 -5.82144976e-01 1.50316264e-02]] [[-3.84626210e-01 1.68053791e-01 1.15045619e+00 ... -2.55142272e-01 1.42490923e+00 -1.30377674e+00] [ 7.36703873e-01 4.61661279e-01 7.13474393e-01 ... -1.23491228e+00 8.44450772e-01 -1.21327746e+00] [ 4.08930093e-01 -2.26483509e-01 3.14922333e-02 ... -4.34378505e-01 -6.55376911e-01 -4.83734673e-03] ... [-3.44358742e-01 -3.65760744e-01 1.43618727e+00 ... -1.21154273e+00 5.12775071e-02 5.55417299e-01] [ 4.10033524e-01 1.92705959e-01 1.32327870e-01 ... 6.14407361e-01 7.06587374e-01 -1.42376935e+00] [-5.30100942e-01 -1.11252797e+00 5.03462911e-01 ... 6.38715208e-01 1.05813336e+00 1.96914762e-01]] ... [[-3.10921855e-02 1.19960736e-02 -3.33239557e-03 ... -3.86791915e-01 -1.13659859e+00 7.45867431e-01] [ 1.01525986e+00 -1.22247410e+00 1.74869224e-02 ... 2.85797659e-02 -2.33648896e-01 -9.30934399e-02] [-5.23255885e-01 2.26201534e-01 3.25151145e-01 ... 1.19867451e-01 -2.58096695e-01 -9.17085111e-01] ... 
[-2.85559654e-01 1.03463602e+00 2.55467296e-01 ... -4.87432867e-01 1.24179375e+00 3.94820035e-01] [-3.02675128e-01 5.37378080e-02 -1.82846710e-01 ... 5.14010429e-01 -6.85709536e-01 -5.17042339e-01] [-2.92531013e-01 8.71587574e-01 -4.26300228e-01 ... -7.38691270e-01 4.88917381e-01 4.13882136e-02]] [[ 7.68970624e-02 -3.82635683e-01 3.96916449e-01 ... 3.82851452e-01 8.12017918e-01 3.82417232e-01] [-2.41376922e-01 -5.90430871e-02 -1.11403131e+00 ... -2.03504157e+00 -8.23967516e-01 1.35504276e-01] [ 7.07635701e-01 -1.02582142e-01 -2.68478356e-02 ... 1.32836020e+00 1.70915455e-01 3.16388190e-01] ... [-6.66023865e-02 1.03191698e+00 4.76059914e-01 ... -7.60092556e-01 -1.24654818e+00 -4.21036839e-01] [ 1.20078707e+00 -5.19977927e-01 7.84994245e-01 ... -1.83480906e+00 1.65730000e+00 9.30956658e-03] [ 2.35923335e-01 1.59467474e-01 -6.64107502e-01 ... -3.52793127e-01 7.60437608e-01 1.48266673e+00]] [[ 7.91185915e-01 3.74995619e-01 -6.40087366e-01 ... 5.83228290e-01 -2.05914646e-01 7.12497056e-01] [-6.24488235e-01 -1.60324502e+00 5.44673145e-01 ... -1.23133838e-01 5.97727180e-01 -4.88816559e-01] [ 9.97083485e-01 -4.13846314e-01 6.90621674e-01 ... 1.64736956e-01 1.40666887e-01 1.27338365e-01] ... [ 1.36776209e+00 1.25872448e-01 1.53796887e+00 ... -1.31799849e-02 -1.23776627e+00 3.82648230e-01] [-3.37193966e-01 3.68722379e-01 9.30413753e-02 ... -4.60836440e-01 7.44586661e-02 1.05148125e+00] [-1.91456094e-01 5.02043366e-02 -1.06568313e+00 ... 2.55848289e-01 1.64863840e-01 1.21605432e+00]]] [[[-1.22198105e+00 5.45151472e-01 8.61678720e-01 ... 7.84738362e-01 -2.77921170e-01 -1.09736824e+00] [-1.66053206e-01 -8.23024631e-01 7.56536245e-01 ... -4.71392632e-01 3.84965092e-01 -3.94317389e-01] [-4.28750694e-01 -7.01301694e-01 2.26280287e-01 ... -4.84995931e-01 -2.48842880e-01 -1.05822515e+00] ... [-6.02250099e-01 7.49982715e-01 2.56977826e-01 ... 4.62560236e-01 -3.39376032e-01 -4.21228260e-01] [ 5.70830703e-01 -1.20260846e-03 -1.32470071e-01 ... 
9.85900819e-01 -2.13829949e-02 9.08108592e-01] [ 6.22043848e-01 4.65892345e-01 -3.14804107e-01 ... 2.35046700e-01 1.80162418e+00 5.08287311e-01]] [[ 1.44814897e+00 -1.26463294e+00 -1.75301239e-01 ... 3.96998435e-01 -6.70257211e-01 -1.92505762e-01] [ 4.85448569e-01 8.45654070e-01 8.39516819e-02 ... 6.91731453e-01 2.33982369e-01 -1.90104954e-02] [ 7.87630498e-01 1.32694161e+00 -6.78847790e-01 ... 6.54042363e-01 -2.41956756e-01 -1.53082299e+00] ... [-5.64216375e-01 9.30670917e-01 2.18834534e-01 ... -2.00463727e-01 4.83268909e-02 5.48642933e-01] [ 8.47435057e-01 3.84508938e-01 5.40310051e-03 ... 3.19817454e-01 -2.00477034e-01 -1.36272597e+00] [-8.55930090e-01 9.24700439e-01 -1.48774397e+00 ... -4.73284977e-04 1.24308622e+00 -3.05081129e-01]] [[-8.54983151e-01 1.07707679e+00 -2.58986384e-01 ... -2.99247324e-01 3.75394583e-01 -8.04258943e-01] [ 7.87051246e-02 1.34124815e-01 -4.77652758e-01 ... -1.14904118e+00 -1.50422835e+00 -9.00161386e-01] [ 2.31711045e-01 -5.29192805e-01 -2.17125848e-01 ... 4.08435136e-01 5.02269328e-01 -6.63457215e-01] ... [ 1.38554290e-01 8.02748382e-01 1.79693222e-01 ... -4.61319059e-01 -2.26041898e-01 1.36551833e+00] [ 6.56687140e-01 -8.49899501e-02 9.75641429e-01 ... 5.78921974e-01 3.99302095e-01 -1.64350495e-01] [ 4.93839622e-01 3.34462494e-01 -4.17624354e-01 ... 3.38252455e-01 2.61198819e-01 -9.29192364e-01]] ... [[ 5.45884185e-02 -5.24565458e-01 -1.31450236e+00 ... 8.56046200e-01 -3.94217446e-02 -6.29789889e-01] [-8.26952886e-03 -4.85386074e-01 2.30280802e-01 ... -1.13668776e+00 2.80981779e-01 -1.38392463e-01] [-7.14817941e-01 1.09757984e+00 -8.07838798e-01 ... -5.30305028e-01 -5.27755320e-01 3.03589821e-01] ... [ 2.04188496e-01 -4.07339066e-01 -7.31559634e-01 ... 1.11364293e+00 8.71076703e-01 3.22123140e-01] [ 1.74111322e-01 -2.51675323e-02 -1.37531221e-01 ... -1.31117150e-01 1.19957078e+00 8.60338926e-01] [-6.76545918e-01 5.48259854e-01 -7.51943588e-01 ... 
4.09505606e-01 2.19838724e-01 -7.67035306e-01]] [[ 1.98503762e-01 -8.82645488e-01 7.62144387e-01 ... 2.27382898e-01 -2.89575636e-01 -2.00716764e-01] [-8.04750741e-01 2.15612575e-01 2.28253171e-01 ... 3.62756640e-01 8.22733715e-02 -4.89682704e-01] [-9.17517841e-01 -6.09507024e-01 -1.31487834e+00 ... 3.40681732e-01 -4.87116098e-01 -7.46425211e-01] ... [-7.11084843e-01 6.63806573e-02 6.41889691e-01 ... 5.58528244e-01 -3.49886447e-01 4.55965132e-01] [-3.63637239e-01 -1.05685031e+00 3.96310121e-01 ... -5.32552958e-01 -6.24449193e-01 8.24957490e-01] [-4.28934216e-01 -1.88676134e-01 6.57463849e-01 ... 3.10630780e-02 -5.82199335e-01 -6.84848368e-01]] [[ 3.05217028e-01 4.09941167e-01 -1.21975377e-01 ... -3.37421298e-01 8.08351040e-01 -1.28268272e-01] [ 4.45704199e-02 -3.38024855e-01 -3.66602689e-01 ... 2.68091887e-01 -9.31110978e-01 1.13284469e-01] [ 7.57570803e-01 -8.57478231e-02 6.52883947e-01 ... -9.07487497e-02 -1.28322765e-01 -1.24636568e-01] ... [ 3.60786825e-01 3.92224461e-01 2.63978988e-01 ... -7.73243248e-01 -7.89898559e-02 6.13852479e-02] [-5.93607247e-01 -1.30149114e+00 -1.91567987e-01 ... -9.70856607e-01 2.06709549e-01 -5.85448205e-01] [ 7.28409469e-01 7.75459409e-01 -5.63302219e-01 ... 4.59488392e-01 -2.74627861e-02 9.83304441e-01]]] [[[-9.19193253e-02 -3.01707327e-01 -9.36622322e-01 ... -1.42012775e+00 -3.72647971e-01 -1.16243415e-01] [-3.11936378e-01 1.46247789e-01 7.39466473e-02 ... -6.93385243e-01 -5.65507531e-01 1.20810890e+00] [ 3.18724841e-01 -8.62880766e-01 4.23075676e-01 ... 6.23933733e-01 2.29539260e-01 -2.67010450e-01] ... [ 6.28016233e-01 -1.29504696e-01 8.16963553e-01 ... -6.74217701e-01 1.17885876e+00 -1.01954436e+00] [-1.07391834e+00 -1.21363148e-01 -1.23262429e+00 ... -1.27051151e+00 2.97489762e-01 -8.57634604e-01] [ 2.11860567e-01 7.50825331e-02 1.92657039e-01 ... 1.18126132e-01 3.28047127e-01 8.45346987e-01]] [[-3.98490340e-01 -1.46798596e-01 -3.53511982e-02 ... 
4.88125771e-01 -2.36676019e-02 -1.08254111e+00] [ 6.08150840e-01 -2.91163892e-01 1.16101444e+00 ... 2.37806320e-01 1.16839814e+00 6.80439115e-01] [ 5.50975740e-01 4.07682985e-01 3.44709270e-02 ... 3.06492858e-02 8.95069420e-01 -2.31350079e-01] ... [-6.18867099e-01 2.91104168e-01 -1.04782891e+00 ... 3.68394226e-01 3.61799896e-01 6.53806049e-03] [ 2.02864390e-02 5.20685971e-01 -3.60832840e-01 ... 3.33595812e-01 -1.29921067e+00 6.44370615e-01] [ 7.87911117e-01 -5.35468459e-01 -8.89779031e-01 ... 4.00973916e-01 3.45583558e-01 3.52612615e-01]] [[ 3.87830406e-01 -6.25689328e-01 -1.28189534e-01 ... -9.16931033e-01 -1.14771962e-01 1.53522205e+00] [ 2.56187677e-01 1.08112860e+00 2.43542761e-01 ... -2.02607608e+00 1.56872761e+00 -1.03020608e+00] [-8.58796358e-01 -7.30878532e-01 -4.90485847e-01 ... 6.45110071e-01 9.56209123e-01 -6.67073071e-01] ... [ 1.64414734e-01 -6.46543801e-02 5.45936167e-01 ... -5.68535745e-01 -4.54746068e-01 -1.12725461e+00] [ 1.16563523e+00 -2.79682130e-01 5.70411623e-01 ... -7.66193986e-01 1.83414960e+00 9.23011661e-01] [ 5.87569356e-01 -1.74196232e-02 9.44543004e-01 ... 1.10528326e+00 7.81862676e-01 3.36467117e-01]] ... [[-6.49552524e-01 -9.97593760e-01 1.18736959e+00 ... 7.90802777e-01 4.00850683e-01 -8.84649679e-02] [ 5.11337996e-01 7.23027825e-01 -2.54313648e-01 ... 2.82207966e-01 -1.62996221e+00 -7.24945545e-01] [ 4.36228037e-01 -8.27830434e-01 4.68381494e-02 ... 5.16485751e-01 6.22725934e-02 1.53518200e-01] ... [ 1.71251357e-01 3.97528678e-01 4.88604531e-02 ... -3.16512883e-01 9.23624098e-01 3.69887203e-01] [ 1.55698329e-01 -6.07713938e-01 1.64454710e+00 ... -1.55915177e+00 7.89905310e-01 -4.16138560e-01] [-7.43500888e-01 -7.40686506e-02 2.75217831e-01 ... 7.16695562e-02 -6.48023188e-02 1.31415522e+00]] [[-1.17729485e+00 -9.32448387e-01 -1.48967609e-01 ... 1.39511004e-01 5.67576766e-01 9.57566053e-02] [-4.51244175e-01 -1.06469166e+00 -4.43629920e-01 ... 
9.70507205e-01 -5.77625573e-01 -1.07958645e-01] [-6.31567538e-01 4.11963165e-01 -3.77218604e-01 ... 4.00172472e-01 4.75711673e-01 1.09613204e+00] ... [ 9.35814619e-01 5.81286907e-01 -7.33600080e-01 ... -3.24107111e-01 2.75144309e-01 8.15180838e-01] [-1.68218923e+00 3.21203977e-01 -2.54429299e-02 ... 2.26030067e-01 4.74886775e-01 -5.11092722e-01] [-2.21256744e-02 5.72679698e-01 1.60485232e+00 ... 1.18915856e+00 -1.54716086e+00 -1.02036018e-02]] [[-6.00017369e-01 -7.59150028e-01 4.05531645e-01 ... 1.08419800e+00 -1.40634108e+00 3.78554136e-01] [-1.16632544e-01 3.82654518e-01 -9.94084537e-01 ... -3.07573006e-02 -9.25629660e-02 1.33893061e+00] [-2.07386762e-01 -3.32497388e-01 -2.49247283e-01 ... -9.17483628e-01 3.03431749e-01 -1.77012846e-01] ... [-3.03604722e-01 7.20952034e-01 -6.34184897e-01 ... -2.20050052e-01 -4.91854966e-01 9.51192901e-02] [-3.80743057e-01 -5.47228307e-02 2.11527482e-01 ... -3.22066486e-01 2.77679384e-01 2.71219939e-01] [-1.25576162e+00 -3.91974039e-02 -4.84109074e-01 ... 3.58917445e-01 1.73427105e-01 5.11911094e-01]]] [[[-7.37009048e-01 -9.27482724e-01 6.04928374e-01 ... -6.42484844e-01 -1.05637586e+00 4.05813545e-01] [ 3.15622300e-01 4.60179061e-01 -8.47731382e-02 ... 1.07722707e-01 -5.89892924e-01 -8.24409187e-01] [-1.39698803e+00 5.10435775e-02 9.14081037e-01 ... 7.25592017e-01 8.90017331e-01 -2.20876098e-01] ... [-4.53536846e-02 -7.47512162e-01 -3.01917225e-01 ... -7.21870542e-01 -5.19851506e-01 -1.91853598e-01] [ 6.67500123e-02 -1.17471270e-01 2.11418688e-01 ... 8.90828311e-01 8.26552510e-01 -9.42479491e-01] [-5.99316120e-01 1.64138407e-01 -2.97063261e-01 ... 1.14305031e+00 1.08048737e+00 -1.71693996e-01]] [[-1.75160050e-01 -7.27456212e-01 -5.42301357e-01 ... -3.42507690e-01 -7.08954334e-02 -1.55286849e-01] [-3.81343722e-01 -2.54601330e-01 -1.16540670e+00 ... 4.14958507e-01 8.64860535e-01 5.09772956e-01] [ 4.51756239e-01 6.13215342e-02 3.48925918e-01 ... -1.06365693e+00 5.69085360e-01 5.22401094e-01] ... 
[ 1.02463877e+00 -9.77819562e-01 6.45319968e-02 ... -2.04023067e-02 9.66132283e-01 3.97493899e-01] [-1.76756814e-01 -1.04567432e+00 -1.49316561e+00 ... 3.40860754e-01 -1.04770049e-01 1.52395621e-01] [-5.85814774e-01 2.57011741e-01 -6.57737136e-01 ... -1.41629651e-01 -1.11324139e-01 -3.36813033e-01]] [[ 3.42807740e-01 5.58854997e-01 -2.35131279e-01 ... 1.05569553e+00 1.99529573e-01 5.06056666e-01] [ 3.08883302e-02 5.06529391e-01 1.01755357e+00 ... 1.76814184e-01 1.95919907e+00 -1.13451111e+00] [ 7.66078770e-01 -8.95727277e-01 1.08037090e+00 ... 1.03097355e+00 8.48145008e-01 7.72545516e-01] ... [ 9.25482750e-01 5.03738880e-01 2.74349880e+00 ... 5.83872378e-01 -2.37058267e-01 -3.88154685e-02] [-1.81818143e-01 1.50157541e-01 -1.10087383e+00 ... 1.04869819e+00 -6.07808352e-01 -4.52601880e-01] [ 2.23204836e-01 3.25600500e-03 -1.07409132e+00 ... -3.46666127e-01 -1.06132638e+00 -4.24403213e-02]] ... [[ 2.96832770e-01 8.23813200e-01 -6.58825785e-03 ... 5.82607746e-01 -1.78904772e-01 9.34451997e-01] [ 8.51044357e-01 -1.46581322e-01 3.98078322e-01 ... -2.24856108e-01 1.32096246e-01 1.94304228e-01] [-4.56353515e-01 -9.87109900e-01 -3.55937742e-02 ... 1.11168027e-01 -9.78105247e-01 -2.38746569e-01] ... [-3.96304317e-02 -5.72999597e-01 -8.28418016e-01 ... 9.81415361e-02 3.12631540e-02 4.23896015e-01] [-8.54415596e-01 -8.70269775e-01 -3.01323086e-01 ... -9.46593404e-01 -1.14284325e+00 6.67722404e-01] [-5.73031545e-01 -6.69707954e-01 8.50365400e-01 ... -1.65608037e+00 -3.91963601e-01 -1.32529169e-01]] [[ 6.12505794e-01 5.43972664e-03 1.50613105e+00 ... -8.72354582e-02 -2.07088208e+00 -2.93565154e-01] [-1.84280086e+00 -9.65665653e-02 -8.71296108e-01 ... -5.83475709e-01 8.80885124e-01 -5.82689047e-02] [ 2.88863480e-01 1.84843779e+00 1.67149723e+00 ... 4.69198227e-01 -6.09841585e-01 -3.63429040e-01] ... [-4.32620287e-01 -6.72707558e-01 7.52212822e-01 ... -8.95239770e-01 -5.15981913e-01 5.58980167e-01] [ 3.93741243e-02 -2.67965406e-01 -2.17459917e-01 ... 
2.27440093e-02 -3.65014672e-01 9.17122513e-02] [-6.98265195e-01 5.40698171e-02 4.14579779e-01 ... 9.02682543e-01 -7.03150779e-03 -2.81215668e-01]] [[ 1.12016630e+00 -7.45990634e-01 5.12974322e-01 ... -1.27636147e+00 -1.20682549e+00 1.20747790e-01] [ 4.76066083e-01 1.19445646e+00 5.03251493e-01 ... 1.98909938e-01 9.01319012e-02 -6.97026372e-01] [ 7.74428725e-01 5.30931532e-01 7.64708996e-01 ... 1.52401912e+00 -6.85725957e-02 -2.52417147e-01] ... [-1.71641254e+00 -1.02837694e+00 -5.59934855e-01 ... -1.65308118e-02 -5.70712745e-01 1.00584619e-01] [ 3.54217321e-01 1.48787570e+00 -6.50611222e-01 ... -5.42542219e-01 -3.23926687e-01 3.79510283e-01] [ 1.38916993e+00 -1.12552965e+00 -6.64639711e-01 ... 1.37908328e+00 -5.25618613e-01 -6.83237493e-01]]] [[[-9.44348335e-01 -6.25639379e-01 7.31484830e-01 ... 6.26094341e-01 -1.72709584e-01 7.85534978e-01] [ 3.97987992e-01 -2.22621799e-01 -1.41765845e+00 ... 8.32042456e-01 3.02727431e-01 -7.33821154e-01] [-5.56050614e-03 6.10080501e-03 1.15530145e+00 ... -2.28443399e-01 -3.83489609e-01 8.80471542e-02] ... [ 4.75813329e-01 -1.43702650e+00 -3.28968823e-01 ... -2.04623908e-01 1.80191055e-01 -1.13742185e+00] [-2.03685775e-01 -3.48359138e-01 4.57174391e-01 ... 2.49258339e-01 8.88676196e-02 2.22461879e-01] [-2.21018959e-02 -9.29308832e-01 -1.11854720e+00 ... 5.80691755e-01 2.80531704e-01 -3.66346985e-02]] [[-1.23676129e-01 6.30609035e-01 -8.99730384e-01 ... 1.93997920e+00 6.15733564e-01 -4.40432876e-02] [-1.09799027e+00 -3.19732547e-01 -8.58158290e-01 ... -1.07070529e+00 -1.04373193e+00 -8.36720109e-01] [-1.59633005e+00 -6.59377813e-01 2.96487004e-01 ... 6.67902887e-01 -9.25184190e-01 -7.99043417e-01] ... [-7.12121665e-01 3.01412195e-01 -1.51143596e-01 ... 4.40566391e-01 6.50814712e-01 7.77401567e-01] [ 4.02120024e-01 -1.23170292e+00 -5.00246823e-01 ... 9.67530489e-01 -1.32554042e+00 -4.00755405e-02] [ 2.09720790e-01 -1.43704042e-01 3.86428535e-01 ... 
1.18375480e-01 4.46322083e-01 1.87774014e+00]] [[ 1.25423527e+00 1.57783949e+00 -6.87691987e-01 ... 4.08525974e-01 2.05261993e+00 1.82646501e-03] [-1.26342386e-01 -2.79455155e-01 -7.49740064e-01 ... -5.46999693e-01 -2.97930330e-01 3.67038161e-01] [ 3.50564510e-01 -6.27745315e-02 5.39343774e-01 ... 6.69145703e-01 -8.98289382e-01 5.89963734e-01] ... [-5.76152682e-01 -1.43858862e+00 -3.50299507e-01 ... 4.01507393e-02 -5.01080692e-01 3.25595677e-01] [-4.79590982e-01 -1.38439992e-02 -8.70067894e-01 ... 3.64259154e-01 -3.36353272e-01 -2.07328886e-01] [-1.21744168e+00 -1.98720947e-01 -7.13623106e-01 ... -3.48470360e-01 4.24477100e-01 4.05846499e-02]] ... [[ 3.48195016e-01 -7.18202710e-01 2.27233812e-01 ... 1.87802196e+00 1.34868091e-02 4.01222259e-01] [-5.61033845e-01 -8.87899637e-01 4.29514796e-01 ... -7.57470071e-01 4.07028735e-01 8.46411228e-01] [-2.01526672e-01 -1.30803898e-01 -3.75110582e-02 ... -1.28611445e-01 -1.31002617e+00 7.96802700e-01] ... [-1.03282094e+00 -7.37876952e-01 -4.39756691e-01 ... -2.79037327e-01 4.98305917e-01 -4.45814691e-02] [-2.48990119e-01 6.17269814e-01 8.92236650e-01 ... -5.18217742e-01 4.98805434e-01 -8.85690451e-01] [-1.35361171e+00 -4.01385039e-01 7.52029195e-02 ... -7.08491728e-02 2.89082509e-02 5.99962175e-01]] [[-8.96356583e-01 1.37951982e+00 7.13232636e-01 ... -1.51486382e-01 -5.38241863e-01 -6.00080252e-01] [ 5.71121156e-01 5.56612730e-01 7.27847517e-02 ... 2.72800893e-01 -1.05325915e-01 -7.79260159e-01] [ 1.05124516e-02 -1.84551537e-01 -2.07784340e-01 ... 2.06100523e-01 1.46118176e+00 -8.76485527e-01] ... [ 6.34147942e-01 -5.30158162e-01 -6.08208716e-01 ... 4.67090547e-01 1.31276453e+00 -2.69152164e-01] [ 1.20567858e+00 -1.83508313e+00 8.39371026e-01 ... -3.37337442e-02 6.87904716e-01 -7.85909176e-01] [-2.47808740e-01 -6.45135105e-01 -3.43986809e-01 ... 3.10205817e-01 -5.52721620e-02 -2.28155300e-01]] [[ 2.24548295e-01 -9.25635695e-02 6.65594578e-01 ... 
2.41037998e-02 1.10244143e+00 -5.52063167e-01] [ 9.84993950e-02 8.88319165e-02 -7.92414188e-01 ... 6.43391490e-01 -9.77253020e-01 7.18511567e-02] [ 6.53320909e-01 -1.50186613e-01 -2.31479466e-01 ... -3.79278719e-01 -8.19195747e-01 1.15474117e+00] ... [ 1.05899203e+00 1.61860399e-02 7.17319250e-02 ... -7.67922223e-01 -5.24520397e-01 3.35248619e-01] [-6.44710124e-01 -7.16572285e-01 4.29084420e-01 ... 2.31651843e-01 -4.34209377e-01 -1.30192399e-01] [ 8.85762721e-02 4.17196423e-01 -6.52207911e-01 ... 2.19850942e-01 8.48544419e-01 5.19471884e-01]]] [[[-1.46263584e-01 1.71856582e+00 -3.54059398e-01 ... 1.06117213e+00 -5.91342509e-01 2.00512931e-01] [-2.24222571e-01 -3.64502460e-01 4.35230255e-01 ... -2.84075558e-01 -3.62040073e-01 8.60711515e-01] [-1.78456623e-02 -3.88404131e-01 -6.28202915e-01 ... 1.79771587e-01 -5.19699991e-01 4.95083034e-01] ... [-4.07071412e-02 -1.43283099e-01 9.66575801e-01 ... 6.25415564e-01 -4.19582933e-01 1.26057565e-01] [ 1.27136028e+00 -3.00628901e-01 3.96906048e-01 ... 3.04103017e-01 7.62264252e-01 1.06758273e+00] [-5.33766411e-02 2.88098574e-01 5.03113449e-01 ... 3.75641555e-01 1.26761365e+00 -2.37019807e-01]] [[ 1.02247024e+00 3.65074426e-01 -6.33692622e-01 ... -5.47655642e-01 7.14736283e-01 -1.02073383e+00] [-5.25833964e-01 -1.19230703e-01 -1.18120241e+00 ... -8.49295020e-01 -7.51511335e-01 -5.12379527e-01] [ 8.10273170e-01 3.72942418e-01 2.29727075e-01 ... -5.83398402e-01 -1.63782597e-01 -1.28814209e+00] ... [-1.14068091e+00 -1.25560272e+00 -1.04824829e+00 ... -3.47314984e-01 -1.11692011e+00 -5.81469119e-01] [-4.78112042e-01 -7.52924383e-01 -1.44323969e+00 ... -1.02496469e+00 -1.88755944e-01 4.38833773e-01] [-3.86715755e-02 -4.52608436e-01 -3.63406330e-01 ... -6.64423823e-01 -8.59187067e-01 -7.92958379e-01]] [[-2.52959847e-01 -1.43953875e-01 -2.21627146e-01 ... 9.81827617e-01 -1.83406338e-01 -4.84866463e-02] [ 1.21138239e+00 -3.03909361e-01 -2.06123605e-01 ... 
2.50313491e-01 8.06231320e-01 3.22573632e-01] [-2.35648990e-01 -7.21356213e-01 -8.68512839e-02 ... 7.64128923e-01 3.17684352e-01 2.48708129e-02] ... [ 3.76157969e-01 3.71925473e-01 2.43654624e-01 ... -8.69620144e-01 1.25280333e+00 5.54534435e-01] [-5.01001656e-01 -9.39949080e-02 2.48246029e-01 ... -4.74762321e-01 5.05058885e-01 -3.72910649e-01] [-9.43624079e-01 5.09854496e-01 1.69387245e+00 ... -4.36502844e-01 2.31103301e-02 -7.57411942e-02]] ... [[-2.96235859e-01 2.07971916e-01 1.32367742e+00 ... 5.46927989e-01 -4.55695003e-01 -1.32051623e+00] [ 3.86536360e-01 -1.15214539e+00 7.24900246e-01 ... -1.34096444e+00 5.72109878e-01 -5.32041252e-01] [ 2.07529426e-01 2.08095819e-01 -1.51804030e-01 ... -6.27519488e-01 -2.02238932e-01 5.23232400e-01] ... [-6.26602352e-01 -6.69662058e-01 -6.35754228e-01 ... 4.85154510e-01 1.67737901e-01 7.88685918e-01] [ 2.07144827e-01 -5.52403629e-01 -2.08267614e-01 ... -1.02402949e+00 -9.58584249e-01 2.84027070e-01] [ 2.86596268e-01 8.95528674e-01 -6.57380879e-01 ... -8.77047405e-02 -2.23180130e-01 -9.69522059e-01]] [[-9.69573438e-01 -1.59277111e-01 6.75804317e-02 ... 9.20989394e-01 -7.94722497e-01 -3.71927395e-02] [ 1.23488748e+00 1.21602106e+00 9.16819692e-01 ... -3.73155475e-01 -3.69079322e-01 -4.73272987e-02] [ 1.96499694e-02 -1.41143894e+00 -5.97921729e-01 ... 3.00518960e-01 -2.81145185e-01 -1.14402920e-01] ... [ 5.84305167e-01 -1.41510570e+00 3.23269039e-01 ... -1.04776181e-01 -2.92893469e-01 -4.31143194e-02] [-1.20190346e+00 2.41700426e-01 -1.30991980e-01 ... 2.10896522e-01 3.14259768e-01 5.83962262e-01] [-5.59864104e-01 1.88144118e-01 5.52377105e-01 ... -2.69601434e-01 -5.02378166e-01 4.71484959e-01]] [[ 2.30845854e-01 -2.03420103e-01 4.74244326e-01 ... -8.43675528e-03 2.62279779e-01 -2.74183691e-01] [-4.01238166e-02 -5.77760637e-01 -3.12917456e-02 ... 6.59648061e-01 -5.62897682e-01 -3.67384762e-01] [ 1.62542272e+00 -2.35851556e-01 7.15609252e-01 ... 9.16450620e-01 -6.55899584e-01 5.52277207e-01] ... 
[ 7.91484475e-01 -6.14496946e-01 -4.16637182e-01 ... 6.96083188e-01 5.69855422e-02 -2.04557881e-01] [-9.22319710e-01 1.02757025e+00 2.26716936e-01 ... 1.32014424e-01 -8.00991178e-01 -7.73025095e-01] [ 1.49782926e-01 -1.25616729e-01 1.62639931e-01 ... 1.35995895e-01 8.71054888e-01 5.07574797e-01]]]] [[[[ 7.83535123e-01 9.70591843e-01 4.23578590e-01 ... 2.18836814e-01 -1.93449199e-01 -3.54072422e-01] [ 3.93853724e-01 -7.48388827e-01 -9.53112602e-01 ... 9.36875120e-02 6.56597793e-01 4.98598754e-01] [ 4.90965933e-01 -9.45722282e-01 -3.78799289e-01 ... 1.08257508e+00 -1.47885120e+00 1.16270947e+00] ... [-3.74003589e-01 -2.82120079e-01 -1.23423386e+00 ... 7.96759844e-01 -4.69044834e-01 -9.36992466e-02] [ 3.41769695e-01 6.36821806e-01 -1.10649467e+00 ... 5.56134403e-01 -1.58445328e-01 -4.91433322e-01] [ 4.53974977e-02 -1.12765741e+00 1.55914497e+00 ... -9.94053543e-01 7.47181714e-01 2.80560553e-01]] [[ 6.58372603e-03 -2.77967215e-01 3.21668178e-01 ... 3.99915129e-01 5.71747184e-01 -5.37827564e-03] [ 1.30469596e+00 -7.68115759e-01 4.12184507e-01 ... -1.53770304e+00 -4.54042614e-01 -1.05143659e-01] [ 1.15400589e+00 -1.06375289e+00 -5.55245094e-02 ... 3.08751941e-01 1.49563134e+00 -4.24793176e-03] ... [-5.45684528e-03 5.07893026e-01 -8.72960508e-01 ... -8.12412351e-02 -1.06363332e+00 3.85775089e-01] [ 1.04080486e+00 -5.43680787e-01 1.02030075e+00 ... -2.36257002e-01 2.61299107e-02 -1.31824970e+00] [-6.62165225e-01 6.05899572e-01 -3.12835157e-01 ... 1.89884394e-01 -1.44464225e-01 -8.30783367e-01]] [[-7.67362952e-01 2.63601035e-01 -1.15491939e+00 ... 3.95657331e-01 1.35037243e+00 -7.17971861e-01] [-6.29516006e-01 -1.25056565e+00 6.67966068e-01 ... -3.51826161e-01 -6.96787596e-01 -2.40271643e-01] [-1.32771397e+00 1.73571900e-01 1.06060661e-01 ... 8.16770643e-02 3.48489404e-01 -1.21886387e-01] ... [ 2.02898234e-01 -1.44311237e+00 -4.23990756e-01 ... 4.66275603e-01 -3.44595790e-01 -3.85993384e-02] [-1.42214608e+00 -2.89479464e-01 -4.51653957e-01 ... 
-7.57205188e-01 7.01506674e-01 -6.69784546e-01] [-1.94714308e-01 -1.05676663e+00 -1.20439902e-01 ... -3.12494725e-01 9.04793516e-02 1.27781052e-02]] ... [[-3.84967059e-01 -2.89024487e-02 8.69578421e-02 ... -2.61379480e-01 -4.53707159e-01 -1.03993550e-01] [-5.29830754e-01 -7.26284027e-01 2.47613415e-01 ... -4.64157254e-01 2.69272536e-01 2.99245596e-01] [ 1.25456810e-01 -4.36740071e-02 -1.54251885e+00 ... 7.56156266e-01 1.23131955e+00 -8.25222194e-01] ... [ 1.58183205e+00 -1.40750902e-02 -8.59463885e-02 ... -5.34660637e-01 1.33358228e+00 -1.32246837e-01] [-2.24022508e-01 -5.97936392e-01 -8.40142369e-01 ... 8.00715685e-01 -8.92625153e-01 -1.02711582e+00] [-2.39325032e-01 -1.81974679e-01 8.14082026e-02 ... 1.10563803e+00 1.06414378e+00 -2.60449618e-01]] [[ 4.71590996e-01 -4.19913560e-01 -4.08888869e-02 ... 2.67369058e-02 -2.72811174e-01 6.63573027e-01] [-4.26130503e-01 -1.64914671e-02 -2.78184414e-01 ... 4.31852788e-01 -1.78818688e-01 -1.04288590e+00] [ 1.44829190e+00 7.18147993e-01 3.73956531e-01 ... -2.08716050e-01 -2.85278320e-01 -7.07074523e-01] ... [ 3.36413890e-01 1.39617786e-01 -1.71578720e-01 ... -7.70998836e-01 6.78416193e-02 -1.57934904e-01] [ 7.44733989e-01 -1.21072447e+00 1.38846114e-01 ... 9.50371087e-01 2.59262919e-01 8.67199779e-01] [ 2.45919243e-01 4.97091234e-01 -4.94679362e-01 ... 1.19314253e+00 4.28240523e-02 -1.31713963e+00]] [[-5.82309127e-01 -8.85280728e-01 -6.95540488e-01 ... -3.16656470e-01 1.00466144e+00 -1.93038583e-01] [-1.26904345e+00 1.10260117e+00 -4.50067043e-01 ... -2.76219755e-01 -8.11870575e-01 -2.52078056e-01] [-1.01804093e-01 -1.08042300e-01 -1.39343023e-01 ... 6.17421687e-01 8.86205971e-01 -6.38159335e-01] ... [ 5.39088666e-01 -1.11748859e-01 -1.37677863e-01 ... 4.56622869e-01 -5.00635862e-01 1.16096413e+00] [ 1.01445889e+00 9.30100501e-01 9.43916023e-01 ... 1.45556638e-02 8.41857493e-01 6.41702175e-01] [-1.10776924e-01 -3.88028949e-01 -1.27456233e-01 ... 
-7.59065092e-01 4.84807670e-01 6.55166879e-02]]] [[[ 6.46608591e-01 -8.20197642e-01 -3.00078187e-02 ... -8.24552059e-01 -1.21238375e+00 -7.11639285e-01] [-2.33715773e-01 3.11638087e-01 7.53806114e-01 ... -1.11292386e+00 -6.67460561e-01 -1.18260443e+00] [ 2.13175312e-01 -5.12904823e-01 -1.04970813e+00 ... 4.85047624e-02 -7.58290350e-01 -6.31516039e-01] ... [ 7.32031047e-01 -1.29189521e-01 -2.39379510e-01 ... -3.83382320e-01 -2.54269868e-01 -3.59693229e-01] [-6.76857889e-01 -1.70276299e-01 -1.95064676e+00 ... 2.51956761e-01 -1.15340686e+00 -4.81931448e-01] [-1.50190488e-01 -1.60493183e+00 3.25901955e-01 ... 7.15887487e-01 1.24074578e+00 -1.95732713e-01]] [[ 2.68474907e-01 3.59142840e-01 8.82478952e-01 ... 1.10213709e+00 1.92650668e-02 9.17583779e-02] [-1.08743131e-01 6.64473653e-01 -3.57155889e-01 ... 2.10894316e-01 4.49312091e-01 1.42345870e+00] [-8.57578516e-01 4.16865647e-01 -8.31424534e-01 ... -5.88387787e-01 3.75693023e-01 6.64428115e-01] ... [-3.47396880e-02 4.81176049e-01 -1.07506335e-01 ... -4.46169615e-01 6.08097732e-01 7.23847806e-01] [ 9.28539336e-01 -1.01035714e+00 8.66603255e-01 ... 2.66000837e-01 2.64628142e-01 -2.85709113e-01] [-3.79326910e-01 2.01807201e-01 -1.27338856e-01 ... -4.39430289e-02 1.18135500e+00 1.35196590e+00]] [[-9.36921909e-02 -5.86638004e-02 -9.25875068e-01 ... 3.80953759e-01 1.23793948e+00 -4.79887873e-01] [ 3.69169533e-01 -3.54377985e-01 6.29470497e-02 ... -1.45520449e-01 -1.39908016e-01 1.59314677e-01] [-2.40195900e-01 -1.79394186e+00 4.23628628e-01 ... 7.06215203e-01 1.73747563e+00 2.06723630e-01] ... [ 1.28777909e+00 -8.11836541e-01 3.34570348e-01 ... 5.05295098e-01 1.02702534e+00 -1.00148463e+00] [ 8.15993786e-01 1.02322125e+00 -2.08284453e-01 ... -2.23508388e-01 -2.01951790e+00 4.75012273e-01] [ 5.45027435e-01 -1.73070133e-01 6.14524841e-01 ... -6.03315711e-01 -1.22550309e+00 1.48794621e-01]] ... [[ 1.42267728e+00 1.09387422e+00 4.29016322e-01 ... 
5.01715779e-01 -3.09356242e-01 1.19862843e+00] [ 1.74518198e-01 -1.75881356e-01 -3.79059017e-01 ... 3.68579656e-01 6.80117607e-01 7.45206773e-01] [-1.48001635e+00 3.62307519e-01 5.32406986e-01 ... -7.63813794e-01 -4.64077115e-01 -6.15230948e-02] ... [ 5.10445774e-01 -1.58952379e+00 9.63658750e-01 ... -9.57529008e-01 1.25056779e+00 9.27830398e-01] [-1.14615870e+00 8.53768289e-01 7.11321831e-01 ... 8.74461710e-01 -6.16760612e-01 6.11994982e-01] [ 4.49639261e-01 5.65733671e-01 3.57588023e-01 ... 1.51961637e+00 4.89756227e-01 -3.31571579e-01]] [[ 1.24421573e+00 1.07292235e+00 -5.89078307e-01 ... 9.12698209e-01 2.95923889e-01 8.11163247e-01] [ 4.08670783e-01 2.38789409e-01 1.51812553e+00 ... -1.53837293e-01 -1.38329422e+00 -6.53760672e-01] [-1.24027558e-01 3.34898472e-01 -1.58822489e+00 ... 4.35807019e-01 5.99401534e-01 -1.29213586e-01] ... [ 5.56992710e-01 -2.57647365e-01 -8.09320629e-01 ... -3.80104542e-01 1.78905368e-01 1.32726744e-01] [ 1.59698144e-01 4.43721235e-01 4.37328219e-02 ... -1.14056535e-01 -1.21976614e+00 -1.18935919e+00] [ 9.95600164e-01 -1.91040313e+00 7.77866542e-01 ... -1.08324580e-01 8.24906603e-02 -1.59793794e+00]] [[-5.85234344e-01 -2.20769778e-01 -8.88670564e-01 ... -1.07771790e+00 1.05154264e+00 -3.80367965e-01] [ 9.77633707e-03 -1.26284480e+00 -1.02373075e+00 ... 4.40460801e-01 3.23168516e-01 -1.08277164e-01] [-1.01301622e+00 1.04621269e-01 -2.98490494e-01 ... 2.49490306e-01 1.21301579e+00 2.47724161e-01] ... [ 9.54065621e-01 2.59770930e-01 7.46581793e-01 ... -2.24616881e-02 -7.20128953e-01 -1.82910457e-01] [-6.30064189e-01 -5.87709129e-01 -1.18998122e+00 ... -2.42064118e-01 7.35270679e-02 -5.00626683e-01] [-8.30141485e-01 -3.07636917e-01 -1.65237576e-01 ... 8.83851424e-02 -1.17881942e+00 3.44272017e-01]]] [[[-6.37185812e-01 -8.31702530e-01 3.56784254e-01 ... 1.13654651e-01 -1.31424057e+00 1.04337955e+00] [-5.68361342e-01 -1.84017408e+00 -7.43276536e-01 ... 
4.71930951e-01 7.01593995e-01 1.35254788e+00] [ 1.99540937e+00 5.25988579e-01 7.53755271e-01 ... 4.87193257e-01 5.32794595e-01 -3.70217417e-03] ... [-1.19258547e+00 4.82694536e-01 -9.02034104e-01 ... 9.09987926e-01 1.63855836e-01 -8.26598763e-01] [ 4.64832753e-01 8.98620844e-01 -3.12221348e-01 ... 2.50133246e-01 -2.76194245e-01 -1.07040715e+00] [-1.79515779e-01 1.30665052e+00 -1.09406643e-01 ... 8.95675778e-01 8.31807435e-01 -9.32039991e-02]] [[ 1.58912554e-01 1.23234284e+00 6.13119721e-01 ... -5.95307410e-01 8.07711244e-01 2.96324611e-01] [-4.19897616e-01 -1.91132426e-01 -7.64340460e-02 ... 3.22079629e-01 2.90177375e-01 1.29926562e+00] [ 1.67959762e+00 7.76226521e-01 -2.77026176e-01 ... 6.46901906e-01 8.29018950e-01 1.02030504e+00] ... [ 6.64372623e-01 7.25807190e-01 -9.79760647e-01 ... 4.32174414e-01 -4.81116295e-01 7.59130359e-01] [-6.07070863e-01 9.08547997e-01 -2.07186311e-01 ... -3.49559009e-01 8.77636373e-02 7.84177661e-01] [-1.58886480e+00 -1.55163571e-01 1.41935959e-01 ... -1.28719890e+00 -7.05089986e-01 4.62108284e-01]] [[ 1.71201074e+00 -1.41249970e-01 -1.20352423e+00 ... -1.94838554e-01 5.42047203e-01 -3.61734450e-01] [-7.91603446e-01 5.21043502e-03 -2.85851091e-01 ... 9.41685796e-01 2.39364170e-02 1.55494630e+00] [ 1.07421494e+00 8.66064429e-02 -1.42502260e+00 ... -1.11799002e+00 6.17359579e-01 7.92082429e-01] ... [-1.12424821e-01 6.66812539e-01 5.49660325e-01 ... -7.08163202e-01 6.12265885e-01 6.79990649e-01] [ 2.95083851e-01 6.68488085e-01 -9.00959790e-01 ... 7.92600334e-01 8.79866123e-01 -7.65492201e-01] [ 4.78212871e-02 -1.28435957e+00 -1.39486536e-01 ... -1.28762519e+00 1.09539819e+00 1.49204999e-01]] ... [[-2.22499803e-01 1.71251488e+00 1.76669210e-01 ... 2.53131300e-01 9.67388093e-01 1.17380731e-01] [ 1.25562429e+00 -7.16522336e-01 -7.16431379e-01 ... 4.52171713e-01 -5.58541000e-01 1.23633337e+00] [ 1.12353778e+00 -8.82317901e-01 -3.14246058e-01 ... 9.34727490e-02 3.80352795e-01 4.06198949e-01] ... 
[ 5.61545312e-01 -5.49571991e-01 -3.03546250e-01 ... -7.43641078e-01 1.84430569e-01 7.38477588e-01] [-1.27218807e+00 -3.53929818e-01 -1.45193920e-01 ... 8.07690024e-01 7.28124455e-02 2.17955619e-01] [-1.38275683e+00 5.11721224e-02 -4.78990763e-01 ... 8.05442810e-01 2.49832764e-01 1.41700178e-01]] [[-2.72204041e-01 -4.03335184e-01 -7.48869896e-01 ... 1.63689271e-01 1.63753286e-01 -4.30554092e-01] [-1.65890217e+00 -1.85366228e-01 -2.20324546e-01 ... 1.27235547e-01 2.80887395e-01 -3.02045673e-01] [-2.70976454e-01 -7.80989677e-02 2.52136767e-01 ... -4.42961872e-01 2.46537596e-01 7.06775412e-02] ... [ 3.83566543e-02 5.93470573e-01 1.42132416e-01 ... -1.45255968e-01 -4.10003424e-01 2.92249709e-01] [-1.12247312e+00 7.92015374e-01 8.68731737e-01 ... 1.89123347e-01 -7.84974471e-02 3.39185330e-03] [-2.25593239e-01 1.25226390e+00 -1.74757019e-01 ... 8.27663481e-01 2.14157671e-01 3.74311917e-02]] [[ 1.46158397e+00 1.58398747e+00 -2.58458287e-01 ... -7.41097391e-01 1.18841447e-01 6.70325935e-01] [-9.48547870e-02 -5.25608063e-01 -3.49881142e-01 ... -2.47058436e-01 -1.01390326e+00 -3.98509026e-01] [-7.92237937e-01 1.08354998e+00 6.21214449e-01 ... 2.46582806e-01 7.75323451e-01 3.21083039e-01] ... [ 2.17638180e-01 8.54050279e-01 -4.28457558e-01 ... 8.56640875e-01 3.65702182e-01 -9.87973094e-01] [ 4.08474207e-01 -1.93862349e-01 1.05761814e+00 ... -4.66389090e-01 4.60202783e-01 3.50297987e-01] [ 1.34641564e+00 7.45047212e-01 -3.01317364e-01 ... 8.90471101e-01 3.67808580e-01 -1.39782476e+00]]] [[[ 2.00819194e-01 1.64333344e-01 -2.25024790e-01 ... 7.54820585e-01 1.75702429e+00 -1.12250698e+00] [ 2.12482631e-01 -8.96201491e-01 1.04157758e+00 ... -1.37404823e+00 2.41837382e-01 -6.79578125e-01] [ 3.90974790e-01 -9.70821798e-01 1.45463079e-01 ... 7.08638370e-01 1.12411622e-02 -3.03376406e-01] ... [ 3.93710971e-01 -8.76566410e-01 -6.24377310e-01 ... -6.24671578e-01 -2.33222544e-01 -5.20965636e-01] [ 5.07789969e-01 6.55806661e-01 6.79628670e-01 ... 
8.22276831e-01 1.25048184e+00 -7.09625006e-01] [-1.37611195e-01 1.26358235e+00 -7.70697355e-01 ... 5.55947185e-01 -5.63664854e-01 -8.34791839e-01]] [[-2.46821210e-01 -8.26464832e-01 3.86853874e-01 ... 1.32050538e+00 -3.55937153e-01 7.48265207e-01] [ 1.22474349e+00 -4.26886022e-01 1.49615884e-01 ... 1.22232664e+00 7.43556619e-01 -1.05989128e-01] [ 1.44342554e+00 9.66917947e-02 -2.92591006e-01 ... -8.97833645e-01 -1.85386643e-01 4.21847671e-01] ... [ 1.40412956e-01 5.65203190e-01 -2.66534805e-01 ... -3.39700967e-01 -5.95661044e-01 -2.54953414e-01] [ 4.74741489e-01 3.95817578e-01 -7.66321242e-01 ... -8.92320797e-02 4.27538216e-01 -2.05807053e-02] [ 4.32418585e-01 6.79405868e-01 -3.43735427e-01 ... -7.16260850e-01 2.40282640e-01 -1.56881467e-01]] [[ 6.86867759e-02 3.94191407e-03 -7.44323313e-01 ... -8.81327450e-01 -9.69382882e-01 -1.45379201e-01] [-3.40044290e-01 1.02123115e-02 -2.30275393e-01 ... -2.37526968e-01 2.93697417e-01 -2.24108025e-01] [-5.70913374e-01 -2.64903724e-01 1.26617372e-01 ... 3.07169706e-01 8.09970856e-01 6.16874635e-01] ... [ 6.24637663e-01 2.30675548e-01 -4.48201656e-01 ... 3.79879892e-01 1.14702213e+00 -1.25925496e-01] [ 9.73205388e-01 3.70302260e-01 1.36722982e-01 ... 7.55414903e-01 -3.79133850e-01 1.01597476e+00] [-2.20740438e-01 -3.57476547e-02 6.04452491e-01 ... -9.13556278e-01 1.37178469e+00 5.63726686e-02]] ... [[ 3.89056236e-01 5.10246754e-01 9.30636704e-01 ... 1.23872221e+00 7.75372326e-01 1.33209810e-01] [-1.25847864e+00 1.45739540e-01 -1.09199420e-01 ... -7.92160392e-01 4.49563801e-01 4.22899202e-02] [ 3.43045175e-01 -1.44623780e+00 -3.86696547e-01 ... 1.15151894e+00 3.69383752e-01 -3.60008389e-01] ... [-9.55574930e-01 1.13258493e+00 -6.30625546e-01 ... -1.59047997e+00 -4.31463391e-01 1.19451694e-01] [-5.84277570e-01 3.08683328e-02 -9.58750322e-02 ... -3.12255472e-01 -4.96530712e-01 6.66145682e-01] [-6.10978127e-01 -7.91008919e-02 5.91262341e-01 ... 
-2.78573722e-01 1.55838919e+00 -9.55922455e-02]] [[ 3.03880483e-01 -6.21786654e-01 -4.78954971e-01 ... -2.81863719e-01 2.97054112e-01 -2.91241944e-01] [ 1.73332281e-02 -2.13592842e-01 -7.62548923e-01 ... -8.62488508e-01 -1.93129420e+00 -5.65449476e-01] [ 6.72511220e-01 -7.88328528e-01 1.84371993e-01 ... 8.60416889e-02 -2.43199199e-01 2.53882945e-01] ... [-1.68150961e+00 9.97012734e-01 1.60050437e-01 ... 3.10974419e-01 5.50395191e-01 6.40301481e-02] [ 3.01169544e-01 5.16020715e-01 -2.97223717e-01 ... 1.24828346e-01 -8.67156088e-01 1.48561716e-01] [-3.50335002e-01 5.98482311e-01 9.25459526e-03 ... 5.22707641e-01 2.52218515e-01 -1.80886614e+00]] [[ 9.11926150e-01 9.41323519e-01 9.65093672e-01 ... 5.83251834e-01 6.74881041e-01 -6.62104785e-01] [ 6.39641225e-01 4.78564143e-01 5.89994490e-01 ... -1.01587760e+00 -7.22378418e-02 -8.74002054e-02] [-2.18514830e-01 6.09905481e-01 -3.24463725e-01 ... -8.07516724e-02 -1.22271031e-01 3.01467448e-01] ... [ 8.65435421e-01 -4.35005248e-01 -2.10864097e-01 ... -1.12722492e+00 -5.13240099e-01 -2.28541166e-01] [-8.86240005e-02 -2.46414885e-01 4.75485623e-01 ... 8.69645059e-01 5.57423413e-01 6.88111722e-01] [-3.31952482e-01 -5.64817250e-01 -1.15020037e+00 ... 4.72000808e-01 4.19177830e-01 -1.24950223e-02]]] [[[ 5.03507257e-01 -3.71338874e-01 -6.84106290e-01 ... 4.89693671e-01 7.58943796e-01 2.12251797e-01] [ 2.47823194e-01 3.84365581e-02 -3.73264730e-01 ... 4.80100393e-01 1.27756223e-01 5.10787845e-01] [ 3.34658772e-01 -8.41623664e-01 1.15253842e+00 ... 6.61354721e-01 -7.87529111e-01 2.36630604e-01] ... [-6.41487837e-02 8.77836049e-01 2.79764295e-01 ... 5.87528884e-01 4.54111993e-01 2.83270419e-01] [-1.73963785e+00 9.75500524e-01 -2.69275665e-01 ... -3.85986060e-01 5.21629035e-01 1.24375120e-01] [-4.24860626e-01 1.60097039e+00 -3.84714007e-01 ... 1.24811113e+00 -4.97455060e-01 -2.46746298e-02]] [[-1.20795155e+00 5.66408753e-01 1.23984146e+00 ... 
-5.22952020e-01 -1.11349177e+00 8.47809136e-01] [ 4.07666743e-01 -2.24775344e-01 -1.05992365e+00 ... 3.64682116e-02 1.05867060e-02 6.48715734e-01] [ 9.47714925e-01 6.98568225e-02 2.07828417e-01 ... -3.77920777e-01 -3.17362070e-01 9.26057458e-01] ... [ 1.39475867e-01 -8.54097128e-01 2.60931224e-01 ... -1.05575740e+00 -6.17017448e-01 -4.25488740e-01] [ 8.47763598e-01 6.08111084e-01 1.54878318e-01 ... -4.89821196e-01 -2.81531721e-01 -1.51484236e-01] [-2.85285503e-01 -8.15525413e-01 4.06739116e-02 ... 1.84516292e-02 2.07150951e-01 4.74666238e-01]] [[ 6.51079655e-01 1.05092883e+00 4.33077782e-01 ... 5.65675557e-01 -6.29065186e-02 1.17936087e+00] [-4.74062055e-01 -3.21291983e-01 3.42000037e-01 ... -2.93983996e-01 -3.02560091e-01 4.90426660e-01] [-3.85122806e-01 6.01899385e-01 3.05354781e-02 ... 2.06200406e-01 -6.11244738e-01 2.28146076e-01] ... [ 1.61793143e-01 -2.60474473e-01 -1.63497114e+00 ... 1.09461892e+00 1.22607432e-01 -1.21809483e+00] [-8.88520122e-01 2.39685848e-01 8.76352191e-01 ... 1.23195040e+00 8.61483142e-02 -2.42481008e-01] [ 4.66976523e-01 2.53719181e-01 4.32083458e-01 ... 9.49089527e-01 -7.88554847e-02 4.91598815e-01]] ... [[-1.43860176e-01 1.35571569e-01 -2.45421141e-01 ... 1.82784140e+00 -9.79635715e-01 -5.25365055e-01] [-7.91999757e-01 1.69928479e+00 6.63491189e-01 ... 7.73338437e-01 3.69341910e-01 2.01537347e+00] [-9.59221721e-02 -7.10146576e-02 -8.53196025e-01 ... -4.90284622e-01 1.52891651e-01 -1.71054289e-01] ... [-3.29579979e-01 -1.94111753e+00 -1.23280942e+00 ... -1.00658357e-03 2.28729576e-01 2.98412681e-01] [ 7.02441037e-01 -4.00750428e-01 5.26771955e-02 ... -4.16259348e-01 3.40267420e-01 -9.42898571e-01] [ 1.26684308e+00 3.59234333e-01 -1.01697755e+00 ... -2.02573523e-01 -1.04414332e+00 -3.03268313e-01]] [[ 6.83342338e-01 -5.19228935e-01 2.52924562e-01 ... 4.05775636e-01 1.83818564e-02 1.08785875e-01] [ 6.37634575e-01 1.71951786e-01 1.50832629e+00 ... 
-1.17946081e-02 -1.38132975e-01 -2.98756212e-01] [ 4.95605655e-02 -8.22982416e-02 -3.93892527e-01 ... 5.10263205e-01 2.85990089e-01 -2.60925174e-01] ... [-4.49804544e-01 -4.39901985e-02 -1.06258050e-01 ... 1.10219717e+00 1.27393946e-01 3.91000092e-01] [-8.35914075e-01 -1.06455219e+00 7.31441140e-01 ... 1.36204374e+00 -4.91246045e-01 -3.25558305e-01] [ 4.86303777e-01 2.21169877e+00 2.58821458e-01 ... -5.45359515e-02 -4.77833450e-01 -5.92267334e-01]] [[-3.76036137e-01 -4.60242003e-01 2.42666272e-03 ... 1.03758085e+00 2.12822892e-02 4.10576671e-01] [-4.07848328e-01 -6.34224266e-02 -4.34596300e-01 ... 2.13993341e-01 -2.11757794e-01 -4.96698081e-01] [ 3.74212414e-01 5.26162624e-01 -8.23498487e-01 ... 8.96628797e-01 -4.71413016e-01 1.21262681e+00] ... [-1.76902616e+00 7.32396483e-01 1.38158870e+00 ... -6.04959071e-01 -3.98448646e-01 -3.93930376e-01] [-2.34790683e-01 -3.44668850e-02 9.26807106e-01 ... -1.45119691e+00 -1.90530077e-01 -9.77539539e-01] [ 8.28848481e-01 2.57162392e-01 -2.25818351e-01 ... -8.37493241e-01 6.01749778e-01 -6.53140172e-02]]] [[[ 1.28368998e+00 1.56269574e+00 1.04681122e+00 ... 3.24068904e-01 -7.13028550e-01 1.14060761e-02] [ 7.38248885e-01 3.69668812e-01 4.25098300e-01 ... 2.16952533e-01 -6.17807925e-01 -1.40461341e-01] [ 5.63858211e-01 -1.35916397e-01 1.06385100e+00 ... -1.34167910e-01 -8.07416081e-01 1.11430740e+00] ... [ 2.22035080e-01 2.40450457e-01 5.69361627e-01 ... 2.67279912e-02 8.79079849e-02 2.01172546e-01] [-2.60962516e-01 2.31828403e+00 4.90322798e-01 ... 6.51103199e-01 1.95012856e+00 1.06088959e-01] [ 4.32490677e-01 1.05347967e+00 4.20128167e-01 ... -1.63060176e+00 3.94360006e-01 9.52576816e-01]] [[-4.43928689e-01 -1.15431106e+00 -4.19990510e-01 ... -4.27298874e-01 -1.42105961e+00 -1.72382796e+00] [ 9.21077013e-01 -6.12059116e-01 8.53156984e-01 ... -6.60360098e-01 -3.33069175e-01 4.29911494e-01] [-1.19304776e+00 -1.13730228e+00 4.20604385e-02 ... 2.40802616e-01 9.28853601e-02 -5.65994382e-01] ... 
[ 1.53694010e+00 1.71851754e-01 1.00834739e+00 ... 5.97536147e-01 -5.78150094e-01 5.74654877e-01] [ 1.87988803e-02 -6.87183812e-02 1.46654403e+00 ... -9.49739695e-01 -5.70586443e-01 7.22856164e-01] [ 6.64713919e-01 8.26026320e-01 -1.60340083e+00 ... -4.67789799e-01 -5.71633399e-01 -6.24943554e-01]] [[-5.35802208e-02 -2.60534346e-01 2.77133912e-01 ... 8.44219327e-01 2.13562902e-02 1.52124465e-01] [-9.86112714e-01 8.97543281e-02 1.33789909e+00 ... -8.88303339e-01 -6.86996579e-01 -4.78130251e-01] [ 4.39156801e-01 -2.84821302e-01 4.19121712e-01 ... 1.54721820e+00 2.03133774e+00 -1.10199869e+00] ... [ 3.19317311e-01 -1.51443994e+00 9.73187685e-01 ... -6.34651035e-02 -3.80314618e-01 -8.47471476e-01] [ 1.90043256e-01 2.52906114e-01 -1.59063339e-01 ... -6.79637313e-01 8.32654536e-02 -2.70142615e-01] [ 5.20929217e-01 -3.37404191e-01 1.68491578e+00 ... -1.08325040e+00 1.08863962e+00 3.91654670e-01]] ... [[-4.55055714e-01 1.06713407e-01 1.92491263e-01 ... -5.45377791e-01 5.65597825e-02 3.67998570e-01] [ 5.64924777e-01 -6.57000422e-01 -2.73163676e-01 ... 6.89717293e-01 1.70113170e+00 7.77552307e-01] [-7.03072786e-01 -9.09914911e-01 -7.90975466e-02 ... -3.74716729e-01 5.75689077e-01 -1.03090978e+00] ... [ 1.33344993e-01 2.82223493e-01 2.39574254e-01 ... 1.73656985e-01 6.93692639e-02 -4.52660441e-01] [ 1.98599726e-01 -9.10822213e-01 -6.63134098e-01 ... 1.17083585e+00 1.16365552e-01 3.04465324e-01] [-2.46373311e-01 -1.08375239e+00 7.85850212e-02 ... 5.40370937e-04 1.92015722e-01 -3.56103837e-01]] [[ 9.05259430e-01 4.96838987e-01 -4.61465955e-01 ... -3.68138477e-02 -5.31565487e-01 -5.64983726e-01] [ 4.41163361e-01 -1.00416076e+00 1.82180673e-01 ... -1.62291825e+00 -1.00798333e+00 1.05036712e+00] [-7.92805851e-01 -6.06109917e-01 -2.77545173e-02 ... -2.23851800e-01 1.09510958e+00 5.66686913e-02] ... [-4.49113369e-01 -4.24165338e-01 -5.53054452e-01 ... 5.97673357e-01 1.76995134e+00 -1.12053907e+00] [-3.63007516e-01 -3.60156566e-01 -5.13292670e-01 ... 
3.84957433e-01 -1.41702279e-01 -1.72730768e+00] [-1.27810705e+00 4.64424789e-01 6.23490512e-01 ... -2.84192234e-01 6.30713552e-02 1.90426186e-01]] [[-4.91695553e-01 7.32703686e-01 8.85043323e-01 ... 2.29728773e-01 7.53885448e-01 -1.00945473e+00] [ 3.34046751e-01 -4.36679840e-01 8.65119636e-01 ... -1.17420495e+00 -5.97913750e-02 3.28675061e-01] [ 1.21687818e+00 7.62874126e-01 -2.44521033e-02 ... 2.17672795e-01 -5.17330766e-01 1.04687619e+00] ... [-1.10249273e-01 -3.96919161e-01 1.24392736e+00 ... 6.54245242e-02 -1.74895465e+00 2.31447145e-01] [ 9.94224012e-01 4.70180631e-01 -8.08942914e-01 ... 7.85067022e-01 -8.80807340e-01 -9.09594715e-01] [ 8.05006027e-01 -1.44001991e-01 8.19203198e-01 ... -5.43958008e-01 3.21770430e-01 5.62644303e-02]]]]]; ov_res: [[[[[ 1.54548645e+00 8.76272857e-01 2.54105061e-01 ... 1.07626641e+00 4.75063592e-01 7.55027354e-01] [ 6.14653707e-01 -4.77242261e-01 -2.44188592e-01 ... 8.82427216e-01 4.63813424e-01 -7.31037557e-01] [-7.21839309e-01 -5.34637034e-01 1.70583561e-01 ... -2.12956309e+00 -6.62822604e-01 -5.17660558e-01] ... [ 5.24551384e-02 -6.32735848e-01 5.18890060e-02 ... 8.68077636e-01 -9.10868943e-01 -6.83205605e-01] [ 2.23811626e-01 3.44029605e-01 -8.70714188e-01 ... -7.33071446e-01 2.18341872e-01 -6.35977507e-01] [ 9.37092006e-01 -4.33397651e-01 -3.45798433e-02 ... -9.21873748e-01 3.82861018e-01 -2.06189141e-01]] [[ 6.26383543e-01 1.29386097e-01 -7.14685977e-01 ... 1.05048835e+00 6.48357570e-02 -9.53055382e-01] [-3.30988407e-01 4.81040716e-01 6.03569329e-01 ... -5.35219312e-01 -7.97759116e-01 2.79920965e-01] [-9.10778940e-02 -1.25844274e-02 3.43172163e-01 ... -2.14689314e-01 9.20878574e-02 8.84673595e-01] ... [-9.56774414e-01 2.43868381e-01 5.04217632e-02 ... -1.09587836e+00 -1.69859275e-01 -5.07827461e-01] [ 3.82212669e-01 -1.44400805e-01 -1.68179795e-01 ... 5.66280663e-01 6.93612754e-01 3.74958575e-01] [ 7.59450555e-01 8.62446129e-01 4.20119554e-01 ... 
-1.22145867e+00 -1.59024715e+00 7.29338825e-01]] [[-4.29327905e-01 -7.98887849e-01 4.35212880e-01 ... -3.74543786e-01 3.51040572e-01 -2.90795323e-02] [-1.82038677e+00 2.54401356e-01 -3.94107252e-01 ... 1.32456851e+00 1.73773706e+00 -8.98474693e-01] [ 7.91693807e-01 -1.12869240e-01 -2.99515843e-01 ... -5.64175129e-01 -1.12887643e-01 5.30435205e-01] ... [-5.77554166e-01 4.37105536e-01 -8.40597510e-01 ... 5.27924180e-01 5.10483921e-01 3.81582052e-01] [ 5.35950363e-01 -4.14646268e-01 -1.75055206e-01 ... 6.34768546e-01 -6.35924995e-01 -6.30041540e-01] [ 7.19895661e-01 -1.03111613e+00 4.37530100e-01 ... 5.29444665e-02 -1.58106238e-01 -1.00168943e+00]] ... [[ 1.51252556e+00 4.23020661e-01 1.14501977e+00 ... 1.10416353e+00 6.69207424e-02 3.63280743e-01] [ 9.09875989e-01 -2.08768487e-01 -2.12419182e-01 ... 1.30285710e-01 -8.59286427e-01 -6.59377635e-01] [ 7.22760409e-02 -3.36180538e-01 -9.09782827e-01 ... 7.22181320e-01 -2.53283525e+00 4.73996729e-01] ... [-3.93346012e-01 1.64946407e-01 -4.59868431e-01 ... -1.22768953e-01 -4.51346725e-01 4.13438648e-01] [-1.12528300e+00 -1.89899221e-01 -4.67312127e-01 ... 6.15246058e-01 4.77555454e-01 5.28165817e-01] [-1.91321038e-02 1.18259735e-01 -4.60713059e-01 ... 3.77526045e-01 4.62906718e-01 -1.67785838e-01]] [[-2.91590448e-02 -2.93466955e-01 -3.41697276e-01 ... 1.15302958e-01 -2.80187964e-01 -2.68438101e-01] [-4.38226759e-01 3.42020929e-01 -5.23915470e-01 ... -3.16893935e-01 -4.58416313e-01 -1.35442340e+00] [ 1.18010469e-01 -1.81263697e+00 -4.63753730e-01 ... 1.27977431e-01 -5.18950820e-01 -1.46103263e+00] ... [-2.71069854e-01 4.97204363e-01 -1.51599824e-01 ... 6.69633523e-02 8.43779668e-02 9.15659368e-01] [-1.40360916e+00 -6.18297338e-01 -8.31633031e-01 ... -2.29503021e-01 8.95125195e-02 -7.81324089e-01] [-1.32262871e-01 -6.46656990e-01 -6.48522973e-02 ... 3.69233698e-01 1.75645798e-01 -2.18059927e-01]] [[ 5.91741204e-01 7.84205794e-01 -2.78031349e-01 ... 
2.55485296e-01 3.45608830e-01 -8.36643726e-02] [ 1.10554671e+00 -5.55596530e-01 8.08792472e-01 ... -4.97511744e-01 -2.39738896e-01 5.22772968e-02] [-1.14419729e-01 9.25392449e-01 -5.34100175e-01 ... 2.23591760e-01 -4.25161541e-01 -2.63106287e-01] ... [ 3.86358321e-01 -4.47620720e-01 -6.49558187e-01 ... 1.84826180e-01 -5.88468909e-01 -7.10417569e-01] [-8.59174550e-01 1.93926454e-01 4.71265376e-01 ... 6.22556388e-01 -6.73178062e-02 5.45682609e-01] [-4.46053147e-01 9.36507523e-01 4.84523445e-01 ... -1.80608958e-01 -2.53746688e-01 3.59491646e-01]]] [[[-1.08866084e+00 -4.79448199e-01 3.88695747e-01 ... -3.45741838e-01 2.54677445e-01 -4.79115576e-01] [-7.39258051e-01 3.28392327e-01 -1.52625307e-01 ... -2.73259394e-02 9.99219596e-01 1.14468157e+00] [ 1.41525769e+00 -7.23527014e-01 1.74764824e+00 ... 6.44177496e-01 9.05296564e-01 1.75448716e-01] ... [ 6.25014782e-01 4.53442663e-01 -1.27300739e+00 ... -1.76630333e-01 7.09405124e-01 6.35403275e-01] [ 2.56301165e-01 -5.41356862e-01 -6.92967236e-01 ... -3.18113893e-01 -1.54989600e-01 -1.44687384e-01] [ 3.27915549e-01 1.24330199e+00 2.77148128e-01 ... 3.68465215e-01 5.53028822e-01 6.79980665e-02]] [[-1.27050757e+00 -8.96523893e-01 -5.52932620e-01 ... -4.68429178e-01 1.40780997e+00 -6.39240444e-01] [-1.12275267e+00 -8.69372964e-01 -7.32179701e-01 ... -7.00787365e-01 5.18958271e-01 4.20531631e-01] [ 9.77383256e-02 -7.53858030e-01 6.72470033e-01 ... 4.40996736e-01 -4.48935986e-01 -8.44772339e-01] ... [-4.88053188e-02 -2.90435016e-01 4.66869235e-01 ... -4.03090537e-01 6.76676095e-01 6.06089890e-01] [-6.40221655e-01 -1.24561802e-01 2.42066950e-01 ... 8.57263625e-01 -4.82620358e-01 -3.72759610e-01] [-1.40010357e-01 -1.38328576e+00 9.47740436e-01 ... -2.06136957e-01 -3.56658340e-01 7.14150727e-01]] [[-9.48271528e-02 3.45502555e-01 6.44079447e-01 ... -8.82024050e-01 -6.81791127e-01 -4.60943371e-01] [-2.46486887e-01 6.30741060e-01 -3.26436073e-01 ... 
-2.19973609e-01 -4.38360602e-01 -1.44444537e+00] [ 5.16816825e-02 7.93982148e-01 -1.54217184e+00 ... -5.93168855e-01 -1.93883032e-02 5.68453729e-01] ... [-1.06742263e+00 1.03621438e-01 -1.17618811e+00 ... 3.45437974e-01 -1.10487565e-01 -9.30807054e-01] [-2.86815524e-01 -1.72501296e-01 -1.06168412e-01 ... -5.30335074e-03 -1.35330355e+00 2.86688060e-01] [-2.80215263e-01 2.56466746e-01 1.48418593e+00 ... -9.22163725e-01 9.52760816e-01 -7.14624703e-01]] ... [[-2.90369540e-01 -1.83557820e+00 -4.29153085e-01 ... 8.52529764e-01 -3.05412352e-01 -6.13055944e-01] [ 2.94401944e-01 2.81521119e-02 2.97539920e-01 ... 1.66062224e+00 4.78837311e-01 1.20854056e+00] [-2.19294548e-01 -7.68713877e-02 -4.68974501e-01 ... -7.93895870e-02 -6.57024980e-01 -5.40403202e-02] ... [ 3.15615356e-01 -4.57610101e-01 7.56897509e-01 ... 2.71122754e-01 -4.82249320e-01 -3.40058029e-01] [-8.39072317e-02 -1.97956681e+00 -1.07549632e+00 ... -4.62664068e-01 -1.47034183e-01 -7.04277277e-01] [-1.28304541e-01 6.50678277e-01 -1.88690335e-01 ... 1.06640029e+00 -5.66344917e-01 2.99678445e-01]] [[ 2.13605717e-01 2.31784493e-01 -5.68567574e-01 ... -4.40362692e-01 3.80610228e-02 -6.51227951e-01] [ 1.06587315e+00 2.54520118e-01 2.10373068e+00 ... -6.56441748e-01 6.39929295e-01 1.83809292e+00] [-1.25211227e+00 1.03081346e-01 -4.54050712e-02 ... -1.05360590e-01 -9.89840701e-02 2.37032831e-01] ... [ 2.76406199e-01 7.10816085e-01 -9.17583466e-01 ... 1.00295639e+00 -4.93840218e-01 2.10285988e-02] [-2.79774014e-02 -3.97658497e-01 7.49023020e-01 ... -5.91540217e-01 3.64708185e-01 -3.72187257e-01] [ 3.42385948e-01 9.66649652e-01 5.31246126e-01 ... -3.33689712e-02 -6.47223115e-01 9.59699094e-01]] [[-1.99264407e-01 -5.11303306e-01 -7.88744509e-01 ... 2.67812759e-01 -1.13917083e-01 -1.17851049e-02] [-4.72745270e-01 -4.05564576e-01 -8.57151687e-01 ... -1.18524492e+00 2.10540835e-02 -9.96889234e-01] [-7.83310115e-01 2.88962156e-01 -1.34378910e-01 ... -6.30554140e-01 5.10231331e-02 9.58074257e-02] ... 
[ 1.39550984e+00 -1.87930977e+00 -2.87428826e-01 ... 9.47954297e-01 -2.70805717e-01 -3.34430933e-02] [ 2.61665344e-01 3.01477909e-01 -2.88760155e-01 ... 3.26733887e-02 -3.38393211e-01 -3.02406341e-01] [-1.60621285e-01 2.84700751e-01 -8.07285905e-01 ... 9.87052143e-01 1.85554125e-03 8.03987920e-01]]] [[[ 1.43123031e-01 -6.77500188e-01 -6.44624650e-01 ... -6.82547629e-01 -1.01352558e-02 -5.65188602e-02] [ 1.02017629e+00 2.25072607e-01 5.00527740e-01 ... -5.38990200e-01 4.36598323e-02 -1.48390949e-01] [-5.60385406e-01 -4.03749458e-02 -4.79758829e-02 ... 1.78826183e-01 1.38582599e+00 -5.36104858e-01] ... [-2.42461190e-01 5.89097440e-01 3.93759012e-01 ... 1.32136002e-01 1.80328578e-01 1.16365659e+00] [-1.03942733e-02 3.25304046e-02 -5.61401129e-01 ... 2.90381163e-01 4.40132380e-01 3.07072341e-01] [-1.71164393e-01 -2.05365911e-01 1.46883488e+00 ... 4.76061851e-01 1.64999872e-01 7.54457414e-01]] [[ 2.04477012e-01 2.83558190e-01 -3.27304304e-01 ... -7.68205762e-01 -1.66551739e-01 2.60405511e-01] [-7.90017769e-02 1.05280721e+00 1.47254601e-01 ... 4.64302450e-01 8.61509264e-01 6.03654146e-01] [-1.05172610e+00 -8.77604604e-01 1.34254143e-01 ... -1.29042968e-01 -3.49782348e-01 -6.10936046e-01] ... [-3.87282610e-01 -4.53609586e-01 4.31246698e-01 ... 6.76046610e-01 -8.57945561e-01 9.42383587e-01] [-5.38503587e-01 4.50980395e-01 -8.09069097e-01 ... -3.31901133e-01 -9.25059259e-01 9.41497162e-02] [-1.25028249e-02 3.99931222e-01 5.19782305e-01 ... -5.88530600e-01 -1.23367798e+00 -1.01096296e+00]] [[-7.60986507e-01 -4.87028122e-01 1.97286457e-01 ... -1.30756870e-01 -8.34723651e-01 -4.73178893e-01] [-1.59914756e+00 -2.44572520e-01 -5.31514406e-01 ... -1.66507870e-01 1.84334409e+00 3.89031917e-01] [ 4.80015427e-01 -6.40446424e-01 1.16246827e-01 ... -1.49169818e-01 6.35721982e-02 4.92272466e-01] ... [ 1.34123814e+00 -4.64335680e-01 1.14840066e+00 ... -6.26208425e-01 -1.93478775e+00 2.06084363e-03] [-6.16643310e-01 1.19841173e-02 -7.94962764e-01 ... 
1.24119484e+00 -2.27924675e-01 -7.55932182e-02] [-8.22025120e-01 -2.12210983e-01 -2.80703336e-01 ... -6.28385782e-01 -5.35123110e-01 8.80787224e-02]] ... [[ 8.54336739e-01 -1.28761601e+00 1.21469581e+00 ... 6.09897435e-01 5.85796118e-01 9.40710783e-01] [-1.96067655e+00 4.02979255e-01 3.52336094e-02 ... -3.66955042e-01 -3.84714782e-01 1.05894186e-01] [ 9.54668224e-01 -3.08945179e-01 4.79574986e-02 ... 2.96141714e-01 2.20833763e-01 5.05288184e-01] ... [ 4.76495743e-01 5.42885303e-01 -2.37989739e-01 ... 3.29048783e-01 -3.94052863e-01 4.20557231e-01] [ 1.13560712e+00 8.37185420e-03 4.66464251e-01 ... -4.86813217e-01 -1.12946880e+00 3.05763006e-01] [-5.73703051e-01 5.19652426e-01 -2.76479721e-01 ... -8.68433416e-01 8.40098560e-01 9.06391084e-01]] [[-1.77143681e+00 3.68826985e-01 2.43149415e-01 ... -1.29409754e+00 -7.75975525e-01 -4.47027296e-01] [ 6.42802179e-01 5.51812947e-01 -3.81236762e-01 ... 1.06636882e+00 -2.56234080e-01 1.42483711e+00] [-5.46455920e-01 1.42640509e-02 3.76985222e-01 ... 6.30842388e-01 -8.26726854e-01 1.36995316e+00] ... [ 1.43399671e-01 -2.35690892e-01 -9.33464527e-01 ... -8.51369649e-02 1.84628189e-01 5.05315542e-01] [-1.31497145e-01 9.56392050e-01 -1.17100215e+00 ... -4.51185614e-01 6.10158145e-01 4.67257276e-02] [ 2.71868836e-02 7.56303668e-02 4.94384795e-01 ... 1.45927799e+00 5.70258617e-01 9.32485700e-01]] [[ 5.00919044e-01 7.71422327e-01 4.05808985e-01 ... 9.27866474e-02 2.91279554e-01 2.99214691e-01] [-3.42418760e-01 -8.42789888e-01 1.90804079e-02 ... 6.03606343e-01 -3.97902161e-01 1.07217598e+00] [-1.26654744e+00 -4.22904134e-01 1.15625234e-02 ... 2.79081434e-01 -6.45264447e-01 1.03438163e+00] ... [ 2.46781483e-01 6.43471032e-02 -5.77458143e-01 ... -7.31751502e-01 -7.62921810e-01 -1.79734662e-01] [ 4.25584674e-01 2.02937171e-01 7.63139069e-01 ... -1.21724069e+00 -2.23849401e-01 -1.07464206e+00] [-9.39741015e-01 -1.01362133e+00 -4.22622979e-01 ... 
4.91833597e-01 -1.08488953e+00 4.37762052e-01]]] [[[-2.54368901e-01 -2.20648035e-01 -1.31102502e+00 ... -1.20463264e+00 -7.40856677e-02 6.13704979e-01] [ 4.14835036e-01 5.61843336e-01 3.98942947e-01 ... 8.23525965e-01 -3.82329285e-01 -1.57151771e+00] [ 5.76558888e-01 3.94367546e-01 -9.79635715e-02 ... 1.49283886e+00 3.97562027e-01 -1.31611037e+00] ... [ 2.25991681e-01 -1.82433009e-01 -1.41710627e+00 ... 1.57423675e-01 -7.67655790e-01 1.12708330e+00] [ 2.34894931e-01 -7.38474846e-01 -8.19836557e-02 ... 5.86908996e-01 -3.37999433e-01 1.50427863e-01] [ 6.26569390e-01 -2.63896197e-01 6.03389084e-01 ... 2.80527204e-01 9.70185637e-01 1.48839545e+00]] [[ 9.00025249e-01 1.54137716e-01 3.52450311e-01 ... 1.36039948e+00 1.75467104e-01 -5.39303422e-01] [-2.40676869e-02 6.41192049e-02 -4.98843282e-01 ... -8.94598186e-01 1.27044415e+00 3.83489698e-01] [ 1.16266601e-01 5.13810992e-01 5.79679847e-01 ... 1.02680385e+00 2.97977984e-01 -5.19715548e-01] ... [ 7.21079409e-01 -4.02178705e-01 5.37556410e-01 ... 3.58138949e-01 -1.00332603e-01 -3.10910642e-01] [-6.86553717e-01 4.86932844e-01 6.54730380e-01 ... 1.48154008e+00 -8.41849577e-03 -4.14180845e-01] [-4.61396575e-01 4.84759599e-01 7.97526836e-01 ... 5.13760448e-01 6.38511360e-01 1.42495251e+00]] [[ 3.24874133e-01 3.62105787e-01 -4.68054041e-02 ... 5.26170671e-01 -7.52654493e-01 6.05898678e-01] [-4.25775081e-01 -1.02491617e+00 1.10070772e-01 ... 8.66068840e-01 -1.00966424e-01 8.06028247e-01] [-4.04292159e-02 3.95191699e-01 -4.75090533e-01 ... -4.69314843e-01 -4.01337385e-01 5.59211433e-01] ... [-2.14716978e-02 2.04835638e-01 2.69671679e-01 ... 2.43494987e-01 -1.02971303e+00 -4.47509795e-01] [-4.12658542e-01 3.26091386e-02 -8.86422470e-02 ... 5.73434234e-01 -1.63759553e+00 -9.23198104e-01] [ 3.95347506e-01 4.41813946e-01 1.44846290e-01 ... 1.31635678e+00 8.52822721e-01 1.19376451e-01]] ... [[ 7.93681562e-01 8.74002337e-01 -1.09489405e+00 ... 
3.68468702e-01 6.34718984e-02 -3.33352201e-02] [ 1.23646462e+00 -1.64584041e-01 -5.76239228e-01 ... 5.99310040e-01 1.20580196e-01 2.80531019e-01] [ 1.19739860e-01 3.06870788e-01 -6.94194913e-01 ... 9.45706144e-02 -5.70913494e-01 -7.47110322e-02] ... [-4.97978210e-01 9.01657939e-01 -1.10262215e+00 ... 1.19738007e+00 -9.59586263e-01 -1.57996237e-01] [-4.03643996e-02 -5.21415591e-01 -1.84408680e-01 ... -9.34289098e-01 -2.21520111e-01 4.66022521e-01] [-7.86503732e-01 1.71963379e-01 7.81150818e-01 ... 5.44343293e-01 -1.27754784e+00 -1.68907531e-02]] [[ 2.47635350e-01 -7.45397806e-01 1.27707601e+00 ... -1.07598877e+00 3.17143321e-01 -8.25483680e-01] [-2.43284583e-01 5.04853530e-03 -3.51895154e-01 ... 9.26245570e-01 4.57382113e-01 3.42200905e-01] [ 6.09319627e-01 -9.96268570e-01 7.23199785e-01 ... 1.09940493e+00 -1.00021803e+00 4.46546189e-02] ... [-3.49232256e-01 -7.81183410e-03 8.47363055e-01 ... -1.00394107e-01 -4.86021161e-01 5.18836796e-01] [ 2.09294036e-01 -1.17559552e-01 5.38270891e-01 ... -4.15663064e-01 -6.98992908e-01 -3.43781948e-01] [ 4.82754558e-01 1.88848786e-02 -5.24748206e-01 ... 4.07212555e-01 -4.72820908e-01 4.20875013e-01]] [[-8.09882045e-01 3.18248093e-01 -4.91733283e-01 ... 1.03766412e-01 4.96088088e-01 -5.06076276e-01] [-4.45743382e-01 8.18215728e-01 1.08795428e+00 ... 3.40403706e-01 -5.94729602e-01 3.21771324e-01] [-4.69819903e-01 -8.01114082e-01 6.86318994e-01 ... 6.05331361e-01 1.10922873e+00 1.12200451e+00] ... [-1.06745434e+00 9.30358097e-02 -6.16698973e-02 ... -1.09526122e+00 -1.98204100e-01 -1.27177489e+00] [ 2.22046390e-01 -7.36527503e-01 -1.00731879e-01 ... 8.55392575e-01 -7.77025759e-01 1.02727127e+00] [-1.03157675e+00 -7.45499074e-01 -2.06887460e+00 ... -4.68307674e-01 1.26078761e+00 -5.96320212e-01]]] [[[-6.47962749e-01 -1.54419708e+00 -3.10946345e-01 ... -1.00592911e+00 5.46369851e-01 -3.84924859e-01] [ 4.55418289e-01 4.58632708e-01 2.77512878e-01 ... 
-3.50742489e-02 1.70704353e+00 -5.72275996e-01] [ 5.36659300e-01 3.93393546e-01 -1.12578392e+00 ... -8.78177762e-01 3.02255630e-01 3.80069554e-01] ... [ 6.47164928e-03 2.41105527e-01 -8.21377397e-01 ... 6.13708198e-01 -1.18050091e-01 -9.27699327e-01] [ 1.67939261e-01 1.21797144e-01 2.81351835e-01 ... 1.51153016e+00 -6.77211702e-01 -3.16269606e-01] [ 3.11422378e-01 8.71402919e-01 -6.21899009e-01 ... 9.78999197e-01 -1.34387314e-01 8.24962974e-01]] [[-3.38336945e-01 1.37238964e-01 -5.17048128e-03 ... -1.25323069e+00 6.32261574e-01 5.11729240e-01] [ 2.53217518e-01 2.55267024e-01 -5.98485827e-01 ... 1.03267646e+00 -4.75131482e-01 -9.07200575e-02] [ 5.83596490e-02 6.60721004e-01 5.16496480e-01 ... -5.16480744e-01 -7.77779162e-01 2.87072927e-01] ... [-7.57656157e-01 -7.30499849e-02 1.47950792e+00 ... -3.28110754e-01 -5.83160937e-01 7.25171685e-01] [-6.56483293e-01 -8.01870748e-02 -6.36589944e-01 ... -2.95017093e-01 -1.39194059e+00 1.11210525e+00] [-9.79799807e-01 -8.08604062e-01 -7.51366615e-01 ... -3.52168292e-01 -2.52761036e-01 -4.08440053e-01]] [[-8.10648501e-01 8.97703767e-01 -1.92272455e-01 ... 1.09014191e-01 -7.90470615e-02 9.81105089e-01] [ 7.36725390e-01 -1.04078412e-01 9.82050657e-01 ... -1.05414763e-01 4.57361132e-01 -3.31817508e-01] [ 2.38038436e-01 -3.41817528e-01 -9.22896340e-02 ... 1.57535744e+00 -6.08800709e-01 -4.52039421e-01] ... [-2.92439580e-01 -2.73125693e-02 -9.00100619e-02 ... 7.42597342e-01 -3.41648161e-01 3.67934138e-01] [ 1.15157807e+00 -1.99071676e-01 7.91430831e-01 ... 1.26377478e-01 1.46239495e+00 -1.63188100e-01] [-1.81315750e-01 -4.60489392e-02 7.63134301e-01 ... -6.30807996e-01 5.07917941e-01 -7.83297420e-01]] ... [[ 1.29793063e-01 -8.00899565e-01 -2.18925774e-01 ... -1.71196386e-01 -8.30744326e-01 6.49332166e-01] [-9.97115374e-01 4.75528300e-01 2.66418606e-01 ... -1.07463205e+00 4.15611155e-02 -2.19067946e-01] [-2.62741029e-01 8.55409428e-02 -1.58312190e+00 ... 4.62685585e-01 9.57935333e-01 -4.33640182e-01] ... 
[ 1.73269004e-01 2.80944914e-01 7.58779764e-01 ... 1.55972272e-01 -9.68467951e-01 1.35528898e+00] [ 3.64032388e-01 -2.67270189e-02 1.13238537e+00 ... -2.24025905e-01 -1.19226754e+00 -1.20817378e-01] [-1.59762695e-01 1.23697090e+00 -1.28226268e+00 ... -3.56677353e-01 -3.34838480e-01 3.64837974e-01]] [[ 4.63014215e-01 -1.03603137e+00 3.48568827e-01 ... -2.09145978e-01 2.00669861e+00 -4.61234331e-01] [-6.15702093e-01 3.91148686e-01 4.86926913e-01 ... 3.34328175e-01 -3.32787424e-01 5.41453958e-01] [ 1.78943932e-01 2.73950368e-01 -1.55389711e-01 ... -7.77186304e-02 -3.38473916e-01 -9.23478961e-01] ... [ 2.89481580e-01 3.06606472e-01 -9.98630464e-01 ... 3.78450043e-02 4.72936630e-01 -3.93773198e-01] [ 4.22523409e-01 1.60244390e-01 -1.22927797e+00 ... -4.35053974e-01 -3.75213593e-01 4.30516247e-03] [ 8.97081137e-01 -1.29638836e-01 1.95467204e-01 ... -4.09409761e-01 -3.67785916e-02 -4.29020077e-01]] [[-1.80678189e-01 -1.22103179e+00 -6.25170767e-01 ... 6.38002634e-01 6.69556677e-01 -1.87510923e-02] [ 2.11740676e-02 -3.18104208e-01 5.53132534e-01 ... 4.37084228e-01 -7.31768133e-03 -4.90708172e-01] [ 4.46756959e-01 -1.03299630e+00 -2.74223864e-01 ... 4.09252137e-01 2.51787543e-01 -5.06372213e-01] ... [-9.82568681e-01 -5.03962580e-03 -1.24936521e+00 ... 1.37561738e+00 -1.98363587e-01 3.65117848e-01] [ 1.25798512e+00 1.22011352e+00 -7.56413758e-01 ... 3.64037827e-02 1.36112177e+00 -1.16601992e+00] [-2.57479757e-01 1.12595296e+00 -7.09533095e-02 ... 1.17169607e+00 4.02591884e-01 -5.71455002e-01]]] [[[-7.94864833e-01 -4.85211402e-01 6.97500110e-01 ... 6.29186213e-01 3.10390860e-01 1.74365079e+00] [-5.37018478e-01 -8.45882017e-03 -9.32964981e-01 ... -6.20109022e-01 -1.29284430e-02 -6.74227476e-01] [ 4.83641118e-01 5.81362009e-01 4.13393825e-01 ... -5.52828193e-01 2.16375560e-01 2.29060397e-01] ... [-5.94371140e-01 -3.51182252e-01 -1.65931776e-01 ... -1.36894262e+00 8.85932982e-01 1.11728024e+00] [-3.07200611e-01 -6.22391999e-01 -4.55080807e-01 ... 
1.58550113e-01 -1.40481329e+00 -8.36744130e-01] [-4.20843810e-01 9.05381981e-03 -4.30835307e-01 ... 1.33034539e+00 -5.12183666e-01 7.99521685e-01]] [[-3.28077286e-01 -7.50413060e-01 -5.87955415e-01 ... -5.73061109e-01 7.07285047e-01 -4.23853189e-01] [ 5.73821701e-02 -4.74109977e-01 -9.14713889e-02 ... 1.15274835e+00 6.52911723e-01 -7.49227345e-01] [ 2.49780640e-01 9.61923838e-01 4.31371450e-01 ... 6.31837010e-01 -3.54547530e-01 6.90863311e-01] ... [-4.86280143e-01 3.64212058e-02 5.29134631e-01 ... 1.94085509e-01 1.10330850e-01 9.68026042e-01] [-9.15865719e-01 -5.52207589e-01 8.26321065e-01 ... -9.38878879e-02 -3.21799725e-01 -1.24901617e+00] [ 1.02682757e+00 6.60964966e-01 2.07546130e-01 ... 4.38916296e-01 2.16603711e-01 5.65066516e-01]] [[ 4.17502612e-01 -1.00588369e+00 2.13484928e-01 ... 2.72675693e-01 -4.69746292e-02 8.17395627e-01] [-3.54403675e-01 -8.87754634e-02 -8.84275377e-01 ... 3.21842074e-01 -5.19859195e-01 5.28981984e-01] [-8.36003050e-02 1.41181141e-01 -5.34841001e-01 ... 3.96217465e-01 2.56653190e-01 4.49385494e-01] ... [ 3.52017403e-01 4.62258905e-01 4.36356552e-02 ... -6.67900741e-01 7.37365723e-01 3.34368795e-01] [ 1.50019407e+00 1.60423175e-01 -2.79601395e-01 ... -3.43859255e-01 1.22374570e+00 9.18644249e-01] [-1.17842662e+00 9.31984782e-02 -6.60279512e-01 ... -3.97750199e-01 2.89926648e-01 9.54531610e-01]] ... [[ 5.23385882e-01 5.51849604e-01 5.98309994e-01 ... -1.21335633e-01 -4.58131701e-01 1.04040094e-01] [-2.77755409e-01 -3.54348421e-01 -6.95996165e-01 ... 5.96522689e-01 -5.66866994e-01 -9.09981132e-01] [-1.10951476e-01 -4.20930713e-01 5.30460119e-01 ... 2.42086157e-01 -1.63429916e-01 1.03109717e+00] ... [-1.11044817e-01 4.14003491e-01 -7.10178673e-01 ... 2.12930053e-01 8.74186993e-01 1.67790219e-01] [ 1.35733140e+00 -5.12091160e-01 -1.83049726e+00 ... 1.33937895e-01 1.31312773e-01 -5.35358429e-01] [ 7.04233870e-02 5.08064806e-01 -1.92453671e+00 ... 
-2.25125760e-01 -7.81857789e-01 -7.50936687e-01]] [[-7.24550903e-01 1.67403683e-01 5.29882729e-01 ... -3.09320003e-01 -4.12601121e-02 1.92522109e-01] [-2.67942131e-01 6.65008008e-01 6.54103100e-01 ... 2.26908177e-01 -5.54188967e-01 5.69972321e-02] [-8.65684524e-02 1.27157819e+00 -6.10605478e-02 ... 1.99863911e-01 1.64481148e-01 1.15267408e+00] ... [ 1.10942602e+00 1.10992384e+00 -1.21258283e+00 ... 7.34865069e-01 -3.13900501e-01 3.34946066e-01] [ 1.74331561e-01 5.18593788e-01 -4.81042892e-01 ... 3.15410346e-01 2.64545232e-01 1.09358981e-01] [ 3.66934448e-01 3.68910849e-01 1.14770484e+00 ... -3.27993512e-01 1.23854947e+00 -1.14318609e+00]] [[-1.63824856e+00 7.73508370e-01 -9.81783643e-02 ... -4.06244576e-01 -3.10418904e-01 1.14597809e+00] [-3.01927514e-02 2.89214849e-01 8.35839927e-01 ... -3.41775179e-01 -8.60488832e-01 1.67914614e-01] [ 7.49929309e-01 3.41773897e-01 5.33607543e-01 ... -6.70622513e-02 3.31525534e-01 -1.87567249e-01] ... [ 1.63675952e+00 1.20120037e+00 -3.42098415e-01 ... 6.44125938e-01 9.26143602e-02 9.52472329e-01] [-7.01190710e-01 -3.84306818e-01 9.30185914e-01 ... -7.52068222e-01 -1.62093759e-01 2.03217193e-02] [ 1.19512427e+00 1.01553448e-01 -5.46510756e-01 ... -2.66294986e-01 6.67456686e-01 -6.82845414e-01]]]] [[[[ 2.46214181e-01 4.25280690e-01 -9.07754898e-01 ... 4.68011081e-01 8.52599919e-01 4.05470550e-01] [ 6.25374377e-01 -2.44900227e-01 4.30530131e-01 ... 2.72267669e-01 -2.88083707e-03 -5.71332216e-01] [-4.92198765e-01 1.57562584e-01 2.97782809e-01 ... -1.36356175e-01 -1.26201540e-01 -6.77077055e-01] ... [-2.08616570e-01 -9.85047877e-01 -1.01491356e+00 ... 1.22887142e-01 -7.22369075e-01 -1.28085613e+00] [ 1.19389009e+00 -5.82240641e-01 5.63433588e-01 ... -4.29795027e-01 3.69279861e-01 -3.28935474e-01] [ 2.75985658e-01 -2.41942294e-02 1.21964562e+00 ... 1.30633876e-01 -5.51865026e-02 1.18862593e+00]] [[-4.18866545e-01 -3.37559462e-01 9.96658266e-01 ... 
-6.80165172e-01 6.11938715e-01 1.83358759e-01] [-1.39100409e+00 8.29233348e-01 -5.27556539e-01 ... 4.45111424e-01 1.58179060e-01 -5.92713118e-01] [ 5.17760158e-01 -2.64519393e-01 -4.35612381e-01 ... -1.64542782e+00 -6.58256650e-01 2.94058621e-01] ... [-5.71701646e-01 -4.24564838e-01 -1.51220426e-01 ... 3.10770780e-01 -7.08339930e-01 -1.70976114e+00] [ 7.27346420e-01 -3.29186499e-01 -1.55064988e+00 ... -8.31637800e-01 -2.32199356e-01 -6.48974717e-01] [-1.49056709e+00 -8.02868068e-01 -5.55633187e-01 ... 3.66852939e-01 -8.27413738e-01 9.94867146e-01]] [[ 7.72189051e-02 1.02703345e+00 -5.71153820e-01 ... -2.49066919e-01 -2.21484780e-01 -8.35686743e-01] [ 3.42459172e-01 -8.62118959e-01 -5.02999485e-01 ... 4.74467099e-01 4.71248209e-01 9.00377333e-01] [ 3.80374759e-01 -1.25736818e-01 3.67895275e-01 ... 6.48395300e-01 7.55942939e-03 5.20574391e-01] ... [ 1.39741683e+00 -6.25497818e-01 -1.48503661e+00 ... 2.40629941e-01 1.03921235e+00 -7.66370893e-01] [ 5.78853965e-01 1.52717495e+00 3.07003520e-02 ... 3.30008060e-01 -5.79700589e-01 6.92820072e-01] [-4.87949997e-01 -1.54117182e-01 7.67583132e-01 ... -1.33676052e+00 2.19741747e-01 6.90920591e-01]] ... [[-5.62652528e-01 -1.47171390e+00 1.20151591e+00 ... 8.50072782e-03 -8.52687716e-01 -2.87224650e-01] [-1.05519235e+00 1.24006367e+00 7.00965643e-01 ... -2.31604886e+00 1.09783232e+00 5.24028122e-01] [ 7.02376813e-02 -2.05442473e-01 -1.61952570e-01 ... 1.41485751e+00 -5.47435522e-01 -6.30652681e-02] ... [ 3.58459763e-02 2.13406056e-01 -3.82017642e-01 ... 1.01475120e+00 8.63453031e-01 -4.54923570e-01] [-8.42712283e-01 3.61125231e-01 2.70290792e-01 ... -1.08013153e+00 -1.73137209e-03 5.06833911e-01] [ 9.05702472e-01 5.69310308e-01 7.54077852e-01 ... -4.44145560e-01 -1.45330060e+00 -5.22985101e-01]] [[-5.30036271e-01 -2.26994693e-01 3.48145634e-01 ... -5.97379208e-01 1.14746702e+00 -1.41874962e-02] [-1.08259082e+00 -2.95596495e-02 -1.14388072e+00 ... 
-5.32181449e-02 1.15380335e+00 1.83712423e-01] [ 5.35932839e-01 -2.04453245e-01 -9.20917168e-02 ... 3.15003783e-01 -2.28043720e-01 -1.08011043e+00] ... [ 4.12881196e-01 2.51018286e-01 -2.95573533e-01 ... -5.42210519e-01 1.01505315e+00 -1.58038244e-01] [-6.39505684e-01 -5.73257148e-01 1.75297213e+00 ... -9.17991638e-01 9.45130177e-03 -1.09806168e+00] [ 9.13563013e-01 1.17081352e-01 7.45294988e-02 ... 3.40581179e-01 7.04949439e-01 7.92014837e-01]] [[ 2.79236317e-01 2.76755869e-01 -2.32965872e-01 ... -4.06592190e-01 3.93232107e-01 -3.69527608e-01] [-1.21313953e+00 -3.29309583e-01 7.53213644e-01 ... 1.34962499e+00 4.30609852e-01 -4.93337996e-02] [ 1.17841475e-01 -2.49809876e-01 -2.12084815e-01 ... -8.70795667e-01 -8.64866972e-01 2.04211727e-01] ... [-1.46843433e-01 -1.08544183e+00 1.12777226e-01 ... 3.29947025e-01 -6.93307281e-01 2.10252807e-01] [ 3.16419005e-01 1.09735370e+00 2.10507348e-01 ... 4.41895008e-01 3.54841411e-01 -7.66848207e-01] [-4.05669779e-01 -5.95136523e-01 -1.40240148e-01 ... 1.34720254e+00 7.63942525e-02 8.77368271e-01]]] [[[ 8.99584472e-01 6.08219445e-01 -4.92011696e-01 ... -5.38850129e-01 -2.60903239e-01 -3.47819775e-01] [ 1.22880411e+00 -5.50087750e-01 2.56872684e-01 ... -1.73170775e-01 -1.50693133e-01 8.55686724e-01] [ 5.75534999e-01 8.12653124e-01 8.93745199e-03 ... 7.72103071e-01 6.41618013e-01 -9.50551257e-02] ... [ 2.68395901e-01 -7.36627638e-01 5.90005875e-01 ... -6.18025005e-01 -8.82631093e-02 -2.34450996e-01] [ 5.22634625e-01 2.06599310e-01 5.52738070e-01 ... -5.53094983e-01 2.92815834e-01 -1.38054419e+00] [-2.20055676e+00 1.78167105e-01 -3.30957286e-02 ... -1.79699391e-01 1.95646727e+00 7.03695118e-02]] [[ 4.69647767e-03 -8.36241961e-01 6.82402134e-01 ... 1.54995596e+00 1.03928903e-02 4.46749896e-01] [-6.91164076e-01 9.25087810e-01 1.54288685e+00 ... 3.86801749e-01 2.93547153e-01 1.90774892e-02] [ 5.89872539e-01 -2.16748968e-01 -8.73388648e-01 ... 9.30580944e-02 7.47708321e-01 -1.45373881e-01] ... 
[-1.17658806e+00 -5.01394033e-01 6.02553248e-01 ... -1.81341335e-01 -1.20976850e-01 -6.63873553e-02] [ 7.38585889e-01 -9.55221593e-01 6.24760449e-01 ... 1.65330768e-01 -4.11858074e-02 6.74107015e-01] [ 7.11126864e-01 -4.82338130e-01 1.36703169e+00 ... -1.51809484e-01 -3.23133528e-01 5.51028132e-01]] [[ 7.26731122e-01 -1.35610962e+00 -2.32246682e-01 ... 3.24679971e-01 5.20567596e-01 1.57067031e-01] [ 1.22572339e+00 2.94481277e-01 1.41971812e-01 ... -8.95833492e-01 4.46704209e-01 1.15329885e+00] [ 1.98316172e-01 1.03821123e+00 1.34589291e+00 ... 4.25538599e-01 -1.17758787e+00 6.18834019e-01] ... [-3.03473771e-01 1.31046563e-01 -7.91837037e-01 ... -2.48296753e-01 -3.91770810e-01 8.59943330e-01] [ 2.35663772e-01 -3.25287849e-01 -8.12513292e-01 ... -8.86725903e-01 2.33582929e-02 -1.80445537e-01] [ 7.44348243e-02 7.87134111e-01 5.75783551e-01 ... 1.31060791e+00 9.91626620e-01 -1.47665727e+00]] ... [[ 4.66610938e-01 9.83887970e-01 1.73079401e-01 ... -9.57218945e-01 1.38112390e+00 1.26623797e+00] [ 9.71147120e-01 5.93960106e-01 1.05668619e-01 ... -7.95458555e-01 8.26191962e-01 9.10104156e-01] [ 1.11487985e+00 -3.93948793e-01 -6.82443142e-01 ... -1.62402689e+00 1.05399955e-02 5.66537917e-01] ... [-2.02102169e-01 -6.48899615e-01 -3.00545305e-01 ... 1.22545588e+00 -5.36739707e-01 9.97140229e-01] [-6.18042946e-01 -6.61667228e-01 -3.56552005e-01 ... 7.39306688e-01 -2.86826700e-01 -5.51486135e-01] [ 3.07851046e-01 1.24405348e+00 -1.61417738e-01 ... -8.02201271e-01 1.72929573e+00 4.05390501e-01]] [[-1.21002328e+00 9.66552436e-01 -9.28494692e-01 ... 5.54700553e-01 -2.24613056e-01 9.78108197e-02] [-9.85674024e-01 3.20101470e-01 -1.04562640e+00 ... 4.61705178e-01 1.18097103e+00 -1.00543904e+00] [-2.94832259e-01 1.26110244e+00 6.26751244e-01 ... -3.85718107e-01 -2.13879019e-01 2.12774932e-01] ... [ 1.13362126e-01 -2.99442887e-01 4.47088540e-01 ... -8.39796782e-01 5.64290583e-01 -9.91294384e-02] [-6.92667782e-01 -1.02719915e+00 -1.33515343e-01 ... 
1.13687849e+00 4.29948568e-01 -7.01612651e-01] [ 7.75588304e-02 -1.17538297e+00 -1.26717851e-01 ... 4.95231718e-01 5.36261082e-01 -4.64095891e-01]] [[-2.90623516e-01 9.62429419e-02 -1.02010536e+00 ... -9.12573457e-01 -5.66507816e-01 -7.09663033e-02] [ 2.01440886e-01 3.08659822e-01 3.41045707e-02 ... -4.23659474e-01 -7.01152459e-02 9.93500426e-02] [-1.27632380e+00 8.71273220e-01 1.13139045e+00 ... -1.48277915e+00 3.06515455e-01 -2.46613860e-01] ... [ 3.51235159e-02 -2.13027865e-01 2.53885806e-01 ... 1.57537889e+00 2.05427551e+00 6.00158334e-01] [-1.20986891e+00 4.59784746e-01 2.36367539e-01 ... 7.07140386e-01 3.92395914e-01 -1.41063130e+00] [ 5.34140408e-01 5.40333450e-01 -9.48681980e-02 ... -4.82021779e-01 5.00230193e-01 -4.46471781e-01]]] [[[-3.05540264e-01 -2.54620862e+00 -3.84860724e-01 ... -1.16750443e+00 3.70608643e-02 -6.08391345e-01] [-8.01255107e-02 -1.14905670e-01 -4.35687322e-03 ... -4.73858684e-01 8.00260067e-01 -4.51770842e-01] [-1.12058210e+00 7.33223408e-02 -2.87472486e-01 ... 5.69579482e-01 -1.59219727e-01 -1.88250303e-01] ... [-5.35854638e-01 1.15193529e-02 8.83864105e-01 ... 5.35743475e-01 -4.36438480e-03 -6.89349353e-01] [-8.13992500e-01 3.27434577e-02 -1.45095611e+00 ... -7.13886201e-01 5.09451509e-01 2.33663648e-01] [ 1.57461211e-01 7.07679033e-01 7.34527335e-02 ... 1.24340785e+00 6.22279286e-01 -2.98356593e-01]] [[ 1.84657431e+00 -2.59291530e-01 9.73499477e-01 ... 1.61863148e+00 5.60160279e-01 3.09840083e-01] [-4.88288224e-01 -5.58652401e-01 -1.28285974e-01 ... 6.96400106e-01 -2.96175778e-02 4.61404562e-01] [ 3.58371884e-01 8.65831733e-01 7.23769128e-01 ... -6.48182750e-01 -3.12849432e-01 -6.24177873e-01] ... [ 9.94551122e-01 5.62098324e-01 -4.92049038e-01 ... 7.87967563e-01 -8.28973353e-01 7.20153749e-01] [ 7.81109512e-01 4.22416240e-01 -1.44190907e-01 ... -2.13374570e-01 4.34066206e-01 -3.37378919e-01] [-1.01096320e+00 7.65244603e-01 4.88960266e-01 ... 
-8.07694674e-01 1.84134102e+00 -4.21457648e-01]] [[-3.76649022e-01 -7.14888453e-01 -7.64085412e-01 ... -9.59612370e-01 -4.92324769e-01 -3.83690685e-01] [-5.70536315e-01 -1.31605840e+00 -2.53475368e-01 ... 1.02695370e+00 -5.05183399e-01 -2.41399072e-02] [-6.57605469e-01 3.67612600e-01 -4.51322019e-01 ... 1.00884251e-01 -7.66094804e-01 -5.00644505e-01] ... [-1.23371387e+00 -2.36720890e-01 -3.05179864e-01 ... 2.94323862e-01 8.35556865e-01 5.09574533e-01] [-4.16362137e-01 -2.56537169e-01 1.04768610e+00 ... -1.20371580e-01 -4.20297891e-01 1.20687509e+00] [ 7.49170184e-01 1.03502500e+00 4.85351652e-01 ... -2.87903756e-01 -8.25088441e-01 7.52885565e-02]] ... [[-4.89764363e-01 -4.91642803e-01 1.30716860e-01 ... 2.59705096e-01 7.71239996e-01 2.04182193e-01] [ 1.89134192e+00 1.50562122e-01 -2.13151455e-01 ... -6.78663850e-01 2.94663310e-01 -1.25826597e-01] [ 1.30957329e+00 2.09918633e-01 6.00673437e-01 ... -9.56603810e-02 -9.25759077e-02 -3.17308992e-01] ... [-4.17356074e-01 6.21403933e-01 2.52255112e-01 ... -6.10493064e-01 -8.09188128e-01 1.04329002e+00] [-2.57970303e-01 -7.43045747e-01 1.04401612e+00 ... 8.11766833e-03 8.46087337e-01 3.55419308e-01] [ 6.30923286e-02 -2.23678604e-01 -2.16406599e-01 ... -5.30479968e-01 -1.19065738e+00 -3.53000402e-01]] [[-6.92435384e-01 -3.71545255e-02 -3.45540494e-01 ... -3.37769866e-01 -9.60486531e-01 -2.53156990e-01] [ 1.51195347e-01 8.56555104e-01 3.82905185e-01 ... 8.30049217e-01 1.02712691e+00 4.00630057e-01] [-1.63109636e+00 2.03090414e-01 -6.28361225e-01 ... 2.39514887e-01 -6.95640817e-02 -6.11139476e-01] ... [-1.09043241e-01 5.83182573e-01 -2.09599696e-02 ... -6.13604188e-01 -2.02518761e-01 1.84966087e-01] [-4.57796007e-01 3.75921071e-01 -2.94228524e-01 ... -9.84737039e-01 2.72234827e-01 9.62532461e-01] [-8.32551897e-01 5.64330459e-01 1.41740322e+00 ... 1.36165157e-01 -3.69022936e-01 -4.85209115e-02]] [[ 1.75175881e+00 -1.29775509e-01 3.29220712e-01 ... 
-9.86755192e-01 -1.33171856e+00 -5.64227462e-01] [-1.09286356e+00 -8.69342387e-01 8.34794819e-01 ... -2.93604821e-01 -1.26235867e+00 -5.87672710e-01] [ 8.53197515e-01 -1.01827286e-01 -8.47595930e-01 ... 9.52988923e-01 8.66607368e-01 1.02874732e+00] ... [-6.73397541e-01 -4.24645007e-01 6.89034164e-01 ... 3.41693074e-01 -9.33058441e-01 1.18698716e+00] [ 8.15368712e-01 1.01218867e+00 8.64118099e-01 ... -4.22694087e-02 3.60510945e-01 -2.32953504e-01] [ 1.31571603e+00 6.49356365e-01 8.67584199e-02 ... -5.21708071e-01 9.70762849e-01 -5.49051106e-01]]] [[[ 6.13301992e-01 -6.53651237e-01 1.25687587e+00 ... 4.77525115e-01 -2.03063831e-01 7.62822151e-01] [-3.08390874e-02 -8.25172305e-01 -1.11232066e+00 ... -6.13710999e-01 1.16953838e+00 7.03997970e-01] [ 1.39617455e+00 1.51437187e+00 -8.16711634e-02 ... 5.58580101e-01 6.10125780e-01 -3.85095403e-02] ... [ 1.25192630e+00 -8.50498676e-01 2.55392194e-01 ... -4.09923345e-01 -5.45304120e-01 -1.01286221e+00] [-8.91647816e-01 5.95510542e-01 -1.30949140e+00 ... 3.78991932e-01 -1.68413997e-01 2.99293250e-01] [ 7.50611186e-01 -2.78738141e-01 4.00389016e-01 ... 7.19403446e-01 -4.86468673e-02 5.80733120e-01]] [[-7.25064695e-01 2.49386162e-01 -3.45825642e-01 ... -4.14839536e-01 9.49143589e-01 -1.54685557e-01] [ 2.76737213e-01 2.38974571e-01 -1.38468981e+00 ... -1.09552276e+00 -1.14256479e-01 8.69961202e-01] [-1.00198865e-01 2.84161270e-01 6.06090426e-01 ... 2.74641335e-01 3.69461834e-01 -3.21384311e-01] ... [-1.74762204e-01 -1.19144067e-01 8.93008351e-01 ... 3.15412194e-01 -4.36835550e-02 2.14113206e-01] [ 6.93969190e-01 -1.04708862e+00 -7.58066922e-02 ... -4.78608668e-01 -8.04077983e-01 -2.56588548e-01] [ 4.97449130e-01 4.38457519e-01 7.79988110e-01 ... 3.28287780e-01 5.16536415e-01 -1.46132612e+00]] [[ 6.64971098e-02 3.22390378e-01 2.94996023e-01 ... 3.45342249e-01 4.92720217e-01 -1.10492580e-01] [ 4.49381590e-01 -1.64675593e-01 6.29742205e-01 ... 
6.75895929e-01 -5.54348588e-01 -8.37305427e-01] [ 5.29945362e-03 -1.42759752e+00 2.32165530e-02 ... 5.69489226e-02 -1.15721501e-01 1.36925900e+00] ... [ 4.61578131e-01 7.67846584e-01 -8.40674341e-01 ... 2.91108519e-01 3.43333840e-01 2.74070859e-01] [-4.92757499e-01 2.82853633e-01 -9.90186214e-01 ... -1.87656075e-01 -4.24677700e-01 5.84144175e-01] [ 9.28528428e-01 -5.54081440e-01 -1.07820952e+00 ... -3.49785089e-01 -4.04705293e-02 -1.71430483e-01]] ... [[ 2.89553046e-01 1.88185036e-01 5.33001065e-01 ... -1.76613581e+00 -5.30740857e-01 -1.76751584e-01] [-5.49412608e-01 2.97033694e-02 -8.95453334e-01 ... 6.47713542e-01 5.93893588e-01 -1.24956250e+00] [-4.60687876e-01 -6.74500644e-01 -2.50220776e-01 ... -5.41116357e-01 4.08519685e-01 -1.03323638e+00] ... [-3.93811643e-01 6.40158132e-02 -5.68880618e-01 ... 3.37859869e-01 1.04841125e+00 -3.85201156e-01] [ 3.86751592e-02 3.47939469e-02 -1.02584684e+00 ... -1.26555967e+00 -5.39081037e-01 2.82803148e-01] [ 1.95651674e+00 -1.05802548e+00 3.40228230e-01 ... -5.33677757e-01 -5.06136358e-01 -9.50574398e-01]] [[ 3.56372863e-01 7.67479479e-01 4.80884701e-01 ... -3.18335265e-01 -6.31563365e-01 1.07837212e+00] [-9.17081356e-01 -3.93710464e-01 -1.42938030e+00 ... -1.06160975e+00 -1.93406940e-01 -2.48770028e-01] [-1.46451461e+00 -3.45650196e-01 -7.15684891e-01 ... 2.00176656e-01 8.64163995e-01 2.97172904e-01] ... [ 4.61972356e-01 7.03031242e-01 4.72412586e-01 ... 1.33586884e-01 -2.36367527e-03 1.28099465e+00] [ 6.79762244e-01 -4.53023374e-01 7.64011204e-01 ... -3.67544889e-02 -1.91454753e-01 -4.34203327e-01] [ 2.63672899e-02 -2.59160638e-01 3.39485079e-01 ... 2.56671816e-01 -1.04336143e+00 1.66728795e-01]] [[-4.27130580e-01 -4.92392302e-01 4.61895585e-01 ... 3.64751905e-01 -1.87688559e-01 -6.90328121e-01] [-1.33738220e-01 -4.92254823e-01 5.92475772e-01 ... -1.23373473e+00 -1.35974777e+00 -5.78163326e-01] [-3.02591980e-01 3.38479847e-01 -2.84604818e-01 ... 3.10499221e-01 7.34350264e-01 -2.08359301e-01] ... 
[ 1.45313609e+00 -1.03624511e+00 3.59763831e-01 ... 2.81504691e-01 -9.54248726e-01 6.33865416e-01] [ 9.23659384e-01 6.27237022e-01 1.07875228e+00 ... 1.33429837e+00 -1.31465986e-01 2.30147099e+00] [-9.82654870e-01 -1.21830560e-01 1.24417281e+00 ... -6.83556676e-01 3.41774583e-01 -1.27150461e-01]]] [[[-1.04002631e+00 -1.34014964e-01 3.77286583e-01 ... 1.20801532e+00 1.30624795e+00 8.26904953e-01] [-1.96041420e-01 -1.33965030e-01 -3.07418674e-01 ... -3.66155714e-01 -9.53875840e-01 -2.58860230e-01] [ 4.96126205e-01 -5.04376769e-01 -6.09639883e-01 ... -3.70946556e-01 8.29687774e-01 5.32243013e-01] ... [-8.07957828e-01 -1.28969002e+00 5.13509989e-01 ... 2.77964264e-01 -4.02314544e-01 5.11085331e-01] [-5.17515540e-02 1.82837233e-01 -8.43068361e-01 ... 1.73807693e+00 -4.90184695e-01 1.97664350e-01] [ 3.57391655e-01 -1.10412352e-01 1.62732556e-01 ... 5.32206357e-01 -1.42488107e-01 -5.81553340e-01]] [[-1.48862258e-01 -2.07394809e-01 2.58391142e-01 ... -6.12691820e-01 4.20498371e-01 -2.51922756e-01] [ 2.29577914e-01 -4.04457510e-01 1.18065579e-02 ... 1.33303618e+00 -5.08367062e-01 -1.35547006e+00] [ 1.35398245e+00 6.30868793e-01 -1.87872529e-01 ... -9.73041475e-01 -2.30540425e-01 -2.25713775e-01] ... [-7.16552794e-01 2.12281182e-01 7.76081532e-02 ... 2.19687209e-01 2.11067125e-01 -9.22922015e-01] [-1.83295816e-01 -1.13338821e-01 2.01104283e-01 ... 6.19941771e-01 -1.73776284e-01 1.64611004e-02] [ 1.09120297e+00 3.30105603e-01 1.90660477e-01 ... -6.41209900e-01 -6.26498461e-01 2.02160046e-01]] [[ 7.02369571e-01 9.05995723e-03 -3.10931038e-02 ... 8.87827761e-03 -1.59644991e-01 8.93544376e-01] [-7.10955739e-01 7.23730743e-01 1.31458327e-01 ... -1.10185981e+00 1.13807157e-01 6.94824994e-01] [-1.00252199e+00 -3.81497443e-01 -9.62560952e-01 ... 1.28452986e-01 2.64054954e-01 1.25551254e-01] ... [ 7.42912054e-01 -3.46449688e-02 -2.76927918e-01 ... 6.27836406e-01 -8.82984847e-02 -1.35888040e+00] [ 3.45905453e-01 -2.30833650e-01 -6.75812811e-02 ... 
6.35569394e-01 -6.10348463e-01 -6.51283860e-01] [ 1.35771668e+00 1.39080480e-01 -2.32945710e-01 ... 7.93645382e-01 5.14054120e-01 2.22462565e-01]] ... [[ 3.40585738e-01 8.02334249e-01 4.50260997e-01 ... 9.16626770e-03 -2.28925556e-01 3.23467284e-01] [ 2.81291544e-01 -1.04427767e+00 -3.95032287e-01 ... -7.94442356e-01 5.00839829e-01 -5.22760972e-02] [ 3.20801944e-01 5.58943272e-01 4.14009303e-01 ... -6.76773906e-01 -7.18977600e-02 5.04582763e-01] ... [-1.27228725e+00 2.80156076e-01 -9.57722902e-01 ... 4.26098734e-01 -7.18837559e-01 2.69825280e-01] [ 1.53530908e+00 -2.37684935e-01 -4.14322019e-01 ... -7.78836608e-01 1.24320865e+00 -7.19708920e-01] [ 3.39936763e-01 -8.83503437e-01 8.00056398e-01 ... -1.27379492e-01 -2.06773818e-01 -5.90322539e-02]] [[-7.75556624e-01 2.69731134e-01 -8.32134247e-01 ... 1.43744633e-01 -1.02305663e+00 -4.21685316e-02] [ 2.22142965e-01 1.08651888e+00 1.01122186e-01 ... -4.49237019e-01 -2.17323348e-01 4.89334255e-01] [ 2.12087378e-01 5.58635771e-01 1.00004566e+00 ... 1.25244331e+00 -2.99746424e-01 1.38396055e-01] ... [ 4.00108516e-01 -2.12530553e-01 4.84751225e-01 ... 1.35573775e-01 6.87395334e-01 -5.71769834e-01] [ 6.22001767e-01 9.75425601e-01 5.17786503e-01 ... 1.39874876e+00 1.51058778e-01 -6.79613292e-01] [ 5.04902184e-01 -5.60658157e-01 -3.37824315e-01 ... 3.06982785e-01 1.82328284e-01 8.27034831e-01]] [[ 5.67302108e-01 -1.36124027e+00 -6.65185511e-01 ... 9.72064674e-01 -7.82674015e-01 -7.88156986e-02] [ 2.59809166e-01 1.94736481e-01 -4.86330956e-01 ... 6.52734101e-01 1.36385113e-01 4.35143858e-01] [ 2.11934477e-01 -1.33910477e+00 7.91618645e-01 ... -9.76305187e-01 2.90919065e-01 -3.08654398e-01] ... [ 2.74764020e-02 -5.92981763e-02 -1.27754319e+00 ... 1.65428638e-01 -1.16794276e+00 -7.06535205e-02] [ 5.02137914e-02 -7.63181269e-01 1.18461013e+00 ... -7.05803335e-01 3.30823511e-01 -8.99757862e-01] [ 3.03821921e-01 -3.35926205e-01 -1.28697944e+00 ... 
-9.44780111e-01 -1.15852928e+00 5.04419148e-01]]] [[[ 6.81339979e-01 2.53671378e-01 -1.21775612e-01 ... -5.40179431e-01 4.40992385e-01 -2.52195537e-01] [ 2.71449000e-01 8.98482740e-01 -5.23650587e-01 ... -3.42057496e-01 1.14826775e+00 2.01499462e-01] [-2.00540259e-01 -3.04385275e-01 -6.81119859e-01 ... 7.06394553e-01 -3.58809024e-01 5.52669227e-01] ... [ 4.73292500e-01 6.35185957e-01 -3.30118269e-01 ... -3.93780887e-01 3.20613563e-01 -3.66503328e-01] [-1.35101092e+00 -3.02955620e-02 -3.66235882e-01 ... -2.08321378e-01 -1.11958519e-01 -1.41924739e-01] [-7.67288685e-01 1.23598981e+00 1.23615511e-01 ... 3.29085797e-01 3.01855356e-01 4.14396793e-01]] [[-6.77678108e-01 -1.36428547e+00 -1.13693666e+00 ... -1.10328741e-01 5.75214207e-01 1.83748019e+00] [-3.68271656e-02 -8.85406658e-02 -5.37703216e-01 ... -2.08487540e-01 -6.39648259e-01 1.19317222e+00] [ 7.61286139e-01 3.75168562e-01 3.87631595e-01 ... -4.46299821e-01 -1.01536071e+00 -1.59659743e+00] ... [ 7.36444771e-01 1.71695232e+00 1.88127086e-01 ... -5.12018561e-01 2.65963227e-01 5.19273043e-01] [ 4.36307818e-01 2.00155348e-01 -7.46575296e-01 ... -6.35258138e-01 3.14816624e-01 -6.75642416e-02] [ 1.00143087e+00 2.83488512e-01 2.33088374e-01 ... 3.32082659e-01 -6.19686007e-01 -3.73626262e-01]] [[-4.15185452e-01 -5.59197843e-01 5.18002331e-01 ... 6.06132567e-01 4.95622277e-01 8.03150117e-01] [ 1.09419596e+00 4.07534949e-02 8.74012947e-01 ... 4.35524344e-01 -1.93320885e-01 2.55047649e-01] [ 8.31819996e-02 1.15090334e+00 -8.37539971e-01 ... -4.74847198e-01 -2.89178252e-01 1.23876953e+00] ... [ 3.26875359e-01 2.09173292e-01 5.09277701e-01 ... 8.31181586e-01 1.48313522e-01 -3.27276170e-01] [-8.21425796e-01 4.88974035e-01 -3.32413197e-01 ... -1.03283954e+00 1.12910569e+00 6.30431354e-01] [-3.57473820e-01 -6.12175226e-01 1.01781023e+00 ... -1.21897765e-01 1.18593514e+00 -6.71827495e-01]] ... [[-6.41337335e-01 9.74054169e-03 6.61556572e-02 ... 
1.26312745e+00 -7.21867204e-01 7.07258821e-01] [-7.93005943e-01 -3.35384727e-01 2.82067478e-01 ... 1.22658506e-01 1.03432751e+00 -6.35792434e-01] [ 4.64282572e-01 -1.13400674e+00 -5.66822827e-01 ... -5.99891186e-01 -2.31302604e-01 1.56575680e+00] ... [-1.91225004e+00 -2.09056474e-02 -7.68533051e-01 ... 4.21844065e-01 -7.14618027e-01 -7.78729141e-01] [-2.43123502e-01 2.83660084e-01 -2.43940413e-01 ... 5.00473738e-01 4.20084059e-01 4.86098409e-01] [-3.78010049e-02 -1.46525526e+00 -7.20780253e-01 ... 2.83362538e-01 3.33109319e-01 -1.95013836e-01]] [[-4.29350108e-01 -1.75552532e-01 4.82543334e-02 ... 7.23156333e-01 -8.49944472e-01 5.16637862e-01] [ 7.58886337e-01 1.01622097e-01 -1.65412855e+00 ... 4.44542319e-02 2.20999479e-01 -1.59207618e+00] [-7.57083416e-01 6.63302422e-01 4.79452521e-01 ... 4.78843004e-01 8.62360239e-01 5.35324335e-01] ... [-7.95975253e-02 5.43914974e-01 -1.92697212e-01 ... -9.85551119e-01 -1.20476878e+00 4.06707115e-02] [-1.36612296e+00 8.00535679e-01 1.23892713e+00 ... 8.11514914e-01 -5.89272559e-01 1.17732179e+00] [-1.20493904e-01 6.78916872e-01 9.63539854e-02 ... -1.31731987e-01 1.37981009e+00 -6.09967411e-01]] [[ 1.35301426e-01 -1.20508611e+00 -6.57162130e-01 ... -7.49704540e-01 1.19964577e-01 -8.02170575e-01] [-5.45701444e-01 -1.04749632e+00 -6.27688766e-02 ... 6.64621824e-03 6.56745117e-03 -2.68959284e-01] [-3.82434785e-01 -8.00249040e-01 1.18337095e-01 ... 2.06516474e-01 6.45419359e-01 3.71185780e-01] ... [ 9.20409620e-01 6.48150265e-01 2.09037483e-01 ... -4.65369642e-01 3.77016932e-01 3.13685954e-01] [-2.46163115e-01 -9.69350561e-02 7.48268008e-01 ... -1.70945808e-01 -4.01056111e-01 1.20856524e+00] [-4.90936190e-01 -4.47355986e-01 2.07181543e-01 ... 2.11675689e-01 8.37388635e-01 4.22674455e-02]]]] [[[[ 7.01072961e-02 4.37279373e-01 -1.43628195e-01 ... -2.70032704e-01 4.57555145e-01 -3.18252414e-01] [-8.79490733e-01 -2.04021573e-01 1.33538866e+00 ... 
9.47166234e-02 -8.33035946e-01 7.10474551e-02] [-6.24811530e-01 2.97629565e-01 -7.87566841e-01 ... 3.40066999e-01 2.72662520e-01 2.61713952e-01] ... [-2.64160246e-01 6.77833259e-01 3.00878137e-01 ... -4.19893801e-01 -8.60333502e-01 1.27560508e+00] [-6.69449151e-01 -4.42262888e-01 -5.06635547e-01 ... -2.51024455e-01 7.27741003e-01 -1.42107284e+00] [-2.66606897e-01 1.69788554e-01 -4.51956272e-01 ... 7.76290745e-02 1.42208874e-01 1.04088509e+00]] [[-2.09347844e-01 4.90370125e-01 -5.36152244e-01 ... -1.66397598e-02 -5.91871381e-01 5.74396193e-01] [-1.28760791e+00 -1.97242856e+00 2.64416486e-01 ... 4.50619012e-02 2.32788950e-01 -8.55698958e-02] [-1.08413249e-02 1.20663047e-01 -3.98034275e-01 ... 2.11432144e-01 -5.58826685e-01 1.92342684e-01] ... [-2.38985986e-01 7.14937627e-01 -1.40236646e-01 ... 1.91581115e-01 1.49119425e+00 -8.45394611e-01] [-2.27255851e-01 3.72070849e-01 6.27294183e-01 ... 6.54479802e-01 4.30414200e-01 -6.77485704e-01] [-5.48875630e-01 5.00957184e-02 6.10916615e-01 ... -3.70693356e-01 -5.61029434e-01 -1.69731483e-01]] [[-7.83850372e-01 -2.91292369e-01 5.07225692e-01 ... -2.73740083e-01 -8.11511159e-01 1.16352534e+00] [ 6.88864708e-01 -1.53731138e-01 2.23934233e-01 ... 9.46660787e-02 -7.49586463e-01 7.41230547e-02] [ 2.59355396e-01 2.10844465e-02 -3.18183422e-01 ... -1.44968122e-01 -1.04370618e+00 -5.29539883e-01] ... [-8.96116495e-01 -3.49486858e-01 7.55800307e-02 ... -9.42243040e-01 1.35873005e-01 -7.23207518e-02] [ 2.95125425e-01 -6.83301985e-01 1.37452289e-01 ... -1.15923929e+00 3.16310644e-01 -1.64602935e-01] [ 4.73374099e-01 -4.06721942e-02 -3.73559743e-01 ... -9.92156148e-01 -1.20707703e+00 -3.53536427e-01]] ... [[ 9.29434970e-02 6.18205145e-02 6.13005102e-01 ... -1.36772633e+00 -7.83586383e-01 -4.48217690e-01] [-1.36958516e+00 -4.72207159e-01 -2.60452002e-01 ... -1.57980561e-01 -4.54273820e-01 -1.87412485e-01] [ 2.93589264e-01 -7.84571588e-01 4.08401310e-01 ... -1.13774562e+00 -2.64614969e-01 2.08764002e-01] ... 
[-1.90699384e-01 4.46768701e-01 -4.62915182e-01 ... -8.47026765e-01 1.00833702e+00 9.35302079e-01] [ 4.89427119e-01 -7.49676049e-01 -6.43809736e-01 ... -7.71885753e-01 6.78642690e-02 4.65387672e-01] [-8.81770492e-01 1.12124681e+00 7.46571198e-02 ... -1.91089439e+00 -8.53821337e-01 -2.04547927e-01]] [[-3.53590727e-01 -4.85189945e-01 -1.47236079e-01 ... -9.43051636e-01 2.52997547e-01 5.03722802e-02] [ 1.83847584e-02 4.39184129e-01 -2.89517403e-01 ... -2.79553145e-01 5.24665415e-01 1.32530105e+00] [-2.01504207e+00 -1.08764696e+00 -6.21044815e-01 ... -6.48806274e-01 8.86111975e-01 -9.25634027e-01] ... [ 1.39811110e+00 -4.41503108e-01 2.35862106e-01 ... 3.79452020e-01 -7.81854331e-01 -6.13855600e-01] [-4.02331166e-02 -5.04570246e-01 -6.19304597e-01 ... 2.64535427e-01 8.99139345e-01 6.55436888e-02] [ 3.76420259e-01 -1.32933009e+00 3.36337388e-01 ... 3.76437940e-02 -6.43669665e-01 -5.79308569e-02]] [[ 1.46635592e-01 -1.71739146e-01 3.48886073e-01 ... -8.14314842e-01 1.01968896e+00 3.54214400e-01] [ 1.50502467e+00 -1.27000165e+00 -4.71845418e-02 ... 1.10909176e+00 4.17254478e-01 -1.94589809e-01] [-1.26241767e+00 4.55383211e-01 -1.56430411e+00 ... -7.74100840e-01 1.94098473e-01 7.75584221e-01] ... [-3.61208647e-01 8.31235766e-01 1.31314981e+00 ... -2.44927645e-01 -3.85487348e-01 1.76698282e-01] [ 3.86350118e-02 5.16113281e-01 -2.69746095e-01 ... 7.76594222e-01 -2.65127629e-01 -4.03621614e-01] [-2.50732541e-01 6.73590228e-02 9.61006999e-01 ... 2.66393498e-02 -7.47193933e-01 -8.34729612e-01]]] [[[ 6.20929182e-01 7.26467371e-01 1.27597427e+00 ... 5.97220302e-01 4.90451396e-01 4.23783541e-01] [-9.03117359e-02 -6.50113868e-03 -1.98060632e-01 ... -1.29295334e-01 1.32659817e+00 -1.05877995e+00] [-7.54975140e-01 -4.19042915e-01 -4.67547446e-01 ... 5.54348171e-01 8.78895819e-01 1.58333921e+00] ... [ 4.49752867e-01 6.49330974e-01 -1.69393599e-01 ... -2.34507043e-02 -3.49216282e-01 -1.94174096e-01] [-4.99523401e-01 8.18398893e-01 4.27880138e-01 ... 
-3.07407742e-03 9.38094854e-01 -2.28456354e+00] [ 3.26440394e-01 -9.50576007e-01 -5.94830453e-01 ... 4.30352747e-01 4.49486643e-01 3.95722508e-01]] [[-1.07403910e+00 -4.19123858e-01 4.35222745e-01 ... -9.50354517e-01 -6.63924694e-01 -4.77467537e-01] [-2.37666309e-01 5.48687518e-01 8.63277316e-01 ... -2.26561483e-02 -1.06444336e-01 2.50588715e-01] [ 5.59672952e-01 -3.00866496e-02 4.45297956e-01 ... -4.87491377e-02 9.33662057e-01 1.74227023e+00] ... [-1.55097276e-01 1.45818174e+00 5.23974419e-01 ... -9.69032884e-01 6.28151655e-01 3.38628024e-01] [-2.53116697e-01 -5.87662518e-01 -5.02440572e-01 ... -6.34393692e-01 -1.66644782e-01 2.58717574e-02] [-5.87179780e-01 2.19751149e-01 -8.53386104e-01 ... -1.22827150e-01 1.97654646e-02 -1.57980227e+00]] [[-1.89283803e-01 2.57298648e-01 3.99191350e-01 ... -8.30647796e-02 6.30867898e-01 2.86667850e-02] [ 5.19594103e-02 -8.72197807e-01 1.02017283e-01 ... -1.26364887e-01 -1.15029645e+00 3.12284559e-01] [-1.73296943e-01 2.41523325e-01 1.14453089e+00 ... 9.70408976e-01 -1.71962273e+00 6.22430623e-01] ... [-4.95658576e-01 -6.53140366e-01 9.66164470e-01 ... 2.06023023e-01 -1.18011928e+00 9.56211746e-01] [ 7.42612839e-01 6.67270184e-01 -5.09112358e-01 ... 6.29481748e-02 4.88524623e-02 3.59054208e-01] [ 2.49367490e-01 7.05894947e-01 9.86531377e-02 ... -1.11481977e+00 7.26765692e-01 -1.52828708e-01]] ... [[ 4.21140134e-01 1.01756263e+00 -5.45247614e-01 ... 6.79819584e-01 -1.28943920e+00 1.02349848e-01] [ 1.06786561e+00 -3.89474519e-02 1.56008616e-01 ... -9.47654724e-01 -6.59822524e-01 -8.14900100e-01] [-9.91729617e-01 4.67636883e-01 -6.11547709e-01 ... -4.65824664e-01 -3.42646986e-01 -7.65927434e-01] ... [ 4.42427844e-01 8.28354239e-01 1.12296991e-01 ... 7.51239121e-01 -1.51589715e+00 -1.19097185e+00] [-7.21598923e-01 4.48005974e-01 4.25341338e-01 ... -4.62051213e-01 2.19356298e-01 -1.65769458e-01] [-5.83566666e-01 2.40190193e-01 8.92577291e-01 ... 
3.65448415e-01 -1.04786205e+00 1.05503047e+00]] [[-7.77899921e-01 6.61369443e-01 1.85870975e-01 ... 4.63362217e-01 -4.08074051e-01 1.16088346e-01] [ 2.94473708e-01 1.52902997e+00 3.01349133e-01 ... -5.22242725e-01 1.42552540e-01 -7.29335368e-01] [-1.45861343e-01 -5.49884915e-01 6.58889040e-02 ... 1.37284267e+00 8.81238133e-02 -1.12739372e+00] ... [-2.63540089e-01 -4.17903692e-01 1.53849736e-01 ... -3.07632983e-01 6.71354115e-01 -1.80826396e-01] [-8.19383487e-02 6.87687173e-02 1.06421947e+00 ... -3.62035125e-01 -3.43456328e-01 9.87978339e-01] [ 3.73902887e-01 -6.82192445e-01 -1.19865203e+00 ... -6.15821779e-02 5.91582693e-02 4.19921577e-01]] [[-7.37155795e-01 6.46084070e-01 -1.14280319e+00 ... 5.36886454e-01 -1.35888040e+00 1.63164735e-01] [ 7.06323564e-01 1.49008818e-02 -7.10145891e-01 ... 4.17604715e-01 -1.33126453e-01 -9.33623195e-01] [ 7.27312803e-01 1.58179033e+00 4.03113663e-01 ... -1.69760680e+00 -1.47865367e+00 3.29432338e-02] ... [ 2.33732581e-01 -1.64683312e-01 -7.49127209e-01 ... 9.39685404e-01 1.44764423e+00 5.92311382e-01] [ 3.79021466e-01 8.87935400e-01 1.96514344e+00 ... -7.76330471e-01 1.70653135e-01 4.85246271e-01] [ 1.06316261e-01 4.57825139e-02 -8.09934214e-02 ... -9.61794853e-01 -4.98914510e-01 8.93695414e-01]]] [[[ 1.83170319e-01 -6.11827791e-01 -4.37744707e-01 ... -5.26909292e-01 -1.25676077e-02 -5.09086996e-02] [-7.31008887e-01 -4.28700566e-01 -3.38655740e-01 ... -8.61000776e-01 4.86178994e-01 5.99376440e-01] [ 7.44051337e-01 -1.24910080e+00 3.90975446e-01 ... 6.76461160e-01 1.29331678e-01 -5.30795455e-01] ... [-1.16328025e+00 1.07305133e+00 1.30248022e+00 ... 3.48396599e-01 -9.10752892e-01 -1.94473132e-01] [-2.57669955e-01 1.05300176e+00 -1.06483914e-01 ... 8.91046822e-02 -7.88129643e-02 5.49446344e-01] [ 8.00253093e-01 1.40724373e+00 -5.92271030e-01 ... 2.42126256e-01 -1.06599994e-01 4.64373857e-01]] [[ 2.18267053e-01 -2.63590157e-01 1.39454293e+00 ... 
-2.63578206e-01 -1.12521112e-01 6.00328624e-01] [-8.16300631e-01 4.24107999e-01 -2.55918443e-01 ... -1.12526023e+00 -4.94993031e-01 -1.12727249e+00] [-4.62652802e-01 1.00155115e+00 2.95399159e-01 ... -7.44109392e-01 -1.30778551e+00 4.83900070e-01] ... [-3.86439450e-02 -1.84070647e-01 1.10419643e+00 ... -1.02009869e+00 -1.28230679e+00 -3.53129834e-01] [ 1.61786115e+00 -7.08158672e-01 -1.32385123e+00 ... -2.91531384e-01 -4.07281667e-01 5.39327025e-01] [-1.47676647e-01 -9.75636959e-01 1.75009310e+00 ... -5.36061704e-01 -5.79976797e-01 5.81038117e-01]] [[-2.34448522e-01 6.51025891e-01 -1.11271584e+00 ... 8.95395517e-01 7.75422513e-01 -3.58090490e-01] [-4.75680172e-01 6.08791053e-01 -3.36779296e-01 ... 4.96878177e-01 -1.22879744e+00 -7.53138125e-01] [ 4.87957925e-01 -2.42504090e-01 1.49235463e+00 ... -7.08466530e-01 3.01675737e-01 1.12942271e-01] ... [ 9.09188271e-01 1.74528435e-02 -7.38982484e-02 ... -4.68633264e-01 -1.80162191e-01 -7.20004857e-01] [ 3.08943897e-01 1.64507186e+00 1.44722790e-01 ... -1.28642261e+00 -6.20679915e-01 4.13499385e-01] [-2.97366381e-01 -8.09107006e-01 -2.09280595e-01 ... -1.25650316e-03 7.49066651e-01 -8.67704898e-02]] ... [[-8.91837254e-02 1.40643203e+00 -9.74885106e-01 ... 3.53401750e-02 2.58640081e-01 3.59922677e-01] [ 6.95834577e-01 7.91856647e-01 -7.38741696e-01 ... -4.24213797e-01 1.13637459e+00 -4.44548994e-01] [-5.34720004e-01 -4.73711967e-01 -1.29678190e+00 ... 1.30647838e+00 -1.23409903e+00 -6.33451641e-01] ... [ 1.64664185e+00 6.13731980e-01 2.95911819e-01 ... -2.09125757e-01 -5.53925395e-01 -7.14189559e-02] [ 2.15234756e-01 1.54306367e-01 7.84200072e-01 ... -6.53425336e-01 -1.28514305e-01 -9.60302129e-02] [-2.70232260e-01 -1.34880614e+00 -4.66298103e-01 ... 1.05028784e+00 1.78291708e-01 -2.52496123e-01]] [[ 3.13336611e-01 -1.26762199e+00 4.81185466e-01 ... 3.48363072e-01 5.07746756e-01 -8.00356448e-01] [ 6.61305547e-01 -9.03806388e-01 2.09190905e-01 ... 
9.12108600e-01 3.23872030e-01 8.76453519e-01] [ 1.66371703e-01 4.97119933e-01 -1.28372610e+00 ... 8.90325084e-02 -4.79694724e-01 2.78708455e-03] ... [-1.12352335e+00 -3.86705041e-01 -5.51028013e-01 ... 1.01965773e+00 4.66289312e-01 -6.04799330e-01] [ 2.56267965e-01 -6.05246842e-01 -9.33955073e-01 ... -5.16138911e-01 -9.08671618e-01 9.13685322e-01] [-7.13396549e-01 7.58640826e-01 -7.53921270e-02 ... -1.19782686e+00 -9.81042385e-01 5.91411293e-01]] [[ 2.26951092e-01 1.17942333e+00 1.77215293e-01 ... -2.41398439e-01 -5.48307419e-01 8.18903625e-01] [ 8.12619984e-01 -3.20253432e-01 5.87097518e-02 ... -5.09820461e-01 -9.44655955e-01 5.54516137e-01] [ 2.38331151e+00 5.57181358e-01 3.54506746e-02 ... -2.47510970e-01 -3.79082747e-02 -7.65925169e-01] ... [-7.41693974e-01 9.29051280e-01 1.34325635e+00 ... 2.56463625e-02 3.04786712e-01 1.12916517e+00] [ 3.53490293e-01 3.33966732e-01 -3.69766504e-01 ... 1.03779590e+00 -3.09677333e-01 -6.01155311e-02] [ 1.20082431e-01 -5.95010102e-01 2.31417939e-01 ... -9.31952834e-01 -2.93249398e-01 1.00639331e+00]]] [[[-2.58520365e-01 3.23776543e-01 -3.58747512e-01 ... 5.96830361e-02 -8.32409084e-01 7.43119180e-01] [-2.90923536e-01 4.64291751e-01 1.42991513e-01 ... 6.14034355e-01 4.35070008e-01 8.85929018e-02] [ 2.98036286e-03 6.74505889e-01 -5.48799708e-02 ... -9.04759839e-02 3.83085221e-01 7.08332181e-01] ... [-6.75115967e-03 -2.93870032e-01 -1.70006424e-01 ... -7.31702626e-01 -7.88703859e-01 -2.97740437e-02] [ 2.62928915e+00 5.14702797e-01 -1.78574294e-01 ... 1.73052460e-01 -8.43301117e-02 4.88807142e-01] [-1.50996399e+00 2.81067431e-01 7.29324937e-01 ... 1.29474843e+00 -9.76419866e-01 1.10798396e-01]] [[-2.85286248e-01 1.19993722e+00 5.52707434e-01 ... 9.99974385e-02 -1.23150194e+00 -1.35272789e+00] [ 4.68692183e-01 -8.03509176e-01 8.72216821e-01 ... 9.52374876e-01 -1.04255497e+00 -6.70659304e-01] [ 1.08872795e+00 7.68754184e-01 -8.31290007e-01 ... 7.24313259e-01 -3.18305165e-01 8.54284108e-01] ... 
[-1.38584232e+00 -4.64551568e-01 1.89738214e-01 ... -1.04202676e+00 3.08465630e-01 7.70641148e-01] [ 7.47321606e-01 1.85311511e-01 -4.23573077e-01 ... -2.18534365e-01 1.05118358e+00 -1.22687232e+00] [ 1.83688506e-01 1.10954738e+00 6.87721446e-02 ... 1.74764848e+00 -9.81653452e-01 1.10262060e+00]] [[-4.77314115e-01 7.46325105e-02 8.97340834e-01 ... -1.58769703e+00 -1.29121756e-02 -1.31822631e-01] [-4.43946391e-01 -5.95624149e-01 -2.08265007e-01 ... 5.89346647e-01 -6.68656826e-02 -1.03862658e-01] [ 4.85022694e-01 -1.26509279e-01 -4.26950425e-01 ... -6.50424778e-01 -5.78185678e-01 -3.58009279e-01] ... [-8.56929868e-02 4.63985890e-01 4.26014513e-01 ... -5.07245839e-01 3.24119925e-01 -1.23095381e+00] [ 2.40549922e-01 4.77243692e-01 1.68706194e-01 ... 7.77635515e-01 -2.63647377e-01 -3.74582976e-01] [-2.78217047e-01 -1.60661757e+00 -1.41082144e+00 ... 1.12051688e-01 1.15091205e+00 5.55724092e-02]] ... [[-3.28151762e-01 -4.12611991e-01 -4.22788501e-01 ... 3.11811268e-01 -5.79034626e-01 -5.55775940e-01] [ 8.03726017e-01 -8.91886711e-01 -9.84539568e-01 ... -6.54067814e-01 -6.53397918e-01 -1.04203022e+00] [ 1.18709064e+00 3.38046908e-01 6.93316579e-01 ... 1.82718009e-01 -8.99514139e-01 -1.07634803e-02] ... [-1.02954531e+00 -1.63910702e-01 -5.60312688e-01 ... 1.01742223e-01 3.69178623e-01 -1.25834620e+00] [ 8.30603838e-01 5.68124205e-02 -2.92032845e-02 ... -1.15729518e-01 -2.59413421e-02 -2.46986389e-01] [-5.92914298e-02 2.89035857e-01 -1.67515025e-01 ... 1.34403944e+00 -6.58874214e-01 -4.34048682e-01]] [[-2.40873992e-01 -6.17022812e-01 -4.21688765e-01 ... -3.58823001e-01 -3.12751502e-01 -1.11302827e-02] [ 1.23598611e+00 3.91616046e-01 1.91502318e-01 ... -6.83657110e-01 6.04657650e-01 1.01936996e+00] [-1.71682462e-01 4.09230411e-01 -2.91944183e-02 ... 3.35497223e-02 -3.69696915e-01 1.02149928e+00] ... [ 1.28691256e-01 -2.78612021e-02 -6.48949742e-01 ... 5.81478357e-01 -5.66975772e-01 9.35852349e-01] [-1.21784441e-01 -8.05546820e-01 4.65825737e-01 ... 
-6.44613862e-01 -2.85062432e-01 -7.72162855e-01] [-9.96432722e-01 6.32839262e-01 1.20197460e-01 ... -7.47606099e-01 -1.26784194e+00 2.26116553e-01]] [[-1.05809295e+00 -4.44236547e-01 8.03239703e-01 ... -1.90194398e-01 -3.04454267e-01 -5.63108444e-01] [-5.91358185e-01 1.13819647e+00 -1.87105477e-01 ... -8.65151584e-01 -5.09039640e-01 1.27796936e+00] [ 7.40769088e-01 -2.40894228e-01 8.90111864e-01 ... 2.52105147e-01 -5.16890287e-02 8.09240118e-02] ... [-1.14983869e+00 1.88497439e-01 2.43840262e-01 ... -2.72770941e-01 -5.82474589e-01 5.48069477e-01] [ 4.89810575e-03 -8.92555296e-01 -5.35776794e-01 ... -1.20296618e-02 6.35813296e-01 3.78647447e-01] [-6.75052166e-01 6.11137927e-01 -1.41282094e+00 ... -9.40656543e-01 -3.81000400e-01 4.03372020e-01]]] [[[ 5.86060703e-01 -3.21647733e-01 1.94281489e-01 ... 9.35256109e-02 -6.29116535e-01 -8.90774950e-02] [ 1.64870992e-01 2.79090982e-02 2.16791600e-01 ... -8.84653747e-01 6.92089722e-02 9.23215449e-02] [-9.92465913e-01 -1.59564495e-01 -9.77263749e-02 ... -5.20729125e-01 3.11042994e-01 -4.71059501e-01] ... [-1.02800429e-02 1.33421791e+00 -1.01970088e+00 ... -1.06254578e+00 1.14805245e+00 2.37946063e-01] [ 4.33163971e-01 2.23919049e-01 8.38029206e-01 ... 3.90899628e-01 -7.23225951e-01 -4.17232394e-01] [-9.47321117e-01 -4.51300710e-01 -7.64419854e-01 ... 4.55715328e-01 -5.29759109e-01 -3.76396447e-01]] [[ 3.88403147e-01 8.67901087e-01 2.96646476e-01 ... -2.05650911e-01 -2.33726755e-01 1.65100617e-03] [-4.97733533e-01 8.43695283e-01 -3.21155697e-01 ... 1.04168463e+00 7.12807477e-01 -1.43155470e-01] [-4.46609616e-01 7.65738249e-01 8.43245447e-01 ... 6.94525003e-01 -1.22668588e+00 9.38320577e-01] ... [-7.44211257e-01 1.06125939e+00 -5.65283954e-01 ... -3.94374877e-01 -8.74151647e-01 -9.18594301e-01] [-1.48011327e-01 2.20172405e-01 -6.37888551e-01 ... -4.17897403e-02 -1.09508073e+00 -1.86269894e-01] [ 9.15501237e-01 4.69464540e-01 -3.24905306e-01 ... 
1.40771270e+00 3.63137305e-01 -2.64345199e-01]] [[ 7.21438751e-02 -3.83719131e-02 -1.34185493e+00 ... 9.88265947e-02 -3.70417565e-01 5.74431904e-02] [ 1.64663978e-02 -3.64755988e-01 3.74767572e-01 ... 7.91551292e-01 -2.54452765e-01 -4.56594169e-01] [-6.92308962e-01 -4.08566177e-01 7.74984360e-02 ... -1.04323648e-01 6.54196203e-01 -7.60335743e-01] ... [-9.40721095e-01 -6.29965544e-01 -5.50657451e-01 ... 7.91415215e-01 1.05358422e+00 8.31242144e-01] [-4.92853999e-01 -2.02672626e-03 -1.30278599e+00 ... -8.18714082e-01 -6.17295623e-01 -1.07412505e+00] [ 8.96575674e-02 -6.22976005e-01 7.60209709e-02 ... 4.42855626e-01 -1.32158983e+00 -8.26321185e-01]] ... [[-8.89099613e-02 -1.59997690e+00 -1.08712651e-02 ... -3.34947914e-01 7.60288537e-01 3.24296445e-01] [ 1.11440480e+00 5.25902271e-01 -9.61616397e-01 ... 9.35389936e-01 1.09097183e+00 5.16324401e-01] [ 4.93318111e-01 2.96507869e-03 1.40091389e-01 ... 5.22319913e-01 2.41658092e-01 -3.02106500e-01] ... [-3.75166237e-01 -1.46210715e-01 -1.61689073e-02 ... -3.07852536e-01 -2.11626261e-01 9.51993167e-01] [-1.21703899e+00 -1.37076235e+00 2.81207860e-01 ... 3.68320972e-01 -2.65403599e-01 -1.02047479e+00] [ 1.09961689e+00 -1.77146047e-01 -5.04087031e-01 ... 4.05131102e-01 1.16347384e+00 -2.55843401e-01]] [[-1.04839623e+00 1.05951212e-01 -4.43454534e-02 ... 5.50688744e-01 4.75871027e-01 9.95839119e-01] [-5.45693696e-01 7.11129189e-01 1.21705472e+00 ... -4.17742848e-01 8.13349783e-01 1.99994057e-01] [-2.21401468e-01 -1.13041222e+00 4.81574267e-01 ... 5.11883378e-01 6.73588336e-01 -7.85608172e-01] ... [-1.04345989e+00 -6.76414669e-01 3.09418172e-01 ... -9.50359583e-01 1.19098628e+00 -5.74451923e-01] [ 1.75544068e-01 2.93790072e-01 2.18870908e-01 ... -3.73420149e-01 1.11345255e+00 -7.07469940e-01] [-8.74058843e-01 -3.01307380e-01 3.96578074e-01 ... -1.31579101e-01 -8.79095674e-01 -1.02628875e+00]] [[-4.04246897e-02 6.53944135e-01 -3.38454336e-01 ... 
-4.63891566e-01 6.09435499e-01 2.34135300e-01] [ 7.95630515e-01 8.95690203e-01 -7.07903922e-01 ... -3.57252061e-01 1.70682237e-01 -6.44951999e-01] [-2.59857744e-01 2.14922562e-01 2.00245142e-01 ... -3.47040564e-01 -1.20876670e+00 -1.07740033e+00] ... [-9.90996361e-01 8.78040433e-01 1.21485484e+00 ... 3.84852916e-01 2.73902774e-01 -5.05629063e-01] [-2.07759571e+00 1.95472494e-01 1.52764842e-01 ... 2.91495502e-01 -3.81312430e-01 5.10938823e-01] [-1.41560602e+00 -2.64340937e-01 1.01496197e-01 ... -1.10574031e+00 7.55357504e-01 -1.01702765e-01]]] [[[-4.53792401e-02 3.76711011e-01 8.73200476e-01 ... 9.26274538e-01 6.51021123e-01 5.17607868e-01] [-3.61306638e-01 -5.30073762e-01 -4.96511042e-01 ... 1.45367235e-01 5.46350181e-01 -6.80029273e-01] [ 5.44306040e-01 -6.52808905e-01 1.35446608e-01 ... 4.78209436e-01 9.04274359e-02 -2.33220905e-02] ... [ 1.38583291e+00 -8.14744234e-01 -4.56730634e-01 ... -8.11437741e-02 -2.10755968e+00 -1.12247133e+00] [-1.14246130e+00 -1.13416266e+00 -5.34009695e-01 ... 7.81092465e-01 -9.78312254e-01 5.11482120e-01] [ 4.59025383e-01 6.55694446e-03 -1.25245678e+00 ... -5.69080234e-01 -4.25561547e-01 -2.40434542e-01]] [[ 3.54512215e-01 -3.79990190e-01 1.38353817e-02 ... -4.83896017e-01 7.29841948e-01 -1.70403287e-01] [ 2.30089769e-01 -3.36315602e-01 -5.77788532e-01 ... 9.43258762e-01 -9.11581695e-01 -4.84068990e-02] [ 8.19110990e-01 1.23438787e+00 -6.78877890e-01 ... -7.47342646e-01 2.92186201e-01 2.03519017e-02] ... [ 1.77422076e-01 2.16269791e-01 -3.80733997e-01 ... -6.28583848e-01 9.31460038e-02 9.80271995e-01] [-1.90172315e-01 2.25464195e-01 -9.32766438e-01 ... -9.38270032e-01 2.43012547e-01 1.09996998e+00] [ 4.55087796e-02 -1.42072129e+00 5.33945560e-01 ... -4.45510238e-01 -2.33137548e-01 1.77938962e+00]] [[-8.78409445e-01 -9.69593823e-01 9.32239413e-01 ... 6.03668988e-01 -3.89569044e-01 3.34302306e-01] [-2.49105975e-01 -2.58434266e-01 -5.84618032e-01 ... 
5.15502095e-01 -3.35969001e-01 6.42419159e-01] [ 9.38315213e-01 1.13253467e-01 4.71023202e-01 ... -1.82640803e+00 -1.85270786e-01 -1.62813783e-01] ... [-6.48017824e-01 2.55404621e-01 -2.15931818e-01 ... 7.19071150e-01 -2.36380458e-01 2.70593107e-01] [ 3.69190186e-01 1.11332524e+00 -5.10223031e-01 ... 6.07525222e-02 -9.38323796e-01 -6.53935611e-01] [-1.30307823e-01 -4.92284030e-01 -4.29969668e-01 ... 7.29334831e-01 5.10830581e-01 -6.82351112e-01]] ... [[-5.01705557e-02 -4.86075193e-01 4.25392725e-02 ... -1.31779134e-01 6.42262161e-01 2.66313583e-01] [ 5.10440826e-01 -2.42505565e-01 -8.74516845e-01 ... 5.92728734e-01 -1.39936125e+00 5.80016732e-01] [ 1.68965086e-01 -8.17860603e-01 9.42923605e-01 ... -1.65670943e+00 3.91960964e-02 1.56053805e+00] ... [ 4.20590132e-01 6.08023047e-01 -3.17464352e-01 ... 5.91163218e-01 5.49486041e-01 1.35424495e+00] [ 6.35710835e-01 -1.31155109e+00 1.03309715e+00 ... -3.09400439e-01 7.59633422e-01 1.03463328e+00] [-1.71088719e+00 -6.23312831e-01 -3.78135949e-01 ... 7.87648141e-01 -1.13937449e+00 -2.94504464e-01]] [[ 2.31631607e-01 -1.64664590e+00 -1.51451185e-01 ... 1.23639546e-01 -6.40002131e-01 -4.97914672e-01] [-1.33936942e+00 7.17158258e-01 7.57506192e-02 ... -4.99550492e-01 4.02706712e-01 -1.07976353e+00] [ 1.21170655e-01 6.35145843e-01 -3.25916678e-01 ... 1.21311200e+00 1.43030047e-01 -3.17151666e-01] ... [ 9.79729831e-01 -4.24138814e-01 -3.10199469e-01 ... -6.96823537e-01 7.60974109e-01 6.75538898e-01] [ 3.26853655e-02 1.69385865e-01 -2.22774014e-01 ... 7.27094710e-01 -7.88103998e-01 -1.73091128e-01] [-6.84546530e-01 4.02311496e-02 9.89296101e-03 ... 7.05332935e-01 -5.33720076e-01 -5.08473396e-01]] [[-4.82777208e-01 -4.87019643e-02 -5.31491339e-01 ... -4.50694114e-01 5.12167215e-01 -6.97588861e-01] [-2.60371119e-01 6.32338524e-01 1.20459425e+00 ... -3.82463843e-01 3.42792034e-01 8.20144057e-01] [-1.46797621e+00 5.34434676e-01 1.98429778e-01 ... -6.16019011e-01 -3.24863553e-01 3.32874954e-01] ... 
[ 9.85315740e-01 -1.10133588e+00 -1.22917879e+00 ... 2.40577951e-01 7.45473981e-01 1.15058255e+00] [-5.56531847e-01 -1.61538690e-01 3.20151418e-01 ... -1.23334050e+00 -2.29010448e-01 1.79129386e+00] [-9.98303950e-01 1.44089317e+00 -5.35812080e-01 ... -4.16065529e-02 -4.65552151e-01 7.68641949e-01]]]] ... [[[[-2.70835549e-01 6.54733062e-01 1.61078942e+00 ... -4.81868327e-01 -1.49008751e-01 2.10221455e-01] [ 1.45313278e-01 -8.99570346e-01 -2.71493018e-01 ... 6.44978702e-01 -4.00953412e-01 9.53023806e-02] [-2.35608131e-01 1.67852655e-01 6.51469529e-01 ... -5.67017853e-01 -5.74764848e-01 4.38686430e-01] ... [-3.83643478e-01 -7.97567964e-01 -8.00008923e-02 ... 1.05948709e-02 -1.00168981e-01 8.38749766e-01] [-2.88797826e-01 -4.40015078e-01 1.75582111e-01 ... -1.67595327e-01 9.03155468e-03 1.31218636e+00] [ 4.23586071e-01 1.72383830e-01 8.47988963e-01 ... 7.31450617e-02 1.76348031e+00 8.19037795e-01]] [[-9.89806652e-01 1.18875504e-01 3.96953404e-01 ... 6.33027852e-01 4.04082596e-01 2.30790880e-02] [-2.68615216e-01 6.78490996e-01 5.57568729e-01 ... 5.83717108e-01 1.29913986e-01 -1.08986545e+00] [-5.24234951e-01 1.37090594e-01 7.10593581e-01 ... -1.18987933e-02 1.45225659e-01 -4.51854944e-01] ... [-1.01481187e+00 5.31506240e-01 -4.79285419e-01 ... 7.02121079e-01 5.69539905e-01 -1.40031290e+00] [ 8.64321649e-01 1.85875249e+00 -6.11648299e-02 ... 2.25990325e-01 5.45069456e-01 1.06335592e+00] [-5.27823687e-01 5.75742066e-01 1.22386754e+00 ... -1.09264147e+00 6.61414266e-02 1.18377876e+00]] [[-5.78655303e-01 -6.85785353e-01 -9.09847021e-01 ... -1.98173583e+00 -3.86634707e-01 7.12383449e-01] [-9.63618696e-01 5.27579039e-02 -2.50146449e-01 ... 9.26076174e-01 -7.12529123e-01 1.29118383e-01] [ 3.49237621e-01 -2.78557185e-02 2.95076519e-01 ... -8.85620527e-03 -5.88089466e-01 5.67734122e-01] ... [-4.24733222e-01 4.39542346e-02 -4.13252473e-01 ... 7.89252460e-01 -1.50132952e-02 7.11500645e-01] [ 1.44204545e+00 1.76884961e+00 8.66299644e-02 ... 
3.45135599e-01 1.00909144e-01 9.45780754e-01] [-6.77186847e-01 -9.79546979e-02 -4.51499730e-01 ... 7.31796503e-01 -8.00208926e-01 6.08548582e-01]] ... [[ 5.08244038e-01 9.60186481e-01 -7.97622025e-01 ... 4.91873264e-01 1.15616150e-01 7.78113455e-02] [ 1.97935045e-01 -4.25544858e-01 -5.74209630e-01 ... 4.43017691e-01 4.14963216e-01 -3.72863233e-01] [ 1.29909086e+00 5.49421370e-01 7.91072905e-01 ... -4.65548366e-01 5.31707644e-01 -3.69420022e-01] ... [-2.62421131e-01 5.66270411e-01 5.67432284e-01 ... -6.98743999e-01 -9.23466742e-01 -9.28712860e-02] [-1.07234538e+00 8.45746934e-01 5.41041017e-01 ... -4.26587909e-01 1.46858394e-01 7.06306279e-01] [-3.22579622e-01 7.50401676e-01 3.42770368e-01 ... 1.09709613e-01 5.94793320e-01 -5.49463034e-01]] [[ 8.60526562e-01 6.53595567e-01 -8.93383741e-01 ... 5.10324419e-01 2.43803740e-01 -7.46184811e-02] [ 8.73907581e-02 1.59156606e-01 -3.88670623e-01 ... -3.84219646e-01 -1.18048616e-01 4.36044335e-01] [-7.00618699e-02 1.50284588e-01 6.68326795e-01 ... 6.68406546e-01 4.83748734e-01 -1.16919160e+00] ... [-4.58910555e-01 9.38441336e-01 3.23622555e-01 ... -4.41176921e-01 -1.18551791e+00 -1.14839576e-01] [-7.31538653e-01 -5.66703558e-01 -6.38948604e-02 ... 9.94720161e-02 -6.88401699e-01 -2.54066706e-01] [-6.24935143e-02 -7.63083518e-01 3.88589144e-01 ... -1.31078064e+00 -1.12852179e-01 -7.03454375e-01]] [[ 1.22520876e+00 -8.72385561e-01 2.97651049e-02 ... 8.95124435e-01 1.86386541e-01 -4.20302421e-01] [-3.32075804e-01 -1.19587116e-01 3.81910264e-01 ... -1.39893138e+00 4.82405663e-01 -2.59897202e-01] [-5.04625775e-02 -7.50544369e-01 -1.04602528e+00 ... 2.01130509e-02 -2.03737378e-01 -8.77592623e-01] ... [-5.05368412e-01 7.47969925e-01 -7.71520659e-02 ... 1.99833751e-01 3.43456239e-01 -6.45268142e-01] [-7.87633419e-01 5.32474458e-01 -7.48445272e-01 ... -2.49073672e+00 -8.04856837e-01 5.39693236e-01] [ 5.75353026e-01 8.54734033e-02 3.30151141e-01 ... 
8.72860670e-01 1.80166766e-01 8.16913784e-01]]] [[[-9.12408888e-01 -8.04439187e-01 -6.15271091e-01 ... 1.24274820e-01 -5.29128373e-01 3.99363965e-01] [ 2.99457759e-01 -6.18002415e-01 -7.05111861e-01 ... 1.22637257e-01 1.55295432e+00 3.28288555e-01] [-8.22674949e-03 -8.80553722e-01 -1.36389911e-01 ... -1.24196582e-01 -1.08570778e+00 -9.90044415e-01] ... [-1.11721659e+00 -2.29638278e-01 -9.83514845e-01 ... -4.25131977e-01 -7.35257268e-01 -3.11351508e-01] [-5.21290779e-01 4.22600269e-01 7.02268779e-01 ... -5.84805012e-01 -1.03665876e+00 3.20728838e-01] [ 1.08953416e+00 -3.33097786e-01 3.21522623e-01 ... -4.23668951e-01 -1.41360566e-01 3.95261586e-01]] [[-7.82270581e-02 5.26272535e-01 -1.29527271e+00 ... -6.25700295e-01 -3.92562717e-01 -6.69321060e-01] [-3.40085566e-01 -1.26623166e+00 2.66122580e-01 ... 3.29542130e-01 -1.00180006e+00 -5.27932763e-01] [-1.23280764e-01 -2.32099116e-01 9.81950879e-01 ... -1.44885492e+00 -2.88269341e-01 1.18140948e+00] ... [ 1.72109514e-01 -3.23575974e-01 1.09544516e-01 ... -7.14832544e-01 1.81750637e-02 -9.12317693e-01] [-5.47754705e-01 -1.11391008e+00 -1.37938827e-01 ... 2.22131148e-01 -3.58216673e-01 3.04446161e-01] [ 7.04201579e-01 -2.15757251e+00 -7.45332897e-01 ... 1.58966362e-01 1.02017537e-01 -5.69613218e-01]] [[-5.11951208e-01 -5.11274040e-02 -2.15857163e-01 ... 1.00085564e-01 5.40727139e-01 7.31713414e-01] [-3.91689651e-02 7.65462160e-01 -9.53241736e-02 ... 3.84094477e-01 -7.69152939e-01 -5.31101406e-01] [-3.84904921e-01 -4.60191406e-02 3.62331830e-02 ... 3.04223955e-01 -1.20556641e+00 -1.86370775e-01] ... [-5.50208837e-02 -1.91441095e+00 -8.72598588e-01 ... -5.09135485e-01 8.29301357e-01 -9.66059148e-01] [ 3.91100496e-02 -4.14865106e-01 -6.05953813e-01 ... -5.44324279e-01 -1.17393464e-01 3.51448134e-02] [-2.52466977e-01 5.02222814e-02 -1.61528215e-01 ... 4.96524155e-01 -7.41542578e-01 6.58664405e-01]] ... [[ 8.86880234e-02 -5.64343870e-01 -7.39639282e-01 ... 
5.75690746e-01 8.40268433e-01 -1.42272919e-01] [-1.15299881e-01 -1.16973534e-01 1.37163532e+00 ... -9.47578430e-01 -5.43400705e-01 1.10079372e+00] [ 4.31585789e-01 -1.43242970e-01 1.24530666e-01 ... -5.01442015e-01 1.59123495e-01 1.05391204e+00] ... [ 7.15685412e-02 -1.13129652e+00 5.23041748e-02 ... 5.35616159e-01 7.16998875e-02 -2.26447329e-01] [-8.36407542e-01 -3.53904277e-01 -3.03256154e-01 ... 5.73631406e-01 -9.87861395e-01 -2.00714856e-01] [ 1.14142787e+00 9.98458982e-01 -9.07677770e-01 ... 5.30963600e-01 -5.72342537e-02 -4.02913719e-01]] [[ 1.15405273e+00 2.21811369e-01 -8.94884944e-01 ... 2.51886338e-01 1.84954488e+00 1.17146604e-01] [-1.65428922e-01 3.54250193e-01 -1.14478715e-01 ... 6.20967686e-01 -1.66942418e-01 -1.43485689e+00] [ 1.23974480e-01 -4.70779538e-01 3.80482405e-01 ... 1.16249180e+00 -6.42293274e-01 7.25237906e-01] ... [ 5.00114977e-01 2.94591308e-01 1.58797801e+00 ... -5.68605840e-01 8.42114687e-01 1.38568997e-01] [ 6.84066862e-02 -2.94664860e-01 -3.74517947e-01 ... -2.45626003e-01 -1.26381680e-01 6.33494377e-01] [-1.01718116e+00 5.79140149e-02 -4.08650368e-01 ... -5.72524190e-01 -7.43870139e-01 -8.87686491e-01]] [[ 1.18688774e+00 -6.71992183e-01 5.23780942e-01 ... 5.06753504e-01 8.98507684e-02 1.12491560e+00] [ 1.07587524e-01 -3.47033024e-01 -6.97345138e-01 ... -1.08319473e+00 -4.02242467e-02 7.59551227e-01] [ 1.90741271e-01 9.17530119e-01 -8.62756670e-01 ... -2.13528007e-01 -2.22299293e-01 1.57679290e-01] ... [ 9.75783944e-01 4.04611588e-01 3.94256525e-02 ... -7.26756334e-01 6.42060041e-01 -8.17020416e-01] [-4.27005559e-01 -8.69923472e-01 4.82326329e-01 ... -6.09160185e-01 -6.08012617e-01 -8.53388980e-02] [ 5.77846050e-01 -1.13273934e-01 4.61703777e-01 ... -9.64225769e-01 3.67660552e-01 -4.73323762e-01]]] [[[ 8.00159097e-01 -1.34438099e-02 1.35720706e+00 ... 2.99574256e-01 -2.69372523e-01 4.06718031e-02] [-1.40936661e+00 7.86190271e-01 -1.33045161e+00 ... 
-9.59583700e-01 1.04926676e-01 -1.12271917e+00] [-6.45921886e-01 8.20406824e-02 2.22044721e-01 ... 1.55409753e-01 -1.03121877e+00 1.71080381e-01] ... [ 4.84156460e-01 2.68497050e-01 3.79968822e-01 ... -5.83072066e-01 1.50218010e-01 -7.66987443e-01] [ 4.65415046e-02 1.05023451e-01 2.31279239e-01 ... -7.51368701e-01 -5.13976336e-01 -1.79937422e-01] [-4.73765999e-01 1.21279025e+00 -7.04268292e-02 ... -2.80752748e-01 6.24229014e-01 1.57703102e-01]] [[-1.81016743e-01 -1.37092522e-03 -1.89310163e-01 ... -3.25338803e-02 -4.87968951e-01 3.39158267e-01] [ 3.34465206e-01 4.68711376e-01 -1.23634195e+00 ... 5.58252990e-01 -1.78604767e-01 -6.30676985e-01] [-5.94613314e-01 -8.57800663e-01 4.20864850e-01 ... -8.49853277e-01 -3.70432258e-01 -3.26386631e-01] ... [-8.36440861e-01 3.61866236e-01 -3.29064697e-01 ... 1.34754449e-01 -1.02816594e+00 -2.36187518e-01] [-2.38361597e-01 9.65499401e-01 -8.67279232e-01 ... 8.39430392e-01 1.26635626e-01 3.07617307e-01] [-1.58287793e-01 -3.72456014e-01 -3.88338685e-01 ... 7.53448009e-01 -6.98906243e-01 3.61252159e-01]] [[ 8.69410872e-01 7.44114667e-02 9.12430510e-02 ... -7.59701133e-02 -1.80399895e-01 -2.48430803e-01] [-3.60142440e-01 -7.42546082e-01 6.56413972e-01 ... 7.91760385e-01 -3.58346313e-01 -8.60338449e-01] [-7.16650262e-02 -4.44414169e-01 -2.73797587e-02 ... -5.70362508e-01 -1.30314863e+00 -1.12684393e+00] ... [ 1.97550684e-01 -3.04651260e-01 1.89699337e-01 ... -1.12085879e+00 -5.56532383e-01 -6.39601529e-01] [-9.11390185e-01 7.88005888e-02 -2.73630977e-01 ... -9.12919581e-01 4.39712316e-01 -2.83133030e-01] [-5.78133345e-01 4.66584504e-01 9.92647111e-01 ... -8.90264273e-01 -1.44465518e+00 -5.22236109e-01]] ... [[ 8.82661462e-01 2.13448119e+00 -4.50855017e-01 ... 5.54579757e-02 -1.09084809e+00 5.47224581e-01] [-1.24299157e+00 2.48017216e+00 1.07436609e+00 ... 5.36608040e-01 -8.81748915e-01 -8.23899567e-01] [ 1.05215192e+00 -7.55531192e-01 2.60823742e-02 ... 7.78704211e-02 2.21928790e-01 -5.53457677e-01] ... 
[ 8.54507864e-01 -5.81598133e-02 -2.92715669e-01 ... -8.28341842e-02 9.25471708e-02 -4.68198717e-01] [-1.11771405e-01 3.05288225e-01 -4.36798155e-01 ... -9.60464180e-01 7.98449397e-01 -8.25133383e-01] [ 9.25353169e-01 -2.71540463e-01 4.85417515e-01 ... 1.57593474e-01 -2.37506583e-01 4.85662013e-01]] [[-2.10410669e-01 -9.28897202e-01 9.02477354e-02 ... 9.86744165e-01 6.53120995e-01 -3.32076460e-01] [ 8.96113753e-01 7.52022684e-01 3.29863727e-01 ... -1.28757834e+00 4.56317455e-01 -7.87215531e-01] [-6.40687346e-01 -7.56526366e-02 1.04579365e+00 ... 9.90479946e-01 -2.14909339e+00 8.04249048e-01] ... [ 4.80862260e-01 -1.05441344e+00 2.18387812e-01 ... -3.64221856e-02 1.00056565e+00 3.50780457e-01] [ 5.69687128e-01 1.27136338e+00 -9.01621059e-02 ... -1.04096341e+00 -1.12054758e-01 -7.17751861e-01] [-5.26403666e-01 9.01153207e-01 1.32145333e+00 ... -3.71890306e-01 2.03412819e+00 6.18300974e-01]] [[ 1.66383827e+00 4.80898708e-01 2.04093009e-01 ... -1.18838751e+00 -2.73592114e-01 7.31626868e-01] [-1.98201880e-01 5.97432613e-01 -8.07484150e-01 ... 1.82150924e+00 -5.15447974e-01 -2.25925297e-01] [ 5.14935553e-01 -7.95393810e-02 4.63605702e-01 ... -3.95625383e-01 1.22762179e+00 9.17322338e-01] ... [-5.98434031e-01 -4.34894145e-01 5.67775190e-01 ... -1.80145875e-02 9.49238002e-01 -1.23012757e+00] [ 1.21123981e+00 -3.64591092e-01 1.08313942e+00 ... 1.46016967e+00 1.36402249e+00 2.41665766e-01] [-2.20903888e-01 -3.69926572e-01 -2.10869133e-01 ... -8.46262813e-01 -8.96679580e-01 -4.35849935e-01]]] [[[ 3.29820871e-01 6.58328950e-01 -6.10131264e-01 ... -5.28584719e-01 1.20525110e+00 5.02521634e-01] [ 9.23983872e-01 9.26002115e-02 -1.73337132e-01 ... -5.08595258e-02 -8.10259223e-01 2.72393338e-02] [ 5.51513016e-01 4.17182148e-01 -1.27455384e-01 ... -3.65057811e-02 1.49333298e+00 -8.30237687e-01] ... [-9.25543666e-01 -5.85424364e-01 -1.57795951e-01 ... 5.34742415e-01 -1.38472068e+00 -2.50328988e-01] [-4.92672235e-01 -1.23541224e+00 7.70832837e-01 ... 
-7.94219851e-01 -6.05671585e-01 -2.13951349e-01] [-2.04833195e-01 9.24406409e-01 -6.03904665e-01 ... 1.55188298e+00 1.25798404e+00 6.79117292e-02]] [[-8.25161934e-01 -2.10660076e+00 3.31430286e-02 ... -5.48850536e-01 6.75134286e-02 -4.28311616e-01] [ 5.12209535e-01 8.78391027e-01 -1.06471467e+00 ... -7.27219641e-01 5.64770043e-01 2.85281867e-01] [-1.14897799e+00 1.84154272e-01 1.74878705e+00 ... -1.24039102e+00 6.42430305e-01 4.30191547e-01] ... [ 4.87384945e-01 -1.04567277e+00 4.21810806e-01 ... -6.65560514e-02 -4.52791810e-01 2.01172214e-02] [-2.94870168e-01 -1.25655365e+00 1.60213923e+00 ... 2.94289500e-01 4.01630193e-01 1.29234004e+00] [ 4.54004228e-01 -2.11143076e-01 -1.37941110e+00 ... -8.54367137e-01 1.70170441e-01 1.24014640e+00]] [[-2.01572210e-01 2.92499959e-01 7.06482291e-01 ... 1.79207191e-01 -6.66442037e-01 6.84326738e-02] [-4.85868841e-01 -5.41364133e-01 6.72812521e-01 ... 4.73746687e-01 -4.15290624e-01 3.52306843e-01] [ 1.07341635e+00 8.39455649e-02 -2.56218076e-01 ... 9.00958836e-01 6.34822696e-02 -2.94837445e-01] ... [-5.45911372e-01 -6.90364182e-01 9.11567926e-01 ... 5.83995163e-01 3.35407764e-01 1.22706580e+00] [-2.84969866e-01 3.15994501e-01 -2.56712347e-01 ... -2.25147784e-01 3.38744909e-01 2.84755796e-01] [-1.41009927e-01 4.14737105e-01 2.50555903e-01 ... -1.03242241e-01 7.82061815e-01 1.16185415e+00]] ... [[-2.81410307e-01 -4.08987880e-01 -8.15208852e-01 ... 1.21175694e+00 5.60047269e-01 -1.17132843e+00] [ 4.01156127e-01 -1.52839482e+00 -9.43428516e-01 ... -8.88955474e-01 1.86862528e+00 5.10694385e-01] [ 5.09854555e-01 5.25571704e-01 5.57653904e-01 ... 3.92915010e-01 -5.74691892e-01 -8.25800523e-02] ... [ 7.25684524e-01 -3.03134322e-01 7.36875981e-02 ... 6.71942592e-01 -7.14556500e-02 -4.12844531e-02] [-1.33811927e+00 -5.48526883e-01 8.36316682e-03 ... -4.22199577e-01 5.52497685e-01 2.23150373e-01] [-1.15894306e+00 2.99853772e-01 6.96715236e-01 ... 
2.37555467e-02 2.69916475e-01 -4.75956619e-01]] [[-3.21977645e-01 -3.27554792e-01 7.11255431e-01 ... -1.49958646e+00 -9.42302823e-01 5.31485617e-01] [-7.84589350e-03 -3.25673610e-01 -7.37782419e-01 ... -3.31949219e-02 5.95495924e-02 3.08107883e-01] [-8.77248272e-02 2.49570996e-01 -1.41742241e+00 ... 5.08695126e-01 2.12125078e-01 -3.24386746e-01] ... [ 9.16136265e-01 -3.04773241e-01 1.82904005e-01 ... 5.45205832e-01 9.49439779e-02 1.44711328e+00] [-2.98759013e-01 4.62316871e-01 1.90922722e-01 ... 6.75161302e-01 -7.31792688e-01 7.29258060e-01] [ 3.72673154e-01 -6.58603489e-01 6.55205369e-01 ... -1.11915672e+00 6.43138528e-01 5.60781956e-01]] [[ 1.10101509e+00 -2.90252566e-01 6.49208009e-01 ... 1.84543550e-01 8.77369761e-01 5.50024584e-02] [-4.54196483e-01 2.96279013e-01 -3.79342139e-01 ... -4.90620375e-01 1.18982518e+00 1.31998765e+00] [ 9.62858260e-01 -5.42008638e-01 -4.03119832e-01 ... 4.89396811e-01 6.76817536e-01 -4.26424533e-01] ... [-3.40589523e-01 -3.72085124e-02 -9.42481011e-02 ... 3.04612577e-01 -1.19807506e+00 -4.14971024e-01] [ 8.99105668e-01 3.03663075e-01 1.37210608e-01 ... -3.50128800e-01 -1.23947275e+00 -1.57140005e+00] [-4.31919634e-01 -7.89225519e-01 -2.43196040e-01 ... 7.67295063e-01 -7.59900451e-01 1.04000294e+00]]] [[[-4.85757798e-01 1.30493820e+00 1.07825387e+00 ... -7.40873292e-02 3.51895630e-01 -1.41376173e+00] [ 3.93560350e-01 -1.81375399e-01 -7.59947598e-02 ... -9.69108641e-01 -5.50306678e-01 -7.87425816e-01] [-4.68417525e-01 2.40483612e-01 7.78205454e-01 ... 4.65991408e-01 9.08604860e-01 7.18763947e-01] ... [ 3.27320874e-01 8.29094425e-02 1.16703287e-01 ... 2.31333971e-02 -7.09478974e-01 -3.37852150e-01] [-6.64077818e-01 -1.52021098e+00 3.11413288e-01 ... -9.46327209e-01 1.22917265e-01 7.18671381e-01] [-9.28149819e-01 7.20296949e-02 6.76130831e-01 ... 1.59651011e-01 -9.96583700e-02 9.75197554e-01]] [[-8.11046064e-01 6.46267474e-01 -1.89470828e-01 ... 
-1.57191622e+00 -5.35166323e-01 -1.23210348e-01] [ 1.09320557e+00 6.42178893e-01 7.34786749e-01 ... -5.29985309e-01 1.28368235e+00 -5.64869642e-01] [ 3.76410961e-01 7.66913772e-01 -3.56724598e-02 ... -6.68030620e-01 -6.84251428e-01 -5.50993681e-01] ... [-8.95892501e-01 -7.79683769e-01 -7.71337509e-01 ... 8.03720236e-01 5.21161795e-01 -5.08937240e-01] [-6.70715809e-01 6.86793625e-02 5.96884131e-01 ... -1.78819627e-01 -1.15072572e+00 -1.18760431e+00] [ 1.00210571e+00 2.21447796e-02 -4.23735917e-01 ... 6.10911250e-01 4.80449766e-01 1.49499401e-01]] [[-3.78346235e-01 4.61897522e-01 1.85238779e-01 ... -1.07273781e+00 -3.68903011e-01 -3.94593447e-01] [ 2.41322175e-01 -8.49663794e-01 -7.45591074e-02 ... -2.78317928e-01 4.70736399e-02 -5.67403138e-01] [-7.16156065e-02 8.20239604e-01 -8.82647336e-02 ... -1.67693853e-01 -1.28995764e+00 -3.71380925e-01] ... [-1.86451167e-01 -4.78183985e-01 1.79496080e-01 ... -5.44911399e-02 -4.53157008e-01 1.09054983e+00] [-1.11205924e+00 -7.81404912e-01 -1.08412899e-01 ... 3.92096460e-01 1.07858551e+00 -8.40291321e-01] [ 2.35196069e-01 -6.77382648e-01 -1.03734240e-01 ... 2.51932144e-01 8.43257904e-01 -1.19553491e-01]] ... [[ 3.27068686e-01 2.75854349e-01 -1.15510009e-01 ... -5.83331466e-01 -3.79359663e-01 -5.89546025e-01] [ 2.55752861e-01 -1.04031849e+00 1.05322087e+00 ... -1.24690628e+00 -5.97068667e-02 8.45095277e-01] [ 2.65892595e-01 -4.02167380e-01 1.79016575e-01 ... 3.41993570e-01 1.27099857e-01 3.91160280e-01] ... [-1.07738256e+00 1.89160734e-01 1.35714030e+00 ... -2.71721900e-01 7.56469443e-02 -1.87830061e-01] [-3.30893844e-01 -2.27431446e-01 2.90924460e-01 ... -1.41000777e-01 6.16689265e-01 -7.54314184e-01] [ 2.94852138e-01 -2.95473397e-01 6.08056605e-01 ... 5.91136754e-01 -6.45402789e-01 -2.40590855e-01]] [[-2.59033412e-01 3.61122489e-01 7.37904459e-02 ... 5.49043179e-01 -1.20382142e+00 1.59509909e+00] [ 6.76066339e-01 8.13469827e-01 2.73445189e-01 ... 
6.81948900e-01 -1.02815628e+00 7.41403461e-01] [ 8.27924669e-01 7.93904662e-01 1.49980143e-01 ... -7.58638740e-01 8.16908956e-01 9.22152460e-01] ... [-9.96244788e-01 1.12026525e+00 -2.50752777e-01 ... 7.78915346e-01 5.84602594e-01 -1.24962735e+00] [-1.85821280e-01 -5.81872702e-01 1.17232716e+00 ... 2.56841391e-01 -2.57159472e-02 1.61846399e+00] [ 4.30804193e-01 -4.92475539e-01 -3.23796980e-02 ... -7.73257732e-01 -2.68915385e-01 3.99719179e-02]] [[-6.63450122e-01 -7.76658416e-01 6.11045837e-01 ... 1.37995934e+00 -1.46315008e-01 1.53799187e-02] [-1.57792187e+00 7.24037766e-01 9.44792867e-01 ... 7.47159839e-01 -5.97169280e-01 -9.27300036e-01] [ 7.69217134e-01 3.56551945e-01 8.79199624e-01 ... -8.28989744e-02 4.12196487e-01 2.01050490e-01] ... [-1.29031849e+00 -5.80548167e-01 4.37760681e-01 ... 4.18247402e-01 1.59710991e+00 -3.15393567e-01] [-5.96948922e-01 -7.56629705e-01 -1.72270536e-01 ... -1.05564344e+00 2.03849271e-01 -4.14340258e-01] [-8.73149276e-01 -9.41804111e-01 -1.03963208e+00 ... -2.68475175e-01 -5.05048275e-01 1.05394274e-01]]] [[[-6.52809441e-02 -6.12257838e-01 2.22441912e+00 ... -4.11093421e-02 3.66051197e-01 -1.02083707e+00] [ 1.44873428e+00 1.00448728e+00 1.31028569e+00 ... 1.08193481e+00 1.03979445e+00 1.11725879e+00] [-3.49533528e-01 3.28470498e-01 7.53423154e-01 ... 2.31162146e-01 8.28419209e-01 -2.21696049e-01] ... [ 1.35835087e+00 4.24268395e-02 2.06963450e-01 ... -6.11714303e-01 -8.76690567e-01 -3.99347842e-01] [ 2.04726309e-01 -9.54669058e-01 1.68850291e+00 ... -5.34385085e-01 -3.83586317e-01 7.83713043e-01] [ 2.25865304e-01 8.04606140e-01 -4.15909171e-01 ... 5.65731764e-01 2.73397326e-01 -4.44366217e-01]] [[-3.74556988e-01 6.86740220e-01 -1.98371217e-01 ... 3.47148597e-01 -6.08046949e-01 2.24059924e-01] [-8.27668607e-02 8.93643439e-01 1.54626143e+00 ... -4.87234086e-01 2.21981123e-01 4.39054161e-01] [-1.02739644e+00 1.37303263e-01 1.08254540e+00 ... -4.44758207e-01 1.42295241e-01 3.68090421e-01] ... 
[ 4.60447818e-01 -3.77617091e-01 -1.61690223e+00 ... -7.54993558e-01 -4.30901945e-02 -9.28813279e-01] [ 2.40219086e-01 -8.15508664e-01 -1.02661514e+00 ... -7.41490304e-01 2.30335355e-01 -7.61147618e-01] [-8.16925228e-01 1.01076329e+00 3.09590369e-01 ... -4.72834080e-01 4.55124229e-01 5.56937814e-01]] [[ 7.57485867e-01 -5.71373403e-01 8.13462794e-01 ... -7.21674785e-02 -1.55344534e+00 -1.92008317e-01] [-1.70440286e-01 -4.37385082e-01 -2.76548862e-01 ... -4.11814414e-02 -8.84692907e-01 -2.31583387e-01] [-2.74510056e-01 1.40112829e+00 -1.44536227e-01 ... 6.05884016e-01 -3.72025549e-01 -3.63909036e-01] ... [-2.65720431e-02 2.45969575e-02 1.55259103e-01 ... -1.13984451e-01 -7.75140598e-02 1.29973125e+00] [-4.52287793e-02 -1.12862194e+00 -9.84018683e-01 ... 5.83905637e-01 -9.85282473e-03 5.18894017e-01] [-6.46157742e-01 5.46970069e-02 5.87990105e-01 ... 4.32744503e-01 4.75250274e-01 8.75465035e-01]] ... [[ 4.63086009e-01 3.25006276e-01 -6.48775771e-02 ... -1.12282425e-01 -5.52161038e-01 -9.42425370e-01] [ 1.21214283e+00 -6.87302649e-01 1.75526366e-01 ... -1.10485159e-01 3.30541372e-01 -3.89161050e-01] [-5.10087192e-01 5.15894592e-01 3.43573064e-01 ... -3.44193608e-01 -2.48715430e-01 1.05090477e-01] ... [-3.21608186e-01 1.99176818e-01 3.77863497e-01 ... 1.17192447e+00 -5.22577643e-01 -6.46334067e-02] [ 2.33492032e-01 1.05160010e+00 -5.95695138e-01 ... -1.76063120e-01 3.16746175e-01 -7.35899433e-02] [ 3.47214252e-01 -5.63885570e-01 -1.10950625e+00 ... -1.56407312e-01 4.66747761e-01 7.56038249e-01]] [[ 1.19184363e+00 6.68171287e-01 -9.23371792e-01 ... 7.14449227e-01 8.99558008e-01 -1.34725884e-01] [ 8.97560775e-01 -5.13527751e-01 1.21273577e+00 ... -1.21398335e-02 -1.96891874e-01 5.37678063e-01] [-1.41565549e+00 1.38133317e-02 9.08670008e-01 ... 1.50011134e+00 -9.07824874e-01 6.09561026e-01] ... [ 1.90218627e-01 5.46415687e-01 -1.23458195e+00 ... -5.96581884e-02 5.27215660e-01 1.81557226e+00] [ 2.05109209e-01 -5.54703236e-01 -2.49768570e-01 ... 
-1.44359994e+00 4.01770383e-01 -1.16714978e+00] [ 8.44673753e-01 3.78560692e-01 6.09581053e-01 ... 1.76359728e-01 -5.04181921e-01 -9.50185478e-01]] [[ 4.90531683e-01 -5.75232089e-01 -8.37762594e-01 ... -1.75189126e+00 -8.42032790e-01 4.80231613e-01] [-1.11436224e+00 -2.37217605e-01 3.45287055e-01 ... 1.75134130e-02 5.93651652e-01 -4.16479588e-01] [-7.04183206e-02 -6.34220421e-01 -7.00805545e-01 ... -1.44938707e+00 2.90974140e-01 -5.71139693e-01] ... [ 4.12436426e-01 -1.79108456e-01 9.40835774e-01 ... 1.33055910e-01 1.35894433e-01 -4.63987701e-02] [-6.59304380e-01 1.29389083e+00 -8.53489280e-01 ... -2.22236171e-01 -2.82893389e-01 1.27510667e+00] [ 6.00675903e-02 -2.08668053e-01 9.58234131e-01 ... 5.46341121e-01 6.18582904e-01 -7.11963296e-01]]]] [[[[ 1.71806049e+00 -7.04805553e-01 6.50813460e-01 ... 1.36086911e-01 -8.61348748e-01 -6.04141951e-01] [-1.33267319e+00 -1.17900455e+00 -1.98273882e-02 ... -5.41576862e-01 4.17020693e-02 5.23332179e-01] [-5.85374951e-01 1.40473533e+00 7.79575050e-01 ... 2.26903051e-01 -1.71032250e-01 -5.91574848e-01] ... [-7.69972265e-01 8.59176338e-01 -2.48769298e-01 ... -8.59602392e-01 1.20805967e+00 -3.36864114e-01] [ 1.05920470e+00 -4.18508314e-02 3.01199436e-01 ... 4.38051701e-01 1.78133631e+00 -6.19027801e-02] [ 1.32095528e+00 -9.49528396e-01 -1.28720790e-01 ... -3.47931176e-01 -4.09350991e-01 -1.48408294e+00]] [[-4.52172756e-02 -2.87423670e-01 -2.16152716e+00 ... -5.02525866e-01 3.91134679e-01 -1.57696116e+00] [ 1.15772676e+00 -1.89259395e-01 6.81818962e-01 ... -3.35930258e-01 -4.50179756e-01 7.54949868e-01] [-4.25879210e-01 -3.84988785e-01 -3.22987944e-01 ... 3.16370040e-01 2.44657099e-01 8.60610187e-01] ... [-1.68933845e+00 2.06976026e-01 1.00341678e-01 ... 6.97220787e-02 -7.82996535e-01 -7.32402653e-02] [ 3.43484253e-01 -2.28263110e-01 2.13225797e-01 ... 7.90905431e-02 -3.06286484e-01 9.18982089e-01] [ 2.60552794e-01 5.23110807e-01 -7.09307134e-01 ... 
-4.81093794e-01 -5.82144976e-01 1.50316246e-02]] [[-3.84626210e-01 1.68053791e-01 1.15045619e+00 ... -2.55142301e-01 1.42490923e+00 -1.30377674e+00] [ 7.36703873e-01 4.61661279e-01 7.13474393e-01 ... -1.23491228e+00 8.44450772e-01 -1.21327746e+00] [ 4.08930093e-01 -2.26483509e-01 3.14922296e-02 ... -4.34378505e-01 -6.55376911e-01 -4.83734906e-03] ... [-3.44358742e-01 -3.65760744e-01 1.43618727e+00 ... -1.21154273e+00 5.12775071e-02 5.55417299e-01] [ 4.10033524e-01 1.92705959e-01 1.32327870e-01 ... 6.14407361e-01 7.06587374e-01 -1.42376935e+00] [-5.30100942e-01 -1.11252797e+00 5.03462911e-01 ... 6.38715208e-01 1.05813336e+00 1.96914762e-01]] ... [[-3.10921874e-02 1.19960709e-02 -3.33239790e-03 ... -3.86791915e-01 -1.13659859e+00 7.45867431e-01] [ 1.01525986e+00 -1.22247410e+00 1.74869187e-02 ... 2.85797641e-02 -2.33648896e-01 -9.30934399e-02] [-5.23255885e-01 2.26201534e-01 3.25151116e-01 ... 1.19867451e-01 -2.58096695e-01 -9.17085111e-01] ... [-2.85559684e-01 1.03463602e+00 2.55467266e-01 ... -4.87432867e-01 1.24179375e+00 3.94820035e-01] [-3.02675128e-01 5.37378080e-02 -1.82846710e-01 ... 5.14010429e-01 -6.85709536e-01 -5.17042339e-01] [-2.92531013e-01 8.71587574e-01 -4.26300228e-01 ... -7.38691270e-01 4.88917381e-01 4.13882136e-02]] [[ 7.68970624e-02 -3.82635683e-01 3.96916449e-01 ... 3.82851452e-01 8.12017918e-01 3.82417232e-01] [-2.41376936e-01 -5.90430871e-02 -1.11403131e+00 ... -2.03504157e+00 -8.23967516e-01 1.35504276e-01] [ 7.07635701e-01 -1.02582142e-01 -2.68478375e-02 ... 1.32836020e+00 1.70915455e-01 3.16388160e-01] ... [-6.66023865e-02 1.03191698e+00 4.76059914e-01 ... -7.60092556e-01 -1.24654818e+00 -4.21036839e-01] [ 1.20078707e+00 -5.19977927e-01 7.84994245e-01 ... -1.83480906e+00 1.65730000e+00 9.30956472e-03] [ 2.35923335e-01 1.59467474e-01 -6.64107502e-01 ... -3.52793127e-01 7.60437608e-01 1.48266673e+00]] [[ 7.91185915e-01 3.74995619e-01 -6.40087366e-01 ... 
5.83228290e-01 -2.05914646e-01 7.12497056e-01] [-6.24488235e-01 -1.60324502e+00 5.44673145e-01 ... -1.23133838e-01 5.97727180e-01 -4.88816559e-01] [ 9.97083485e-01 -4.13846314e-01 6.90621674e-01 ... 1.64736956e-01 1.40666887e-01 1.27338365e-01] ... [ 1.36776209e+00 1.25872448e-01 1.53796887e+00 ... -1.31799877e-02 -1.23776627e+00 3.82648230e-01] [-3.37193996e-01 3.68722379e-01 9.30413753e-02 ... -4.60836440e-01 7.44586661e-02 1.05148125e+00] [-1.91456109e-01 5.02043366e-02 -1.06568313e+00 ... 2.55848289e-01 1.64863840e-01 1.21605432e+00]]] [[[-1.22198105e+00 5.45151472e-01 8.61678720e-01 ... 7.84738362e-01 -2.77921170e-01 -1.09736824e+00] [-1.66053206e-01 -8.23024631e-01 7.56536245e-01 ... -4.71392632e-01 3.84965092e-01 -3.94317389e-01] [-4.28750694e-01 -7.01301694e-01 2.26280287e-01 ... -4.84995931e-01 -2.48842910e-01 -1.05822515e+00] ... [-6.02250099e-01 7.49982715e-01 2.56977826e-01 ... 4.62560236e-01 -3.39376032e-01 -4.21228260e-01] [ 5.70830703e-01 -1.20261079e-03 -1.32470071e-01 ... 9.85900819e-01 -2.13829987e-02 9.08108592e-01] [ 6.22043848e-01 4.65892345e-01 -3.14804137e-01 ... 2.35046700e-01 1.80162418e+00 5.08287311e-01]] [[ 1.44814897e+00 -1.26463294e+00 -1.75301239e-01 ... 3.96998435e-01 -6.70257211e-01 -1.92505762e-01] [ 4.85448569e-01 8.45654070e-01 8.39516819e-02 ... 6.91731453e-01 2.33982369e-01 -1.90104991e-02] [ 7.87630498e-01 1.32694161e+00 -6.78847790e-01 ... 6.54042363e-01 -2.41956770e-01 -1.53082299e+00] ... [-5.64216375e-01 9.30670917e-01 2.18834504e-01 ... -2.00463742e-01 4.83268909e-02 5.48642933e-01] [ 8.47435057e-01 3.84508938e-01 5.40309818e-03 ... 3.19817454e-01 -2.00477034e-01 -1.36272597e+00] [-8.55930090e-01 9.24700439e-01 -1.48774397e+00 ... -4.73287277e-04 1.24308622e+00 -3.05081129e-01]] [[-8.54983151e-01 1.07707679e+00 -2.58986384e-01 ... -2.99247324e-01 3.75394583e-01 -8.04258943e-01] [ 7.87051246e-02 1.34124815e-01 -4.77652758e-01 ... 
-1.14904118e+00 -1.50422835e+00 -9.00161386e-01] [ 2.31711045e-01 -5.29192805e-01 -2.17125878e-01 ... 4.08435136e-01 5.02269328e-01 -6.63457215e-01] ... [ 1.38554290e-01 8.02748382e-01 1.79693207e-01 ... -4.61319059e-01 -2.26041913e-01 1.36551833e+00] [ 6.56687140e-01 -8.49899501e-02 9.75641429e-01 ... 5.78921974e-01 3.99302095e-01 -1.64350495e-01] [ 4.93839622e-01 3.34462494e-01 -4.17624354e-01 ... 3.38252455e-01 2.61198789e-01 -9.29192364e-01]] ... [[ 5.45884185e-02 -5.24565458e-01 -1.31450236e+00 ... 8.56046200e-01 -3.94217484e-02 -6.29789889e-01] [-8.26953165e-03 -4.85386074e-01 2.30280802e-01 ... -1.13668776e+00 2.80981779e-01 -1.38392463e-01] [-7.14817941e-01 1.09757984e+00 -8.07838798e-01 ... -5.30305028e-01 -5.27755320e-01 3.03589821e-01] ... [ 2.04188466e-01 -4.07339066e-01 -7.31559634e-01 ... 1.11364293e+00 8.71076703e-01 3.22123140e-01] [ 1.74111322e-01 -2.51675341e-02 -1.37531221e-01 ... -1.31117150e-01 1.19957078e+00 8.60338926e-01] [-6.76545918e-01 5.48259854e-01 -7.51943588e-01 ... 4.09505606e-01 2.19838724e-01 -7.67035306e-01]] [[ 1.98503748e-01 -8.82645488e-01 7.62144387e-01 ... 2.27382898e-01 -2.89575636e-01 -2.00716764e-01] [-8.04750741e-01 2.15612575e-01 2.28253156e-01 ... 3.62756640e-01 8.22733715e-02 -4.89682704e-01] [-9.17517841e-01 -6.09507024e-01 -1.31487834e+00 ... 3.40681732e-01 -4.87116098e-01 -7.46425211e-01] ... [-7.11084843e-01 6.63806573e-02 6.41889691e-01 ... 5.58528244e-01 -3.49886477e-01 4.55965132e-01] [-3.63637239e-01 -1.05685031e+00 3.96310121e-01 ... -5.32552958e-01 -6.24449193e-01 8.24957490e-01] [-4.28934216e-01 -1.88676134e-01 6.57463849e-01 ... 3.10630761e-02 -5.82199335e-01 -6.84848368e-01]] [[ 3.05216998e-01 4.09941167e-01 -1.21975377e-01 ... -3.37421298e-01 8.08351040e-01 -1.28268272e-01] [ 4.45704162e-02 -3.38024855e-01 -3.66602689e-01 ... 2.68091857e-01 -9.31110978e-01 1.13284469e-01] [ 7.57570803e-01 -8.57478231e-02 6.52883947e-01 ... -9.07487497e-02 -1.28322765e-01 -1.24636568e-01] ... 
[ 3.60786825e-01 3.92224461e-01 2.63978988e-01 ... -7.73243248e-01 -7.89898559e-02 6.13852479e-02] [-5.93607247e-01 -1.30149114e+00 -1.91567987e-01 ... -9.70856607e-01 2.06709519e-01 -5.85448205e-01] [ 7.28409469e-01 7.75459409e-01 -5.63302219e-01 ... 4.59488392e-01 -2.74627879e-02 9.83304441e-01]]] [[[-9.19193253e-02 -3.01707327e-01 -9.36622322e-01 ... -1.42012775e+00 -3.72647971e-01 -1.16243415e-01] [-3.11936378e-01 1.46247789e-01 7.39466473e-02 ... -6.93385243e-01 -5.65507531e-01 1.20810890e+00] [ 3.18724841e-01 -8.62880766e-01 4.23075676e-01 ... 6.23933733e-01 2.29539260e-01 -2.67010450e-01] ... [ 6.28016233e-01 -1.29504696e-01 8.16963553e-01 ... -6.74217701e-01 1.17885876e+00 -1.01954436e+00] [-1.07391834e+00 -1.21363148e-01 -1.23262429e+00 ... -1.27051151e+00 2.97489762e-01 -8.57634604e-01] [ 2.11860567e-01 7.50825405e-02 1.92657039e-01 ... 1.18126132e-01 3.28047156e-01 8.45346987e-01]] [[-3.98490340e-01 -1.46798596e-01 -3.53511944e-02 ... 4.88125771e-01 -2.36675963e-02 -1.08254111e+00] [ 6.08150840e-01 -2.91163892e-01 1.16101444e+00 ... 2.37806335e-01 1.16839814e+00 6.80439115e-01] [ 5.50975740e-01 4.07682985e-01 3.44709307e-02 ... 3.06492914e-02 8.95069420e-01 -2.31350064e-01] ... [-6.18867099e-01 2.91104168e-01 -1.04782891e+00 ... 3.68394226e-01 3.61799896e-01 6.53806468e-03] [ 2.02864427e-02 5.20685971e-01 -3.60832840e-01 ... 3.33595812e-01 -1.29921067e+00 6.44370615e-01] [ 7.87911117e-01 -5.35468459e-01 -8.89779031e-01 ... 4.00973916e-01 3.45583558e-01 3.52612615e-01]] [[ 3.87830406e-01 -6.25689328e-01 -1.28189534e-01 ... -9.16931033e-01 -1.14771962e-01 1.53522205e+00] [ 2.56187677e-01 1.08112860e+00 2.43542790e-01 ... -2.02607608e+00 1.56872761e+00 -1.03020608e+00] [-8.58796358e-01 -7.30878532e-01 -4.90485847e-01 ... 6.45110071e-01 9.56209123e-01 -6.67073071e-01] ... [ 1.64414734e-01 -6.46543726e-02 5.45936167e-01 ... -5.68535745e-01 -4.54746068e-01 -1.12725461e+00] [ 1.16563523e+00 -2.79682100e-01 5.70411623e-01 ... 
-7.66193986e-01 1.83414960e+00 9.23011661e-01] [ 5.87569356e-01 -1.74196195e-02 9.44543004e-01 ... 1.10528326e+00 7.81862676e-01 3.36467147e-01]] ... [[-6.49552524e-01 -9.97593760e-01 1.18736959e+00 ... 7.90802777e-01 4.00850683e-01 -8.84649679e-02] [ 5.11337996e-01 7.23027825e-01 -2.54313648e-01 ... 2.82207966e-01 -1.62996221e+00 -7.24945545e-01] [ 4.36228037e-01 -8.27830434e-01 4.68381532e-02 ... 5.16485751e-01 6.22726008e-02 1.53518200e-01] ... [ 1.71251357e-01 3.97528678e-01 4.88604568e-02 ... -3.16512883e-01 9.23624098e-01 3.69887203e-01] [ 1.55698329e-01 -6.07713938e-01 1.64454710e+00 ... -1.55915177e+00 7.89905310e-01 -4.16138560e-01] [-7.43500888e-01 -7.40686506e-02 2.75217861e-01 ... 7.16695637e-02 -6.48023114e-02 1.31415522e+00]] [[-1.17729485e+00 -9.32448387e-01 -1.48967609e-01 ... 1.39511004e-01 5.67576766e-01 9.57566127e-02] [-4.51244175e-01 -1.06469166e+00 -4.43629920e-01 ... 9.70507205e-01 -5.77625573e-01 -1.07958645e-01] [-6.31567538e-01 4.11963165e-01 -3.77218604e-01 ... 4.00172472e-01 4.75711673e-01 1.09613204e+00] ... [ 9.35814619e-01 5.81286907e-01 -7.33600080e-01 ... -3.24107111e-01 2.75144339e-01 8.15180838e-01] [-1.68218923e+00 3.21203977e-01 -2.54429281e-02 ... 2.26030096e-01 4.74886775e-01 -5.11092722e-01] [-2.21256688e-02 5.72679698e-01 1.60485232e+00 ... 1.18915856e+00 -1.54716086e+00 -1.02035981e-02]] [[-6.00017369e-01 -7.59150028e-01 4.05531645e-01 ... 1.08419800e+00 -1.40634108e+00 3.78554136e-01] [-1.16632544e-01 3.82654518e-01 -9.94084537e-01 ... -3.07572950e-02 -9.25629660e-02 1.33893061e+00] [-2.07386762e-01 -3.32497388e-01 -2.49247283e-01 ... -9.17483628e-01 3.03431749e-01 -1.77012846e-01] ... [-3.03604722e-01 7.20952034e-01 -6.34184897e-01 ... -2.20050052e-01 -4.91854966e-01 9.51192901e-02] [-3.80743057e-01 -5.47228269e-02 2.11527482e-01 ... -3.22066486e-01 2.77679414e-01 2.71219939e-01] [-1.25576162e+00 -3.91973965e-02 -4.84109074e-01 ... 
3.58917445e-01 1.73427105e-01 5.11911094e-01]]] [[[-7.37009048e-01 -9.27482724e-01 6.04928374e-01 ... -6.42484844e-01 -1.05637586e+00 4.05813545e-01] [ 3.15622330e-01 4.60179061e-01 -8.47731382e-02 ... 1.07722707e-01 -5.89892924e-01 -8.24409187e-01] [-1.39698803e+00 5.10435775e-02 9.14081037e-01 ... 7.25592017e-01 8.90017331e-01 -2.20876068e-01] ... [-4.53536771e-02 -7.47512162e-01 -3.01917225e-01 ... -7.21870542e-01 -5.19851506e-01 -1.91853598e-01] [ 6.67500198e-02 -1.17471270e-01 2.11418718e-01 ... 8.90828311e-01 8.26552510e-01 -9.42479491e-01] [-5.99316120e-01 1.64138407e-01 -2.97063261e-01 ... 1.14305031e+00 1.08048737e+00 -1.71693996e-01]] [[-1.75160050e-01 -7.27456212e-01 -5.42301357e-01 ... -3.42507690e-01 -7.08954334e-02 -1.55286849e-01] [-3.81343722e-01 -2.54601330e-01 -1.16540670e+00 ... 4.14958507e-01 8.64860535e-01 5.09772956e-01] [ 4.51756239e-01 6.13215379e-02 3.48925918e-01 ... -1.06365693e+00 5.69085360e-01 5.22401094e-01] ... [ 1.02463877e+00 -9.77819562e-01 6.45319968e-02 ... -2.04023030e-02 9.66132283e-01 3.97493899e-01] [-1.76756799e-01 -1.04567432e+00 -1.49316561e+00 ... 3.40860784e-01 -1.04770049e-01 1.52395621e-01] [-5.85814774e-01 2.57011741e-01 -6.57737136e-01 ... -1.41629651e-01 -1.11324139e-01 -3.36813033e-01]] [[ 3.42807740e-01 5.58854997e-01 -2.35131264e-01 ... 1.05569553e+00 1.99529573e-01 5.06056666e-01] [ 3.08883358e-02 5.06529391e-01 1.01755357e+00 ... 1.76814184e-01 1.95919907e+00 -1.13451111e+00] [ 7.66078770e-01 -8.95727277e-01 1.08037090e+00 ... 1.03097355e+00 8.48145008e-01 7.72545516e-01] ... [ 9.25482750e-01 5.03738880e-01 2.74349880e+00 ... 5.83872378e-01 -2.37058267e-01 -3.88154611e-02] [-1.81818143e-01 1.50157541e-01 -1.10087383e+00 ... 1.04869819e+00 -6.07808352e-01 -4.52601880e-01] [ 2.23204866e-01 3.25600896e-03 -1.07409132e+00 ... -3.46666127e-01 -1.06132638e+00 -4.24403138e-02]] ... [[ 2.96832770e-01 8.23813200e-01 -6.58825412e-03 ... 
5.82607746e-01 -1.78904772e-01 9.34451997e-01] [ 8.51044357e-01 -1.46581322e-01 3.98078322e-01 ... -2.24856094e-01 1.32096246e-01 1.94304228e-01] [-4.56353515e-01 -9.87109900e-01 -3.55937667e-02 ... 1.11168027e-01 -9.78105247e-01 -2.38746554e-01] ... [-3.96304280e-02 -5.72999597e-01 -8.28418016e-01 ... 9.81415436e-02 3.12631615e-02 4.23896015e-01] [-8.54415596e-01 -8.70269775e-01 -3.01323086e-01 ... -9.46593404e-01 -1.14284325e+00 6.67722404e-01] [-5.73031545e-01 -6.69707954e-01 8.50365400e-01 ... -1.65608037e+00 -3.91963601e-01 -1.32529169e-01]] [[ 6.12505794e-01 5.43973083e-03 1.50613105e+00 ... -8.72354507e-02 -2.07088208e+00 -2.93565154e-01] [-1.84280086e+00 -9.65665653e-02 -8.71296108e-01 ... -5.83475709e-01 8.80885124e-01 -5.82688972e-02] [ 2.88863480e-01 1.84843779e+00 1.67149723e+00 ... 4.69198227e-01 -6.09841585e-01 -3.63429040e-01] ... [-4.32620287e-01 -6.72707558e-01 7.52212822e-01 ... -8.95239770e-01 -5.15981913e-01 5.58980167e-01] [ 3.93741280e-02 -2.67965376e-01 -2.17459917e-01 ... 2.27440149e-02 -3.65014672e-01 9.17122588e-02] [-6.98265195e-01 5.40698208e-02 4.14579779e-01 ... 9.02682543e-01 -7.03150360e-03 -2.81215668e-01]] [[ 1.12016630e+00 -7.45990634e-01 5.12974322e-01 ... -1.27636147e+00 -1.20682549e+00 1.20747790e-01] [ 4.76066083e-01 1.19445646e+00 5.03251493e-01 ... 1.98909938e-01 9.01319012e-02 -6.97026372e-01] [ 7.74428725e-01 5.30931532e-01 7.64708996e-01 ... 1.52401912e+00 -6.85725957e-02 -2.52417117e-01] ... [-1.71641254e+00 -1.02837694e+00 -5.59934855e-01 ... -1.65308081e-02 -5.70712745e-01 1.00584619e-01] [ 3.54217321e-01 1.48787570e+00 -6.50611222e-01 ... -5.42542219e-01 -3.23926687e-01 3.79510283e-01] [ 1.38916993e+00 -1.12552965e+00 -6.64639711e-01 ... 1.37908328e+00 -5.25618613e-01 -6.83237493e-01]]] [[[-9.44348276e-01 -6.25639319e-01 7.31484771e-01 ... 6.26094282e-01 -1.72709584e-01 7.85534859e-01] [ 3.97987962e-01 -2.22621784e-01 -1.41765833e+00 ... 
8.32042336e-01 3.02727371e-01 -7.33821094e-01] [-5.56051126e-03 6.10079942e-03 1.15530133e+00 ... -2.28443399e-01 -3.83489579e-01 8.80471393e-02] ... [ 4.75813299e-01 -1.43702638e+00 -3.28968823e-01 ... -2.04623908e-01 1.80191040e-01 -1.13742185e+00] [-2.03685775e-01 -3.48359108e-01 4.57174361e-01 ... 2.49258295e-01 8.88676122e-02 2.22461835e-01] [-2.21018996e-02 -9.29308712e-01 -1.11854708e+00 ... 5.80691695e-01 2.80531675e-01 -3.66346985e-02]] [[-1.23676129e-01 6.30608976e-01 -8.99730325e-01 ... 1.93997908e+00 6.15733504e-01 -4.40432914e-02] [-1.09799027e+00 -3.19732547e-01 -8.58158231e-01 ... -1.07070518e+00 -1.04373181e+00 -8.36720049e-01] [-1.59632993e+00 -6.59377754e-01 2.96487004e-01 ... 6.67902827e-01 -9.25184131e-01 -7.99043357e-01] ... [-7.12121606e-01 3.01412165e-01 -1.51143596e-01 ... 4.40566361e-01 6.50814652e-01 7.77401507e-01] [ 4.02119994e-01 -1.23170280e+00 -5.00246823e-01 ... 9.67530429e-01 -1.32554042e+00 -4.00755405e-02] [ 2.09720775e-01 -1.43704027e-01 3.86428505e-01 ... 1.18375473e-01 4.46322054e-01 1.87774003e+00]] [[ 1.25423515e+00 1.57783937e+00 -6.87691927e-01 ... 4.08525944e-01 2.05261970e+00 1.82645954e-03] [-1.26342386e-01 -2.79455125e-01 -7.49739945e-01 ... -5.46999633e-01 -2.97930300e-01 3.67038131e-01] [ 3.50564480e-01 -6.27745315e-02 5.39343774e-01 ... 6.69145644e-01 -8.98289323e-01 5.89963675e-01] ... [-5.76152682e-01 -1.43858850e+00 -3.50299478e-01 ... 4.01507318e-02 -5.01080632e-01 3.25595617e-01] [-4.79590952e-01 -1.38440039e-02 -8.70067835e-01 ... 3.64259124e-01 -3.36353242e-01 -2.07328871e-01] [-1.21744168e+00 -1.98720932e-01 -7.13623047e-01 ... -3.48470330e-01 4.24477041e-01 4.05846424e-02]] ... [[ 3.48194987e-01 -7.18202651e-01 2.27233782e-01 ... 1.87802184e+00 1.34868026e-02 4.01222229e-01] [-5.61033845e-01 -8.87899578e-01 4.29514766e-01 ... -7.57470012e-01 4.07028705e-01 8.46411169e-01] [-2.01526672e-01 -1.30803898e-01 -3.75110582e-02 ... -1.28611431e-01 -1.31002617e+00 7.96802580e-01] ... 
[-1.03282094e+00 -7.37876892e-01 -4.39756662e-01 ... -2.79037297e-01 4.98305887e-01 -4.45814691e-02] [-2.48990104e-01 6.17269754e-01 8.92236531e-01 ... -5.18217683e-01 4.98805404e-01 -8.85690391e-01] [-1.35361159e+00 -4.01385009e-01 7.52029046e-02 ... -7.08491728e-02 2.89082434e-02 5.99962115e-01]] [[-8.96356523e-01 1.37951970e+00 7.13232577e-01 ... -1.51486367e-01 -5.38241804e-01 -6.00080192e-01] [ 5.71121097e-01 5.56612670e-01 7.27847368e-02 ... 2.72800863e-01 -1.05325907e-01 -7.79260099e-01] [ 1.05124451e-02 -1.84551552e-01 -2.07784325e-01 ... 2.06100479e-01 1.46118164e+00 -8.76485467e-01] ... [ 6.34147882e-01 -5.30158103e-01 -6.08208656e-01 ... 4.67090517e-01 1.31276441e+00 -2.69152164e-01] [ 1.20567858e+00 -1.83508301e+00 8.39370966e-01 ... -3.37337442e-02 6.87904656e-01 -7.85909116e-01] [-2.47808740e-01 -6.45135045e-01 -3.43986779e-01 ... 3.10205787e-01 -5.52721657e-02 -2.28155315e-01]] [[ 2.24548250e-01 -9.25635621e-02 6.65594518e-01 ... 2.41037924e-02 1.10244131e+00 -5.52063107e-01] [ 9.84993875e-02 8.88318941e-02 -7.92414129e-01 ... 6.43391430e-01 -9.77252960e-01 7.18511418e-02] [ 6.53320849e-01 -1.50186598e-01 -2.31479451e-01 ... -3.79278690e-01 -8.19195688e-01 1.15474105e+00] ... [ 1.05899203e+00 1.61860324e-02 7.17319101e-02 ... -7.67922163e-01 -5.24520338e-01 3.35248590e-01] [-6.44710064e-01 -7.16572225e-01 4.29084390e-01 ... 2.31651798e-01 -4.34209347e-01 -1.30192384e-01] [ 8.85762647e-02 4.17196393e-01 -6.52207911e-01 ... 2.19850928e-01 8.48544359e-01 5.19471824e-01]]] [[[-1.46263584e-01 1.71856558e+00 -3.54059368e-01 ... 1.06117201e+00 -5.91342449e-01 2.00512916e-01] [-2.24222586e-01 -3.64502430e-01 4.35230225e-01 ... -2.84075558e-01 -3.62040043e-01 8.60711455e-01] [-1.78456660e-02 -3.88404101e-01 -6.28202856e-01 ... 1.79771572e-01 -5.19699931e-01 4.95083004e-01] ... [-4.07071449e-02 -1.43283099e-01 9.66575682e-01 ... 6.25415504e-01 -4.19582903e-01 1.26057565e-01] [ 1.27136016e+00 -3.00628901e-01 3.96906018e-01 ... 
3.04102987e-01 7.62264192e-01 1.06758261e+00] [-5.33766411e-02 2.88098514e-01 5.03113389e-01 ... 3.75641525e-01 1.26761353e+00 -2.37019792e-01]] [[ 1.02247012e+00 3.65074396e-01 -6.33692563e-01 ... -5.47655642e-01 7.14736223e-01 -1.02073371e+00] [-5.25833905e-01 -1.19230703e-01 -1.18120229e+00 ... -8.49294960e-01 -7.51511276e-01 -5.12379467e-01] [ 8.10273111e-01 3.72942388e-01 2.29727045e-01 ... -5.83398342e-01 -1.63782597e-01 -1.28814197e+00] ... [-1.14068091e+00 -1.25560260e+00 -1.04824817e+00 ... -3.47314954e-01 -1.11691999e+00 -5.81469059e-01] [-4.78112012e-01 -7.52924323e-01 -1.44323957e+00 ... -1.02496469e+00 -1.88755929e-01 4.38833743e-01] [-3.86715792e-02 -4.52608407e-01 -3.63406301e-01 ... -6.64423764e-01 -8.59187007e-01 -7.92958319e-01]] [[-2.52959818e-01 -1.43953875e-01 -2.21627146e-01 ... 9.81827497e-01 -1.83406338e-01 -4.84866463e-02] [ 1.21138239e+00 -3.03909332e-01 -2.06123620e-01 ... 2.50313431e-01 8.06231201e-01 3.22573572e-01] [-2.35648990e-01 -7.21356153e-01 -8.68512765e-02 ... 7.64128864e-01 3.17684323e-01 2.48708054e-02] ... [ 3.76157939e-01 3.71925443e-01 2.43654594e-01 ... -8.69620025e-01 1.25280321e+00 5.54534435e-01] [-5.01001596e-01 -9.39949006e-02 2.48245984e-01 ... -4.74762261e-01 5.05058825e-01 -3.72910619e-01] [-9.43624020e-01 5.09854496e-01 1.69387233e+00 ... -4.36502784e-01 2.31103227e-02 -7.57411942e-02]] ... [[-2.96235830e-01 2.07971871e-01 1.32367730e+00 ... 5.46927929e-01 -4.55694944e-01 -1.32051623e+00] [ 3.86536330e-01 -1.15214527e+00 7.24900186e-01 ... -1.34096432e+00 5.72109818e-01 -5.32041192e-01] [ 2.07529411e-01 2.08095804e-01 -1.51804015e-01 ... -6.27519429e-01 -2.02238902e-01 5.23232341e-01] ... [-6.26602292e-01 -6.69661999e-01 -6.35754168e-01 ... 4.85154480e-01 1.67737886e-01 7.88685858e-01] [ 2.07144782e-01 -5.52403569e-01 -2.08267584e-01 ... -1.02402937e+00 -9.58584189e-01 2.84027010e-01] [ 2.86596239e-01 8.95528615e-01 -6.57380819e-01 ... 
-8.77047330e-02 -2.23180130e-01 -9.69521999e-01]] [[-9.69573319e-01 -1.59277096e-01 6.75804168e-02 ... 9.20989335e-01 -7.94722438e-01 -3.71927433e-02] [ 1.23488736e+00 1.21602094e+00 9.16819632e-01 ... -3.73155445e-01 -3.69079292e-01 -4.73272987e-02] [ 1.96499620e-02 -1.41143882e+00 -5.97921669e-01 ... 3.00518930e-01 -2.81145185e-01 -1.14402913e-01] ... [ 5.84305108e-01 -1.41510558e+00 3.23268980e-01 ... -1.04776181e-01 -2.92893469e-01 -4.31143232e-02] [-1.20190334e+00 2.41700411e-01 -1.30991966e-01 ... 2.10896477e-01 3.14259708e-01 5.83962202e-01] [-5.59864044e-01 1.88144103e-01 5.52377045e-01 ... -2.69601405e-01 -5.02378106e-01 4.71484929e-01]] [[ 2.30845824e-01 -2.03420103e-01 4.74244297e-01 ... -8.43675993e-03 2.62279779e-01 -2.74183661e-01] [-4.01238166e-02 -5.77760577e-01 -3.12917493e-02 ... 6.59648001e-01 -5.62897682e-01 -3.67384732e-01] [ 1.62542248e+00 -2.35851556e-01 7.15609193e-01 ... 9.16450560e-01 -6.55899525e-01 5.52277207e-01] ... [ 7.91484416e-01 -6.14496887e-01 -4.16637152e-01 ... 6.96083128e-01 5.69855310e-02 -2.04557851e-01] [-9.22319651e-01 1.02757013e+00 2.26716906e-01 ... 1.32014409e-01 -8.00991118e-01 -7.73025036e-01] [ 1.49782911e-01 -1.25616714e-01 1.62639901e-01 ... 1.35995880e-01 8.71054769e-01 5.07574737e-01]]]] [[[[ 7.83535063e-01 9.70591784e-01 4.23578531e-01 ... 2.18836799e-01 -1.93449184e-01 -3.54072392e-01] [ 3.93853694e-01 -7.48388767e-01 -9.53112483e-01 ... 9.36874971e-02 6.56597733e-01 4.98598725e-01] [ 4.90965903e-01 -9.45722222e-01 -3.78799260e-01 ... 1.08257496e+00 -1.47885108e+00 1.16270936e+00] ... [-3.74003559e-01 -2.82120049e-01 -1.23423374e+00 ... 7.96759725e-01 -4.69044805e-01 -9.36992392e-02] [ 3.41769665e-01 6.36821747e-01 -1.10649455e+00 ... 5.56134343e-01 -1.58445328e-01 -4.91433293e-01] [ 4.53974940e-02 -1.12765729e+00 1.55914485e+00 ... -9.94053423e-01 7.47181654e-01 2.80560523e-01]] [[ 6.58372324e-03 -2.77967185e-01 3.21668148e-01 ... 
3.99915099e-01 5.71747124e-01 -5.37827704e-03] [ 1.30469596e+00 -7.68115699e-01 4.12184477e-01 ... -1.53770292e+00 -4.54042584e-01 -1.05143659e-01] [ 1.15400577e+00 -1.06375289e+00 -5.55245057e-02 ... 3.08751911e-01 1.49563122e+00 -4.24793316e-03] ... [-5.45684714e-03 5.07892966e-01 -8.72960448e-01 ... -8.12412277e-02 -1.06363320e+00 3.85775059e-01] [ 1.04080474e+00 -5.43680787e-01 1.02030075e+00 ... -2.36256987e-01 2.61299051e-02 -1.31824970e+00] [-6.62165165e-01 6.05899513e-01 -3.12835127e-01 ... 1.89884380e-01 -1.44464225e-01 -8.30783308e-01]] [[-7.67362893e-01 2.63601005e-01 -1.15491939e+00 ... 3.95657301e-01 1.35037231e+00 -7.17971802e-01] [-6.29516006e-01 -1.25056553e+00 6.67966008e-01 ... -3.51826131e-01 -6.96787536e-01 -2.40271628e-01] [-1.32771385e+00 1.73571870e-01 1.06060646e-01 ... 8.16770568e-02 3.48489374e-01 -1.21886387e-01] ... [ 2.02898219e-01 -1.44311225e+00 -4.23990697e-01 ... 4.66275573e-01 -3.44595760e-01 -3.85993347e-02] [-1.42214596e+00 -2.89479434e-01 -4.51653928e-01 ... -7.57205129e-01 7.01506615e-01 -6.69784486e-01] [-1.94714278e-01 -1.05676651e+00 -1.20439902e-01 ... -3.12494695e-01 9.04793367e-02 1.27781024e-02]] ... [[-3.84967029e-01 -2.89024469e-02 8.69578347e-02 ... -2.61379451e-01 -4.53707129e-01 -1.03993557e-01] [-5.29830694e-01 -7.26283967e-01 2.47613400e-01 ... -4.64157194e-01 2.69272506e-01 2.99245566e-01] [ 1.25456795e-01 -4.36740033e-02 -1.54251862e+00 ... 7.56156206e-01 1.23131943e+00 -8.25222135e-01] ... [ 1.58183193e+00 -1.40750902e-02 -8.59463811e-02 ... -5.34660578e-01 1.33358216e+00 -1.32246837e-01] [-2.24022493e-01 -5.97936392e-01 -8.40142310e-01 ... 8.00715566e-01 -8.92625093e-01 -1.02711570e+00] [-2.39325017e-01 -1.81974664e-01 8.14081952e-02 ... 1.10563791e+00 1.06414366e+00 -2.60449588e-01]] [[ 4.71590936e-01 -4.19913530e-01 -4.08888832e-02 ... 2.67369021e-02 -2.72811145e-01 6.63572967e-01] [-4.26130474e-01 -1.64914671e-02 -2.78184384e-01 ... 
4.31852758e-01 -1.78818673e-01 -1.04288578e+00] [ 1.44829178e+00 7.18147933e-01 3.73956501e-01 ... -2.08716035e-01 -2.85278291e-01 -7.07074463e-01] ... [ 3.36413860e-01 1.39617771e-01 -1.71578705e-01 ... -7.70998776e-01 6.78416193e-02 -1.57934904e-01] [ 7.44733930e-01 -1.21072435e+00 1.38846099e-01 ... 9.50371027e-01 2.59262890e-01 8.67199719e-01] [ 2.45919228e-01 4.97091204e-01 -4.94679302e-01 ... 1.19314241e+00 4.28240485e-02 -1.31713951e+00]] [[-5.82309067e-01 -8.85280669e-01 -6.95540428e-01 ... -3.16656470e-01 1.00466132e+00 -1.93038568e-01] [-1.26904333e+00 1.10260105e+00 -4.50067014e-01 ... -2.76219755e-01 -8.11870515e-01 -2.52078027e-01] [-1.01804093e-01 -1.08042300e-01 -1.39343023e-01 ... 6.17421627e-01 8.86205912e-01 -6.38159335e-01] ... [ 5.39088607e-01 -1.11748859e-01 -1.37677863e-01 ... 4.56622839e-01 -5.00635862e-01 1.16096401e+00] [ 1.01445878e+00 9.30100441e-01 9.43915904e-01 ... 1.45556610e-02 8.41857433e-01 6.41702175e-01] [-1.10776931e-01 -3.88028920e-01 -1.27456233e-01 ... -7.59065032e-01 4.84807611e-01 6.55166879e-02]]] [[[ 6.46608531e-01 -8.20197582e-01 -3.00078206e-02 ... -8.24551940e-01 -1.21238363e+00 -7.11639225e-01] [-2.33715758e-01 3.11638057e-01 7.53806055e-01 ... -1.11292374e+00 -6.67460501e-01 -1.18260431e+00] [ 2.13175282e-01 -5.12904763e-01 -1.04970801e+00 ... 4.85047586e-02 -7.58290291e-01 -6.31515980e-01] ... [ 7.32030988e-01 -1.29189521e-01 -2.39379480e-01 ... -3.83382291e-01 -2.54269838e-01 -3.59693199e-01] [-6.76857829e-01 -1.70276284e-01 -1.95064664e+00 ... 2.51956731e-01 -1.15340674e+00 -4.81931418e-01] [-1.50190488e-01 -1.60493159e+00 3.25901926e-01 ... 7.15887427e-01 1.24074566e+00 -1.95732698e-01]] [[ 2.68474907e-01 3.59142810e-01 8.82478893e-01 ... 1.10213697e+00 1.92650650e-02 9.17583629e-02] [-1.08743131e-01 6.64473593e-01 -3.57155859e-01 ... 2.10894302e-01 4.49312061e-01 1.42345858e+00] [-8.57578456e-01 4.16865617e-01 -8.31424415e-01 ... -5.88387728e-01 3.75692993e-01 6.64428055e-01] ... 
[-3.47396880e-02 4.81175989e-01 -1.07506335e-01 ... -4.46169555e-01 6.08097672e-01 7.23847747e-01] [ 9.28539217e-01 -1.01035702e+00 8.66603136e-01 ... 2.66000837e-01 2.64628142e-01 -2.85709083e-01] [-3.79326880e-01 2.01807186e-01 -1.27338842e-01 ... -4.39430252e-02 1.18135488e+00 1.35196579e+00]] [[-9.36921909e-02 -5.86637966e-02 -9.25875008e-01 ... 3.80953729e-01 1.23793936e+00 -4.79887843e-01] [ 3.69169503e-01 -3.54377955e-01 6.29470423e-02 ... -1.45520449e-01 -1.39908016e-01 1.59314662e-01] [-2.40195885e-01 -1.79394174e+00 4.23628598e-01 ... 7.06215143e-01 1.73747551e+00 2.06723616e-01] ... [ 1.28777897e+00 -8.11836421e-01 3.34570318e-01 ... 5.05295098e-01 1.02702522e+00 -1.00148451e+00] [ 8.15993726e-01 1.02322114e+00 -2.08284438e-01 ... -2.23508373e-01 -2.01951790e+00 4.75012243e-01] [ 5.45027435e-01 -1.73070133e-01 6.14524782e-01 ... -6.03315651e-01 -1.22550297e+00 1.48794591e-01]] ... [[ 1.42267716e+00 1.09387410e+00 4.29016292e-01 ... 5.01715720e-01 -3.09356213e-01 1.19862831e+00] [ 1.74518168e-01 -1.75881341e-01 -3.79058987e-01 ... 3.68579626e-01 6.80117548e-01 7.45206714e-01] [-1.48001611e+00 3.62307489e-01 5.32406926e-01 ... -7.63813734e-01 -4.64077085e-01 -6.15230910e-02] ... [ 5.10445714e-01 -1.58952367e+00 9.63658631e-01 ... -9.57528889e-01 1.25056767e+00 9.27830338e-01] [-1.14615858e+00 8.53768229e-01 7.11321771e-01 ... 8.74461651e-01 -6.16760612e-01 6.11994922e-01] [ 4.49639231e-01 5.65733612e-01 3.57587993e-01 ... 1.51961625e+00 4.89756197e-01 -3.31571579e-01]] [[ 1.24421561e+00 1.07292235e+00 -5.89078307e-01 ... 9.12698150e-01 2.95923859e-01 8.11163187e-01] [ 4.08670753e-01 2.38789380e-01 1.51812541e+00 ... -1.53837293e-01 -1.38329411e+00 -6.53760612e-01] [-1.24027558e-01 3.34898442e-01 -1.58822477e+00 ... 4.35806960e-01 5.99401474e-01 -1.29213586e-01] ... [ 5.56992650e-01 -2.57647336e-01 -8.09320569e-01 ... -3.80104512e-01 1.78905353e-01 1.32726729e-01] [ 1.59698114e-01 4.43721205e-01 4.37328182e-02 ... 
-1.14056535e-01 -1.21976602e+00 -1.18935907e+00] [ 9.95600104e-01 -1.91040289e+00 7.77866483e-01 ... -1.08324580e-01 8.24906603e-02 -1.59793770e+00]] [[-5.85234344e-01 -2.20769763e-01 -8.88670504e-01 ... -1.07771790e+00 1.05154252e+00 -3.80367935e-01] [ 9.77633428e-03 -1.26284468e+00 -1.02373064e+00 ... 4.40460771e-01 3.23168486e-01 -1.08277164e-01] [-1.01301622e+00 1.04621254e-01 -2.98490465e-01 ... 2.49490291e-01 1.21301568e+00 2.47724146e-01] ... [ 9.54065502e-01 2.59770900e-01 7.46581733e-01 ... -2.24616881e-02 -7.20128894e-01 -1.82910442e-01] [-6.30064130e-01 -5.87709069e-01 -1.18998110e+00 ... -2.42064089e-01 7.35270604e-02 -5.00626624e-01] [-8.30141366e-01 -3.07636887e-01 -1.65237576e-01 ... 8.83851275e-02 -1.17881930e+00 3.44271988e-01]]] [[[-6.37185872e-01 -8.31702590e-01 3.56784284e-01 ... 1.13654658e-01 -1.31424069e+00 1.04337966e+00] [-5.68361402e-01 -1.84017420e+00 -7.43276596e-01 ... 4.71930981e-01 7.01594055e-01 1.35254800e+00] [ 1.99540949e+00 5.25988638e-01 7.53755331e-01 ... 4.87193316e-01 5.32794595e-01 -3.70217185e-03] ... [-1.19258547e+00 4.82694596e-01 -9.02034163e-01 ... 9.09987986e-01 1.63855866e-01 -8.26598823e-01] [ 4.64832813e-01 8.98620903e-01 -3.12221378e-01 ... 2.50133246e-01 -2.76194274e-01 -1.07040727e+00] [-1.79515794e-01 1.30665064e+00 -1.09406658e-01 ... 8.95675838e-01 8.31807494e-01 -9.32040066e-02]] [[ 1.58912569e-01 1.23234296e+00 6.13119721e-01 ... -5.95307469e-01 8.07711303e-01 2.96324641e-01] [-4.19897676e-01 -1.91132441e-01 -7.64340460e-02 ... 3.22079659e-01 2.90177375e-01 1.29926574e+00] [ 1.67959774e+00 7.76226580e-01 -2.77026206e-01 ... 6.46901965e-01 8.29019010e-01 1.02030516e+00] ... [ 6.64372683e-01 7.25807250e-01 -9.79760706e-01 ... 4.32174444e-01 -4.81116354e-01 7.59130418e-01] [-6.07070923e-01 9.08548057e-01 -2.07186326e-01 ... -3.49559039e-01 8.77636522e-02 7.84177721e-01] [-1.58886492e+00 -1.55163586e-01 1.41935974e-01 ... 
-1.28719902e+00 -7.05090046e-01 4.62108314e-01]] [[ 1.71201086e+00 -1.41249970e-01 -1.20352423e+00 ... -1.94838569e-01 5.42047203e-01 -3.61734480e-01] [-7.91603565e-01 5.21043781e-03 -2.85851121e-01 ... 9.41685915e-01 2.39364225e-02 1.55494654e+00] [ 1.07421494e+00 8.66064578e-02 -1.42502272e+00 ... -1.11799014e+00 6.17359579e-01 7.92082489e-01] ... [-1.12424828e-01 6.66812599e-01 5.49660385e-01 ... -7.08163261e-01 6.12265944e-01 6.79990709e-01] [ 2.95083880e-01 6.68488145e-01 -9.00959849e-01 ... 7.92600393e-01 8.79866242e-01 -7.65492260e-01] [ 4.78212982e-02 -1.28435969e+00 -1.39486551e-01 ... -1.28762531e+00 1.09539831e+00 1.49204999e-01]] ... [[-2.22499818e-01 1.71251500e+00 1.76669225e-01 ... 2.53131300e-01 9.67388153e-01 1.17380738e-01] [ 1.25562441e+00 -7.16522396e-01 -7.16431439e-01 ... 4.52171773e-01 -5.58541059e-01 1.23633349e+00] [ 1.12353790e+00 -8.82318020e-01 -3.14246088e-01 ... 9.34727564e-02 3.80352825e-01 4.06198978e-01] ... [ 5.61545372e-01 -5.49572051e-01 -3.03546280e-01 ... -7.43641138e-01 1.84430584e-01 7.38477647e-01] [-1.27218819e+00 -3.53929847e-01 -1.45193920e-01 ... 8.07690084e-01 7.28124529e-02 2.17955634e-01] [-1.38275695e+00 5.11721261e-02 -4.78990793e-01 ... 8.05442870e-01 2.49832779e-01 1.41700193e-01]] [[-2.72204071e-01 -4.03335214e-01 -7.48869956e-01 ... 1.63689286e-01 1.63753286e-01 -4.30554122e-01] [-1.65890241e+00 -1.85366243e-01 -2.20324561e-01 ... 1.27235562e-01 2.80887425e-01 -3.02045703e-01] [-2.70976484e-01 -7.80989677e-02 2.52136767e-01 ... -4.42961901e-01 2.46537626e-01 7.06775486e-02] ... [ 3.83566618e-02 5.93470633e-01 1.42132446e-01 ... -1.45255968e-01 -4.10003453e-01 2.92249739e-01] [-1.12247312e+00 7.92015433e-01 8.68731797e-01 ... 1.89123362e-01 -7.84974471e-02 3.39185633e-03] [-2.25593254e-01 1.25226402e+00 -1.74757019e-01 ... 8.27663541e-01 2.14157686e-01 3.74311991e-02]] [[ 1.46158409e+00 1.58398759e+00 -2.58458316e-01 ... 
-7.41097450e-01 1.18841469e-01 6.70325994e-01] [-9.48547870e-02 -5.25608122e-01 -3.49881172e-01 ... -2.47058451e-01 -1.01390338e+00 -3.98509085e-01] [-7.92237997e-01 1.08354998e+00 6.21214509e-01 ... 2.46582821e-01 7.75323510e-01 3.21083039e-01] ... [ 2.17638195e-01 8.54050398e-01 -4.28457588e-01 ... 8.56640995e-01 3.65702212e-01 -9.87973154e-01] [ 4.08474267e-01 -1.93862364e-01 1.05761826e+00 ... -4.66389120e-01 4.60202843e-01 3.50298017e-01] [ 1.34641576e+00 7.45047271e-01 -3.01317394e-01 ... 8.90471160e-01 3.67808610e-01 -1.39782488e+00]]] [[[ 2.00819209e-01 1.64333373e-01 -2.25024804e-01 ... 7.54820645e-01 1.75702453e+00 -1.12250710e+00] [ 2.12482646e-01 -8.96201551e-01 1.04157770e+00 ... -1.37404835e+00 2.41837397e-01 -6.79578185e-01] [ 3.90974849e-01 -9.70821857e-01 1.45463109e-01 ... 7.08638430e-01 1.12411659e-02 -3.03376436e-01] ... [ 3.93711030e-01 -8.76566470e-01 -6.24377370e-01 ... -6.24671638e-01 -2.33222559e-01 -5.20965695e-01] [ 5.07790029e-01 6.55806720e-01 6.79628730e-01 ... 8.22276950e-01 1.25048196e+00 -7.09625065e-01] [-1.37611195e-01 1.26358247e+00 -7.70697474e-01 ... 5.55947244e-01 -5.63664913e-01 -8.34791958e-01]] [[-2.46821225e-01 -8.26464891e-01 3.86853904e-01 ... 1.32050550e+00 -3.55937183e-01 7.48265266e-01] [ 1.22474360e+00 -4.26886052e-01 1.49615899e-01 ... 1.22232676e+00 7.43556678e-01 -1.05989143e-01] [ 1.44342566e+00 9.66918170e-02 -2.92591035e-01 ... -8.97833705e-01 -1.85386658e-01 4.21847701e-01] ... [ 1.40412986e-01 5.65203249e-01 -2.66534835e-01 ... -3.39700997e-01 -5.95661104e-01 -2.54953414e-01] [ 4.74741518e-01 3.95817608e-01 -7.66321301e-01 ... -8.92320871e-02 4.27538246e-01 -2.05807034e-02] [ 4.32418615e-01 6.79405928e-01 -3.43735456e-01 ... -7.16260910e-01 2.40282655e-01 -1.56881481e-01]] [[ 6.86867759e-02 3.94191686e-03 -7.44323373e-01 ... -8.81327510e-01 -9.69382942e-01 -1.45379201e-01] [-3.40044320e-01 1.02123152e-02 -2.30275407e-01 ... 
-2.37526983e-01 2.93697417e-01 -2.24108055e-01] [-5.70913434e-01 -2.64903754e-01 1.26617387e-01 ... 3.07169735e-01 8.09970915e-01 6.16874695e-01] ... [ 6.24637723e-01 2.30675563e-01 -4.48201686e-01 ... 3.79879922e-01 1.14702225e+00 -1.25925496e-01] [ 9.73205507e-01 3.70302290e-01 1.36722997e-01 ... 7.55414963e-01 -3.79133880e-01 1.01597476e+00] [-2.20740467e-01 -3.57476585e-02 6.04452550e-01 ... -9.13556337e-01 1.37178481e+00 5.63726723e-02]] ... [[ 3.89056265e-01 5.10246813e-01 9.30636823e-01 ... 1.23872232e+00 7.75372386e-01 1.33209825e-01] [-1.25847876e+00 1.45739555e-01 -1.09199420e-01 ... -7.92160451e-01 4.49563831e-01 4.22899239e-02] [ 3.43045205e-01 -1.44623792e+00 -3.86696577e-01 ... 1.15151906e+00 3.69383782e-01 -3.60008419e-01] ... [-9.55574989e-01 1.13258505e+00 -6.30625606e-01 ... -1.59048009e+00 -4.31463420e-01 1.19451709e-01] [-5.84277630e-01 3.08683384e-02 -9.58750397e-02 ... -3.12255502e-01 -4.96530741e-01 6.66145742e-01] [-6.10978127e-01 -7.91008919e-02 5.91262341e-01 ... -2.78573751e-01 1.55838931e+00 -9.55922529e-02]] [[ 3.03880513e-01 -6.21786714e-01 -4.78955001e-01 ... -2.81863749e-01 2.97054142e-01 -2.91241974e-01] [ 1.73332337e-02 -2.13592857e-01 -7.62548983e-01 ... -8.62488568e-01 -1.93129444e+00 -5.65449536e-01] [ 6.72511280e-01 -7.88328588e-01 1.84372008e-01 ... 8.60417038e-02 -2.43199229e-01 2.53882945e-01] ... [-1.68150973e+00 9.97012794e-01 1.60050452e-01 ... 3.10974449e-01 5.50395250e-01 6.40301481e-02] [ 3.01169574e-01 5.16020775e-01 -2.97223747e-01 ... 1.24828354e-01 -8.67156148e-01 1.48561731e-01] [-3.50335032e-01 5.98482370e-01 9.25459806e-03 ... 5.22707701e-01 2.52218544e-01 -1.80886626e+00]] [[ 9.11926210e-01 9.41323578e-01 9.65093732e-01 ... 5.83251894e-01 6.74881101e-01 -6.62104845e-01] [ 6.39641285e-01 4.78564173e-01 5.89994490e-01 ... -1.01587772e+00 -7.22378492e-02 -8.74002129e-02] [-2.18514845e-01 6.09905481e-01 -3.24463755e-01 ... -8.07516798e-02 -1.22271031e-01 3.01467478e-01] ... 
[ 8.65435481e-01 -4.35005277e-01 -2.10864127e-01 ... -1.12722504e+00 -5.13240099e-01 -2.28541180e-01] [-8.86240080e-02 -2.46414915e-01 4.75485653e-01 ... 8.69645119e-01 5.57423472e-01 6.88111782e-01] [-3.31952512e-01 -5.64817309e-01 -1.15020049e+00 ... 4.72000837e-01 4.19177890e-01 -1.24950213e-02]]] [[[ 5.03507197e-01 -3.71338844e-01 -6.84106231e-01 ... 4.89693642e-01 7.58943737e-01 2.12251782e-01] [ 2.47823164e-01 3.84365581e-02 -3.73264700e-01 ... 4.80100363e-01 1.27756208e-01 5.10787785e-01] [ 3.34658742e-01 -8.41623545e-01 1.15253830e+00 ... 6.61354661e-01 -7.87529051e-01 2.36630589e-01] ... [-6.41487762e-02 8.77835989e-01 2.79764295e-01 ... 5.87528884e-01 4.54111964e-01 2.83270389e-01] [-1.73963773e+00 9.75500464e-01 -2.69275635e-01 ... -3.85986030e-01 5.21629035e-01 1.24375105e-01] [-4.24860597e-01 1.60097027e+00 -3.84713978e-01 ... 1.24811101e+00 -4.97455031e-01 -2.46746261e-02]] [[-1.20795155e+00 5.66408694e-01 1.23984134e+00 ... -5.22952020e-01 -1.11349177e+00 8.47809076e-01] [ 4.07666713e-01 -2.24775329e-01 -1.05992353e+00 ... 3.64682116e-02 1.05867060e-02 6.48715675e-01] [ 9.47714806e-01 6.98568225e-02 2.07828403e-01 ... -3.77920747e-01 -3.17362040e-01 9.26057398e-01] ... [ 1.39475852e-01 -8.54097068e-01 2.60931194e-01 ... -1.05575728e+00 -6.17017388e-01 -4.25488710e-01] [ 8.47763538e-01 6.08111024e-01 1.54878303e-01 ... -4.89821166e-01 -2.81531721e-01 -1.51484221e-01] [-2.85285473e-01 -8.15525353e-01 4.06739116e-02 ... 1.84516273e-02 2.07150936e-01 4.74666178e-01]] [[ 6.51079595e-01 1.05092871e+00 4.33077753e-01 ... 5.65675497e-01 -6.29065111e-02 1.17936075e+00] [-4.74062026e-01 -3.21291953e-01 3.42000008e-01 ... -2.93983966e-01 -3.02560061e-01 4.90426600e-01] [-3.85122776e-01 6.01899326e-01 3.05354781e-02 ... 2.06200391e-01 -6.11244738e-01 2.28146061e-01] ... [ 1.61793128e-01 -2.60474473e-01 -1.63497102e+00 ... 1.09461880e+00 1.22607425e-01 -1.21809471e+00] [-8.88520062e-01 2.39685833e-01 8.76352131e-01 ... 
1.23195040e+00 8.61483142e-02 -2.42480978e-01] [ 4.66976494e-01 2.53719181e-01 4.32083428e-01 ... 9.49089468e-01 -7.88554773e-02 4.91598785e-01]] ... [[-1.43860161e-01 1.35571554e-01 -2.45421126e-01 ... 1.82784116e+00 -9.79635656e-01 -5.25364995e-01] [-7.91999698e-01 1.69928467e+00 6.63491130e-01 ... 7.73338377e-01 3.69341880e-01 2.01537347e+00] [-9.59221646e-02 -7.10146502e-02 -8.53195906e-01 ... -4.90284562e-01 1.52891636e-01 -1.71054289e-01] ... [-3.29579949e-01 -1.94111741e+00 -1.23280931e+00 ... -1.00658217e-03 2.28729546e-01 2.98412651e-01] [ 7.02440977e-01 -4.00750399e-01 5.26771955e-02 ... -4.16259319e-01 3.40267390e-01 -9.42898452e-01] [ 1.26684296e+00 3.59234303e-01 -1.01697743e+00 ... -2.02573508e-01 -1.04414320e+00 -3.03268284e-01]] [[ 6.83342278e-01 -5.19228935e-01 2.52924532e-01 ... 4.05775607e-01 1.83818564e-02 1.08785868e-01] [ 6.37634516e-01 1.71951771e-01 1.50832617e+00 ... -1.17946053e-02 -1.38132960e-01 -2.98756182e-01] [ 4.95605618e-02 -8.22982341e-02 -3.93892497e-01 ... 5.10263145e-01 2.85990089e-01 -2.60925144e-01] ... [-4.49804485e-01 -4.39901948e-02 -1.06258042e-01 ... 1.10219717e+00 1.27393931e-01 3.91000062e-01] [-8.35914016e-01 -1.06455207e+00 7.31441081e-01 ... 1.36204362e+00 -4.91246015e-01 -3.25558275e-01] [ 4.86303747e-01 2.21169877e+00 2.58821428e-01 ... -5.45359440e-02 -4.77833420e-01 -5.92267275e-01]] [[-3.76036108e-01 -4.60241973e-01 2.42666388e-03 ... 1.03758073e+00 2.12822892e-02 4.10576642e-01] [-4.07848299e-01 -6.34224191e-02 -4.34596270e-01 ... 2.13993326e-01 -2.11757779e-01 -4.96698022e-01] [ 3.74212384e-01 5.26162565e-01 -8.23498428e-01 ... 8.96628678e-01 -4.71412987e-01 1.21262670e+00] ... [-1.76902592e+00 7.32396424e-01 1.38158858e+00 ... -6.04959071e-01 -3.98448616e-01 -3.93930316e-01] [-2.34790653e-01 -3.44668813e-02 9.26806986e-01 ... -1.45119679e+00 -1.90530047e-01 -9.77539480e-01] [ 8.28848422e-01 2.57162362e-01 -2.25818336e-01 ... 
-8.37493181e-01 6.01749778e-01 -6.53140023e-02]]] [[[ 1.28368986e+00 1.56269562e+00 1.04681110e+00 ... 3.24068874e-01 -7.13028491e-01 1.14060771e-02] [ 7.38248825e-01 3.69668782e-01 4.25098270e-01 ... 2.16952518e-01 -6.17807865e-01 -1.40461326e-01] [ 5.63858151e-01 -1.35916382e-01 1.06385088e+00 ... -1.34167895e-01 -8.07416022e-01 1.11430740e+00] ... [ 2.22035065e-01 2.40450442e-01 5.69361567e-01 ... 2.67279893e-02 8.79079774e-02 2.01172531e-01] [-2.60962486e-01 2.31828403e+00 4.90322739e-01 ... 6.51103139e-01 1.95012832e+00 1.06088951e-01] [ 4.32490617e-01 1.05347955e+00 4.20128137e-01 ... -1.63060164e+00 3.94359976e-01 9.52576697e-01]] [[-4.43928659e-01 -1.15431094e+00 -4.19990480e-01 ... -4.27298844e-01 -1.42105949e+00 -1.72382784e+00] [ 9.21076953e-01 -6.12059057e-01 8.53156924e-01 ... -6.60360038e-01 -3.33069146e-01 4.29911464e-01] [-1.19304764e+00 -1.13730216e+00 4.20604385e-02 ... 2.40802601e-01 9.28853452e-02 -5.65994382e-01] ... [ 1.53693998e+00 1.71851739e-01 1.00834727e+00 ... 5.97536087e-01 -5.78150034e-01 5.74654818e-01] [ 1.87988803e-02 -6.87183663e-02 1.46654391e+00 ... -9.49739635e-01 -5.70586383e-01 7.22856104e-01] [ 6.64713860e-01 8.26026261e-01 -1.60340059e+00 ... -4.67789769e-01 -5.71633339e-01 -6.24943495e-01]] [[-5.35802171e-02 -2.60534346e-01 2.77133912e-01 ... 8.44219267e-01 2.13562902e-02 1.52124450e-01] [-9.86112595e-01 8.97543207e-02 1.33789897e+00 ... -8.88303220e-01 -6.86996520e-01 -4.78130221e-01] [ 4.39156741e-01 -2.84821302e-01 4.19121683e-01 ... 1.54721808e+00 2.03133774e+00 -1.10199869e+00] ... [ 3.19317281e-01 -1.51443982e+00 9.73187625e-01 ... -6.34650961e-02 -3.80314589e-01 -8.47471416e-01] [ 1.90043241e-01 2.52906084e-01 -1.59063324e-01 ... -6.79637253e-01 8.32654461e-02 -2.70142585e-01] [ 5.20929158e-01 -3.37404162e-01 1.68491566e+00 ... -1.08325028e+00 1.08863950e+00 3.91654640e-01]] ... [[-4.55055684e-01 1.06713399e-01 1.92491248e-01 ... 
-5.45377791e-01 5.65597825e-02 3.67998540e-01] [ 5.64924717e-01 -6.57000363e-01 -2.73163646e-01 ... 6.89717233e-01 1.70113158e+00 7.77552247e-01] [-7.03072727e-01 -9.09914792e-01 -7.90975317e-02 ... -3.74716699e-01 5.75689077e-01 -1.03090978e+00] ... [ 1.33344978e-01 2.82223463e-01 2.39574239e-01 ... 1.73656970e-01 6.93692639e-02 -4.52660412e-01] [ 1.98599711e-01 -9.10822093e-01 -6.63134038e-01 ... 1.17083573e+00 1.16365537e-01 3.04465294e-01] [-2.46373296e-01 -1.08375227e+00 7.85850212e-02 ... 5.40372217e-04 1.92015707e-01 -3.56103808e-01]] [[ 9.05259371e-01 4.96838957e-01 -4.61465925e-01 ... -3.68138440e-02 -5.31565428e-01 -5.64983666e-01] [ 4.41163331e-01 -1.00416064e+00 1.82180658e-01 ... -1.62291813e+00 -1.00798333e+00 1.05036700e+00] [-7.92805731e-01 -6.06109858e-01 -2.77545154e-02 ... -2.23851770e-01 1.09510958e+00 5.66686876e-02] ... [-4.49113309e-01 -4.24165308e-01 -5.53054392e-01 ... 5.97673297e-01 1.76995122e+00 -1.12053907e+00] [-3.63007486e-01 -3.60156536e-01 -5.13292611e-01 ... 3.84957403e-01 -1.41702265e-01 -1.72730756e+00] [-1.27810693e+00 4.64424759e-01 6.23490453e-01 ... -2.84192204e-01 6.30713552e-02 1.90426171e-01]] [[-4.91695493e-01 7.32703626e-01 8.85043263e-01 ... 2.29728743e-01 7.53885388e-01 -1.00945461e+00] [ 3.34046721e-01 -4.36679810e-01 8.65119576e-01 ... -1.17420483e+00 -5.97913675e-02 3.28675032e-01] [ 1.21687806e+00 7.62874067e-01 -2.44520977e-02 ... 2.17672780e-01 -5.17330706e-01 1.04687607e+00] ... [-1.10249266e-01 -3.96919131e-01 1.24392724e+00 ... 6.54245242e-02 -1.74895453e+00 2.31447130e-01] [ 9.94223893e-01 4.70180601e-01 -8.08942854e-01 ... 7.85066962e-01 -8.80807281e-01 -9.09594595e-01] [ 8.05005968e-01 -1.44001976e-01 8.19203138e-01 ... -5.43957949e-01 3.21770400e-01 5.62644266e-02]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 5} - params:{'n_groups': 3, 'weights_shape': (6,), 'eps': -0.05} ] | 0.23 | |
|
----------------------------- Captured stdout call ----------------------------- 5 graph(%self : __torch__.test_group_norm.___torch_mangle_4620.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=-0.050000000000000003]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-1.4108 -0.2557 -0.3671 0.1209 0.6498 -0.3184 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=3]() %12 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, %5, %self.bias, %4) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) fw_re: [[[[[ 1.57293153e+00 -1.73008585e+00 1.35876882e+00 ... 
2.57016778e+00 -3.07788467e+00 2.04401278e+00] [-3.65430117e-01 -1.28381264e+00 -1.64625371e+00 ... 7.91780591e-01 9.37812924e-01 -1.08460104e+00] [-1.47302020e+00 -4.31769878e-01 7.18027800e-02 ... 1.41923797e+00 -2.81823665e-01 -5.83705187e-01] ... [ 1.30427408e+00 -1.49685311e+00 -1.45711040e+00 ... 1.04556657e-01 7.65878499e-01 -7.35797286e-01] [ 2.00909042e+00 -1.03020608e+00 1.64924419e+00 ... -6.09891452e-02 1.87634006e-01 -1.58741009e+00] [ 1.40063310e+00 -1.60819995e+00 -5.83274066e-01 ... 1.02258921e+00 -4.71629947e-02 -2.55304646e+00]] [[-3.74492735e-01 -4.45479006e-01 -8.34256530e-01 ... 7.00621605e-01 -1.56243026e+00 2.24195182e-01] [-1.94311216e-01 1.95182908e+00 -4.65369880e-01 ... -2.34182644e+00 3.78632098e-01 -4.05516714e-01] [ 2.63389677e-01 2.56088686e+00 -9.36542034e-01 ... -6.07875168e-01 -1.23898506e+00 5.90579629e-01] ... [ 6.91639066e-01 -1.31261468e+00 1.33354470e-01 ... 8.03423822e-01 -1.23066723e+00 -1.32586762e-01] [ 1.11087978e+00 1.26586843e+00 -4.76064175e-01 ... -1.97661805e+00 1.95178103e+00 -1.91843092e+00] [-3.33686733e+00 -5.85520864e-01 3.16396022e+00 ... 1.31530553e-01 1.40318334e+00 -4.15074396e+00]] [[ 1.23453653e+00 7.53596246e-01 -6.71475351e-01 ... 1.12215054e+00 9.28507864e-01 -8.04847419e-01] [ 4.43458855e-01 -3.27447392e-02 -1.19353199e+00 ... -1.63497165e-01 8.05142164e-01 4.30191469e+00] [-6.72409296e-01 -8.50156367e-01 -1.00998783e+00 ... 9.46863055e-01 4.13275838e-01 -8.22924137e-01] ... [-9.29512084e-01 -4.68621492e-01 1.56581604e+00 ... -2.20146298e-01 2.30428267e+00 -4.94651705e-01] [ 3.08786362e-01 -1.61067963e+00 -2.31807038e-01 ... 1.60192609e+00 5.76477230e-01 -1.74007809e+00] [ 1.09885240e+00 8.93145978e-01 9.16074753e-01 ... -1.63339448e+00 -1.71745586e+00 4.84252602e-01]] ... [[-2.35857278e-01 -1.22340047e+00 -2.42771339e+00 ... 3.90725303e+00 1.15300977e+00 2.58196807e+00] [-2.21831929e-02 3.50287229e-01 -1.76657832e+00 ... 
-2.01037216e+00 3.44488360e-02 -1.90589285e+00] [ 2.17674017e+00 1.98672175e-01 7.59069443e-01 ... -1.48064244e+00 2.44725490e+00 2.00600907e-01] ... [ 1.73337781e+00 9.04990077e-01 1.70072424e+00 ... 3.65089273e+00 3.34482849e-01 3.82154472e-02] [-2.31467843e+00 -1.79869461e+00 -1.63528287e+00 ... 1.04016709e+00 -1.51232731e+00 -1.21414411e+00] [-1.35652232e+00 -2.02994275e+00 -1.07565963e+00 ... 6.34643435e-01 -3.15915555e-01 6.67161405e-01]] [[ 1.30173814e+00 1.53135157e+00 2.16155076e+00 ... 5.23101091e-01 9.48294520e-01 8.36419344e-01] [ 5.06739676e-01 -3.41691637e+00 -1.77485871e+00 ... -2.24811649e+00 2.61012465e-01 1.01682377e+00] [-1.53163254e+00 2.16466236e+00 1.35442233e+00 ... -9.35806632e-01 -1.12106693e+00 -1.14842045e+00] ... [-1.29456744e-01 1.53217459e+00 1.81920803e+00 ... -8.57538357e-02 -9.93948281e-02 7.50858724e-01] [ 1.20796132e+00 -2.82449222e+00 6.34399235e-01 ... 4.09614980e-01 -2.11025906e+00 3.53378117e-01] [ 1.74582884e-01 -3.55744004e-01 1.62532818e+00 ... 4.27367575e-02 -2.18408561e+00 -1.11404705e+00]] [[ 4.34927911e-01 3.01222086e-01 -2.67906737e+00 ... 3.41931939e-01 1.83515239e+00 -1.25213969e+00] [-9.65169787e-01 -7.48881459e-01 -2.01444483e+00 ... -6.15200698e-01 1.31395960e+00 -1.98249415e-01] [ 1.43748808e+00 1.01014757e+00 -1.24878693e+00 ... -2.70347834e+00 5.77812493e-01 2.78454751e-01] ... [ 5.76464891e-01 3.33814994e-02 -1.73738018e-01 ... 3.55292106e+00 1.27176499e+00 -6.60009384e-01] [ 4.29419661e+00 -1.76230341e-01 -2.30796352e-01 ... -1.51626551e+00 1.81802392e+00 3.78912129e-02] [-1.28716147e+00 2.22069359e+00 -3.86576317e-02 ... -1.94136858e+00 -2.75308466e+00 -5.78711808e-01]]] [[[ 1.07406877e-01 2.07179412e-01 1.02265418e-01 ... 4.09539431e-01 2.70925343e-01 2.49229535e-01] [-4.21925895e-02 3.77337933e-01 -8.31489861e-02 ... 2.87261039e-01 -1.59024104e-01 -1.31089259e-02] [-8.24662820e-02 3.79648656e-01 1.01027720e-01 ... 5.51531732e-01 -2.17972144e-01 4.27517802e-01] ... 
[ 1.14563935e-01 -1.13502070e-01 -1.30654424e-02 ... 3.73650402e-01 -2.25225538e-02 -5.78867197e-02] [ 6.33388311e-02 3.84782441e-02 -3.66214663e-01 ... 1.45443335e-01 2.28299320e-01 4.27336283e-02] [ 4.95216191e-01 3.44404966e-01 9.55394134e-02 ... 3.24469596e-01 -3.04989159e-01 1.01211205e-01]] [[ 3.40687364e-01 -1.94216706e-02 1.06275156e-01 ... 1.01467490e-03 8.76223296e-02 -5.04740002e-03] [ 2.14386396e-02 2.04495713e-01 3.04981947e-01 ... 3.46315235e-01 3.31131488e-01 -2.48475090e-01] [ 1.99587971e-01 -4.85887825e-01 2.08978932e-02 ... 4.81452763e-01 -2.41028830e-01 3.06345522e-01] ... [ 3.89919072e-01 1.02527644e-02 4.93333280e-01 ... 8.76353607e-02 -9.32045132e-02 6.96782917e-02] [-2.64398521e-03 1.78582191e-01 -2.39905268e-01 ... 5.11705160e-01 9.81750637e-02 1.33200809e-01] [-6.80734916e-03 1.42219320e-01 1.80987045e-01 ... 3.16408187e-01 2.63391912e-01 -1.64197028e-01]] [[-1.60464659e-01 -3.10177684e-01 -8.07456002e-02 ... 3.73146117e-01 9.07404542e-01 2.47160330e-01] [-8.28060433e-02 1.78926244e-01 4.20966566e-01 ... 2.17933789e-01 6.77931666e-01 -1.99716724e-02] [ 2.88246125e-01 -2.43584123e-02 1.34176672e-01 ... 9.91350412e-02 2.04053238e-01 -3.10690492e-01] ... [-5.27129352e-01 -2.09954992e-01 -4.83720154e-01 ... 5.84524013e-02 -2.00989470e-01 2.03832403e-01] [ 5.17278276e-02 5.12061954e-01 -1.34257719e-01 ... -3.75781864e-01 -2.49467492e-01 1.51372924e-01] [ 1.70403585e-01 -3.08555692e-01 4.03284162e-01 ... -5.80338109e-03 -9.72378105e-02 -4.34447140e-01]] ... [[ 2.61567086e-01 2.36380890e-01 2.84580380e-01 ... 1.99668720e-01 6.23679906e-02 5.61355241e-03] [ 2.38831282e-01 -7.18777813e-03 -2.60362476e-01 ... -7.38632903e-02 2.60817975e-01 -7.34897479e-02] [-4.39301938e-01 1.40834302e-01 1.07663348e-01 ... -3.53253365e-01 2.37275839e-01 2.64045820e-02] ... [ 6.11049868e-02 -3.25518459e-01 -2.17779011e-01 ... 4.55466807e-01 -1.18115649e-01 -3.02834451e-01] [-3.27423029e-02 1.86021864e-01 -7.53477439e-02 ... 
2.42824346e-01 1.16980568e-01 -6.60044611e-01] [ 1.23004816e-01 -1.60478562e-01 1.52183948e-02 ... -2.26722080e-02 -3.60325724e-01 -3.66530418e-01]] [[-3.04402411e-01 1.02661639e-01 3.24482359e-02 ... -1.56290859e-01 -3.44042659e-01 2.52844810e-01] [-5.99563479e-01 4.08178508e-01 3.99450324e-02 ... -5.31570435e-01 1.51214436e-01 8.27037022e-02] [-5.49557149e-01 -1.93313770e-02 2.94215858e-01 ... -2.79922426e-01 -2.56414890e-01 4.97647941e-01] ... [-6.37507811e-02 9.20312181e-02 -1.88558191e-01 ... 2.65264422e-01 1.21781230e-02 -4.51478623e-02] [ 7.45195448e-02 9.85904317e-03 -3.61494631e-01 ... 1.69825092e-01 -2.73005724e-01 4.15016785e-02] [-2.84138978e-01 3.50770473e-01 2.53351837e-01 ... -1.83078945e-01 2.19266508e-02 1.45366171e-03]] [[ 1.11334302e-01 2.62191087e-01 -3.40922512e-02 ... 1.46382689e-01 1.35952309e-01 1.27129197e-01] [ 2.87435025e-01 -2.99005777e-01 -4.63859826e-01 ... -6.88378662e-02 3.00633430e-01 2.73246169e-01] [ 1.48712635e-01 6.56703353e-01 4.22972143e-02 ... 4.19295192e-01 1.82251528e-01 -4.76250857e-01] ... [-1.47817731e-01 -7.44021773e-01 -4.03704286e-01 ... 6.44485876e-02 -2.18463406e-01 -2.20580712e-01] [ 1.75945640e-01 4.17461507e-02 -2.72970684e-02 ... -2.71933347e-01 -7.02527240e-02 -1.91936240e-01] [-6.26024678e-02 1.77812167e-02 -2.04455629e-02 ... 1.76438406e-01 -2.29245439e-01 2.12329552e-02]]] [[[-1.35968134e-01 6.25366151e-01 9.43184078e-01 ... -1.12348301e-02 -5.30517876e-01 4.62494612e-01] [-5.60239792e-01 7.26440996e-02 -2.16966823e-01 ... -4.24548745e-01 4.67767239e-01 -4.49709982e-01] [-4.94503349e-01 -2.30010554e-01 -1.02518767e-01 ... 5.24603724e-01 3.52637209e-02 3.00646108e-02] ... [ 5.44021606e-01 -5.49517453e-01 5.26104629e-01 ... -2.49392495e-01 6.34565830e-01 7.54507259e-02] [-6.96379364e-01 4.06619757e-02 -1.33858696e-01 ... 8.64381373e-01 -3.71261060e-01 -3.93358730e-02] [-1.79394037e-01 1.36110917e-01 4.01882887e-01 ... 
-6.88948989e-01 -2.14626148e-01 -6.66806877e-01]] [[ 3.33190501e-01 6.81059480e-01 -5.09137139e-02 ... -4.73149449e-01 2.77370334e-01 1.82872862e-01] [ 2.05930278e-01 -5.20598888e-01 -2.19494358e-01 ... 8.75794664e-02 -3.01534235e-01 -1.58988088e-01] [-4.00194526e-02 -5.75734198e-01 4.75563794e-01 ... 8.59500587e-01 -3.70291531e-01 1.47412702e-01] ... [ 1.52400494e-01 4.86494184e-01 -4.29128259e-02 ... 6.80501342e-01 -2.04062499e-02 -2.51973897e-01] [ 5.51607251e-01 1.64151683e-01 5.92736661e-01 ... -1.22208394e-01 -1.21143773e-01 -2.19669804e-01] [-1.15082271e-01 4.45515662e-01 -5.34362972e-01 ... 3.88883829e-01 1.35285765e-01 -1.94600448e-01]] [[-6.06801212e-01 3.78351331e-01 -7.81048417e-01 ... -3.28748256e-01 2.34359294e-01 -4.28747386e-01] [-3.09164822e-01 -2.09451616e-01 3.90910655e-02 ... 3.52099121e-01 -2.85565227e-01 4.12723094e-01] [-3.78087349e-02 -5.77322878e-02 2.04746246e-01 ... -5.94185770e-01 3.15143257e-01 1.78214490e-01] ... [ 2.61632502e-01 -3.20982724e-01 4.88374121e-02 ... -7.73934126e-02 1.28036693e-01 -4.41223055e-01] [-1.28204748e-01 -1.72098391e-02 -2.83659905e-01 ... -3.64254299e-03 -2.22570300e-01 8.17884654e-02] [ 1.47241309e-01 4.68568541e-02 1.08458750e-01 ... 1.16619244e-01 -2.35206962e-01 6.72969222e-01]] ... [[-3.29459846e-01 4.05603379e-01 2.34173834e-01 ... 2.15899006e-01 -2.39511222e-01 -6.15052506e-02] [ 3.19939405e-01 -1.60257183e-02 1.41881198e-01 ... 1.99391350e-01 -1.55946044e-02 -5.00738025e-01] [ 1.82606652e-01 9.33066905e-01 8.14356357e-02 ... 4.29632932e-01 2.86777258e-01 -6.43319130e-01] ... [ 4.57351327e-01 -6.77713811e-01 -7.42777511e-02 ... -3.37178946e-01 -1.55540958e-01 -8.75741690e-02] [-1.28339946e-01 -2.58131504e-01 2.23852217e-01 ... 7.05885351e-01 3.92938286e-01 -8.70499909e-02] [ 6.14994109e-01 2.78328627e-01 1.55107351e-02 ... -2.29819909e-01 3.89592558e-01 -4.95328009e-02]] [[-2.87645739e-02 -1.33618146e-01 3.00883800e-01 ... 
-4.00303543e-01 2.57524282e-01 4.85178202e-01] [ 1.50724262e-01 -6.66221902e-02 1.27411738e-01 ... -1.18685029e-01 -4.20300886e-02 -5.91409564e-01] [-4.10127282e-01 4.24844831e-01 4.06674385e-01 ... -3.09808195e-01 7.30863094e-01 -2.08265379e-01] ... [-8.68283689e-01 8.30114126e-01 4.64910537e-01 ... 6.67213202e-01 -4.18082297e-01 -5.29533565e-01] [ 1.08533829e-01 -1.85865358e-01 -2.94567555e-01 ... -1.96645558e-01 -5.98153137e-02 -1.16753753e-03] [-3.64682317e-01 5.49582355e-02 -3.79468828e-01 ... 4.61543240e-02 4.63128418e-01 -4.49732952e-02]] [[-1.75279453e-01 3.21429819e-01 2.63018847e-01 ... 1.20360918e-01 -1.28071815e-01 2.03051209e-01] [-9.96375456e-02 1.67919159e-01 -4.28413689e-01 ... -3.61604951e-02 4.55010235e-02 -6.73886910e-02] [-5.14769793e-01 -3.15664321e-01 -6.18458986e-02 ... 1.13414988e-01 -1.84934244e-01 4.53423969e-02] ... [-1.63398430e-01 -9.68708098e-01 4.22402024e-01 ... 1.35632291e-01 2.52879318e-02 4.91164595e-01] [ 2.47005254e-01 1.73465148e-01 -7.90391192e-02 ... -1.63105816e-01 4.17142808e-01 7.53288150e-01] [-1.83621690e-01 4.93601104e-03 -7.93889016e-02 ... -1.90621436e-01 2.76384741e-01 2.36790821e-01]]] [[[ 9.11736041e-02 2.03661192e-02 1.39107063e-01 ... 1.29723653e-01 2.39161298e-01 -2.55767093e-03] [ 1.04862479e-02 1.44948646e-01 8.12522136e-03 ... -1.39695257e-02 -1.22679979e-01 4.52956595e-02] [ 9.92438719e-02 -1.17982157e-01 -1.79135174e-01 ... -1.03719182e-01 -1.20318219e-01 -2.99949422e-02] ... [-4.27052081e-02 1.15652181e-01 1.37033239e-01 ... 4.53875586e-02 -3.39219898e-01 -3.06679104e-02] [ 4.76909168e-02 1.17099233e-01 -1.51863679e-01 ... -1.16719231e-01 -4.89949202e-03 4.17382941e-02] [ 8.63662176e-03 6.58061653e-02 -1.21859185e-01 ... -9.03120637e-03 7.15331063e-02 -2.58710925e-02]] [[-2.45213330e-01 -4.28433158e-02 -1.68418571e-01 ... 2.19779506e-01 -9.94947404e-02 -3.89375282e-03] [-1.03971742e-01 -3.00233185e-01 -4.05678786e-02 ... 
-2.41939172e-01 -1.41834974e-01 1.32404808e-02] [-5.62499557e-03 2.21393839e-01 2.37318158e-01 ... -1.81637313e-02 5.11890501e-02 2.26956513e-02] ... [-1.13816820e-01 1.07804842e-01 4.15854156e-02 ... -1.02773495e-01 4.22755070e-02 9.64306761e-04] [ 8.48660767e-02 -2.04703528e-02 4.20553312e-02 ... -6.33235127e-02 -2.26725176e-01 -3.49880904e-02] [ 1.53436899e-01 -1.34181485e-01 -2.25791067e-01 ... -5.95451519e-03 -4.69867401e-02 9.96362511e-03]] [[ 3.78717259e-02 1.14749849e-01 -1.97572857e-02 ... 1.34971157e-01 -2.23236084e-01 1.60919905e-01] [ 1.21105812e-01 -5.15929200e-02 -6.37795106e-02 ... -5.40913176e-03 -3.70480210e-01 -1.22538522e-01] [-1.75665513e-01 8.78761336e-02 -2.38835607e-02 ... 2.67084744e-02 6.62704110e-02 -1.20251432e-01] ... [ 4.70320322e-02 -2.58062072e-02 1.49737718e-02 ... -1.41903624e-01 2.88937390e-01 -1.57774344e-01] [-1.17808633e-01 -5.72585985e-02 3.03041562e-02 ... -2.64939517e-02 1.24040790e-01 5.43225892e-02] [ 1.37477770e-01 -6.84426650e-02 -2.16388315e-01 ... 2.80523133e-02 -9.18397978e-02 7.36398529e-03]] ... [[-1.70405358e-02 9.59120467e-02 4.68553901e-02 ... 1.99813783e-01 -3.32567334e-01 5.07591292e-03] [-8.15230608e-02 -1.07028037e-01 5.13694547e-02 ... 8.90148208e-02 1.69409439e-02 8.18916783e-02] [ 1.01829372e-01 1.22298785e-01 7.02143386e-02 ... -1.17492989e-01 1.61919758e-01 -1.85034379e-01] ... [ 1.22506402e-01 6.10285662e-02 -1.62745833e-01 ... -2.71772921e-01 -7.45084062e-02 2.68985033e-02] [-2.07667366e-01 1.40142769e-01 -1.71612963e-01 ... 1.95045412e-01 -2.12143034e-01 -1.62920415e-01] [-1.33962631e-01 -8.92287269e-02 -8.83040875e-02 ... 1.76474124e-01 1.05384402e-01 -1.85645685e-01]] [[ 1.02928653e-01 -6.24332987e-02 4.00788300e-02 ... -6.24153875e-02 -1.83058441e-01 4.15574126e-02] [ 9.53485025e-04 -1.17883250e-01 1.17716707e-01 ... -6.09827340e-02 4.51199189e-02 -1.85563102e-01] [-2.46140324e-02 -7.61262327e-02 -2.55593508e-01 ... 5.85599616e-02 1.53602988e-01 -1.49294287e-01] ... 
[ 1.69266120e-01 -1.75237939e-01 -5.84493577e-02 ... -3.38203497e-02 -9.94715020e-02 -7.68049806e-02] [ 2.70193070e-02 -9.55166519e-02 -7.81800039e-03 ... 1.62533909e-01 1.59103766e-01 2.68051207e-01] [ 3.57230186e-01 -8.66146982e-02 2.87712514e-01 ... -1.29374817e-01 -3.14381033e-01 -7.99971372e-02]] [[-8.02712590e-02 3.12662050e-02 -4.67269160e-02 ... 2.20497116e-01 -1.41357213e-01 -5.16740680e-02] [-7.80211762e-02 -1.92253783e-01 1.51371315e-01 ... -1.14554875e-01 1.65514443e-02 1.13305546e-01] [-4.01388183e-02 -6.89615458e-02 -1.69777200e-01 ... -2.97792375e-01 1.07722431e-01 -3.49744052e-01] ... [ 9.88752348e-04 -1.55010121e-02 1.13837622e-01 ... -3.99232842e-02 -1.46585613e-01 -5.17342463e-02] [ 1.81548297e-01 -6.05103672e-02 -2.80459821e-01 ... 7.41038993e-02 -7.61975423e-02 -1.17926123e-02] [-1.11202218e-01 -1.55270725e-01 -3.86416018e-02 ... 1.13672554e-01 2.37694886e-02 -1.58460766e-01]]] [[[-1.01863194e+00 6.53663576e-01 -5.22093415e-01 ... 9.27320957e-01 1.37532437e+00 -2.28467569e-01] [-1.31861657e-01 -1.02261996e+00 -1.34353831e-01 ... -2.18323752e-01 8.77006114e-01 -5.47593459e-02] [ 7.89414108e-01 -4.65194106e-01 1.77177370e-01 ... 1.15243506e+00 -8.87582064e-01 6.76881552e-01] ... [ 1.09226310e+00 -8.25257361e-01 -7.56569445e-01 ... -1.24982104e-01 -5.68543315e-01 -6.49464130e-01] [ 9.90761399e-01 -1.63956553e-01 -1.54075432e+00 ... -4.57694679e-01 3.87983769e-01 1.14908838e+00] [ 7.04696774e-01 3.87886167e-02 -2.31588170e-01 ... 2.51970083e-01 4.36544895e-01 6.65237308e-01]] [[-5.00961721e-01 8.93413663e-01 2.45314389e-01 ... 9.88140762e-01 -2.72250324e-02 -8.70201766e-01] [ 4.98390161e-02 -4.19390440e-01 -3.86740506e-01 ... 2.25761726e-01 2.11806893e-02 1.15281105e-01] [ 8.32277060e-01 2.66836822e-01 6.80307329e-01 ... -9.12977993e-01 -5.90412080e-01 2.39413404e+00] ... [-5.37915289e-01 6.53255507e-02 7.05167770e-01 ... 7.76218653e-01 -3.81138436e-02 7.53058910e-01] [-1.13995790e+00 -3.10423940e-01 -4.28528100e-01 ... 
6.00819476e-02 1.55287758e-01 -5.57671309e-01] [-1.28653407e+00 -1.20016694e+00 6.71125054e-01 ... -5.71880043e-01 -1.77857857e-02 -4.97389853e-01]] [[-2.62596428e-01 2.19048902e-01 -7.73709714e-01 ... 7.82203794e-01 5.90757951e-02 -1.09196305e+00] [ 6.02557957e-01 6.27997994e-01 -5.36615610e-01 ... -6.12440228e-01 -4.22186494e-01 -8.39251161e-01] [-9.92839634e-01 4.51897413e-01 -1.20758343e+00 ... -1.56831488e-01 2.20579386e-01 9.34211552e-01] ... [-1.17347288e+00 5.87191701e-01 -6.48739576e-01 ... 5.10661602e-01 4.27429020e-01 1.26169372e+00] [ 6.72404110e-01 -1.54478240e+00 -8.93325448e-01 ... 5.84926903e-01 1.29218891e-01 -2.38897413e-01] [-5.67310154e-01 7.08154976e-01 -8.15765262e-01 ... 3.10565859e-01 -3.01971197e-01 -7.76131451e-02]] ... [[-6.85860589e-02 2.57627722e-02 3.52290392e-01 ... 6.18332028e-01 -7.98919618e-01 6.17546499e-01] [ 7.31249511e-01 -2.22867262e-02 -5.15805840e-01 ... -1.19225883e+00 1.90642655e-01 1.27405035e+00] [-1.27407634e+00 -1.77517921e-01 -2.84047186e-01 ... 7.51428664e-01 -8.37766230e-02 -4.73061025e-01] ... [ 4.21387047e-01 -1.09421611e+00 -6.05145842e-02 ... -4.50402707e-01 -5.79067051e-01 -2.59574264e-01] [ 6.44991845e-02 4.67790335e-01 -1.75334036e-01 ... 7.85636961e-01 -2.11147130e-01 -3.41853276e-02] [-3.93238485e-01 -8.83513689e-02 1.29843354e+00 ... -1.32810310e-01 1.37779459e-01 4.22486514e-01]] [[-2.15547644e-02 5.98988056e-01 1.62387103e-01 ... 1.22747326e+00 9.94690359e-02 9.42294002e-02] [-1.10383880e+00 1.49480700e-01 -1.85283441e-02 ... -1.56934842e-01 7.84673154e-01 -2.26230577e-01] [ 8.99554074e-01 2.15219900e-01 -4.34319347e-01 ... 5.53930759e-01 -1.43581748e+00 -1.07015169e-03] ... [-2.80703515e-01 -1.19561267e+00 5.13801873e-01 ... -7.32125103e-01 9.82073188e-01 -4.05050963e-01] [ 7.29610324e-01 -1.51515245e+00 -8.37739229e-01 ... 5.18954992e-01 -3.02161276e-01 5.96232116e-01] [ 3.19492280e-01 6.35945857e-01 6.91047490e-01 ... 
-9.16669488e-01 -5.65297425e-01 -1.68558389e-01]] [[ 1.34295249e+00 6.97408915e-01 -5.46457589e-01 ... -3.06893200e-01 1.70222316e-02 -5.85207045e-01] [ 4.24068868e-01 1.19290933e-01 1.62892804e-01 ... -1.73554078e-01 3.01393688e-01 1.19045210e+00] [-2.27936119e-01 1.19242787e+00 4.81966138e-01 ... 5.19293308e-01 7.84431994e-02 -5.05185500e-02] ... [-2.32486561e-01 -6.12520874e-01 -2.67888397e-01 ... -1.44394851e+00 6.86191082e-01 -5.84946349e-02] [-4.35270041e-01 -8.40262324e-02 -1.21768534e+00 ... -4.63556767e-01 4.92211699e-01 -3.09680432e-01] [-5.65126054e-02 -2.70484984e-02 5.88811517e-01 ... 7.19206750e-01 6.82039380e-01 -3.02687913e-01]]] [[[-4.82974127e-02 5.39997280e-01 -1.34492785e-01 ... 1.26238123e-01 3.72275226e-02 -1.21611804e-01] [ 2.51574200e-02 -1.38377920e-01 3.37819867e-02 ... -4.53803651e-02 5.15217721e-01 -4.19796780e-02] [ 9.87837166e-02 2.23897517e-01 8.53785649e-02 ... -3.24112713e-01 3.20017666e-01 1.96231514e-01] ... [ 2.21057728e-01 2.30893523e-01 -4.13466506e-02 ... 1.92305502e-02 3.12453717e-01 -2.91322112e-01] [ 1.54425964e-01 8.33500803e-01 4.60118130e-02 ... 6.21851027e-01 3.10559779e-01 -1.92904413e-01] [ 8.06837559e-01 3.31882387e-01 -1.82024270e-01 ... -3.62281293e-01 8.56384337e-02 -3.32459152e-01]] [[-2.59919137e-01 2.87021488e-01 -1.97259530e-01 ... -3.00591886e-01 2.16976926e-01 -1.76477149e-01] [ 3.08504164e-01 -9.13448811e-01 -3.09679180e-01 ... 1.04368096e-02 -1.40023187e-01 1.55079663e-01] [-4.82134730e-01 3.95994097e-01 5.71642876e-01 ... 5.20662487e-01 -6.69345737e-01 1.85667247e-01] ... [ 6.60773039e-01 -3.80075157e-01 -3.06011409e-01 ... -4.78668720e-01 2.08705254e-02 4.05913949e-01] [-1.62295505e-01 4.00520325e-01 -1.18635468e-01 ... -4.22158390e-01 1.71572745e-01 1.07453093e-01] [-3.29790831e-01 1.31467357e-01 3.71128470e-01 ... 2.46181279e-01 -1.63854375e-01 -2.10102826e-01]] [[ 4.01324667e-02 9.85762943e-03 7.36965314e-02 ... 
-1.60349831e-02 3.66914183e-01 2.00921983e-01] [ 9.44238976e-02 -2.39800200e-01 -2.91623801e-01 ... -1.11377016e-01 1.26436815e-01 -1.95322499e-01] [ 1.27001390e-01 5.58832400e-02 3.27845693e-01 ... 6.19626790e-02 -3.56819965e-02 -1.59779057e-01] ... [-1.70019902e-02 -1.62176609e-01 8.79295617e-02 ... 4.76136684e-01 8.57589245e-02 8.17048669e-01] [-2.60322899e-01 -5.56116879e-01 -9.19800848e-02 ... 2.05402955e-01 -1.96194172e-01 -1.37846276e-01] [ 6.01586938e-01 5.43356121e-01 -5.88856280e-01 ... -2.93131381e-01 -3.27936023e-01 -2.22262323e-01]] ... [[-1.06070630e-01 -3.11652124e-01 -5.95086277e-01 ... 5.34519255e-01 8.50937217e-02 -2.75868684e-01] [ 3.63539942e-02 -8.25728834e-01 -6.67365640e-02 ... 4.88519594e-02 -5.22270538e-02 -1.00190938e-01] [ 7.74973273e-01 -2.10250333e-01 -4.62966412e-02 ... -6.12287343e-01 -2.16296509e-01 3.05183046e-02] ... [ 9.54248011e-01 -5.15932977e-01 -8.87079760e-02 ... -1.36587262e-01 1.15526542e-01 -2.66947836e-01] [-1.87695384e-01 -3.79780740e-01 -1.05733402e-01 ... 3.38740796e-02 -1.81372792e-01 -5.21829825e-05] [ 3.49552512e-01 -1.28113747e-01 2.95951486e-01 ... 3.53334218e-01 1.11741625e-01 -2.70660132e-01]] [[ 1.76983178e-01 -1.62015315e-02 1.76741242e-01 ... 4.76656646e-01 -8.31745327e-01 7.41026580e-01] [-3.61903548e-01 6.79343820e-01 -1.15207136e-01 ... 5.07466435e-01 -5.20252347e-01 2.01595739e-01] [ 5.37407659e-02 4.09339100e-01 -2.14424148e-01 ... -9.35866833e-02 -4.22132730e-01 -4.49579269e-01] ... [ 2.65679270e-01 1.73158258e-01 1.26378834e-01 ... -3.36624719e-02 5.94922714e-02 -4.17444110e-03] [-8.23392570e-02 1.51634552e-02 -3.94702077e-01 ... -5.16317189e-02 3.08389869e-02 2.10362077e-01] [-1.70503482e-01 -3.35589379e-01 3.80115062e-01 ... 3.70600075e-01 3.74375761e-01 7.02517945e-03]] [[ 1.68702409e-01 5.65455139e-01 3.13532919e-01 ... -3.01687479e-01 -3.23039860e-01 -1.68918639e-01] [ 4.40659979e-03 5.26934564e-02 1.43682230e-02 ... 
-5.49092442e-02 -1.63734540e-01 -3.67026836e-01] [ 4.17512096e-02 5.83072722e-01 6.08185232e-01 ... -1.62454415e-02 1.89281270e-01 -4.90500659e-01] ... [-2.38756120e-01 -3.90972078e-01 -1.68363512e-01 ... -5.54421127e-01 3.24646570e-02 -1.45127848e-01] [ 8.36930931e-01 8.83968472e-02 3.31531972e-01 ... -3.27211976e-01 -9.84826088e-02 4.84653682e-01] [-7.21233070e-01 1.21124968e-01 4.82184052e-01 ... 1.28827766e-01 -4.09393311e-01 7.20118806e-02]]]] [[[[-7.86094189e-01 -3.12043047e+00 3.23608756e-01 ... -8.34668875e-02 -5.99619389e-01 1.64115834e+00] [-1.90715265e+00 1.98695898e+00 7.25875199e-01 ... -2.59956002e-01 -1.09928556e-01 4.13318962e-01] [ 7.27092505e-01 6.10130131e-01 -2.51107645e+00 ... -6.05393887e-01 1.49770871e-01 1.49016738e+00] ... [ 2.01164317e+00 1.70175588e+00 -2.94679433e-01 ... -1.18849170e+00 1.66818106e+00 1.88446462e+00] [ 6.50909096e-02 -6.58182144e-01 -2.73654485e+00 ... 2.62699664e-01 1.96845138e+00 7.23549247e-01] [ 8.01869512e-01 -8.09260488e-01 1.94183540e+00 ... 5.15583515e-01 6.33293331e-01 1.05724919e+00]] [[ 3.88287038e-01 -5.65221131e-01 -2.00964713e+00 ... -3.74479198e+00 -3.55605990e-01 -4.25562084e-01] [ 1.17839110e+00 -7.20070302e-01 1.02081430e+00 ... -5.37091851e-01 -1.84626132e-01 2.30257964e+00] [-2.47772217e+00 -5.69970012e-01 1.34643388e+00 ... -1.11837018e+00 -4.57782686e-01 1.02366698e+00] ... [ 1.82343745e+00 -1.47500837e+00 -1.47170854e+00 ... -7.91400135e-01 7.59089112e-01 -8.62568080e-01] [ 2.95756552e-02 1.36849666e+00 1.51493156e+00 ... -5.00155210e-01 5.27917981e-01 -3.56587082e-01] [-2.55355477e+00 -1.92452908e+00 -1.37244821e+00 ... 8.66547942e-01 -5.09243727e-01 -3.53269672e+00]] [[ 1.01305759e+00 -3.78335625e-01 -1.31220222e+00 ... 1.41989553e+00 -9.17964131e-02 -1.14951479e+00] [ 1.92134231e-01 -2.00282884e+00 3.23741287e-01 ... -1.19463146e+00 -1.05967307e+00 -4.97592717e-01] [-2.11231962e-01 2.82836986e+00 -2.79877234e+00 ... 1.73481047e+00 -9.69408274e-01 6.72143638e-01] ... 
[-1.11477685e+00 1.62594104e+00 -1.41199291e-01 ... 2.44737029e+00 7.63935924e-01 -3.38018000e-01] [ 3.13794327e+00 1.65362263e+00 -3.56887192e-01 ... 4.36582446e-01 1.19648492e+00 1.65876389e+00] [ 6.56285465e-01 -3.13694930e+00 -6.63014948e-01 ... 4.96493518e-01 -4.63851020e-02 1.94467223e+00]] ... [[ 3.18453646e+00 -1.76852584e+00 2.92259431e+00 ... 3.51285964e-01 -1.86768210e+00 2.79080820e+00] [-2.98459196e+00 1.78966880e+00 2.73460001e-01 ... -1.97610366e+00 5.30282617e-01 -6.09998584e-01] [-1.82865118e-03 2.90633941e+00 1.20181441e+00 ... 1.45162117e+00 -7.94723570e-01 5.14125109e-01] ... [-5.76395810e-01 -3.41207534e-01 9.61261511e-01 ... -1.72940403e-01 -5.50500397e-03 1.04014143e-01] [-1.67066634e+00 7.76849315e-02 1.25782824e+00 ... -2.83405995e+00 1.15007472e+00 -6.58607036e-02] [ 3.76643658e-01 -1.18836892e+00 1.58258271e+00 ... 1.62887633e+00 -2.90202093e+00 -2.30898690e+00]] [[ 2.22520542e+00 -1.72807229e+00 2.28317618e+00 ... -5.95778942e-01 -1.08217084e+00 2.76352286e-01] [ 1.69984877e-01 2.08753657e+00 1.11687076e+00 ... -1.07959175e+00 -1.98688257e+00 4.07072496e+00] [ 1.73000300e+00 2.39697385e+00 -7.82222867e-01 ... 6.81105852e-01 1.36486435e+00 -2.39640683e-01] ... [-1.78545487e+00 -1.94833827e+00 -1.75474846e+00 ... 7.70991683e-01 5.88474512e-01 -2.26664707e-01] [-9.77523565e-01 -4.76596147e-01 -4.67584878e-01 ... -3.04604590e-01 -7.84535110e-01 -2.10411167e+00] [ 6.99572682e-01 -1.90516567e+00 3.46003318e+00 ... 2.77444029e+00 -6.86022997e-01 1.79297411e+00]] [[ 1.89424944e+00 8.86501253e-01 -7.00195372e-01 ... -1.31490016e+00 3.91293705e-01 1.65842116e+00] [ 3.00967908e+00 -1.54819524e+00 -2.03699732e+00 ... 1.18767250e+00 -4.74334180e-01 -1.66292667e+00] [-2.32810497e+00 3.33280635e+00 -7.47939125e-02 ... -9.24772263e-01 -3.72742862e-01 9.88260627e-01] ... [ 2.77533144e-01 -2.56295776e+00 -3.74549657e-01 ... -3.60924035e-01 -4.37185884e-01 2.23839641e+00] [-1.26478958e+00 7.53866315e-01 -3.86801660e-01 ... 
-2.01512739e-01 1.76679969e+00 1.76944995e+00] [-5.47532737e-01 -9.94320679e-03 2.80715078e-01 ... -2.75891733e+00 1.48261338e-01 -4.86031771e-02]]] [[[-1.10623606e-01 1.35796532e-01 1.14771456e-01 ... -1.16226874e-01 -4.05804962e-01 -3.39561582e-01] [-4.74921733e-01 -8.27078149e-02 4.87293929e-01 ... 1.76821768e-01 1.00435652e-01 1.64577216e-01] [ 2.71899819e-01 -3.63966197e-01 1.43082097e-01 ... -1.13594890e-01 1.76282227e-01 -2.25813001e-01] ... [-1.43569410e-01 -3.09771299e-01 5.37549675e-01 ... 5.17846495e-02 -2.70816296e-01 3.01279068e-01] [ 3.08837920e-01 -2.61859238e-01 1.00057848e-01 ... -2.40669966e-01 -2.08606556e-01 -5.35124429e-02] [ 1.66282263e-02 -4.26845290e-02 -9.28573906e-02 ... 5.22769928e-01 -1.85762912e-01 1.94316462e-01]] [[-3.02889869e-02 1.64940372e-01 3.09194654e-01 ... 3.43349099e-01 -4.71096896e-02 -1.53183386e-01] [-2.89502800e-01 3.59569997e-01 -5.22086322e-01 ... -3.52885932e-01 -2.09113866e-01 1.79931089e-01] [-1.62177086e-01 -1.93781289e-03 7.42618963e-02 ... -5.73304534e-01 -2.49151811e-01 -4.04058129e-01] ... [ 5.30851722e-01 -1.05129950e-01 8.15704688e-02 ... 2.71140635e-01 -6.30713031e-02 2.18209073e-01] [-3.92789133e-02 2.64105946e-02 -3.00179064e-01 ... -4.85498548e-01 2.87082732e-01 6.50474802e-02] [-2.25786939e-01 1.18268259e-01 5.45907766e-02 ... 5.92940636e-02 1.55335709e-01 2.48710126e-01]] [[-1.66276872e-01 3.61532629e-01 -1.83852360e-01 ... 5.74236363e-02 4.43501621e-02 -4.50059921e-01] [-3.45676154e-01 -4.40427214e-01 -4.08142477e-01 ... 1.71455383e-01 -1.27157956e-01 2.26376593e-01] [-2.39490017e-01 -1.72339171e-01 -4.55583513e-01 ... 2.69790590e-01 -1.90068945e-01 6.17985845e-01] ... [ 8.39300677e-02 -4.71170336e-01 2.60978520e-01 ... 3.90707344e-01 -7.66306594e-02 -2.22351134e-01] [-8.00869763e-02 1.79368630e-01 7.38596354e-05 ... 2.70525753e-01 1.67728603e-01 1.66805446e-01] [ 4.37503457e-01 1.75528437e-01 1.50144566e-02 ... -1.46481600e-02 5.58138430e-01 2.79817283e-01]] ... 
[[-1.09959066e-01 -2.91662157e-01 -3.86611879e-01 ... 3.40314843e-02 6.73939884e-02 -2.51438856e-01] [ 5.10138094e-01 -4.33438569e-02 -3.71405989e-01 ... 3.77057076e-01 -1.24180265e-01 -1.11980692e-01] [ 3.91021788e-01 8.07000846e-02 -9.05704573e-02 ... -2.18519568e-01 -1.50761738e-01 -3.37951779e-01] ... [-4.96855497e-01 -2.74040282e-01 -1.25061750e-01 ... -5.46596609e-02 -2.10833609e-01 3.92046422e-01] [-1.45922616e-01 -5.14700413e-02 3.32109571e-01 ... 3.67341965e-01 6.00812025e-02 1.03780918e-01] [ 3.47863734e-02 -2.74266809e-01 3.00605834e-01 ... 3.66746008e-01 2.05260366e-01 -2.23191008e-01]] [[ 1.22990884e-01 -2.75004148e-01 -2.67805666e-01 ... -2.58208752e-01 2.46792361e-01 2.56434917e-01] [ 4.43510085e-01 -8.56182305e-05 -2.24120542e-01 ... 5.44628382e-01 -1.10128708e-01 2.93230116e-01] [ 5.15899844e-02 3.21058780e-01 -7.54663646e-02 ... -2.72903770e-01 1.20327219e-01 3.81141305e-02] ... [ 9.76549089e-02 -1.23101704e-01 -2.34366462e-01 ... -3.78951550e-01 4.45764005e-01 -4.66674790e-02] [-1.41355038e-01 -2.13665701e-02 -1.93719834e-01 ... 6.54960275e-02 -2.73623198e-01 1.86728403e-01] [-1.77109819e-02 -1.06798954e-01 -4.84231189e-02 ... -1.32270485e-01 3.82995486e-01 1.56975672e-01]] [[-3.43241632e-01 -1.29774854e-01 -3.14448982e-01 ... 4.50561553e-01 -9.83249173e-02 -8.12056288e-02] [ 1.24515124e-01 6.61872625e-02 1.21251956e-01 ... -9.71416458e-02 4.00387526e-01 -1.55273033e-02] [-1.83797136e-01 1.68836545e-02 1.23555288e-02 ... -1.01959616e-01 -2.30783522e-01 -3.11849434e-02] ... [-3.18094373e-01 -2.97656864e-01 1.81004152e-01 ... 4.60984766e-01 4.98192608e-02 1.74756750e-01] [ 3.40805382e-01 -1.06925622e-01 5.75326011e-02 ... 4.59746011e-02 2.12784961e-01 -9.15220901e-02] [-3.85044999e-02 -3.84442024e-02 -6.86225221e-02 ... -1.70485809e-01 2.72119850e-01 -4.13382947e-02]]] [[[-4.43776995e-02 -4.24218148e-01 -6.37099087e-01 ... -2.05946136e-02 2.68071815e-02 3.68462473e-01] [-3.29692662e-01 1.62235647e-01 1.60287648e-01 ... 
7.14187771e-02 -2.15701967e-01 6.61980331e-01] [-4.97187436e-01 6.73335731e-01 8.31217617e-02 ... -5.86408079e-01 -6.06492937e-01 1.04188077e-01] ... [ 1.97343409e-01 -1.69079721e-01 -3.52270573e-01 ... 1.93308815e-02 -4.73055512e-01 -1.37881875e-01] [-3.17305744e-01 1.58739224e-01 -5.09742141e-01 ... 8.31067562e-02 6.69813678e-02 -1.11299329e-01] [ 1.24420986e-01 8.99644122e-02 5.82336843e-01 ... -2.12187558e-01 -1.19828992e-01 -2.13378683e-01]] [[ 7.96801597e-02 2.94944912e-01 3.56809735e-01 ... 2.07544982e-01 -8.91491473e-02 -4.46725070e-01] [-5.32255828e-01 3.77192408e-01 4.67195399e-02 ... 2.32021615e-01 3.32747884e-02 -2.74412245e-01] [-4.20004994e-01 -2.60729492e-01 1.00284159e-01 ... -3.87351096e-01 1.99943289e-01 -6.47210538e-01] ... [-9.34317634e-02 8.71758699e-01 3.31824794e-02 ... -1.02229849e-01 -1.31380513e-01 -2.60269046e-01] [ 1.05434306e-01 -2.24815592e-01 3.99739556e-02 ... 7.73392200e-01 9.33918804e-02 3.82456273e-01] [-5.35149813e-01 -6.84599280e-01 -2.82506138e-01 ... -4.42568183e-01 -2.54852355e-01 2.14941919e-01]] [[-1.90651402e-01 2.08878681e-01 -6.98493898e-01 ... -1.60683095e-01 -1.78432181e-01 -3.76182497e-01] [-2.86594003e-01 4.33197767e-01 -2.96066135e-01 ... -5.22280514e-01 3.87701392e-01 -4.89384353e-01] [ 1.17056228e-01 1.48086548e-01 4.72604454e-01 ... 4.17395055e-01 -1.12758808e-01 -6.29326284e-01] ... [ 2.19690546e-01 -3.79778624e-01 -3.50748211e-01 ... 4.99928266e-01 -1.55486360e-01 2.29081601e-01] [-6.98736161e-02 2.43982449e-01 5.73452294e-01 ... -5.97992301e-01 -3.28356773e-01 6.20145202e-01] [ 1.52339056e-01 -4.36363323e-03 -1.88072801e-01 ... 2.28972808e-01 -1.30562156e-01 4.01708260e-02]] ... [[-6.53347433e-01 5.53137124e-01 5.02559364e-01 ... -6.71435073e-02 -6.23718858e-01 -4.57511604e-01] [-3.94444853e-01 2.95292169e-01 2.01962441e-01 ... -2.98624784e-01 -7.38490880e-01 2.62600631e-01] [ 2.66081184e-01 -4.13057990e-02 8.22196007e-01 ... 8.26410294e-01 -6.05377778e-02 -2.67891794e-01] ... 
[-2.96136737e-01 1.31935537e-01 -2.52891988e-01 ... -3.25475246e-01 2.25792989e-01 -4.53609496e-01] [-6.89618289e-03 5.27495921e-01 -2.23306477e-01 ... 2.74165094e-01 -4.51939344e-01 -1.62012845e-01] [ 2.51555949e-01 -1.66288503e-02 -8.90897121e-03 ... -5.80858409e-01 -3.60775217e-02 -6.12266362e-01]] [[ 2.16966808e-01 5.73867500e-01 4.72824395e-01 ... -3.55819553e-01 2.70439863e-01 -5.56015313e-01] [ 2.93215632e-01 2.38432735e-02 2.25634530e-01 ... 1.07046001e-01 -1.38872221e-01 5.32199442e-01] [-4.92719501e-01 1.70810267e-01 3.73984724e-01 ... 4.82259542e-01 4.20374483e-01 5.72280109e-01] ... [ 4.82811868e-01 -1.93431322e-02 2.66530037e-01 ... 5.01596630e-01 -5.22277892e-01 3.22130978e-01] [ 4.28696573e-01 -1.64042190e-02 4.72341061e-01 ... 6.65788874e-02 3.98088573e-03 -1.08341957e-02] [-2.30525911e-01 3.58105838e-01 -3.69288470e-03 ... -6.40461981e-01 -1.96398929e-01 -2.35750556e-01]] [[ 6.03294194e-01 -4.74458992e-01 8.63601118e-02 ... 2.46204138e-01 -8.41486692e-01 2.47070953e-01] [-2.73312062e-01 -1.45337477e-01 7.35379696e-01 ... 3.73634905e-01 -4.69774269e-02 1.03932631e+00] [ 2.78595775e-01 4.09119397e-01 4.10845548e-01 ... -9.96813551e-03 9.74235609e-02 2.45425701e-01] ... [ 1.23343855e-01 7.22536027e-01 3.37795287e-01 ... -4.69359905e-01 1.32453372e-03 -1.24940038e-01] [-2.14091763e-01 2.28024274e-01 -3.93799961e-01 ... 3.63317430e-02 -2.49672849e-02 -2.29785934e-01] [-3.86252075e-01 4.13916141e-01 -3.14999014e-01 ... 3.76987666e-01 -3.77271712e-01 -6.51893139e-01]]] [[[-1.94984019e-01 -8.47130194e-02 -9.14369822e-02 ... 3.51439983e-01 1.77098244e-01 -1.49854332e-01] [ 1.18090756e-01 1.80174008e-01 -1.41221181e-01 ... 1.20196052e-01 -1.07976899e-01 1.48399875e-01] [ 3.14036876e-01 -6.49239793e-02 3.64030004e-02 ... 1.65910095e-01 9.33004264e-03 1.53631717e-01] ... [ 2.03553915e-01 -4.24881987e-02 5.09478003e-02 ... 1.94454134e-01 -1.50666520e-01 1.70442715e-01] [ 1.80471055e-02 -7.07850307e-02 1.83618560e-01 ... 
-1.40212625e-01 3.02232951e-01 2.97247291e-01] [ 6.36087880e-02 1.58610731e-01 -2.01184489e-03 ... 1.74185902e-01 9.19258967e-02 9.81257707e-02]] [[-3.99001390e-02 7.08097965e-02 1.12563642e-02 ... -1.36827445e-02 2.38373861e-01 -1.66185442e-02] [-2.37634056e-03 -1.22928709e-01 1.97472703e-02 ... -6.50758967e-02 -3.18305045e-01 1.81182057e-01] [ 2.42874976e-02 -1.33724943e-01 -1.08559448e-02 ... -4.92973290e-02 2.22670343e-02 1.18992813e-02] ... [ 1.93630725e-01 1.47374004e-01 -2.33573347e-01 ... -7.90818408e-02 -1.37033060e-01 -8.07615295e-02] [ 1.32156491e-01 -4.29220162e-02 -1.26393139e-01 ... 4.78351340e-02 7.27349967e-02 1.56562269e-01] [ 7.52191767e-02 -1.36701137e-01 -2.29949310e-01 ... -9.41726491e-02 2.10985824e-01 -1.43039629e-01]] [[-2.33899370e-01 -4.48919423e-02 3.16241384e-02 ... 1.25801384e-01 -9.53288749e-02 2.74364561e-01] [-2.31564473e-02 -2.47015748e-02 -1.63579613e-01 ... -1.07708879e-01 -2.14258835e-01 -1.94789544e-01] [-4.50625122e-02 -4.73142527e-02 2.26830766e-02 ... 8.97025317e-03 3.22832093e-02 -1.21690325e-01] ... [-1.96048282e-02 -2.66552474e-02 1.61188822e-02 ... 4.88427021e-02 -2.31851086e-01 1.76024660e-01] [-1.37776151e-01 5.15605649e-03 2.61577815e-01 ... -6.70505017e-02 2.89757047e-02 8.91641062e-03] [ 2.36774851e-02 -1.26348838e-01 -1.92519978e-01 ... -1.49608627e-01 5.83711341e-02 1.23122759e-01]] ... [[ 1.44697383e-01 5.76193668e-02 1.12385131e-01 ... -1.11809835e-01 -1.01796389e-01 -1.25963874e-02] [-8.24413076e-02 -7.42884949e-02 4.15728129e-02 ... -1.41256928e-01 -2.20416591e-01 7.71854520e-02] [-9.88698006e-02 8.07842389e-02 -3.70389596e-02 ... -7.14942962e-02 3.41940112e-02 -2.46578202e-01] ... [-4.75621372e-02 -6.09306097e-02 2.19256312e-01 ... 1.34880826e-01 1.01005554e-01 -7.44443561e-04] [ 1.88104399e-02 -5.09625152e-02 7.19477013e-02 ... -3.69401462e-03 1.67300664e-02 2.22660527e-02] [ 1.08399458e-01 -5.15600629e-02 1.59621924e-01 ... 
1.08755961e-01 2.18929723e-02 -6.34781569e-02]] [[-1.42780140e-01 -4.57909480e-02 -1.69271082e-02 ... -1.06412843e-02 3.68977189e-02 3.25602363e-03] [-2.79999971e-02 1.09932803e-01 -1.07549407e-01 ... 1.09058775e-01 1.74191058e-01 1.21351220e-01] [ 8.21684375e-02 -1.14859544e-01 -6.03515655e-02 ... 7.17802867e-02 -1.14465646e-01 1.49179593e-01] ... [-4.99648042e-02 -1.75464787e-02 -1.66793704e-01 ... 1.10353135e-01 9.66740698e-02 -1.58162355e-01] [-7.36672208e-02 -1.01150006e-01 3.86285223e-02 ... 4.17301282e-02 -2.59134937e-02 -1.57144144e-01] [ 1.30418211e-01 4.10895869e-02 5.64800128e-02 ... -1.17069244e-01 1.10075094e-01 -8.64036530e-02]] [[-5.04305474e-02 1.53139964e-01 5.97991981e-02 ... -5.97339012e-02 4.79264697e-03 7.32614323e-02] [-1.34344786e-01 -2.72833973e-01 -7.16876537e-02 ... 1.43019050e-01 -5.53719178e-02 -1.74210057e-01] [-7.37816235e-03 -9.00493264e-02 -1.05483890e-01 ... -4.06668335e-02 3.14798266e-01 -1.27490073e-01] ... [-5.35100512e-02 -1.05290838e-01 -1.29161030e-01 ... 7.16560557e-02 1.95981041e-01 1.09223805e-01] [ 3.43306847e-02 -7.62494877e-02 9.78990272e-02 ... 3.12896669e-02 -1.19503267e-01 2.64296662e-02] [-1.04881085e-01 1.55094936e-01 -1.87966645e-01 ... -1.90584496e-01 1.05995305e-01 2.45871618e-02]]] [[[-2.85801619e-01 -3.38137001e-01 -4.79462683e-01 ... 2.66754478e-01 -1.25167704e+00 1.95461556e-01] [ 8.43669772e-01 -6.24013580e-02 -6.98366880e-01 ... -7.84383833e-01 -6.98873475e-02 -4.56408828e-01] [ 3.08292890e+00 7.52130508e-01 5.34429848e-01 ... 2.73615152e-01 6.64533600e-02 1.95811298e-02] ... [ 8.80933344e-01 -3.42656761e-01 -9.29725841e-02 ... 9.31684673e-01 -4.73987639e-01 3.00622303e-02] [ 3.19370836e-01 1.15691572e-02 -4.45823610e-01 ... 4.68649790e-02 2.34591126e-01 2.81178147e-01] [-4.06240016e-01 6.21593058e-01 7.16046512e-01 ... -8.02828610e-01 1.04327582e-01 2.64831632e-01]] [[-4.71251607e-01 3.86032790e-01 -1.22540021e+00 ... 
1.50315309e+00 -7.10460590e-03 -2.40607813e-01] [ 5.38757563e-01 -4.76589769e-01 3.58389020e-01 ... 6.67082787e-01 6.76650286e-01 1.18322694e+00] [-8.35492909e-01 1.87686712e-01 -3.69505376e-01 ... 3.81518640e-02 -9.93460834e-01 -5.50040126e-01] ... [-6.98922217e-01 1.48397163e-01 9.29214478e-01 ... 4.71131265e-01 -5.96093953e-01 9.36149061e-02] [ 5.50738394e-01 4.71155286e-01 -7.94188738e-01 ... -1.48174047e-01 2.03701600e-01 -4.30741340e-01] [-3.01487029e-01 2.26685181e-01 -8.17006588e-01 ... -1.05869435e-01 -2.97887297e-03 1.00486243e+00]] [[-1.11175597e+00 1.32679677e+00 1.78469509e-01 ... -5.79704940e-01 -5.03245771e-01 -6.00492895e-01] [-5.22406995e-01 1.00323093e+00 1.81911600e+00 ... -1.92235842e-01 6.47587061e-01 -5.86743772e-01] [ 1.51581809e-01 7.54288137e-01 8.56121898e-01 ... 3.67852628e-01 5.60657345e-02 5.59566589e-03] ... [ 9.18650210e-01 -9.83692527e-01 9.51935127e-02 ... 5.65084778e-02 1.97169423e-01 -1.19062889e+00] [-2.59726327e-02 -2.19561800e-01 3.70473266e-02 ... 1.56882405e-01 3.39333683e-01 -2.85778254e-01] [ 3.50972950e-01 2.09941231e-02 -1.04282749e+00 ... 3.61616284e-01 1.09722579e+00 -2.51301467e-01]] ... [[-2.57512361e-01 -1.80049166e-01 2.52308756e-01 ... -3.88622850e-01 -2.11990714e-01 2.85361528e-01] [ 4.93238658e-01 5.61903298e-01 7.30240285e-01 ... 2.37001851e-01 9.37728524e-01 -3.12993303e-02] [-2.09076926e-01 -1.16168153e+00 -6.45982802e-01 ... 1.29464015e-01 3.96139532e-01 -6.48490846e-01] ... [ 9.23760176e-01 -4.08673077e-04 -6.16155803e-01 ... -6.21140003e-01 2.14703873e-01 3.88108015e-01] [-1.17747039e-01 5.34377396e-01 -1.96250618e+00 ... -6.44365191e-01 3.47622097e-01 3.66968274e-01] [ 2.28672586e-02 2.47165918e-01 6.57033861e-01 ... 7.84333706e-01 -9.45631087e-01 -6.40946984e-01]] [[ 1.64750293e-01 -9.06779766e-02 4.75969315e-01 ... -1.39729138e-02 1.22695565e-01 -9.50072050e-01] [ 1.61434364e+00 7.06179023e-01 1.41694129e-01 ... 
3.95949967e-02 -2.44945392e-01 3.89129907e-01] [-6.16889536e-01 1.63777635e-01 7.45724961e-02 ... 5.95087171e-01 -1.18142925e-01 -2.49366045e-01] ... [ 3.66523296e-01 -7.64014423e-01 3.74900877e-01 ... 1.73824668e-01 4.87113059e-01 3.87706578e-01] [ 2.53310185e-02 -4.39939439e-01 9.66598094e-02 ... -7.37408936e-01 -6.90560400e-01 -5.46024442e-01] [ 1.10553525e-01 4.42616910e-01 -6.41751945e-01 ... 2.03522816e-01 -4.18678671e-01 -3.59319299e-01]] [[-4.21207696e-01 -8.54332894e-02 1.32687175e+00 ... -1.67835340e-01 6.28414378e-02 -4.82008725e-01] [ 8.46871436e-02 -1.17606747e+00 -9.06912625e-01 ... -5.29673755e-01 1.33583581e+00 4.86906499e-01] [ 8.12400222e-01 -5.88688195e-01 4.20963556e-01 ... 8.12207162e-01 -5.18162072e-01 1.11804438e+00] ... [-7.17259228e-01 5.46536267e-01 3.60954314e-01 ... -6.13221943e-01 -1.10179651e+00 -4.24395591e-01] [ 1.15596056e+00 5.76253951e-01 7.91979581e-02 ... -5.27281687e-02 -5.23043096e-01 -3.91453922e-01] [ 1.16387951e+00 3.87347102e-01 -8.32737684e-01 ... 8.85007083e-01 7.83820510e-01 -1.43182027e+00]]] [[[ 3.56189370e-01 -3.26940194e-02 1.38507381e-01 ... 4.33930121e-02 -1.19780593e-01 -4.51430827e-01] [-5.09811640e-01 5.89459110e-03 -2.81623751e-01 ... 1.59113362e-01 -1.19564369e-01 -2.63214916e-01] [ 4.23783958e-02 -2.61878148e-02 1.26634121e-01 ... -2.51194924e-01 4.45364296e-01 -3.00840199e-01] ... [ 3.74027967e-01 1.57989025e-01 1.42225966e-01 ... -8.51150677e-02 3.32732230e-01 2.70172060e-01] [ 5.12244463e-01 2.84778088e-01 1.71384707e-01 ... -5.99628091e-01 -1.98430464e-01 3.58732082e-02] [ 6.11223057e-02 -1.28111085e-02 5.20038605e-02 ... 3.35053504e-01 3.41036588e-01 4.18996930e-01]] [[ 3.31376046e-02 3.06968957e-01 1.83339730e-01 ... 1.79329112e-01 1.69423789e-01 2.49343604e-01] [ 5.62132716e-01 9.29055393e-01 2.23308459e-01 ... -7.28013292e-02 1.35535911e-01 1.42860472e-01] [ 3.61923963e-01 -1.76238775e-01 -4.92563248e-01 ... 3.73107433e-01 -3.51115286e-01 7.47474074e-01] ... 
[-1.43136546e-01 3.09887022e-01 -4.45809029e-02 ... 3.26789290e-01 3.70520055e-01 1.52121395e-01] [ 1.67438969e-01 5.20451479e-02 -3.38161588e-01 ... -3.63589488e-02 -7.55048320e-02 1.30054250e-01] [-3.30063403e-01 4.31535751e-01 -5.35716340e-02 ... -2.65911996e-01 1.81050897e-01 2.24417254e-01]] [[-1.60040036e-01 2.85483301e-01 2.83828020e-01 ... -1.88293103e-02 -3.22836876e-01 3.02403960e-02] [ 2.15162113e-01 4.15246874e-01 7.72624388e-02 ... -7.31121540e-01 8.53515327e-01 -1.47335470e-01] [ 4.99618798e-01 5.58683611e-02 2.30798692e-01 ... 7.92764962e-01 -2.49898523e-01 6.35229498e-02] ... [ 2.81207174e-01 1.49557367e-01 3.98138195e-01 ... 4.04750496e-01 1.24735259e-01 -6.86949492e-02] [ 2.81469613e-01 -4.55293581e-02 -3.04831862e-01 ... -3.74513626e-01 -3.93538743e-01 -7.90149495e-02] [ 5.81588268e-01 3.28544825e-01 1.78973898e-01 ... 7.36913323e-01 -1.66387275e-01 1.04345955e-01]] ... [[ 3.30526888e-01 -1.23793423e-01 3.90427351e-01 ... 1.26112074e-01 -3.15217942e-01 3.00984621e-01] [-1.51501074e-01 -2.42094863e-02 6.72307789e-01 ... -5.96427262e-01 9.76902992e-02 -4.01163623e-02] [ 4.35904235e-01 1.80547118e-01 -2.87047535e-01 ... -5.83594739e-01 6.05357885e-01 5.16639888e-01] ... [-3.28949273e-01 8.24138299e-02 1.97092950e-01 ... 7.12817907e-02 -1.03850648e-01 2.02004403e-01] [-2.36156434e-01 3.85127366e-02 -3.65362108e-01 ... -2.67120842e-02 3.27631980e-01 -1.53916135e-01] [ 2.02696398e-01 -8.13034326e-02 1.55857995e-01 ... 1.83217898e-01 -6.44088387e-02 -4.97733615e-02]] [[-2.99178988e-01 2.19835952e-01 1.30944764e-02 ... 1.53206140e-01 -2.18934249e-02 -1.81374058e-01] [ 2.19142139e-02 9.27342027e-02 -3.38134795e-01 ... 4.56589848e-01 4.46676254e-01 -3.77452672e-02] [-1.01892743e-02 -6.87607974e-02 -1.19817063e-01 ... 1.02939479e-01 9.99829769e-02 2.05606923e-01] ... [ 1.32034540e-01 -1.95712715e-01 -1.32011361e-02 ... -5.34784257e-01 3.65243614e-01 4.67349775e-02] [ 1.04363002e-01 6.02504432e-01 -1.60392165e-01 ... 
3.54253769e-01 2.57308781e-01 1.96910486e-01] [-3.86177540e-01 -5.60137212e-01 -2.95664757e-01 ... 3.44329178e-01 2.18303502e-01 1.31110787e-01]] [[ 5.82771361e-01 -3.46382648e-01 -5.05990498e-02 ... 2.09635139e-01 2.10918695e-01 -3.07461265e-02] [ 2.69997418e-01 -2.84277517e-02 6.42955527e-02 ... -1.35047302e-01 -2.30312526e-01 6.01825044e-02] [ 2.84049511e-02 -1.43252322e-02 -1.95946977e-01 ... 6.51266500e-02 -2.75215387e-01 1.09427897e-02] ... [ 8.96305203e-01 6.18467093e-01 -3.71885687e-01 ... -3.95842224e-01 4.90347266e-01 4.72313017e-02] [-6.78595454e-02 3.23831737e-02 -1.06294855e-01 ... 6.38539255e-01 2.68697947e-01 4.11344647e-01] [-1.36297569e-01 -2.96219528e-01 -4.49246585e-01 ... -4.60148782e-01 5.90786815e-01 -6.35847867e-01]]]] [[[[ 2.00595319e-01 3.06926703e+00 -1.90309000e+00 ... 9.30885196e-01 -4.64029878e-01 -2.15489817e+00] [ 1.91555619e-02 -2.31375217e+00 3.11151266e+00 ... -2.46857738e+00 1.75502634e+00 1.18475527e-01] [ 3.79596293e-01 1.53244269e+00 2.16720009e+00 ... -1.26167715e+00 -1.03786576e+00 -9.11371350e-01] ... [-2.92229843e+00 7.63466537e-01 1.25785160e+00 ... 7.20597208e-02 -2.94671774e+00 -1.46325815e+00] [ 6.67970777e-01 2.22471738e+00 -4.36802089e-01 ... 1.83941615e+00 3.81331396e+00 2.50880337e+00] [ 3.32739234e-01 2.59684038e+00 2.05667806e+00 ... -2.19713021e-02 -9.20399725e-01 2.95978218e-01]] [[ 1.49716765e-01 -3.54973793e-01 -1.85676798e-01 ... -7.94703662e-01 -7.15323210e-01 1.29076231e+00] [ 9.25917685e-01 -1.95549786e+00 -3.02804351e+00 ... -2.62357044e+00 -2.82626128e+00 -8.73866141e-01] [ 9.68621969e-01 3.60398507e+00 -1.71065405e-01 ... -1.67052567e+00 -1.06731892e-01 -6.14888012e-01] ... [-8.74978364e-01 -4.09273434e+00 -8.89501929e-01 ... 3.41381001e+00 -1.47827876e+00 -6.03739679e-01] [-9.26460564e-01 -2.57834315e-01 1.78686976e+00 ... 9.42561090e-01 1.87651649e-01 -4.37615782e-01] [ 1.79882646e+00 -1.23616052e+00 -8.64956558e-01 ... 
7.26745203e-02 -6.71791732e-01 1.88665080e+00]] [[-1.89685786e+00 -1.79385221e+00 -2.23870444e+00 ... 3.94357920e+00 7.46645033e-01 -2.27347469e+00] [-8.65399003e-01 -2.41407212e-02 1.62606382e+00 ... -3.10615301e-01 1.93190232e-01 8.30525279e-01] [ 1.90689906e-01 3.34330857e-01 1.82777560e+00 ... 1.88358581e+00 -7.82541752e-01 -1.51245177e-01] ... [ 2.43038988e+00 -1.13023865e+00 5.74710608e-01 ... -2.32115816e-02 2.14413953e+00 9.27069068e-01] [-1.69866055e-01 -7.16713250e-01 4.78990227e-01 ... 2.40943730e-01 1.26116562e+00 3.05534095e-01] [-3.59891117e-01 2.58413348e-02 -9.72347915e-01 ... 3.33579108e-02 -9.17629361e-01 -1.17012501e+00]] ... [[ 7.44358242e-01 -2.29628611e+00 -6.42538965e-02 ... 4.84550148e-01 1.02494550e+00 2.90802587e-02] [-1.80870855e+00 2.13779044e+00 -1.50400186e+00 ... 1.89993429e+00 -1.34652972e+00 -5.56695700e-01] [ 1.68526995e+00 3.16723204e+00 -1.02009523e+00 ... 2.44747901e+00 -1.32296431e+00 1.44396439e-01] ... [-1.56383467e+00 7.99440861e-01 -1.13228452e+00 ... 4.23255742e-01 -1.78731370e+00 -5.28745830e-01] [-2.27925324e+00 -1.99747294e-01 7.39404738e-01 ... 5.76416925e-02 3.81117433e-01 -8.92474174e-01] [ 1.88096333e+00 -2.02603745e+00 5.31838417e-01 ... 3.73287845e+00 -1.40040982e+00 4.80962306e-01]] [[-2.95855999e+00 -7.19222426e-01 -2.79550433e+00 ... -5.03725827e-01 -8.66324365e-01 -1.14659739e+00] [-4.83605891e-01 2.36698344e-01 -3.34836984e+00 ... 6.51263177e-01 3.31442571e+00 -8.20003271e-01] [ 2.56473541e+00 2.00755954e+00 7.47169495e-01 ... -1.82965302e+00 2.13619798e-01 1.97424281e+00] ... [-3.11161709e+00 -3.73072714e-01 -1.02955675e+00 ... -3.51013482e-01 -2.22797728e+00 -2.56927937e-01] [-5.08750379e-01 -1.95114970e+00 -1.71293187e+00 ... 2.63213825e+00 -7.10778117e-01 -2.44250864e-01] [ 2.03410044e-01 4.50034440e-01 3.04791272e-01 ... 6.73399925e-01 5.82058370e-01 2.82260805e-01]] [[ 2.61699080e-01 5.95610738e-01 -5.46864092e-01 ... 
1.26168644e+00 -3.10443616e+00 -1.04037118e+00] [ 2.28848103e-02 1.11132610e+00 -8.88347626e-01 ... 4.94951382e-02 1.44477665e+00 -1.28136206e+00] [-2.39631343e+00 -5.73972881e-01 -1.39341462e+00 ... 9.24309373e-01 3.65132481e-01 9.91553009e-01] ... [-1.65270305e+00 -9.51035395e-02 -9.23169315e-01 ... 2.11789632e+00 -8.56688380e-01 1.45995677e+00] [ 1.53947735e+00 1.32492614e+00 -1.61918342e+00 ... 2.47692108e+00 -8.40143383e-01 2.46504202e-01] [-1.88054121e+00 9.02508736e-01 2.29069996e+00 ... 1.78831768e+00 2.05665684e+00 -1.52588499e+00]]] [[[-2.17143327e-01 3.42660457e-01 -8.53500590e-02 ... -2.26011220e-02 -2.78226007e-02 5.35668731e-02] [ 2.46530056e-01 -1.41675144e-01 3.19013655e-01 ... -1.92598388e-01 -2.10322112e-01 -3.41422141e-01] [ 2.97953993e-01 1.32913932e-01 -1.75895438e-01 ... 4.72109705e-01 -7.98270777e-02 1.68372154e-01] ... [-4.22765374e-01 1.36696115e-01 -2.07933113e-02 ... -2.52012759e-01 -2.75797676e-02 -4.39721882e-01] [-2.33477294e-01 -2.18080804e-01 -4.87376563e-02 ... 7.42786564e-03 1.10799909e-01 3.65900725e-01] [ 5.18283360e-02 1.48329407e-01 -3.85146558e-01 ... -7.46214613e-02 2.68819064e-01 2.42454857e-01]] [[ 2.50650853e-01 -3.56522828e-01 -1.52956452e-02 ... -6.20654449e-02 -4.01800424e-02 -1.56547144e-01] [-6.91798627e-02 2.95761466e-01 -9.39280391e-02 ... 2.44377211e-01 2.72624731e-01 -1.06113806e-01] [-3.38741004e-01 -1.47237629e-01 -2.62460023e-01 ... 3.31098646e-01 4.35973823e-01 6.00290716e-01] ... [ 6.49330616e-02 1.17364846e-01 -1.91233680e-02 ... -2.66567826e-01 9.66848284e-02 2.20470071e-01] [ 5.63266695e-01 2.24164903e-01 -4.84351158e-01 ... 8.65942240e-03 -1.09733693e-01 6.61680102e-01] [ 1.03507906e-01 -6.51618540e-01 -1.17585719e-01 ... 2.62340419e-02 -3.60409282e-02 2.76787937e-01]] [[ 5.30238926e-01 3.92365038e-01 6.54792264e-02 ... -1.16331361e-01 1.91189989e-03 -2.51179248e-01] [ 8.21994618e-02 -1.88476089e-02 -2.36123756e-01 ... 
3.22288811e-01 1.61077321e-01 -2.99900264e-01] [-4.42306489e-01 1.87717959e-01 2.04562768e-01 ... -1.52000368e-01 -1.06796727e-01 -3.26010615e-01] ... [ 2.67308533e-01 8.50236267e-02 1.69203728e-01 ... -6.69827834e-02 -3.87248158e-01 5.07360220e-01] [-2.03532532e-01 9.21131205e-03 1.10535555e-01 ... 3.92335683e-01 1.83743596e-01 -1.18151039e-01] [-1.80349410e-01 -2.38387063e-01 -3.49475712e-01 ... -3.95717800e-01 2.62404025e-01 5.37269294e-01]] ... [[-9.01228786e-02 -1.96620286e-01 7.39841536e-02 ... 3.26288491e-01 1.28907142e-02 -3.72330606e-01] [ 5.03467560e-01 -6.21493720e-02 -1.48166582e-01 ... 2.43136019e-01 1.44456089e-01 5.13501428e-02] [ 1.72852650e-01 -3.86441737e-01 3.55830669e-01 ... 1.59565032e-01 2.69137770e-01 -3.33888680e-02] ... [-3.70341480e-01 1.01478554e-01 1.04933053e-01 ... 6.80973455e-02 -4.50236872e-02 2.09895611e-01] [-1.89509764e-01 1.13091961e-01 -2.91563660e-01 ... 1.77885875e-01 2.80548424e-01 3.37760150e-01] [ 1.81507140e-01 -5.06634638e-02 -2.14524209e-01 ... 4.23511304e-02 -2.30428666e-01 1.78515345e-01]] [[ 1.45275652e-01 -3.41004819e-01 -3.08184564e-01 ... 3.82078201e-01 2.80456036e-01 1.81572884e-01] [ 6.90809727e-01 1.06352381e-01 4.14895043e-02 ... 1.96351171e-01 -1.78534850e-01 -3.52964282e-01] [-4.83242184e-01 2.73221061e-02 -3.25065874e-03 ... 2.23642468e-01 1.34616837e-01 -1.29868582e-01] ... [ 5.04558086e-02 9.73413587e-02 -3.76234353e-02 ... -2.86124021e-01 -7.23326132e-02 6.42550468e-01] [ 1.09634204e-02 2.38765687e-01 -1.13085516e-01 ... -1.40775800e-01 -2.07100779e-01 4.79040900e-03] [-9.08936560e-02 -3.58732969e-01 1.82042420e-02 ... -4.97268066e-02 -2.21896827e-01 -2.51457423e-01]] [[-2.24888101e-01 -1.60981044e-01 2.67272711e-01 ... 3.20278496e-01 9.70749632e-02 -1.67998314e-01] [-1.47323817e-01 -4.38702926e-02 8.14189389e-02 ... 1.81137800e-01 -2.74741590e-01 2.79719699e-02] [ 1.07687719e-01 -4.40848619e-03 4.42821197e-02 ... 1.75367326e-01 9.61327180e-02 1.01227380e-01] ... 
[ 1.29078910e-01 1.41913623e-01 -3.94382089e-01 ... -1.88242812e-02 -3.12773108e-01 7.43718119e-03] [-9.27571487e-03 2.23875776e-01 4.04441446e-01 ... 2.41246283e-01 1.75342202e-01 4.11519825e-01] [ 2.70586312e-01 -2.87675232e-01 -6.14126921e-02 ... 6.22747988e-02 -2.13455930e-02 8.23163241e-02]]] [[[-1.98140442e-01 4.50723320e-01 -7.05006063e-01 ... 7.53629282e-02 -1.87369570e-01 4.45828050e-01] [ 5.87994456e-01 6.95638418e-01 1.05956271e-02 ... -3.68450820e-01 -4.60535407e-01 4.65928793e-01] [ 2.80478507e-01 -8.26384872e-02 5.00500023e-01 ... -1.61461964e-01 4.48927402e-01 -9.65630785e-02] ... [ 5.24656296e-01 1.00453444e-01 -5.49217872e-03 ... -2.51441866e-01 8.25474918e-01 2.32960597e-01] [-8.94306377e-02 -6.05411828e-01 -6.80035651e-01 ... -3.01281996e-02 2.72856001e-02 2.12023959e-01] [-3.34228307e-01 -1.12776108e-01 2.05165341e-01 ... 6.52516246e-01 2.28454873e-01 -4.01687682e-01]] [[-1.63838223e-01 -9.70395505e-02 4.78832632e-01 ... 2.62111694e-01 3.57266873e-01 -1.45603091e-01] [-2.04914719e-01 5.04778177e-02 -1.19442634e-01 ... 5.51093161e-01 -4.59393919e-01 -1.32000491e-01] [-1.07705863e-02 1.18031912e-01 -2.71834970e-01 ... -4.74543691e-01 -3.06106985e-01 -4.12090242e-01] ... [-4.36959118e-01 1.77725106e-01 -4.51022536e-01 ... 1.48781583e-01 -1.66621089e-01 4.14529033e-02] [ 1.55002862e-01 2.15688244e-01 -6.81310892e-01 ... 1.52124226e-01 -5.84505379e-01 -1.83650956e-01] [-1.19807357e-02 2.34468400e-01 5.17147720e-01 ... 4.99999613e-01 1.49581552e-01 5.06319821e-01]] [[-2.69391119e-01 2.31961027e-01 1.60975516e-01 ... 2.27284152e-02 8.68568420e-01 -7.42244184e-01] [ 9.82762203e-02 -2.18013346e-01 1.92297116e-01 ... 7.93186575e-02 -4.96794939e-01 2.16035649e-01] [-6.34020418e-02 2.21337199e-01 -1.38228104e-01 ... -1.72661304e-01 -3.06163549e-01 1.98848605e-01] ... [-6.88911825e-02 -2.01379046e-01 -1.20654419e-01 ... 5.96976995e-01 -4.19424176e-01 3.34193707e-01] [ 1.14162490e-01 -8.24071616e-02 2.71901459e-01 ... 
-1.92948401e-01 -3.55888873e-01 7.21737623e-01] [ 3.26404750e-01 -1.14204250e-01 1.32222533e-01 ... 1.41991019e-01 -1.33889675e-01 2.36818627e-01]] ... [[-2.02795878e-01 2.34998152e-01 3.10420394e-01 ... -2.03912705e-01 3.53588730e-01 1.55037835e-01] [-2.88332459e-02 2.00464681e-01 -7.08137378e-02 ... 3.86376008e-02 -1.27148435e-01 3.71026024e-02] [ 3.93524729e-02 1.99324533e-01 2.87410691e-02 ... 1.04278994e+00 -2.03126356e-01 -1.73621088e-01] ... [-3.90774548e-01 -1.06477633e-01 4.75230783e-01 ... -4.83994752e-01 -8.08530822e-02 -6.94869041e-01] [ 1.40790552e-01 -1.35031179e-01 -3.46401274e-01 ... 4.71224695e-01 7.51273334e-02 1.97301522e-01] [-4.54884954e-02 6.23582959e-01 -1.20037666e-03 ... 7.02970698e-02 -5.75961411e-01 4.39320445e-01]] [[-3.25600266e-01 -5.09826422e-01 -2.16966063e-01 ... -3.13192725e-01 -6.87670767e-01 6.66614234e-01] [ 3.39050710e-01 -6.41672552e-01 1.17901146e-01 ... 5.01159966e-01 3.98835808e-01 8.16542506e-02] [-1.41826607e-02 -3.54579836e-02 -2.72228777e-01 ... -8.25522184e-01 4.25218910e-01 2.21320502e-02] ... [-8.20663154e-01 3.99525583e-01 -4.58235741e-01 ... 5.56057274e-01 -2.73670137e-01 -1.66126013e-01] [-2.70001143e-01 5.61618432e-02 -9.45264995e-02 ... 4.69630845e-02 -1.61539137e-01 -3.68349910e-01] [ 3.16599280e-01 -3.84195626e-01 -2.68088859e-02 ... -2.47773379e-01 5.74737601e-02 -5.37210941e-01]] [[ 2.19177231e-01 -8.36036563e-01 8.71437728e-01 ... -5.04986569e-02 3.00578233e-02 3.73403043e-01] [-3.14712793e-01 -7.16691971e-01 -1.44255208e-03 ... -5.62966228e-01 4.20891464e-01 -1.24805450e-01] [ 2.79052146e-02 3.72332841e-01 3.17563325e-01 ... -2.93967038e-01 -2.27806211e-01 2.22288400e-01] ... [ 3.75693977e-01 4.90820706e-01 -5.02162218e-01 ... 1.01226822e-01 -8.96970704e-02 2.85175145e-01] [ 2.91228980e-01 -6.93684304e-03 4.25941404e-03 ... 3.49409692e-02 -6.56565368e-01 4.35609668e-01] [ 2.51513362e-01 -3.23228896e-01 8.78005996e-02 ... 
3.38994294e-01 5.00316024e-01 6.22704685e-01]]] [[[-2.64743954e-01 -5.49179576e-02 -3.13659832e-02 ... 1.80573791e-01 1.23657964e-01 7.81541467e-02] [-2.68358916e-01 4.30755131e-02 -7.65081821e-03 ... -1.63769096e-01 -1.17770977e-01 -2.00935435e-02] [ 6.26912713e-02 8.07329342e-02 -3.00110411e-02 ... -6.66914359e-02 -1.33517385e-01 1.27561048e-01] ... [ 6.26374260e-02 -1.34907793e-02 -1.15764380e-01 ... 1.48916006e-01 2.47008186e-02 -1.00431621e-01] [ 3.06584775e-01 -8.49123821e-02 4.82049920e-02 ... -9.24828500e-02 7.64189381e-03 2.17972964e-01] [ 5.75038753e-02 -1.05094396e-01 2.13618621e-01 ... 9.10582393e-02 -3.90718393e-02 2.48846844e-01]] [[ 1.50896549e-01 4.54578325e-02 1.12399824e-01 ... 2.70243734e-01 -1.28166586e-01 -6.19783625e-02] [ 1.31419301e-01 -2.12143183e-01 -6.56885430e-02 ... 1.01825804e-01 4.54444345e-03 -2.25614652e-01] [-5.78959584e-02 -9.99498516e-02 1.60837665e-01 ... 6.93364888e-02 1.26338780e-01 -6.41512647e-02] ... [-7.53252627e-03 -8.27191323e-02 -6.22128993e-02 ... -2.10343808e-01 1.08303025e-01 6.28266111e-02] [ 4.84835729e-02 -4.45678681e-02 1.36678353e-01 ... -3.61037515e-02 -2.04180002e-01 9.84448865e-02] [ 7.43562996e-04 -4.47381586e-02 6.05919510e-02 ... -1.34239376e-01 -1.28024578e-01 5.68763353e-02]] [[ 5.16826250e-02 1.76366866e-02 1.95426922e-02 ... -2.15701908e-01 7.26936618e-04 1.45166248e-01] [ 3.15643102e-02 -3.25770490e-03 -6.28985614e-02 ... 2.35033646e-01 1.39516726e-01 -1.64902955e-01] [-5.76927401e-02 5.61338812e-02 4.05503362e-02 ... 9.07380059e-02 -2.72213042e-01 -2.91187972e-01] ... [ 1.92162786e-02 1.75157934e-02 -1.50853284e-02 ... -8.98672491e-02 1.19670592e-01 9.40242335e-02] [-7.54212439e-02 -1.09194577e-01 -2.20935255e-01 ... 3.72823030e-02 -6.45623207e-02 -1.61127932e-02] [-2.40579814e-01 -2.25899145e-01 1.15195274e-01 ... 1.49190351e-01 -5.62429801e-02 8.71402584e-03]] ... [[-3.61191072e-02 3.78478803e-02 -1.24098510e-02 ... 
2.32929997e-02 4.92351688e-02 -7.19943717e-02] [ 7.93292150e-02 9.30267572e-02 1.20247558e-01 ... 1.12310179e-01 1.41371921e-01 -7.78843910e-02] [-5.56379929e-03 -9.59409550e-02 -1.63519070e-01 ... 3.74499634e-02 -2.12387502e-01 4.91002835e-02] ... [-1.43335864e-01 6.26634732e-02 -1.69344023e-01 ... -7.01295510e-02 -1.44127503e-01 -1.60942078e-01] [-2.95498595e-02 1.02719247e-01 9.86656174e-02 ... 2.15774514e-02 -1.10908017e-01 -1.52746901e-01] [-1.32059231e-01 1.00049123e-01 -1.89153366e-02 ... 9.13210064e-02 1.61163926e-01 -4.29909788e-02]] [[-1.27773285e-01 7.83671066e-02 1.96216717e-01 ... 1.50288701e-01 1.53378263e-01 -7.18015730e-02] [-2.67877907e-01 -6.67154118e-02 -5.68203665e-02 ... -1.83494240e-01 -1.33037224e-01 -1.92487597e-01] [ 4.41695824e-02 -7.30258925e-03 -7.84018822e-03 ... 2.25177966e-02 -1.57181114e-01 5.62842973e-02] ... [ 1.33785143e-01 -1.40778571e-01 -2.55097412e-02 ... -1.84255749e-01 -2.56767981e-02 4.42384630e-02] [-1.86382346e-02 -1.20228603e-01 3.55214119e-01 ... 2.83432975e-02 -3.33635136e-02 -1.80108234e-01] [ 3.17440003e-01 -1.28018141e-01 9.45732147e-02 ... -1.38430580e-01 1.94484487e-01 -8.46124962e-02]] [[ 5.17324992e-02 3.62777859e-02 3.68930995e-01 ... 6.92467764e-02 -9.62314233e-02 3.43460470e-01] [-1.78898305e-01 -5.86817116e-02 1.28916740e-01 ... -1.05707020e-01 -2.34629642e-02 7.04271793e-02] [-3.14418562e-02 -1.85820431e-01 2.55895481e-02 ... -4.94117923e-02 -2.22933218e-01 -2.14984730e-01] ... [-8.14278945e-02 -1.08865552e-01 9.38413106e-03 ... 6.40011579e-02 -1.08191511e-02 2.99092401e-02] [ 1.18488505e-01 -1.03770286e-01 -1.06627792e-01 ... 5.56436926e-02 1.01308957e-01 2.87207752e-01] [ 1.06988959e-02 1.08759940e-01 -1.95605867e-02 ... -1.98071897e-01 -7.40312040e-02 -3.80633920e-02]]] [[[-1.66954458e-01 -1.18786287e+00 -1.61199474e+00 ... -8.79511952e-01 7.49719962e-02 1.23284781e+00] [ 7.28958726e-01 1.00475824e+00 7.67517567e-01 ... 
1.13505280e+00 -2.91409403e-01 3.18962872e-01] [ 1.26841903e-01 1.04863420e-01 -3.09843034e-01 ... -8.12631667e-01 -6.95084393e-01 3.52242291e-01] ... [-2.57919192e-01 5.59109211e-01 -3.49879920e-01 ... -2.58089721e-01 1.18114993e-01 8.70702192e-02] [ 4.15339231e-01 1.49630025e-01 -4.20233190e-01 ... 9.75269526e-02 -9.98365760e-01 1.67513877e-01] [-1.24212027e+00 -1.08401358e+00 5.93511350e-02 ... -1.21836066e+00 -1.48170888e-01 2.95666307e-01]] [[ 1.15441000e+00 2.16182321e-01 4.20950055e-01 ... -4.64751542e-01 5.97535133e-01 1.30240515e-01] [-1.90523434e+00 -6.90885007e-01 -1.10630274e+00 ... 2.03800011e+00 -1.69874549e+00 8.40930641e-01] [-1.21376801e+00 -4.35735285e-01 -6.79234743e-01 ... -4.10624564e-01 4.32562709e-01 -5.14555693e-01] ... [-7.50518963e-02 -1.36899972e+00 -7.39780545e-01 ... 1.06144822e+00 4.97514337e-01 9.78631154e-02] [ 4.29103285e-01 1.77344605e-01 -1.09724760e+00 ... 1.11006975e+00 -8.24413717e-01 -5.98322272e-01] [-9.05824363e-01 2.71304045e-02 3.29111546e-01 ... 9.21498146e-03 3.89365047e-01 -5.23562670e-01]] [[-4.05537337e-01 -9.83512998e-02 -2.14733526e-01 ... 6.57114446e-01 -3.01021904e-01 -1.56929016e-01] [ 8.27857256e-01 -4.64384034e-02 4.66784567e-01 ... 3.43060553e-01 -9.35272872e-01 3.57724458e-01] [-6.94731716e-03 1.21913457e+00 9.24382746e-01 ... 3.87173265e-01 -1.37773383e+00 -8.08754981e-01] ... [ 4.41231400e-01 8.08833599e-01 6.62914515e-01 ... 1.00413740e+00 -5.11898518e-01 5.10121644e-01] [ 4.30585027e-01 7.93778524e-02 4.85396534e-02 ... -3.80265146e-01 -5.57571650e-01 -9.23991323e-01] [-6.28229797e-01 8.97262394e-02 2.60915071e-01 ... 1.12270489e-01 5.73836230e-02 -5.90218186e-01]] ... [[ 5.13791859e-01 -3.88056278e-01 -3.62362787e-02 ... -1.43219781e+00 -3.57399970e-01 7.70175457e-01] [-1.02607965e-01 6.14768803e-01 -3.15234989e-01 ... 5.98813519e-02 -6.50915265e-01 6.24716759e-01] [ 7.27957161e-03 -9.91598964e-02 1.14848948e+00 ... -1.42257142e+00 2.77668536e-01 6.23511314e-01] ... 
[ 3.15039188e-01 -3.63124967e-01 -2.54474372e-01 ... -1.95653826e-01 -3.70756179e-01 2.45389827e-02] [ 5.37126541e-01 1.89674571e-01 -4.72075045e-01 ... -1.28392959e+00 -1.06718266e+00 -6.51889145e-01] [ 5.96334577e-01 -5.16040027e-01 1.59415022e-01 ... 5.64356804e-01 -3.29596967e-01 3.61658446e-02]] [[ 9.34636056e-01 1.54375389e-01 -9.49672982e-02 ... 1.33497822e+00 -3.64375055e-01 -1.05095342e-01] [ 1.87542979e-02 8.13266098e-01 1.09735703e+00 ... -6.92180514e-01 -9.11353052e-01 5.05354345e-01] [-4.24008101e-01 2.13910088e-01 -1.37034789e-01 ... -5.12885451e-01 -4.40358818e-01 9.04514492e-02] ... [ 8.63410354e-01 -1.88408390e-01 -4.82154518e-01 ... -6.54978693e-01 -9.13292021e-02 -4.70482320e-01] [ 7.73295835e-02 -1.25137553e-01 8.56942475e-01 ... -5.71512640e-01 -3.24748526e-03 -6.10219479e-01] [ 1.44150957e-01 -7.09495127e-01 7.25672603e-01 ... 2.47434661e-01 1.23096097e+00 1.18049169e+00]] [[ 8.89575839e-01 1.72624916e-01 2.67026603e-01 ... -3.88019509e-03 -1.35427380e+00 -2.04205010e-02] [ 5.75075984e-01 -1.42166829e+00 6.95614278e-01 ... -6.56351209e-01 3.57712448e-01 -7.26050675e-01] [-7.39887804e-02 6.22498453e-01 1.24764264e+00 ... -9.62551713e-01 -4.02571522e-02 -6.82208389e-02] ... [ 5.58496192e-02 7.01937199e-01 -1.65745258e+00 ... -1.76423550e-01 1.02880239e+00 -8.84595275e-01] [-4.93038267e-01 -8.59503567e-01 7.02875078e-01 ... -1.04857713e-01 -5.18462896e-01 -4.59493607e-01] [-6.91501796e-01 -9.93651867e-01 -7.65796185e-01 ... -2.46934816e-01 -3.82414877e-01 7.56430209e-01]]] [[[-9.68315750e-02 -6.95630535e-02 3.11385512e-01 ... 1.03895634e-01 2.92188078e-01 4.84608300e-02] [ 2.27329463e-01 -4.26915698e-02 -6.01585060e-02 ... 2.30514392e-01 -7.59225845e-01 -2.37712592e-01] [ 1.24964416e-01 7.58609548e-02 9.33395922e-02 ... 2.33603835e-01 -2.23384559e-01 -2.80611277e-01] ... [ 2.23873243e-01 1.42486140e-01 2.94930339e-01 ... -7.78500795e-01 -1.33740306e-01 -1.08412340e-01] [-4.20807689e-01 5.88048398e-02 -2.40544513e-01 ... 
-1.73975170e-01 -1.58509806e-01 5.22105634e-01] [-7.64827728e-02 -3.41168135e-01 1.83284823e-02 ... 3.03895682e-01 -5.56622922e-01 -1.29244402e-01]] [[ 4.31861609e-01 5.47186375e-01 -1.06309861e-01 ... 8.75716150e-01 4.03834246e-02 -3.98627847e-01] [-1.65688112e-01 7.54017293e-01 -1.68094054e-01 ... 2.05705643e-01 2.48517543e-01 -1.42456099e-01] [ 1.22428469e-01 -2.57663578e-01 5.25048256e-01 ... 1.85306817e-01 1.50077477e-01 -8.70344788e-02] ... [ 2.47663662e-01 -1.00255750e-01 -1.18561275e-01 ... -3.75446200e-01 -1.00481160e-01 2.71139592e-01] [-2.75093645e-01 -3.50477099e-01 4.18136299e-01 ... 4.95032929e-02 2.66877741e-01 -1.30577147e-01] [ 1.79920033e-01 -3.50796580e-01 4.81455252e-02 ... 2.00229779e-01 -1.65830646e-02 1.13574035e-01]] [[ 1.32386655e-01 -7.81824529e-01 -4.92215961e-01 ... 2.67047733e-01 -5.41207334e-03 -1.01584032e-01] [-6.54679611e-02 3.87164243e-02 -6.46570742e-01 ... 8.32144618e-02 3.84423226e-01 -1.95503205e-01] [ 1.37795463e-01 4.72880781e-01 -8.16446617e-02 ... 1.48788318e-01 2.93969065e-01 1.59005240e-01] ... [ 3.95634443e-01 4.34940904e-01 -5.60970664e-01 ... -4.61713791e-01 2.91008562e-01 2.55145460e-01] [ 6.28930509e-01 2.80300230e-01 -5.31148352e-02 ... 3.84738902e-03 1.96826681e-01 -8.36624473e-04] [ 3.33679020e-02 -4.14308786e-01 -6.28695861e-02 ... 6.83684461e-03 -3.94194454e-01 -4.98886734e-01]] ... [[ 2.21946239e-01 2.43961960e-02 2.05969125e-01 ... 3.03004593e-01 -2.72118866e-01 1.31420374e-01] [ 2.67758906e-01 -6.96653724e-02 -4.22924459e-01 ... -4.15014595e-01 5.37457407e-01 3.83675516e-01] [-9.28632021e-02 -4.72643256e-01 -2.24641591e-01 ... 7.36594796e-01 8.53333399e-02 1.31944478e-01] ... [ 2.50217855e-01 2.91089654e-01 1.11830711e+00 ... -2.37580881e-01 -2.71725040e-02 -4.31087434e-01] [ 1.73254654e-01 -1.05775900e-01 2.45898888e-01 ... -1.79777145e-01 4.47658539e-01 3.68678197e-02] [-1.82990715e-01 -5.48392713e-01 -1.20676182e-01 ... 
-3.13313067e-01 1.93093449e-01 2.85186887e-01]] [[-1.34316862e-01 4.10988688e-01 2.34562054e-01 ... 1.68117940e-01 -2.62814283e-01 1.93253100e-01] [ 2.36034051e-01 2.93550670e-01 -7.14293942e-02 ... -1.25451619e-02 6.99598610e-01 3.31149369e-01] [ 1.96745638e-02 -1.03968039e-01 4.04863089e-01 ... -5.47528625e-01 -1.53762221e-01 2.65761018e-01] ... [-1.48449531e-02 9.24364850e-03 -2.67647594e-01 ... -2.29390413e-01 1.89227149e-01 -4.98181462e-01] [ 1.71080083e-01 1.79753020e-01 -3.28172654e-01 ... -3.22930336e-01 1.14841640e-01 -3.71386707e-02] [-5.03973424e-01 3.70452851e-02 -5.64569309e-02 ... -1.21649951e-01 2.27992963e-02 -1.99135020e-01]] [[ 4.51008886e-01 6.21865809e-01 1.82946883e-02 ... -7.12742731e-02 2.20719188e-01 -4.19004142e-01] [-1.22741170e-01 -3.18011135e-01 -1.94249630e-01 ... -9.20925587e-02 2.18157321e-02 -2.52195280e-02] [-1.58570215e-01 -1.39349744e-01 7.22075403e-02 ... -6.32534802e-01 3.31444651e-01 8.10893953e-01] ... [-5.77087998e-01 2.97647387e-01 7.67577440e-02 ... 7.78627694e-02 -7.11534798e-01 2.89472550e-01] [ 1.60116389e-01 -6.04661047e-01 -3.99173275e-02 ... -7.10131750e-02 5.12681901e-01 1.06495330e-02] [ 1.68582052e-01 -6.03907585e-01 -5.31557083e-01 ... -4.51178253e-01 3.01148713e-01 -1.18027151e-01]]]] ... [[[[ 1.47346258e+00 -3.11088061e+00 -9.95483994e-03 ... -1.07805490e+00 1.64747134e-01 9.27473545e-01] [-7.60477126e-01 3.24968410e+00 -4.94238734e-01 ... -1.10002422e+00 -7.09213734e-01 -4.35216397e-01] [ 2.40623331e+00 1.52671266e+00 8.37984443e-01 ... -9.84167516e-01 4.01803792e-01 -9.23323989e-01] ... [ 3.52540940e-01 -9.73593593e-01 -2.99919629e+00 ... 1.23295343e+00 -4.04972732e-01 8.75577748e-01] [-4.07120675e-01 -4.04883593e-01 -3.57397020e-01 ... -9.70596433e-01 3.08281469e+00 -2.08118463e+00] [ 1.29627848e+00 -7.73768663e-01 -1.03913140e+00 ... -3.74707532e+00 2.92913508e+00 5.00241816e-02]] [[ 1.31224978e+00 1.82726109e+00 9.72517282e-02 ... 
-1.69791758e+00 -7.41402626e-01 1.65641046e+00] [ 4.45407301e-01 2.23515010e+00 1.06471574e+00 ... -1.16513824e+00 2.71527290e-01 1.34257555e+00] [ 9.28275436e-02 2.12578177e+00 -2.72958577e-01 ... -2.93578482e+00 4.69325989e-01 1.56811595e-01] ... [ 1.21584728e-01 -6.61294088e-02 1.38912463e+00 ... -9.91978168e-01 -1.09683371e+00 -1.41463530e+00] [ 6.53867126e-02 1.35879862e+00 1.22350287e+00 ... -2.02702737e+00 2.61694098e+00 1.75371110e-01] [ 3.55327278e-01 1.03710163e+00 5.70328176e-01 ... -3.23168206e+00 4.47247177e-01 -2.02259779e+00]] [[ 8.14328969e-01 -2.13469982e+00 4.89594638e-01 ... 2.01747283e-01 2.46395683e+00 1.43898892e+00] [-7.17710614e-01 -1.25019252e-01 1.52968836e+00 ... -4.93220657e-01 2.08272600e+00 -3.61418396e-01] [ 4.19691391e-02 2.55213946e-01 1.27460337e+00 ... -2.12000802e-01 6.99358523e-01 1.53592896e+00] ... [-2.18258351e-01 -2.83490252e+00 -3.16499621e-01 ... 1.92164981e+00 -2.67163253e+00 2.79773259e+00] [-9.52956140e-01 -1.34709466e+00 6.05240703e-01 ... 1.27307534e+00 -2.83619070e+00 8.96743536e-01] [-1.19281971e+00 -8.74562085e-01 -1.52775988e-01 ... -3.35414568e-03 -3.00564146e+00 -1.50011992e+00]] ... [[-2.09047699e+00 -1.29220498e+00 -2.08447170e+00 ... -2.09956026e+00 1.34814000e+00 -4.67037231e-01] [-1.84028372e-01 -7.65982568e-01 -1.26578403e+00 ... 3.63336876e-02 -7.79038608e-01 3.17300177e+00] [ 1.43995416e+00 -3.79788995e-01 1.04178178e+00 ... 5.07050753e-02 3.59091572e-02 3.86150032e-02] ... [ 1.89097309e+00 1.33973256e-01 2.86772072e-01 ... -6.01614118e-01 -1.01244211e+00 -3.00356460e+00] [-3.69426012e-01 9.77689773e-02 -1.39082432e-01 ... -3.26508784e+00 -8.94925833e-01 -1.27158988e+00] [ 9.96184409e-01 -5.91250896e-01 -9.04178262e-01 ... 4.38901281e+00 3.97059679e-01 -8.12960267e-01]] [[ 8.05213451e-01 3.27682197e-01 2.90768445e-01 ... 1.31276834e+00 5.87261617e-01 7.79408038e-01] [ 1.29480028e+00 7.64605999e-01 -8.74301884e-03 ... 
3.70469779e-01 -1.17583930e+00 1.27471101e+00] [ 2.34573316e-02 1.12509084e+00 2.39907670e+00 ... -1.56059951e-01 -2.46036792e+00 1.81473064e+00] ... [-1.06859088e+00 1.09302068e+00 -1.63452017e+00 ... -6.77516580e-01 -1.12289655e+00 1.03632390e+00] [ 1.06149411e+00 5.09774173e-03 4.87104923e-01 ... 7.14933991e-01 -1.93359888e+00 -3.93237066e+00] [ 7.00021923e-01 2.62020290e-01 4.44303215e-01 ... 1.65836632e-01 8.68102491e-01 -3.29935759e-01]] [[-7.63830960e-01 -1.39155257e+00 1.02147341e+00 ... 8.51248860e-01 2.87553102e-01 7.29436815e-01] [ 2.22894251e-01 -1.08732946e-01 1.41690755e+00 ... -3.14875185e-01 6.46669507e-01 -3.05768788e-01] [-9.59588110e-01 3.69434267e-01 -3.06504577e-01 ... 1.35933757e+00 9.68981743e-01 1.42921960e+00] ... [-1.25323963e+00 -6.41467988e-01 2.67713284e+00 ... -9.61453259e-01 4.03691798e-01 -2.19354942e-01] [ 2.00762129e+00 7.60395467e-01 -7.78756618e-01 ... -2.49259782e+00 -4.11611557e-01 -6.03118062e-01] [-1.31692696e+00 7.30035305e-01 1.28340816e+00 ... -2.45606542e+00 7.09104776e-01 -1.46538568e+00]]] [[[-2.24980295e-01 -8.02443743e-01 -1.32362056e-03 ... 6.03396073e-02 1.34737253e-01 3.93585891e-01] [-8.37066844e-02 1.47206321e-01 4.20871153e-02 ... -1.82674646e-01 -8.24327022e-02 -2.20781565e-02] [ 1.24987736e-01 -6.85409009e-01 -1.13137580e-01 ... -3.29650372e-01 -7.83550888e-02 2.30130568e-01] ... [ 4.16316211e-01 -1.92886293e-01 -2.36316487e-01 ... -7.31996298e-02 1.80004492e-01 -1.58942845e-02] [ 1.42066330e-01 -5.95122218e-01 -4.44452539e-02 ... 2.22843349e-01 -5.50354421e-01 -2.36446679e-01] [ 2.00205922e-01 3.01859111e-01 -4.28280652e-01 ... 3.70588690e-01 3.23256701e-01 -1.73289880e-01]] [[-1.06189884e-01 -2.40813255e-01 -2.74740577e-01 ... -2.22187921e-01 -1.74529508e-01 4.20276880e-01] [ 4.78581548e-01 1.32332578e-01 -1.72853172e-01 ... -3.98367345e-01 2.24516749e-01 -5.61949968e-01] [ 3.14543396e-01 2.13085935e-02 -9.10938308e-02 ... -2.40682781e-01 5.47910705e-02 9.19895843e-02] ... 
[ 1.60519660e-01 -1.45656824e-01 2.87145108e-01 ... -2.52739161e-01 8.36950168e-02 4.38033938e-01] [-2.45471641e-01 1.48150073e-02 1.51647955e-01 ... -3.00968677e-01 -1.49666816e-01 -1.53863162e-01] [-4.48228717e-02 1.41995743e-01 3.19696479e-02 ... 1.47156879e-01 6.88064024e-02 2.01344509e-02]] [[-2.22287551e-01 3.42761487e-01 -8.90651122e-02 ... 1.55350706e-02 -1.84208155e-01 2.38947064e-01] [ 5.87615892e-02 -2.43862957e-01 4.79082987e-02 ... -5.44554889e-01 3.77331600e-02 2.47025453e-02] [ 1.22134663e-01 -3.13141309e-02 3.72036844e-01 ... -1.71884984e-01 -3.89860123e-02 1.62451193e-01] ... [ 8.88871551e-02 -2.25251928e-01 -3.75077218e-01 ... -1.79189101e-01 3.50531340e-01 1.52212501e-01] [ 8.54131430e-02 3.01684558e-01 2.16794968e-01 ... -3.16840917e-01 1.90265074e-01 -5.36229275e-02] [-2.69407332e-01 -2.16093779e-01 -2.59191599e-02 ... 4.97713327e-01 7.45895654e-02 -5.42604029e-01]] ... [[-2.74841696e-01 2.40428910e-01 -2.12027118e-01 ... -4.18529809e-01 -4.15742159e-01 1.36057898e-01] [-1.92223519e-01 -1.70468867e-01 4.69193101e-01 ... 5.25189526e-02 -2.29197159e-01 6.77673995e-01] [ 3.20707381e-01 -3.75253588e-01 2.22142056e-01 ... -1.30727470e-01 7.18115866e-02 -1.84603080e-01] ... [-1.70012534e-01 -5.07746302e-02 3.26275319e-01 ... -5.46173304e-02 2.23280340e-01 -1.22773767e-01] [-4.74951178e-01 1.80642530e-02 2.74138954e-02 ... -3.34420145e-01 3.90149355e-01 3.15244496e-02] [ 3.38703066e-01 -1.32732213e-01 -1.16112567e-01 ... 4.64865752e-02 -3.60624120e-02 3.43955219e-01]] [[ 2.57875204e-01 2.04414561e-01 -1.39287278e-01 ... 2.02552691e-01 6.37888312e-02 -1.05542988e-01] [ 7.21440986e-02 -5.63743353e-01 -2.75324255e-01 ... -1.75753891e-01 1.04861431e-01 5.94263673e-02] [-1.98385641e-01 -4.75340426e-01 1.33495599e-01 ... 5.49754024e-01 -9.93330684e-03 1.70323521e-01] ... [ 3.38081539e-01 1.45700216e-01 -3.37987453e-01 ... -3.62755626e-01 5.62047213e-02 -1.45591035e-01] [-3.17175865e-01 2.28409588e-01 5.00909805e-01 ... 
-1.21006750e-01 1.66647017e-01 -3.79638642e-01] [-3.11604589e-01 1.21551052e-01 -5.53827956e-02 ... -2.19724283e-01 2.54121572e-01 2.59446830e-01]] [[-6.24433696e-01 1.21923229e-02 3.87680113e-01 ... -4.59028445e-02 -4.96779084e-01 -2.76665568e-01] [-3.33580166e-01 2.88826734e-01 -4.39401656e-01 ... -1.03311941e-01 -5.09572268e-01 -2.70554692e-01] [ 5.86532116e-01 2.19659448e-01 -5.84506318e-02 ... -2.05558121e-01 1.58026740e-01 -1.03505760e-01] ... [-2.25011945e-01 6.90620765e-02 3.85553986e-02 ... 1.40723616e-01 -1.90304801e-01 -5.48410356e-01] [-1.03514930e-02 4.72115539e-02 2.50817835e-01 ... 5.66201031e-01 -5.20384252e-01 2.53598839e-01] [-4.15607065e-01 -7.96056986e-02 -1.34337798e-01 ... -1.18724793e-01 3.17886472e-01 -1.35369152e-01]]] [[[-7.04128817e-02 6.01145089e-01 -7.00831972e-03 ... 7.01530993e-01 1.90728623e-02 -1.77648991e-01] [ 3.50358695e-01 1.66110814e-01 1.77410364e-01 ... 2.29481906e-01 -4.29850817e-01 1.72100618e-01] [-4.23338234e-01 2.49649033e-01 3.29908878e-01 ... 3.99724364e-01 2.34582633e-01 2.53768533e-01] ... [-6.22738563e-02 -2.91074216e-01 -8.60197067e-01 ... 2.02922881e-01 -2.56493807e-01 5.07812440e-01] [-7.97156930e-01 -3.38497907e-01 3.15337509e-01 ... 4.41547096e-01 3.98329616e-01 -1.40418261e-01] [ 1.78139552e-01 4.89971191e-01 1.20219521e-01 ... 3.07543188e-01 1.46270290e-01 5.95563352e-01]] [[-3.50752994e-02 -6.37695074e-01 5.12700491e-02 ... 4.96989749e-02 -1.80868190e-02 -3.15554947e-01] [ 3.82092208e-01 6.65754139e-01 -1.67664513e-01 ... -3.52033228e-01 -2.57480204e-01 6.19541049e-01] [-5.75803399e-01 -2.08207950e-01 1.90047324e-01 ... 2.99576670e-01 -3.71402234e-01 1.77305996e-01] ... [-1.61983550e-01 -2.36445531e-01 2.53014624e-01 ... 7.44084835e-01 -3.22098166e-01 -8.03629458e-01] [ 2.27887481e-01 2.17064753e-01 3.07243794e-01 ... 4.32808489e-01 -6.10299766e-01 -5.91081195e-02] [-1.95884988e-01 -8.64805758e-01 3.84482384e-01 ... 
-1.35693103e-01 -4.11836267e-01 6.81455374e-01]] [[ 7.02975765e-02 2.00430796e-01 -4.28123884e-02 ... -1.50533617e-01 -1.34228706e-01 -8.41157045e-03] [ 3.72997336e-02 -1.22918501e-01 5.00843704e-01 ... 1.36563510e-01 -2.77011365e-01 3.72412205e-01] [-3.45937699e-01 1.06089540e-01 1.39452562e-01 ... 1.48930073e-01 -4.28323485e-02 -2.66084373e-01] ... [-3.89707506e-01 3.73315930e-01 7.56528139e-01 ... -4.37572360e-01 -3.55922878e-01 -1.38792560e-01] [-2.23345026e-01 1.75313011e-01 4.43768263e-01 ... -3.84671867e-01 -7.20110834e-01 -3.48788977e-01] [ 1.18291065e-01 3.63594070e-02 3.47109109e-01 ... -8.88998687e-01 -8.38586874e-03 -2.53711760e-01]] ... [[ 1.94746196e-01 8.46519649e-01 1.81841895e-01 ... -5.11224926e-01 -4.97269422e-01 -3.83584678e-01] [ 5.34654021e-01 -6.24598972e-02 -1.32987782e-01 ... 3.20651419e-02 -4.35485601e-01 -1.16323512e-02] [ 3.31249237e-01 3.06029797e-01 4.24480736e-01 ... -7.98912905e-03 -9.09638777e-02 4.68496174e-01] ... [-3.34780037e-01 -3.69779803e-02 3.52926925e-03 ... 1.88688651e-01 -9.27316323e-02 -5.55929601e-01] [-2.01096579e-01 -8.46073508e-01 -2.49081582e-01 ... 1.05290011e-01 -2.77338251e-02 3.98783870e-02] [ 5.24568111e-02 -1.39696270e-01 7.85881817e-01 ... -5.83020627e-01 1.26501739e-01 -5.23834825e-02]] [[ 2.96247840e-01 -6.67523324e-01 2.73988366e-01 ... 2.38735273e-01 -1.38503537e-01 6.96332574e-01] [ 1.72070563e-01 4.44926441e-01 -2.76960880e-01 ... -1.16674602e-01 -2.97194362e-01 -4.17803973e-01] [ 2.16827840e-01 9.10013393e-02 -1.93014145e-01 ... 6.43106699e-01 -7.17056572e-01 -6.21461451e-01] ... [ 6.71141315e-03 3.33198875e-01 -7.86055863e-01 ... 9.04464304e-01 2.63228655e-01 -2.00944379e-01] [-5.30342162e-01 -1.36019796e-01 6.18311577e-02 ... 3.18686724e-01 1.18069828e-01 3.21840532e-02] [-7.53174305e-01 6.06216133e-01 -4.89912152e-01 ... 1.00910556e+00 4.47294861e-01 -1.05480723e-01]] [[ 1.14518918e-01 1.11162074e-01 -5.18984199e-01 ... 
3.49890604e-03 2.83517778e-01 6.71720088e-01] [ 1.45886958e-01 -4.65088189e-02 7.43803918e-01 ... 1.82463124e-01 1.16851158e-01 -5.10771155e-01] [ 3.79584759e-01 -5.44246197e-01 -7.28903562e-02 ... -1.14742562e-01 7.84740388e-01 8.34169313e-02] ... [-3.44613701e-01 3.46360415e-01 8.95563602e-01 ... 3.72044504e-01 1.61317766e-01 -3.71373266e-01] [-2.61903822e-01 -6.75899565e-01 -7.22157657e-01 ... 5.48105359e-01 2.57657051e-01 -3.72098267e-01] [-1.59223042e-02 8.61839294e-01 -7.52398074e-01 ... -2.54043281e-01 2.90349185e-01 2.05318257e-01]]] [[[ 1.87705103e-02 8.13571736e-02 -5.63861467e-02 ... 6.62288163e-03 -5.79450391e-02 6.74310252e-02] [ 5.33000119e-02 2.51362356e-03 2.58423835e-02 ... 1.58864230e-01 -2.95680296e-03 -2.12131262e-01] [-1.06788494e-01 7.86567256e-02 -1.91792205e-03 ... 3.54069732e-02 -1.75137162e-01 -6.03799447e-02] ... [-5.68005778e-02 4.69422340e-02 -5.19192256e-02 ... -7.68613592e-02 -2.59840079e-02 1.52389422e-01] [ 5.02750129e-02 9.75810364e-02 -3.58313471e-02 ... 8.92031565e-02 -9.84519068e-03 4.39997092e-02] [-3.35099436e-02 8.47672150e-02 4.88705002e-02 ... 5.51483519e-02 2.68956693e-03 -6.82665706e-02]] [[ 1.90009046e-02 -2.44062413e-02 2.29957160e-02 ... -4.63955812e-02 5.75820021e-02 9.89359766e-02] [ 5.87668736e-03 1.08582459e-01 -3.38631161e-02 ... 2.98561584e-02 5.17290831e-02 1.33856267e-01] [-1.35453209e-01 -5.57625778e-02 2.21362069e-01 ... -9.12499875e-02 1.40506759e-01 -1.32999748e-01] ... [-1.69898838e-01 1.27227217e-01 3.35322082e-01 ... 3.20654139e-02 -5.87579682e-02 1.17663264e-01] [-5.29232947e-03 -1.45549579e-02 -1.04800761e-01 ... 8.58615432e-03 -9.64493454e-02 2.71062497e-02] [-1.49748147e-01 1.66371688e-01 2.46162582e-02 ... -1.83539577e-02 1.40503526e-01 -2.32618004e-02]] [[ 6.19223434e-03 6.79863011e-03 1.42006064e-02 ... -5.15036583e-02 -2.22039014e-01 -3.81556563e-02] [ 8.59626383e-02 1.62384197e-01 -4.05496806e-02 ... 
2.09058002e-01 -8.97209793e-02 1.95174381e-01] [-1.24677330e-01 6.80323690e-03 2.72104591e-02 ... -2.03996733e-01 -7.51625746e-02 -9.58298706e-03] ... [-6.16258848e-03 -1.58769637e-01 2.07831040e-02 ... 2.49504268e-01 -8.55859444e-02 5.75060658e-02] [-1.96372345e-02 2.13235710e-02 -2.09389720e-02 ... 1.84311159e-02 5.52663626e-03 -8.80107731e-02] [ 2.26082876e-02 1.95379212e-01 9.37038660e-02 ... 3.12637061e-01 -9.99024045e-03 -1.52168706e-01]] ... [[-8.02402943e-02 -1.73161417e-01 -2.23754823e-01 ... -1.51461080e-01 -1.69711262e-01 -2.62579340e-02] [-1.45143077e-01 2.27313921e-01 1.62242651e-02 ... 3.80179584e-02 1.83794424e-02 3.42299461e-01] [ 4.72467840e-02 6.69549033e-02 -2.15163738e-01 ... -2.45274395e-01 9.08833072e-02 -2.01675683e-01] ... [-2.91531868e-02 -5.15674911e-02 1.85557976e-01 ... -3.00776027e-02 8.71137232e-02 7.91627765e-02] [-5.23293130e-02 -4.14770067e-01 2.14687437e-01 ... -5.12976609e-02 1.56130353e-02 2.08772365e-02] [-7.19138235e-03 2.21281722e-01 7.09303617e-02 ... -2.26804495e-01 1.09940521e-01 3.18979919e-02]] [[-7.84752294e-02 -5.77495955e-02 -6.21958673e-02 ... 4.04021256e-02 -1.75805420e-01 1.36108277e-02] [-3.08574848e-02 1.24029972e-01 5.00753941e-03 ... 3.38156745e-02 -3.60506289e-02 -1.33402972e-02] [-2.72929389e-03 -4.24937829e-02 -1.43857570e-02 ... -1.07592540e-02 -1.18685812e-01 1.45895258e-01] ... [-4.97525632e-02 -1.04680866e-01 -6.47653639e-02 ... 9.90403444e-03 -7.45666772e-02 -1.85071118e-02] [-1.49435073e-01 -7.61313364e-02 1.57991145e-02 ... -4.03298512e-02 8.60989466e-02 -1.20626137e-01] [-7.59127140e-02 1.72434434e-01 -2.34274417e-02 ... 6.08157329e-02 6.89346641e-02 5.70593439e-02]] [[ 4.38675992e-02 -1.16655700e-01 2.62924969e-01 ... -6.72925217e-03 1.46756232e-01 1.79955319e-01] [-1.98543087e-01 1.89445689e-02 2.67444532e-02 ... -3.96156795e-02 -2.88388401e-01 -9.19226781e-02] [-1.73123345e-01 1.43934816e-01 -1.22830458e-01 ... 1.40596494e-01 -4.60010022e-02 1.64242983e-02] ... 
[-3.40559781e-02 -4.64972295e-02 -7.46340817e-03 ... -1.06083639e-01 -2.90718246e-02 -2.61616670e-02] [ 2.57567391e-02 5.19122966e-02 -5.71557470e-02 ... -1.46727279e-01 1.79740235e-01 1.71359420e-01] [-1.48147166e-01 1.14990853e-01 8.62383619e-02 ... -6.44748509e-02 -6.63206950e-02 1.99685711e-02]]] [[[ 3.89814049e-01 -1.38946271e+00 -1.25178182e+00 ... -7.70224910e-03 -3.16036493e-01 6.96029738e-02] [ 2.57103622e-01 -6.88741624e-01 1.61017752e+00 ... 7.36484379e-02 3.75480354e-01 -1.71670783e+00] [ 3.67214143e-01 -1.30779162e-01 4.21081841e-01 ... 6.92378700e-01 5.99484622e-01 -1.04122388e+00] ... [-2.54921883e-01 1.40801585e+00 -5.41230261e-01 ... -7.43550956e-01 -3.68597329e-01 9.04450536e-01] [-5.17633148e-02 -1.86485425e-01 -3.80445749e-01 ... -4.35822457e-01 -7.88780153e-02 -3.60586792e-01] [-8.52451548e-02 -1.44450748e+00 -6.01183116e-01 ... -5.00656724e-01 -4.98659223e-01 -1.89320847e-01]] [[ 8.50164115e-01 1.06085157e+00 6.40205264e-01 ... 1.60840511e-01 -3.95611107e-01 -6.41930580e-01] [ 6.99361712e-02 -3.67534667e-01 -6.87680423e-01 ... -3.02486308e-02 -5.96619070e-01 -2.92020351e-01] [-9.25748229e-01 -5.12284756e-01 -8.84795845e-01 ... -1.89214960e-01 5.78271449e-01 -8.43272030e-01] ... [-3.74073535e-01 2.97175437e-01 7.64766395e-01 ... 6.77961171e-01 2.65553117e-01 -8.09887588e-01] [-6.70212805e-01 -1.22243010e-01 -4.69526052e-01 ... -3.74128938e-01 -1.07936151e-01 1.67693838e-01] [ 1.11333102e-01 8.74440134e-01 -1.64169833e-01 ... 6.95571125e-01 5.01833022e-01 4.04345512e-01]] [[-4.74821299e-01 -2.90224493e-01 -7.31327832e-01 ... -1.56055968e-02 4.40022707e-01 -1.82539418e-01] [ 6.47154152e-01 9.18810427e-01 5.68327904e-01 ... -8.10478404e-02 1.26644635e+00 3.85219514e-01] [-4.10470754e-01 1.23752964e+00 3.20573121e-01 ... -2.09036455e-01 -6.73681200e-01 -8.34389806e-01] ... [ 2.78408885e-01 -8.18304181e-01 6.17913544e-01 ... -4.62998927e-01 2.53426224e-01 6.92370594e-01] [ 7.11901128e-01 1.63283259e-01 -7.28934467e-01 ... 
-3.27155977e-01 -1.45617580e+00 2.24435329e-02] [ 9.72414851e-01 -1.75225294e+00 -6.31216407e-01 ... 3.09498668e-01 -1.57104850e-01 3.77688825e-01]] ... [[-6.91463202e-02 -1.81637868e-01 -9.44129154e-02 ... -6.96282834e-02 -3.85962576e-02 7.38211155e-01] [ 3.91276600e-03 -1.18447533e-02 -5.11873722e-01 ... 8.58190417e-01 -5.11107564e-01 -1.01337349e+00] [-2.76943922e-01 3.17649841e-01 -3.29270095e-01 ... 2.65407890e-01 -4.99321431e-01 6.88045263e-01] ... [-8.64236206e-02 4.58448172e-01 4.38590199e-01 ... 7.88444638e-01 8.38236868e-01 -1.19840614e-01] [-6.24908030e-01 -9.21325266e-01 -7.77415514e-01 ... 6.52801216e-01 -5.83923519e-01 3.88040543e-01] [ 1.42966938e+00 -1.09459090e+00 -6.50567949e-01 ... 4.93533671e-01 -6.25733852e-01 2.95832574e-01]] [[ 7.36900866e-01 -1.48468062e-01 5.65540671e-01 ... 7.17890918e-01 -5.25321662e-01 1.13492358e+00] [-1.00233376e+00 -6.64369762e-01 -2.81067073e-01 ... -7.16932535e-01 8.88184607e-01 3.77016127e-01] [ 1.88844606e-01 3.75640571e-01 -1.71713680e-01 ... -1.26407787e-01 1.02022183e+00 -9.90403146e-02] ... [-4.74166214e-01 1.99602723e-01 -5.52700996e-01 ... -2.14178368e-01 -2.28215903e-01 1.44854048e-02] [-3.33476216e-01 -1.09645285e-01 -5.83266139e-01 ... 1.60516202e-01 5.73716350e-02 -1.05715466e+00] [-4.25644636e-01 4.54753578e-01 1.10310622e-01 ... 2.10329667e-01 -8.23098242e-01 -2.31930509e-01]] [[-1.22699559e+00 1.42090574e-01 -2.95474827e-01 ... 7.95309961e-01 -7.10525364e-02 -7.91543067e-01] [-1.00073230e+00 -1.48833048e+00 4.04037654e-01 ... -4.61636603e-01 1.21188708e-01 -7.66486526e-01] [-3.67748231e-01 -9.91417229e-01 6.58496857e-01 ... 7.58959353e-01 8.30693126e-01 2.04336464e-01] ... [-1.25596488e+00 2.13557899e-01 9.14600551e-01 ... 1.34768248e+00 5.66408336e-01 -7.72461593e-02] [ 1.28403807e+00 6.50321245e-01 -1.07555342e+00 ... -3.00739616e-01 -6.43173158e-01 2.87621766e-01] [-6.37439251e-01 -3.93134445e-01 -2.50137478e-01 ... 
-9.93051052e-01 -3.13171357e-01 4.96872485e-01]]] [[[ 2.92411357e-01 4.22946721e-01 -6.77065402e-02 ... -1.47510737e-01 -7.55904853e-01 2.98265517e-01] [ 5.66985071e-01 -7.60421483e-03 -9.63366553e-02 ... -4.77577269e-01 -8.29609185e-02 -2.95678303e-02] [-1.06829122e-01 2.24922672e-01 -1.05309807e-01 ... -4.62156266e-01 -4.76375550e-01 1.88341931e-01] ... [-3.05787325e-01 -6.13288507e-02 1.02259204e-01 ... 1.04902685e-01 9.60375927e-03 -5.78132505e-03] [ 7.28158429e-02 -2.99877256e-01 1.70551553e-01 ... 1.71425462e-01 -5.93300343e-01 -2.04881690e-02] [ 3.61491621e-01 4.18592781e-01 -1.63415715e-01 ... 2.99746692e-01 -2.46708393e-02 -2.61923790e-01]] [[ 1.31832018e-01 1.60973102e-01 -5.47833323e-01 ... -8.26261461e-01 -9.64866132e-02 -9.16582108e-01] [ 3.39774787e-01 1.15995575e-02 -3.15675378e-01 ... 3.52746964e-01 1.62667125e-01 -1.44279346e-01] [ 1.86959937e-01 3.56289148e-01 -1.75761595e-01 ... -7.25488141e-02 3.86457890e-01 8.25145245e-02] ... [ 5.69279075e-01 4.46217120e-01 -7.18623539e-03 ... 1.47203624e-01 -6.16058521e-02 -2.70105004e-01] [ 2.58690238e-01 -1.48276865e-01 4.04699981e-01 ... -2.82433391e-01 -1.49204805e-01 -1.29250005e-01] [ 3.59623849e-01 1.69011235e-01 3.15405667e-01 ... 2.60074168e-01 -2.00213552e-01 6.53669564e-03]] [[-6.25542402e-01 3.11756402e-01 1.23153776e-01 ... 1.56874597e-01 5.43640614e-01 6.54740632e-01] [-5.75198710e-01 -3.69402289e-01 1.41992792e-01 ... -2.06731707e-01 5.39232314e-01 -2.67619729e-01] [-2.28626281e-01 2.13959232e-01 -4.48996872e-01 ... 4.19473574e-02 -1.87791273e-01 1.06053144e-01] ... [-2.25759506e-01 -9.90822688e-02 5.96303381e-02 ... 2.14286149e-01 -1.36041507e-01 -1.18724830e-01] [ 1.01736911e-01 -1.30588248e-01 3.80191207e-01 ... -4.06359524e-01 5.71549833e-01 1.32800445e-01] [-1.03674747e-01 -9.19109955e-02 -4.24506932e-01 ... -2.23452270e-01 -3.69482040e-02 -3.25795077e-02]] ... [[-3.98157313e-02 4.66390163e-01 6.22845054e-01 ... 
3.17429811e-01 -7.30950907e-02 1.95763513e-01] [ 3.94590467e-01 2.35873950e-03 1.55191228e-01 ... -1.33375123e-01 -3.55594344e-02 -5.09352947e-04] [ 2.68877685e-01 -6.09452017e-02 2.49836013e-01 ... -1.56398997e-01 2.25581199e-01 -2.07274586e-01] ... [ 5.62444329e-01 6.95286334e-01 -1.00322425e-01 ... -3.07032526e-01 5.82462192e-01 -2.32834637e-01] [ 2.25955516e-01 -2.77670056e-01 -1.96449775e-02 ... 2.49607548e-01 4.45192516e-01 2.71392763e-02] [-2.13038221e-01 -3.09935868e-01 4.51706757e-04 ... 2.43309990e-01 1.64816946e-01 -1.88454717e-01]] [[-1.07761681e-01 -9.38586071e-02 2.61218101e-01 ... -2.59717166e-01 -2.84833789e-01 -1.28921807e-01] [-1.28434867e-01 -3.84545177e-01 -2.48961404e-01 ... 1.72774181e-01 1.72834948e-01 4.90946651e-01] [-8.86150181e-01 -8.82530436e-02 -7.85861850e-01 ... -4.61562663e-01 -2.82887638e-01 -4.47352946e-01] ... [ 1.16695892e-02 -1.93671227e-01 4.54143256e-01 ... 1.64767608e-01 -1.24387287e-01 -4.64474350e-01] [-7.49448761e-02 -1.53509408e-01 -1.84088215e-01 ... -3.78595084e-01 4.58234064e-02 -5.37446067e-02] [-2.07802072e-01 5.99403605e-02 1.91332966e-01 ... -3.96652222e-01 2.18944952e-01 -4.07284439e-01]] [[ 4.35448810e-02 -2.11304098e-01 -2.83298731e-01 ... -2.09563047e-01 -8.64660814e-02 5.83762646e-01] [-1.26669183e-01 -2.09715769e-01 8.87629688e-02 ... -1.58038840e-01 -5.22434711e-01 -3.54735851e-01] [ 7.34520257e-02 -2.37725779e-01 -1.56331241e-01 ... -2.65609682e-01 1.17173292e-01 -2.63666868e-01] ... [-8.46574605e-02 -2.37448961e-02 2.00955302e-01 ... -6.23506308e-03 2.67516106e-01 -1.84048265e-01] [-2.29575768e-01 2.18889982e-01 -4.03171390e-01 ... 2.34254912e-01 6.78669438e-02 2.43953884e-01] [-3.36419374e-01 -4.31370400e-02 -1.91290796e-01 ... 5.39770246e-01 -1.65143132e-01 3.78199548e-01]]]] [[[[ 8.68427336e-01 1.40690994e+00 -6.86084688e-01 ... -5.49930990e-01 -4.55490917e-01 -2.91288805e+00] [-2.80621195e+00 -1.18515658e+00 8.04285526e-01 ... 
1.30179465e+00 1.84033707e-01 8.50672275e-02] [-2.14516258e+00 -6.28183246e-01 5.53501248e-01 ... -1.32426369e+00 -4.14209850e-02 9.54930127e-01] ... [-1.73453331e-01 7.96164870e-01 -4.02695924e-01 ... 8.02946985e-01 3.01776528e+00 -5.37674487e-01] [ 2.17401290e+00 5.58961928e-01 -1.22891508e-01 ... -2.77477646e+00 -2.51076365e+00 -1.09383678e+00] [ 3.58313322e-01 -6.52287841e-01 -8.94204557e-01 ... 1.04397702e+00 9.64672029e-01 -1.69339776e+00]] [[-1.06100535e+00 -1.74986017e+00 -2.38426375e+00 ... -2.16495228e+00 6.41587973e-01 1.34185791e+00] [ 4.58749950e-01 -4.66089189e-01 -5.78221142e-01 ... 4.21144664e-01 2.89307165e+00 -1.05740833e+00] [-1.38418412e+00 -7.58282781e-01 -5.68566442e-01 ... 3.64853680e-01 -1.34112561e+00 8.17753911e-01] ... [-1.08464098e+00 -7.43650734e-01 5.30307174e-01 ... -2.43863970e-01 -9.97082114e-01 -1.33389187e+00] [ 9.45454895e-01 3.32840562e-01 3.90042663e+00 ... -5.41064680e-01 1.68698058e-01 1.09757626e+00] [-1.32849658e+00 2.57444739e-01 2.54392052e+00 ... 1.16808280e-01 1.60755590e-02 6.36867166e-01]] [[ 2.77331710e-01 2.95647001e+00 5.05140960e-01 ... -1.15040600e+00 -4.06069785e-01 -8.25061977e-01] [ 1.45351088e+00 -8.77471089e-01 9.15319026e-01 ... 3.00843644e+00 -1.41072035e+00 9.78413597e-02] [-1.67586822e-02 8.22218001e-01 2.05104518e+00 ... -2.61260420e-01 1.93789458e+00 -1.27917099e+00] ... [ 2.37250543e+00 1.13819277e+00 5.47180653e-01 ... -7.14205429e-02 -1.55574703e+00 -1.03315747e+00] [ 1.17244887e+00 -1.15925539e+00 -1.22428989e+00 ... -2.02908421e+00 3.55892092e-01 1.73712611e+00] [-1.87220597e+00 2.33788300e+00 -4.77016598e-01 ... -4.71704197e+00 6.58482090e-02 -2.98007727e-01]] ... [[ 4.05466482e-02 -5.32884859e-02 -1.66041601e+00 ... -4.93527353e-01 1.00749290e+00 1.19439507e+00] [ 5.74707389e-01 -3.92288834e-01 5.42615175e-01 ... 7.43443191e-01 5.82651019e-01 -1.79708242e+00] [-1.07207203e+00 1.61715043e+00 1.89290905e+00 ... -3.22069788e+00 -4.38197076e-01 8.31386626e-01] ... 
[-2.93968415e+00 -1.83416963e-01 -1.04737353e+00 ... 8.16486597e-01 -1.53002769e-01 9.76712346e-01] [ 4.18396294e-01 -6.11571908e-01 1.33012116e+00 ... 8.63170803e-01 7.07568787e-03 -4.48476642e-01] [ 1.13685980e-01 -1.19789779e+00 8.99823189e-01 ... -1.65650532e-01 -1.91349435e+00 -8.70663047e-01]] [[ 2.28222036e+00 2.62091845e-01 -5.20202279e-01 ... -2.01197267e+00 2.92942643e-01 3.50721270e-01] [ 1.04230113e-01 1.14887786e+00 -5.48058152e-01 ... -8.03946257e-01 -7.53000379e-01 -1.66608822e+00] [ 4.36481667e+00 3.95713061e-01 3.14449716e+00 ... 3.06847423e-01 4.76964295e-01 -9.74643305e-02] ... [ 2.70711851e+00 2.42182449e-01 -1.24330366e+00 ... -9.70783293e-01 7.58080721e-01 -6.13554895e-01] [-1.73464060e+00 1.02688956e+00 3.68062615e-01 ... 9.76845205e-01 3.37303430e-01 1.37662554e+00] [-3.62791479e-01 3.50063831e-01 1.36091977e-01 ... -1.76407222e-03 2.48342514e+00 2.37727642e+00]] [[ 5.22663713e-01 1.25876069e+00 -1.86968684e+00 ... -1.23855007e+00 -1.62310278e+00 -1.27498281e+00] [ 1.20635897e-01 -3.55792880e-01 2.10860300e+00 ... -9.77457881e-01 -8.96323442e-01 2.76024193e-01] [-1.43889654e+00 -8.51110294e-02 -1.46055609e-01 ... -1.35368812e+00 1.67320657e+00 -2.26995134e+00] ... [-5.74963808e-01 2.36827254e+00 -7.55069971e-01 ... -6.79058790e-01 8.72788966e-01 -4.66286212e-01] [ 1.82387792e-02 2.33572513e-01 9.69158173e-01 ... 8.01550806e-01 -6.68855190e-01 -3.34507376e-01] [ 4.65905964e-02 -2.23411024e-01 1.44086003e+00 ... -5.41283116e-02 -1.57169506e-01 1.79595256e+00]]] [[[ 3.71019244e-01 3.13195214e-02 -6.04167104e-01 ... 3.02919090e-01 -3.59639712e-02 -2.23979980e-01] [ 1.12031408e-01 3.49786818e-01 -1.97035491e-01 ... 6.21875748e-02 -1.45707661e-02 -5.40695600e-02] [-2.00864360e-01 -4.46670234e-01 -9.30459425e-03 ... -9.96669829e-02 -1.05465576e-01 -1.26658201e-01] ... [ 2.59296507e-01 2.86826968e-01 2.04585195e-01 ... 3.48155946e-01 -9.41358283e-02 -7.72843212e-02] [ 5.39718717e-02 3.98888558e-01 2.67339088e-02 ... 
-1.92109197e-01 -4.09814835e-01 1.44268259e-01] [-3.25167537e-01 3.03459197e-01 5.02486806e-03 ... 1.27888873e-01 -1.60481319e-01 2.15305910e-01]] [[-2.47544035e-01 1.65210202e-01 -1.26885340e-01 ... -2.20472962e-01 2.49230564e-01 2.64154803e-02] [-1.53870821e-01 4.74986970e-01 -1.49161413e-01 ... -1.26566291e-01 8.54300484e-02 1.87610194e-01] [ 6.16304353e-02 -5.16916029e-02 -8.97140726e-02 ... 3.24950755e-01 -4.57315259e-02 2.26318687e-02] ... [ 4.48328882e-01 -1.35228515e-01 -1.55816361e-01 ... -3.31218302e-01 4.65971828e-02 -5.39633632e-01] [ 9.46081653e-02 3.17789882e-01 3.78538936e-01 ... -4.70146924e-01 -2.12228075e-02 2.92835105e-02] [ 2.06497282e-01 -4.58739847e-01 -4.11189079e-01 ... -4.69726890e-01 -3.77914757e-01 -2.38920286e-01]] [[ 1.72412470e-01 4.67956901e-01 2.19781011e-01 ... 1.71619162e-01 -1.02836154e-02 -2.97681808e-01] [-1.59391746e-01 -2.64184117e-01 3.39807451e-01 ... -3.09744418e-01 -2.38075882e-01 -2.31051266e-01] [ 1.38462961e-01 -1.13690317e-01 3.11535537e-01 ... -1.51099816e-01 3.36263955e-01 2.83059984e-01] ... [-5.22306561e-02 3.07106078e-01 4.63203937e-01 ... -1.01894684e-01 -1.80107102e-01 1.45867839e-01] [-1.14389539e-01 1.93501055e-01 -1.14256524e-01 ... 1.91176701e-02 -4.60138768e-01 2.81225026e-01] [ 3.75410259e-01 -7.78177232e-02 -1.23439789e-01 ... -4.82166141e-01 3.35193068e-01 3.80454123e-01]] ... [[ 2.02477381e-01 -4.51950550e-01 3.35234433e-01 ... 2.38911837e-01 -2.03815088e-01 -2.72140652e-01] [ 2.10471794e-01 -1.07642515e-02 -2.17272609e-01 ... -3.57932955e-01 2.81735003e-01 -1.78315312e-01] [-3.17213722e-02 2.60404795e-02 1.40090671e-03 ... 1.03011625e-02 1.61838353e-01 -2.74486631e-01] ... [-2.21375629e-01 -7.17561841e-02 1.46894500e-01 ... 5.80141604e-01 2.55630136e-01 3.49853009e-01] [ 1.73401758e-01 1.70636073e-01 -1.51123092e-01 ... 5.35329282e-02 3.24892640e-01 8.78294706e-02] [-2.37282351e-01 -1.63789183e-01 2.32262332e-02 ... 
2.15478048e-01 -9.59489048e-02 -1.02049112e-02]] [[-2.31201686e-02 2.24993244e-01 1.97344854e-01 ... -1.42516885e-02 1.11623861e-01 -2.15132937e-01] [ 1.29088029e-01 4.32325572e-01 -1.53680712e-01 ... 1.14488438e-01 -1.68947428e-01 4.72500399e-02] [ 2.62613874e-03 3.44767779e-01 -1.11750819e-01 ... 1.01519637e-01 -3.56809556e-01 9.12927315e-02] ... [-3.02795172e-01 -4.62049097e-01 -3.21375906e-01 ... -1.40781611e-01 -6.93934321e-01 2.64872909e-01] [ 1.50780797e-01 -1.99943736e-01 3.47977668e-01 ... 5.05128264e-01 -1.80566892e-01 1.35965869e-01] [ 4.60769564e-01 -2.54459381e-02 -9.90356654e-02 ... -5.74247539e-01 -4.87228274e-01 4.17084485e-01]] [[-7.89229631e-01 1.43592313e-01 2.92246878e-01 ... 3.76849100e-02 3.28829139e-01 -9.03279632e-02] [ 1.37578383e-01 3.70422810e-01 3.50575209e-01 ... 8.54186639e-02 1.13909453e-01 -2.86190608e-03] [ 3.41696054e-01 -7.20324144e-02 -1.91172615e-01 ... 1.90570861e-01 1.32382572e-01 -5.90685010e-01] ... [ 1.61050782e-01 -3.22540969e-01 -1.26032084e-01 ... -4.29525554e-01 -5.51466882e-01 1.82897165e-01] [ 2.33299453e-02 -1.14504464e-01 1.98542193e-01 ... 2.12015644e-01 -1.59213915e-01 -1.41951278e-01] [-3.38015482e-02 1.39650822e-01 2.24721864e-01 ... 2.95057986e-02 2.15583101e-01 -8.43400583e-02]]] [[[ 2.18811934e-03 -1.97133973e-01 -7.63212740e-02 ... 3.24368030e-01 -2.21727267e-01 7.35418677e-01] [-3.60410810e-01 1.33890361e-01 7.04883188e-02 ... -7.36484170e-01 -9.69178602e-02 2.97243953e-01] [-1.11515395e-01 1.90935284e-01 -4.12114263e-02 ... -3.43869209e-01 5.57549596e-01 -4.12240922e-01] ... [ 3.95299904e-02 1.17918760e-01 4.21618551e-01 ... 4.81638134e-01 9.36091840e-02 -8.73163939e-01] [-4.08369213e-01 5.26257120e-02 3.86334836e-01 ... -2.65648663e-02 6.05684891e-02 1.40008479e-01] [-4.43567038e-01 -3.07748109e-01 -8.23843107e-02 ... -4.88373309e-01 3.21880639e-01 6.70783445e-02]] [[ 3.07865590e-01 -3.57122928e-01 1.73083588e-01 ... 
5.39527178e-01 7.51354620e-02 -4.88067895e-01] [-2.58046299e-01 -7.43169844e-01 -7.49682605e-01 ... 1.31534323e-01 -4.79974806e-01 3.12697560e-01] [ 4.78385419e-01 7.99650609e-01 -4.32052702e-01 ... -5.32233655e-01 -2.54303902e-01 -3.23351383e-01] ... [-5.09064257e-01 -3.26890409e-01 -4.46764618e-01 ... -4.37218219e-01 7.69833103e-02 3.15120012e-01] [ 3.74113113e-01 5.08893840e-02 1.40176415e-01 ... -2.51004994e-01 -7.03108013e-02 4.49029766e-02] [ 2.69816015e-02 -1.04329884e-01 2.24619791e-01 ... -9.65921953e-03 -7.67254084e-02 6.74255013e-01]] [[-2.53175259e-01 3.55212629e-01 -2.42928028e-01 ... 1.69652194e-01 3.82440597e-01 -4.15955871e-01] [-9.46967229e-02 -6.01080712e-03 3.60640772e-02 ... 5.81060648e-02 -4.45148170e-01 2.31514350e-01] [ 4.43208218e-01 -3.75033051e-01 6.37319982e-02 ... 9.27912444e-02 7.47007728e-01 1.60709888e-01] ... [ 4.40098137e-01 -1.28139913e-01 1.37115523e-01 ... 6.08977377e-01 1.02929413e+00 -3.89966011e-01] [ 9.21896547e-02 2.85716325e-01 3.47988665e-01 ... 3.66304368e-01 3.13693464e-01 -2.00012684e-01] [-2.03069925e-01 -5.05181253e-02 4.77083065e-02 ... -1.18438058e-01 -2.32994050e-01 -5.80381632e-01]] ... [[ 2.16453001e-01 1.53096244e-01 -3.22729975e-01 ... 8.99973214e-01 2.47421801e-01 -5.82765818e-01] [-7.41069078e-01 6.65488124e-01 -5.06574094e-01 ... -4.18735176e-01 -5.22753239e-01 -6.76250875e-01] [-5.61092459e-02 -4.64249700e-01 2.08103776e-01 ... 1.36315122e-01 7.41209686e-02 5.83486080e-01] ... [-1.24096051e-01 -4.36324209e-01 4.42752950e-02 ... 3.61005306e-01 7.28513524e-02 3.56596053e-01] [-3.22229832e-01 -2.02946737e-01 1.75357297e-01 ... 3.59835118e-01 4.96119082e-01 2.00559422e-01] [ 6.25054389e-02 -4.86092836e-01 2.30807602e-01 ... 2.98723549e-01 2.62169451e-01 2.40045562e-01]] [[-3.52395445e-01 -2.93604434e-02 4.89146739e-01 ... -6.55785128e-02 6.16687596e-01 3.54593515e-01] [-6.79485261e-01 6.80692494e-01 -6.00913286e-01 ... 
-4.75318283e-01 1.67466506e-01 7.16536105e-01] [ 6.38768971e-01 -2.87581265e-01 -7.42988527e-01 ... 4.38351065e-01 2.54640933e-02 -2.15896025e-01] ... [-1.11864138e+00 -5.99340387e-02 2.58862525e-01 ... 6.76072598e-01 2.62882084e-01 -7.70312920e-02] [ 1.40304729e-01 3.96787226e-01 -7.82374591e-02 ... 1.13454193e-01 2.31655434e-01 -4.07082886e-02] [ 2.43371516e-01 -3.70275863e-02 1.78735368e-02 ... 8.68747175e-01 1.02731860e+00 -2.96250790e-01]] [[ 8.57646540e-02 1.15427868e-02 -6.17624879e-01 ... -5.21258891e-01 -6.12144113e-01 8.96737650e-02] [-4.85107638e-02 3.17907661e-01 6.70139074e-01 ... 6.47890389e-01 4.04309809e-01 -2.35348627e-01] [-1.06610765e-03 -1.65446594e-01 -2.75652111e-01 ... 3.71690005e-01 -3.31320018e-01 -7.82379955e-02] ... [ 1.28777936e-01 4.15893883e-01 7.89151937e-02 ... -1.05327554e-01 -8.31251517e-02 1.75005019e-01] [ 1.34026974e-01 -8.91082138e-02 4.39995408e-01 ... 9.22962558e-03 -5.35084188e-01 -7.64770210e-02] [ 2.12777749e-01 9.95326996e-01 -2.14587450e-01 ... -2.87425786e-01 -3.54971141e-01 -2.26029247e-01]]] [[[ 7.40401372e-02 -5.83515577e-02 4.40716016e-04 ... -1.10008707e-02 -1.78928256e-01 9.46410559e-03] [-5.18873371e-02 1.38273835e-01 -8.21443871e-02 ... -5.32459244e-02 -3.11810207e-02 1.01850875e-01] [-2.53653288e-01 -2.81336252e-02 -3.92796546e-02 ... 2.08302811e-02 -9.31087285e-02 7.75419697e-02] ... [ 1.82247639e-01 -1.12633966e-01 -9.56272613e-03 ... -1.94449797e-01 -1.83517203e-01 1.86902657e-01] [-2.09997855e-02 -4.23200242e-02 9.29691568e-02 ... -4.26843353e-02 7.30378702e-02 5.31559400e-02] [-4.66699228e-02 7.14547038e-02 -4.27303798e-02 ... 1.36591911e-01 -9.16976556e-02 6.71697706e-02]] [[ 1.01163529e-01 -6.39140606e-02 -6.40862345e-05 ... -2.42639724e-02 -3.45286261e-03 -8.97318870e-03] [ 1.39787123e-01 1.44021437e-01 1.94659270e-02 ... 1.78962901e-01 1.73540246e-02 -2.79521421e-02] [-2.91080978e-02 2.02078983e-01 6.59756586e-02 ... 4.14544232e-02 1.07965022e-01 -1.24562923e-02] ... 
[-1.05121635e-01 -1.03730559e-01 -3.28587310e-04 ... -5.01779793e-03 9.78597850e-02 -3.00150514e-02] [ 1.10354051e-01 -4.76966053e-02 -2.23863855e-01 ... -8.86742845e-02 1.60182603e-02 -1.05271518e-01] [ 2.29894459e-01 1.00015877e-02 2.32690588e-01 ... 1.36826828e-01 -1.52790910e-02 9.77903903e-02]] [[-5.63056059e-02 2.75952229e-03 1.03543401e-02 ... -6.60661515e-03 3.87249738e-02 3.53180096e-02] [ 1.05628595e-02 -2.89130479e-01 1.01409510e-01 ... -1.67030290e-01 1.09471701e-01 -1.39447004e-01] [ 1.49800017e-01 1.98576286e-01 2.34020039e-01 ... 7.82672036e-03 -8.80999789e-02 -3.67536873e-01] ... [ 1.75343022e-01 -1.75731882e-01 -1.72189400e-02 ... -2.41831154e-01 3.52583565e-02 1.56817466e-01] [ 2.34859083e-02 -8.02221000e-02 1.48390055e-01 ... -8.38838741e-02 -1.59298569e-01 -3.54151949e-02] [-1.21858053e-01 1.20436475e-01 1.77304223e-01 ... 2.03711361e-01 8.07291418e-02 9.29290429e-02]] ... [[-2.24948511e-01 -1.50080845e-01 -9.96924415e-02 ... -1.59474984e-01 5.94014814e-03 -2.20269114e-02] [ 4.33104485e-02 -1.43029764e-01 1.49143249e-01 ... 3.69219622e-03 -6.22365437e-02 -4.41543460e-02] [ 4.12238166e-02 5.19836694e-02 1.68221459e-01 ... -4.73889485e-02 -6.38569668e-02 9.54467654e-02] ... [ 9.88735929e-02 -7.64879435e-02 -4.42430526e-02 ... 7.19301179e-02 3.77530783e-01 1.03192449e-01] [ 6.69197440e-02 2.86731347e-02 7.73360506e-02 ... 5.40358871e-02 -5.50182648e-02 2.98668165e-02] [ 4.82595749e-02 -2.29248807e-01 1.13869838e-01 ... -2.01899260e-01 1.60585895e-01 1.10802054e-03]] [[-1.09681249e-01 -1.06241666e-01 -2.59861387e-02 ... -1.42756850e-01 1.57231823e-01 1.17727831e-01] [ 1.54399872e-02 -4.68593463e-02 -1.51947793e-02 ... 9.81899444e-03 1.06925242e-01 2.68695056e-01] [-1.64881703e-02 -1.28611475e-01 7.84476250e-02 ... 2.31938973e-01 -1.73103154e-01 1.21069930e-01] ... [ 1.32453024e-01 -1.06683455e-01 -6.94669085e-03 ... 1.55779809e-01 1.30868703e-01 -8.38243365e-02] [-4.86443937e-02 1.13196105e-01 1.03220440e-01 ... 
1.85133927e-02 4.58123349e-02 -4.37805541e-02] [-2.28137225e-01 -1.11658357e-01 2.08374038e-01 ... 7.81150609e-02 9.24347341e-02 1.29549816e-01]] [[-8.53369609e-02 2.47081462e-02 -1.99482273e-02 ... -2.21518450e-03 -1.30320862e-01 -2.58671623e-02] [-2.37119824e-01 -1.10333338e-01 -2.36023352e-01 ... 1.63364261e-01 -1.28042743e-01 -8.28570351e-02] [-4.97709028e-02 5.06456681e-02 -1.78015620e-01 ... -1.29057527e-01 9.38389078e-02 1.32590428e-01] ... [-8.69349837e-02 -2.42160894e-02 -6.87064752e-02 ... 2.16697324e-02 1.25395939e-01 6.93671927e-02] [-3.24851610e-02 1.73466012e-01 -1.88246280e-01 ... 9.85193998e-03 2.53861517e-01 1.53056398e-01] [-1.85140446e-02 -6.12506680e-02 1.93271544e-02 ... -8.17402378e-02 1.10088266e-01 7.57613331e-02]]] [[[ 2.90579140e-01 9.73600149e-01 -3.70167270e-02 ... -3.73774737e-01 1.59873486e-01 2.28689402e-01] [-4.69156623e-01 1.04031825e+00 -2.14489907e-01 ... -1.73545152e-01 2.53871024e-01 4.65370536e-01] [ 8.05678785e-01 9.54154193e-01 -7.78519036e-03 ... -1.79656088e-01 -1.77661970e-01 9.15085137e-01] ... [ 8.37211668e-01 5.65399602e-02 -6.17562473e-01 ... -3.17118257e-01 -7.20237613e-01 -1.73988533e+00] [-1.50923356e-01 -7.40478873e-01 8.21216524e-01 ... -5.59242405e-02 -1.64241195e-01 -8.12488496e-02] [ 3.88752669e-02 1.18227631e-01 3.43154669e-01 ... -1.38562357e+00 -4.36589420e-01 9.35804665e-01]] [[ 8.26605737e-01 7.80546010e-01 9.62386876e-02 ... -3.42889130e-01 7.97478318e-01 8.14178407e-01] [-7.24441171e-01 -1.26436245e+00 -1.99704111e-01 ... 6.14371598e-01 -1.62891948e+00 4.40303206e-01] [ 9.95194852e-01 -1.21760957e-01 -9.20304537e-01 ... 4.71966475e-01 3.42429765e-02 3.14789683e-01] ... [-3.27552333e-02 -5.30707419e-01 -6.95016444e-01 ... 8.61134410e-01 -7.01567829e-01 2.10899186e+00] [ 7.68618643e-01 1.39023006e+00 1.07861197e+00 ... 7.62425363e-03 8.83750081e-01 -2.73931265e-01] [-5.14825046e-01 7.65993714e-01 1.15277028e+00 ... 
5.38077235e-01 -1.03375649e+00 1.44925699e-01]] [[ 2.37997606e-01 5.61881602e-01 -5.98766923e-01 ... -1.62628043e+00 -2.79721856e-01 -8.55937004e-02] [-9.57896933e-02 -3.77955139e-01 -3.98608506e-01 ... 3.64051402e-01 -2.16928944e-01 1.52475989e+00] [-4.35502946e-01 -8.87714684e-01 -5.54670356e-02 ... -9.74770308e-01 2.80173093e-01 -4.08715248e-01] ... [ 6.55496046e-02 2.03926587e+00 1.07618973e-01 ... 3.52633268e-01 -5.11827707e-01 3.73508990e-01] [ 1.84409809e+00 -4.48999971e-01 -6.82953537e-01 ... -5.53937256e-01 -8.66772294e-01 -1.27015889e+00] [ 2.89788693e-01 7.82300532e-02 -1.09295949e-01 ... 8.27005386e-01 4.25021321e-01 4.26239729e-01]] ... [[-5.50246239e-01 4.85918522e-01 -7.62070060e-01 ... 3.41087759e-01 7.04730093e-01 -5.73176920e-01] [-1.89157933e-01 -6.54572606e-01 3.18684941e-03 ... 4.99097705e-01 -4.97560799e-01 2.84282148e-01] [-1.42218876e+00 -3.20943177e-01 -8.37302864e-01 ... 6.88173771e-01 -5.64568996e-01 6.99986696e-01] ... [-9.82534647e-01 2.79832393e-01 7.10312665e-01 ... 3.57031792e-01 1.07989497e-01 1.03202291e-01] [-1.73848808e-01 -6.74107552e-01 1.31624818e+00 ... -8.90889689e-02 -5.49004674e-01 -3.78546536e-01] [ 5.25129676e-01 3.93972635e-01 5.62050007e-02 ... 1.08338022e+00 -1.13806260e+00 -3.08111727e-01]] [[-5.39224386e-01 4.39529568e-01 -5.70538938e-01 ... -1.44967186e+00 -1.02454293e+00 -1.40781796e+00] [ 1.16220877e-01 2.19225788e+00 1.60477841e+00 ... -1.07873654e+00 7.27575123e-01 1.20887525e-01] [-6.71132505e-01 -6.66450337e-02 1.08788185e-01 ... -1.44061819e-01 -8.45696688e-01 1.11359119e+00] ... [ 1.02002358e+00 8.89856458e-01 -3.82410064e-02 ... 1.05223072e+00 6.85515225e-01 -7.74047598e-02] [-9.71522272e-01 9.63542581e-01 -6.73347175e-01 ... -6.51013926e-02 -1.56935409e-01 -2.27929398e-01] [-1.21720813e-01 -2.55310178e-01 9.67414856e-01 ... 1.67720056e+00 -2.32286602e-01 -1.26410592e+00]] [[ 8.94443616e-02 -1.32709052e-02 -1.32497564e-01 ... 
-1.21381533e+00 4.00505252e-02 -3.08485299e-01] [-3.53935838e-01 2.36100763e-01 -8.03973898e-02 ... -7.48966515e-01 -5.43239474e-01 -2.91523874e-01] [ 3.58104348e-01 -1.35125145e-02 7.55240262e-01 ... 8.52022409e-01 8.90221834e-01 -5.01306474e-01] ... [ 6.84763551e-01 6.59897506e-01 4.90598947e-01 ... -8.89974907e-02 2.62093931e-01 6.85935140e-01] [ 5.93999624e-01 -8.06228697e-01 7.98507750e-01 ... 4.26392168e-01 1.20953754e-01 -5.68368249e-02] [-1.94611296e-01 -3.66634220e-01 -2.23537251e-01 ... -1.06266344e+00 -1.64854789e+00 1.27989113e+00]]] [[[ 4.78993475e-01 1.65372849e-01 -7.90469348e-02 ... 2.83729844e-02 -4.19399261e-01 -2.43196592e-01] [-3.53118211e-01 3.36009860e-01 4.70973074e-01 ... 5.84165335e-01 -4.77401465e-02 1.23334624e-01] [-2.37223849e-01 -2.17759050e-02 5.36087453e-02 ... 4.65111583e-02 -1.19848838e-02 6.28995225e-02] ... [ 4.46706206e-01 3.00084919e-01 3.92693847e-01 ... 4.44909394e-01 4.10002112e-01 1.56165972e-01] [ 5.09127438e-01 -4.88404781e-01 3.62502217e-01 ... 4.27439272e-01 1.88308552e-01 -5.90846762e-02] [-1.51755353e-02 3.11708868e-01 -7.13598728e-02 ... -1.84357569e-01 -3.14281017e-01 -1.37934014e-01]] [[ 4.91652966e-01 -1.47875786e-01 -1.19239718e-01 ... 5.71602993e-02 -2.67767400e-01 -1.08100951e-01] [ 5.10341078e-02 -2.56733328e-01 4.99239676e-02 ... -2.68792044e-02 -1.33101806e-01 4.50853854e-01] [-1.06892757e-01 -9.47659686e-02 -2.82112688e-01 ... -6.32278264e-01 -1.43772021e-01 6.62533045e-02] ... [-1.36165053e-01 9.35815349e-02 -1.28962055e-01 ... -1.17654152e-01 1.65442675e-01 -1.70453399e-01] [ 3.20319116e-01 3.92855220e-02 -4.44590718e-01 ... 6.13594949e-01 6.28509521e-02 3.50491524e-01] [-3.51118177e-01 -1.13285892e-01 6.25494599e-01 ... -2.07153231e-01 -8.29318315e-02 -2.02895015e-01]] [[ 3.00916553e-01 -4.05679494e-01 2.67486691e-01 ... 3.71737219e-02 -4.51858014e-01 3.87400538e-01] [ 7.95301888e-03 5.04288077e-02 2.63971150e-01 ... 
-1.43822834e-01 -5.58867574e-01 2.19507754e-01] [ 3.52221012e-01 1.34188998e-02 -3.11914116e-01 ... 1.55024827e-01 -2.32928872e-01 -7.38316327e-02] ... [-2.19413891e-01 -9.02764872e-02 1.65947989e-01 ... -1.29523799e-01 -2.40049250e-02 -6.56706616e-02] [-3.24836940e-01 2.05853134e-01 -2.70731151e-01 ... -6.63246691e-01 -3.80887002e-01 -4.66722250e-02] [-4.71689224e-01 6.43360019e-02 1.04483500e-01 ... -2.31110916e-01 -1.72159523e-01 -3.46867055e-01]] ... [[-3.19691330e-01 -3.40904713e-01 -1.15159988e-01 ... -1.45090893e-01 8.39917004e-01 -3.47393185e-01] [ 1.26151666e-01 -2.09541380e-01 -8.35930586e-01 ... 1.01320758e-01 1.40757293e-01 -4.36863005e-01] [-6.60273954e-02 1.90115005e-01 -4.60928708e-01 ... -2.42371604e-01 -4.05178428e-01 7.25979447e-01] ... [-4.84680623e-01 3.96307744e-02 -1.48016676e-01 ... 1.19234510e-01 5.75168490e-01 5.91064282e-02] [ 1.72780812e-01 -6.29313827e-01 -1.97691947e-01 ... -3.21165204e-01 -3.07344735e-01 -1.59184143e-01] [-2.05166757e-01 9.04708803e-02 -3.52474004e-02 ... 4.77004908e-02 -6.13627255e-01 5.31695709e-02]] [[ 2.23186657e-01 3.42403650e-02 -1.88106701e-01 ... -2.12335512e-01 -1.86197430e-01 2.22425044e-01] [-1.78304926e-01 -2.77637750e-01 -2.91072279e-02 ... 1.51661023e-01 6.06910288e-02 -1.64805084e-01] [-5.87971471e-02 3.53229314e-01 4.85319018e-01 ... 1.32415637e-01 1.70240402e-01 -5.97562902e-02] ... [-6.40123904e-01 2.52009835e-02 4.01457138e-02 ... -2.58918554e-01 -3.30834687e-02 -1.23907462e-01] [ 4.83719707e-01 2.62414217e-01 1.24443471e-01 ... -5.03256500e-01 -5.08617938e-01 -1.38216525e-01] [-7.00712875e-02 1.00014247e-01 2.41514534e-01 ... 2.44559437e-01 -2.02914432e-01 -1.19631654e-02]] [[-3.27634029e-02 1.27265796e-01 1.70519337e-01 ... -2.89180577e-01 3.31621885e-01 -3.63370240e-01] [ 1.48260042e-01 1.60966575e-01 -8.65896866e-02 ... -4.59914893e-01 4.65087861e-01 -1.07202895e-01] [ 3.10168058e-01 -2.17921808e-01 -3.00230145e-01 ... -6.61804080e-01 3.16042125e-01 -1.74167335e-01] ... 
[ 4.27089363e-01 2.64804214e-01 2.89856829e-02 ... 1.16732866e-01 2.60397851e-01 -1.84112489e-01] [-2.41392910e-01 1.28199071e-01 -2.97677666e-01 ... -6.53297082e-02 5.05244255e-01 -1.34339750e-01] [-6.43842816e-01 -2.33584315e-01 -1.04985811e-01 ... -2.92745173e-01 4.15928036e-01 -1.74709707e-01]]]] [[[[-1.81919634e+00 -1.44053495e+00 5.66544347e-02 ... -3.61629200e+00 -4.50291455e-01 7.88148880e-01] [-1.50243973e-03 7.21010327e-01 5.13850898e-02 ... 4.11418766e-01 -3.10294890e+00 -6.68100893e-01] [ 7.97086477e-01 2.91817576e-01 -8.09295118e-01 ... -6.59146488e-01 6.34669662e-01 -2.39435166e-01] ... [ 1.26859629e+00 -1.64630964e-01 -1.87343717e+00 ... 4.14120220e-03 -1.01049340e+00 -2.42286444e-01] [-1.88032246e+00 -6.37015522e-01 1.21093595e+00 ... -7.95861602e-01 1.68763876e-01 8.51015653e-03] [ 1.11877382e+00 -5.36714494e-01 -2.98584372e-01 ... 1.37459087e+00 -2.67650753e-01 3.96587521e-01]] [[-7.45699883e-01 -2.51394415e+00 2.89871693e-01 ... 1.48129478e-01 1.69570092e-02 -8.98210883e-01] [-1.51535130e+00 -2.46038795e-01 9.51690435e-01 ... 1.88207436e+00 -6.23039663e-01 2.80875707e+00] [ 8.09404925e-02 -8.16502512e-01 -2.61101902e-01 ... -2.38710761e-01 2.06379271e+00 1.33341885e+00] ... [ 1.11142731e+00 1.36116159e+00 -9.41045523e-01 ... 3.61785024e-01 2.27846289e+00 -1.13902047e-01] [ 6.11059785e-01 1.60693541e-01 1.56180775e+00 ... -2.27245784e+00 -1.68595994e+00 6.03220463e-01] [-1.04047763e+00 2.11095288e-01 3.22110653e-01 ... -1.99716210e+00 3.72289300e-01 -1.66816562e-01]] [[-1.04062650e-02 6.65279329e-01 7.01423883e-02 ... -4.80261832e-01 1.03973055e+00 1.75206470e+00] [ 1.24866939e+00 -1.13487613e+00 1.05732703e+00 ... -2.04179570e-01 -3.97278041e-01 -8.08262050e-01] [ 3.41947317e-01 2.60718703e+00 -9.65186954e-01 ... -2.00267982e+00 1.74737048e+00 1.05084562e+00] ... [ 7.83141792e-01 -6.18895054e-01 9.61297870e-01 ... 4.74431776e-02 1.99110246e+00 -5.30280136e-02] [ 3.96751618e+00 -1.96983337e+00 -7.46834457e-01 ... 
-3.15905631e-01 3.07146931e+00 -1.08363898e-02] [ 6.65545642e-01 2.64542246e+00 1.03805113e+00 ... 1.86383530e-01 2.76962018e+00 -3.55435252e+00]] ... [[ 1.62580228e+00 1.08132526e-01 2.22176123e+00 ... -8.43592823e-01 -3.69268924e-01 2.22908354e+00] [ 4.87358958e-01 -1.01199830e+00 4.56420392e-01 ... 9.02994692e-01 2.59970218e-01 -7.72756875e-01] [-1.97413340e-01 -8.92560065e-01 -8.69331419e-01 ... 1.30902004e+00 -1.88285708e+00 6.65025234e-01] ... [ 4.77174491e-01 -6.15857005e-01 -1.63803148e+00 ... 6.52532458e-01 -3.10979903e-01 -2.63951468e+00] [ 3.31044495e-01 9.25288379e-01 -7.53674448e-01 ... 6.15131795e-01 8.11827838e-01 1.03067219e-01] [-3.06012726e+00 3.77284229e-01 9.74426270e-01 ... -4.97713864e-01 4.34804231e-01 5.99744916e-01]] [[ 2.44212188e-02 6.90636933e-01 9.43579972e-01 ... -2.86772251e-01 2.33512282e+00 -1.32533861e-02] [ 1.47321355e+00 8.62934589e-02 -1.08193660e+00 ... -7.21675336e-01 -1.81265616e+00 2.09267235e+00] [ 7.21304953e-01 5.34706414e-01 1.45706618e+00 ... 9.41325009e-01 2.33220410e+00 -1.67361164e+00] ... [ 1.04375136e+00 -1.51076043e+00 -8.10432196e-01 ... 8.59472513e-01 -4.31632519e+00 3.97110462e-01] [ 6.65390015e-01 2.22850412e-01 -3.48572540e+00 ... -1.43753517e+00 -1.74664751e-01 -7.08855689e-01] [-7.37238407e-01 7.14088678e-01 -3.90714914e-01 ... 9.00429845e-01 -2.27256799e+00 -1.09382534e+00]] [[ 2.76646227e-01 1.72176552e+00 1.25637841e+00 ... 5.69355011e-01 9.07300830e-01 -1.29008782e+00] [-1.17303967e+00 -1.05067897e+00 2.12088156e+00 ... 1.72236073e+00 1.73909831e+00 -3.44358861e-01] [ 1.68408382e+00 -2.89558582e-02 7.90956855e-01 ... -5.49140215e-01 -4.62727398e-02 1.74825758e-01] ... [ 1.05607235e+00 -6.27520263e-01 -4.72907972e+00 ... -2.20112801e+00 -7.46782839e-01 4.12175544e-02] [-1.21703243e+00 -1.14880490e+00 1.30041146e+00 ... 1.30227089e+00 -1.47542620e+00 1.06037343e+00] [-3.17269373e+00 1.54200912e+00 8.63051534e-01 ... 
1.80104148e+00 2.12715483e+00 -1.13843119e+00]]] [[[-6.49387091e-02 1.48977384e-01 -9.40439627e-02 ... -9.48031470e-02 1.65695809e-02 -1.25886977e-01] [-2.32765768e-02 -6.01019487e-02 6.92365766e-02 ... 3.45310241e-01 -7.22024962e-02 2.40760610e-01] [-1.37007497e-02 -8.38584304e-02 1.57118961e-01 ... 4.26346481e-01 -1.27638243e-02 -7.55058050e-01] ... [ 7.67916720e-03 -3.16146344e-01 -4.98171225e-02 ... -5.60687721e-01 -4.26719874e-01 8.59482959e-02] [ 2.11310834e-01 1.73316240e-01 3.18542838e-01 ... -9.76268873e-02 3.12434286e-01 -7.07681924e-02] [-2.56537348e-02 1.71754330e-01 4.57100049e-02 ... -2.04079688e-01 3.35774928e-01 2.80823648e-01]] [[-5.59818029e-01 -3.68288994e-01 -2.57214725e-01 ... -2.17934996e-01 -3.29525322e-01 -1.86359122e-01] [ 6.15969636e-02 -3.41142535e-01 8.55904162e-01 ... -2.44184911e-01 -1.64917395e-01 -4.59883139e-02] [ 1.36604711e-01 2.93049421e-02 3.09902221e-01 ... -9.69912857e-02 7.78001726e-01 3.43091786e-01] ... [-2.72717923e-01 -1.98678553e-01 4.98789586e-02 ... -3.18585873e-01 -2.03893885e-01 -2.74173915e-01] [ 1.32584393e-01 1.61803693e-01 3.54626983e-01 ... 4.51831639e-01 -4.41351742e-01 2.10760593e-01] [ 5.44220209e-01 7.10085705e-02 1.64844215e-01 ... -8.22507679e-01 8.37099314e-01 3.13011587e-01]] [[-2.59807222e-02 -6.56086951e-02 -7.75776088e-01 ... -1.86752796e-01 -2.28896648e-01 -1.68480650e-01] [ 3.23277593e-01 -6.12963885e-02 -1.90392599e-01 ... -9.88830850e-02 -4.83238786e-01 2.19956845e-01] [-4.16974217e-01 -3.50979954e-01 3.82074304e-02 ... 9.11853015e-02 -4.27425712e-01 7.02836588e-02] ... [-3.51576865e-01 4.92043436e-01 3.80534679e-02 ... 2.82497525e-01 1.34114102e-01 -2.71881461e-01] [-3.99353534e-01 -4.49465781e-01 -5.47061004e-02 ... 2.37511061e-02 -3.54209095e-01 -1.48305103e-01] [-2.57427990e-01 3.77355516e-02 -2.02067286e-01 ... 2.30550185e-01 -2.38115221e-01 1.47761777e-01]] ... [[ 1.71552211e-01 -2.74253394e-02 -3.96642685e-01 ... 
-1.75486043e-01 -3.35490555e-01 2.82489240e-01] [-8.57562050e-02 1.97106123e-01 3.29903007e-01 ... 1.51313646e-02 -4.72695053e-01 -2.42272541e-01] [ 1.18066780e-01 -2.27117270e-01 -1.35332122e-01 ... -2.10173756e-01 -3.85708451e-01 2.45100021e-01] ... [-2.55022109e-01 4.53621656e-01 2.20677033e-01 ... -9.17636082e-02 -7.06421956e-02 3.02136123e-01] [-1.01073332e-01 -1.44534200e-01 1.27046645e-01 ... 1.55879959e-01 -3.66762191e-01 -6.06803894e-01] [ 1.26540706e-01 7.07708895e-02 -8.74061231e-03 ... -1.21017464e-01 -2.23428290e-02 -1.50490087e-02]] [[ 2.03789800e-01 1.54447809e-01 -7.22795427e-02 ... 2.65843213e-01 -1.84738450e-02 1.53329149e-01] [ 1.46206766e-01 3.32067192e-01 5.69316223e-02 ... -5.24704039e-01 -1.93047673e-01 2.02038959e-01] [-3.51826936e-01 -2.70289153e-01 -5.83131472e-03 ... 1.40878290e-01 -3.35458189e-01 -1.54111072e-01] ... [ 1.84186131e-01 1.33594930e-01 7.94270560e-02 ... 3.02216381e-01 4.19432849e-01 -4.56815094e-01] [ 1.94265112e-01 1.97722897e-01 3.55886728e-01 ... 3.33458245e-01 2.74197888e-02 -2.55913567e-02] [-3.20270985e-01 -7.03852922e-02 -3.63735825e-01 ... -2.59058028e-01 -1.66802611e-02 -5.30283786e-02]] [[ 2.46784906e-03 1.83519110e-01 -4.01295125e-01 ... 9.53124315e-02 -2.36055523e-01 -1.86909810e-01] [ 1.90462157e-01 -1.24821682e-02 6.67652369e-01 ... -1.06099740e-01 -1.97987348e-01 -2.09355950e-01] [-1.56272687e-02 -2.26115301e-01 3.67523611e-01 ... -4.06264998e-02 -5.46191037e-01 2.95836311e-02] ... [-9.22614038e-02 4.51446563e-01 1.96220949e-01 ... -7.47017637e-02 4.56798077e-02 -6.36755645e-01] [ 1.54116690e-01 -3.46272051e-01 1.83722287e-01 ... -5.32400727e-01 -7.55809667e-03 1.00500792e-01] [ 4.28188592e-02 -1.67960510e-01 -1.37361392e-01 ... -1.31930381e-01 -9.62395146e-02 -2.70327836e-01]]] [[[-1.29974812e-01 4.07276958e-01 4.56886053e-01 ... -6.99045658e-01 1.71298891e-01 -2.01891720e-01] [-7.21651495e-01 -3.87130588e-01 7.63455749e-01 ... 
-1.17091365e-01 9.32033360e-01 2.87220657e-01] [ 5.08769080e-02 -4.37650055e-01 8.73326957e-02 ... -3.49683821e-01 -6.36454344e-01 -9.58512664e-01] ... [ 8.37433785e-02 2.88385630e-01 2.64055878e-01 ... -5.69887102e-01 5.11858344e-01 4.22154844e-01] [ 3.30484837e-01 5.55067994e-02 -3.38123798e-01 ... 1.69161350e-01 -5.87657452e-01 1.29288301e-01] [ 5.76343894e-01 8.53534400e-01 4.63766567e-02 ... -3.63995820e-01 4.13767606e-01 1.90404817e-01]] [[ 1.99995879e-02 6.55669510e-01 -6.01901054e-01 ... -2.25553229e-01 3.79319519e-01 -3.34428698e-01] [-3.73718627e-02 -7.69934654e-02 -5.69691777e-01 ... 3.83790761e-01 5.49529158e-02 -2.17206299e-01] [-3.72542769e-01 -2.18222067e-01 -6.34090066e-01 ... -2.91483551e-01 1.00393498e+00 -5.65740108e-01] ... [ 2.95534451e-02 -1.34685030e-02 1.50866687e-01 ... -2.29138993e-02 3.59527230e-01 -3.05972248e-01] [-5.27322069e-02 -3.78803685e-02 4.13170904e-01 ... 1.57113802e-02 4.32913095e-01 1.50337979e-01] [ 4.30809736e-01 3.91376346e-01 -1.48121282e-01 ... 3.03288013e-01 -4.48632658e-01 1.33886248e-01]] [[-3.06118786e-01 9.38437656e-02 -6.54296041e-01 ... 3.01098138e-01 -2.28053525e-01 4.37749565e-01] [-1.96558297e-01 3.92060250e-01 2.97115892e-01 ... 3.23079705e-01 2.17706829e-01 -3.31959248e-01] [ 2.97585607e-01 4.98910010e-01 -1.38350978e-01 ... 5.06168902e-01 -1.14083074e-01 7.40492582e-01] ... [-4.19979662e-01 3.99672985e-02 2.79900551e-01 ... -6.38662721e-04 -5.05515635e-01 -1.82824045e-01] [ 2.10131437e-01 -1.54157966e-01 4.71497416e-01 ... -6.49438620e-01 1.81051716e-02 -1.96221575e-01] [ 6.35192573e-01 2.36959055e-01 2.42472410e-01 ... -2.04096109e-01 -2.58090228e-01 -2.68800527e-01]] ... [[ 4.46720004e-01 1.68027177e-01 6.06293380e-01 ... -2.91507810e-01 2.08818108e-01 3.19436669e-01] [ 3.68722647e-01 3.42284411e-01 2.77563870e-01 ... -2.88239513e-02 -6.73341095e-01 5.85033417e-01] [-6.66642040e-02 -1.15524389e-01 4.61398721e-01 ... -2.46149659e-01 1.59987971e-01 1.91681966e-01] ... 
[ 7.10130343e-03 2.92251915e-01 -5.59673570e-02 ... 3.50857377e-01 5.61698973e-01 6.11643493e-01] [ 3.80415559e-01 -2.23328799e-01 1.52021080e-01 ... 1.86318442e-01 5.74335575e-01 5.50685972e-02] [-6.36783615e-02 2.84668177e-01 2.36351624e-01 ... 2.66087443e-01 -5.44775784e-01 -1.01650469e-01]] [[ 1.58274826e-02 2.07870781e-01 -6.85592070e-02 ... -8.00365686e-01 3.70968252e-01 -4.62019295e-01] [ 5.01590744e-02 3.05965096e-01 6.26275182e-01 ... -2.67849267e-01 -1.73551366e-01 -2.15259522e-01] [ 2.16208950e-01 -1.94279417e-01 -8.26943159e-01 ... 3.25348258e-01 5.65512143e-02 -3.40048313e-01] ... [ 6.13602102e-01 2.11631492e-01 -3.65614921e-01 ... -3.66931558e-01 -7.69790262e-02 2.24884450e-01] [-2.42322847e-01 -1.84921250e-01 -2.05870241e-01 ... 3.31086159e-01 2.07874730e-01 -3.79112139e-02] [ 3.47626954e-01 4.69655931e-01 -3.90216202e-01 ... -6.17705584e-01 1.06733000e+00 -6.11926392e-02]] [[-6.10166490e-02 -2.08324846e-02 1.00112426e+00 ... -6.36763051e-02 4.80547965e-01 6.52720630e-01] [-1.02241844e-01 -1.26019940e-02 -1.18121505e-02 ... -8.16047847e-01 -2.41526172e-01 3.71051937e-01] [-1.35269210e-01 6.51767179e-02 6.14972293e-01 ... -1.39086023e-02 -4.17245090e-01 -4.33636755e-01] ... [-6.04471425e-03 2.16408417e-01 4.66624379e-01 ... -2.72294044e-01 -1.08444549e-01 -8.02586600e-02] [-1.19634375e-01 -1.63853347e-01 -4.04450327e-01 ... 5.25501668e-02 -8.27492833e-01 1.08442712e+00] [ 1.11030452e-01 8.42887536e-02 -3.24005514e-01 ... -7.76914597e-01 -2.69044012e-01 -4.27698165e-01]]] [[[-9.65343118e-02 -1.77043080e-01 -2.36437887e-01 ... 3.92903797e-02 -2.50105374e-02 -3.19704175e-01] [ 1.09556532e-02 6.75803795e-02 2.67497487e-02 ... -1.52875498e-01 2.95510609e-02 1.87929329e-02] [ 1.75591812e-01 -9.24331248e-02 2.03452744e-02 ... -6.04558550e-02 5.40175214e-02 2.39219032e-02] ... [ 3.81017569e-04 -2.19216824e-01 1.52119145e-01 ... 1.14829198e-01 1.86606482e-01 5.93234645e-03] [-4.36138436e-02 2.76929513e-02 2.24365294e-01 ... 
6.13878407e-02 -1.31234407e-01 -4.00469154e-02] [-6.56072423e-02 -4.42824736e-02 5.45240752e-02 ... 3.92050706e-02 -1.64818078e-01 1.47690773e-01]] [[-4.14733104e-02 -9.59478971e-03 -1.09969474e-01 ... 2.45857015e-02 -6.74388036e-02 -6.51928261e-02] [-1.76661462e-01 -1.63356066e-01 1.52834222e-01 ... -1.34740369e-02 7.19675943e-02 1.40681133e-01] [-9.67328176e-02 2.11031541e-01 -4.39711921e-02 ... 1.93839833e-01 -1.21957511e-02 1.43287420e-01] ... [-1.65586546e-02 -4.20917803e-03 -1.08336173e-01 ... 6.92718551e-02 -4.35485393e-02 -2.09255055e-01] [ 2.87917238e-02 -1.33578941e-01 -1.34463841e-02 ... -4.87006158e-02 4.41058632e-03 -1.86928622e-02] [ 6.54872507e-02 -3.80645320e-02 -1.36757810e-02 ... -2.11232826e-01 -7.72031173e-02 6.85566384e-03]] [[-2.34491099e-02 -3.20313149e-03 -1.50023937e-01 ... -1.08594880e-01 -1.69269770e-01 -7.70883188e-02] [-9.27880630e-02 -8.72923583e-02 -1.50921261e-02 ... -6.64504319e-02 -3.76827642e-02 -4.04909737e-02] [ 2.58431047e-01 1.83837235e-01 -2.88280919e-02 ... -9.09286663e-02 -2.31901392e-01 7.49349520e-02] ... [-1.98777877e-02 1.80115569e-02 1.69990346e-01 ... -6.48767352e-02 8.55238140e-02 -4.81104665e-02] [ 1.90219611e-01 2.11226404e-01 -1.49202617e-02 ... -7.41943792e-02 -3.04554496e-03 -1.75263360e-01] [ 2.98823658e-02 -1.77876055e-01 7.10767135e-02 ... -6.18186221e-02 -8.86226296e-02 -1.70817941e-01]] ... [[ 3.43721882e-02 9.44230482e-02 5.60548902e-02 ... -1.70558959e-01 -9.39923227e-02 1.14937529e-01] [ 6.33325651e-02 8.58639739e-03 9.32625085e-02 ... 1.40449658e-01 9.63653550e-02 1.15732700e-01] [ 7.49145746e-02 8.08364302e-02 -4.03018072e-02 ... -1.14265643e-01 -1.64069250e-01 1.64380178e-01] ... [ 4.61870879e-02 -6.00868873e-02 1.14882223e-01 ... 3.77078764e-02 2.81649083e-01 7.22827797e-04] [-1.03794329e-01 -2.16335654e-01 2.03858957e-01 ... 1.63265411e-03 7.19985738e-02 -1.38690814e-01] [ 1.56062424e-01 -1.02560148e-01 9.45865065e-02 ... 
-5.19635789e-02 1.11465724e-02 2.62575895e-01]] [[ 8.01007822e-02 -7.89054949e-03 -9.05985311e-02 ... 1.98557079e-01 2.25428164e-01 2.02938944e-01] [-1.21004246e-02 -6.05206331e-03 -3.07623558e-02 ... -1.97737180e-02 1.30834326e-01 1.11871645e-01] [ 1.16791511e-02 -5.57849742e-02 -1.10254318e-01 ... 4.44766507e-02 -1.40268892e-01 -3.05028949e-02] ... [ 1.05567157e-01 3.88899818e-02 1.47142738e-01 ... 5.18473014e-02 7.16376901e-02 -4.26395461e-02] [ 5.13355285e-02 -4.53723082e-03 -7.72488490e-02 ... -5.29759154e-02 -3.35900597e-02 -1.05958618e-01] [ 3.30915749e-02 -1.76103175e-01 1.68329701e-01 ... -1.71973601e-01 1.99102476e-01 8.76031145e-02]] [[ 4.00683507e-02 3.46558481e-01 1.11018397e-01 ... -4.02751528e-02 1.07635066e-01 -1.62508175e-01] [ 4.17742170e-02 5.50874360e-02 1.43745735e-01 ... -7.36324713e-02 -2.94539630e-01 6.79326728e-02] [-1.18117243e-01 -3.45333159e-01 -1.17747873e-01 ... -2.18751635e-02 -4.55864780e-02 6.48064539e-02] ... [ 1.46020949e-01 -7.49992626e-03 1.92758113e-01 ... -1.43308491e-02 1.08205304e-02 3.28496844e-02] [-1.21210352e-01 9.63435173e-02 -8.35144892e-02 ... -1.74558505e-01 2.74599344e-02 -1.28423959e-01] [-3.85110527e-02 -5.06351031e-02 4.64116782e-02 ... 4.22064066e-02 7.40693659e-02 -1.30335463e-03]]] [[[-1.58229217e-01 3.46952885e-01 -2.77511656e-01 ... -7.42186844e-01 1.05543680e-01 2.64701158e-01] [-9.44109380e-01 1.21583685e-01 -5.07244408e-01 ... 1.21322429e+00 7.89847851e-01 1.08837140e+00] [ 7.24243104e-01 4.92691427e-01 -3.40727985e-01 ... -2.77475446e-01 -5.96645117e-01 -8.72842669e-01] ... [ 3.61452579e-01 4.32526231e-01 -1.33583689e+00 ... 4.96564023e-02 3.65894318e-01 3.60490941e-02] [ 9.97635052e-02 1.11618352e+00 1.91956177e-01 ... -2.87889212e-01 3.30667704e-01 5.36883235e-01] [ 4.37244147e-01 -3.50519925e-01 1.10248506e+00 ... -8.76667261e-01 -8.99868906e-01 -6.79661572e-01]] [[-1.11645699e+00 5.55832744e-01 -2.50760280e-03 ... 
-6.00603104e-01 -1.81173515e-02 -2.47467160e-01] [ 9.48666036e-01 -5.03265820e-02 2.21209377e-01 ... -5.90975821e-01 -5.62086642e-01 6.98157251e-01] [ 2.04920530e-01 -1.05519307e+00 -2.78004497e-01 ... -1.76011407e+00 -4.33058858e-01 2.06455141e-01] ... [ 3.58240128e-01 -1.47579342e-01 5.78121364e-01 ... -5.27487993e-01 2.50270963e-01 3.46522212e-01] [ 1.91748843e-01 5.71024597e-01 -2.58942842e-01 ... 2.62882650e-01 -2.15693250e-01 3.99264783e-01] [ 5.22706956e-02 -4.29866463e-03 -6.36460423e-01 ... -5.39617240e-01 -5.15267968e-01 -5.75986981e-01]] [[-4.65262204e-01 5.83479941e-01 -6.69611156e-01 ... 5.94357431e-01 6.57637566e-02 3.90836060e-01] [-7.15238154e-02 1.23230386e+00 1.02294102e-01 ... 3.96635771e-01 -3.16906184e-01 -1.08940259e-01] [ 1.14285064e+00 8.44961107e-01 -3.47542137e-01 ... 3.62522393e-01 -3.61764997e-01 1.46906734e-01] ... [ 3.98443639e-02 6.82629466e-01 1.26137042e+00 ... 3.08275610e-01 -4.37733859e-01 3.97313386e-01] [ 6.33025110e-01 -8.30637515e-01 -1.80109479e-02 ... -5.31890512e-01 -1.83897112e-02 -1.35438040e-01] [ 7.30813593e-02 -9.35283601e-02 1.03806126e+00 ... -2.32516378e-01 2.83938885e-01 1.38120232e-02]] ... [[ 3.92836183e-01 -6.79755211e-01 4.27238718e-02 ... 3.88077018e-03 3.35901916e-01 8.43272448e-01] [ 7.67300069e-01 1.52656245e+00 -4.32536662e-01 ... 2.11878687e-01 8.95761311e-01 9.14610386e-01] [ 6.58229828e-01 3.19045335e-01 6.06120169e-01 ... -6.96838439e-01 -1.52030742e+00 5.49748003e-01] ... [ 4.66400623e-01 3.82697433e-02 3.92895669e-01 ... 1.32351562e-01 6.06333256e-01 2.86386639e-01] [-6.09155297e-02 -6.92404985e-01 -2.52350718e-01 ... 6.14223862e-03 7.29800642e-01 -1.36538994e+00] [ 4.74276215e-01 -1.03452072e-01 -8.64567399e-01 ... -5.09979188e-01 -8.71419162e-02 -1.04809608e-02]] [[-8.30922604e-01 -6.32586777e-01 -6.15695179e-01 ... -1.25671620e-03 1.36416399e+00 1.06662560e+00] [-4.39661145e-02 -2.92232126e-01 -3.48190337e-01 ... 
1.39246322e-02 2.02294469e-01 3.58479053e-01] [-2.23881051e-01 1.14201105e+00 -1.55044883e-01 ... 3.86197418e-01 6.20736897e-01 -1.67006612e+00] ... [ 4.88305718e-01 -1.73500866e-01 6.65930986e-01 ... -9.38698649e-01 -2.21865833e-01 -1.25179801e-03] [-3.28872681e-01 4.87547427e-01 -3.17677706e-02 ... 1.22290456e+00 3.04969490e-01 -6.84476078e-01] [-2.15373319e-02 -1.27903655e-01 1.64603972e+00 ... -9.92589176e-01 3.15783828e-01 -4.05033886e-01]] [[ 1.80363491e-01 3.23790282e-01 2.79823035e-01 ... 4.36333776e-01 1.35174215e+00 5.04232235e-02] [ 8.24694097e-01 -3.33103836e-01 1.70589060e-01 ... -8.16783249e-01 -4.80059832e-02 3.88776302e-01] [-4.65104431e-01 -3.17740619e-01 -5.97435594e-01 ... -2.66638994e-01 9.13629159e-02 1.11740910e-01] ... [-1.03683436e+00 4.14628297e-01 -6.01467164e-03 ... 4.07871872e-01 4.04034197e-01 -2.32334256e-01] [-1.78179219e-01 -1.32236445e+00 -5.99079728e-01 ... 2.22164616e-01 6.06922269e-01 -2.01647782e+00] [-1.83021024e-01 1.00116050e-02 -4.63838995e-01 ... -3.86808783e-01 -2.69218355e-01 1.05434053e-01]]] [[[-2.09558815e-01 4.22687858e-01 -1.46955609e-01 ... 1.86960116e-01 -2.91399151e-01 -1.11798756e-01] [-3.91944200e-01 -6.35380685e-01 -1.64929748e-01 ... 8.19184780e-02 2.55243868e-01 -5.27481318e-01] [ 3.20896864e-01 -8.05046618e-01 -4.87900347e-01 ... 5.96569002e-01 2.30166852e-01 1.88089535e-01] ... [ 1.30685061e-01 1.52911082e-01 4.30608779e-01 ... -2.48982668e-01 -2.48987973e-02 4.10192043e-01] [-9.20484439e-02 5.71738183e-01 -1.04620241e-01 ... -1.71898693e-01 -5.21284521e-01 3.37007105e-01] [ 4.98260297e-02 5.62985949e-02 2.75662720e-01 ... 3.44234854e-02 -2.93885291e-01 -5.37471831e-01]] [[-2.88535833e-01 9.07959566e-02 -2.94159800e-01 ... 2.23237664e-01 -7.40859389e-01 2.62733042e-01] [-2.20988065e-01 4.04481888e-02 -1.26095101e-01 ... 3.35308760e-01 1.87004551e-01 1.19518088e-02] [-6.38685301e-02 -6.10363722e-01 2.20340237e-01 ... 6.74677454e-03 -4.15510871e-02 -3.74447078e-01] ... 
[ 5.15012350e-03 -1.32204086e-01 3.65400352e-02 ... 1.20178334e-01 -8.34550202e-01 -8.87792334e-02] [ 5.62740147e-01 -3.11895311e-01 -4.35325772e-01 ... -6.32520258e-01 4.86437976e-02 1.77112684e-01] [-4.74879771e-01 -1.53604820e-01 -3.37817997e-01 ... 3.14054877e-01 1.49324536e-01 7.85691068e-02]] [[ 5.30625820e-01 -6.88433945e-02 4.86910075e-01 ... -4.32235718e-01 -1.10676840e-01 -3.24019454e-02] [-3.94782349e-02 4.73471165e-01 -8.61925632e-02 ... -4.67380062e-02 -7.70075500e-01 7.93004259e-02] [ 6.06740974e-02 8.13458264e-02 2.21031815e-01 ... 1.45079732e-01 -3.17773074e-01 1.76740214e-01] ... [-2.58965820e-01 -1.40860662e-01 4.62105095e-01 ... -1.48026958e-01 2.14137957e-01 5.37225246e-01] [ 2.15288755e-02 8.76449406e-01 4.97773588e-02 ... -4.19539571e-01 9.86078009e-02 3.70845318e-01] [ 6.41075671e-01 -4.48934697e-02 4.75015968e-01 ... 2.19996907e-02 8.77427496e-03 3.07323128e-01]] ... [[-1.01651445e-01 3.01282465e-01 -6.74877524e-01 ... 2.65347779e-01 -5.15190899e-01 9.94588509e-02] [ 1.12175055e-01 -1.09260874e-02 -1.24685578e-01 ... 1.61366209e-01 6.17919005e-02 3.22119832e-01] [ 9.99610350e-02 -6.07825071e-02 -1.30242825e-01 ... 9.45198894e-01 -6.79396987e-02 -3.04966241e-01] ... [-1.84809193e-02 4.80042458e-01 -2.32867539e-01 ... -5.56300819e-01 2.02384554e-02 2.75641065e-02] [-8.91903788e-02 1.47095546e-01 2.64128763e-02 ... -7.93294981e-02 -4.67095882e-01 -1.78478971e-01] [ 5.49282767e-02 -3.49151373e-01 -4.16636080e-01 ... 6.50649607e-01 -3.85507762e-01 1.24155611e-01]] [[-5.17916605e-02 -2.73439825e-01 5.48763573e-02 ... 3.16466726e-02 1.38640450e-02 -4.97421145e-01] [ 5.38469911e-01 6.67797998e-02 -3.49274307e-01 ... -2.63007015e-01 -1.25246242e-01 2.83277705e-02] [ 2.59046286e-01 -1.01588547e-01 9.93688181e-02 ... 4.00338657e-02 -1.71168983e-01 -3.56697768e-01] ... [-1.00974403e-02 -2.16158062e-01 3.75108331e-01 ... 4.22865093e-01 8.49185660e-02 -7.75454789e-02] [ 3.08798328e-02 -2.14557976e-01 1.21903956e-01 ... 
-4.87562567e-01 2.31870487e-01 3.32113951e-02] [ 8.66557509e-02 5.29144593e-02 -2.75974981e-02 ... 2.30812624e-01 1.67393222e-01 5.19415475e-02]] [[ 1.52583152e-01 1.05505995e-01 -3.03298258e-03 ... 1.69966489e-01 4.76083085e-02 -6.02195375e-02] [-5.67540050e-01 -4.14671265e-02 3.82306948e-02 ... 8.63571912e-02 3.31584930e-01 -6.79804265e-01] [-6.81709826e-01 -7.85518587e-01 -6.07292615e-02 ... -1.90178111e-01 3.35140795e-01 -1.54572189e-01] ... [-3.65497079e-03 2.09592640e-01 9.52492803e-02 ... 2.57814117e-02 2.46273412e-05 1.46011367e-01] [ 2.01586977e-01 4.69350696e-01 2.40631223e-01 ... 2.38635972e-01 -2.68481135e-01 -2.53360868e-01] [-3.13351184e-01 3.81059423e-02 -2.74132371e-01 ... -6.16743267e-01 9.31342423e-01 -3.13000709e-01]]]]]; ov_res: [[[[[ 1.57293165e+00 -1.73008585e+00 1.35876882e+00 ... 2.57016778e+00 -3.07788467e+00 2.04401302e+00] [-3.65430146e-01 -1.28381276e+00 -1.64625371e+00 ... 7.91780591e-01 9.37812865e-01 -1.08460104e+00] [-1.47302020e+00 -4.31769878e-01 7.18027800e-02 ... 1.41923797e+00 -2.81823665e-01 -5.83705187e-01] ... [ 1.30427408e+00 -1.49685311e+00 -1.45711029e+00 ... 1.04556665e-01 7.65878499e-01 -7.35797346e-01] [ 2.00909042e+00 -1.03020608e+00 1.64924443e+00 ... -6.09891489e-02 1.87634006e-01 -1.58741009e+00] [ 1.40063310e+00 -1.60819983e+00 -5.83274126e-01 ... 1.02258921e+00 -4.71629947e-02 -2.55304646e+00]] [[-3.74492764e-01 -4.45479006e-01 -8.34256589e-01 ... 7.00621665e-01 -1.56243038e+00 2.24195197e-01] [-1.94311202e-01 1.95182920e+00 -4.65369880e-01 ... -2.34182644e+00 3.78632128e-01 -4.05516714e-01] [ 2.63389677e-01 2.56088686e+00 -9.36542094e-01 ... -6.07875168e-01 -1.23898518e+00 5.90579629e-01] ... [ 6.91639066e-01 -1.31261468e+00 1.33354485e-01 ... 8.03423762e-01 -1.23066723e+00 -1.32586762e-01] [ 1.11087966e+00 1.26586843e+00 -4.76064175e-01 ... -1.97661805e+00 1.95178115e+00 -1.91843092e+00] [-3.33686757e+00 -5.85520864e-01 3.16396022e+00 ... 
1.31530553e-01 1.40318334e+00 -4.15074444e+00]] [[ 1.23453641e+00 7.53596187e-01 -6.71475410e-01 ... 1.12215054e+00 9.28507864e-01 -8.04847479e-01] [ 4.43458885e-01 -3.27447429e-02 -1.19353211e+00 ... -1.63497165e-01 8.05142164e-01 4.30191469e+00] [-6.72409296e-01 -8.50156426e-01 -1.00998795e+00 ... 9.46862996e-01 4.13275868e-01 -8.22924197e-01] ... [-9.29512143e-01 -4.68621492e-01 1.56581616e+00 ... -2.20146298e-01 2.30428290e+00 -4.94651705e-01] [ 3.08786362e-01 -1.61067963e+00 -2.31807053e-01 ... 1.60192633e+00 5.76477230e-01 -1.74007809e+00] [ 1.09885228e+00 8.93145919e-01 9.16074753e-01 ... -1.63339436e+00 -1.71745586e+00 4.84252602e-01]] ... [[-2.35857278e-01 -1.22340047e+00 -2.42771339e+00 ... 3.90725303e+00 1.15300977e+00 2.58196807e+00] [-2.21831948e-02 3.50287259e-01 -1.76657832e+00 ... -2.01037192e+00 3.44488360e-02 -1.90589285e+00] [ 2.17674041e+00 1.98672190e-01 7.59069443e-01 ... -1.48064244e+00 2.44725490e+00 2.00600922e-01] ... [ 1.73337781e+00 9.04990077e-01 1.70072436e+00 ... 3.65089273e+00 3.34482878e-01 3.82154472e-02] [-2.31467819e+00 -1.79869461e+00 -1.63528287e+00 ... 1.04016709e+00 -1.51232719e+00 -1.21414423e+00] [-1.35652232e+00 -2.02994275e+00 -1.07565975e+00 ... 6.34643495e-01 -3.15915555e-01 6.67161405e-01]] [[ 1.30173814e+00 1.53135169e+00 2.16155100e+00 ... 5.23101091e-01 9.48294520e-01 8.36419344e-01] [ 5.06739736e-01 -3.41691661e+00 -1.77485871e+00 ... -2.24811649e+00 2.61012465e-01 1.01682377e+00] [-1.53163254e+00 2.16466236e+00 1.35442245e+00 ... -9.35806751e-01 -1.12106693e+00 -1.14842057e+00] ... [-1.29456729e-01 1.53217471e+00 1.81920815e+00 ... -8.57538357e-02 -9.93948281e-02 7.50858784e-01] [ 1.20796132e+00 -2.82449222e+00 6.34399235e-01 ... 4.09614980e-01 -2.11025906e+00 3.53378117e-01] [ 1.74582899e-01 -3.55744034e-01 1.62532818e+00 ... 4.27367575e-02 -2.18408561e+00 -1.11404705e+00]] [[ 4.34927940e-01 3.01222116e-01 -2.67906737e+00 ... 
3.41931939e-01 1.83515251e+00 -1.25213969e+00] [-9.65169847e-01 -7.48881578e-01 -2.01444483e+00 ... -6.15200698e-01 1.31395960e+00 -1.98249429e-01] [ 1.43748820e+00 1.01014757e+00 -1.24878705e+00 ... -2.70347834e+00 5.77812493e-01 2.78454751e-01] ... [ 5.76464951e-01 3.33814994e-02 -1.73738018e-01 ... 3.55292130e+00 1.27176487e+00 -6.60009444e-01] [ 4.29419708e+00 -1.76230341e-01 -2.30796367e-01 ... -1.51626551e+00 1.81802404e+00 3.78912129e-02] [-1.28716159e+00 2.22069383e+00 -3.86576355e-02 ... -1.94136870e+00 -2.75308466e+00 -5.78711808e-01]]] [[[ 1.07406877e-01 2.07179397e-01 1.02265418e-01 ... 4.09539431e-01 2.70925343e-01 2.49229521e-01] [-4.21925895e-02 3.77337933e-01 -8.31489861e-02 ... 2.87261039e-01 -1.59024104e-01 -1.31089259e-02] [-8.24662820e-02 3.79648656e-01 1.01027720e-01 ... 5.51531792e-01 -2.17972144e-01 4.27517802e-01] ... [ 1.14563935e-01 -1.13502078e-01 -1.30654434e-02 ... 3.73650402e-01 -2.25225538e-02 -5.78867197e-02] [ 6.33388311e-02 3.84782478e-02 -3.66214663e-01 ... 1.45443335e-01 2.28299305e-01 4.27336283e-02] [ 4.95216191e-01 3.44404966e-01 9.55394134e-02 ... 3.24469566e-01 -3.04989159e-01 1.01211205e-01]] [[ 3.40687364e-01 -1.94216706e-02 1.06275156e-01 ... 1.01467466e-03 8.76223296e-02 -5.04740002e-03] [ 2.14386396e-02 2.04495713e-01 3.04981947e-01 ... 3.46315235e-01 3.31131488e-01 -2.48475090e-01] [ 1.99587971e-01 -4.85887825e-01 2.08978932e-02 ... 4.81452763e-01 -2.41028830e-01 3.06345522e-01] ... [ 3.89919102e-01 1.02527644e-02 4.93333280e-01 ... 8.76353607e-02 -9.32045132e-02 6.96782917e-02] [-2.64398567e-03 1.78582191e-01 -2.39905268e-01 ... 5.11705160e-01 9.81750637e-02 1.33200809e-01] [-6.80734916e-03 1.42219305e-01 1.80987030e-01 ... 3.16408187e-01 2.63391912e-01 -1.64197043e-01]] [[-1.60464674e-01 -3.10177684e-01 -8.07456002e-02 ... 3.73146147e-01 9.07404542e-01 2.47160316e-01] [-8.28060433e-02 1.78926229e-01 4.20966566e-01 ... 2.17933789e-01 6.77931666e-01 -1.99716724e-02] [ 2.88246125e-01 -2.43584123e-02 1.34176672e-01 ... 
9.91350412e-02 2.04053223e-01 -3.10690492e-01] ... [-5.27129352e-01 -2.09954992e-01 -4.83720154e-01 ... 5.84523976e-02 -2.00989470e-01 2.03832388e-01] [ 5.17278276e-02 5.12061954e-01 -1.34257734e-01 ... -3.75781864e-01 -2.49467492e-01 1.51372910e-01] [ 1.70403585e-01 -3.08555692e-01 4.03284162e-01 ... -5.80338202e-03 -9.72378030e-02 -4.34447140e-01]] ... [[ 2.61567086e-01 2.36380890e-01 2.84580380e-01 ... 1.99668720e-01 6.23679943e-02 5.61355194e-03] [ 2.38831267e-01 -7.18777860e-03 -2.60362476e-01 ... -7.38632977e-02 2.60817975e-01 -7.34897405e-02] [-4.39301938e-01 1.40834287e-01 1.07663356e-01 ... -3.53253365e-01 2.37275839e-01 2.64045820e-02] ... [ 6.11049868e-02 -3.25518429e-01 -2.17779025e-01 ... 4.55466807e-01 -1.18115656e-01 -3.02834481e-01] [-3.27423029e-02 1.86021864e-01 -7.53477439e-02 ... 2.42824346e-01 1.16980568e-01 -6.60044611e-01] [ 1.23004809e-01 -1.60478562e-01 1.52183939e-02 ... -2.26722080e-02 -3.60325724e-01 -3.66530389e-01]] [[-3.04402411e-01 1.02661632e-01 3.24482359e-02 ... -1.56290859e-01 -3.44042629e-01 2.52844810e-01] [-5.99563479e-01 4.08178508e-01 3.99450362e-02 ... -5.31570375e-01 1.51214436e-01 8.27037022e-02] [-5.49557149e-01 -1.93313751e-02 2.94215858e-01 ... -2.79922456e-01 -2.56414860e-01 4.97647941e-01] ... [-6.37507811e-02 9.20312181e-02 -1.88558191e-01 ... 2.65264422e-01 1.21781221e-02 -4.51478623e-02] [ 7.45195374e-02 9.85904224e-03 -3.61494601e-01 ... 1.69825077e-01 -2.73005724e-01 4.15016785e-02] [-2.84138978e-01 3.50770503e-01 2.53351837e-01 ... -1.83078960e-01 2.19266526e-02 1.45366136e-03]] [[ 1.11334309e-01 2.62191117e-01 -3.40922512e-02 ... 1.46382675e-01 1.35952294e-01 1.27129212e-01] [ 2.87435025e-01 -2.99005806e-01 -4.63859826e-01 ... -6.88378662e-02 3.00633430e-01 2.73246199e-01] [ 1.48712635e-01 6.56703293e-01 4.22972143e-02 ... 4.19295192e-01 1.82251528e-01 -4.76250827e-01] ... [-1.47817731e-01 -7.44021773e-01 -4.03704286e-01 ... 
6.44485876e-02 -2.18463406e-01 -2.20580727e-01] [ 1.75945625e-01 4.17461507e-02 -2.72970684e-02 ... -2.71933347e-01 -7.02527240e-02 -1.91936240e-01] [-6.26024678e-02 1.77812167e-02 -2.04455629e-02 ... 1.76438406e-01 -2.29245439e-01 2.12329552e-02]]] [[[-1.35968134e-01 6.25366211e-01 9.43184018e-01 ... -1.12348301e-02 -5.30517876e-01 4.62494612e-01] [-5.60239792e-01 7.26440996e-02 -2.16966823e-01 ... -4.24548715e-01 4.67767239e-01 -4.49709982e-01] [-4.94503349e-01 -2.30010539e-01 -1.02518782e-01 ... 5.24603724e-01 3.52637246e-02 3.00646126e-02] ... [ 5.44021606e-01 -5.49517453e-01 5.26104629e-01 ... -2.49392495e-01 6.34565890e-01 7.54507259e-02] [-6.96379364e-01 4.06619795e-02 -1.33858696e-01 ... 8.64381373e-01 -3.71261060e-01 -3.93358730e-02] [-1.79394037e-01 1.36110917e-01 4.01882917e-01 ... -6.88948989e-01 -2.14626148e-01 -6.66806877e-01]] [[ 3.33190501e-01 6.81059539e-01 -5.09137101e-02 ... -4.73149478e-01 2.77370363e-01 1.82872862e-01] [ 2.05930278e-01 -5.20598888e-01 -2.19494358e-01 ... 8.75794664e-02 -3.01534235e-01 -1.58988103e-01] [-4.00194526e-02 -5.75734198e-01 4.75563794e-01 ... 8.59500527e-01 -3.70291531e-01 1.47412702e-01] ... [ 1.52400479e-01 4.86494184e-01 -4.29128259e-02 ... 6.80501342e-01 -2.04062499e-02 -2.51973867e-01] [ 5.51607251e-01 1.64151683e-01 5.92736661e-01 ... -1.22208409e-01 -1.21143781e-01 -2.19669804e-01] [-1.15082279e-01 4.45515662e-01 -5.34362972e-01 ... 3.88883829e-01 1.35285765e-01 -1.94600433e-01]] [[-6.06801212e-01 3.78351331e-01 -7.81048417e-01 ... -3.28748256e-01 2.34359309e-01 -4.28747386e-01] [-3.09164822e-01 -2.09451601e-01 3.90910693e-02 ... 3.52099121e-01 -2.85565227e-01 4.12723124e-01] [-3.78087349e-02 -5.77322878e-02 2.04746246e-01 ... -5.94185770e-01 3.15143257e-01 1.78214490e-01] ... [ 2.61632502e-01 -3.20982724e-01 4.88374121e-02 ... -7.73934126e-02 1.28036678e-01 -4.41223055e-01] [-1.28204748e-01 -1.72098391e-02 -2.83659905e-01 ... 
-3.64254229e-03 -2.22570285e-01 8.17884654e-02] [ 1.47241324e-01 4.68568578e-02 1.08458742e-01 ... 1.16619252e-01 -2.35206962e-01 6.72969282e-01]] ... [[-3.29459846e-01 4.05603379e-01 2.34173849e-01 ... 2.15899020e-01 -2.39511222e-01 -6.15052506e-02] [ 3.19939405e-01 -1.60257164e-02 1.41881198e-01 ... 1.99391365e-01 -1.55946044e-02 -5.00738084e-01] [ 1.82606652e-01 9.33066905e-01 8.14356357e-02 ... 4.29632932e-01 2.86777288e-01 -6.43319130e-01] ... [ 4.57351327e-01 -6.77713811e-01 -7.42777511e-02 ... -3.37178946e-01 -1.55540973e-01 -8.75741690e-02] [-1.28339946e-01 -2.58131504e-01 2.23852232e-01 ... 7.05885410e-01 3.92938286e-01 -8.70499834e-02] [ 6.14994109e-01 2.78328657e-01 1.55107351e-02 ... -2.29819894e-01 3.89592588e-01 -4.95328009e-02]] [[-2.87645739e-02 -1.33618161e-01 3.00883800e-01 ... -4.00303543e-01 2.57524282e-01 4.85178232e-01] [ 1.50724277e-01 -6.66221902e-02 1.27411738e-01 ... -1.18685037e-01 -4.20300886e-02 -5.91409504e-01] [-4.10127282e-01 4.24844861e-01 4.06674355e-01 ... -3.09808195e-01 7.30863094e-01 -2.08265379e-01] ... [-8.68283689e-01 8.30114186e-01 4.64910567e-01 ... 6.67213202e-01 -4.18082297e-01 -5.29533505e-01] [ 1.08533822e-01 -1.85865372e-01 -2.94567555e-01 ... -1.96645573e-01 -5.98153099e-02 -1.16753671e-03] [-3.64682317e-01 5.49582355e-02 -3.79468828e-01 ... 4.61543277e-02 4.63128448e-01 -4.49732915e-02]] [[-1.75279468e-01 3.21429849e-01 2.63018847e-01 ... 1.20360918e-01 -1.28071830e-01 2.03051209e-01] [-9.96375456e-02 1.67919159e-01 -4.28413719e-01 ... -3.61604951e-02 4.55010273e-02 -6.73886910e-02] [-5.14769852e-01 -3.15664351e-01 -6.18458949e-02 ... 1.13414980e-01 -1.84934258e-01 4.53423969e-02] ... [-1.63398430e-01 -9.68708217e-01 4.22402024e-01 ... 1.35632291e-01 2.52879336e-02 4.91164625e-01] [ 2.47005269e-01 1.73465148e-01 -7.90391192e-02 ... -1.63105816e-01 4.17142808e-01 7.53288090e-01] [-1.83621705e-01 4.93601151e-03 -7.93889090e-02 ... 
-1.90621421e-01 2.76384741e-01 2.36790836e-01]]] [[[ 9.11735967e-02 2.03661192e-02 1.39107063e-01 ... 1.29723653e-01 2.39161283e-01 -2.55767093e-03] [ 1.04862470e-02 1.44948646e-01 8.12522043e-03 ... -1.39695257e-02 -1.22679979e-01 4.52956595e-02] [ 9.92438719e-02 -1.17982157e-01 -1.79135174e-01 ... -1.03719182e-01 -1.20318219e-01 -2.99949422e-02] ... [-4.27052043e-02 1.15652174e-01 1.37033239e-01 ... 4.53875586e-02 -3.39219868e-01 -3.06679122e-02] [ 4.76909168e-02 1.17099226e-01 -1.51863679e-01 ... -1.16719231e-01 -4.89949202e-03 4.17382941e-02] [ 8.63662083e-03 6.58061579e-02 -1.21859193e-01 ... -9.03120637e-03 7.15331063e-02 -2.58710925e-02]] [[-2.45213315e-01 -4.28433120e-02 -1.68418571e-01 ... 2.19779491e-01 -9.94947404e-02 -3.89375305e-03] [-1.03971742e-01 -3.00233185e-01 -4.05678749e-02 ... -2.41939172e-01 -1.41834974e-01 1.32404799e-02] [-5.62499557e-03 2.21393824e-01 2.37318143e-01 ... -1.81637313e-02 5.11890501e-02 2.26956513e-02] ... [-1.13816820e-01 1.07804835e-01 4.15854193e-02 ... -1.02773495e-01 4.22755070e-02 9.64306411e-04] [ 8.48660693e-02 -2.04703528e-02 4.20553312e-02 ... -6.33235127e-02 -2.26725176e-01 -3.49880867e-02] [ 1.53436899e-01 -1.34181485e-01 -2.25791067e-01 ... -5.95451519e-03 -4.69867364e-02 9.96362511e-03]] [[ 3.78717259e-02 1.14749841e-01 -1.97572857e-02 ... 1.34971142e-01 -2.23236084e-01 1.60919890e-01] [ 1.21105812e-01 -5.15929200e-02 -6.37795106e-02 ... -5.40913222e-03 -3.70480180e-01 -1.22538522e-01] [-1.75665513e-01 8.78761262e-02 -2.38835607e-02 ... 2.67084744e-02 6.62704110e-02 -1.20251432e-01] ... [ 4.70320322e-02 -2.58062072e-02 1.49737708e-02 ... -1.41903624e-01 2.88937390e-01 -1.57774344e-01] [-1.17808633e-01 -5.72585948e-02 3.03041562e-02 ... -2.64939517e-02 1.24040790e-01 5.43225892e-02] [ 1.37477770e-01 -6.84426650e-02 -2.16388315e-01 ... 2.80523133e-02 -9.18398052e-02 7.36398483e-03]] ... [[-1.70405358e-02 9.59120467e-02 4.68553901e-02 ... 
1.99813768e-01 -3.32567334e-01 5.07591246e-03] [-8.15230682e-02 -1.07028037e-01 5.13694547e-02 ... 8.90148133e-02 1.69409420e-02 8.18916708e-02] [ 1.01829372e-01 1.22298777e-01 7.02143312e-02 ... -1.17492996e-01 1.61919758e-01 -1.85034379e-01] ... [ 1.22506402e-01 6.10285662e-02 -1.62745833e-01 ... -2.71772921e-01 -7.45083988e-02 2.68985033e-02] [-2.07667366e-01 1.40142769e-01 -1.71612963e-01 ... 1.95045412e-01 -2.12143034e-01 -1.62920415e-01] [-1.33962646e-01 -8.92287269e-02 -8.83040950e-02 ... 1.76474124e-01 1.05384402e-01 -1.85645685e-01]] [[ 1.02928646e-01 -6.24332987e-02 4.00788300e-02 ... -6.24153838e-02 -1.83058456e-01 4.15574126e-02] [ 9.53484676e-04 -1.17883250e-01 1.17716700e-01 ... -6.09827302e-02 4.51199189e-02 -1.85563087e-01] [-2.46140324e-02 -7.61262402e-02 -2.55593479e-01 ... 5.85599616e-02 1.53602973e-01 -1.49294287e-01] ... [ 1.69266105e-01 -1.75237954e-01 -5.84493540e-02 ... -3.38203460e-02 -9.94715020e-02 -7.68049806e-02] [ 2.70193070e-02 -9.55166519e-02 -7.81800039e-03 ... 1.62533909e-01 1.59103751e-01 2.68051207e-01] [ 3.57230186e-01 -8.66146982e-02 2.87712514e-01 ... -1.29374832e-01 -3.14381033e-01 -7.99971372e-02]] [[-8.02712664e-02 3.12662087e-02 -4.67269123e-02 ... 2.20497102e-01 -1.41357228e-01 -5.16740642e-02] [-7.80211836e-02 -1.92253768e-01 1.51371315e-01 ... -1.14554875e-01 1.65514424e-02 1.13305539e-01] [-4.01388183e-02 -6.89615533e-02 -1.69777215e-01 ... -2.97792345e-01 1.07722431e-01 -3.49744022e-01] ... [ 9.88751999e-04 -1.55010112e-02 1.13837622e-01 ... -3.99232842e-02 -1.46585613e-01 -5.17342426e-02] [ 1.81548297e-01 -6.05103634e-02 -2.80459791e-01 ... 7.41038993e-02 -7.61975423e-02 -1.17926132e-02] [-1.11202218e-01 -1.55270725e-01 -3.86415981e-02 ... 1.13672547e-01 2.37694886e-02 -1.58460766e-01]]] [[[-1.01863182e+00 6.53663516e-01 -5.22093415e-01 ... 9.27320957e-01 1.37532425e+00 -2.28467569e-01] [-1.31861657e-01 -1.02261984e+00 -1.34353846e-01 ... 
-2.18323752e-01 8.77006114e-01 -5.47593497e-02] [ 7.89414167e-01 -4.65194076e-01 1.77177355e-01 ... 1.15243506e+00 -8.87582004e-01 6.76881552e-01] ... [ 1.09226310e+00 -8.25257301e-01 -7.56569386e-01 ... -1.24982104e-01 -5.68543315e-01 -6.49464071e-01] [ 9.90761399e-01 -1.63956553e-01 -1.54075432e+00 ... -4.57694650e-01 3.87983739e-01 1.14908838e+00] [ 7.04696774e-01 3.87886129e-02 -2.31588170e-01 ... 2.51970053e-01 4.36544865e-01 6.65237308e-01]] [[-5.00961721e-01 8.93413723e-01 2.45314360e-01 ... 9.88140762e-01 -2.72250343e-02 -8.70201647e-01] [ 4.98390086e-02 -4.19390440e-01 -3.86740506e-01 ... 2.25761712e-01 2.11806837e-02 1.15281090e-01] [ 8.32277060e-01 2.66836792e-01 6.80307329e-01 ... -9.12977874e-01 -5.90412080e-01 2.39413404e+00] ... [-5.37915289e-01 6.53255433e-02 7.05167770e-01 ... 7.76218653e-01 -3.81138474e-02 7.53058851e-01] [-1.13995779e+00 -3.10423940e-01 -4.28528100e-01 ... 6.00819401e-02 1.55287758e-01 -5.57671249e-01] [-1.28653407e+00 -1.20016682e+00 6.71125054e-01 ... -5.71879983e-01 -1.77857894e-02 -4.97389823e-01]] [[-2.62596428e-01 2.19048887e-01 -7.73709655e-01 ... 7.82203853e-01 5.90757914e-02 -1.09196305e+00] [ 6.02557957e-01 6.27997935e-01 -5.36615610e-01 ... -6.12440169e-01 -4.22186524e-01 -8.39251101e-01] [-9.92839575e-01 4.51897413e-01 -1.20758343e+00 ... -1.56831503e-01 2.20579356e-01 9.34211493e-01] ... [-1.17347288e+00 5.87191701e-01 -6.48739576e-01 ... 5.10661542e-01 4.27428991e-01 1.26169360e+00] [ 6.72404170e-01 -1.54478240e+00 -8.93325388e-01 ... 5.84926903e-01 1.29218876e-01 -2.38897413e-01] [-5.67310154e-01 7.08154976e-01 -8.15765202e-01 ... 3.10565829e-01 -3.01971197e-01 -7.76131451e-02]] ... [[-6.85860589e-02 2.57627666e-02 3.52290392e-01 ... 6.18332028e-01 -7.98919559e-01 6.17546439e-01] [ 7.31249511e-01 -2.22867280e-02 -5.15805840e-01 ... -1.19225883e+00 1.90642640e-01 1.27405035e+00] [-1.27407622e+00 -1.77517936e-01 -2.84047186e-01 ... 7.51428664e-01 -8.37766305e-02 -4.73061025e-01] ... 
[ 4.21387017e-01 -1.09421599e+00 -6.05145842e-02 ... -4.50402677e-01 -5.79067051e-01 -2.59574294e-01] [ 6.44991770e-02 4.67790335e-01 -1.75334036e-01 ... 7.85636902e-01 -2.11147130e-01 -3.41853313e-02] [-3.93238485e-01 -8.83513764e-02 1.29843354e+00 ... -1.32810324e-01 1.37779444e-01 4.22486514e-01]] [[-2.15547681e-02 5.98987997e-01 1.62387088e-01 ... 1.22747326e+00 9.94690284e-02 9.42293853e-02] [-1.10383868e+00 1.49480686e-01 -1.85283478e-02 ... -1.56934842e-01 7.84673154e-01 -2.26230577e-01] [ 8.99554074e-01 2.15219885e-01 -4.34319347e-01 ... 5.53930700e-01 -1.43581748e+00 -1.07015634e-03] ... [-2.80703515e-01 -1.19561267e+00 5.13801813e-01 ... -7.32125044e-01 9.82073188e-01 -4.05050963e-01] [ 7.29610324e-01 -1.51515245e+00 -8.37739110e-01 ... 5.18954992e-01 -3.02161306e-01 5.96232116e-01] [ 3.19492280e-01 6.35945797e-01 6.91047490e-01 ... -9.16669428e-01 -5.65297425e-01 -1.68558404e-01]] [[ 1.34295237e+00 6.97408915e-01 -5.46457589e-01 ... -3.06893200e-01 1.70222260e-02 -5.85207045e-01] [ 4.24068868e-01 1.19290926e-01 1.62892789e-01 ... -1.73554078e-01 3.01393658e-01 1.19045210e+00] [-2.27936119e-01 1.19242787e+00 4.81966108e-01 ... 5.19293249e-01 7.84431919e-02 -5.05185537e-02] ... [-2.32486561e-01 -6.12520814e-01 -2.67888397e-01 ... -1.44394863e+00 6.86191022e-01 -5.84946349e-02] [-4.35270011e-01 -8.40262473e-02 -1.21768522e+00 ... -4.63556767e-01 4.92211670e-01 -3.09680432e-01] [-5.65126091e-02 -2.70485021e-02 5.88811457e-01 ... 7.19206750e-01 6.82039320e-01 -3.02687913e-01]]] [[[-4.82974090e-02 5.39997280e-01 -1.34492785e-01 ... 1.26238123e-01 3.72275263e-02 -1.21611789e-01] [ 2.51574200e-02 -1.38377905e-01 3.37819904e-02 ... -4.53803577e-02 5.15217721e-01 -4.19796705e-02] [ 9.87837166e-02 2.23897517e-01 8.53785649e-02 ... -3.24112713e-01 3.20017695e-01 1.96231514e-01] ... [ 2.21057728e-01 2.30893523e-01 -4.13466468e-02 ... 1.92305520e-02 3.12453717e-01 -2.91322082e-01] [ 1.54425964e-01 8.33500803e-01 4.60118167e-02 ... 
6.21851027e-01 3.10559779e-01 -1.92904398e-01] [ 8.06837618e-01 3.31882358e-01 -1.82024270e-01 ... -3.62281322e-01 8.56384411e-02 -3.32459152e-01]] [[-2.59919107e-01 2.87021488e-01 -1.97259530e-01 ... -3.00591886e-01 2.16976941e-01 -1.76477149e-01] [ 3.08504134e-01 -9.13448751e-01 -3.09679151e-01 ... 1.04368115e-02 -1.40023172e-01 1.55079663e-01] [-4.82134730e-01 3.95994067e-01 5.71642876e-01 ... 5.20662487e-01 -6.69345677e-01 1.85667232e-01] ... [ 6.60773039e-01 -3.80075157e-01 -3.06011379e-01 ... -4.78668749e-01 2.08705273e-02 4.05913919e-01] [-1.62295491e-01 4.00520295e-01 -1.18635468e-01 ... -4.22158390e-01 1.71572760e-01 1.07453093e-01] [-3.29790831e-01 1.31467357e-01 3.71128440e-01 ... 2.46181279e-01 -1.63854375e-01 -2.10102811e-01]] [[ 4.01324704e-02 9.85763222e-03 7.36965314e-02 ... -1.60349794e-02 3.66914153e-01 2.00921968e-01] [ 9.44238976e-02 -2.39800185e-01 -2.91623801e-01 ... -1.11377008e-01 1.26436830e-01 -1.95322499e-01] [ 1.27001390e-01 5.58832437e-02 3.27845663e-01 ... 6.19626865e-02 -3.56819928e-02 -1.59779042e-01] ... [-1.70019884e-02 -1.62176594e-01 8.79295617e-02 ... 4.76136655e-01 8.57589319e-02 8.17048669e-01] [-2.60322899e-01 -5.56116879e-01 -9.19800699e-02 ... 2.05402955e-01 -1.96194157e-01 -1.37846276e-01] [ 6.01586938e-01 5.43356061e-01 -5.88856280e-01 ... -2.93131381e-01 -3.27936053e-01 -2.22262308e-01]] ... [[-1.06070615e-01 -3.11652124e-01 -5.95086277e-01 ... 5.34519255e-01 8.50937217e-02 -2.75868684e-01] [ 3.63539942e-02 -8.25728834e-01 -6.67365566e-02 ... 4.88519631e-02 -5.22270463e-02 -1.00190930e-01] [ 7.74973273e-01 -2.10250333e-01 -4.62966338e-02 ... -6.12287402e-01 -2.16296509e-01 3.05183046e-02] ... [ 9.54248071e-01 -5.15933037e-01 -8.87079760e-02 ... -1.36587262e-01 1.15526550e-01 -2.66947806e-01] [-1.87695369e-01 -3.79780769e-01 -1.05733395e-01 ... 3.38740796e-02 -1.81372792e-01 -5.21805705e-05] [ 3.49552453e-01 -1.28113732e-01 2.95951486e-01 ... 
3.53334188e-01 1.11741625e-01 -2.70660132e-01]] [[ 1.76983178e-01 -1.62015297e-02 1.76741257e-01 ... 4.76656616e-01 -8.31745327e-01 7.41026521e-01] [-3.61903548e-01 6.79343760e-01 -1.15207136e-01 ... 5.07466435e-01 -5.20252347e-01 2.01595739e-01] [ 5.37407696e-02 4.09339070e-01 -2.14424148e-01 ... -9.35866758e-02 -4.22132730e-01 -4.49579269e-01] ... [ 2.65679270e-01 1.73158273e-01 1.26378834e-01 ... -3.36624719e-02 5.94922714e-02 -4.17443877e-03] [-8.23392496e-02 1.51634580e-02 -3.94702107e-01 ... -5.16317114e-02 3.08389887e-02 2.10362077e-01] [-1.70503467e-01 -3.35589379e-01 3.80115062e-01 ... 3.70600045e-01 3.74375731e-01 7.02518132e-03]] [[ 1.68702424e-01 5.65455079e-01 3.13532919e-01 ... -3.01687479e-01 -3.23039830e-01 -1.68918625e-01] [ 4.40660212e-03 5.26934564e-02 1.43682249e-02 ... -5.49092367e-02 -1.63734540e-01 -3.67026865e-01] [ 4.17512134e-02 5.83072722e-01 6.08185232e-01 ... -1.62454396e-02 1.89281255e-01 -4.90500659e-01] ... [-2.38756120e-01 -3.90972078e-01 -1.68363497e-01 ... -5.54421127e-01 3.24646607e-02 -1.45127848e-01] [ 8.36930990e-01 8.83968472e-02 3.31531942e-01 ... -3.27212006e-01 -9.84825939e-02 4.84653652e-01] [-7.21233070e-01 1.21124968e-01 4.82184052e-01 ... 1.28827766e-01 -4.09393311e-01 7.20118806e-02]]]] [[[[-7.86094248e-01 -3.12043071e+00 3.23608756e-01 ... -8.34669024e-02 -5.99619448e-01 1.64115834e+00] [-1.90715265e+00 1.98695898e+00 7.25875199e-01 ... -2.59956002e-01 -1.09928578e-01 4.13318962e-01] [ 7.27092505e-01 6.10130072e-01 -2.51107645e+00 ... -6.05393946e-01 1.49770856e-01 1.49016738e+00] ... [ 2.01164341e+00 1.70175588e+00 -2.94679433e-01 ... -1.18849170e+00 1.66818094e+00 1.88446474e+00] [ 6.50909021e-02 -6.58182144e-01 -2.73654509e+00 ... 2.62699664e-01 1.96845150e+00 7.23549247e-01] [ 8.01869452e-01 -8.09260488e-01 1.94183528e+00 ... 5.15583515e-01 6.33293331e-01 1.05724907e+00]] [[ 3.88287038e-01 -5.65221131e-01 -2.00964713e+00 ... 
-3.74479198e+00 -3.55605990e-01 -4.25562114e-01] [ 1.17839110e+00 -7.20070362e-01 1.02081430e+00 ... -5.37091911e-01 -1.84626147e-01 2.30257964e+00] [-2.47772217e+00 -5.69970012e-01 1.34643388e+00 ... -1.11837018e+00 -4.57782716e-01 1.02366698e+00] ... [ 1.82343757e+00 -1.47500837e+00 -1.47170854e+00 ... -7.91400194e-01 7.59089112e-01 -8.62568140e-01] [ 2.95756403e-02 1.36849666e+00 1.51493168e+00 ... -5.00155210e-01 5.27917981e-01 -3.56587112e-01] [-2.55355501e+00 -1.92452919e+00 -1.37244833e+00 ... 8.66548002e-01 -5.09243727e-01 -3.53269672e+00]] [[ 1.01305759e+00 -3.78335625e-01 -1.31220222e+00 ... 1.41989553e+00 -9.17964280e-02 -1.14951479e+00] [ 1.92134231e-01 -2.00282884e+00 3.23741287e-01 ... -1.19463158e+00 -1.05967319e+00 -4.97592717e-01] [-2.11231977e-01 2.82837009e+00 -2.79877234e+00 ... 1.73481059e+00 -9.69408274e-01 6.72143579e-01] ... [-1.11477685e+00 1.62594104e+00 -1.41199306e-01 ... 2.44737053e+00 7.63935924e-01 -3.38018000e-01] [ 3.13794351e+00 1.65362275e+00 -3.56887221e-01 ... 4.36582446e-01 1.19648492e+00 1.65876400e+00] [ 6.56285465e-01 -3.13694954e+00 -6.63014948e-01 ... 4.96493548e-01 -4.63851131e-02 1.94467235e+00]] ... [[ 3.18453670e+00 -1.76852596e+00 2.92259455e+00 ... 3.51285964e-01 -1.86768210e+00 2.79080820e+00] [-2.98459196e+00 1.78966880e+00 2.73459971e-01 ... -1.97610378e+00 5.30282617e-01 -6.09998643e-01] [-1.82866468e-03 2.90633941e+00 1.20181441e+00 ... 1.45162129e+00 -7.94723570e-01 5.14125168e-01] ... [-5.76395810e-01 -3.41207564e-01 9.61261511e-01 ... -1.72940433e-01 -5.50501747e-03 1.04014121e-01] [-1.67066634e+00 7.76849166e-02 1.25782824e+00 ... -2.83405995e+00 1.15007472e+00 -6.58607185e-02] [ 3.76643687e-01 -1.18836892e+00 1.58258271e+00 ... 1.62887633e+00 -2.90202093e+00 -2.30898714e+00]] [[ 2.22520542e+00 -1.72807229e+00 2.28317618e+00 ... -5.95779002e-01 -1.08217084e+00 2.76352286e-01] [ 1.69984862e-01 2.08753657e+00 1.11687076e+00 ... 
-1.07959187e+00 -1.98688257e+00 4.07072496e+00] [ 1.73000312e+00 2.39697385e+00 -7.82222807e-01 ... 6.81105852e-01 1.36486435e+00 -2.39640683e-01] ... [-1.78545487e+00 -1.94833827e+00 -1.75474846e+00 ... 7.70991683e-01 5.88474452e-01 -2.26664707e-01] [-9.77523565e-01 -4.76596147e-01 -4.67584878e-01 ... -3.04604590e-01 -7.84535110e-01 -2.10411167e+00] [ 6.99572682e-01 -1.90516579e+00 3.46003318e+00 ... 2.77444029e+00 -6.86022997e-01 1.79297411e+00]] [[ 1.89424956e+00 8.86501312e-01 -7.00195372e-01 ... -1.31490028e+00 3.91293734e-01 1.65842128e+00] [ 3.00967884e+00 -1.54819536e+00 -2.03699732e+00 ... 1.18767250e+00 -4.74334180e-01 -1.66292679e+00] [-2.32810497e+00 3.33280635e+00 -7.47939274e-02 ... -9.24772263e-01 -3.72742862e-01 9.88260686e-01] ... [ 2.77533174e-01 -2.56295776e+00 -3.74549687e-01 ... -3.60924065e-01 -4.37185913e-01 2.23839641e+00] [-1.26478970e+00 7.53866315e-01 -3.86801660e-01 ... -2.01512739e-01 1.76679981e+00 1.76944995e+00] [-5.47532737e-01 -9.94322076e-03 2.80715078e-01 ... -2.75891757e+00 1.48261324e-01 -4.86031920e-02]]] [[[-1.10623606e-01 1.35796517e-01 1.14771456e-01 ... -1.16226867e-01 -4.05804932e-01 -3.39561552e-01] [-4.74921703e-01 -8.27078149e-02 4.87293929e-01 ... 1.76821768e-01 1.00435644e-01 1.64577201e-01] [ 2.71899819e-01 -3.63966167e-01 1.43082097e-01 ... -1.13594882e-01 1.76282212e-01 -2.25812986e-01] ... [-1.43569410e-01 -3.09771299e-01 5.37549615e-01 ... 5.17846458e-02 -2.70816296e-01 3.01279038e-01] [ 3.08837920e-01 -2.61859238e-01 1.00057848e-01 ... -2.40669951e-01 -2.08606541e-01 -5.35124429e-02] [ 1.66282244e-02 -4.26845290e-02 -9.28573832e-02 ... 5.22769868e-01 -1.85762897e-01 1.94316462e-01]] [[-3.02889887e-02 1.64940372e-01 3.09194624e-01 ... 3.43349069e-01 -4.71096858e-02 -1.53183371e-01] [-2.89502770e-01 3.59569967e-01 -5.22086322e-01 ... -3.52885932e-01 -2.09113851e-01 1.79931089e-01] [-1.62177071e-01 -1.93781522e-03 7.42618963e-02 ... -5.73304474e-01 -2.49151811e-01 -4.04058099e-01] ... 
[ 5.30851722e-01 -1.05129942e-01 8.15704614e-02 ... 2.71140605e-01 -6.30713031e-02 2.18209058e-01] [-3.92789133e-02 2.64105909e-02 -3.00179064e-01 ... -4.85498548e-01 2.87082702e-01 6.50474727e-02] [-2.25786924e-01 1.18268259e-01 5.45907728e-02 ... 5.92940636e-02 1.55335695e-01 2.48710111e-01]] [[-1.66276872e-01 3.61532629e-01 -1.83852345e-01 ... 5.74236289e-02 4.43501584e-02 -4.50059921e-01] [-3.45676154e-01 -4.40427184e-01 -4.08142447e-01 ... 1.71455383e-01 -1.27157956e-01 2.26376593e-01] [-2.39490017e-01 -1.72339156e-01 -4.55583483e-01 ... 2.69790560e-01 -1.90068945e-01 6.17985845e-01] ... [ 8.39300677e-02 -4.71170306e-01 2.60978520e-01 ... 3.90707314e-01 -7.66306594e-02 -2.22351134e-01] [-8.00869763e-02 1.79368615e-01 7.38571543e-05 ... 2.70525753e-01 1.67728588e-01 1.66805431e-01] [ 4.37503427e-01 1.75528437e-01 1.50144529e-02 ... -1.46481618e-02 5.58138430e-01 2.79817283e-01]] ... [[-1.09959058e-01 -2.91662157e-01 -3.86611849e-01 ... 3.40314843e-02 6.73939809e-02 -2.51438856e-01] [ 5.10138035e-01 -4.33438569e-02 -3.71405989e-01 ... 3.77057076e-01 -1.24180265e-01 -1.11980692e-01] [ 3.91021758e-01 8.07000771e-02 -9.05704573e-02 ... -2.18519554e-01 -1.50761738e-01 -3.37951779e-01] ... [-4.96855468e-01 -2.74040282e-01 -1.25061750e-01 ... -5.46596572e-02 -2.10833609e-01 3.92046422e-01] [-1.45922616e-01 -5.14700413e-02 3.32109541e-01 ... 3.67341965e-01 6.00811988e-02 1.03780918e-01] [ 3.47863697e-02 -2.74266809e-01 3.00605804e-01 ... 3.66746008e-01 2.05260351e-01 -2.23190993e-01]] [[ 1.22990884e-01 -2.75004119e-01 -2.67805636e-01 ... -2.58208752e-01 2.46792346e-01 2.56434888e-01] [ 4.43510056e-01 -8.56207116e-05 -2.24120528e-01 ... 5.44628322e-01 -1.10128716e-01 2.93230116e-01] [ 5.15899807e-02 3.21058780e-01 -7.54663572e-02 ... -2.72903770e-01 1.20327212e-01 3.81141268e-02] ... [ 9.76549014e-02 -1.23101704e-01 -2.34366462e-01 ... -3.78951520e-01 4.45763975e-01 -4.66674790e-02] [-1.41355038e-01 -2.13665720e-02 -1.93719834e-01 ... 
6.54960200e-02 -2.73623198e-01 1.86728403e-01] [-1.77109838e-02 -1.06798947e-01 -4.84231189e-02 ... -1.32270485e-01 3.82995456e-01 1.56975672e-01]] [[-3.43241602e-01 -1.29774839e-01 -3.14449012e-01 ... 4.50561553e-01 -9.83249173e-02 -8.12056214e-02] [ 1.24515116e-01 6.61872625e-02 1.21251948e-01 ... -9.71416384e-02 4.00387496e-01 -1.55273052e-02] [-1.83797136e-01 1.68836508e-02 1.23555260e-02 ... -1.01959616e-01 -2.30783522e-01 -3.11849453e-02] ... [-3.18094373e-01 -2.97656864e-01 1.81004137e-01 ... 4.60984766e-01 4.98192571e-02 1.74756736e-01] [ 3.40805352e-01 -1.06925614e-01 5.75325973e-02 ... 4.59745973e-02 2.12784961e-01 -9.15220827e-02] [-3.85044999e-02 -3.84442024e-02 -6.86225146e-02 ... -1.70485809e-01 2.72119820e-01 -4.13382947e-02]]] [[[-4.43777032e-02 -4.24218208e-01 -6.37099147e-01 ... -2.05946174e-02 2.68071778e-02 3.68462503e-01] [-3.29692692e-01 1.62235662e-01 1.60287648e-01 ... 7.14187771e-02 -2.15701967e-01 6.61980331e-01] [-4.97187495e-01 6.73335791e-01 8.31217617e-02 ... -5.86408079e-01 -6.06492937e-01 1.04188092e-01] ... [ 1.97343439e-01 -1.69079736e-01 -3.52270573e-01 ... 1.93308797e-02 -4.73055542e-01 -1.37881875e-01] [-3.17305744e-01 1.58739224e-01 -5.09742200e-01 ... 8.31067562e-02 6.69813678e-02 -1.11299336e-01] [ 1.24421000e-01 8.99644196e-02 5.82336843e-01 ... -2.12187573e-01 -1.19828999e-01 -2.13378698e-01]] [[ 7.96801597e-02 2.94944912e-01 3.56809765e-01 ... 2.07544982e-01 -8.91491547e-02 -4.46725130e-01] [-5.32255888e-01 3.77192408e-01 4.67195362e-02 ... 2.32021645e-01 3.32747884e-02 -2.74412245e-01] [-4.20005053e-01 -2.60729522e-01 1.00284159e-01 ... -3.87351155e-01 1.99943289e-01 -6.47210598e-01] ... [-9.34317634e-02 8.71758759e-01 3.31824757e-02 ... -1.02229849e-01 -1.31380513e-01 -2.60269046e-01] [ 1.05434313e-01 -2.24815607e-01 3.99739556e-02 ... 7.73392260e-01 9.33918804e-02 3.82456243e-01] [-5.35149872e-01 -6.84599340e-01 -2.82506138e-01 ... 
-4.42568243e-01 -2.54852355e-01 2.14941919e-01]] [[-1.90651402e-01 2.08878696e-01 -6.98493958e-01 ... -1.60683095e-01 -1.78432181e-01 -3.76182526e-01] [-2.86594003e-01 4.33197767e-01 -2.96066135e-01 ... -5.22280514e-01 3.87701362e-01 -4.89384383e-01] [ 1.17056243e-01 1.48086548e-01 4.72604424e-01 ... 4.17395025e-01 -1.12758808e-01 -6.29326284e-01] ... [ 2.19690561e-01 -3.79778653e-01 -3.50748211e-01 ... 4.99928236e-01 -1.55486360e-01 2.29081601e-01] [-6.98736235e-02 2.43982449e-01 5.73452353e-01 ... -5.97992301e-01 -3.28356802e-01 6.20145202e-01] [ 1.52339056e-01 -4.36363602e-03 -1.88072816e-01 ... 2.28972822e-01 -1.30562156e-01 4.01708260e-02]] ... [[-6.53347492e-01 5.53137124e-01 5.02559364e-01 ... -6.71435073e-02 -6.23718858e-01 -4.57511604e-01] [-3.94444883e-01 2.95292169e-01 2.01962456e-01 ... -2.98624784e-01 -7.38490880e-01 2.62600660e-01] [ 2.66081184e-01 -4.13058028e-02 8.22196007e-01 ... 8.26410353e-01 -6.05377853e-02 -2.67891794e-01] ... [-2.96136737e-01 1.31935537e-01 -2.52891988e-01 ... -3.25475246e-01 2.25792989e-01 -4.53609496e-01] [-6.89618522e-03 5.27495921e-01 -2.23306492e-01 ... 2.74165094e-01 -4.51939344e-01 -1.62012845e-01] [ 2.51555979e-01 -1.66288521e-02 -8.90897308e-03 ... -5.80858409e-01 -3.60775255e-02 -6.12266421e-01]] [[ 2.16966808e-01 5.73867500e-01 4.72824365e-01 ... -3.55819583e-01 2.70439863e-01 -5.56015313e-01] [ 2.93215632e-01 2.38432754e-02 2.25634530e-01 ... 1.07046008e-01 -1.38872221e-01 5.32199442e-01] [-4.92719561e-01 1.70810267e-01 3.73984724e-01 ... 4.82259542e-01 4.20374483e-01 5.72280109e-01] ... [ 4.82811898e-01 -1.93431340e-02 2.66530037e-01 ... 5.01596630e-01 -5.22277892e-01 3.22130978e-01] [ 4.28696573e-01 -1.64042227e-02 4.72341090e-01 ... 6.65788874e-02 3.98088386e-03 -1.08341984e-02] [-2.30525926e-01 3.58105868e-01 -3.69288679e-03 ... -6.40461981e-01 -1.96398914e-01 -2.35750556e-01]] [[ 6.03294253e-01 -4.74459022e-01 8.63601118e-02 ... 
2.46204153e-01 -8.41486692e-01 2.47070953e-01] [-2.73312062e-01 -1.45337477e-01 7.35379755e-01 ... 3.73634934e-01 -4.69774306e-02 1.03932631e+00] [ 2.78595775e-01 4.09119368e-01 4.10845548e-01 ... -9.96813830e-03 9.74235684e-02 2.45425716e-01] ... [ 1.23343870e-01 7.22536027e-01 3.37795287e-01 ... -4.69359934e-01 1.32453174e-03 -1.24940045e-01] [-2.14091763e-01 2.28024274e-01 -3.93800020e-01 ... 3.63317430e-02 -2.49672886e-02 -2.29785934e-01] [-3.86252105e-01 4.13916111e-01 -3.14999044e-01 ... 3.76987666e-01 -3.77271712e-01 -6.51893198e-01]]] [[[-1.94984004e-01 -8.47130194e-02 -9.14369822e-02 ... 3.51439983e-01 1.77098259e-01 -1.49854317e-01] [ 1.18090749e-01 1.80174023e-01 -1.41221166e-01 ... 1.20196052e-01 -1.07976899e-01 1.48399889e-01] [ 3.14036846e-01 -6.49239868e-02 3.64030004e-02 ... 1.65910095e-01 9.33004357e-03 1.53631717e-01] ... [ 2.03553930e-01 -4.24882025e-02 5.09478003e-02 ... 1.94454148e-01 -1.50666520e-01 1.70442715e-01] [ 1.80471074e-02 -7.07850307e-02 1.83618560e-01 ... -1.40212625e-01 3.02232951e-01 2.97247291e-01] [ 6.36087805e-02 1.58610746e-01 -2.01184419e-03 ... 1.74185902e-01 9.19258967e-02 9.81257707e-02]] [[-3.99001390e-02 7.08097965e-02 1.12563642e-02 ... -1.36827435e-02 2.38373861e-01 -1.66185442e-02] [-2.37633986e-03 -1.22928716e-01 1.97472703e-02 ... -6.50758967e-02 -3.18305045e-01 1.81182057e-01] [ 2.42874976e-02 -1.33724943e-01 -1.08559448e-02 ... -4.92973290e-02 2.22670343e-02 1.18992832e-02] ... [ 1.93630725e-01 1.47374004e-01 -2.33573332e-01 ... -7.90818408e-02 -1.37033060e-01 -8.07615295e-02] [ 1.32156491e-01 -4.29220162e-02 -1.26393124e-01 ... 4.78351340e-02 7.27349967e-02 1.56562269e-01] [ 7.52191767e-02 -1.36701137e-01 -2.29949310e-01 ... -9.41726491e-02 2.10985824e-01 -1.43039614e-01]] [[-2.33899355e-01 -4.48919423e-02 3.16241384e-02 ... 1.25801399e-01 -9.53288749e-02 2.74364531e-01] [-2.31564455e-02 -2.47015748e-02 -1.63579598e-01 ... 
-1.07708879e-01 -2.14258835e-01 -1.94789544e-01] [-4.50625084e-02 -4.73142527e-02 2.26830766e-02 ... 8.97025503e-03 3.22832130e-02 -1.21690325e-01] ... [-1.96048282e-02 -2.66552456e-02 1.61188841e-02 ... 4.88427021e-02 -2.31851071e-01 1.76024675e-01] [-1.37776136e-01 5.15605696e-03 2.61577785e-01 ... -6.70505017e-02 2.89757047e-02 8.91641155e-03] [ 2.36774869e-02 -1.26348838e-01 -1.92519978e-01 ... -1.49608627e-01 5.83711341e-02 1.23122774e-01]] ... [[ 1.44697383e-01 5.76193668e-02 1.12385124e-01 ... -1.11809835e-01 -1.01796389e-01 -1.25963856e-02] [-8.24413151e-02 -7.42885023e-02 4.15728129e-02 ... -1.41256928e-01 -2.20416576e-01 7.71854520e-02] [-9.88698080e-02 8.07842389e-02 -3.70389633e-02 ... -7.14942962e-02 3.41940112e-02 -2.46578202e-01] ... [-4.75621372e-02 -6.09306097e-02 2.19256312e-01 ... 1.34880826e-01 1.01005554e-01 -7.44442805e-04] [ 1.88104417e-02 -5.09625152e-02 7.19477013e-02 ... -3.69401393e-03 1.67300683e-02 2.22660545e-02] [ 1.08399451e-01 -5.15600666e-02 1.59621939e-01 ... 1.08755969e-01 2.18929742e-02 -6.34781569e-02]] [[-1.42780140e-01 -4.57909480e-02 -1.69271063e-02 ... -1.06412834e-02 3.68977189e-02 3.25602409e-03] [-2.79999953e-02 1.09932795e-01 -1.07549414e-01 ... 1.09058782e-01 1.74191073e-01 1.21351220e-01] [ 8.21684375e-02 -1.14859544e-01 -6.03515655e-02 ... 7.17802867e-02 -1.14465646e-01 1.49179608e-01] ... [-4.99648042e-02 -1.75464768e-02 -1.66793689e-01 ... 1.10353135e-01 9.66740698e-02 -1.58162355e-01] [-7.36672208e-02 -1.01150006e-01 3.86285223e-02 ... 4.17301245e-02 -2.59134918e-02 -1.57144129e-01] [ 1.30418211e-01 4.10895832e-02 5.64800091e-02 ... -1.17069244e-01 1.10075094e-01 -8.64036530e-02]] [[-5.04305474e-02 1.53139979e-01 5.97991981e-02 ... -5.97339049e-02 4.79264744e-03 7.32614323e-02] [-1.34344786e-01 -2.72834003e-01 -7.16876537e-02 ... 1.43019065e-01 -5.53719178e-02 -1.74210057e-01] [-7.37816188e-03 -9.00493264e-02 -1.05483890e-01 ... -4.06668335e-02 3.14798266e-01 -1.27490059e-01] ... 
[-5.35100512e-02 -1.05290838e-01 -1.29161030e-01 ... 7.16560557e-02 1.95981055e-01 1.09223805e-01] [ 3.43306847e-02 -7.62494877e-02 9.78990272e-02 ... 3.12896669e-02 -1.19503267e-01 2.64296662e-02] [-1.04881085e-01 1.55094951e-01 -1.87966645e-01 ... -1.90584496e-01 1.05995305e-01 2.45871637e-02]]] [[[-2.85801589e-01 -3.38137001e-01 -4.79462653e-01 ... 2.66754478e-01 -1.25167704e+00 1.95461556e-01] [ 8.43669713e-01 -6.24013580e-02 -6.98366880e-01 ... -7.84383833e-01 -6.98873401e-02 -4.56408799e-01] [ 3.08292866e+00 7.52130449e-01 5.34429848e-01 ... 2.73615152e-01 6.64533600e-02 1.95811298e-02] ... [ 8.80933285e-01 -3.42656732e-01 -9.29725841e-02 ... 9.31684554e-01 -4.73987639e-01 3.00622284e-02] [ 3.19370806e-01 1.15691563e-02 -4.45823580e-01 ... 4.68649752e-02 2.34591126e-01 2.81178147e-01] [-4.06240016e-01 6.21593058e-01 7.16046453e-01 ... -8.02828610e-01 1.04327574e-01 2.64831632e-01]] [[-4.71251607e-01 3.86032790e-01 -1.22540021e+00 ... 1.50315297e+00 -7.10460590e-03 -2.40607798e-01] [ 5.38757503e-01 -4.76589769e-01 3.58389020e-01 ... 6.67082787e-01 6.76650226e-01 1.18322694e+00] [-8.35492909e-01 1.87686712e-01 -3.69505346e-01 ... 3.81518640e-02 -9.93460774e-01 -5.50040126e-01] ... [-6.98922217e-01 1.48397163e-01 9.29214418e-01 ... 4.71131265e-01 -5.96093893e-01 9.36149061e-02] [ 5.50738394e-01 4.71155256e-01 -7.94188738e-01 ... -1.48174033e-01 2.03701600e-01 -4.30741340e-01] [-3.01487029e-01 2.26685166e-01 -8.17006588e-01 ... -1.05869427e-01 -2.97887367e-03 1.00486231e+00]] [[-1.11175597e+00 1.32679665e+00 1.78469509e-01 ... -5.79704940e-01 -5.03245771e-01 -6.00492835e-01] [-5.22406995e-01 1.00323081e+00 1.81911576e+00 ... -1.92235827e-01 6.47587061e-01 -5.86743772e-01] [ 1.51581794e-01 7.54288018e-01 8.56121838e-01 ... 3.67852628e-01 5.60657308e-02 5.59566543e-03] ... [ 9.18650210e-01 -9.83692527e-01 9.51935127e-02 ... 5.65084741e-02 1.97169423e-01 -1.19062889e+00] [-2.59726308e-02 -2.19561785e-01 3.70473228e-02 ... 
1.56882390e-01 3.39333683e-01 -2.85778224e-01] [ 3.50972921e-01 2.09941231e-02 -1.04282749e+00 ... 3.61616313e-01 1.09722567e+00 -2.51301467e-01]] ... [[-2.57512331e-01 -1.80049151e-01 2.52308756e-01 ... -3.88622850e-01 -2.11990699e-01 2.85361558e-01] [ 4.93238628e-01 5.61903298e-01 7.30240285e-01 ... 2.37001851e-01 9.37728465e-01 -3.12993303e-02] [-2.09076911e-01 -1.16168153e+00 -6.45982742e-01 ... 1.29464000e-01 3.96139503e-01 -6.48490787e-01] ... [ 9.23760176e-01 -4.08674008e-04 -6.16155803e-01 ... -6.21139944e-01 2.14703873e-01 3.88108015e-01] [-1.17747031e-01 5.34377337e-01 -1.96250618e+00 ... -6.44365132e-01 3.47622067e-01 3.66968274e-01] [ 2.28672586e-02 2.47165918e-01 6.57033801e-01 ... 7.84333587e-01 -9.45631146e-01 -6.40946925e-01]] [[ 1.64750278e-01 -9.06779766e-02 4.75969285e-01 ... -1.39729138e-02 1.22695558e-01 -9.50072050e-01] [ 1.61434352e+00 7.06178963e-01 1.41694114e-01 ... 3.95949930e-02 -2.44945377e-01 3.89129907e-01] [-6.16889536e-01 1.63777635e-01 7.45724961e-02 ... 5.95087171e-01 -1.18142925e-01 -2.49366015e-01] ... [ 3.66523296e-01 -7.64014482e-01 3.74900877e-01 ... 1.73824668e-01 4.87113029e-01 3.87706578e-01] [ 2.53310148e-02 -4.39939409e-01 9.66598019e-02 ... -7.37408936e-01 -6.90560400e-01 -5.46024382e-01] [ 1.10553518e-01 4.42616910e-01 -6.41751885e-01 ... 2.03522816e-01 -4.18678641e-01 -3.59319270e-01]] [[-4.21207696e-01 -8.54332894e-02 1.32687163e+00 ... -1.67835325e-01 6.28414378e-02 -4.82008696e-01] [ 8.46871361e-02 -1.17606759e+00 -9.06912684e-01 ... -5.29673755e-01 1.33583570e+00 4.86906499e-01] [ 8.12400162e-01 -5.88688195e-01 4.20963556e-01 ... 8.12207103e-01 -5.18162072e-01 1.11804426e+00] ... [-7.17259288e-01 5.46536267e-01 3.60954314e-01 ... -6.13221943e-01 -1.10179651e+00 -4.24395591e-01] [ 1.15596044e+00 5.76253951e-01 7.91979507e-02 ... -5.27281649e-02 -5.23043096e-01 -3.91453922e-01] [ 1.16387939e+00 3.87347102e-01 -8.32737684e-01 ... 
8.85007024e-01 7.83820450e-01 -1.43182027e+00]]] [[[ 3.56189400e-01 -3.26940194e-02 1.38507366e-01 ... 4.33930159e-02 -1.19780593e-01 -4.51430857e-01] [-5.09811640e-01 5.89459157e-03 -2.81623751e-01 ... 1.59113362e-01 -1.19564377e-01 -2.63214916e-01] [ 4.23784032e-02 -2.61878148e-02 1.26634121e-01 ... -2.51194924e-01 4.45364356e-01 -3.00840199e-01] ... [ 3.74027967e-01 1.57989025e-01 1.42225951e-01 ... -8.51150677e-02 3.32732230e-01 2.70172060e-01] [ 5.12244463e-01 2.84778088e-01 1.71384692e-01 ... -5.99628091e-01 -1.98430479e-01 3.58732082e-02] [ 6.11223094e-02 -1.28111085e-02 5.20038642e-02 ... 3.35053504e-01 3.41036618e-01 4.18996960e-01]] [[ 3.31376046e-02 3.06968927e-01 1.83339730e-01 ... 1.79329112e-01 1.69423789e-01 2.49343589e-01] [ 5.62132776e-01 9.29055393e-01 2.23308474e-01 ... -7.28013217e-02 1.35535911e-01 1.42860487e-01] [ 3.61924022e-01 -1.76238790e-01 -4.92563248e-01 ... 3.73107433e-01 -3.51115286e-01 7.47474134e-01] ... [-1.43136546e-01 3.09887022e-01 -4.45808992e-02 ... 3.26789320e-01 3.70520115e-01 1.52121395e-01] [ 1.67438969e-01 5.20451516e-02 -3.38161558e-01 ... -3.63589525e-02 -7.55048320e-02 1.30054250e-01] [-3.30063373e-01 4.31535780e-01 -5.35716340e-02 ... -2.65911996e-01 1.81050897e-01 2.24417254e-01]] [[-1.60040051e-01 2.85483301e-01 2.83828020e-01 ... -1.88293103e-02 -3.22836846e-01 3.02403979e-02] [ 2.15162113e-01 4.15246904e-01 7.72624388e-02 ... -7.31121540e-01 8.53515327e-01 -1.47335470e-01] [ 4.99618858e-01 5.58683611e-02 2.30798692e-01 ... 7.92764962e-01 -2.49898523e-01 6.35229498e-02] ... [ 2.81207174e-01 1.49557367e-01 3.98138225e-01 ... 4.04750496e-01 1.24735259e-01 -6.86949492e-02] [ 2.81469643e-01 -4.55293581e-02 -3.04831862e-01 ... -3.74513626e-01 -3.93538773e-01 -7.90149495e-02] [ 5.81588328e-01 3.28544885e-01 1.78973898e-01 ... 7.36913383e-01 -1.66387275e-01 1.04345955e-01]] ... [[ 3.30526888e-01 -1.23793431e-01 3.90427351e-01 ... 
1.26112074e-01 -3.15217942e-01 3.00984621e-01] [-1.51501074e-01 -2.42094882e-02 6.72307849e-01 ... -5.96427262e-01 9.76902992e-02 -4.01163623e-02] [ 4.35904235e-01 1.80547118e-01 -2.87047535e-01 ... -5.83594739e-01 6.05357945e-01 5.16639888e-01] ... [-3.28949243e-01 8.24138299e-02 1.97092950e-01 ... 7.12817982e-02 -1.03850655e-01 2.02004403e-01] [-2.36156449e-01 3.85127403e-02 -3.65362108e-01 ... -2.67120842e-02 3.27632010e-01 -1.53916150e-01] [ 2.02696383e-01 -8.13034326e-02 1.55857995e-01 ... 1.83217898e-01 -6.44088387e-02 -4.97733578e-02]] [[-2.99179018e-01 2.19835937e-01 1.30944774e-02 ... 1.53206140e-01 -2.18934268e-02 -1.81374058e-01] [ 2.19142139e-02 9.27342027e-02 -3.38134766e-01 ... 4.56589878e-01 4.46676314e-01 -3.77452672e-02] [-1.01892743e-02 -6.87607974e-02 -1.19817071e-01 ... 1.02939479e-01 9.99829769e-02 2.05606923e-01] ... [ 1.32034540e-01 -1.95712730e-01 -1.32011361e-02 ... -5.34784257e-01 3.65243614e-01 4.67349812e-02] [ 1.04362994e-01 6.02504432e-01 -1.60392165e-01 ... 3.54253769e-01 2.57308781e-01 1.96910486e-01] [-3.86177540e-01 -5.60137212e-01 -2.95664757e-01 ... 3.44329208e-01 2.18303502e-01 1.31110787e-01]] [[ 5.82771420e-01 -3.46382648e-01 -5.05990498e-02 ... 2.09635139e-01 2.10918695e-01 -3.07461284e-02] [ 2.69997418e-01 -2.84277517e-02 6.42955601e-02 ... -1.35047302e-01 -2.30312526e-01 6.01825081e-02] [ 2.84049492e-02 -1.43252313e-02 -1.95946991e-01 ... 6.51266500e-02 -2.75215387e-01 1.09427907e-02] ... [ 8.96305203e-01 6.18467152e-01 -3.71885657e-01 ... -3.95842224e-01 4.90347296e-01 4.72313054e-02] [-6.78595454e-02 3.23831737e-02 -1.06294855e-01 ... 6.38539255e-01 2.68697947e-01 4.11344677e-01] [-1.36297569e-01 -2.96219528e-01 -4.49246556e-01 ... -4.60148811e-01 5.90786815e-01 -6.35847926e-01]]]] [[[[ 2.00595304e-01 3.06926727e+00 -1.90309000e+00 ... 9.30885196e-01 -4.64029878e-01 -2.15489817e+00] [ 1.91555601e-02 -2.31375241e+00 3.11151266e+00 ... 
-2.46857738e+00 1.75502634e+00 1.18475527e-01] [ 3.79596293e-01 1.53244257e+00 2.16719985e+00 ... -1.26167727e+00 -1.03786588e+00 -9.11371410e-01] ... [-2.92229819e+00 7.63466537e-01 1.25785160e+00 ... 7.20597208e-02 -2.94671750e+00 -1.46325803e+00] [ 6.67970777e-01 2.22471738e+00 -4.36802089e-01 ... 1.83941615e+00 3.81331420e+00 2.50880337e+00] [ 3.32739234e-01 2.59684038e+00 2.05667806e+00 ... -2.19713021e-02 -9.20399725e-01 2.95978218e-01]] [[ 1.49716765e-01 -3.54973793e-01 -1.85676783e-01 ... -7.94703722e-01 -7.15323210e-01 1.29076231e+00] [ 9.25917685e-01 -1.95549786e+00 -3.02804327e+00 ... -2.62357068e+00 -2.82626152e+00 -8.73866200e-01] [ 9.68621969e-01 3.60398507e+00 -1.71065405e-01 ... -1.67052579e+00 -1.06731899e-01 -6.14888012e-01] ... [-8.74978423e-01 -4.09273386e+00 -8.89502048e-01 ... 3.41381025e+00 -1.47827876e+00 -6.03739679e-01] [-9.26460624e-01 -2.57834315e-01 1.78686976e+00 ... 9.42561030e-01 1.87651664e-01 -4.37615782e-01] [ 1.79882646e+00 -1.23616052e+00 -8.64956617e-01 ... 7.26745278e-02 -6.71791732e-01 1.88665092e+00]] [[-1.89685798e+00 -1.79385221e+00 -2.23870468e+00 ... 3.94357920e+00 7.46644974e-01 -2.27347469e+00] [-8.65399063e-01 -2.41407230e-02 1.62606382e+00 ... -3.10615301e-01 1.93190232e-01 8.30525279e-01] [ 1.90689921e-01 3.34330857e-01 1.82777548e+00 ... 1.88358581e+00 -7.82541811e-01 -1.51245192e-01] ... [ 2.43038988e+00 -1.13023865e+00 5.74710608e-01 ... -2.32115798e-02 2.14413953e+00 9.27069008e-01] [-1.69866070e-01 -7.16713250e-01 4.78990227e-01 ... 2.40943745e-01 1.26116562e+00 3.05534124e-01] [-3.59891146e-01 2.58413330e-02 -9.72347975e-01 ... 3.33579108e-02 -9.17629421e-01 -1.17012513e+00]] ... [[ 7.44358182e-01 -2.29628634e+00 -6.42538965e-02 ... 4.84550118e-01 1.02494538e+00 2.90802605e-02] [-1.80870855e+00 2.13779044e+00 -1.50400186e+00 ... 1.89993429e+00 -1.34652972e+00 -5.56695700e-01] [ 1.68526983e+00 3.16723204e+00 -1.02009535e+00 ... 2.44747901e+00 -1.32296431e+00 1.44396424e-01] ... 
[-1.56383467e+00 7.99440861e-01 -1.13228464e+00 ... 4.23255742e-01 -1.78731370e+00 -5.28745830e-01] [-2.27925324e+00 -1.99747294e-01 7.39404678e-01 ... 5.76416925e-02 3.81117433e-01 -8.92474234e-01] [ 1.88096321e+00 -2.02603745e+00 5.31838477e-01 ... 3.73287868e+00 -1.40040982e+00 4.80962336e-01]] [[-2.95855999e+00 -7.19222486e-01 -2.79550433e+00 ... -5.03725827e-01 -8.66324365e-01 -1.14659739e+00] [-4.83605891e-01 2.36698359e-01 -3.34836960e+00 ... 6.51263177e-01 3.31442618e+00 -8.20003271e-01] [ 2.56473541e+00 2.00755930e+00 7.47169495e-01 ... -1.82965302e+00 2.13619798e-01 1.97424293e+00] ... [-3.11161709e+00 -3.73072743e-01 -1.02955675e+00 ... -3.51013482e-01 -2.22797751e+00 -2.56927937e-01] [-5.08750379e-01 -1.95114970e+00 -1.71293199e+00 ... 2.63213825e+00 -7.10778058e-01 -2.44250864e-01] [ 2.03410059e-01 4.50034469e-01 3.04791272e-01 ... 6.73399985e-01 5.82058370e-01 2.82260805e-01]] [[ 2.61699080e-01 5.95610738e-01 -5.46864092e-01 ... 1.26168644e+00 -3.10443568e+00 -1.04037118e+00] [ 2.28848085e-02 1.11132610e+00 -8.88347745e-01 ... 4.94951382e-02 1.44477654e+00 -1.28136218e+00] [-2.39631343e+00 -5.73972821e-01 -1.39341462e+00 ... 9.24309373e-01 3.65132481e-01 9.91552949e-01] ... [-1.65270317e+00 -9.51035395e-02 -9.23169315e-01 ... 2.11789632e+00 -8.56688440e-01 1.45995677e+00] [ 1.53947735e+00 1.32492614e+00 -1.61918342e+00 ... 2.47692108e+00 -8.40143442e-01 2.46504217e-01] [-1.88054121e+00 9.02508736e-01 2.29069996e+00 ... 1.78831756e+00 2.05665684e+00 -1.52588511e+00]]] [[[-2.17143357e-01 3.42660457e-01 -8.53500664e-02 ... -2.26011239e-02 -2.78226025e-02 5.35668768e-02] [ 2.46530056e-01 -1.41675159e-01 3.19013655e-01 ... -1.92598417e-01 -2.10322127e-01 -3.41422141e-01] [ 2.97954023e-01 1.32913932e-01 -1.75895452e-01 ... 4.72109735e-01 -7.98270851e-02 1.68372169e-01] ... [-4.22765404e-01 1.36696115e-01 -2.07933132e-02 ... -2.52012789e-01 -2.75797714e-02 -4.39721912e-01] [-2.33477324e-01 -2.18080819e-01 -4.87376563e-02 ... 
7.42786657e-03 1.10799916e-01 3.65900755e-01] [ 5.18283434e-02 1.48329407e-01 -3.85146558e-01 ... -7.46214613e-02 2.68819064e-01 2.42454857e-01]] [[ 2.50650853e-01 -3.56522828e-01 -1.52956452e-02 ... -6.20654486e-02 -4.01800424e-02 -1.56547159e-01] [-6.91798627e-02 2.95761466e-01 -9.39280465e-02 ... 2.44377226e-01 2.72624731e-01 -1.06113806e-01] [-3.38741034e-01 -1.47237644e-01 -2.62460053e-01 ... 3.31098676e-01 4.35973823e-01 6.00290775e-01] ... [ 6.49330616e-02 1.17364854e-01 -1.91233698e-02 ... -2.66567826e-01 9.66848359e-02 2.20470071e-01] [ 5.63266754e-01 2.24164903e-01 -4.84351158e-01 ... 8.65942333e-03 -1.09733701e-01 6.61680162e-01] [ 1.03507914e-01 -6.51618600e-01 -1.17585726e-01 ... 2.62340419e-02 -3.60409282e-02 2.76787937e-01]] [[ 5.30238986e-01 3.92365038e-01 6.54792339e-02 ... -1.16331361e-01 1.91190012e-03 -2.51179278e-01] [ 8.21994692e-02 -1.88476108e-02 -2.36123770e-01 ... 3.22288811e-01 1.61077335e-01 -2.99900293e-01] [-4.42306519e-01 1.87717959e-01 2.04562768e-01 ... -1.52000383e-01 -1.06796727e-01 -3.26010644e-01] ... [ 2.67308563e-01 8.50236267e-02 1.69203743e-01 ... -6.69827834e-02 -3.87248188e-01 5.07360280e-01] [-2.03532547e-01 9.21131205e-03 1.10535562e-01 ... 3.92335713e-01 1.83743596e-01 -1.18151046e-01] [-1.80349424e-01 -2.38387078e-01 -3.49475712e-01 ... -3.95717859e-01 2.62404025e-01 5.37269354e-01]] ... [[-9.01228860e-02 -1.96620300e-01 7.39841610e-02 ... 3.26288521e-01 1.28907152e-02 -3.72330636e-01] [ 5.03467619e-01 -6.21493720e-02 -1.48166582e-01 ... 2.43136033e-01 1.44456089e-01 5.13501465e-02] [ 1.72852665e-01 -3.86441767e-01 3.55830699e-01 ... 1.59565046e-01 2.69137800e-01 -3.33888680e-02] ... [-3.70341480e-01 1.01478562e-01 1.04933068e-01 ... 6.80973455e-02 -4.50236872e-02 2.09895626e-01] [-1.89509779e-01 1.13091968e-01 -2.91563660e-01 ... 1.77885890e-01 2.80548424e-01 3.37760150e-01] [ 1.81507140e-01 -5.06634675e-02 -2.14524224e-01 ... 
4.23511341e-02 -2.30428666e-01 1.78515360e-01]] [[ 1.45275652e-01 -3.41004819e-01 -3.08184594e-01 ... 3.82078230e-01 2.80456036e-01 1.81572869e-01] [ 6.90809786e-01 1.06352389e-01 4.14895080e-02 ... 1.96351185e-01 -1.78534865e-01 -3.52964312e-01] [-4.83242244e-01 2.73221061e-02 -3.25065898e-03 ... 2.23642468e-01 1.34616822e-01 -1.29868582e-01] ... [ 5.04558124e-02 9.73413661e-02 -3.76234315e-02 ... -2.86124051e-01 -7.23326132e-02 6.42550528e-01] [ 1.09634213e-02 2.38765687e-01 -1.13085523e-01 ... -1.40775815e-01 -2.07100794e-01 4.79040900e-03] [-9.08936635e-02 -3.58732998e-01 1.82042420e-02 ... -4.97268103e-02 -2.21896857e-01 -2.51457453e-01]] [[-2.24888116e-01 -1.60981059e-01 2.67272741e-01 ... 3.20278525e-01 9.70749781e-02 -1.67998329e-01] [-1.47323832e-01 -4.38702926e-02 8.14189464e-02 ... 1.81137800e-01 -2.74741620e-01 2.79719699e-02] [ 1.07687727e-01 -4.40848665e-03 4.42821234e-02 ... 1.75367326e-01 9.61327255e-02 1.01227380e-01] ... [ 1.29078925e-01 1.41913623e-01 -3.94382119e-01 ... -1.88242849e-02 -3.12773108e-01 7.43718212e-03] [-9.27571487e-03 2.23875776e-01 4.04441476e-01 ... 2.41246283e-01 1.75342217e-01 4.11519855e-01] [ 2.70586312e-01 -2.87675261e-01 -6.14126958e-02 ... 6.22748025e-02 -2.13455949e-02 8.23163241e-02]]] [[[-1.98140427e-01 4.50723290e-01 -7.05006003e-01 ... 7.53629208e-02 -1.87369540e-01 4.45828021e-01] [ 5.87994456e-01 6.95638418e-01 1.05956234e-02 ... -3.68450820e-01 -4.60535377e-01 4.65928763e-01] [ 2.80478507e-01 -8.26384798e-02 5.00499964e-01 ... -1.61461949e-01 4.48927343e-01 -9.65630859e-02] ... [ 5.24656236e-01 1.00453429e-01 -5.49218105e-03 ... -2.51441836e-01 8.25474858e-01 2.32960582e-01] [-8.94306302e-02 -6.05411828e-01 -6.80035591e-01 ... -3.01282015e-02 2.72855964e-02 2.12023944e-01] [-3.34228277e-01 -1.12776093e-01 2.05165312e-01 ... 6.52516186e-01 2.28454858e-01 -4.01687622e-01]] [[-1.63838208e-01 -9.70395431e-02 4.78832573e-01 ... 
2.62111664e-01 3.57266843e-01 -1.45603076e-01] [-2.04914704e-01 5.04778139e-02 -1.19442619e-01 ... 5.51093102e-01 -4.59393889e-01 -1.32000491e-01] [-1.07705882e-02 1.18031897e-01 -2.71834940e-01 ... -4.74543661e-01 -3.06106955e-01 -4.12090212e-01] ... [-4.36959118e-01 1.77725092e-01 -4.51022476e-01 ... 1.48781568e-01 -1.66621089e-01 4.14528958e-02] [ 1.55002847e-01 2.15688214e-01 -6.81310892e-01 ... 1.52124211e-01 -5.84505379e-01 -1.83650941e-01] [-1.19807385e-02 2.34468371e-01 5.17147660e-01 ... 4.99999583e-01 1.49581537e-01 5.06319821e-01]] [[-2.69391119e-01 2.31961027e-01 1.60975516e-01 ... 2.27284096e-02 8.68568361e-01 -7.42244065e-01] [ 9.82762128e-02 -2.18013316e-01 1.92297116e-01 ... 7.93186501e-02 -4.96794909e-01 2.16035634e-01] [-6.34020343e-02 2.21337199e-01 -1.38228089e-01 ... -1.72661290e-01 -3.06163520e-01 1.98848590e-01] ... [-6.88911825e-02 -2.01379016e-01 -1.20654412e-01 ... 5.96976995e-01 -4.19424117e-01 3.34193677e-01] [ 1.14162482e-01 -8.24071541e-02 2.71901429e-01 ... -1.92948386e-01 -3.55888844e-01 7.21737564e-01] [ 3.26404750e-01 -1.14204235e-01 1.32222518e-01 ... 1.41991019e-01 -1.33889660e-01 2.36818597e-01]] ... [[-2.02795848e-01 2.34998137e-01 3.10420364e-01 ... -2.03912675e-01 3.53588670e-01 1.55037820e-01] [-2.88332477e-02 2.00464666e-01 -7.08137304e-02 ... 3.86375934e-02 -1.27148435e-01 3.71025987e-02] [ 3.93524691e-02 1.99324504e-01 2.87410617e-02 ... 1.04278982e+00 -2.03126341e-01 -1.73621073e-01] ... [-3.90774518e-01 -1.06477626e-01 4.75230724e-01 ... -4.83994722e-01 -8.08530748e-02 -6.94868982e-01] [ 1.40790552e-01 -1.35031164e-01 -3.46401244e-01 ... 4.71224666e-01 7.51273334e-02 1.97301507e-01] [-4.54884954e-02 6.23582900e-01 -1.20037922e-03 ... 7.02970698e-02 -5.75961351e-01 4.39320415e-01]] [[-3.25600237e-01 -5.09826362e-01 -2.16966048e-01 ... -3.13192695e-01 -6.87670708e-01 6.66614175e-01] [ 3.39050710e-01 -6.41672432e-01 1.17901132e-01 ... 
5.01159906e-01 3.98835778e-01 8.16542432e-02] [-1.41826626e-02 -3.54579836e-02 -2.72228777e-01 ... -8.25522125e-01 4.25218880e-01 2.21320447e-02] ... [-8.20663095e-01 3.99525553e-01 -4.58235681e-01 ... 5.56057215e-01 -2.73670137e-01 -1.66125998e-01] [-2.70001113e-01 5.61618395e-02 -9.45264921e-02 ... 4.69630733e-02 -1.61539137e-01 -3.68349880e-01] [ 3.16599250e-01 -3.84195626e-01 -2.68088877e-02 ... -2.47773349e-01 5.74737564e-02 -5.37210941e-01]] [[ 2.19177216e-01 -8.36036444e-01 8.71437669e-01 ... -5.04986569e-02 3.00578177e-02 3.73403013e-01] [-3.14712763e-01 -7.16691911e-01 -1.44255464e-03 ... -5.62966228e-01 4.20891464e-01 -1.24805436e-01] [ 2.79052090e-02 3.72332811e-01 3.17563295e-01 ... -2.93967009e-01 -2.27806196e-01 2.22288385e-01] ... [ 3.75693947e-01 4.90820646e-01 -5.02162218e-01 ... 1.01226822e-01 -8.96970555e-02 2.85175115e-01] [ 2.91228980e-01 -6.93684584e-03 4.25941078e-03 ... 3.49409617e-02 -6.56565309e-01 4.35609639e-01] [ 2.51513332e-01 -3.23228866e-01 8.78005922e-02 ... 3.38994294e-01 5.00316024e-01 6.22704685e-01]]] [[[-2.64743924e-01 -5.49179502e-02 -3.13659795e-02 ... 1.80573776e-01 1.23657942e-01 7.81541467e-02] [-2.68358856e-01 4.30755094e-02 -7.65081635e-03 ... -1.63769081e-01 -1.17770962e-01 -2.00935416e-02] [ 6.26912713e-02 8.07329267e-02 -3.00110374e-02 ... -6.66914284e-02 -1.33517370e-01 1.27561033e-01] ... [ 6.26374185e-02 -1.34907765e-02 -1.15764365e-01 ... 1.48916006e-01 2.47008167e-02 -1.00431606e-01] [ 3.06584746e-01 -8.49123746e-02 4.82049882e-02 ... -9.24828351e-02 7.64189428e-03 2.17972949e-01] [ 5.75038716e-02 -1.05094388e-01 2.13618591e-01 ... 9.10582244e-02 -3.90718356e-02 2.48846799e-01]] [[ 1.50896519e-01 4.54578288e-02 1.12399817e-01 ... 2.70243734e-01 -1.28166571e-01 -6.19783551e-02] [ 1.31419301e-01 -2.12143153e-01 -6.56885430e-02 ... 1.01825796e-01 4.54444345e-03 -2.25614637e-01] [-5.78959547e-02 -9.99498367e-02 1.60837635e-01 ... 6.93364814e-02 1.26338765e-01 -6.41512573e-02] ... 
[-7.53252441e-03 -8.27191249e-02 -6.22128919e-02 ... -2.10343778e-01 1.08303018e-01 6.28266111e-02] [ 4.84835654e-02 -4.45678644e-02 1.36678338e-01 ... -3.61037478e-02 -2.04179972e-01 9.84448791e-02] [ 7.43563869e-04 -4.47381511e-02 6.05919473e-02 ... -1.34239361e-01 -1.28024563e-01 5.68763316e-02]] [[ 5.16826212e-02 1.76366847e-02 1.95426922e-02 ... -2.15701893e-01 7.26937491e-04 1.45166233e-01] [ 3.15643102e-02 -3.25770350e-03 -6.28985539e-02 ... 2.35033616e-01 1.39516711e-01 -1.64902940e-01] [-5.76927364e-02 5.61338738e-02 4.05503288e-02 ... 9.07379985e-02 -2.72213012e-01 -2.91187942e-01] ... [ 1.92162767e-02 1.75157934e-02 -1.50853256e-02 ... -8.98672342e-02 1.19670585e-01 9.40242186e-02] [-7.54212365e-02 -1.09194569e-01 -2.20935225e-01 ... 3.72822993e-02 -6.45623133e-02 -1.61127914e-02] [-2.40579799e-01 -2.25899115e-01 1.15195267e-01 ... 1.49190336e-01 -5.62429763e-02 8.71402677e-03]] ... [[-3.61191034e-02 3.78478765e-02 -1.24098482e-02 ... 2.32929960e-02 4.92351651e-02 -7.19943643e-02] [ 7.93292075e-02 9.30267498e-02 1.20247550e-01 ... 1.12310164e-01 1.41371906e-01 -7.78843835e-02] [-5.56379743e-03 -9.59409401e-02 -1.63519055e-01 ... 3.74499597e-02 -2.12387487e-01 4.91002761e-02] ... [-1.43335849e-01 6.26634657e-02 -1.69343993e-01 ... -7.01295435e-02 -1.44127503e-01 -1.60942063e-01] [-2.95498576e-02 1.02719240e-01 9.86656025e-02 ... 2.15774495e-02 -1.10908009e-01 -1.52746886e-01] [-1.32059216e-01 1.00049108e-01 -1.89153347e-02 ... 9.13209990e-02 1.61163911e-01 -4.29909714e-02]] [[-1.27773270e-01 7.83670992e-02 1.96216702e-01 ... 1.50288686e-01 1.53378248e-01 -7.18015656e-02] [-2.67877877e-01 -6.67154044e-02 -5.68203591e-02 ... -1.83494225e-01 -1.33037210e-01 -1.92487568e-01] [ 4.41695787e-02 -7.30258692e-03 -7.84018636e-03 ... 2.25177947e-02 -1.57181099e-01 5.62842935e-02] ... [ 1.33785129e-01 -1.40778556e-01 -2.55097393e-02 ... -1.84255734e-01 -2.56767944e-02 4.42384593e-02] [-1.86382327e-02 -1.20228589e-01 3.55214089e-01 ... 
2.83432957e-02 -3.33635062e-02 -1.80108219e-01] [ 3.17439973e-01 -1.28018126e-01 9.45731997e-02 ... -1.38430566e-01 1.94484457e-01 -8.46124813e-02]] [[ 5.17324954e-02 3.62777822e-02 3.68930966e-01 ... 6.92467615e-02 -9.62314084e-02 3.43460411e-01] [-1.78898290e-01 -5.86817041e-02 1.28916726e-01 ... -1.05707012e-01 -2.34629605e-02 7.04271719e-02] [-3.14418525e-02 -1.85820401e-01 2.55895462e-02 ... -4.94117886e-02 -2.22933188e-01 -2.14984715e-01] ... [-8.14278871e-02 -1.08865544e-01 9.38413106e-03 ... 6.40011579e-02 -1.08191483e-02 2.99092382e-02] [ 1.18488498e-01 -1.03770271e-01 -1.06627777e-01 ... 5.56436889e-02 1.01308942e-01 2.87207723e-01] [ 1.06988959e-02 1.08759925e-01 -1.95605848e-02 ... -1.98071882e-01 -7.40311965e-02 -3.80633883e-02]]] [[[-1.66954443e-01 -1.18786287e+00 -1.61199474e+00 ... -8.79512012e-01 7.49719962e-02 1.23284793e+00] [ 7.28958666e-01 1.00475824e+00 7.67517626e-01 ... 1.13505280e+00 -2.91409403e-01 3.18962872e-01] [ 1.26841918e-01 1.04863428e-01 -3.09843034e-01 ... -8.12631726e-01 -6.95084393e-01 3.52242321e-01] ... [-2.57919192e-01 5.59109211e-01 -3.49879950e-01 ... -2.58089721e-01 1.18115008e-01 8.70702192e-02] [ 4.15339231e-01 1.49630040e-01 -4.20233190e-01 ... 9.75269601e-02 -9.98365760e-01 1.67513877e-01] [-1.24212039e+00 -1.08401358e+00 5.93511350e-02 ... -1.21836066e+00 -1.48170888e-01 2.95666307e-01]] [[ 1.15441000e+00 2.16182321e-01 4.20950025e-01 ... -4.64751571e-01 5.97535133e-01 1.30240530e-01] [-1.90523422e+00 -6.90885067e-01 -1.10630274e+00 ... 2.03800035e+00 -1.69874549e+00 8.40930641e-01] [-1.21376812e+00 -4.35735315e-01 -6.79234743e-01 ... -4.10624564e-01 4.32562739e-01 -5.14555693e-01] ... [-7.50518963e-02 -1.36899972e+00 -7.39780545e-01 ... 1.06144834e+00 4.97514307e-01 9.78631228e-02] [ 4.29103285e-01 1.77344620e-01 -1.09724760e+00 ... 1.11006987e+00 -8.24413776e-01 -5.98322272e-01] [-9.05824423e-01 2.71304045e-02 3.29111546e-01 ... 
9.21498146e-03 3.89365047e-01 -5.23562729e-01]] [[-4.05537367e-01 -9.83512923e-02 -2.14733526e-01 ... 6.57114506e-01 -3.01021934e-01 -1.56929016e-01] [ 8.27857316e-01 -4.64384034e-02 4.66784596e-01 ... 3.43060553e-01 -9.35272872e-01 3.57724488e-01] [-6.94731623e-03 1.21913469e+00 9.24382746e-01 ... 3.87173265e-01 -1.37773383e+00 -8.08754981e-01] ... [ 4.41231400e-01 8.08833599e-01 6.62914574e-01 ... 1.00413728e+00 -5.11898577e-01 5.10121644e-01] [ 4.30585027e-01 7.93778598e-02 4.85396534e-02 ... -3.80265176e-01 -5.57571709e-01 -9.23991323e-01] [-6.28229856e-01 8.97262469e-02 2.60915071e-01 ... 1.12270489e-01 5.73836230e-02 -5.90218246e-01]] ... [[ 5.13791859e-01 -3.88056308e-01 -3.62362787e-02 ... -1.43219781e+00 -3.57399970e-01 7.70175517e-01] [-1.02607965e-01 6.14768803e-01 -3.15234989e-01 ... 5.98813519e-02 -6.50915325e-01 6.24716759e-01] [ 7.27957254e-03 -9.91598964e-02 1.14848948e+00 ... -1.42257142e+00 2.77668536e-01 6.23511255e-01] ... [ 3.15039217e-01 -3.63124996e-01 -2.54474372e-01 ... -1.95653811e-01 -3.70756239e-01 2.45389845e-02] [ 5.37126541e-01 1.89674586e-01 -4.72075075e-01 ... -1.28392959e+00 -1.06718278e+00 -6.51889145e-01] [ 5.96334577e-01 -5.16040027e-01 1.59415022e-01 ... 5.64356744e-01 -3.29596996e-01 3.61658446e-02]] [[ 9.34636056e-01 1.54375404e-01 -9.49672982e-02 ... 1.33497834e+00 -3.64375055e-01 -1.05095342e-01] [ 1.87542979e-02 8.13266098e-01 1.09735703e+00 ... -6.92180514e-01 -9.11353111e-01 5.05354345e-01] [-4.24008131e-01 2.13910103e-01 -1.37034789e-01 ... -5.12885511e-01 -4.40358847e-01 9.04514566e-02] ... [ 8.63410354e-01 -1.88408375e-01 -4.82154548e-01 ... -6.54978752e-01 -9.13291946e-02 -4.70482349e-01] [ 7.73295835e-02 -1.25137553e-01 8.56942534e-01 ... -5.71512640e-01 -3.24748456e-03 -6.10219538e-01] [ 1.44150972e-01 -7.09495187e-01 7.25672662e-01 ... 2.47434676e-01 1.23096108e+00 1.18049169e+00]] [[ 8.89575779e-01 1.72624931e-01 2.67026603e-01 ... 
-3.88019439e-03 -1.35427380e+00 -2.04205029e-02] [ 5.75075984e-01 -1.42166829e+00 6.95614278e-01 ... -6.56351209e-01 3.57712418e-01 -7.26050735e-01] [-7.39887878e-02 6.22498393e-01 1.24764264e+00 ... -9.62551773e-01 -4.02571559e-02 -6.82208389e-02] ... [ 5.58496229e-02 7.01937199e-01 -1.65745270e+00 ... -1.76423565e-01 1.02880239e+00 -8.84595335e-01] [-4.93038297e-01 -8.59503627e-01 7.02875078e-01 ... -1.04857713e-01 -5.18462896e-01 -4.59493607e-01] [-6.91501796e-01 -9.93651867e-01 -7.65796244e-01 ... -2.46934816e-01 -3.82414907e-01 7.56430209e-01]]] [[[-9.68315825e-02 -6.95630535e-02 3.11385512e-01 ... 1.03895634e-01 2.92188078e-01 4.84608300e-02] [ 2.27329463e-01 -4.26915735e-02 -6.01585135e-02 ... 2.30514392e-01 -7.59225845e-01 -2.37712577e-01] [ 1.24964416e-01 7.58609548e-02 9.33395922e-02 ... 2.33603850e-01 -2.23384544e-01 -2.80611306e-01] ... [ 2.23873243e-01 1.42486140e-01 2.94930339e-01 ... -7.78500795e-01 -1.33740306e-01 -1.08412348e-01] [-4.20807689e-01 5.88048398e-02 -2.40544498e-01 ... -1.73975170e-01 -1.58509821e-01 5.22105634e-01] [-7.64827728e-02 -3.41168106e-01 1.83284823e-02 ... 3.03895712e-01 -5.56622922e-01 -1.29244417e-01]] [[ 4.31861639e-01 5.47186375e-01 -1.06309861e-01 ... 8.75716150e-01 4.03834246e-02 -3.98627847e-01] [-1.65688112e-01 7.54017234e-01 -1.68094054e-01 ... 2.05705643e-01 2.48517558e-01 -1.42456099e-01] [ 1.22428469e-01 -2.57663578e-01 5.25048256e-01 ... 1.85306847e-01 1.50077477e-01 -8.70344788e-02] ... [ 2.47663662e-01 -1.00255758e-01 -1.18561275e-01 ... -3.75446200e-01 -1.00481167e-01 2.71139622e-01] [-2.75093615e-01 -3.50477099e-01 4.18136328e-01 ... 4.95032892e-02 2.66877770e-01 -1.30577162e-01] [ 1.79920048e-01 -3.50796580e-01 4.81455214e-02 ... 2.00229794e-01 -1.65830646e-02 1.13574035e-01]] [[ 1.32386655e-01 -7.81824589e-01 -4.92215961e-01 ... 2.67047763e-01 -5.41207381e-03 -1.01584032e-01] [-6.54679611e-02 3.87164243e-02 -6.46570802e-01 ... 
8.32144618e-02 3.84423286e-01 -1.95503205e-01] [ 1.37795478e-01 4.72880810e-01 -8.16446692e-02 ... 1.48788333e-01 2.93969065e-01 1.59005255e-01] ... [ 3.95634443e-01 4.34940904e-01 -5.60970664e-01 ... -4.61713791e-01 2.91008592e-01 2.55145460e-01] [ 6.28930509e-01 2.80300260e-01 -5.31148389e-02 ... 3.84738878e-03 1.96826696e-01 -8.36624880e-04] [ 3.33679058e-02 -4.14308816e-01 -6.28695935e-02 ... 6.83684414e-03 -3.94194484e-01 -4.98886764e-01]] ... [[ 2.21946269e-01 2.43961960e-02 2.05969140e-01 ... 3.03004593e-01 -2.72118866e-01 1.31420374e-01] [ 2.67758906e-01 -6.96653798e-02 -4.22924459e-01 ... -4.15014595e-01 5.37457466e-01 3.83675545e-01] [-9.28632095e-02 -4.72643286e-01 -2.24641576e-01 ... 7.36594796e-01 8.53333399e-02 1.31944478e-01] ... [ 2.50217855e-01 2.91089654e-01 1.11830711e+00 ... -2.37580866e-01 -2.71725040e-02 -4.31087434e-01] [ 1.73254684e-01 -1.05775900e-01 2.45898888e-01 ... -1.79777145e-01 4.47658569e-01 3.68678235e-02] [-1.82990715e-01 -5.48392713e-01 -1.20676190e-01 ... -3.13313067e-01 1.93093449e-01 2.85186887e-01]] [[-1.34316877e-01 4.10988718e-01 2.34562069e-01 ... 1.68117955e-01 -2.62814283e-01 1.93253100e-01] [ 2.36034080e-01 2.93550670e-01 -7.14293942e-02 ... -1.25451628e-02 6.99598551e-01 3.31149369e-01] [ 1.96745656e-02 -1.03968039e-01 4.04863089e-01 ... -5.47528684e-01 -1.53762221e-01 2.65761048e-01] ... [-1.48449540e-02 9.24364850e-03 -2.67647594e-01 ... -2.29390398e-01 1.89227164e-01 -4.98181462e-01] [ 1.71080083e-01 1.79753035e-01 -3.28172654e-01 ... -3.22930336e-01 1.14841640e-01 -3.71386707e-02] [-5.03973424e-01 3.70452851e-02 -5.64569384e-02 ... -1.21649951e-01 2.27992963e-02 -1.99135020e-01]] [[ 4.51008886e-01 6.21865869e-01 1.82946883e-02 ... -7.12742805e-02 2.20719203e-01 -4.19004142e-01] [-1.22741178e-01 -3.18011135e-01 -1.94249615e-01 ... -9.20925587e-02 2.18157340e-02 -2.52195280e-02] [-1.58570215e-01 -1.39349744e-01 7.22075403e-02 ... -6.32534802e-01 3.31444681e-01 8.10893953e-01] ... 
[-5.77087998e-01 2.97647417e-01 7.67577440e-02 ... 7.78627694e-02 -7.11534858e-01 2.89472580e-01] [ 1.60116389e-01 -6.04661047e-01 -3.99173275e-02 ... -7.10131824e-02 5.12681901e-01 1.06495330e-02] [ 1.68582067e-01 -6.03907585e-01 -5.31557083e-01 ... -4.51178253e-01 3.01148742e-01 -1.18027158e-01]]]] ... [[[[ 1.47346246e+00 -3.11088037e+00 -9.95486137e-03 ... -1.07805502e+00 1.64747104e-01 9.27473485e-01] [-7.60477185e-01 3.24968410e+00 -4.94238734e-01 ... -1.10002422e+00 -7.09213674e-01 -4.35216397e-01] [ 2.40623331e+00 1.52671254e+00 8.37984443e-01 ... -9.84167576e-01 4.01803762e-01 -9.23324049e-01] ... [ 3.52540910e-01 -9.73593652e-01 -2.99919605e+00 ... 1.23295331e+00 -4.04972762e-01 8.75577688e-01] [-4.07120675e-01 -4.04883564e-01 -3.57397020e-01 ... -9.70596433e-01 3.08281446e+00 -2.08118463e+00] [ 1.29627836e+00 -7.73768663e-01 -1.03913152e+00 ... -3.74707532e+00 2.92913461e+00 5.00241555e-02]] [[ 1.31224966e+00 1.82726085e+00 9.72516984e-02 ... -1.69791770e+00 -7.41402566e-01 1.65641034e+00] [ 4.45407271e-01 2.23514986e+00 1.06471562e+00 ... -1.16513824e+00 2.71527261e-01 1.34257543e+00] [ 9.28275213e-02 2.12578154e+00 -2.72958606e-01 ... -2.93578482e+00 4.69325960e-01 1.56811565e-01] ... [ 1.21584706e-01 -6.61294237e-02 1.38912451e+00 ... -9.91978168e-01 -1.09683371e+00 -1.41463530e+00] [ 6.53866827e-02 1.35879862e+00 1.22350276e+00 ... -2.02702737e+00 2.61694074e+00 1.75371081e-01] [ 3.55327278e-01 1.03710151e+00 5.70328176e-01 ... -3.23168182e+00 4.47247148e-01 -2.02259779e+00]] [[ 8.14328909e-01 -2.13469982e+00 4.89594609e-01 ... 2.01747268e-01 2.46395659e+00 1.43898869e+00] [-7.17710674e-01 -1.25019267e-01 1.52968836e+00 ... -4.93220657e-01 2.08272600e+00 -3.61418426e-01] [ 4.19691168e-02 2.55213916e-01 1.27460325e+00 ... -2.12000817e-01 6.99358463e-01 1.53592873e+00] ... [-2.18258381e-01 -2.83490252e+00 -3.16499650e-01 ... 1.92164969e+00 -2.67163253e+00 2.79773235e+00] [-9.52956200e-01 -1.34709466e+00 6.05240703e-01 ... 
1.27307522e+00 -2.83619070e+00 8.96743476e-01] [-1.19281983e+00 -8.74562144e-01 -1.52776003e-01 ... -3.35416757e-03 -3.00564146e+00 -1.50012004e+00]] ... [[-2.09047699e+00 -1.29220498e+00 -2.08447170e+00 ... -2.09956026e+00 1.34813988e+00 -4.67037201e-01] [-1.84028387e-01 -7.65982628e-01 -1.26578403e+00 ... 3.63336653e-02 -7.79038608e-01 3.17300153e+00] [ 1.43995404e+00 -3.79788995e-01 1.04178166e+00 ... 5.07050529e-02 3.59091349e-02 3.86149772e-02] ... [ 1.89097309e+00 1.33973241e-01 2.86772072e-01 ... -6.01614118e-01 -1.01244211e+00 -3.00356412e+00] [-3.69426042e-01 9.77689549e-02 -1.39082447e-01 ... -3.26508784e+00 -8.94925892e-01 -1.27158988e+00] [ 9.96184349e-01 -5.91250837e-01 -9.04178321e-01 ... 4.38901281e+00 3.97059679e-01 -8.12960267e-01]] [[ 8.05213392e-01 3.27682197e-01 2.90768385e-01 ... 1.31276822e+00 5.87261617e-01 7.79407918e-01] [ 1.29480016e+00 7.64605880e-01 -8.74304026e-03 ... 3.70469749e-01 -1.17583930e+00 1.27471077e+00] [ 2.34573092e-02 1.12509072e+00 2.39907670e+00 ... -1.56059965e-01 -2.46036792e+00 1.81473053e+00] ... [-1.06859088e+00 1.09302056e+00 -1.63452017e+00 ... -6.77516580e-01 -1.12289667e+00 1.03632367e+00] [ 1.06149399e+00 5.09771984e-03 4.87104893e-01 ... 7.14933932e-01 -1.93359876e+00 -3.93237042e+00] [ 7.00021863e-01 2.62020260e-01 4.44303185e-01 ... 1.65836602e-01 8.68102372e-01 -3.29935759e-01]] [[-7.63830900e-01 -1.39155257e+00 1.02147329e+00 ... 8.51248741e-01 2.87553072e-01 7.29436755e-01] [ 2.22894236e-01 -1.08732961e-01 1.41690731e+00 ... -3.14875215e-01 6.46669447e-01 -3.05768818e-01] [-9.59588170e-01 3.69434237e-01 -3.06504607e-01 ... 1.35933745e+00 9.68981683e-01 1.42921960e+00] ... [-1.25323975e+00 -6.41467988e-01 2.67713261e+00 ... -9.61453259e-01 4.03691769e-01 -2.19354957e-01] [ 2.00762129e+00 7.60395408e-01 -7.78756559e-01 ... -2.49259782e+00 -4.11611557e-01 -6.03118062e-01] [-1.31692696e+00 7.30035186e-01 1.28340805e+00 ... 
-2.45606542e+00 7.09104776e-01 -1.46538568e+00]]] [[[-2.24980310e-01 -8.02443802e-01 -1.32362440e-03 ... 6.03396073e-02 1.34737238e-01 3.93585920e-01] [-8.37066919e-02 1.47206306e-01 4.20871116e-02 ... -1.82674661e-01 -8.24327096e-02 -2.20781621e-02] [ 1.24987744e-01 -6.85409069e-01 -1.13137588e-01 ... -3.29650402e-01 -7.83550963e-02 2.30130583e-01] ... [ 4.16316211e-01 -1.92886308e-01 -2.36316502e-01 ... -7.31996298e-02 1.80004492e-01 -1.58942882e-02] [ 1.42066330e-01 -5.95122218e-01 -4.44452576e-02 ... 2.22843349e-01 -5.50354421e-01 -2.36446694e-01] [ 2.00205907e-01 3.01859111e-01 -4.28280711e-01 ... 3.70588690e-01 3.23256701e-01 -1.73289895e-01]] [[-1.06189884e-01 -2.40813270e-01 -2.74740577e-01 ... -2.22187951e-01 -1.74529523e-01 4.20276910e-01] [ 4.78581578e-01 1.32332563e-01 -1.72853202e-01 ... -3.98367375e-01 2.24516749e-01 -5.61950028e-01] [ 3.14543396e-01 2.13085897e-02 -9.10938308e-02 ... -2.40682796e-01 5.47910668e-02 9.19895917e-02] ... [ 1.60519645e-01 -1.45656839e-01 2.87145108e-01 ... -2.52739191e-01 8.36950168e-02 4.38033938e-01] [-2.45471656e-01 1.48150036e-02 1.51647940e-01 ... -3.00968677e-01 -1.49666846e-01 -1.53863177e-01] [-4.48228754e-02 1.41995743e-01 3.19696479e-02 ... 1.47156879e-01 6.88064024e-02 2.01344471e-02]] [[-2.22287580e-01 3.42761487e-01 -8.90651196e-02 ... 1.55350678e-02 -1.84208184e-01 2.38947064e-01] [ 5.87615930e-02 -2.43862972e-01 4.79082949e-02 ... -5.44554949e-01 3.77331562e-02 2.47025415e-02] [ 1.22134663e-01 -3.13141383e-02 3.72036844e-01 ... -1.71884999e-01 -3.89860161e-02 1.62451193e-01] ... [ 8.88871551e-02 -2.25251958e-01 -3.75077248e-01 ... -1.79189116e-01 3.50531369e-01 1.52212501e-01] [ 8.54131356e-02 3.01684558e-01 2.16794983e-01 ... -3.16840947e-01 1.90265074e-01 -5.36229350e-02] [-2.69407362e-01 -2.16093794e-01 -2.59191655e-02 ... 4.97713357e-01 7.45895654e-02 -5.42604029e-01]] ... [[-2.74841696e-01 2.40428910e-01 -2.12027133e-01 ... 
-4.18529809e-01 -4.15742159e-01 1.36057898e-01] [-1.92223534e-01 -1.70468882e-01 4.69193101e-01 ... 5.25189526e-02 -2.29197174e-01 6.77674055e-01] [ 3.20707381e-01 -3.75253618e-01 2.22142056e-01 ... -1.30727485e-01 7.18115866e-02 -1.84603095e-01] ... [-1.70012534e-01 -5.07746413e-02 3.26275319e-01 ... -5.46173379e-02 2.23280340e-01 -1.22773781e-01] [-4.74951208e-01 1.80642512e-02 2.74138916e-02 ... -3.34420145e-01 3.90149355e-01 3.15244459e-02] [ 3.38703066e-01 -1.32732227e-01 -1.16112575e-01 ... 4.64865752e-02 -3.60624157e-02 3.43955219e-01]] [[ 2.57875204e-01 2.04414561e-01 -1.39287293e-01 ... 2.02552706e-01 6.37888312e-02 -1.05543002e-01] [ 7.21440986e-02 -5.63743412e-01 -2.75324285e-01 ... -1.75753921e-01 1.04861438e-01 5.94263710e-02] [-1.98385656e-01 -4.75340456e-01 1.33495614e-01 ... 5.49754024e-01 -9.93331056e-03 1.70323521e-01] ... [ 3.38081539e-01 1.45700201e-01 -3.37987483e-01 ... -3.62755656e-01 5.62047176e-02 -1.45591050e-01] [-3.17175895e-01 2.28409588e-01 5.00909805e-01 ... -1.21006757e-01 1.66647017e-01 -3.79638672e-01] [-3.11604619e-01 1.21551052e-01 -5.53828031e-02 ... -2.19724298e-01 2.54121572e-01 2.59446830e-01]] [[-6.24433756e-01 1.21923191e-02 3.87680143e-01 ... -4.59028482e-02 -4.96779114e-01 -2.76665598e-01] [-3.33580166e-01 2.88826734e-01 -4.39401716e-01 ... -1.03311948e-01 -5.09572268e-01 -2.70554662e-01] [ 5.86532176e-01 2.19659448e-01 -5.84506355e-02 ... -2.05558121e-01 1.58026755e-01 -1.03505760e-01] ... [-2.25011960e-01 6.90620765e-02 3.85553986e-02 ... 1.40723616e-01 -1.90304816e-01 -5.48410356e-01] [-1.03514977e-02 4.72115502e-02 2.50817835e-01 ... 5.66201031e-01 -5.20384252e-01 2.53598839e-01] [-4.15607095e-01 -7.96056986e-02 -1.34337813e-01 ... -1.18724801e-01 3.17886472e-01 -1.35369167e-01]]] [[[-7.04128668e-02 6.01145029e-01 -7.00831506e-03 ... 7.01530993e-01 1.90728623e-02 -1.77648976e-01] [ 3.50358635e-01 1.66110799e-01 1.77410349e-01 ... 
2.29481876e-01 -4.29850757e-01 1.72100604e-01] [-4.23338175e-01 2.49649018e-01 3.29908848e-01 ... 3.99724334e-01 2.34582603e-01 2.53768504e-01] ... [-6.22738451e-02 -2.91074187e-01 -8.60197067e-01 ... 2.02922866e-01 -2.56493807e-01 5.07812381e-01] [-7.97156811e-01 -3.38497847e-01 3.15337449e-01 ... 4.41547066e-01 3.98329616e-01 -1.40418261e-01] [ 1.78139538e-01 4.89971161e-01 1.20219506e-01 ... 3.07543129e-01 1.46270260e-01 5.95563293e-01]] [[-3.50752920e-02 -6.37695014e-01 5.12700453e-02 ... 4.96989712e-02 -1.80868115e-02 -3.15554917e-01] [ 3.82092178e-01 6.65754080e-01 -1.67664498e-01 ... -3.52033198e-01 -2.57480174e-01 6.19540989e-01] [-5.75803339e-01 -2.08207935e-01 1.90047309e-01 ... 2.99576610e-01 -3.71402204e-01 1.77305982e-01] ... [-1.61983535e-01 -2.36445516e-01 2.53014594e-01 ... 7.44084775e-01 -3.22098136e-01 -8.03629398e-01] [ 2.27887467e-01 2.17064738e-01 3.07243764e-01 ... 4.32808459e-01 -6.10299647e-01 -5.91081120e-02] [-1.95884988e-01 -8.64805698e-01 3.84482354e-01 ... -1.35693088e-01 -4.11836207e-01 6.81455314e-01]] [[ 7.02975690e-02 2.00430781e-01 -4.28123809e-02 ... -1.50533602e-01 -1.34228691e-01 -8.41156580e-03] [ 3.72997336e-02 -1.22918494e-01 5.00843644e-01 ... 1.36563495e-01 -2.77011365e-01 3.72412175e-01] [-3.45937639e-01 1.06089532e-01 1.39452547e-01 ... 1.48930043e-01 -4.28323410e-02 -2.66084343e-01] ... [-3.89707446e-01 3.73315871e-01 7.56528020e-01 ... -4.37572330e-01 -3.55922818e-01 -1.38792545e-01] [-2.23345011e-01 1.75312996e-01 4.43768263e-01 ... -3.84671807e-01 -7.20110774e-01 -3.48788977e-01] [ 1.18291050e-01 3.63594070e-02 3.47109109e-01 ... -8.88998628e-01 -8.38586409e-03 -2.53711730e-01]] ... [[ 1.94746181e-01 8.46519589e-01 1.81841865e-01 ... -5.11224866e-01 -4.97269332e-01 -3.83584589e-01] [ 5.34654021e-01 -6.24598823e-02 -1.32987767e-01 ... 3.20651419e-02 -4.35485542e-01 -1.16323465e-02] [ 3.31249177e-01 3.06029767e-01 4.24480706e-01 ... -7.98912439e-03 -9.09638628e-02 4.68496144e-01] ... 
[-3.34780008e-01 -3.69779728e-02 3.52927274e-03 ... 1.88688621e-01 -9.27316248e-02 -5.55929482e-01] [-2.01096550e-01 -8.46073508e-01 -2.49081552e-01 ... 1.05290003e-01 -2.77338177e-02 3.98783870e-02] [ 5.24568148e-02 -1.39696255e-01 7.85881758e-01 ... -5.83020508e-01 1.26501724e-01 -5.23834713e-02]] [[ 2.96247810e-01 -6.67523205e-01 2.73988336e-01 ... 2.38735259e-01 -1.38503537e-01 6.96332514e-01] [ 1.72070533e-01 4.44926411e-01 -2.76960880e-01 ... -1.16674595e-01 -2.97194332e-01 -4.17803913e-01] [ 2.16827825e-01 9.10013318e-02 -1.93014130e-01 ... 6.43106639e-01 -7.17056453e-01 -6.21461391e-01] ... [ 6.71141641e-03 3.33198845e-01 -7.86055863e-01 ... 9.04464245e-01 2.63228625e-01 -2.00944379e-01] [-5.30342102e-01 -1.36019796e-01 6.18311577e-02 ... 3.18686664e-01 1.18069820e-01 3.21840569e-02] [-7.53174245e-01 6.06216133e-01 -4.89912093e-01 ... 1.00910544e+00 4.47294831e-01 -1.05480708e-01]] [[ 1.14518911e-01 1.11162059e-01 -5.18984139e-01 ... 3.49890976e-03 2.83517748e-01 6.71720028e-01] [ 1.45886943e-01 -4.65088114e-02 7.43803859e-01 ... 1.82463095e-01 1.16851151e-01 -5.10771096e-01] [ 3.79584730e-01 -5.44246137e-01 -7.28903413e-02 ... -1.14742547e-01 7.84740269e-01 8.34169313e-02] ... [-3.44613671e-01 3.46360385e-01 8.95563483e-01 ... 3.72044474e-01 1.61317751e-01 -3.71373236e-01] [-2.61903793e-01 -6.75899446e-01 -7.22157598e-01 ... 5.48105359e-01 2.57657051e-01 -3.72098237e-01] [-1.59222987e-02 8.61839175e-01 -7.52397954e-01 ... -2.54043251e-01 2.90349156e-01 2.05318242e-01]]] [[[ 1.87705066e-02 8.13571587e-02 -5.63861392e-02 ... 6.62287930e-03 -5.79450279e-02 6.74310178e-02] [ 5.33000045e-02 2.51362193e-03 2.58423779e-02 ... 1.58864200e-01 -2.95680366e-03 -2.12131247e-01] [-1.06788479e-01 7.86567181e-02 -1.91792310e-03 ... 3.54069695e-02 -1.75137147e-01 -6.03799336e-02] ... [-5.68005741e-02 4.69422266e-02 -5.19192182e-02 ... -7.68613443e-02 -2.59840060e-02 1.52389392e-01] [ 5.02750091e-02 9.75810289e-02 -3.58313397e-02 ... 
8.92031416e-02 -9.84519068e-03 4.39997055e-02] [-3.35099436e-02 8.47672075e-02 4.88704927e-02 ... 5.51483445e-02 2.68956530e-03 -6.82665557e-02]] [[ 1.90008990e-02 -2.44062394e-02 2.29957122e-02 ... -4.63955775e-02 5.75819984e-02 9.89359617e-02] [ 5.87668503e-03 1.08582452e-01 -3.38631123e-02 ... 2.98561547e-02 5.17290756e-02 1.33856252e-01] [-1.35453194e-01 -5.57625704e-02 2.21362039e-01 ... -9.12499726e-02 1.40506729e-01 -1.32999733e-01] ... [-1.69898823e-01 1.27227202e-01 3.35322082e-01 ... 3.20654102e-02 -5.87579608e-02 1.17663249e-01] [-5.29232994e-03 -1.45549579e-02 -1.04800746e-01 ... 8.58615153e-03 -9.64493304e-02 2.71062460e-02] [-1.49748132e-01 1.66371658e-01 2.46162526e-02 ... -1.83539577e-02 1.40503496e-01 -2.32618004e-02]] [[ 6.19223202e-03 6.79862779e-03 1.42006027e-02 ... -5.15036508e-02 -2.22038984e-01 -3.81556526e-02] [ 8.59626308e-02 1.62384167e-01 -4.05496731e-02 ... 2.09057972e-01 -8.97209719e-02 1.95174351e-01] [-1.24677330e-01 6.80323457e-03 2.72104554e-02 ... -2.03996718e-01 -7.51625597e-02 -9.58298706e-03] ... [-6.16258942e-03 -1.58769637e-01 2.07831003e-02 ... 2.49504253e-01 -8.55859369e-02 5.75060584e-02] [-1.96372326e-02 2.13235673e-02 -2.09389701e-02 ... 1.84311122e-02 5.52663393e-03 -8.80107656e-02] [ 2.26082839e-02 1.95379183e-01 9.37038586e-02 ... 3.12637031e-01 -9.99024138e-03 -1.52168691e-01]] ... [[-8.02402794e-02 -1.73161402e-01 -2.23754808e-01 ... -1.51461065e-01 -1.69711247e-01 -2.62579322e-02] [-1.45143077e-01 2.27313876e-01 1.62242614e-02 ... 3.80179547e-02 1.83794368e-02 3.42299432e-01] [ 4.72467765e-02 6.69548959e-02 -2.15163723e-01 ... -2.45274350e-01 9.08832997e-02 -2.01675668e-01] ... [-2.91531850e-02 -5.15674874e-02 1.85557947e-01 ... -3.00776009e-02 8.71137157e-02 7.91627690e-02] [-5.23293093e-02 -4.14770007e-01 2.14687407e-01 ... -5.12976535e-02 1.56130316e-02 2.08772328e-02] [-7.19138328e-03 2.21281692e-01 7.09303543e-02 ... 
-2.26804480e-01 1.09940507e-01 3.18979882e-02]] [[-7.84752220e-02 -5.77495918e-02 -6.21958636e-02 ... 4.04021181e-02 -1.75805420e-01 1.36108249e-02] [-3.08574848e-02 1.24029949e-01 5.00753755e-03 ... 3.38156670e-02 -3.60506251e-02 -1.33402972e-02] [-2.72929482e-03 -4.24937792e-02 -1.43857570e-02 ... -1.07592540e-02 -1.18685804e-01 1.45895228e-01] ... [-4.97525558e-02 -1.04680851e-01 -6.47653639e-02 ... 9.90403164e-03 -7.45666623e-02 -1.85071118e-02] [-1.49435058e-01 -7.61313289e-02 1.57991108e-02 ... -4.03298438e-02 8.60989392e-02 -1.20626122e-01] [-7.59127066e-02 1.72434404e-01 -2.34274417e-02 ... 6.08157292e-02 6.89346567e-02 5.70593402e-02]] [[ 4.38675955e-02 -1.16655685e-01 2.62924939e-01 ... -6.72925310e-03 1.46756217e-01 1.79955304e-01] [-1.98543072e-01 1.89445652e-02 2.67444476e-02 ... -3.96156758e-02 -2.88388371e-01 -9.19226632e-02] [-1.73123330e-01 1.43934786e-01 -1.22830451e-01 ... 1.40596464e-01 -4.60009985e-02 1.64242964e-02] ... [-3.40559706e-02 -4.64972258e-02 -7.46340817e-03 ... -1.06083624e-01 -2.90718228e-02 -2.61616651e-02] [ 2.57567354e-02 5.19122928e-02 -5.71557395e-02 ... -1.46727264e-01 1.79740205e-01 1.71359390e-01] [-1.48147151e-01 1.14990838e-01 8.62383544e-02 ... -6.44748509e-02 -6.63206875e-02 1.99685674e-02]]] [[[ 3.89814049e-01 -1.38946259e+00 -1.25178182e+00 ... -7.70225096e-03 -3.16036463e-01 6.96029663e-02] [ 2.57103622e-01 -6.88741624e-01 1.61017752e+00 ... 7.36484304e-02 3.75480354e-01 -1.71670783e+00] [ 3.67214113e-01 -1.30779147e-01 4.21081841e-01 ... 6.92378640e-01 5.99484622e-01 -1.04122400e+00] ... [-2.54921854e-01 1.40801573e+00 -5.41230261e-01 ... -7.43550897e-01 -3.68597299e-01 9.04450476e-01] [-5.17633148e-02 -1.86485425e-01 -3.80445749e-01 ... -4.35822427e-01 -7.88780153e-02 -3.60586762e-01] [-8.52451473e-02 -1.44450748e+00 -6.01183057e-01 ... -5.00656664e-01 -4.98659223e-01 -1.89320832e-01]] [[ 8.50163996e-01 1.06085145e+00 6.40205264e-01 ... 
1.60840511e-01 -3.95611107e-01 -6.41930521e-01] [ 6.99361637e-02 -3.67534637e-01 -6.87680483e-01 ... -3.02486327e-02 -5.96619070e-01 -2.92020351e-01] [-9.25748289e-01 -5.12284756e-01 -8.84795904e-01 ... -1.89214945e-01 5.78271449e-01 -8.43271971e-01] ... [-3.74073476e-01 2.97175407e-01 7.64766335e-01 ... 6.77961111e-01 2.65553117e-01 -8.09887648e-01] [-6.70212805e-01 -1.22242995e-01 -4.69526023e-01 ... -3.74128908e-01 -1.07936144e-01 1.67693824e-01] [ 1.11333102e-01 8.74440074e-01 -1.64169818e-01 ... 6.95571065e-01 5.01833022e-01 4.04345512e-01]] [[-4.74821270e-01 -2.90224463e-01 -7.31327832e-01 ... -1.56055996e-02 4.40022677e-01 -1.82539403e-01] [ 6.47154152e-01 9.18810308e-01 5.68327904e-01 ... -8.10478404e-02 1.26644623e+00 3.85219485e-01] [-4.10470754e-01 1.23752964e+00 3.20573092e-01 ... -2.09036455e-01 -6.73681200e-01 -8.34389806e-01] ... [ 2.78408885e-01 -8.18304181e-01 6.17913485e-01 ... -4.62998897e-01 2.53426224e-01 6.92370534e-01] [ 7.11901069e-01 1.63283259e-01 -7.28934407e-01 ... -3.27155948e-01 -1.45617580e+00 2.24435274e-02] [ 9.72414732e-01 -1.75225282e+00 -6.31216407e-01 ... 3.09498668e-01 -1.57104835e-01 3.77688795e-01]] ... [[-6.91463277e-02 -1.81637853e-01 -9.44129154e-02 ... -6.96282834e-02 -3.85962576e-02 7.38211095e-01] [ 3.91276320e-03 -1.18447552e-02 -5.11873662e-01 ... 8.58190298e-01 -5.11107564e-01 -1.01337349e+00] [-2.76943892e-01 3.17649841e-01 -3.29270095e-01 ... 2.65407890e-01 -4.99321401e-01 6.88045204e-01] ... [-8.64236206e-02 4.58448142e-01 4.38590169e-01 ... 7.88444579e-01 8.38236868e-01 -1.19840614e-01] [-6.24907970e-01 -9.21325266e-01 -7.77415514e-01 ... 6.52801156e-01 -5.83923519e-01 3.88040543e-01] [ 1.42966926e+00 -1.09459090e+00 -6.50567889e-01 ... 4.93533671e-01 -6.25733793e-01 2.95832574e-01]] [[ 7.36900806e-01 -1.48468047e-01 5.65540671e-01 ... 7.17890859e-01 -5.25321662e-01 1.13492346e+00] [-1.00233388e+00 -6.64369762e-01 -2.81067044e-01 ... 
-7.16932535e-01 8.88184488e-01 3.77016127e-01] [ 1.88844606e-01 3.75640541e-01 -1.71713650e-01 ... -1.26407772e-01 1.02022171e+00 -9.90403071e-02] ... [-4.74166214e-01 1.99602708e-01 -5.52700937e-01 ... -2.14178354e-01 -2.28215888e-01 1.44854030e-02] [-3.33476186e-01 -1.09645277e-01 -5.83266139e-01 ... 1.60516202e-01 5.73716275e-02 -1.05715477e+00] [-4.25644606e-01 4.54753578e-01 1.10310614e-01 ... 2.10329652e-01 -8.23098242e-01 -2.31930509e-01]] [[-1.22699559e+00 1.42090574e-01 -2.95474797e-01 ... 7.95309901e-01 -7.10525364e-02 -7.91543067e-01] [-1.00073230e+00 -1.48833048e+00 4.04037654e-01 ... -4.61636573e-01 1.21188700e-01 -7.66486466e-01] [-3.67748231e-01 -9.91417229e-01 6.58496857e-01 ... 7.58959353e-01 8.30693126e-01 2.04336464e-01] ... [-1.25596476e+00 2.13557884e-01 9.14600492e-01 ... 1.34768248e+00 5.66408336e-01 -7.72461593e-02] [ 1.28403795e+00 6.50321186e-01 -1.07555342e+00 ... -3.00739586e-01 -6.43173158e-01 2.87621766e-01] [-6.37439191e-01 -3.93134385e-01 -2.50137478e-01 ... -9.93051052e-01 -3.13171357e-01 4.96872455e-01]]] [[[ 2.92411327e-01 4.22946692e-01 -6.77065402e-02 ... -1.47510722e-01 -7.55904794e-01 2.98265517e-01] [ 5.66985071e-01 -7.60421343e-03 -9.63366553e-02 ... -4.77577209e-01 -8.29609111e-02 -2.95678265e-02] [-1.06829122e-01 2.24922672e-01 -1.05309807e-01 ... -4.62156236e-01 -4.76375490e-01 1.88341931e-01] ... [-3.05787325e-01 -6.13288507e-02 1.02259204e-01 ... 1.04902685e-01 9.60376021e-03 -5.78132318e-03] [ 7.28158429e-02 -2.99877256e-01 1.70551568e-01 ... 1.71425447e-01 -5.93300343e-01 -2.04881672e-02] [ 3.61491621e-01 4.18592781e-01 -1.63415715e-01 ... 2.99746662e-01 -2.46708393e-02 -2.61923790e-01]] [[ 1.31832004e-01 1.60973087e-01 -5.47833323e-01 ... -8.26261401e-01 -9.64866132e-02 -9.16582048e-01] [ 3.39774787e-01 1.15995575e-02 -3.15675378e-01 ... 3.52746964e-01 1.62667125e-01 -1.44279331e-01] [ 1.86959922e-01 3.56289148e-01 -1.75761580e-01 ... -7.25488141e-02 3.86457890e-01 8.25145170e-02] ... 
[ 5.69279134e-01 4.46217149e-01 -7.18623353e-03 ... 1.47203624e-01 -6.16058484e-02 -2.70105004e-01] [ 2.58690208e-01 -1.48276851e-01 4.04699981e-01 ... -2.82433391e-01 -1.49204805e-01 -1.29250005e-01] [ 3.59623849e-01 1.69011220e-01 3.15405667e-01 ... 2.60074168e-01 -2.00213552e-01 6.53669704e-03]] [[-6.25542402e-01 3.11756372e-01 1.23153776e-01 ... 1.56874597e-01 5.43640673e-01 6.54740691e-01] [-5.75198650e-01 -3.69402289e-01 1.41992792e-01 ... -2.06731707e-01 5.39232314e-01 -2.67619729e-01] [-2.28626281e-01 2.13959232e-01 -4.48996842e-01 ... 4.19473536e-02 -1.87791273e-01 1.06053136e-01] ... [-2.25759491e-01 -9.90822688e-02 5.96303344e-02 ... 2.14286134e-01 -1.36041492e-01 -1.18724830e-01] [ 1.01736903e-01 -1.30588248e-01 3.80191207e-01 ... -4.06359524e-01 5.71549833e-01 1.32800445e-01] [-1.03674747e-01 -9.19110030e-02 -4.24506903e-01 ... -2.23452270e-01 -3.69482003e-02 -3.25795040e-02]] ... [[-3.98157276e-02 4.66390163e-01 6.22845054e-01 ... 3.17429781e-01 -7.30950907e-02 1.95763499e-01] [ 3.94590497e-01 2.35874089e-03 1.55191228e-01 ... -1.33375108e-01 -3.55594307e-02 -5.09351550e-04] [ 2.68877685e-01 -6.09452017e-02 2.49835998e-01 ... -1.56398997e-01 2.25581184e-01 -2.07274601e-01] ... [ 5.62444329e-01 6.95286334e-01 -1.00322425e-01 ... -3.07032526e-01 5.82462251e-01 -2.32834622e-01] [ 2.25955516e-01 -2.77670056e-01 -1.96449757e-02 ... 2.49607548e-01 4.45192516e-01 2.71392763e-02] [-2.13038221e-01 -3.09935868e-01 4.51708125e-04 ... 2.43309990e-01 1.64816946e-01 -1.88454717e-01]] [[-1.07761681e-01 -9.38586071e-02 2.61218071e-01 ... -2.59717166e-01 -2.84833789e-01 -1.28921807e-01] [-1.28434867e-01 -3.84545118e-01 -2.48961389e-01 ... 1.72774181e-01 1.72834933e-01 4.90946651e-01] [-8.86150122e-01 -8.82530361e-02 -7.85861850e-01 ... -4.61562604e-01 -2.82887638e-01 -4.47352946e-01] ... [ 1.16695911e-02 -1.93671227e-01 4.54143256e-01 ... 1.64767593e-01 -1.24387279e-01 -4.64474320e-01] [-7.49448761e-02 -1.53509408e-01 -1.84088200e-01 ... 
-3.78595054e-01 4.58234064e-02 -5.37446067e-02] [-2.07802057e-01 5.99403605e-02 1.91332966e-01 ... -3.96652162e-01 2.18944952e-01 -4.07284379e-01]] [[ 4.35448810e-02 -2.11304098e-01 -2.83298731e-01 ... -2.09563032e-01 -8.64660740e-02 5.83762646e-01] [-1.26669168e-01 -2.09715769e-01 8.87629688e-02 ... -1.58038840e-01 -5.22434652e-01 -3.54735821e-01] [ 7.34520257e-02 -2.37725765e-01 -1.56331241e-01 ... -2.65609682e-01 1.17173284e-01 -2.63666838e-01] ... [-8.46574455e-02 -2.37448961e-02 2.00955287e-01 ... -6.23506168e-03 2.67516106e-01 -1.84048250e-01] [-2.29575768e-01 2.18889982e-01 -4.03171360e-01 ... 2.34254897e-01 6.78669438e-02 2.43953884e-01] [-3.36419344e-01 -4.31370363e-02 -1.91290796e-01 ... 5.39770305e-01 -1.65143132e-01 3.78199577e-01]]]] [[[[ 8.68427396e-01 1.40691006e+00 -6.86084688e-01 ... -5.49930930e-01 -4.55490917e-01 -2.91288805e+00] [-2.80621195e+00 -1.18515658e+00 8.04285586e-01 ... 1.30179465e+00 1.84033722e-01 8.50672349e-02] [-2.14516282e+00 -6.28183186e-01 5.53501248e-01 ... -1.32426357e+00 -4.14209738e-02 9.54930186e-01] ... [-1.73453316e-01 7.96164870e-01 -4.02695894e-01 ... 8.02946985e-01 3.01776552e+00 -5.37674487e-01] [ 2.17401290e+00 5.58961928e-01 -1.22891501e-01 ... -2.77477646e+00 -2.51076365e+00 -1.09383678e+00] [ 3.58313322e-01 -6.52287841e-01 -8.94204557e-01 ... 1.04397702e+00 9.64672029e-01 -1.69339776e+00]] [[-1.06100523e+00 -1.74986017e+00 -2.38426352e+00 ... -2.16495228e+00 6.41587973e-01 1.34185791e+00] [ 4.58749950e-01 -4.66089189e-01 -5.78221142e-01 ... 4.21144634e-01 2.89307141e+00 -1.05740833e+00] [-1.38418400e+00 -7.58282781e-01 -5.68566382e-01 ... 3.64853680e-01 -1.34112549e+00 8.17753971e-01] ... [-1.08464098e+00 -7.43650734e-01 5.30307174e-01 ... -2.43863970e-01 -9.97082055e-01 -1.33389175e+00] [ 9.45454895e-01 3.32840562e-01 3.90042663e+00 ... -5.41064680e-01 1.68698058e-01 1.09757626e+00] [-1.32849646e+00 2.57444739e-01 2.54392052e+00 ... 
1.16808295e-01 1.60755701e-02 6.36867166e-01]] [[ 2.77331710e-01 2.95647025e+00 5.05140960e-01 ... -1.15040588e+00 -4.06069785e-01 -8.25061917e-01] [ 1.45351100e+00 -8.77471030e-01 9.15319026e-01 ... 3.00843668e+00 -1.41072023e+00 9.78413746e-02] [-1.67586729e-02 8.22218001e-01 2.05104494e+00 ... -2.61260390e-01 1.93789458e+00 -1.27917087e+00] ... [ 2.37250543e+00 1.13819277e+00 5.47180653e-01 ... -7.14205280e-02 -1.55574691e+00 -1.03315747e+00] [ 1.17244887e+00 -1.15925527e+00 -1.22428989e+00 ... -2.02908421e+00 3.55892092e-01 1.73712599e+00] [-1.87220597e+00 2.33788300e+00 -4.77016598e-01 ... -4.71704149e+00 6.58482239e-02 -2.98007756e-01]] ... [[ 4.05466594e-02 -5.32884710e-02 -1.66041601e+00 ... -4.93527323e-01 1.00749290e+00 1.19439518e+00] [ 5.74707389e-01 -3.92288834e-01 5.42615116e-01 ... 7.43443191e-01 5.82651019e-01 -1.79708242e+00] [-1.07207191e+00 1.61715031e+00 1.89290893e+00 ... -3.22069764e+00 -4.38197047e-01 8.31386685e-01] ... [-2.93968415e+00 -1.83416948e-01 -1.04737353e+00 ... 8.16486657e-01 -1.53002769e-01 9.76712346e-01] [ 4.18396264e-01 -6.11571908e-01 1.33012128e+00 ... 8.63170803e-01 7.07569811e-03 -4.48476642e-01] [ 1.13685995e-01 -1.19789767e+00 8.99823248e-01 ... -1.65650517e-01 -1.91349435e+00 -8.70662987e-01]] [[ 2.28222013e+00 2.62091845e-01 -5.20202279e-01 ... -2.01197267e+00 2.92942643e-01 3.50721270e-01] [ 1.04230128e-01 1.14887798e+00 -5.48058152e-01 ... -8.03946197e-01 -7.53000259e-01 -1.66608834e+00] [ 4.36481667e+00 3.95713031e-01 3.14449739e+00 ... 3.06847394e-01 4.76964295e-01 -9.74643156e-02] ... [ 2.70711827e+00 2.42182434e-01 -1.24330354e+00 ... -9.70783234e-01 7.58080781e-01 -6.13554895e-01] [-1.73464072e+00 1.02688968e+00 3.68062615e-01 ... 9.76845205e-01 3.37303430e-01 1.37662566e+00] [-3.62791508e-01 3.50063831e-01 1.36091977e-01 ... -1.76406140e-03 2.48342514e+00 2.37727642e+00]] [[ 5.22663713e-01 1.25876081e+00 -1.86968684e+00 ... 
-1.23855007e+00 -1.62310290e+00 -1.27498281e+00] [ 1.20635904e-01 -3.55792880e-01 2.10860300e+00 ... -9.77457881e-01 -8.96323323e-01 2.76024193e-01] [-1.43889642e+00 -8.51110145e-02 -1.46055594e-01 ... -1.35368800e+00 1.67320645e+00 -2.26995134e+00] ... [-5.74963748e-01 2.36827254e+00 -7.55069852e-01 ... -6.79058790e-01 8.72789025e-01 -4.66286212e-01] [ 1.82387903e-02 2.33572513e-01 9.69158173e-01 ... 8.01550865e-01 -6.68855131e-01 -3.34507376e-01] [ 4.65906076e-02 -2.23411024e-01 1.44086003e+00 ... -5.41282967e-02 -1.57169491e-01 1.79595244e+00]]] [[[ 3.71019244e-01 3.13195251e-02 -6.04167044e-01 ... 3.02919090e-01 -3.59639712e-02 -2.23979980e-01] [ 1.12031408e-01 3.49786818e-01 -1.97035477e-01 ... 6.21875748e-02 -1.45707643e-02 -5.40695600e-02] [-2.00864345e-01 -4.46670264e-01 -9.30459239e-03 ... -9.96669829e-02 -1.05465576e-01 -1.26658216e-01] ... [ 2.59296536e-01 2.86826968e-01 2.04585195e-01 ... 3.48155946e-01 -9.41358283e-02 -7.72843212e-02] [ 5.39718755e-02 3.98888558e-01 2.67339125e-02 ... -1.92109197e-01 -4.09814864e-01 1.44268259e-01] [-3.25167537e-01 3.03459197e-01 5.02486993e-03 ... 1.27888873e-01 -1.60481319e-01 2.15305924e-01]] [[-2.47544020e-01 1.65210202e-01 -1.26885340e-01 ... -2.20472962e-01 2.49230579e-01 2.64154822e-02] [-1.53870806e-01 4.74986970e-01 -1.49161398e-01 ... -1.26566291e-01 8.54300484e-02 1.87610209e-01] [ 6.16304353e-02 -5.16916029e-02 -8.97140726e-02 ... 3.24950755e-01 -4.57315296e-02 2.26318706e-02] ... [ 4.48328882e-01 -1.35228500e-01 -1.55816361e-01 ... -3.31218302e-01 4.65971828e-02 -5.39633632e-01] [ 9.46081653e-02 3.17789882e-01 3.78538936e-01 ... -4.70146954e-01 -2.12228037e-02 2.92835142e-02] [ 2.06497297e-01 -4.58739847e-01 -4.11189079e-01 ... -4.69726890e-01 -3.77914757e-01 -2.38920271e-01]] [[ 1.72412485e-01 4.67956930e-01 2.19781026e-01 ... 1.71619177e-01 -1.02836126e-02 -2.97681808e-01] [-1.59391746e-01 -2.64184117e-01 3.39807451e-01 ... 
-3.09744418e-01 -2.38075882e-01 -2.31051266e-01] [ 1.38462976e-01 -1.13690317e-01 3.11535537e-01 ... -1.51099801e-01 3.36263955e-01 2.83059984e-01] ... [-5.22306599e-02 3.07106078e-01 4.63203937e-01 ... -1.01894692e-01 -1.80107102e-01 1.45867854e-01] [-1.14389539e-01 1.93501070e-01 -1.14256524e-01 ... 1.91176720e-02 -4.60138798e-01 2.81225026e-01] [ 3.75410259e-01 -7.78177306e-02 -1.23439789e-01 ... -4.82166171e-01 3.35193068e-01 3.80454123e-01]] ... [[ 2.02477396e-01 -4.51950580e-01 3.35234433e-01 ... 2.38911867e-01 -2.03815088e-01 -2.72140682e-01] [ 2.10471809e-01 -1.07642487e-02 -2.17272609e-01 ... -3.57932985e-01 2.81735003e-01 -1.78315312e-01] [-3.17213722e-02 2.60404833e-02 1.40090869e-03 ... 1.03011644e-02 1.61838368e-01 -2.74486661e-01] ... [-2.21375629e-01 -7.17561916e-02 1.46894500e-01 ... 5.80141604e-01 2.55630136e-01 3.49853009e-01] [ 1.73401773e-01 1.70636073e-01 -1.51123092e-01 ... 5.35329320e-02 3.24892640e-01 8.78294781e-02] [-2.37282336e-01 -1.63789183e-01 2.32262369e-02 ... 2.15478063e-01 -9.59489048e-02 -1.02049094e-02]] [[-2.31201649e-02 2.24993244e-01 1.97344869e-01 ... -1.42516857e-02 1.11623861e-01 -2.15132937e-01] [ 1.29088044e-01 4.32325572e-01 -1.53680697e-01 ... 1.14488445e-01 -1.68947428e-01 4.72500399e-02] [ 2.62614083e-03 3.44767779e-01 -1.11750819e-01 ... 1.01519637e-01 -3.56809586e-01 9.12927389e-02] ... [-3.02795172e-01 -4.62049097e-01 -3.21375936e-01 ... -1.40781611e-01 -6.93934321e-01 2.64872938e-01] [ 1.50780812e-01 -1.99943736e-01 3.47977668e-01 ... 5.05128205e-01 -1.80566892e-01 1.35965869e-01] [ 4.60769564e-01 -2.54459344e-02 -9.90356654e-02 ... -5.74247539e-01 -4.87228304e-01 4.17084485e-01]] [[-7.89229631e-01 1.43592313e-01 2.92246878e-01 ... 3.76849137e-02 3.28829110e-01 -9.03279632e-02] [ 1.37578398e-01 3.70422810e-01 3.50575209e-01 ... 8.54186639e-02 1.13909453e-01 -2.86190421e-03] [ 3.41696054e-01 -7.20324144e-02 -1.91172615e-01 ... 1.90570876e-01 1.32382557e-01 -5.90685010e-01] ... 
[ 1.61050782e-01 -3.22540969e-01 -1.26032084e-01 ... -4.29525584e-01 -5.51466823e-01 1.82897195e-01] [ 2.33299490e-02 -1.14504464e-01 1.98542207e-01 ... 2.12015659e-01 -1.59213915e-01 -1.41951278e-01] [-3.38015445e-02 1.39650837e-01 2.24721879e-01 ... 2.95058023e-02 2.15583116e-01 -8.43400583e-02]]] [[[ 2.18812097e-03 -1.97133988e-01 -7.63212666e-02 ... 3.24368030e-01 -2.21727267e-01 7.35418677e-01] [-3.60410839e-01 1.33890361e-01 7.04883263e-02 ... -7.36484170e-01 -9.69178677e-02 2.97243953e-01] [-1.11515403e-01 1.90935299e-01 -4.12114263e-02 ... -3.43869209e-01 5.57549655e-01 -4.12240922e-01] ... [ 3.95299941e-02 1.17918760e-01 4.21618551e-01 ... 4.81638134e-01 9.36091840e-02 -8.73163939e-01] [-4.08369213e-01 5.26257120e-02 3.86334866e-01 ... -2.65648663e-02 6.05685003e-02 1.40008479e-01] [-4.43567067e-01 -3.07748109e-01 -8.23843107e-02 ... -4.88373339e-01 3.21880609e-01 6.70783445e-02]] [[ 3.07865620e-01 -3.57122928e-01 1.73083588e-01 ... 5.39527178e-01 7.51354620e-02 -4.88067925e-01] [-2.58046299e-01 -7.43169844e-01 -7.49682665e-01 ... 1.31534338e-01 -4.79974806e-01 3.12697560e-01] [ 4.78385419e-01 7.99650669e-01 -4.32052732e-01 ... -5.32233655e-01 -2.54303932e-01 -3.23351383e-01] ... [-5.09064257e-01 -3.26890409e-01 -4.46764648e-01 ... -4.37218249e-01 7.69833103e-02 3.15120041e-01] [ 3.74113142e-01 5.08893877e-02 1.40176415e-01 ... -2.51004994e-01 -7.03108013e-02 4.49029803e-02] [ 2.69816034e-02 -1.04329892e-01 2.24619791e-01 ... -9.65921860e-03 -7.67254084e-02 6.74255073e-01]] [[-2.53175259e-01 3.55212629e-01 -2.42928028e-01 ... 1.69652194e-01 3.82440627e-01 -4.15955871e-01] [-9.46967229e-02 -6.01080572e-03 3.60640772e-02 ... 5.81060685e-02 -4.45148200e-01 2.31514364e-01] [ 4.43208218e-01 -3.75033081e-01 6.37319982e-02 ... 9.27912444e-02 7.47007728e-01 1.60709903e-01] ... [ 4.40098137e-01 -1.28139898e-01 1.37115523e-01 ... 6.08977377e-01 1.02929413e+00 -3.89966011e-01] [ 9.21896547e-02 2.85716325e-01 3.47988665e-01 ... 
3.66304398e-01 3.13693494e-01 -2.00012699e-01] [-2.03069940e-01 -5.05181216e-02 4.77083065e-02 ... -1.18438058e-01 -2.32994065e-01 -5.80381632e-01]] ... [[ 2.16453001e-01 1.53096244e-01 -3.22729975e-01 ... 8.99973273e-01 2.47421801e-01 -5.82765818e-01] [-7.41069019e-01 6.65488124e-01 -5.06574094e-01 ... -4.18735176e-01 -5.22753239e-01 -6.76250935e-01] [-5.61092459e-02 -4.64249700e-01 2.08103791e-01 ... 1.36315122e-01 7.41209686e-02 5.83486080e-01] ... [-1.24096058e-01 -4.36324209e-01 4.42752987e-02 ... 3.61005336e-01 7.28513524e-02 3.56596053e-01] [-3.22229832e-01 -2.02946737e-01 1.75357312e-01 ... 3.59835118e-01 4.96119082e-01 2.00559437e-01] [ 6.25054389e-02 -4.86092836e-01 2.30807602e-01 ... 2.98723549e-01 2.62169480e-01 2.40045547e-01]] [[-3.52395475e-01 -2.93604434e-02 4.89146739e-01 ... -6.55785128e-02 6.16687596e-01 3.54593545e-01] [-6.79485321e-01 6.80692494e-01 -6.00913286e-01 ... -4.75318313e-01 1.67466521e-01 7.16536105e-01] [ 6.38768971e-01 -2.87581265e-01 -7.42988586e-01 ... 4.38351065e-01 2.54640970e-02 -2.15896040e-01] ... [-1.11864138e+00 -5.99340387e-02 2.58862525e-01 ... 6.76072598e-01 2.62882054e-01 -7.70312920e-02] [ 1.40304729e-01 3.96787256e-01 -7.82374591e-02 ... 1.13454185e-01 2.31655434e-01 -4.07082886e-02] [ 2.43371516e-01 -3.70275825e-02 1.78735387e-02 ... 8.68747234e-01 1.02731872e+00 -2.96250790e-01]] [[ 8.57646614e-02 1.15427896e-02 -6.17624879e-01 ... -5.21258891e-01 -6.12144172e-01 8.96737725e-02] [-4.85107675e-02 3.17907691e-01 6.70139134e-01 ... 6.47890389e-01 4.04309809e-01 -2.35348642e-01] [-1.06610602e-03 -1.65446579e-01 -2.75652111e-01 ... 3.71690005e-01 -3.31320047e-01 -7.82379955e-02] ... [ 1.28777936e-01 4.15893912e-01 7.89152011e-02 ... -1.05327554e-01 -8.31251517e-02 1.75005034e-01] [ 1.34026974e-01 -8.91082138e-02 4.39995408e-01 ... 9.22962744e-03 -5.35084188e-01 -7.64770284e-02] [ 2.12777749e-01 9.95327115e-01 -2.14587450e-01 ... 
-2.87425816e-01 -3.54971141e-01 -2.26029247e-01]]] [[[ 7.40401372e-02 -5.83515540e-02 4.40715434e-04 ... -1.10008707e-02 -1.78928241e-01 9.46410466e-03] [-5.18873334e-02 1.38273820e-01 -8.21443796e-02 ... -5.32459207e-02 -3.11810169e-02 1.01850867e-01] [-2.53653258e-01 -2.81336233e-02 -3.92796546e-02 ... 2.08302792e-02 -9.31087285e-02 7.75419623e-02] ... [ 1.82247639e-01 -1.12633966e-01 -9.56272706e-03 ... -1.94449782e-01 -1.83517203e-01 1.86902642e-01] [-2.09997874e-02 -4.23200242e-02 9.29691568e-02 ... -4.26843353e-02 7.30378628e-02 5.31559363e-02] [-4.66699228e-02 7.14547038e-02 -4.27303761e-02 ... 1.36591896e-01 -9.16976482e-02 6.71697706e-02]] [[ 1.01163529e-01 -6.39140531e-02 -6.40867947e-05 ... -2.42639706e-02 -3.45286308e-03 -8.97318870e-03] [ 1.39787108e-01 1.44021437e-01 1.94659252e-02 ... 1.78962886e-01 1.73540227e-02 -2.79521402e-02] [-2.91080978e-02 2.02078968e-01 6.59756660e-02 ... 4.14544195e-02 1.07965015e-01 -1.24562914e-02] ... [-1.05121627e-01 -1.03730552e-01 -3.28587863e-04 ... -5.01779839e-03 9.78597850e-02 -3.00150495e-02] [ 1.10354051e-01 -4.76966016e-02 -2.23863840e-01 ... -8.86742771e-02 1.60182584e-02 -1.05271503e-01] [ 2.29894459e-01 1.00015868e-02 2.32690573e-01 ... 1.36826813e-01 -1.52790900e-02 9.77903828e-02]] [[-5.63056022e-02 2.75952159e-03 1.03543391e-02 ... -6.60661561e-03 3.87249701e-02 3.53180096e-02] [ 1.05628576e-02 -2.89130479e-01 1.01409510e-01 ... -1.67030290e-01 1.09471694e-01 -1.39446989e-01] [ 1.49800003e-01 1.98576272e-01 2.34020025e-01 ... 7.82671943e-03 -8.80999789e-02 -3.67536873e-01] ... [ 1.75343022e-01 -1.75731868e-01 -1.72189400e-02 ... -2.41831154e-01 3.52583528e-02 1.56817451e-01] [ 2.34859064e-02 -8.02220926e-02 1.48390055e-01 ... -8.38838667e-02 -1.59298569e-01 -3.54151949e-02] [-1.21858053e-01 1.20436467e-01 1.77304208e-01 ... 2.03711346e-01 8.07291344e-02 9.29290354e-02]] ... [[-2.24948496e-01 -1.50080830e-01 -9.96924341e-02 ... 
-1.59474969e-01 5.94014674e-03 -2.20269095e-02] [ 4.33104411e-02 -1.43029764e-01 1.49143249e-01 ... 3.69219529e-03 -6.22365437e-02 -4.41543460e-02] [ 4.12238166e-02 5.19836657e-02 1.68221459e-01 ... -4.73889485e-02 -6.38569593e-02 9.54467580e-02] ... [ 9.88735929e-02 -7.64879286e-02 -4.42430489e-02 ... 7.19301105e-02 3.77530754e-01 1.03192449e-01] [ 6.69197440e-02 2.86731329e-02 7.73360506e-02 ... 5.40358834e-02 -5.50182648e-02 2.98668146e-02] [ 4.82595749e-02 -2.29248792e-01 1.13869831e-01 ... -2.01899260e-01 1.60585895e-01 1.10801996e-03]] [[-1.09681241e-01 -1.06241658e-01 -2.59861369e-02 ... -1.42756835e-01 1.57231808e-01 1.17727824e-01] [ 1.54399863e-02 -4.68593389e-02 -1.51947793e-02 ... 9.81899444e-03 1.06925242e-01 2.68695056e-01] [-1.64881703e-02 -1.28611460e-01 7.84476250e-02 ... 2.31938958e-01 -1.73103139e-01 1.21069930e-01] ... [ 1.32453024e-01 -1.06683448e-01 -6.94669131e-03 ... 1.55779809e-01 1.30868688e-01 -8.38243216e-02] [-4.86443937e-02 1.13196097e-01 1.03220440e-01 ... 1.85133908e-02 4.58123311e-02 -4.37805466e-02] [-2.28137225e-01 -1.11658350e-01 2.08374023e-01 ... 7.81150535e-02 9.24347341e-02 1.29549816e-01]] [[-8.53369534e-02 2.47081444e-02 -1.99482292e-02 ... -2.21518474e-03 -1.30320862e-01 -2.58671604e-02] [-2.37119809e-01 -1.10333331e-01 -2.36023337e-01 ... 1.63364261e-01 -1.28042743e-01 -8.28570276e-02] [-4.97709028e-02 5.06456681e-02 -1.78015605e-01 ... -1.29057512e-01 9.38389078e-02 1.32590413e-01] ... [-8.69349763e-02 -2.42160875e-02 -6.87064677e-02 ... 2.16697287e-02 1.25395939e-01 6.93672001e-02] [-3.24851610e-02 1.73465997e-01 -1.88246265e-01 ... 9.85193904e-03 2.53861487e-01 1.53056398e-01] [-1.85140446e-02 -6.12506680e-02 1.93271525e-02 ... -8.17402303e-02 1.10088266e-01 7.57613331e-02]]] [[[ 2.90579110e-01 9.73599970e-01 -3.70167270e-02 ... -3.73774678e-01 1.59873456e-01 2.28689373e-01] [-4.69156563e-01 1.04031813e+00 -2.14489877e-01 ... 
-1.73545137e-01 2.53870994e-01 4.65370506e-01] [ 8.05678666e-01 9.54154015e-01 -7.78519083e-03 ... -1.79656073e-01 -1.77661940e-01 9.15084958e-01] ... [ 8.37211609e-01 5.65399528e-02 -6.17562413e-01 ... -3.17118227e-01 -7.20237613e-01 -1.73988521e+00] [-1.50923327e-01 -7.40478814e-01 8.21216345e-01 ... -5.59242368e-02 -1.64241195e-01 -8.12488496e-02] [ 3.88752595e-02 1.18227616e-01 3.43154639e-01 ... -1.38562346e+00 -4.36589390e-01 9.35804546e-01]] [[ 8.26605618e-01 7.80545890e-01 9.62386727e-02 ... -3.42889071e-01 7.97478259e-01 8.14178348e-01] [-7.24441171e-01 -1.26436234e+00 -1.99704096e-01 ... 6.14371479e-01 -1.62891936e+00 4.40303177e-01] [ 9.95194793e-01 -1.21760942e-01 -9.20304477e-01 ... 4.71966445e-01 3.42429690e-02 3.14789653e-01] ... [-3.27552296e-02 -5.30707359e-01 -6.95016384e-01 ... 8.61134291e-01 -7.01567769e-01 2.10899162e+00] [ 7.68618524e-01 1.39023006e+00 1.07861185e+00 ... 7.62425223e-03 8.83749962e-01 -2.73931235e-01] [-5.14824986e-01 7.65993655e-01 1.15277016e+00 ... 5.38077176e-01 -1.03375649e+00 1.44925684e-01]] [[ 2.37997577e-01 5.61881542e-01 -5.98766804e-01 ... -1.62628019e+00 -2.79721826e-01 -8.55936855e-02] [-9.57896784e-02 -3.77955079e-01 -3.98608446e-01 ... 3.64051372e-01 -2.16928929e-01 1.52475965e+00] [-4.35502887e-01 -8.87714624e-01 -5.54670319e-02 ... -9.74770248e-01 2.80173063e-01 -4.08715189e-01] ... [ 6.55495971e-02 2.03926563e+00 1.07618958e-01 ... 3.52633238e-01 -5.11827648e-01 3.73508960e-01] [ 1.84409785e+00 -4.48999882e-01 -6.82953477e-01 ... -5.53937197e-01 -8.66772234e-01 -1.27015877e+00] [ 2.89788634e-01 7.82300457e-02 -1.09295942e-01 ... 8.27005267e-01 4.25021261e-01 4.26239699e-01]] ... [[-5.50246119e-01 4.85918522e-01 -7.62070000e-01 ... 3.41087759e-01 7.04730034e-01 -5.73176861e-01] [-1.89157903e-01 -6.54572546e-01 3.18684825e-03 ... 4.99097675e-01 -4.97560740e-01 2.84282118e-01] [-1.42218864e+00 -3.20943147e-01 -8.37302744e-01 ... 6.88173711e-01 -5.64568877e-01 6.99986517e-01] ... 
[-9.82534528e-01 2.79832363e-01 7.10312545e-01 ... 3.57031763e-01 1.07989490e-01 1.03202276e-01] [-1.73848778e-01 -6.74107492e-01 1.31624794e+00 ... -8.90889615e-02 -5.49004614e-01 -3.78546506e-01] [ 5.25129676e-01 3.93972635e-01 5.62049896e-02 ... 1.08338010e+00 -1.13806260e+00 -3.08111697e-01]] [[-5.39224327e-01 4.39529538e-01 -5.70538878e-01 ... -1.44967175e+00 -1.02454281e+00 -1.40781772e+00] [ 1.16220862e-01 2.19225764e+00 1.60477829e+00 ... -1.07873654e+00 7.27575064e-01 1.20887510e-01] [-6.71132445e-01 -6.66450262e-02 1.08788170e-01 ... -1.44061804e-01 -8.45696628e-01 1.11359107e+00] ... [ 1.02002335e+00 8.89856398e-01 -3.82410027e-02 ... 1.05223048e+00 6.85515106e-01 -7.74047524e-02] [-9.71522212e-01 9.63542461e-01 -6.73347056e-01 ... -6.51013851e-02 -1.56935394e-01 -2.27929369e-01] [-1.21720806e-01 -2.55310148e-01 9.67414677e-01 ... 1.67720032e+00 -2.32286572e-01 -1.26410592e+00]] [[ 8.94443542e-02 -1.32709052e-02 -1.32497564e-01 ... -1.21381521e+00 4.00505215e-02 -3.08485270e-01] [-3.53935808e-01 2.36100748e-01 -8.03973824e-02 ... -7.48966396e-01 -5.43239415e-01 -2.91523814e-01] [ 3.58104289e-01 -1.35125127e-02 7.55240083e-01 ... 8.52022350e-01 8.90221715e-01 -5.01306415e-01] ... [ 6.84763432e-01 6.59897447e-01 4.90598887e-01 ... -8.89974758e-02 2.62093902e-01 6.85935020e-01] [ 5.93999565e-01 -8.06228638e-01 7.98507631e-01 ... 4.26392168e-01 1.20953739e-01 -5.68368249e-02] [-1.94611281e-01 -3.66634130e-01 -2.23537236e-01 ... -1.06266332e+00 -1.64854777e+00 1.27989101e+00]]] [[[ 4.78993416e-01 1.65372819e-01 -7.90469274e-02 ... 2.83729807e-02 -4.19399202e-01 -2.43196577e-01] [-3.53118181e-01 3.36009860e-01 4.70973015e-01 ... 5.84165335e-01 -4.77401391e-02 1.23334609e-01] [-2.37223834e-01 -2.17759013e-02 5.36087379e-02 ... 4.65111509e-02 -1.19848819e-02 6.28995150e-02] ... [ 4.46706176e-01 3.00084859e-01 3.92693788e-01 ... 4.44909364e-01 4.10002083e-01 1.56165957e-01] [ 5.09127378e-01 -4.88404751e-01 3.62502217e-01 ... 
4.27439243e-01 1.88308522e-01 -5.90846688e-02] [-1.51755325e-02 3.11708838e-01 -7.13598654e-02 ... -1.84357569e-01 -3.14280987e-01 -1.37933999e-01]] [[ 4.91652936e-01 -1.47875786e-01 -1.19239703e-01 ... 5.71602955e-02 -2.67767370e-01 -1.08100943e-01] [ 5.10341041e-02 -2.56733328e-01 4.99239601e-02 ... -2.68792007e-02 -1.33101791e-01 4.50853825e-01] [-1.06892742e-01 -9.47659612e-02 -2.82112658e-01 ... -6.32278204e-01 -1.43772006e-01 6.62532970e-02] ... [-1.36165038e-01 9.35815349e-02 -1.28962040e-01 ... -1.17654137e-01 1.65442646e-01 -1.70453399e-01] [ 3.20319086e-01 3.92855220e-02 -4.44590658e-01 ... 6.13594890e-01 6.28509447e-02 3.50491494e-01] [-3.51118118e-01 -1.13285877e-01 6.25494540e-01 ... -2.07153216e-01 -8.29318240e-02 -2.02894986e-01]] [[ 3.00916493e-01 -4.05679435e-01 2.67486662e-01 ... 3.71737219e-02 -4.51857954e-01 3.87400508e-01] [ 7.95301888e-03 5.04288003e-02 2.63971120e-01 ... -1.43822819e-01 -5.58867514e-01 2.19507724e-01] [ 3.52220982e-01 1.34188980e-02 -3.11914116e-01 ... 1.55024797e-01 -2.32928842e-01 -7.38316253e-02] ... [-2.19413877e-01 -9.02764723e-02 1.65947959e-01 ... -1.29523799e-01 -2.40049232e-02 -6.56706542e-02] [-3.24836910e-01 2.05853105e-01 -2.70731121e-01 ... -6.63246572e-01 -3.80886942e-01 -4.66722213e-02] [-4.71689165e-01 6.43359944e-02 1.04483485e-01 ... -2.31110916e-01 -1.72159508e-01 -3.46867025e-01]] ... [[-3.19691271e-01 -3.40904653e-01 -1.15159981e-01 ... -1.45090878e-01 8.39916885e-01 -3.47393155e-01] [ 1.26151651e-01 -2.09541380e-01 -8.35930467e-01 ... 1.01320751e-01 1.40757278e-01 -4.36862975e-01] [-6.60273880e-02 1.90114975e-01 -4.60928649e-01 ... -2.42371589e-01 -4.05178368e-01 7.25979328e-01] ... [-4.84680563e-01 3.96307707e-02 -1.48016661e-01 ... 1.19234495e-01 5.75168431e-01 5.91064207e-02] [ 1.72780782e-01 -6.29313767e-01 -1.97691932e-01 ... -3.21165204e-01 -3.07344705e-01 -1.59184128e-01] [-2.05166742e-01 9.04708654e-02 -3.52473930e-02 ... 
4.77004834e-02 -6.13627136e-01 5.31695671e-02]] [[ 2.23186627e-01 3.42403613e-02 -1.88106686e-01 ... -2.12335512e-01 -1.86197430e-01 2.22425014e-01] [-1.78304926e-01 -2.77637750e-01 -2.91072223e-02 ... 1.51661009e-01 6.06910251e-02 -1.64805084e-01] [-5.87971359e-02 3.53229284e-01 4.85318989e-01 ... 1.32415622e-01 1.70240387e-01 -5.97562827e-02] ... [-6.40123785e-01 2.52009816e-02 4.01457101e-02 ... -2.58918524e-01 -3.30834650e-02 -1.23907454e-01] [ 4.83719647e-01 2.62414187e-01 1.24443457e-01 ... -5.03256440e-01 -5.08617878e-01 -1.38216510e-01] [-7.00712726e-02 1.00014232e-01 2.41514504e-01 ... 2.44559407e-01 -2.02914417e-01 -1.19631626e-02]] [[-3.27633992e-02 1.27265781e-01 1.70519292e-01 ... -2.89180577e-01 3.31621885e-01 -3.63370180e-01] [ 1.48260027e-01 1.60966560e-01 -8.65896791e-02 ... -4.59914833e-01 4.65087831e-01 -1.07202888e-01] [ 3.10168028e-01 -2.17921808e-01 -3.00230116e-01 ... -6.61804020e-01 3.16042095e-01 -1.74167320e-01] ... [ 4.27089334e-01 2.64804155e-01 2.89856810e-02 ... 1.16732851e-01 2.60397822e-01 -1.84112489e-01] [-2.41392896e-01 1.28199056e-01 -2.97677666e-01 ... -6.53297007e-02 5.05244255e-01 -1.34339735e-01] [-6.43842816e-01 -2.33584300e-01 -1.04985796e-01 ... -2.92745173e-01 4.15928006e-01 -1.74709707e-01]]]] [[[[-1.81919634e+00 -1.44053495e+00 5.66544384e-02 ... -3.61629200e+00 -4.50291455e-01 7.88148880e-01] [-1.50243635e-03 7.21010387e-01 5.13850898e-02 ... 4.11418796e-01 -3.10294914e+00 -6.68100893e-01] [ 7.97086477e-01 2.91817576e-01 -8.09295177e-01 ... -6.59146428e-01 6.34669662e-01 -2.39435151e-01] ... [ 1.26859629e+00 -1.64630949e-01 -1.87343729e+00 ... 4.14120592e-03 -1.01049352e+00 -2.42286459e-01] [-1.88032246e+00 -6.37015522e-01 1.21093595e+00 ... -7.95861602e-01 1.68763876e-01 8.51016026e-03] [ 1.11877370e+00 -5.36714494e-01 -2.98584372e-01 ... 1.37459087e+00 -2.67650723e-01 3.96587580e-01]] [[-7.45699883e-01 -2.51394415e+00 2.89871693e-01 ... 
1.48129478e-01 1.69570129e-02 -8.98211002e-01] [-1.51535141e+00 -2.46038795e-01 9.51690376e-01 ... 1.88207436e+00 -6.23039722e-01 2.80875707e+00] [ 8.09404925e-02 -8.16502631e-01 -2.61101902e-01 ... -2.38710746e-01 2.06379271e+00 1.33341885e+00] ... [ 1.11142731e+00 1.36116159e+00 -9.41045582e-01 ... 3.61785054e-01 2.27846289e+00 -1.13902040e-01] [ 6.11059844e-01 1.60693541e-01 1.56180775e+00 ... -2.27245784e+00 -1.68596005e+00 6.03220463e-01] [-1.04047775e+00 2.11095288e-01 3.22110653e-01 ... -1.99716222e+00 3.72289270e-01 -1.66816562e-01]] [[-1.04062622e-02 6.65279329e-01 7.01423883e-02 ... -4.80261832e-01 1.03973055e+00 1.75206470e+00] [ 1.24866939e+00 -1.13487625e+00 1.05732703e+00 ... -2.04179570e-01 -3.97278041e-01 -8.08262110e-01] [ 3.41947317e-01 2.60718703e+00 -9.65186954e-01 ... -2.00267982e+00 1.74737048e+00 1.05084562e+00] ... [ 7.83141792e-01 -6.18895054e-01 9.61297870e-01 ... 4.74431776e-02 1.99110246e+00 -5.30280098e-02] [ 3.96751618e+00 -1.96983337e+00 -7.46834517e-01 ... -3.15905631e-01 3.07146931e+00 -1.08363871e-02] [ 6.65545702e-01 2.64542246e+00 1.03805113e+00 ... 1.86383530e-01 2.76962018e+00 -3.55435252e+00]] ... [[ 1.62580228e+00 1.08132534e-01 2.22176147e+00 ... -8.43592823e-01 -3.69268924e-01 2.22908354e+00] [ 4.87359017e-01 -1.01199830e+00 4.56420422e-01 ... 9.02994633e-01 2.59970248e-01 -7.72756875e-01] [-1.97413340e-01 -8.92560124e-01 -8.69331479e-01 ... 1.30902016e+00 -1.88285708e+00 6.65025234e-01] ... [ 4.77174520e-01 -6.15857005e-01 -1.63803160e+00 ... 6.52532518e-01 -3.10979903e-01 -2.63951468e+00] [ 3.31044525e-01 9.25288320e-01 -7.53674507e-01 ... 6.15131795e-01 8.11827838e-01 1.03067219e-01] [-3.06012750e+00 3.77284259e-01 9.74426329e-01 ... -4.97713864e-01 4.34804261e-01 5.99744916e-01]] [[ 2.44212206e-02 6.90636933e-01 9.43579972e-01 ... -2.86772251e-01 2.33512282e+00 -1.32533824e-02] [ 1.47321355e+00 8.62934664e-02 -1.08193672e+00 ... 
-7.21675396e-01 -1.81265628e+00 2.09267235e+00] [ 7.21304953e-01 5.34706414e-01 1.45706630e+00 ... 9.41325009e-01 2.33220410e+00 -1.67361176e+00] ... [ 1.04375136e+00 -1.51076055e+00 -8.10432196e-01 ... 8.59472513e-01 -4.31632519e+00 3.97110492e-01] [ 6.65390074e-01 2.22850427e-01 -3.48572564e+00 ... -1.43753517e+00 -1.74664751e-01 -7.08855629e-01] [-7.37238467e-01 7.14088738e-01 -3.90714943e-01 ... 9.00429845e-01 -2.27256799e+00 -1.09382534e+00]] [[ 2.76646256e-01 1.72176564e+00 1.25637841e+00 ... 5.69355011e-01 9.07300770e-01 -1.29008782e+00] [-1.17303979e+00 -1.05067909e+00 2.12088132e+00 ... 1.72236061e+00 1.73909843e+00 -3.44358861e-01] [ 1.68408382e+00 -2.89558545e-02 7.90956855e-01 ... -5.49140215e-01 -4.62727398e-02 1.74825773e-01] ... [ 1.05607235e+00 -6.27520263e-01 -4.72907972e+00 ... -2.20112824e+00 -7.46782899e-01 4.12175581e-02] [-1.21703255e+00 -1.14880490e+00 1.30041134e+00 ... 1.30227089e+00 -1.47542620e+00 1.06037331e+00] [-3.17269373e+00 1.54200912e+00 8.63051534e-01 ... 1.80104148e+00 2.12715483e+00 -1.13843131e+00]]] [[[-6.49387017e-02 1.48977369e-01 -9.40439552e-02 ... -9.48031396e-02 1.65695809e-02 -1.25886977e-01] [-2.32765749e-02 -6.01019450e-02 6.92365766e-02 ... 3.45310241e-01 -7.22024962e-02 2.40760595e-01] [-1.37007488e-02 -8.38584229e-02 1.57118946e-01 ... 4.26346451e-01 -1.27638234e-02 -7.55057991e-01] ... [ 7.67916767e-03 -3.16146344e-01 -4.98171188e-02 ... -5.60687661e-01 -4.26719874e-01 8.59482959e-02] [ 2.11310819e-01 1.73316225e-01 3.18542838e-01 ... -9.76268798e-02 3.12434286e-01 -7.07681924e-02] [-2.56537348e-02 1.71754315e-01 4.57100049e-02 ... -2.04079702e-01 3.35774928e-01 2.80823648e-01]] [[-5.59817970e-01 -3.68289024e-01 -2.57214725e-01 ... -2.17934996e-01 -3.29525322e-01 -1.86359107e-01] [ 6.15969636e-02 -3.41142535e-01 8.55904102e-01 ... -2.44184911e-01 -1.64917409e-01 -4.59883101e-02] [ 1.36604697e-01 2.93049421e-02 3.09902191e-01 ... -9.69912857e-02 7.78001726e-01 3.43091786e-01] ... 
[-2.72717923e-01 -1.98678553e-01 4.98789586e-02 ... -3.18585873e-01 -2.03893885e-01 -2.74173915e-01] [ 1.32584393e-01 1.61803678e-01 3.54626983e-01 ... 4.51831609e-01 -4.41351742e-01 2.10760564e-01] [ 5.44220209e-01 7.10085705e-02 1.64844200e-01 ... -8.22507679e-01 8.37099254e-01 3.13011557e-01]] [[-2.59807203e-02 -6.56086951e-02 -7.75776088e-01 ... -1.86752796e-01 -2.28896648e-01 -1.68480664e-01] [ 3.23277593e-01 -6.12963848e-02 -1.90392599e-01 ... -9.88830775e-02 -4.83238786e-01 2.19956830e-01] [-4.16974247e-01 -3.50979954e-01 3.82074341e-02 ... 9.11853090e-02 -4.27425742e-01 7.02836588e-02] ... [-3.51576865e-01 4.92043406e-01 3.80534679e-02 ... 2.82497525e-01 1.34114102e-01 -2.71881461e-01] [-3.99353534e-01 -4.49465752e-01 -5.47061004e-02 ... 2.37511061e-02 -3.54209095e-01 -1.48305118e-01] [-2.57427990e-01 3.77355516e-02 -2.02067286e-01 ... 2.30550185e-01 -2.38115221e-01 1.47761762e-01]] ... [[ 1.71552181e-01 -2.74253394e-02 -3.96642685e-01 ... -1.75486058e-01 -3.35490555e-01 2.82489240e-01] [-8.57562050e-02 1.97106108e-01 3.29903007e-01 ... 1.51313646e-02 -4.72695023e-01 -2.42272526e-01] [ 1.18066773e-01 -2.27117270e-01 -1.35332122e-01 ... -2.10173741e-01 -3.85708451e-01 2.45100006e-01] ... [-2.55022109e-01 4.53621656e-01 2.20677033e-01 ... -9.17636007e-02 -7.06421956e-02 3.02136093e-01] [-1.01073325e-01 -1.44534200e-01 1.27046645e-01 ... 1.55879945e-01 -3.66762191e-01 -6.06803894e-01] [ 1.26540706e-01 7.07708895e-02 -8.74061137e-03 ... -1.21017456e-01 -2.23428253e-02 -1.50490077e-02]] [[ 2.03789800e-01 1.54447809e-01 -7.22795427e-02 ... 2.65843242e-01 -1.84738431e-02 1.53329134e-01] [ 1.46206766e-01 3.32067192e-01 5.69316223e-02 ... -5.24704039e-01 -1.93047673e-01 2.02038959e-01] [-3.51826936e-01 -2.70289153e-01 -5.83131379e-03 ... 1.40878290e-01 -3.35458189e-01 -1.54111087e-01] ... [ 1.84186116e-01 1.33594930e-01 7.94270560e-02 ... 3.02216381e-01 4.19432849e-01 -4.56815094e-01] [ 1.94265097e-01 1.97722882e-01 3.55886728e-01 ... 
3.33458215e-01 2.74197906e-02 -2.55913567e-02] [-3.20270985e-01 -7.03852922e-02 -3.63735795e-01 ... -2.59058058e-01 -1.66802593e-02 -5.30283749e-02]] [[ 2.46784929e-03 1.83519110e-01 -4.01295096e-01 ... 9.53124315e-02 -2.36055538e-01 -1.86909795e-01] [ 1.90462142e-01 -1.24821663e-02 6.67652309e-01 ... -1.06099732e-01 -1.97987348e-01 -2.09355950e-01] [-1.56272668e-02 -2.26115301e-01 3.67523611e-01 ... -4.06264998e-02 -5.46191037e-01 2.95836329e-02] ... [-9.22613963e-02 4.51446533e-01 1.96220949e-01 ... -7.47017562e-02 4.56798077e-02 -6.36755586e-01] [ 1.54116690e-01 -3.46272051e-01 1.83722287e-01 ... -5.32400727e-01 -7.55809573e-03 1.00500800e-01] [ 4.28188592e-02 -1.67960525e-01 -1.37361392e-01 ... -1.31930366e-01 -9.62395146e-02 -2.70327836e-01]]] [[[-1.29974797e-01 4.07276899e-01 4.56885993e-01 ... -6.99045599e-01 1.71298862e-01 -2.01891690e-01] [-7.21651435e-01 -3.87130558e-01 7.63455629e-01 ... -1.17091350e-01 9.32033241e-01 2.87220627e-01] [ 5.08769043e-02 -4.37650025e-01 8.73326808e-02 ... -3.49683762e-01 -6.36454225e-01 -9.58512485e-01] ... [ 8.37433636e-02 2.88385600e-01 2.64055848e-01 ... -5.69887042e-01 5.11858284e-01 4.22154784e-01] [ 3.30484807e-01 5.55067882e-02 -3.38123739e-01 ... 1.69161320e-01 -5.87657392e-01 1.29288286e-01] [ 5.76343834e-01 8.53534281e-01 4.63766493e-02 ... -3.63995790e-01 4.13767576e-01 1.90404817e-01]] [[ 1.99995842e-02 6.55669510e-01 -6.01900935e-01 ... -2.25553185e-01 3.79319489e-01 -3.34428668e-01] [-3.73718627e-02 -7.69934580e-02 -5.69691718e-01 ... 3.83790702e-01 5.49529046e-02 -2.17206255e-01] [-3.72542739e-01 -2.18222052e-01 -6.34090006e-01 ... -2.91483492e-01 1.00393486e+00 -5.65740049e-01] ... [ 2.95534376e-02 -1.34685021e-02 1.50866672e-01 ... -2.29138974e-02 3.59527230e-01 -3.05972219e-01] [-5.27322032e-02 -3.78803685e-02 4.13170844e-01 ... 1.57113783e-02 4.32913065e-01 1.50337964e-01] [ 4.30809706e-01 3.91376317e-01 -1.48121268e-01 ... 
3.03287983e-01 -4.48632628e-01 1.33886233e-01]] [[-3.06118757e-01 9.38437432e-02 -6.54295921e-01 ... 3.01098108e-01 -2.28053495e-01 4.37749535e-01] [-1.96558252e-01 3.92060220e-01 2.97115862e-01 ... 3.23079675e-01 2.17706814e-01 -3.31959188e-01] [ 2.97585577e-01 4.98909980e-01 -1.38350978e-01 ... 5.06168842e-01 -1.14083067e-01 7.40492463e-01] ... [-4.19979602e-01 3.99672948e-02 2.79900521e-01 ... -6.38664234e-04 -5.05515516e-01 -1.82824031e-01] [ 2.10131422e-01 -1.54157951e-01 4.71497387e-01 ... -6.49438560e-01 1.81051698e-02 -1.96221545e-01] [ 6.35192513e-01 2.36959025e-01 2.42472395e-01 ... -2.04096079e-01 -2.58090198e-01 -2.68800497e-01]] ... [[ 4.46719944e-01 1.68027163e-01 6.06293321e-01 ... -2.91507751e-01 2.08818093e-01 3.19436640e-01] [ 3.68722618e-01 3.42284381e-01 2.77563840e-01 ... -2.88239494e-02 -6.73341036e-01 5.85033298e-01] [-6.66642040e-02 -1.15524381e-01 4.61398661e-01 ... -2.46149614e-01 1.59987956e-01 1.91681966e-01] ... [ 7.10130064e-03 2.92251915e-01 -5.59673496e-02 ... 3.50857347e-01 5.61698914e-01 6.11643434e-01] [ 3.80415499e-01 -2.23328769e-01 1.52021065e-01 ... 1.86318412e-01 5.74335515e-01 5.50685897e-02] [-6.36783540e-02 2.84668177e-01 2.36351609e-01 ... 2.66087413e-01 -5.44775724e-01 -1.01650462e-01]] [[ 1.58274788e-02 2.07870752e-01 -6.85591996e-02 ... -8.00365567e-01 3.70968252e-01 -4.62019235e-01] [ 5.01590669e-02 3.05965066e-01 6.26275122e-01 ... -2.67849237e-01 -1.73551351e-01 -2.15259492e-01] [ 2.16208935e-01 -1.94279388e-01 -8.26943040e-01 ... 3.25348228e-01 5.65512031e-02 -3.40048254e-01] ... [ 6.13602042e-01 2.11631492e-01 -3.65614861e-01 ... -3.66931528e-01 -7.69790262e-02 2.24884436e-01] [-2.42322817e-01 -1.84921235e-01 -2.05870211e-01 ... 3.31086099e-01 2.07874715e-01 -3.79112102e-02] [ 3.47626925e-01 4.69655871e-01 -3.90216142e-01 ... -6.17705524e-01 1.06732988e+00 -6.11926354e-02]] [[-6.10166453e-02 -2.08324827e-02 1.00112414e+00 ... 
-6.36762977e-02 4.80547905e-01 6.52720571e-01] [-1.02241829e-01 -1.26019930e-02 -1.18121495e-02 ... -8.16047788e-01 -2.41526142e-01 3.71051937e-01] [-1.35269195e-01 6.51767030e-02 6.14972234e-01 ... -1.39086023e-02 -4.17245030e-01 -4.33636695e-01] ... [-6.04471564e-03 2.16408402e-01 4.66624349e-01 ... -2.72294015e-01 -1.08444542e-01 -8.02586526e-02] [-1.19634368e-01 -1.63853332e-01 -4.04450268e-01 ... 5.25501594e-02 -8.27492714e-01 1.08442700e+00] [ 1.11030445e-01 8.42887387e-02 -3.24005485e-01 ... -7.76914537e-01 -2.69043982e-01 -4.27698106e-01]]] [[[-9.65343043e-02 -1.77043080e-01 -2.36437872e-01 ... 3.92903760e-02 -2.50105336e-02 -3.19704145e-01] [ 1.09556522e-02 6.75803721e-02 2.67497469e-02 ... -1.52875483e-01 2.95510590e-02 1.87929310e-02] [ 1.75591797e-01 -9.24331173e-02 2.03452725e-02 ... -6.04558513e-02 5.40175177e-02 2.39219014e-02] ... [ 3.81018035e-04 -2.19216794e-01 1.52119130e-01 ... 1.14829183e-01 1.86606452e-01 5.93234645e-03] [-4.36138362e-02 2.76929494e-02 2.24365279e-01 ... 6.13878332e-02 -1.31234393e-01 -4.00469080e-02] [-6.56072348e-02 -4.42824699e-02 5.45240715e-02 ... 3.92050669e-02 -1.64818063e-01 1.47690758e-01]] [[-4.14733030e-02 -9.59478784e-03 -1.09969467e-01 ... 2.45856997e-02 -6.74388036e-02 -6.51928261e-02] [-1.76661447e-01 -1.63356066e-01 1.52834207e-01 ... -1.34740341e-02 7.19675794e-02 1.40681103e-01] [-9.67328027e-02 2.11031526e-01 -4.39711884e-02 ... 1.93839818e-01 -1.21957492e-02 1.43287390e-01] ... [-1.65586509e-02 -4.20917757e-03 -1.08336166e-01 ... 6.92718402e-02 -4.35485318e-02 -2.09255040e-01] [ 2.87917219e-02 -1.33578941e-01 -1.34463813e-02 ... -4.87006120e-02 4.41058632e-03 -1.86928604e-02] [ 6.54872432e-02 -3.80645283e-02 -1.36757791e-02 ... -2.11232811e-01 -7.72031173e-02 6.85566338e-03]] [[-2.34491061e-02 -3.20313056e-03 -1.50023922e-01 ... -1.08594880e-01 -1.69269755e-01 -7.70883113e-02] [-9.27880555e-02 -8.72923508e-02 -1.50921252e-02 ... 
-6.64504319e-02 -3.76827605e-02 -4.04909663e-02] [ 2.58431047e-01 1.83837220e-01 -2.88280882e-02 ... -9.09286588e-02 -2.31901377e-01 7.49349445e-02] ... [-1.98777858e-02 1.80115569e-02 1.69990316e-01 ... -6.48767352e-02 8.55238065e-02 -4.81104590e-02] [ 1.90219581e-01 2.11226374e-01 -1.49202589e-02 ... -7.41943792e-02 -3.04554380e-03 -1.75263330e-01] [ 2.98823640e-02 -1.77876040e-01 7.10767061e-02 ... -6.18186221e-02 -8.86226296e-02 -1.70817927e-01]] ... [[ 3.43721844e-02 9.44230333e-02 5.60548827e-02 ... -1.70558944e-01 -9.39923152e-02 1.14937514e-01] [ 6.33325502e-02 8.58639739e-03 9.32624936e-02 ... 1.40449643e-01 9.63653475e-02 1.15732685e-01] [ 7.49145597e-02 8.08364153e-02 -4.03018035e-02 ... -1.14265636e-01 -1.64069235e-01 1.64380163e-01] ... [ 4.61870842e-02 -6.00868799e-02 1.14882208e-01 ... 3.77078727e-02 2.81649053e-01 7.22828205e-04] [-1.03794321e-01 -2.16335624e-01 2.03858927e-01 ... 1.63265457e-03 7.19985664e-02 -1.38690814e-01] [ 1.56062409e-01 -1.02560148e-01 9.45864916e-02 ... -5.19635752e-02 1.11465724e-02 2.62575865e-01]] [[ 8.01007673e-02 -7.89054856e-03 -9.05985236e-02 ... 1.98557049e-01 2.25428149e-01 2.02938929e-01] [-1.21004228e-02 -6.05206285e-03 -3.07623502e-02 ... -1.97737161e-02 1.30834311e-01 1.11871630e-01] [ 1.16791511e-02 -5.57849705e-02 -1.10254303e-01 ... 4.44766507e-02 -1.40268877e-01 -3.05028912e-02] ... [ 1.05567142e-01 3.88899781e-02 1.47142723e-01 ... 5.18472977e-02 7.16376752e-02 -4.26395424e-02] [ 5.13355248e-02 -4.53722989e-03 -7.72488490e-02 ... -5.29759079e-02 -3.35900560e-02 -1.05958618e-01] [ 3.30915712e-02 -1.76103160e-01 1.68329686e-01 ... -1.71973601e-01 1.99102461e-01 8.76031071e-02]] [[ 4.00683470e-02 3.46558452e-01 1.11018382e-01 ... -4.02751490e-02 1.07635051e-01 -1.62508175e-01] [ 4.17742170e-02 5.50874285e-02 1.43745705e-01 ... -7.36324638e-02 -2.94539601e-01 6.79326653e-02] [-1.18117236e-01 -3.45333099e-01 -1.17747866e-01 ... -2.18751617e-02 -4.55864705e-02 6.48064390e-02] ... 
[ 1.46020934e-01 -7.49992533e-03 1.92758098e-01 ... -1.43308472e-02 1.08205304e-02 3.28496806e-02] [-1.21210337e-01 9.63435099e-02 -8.35144818e-02 ... -1.74558505e-01 2.74599325e-02 -1.28423959e-01] [-3.85110453e-02 -5.06350957e-02 4.64116782e-02 ... 4.22064029e-02 7.40693584e-02 -1.30335393e-03]]] [[[-1.58229217e-01 3.46952915e-01 -2.77511626e-01 ... -7.42186904e-01 1.05543673e-01 2.64701158e-01] [-9.44109380e-01 1.21583670e-01 -5.07244408e-01 ... 1.21322441e+00 7.89847791e-01 1.08837140e+00] [ 7.24243045e-01 4.92691427e-01 -3.40727955e-01 ... -2.77475446e-01 -5.96645117e-01 -8.72842610e-01] ... [ 3.61452579e-01 4.32526231e-01 -1.33583701e+00 ... 4.96564023e-02 3.65894288e-01 3.60490941e-02] [ 9.97635052e-02 1.11618340e+00 1.91956177e-01 ... -2.87889183e-01 3.30667704e-01 5.36883235e-01] [ 4.37244177e-01 -3.50519896e-01 1.10248506e+00 ... -8.76667202e-01 -8.99868906e-01 -6.79661512e-01]] [[-1.11645699e+00 5.55832744e-01 -2.50760256e-03 ... -6.00603104e-01 -1.81173496e-02 -2.47467145e-01] [ 9.48666036e-01 -5.03265783e-02 2.21209377e-01 ... -5.90975821e-01 -5.62086642e-01 6.98157251e-01] [ 2.04920530e-01 -1.05519307e+00 -2.78004497e-01 ... -1.76011407e+00 -4.33058858e-01 2.06455141e-01] ... [ 3.58240128e-01 -1.47579342e-01 5.78121364e-01 ... -5.27487993e-01 2.50270963e-01 3.46522212e-01] [ 1.91748843e-01 5.71024597e-01 -2.58942842e-01 ... 2.62882650e-01 -2.15693235e-01 3.99264812e-01] [ 5.22706993e-02 -4.29866463e-03 -6.36460364e-01 ... -5.39617240e-01 -5.15267968e-01 -5.75986981e-01]] [[-4.65262175e-01 5.83479941e-01 -6.69611216e-01 ... 5.94357431e-01 6.57637566e-02 3.90836060e-01] [-7.15238154e-02 1.23230386e+00 1.02294102e-01 ... 3.96635741e-01 -3.16906184e-01 -1.08940259e-01] [ 1.14285064e+00 8.44961047e-01 -3.47542137e-01 ... 3.62522393e-01 -3.61764997e-01 1.46906734e-01] ... [ 3.98443639e-02 6.82629466e-01 1.26137042e+00 ... 3.08275640e-01 -4.37733829e-01 3.97313386e-01] [ 6.33025110e-01 -8.30637515e-01 -1.80109479e-02 ... 
-5.31890512e-01 -1.83897112e-02 -1.35438055e-01] [ 7.30813593e-02 -9.35283527e-02 1.03806138e+00 ... -2.32516363e-01 2.83938885e-01 1.38120241e-02]] ... [[ 3.92836183e-01 -6.79755151e-01 4.27238755e-02 ... 3.88077064e-03 3.35901886e-01 8.43272388e-01] [ 7.67300010e-01 1.52656245e+00 -4.32536632e-01 ... 2.11878687e-01 8.95761311e-01 9.14610386e-01] [ 6.58229768e-01 3.19045335e-01 6.06120169e-01 ... -6.96838498e-01 -1.52030742e+00 5.49748003e-01] ... [ 4.66400623e-01 3.82697433e-02 3.92895699e-01 ... 1.32351547e-01 6.06333256e-01 2.86386639e-01] [-6.09155260e-02 -6.92404985e-01 -2.52350688e-01 ... 6.14223909e-03 7.29800642e-01 -1.36538994e+00] [ 4.74276215e-01 -1.03452079e-01 -8.64567399e-01 ... -5.09979188e-01 -8.71419162e-02 -1.04809608e-02]] [[-8.30922544e-01 -6.32586718e-01 -6.15695119e-01 ... -1.25671586e-03 1.36416399e+00 1.06662560e+00] [-4.39661145e-02 -2.92232126e-01 -3.48190337e-01 ... 1.39246322e-02 2.02294469e-01 3.58479023e-01] [-2.23881051e-01 1.14201105e+00 -1.55044883e-01 ... 3.86197418e-01 6.20736837e-01 -1.67006600e+00] ... [ 4.88305718e-01 -1.73500866e-01 6.65930986e-01 ... -9.38698590e-01 -2.21865818e-01 -1.25179766e-03] [-3.28872681e-01 4.87547427e-01 -3.17677706e-02 ... 1.22290456e+00 3.04969490e-01 -6.84476018e-01] [-2.15373300e-02 -1.27903655e-01 1.64603972e+00 ... -9.92589235e-01 3.15783858e-01 -4.05033857e-01]] [[ 1.80363491e-01 3.23790312e-01 2.79823035e-01 ... 4.36333776e-01 1.35174215e+00 5.04232273e-02] [ 8.24694157e-01 -3.33103806e-01 1.70589060e-01 ... -8.16783309e-01 -4.80059795e-02 3.88776302e-01] [-4.65104431e-01 -3.17740619e-01 -5.97435594e-01 ... -2.66638994e-01 9.13629159e-02 1.11740902e-01] ... [-1.03683436e+00 4.14628297e-01 -6.01467071e-03 ... 4.07871842e-01 4.04034197e-01 -2.32334241e-01] [-1.78179204e-01 -1.32236445e+00 -5.99079728e-01 ... 2.22164616e-01 6.06922269e-01 -2.01647782e+00] [-1.83021009e-01 1.00116050e-02 -4.63838965e-01 ... 
-3.86808783e-01 -2.69218355e-01 1.05434053e-01]]] [[[-2.09558815e-01 4.22687829e-01 -1.46955609e-01 ... 1.86960116e-01 -2.91399181e-01 -1.11798756e-01] [-3.91944200e-01 -6.35380685e-01 -1.64929748e-01 ... 8.19184780e-02 2.55243868e-01 -5.27481318e-01] [ 3.20896834e-01 -8.05046618e-01 -4.87900347e-01 ... 5.96569002e-01 2.30166852e-01 1.88089550e-01] ... [ 1.30685061e-01 1.52911082e-01 4.30608779e-01 ... -2.48982683e-01 -2.48987991e-02 4.10192043e-01] [-9.20484513e-02 5.71738243e-01 -1.04620248e-01 ... -1.71898693e-01 -5.21284521e-01 3.37007105e-01] [ 4.98260334e-02 5.62985986e-02 2.75662720e-01 ... 3.44234854e-02 -2.93885320e-01 -5.37471831e-01]] [[-2.88535833e-01 9.07959566e-02 -2.94159830e-01 ... 2.23237664e-01 -7.40859449e-01 2.62733042e-01] [-2.20988080e-01 4.04481925e-02 -1.26095116e-01 ... 3.35308760e-01 1.87004551e-01 1.19518097e-02] [-6.38685375e-02 -6.10363722e-01 2.20340252e-01 ... 6.74677407e-03 -4.15510833e-02 -3.74447078e-01] ... [ 5.15012303e-03 -1.32204086e-01 3.65400389e-02 ... 1.20178334e-01 -8.34550261e-01 -8.87792408e-02] [ 5.62740207e-01 -3.11895341e-01 -4.35325772e-01 ... -6.32520318e-01 4.86438014e-02 1.77112684e-01] [-4.74879771e-01 -1.53604835e-01 -3.37817967e-01 ... 3.14054877e-01 1.49324536e-01 7.85691068e-02]] [[ 5.30625820e-01 -6.88434020e-02 4.86910075e-01 ... -4.32235748e-01 -1.10676847e-01 -3.24019454e-02] [-3.94782387e-02 4.73471195e-01 -8.61925632e-02 ... -4.67380024e-02 -7.70075500e-01 7.93004259e-02] [ 6.06741011e-02 8.13458264e-02 2.21031815e-01 ... 1.45079732e-01 -3.17773074e-01 1.76740199e-01] ... [-2.58965820e-01 -1.40860662e-01 4.62105125e-01 ... -1.48026973e-01 2.14137957e-01 5.37225246e-01] [ 2.15288755e-02 8.76449406e-01 4.97773625e-02 ... -4.19539571e-01 9.86078009e-02 3.70845318e-01] [ 6.41075730e-01 -4.48934697e-02 4.75015968e-01 ... 2.19996907e-02 8.77427496e-03 3.07323158e-01]] ... [[-1.01651452e-01 3.01282465e-01 -6.74877524e-01 ... 
2.65347749e-01 -5.15190899e-01 9.94588509e-02] [ 1.12175055e-01 -1.09260874e-02 -1.24685585e-01 ... 1.61366209e-01 6.17919043e-02 3.22119832e-01] [ 9.99610275e-02 -6.07825071e-02 -1.30242839e-01 ... 9.45198834e-01 -6.79396987e-02 -3.04966271e-01] ... [-1.84809193e-02 4.80042458e-01 -2.32867554e-01 ... -5.56300879e-01 2.02384572e-02 2.75641046e-02] [-8.91903862e-02 1.47095546e-01 2.64128745e-02 ... -7.93294981e-02 -4.67095912e-01 -1.78478971e-01] [ 5.49282804e-02 -3.49151403e-01 -4.16636080e-01 ... 6.50649667e-01 -3.85507762e-01 1.24155611e-01]] [[-5.17916642e-02 -2.73439825e-01 5.48763610e-02 ... 3.16466689e-02 1.38640450e-02 -4.97421145e-01] [ 5.38469911e-01 6.67798072e-02 -3.49274307e-01 ... -2.63007015e-01 -1.25246242e-01 2.83277705e-02] [ 2.59046286e-01 -1.01588547e-01 9.93688107e-02 ... 4.00338657e-02 -1.71168998e-01 -3.56697798e-01] ... [-1.00974403e-02 -2.16158077e-01 3.75108331e-01 ... 4.22865093e-01 8.49185586e-02 -7.75454789e-02] [ 3.08798328e-02 -2.14557990e-01 1.21903948e-01 ... -4.87562597e-01 2.31870487e-01 3.32113951e-02] [ 8.66557509e-02 5.29144630e-02 -2.75975000e-02 ... 2.30812624e-01 1.67393222e-01 5.19415513e-02]] [[ 1.52583167e-01 1.05505988e-01 -3.03298282e-03 ... 1.69966489e-01 4.76083122e-02 -6.02195375e-02] [-5.67540109e-01 -4.14671265e-02 3.82306948e-02 ... 8.63571912e-02 3.31584930e-01 -6.79804265e-01] [-6.81709886e-01 -7.85518646e-01 -6.07292615e-02 ... -1.90178111e-01 3.35140795e-01 -1.54572189e-01] ... [-3.65497125e-03 2.09592640e-01 9.52492729e-02 ... 2.57814117e-02 2.46271975e-05 1.46011382e-01] [ 2.01586992e-01 4.69350696e-01 2.40631223e-01 ... 2.38635972e-01 -2.68481165e-01 -2.53360868e-01] [-3.13351184e-01 3.81059423e-02 -2.74132371e-01 ... -6.16743267e-01 9.31342483e-01 -3.13000709e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 5} - params:{'n_groups': 3, 'weights_shape': (6,)} ] | 0.11 | |
|
----------------------------- Captured stdout call ----------------------------- 5 graph(%self : __torch__.test_group_norm.___torch_mangle_4622.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : int = prim::Constant[value=2]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:101 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.7226 1.7217 -0.6393 1.4406 -2.4557 -0.3266 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=3]() %12 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, %5, %self.bias, %4) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) fw_re: [[[[[-1.77626759e-01 -2.38291904e-01 -4.04513359e-01 ... 
1.33515656e+00 3.01980734e-01 1.55284262e+00] [ 1.73995471e+00 4.75361317e-01 -1.70867398e-01 ... 7.75200009e-01 1.18665528e+00 8.80015016e-01] [ 8.74952376e-01 6.13911934e-02 -3.63905549e-01 ... -1.24092400e+00 1.08488358e-03 8.97237182e-01] ... [ 1.34902254e-01 -3.64241928e-01 -3.18384111e-01 ... -4.00321037e-01 -1.17650545e+00 2.13076800e-01] [ 1.08378023e-01 -2.11572230e-01 -4.17978734e-01 ... -1.27897352e-01 6.39782310e-01 -1.17731619e+00] [ 7.99322128e-01 -3.97198617e-01 -2.47386307e-01 ... 6.66625917e-01 -9.05413479e-02 -6.11880243e-01]] [[-2.77148396e-01 -1.53312415e-01 -8.31967354e-01 ... -1.00920939e+00 8.68662000e-01 7.18292773e-01] [-5.26018620e-01 -4.55071330e-01 -3.22617471e-01 ... -7.03930855e-01 6.99735880e-01 1.54503620e+00] [ 9.66768444e-01 2.11237341e-01 -1.57616806e+00 ... 5.01809657e-01 9.31377560e-02 -7.68855810e-01] ... [ 2.47643441e-01 -6.22014515e-02 -3.41518700e-01 ... 1.05578876e+00 7.71184146e-01 3.39081213e-02] [-1.29966962e+00 8.38049129e-02 2.70989984e-01 ... -8.92200395e-02 -3.32788408e-01 -7.81853318e-01] [-5.37465274e-01 1.71706164e+00 -8.59103084e-01 ... -1.15005493e-01 -4.51574773e-01 5.46616495e-01]] [[-3.84570569e-01 -2.73523837e-01 -1.18334219e-01 ... -9.13721025e-01 -9.08978283e-02 -7.24937320e-01] [-7.31913269e-01 -4.18869294e-02 8.14081073e-01 ... 4.68520969e-01 6.90089226e-01 1.57460228e-01] [-1.73146033e+00 7.38827705e-01 -7.52070397e-02 ... 4.51179892e-01 6.49814963e-01 -7.81925559e-01] ... [ 1.83759466e-01 3.50946695e-01 3.10657561e-01 ... 1.53557420e-01 -1.15173149e+00 -2.53989339e-01] [ 2.70434260e-01 1.52349129e-01 8.97461772e-01 ... 2.17212245e-01 4.57585067e-01 8.68731588e-02] [ 6.85305655e-01 -8.17391157e-01 -6.54970586e-01 ... 8.12999785e-01 1.09878317e-01 2.61236399e-01]] ... [[ 7.36920774e-01 -1.00300646e+00 8.74584615e-02 ... 6.74911082e-01 6.69222325e-02 -1.18136322e+00] [ 2.21742243e-02 -1.99957833e-01 -7.17774868e-01 ... 
4.85686094e-01 3.22525680e-01 -2.96172109e-02] [-2.25184500e-01 -1.90582469e-01 -4.14191812e-01 ... -8.27682912e-01 -8.21743608e-01 1.10679638e+00] ... [-7.06859767e-01 1.18749306e-01 -1.28661919e+00 ... 2.51692921e-01 -1.36545092e-01 -8.30406129e-01] [-8.72983813e-01 -6.51720285e-01 -1.57264426e-01 ... 4.45835352e-01 1.66334718e-01 9.03882742e-01] [ 1.18412244e+00 -5.57734728e-01 4.49071787e-02 ... 2.67673612e-01 1.60143584e-01 8.93787861e-01]] [[ 5.61559796e-01 1.76792055e-01 -2.00426683e-01 ... -3.20070326e-01 -6.09070539e-01 7.71019042e-01] [ 1.57046109e-01 9.27283689e-02 1.40985441e+00 ... 1.16920483e+00 -4.69844788e-01 1.01376820e+00] [-5.39118648e-01 1.74979794e+00 2.51591295e-01 ... -3.49329978e-01 -8.13848138e-01 -5.37557960e-01] ... [ 1.36828947e+00 1.92153692e-01 8.69816780e-01 ... -5.07263124e-01 4.07609344e-01 3.27758521e-01] [ 6.15082495e-02 -1.46048358e-02 1.11134434e+00 ... -5.18067837e-01 3.43923032e-01 7.05098629e-01] [ 2.32855175e-02 -9.15301561e-01 6.80295408e-01 ... -2.03500003e-01 1.81357312e+00 -1.68680757e-01]] [[ 4.32449520e-01 -3.42859894e-01 -6.50927961e-01 ... 3.11036915e-01 5.80186307e-01 -1.47369653e-01] [-6.26322865e-01 2.75089681e-01 -1.16424870e+00 ... -6.63571581e-02 -8.06478560e-01 8.12969029e-01] [-6.12623870e-01 3.98162037e-01 1.63829207e-01 ... -6.34607196e-01 -2.82684952e-01 2.54407704e-01] ... [ 7.70655930e-01 2.37091288e-01 -3.26610714e-01 ... -3.10643941e-01 4.98516828e-01 1.69497383e+00] [ 1.32780805e-01 -4.24256265e-01 5.25406778e-01 ... 1.40781462e-01 2.04546899e-01 1.35302916e-01] [-6.70364261e-01 1.09086680e+00 1.80855244e-02 ... -5.77592790e-01 4.85321224e-01 -4.64980245e-01]]] [[[-9.06656504e-01 -8.22658002e-01 1.70180276e-02 ... 2.56275255e-02 -1.99235356e+00 -8.31911147e-01] [ 2.91289330e+00 9.60198417e-02 6.48928225e-01 ... -2.24121422e-01 -4.55298811e-01 -1.57521045e+00] [-3.03768301e+00 3.65648913e+00 6.25887036e-01 ... 5.97677827e-01 1.06242120e+00 1.31521031e-01] ... 
[-3.81855488e-01 9.54791903e-01 -2.40382600e+00 ... 3.03543949e+00 -2.54268003e+00 -2.27249789e+00] [ 1.37113297e+00 -2.48832035e+00 1.95387328e+00 ... 1.15264153e+00 -1.73288119e+00 1.22544602e-01] [-1.68202448e+00 6.67668134e-02 2.10114765e+00 ... 4.70743227e+00 -2.31368855e-01 -5.71206033e-01]] [[ 4.90612239e-01 4.18514639e-01 8.83678079e-01 ... 1.17748022e-01 -5.25788188e-01 -6.92525581e-02] [-1.58658850e+00 3.83625597e-01 -1.50076747e+00 ... -1.95185685e+00 -1.00239837e+00 4.61030267e-02] [ 1.98124337e+00 1.94315422e+00 1.14890766e+00 ... 3.72288190e-02 4.02800703e+00 5.24644196e-01] ... [ 1.01359689e+00 4.38636988e-02 1.78086913e+00 ... -9.33351696e-01 1.29048908e+00 1.44406319e+00] [-1.79276800e+00 6.20944858e-01 -4.56220359e-02 ... -1.04197371e+00 -2.41186881e+00 1.81844253e-02] [ 1.98935187e+00 2.35783172e+00 -1.43439710e+00 ... -2.22510725e-01 5.84114611e-01 1.09749985e+00]] [[ 3.79472280e+00 -3.01497269e+00 -5.44570573e-02 ... -1.00470185e+00 2.17979103e-01 2.62547779e+00] [ 4.82939005e-01 -4.51569349e-01 -3.56275558e-01 ... -1.04856753e+00 -7.91194737e-01 -1.16876155e-01] [ 4.69964325e-01 -6.86144829e-01 3.06370521e+00 ... 1.26476121e+00 1.26680267e+00 -6.84955299e-01] ... [-1.30538201e+00 1.56972349e+00 1.45405680e-01 ... -8.66421461e-01 2.44252324e+00 -1.08926904e+00] [-4.56519461e+00 -1.55266225e-01 9.16755974e-01 ... 1.34184167e-01 -2.05197310e+00 -1.39693201e+00] [-8.27920251e-03 -1.81450880e+00 -1.90033948e+00 ... 2.88102508e-01 -1.28612959e+00 8.54435265e-02]] ... [[-1.50312388e+00 -6.97721601e-01 -1.34265232e+00 ... -2.16132832e+00 4.28057146e+00 2.88489312e-01] [-1.28872716e+00 9.84571099e-01 8.17447603e-01 ... 8.35028887e-02 -1.24535978e+00 9.76875842e-01] [-2.51709414e+00 -2.26903343e+00 -3.30233693e-01 ... 1.24290371e+00 -1.62191606e+00 -2.91197157e+00] ... [-1.24177158e+00 1.22492456e+00 -1.48632383e+00 ... 2.38006502e-01 6.57681048e-01 -8.09704602e-01] [-1.47636962e+00 -9.25818443e-01 2.12147713e+00 ... 
-1.15874541e+00 2.87555480e+00 -1.04331255e-01] [ 6.67994320e-01 -2.58183193e+00 8.51544857e-01 ... -4.52245802e-01 1.47346747e+00 1.78656232e+00]] [[ 1.50410867e+00 2.47748569e-01 -3.63711548e+00 ... -1.44067943e+00 7.33523488e-01 2.50641537e+00] [-1.27068952e-01 -3.54120350e+00 -2.32926273e+00 ... -1.45007288e+00 1.40527058e+00 1.40094960e+00] [ 3.61225462e+00 5.76993763e-01 1.19597919e-01 ... 1.77054539e-01 -3.74277115e-01 -1.18078983e+00] ... [ 1.74987093e-01 -6.48338974e-01 -6.14835262e-01 ... -1.58089924e+00 4.99794155e-01 -6.00557983e-01] [-6.34497106e-01 2.18034291e+00 -3.81881893e-01 ... 1.61352074e+00 -9.88226414e-01 2.32786383e-03] [ 2.40672088e+00 -2.76416349e+00 2.25184870e+00 ... 2.28382707e+00 -3.30045015e-01 -4.56910276e+00]] [[-6.24848306e-01 -1.25117505e+00 -4.02999610e-01 ... -3.45459789e-01 -1.48115802e+00 -9.66167867e-01] [ 4.49620038e-01 -1.99212635e+00 -2.18727255e+00 ... -4.35595274e-01 2.14171791e+00 4.34719086e+00] [-4.93089020e-01 2.65187550e+00 -3.32565522e+00 ... -3.02415419e+00 -6.71195924e-01 -8.91695142e-01] ... [ 2.84997106e-01 1.60657430e+00 1.47610772e+00 ... -9.37779129e-01 4.28629965e-01 2.68337756e-01] [ 1.14955902e+00 2.12231874e+00 1.82521653e+00 ... 1.30495071e+00 1.38120198e+00 3.60864103e-01] [-1.31661654e+00 2.29158926e+00 3.96462440e+00 ... -3.00291538e+00 1.96733046e+00 1.46506226e+00]]] [[[-9.72130179e-01 -4.66009453e-02 -5.63814104e-01 ... 1.72447950e-01 -2.99488693e-01 5.06914198e-01] [ 1.69688269e-01 -8.47109973e-01 1.04007936e+00 ... -4.57777709e-01 -5.78668714e-01 -7.25681126e-01] [-8.49641144e-01 4.65480626e-01 -3.30160707e-01 ... 1.26562798e+00 -3.54904443e-01 -5.45955956e-01] ... [-8.68996918e-01 1.87594146e-01 3.90374847e-02 ... -5.19485474e-01 3.84261638e-01 -2.70032734e-01] [ 4.92696643e-01 9.81698096e-01 1.45990744e-01 ... 2.86552876e-01 -8.56647372e-01 -1.97407991e-01] [ 3.58567029e-01 2.31056035e-01 -4.67721850e-01 ... 
-2.94919163e-01 1.03524208e+00 -1.11412966e+00]] [[-5.79593480e-01 -2.05362648e-01 2.18357295e-02 ... -1.37989610e-01 9.84268904e-01 -1.16497552e+00] [ 3.68479550e-01 9.13148820e-01 -6.13148510e-01 ... -4.70598303e-02 -2.82669276e-01 1.88701041e-02] [-2.74031550e-01 -1.73315212e-01 -5.45372128e-01 ... 6.51076615e-01 8.45885992e-01 1.13958549e+00] ... [-1.50546744e-01 -1.25093979e-03 -3.30701500e-01 ... 9.94182885e-01 9.00308907e-01 -3.25456083e-01] [-2.27671340e-01 -9.68490481e-01 -8.28400671e-01 ... 7.72839367e-01 -6.29830360e-01 6.55878782e-01] [ 5.73312402e-01 8.62536550e-01 2.17398144e-02 ... -5.74501038e-01 4.04944003e-01 7.50927508e-01]] [[-1.50788218e-01 6.58882260e-01 6.69044927e-02 ... -2.65026480e-01 -1.90321410e+00 -3.88619959e-01] [ 1.61826491e-01 4.23633218e-01 -4.84254211e-02 ... 1.35330713e+00 7.84093499e-01 -4.22370791e-01] [ 7.85476625e-01 -3.60557228e-01 -4.22787309e-01 ... -8.22295606e-01 7.44642496e-01 -2.15564534e-01] ... [-7.07310915e-01 -7.32093930e-01 -3.07022691e-01 ... 1.41337037e+00 4.58285868e-01 7.68832803e-01] [-8.85780156e-02 8.64155173e-01 5.81002176e-01 ... 4.62490231e-01 -4.57435906e-01 7.46261105e-02] [ 4.89070028e-01 7.06611052e-02 3.27493161e-01 ... -4.13938433e-01 -2.83802658e-01 6.24621570e-01]] ... [[-4.72494066e-01 -4.73967284e-01 3.81626606e-01 ... 5.49270868e-01 -7.03209639e-02 -9.40203726e-01] [ 6.49188519e-01 1.26530182e+00 2.88975239e-01 ... -1.03873384e+00 6.07720017e-01 3.79738003e-01] [ 4.43050921e-01 4.95056957e-01 -1.29796997e-01 ... 2.57700652e-01 4.43663836e-01 -1.01091780e-01] ... [-9.30341780e-01 -1.97185650e-01 1.11747730e+00 ... -5.67605615e-01 -6.88256681e-01 -7.09252715e-01] [-2.27897421e-01 -6.30932599e-02 7.96566308e-02 ... 5.90043604e-01 -5.78431308e-01 -2.32402459e-01] [-5.37685491e-02 -3.44960749e-01 -1.28419268e+00 ... 1.13426614e+00 -4.94678169e-01 -5.63733935e-01]] [[ 4.70441252e-01 3.57588381e-01 -1.16271935e-01 ... 
-1.55167043e-01 -9.95667204e-02 -4.02691126e-01] [ 1.67498946e-01 -8.52132797e-01 4.30395246e-01 ... -6.52867615e-01 6.27452612e-01 -2.32394919e-01] [-6.60366714e-01 7.83940136e-01 5.04408121e-01 ... 1.06263697e+00 3.12874585e-01 -6.57897234e-01] ... [ 1.50179910e-02 -2.27171466e-01 -4.67116013e-02 ... -8.91725272e-02 3.36913228e-01 1.90862399e-02] [ 4.08560425e-01 1.37786686e+00 1.09488773e+00 ... 3.93731333e-02 -1.17978618e-01 -5.18769994e-02] [ 4.37766343e-01 1.75975591e-01 5.13955712e-01 ... -2.81928301e-01 1.31390333e-01 1.86972424e-01]] [[-8.53108585e-01 -1.80732131e-01 4.70738918e-01 ... 2.03865170e-01 9.45718810e-02 -1.63402706e-01] [-8.58174145e-01 6.42067075e-01 -6.23256803e-01 ... -9.31625441e-02 7.16665149e-01 -2.74204642e-01] [-1.24244735e-01 7.65608132e-01 -3.41522470e-02 ... -1.71927154e+00 5.84070385e-01 1.21355331e+00] ... [-7.09509254e-01 1.78538561e-01 -1.45731777e-01 ... 4.81561214e-01 8.87333155e-01 4.57768321e-01] [ 1.34674621e+00 -1.12772882e+00 -1.39036727e+00 ... 3.18480164e-01 -7.93052018e-01 4.48791414e-01] [ 5.68860054e-01 -7.66566038e-01 4.86017913e-01 ... 4.04441237e-01 6.97579086e-01 -2.73501217e-01]]] [[[ 1.10768998e+00 -1.34816694e+00 -1.86143482e+00 ... -7.16814816e-01 1.01851046e+00 1.53653920e+00] [-4.25022185e-01 1.64046633e+00 1.64052892e+00 ... 4.12585616e-01 5.03295839e-01 -1.10333860e+00] [-4.22697812e-01 1.14063323e+00 -2.61563212e-01 ... -8.10276344e-02 2.06021690e+00 3.23170602e-01] ... [ 1.02813375e+00 1.90530524e-01 -4.62009996e-01 ... -6.80671215e-01 -4.23452020e-01 -9.23316836e-01] [-1.58244669e+00 1.83897388e+00 3.17272365e-01 ... 9.94121253e-01 1.01899672e+00 6.07300937e-01] [ 2.88110065e+00 1.04040408e+00 -1.87714207e+00 ... 4.05140579e-01 -5.51027656e-01 -4.59051412e-03]] [[ 1.22295749e+00 -8.11361313e-01 3.74026942e+00 ... 3.51964146e-01 1.14581871e+00 5.11604309e-01] [-4.03262854e-01 -6.67899132e-01 -7.50943899e-01 ... 
4.14282046e-02 -3.03251600e+00 -2.07998753e+00] [-1.28693476e-01 1.88834143e+00 9.94118989e-01 ... 9.45124388e-01 1.65974945e-02 8.63057137e-01] ... [ 5.02211392e-01 1.35702837e+00 1.88538134e-01 ... 1.69560552e-01 -1.03697097e+00 -6.83024406e-01] [ 1.63723636e+00 2.65442967e+00 -1.42158544e+00 ... -2.30503842e-01 -2.56390500e+00 -1.56496942e+00] [ 2.71503043e+00 8.86115372e-01 2.32592940e-01 ... 7.69684076e-01 6.41476035e-01 -2.76015937e-01]] [[-2.65420794e+00 7.12052584e-01 1.00781643e+00 ... 9.14475739e-01 -1.93807244e-01 1.69966078e+00] [-1.22768927e+00 -1.77667093e+00 1.34083092e+00 ... -2.74319386e+00 3.31153584e+00 2.37192988e-01] [-9.58931744e-02 -1.49935496e+00 4.34137315e-01 ... 9.99385893e-01 -1.00965095e+00 -1.40278196e+00] ... [ 9.54448640e-01 -8.91184509e-01 -1.67392480e+00 ... 8.49772811e-01 -3.84832583e-02 5.56692839e-01] [ 1.96100736e+00 1.80185884e-01 2.28485078e-01 ... -2.29039001e+00 -1.55165935e+00 2.54019618e+00] [ 8.13814938e-01 6.58817217e-02 3.90801698e-01 ... 1.51051378e+00 -2.33646679e+00 -1.39557183e+00]] ... [[ 7.21988603e-02 8.12079728e-01 -9.11662355e-03 ... -2.07566214e+00 -3.03298682e-01 2.51250172e+00] [-1.02167821e+00 7.17373133e-01 -1.78728962e+00 ... -8.28391492e-01 -9.40048337e-01 -2.68514305e-01] [ 2.27554584e+00 -1.29643095e+00 1.54249823e+00 ... 1.44238755e-01 8.71286213e-01 4.51609164e-01] ... [-3.87456447e-01 7.11226642e-01 4.31958485e+00 ... -6.31982982e-01 1.81455743e+00 7.74746060e-01] [ 2.63906568e-01 1.06027663e+00 1.30495453e+00 ... -2.12441444e+00 1.00905252e+00 1.95673728e+00] [-1.53001845e+00 -2.61159611e+00 -1.16358709e+00 ... 1.12655187e+00 6.41204119e-01 8.49835277e-01]] [[-9.03697014e-01 2.01023841e+00 -2.17572427e+00 ... -1.00401891e-02 -1.41670179e+00 2.07737541e+00] [ 2.61415029e+00 -3.06661189e-01 1.71719626e-01 ... 1.90989125e+00 -8.31483185e-01 1.09991086e+00] [ 6.79438472e-01 1.93679667e+00 -3.56564045e-01 ... -2.40729973e-01 2.36578035e+00 5.03569186e-01] ... 
[ 1.99634969e+00 7.95080364e-01 -1.20292163e+00 ... 7.94958472e-02 3.79002261e+00 -1.49215281e+00] [-9.51217711e-01 -3.30452651e-01 -6.65687263e-01 ... -2.26077318e+00 -3.97551346e+00 7.00015485e-01] [-3.65436196e+00 -2.00041413e-01 1.41564950e-01 ... -1.56939435e+00 -2.54023075e+00 -3.67630291e+00]] [[ 2.52665788e-01 -1.30913699e+00 -5.12696952e-02 ... 1.28820682e+00 8.06730211e-01 -2.01047087e+00] [ 3.35679460e+00 -2.52338886e+00 1.47496891e+00 ... -3.19355392e+00 8.56545985e-01 1.37738383e+00] [ 9.23820615e-01 -2.43944749e-02 -7.87621915e-01 ... 2.51886129e-01 8.43459964e-01 -1.34967238e-01] ... [ 9.00332510e-01 2.04954219e+00 -2.06753016e+00 ... -1.16249554e-01 -1.61491632e-02 1.67394245e+00] [ 1.89270079e+00 6.96350813e-01 -2.27839068e-01 ... 9.18278992e-01 1.96113586e+00 8.16334903e-01] [-2.32911634e+00 -1.13768554e+00 -7.13574529e-01 ... -3.14367592e-01 -5.69891989e-01 1.28874099e+00]]] [[[-2.52736831e+00 3.07939100e+00 1.11217380e+00 ... 3.10438466e+00 -2.67321134e+00 -2.88334399e-01] [ 1.10630035e+00 -2.03373957e+00 9.85636413e-01 ... 3.55357003e+00 -1.39247310e+00 1.02339017e+00] [ 1.85400915e+00 2.26583648e+00 1.03112769e+00 ... -8.09213042e-01 -3.20898145e-01 3.11298442e+00] ... [-3.71369863e+00 3.21303892e+00 7.77475119e-01 ... -2.98810506e+00 -1.46856058e+00 -2.57612920e+00] [-8.75835121e-01 2.48734191e-01 2.41814661e+00 ... 4.66239899e-01 -6.31791878e+00 -2.29605699e+00] [-2.04742622e+00 -4.61646527e-01 3.24827492e-01 ... -1.28862843e-01 -1.10768723e+00 -1.14462614e+00]] [[-2.16195774e+00 -2.96598649e+00 2.93360263e-01 ... -3.58396322e-01 -4.12423563e+00 -2.17150474e+00] [ 6.06225133e-01 -2.07028937e+00 -4.72165734e-01 ... -2.64262152e+00 1.06659067e+00 -2.03367329e+00] [ 1.41612303e+00 -3.76888609e+00 5.52246618e+00 ... -2.04517748e-02 1.88974679e+00 -4.98005772e+00] ... [ 1.10719211e-01 -1.40490484e+00 1.52182043e+00 ... 1.18456006e+00 2.43377471e+00 1.81947976e-01] [ 4.19740963e+00 9.46398020e-01 -1.06249440e+00 ... 
-1.24487352e+00 2.14563990e+00 -5.11292887e+00] [ 1.72749770e+00 -4.50291425e-01 4.10369968e+00 ... 1.05718696e+00 -1.08897120e-01 2.80134225e+00]] [[ 8.07000697e-01 1.91124117e+00 -2.15791035e+00 ... -3.74224353e+00 -4.98302317e+00 9.81558561e-02] [-9.27000284e-01 -1.99920034e+00 -3.27080131e+00 ... -1.63683498e+00 -1.62903190e+00 1.51484001e+00] [-1.01567531e+00 -6.90103695e-02 1.34372497e+00 ... -3.58662277e-01 -4.74997580e-01 -3.87420326e-01] ... [-1.19320095e+00 -3.87586641e+00 1.55292404e+00 ... 3.58437967e+00 1.18474633e-01 -1.47476268e+00] [-3.70545030e+00 2.51464653e+00 -4.21180630e+00 ... -8.55864227e-01 -2.10022020e+00 1.51831460e+00] [ 2.99937397e-01 -4.53914374e-01 1.97738194e+00 ... -2.03376341e+00 4.21406317e+00 -1.53708756e+00]] ... [[ 1.95742869e+00 -6.75542414e-01 -3.98217887e-01 ... -1.63859737e+00 2.78021336e+00 3.03110301e-01] [-2.34154463e-01 2.63118553e+00 1.29423845e+00 ... -9.45462942e-01 2.15494084e+00 -1.11052895e+00] [ 5.07614517e+00 6.74365222e-01 -1.47895539e+00 ... -2.49753046e+00 1.47861946e+00 -3.23346138e+00] ... [-6.52712762e-01 -2.80561948e+00 -1.66757143e+00 ... -3.09588969e-01 -2.38118315e+00 3.11500597e+00] [-2.03713030e-01 -2.18722844e+00 -3.52202582e+00 ... 2.51772690e+00 2.98007274e+00 2.42625785e+00] [ 3.00011301e+00 1.23538089e+00 -9.95789826e-01 ... -3.82764912e+00 -1.14660621e+00 -5.48378181e+00]] [[ 1.53311276e+00 -2.37610221e+00 -1.28000772e+00 ... -5.59697294e+00 6.31893539e+00 1.40412521e+00] [ 1.17071283e+00 -1.60308194e+00 1.67201348e-02 ... 3.09072065e+00 -2.10606024e-01 -2.08384037e+00] [ 6.72415018e+00 -8.81778538e-01 -9.65383887e-01 ... -9.67361331e-01 -9.85018730e-01 -2.34458041e+00] ... [ 3.13465786e+00 -4.20871162e+00 9.29193556e-01 ... -1.68342900e+00 3.07458138e+00 6.23244333e+00] [-1.90413490e-01 -3.71488428e+00 -1.50868595e+00 ... -4.35562849e-01 -2.30114865e+00 2.53352785e+00] [-1.00610125e+00 1.37322354e+00 6.76192939e-01 ... 
-1.90981400e+00 4.23586480e-02 -1.48760355e+00]] [[ 3.58834267e-01 8.52736592e-01 4.26800579e-01 ... -1.68989491e+00 -1.82481062e+00 4.13589478e+00] [-1.02738559e+00 3.04690599e+00 -1.53544807e+00 ... -6.56575322e-01 1.00310564e+00 8.54071736e-01] [ 2.38780689e+00 3.10890794e+00 3.80511236e+00 ... 1.16729617e+00 -4.11840260e-01 1.83067396e-01] ... [-1.93403900e+00 3.28458524e+00 -4.00055027e+00 ... 4.61867809e+00 4.56894696e-01 -1.03881884e+00] [ 7.61732996e-01 -1.06902587e+00 3.99176478e-01 ... 4.62070751e+00 -9.76729095e-01 3.15363789e+00] [ 3.24300528e+00 -1.99297714e+00 -1.13885868e+00 ... -5.37277031e+00 -5.37019789e-01 1.83137372e-01]]] [[[-1.77090570e-01 5.20599127e-01 -2.98372924e-01 ... 2.39342287e-01 -3.96033078e-02 3.19968253e-01] [-4.14680064e-01 -1.64003119e-01 4.74968076e-01 ... -4.15256053e-01 -8.20733905e-02 -5.18708944e-01] [ 9.57718641e-02 3.29706103e-01 -1.71766102e-01 ... 2.82663628e-02 -3.12918365e-01 5.81570342e-02] ... [ 2.02675626e-01 -1.73794225e-01 3.72721702e-01 ... 4.06735212e-01 3.59459490e-01 5.36781773e-02] [ 4.62195337e-01 -3.83627675e-02 3.34754318e-01 ... -6.74062610e-01 2.27839306e-01 1.99035704e-01] [-3.31456721e-01 -5.53330779e-01 6.43289760e-02 ... -1.17189392e-01 -1.54119760e-01 6.87410384e-02]] [[ 3.26478839e-01 -1.40321672e-01 3.48341972e-01 ... 7.92611297e-03 -7.35721216e-02 1.91958070e-01] [ 8.63766074e-01 -4.00936812e-01 2.62118638e-01 ... -3.58274072e-01 -1.23913132e-01 4.65110332e-01] [ 5.37626565e-01 -5.58525145e-01 5.14546931e-01 ... -6.43777549e-02 7.60237932e-01 6.43154830e-02] ... [-1.98924825e-01 4.40302014e-01 -9.07398984e-02 ... -1.24805599e-01 -4.01263535e-01 1.20411903e-01] [-1.37871087e-01 3.71139288e-01 3.66381615e-01 ... -3.08543831e-01 1.58304304e-01 -1.83178961e-01] [ 3.14307392e-01 1.42576456e-01 5.41155219e-01 ... -4.54895079e-01 -8.06020975e-01 -2.10320085e-01]] [[ 2.26902589e-03 5.59852302e-01 -1.75255433e-01 ... 
-1.89101264e-01 4.75745946e-01 -2.58348256e-01] [-2.77636588e-01 2.86998928e-01 3.89534503e-01 ... -2.09508926e-01 8.21132213e-02 9.20757800e-02] [ 1.68193415e-01 -1.99508786e-01 3.67902130e-01 ... -2.14744806e-02 -2.23730922e-01 -5.39228559e-01] ... [-3.09053421e-01 -6.07605040e-01 -4.18234527e-01 ... -5.08225143e-01 4.65232879e-01 -2.28569098e-02] [ 1.84076265e-01 -1.74576193e-01 2.41944268e-01 ... -7.26104155e-02 -4.23758738e-02 3.83530051e-01] [-2.37736732e-01 1.71581671e-01 1.01312622e-01 ... 2.10450992e-01 4.73510355e-01 -9.03272405e-02]] ... [[-4.66764241e-01 2.89419323e-01 2.67778844e-01 ... -2.32236519e-01 4.22161251e-01 7.08558857e-01] [-4.35382277e-01 -3.57927918e-01 -3.90192956e-01 ... -2.98024803e-01 5.40399134e-01 5.71758188e-02] [ 4.88948345e-01 5.99944480e-02 1.77617773e-01 ... -4.38728839e-01 -2.43245274e-01 -2.65232950e-01] ... [-5.05977869e-01 2.16327623e-01 4.17691886e-01 ... -4.98463601e-01 3.10831159e-01 -2.04730436e-01] [ 2.53352910e-01 1.53662220e-01 -3.00393134e-01 ... -3.89416397e-01 -5.91210365e-01 8.14720839e-02] [-4.35232282e-01 2.32971609e-01 -1.21318828e-02 ... 2.65375227e-01 -1.67893633e-01 1.34123710e-03]] [[-6.38181120e-02 3.01120967e-01 -1.51388183e-01 ... 1.61445874e-03 1.59042016e-01 1.83781058e-01] [ 1.16783924e-01 5.59348166e-01 -9.11101028e-02 ... -2.58037269e-01 8.10764730e-02 2.04190910e-01] [ 3.38770509e-01 2.02139601e-01 2.86340952e-01 ... 3.43847781e-01 3.72971147e-01 -1.06280118e-01] ... [-6.82958424e-01 -2.81132877e-01 -8.98470450e-03 ... -2.53052980e-01 7.11487591e-01 2.49039903e-01] [-3.59074593e-01 2.91793868e-02 3.49730879e-01 ... -1.76711857e-01 -5.98720424e-02 -7.50632465e-01] [-1.94179073e-01 -3.90515417e-01 -2.79271781e-01 ... 3.29962879e-01 2.51527876e-01 2.12291881e-01]] [[ 5.48288643e-01 5.00997342e-02 -1.55756414e-01 ... 1.40966520e-01 4.06620860e-01 3.76716673e-01] [-8.37226138e-02 1.13258377e-01 5.50202355e-02 ... 
-2.20644191e-01 1.80408150e-01 -9.14122760e-02] [ 5.85433142e-03 -2.32993156e-01 2.15774089e-01 ... -4.44608808e-01 3.05144608e-01 4.07901078e-01] ... [ 5.81986427e-01 1.42690226e-01 1.24992937e-01 ... -2.25411072e-01 -2.72200052e-02 1.21704340e-01] [-5.38768917e-02 1.91055357e-01 -1.31013125e-01 ... 4.15110499e-01 -1.45254388e-01 -3.02066207e-01] [ 3.51665586e-01 2.97606349e-01 -4.55150694e-01 ... 1.05167054e-01 -3.64139140e-01 3.08424890e-01]]]] [[[[ 2.01231927e-01 -1.92162856e-01 -1.48120594e+00 ... 1.50668278e-01 1.48550838e-01 4.75331247e-01] [ 4.94993120e-01 -2.76029766e-01 3.52568090e-01 ... -1.92360565e-01 3.43665600e-01 2.81374604e-01] [-2.81515211e-01 -6.20461702e-01 -2.75062621e-01 ... -5.69907501e-02 5.18457964e-02 -6.40174925e-01] ... [-5.27582765e-01 -1.41090766e-01 -6.93083644e-01 ... 6.86133265e-01 -1.84295821e+00 -9.33304787e-01] [-1.05447724e-01 -2.40263343e-01 -2.20342591e-01 ... 5.74082792e-01 -7.41392493e-01 -1.56628847e-01] [-3.09843719e-01 9.82178152e-02 -1.91124707e-01 ... 2.26895332e-01 2.47885853e-01 5.97489715e-01]] [[ 8.21494520e-01 4.19799358e-01 -8.94899547e-01 ... 1.58852458e-01 -2.75800824e-01 -3.19713920e-01] [ 3.94723684e-01 2.60947108e-01 9.22801614e-01 ... 5.29673278e-01 -1.25587213e+00 5.65199316e-01] [-1.53252214e-01 2.07097679e-01 1.13198006e+00 ... -7.32970655e-01 -1.23833098e-01 1.80569366e-01] ... [-9.42236066e-01 1.06181324e+00 1.47863507e+00 ... 3.06557119e-01 -1.37517095e+00 2.66711831e-01] [ 1.26883006e+00 -2.76514977e-01 1.07706225e+00 ... 1.42970473e-01 1.33778310e+00 1.00180960e+00] [ 8.36915374e-02 5.07779941e-02 -9.89288747e-01 ... -3.97585273e-01 8.13950956e-01 5.32475889e-01]] [[ 1.18834066e+00 7.77328253e-01 -4.34970140e-01 ... 9.59948719e-01 -3.12073920e-02 -8.74635398e-01] [ 4.63027090e-01 -1.22193968e+00 -1.17245764e-01 ... -1.41174570e-01 8.15396369e-01 6.25258923e-01] [-1.96429491e-01 -7.19794869e-01 -8.19691420e-02 ... 9.39239919e-01 -8.49890336e-02 4.20775026e-01] ... 
[-1.62636769e+00 -6.58561662e-02 2.20669657e-01 ... -9.63370144e-01 -1.01942050e+00 -1.08938646e+00] [-2.25433558e-01 7.26289392e-01 4.22944009e-01 ... -1.22539377e+00 -1.63140848e-01 4.02976304e-01] [-1.22049274e-02 -4.34091896e-01 -1.30516636e+00 ... 3.30460399e-01 -1.09811351e-01 1.64760128e-01]] ... [[-1.05926895e+00 -1.98647261e+00 1.05749346e-01 ... -3.79309535e-01 3.27418387e-01 2.48425022e-01] [-1.93527505e-01 1.45193946e+00 9.65121627e-01 ... -9.09677148e-01 2.88174063e-01 -1.06366873e+00] [ 3.09048504e-01 5.50241530e-01 5.99478066e-01 ... -1.10180378e+00 2.23544866e-01 7.56940663e-01] ... [ 3.60400915e-01 -3.85624677e-01 -3.47315013e-01 ... 1.41471767e+00 -8.69007051e-01 -9.68419433e-01] [ 4.29624170e-01 1.29315245e+00 -4.58434939e-01 ... 1.03937531e+00 6.45777643e-01 1.65067625e+00] [-1.00477731e+00 4.73336615e-02 7.14409709e-01 ... 1.54285312e-01 1.76725256e+00 -5.66494942e-01]] [[-2.78333634e-01 -3.88887167e-01 8.64121974e-01 ... -6.08992100e-01 -8.14060271e-01 -7.36668050e-01] [ 1.30418682e+00 -6.65458262e-01 3.76516104e-01 ... 2.61812657e-01 -1.52609497e-01 4.64601308e-01] [-3.05826455e-01 -5.07838249e-01 -1.61484852e-01 ... 6.65994659e-02 -6.94292903e-01 7.13607311e-01] ... [-5.51147878e-01 4.53004181e-01 -5.50405025e-01 ... -6.65649295e-01 8.33264351e-01 9.74442586e-02] [ 2.61368185e-01 -1.29845583e+00 1.29169655e+00 ... -1.34483659e+00 3.44863757e-02 2.99196154e-01] [-1.32100451e+00 -1.53969216e+00 3.41030061e-01 ... -7.56559670e-01 -3.45809668e-01 -1.35573614e+00]] [[ 6.12985551e-01 4.10975009e-01 -6.69467032e-01 ... -1.58819950e+00 -6.37451947e-01 8.25923502e-01] [ 1.62931427e-01 7.84829319e-01 -3.27618629e-01 ... -6.94549024e-01 6.81957006e-01 3.21237445e-01] [-1.22793055e+00 6.36824608e-01 1.03669956e-01 ... 7.55360663e-01 -3.27990890e-01 -1.41843712e+00] ... [ 7.14378655e-02 5.92137754e-01 2.80699462e-01 ... -6.58006132e-01 1.09050441e+00 1.09053016e+00] [-5.90853870e-01 5.33984423e-01 -5.05057096e-01 ... 
-3.62177759e-01 2.87103802e-01 -5.27882755e-01] [ 3.96522492e-01 -7.48253047e-01 -1.22834420e+00 ... 1.21871102e+00 2.93445438e-01 -8.44939888e-01]]] [[[ 2.88360119e+00 -9.88591969e-01 1.01382458e+00 ... -1.21066904e+00 1.36381403e-01 1.98772502e+00] [ 9.67631042e-01 3.60846400e-01 2.57041073e+00 ... -3.38908076e-01 -6.21668279e-01 5.26618123e-01] [-2.25631404e+00 2.09845734e+00 -8.24533582e-01 ... 2.91946650e+00 -1.73571205e+00 1.42713499e+00] ... [ 3.82914037e-01 -2.51105213e+00 -1.47035062e-01 ... 1.66116104e-01 -1.47997785e+00 -9.97731447e-01] [ 8.58864963e-01 5.90926468e-01 3.07041478e+00 ... -1.52434552e+00 -1.57691038e+00 -7.66509056e-01] [ 1.07910410e-01 -1.48183751e+00 -2.67946386e+00 ... 2.00698566e+00 -1.03741956e+00 -9.37418997e-01]] [[-1.63995731e+00 -1.54635322e+00 -1.00172913e+00 ... 1.58365178e+00 -6.56539261e-01 3.08471113e-01] [ 8.43019307e-01 -2.15516591e+00 1.39442718e+00 ... -9.33493316e-01 -2.47416139e-01 -2.80887693e-01] [ 1.89288175e+00 -8.22911620e-01 -3.88099164e-01 ... -1.29716873e+00 2.17103934e+00 -1.75274837e+00] ... [ 3.47153354e+00 -2.55881101e-01 2.84677088e-01 ... -8.79100680e-01 7.08387971e-01 -2.57901371e-01] [-1.97868562e+00 5.24369836e-01 1.97570717e+00 ... -2.60498255e-01 -1.15056884e+00 1.86571109e+00] [-1.36123931e+00 -2.35126829e+00 -6.17906332e-01 ... -7.36239195e-01 3.89128178e-01 6.20526969e-01]] [[-1.46608746e+00 -1.14606631e+00 -2.20155692e+00 ... 1.05475676e+00 2.63689113e+00 -2.21108884e-01] [ 1.74706960e+00 6.27482474e-01 3.38426971e+00 ... -4.33435857e-01 -9.11608636e-01 -9.48822200e-01] [ 1.71310425e+00 5.23764312e-01 -2.60403544e-01 ... 9.67840195e-01 2.78926444e+00 1.58387375e+00] ... [-2.70625162e+00 5.00137389e-01 7.67796814e-01 ... -3.85944813e-01 -1.15087497e+00 -1.40846416e-01] [ 2.61517048e+00 -5.32837331e-01 1.62797344e+00 ... 3.37606478e+00 1.60477734e+00 -8.50149155e-01] [-1.14358342e+00 -8.27027500e-01 6.91878982e-03 ... -1.49396226e-01 -2.82717443e+00 -2.08594298e+00]] ... 
[[ 1.25810921e+00 -1.93534362e+00 7.78474748e-01 ... -1.34289610e+00 -3.01102734e+00 -1.78873730e+00] [ 3.90033066e-01 2.77986622e+00 8.87179673e-02 ... -2.07333946e+00 -1.29460084e+00 -4.63105500e-01] [-1.62355125e+00 -1.52191293e+00 -3.61767244e+00 ... -1.00976843e-02 2.18654561e+00 -1.67144462e-01] ... [-1.29132473e+00 -3.86739582e-01 2.62272656e-01 ... 2.24881268e+00 2.15237665e+00 -1.52895689e+00] [-1.42204273e+00 3.36759233e+00 1.22767186e+00 ... 3.75142023e-02 -1.50723624e+00 -9.44366813e-01] [ 1.31960120e-02 -3.11914325e-01 3.15748906e+00 ... 6.04075864e-02 4.18973923e+00 1.98197380e-01]] [[ 5.70912659e-01 -3.31151187e-01 1.02487993e+00 ... 2.48727901e-03 -3.11826795e-01 -1.68085933e+00] [ 5.67007005e-01 3.49788100e-01 1.30654550e+00 ... 1.72104585e+00 -2.61156261e-02 8.34562063e-01] [ 5.53998232e-01 1.45598257e+00 -1.55303478e+00 ... -1.72275162e+00 3.40475345e+00 1.46764898e+00] ... [ 1.28414547e+00 -1.15551853e+00 -3.15616816e-01 ... 2.70990580e-01 1.68623403e-01 1.59431443e-01] [-1.50074232e+00 8.31961393e-01 2.23303616e-01 ... 2.17552230e-01 -2.17631355e-01 4.84878868e-01] [-3.05494452e+00 -4.75283861e-01 -2.18245789e-01 ... -3.49551708e-01 -5.60519159e-01 -6.66970193e-01]] [[ 5.30942202e-01 -2.50970936e+00 7.49587268e-02 ... 1.89068353e+00 2.88072634e+00 1.78405568e-01] [-6.71329677e-01 4.13954258e+00 -3.38635492e+00 ... 5.09880662e-01 -2.82439423e+00 1.43035531e+00] [ 2.43470263e+00 -5.40432751e-01 -1.05624415e-01 ... -9.14384604e-01 -1.12806475e+00 -1.34982884e+00] ... [-1.02418256e+00 9.81437445e-01 1.49642098e+00 ... 1.87264621e+00 4.23441648e+00 -1.06855680e-03] [ 3.34131455e+00 1.46896303e+00 -2.31039691e+00 ... 1.63479018e+00 -4.13594007e-01 -2.32907200e+00] [-1.19108267e-01 -7.00473785e-01 -2.23183560e+00 ... -2.00509459e-01 -1.57289791e+00 -2.85070753e+00]]] [[[-4.72081006e-01 1.03450644e+00 -2.45685756e-01 ... -2.37840060e-02 -7.07481727e-02 6.51587129e-01] [ 1.69533694e+00 -7.12370455e-01 6.83334112e-01 ... 
-1.95609286e-01 2.81566173e-01 5.11645436e-01] [ 2.28314251e-01 -3.65696073e-01 1.70962781e-01 ... 7.67796159e-01 -3.12538398e-03 -2.06290260e-01] ... [ 4.41054702e-01 6.12152480e-02 -2.24500805e-01 ... 4.94303197e-01 8.92992973e-01 -4.03998315e-01] [-1.63322914e+00 1.27481031e+00 7.59090304e-01 ... -1.75828600e+00 4.32390213e-01 6.87387586e-01] [-1.12579787e+00 4.72879112e-01 -2.34303668e-01 ... -3.68971601e-02 9.40007627e-01 5.09427845e-01]] [[-5.06462038e-01 4.69321370e-01 -4.02716339e-01 ... -1.11222613e+00 -2.40848690e-01 2.75151134e-01] [-2.94842303e-01 -4.35196251e-01 -3.34393717e-02 ... 4.78899956e-01 7.25665867e-01 -1.90205067e-01] [ 5.06656766e-01 4.99532878e-01 -8.44755232e-01 ... 3.09288263e-01 -1.34169972e+00 3.27783763e-01] ... [ 3.17875534e-01 -7.04289019e-01 -8.44307005e-01 ... 1.06058391e-02 3.15408200e-01 -1.62414517e-02] [-1.42977685e-01 -3.82079124e-01 2.69176424e-01 ... 1.06129736e-01 8.26705173e-02 7.60470867e-01] [ 1.12136602e+00 5.35741687e-01 -8.20043385e-01 ... -3.35922897e-01 -1.01157880e+00 1.08604901e-01]] [[-5.31840682e-01 -2.99507022e-01 2.38497525e-01 ... -5.24276435e-01 -6.50767803e-01 -6.85724318e-01] [-1.23862374e+00 1.60899103e-01 1.28927141e-01 ... 1.02545604e-01 -1.22379768e+00 -4.97039914e-01] [ 2.23701864e-01 3.21749508e-01 2.95827478e-01 ... -3.82116586e-01 6.19591177e-01 2.68764824e-01] ... [ 1.71581015e-01 -1.21354914e+00 -5.00032961e-01 ... 4.10896510e-01 -4.31913942e-01 -6.31768048e-01] [ 1.53860033e-01 2.29854792e-01 -6.18116438e-01 ... 7.32769445e-02 -7.44320512e-01 -1.79732215e+00] [-3.87251884e-01 4.82051522e-01 6.73394918e-01 ... 8.73777390e-01 2.42829353e-01 7.29108810e-01]] ... [[-1.10374546e+00 -1.44873214e+00 7.79145896e-01 ... 7.06441164e-01 3.27785790e-01 -7.32312977e-01] [ 3.09550822e-01 4.17355895e-02 -9.92066979e-01 ... -1.01701384e-02 2.98620582e-01 1.26075342e-01] [ 3.10256064e-01 -6.11411572e-01 -1.29545510e-01 ... -6.51241541e-01 1.30357289e+00 6.35389984e-02] ... 
[-6.23649776e-01 -5.95596731e-01 -9.23767149e-01 ... 1.29466519e-01 -1.99654356e-01 6.63597047e-01] [-1.27549857e-01 2.17253223e-01 -2.63371676e-01 ... -3.25687081e-01 -6.28365129e-02 7.69976735e-01] [ 2.29229063e-01 -4.46470886e-01 -1.68170261e+00 ... -5.80361128e-01 -1.02327430e+00 -3.11326534e-01]] [[ 3.15164328e-01 -1.29472569e-01 -2.51383752e-01 ... 2.02997163e-01 5.64355850e-01 9.03333902e-01] [-4.94318962e-01 2.28706285e-01 -2.51524687e-01 ... 7.88094997e-01 -8.98135919e-03 -1.39532849e-01] [-2.47339398e-01 -2.35359117e-01 1.23833723e-01 ... 7.74945438e-01 3.81954312e-01 9.96221304e-02] ... [-8.22608054e-01 3.36413711e-01 -1.19663127e-01 ... 6.58767223e-01 -2.18832362e-02 4.10796016e-01] [ 8.91596854e-01 5.80617189e-01 -5.91389202e-02 ... -1.63750708e-01 -2.51641214e-01 -8.67680609e-01] [-5.17281331e-02 -2.10438281e-01 -2.09494643e-02 ... 5.96227884e-01 4.60841417e-01 -1.20123006e-01]] [[ 5.61088562e-01 -2.47602863e-03 -4.59221333e-01 ... -9.90956187e-01 -5.22130243e-02 1.21098943e-02] [-5.70205033e-01 2.26269573e-01 -2.06163347e-01 ... -1.39821792e+00 7.83172846e-01 7.21435905e-01] [ 9.57590580e-01 -4.67678100e-01 3.01519185e-01 ... 9.40205008e-02 -7.44714200e-01 6.81815445e-01] ... [ 2.58709013e-01 6.38082549e-02 2.13777676e-01 ... 8.58929098e-01 9.90119398e-01 9.32870030e-01] [-3.27620089e-01 -4.97418910e-01 6.50605619e-01 ... -1.19580948e+00 -1.02963336e-01 9.48366344e-01] [-7.33568594e-02 -1.06216526e+00 6.76729620e-01 ... 5.81924617e-01 -3.27380337e-02 5.01958251e-01]]] [[[ 1.15118039e+00 -5.63240170e-01 -2.52015740e-02 ... -2.10374951e+00 3.92137885e+00 -3.90124619e-01] [ 1.90273941e+00 -9.28485543e-02 -1.61822271e+00 ... 3.01971763e-01 -6.84351265e-01 -3.03469002e-02] [-1.59925938e+00 -3.50258040e+00 1.13856591e-01 ... -1.15988648e+00 -2.20567799e+00 3.08241285e-02] ... [-1.54953629e-01 -2.19871235e+00 2.75718689e-01 ... -4.53066796e-01 1.79781079e+00 -1.59947848e+00] [ 5.95303893e-01 1.37830675e+00 -2.98638225e+00 ... 
7.37553477e-01 1.92174292e+00 -5.13536394e-01] [-2.96934992e-01 -1.18343604e+00 -1.36520338e+00 ... -5.48081160e-01 -4.68166918e-01 2.45924807e+00]] [[-1.83660817e+00 4.36688848e-02 -5.71269929e-01 ... -3.22825861e+00 5.01993120e-01 -1.63175535e+00] [ 1.95077622e+00 -3.05004716e-01 3.25878859e+00 ... 3.10956836e-02 -3.82384211e-01 8.09178531e-01] [ 2.05804801e+00 5.82619011e-01 -4.07823026e-01 ... 1.19823849e+00 9.51876760e-01 1.06909461e-02] ... [ 6.02954566e-01 -2.03656554e+00 -3.80625606e-01 ... -2.30290428e-01 2.13253185e-01 6.05752110e-01] [ 3.12656552e-01 -6.95210934e-01 1.82328895e-01 ... -8.27092752e-02 -7.31696606e-01 1.80972680e-01] [-2.64059734e+00 4.05253142e-01 9.04072404e-01 ... -4.38291550e-01 7.83335507e-01 2.61221671e+00]] [[ 2.53314686e+00 1.26233375e+00 1.18029428e+00 ... -7.94281721e-01 -3.65917087e-01 2.49563664e-01] [ 3.43440247e+00 1.77720821e+00 -8.43153358e-01 ... -2.95173502e+00 1.35479200e+00 2.61681008e+00] [-7.55776286e-01 5.06470203e-01 2.39063427e-01 ... 3.01841187e+00 -1.76130211e+00 -2.73628116e-01] ... [ 1.62684596e+00 2.45415300e-01 -6.75718069e-01 ... 3.30445826e-01 -5.75743377e-01 3.31223702e+00] [ 2.19746709e+00 1.65392637e+00 3.16537237e+00 ... -4.32802960e-02 1.21821630e+00 2.57510066e+00] [-6.46430492e-01 -2.87064219e+00 -2.11182761e+00 ... -2.43404001e-01 -6.99001253e-01 8.25266898e-01]] ... [[-3.56099218e-01 1.39609981e+00 2.60341477e+00 ... 2.61114907e+00 -1.40942156e+00 -9.32115138e-01] [ 1.89424646e+00 -1.65528941e+00 9.16959524e-01 ... 1.12904096e+00 -1.60823584e-01 1.07318544e+00] [-1.10839534e+00 7.69253254e-01 -2.84816098e+00 ... -2.80970311e+00 2.44835123e-01 -1.34432268e+00] ... [-6.10944986e-01 -9.60006535e-01 6.18401289e-01 ... 1.79668498e+00 -3.27954024e-01 5.03769398e-01] [-2.58680320e+00 -8.50569010e-01 5.76943517e-01 ... -4.22205269e-01 3.23184192e-01 1.02239573e+00] [-1.23154259e+00 -5.48079908e-01 1.01546887e-02 ... 
1.63392043e+00 1.26453483e+00 2.47071646e-02]] [[-1.40984082e+00 1.52544737e+00 1.19831109e+00 ... -8.31156671e-01 1.80489290e+00 -7.62091339e-01] [-1.03648210e+00 6.50961399e-01 -1.53800920e-01 ... 2.91633189e-01 2.05923125e-01 1.40154469e+00] [ 9.66578245e-01 -4.20625269e-01 -1.55641365e+00 ... 7.05334723e-01 -3.85870598e-02 -3.68108630e-01] ... [ 1.36866599e-01 2.30775401e-01 1.03303814e+00 ... -9.34643090e-01 -2.43488789e-01 2.73823678e-01] [-1.32010305e+00 -2.23158360e-01 7.71119356e-01 ... -9.73181844e-01 -4.30066854e-01 7.19058454e-01] [ 7.88931191e-01 -2.25834191e-01 -3.75987649e-01 ... -2.07795596e+00 1.36568463e+00 -1.46387815e+00]] [[ 1.00402725e+00 -2.26040149e+00 2.32558584e+00 ... 1.44454217e+00 -8.29060435e-01 -2.26521873e+00] [-1.84006727e+00 -2.98116267e-01 1.73809454e-01 ... -1.53458333e+00 -1.28570473e+00 -4.79766876e-01] [-6.27615035e-01 4.75829452e-01 6.30678609e-02 ... 2.76343799e+00 6.63920343e-01 3.90144020e-01] ... [-4.67260420e-01 4.21307981e-01 4.72377658e-01 ... -2.01047969e+00 -1.39293599e+00 2.01499522e-01] [-1.08337736e+00 9.14625347e-01 4.56358767e+00 ... -3.43228316e+00 2.38770986e+00 -5.79106450e-01] [-5.01354218e-01 -2.42807412e+00 1.44772917e-01 ... 1.18761170e+00 -2.19424629e+00 2.27591705e+00]]] [[[ 1.95067513e+00 2.14918232e+00 -1.57094315e-01 ... 2.92565435e-01 3.44274819e-01 -2.68507838e+00] [ 6.99708760e-01 -2.18900394e+00 -1.93087077e+00 ... -3.56494874e-01 1.48967123e+00 -1.18113756e+00] [ 2.81407118e-01 -3.83184361e+00 2.19926405e+00 ... -4.97960776e-01 -6.60014820e+00 1.83804363e-01] ... [ 5.14583540e+00 8.57271969e-01 -8.15069854e-01 ... 1.78510916e+00 -2.33566308e+00 -4.03086185e+00] [ 6.82355464e-02 2.90821743e+00 -2.60727191e+00 ... 3.85370195e-01 2.36159420e+00 3.19781327e+00] [-1.88508081e+00 -1.51976097e+00 -4.46774292e+00 ... 3.71081531e-02 -4.68079388e-01 2.28632116e+00]] [[-4.22192723e-01 -1.64996076e+00 -3.14043760e+00 ... 
9.87642169e-01 1.19587171e+00 -7.50515997e-01] [-2.82804275e+00 -2.60839844e+00 2.33512712e+00 ... -1.81902111e+00 2.07519674e+00 -1.17108130e+00] [-3.66503096e+00 2.52529711e-01 4.61434126e+00 ... -5.83146811e-01 3.37752461e+00 1.33114254e+00] ... [ 1.16280243e-01 -2.29691815e+00 2.75861710e-01 ... 4.97036606e-01 -5.35235405e+00 1.90287912e+00] [ 1.52992570e+00 -2.08245873e+00 1.12448335e+00 ... 4.23512384e-02 4.39914131e+00 -2.44462371e+00] [ 8.10154259e-01 1.89558280e+00 -3.57751536e+00 ... 3.65442657e+00 5.84387684e+00 3.89078355e+00]] [[ 1.28067338e+00 -1.26065969e+00 3.15698385e+00 ... 3.20754910e+00 -1.15255249e+00 -1.08818889e+00] [-2.30129313e+00 1.84586990e+00 -9.40302432e-01 ... 7.23135076e-04 -9.45874870e-01 1.84506893e+00] [ 3.31398916e+00 -1.58452845e+00 2.12233448e+00 ... 2.13157988e+00 3.06096096e-02 2.03361821e+00] ... [ 3.90676022e+00 -1.10544074e+00 2.81572938e+00 ... -2.67443895e+00 8.76514792e-01 1.38042486e+00] [-4.21324760e-01 1.46792340e+00 -5.46287119e-01 ... 2.16952443e-01 4.47650099e+00 -3.69329405e+00] [ 3.57402253e+00 4.90749884e+00 4.90415752e-01 ... 1.18994868e+00 -2.18185210e+00 -7.88833380e-01]] ... [[-7.32581854e-01 -7.26665497e-01 -1.54924703e+00 ... -2.91047424e-01 -2.15106535e+00 1.94369078e-01] [ 3.81101370e-01 -2.04451084e+00 -8.99136901e-01 ... -1.83234406e+00 -1.53107321e+00 1.10009551e+00] [-8.20364535e-01 -2.31817770e+00 7.08267689e-01 ... -4.86102641e-01 -1.31627297e+00 -9.56417620e-01] ... [-2.39287710e+00 -3.97526801e-01 2.30533051e+00 ... 5.35455275e+00 1.77115035e+00 3.59845012e-01] [ 2.05519629e+00 3.74202871e+00 -3.34652662e-02 ... -1.68161893e+00 -7.90473044e-01 -2.66614944e-01] [ 5.29539680e+00 -3.44760537e+00 -1.25472951e+00 ... 2.57839489e+00 -3.02444482e+00 2.62251472e+00]] [[ 2.57966614e+00 -2.67010522e+00 5.31310940e+00 ... -2.53132081e+00 1.03983366e+00 -2.73301095e-01] [-1.06115639e+00 3.97800183e+00 -7.17873812e-01 ... 
-2.93573785e+00 -1.21073210e+00 -1.49293220e+00] [ 4.38377559e-01 -4.51491088e-01 8.23009193e-01 ... -1.56221032e+00 2.61121321e+00 -2.66897535e+00] ... [ 2.62762380e+00 1.84853876e+00 1.34484649e+00 ... 1.81076372e+00 1.67952895e-01 -2.76625276e-01] [-4.07285261e+00 4.50133532e-01 4.90277231e-01 ... 1.39680684e+00 -8.13613236e-01 -1.59293103e+00] [-3.59064102e+00 1.29719281e+00 1.22385055e-01 ... 4.26459551e+00 -4.83666134e+00 2.34078789e+00]] [[ 2.62081599e+00 -3.34138846e+00 2.63066626e+00 ... 6.99841928e+00 -1.40596128e+00 -1.61931932e+00] [-1.31636345e+00 -7.54140759e+00 6.09555364e-01 ... 2.10831261e+00 3.21813297e+00 -1.11534715e+00] [-3.10350251e+00 1.36409676e+00 -7.67914891e-01 ... -2.67421436e+00 -5.46911240e+00 4.41065359e+00] ... [-1.41058922e+00 -8.64906669e-01 1.43956482e+00 ... -3.41277122e+00 4.34312731e-01 -2.00899720e+00] [-1.87592041e+00 -2.91609621e+00 7.49560118e-01 ... 1.07612371e+00 -1.40226650e+00 1.63739598e+00] [-1.77881253e+00 -1.11903715e+00 6.77273870e-01 ... 2.92280650e+00 -2.89306092e+00 2.12509251e+00]]] [[[-7.20638335e-02 4.48794663e-01 -2.01592863e-01 ... -4.58856672e-02 1.17840201e-01 3.09488833e-01] [ 2.25977510e-01 4.67432767e-01 8.07307243e-01 ... 1.75447300e-01 -1.43583536e-01 2.15316936e-01] [ 2.62640089e-01 1.86120257e-01 4.67208207e-01 ... -1.47954166e-01 2.95911044e-01 1.69238135e-01] ... [-6.19667292e-01 2.60871410e-01 3.87989521e-01 ... 6.21861994e-01 3.79089117e-01 4.73242134e-01] [ 3.17941546e-01 4.23239678e-01 2.30285868e-01 ... -4.46772724e-01 2.92734623e-01 -1.88147932e-01] [ 1.25528321e-01 1.48880526e-01 -2.72976756e-01 ... 3.75463098e-01 -2.92213738e-01 1.95925720e-02]] [[-1.11218520e-01 4.44446146e-01 2.30081677e-01 ... -4.67784656e-03 -3.97925258e-01 5.59620202e-01] [-8.73504505e-02 -3.88433427e-01 1.06858417e-01 ... -5.21818064e-02 -9.86822844e-02 1.44373178e-01] [-7.17256218e-02 1.64917484e-01 -4.33929056e-01 ... -3.99108082e-01 1.69342116e-01 3.66140038e-01] ... 
[-2.93748975e-01 -1.84851959e-01 4.02058959e-01 ... 4.17948030e-02 -4.71093446e-01 -2.95629293e-01] [ 6.98786318e-01 1.74094501e-04 -1.29959911e-01 ... -1.56647548e-01 -1.03892786e-02 9.75728273e-01] [ 1.31800339e-01 1.78384975e-01 3.06819201e-01 ... 4.96760905e-02 -2.34084800e-01 3.02296996e-01]] [[-5.63425779e-01 -2.16876954e-01 -5.61094105e-01 ... 5.16226709e-01 1.92524180e-01 -1.48239613e-01] [ 4.79809463e-01 1.13812406e-02 7.29173468e-03 ... -6.95961654e-01 7.56958351e-02 -1.54755488e-01] [-6.16151214e-01 1.38418809e-01 1.10473700e-01 ... -4.38761450e-02 -5.90387762e-01 6.01898551e-01] ... [-1.54223368e-01 -6.59464359e-01 4.51539159e-01 ... 7.46928751e-01 -3.00184250e-01 4.28711683e-01] [ 1.06190674e-01 -2.78207868e-01 -2.16338739e-01 ... 2.60633528e-02 2.07714587e-02 5.42987883e-02] [ 2.19201356e-01 -1.25777468e-01 -2.42063016e-01 ... -9.48519230e-01 2.19937220e-01 -5.38970411e-01]] ... [[ 3.92777354e-01 -3.66027027e-01 5.52273095e-01 ... -2.53929406e-01 5.04519753e-02 4.18397456e-01] [-5.18825769e-01 2.18911946e-01 5.62294684e-02 ... -1.96927816e-01 -3.16029072e-01 6.73092425e-01] [ 1.58249354e-03 -4.97096665e-02 -2.74952620e-01 ... 2.50597805e-01 2.07987741e-01 -8.00929293e-02] ... [-1.77024886e-01 7.86215961e-02 -1.80590078e-02 ... -3.36981028e-01 -4.39385682e-01 -1.12828173e-01] [-3.25681508e-01 -3.24900568e-01 2.25222230e-01 ... 3.87294799e-01 1.56816334e-01 -1.01496592e-01] [-2.17978194e-01 1.59331501e-01 1.17763616e-01 ... 3.21666867e-01 -1.82331309e-01 -6.39342725e-01]] [[ 1.45584121e-01 -8.05677697e-02 2.88853347e-01 ... -4.61348474e-01 3.27400565e-01 -2.49589771e-01] [ 5.65220952e-01 -6.69737518e-01 -4.68013346e-01 ... 5.87015599e-02 5.29935360e-01 1.83132306e-01] [-5.83529830e-01 -1.49605334e-01 3.05450976e-01 ... 2.32769474e-01 -4.90649194e-01 -4.61701714e-02] ... [ 1.22228496e-01 -2.93648262e-02 -7.58004114e-02 ... 7.47529984e-01 -1.72996446e-02 1.66066721e-01] [-2.22058222e-01 -2.22685225e-02 -1.89091459e-01 ... 
4.26366180e-01 8.66683573e-02 -9.55744758e-02] [-6.36111274e-02 1.15376487e-01 -3.17844778e-01 ... 3.41049790e-01 3.81408702e-03 7.08030984e-02]] [[-7.57697821e-02 -2.57094026e-01 -1.18509438e-02 ... -1.53153896e-01 2.79962242e-01 -8.88431892e-02] [-2.31019378e-01 4.31572199e-01 1.15670919e-01 ... 4.66644585e-01 2.00501531e-01 2.31384680e-01] [ 3.77344728e-01 3.46989065e-01 -4.87675108e-02 ... 5.41436613e-01 8.74502808e-02 -1.72892049e-01] ... [-1.40584679e-02 2.65964475e-02 -2.83412367e-01 ... -4.08352971e-01 -1.04996204e-01 1.50159404e-01] [ 1.62066489e-01 7.05668563e-03 -1.56272620e-01 ... -1.52759627e-02 1.27839968e-01 -1.10528000e-01] [ 3.42122674e-01 -5.78968942e-01 -6.00751162e-01 ... 2.31621917e-02 -2.58839816e-01 -5.55118859e-01]]]] [[[[ 2.11292788e-01 4.26031858e-01 -1.59614280e-01 ... -4.82786715e-01 -6.05746984e-01 -1.47749692e-01] [ 8.26826811e-01 6.78226888e-01 -7.15491548e-02 ... -6.51626140e-02 7.41249204e-01 -4.87337738e-01] [ 5.64294040e-01 -1.09056540e-01 3.03048138e-02 ... 9.50984120e-01 3.70391011e-01 -4.36428130e-01] ... [ 6.49362564e-01 -6.77524388e-01 -3.78447711e-01 ... 1.11123931e+00 -2.78735548e-01 3.31728548e-01] [ 1.06384702e-01 -4.09920961e-01 -7.30947971e-01 ... 3.59302998e-01 -9.53472912e-01 -2.51464456e-01] [ 1.64358819e+00 -5.24894059e-01 5.87081552e-01 ... 1.65599689e-01 -5.47383666e-01 -1.84303081e+00]] [[ 5.26629686e-01 9.25985873e-01 1.35767949e+00 ... -4.70296383e-01 -1.29484981e-01 4.36562300e-01] [ 1.85992825e+00 3.80352855e-01 8.61258209e-01 ... -3.44794393e-01 -3.81078422e-01 -7.24982679e-01] [-9.66695309e-01 -2.84425259e-01 1.13887331e-02 ... -8.31765592e-01 1.11661386e+00 -3.71129401e-02] ... [ 4.18215483e-01 8.34279597e-01 7.24679828e-01 ... 1.19062805e+00 4.00671870e-01 -9.17314470e-01] [ 6.19880140e-01 8.19524825e-01 -4.96143252e-01 ... -4.15166229e-01 -4.10425246e-01 1.04092097e+00] [ 1.15813009e-01 3.08923423e-01 1.74354628e-01 ... 
2.59209841e-01 9.37866211e-01 1.15731411e-01]] [[-2.62590349e-01 -1.29088223e-01 2.24835873e-01 ... -1.00649321e+00 -2.34447807e-01 7.74266720e-01] [-1.05012429e+00 2.60507762e-01 1.74179412e-02 ... 2.14526907e-01 6.73128664e-01 5.69910824e-01] [ 8.65487099e-01 5.25994182e-01 1.60507560e-01 ... -5.78656971e-01 -5.49869597e-01 6.92454338e-01] ... [-3.89429867e-01 1.13117051e+00 -2.89958958e-02 ... -1.46637964e+00 -9.07212555e-01 2.66494900e-01] [-4.09589648e-01 1.83192384e+00 -6.95252419e-01 ... -4.48514879e-01 1.20394237e-01 2.67919660e-01] [-1.15515925e-01 6.44999444e-02 -4.26952779e-01 ... 8.07819843e-01 6.57852292e-01 2.62226820e-01]] ... [[-4.56440359e-01 8.64989698e-01 -1.17571843e+00 ... 5.78613877e-01 6.86369538e-01 1.00005007e+00] [-9.72794965e-02 -9.42492113e-02 3.00088584e-01 ... 2.17623934e-01 1.43160060e-01 3.25227678e-01] [ 2.06141457e-01 -8.80976975e-01 6.16448820e-01 ... 6.93483949e-01 -5.33768356e-01 2.89029568e-01] ... [-1.71262369e-01 1.41812280e-01 1.43733764e+00 ... -9.66059268e-01 -7.15277851e-01 3.96382540e-01] [ 1.29449415e+00 1.27460253e+00 -4.51097101e-01 ... -3.91600758e-01 -3.67451251e-01 -1.00566089e+00] [-9.02462229e-02 -5.15067935e-01 -1.78469849e+00 ... -2.46571586e-01 2.32295528e-01 -1.52338970e+00]] [[-6.53447807e-01 -2.01453432e-01 -4.00530457e-01 ... 2.25174725e-01 -4.14709508e-01 -6.10722899e-01] [ 1.04071665e+00 9.28347468e-01 2.97893971e-01 ... 9.47938859e-02 6.22688532e-01 1.38463533e+00] [ 1.20725644e+00 5.19579053e-01 1.34800360e-01 ... 8.66566598e-01 -1.41634381e+00 -8.50390196e-01] ... [ 6.19756477e-03 -8.27266216e-01 -1.05377293e+00 ... 7.38918483e-01 2.90803909e-01 -8.98067236e-01] [-4.02643889e-01 8.76063481e-02 -1.52711675e-01 ... -3.16909432e-01 -9.09844100e-01 -4.42540526e-01] [ 2.59437442e-01 -6.74678087e-01 -7.78844416e-01 ... -1.65865734e-01 -3.67507726e-01 6.09324500e-02]] [[ 2.72341788e-01 -8.92925799e-01 2.52073854e-01 ... 
-4.75784659e-01 -3.61313581e-01 -3.41584980e-01] [-5.00883162e-01 3.06690484e-01 3.42141122e-01 ... 2.91620698e-02 -3.88441980e-02 2.08243221e-01] [ 5.92118084e-01 1.56700522e-01 -5.71400821e-01 ... 4.19541866e-01 1.52263731e-01 3.60296071e-01] ... [ 1.95256948e-01 -6.54022157e-01 -2.33482242e-01 ... -9.14068818e-02 -5.20851910e-01 -4.59140331e-01] [-1.48873046e-01 -1.19226083e-01 5.15644252e-01 ... 2.61799097e-01 -2.29241341e-01 -8.98431599e-01] [ 9.11973938e-02 3.96214128e-01 1.98857486e-01 ... -1.71026498e-01 1.05249476e+00 4.82859433e-01]]] [[[-1.68990111e+00 1.43110490e+00 9.61794972e-01 ... -1.77271843e+00 6.63815618e-01 -1.14008462e+00] [-2.54000688e+00 -4.13407415e-01 2.87012553e+00 ... 1.50289834e+00 -1.77583683e+00 1.06496230e-01] [-2.33562857e-01 -1.19565141e+00 3.48146677e+00 ... 1.80920911e+00 6.58865094e-01 -3.59974170e+00] ... [ 1.70073032e+00 -5.70143044e-01 -2.72743672e-01 ... -2.61943054e+00 -1.23391652e+00 6.03682101e-01] [ 8.47778380e-01 -2.41532214e-02 -1.48876035e+00 ... 1.21219265e+00 -1.16826296e+00 -1.66450775e+00] [ 5.28966039e-02 3.61277431e-01 -1.52385449e+00 ... -2.62769079e+00 -1.65436816e+00 1.71601272e+00]] [[ 2.49337959e+00 2.40469962e-01 8.07730138e-01 ... 7.17752397e-01 -1.36239004e+00 3.59926701e-01] [ 5.08422077e-01 7.28382230e-01 3.43938923e+00 ... -3.08226705e+00 -1.35884428e+00 1.72453523e+00] [-3.52081347e+00 -3.09580278e+00 3.27513027e+00 ... 2.27648973e+00 -3.10580999e-01 -3.68670702e-01] ... [ 1.26532030e+00 1.16748190e+00 2.46894813e+00 ... 2.31344557e+00 -1.93763793e+00 -3.54765844e+00] [-1.32178080e+00 -2.09706974e+00 -6.80285454e-01 ... -1.24651885e+00 -1.94858536e-02 -2.61096454e+00] [ 9.00284171e-01 -4.67667055e+00 1.93961430e+00 ... 3.95958096e-01 2.12705684e+00 -1.00778544e+00]] [[ 1.94413662e+00 9.05550778e-01 -1.31807578e+00 ... -5.27887605e-02 -4.37600464e-01 1.23767447e+00] [ 1.92338002e+00 2.74428099e-01 8.06672633e-01 ... 
1.97475040e+00 6.96836352e-01 -1.12758875e-01] [-1.09702146e+00 -2.26946139e+00 -7.49071836e-01 ... -8.63030732e-01 -1.20736349e+00 -3.34698915e+00] ... [ 1.79625773e+00 -1.87040377e+00 -1.21086395e+00 ... 3.00795102e+00 -1.09294224e+00 1.73500228e+00] [-2.49246433e-01 2.82156229e-01 6.57203257e-01 ... -1.33150327e+00 -4.89330441e-01 -1.12316024e+00] [ 7.90524960e-01 1.07792377e+00 -1.14659584e+00 ... 2.06671104e-01 -1.39250934e-01 -2.26376247e+00]] ... [[ 9.42947492e-02 1.06450510e+00 -4.04530138e-01 ... -2.51997888e-01 3.85791287e-02 -4.48634624e-01] [ 1.10712850e+00 -1.81182250e-01 -3.35188270e-01 ... 4.24826115e-01 -8.52263987e-01 -1.57890928e+00] [-7.19378889e-02 -1.80508032e-01 -6.33994281e-01 ... -8.58286619e-01 4.81262773e-01 2.63947463e+00] ... [-1.66778088e+00 -2.09725809e+00 -2.49568686e-01 ... 1.67889369e+00 8.67653728e-01 -1.09892118e+00] [ 2.16146246e-01 1.05707216e+00 -3.26781058e+00 ... 1.02990699e+00 1.70938039e+00 -1.52462110e-01] [-3.23013830e+00 6.19412899e-01 3.01792717e+00 ... 6.54243231e-02 1.36960793e+00 -2.88996291e+00]] [[ 9.38365400e-01 -5.75452805e-01 -3.82376671e-01 ... 3.35822642e-01 -2.73339772e+00 2.39373708e+00] [-1.89824903e+00 -4.47439098e+00 3.62179130e-01 ... -8.18036735e-01 -2.82770842e-01 2.56564951e+00] [-3.20873260e-01 7.11960852e-01 7.01301575e-01 ... 4.60660398e-01 -5.22325635e-01 6.44412041e-01] ... [ 1.99652719e+00 -1.83039474e+00 7.84079194e-01 ... 2.52841139e+00 1.04907739e+00 -6.67121708e-01] [ 2.43622124e-01 2.62668777e+00 -9.37988758e-01 ... -1.96439648e+00 -2.72080034e-01 -3.40730190e+00] [ 7.53012657e-01 3.56473595e-01 2.38588262e+00 ... -6.15951419e-01 5.73796809e-01 9.38216209e-01]] [[ 1.68925285e+00 1.33833218e+00 9.68118429e-01 ... -2.34674788e+00 -1.59903181e+00 1.52687597e+00] [ 1.71903718e+00 1.05079181e-01 1.12102723e+00 ... -1.29182553e+00 -4.93356764e-01 -2.34906960e+00] [ 4.93668020e-01 -3.08007032e-01 -8.72675240e-01 ... 2.39769787e-01 1.89521599e+00 2.08178461e-01] ... 
[ 8.31001043e-01 -2.75304466e-01 4.43927616e-01 ... 1.62535220e-01 -4.36624193e+00 1.98223996e+00] [-4.59435225e+00 5.57940491e-02 5.35483100e-02 ... -1.28280544e+00 -2.19471741e+00 3.09039545e+00] [-1.15584946e+00 3.95147473e-01 -1.65196049e+00 ... -2.19876742e+00 -1.53129816e+00 -1.17286587e+00]]] [[[-3.63469243e-01 -7.01158762e-01 -2.11317480e-01 ... -3.88607591e-01 3.60169202e-01 -3.47863168e-01] [ 5.15344203e-01 8.06965530e-01 -5.70330918e-02 ... -2.54877023e-02 -7.57926702e-01 -1.79547071e-01] [-1.02600098e+00 -1.10848415e+00 -7.07315445e-01 ... -2.56720722e-01 8.33104432e-01 8.85966361e-01] ... [ 1.27341345e-01 -1.65881708e-01 7.34706461e-01 ... 1.03458810e+00 7.99836665e-02 -7.65693069e-01] [-3.70128185e-01 1.29764289e-01 1.03096640e+00 ... -9.86290500e-02 -2.10834127e-02 1.17858350e+00] [ 8.22477996e-01 -1.00611091e+00 -3.15725082e-03 ... 6.41514421e-01 1.08889125e-01 8.32915425e-01]] [[-2.53478646e-01 -2.22842529e-01 -3.61743987e-01 ... 6.48451924e-01 1.57169521e+00 -3.04238051e-01] [-4.85322401e-02 -2.73239642e-01 -7.76227238e-03 ... -3.86595935e-01 6.06295526e-01 -1.35021225e-01] [-2.89598674e-01 -3.00799876e-01 3.34897846e-01 ... 3.23948532e-01 -2.98068970e-01 4.02260780e-01] ... [ 2.34844223e-01 7.54878044e-01 -7.54787505e-01 ... -3.27028096e-01 -2.62210786e-01 8.88721719e-02] [ 1.57149661e+00 -7.00432658e-01 6.90357089e-01 ... -1.84240118e-01 2.38545150e-01 4.10092771e-01] [-3.54029477e-01 -2.72976220e-01 3.51885021e-01 ... -5.23766756e-01 -1.37727261e+00 -3.96722496e-01]] [[ 7.18501031e-01 -1.75956741e-01 7.67128289e-01 ... 5.20691752e-01 9.22295511e-01 7.51342177e-01] [-1.38432658e+00 2.83451766e-01 -4.73894864e-01 ... -1.08844244e+00 -3.36231112e-01 7.42350101e-01] [-1.22406065e-01 -6.01930857e-01 -1.32844543e+00 ... 1.79385573e-01 1.00248313e+00 4.86554950e-01] ... [-1.31772310e-01 -3.46693575e-01 7.69061685e-01 ... 8.76222551e-01 3.60263526e-01 -2.73295343e-01] [-8.65738273e-01 -5.09901345e-01 4.80965227e-01 ... 
7.76180029e-01 -5.71788490e-01 1.81368828e-01] [ 1.60027134e+00 -1.29948568e+00 -2.13707164e-01 ... 6.46600544e-01 9.08790410e-01 -1.19238526e-01]] ... [[-3.45223844e-01 1.61835301e+00 5.65601289e-01 ... 7.29154944e-01 5.63942909e-01 7.73694336e-01] [ 6.37784839e-01 7.29389608e-01 1.20685852e+00 ... 2.61684626e-01 1.19786310e+00 6.96309209e-01] [-8.24066520e-01 -6.64766371e-01 2.62581736e-01 ... -6.54510558e-01 4.03265834e-01 -5.63497543e-01] ... [-7.83393800e-01 -6.21336579e-01 -6.22307360e-01 ... -3.17998171e-01 1.11211729e+00 -2.05187783e-01] [ 5.81757724e-01 4.33534324e-01 -7.64264613e-02 ... -1.50590277e+00 -1.95503592e-01 2.93355972e-01] [-1.12170763e-01 9.81918573e-01 -6.61125243e-01 ... -1.62470058e-01 -6.36063933e-01 -3.00242096e-01]] [[-2.12243274e-01 -9.54757392e-01 1.84865206e-01 ... -1.22359872e+00 -5.20312637e-02 1.06045783e+00] [-4.18357611e-01 4.46793884e-02 -1.48265469e+00 ... -4.22424495e-01 -6.68554306e-01 -8.12820420e-02] [ 6.83442295e-01 6.44977033e-01 4.84153420e-01 ... -4.81640637e-01 -3.02820235e-01 -3.99720706e-02] ... [-4.39286262e-01 3.93578112e-01 8.48581254e-01 ... -6.45217001e-01 9.15286765e-02 7.56250322e-01] [-1.07062139e-01 -5.84223688e-01 -4.11732018e-01 ... 8.80021989e-01 -7.95432627e-01 1.22787736e-01] [ 2.65024543e-01 4.56942469e-01 -2.82260209e-01 ... 2.88564682e-01 1.07933521e+00 -9.44373012e-03]] [[-5.77286363e-01 -4.90685314e-01 8.01852047e-01 ... -7.93416142e-01 5.72640598e-01 -2.18372583e-01] [-7.36985922e-01 -1.71300039e-01 -4.83610481e-01 ... -3.40476751e-01 2.81574190e-01 4.21508729e-01] [ 8.62789392e-01 2.87625670e-01 -5.30753613e-01 ... 7.55489826e-01 8.18458647e-02 -1.15908355e-01] ... [-1.55567765e-01 4.29757237e-01 6.10326409e-01 ... -2.89704144e-01 7.08008051e-01 -9.32564139e-01] [ 2.69684702e-01 -5.71878493e-01 -1.34187803e-01 ... -6.97489858e-01 -3.04987311e-01 -6.89900756e-01] [-4.58007902e-01 -1.20985858e-01 3.53959471e-01 ... 
1.24149430e+00 6.88694954e-01 1.70594513e-01]]] [[[-9.23182666e-01 1.72116446e+00 -7.52179384e-01 ... -7.68186331e-01 -2.49914503e+00 -1.47386742e+00] [-2.66546130e+00 -2.36911535e+00 1.47357070e+00 ... 1.62158549e+00 5.15336432e-02 -1.45607591e+00] [-1.14000118e+00 -1.05341542e+00 -4.09249693e-01 ... 2.10766029e+00 6.14736736e-01 1.28510147e-01] ... [ 5.69892585e-01 7.69594610e-01 -1.80277526e+00 ... -5.39558530e-01 -1.11255300e+00 8.97163808e-01] [ 1.48823690e+00 8.52515280e-01 1.20271432e+00 ... -1.29517210e+00 5.04693925e-01 -1.79085433e+00] [ 1.73741603e+00 1.46368098e+00 1.99892235e+00 ... -8.98551822e-01 1.20417142e+00 -3.53195131e-01]] [[-6.65957212e-01 -8.35342824e-01 -3.90216589e-01 ... 8.67650285e-02 1.18452394e+00 -6.68455124e-01] [ 1.11884284e+00 -1.43069458e+00 5.35560250e-01 ... 1.56655252e+00 -2.54039264e+00 3.18746614e+00] [ 1.81283617e+00 8.63294601e-01 -1.56205380e+00 ... 4.41488117e-01 -4.45231885e-01 2.32009068e-01] ... [-5.55279195e-01 4.90408055e-02 -1.11445677e+00 ... 1.24940622e+00 -3.43624210e+00 -2.54328966e+00] [-1.67995167e+00 1.05239654e+00 2.49661779e+00 ... 1.05996400e-01 -1.47219861e+00 4.34436738e-01] [-3.73792857e-01 -2.18543053e+00 -1.69294715e+00 ... -5.66181064e-01 3.86429167e+00 2.63528562e+00]] [[-1.25063792e-01 -1.10740268e+00 6.60016119e-01 ... 2.96241015e-01 -1.00838590e+00 -1.82752550e+00] [-6.69077218e-01 -4.77111578e-01 -5.02053618e-01 ... 9.70138609e-01 -1.40357450e-01 1.34230399e+00] [-7.84423411e-01 1.82599676e+00 8.48567247e-01 ... -2.47759834e-01 -9.56863701e-01 -2.97774762e-01] ... [ 2.57027149e-01 3.83412659e-01 8.62384975e-01 ... 1.41098678e+00 -2.32383943e+00 2.96111917e+00] [ 1.26013029e+00 8.70184302e-01 -2.80362892e+00 ... 1.13842237e+00 -1.56847692e+00 2.54610348e+00] [ 1.06963015e+00 -6.11232340e-01 -1.02192783e+00 ... 3.37220907e-01 4.93905842e-02 1.62670350e+00]] ... [[-1.24988830e+00 3.81893441e-02 -2.07226920e+00 ... 
-1.82135329e-01 1.81542858e-01 1.51788568e+00] [ 7.75063783e-02 -1.34386683e+00 -1.16102934e+00 ... 1.88227153e+00 -6.52660489e-01 8.61245275e-01] [-6.25216782e-01 2.19989777e+00 1.40046597e+00 ... 1.26007664e+00 2.28346959e-01 -2.38547730e+00] ... [-5.51948011e-01 8.79742205e-01 8.48816097e-01 ... -2.73855001e-01 1.68735588e+00 -1.01170588e+00] [ 2.59322834e+00 2.57649994e+00 -1.51186264e+00 ... 7.57162213e-01 3.70515548e-02 -1.29343879e+00] [ 2.48116159e+00 -1.00125289e+00 4.81114239e-01 ... 3.20683777e-01 5.19690692e-01 -2.10620618e+00]] [[ 1.87150657e-01 1.38199914e+00 1.32421374e+00 ... 1.58028018e+00 2.20074582e+00 -3.02766711e-01] [-1.17429316e+00 -1.13058650e+00 -5.77993274e-01 ... -6.14782155e-01 3.79060745e+00 -1.74483216e+00] [ 7.37208903e-01 -2.05492139e+00 -4.07430768e-01 ... -8.50651115e-02 8.94826710e-01 4.93432224e-01] ... [-1.17538750e+00 1.47737956e+00 -7.24931777e-01 ... -2.18371773e+00 -2.94578224e-01 -7.70890176e-01] [-1.35945964e+00 1.10863602e+00 6.89906895e-01 ... -8.24831426e-01 2.51812041e-01 3.40630591e-01] [ 5.94014466e-01 -6.37690604e-01 -3.09039474e-01 ... 2.50140285e+00 -1.49058223e+00 -1.13037312e+00]] [[-4.57866341e-01 7.34914184e-01 3.10288854e-02 ... -2.40121937e+00 9.96046364e-02 -1.58844495e+00] [ 1.16035914e+00 -3.85111302e-01 -2.20287538e+00 ... -5.31418264e-01 2.00488329e+00 -3.60972553e-01] [ 1.56818533e+00 -2.35166240e+00 1.79713398e-01 ... 1.43229854e+00 -1.17001347e-01 -1.06842673e+00] ... [ 2.06704795e-01 1.75164139e+00 1.01239599e-01 ... -1.75738537e+00 1.24194825e+00 -1.45525777e+00] [ 6.34305000e-01 8.90412569e-01 -1.31426382e+00 ... 2.36691427e+00 2.87740397e+00 -1.80777788e+00] [-3.43613893e-01 6.41082883e-01 2.06339955e+00 ... -1.65788460e+00 2.07060742e+00 -1.41840076e+00]]] [[[ 3.69240135e-01 2.08801794e+00 2.17330360e+00 ... 7.01653481e-01 1.81176615e+00 9.02473688e-01] [-5.45958471e+00 3.61327553e+00 -6.71842456e-01 ... 
-2.96240711e+00 9.86019790e-01 5.48964214e+00] [ 3.22351992e-01 -8.18817198e-01 3.03916240e+00 ... 2.54297304e+00 -1.42999566e+00 -6.50708866e+00] ... [ 1.96062577e+00 4.15786934e+00 -1.87195385e+00 ... 1.03508067e+00 -9.53164935e-01 2.25983962e-01] [ 4.05590117e-01 -1.43784985e-01 -1.22818971e+00 ... 5.36347198e+00 -9.55912292e-01 3.02433157e+00] [ 1.97738349e+00 -1.49824953e+00 1.41401958e+00 ... 1.47507918e+00 -1.52259088e+00 2.36518860e+00]] [[ 2.46124339e+00 3.74627054e-01 -1.23202467e+00 ... -2.42948627e+00 -3.35375476e+00 -2.70428872e+00] [ 2.11910629e+00 3.94008070e-01 2.38596058e+00 ... 5.68525672e-01 1.09947324e+00 -2.06814504e+00] [-6.07616901e-01 2.82382202e+00 2.27368927e+00 ... 4.86905766e+00 -2.50875354e+00 3.88726711e+00] ... [ 4.78375018e-01 -4.37156916e+00 -5.73556519e+00 ... -9.18690801e-01 8.00706208e-01 -1.27718246e+00] [-3.17699909e+00 -7.25147581e+00 -1.04030383e+00 ... 1.99296141e+00 2.56034160e+00 -1.25769961e+00] [-4.29014635e+00 2.14206624e+00 -1.01045859e+00 ... 6.14992666e+00 2.02919626e+00 1.59300709e+00]] [[-9.57698882e-01 -7.40137517e-01 2.24672294e+00 ... -3.01597059e-01 4.54703346e-02 -1.00279617e+00] [ 8.11061263e-01 -1.08124511e-02 -4.59611130e+00 ... -1.71062338e+00 -7.62306675e-02 -3.15734833e-01] [ 1.52136004e+00 -4.62527275e+00 -1.06303334e+00 ... -9.64933634e-01 -9.99074221e-01 1.72027731e+00] ... [-1.15801938e-01 -3.25415641e-01 4.31971312e+00 ... -2.86294532e+00 -4.64493513e+00 2.18331075e+00] [ 5.11500120e+00 -4.57297713e-01 -1.21358383e+00 ... 2.89210856e-01 -2.31332541e+00 8.84729624e-01] [ 2.33020449e+00 3.05429149e+00 1.14215836e-02 ... 6.89935684e-01 -1.89526165e+00 -2.53277969e+00]] ... [[-1.62487817e+00 -1.18208253e+00 -7.57949591e-01 ... -6.15298986e-01 4.91619825e+00 1.28020775e+00] [ 2.24827766e+00 2.09917021e+00 1.68610954e+00 ... 1.26385486e+00 2.33993992e-01 -1.45360470e+00] [ 4.61914110e+00 -3.59754753e+00 1.49898338e+00 ... 2.94785857e+00 -1.90624759e-01 2.13156390e+00] ... 
[-5.61374950e+00 1.53163695e+00 3.42204666e+00 ... 8.46625715e-02 2.01766944e+00 4.30886984e+00] [ 2.31964779e+00 1.02262807e+00 -2.85719180e+00 ... 1.11185563e+00 1.74649787e+00 1.76733983e+00] [-1.92116654e+00 -9.33167338e-01 -3.25128055e+00 ... -3.64680827e-01 7.88104415e-01 -1.36559355e+00]] [[-3.48066783e+00 6.47560507e-02 -1.55433369e+00 ... -2.31760383e-01 3.39464402e+00 -2.41791677e+00] [ 1.06679034e+00 1.73418090e-01 -1.85988650e-01 ... -3.15617561e+00 4.24380207e+00 -5.48217916e+00] [ 1.17160487e+00 -4.70280361e+00 3.45618546e-01 ... -1.03383887e+00 -2.54780960e+00 -4.21294022e+00] ... [-2.96382397e-01 -6.38697505e-01 2.56449008e+00 ... 4.32612836e-01 -3.58494163e-01 -3.37843418e+00] [-1.44603944e+00 1.40784055e-01 3.36845469e+00 ... 2.73523974e+00 1.71308172e+00 1.87981176e+00] [ 2.24704313e+00 -1.56750262e-01 1.26564395e+00 ... -1.55193901e+00 -2.55293441e+00 -2.22048998e+00]] [[-2.68633008e-01 -2.14016271e+00 -3.61411643e+00 ... 9.45272446e-01 -3.46974969e+00 5.67399383e-01] [ 1.71745348e+00 -2.68491793e+00 -3.03849149e+00 ... 2.85666013e+00 -1.16457760e+00 -1.65162110e+00] [ 2.60637188e+00 -6.46484280e+00 3.26077127e+00 ... -1.12009680e+00 4.43503523e+00 2.90797353e+00] ... [-1.41818655e+00 3.27914071e+00 -2.87140322e+00 ... 5.35479248e-01 3.68901777e+00 -2.33137059e+00] [ 2.75952339e+00 1.26370800e+00 8.50404799e-02 ... -1.11691892e+00 -4.50101733e-01 5.59058964e-01] [-3.59426588e-01 -7.14070082e-01 -2.81536651e+00 ... -6.47919357e-01 -1.83432710e+00 1.03762794e+00]]] [[[-3.18323016e-01 -1.93721838e-02 3.48951668e-01 ... 9.66002569e-02 2.95488656e-01 -3.65918040e-01] [-1.45123556e-01 -1.12774067e-01 2.33010709e-01 ... 1.93311185e-01 1.28215998e-01 4.51739937e-01] [-3.30150753e-01 5.52919656e-02 -5.26338890e-02 ... -5.33588707e-01 3.23113590e-01 2.31881782e-01] ... [ 2.83856504e-02 -2.62580067e-01 2.22334966e-01 ... 2.19208119e-03 -4.81608570e-01 2.44721174e-01] [-2.86661178e-01 -5.67442298e-01 3.75259250e-01 ... 
1.86864391e-01 3.33402865e-02 -7.86713243e-01] [-6.68790340e-01 -7.21275985e-01 -9.22884196e-02 ... -7.11351857e-02 4.68341261e-01 -1.43910378e-01]] [[-1.56282276e-01 -2.01497264e-02 1.86634839e-01 ... -2.84212738e-01 -7.51312748e-02 -2.19695438e-02] [-4.59684491e-01 3.43783289e-01 1.29523128e-01 ... 9.43559483e-02 3.83250624e-01 2.24222928e-01] [-9.74872112e-02 -2.27945298e-01 -1.29428461e-01 ... -3.23623717e-01 9.45697799e-02 2.98251837e-01] ... [ 1.89201668e-01 -2.80640036e-01 3.48935388e-02 ... 2.46053971e-02 4.61429246e-02 2.08767563e-01] [ 7.29945600e-02 4.85477686e-01 -7.59367123e-02 ... 1.59869388e-01 1.57357931e-01 -6.32860243e-01] [-7.01999187e-01 -1.05868101e-01 9.89884660e-02 ... 1.56912804e-02 -7.21174330e-02 -1.02327085e+00]] [[ 3.63725811e-01 3.81276548e-01 4.92672960e-04 ... -1.52775690e-01 -2.27761492e-01 4.89097655e-01] [ 2.13324308e-01 -6.49461448e-01 2.38544777e-01 ... 4.73929048e-02 5.98621130e-01 2.80831814e-01] [ 8.98037180e-02 2.51694828e-01 -5.18378437e-01 ... 6.16042674e-01 -2.09562838e-01 2.43084356e-01] ... [-3.79357100e-01 4.89932507e-01 7.32910037e-01 ... 2.96876896e-02 2.31590077e-01 -4.84968245e-01] [-4.29460764e-01 3.07062477e-01 -5.30905366e-01 ... 1.32439122e-01 -3.47659022e-01 1.42369851e-01] [-5.07579148e-01 -9.31575149e-02 -2.76484877e-01 ... 9.76018012e-02 9.95565951e-02 -3.74769419e-01]] ... [[-2.71768510e-01 -1.11547537e-01 1.27183139e-01 ... 4.17668700e-01 -9.19574723e-02 -8.01725760e-02] [-6.67190909e-01 -3.18121731e-01 3.31342489e-01 ... -5.71689308e-01 -3.23281705e-01 3.66870314e-01] [-9.57463309e-02 2.80729920e-01 3.41709889e-02 ... -3.45983148e-01 3.22351523e-04 4.71711516e-01] ... [-3.64685357e-01 6.77928627e-02 2.58701652e-01 ... 3.35499614e-01 5.25720298e-01 -1.83929533e-01] [-9.92298365e-01 1.79894418e-01 5.01136422e-01 ... 1.91252112e-01 -3.05976361e-01 -4.80382562e-01] [-3.99685264e-01 -3.17084223e-01 5.57103872e-01 ... 
5.17965257e-02 -3.52949090e-02 1.51943967e-01]] [[ 5.80848277e-01 4.14205760e-01 1.40059786e-02 ... -2.01990843e-01 2.06141442e-01 2.71010935e-01] [-2.70394087e-01 9.74480361e-02 -3.31778973e-02 ... -5.55509269e-01 1.60081550e-01 2.78478079e-02] [-2.25711584e-01 -1.15448117e-01 -8.74338597e-02 ... -2.30554491e-01 1.26518562e-01 1.50797561e-01] ... [ 4.35504287e-01 -2.01809168e-01 -4.95096594e-02 ... 1.96978435e-01 -3.52417648e-01 2.69980103e-01] [ 4.96004671e-01 -3.03932160e-01 3.94142151e-01 ... -7.65049279e-01 5.71191162e-02 -4.11590993e-01] [-5.07351533e-02 6.05962165e-02 1.02111205e-01 ... -2.11838916e-01 -4.24746633e-01 -5.92500746e-01]] [[ 2.40966175e-02 2.06903592e-01 -3.35209906e-01 ... 1.21416010e-01 -1.71542868e-01 -4.36584294e-01] [-1.73903793e-01 -8.91458839e-02 2.08117440e-01 ... -2.89607346e-01 -2.17383914e-03 -2.25938424e-01] [ 3.86752814e-01 6.30266786e-01 7.77282119e-01 ... 2.45671511e-01 6.91637695e-01 -4.49876815e-01] ... [ 2.67985649e-02 -4.52820212e-01 -3.44669759e-01 ... 6.12860322e-02 -4.82637554e-01 -4.09353405e-01] [-3.06718588e-01 1.57600008e-02 -1.81418434e-01 ... -6.80349171e-02 2.23410755e-01 -3.35393429e-01] [ 2.22815990e-01 2.70455688e-01 6.13577887e-02 ... 2.58671969e-01 4.99448895e-01 -1.45358637e-01]]]] ... [[[[ 5.26264012e-01 1.96897909e-01 -1.60088599e-01 ... -1.23706646e-02 1.02975321e+00 -1.02667797e+00] [-1.10870492e+00 1.64758360e+00 -9.74121690e-01 ... -9.98114526e-01 -5.65448284e-01 -5.52749038e-01] [-2.00113133e-01 2.23389745e-01 -6.98465466e-01 ... 1.98672265e-01 -5.94560385e-01 4.97774392e-01] ... [-1.58435389e-01 -8.39957476e-01 -3.31078768e-01 ... -7.34036624e-01 -4.66707796e-01 1.17039815e-01] [-6.61132753e-01 -2.75790215e-01 4.41003561e-01 ... -1.66364983e-01 -2.89349079e-01 6.60167575e-01] [ 4.24499333e-01 -1.10550153e+00 3.84727359e-01 ... -6.78256154e-01 -1.72034591e-01 -5.65932691e-02]] [[ 3.48237872e-01 -6.69374526e-01 4.48396295e-01 ... 
2.15241127e-02 2.16851458e-01 -1.97109990e-02] [-1.77353752e+00 -9.82365847e-01 -2.83147469e-02 ... 3.44777077e-01 8.94982040e-01 -5.60882568e-01] [ 3.56912225e-01 -6.38026893e-01 1.10309660e+00 ... -9.02168274e-01 -3.34486306e-01 1.02083397e+00] ... [-5.61750419e-02 1.23481596e+00 -2.24983394e-01 ... -6.20194077e-01 2.76183784e-01 7.00542748e-01] [ 9.58101079e-02 -1.55835763e-01 1.17179382e+00 ... 8.09981465e-01 1.44332498e-01 8.44835818e-01] [ 1.32568872e+00 -2.66031027e-01 2.96603352e-01 ... -7.92953074e-01 3.56479824e-01 -5.02340674e-01]] [[ 2.36416891e-01 5.14563680e-01 -2.40595296e-01 ... 6.24635536e-03 -7.93531120e-01 1.55014098e-01] [-3.66015643e-01 -5.37824392e-01 1.55773923e-01 ... 2.79950142e-01 -1.82194039e-01 3.86895657e-01] [ 4.56364214e-01 3.52263868e-01 8.56956005e-01 ... -1.82314947e-01 6.22658253e-01 -3.60506922e-01] ... [-1.12077224e+00 -6.03191018e-01 -8.11185181e-01 ... -2.98899978e-01 1.07592298e-02 2.45673969e-01] [-3.52803975e-01 -7.50081092e-02 -8.06435406e-01 ... 6.65765703e-01 -3.39848906e-01 3.17369372e-01] [ 4.45130974e-01 7.01396942e-01 -4.74125475e-01 ... 4.01775241e-01 -3.79313916e-01 -4.13113058e-01]] ... [[-1.66486651e-02 1.49566785e-01 -6.37440085e-01 ... -5.41523576e-01 -1.27520889e-01 -5.17703414e-01] [ 4.62915599e-01 5.92475981e-02 1.80738914e+00 ... -2.62413442e-01 7.07961082e-01 -1.43405938e+00] [-2.38559172e-01 -2.49328330e-01 -8.85798097e-01 ... 5.94039202e-01 -7.08126798e-02 7.36141503e-01] ... [-2.72857577e-01 1.38071299e-01 -1.77680328e-01 ... -3.26040477e-01 3.52403551e-01 5.68884909e-01] [ 7.44302422e-02 1.34433937e+00 7.96019137e-01 ... -8.17438126e-01 -1.27370477e+00 9.63750258e-02] [ 1.42955399e+00 -7.74899662e-01 5.18195093e-01 ... -2.78110653e-01 3.20281684e-01 -2.91419089e-01]] [[-1.14000726e+00 9.14829314e-01 -2.62034267e-01 ... 1.49778414e+00 -1.38038123e+00 9.39415932e-01] [-1.62297320e+00 -1.90294132e-01 -1.58628762e+00 ... 
-6.74031258e-01 3.71061981e-01 -6.56790316e-01] [-1.15594804e+00 -1.15744844e-01 5.44256151e-01 ... -1.42311239e+00 -2.00305536e-01 8.96664977e-01] ... [ 3.28364410e-02 -3.73703033e-01 3.17531377e-01 ... -7.44602978e-01 1.10164976e+00 -8.91056478e-01] [-5.28550088e-01 -4.68084276e-01 7.61397660e-01 ... 3.08225632e-01 7.04746187e-01 9.90837961e-02] [ 7.59454221e-02 8.22527483e-02 -1.64553329e-01 ... -7.04146981e-01 -1.12044230e-01 -1.35317302e+00]] [[-1.05795133e+00 -5.98126292e-01 -4.47704554e-01 ... -3.85535032e-01 -2.00969934e+00 -1.40886641e+00] [ 5.81805050e-01 1.26622766e-01 1.81599170e-01 ... -1.63785182e-02 -5.02824187e-01 4.23311263e-01] [-3.56512442e-02 1.14175594e+00 -2.62716502e-01 ... -2.44658664e-01 -9.58535492e-01 -2.39014104e-02] ... [-1.15673625e+00 -9.05703679e-02 -9.37307477e-01 ... 4.71316397e-01 -3.73815119e-01 -4.59101766e-01] [ 7.69328535e-01 1.20392573e+00 3.08241159e-01 ... 4.41002041e-01 -8.69104490e-02 5.98207235e-01] [ 3.47453505e-01 4.88417625e-01 1.05239403e+00 ... -1.99106589e-01 -2.22537816e-01 -1.62692919e-01]]] [[[-1.12812901e+00 9.77404058e-01 5.25137424e-01 ... -2.43949389e+00 -3.99683213e+00 1.56968141e+00] [-1.47825167e-01 1.92139313e-01 -1.31408465e+00 ... 1.59223723e+00 -4.81128603e-01 1.21466160e+00] [-2.69702578e+00 2.00210667e+00 3.08623457e+00 ... 2.36763144e+00 3.14440995e-01 5.44745803e-01] ... [-3.33386779e-01 -2.46973825e+00 9.59379524e-02 ... 1.31089580e+00 -2.47581649e+00 7.29398549e-01] [ 1.01367509e+00 1.11623263e+00 -1.69515812e+00 ... 1.02376580e+00 -7.28700578e-01 2.17967463e+00] [-4.12067175e+00 -4.15958226e-01 1.33211124e+00 ... 3.15711713e+00 2.53131199e+00 1.12337148e+00]] [[-2.06734681e+00 3.67254943e-01 3.82203269e+00 ... 1.09555006e+00 -1.01046944e+00 -1.19623685e+00] [ 1.96306288e+00 -6.94212794e-01 2.44714522e+00 ... -6.05833650e-01 -1.93316245e+00 9.26791906e-01] [ 4.72256511e-01 -1.11480248e+00 -3.84755492e-01 ... 7.85232186e-01 1.07979453e+00 -1.37636948e+00] ... 
[-1.45189062e-01 1.88680500e-01 -1.09869108e-01 ... 1.07224417e+00 2.69270658e+00 1.75082231e+00] [ 2.97287971e-01 -8.58963132e-01 1.65858364e+00 ... -1.59338081e+00 -1.67330280e-01 -7.61295652e+00] [-1.51659715e+00 -8.05749357e-01 -8.79064381e-01 ... 1.12298512e+00 1.87308919e+00 5.51654339e-01]] [[ 2.48882008e+00 -7.09071159e-01 -2.31515837e+00 ... 2.56421494e+00 -7.35325754e-01 -2.31975213e-01] [ 3.25879145e+00 3.10108423e-01 3.29570389e+00 ... -2.28824139e+00 1.56576574e+00 -6.69592142e-01] [ 1.13026392e+00 -1.36449230e+00 -1.24289036e+00 ... -7.35077202e-01 2.08659220e+00 2.21238208e+00] ... [-9.83548224e-01 -2.54422355e+00 -2.64493167e-01 ... 3.37535381e+00 2.61138052e-01 -5.01451492e-01] [-2.52918696e+00 -1.97858667e+00 -6.20110810e-01 ... -3.17846704e+00 5.65341175e-01 2.23459661e-01] [ 5.19480407e-01 -1.81466186e+00 2.04831982e+00 ... 2.21507594e-01 -1.73483396e+00 5.98492920e-01]] ... [[ 1.66035965e-01 -3.15410078e-01 -4.74363983e-01 ... 1.48177457e+00 1.13585603e+00 -7.40551412e-01] [ 7.84206152e-01 2.30272055e+00 2.44817853e+00 ... -8.57148945e-01 -1.92352164e+00 1.28738880e+00] [-2.81043410e+00 6.02867544e-01 1.30167317e+00 ... 2.25104854e-01 4.91847605e-01 -3.20118093e+00] ... [-1.70185053e+00 2.37114072e+00 -1.83597124e+00 ... 1.45200801e+00 1.38081825e+00 -5.68747807e+00] [ 1.57299483e+00 -2.30056381e+00 1.07253385e+00 ... 1.46689069e+00 -1.89900064e+00 -3.17604756e+00] [ 2.21024537e+00 2.23265815e+00 1.06359613e+00 ... 6.46622539e-01 5.83421648e-01 1.25931084e+00]] [[ 1.50959492e+00 -3.78473091e+00 -2.35799575e+00 ... 1.09315443e+00 -5.34190536e-01 -5.61697125e-01] [ 1.50023770e+00 -1.13715374e+00 -3.84403133e+00 ... 1.23184478e+00 6.19959950e-01 2.73855448e+00] [ 2.17307067e+00 1.05137742e+00 3.17488581e-01 ... -1.24947989e+00 -2.75458765e+00 -1.16256118e+00] ... [-9.98913169e-01 -1.82478607e+00 4.78408813e-01 ... -4.70425487e-01 -5.26536107e-01 -9.47061539e-01] [-4.68124509e-01 3.26001406e+00 1.73124146e+00 ... 
6.08131945e-01 2.68747187e+00 1.28735077e+00] [-2.61006999e+00 2.07639027e+00 1.72323358e+00 ... 4.05650520e+00 6.34152949e-01 -1.04230785e+00]] [[-1.40424585e+00 -1.66464102e+00 2.06474900e+00 ... -3.92908067e-01 1.35563159e+00 3.34747386e+00] [ 4.46825296e-01 -7.06275105e-01 -1.40126288e+00 ... -2.35914126e-01 6.38963938e-01 1.54599130e+00] [ 2.23506188e+00 3.38520080e-01 -1.23463750e+00 ... 1.50089300e+00 -1.09338892e+00 -3.94743383e-01] ... [ 1.61568308e+00 -6.12960637e-01 -3.09519482e+00 ... -2.18881547e-01 3.46489167e+00 -1.95823640e-01] [ 1.08891368e+00 7.12017238e-01 5.82918346e-01 ... -1.85369289e+00 2.28777480e+00 1.53098524e+00] [-2.14876604e+00 -2.63719749e+00 -4.21170533e-01 ... 1.88263309e+00 -9.52865660e-01 -2.28183436e+00]]] [[[ 3.81340027e-01 -1.70010477e-01 -7.67178655e-01 ... 4.49001014e-01 -4.47080657e-02 -8.05019081e-01] [-1.06818832e-01 6.87860977e-03 -3.65744084e-01 ... 4.67583388e-01 6.66847706e-01 -9.37929563e-03] [ 2.81116843e-01 -7.99472988e-01 3.70533854e-01 ... -8.79834175e-01 2.24862322e-01 5.54323256e-01] ... [ 1.81102961e-01 -2.49919459e-01 -6.28014624e-01 ... -9.02279466e-02 -1.54205918e-01 -4.00615297e-02] [ 3.14203531e-01 -6.50085956e-02 5.92179358e-01 ... 2.73465347e-02 2.87506543e-02 1.10891312e-01] [-1.93220839e-01 -6.60211921e-01 -1.70932308e-01 ... 4.86094385e-01 -3.02647729e-03 8.59670117e-02]] [[-2.30723575e-01 -1.89169037e+00 3.09754372e-01 ... 1.09314477e+00 -2.29470566e-01 -7.84423828e-01] [-9.39347208e-01 2.23398685e-01 -2.25046188e-01 ... 4.09875065e-01 -6.85242236e-01 -1.02066016e+00] [ 1.09051475e-02 2.69525677e-01 9.19248819e-01 ... -1.10570090e-02 -5.30950427e-01 -1.41500580e+00] ... [ 8.98673773e-01 5.48180938e-01 -3.23656499e-01 ... -9.84468341e-01 1.35745871e+00 4.36668247e-01] [-3.38378549e-02 -3.37158799e-01 -1.92575365e-01 ... 4.87955809e-01 1.50220203e+00 1.11390918e-01] [ 5.44113159e-01 6.47518575e-01 -6.64994121e-01 ... 
-3.43013942e-01 -2.82787122e-02 -6.40039980e-01]] [[ 7.16144800e-01 8.05451870e-01 1.09760773e+00 ... 7.64626086e-01 -4.80142623e-01 -7.47391224e-01] [ 5.37006259e-01 1.61505550e-01 1.29477155e+00 ... -2.92479247e-01 1.11851060e+00 8.19465160e-01] [-1.09437025e+00 2.11700305e-01 2.22599413e-02 ... -7.67765701e-01 -1.27922308e+00 1.72132209e-01] ... [-1.09810245e+00 1.33431563e-02 -9.17066753e-01 ... 7.78686881e-01 -7.25745380e-01 7.49145523e-02] [-1.17106223e+00 4.79769796e-01 -1.37757897e+00 ... -5.36584258e-01 1.02145091e-01 1.32482803e+00] [-8.20504904e-01 3.55180651e-01 3.25771958e-01 ... 1.75782830e-01 -8.53984654e-01 8.91763330e-01]] ... [[-1.63348377e-01 -6.82318211e-01 -9.67794716e-01 ... -6.71518922e-01 -2.75536865e-01 4.10515338e-01] [ 9.70535994e-01 1.74037886e+00 -6.76147521e-01 ... -7.06933662e-02 9.77456987e-01 7.24865556e-01] [-2.30992839e-01 -1.19250149e-01 2.85992891e-01 ... -6.60837889e-01 7.93243468e-01 7.88096666e-01] ... [ 3.75189513e-01 5.12174368e-01 -2.27408215e-01 ... -1.27522469e+00 -3.60336900e-01 -5.63247263e-01] [-2.09341556e-01 2.65262306e-01 1.20836639e+00 ... 6.89401031e-01 -4.97827828e-02 -1.34610605e+00] [-7.01448798e-01 1.32199931e+00 -2.89957762e-01 ... -9.39534843e-01 -2.71693945e-01 -1.42619872e+00]] [[ 1.48744062e-01 1.26119748e-01 -6.91864789e-01 ... -6.21791363e-01 -1.22703552e-01 -4.55558836e-01] [-1.87655389e-02 -6.81903720e-01 -6.84591234e-01 ... -1.92103013e-01 3.32924008e-01 1.26309741e+00] [ 2.36997902e-01 -3.21371526e-01 1.04070258e+00 ... -8.84328008e-01 -1.51702255e-01 -2.01641247e-01] ... [ 1.84396756e+00 -7.41460741e-01 -8.41871381e-01 ... 4.28883344e-01 -1.59239277e-01 1.24224186e+00] [-6.60768688e-01 -9.56544936e-01 3.55473943e-02 ... -4.57851768e-01 2.31837124e-01 -6.04722917e-01] [-1.99761555e-01 7.27130115e-01 -6.37858331e-01 ... 8.50320637e-01 3.13666701e-01 1.22597051e+00]] [[ 7.86759436e-01 3.72349292e-01 8.28178585e-01 ... 
-2.34382786e-02 7.86300719e-01 3.09274137e-01] [-2.79378116e-01 3.97378623e-01 -1.57171175e-01 ... 7.97630474e-02 -1.08565176e+00 3.43244642e-01] [ 1.62853643e-01 -8.73047292e-01 -1.36147916e+00 ... 5.45143902e-01 -4.64882553e-02 -1.29279745e+00] ... [-1.26864707e+00 -8.60037267e-01 -9.40818310e-01 ... -6.14743292e-01 1.00375187e+00 -2.12496161e-01] [ 4.81130630e-02 -4.69213054e-02 1.11807430e+00 ... 1.95082158e-01 1.43049693e+00 4.14768636e-01] [ 8.01494837e-01 -3.84236038e-01 -1.67326018e-01 ... -7.28156567e-01 2.07608476e-01 6.60966039e-01]]] [[[-2.44067788e+00 -5.72573483e-01 1.80041596e-01 ... 1.26546764e+00 2.84205437e+00 -1.02236116e+00] [ 9.85740185e-01 -1.05150461e+00 -2.10190749e+00 ... -3.86948943e+00 -6.34168804e-01 -1.87167466e+00] [-4.60722446e-02 1.27357042e+00 1.19230640e+00 ... 2.31299877e+00 2.50661403e-01 7.37161398e-01] ... [ 1.72027087e+00 7.77815104e-01 -1.41251907e-01 ... 6.37981415e-01 4.87702656e+00 -9.70224023e-01] [-5.27526736e-01 7.12047517e-01 2.17643142e+00 ... 1.84927821e-01 1.26131773e+00 1.18595421e+00] [-3.80971581e-01 -7.16575742e-01 -3.41943920e-01 ... 6.15343690e-01 -9.22217816e-02 -2.11181378e+00]] [[ 1.29156625e+00 7.94228017e-01 -2.76801157e+00 ... 7.32422292e-01 6.46554053e-01 -3.74765724e-01] [-3.67761493e+00 -5.37532568e-01 3.42782468e-01 ... 2.05189180e+00 -5.83326399e-01 4.36064452e-01] [ 1.49918944e-01 -2.64960051e-01 -9.13550079e-01 ... -9.22275007e-01 -5.39787889e-01 4.20341349e+00] ... [-2.30035949e+00 2.15846586e+00 -5.30744195e-01 ... -1.27526355e+00 6.22792065e-01 2.20406318e+00] [ 1.09882712e+00 -4.66811657e-01 7.61673987e-01 ... -2.06942010e+00 -1.62827671e-01 -8.40685070e-01] [-2.27777672e+00 -1.58253729e+00 2.06689835e+00 ... 1.96826720e+00 2.45385385e+00 -2.17330098e+00]] [[-5.67465246e-01 -1.66641369e-01 -1.55776991e-02 ... -1.10600674e+00 -7.37441555e-02 -1.15201938e+00] [ 6.78897724e-02 1.06157458e+00 -2.47601843e+00 ... 
3.25131083e+00 9.84496057e-01 4.29229528e-01] [-3.37007356e+00 -8.37439060e-01 -1.89283803e-01 ... 4.82399970e-01 1.31936741e+00 -1.51010382e+00] ... [-6.18390024e-01 4.71331626e-01 -4.24321860e-01 ... 2.31305313e+00 -2.19274473e+00 3.93357687e-02] [ 3.75871241e-01 -5.32573879e-01 1.76053894e+00 ... -1.11245513e+00 1.58598140e-01 1.25184333e+00] [-8.88774469e-02 1.26605117e+00 -9.80179980e-02 ... 3.05991024e-01 9.15528461e-02 3.77607870e+00]] ... [[-2.43011460e-01 2.40331870e-02 -1.85400865e-03 ... 1.82720566e+00 -2.09277081e+00 -2.48079729e+00] [ 3.72813672e-01 2.30193615e-01 5.74989557e-01 ... -2.12243629e+00 -1.87584817e+00 8.93304527e-01] [-7.64086097e-02 -3.88878733e-01 6.67136788e-01 ... 2.41857871e-01 -1.19485283e+00 7.78295815e-01] ... [-1.09653902e+00 -2.80811965e-01 1.39491153e+00 ... 1.64795792e+00 1.80804694e+00 2.65778708e+00] [-1.43128586e+00 -1.61286211e+00 8.47513616e-01 ... 1.80577123e+00 -2.96218902e-01 -2.25836232e-01] [ 1.69150400e+00 1.13714099e-01 -2.25997019e+00 ... -2.85643876e-01 3.45666289e-01 7.86929429e-01]] [[-1.94753945e+00 4.63656843e-01 -5.28344333e-01 ... 3.80519331e-02 4.83269989e-01 -1.99393260e+00] [-1.42458999e+00 -9.22461689e-01 -1.29738081e+00 ... 1.15525436e+00 -1.99651510e-01 -6.51340306e-01] [-1.45762396e+00 1.44751012e+00 1.73223841e+00 ... -1.10135937e+00 -5.44121683e-01 5.59990466e-01] ... [-2.37780675e-01 4.48315680e-01 3.34201187e-01 ... -2.44346634e-01 7.75524318e-01 9.95213836e-02] [ 9.29157615e-01 8.54230642e-01 2.34049177e+00 ... -8.30527663e-01 -2.50170231e+00 -1.84307420e+00] [ 1.10258317e+00 -6.95095241e-01 1.77176785e+00 ... -5.96828103e-01 6.17481805e-02 -8.36542428e-01]] [[-7.28697926e-02 6.25781775e-01 1.24869502e+00 ... -1.13975143e+00 1.67530000e+00 -1.80304241e+00] [ 1.63055882e-01 6.94117486e-01 1.28786072e-01 ... 1.29056060e+00 -1.06820941e+00 1.45441854e+00] [ 1.42860904e-01 -1.67987549e+00 -9.38898742e-01 ... 7.68469810e-01 1.19719341e-01 5.49688458e-01] ... 
[-5.99710047e-02 9.33239222e-01 -9.09599662e-01 ... -1.05108276e-01 -2.88211890e-02 2.39838809e-01] [ 1.03628862e+00 6.48976639e-02 1.70285082e+00 ... 1.35164607e+00 -3.51173806e+00 8.57088804e-01] [ 1.04900968e+00 -3.34199643e+00 -2.83839369e+00 ... 1.58226505e-01 3.59957647e+00 3.41322994e+00]]] [[[-6.40507340e-01 1.98484838e+00 -1.53670418e+00 ... 3.56318295e-01 -1.92671692e+00 2.37114239e+00] [-2.73961878e+00 1.34786129e+00 -2.40807152e+00 ... 3.06246185e+00 1.20359516e+00 3.53490543e+00] [-1.46095586e+00 4.96551879e-02 -1.63020980e+00 ... -2.48222446e+00 1.33999085e+00 4.87796164e+00] ... [ 1.11693375e-01 -1.40536928e+00 3.49294925e+00 ... -1.11315835e+00 3.13211918e+00 2.70982075e+00] [ 2.94275212e+00 1.95141113e+00 -1.54900539e+00 ... 1.94010401e+00 1.21124470e+00 2.45999527e+00] [-2.48333573e+00 3.96617174e+00 -7.83717155e+00 ... 2.73840785e+00 -4.35552239e-01 1.11638999e+00]] [[ 1.62395048e+00 1.51637346e-01 4.18240488e-01 ... 1.46682656e+00 -5.17140007e+00 2.75784135e-02] [-2.87529898e+00 -1.15803921e+00 3.26022565e-01 ... 5.13002253e+00 2.63445616e+00 -2.53838086e+00] [-2.75867009e+00 -1.45001209e+00 9.17406321e-01 ... 1.94439542e+00 -1.40636981e+00 2.19243336e+00] ... [ 4.76268530e+00 -3.83379281e-01 6.92459270e-02 ... -1.05744112e+00 6.80988610e-01 -1.85729921e-01] [ 7.91553915e-01 5.94624329e+00 9.43979204e-01 ... -2.04141092e+00 6.30226946e+00 -1.88832417e-01] [-1.86094069e+00 3.08166409e+00 1.01107574e+00 ... 5.05915821e-01 1.21867549e+00 -7.72255087e+00]] [[ 5.95312059e-01 4.56420779e-01 -4.90156822e-02 ... -7.91848242e-01 1.21694708e+00 4.18612051e+00] [ 6.36214554e-01 -1.05700266e+00 3.08015442e+00 ... -1.27580070e+00 1.56551003e+00 -1.90240121e+00] [ 8.98217022e-01 2.17117620e+00 2.63835812e+00 ... 1.01970065e+00 -2.00904393e+00 1.09490800e+00] ... [-5.52579582e-01 5.00978708e+00 -6.30314469e-01 ... 5.28674312e-02 -1.02382898e+00 1.05978735e-01] [-1.97925222e+00 -3.16571546e+00 -1.46937084e+00 ... 
1.00647068e+00 -7.79640496e-01 2.03788185e+00] [-5.67796826e-01 -1.25284564e+00 -4.18664789e+00 ... 2.39621520e+00 1.41921103e+00 -4.91938877e+00]] ... [[ 3.17466438e-01 -2.58210635e+00 -1.97219539e+00 ... 4.19058943e+00 -1.78989816e+00 -2.69582629e+00] [ 3.05093241e+00 9.14763927e-01 3.24186182e+00 ... -9.43423688e-01 3.01016033e-01 1.36620319e+00] [ 1.56009781e+00 6.90132260e-01 -2.06866527e+00 ... 4.37851763e+00 -2.09465480e+00 -1.84517765e+00] ... [-1.81928325e+00 -9.43134069e-01 -1.17457020e+00 ... 1.60254371e+00 -5.35256004e+00 -1.95279729e+00] [ 9.53166723e-01 3.58453155e+00 -1.38037249e-01 ... -2.57441807e+00 2.36608076e+00 5.99132597e-01] [-3.86979246e+00 -2.66771460e+00 -3.54923344e+00 ... -2.54042292e+00 -2.95575428e+00 2.37675881e+00]] [[ 3.12901711e+00 -1.03960156e+00 -1.49659240e+00 ... -3.01600552e+00 2.63781667e+00 -4.64909220e+00] [ 1.40339029e+00 -1.86974525e+00 9.46584523e-01 ... 5.14730167e+00 -3.37761712e+00 -2.69524479e+00] [ 4.63957119e+00 -1.83952236e+00 3.91220003e-01 ... 8.08831513e-01 1.32521737e+00 3.81365132e+00] ... [ 1.00155473e+00 -8.20628345e-01 4.67307663e+00 ... 1.23293245e+00 -5.35220385e-01 9.38639164e-01] [-6.23152852e-01 3.22954226e+00 -1.11465704e+00 ... 9.57914948e-01 3.26262951e+00 -2.95412040e+00] [-1.79520690e+00 2.11449027e+00 6.21100235e+00 ... -2.36168814e+00 6.98314786e-01 2.94307947e+00]] [[ 1.61444426e-01 1.04748917e+00 -4.53461409e+00 ... -5.52626193e-01 -5.64122820e+00 -4.54103994e+00] [ 5.32719135e-01 -3.22114658e+00 -1.50677657e+00 ... 1.43060589e+00 -8.32787931e-01 2.14687467e+00] [-5.74633181e-01 -2.96546030e+00 -1.26183546e+00 ... -3.05279517e+00 -2.60362840e+00 -1.47541130e+00] ... [ 9.25493240e-01 -2.20478821e+00 -1.47848332e+00 ... -9.21097577e-01 -9.67175066e-01 5.31304646e+00] [-1.13554120e+00 -2.23986197e+00 2.93712884e-01 ... 2.55773038e-01 9.56033766e-01 -2.97944570e+00] [ 6.18695438e-01 1.79989660e+00 4.53477335e+00 ... 
-3.47139388e-01 1.03645170e+00 -7.98902631e-01]]] [[[ 3.64383385e-02 -9.38137099e-02 -3.50047126e-02 ... 1.52208492e-01 -6.08374774e-01 -1.66796744e-01] [-4.45551455e-01 -2.72112638e-01 -2.16829151e-01 ... -2.59835422e-01 -3.65382552e-01 6.57196417e-02] [ 1.11376688e-01 4.55096692e-01 -1.12417482e-01 ... 8.46300572e-02 -1.41330838e-01 -2.80273743e-02] ... [-9.82637182e-02 -3.54261667e-01 4.27941382e-01 ... -7.34349154e-03 1.68705583e-01 -1.30430609e-01] [ 1.19220234e-01 -4.56557125e-01 2.19902128e-01 ... -2.01863930e-01 3.59503716e-01 2.92900592e-01] [-5.46169914e-02 2.61940181e-01 4.49893214e-02 ... -4.69321758e-01 -6.90746367e-01 1.66533604e-01]] [[ 1.14679858e-01 -5.20014996e-03 -9.48388875e-02 ... 1.58277638e-02 -3.17689747e-01 -5.10111153e-02] [ 3.31550568e-01 4.86116320e-01 -2.27384284e-01 ... -7.86641389e-02 -3.52550507e-01 -2.27082819e-01] [-4.02607739e-01 5.42767672e-03 1.63430870e-01 ... -6.97896862e-03 -4.50190961e-01 -1.24739751e-01] ... [-5.10002971e-01 5.66686451e-01 3.79423127e-02 ... 4.18327488e-02 -2.68899083e-01 -1.48910046e-01] [-1.89964831e-01 1.27705097e-01 6.02807701e-02 ... -1.19494610e-01 -1.26323849e-01 -6.62516475e-01] [-2.05791667e-02 3.68763685e-01 -3.32070701e-02 ... 3.90487254e-01 3.20077866e-01 -2.77185261e-01]] [[-5.23580432e-01 -4.86867070e-01 4.20714281e-02 ... 5.93248606e-01 5.27878068e-02 5.46025224e-02] [ 9.33829248e-02 4.20603007e-01 6.24799207e-02 ... -1.98855717e-02 1.85803518e-01 2.98716187e-01] [-1.79127723e-01 7.95279086e-01 -1.90113798e-01 ... 7.80385315e-01 7.91636333e-02 -3.95285577e-01] ... [ 3.84822488e-01 1.45186052e-01 3.11208665e-01 ... -1.10925183e-01 3.30098183e-03 8.18026531e-03] [ 2.44408652e-01 -2.66869903e-01 2.40543485e-01 ... -3.04585993e-01 2.88566738e-01 2.65721697e-02] [ 5.75045049e-01 -5.18985868e-01 1.18851595e-01 ... -2.74124175e-01 2.33775571e-01 -3.19799185e-01]] ... [[-3.67459625e-01 5.47630608e-01 5.87598026e-01 ... 
-1.89608306e-01 -3.01697552e-01 2.80875385e-01] [-1.55265823e-01 2.30767652e-02 -6.81199491e-01 ... -3.12968045e-01 -7.47253820e-02 -2.32874930e-01] [ 2.07189783e-01 -1.45733967e-01 3.76985013e-01 ... 4.03932154e-01 -4.20913287e-02 3.31601232e-01] ... [-2.80736864e-01 -4.55272973e-01 2.52890557e-01 ... 5.50208539e-02 5.87631524e-01 -4.64128293e-02] [ 1.39918134e-01 7.52492175e-02 -6.64897561e-02 ... 5.12603402e-01 -4.22729701e-02 -2.23891944e-01] [-2.06086919e-01 -1.27468675e-01 1.20652609e-01 ... 2.32939422e-02 -9.25050020e-01 -2.92097896e-01]] [[-2.03447476e-01 -4.43341613e-01 -3.30724306e-02 ... -1.54695585e-01 1.78559253e-03 3.47665727e-01] [-4.37750190e-01 -1.80394515e-01 3.46099436e-01 ... 8.67876351e-01 -7.62657583e-01 5.36621511e-01] [ 2.93271691e-01 2.42385685e-01 1.32844998e-02 ... 2.68780291e-01 8.88379589e-02 -7.25638866e-01] ... [-3.13152015e-01 1.64931446e-01 -8.75585452e-02 ... -1.17266037e-01 -4.10188705e-01 2.98846602e-01] [-3.21234874e-02 -2.04221204e-01 -1.61787216e-02 ... -1.15398332e-01 -9.47353989e-02 1.41952768e-01] [ 4.39632684e-01 -3.92175823e-01 4.30327728e-02 ... -5.15439093e-01 -2.28126347e-01 -1.02456085e-01]] [[-3.46758217e-02 -4.74861525e-02 4.29922402e-01 ... 3.20971698e-01 -1.52276233e-01 -1.17893085e-01] [-4.52812314e-01 -1.61999002e-01 3.13089229e-02 ... 1.87476836e-02 5.63820638e-02 -3.51893246e-01] [ 3.07408661e-01 -5.34561612e-02 5.79851091e-01 ... 6.06307238e-02 -1.48169147e-02 7.46676773e-02] ... [ 9.52993259e-02 5.04695237e-01 2.30446860e-01 ... 5.16560912e-01 -1.73216283e-01 1.57572925e-02] [-5.12738824e-01 -3.27447534e-01 -2.07238093e-01 ... -2.14654237e-01 -2.13422880e-01 -1.08156502e-01] [ 3.17817718e-01 7.94174299e-02 -5.33465564e-01 ... -2.59187281e-01 -1.06561612e-02 -7.46401489e-01]]]] [[[[-1.22147393e+00 -3.72514389e-02 -9.32368934e-01 ... 2.18143672e-01 -6.25373498e-02 3.07865679e-01] [ 8.30822706e-01 1.34603059e+00 -7.09034503e-01 ... 
1.10477470e-01 1.90679416e-01 2.01761767e-01] [-2.00128511e-01 -3.57172549e-01 -3.78876254e-02 ... 3.56841147e-01 -1.19613588e+00 6.04141235e-01] ... [ 3.68370771e-01 1.52813390e-01 -4.18179333e-01 ... -6.52635470e-02 5.50926924e-01 3.48748595e-01] [ 4.50098038e-01 4.95463014e-02 9.99191046e-01 ... -4.84602749e-01 1.20845094e-01 8.16501915e-01] [-8.17082152e-02 -1.03092976e-01 -1.17733486e-01 ... -5.61516907e-04 -1.11424094e-02 1.49145707e-01]] [[ 7.70809650e-01 -1.25541711e+00 -3.31866533e-01 ... 1.60934973e+00 -5.98030746e-01 5.97891271e-01] [ 9.32843029e-01 5.67772746e-01 -1.36429429e+00 ... 1.07564366e+00 -1.57803953e+00 -2.55207777e-01] [ 5.45782089e-01 5.54037392e-01 -2.56852150e-01 ... -1.02179654e-01 -3.59201759e-01 4.46876645e-01] ... [-1.09935045e-01 -8.66691232e-01 1.19078481e+00 ... -1.02453864e+00 1.08416229e-01 -2.93961346e-01] [ 4.25558686e-01 7.89042354e-01 3.43479127e-01 ... 1.89639300e-01 -1.63982677e+00 2.12455556e-01] [ 8.04005146e-01 -1.95955947e-01 3.35646304e-03 ... -3.78002733e-01 2.45364189e-01 9.75894034e-01]] [[-9.81759548e-01 -8.18122685e-01 5.23587584e-01 ... -4.28673446e-01 9.85211879e-02 -2.33670652e-01] [ 6.19225442e-01 8.67347240e-01 8.57305646e-01 ... 1.91582501e-01 -2.60394961e-01 -1.12818635e+00] [ 8.61718953e-01 1.01295412e-01 3.40907991e-01 ... 4.54275161e-01 -5.87654054e-01 -4.05159146e-01] ... [ 8.10511827e-01 3.68517518e-01 -1.21866596e+00 ... -1.03672338e+00 1.08573890e+00 -1.23110521e+00] [ 1.06534612e+00 -1.35457301e+00 1.00324869e+00 ... 8.53100181e-01 -1.12736559e+00 5.55649884e-02] [-5.54432988e-01 4.06381786e-01 2.41408184e-01 ... -1.59824061e+00 -5.41386604e-01 -7.46852458e-02]] ... [[ 7.12910831e-01 -4.63167429e-01 -4.49944496e-01 ... -3.77711147e-01 2.55546212e-01 -7.66922176e-01] [ 2.86902599e-02 -2.88964361e-01 7.77215660e-01 ... 6.84638679e-01 -1.13421055e-02 -8.47600102e-01] [ 7.30569005e-01 -7.06890881e-01 -8.07366014e-01 ... 9.86716807e-01 7.42116630e-01 8.94054413e-01] ... 
[ 6.77842200e-01 9.84348118e-01 1.71029598e-01 ... 1.19543880e-01 1.34913683e-01 1.35999906e+00] [ 1.09886014e+00 -1.43845010e+00 -1.82276726e-01 ... 4.27300185e-01 -5.49840808e-01 1.80336937e-01] [ 1.55363798e+00 8.27056944e-01 -1.67403772e-01 ... 9.72833574e-01 -4.13621545e-01 3.57934654e-01]] [[ 2.83557504e-01 1.57472163e-01 6.41280562e-02 ... -1.15164809e-01 4.47426677e-01 -6.47650480e-01] [-2.25785673e-01 5.79727590e-01 4.01474863e-01 ... 5.10006726e-01 1.32153738e+00 1.43963192e-02] [ 4.70017284e-01 -3.38678509e-01 2.42599979e-01 ... 4.50663120e-01 -6.68103099e-01 -6.74679458e-01] ... [-3.13809037e-01 3.16161662e-01 -4.53682207e-02 ... -2.88019832e-02 -1.78398505e-01 3.33443761e-01] [ 1.30425677e-01 4.27228183e-01 7.12571070e-02 ... -2.68087298e-01 1.11691594e+00 2.24956647e-01] [-4.69142318e-01 -5.57636619e-01 4.97726381e-01 ... -9.29131806e-01 -3.71403843e-01 7.09948063e-01]] [[-2.23876119e-01 -1.48932457e-01 -7.37330556e-01 ... -1.57447815e-01 -3.05011541e-01 -9.09093320e-01] [-1.03747642e+00 -1.25431657e+00 4.62235689e-01 ... 4.97785285e-02 4.57025431e-02 -1.51568100e-01] [ 8.72020304e-01 -2.04504281e-01 3.48366410e-01 ... 4.61726069e-01 5.63963532e-01 8.78831893e-02] ... [ 4.70606744e-01 7.92627215e-01 -3.96592587e-01 ... 4.16084319e-01 1.98702246e-01 3.62536162e-01] [-1.35533273e+00 -4.18383367e-02 -3.36890280e-01 ... 4.80761737e-01 -9.51803476e-02 1.31174994e+00] [ 2.96780944e-01 3.16019624e-01 -5.83278000e-01 ... 7.46951625e-02 2.91723996e-01 -1.00451863e+00]]] [[[ 1.48735985e-01 -2.41500035e-01 -2.28534675e+00 ... 1.93086255e+00 8.15097630e-01 1.20773363e+00] [ 9.17667210e-01 -1.30057371e+00 -2.23272339e-01 ... -1.05803348e-01 1.99548498e-01 -1.67999542e+00] [ 3.10978508e+00 1.37763703e+00 4.21040356e-01 ... -5.30050755e-01 -3.14277679e-01 7.91149795e-01] ... [ 1.48807919e+00 6.33486867e-01 1.33063585e-01 ... -7.19418108e-01 -1.21078706e+00 -1.36727229e-01] [ 1.69025123e+00 3.42030931e+00 -3.26056331e-01 ... 
1.23272300e-01 1.53383541e+00 2.75210202e-01] [ 2.57140660e+00 3.20852488e-01 8.05403292e-01 ... 9.19076920e-01 -2.84642863e+00 -3.12884808e+00]] [[-1.90752134e-01 -1.78895867e+00 3.09847212e+00 ... -2.69612908e+00 1.80994794e-01 -6.86822355e-01] [ 2.87245607e+00 -1.81483412e+00 -2.83626294e+00 ... -3.20658326e+00 2.53065300e+00 5.55472016e-01] [ 1.07150219e-01 2.44361401e+00 -1.00831568e+00 ... 4.31364346e+00 1.68104267e+00 -2.43714786e+00] ... [ 1.97660482e+00 -4.76622391e+00 1.19874299e+00 ... -1.32681584e+00 2.36959481e+00 -1.58227637e-01] [-4.87654328e-01 -2.92645907e+00 5.84077954e-01 ... 4.72780377e-01 1.36801052e+00 -1.68142393e-01] [ 1.33179235e+00 -1.17356241e+00 7.34100163e-01 ... -1.66262901e+00 9.26162660e-01 1.70545340e+00]] [[-6.06798232e-01 1.12236822e+00 -1.65119195e+00 ... 1.99449122e+00 -1.24833989e+00 -6.70074463e-01] [ 1.86478049e-01 1.59974456e+00 -1.02948129e+00 ... 3.50357509e+00 -6.56163871e-01 3.34577751e+00] [ 3.50407869e-01 2.93492913e+00 -2.45915508e+00 ... 3.31664062e+00 9.37544346e-01 1.10563409e+00] ... [-1.21581078e+00 3.45625311e-01 -1.50398827e+00 ... -1.24248636e+00 -7.27736712e-01 -1.11685300e+00] [-7.98218191e-01 -1.10534832e-01 -1.58645153e+00 ... 1.54806364e+00 -1.15642034e-01 1.57494831e+00] [-2.05603313e+00 -1.46967673e+00 4.65566784e-01 ... -4.88452792e-01 -4.42670286e-01 -6.31962359e-01]] ... [[-1.29847121e+00 -8.25801373e-01 -1.06551468e+00 ... -4.01929617e+00 3.56496334e+00 -4.55743700e-01] [-8.06825995e-01 -1.14126176e-01 2.29966593e+00 ... -2.35155630e+00 -3.53081882e-01 2.78047657e+00] [ 2.34973788e+00 1.90226778e-01 2.88743091e+00 ... -2.00840756e-01 1.76871955e-01 -1.08506858e+00] ... [-9.25220072e-01 1.00949550e+00 2.23209381e+00 ... -1.02307880e+00 -1.66390014e+00 -4.00620431e-01] [-2.49310088e+00 -9.56968844e-01 6.27593994e-01 ... -8.90934110e-01 -1.56234360e+00 5.96912026e-01] [ 7.19129562e-01 2.70821214e+00 1.44525158e+00 ... 
1.50241613e+00 1.28656077e+00 -1.45038009e+00]] [[ 6.81317389e-01 1.32445348e-02 6.33205950e-01 ... -8.33317116e-02 1.02992141e+00 -2.68550587e+00] [ 7.36666381e-01 1.31049788e+00 -1.87266374e+00 ... 2.82625973e-01 -3.20953584e+00 -1.80477619e+00] [ 2.82590556e+00 -7.56368399e-01 1.50881338e+00 ... 3.11357588e-01 4.59124041e+00 -1.58383393e+00] ... [-1.24940407e+00 -2.49245071e+00 9.71507505e-02 ... 1.31076014e+00 1.71466291e+00 9.92123842e-01] [-1.84930074e+00 -1.06384933e+00 -5.85581541e-01 ... 2.02667809e+00 -2.01046562e+00 -1.75785255e+00] [-2.70115465e-01 -6.72101855e-01 -2.55283093e+00 ... -2.00431794e-01 -2.03723240e+00 1.75845528e+00]] [[-5.58164477e-01 1.13064885e+00 1.47594082e+00 ... -1.83936012e+00 2.15082693e+00 -9.31822598e-01] [-2.53762484e+00 5.29597223e-01 9.04680669e-01 ... 1.21724176e+00 1.73427773e+00 -2.60394335e+00] [ 1.50325370e+00 -1.89916983e-01 1.31242186e-01 ... 8.06699216e-01 1.19828260e+00 6.25444829e-01] ... [-3.61391568e+00 8.50769699e-01 6.54338002e-01 ... 2.05107188e+00 7.65118241e-01 6.60668015e-01] [-8.20351720e-01 -1.74930394e+00 5.61995685e-01 ... -8.49190801e-02 9.09949720e-01 -6.84925854e-01] [-7.36717805e-02 -1.01924312e+00 -4.65380335e+00 ... 1.41652799e+00 -3.29921365e+00 -2.99320364e+00]]] [[[-9.65454876e-01 7.13770613e-02 4.50261444e-01 ... 9.41990137e-01 -5.79084337e-01 8.55370879e-01] [-2.35166058e-01 1.11142874e+00 -5.33486456e-02 ... 6.67493582e-01 -3.84831905e-01 4.26790059e-01] [ 2.99291089e-02 2.63620075e-02 5.03542960e-01 ... 6.11788869e-01 -5.11236250e-01 5.50082028e-01] ... [-1.33484125e-01 8.42062294e-01 -4.05655771e-01 ... -8.35381866e-01 3.71877193e-01 -1.25519335e-01] [ 6.90695882e-01 -9.05379891e-01 -2.21890077e-01 ... 5.68249285e-01 5.31832993e-01 3.24799180e-01] [ 2.17763856e-01 5.24711013e-01 -7.11083770e-01 ... 1.99227899e-01 1.06507528e+00 -8.45695913e-01]] [[-1.17772210e+00 5.48404634e-01 -1.04706538e+00 ... 
-1.67129189e-01 4.48604733e-01 5.62784493e-01] [-2.40162596e-01 -4.40506816e-01 -3.17573063e-02 ... -4.10210118e-02 6.49369061e-01 -8.22420269e-02] [ 6.13462865e-01 -7.80803680e-01 2.56545812e-01 ... 1.23957241e+00 2.99185868e-02 1.30464220e+00] ... [-2.84688901e-02 -7.63462722e-01 2.15285555e-01 ... 9.24974203e-01 -7.52902806e-01 -5.44199347e-01] [ 1.39169657e+00 1.12122893e+00 -1.26148665e+00 ... -7.46068001e-01 -5.77392839e-02 -1.13312285e-02] [-3.93192559e-01 8.67749691e-01 1.18895404e-01 ... 1.70523152e-01 -9.21470046e-01 -2.99424052e-01]] [[-8.81676912e-01 -1.45262733e-01 -9.68397334e-02 ... 1.19615436e-01 2.85997212e-01 -5.10731116e-02] [ 1.05819666e+00 -9.00053561e-01 6.12069853e-02 ... 2.64064699e-01 1.29196629e-01 -7.39478409e-01] [ 9.88107473e-02 2.82667689e-02 8.79737362e-02 ... 1.25587034e+00 -1.53163031e-01 2.65834123e-01] ... [-1.00395906e+00 5.91574609e-01 -2.53402531e-01 ... -4.65539545e-01 -1.58208060e+00 -2.30498269e-01] [ 2.35693976e-01 1.68283790e-01 6.30415261e-01 ... -1.86907694e-01 -9.77933049e-01 -6.22892797e-01] [ 6.13311172e-01 6.46516800e-01 6.90361261e-01 ... -1.10459723e-01 -3.26816082e-01 2.94495046e-01]] ... [[-2.67455056e-02 6.26267418e-02 5.33734441e-01 ... 9.35563922e-01 -5.21473944e-01 -1.03334057e+00] [ 9.35189903e-01 -3.94915976e-03 4.05861497e-01 ... 5.62366188e-01 1.32637709e-01 -7.72011206e-02] [ 1.43545777e-01 1.56738237e-01 3.71045887e-01 ... 6.39973938e-01 1.22482836e-01 -2.48253644e-01] ... [-5.01789331e-01 8.87033403e-01 -2.42664024e-01 ... -4.05860841e-01 1.34935692e-01 -2.68885106e-01] [ 5.00885785e-01 -2.22928047e-01 -2.54033208e-02 ... 8.30588043e-01 -8.43834996e-01 6.07028529e-02] [ 2.35105455e-01 9.43841875e-01 -3.73358935e-01 ... 4.35969591e-01 -1.09015405e+00 6.52411044e-01]] [[-5.58695674e-01 7.79700875e-01 -8.62665713e-01 ... -6.97768152e-01 -6.81967914e-01 -7.34883249e-01] [-7.95457184e-01 -9.23431635e-01 1.23693156e+00 ... 
-4.58397448e-01 -3.55425216e-02 -4.01482761e-01] [-7.50766620e-02 -4.91454720e-01 -1.12728596e+00 ... 1.13203704e+00 1.46682337e-01 2.66126603e-01] ... [ 6.89002335e-01 3.22224587e-01 4.34738725e-01 ... -1.92748308e-01 7.73480594e-01 -2.14658417e-02] [-2.46694446e-01 -7.50782847e-01 1.84484944e-01 ... 1.46079913e-01 7.76211202e-01 2.53187865e-01] [-2.62918264e-01 -2.21098632e-01 -6.28286242e-01 ... 7.12744713e-01 -4.24396843e-01 7.62005568e-01]] [[ 1.26937062e-01 1.18393958e+00 4.95929837e-01 ... -3.71508658e-01 -6.39054120e-01 -7.81181380e-02] [ 1.49996221e-01 2.22451866e-01 -1.13502204e+00 ... -7.28976250e-01 6.06341720e-01 -1.17833471e+00] [-1.62321582e-01 1.02706540e+00 -2.35624805e-01 ... -4.52097028e-01 -1.46898854e+00 -2.02666745e-01] ... [ 2.73557276e-01 -7.26489007e-01 -6.71584997e-03 ... 5.10077536e-01 3.27143490e-01 4.19484198e-01] [-2.28562698e-01 -4.97189641e-01 -1.28479406e-01 ... -6.01391852e-01 1.00723398e+00 -1.65904090e-01] [-2.87157856e-02 4.38219130e-01 9.11335230e-01 ... -4.06496555e-01 -5.43445885e-01 3.62967879e-01]]] [[[ 8.14208806e-01 -2.52608871e+00 3.16259600e-02 ... 1.04022777e+00 4.26767617e-02 1.74748138e-01] [ 1.10568857e+00 -2.06293774e+00 -1.33041739e+00 ... 1.28629699e-01 -6.69783950e-01 7.83740103e-01] [ 8.08255672e-01 -1.58746743e+00 1.35502815e+00 ... -1.35324031e-01 -2.69197035e+00 -3.34937215e-01] ... [ 2.62605309e+00 -5.95362008e-01 -5.68668425e-01 ... -1.57740164e+00 1.20336986e+00 -2.06351757e+00] [ 1.40942648e-01 2.93949068e-01 8.14364970e-01 ... 1.02020562e+00 -3.26032549e-01 -6.00951254e-01] [ 3.24013144e-01 -3.17071843e+00 1.77947855e+00 ... -1.79930067e+00 9.38636363e-01 1.73353815e+00]] [[ 3.64486456e-01 1.19619405e+00 2.73740828e-01 ... -3.60827136e+00 -1.41877019e+00 4.95518774e-01] [-1.61450315e+00 -2.40988398e+00 -1.02762902e+00 ... -1.70399463e+00 -1.25153255e+00 7.88568795e-01] [ 3.26648325e-01 8.05765390e-01 1.33226073e+00 ... -5.12227535e-01 -9.45797026e-01 -1.21799135e+00] ... 
[-4.85268265e-01 -1.38365245e+00 -1.84856415e-01 ... 2.73713017e+00 2.01949215e+00 -1.42663562e+00] [ 2.37201953e+00 1.30624020e+00 1.09878278e+00 ... 1.20408082e+00 1.20031011e+00 -1.93651545e+00] [ 1.06998079e-01 8.55139315e-01 -9.14395154e-01 ... 2.30273649e-01 5.65027118e-01 1.67452753e+00]] [[-1.55823633e-01 -3.99598837e-01 -1.62677920e+00 ... 8.59370604e-02 1.04354489e+00 2.82890368e+00] [-1.15097475e+00 -1.06669796e+00 -9.31402266e-01 ... 3.47102106e-01 -2.17841163e-01 -1.45836294e+00] [ 8.13038588e-01 2.27189064e+00 4.95590955e-01 ... -7.98010901e-02 1.41091371e+00 -2.27800652e-01] ... [-1.58952042e-01 1.57891905e+00 8.24984908e-01 ... -1.36101377e+00 -7.35280573e-01 -3.03407460e-01] [-1.11913085e+00 1.27714503e+00 9.74088788e-01 ... 1.57594824e+00 -1.36585450e+00 -3.88443887e-01] [ 1.04262209e+00 -2.16923022e+00 6.59487963e-01 ... 3.44928354e-01 -4.50754501e-02 1.40910268e-01]] ... [[-4.52528000e-01 2.68066669e+00 9.96059895e-01 ... -7.74584532e-01 -1.71973658e+00 -6.71803057e-01] [ 1.91300285e+00 1.99679482e+00 2.91361856e+00 ... -1.37505305e+00 -5.03969431e-01 -3.64729702e-01] [-1.10775441e-01 -6.21650040e-01 -1.23899305e+00 ... -1.40593693e-01 4.51765418e-01 -2.78746367e+00] ... [ 4.33982283e-01 1.59778810e+00 3.46263289e-01 ... -8.23914945e-01 3.66572785e+00 2.85910159e-01] [-2.87594962e+00 3.26300907e+00 8.36415052e-01 ... -5.11628509e-01 6.01925373e-01 9.95067239e-01] [-1.46873391e+00 4.01260495e-01 8.10883224e-01 ... -4.47344869e-01 2.62541056e-01 -1.05876160e+00]] [[ 2.29276276e+00 -1.46401715e+00 -9.56211090e-01 ... -1.95478070e+00 -2.67622566e+00 -2.64839351e-01] [-2.06654739e+00 4.25772250e-01 2.34738350e+00 ... -2.58642405e-01 -3.71646881e-01 1.08115889e-01] [ 4.17195702e+00 -1.27231514e+00 1.13871121e+00 ... -3.93466383e-01 1.24410689e+00 1.96478307e+00] ... [-1.74901640e+00 -1.05874479e+00 6.08593285e-01 ... 1.45348215e+00 3.80183488e-01 -8.01437378e-01] [ 1.55445182e+00 -1.58581424e+00 1.61329818e+00 ... 
1.89551234e-01 -4.36975986e-01 2.01800108e+00] [ 2.41663384e+00 8.21694136e-01 -7.09421281e-03 ... -5.09768724e-01 -9.60740149e-01 -3.48791242e-01]] [[-2.74254471e-01 -1.97661519e+00 1.29946232e+00 ... 1.25468135e+00 1.59666240e-01 1.55571628e+00] [ 3.39364100e+00 -1.48737526e+00 5.27181029e-01 ... -1.08387423e+00 -9.41228628e-01 -1.78206158e+00] [ 3.48296255e-01 1.53730094e+00 3.97240490e-01 ... -4.48575228e-01 -9.02878582e-01 -1.17922592e+00] ... [ 1.00513375e+00 3.56931865e-01 4.47181650e-02 ... 1.60634172e+00 1.91122782e+00 5.09593368e-01] [-1.91653728e-01 9.05808434e-02 -5.86366691e-02 ... 2.01427293e+00 1.98638722e-01 4.53942728e+00] [-2.71318936e+00 -8.01020145e-01 -7.49480844e-01 ... -1.17617235e-01 8.94617915e-01 -1.80078995e+00]]] [[[ 1.99108005e+00 1.59105706e+00 2.09795761e+00 ... -1.47207046e+00 -2.54734707e+00 3.61433077e+00] [ 6.03725854e-03 3.99127603e+00 -1.51515579e+00 ... 1.17832088e+00 -4.81649905e-01 5.51945829e+00] [ 1.01929915e+00 -2.20024753e+00 4.38772202e-01 ... 5.07584620e+00 -9.87229407e-01 -1.21114838e+00] ... [ 6.80430412e-01 -2.62901604e-01 3.86427712e+00 ... -1.54048884e+00 -2.42356849e+00 -2.80073595e+00] [ 1.47315085e+00 -2.73387790e+00 -3.68228436e+00 ... 1.66400301e+00 -1.72453022e+00 4.06573009e+00] [-6.71369028e+00 2.74934101e+00 1.76526070e+00 ... 3.26627016e-01 -1.22016394e+00 -3.12750667e-01]] [[ 5.86743069e+00 -1.27773976e+00 5.25676346e+00 ... -5.22165966e+00 -1.22252882e+00 -9.17465866e-01] [-4.45770168e+00 3.21614933e+00 -1.52620184e+00 ... -3.72157955e+00 1.10400426e+00 -2.55330610e+00] [ 1.17890346e+00 8.79916906e-01 -4.90726757e+00 ... 1.97878253e+00 -3.16034985e+00 4.05777359e+00] ... [-1.27443039e+00 5.97246885e-01 4.23740005e+00 ... -2.97759247e+00 -1.54895175e+00 -2.15581924e-01] [-3.51468801e+00 1.73982644e+00 1.01006785e-02 ... -4.43795824e+00 1.04556680e+00 9.32920694e-01] [-4.83745193e+00 1.52280319e+00 -9.36810493e-01 ... 
2.83393383e+00 -8.41935098e-01 1.59412992e+00]] [[ 3.30763245e+00 -1.42465973e+00 3.15323055e-01 ... -2.98349112e-01 3.87599039e+00 2.98539495e+00] [-1.99491429e+00 4.95706499e-01 -1.56323183e+00 ... 6.58661008e-01 -1.16661406e+00 -2.02770162e+00] [ 1.02868855e+00 1.78985155e+00 2.91035104e+00 ... -1.64714324e+00 -3.91379029e-01 3.43280339e+00] ... [ 1.50968933e+00 -1.62668133e+00 -1.86476314e+00 ... -1.77185535e+00 9.66272771e-01 2.19141173e+00] [ 8.08177173e-01 3.41098189e+00 -4.91404980e-01 ... 8.47420335e-01 9.37660336e-01 4.49488133e-01] [ 5.06123829e+00 1.66383350e+00 4.13915920e+00 ... 3.16051698e+00 -3.18155169e+00 3.38827705e+00]] ... [[-1.37254667e+00 1.22303069e+00 6.74298429e+00 ... -3.06512284e+00 -3.08352637e+00 -3.87005830e+00] [ 1.42320085e+00 1.30181181e+00 1.43631697e-01 ... -1.40467718e-01 3.14197056e-02 -3.27636987e-01] [-1.85590178e-01 -5.99064779e+00 2.71522731e-01 ... 1.38183713e+00 1.72129869e+00 -2.80288124e+00] ... [ 1.93206024e+00 -7.80191347e-02 3.31343818e+00 ... -3.25402498e-01 2.78052306e+00 5.76174200e-01] [-3.51737809e+00 -1.84480798e+00 2.89290524e+00 ... -5.89232206e-01 -2.73271728e+00 -1.94651806e+00] [-3.04740548e+00 -1.79633069e+00 1.57378352e+00 ... -4.69270021e-01 -1.90688029e-01 -4.12191182e-01]] [[ 7.18676865e-01 -5.28273392e+00 1.62906599e+00 ... -9.14421976e-01 3.47777277e-01 -2.70577461e-01] [-7.68543541e-01 -9.75379467e-01 5.95838010e-01 ... -2.64962554e+00 5.59980392e+00 -2.05447125e+00] [ 4.10356188e+00 4.69808169e-02 -1.07067704e+00 ... 1.03716815e+00 -5.81181705e-01 2.01135921e+00] ... [ 1.21948814e+00 -7.51398504e-02 1.65197015e-01 ... 2.21469688e+00 -4.70501393e-01 -2.29582143e+00] [-3.79010469e-01 1.04300536e-01 6.05594218e-02 ... 5.47104168e+00 -1.24765396e+00 1.83742613e-01] [ 2.57989717e+00 6.00280643e-01 3.51459026e+00 ... 1.80387270e+00 2.54357457e+00 -2.29926443e+00]] [[-1.58403838e+00 2.04101181e+00 -3.97627616e+00 ... 
-8.67296875e-01 1.52091479e+00 -2.71045625e-01] [ 5.77751303e+00 -1.97697198e+00 1.47983718e+00 ... 1.98571229e+00 -2.58013892e+00 1.41137588e+00] [-7.99667120e-01 -1.29475260e+00 -4.06857729e+00 ... -1.03587225e-01 5.99281430e-01 1.78342628e+00] ... [ 1.86424577e+00 -1.13326585e+00 1.38690519e+00 ... -2.31697798e+00 -1.53713453e+00 -2.41441560e+00] [-3.99182129e+00 1.84308743e+00 -2.45091534e+00 ... 2.37850285e+00 2.08741236e+00 6.01917326e-01] [-3.29866624e+00 -5.68879128e-01 -2.12752676e+00 ... -2.93331385e+00 7.42968261e-01 4.55679131e+00]]] [[[ 8.04484561e-02 -2.13949695e-01 2.98774727e-02 ... -5.99168658e-01 2.14386415e-02 -9.66774896e-02] [ 2.95568164e-02 1.74198508e-01 5.47978282e-01 ... 6.98906183e-02 1.07626307e+00 6.05960488e-01] [ 5.37951350e-01 2.65174389e-01 -5.40087260e-02 ... 5.87488234e-01 3.17882538e-01 -4.72241104e-01] ... [ 7.81648457e-02 6.44593835e-02 -5.79141825e-02 ... -3.64053667e-01 1.39040321e-01 -3.03524844e-02] [-5.47855124e-02 -1.55637428e-01 -4.75796998e-01 ... 1.34004086e-01 -4.13682520e-01 3.17076653e-01] [-1.59007758e-01 -5.98124787e-02 4.15002048e-01 ... 3.40520978e-01 3.81935537e-02 -4.65384334e-01]] [[ 4.91080582e-01 -2.03331769e-01 2.72477329e-01 ... 4.05319363e-01 2.14049771e-01 5.57376504e-01] [ 1.09925166e-01 -4.42098409e-01 -2.28120208e-01 ... -1.17645875e-01 -2.16680676e-01 4.16614562e-02] [ 4.11894411e-01 -2.21069202e-01 1.02970123e-01 ... 8.33977237e-02 3.56888443e-01 5.61746657e-01] ... [-7.50091374e-02 2.24137828e-02 -4.85837191e-01 ... -1.24238208e-01 -3.15518379e-01 -2.02953275e-02] [-1.14505455e-01 1.52944341e-01 -4.66743112e-01 ... 2.74700731e-01 -5.39106488e-01 1.46668274e-02] [-1.48541421e-01 6.47915080e-02 1.14075318e-01 ... 6.09112484e-03 1.40217751e-01 -6.40805662e-02]] [[ 9.33441892e-03 -3.77210706e-01 -8.27241540e-02 ... -2.30333999e-01 4.09400612e-01 -3.81413668e-01] [-7.32101053e-02 5.21034062e-01 1.87577426e-01 ... 
5.73970824e-02 6.15535021e-01 8.14319625e-02] [-7.78098106e-01 5.41006446e-01 -3.82149637e-01 ... 2.83704758e-01 -1.60240874e-01 1.74021423e-01] ... [ 6.06570959e-01 3.31427217e-01 -1.70816615e-01 ... 1.32980555e-01 2.29231790e-01 2.23838046e-01] [ 8.79963636e-02 6.20692298e-02 2.80178487e-02 ... 1.56299040e-01 -2.99551878e-02 -2.05810353e-01] [-5.36589086e-01 5.29041111e-01 -3.14807236e-01 ... 4.72260177e-01 -3.58327776e-02 -2.13096336e-01]] ... [[ 2.57180065e-01 5.30177951e-01 3.12774539e-01 ... 3.62279922e-01 -1.46141857e-01 4.55998719e-01] [-3.17552835e-01 2.52253473e-01 -3.14494520e-01 ... -2.24479809e-01 -2.61805415e-01 -3.61670464e-01] [ 1.11280292e-01 -2.61811376e-01 3.70039165e-01 ... -1.14937820e-01 -5.82300484e-01 -3.86531800e-01] ... [ 4.71950382e-01 -2.10217848e-01 -2.41375193e-01 ... -2.15375811e-01 -5.24086833e-01 2.45608110e-02] [-1.51137650e-01 4.92201507e-01 3.48389357e-01 ... 4.92718779e-02 1.35220751e-01 6.29330635e-01] [-5.33080637e-01 -2.03921080e-01 2.72730201e-01 ... -8.53128016e-01 -5.99974036e-01 -3.76549929e-01]] [[ 6.83893040e-02 8.71966407e-02 -1.50512472e-01 ... 1.32971823e-01 4.30035442e-02 4.99021322e-01] [ 4.47454125e-01 -2.26387560e-01 -7.57075772e-02 ... 4.96504188e-01 4.95074809e-01 -1.42635062e-01] [-1.22170657e-01 4.73772645e-01 2.55069792e-01 ... -2.59618491e-01 1.54678822e-01 4.01714712e-01] ... [-1.76735401e-01 -1.15294583e-01 9.89838421e-01 ... -1.18534617e-01 3.14774632e-01 7.10194930e-02] [-9.56067964e-02 -7.99402744e-02 -5.24419248e-01 ... -3.54841314e-02 7.52762780e-02 -3.02370518e-01] [ 7.25069866e-02 3.65116224e-02 1.38330981e-01 ... -1.48891732e-01 -3.46330673e-01 1.58737585e-01]] [[ 5.18965662e-01 -1.91358134e-01 1.34960622e-01 ... 1.13389879e-01 -2.99980640e-01 2.20432311e-01] [-6.95528463e-04 1.18592285e-01 -2.97777471e-03 ... -2.17632167e-02 -4.21184450e-01 -7.40537271e-02] [ 3.03700805e-01 5.21773472e-03 1.36770129e-01 ... -2.65512735e-01 -3.29725206e-01 -2.16504738e-01] ... 
[ 2.42717117e-02 -1.16130136e-01 1.03055798e-01 ... -9.94325709e-03 5.70784807e-01 -4.36201878e-02] [-9.24846232e-02 -2.13433072e-01 -1.55040845e-01 ... 1.57185107e-01 -8.15247595e-01 -3.41177344e-01] [ 1.55696496e-01 -2.96036482e-01 1.31045923e-01 ... 3.78451228e-01 1.68977931e-01 -7.10884988e-01]]]] [[[[-4.87497509e-01 1.00649917e+00 -4.22633946e-01 ... 9.35463965e-01 5.71355760e-01 4.32312101e-01] [ 5.79908848e-01 -1.65484264e-01 4.38855946e-01 ... -1.29598939e+00 2.56624699e-01 2.79878080e-01] [-8.35508585e-01 -1.07715690e+00 -6.76347077e-01 ... -2.85008192e-01 2.61209011e-01 -2.23818973e-01] ... [-1.67173788e-01 1.13161123e+00 -4.07536000e-01 ... -3.70932400e-01 2.69624621e-01 -1.40879754e-04] [ 1.37433112e-01 -9.08068120e-01 5.01862347e-01 ... -3.95066962e-02 2.68795609e-01 5.99815488e-01] [ 1.10476339e+00 7.32742846e-01 -2.44417161e-01 ... 4.39700186e-02 1.92674756e-01 5.22727787e-01]] [[ 1.24087071e+00 -2.51506388e-01 -1.37140775e+00 ... -1.07546115e+00 1.29711056e+00 1.25692797e+00] [-1.71343744e+00 2.19631657e-01 6.81690574e-01 ... -1.25550485e+00 6.85171306e-01 2.14955211e-01] [-4.06380594e-01 -6.02323055e-01 3.55795383e-01 ... 1.67489469e+00 -2.36519083e-01 -5.67434058e-02] ... [-4.49813962e-01 8.74743044e-01 6.45778596e-01 ... -2.84198791e-01 3.14376265e-01 -3.90176356e-01] [ 7.49492228e-01 -5.96057296e-01 7.61229157e-01 ... -1.17193088e-01 -3.25202495e-02 8.74048173e-01] [ 1.26439893e+00 4.34822112e-01 -9.67526674e-01 ... 1.03709912e+00 2.27790046e+00 1.45629084e+00]] [[ 1.22875130e+00 7.25422084e-01 7.16387630e-01 ... 1.99747132e-03 -9.35435057e-01 4.44032252e-01] [ 9.14296806e-01 1.06945625e-02 -2.26063371e+00 ... -7.22241044e-01 -1.05776942e+00 -1.46655226e+00] [ 6.49738967e-01 2.53865629e-01 -3.88482481e-01 ... 6.34570897e-01 -1.35937977e+00 -1.27107084e-01] ... [-3.87403131e-01 2.47570157e-01 1.47463465e+00 ... 7.07924366e-01 6.16737790e-02 -2.60924041e-01] [-3.24679881e-01 6.02379560e-01 3.33470345e-01 ... 
1.12559199e+00 -1.53346539e-01 4.01622653e-02] [-8.53322089e-01 -7.16971576e-01 -5.16423166e-01 ... 2.96123296e-01 -3.47739130e-01 -4.96903569e-01]] ... [[-2.59451568e-01 -4.33398075e-02 -2.00966939e-01 ... -1.74339521e+00 -8.48845601e-01 -5.16004145e-01] [-8.15089226e-01 -5.74208200e-01 -4.57818836e-01 ... -3.33229363e-01 -8.08927774e-01 -6.19038761e-01] [-2.08095521e-01 1.08268484e-01 -9.71580565e-01 ... 1.60106122e-01 -2.97679931e-01 5.26758254e-01] ... [ 1.20078969e+00 1.07214548e-01 7.29819059e-01 ... -8.88392627e-02 6.58827722e-02 5.07580698e-01] [ 1.61639822e+00 1.37018359e+00 4.49443787e-01 ... -1.19607341e+00 -7.97168016e-02 8.69015992e-01] [ 4.73151542e-02 -1.22481167e+00 2.84812152e-01 ... -1.49301112e-01 -1.81579912e+00 -1.06450260e+00]] [[-1.34028897e-01 1.03393531e+00 3.65866423e-02 ... 7.09884644e-01 -5.55555820e-01 9.78149921e-02] [ 6.15715720e-02 6.57097399e-01 1.07410550e+00 ... 3.21808279e-01 1.41767967e+00 6.42692566e-01] [-3.35819662e-01 5.30689955e-01 1.35869145e+00 ... -8.86559546e-01 4.91715521e-02 -8.85940909e-01] ... [-1.02588439e+00 -1.23166549e+00 -1.79790065e-01 ... -7.06206858e-01 -4.28588241e-01 4.35398705e-03] [ 3.49999040e-01 -7.68590122e-02 -2.36182109e-01 ... -1.10632546e-01 1.58056641e+00 -6.10608578e-01] [-3.08800489e-01 1.14001131e+00 -1.36221588e+00 ... 4.00700510e-01 -7.03682780e-01 6.48649991e-01]] [[ 1.50531843e-01 -7.16632068e-01 1.38207924e+00 ... -6.18841350e-01 8.86243045e-01 7.93304026e-01] [-8.27544451e-01 -1.82438195e-01 -9.26864445e-01 ... -4.15252656e-01 9.40238759e-02 6.32043719e-01] [-2.53296316e-01 8.63417149e-01 1.63863361e-01 ... 9.96749282e-01 5.19135535e-01 -8.29375565e-01] ... [ 5.49387157e-01 -3.53386067e-02 5.86665452e-01 ... 7.49025166e-01 8.97458911e-01 -1.81037712e+00] [-6.80139244e-01 8.78045380e-01 5.19548357e-01 ... 2.52843440e-01 -4.58003283e-01 -8.84139463e-02] [-4.12551194e-01 1.24746156e+00 -4.74120647e-01 ... 
-3.97601992e-01 -3.12039077e-01 -4.44344223e-01]]] [[[ 1.51997730e-01 1.53342247e+00 -1.23430049e+00 ... -6.76687062e-01 -1.23932481e+00 2.87486482e+00] [-3.51322860e-01 -1.02875352e+00 -1.40557051e+00 ... -1.53571355e+00 -2.95563126e+00 8.97134095e-02] [-1.49850059e+00 -9.72696185e-01 1.05568719e+00 ... -2.21789575e+00 1.67122829e+00 -6.24212027e-01] ... [ 4.94852252e-02 -3.72534132e+00 3.09541166e-01 ... -1.21472085e+00 8.05406868e-01 -8.69558811e-01] [ 1.03330564e+00 1.45176530e+00 1.04511726e+00 ... 3.22782516e+00 -4.01651651e-01 1.33664048e+00] [-4.35196728e-01 2.62796140e+00 -3.14113140e-01 ... 1.96288705e+00 -1.80673814e+00 2.20005918e+00]] [[ 1.28309858e+00 9.00201499e-01 7.55460918e-01 ... 2.27130151e+00 1.19771309e-01 1.54803443e+00] [-3.88963252e-01 2.21186948e+00 -9.74438012e-01 ... 1.73687613e+00 3.16691780e+00 8.44590664e-01] [-1.57631505e+00 5.50337255e-01 -1.28472459e+00 ... -4.00698960e-01 -2.31165004e+00 -8.07422400e-02] ... [ 2.15024090e+00 3.27573568e-01 -2.60557365e+00 ... 1.22489110e-01 2.55039358e+00 -2.39810228e+00] [-6.98027492e-01 2.30541992e+00 7.49021351e-01 ... 1.07328081e+00 -2.16047215e+00 1.58654821e+00] [-3.35480070e+00 -2.71859646e+00 -9.57935870e-01 ... -8.18005085e-01 -1.81918979e-01 1.65476525e+00]] [[-3.16023290e-01 5.26459396e-01 1.89631835e-01 ... -1.85410964e+00 -6.24641180e-01 -3.88412571e+00] [-3.08758426e+00 -9.89066899e-01 1.36232471e+00 ... -1.55947196e+00 -2.74714756e+00 8.80826652e-01] [ 1.38581812e-01 -9.78752375e-01 3.12020588e+00 ... -2.11978033e-02 -2.42179585e+00 1.24035668e+00] ... [ 6.86515868e-01 -2.16101313e+00 -9.04592037e-01 ... 1.70520031e+00 -1.47043610e+00 -2.31513977e+00] [ 4.57425594e+00 2.05483675e+00 8.83868277e-01 ... -2.76626801e+00 4.30089355e-01 -1.05525923e+00] [ 2.13029790e+00 -2.02203298e+00 3.88715982e-01 ... -2.01994133e+00 -6.82371676e-01 4.44640517e-02]] ... [[ 2.64410883e-01 1.49707568e+00 -2.90316033e+00 ... 
2.39957428e+00 2.54680395e-01 1.47352564e+00] [-1.01524842e+00 -2.59472179e+00 -2.68680406e+00 ... -9.74292159e-01 1.13302134e-01 -1.20503879e+00] [-6.27687752e-01 -1.36409295e+00 3.37224603e-01 ... 9.13057700e-02 -1.31953013e+00 3.72612858e+00] ... [-1.22856283e+00 1.12198755e-01 -1.05282784e+00 ... 2.13567066e+00 -1.57390565e-01 -1.87983334e+00] [ 8.37414682e-01 5.41375339e-01 -4.87848908e-01 ... 4.60812211e-01 -8.02371919e-01 -2.61228609e+00] [ 1.44251239e+00 -1.21790206e+00 5.93213737e-01 ... 2.26772055e-01 -2.06764746e+00 -2.31027767e-01]] [[-1.57547498e+00 2.67874479e+00 -6.19112968e-01 ... -6.49807975e-02 -2.75782526e-01 2.22814083e+00] [ 2.85773218e-01 -9.60757673e-01 -1.38485396e+00 ... -3.45694351e+00 5.68683632e-02 1.43326175e+00] [-2.22149506e-01 1.78847289e+00 -2.26238649e-02 ... 1.43145871e+00 -1.26632309e+00 -2.59112144e+00] ... [ 7.77063310e-01 2.14623618e+00 7.40327299e-01 ... 4.35811460e-01 5.73467016e-01 -1.67192176e-01] [ 2.58905983e+00 3.05077076e+00 7.83320546e-01 ... 1.59715605e+00 5.85630238e-01 1.67822301e+00] [ 1.31104863e+00 2.70952344e+00 -1.20206094e+00 ... 8.64127040e-01 5.41976452e-01 -8.92999589e-01]] [[-1.61984265e+00 9.10253882e-01 -1.57073343e+00 ... -3.52856660e+00 -2.16354966e+00 -3.39984655e+00] [ 3.84956765e+00 -1.61821532e+00 -1.01387608e+00 ... -3.65342760e+00 -8.72028232e-01 -2.08229899e+00] [-5.47301173e-01 3.33339262e+00 1.20251262e+00 ... 1.24301291e+00 1.70152473e+00 -2.34901333e+00] ... [ 2.19922200e-01 -8.71307790e-01 -4.01681423e+00 ... 8.07628095e-01 -1.72334671e+00 -9.90026772e-01] [-7.93683529e-01 -3.52821541e+00 2.06125951e+00 ... -1.85462511e+00 3.42917442e+00 -2.51628470e+00] [ 2.92239547e+00 2.50332236e+00 9.79121864e-01 ... -6.04471803e-01 -1.16850972e-01 -1.95966327e+00]]] [[[ 6.25728905e-01 5.25715709e-01 2.82444566e-01 ... 6.32465541e-01 1.40933856e-01 -9.55109358e-01] [-5.85335977e-02 1.77474260e-01 5.03768861e-01 ... 
-7.10425675e-01 3.74747925e-02 2.92120397e-01] [ 3.33681136e-01 5.37744045e-01 -2.09655061e-01 ... 7.68245310e-02 -4.88625497e-01 9.82231319e-01] ... [ 4.68976676e-01 -8.75339270e-01 7.72442997e-01 ... -1.05382934e-01 -5.14260530e-01 -4.96664569e-02] [ 1.79299459e-01 6.19775534e-01 -4.10288833e-02 ... -1.41034186e-01 5.33410534e-03 1.49115741e+00] [ 9.41608310e-01 -8.75150636e-02 9.62536409e-02 ... -7.48286724e-01 1.01693884e-01 -5.09965494e-02]] [[-3.25819761e-01 -2.04733148e-01 6.80947661e-01 ... -5.70223993e-03 -5.66381931e-01 4.42978054e-01] [ 9.34401453e-02 9.86102819e-02 9.85259265e-02 ... -3.23055774e-01 -6.75716162e-01 4.29348409e-01] [-8.47465813e-01 -4.20346022e-01 -6.60807312e-01 ... -1.80630341e-01 -2.35689044e-01 6.06857121e-01] ... [-6.85470760e-01 -5.61371267e-01 5.24704039e-01 ... 4.96250153e-01 4.88365978e-01 -2.95473367e-01] [-1.24919820e+00 -4.85269964e-01 3.09710354e-01 ... 1.63882270e-01 5.13434708e-01 -7.24331439e-01] [ 5.82656622e-01 1.42490700e-01 -2.69413702e-02 ... 1.48147488e+00 8.99300218e-01 -3.59724551e-01]] [[ 2.15537995e-01 1.28219187e+00 -3.78352970e-01 ... -9.92446244e-01 -5.03683507e-01 -4.27273244e-01] [-9.07928348e-01 4.69793230e-01 -2.78414965e-01 ... -5.59694648e-01 -8.02160442e-01 9.55038548e-01] [-2.91248083e-01 1.87057519e+00 -1.91931650e-01 ... 1.51823372e-01 -1.58217266e-01 -7.55861819e-01] ... [ 7.67996430e-01 6.51172519e-01 -3.35538685e-01 ... -3.94897789e-01 -6.60737932e-01 1.96082860e-01] [ 3.42832863e-01 1.04454540e-01 1.97641566e-01 ... 1.38097107e+00 1.00930870e+00 -4.81560528e-01] [ 5.22345185e-01 -4.88070399e-02 -2.65000202e-02 ... -8.71285796e-01 -1.51938036e-01 -1.28147557e-01]] ... [[-3.43392678e-02 1.16043404e-01 1.03647113e+00 ... 1.18864894e+00 -1.15378104e-01 1.86191523e+00] [-1.27058022e-03 -4.62167144e-01 1.23139966e+00 ... -1.64267406e-01 -4.38745499e-01 -3.51946324e-01] [-6.46278560e-02 -9.68266726e-01 5.83968043e-01 ... 6.59338892e-01 -3.34679574e-01 -4.85994250e-01] ... 
[ 4.04446989e-01 4.76327002e-01 8.91565681e-01 ... 4.52130258e-01 7.94976413e-01 1.67398691e-01] [-6.66327029e-02 -8.70334029e-01 6.70556247e-01 ... 8.22646022e-01 -2.92022586e-01 -3.37980837e-01] [-7.98405349e-01 -2.65458614e-01 -1.56427753e+00 ... 2.56091237e+00 1.00159442e+00 2.70843416e-01]] [[ 2.86499053e-01 -8.71566534e-01 6.37190938e-01 ... -2.97683060e-01 -1.58712253e-01 7.77460515e-01] [-2.01853469e-01 1.03389430e+00 -1.87566895e-02 ... 2.13022739e-01 4.18899477e-01 -2.19742507e-01] [ 8.62428129e-01 -4.25105631e-01 1.75331160e-01 ... -6.38630509e-01 -6.60201490e-01 4.52644825e-01] ... [-2.15472698e-01 9.70023692e-01 7.48254359e-01 ... -8.99493575e-01 -3.42761487e-01 -7.34191477e-01] [-7.24859834e-01 -3.26024801e-01 1.14301956e+00 ... -6.96147025e-01 -5.74945569e-01 -5.99320352e-01] [ 1.01917648e+00 -2.92473227e-01 -4.43925530e-01 ... -1.06777573e+00 -9.75414217e-01 -6.60891831e-02]] [[ 2.87940979e-01 -3.37592632e-01 -1.17733195e-01 ... 2.82220453e-01 -4.03646147e-03 -1.84618130e-01] [ 1.83493257e-01 -1.04079044e+00 -1.64978907e-01 ... 1.03264105e+00 -3.02576661e-01 -1.76059693e-01] [-9.92232680e-01 1.56412184e-01 1.85373336e-01 ... 2.87218511e-01 -1.26053572e-01 3.16277564e-01] ... [-2.64065355e-01 -3.92040998e-01 -6.13250196e-01 ... 1.01979962e-02 1.56918064e-01 6.52139425e-01] [-6.23306751e-01 -2.19192013e-01 -6.40933439e-02 ... -4.53546783e-03 -4.85293061e-01 -8.43033373e-01] [-7.93861389e-01 -4.49710310e-01 7.25111246e-01 ... -8.91505182e-01 2.20022336e-01 7.51866281e-01]]] [[[-2.68212557e-02 -3.86679888e-01 -4.22320336e-01 ... -8.05911005e-01 2.11183619e+00 -8.57179701e-01] [-1.38940120e+00 -8.91269326e-01 -2.02373251e-01 ... 2.30453730e+00 2.19192362e+00 -2.29329753e+00] [-3.53753400e+00 -7.06722617e-01 9.56733972e-02 ... 2.16891980e+00 -1.18572986e+00 -9.37461555e-01] ... [ 1.85445356e+00 1.93844545e+00 9.65769649e-01 ... -3.15400422e-01 2.82671499e+00 -9.37315047e-01] [ 1.13412738e+00 1.49026251e+00 -6.30400479e-01 ... 
1.12612259e+00 -4.32038009e-01 -1.50388703e-01] [ 1.60188544e+00 -2.10248008e-01 1.99481535e+00 ... -7.76131600e-02 1.01203406e+00 2.54672319e-01]] [[ 9.43649411e-01 -9.50760007e-01 1.41711700e+00 ... 9.40145791e-01 -2.00751141e-01 4.50533748e-01] [-2.48243809e+00 8.54849994e-01 -1.14011303e-01 ... 1.86611250e-01 1.82997346e-01 -1.14639163e+00] [ 2.86964202e+00 -1.90193701e+00 -7.63625860e-01 ... 7.20046639e-01 -6.39426827e-01 -1.13307154e+00] ... [ 7.63566911e-01 1.43743837e+00 1.49168506e-01 ... -1.54361117e+00 -1.72273350e+00 4.32910293e-01] [ 4.83320296e-01 -2.96993971e-01 -1.81202447e+00 ... -1.71980548e+00 9.28771496e-01 4.00051475e-02] [ 1.44476390e+00 1.54507267e+00 1.34625837e-01 ... -5.66827469e-02 6.17033303e-01 4.76957150e-02]] [[ 1.73968935e+00 -3.06229949e+00 -1.01782370e+00 ... -2.48071980e+00 -2.37137032e+00 9.54774395e-02] [ 1.16085720e+00 -5.54300904e-01 -3.60194802e-01 ... -1.81745338e+00 -1.55115053e-01 -7.51861751e-01] [ 3.02988386e+00 -4.83745374e-02 5.33380091e-01 ... -4.34129804e-01 1.43527961e+00 9.66272771e-01] ... [-3.33585739e-01 -5.30039310e-04 -1.04994178e+00 ... -2.10515514e-01 -1.80997801e+00 -1.02884695e-01] [-2.07066107e+00 -1.43338215e+00 -1.55838645e+00 ... 3.10086870e+00 -2.90137351e-01 1.48929048e+00] [ 7.95884430e-01 -2.32459739e-01 -7.66791463e-01 ... 1.02183819e+00 -2.05298114e+00 3.58420992e+00]] ... [[-3.07558036e+00 -1.15179634e+00 2.98065543e+00 ... -2.47089386e-01 6.66930497e-01 1.84285438e+00] [-7.43898153e-01 4.09262836e-01 -2.22496939e+00 ... -1.41211367e+00 1.74201012e+00 1.01347566e+00] [-9.12564456e-01 1.37695897e+00 6.94526583e-02 ... -2.32242846e+00 -6.85792446e-01 -2.04391742e+00] ... [ 6.31429076e-01 -3.45272255e+00 -1.84503829e+00 ... 8.81474614e-01 1.92001486e+00 5.46203911e-01] [-1.70996821e+00 1.88070202e+00 -1.52990592e+00 ... -2.80118370e+00 3.30513746e-01 2.98357904e-01] [ 2.42326999e+00 -3.01782161e-01 7.49397278e-01 ... 
-2.52445996e-01 -2.70365119e-01 1.54518872e-01]] [[-3.74685884e-01 1.03724577e-01 -2.74071574e+00 ... 7.07644224e-01 4.81256902e-01 1.87791562e+00] [ 4.35772091e-01 -5.92262924e-01 -3.63793641e-01 ... -2.56867766e-01 9.37788606e-01 -5.39323926e-01] [-7.25064635e-01 -2.74870062e+00 -1.07547057e+00 ... 1.65630531e+00 2.46579194e+00 4.77772892e-01] ... [-1.77569115e+00 3.26828152e-01 -7.59796441e-01 ... -2.54856348e-01 -3.81452370e+00 1.70971116e-03] [ 4.90839291e+00 1.60355830e+00 1.56332850e-01 ... -1.07309294e+00 -1.87611043e-01 -1.39770436e+00] [ 1.45263863e+00 1.79736710e+00 1.87139258e-01 ... 2.46968746e+00 -4.63827431e-01 -4.56513256e-01]] [[ 7.16598690e-01 -1.54943180e+00 -4.74572718e-01 ... 1.80713129e+00 1.39712358e+00 7.79483557e-01] [ 1.51496127e-01 -1.27970743e+00 -9.39005334e-03 ... 1.93507656e-01 -6.07016027e-01 2.21988487e+00] [ 3.60629857e-01 1.53506982e+00 1.64767504e+00 ... 6.96954846e-01 -7.33802542e-02 -4.01289195e-01] ... [ 5.20544171e-01 1.15044069e+00 1.37714863e+00 ... 1.03467035e+00 -7.01434493e-01 7.69613624e-01] [ 5.01845479e-01 -1.74805641e+00 -1.08346328e-01 ... -1.35728121e+00 3.09054643e-01 5.76421678e-01] [ 1.18990088e+00 3.99704248e-01 -1.64787006e+00 ... -2.11249781e+00 1.30402291e+00 -1.69112489e-01]]] [[[ 1.22462773e+00 -3.96429491e+00 1.06173670e+00 ... 2.01600718e+00 -5.04533947e-01 -1.89362919e+00] [ 2.31090665e+00 1.39790130e+00 -2.42741966e+00 ... -2.94675016e+00 -4.06019831e+00 -1.63354129e-01] [ 3.94063282e+00 1.87946963e+00 6.17358387e-01 ... -2.40550071e-01 8.63878489e-01 7.27687299e-01] ... [ 4.84739447e+00 4.22784948e+00 -4.94182491e+00 ... 3.35646927e-01 9.44375873e-01 2.53608108e-01] [-8.73296022e-01 -1.52168620e+00 -1.86276054e+00 ... 1.75389183e+00 4.44327307e+00 3.74743176e+00] [ 6.45096898e-01 1.77367318e+00 -1.50265467e+00 ... 4.57187033e+00 4.77137041e+00 1.40453386e+00]] [[-5.17053843e+00 5.53372204e-01 -1.13126767e+00 ... 
-1.56561852e+00 -3.52496719e+00 -2.01919866e+00] [ 6.93777502e-02 -3.48364525e-02 -1.62393045e+00 ... -6.43281078e+00 3.62784266e-01 -1.77413726e+00] [ 3.06671119e+00 -1.15201807e+00 1.28935337e+00 ... 1.37395978e+00 -2.36406612e+00 -1.86841941e+00] ... [-5.48017658e-02 1.40628850e+00 -1.80579782e+00 ... 1.28335074e-01 -1.33914471e+00 -2.64000630e+00] [ 1.54728484e+00 2.30526543e+00 -1.09340274e+00 ... -3.02771974e+00 4.92549032e-01 -1.59264445e+00] [ 1.78678417e+00 -2.54670668e+00 -6.58173513e+00 ... 1.51395071e+00 3.63045502e+00 -1.16279352e+00]] [[-1.75771689e+00 2.04689074e+00 -3.37838364e+00 ... 3.54803371e+00 -2.43666649e+00 3.91363591e-01] [ 4.78169620e-01 -2.28574896e+00 4.74205494e+00 ... -1.43990111e+00 -1.95370269e+00 -2.59686875e+00] [ 6.42386913e-01 2.58715892e+00 7.05671683e-02 ... -2.82590604e+00 -6.52873695e-01 1.91601229e+00] ... [-1.45295191e+00 6.89031959e-01 -2.82315826e+00 ... 2.64193845e+00 1.36734962e+00 4.90454435e+00] [ 5.11111975e-01 -1.82337418e-01 -6.30859756e+00 ... 1.04403687e+00 2.93551564e+00 -7.21839666e-01] [ 2.14185691e+00 9.15934682e-01 3.21567684e-01 ... 2.76932240e+00 -1.27919388e+00 3.08132499e-01]] ... [[ 2.09379530e+00 4.42653322e+00 -3.46584511e+00 ... -1.24392271e+00 4.72169685e+00 -3.07093173e-01] [-1.64089829e-01 -2.66990161e+00 -3.94113922e+00 ... -5.01879168e+00 3.47691737e-02 -5.37337780e-01] [ 5.40275693e-01 1.02815442e-01 -1.49879920e+00 ... -2.96144009e+00 4.52660561e+00 -2.26296663e+00] ... [ 1.28969777e+00 -7.13932872e-01 6.27008557e-01 ... -3.93388128e+00 -3.41752219e+00 -8.30091298e-01] [ 2.06583762e+00 -3.25499296e+00 -1.98467815e+00 ... -8.65885690e-02 -3.55915546e-01 1.63225740e-01] [-3.66245365e+00 -3.41925788e+00 -1.85494411e+00 ... 1.75117806e-01 -2.83286142e+00 2.12817764e+00]] [[-1.72693396e+00 -3.20772910e+00 -5.74713528e-01 ... -8.30644667e-01 1.91622913e+00 -3.41008496e+00] [-7.01824367e-01 1.37433565e+00 2.28503418e+00 ... 
1.85517561e+00 1.07529974e+00 -2.29131103e+00] [ 3.61782694e+00 -4.47234774e+00 7.52427652e-02 ... 2.23345184e+00 2.56469750e+00 2.86296558e+00] ... [ 1.86128509e+00 -1.98006821e+00 3.07926464e+00 ... -1.87368608e+00 2.67196488e+00 7.26667309e+00] [ 1.45422566e+00 2.01653361e+00 1.75531912e+00 ... 1.81681812e+00 1.19996333e+00 3.03455424e+00] [ 1.31949592e+00 1.50288439e+00 -3.41249633e+00 ... -1.12201250e+00 -2.51781726e+00 3.18348265e+00]] [[-5.08599758e-01 -1.26613951e+00 -4.20524836e-01 ... 2.85588622e-01 1.25769019e-01 1.88406563e+00] [-1.94474292e+00 3.81931996e+00 3.09505606e+00 ... 7.17975020e-01 1.80386293e+00 -3.17419624e+00] [-3.21701503e+00 2.47647905e+00 2.45805168e+00 ... 3.81279016e+00 1.99910909e-01 -3.36466551e+00] ... [ 2.09585142e+00 -3.39200282e+00 -1.81159616e+00 ... 1.27548397e+00 5.18146098e-01 -4.42361116e+00] [ 8.37700903e-01 4.95829582e+00 7.38717854e-01 ... 9.01450440e-02 9.49888587e-01 2.83280897e+00] [ 3.62775147e-01 2.83629966e+00 -2.44349837e+00 ... -6.86509514e+00 -6.81278706e-01 -2.94756579e+00]]] [[[-4.58054960e-01 -2.58960333e-02 4.16720808e-01 ... 1.73117876e-01 -7.83364568e-03 2.25191966e-01] [ 1.19547434e-02 -9.29393768e-02 -4.36136425e-02 ... -6.22730851e-01 6.31565675e-02 -6.73234463e-01] [-4.27861989e-01 -1.65084317e-01 4.95629668e-01 ... 3.77008706e-01 -4.76070434e-01 -3.02514523e-01] ... [-5.07868588e-01 -1.24526441e-01 1.07294726e+00 ... -8.11263621e-02 -2.63422042e-01 1.66510239e-01] [ 1.82757273e-01 -2.69166142e-01 1.37061417e-01 ... -3.21378827e-01 -1.51355416e-01 -5.04989326e-01] [-9.56033766e-01 -3.20675492e-01 -1.27651975e-01 ... -6.49232745e-01 -4.53104287e-01 -4.17025506e-01]] [[ 1.65805086e-01 -4.49291557e-01 2.87708074e-01 ... 2.65116453e-01 4.87668633e-01 -4.21187431e-01] [ 1.23145290e-01 -2.68575132e-01 1.04814088e+00 ... 3.36506903e-01 -3.32422405e-01 -1.23172842e-01] [-4.41803247e-01 -3.65468472e-01 2.51294881e-01 ... -9.94001180e-02 5.36519885e-01 6.00246012e-01] ... 
[-6.62566483e-01 -3.39645952e-01 -3.58193398e-01 ... -1.32474869e-01 -4.25511934e-02 -3.30957979e-01] [ 4.45213735e-01 -1.98647037e-01 9.54779983e-02 ... 2.02447399e-01 3.93839061e-01 1.75812855e-01] [-1.38926506e-01 7.73383975e-02 1.14260823e-01 ... -1.34226009e-01 -4.47420955e-01 -3.97015393e-01]] [[ 5.50236523e-01 2.39709392e-01 -5.18493414e-01 ... -2.39113912e-01 -4.43352640e-01 1.17055506e-01] [-2.52424687e-01 2.42896557e-01 -3.65121067e-01 ... -2.56445944e-01 -4.17019159e-01 2.97597259e-01] [ 9.29814160e-01 -2.65613645e-01 2.25467384e-01 ... 1.80558115e-01 6.56819522e-01 4.04971130e-02] ... [-1.68778494e-01 2.10896119e-01 -3.83260280e-01 ... -3.53886425e-01 2.77295679e-01 -6.53403848e-02] [-7.57989213e-02 -1.87447637e-01 -2.58606970e-01 ... -6.88033164e-01 -3.62510592e-01 9.19075161e-02] [-4.66072336e-02 2.59015501e-01 7.63393342e-01 ... -7.46712461e-02 -4.67906803e-01 2.71100581e-01]] ... [[ 2.04794705e-01 3.01111847e-01 -4.84248966e-01 ... 5.19343689e-02 1.57937035e-01 4.01850760e-01] [ 4.00031716e-01 -4.58509803e-01 5.24900965e-02 ... -1.62564427e-01 -1.14679262e-01 7.53026754e-02] [-3.49540591e-01 1.89316228e-01 -3.09191793e-01 ... -3.03969473e-01 3.77584606e-01 4.83666569e-01] ... [-4.31575567e-01 6.04045726e-02 3.77847582e-01 ... -1.99801564e-01 6.70000166e-02 1.83251351e-01] [ 4.49849129e-01 -1.14609651e-01 -7.06420004e-01 ... -6.33056581e-01 -2.02148423e-01 -2.72492141e-01] [-4.08373863e-01 -3.33762318e-01 2.08896063e-02 ... -6.61580265e-01 3.24523121e-01 -3.66220504e-01]] [[ 5.74441612e-01 -2.23972604e-01 -1.73949823e-01 ... 1.88180253e-01 -1.46791697e-01 -8.25418904e-02] [ 3.32051277e-01 -4.18760851e-02 3.66818219e-01 ... -1.63306773e-01 9.44268167e-01 -1.47347122e-01] [-2.19550863e-01 -1.75584361e-01 8.06963071e-02 ... 9.53631997e-02 5.03061637e-02 6.86737001e-02] ... [ 2.95208454e-01 2.83663750e-01 1.33820221e-01 ... 5.28692901e-01 -1.95168823e-01 -4.61342558e-02] [ 1.55475467e-01 2.06576496e-01 -2.61999983e-02 ... 
-3.96123063e-03 -3.12722862e-01 -4.23920274e-01] [ 1.05341449e-01 6.90940559e-01 5.11677921e-01 ... 3.87803435e-01 3.81075084e-01 -3.81566733e-01]] [[-2.43335605e-01 6.14785492e-01 4.27050918e-01 ... -2.09506288e-01 3.96838397e-01 5.37673533e-01] [-1.17518440e-01 -1.91818085e-03 -2.87769645e-01 ... 4.61152345e-02 -2.22352594e-01 -5.36396205e-02] [ 3.82116646e-01 2.08736151e-01 -5.65854669e-01 ... -1.47666171e-01 -2.34040953e-02 4.46232826e-01] ... [ 1.80113971e-01 1.17638454e-01 2.30753750e-01 ... 4.76479977e-01 5.39453030e-01 2.45718926e-01] [-7.31212914e-01 2.01153710e-01 8.03700760e-02 ... 2.42195517e-01 -1.45785347e-01 1.29425704e-01] [-2.31787860e-01 -6.21257350e-02 2.89529264e-01 ... -2.75410175e-01 2.51179934e-01 -5.56829609e-02]]]]]; ov_res: [[[[[-1.77626744e-01 -2.38291889e-01 -4.04513329e-01 ... 1.33515644e+00 3.01980704e-01 1.55284262e+00] [ 1.73995459e+00 4.75361258e-01 -1.70867383e-01 ... 7.75200009e-01 1.18665516e+00 8.80014956e-01] [ 8.74952316e-01 6.13911822e-02 -3.63905549e-01 ... -1.24092376e+00 1.08487951e-03 8.97237122e-01] ... [ 1.34902239e-01 -3.64241898e-01 -3.18384051e-01 ... -4.00321007e-01 -1.17650533e+00 2.13076785e-01] [ 1.08378015e-01 -2.11572200e-01 -4.17978704e-01 ... -1.27897337e-01 6.39782250e-01 -1.17731607e+00] [ 7.99322069e-01 -3.97198558e-01 -2.47386277e-01 ... 6.66625857e-01 -9.05413404e-02 -6.11880183e-01]] [[-2.77148366e-01 -1.53312415e-01 -8.31967294e-01 ... -1.00920928e+00 8.68662000e-01 7.18292713e-01] [-5.26018620e-01 -4.55071270e-01 -3.22617441e-01 ... -7.03930795e-01 6.99735820e-01 1.54503608e+00] [ 9.66768384e-01 2.11237326e-01 -1.57616794e+00 ... 5.01809597e-01 9.31377560e-02 -7.68855691e-01] ... [ 2.47643411e-01 -6.22014478e-02 -3.41518670e-01 ... 1.05578864e+00 7.71184027e-01 3.39081138e-02] [-1.29966950e+00 8.38049054e-02 2.70989954e-01 ... -8.92200395e-02 -3.32788378e-01 -7.81853199e-01] [-5.37465215e-01 1.71706152e+00 -8.59103024e-01 ... 
-1.15005486e-01 -4.51574713e-01 5.46616435e-01]] [[-3.84570539e-01 -2.73523837e-01 -1.18334211e-01 ... -9.13720906e-01 -9.08978283e-02 -7.24937201e-01] [-7.31913209e-01 -4.18869294e-02 8.14081073e-01 ... 4.68520910e-01 6.90089166e-01 1.57460213e-01] [-1.73146021e+00 7.38827646e-01 -7.52070323e-02 ... 4.51179832e-01 6.49814963e-01 -7.81925499e-01] ... [ 1.83759451e-01 3.50946665e-01 3.10657531e-01 ... 1.53557405e-01 -1.15173137e+00 -2.53989309e-01] [ 2.70434260e-01 1.52349114e-01 8.97461712e-01 ... 2.17212230e-01 4.57584977e-01 8.68731514e-02] [ 6.85305595e-01 -8.17391098e-01 -6.54970527e-01 ... 8.12999725e-01 1.09878302e-01 2.61236370e-01]] ... [[ 7.36920714e-01 -1.00300646e+00 8.74584466e-02 ... 6.74911022e-01 6.69222251e-02 -1.18136311e+00] [ 2.21742187e-02 -1.99957803e-01 -7.17774808e-01 ... 4.85686034e-01 3.22525650e-01 -2.96172127e-02] [-2.25184485e-01 -1.90582469e-01 -4.14191753e-01 ... -8.27682793e-01 -8.21743548e-01 1.10679638e+00] ... [-7.06859708e-01 1.18749298e-01 -1.28661907e+00 ... 2.51692891e-01 -1.36545077e-01 -8.30406070e-01] [-8.72983754e-01 -6.51720226e-01 -1.57264411e-01 ... 4.45835322e-01 1.66334704e-01 9.03882623e-01] [ 1.18412232e+00 -5.57734668e-01 4.49071713e-02 ... 2.67673582e-01 1.60143569e-01 8.93787801e-01]] [[ 5.61559737e-01 1.76792026e-01 -2.00426668e-01 ... -3.20070297e-01 -6.09070480e-01 7.71018922e-01] [ 1.57046095e-01 9.27283615e-02 1.40985417e+00 ... 1.16920471e+00 -4.69844759e-01 1.01376808e+00] [-5.39118588e-01 1.74979770e+00 2.51591265e-01 ... -3.49329948e-01 -8.13848078e-01 -5.37557900e-01] ... [ 1.36828947e+00 1.92153677e-01 8.69816720e-01 ... -5.07263124e-01 4.07609314e-01 3.27758491e-01] [ 6.15082420e-02 -1.46048395e-02 1.11134422e+00 ... -5.18067777e-01 3.43923002e-01 7.05098569e-01] [ 2.32855100e-02 -9.15301442e-01 6.80295348e-01 ... -2.03500003e-01 1.81357288e+00 -1.68680742e-01]] [[ 4.32449490e-01 -3.42859864e-01 -6.50927901e-01 ... 
3.11036885e-01 5.80186248e-01 -1.47369638e-01] [-6.26322806e-01 2.75089651e-01 -1.16424859e+00 ... -6.63571581e-02 -8.06478500e-01 8.12968969e-01] [-6.12623751e-01 3.98162007e-01 1.63829193e-01 ... -6.34607136e-01 -2.82684922e-01 2.54407674e-01] ... [ 7.70655870e-01 2.37091258e-01 -3.26610684e-01 ... -3.10643911e-01 4.98516738e-01 1.69497359e+00] [ 1.32780790e-01 -4.24256206e-01 5.25406718e-01 ... 1.40781447e-01 2.04546884e-01 1.35302901e-01] [-6.70364201e-01 1.09086668e+00 1.80855189e-02 ... -5.77592731e-01 4.85321164e-01 -4.64980215e-01]]] [[[-9.06656444e-01 -8.22657883e-01 1.70180164e-02 ... 2.56275125e-02 -1.99235320e+00 -8.31911087e-01] [ 2.91289282e+00 9.60198194e-02 6.48928106e-01 ... -2.24121392e-01 -4.55298781e-01 -1.57521021e+00] [-3.03768253e+00 3.65648890e+00 6.25886917e-01 ... 5.97677708e-01 1.06242108e+00 1.31521001e-01] ... [-3.81855428e-01 9.54791725e-01 -2.40382552e+00 ... 3.03543901e+00 -2.54267955e+00 -2.27249765e+00] [ 1.37113285e+00 -2.48831987e+00 1.95387304e+00 ... 1.15264142e+00 -1.73288095e+00 1.22544579e-01] [-1.68202436e+00 6.67667910e-02 2.10114741e+00 ... 4.70743132e+00 -2.31368840e-01 -5.71206033e-01]] [[ 4.90612149e-01 4.18514580e-01 8.83677959e-01 ... 1.17748000e-01 -5.25788128e-01 -6.92525581e-02] [-1.58658826e+00 3.83625537e-01 -1.50076723e+00 ... -1.95185661e+00 -1.00239825e+00 4.61030118e-02] [ 1.98124301e+00 1.94315386e+00 1.14890754e+00 ... 3.72288078e-02 4.02800655e+00 5.24644136e-01] ... [ 1.01359665e+00 4.38636802e-02 1.78086877e+00 ... -9.33351517e-01 1.29048896e+00 1.44406295e+00] [-1.79276776e+00 6.20944798e-01 -4.56220396e-02 ... -1.04197359e+00 -2.41186833e+00 1.81844141e-02] [ 1.98935163e+00 2.35783148e+00 -1.43439698e+00 ... -2.22510710e-01 5.84114552e-01 1.09749973e+00]] [[ 3.79472256e+00 -3.01497221e+00 -5.44570610e-02 ... -1.00470161e+00 2.17979074e-01 2.62547755e+00] [ 4.82938915e-01 -4.51569319e-01 -3.56275529e-01 ... 
-1.04856741e+00 -7.91194677e-01 -1.16876155e-01] [ 4.69964236e-01 -6.86144710e-01 3.06370449e+00 ... 1.26476109e+00 1.26680243e+00 -6.84955239e-01] ... [-1.30538189e+00 1.56972325e+00 1.45405650e-01 ... -8.66421402e-01 2.44252300e+00 -1.08926892e+00] [-4.56519413e+00 -1.55266225e-01 9.16755855e-01 ... 1.34184137e-01 -2.05197287e+00 -1.39693189e+00] [-8.27921089e-03 -1.81450856e+00 -1.90033925e+00 ... 2.88102448e-01 -1.28612936e+00 8.54435042e-02]] ... [[-1.50312364e+00 -6.97721541e-01 -1.34265208e+00 ... -2.16132808e+00 4.28057146e+00 2.88489252e-01] [-1.28872693e+00 9.84570920e-01 8.17447484e-01 ... 8.35028663e-02 -1.24535966e+00 9.76875722e-01] [-2.51709390e+00 -2.26903319e+00 -3.30233634e-01 ... 1.24290359e+00 -1.62191570e+00 -2.91197109e+00] ... [-1.24177134e+00 1.22492445e+00 -1.48632360e+00 ... 2.38006487e-01 6.57680929e-01 -8.09704542e-01] [-1.47636938e+00 -9.25818264e-01 2.12147665e+00 ... -1.15874529e+00 2.87555432e+00 -1.04331255e-01] [ 6.67994201e-01 -2.58183169e+00 8.51544738e-01 ... -4.52245772e-01 1.47346735e+00 1.78656220e+00]] [[ 1.50410855e+00 2.47748524e-01 -3.63711500e+00 ... -1.44067919e+00 7.33523428e-01 2.50641489e+00] [-1.27068937e-01 -3.54120302e+00 -2.32926250e+00 ... -1.45007265e+00 1.40527046e+00 1.40094936e+00] [ 3.61225414e+00 5.76993704e-01 1.19597897e-01 ... 1.77054495e-01 -3.74277055e-01 -1.18078971e+00] ... [ 1.74987063e-01 -6.48338854e-01 -6.14835203e-01 ... -1.58089900e+00 4.99794096e-01 -6.00557923e-01] [-6.34496987e-01 2.18034267e+00 -3.81881863e-01 ... 1.61352050e+00 -9.88226235e-01 2.32785335e-03] [ 2.40672064e+00 -2.76416302e+00 2.25184846e+00 ... 2.28382683e+00 -3.30044985e-01 -4.56910229e+00]] [[-6.24848247e-01 -1.25117493e+00 -4.02999580e-01 ... -3.45459759e-01 -1.48115778e+00 -9.66167748e-01] [ 4.49619949e-01 -1.99212599e+00 -2.18727231e+00 ... -4.35595214e-01 2.14171767e+00 4.34719038e+00] [-4.93088990e-01 2.65187526e+00 -3.32565475e+00 ... -3.02415395e+00 -6.71195805e-01 -8.91694963e-01] ... 
[ 2.84997076e-01 1.60657406e+00 1.47610748e+00 ... -9.37779009e-01 4.28629875e-01 2.68337727e-01] [ 1.14955890e+00 2.12231827e+00 1.82521629e+00 ... 1.30495048e+00 1.38120186e+00 3.60864043e-01] [-1.31661630e+00 2.29158902e+00 3.96462369e+00 ... -3.00291491e+00 1.96733022e+00 1.46506202e+00]]] [[[-9.72130239e-01 -4.66009490e-02 -5.63814104e-01 ... 1.72447950e-01 -2.99488693e-01 5.06914198e-01] [ 1.69688269e-01 -8.47110033e-01 1.04007924e+00 ... -4.57777709e-01 -5.78668714e-01 -7.25681186e-01] [-8.49641144e-01 4.65480626e-01 -3.30160707e-01 ... 1.26562798e+00 -3.54904413e-01 -5.45955956e-01] ... [-8.68997037e-01 1.87594146e-01 3.90374810e-02 ... -5.19485474e-01 3.84261638e-01 -2.70032734e-01] [ 4.92696643e-01 9.81698036e-01 1.45990744e-01 ... 2.86552876e-01 -8.56647372e-01 -1.97407991e-01] [ 3.58567029e-01 2.31056020e-01 -4.67721850e-01 ... -2.94919163e-01 1.03524196e+00 -1.11412966e+00]] [[-5.79593480e-01 -2.05362648e-01 2.18357258e-02 ... -1.37989610e-01 9.84268844e-01 -1.16497564e+00] [ 3.68479520e-01 9.13148820e-01 -6.13148510e-01 ... -4.70598266e-02 -2.82669276e-01 1.88700985e-02] [-2.74031550e-01 -1.73315197e-01 -5.45372128e-01 ... 6.51076555e-01 8.45885932e-01 1.13958549e+00] ... [-1.50546744e-01 -1.25094317e-03 -3.30701470e-01 ... 9.94182825e-01 9.00308847e-01 -3.25456083e-01] [-2.27671340e-01 -9.68490481e-01 -8.28400671e-01 ... 7.72839308e-01 -6.29830360e-01 6.55878782e-01] [ 5.73312402e-01 8.62536430e-01 2.17398107e-02 ... -5.74501038e-01 4.04944003e-01 7.50927448e-01]] [[-1.50788218e-01 6.58882201e-01 6.69044852e-02 ... -2.65026450e-01 -1.90321410e+00 -3.88619959e-01] [ 1.61826491e-01 4.23633188e-01 -4.84254211e-02 ... 1.35330713e+00 7.84093499e-01 -4.22370821e-01] [ 7.85476565e-01 -3.60557228e-01 -4.22787309e-01 ... -8.22295606e-01 7.44642437e-01 -2.15564534e-01] ... [-7.07310915e-01 -7.32093990e-01 -3.07022691e-01 ... 1.41337037e+00 4.58285838e-01 7.68832803e-01] [-8.85780156e-02 8.64155173e-01 5.81002176e-01 ... 
4.62490231e-01 -4.57435906e-01 7.46261030e-02] [ 4.89069998e-01 7.06611052e-02 3.27493161e-01 ... -4.13938433e-01 -2.83802658e-01 6.24621570e-01]] ... [[-4.72494066e-01 -4.73967284e-01 3.81626606e-01 ... 5.49270868e-01 -7.03209639e-02 -9.40203786e-01] [ 6.49188519e-01 1.26530170e+00 2.88975209e-01 ... -1.03873396e+00 6.07720017e-01 3.79737973e-01] [ 4.43050921e-01 4.95056957e-01 -1.29796997e-01 ... 2.57700622e-01 4.43663836e-01 -1.01091780e-01] ... [-9.30341780e-01 -1.97185650e-01 1.11747730e+00 ... -5.67605555e-01 -6.88256681e-01 -7.09252715e-01] [-2.27897421e-01 -6.30932599e-02 7.96566308e-02 ... 5.90043545e-01 -5.78431308e-01 -2.32402444e-01] [-5.37685491e-02 -3.44960719e-01 -1.28419280e+00 ... 1.13426614e+00 -4.94678140e-01 -5.63733876e-01]] [[ 4.70441252e-01 3.57588351e-01 -1.16271935e-01 ... -1.55167043e-01 -9.95667130e-02 -4.02691126e-01] [ 1.67498946e-01 -8.52132857e-01 4.30395246e-01 ... -6.52867615e-01 6.27452612e-01 -2.32394919e-01] [-6.60366774e-01 7.83940077e-01 5.04408062e-01 ... 1.06263697e+00 3.12874585e-01 -6.57897294e-01] ... [ 1.50179872e-02 -2.27171466e-01 -4.67116013e-02 ... -8.91725272e-02 3.36913198e-01 1.90862361e-02] [ 4.08560425e-01 1.37786686e+00 1.09488773e+00 ... 3.93731259e-02 -1.17978618e-01 -5.18769994e-02] [ 4.37766314e-01 1.75975576e-01 5.13955712e-01 ... -2.81928271e-01 1.31390333e-01 1.86972409e-01]] [[-8.53108585e-01 -1.80732146e-01 4.70738918e-01 ... 2.03865185e-01 9.45718735e-02 -1.63402706e-01] [-8.58174205e-01 6.42067075e-01 -6.23256743e-01 ... -9.31625515e-02 7.16665089e-01 -2.74204642e-01] [-1.24244735e-01 7.65608132e-01 -3.41522507e-02 ... -1.71927154e+00 5.84070385e-01 1.21355319e+00] ... [-7.09509313e-01 1.78538561e-01 -1.45731762e-01 ... 4.81561214e-01 8.87333095e-01 4.57768351e-01] [ 1.34674621e+00 -1.12772894e+00 -1.39036727e+00 ... 3.18480164e-01 -7.93052077e-01 4.48791414e-01] [ 5.68860054e-01 -7.66566098e-01 4.86017913e-01 ... 
4.04441237e-01 6.97579026e-01 -2.73501188e-01]]] [[[ 1.10768998e+00 -1.34816694e+00 -1.86143470e+00 ... -7.16814816e-01 1.01851046e+00 1.53653920e+00] [-4.25022155e-01 1.64046645e+00 1.64052892e+00 ... 4.12585616e-01 5.03295839e-01 -1.10333848e+00] [-4.22697812e-01 1.14063323e+00 -2.61563182e-01 ... -8.10276195e-02 2.06021690e+00 3.23170602e-01] ... [ 1.02813375e+00 1.90530524e-01 -4.62009996e-01 ... -6.80671155e-01 -4.23452020e-01 -9.23316836e-01] [-1.58244646e+00 1.83897388e+00 3.17272365e-01 ... 9.94121194e-01 1.01899672e+00 6.07300937e-01] [ 2.88110065e+00 1.04040408e+00 -1.87714195e+00 ... 4.05140549e-01 -5.51027656e-01 -4.59050667e-03]] [[ 1.22295749e+00 -8.11361313e-01 3.74026918e+00 ... 3.51964116e-01 1.14581871e+00 5.11604309e-01] [-4.03262824e-01 -6.67899132e-01 -7.50943840e-01 ... 4.14282121e-02 -3.03251624e+00 -2.07998753e+00] [-1.28693476e-01 1.88834155e+00 9.94118989e-01 ... 9.45124447e-01 1.65975019e-02 8.63057077e-01] ... [ 5.02211392e-01 1.35702837e+00 1.88538119e-01 ... 1.69560552e-01 -1.03697097e+00 -6.83024406e-01] [ 1.63723636e+00 2.65442967e+00 -1.42158544e+00 ... -2.30503827e-01 -2.56390476e+00 -1.56496918e+00] [ 2.71503067e+00 8.86115372e-01 2.32592925e-01 ... 7.69684136e-01 6.41476035e-01 -2.76015908e-01]] [[-2.65420794e+00 7.12052584e-01 1.00781643e+00 ... 9.14475739e-01 -1.93807244e-01 1.69966090e+00] [-1.22768927e+00 -1.77667081e+00 1.34083092e+00 ... -2.74319363e+00 3.31153584e+00 2.37192988e-01] [-9.58931670e-02 -1.49935484e+00 4.34137285e-01 ... 9.99385834e-01 -1.00965083e+00 -1.40278184e+00] ... [ 9.54448581e-01 -8.91184449e-01 -1.67392468e+00 ... 8.49772811e-01 -3.84832509e-02 5.56692779e-01] [ 1.96100736e+00 1.80185869e-01 2.28485078e-01 ... -2.29038978e+00 -1.55165923e+00 2.54019618e+00] [ 8.13814938e-01 6.58817217e-02 3.90801728e-01 ... 1.51051390e+00 -2.33646655e+00 -1.39557183e+00]] ... [[ 7.21988603e-02 8.12079668e-01 -9.11661610e-03 ... 
-2.07566190e+00 -3.03298682e-01 2.51250196e+00] [-1.02167821e+00 7.17373133e-01 -1.78728938e+00 ... -8.28391433e-01 -9.40048277e-01 -2.68514305e-01] [ 2.27554584e+00 -1.29643095e+00 1.54249823e+00 ... 1.44238770e-01 8.71286273e-01 4.51609164e-01] ... [-3.87456447e-01 7.11226642e-01 4.31958437e+00 ... -6.31982982e-01 1.81455743e+00 7.74746001e-01] [ 2.63906538e-01 1.06027651e+00 1.30495453e+00 ... -2.12441444e+00 1.00905252e+00 1.95673740e+00] [-1.53001821e+00 -2.61159611e+00 -1.16358709e+00 ... 1.12655187e+00 6.41204119e-01 8.49835217e-01]] [[-9.03697014e-01 2.01023841e+00 -2.17572427e+00 ... -1.00401817e-02 -1.41670179e+00 2.07737541e+00] [ 2.61415029e+00 -3.06661159e-01 1.71719626e-01 ... 1.90989137e+00 -8.31483126e-01 1.09991086e+00] [ 6.79438472e-01 1.93679667e+00 -3.56564045e-01 ... -2.40729973e-01 2.36578035e+00 5.03569126e-01] ... [ 1.99634969e+00 7.95080304e-01 -1.20292151e+00 ... 7.94958547e-02 3.79002261e+00 -1.49215269e+00] [-9.51217711e-01 -3.30452621e-01 -6.65687263e-01 ... -2.26077294e+00 -3.97551346e+00 7.00015485e-01] [-3.65436196e+00 -2.00041398e-01 1.41564950e-01 ... -1.56939423e+00 -2.54023051e+00 -3.67630291e+00]] [[ 2.52665788e-01 -1.30913699e+00 -5.12696840e-02 ... 1.28820682e+00 8.06730211e-01 -2.01047063e+00] [ 3.35679436e+00 -2.52338886e+00 1.47496903e+00 ... -3.19355392e+00 8.56545985e-01 1.37738371e+00] [ 9.23820615e-01 -2.43944656e-02 -7.87621856e-01 ... 2.51886159e-01 8.43459904e-01 -1.34967238e-01] ... [ 9.00332451e-01 2.04954219e+00 -2.06753016e+00 ... -1.16249546e-01 -1.61491558e-02 1.67394257e+00] [ 1.89270091e+00 6.96350813e-01 -2.27839068e-01 ... 9.18278933e-01 1.96113598e+00 8.16334903e-01] [-2.32911611e+00 -1.13768542e+00 -7.13574529e-01 ... -3.14367563e-01 -5.69891930e-01 1.28874087e+00]]] [[[-2.52736831e+00 3.07939124e+00 1.11217380e+00 ... 3.10438466e+00 -2.67321157e+00 -2.88334399e-01] [ 1.10630035e+00 -2.03373957e+00 9.85636413e-01 ... 
3.55357027e+00 -1.39247310e+00 1.02339017e+00] [ 1.85400927e+00 2.26583648e+00 1.03112781e+00 ... -8.09213042e-01 -3.20898116e-01 3.11298442e+00] ... [-3.71369863e+00 3.21303868e+00 7.77475119e-01 ... -2.98810506e+00 -1.46856058e+00 -2.57612920e+00] [-8.75835121e-01 2.48734206e-01 2.41814661e+00 ... 4.66239929e-01 -6.31791878e+00 -2.29605722e+00] [-2.04742622e+00 -4.61646527e-01 3.24827522e-01 ... -1.28862828e-01 -1.10768723e+00 -1.14462614e+00]] [[-2.16195774e+00 -2.96598673e+00 2.93360293e-01 ... -3.58396292e-01 -4.12423515e+00 -2.17150474e+00] [ 6.06225193e-01 -2.07028937e+00 -4.72165734e-01 ... -2.64262152e+00 1.06659079e+00 -2.03367329e+00] [ 1.41612303e+00 -3.76888585e+00 5.52246618e+00 ... -2.04517618e-02 1.88974690e+00 -4.98005772e+00] ... [ 1.10719234e-01 -1.40490484e+00 1.52182031e+00 ... 1.18456018e+00 2.43377471e+00 1.81947991e-01] [ 4.19740963e+00 9.46398020e-01 -1.06249440e+00 ... -1.24487352e+00 2.14563990e+00 -5.11292934e+00] [ 1.72749770e+00 -4.50291425e-01 4.10369968e+00 ... 1.05718708e+00 -1.08897097e-01 2.80134225e+00]] [[ 8.07000697e-01 1.91124105e+00 -2.15791035e+00 ... -3.74224353e+00 -4.98302317e+00 9.81558710e-02] [-9.27000225e-01 -1.99920034e+00 -3.27080131e+00 ... -1.63683498e+00 -1.62903190e+00 1.51483989e+00] [-1.01567531e+00 -6.90103546e-02 1.34372497e+00 ... -3.58662277e-01 -4.74997580e-01 -3.87420297e-01] ... [-1.19320095e+00 -3.87586641e+00 1.55292416e+00 ... 3.58437991e+00 1.18474647e-01 -1.47476268e+00] [-3.70545030e+00 2.51464653e+00 -4.21180630e+00 ... -8.55864227e-01 -2.10022020e+00 1.51831460e+00] [ 2.99937427e-01 -4.53914374e-01 1.97738194e+00 ... -2.03376341e+00 4.21406317e+00 -1.53708756e+00]] ... [[ 1.95742881e+00 -6.75542295e-01 -3.98217857e-01 ... -1.63859737e+00 2.78021359e+00 3.03110301e-01] [-2.34154463e-01 2.63118553e+00 1.29423833e+00 ... -9.45463002e-01 2.15494061e+00 -1.11052895e+00] [ 5.07614517e+00 6.74365282e-01 -1.47895539e+00 ... -2.49753070e+00 1.47861946e+00 -3.23346162e+00] ... 
[-6.52712822e-01 -2.80561948e+00 -1.66757154e+00 ... -3.09588969e-01 -2.38118315e+00 3.11500621e+00] [-2.03713015e-01 -2.18722844e+00 -3.52202582e+00 ... 2.51772690e+00 2.98007274e+00 2.42625809e+00] [ 3.00011301e+00 1.23538077e+00 -9.95789766e-01 ... -3.82764935e+00 -1.14660633e+00 -5.48378134e+00]] [[ 1.53311276e+00 -2.37610221e+00 -1.28000772e+00 ... -5.59697294e+00 6.31893539e+00 1.40412509e+00] [ 1.17071283e+00 -1.60308206e+00 1.67201515e-02 ... 3.09072089e+00 -2.10606009e-01 -2.08384037e+00] [ 6.72415018e+00 -8.81778479e-01 -9.65383828e-01 ... -9.67361391e-01 -9.85018730e-01 -2.34458041e+00] ... [ 3.13465786e+00 -4.20871162e+00 9.29193556e-01 ... -1.68342900e+00 3.07458138e+00 6.23244333e+00] [-1.90413475e-01 -3.71488452e+00 -1.50868595e+00 ... -4.35562819e-01 -2.30114889e+00 2.53352809e+00] [-1.00610125e+00 1.37322354e+00 6.76192999e-01 ... -1.90981412e+00 4.23586629e-02 -1.48760366e+00]] [[ 3.58834296e-01 8.52736652e-01 4.26800609e-01 ... -1.68989491e+00 -1.82481062e+00 4.13589478e+00] [-1.02738571e+00 3.04690599e+00 -1.53544807e+00 ... -6.56575382e-01 1.00310564e+00 8.54071796e-01] [ 2.38780689e+00 3.10890794e+00 3.80511236e+00 ... 1.16729617e+00 -4.11840260e-01 1.83067426e-01] ... [-1.93403912e+00 3.28458548e+00 -4.00055027e+00 ... 4.61867809e+00 4.56894696e-01 -1.03881884e+00] [ 7.61732996e-01 -1.06902575e+00 3.99176478e-01 ... 4.62070799e+00 -9.76729035e-01 3.15363812e+00] [ 3.24300528e+00 -1.99297726e+00 -1.13885880e+00 ... -5.37277031e+00 -5.37019789e-01 1.83137387e-01]]] [[[-1.77090570e-01 5.20599127e-01 -2.98372924e-01 ... 2.39342272e-01 -3.96033041e-02 3.19968253e-01] [-4.14680064e-01 -1.64003119e-01 4.74968076e-01 ... -4.15256023e-01 -8.20733756e-02 -5.18708944e-01] [ 9.57718715e-02 3.29706132e-01 -1.71766117e-01 ... 2.82663647e-02 -3.12918335e-01 5.81570342e-02] ... [ 2.02675626e-01 -1.73794225e-01 3.72721702e-01 ... 4.06735212e-01 3.59459490e-01 5.36781810e-02] [ 4.62195307e-01 -3.83627638e-02 3.34754348e-01 ... 
-6.74062610e-01 2.27839306e-01 1.99035689e-01] [-3.31456691e-01 -5.53330779e-01 6.43289760e-02 ... -1.17189385e-01 -1.54119760e-01 6.87410384e-02]] [[ 3.26478839e-01 -1.40321672e-01 3.48341942e-01 ... 7.92611483e-03 -7.35721141e-02 1.91958070e-01] [ 8.63766074e-01 -4.00936812e-01 2.62118638e-01 ... -3.58274043e-01 -1.23913124e-01 4.65110332e-01] [ 5.37626565e-01 -5.58525085e-01 5.14546931e-01 ... -6.43777475e-02 7.60237873e-01 6.43154830e-02] ... [-1.98924810e-01 4.40302014e-01 -9.07398835e-02 ... -1.24805599e-01 -4.01263535e-01 1.20411895e-01] [-1.37871087e-01 3.71139258e-01 3.66381615e-01 ... -3.08543831e-01 1.58304304e-01 -1.83178961e-01] [ 3.14307392e-01 1.42576456e-01 5.41155219e-01 ... -4.54895079e-01 -8.06020975e-01 -2.10320085e-01]] [[ 2.26902822e-03 5.59852362e-01 -1.75255448e-01 ... -1.89101279e-01 4.75745946e-01 -2.58348256e-01] [-2.77636617e-01 2.86998898e-01 3.89534473e-01 ... -2.09508941e-01 8.21132287e-02 9.20757726e-02] [ 1.68193430e-01 -1.99508771e-01 3.67902130e-01 ... -2.14744769e-02 -2.23730922e-01 -5.39228559e-01] ... [-3.09053421e-01 -6.07605040e-01 -4.18234527e-01 ... -5.08225143e-01 4.65232879e-01 -2.28569079e-02] [ 1.84076265e-01 -1.74576193e-01 2.41944268e-01 ... -7.26104081e-02 -4.23758738e-02 3.83530051e-01] [-2.37736732e-01 1.71581671e-01 1.01312630e-01 ... 2.10450992e-01 4.73510355e-01 -9.03272480e-02]] ... [[-4.66764241e-01 2.89419323e-01 2.67778844e-01 ... -2.32236505e-01 4.22161281e-01 7.08558798e-01] [-4.35382247e-01 -3.57927918e-01 -3.90192926e-01 ... -2.98024803e-01 5.40399134e-01 5.71758226e-02] [ 4.88948345e-01 5.99944480e-02 1.77617788e-01 ... -4.38728839e-01 -2.43245274e-01 -2.65232950e-01] ... [-5.05977869e-01 2.16327623e-01 4.17691886e-01 ... -4.98463571e-01 3.10831159e-01 -2.04730436e-01] [ 2.53352910e-01 1.53662220e-01 -3.00393134e-01 ... -3.89416397e-01 -5.91210306e-01 8.14720839e-02] [-4.35232282e-01 2.32971594e-01 -1.21318791e-02 ... 
2.65375197e-01 -1.67893633e-01 1.34123943e-03]] [[-6.38181120e-02 3.01120967e-01 -1.51388183e-01 ... 1.61446107e-03 1.59042016e-01 1.83781058e-01] [ 1.16783932e-01 5.59348106e-01 -9.11100879e-02 ... -2.58037269e-01 8.10764730e-02 2.04190895e-01] [ 3.38770509e-01 2.02139601e-01 2.86340952e-01 ... 3.43847781e-01 3.72971147e-01 -1.06280118e-01] ... [-6.82958364e-01 -2.81132877e-01 -8.98470171e-03 ... -2.53052980e-01 7.11487532e-01 2.49039888e-01] [-3.59074563e-01 2.91793887e-02 3.49730849e-01 ... -1.76711872e-01 -5.98720387e-02 -7.50632405e-01] [-1.94179058e-01 -3.90515417e-01 -2.79271811e-01 ... 3.29962879e-01 2.51527876e-01 2.12291881e-01]] [[ 5.48288643e-01 5.00997379e-02 -1.55756429e-01 ... 1.40966520e-01 4.06620830e-01 3.76716673e-01] [-8.37226212e-02 1.13258377e-01 5.50202355e-02 ... -2.20644191e-01 1.80408135e-01 -9.14122686e-02] [ 5.85433422e-03 -2.32993141e-01 2.15774089e-01 ... -4.44608808e-01 3.05144608e-01 4.07901078e-01] ... [ 5.81986427e-01 1.42690241e-01 1.24992929e-01 ... -2.25411072e-01 -2.72200033e-02 1.21704333e-01] [-5.38768880e-02 1.91055357e-01 -1.31013125e-01 ... 4.15110499e-01 -1.45254388e-01 -3.02066207e-01] [ 3.51665556e-01 2.97606319e-01 -4.55150664e-01 ... 1.05167054e-01 -3.64139140e-01 3.08424890e-01]]]] [[[[ 2.01231927e-01 -1.92162871e-01 -1.48120594e+00 ... 1.50668278e-01 1.48550853e-01 4.75331217e-01] [ 4.94993091e-01 -2.76029795e-01 3.52568090e-01 ... -1.92360580e-01 3.43665600e-01 2.81374604e-01] [-2.81515211e-01 -6.20461702e-01 -2.75062621e-01 ... -5.69907464e-02 5.18457964e-02 -6.40174925e-01] ... [-5.27582765e-01 -1.41090766e-01 -6.93083644e-01 ... 6.86133265e-01 -1.84295809e+00 -9.33304846e-01] [-1.05447724e-01 -2.40263343e-01 -2.20342606e-01 ... 5.74082792e-01 -7.41392493e-01 -1.56628847e-01] [-3.09843719e-01 9.82178152e-02 -1.91124707e-01 ... 2.26895332e-01 2.47885838e-01 5.97489715e-01]] [[ 8.21494520e-01 4.19799358e-01 -8.94899547e-01 ... 
1.58852458e-01 -2.75800824e-01 -3.19713920e-01] [ 3.94723654e-01 2.60947078e-01 9.22801614e-01 ... 5.29673278e-01 -1.25587213e+00 5.65199316e-01] [-1.53252214e-01 2.07097664e-01 1.13198006e+00 ... -7.32970595e-01 -1.23833098e-01 1.80569351e-01] ... [-9.42236125e-01 1.06181324e+00 1.47863507e+00 ... 3.06557119e-01 -1.37517095e+00 2.66711801e-01] [ 1.26883006e+00 -2.76514977e-01 1.07706225e+00 ... 1.42970473e-01 1.33778298e+00 1.00180960e+00] [ 8.36915374e-02 5.07779941e-02 -9.89288747e-01 ... -3.97585273e-01 8.13951015e-01 5.32475889e-01]] [[ 1.18834066e+00 7.77328253e-01 -4.34970170e-01 ... 9.59948719e-01 -3.12073939e-02 -8.74635458e-01] [ 4.63027060e-01 -1.22193968e+00 -1.17245756e-01 ... -1.41174555e-01 8.15396369e-01 6.25258923e-01] [-1.96429491e-01 -7.19794869e-01 -8.19691345e-02 ... 9.39239860e-01 -8.49890262e-02 4.20775026e-01] ... [-1.62636757e+00 -6.58561587e-02 2.20669642e-01 ... -9.63370144e-01 -1.01942050e+00 -1.08938646e+00] [-2.25433573e-01 7.26289392e-01 4.22943980e-01 ... -1.22539377e+00 -1.63140848e-01 4.02976274e-01] [-1.22049265e-02 -4.34091926e-01 -1.30516636e+00 ... 3.30460399e-01 -1.09811351e-01 1.64760128e-01]] ... [[-1.05926895e+00 -1.98647249e+00 1.05749346e-01 ... -3.79309565e-01 3.27418357e-01 2.48425007e-01] [-1.93527505e-01 1.45193970e+00 9.65121627e-01 ... -9.09677148e-01 2.88174063e-01 -1.06366873e+00] [ 3.09048504e-01 5.50241470e-01 5.99478066e-01 ... -1.10180378e+00 2.23544851e-01 7.56940663e-01] ... [ 3.60400885e-01 -3.85624677e-01 -3.47315043e-01 ... 1.41471767e+00 -8.69007051e-01 -9.68419433e-01] [ 4.29624170e-01 1.29315245e+00 -4.58434939e-01 ... 1.03937531e+00 6.45777583e-01 1.65067637e+00] [-1.00477731e+00 4.73336652e-02 7.14409649e-01 ... 1.54285312e-01 1.76725268e+00 -5.66494942e-01]] [[-2.78333634e-01 -3.88887167e-01 8.64121914e-01 ... -6.08992159e-01 -8.14060271e-01 -7.36668110e-01] [ 1.30418682e+00 -6.65458322e-01 3.76516074e-01 ... 
2.61812657e-01 -1.52609497e-01 4.64601308e-01] [-3.05826455e-01 -5.07838249e-01 -1.61484852e-01 ... 6.65994659e-02 -6.94292903e-01 7.13607311e-01] ... [-5.51147878e-01 4.53004152e-01 -5.50405025e-01 ... -6.65649295e-01 8.33264351e-01 9.74442586e-02] [ 2.61368155e-01 -1.29845572e+00 1.29169655e+00 ... -1.34483659e+00 3.44863757e-02 2.99196124e-01] [-1.32100451e+00 -1.53969204e+00 3.41030031e-01 ... -7.56559670e-01 -3.45809668e-01 -1.35573614e+00]] [[ 6.12985551e-01 4.10975009e-01 -6.69467092e-01 ... -1.58819938e+00 -6.37451947e-01 8.25923502e-01] [ 1.62931427e-01 7.84829319e-01 -3.27618629e-01 ... -6.94549024e-01 6.81957006e-01 3.21237415e-01] [-1.22793055e+00 6.36824608e-01 1.03669964e-01 ... 7.55360603e-01 -3.27990919e-01 -1.41843712e+00] ... [ 7.14378655e-02 5.92137694e-01 2.80699432e-01 ... -6.58006132e-01 1.09050441e+00 1.09053016e+00] [-5.90853870e-01 5.33984423e-01 -5.05057096e-01 ... -3.62177730e-01 2.87103802e-01 -5.27882814e-01] [ 3.96522492e-01 -7.48253047e-01 -1.22834420e+00 ... 1.21871102e+00 2.93445438e-01 -8.44939947e-01]]] [[[ 2.88360119e+00 -9.88591969e-01 1.01382458e+00 ... -1.21066904e+00 1.36381403e-01 1.98772502e+00] [ 9.67630982e-01 3.60846400e-01 2.57041073e+00 ... -3.38908076e-01 -6.21668279e-01 5.26618123e-01] [-2.25631404e+00 2.09845734e+00 -8.24533582e-01 ... 2.91946650e+00 -1.73571229e+00 1.42713499e+00] ... [ 3.82914066e-01 -2.51105213e+00 -1.47035062e-01 ... 1.66116118e-01 -1.47997785e+00 -9.97731507e-01] [ 8.58864963e-01 5.90926409e-01 3.07041478e+00 ... -1.52434552e+00 -1.57691050e+00 -7.66509056e-01] [ 1.07910410e-01 -1.48183763e+00 -2.67946386e+00 ... 2.00698566e+00 -1.03741956e+00 -9.37419057e-01]] [[-1.63995731e+00 -1.54635322e+00 -1.00172913e+00 ... 1.58365190e+00 -6.56539321e-01 3.08471143e-01] [ 8.43019366e-01 -2.15516615e+00 1.39442718e+00 ... -9.33493316e-01 -2.47416139e-01 -2.80887693e-01] [ 1.89288175e+00 -8.22911620e-01 -3.88099194e-01 ... -1.29716873e+00 2.17103934e+00 -1.75274861e+00] ... 
[ 3.47153378e+00 -2.55881101e-01 2.84677118e-01 ... -8.79100680e-01 7.08387971e-01 -2.57901400e-01] [-1.97868574e+00 5.24369836e-01 1.97570729e+00 ... -2.60498285e-01 -1.15056896e+00 1.86571109e+00] [-1.36123931e+00 -2.35126829e+00 -6.17906392e-01 ... -7.36239195e-01 3.89128178e-01 6.20526969e-01]] [[-1.46608746e+00 -1.14606643e+00 -2.20155692e+00 ... 1.05475688e+00 2.63689113e+00 -2.21108899e-01] [ 1.74706960e+00 6.27482474e-01 3.38426971e+00 ... -4.33435887e-01 -9.11608636e-01 -9.48822260e-01] [ 1.71310437e+00 5.23764312e-01 -2.60403544e-01 ... 9.67840254e-01 2.78926444e+00 1.58387375e+00] ... [-2.70625162e+00 5.00137389e-01 7.67796814e-01 ... -3.85944813e-01 -1.15087509e+00 -1.40846416e-01] [ 2.61517048e+00 -5.32837331e-01 1.62797344e+00 ... 3.37606502e+00 1.60477734e+00 -8.50149155e-01] [-1.14358342e+00 -8.27027559e-01 6.91879261e-03 ... -1.49396226e-01 -2.82717443e+00 -2.08594322e+00]] ... [[ 1.25810921e+00 -1.93534362e+00 7.78474748e-01 ... -1.34289610e+00 -3.01102734e+00 -1.78873754e+00] [ 3.90033096e-01 2.77986646e+00 8.87179673e-02 ... -2.07333970e+00 -1.29460084e+00 -4.63105500e-01] [-1.62355125e+00 -1.52191293e+00 -3.61767244e+00 ... -1.00976815e-02 2.18654585e+00 -1.67144462e-01] ... [-1.29132473e+00 -3.86739582e-01 2.62272686e-01 ... 2.24881244e+00 2.15237665e+00 -1.52895689e+00] [-1.42204273e+00 3.36759257e+00 1.22767186e+00 ... 3.75142060e-02 -1.50723624e+00 -9.44366813e-01] [ 1.31960157e-02 -3.11914295e-01 3.15748906e+00 ... 6.04075864e-02 4.18973970e+00 1.98197380e-01]] [[ 5.70912659e-01 -3.31151187e-01 1.02487993e+00 ... 2.48728204e-03 -3.11826795e-01 -1.68085945e+00] [ 5.67006946e-01 3.49788100e-01 1.30654550e+00 ... 1.72104597e+00 -2.61156242e-02 8.34562123e-01] [ 5.53998232e-01 1.45598257e+00 -1.55303478e+00 ... -1.72275162e+00 3.40475345e+00 1.46764886e+00] ... [ 1.28414547e+00 -1.15551865e+00 -3.15616816e-01 ... 2.70990580e-01 1.68623433e-01 1.59431458e-01] [-1.50074244e+00 8.31961393e-01 2.23303616e-01 ... 
2.17552245e-01 -2.17631370e-01 4.84878838e-01] [-3.05494452e+00 -4.75283891e-01 -2.18245775e-01 ... -3.49551708e-01 -5.60519218e-01 -6.66970253e-01]] [[ 5.30942202e-01 -2.50970936e+00 7.49587268e-02 ... 1.89068353e+00 2.88072658e+00 1.78405568e-01] [-6.71329737e-01 4.13954306e+00 -3.38635492e+00 ... 5.09880662e-01 -2.82439423e+00 1.43035543e+00] [ 2.43470263e+00 -5.40432751e-01 -1.05624415e-01 ... -9.14384604e-01 -1.12806487e+00 -1.34982884e+00] ... [-1.02418256e+00 9.81437445e-01 1.49642098e+00 ... 1.87264609e+00 4.23441696e+00 -1.06855389e-03] [ 3.34131455e+00 1.46896303e+00 -2.31039715e+00 ... 1.63479030e+00 -4.13594037e-01 -2.32907224e+00] [-1.19108260e-01 -7.00473845e-01 -2.23183584e+00 ... -2.00509459e-01 -1.57289803e+00 -2.85070753e+00]]] [[[-4.72081006e-01 1.03450656e+00 -2.45685756e-01 ... -2.37840060e-02 -7.07481727e-02 6.51587129e-01] [ 1.69533694e+00 -7.12370515e-01 6.83334112e-01 ... -1.95609286e-01 2.81566173e-01 5.11645436e-01] [ 2.28314266e-01 -3.65696073e-01 1.70962781e-01 ... 7.67796159e-01 -3.12538212e-03 -2.06290260e-01] ... [ 4.41054702e-01 6.12152480e-02 -2.24500790e-01 ... 4.94303226e-01 8.92992973e-01 -4.03998315e-01] [-1.63322902e+00 1.27481031e+00 7.59090245e-01 ... -1.75828600e+00 4.32390243e-01 6.87387586e-01] [-1.12579799e+00 4.72879142e-01 -2.34303668e-01 ... -3.68971601e-02 9.40007687e-01 5.09427845e-01]] [[-5.06462038e-01 4.69321370e-01 -4.02716339e-01 ... -1.11222625e+00 -2.40848675e-01 2.75151134e-01] [-2.94842303e-01 -4.35196251e-01 -3.34393717e-02 ... 4.78899986e-01 7.25665808e-01 -1.90205052e-01] [ 5.06656766e-01 4.99532878e-01 -8.44755292e-01 ... 3.09288263e-01 -1.34169972e+00 3.27783763e-01] ... [ 3.17875564e-01 -7.04289019e-01 -8.44307065e-01 ... 1.06058409e-02 3.15408200e-01 -1.62414499e-02] [-1.42977685e-01 -3.82079154e-01 2.69176453e-01 ... 1.06129743e-01 8.26705247e-02 7.60470867e-01] [ 1.12136602e+00 5.35741687e-01 -8.20043445e-01 ... 
-3.35922897e-01 -1.01157880e+00 1.08604901e-01]] [[-5.31840682e-01 -2.99507022e-01 2.38497540e-01 ... -5.24276435e-01 -6.50767863e-01 -6.85724378e-01] [-1.23862386e+00 1.60899118e-01 1.28927141e-01 ... 1.02545604e-01 -1.22379780e+00 -4.97039944e-01] [ 2.23701879e-01 3.21749508e-01 2.95827508e-01 ... -3.82116616e-01 6.19591177e-01 2.68764824e-01] ... [ 1.71581015e-01 -1.21354914e+00 -5.00032961e-01 ... 4.10896540e-01 -4.31913942e-01 -6.31768048e-01] [ 1.53860047e-01 2.29854807e-01 -6.18116438e-01 ... 7.32769445e-02 -7.44320571e-01 -1.79732203e+00] [-3.87251854e-01 4.82051551e-01 6.73394918e-01 ... 8.73777390e-01 2.42829368e-01 7.29108810e-01]] ... [[-1.10374546e+00 -1.44873226e+00 7.79145837e-01 ... 7.06441164e-01 3.27785790e-01 -7.32312977e-01] [ 3.09550852e-01 4.17355895e-02 -9.92067039e-01 ... -1.01701366e-02 2.98620611e-01 1.26075342e-01] [ 3.10256094e-01 -6.11411572e-01 -1.29545510e-01 ... -6.51241601e-01 1.30357289e+00 6.35389984e-02] ... [-6.23649776e-01 -5.95596790e-01 -9.23767209e-01 ... 1.29466534e-01 -1.99654356e-01 6.63597107e-01] [-1.27549857e-01 2.17253238e-01 -2.63371706e-01 ... -3.25687110e-01 -6.28365129e-02 7.69976676e-01] [ 2.29229078e-01 -4.46470886e-01 -1.68170261e+00 ... -5.80361128e-01 -1.02327430e+00 -3.11326534e-01]] [[ 3.15164328e-01 -1.29472569e-01 -2.51383722e-01 ... 2.02997163e-01 5.64355850e-01 9.03333843e-01] [-4.94318962e-01 2.28706300e-01 -2.51524657e-01 ... 7.88094997e-01 -8.98135733e-03 -1.39532834e-01] [-2.47339398e-01 -2.35359117e-01 1.23833723e-01 ... 7.74945438e-01 3.81954312e-01 9.96221378e-02] ... [-8.22608054e-01 3.36413711e-01 -1.19663134e-01 ... 6.58767223e-01 -2.18832344e-02 4.10796016e-01] [ 8.91596794e-01 5.80617189e-01 -5.91389164e-02 ... -1.63750708e-01 -2.51641214e-01 -8.67680609e-01] [-5.17281331e-02 -2.10438281e-01 -2.09494643e-02 ... 5.96227884e-01 4.60841447e-01 -1.20122999e-01]] [[ 5.61088562e-01 -2.47602700e-03 -4.59221333e-01 ... 
-9.90956247e-01 -5.22130243e-02 1.21098952e-02] [-5.70205033e-01 2.26269573e-01 -2.06163347e-01 ... -1.39821792e+00 7.83172846e-01 7.21435905e-01] [ 9.57590580e-01 -4.67678130e-01 3.01519215e-01 ... 9.40205082e-02 -7.44714200e-01 6.81815445e-01] ... [ 2.58709013e-01 6.38082549e-02 2.13777691e-01 ... 8.58929038e-01 9.90119398e-01 9.32870090e-01] [-3.27620089e-01 -4.97418880e-01 6.50605619e-01 ... -1.19580948e+00 -1.02963328e-01 9.48366404e-01] [-7.33568594e-02 -1.06216538e+00 6.76729620e-01 ... 5.81924617e-01 -3.27380337e-02 5.01958251e-01]]] [[[ 1.15118039e+00 -5.63240170e-01 -2.52015814e-02 ... -2.10374951e+00 3.92137861e+00 -3.90124619e-01] [ 1.90273952e+00 -9.28485617e-02 -1.61822259e+00 ... 3.01971734e-01 -6.84351265e-01 -3.03469077e-02] [-1.59925926e+00 -3.50258064e+00 1.13856584e-01 ... -1.15988648e+00 -2.20567775e+00 3.08241192e-02] ... [-1.54953644e-01 -2.19871235e+00 2.75718659e-01 ... -4.53066796e-01 1.79781079e+00 -1.59947836e+00] [ 5.95303893e-01 1.37830675e+00 -2.98638248e+00 ... 7.37553418e-01 1.92174280e+00 -5.13536453e-01] [-2.96934992e-01 -1.18343604e+00 -1.36520338e+00 ... -5.48081160e-01 -4.68166947e-01 2.45924807e+00]] [[-1.83660805e+00 4.36688736e-02 -5.71269929e-01 ... -3.22825861e+00 5.01993060e-01 -1.63175523e+00] [ 1.95077634e+00 -3.05004716e-01 3.25878859e+00 ... 3.10956724e-02 -3.82384211e-01 8.09178472e-01] [ 2.05804801e+00 5.82619011e-01 -4.07823026e-01 ... 1.19823849e+00 9.51876760e-01 1.06909378e-02] ... [ 6.02954507e-01 -2.03656554e+00 -3.80625635e-01 ... -2.30290443e-01 2.13253170e-01 6.05752051e-01] [ 3.12656522e-01 -6.95210993e-01 1.82328865e-01 ... -8.27092826e-02 -7.31696665e-01 1.80972680e-01] [-2.64059711e+00 4.05253112e-01 9.04072344e-01 ... -4.38291550e-01 7.83335507e-01 2.61221671e+00]] [[ 2.53314710e+00 1.26233363e+00 1.18029428e+00 ... -7.94281662e-01 -3.65917116e-01 2.49563649e-01] [ 3.43440223e+00 1.77720821e+00 -8.43153358e-01 ... 
-2.95173526e+00 1.35479200e+00 2.61681008e+00] [-7.55776227e-01 5.06470203e-01 2.39063427e-01 ... 3.01841187e+00 -1.76130199e+00 -2.73628145e-01] ... [ 1.62684608e+00 2.45415285e-01 -6.75718069e-01 ... 3.30445796e-01 -5.75743437e-01 3.31223679e+00] [ 2.19746709e+00 1.65392649e+00 3.16537237e+00 ... -4.32803035e-02 1.21821630e+00 2.57510066e+00] [-6.46430492e-01 -2.87064219e+00 -2.11182761e+00 ... -2.43404001e-01 -6.99001253e-01 8.25266838e-01]] ... [[-3.56099218e-01 1.39609969e+00 2.60341477e+00 ... 2.61114907e+00 -1.40942156e+00 -9.32115078e-01] [ 1.89424658e+00 -1.65528941e+00 9.16959465e-01 ... 1.12904084e+00 -1.60823584e-01 1.07318544e+00] [-1.10839534e+00 7.69253254e-01 -2.84816074e+00 ... -2.80970311e+00 2.44835123e-01 -1.34432268e+00] ... [-6.10945046e-01 -9.60006535e-01 6.18401289e-01 ... 1.79668510e+00 -3.27954024e-01 5.03769398e-01] [-2.58680320e+00 -8.50568950e-01 5.76943517e-01 ... -4.22205269e-01 3.23184162e-01 1.02239573e+00] [-1.23154259e+00 -5.48079908e-01 1.01546803e-02 ... 1.63392031e+00 1.26453483e+00 2.47071572e-02]] [[-1.40984082e+00 1.52544749e+00 1.19831109e+00 ... -8.31156611e-01 1.80489302e+00 -7.62091219e-01] [-1.03648210e+00 6.50961399e-01 -1.53800920e-01 ... 2.91633159e-01 2.05923125e-01 1.40154457e+00] [ 9.66578186e-01 -4.20625240e-01 -1.55641353e+00 ... 7.05334663e-01 -3.85870673e-02 -3.68108630e-01] ... [ 1.36866584e-01 2.30775401e-01 1.03303814e+00 ... -9.34643149e-01 -2.43488804e-01 2.73823678e-01] [-1.32010305e+00 -2.23158345e-01 7.71119297e-01 ... -9.73181844e-01 -4.30066854e-01 7.19058394e-01] [ 7.88931131e-01 -2.25834206e-01 -3.75987709e-01 ... -2.07795596e+00 1.36568463e+00 -1.46387815e+00]] [[ 1.00402713e+00 -2.26040149e+00 2.32558584e+00 ... 1.44454217e+00 -8.29060435e-01 -2.26521850e+00] [-1.84006715e+00 -2.98116237e-01 1.73809454e-01 ... -1.53458333e+00 -1.28570473e+00 -4.79766876e-01] [-6.27615094e-01 4.75829393e-01 6.30678460e-02 ... 2.76343822e+00 6.63920283e-01 3.90143961e-01] ... 
[-4.67260420e-01 4.21307951e-01 4.72377658e-01 ... -2.01047969e+00 -1.39293599e+00 2.01499522e-01] [-1.08337736e+00 9.14625347e-01 4.56358767e+00 ... -3.43228340e+00 2.38770986e+00 -5.79106450e-01] [-5.01354277e-01 -2.42807388e+00 1.44772917e-01 ... 1.18761170e+00 -2.19424605e+00 2.27591729e+00]]] [[[ 1.95067501e+00 2.14918208e+00 -1.57094285e-01 ... 2.92565405e-01 3.44274819e-01 -2.68507814e+00] [ 6.99708700e-01 -2.18900347e+00 -1.93087041e+00 ... -3.56494784e-01 1.48967111e+00 -1.18113744e+00] [ 2.81407088e-01 -3.83184314e+00 2.19926357e+00 ... -4.97960687e-01 -6.60014725e+00 1.83804363e-01] ... [ 5.14583492e+00 8.57271969e-01 -8.15069735e-01 ... 1.78510892e+00 -2.33566284e+00 -4.03086138e+00] [ 6.82355613e-02 2.90821719e+00 -2.60727191e+00 ... 3.85370195e-01 2.36159396e+00 3.19781280e+00] [-1.88508058e+00 -1.51976073e+00 -4.46774244e+00 ... 3.71081643e-02 -4.68079299e-01 2.28632092e+00]] [[-4.22192633e-01 -1.64996040e+00 -3.14043736e+00 ... 9.87642050e-01 1.19587147e+00 -7.50515878e-01] [-2.82804251e+00 -2.60839844e+00 2.33512712e+00 ... -1.81902087e+00 2.07519650e+00 -1.17108119e+00] [-3.66503072e+00 2.52529681e-01 4.61434031e+00 ... -5.83146751e-01 3.37752414e+00 1.33114231e+00] ... [ 1.16280243e-01 -2.29691792e+00 2.75861710e-01 ... 4.97036576e-01 -5.35235310e+00 1.90287900e+00] [ 1.52992558e+00 -2.08245850e+00 1.12448335e+00 ... 4.23512496e-02 4.39914036e+00 -2.44462347e+00] [ 8.10154200e-01 1.89558268e+00 -3.57751513e+00 ... 3.65442610e+00 5.84387636e+00 3.89078283e+00]] [[ 1.28067327e+00 -1.26065958e+00 3.15698314e+00 ... 3.20754862e+00 -1.15255237e+00 -1.08818877e+00] [-2.30129290e+00 1.84586978e+00 -9.40302253e-01 ... 7.23151665e-04 -9.45874691e-01 1.84506881e+00] [ 3.31398869e+00 -1.58452821e+00 2.12233424e+00 ... 2.13157988e+00 3.06096207e-02 2.03361797e+00] ... [ 3.90675950e+00 -1.10544062e+00 2.81572890e+00 ... -2.67443871e+00 8.76514733e-01 1.38042474e+00] [-4.21324670e-01 1.46792328e+00 -5.46287000e-01 ... 
2.16952428e-01 4.47650051e+00 -3.69329357e+00] [ 3.57402205e+00 4.90749788e+00 4.90415722e-01 ... 1.18994844e+00 -2.18185186e+00 -7.88833261e-01]] ... [[-7.32581735e-01 -7.26665378e-01 -1.54924691e+00 ... -2.91047364e-01 -2.15106487e+00 1.94369063e-01] [ 3.81101340e-01 -2.04451060e+00 -8.99136722e-01 ... -1.83234382e+00 -1.53107309e+00 1.10009551e+00] [-8.20364416e-01 -2.31817746e+00 7.08267689e-01 ... -4.86102551e-01 -1.31627285e+00 -9.56417441e-01] ... [-2.39287663e+00 -3.97526711e-01 2.30533028e+00 ... 5.35455179e+00 1.77115011e+00 3.59844953e-01] [ 2.05519605e+00 3.74202800e+00 -3.34652439e-02 ... -1.68161869e+00 -7.90472865e-01 -2.66614914e-01] [ 5.29539680e+00 -3.44760513e+00 -1.25472939e+00 ... 2.57839465e+00 -3.02444458e+00 2.62251425e+00]] [[ 2.57966566e+00 -2.67010498e+00 5.31310844e+00 ... -2.53132081e+00 1.03983355e+00 -2.73301035e-01] [-1.06115615e+00 3.97800136e+00 -7.17873693e-01 ... -2.93573737e+00 -1.21073198e+00 -1.49293196e+00] [ 4.38377559e-01 -4.51490998e-01 8.23009133e-01 ... -1.56221008e+00 2.61121273e+00 -2.66897511e+00] ... [ 2.62762332e+00 1.84853852e+00 1.34484637e+00 ... 1.81076360e+00 1.67952880e-01 -2.76625216e-01] [-4.07285213e+00 4.50133502e-01 4.90277171e-01 ... 1.39680672e+00 -8.13613057e-01 -1.59293079e+00] [-3.59064054e+00 1.29719269e+00 1.22385070e-01 ... 4.26459503e+00 -4.83666039e+00 2.34078765e+00]] [[ 2.62081552e+00 -3.34138823e+00 2.63066602e+00 ... 6.99841881e+00 -1.40596104e+00 -1.61931908e+00] [-1.31636333e+00 -7.54140663e+00 6.09555304e-01 ... 2.10831237e+00 3.21813250e+00 -1.11534691e+00] [-3.10350227e+00 1.36409652e+00 -7.67914832e-01 ... -2.67421389e+00 -5.46911097e+00 4.41065311e+00] ... [-1.41058898e+00 -8.64906490e-01 1.43956470e+00 ... -3.41277099e+00 4.34312701e-01 -2.00899696e+00] [-1.87592030e+00 -2.91609597e+00 7.49560058e-01 ... 1.07612360e+00 -1.40226626e+00 1.63739586e+00] [-1.77881217e+00 -1.11903703e+00 6.77273810e-01 ... 
2.92280579e+00 -2.89306068e+00 2.12509227e+00]]] [[[-7.20638260e-02 4.48794603e-01 -2.01592833e-01 ... -4.58856560e-02 1.17840178e-01 3.09488803e-01] [ 2.25977466e-01 4.67432648e-01 8.07307124e-01 ... 1.75447285e-01 -1.43583506e-01 2.15316907e-01] [ 2.62640059e-01 1.86120227e-01 4.67208147e-01 ... -1.47954136e-01 2.95911014e-01 1.69238105e-01] ... [-6.19667172e-01 2.60871381e-01 3.87989432e-01 ... 6.21861875e-01 3.79089057e-01 4.73242044e-01] [ 3.17941517e-01 4.23239589e-01 2.30285838e-01 ... -4.46772665e-01 2.92734593e-01 -1.88147902e-01] [ 1.25528306e-01 1.48880512e-01 -2.72976696e-01 ... 3.75463039e-01 -2.92213678e-01 1.95925701e-02]] [[-1.11218505e-01 4.44446057e-01 2.30081648e-01 ... -4.67784423e-03 -3.97925228e-01 5.59620082e-01] [-8.73504281e-02 -3.88433367e-01 1.06858402e-01 ... -5.21817952e-02 -9.86822695e-02 1.44373164e-01] [-7.17256144e-02 1.64917454e-01 -4.33928996e-01 ... -3.99108082e-01 1.69342101e-01 3.66139978e-01] ... [-2.93748915e-01 -1.84851930e-01 4.02058899e-01 ... 4.17947955e-02 -4.71093386e-01 -2.95629233e-01] [ 6.98786199e-01 1.74096072e-04 -1.29959881e-01 ... -1.56647518e-01 -1.03892749e-02 9.75728214e-01] [ 1.31800324e-01 1.78384960e-01 3.06819141e-01 ... 4.96760868e-02 -2.34084755e-01 3.02296937e-01]] [[-5.63425720e-01 -2.16876909e-01 -5.61094046e-01 ... 5.16226590e-01 1.92524165e-01 -1.48239598e-01] [ 4.79809374e-01 1.13812415e-02 7.29173562e-03 ... -6.95961535e-01 7.56958276e-02 -1.54755458e-01] [-6.16151094e-01 1.38418794e-01 1.10473685e-01 ... -4.38761376e-02 -5.90387702e-01 6.01898432e-01] ... [-1.54223338e-01 -6.59464240e-01 4.51539099e-01 ... 7.46928692e-01 -3.00184220e-01 4.28711623e-01] [ 1.06190667e-01 -2.78207809e-01 -2.16338709e-01 ... 2.60633510e-02 2.07714587e-02 5.42987846e-02] [ 2.19201311e-01 -1.25777453e-01 -2.42062971e-01 ... -9.48519111e-01 2.19937190e-01 -5.38970351e-01]] ... [[ 3.92777264e-01 -3.66026998e-01 5.52272975e-01 ... 
-2.53929377e-01 5.04519679e-02 4.18397367e-01] [-5.18825710e-01 2.18911916e-01 5.62294647e-02 ... -1.96927786e-01 -3.16029012e-01 6.73092365e-01] [ 1.58249482e-03 -4.97096553e-02 -2.74952561e-01 ... 2.50597775e-01 2.07987711e-01 -8.00929144e-02] ... [-1.77024856e-01 7.86215886e-02 -1.80590041e-02 ... -3.36980999e-01 -4.39385623e-01 -1.12828150e-01] [-3.25681478e-01 -3.24900538e-01 2.25222215e-01 ... 3.87294739e-01 1.56816319e-01 -1.01496570e-01] [-2.17978179e-01 1.59331471e-01 1.17763601e-01 ... 3.21666837e-01 -1.82331279e-01 -6.39342666e-01]] [[ 1.45584106e-01 -8.05677548e-02 2.88853317e-01 ... -4.61348414e-01 3.27400535e-01 -2.49589726e-01] [ 5.65220833e-01 -6.69737339e-01 -4.68013316e-01 ... 5.87015525e-02 5.29935241e-01 1.83132291e-01] [-5.83529770e-01 -1.49605304e-01 3.05450916e-01 ... 2.32769445e-01 -4.90649134e-01 -4.61701602e-02] ... [ 1.22228488e-01 -2.93648206e-02 -7.58003965e-02 ... 7.47529864e-01 -1.72996409e-02 1.66066706e-01] [-2.22058177e-01 -2.22685169e-02 -1.89091429e-01 ... 4.26366091e-01 8.66683498e-02 -9.55744535e-02] [-6.36111200e-02 1.15376472e-01 -3.17844719e-01 ... 3.41049731e-01 3.81408818e-03 7.08030909e-02]] [[-7.57697672e-02 -2.57093996e-01 -1.18509410e-02 ... -1.53153881e-01 2.79962212e-01 -8.88431743e-02] [-2.31019348e-01 4.31572109e-01 1.15670905e-01 ... 4.66644526e-01 2.00501516e-01 2.31384665e-01] [ 3.77344638e-01 3.46989006e-01 -4.87674996e-02 ... 5.41436553e-01 8.74502733e-02 -1.72892019e-01] ... [-1.40584651e-02 2.65964456e-02 -2.83412337e-01 ... -4.08352941e-01 -1.04996182e-01 1.50159389e-01] [ 1.62066475e-01 7.05668610e-03 -1.56272605e-01 ... -1.52759599e-02 1.27839953e-01 -1.10527977e-01] [ 3.42122614e-01 -5.78968883e-01 -6.00751102e-01 ... 2.31621880e-02 -2.58839756e-01 -5.55118799e-01]]]] [[[[ 2.11292803e-01 4.26031858e-01 -1.59614265e-01 ... -4.82786685e-01 -6.05746984e-01 -1.47749677e-01] [ 8.26826811e-01 6.78226888e-01 -7.15491623e-02 ... 
-6.51626214e-02 7.41249144e-01 -4.87337738e-01] [ 5.64294040e-01 -1.09056540e-01 3.03048100e-02 ... 9.50984180e-01 3.70391011e-01 -4.36428100e-01] ... [ 6.49362624e-01 -6.77524328e-01 -3.78447682e-01 ... 1.11123931e+00 -2.78735548e-01 3.31728548e-01] [ 1.06384695e-01 -4.09920931e-01 -7.30947971e-01 ... 3.59302998e-01 -9.53472793e-01 -2.51464427e-01] [ 1.64358807e+00 -5.24894059e-01 5.87081552e-01 ... 1.65599689e-01 -5.47383606e-01 -1.84303093e+00]] [[ 5.26629686e-01 9.25985813e-01 1.35767937e+00 ... -4.70296353e-01 -1.29484981e-01 4.36562300e-01] [ 1.85992825e+00 3.80352885e-01 8.61258149e-01 ... -3.44794363e-01 -3.81078422e-01 -7.24982679e-01] [-9.66695309e-01 -2.84425229e-01 1.13887303e-02 ... -8.31765592e-01 1.11661375e+00 -3.71129401e-02] ... [ 4.18215483e-01 8.34279656e-01 7.24679828e-01 ... 1.19062805e+00 4.00671870e-01 -9.17314410e-01] [ 6.19880140e-01 8.19524765e-01 -4.96143222e-01 ... -4.15166229e-01 -4.10425216e-01 1.04092097e+00] [ 1.15813002e-01 3.08923393e-01 1.74354628e-01 ... 2.59209841e-01 9.37866151e-01 1.15731403e-01]] [[-2.62590319e-01 -1.29088223e-01 2.24835873e-01 ... -1.00649309e+00 -2.34447807e-01 7.74266720e-01] [-1.05012417e+00 2.60507762e-01 1.74179394e-02 ... 2.14526907e-01 6.73128605e-01 5.69910824e-01] [ 8.65487099e-01 5.25994182e-01 1.60507560e-01 ... -5.78656912e-01 -5.49869597e-01 6.92454278e-01] ... [-3.89429867e-01 1.13117039e+00 -2.89958958e-02 ... -1.46637964e+00 -9.07212436e-01 2.66494900e-01] [-4.09589648e-01 1.83192360e+00 -6.95252419e-01 ... -4.48514849e-01 1.20394230e-01 2.67919660e-01] [-1.15515918e-01 6.44999444e-02 -4.26952749e-01 ... 8.07819843e-01 6.57852232e-01 2.62226820e-01]] ... [[-4.56440330e-01 8.64989638e-01 -1.17571843e+00 ... 5.78613877e-01 6.86369538e-01 1.00004995e+00] [-9.72795039e-02 -9.42492113e-02 3.00088584e-01 ... 2.17623934e-01 1.43160060e-01 3.25227648e-01] [ 2.06141457e-01 -8.80976975e-01 6.16448820e-01 ... 6.93483949e-01 -5.33768356e-01 2.89029568e-01] ... 
[-1.71262354e-01 1.41812265e-01 1.43733764e+00 ... -9.66059208e-01 -7.15277851e-01 3.96382570e-01] [ 1.29449415e+00 1.27460265e+00 -4.51097101e-01 ... -3.91600698e-01 -3.67451251e-01 -1.00566077e+00] [-9.02462229e-02 -5.15067875e-01 -1.78469849e+00 ... -2.46571571e-01 2.32295528e-01 -1.52338970e+00]] [[-6.53447807e-01 -2.01453432e-01 -4.00530457e-01 ... 2.25174740e-01 -4.14709479e-01 -6.10722899e-01] [ 1.04071653e+00 9.28347409e-01 2.97893941e-01 ... 9.47938710e-02 6.22688591e-01 1.38463533e+00] [ 1.20725644e+00 5.19579053e-01 1.34800360e-01 ... 8.66566658e-01 -1.41634369e+00 -8.50390196e-01] ... [ 6.19756244e-03 -8.27266157e-01 -1.05377293e+00 ... 7.38918483e-01 2.90803909e-01 -8.98067236e-01] [-4.02643889e-01 8.76063406e-02 -1.52711675e-01 ... -3.16909403e-01 -9.09844100e-01 -4.42540526e-01] [ 2.59437442e-01 -6.74678028e-01 -7.78844416e-01 ... -1.65865719e-01 -3.67507696e-01 6.09324425e-02]] [[ 2.72341788e-01 -8.92925739e-01 2.52073854e-01 ... -4.75784659e-01 -3.61313581e-01 -3.41584980e-01] [-5.00883162e-01 3.06690484e-01 3.42141122e-01 ... 2.91620698e-02 -3.88441980e-02 2.08243221e-01] [ 5.92118084e-01 1.56700522e-01 -5.71400762e-01 ... 4.19541836e-01 1.52263731e-01 3.60296071e-01] ... [ 1.95256948e-01 -6.54022157e-01 -2.33482242e-01 ... -9.14068818e-02 -5.20851910e-01 -4.59140301e-01] [-1.48873046e-01 -1.19226083e-01 5.15644252e-01 ... 2.61799097e-01 -2.29241326e-01 -8.98431599e-01] [ 9.11973864e-02 3.96214128e-01 1.98857501e-01 ... -1.71026498e-01 1.05249476e+00 4.82859433e-01]]] [[[-1.68990111e+00 1.43110478e+00 9.61795032e-01 ... -1.77271831e+00 6.63815618e-01 -1.14008462e+00] [-2.54000688e+00 -4.13407415e-01 2.87012553e+00 ... 1.50289834e+00 -1.77583671e+00 1.06496230e-01] [-2.33562872e-01 -1.19565141e+00 3.48146677e+00 ... 1.80920911e+00 6.58865154e-01 -3.59974170e+00] ... [ 1.70073032e+00 -5.70143044e-01 -2.72743642e-01 ... -2.61943054e+00 -1.23391652e+00 6.03682101e-01] [ 8.47778320e-01 -2.41532251e-02 -1.48876023e+00 ... 
1.21219265e+00 -1.16826296e+00 -1.66450763e+00] [ 5.28966002e-02 3.61277401e-01 -1.52385449e+00 ... -2.62769055e+00 -1.65436816e+00 1.71601272e+00]] [[ 2.49337935e+00 2.40469947e-01 8.07730138e-01 ... 7.17752397e-01 -1.36238992e+00 3.59926701e-01] [ 5.08422077e-01 7.28382230e-01 3.43938923e+00 ... -3.08226681e+00 -1.35884416e+00 1.72453511e+00] [-3.52081370e+00 -3.09580278e+00 3.27513003e+00 ... 2.27648973e+00 -3.10580999e-01 -3.68670702e-01] ... [ 1.26532018e+00 1.16748190e+00 2.46894813e+00 ... 2.31344557e+00 -1.93763781e+00 -3.54765868e+00] [-1.32178080e+00 -2.09706974e+00 -6.80285394e-01 ... -1.24651873e+00 -1.94858573e-02 -2.61096430e+00] [ 9.00284171e-01 -4.67667055e+00 1.93961430e+00 ... 3.95958066e-01 2.12705684e+00 -1.00778532e+00]] [[ 1.94413650e+00 9.05550778e-01 -1.31807566e+00 ... -5.27887605e-02 -4.37600464e-01 1.23767447e+00] [ 1.92338002e+00 2.74428070e-01 8.06672573e-01 ... 1.97475028e+00 6.96836293e-01 -1.12758882e-01] [-1.09702134e+00 -2.26946115e+00 -7.49071836e-01 ... -8.63030672e-01 -1.20736337e+00 -3.34698915e+00] ... [ 1.79625762e+00 -1.87040365e+00 -1.21086383e+00 ... 3.00795102e+00 -1.09294212e+00 1.73500216e+00] [-2.49246448e-01 2.82156229e-01 6.57203257e-01 ... -1.33150315e+00 -4.89330441e-01 -1.12316024e+00] [ 7.90524960e-01 1.07792377e+00 -1.14659572e+00 ... 2.06671089e-01 -1.39250934e-01 -2.26376247e+00]] ... [[ 9.42947417e-02 1.06450510e+00 -4.04530138e-01 ... -2.51997888e-01 3.85791250e-02 -4.48634595e-01] [ 1.10712850e+00 -1.81182250e-01 -3.35188270e-01 ... 4.24826115e-01 -8.52263987e-01 -1.57890916e+00] [-7.19378963e-02 -1.80508047e-01 -6.33994222e-01 ... -8.58286560e-01 4.81262773e-01 2.63947463e+00] ... [-1.66778076e+00 -2.09725809e+00 -2.49568686e-01 ... 1.67889369e+00 8.67653787e-01 -1.09892118e+00] [ 2.16146231e-01 1.05707216e+00 -3.26781034e+00 ... 1.02990699e+00 1.70938039e+00 -1.52462125e-01] [-3.23013830e+00 6.19412899e-01 3.01792693e+00 ... 
6.54243156e-02 1.36960793e+00 -2.88996291e+00]] [[ 9.38365400e-01 -5.75452745e-01 -3.82376641e-01 ... 3.35822612e-01 -2.73339748e+00 2.39373708e+00] [-1.89824891e+00 -4.47439098e+00 3.62179101e-01 ... -8.18036735e-01 -2.82770842e-01 2.56564951e+00] [-3.20873231e-01 7.11960852e-01 7.01301634e-01 ... 4.60660398e-01 -5.22325575e-01 6.44412100e-01] ... [ 1.99652719e+00 -1.83039474e+00 7.84079194e-01 ... 2.52841139e+00 1.04907739e+00 -6.67121649e-01] [ 2.43622109e-01 2.62668777e+00 -9.37988698e-01 ... -1.96439636e+00 -2.72080004e-01 -3.40730190e+00] [ 7.53012657e-01 3.56473595e-01 2.38588238e+00 ... -6.15951359e-01 5.73796809e-01 9.38216209e-01]] [[ 1.68925285e+00 1.33833206e+00 9.68118429e-01 ... -2.34674788e+00 -1.59903169e+00 1.52687597e+00] [ 1.71903718e+00 1.05079174e-01 1.12102723e+00 ... -1.29182541e+00 -4.93356764e-01 -2.34906960e+00] [ 4.93668020e-01 -3.08007032e-01 -8.72675240e-01 ... 2.39769772e-01 1.89521587e+00 2.08178446e-01] ... [ 8.31001043e-01 -2.75304466e-01 4.43927646e-01 ... 1.62535205e-01 -4.36624193e+00 1.98223984e+00] [-4.59435225e+00 5.57940416e-02 5.35483062e-02 ... -1.28280544e+00 -2.19471717e+00 3.09039545e+00] [-1.15584934e+00 3.95147473e-01 -1.65196049e+00 ... -2.19876719e+00 -1.53129816e+00 -1.17286587e+00]]] [[[-3.63469243e-01 -7.01158702e-01 -2.11317465e-01 ... -3.88607562e-01 3.60169172e-01 -3.47863138e-01] [ 5.15344143e-01 8.06965411e-01 -5.70330843e-02 ... -2.54877005e-02 -7.57926643e-01 -1.79547071e-01] [-1.02600086e+00 -1.10848403e+00 -7.07315385e-01 ... -2.56720722e-01 8.33104372e-01 8.85966241e-01] ... [ 1.27341345e-01 -1.65881708e-01 7.34706342e-01 ... 1.03458810e+00 7.99836665e-02 -7.65693009e-01] [-3.70128155e-01 1.29764274e-01 1.03096628e+00 ... -9.86290425e-02 -2.10834146e-02 1.17858326e+00] [ 8.22477937e-01 -1.00611091e+00 -3.15725314e-03 ... 6.41514361e-01 1.08889110e-01 8.32915246e-01]] [[-2.53478646e-01 -2.22842515e-01 -3.61743957e-01 ... 
6.48451865e-01 1.57169509e+00 -3.04238051e-01] [-4.85322401e-02 -2.73239613e-01 -7.76227424e-03 ... -3.86595935e-01 6.06295466e-01 -1.35021225e-01] [-2.89598644e-01 -3.00799876e-01 3.34897816e-01 ... 3.23948503e-01 -2.98068941e-01 4.02260721e-01] ... [ 2.34844178e-01 7.54877985e-01 -7.54787445e-01 ... -3.27028066e-01 -2.62210757e-01 8.88721645e-02] [ 1.57149649e+00 -7.00432658e-01 6.90356970e-01 ... -1.84240103e-01 2.38545120e-01 4.10092711e-01] [-3.54029447e-01 -2.72976220e-01 3.51884961e-01 ... -5.23766756e-01 -1.37727249e+00 -3.96722466e-01]] [[ 7.18500912e-01 -1.75956741e-01 7.67128229e-01 ... 5.20691693e-01 9.22295451e-01 7.51342118e-01] [-1.38432646e+00 2.83451736e-01 -4.73894805e-01 ... -1.08844233e+00 -3.36231112e-01 7.42350042e-01] [-1.22406058e-01 -6.01930797e-01 -1.32844520e+00 ... 1.79385543e-01 1.00248301e+00 4.86554891e-01] ... [-1.31772295e-01 -3.46693546e-01 7.69061565e-01 ... 8.76222432e-01 3.60263467e-01 -2.73295343e-01] [-8.65738213e-01 -5.09901285e-01 4.80965167e-01 ... 7.76179969e-01 -5.71788430e-01 1.81368798e-01] [ 1.60027122e+00 -1.29948545e+00 -2.13707164e-01 ... 6.46600485e-01 9.08790350e-01 -1.19238511e-01]] ... [[-3.45223784e-01 1.61835289e+00 5.65601230e-01 ... 7.29154825e-01 5.63942850e-01 7.73694217e-01] [ 6.37784779e-01 7.29389489e-01 1.20685840e+00 ... 2.61684597e-01 1.19786298e+00 6.96309149e-01] [-8.24066460e-01 -6.64766312e-01 2.62581706e-01 ... -6.54510498e-01 4.03265774e-01 -5.63497543e-01] ... [-7.83393741e-01 -6.21336520e-01 -6.22307301e-01 ... -3.17998171e-01 1.11211717e+00 -2.05187783e-01] [ 5.81757665e-01 4.33534265e-01 -7.64264539e-02 ... -1.50590253e+00 -1.95503578e-01 2.93355942e-01] [-1.12170756e-01 9.81918514e-01 -6.61125124e-01 ... -1.62470043e-01 -6.36063874e-01 -3.00242066e-01]] [[-2.12243259e-01 -9.54757273e-01 1.84865177e-01 ... -1.22359860e+00 -5.20312600e-02 1.06045771e+00] [-4.18357581e-01 4.46793810e-02 -1.48265457e+00 ... 
-4.22424465e-01 -6.68554246e-01 -8.12820345e-02] [ 6.83442295e-01 6.44976974e-01 4.84153390e-01 ... -4.81640577e-01 -3.02820235e-01 -3.99720706e-02] ... [-4.39286262e-01 3.93578082e-01 8.48581195e-01 ... -6.45217001e-01 9.15286615e-02 7.56250262e-01] [-1.07062131e-01 -5.84223628e-01 -4.11731988e-01 ... 8.80021930e-01 -7.95432568e-01 1.22787721e-01] [ 2.65024513e-01 4.56942409e-01 -2.82260209e-01 ... 2.88564652e-01 1.07933509e+00 -9.44373105e-03]] [[-5.77286363e-01 -4.90685284e-01 8.01851928e-01 ... -7.93416023e-01 5.72640479e-01 -2.18372568e-01] [-7.36985803e-01 -1.71300039e-01 -4.83610451e-01 ... -3.40476751e-01 2.81574130e-01 4.21508700e-01] [ 8.62789333e-01 2.87625641e-01 -5.30753553e-01 ... 7.55489767e-01 8.18458498e-02 -1.15908340e-01] ... [-1.55567750e-01 4.29757178e-01 6.10326350e-01 ... -2.89704144e-01 7.08007991e-01 -9.32564080e-01] [ 2.69684672e-01 -5.71878493e-01 -1.34187788e-01 ... -6.97489858e-01 -3.04987311e-01 -6.89900696e-01] [-4.58007872e-01 -1.20985843e-01 3.53959411e-01 ... 1.24149418e+00 6.88694835e-01 1.70594499e-01]]] [[[-9.23182487e-01 1.72116423e+00 -7.52179265e-01 ... -7.68186212e-01 -2.49914455e+00 -1.47386730e+00] [-2.66546082e+00 -2.36911488e+00 1.47357059e+00 ... 1.62158525e+00 5.15336357e-02 -1.45607567e+00] [-1.14000094e+00 -1.05341530e+00 -4.09249604e-01 ... 2.10766006e+00 6.14736676e-01 1.28510132e-01] ... [ 5.69892526e-01 7.69594491e-01 -1.80277503e+00 ... -5.39558470e-01 -1.11255276e+00 8.97163689e-01] [ 1.48823667e+00 8.52515161e-01 1.20271420e+00 ... -1.29517186e+00 5.04693866e-01 -1.79085398e+00] [ 1.73741579e+00 1.46368074e+00 1.99892211e+00 ... -8.98551643e-01 1.20417130e+00 -3.53195071e-01]] [[-6.65957093e-01 -8.35342586e-01 -3.90216529e-01 ... 8.67650211e-02 1.18452382e+00 -6.68455064e-01] [ 1.11884272e+00 -1.43069434e+00 5.35560250e-01 ... 1.56655228e+00 -2.54039216e+00 3.18746543e+00] [ 1.81283605e+00 8.63294542e-01 -1.56205356e+00 ... 4.41488057e-01 -4.45231795e-01 2.32009023e-01] ... 
[-5.55279136e-01 4.90407981e-02 -1.11445665e+00 ... 1.24940598e+00 -3.43624187e+00 -2.54328942e+00] [-1.67995143e+00 1.05239642e+00 2.49661732e+00 ... 1.05996378e-01 -1.47219837e+00 4.34436679e-01] [-3.73792797e-01 -2.18543005e+00 -1.69294679e+00 ... -5.66180944e-01 3.86429119e+00 2.63528538e+00]] [[-1.25063762e-01 -1.10740244e+00 6.60016000e-01 ... 2.96240985e-01 -1.00838578e+00 -1.82752514e+00] [-6.69077098e-01 -4.77111459e-01 -5.02053499e-01 ... 9.70138431e-01 -1.40357420e-01 1.34230375e+00] [-7.84423232e-01 1.82599652e+00 8.48567128e-01 ... -2.47759789e-01 -9.56863523e-01 -2.97774702e-01] ... [ 2.57027119e-01 3.83412600e-01 8.62384915e-01 ... 1.41098654e+00 -2.32383895e+00 2.96111870e+00] [ 1.26013017e+00 8.70184243e-01 -2.80362868e+00 ... 1.13842225e+00 -1.56847656e+00 2.54610324e+00] [ 1.06963003e+00 -6.11232281e-01 -1.02192771e+00 ... 3.37220848e-01 4.93905805e-02 1.62670326e+00]] ... [[-1.24988818e+00 3.81893441e-02 -2.07226896e+00 ... -1.82135299e-01 1.81542844e-01 1.51788545e+00] [ 7.75063708e-02 -1.34386659e+00 -1.16102922e+00 ... 1.88227117e+00 -6.52660370e-01 8.61245215e-01] [-6.25216603e-01 2.19989753e+00 1.40046585e+00 ... 1.26007652e+00 2.28346929e-01 -2.38547683e+00] ... [-5.51947892e-01 8.79742086e-01 8.48815978e-01 ... -2.73854971e-01 1.68735564e+00 -1.01170564e+00] [ 2.59322786e+00 2.57649970e+00 -1.51186252e+00 ... 7.57162094e-01 3.70515548e-02 -1.29343867e+00] [ 2.48116112e+00 -1.00125265e+00 4.81114179e-01 ... 3.20683748e-01 5.19690633e-01 -2.10620570e+00]] [[ 1.87150627e-01 1.38199890e+00 1.32421350e+00 ... 1.58027983e+00 2.20074558e+00 -3.02766651e-01] [-1.17429292e+00 -1.13058627e+00 -5.77993155e-01 ... -6.14782035e-01 3.79060650e+00 -1.74483180e+00] [ 7.37208843e-01 -2.05492091e+00 -4.07430708e-01 ... -8.50650966e-02 8.94826591e-01 4.93432194e-01] ... [-1.17538726e+00 1.47737944e+00 -7.24931657e-01 ... -2.18371725e+00 -2.94578165e-01 -7.70890057e-01] [-1.35945940e+00 1.10863590e+00 6.89906836e-01 ... 
-8.24831247e-01 2.51812011e-01 3.40630531e-01] [ 5.94014406e-01 -6.37690485e-01 -3.09039444e-01 ... 2.50140262e+00 -1.49058199e+00 -1.13037288e+00]] [[-4.57866222e-01 7.34914124e-01 3.10288873e-02 ... -2.40121889e+00 9.96046290e-02 -1.58844471e+00] [ 1.16035903e+00 -3.85111243e-01 -2.20287514e+00 ... -5.31418145e-01 2.00488305e+00 -3.60972494e-01] [ 1.56818521e+00 -2.35166216e+00 1.79713383e-01 ... 1.43229842e+00 -1.17001325e-01 -1.06842649e+00] ... [ 2.06704780e-01 1.75164115e+00 1.01239584e-01 ... -1.75738502e+00 1.24194801e+00 -1.45525753e+00] [ 6.34304941e-01 8.90412450e-01 -1.31426358e+00 ... 2.36691380e+00 2.87740350e+00 -1.80777752e+00] [-3.43613833e-01 6.41082764e-01 2.06339931e+00 ... -1.65788436e+00 2.07060719e+00 -1.41840053e+00]]] [[[ 3.69240135e-01 2.08801794e+00 2.17330384e+00 ... 7.01653481e-01 1.81176627e+00 9.02473629e-01] [-5.45958471e+00 3.61327553e+00 -6.71842515e-01 ... -2.96240735e+00 9.86019790e-01 5.48964214e+00] [ 3.22351992e-01 -8.18817198e-01 3.03916240e+00 ... 2.54297304e+00 -1.42999554e+00 -6.50708866e+00] ... [ 1.96062589e+00 4.15786934e+00 -1.87195385e+00 ... 1.03508067e+00 -9.53164935e-01 2.25983992e-01] [ 4.05590117e-01 -1.43784970e-01 -1.22818959e+00 ... 5.36347198e+00 -9.55912292e-01 3.02433157e+00] [ 1.97738349e+00 -1.49824953e+00 1.41401970e+00 ... 1.47507918e+00 -1.52259088e+00 2.36518860e+00]] [[ 2.46124363e+00 3.74627084e-01 -1.23202467e+00 ... -2.42948627e+00 -3.35375500e+00 -2.70428896e+00] [ 2.11910629e+00 3.94008100e-01 2.38596058e+00 ... 5.68525672e-01 1.09947312e+00 -2.06814504e+00] [-6.07616961e-01 2.82382178e+00 2.27368927e+00 ... 4.86905766e+00 -2.50875378e+00 3.88726711e+00] ... [ 4.78375047e-01 -4.37156916e+00 -5.73556566e+00 ... -9.18690801e-01 8.00706208e-01 -1.27718234e+00] [-3.17699909e+00 -7.25147581e+00 -1.04030395e+00 ... 1.99296153e+00 2.56034160e+00 -1.25769949e+00] [-4.29014635e+00 2.14206624e+00 -1.01045871e+00 ... 
6.14992619e+00 2.02919650e+00 1.59300721e+00]] [[-9.57698882e-01 -7.40137517e-01 2.24672318e+00 ... -3.01597029e-01 4.54703458e-02 -1.00279617e+00] [ 8.11061203e-01 -1.08124427e-02 -4.59611130e+00 ... -1.71062338e+00 -7.62306601e-02 -3.15734833e-01] [ 1.52136016e+00 -4.62527275e+00 -1.06303346e+00 ... -9.64933634e-01 -9.99074280e-01 1.72027743e+00] ... [-1.15801930e-01 -3.25415641e-01 4.31971264e+00 ... -2.86294556e+00 -4.64493513e+00 2.18331075e+00] [ 5.11500120e+00 -4.57297713e-01 -1.21358383e+00 ... 2.89210856e-01 -2.31332541e+00 8.84729624e-01] [ 2.33020449e+00 3.05429149e+00 1.14215910e-02 ... 6.89935625e-01 -1.89526165e+00 -2.53277969e+00]] ... [[-1.62487817e+00 -1.18208253e+00 -7.57949591e-01 ... -6.15298986e-01 4.91619873e+00 1.28020787e+00] [ 2.24827766e+00 2.09917045e+00 1.68610966e+00 ... 1.26385486e+00 2.33994022e-01 -1.45360470e+00] [ 4.61914062e+00 -3.59754753e+00 1.49898350e+00 ... 2.94785833e+00 -1.90624759e-01 2.13156390e+00] ... [-5.61374998e+00 1.53163695e+00 3.42204642e+00 ... 8.46625865e-02 2.01766944e+00 4.30886984e+00] [ 2.31964779e+00 1.02262795e+00 -2.85719204e+00 ... 1.11185551e+00 1.74649799e+00 1.76733983e+00] [-1.92116654e+00 -9.33167338e-01 -3.25128078e+00 ... -3.64680827e-01 7.88104415e-01 -1.36559343e+00]] [[-3.48066807e+00 6.47560582e-02 -1.55433357e+00 ... -2.31760383e-01 3.39464378e+00 -2.41791677e+00] [ 1.06679022e+00 1.73418090e-01 -1.85988650e-01 ... -3.15617561e+00 4.24380207e+00 -5.48217964e+00] [ 1.17160475e+00 -4.70280361e+00 3.45618546e-01 ... -1.03383899e+00 -2.54780960e+00 -4.21294022e+00] ... [-2.96382397e-01 -6.38697505e-01 2.56449008e+00 ... 4.32612836e-01 -3.58494133e-01 -3.37843442e+00] [-1.44603944e+00 1.40784070e-01 3.36845469e+00 ... 2.73523974e+00 1.71308184e+00 1.87981176e+00] [ 2.24704313e+00 -1.56750262e-01 1.26564384e+00 ... -1.55193889e+00 -2.55293441e+00 -2.22048998e+00]] [[-2.68632978e-01 -2.14016271e+00 -3.61411643e+00 ... 
9.45272446e-01 -3.46974993e+00 5.67399383e-01] [ 1.71745348e+00 -2.68491793e+00 -3.03849173e+00 ... 2.85666013e+00 -1.16457760e+00 -1.65162098e+00] [ 2.60637188e+00 -6.46484280e+00 3.26077104e+00 ... -1.12009680e+00 4.43503475e+00 2.90797353e+00] ... [-1.41818655e+00 3.27914071e+00 -2.87140346e+00 ... 5.35479248e-01 3.68901753e+00 -2.33137035e+00] [ 2.75952315e+00 1.26370788e+00 8.50404873e-02 ... -1.11691904e+00 -4.50101703e-01 5.59058964e-01] [-3.59426558e-01 -7.14070082e-01 -2.81536651e+00 ... -6.47919357e-01 -1.83432698e+00 1.03762794e+00]]] [[[-3.18322986e-01 -1.93721838e-02 3.48951668e-01 ... 9.66002569e-02 2.95488656e-01 -3.65918010e-01] [-1.45123556e-01 -1.12774074e-01 2.33010709e-01 ... 1.93311185e-01 1.28215998e-01 4.51739907e-01] [-3.30150753e-01 5.52919656e-02 -5.26338853e-02 ... -5.33588707e-01 3.23113590e-01 2.31881797e-01] ... [ 2.83856504e-02 -2.62580037e-01 2.22334981e-01 ... 2.19208165e-03 -4.81608570e-01 2.44721159e-01] [-2.86661178e-01 -5.67442298e-01 3.75259221e-01 ... 1.86864406e-01 3.33402865e-02 -7.86713243e-01] [-6.68790281e-01 -7.21275985e-01 -9.22884196e-02 ... -7.11351782e-02 4.68341231e-01 -1.43910378e-01]] [[-1.56282291e-01 -2.01497246e-02 1.86634839e-01 ... -2.84212708e-01 -7.51312673e-02 -2.19695419e-02] [-4.59684521e-01 3.43783259e-01 1.29523128e-01 ... 9.43559408e-02 3.83250594e-01 2.24222943e-01] [-9.74872112e-02 -2.27945283e-01 -1.29428461e-01 ... -3.23623717e-01 9.45697725e-02 2.98251837e-01] ... [ 1.89201683e-01 -2.80640036e-01 3.48935425e-02 ... 2.46053990e-02 4.61429246e-02 2.08767548e-01] [ 7.29945600e-02 4.85477656e-01 -7.59367049e-02 ... 1.59869373e-01 1.57357931e-01 -6.32860243e-01] [-7.01999187e-01 -1.05868101e-01 9.89884585e-02 ... 1.56912822e-02 -7.21174330e-02 -1.02327085e+00]] [[ 3.63725811e-01 3.81276518e-01 4.92673542e-04 ... -1.52775690e-01 -2.27761477e-01 4.89097625e-01] [ 2.13324323e-01 -6.49461448e-01 2.38544777e-01 ... 
4.73929085e-02 5.98621130e-01 2.80831814e-01] [ 8.98037106e-02 2.51694828e-01 -5.18378437e-01 ... 6.16042614e-01 -2.09562838e-01 2.43084371e-01] ... [-3.79357100e-01 4.89932477e-01 7.32910037e-01 ... 2.96876896e-02 2.31590077e-01 -4.84968275e-01] [-4.29460794e-01 3.07062477e-01 -5.30905366e-01 ... 1.32439122e-01 -3.47659051e-01 1.42369837e-01] [-5.07579148e-01 -9.31575149e-02 -2.76484847e-01 ... 9.76017937e-02 9.95565951e-02 -3.74769419e-01]] ... [[-2.71768510e-01 -1.11547537e-01 1.27183139e-01 ... 4.17668670e-01 -9.19574723e-02 -8.01725760e-02] [-6.67190909e-01 -3.18121731e-01 3.31342459e-01 ... -5.71689308e-01 -3.23281705e-01 3.66870284e-01] [-9.57463309e-02 2.80729920e-01 3.41709889e-02 ... -3.45983148e-01 3.22352134e-04 4.71711487e-01] ... [-3.64685357e-01 6.77928627e-02 2.58701652e-01 ... 3.35499585e-01 5.25720239e-01 -1.83929518e-01] [-9.92298305e-01 1.79894432e-01 5.01136422e-01 ... 1.91252127e-01 -3.05976361e-01 -4.80382562e-01] [-3.99685264e-01 -3.17084223e-01 5.57103813e-01 ... 5.17965257e-02 -3.52949053e-02 1.51943967e-01]] [[ 5.80848217e-01 4.14205760e-01 1.40059786e-02 ... -2.01990843e-01 2.06141442e-01 2.71010935e-01] [-2.70394057e-01 9.74480361e-02 -3.31778936e-02 ... -5.55509269e-01 1.60081536e-01 2.78478060e-02] [-2.25711569e-01 -1.15448117e-01 -8.74338597e-02 ... -2.30554491e-01 1.26518562e-01 1.50797561e-01] ... [ 4.35504258e-01 -2.01809138e-01 -4.95096557e-02 ... 1.96978435e-01 -3.52417648e-01 2.69980133e-01] [ 4.96004641e-01 -3.03932160e-01 3.94142121e-01 ... -7.65049279e-01 5.71191162e-02 -4.11591023e-01] [-5.07351495e-02 6.05962165e-02 1.02111198e-01 ... -2.11838916e-01 -4.24746662e-01 -5.92500746e-01]] [[ 2.40966175e-02 2.06903592e-01 -3.35209906e-01 ... 1.21416003e-01 -1.71542853e-01 -4.36584324e-01] [-1.73903793e-01 -8.91458765e-02 2.08117455e-01 ... -2.89607346e-01 -2.17383844e-03 -2.25938395e-01] [ 3.86752814e-01 6.30266786e-01 7.77282059e-01 ... 2.45671526e-01 6.91637635e-01 -4.49876845e-01] ... 
[ 2.67985649e-02 -4.52820241e-01 -3.44669729e-01 ... 6.12860285e-02 -4.82637525e-01 -4.09353405e-01] [-3.06718558e-01 1.57600008e-02 -1.81418419e-01 ... -6.80349171e-02 2.23410740e-01 -3.35393429e-01] [ 2.22815990e-01 2.70455688e-01 6.13577887e-02 ... 2.58671969e-01 4.99448866e-01 -1.45358637e-01]]]] ... [[[[ 5.26263893e-01 1.96897879e-01 -1.60088584e-01 ... -1.23706702e-02 1.02975297e+00 -1.02667785e+00] [-1.10870481e+00 1.64758337e+00 -9.74121630e-01 ... -9.98114407e-01 -5.65448225e-01 -5.52748978e-01] [-2.00113103e-01 2.23389715e-01 -6.98465407e-01 ... 1.98672235e-01 -5.94560325e-01 4.97774303e-01] ... [-1.58435360e-01 -8.39957356e-01 -3.31078708e-01 ... -7.34036565e-01 -4.66707766e-01 1.17039800e-01] [-6.61132693e-01 -2.75790185e-01 4.41003472e-01 ... -1.66364968e-01 -2.89349020e-01 6.60167396e-01] [ 4.24499243e-01 -1.10550141e+00 3.84727269e-01 ... -6.78256035e-01 -1.72034562e-01 -5.65932654e-02]] [[ 3.48237813e-01 -6.69374466e-01 4.48396206e-01 ... 2.15241034e-02 2.16851428e-01 -1.97110027e-02] [-1.77353716e+00 -9.82365727e-01 -2.83147488e-02 ... 3.44777018e-01 8.94981802e-01 -5.60882509e-01] [ 3.56912166e-01 -6.38026834e-01 1.10309649e+00 ... -9.02168155e-01 -3.34486246e-01 1.02083373e+00] ... [-5.61750419e-02 1.23481584e+00 -2.24983364e-01 ... -6.20193958e-01 2.76183724e-01 7.00542629e-01] [ 9.58100855e-02 -1.55835733e-01 1.17179358e+00 ... 8.09981346e-01 1.44332483e-01 8.44835699e-01] [ 1.32568860e+00 -2.66030997e-01 2.96603292e-01 ... -7.92952955e-01 3.56479764e-01 -5.02340615e-01]] [[ 2.36416861e-01 5.14563620e-01 -2.40595281e-01 ... 6.24634884e-03 -7.93531060e-01 1.55014068e-01] [-3.66015613e-01 -5.37824333e-01 1.55773878e-01 ... 2.79950082e-01 -1.82194009e-01 3.86895567e-01] [ 4.56364125e-01 3.52263808e-01 8.56955886e-01 ... -1.82314932e-01 6.22658134e-01 -3.60506892e-01] ... [-1.12077212e+00 -6.03190958e-01 -8.11185181e-01 ... -2.98899949e-01 1.07592214e-02 2.45673925e-01] [-3.52803916e-01 -7.50081092e-02 -8.06435287e-01 ... 
6.65765584e-01 -3.39848846e-01 3.17369342e-01] [ 4.45130855e-01 7.01396763e-01 -4.74125415e-01 ... 4.01775151e-01 -3.79313856e-01 -4.13112998e-01]] ... [[-1.66486688e-02 1.49566770e-01 -6.37439966e-01 ... -5.41523516e-01 -1.27520859e-01 -5.17703354e-01] [ 4.62915510e-01 5.92475832e-02 1.80738902e+00 ... -2.62413412e-01 7.07960963e-01 -1.43405902e+00] [-2.38559142e-01 -2.49328300e-01 -8.85797977e-01 ... 5.94039083e-01 -7.08126724e-02 7.36141384e-01] ... [-2.72857547e-01 1.38071254e-01 -1.77680314e-01 ... -3.26040447e-01 3.52403522e-01 5.68884790e-01] [ 7.44302273e-02 1.34433913e+00 7.96019018e-01 ... -8.17438006e-01 -1.27370453e+00 9.63750109e-02] [ 1.42955387e+00 -7.74899542e-01 5.18194973e-01 ... -2.78110623e-01 3.20281625e-01 -2.91419059e-01]] [[-1.14000702e+00 9.14829135e-01 -2.62034237e-01 ... 1.49778402e+00 -1.38038099e+00 9.39415812e-01] [-1.62297297e+00 -1.90294102e-01 -1.58628738e+00 ... -6.74031198e-01 3.71061951e-01 -6.56790257e-01] [-1.15594780e+00 -1.15744837e-01 5.44256032e-01 ... -1.42311215e+00 -2.00305507e-01 8.96664858e-01] ... [ 3.28364298e-02 -3.73703003e-01 3.17531317e-01 ... -7.44602859e-01 1.10164952e+00 -8.91056359e-01] [-5.28550029e-01 -4.68084216e-01 7.61397541e-01 ... 3.08225572e-01 7.04746068e-01 9.90837887e-02] [ 7.59454072e-02 8.22527334e-02 -1.64553314e-01 ... -7.04146922e-01 -1.12044215e-01 -1.35317278e+00]] [[-1.05795121e+00 -5.98126233e-01 -4.47704494e-01 ... -3.85535002e-01 -2.00969887e+00 -1.40886629e+00] [ 5.81804931e-01 1.26622751e-01 1.81599140e-01 ... -1.63785201e-02 -5.02824128e-01 4.23311174e-01] [-3.56512442e-02 1.14175570e+00 -2.62716442e-01 ... -2.44658619e-01 -9.58535373e-01 -2.39014123e-02] ... [-1.15673602e+00 -9.05703604e-02 -9.37307417e-01 ... 4.71316308e-01 -3.73815119e-01 -4.59101707e-01] [ 7.69328415e-01 1.20392561e+00 3.08241099e-01 ... 4.41001922e-01 -8.69104415e-02 5.98207176e-01] [ 3.47453475e-01 4.88417536e-01 1.05239391e+00 ... 
-1.99106559e-01 -2.22537786e-01 -1.62692890e-01]]] [[[-1.12812889e+00 9.77403879e-01 5.25137305e-01 ... -2.43949366e+00 -3.99683118e+00 1.56968105e+00] [-1.47825167e-01 1.92139268e-01 -1.31408453e+00 ... 1.59223688e+00 -4.81128544e-01 1.21466136e+00] [-2.69702530e+00 2.00210643e+00 3.08623409e+00 ... 2.36763096e+00 3.14440936e-01 5.44745684e-01] ... [-3.33386749e-01 -2.46973801e+00 9.59379300e-02 ... 1.31089556e+00 -2.47581601e+00 7.29398429e-01] [ 1.01367486e+00 1.11623240e+00 -1.69515789e+00 ... 1.02376556e+00 -7.28700459e-01 2.17967415e+00] [-4.12067080e+00 -4.15958166e-01 1.33211100e+00 ... 3.15711665e+00 2.53131151e+00 1.12337124e+00]] [[-2.06734657e+00 3.67254853e-01 3.82203245e+00 ... 1.09554982e+00 -1.01046932e+00 -1.19623673e+00] [ 1.96306252e+00 -6.94212615e-01 2.44714475e+00 ... -6.05833530e-01 -1.93316221e+00 9.26791728e-01] [ 4.72256422e-01 -1.11480236e+00 -3.84755433e-01 ... 7.85232067e-01 1.07979429e+00 -1.37636936e+00] ... [-1.45189047e-01 1.88680455e-01 -1.09869100e-01 ... 1.07224393e+00 2.69270611e+00 1.75082195e+00] [ 2.97287911e-01 -8.58963072e-01 1.65858340e+00 ... -1.59338057e+00 -1.67330265e-01 -7.61295557e+00] [-1.51659691e+00 -8.05749178e-01 -8.79064381e-01 ... 1.12298489e+00 1.87308884e+00 5.51654220e-01]] [[ 2.48881936e+00 -7.09071040e-01 -2.31515813e+00 ... 2.56421447e+00 -7.35325694e-01 -2.31975183e-01] [ 3.25879097e+00 3.10108364e-01 3.29570317e+00 ... -2.28824115e+00 1.56576538e+00 -6.69592083e-01] [ 1.13026369e+00 -1.36449218e+00 -1.24289012e+00 ... -7.35077083e-01 2.08659172e+00 2.21238184e+00] ... [-9.83548105e-01 -2.54422331e+00 -2.64493138e-01 ... 3.37535310e+00 2.61138022e-01 -5.01451433e-01] [-2.52918649e+00 -1.97858644e+00 -6.20110691e-01 ... -3.17846656e+00 5.65341115e-01 2.23459631e-01] [ 5.19480348e-01 -1.81466150e+00 2.04831934e+00 ... 2.21507564e-01 -1.73483360e+00 5.98492861e-01]] ... [[ 1.66035935e-01 -3.15410048e-01 -4.74363923e-01 ... 
1.48177433e+00 1.13585591e+00 -7.40551293e-01] [ 7.84206033e-01 2.30272031e+00 2.44817805e+00 ... -8.57148886e-01 -1.92352128e+00 1.28738856e+00] [-2.81043386e+00 6.02867484e-01 1.30167294e+00 ... 2.25104794e-01 4.91847545e-01 -3.20118046e+00] ... [-1.70185018e+00 2.37114024e+00 -1.83597088e+00 ... 1.45200777e+00 1.38081813e+00 -5.68747759e+00] [ 1.57299459e+00 -2.30056334e+00 1.07253361e+00 ... 1.46689045e+00 -1.89900053e+00 -3.17604709e+00] [ 2.21024489e+00 2.23265767e+00 1.06359589e+00 ... 6.46622479e-01 5.83421528e-01 1.25931048e+00]] [[ 1.50959456e+00 -3.78473020e+00 -2.35799551e+00 ... 1.09315431e+00 -5.34190476e-01 -5.61697006e-01] [ 1.50023746e+00 -1.13715374e+00 -3.84403086e+00 ... 1.23184454e+00 6.19959831e-01 2.73855400e+00] [ 2.17307043e+00 1.05137718e+00 3.17488521e-01 ... -1.24947977e+00 -2.75458717e+00 -1.16256094e+00] ... [-9.98913050e-01 -1.82478571e+00 4.78408724e-01 ... -4.70425427e-01 -5.26536047e-01 -9.47061479e-01] [-4.68124419e-01 3.26001334e+00 1.73124135e+00 ... 6.08131826e-01 2.68747139e+00 1.28735042e+00] [-2.61006951e+00 2.07639003e+00 1.72323334e+00 ... 4.05650473e+00 6.34152889e-01 -1.04230773e+00]] [[-1.40424562e+00 -1.66464078e+00 2.06474853e+00 ... -3.92908037e-01 1.35563135e+00 3.34747338e+00] [ 4.46825206e-01 -7.06275046e-01 -1.40126276e+00 ... -2.35914111e-01 6.38963819e-01 1.54599094e+00] [ 2.23506141e+00 3.38519990e-01 -1.23463726e+00 ... 1.50089264e+00 -1.09338880e+00 -3.94743323e-01] ... [ 1.61568272e+00 -6.12960517e-01 -3.09519434e+00 ... -2.18881533e-01 3.46489143e+00 -1.95823640e-01] [ 1.08891344e+00 7.12017179e-01 5.82918227e-01 ... -1.85369265e+00 2.28777432e+00 1.53098488e+00] [-2.14876580e+00 -2.63719726e+00 -4.21170473e-01 ... 1.88263261e+00 -9.52865541e-01 -2.28183413e+00]]] [[[ 3.81340027e-01 -1.70010462e-01 -7.67178714e-01 ... 4.49001014e-01 -4.47080657e-02 -8.05019081e-01] [-1.06818840e-01 6.87860791e-03 -3.65744084e-01 ... 
4.67583358e-01 6.66847706e-01 -9.37929749e-03] [ 2.81116843e-01 -7.99472988e-01 3.70533854e-01 ... -8.79834235e-01 2.24862337e-01 5.54323256e-01] ... [ 1.81102961e-01 -2.49919459e-01 -6.28014624e-01 ... -9.02279466e-02 -1.54205933e-01 -4.00615297e-02] [ 3.14203531e-01 -6.50086030e-02 5.92179298e-01 ... 2.73465309e-02 2.87506524e-02 1.10891312e-01] [-1.93220824e-01 -6.60211921e-01 -1.70932308e-01 ... 4.86094385e-01 -3.02647892e-03 8.59670117e-02]] [[-2.30723560e-01 -1.89169037e+00 3.09754372e-01 ... 1.09314466e+00 -2.29470551e-01 -7.84423828e-01] [-9.39347208e-01 2.23398700e-01 -2.25046173e-01 ... 4.09875065e-01 -6.85242236e-01 -1.02066028e+00] [ 1.09051447e-02 2.69525647e-01 9.19248819e-01 ... -1.10570118e-02 -5.30950427e-01 -1.41500568e+00] ... [ 8.98673773e-01 5.48180938e-01 -3.23656499e-01 ... -9.84468341e-01 1.35745871e+00 4.36668247e-01] [-3.38378549e-02 -3.37158769e-01 -1.92575365e-01 ... 4.87955809e-01 1.50220215e+00 1.11390911e-01] [ 5.44113159e-01 6.47518575e-01 -6.64994180e-01 ... -3.43013942e-01 -2.82787122e-02 -6.40039921e-01]] [[ 7.16144800e-01 8.05451810e-01 1.09760773e+00 ... 7.64625967e-01 -4.80142593e-01 -7.47391224e-01] [ 5.37006259e-01 1.61505550e-01 1.29477155e+00 ... -2.92479217e-01 1.11851048e+00 8.19465101e-01] [-1.09437025e+00 2.11700320e-01 2.22599395e-02 ... -7.67765760e-01 -1.27922308e+00 1.72132209e-01] ... [-1.09810245e+00 1.33431545e-02 -9.17066753e-01 ... 7.78686821e-01 -7.25745440e-01 7.49145523e-02] [-1.17106223e+00 4.79769826e-01 -1.37757897e+00 ... -5.36584258e-01 1.02145091e-01 1.32482803e+00] [-8.20504904e-01 3.55180651e-01 3.25771928e-01 ... 1.75782844e-01 -8.53984714e-01 8.91763270e-01]] ... [[-1.63348362e-01 -6.82318270e-01 -9.67794716e-01 ... -6.71518922e-01 -2.75536835e-01 4.10515338e-01] [ 9.70535934e-01 1.74037898e+00 -6.76147521e-01 ... -7.06933737e-02 9.77456927e-01 7.24865496e-01] [-2.30992824e-01 -1.19250149e-01 2.85992891e-01 ... -6.60837889e-01 7.93243408e-01 7.88096666e-01] ... 
[ 3.75189513e-01 5.12174368e-01 -2.27408215e-01 ... -1.27522480e+00 -3.60336900e-01 -5.63247204e-01] [-2.09341556e-01 2.65262306e-01 1.20836639e+00 ... 6.89400971e-01 -4.97827791e-02 -1.34610605e+00] [-7.01448798e-01 1.32199943e+00 -2.89957732e-01 ... -9.39534843e-01 -2.71693945e-01 -1.42619860e+00]] [[ 1.48744062e-01 1.26119733e-01 -6.91864789e-01 ... -6.21791303e-01 -1.22703552e-01 -4.55558807e-01] [-1.87655408e-02 -6.81903720e-01 -6.84591293e-01 ... -1.92103013e-01 3.32923979e-01 1.26309741e+00] [ 2.36997902e-01 -3.21371526e-01 1.04070258e+00 ... -8.84328067e-01 -1.51702255e-01 -2.01641232e-01] ... [ 1.84396756e+00 -7.41460800e-01 -8.41871381e-01 ... 4.28883344e-01 -1.59239277e-01 1.24224186e+00] [-6.60768688e-01 -9.56544936e-01 3.55473943e-02 ... -4.57851768e-01 2.31837139e-01 -6.04722917e-01] [-1.99761555e-01 7.27130115e-01 -6.37858331e-01 ... 8.50320578e-01 3.13666701e-01 1.22597051e+00]] [[ 7.86759377e-01 3.72349292e-01 8.28178525e-01 ... -2.34382804e-02 7.86300659e-01 3.09274137e-01] [-2.79378116e-01 3.97378623e-01 -1.57171175e-01 ... 7.97630474e-02 -1.08565176e+00 3.43244642e-01] [ 1.62853628e-01 -8.73047352e-01 -1.36147916e+00 ... 5.45143962e-01 -4.64882553e-02 -1.29279733e+00] ... [-1.26864719e+00 -8.60037267e-01 -9.40818310e-01 ... -6.14743292e-01 1.00375175e+00 -2.12496161e-01] [ 4.81130593e-02 -4.69213016e-02 1.11807418e+00 ... 1.95082158e-01 1.43049693e+00 4.14768636e-01] [ 8.01494777e-01 -3.84236038e-01 -1.67326018e-01 ... -7.28156626e-01 2.07608491e-01 6.60965919e-01]]] [[[-2.44067788e+00 -5.72573543e-01 1.80041611e-01 ... 1.26546764e+00 2.84205461e+00 -1.02236116e+00] [ 9.85740244e-01 -1.05150473e+00 -2.10190725e+00 ... -3.86948967e+00 -6.34168863e-01 -1.87167466e+00] [-4.60722409e-02 1.27357042e+00 1.19230640e+00 ... 2.31299901e+00 2.50661403e-01 7.37161458e-01] ... [ 1.72027099e+00 7.77815104e-01 -1.41251907e-01 ... 6.37981474e-01 4.87702656e+00 -9.70224082e-01] [-5.27526736e-01 7.12047517e-01 2.17643142e+00 ... 
1.84927821e-01 1.26131773e+00 1.18595409e+00] [-3.80971551e-01 -7.16575742e-01 -3.41943920e-01 ... 6.15343630e-01 -9.22217891e-02 -2.11181378e+00]] [[ 1.29156625e+00 7.94228017e-01 -2.76801133e+00 ... 7.32422292e-01 6.46554053e-01 -3.74765754e-01] [-3.67761517e+00 -5.37532568e-01 3.42782468e-01 ... 2.05189204e+00 -5.83326399e-01 4.36064482e-01] [ 1.49918944e-01 -2.64960051e-01 -9.13550079e-01 ... -9.22275066e-01 -5.39787889e-01 4.20341349e+00] ... [-2.30035925e+00 2.15846610e+00 -5.30744255e-01 ... -1.27526355e+00 6.22792065e-01 2.20406342e+00] [ 1.09882712e+00 -4.66811687e-01 7.61673987e-01 ... -2.06942010e+00 -1.62827671e-01 -8.40685070e-01] [-2.27777672e+00 -1.58253729e+00 2.06689858e+00 ... 1.96826732e+00 2.45385385e+00 -2.17330098e+00]] [[-5.67465246e-01 -1.66641369e-01 -1.55776953e-02 ... -1.10600674e+00 -7.37441555e-02 -1.15201938e+00] [ 6.78897798e-02 1.06157458e+00 -2.47601843e+00 ... 3.25131083e+00 9.84496117e-01 4.29229528e-01] [-3.37007380e+00 -8.37439060e-01 -1.89283803e-01 ... 4.82399970e-01 1.31936729e+00 -1.51010370e+00] ... [-6.18390024e-01 4.71331656e-01 -4.24321860e-01 ... 2.31305337e+00 -2.19274473e+00 3.93357724e-02] [ 3.75871211e-01 -5.32573879e-01 1.76053894e+00 ... -1.11245513e+00 1.58598155e-01 1.25184333e+00] [-8.88774469e-02 1.26605117e+00 -9.80180055e-02 ... 3.05991024e-01 9.15528536e-02 3.77607870e+00]] ... [[-2.43011460e-01 2.40331907e-02 -1.85400504e-03 ... 1.82720578e+00 -2.09277081e+00 -2.48079729e+00] [ 3.72813672e-01 2.30193630e-01 5.74989557e-01 ... -2.12243629e+00 -1.87584817e+00 8.93304527e-01] [-7.64086097e-02 -3.88878763e-01 6.67136848e-01 ... 2.41857886e-01 -1.19485295e+00 7.78295815e-01] ... [-1.09653902e+00 -2.80811965e-01 1.39491153e+00 ... 1.64795804e+00 1.80804718e+00 2.65778732e+00] [-1.43128598e+00 -1.61286211e+00 8.47513616e-01 ... 1.80577147e+00 -2.96218902e-01 -2.25836232e-01] [ 1.69150424e+00 1.13714091e-01 -2.25996995e+00 ... 
-2.85643876e-01 3.45666289e-01 7.86929429e-01]] [[-1.94753945e+00 4.63656843e-01 -5.28344393e-01 ... 3.80519368e-02 4.83269989e-01 -1.99393249e+00] [-1.42459011e+00 -9.22461748e-01 -1.29738081e+00 ... 1.15525448e+00 -1.99651495e-01 -6.51340306e-01] [-1.45762396e+00 1.44751012e+00 1.73223865e+00 ... -1.10135937e+00 -5.44121742e-01 5.59990406e-01] ... [-2.37780660e-01 4.48315650e-01 3.34201187e-01 ... -2.44346619e-01 7.75524318e-01 9.95213911e-02] [ 9.29157615e-01 8.54230642e-01 2.34049201e+00 ... -8.30527663e-01 -2.50170231e+00 -1.84307420e+00] [ 1.10258329e+00 -6.95095301e-01 1.77176797e+00 ... -5.96828103e-01 6.17481805e-02 -8.36542428e-01]] [[-7.28697851e-02 6.25781775e-01 1.24869502e+00 ... -1.13975143e+00 1.67530012e+00 -1.80304241e+00] [ 1.63055897e-01 6.94117427e-01 1.28786087e-01 ... 1.29056048e+00 -1.06820941e+00 1.45441854e+00] [ 1.42860934e-01 -1.67987549e+00 -9.38898742e-01 ... 7.68469870e-01 1.19719356e-01 5.49688458e-01] ... [-5.99710010e-02 9.33239222e-01 -9.09599662e-01 ... -1.05108269e-01 -2.88211871e-02 2.39838839e-01] [ 1.03628862e+00 6.48976713e-02 1.70285106e+00 ... 1.35164607e+00 -3.51173830e+00 8.57088745e-01] [ 1.04900968e+00 -3.34199667e+00 -2.83839369e+00 ... 1.58226520e-01 3.59957647e+00 3.41322994e+00]]] [[[-6.40507221e-01 1.98484802e+00 -1.53670394e+00 ... 3.56318235e-01 -1.92671657e+00 2.37114191e+00] [-2.73961854e+00 1.34786093e+00 -2.40807104e+00 ... 3.06246138e+00 1.20359492e+00 3.53490496e+00] [-1.46095562e+00 4.96551841e-02 -1.63020945e+00 ... -2.48222423e+00 1.33999062e+00 4.87796068e+00] ... [ 1.11693360e-01 -1.40536916e+00 3.49294853e+00 ... -1.11315823e+00 3.13211894e+00 2.70982027e+00] [ 2.94275188e+00 1.95141077e+00 -1.54900515e+00 ... 1.94010377e+00 1.21124446e+00 2.45999479e+00] [-2.48333549e+00 3.96617150e+00 -7.83717012e+00 ... 2.73840737e+00 -4.35552180e-01 1.11638987e+00]] [[ 1.62395024e+00 1.51637316e-01 4.18240398e-01 ... 
1.46682644e+00 -5.17139959e+00 2.75784116e-02] [-2.87529826e+00 -1.15803897e+00 3.26022536e-01 ... 5.13002157e+00 2.63445592e+00 -2.53838015e+00] [-2.75866961e+00 -1.45001197e+00 9.17406261e-01 ... 1.94439507e+00 -1.40636957e+00 2.19243288e+00] ... [ 4.76268482e+00 -3.83379221e-01 6.92459196e-02 ... -1.05744088e+00 6.80988491e-01 -1.85729891e-01] [ 7.91553795e-01 5.94624233e+00 9.43979025e-01 ... -2.04141068e+00 6.30226851e+00 -1.88832387e-01] [-1.86094034e+00 3.08166337e+00 1.01107562e+00 ... 5.05915701e-01 1.21867526e+00 -7.72254944e+00]] [[ 5.95311999e-01 4.56420720e-01 -4.90156710e-02 ... -7.91848123e-01 1.21694684e+00 4.18611956e+00] [ 6.36214435e-01 -1.05700243e+00 3.08015418e+00 ... -1.27580047e+00 1.56550980e+00 -1.90240085e+00] [ 8.98216903e-01 2.17117572e+00 2.63835740e+00 ... 1.01970041e+00 -2.00904369e+00 1.09490788e+00] ... [-5.52579463e-01 5.00978565e+00 -6.30314291e-01 ... 5.28674275e-02 -1.02382886e+00 1.05978712e-01] [-1.97925198e+00 -3.16571498e+00 -1.46937060e+00 ... 1.00647044e+00 -7.79640377e-01 2.03788137e+00] [-5.67796707e-01 -1.25284541e+00 -4.18664742e+00 ... 2.39621472e+00 1.41921079e+00 -4.91938782e+00]] ... [[ 3.17466408e-01 -2.58210611e+00 -1.97219515e+00 ... 4.19058895e+00 -1.78989792e+00 -2.69582605e+00] [ 3.05093193e+00 9.14763808e-01 3.24186110e+00 ... -9.43423450e-01 3.01016003e-01 1.36620295e+00] [ 1.56009758e+00 6.90132201e-01 -2.06866479e+00 ... 4.37851667e+00 -2.09465432e+00 -1.84517729e+00] ... [-1.81928301e+00 -9.43133891e-01 -1.17457008e+00 ... 1.60254335e+00 -5.35255909e+00 -1.95279694e+00] [ 9.53166544e-01 3.58453107e+00 -1.38037235e-01 ... -2.57441759e+00 2.36608052e+00 5.99132538e-01] [-3.86979175e+00 -2.66771388e+00 -3.54923272e+00 ... -2.54042244e+00 -2.95575380e+00 2.37675858e+00]] [[ 3.12901664e+00 -1.03960145e+00 -1.49659216e+00 ... -3.01600528e+00 2.63781595e+00 -4.64909124e+00] [ 1.40339005e+00 -1.86974502e+00 9.46584404e-01 ... 
5.14730024e+00 -3.37761664e+00 -2.69524407e+00] [ 4.63957024e+00 -1.83952212e+00 3.91219974e-01 ... 8.08831394e-01 1.32521713e+00 3.81365085e+00] ... [ 1.00155449e+00 -8.20628166e-01 4.67307615e+00 ... 1.23293233e+00 -5.35220325e-01 9.38638985e-01] [-6.23152733e-01 3.22954178e+00 -1.11465681e+00 ... 9.57914829e-01 3.26262927e+00 -2.95411992e+00] [-1.79520655e+00 2.11449003e+00 6.21100092e+00 ... -2.36168766e+00 6.98314667e-01 2.94307876e+00]] [[ 1.61444411e-01 1.04748905e+00 -4.53461313e+00 ... -5.52626073e-01 -5.64122772e+00 -4.54103899e+00] [ 5.32719076e-01 -3.22114635e+00 -1.50677633e+00 ... 1.43060565e+00 -8.32787812e-01 2.14687419e+00] [-5.74633121e-01 -2.96545935e+00 -1.26183522e+00 ... -3.05279446e+00 -2.60362816e+00 -1.47541106e+00] ... [ 9.25493062e-01 -2.20478797e+00 -1.47848308e+00 ... -9.21097457e-01 -9.67174888e-01 5.31304550e+00] [-1.13554096e+00 -2.23986173e+00 2.93712825e-01 ... 2.55772978e-01 9.56033587e-01 -2.97944546e+00] [ 6.18695319e-01 1.79989636e+00 4.53477287e+00 ... -3.47139329e-01 1.03645146e+00 -7.98902512e-01]]] [[[ 3.64383347e-02 -9.38137025e-02 -3.50047126e-02 ... 1.52208477e-01 -6.08374715e-01 -1.66796729e-01] [-4.45551425e-01 -2.72112608e-01 -2.16829136e-01 ... -2.59835392e-01 -3.65382552e-01 6.57196343e-02] [ 1.11376680e-01 4.55096662e-01 -1.12417474e-01 ... 8.46300572e-02 -1.41330823e-01 -2.80273724e-02] ... [-9.82637033e-02 -3.54261607e-01 4.27941322e-01 ... -7.34349107e-03 1.68705568e-01 -1.30430594e-01] [ 1.19220220e-01 -4.56557065e-01 2.19902113e-01 ... -2.01863915e-01 3.59503716e-01 2.92900562e-01] [-5.46169840e-02 2.61940151e-01 4.49893214e-02 ... -4.69321668e-01 -6.90746307e-01 1.66533589e-01]] [[ 1.14679851e-01 -5.20014949e-03 -9.48388800e-02 ... 1.58277620e-02 -3.17689717e-01 -5.10111153e-02] [ 3.31550509e-01 4.86116290e-01 -2.27384254e-01 ... -7.86641315e-02 -3.52550477e-01 -2.27082804e-01] [-4.02607679e-01 5.42767579e-03 1.63430855e-01 ... -6.97896816e-03 -4.50190902e-01 -1.24739736e-01] ... 
[-5.10002911e-01 5.66686392e-01 3.79423089e-02 ... 4.18327488e-02 -2.68899053e-01 -1.48910031e-01] [-1.89964816e-01 1.27705082e-01 6.02807663e-02 ... -1.19494595e-01 -1.26323834e-01 -6.62516356e-01] [-2.05791630e-02 3.68763655e-01 -3.32070664e-02 ... 3.90487194e-01 3.20077866e-01 -2.77185231e-01]] [[-5.23580372e-01 -4.86867070e-01 4.20714207e-02 ... 5.93248546e-01 5.27878031e-02 5.46025150e-02] [ 9.33829248e-02 4.20603007e-01 6.24799132e-02 ... -1.98855698e-02 1.85803503e-01 2.98716158e-01] [-1.79127708e-01 7.95278966e-01 -1.90113798e-01 ... 7.80385256e-01 7.91636258e-02 -3.95285517e-01] ... [ 3.84822458e-01 1.45186037e-01 3.11208636e-01 ... -1.10925168e-01 3.30098160e-03 8.18026531e-03] [ 2.44408637e-01 -2.66869873e-01 2.40543455e-01 ... -3.04585963e-01 2.88566709e-01 2.65721660e-02] [ 5.75044990e-01 -5.18985868e-01 1.18851580e-01 ... -2.74124146e-01 2.33775556e-01 -3.19799155e-01]] ... [[-3.67459565e-01 5.47630548e-01 5.87597966e-01 ... -1.89608276e-01 -3.01697522e-01 2.80875355e-01] [-1.55265808e-01 2.30767634e-02 -6.81199431e-01 ... -3.12968016e-01 -7.47253820e-02 -2.32874915e-01] [ 2.07189769e-01 -1.45733953e-01 3.76984954e-01 ... 4.03932124e-01 -4.20913212e-02 3.31601202e-01] ... [-2.80736834e-01 -4.55272973e-01 2.52890527e-01 ... 5.50208502e-02 5.87631464e-01 -4.64128219e-02] [ 1.39918119e-01 7.52492100e-02 -6.64897487e-02 ... 5.12603343e-01 -4.22729664e-02 -2.23891929e-01] [-2.06086904e-01 -1.27468660e-01 1.20652601e-01 ... 2.32939385e-02 -9.25049961e-01 -2.92097867e-01]] [[-2.03447461e-01 -4.43341583e-01 -3.30724269e-02 ... -1.54695570e-01 1.78559229e-03 3.47665697e-01] [-4.37750131e-01 -1.80394515e-01 3.46099406e-01 ... 8.67876232e-01 -7.62657523e-01 5.36621451e-01] [ 2.93271661e-01 2.42385656e-01 1.32844988e-02 ... 2.68780261e-01 8.88379514e-02 -7.25638866e-01] ... [-3.13151985e-01 1.64931431e-01 -8.75585377e-02 ... -1.17266022e-01 -4.10188675e-01 2.98846602e-01] [-3.21234837e-02 -2.04221189e-01 -1.61787197e-02 ... 
-1.15398325e-01 -9.47353840e-02 1.41952753e-01] [ 4.39632654e-01 -3.92175794e-01 4.30327691e-02 ... -5.15439093e-01 -2.28126332e-01 -1.02456078e-01]] [[-3.46758179e-02 -4.74861488e-02 4.29922342e-01 ... 3.20971668e-01 -1.52276218e-01 -1.17893070e-01] [-4.52812254e-01 -1.61998987e-01 3.13089192e-02 ... 1.87476818e-02 5.63820601e-02 -3.51893187e-01] [ 3.07408631e-01 -5.34561574e-02 5.79851031e-01 ... 6.06307201e-02 -1.48169138e-02 7.46676773e-02] ... [ 9.52993184e-02 5.04695237e-01 2.30446830e-01 ... 5.16560853e-01 -1.73216254e-01 1.57572906e-02] [-5.12738764e-01 -3.27447474e-01 -2.07238078e-01 ... -2.14654222e-01 -2.13422850e-01 -1.08156487e-01] [ 3.17817688e-01 7.94174224e-02 -5.33465505e-01 ... -2.59187251e-01 -1.06561594e-02 -7.46401489e-01]]]] [[[[-1.22147393e+00 -3.72514389e-02 -9.32368934e-01 ... 2.18143687e-01 -6.25373498e-02 3.07865679e-01] [ 8.30822706e-01 1.34603059e+00 -7.09034503e-01 ... 1.10477470e-01 1.90679416e-01 2.01761797e-01] [-2.00128496e-01 -3.57172549e-01 -3.78876254e-02 ... 3.56841147e-01 -1.19613588e+00 6.04141235e-01] ... [ 3.68370801e-01 1.52813405e-01 -4.18179363e-01 ... -6.52635470e-02 5.50926924e-01 3.48748624e-01] [ 4.50098038e-01 4.95463014e-02 9.99191046e-01 ... -4.84602749e-01 1.20845094e-01 8.16501915e-01] [-8.17082152e-02 -1.03092983e-01 -1.17733486e-01 ... -5.61516033e-04 -1.11424085e-02 1.49145707e-01]] [[ 7.70809710e-01 -1.25541711e+00 -3.31866533e-01 ... 1.60934985e+00 -5.98030746e-01 5.97891331e-01] [ 9.32843029e-01 5.67772806e-01 -1.36429429e+00 ... 1.07564378e+00 -1.57803965e+00 -2.55207747e-01] [ 5.45782089e-01 5.54037392e-01 -2.56852150e-01 ... -1.02179654e-01 -3.59201759e-01 4.46876675e-01] ... [-1.09935053e-01 -8.66691291e-01 1.19078481e+00 ... -1.02453876e+00 1.08416237e-01 -2.93961346e-01] [ 4.25558716e-01 7.89042294e-01 3.43479156e-01 ... 1.89639300e-01 -1.63982677e+00 2.12455571e-01] [ 8.04005206e-01 -1.95955932e-01 3.35646397e-03 ... 
-3.78002733e-01 2.45364204e-01 9.75894034e-01]] [[-9.81759608e-01 -8.18122745e-01 5.23587584e-01 ... -4.28673446e-01 9.85211879e-02 -2.33670637e-01] [ 6.19225442e-01 8.67347300e-01 8.57305706e-01 ... 1.91582531e-01 -2.60394990e-01 -1.12818646e+00] [ 8.61718953e-01 1.01295419e-01 3.40908021e-01 ... 4.54275161e-01 -5.87654054e-01 -4.05159146e-01] ... [ 8.10511827e-01 3.68517578e-01 -1.21866608e+00 ... -1.03672338e+00 1.08573902e+00 -1.23110521e+00] [ 1.06534612e+00 -1.35457301e+00 1.00324869e+00 ... 8.53100181e-01 -1.12736559e+00 5.55649921e-02] [-5.54432988e-01 4.06381786e-01 2.41408184e-01 ... -1.59824061e+00 -5.41386604e-01 -7.46852532e-02]] ... [[ 7.12910891e-01 -4.63167429e-01 -4.49944496e-01 ... -3.77711147e-01 2.55546212e-01 -7.66922235e-01] [ 2.86902618e-02 -2.88964361e-01 7.77215660e-01 ... 6.84638679e-01 -1.13421045e-02 -8.47600162e-01] [ 7.30569005e-01 -7.06890881e-01 -8.07366073e-01 ... 9.86716807e-01 7.42116630e-01 8.94054472e-01] ... [ 6.77842200e-01 9.84348118e-01 1.71029598e-01 ... 1.19543880e-01 1.34913683e-01 1.35999906e+00] [ 1.09886014e+00 -1.43845022e+00 -1.82276726e-01 ... 4.27300215e-01 -5.49840808e-01 1.80336937e-01] [ 1.55363798e+00 8.27057004e-01 -1.67403772e-01 ... 9.72833633e-01 -4.13621545e-01 3.57934654e-01]] [[ 2.83557534e-01 1.57472163e-01 6.41280636e-02 ... -1.15164816e-01 4.47426707e-01 -6.47650480e-01] [-2.25785658e-01 5.79727590e-01 4.01474893e-01 ... 5.10006726e-01 1.32153749e+00 1.43963201e-02] [ 4.70017314e-01 -3.38678509e-01 2.42599979e-01 ... 4.50663120e-01 -6.68103039e-01 -6.74679458e-01] ... [-3.13809037e-01 3.16161662e-01 -4.53682207e-02 ... -2.88019832e-02 -1.78398505e-01 3.33443761e-01] [ 1.30425692e-01 4.27228212e-01 7.12571144e-02 ... -2.68087298e-01 1.11691594e+00 2.24956661e-01] [-4.69142348e-01 -5.57636559e-01 4.97726411e-01 ... -9.29131866e-01 -3.71403843e-01 7.09948123e-01]] [[-2.23876119e-01 -1.48932457e-01 -7.37330556e-01 ... 
-1.57447815e-01 -3.05011541e-01 -9.09093320e-01] [-1.03747642e+00 -1.25431657e+00 4.62235749e-01 ... 4.97785285e-02 4.57025468e-02 -1.51568115e-01] [ 8.72020364e-01 -2.04504281e-01 3.48366439e-01 ... 4.61726069e-01 5.63963532e-01 8.78831968e-02] ... [ 4.70606744e-01 7.92627215e-01 -3.96592587e-01 ... 4.16084349e-01 1.98702261e-01 3.62536192e-01] [-1.35533273e+00 -4.18383367e-02 -3.36890310e-01 ... 4.80761766e-01 -9.51803550e-02 1.31174994e+00] [ 2.96780974e-01 3.16019654e-01 -5.83277941e-01 ... 7.46951625e-02 2.91723996e-01 -1.00451863e+00]]] [[[ 1.48736000e-01 -2.41500035e-01 -2.28534675e+00 ... 1.93086267e+00 8.15097630e-01 1.20773363e+00] [ 9.17667210e-01 -1.30057371e+00 -2.23272353e-01 ... -1.05803348e-01 1.99548498e-01 -1.67999542e+00] [ 3.10978508e+00 1.37763703e+00 4.21040356e-01 ... -5.30050755e-01 -3.14277679e-01 7.91149795e-01] ... [ 1.48807931e+00 6.33486867e-01 1.33063585e-01 ... -7.19418108e-01 -1.21078706e+00 -1.36727244e-01] [ 1.69025123e+00 3.42030954e+00 -3.26056331e-01 ... 1.23272307e-01 1.53383553e+00 2.75210202e-01] [ 2.57140660e+00 3.20852488e-01 8.05403352e-01 ... 9.19076920e-01 -2.84642863e+00 -3.12884808e+00]] [[-1.90752149e-01 -1.78895879e+00 3.09847212e+00 ... -2.69612908e+00 1.80994779e-01 -6.86822355e-01] [ 2.87245607e+00 -1.81483412e+00 -2.83626294e+00 ... -3.20658326e+00 2.53065300e+00 5.55472016e-01] [ 1.07150219e-01 2.44361377e+00 -1.00831556e+00 ... 4.31364346e+00 1.68104267e+00 -2.43714809e+00] ... [ 1.97660482e+00 -4.76622438e+00 1.19874299e+00 ... -1.32681596e+00 2.36959481e+00 -1.58227652e-01] [-4.87654299e-01 -2.92645907e+00 5.84077954e-01 ... 4.72780377e-01 1.36801052e+00 -1.68142393e-01] [ 1.33179235e+00 -1.17356241e+00 7.34100163e-01 ... -1.66262901e+00 9.26162660e-01 1.70545340e+00]] [[-6.06798291e-01 1.12236822e+00 -1.65119195e+00 ... 1.99449122e+00 -1.24833989e+00 -6.70074463e-01] [ 1.86478049e-01 1.59974456e+00 -1.02948129e+00 ... 
3.50357533e+00 -6.56163812e-01 3.34577727e+00] [ 3.50407839e-01 2.93492913e+00 -2.45915508e+00 ... 3.31664085e+00 9.37544346e-01 1.10563421e+00] ... [-1.21581089e+00 3.45625281e-01 -1.50398827e+00 ... -1.24248636e+00 -7.27736771e-01 -1.11685300e+00] [-7.98218191e-01 -1.10534832e-01 -1.58645153e+00 ... 1.54806376e+00 -1.15642034e-01 1.57494831e+00] [-2.05603313e+00 -1.46967673e+00 4.65566754e-01 ... -4.88452792e-01 -4.42670226e-01 -6.31962359e-01]] ... [[-1.29847121e+00 -8.25801373e-01 -1.06551468e+00 ... -4.01929617e+00 3.56496358e+00 -4.55743670e-01] [-8.06825995e-01 -1.14126176e-01 2.29966593e+00 ... -2.35155630e+00 -3.53081852e-01 2.78047657e+00] [ 2.34973788e+00 1.90226778e-01 2.88743091e+00 ... -2.00840756e-01 1.76871955e-01 -1.08506858e+00] ... [-9.25220013e-01 1.00949538e+00 2.23209381e+00 ... -1.02307892e+00 -1.66390014e+00 -4.00620431e-01] [-2.49310064e+00 -9.56968784e-01 6.27594054e-01 ... -8.90934050e-01 -1.56234360e+00 5.96912026e-01] [ 7.19129562e-01 2.70821238e+00 1.44525158e+00 ... 1.50241613e+00 1.28656077e+00 -1.45038021e+00]] [[ 6.81317389e-01 1.32445339e-02 6.33206010e-01 ... -8.33317116e-02 1.02992129e+00 -2.68550587e+00] [ 7.36666381e-01 1.31049788e+00 -1.87266374e+00 ... 2.82625943e-01 -3.20953584e+00 -1.80477607e+00] [ 2.82590556e+00 -7.56368399e-01 1.50881338e+00 ... 3.11357558e-01 4.59124041e+00 -1.58383393e+00] ... [-1.24940407e+00 -2.49245071e+00 9.71507505e-02 ... 1.31076026e+00 1.71466291e+00 9.92123842e-01] [-1.84930074e+00 -1.06384933e+00 -5.85581541e-01 ... 2.02667832e+00 -2.01046562e+00 -1.75785267e+00] [-2.70115465e-01 -6.72101855e-01 -2.55283093e+00 ... -2.00431809e-01 -2.03723240e+00 1.75845504e+00]] [[-5.58164477e-01 1.13064885e+00 1.47594082e+00 ... -1.83936000e+00 2.15082669e+00 -9.31822598e-01] [-2.53762484e+00 5.29597223e-01 9.04680669e-01 ... 1.21724176e+00 1.73427761e+00 -2.60394335e+00] [ 1.50325370e+00 -1.89916998e-01 1.31242186e-01 ... 8.06699276e-01 1.19828260e+00 6.25444829e-01] ... 
[-3.61391568e+00 8.50769758e-01 6.54338062e-01 ... 2.05107188e+00 7.65118241e-01 6.60668075e-01] [-8.20351720e-01 -1.74930382e+00 5.61995685e-01 ... -8.49190801e-02 9.09949720e-01 -6.84925854e-01] [-7.36717731e-02 -1.01924312e+00 -4.65380335e+00 ... 1.41652799e+00 -3.29921365e+00 -2.99320340e+00]]] [[[-9.65454817e-01 7.13770613e-02 4.50261414e-01 ... 9.41990137e-01 -5.79084337e-01 8.55370879e-01] [-2.35166058e-01 1.11142874e+00 -5.33486418e-02 ... 6.67493641e-01 -3.84831935e-01 4.26790059e-01] [ 2.99291108e-02 2.63620075e-02 5.03542900e-01 ... 6.11788869e-01 -5.11236250e-01 5.50082028e-01] ... [-1.33484125e-01 8.42062294e-01 -4.05655771e-01 ... -8.35381806e-01 3.71877164e-01 -1.25519335e-01] [ 6.90695882e-01 -9.05379832e-01 -2.21890062e-01 ... 5.68249226e-01 5.31832993e-01 3.24799150e-01] [ 2.17763856e-01 5.24710953e-01 -7.11083651e-01 ... 1.99227914e-01 1.06507528e+00 -8.45695853e-01]] [[-1.17772198e+00 5.48404634e-01 -1.04706538e+00 ... -1.67129174e-01 4.48604733e-01 5.62784493e-01] [-2.40162581e-01 -4.40506846e-01 -3.17572989e-02 ... -4.10210118e-02 6.49369121e-01 -8.22420269e-02] [ 6.13462865e-01 -7.80803621e-01 2.56545812e-01 ... 1.23957241e+00 2.99185887e-02 1.30464208e+00] ... [-2.84688901e-02 -7.63462663e-01 2.15285555e-01 ... 9.24974144e-01 -7.52902746e-01 -5.44199347e-01] [ 1.39169657e+00 1.12122893e+00 -1.26148665e+00 ... -7.46067941e-01 -5.77392802e-02 -1.13312248e-02] [-3.93192559e-01 8.67749751e-01 1.18895404e-01 ... 1.70523152e-01 -9.21470046e-01 -2.99424052e-01]] [[-8.81676853e-01 -1.45262733e-01 -9.68397260e-02 ... 1.19615428e-01 2.85997182e-01 -5.10731079e-02] [ 1.05819666e+00 -9.00053442e-01 6.12069890e-02 ... 2.64064699e-01 1.29196629e-01 -7.39478350e-01] [ 9.88107473e-02 2.82667708e-02 8.79737362e-02 ... 1.25587034e+00 -1.53163031e-01 2.65834123e-01] ... [-1.00395906e+00 5.91574609e-01 -2.53402531e-01 ... -4.65539575e-01 -1.58208048e+00 -2.30498269e-01] [ 2.35693976e-01 1.68283790e-01 6.30415201e-01 ... 
-1.86907679e-01 -9.77932990e-01 -6.22892797e-01] [ 6.13311112e-01 6.46516800e-01 6.90361321e-01 ... -1.10459730e-01 -3.26816082e-01 2.94495046e-01]] ... [[-2.67455038e-02 6.26267493e-02 5.33734441e-01 ... 9.35563922e-01 -5.21473944e-01 -1.03334057e+00] [ 9.35189903e-01 -3.94915743e-03 4.05861467e-01 ... 5.62366188e-01 1.32637709e-01 -7.72011131e-02] [ 1.43545777e-01 1.56738237e-01 3.71045858e-01 ... 6.39973938e-01 1.22482829e-01 -2.48253629e-01] ... [-5.01789331e-01 8.87033403e-01 -2.42664009e-01 ... -4.05860841e-01 1.34935692e-01 -2.68885106e-01] [ 5.00885785e-01 -2.22928032e-01 -2.54033171e-02 ... 8.30588043e-01 -8.43834937e-01 6.07028529e-02] [ 2.35105470e-01 9.43841875e-01 -3.73358965e-01 ... 4.35969591e-01 -1.09015393e+00 6.52411044e-01]] [[-5.58695674e-01 7.79700875e-01 -8.62665713e-01 ... -6.97768092e-01 -6.81967854e-01 -7.34883189e-01] [-7.95457065e-01 -9.23431575e-01 1.23693156e+00 ... -4.58397448e-01 -3.55425216e-02 -4.01482761e-01] [-7.50766546e-02 -4.91454720e-01 -1.12728584e+00 ... 1.13203704e+00 1.46682322e-01 2.66126603e-01] ... [ 6.89002335e-01 3.22224587e-01 4.34738725e-01 ... -1.92748293e-01 7.73480594e-01 -2.14658398e-02] [-2.46694431e-01 -7.50782788e-01 1.84484959e-01 ... 1.46079898e-01 7.76211202e-01 2.53187865e-01] [-2.62918264e-01 -2.21098602e-01 -6.28286242e-01 ... 7.12744772e-01 -4.24396873e-01 7.62005568e-01]] [[ 1.26937062e-01 1.18393958e+00 4.95929837e-01 ... -3.71508658e-01 -6.39054120e-01 -7.81181380e-02] [ 1.49996221e-01 2.22451866e-01 -1.13502192e+00 ... -7.28976190e-01 6.06341660e-01 -1.17833471e+00] [-1.62321568e-01 1.02706540e+00 -2.35624805e-01 ... -4.52097058e-01 -1.46898842e+00 -2.02666745e-01] ... [ 2.73557276e-01 -7.26489007e-01 -6.71584718e-03 ... 5.10077477e-01 3.27143431e-01 4.19484168e-01] [-2.28562683e-01 -4.97189641e-01 -1.28479406e-01 ... -6.01391852e-01 1.00723398e+00 -1.65904075e-01] [-2.87157819e-02 4.38219130e-01 9.11335230e-01 ... 
-4.06496555e-01 -5.43445885e-01 3.62967879e-01]]] [[[ 8.14208806e-01 -2.52608871e+00 3.16259563e-02 ... 1.04022777e+00 4.26767580e-02 1.74748138e-01] [ 1.10568869e+00 -2.06293797e+00 -1.33041739e+00 ... 1.28629699e-01 -6.69783950e-01 7.83740103e-01] [ 8.08255732e-01 -1.58746755e+00 1.35502815e+00 ... -1.35324046e-01 -2.69197059e+00 -3.34937185e-01] ... [ 2.62605309e+00 -5.95362008e-01 -5.68668485e-01 ... -1.57740176e+00 1.20336998e+00 -2.06351757e+00] [ 1.40942648e-01 2.93949068e-01 8.14364970e-01 ... 1.02020562e+00 -3.26032549e-01 -6.00951254e-01] [ 3.24013144e-01 -3.17071843e+00 1.77947855e+00 ... -1.79930079e+00 9.38636422e-01 1.73353815e+00]] [[ 3.64486456e-01 1.19619405e+00 2.73740828e-01 ... -3.60827136e+00 -1.41877019e+00 4.95518744e-01] [-1.61450315e+00 -2.40988398e+00 -1.02762902e+00 ... -1.70399475e+00 -1.25153255e+00 7.88568795e-01] [ 3.26648355e-01 8.05765510e-01 1.33226073e+00 ... -5.12227535e-01 -9.45797026e-01 -1.21799135e+00] ... [-4.85268265e-01 -1.38365245e+00 -1.84856430e-01 ... 2.73713017e+00 2.01949215e+00 -1.42663562e+00] [ 2.37201953e+00 1.30624020e+00 1.09878278e+00 ... 1.20408082e+00 1.20031023e+00 -1.93651557e+00] [ 1.06998071e-01 8.55139315e-01 -9.14395154e-01 ... 2.30273664e-01 5.65027118e-01 1.67452753e+00]] [[-1.55823648e-01 -3.99598837e-01 -1.62677944e+00 ... 8.59370604e-02 1.04354501e+00 2.82890368e+00] [-1.15097475e+00 -1.06669796e+00 -9.31402326e-01 ... 3.47102106e-01 -2.17841163e-01 -1.45836318e+00] [ 8.13038647e-01 2.27189064e+00 4.95590925e-01 ... -7.98010975e-02 1.41091383e+00 -2.27800667e-01] ... [-1.58952057e-01 1.57891905e+00 8.24984968e-01 ... -1.36101377e+00 -7.35280514e-01 -3.03407460e-01] [-1.11913073e+00 1.27714515e+00 9.74088788e-01 ... 1.57594836e+00 -1.36585450e+00 -3.88443917e-01] [ 1.04262221e+00 -2.16923046e+00 6.59487963e-01 ... 3.44928354e-01 -4.50754613e-02 1.40910268e-01]] ... [[-4.52528030e-01 2.68066669e+00 9.96059954e-01 ... 
-7.74584532e-01 -1.71973681e+00 -6.71803117e-01] [ 1.91300285e+00 1.99679482e+00 2.91361856e+00 ... -1.37505305e+00 -5.03969431e-01 -3.64729732e-01] [-1.10775448e-01 -6.21650100e-01 -1.23899305e+00 ... -1.40593708e-01 4.51765418e-01 -2.78746390e+00] ... [ 4.33982253e-01 1.59778798e+00 3.46263319e-01 ... -8.23914945e-01 3.66572762e+00 2.85910189e-01] [-2.87594962e+00 3.26300907e+00 8.36415112e-01 ... -5.11628509e-01 6.01925313e-01 9.95067298e-01] [-1.46873415e+00 4.01260495e-01 8.10883284e-01 ... -4.47344899e-01 2.62541056e-01 -1.05876160e+00]] [[ 2.29276276e+00 -1.46401727e+00 -9.56211090e-01 ... -1.95478082e+00 -2.67622566e+00 -2.64839351e-01] [-2.06654739e+00 4.25772250e-01 2.34738350e+00 ... -2.58642375e-01 -3.71646911e-01 1.08115889e-01] [ 4.17195702e+00 -1.27231526e+00 1.13871121e+00 ... -3.93466413e-01 1.24410701e+00 1.96478307e+00] ... [-1.74901652e+00 -1.05874479e+00 6.08593285e-01 ... 1.45348203e+00 3.80183458e-01 -8.01437318e-01] [ 1.55445182e+00 -1.58581436e+00 1.61329818e+00 ... 1.89551234e-01 -4.36976016e-01 2.01800108e+00] [ 2.41663384e+00 8.21694136e-01 -7.09421840e-03 ... -5.09768784e-01 -9.60740149e-01 -3.48791271e-01]] [[-2.74254471e-01 -1.97661519e+00 1.29946232e+00 ... 1.25468147e+00 1.59666240e-01 1.55571628e+00] [ 3.39364100e+00 -1.48737538e+00 5.27181029e-01 ... -1.08387423e+00 -9.41228628e-01 -1.78206158e+00] [ 3.48296255e-01 1.53730083e+00 3.97240490e-01 ... -4.48575258e-01 -9.02878582e-01 -1.17922592e+00] ... [ 1.00513387e+00 3.56931865e-01 4.47181612e-02 ... 1.60634184e+00 1.91122782e+00 5.09593308e-01] [-1.91653728e-01 9.05808434e-02 -5.86366765e-02 ... 2.01427293e+00 1.98638722e-01 4.53942728e+00] [-2.71318960e+00 -8.01020145e-01 -7.49480844e-01 ... -1.17617242e-01 8.94617915e-01 -1.80079007e+00]]] [[[ 1.99108005e+00 1.59105706e+00 2.09795761e+00 ... -1.47207046e+00 -2.54734707e+00 3.61433053e+00] [ 6.03724224e-03 3.99127603e+00 -1.51515579e+00 ... 
1.17832088e+00 -4.81649935e-01 5.51945782e+00] [ 1.01929903e+00 -2.20024753e+00 4.38772172e-01 ... 5.07584572e+00 -9.87229407e-01 -1.21114826e+00] ... [ 6.80430472e-01 -2.62901604e-01 3.86427689e+00 ... -1.54048872e+00 -2.42356849e+00 -2.80073595e+00] [ 1.47315097e+00 -2.73387814e+00 -3.68228459e+00 ... 1.66400313e+00 -1.72453022e+00 4.06572962e+00] [-6.71369028e+00 2.74934101e+00 1.76526082e+00 ... 3.26627016e-01 -1.22016382e+00 -3.12750697e-01]] [[ 5.86743021e+00 -1.27773976e+00 5.25676346e+00 ... -5.22165966e+00 -1.22252882e+00 -9.17465866e-01] [-4.45770168e+00 3.21614909e+00 -1.52620184e+00 ... -3.72157979e+00 1.10400426e+00 -2.55330610e+00] [ 1.17890334e+00 8.79916906e-01 -4.90726757e+00 ... 1.97878253e+00 -3.16035008e+00 4.05777359e+00] ... [-1.27443039e+00 5.97246885e-01 4.23740005e+00 ... -2.97759271e+00 -1.54895163e+00 -2.15581939e-01] [-3.51468801e+00 1.73982632e+00 1.01006618e-02 ... -4.43795824e+00 1.04556680e+00 9.32920694e-01] [-4.83745193e+00 1.52280319e+00 -9.36810493e-01 ... 2.83393359e+00 -8.41935158e-01 1.59413004e+00]] [[ 3.30763221e+00 -1.42465973e+00 3.15323055e-01 ... -2.98349112e-01 3.87599039e+00 2.98539495e+00] [-1.99491441e+00 4.95706469e-01 -1.56323171e+00 ... 6.58661008e-01 -1.16661406e+00 -2.02770162e+00] [ 1.02868855e+00 1.78985143e+00 2.91035080e+00 ... -1.64714324e+00 -3.91379058e-01 3.43280315e+00] ... [ 1.50968933e+00 -1.62668121e+00 -1.86476314e+00 ... -1.77185535e+00 9.66272652e-01 2.19141173e+00] [ 8.08177173e-01 3.41098166e+00 -4.91405010e-01 ... 8.47420335e-01 9.37660336e-01 4.49488133e-01] [ 5.06123877e+00 1.66383350e+00 4.13915920e+00 ... 3.16051698e+00 -3.18155169e+00 3.38827705e+00]] ... [[-1.37254667e+00 1.22303069e+00 6.74298429e+00 ... -3.06512308e+00 -3.08352637e+00 -3.87005830e+00] [ 1.42320073e+00 1.30181181e+00 1.43631682e-01 ... -1.40467733e-01 3.14196870e-02 -3.27636987e-01] [-1.85590193e-01 -5.99064827e+00 2.71522701e-01 ... 1.38183713e+00 1.72129881e+00 -2.80288148e+00] ... 
[ 1.93206024e+00 -7.80191496e-02 3.31343818e+00 ... -3.25402528e-01 2.78052306e+00 5.76174200e-01] [-3.51737833e+00 -1.84480798e+00 2.89290524e+00 ... -5.89232206e-01 -2.73271751e+00 -1.94651806e+00] [-3.04740572e+00 -1.79633069e+00 1.57378340e+00 ... -4.69270021e-01 -1.90688044e-01 -4.12191212e-01]] [[ 7.18676805e-01 -5.28273439e+00 1.62906599e+00 ... -9.14422035e-01 3.47777247e-01 -2.70577490e-01] [-7.68543661e-01 -9.75379527e-01 5.95837951e-01 ... -2.64962578e+00 5.59980392e+00 -2.05447125e+00] [ 4.10356188e+00 4.69807982e-02 -1.07067704e+00 ... 1.03716826e+00 -5.81181705e-01 2.01135921e+00] ... [ 1.21948826e+00 -7.51398653e-02 1.65197015e-01 ... 2.21469688e+00 -4.70501393e-01 -2.29582143e+00] [-3.79010499e-01 1.04300521e-01 6.05594032e-02 ... 5.47104168e+00 -1.24765384e+00 1.83742598e-01] [ 2.57989693e+00 6.00280583e-01 3.51459026e+00 ... 1.80387282e+00 2.54357433e+00 -2.29926443e+00]] [[-1.58403826e+00 2.04101181e+00 -3.97627616e+00 ... -8.67296934e-01 1.52091491e+00 -2.71045655e-01] [ 5.77751303e+00 -1.97697198e+00 1.47983730e+00 ... 1.98571229e+00 -2.58013916e+00 1.41137588e+00] [-7.99667120e-01 -1.29475260e+00 -4.06857777e+00 ... -1.03587247e-01 5.99281371e-01 1.78342640e+00] ... [ 1.86424589e+00 -1.13326585e+00 1.38690519e+00 ... -2.31697798e+00 -1.53713453e+00 -2.41441560e+00] [-3.99182129e+00 1.84308755e+00 -2.45091558e+00 ... 2.37850285e+00 2.08741236e+00 6.01917326e-01] [-3.29866648e+00 -5.68879128e-01 -2.12752676e+00 ... -2.93331409e+00 7.42968261e-01 4.55679131e+00]]] [[[ 8.04484487e-02 -2.13949695e-01 2.98774708e-02 ... -5.99168718e-01 2.14386415e-02 -9.66774970e-02] [ 2.95568146e-02 1.74198523e-01 5.47978282e-01 ... 6.98906183e-02 1.07626307e+00 6.05960548e-01] [ 5.37951350e-01 2.65174419e-01 -5.40087260e-02 ... 5.87488234e-01 3.17882568e-01 -4.72241133e-01] ... [ 7.81648457e-02 6.44593835e-02 -5.79141825e-02 ... -3.64053696e-01 1.39040321e-01 -3.03524882e-02] [-5.47855161e-02 -1.55637428e-01 -4.75797057e-01 ... 
1.34004101e-01 -4.13682520e-01 3.17076653e-01] [-1.59007758e-01 -5.98124824e-02 4.15002048e-01 ... 3.40520978e-01 3.81935537e-02 -4.65384364e-01]] [[ 4.91080582e-01 -2.03331769e-01 2.72477329e-01 ... 4.05319363e-01 2.14049771e-01 5.57376504e-01] [ 1.09925173e-01 -4.42098439e-01 -2.28120208e-01 ... -1.17645882e-01 -2.16680676e-01 4.16614600e-02] [ 4.11894411e-01 -2.21069217e-01 1.02970123e-01 ... 8.33977237e-02 3.56888443e-01 5.61746657e-01] ... [-7.50091448e-02 2.24137828e-02 -4.85837221e-01 ... -1.24238215e-01 -3.15518379e-01 -2.02953294e-02] [-1.14505462e-01 1.52944341e-01 -4.66743141e-01 ... 2.74700761e-01 -5.39106488e-01 1.46668255e-02] [-1.48541436e-01 6.47915080e-02 1.14075311e-01 ... 6.09112205e-03 1.40217751e-01 -6.40805736e-02]] [[ 9.33441613e-03 -3.77210706e-01 -8.27241540e-02 ... -2.30333999e-01 4.09400612e-01 -3.81413698e-01] [-7.32101053e-02 5.21034062e-01 1.87577441e-01 ... 5.73970787e-02 6.15535021e-01 8.14319700e-02] [-7.78098166e-01 5.41006446e-01 -3.82149667e-01 ... 2.83704758e-01 -1.60240874e-01 1.74021423e-01] ... [ 6.06570959e-01 3.31427217e-01 -1.70816630e-01 ... 1.32980555e-01 2.29231805e-01 2.23838046e-01] [ 8.79963562e-02 6.20692261e-02 2.80178450e-02 ... 1.56299040e-01 -2.99551915e-02 -2.05810368e-01] [-5.36589086e-01 5.29041111e-01 -3.14807236e-01 ... 4.72260177e-01 -3.58327813e-02 -2.13096336e-01]] ... [[ 2.57180065e-01 5.30177951e-01 3.12774539e-01 ... 3.62279922e-01 -1.46141857e-01 4.55998719e-01] [-3.17552865e-01 2.52253473e-01 -3.14494520e-01 ... -2.24479824e-01 -2.61805415e-01 -3.61670464e-01] [ 1.11280285e-01 -2.61811376e-01 3.70039165e-01 ... -1.14937820e-01 -5.82300484e-01 -3.86531830e-01] ... [ 4.71950382e-01 -2.10217848e-01 -2.41375193e-01 ... -2.15375811e-01 -5.24086893e-01 2.45608091e-02] [-1.51137635e-01 4.92201507e-01 3.48389357e-01 ... 4.92718779e-02 1.35220751e-01 6.29330635e-01] [-5.33080697e-01 -2.03921080e-01 2.72730201e-01 ... 
-8.53128076e-01 -5.99974096e-01 -3.76549959e-01]] [[ 6.83893040e-02 8.71966407e-02 -1.50512487e-01 ... 1.32971838e-01 4.30035442e-02 4.99021322e-01] [ 4.47454125e-01 -2.26387560e-01 -7.57075846e-02 ... 4.96504188e-01 4.95074779e-01 -1.42635077e-01] [-1.22170672e-01 4.73772645e-01 2.55069792e-01 ... -2.59618491e-01 1.54678836e-01 4.01714712e-01] ... [-1.76735401e-01 -1.15294591e-01 9.89838421e-01 ... -1.18534632e-01 3.14774632e-01 7.10194930e-02] [-9.56067964e-02 -7.99402818e-02 -5.24419248e-01 ... -3.54841314e-02 7.52762780e-02 -3.02370518e-01] [ 7.25069866e-02 3.65116186e-02 1.38330996e-01 ... -1.48891747e-01 -3.46330702e-01 1.58737600e-01]] [[ 5.18965662e-01 -1.91358134e-01 1.34960622e-01 ... 1.13389879e-01 -2.99980640e-01 2.20432326e-01] [-6.95530907e-04 1.18592300e-01 -2.97777727e-03 ... -2.17632204e-02 -4.21184480e-01 -7.40537345e-02] [ 3.03700805e-01 5.21773240e-03 1.36770129e-01 ... -2.65512764e-01 -3.29725236e-01 -2.16504738e-01] ... [ 2.42717117e-02 -1.16130151e-01 1.03055798e-01 ... -9.94325988e-03 5.70784807e-01 -4.36201878e-02] [-9.24846306e-02 -2.13433072e-01 -1.55040845e-01 ... 1.57185122e-01 -8.15247595e-01 -3.41177344e-01] [ 1.55696511e-01 -2.96036482e-01 1.31045923e-01 ... 3.78451198e-01 1.68977946e-01 -7.10884988e-01]]]] [[[[-4.87497538e-01 1.00649929e+00 -4.22633916e-01 ... 9.35463965e-01 5.71355760e-01 4.32312131e-01] [ 5.79908907e-01 -1.65484264e-01 4.38855976e-01 ... -1.29598939e+00 2.56624699e-01 2.79878080e-01] [-8.35508525e-01 -1.07715678e+00 -6.76347077e-01 ... -2.85008192e-01 2.61209041e-01 -2.23818973e-01] ... [-1.67173773e-01 1.13161123e+00 -4.07535970e-01 ... -3.70932400e-01 2.69624621e-01 -1.40876931e-04] [ 1.37433112e-01 -9.08068120e-01 5.01862347e-01 ... -3.95066962e-02 2.68795609e-01 5.99815488e-01] [ 1.10476339e+00 7.32742786e-01 -2.44417161e-01 ... 4.39700186e-02 1.92674756e-01 5.22727787e-01]] [[ 1.24087071e+00 -2.51506388e-01 -1.37140775e+00 ... 
-1.07546115e+00 1.29711056e+00 1.25692797e+00] [-1.71343744e+00 2.19631657e-01 6.81690574e-01 ... -1.25550485e+00 6.85171366e-01 2.14955226e-01] [-4.06380594e-01 -6.02322996e-01 3.55795383e-01 ... 1.67489457e+00 -2.36519083e-01 -5.67434058e-02] ... [-4.49813962e-01 8.74743044e-01 6.45778596e-01 ... -2.84198791e-01 3.14376265e-01 -3.90176356e-01] [ 7.49492228e-01 -5.96057296e-01 7.61229157e-01 ... -1.17193088e-01 -3.25202458e-02 8.74048173e-01] [ 1.26439893e+00 4.34822142e-01 -9.67526674e-01 ... 1.03709912e+00 2.27790046e+00 1.45629096e+00]] [[ 1.22875130e+00 7.25422084e-01 7.16387570e-01 ... 1.99747412e-03 -9.35435057e-01 4.44032252e-01] [ 9.14296806e-01 1.06945653e-02 -2.26063395e+00 ... -7.22241044e-01 -1.05776942e+00 -1.46655226e+00] [ 6.49739027e-01 2.53865629e-01 -3.88482451e-01 ... 6.34570897e-01 -1.35937989e+00 -1.27107099e-01] ... [-3.87403131e-01 2.47570172e-01 1.47463465e+00 ... 7.07924366e-01 6.16737828e-02 -2.60924041e-01] [-3.24679852e-01 6.02379620e-01 3.33470345e-01 ... 1.12559211e+00 -1.53346524e-01 4.01622690e-02] [-8.53322089e-01 -7.16971576e-01 -5.16423225e-01 ... 2.96123326e-01 -3.47739130e-01 -4.96903539e-01]] ... [[-2.59451538e-01 -4.33398075e-02 -2.00966924e-01 ... -1.74339521e+00 -8.48845601e-01 -5.16004145e-01] [-8.15089285e-01 -5.74208200e-01 -4.57818836e-01 ... -3.33229363e-01 -8.08927715e-01 -6.19038761e-01] [-2.08095506e-01 1.08268477e-01 -9.71580505e-01 ... 1.60106137e-01 -2.97679901e-01 5.26758313e-01] ... [ 1.20078969e+00 1.07214548e-01 7.29819059e-01 ... -8.88392627e-02 6.58827797e-02 5.07580698e-01] [ 1.61639810e+00 1.37018359e+00 4.49443787e-01 ... -1.19607341e+00 -7.97168016e-02 8.69015992e-01] [ 4.73151617e-02 -1.22481155e+00 2.84812182e-01 ... -1.49301097e-01 -1.81579912e+00 -1.06450260e+00]] [[-1.34028882e-01 1.03393519e+00 3.65866460e-02 ... 7.09884644e-01 -5.55555761e-01 9.78149921e-02] [ 6.15715794e-02 6.57097399e-01 1.07410550e+00 ... 
3.21808279e-01 1.41767967e+00 6.42692566e-01] [-3.35819662e-01 5.30689955e-01 1.35869157e+00 ... -8.86559546e-01 4.91715558e-02 -8.85940909e-01] ... [-1.02588439e+00 -1.23166549e+00 -1.79790050e-01 ... -7.06206858e-01 -4.28588241e-01 4.35398985e-03] [ 3.49999040e-01 -7.68590122e-02 -2.36182109e-01 ... -1.10632554e-01 1.58056653e+00 -6.10608578e-01] [-3.08800489e-01 1.14001131e+00 -1.36221588e+00 ... 4.00700510e-01 -7.03682780e-01 6.48650050e-01]] [[ 1.50531843e-01 -7.16632068e-01 1.38207924e+00 ... -6.18841350e-01 8.86243045e-01 7.93303967e-01] [-8.27544391e-01 -1.82438195e-01 -9.26864445e-01 ... -4.15252656e-01 9.40238833e-02 6.32043719e-01] [-2.53296316e-01 8.63417149e-01 1.63863361e-01 ... 9.96749222e-01 5.19135594e-01 -8.29375565e-01] ... [ 5.49387217e-01 -3.53386067e-02 5.86665511e-01 ... 7.49025166e-01 8.97458911e-01 -1.81037700e+00] [-6.80139244e-01 8.78045380e-01 5.19548357e-01 ... 2.52843440e-01 -4.58003253e-01 -8.84139389e-02] [-4.12551224e-01 1.24746168e+00 -4.74120617e-01 ... -3.97601992e-01 -3.12039077e-01 -4.44344223e-01]]] [[[ 1.51997745e-01 1.53342247e+00 -1.23430061e+00 ... -6.76687062e-01 -1.23932481e+00 2.87486482e+00] [-3.51322889e-01 -1.02875352e+00 -1.40557051e+00 ... -1.53571355e+00 -2.95563126e+00 8.97134170e-02] [-1.49850059e+00 -9.72696185e-01 1.05568731e+00 ... -2.21789575e+00 1.67122829e+00 -6.24212027e-01] ... [ 4.94852364e-02 -3.72534132e+00 3.09541166e-01 ... -1.21472085e+00 8.05406928e-01 -8.69558811e-01] [ 1.03330576e+00 1.45176530e+00 1.04511726e+00 ... 3.22782540e+00 -4.01651651e-01 1.33664048e+00] [-4.35196698e-01 2.62796140e+00 -3.14113140e-01 ... 1.96288705e+00 -1.80673826e+00 2.20005941e+00]] [[ 1.28309870e+00 9.00201440e-01 7.55460918e-01 ... 2.27130175e+00 1.19771324e-01 1.54803455e+00] [-3.88963252e-01 2.21186948e+00 -9.74437952e-01 ... 1.73687613e+00 3.16691780e+00 8.44590724e-01] [-1.57631505e+00 5.50337255e-01 -1.28472459e+00 ... -4.00698960e-01 -2.31165004e+00 -8.07422251e-02] ... 
[ 2.15024090e+00 3.27573568e-01 -2.60557389e+00 ... 1.22489117e-01 2.55039358e+00 -2.39810252e+00] [-6.98027492e-01 2.30541992e+00 7.49021411e-01 ... 1.07328081e+00 -2.16047239e+00 1.58654833e+00] [-3.35480070e+00 -2.71859646e+00 -9.57935870e-01 ... -8.18005085e-01 -1.81918979e-01 1.65476537e+00]] [[-3.16023290e-01 5.26459396e-01 1.89631850e-01 ... -1.85410976e+00 -6.24641180e-01 -3.88412571e+00] [-3.08758426e+00 -9.89066899e-01 1.36232483e+00 ... -1.55947208e+00 -2.74714780e+00 8.80826652e-01] [ 1.38581827e-01 -9.78752375e-01 3.12020588e+00 ... -2.11977959e-02 -2.42179585e+00 1.24035668e+00] ... [ 6.86515927e-01 -2.16101313e+00 -9.04592037e-01 ... 1.70520031e+00 -1.47043622e+00 -2.31514001e+00] [ 4.57425594e+00 2.05483675e+00 8.83868277e-01 ... -2.76626801e+00 4.30089384e-01 -1.05525923e+00] [ 2.13029790e+00 -2.02203298e+00 3.88716012e-01 ... -2.01994157e+00 -6.82371676e-01 4.44640629e-02]] ... [[ 2.64410913e-01 1.49707568e+00 -2.90316033e+00 ... 2.39957452e+00 2.54680425e-01 1.47352576e+00] [-1.01524842e+00 -2.59472179e+00 -2.68680406e+00 ... -9.74292159e-01 1.13302156e-01 -1.20503879e+00] [-6.27687752e-01 -1.36409295e+00 3.37224603e-01 ... 9.13057774e-02 -1.31953013e+00 3.72612858e+00] ... [-1.22856295e+00 1.12198763e-01 -1.05282795e+00 ... 2.13567066e+00 -1.57390565e-01 -1.87983334e+00] [ 8.37414742e-01 5.41375399e-01 -4.87848908e-01 ... 4.60812211e-01 -8.02371919e-01 -2.61228609e+00] [ 1.44251251e+00 -1.21790206e+00 5.93213797e-01 ... 2.26772070e-01 -2.06764722e+00 -2.31027752e-01]] [[-1.57547486e+00 2.67874479e+00 -6.19112909e-01 ... -6.49807900e-02 -2.75782496e-01 2.22814083e+00] [ 2.85773218e-01 -9.60757613e-01 -1.38485396e+00 ... -3.45694375e+00 5.68683743e-02 1.43326175e+00] [-2.22149506e-01 1.78847289e+00 -2.26238593e-02 ... 1.43145871e+00 -1.26632309e+00 -2.59112167e+00] ... [ 7.77063370e-01 2.14623618e+00 7.40327299e-01 ... 4.35811430e-01 5.73467016e-01 -1.67192161e-01] [ 2.58905983e+00 3.05077076e+00 7.83320606e-01 ... 
1.59715605e+00 5.85630298e-01 1.67822301e+00] [ 1.31104863e+00 2.70952368e+00 -1.20206094e+00 ... 8.64127040e-01 5.41976511e-01 -8.92999589e-01]] [[-1.61984265e+00 9.10253882e-01 -1.57073343e+00 ... -3.52856636e+00 -2.16354966e+00 -3.39984655e+00] [ 3.84956789e+00 -1.61821544e+00 -1.01387620e+00 ... -3.65342784e+00 -8.72028232e-01 -2.08229899e+00] [-5.47301173e-01 3.33339286e+00 1.20251262e+00 ... 1.24301302e+00 1.70152485e+00 -2.34901357e+00] ... [ 2.19922200e-01 -8.71307850e-01 -4.01681423e+00 ... 8.07628095e-01 -1.72334683e+00 -9.90026832e-01] [-7.93683529e-01 -3.52821541e+00 2.06125975e+00 ... -1.85462511e+00 3.42917442e+00 -2.51628470e+00] [ 2.92239547e+00 2.50332260e+00 9.79121923e-01 ... -6.04471803e-01 -1.16850957e-01 -1.95966327e+00]]] [[[ 6.25728905e-01 5.25715649e-01 2.82444566e-01 ... 6.32465482e-01 1.40933841e-01 -9.55109298e-01] [-5.85336015e-02 1.77474275e-01 5.03768861e-01 ... -7.10425675e-01 3.74747925e-02 2.92120367e-01] [ 3.33681136e-01 5.37743986e-01 -2.09655046e-01 ... 7.68245235e-02 -4.88625497e-01 9.82231259e-01] ... [ 4.68976647e-01 -8.75339270e-01 7.72442937e-01 ... -1.05382934e-01 -5.14260530e-01 -4.96664569e-02] [ 1.79299459e-01 6.19775474e-01 -4.10288796e-02 ... -1.41034201e-01 5.33410395e-03 1.49115741e+00] [ 9.41608250e-01 -8.75150710e-02 9.62536335e-02 ... -7.48286724e-01 1.01693869e-01 -5.09965494e-02]] [[-3.25819761e-01 -2.04733133e-01 6.80947602e-01 ... -5.70224086e-03 -5.66381931e-01 4.42978024e-01] [ 9.34401378e-02 9.86102670e-02 9.85259116e-02 ... -3.23055774e-01 -6.75716162e-01 4.29348379e-01] [-8.47465754e-01 -4.20346022e-01 -6.60807371e-01 ... -1.80630341e-01 -2.35689029e-01 6.06857061e-01] ... [-6.85470760e-01 -5.61371267e-01 5.24704039e-01 ... 4.96250123e-01 4.88365918e-01 -2.95473367e-01] [-1.24919820e+00 -4.85269934e-01 3.09710354e-01 ... 1.63882256e-01 5.13434708e-01 -7.24331439e-01] [ 5.82656622e-01 1.42490700e-01 -2.69413702e-02 ... 
1.48147476e+00 8.99300218e-01 -3.59724551e-01]] [[ 2.15537995e-01 1.28219187e+00 -3.78352970e-01 ... -9.92446244e-01 -5.03683448e-01 -4.27273244e-01] [-9.07928407e-01 4.69793230e-01 -2.78414965e-01 ... -5.59694588e-01 -8.02160382e-01 9.55038488e-01] [-2.91248083e-01 1.87057519e+00 -1.91931635e-01 ... 1.51823372e-01 -1.58217281e-01 -7.55861819e-01] ... [ 7.67996371e-01 6.51172459e-01 -3.35538715e-01 ... -3.94897789e-01 -6.60737932e-01 1.96082845e-01] [ 3.42832863e-01 1.04454532e-01 1.97641551e-01 ... 1.38097107e+00 1.00930858e+00 -4.81560528e-01] [ 5.22345126e-01 -4.88070399e-02 -2.65000183e-02 ... -8.71285737e-01 -1.51938036e-01 -1.28147572e-01]] ... [[-3.43392678e-02 1.16043404e-01 1.03647101e+00 ... 1.18864894e+00 -1.15378104e-01 1.86191523e+00] [-1.27058150e-03 -4.62167144e-01 1.23139966e+00 ... -1.64267406e-01 -4.38745499e-01 -3.51946324e-01] [-6.46278560e-02 -9.68266726e-01 5.83968043e-01 ... 6.59338832e-01 -3.34679604e-01 -4.85994279e-01] ... [ 4.04446959e-01 4.76326942e-01 8.91565621e-01 ... 4.52130258e-01 7.94976354e-01 1.67398706e-01] [-6.66326955e-02 -8.70333970e-01 6.70556247e-01 ... 8.22645962e-01 -2.92022556e-01 -3.37980837e-01] [-7.98405349e-01 -2.65458584e-01 -1.56427741e+00 ... 2.56091237e+00 1.00159442e+00 2.70843416e-01]] [[ 2.86499053e-01 -8.71566594e-01 6.37190938e-01 ... -2.97683030e-01 -1.58712253e-01 7.77460456e-01] [-2.01853454e-01 1.03389418e+00 -1.87566895e-02 ... 2.13022739e-01 4.18899447e-01 -2.19742492e-01] [ 8.62428069e-01 -4.25105661e-01 1.75331160e-01 ... -6.38630450e-01 -6.60201490e-01 4.52644765e-01] ... [-2.15472683e-01 9.70023632e-01 7.48254299e-01 ... -8.99493635e-01 -3.42761487e-01 -7.34191477e-01] [-7.24859893e-01 -3.26024801e-01 1.14301956e+00 ... -6.96147025e-01 -5.74945569e-01 -5.99320292e-01] [ 1.01917636e+00 -2.92473197e-01 -4.43925530e-01 ... -1.06777573e+00 -9.75414217e-01 -6.60891905e-02]] [[ 2.87940979e-01 -3.37592661e-01 -1.17733203e-01 ... 
2.82220423e-01 -4.03646287e-03 -1.84618130e-01] [ 1.83493257e-01 -1.04079044e+00 -1.64978907e-01 ... 1.03264105e+00 -3.02576631e-01 -1.76059693e-01] [-9.92232680e-01 1.56412169e-01 1.85373336e-01 ... 2.87218511e-01 -1.26053572e-01 3.16277564e-01] ... [-2.64065355e-01 -3.92041028e-01 -6.13250196e-01 ... 1.01979952e-02 1.56918049e-01 6.52139366e-01] [-6.23306751e-01 -2.19191998e-01 -6.40933439e-02 ... -4.53546923e-03 -4.85293061e-01 -8.43033373e-01] [-7.93861389e-01 -4.49710310e-01 7.25111187e-01 ... -8.91505182e-01 2.20022336e-01 7.51866221e-01]]] [[[-2.68212520e-02 -3.86679888e-01 -4.22320366e-01 ... -8.05910945e-01 2.11183619e+00 -8.57179701e-01] [-1.38940120e+00 -8.91269267e-01 -2.02373236e-01 ... 2.30453730e+00 2.19192386e+00 -2.29329753e+00] [-3.53753400e+00 -7.06722617e-01 9.56733972e-02 ... 2.16891980e+00 -1.18572974e+00 -9.37461495e-01] ... [ 1.85445356e+00 1.93844557e+00 9.65769768e-01 ... -3.15400422e-01 2.82671499e+00 -9.37315106e-01] [ 1.13412750e+00 1.49026263e+00 -6.30400538e-01 ... 1.12612271e+00 -4.32038009e-01 -1.50388703e-01] [ 1.60188556e+00 -2.10247993e-01 1.99481547e+00 ... -7.76131675e-02 1.01203406e+00 2.54672319e-01]] [[ 9.43649411e-01 -9.50759947e-01 1.41711712e+00 ... 9.40145910e-01 -2.00751141e-01 4.50533748e-01] [-2.48243809e+00 8.54850113e-01 -1.14011303e-01 ... 1.86611265e-01 1.82997361e-01 -1.14639163e+00] [ 2.86964226e+00 -1.90193701e+00 -7.63625860e-01 ... 7.20046639e-01 -6.39426887e-01 -1.13307154e+00] ... [ 7.63566971e-01 1.43743837e+00 1.49168521e-01 ... -1.54361117e+00 -1.72273362e+00 4.32910293e-01] [ 4.83320326e-01 -2.96993971e-01 -1.81202459e+00 ... -1.71980548e+00 9.28771555e-01 4.00051512e-02] [ 1.44476390e+00 1.54507279e+00 1.34625837e-01 ... -5.66827469e-02 6.17033303e-01 4.76957224e-02]] [[ 1.73968935e+00 -3.06229973e+00 -1.01782370e+00 ... -2.48071980e+00 -2.37137032e+00 9.54774395e-02] [ 1.16085720e+00 -5.54300964e-01 -3.60194772e-01 ... 
-1.81745350e+00 -1.55115053e-01 -7.51861691e-01] [ 3.02988386e+00 -4.83745374e-02 5.33380091e-01 ... -4.34129834e-01 1.43527973e+00 9.66272831e-01] ... [-3.33585709e-01 -5.30036225e-04 -1.04994166e+00 ... -2.10515514e-01 -1.80997801e+00 -1.02884695e-01] [-2.07066131e+00 -1.43338215e+00 -1.55838645e+00 ... 3.10086870e+00 -2.90137351e-01 1.48929060e+00] [ 7.95884490e-01 -2.32459724e-01 -7.66791403e-01 ... 1.02183831e+00 -2.05298114e+00 3.58420968e+00]] ... [[-3.07558060e+00 -1.15179634e+00 2.98065543e+00 ... -2.47089371e-01 6.66930497e-01 1.84285438e+00] [-7.43898153e-01 4.09262806e-01 -2.22496939e+00 ... -1.41211367e+00 1.74201024e+00 1.01347578e+00] [-9.12564397e-01 1.37695909e+00 6.94526583e-02 ... -2.32242846e+00 -6.85792446e-01 -2.04391718e+00] ... [ 6.31429076e-01 -3.45272255e+00 -1.84503841e+00 ... 8.81474674e-01 1.92001498e+00 5.46203971e-01] [-1.70996821e+00 1.88070202e+00 -1.52990592e+00 ... -2.80118370e+00 3.30513775e-01 2.98357934e-01] [ 2.42326999e+00 -3.01782131e-01 7.49397278e-01 ... -2.52445996e-01 -2.70365119e-01 1.54518872e-01]] [[-3.74685884e-01 1.03724584e-01 -2.74071550e+00 ... 7.07644224e-01 4.81256902e-01 1.87791562e+00] [ 4.35772091e-01 -5.92262924e-01 -3.63793641e-01 ... -2.56867766e-01 9.37788665e-01 -5.39323926e-01] [-7.25064635e-01 -2.74870086e+00 -1.07547057e+00 ... 1.65630543e+00 2.46579194e+00 4.77772892e-01] ... [-1.77569127e+00 3.26828182e-01 -7.59796441e-01 ... -2.54856348e-01 -3.81452370e+00 1.70971430e-03] [ 4.90839291e+00 1.60355830e+00 1.56332865e-01 ... -1.07309294e+00 -1.87611043e-01 -1.39770436e+00] [ 1.45263863e+00 1.79736722e+00 1.87139273e-01 ... 2.46968746e+00 -4.63827461e-01 -4.56513286e-01]] [[ 7.16598690e-01 -1.54943180e+00 -4.74572718e-01 ... 1.80713141e+00 1.39712358e+00 7.79483616e-01] [ 1.51496142e-01 -1.27970755e+00 -9.39005055e-03 ... 1.93507686e-01 -6.07016027e-01 2.21988487e+00] [ 3.60629886e-01 1.53506982e+00 1.64767516e+00 ... 6.96954846e-01 -7.33802542e-02 -4.01289195e-01] ... 
[ 5.20544171e-01 1.15044069e+00 1.37714863e+00 ... 1.03467047e+00 -7.01434553e-01 7.69613624e-01] [ 5.01845479e-01 -1.74805653e+00 -1.08346328e-01 ... -1.35728121e+00 3.09054673e-01 5.76421678e-01] [ 1.18990088e+00 3.99704248e-01 -1.64787006e+00 ... -2.11249804e+00 1.30402303e+00 -1.69112504e-01]]] [[[ 1.22462761e+00 -3.96429467e+00 1.06173658e+00 ... 2.01600695e+00 -5.04533887e-01 -1.89362895e+00] [ 2.31090641e+00 1.39790118e+00 -2.42741942e+00 ... -2.94674969e+00 -4.06019783e+00 -1.63354144e-01] [ 3.94063234e+00 1.87946939e+00 6.17358267e-01 ... -2.40550071e-01 8.63878369e-01 7.27687240e-01] ... [ 4.84739399e+00 4.22784901e+00 -4.94182491e+00 ... 3.35646868e-01 9.44375753e-01 2.53608078e-01] [-8.73295963e-01 -1.52168596e+00 -1.86276031e+00 ... 1.75389159e+00 4.44327259e+00 3.74743152e+00] [ 6.45096838e-01 1.77367294e+00 -1.50265455e+00 ... 4.57186985e+00 4.77136993e+00 1.40453362e+00]] [[-5.17053843e+00 5.53372145e-01 -1.13126755e+00 ... -1.56561840e+00 -3.52496696e+00 -2.01919842e+00] [ 6.93777278e-02 -3.48364674e-02 -1.62393034e+00 ... -6.43280983e+00 3.62784207e-01 -1.77413714e+00] [ 3.06671071e+00 -1.15201795e+00 1.28935325e+00 ... 1.37395978e+00 -2.36406589e+00 -1.86841917e+00] ... [-5.48017845e-02 1.40628839e+00 -1.80579758e+00 ... 1.28335044e-01 -1.33914459e+00 -2.64000583e+00] [ 1.54728472e+00 2.30526495e+00 -1.09340262e+00 ... -3.02771950e+00 4.92548972e-01 -1.59264433e+00] [ 1.78678393e+00 -2.54670668e+00 -6.58173466e+00 ... 1.51395047e+00 3.63045454e+00 -1.16279340e+00]] [[-1.75771677e+00 2.04689050e+00 -3.37838316e+00 ... 3.54803324e+00 -2.43666625e+00 3.91363502e-01] [ 4.78169590e-01 -2.28574872e+00 4.74205494e+00 ... -1.43990111e+00 -1.95370257e+00 -2.59686852e+00] [ 6.42386854e-01 2.58715868e+00 7.05671459e-02 ... -2.82590580e+00 -6.52873695e-01 1.91601217e+00] ... [-1.45295179e+00 6.89031839e-01 -2.82315779e+00 ... 2.64193797e+00 1.36734951e+00 4.90454388e+00] [ 5.11111915e-01 -1.82337433e-01 -6.30859709e+00 ... 
1.04403675e+00 2.93551517e+00 -7.21839666e-01] [ 2.14185667e+00 9.15934563e-01 3.21567655e-01 ... 2.76932216e+00 -1.27919376e+00 3.08132470e-01]] ... [[ 2.09379482e+00 4.42653275e+00 -3.46584463e+00 ... -1.24392259e+00 4.72169638e+00 -3.07093143e-01] [-1.64089829e-01 -2.66990137e+00 -3.94113874e+00 ... -5.01879120e+00 3.47691514e-02 -5.37337720e-01] [ 5.40275633e-01 1.02815412e-01 -1.49879909e+00 ... -2.96143985e+00 4.52660513e+00 -2.26296663e+00] ... [ 1.28969765e+00 -7.13932812e-01 6.27008498e-01 ... -3.93388081e+00 -3.41752195e+00 -8.30091178e-01] [ 2.06583738e+00 -3.25499249e+00 -1.98467791e+00 ... -8.65885764e-02 -3.55915517e-01 1.63225681e-01] [-3.66245341e+00 -3.41925764e+00 -1.85494387e+00 ... 1.75117776e-01 -2.83286095e+00 2.12817764e+00]] [[-1.72693372e+00 -3.20772910e+00 -5.74713469e-01 ... -8.30644548e-01 1.91622901e+00 -3.41008449e+00] [-7.01824367e-01 1.37433553e+00 2.28503394e+00 ... 1.85517538e+00 1.07529962e+00 -2.29131079e+00] [ 3.61782670e+00 -4.47234726e+00 7.52427354e-02 ... 2.23345184e+00 2.56469727e+00 2.86296511e+00] ... [ 1.86128497e+00 -1.98006809e+00 3.07926440e+00 ... -1.87368584e+00 2.67196441e+00 7.26667213e+00] [ 1.45422554e+00 2.01653337e+00 1.75531900e+00 ... 1.81681800e+00 1.19996321e+00 3.03455377e+00] [ 1.31949580e+00 1.50288427e+00 -3.41249609e+00 ... -1.12201238e+00 -2.51781702e+00 3.18348217e+00]] [[-5.08599699e-01 -1.26613939e+00 -4.20524806e-01 ... 2.85588562e-01 1.25768989e-01 1.88406527e+00] [-1.94474268e+00 3.81931949e+00 3.09505582e+00 ... 7.17974961e-01 1.80386269e+00 -3.17419577e+00] [-3.21701479e+00 2.47647882e+00 2.45805168e+00 ... 3.81278992e+00 1.99910864e-01 -3.36466503e+00] ... [ 2.09585118e+00 -3.39200234e+00 -1.81159604e+00 ... 1.27548385e+00 5.18146038e-01 -4.42361069e+00] [ 8.37700784e-01 4.95829535e+00 7.38717735e-01 ... 9.01450142e-02 9.49888468e-01 2.83280873e+00] [ 3.62775117e-01 2.83629918e+00 -2.44349813e+00 ... 
-6.86509418e+00 -6.81278586e-01 -2.94756556e+00]]] [[[-4.58054930e-01 -2.58960333e-02 4.16720778e-01 ... 1.73117876e-01 -7.83364754e-03 2.25191936e-01] [ 1.19547397e-02 -9.29393694e-02 -4.36136387e-02 ... -6.22730792e-01 6.31565601e-02 -6.73234403e-01] [-4.27861959e-01 -1.65084317e-01 4.95629609e-01 ... 3.77008647e-01 -4.76070404e-01 -3.02514493e-01] ... [-5.07868528e-01 -1.24526426e-01 1.07294714e+00 ... -8.11263546e-02 -2.63422042e-01 1.66510209e-01] [ 1.82757244e-01 -2.69166142e-01 1.37061402e-01 ... -3.21378797e-01 -1.51355401e-01 -5.04989326e-01] [-9.56033647e-01 -3.20675433e-01 -1.27651975e-01 ... -6.49232745e-01 -4.53104258e-01 -4.17025447e-01]] [[ 1.65805086e-01 -4.49291527e-01 2.87708044e-01 ... 2.65116423e-01 4.87668574e-01 -4.21187371e-01] [ 1.23145275e-01 -2.68575102e-01 1.04814076e+00 ... 3.36506903e-01 -3.32422405e-01 -1.23172835e-01] [-4.41803217e-01 -3.65468442e-01 2.51294881e-01 ... -9.94001105e-02 5.36519825e-01 6.00245953e-01] ... [-6.62566423e-01 -3.39645922e-01 -3.58193338e-01 ... -1.32474869e-01 -4.25511897e-02 -3.30957949e-01] [ 4.45213675e-01 -1.98647022e-01 9.54779834e-02 ... 2.02447399e-01 3.93839002e-01 1.75812826e-01] [-1.38926506e-01 7.73383901e-02 1.14260815e-01 ... -1.34225994e-01 -4.47420925e-01 -3.97015363e-01]] [[ 5.50236464e-01 2.39709362e-01 -5.18493354e-01 ... -2.39113882e-01 -4.43352580e-01 1.17055491e-01] [-2.52424657e-01 2.42896542e-01 -3.65121037e-01 ... -2.56445915e-01 -4.17019099e-01 2.97597229e-01] [ 9.29814041e-01 -2.65613645e-01 2.25467369e-01 ... 1.80558100e-01 6.56819463e-01 4.04971056e-02] ... [-1.68778479e-01 2.10896090e-01 -3.83260220e-01 ... -3.53886396e-01 2.77295649e-01 -6.53403774e-02] [-7.57989138e-02 -1.87447622e-01 -2.58606941e-01 ... -6.88033104e-01 -3.62510532e-01 9.19075012e-02] [-4.66072261e-02 2.59015471e-01 7.63393283e-01 ... -7.46712387e-02 -4.67906773e-01 2.71100551e-01]] ... [[ 2.04794675e-01 3.01111817e-01 -4.84248936e-01 ... 
5.19343652e-02 1.57937020e-01 4.01850700e-01] [ 4.00031656e-01 -4.58509773e-01 5.24900928e-02 ... -1.62564427e-01 -1.14679255e-01 7.53026679e-02] [-3.49540561e-01 1.89316198e-01 -3.09191793e-01 ... -3.03969443e-01 3.77584577e-01 4.83666509e-01] ... [-4.31575507e-01 6.04045615e-02 3.77847522e-01 ... -1.99801549e-01 6.70000091e-02 1.83251336e-01] [ 4.49849069e-01 -1.14609644e-01 -7.06419885e-01 ... -6.33056521e-01 -2.02148393e-01 -2.72492111e-01] [-4.08373803e-01 -3.33762288e-01 2.08896007e-02 ... -6.61580205e-01 3.24523091e-01 -3.66220474e-01]] [[ 5.74441612e-01 -2.23972574e-01 -1.73949808e-01 ... 1.88180223e-01 -1.46791682e-01 -8.25418830e-02] [ 3.32051247e-01 -4.18760814e-02 3.66818160e-01 ... -1.63306758e-01 9.44268048e-01 -1.47347108e-01] [-2.19550848e-01 -1.75584346e-01 8.06962922e-02 ... 9.53631923e-02 5.03061600e-02 6.86736926e-02] ... [ 2.95208424e-01 2.83663720e-01 1.33820191e-01 ... 5.28692842e-01 -1.95168808e-01 -4.61342558e-02] [ 1.55475453e-01 2.06576467e-01 -2.62000002e-02 ... -3.96123296e-03 -3.12722832e-01 -4.23920244e-01] [ 1.05341434e-01 6.90940440e-01 5.11677861e-01 ... 3.87803406e-01 3.81075054e-01 -3.81566674e-01]] [[-2.43335590e-01 6.14785433e-01 4.27050889e-01 ... -2.09506273e-01 3.96838367e-01 5.37673473e-01] [-1.17518425e-01 -1.91818329e-03 -2.87769645e-01 ... 4.61152308e-02 -2.22352579e-01 -5.36396131e-02] [ 3.82116586e-01 2.08736122e-01 -5.65854609e-01 ... -1.47666156e-01 -2.34040972e-02 4.46232796e-01] ... [ 1.80113941e-01 1.17638439e-01 2.30753735e-01 ... 4.76479948e-01 5.39452970e-01 2.45718881e-01] [-7.31212795e-01 2.01153681e-01 8.03700686e-02 ... 2.42195502e-01 -1.45785332e-01 1.29425690e-01] [-2.31787831e-01 -6.21257313e-02 2.89529234e-01 ... -2.75410146e-01 2.51179904e-01 -5.56829572e-02]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 5} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': True} ] | 0.11 | |
|
----------------------------- Captured stdout call ----------------------------- 5 graph(%self : __torch__.test_group_norm.___torch_mangle_4624.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : NoneType = prim::Constant() %4 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %5 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.bias : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.1641 -0.1901 1.1432 1.1436 0.4566 0.2154 [ CPUFloatType{6} ]]() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.5663 0.4156 1.0988 0.5187 0.6571 0.3170 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, 
%self.n_groups, %3, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) ce to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or 
compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4865605 (Squeeze_4865604[0]:i64[], Constant_4865554[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4865605': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4868095 (Squeeze_4868094[0]:i64[], Constant_4868043[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4868095': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4870893 (Squeeze_4870892[0]:i64[], Constant_4870841[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4870893': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ typefw_re: [[[[[ 1.10543475e-01 -1.02964476e-01 -3.38884562e-01 ... -4.02857482e-01 -3.48453879e-01 5.60409069e-01] [ 6.81395471e-01 -3.81377727e-01 -1.10354495e+00 ... -1.19290121e-01 1.13574758e-01 -4.95107293e-01] [-1.81933731e-01 -1.16468978e+00 -8.82792234e-01 ... -1.07989423e-01 -4.30171192e-01 8.25060785e-01] ... [-1.11652792e+00 -1.34171557e+00 2.80495673e-01 ... 
5.44770733e-02 5.44183195e-01 2.00119928e-01] [ 9.28447768e-02 -9.73224521e-01 -3.47116664e-02 ... -1.02900386e+00 2.11588051e-02 -7.61181831e-01] [-5.72286487e-01 -4.92564231e-01 5.43089449e-01 ... -5.74976742e-01 -6.50872231e-01 -4.11877453e-01]] [[-4.32307333e-01 2.97280028e-02 -4.00479496e-01 ... -3.45730454e-01 5.44596501e-02 -1.42772913e+00] [-1.06931202e-01 -6.55028999e-01 -4.74885218e-02 ... 4.29736733e-01 -5.84896691e-02 4.14902307e-02] [-6.40410781e-01 -5.62353432e-01 -7.24056661e-01 ... -6.05696976e-01 6.60790205e-01 2.59086043e-01] ... [-9.05778930e-02 -3.68525274e-02 -1.06156659e+00 ... -6.26866937e-01 7.48441160e-01 -9.00673151e-01] [ 7.04424441e-01 2.65187949e-01 -8.95713091e-01 ... 3.06370705e-01 3.69143575e-01 -7.23215401e-01] [ 2.99377322e-01 2.22439945e-01 -1.22484565e-01 ... -7.35334814e-01 -3.20764929e-01 -1.64276227e-01]] [[ 5.20324945e-01 -1.30701661e+00 8.34868103e-02 ... 3.30677271e-01 -6.45481706e-01 2.35184822e-02] [-2.98298359e-01 5.42029321e-01 3.65188122e-01 ... -3.77483696e-01 8.97872597e-02 3.08891386e-01] [ 4.01430696e-01 -6.08335257e-01 -3.28031965e-02 ... -1.21463470e-01 4.52540427e-01 -7.97499835e-01] ... [-6.35617971e-02 -5.66584840e-02 -1.22331727e+00 ... 1.39090456e-02 -6.19560122e-01 -7.76971430e-02] [ 1.54378280e-01 -3.67906660e-01 -1.56791076e-01 ... 1.06098197e-01 -3.80338609e-01 7.26512372e-02] [-1.30124122e-01 -3.41984034e-01 -7.47844458e-01 ... -1.24300706e+00 -5.84641755e-01 -6.33916616e-01]] ... [[ 7.62085676e-01 2.16294397e-02 7.78018594e-01 ... 3.60586226e-01 -1.07416883e-01 -6.31988943e-01] [-1.75345421e-01 -6.45036176e-02 3.73945057e-01 ... -5.80372930e-01 8.51496994e-01 -7.00216234e-01] [ 2.71974266e-01 -3.26026142e-01 -4.94266003e-01 ... 2.19930395e-01 -1.07936203e+00 -6.80869520e-01] ... [-4.23288077e-01 -2.21084714e-01 -2.87574381e-01 ... -3.52845877e-01 -1.07121933e+00 -6.82443619e-01] [-5.40742040e-01 -1.28079370e-01 6.60210133e-01 ... 
7.96287581e-02 2.03558922e-01 -2.61257887e-01] [-4.06621128e-01 -1.20721281e+00 2.02427581e-02 ... -7.09534883e-01 4.95566666e-01 -2.90761650e-01]] [[-9.78857353e-02 -1.13174570e+00 -6.12515330e-01 ... -7.24809289e-01 -7.75953710e-01 3.47836882e-01] [ 3.13137054e-01 -7.65277267e-01 4.90808606e-01 ... 2.00150296e-01 -3.83715957e-01 1.04763128e-01] [-3.20753232e-02 -6.15834713e-01 -1.06578159e+00 ... 9.72741365e-01 2.54921585e-01 2.43883520e-01] ... [-3.29900207e-03 8.23381722e-01 -9.11756828e-02 ... 5.15641682e-02 -4.03696656e-01 -1.04728866e+00] [ 5.41114211e-01 3.43566954e-01 -5.69880269e-02 ... 2.57194757e-01 2.45600298e-01 -6.39514148e-01] [ 8.13835189e-02 -3.41744483e-01 -1.31481871e-01 ... -1.90902054e+00 -3.59346390e-01 -5.58842123e-01]] [[ 2.46780008e-01 1.68438900e+00 9.34843838e-01 ... -5.00178277e-01 -9.58033502e-01 -5.30014411e-02] [ 3.07656586e-01 -6.58343375e-01 -4.17166352e-01 ... -1.79226696e-01 -3.97244483e-01 -1.30636320e-02] [ 6.83130920e-01 4.83730346e-01 -2.86071271e-01 ... 9.96351391e-02 -5.68928778e-01 -7.66860127e-01] ... [-1.22184086e+00 2.70324767e-01 -4.50557098e-03 ... 6.05980337e-01 1.30572826e-01 4.08256024e-01] [-2.68239409e-01 -3.89831930e-01 -1.22325823e-01 ... -1.06610560e+00 -1.36727840e-01 3.55192870e-01] [-6.71641380e-02 4.35684919e-01 -8.57166708e-01 ... -2.64728576e-01 -7.37678647e-01 1.23347223e+00]]] [[[-5.48788756e-02 3.53121981e-02 -2.28387311e-01 ... -4.36992384e-02 -5.08018851e-01 -4.98267263e-01] [-4.48339194e-01 -6.20688021e-01 -8.36658716e-01 ... -9.50720847e-01 -1.43786967e-01 -4.11848798e-02] [-4.47723150e-01 -3.13707978e-01 -1.60853148e-01 ... -1.03458092e-02 3.61405343e-01 -2.09962696e-01] ... [ 1.47561327e-01 1.42414108e-01 -4.13889647e-01 ... 6.15209080e-02 -1.30347669e-01 3.07179838e-01] [-2.45943934e-01 -2.72758126e-01 8.09200779e-02 ... 2.22704738e-01 -3.14567000e-01 -3.86047900e-01] [-1.75502014e+00 4.44485962e-01 -2.11824447e-01 ... 
-2.10822925e-01 -3.76412332e-01 -3.63771051e-01]] [[ 1.77958176e-01 1.65886715e-01 1.13032922e-01 ... -3.11791748e-01 -5.94283283e-01 -2.27887169e-01] [ 4.12412614e-01 -5.94120733e-02 -3.91509980e-01 ... 1.39938429e-01 -2.26526618e-01 3.52421552e-01] [ 7.03531682e-01 -5.53933561e-01 -1.07492425e-01 ... -3.00116599e-01 -5.50430156e-02 1.52530745e-01] ... [ 3.54599208e-01 -7.25390732e-01 1.24319822e-01 ... 1.24390967e-01 -4.60289061e-01 -3.58242482e-01] [-2.03857005e-01 7.92624131e-02 -5.40595725e-02 ... 1.92931026e-01 4.78416868e-02 1.75445959e-01] [-8.43249679e-01 -3.80693436e-01 -2.70130634e-01 ... -7.05942288e-02 -8.69309187e-01 -1.77754074e-01]] [[ 5.55821896e-01 -2.33940691e-01 -3.67615938e-01 ... -4.95109230e-01 -6.89669609e-01 -3.20398599e-01] [-3.74584049e-01 -6.76152110e-01 -7.14671254e-01 ... -3.32140505e-01 1.54494673e-01 -4.41040434e-02] [-1.69942528e-01 -3.79669428e-01 -8.55369419e-02 ... -7.19211817e-01 1.45249277e-01 -1.63373590e-01] ... [ 5.18079281e-01 -8.20582271e-01 -5.00172496e-01 ... -2.75484622e-01 -6.06848478e-01 -5.88579774e-01] [-2.67722845e-01 -6.04550898e-01 -6.13814473e-01 ... -4.02538955e-01 -6.55705512e-01 -7.80783743e-02] [-1.00804500e-01 3.74997556e-01 -2.43981257e-01 ... -6.91215456e-01 -6.66013837e-01 3.27458829e-02]] ... [[ 5.31223230e-02 -5.23099124e-01 -6.89499021e-01 ... 2.30269387e-01 -2.61022270e-01 -4.12879467e-01] [-9.48818699e-02 -2.77489871e-01 -4.34991211e-01 ... -5.73165715e-01 3.26508373e-01 -3.45955670e-01] [ 2.81567961e-01 2.66007036e-01 -8.69218484e-02 ... 1.44788206e-01 7.07515776e-02 6.94300085e-02] ... [ 5.52633740e-02 -4.81225431e-01 -8.25008273e-01 ... -3.62869143e-01 -7.29152918e-01 -2.79239621e-02] [ 5.68900228e-01 -2.42009759e-01 -3.69602442e-01 ... -6.33137345e-01 5.79088748e-01 1.03151456e-01] [ 6.26704037e-01 -7.65440106e-01 2.77902365e-01 ... 6.27438128e-01 -1.80831537e-01 1.91713259e-01]] [[-7.33060122e-01 3.14898878e-01 -4.54174757e-01 ... 
-6.07677817e-01 -3.24755237e-02 -3.28011930e-01] [-3.75970304e-01 1.82194695e-01 -8.91644210e-02 ... 5.35075605e-01 7.47198164e-02 1.89944088e-01] [-3.08494419e-01 -4.24879283e-01 9.55117047e-02 ... -4.16204818e-02 -2.62699544e-01 3.45645815e-01] ... [ 8.13997816e-03 -5.08878708e-01 -4.22831595e-01 ... -1.17170465e+00 -3.96916986e-01 -4.38189000e-01] [-1.40031695e-01 5.59767127e-01 2.26479143e-01 ... 1.71462949e-02 -1.67948559e-01 1.94654688e-01] [-1.98942363e-01 -3.99458230e-01 -7.68553078e-01 ... 5.69101393e-01 -3.00941795e-01 -1.07746863e+00]] [[-2.95347363e-01 3.13816130e-01 -5.75644791e-01 ... -9.72447768e-02 7.94539273e-01 -2.34514296e-01] [ 5.31745255e-02 -4.54362124e-01 -1.67758539e-01 ... -4.02790815e-01 -3.49031121e-01 -1.79342315e-01] [-3.11151415e-01 -3.08661193e-01 -4.78345752e-01 ... -4.34519023e-01 -9.89996567e-02 -3.01245712e-02] ... [ 1.16966403e+00 4.23721582e-01 -2.82756120e-01 ... 2.36126080e-01 1.56004764e-02 -4.40300554e-01] [ 8.33207462e-03 -4.35614973e-01 6.74264282e-02 ... -5.62562011e-02 -3.63504469e-01 -2.94254906e-02] [ 1.80420801e-01 -5.76342642e-01 1.22561209e-01 ... -3.69166285e-01 8.77350926e-01 -9.64947224e-01]]] [[[ 2.42774796e+00 3.37679291e+00 -1.68117893e+00 ... 1.54962099e+00 -1.12887466e+00 1.65570974e+00] [ 1.21986367e-01 1.17480254e+00 -3.25781882e-01 ... 1.13861203e+00 5.75002193e-01 -2.79092073e-01] [-7.75915861e-01 6.01342916e-01 -9.12925601e-01 ... 9.43017900e-01 2.77132535e+00 1.55526891e-01] ... [ 1.12144911e+00 1.02611303e+00 2.99850011e+00 ... 2.08743191e+00 1.34511566e+00 -3.08468461e-01] [ 1.78811514e+00 1.51430011e+00 1.48157597e+00 ... 1.76471382e-01 1.28026187e+00 1.80466747e+00] [ 2.09952712e+00 3.73385251e-01 2.99055815e+00 ... 4.73074973e-01 1.03722763e+00 -6.27183691e-02]] [[ 1.14088202e+00 8.41512799e-01 1.78238988e-01 ... 7.23128557e-01 2.71775126e+00 9.90111709e-01] [-5.94034076e-01 1.13097656e+00 2.44103861e+00 ... 
2.63333154e+00 1.92965436e+00 4.57998574e-01] [ 2.41274071e+00 4.46498752e-01 3.38791966e-01 ... -4.78426933e-01 2.75553894e+00 2.38029575e+00] ... [ 1.15004623e+00 1.03553307e+00 6.26263082e-01 ... 2.61446285e+00 2.34555936e+00 3.48943532e-01] [-9.01404142e-01 4.87150550e-01 1.17721641e+00 ... 1.42181110e+00 -6.84345782e-01 1.73712122e+00] [ 1.32728529e+00 1.91070426e+00 -3.60683277e-02 ... 2.11554074e+00 2.16870546e+00 1.01872206e+00]] [[ 1.35372591e+00 1.24656737e+00 9.54232886e-02 ... 1.61711001e+00 5.95727637e-02 3.51549506e-01] [ 2.07674527e+00 -4.02048900e-04 2.37020206e+00 ... 1.02643847e+00 9.88868713e-01 1.06695449e+00] [ 1.23506391e+00 -2.97633082e-01 3.21158075e+00 ... 2.00887227e+00 2.34404206e-01 1.53426492e+00] ... [ 1.77299798e+00 7.56628931e-01 -9.78367090e-01 ... 1.92816246e+00 4.03432751e+00 1.76905930e+00] [ 4.00138587e-01 2.51334846e-01 1.03922045e+00 ... 1.77735043e+00 4.95980650e-01 1.04290962e+00] [ 1.08775282e+00 3.87922287e-01 3.44539952e+00 ... 2.40726733e+00 1.65155590e+00 1.08529377e+00]] ... [[ 7.10051477e-01 3.19886088e+00 2.18706965e+00 ... 4.96967226e-01 8.70431662e-01 1.19477499e+00] [-2.29156956e-01 2.02715015e+00 -7.01104477e-02 ... 1.79813787e-01 1.72704756e+00 1.47891796e+00] [ 2.75231290e+00 1.84595191e+00 6.46425724e-01 ... 1.83503926e+00 8.86169851e-01 1.79266596e+00] ... [ 1.22583759e+00 1.75627863e+00 1.83511484e+00 ... 2.59972000e+00 2.01659465e+00 1.82524815e-01] [ 1.78096163e+00 1.06141877e+00 1.25340962e+00 ... 1.46674252e+00 3.60517836e+00 1.98935437e+00] [ 1.15058351e+00 7.32780159e-01 9.53715265e-01 ... 9.38955843e-01 9.78783786e-01 1.20690131e+00]] [[ 1.70827484e+00 1.04593658e+00 4.63470906e-01 ... 9.33042705e-01 -8.92617643e-01 -3.66364181e-01] [-5.95638871e-01 2.32655430e+00 1.08032882e+00 ... 8.70388091e-01 2.82939792e+00 4.72288191e-01] [ 1.28371072e+00 1.39911413e+00 6.47579670e-01 ... 8.73136699e-01 9.14777756e-01 2.65407991e+00] ... [ 8.94375801e-01 2.69700289e+00 2.41024256e+00 ... 
7.74321914e-01 -2.96093404e-01 1.21701264e+00] [ 1.56859148e+00 8.87276530e-01 1.32494652e+00 ... 1.84373677e+00 -9.90890920e-01 1.12996018e+00] [ 2.55641389e+00 -2.80231893e-01 3.27764928e-01 ... 1.73832405e+00 1.13145304e+00 1.11301458e+00]] [[ 7.48608589e-01 2.68729538e-01 -2.54118395e+00 ... 1.80908477e+00 6.91130877e-01 4.89442170e-01] [ 9.60262060e-01 2.12522793e+00 -2.97196865e-01 ... 1.72680557e+00 2.19932094e-01 2.26189423e+00] [ 6.28309846e-01 2.11499643e+00 9.35266018e-01 ... 7.60929525e-01 1.93175113e+00 5.86357892e-01] ... [-5.38617253e-01 -2.96120971e-01 1.19580008e-01 ... 2.48159385e+00 3.97680998e+00 -1.18475281e-01] [ 9.95366096e-01 2.39169002e+00 8.04381430e-01 ... 3.52332091e+00 8.68918300e-01 2.62336195e-01] [ 1.57438409e+00 4.80379254e-01 9.68260705e-01 ... 8.03865194e-01 2.76792812e+00 -2.81911105e-01]]] [[[ 7.45539844e-01 1.12389779e+00 1.44962943e+00 ... 2.23184156e+00 2.06690550e-01 1.85716498e+00] [ 1.48473620e+00 1.51160526e+00 3.66562843e-01 ... 1.21673310e+00 1.50530517e+00 1.28275311e+00] [ 1.90471756e+00 1.72726071e+00 6.24527335e-01 ... 8.97722125e-01 1.45162642e+00 3.16619366e-01] ... [ 1.72946370e+00 5.72963655e-01 1.27691042e+00 ... 8.70977938e-01 9.50390935e-01 1.06645513e+00] [ 6.07205570e-01 7.91532397e-01 1.13526750e+00 ... 4.56737131e-01 3.64241153e-01 1.45485282e+00] [ 1.72379768e+00 9.49490011e-01 1.45741987e+00 ... 4.39959854e-01 9.78974104e-01 1.88923967e+00]] [[ 1.44898343e+00 1.48563051e+00 6.53286457e-01 ... 9.85058427e-01 1.40821958e+00 1.34820127e+00] [ 7.30882764e-01 1.37264085e+00 1.44274175e+00 ... 1.56694460e+00 1.54805279e+00 5.45789540e-01] [ 8.94828856e-01 2.28935289e+00 1.29640901e+00 ... 1.23355448e+00 1.00604224e+00 1.15038097e+00] ... [ 1.29658544e+00 1.55513990e+00 1.36111677e+00 ... 7.33948231e-01 1.00940180e+00 1.36924577e+00] [ 7.83160985e-01 1.25569057e+00 -2.00821772e-01 ... 1.92292118e+00 1.20141566e+00 7.34708011e-01] [ 5.95170557e-01 1.98241365e+00 1.13464522e+00 ... 
1.38955128e+00 2.58868605e-01 9.25274014e-01]] [[ 6.64001882e-01 7.81998873e-01 1.37488115e+00 ... 1.12361860e+00 5.61268985e-01 2.06984177e-01] [ 3.64866614e-01 1.12917149e+00 8.95458221e-01 ... 1.27159262e+00 1.61081827e+00 1.39054000e+00] [ 1.25826049e+00 1.13350618e+00 1.45658433e+00 ... 1.26164377e+00 1.92874467e+00 1.05422521e+00] ... [ 1.16821162e-01 2.15086961e+00 1.26411915e+00 ... 6.70531631e-01 1.07758975e+00 1.28155518e+00] [ 1.92068231e+00 6.71448171e-01 4.92167264e-01 ... 1.09845746e+00 1.15806067e+00 1.34036565e+00] [ 8.72058392e-01 1.70462084e+00 8.66007686e-01 ... 1.29330647e+00 1.10364211e+00 1.43219686e+00]] ... [[ 6.20615900e-01 1.61400628e+00 1.29717469e+00 ... 8.43227804e-01 7.52683356e-02 8.48681569e-01] [ 2.60967398e+00 1.15444922e+00 9.44686413e-01 ... 1.91327798e+00 1.08496892e+00 1.56284225e+00] [ 4.98930812e-01 7.26246953e-01 7.29016215e-02 ... 1.35596788e+00 1.67372775e+00 2.32879710e+00] ... [ 2.07327509e+00 -4.79805559e-01 2.31729341e+00 ... 3.68975520e-01 1.37299240e+00 1.40585864e+00] [ 8.59817624e-01 1.71705008e+00 1.08340442e+00 ... 1.74361682e+00 1.92658687e+00 1.36347616e+00] [ 6.98742211e-01 1.14843047e+00 7.65148818e-01 ... 1.52136528e+00 3.76682043e-01 1.37457287e+00]] [[ 7.04607129e-01 8.85927558e-01 4.05668914e-01 ... 6.39614224e-01 1.13578081e+00 1.68979216e+00] [ 1.28841925e+00 7.33226895e-01 8.87921333e-01 ... 2.94903249e-01 1.29317188e+00 1.63257277e+00] [ 7.29349732e-01 9.13466573e-01 1.04520345e+00 ... 1.44396365e+00 9.74283397e-01 1.24357879e-01] ... [ 1.31946075e+00 5.16428292e-01 1.94180739e+00 ... 1.20229316e+00 9.04437661e-01 8.68668556e-01] [ 1.17280698e+00 1.05762875e+00 7.63845146e-01 ... 7.95681655e-01 1.51810849e+00 1.32934570e+00] [ 6.73375845e-01 1.11688781e+00 3.91784698e-01 ... 1.00128949e+00 5.29232264e-01 1.29539728e+00]] [[ 2.04151556e-01 1.23792601e+00 3.91524315e-01 ... 1.39374244e+00 1.54761326e+00 1.31956947e+00] [ 1.11059880e+00 9.06667352e-01 8.76233101e-01 ... 
1.64381635e+00 4.59976405e-01 1.35017407e+00] [ 3.27994078e-01 1.09730124e+00 1.63686764e+00 ... 4.68569607e-01 8.45453322e-01 1.15056980e+00] ... [ 5.27644277e-01 1.39649749e+00 1.57333291e+00 ... 1.12415969e+00 8.08193982e-01 4.11778033e-01] [ 1.58421743e+00 7.24029243e-01 5.06722927e-01 ... 1.21841538e+00 1.65434778e+00 1.14504659e+00] [ 8.78003418e-01 8.72110009e-01 2.20637608e+00 ... 4.81882006e-01 2.77735740e-01 1.37700713e+00]]] [[[ 1.10548210e+00 3.92779231e-01 9.82642710e-01 ... -2.70211786e-01 6.17647052e-01 1.49116039e+00] [ 1.19492960e+00 7.64055729e-01 1.70629609e+00 ... 6.79498971e-01 1.16066122e+00 1.47061434e-03] [ 9.97795224e-01 6.21797442e-01 9.91344512e-01 ... 5.07929325e-01 1.05090392e+00 1.70610559e+00] ... [ 4.99091893e-01 2.84187078e-01 -9.09909248e-01 ... 1.61041558e+00 1.46101892e+00 -4.63808626e-01] [ 2.11639795e-02 1.85914576e+00 9.15577173e-01 ... 4.75518554e-01 1.51665127e+00 3.37604463e-01] [ 2.08214566e-01 -3.38087119e-02 5.41004121e-01 ... 1.30390906e+00 -1.02719694e-01 1.17292929e+00]] [[ 3.31567749e-02 1.09219623e+00 5.20290613e-01 ... 3.38689506e-01 -3.48332793e-01 8.50180507e-01] [ 1.13628340e+00 2.84610987e-01 4.32507962e-01 ... 5.21082819e-01 3.67741853e-01 1.28481448e+00] [ 6.57480434e-02 1.50793821e-01 3.58778924e-01 ... 3.90310287e-01 2.00006906e-02 1.97129026e-01] ... [ 4.51616079e-01 -5.86526334e-01 3.59137714e-01 ... -5.34245491e-01 7.38229334e-01 -8.68091464e-01] [ 6.71914995e-01 3.46392334e-01 2.16472924e-01 ... 8.72561634e-01 6.74412906e-01 4.51322317e-01] [ 1.24492812e+00 1.41308880e+00 1.31572872e-01 ... -6.85436249e-01 -7.75916353e-02 1.65987432e+00]] [[ 1.25220907e+00 -6.00654423e-01 1.05355442e+00 ... 1.72797525e+00 1.13470876e+00 8.53754431e-02] [ 3.03568393e-01 8.52263629e-01 7.52798557e-01 ... 3.26859295e-01 2.20905900e-01 1.84789941e-01] [ 8.27493489e-01 4.32391435e-01 5.55512011e-01 ... 4.89359468e-01 4.02562916e-01 7.87993908e-01] ... [ 1.17389955e-01 1.53801680e+00 7.67788172e-01 ... 
4.99456882e-01 4.01718587e-01 1.49676394e+00] [ 3.35058987e-01 1.22351035e-01 1.39413849e-01 ... 1.82016802e+00 1.08824611e+00 -5.77877834e-02] [ 2.70830423e-01 6.46856189e-01 1.77782762e+00 ... -9.75832343e-01 1.14755964e+00 1.33364987e+00]] ... [[ 6.92190826e-01 -6.93101466e-01 1.72900796e+00 ... 1.87462457e-02 -4.77895923e-02 1.44012034e+00] [ 1.28812969e+00 1.29560903e-01 -1.99978009e-01 ... 8.06917250e-01 1.12842262e+00 -4.54426080e-01] [-7.58266985e-01 9.93032336e-01 -7.55369887e-02 ... 8.05215120e-01 -3.39222074e-01 7.12894380e-01] ... [ 5.39203048e-01 3.25973779e-01 1.35169542e+00 ... 3.76864851e-01 8.26019883e-01 4.81641591e-01] [ 6.41896963e-01 2.46575266e-01 1.34944189e+00 ... 9.49913085e-01 8.83316159e-01 2.84457535e-01] [ 3.90566140e-01 9.46116209e-01 -2.49923334e-01 ... 8.42613518e-01 1.35065234e+00 1.36581808e-01]] [[ 9.36403453e-01 1.38373005e+00 -7.04019368e-01 ... -3.30177665e-01 -2.02165261e-01 4.37114656e-01] [-1.45264482e-02 1.26395094e+00 5.17695546e-01 ... 1.48444563e-01 3.84261280e-01 -1.55948770e+00] [ 1.16918170e+00 7.46897817e-01 8.40581536e-01 ... 1.79361165e-01 -6.26640618e-01 7.91576982e-01] ... [ 1.02125669e+00 -1.11668304e-01 4.59440112e-01 ... 9.57850635e-01 2.25987002e-01 5.74390769e-01] [-3.42717618e-01 -7.28682280e-02 3.58254939e-01 ... -7.31733680e-01 3.24784100e-01 7.50833094e-01] [ 8.24605107e-01 1.18706787e+00 4.96870786e-01 ... 5.21478295e-01 -7.25753531e-02 6.31215990e-01]] [[ 1.66742432e+00 -3.50670904e-01 1.48437881e+00 ... 3.47961336e-01 3.72923046e-01 7.11239040e-01] [ 5.50459266e-01 3.51874679e-01 1.48462981e-01 ... 8.82157445e-01 1.93172705e+00 2.69354761e-01] [ 1.33031511e+00 -1.23733036e-01 2.85358783e-02 ... -6.84734046e-01 4.95254099e-01 4.63054955e-01] ... [ 1.24778974e+00 4.19114560e-01 -5.09972721e-02 ... -8.26106310e-01 3.53121400e-01 3.63681555e-01] [ 9.37964678e-01 6.19166195e-01 1.50416449e-01 ... 9.41360772e-01 1.50721622e+00 1.01680803e+00] [ 3.00531238e-01 1.58033895e+00 5.44218421e-01 ... 
3.59575242e-01 1.76488197e+00 7.60250464e-02]]] [[[ 3.38697493e-01 6.22006416e-01 4.14643168e-01 ... 2.42110118e-01 -4.56022359e-02 1.48656353e-01] [ 3.57491970e-01 2.76620150e-01 3.82016450e-01 ... 2.32777387e-01 1.26687363e-01 3.92011553e-01] [-3.71911675e-02 1.87040716e-01 2.65859097e-01 ... 2.73252744e-02 2.89757431e-01 1.75684601e-01] ... [ 5.17858416e-02 -4.10721749e-01 -1.84731498e-01 ... 6.10853553e-01 4.60963190e-01 2.24313319e-01] [ 9.68352035e-02 -1.28917217e-01 3.79953355e-01 ... 2.67273247e-01 7.01434910e-01 7.49090850e-01] [ 5.62024176e-01 6.23555761e-03 5.64660192e-01 ... 5.03934205e-01 3.81401300e-01 5.06042719e-01]] [[-3.11345737e-02 4.18649800e-02 6.42312527e-01 ... 1.18825302e-01 2.82996953e-01 2.66445965e-01] [ 8.37646842e-01 3.81521165e-01 -9.12457556e-02 ... 4.27562833e-01 -4.73959655e-01 2.40854949e-01] [-7.93344826e-02 5.02344310e-01 -5.56205437e-02 ... 5.34885466e-01 4.98927921e-01 5.92343986e-01] ... [ 3.52311611e-01 -2.11279038e-02 -1.64634019e-01 ... -1.84626967e-01 -2.14117616e-01 5.42767830e-02] [ 3.13798219e-01 1.17552847e-01 1.88253075e-01 ... -3.86587858e-01 4.57983911e-01 4.88644093e-01] [-1.03443898e-01 2.49683350e-01 -2.49499202e-01 ... 2.71851700e-02 1.92011818e-01 1.72083646e-01]] [[ 4.55090195e-01 1.96851611e-01 8.11889172e-02 ... -9.73501522e-03 2.01959491e-01 4.95183259e-01] [ 4.98715863e-02 -3.80714163e-02 1.54519409e-01 ... 1.42799050e-01 -1.27341494e-01 -1.05441041e-01] [ 1.79214925e-01 -9.80952755e-02 1.41235098e-01 ... -1.81400418e-01 4.25326139e-01 -7.64717301e-03] ... [-3.84986512e-02 1.70673981e-01 9.55634296e-01 ... -3.10996026e-01 2.38501340e-01 6.94495499e-01] [-2.09379390e-01 2.49258682e-01 1.00627708e+00 ... -3.81055084e-04 4.20796638e-03 7.41583347e-01] [ 4.11422312e-01 -3.91205639e-01 2.03994468e-01 ... 2.67245173e-01 5.42548299e-01 -4.20031875e-01]] ... [[ 5.32006562e-01 -3.49175274e-01 1.47214741e-01 ... -2.42603734e-01 2.69155264e-01 3.31571907e-01] [-3.64119917e-01 1.69852883e-01 5.18534184e-01 ... 
-1.50156960e-01 1.13125682e+00 6.39515638e-01] [ 2.25526333e-01 2.36681789e-01 5.03498554e-01 ... 3.37392986e-01 8.55703592e-01 1.10269986e-01] ... [ 8.96920800e-01 2.06777960e-01 -2.18897894e-01 ... 1.93770811e-01 7.26740658e-02 -1.64221480e-01] [ 2.99793333e-01 3.44447613e-01 5.36955178e-01 ... 5.53608656e-01 5.39049685e-01 1.35100588e-01] [-7.43285641e-02 3.98048341e-01 8.17644179e-01 ... -3.39425504e-01 4.51602340e-01 1.55479997e-01]] [[ 3.61157089e-01 3.32715273e-01 2.64290899e-01 ... 1.21071219e-01 3.65156651e-01 2.05547169e-01] [ 7.15223074e-01 7.91773647e-02 3.36168230e-01 ... 1.33934587e-01 6.02064282e-02 7.99992681e-01] [ 2.22342446e-01 -1.30955651e-01 6.63480341e-01 ... -1.66908830e-01 -1.14131242e-01 1.84826881e-01] ... [ 3.23762000e-01 7.54338205e-01 6.44141912e-01 ... -1.80407479e-01 2.00015858e-01 4.72156882e-01] [ 2.43640170e-01 6.08839214e-01 -1.77294016e-01 ... 3.93379748e-01 3.62482518e-01 9.22313035e-01] [ 7.15972781e-01 3.34541231e-01 3.09031099e-01 ... -2.65030712e-01 4.57777649e-01 7.61003494e-02]] [[ 5.28795004e-01 1.40287012e-01 2.99600482e-01 ... 3.20430934e-01 -2.04894081e-01 6.45731211e-01] [ 2.82263994e-01 -5.61067201e-02 3.08749467e-01 ... 3.86734128e-01 -7.57200494e-02 3.00414115e-01] [ 2.07079515e-01 6.22406006e-01 1.63938493e-01 ... 4.06718820e-01 -9.64290574e-02 4.52501811e-02] ... [ 3.90971541e-01 2.67378628e-01 2.73268640e-01 ... 6.19216919e-01 2.62970515e-02 4.00708795e-01] [-1.88525990e-02 4.48022097e-01 8.20047498e-01 ... -4.89664963e-03 2.99862564e-01 1.29636496e-01] [ 2.79260755e-01 -4.55706492e-02 2.28220791e-01 ... 1.44838290e-02 1.67494431e-01 4.47135359e-01]]]] [[[[-1.23068130e+00 1.88093245e-01 -4.40010250e-01 ... -2.98540682e-01 -8.54672492e-01 2.78706670e-01] [ 5.89729071e-01 -1.03262508e+00 -5.53368688e-01 ... -3.52180712e-02 1.64441779e-01 -1.01671553e+00] [-8.14222574e-01 -8.07827935e-02 1.00298285e-01 ... 8.57588053e-01 3.66659373e-01 -7.02912271e-01] ... [-2.71580309e-01 -3.32247205e-02 -1.15730739e+00 ... 
4.82158005e-01 -5.35133243e-01 -7.62715399e-01] [-2.54062593e-01 -1.85395733e-01 -9.06339049e-01 ... 4.92858350e-01 -3.13508451e-01 1.61275283e-01] [ 8.31412673e-02 1.43168315e-01 6.59752965e-01 ... -7.16845334e-01 -2.75527835e-01 -1.83055595e-01]] [[ 1.25629812e-01 1.09439284e-01 -1.52233079e-01 ... -2.50863656e-02 -3.05271238e-01 -8.62599134e-01] [-1.19297373e+00 8.31008017e-01 -1.61741841e+00 ... -1.42555416e+00 -1.10452995e-01 5.27983844e-01] [-9.73629892e-01 -8.68615285e-02 -4.18224484e-01 ... -6.77724123e-01 -1.59800470e-01 -3.16363245e-01] ... [ 1.88624382e-01 2.13777587e-01 9.26315546e-01 ... -5.89408994e-01 1.25783920e-01 -8.84523392e-01] [-4.23111796e-01 -7.67353475e-01 -1.20532043e-01 ... 4.05798435e-01 -1.81121305e-01 4.56761539e-01] [-4.87782001e-01 -3.85336548e-01 -4.41254914e-01 ... 1.63041279e-01 -6.57579780e-01 -1.07918072e+00]] [[-1.35535204e+00 -5.17342508e-01 -1.03460085e+00 ... -2.33838364e-01 -6.88094720e-02 -9.62262452e-01] [ 9.89293635e-01 1.61901396e-02 -3.74105275e-01 ... 1.55957462e-02 -7.01233804e-01 -8.55090678e-01] [-5.15225470e-01 1.30096316e-01 -4.44857925e-01 ... -7.82831669e-01 4.27962005e-01 -1.94608063e-01] ... [-8.29414546e-01 -3.55589688e-01 -6.81887567e-01 ... 6.68368816e-01 -9.65689659e-01 -9.89020914e-02] [ 1.26769245e-01 -1.04852283e+00 -1.38568074e-01 ... -7.92434752e-01 4.24860835e-01 1.99041933e-01] [-5.37009120e-01 2.77653635e-01 -4.91761090e-03 ... -3.60133380e-01 -8.73258233e-01 1.26743391e-01]] ... [[ 2.13199869e-01 3.09908032e-01 8.43637645e-01 ... 5.46916246e-01 -1.75237469e-02 -8.38734135e-02] [ 2.79171050e-01 3.12848896e-01 3.12923312e-01 ... 2.43574306e-01 1.00358737e+00 6.13186434e-02] [ 2.66059637e-02 8.05634797e-01 3.69457752e-01 ... 7.64103308e-02 2.73026347e-01 4.74944592e-01] ... [ 3.89501423e-01 -3.86439979e-01 -2.63328224e-01 ... 1.96345318e-02 -4.65817690e-01 -5.36352508e-02] [ 6.02378659e-02 -7.56778419e-01 -1.24703273e-01 ... 
8.15090537e-01 -4.04527128e-01 -1.02289510e+00] [-5.69271088e-01 6.35092735e-01 -2.48073973e-02 ... -5.38311228e-02 -1.45225421e-01 -7.10865557e-01]] [[ 3.33045632e-01 -3.73088568e-01 8.21913183e-01 ... -3.40637565e-01 -2.62333870e-01 4.18772288e-02] [-7.01782405e-01 -5.07720709e-01 7.50034332e-01 ... 4.91361797e-01 7.68873096e-01 -1.97017655e-01] [-2.75031954e-01 -2.31906809e-02 -4.21558827e-01 ... 7.39309728e-01 -3.70718718e-01 -5.98105907e-01] ... [-5.62916577e-01 -6.60169601e-01 -9.53510284e-01 ... -5.28732061e-01 -5.77528715e-01 8.09554458e-02] [-1.28843084e-01 -8.47394168e-01 5.16374409e-01 ... -4.09815580e-01 9.17551339e-01 -7.57419765e-01] [ 2.41324514e-01 -7.03206837e-01 -2.93787658e-01 ... -3.74244034e-01 -9.22916114e-01 7.13111222e-01]] [[-4.15748090e-01 5.56234300e-01 -9.06744182e-01 ... -4.63762820e-01 -5.15878022e-01 -2.58582145e-01] [ 4.99082148e-01 -8.73776376e-01 5.53751588e-01 ... -9.87229198e-02 -7.72214293e-01 -6.66932166e-01] [-5.80845237e-01 -3.33120763e-01 2.18802214e-01 ... 2.31924340e-01 -3.59781757e-02 2.75727302e-01] ... [-5.16869962e-01 7.92586565e-01 -5.96619785e-01 ... -1.16644509e-01 -8.72997582e-01 -3.80275875e-01] [-5.43183386e-01 -7.05914676e-01 1.20980352e-01 ... -8.69826823e-02 -2.36034781e-01 1.41570985e-01] [ 1.10321185e-02 1.38723403e-01 4.72566217e-01 ... 1.09550548e+00 -7.65803218e-01 -1.38902330e+00]]] [[[-2.57685244e-01 2.67799348e-01 -5.82388937e-01 ... 2.38182843e-01 -5.33631206e-01 -6.73993900e-02] [-6.95756003e-02 -9.83266979e-02 -7.90302515e-01 ... -4.56099331e-01 -8.61036852e-02 6.69960976e-02] [ 5.05924262e-02 4.17866141e-01 -5.58500648e-01 ... -9.44930255e-01 1.20144941e-01 -3.88592690e-01] ... [-3.40638787e-01 -8.73318195e-01 -1.41079342e-02 ... -5.34676492e-01 -9.05929625e-01 -6.70191824e-01] [-3.86852652e-01 -8.92181098e-01 -7.27496564e-01 ... -1.70996994e-01 5.12994587e-01 1.61840335e-01] [-1.96828395e-01 -1.81203306e-01 -9.82951596e-02 ... 
-2.70647585e-01 3.37578475e-01 5.70873320e-01]] [[-8.19735110e-01 -3.33952606e-01 2.25188181e-01 ... -4.61997926e-01 -2.09938273e-01 -3.36357445e-01] [-8.89634192e-01 -1.82319149e-01 -5.53919435e-01 ... 4.36536968e-01 -4.61911798e-01 -1.37828305e-01] [ 3.59015346e-01 2.11066693e-01 -6.22119904e-01 ... -9.70634520e-01 -1.29464149e-01 -7.72790909e-02] ... [ 5.71716368e-01 -4.65588383e-02 -2.53028572e-01 ... 8.70563164e-02 8.67163613e-02 -2.06023127e-01] [-9.50848237e-02 7.05582127e-02 2.94767320e-01 ... -3.15781862e-01 -3.95287797e-02 -1.99116185e-01] [-4.29416031e-01 -3.27678680e-01 5.07872939e-01 ... -6.89073443e-01 3.19696516e-01 -4.91385236e-02]] [[-7.01849818e-01 1.25403460e-02 -3.33212823e-01 ... 1.28997058e-01 -2.71154881e-01 5.74512780e-01] [-4.85135049e-01 2.99197435e-03 2.46023774e-01 ... -4.07790482e-01 -3.67501080e-01 4.22841460e-01] [ 4.15832013e-01 -5.26164949e-01 -1.20307311e-01 ... 4.60352242e-01 -1.92536429e-01 8.67103860e-02] ... [ 3.35837305e-01 1.81148738e-01 4.65142190e-01 ... 2.50153869e-01 -7.85798550e-01 -5.19900098e-02] [ 1.82331219e-01 -9.74736333e-01 1.39343813e-01 ... 4.83742446e-01 5.35400026e-02 2.73260567e-02] [-3.87614548e-01 -6.92889988e-02 -2.34495655e-01 ... -1.40115857e-01 -1.35264844e-01 -2.24337518e-01]] ... [[-3.54471982e-01 -9.29062366e-01 4.98208314e-01 ... -2.51667172e-01 -5.92583120e-01 9.71143022e-02] [-4.92828697e-01 -1.01985884e+00 -1.66926295e-01 ... -4.89486068e-01 6.21683240e-01 -4.18763906e-02] [-5.38318932e-01 -3.48752856e-01 -4.75788921e-01 ... 1.13081612e-01 -4.45393741e-01 1.52385026e-01] ... [-5.74277788e-02 1.04015302e-02 -1.14597708e-01 ... -1.32946059e-01 1.46560788e-01 -1.82359032e-02] [-2.00860500e-01 -1.67562738e-01 -6.35758698e-01 ... 1.94813963e-02 -3.00728418e-02 -7.89396167e-01] [-3.03244680e-01 -5.15843093e-01 1.91857684e-02 ... -6.71425939e-01 -1.62735041e-02 -2.06667483e-01]] [[ 2.77468339e-02 -2.16817126e-01 -2.61119872e-01 ... 
-2.97958612e-01 -2.27704391e-01 -3.39681774e-01] [ 4.03696567e-01 -2.08891600e-01 1.94552511e-01 ... 5.62540628e-02 -1.99937135e-01 -3.03137660e-01] [ 2.40504384e-01 -5.47584236e-01 4.56729978e-02 ... -1.00496948e+00 -3.48033965e-01 1.78397804e-01] ... [-9.34776247e-01 2.92685013e-02 1.01248752e-02 ... -4.94771637e-02 -8.33188713e-01 1.92522705e-01] [-8.06267619e-01 -3.90797295e-02 4.63892043e-01 ... -5.17842770e-01 4.24488664e-01 4.42048341e-01] [-1.30106434e-01 -5.06964862e-01 -7.67228678e-02 ... -3.35875958e-01 5.80652952e-02 -8.12553763e-01]] [[-4.80956137e-01 1.41074225e-01 2.70861506e-01 ... -1.78492710e-01 1.49651632e-01 -3.90100449e-01] [-2.74112552e-01 5.13106406e-01 -5.89632273e-01 ... -1.34749818e+00 -3.24055880e-01 -8.79551828e-01] [ 6.29941404e-01 -1.18497849e-01 -7.38805354e-01 ... -2.37090439e-01 -5.69567919e-01 7.63789713e-02] ... [-3.41150492e-01 -1.77375287e-01 5.82285598e-03 ... -2.35798568e-01 4.33399379e-01 -4.70498502e-01] [-2.63590008e-01 -1.01305939e-01 3.63456398e-01 ... 5.66235065e-01 -1.40081108e-01 -4.61520970e-01] [-1.39400020e-01 -4.50094759e-01 4.17027287e-02 ... -1.87748879e-01 4.59542200e-02 -1.53748095e-01]]] [[[ 6.10813856e-01 1.58676660e+00 1.68699467e+00 ... 2.27831769e+00 2.00441861e+00 2.96856141e+00] [ 1.81771827e+00 2.44705105e+00 7.23320186e-01 ... 3.17382574e+00 6.58726931e-01 2.59119439e+00] [ 1.72304571e+00 1.26019013e+00 2.24588132e+00 ... -7.26314932e-02 1.37243676e+00 1.24674141e+00] ... [ 1.80698884e+00 3.83578509e-01 2.74820590e+00 ... -7.55298212e-02 1.35320771e+00 1.52209568e+00] [-3.05935413e-01 2.18890190e+00 1.08266175e+00 ... 2.40342426e+00 2.47302055e+00 2.33907652e+00] [ 3.96388412e-01 3.17804122e+00 8.95877481e-01 ... 2.83238006e+00 -1.92680848e+00 -3.71095657e-01]] [[ 8.95980775e-01 1.89161193e+00 1.35885954e+00 ... 2.16156673e+00 -5.97186089e-01 9.30869579e-01] [ 1.82169509e+00 1.51626682e+00 -5.50661743e-01 ... 
1.56438291e+00 2.08236599e+00 2.84405422e+00] [ 7.50523090e-01 1.81385148e+00 1.49133325e+00 ... 1.47944009e+00 3.53442717e+00 3.72761726e+00] ... [ 1.64783025e+00 1.44375086e+00 2.78602958e+00 ... 2.21834540e+00 6.25916541e-01 2.18229198e+00] [ 2.74137467e-01 1.38886690e+00 4.24981505e-01 ... 1.62338269e+00 1.09354150e+00 1.94755948e+00] [ 3.38302553e-01 1.57777166e+00 2.22190928e+00 ... 1.84011054e+00 1.58273327e+00 -5.63414752e-01]] [[ 2.20369053e+00 1.35434651e+00 8.53058159e-01 ... 4.87501651e-01 8.57184112e-01 1.17582178e+00] [ 3.24947405e+00 1.99742424e+00 1.15990794e+00 ... 1.39198601e+00 6.21407390e-01 8.30728948e-01] [-2.83099800e-01 1.43941355e+00 2.54639119e-01 ... -8.10357690e-01 2.43764710e+00 1.99708998e+00] ... [ 1.21990800e+00 1.10011816e+00 1.15361780e-01 ... -1.40797883e-01 1.64955783e+00 2.48129559e+00] [ 1.75651681e+00 3.89966226e+00 -5.59104204e-01 ... 3.43991905e-01 1.44783169e-01 2.30177665e+00] [ 2.05929255e+00 1.01643372e+00 4.64709550e-01 ... -1.54869890e+00 -8.97075832e-02 1.68569076e+00]] ... [[ 1.48563564e+00 1.35702705e+00 3.47857857e+00 ... 2.08249831e+00 2.41077161e+00 2.36439133e+00] [ 1.85916209e+00 2.15825534e+00 1.26811576e+00 ... 2.49189228e-01 1.66118395e+00 1.25714600e+00] [-3.11858982e-01 1.01095987e-02 3.36155951e-01 ... 2.35837030e+00 9.15968060e-01 8.99947941e-01] ... [ 2.50066966e-01 1.86040366e+00 1.84750092e+00 ... 1.34197927e+00 -4.19286966e-01 2.20532703e+00] [-6.67290509e-01 1.89322090e+00 2.59938407e+00 ... -3.92808527e-01 3.14402890e+00 1.70165682e+00] [ 3.51209688e+00 3.36851299e-01 7.09655762e-01 ... 1.38875699e+00 1.00400424e+00 6.21491790e-01]] [[ 8.99691641e-01 -3.25267836e-02 1.49398506e+00 ... 8.23757723e-02 2.35062790e+00 2.52072573e+00] [ 1.06360602e+00 1.38771206e-01 2.18612075e+00 ... 1.61426437e+00 1.84562540e+00 2.54765296e+00] [ 3.17392230e-01 -7.50674307e-01 9.31805432e-01 ... 3.24953008e+00 7.90488839e-01 2.97821140e+00] ... [ 1.99288204e-01 1.91103899e+00 3.71137261e+00 ... 
-1.99094081e+00 1.22939181e+00 2.00847816e+00] [ 2.98603845e+00 2.17078876e+00 1.91719577e-01 ... 2.26095986e+00 2.35311413e+00 8.13613310e-02] [ 3.24485350e+00 2.79781485e+00 2.22250056e+00 ... 1.93520582e+00 1.38664424e+00 -9.87551153e-01]] [[ 1.07146478e+00 8.77224356e-02 1.18316948e+00 ... 2.09909487e+00 -6.64112568e-01 -3.64706963e-01] [ 1.43208325e+00 5.40247858e-02 1.30744922e+00 ... -1.43442124e-01 2.17760205e-01 5.04051089e-01] [ 1.55672383e+00 6.77472949e-01 1.48014516e-01 ... 1.62493303e-01 1.27131522e+00 -5.58758453e-02] ... [ 5.00781119e-01 2.23409009e+00 3.90020519e-01 ... 1.77967119e+00 8.41472268e-01 2.31264424e+00] [ 2.98310208e+00 1.40387225e+00 1.08403957e+00 ... 4.03760254e-01 7.73543656e-01 3.83756101e-01] [ 1.43117309e+00 2.06132364e+00 1.77407241e+00 ... 7.03058600e-01 1.37573934e+00 1.75278917e-01]]] [[[ 7.47020006e-01 1.21808112e+00 1.89808202e+00 ... 1.00204659e+00 1.68537557e+00 1.39955592e+00] [ 1.64970779e+00 1.68266296e+00 8.87738049e-01 ... 6.81703806e-01 2.04171681e+00 1.76722336e+00] [ 9.23130691e-01 1.50000870e+00 7.80378640e-01 ... 1.28726792e+00 9.84893262e-01 1.05688143e+00] ... [ 1.75333655e+00 1.27537251e+00 1.41541135e+00 ... 1.25561619e+00 1.65932155e+00 9.97009099e-01] [ 1.12578428e+00 1.07419086e+00 2.89118171e-01 ... 1.45520544e+00 4.97301668e-01 1.83869624e+00] [ 9.40947056e-01 1.14570117e+00 1.83327031e+00 ... 1.28245199e+00 9.82070386e-01 2.25857425e+00]] [[ 1.22962034e+00 2.05420089e+00 1.18312418e+00 ... 9.22661960e-01 1.50122273e+00 -7.13525945e-03] [ 6.24908149e-01 1.42520100e-01 1.74496007e+00 ... 1.02248788e+00 1.54375255e+00 2.05528808e+00] [ 8.96431446e-01 1.96536100e+00 6.66607916e-01 ... 1.57131982e+00 1.32527435e+00 8.28005791e-01] ... [ 7.71147251e-01 7.05283523e-01 5.12342572e-01 ... 1.12432465e-01 7.36827791e-01 1.31089950e+00] [ 2.06832170e+00 8.35543454e-01 1.25161934e+00 ... 5.68449199e-01 2.08501250e-01 1.40819371e+00] [ 1.09594083e+00 1.78137672e+00 1.48089147e+00 ... 
1.08367646e+00 5.80380499e-01 5.46877742e-01]] [[ 9.39030170e-01 6.86188519e-01 1.11330342e+00 ... 5.68702042e-01 1.42422485e+00 9.50944662e-01] [ 1.55630326e+00 8.20369601e-01 8.49891067e-01 ... 9.73371387e-01 1.33443499e+00 1.61468554e+00] [ 5.86999953e-01 2.11885619e+00 1.28991425e+00 ... 1.17750669e+00 1.46843517e+00 9.34655130e-01] ... [ 5.29357076e-01 1.29040986e-01 1.86573517e+00 ... 7.15531766e-01 1.16893601e+00 1.99882102e+00] [ 1.61132145e+00 1.30575955e+00 1.51911521e+00 ... 1.13355041e+00 1.83235145e+00 1.02588224e+00] [ 1.75319806e-01 1.49823821e+00 1.01353300e+00 ... 4.80643451e-01 8.75375271e-01 2.07058430e+00]] ... [[ 1.14420021e+00 1.16231966e+00 1.25169563e+00 ... 5.79512298e-01 1.45159030e+00 1.60369754e+00] [ 5.61186910e-01 1.89433026e+00 1.43783343e+00 ... 1.30070210e+00 2.14479518e+00 1.19871283e+00] [ 1.27903938e+00 1.40128720e+00 1.08233619e+00 ... 1.03151357e+00 6.79816127e-01 9.16484177e-01] ... [ 1.52408338e+00 -1.48949429e-01 7.51828849e-01 ... 9.35869336e-01 1.61786675e+00 1.04227400e+00] [ 1.20772290e+00 1.16561162e+00 1.66162336e+00 ... 1.86708403e+00 3.58033985e-01 1.44206882e+00] [ 8.26080382e-01 6.82426274e-01 1.43888509e+00 ... 1.64525425e+00 1.13277090e+00 8.85894716e-01]] [[ 1.41329944e+00 1.07327330e+00 1.23395646e+00 ... 1.32663476e+00 1.92190617e-01 3.82749021e-01] [ 7.43639708e-01 9.07494843e-01 2.35536957e+00 ... 1.10678160e+00 1.25008392e+00 9.69941318e-01] [ 1.46289289e+00 8.06695282e-01 6.94177389e-01 ... 9.32332158e-01 1.35298580e-01 1.21161628e+00] ... [ 1.54843998e+00 6.79309368e-01 6.45827472e-01 ... 4.16936636e-01 1.51711833e+00 1.28666735e+00] [ 9.13716435e-01 8.39982092e-01 1.22557652e+00 ... 1.70616806e+00 1.33747244e+00 1.01883125e+00] [ 2.77332395e-01 1.52310979e+00 1.54927433e+00 ... 7.17522442e-01 1.44383073e+00 1.49646616e+00]] [[ 1.04474664e+00 2.06136078e-01 1.64217830e+00 ... 7.13893950e-01 1.45662272e+00 2.11263776e+00] [ 1.00636935e+00 1.33574855e+00 1.28988636e+00 ... 
7.80770957e-01 1.17426658e+00 5.90600133e-01] [ 9.60768938e-01 1.18724275e+00 8.10092211e-01 ... 2.04237986e+00 2.46570483e-01 6.44436240e-01] ... [ 1.08178413e+00 1.62134433e+00 1.71484694e-01 ... 9.87999439e-01 1.86863530e+00 2.71431237e-01] [ 1.07920194e+00 1.31355596e+00 1.64792919e+00 ... 1.49690080e+00 7.45610237e-01 1.53102112e+00] [ 4.19958383e-01 6.29358053e-01 8.05809677e-01 ... 1.23176575e+00 1.62949264e+00 1.23039758e+00]]] [[[ 5.78871906e-01 1.13488150e+00 8.07689428e-01 ... 2.09691331e-01 5.44647813e-01 -1.24915391e-01] [ 9.34459090e-01 -6.09950498e-02 1.51086688e+00 ... 5.81775963e-01 -3.03156134e-02 5.05181313e-01] [ 6.13594830e-01 6.69457257e-01 4.58270222e-01 ... 1.36108387e+00 2.98753589e-01 1.19903892e-01] ... [ 1.10599506e+00 -2.28656054e-01 -1.13401495e-01 ... -4.47221369e-01 2.48370841e-01 4.05697644e-01] [ 9.12394524e-01 1.28055882e+00 5.97740591e-01 ... 1.86652049e-01 5.93476534e-01 3.22341263e-01] [-3.02258611e-01 -1.24886014e-01 1.30774879e+00 ... 7.99488246e-01 9.72204864e-01 1.46776894e-02]] [[ 1.38114882e+00 1.13235426e+00 2.36219525e-01 ... 1.20935880e-01 4.84131336e-01 1.10400736e+00] [ 1.70080101e+00 7.78827190e-01 1.13848758e+00 ... -1.16347924e-01 1.21810925e+00 8.86080265e-02] [-6.53054655e-01 4.08375233e-01 1.17663109e+00 ... -3.30787241e-01 8.17001611e-02 1.02006984e+00] ... [ 3.32052782e-02 1.33463240e+00 4.96309966e-01 ... 7.46655762e-01 6.19917154e-01 -8.83332491e-01] [ 4.94488657e-01 -3.21651012e-01 -1.23396292e-01 ... 3.82148117e-01 4.32208091e-01 -9.80260849e-01] [ 1.24037556e-01 2.86038309e-01 6.88437223e-01 ... 6.35048628e-01 4.79446985e-02 5.26486874e-01]] [[ 5.53780913e-01 5.97363889e-01 2.16900647e-01 ... -5.61772823e-01 3.33644480e-01 -3.89650129e-02] [ 7.28566468e-01 4.07872438e-01 9.47005093e-01 ... 1.37684560e+00 2.32974067e-01 1.82541144e+00] [ 8.66958976e-01 1.35985920e-02 8.69138300e-01 ... -6.42250944e-03 1.84173167e-01 -5.26226731e-03] ... [-7.14965165e-01 -3.04872245e-01 6.06101573e-01 ... 
-4.73759055e-01 3.57104056e-02 9.29727376e-01] [ 3.84045005e-01 7.73308992e-01 5.55463910e-01 ... 1.06863117e+00 1.20186830e+00 -5.45459032e-01] [-1.82441548e-01 8.70835245e-01 2.86704272e-01 ... 2.00266077e-04 -2.66442925e-01 1.14396229e-01]] ... [[-1.19870782e-01 5.20823956e-01 3.94954979e-02 ... 9.04052317e-01 6.61783457e-01 3.72550786e-01] [-6.49297535e-02 1.12154961e+00 5.64291216e-02 ... 1.45204341e+00 1.00464511e+00 1.16371465e+00] [ 9.67358053e-02 -1.56649396e-01 1.73212481e+00 ... 1.10384035e+00 -5.17518163e-01 3.52197856e-01] ... [-6.36932254e-01 1.70708001e-01 -4.19012010e-01 ... 1.93371272e+00 -2.66668409e-01 1.57157592e-02] [ 3.65167201e-01 5.13715267e-01 1.74127817e-01 ... 9.48245406e-01 5.45598149e-01 7.21835434e-01] [-6.28469825e-01 -6.48414254e-01 8.17166746e-01 ... -7.21528769e-01 1.05233347e+00 -6.38094664e-01]] [[-5.85242867e-01 2.84663796e-01 -9.52402413e-01 ... 3.21273357e-01 -3.84375900e-01 6.58541918e-01] [ 7.84005463e-01 3.32151443e-01 -4.87128288e-01 ... 4.93008226e-01 4.02817190e-01 6.72072887e-01] [-5.89179933e-01 -2.55046934e-02 2.76820213e-01 ... -7.48652935e-01 6.40257955e-01 1.04847932e+00] ... [ 1.77610278e+00 6.66320920e-01 -5.68466783e-02 ... 8.14798474e-01 3.15036416e-01 1.65140593e+00] [-4.86467406e-03 4.63056751e-02 2.69901484e-01 ... -2.07978994e-01 1.89045095e+00 -6.32632524e-02] [ 1.26756477e+00 1.18068445e+00 7.99327672e-01 ... 5.78215480e-01 7.82225072e-01 5.73975086e-01]] [[ 5.79369009e-01 6.20558560e-01 -2.59078622e-01 ... -2.03902811e-01 -1.10409141e-01 8.75211120e-01] [ 2.47102603e-01 -3.41586500e-01 7.33936548e-01 ... 5.14501095e-01 5.64958990e-01 -2.43958324e-01] [ 1.13147557e+00 -1.50858775e-01 8.50331485e-01 ... 1.54600978e-01 -3.75210106e-01 1.14208364e+00] ... [ 2.62431979e-01 3.24485272e-01 -5.04956901e-01 ... 2.74845093e-01 2.62748808e-01 3.81660730e-01] [ 6.17921293e-01 1.34648061e+00 -2.72353411e-01 ... 1.69221842e+00 9.66490030e-01 7.17945039e-01] [-6.51468560e-02 5.65891564e-01 1.21493316e+00 ... 
1.11729169e+00 3.79336625e-01 1.18532693e+00]]] [[[-8.01480096e-03 6.45684779e-01 -1.17440261e-01 ... 5.37638605e-01 5.76938748e-01 6.42397642e-01] [ 2.71928161e-01 -6.65142536e-02 8.97703320e-02 ... 3.88039589e-01 -1.20561056e-01 1.90464079e-01] [ 1.43691972e-01 7.77582347e-01 3.75438809e-01 ... 1.98101014e-01 3.17014962e-01 -1.63445011e-01] ... [ 5.84938228e-01 -2.79653251e-01 -1.09103240e-01 ... 5.86193025e-01 -1.41191706e-01 5.50081611e-01] [ 1.77088469e-01 -3.28683287e-01 3.28244656e-01 ... 4.87023965e-02 1.81208313e-01 1.14113120e-02] [ 4.28917617e-01 5.64471185e-01 1.11981861e-01 ... 8.27688634e-01 -3.23905170e-01 1.87384099e-01]] [[ 3.34082870e-03 1.53324343e-02 4.82400917e-02 ... 4.07893687e-01 1.02622545e+00 -1.97084963e-01] [-1.58333987e-01 2.68323123e-01 7.40707994e-01 ... -1.30225807e-01 8.13057899e-01 -2.60121465e-01] [ 4.49073523e-01 -1.59505028e-02 -8.71402442e-01 ... 3.09744745e-01 8.06504786e-01 4.74567652e-01] ... [ 6.94809258e-01 2.28699341e-01 -1.72607213e-01 ... 3.09572667e-01 -3.71226817e-01 -1.49887413e-01] [-1.15640499e-01 -1.22528560e-01 4.26438093e-01 ... -2.38229632e-01 -1.08570397e-01 2.99812108e-01] [-1.05997421e-01 2.45386168e-01 -2.51788676e-01 ... 3.56485337e-01 1.64030984e-01 8.50792825e-02]] [[-1.20299451e-01 6.42909288e-01 3.26860011e-01 ... 5.18794656e-01 2.09222615e-01 2.62302995e-01] [ 8.71400163e-02 5.09944499e-01 6.02408638e-03 ... 2.19259128e-01 -1.62098676e-01 2.50183463e-01] [-2.08383381e-01 -6.74811974e-02 5.24764061e-01 ... 2.81578243e-01 7.44908750e-01 6.02554202e-01] ... [ 2.34623685e-01 -3.67497951e-01 -1.80230126e-01 ... 5.95759392e-01 5.08250177e-01 2.53720246e-02] [ 6.46067858e-02 -3.25997055e-01 -6.25575185e-02 ... -5.24455272e-02 3.30108374e-01 6.74449325e-01] [ 4.78764832e-01 3.64529788e-01 4.97143418e-01 ... 3.70031327e-01 1.48632556e-01 -1.69191435e-01]] ... [[ 4.13352817e-01 1.89181089e-01 6.18992746e-01 ... -8.03755224e-02 4.46646243e-01 2.30802417e-01] [ 8.96984488e-02 9.67567980e-01 -5.29040933e-01 ... 
4.11583155e-01 5.98247126e-02 6.26372039e-01] [-8.50261003e-02 3.49108845e-01 7.27425739e-02 ... -5.23008406e-01 -1.77482113e-01 5.47597706e-02] ... [-3.12369227e-01 -1.49908110e-01 7.30919480e-01 ... 5.26243985e-01 3.72004628e-01 7.78202415e-01] [ 1.04005896e-01 1.10631026e-01 -7.28388950e-02 ... 1.29161105e-01 4.10283953e-01 -3.60870212e-01] [ 3.98624212e-01 -2.29479551e-01 -4.51657414e-01 ... -2.28308648e-01 -3.44705850e-01 7.04102755e-01]] [[ 5.90352595e-01 4.57708567e-01 1.35571465e-01 ... -1.44785702e-01 8.22434425e-01 -7.72838071e-02] [ 7.85636604e-01 4.83912200e-01 3.58166665e-01 ... 3.98910105e-01 2.51215547e-01 1.33919001e+00] [ 7.42724538e-01 2.75872767e-01 -5.23796976e-02 ... 9.18145955e-01 -7.71147087e-02 8.26857209e-01] ... [ 3.62103134e-01 3.89442220e-02 4.18330491e-01 ... 4.59500849e-01 2.17002317e-01 -3.04157883e-01] [-5.17511144e-02 1.56159446e-01 -9.84910280e-02 ... 2.35046268e-01 7.69620538e-02 7.04211295e-01] [-2.02155173e-01 5.57590306e-01 3.09926808e-01 ... 1.18567854e-01 -1.62969708e-01 2.77024835e-01]] [[ 2.03062683e-01 4.53341424e-01 -3.75568494e-02 ... 3.64117563e-01 1.50787294e-01 -2.08677262e-01] [ 3.83208156e-01 3.78164947e-01 -1.01214088e-01 ... 1.28661931e-01 7.81135410e-02 1.65387467e-02] [-8.35626051e-02 8.04976344e-01 -1.78699687e-01 ... 2.01091930e-01 8.95490050e-02 2.29003668e-01] ... [-3.11793596e-01 2.39649653e-01 6.12401187e-01 ... -7.36698359e-02 -5.74764311e-01 3.03599775e-01] [ 7.74747491e-01 -9.02207941e-02 3.21545452e-01 ... 3.21186721e-01 1.37203395e-01 1.00272208e-01] [ 4.68544304e-01 4.53327566e-01 7.46821687e-02 ... -8.61245692e-02 -4.01107579e-01 2.96205189e-03]]]] [[[[-8.36479589e-02 -7.18498647e-01 7.50412643e-01 ... 1.31584382e+00 -3.44377682e-02 -4.79983479e-01] [-5.10102332e-01 -5.34875751e-01 -3.16716373e-01 ... 8.18414271e-01 -4.44810778e-01 7.10259914e-01] [-4.16979432e-01 2.81882823e-01 -5.97640038e-01 ... 6.85848296e-01 7.77458698e-02 -1.25073814e+00] ... [ 1.20005704e-01 -1.18870124e-01 1.41584173e-01 ... 
-2.17254728e-01 1.84199810e-01 -1.30021536e+00] [ 1.02496624e+00 1.69843689e-01 1.31620741e+00 ... -6.80092394e-01 -9.76758957e-01 -4.54272449e-01] [ 3.09662193e-01 5.64104676e-01 -1.11489095e-01 ... -1.49752152e+00 -1.39322191e-01 3.21175605e-01]] [[-1.10363789e-01 3.61001045e-01 3.54158312e-01 ... 4.46321845e-01 -1.07987046e+00 -6.42294466e-01] [ 4.77520913e-01 -3.84714097e-01 6.84133247e-02 ... 1.56873297e-02 -5.96088767e-01 -3.01310867e-01] [-3.45205188e-01 -1.55624521e+00 -1.36965466e+00 ... 1.78349391e-01 -1.19367158e+00 -1.45092919e-01] ... [ 4.36181605e-01 -4.33648765e-01 -3.05699587e-01 ... -1.75794280e+00 -1.17565513e+00 4.93215680e-01] [-7.39787638e-01 -2.26339981e-01 -4.50680912e-01 ... -5.26562393e-01 5.21074295e-01 -5.89486361e-01] [ 9.40576613e-01 -3.02808285e-01 5.82779765e-01 ... -5.60745001e-01 1.43954587e+00 -4.22199041e-01]] [[-9.08637583e-01 -2.99702644e-01 -3.10693383e-01 ... -3.05917919e-01 1.30357170e+00 -2.37395748e-01] [-2.75395755e-02 -1.89249948e-01 -5.64758122e-01 ... -2.69256890e-01 3.64292920e-01 -7.23984182e-01] [-5.92620485e-02 1.08326286e-01 -4.63637024e-01 ... -2.76902348e-01 7.21107185e-01 3.07794005e-01] ... [ 4.13330972e-01 6.78122520e-01 -2.58725971e-01 ... -3.82560730e-01 -2.52303958e-01 1.19148660e+00] [-7.59293914e-01 -1.15790462e+00 -1.28575599e+00 ... -1.21970423e-01 -7.85894319e-02 1.53410271e-01] [ 8.62186193e-01 -6.73603833e-01 1.50778919e-01 ... -8.31869543e-01 6.77394748e-01 1.61034480e-01]] ... [[-3.26242805e-01 6.96299911e-01 -4.22525685e-03 ... 2.01558635e-01 -5.85274875e-01 2.42173523e-01] [-9.93214607e-01 -2.43439153e-01 -6.58784628e-01 ... 1.11933064e+00 -7.95096695e-01 1.69774368e-01] [-6.32080436e-01 4.34278876e-01 5.18082201e-01 ... 1.04428244e+00 -3.80777001e-01 4.34258074e-01] ... [-7.37336054e-02 4.20256883e-01 -3.47027630e-01 ... -2.40906790e-01 3.43569666e-01 4.02485311e-01] [-4.12019014e-01 2.30992422e-01 8.70752409e-02 ... 
6.20571792e-01 -2.05084682e-02 5.41869581e-01] [ 6.60157979e-01 4.88147408e-01 3.56254727e-01 ... -1.76991507e-01 2.49025539e-01 -1.55185565e-01]] [[ 5.44930875e-01 -3.69363219e-01 -6.08330965e-01 ... -1.67465970e-01 -3.45294625e-01 -7.53229111e-02] [-7.26530671e-01 -1.16293120e+00 -7.19229355e-02 ... 6.82207048e-01 9.47596133e-01 -6.98361695e-01] [-8.47537577e-01 -2.04839960e-01 3.60571891e-01 ... 6.24711692e-01 8.76474321e-01 -3.91836613e-02] ... [ 5.45323081e-02 -9.69835699e-01 4.61738147e-02 ... -5.18561780e-01 -1.10368311e+00 5.28118201e-02] [-9.89951566e-02 -5.04159927e-01 -8.98483932e-01 ... -7.80127585e-01 -6.31880164e-01 7.41464674e-01] [-2.34704196e-01 -3.19181263e-01 -6.49698153e-02 ... -9.34744716e-01 2.44956440e-03 9.58347023e-01]] [[ 4.98944342e-01 1.41750604e-01 -3.30802351e-01 ... 2.19281092e-01 3.04033477e-02 -3.39884795e-02] [-4.77490693e-01 2.87795991e-01 9.77005482e-01 ... -1.53282106e+00 -3.06709260e-01 4.21623170e-01] [-6.07539594e-01 -1.75101906e-01 -1.40928805e+00 ... -3.59609127e-01 -2.22010970e-01 -7.68479168e-01] ... [-6.81348741e-02 2.66032517e-01 -1.36592165e-01 ... 6.68618917e-01 -6.74239516e-01 -2.35719174e-01] [ 7.04849958e-01 -4.90228385e-02 6.71388984e-01 ... 7.74337709e-01 -1.79983571e-01 9.62456986e-02] [ 3.51594865e-01 -6.95241988e-01 1.86731428e-01 ... 3.19774836e-01 2.36772954e-01 7.30525613e-01]]] [[[ 4.09641236e-01 2.48426452e-01 -2.25756899e-01 ... 2.81503558e-01 1.21618330e-01 -1.35453627e-01] [ 1.76398978e-01 3.20909828e-01 3.60763103e-01 ... -5.61674953e-01 -2.99712002e-01 -1.02638245e+00] [-3.25173408e-01 -6.11812234e-01 -2.37083733e-01 ... -7.50090301e-01 -2.62570292e-01 -5.38373053e-01] ... [ 3.79291236e-01 -5.20371675e-01 -1.71339318e-01 ... -1.84491977e-01 2.35620663e-01 -1.16889603e-01] [-1.28592715e-01 -2.05737606e-01 -2.73500025e-01 ... -3.85959923e-01 -3.38869691e-02 1.77273646e-01] [-1.04812562e+00 1.46945551e-01 -4.85977173e-01 ... 
-4.81030762e-01 4.00772572e-01 -6.77683413e-01]] [[-5.58915555e-01 -7.68336475e-01 -2.98744321e-01 ... 2.07318529e-01 -6.86307013e-01 -3.20211887e-01] [-9.87227634e-02 4.66560423e-02 -6.57933950e-01 ... 2.91517854e-01 -1.30002409e-01 -2.58516371e-02] [ 5.98588109e-01 -3.81686181e-01 -7.67035484e-02 ... -8.24631006e-02 -3.24500769e-01 7.17241913e-02] ... [-1.25070408e-01 -6.92470133e-01 7.29746222e-01 ... -7.99096748e-02 -5.98043084e-01 -2.91373432e-01] [-2.04443365e-01 4.42221798e-02 8.72675776e-02 ... -1.94561362e-01 8.28160644e-01 -1.99392542e-01] [ 3.76326472e-01 -6.38588369e-01 -6.60599291e-01 ... -2.16614231e-01 -6.25563145e-01 -2.02477798e-01]] [[ 1.38822049e-01 2.07657143e-01 -7.98908830e-01 ... -5.21512806e-01 1.11485824e-01 8.70482773e-02] [ 2.38959610e-01 5.79389855e-02 -4.76937234e-01 ... -5.54760322e-02 -5.19215949e-02 -6.10158741e-01] [-2.03659534e-01 -3.10024559e-01 2.50733107e-01 ... 7.29653835e-01 -4.17320989e-02 -4.16526884e-01] ... [ 2.29619332e-02 -6.19365811e-01 -1.61695525e-01 ... -4.26098108e-01 -2.31212541e-01 1.99445665e-01] [-3.78807485e-01 -2.23109782e-01 -6.94539919e-02 ... 1.35654017e-01 -2.15135768e-01 1.48595884e-01] [-5.99821150e-01 -1.28620461e-01 -2.37303406e-01 ... -2.03026041e-01 -7.41285980e-01 -4.45801169e-01]] ... [[ 7.65199900e-01 -7.08189130e-01 -3.86637956e-01 ... 4.27663296e-01 5.44345319e-01 -7.91532844e-02] [-8.54534090e-01 -6.66138709e-01 3.64172041e-01 ... -2.07425386e-01 4.13345188e-01 -5.29321373e-01] [ 3.96982431e-02 3.42366338e-01 4.48280498e-02 ... -6.19222760e-01 -5.17999947e-01 6.64503053e-02] ... [ 2.38097802e-01 2.83991665e-01 -4.44663107e-01 ... -5.66521324e-02 4.83821958e-01 -3.08789968e-01] [-1.17098071e-01 2.02584341e-01 1.49326742e-01 ... -2.07661435e-01 1.55937850e-01 -5.14160275e-01] [-4.65280516e-03 5.94445467e-01 -3.00701261e-01 ... -1.72667488e-01 -4.68698621e-01 -4.40204620e-01]] [[-2.76004523e-01 5.61006606e-01 -7.30048239e-01 ... 
-2.49151677e-01 -3.29275280e-01 -5.05884849e-02] [-5.32277703e-01 1.87286392e-01 -5.80668271e-01 ... 3.25181484e-02 1.71050787e-01 -7.14336010e-03] [-9.73160118e-02 3.02746505e-01 2.27540098e-02 ... -8.43229145e-03 1.93821922e-01 5.75747013e-01] ... [-6.00975394e-01 -6.29500747e-01 -1.30030513e-01 ... 4.87812847e-01 2.26710618e-01 1.93293113e-03] [-5.79527877e-02 -4.29785065e-02 -3.24359387e-02 ... -3.40198696e-01 -2.27221593e-01 -9.67909873e-01] [ 5.77910066e-01 1.31422803e-01 3.91283989e-01 ... -1.77527010e-01 -9.49608862e-01 -4.76269156e-01]] [[-1.76862642e-01 2.99619287e-01 -6.58137575e-02 ... -5.20473003e-01 5.78765392e-01 -4.61695910e-01] [-9.02671088e-03 -2.27242876e-02 3.78726460e-02 ... 2.16284856e-01 1.59898490e-01 -2.03221217e-01] [-7.32248425e-01 -5.90908229e-01 -2.74865068e-02 ... -3.50983515e-02 -6.21304572e-01 3.93868238e-02] ... [-3.03202868e-01 6.72070444e-01 -3.95796895e-01 ... -7.04826474e-01 -7.28340507e-01 2.23672226e-01] [-6.83882415e-01 -7.84288421e-02 -7.56396204e-02 ... 3.94018590e-02 6.61340535e-01 1.09583944e-01] [ 5.55282116e-01 -1.72396645e-01 1.81446016e-01 ... -2.62014776e-01 2.05075443e-02 -1.74804002e-01]]] [[[ 1.53045022e+00 1.77136660e+00 1.35893154e+00 ... 2.32765126e+00 1.57982576e+00 -1.29202560e-01] [ 4.86640483e-01 2.04171419e+00 7.19316423e-01 ... 7.13521481e-01 -5.41589737e-01 4.06501722e+00] [-8.16995740e-01 3.14318120e-01 1.42069101e+00 ... 9.87061203e-01 1.08988023e+00 2.83609986e+00] ... [ 1.70188272e+00 8.31539035e-01 6.58623099e-01 ... 1.48456383e+00 4.05740291e-01 3.28268111e-01] [ 8.58875751e-01 4.01153743e-01 1.60279739e+00 ... 3.53222877e-01 1.31490457e+00 1.79817343e+00] [ 1.22677517e+00 1.34956956e-01 1.76623380e+00 ... -6.19288564e-01 1.22672427e+00 2.13066912e+00]] [[ 1.70678449e+00 -5.66966891e-01 1.98992729e-01 ... 3.60097080e-01 1.98146486e+00 2.45180583e+00] [ 3.71371388e+00 1.30979824e+00 8.40700448e-01 ... 8.21702480e-01 -2.44976208e-01 2.50977039e-01] [ 2.85848647e-01 8.21716189e-01 1.65797019e+00 ... 
-7.35746175e-02 2.71349001e+00 1.17325616e+00] ... [ 3.60520869e-01 -2.67629653e-01 1.03135943e+00 ... 6.59119964e-01 -5.27193904e-01 1.16399586e+00] [ 3.27276349e+00 7.25204706e-01 1.49028015e+00 ... 1.09319770e+00 8.96138370e-01 1.96553433e+00] [ 1.74426794e+00 3.07455111e+00 1.04678476e+00 ... 8.35419357e-01 2.26269341e+00 -7.95731485e-01]] [[ 7.70680010e-01 2.35594487e+00 1.33450651e+00 ... 2.94733375e-01 9.72888246e-03 1.18918288e+00] [ 9.98146474e-01 1.41978240e+00 1.50896966e+00 ... -1.15110266e+00 3.26309085e+00 1.71534014e+00] [ 1.06815767e+00 5.27847528e-01 1.74497497e+00 ... 6.71000898e-01 1.32337081e+00 1.70903906e-01] ... [ 2.16100860e+00 6.47536337e-01 3.23965311e-01 ... 1.77518392e+00 1.40067983e+00 1.04511726e+00] [ 9.40867901e-01 2.28345585e+00 3.20767164e+00 ... 2.07752085e+00 6.82893515e-01 5.55786371e-01] [ 1.48849094e+00 3.17125201e+00 1.80114305e+00 ... 8.26929927e-01 -5.31854182e-02 3.87739038e+00]] ... [[ 2.58930057e-01 1.85661185e+00 -3.92977521e-02 ... 2.51539254e+00 -1.64290786e+00 1.09042990e+00] [-1.99182379e+00 -1.93985417e-01 2.14167571e+00 ... 1.08317065e+00 -9.30087268e-01 6.51358843e-01] [-6.90183580e-01 2.23506546e+00 2.05948997e+00 ... 2.01130724e+00 -5.31136751e-01 5.26482701e-01] ... [ 2.41167650e-01 -3.33362222e-01 2.20798016e+00 ... 4.35560912e-01 4.61514425e+00 2.01745582e+00] [ 1.14595973e+00 1.14790094e+00 1.48491585e+00 ... 1.95755228e-01 -4.14963484e-01 3.57615805e+00] [-1.03228962e+00 2.85614300e+00 1.07016432e+00 ... 3.62367004e-01 3.73808406e-02 2.20816064e+00]] [[ 9.98716474e-01 1.91979635e+00 1.02827907e+00 ... -1.06194782e+00 6.19909286e-01 2.30551624e+00] [ 3.25869769e-01 2.77428293e+00 1.77665210e+00 ... 4.51677173e-01 2.02703428e+00 1.16519523e+00] [ 7.69105479e-02 -4.61340323e-02 1.93610990e+00 ... 4.47048903e-01 9.44814265e-01 1.40535903e+00] ... [ 1.56602991e+00 1.11930227e+00 -1.30642903e+00 ... -2.99453318e-01 1.08256154e-01 1.54745924e+00] [-9.53999758e-01 5.20436764e-01 1.79971671e+00 ... 
8.03709447e-01 1.76201212e+00 1.52739787e+00] [-8.47701311e-01 -4.48130190e-01 1.73820913e+00 ... -1.24857044e+00 1.09534597e+00 3.22117805e+00]] [[ 8.58062804e-01 6.05097413e-01 1.02105439e+00 ... 5.47335505e-01 1.58304656e+00 1.16237390e+00] [-1.64978459e-01 5.50842047e-01 2.10184836e+00 ... 1.02941239e+00 2.94104409e+00 1.21586323e+00] [ 1.08369482e+00 3.37534487e-01 8.02074254e-01 ... 3.38455701e+00 8.76943827e-01 8.43079627e-01] ... [ 3.01863074e+00 1.60564291e+00 1.34006572e+00 ... 2.44968414e+00 3.02241063e+00 1.13395679e+00] [ 1.51765287e+00 1.73966348e+00 -1.06243932e+00 ... -4.96780783e-01 4.94352460e-01 3.44407773e+00] [ 1.33399332e+00 2.07710576e+00 1.97012675e+00 ... 8.10286939e-01 1.56373262e+00 1.17790222e+00]]] [[[ 5.13429642e-01 1.39532018e+00 7.06296027e-01 ... -1.95163026e-01 2.64738250e+00 6.29136324e-01] [ 1.50012171e+00 1.12424695e+00 1.21088874e+00 ... 1.36162758e+00 1.53823578e+00 6.45989537e-01] [ 1.55378234e+00 1.02202237e+00 1.17264259e+00 ... 1.05539644e+00 1.89378572e+00 1.10993290e+00] ... [ 3.62406641e-01 2.07125974e+00 1.49007678e+00 ... 4.77646589e-01 1.82427537e+00 1.06086314e+00] [ 3.03682208e-01 2.25363779e+00 1.05357218e+00 ... 7.07755804e-01 1.03096116e+00 6.84140205e-01] [ 6.77554846e-01 9.35902953e-01 1.11489868e+00 ... 1.29843724e+00 1.27859569e+00 8.26779366e-01]] [[ 1.64872634e+00 1.55537736e+00 1.40727305e+00 ... -8.16927757e-03 1.41045523e+00 1.26557350e+00] [ 1.01964819e+00 9.31691527e-01 1.83058512e+00 ... 1.10543931e+00 1.00078559e+00 1.44506955e+00] [ 7.25174189e-01 1.73164368e+00 2.09261179e+00 ... 6.88084543e-01 8.93631041e-01 1.03905034e+00] ... [ 3.03673774e-01 1.77489209e+00 1.14232278e+00 ... 1.41472769e+00 9.18278396e-01 9.11916673e-01] [ 8.44357312e-01 1.01489389e+00 1.27702773e+00 ... 9.37179744e-01 1.93195641e+00 1.33513105e+00] [ 2.39524460e+00 7.46572077e-01 1.09841323e+00 ... 5.65786123e-01 9.95457828e-01 1.20679963e+00]] [[ 1.22175968e+00 8.62027705e-01 1.99027276e+00 ... 
8.85897458e-01 4.80134130e-01 6.81957245e-01] [ 1.81148863e+00 7.85464108e-01 1.39260519e+00 ... 7.60027587e-01 1.38932693e+00 1.09769940e+00] [ 1.27701437e+00 2.55644917e-01 9.89826560e-01 ... 1.62573647e+00 2.07901073e+00 7.87326336e-01] ... [ 1.35773373e+00 1.40340340e+00 2.29795277e-02 ... 2.00263357e+00 1.31819832e+00 2.35143006e-01] [ 1.63589370e+00 4.41401392e-01 9.60894108e-01 ... 7.98822761e-01 1.17925525e+00 1.37231851e+00] [ 1.06294167e+00 1.21965551e+00 1.05097353e+00 ... -3.53322402e-02 1.07279301e+00 1.55372512e+00]] ... [[ 1.38162935e+00 1.03341520e+00 -6.43860642e-03 ... 6.05165839e-01 5.10663033e-01 9.92055118e-01] [ 4.01724905e-01 4.87103999e-01 1.20788598e+00 ... 2.17416596e+00 5.29655755e-01 1.92422283e+00] [ 7.94277072e-01 1.26533759e+00 1.48943377e+00 ... 7.41010547e-01 1.05400991e+00 1.73413849e+00] ... [ 1.28823948e+00 1.72948575e+00 1.41747877e-01 ... 1.13846171e+00 -3.53719324e-01 1.04871368e+00] [ 1.15188670e+00 1.25702178e+00 8.78551841e-01 ... 7.37676620e-01 1.72015846e+00 1.73800647e+00] [ 1.55510187e+00 9.52651083e-01 1.86048496e+00 ... 9.05982375e-01 1.41589320e+00 8.09353352e-01]] [[ 1.44405222e+00 7.26913631e-01 1.70842421e+00 ... 1.39885747e+00 9.73637819e-01 1.01726222e+00] [ 1.13267994e+00 1.43093109e+00 1.52780068e+00 ... 1.37738109e+00 6.88825786e-01 1.04569066e+00] [ 9.09404397e-01 1.32871091e+00 3.66992384e-01 ... 1.15593076e+00 2.13053393e+00 7.59073377e-01] ... [ 3.95136625e-01 1.66383195e+00 6.75849259e-01 ... 2.85749555e+00 6.37887299e-01 3.24633658e-01] [ 8.21137249e-01 7.81463921e-01 1.12743843e+00 ... 1.13789916e+00 2.07700801e+00 2.43050623e+00] [ 4.04665947e-01 8.34876120e-01 9.82224226e-01 ... 1.27765977e+00 1.48703396e-01 7.69747257e-01]] [[ 6.25477910e-01 1.74320233e+00 9.54834878e-01 ... 2.35666490e+00 9.45132196e-01 1.26631880e+00] [ 6.28902256e-01 1.07936180e+00 1.24010646e+00 ... 1.22371423e+00 1.69183433e+00 1.47874093e+00] [ 4.81698543e-01 1.78068745e+00 3.49141657e-01 ... 
1.34728825e+00 1.54415941e+00 7.66031265e-01] ... [ 1.95347440e+00 5.31364977e-01 1.71479583e+00 ... 1.68934798e+00 1.36322510e+00 1.86909127e+00] [ 8.68440330e-01 6.03214681e-01 4.88905191e-01 ... 1.10959101e+00 9.18855846e-01 1.87194538e+00] [ 9.10920084e-01 1.38332558e+00 -2.95747191e-01 ... 1.61143029e+00 1.78004876e-01 3.75205487e-01]]] [[[ 1.04688871e+00 -9.24551964e-01 9.87162471e-01 ... 5.18241167e-01 -1.10153222e+00 9.44202393e-02] [ 8.15225244e-01 7.39046037e-01 1.21100342e+00 ... -7.60586560e-01 1.10648072e+00 2.52075553e-01] [-1.91065162e-01 1.55797791e+00 9.49855983e-01 ... 5.00583649e-01 1.37648451e+00 9.45588529e-01] ... [-3.05524003e-02 1.18719041e+00 1.02324069e+00 ... 2.00325087e-01 4.28077996e-01 3.34375259e-03] [ 4.95372742e-01 2.54934013e-01 2.86379635e-01 ... -2.68527150e-01 1.11520922e+00 7.78490901e-01] [-1.34973049e-01 -3.86664331e-01 9.85943139e-01 ... 1.70395184e+00 2.74150789e-01 2.00126097e-01]] [[ 7.29076028e-01 5.23362339e-01 2.48701945e-01 ... 5.94473183e-01 3.13098729e-01 3.07856977e-01] [ 5.84824204e-01 1.72348583e+00 5.83923534e-02 ... -1.15762985e+00 6.72047794e-01 1.46326709e+00] [ 1.07868180e-01 -7.22851813e-01 1.11945295e+00 ... 5.04652739e-01 9.29033041e-01 7.79692292e-01] ... [ 4.16256845e-01 9.16893542e-01 -1.02945018e+00 ... 1.23975515e+00 7.49920726e-01 6.30761206e-01] [ 2.71991760e-01 1.11440361e+00 3.79035622e-01 ... -6.46074340e-02 1.63322675e+00 9.21943858e-02] [ 6.79147720e-01 1.31301641e+00 1.47355449e+00 ... -1.04421549e-01 -4.01700854e-01 6.72869444e-01]] [[ 6.60711408e-01 9.03817296e-01 5.75563908e-01 ... 5.00793636e-01 3.89438719e-01 1.26519608e+00] [ 7.16602921e-01 9.50804412e-01 1.23297668e+00 ... 3.02473754e-01 7.02251792e-01 -9.72750008e-01] [ 1.08130252e+00 9.62584853e-01 -3.06299496e-02 ... 1.23477459e+00 6.26407385e-01 1.77440107e-01] ... [ 1.47017944e+00 -1.26206622e-01 6.30793273e-01 ... -2.69972116e-01 8.42363060e-01 9.70844209e-01] [ 7.52586305e-01 5.21317065e-01 -1.24229982e-01 ... 
9.35027778e-01 6.29282236e-01 5.87441146e-01] [ 1.04086578e+00 1.10692406e+00 -7.81223118e-01 ... 9.03742731e-01 1.59854341e+00 3.65091383e-01]] ... [[-9.79544967e-02 -9.92609784e-02 1.13298011e+00 ... 1.00506723e+00 -1.76912650e-01 1.08396387e+00] [ 4.36767668e-01 3.87212425e-01 3.83073539e-01 ... 1.08578980e+00 1.35758724e-02 1.49200451e+00] [ 9.95742679e-01 9.21794891e-01 4.09945756e-01 ... 1.11772013e+00 1.25433433e+00 1.89801663e-01] ... [ 3.97539675e-01 1.10330319e+00 1.23019600e+00 ... 1.68141827e-01 -2.67199904e-01 1.58238959e+00] [ 9.69031036e-01 1.83787420e-01 1.05662274e+00 ... -3.34201843e-01 -1.02486275e-01 5.06503642e-01] [ 6.76533043e-01 9.73925292e-01 1.11137331e+00 ... 7.28479207e-01 -1.66795820e-01 1.94831684e-01]] [[-6.01430953e-01 9.18180227e-01 6.29069567e-01 ... 6.34438157e-01 3.17129642e-01 -7.04536080e-01] [-4.38683629e-01 4.98410136e-01 9.62716579e-01 ... 9.00183141e-01 -1.72210231e-01 -2.99913943e-01] [-5.04068255e-01 7.37442672e-01 9.98056471e-01 ... 7.51505494e-01 1.32216573e+00 1.04137503e-01] ... [ 1.92617178e+00 6.26318753e-02 1.02353132e+00 ... -7.54403532e-01 1.15804923e+00 1.08961189e+00] [-5.00730313e-02 1.15400100e+00 3.39395940e-01 ... 4.60626423e-01 -3.37371409e-01 5.89217603e-01] [ 6.35747373e-01 7.41349310e-02 -9.17563513e-02 ... 1.05625498e+00 5.83397523e-02 2.00041103e+00]] [[ 1.12196994e+00 9.88787115e-01 4.04699072e-02 ... -6.90320790e-01 -2.71225154e-01 -1.25263840e-01] [ 9.38619375e-01 7.10953057e-01 1.87539399e+00 ... -3.74500275e-01 -6.60476536e-02 -3.29355747e-01] [ 1.30085617e-01 1.55227685e+00 7.08384961e-02 ... 1.02661610e+00 9.43564177e-01 -8.25640321e-01] ... [ 4.21519727e-01 3.65035594e-01 -4.28349733e-01 ... 3.69749814e-02 8.49221528e-01 1.38363349e+00] [-1.33325815e+00 1.64540184e+00 -2.86297984e-02 ... 1.16779351e+00 1.26328540e+00 -8.17871094e-01] [ 4.59949851e-01 2.49818325e+00 2.14401498e-01 ... 7.23617077e-01 8.22383702e-01 1.30491769e+00]]] [[[ 7.80517980e-02 5.19621789e-01 2.94747740e-01 ... 
1.89998895e-01 2.50056051e-02 8.30437601e-01] [ 4.37622577e-01 -1.27902210e-01 -4.88760203e-01 ... -1.35065243e-01 4.67585564e-01 2.65112251e-01] [ 4.06942695e-01 4.80961800e-01 -6.47303879e-01 ... 5.67334950e-01 4.20123398e-01 4.68185037e-01] ... [ 2.63116896e-01 9.70261991e-02 1.71759725e-01 ... 5.28057933e-01 -1.05568971e-02 1.23625374e+00] [ 6.61420584e-01 4.51791435e-01 6.13881983e-02 ... 5.28893054e-01 -2.56987333e-01 5.84780693e-01] [ 1.70371890e-01 -3.41318220e-01 7.88647950e-01 ... 6.31408870e-01 -7.74299726e-03 -9.21733305e-03]] [[ 2.44380727e-01 2.27631986e-01 -1.58191606e-01 ... 7.21043468e-01 6.94607676e-04 -1.68823805e-02] [-2.12250620e-01 -3.86642039e-01 3.22786838e-01 ... 3.32750112e-01 3.12854014e-02 5.19369483e-01] [ 2.76456833e-01 5.85568964e-01 5.73659897e-01 ... 2.91082770e-01 -1.82817042e-01 2.70818651e-01] ... [ 3.54317993e-01 2.79596448e-01 -4.45832238e-02 ... -3.53897244e-01 1.62165374e-01 3.46297741e-01] [ 1.19546503e-02 -1.55258462e-01 2.25044116e-01 ... 3.21995378e-01 5.51200390e-01 -2.54543144e-02] [ 3.37618470e-01 1.49947226e-01 -1.99855343e-01 ... 3.96250725e-01 3.47313643e-01 -1.95306107e-01]] [[ 1.58756245e-02 3.00464302e-01 5.62095344e-02 ... 3.80579293e-01 7.56971717e-01 7.64209256e-02] [ 1.67241603e-01 -1.72835235e-02 2.00330943e-01 ... 6.35597929e-02 4.98193204e-01 4.40149873e-01] [ 2.69486189e-01 -1.11377425e-01 3.20277666e-03 ... -4.40022070e-03 -2.03974023e-01 -2.30985433e-01] ... [-3.75322580e-01 3.45727742e-01 -1.01629592e-01 ... -3.15543823e-02 -4.05257732e-01 3.60839188e-01] [ 5.63048065e-01 2.17187166e-01 8.86624157e-02 ... 4.22178239e-01 -8.36740062e-03 7.01098800e-01] [ 5.13312459e-01 2.97180474e-01 7.60344416e-02 ... 1.93707809e-01 -3.54317218e-01 3.99624348e-01]] ... [[ 1.34313002e-01 3.68026823e-01 5.01372933e-01 ... 3.80975395e-01 4.22764868e-01 4.89565700e-01] [-4.81432498e-01 8.82841051e-02 -5.20122766e-01 ... 1.25953972e-01 -5.61943417e-03 4.56399858e-01] [ 5.58878899e-01 1.49272615e-02 3.34632784e-01 ... 
7.31841251e-02 -4.47414368e-02 2.77294040e-01] ... [ 1.63925275e-01 1.09669417e-01 8.87577385e-02 ... 1.13362171e-01 3.51020128e-01 5.09801388e-01] [ 4.73929346e-01 6.28384352e-01 2.50831068e-01 ... -1.18243031e-01 1.03485160e-01 1.61134586e-01] [-3.50273430e-01 2.43477762e-01 1.85749695e-01 ... -4.34319265e-02 2.46626154e-01 4.31629777e-01]] [[ 7.46436656e-01 1.53327093e-01 3.24729949e-01 ... -1.45147577e-01 5.56395531e-01 7.77536556e-02] [ 1.88640416e-01 3.22566092e-01 3.87172937e-01 ... 9.03711081e-01 -1.02238216e-01 5.06163836e-02] [ 5.09351075e-01 6.82834685e-02 -1.68846305e-02 ... 1.51721358e-01 -4.94495243e-01 4.93225574e-01] ... [-1.86993275e-02 4.86679316e-01 -9.93946791e-02 ... 5.61630011e-01 2.16207746e-02 9.97616537e-03] [ 4.75521922e-01 1.41211480e-01 -2.28853315e-01 ... 7.49267042e-02 4.99551773e-01 2.49966770e-01] [ 3.58045995e-01 2.82704860e-01 1.72949970e-01 ... 6.49019420e-01 -2.79070679e-02 1.45186499e-01]] [[ 2.53224969e-01 1.11956306e-01 3.97960484e-01 ... 6.07653856e-01 -2.13227987e-01 -5.50733030e-01] [ 4.96567667e-01 6.17559433e-01 4.39217925e-01 ... -1.33558661e-01 3.80221367e-01 3.31746697e-01] [-6.32837266e-02 1.02754019e-01 7.59333670e-01 ... 5.56423545e-01 3.84346634e-01 -6.21526361e-01] ... [-2.94434309e-01 2.58764535e-01 3.07434052e-01 ... -1.68329537e-01 2.07815930e-01 4.75967944e-01] [ 2.06194311e-01 4.06662881e-01 6.90656543e-01 ... 3.92113596e-01 -1.14525840e-01 -2.29820549e-01] [-4.08542633e-01 -3.95206250e-02 -1.36325315e-01 ... -2.22754657e-01 1.54572576e-01 5.39726198e-01]]]] ... [[[[-7.65460849e-01 -2.15432592e-04 -1.09750852e-01 ... -7.87063003e-01 -3.37046623e-01 -6.56313300e-01] [ 3.72366130e-01 4.09975171e-01 -1.05888963e+00 ... -6.75718486e-01 -3.24151397e-01 -7.68467069e-01] [-5.08600473e-01 -1.18038201e+00 4.50536124e-02 ... 3.53971004e-01 -4.09218222e-01 -5.20576596e-01] ... [-2.11592898e-01 5.00985980e-01 5.69284856e-01 ... 
2.88722098e-01 -4.25640762e-01 -1.53581455e-01] [ 3.19686770e-01 4.54740450e-02 1.66694865e-01 ... 1.88335076e-01 -5.74088283e-03 -4.11069803e-02] [-8.59852880e-02 -4.65297490e-01 -1.15804207e+00 ... 1.48855627e-01 -2.17276573e-01 -1.39238134e-01]] [[-8.16575348e-01 -3.29191506e-01 9.29175556e-01 ... -4.89308208e-01 -4.70433146e-01 -1.71645120e-01] [-9.98489335e-02 2.27987260e-01 4.40916836e-01 ... -6.16504848e-01 -7.19874859e-01 -1.25117922e+00] [-5.83523214e-01 -2.82480538e-01 8.13729107e-01 ... -7.45174944e-01 5.86972386e-02 -5.44554949e-01] ... [ 4.15265977e-01 -6.08276546e-01 -1.05393040e+00 ... -1.81705141e+00 -5.58142245e-01 -4.59340811e-01] [-3.32717657e-01 8.58039916e-01 -6.31447673e-01 ... -2.61536360e-01 -8.29007924e-01 -8.14814270e-01] [-3.41464467e-02 1.74669370e-01 6.05548024e-01 ... -2.64584571e-01 -1.32149428e-01 4.48638290e-01]] [[-6.70143127e-01 -3.46528500e-01 2.80744791e-01 ... 7.29236841e-01 -4.19344813e-01 5.63490748e-01] [-2.15513781e-01 4.33811933e-01 -8.35036576e-01 ... -7.20301151e-01 -2.91732103e-01 -7.64873683e-01] [-1.05008984e+00 -6.89737558e-01 4.39144582e-01 ... 4.13519681e-01 2.40364745e-01 8.07457194e-02] ... [-4.16030943e-01 -1.71614380e-03 5.67495346e-01 ... -1.14872612e-01 5.15764177e-01 -7.14683056e-01] [-8.11327100e-01 -3.50798368e-01 -1.14978008e-01 ... -9.71209168e-01 3.48606855e-01 9.31449234e-01] [-1.67104438e-01 -4.94143873e-01 -1.88833177e-01 ... -2.18806013e-01 -8.16898942e-01 -5.34840450e-02]] ... [[ 9.04251277e-01 -6.50418997e-01 1.11955255e-01 ... 2.66251594e-01 2.61244625e-01 -1.04549170e+00] [ 3.22712332e-01 -1.05119443e+00 -5.22948086e-01 ... -5.29255457e-02 -6.08058453e-01 -4.21539158e-01] [ 3.82705629e-01 -2.04081267e-01 -5.12775732e-03 ... -3.17942679e-01 5.08950174e-01 -1.98944986e-01] ... [ 6.31793320e-01 -3.34992353e-03 -7.99546763e-02 ... 5.87530673e-01 -8.11092794e-01 1.70052066e-01] [-8.01502049e-01 -1.73691288e-01 3.35262835e-01 ... 
6.32330298e-01 3.02377373e-01 -4.79533225e-01] [-4.28861111e-01 -8.29370439e-01 -3.35920304e-01 ... -1.06852996e+00 1.70883432e-01 1.25055820e-01]] [[-1.16778314e+00 -7.89203703e-01 -2.85526127e-01 ... 2.56079286e-01 -1.29623675e+00 6.95523024e-01] [ 5.33167005e-01 7.44465217e-02 -1.72116935e-01 ... -3.29746485e-01 6.52906358e-01 -1.31300163e+00] [-9.15047154e-02 -2.65900791e-01 -3.74172717e-01 ... 3.28262538e-01 8.08828115e-01 -8.46972540e-02] ... [ 4.51260537e-01 -4.98554498e-01 -3.73649716e-01 ... 5.34234583e-01 -4.72269654e-01 3.24137434e-02] [-1.64701864e-01 -1.57022998e-01 -4.57466394e-01 ... -9.17259932e-01 -8.60621750e-01 -9.82183874e-01] [-2.84849763e-01 -3.57564807e-01 -4.71020252e-01 ... 1.05567372e+00 5.90526760e-01 4.68559302e-02]] [[-1.05022502e+00 8.58363271e-01 -7.79038548e-01 ... -2.25106463e-01 -4.48426753e-02 -8.02898586e-01] [ 3.80058885e-01 3.44761968e-01 -2.60655463e-01 ... -1.66171515e+00 1.50615978e+00 -1.57090098e-01] [-2.23091200e-01 -2.87602007e-01 -5.22168040e-01 ... -2.03654394e-01 -8.52508664e-01 8.66634965e-01] ... [-5.31104028e-01 -1.68064997e-01 -8.37954506e-02 ... -2.82770962e-01 3.21298927e-01 -2.56557763e-01] [-5.79176545e-02 -7.49357879e-01 3.64072829e-01 ... -1.01108086e+00 -8.36159736e-02 -1.56109309e+00] [-2.09432542e-01 1.03457654e+00 4.69049737e-02 ... -6.03000224e-01 8.37256968e-01 -6.63485587e-01]]] [[[-8.35174918e-01 -9.24520075e-01 -5.12936890e-01 ... 1.66729733e-01 4.88760144e-01 -5.63920021e-01] [ 2.05112487e-01 2.70733744e-01 -4.63510931e-01 ... -7.97521770e-01 2.99780309e-01 -3.80003840e-01] [-2.56060570e-01 -6.02294385e-01 -4.53224123e-01 ... -8.91572237e-01 -1.22618988e-01 -6.74324930e-01] ... [-5.88720202e-01 4.57381696e-01 4.79091644e-01 ... 5.12492582e-02 -1.84354991e-01 -7.12965503e-02] [ 1.38287582e-02 -7.17327416e-01 -4.11192209e-01 ... 2.17509940e-02 4.09069248e-02 -6.99816585e-01] [-8.78169954e-01 -1.31045252e-01 -6.17752457e-03 ... 
-6.76681638e-01 3.97748411e-01 6.19879127e-01]] [[-2.85733759e-01 -1.86431166e-02 -2.39829570e-01 ... -1.68215171e-01 -4.27479863e-01 -8.66946578e-01] [-4.66670394e-01 -2.84593940e-01 3.41089875e-01 ... 5.36497533e-01 -5.13969004e-01 5.21921039e-01] [ 1.00392729e-01 -1.64306685e-01 -4.25222218e-01 ... 2.55528927e-01 -6.39264226e-01 -5.38749337e-01] ... [-3.07451814e-01 -5.39688617e-02 -5.80529392e-01 ... -1.28463268e-01 -2.04124302e-01 -6.72342181e-01] [-2.32436314e-01 -2.04058364e-01 6.99003637e-02 ... -1.18280387e+00 2.92338997e-01 -1.35129169e-01] [-5.94025791e-01 -1.53254911e-01 3.12695414e-01 ... -4.66628164e-01 -6.99690402e-01 3.64015967e-01]] [[-6.81864500e-01 -2.33646423e-01 4.40079063e-01 ... -1.71483234e-01 4.83599842e-01 -1.12552434e-01] [-3.79092246e-01 -6.77405477e-01 -4.71766800e-01 ... -7.87432119e-02 -1.53429164e-02 -1.13971472e+00] [ 1.01518266e-01 -7.56191686e-02 1.88340798e-01 ... -2.62838304e-01 -2.47901887e-01 -2.30529562e-01] ... [-1.21293403e-01 -3.06033939e-01 1.32043600e-01 ... -2.19017938e-01 -7.10254788e-01 -2.36231968e-01] [-6.16058826e-01 -8.49216223e-01 -3.35545212e-01 ... 6.18147433e-01 -1.65524915e-01 -3.07310335e-02] [-1.64773673e-01 -1.41335279e-01 -2.19678760e-01 ... 5.22134542e-01 -5.20460725e-01 -3.43404472e-01]] ... [[-5.53650737e-01 -3.86153400e-01 2.55306274e-01 ... -3.24527621e-01 1.41768187e-01 -8.68672952e-02] [-4.75041091e-01 -8.92013073e-01 1.82625577e-01 ... -8.29786181e-01 2.03377530e-01 5.71713686e-01] [-5.82281165e-02 1.82063803e-01 -4.15561974e-01 ... 3.72522324e-01 -5.57976961e-01 3.24022800e-01] ... [-1.12233318e-01 -2.45250210e-01 2.11092472e-01 ... -2.82915145e-01 -1.10500701e-01 -2.86366612e-01] [-2.09588930e-02 2.36516282e-01 1.89003482e-01 ... -1.45076215e-01 1.58908907e-02 -2.83455938e-01] [-1.44475415e-01 2.84690291e-01 3.49973559e-01 ... -8.33807066e-02 1.39531735e-02 -2.27476418e-01]] [[-4.05932993e-01 -2.34020069e-01 1.98905006e-01 ... 
-2.76840955e-01 7.81367198e-02 1.97967172e-01] [-4.70152438e-01 -3.77809793e-01 7.58721471e-01 ... -5.38936317e-01 -2.97394931e-01 -1.25191703e-01] [-1.25269517e-01 -6.78719163e-01 -2.17352599e-01 ... -1.02412856e+00 -5.99044085e-01 3.80996689e-02] ... [-2.21759379e-01 -6.87038839e-01 8.49438757e-02 ... -6.46402121e-01 -1.37985691e-01 -3.08381319e-01] [ 8.29342753e-03 -4.20528084e-01 4.21664268e-01 ... 3.02679032e-01 2.86032438e-01 -3.67041528e-01] [-7.57536769e-01 -1.47919908e-01 -1.06739692e-01 ... 2.20326170e-01 -1.03254855e+00 -3.44037324e-01]] [[-8.86045456e-01 -1.25559866e-01 -5.12439847e-01 ... -3.66087779e-02 2.41817117e-01 -4.38283980e-01] [-5.15997745e-02 -2.18809173e-01 -2.84935623e-01 ... 4.32301491e-01 -4.08594131e-01 -4.18494672e-01] [-6.13341212e-01 -5.26026726e-01 1.64395452e-01 ... -1.18761253e+00 -7.70034492e-02 -5.01215398e-01] ... [-6.33667767e-01 -1.02954316e+00 -1.08941245e+00 ... -6.73250318e-01 6.86993539e-01 -6.51265502e-01] [-5.59078038e-01 -2.20816404e-01 3.19316471e-03 ... 4.56782937e-01 -1.14411876e-01 8.88400152e-03] [-3.91702533e-01 8.60986769e-01 -9.44307208e-01 ... 4.62865606e-02 -7.48112023e-01 -6.76806748e-01]]] [[[ 1.31135833e+00 2.65823483e+00 -2.50621498e-01 ... 1.77991760e+00 3.06818318e+00 -2.21934579e-02] [ 6.44766331e-01 1.21417260e+00 3.54779530e+00 ... 2.09190297e+00 -1.69410571e-01 -2.41158888e-01] [-8.52596879e-01 2.73119420e-01 2.08429098e+00 ... 1.67794847e+00 1.44577599e+00 -1.71367064e-01] ... [ 3.68827438e+00 1.35610089e-01 9.74317372e-01 ... 2.17965022e-01 3.09561300e+00 1.55192506e+00] [ 2.30773020e+00 8.37206721e-01 4.75516737e-01 ... 5.84040642e-01 2.39156032e+00 -5.48981488e-01] [ 8.51593971e-01 2.31171465e+00 1.38857400e+00 ... -1.33064032e+00 2.38127160e+00 1.29529715e+00]] [[ 1.25878537e+00 1.91857076e+00 7.16514051e-01 ... 2.17956495e+00 -1.12070814e-01 1.24214375e+00] [ 3.84781957e-01 2.80668187e+00 1.29412508e+00 ... 
3.37936521e+00 1.00922191e+00 8.50820124e-01] [ 1.56607449e+00 -9.12095487e-01 1.71684873e+00 ... -6.63568199e-01 1.32172024e+00 2.92912459e+00] ... [ 1.24223208e+00 6.16364241e-01 2.56019759e+00 ... -1.15429842e+00 -5.35685360e-01 2.61491752e+00] [ 2.56988257e-01 -6.58896983e-01 -1.12061071e+00 ... 2.48253703e+00 9.90531296e-02 2.63136935e+00] [ 8.21631908e-01 -1.94516808e-01 2.05848885e+00 ... 5.61824858e-01 -7.77772248e-01 1.33352113e+00]] [[ 3.10714006e+00 8.30244362e-01 1.50088263e+00 ... -2.85670161e-01 -4.46484804e-01 7.88688302e-01] [ 8.22373152e-01 2.62153292e+00 1.20228970e+00 ... 2.37104750e+00 9.42753792e-01 2.31998110e+00] [ 1.91255617e+00 2.88291788e+00 1.47177219e+00 ... 8.31230223e-01 1.12038696e+00 1.26237249e+00] ... [ 8.88895035e-01 3.11653733e-01 1.55829632e+00 ... 8.04215610e-01 1.33169138e+00 5.64712405e-01] [ 7.28236854e-01 -4.46815461e-01 4.00659531e-01 ... -1.25653878e-01 3.09500605e-01 2.43737578e+00] [ 1.29202664e+00 7.39344180e-01 2.06216121e+00 ... 1.32718968e+00 -5.13811231e-01 1.27821016e+00]] ... [[ 1.41513959e-01 9.22868624e-02 4.06042457e-01 ... 1.50671208e+00 6.45665526e-01 -8.89314711e-02] [ 9.32078540e-01 2.71299410e+00 2.29232216e+00 ... 1.19896138e+00 1.49229717e+00 1.59450197e+00] [ 3.06402278e+00 2.34737945e+00 1.87316430e+00 ... 2.73910069e+00 1.71857703e+00 2.49462390e+00] ... [ 3.91912889e+00 -1.06176697e-02 1.53900242e+00 ... 2.70940375e+00 1.26497543e+00 1.19679260e+00] [ 1.59769571e+00 1.55387771e+00 1.72006297e+00 ... -3.38591158e-01 1.62100899e+00 7.31755614e-01] [ 1.69602072e+00 2.45075989e+00 1.22699082e+00 ... 5.15204258e-02 1.01082671e+00 7.27711380e-01]] [[ 1.69378710e+00 1.03574109e+00 6.12403452e-01 ... 1.68230021e+00 2.30751085e+00 3.17643195e-01] [ 6.95512652e-01 2.56699175e-01 2.53609157e+00 ... 6.64840817e-01 1.71484983e+00 9.89475012e-01] [ 1.61361384e+00 -3.72298837e-01 1.75595260e+00 ... 1.87467241e+00 3.67872214e+00 1.31349957e+00] ... [-4.87730831e-01 -1.06721854e+00 1.83409607e+00 ... 
1.02393591e+00 2.29565072e+00 1.92718387e+00] [ 2.31250882e+00 1.82403445e+00 -6.68782055e-01 ... 2.69365883e+00 1.61692333e+00 2.57648611e+00] [ 2.21144843e+00 1.42941988e+00 3.46507579e-01 ... 4.86257821e-01 7.09648132e-01 1.56298792e+00]] [[ 1.47709286e+00 2.11781621e+00 3.17609715e+00 ... 1.49998689e+00 1.01546812e+00 -1.71340615e-01] [ 1.74118543e+00 1.20940471e+00 1.64612567e+00 ... -4.12156641e-01 -1.03622460e+00 1.81540704e+00] [-4.96622384e-01 2.44769394e-01 2.37502241e+00 ... 3.99935633e-01 3.48454189e+00 1.18040156e+00] ... [ 1.31007946e+00 1.91936266e+00 1.27146482e+00 ... -1.70896292e-01 8.93642426e-01 1.54207855e-01] [ 1.21647739e+00 1.29643142e+00 1.52660644e+00 ... 1.01931727e+00 5.14993489e-01 9.61993754e-01] [-2.65354902e-01 -5.54964244e-01 8.99109542e-01 ... -5.19760013e-01 1.64648330e+00 9.85885918e-01]]] [[[ 1.35236669e+00 1.71090245e+00 7.95765698e-01 ... 1.73040378e+00 2.21582699e+00 6.56006396e-01] [ 2.15056300e+00 1.29722977e+00 1.48815715e+00 ... 7.58028686e-01 9.84258533e-01 1.06132185e+00] [ 1.71488798e+00 9.87473428e-01 1.13434613e+00 ... 5.29228628e-01 1.63449097e+00 1.87534094e+00] ... [ 8.34652722e-01 1.17873704e+00 1.06163013e+00 ... 1.72758722e+00 1.79549861e+00 1.71202886e+00] [ 4.03876901e-01 1.09066451e+00 1.20264304e+00 ... 1.42626536e+00 1.62563717e+00 9.34078157e-01] [ 1.65864873e+00 5.26338518e-01 1.78178573e+00 ... 1.17632163e+00 8.38482976e-01 4.67353225e-01]] [[ 5.64670265e-01 7.59371638e-01 2.16359615e+00 ... 1.69954574e+00 1.53638768e+00 6.48493946e-01] [ 1.18759477e+00 1.10965288e+00 1.46086431e+00 ... 8.56404245e-01 7.53417552e-01 2.13617444e+00] [ 1.69365931e+00 1.06920457e+00 1.26092160e+00 ... 1.49740434e+00 3.76060188e-01 1.13231528e+00] ... [ 2.50472093e+00 9.61070657e-01 1.02682698e+00 ... 3.97007108e-01 1.97369719e+00 2.15783215e+00] [ 7.50291705e-01 8.94450068e-01 1.13855875e+00 ... 9.34158683e-01 9.36940134e-01 7.13539064e-01] [ 6.76312625e-01 3.95747006e-01 1.94377804e+00 ... 
5.14651656e-01 1.47373736e+00 1.74714088e+00]] [[ 9.31096911e-01 1.87022376e+00 1.48527539e+00 ... 1.18602860e+00 7.98073828e-01 1.16779423e+00] [ 1.68398261e+00 1.70585394e+00 1.30756545e+00 ... 7.26631999e-01 1.08380544e+00 1.06051815e+00] [ 1.56052279e+00 1.10918987e+00 1.00186074e+00 ... 7.44085073e-01 5.11227548e-01 1.74005985e+00] ... [ 5.50189793e-01 1.40608597e+00 1.39391291e+00 ... 1.81729198e+00 6.17263675e-01 1.53219557e+00] [ 1.42127216e+00 7.58812487e-01 1.55632889e+00 ... 7.85152435e-01 1.11365750e-01 7.14940608e-01] [ 3.10619742e-01 1.22099578e+00 9.94821787e-01 ... 3.51003587e-01 1.63747919e+00 8.85211527e-01]] ... [[ 9.01964784e-01 1.25341141e+00 1.01829183e+00 ... 4.21111137e-01 1.35189712e+00 8.23662937e-01] [ 1.06998587e+00 8.51613402e-01 1.53401637e+00 ... 1.66981697e+00 1.27982748e+00 1.20467794e+00] [ 2.71652043e-01 1.10744631e+00 8.74555767e-01 ... 1.44753003e+00 1.05342484e+00 1.69226122e+00] ... [ 1.40547323e+00 6.39110208e-01 2.29039997e-01 ... 9.34541166e-01 1.69720089e+00 1.13998568e+00] [ 1.68065727e+00 1.59695709e+00 1.78639424e+00 ... 6.90783083e-01 1.01632631e+00 7.99449742e-01] [ 1.17846489e+00 1.75504911e+00 1.59015417e+00 ... 1.30034482e+00 1.15233958e+00 6.68082416e-01]] [[ 3.93426150e-01 1.53718472e+00 1.97838032e+00 ... 8.91383111e-01 1.90095472e+00 1.32054257e+00] [ 6.60707235e-01 6.21531665e-01 5.08907318e-01 ... 9.22208488e-01 1.53563869e+00 1.31049323e+00] [ 1.13671350e+00 1.84339523e+00 1.51188540e+00 ... 1.11569118e+00 1.43280399e+00 1.68275988e+00] ... [ 1.60476542e+00 1.15929866e+00 2.11808634e+00 ... 1.72040057e+00 1.09925425e+00 1.83879817e+00] [ 1.66019368e+00 7.43053436e-01 1.49158370e+00 ... 1.42328310e+00 1.23369193e+00 9.28143144e-01] [ 8.32915485e-01 1.47383881e+00 5.83845437e-01 ... 7.00520813e-01 1.60053229e+00 1.96889961e+00]] [[ 1.18795645e+00 1.23063076e+00 8.90647113e-01 ... 1.80684495e+00 9.76601899e-01 1.27923763e+00] [ 8.59772861e-01 2.54371905e+00 1.64892292e+00 ... 
9.38630819e-01 1.01740444e+00 1.18831539e+00] [ 2.54926920e-01 2.57954925e-01 3.61207247e-01 ... 1.06890142e+00 8.38333428e-01 1.39805281e+00] ... [ 6.04775667e-01 9.84433532e-01 1.18834376e+00 ... 1.80403876e+00 9.64126885e-01 6.92721128e-01] [ 1.16306269e+00 7.31701553e-01 1.51410663e+00 ... 7.72041976e-01 1.87519586e+00 4.54821765e-01] [ 1.39867949e+00 1.42243242e+00 9.63386655e-01 ... 1.29322755e+00 1.13025010e+00 6.94562733e-01]]] [[[ 1.27073109e-01 -6.96044117e-02 8.62389803e-01 ... -4.53762501e-01 9.39011812e-01 5.53393841e-01] [ 7.80079246e-01 -5.25179505e-01 3.00663114e-01 ... 8.24378803e-02 3.94544512e-01 1.37986279e+00] [-6.10908866e-01 2.38207653e-01 5.24240434e-01 ... 1.09242892e+00 1.38725400e+00 -4.78266269e-01] ... [ 9.51163352e-01 4.41914469e-01 1.28900242e+00 ... -7.11122632e-01 1.82440853e+00 9.71909761e-01] [ 8.68212342e-01 1.25639129e+00 4.12567973e-01 ... 9.78061378e-01 1.61760950e+00 1.03080645e-01] [ 4.49269041e-02 -9.12619829e-02 3.80202681e-01 ... 3.09982181e-01 2.34165549e-01 5.00541151e-01]] [[ 1.59466639e-01 -3.72159421e-01 4.40702587e-01 ... 9.20427859e-01 2.35311866e+00 3.14598143e-01] [ 8.97852242e-01 1.08747981e-01 1.37310290e+00 ... 9.27086174e-01 3.46042328e-02 9.71899509e-01] [ 1.46206588e-01 1.81107864e-01 -1.33243859e+00 ... 1.44115567e+00 -4.65069354e-01 9.46499288e-01] ... [ 2.89745539e-01 2.97336549e-01 -1.27287582e-01 ... 9.02064860e-01 -8.94735754e-01 7.04147279e-01] [ 6.52701557e-01 2.62610972e-01 5.81279337e-01 ... 6.04050159e-01 1.91603124e-01 8.48708227e-02] [-8.86321664e-01 -1.28124535e-01 1.11018205e+00 ... -3.18650752e-01 1.00721979e+00 7.57392585e-01]] [[ 6.39974415e-01 2.23786521e+00 -1.83565974e-01 ... 6.26570523e-01 -6.17459416e-01 2.78800845e-01] [ 5.96018374e-01 8.29437315e-01 -3.00907165e-01 ... 8.48902106e-01 2.35087693e-01 -6.21355593e-01] [ 1.10808218e+00 1.46520287e-01 1.37089157e+00 ... 5.54517686e-01 1.13610673e+00 8.78588557e-01] ... [ 7.00364172e-01 1.47761631e+00 1.01417446e+00 ... 
5.39268613e-01 6.94356620e-01 9.10641551e-02] [-7.00659871e-01 7.91004717e-01 7.65712410e-02 ... 2.54310310e-01 -5.49731255e-01 -6.77730083e-01] [ 1.12242453e-01 1.30618763e+00 4.92551595e-01 ... 3.83950472e-01 -1.44162942e-02 3.67677897e-01]] ... [[ 1.07393003e+00 1.25889981e+00 5.49407542e-01 ... -1.06716149e-01 -1.12728573e-01 -4.03509885e-02] [-3.69739801e-01 -1.00172627e+00 1.49027705e-01 ... -3.16544026e-01 -1.54704556e-01 2.44559005e-01] [-2.23034725e-01 2.23959789e-01 1.39604616e+00 ... -3.45255077e-01 3.70438516e-01 4.15403068e-01] ... [ 3.81928086e-01 3.45489621e-01 3.88297826e-01 ... -4.56058457e-02 1.01548064e+00 -1.83125094e-01] [ 5.51171117e-02 1.40092158e+00 2.59557694e-01 ... 9.20142651e-01 -2.96138078e-02 3.61884058e-01] [ 9.57734168e-01 2.08095871e-02 1.10024166e+00 ... 9.48374987e-01 6.17100060e-01 1.25739992e+00]] [[ 1.24457383e+00 -6.09520376e-01 1.84371543e+00 ... 2.99099892e-01 6.23967946e-01 1.16089630e+00] [ 1.64928555e+00 3.29921842e-01 -3.23050618e-01 ... 3.86582166e-01 5.64755082e-01 -5.06223142e-01] [ 7.95897365e-01 1.20588386e+00 2.14429235e+00 ... 6.95327103e-01 4.04719234e-01 1.46250868e+00] ... [ 2.88178831e-01 6.20879412e-01 3.01327467e-01 ... -4.29959781e-02 -5.64284146e-01 7.88906574e-01] [ 1.37809321e-01 1.07629466e+00 7.64034510e-01 ... 1.63527176e-01 1.88617826e-01 5.78720868e-01] [ 4.11178917e-01 1.21677592e-01 1.33602977e-01 ... 9.10881877e-01 7.09834769e-02 6.45831108e-01]] [[-9.72179711e-01 3.54346633e-01 -1.12969935e+00 ... 4.17887330e-01 4.18173522e-01 1.37997761e-01] [-4.83375609e-01 -5.28753884e-02 -1.68301746e-01 ... 1.10067725e+00 5.02831340e-02 -9.92929265e-02] [ 9.08659816e-01 -5.43039501e-01 -2.75875628e-01 ... 2.28505418e-01 1.90864965e-01 1.73935726e-01] ... [ 4.77009326e-01 5.34401894e-01 5.92019260e-01 ... 1.78386223e+00 8.59436274e-01 1.40808904e+00] [ 1.10081327e+00 -5.42573035e-01 1.37193656e+00 ... 3.13831061e-01 6.43276572e-01 3.56653839e-01] [ 1.77232072e-01 1.44559979e+00 4.19485062e-01 ... 
1.59986460e+00 1.36667514e+00 1.02328885e+00]]] [[[ 4.62120324e-01 6.59819245e-01 1.43677354e-01 ... 3.15135241e-01 -7.70724937e-02 3.88782799e-01] [ 5.25126815e-01 1.96307272e-01 4.90510136e-01 ... -3.38748157e-01 5.92642844e-01 -3.56501639e-01] [ 6.66838944e-01 1.81997806e-01 5.33611655e-01 ... -3.58732700e-01 -5.32997847e-02 5.00759423e-01] ... [ 4.82869506e-01 9.20978069e-01 2.11727664e-01 ... -5.01077950e-01 7.89270759e-01 6.05671406e-01] [ 6.58619180e-02 3.00730318e-01 1.29790142e-01 ... -2.68146962e-01 9.27911885e-03 8.81767392e-01] [ 4.39113379e-01 -6.97626472e-02 -5.41396253e-02 ... 5.69620073e-01 6.48674428e-01 5.52332997e-01]] [[ 2.98074841e-01 5.71326725e-02 -2.63428509e-01 ... -1.39778286e-01 9.68027711e-01 3.22420150e-01] [ 2.02005938e-01 -4.35452946e-02 8.88253301e-02 ... -1.61654577e-02 8.98045361e-01 3.35728765e-01] [ 6.37409270e-01 6.23085618e-01 7.91805267e-01 ... 6.13863766e-01 3.30126256e-01 5.32542884e-01] ... [ 2.82253116e-01 5.26865661e-01 -1.57941699e-01 ... -3.79139513e-01 -6.54323548e-02 2.31169209e-01] [ 2.49916896e-01 4.29874778e-01 5.23549952e-02 ... 3.92308384e-01 2.20217168e-01 6.62259877e-01] [ 3.32959533e-01 1.02798082e-01 -5.65208010e-02 ... 3.54961723e-01 2.60369837e-01 -3.83215696e-01]] [[ 3.14141005e-01 3.53877321e-02 -1.77277535e-01 ... 5.44309616e-01 3.79132450e-01 -6.80210628e-03] [ 5.09459555e-01 4.72137839e-01 4.32044119e-01 ... 5.70621192e-02 -5.08396290e-02 1.22190185e-01] [ 3.70176971e-01 -2.44614407e-01 9.77157131e-02 ... 3.34184840e-02 2.28072122e-01 8.16330612e-01] ... [ 6.04906857e-01 9.43060696e-01 4.98532295e-01 ... 1.22302406e-01 -7.93477371e-02 -1.03800885e-01] [ 2.46629715e-01 4.76179898e-01 -8.09628069e-02 ... 1.29097447e-01 -4.90582764e-01 2.29836643e-01] [-2.42997229e-01 6.54513359e-01 -4.26425010e-01 ... -4.57495488e-02 2.64733791e-01 -4.19713140e-01]] ... [[ 4.56573546e-01 3.56051415e-01 1.68471158e-01 ... 4.83988792e-01 -3.18103313e-01 3.92730534e-01] [ 5.38497269e-01 4.27071303e-01 2.42938533e-01 ... 
6.83548093e-01 7.80084133e-02 -1.92820847e-01] [ 3.96704465e-01 7.90156499e-02 2.38792062e-01 ... 3.64707410e-01 3.52889150e-01 -9.18237586e-03] ... [ 1.03418127e-01 3.09858203e-01 -1.13362379e-01 ... 7.97905028e-02 3.03420544e-01 -2.32441779e-02] [ 3.99335474e-01 -1.87294945e-01 7.88513348e-02 ... 6.35414720e-02 5.78406155e-01 -2.83252567e-01] [ 9.00969028e-01 2.91501224e-01 4.26941603e-01 ... 3.02148819e-01 6.46481335e-01 5.92049539e-01]] [[-1.31291196e-01 4.73787546e-01 -3.75845462e-01 ... -2.48882055e-01 2.81389445e-01 -1.49398774e-01] [ 1.97907314e-01 2.94846687e-02 -2.60152340e-01 ... 4.11205322e-01 -1.54568240e-01 2.05568284e-01] [ 7.95506656e-01 3.78442019e-01 2.82075256e-01 ... 1.21742420e-01 5.57618082e-01 2.73660421e-01] ... [ 2.70168722e-01 -8.40132609e-02 -8.77609253e-02 ... 3.73103291e-01 3.17075908e-01 5.42008281e-01] [ 1.59406036e-01 -8.97120163e-02 6.84219122e-01 ... 4.01206493e-01 6.81035146e-02 3.64379659e-02] [ 4.51117069e-01 -4.02476758e-01 2.91198373e-01 ... 3.42271447e-01 -1.67689279e-01 2.11950764e-01]] [[ 3.55479658e-01 2.21654996e-01 3.22327554e-01 ... 2.58050144e-01 8.12775642e-03 4.94366765e-01] [-4.92662877e-01 5.87131619e-01 7.96276748e-01 ... 1.66686609e-01 2.34630443e-02 7.24463984e-02] [ 5.33229470e-01 9.96487737e-01 3.82532448e-01 ... 2.26855502e-01 1.96428180e-01 3.13328832e-01] ... [ 2.13973641e-01 3.74237597e-02 -2.37735718e-01 ... 1.94909900e-01 -1.24366663e-01 2.11409450e-01] [ 6.86956346e-02 8.11568677e-01 -3.24554771e-01 ... -1.06127284e-01 1.48619592e-01 1.14326566e-01] [ 4.17400822e-02 3.60219002e-01 1.69377938e-01 ... 5.37499666e-01 2.14247614e-01 1.11510321e-01]]]] [[[[-1.38382480e-01 -2.06035629e-01 3.85654390e-01 ... -1.73226789e-01 -1.89491019e-01 -1.02220035e+00] [-4.54513490e-01 -2.82368381e-02 -2.06316233e-01 ... -1.08337808e+00 9.05143142e-01 -4.09193903e-01] [-1.73249826e-01 7.24492788e-01 -5.33746958e-01 ... -1.87564984e-01 -2.52408776e-02 2.41886005e-01] ... [ 6.82984948e-01 2.19609037e-01 -1.85484201e-01 ... 
-5.83280995e-02 -3.32494318e-01 -2.59498078e-02] [-2.31155545e-01 -8.05264950e-01 4.66989607e-01 ... -2.15178043e-01 2.86244422e-01 3.75875026e-01] [-6.42534614e-01 -4.29522902e-01 3.22539806e-01 ... -1.48783302e+00 2.08160341e-01 4.27499041e-03]] [[-7.01396525e-01 -4.83924359e-01 4.60010082e-01 ... -7.04249978e-01 -4.79564458e-01 -1.21023023e+00] [-5.37791431e-01 1.67876505e-03 -2.51706466e-02 ... -8.00816298e-01 -1.15623169e-01 -1.47745728e+00] [-1.14431751e+00 2.27196813e-01 -1.11916208e+00 ... 4.03061986e-01 2.09411588e-02 -1.80124730e-01] ... [ 4.52341050e-01 -2.83873975e-02 6.27343714e-01 ... 8.68483558e-02 3.67157251e-01 -2.13304147e-01] [-5.21156704e-03 1.59527794e-01 -4.27238643e-01 ... -2.53866315e-01 -2.47919522e-02 -9.89386886e-02] [-3.16943228e-01 1.38956219e-01 -1.06490135e-01 ... 5.31863868e-01 -8.23354051e-02 -1.81397766e-01]] [[-8.72001529e-01 -4.96642962e-02 3.98270786e-01 ... 9.40684974e-03 2.77978748e-01 4.25775908e-02] [-7.95489609e-01 1.29926458e-01 8.33294690e-02 ... -1.68157279e-01 1.22822857e+00 4.73688066e-01] [-1.07561255e+00 -1.34082341e+00 -1.76055104e-01 ... 9.23953056e-02 1.47950634e-01 -1.41055655e+00] ... [-1.12121895e-01 -5.21262527e-01 6.61664188e-01 ... -3.29567015e-01 2.79691994e-01 4.19459701e-01] [-4.28020656e-01 -1.03940237e+00 2.65882947e-02 ... -2.79848307e-01 -7.21119791e-02 1.63375109e-01] [ 1.93566605e-01 4.50186759e-01 -3.39272588e-01 ... -5.46364844e-01 -4.14914519e-01 -5.79133481e-02]] ... [[ 6.31478190e-01 -1.67858880e-02 -6.14656925e-01 ... 9.73743200e-01 1.22104518e-01 -1.03322887e+00] [-3.25444490e-01 4.44341630e-01 -2.11437374e-01 ... 1.22522995e-01 -9.60849583e-01 -5.90919614e-01] [ 2.83759624e-01 7.88741931e-02 4.55079705e-01 ... 1.46572720e-02 3.79462659e-01 -4.77477074e-01] ... [-6.85010031e-02 -9.07759517e-02 -9.02734756e-01 ... -5.33404827e-01 -1.03948855e+00 4.99603271e-01] [-5.23061335e-01 7.66921341e-01 5.95041811e-01 ... 
1.65207699e-01 -7.66126215e-01 -6.38818860e-01] [-1.21128976e+00 4.85512286e-01 7.33429492e-02 ... -4.60795164e-01 -6.57347023e-01 -9.90837514e-02]] [[ 2.47142911e-01 5.10460973e-01 4.00449485e-01 ... -6.20313108e-01 -1.00003615e-01 -1.24185503e-01] [ 1.69164971e-01 -3.21718752e-01 7.53627002e-01 ... -6.92287505e-01 -1.26399541e+00 -3.37941855e-01] [-8.59275877e-01 -9.16717574e-02 4.19971436e-01 ... -4.53334987e-01 -3.61391865e-02 -3.86148989e-01] ... [ 2.33842552e-01 1.60985783e-01 6.05689406e-01 ... 9.81733084e-01 -4.58423704e-01 -4.41390067e-01] [ 6.13615930e-01 2.78276503e-01 -7.75885224e-01 ... 3.35810691e-01 6.58407286e-02 -2.59856552e-01] [-1.02320075e+00 -6.47354007e-01 1.15144201e-01 ... 2.44665742e-01 -3.04826051e-01 -6.32219970e-01]] [[-6.12606466e-01 -5.34910202e-01 -6.57995194e-02 ... 6.81910872e-01 -7.47110724e-01 1.82209626e-01] [ 1.30151406e-01 -1.31937653e-01 -2.93549359e-01 ... 1.43223237e-02 6.79704428e-01 -3.03454727e-01] [ 8.88004184e-01 -9.54395056e-01 -3.36687624e-01 ... -5.65690100e-01 5.69454849e-01 -8.28517377e-02] ... [ 3.88627574e-02 -2.09191386e-02 2.25641131e-01 ... 4.45640862e-01 -3.35554481e-01 4.50101458e-02] [-9.53221321e-01 8.77215624e-01 1.83714092e-01 ... 6.25280321e-01 5.49701869e-01 7.76244938e-01] [-6.15189135e-01 -1.01192355e+00 -7.91465402e-01 ... 2.42542192e-01 -1.80445898e+00 -5.81641309e-02]]] [[[ 1.54987186e-01 -5.49879909e-01 -6.99710786e-01 ... -2.68785115e-02 -7.86584079e-01 -1.32956073e-01] [ 9.57192201e-03 -1.92860290e-01 2.14838493e-03 ... -4.94637430e-01 -2.71615297e-01 5.20073995e-02] [-7.34721273e-02 5.59248663e-02 -4.82421741e-02 ... -2.15599731e-01 -5.25500298e-01 -2.43215546e-01] ... [-3.44984084e-01 6.92118034e-02 -1.97917223e-02 ... -2.40779117e-01 5.04027084e-02 -6.64734066e-01] [-7.51951158e-01 -5.03569126e-01 -4.69485641e-01 ... -3.74001026e-01 -1.45353764e-01 -2.65811175e-01] [-2.86013335e-01 -1.90692991e-01 -2.62680441e-01 ... 
-4.42271471e-01 -1.86907098e-01 3.58401954e-01]] [[ 2.57095657e-02 -3.13026398e-01 2.95933485e-01 ... 8.53785686e-03 -8.32038999e-01 2.88463712e-01] [ 1.60204068e-01 1.29360378e-01 1.57297269e-01 ... -1.36102235e+00 -6.55220568e-01 4.25706327e-01] [ 4.45878617e-02 -4.19109799e-02 7.87402783e-03 ... -2.37017199e-01 3.01019967e-01 -5.67350030e-01] ... [-3.61149192e-01 -3.48185480e-01 -4.37082201e-01 ... -7.61245489e-01 -2.19946355e-01 -6.69395208e-01] [-1.85893416e-01 -1.20164528e-01 4.52230901e-01 ... -1.15627877e-01 -1.26295358e-01 2.40526974e-01] [ 2.09252000e-01 1.96258634e-01 -5.27835786e-01 ... -5.82667552e-02 1.39824944e-02 3.49502474e-01]] [[ 1.65963277e-01 3.72994579e-02 4.14879292e-01 ... -5.46894968e-01 -4.32190448e-01 9.47578475e-02] [-1.36274517e+00 -6.91724360e-01 -5.06139576e-01 ... 8.61967802e-01 -8.09064686e-01 -2.55110711e-01] [-1.11690842e-01 -9.95914415e-02 -2.45335713e-01 ... -1.47883877e-01 -3.22854668e-01 -7.87115216e-01] ... [-3.75332922e-01 -4.65570420e-01 -1.05483450e-01 ... -2.10370645e-01 5.50626755e-01 -1.21439144e-01] [-4.29899275e-01 -5.85781932e-01 -7.11769462e-01 ... -1.49974555e-01 5.90288937e-01 -1.49528250e-01] [-5.49819767e-01 -3.48821461e-01 -5.85647285e-01 ... -9.47696030e-01 -3.79637271e-01 -5.11592686e-01]] ... [[-1.69990823e-01 1.27684608e-01 -1.16001908e-03 ... -3.05429012e-01 -5.13639927e-01 -5.65779805e-01] [-9.18141663e-01 9.28235799e-02 2.50542372e-01 ... -7.33650506e-01 -1.33466855e-01 -7.96474814e-01] [ 4.78529066e-01 5.98921716e-01 -8.02678287e-01 ... 8.69166136e-01 -1.88111201e-01 -3.95407706e-01] ... [-9.02524963e-02 -1.23498328e-01 -4.12803084e-01 ... -4.64094430e-01 -2.83044539e-02 -1.87096059e-01] [ 2.61789411e-01 8.67371500e-01 -2.37009928e-01 ... -4.04397160e-01 9.76383865e-01 4.25578654e-03] [ 3.48007560e-01 4.10959125e-01 8.49191993e-02 ... -2.60880828e-01 -1.81377411e-01 -1.83264196e-01]] [[-2.78007239e-01 -5.56048393e-01 1.29725963e-01 ... 
-2.40352703e-03 1.08360434e+00 -3.34540933e-01] [-2.63001502e-01 -6.29276931e-01 -1.88935667e-01 ... 4.35232848e-01 2.25446433e-01 -5.72591543e-01] [-5.34489274e-01 -6.61899567e-01 6.34280860e-01 ... -3.08238864e-01 1.38111806e+00 8.44140770e-04] ... [ 4.92556393e-02 5.81159592e-01 1.64251193e-01 ... 1.77995145e-01 -6.22798085e-01 -3.75652164e-01] [-4.47143227e-01 2.71907687e-01 -5.00789106e-01 ... -2.38253847e-01 -1.60396740e-01 -2.31031314e-01] [-7.61512756e-01 -4.58840370e-01 -4.96715695e-01 ... -3.58578652e-01 1.53632984e-01 -5.53671777e-01]] [[-7.52630293e-01 2.66275406e-01 4.00355101e-01 ... -9.53006864e-01 -4.48395908e-01 -1.92076519e-01] [-3.76715511e-02 -6.16497338e-01 -5.80856860e-01 ... -1.14222877e-01 2.24529907e-01 2.47772515e-01] [-3.11262667e-01 3.99936020e-01 -8.38719666e-01 ... -1.99511960e-01 -4.07327473e-01 -1.65983930e-01] ... [-1.02606392e+00 5.98188102e-01 2.13557288e-01 ... -9.44384813e-01 -1.84114706e-02 -9.52828955e-03] [ 6.48290217e-02 -6.68956399e-01 -4.96249467e-01 ... -5.81920221e-02 -2.65931606e-01 3.90304625e-01] [-1.68482825e-01 -2.98800170e-01 6.94499373e-01 ... 6.65058121e-02 -5.10169506e-01 -5.19986272e-01]]] [[[ 1.90465808e+00 1.92810059e-01 -7.12174401e-02 ... 4.58366990e-01 -7.95939326e-01 1.14304852e+00] [ 1.01450312e+00 2.29780316e+00 8.04017425e-01 ... 6.65543258e-01 -7.81677127e-01 2.51981354e+00] [-2.00980417e-02 2.06529304e-01 -9.24564958e-01 ... 6.23838365e-01 2.01077747e+00 -4.05676812e-01] ... [ 1.97559893e+00 1.20080344e-01 2.65360212e+00 ... 1.60921144e+00 3.46428180e+00 4.64930683e-02] [ 3.45538950e+00 7.77934253e-01 1.58761036e+00 ... -4.76129532e-01 2.15579677e+00 8.81071627e-01] [-4.34417188e-01 8.67751598e-01 1.14828205e+00 ... 6.00899279e-01 9.94079709e-01 1.53982148e-01]] [[-2.21405029e-02 5.64963408e-02 2.20999098e+00 ... 4.82197136e-01 1.06068921e+00 2.05664921e+00] [ 1.82187378e+00 -8.51804078e-01 -9.93616462e-01 ... 
1.31700981e+00 3.22351265e+00 1.66131413e+00] [ 2.32705903e+00 2.68097353e+00 1.68732500e+00 ... 2.53990698e+00 -1.30889639e-01 3.05618823e-01] ... [ 1.20270699e-01 5.58426797e-01 9.41130221e-01 ... 7.40895271e-01 1.19317734e+00 1.57124698e+00] [ 2.72715664e+00 1.19427359e+00 -7.50064969e-01 ... 1.21449816e+00 2.73255438e-01 5.99214315e-01] [-8.75614524e-01 1.38035700e-01 8.90873551e-01 ... -4.39553782e-02 5.99296503e-02 4.38457906e-01]] [[ 3.48450005e-01 1.01236093e+00 -2.01278850e-01 ... 1.00934899e+00 1.62983847e+00 1.30592942e+00] [ 1.93932831e+00 4.07151282e-01 1.27817318e-01 ... -5.84279180e-01 4.91732550e+00 4.28191394e-01] [ 2.56397963e+00 2.24891543e+00 4.68577057e-01 ... 2.87036085e+00 1.99343693e+00 1.32976964e-01] ... [-8.55796576e-01 1.47003019e+00 1.80036759e+00 ... 6.08821392e-01 -6.78459525e-01 2.14265180e+00] [-4.12643641e-01 1.16535711e+00 2.68193817e+00 ... 7.30423868e-01 1.66697764e+00 2.59298754e+00] [ 3.90109062e-01 1.45346713e+00 1.51025295e+00 ... 1.73584890e+00 1.96482813e+00 1.42459130e+00]] ... [[-5.86400211e-01 1.00989604e+00 -1.65295434e+00 ... 2.54906011e+00 -8.82653594e-01 1.88329136e+00] [ 2.07510972e+00 -5.69513664e-02 1.94789827e+00 ... 5.02040684e-01 2.18634605e+00 -2.93977350e-01] [ 8.15379083e-01 2.37506390e+00 3.99947095e+00 ... 1.00829244e+00 2.41103244e+00 5.52821338e-01] ... [ 2.68165898e+00 1.61791158e+00 9.88244474e-01 ... -2.24633336e-01 2.09802365e+00 -3.60196322e-01] [ 1.70907235e+00 -1.01151325e-01 1.00800550e+00 ... 9.87634301e-01 1.00670135e+00 2.60039330e+00] [ 2.65600300e+00 7.53346741e-01 1.67146730e+00 ... 1.28129888e+00 -1.49530649e+00 1.47472024e+00]] [[ 1.88115418e+00 1.35322738e+00 1.87309098e+00 ... 2.35340238e+00 2.23934221e+00 1.24581504e+00] [ 1.67632926e+00 2.09711480e+00 2.52065396e+00 ... 1.22222745e+00 1.81928754e+00 2.08845520e+00] [ 1.81951296e+00 1.76373422e+00 8.30518544e-01 ... 2.52370477e+00 4.95495647e-01 2.32659531e+00] ... [ 1.33557248e+00 -3.44777584e-01 2.48006177e+00 ... 
2.12968683e+00 1.29817748e+00 -1.00147322e-01] [ 7.45982289e-01 1.63233113e+00 1.77290261e+00 ... 2.46389580e+00 1.34525216e+00 8.89607370e-01] [ 9.35392916e-01 1.15836668e+00 9.62929964e-01 ... 2.79342294e+00 -1.53516633e-02 1.37795424e+00]] [[ 1.21020770e+00 8.17490041e-01 1.86652219e+00 ... 7.66345084e-01 8.55378628e-01 -7.27809444e-02] [ 1.60544121e+00 1.49595058e+00 5.89890361e-01 ... 9.58037019e-01 1.82267022e+00 5.23239970e-02] [ 2.51632380e+00 1.67798445e-01 1.60066152e+00 ... -8.89104158e-02 2.29991007e+00 2.01729560e+00] ... [ 1.90134883e+00 1.61554384e+00 -4.87064958e-01 ... 3.06533551e+00 5.61089694e-01 7.91842520e-01] [ 8.37014914e-01 1.76120472e+00 6.32484913e-01 ... 8.38372335e-02 9.39786613e-01 1.42363620e+00] [ 1.21337152e+00 3.51923376e-01 1.59689033e+00 ... -3.21565300e-01 5.19290149e-01 1.99981403e+00]]] [[[ 4.07861561e-01 6.39393091e-01 1.26097322e+00 ... 1.20459509e+00 1.54198086e+00 2.27562881e+00] [ 1.02437842e+00 2.93920493e+00 8.44075382e-01 ... 6.93979979e-01 2.00874829e+00 8.49111080e-01] [ 1.00665247e+00 3.36945266e-01 9.80402708e-01 ... 1.48217177e+00 1.48484862e+00 1.09506404e+00] ... [ 1.58465683e+00 5.04278064e-01 1.14376938e+00 ... 2.28699398e+00 9.90043581e-01 8.76735985e-01] [ 9.86068189e-01 1.69203305e+00 1.13955259e+00 ... 5.46391666e-01 5.47216535e-01 1.61265564e+00] [ 1.41760612e+00 1.24086821e+00 7.21087217e-01 ... 5.42886078e-01 1.07018101e+00 8.66390824e-01]] [[ 6.80241704e-01 2.69870520e-01 2.07994175e+00 ... -3.33327353e-01 1.20035982e+00 -4.48218197e-01] [ 1.18737149e+00 6.02143586e-01 1.15266919e+00 ... 1.08758473e+00 1.39960265e+00 2.15839338e+00] [ 1.09316623e+00 1.14099383e+00 1.56688178e+00 ... 1.67246282e+00 1.85144937e+00 1.76168525e+00] ... [ 2.01216483e+00 1.04469788e+00 9.45867479e-01 ... 6.17755353e-01 9.45785642e-01 1.48763824e+00] [ 8.36066604e-01 2.63420165e-01 1.40846837e+00 ... 7.38020182e-01 1.44623685e+00 1.72312140e+00] [ 1.91596401e+00 2.19199443e+00 1.12860453e+00 ... 
6.04823053e-01 2.02066565e+00 6.75283074e-01]] [[ 2.10607338e+00 1.11996078e+00 1.70061958e+00 ... 1.41992235e+00 -2.59035472e-02 1.81562769e+00] [ 1.34364963e+00 5.42566597e-01 1.85361430e-02 ... 1.06138206e+00 7.40054488e-01 1.23833275e+00] [ 4.41575259e-01 1.52050292e+00 8.69845629e-01 ... 1.18329108e+00 1.22792149e+00 2.03233212e-01] ... [ 1.46151435e+00 1.00875950e+00 1.83732021e+00 ... 1.51589274e+00 1.45488763e+00 1.04863131e+00] [ 7.87652731e-01 1.56346500e+00 9.23948884e-01 ... 1.28071618e+00 1.84921253e+00 1.10792613e+00] [ 2.00163245e+00 7.15427637e-01 7.88633645e-01 ... 1.45081854e+00 7.05587387e-01 9.78158474e-01]] ... [[ 8.34850848e-01 1.79272270e+00 7.87158906e-01 ... 4.14449930e-01 8.92825961e-01 1.84443307e+00] [ 9.53649104e-01 1.65255487e+00 1.34174633e+00 ... 7.86626756e-01 1.09373796e+00 1.47911024e+00] [ 2.42419887e+00 1.68361366e-01 1.02320802e+00 ... 1.90982294e+00 1.52045810e+00 8.89792144e-01] ... [ 1.28492904e+00 9.21348274e-01 4.98126805e-01 ... 1.25693834e+00 4.70282614e-01 8.74590278e-01] [ 9.97874916e-01 8.28234732e-01 4.15843546e-01 ... 9.56574798e-01 1.65530360e+00 6.91296518e-01] [ 1.71402252e+00 1.24616575e+00 1.38336337e+00 ... 2.06707263e+00 1.46450484e+00 9.96849537e-01]] [[ 1.53928459e+00 7.70054877e-01 3.66854757e-01 ... 7.15297520e-01 8.13481808e-01 1.72066760e+00] [ 2.01937175e+00 7.32460260e-01 1.07740283e+00 ... 1.77169323e+00 1.36222363e+00 1.01962852e+00] [ 7.90784240e-01 1.44378591e+00 1.45400333e+00 ... -3.23735744e-01 1.68688428e+00 4.60460961e-01] ... [ 1.42829347e+00 1.35590887e+00 1.75768960e+00 ... 1.90064549e+00 1.18172693e+00 1.41749132e+00] [ 6.85306251e-01 9.98489738e-01 5.37787914e-01 ... 5.00003397e-01 1.71887147e+00 1.45437372e+00] [ 8.69037926e-01 1.27373636e+00 1.27793396e+00 ... 9.02875721e-01 1.94429672e+00 9.67225313e-01]] [[ 1.44090533e+00 8.09405446e-01 1.55344260e+00 ... 1.70425904e+00 1.54896176e+00 1.13863945e+00] [ 1.66675806e+00 1.66494226e+00 8.87842178e-01 ... 
1.23294497e+00 4.37087148e-01 1.31653798e+00] [ 8.67713094e-01 1.16998506e+00 1.17685258e+00 ... 4.99676347e-01 2.06137776e+00 5.06597579e-01] ... [ 1.31829000e+00 5.73508561e-01 1.01748943e+00 ... 1.18710756e+00 2.07018423e+00 1.11860013e+00] [ 1.21391284e+00 1.28510201e+00 1.65047121e+00 ... 7.89720237e-01 8.90062869e-01 1.20646536e+00] [ 1.08746171e+00 9.66400743e-01 1.79066133e+00 ... 1.73688674e+00 9.99087632e-01 2.16382265e+00]]] [[[ 1.13977659e+00 5.57668924e-01 7.08259121e-02 ... -3.73356380e-02 7.15344027e-03 -5.10192811e-01] [-3.97764415e-01 1.24405801e+00 1.91072905e+00 ... 3.64093333e-01 1.83583248e+00 -2.73847371e-01] [ 1.07261670e+00 8.38965535e-01 1.32844067e+00 ... -6.61984861e-01 1.01228189e+00 6.24720752e-01] ... [ 1.15331137e+00 -1.13551021e+00 1.44345582e+00 ... -1.65049583e-01 6.78906560e-01 -8.63167085e-03] [ 3.61969560e-01 -7.59759620e-02 3.77764404e-01 ... 1.34405398e+00 9.86914098e-01 -7.44621992e-01] [-5.78426778e-01 7.03857243e-01 1.82189035e+00 ... 4.73579437e-01 5.01188397e-01 -1.22171390e+00]] [[ 4.40653563e-01 -4.17655140e-01 -3.85097831e-01 ... -1.15824378e+00 7.27627754e-01 4.09221798e-01] [ 1.41479298e-01 4.33130711e-01 1.11005378e+00 ... 8.44340503e-01 7.76698530e-01 7.45016932e-01] [ 1.84273231e+00 1.33321786e+00 1.14244647e-01 ... 1.48221552e-01 -4.87606227e-01 7.54191577e-01] ... [ 1.04518390e+00 8.44692886e-01 4.00842607e-01 ... 1.38520694e+00 9.06713426e-01 -7.08802417e-02] [ 6.90672696e-01 1.80127367e-01 -1.71984546e-02 ... 1.06370115e+00 -8.99324846e-03 -4.56246883e-01] [ 6.19628668e-01 1.69703794e+00 5.86707413e-01 ... 5.13781369e-01 8.63671362e-01 4.59337771e-01]] [[ 8.77109528e-01 1.29842901e+00 1.68495476e-01 ... -1.46564618e-02 3.16396356e-01 1.41213644e+00] [ 4.25662637e-01 1.30388141e+00 6.68655217e-01 ... 5.09876609e-01 6.60199881e-01 -8.32459450e-01] [-7.71447301e-01 1.16060305e+00 1.25522062e-01 ... 5.80096185e-01 9.51408327e-01 1.63298309e+00] ... [ 1.78758943e+00 -2.04129145e-02 -7.55624752e-03 ... 
-3.93450677e-01 4.08225656e-01 1.19379628e+00] [ 5.60190305e-02 -1.33742606e-02 5.69542646e-01 ... 6.91399336e-01 -1.73436254e-01 -2.35121325e-01] [ 1.11320698e+00 1.84841707e-01 6.55884892e-02 ... 1.06046712e+00 1.80031389e-01 -1.13557011e-01]] ... [[ 3.67856950e-01 6.71261072e-01 -5.39070845e-01 ... 2.07230017e-01 1.46230504e-01 6.99598730e-01] [ 5.77356637e-01 -1.80937741e-02 7.64646351e-01 ... 5.95128536e-01 4.11496043e-01 -3.06215227e-01] [ 1.16038847e+00 -2.29945824e-01 -3.01647961e-01 ... -5.14532506e-01 8.93468559e-01 9.00678635e-01] ... [ 1.99087298e+00 -3.20532434e-02 -4.83597547e-01 ... 2.11232543e+00 -1.91554546e-01 1.05200005e+00] [ 3.65893304e-01 1.38944852e+00 6.27612233e-01 ... -2.56385297e-01 3.79034191e-01 -8.65648985e-01] [ 5.22819102e-01 3.13116491e-01 1.68129826e+00 ... 1.68449390e+00 9.23259974e-01 -3.86755735e-01]] [[ 3.84271562e-01 -1.20425498e+00 6.69719100e-01 ... 1.04871249e+00 8.13597202e-01 -2.09995985e-01] [ 9.41708028e-01 -1.25278485e+00 3.25524896e-01 ... 1.84255004e+00 1.05689025e+00 -1.27900653e-02] [ 4.65629220e-01 1.45208347e+00 -3.67203414e-01 ... 1.66084266e+00 -2.39805520e-01 6.83858156e-01] ... [ 2.66094744e-01 -1.52418107e-01 1.69812930e+00 ... 6.38576388e-01 8.21651995e-01 1.96859860e+00] [-1.24649119e+00 7.95573771e-01 1.40119457e+00 ... 9.27199543e-01 1.05811489e+00 5.29259324e-01] [ 8.88745487e-01 3.05549875e-02 4.91251409e-01 ... 6.86065614e-01 1.22142231e+00 8.84073794e-01]] [[ 4.66956437e-01 -1.72158599e-01 1.12257123e+00 ... 3.67768377e-01 3.32976967e-01 -1.41916022e-01] [-3.15043256e-02 9.15131748e-01 1.19973588e+00 ... 7.10900724e-01 1.31587017e+00 7.78111637e-01] [ 1.20247531e+00 1.68978941e+00 6.07064664e-01 ... 9.70197856e-01 5.94073832e-01 7.06425309e-01] ... [ 6.71224535e-01 4.45452392e-01 8.89596283e-01 ... 1.51477098e+00 5.12114018e-02 1.12576187e+00] [ 9.57607269e-01 1.21534586e+00 2.47940365e-02 ... 9.25866604e-01 2.97356963e-01 2.44229376e-01] [ 1.06220138e+00 6.45076334e-02 1.26657867e+00 ... 
1.61875829e-01 8.52064043e-02 9.02462900e-01]]] [[[ 5.84050655e-01 -2.30933428e-02 2.39399627e-01 ... 1.82470590e-01 4.86878335e-01 5.31500280e-01] [ 1.00694828e-01 3.13082188e-01 1.28792990e-02 ... 2.55232193e-02 2.91630954e-01 5.43025970e-01] [-3.95132080e-02 7.48090297e-02 5.32121718e-01 ... 1.51525885e-01 1.07445620e-01 8.18911254e-01] ... [ 9.37803313e-02 6.51853025e-01 -2.98285242e-02 ... 8.14304948e-01 -8.90476257e-02 3.95210862e-01] [ 8.96156207e-02 -5.00390053e-01 3.58804196e-01 ... -9.33607891e-02 3.48606735e-01 -1.32729575e-01] [ 1.77873343e-01 -3.21867347e-01 9.95701179e-03 ... -7.51816556e-02 4.32761669e-01 -1.47354286e-02]] [[ 2.06331074e-01 -2.09814742e-01 1.04866290e+00 ... 3.21769208e-01 -1.08237177e-01 3.92694861e-01] [ 5.65024614e-01 1.65153638e-01 4.97027934e-01 ... -5.43731213e-01 6.42263055e-01 -5.21149486e-03] [ 2.86406875e-01 3.93479168e-02 2.43758082e-01 ... 1.05699547e-01 4.57852900e-01 -2.32084140e-01] ... [ 4.31285575e-02 1.95244059e-01 -7.43611380e-02 ... 2.52246678e-01 -1.01759858e-01 5.68326652e-01] [-4.29234296e-01 -3.32734406e-01 9.31447923e-01 ... -2.67373919e-02 5.38894832e-01 2.66970783e-01] [ 7.88477838e-01 4.50138032e-01 1.15944588e+00 ... 7.75095820e-02 8.24170336e-02 -1.47780299e-01]] [[ 1.98750403e-02 3.82233769e-01 -2.48587653e-01 ... 4.72428910e-02 -5.71512692e-02 4.94973987e-01] [ 3.96611512e-01 1.91310704e-01 2.48893186e-01 ... 4.32137251e-02 -7.57907778e-02 6.73326433e-01] [ 5.85231781e-01 6.21745400e-02 1.31730056e+00 ... 2.72124350e-01 -2.64198966e-02 3.52110803e-01] ... [ 6.58913374e-01 -5.88746786e-01 1.62493750e-01 ... 2.95142233e-01 1.18135773e-01 1.08420566e-01] [ 6.66763186e-01 7.91915506e-02 4.86119717e-01 ... 1.95137650e-01 1.47263676e-01 2.16165081e-01] [ 1.27173616e-02 8.33401680e-01 1.73362970e-01 ... 1.43606260e-01 7.77933657e-01 7.34646916e-01]] ... [[ 5.72766900e-01 4.18061167e-01 6.05150998e-01 ... 8.01536907e-03 -4.50186208e-02 5.31878062e-02] [ 6.50890231e-01 -1.10107204e-02 -2.73130715e-01 ... 
8.76711011e-02 4.39921886e-01 -3.83221805e-01] [-2.62077719e-01 5.56522131e-01 -9.13652256e-02 ... 4.24735934e-01 -6.46958724e-02 3.81333753e-02] ... [ 3.28056276e-01 -6.15583837e-01 4.58787233e-01 ... 7.49800563e-01 -1.50456056e-01 -1.57176200e-02] [ 8.75344336e-01 1.50669247e-01 2.32290089e-01 ... 1.06912918e-01 -6.70307055e-02 1.19540438e-01] [ 2.97526479e-01 6.07115209e-01 2.49281660e-01 ... 4.17097569e-01 6.76229894e-01 4.17606801e-01]] [[-1.75378807e-02 9.64808762e-01 -3.79986346e-01 ... 8.50951850e-01 3.41435432e-01 -3.59846652e-01] [ 3.24620396e-01 6.02366090e-01 -3.07101104e-02 ... 2.82368720e-01 2.77686507e-01 -2.77380217e-02] [-3.08785319e-01 -2.66026825e-01 6.11091554e-01 ... 6.07545264e-02 -3.42876576e-02 -7.22688809e-03] ... [ 5.57109177e-01 1.57307863e-01 4.77575809e-01 ... 7.08000839e-01 1.07292002e-02 2.46169582e-01] [ 9.03577149e-01 -3.09540285e-03 5.52134693e-01 ... -2.91929126e-01 -6.41792044e-02 2.48442367e-01] [-1.21457681e-01 -5.66338114e-02 1.88560829e-01 ... 6.90454185e-01 3.35545808e-01 6.15062773e-01]] [[ 1.22720860e-01 5.24645686e-01 -1.53410938e-02 ... 2.36529827e-01 6.16447806e-01 9.87876579e-02] [-8.16608220e-02 1.75857127e-01 -4.99426574e-02 ... -4.58297394e-02 5.56934774e-01 4.18335706e-01] [ 2.28361383e-01 3.00135761e-01 -6.26790404e-01 ... 4.23440039e-01 5.42392731e-02 2.05375761e-01] ... [ 3.32386494e-01 4.67018187e-01 2.55590320e-01 ... 3.55930269e-01 3.31911683e-01 1.73541039e-01] [-1.67966917e-01 2.83327788e-01 4.97679830e-01 ... -8.14492404e-02 9.34488893e-01 1.98967874e-01] [ 4.75806057e-01 3.36945564e-01 1.44324750e-01 ... 3.90764654e-01 -1.20851494e-01 6.50350332e-01]]]] [[[[-4.26194966e-01 1.08890511e-01 -6.49273396e-01 ... -8.58352244e-01 -5.66350043e-01 -4.88921493e-01] [-8.55412602e-01 -8.53115737e-01 -3.18515271e-01 ... 3.41447175e-01 8.72809768e-01 -6.12737954e-01] [-1.45473734e-01 1.63829461e-01 -7.96635330e-01 ... -6.03198767e-01 -8.00349593e-01 -7.01026440e-01] ... 
[-3.95017952e-01 2.33327612e-01 -2.19413906e-01 ... 4.03471708e-01 -5.87080121e-01 -2.59484768e-01] [ 1.74920604e-01 -7.70669103e-01 -3.86886060e-01 ... 2.64662474e-01 2.79633313e-01 -1.85927823e-02] [-4.90894079e-01 4.76067960e-01 6.84096456e-01 ... 3.68245780e-01 -6.12617433e-01 8.92719924e-01]] [[-7.11420029e-02 1.24118082e-01 -8.86254236e-02 ... 5.03090739e-01 4.34343994e-01 2.95340985e-01] [-6.96293652e-01 -1.19422972e+00 -1.72404572e-01 ... 3.38382244e-01 6.55865252e-01 -2.25343838e-01] [ 6.17964447e-01 -4.02186155e-01 -7.33049750e-01 ... 5.92982396e-05 -1.25648046e+00 1.09635498e-02] ... [ 1.68005884e-01 9.03889000e-01 -2.29815528e-01 ... -2.56267279e-01 -4.76390779e-01 1.33107877e+00] [ 2.98830364e-02 3.10821563e-01 5.13577312e-02 ... -4.15060371e-01 -1.66226700e-01 3.60994518e-01] [ 3.64496894e-02 -9.57017168e-02 -3.87296170e-01 ... 7.12620318e-01 -1.19852796e-02 8.29503834e-02]] [[-4.16619331e-01 1.15099818e-01 -2.35805258e-01 ... -7.59158373e-01 -1.95687160e-01 1.03010550e-01] [ 1.11879075e+00 8.87909308e-02 7.20685780e-01 ... -5.51656961e-01 -2.87420928e-01 -8.27896953e-01] [-1.63915381e-01 -2.37746567e-01 -1.06719232e+00 ... 9.46839571e-01 2.75978953e-01 5.03259599e-01] ... [-7.05167055e-02 -7.22811162e-01 5.92213571e-01 ... -1.51960492e-01 -2.08617330e+00 -1.11135989e-02] [-1.64794803e-01 5.56030869e-01 -6.48994684e-01 ... -1.25036985e-01 -3.96068931e-01 -5.62065281e-02] [ 2.71706544e-02 -2.60816664e-01 -4.39162225e-01 ... -6.85551688e-02 -2.94086840e-02 -1.25929165e+00]] ... [[ 8.73760521e-01 -7.94478714e-01 -1.48854628e-01 ... -9.60043907e-01 -5.34701645e-01 -7.87057355e-02] [ 3.08146030e-01 -2.37251624e-01 9.79314506e-01 ... 5.64532168e-02 -2.38343269e-01 -1.72593936e-01] [-1.39780473e-02 -2.36195937e-01 8.42283726e-01 ... -2.45046645e-01 -3.17844570e-01 -1.71992704e-01] ... [-4.64054972e-01 -4.79795597e-02 2.84605116e-01 ... 4.12374496e-01 2.69215912e-01 -5.18005967e-01] [ 9.31816101e-02 -7.04534054e-01 1.78037956e-01 ... 
3.10427070e-01 6.84929192e-02 -1.44805956e+00] [-1.45517850e+00 -1.63874224e-01 -3.55759263e-01 ... -4.29419339e-01 -4.40443754e-01 7.13846534e-02]] [[-1.03137064e+00 -1.94784909e-01 -8.38719785e-01 ... -4.08762127e-01 -2.40255326e-01 1.04729101e-01] [ 1.87049210e-01 7.80816749e-02 -1.80073678e-01 ... 5.69110036e-01 7.80628771e-02 8.15530479e-01] [-1.81605235e-01 -3.43294829e-01 8.81836563e-02 ... 1.24530815e-01 -9.13069069e-01 5.89480579e-01] ... [-3.51336658e-01 6.33061767e-01 -1.17433631e+00 ... -1.88696146e-01 -6.58002377e-01 -7.84236193e-01] [ 1.09414995e+00 3.90615076e-01 4.45481509e-01 ... -7.93722689e-01 2.24833950e-01 -8.44969869e-01] [-1.85112551e-01 -1.12190580e+00 -8.80918801e-01 ... -4.78612095e-01 -2.97360681e-02 5.34665406e-01]] [[-1.79169253e-01 -6.77315235e-01 -1.28306353e+00 ... -5.05797505e-01 7.83756971e-02 -2.53948849e-02] [-7.01902330e-01 1.01831853e-01 2.36882553e-01 ... 2.56798446e-01 -1.05808247e-02 1.68146878e-01] [-7.45336354e-01 3.25166196e-01 -1.29058426e-02 ... -1.63431853e-01 -5.06319702e-01 -4.01815921e-02] ... [-7.95533583e-02 2.04992458e-01 -5.72922409e-01 ... 2.66466111e-01 1.96035102e-01 -1.13898075e+00] [ 7.65220523e-01 1.33961529e-01 -3.18017960e-01 ... 4.86192405e-01 5.12380935e-02 -4.07953620e-01] [-4.09760505e-01 -3.58851463e-01 -9.08413112e-01 ... -4.16812420e-01 6.65259957e-02 4.27945882e-01]]] [[[-6.96822479e-02 -1.15132714e-02 1.10343538e-01 ... 1.45567104e-01 -1.45153582e-01 -8.47609699e-01] [-2.49367997e-01 -4.53628600e-01 -1.09410204e-01 ... 4.71926518e-02 -1.01410401e+00 2.55802944e-02] [-3.85319382e-01 2.58341551e-01 -7.38592565e-01 ... -7.32777357e-01 -8.12392712e-01 7.36195624e-01] ... [-2.10288718e-01 -4.04543318e-02 -6.26181006e-01 ... -5.05309641e-01 -1.03166127e+00 3.42717022e-01] [-6.46145701e-01 -5.51856995e-01 -3.09178740e-01 ... -1.32882267e-01 3.53663057e-01 -4.54569042e-01] [-3.08663934e-01 4.04356718e-01 -1.18358597e-01 ... 
-7.00745806e-02 -3.70898992e-01 -6.09458447e-01]] [[-2.69864917e-01 -2.01342329e-01 3.71902376e-01 ... -9.15257514e-01 -6.48782611e-01 -8.78678933e-02] [ 8.02048519e-02 -8.81585121e-01 -4.90757257e-01 ... -3.39476436e-01 -5.07190883e-01 2.27016926e-01] [ 7.43021309e-01 -6.13257587e-01 -2.91740954e-01 ... 5.09619117e-01 2.08465308e-01 -1.85265511e-01] ... [-7.66624629e-01 5.39064646e-01 -8.17777812e-01 ... -5.64351737e-01 -3.59306425e-01 2.58551568e-01] [-2.39836395e-01 -5.04418731e-01 -4.53164041e-01 ... -2.89005876e-01 -6.94236457e-02 -3.68020982e-02] [-2.77664632e-01 -1.13302022e-01 2.53361672e-01 ... -3.25521260e-01 -3.91516596e-01 3.37483525e-01]] [[-2.13173732e-01 7.46892169e-02 -6.06055498e-01 ... 1.28424928e-01 -7.86314368e-01 4.76829529e-01] [-3.80460501e-01 2.61028064e-03 6.17689788e-01 ... -2.62813181e-01 -8.04648399e-02 -1.03778087e-01] [-8.02047849e-01 -5.79050362e-01 -2.34930396e-01 ... -5.23193777e-01 1.87135130e-01 7.39908636e-01] ... [-6.45907521e-01 -5.80890253e-02 -8.93153176e-02 ... -4.61384535e-01 -2.14085370e-01 8.34336281e-02] [ 1.74641222e-01 -3.79273087e-01 1.14709221e-01 ... -1.13618858e-01 -8.85021016e-02 -8.66907984e-02] [ 4.15977696e-03 2.16474980e-01 -4.67306852e-01 ... 6.19185865e-02 -1.66753486e-01 1.00805871e-01]] ... [[-7.48381555e-01 4.83528115e-02 -1.87374562e-01 ... -4.00083601e-01 -7.56049335e-01 -4.65226591e-01] [-1.47578299e-01 -1.80497184e-01 -1.41426340e-01 ... -4.20877427e-01 -2.26621211e-01 2.03919947e-01] [ 2.50209361e-01 -2.06154034e-01 -1.39879704e-01 ... -8.82935226e-01 1.38480961e-01 -4.99825537e-01] ... [-1.69245675e-01 -5.51103294e-01 -3.15564841e-01 ... -4.84057903e-01 -4.07069176e-01 -7.59160638e-01] [-3.62397432e-01 -2.27849230e-01 5.86604979e-03 ... 4.50143278e-01 -5.84026217e-01 2.11165354e-01] [ 3.75876546e-01 -4.31133583e-02 -4.04988587e-01 ... 4.65025231e-02 -3.95466328e-01 3.39349389e-01]] [[-9.30849314e-02 -3.22191417e-01 -7.21459985e-01 ... 
-1.34657854e-02 -4.54200715e-01 -4.34846878e-01] [-4.19251651e-01 -3.34289640e-01 -8.16716552e-01 ... -5.72635293e-01 4.69438657e-02 1.63669467e-01] [-9.52076197e-01 -5.82368731e-01 3.38319749e-01 ... 1.58386692e-01 -4.11808729e-01 8.63793194e-01] ... [ 5.21976590e-01 8.81385088e-01 1.97086826e-01 ... 4.14060950e-01 5.11830822e-02 -2.64390558e-01] [ 4.74104099e-02 -2.01450646e-01 -7.27906823e-01 ... -4.76548523e-02 -2.04906538e-01 -4.72430140e-01] [-8.92103910e-01 -5.72642125e-02 -1.92595541e-01 ... 1.52631834e-01 -1.97275534e-01 -9.81504560e-01]] [[-5.26828468e-02 3.46606553e-01 -8.25181246e-01 ... 1.11294212e-02 -6.32973254e-01 -3.41970235e-01] [-6.60746872e-01 4.93238091e-01 -2.33183801e-01 ... -3.69373500e-01 1.44920930e-01 1.05787054e-01] [-5.51045179e-01 -5.77728748e-01 2.93549418e-01 ... 1.87663853e-01 -3.68098438e-01 3.73898894e-01] ... [-2.52334386e-01 -2.06620723e-01 -3.07954531e-02 ... -4.08806026e-01 6.20940775e-02 -6.91078484e-01] [-4.53106105e-01 -1.43238939e-02 -7.40437746e-01 ... -6.73346519e-01 2.24976718e-01 -3.16162050e-01] [-1.56257197e-01 -9.51459706e-02 -1.83320984e-01 ... -9.36289907e-01 -1.69116512e-01 -3.53878811e-02]]] [[[ 1.35420635e-01 1.38098466e+00 3.73935413e+00 ... 1.72969139e+00 1.86634198e-01 1.07870281e+00] [ 1.33049476e+00 1.34950721e+00 1.77173007e+00 ... 8.02646518e-01 1.32617486e+00 1.59842372e+00] [ 4.92858171e-01 3.82520795e+00 2.99183577e-01 ... 1.59010291e+00 1.80074430e+00 1.49127364e+00] ... [ 1.69180667e+00 1.15296745e+00 1.51052046e+00 ... 1.10316968e+00 1.40030539e+00 4.84370664e-02] [-5.71625233e-01 2.67419481e+00 2.30057669e+00 ... -3.31485993e-03 2.69597709e-01 1.62631080e-01] [ 8.05795789e-01 -8.85662735e-02 1.08053792e+00 ... 1.71570316e-01 1.39122617e+00 -1.45584807e-01]] [[ 1.73637056e+00 2.53229928e+00 -7.17931092e-02 ... 2.37666607e+00 -1.10045686e-01 6.48244083e-01] [-5.24670184e-01 7.52398312e-01 3.10347295e+00 ... 
1.51710939e+00 7.00209260e-01 2.35894418e+00] [ 3.63016948e-02 1.14682198e+00 9.80476797e-01 ... 1.38279974e+00 5.79521775e-01 2.25872636e+00] ... [ 9.68842626e-01 5.66327989e-01 -1.60739422e-01 ... 9.51306462e-01 8.05672824e-01 2.42320061e-01] [ 2.08782482e+00 1.07685888e+00 3.05303216e-01 ... -7.27238134e-02 2.40222335e+00 2.90485263e+00] [-4.70457703e-01 2.74654198e+00 1.45411766e+00 ... 1.64669168e+00 1.08246636e+00 5.48344195e-01]] [[ 1.11895514e+00 3.38547158e+00 2.26060128e+00 ... 2.47127461e+00 1.54022801e+00 -2.12883390e-02] [ 7.25146174e-01 -6.10877454e-01 1.13219345e+00 ... 3.88655829e+00 7.85883725e-01 2.13991261e+00] [ 2.31251073e+00 1.47385871e+00 2.88137937e+00 ... 1.56333148e+00 2.41399016e-02 1.54790294e+00] ... [ 4.46459007e+00 9.49861944e-01 7.47757912e-01 ... 2.28119373e+00 3.29511613e-01 -5.58923304e-01] [ 3.30359125e+00 -9.39040720e-01 4.66865689e-01 ... 1.47866964e+00 3.35574603e+00 1.91706192e+00] [ 1.24747813e+00 2.68185854e-01 -7.83381104e-01 ... 5.69359541e-01 2.17053080e+00 2.04803085e+00]] ... [[ 3.09102035e+00 1.36488426e+00 -4.10628468e-01 ... 2.57386833e-01 4.78892982e-01 9.06960368e-01] [ 5.38654327e-01 1.64484069e-01 3.12151861e+00 ... -8.48845482e-01 -4.15356457e-01 -1.12632501e+00] [ 9.49162364e-01 3.00538428e-02 8.74781370e-01 ... 4.19216216e-01 -2.17048712e-02 2.32244396e+00] ... [ 1.95254219e+00 1.20515537e+00 -1.12536490e+00 ... 4.69981134e-02 1.72773492e+00 -3.18348467e-01] [ 3.57683033e-01 1.06799614e+00 -1.06719226e-01 ... 5.66120505e-01 6.04778707e-01 2.08786416e+00] [ 9.61696446e-01 1.92997038e+00 -3.03418517e-01 ... 4.30520624e-02 1.41531336e+00 1.75572693e+00]] [[ 9.14586723e-01 2.85319537e-01 1.26390100e+00 ... 3.02752435e-01 2.13997269e+00 2.88376355e+00] [ 7.96501279e-01 9.90571439e-01 3.67430687e-01 ... 1.76904666e+00 1.11664248e+00 1.56937301e+00] [ 1.40776908e+00 -3.48221093e-01 1.63383877e+00 ... 1.02157331e+00 1.78510153e+00 3.52198005e-01] ... [ 1.05830681e+00 1.92961955e+00 1.36526072e+00 ... 
1.50063217e+00 1.17161267e-01 2.80080271e+00] [ 1.03151250e+00 9.18189347e-01 -2.50395328e-01 ... 1.12344933e+00 3.09527278e+00 5.90423882e-01] [-5.13234556e-01 -3.87263268e-01 1.41801560e+00 ... 3.15057248e-01 -2.06315899e+00 1.79505587e+00]] [[ 1.03952467e+00 6.31694853e-01 1.10729980e+00 ... 2.01267052e+00 -1.70004725e-01 3.24963570e+00] [-1.31367639e-01 6.22832298e-01 1.80138075e+00 ... 2.67568350e-01 1.67483747e+00 2.60081887e+00] [ 2.21613812e+00 6.83861136e-01 9.39090550e-02 ... 1.80919445e+00 1.70551062e+00 1.68068790e+00] ... [-3.66926044e-01 2.49306500e-01 9.34265733e-01 ... 1.52847826e+00 1.45308971e+00 3.09857392e+00] [ 5.18960571e+00 -7.71574199e-01 4.54260916e-01 ... 2.43571949e+00 1.46706450e+00 9.96857047e-01] [ 1.28477025e+00 1.83255184e+00 4.27753866e-01 ... 2.57939965e-01 3.18539858e+00 2.43744755e+00]]] [[[ 8.92646730e-01 9.87642884e-01 1.12513459e+00 ... 1.14122522e+00 4.74776506e-01 4.51525897e-01] [ 1.18562567e+00 1.11608803e+00 1.80455580e-01 ... 1.98437774e+00 2.10015535e+00 8.69923592e-01] [ 1.42087030e+00 1.89303148e+00 4.81873125e-01 ... 9.60187495e-01 8.60833168e-01 1.11771083e+00] ... [ 2.17856264e+00 1.99852479e+00 1.49942732e+00 ... 7.98196375e-01 1.06558347e+00 4.54070330e-01] [ 1.53567982e+00 1.04671538e+00 1.87371171e+00 ... 1.62396932e+00 3.72787535e-01 6.74018919e-01] [ 1.23667622e+00 6.50750458e-01 1.76399410e+00 ... 9.89281714e-01 7.39737034e-01 1.40931022e+00]] [[ 2.20280743e+00 2.11643505e+00 1.06235063e+00 ... 1.93311036e+00 1.75400531e+00 8.97626519e-01] [ 5.63011587e-01 6.49803221e-01 1.57238400e+00 ... 1.36456919e+00 6.30187511e-01 1.03604054e+00] [ 9.74612832e-01 3.74357760e-01 8.93877923e-01 ... 1.47196877e+00 1.13409531e+00 1.89082348e+00] ... [ 2.41934106e-01 1.05690765e+00 1.65456510e+00 ... 1.40589619e+00 1.47908914e+00 1.11419439e+00] [ 1.01336849e+00 8.76554847e-01 8.25431585e-01 ... 1.25960267e+00 2.23622784e-01 1.02354932e+00] [ 5.57245493e-01 6.17767632e-01 1.10338986e+00 ... 
4.29129869e-01 1.16858733e+00 1.12571287e+00]] [[ 1.32653511e+00 1.12956166e+00 3.70945781e-01 ... 4.54506725e-01 2.93743834e-02 1.63874733e+00] [ 5.78189611e-01 1.11894131e+00 1.63445067e+00 ... 1.55525792e+00 1.18409836e+00 8.21948528e-01] [ 1.66993594e+00 1.63946342e+00 6.69865906e-01 ... 9.33286011e-01 1.71786106e+00 1.15403593e+00] ... [ 1.66612649e+00 2.08351398e+00 1.22928870e+00 ... 1.68573582e+00 9.00290906e-01 1.14358366e+00] [ 1.15677989e+00 1.17643595e+00 6.65207624e-01 ... 7.71079302e-01 1.29145598e+00 9.65410411e-01] [ 1.15114546e+00 1.25461936e+00 9.37823176e-01 ... 5.40523827e-01 4.57437247e-01 7.32681334e-01]] ... [[ 9.71849144e-01 1.97839391e+00 3.98995668e-01 ... 1.47211659e+00 1.72789943e+00 1.26800132e+00] [ 1.37538087e+00 9.36030686e-01 1.26297033e+00 ... 1.10551941e+00 1.57267237e+00 1.23447788e+00] [ 1.16665757e+00 1.00331521e+00 1.62550437e+00 ... 1.32728958e+00 5.53458452e-01 1.11949682e+00] ... [ 9.57045197e-01 1.16702151e+00 1.55231214e+00 ... 1.26107252e+00 8.67864847e-01 1.28513074e+00] [ 4.83032852e-01 4.44249064e-01 1.69716406e+00 ... 7.25790203e-01 8.36632311e-01 8.52742851e-01] [ 1.38870823e+00 1.80353075e-01 4.88015026e-01 ... 7.16795981e-01 1.57713544e+00 1.51657820e+00]] [[ 1.01359224e+00 1.11410069e+00 -2.97295838e-03 ... 1.35358751e-01 1.37310481e+00 4.40755635e-01] [ 1.78750670e+00 1.50998902e+00 6.03803396e-01 ... 1.06151259e+00 9.32486415e-01 8.51857126e-01] [ 9.76521730e-01 5.19901097e-01 1.36425018e+00 ... 1.71082330e+00 1.08824360e+00 -2.52937317e-01] ... [ 1.74141347e+00 1.84735513e+00 5.49945176e-01 ... 5.43391347e-01 9.38851237e-01 2.05183768e+00] [ 1.36195922e+00 8.94434392e-01 1.43837047e+00 ... 1.85099989e-01 1.22969139e+00 1.56579494e+00] [ 1.60902786e+00 7.47229099e-01 1.37279081e+00 ... 1.72655916e+00 8.88196290e-01 1.21553051e+00]] [[ 1.65165925e+00 1.39184797e+00 1.57278359e+00 ... 9.54002023e-01 4.40629244e-01 4.77312684e-01] [ 1.50925601e+00 1.39899230e+00 1.50458264e+00 ... 
5.21308064e-01 8.74480724e-01 9.93534744e-01] [ 1.27548897e+00 1.17717195e+00 1.35405242e+00 ... 4.97115433e-01 1.19652736e+00 1.09643006e+00] ... [ 8.12983990e-01 1.37467110e+00 1.12164843e+00 ... 9.48190749e-01 9.34459925e-01 1.39079404e+00] [ 1.03010511e+00 5.50450265e-01 1.01372409e+00 ... 9.85344201e-02 1.30264020e+00 1.99329197e+00] [ 9.76427615e-01 3.66499335e-01 7.10250020e-01 ... 8.31257045e-01 1.28613651e+00 9.13652778e-01]]] [[[ 1.95332527e+00 2.38185778e-01 7.43652165e-01 ... -5.01785457e-01 2.84884512e-01 1.69568896e-01] [ 3.09804320e-01 5.70380948e-02 2.81107366e-01 ... 5.10125220e-01 -2.29831859e-02 -1.76183712e-02] [ 1.42440632e-01 -6.18485510e-01 2.48930126e-01 ... 9.41380560e-02 7.94026971e-01 -2.65540302e-01] ... [ 1.54775882e+00 3.99831623e-01 5.74397504e-01 ... 1.71790910e+00 7.93797553e-01 5.82721949e-01] [-1.33125067e+00 9.49531198e-01 9.99171317e-01 ... -1.59955062e-02 6.19583309e-01 4.59208041e-01] [ 3.44067931e-01 -1.04978628e-01 5.05133092e-01 ... 7.35062838e-01 3.40024680e-01 -3.66351753e-02]] [[ 4.87433583e-01 1.32409012e+00 2.93390602e-01 ... 5.04756927e-01 -1.36899307e-01 1.06512439e+00] [ 2.69790411e-01 1.70610130e-01 -3.65212411e-01 ... 1.09152997e+00 -1.27543777e-01 4.64462161e-01] [ 5.30159831e-01 4.26401407e-01 5.37705183e-01 ... 4.27974135e-01 9.85398293e-01 6.71938583e-02] ... [ 3.25924098e-01 1.37914792e-01 9.25585270e-01 ... -3.18693668e-01 7.59458005e-01 6.25139892e-01] [ 2.54310548e-01 -1.87161431e-01 -8.01592290e-01 ... 7.98940435e-02 -4.59326863e-01 1.36847162e+00] [ 1.08629465e+00 8.24921489e-01 5.82016766e-01 ... -3.23986828e-01 7.37893462e-01 6.22628808e-01]] [[-3.59808952e-01 -7.75949180e-01 7.16792643e-01 ... 5.30402005e-01 1.35851479e+00 -9.95909646e-02] [ 7.30466008e-01 -6.93421841e-01 5.88524222e-01 ... 2.18586586e-02 7.09943235e-01 1.01711023e+00] [ 5.32484353e-01 -7.44893789e-01 -2.99947321e-01 ... -5.10561585e-01 6.80136740e-01 -4.66993451e-01] ... [ 1.17130868e-01 7.66375244e-01 6.94061279e-01 ... 
9.01861265e-02 1.07040012e+00 2.43762136e-01] [-7.88546026e-01 -3.63778532e-01 1.47180915e-01 ... -4.27042335e-01 3.29248160e-01 -3.12684119e-01] [ 1.65358484e-01 4.94000882e-01 4.21769142e-01 ... 1.15898156e+00 4.71069157e-01 9.42059815e-01]] ... [[ 1.07842588e+00 1.91052866e+00 2.33570516e-01 ... 1.23135114e+00 -2.81339079e-01 6.80492163e-01] [ 2.19276357e+00 5.73997736e-01 1.25494576e+00 ... 8.61919641e-01 7.07471490e-01 7.14676082e-01] [-2.82387584e-01 1.22407794e+00 -1.42507836e-01 ... 8.10294032e-01 1.25474703e+00 -9.06506598e-01] ... [-4.38719571e-01 4.08233136e-01 7.03739300e-02 ... -2.26708308e-01 1.77731365e-01 -8.11010957e-01] [ 1.01865172e+00 8.16737473e-01 5.78894079e-01 ... 6.90650702e-01 -5.91389775e-01 1.59628093e+00] [ 5.92447557e-02 5.21357656e-01 3.65184367e-01 ... 5.30523546e-02 -5.35700880e-02 4.64993834e-01]] [[-2.64001004e-02 -4.63881455e-02 5.44020772e-01 ... 6.56295538e-01 1.20853350e-01 1.09625793e+00] [ 9.71088588e-01 -8.00911188e-02 -2.40099669e-01 ... 6.75061226e-01 6.05134249e-01 -2.85645664e-01] [-1.29192978e-01 -7.24354088e-02 2.92134583e-01 ... 5.38243413e-01 4.74635541e-01 5.30159056e-01] ... [ 1.07974112e+00 8.21770489e-01 6.26068950e-01 ... 8.04628670e-01 6.84842229e-01 -1.23716816e-01] [ 1.73484457e+00 -3.98196518e-01 1.10493994e+00 ... 6.85919940e-01 7.85936475e-01 4.78939712e-01] [ 1.09860671e+00 1.36503831e-01 1.31042147e+00 ... -3.03060282e-02 3.38377148e-01 3.84344794e-02]] [[-9.07809660e-02 9.95634317e-01 7.45270550e-01 ... 1.25922024e+00 1.93755150e+00 2.96722621e-01] [ 1.25890625e+00 1.21620214e+00 1.05572712e+00 ... 5.48245847e-01 6.84935510e-01 2.22504750e-01] [ 1.03242958e+00 1.30405176e+00 1.51707089e+00 ... 1.47013962e+00 5.28296530e-01 5.78820109e-01] ... [-1.12373099e-01 -3.77081782e-02 1.60019863e+00 ... 1.29108310e-01 -1.04312253e+00 6.91618323e-01] [ 7.06049383e-01 6.49770021e-01 1.25743747e+00 ... -1.27538466e+00 1.03472519e+00 4.91401413e-03] [ 2.91576654e-01 1.25426936e+00 7.45207131e-01 ... 
4.82516557e-01 8.57635081e-01 7.30234683e-01]]] [[[ 8.52273166e-01 6.25189126e-01 3.14040482e-01 ... -1.09099627e-01 6.66647255e-01 -7.91418478e-02] [-3.07506263e-01 -5.62907271e-02 2.82990456e-01 ... 2.23977998e-01 6.89788043e-01 2.86265939e-01] [ 3.15341383e-01 4.06566888e-01 3.42272460e-01 ... 5.70325196e-01 5.89621723e-01 2.03055754e-01] ... [-2.17770070e-01 3.76561314e-01 5.70326090e-01 ... 1.37943640e-01 8.28301311e-01 -4.05628234e-01] [ 1.27908200e-01 2.63873607e-01 1.89515371e-02 ... 3.99608999e-01 3.84001344e-01 3.60592961e-01] [-9.48987678e-02 -2.04598367e-01 1.30951172e-02 ... 5.08292913e-01 -2.88566202e-01 3.11003983e-01]] [[ 4.33583409e-02 -1.87234908e-01 6.63716376e-01 ... 2.90798664e-01 1.02126583e-01 2.25461751e-01] [ 3.87108997e-02 2.34897316e-01 1.63315251e-01 ... -6.29078448e-02 4.00454849e-02 2.10584998e-01] [-6.58551931e-01 4.69820172e-01 4.98819321e-01 ... 2.01699153e-01 3.19178373e-01 2.47144118e-01] ... [ 1.25824183e-01 4.31344777e-01 -2.52946585e-01 ... -3.89860332e-01 1.44394143e-02 -1.78003773e-01] [-3.30649167e-01 4.94132817e-01 3.50912400e-02 ... 1.22217700e-01 3.27619404e-01 4.95862067e-01] [-3.46562088e-01 7.84805268e-02 3.18189472e-01 ... 1.42457828e-01 2.50316411e-01 -1.93765596e-01]] [[ 2.25836366e-01 3.06969255e-01 6.08240783e-01 ... -3.77579689e-01 4.52751994e-01 3.73827100e-01] [ 2.43156478e-01 -9.83336866e-02 2.15405181e-01 ... 2.58570433e-01 6.88487291e-01 2.90025771e-01] [ 8.28717276e-03 -1.35735855e-01 -1.10391527e-01 ... 6.21541083e-01 3.98977809e-02 3.25370491e-01] ... [-6.54196858e-01 3.85154843e-01 3.29468280e-01 ... 4.66013886e-03 -1.02236427e-01 9.25875455e-03] [-8.36854801e-02 4.78536010e-01 4.81396854e-01 ... 6.67418003e-01 -9.58529636e-02 2.22305670e-01] [ 4.74972039e-01 4.98478293e-01 1.62739694e-01 ... 1.02634478e+00 -2.58283794e-01 4.39636827e-01]] ... [[ 5.67087233e-02 -3.54966909e-01 -3.92163157e-01 ... 8.11384022e-02 -2.97137171e-01 -1.37655094e-01] [ 4.14050788e-01 -5.57011962e-01 3.86744410e-01 ... 
6.65087223e-01 3.34908038e-01 4.84037638e-01] [-1.48129001e-01 7.26878941e-01 5.86274981e-01 ... 7.84642845e-02 1.72904395e-02 -2.39768296e-01] ... [ 3.58663440e-01 7.71880448e-02 5.54461122e-01 ... 3.91344786e-01 3.14142823e-01 2.12431669e-01] [ 3.12888056e-01 -2.89270371e-01 -2.41428807e-01 ... 3.68850261e-01 1.96767189e-02 -1.23219145e-03] [ 3.27802300e-01 4.71050084e-01 -4.91510808e-01 ... -6.63320273e-02 2.02640995e-01 5.93381286e-01]] [[-7.36965656e-01 1.19331993e-01 2.67400801e-01 ... -4.97134849e-02 -3.76063406e-01 -5.19778468e-02] [ 2.04898998e-01 3.25401217e-01 5.57122715e-02 ... 2.95009241e-02 5.79769135e-01 -2.52867758e-01] [ 5.67312896e-01 -2.29288518e-01 -7.84218088e-02 ... 1.17535435e-01 -1.98962599e-01 1.98404804e-01] ... [ 4.82514203e-01 6.40916675e-02 4.91365075e-01 ... 2.85560936e-01 4.17940557e-01 -2.22063601e-01] [ 6.95481718e-01 1.24834202e-01 7.14089096e-01 ... 1.96031898e-01 -6.51589781e-02 6.81637108e-01] [-5.25797367e-01 5.27391434e-01 6.76884726e-02 ... -1.82450980e-01 5.15819907e-01 -2.90760517e-01]] [[-1.92015067e-01 4.78091598e-01 -2.10024700e-01 ... 3.41620266e-01 1.59053445e-01 7.61037916e-02] [ 2.16870561e-01 -4.88092244e-01 3.73314351e-01 ... 4.19039100e-01 2.75591195e-01 -8.52002725e-02] [ 4.13651079e-01 3.45709082e-03 1.87078014e-01 ... 5.01136303e-01 -1.01725906e-01 -3.04835048e-02] ... [ 1.36822358e-01 -5.51507115e-01 3.16169322e-01 ... 3.19715053e-01 6.99100137e-01 1.28256436e-02] [-5.74303716e-02 4.43879396e-01 3.06787062e-03 ... 5.88569678e-02 5.93300462e-01 2.83577085e-01] [ 2.20448405e-01 -5.83343446e-01 -2.82628387e-02 ... -1.24195740e-01 4.28780198e-01 5.54614849e-02]]]]]; ov_res: [[[[[ 1.10543482e-01 -1.02964468e-01 -3.38884592e-01 ... -4.02857512e-01 -3.48453909e-01 5.60409129e-01] [ 6.81395531e-01 -3.81377757e-01 -1.10354495e+00 ... -1.19290113e-01 1.13574766e-01 -4.95107323e-01] [-1.81933731e-01 -1.16468978e+00 -8.82792294e-01 ... -1.07989423e-01 -4.30171221e-01 8.25060844e-01] ... 
[-1.11652803e+00 -1.34171569e+00 2.80495703e-01 ... 5.44770844e-02 5.44183254e-01 2.00119972e-01] [ 9.28447917e-02 -9.73224521e-01 -3.47116552e-02 ... -1.02900398e+00 2.11588033e-02 -7.61181891e-01] [-5.72286546e-01 -4.92564231e-01 5.43089509e-01 ... -5.74976742e-01 -6.50872231e-01 -4.11877483e-01]] [[-4.32307363e-01 2.97280010e-02 -4.00479496e-01 ... -3.45730484e-01 5.44596650e-02 -1.42772925e+00] [-1.06931202e-01 -6.55029058e-01 -4.74885143e-02 ... 4.29736733e-01 -5.84896579e-02 4.14902419e-02] [-6.40410781e-01 -5.62353432e-01 -7.24056721e-01 ... -6.05697036e-01 6.60790265e-01 2.59086072e-01] ... [-9.05778855e-02 -3.68525162e-02 -1.06156671e+00 ... -6.26866937e-01 7.48441219e-01 -9.00673211e-01] [ 7.04424500e-01 2.65187979e-01 -8.95713151e-01 ... 3.06370705e-01 3.69143635e-01 -7.23215401e-01] [ 2.99377382e-01 2.22439975e-01 -1.22484565e-01 ... -7.35334814e-01 -3.20764959e-01 -1.64276227e-01]] [[ 5.20325005e-01 -1.30701673e+00 8.34868103e-02 ... 3.30677301e-01 -6.45481706e-01 2.35184841e-02] [-2.98298359e-01 5.42029321e-01 3.65188152e-01 ... -3.77483696e-01 8.97872671e-02 3.08891416e-01] [ 4.01430756e-01 -6.08335316e-01 -3.28031853e-02 ... -1.21463470e-01 4.52540487e-01 -7.97499835e-01] ... [-6.35617897e-02 -5.66584766e-02 -1.22331738e+00 ... 1.39090475e-02 -6.19560122e-01 -7.76971355e-02] [ 1.54378325e-01 -3.67906690e-01 -1.56791076e-01 ... 1.06098212e-01 -3.80338639e-01 7.26512447e-02] [-1.30124122e-01 -3.41984063e-01 -7.47844517e-01 ... -1.24300718e+00 -5.84641755e-01 -6.33916616e-01]] ... [[ 7.62085736e-01 2.16294434e-02 7.78018713e-01 ... 3.60586256e-01 -1.07416883e-01 -6.31988943e-01] [-1.75345421e-01 -6.45036027e-02 3.73945087e-01 ... -5.80372930e-01 8.51496994e-01 -7.00216234e-01] [ 2.71974325e-01 -3.26026142e-01 -4.94266033e-01 ... 2.19930440e-01 -1.07936215e+00 -6.80869520e-01] ... [-4.23288107e-01 -2.21084714e-01 -2.87574381e-01 ... -3.52845907e-01 -1.07121933e+00 -6.82443619e-01] [-5.40742040e-01 -1.28079370e-01 6.60210192e-01 ... 
7.96287730e-02 2.03558967e-01 -2.61257887e-01] [-4.06621158e-01 -1.20721292e+00 2.02427562e-02 ... -7.09534943e-01 4.95566756e-01 -2.90761650e-01]] [[-9.78857279e-02 -1.13174570e+00 -6.12515330e-01 ... -7.24809289e-01 -7.75953770e-01 3.47836912e-01] [ 3.13137114e-01 -7.65277267e-01 4.90808636e-01 ... 2.00150326e-01 -3.83715957e-01 1.04763135e-01] [-3.20753083e-02 -6.15834773e-01 -1.06578171e+00 ... 9.72741425e-01 2.54921615e-01 2.43883565e-01] ... [-3.29899066e-03 8.23381782e-01 -9.11756828e-02 ... 5.15641756e-02 -4.03696686e-01 -1.04728866e+00] [ 5.41114271e-01 3.43566984e-01 -5.69880195e-02 ... 2.57194787e-01 2.45600343e-01 -6.39514208e-01] [ 8.13835263e-02 -3.41744512e-01 -1.31481871e-01 ... -1.90902066e+00 -3.59346390e-01 -5.58842123e-01]] [[ 2.46780053e-01 1.68438888e+00 9.34843957e-01 ... -5.00178277e-01 -9.58033502e-01 -5.30014299e-02] [ 3.07656616e-01 -6.58343375e-01 -4.17166382e-01 ... -1.79226696e-01 -3.97244483e-01 -1.30636264e-02] [ 6.83130980e-01 4.83730406e-01 -2.86071271e-01 ... 9.96351466e-02 -5.68928778e-01 -7.66860127e-01] ... [-1.22184098e+00 2.70324796e-01 -4.50556725e-03 ... 6.05980396e-01 1.30572855e-01 4.08255994e-01] [-2.68239409e-01 -3.89831960e-01 -1.22325823e-01 ... -1.06610560e+00 -1.36727840e-01 3.55192900e-01] [-6.71641305e-02 4.35684979e-01 -8.57166767e-01 ... -2.64728576e-01 -7.37678707e-01 1.23347223e+00]]] [[[-5.48788607e-02 3.53122093e-02 -2.28387311e-01 ... -4.36992310e-02 -5.08018851e-01 -4.98267263e-01] [-4.48339194e-01 -6.20688081e-01 -8.36658776e-01 ... -9.50720847e-01 -1.43786967e-01 -4.11848687e-02] [-4.47723150e-01 -3.13707978e-01 -1.60853133e-01 ... -1.03457998e-02 3.61405343e-01 -2.09962681e-01] ... [ 1.47561342e-01 1.42414108e-01 -4.13889647e-01 ... 6.15209080e-02 -1.30347669e-01 3.07179838e-01] [-2.45943934e-01 -2.72758126e-01 8.09200853e-02 ... 2.22704738e-01 -3.14567000e-01 -3.86047900e-01] [-1.75502014e+00 4.44485962e-01 -2.11824447e-01 ... 
-2.10822925e-01 -3.76412332e-01 -3.63771051e-01]] [[ 1.77958176e-01 1.65886715e-01 1.13032922e-01 ... -3.11791718e-01 -5.94283342e-01 -2.27887154e-01] [ 4.12412614e-01 -5.94120659e-02 -3.91509980e-01 ... 1.39938429e-01 -2.26526603e-01 3.52421552e-01] [ 7.03531682e-01 -5.53933620e-01 -1.07492417e-01 ... -3.00116599e-01 -5.50430082e-02 1.52530760e-01] ... [ 3.54599178e-01 -7.25390732e-01 1.24319822e-01 ... 1.24390967e-01 -4.60289061e-01 -3.58242452e-01] [-2.03857005e-01 7.92624056e-02 -5.40595613e-02 ... 1.92931026e-01 4.78416905e-02 1.75445974e-01] [-8.43249679e-01 -3.80693436e-01 -2.70130634e-01 ... -7.05942214e-02 -8.69309187e-01 -1.77754059e-01]] [[ 5.55821896e-01 -2.33940691e-01 -3.67615938e-01 ... -4.95109260e-01 -6.89669609e-01 -3.20398599e-01] [-3.74584049e-01 -6.76152110e-01 -7.14671254e-01 ... -3.32140505e-01 1.54494688e-01 -4.41040322e-02] [-1.69942528e-01 -3.79669428e-01 -8.55369270e-02 ... -7.19211817e-01 1.45249292e-01 -1.63373575e-01] ... [ 5.18079340e-01 -8.20582271e-01 -5.00172496e-01 ... -2.75484622e-01 -6.06848478e-01 -5.88579774e-01] [-2.67722845e-01 -6.04550898e-01 -6.13814473e-01 ... -4.02538985e-01 -6.55705512e-01 -7.80783594e-02] [-1.00804493e-01 3.74997586e-01 -2.43981242e-01 ... -6.91215456e-01 -6.66013837e-01 3.27458903e-02]] ... [[ 5.31223305e-02 -5.23099124e-01 -6.89499080e-01 ... 2.30269387e-01 -2.61022270e-01 -4.12879467e-01] [-9.48818699e-02 -2.77489871e-01 -4.34991211e-01 ... -5.73165715e-01 3.26508373e-01 -3.45955670e-01] [ 2.81567991e-01 2.66007036e-01 -8.69218409e-02 ... 1.44788221e-01 7.07515776e-02 6.94299936e-02] ... [ 5.52633740e-02 -4.81225431e-01 -8.25008273e-01 ... -3.62869143e-01 -7.29152918e-01 -2.79239453e-02] [ 5.68900168e-01 -2.42009759e-01 -3.69602442e-01 ... -6.33137345e-01 5.79088807e-01 1.03151463e-01] [ 6.26704037e-01 -7.65440106e-01 2.77902335e-01 ... 6.27438188e-01 -1.80831537e-01 1.91713259e-01]] [[-7.33060122e-01 3.14898908e-01 -4.54174757e-01 ... 
-6.07677817e-01 -3.24755050e-02 -3.28011930e-01] [-3.75970304e-01 1.82194710e-01 -8.91644135e-02 ... 5.35075665e-01 7.47198239e-02 1.89944088e-01] [-3.08494389e-01 -4.24879253e-01 9.55117047e-02 ... -4.16204631e-02 -2.62699544e-01 3.45645815e-01] ... [ 8.13999865e-03 -5.08878767e-01 -4.22831625e-01 ... -1.17170453e+00 -3.96916986e-01 -4.38189000e-01] [-1.40031680e-01 5.59767187e-01 2.26479128e-01 ... 1.71463042e-02 -1.67948559e-01 1.94654703e-01] [-1.98942363e-01 -3.99458230e-01 -7.68553138e-01 ... 5.69101393e-01 -3.00941795e-01 -1.07746851e+00]] [[-2.95347363e-01 3.13816130e-01 -5.75644791e-01 ... -9.72447768e-02 7.94539332e-01 -2.34514281e-01] [ 5.31745255e-02 -4.54362124e-01 -1.67758539e-01 ... -4.02790815e-01 -3.49031121e-01 -1.79342300e-01] [-3.11151415e-01 -3.08661193e-01 -4.78345752e-01 ... -4.34519053e-01 -9.89996567e-02 -3.01245581e-02] ... [ 1.16966414e+00 4.23721641e-01 -2.82756120e-01 ... 2.36126095e-01 1.56004885e-02 -4.40300554e-01] [ 8.33208766e-03 -4.35615003e-01 6.74264431e-02 ... -5.62561862e-02 -3.63504469e-01 -2.94254795e-02] [ 1.80420786e-01 -5.76342642e-01 1.22561216e-01 ... -3.69166285e-01 8.77351046e-01 -9.64947283e-01]]] [[[ 2.42774796e+00 3.37679315e+00 -1.68117893e+00 ... 1.54962099e+00 -1.12887466e+00 1.65570974e+00] [ 1.21986330e-01 1.17480254e+00 -3.25781941e-01 ... 1.13861203e+00 5.75002134e-01 -2.79092193e-01] [-7.75915980e-01 6.01342916e-01 -9.12925601e-01 ... 9.43017840e-01 2.77132535e+00 1.55526847e-01] ... [ 1.12144911e+00 1.02611303e+00 2.99850011e+00 ... 2.08743191e+00 1.34511566e+00 -3.08468521e-01] [ 1.78811502e+00 1.51430011e+00 1.48157597e+00 ... 1.76471323e-01 1.28026187e+00 1.80466747e+00] [ 2.09952712e+00 3.73385191e-01 2.99055815e+00 ... 4.73074943e-01 1.03722763e+00 -6.27183691e-02]] [[ 1.14088202e+00 8.41512799e-01 1.78238973e-01 ... 7.23128498e-01 2.71775126e+00 9.90111709e-01] [-5.94034135e-01 1.13097656e+00 2.44103861e+00 ... 
2.63333154e+00 1.92965436e+00 4.57998514e-01] [ 2.41274071e+00 4.46498722e-01 3.38791966e-01 ... -4.78427023e-01 2.75553894e+00 2.38029575e+00] ... [ 1.15004623e+00 1.03553307e+00 6.26263082e-01 ... 2.61446285e+00 2.34555936e+00 3.48943472e-01] [-9.01404202e-01 4.87150520e-01 1.17721641e+00 ... 1.42181110e+00 -6.84345841e-01 1.73712122e+00] [ 1.32728529e+00 1.91070426e+00 -3.60683501e-02 ... 2.11554050e+00 2.16870546e+00 1.01872206e+00]] [[ 1.35372591e+00 1.24656737e+00 9.54232216e-02 ... 1.61711001e+00 5.95727488e-02 3.51549447e-01] [ 2.07674503e+00 -4.02116275e-04 2.37020206e+00 ... 1.02643847e+00 9.88868654e-01 1.06695449e+00] [ 1.23506391e+00 -2.97633111e-01 3.21158075e+00 ... 2.00887227e+00 2.34404176e-01 1.53426492e+00] ... [ 1.77299798e+00 7.56628931e-01 -9.78367209e-01 ... 1.92816246e+00 4.03432751e+00 1.76905930e+00] [ 4.00138557e-01 2.51334816e-01 1.03922045e+00 ... 1.77735043e+00 4.95980591e-01 1.04290962e+00] [ 1.08775282e+00 3.87922257e-01 3.44539976e+00 ... 2.40726757e+00 1.65155590e+00 1.08529377e+00]] ... [[ 7.10051537e-01 3.19886088e+00 2.18706965e+00 ... 4.96967226e-01 8.70431602e-01 1.19477499e+00] [-2.29157031e-01 2.02715015e+00 -7.01105595e-02 ... 1.79813743e-01 1.72704756e+00 1.47891796e+00] [ 2.75231266e+00 1.84595180e+00 6.46425724e-01 ... 1.83503926e+00 8.86169851e-01 1.79266596e+00] ... [ 1.22583759e+00 1.75627863e+00 1.83511484e+00 ... 2.59972000e+00 2.01659465e+00 1.82524785e-01] [ 1.78096163e+00 1.06141877e+00 1.25340962e+00 ... 1.46674252e+00 3.60517859e+00 1.98935437e+00] [ 1.15058351e+00 7.32780159e-01 9.53715265e-01 ... 9.38955843e-01 9.78783786e-01 1.20690131e+00]] [[ 1.70827484e+00 1.04593658e+00 4.63470846e-01 ... 9.33042705e-01 -8.92617762e-01 -3.66364241e-01] [-5.95638931e-01 2.32655430e+00 1.08032882e+00 ... 8.70388091e-01 2.82939816e+00 4.72288132e-01] [ 1.28371072e+00 1.39911413e+00 6.47579610e-01 ... 8.73136640e-01 9.14777756e-01 2.65407991e+00] ... [ 8.94375801e-01 2.69700289e+00 2.41024256e+00 ... 
7.74321914e-01 -2.96093464e-01 1.21701264e+00] [ 1.56859148e+00 8.87276471e-01 1.32494652e+00 ... 1.84373677e+00 -9.90891039e-01 1.12996018e+00] [ 2.55641389e+00 -2.80231953e-01 3.27764839e-01 ... 1.73832405e+00 1.13145304e+00 1.11301458e+00]] [[ 7.48608589e-01 2.68729508e-01 -2.54118395e+00 ... 1.80908477e+00 6.91130877e-01 4.89442140e-01] [ 9.60262001e-01 2.12522793e+00 -2.97196925e-01 ... 1.72680557e+00 2.19932079e-01 2.26189423e+00] [ 6.28309846e-01 2.11499643e+00 9.35266018e-01 ... 7.60929525e-01 1.93175113e+00 5.86357892e-01] ... [-5.38617313e-01 -2.96121091e-01 1.19579971e-01 ... 2.48159385e+00 3.97681022e+00 -1.18475273e-01] [ 9.95366096e-01 2.39169002e+00 8.04381430e-01 ... 3.52332139e+00 8.68918300e-01 2.62336135e-01] [ 1.57438409e+00 4.80379224e-01 9.68260705e-01 ... 8.03865194e-01 2.76792812e+00 -2.81911105e-01]]] [[[ 7.45539904e-01 1.12389791e+00 1.44962955e+00 ... 2.23184180e+00 2.06690580e-01 1.85716510e+00] [ 1.48473620e+00 1.51160538e+00 3.66562843e-01 ... 1.21673310e+00 1.50530529e+00 1.28275311e+00] [ 1.90471756e+00 1.72726071e+00 6.24527395e-01 ... 8.97722185e-01 1.45162654e+00 3.16619396e-01] ... [ 1.72946370e+00 5.72963715e-01 1.27691042e+00 ... 8.70977998e-01 9.50390935e-01 1.06645525e+00] [ 6.07205629e-01 7.91532457e-01 1.13526750e+00 ... 4.56737161e-01 3.64241183e-01 1.45485294e+00] [ 1.72379768e+00 9.49490070e-01 1.45741999e+00 ... 4.39959913e-01 9.78974164e-01 1.88923967e+00]] [[ 1.44898343e+00 1.48563063e+00 6.53286457e-01 ... 9.85058486e-01 1.40821970e+00 1.34820139e+00] [ 7.30882823e-01 1.37264097e+00 1.44274175e+00 ... 1.56694472e+00 1.54805291e+00 5.45789599e-01] [ 8.94828856e-01 2.28935289e+00 1.29640901e+00 ... 1.23355448e+00 1.00604224e+00 1.15038097e+00] ... [ 1.29658556e+00 1.55514002e+00 1.36111689e+00 ... 7.33948290e-01 1.00940192e+00 1.36924589e+00] [ 7.83161044e-01 1.25569057e+00 -2.00821742e-01 ... 1.92292118e+00 1.20141566e+00 7.34708071e-01] [ 5.95170617e-01 1.98241365e+00 1.13464534e+00 ... 
1.38955128e+00 2.58868635e-01 9.25274074e-01]] [[ 6.64001942e-01 7.81998932e-01 1.37488115e+00 ... 1.12361872e+00 5.61268926e-01 2.06984192e-01] [ 3.64866674e-01 1.12917161e+00 8.95458281e-01 ... 1.27159262e+00 1.61081827e+00 1.39054000e+00] [ 1.25826049e+00 1.13350618e+00 1.45658445e+00 ... 1.26164377e+00 1.92874467e+00 1.05422521e+00] ... [ 1.16821200e-01 2.15086961e+00 1.26411927e+00 ... 6.70531631e-01 1.07758987e+00 1.28155529e+00] [ 1.92068243e+00 6.71448171e-01 4.92167264e-01 ... 1.09845757e+00 1.15806067e+00 1.34036565e+00] [ 8.72058451e-01 1.70462084e+00 8.66007686e-01 ... 1.29330659e+00 1.10364211e+00 1.43219686e+00]] ... [[ 6.20615959e-01 1.61400640e+00 1.29717481e+00 ... 8.43227863e-01 7.52683505e-02 8.48681629e-01] [ 2.60967422e+00 1.15444922e+00 9.44686472e-01 ... 1.91327810e+00 1.08496892e+00 1.56284225e+00] [ 4.98930812e-01 7.26247013e-01 7.29015991e-02 ... 1.35596800e+00 1.67372787e+00 2.32879710e+00] ... [ 2.07327509e+00 -4.79805529e-01 2.31729364e+00 ... 3.68975550e-01 1.37299240e+00 1.40585864e+00] [ 8.59817684e-01 1.71705019e+00 1.08340442e+00 ... 1.74361682e+00 1.92658699e+00 1.36347628e+00] [ 6.98742270e-01 1.14843047e+00 7.65148818e-01 ... 1.52136540e+00 3.76682043e-01 1.37457287e+00]] [[ 7.04607189e-01 8.85927618e-01 4.05668944e-01 ... 6.39614284e-01 1.13578081e+00 1.68979216e+00] [ 1.28841925e+00 7.33226955e-01 8.87921393e-01 ... 2.94903219e-01 1.29317188e+00 1.63257277e+00] [ 7.29349852e-01 9.13466632e-01 1.04520345e+00 ... 1.44396377e+00 9.74283457e-01 1.24357894e-01] ... [ 1.31946087e+00 5.16428292e-01 1.94180739e+00 ... 1.20229316e+00 9.04437661e-01 8.68668616e-01] [ 1.17280710e+00 1.05762875e+00 7.63845205e-01 ... 7.95681715e-01 1.51810849e+00 1.32934570e+00] [ 6.73375905e-01 1.11688793e+00 3.91784668e-01 ... 1.00128961e+00 5.29232323e-01 1.29539728e+00]] [[ 2.04151556e-01 1.23792601e+00 3.91524345e-01 ... 1.39374244e+00 1.54761338e+00 1.31956947e+00] [ 1.11059892e+00 9.06667411e-01 8.76233160e-01 ... 
1.64381647e+00 4.59976405e-01 1.35017407e+00] [ 3.27994049e-01 1.09730124e+00 1.63686776e+00 ... 4.68569666e-01 8.45453382e-01 1.15056992e+00] ... [ 5.27644336e-01 1.39649749e+00 1.57333302e+00 ... 1.12415981e+00 8.08194041e-01 4.11778063e-01] [ 1.58421743e+00 7.24029303e-01 5.06722987e-01 ... 1.21841550e+00 1.65434790e+00 1.14504659e+00] [ 8.78003478e-01 8.72110009e-01 2.20637608e+00 ... 4.81881976e-01 2.77735740e-01 1.37700725e+00]]] [[[ 1.10548210e+00 3.92779231e-01 9.82642710e-01 ... -2.70211816e-01 6.17646992e-01 1.49116039e+00] [ 1.19492948e+00 7.64055729e-01 1.70629597e+00 ... 6.79498971e-01 1.16066110e+00 1.47066684e-03] [ 9.97795284e-01 6.21797442e-01 9.91344512e-01 ... 5.07929325e-01 1.05090392e+00 1.70610547e+00] ... [ 4.99091893e-01 2.84187108e-01 -9.09909189e-01 ... 1.61041546e+00 1.46101892e+00 -4.63808596e-01] [ 2.11640410e-02 1.85914576e+00 9.15577173e-01 ... 4.75518554e-01 1.51665127e+00 3.37604493e-01] [ 2.08214566e-01 -3.38086560e-02 5.41004181e-01 ... 1.30390906e+00 -1.02719665e-01 1.17292917e+00]] [[ 3.31568271e-02 1.09219623e+00 5.20290613e-01 ... 3.38689506e-01 -3.48332763e-01 8.50180507e-01] [ 1.13628340e+00 2.84610987e-01 4.32507962e-01 ... 5.21082819e-01 3.67741853e-01 1.28481436e+00] [ 6.57480732e-02 1.50793836e-01 3.58778954e-01 ... 3.90310287e-01 2.00007465e-02 1.97129041e-01] ... [ 4.51616079e-01 -5.86526334e-01 3.59137744e-01 ... -5.34245431e-01 7.38229334e-01 -8.68091524e-01] [ 6.71914995e-01 3.46392334e-01 2.16472939e-01 ... 8.72561634e-01 6.74412847e-01 4.51322317e-01] [ 1.24492812e+00 1.41308880e+00 1.31572902e-01 ... -6.85436249e-01 -7.75915831e-02 1.65987420e+00]] [[ 1.25220907e+00 -6.00654423e-01 1.05355442e+00 ... 1.72797513e+00 1.13470876e+00 8.53754953e-02] [ 3.03568423e-01 8.52263629e-01 7.52798557e-01 ... 3.26859325e-01 2.20905915e-01 1.84789971e-01] [ 8.27493548e-01 4.32391435e-01 5.55512011e-01 ... 4.89359468e-01 4.02562916e-01 7.87993908e-01] ... [ 1.17390014e-01 1.53801680e+00 7.67788172e-01 ... 
4.99456882e-01 4.01718616e-01 1.49676394e+00] [ 3.35059017e-01 1.22351058e-01 1.39413878e-01 ... 1.82016790e+00 1.08824611e+00 -5.77877313e-02] [ 2.70830452e-01 6.46856189e-01 1.77782762e+00 ... -9.75832283e-01 1.14755952e+00 1.33364975e+00]] ... [[ 6.92190826e-01 -6.93101466e-01 1.72900796e+00 ... 1.87462773e-02 -4.77895364e-02 1.44012034e+00] [ 1.28812969e+00 1.29560918e-01 -1.99977964e-01 ... 8.06917250e-01 1.12842262e+00 -4.54426050e-01] [-7.58266985e-01 9.93032336e-01 -7.55369514e-02 ... 8.05215120e-01 -3.39222074e-01 7.12894380e-01] ... [ 5.39203048e-01 3.25973779e-01 1.35169530e+00 ... 3.76864851e-01 8.26019883e-01 4.81641591e-01] [ 6.41896963e-01 2.46575281e-01 1.34944189e+00 ... 9.49913085e-01 8.83316159e-01 2.84457564e-01] [ 3.90566140e-01 9.46116209e-01 -2.49923348e-01 ... 8.42613578e-01 1.35065234e+00 1.36581823e-01]] [[ 9.36403453e-01 1.38373005e+00 -7.04019368e-01 ... -3.30177635e-01 -2.02165172e-01 4.37114686e-01] [-1.45263886e-02 1.26395094e+00 5.17695546e-01 ... 1.48444578e-01 3.84261280e-01 -1.55948770e+00] [ 1.16918170e+00 7.46897817e-01 8.40581536e-01 ... 1.79361179e-01 -6.26640558e-01 7.91576982e-01] ... [ 1.02125669e+00 -1.11668251e-01 4.59440112e-01 ... 9.57850635e-01 2.25987017e-01 5.74390769e-01] [-3.42717618e-01 -7.28681833e-02 3.58254939e-01 ... -7.31733620e-01 3.24784130e-01 7.50833094e-01] [ 8.24605167e-01 1.18706787e+00 4.96870786e-01 ... 5.21478295e-01 -7.25753158e-02 6.31215990e-01]] [[ 1.66742432e+00 -3.50670874e-01 1.48437870e+00 ... 3.47961336e-01 3.72923046e-01 7.11239040e-01] [ 5.50459266e-01 3.51874679e-01 1.48462996e-01 ... 8.82157445e-01 1.93172705e+00 2.69354761e-01] [ 1.33031499e+00 -1.23732984e-01 2.85359211e-02 ... -6.84734046e-01 4.95254099e-01 4.63054955e-01] ... [ 1.24778974e+00 4.19114590e-01 -5.09972423e-02 ... -8.26106310e-01 3.53121430e-01 3.63681555e-01] [ 9.37964737e-01 6.19166195e-01 1.50416479e-01 ... 9.41360772e-01 1.50721610e+00 1.01680803e+00] [ 3.00531268e-01 1.58033895e+00 5.44218421e-01 ... 
3.59575242e-01 1.76488185e+00 7.60250762e-02]]] [[[ 3.38697463e-01 6.22006357e-01 4.14643168e-01 ... 2.42110103e-01 -4.56022248e-02 1.48656338e-01] [ 3.57491970e-01 2.76620150e-01 3.82016450e-01 ... 2.32777372e-01 1.26687363e-01 3.92011553e-01] [-3.71911600e-02 1.87040716e-01 2.65859067e-01 ... 2.73252856e-02 2.89757401e-01 1.75684601e-01] ... [ 5.17858490e-02 -4.10721719e-01 -1.84731513e-01 ... 6.10853553e-01 4.60963190e-01 2.24313319e-01] [ 9.68351960e-02 -1.28917232e-01 3.79953355e-01 ... 2.67273247e-01 7.01434851e-01 7.49090791e-01] [ 5.62024176e-01 6.23556087e-03 5.64660192e-01 ... 5.03934145e-01 3.81401300e-01 5.06042719e-01]] [[-3.11345737e-02 4.18649912e-02 6.42312527e-01 ... 1.18825302e-01 2.82996953e-01 2.66445965e-01] [ 8.37646782e-01 3.81521165e-01 -9.12457556e-02 ... 4.27562833e-01 -4.73959655e-01 2.40854934e-01] [-7.93344751e-02 5.02344310e-01 -5.56205362e-02 ... 5.34885406e-01 4.98927891e-01 5.92343986e-01] ... [ 3.52311581e-01 -2.11279020e-02 -1.64634034e-01 ... -1.84626967e-01 -2.14117631e-01 5.42767830e-02] [ 3.13798219e-01 1.17552839e-01 1.88253075e-01 ... -3.86587828e-01 4.57983911e-01 4.88644093e-01] [-1.03443906e-01 2.49683350e-01 -2.49499202e-01 ... 2.71851793e-02 1.92011803e-01 1.72083646e-01]] [[ 4.55090195e-01 1.96851611e-01 8.11889097e-02 ... -9.73500591e-03 2.01959476e-01 4.95183259e-01] [ 4.98715900e-02 -3.80714051e-02 1.54519409e-01 ... 1.42799050e-01 -1.27341479e-01 -1.05441071e-01] [ 1.79214910e-01 -9.80952755e-02 1.41235083e-01 ... -1.81400418e-01 4.25326139e-01 -7.64717394e-03] ... [-3.84986401e-02 1.70673981e-01 9.55634296e-01 ... -3.10996026e-01 2.38501325e-01 6.94495440e-01] [-2.09379390e-01 2.49258667e-01 1.00627708e+00 ... -3.81053193e-04 4.20797430e-03 7.41583288e-01] [ 4.11422312e-01 -3.91205668e-01 2.03994468e-01 ... 2.67245144e-01 5.42548239e-01 -4.20031905e-01]] ... [[ 5.32006562e-01 -3.49175274e-01 1.47214726e-01 ... -2.42603734e-01 2.69155264e-01 3.31571877e-01] [-3.64119917e-01 1.69852883e-01 5.18534184e-01 ... 
-1.50156975e-01 1.13125682e+00 6.39515579e-01] [ 2.25526318e-01 2.36681774e-01 5.03498554e-01 ... 3.37392986e-01 8.55703533e-01 1.10269986e-01] ... [ 8.96920800e-01 2.06777945e-01 -2.18897894e-01 ... 1.93770811e-01 7.26740584e-02 -1.64221510e-01] [ 2.99793303e-01 3.44447613e-01 5.36955178e-01 ... 5.53608596e-01 5.39049685e-01 1.35100573e-01] [-7.43285567e-02 3.98048341e-01 8.17644119e-01 ... -3.39425504e-01 4.51602340e-01 1.55479982e-01]] [[ 3.61157089e-01 3.32715243e-01 2.64290899e-01 ... 1.21071219e-01 3.65156651e-01 2.05547169e-01] [ 7.15223074e-01 7.91773573e-02 3.36168200e-01 ... 1.33934587e-01 6.02064282e-02 7.99992621e-01] [ 2.22342446e-01 -1.30955666e-01 6.63480341e-01 ... -1.66908845e-01 -1.14131264e-01 1.84826866e-01] ... [ 3.23762000e-01 7.54338205e-01 6.44141853e-01 ... -1.80407509e-01 2.00015843e-01 4.72156882e-01] [ 2.43640155e-01 6.08839214e-01 -1.77294031e-01 ... 3.93379748e-01 3.62482488e-01 9.22313035e-01] [ 7.15972722e-01 3.34541231e-01 3.09031099e-01 ... -2.65030712e-01 4.57777649e-01 7.61003420e-02]] [[ 5.28795004e-01 1.40286997e-01 2.99600482e-01 ... 3.20430934e-01 -2.04894066e-01 6.45731151e-01] [ 2.82263994e-01 -5.61067127e-02 3.08749467e-01 ... 3.86734128e-01 -7.57200345e-02 3.00414115e-01] [ 2.07079515e-01 6.22405946e-01 1.63938493e-01 ... 4.06718820e-01 -9.64290500e-02 4.52501923e-02] ... [ 3.90971541e-01 2.67378628e-01 2.73268640e-01 ... 6.19216919e-01 2.62970552e-02 4.00708765e-01] [-1.88525878e-02 4.48022097e-01 8.20047438e-01 ... -4.89664497e-03 2.99862564e-01 1.29636481e-01] [ 2.79260755e-01 -4.55706455e-02 2.28220791e-01 ... 1.44838402e-02 1.67494431e-01 4.47135359e-01]]]] [[[[-1.23068118e+00 1.88093200e-01 -4.40010220e-01 ... -2.98540652e-01 -8.54672432e-01 2.78706610e-01] [ 5.89729071e-01 -1.03262496e+00 -5.53368688e-01 ... -3.52180786e-02 1.64441764e-01 -1.01671541e+00] [-8.14222574e-01 -8.07827935e-02 1.00298285e-01 ... 8.57588053e-01 3.66659343e-01 -7.02912211e-01] ... [-2.71580309e-01 -3.32247168e-02 -1.15730739e+00 ... 
4.82157975e-01 -5.35133243e-01 -7.62715399e-01] [-2.54062593e-01 -1.85395733e-01 -9.06339049e-01 ... 4.92858350e-01 -3.13508451e-01 1.61275253e-01] [ 8.31412524e-02 1.43168300e-01 6.59752905e-01 ... -7.16845334e-01 -2.75527835e-01 -1.83055595e-01]] [[ 1.25629783e-01 1.09439276e-01 -1.52233079e-01 ... -2.50863694e-02 -3.05271238e-01 -8.62599075e-01] [-1.19297361e+00 8.31008017e-01 -1.61741841e+00 ... -1.42555404e+00 -1.10452995e-01 5.27983844e-01] [-9.73629832e-01 -8.68615285e-02 -4.18224454e-01 ... -6.77724123e-01 -1.59800470e-01 -3.16363245e-01] ... [ 1.88624367e-01 2.13777557e-01 9.26315546e-01 ... -5.89408994e-01 1.25783876e-01 -8.84523332e-01] [-4.23111796e-01 -7.67353415e-01 -1.20532043e-01 ... 4.05798435e-01 -1.81121305e-01 4.56761509e-01] [-4.87782001e-01 -3.85336548e-01 -4.41254914e-01 ... 1.63041249e-01 -6.57579780e-01 -1.07918072e+00]] [[-1.35535192e+00 -5.17342508e-01 -1.03460073e+00 ... -2.33838350e-01 -6.88094720e-02 -9.62262392e-01] [ 9.89293575e-01 1.61901433e-02 -3.74105245e-01 ... 1.55957500e-02 -7.01233804e-01 -8.55090618e-01] [-5.15225470e-01 1.30096272e-01 -4.44857895e-01 ... -7.82831609e-01 4.27962005e-01 -1.94608063e-01] ... [-8.29414487e-01 -3.55589658e-01 -6.81887567e-01 ... 6.68368757e-01 -9.65689659e-01 -9.89020914e-02] [ 1.26769215e-01 -1.04852283e+00 -1.38568074e-01 ... -7.92434633e-01 4.24860805e-01 1.99041918e-01] [-5.37009120e-01 2.77653605e-01 -4.91762208e-03 ... -3.60133380e-01 -8.73258173e-01 1.26743346e-01]] ... [[ 2.13199839e-01 3.09907973e-01 8.43637645e-01 ... 5.46916187e-01 -1.75237525e-02 -8.38734135e-02] [ 2.79171020e-01 3.12848866e-01 3.12923253e-01 ... 2.43574291e-01 1.00358725e+00 6.13186322e-02] [ 2.66059507e-02 8.05634797e-01 3.69457692e-01 ... 7.64103234e-02 2.73026288e-01 4.74944592e-01] ... [ 3.89501393e-01 -3.86439949e-01 -2.63328224e-01 ... 1.96345225e-02 -4.65817690e-01 -5.36352508e-02] [ 6.02378510e-02 -7.56778419e-01 -1.24703273e-01 ... 
8.15090537e-01 -4.04527128e-01 -1.02289510e+00] [-5.69271147e-01 6.35092676e-01 -2.48074010e-02 ... -5.38311265e-02 -1.45225421e-01 -7.10865557e-01]] [[ 3.33045602e-01 -3.73088539e-01 8.21913183e-01 ... -3.40637535e-01 -2.62333840e-01 4.18772139e-02] [-7.01782405e-01 -5.07720709e-01 7.50034332e-01 ... 4.91361767e-01 7.68873096e-01 -1.97017655e-01] [-2.75031954e-01 -2.31906846e-02 -4.21558797e-01 ... 7.39309728e-01 -3.70718718e-01 -5.98105907e-01] ... [-5.62916577e-01 -6.60169601e-01 -9.53510225e-01 ... -5.28732061e-01 -5.77528715e-01 8.09554383e-02] [-1.28843084e-01 -8.47394109e-01 5.16374409e-01 ... -4.09815580e-01 9.17551279e-01 -7.57419646e-01] [ 2.41324484e-01 -7.03206837e-01 -2.93787658e-01 ... -3.74244004e-01 -9.22916055e-01 7.13111222e-01]] [[-4.15748090e-01 5.56234300e-01 -9.06744123e-01 ... -4.63762820e-01 -5.15878022e-01 -2.58582145e-01] [ 4.99082118e-01 -8.73776257e-01 5.53751528e-01 ... -9.87229273e-02 -7.72214293e-01 -6.66932166e-01] [-5.80845237e-01 -3.33120763e-01 2.18802184e-01 ... 2.31924310e-01 -3.59781757e-02 2.75727302e-01] ... [-5.16869962e-01 7.92586505e-01 -5.96619785e-01 ... -1.16644509e-01 -8.72997522e-01 -3.80275875e-01] [-5.43183386e-01 -7.05914676e-01 1.20980322e-01 ... -8.69826898e-02 -2.36034781e-01 1.41570956e-01] [ 1.10321064e-02 1.38723373e-01 4.72566217e-01 ... 1.09550536e+00 -7.65803218e-01 -1.38902318e+00]]] [[[-2.57685244e-01 2.67799348e-01 -5.82388878e-01 ... 2.38182828e-01 -5.33631206e-01 -6.73993900e-02] [-6.95756003e-02 -9.83266979e-02 -7.90302515e-01 ... -4.56099331e-01 -8.61036852e-02 6.69961125e-02] [ 5.05924486e-02 4.17866111e-01 -5.58500648e-01 ... -9.44930315e-01 1.20144948e-01 -3.88592690e-01] ... [-3.40638787e-01 -8.73318195e-01 -1.41079342e-02 ... -5.34676492e-01 -9.05929625e-01 -6.70191824e-01] [-3.86852652e-01 -8.92181158e-01 -7.27496564e-01 ... -1.70996994e-01 5.12994587e-01 1.61840364e-01] [-1.96828395e-01 -1.81203306e-01 -9.82951596e-02 ... 
-2.70647585e-01 3.37578475e-01 5.70873380e-01]] [[-8.19735110e-01 -3.33952606e-01 2.25188196e-01 ... -4.61997926e-01 -2.09938258e-01 -3.36357445e-01] [-8.89634252e-01 -1.82319149e-01 -5.53919435e-01 ... 4.36536998e-01 -4.61911798e-01 -1.37828305e-01] [ 3.59015346e-01 2.11066708e-01 -6.22119904e-01 ... -9.70634520e-01 -1.29464164e-01 -7.72790834e-02] ... [ 5.71716368e-01 -4.65588383e-02 -2.53028572e-01 ... 8.70563239e-02 8.67163911e-02 -2.06023127e-01] [-9.50848162e-02 7.05582276e-02 2.94767320e-01 ... -3.15781862e-01 -3.95287797e-02 -1.99116185e-01] [-4.29416031e-01 -3.27678680e-01 5.07872939e-01 ... -6.89073443e-01 3.19696516e-01 -4.91385274e-02]] [[-7.01849818e-01 1.25403441e-02 -3.33212852e-01 ... 1.28997087e-01 -2.71154881e-01 5.74512780e-01] [-4.85135049e-01 2.99198274e-03 2.46023804e-01 ... -4.07790452e-01 -3.67501080e-01 4.22841460e-01] [ 4.15832013e-01 -5.26164949e-01 -1.20307311e-01 ... 4.60352242e-01 -1.92536414e-01 8.67103934e-02] ... [ 3.35837305e-01 1.81148738e-01 4.65142190e-01 ... 2.50153899e-01 -7.85798550e-01 -5.19900098e-02] [ 1.82331234e-01 -9.74736392e-01 1.39343828e-01 ... 4.83742476e-01 5.35400249e-02 2.73260772e-02] [-3.87614548e-01 -6.92889988e-02 -2.34495655e-01 ... -1.40115842e-01 -1.35264844e-01 -2.24337503e-01]] ... [[-3.54471982e-01 -9.29062426e-01 4.98208284e-01 ... -2.51667172e-01 -5.92583120e-01 9.71143171e-02] [-4.92828697e-01 -1.01985884e+00 -1.66926295e-01 ... -4.89486068e-01 6.21683240e-01 -4.18763943e-02] [-5.38318932e-01 -3.48752856e-01 -4.75788891e-01 ... 1.13081627e-01 -4.45393741e-01 1.52385041e-01] ... [-5.74277826e-02 1.04015274e-02 -1.14597708e-01 ... -1.32946059e-01 1.46560788e-01 -1.82359014e-02] [-2.00860500e-01 -1.67562738e-01 -6.35758698e-01 ... 1.94813963e-02 -3.00728437e-02 -7.89396226e-01] [-3.03244680e-01 -5.15843093e-01 1.91857815e-02 ... -6.71425998e-01 -1.62735060e-02 -2.06667483e-01]] [[ 2.77468450e-02 -2.16817126e-01 -2.61119872e-01 ... 
-2.97958612e-01 -2.27704391e-01 -3.39681774e-01] [ 4.03696567e-01 -2.08891600e-01 1.94552541e-01 ... 5.62540926e-02 -1.99937135e-01 -3.03137660e-01] [ 2.40504399e-01 -5.47584236e-01 4.56730239e-02 ... -1.00496960e+00 -3.48033965e-01 1.78397819e-01] ... [-9.34776247e-01 2.92685181e-02 1.01248762e-02 ... -4.94771600e-02 -8.33188713e-01 1.92522720e-01] [-8.06267679e-01 -3.90797332e-02 4.63892043e-01 ... -5.17842770e-01 4.24488634e-01 4.42048371e-01] [-1.30106434e-01 -5.06964862e-01 -7.67228752e-02 ... -3.35875958e-01 5.80653138e-02 -8.12553823e-01]] [[-4.80956137e-01 1.41074240e-01 2.70861477e-01 ... -1.78492710e-01 1.49651632e-01 -3.90100449e-01] [-2.74112552e-01 5.13106406e-01 -5.89632273e-01 ... -1.34749818e+00 -3.24055880e-01 -8.79551888e-01] [ 6.29941344e-01 -1.18497849e-01 -7.38805413e-01 ... -2.37090439e-01 -5.69567919e-01 7.63789862e-02] ... [-3.41150522e-01 -1.77375272e-01 5.82285458e-03 ... -2.35798568e-01 4.33399349e-01 -4.70498502e-01] [-2.63590008e-01 -1.01305939e-01 3.63456398e-01 ... 5.66235065e-01 -1.40081093e-01 -4.61520970e-01] [-1.39400020e-01 -4.50094730e-01 4.17027473e-02 ... -1.87748879e-01 4.59542461e-02 -1.53748095e-01]]] [[[ 6.10813797e-01 1.58676660e+00 1.68699467e+00 ... 2.27831769e+00 2.00441885e+00 2.96856141e+00] [ 1.81771827e+00 2.44705105e+00 7.23320127e-01 ... 3.17382550e+00 6.58726871e-01 2.59119439e+00] [ 1.72304571e+00 1.26019013e+00 2.24588132e+00 ... -7.26316050e-02 1.37243664e+00 1.24674141e+00] ... [ 1.80698884e+00 3.83578479e-01 2.74820590e+00 ... -7.55299032e-02 1.35320759e+00 1.52209568e+00] [-3.05935562e-01 2.18890190e+00 1.08266163e+00 ... 2.40342426e+00 2.47302055e+00 2.33907652e+00] [ 3.96388382e-01 3.17804122e+00 8.95877421e-01 ... 2.83238006e+00 -1.92680871e+00 -3.71095747e-01]] [[ 8.95980716e-01 1.89161193e+00 1.35885954e+00 ... 2.16156673e+00 -5.97186208e-01 9.30869520e-01] [ 1.82169509e+00 1.51626682e+00 -5.50661862e-01 ... 
1.56438291e+00 2.08236599e+00 2.84405422e+00] [ 7.50523090e-01 1.81385148e+00 1.49133325e+00 ... 1.47943997e+00 3.53442717e+00 3.72761726e+00] ... [ 1.64783025e+00 1.44375086e+00 2.78602934e+00 ... 2.21834540e+00 6.25916481e-01 2.18229198e+00] [ 2.74137467e-01 1.38886690e+00 4.24981445e-01 ... 1.62338269e+00 1.09354150e+00 1.94755948e+00] [ 3.38302523e-01 1.57777166e+00 2.22190952e+00 ... 1.84011066e+00 1.58273327e+00 -5.63414872e-01]] [[ 2.20369053e+00 1.35434639e+00 8.53058100e-01 ... 4.87501651e-01 8.57184112e-01 1.17582166e+00] [ 3.24947405e+00 1.99742424e+00 1.15990794e+00 ... 1.39198601e+00 6.21407330e-01 8.30728889e-01] [-2.83099860e-01 1.43941355e+00 2.54639059e-01 ... -8.10357809e-01 2.43764687e+00 1.99708998e+00] ... [ 1.21990788e+00 1.10011816e+00 1.15361743e-01 ... -1.40798032e-01 1.64955771e+00 2.48129535e+00] [ 1.75651681e+00 3.89966226e+00 -5.59104383e-01 ... 3.43991905e-01 1.44783139e-01 2.30177665e+00] [ 2.05929255e+00 1.01643372e+00 4.64709491e-01 ... -1.54869902e+00 -8.97076577e-02 1.68569076e+00]] ... [[ 1.48563564e+00 1.35702705e+00 3.47857833e+00 ... 2.08249831e+00 2.41077137e+00 2.36439133e+00] [ 1.85916221e+00 2.15825534e+00 1.26811576e+00 ... 2.49189198e-01 1.66118395e+00 1.25714600e+00] [-3.11859071e-01 1.01094805e-02 3.36155921e-01 ... 2.35837030e+00 9.15968001e-01 8.99947882e-01] ... [ 2.50066906e-01 1.86040366e+00 1.84750092e+00 ... 1.34197927e+00 -4.19287026e-01 2.20532703e+00] [-6.67290628e-01 1.89322090e+00 2.59938407e+00 ... -3.92808616e-01 3.14402890e+00 1.70165682e+00] [ 3.51209664e+00 3.36851269e-01 7.09655702e-01 ... 1.38875699e+00 1.00400424e+00 6.21491730e-01]] [[ 8.99691582e-01 -3.25268880e-02 1.49398506e+00 ... 8.23757127e-02 2.35062790e+00 2.52072573e+00] [ 1.06360602e+00 1.38771191e-01 2.18612075e+00 ... 1.61426437e+00 1.84562552e+00 2.54765296e+00] [ 3.17392170e-01 -7.50674486e-01 9.31805372e-01 ... 3.24953008e+00 7.90488780e-01 2.97821140e+00] ... [ 1.99288160e-01 1.91103899e+00 3.71137261e+00 ... 
-1.99094093e+00 1.22939169e+00 2.00847816e+00] [ 2.98603845e+00 2.17078876e+00 1.91719532e-01 ... 2.26095963e+00 2.35311413e+00 8.13613236e-02] [ 3.24485350e+00 2.79781461e+00 2.22250056e+00 ... 1.93520594e+00 1.38664424e+00 -9.87551332e-01]] [[ 1.07146478e+00 8.77224132e-02 1.18316936e+00 ... 2.09909487e+00 -6.64112628e-01 -3.64707083e-01] [ 1.43208325e+00 5.40247746e-02 1.30744910e+00 ... -1.43442199e-01 2.17760190e-01 5.04051030e-01] [ 1.55672383e+00 6.77472949e-01 1.48014471e-01 ... 1.62493259e-01 1.27131522e+00 -5.58759496e-02] ... [ 5.00781059e-01 2.23409009e+00 3.90020460e-01 ... 1.77967119e+00 8.41472208e-01 2.31264424e+00] [ 2.98310208e+00 1.40387213e+00 1.08403945e+00 ... 4.03760254e-01 7.73543596e-01 3.83756101e-01] [ 1.43117309e+00 2.06132388e+00 1.77407229e+00 ... 7.03058541e-01 1.37573922e+00 1.75278842e-01]]] [[[ 7.47020006e-01 1.21808112e+00 1.89808202e+00 ... 1.00204659e+00 1.68537557e+00 1.39955592e+00] [ 1.64970779e+00 1.68266284e+00 8.87738049e-01 ... 6.81703746e-01 2.04171681e+00 1.76722336e+00] [ 9.23130751e-01 1.50000870e+00 7.80378640e-01 ... 1.28726792e+00 9.84893262e-01 1.05688143e+00] ... [ 1.75333655e+00 1.27537251e+00 1.41541135e+00 ... 1.25561619e+00 1.65932155e+00 9.97009099e-01] [ 1.12578428e+00 1.07419086e+00 2.89118141e-01 ... 1.45520544e+00 4.97301608e-01 1.83869624e+00] [ 9.40947115e-01 1.14570117e+00 1.83327031e+00 ... 1.28245199e+00 9.82070386e-01 2.25857425e+00]] [[ 1.22962034e+00 2.05420089e+00 1.18312418e+00 ... 9.22661960e-01 1.50122273e+00 -7.13526784e-03] [ 6.24908149e-01 1.42520040e-01 1.74496007e+00 ... 1.02248788e+00 1.54375255e+00 2.05528808e+00] [ 8.96431446e-01 1.96536112e+00 6.66607916e-01 ... 1.57131982e+00 1.32527435e+00 8.28005791e-01] ... [ 7.71147251e-01 7.05283523e-01 5.12342513e-01 ... 1.12432383e-01 7.36827791e-01 1.31089950e+00] [ 2.06832170e+00 8.35543454e-01 1.25161934e+00 ... 5.68449199e-01 2.08501235e-01 1.40819371e+00] [ 1.09594083e+00 1.78137672e+00 1.48089147e+00 ... 
1.08367646e+00 5.80380499e-01 5.46877682e-01]] [[ 9.39030170e-01 6.86188519e-01 1.11330342e+00 ... 5.68701982e-01 1.42422485e+00 9.50944662e-01] [ 1.55630326e+00 8.20369601e-01 8.49891067e-01 ... 9.73371387e-01 1.33443511e+00 1.61468554e+00] [ 5.86999953e-01 2.11885619e+00 1.28991425e+00 ... 1.17750669e+00 1.46843529e+00 9.34655130e-01] ... [ 5.29357016e-01 1.29040942e-01 1.86573517e+00 ... 7.15531766e-01 1.16893601e+00 1.99882102e+00] [ 1.61132145e+00 1.30575955e+00 1.51911521e+00 ... 1.13355041e+00 1.83235145e+00 1.02588224e+00] [ 1.75319746e-01 1.49823821e+00 1.01353300e+00 ... 4.80643392e-01 8.75375271e-01 2.07058430e+00]] ... [[ 1.14420021e+00 1.16231966e+00 1.25169563e+00 ... 5.79512239e-01 1.45159030e+00 1.60369754e+00] [ 5.61186910e-01 1.89433026e+00 1.43783343e+00 ... 1.30070210e+00 2.14479518e+00 1.19871283e+00] [ 1.27903938e+00 1.40128720e+00 1.08233619e+00 ... 1.03151357e+00 6.79816127e-01 9.16484177e-01] ... [ 1.52408338e+00 -1.48949400e-01 7.51828849e-01 ... 9.35869336e-01 1.61786675e+00 1.04227388e+00] [ 1.20772290e+00 1.16561162e+00 1.66162336e+00 ... 1.86708403e+00 3.58033925e-01 1.44206882e+00] [ 8.26080382e-01 6.82426214e-01 1.43888509e+00 ... 1.64525425e+00 1.13277090e+00 8.85894716e-01]] [[ 1.41329944e+00 1.07327330e+00 1.23395646e+00 ... 1.32663476e+00 1.92190543e-01 3.82748991e-01] [ 7.43639708e-01 9.07494843e-01 2.35536957e+00 ... 1.10678160e+00 1.25008392e+00 9.69941318e-01] [ 1.46289289e+00 8.06695282e-01 6.94177389e-01 ... 9.32332158e-01 1.35298535e-01 1.21161628e+00] ... [ 1.54843998e+00 6.79309368e-01 6.45827472e-01 ... 4.16936576e-01 1.51711833e+00 1.28666735e+00] [ 9.13716435e-01 8.39982092e-01 1.22557652e+00 ... 1.70616806e+00 1.33747244e+00 1.01883125e+00] [ 2.77332366e-01 1.52310979e+00 1.54927433e+00 ... 7.17522442e-01 1.44383073e+00 1.49646616e+00]] [[ 1.04474664e+00 2.06136033e-01 1.64217830e+00 ... 7.13893950e-01 1.45662272e+00 2.11263752e+00] [ 1.00636935e+00 1.33574855e+00 1.28988636e+00 ... 
7.80770957e-01 1.17426658e+00 5.90600133e-01] [ 9.60768938e-01 1.18724275e+00 8.10092211e-01 ... 2.04237986e+00 2.46570438e-01 6.44436240e-01] ... [ 1.08178413e+00 1.62134433e+00 1.71484619e-01 ... 9.87999439e-01 1.86863530e+00 2.71431208e-01] [ 1.07920194e+00 1.31355596e+00 1.64792919e+00 ... 1.49690080e+00 7.45610297e-01 1.53102112e+00] [ 4.19958353e-01 6.29358053e-01 8.05809677e-01 ... 1.23176575e+00 1.62949264e+00 1.23039758e+00]]] [[[ 5.78871906e-01 1.13488150e+00 8.07689428e-01 ... 2.09691331e-01 5.44647813e-01 -1.24915354e-01] [ 9.34459090e-01 -6.09950460e-02 1.51086676e+00 ... 5.81775963e-01 -3.03155985e-02 5.05181313e-01] [ 6.13594830e-01 6.69457257e-01 4.58270222e-01 ... 1.36108375e+00 2.98753589e-01 1.19903922e-01] ... [ 1.10599506e+00 -2.28656083e-01 -1.13401465e-01 ... -4.47221398e-01 2.48370856e-01 4.05697644e-01] [ 9.12394524e-01 1.28055882e+00 5.97740591e-01 ... 1.86652064e-01 5.93476534e-01 3.22341263e-01] [-3.02258641e-01 -1.24885984e-01 1.30774868e+00 ... 7.99488246e-01 9.72204864e-01 1.46777024e-02]] [[ 1.38114882e+00 1.13235414e+00 2.36219540e-01 ... 1.20935902e-01 4.84131336e-01 1.10400736e+00] [ 1.70080090e+00 7.78827190e-01 1.13848758e+00 ... -1.16347909e-01 1.21810913e+00 8.86080340e-02] [-6.53054655e-01 4.08375233e-01 1.17663109e+00 ... -3.30787271e-01 8.17001686e-02 1.02006984e+00] ... [ 3.32052745e-02 1.33463240e+00 4.96309966e-01 ... 7.46655762e-01 6.19917154e-01 -8.83332491e-01] [ 4.94488657e-01 -3.21651042e-01 -1.23396292e-01 ... 3.82148117e-01 4.32208091e-01 -9.80260789e-01] [ 1.24037549e-01 2.86038339e-01 6.88437223e-01 ... 6.35048628e-01 4.79447283e-02 5.26486874e-01]] [[ 5.53780913e-01 5.97363889e-01 2.16900647e-01 ... -5.61772823e-01 3.33644480e-01 -3.89650166e-02] [ 7.28566468e-01 4.07872438e-01 9.47005093e-01 ... 1.37684560e+00 2.32974067e-01 1.82541156e+00] [ 8.66958976e-01 1.35986106e-02 8.69138300e-01 ... -6.42248942e-03 1.84173167e-01 -5.26224962e-03] ... [-7.14965224e-01 -3.04872304e-01 6.06101573e-01 ... 
-4.73759055e-01 3.57104130e-02 9.29727376e-01] [ 3.84045005e-01 7.73308992e-01 5.55463910e-01 ... 1.06863117e+00 1.20186830e+00 -5.45459092e-01] [-1.82441518e-01 8.70835185e-01 2.86704302e-01 ... 2.00297203e-04 -2.66442955e-01 1.14396222e-01]] ... [[-1.19870767e-01 5.20823956e-01 3.94955166e-02 ... 9.04052317e-01 6.61783457e-01 3.72550786e-01] [-6.49297535e-02 1.12154961e+00 5.64291477e-02 ... 1.45204341e+00 1.00464511e+00 1.16371465e+00] [ 9.67358202e-02 -1.56649381e-01 1.73212469e+00 ... 1.10384023e+00 -5.17518163e-01 3.52197826e-01] ... [-6.36932313e-01 1.70708016e-01 -4.19012040e-01 ... 1.93371260e+00 -2.66668409e-01 1.57157890e-02] [ 3.65167201e-01 5.13715267e-01 1.74127847e-01 ... 9.48245406e-01 5.45598149e-01 7.21835434e-01] [-6.28469884e-01 -6.48414254e-01 8.17166746e-01 ... -7.21528709e-01 1.05233347e+00 -6.38094723e-01]] [[-5.85242927e-01 2.84663796e-01 -9.52402353e-01 ... 3.21273357e-01 -3.84375900e-01 6.58541918e-01] [ 7.84005463e-01 3.32151443e-01 -4.87128317e-01 ... 4.93008226e-01 4.02817190e-01 6.72072887e-01] [-5.89179933e-01 -2.55046673e-02 2.76820213e-01 ... -7.48652935e-01 6.40257955e-01 1.04847932e+00] ... [ 1.77610266e+00 6.66320920e-01 -5.68466522e-02 ... 8.14798474e-01 3.15036416e-01 1.65140593e+00] [-4.86465450e-03 4.63057049e-02 2.69901484e-01 ... -2.07979023e-01 1.89045095e+00 -6.32632375e-02] [ 1.26756477e+00 1.18068433e+00 7.99327672e-01 ... 5.78215480e-01 7.82225072e-01 5.73975086e-01]] [[ 5.79369009e-01 6.20558560e-01 -2.59078681e-01 ... -2.03902811e-01 -1.10409126e-01 8.75211120e-01] [ 2.47102603e-01 -3.41586500e-01 7.33936548e-01 ... 5.14501095e-01 5.64958990e-01 -2.43958324e-01] [ 1.13147545e+00 -1.50858745e-01 8.50331545e-01 ... 1.54600993e-01 -3.75210136e-01 1.14208364e+00] ... [ 2.62432009e-01 3.24485272e-01 -5.04956961e-01 ... 2.74845093e-01 2.62748808e-01 3.81660730e-01] [ 6.17921293e-01 1.34648049e+00 -2.72353381e-01 ... 1.69221830e+00 9.66490030e-01 7.17945039e-01] [-6.51468411e-02 5.65891564e-01 1.21493316e+00 ... 
1.11729169e+00 3.79336625e-01 1.18532693e+00]]] [[[-8.01478699e-03 6.45684779e-01 -1.17440253e-01 ... 5.37638605e-01 5.76938689e-01 6.42397642e-01] [ 2.71928161e-01 -6.65142387e-02 8.97703543e-02 ... 3.88039589e-01 -1.20561063e-01 1.90464079e-01] [ 1.43691972e-01 7.77582347e-01 3.75438809e-01 ... 1.98101029e-01 3.17014962e-01 -1.63445011e-01] ... [ 5.84938169e-01 -2.79653251e-01 -1.09103255e-01 ... 5.86192966e-01 -1.41191706e-01 5.50081611e-01] [ 1.77088469e-01 -3.28683287e-01 3.28244656e-01 ... 4.87024039e-02 1.81208313e-01 1.14113176e-02] [ 4.28917617e-01 5.64471185e-01 1.11981869e-01 ... 8.27688575e-01 -3.23905170e-01 1.87384099e-01]] [[ 3.34084406e-03 1.53324511e-02 4.82400954e-02 ... 4.07893687e-01 1.02622557e+00 -1.97084993e-01] [-1.58334002e-01 2.68323123e-01 7.40707994e-01 ... -1.30225807e-01 8.13057899e-01 -2.60121435e-01] [ 4.49073523e-01 -1.59504823e-02 -8.71402442e-01 ... 3.09744775e-01 8.06504786e-01 4.74567652e-01] ... [ 6.94809258e-01 2.28699356e-01 -1.72607228e-01 ... 3.09572697e-01 -3.71226817e-01 -1.49887428e-01] [-1.15640499e-01 -1.22528553e-01 4.26438093e-01 ... -2.38229647e-01 -1.08570419e-01 2.99812108e-01] [-1.05997413e-01 2.45386183e-01 -2.51788676e-01 ... 3.56485337e-01 1.64030984e-01 8.50792974e-02]] [[-1.20299444e-01 6.42909288e-01 3.26860011e-01 ... 5.18794656e-01 2.09222630e-01 2.62302995e-01] [ 8.71400312e-02 5.09944499e-01 6.02409802e-03 ... 2.19259143e-01 -1.62098676e-01 2.50183463e-01] [-2.08383381e-01 -6.74811825e-02 5.24764061e-01 ... 2.81578243e-01 7.44908750e-01 6.02554142e-01] ... [ 2.34623685e-01 -3.67497921e-01 -1.80230126e-01 ... 5.95759332e-01 5.08250177e-01 2.53720414e-02] [ 6.46068007e-02 -3.25997055e-01 -6.25575036e-02 ... -5.24455123e-02 3.30108374e-01 6.74449265e-01] [ 4.78764802e-01 3.64529788e-01 4.97143447e-01 ... 3.70031327e-01 1.48632571e-01 -1.69191435e-01]] ... [[ 4.13352847e-01 1.89181104e-01 6.18992746e-01 ... -8.03755000e-02 4.46646243e-01 2.30802417e-01] [ 8.96984562e-02 9.67567980e-01 -5.29040873e-01 ... 
4.11583155e-01 5.98247275e-02 6.26371980e-01] [-8.50260779e-02 3.49108845e-01 7.27425963e-02 ... -5.23008287e-01 -1.77482143e-01 5.47597669e-02] ... [-3.12369257e-01 -1.49908096e-01 7.30919480e-01 ... 5.26243985e-01 3.72004628e-01 7.78202415e-01] [ 1.04005910e-01 1.10631041e-01 -7.28388876e-02 ... 1.29161105e-01 4.10283953e-01 -3.60870212e-01] [ 3.98624182e-01 -2.29479551e-01 -4.51657385e-01 ... -2.28308663e-01 -3.44705850e-01 7.04102695e-01]] [[ 5.90352535e-01 4.57708567e-01 1.35571480e-01 ... -1.44785702e-01 8.22434425e-01 -7.72837773e-02] [ 7.85636604e-01 4.83912200e-01 3.58166665e-01 ... 3.98910105e-01 2.51215547e-01 1.33919001e+00] [ 7.42724538e-01 2.75872767e-01 -5.23796752e-02 ... 9.18145955e-01 -7.71146864e-02 8.26857209e-01] ... [ 3.62103164e-01 3.89442332e-02 4.18330520e-01 ... 4.59500849e-01 2.17002317e-01 -3.04157913e-01] [-5.17511070e-02 1.56159446e-01 -9.84910131e-02 ... 2.35046268e-01 7.69620612e-02 7.04211235e-01] [-2.02155188e-01 5.57590246e-01 3.09926808e-01 ... 1.18567862e-01 -1.62969694e-01 2.77024835e-01]] [[ 2.03062683e-01 4.53341424e-01 -3.75568271e-02 ... 3.64117563e-01 1.50787309e-01 -2.08677277e-01] [ 3.83208185e-01 3.78164947e-01 -1.01214059e-01 ... 1.28661931e-01 7.81135485e-02 1.65387522e-02] [-8.35625827e-02 8.04976285e-01 -1.78699702e-01 ... 2.01091945e-01 8.95490125e-02 2.29003683e-01] ... [-3.11793596e-01 2.39649668e-01 6.12401187e-01 ... -7.36698285e-02 -5.74764311e-01 3.03599775e-01] [ 7.74747491e-01 -9.02207717e-02 3.21545452e-01 ... 3.21186721e-01 1.37203395e-01 1.00272223e-01] [ 4.68544304e-01 4.53327566e-01 7.46821836e-02 ... -8.61245543e-02 -4.01107550e-01 2.96206307e-03]]]] [[[[-8.36479515e-02 -7.18498707e-01 7.50412643e-01 ... 1.31584382e+00 -3.44377607e-02 -4.79983479e-01] [-5.10102332e-01 -5.34875691e-01 -3.16716373e-01 ... 8.18414271e-01 -4.44810778e-01 7.10259855e-01] [-4.16979402e-01 2.81882823e-01 -5.97640038e-01 ... 6.85848296e-01 7.77458921e-02 -1.25073814e+00] ... [ 1.20005690e-01 -1.18870117e-01 1.41584188e-01 ... 
-2.17254728e-01 1.84199840e-01 -1.30021536e+00] [ 1.02496624e+00 1.69843689e-01 1.31620741e+00 ... -6.80092335e-01 -9.76758957e-01 -4.54272419e-01] [ 3.09662223e-01 5.64104676e-01 -1.11489087e-01 ... -1.49752164e+00 -1.39322177e-01 3.21175605e-01]] [[-1.10363781e-01 3.61001074e-01 3.54158312e-01 ... 4.46321815e-01 -1.07987046e+00 -6.42294466e-01] [ 4.77520853e-01 -3.84714097e-01 6.84133396e-02 ... 1.56873502e-02 -5.96088767e-01 -3.01310867e-01] [-3.45205158e-01 -1.55624509e+00 -1.36965466e+00 ... 1.78349406e-01 -1.19367158e+00 -1.45092919e-01] ... [ 4.36181545e-01 -4.33648765e-01 -3.05699557e-01 ... -1.75794280e+00 -1.17565513e+00 4.93215650e-01] [-7.39787698e-01 -2.26339981e-01 -4.50680912e-01 ... -5.26562393e-01 5.21074295e-01 -5.89486361e-01] [ 9.40576613e-01 -3.02808285e-01 5.82779706e-01 ... -5.60745001e-01 1.43954587e+00 -4.22199011e-01]] [[-9.08637583e-01 -2.99702644e-01 -3.10693383e-01 ... -3.05917919e-01 1.30357170e+00 -2.37395734e-01] [-2.75395662e-02 -1.89249933e-01 -5.64758062e-01 ... -2.69256890e-01 3.64292949e-01 -7.23984182e-01] [-5.92620373e-02 1.08326294e-01 -4.63637024e-01 ... -2.76902318e-01 7.21107244e-01 3.07794005e-01] ... [ 4.13330972e-01 6.78122461e-01 -2.58725971e-01 ... -3.82560730e-01 -2.52303958e-01 1.19148660e+00] [-7.59293914e-01 -1.15790462e+00 -1.28575599e+00 ... -1.21970415e-01 -7.85894245e-02 1.53410271e-01] [ 8.62186193e-01 -6.73603833e-01 1.50778949e-01 ... -8.31869602e-01 6.77394748e-01 1.61034495e-01]] ... [[-3.26242775e-01 6.96299851e-01 -4.22524894e-03 ... 2.01558664e-01 -5.85274875e-01 2.42173538e-01] [-9.93214607e-01 -2.43439153e-01 -6.58784628e-01 ... 1.11933053e+00 -7.95096755e-01 1.69774383e-01] [-6.32080436e-01 4.34278876e-01 5.18082201e-01 ... 1.04428244e+00 -3.80777001e-01 4.34258014e-01] ... [-7.37335980e-02 4.20256823e-01 -3.47027600e-01 ... -2.40906775e-01 3.43569696e-01 4.02485311e-01] [-4.12019014e-01 2.30992436e-01 8.70752633e-02 ... 
6.20571792e-01 -2.05084626e-02 5.41869581e-01] [ 6.60157979e-01 4.88147348e-01 3.56254727e-01 ... -1.76991507e-01 2.49025539e-01 -1.55185565e-01]] [[ 5.44930875e-01 -3.69363189e-01 -6.08330965e-01 ... -1.67465970e-01 -3.45294625e-01 -7.53229037e-02] [-7.26530671e-01 -1.16293120e+00 -7.19229206e-02 ... 6.82207048e-01 9.47596073e-01 -6.98361695e-01] [-8.47537577e-01 -2.04839960e-01 3.60571921e-01 ... 6.24711633e-01 8.76474321e-01 -3.91836502e-02] ... [ 5.45323119e-02 -9.69835758e-01 4.61738221e-02 ... -5.18561780e-01 -1.10368311e+00 5.28118350e-02] [-9.89951491e-02 -5.04159927e-01 -8.98483992e-01 ... -7.80127585e-01 -6.31880164e-01 7.41464674e-01] [-2.34704182e-01 -3.19181234e-01 -6.49698079e-02 ... -9.34744716e-01 2.44957395e-03 9.58346963e-01]] [[ 4.98944342e-01 1.41750604e-01 -3.30802351e-01 ... 2.19281122e-01 3.04033607e-02 -3.39884721e-02] [-4.77490693e-01 2.87795991e-01 9.77005482e-01 ... -1.53282118e+00 -3.06709230e-01 4.21623170e-01] [-6.07539594e-01 -1.75101891e-01 -1.40928805e+00 ... -3.59609097e-01 -2.22010955e-01 -7.68479228e-01] ... [-6.81348592e-02 2.66032547e-01 -1.36592165e-01 ... 6.68618917e-01 -6.74239457e-01 -2.35719159e-01] [ 7.04849899e-01 -4.90228273e-02 6.71388984e-01 ... 7.74337709e-01 -1.79983571e-01 9.62457061e-02] [ 3.51594865e-01 -6.95241988e-01 1.86731443e-01 ... 3.19774836e-01 2.36772954e-01 7.30525613e-01]]] [[[ 4.09641266e-01 2.48426467e-01 -2.25756913e-01 ... 2.81503588e-01 1.21618316e-01 -1.35453641e-01] [ 1.76398963e-01 3.20909858e-01 3.60763133e-01 ... -5.61674953e-01 -2.99712002e-01 -1.02638245e+00] [-3.25173438e-01 -6.11812234e-01 -2.37083748e-01 ... -7.50090241e-01 -2.62570322e-01 -5.38373113e-01] ... [ 3.79291266e-01 -5.20371675e-01 -1.71339333e-01 ... -1.84491977e-01 2.35620677e-01 -1.16889611e-01] [-1.28592715e-01 -2.05737606e-01 -2.73500025e-01 ... -3.85959923e-01 -3.38869765e-02 1.77273646e-01] [-1.04812562e+00 1.46945536e-01 -4.85977203e-01 ... 
-4.81030762e-01 4.00772631e-01 -6.77683413e-01]] [[-5.58915555e-01 -7.68336475e-01 -2.98744321e-01 ... 2.07318515e-01 -6.86306953e-01 -3.20211887e-01] [-9.87227708e-02 4.66560349e-02 -6.57933950e-01 ... 2.91517854e-01 -1.30002409e-01 -2.58516446e-02] [ 5.98588109e-01 -3.81686181e-01 -7.67035484e-02 ... -8.24631080e-02 -3.24500769e-01 7.17241913e-02] ... [-1.25070408e-01 -6.92470133e-01 7.29746163e-01 ... -7.99096823e-02 -5.98043084e-01 -2.91373432e-01] [-2.04443365e-01 4.42221761e-02 8.72675627e-02 ... -1.94561377e-01 8.28160644e-01 -1.99392557e-01] [ 3.76326501e-01 -6.38588369e-01 -6.60599291e-01 ... -2.16614246e-01 -6.25563085e-01 -2.02477813e-01]] [[ 1.38822034e-01 2.07657143e-01 -7.98908830e-01 ... -5.21512866e-01 1.11485809e-01 8.70482773e-02] [ 2.38959610e-01 5.79389706e-02 -4.76937234e-01 ... -5.54760359e-02 -5.19215986e-02 -6.10158741e-01] [-2.03659534e-01 -3.10024559e-01 2.50733137e-01 ... 7.29653835e-01 -4.17321026e-02 -4.16526884e-01] ... [ 2.29619369e-02 -6.19365752e-01 -1.61695525e-01 ... -4.26098108e-01 -2.31212556e-01 1.99445650e-01] [-3.78807485e-01 -2.23109797e-01 -6.94539994e-02 ... 1.35654017e-01 -2.15135783e-01 1.48595884e-01] [-5.99821150e-01 -1.28620461e-01 -2.37303406e-01 ... -2.03026056e-01 -7.41285980e-01 -4.45801169e-01]] ... [[ 7.65199959e-01 -7.08189130e-01 -3.86637986e-01 ... 4.27663296e-01 5.44345379e-01 -7.91532919e-02] [-8.54534030e-01 -6.66138709e-01 3.64172071e-01 ... -2.07425401e-01 4.13345218e-01 -5.29321373e-01] [ 3.96982245e-02 3.42366397e-01 4.48280461e-02 ... -6.19222760e-01 -5.17999947e-01 6.64502904e-02] ... [ 2.38097817e-01 2.83991724e-01 -4.44663107e-01 ... -5.66521436e-02 4.83821988e-01 -3.08789998e-01] [-1.17098078e-01 2.02584326e-01 1.49326742e-01 ... -2.07661435e-01 1.55937850e-01 -5.14160275e-01] [-4.65281773e-03 5.94445527e-01 -3.00701261e-01 ... -1.72667488e-01 -4.68698621e-01 -4.40204650e-01]] [[-2.76004523e-01 5.61006606e-01 -7.30048180e-01 ... 
-2.49151692e-01 -3.29275280e-01 -5.05884960e-02] [-5.32277703e-01 1.87286377e-01 -5.80668330e-01 ... 3.25181521e-02 1.71050772e-01 -7.14337220e-03] [-9.73160192e-02 3.02746534e-01 2.27539930e-02 ... -8.43230542e-03 1.93821907e-01 5.75747013e-01] ... [-6.00975454e-01 -6.29500687e-01 -1.30030528e-01 ... 4.87812847e-01 2.26710662e-01 1.93291705e-03] [-5.79527915e-02 -4.29785140e-02 -3.24359424e-02 ... -3.40198696e-01 -2.27221593e-01 -9.67909873e-01] [ 5.77910066e-01 1.31422803e-01 3.91284019e-01 ... -1.77527010e-01 -9.49608862e-01 -4.76269186e-01]] [[-1.76862657e-01 2.99619287e-01 -6.58137649e-02 ... -5.20473063e-01 5.78765392e-01 -4.61695910e-01] [-9.02671833e-03 -2.27242894e-02 3.78726386e-02 ... 2.16284841e-01 1.59898490e-01 -2.03221217e-01] [-7.32248425e-01 -5.90908229e-01 -2.74865180e-02 ... -3.50983590e-02 -6.21304572e-01 3.93868051e-02] ... [-3.03202897e-01 6.72070444e-01 -3.95796895e-01 ... -7.04826415e-01 -7.28340507e-01 2.23672211e-01] [-6.83882415e-01 -7.84288421e-02 -7.56396279e-02 ... 3.94018441e-02 6.61340594e-01 1.09583944e-01] [ 5.55282116e-01 -1.72396645e-01 1.81445986e-01 ... -2.62014776e-01 2.05075387e-02 -1.74804002e-01]]] [[[ 1.53045022e+00 1.77136660e+00 1.35893154e+00 ... 2.32765126e+00 1.57982576e+00 -1.29202485e-01] [ 4.86640453e-01 2.04171419e+00 7.19316423e-01 ... 7.13521481e-01 -5.41589737e-01 4.06501722e+00] [-8.16995740e-01 3.14318150e-01 1.42069113e+00 ... 9.87061203e-01 1.08988035e+00 2.83609986e+00] ... [ 1.70188284e+00 8.31539094e-01 6.58623099e-01 ... 1.48456383e+00 4.05740291e-01 3.28268111e-01] [ 8.58875751e-01 4.01153743e-01 1.60279739e+00 ... 3.53222877e-01 1.31490469e+00 1.79817355e+00] [ 1.22677517e+00 1.34956941e-01 1.76623392e+00 ... -6.19288504e-01 1.22672427e+00 2.13066912e+00]] [[ 1.70678449e+00 -5.66966951e-01 1.98992714e-01 ... 3.60097051e-01 1.98146486e+00 2.45180583e+00] [ 3.71371412e+00 1.30979836e+00 8.40700448e-01 ... 8.21702480e-01 -2.44976103e-01 2.50977039e-01] [ 2.85848677e-01 8.21716189e-01 1.65797031e+00 ... 
-7.35746026e-02 2.71349001e+00 1.17325616e+00] ... [ 3.60520869e-01 -2.67629594e-01 1.03135955e+00 ... 6.59119964e-01 -5.27193904e-01 1.16399586e+00] [ 3.27276373e+00 7.25204706e-01 1.49028015e+00 ... 1.09319770e+00 8.96138430e-01 1.96553445e+00] [ 1.74426806e+00 3.07455111e+00 1.04678488e+00 ... 8.35419357e-01 2.26269341e+00 -7.95731425e-01]] [[ 7.70680010e-01 2.35594487e+00 1.33450651e+00 ... 2.94733375e-01 9.72895324e-03 1.18918300e+00] [ 9.98146474e-01 1.41978240e+00 1.50896966e+00 ... -1.15110266e+00 3.26309085e+00 1.71534014e+00] [ 1.06815767e+00 5.27847528e-01 1.74497497e+00 ... 6.71000898e-01 1.32337093e+00 1.70903891e-01] ... [ 2.16100860e+00 6.47536337e-01 3.23965281e-01 ... 1.77518404e+00 1.40067983e+00 1.04511726e+00] [ 9.40867901e-01 2.28345609e+00 3.20767164e+00 ... 2.07752085e+00 6.82893515e-01 5.55786371e-01] [ 1.48849094e+00 3.17125225e+00 1.80114317e+00 ... 8.26929927e-01 -5.31854033e-02 3.87739062e+00]] ... [[ 2.58930057e-01 1.85661185e+00 -3.92976590e-02 ... 2.51539278e+00 -1.64290798e+00 1.09042990e+00] [-1.99182379e+00 -1.93985417e-01 2.14167571e+00 ... 1.08317077e+00 -9.30087209e-01 6.51358902e-01] [-6.90183580e-01 2.23506546e+00 2.05948997e+00 ... 2.01130748e+00 -5.31136692e-01 5.26482642e-01] ... [ 2.41167665e-01 -3.33362222e-01 2.20798016e+00 ... 4.35560882e-01 4.61514473e+00 2.01745582e+00] [ 1.14595973e+00 1.14790094e+00 1.48491597e+00 ... 1.95755213e-01 -4.14963424e-01 3.57615829e+00] [-1.03228974e+00 2.85614324e+00 1.07016432e+00 ... 3.62366974e-01 3.73807922e-02 2.20816064e+00]] [[ 9.98716533e-01 1.91979635e+00 1.02827907e+00 ... -1.06194794e+00 6.19909286e-01 2.30551624e+00] [ 3.25869739e-01 2.77428317e+00 1.77665210e+00 ... 4.51677173e-01 2.02703452e+00 1.16519523e+00] [ 7.69105256e-02 -4.61339243e-02 1.93610990e+00 ... 4.47048903e-01 9.44814265e-01 1.40535915e+00] ... [ 1.56602991e+00 1.11930227e+00 -1.30642891e+00 ... -2.99453229e-01 1.08256161e-01 1.54745924e+00] [-9.53999817e-01 5.20436764e-01 1.79971671e+00 ... 
8.03709447e-01 1.76201212e+00 1.52739787e+00] [-8.47701252e-01 -4.48130220e-01 1.73820913e+00 ... -1.24857044e+00 1.09534609e+00 3.22117805e+00]] [[ 8.58062804e-01 6.05097353e-01 1.02105439e+00 ... 5.47335505e-01 1.58304667e+00 1.16237390e+00] [-1.64978355e-01 5.50842047e-01 2.10184860e+00 ... 1.02941239e+00 2.94104433e+00 1.21586335e+00] [ 1.08369482e+00 3.37534517e-01 8.02074254e-01 ... 3.38455701e+00 8.76943827e-01 8.43079627e-01] ... [ 3.01863098e+00 1.60564291e+00 1.34006572e+00 ... 2.44968414e+00 3.02241087e+00 1.13395679e+00] [ 1.51765287e+00 1.73966348e+00 -1.06243944e+00 ... -4.96780783e-01 4.94352460e-01 3.44407773e+00] [ 1.33399332e+00 2.07710576e+00 1.97012675e+00 ... 8.10286939e-01 1.56373262e+00 1.17790222e+00]]] [[[ 5.13429642e-01 1.39532006e+00 7.06295967e-01 ... -1.95163026e-01 2.64738250e+00 6.29136264e-01] [ 1.50012171e+00 1.12424684e+00 1.21088874e+00 ... 1.36162758e+00 1.53823578e+00 6.45989478e-01] [ 1.55378234e+00 1.02202225e+00 1.17264259e+00 ... 1.05539644e+00 1.89378572e+00 1.10993290e+00] ... [ 3.62406671e-01 2.07125974e+00 1.49007678e+00 ... 4.77646589e-01 1.82427537e+00 1.06086314e+00] [ 3.03682178e-01 2.25363779e+00 1.05357206e+00 ... 7.07755744e-01 1.03096116e+00 6.84140205e-01] [ 6.77554786e-01 9.35902894e-01 1.11489868e+00 ... 1.29843724e+00 1.27859557e+00 8.26779306e-01]] [[ 1.64872634e+00 1.55537736e+00 1.40727305e+00 ... -8.16930551e-03 1.41045523e+00 1.26557350e+00] [ 1.01964819e+00 9.31691468e-01 1.83058512e+00 ... 1.10543931e+00 1.00078559e+00 1.44506955e+00] [ 7.25174129e-01 1.73164356e+00 2.09261179e+00 ... 6.88084543e-01 8.93630981e-01 1.03905034e+00] ... [ 3.03673744e-01 1.77489209e+00 1.14232278e+00 ... 1.41472769e+00 9.18278337e-01 9.11916614e-01] [ 8.44357252e-01 1.01489389e+00 1.27702773e+00 ... 9.37179744e-01 1.93195641e+00 1.33513093e+00] [ 2.39524460e+00 7.46572018e-01 1.09841323e+00 ... 5.65786064e-01 9.95457828e-01 1.20679963e+00]] [[ 1.22175968e+00 8.62027645e-01 1.99027276e+00 ... 
8.85897458e-01 4.80134100e-01 6.81957185e-01] [ 1.81148875e+00 7.85464048e-01 1.39260519e+00 ... 7.60027587e-01 1.38932693e+00 1.09769940e+00] [ 1.27701437e+00 2.55644888e-01 9.89826560e-01 ... 1.62573647e+00 2.07901073e+00 7.87326276e-01] ... [ 1.35773361e+00 1.40340328e+00 2.29794756e-02 ... 2.00263357e+00 1.31819832e+00 2.35142961e-01] [ 1.63589370e+00 4.41401392e-01 9.60894048e-01 ... 7.98822701e-01 1.17925525e+00 1.37231839e+00] [ 1.06294167e+00 1.21965551e+00 1.05097353e+00 ... -3.53323221e-02 1.07279289e+00 1.55372500e+00]] ... [[ 1.38162923e+00 1.03341520e+00 -6.43861201e-03 ... 6.05165780e-01 5.10663033e-01 9.92055118e-01] [ 4.01724845e-01 4.87103999e-01 1.20788586e+00 ... 2.17416596e+00 5.29655695e-01 1.92422283e+00] [ 7.94277012e-01 1.26533759e+00 1.48943377e+00 ... 7.41010487e-01 1.05400991e+00 1.73413861e+00] ... [ 1.28823948e+00 1.72948575e+00 1.41747817e-01 ... 1.13846159e+00 -3.53719413e-01 1.04871368e+00] [ 1.15188670e+00 1.25702178e+00 8.78551781e-01 ... 7.37676561e-01 1.72015846e+00 1.73800647e+00] [ 1.55510187e+00 9.52651083e-01 1.86048508e+00 ... 9.05982316e-01 1.41589320e+00 8.09353352e-01]] [[ 1.44405222e+00 7.26913571e-01 1.70842421e+00 ... 1.39885736e+00 9.73637819e-01 1.01726210e+00] [ 1.13267994e+00 1.43093109e+00 1.52780068e+00 ... 1.37738097e+00 6.88825727e-01 1.04569066e+00] [ 9.09404397e-01 1.32871091e+00 3.66992354e-01 ... 1.15593076e+00 2.13053393e+00 7.59073317e-01] ... [ 3.95136625e-01 1.66383183e+00 6.75849199e-01 ... 2.85749555e+00 6.37887239e-01 3.24633658e-01] [ 8.21137190e-01 7.81463861e-01 1.12743843e+00 ... 1.13789916e+00 2.07700801e+00 2.43050623e+00] [ 4.04665947e-01 8.34876060e-01 9.82224166e-01 ... 1.27765977e+00 1.48703367e-01 7.69747198e-01]] [[ 6.25477850e-01 1.74320233e+00 9.54834819e-01 ... 2.35666513e+00 9.45132136e-01 1.26631880e+00] [ 6.28902197e-01 1.07936180e+00 1.24010646e+00 ... 1.22371423e+00 1.69183433e+00 1.47874093e+00] [ 4.81698513e-01 1.78068745e+00 3.49141628e-01 ... 
1.34728825e+00 1.54415941e+00 7.66031206e-01] ... [ 1.95347440e+00 5.31364918e-01 1.71479583e+00 ... 1.68934798e+00 1.36322510e+00 1.86909127e+00] [ 8.68440270e-01 6.03214681e-01 4.88905191e-01 ... 1.10959101e+00 9.18855786e-01 1.87194538e+00] [ 9.10920024e-01 1.38332558e+00 -2.95747221e-01 ... 1.61143029e+00 1.78004831e-01 3.75205427e-01]]] [[[ 1.04688871e+00 -9.24551904e-01 9.87162471e-01 ... 5.18241167e-01 -1.10153222e+00 9.44202021e-02] [ 8.15225244e-01 7.39046037e-01 1.21100342e+00 ... -7.60586560e-01 1.10648072e+00 2.52075553e-01] [-1.91065207e-01 1.55797791e+00 9.49855983e-01 ... 5.00583649e-01 1.37648451e+00 9.45588529e-01] ... [-3.05524245e-02 1.18719041e+00 1.02324069e+00 ... 2.00325057e-01 4.28077966e-01 3.34373466e-03] [ 4.95372713e-01 2.54933983e-01 2.86379606e-01 ... -2.68527120e-01 1.11520922e+00 7.78490841e-01] [-1.34973079e-01 -3.86664331e-01 9.85943139e-01 ... 1.70395184e+00 2.74150759e-01 2.00126067e-01]] [[ 7.29076028e-01 5.23362339e-01 2.48701930e-01 ... 5.94473183e-01 3.13098699e-01 3.07856947e-01] [ 5.84824204e-01 1.72348583e+00 5.83923310e-02 ... -1.15762985e+00 6.72047734e-01 1.46326709e+00] [ 1.07868135e-01 -7.22851813e-01 1.11945295e+00 ... 5.04652739e-01 9.29032981e-01 7.79692292e-01] ... [ 4.16256815e-01 9.16893542e-01 -1.02945006e+00 ... 1.23975515e+00 7.49920726e-01 6.30761206e-01] [ 2.71991730e-01 1.11440361e+00 3.79035592e-01 ... -6.46074712e-02 1.63322687e+00 9.21943411e-02] [ 6.79147661e-01 1.31301641e+00 1.47355461e+00 ... -1.04421578e-01 -4.01700824e-01 6.72869444e-01]] [[ 6.60711408e-01 9.03817296e-01 5.75563908e-01 ... 5.00793576e-01 3.89438719e-01 1.26519620e+00] [ 7.16602921e-01 9.50804412e-01 1.23297668e+00 ... 3.02473724e-01 7.02251792e-01 -9.72750008e-01] [ 1.08130252e+00 9.62584853e-01 -3.06299701e-02 ... 1.23477459e+00 6.26407325e-01 1.77440077e-01] ... [ 1.47017956e+00 -1.26206636e-01 6.30793214e-01 ... -2.69972146e-01 8.42363060e-01 9.70844150e-01] [ 7.52586305e-01 5.21317065e-01 -1.24230020e-01 ... 
9.35027778e-01 6.29282236e-01 5.87441087e-01] [ 1.04086566e+00 1.10692406e+00 -7.81223178e-01 ... 9.03742731e-01 1.59854352e+00 3.65091354e-01]] ... [[-9.79545116e-02 -9.92609933e-02 1.13298011e+00 ... 1.00506723e+00 -1.76912665e-01 1.08396387e+00] [ 4.36767668e-01 3.87212396e-01 3.83073509e-01 ... 1.08578980e+00 1.35758566e-02 1.49200451e+00] [ 9.95742619e-01 9.21794891e-01 4.09945726e-01 ... 1.11772013e+00 1.25433433e+00 1.89801633e-01] ... [ 3.97539675e-01 1.10330319e+00 1.23019600e+00 ... 1.68141797e-01 -2.67199934e-01 1.58238971e+00] [ 9.69030976e-01 1.83787405e-01 1.05662274e+00 ... -3.34201843e-01 -1.02486320e-01 5.06503642e-01] [ 6.76533043e-01 9.73925233e-01 1.11137331e+00 ... 7.28479207e-01 -1.66795850e-01 1.94831669e-01]] [[-6.01430953e-01 9.18180227e-01 6.29069567e-01 ... 6.34438157e-01 3.17129612e-01 -7.04536080e-01] [-4.38683599e-01 4.98410106e-01 9.62716579e-01 ... 9.00183141e-01 -1.72210261e-01 -2.99913973e-01] [-5.04068255e-01 7.37442672e-01 9.98056471e-01 ... 7.51505494e-01 1.32216573e+00 1.04137465e-01] ... [ 1.92617178e+00 6.26318604e-02 1.02353132e+00 ... -7.54403472e-01 1.15804923e+00 1.08961189e+00] [-5.00730462e-02 1.15400100e+00 3.39395911e-01 ... 4.60626423e-01 -3.37371409e-01 5.89217603e-01] [ 6.35747373e-01 7.41348937e-02 -9.17563811e-02 ... 1.05625486e+00 5.83397336e-02 2.00041103e+00]] [[ 1.12196982e+00 9.88787115e-01 4.04698849e-02 ... -6.90320730e-01 -2.71225154e-01 -1.25263870e-01] [ 9.38619375e-01 7.10953057e-01 1.87539399e+00 ... -3.74500245e-01 -6.60476983e-02 -3.29355747e-01] [ 1.30085588e-01 1.55227697e+00 7.08384812e-02 ... 1.02661610e+00 9.43564177e-01 -8.25640321e-01] ... [ 4.21519697e-01 3.65035564e-01 -4.28349733e-01 ... 3.69749479e-02 8.49221468e-01 1.38363361e+00] [-1.33325803e+00 1.64540195e+00 -2.86298152e-02 ... 1.16779351e+00 1.26328540e+00 -8.17871094e-01] [ 4.59949821e-01 2.49818349e+00 2.14401484e-01 ... 7.23617077e-01 8.22383642e-01 1.30491769e+00]]] [[[ 7.80517906e-02 5.19621730e-01 2.94747710e-01 ... 
1.89998880e-01 2.50056013e-02 8.30437601e-01] [ 4.37622547e-01 -1.27902210e-01 -4.88760173e-01 ... -1.35065213e-01 4.67585564e-01 2.65112221e-01] [ 4.06942666e-01 4.80961770e-01 -6.47303879e-01 ... 5.67334950e-01 4.20123398e-01 4.68185008e-01] ... [ 2.63116896e-01 9.70261917e-02 1.71759725e-01 ... 5.28057933e-01 -1.05568953e-02 1.23625374e+00] [ 6.61420584e-01 4.51791406e-01 6.13881908e-02 ... 5.28892994e-01 -2.56987303e-01 5.84780693e-01] [ 1.70371890e-01 -3.41318190e-01 7.88647950e-01 ... 6.31408870e-01 -7.74300285e-03 -9.21732839e-03]] [[ 2.44380713e-01 2.27631986e-01 -1.58191577e-01 ... 7.21043468e-01 6.94610528e-04 -1.68823935e-02] [-2.12250590e-01 -3.86642039e-01 3.22786808e-01 ... 3.32750082e-01 3.12854052e-02 5.19369423e-01] [ 2.76456803e-01 5.85568964e-01 5.73659897e-01 ... 2.91082770e-01 -1.82817012e-01 2.70818651e-01] ... [ 3.54317963e-01 2.79596418e-01 -4.45832349e-02 ... -3.53897214e-01 1.62165374e-01 3.46297741e-01] [ 1.19546391e-02 -1.55258432e-01 2.25044116e-01 ... 3.21995348e-01 5.51200390e-01 -2.54543107e-02] [ 3.37618470e-01 1.49947226e-01 -1.99855328e-01 ... 3.96250695e-01 3.47313643e-01 -1.95306093e-01]] [[ 1.58756208e-02 3.00464272e-01 5.62095232e-02 ... 3.80579263e-01 7.56971776e-01 7.64209256e-02] [ 1.67241588e-01 -1.72835309e-02 2.00330943e-01 ... 6.35597855e-02 4.98193175e-01 4.40149844e-01] [ 2.69486189e-01 -1.11377411e-01 3.20277922e-03 ... -4.40022675e-03 -2.03973994e-01 -2.30985418e-01] ... [-3.75322551e-01 3.45727742e-01 -1.01629600e-01 ... -3.15543823e-02 -4.05257702e-01 3.60839188e-01] [ 5.63048065e-01 2.17187166e-01 8.86624008e-02 ... 4.22178209e-01 -8.36741365e-03 7.01098800e-01] [ 5.13312399e-01 2.97180444e-01 7.60344416e-02 ... 1.93707809e-01 -3.54317188e-01 3.99624348e-01]] ... [[ 1.34312987e-01 3.68026793e-01 5.01372933e-01 ... 3.80975366e-01 4.22764838e-01 4.89565670e-01] [-4.81432498e-01 8.82840902e-02 -5.20122766e-01 ... 1.25953957e-01 -5.61943604e-03 4.56399828e-01] [ 5.58878899e-01 1.49272513e-02 3.34632784e-01 ... 
7.31841102e-02 -4.47414443e-02 2.77294010e-01] ... [ 1.63925260e-01 1.09669410e-01 8.87577310e-02 ... 1.13362156e-01 3.51020128e-01 5.09801328e-01] [ 4.73929316e-01 6.28384352e-01 2.50831038e-01 ... -1.18243016e-01 1.03485152e-01 1.61134571e-01] [-3.50273401e-01 2.43477747e-01 1.85749680e-01 ... -4.34319228e-02 2.46626139e-01 4.31629747e-01]] [[ 7.46436656e-01 1.53327093e-01 3.24729919e-01 ... -1.45147562e-01 5.56395531e-01 7.77536482e-02] [ 1.88640416e-01 3.22566092e-01 3.87172908e-01 ... 9.03711081e-01 -1.02238208e-01 5.06163798e-02] [ 5.09351015e-01 6.82834610e-02 -1.68846417e-02 ... 1.51721343e-01 -4.94495273e-01 4.93225545e-01] ... [-1.86993293e-02 4.86679316e-01 -9.93946865e-02 ... 5.61630011e-01 2.16207784e-02 9.97616630e-03] [ 4.75521922e-01 1.41211480e-01 -2.28853285e-01 ... 7.49266967e-02 4.99551773e-01 2.49966756e-01] [ 3.58045965e-01 2.82704830e-01 1.72949970e-01 ... 6.49019361e-01 -2.79070716e-02 1.45186484e-01]] [[ 2.53224969e-01 1.11956298e-01 3.97960454e-01 ... 6.07653856e-01 -2.13227972e-01 -5.50732970e-01] [ 4.96567637e-01 6.17559433e-01 4.39217895e-01 ... -1.33558631e-01 3.80221337e-01 3.31746668e-01] [-6.32837266e-02 1.02754012e-01 7.59333670e-01 ... 5.56423545e-01 3.84346634e-01 -6.21526361e-01] ... [-2.94434279e-01 2.58764535e-01 3.07434052e-01 ... -1.68329522e-01 2.07815915e-01 4.75967944e-01] [ 2.06194311e-01 4.06662852e-01 6.90656543e-01 ... 3.92113566e-01 -1.14525810e-01 -2.29820535e-01] [-4.08542633e-01 -3.95206362e-02 -1.36325300e-01 ... -2.22754613e-01 1.54572561e-01 5.39726198e-01]]]] ... [[[[-7.65460849e-01 -2.15457127e-04 -1.09750867e-01 ... -7.87063003e-01 -3.37046623e-01 -6.56313241e-01] [ 3.72366101e-01 4.09975111e-01 -1.05888951e+00 ... -6.75718427e-01 -3.24151397e-01 -7.68467069e-01] [-5.08600473e-01 -1.18038201e+00 4.50535975e-02 ... 3.53970975e-01 -4.09218222e-01 -5.20576596e-01] ... [-2.11592898e-01 5.00985980e-01 5.69284797e-01 ... 
2.88722098e-01 -4.25640762e-01 -1.53581470e-01] [ 3.19686741e-01 4.54740226e-02 1.66694835e-01 ... 1.88335061e-01 -5.74090471e-03 -4.11069952e-02] [-8.59853029e-02 -4.65297461e-01 -1.15804195e+00 ... 1.48855612e-01 -2.17276573e-01 -1.39238149e-01]] [[-8.16575289e-01 -3.29191536e-01 9.29175436e-01 ... -4.89308178e-01 -4.70433116e-01 -1.71645135e-01] [-9.98489410e-02 2.27987260e-01 4.40916777e-01 ... -6.16504788e-01 -7.19874799e-01 -1.25117922e+00] [-5.83523214e-01 -2.82480538e-01 8.13729048e-01 ... -7.45174944e-01 5.86972162e-02 -5.44554889e-01] ... [ 4.15265918e-01 -6.08276486e-01 -1.05393028e+00 ... -1.81705141e+00 -5.58142185e-01 -4.59340751e-01] [-3.32717657e-01 8.58039856e-01 -6.31447613e-01 ... -2.61536360e-01 -8.29007864e-01 -8.14814210e-01] [-3.41464616e-02 1.74669355e-01 6.05547965e-01 ... -2.64584571e-01 -1.32149443e-01 4.48638231e-01]] [[-6.70143068e-01 -3.46528500e-01 2.80744761e-01 ... 7.29236782e-01 -4.19344783e-01 5.63490689e-01] [-2.15513781e-01 4.33811873e-01 -8.35036457e-01 ... -7.20301092e-01 -2.91732103e-01 -7.64873624e-01] [-1.05008972e+00 -6.89737558e-01 4.39144552e-01 ... 4.13519681e-01 2.40364730e-01 8.07456896e-02] ... [-4.16030943e-01 -1.71615847e-03 5.67495286e-01 ... -1.14872627e-01 5.15764117e-01 -7.14682996e-01] [-8.11327040e-01 -3.50798339e-01 -1.14978023e-01 ... -9.71209109e-01 3.48606855e-01 9.31449115e-01] [-1.67104438e-01 -4.94143814e-01 -1.88833177e-01 ... -2.18806013e-01 -8.16898882e-01 -5.34840599e-02]] ... [[ 9.04251277e-01 -6.50418937e-01 1.11955218e-01 ... 2.66251564e-01 2.61244625e-01 -1.04549170e+00] [ 3.22712302e-01 -1.05119443e+00 -5.22948086e-01 ... -5.29255606e-02 -6.08058453e-01 -4.21539158e-01] [ 3.82705599e-01 -2.04081282e-01 -5.12777641e-03 ... -3.17942649e-01 5.08950174e-01 -1.98945001e-01] ... [ 6.31793261e-01 -3.34994681e-03 -7.99546912e-02 ... 5.87530613e-01 -8.11092675e-01 1.70052037e-01] [-8.01501989e-01 -1.73691288e-01 3.35262835e-01 ... 
6.32330239e-01 3.02377343e-01 -4.79533195e-01] [-4.28861111e-01 -8.29370439e-01 -3.35920304e-01 ... -1.06852984e+00 1.70883402e-01 1.25055805e-01]] [[-1.16778314e+00 -7.89203644e-01 -2.85526127e-01 ... 2.56079257e-01 -1.29623663e+00 6.95522904e-01] [ 5.33167005e-01 7.44464993e-02 -1.72116935e-01 ... -3.29746485e-01 6.52906299e-01 -1.31300163e+00] [-9.15047228e-02 -2.65900791e-01 -3.74172717e-01 ... 3.28262508e-01 8.08828056e-01 -8.46972615e-02] ... [ 4.51260477e-01 -4.98554468e-01 -3.73649716e-01 ... 5.34234583e-01 -4.72269624e-01 3.24137248e-02] [-1.64701864e-01 -1.57022998e-01 -4.57466364e-01 ... -9.17259812e-01 -8.60621691e-01 -9.82183814e-01] [-2.84849763e-01 -3.57564807e-01 -4.71020222e-01 ... 1.05567348e+00 5.90526700e-01 4.68559004e-02]] [[-1.05022490e+00 8.58363211e-01 -7.79038548e-01 ... -2.25106463e-01 -4.48426977e-02 -8.02898526e-01] [ 3.80058855e-01 3.44761908e-01 -2.60655463e-01 ... -1.66171515e+00 1.50615966e+00 -1.57090098e-01] [-2.23091200e-01 -2.87602007e-01 -5.22168040e-01 ... -2.03654408e-01 -8.52508605e-01 8.66634905e-01] ... [-5.31103969e-01 -1.68064997e-01 -8.37954655e-02 ... -2.82770962e-01 3.21298897e-01 -2.56557763e-01] [-5.79176731e-02 -7.49357820e-01 3.64072829e-01 ... -1.01108086e+00 -8.36159810e-02 -1.56109309e+00] [-2.09432542e-01 1.03457642e+00 4.69049476e-02 ... -6.03000224e-01 8.37256908e-01 -6.63485587e-01]]] [[[-8.35174799e-01 -9.24520016e-01 -5.12936890e-01 ... 1.66729689e-01 4.88760054e-01 -5.63919961e-01] [ 2.05112427e-01 2.70733654e-01 -4.63510901e-01 ... -7.97521651e-01 2.99780250e-01 -3.80003810e-01] [-2.56060541e-01 -6.02294326e-01 -4.53224093e-01 ... -8.91572177e-01 -1.22618996e-01 -6.74324870e-01] ... [-5.88720143e-01 4.57381576e-01 4.79091555e-01 ... 5.12492284e-02 -1.84354976e-01 -7.12965578e-02] [ 1.38287442e-02 -7.17327297e-01 -4.11192179e-01 ... 2.17509642e-02 4.09068950e-02 -6.99816525e-01] [-8.78169894e-01 -1.31045252e-01 -6.17753947e-03 ... 
-6.76681638e-01 3.97748291e-01 6.19879007e-01]] [[-2.85733730e-01 -1.86431315e-02 -2.39829555e-01 ... -1.68215171e-01 -4.27479863e-01 -8.66946518e-01] [-4.66670364e-01 -2.84593910e-01 3.41089785e-01 ... 5.36497474e-01 -5.13968945e-01 5.21920919e-01] [ 1.00392684e-01 -1.64306685e-01 -4.25222188e-01 ... 2.55528867e-01 -6.39264226e-01 -5.38749337e-01] ... [-3.07451814e-01 -5.39688803e-02 -5.80529392e-01 ... -1.28463268e-01 -2.04124287e-01 -6.72342122e-01] [-2.32436299e-01 -2.04058364e-01 6.99003339e-02 ... -1.18280363e+00 2.92338938e-01 -1.35129169e-01] [-5.94025731e-01 -1.53254911e-01 3.12695324e-01 ... -4.66628134e-01 -6.99690342e-01 3.64015907e-01]] [[-6.81864440e-01 -2.33646408e-01 4.40078974e-01 ... -1.71483234e-01 4.83599782e-01 -1.12552434e-01] [-3.79092216e-01 -6.77405417e-01 -4.71766740e-01 ... -7.87432119e-02 -1.53429369e-02 -1.13971448e+00] [ 1.01518214e-01 -7.56191760e-02 1.88340738e-01 ... -2.62838274e-01 -2.47901872e-01 -2.30529562e-01] ... [-1.21293403e-01 -3.06033909e-01 1.32043555e-01 ... -2.19017923e-01 -7.10254729e-01 -2.36231953e-01] [-6.16058826e-01 -8.49216163e-01 -3.35545182e-01 ... 6.18147314e-01 -1.65524915e-01 -3.07310466e-02] [-1.64773673e-01 -1.41335264e-01 -2.19678745e-01 ... 5.22134423e-01 -5.20460725e-01 -3.43404442e-01]] ... [[-5.53650677e-01 -3.86153340e-01 2.55306184e-01 ... -3.24527591e-01 1.41768128e-01 -8.68673027e-02] [-4.75041062e-01 -8.92012954e-01 1.82625532e-01 ... -8.29786122e-01 2.03377470e-01 5.71713626e-01] [-5.82281314e-02 1.82063758e-01 -4.15561974e-01 ... 3.72522265e-01 -5.57976961e-01 3.24022710e-01] ... [-1.12233318e-01 -2.45250210e-01 2.11092427e-01 ... -2.82915115e-01 -1.10500708e-01 -2.86366612e-01] [-2.09589116e-02 2.36516207e-01 1.89003438e-01 ... -1.45076215e-01 1.58908647e-02 -2.83455938e-01] [-1.44475415e-01 2.84690201e-01 3.49973500e-01 ... -8.33807215e-02 1.39531540e-02 -2.27476418e-01]] [[-4.05932993e-01 -2.34020069e-01 1.98904961e-01 ... 
-2.76840925e-01 7.81366751e-02 1.97967097e-01] [-4.70152408e-01 -3.77809763e-01 7.58721411e-01 ... -5.38936257e-01 -2.97394931e-01 -1.25191703e-01] [-1.25269517e-01 -6.78719103e-01 -2.17352584e-01 ... -1.02412844e+00 -5.99044025e-01 3.80996317e-02] ... [-2.21759364e-01 -6.87038779e-01 8.49438384e-02 ... -6.46402121e-01 -1.37985691e-01 -3.08381289e-01] [ 8.29340424e-03 -4.20528084e-01 4.21664178e-01 ... 3.02678943e-01 2.86032349e-01 -3.67041469e-01] [-7.57536709e-01 -1.47919908e-01 -1.06739700e-01 ... 2.20326126e-01 -1.03254831e+00 -3.44037294e-01]] [[-8.86045396e-01 -1.25559881e-01 -5.12439847e-01 ... -3.66087854e-02 2.41817072e-01 -4.38283950e-01] [-5.15997857e-02 -2.18809158e-01 -2.84935594e-01 ... 4.32301372e-01 -4.08594131e-01 -4.18494642e-01] [-6.13341153e-01 -5.26026666e-01 1.64395407e-01 ... -1.18761230e+00 -7.70034492e-02 -5.01215398e-01] ... [-6.33667707e-01 -1.02954304e+00 -1.08941221e+00 ... -6.73250198e-01 6.86993539e-01 -6.51265442e-01] [-5.59078038e-01 -2.20816389e-01 3.19315074e-03 ... 4.56782877e-01 -1.14411883e-01 8.88397824e-03] [-3.91702503e-01 8.60986650e-01 -9.44307089e-01 ... 4.62865233e-02 -7.48111963e-01 -6.76806688e-01]]] [[[ 1.31135833e+00 2.65823460e+00 -2.50621408e-01 ... 1.77991748e+00 3.06818295e+00 -2.21934393e-02] [ 6.44766390e-01 1.21417260e+00 3.54779506e+00 ... 2.09190297e+00 -1.69410557e-01 -2.41158783e-01] [-8.52596760e-01 2.73119479e-01 2.08429098e+00 ... 1.67794847e+00 1.44577599e+00 -1.71367019e-01] ... [ 3.68827438e+00 1.35610133e-01 9.74317372e-01 ... 2.17965052e-01 3.09561276e+00 1.55192506e+00] [ 2.30773020e+00 8.37206781e-01 4.75516737e-01 ... 5.84040701e-01 2.39156032e+00 -5.48981428e-01] [ 8.51593971e-01 2.31171441e+00 1.38857400e+00 ... -1.33064020e+00 2.38127136e+00 1.29529703e+00]] [[ 1.25878537e+00 1.91857064e+00 7.16514051e-01 ... 2.17956471e+00 -1.12070754e-01 1.24214375e+00] [ 3.84782016e-01 2.80668163e+00 1.29412496e+00 ... 
3.37936497e+00 1.00922179e+00 8.50820124e-01] [ 1.56607449e+00 -9.12095308e-01 1.71684861e+00 ... -6.63568139e-01 1.32172012e+00 2.92912436e+00] ... [ 1.24223197e+00 6.16364241e-01 2.56019735e+00 ... -1.15429807e+00 -5.35685241e-01 2.61491752e+00] [ 2.56988257e-01 -6.58896863e-01 -1.12061048e+00 ... 2.48253679e+00 9.90531594e-02 2.63136911e+00] [ 8.21631908e-01 -1.94516718e-01 2.05848885e+00 ... 5.61824799e-01 -7.77772069e-01 1.33352113e+00]] [[ 3.10713983e+00 8.30244362e-01 1.50088251e+00 ... -2.85670012e-01 -4.46484715e-01 7.88688362e-01] [ 8.22373152e-01 2.62153268e+00 1.20228958e+00 ... 2.37104726e+00 9.42753732e-01 2.31998086e+00] [ 1.91255593e+00 2.88291740e+00 1.47177219e+00 ... 8.31230283e-01 1.12038696e+00 1.26237237e+00] ... [ 8.88895035e-01 3.11653733e-01 1.55829620e+00 ... 8.04215610e-01 1.33169127e+00 5.64712405e-01] [ 7.28236914e-01 -4.46815342e-01 4.00659561e-01 ... -1.25653818e-01 3.09500635e-01 2.43737555e+00] [ 1.29202652e+00 7.39344180e-01 2.06216097e+00 ... 1.32718956e+00 -5.13811111e-01 1.27821016e+00]] ... [[ 1.41514003e-01 9.22869146e-02 4.06042427e-01 ... 1.50671208e+00 6.45665526e-01 -8.89314115e-02] [ 9.32078481e-01 2.71299386e+00 2.29232192e+00 ... 1.19896126e+00 1.49229705e+00 1.59450185e+00] [ 3.06402230e+00 2.34737945e+00 1.87316418e+00 ... 2.73910046e+00 1.71857691e+00 2.49462390e+00] ... [ 3.91912889e+00 -1.06176743e-02 1.53900242e+00 ... 2.70940375e+00 1.26497543e+00 1.19679260e+00] [ 1.59769559e+00 1.55387759e+00 1.72006273e+00 ... -3.38591099e-01 1.62100887e+00 7.31755614e-01] [ 1.69602060e+00 2.45075965e+00 1.22699082e+00 ... 5.15205003e-02 1.01082671e+00 7.27711439e-01]] [[ 1.69378710e+00 1.03574109e+00 6.12403452e-01 ... 1.68230021e+00 2.30751061e+00 3.17643225e-01] [ 6.95512652e-01 2.56699204e-01 2.53609133e+00 ... 6.64840877e-01 1.71484971e+00 9.89475012e-01] [ 1.61361372e+00 -3.72298777e-01 1.75595248e+00 ... 1.87467229e+00 3.67872190e+00 1.31349957e+00] ... [-4.87730771e-01 -1.06721842e+00 1.83409584e+00 ... 
1.02393591e+00 2.29565072e+00 1.92718375e+00] [ 2.31250858e+00 1.82403433e+00 -6.68781936e-01 ... 2.69365859e+00 1.61692321e+00 2.57648611e+00] [ 2.21144819e+00 1.42941988e+00 3.46507639e-01 ... 4.86257821e-01 7.09648132e-01 1.56298792e+00]] [[ 1.47709274e+00 2.11781597e+00 3.17609692e+00 ... 1.49998677e+00 1.01546812e+00 -1.71340570e-01] [ 1.74118531e+00 1.20940459e+00 1.64612556e+00 ... -4.12156552e-01 -1.03622437e+00 1.81540692e+00] [-4.96622264e-01 2.44769454e-01 2.37502217e+00 ... 3.99935663e-01 3.48454189e+00 1.18040156e+00] ... [ 1.31007934e+00 1.91936243e+00 1.27146471e+00 ... -1.70896247e-01 8.93642426e-01 1.54207900e-01] [ 1.21647727e+00 1.29643130e+00 1.52660644e+00 ... 1.01931727e+00 5.14993489e-01 9.61993754e-01] [-2.65354812e-01 -5.54964125e-01 8.99109542e-01 ... -5.19759893e-01 1.64648330e+00 9.85885918e-01]]] [[[ 1.35236681e+00 1.71090245e+00 7.95765698e-01 ... 1.73040390e+00 2.21582699e+00 6.56006336e-01] [ 2.15056300e+00 1.29722977e+00 1.48815715e+00 ... 7.58028626e-01 9.84258533e-01 1.06132185e+00] [ 1.71488798e+00 9.87473428e-01 1.13434601e+00 ... 5.29228628e-01 1.63449097e+00 1.87534094e+00] ... [ 8.34652722e-01 1.17873704e+00 1.06163013e+00 ... 1.72758722e+00 1.79549849e+00 1.71202886e+00] [ 4.03876841e-01 1.09066451e+00 1.20264304e+00 ... 1.42626536e+00 1.62563717e+00 9.34078157e-01] [ 1.65864873e+00 5.26338518e-01 1.78178573e+00 ... 1.17632163e+00 8.38482976e-01 4.67353195e-01]] [[ 5.64670205e-01 7.59371579e-01 2.16359615e+00 ... 1.69954574e+00 1.53638768e+00 6.48493886e-01] [ 1.18759477e+00 1.10965288e+00 1.46086431e+00 ... 8.56404185e-01 7.53417492e-01 2.13617444e+00] [ 1.69365931e+00 1.06920445e+00 1.26092160e+00 ... 1.49740434e+00 3.76060158e-01 1.13231528e+00] ... [ 2.50472093e+00 9.61070657e-01 1.02682686e+00 ... 3.97007048e-01 1.97369719e+00 2.15783215e+00] [ 7.50291646e-01 8.94450068e-01 1.13855875e+00 ... 9.34158683e-01 9.36940134e-01 7.13539064e-01] [ 6.76312566e-01 3.95746946e-01 1.94377804e+00 ... 
5.14651597e-01 1.47373736e+00 1.74714100e+00]] [[ 9.31096911e-01 1.87022376e+00 1.48527539e+00 ... 1.18602860e+00 7.98073828e-01 1.16779423e+00] [ 1.68398261e+00 1.70585394e+00 1.30756545e+00 ... 7.26631999e-01 1.08380544e+00 1.06051815e+00] [ 1.56052279e+00 1.10918987e+00 1.00186074e+00 ... 7.44085073e-01 5.11227489e-01 1.74005985e+00] ... [ 5.50189734e-01 1.40608597e+00 1.39391291e+00 ... 1.81729209e+00 6.17263675e-01 1.53219557e+00] [ 1.42127216e+00 7.58812428e-01 1.55632889e+00 ... 7.85152435e-01 1.11365698e-01 7.14940548e-01] [ 3.10619712e-01 1.22099578e+00 9.94821787e-01 ... 3.51003557e-01 1.63747919e+00 8.85211527e-01]] ... [[ 9.01964784e-01 1.25341141e+00 1.01829183e+00 ... 4.21111047e-01 1.35189712e+00 8.23662937e-01] [ 1.06998587e+00 8.51613402e-01 1.53401637e+00 ... 1.66981697e+00 1.27982748e+00 1.20467794e+00] [ 2.71652013e-01 1.10744631e+00 8.74555707e-01 ... 1.44753003e+00 1.05342484e+00 1.69226122e+00] ... [ 1.40547335e+00 6.39110148e-01 2.29039952e-01 ... 9.34541166e-01 1.69720101e+00 1.13998568e+00] [ 1.68065727e+00 1.59695709e+00 1.78639424e+00 ... 6.90783024e-01 1.01632631e+00 7.99449742e-01] [ 1.17846489e+00 1.75504911e+00 1.59015417e+00 ... 1.30034482e+00 1.15233958e+00 6.68082416e-01]] [[ 3.93426150e-01 1.53718472e+00 1.97838032e+00 ... 8.91383111e-01 1.90095472e+00 1.32054257e+00] [ 6.60707176e-01 6.21531665e-01 5.08907259e-01 ... 9.22208488e-01 1.53563869e+00 1.31049323e+00] [ 1.13671350e+00 1.84339523e+00 1.51188540e+00 ... 1.11569118e+00 1.43280399e+00 1.68275988e+00] ... [ 1.60476542e+00 1.15929866e+00 2.11808634e+00 ... 1.72040057e+00 1.09925425e+00 1.83879817e+00] [ 1.66019368e+00 7.43053377e-01 1.49158370e+00 ... 1.42328310e+00 1.23369193e+00 9.28143144e-01] [ 8.32915425e-01 1.47383881e+00 5.83845377e-01 ... 7.00520754e-01 1.60053229e+00 1.96889961e+00]] [[ 1.18795645e+00 1.23063076e+00 8.90647113e-01 ... 1.80684507e+00 9.76601899e-01 1.27923763e+00] [ 8.59772861e-01 2.54371905e+00 1.64892292e+00 ... 
9.38630760e-01 1.01740444e+00 1.18831539e+00] [ 2.54926831e-01 2.57954866e-01 3.61207217e-01 ... 1.06890142e+00 8.38333368e-01 1.39805281e+00] ... [ 6.04775667e-01 9.84433532e-01 1.18834376e+00 ... 1.80403876e+00 9.64126885e-01 6.92721128e-01] [ 1.16306269e+00 7.31701493e-01 1.51410663e+00 ... 7.72041976e-01 1.87519574e+00 4.54821676e-01] [ 1.39867949e+00 1.42243242e+00 9.63386595e-01 ... 1.29322755e+00 1.13025010e+00 6.94562674e-01]]] [[[ 1.27073094e-01 -6.96043968e-02 8.62389743e-01 ... -4.53762442e-01 9.39011812e-01 5.53393841e-01] [ 7.80079246e-01 -5.25179505e-01 3.00663114e-01 ... 8.24378654e-02 3.94544512e-01 1.37986267e+00] [-6.10908866e-01 2.38207668e-01 5.24240434e-01 ... 1.09242880e+00 1.38725400e+00 -4.78266239e-01] ... [ 9.51163352e-01 4.41914439e-01 1.28900242e+00 ... -7.11122632e-01 1.82440841e+00 9.71909702e-01] [ 8.68212283e-01 1.25639129e+00 4.12567973e-01 ... 9.78061378e-01 1.61760938e+00 1.03080623e-01] [ 4.49268892e-02 -9.12619755e-02 3.80202681e-01 ... 3.09982181e-01 2.34165549e-01 5.00541151e-01]] [[ 1.59466654e-01 -3.72159421e-01 4.40702587e-01 ... 9.20427799e-01 2.35311866e+00 3.14598143e-01] [ 8.97852242e-01 1.08747959e-01 1.37310278e+00 ... 9.27086115e-01 3.46042216e-02 9.71899450e-01] [ 1.46206588e-01 1.81107879e-01 -1.33243859e+00 ... 1.44115555e+00 -4.65069294e-01 9.46499288e-01] ... [ 2.89745539e-01 2.97336549e-01 -1.27287567e-01 ... 9.02064860e-01 -8.94735694e-01 7.04147279e-01] [ 6.52701557e-01 2.62610972e-01 5.81279337e-01 ... 6.04050159e-01 1.91603124e-01 8.48708302e-02] [-8.86321664e-01 -1.28124505e-01 1.11018205e+00 ... -3.18650752e-01 1.00721979e+00 7.57392585e-01]] [[ 6.39974415e-01 2.23786521e+00 -1.83565959e-01 ... 6.26570523e-01 -6.17459416e-01 2.78800875e-01] [ 5.96018314e-01 8.29437315e-01 -3.00907165e-01 ... 8.48902106e-01 2.35087708e-01 -6.21355593e-01] [ 1.10808206e+00 1.46520302e-01 1.37089157e+00 ... 5.54517686e-01 1.13610673e+00 8.78588557e-01] ... [ 7.00364172e-01 1.47761631e+00 1.01417446e+00 ... 
5.39268613e-01 6.94356620e-01 9.10641402e-02] [-7.00659871e-01 7.91004658e-01 7.65712261e-02 ... 2.54310310e-01 -5.49731314e-01 -6.77730083e-01] [ 1.12242460e-01 1.30618763e+00 4.92551595e-01 ... 3.83950472e-01 -1.44162979e-02 3.67677897e-01]] ... [[ 1.07393003e+00 1.25889969e+00 5.49407542e-01 ... -1.06716134e-01 -1.12728588e-01 -4.03509997e-02] [-3.69739771e-01 -1.00172627e+00 1.49027735e-01 ... -3.16544026e-01 -1.54704556e-01 2.44559005e-01] [-2.23034680e-01 2.23959804e-01 1.39604616e+00 ... -3.45255077e-01 3.70438516e-01 4.15403068e-01] ... [ 3.81928086e-01 3.45489621e-01 3.88297796e-01 ... -4.56058569e-02 1.01548052e+00 -1.83125094e-01] [ 5.51171042e-02 1.40092158e+00 2.59557694e-01 ... 9.20142651e-01 -2.96138152e-02 3.61884028e-01] [ 9.57734168e-01 2.08096039e-02 1.10024166e+00 ... 9.48374927e-01 6.17100000e-01 1.25739992e+00]] [[ 1.24457371e+00 -6.09520316e-01 1.84371531e+00 ... 2.99099892e-01 6.23967946e-01 1.16089630e+00] [ 1.64928555e+00 3.29921842e-01 -3.23050618e-01 ... 3.86582166e-01 5.64755082e-01 -5.06223142e-01] [ 7.95897365e-01 1.20588386e+00 2.14429212e+00 ... 6.95327103e-01 4.04719234e-01 1.46250868e+00] ... [ 2.88178831e-01 6.20879412e-01 3.01327467e-01 ... -4.29959930e-02 -5.64284086e-01 7.88906515e-01] [ 1.37809336e-01 1.07629466e+00 7.64034510e-01 ... 1.63527176e-01 1.88617840e-01 5.78720808e-01] [ 4.11178917e-01 1.21677592e-01 1.33603007e-01 ... 9.10881817e-01 7.09834695e-02 6.45831108e-01]] [[-9.72179651e-01 3.54346633e-01 -1.12969935e+00 ... 4.17887300e-01 4.18173492e-01 1.37997776e-01] [-4.83375609e-01 -5.28754033e-02 -1.68301746e-01 ... 1.10067725e+00 5.02831452e-02 -9.92929116e-02] [ 9.08659756e-01 -5.43039560e-01 -2.75875598e-01 ... 2.28505433e-01 1.90864980e-01 1.73935741e-01] ... [ 4.77009326e-01 5.34401894e-01 5.92019260e-01 ... 1.78386223e+00 8.59436274e-01 1.40808892e+00] [ 1.10081315e+00 -5.42573035e-01 1.37193656e+00 ... 3.13831061e-01 6.43276513e-01 3.56653839e-01] [ 1.77232072e-01 1.44559979e+00 4.19485062e-01 ... 
1.59986460e+00 1.36667514e+00 1.02328885e+00]]] [[[ 4.62120324e-01 6.59819245e-01 1.43677339e-01 ... 3.15135241e-01 -7.70725086e-02 3.88782799e-01] [ 5.25126815e-01 1.96307272e-01 4.90510166e-01 ... -3.38748157e-01 5.92642844e-01 -3.56501669e-01] [ 6.66838944e-01 1.81997806e-01 5.33611655e-01 ... -3.58732700e-01 -5.32997921e-02 5.00759423e-01] ... [ 4.82869506e-01 9.20978129e-01 2.11727664e-01 ... -5.01078010e-01 7.89270759e-01 6.05671406e-01] [ 6.58619180e-02 3.00730318e-01 1.29790142e-01 ... -2.68147022e-01 9.27909557e-03 8.81767392e-01] [ 4.39113408e-01 -6.97626546e-02 -5.41396551e-02 ... 5.69620073e-01 6.48674428e-01 5.52332997e-01]] [[ 2.98074841e-01 5.71326576e-02 -2.63428539e-01 ... -1.39778331e-01 9.68027771e-01 3.22420150e-01] [ 2.02005938e-01 -4.35453095e-02 8.88253376e-02 ... -1.61654800e-02 8.98045421e-01 3.35728765e-01] [ 6.37409270e-01 6.23085618e-01 7.91805267e-01 ... 6.13863766e-01 3.30126256e-01 5.32542944e-01] ... [ 2.82253116e-01 5.26865661e-01 -1.57941714e-01 ... -3.79139572e-01 -6.54323772e-02 2.31169209e-01] [ 2.49916896e-01 4.29874808e-01 5.23549840e-02 ... 3.92308384e-01 2.20217168e-01 6.62259877e-01] [ 3.32959533e-01 1.02798074e-01 -5.65208122e-02 ... 3.54961753e-01 2.60369837e-01 -3.83215725e-01]] [[ 3.14141005e-01 3.53877097e-02 -1.77277550e-01 ... 5.44309616e-01 3.79132450e-01 -6.80211606e-03] [ 5.09459555e-01 4.72137839e-01 4.32044119e-01 ... 5.70621043e-02 -5.08396439e-02 1.22190185e-01] [ 3.70177001e-01 -2.44614422e-01 9.77157131e-02 ... 3.34184766e-02 2.28072122e-01 8.16330612e-01] ... [ 6.04906917e-01 9.43060696e-01 4.98532325e-01 ... 1.22302406e-01 -7.93477669e-02 -1.03800923e-01] [ 2.46629715e-01 4.76179898e-01 -8.09628367e-02 ... 1.29097447e-01 -4.90582794e-01 2.29836643e-01] [-2.42997259e-01 6.54513419e-01 -4.26425099e-01 ... -4.57495674e-02 2.64733791e-01 -4.19713140e-01]] ... [[ 4.56573546e-01 3.56051415e-01 1.68471143e-01 ... 4.83988792e-01 -3.18103343e-01 3.92730534e-01] [ 5.38497269e-01 4.27071303e-01 2.42938533e-01 ... 
6.83548093e-01 7.80084059e-02 -1.92820877e-01] [ 3.96704465e-01 7.90156499e-02 2.38792062e-01 ... 3.64707440e-01 3.52889150e-01 -9.18238610e-03] ... [ 1.03418127e-01 3.09858203e-01 -1.13362402e-01 ... 7.97905028e-02 3.03420544e-01 -2.32441928e-02] [ 3.99335474e-01 -1.87294990e-01 7.88513348e-02 ... 6.35414720e-02 5.78406155e-01 -2.83252597e-01] [ 9.00969088e-01 2.91501224e-01 4.26941633e-01 ... 3.02148849e-01 6.46481335e-01 5.92049539e-01]] [[-1.31291211e-01 4.73787546e-01 -3.75845492e-01 ... -2.48882085e-01 2.81389445e-01 -1.49398804e-01] [ 1.97907314e-01 2.94846445e-02 -2.60152370e-01 ... 4.11205322e-01 -1.54568255e-01 2.05568284e-01] [ 7.95506656e-01 3.78442049e-01 2.82075256e-01 ... 1.21742420e-01 5.57618082e-01 2.73660421e-01] ... [ 2.70168722e-01 -8.40132907e-02 -8.77609402e-02 ... 3.73103291e-01 3.17075908e-01 5.42008281e-01] [ 1.59406036e-01 -8.97120312e-02 6.84219122e-01 ... 4.01206493e-01 6.81035146e-02 3.64379548e-02] [ 4.51117069e-01 -4.02476788e-01 2.91198373e-01 ... 3.42271447e-01 -1.67689309e-01 2.11950764e-01]] [[ 3.55479658e-01 2.21654996e-01 3.22327584e-01 ... 2.58050144e-01 8.12774710e-03 4.94366765e-01] [-4.92662877e-01 5.87131619e-01 7.96276808e-01 ... 1.66686594e-01 2.34630369e-02 7.24463910e-02] [ 5.33229530e-01 9.96487737e-01 3.82532448e-01 ... 2.26855502e-01 1.96428180e-01 3.13328832e-01] ... [ 2.13973626e-01 3.74237411e-02 -2.37735748e-01 ... 1.94909900e-01 -1.24366671e-01 2.11409450e-01] [ 6.86956272e-02 8.11568677e-01 -3.24554801e-01 ... -1.06127314e-01 1.48619577e-01 1.14326566e-01] [ 4.17400599e-02 3.60219002e-01 1.69377938e-01 ... 5.37499666e-01 2.14247614e-01 1.11510321e-01]]]] [[[[-1.38382480e-01 -2.06035629e-01 3.85654390e-01 ... -1.73226774e-01 -1.89491019e-01 -1.02220035e+00] [-4.54513520e-01 -2.82368343e-02 -2.06316233e-01 ... -1.08337808e+00 9.05143082e-01 -4.09193873e-01] [-1.73249811e-01 7.24492788e-01 -5.33746958e-01 ... -1.87564984e-01 -2.52408739e-02 2.41885990e-01] ... [ 6.82984948e-01 2.19609037e-01 -1.85484201e-01 ... 
-5.83280884e-02 -3.32494318e-01 -2.59498060e-02] [-2.31155545e-01 -8.05264950e-01 4.66989547e-01 ... -2.15178028e-01 2.86244392e-01 3.75874996e-01] [-6.42534614e-01 -4.29522902e-01 3.22539777e-01 ... -1.48783302e+00 2.08160326e-01 4.27500065e-03]] [[-7.01396525e-01 -4.83924359e-01 4.60010082e-01 ... -7.04249978e-01 -4.79564488e-01 -1.21023023e+00] [-5.37791491e-01 1.67876657e-03 -2.51706429e-02 ... -8.00816298e-01 -1.15623161e-01 -1.47745717e+00] [-1.14431751e+00 2.27196813e-01 -1.11916208e+00 ... 4.03061956e-01 2.09411550e-02 -1.80124730e-01] ... [ 4.52341050e-01 -2.83873901e-02 6.27343714e-01 ... 8.68483633e-02 3.67157251e-01 -2.13304147e-01] [-5.21156145e-03 1.59527779e-01 -4.27238613e-01 ... -2.53866315e-01 -2.47919485e-02 -9.89386812e-02] [-3.16943228e-01 1.38956204e-01 -1.06490135e-01 ... 5.31863809e-01 -8.23353976e-02 -1.81397766e-01]] [[-8.72001529e-01 -4.96642850e-02 3.98270786e-01 ... 9.40685906e-03 2.77978718e-01 4.25775871e-02] [-7.95489609e-01 1.29926443e-01 8.33294690e-02 ... -1.68157279e-01 1.22822869e+00 4.73688096e-01] [-1.07561255e+00 -1.34082329e+00 -1.76055089e-01 ... 9.23953131e-02 1.47950619e-01 -1.41055644e+00] ... [-1.12121888e-01 -5.21262527e-01 6.61664188e-01 ... -3.29567015e-01 2.79691994e-01 4.19459701e-01] [-4.28020626e-01 -1.03940237e+00 2.65882965e-02 ... -2.79848307e-01 -7.21119717e-02 1.63375109e-01] [ 1.93566591e-01 4.50186729e-01 -3.39272588e-01 ... -5.46364844e-01 -4.14914519e-01 -5.79133406e-02]] ... [[ 6.31478190e-01 -1.67858936e-02 -6.14656925e-01 ... 9.73743200e-01 1.22104503e-01 -1.03322887e+00] [-3.25444490e-01 4.44341630e-01 -2.11437359e-01 ... 1.22522987e-01 -9.60849583e-01 -5.90919614e-01] [ 2.83759594e-01 7.88742006e-02 4.55079675e-01 ... 1.46572711e-02 3.79462630e-01 -4.77477074e-01] ... [-6.85009956e-02 -9.07759443e-02 -9.02734697e-01 ... -5.33404827e-01 -1.03948855e+00 4.99603271e-01] [-5.23061335e-01 7.66921341e-01 5.95041752e-01 ... 
1.65207699e-01 -7.66126215e-01 -6.38818860e-01] [-1.21128976e+00 4.85512286e-01 7.33429492e-02 ... -4.60795194e-01 -6.57346964e-01 -9.90837514e-02]] [[ 2.47142911e-01 5.10461032e-01 4.00449455e-01 ... -6.20313108e-01 -1.00003608e-01 -1.24185495e-01] [ 1.69164971e-01 -3.21718723e-01 7.53626943e-01 ... -6.92287505e-01 -1.26399541e+00 -3.37941855e-01] [-8.59275877e-01 -9.16717574e-02 4.19971436e-01 ... -4.53335017e-01 -3.61391790e-02 -3.86148989e-01] ... [ 2.33842552e-01 1.60985753e-01 6.05689406e-01 ... 9.81733084e-01 -4.58423674e-01 -4.41390038e-01] [ 6.13615930e-01 2.78276473e-01 -7.75885224e-01 ... 3.35810661e-01 6.58407360e-02 -2.59856552e-01] [-1.02320075e+00 -6.47354007e-01 1.15144208e-01 ... 2.44665712e-01 -3.04826051e-01 -6.32219970e-01]] [[-6.12606525e-01 -5.34910202e-01 -6.57995194e-02 ... 6.81910872e-01 -7.47110724e-01 1.82209626e-01] [ 1.30151391e-01 -1.31937653e-01 -2.93549359e-01 ... 1.43223340e-02 6.79704428e-01 -3.03454727e-01] [ 8.88004124e-01 -9.54394996e-01 -3.36687595e-01 ... -5.65690100e-01 5.69454789e-01 -8.28517452e-02] ... [ 3.88627648e-02 -2.09191348e-02 2.25641116e-01 ... 4.45640892e-01 -3.35554481e-01 4.50101532e-02] [-9.53221321e-01 8.77215624e-01 1.83714077e-01 ... 6.25280321e-01 5.49701929e-01 7.76244938e-01] [-6.15189135e-01 -1.01192355e+00 -7.91465342e-01 ... 2.42542163e-01 -1.80445886e+00 -5.81641234e-02]]] [[[ 1.54987186e-01 -5.49879909e-01 -6.99710786e-01 ... -2.68785171e-02 -7.86584079e-01 -1.32956088e-01] [ 9.57192108e-03 -1.92860305e-01 2.14837934e-03 ... -4.94637400e-01 -2.71615297e-01 5.20074256e-02] [-7.34721273e-02 5.59248850e-02 -4.82421704e-02 ... -2.15599731e-01 -5.25500298e-01 -2.43215546e-01] ... [-3.44984084e-01 6.92118183e-02 -1.97917260e-02 ... -2.40779117e-01 5.04027121e-02 -6.64734066e-01] [-7.51951218e-01 -5.03569126e-01 -4.69485611e-01 ... -3.74001026e-01 -1.45353764e-01 -2.65811175e-01] [-2.86013335e-01 -1.90692991e-01 -2.62680471e-01 ... 
-4.42271471e-01 -1.86907098e-01 3.58401984e-01]] [[ 2.57095713e-02 -3.13026398e-01 2.95933485e-01 ... 8.53785127e-03 -8.32038999e-01 2.88463771e-01] [ 1.60204083e-01 1.29360378e-01 1.57297283e-01 ... -1.36102247e+00 -6.55220568e-01 4.25706327e-01] [ 4.45878729e-02 -4.19109762e-02 7.87402410e-03 ... -2.37017214e-01 3.01019937e-01 -5.67350030e-01] ... [-3.61149192e-01 -3.48185480e-01 -4.37082201e-01 ... -7.61245489e-01 -2.19946355e-01 -6.69395208e-01] [-1.85893416e-01 -1.20164528e-01 4.52230901e-01 ... -1.15627877e-01 -1.26295358e-01 2.40526989e-01] [ 2.09252015e-01 1.96258649e-01 -5.27835846e-01 ... -5.82667515e-02 1.39824972e-02 3.49502504e-01]] [[ 1.65963292e-01 3.72994654e-02 4.14879292e-01 ... -5.46894968e-01 -4.32190448e-01 9.47578698e-02] [-1.36274529e+00 -6.91724360e-01 -5.06139576e-01 ... 8.61967802e-01 -8.09064746e-01 -2.55110711e-01] [-1.11690842e-01 -9.95914489e-02 -2.45335713e-01 ... -1.47883877e-01 -3.22854668e-01 -7.87115276e-01] ... [-3.75332922e-01 -4.65570420e-01 -1.05483457e-01 ... -2.10370645e-01 5.50626755e-01 -1.21439144e-01] [-4.29899246e-01 -5.85781932e-01 -7.11769402e-01 ... -1.49974555e-01 5.90288937e-01 -1.49528250e-01] [-5.49819767e-01 -3.48821491e-01 -5.85647285e-01 ... -9.47696030e-01 -3.79637271e-01 -5.11592686e-01]] ... [[-1.69990823e-01 1.27684623e-01 -1.16001465e-03 ... -3.05429012e-01 -5.13639927e-01 -5.65779805e-01] [-9.18141663e-01 9.28236023e-02 2.50542372e-01 ... -7.33650506e-01 -1.33466855e-01 -7.96474814e-01] [ 4.78529096e-01 5.98921716e-01 -8.02678347e-01 ... 8.69166076e-01 -1.88111201e-01 -3.95407706e-01] ... [-9.02524963e-02 -1.23498328e-01 -4.12803084e-01 ... -4.64094430e-01 -2.83044558e-02 -1.87096059e-01] [ 2.61789411e-01 8.67371559e-01 -2.37009943e-01 ... -4.04397160e-01 9.76383805e-01 4.25578374e-03] [ 3.48007560e-01 4.10959125e-01 8.49192142e-02 ... -2.60880828e-01 -1.81377411e-01 -1.83264196e-01]] [[-2.78007239e-01 -5.56048393e-01 1.29725978e-01 ... 
-2.40352680e-03 1.08360434e+00 -3.34540904e-01] [-2.63001531e-01 -6.29276931e-01 -1.88935667e-01 ... 4.35232848e-01 2.25446448e-01 -5.72591543e-01] [-5.34489274e-01 -6.61899507e-01 6.34280860e-01 ... -3.08238864e-01 1.38111806e+00 8.44136404e-04] ... [ 4.92556542e-02 5.81159592e-01 1.64251208e-01 ... 1.77995160e-01 -6.22798085e-01 -3.75652164e-01] [-4.47143197e-01 2.71907687e-01 -5.00789106e-01 ... -2.38253862e-01 -1.60396740e-01 -2.31031314e-01] [-7.61512756e-01 -4.58840370e-01 -4.96715665e-01 ... -3.58578652e-01 1.53632998e-01 -5.53671777e-01]] [[-7.52630293e-01 2.66275436e-01 4.00355101e-01 ... -9.53006864e-01 -4.48395878e-01 -1.92076519e-01] [-3.76715548e-02 -6.16497338e-01 -5.80856860e-01 ... -1.14222877e-01 2.24529922e-01 2.47772515e-01] [-3.11262667e-01 3.99936050e-01 -8.38719666e-01 ... -1.99511960e-01 -4.07327443e-01 -1.65983930e-01] ... [-1.02606404e+00 5.98188102e-01 2.13557303e-01 ... -9.44384754e-01 -1.84114669e-02 -9.52829514e-03] [ 6.48290291e-02 -6.68956399e-01 -4.96249467e-01 ... -5.81920259e-02 -2.65931606e-01 3.90304655e-01] [-1.68482825e-01 -2.98800200e-01 6.94499314e-01 ... 6.65058270e-02 -5.10169506e-01 -5.19986272e-01]]] [[[ 1.90465808e+00 1.92810088e-01 -7.12174326e-02 ... 4.58366990e-01 -7.95939267e-01 1.14304852e+00] [ 1.01450300e+00 2.29780316e+00 8.04017425e-01 ... 6.65543199e-01 -7.81677067e-01 2.51981354e+00] [-2.00979840e-02 2.06529319e-01 -9.24564898e-01 ... 6.23838305e-01 2.01077724e+00 -4.05676723e-01] ... [ 1.97559893e+00 1.20080352e-01 2.65360212e+00 ... 1.60921144e+00 3.46428156e+00 4.64930832e-02] [ 3.45538902e+00 7.77934253e-01 1.58761024e+00 ... -4.76129472e-01 2.15579653e+00 8.81071568e-01] [-4.34417099e-01 8.67751598e-01 1.14828193e+00 ... 6.00899220e-01 9.94079709e-01 1.53982148e-01]] [[-2.21405178e-02 5.64963706e-02 2.20999074e+00 ... 4.82197106e-01 1.06068921e+00 2.05664897e+00] [ 1.82187378e+00 -8.51804018e-01 -9.93616462e-01 ... 
1.31700981e+00 3.22351241e+00 1.66131413e+00] [ 2.32705903e+00 2.68097353e+00 1.68732500e+00 ... 2.53990674e+00 -1.30889639e-01 3.05618882e-01] ... [ 1.20270744e-01 5.58426797e-01 9.41130161e-01 ... 7.40895212e-01 1.19317734e+00 1.57124698e+00] [ 2.72715664e+00 1.19427359e+00 -7.50064969e-01 ... 1.21449804e+00 2.73255438e-01 5.99214256e-01] [-8.75614405e-01 1.38035744e-01 8.90873492e-01 ... -4.39552888e-02 5.99296987e-02 4.38457936e-01]] [[ 3.48450035e-01 1.01236093e+00 -2.01278836e-01 ... 1.00934899e+00 1.62983835e+00 1.30592942e+00] [ 1.93932831e+00 4.07151282e-01 1.27817377e-01 ... -5.84279180e-01 4.91732550e+00 4.28191423e-01] [ 2.56397963e+00 2.24891543e+00 4.68577057e-01 ... 2.87036085e+00 1.99343693e+00 1.32976964e-01] ... [-8.55796456e-01 1.47003007e+00 1.80036747e+00 ... 6.08821392e-01 -6.78459525e-01 2.14265180e+00] [-4.12643582e-01 1.16535711e+00 2.68193817e+00 ... 7.30423808e-01 1.66697752e+00 2.59298730e+00] [ 3.90109062e-01 1.45346701e+00 1.51025295e+00 ... 1.73584890e+00 1.96482813e+00 1.42459118e+00]] ... [[-5.86400151e-01 1.00989604e+00 -1.65295434e+00 ... 2.54906011e+00 -8.82653594e-01 1.88329136e+00] [ 2.07510972e+00 -5.69513813e-02 1.94789827e+00 ... 5.02040684e-01 2.18634605e+00 -2.93977290e-01] [ 8.15379024e-01 2.37506366e+00 3.99947047e+00 ... 1.00829244e+00 2.41103244e+00 5.52821338e-01] ... [ 2.68165874e+00 1.61791158e+00 9.88244474e-01 ... -2.24633276e-01 2.09802365e+00 -3.60196292e-01] [ 1.70907223e+00 -1.01151258e-01 1.00800538e+00 ... 9.87634242e-01 1.00670123e+00 2.60039306e+00] [ 2.65600300e+00 7.53346682e-01 1.67146719e+00 ... 1.28129888e+00 -1.49530649e+00 1.47472024e+00]] [[ 1.88115418e+00 1.35322738e+00 1.87309086e+00 ... 2.35340238e+00 2.23934221e+00 1.24581504e+00] [ 1.67632926e+00 2.09711480e+00 2.52065396e+00 ... 1.22222745e+00 1.81928754e+00 2.08845520e+00] [ 1.81951284e+00 1.76373422e+00 8.30518544e-01 ... 2.52370477e+00 4.95495677e-01 2.32659531e+00] ... [ 1.33557248e+00 -3.44777524e-01 2.48006153e+00 ... 
2.12968683e+00 1.29817736e+00 -1.00147344e-01] [ 7.45982289e-01 1.63233101e+00 1.77290261e+00 ... 2.46389580e+00 1.34525204e+00 8.89607370e-01] [ 9.35392916e-01 1.15836656e+00 9.62929964e-01 ... 2.79342294e+00 -1.53516727e-02 1.37795424e+00]] [[ 1.21020770e+00 8.17489982e-01 1.86652219e+00 ... 7.66345084e-01 8.55378628e-01 -7.27809295e-02] [ 1.60544121e+00 1.49595046e+00 5.89890301e-01 ... 9.58036959e-01 1.82267022e+00 5.23239933e-02] [ 2.51632357e+00 1.67798504e-01 1.60066140e+00 ... -8.89103189e-02 2.29991007e+00 2.01729560e+00] ... [ 1.90134871e+00 1.61554372e+00 -4.87064928e-01 ... 3.06533551e+00 5.61089694e-01 7.91842520e-01] [ 8.37014854e-01 1.76120472e+00 6.32484853e-01 ... 8.38372409e-02 9.39786553e-01 1.42363608e+00] [ 1.21337140e+00 3.51923376e-01 1.59689021e+00 ... -3.21565211e-01 5.19290149e-01 1.99981403e+00]]] [[[ 4.07861680e-01 6.39393210e-01 1.26097322e+00 ... 1.20459509e+00 1.54198086e+00 2.27562881e+00] [ 1.02437842e+00 2.93920469e+00 8.44075441e-01 ... 6.93980098e-01 2.00874829e+00 8.49111199e-01] [ 1.00665259e+00 3.36945415e-01 9.80402708e-01 ... 1.48217177e+00 1.48484862e+00 1.09506404e+00] ... [ 1.58465683e+00 5.04278183e-01 1.14376938e+00 ... 2.28699374e+00 9.90043640e-01 8.76736104e-01] [ 9.86068249e-01 1.69203305e+00 1.13955259e+00 ... 5.46391785e-01 5.47216654e-01 1.61265552e+00] [ 1.41760612e+00 1.24086821e+00 7.21087277e-01 ... 5.42886198e-01 1.07018101e+00 8.66390824e-01]] [[ 6.80241823e-01 2.69870669e-01 2.07994175e+00 ... -3.33327174e-01 1.20035982e+00 -4.48217958e-01] [ 1.18737149e+00 6.02143705e-01 1.15266919e+00 ... 1.08758473e+00 1.39960265e+00 2.15839314e+00] [ 1.09316623e+00 1.14099383e+00 1.56688178e+00 ... 1.67246282e+00 1.85144925e+00 1.76168513e+00] ... [ 2.01216483e+00 1.04469788e+00 9.45867538e-01 ... 6.17755473e-01 9.45785701e-01 1.48763824e+00] [ 8.36066663e-01 2.63420343e-01 1.40846825e+00 ... 7.38020241e-01 1.44623685e+00 1.72312129e+00] [ 1.91596389e+00 2.19199443e+00 1.12860453e+00 ... 
6.04823172e-01 2.02066541e+00 6.75283194e-01]] [[ 2.10607338e+00 1.11996078e+00 1.70061946e+00 ... 1.41992235e+00 -2.59032920e-02 1.81562757e+00] [ 1.34364951e+00 5.42566717e-01 1.85363628e-02 ... 1.06138206e+00 7.40054548e-01 1.23833275e+00] [ 4.41575408e-01 1.52050292e+00 8.69845688e-01 ... 1.18329108e+00 1.22792149e+00 2.03233346e-01] ... [ 1.46151435e+00 1.00875962e+00 1.83732009e+00 ... 1.51589274e+00 1.45488763e+00 1.04863143e+00] [ 7.87652850e-01 1.56346500e+00 9.23948884e-01 ... 1.28071618e+00 1.84921241e+00 1.10792625e+00] [ 2.00163245e+00 7.15427697e-01 7.88633704e-01 ... 1.45081854e+00 7.05587506e-01 9.78158474e-01]] ... [[ 8.34850907e-01 1.79272258e+00 7.87158966e-01 ... 4.14450049e-01 8.92826021e-01 1.84443307e+00] [ 9.53649163e-01 1.65255487e+00 1.34174633e+00 ... 7.86626816e-01 1.09373808e+00 1.47911024e+00] [ 2.42419887e+00 1.68361545e-01 1.02320802e+00 ... 1.90982294e+00 1.52045810e+00 8.89792204e-01] ... [ 1.28492904e+00 9.21348274e-01 4.98126954e-01 ... 1.25693834e+00 4.70282733e-01 8.74590337e-01] [ 9.97874975e-01 8.28234851e-01 4.15843666e-01 ... 9.56574857e-01 1.65530348e+00 6.91296577e-01] [ 1.71402252e+00 1.24616575e+00 1.38336337e+00 ... 2.06707239e+00 1.46450484e+00 9.96849597e-01]] [[ 1.53928447e+00 7.70054936e-01 3.66854906e-01 ... 7.15297580e-01 8.13481867e-01 1.72066748e+00] [ 2.01937175e+00 7.32460380e-01 1.07740283e+00 ... 1.77169311e+00 1.36222363e+00 1.01962852e+00] [ 7.90784359e-01 1.44378591e+00 1.45400333e+00 ... -3.23735505e-01 1.68688416e+00 4.60461110e-01] ... [ 1.42829347e+00 1.35590887e+00 1.75768948e+00 ... 1.90064538e+00 1.18172693e+00 1.41749120e+00] [ 6.85306370e-01 9.98489797e-01 5.37788033e-01 ... 5.00003517e-01 1.71887147e+00 1.45437372e+00] [ 8.69037986e-01 1.27373636e+00 1.27793396e+00 ... 9.02875721e-01 1.94429672e+00 9.67225313e-01]] [[ 1.44090533e+00 8.09405565e-01 1.55344260e+00 ... 1.70425892e+00 1.54896164e+00 1.13863945e+00] [ 1.66675794e+00 1.66494226e+00 8.87842238e-01 ... 
1.23294497e+00 4.37087268e-01 1.31653798e+00] [ 8.67713153e-01 1.16998506e+00 1.17685258e+00 ... 4.99676466e-01 2.06137753e+00 5.06597698e-01] ... [ 1.31829000e+00 5.73508680e-01 1.01748943e+00 ... 1.18710756e+00 2.07018423e+00 1.11860013e+00] [ 1.21391284e+00 1.28510201e+00 1.65047121e+00 ... 7.89720297e-01 8.90062928e-01 1.20646536e+00] [ 1.08746171e+00 9.66400802e-01 1.79066133e+00 ... 1.73688662e+00 9.99087691e-01 2.16382241e+00]]] [[[ 1.13977647e+00 5.57668924e-01 7.08259940e-02 ... -3.73355485e-02 7.15351244e-03 -5.10192692e-01] [-3.97764266e-01 1.24405789e+00 1.91072893e+00 ... 3.64093333e-01 1.83583224e+00 -2.73847282e-01] [ 1.07261670e+00 8.38965476e-01 1.32844055e+00 ... -6.61984622e-01 1.01228178e+00 6.24720752e-01] ... [ 1.15331125e+00 -1.13550997e+00 1.44345558e+00 ... -1.65049478e-01 6.78906500e-01 -8.63158610e-03] [ 3.61969560e-01 -7.59758651e-02 3.77764404e-01 ... 1.34405386e+00 9.86913979e-01 -7.44621754e-01] [-5.78426600e-01 7.03857183e-01 1.82189000e+00 ... 4.73579437e-01 5.01188397e-01 -1.22171354e+00]] [[ 4.40653563e-01 -4.17654991e-01 -3.85097712e-01 ... -1.15824354e+00 7.27627754e-01 4.09221798e-01] [ 1.41479358e-01 4.33130711e-01 1.11005378e+00 ... 8.44340444e-01 7.76698470e-01 7.45016873e-01] [ 1.84273219e+00 1.33321762e+00 1.14244692e-01 ... 1.48221597e-01 -4.87606049e-01 7.54191518e-01] ... [ 1.04518378e+00 8.44692826e-01 4.00842607e-01 ... 1.38520670e+00 9.06713367e-01 -7.08801299e-02] [ 6.90672636e-01 1.80127412e-01 -1.71983670e-02 ... 1.06370103e+00 -8.99315067e-03 -4.56246704e-01] [ 6.19628668e-01 1.69703770e+00 5.86707413e-01 ... 5.13781369e-01 8.63671303e-01 4.59337771e-01]] [[ 8.77109408e-01 1.29842889e+00 1.68495521e-01 ... -1.46563742e-02 3.16396385e-01 1.41213632e+00] [ 4.25662637e-01 1.30388129e+00 6.68655217e-01 ... 5.09876549e-01 6.60199821e-01 -8.32459271e-01] [-7.71447122e-01 1.16060293e+00 1.25522152e-01 ... 5.80096185e-01 9.51408267e-01 1.63298297e+00] ... [ 1.78758919e+00 -2.04128083e-02 -7.55617581e-03 ... 
-3.93450558e-01 4.08225656e-01 1.19379616e+00] [ 5.60190976e-02 -1.33741768e-02 5.69542646e-01 ... 6.91399276e-01 -1.73436135e-01 -2.35121235e-01] [ 1.11320686e+00 1.84841737e-01 6.55885637e-02 ... 1.06046700e+00 1.80031434e-01 -1.13556907e-01]] ... [[ 3.67856979e-01 6.71261013e-01 -5.39070666e-01 ... 2.07230046e-01 1.46230549e-01 6.99598670e-01] [ 5.77356577e-01 -1.80936996e-02 7.64646292e-01 ... 5.95128536e-01 4.11496043e-01 -3.06215078e-01] [ 1.16038835e+00 -2.29945675e-01 -3.01647842e-01 ... -5.14532387e-01 8.93468499e-01 9.00678575e-01] ... [ 1.99087262e+00 -3.20531577e-02 -4.83597338e-01 ... 2.11232519e+00 -1.91554442e-01 1.05199993e+00] [ 3.65893304e-01 1.38944840e+00 6.27612233e-01 ... -2.56385118e-01 3.79034191e-01 -8.65648746e-01] [ 5.22819102e-01 3.13116491e-01 1.68129814e+00 ... 1.68449378e+00 9.23259914e-01 -3.86755586e-01]] [[ 3.84271592e-01 -1.20425475e+00 6.69719040e-01 ... 1.04871237e+00 8.13597143e-01 -2.09995896e-01] [ 9.41707969e-01 -1.25278449e+00 3.25524926e-01 ... 1.84254980e+00 1.05689013e+00 -1.27899647e-02] [ 4.65629220e-01 1.45208335e+00 -3.67203265e-01 ... 1.66084242e+00 -2.39805430e-01 6.83858097e-01] ... [ 2.66094774e-01 -1.52417988e-01 1.69812906e+00 ... 6.38576329e-01 8.21651936e-01 1.96859837e+00] [-1.24649084e+00 7.95573711e-01 1.40119433e+00 ... 9.27199423e-01 1.05811477e+00 5.29259264e-01] [ 8.88745427e-01 3.05550713e-02 4.91251409e-01 ... 6.86065614e-01 1.22142220e+00 8.84073734e-01]] [[ 4.66956407e-01 -1.72158480e-01 1.12257111e+00 ... 3.67768407e-01 3.32976997e-01 -1.41915917e-01] [-3.15042324e-02 9.15131688e-01 1.19973576e+00 ... 7.10900664e-01 1.31587005e+00 7.78111577e-01] [ 1.20247519e+00 1.68978918e+00 6.07064664e-01 ... 9.70197797e-01 5.94073772e-01 7.06425250e-01] ... [ 6.71224535e-01 4.45452392e-01 8.89596224e-01 ... 1.51477087e+00 5.12114950e-02 1.12576175e+00] [ 9.57607210e-01 1.21534574e+00 2.47941334e-02 ... 9.25866544e-01 2.97356993e-01 2.44229406e-01] [ 1.06220126e+00 6.45077080e-02 1.26657856e+00 ... 
1.61875859e-01 8.52064639e-02 9.02462840e-01]]] [[[ 5.84050596e-01 -2.30933167e-02 2.39399627e-01 ... 1.82470590e-01 4.86878306e-01 5.31500280e-01] [ 1.00694835e-01 3.13082159e-01 1.28793307e-02 ... 2.55232397e-02 2.91630954e-01 5.43025970e-01] [-3.95131707e-02 7.48090446e-02 5.32121718e-01 ... 1.51525885e-01 1.07445620e-01 8.18911195e-01] ... [ 9.37803388e-02 6.51853025e-01 -2.98284926e-02 ... 8.14304888e-01 -8.90475959e-02 3.95210862e-01] [ 8.96156281e-02 -5.00389993e-01 3.58804196e-01 ... -9.33607593e-02 3.48606706e-01 -1.32729530e-01] [ 1.77873343e-01 -3.21867317e-01 9.95704252e-03 ... -7.51816258e-02 4.32761669e-01 -1.47353932e-02]] [[ 2.06331074e-01 -2.09814668e-01 1.04866290e+00 ... 3.21769178e-01 -1.08237140e-01 3.92694831e-01] [ 5.65024555e-01 1.65153652e-01 4.97027904e-01 ... -5.43731153e-01 6.42263055e-01 -5.21145808e-03] [ 2.86406875e-01 3.93479392e-02 2.43758082e-01 ... 1.05699562e-01 4.57852900e-01 -2.32084081e-01] ... [ 4.31285910e-02 1.95244059e-01 -7.43610933e-02 ... 2.52246678e-01 -1.01759844e-01 5.68326652e-01] [-4.29234236e-01 -3.32734346e-01 9.31447864e-01 ... -2.67373566e-02 5.38894832e-01 2.66970783e-01] [ 7.88477778e-01 4.50138032e-01 1.15944576e+00 ... 7.75095895e-02 8.24170485e-02 -1.47780240e-01]] [[ 1.98750664e-02 3.82233739e-01 -2.48587623e-01 ... 4.72429134e-02 -5.71512319e-02 4.94973958e-01] [ 3.96611512e-01 1.91310704e-01 2.48893186e-01 ... 4.32137437e-02 -7.57907480e-02 6.73326433e-01] [ 5.85231721e-01 6.21745512e-02 1.31730044e+00 ... 2.72124350e-01 -2.64198594e-02 3.52110803e-01] ... [ 6.58913374e-01 -5.88746667e-01 1.62493750e-01 ... 2.95142204e-01 1.18135780e-01 1.08420573e-01] [ 6.66763186e-01 7.91915581e-02 4.86119717e-01 ... 1.95137650e-01 1.47263691e-01 2.16165081e-01] [ 1.27173793e-02 8.33401620e-01 1.73362970e-01 ... 1.43606260e-01 7.77933598e-01 7.34646916e-01]] ... [[ 5.72766840e-01 4.18061137e-01 6.05150998e-01 ... 8.01540166e-03 -4.50185947e-02 5.31878360e-02] [ 6.50890172e-01 -1.10106990e-02 -2.73130655e-01 ... 
8.76711011e-02 4.39921856e-01 -3.83221745e-01] [-2.62077659e-01 5.56522071e-01 -9.13651809e-02 ... 4.24735934e-01 -6.46958426e-02 3.81334014e-02] ... [ 3.28056246e-01 -6.15583777e-01 4.58787233e-01 ... 7.49800503e-01 -1.50456011e-01 -1.57175902e-02] [ 8.75344217e-01 1.50669247e-01 2.32290089e-01 ... 1.06912933e-01 -6.70306683e-02 1.19540446e-01] [ 2.97526479e-01 6.07115149e-01 2.49281660e-01 ... 4.17097569e-01 6.76229835e-01 4.17606771e-01]] [[-1.75378527e-02 9.64808702e-01 -3.79986286e-01 ... 8.50951791e-01 3.41435403e-01 -3.59846592e-01] [ 3.24620366e-01 6.02366090e-01 -3.07100788e-02 ... 2.82368720e-01 2.77686507e-01 -2.77379975e-02] [-3.08785260e-01 -2.66026795e-01 6.11091495e-01 ... 6.07545376e-02 -3.42876203e-02 -7.22685596e-03] ... [ 5.57109118e-01 1.57307878e-01 4.77575809e-01 ... 7.08000779e-01 1.07292319e-02 2.46169582e-01] [ 9.03577089e-01 -3.09537444e-03 5.52134693e-01 ... -2.91929096e-01 -6.41791672e-02 2.48442352e-01] [-1.21457636e-01 -5.66337816e-02 1.88560843e-01 ... 6.90454125e-01 3.35545808e-01 6.15062773e-01]] [[ 1.22720867e-01 5.24645686e-01 -1.53410574e-02 ... 2.36529827e-01 6.16447747e-01 9.87876654e-02] [-8.16607997e-02 1.75857127e-01 -4.99426164e-02 ... -4.58297096e-02 5.56934714e-01 4.18335706e-01] [ 2.28361383e-01 3.00135761e-01 -6.26790345e-01 ... 4.23440009e-01 5.42392917e-02 2.05375761e-01] ... [ 3.32386464e-01 4.67018187e-01 2.55590320e-01 ... 3.55930239e-01 3.31911653e-01 1.73541054e-01] [-1.67966872e-01 2.83327788e-01 4.97679830e-01 ... -8.14492032e-02 9.34488833e-01 1.98967874e-01] [ 4.75806028e-01 3.36945534e-01 1.44324750e-01 ... 3.90764654e-01 -1.20851442e-01 6.50350273e-01]]]] [[[[-4.26194966e-01 1.08890526e-01 -6.49273396e-01 ... -8.58352244e-01 -5.66350043e-01 -4.88921523e-01] [-8.55412602e-01 -8.53115737e-01 -3.18515271e-01 ... 3.41447175e-01 8.72809827e-01 -6.12737954e-01] [-1.45473734e-01 1.63829476e-01 -7.96635330e-01 ... -6.03198826e-01 -8.00349593e-01 -7.01026499e-01] ... 
[-3.95017952e-01 2.33327612e-01 -2.19413921e-01 ... 4.03471768e-01 -5.87080181e-01 -2.59484768e-01] [ 1.74920604e-01 -7.70669103e-01 -3.86886090e-01 ... 2.64662534e-01 2.79633343e-01 -1.85927711e-02] [-4.90894109e-01 4.76068020e-01 6.84096575e-01 ... 3.68245780e-01 -6.12617493e-01 8.92719984e-01]] [[-7.11419955e-02 1.24118105e-01 -8.86254162e-02 ... 5.03090858e-01 4.34344083e-01 2.95340985e-01] [-6.96293712e-01 -1.19422972e+00 -1.72404572e-01 ... 3.38382274e-01 6.55865312e-01 -2.25343838e-01] [ 6.17964506e-01 -4.02186155e-01 -7.33049810e-01 ... 5.93080149e-05 -1.25648057e+00 1.09635629e-02] ... [ 1.68005884e-01 9.03889120e-01 -2.29815528e-01 ... -2.56267279e-01 -4.76390809e-01 1.33107877e+00] [ 2.98830476e-02 3.10821593e-01 5.13577349e-02 ... -4.15060401e-01 -1.66226700e-01 3.60994548e-01] [ 3.64496969e-02 -9.57017168e-02 -3.87296200e-01 ... 7.12620378e-01 -1.19852759e-02 8.29503909e-02]] [[-4.16619360e-01 1.15099840e-01 -2.35805243e-01 ... -7.59158432e-01 -1.95687145e-01 1.03010558e-01] [ 1.11879086e+00 8.87909457e-02 7.20685840e-01 ... -5.51656961e-01 -2.87420928e-01 -8.27896953e-01] [-1.63915381e-01 -2.37746567e-01 -1.06719232e+00 ... 9.46839631e-01 2.75978953e-01 5.03259718e-01] ... [-7.05166906e-02 -7.22811162e-01 5.92213571e-01 ... -1.51960492e-01 -2.08617353e+00 -1.11135850e-02] [-1.64794803e-01 5.56030929e-01 -6.48994744e-01 ... -1.25036970e-01 -3.96068960e-01 -5.62065132e-02] [ 2.71706544e-02 -2.60816664e-01 -4.39162254e-01 ... -6.85551539e-02 -2.94086672e-02 -1.25929165e+00]] ... [[ 8.73760581e-01 -7.94478714e-01 -1.48854628e-01 ... -9.60043907e-01 -5.34701705e-01 -7.87057281e-02] [ 3.08146060e-01 -2.37251610e-01 9.79314446e-01 ... 5.64532205e-02 -2.38343269e-01 -1.72593921e-01] [-1.39780361e-02 -2.36195937e-01 8.42283785e-01 ... -2.45046645e-01 -3.17844599e-01 -1.71992704e-01] ... [-4.64055002e-01 -4.79795523e-02 2.84605116e-01 ... 4.12374556e-01 2.69215941e-01 -5.18005967e-01] [ 9.31816176e-02 -7.04534113e-01 1.78037971e-01 ... 
3.10427099e-01 6.84929267e-02 -1.44805956e+00] [-1.45517862e+00 -1.63874209e-01 -3.55759293e-01 ... -4.29419339e-01 -4.40443754e-01 7.13846684e-02]] [[-1.03137064e+00 -1.94784909e-01 -8.38719785e-01 ... -4.08762157e-01 -2.40255326e-01 1.04729109e-01] [ 1.87049225e-01 7.80816898e-02 -1.80073664e-01 ... 5.69110096e-01 7.80628845e-02 8.15530539e-01] [-1.81605235e-01 -3.43294829e-01 8.81836712e-02 ... 1.24530815e-01 -9.13069069e-01 5.89480639e-01] ... [-3.51336658e-01 6.33061886e-01 -1.17433631e+00 ... -1.88696146e-01 -6.58002377e-01 -7.84236133e-01] [ 1.09414995e+00 3.90615106e-01 4.45481539e-01 ... -7.93722689e-01 2.24833980e-01 -8.44969869e-01] [-1.85112551e-01 -1.12190580e+00 -8.80918801e-01 ... -4.78612095e-01 -2.97360588e-02 5.34665465e-01]] [[-1.79169253e-01 -6.77315235e-01 -1.28306365e+00 ... -5.05797505e-01 7.83757120e-02 -2.53948737e-02] [-7.01902390e-01 1.01831868e-01 2.36882567e-01 ... 2.56798446e-01 -1.05808154e-02 1.68146893e-01] [-7.45336354e-01 3.25166225e-01 -1.29058287e-02 ... -1.63431838e-01 -5.06319761e-01 -4.01815809e-02] ... [-7.95533434e-02 2.04992458e-01 -5.72922468e-01 ... 2.66466171e-01 1.96035102e-01 -1.13898075e+00] [ 7.65220582e-01 1.33961543e-01 -3.18017960e-01 ... 4.86192495e-01 5.12381010e-02 -4.07953650e-01] [-4.09760535e-01 -3.58851463e-01 -9.08413053e-01 ... -4.16812450e-01 6.65260032e-02 4.27945942e-01]]] [[[-6.96822479e-02 -1.15132593e-02 1.10343531e-01 ... 1.45567104e-01 -1.45153582e-01 -8.47609699e-01] [-2.49367997e-01 -4.53628600e-01 -1.09410204e-01 ... 4.71926667e-02 -1.01410413e+00 2.55803075e-02] [-3.85319382e-01 2.58341521e-01 -7.38592625e-01 ... -7.32777357e-01 -8.12392771e-01 7.36195624e-01] ... [-2.10288718e-01 -4.04543318e-02 -6.26181066e-01 ... -5.05309641e-01 -1.03166127e+00 3.42717022e-01] [-6.46145761e-01 -5.51856995e-01 -3.09178710e-01 ... -1.32882267e-01 3.53663027e-01 -4.54569042e-01] [-3.08663934e-01 4.04356718e-01 -1.18358597e-01 ... 
-7.00745732e-02 -3.70898992e-01 -6.09458506e-01]] [[-2.69864917e-01 -2.01342329e-01 3.71902347e-01 ... -9.15257514e-01 -6.48782611e-01 -8.78678933e-02] [ 8.02048668e-02 -8.81585121e-01 -4.90757257e-01 ... -3.39476436e-01 -5.07190824e-01 2.27016956e-01] [ 7.43021369e-01 -6.13257647e-01 -2.91740954e-01 ... 5.09619057e-01 2.08465308e-01 -1.85265496e-01] ... [-7.66624629e-01 5.39064646e-01 -8.17777872e-01 ... -5.64351737e-01 -3.59306425e-01 2.58551538e-01] [-2.39836395e-01 -5.04418731e-01 -4.53164041e-01 ... -2.89005876e-01 -6.94236383e-02 -3.68020870e-02] [-2.77664632e-01 -1.13302022e-01 2.53361672e-01 ... -3.25521260e-01 -3.91516596e-01 3.37483495e-01]] [[-2.13173732e-01 7.46892318e-02 -6.06055439e-01 ... 1.28424942e-01 -7.86314368e-01 4.76829529e-01] [-3.80460471e-01 2.61028204e-03 6.17689788e-01 ... -2.62813181e-01 -8.04648399e-02 -1.03778087e-01] [-8.02047849e-01 -5.79050362e-01 -2.34930396e-01 ... -5.23193777e-01 1.87135130e-01 7.39908576e-01] ... [-6.45907521e-01 -5.80890179e-02 -8.93153176e-02 ... -4.61384505e-01 -2.14085370e-01 8.34336355e-02] [ 1.74641222e-01 -3.79273057e-01 1.14709228e-01 ... -1.13618858e-01 -8.85021016e-02 -8.66907984e-02] [ 4.15978860e-03 2.16474995e-01 -4.67306852e-01 ... 6.19185865e-02 -1.66753486e-01 1.00805871e-01]] ... [[-7.48381555e-01 4.83528040e-02 -1.87374562e-01 ... -4.00083601e-01 -7.56049395e-01 -4.65226591e-01] [-1.47578299e-01 -1.80497184e-01 -1.41426340e-01 ... -4.20877397e-01 -2.26621211e-01 2.03919947e-01] [ 2.50209332e-01 -2.06154034e-01 -1.39879704e-01 ... -8.82935226e-01 1.38480961e-01 -4.99825537e-01] ... [-1.69245675e-01 -5.51103294e-01 -3.15564841e-01 ... -4.84057903e-01 -4.07069206e-01 -7.59160697e-01] [-3.62397432e-01 -2.27849230e-01 5.86605817e-03 ... 4.50143248e-01 -5.84026217e-01 2.11165354e-01] [ 3.75876516e-01 -4.31133509e-02 -4.04988557e-01 ... 4.65025194e-02 -3.95466328e-01 3.39349359e-01]] [[-9.30849388e-02 -3.22191417e-01 -7.21459985e-01 ... 
-1.34657836e-02 -4.54200715e-01 -4.34846878e-01] [-4.19251621e-01 -3.34289610e-01 -8.16716611e-01 ... -5.72635353e-01 4.69438732e-02 1.63669467e-01] [-9.52076197e-01 -5.82368731e-01 3.38319749e-01 ... 1.58386692e-01 -4.11808729e-01 8.63793254e-01] ... [ 5.21976590e-01 8.81385148e-01 1.97086826e-01 ... 4.14060950e-01 5.11830822e-02 -2.64390558e-01] [ 4.74104211e-02 -2.01450646e-01 -7.27906823e-01 ... -4.76548411e-02 -2.04906538e-01 -4.72430140e-01] [-8.92103910e-01 -5.72642051e-02 -1.92595541e-01 ... 1.52631834e-01 -1.97275534e-01 -9.81504560e-01]] [[-5.26828468e-02 3.46606523e-01 -8.25181305e-01 ... 1.11294296e-02 -6.32973254e-01 -3.41970235e-01] [-6.60746872e-01 4.93238121e-01 -2.33183801e-01 ... -3.69373500e-01 1.44920930e-01 1.05787054e-01] [-5.51045179e-01 -5.77728748e-01 2.93549389e-01 ... 1.87663868e-01 -3.68098438e-01 3.73898864e-01] ... [-2.52334386e-01 -2.06620723e-01 -3.07954438e-02 ... -4.08806026e-01 6.20940775e-02 -6.91078544e-01] [-4.53106105e-01 -1.43238921e-02 -7.40437806e-01 ... -6.73346519e-01 2.24976733e-01 -3.16162050e-01] [-1.56257197e-01 -9.51459706e-02 -1.83320984e-01 ... -9.36289966e-01 -1.69116512e-01 -3.53878774e-02]]] [[[ 1.35420650e-01 1.38098466e+00 3.73935413e+00 ... 1.72969139e+00 1.86634213e-01 1.07870281e+00] [ 1.33049476e+00 1.34950721e+00 1.77173007e+00 ... 8.02646577e-01 1.32617486e+00 1.59842372e+00] [ 4.92858171e-01 3.82520819e+00 2.99183577e-01 ... 1.59010291e+00 1.80074430e+00 1.49127364e+00] ... [ 1.69180667e+00 1.15296733e+00 1.51052046e+00 ... 1.10316968e+00 1.40030539e+00 4.84371111e-02] [-5.71625233e-01 2.67419481e+00 2.30057669e+00 ... -3.31495260e-03 2.69597709e-01 1.62631124e-01] [ 8.05795789e-01 -8.85663405e-02 1.08053792e+00 ... 1.71570376e-01 1.39122617e+00 -1.45584807e-01]] [[ 1.73637056e+00 2.53229904e+00 -7.17931315e-02 ... 2.37666583e+00 -1.10045776e-01 6.48244143e-01] [-5.24670243e-01 7.52398312e-01 3.10347271e+00 ... 
1.51710939e+00 7.00209260e-01 2.35894418e+00] [ 3.63016911e-02 1.14682198e+00 9.80476797e-01 ... 1.38279974e+00 5.79521775e-01 2.25872636e+00] ... [ 9.68842626e-01 5.66327989e-01 -1.60739496e-01 ... 9.51306462e-01 8.05672884e-01 2.42320061e-01] [ 2.08782482e+00 1.07685888e+00 3.05303246e-01 ... -7.27238208e-02 2.40222335e+00 2.90485263e+00] [-4.70457703e-01 2.74654174e+00 1.45411766e+00 ... 1.64669168e+00 1.08246636e+00 5.48344195e-01]] [[ 1.11895514e+00 3.38547158e+00 2.26060104e+00 ... 2.47127461e+00 1.54022801e+00 -2.12884247e-02] [ 7.25146174e-01 -6.10877454e-01 1.13219345e+00 ... 3.88655829e+00 7.85883725e-01 2.13991261e+00] [ 2.31251073e+00 1.47385871e+00 2.88137937e+00 ... 1.56333148e+00 2.41398755e-02 1.54790294e+00] ... [ 4.46459007e+00 9.49861944e-01 7.47757912e-01 ... 2.28119373e+00 3.29511613e-01 -5.58923304e-01] [ 3.30359101e+00 -9.39040661e-01 4.66865659e-01 ... 1.47866964e+00 3.35574579e+00 1.91706192e+00] [ 1.24747813e+00 2.68185854e-01 -7.83381104e-01 ... 5.69359601e-01 2.17053080e+00 2.04803085e+00]] ... [[ 3.09102011e+00 1.36488426e+00 -4.10628557e-01 ... 2.57386863e-01 4.78893012e-01 9.06960368e-01] [ 5.38654327e-01 1.64484113e-01 3.12151861e+00 ... -8.48845482e-01 -4.15356547e-01 -1.12632477e+00] [ 9.49162364e-01 3.00538372e-02 8.74781370e-01 ... 4.19216216e-01 -2.17049755e-02 2.32244372e+00] ... [ 1.95254219e+00 1.20515537e+00 -1.12536490e+00 ... 4.69981171e-02 1.72773492e+00 -3.18348497e-01] [ 3.57683033e-01 1.06799614e+00 -1.06719270e-01 ... 5.66120505e-01 6.04778707e-01 2.08786416e+00] [ 9.61696386e-01 1.92997038e+00 -3.03418577e-01 ... 4.30520251e-02 1.41531336e+00 1.75572693e+00]] [[ 9.14586723e-01 2.85319537e-01 1.26390100e+00 ... 3.02752405e-01 2.13997269e+00 2.88376355e+00] [ 7.96501279e-01 9.90571439e-01 3.67430687e-01 ... 1.76904666e+00 1.11664236e+00 1.56937301e+00] [ 1.40776908e+00 -3.48221123e-01 1.63383877e+00 ... 1.02157331e+00 1.78510153e+00 3.52198005e-01] ... [ 1.05830681e+00 1.92961943e+00 1.36526072e+00 ... 
1.50063217e+00 1.17161296e-01 2.80080271e+00] [ 1.03151250e+00 9.18189347e-01 -2.50395328e-01 ... 1.12344933e+00 3.09527278e+00 5.90423942e-01] [-5.13234615e-01 -3.87263268e-01 1.41801560e+00 ... 3.15057278e-01 -2.06315875e+00 1.79505587e+00]] [[ 1.03952467e+00 6.31694913e-01 1.10729980e+00 ... 2.01267052e+00 -1.70004725e-01 3.24963570e+00] [-1.31367624e-01 6.22832298e-01 1.80138075e+00 ... 2.67568350e-01 1.67483747e+00 2.60081887e+00] [ 2.21613812e+00 6.83861136e-01 9.39090997e-02 ... 1.80919456e+00 1.70551062e+00 1.68068790e+00] ... [-3.66926074e-01 2.49306500e-01 9.34265733e-01 ... 1.52847826e+00 1.45308983e+00 3.09857392e+00] [ 5.18960571e+00 -7.71574259e-01 4.54260975e-01 ... 2.43571949e+00 1.46706438e+00 9.96857047e-01] [ 1.28477025e+00 1.83255172e+00 4.27753925e-01 ... 2.57939965e-01 3.18539858e+00 2.43744755e+00]]] [[[ 8.92646670e-01 9.87642825e-01 1.12513459e+00 ... 1.14122510e+00 4.74776417e-01 4.51525778e-01] [ 1.18562555e+00 1.11608791e+00 1.80455491e-01 ... 1.98437762e+00 2.10015512e+00 8.69923532e-01] [ 1.42087030e+00 1.89303148e+00 4.81873035e-01 ... 9.60187435e-01 8.60833108e-01 1.11771071e+00] ... [ 2.17856264e+00 1.99852467e+00 1.49942732e+00 ... 7.98196316e-01 1.06558335e+00 4.54070240e-01] [ 1.53567982e+00 1.04671526e+00 1.87371171e+00 ... 1.62396932e+00 3.72787416e-01 6.74018800e-01] [ 1.23667610e+00 6.50750399e-01 1.76399398e+00 ... 9.89281595e-01 7.39736974e-01 1.40931022e+00]] [[ 2.20280719e+00 2.11643505e+00 1.06235051e+00 ... 1.93311024e+00 1.75400519e+00 8.97626460e-01] [ 5.63011527e-01 6.49803162e-01 1.57238400e+00 ... 1.36456907e+00 6.30187452e-01 1.03604043e+00] [ 9.74612772e-01 3.74357641e-01 8.93877864e-01 ... 1.47196877e+00 1.13409519e+00 1.89082348e+00] ... [ 2.41933957e-01 1.05690765e+00 1.65456510e+00 ... 1.40589619e+00 1.47908914e+00 1.11419439e+00] [ 1.01336849e+00 8.76554787e-01 8.25431526e-01 ... 1.25960267e+00 2.23622665e-01 1.02354932e+00] [ 5.57245374e-01 6.17767572e-01 1.10338974e+00 ... 
4.29129779e-01 1.16858721e+00 1.12571287e+00]] [[ 1.32653499e+00 1.12956154e+00 3.70945662e-01 ... 4.54506636e-01 2.93742865e-02 1.63874733e+00] [ 5.78189492e-01 1.11894119e+00 1.63445067e+00 ... 1.55525792e+00 1.18409824e+00 8.21948469e-01] [ 1.66993594e+00 1.63946342e+00 6.69865847e-01 ... 9.33285892e-01 1.71786094e+00 1.15403593e+00] ... [ 1.66612649e+00 2.08351398e+00 1.22928858e+00 ... 1.68573582e+00 9.00290847e-01 1.14358354e+00] [ 1.15677989e+00 1.17643583e+00 6.65207565e-01 ... 7.71079242e-01 1.29145586e+00 9.65410352e-01] [ 1.15114534e+00 1.25461924e+00 9.37823117e-01 ... 5.40523767e-01 4.57437158e-01 7.32681274e-01]] ... [[ 9.71849084e-01 1.97839391e+00 3.98995548e-01 ... 1.47211647e+00 1.72789943e+00 1.26800120e+00] [ 1.37538075e+00 9.36030626e-01 1.26297021e+00 ... 1.10551941e+00 1.57267237e+00 1.23447788e+00] [ 1.16665745e+00 1.00331521e+00 1.62550437e+00 ... 1.32728958e+00 5.53458333e-01 1.11949682e+00] ... [ 9.57045138e-01 1.16702139e+00 1.55231214e+00 ... 1.26107252e+00 8.67864788e-01 1.28513074e+00] [ 4.83032763e-01 4.44248974e-01 1.69716406e+00 ... 7.25790143e-01 8.36632252e-01 8.52742791e-01] [ 1.38870811e+00 1.80352971e-01 4.88014907e-01 ... 7.16795921e-01 1.57713544e+00 1.51657820e+00]] [[ 1.01359212e+00 1.11410069e+00 -2.97301915e-03 ... 1.35358632e-01 1.37310481e+00 4.40755576e-01] [ 1.78750670e+00 1.50998902e+00 6.03803277e-01 ... 1.06151247e+00 9.32486355e-01 8.51857066e-01] [ 9.76521671e-01 5.19900978e-01 1.36425018e+00 ... 1.71082330e+00 1.08824360e+00 -2.52937466e-01] ... [ 1.74141335e+00 1.84735501e+00 5.49945056e-01 ... 5.43391228e-01 9.38851118e-01 2.05183768e+00] [ 1.36195922e+00 8.94434273e-01 1.43837047e+00 ... 1.85099885e-01 1.22969139e+00 1.56579483e+00] [ 1.60902774e+00 7.47229040e-01 1.37279081e+00 ... 1.72655916e+00 8.88196230e-01 1.21553040e+00]] [[ 1.65165925e+00 1.39184797e+00 1.57278359e+00 ... 9.54001963e-01 4.40629154e-01 4.77312595e-01] [ 1.50925601e+00 1.39899218e+00 1.50458264e+00 ... 
5.21307945e-01 8.74480665e-01 9.93534684e-01] [ 1.27548885e+00 1.17717195e+00 1.35405242e+00 ... 4.97115344e-01 1.19652724e+00 1.09642994e+00] ... [ 8.12983930e-01 1.37467110e+00 1.12164831e+00 ... 9.48190689e-01 9.34459805e-01 1.39079392e+00] [ 1.03010499e+00 5.50450206e-01 1.01372409e+00 ... 9.85342711e-02 1.30264020e+00 1.99329185e+00] [ 9.76427555e-01 3.66499215e-01 7.10249960e-01 ... 8.31256926e-01 1.28613651e+00 9.13652658e-01]]] [[[ 1.95332527e+00 2.38185778e-01 7.43652165e-01 ... -5.01785457e-01 2.84884512e-01 1.69568896e-01] [ 3.09804350e-01 5.70381097e-02 2.81107396e-01 ... 5.10125220e-01 -2.29831561e-02 -1.76183637e-02] [ 1.42440647e-01 -6.18485570e-01 2.48930126e-01 ... 9.41380635e-02 7.94027030e-01 -2.65540302e-01] ... [ 1.54775882e+00 3.99831623e-01 5.74397504e-01 ... 1.71790910e+00 7.93797553e-01 5.82721949e-01] [-1.33125067e+00 9.49531257e-01 9.99171317e-01 ... -1.59954764e-02 6.19583309e-01 4.59208071e-01] [ 3.44067931e-01 -1.04978614e-01 5.05133092e-01 ... 7.35062897e-01 3.40024680e-01 -3.66351753e-02]] [[ 4.87433612e-01 1.32409000e+00 2.93390572e-01 ... 5.04756987e-01 -1.36899292e-01 1.06512451e+00] [ 2.69790381e-01 1.70610130e-01 -3.65212470e-01 ... 1.09152997e+00 -1.27543777e-01 4.64462191e-01] [ 5.30159831e-01 4.26401436e-01 5.37705243e-01 ... 4.27974135e-01 9.85398293e-01 6.71938658e-02] ... [ 3.25924128e-01 1.37914777e-01 9.25585270e-01 ... -3.18693668e-01 7.59458005e-01 6.25139892e-01] [ 2.54310548e-01 -1.87161401e-01 -8.01592410e-01 ... 7.98940733e-02 -4.59326893e-01 1.36847162e+00] [ 1.08629477e+00 8.24921489e-01 5.82016826e-01 ... -3.23986858e-01 7.37893462e-01 6.22628808e-01]] [[-3.59808981e-01 -7.75949240e-01 7.16792643e-01 ... 5.30402005e-01 1.35851479e+00 -9.95909497e-02] [ 7.30466008e-01 -6.93421841e-01 5.88524222e-01 ... 2.18586568e-02 7.09943235e-01 1.01711023e+00] [ 5.32484353e-01 -7.44893849e-01 -2.99947321e-01 ... -5.10561585e-01 6.80136740e-01 -4.66993451e-01] ... [ 1.17130905e-01 7.66375303e-01 6.94061339e-01 ... 
9.01861563e-02 1.07040024e+00 2.43762136e-01] [-7.88546085e-01 -3.63778591e-01 1.47180915e-01 ... -4.27042335e-01 3.29248190e-01 -3.12684149e-01] [ 1.65358484e-01 4.94000882e-01 4.21769142e-01 ... 1.15898156e+00 4.71069187e-01 9.42059815e-01]] ... [[ 1.07842600e+00 1.91052866e+00 2.33570531e-01 ... 1.23135114e+00 -2.81339079e-01 6.80492163e-01] [ 2.19276357e+00 5.73997796e-01 1.25494576e+00 ... 8.61919641e-01 7.07471490e-01 7.14676082e-01] [-2.82387674e-01 1.22407794e+00 -1.42507806e-01 ... 8.10294032e-01 1.25474703e+00 -9.06506598e-01] ... [-4.38719630e-01 4.08233136e-01 7.03739598e-02 ... -2.26708367e-01 1.77731365e-01 -8.11010957e-01] [ 1.01865184e+00 8.16737473e-01 5.78894138e-01 ... 6.90650702e-01 -5.91389835e-01 1.59628093e+00] [ 5.92447780e-02 5.21357715e-01 3.65184367e-01 ... 5.30523695e-02 -5.35700582e-02 4.64993834e-01]] [[-2.64000781e-02 -4.63881232e-02 5.44020832e-01 ... 6.56295538e-01 1.20853379e-01 1.09625804e+00] [ 9.71088588e-01 -8.00911263e-02 -2.40099698e-01 ... 6.75061285e-01 6.05134249e-01 -2.85645664e-01] [-1.29192948e-01 -7.24353790e-02 2.92134613e-01 ... 5.38243473e-01 4.74635541e-01 5.30159056e-01] ... [ 1.07974112e+00 8.21770489e-01 6.26068950e-01 ... 8.04628670e-01 6.84842229e-01 -1.23716809e-01] [ 1.73484457e+00 -3.98196548e-01 1.10494006e+00 ... 6.85919940e-01 7.85936534e-01 4.78939742e-01] [ 1.09860682e+00 1.36503831e-01 1.31042147e+00 ... -3.03060040e-02 3.38377148e-01 3.84344831e-02]] [[-9.07809585e-02 9.95634317e-01 7.45270550e-01 ... 1.25922036e+00 1.93755150e+00 2.96722621e-01] [ 1.25890625e+00 1.21620214e+00 1.05572724e+00 ... 5.48245847e-01 6.84935510e-01 2.22504750e-01] [ 1.03242958e+00 1.30405176e+00 1.51707089e+00 ... 1.47013974e+00 5.28296530e-01 5.78820109e-01] ... [-1.12373091e-01 -3.77081595e-02 1.60019863e+00 ... 1.29108325e-01 -1.04312265e+00 6.91618323e-01] [ 7.06049383e-01 6.49770021e-01 1.25743747e+00 ... -1.27538466e+00 1.03472519e+00 4.91402391e-03] [ 2.91576654e-01 1.25426936e+00 7.45207131e-01 ... 
4.82516587e-01 8.57635081e-01 7.30234683e-01]]] [[[ 8.52273226e-01 6.25189066e-01 3.14040482e-01 ... -1.09099664e-01 6.66647255e-01 -7.91418403e-02] [-3.07506263e-01 -5.62907346e-02 2.82990456e-01 ... 2.23977983e-01 6.89788043e-01 2.86265939e-01] [ 3.15341383e-01 4.06566888e-01 3.42272460e-01 ... 5.70325255e-01 5.89621723e-01 2.03055739e-01] ... [-2.17770100e-01 3.76561314e-01 5.70326030e-01 ... 1.37943640e-01 8.28301311e-01 -4.05628264e-01] [ 1.27908185e-01 2.63873607e-01 1.89515464e-02 ... 3.99608999e-01 3.84001344e-01 3.60592961e-01] [-9.48987678e-02 -2.04598397e-01 1.30951209e-02 ... 5.08292913e-01 -2.88566232e-01 3.11003983e-01]] [[ 4.33583446e-02 -1.87234938e-01 6.63716316e-01 ... 2.90798664e-01 1.02126576e-01 2.25461751e-01] [ 3.87109071e-02 2.34897316e-01 1.63315251e-01 ... -6.29078373e-02 4.00454849e-02 2.10584983e-01] [-6.58551931e-01 4.69820172e-01 4.98819321e-01 ... 2.01699153e-01 3.19178343e-01 2.47144118e-01] ... [ 1.25824168e-01 4.31344777e-01 -2.52946585e-01 ... -3.89860332e-01 1.44394124e-02 -1.78003788e-01] [-3.30649197e-01 4.94132847e-01 3.50912437e-02 ... 1.22217692e-01 3.27619404e-01 4.95862067e-01] [-3.46562117e-01 7.84805194e-02 3.18189472e-01 ... 1.42457828e-01 2.50316381e-01 -1.93765610e-01]] [[ 2.25836366e-01 3.06969255e-01 6.08240783e-01 ... -3.77579689e-01 4.52752024e-01 3.73827100e-01] [ 2.43156463e-01 -9.83336866e-02 2.15405181e-01 ... 2.58570433e-01 6.88487291e-01 2.90025771e-01] [ 8.28716625e-03 -1.35735899e-01 -1.10391527e-01 ... 6.21541083e-01 3.98977809e-02 3.25370491e-01] ... [-6.54196858e-01 3.85154843e-01 3.29468280e-01 ... 4.66013420e-03 -1.02236435e-01 9.25876107e-03] [-8.36854726e-02 4.78536040e-01 4.81396854e-01 ... 6.67417943e-01 -9.58529562e-02 2.22305670e-01] [ 4.74972039e-01 4.98478293e-01 1.62739694e-01 ... 1.02634478e+00 -2.58283824e-01 4.39636827e-01]] ... [[ 5.67087196e-02 -3.54966938e-01 -3.92163187e-01 ... 8.11383948e-02 -2.97137201e-01 -1.37655124e-01] [ 4.14050817e-01 -5.57012022e-01 3.86744410e-01 ... 
6.65087223e-01 3.34908038e-01 4.84037638e-01] [-1.48129016e-01 7.26878941e-01 5.86274981e-01 ... 7.84642771e-02 1.72904376e-02 -2.39768326e-01] ... [ 3.58663440e-01 7.71880373e-02 5.54461122e-01 ... 3.91344786e-01 3.14142823e-01 2.12431669e-01] [ 3.12888056e-01 -2.89270401e-01 -2.41428837e-01 ... 3.68850261e-01 1.96767170e-02 -1.23219588e-03] [ 3.27802300e-01 4.71050084e-01 -4.91510808e-01 ... -6.63320273e-02 2.02640980e-01 5.93381226e-01]] [[-7.36965716e-01 1.19331986e-01 2.67400801e-01 ... -4.97134849e-02 -3.76063436e-01 -5.19778393e-02] [ 2.04898998e-01 3.25401217e-01 5.57122566e-02 ... 2.95009334e-02 5.79769135e-01 -2.52867788e-01] [ 5.67312956e-01 -2.29288533e-01 -7.84218088e-02 ... 1.17535420e-01 -1.98962644e-01 1.98404804e-01] ... [ 4.82514203e-01 6.40916675e-02 4.91365075e-01 ... 2.85560966e-01 4.17940557e-01 -2.22063646e-01] [ 6.95481718e-01 1.24834187e-01 7.14089096e-01 ... 1.96031883e-01 -6.51589856e-02 6.81637108e-01] [-5.25797427e-01 5.27391434e-01 6.76884577e-02 ... -1.82451010e-01 5.15819907e-01 -2.90760547e-01]] [[-1.92015097e-01 4.78091598e-01 -2.10024744e-01 ... 3.41620266e-01 1.59053445e-01 7.61037841e-02] [ 2.16870561e-01 -4.88092273e-01 3.73314351e-01 ... 4.19039100e-01 2.75591195e-01 -8.52002800e-02] [ 4.13651109e-01 3.45708267e-03 1.87078014e-01 ... 5.01136303e-01 -1.01725936e-01 -3.04834992e-02] ... [ 1.36822358e-01 -5.51507175e-01 3.16169322e-01 ... 3.19715053e-01 6.99100196e-01 1.28256427e-02] [-5.74303679e-02 4.43879426e-01 3.06786993e-03 ... 5.88569567e-02 5.93300462e-01 2.83577055e-01] [ 2.20448405e-01 -5.83343446e-01 -2.82628369e-02 ... -1.24195762e-01 4.28780198e-01 5.54614849e-02]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 5} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': False} ] | 0.11 | |
|
----------------------------- Captured stdout call ----------------------------- 5 graph(%self : __torch__.test_group_norm.___torch_mangle_4626.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %6 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=1.0000000000000001e-05]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.6783 -0.3670 -1.3357 -0.2391 1.1118 0.5472 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %self.n_groups, %self.bias, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%5, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %6) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) fw_re: [[[[[ 1.65789649e-01 -1.09888695e-01 2.77977258e-01 ... 9.57349539e-01 -2.27555886e-01 2.22250193e-01] [ 1.18320870e+00 -7.09188804e-02 -9.92729440e-02 ... 
-1.73912311e+00 1.14725447e+00 -4.24209714e-01] [-3.01656902e-01 -1.79531649e-02 3.33219588e-01 ... 7.86144495e-01 7.81559408e-01 9.31669176e-01] ... [ 1.17569923e+00 4.11256343e-01 6.62222803e-01 ... 8.66148949e-01 -3.00537467e-01 1.20657539e+00] [ 1.42817572e-01 2.62826622e-01 -1.03050864e+00 ... -2.43533567e-01 1.25711238e+00 1.15654416e-01] [-1.02708054e+00 1.42100146e-02 -1.41298866e+00 ... -1.05872369e+00 -3.48119467e-01 3.71188042e-04]] [[ 2.81412870e-01 4.96586978e-01 -1.86357349e-01 ... -7.45722592e-01 -1.40632367e+00 3.09296548e-01] [-3.30428392e-01 -5.87576985e-01 -2.55916923e-01 ... 6.86863391e-03 5.61143123e-02 9.52743828e-01] [-6.01332366e-01 -4.91563529e-01 -9.63299870e-02 ... 1.85033157e-01 7.14891016e-01 3.33459616e-01] ... [-3.27612191e-01 1.04859078e+00 2.32126102e-01 ... 1.38105184e-01 -3.80087495e-01 5.10078788e-01] [ 2.72453219e-01 -2.18360215e-01 -3.23300660e-01 ... 2.14633763e-01 -2.66426027e-01 1.62541375e-01] [ 6.65873110e-01 1.83126003e-01 -5.21617606e-02 ... 9.23554003e-02 -9.89757478e-01 -5.13689756e-01]] [[-6.22672260e-01 -1.41087043e+00 1.93531036e-01 ... 3.54572803e-01 1.76517338e-01 -8.05484831e-01] [-9.01860073e-02 -3.31559747e-01 -9.45914164e-02 ... 6.62365079e-01 1.24184299e+00 -1.82157457e-01] [-3.78793061e-01 -7.72214115e-01 3.97931963e-01 ... -1.83408821e+00 -1.09514415e+00 -5.92513010e-02] ... [ 4.44454730e-01 2.55902827e-01 6.54996336e-01 ... 1.45608395e-01 1.09895241e+00 8.99601698e-01] [-2.90997893e-01 -9.26956475e-01 -3.98696035e-01 ... 4.32820261e-01 -8.13502610e-01 9.34061646e-01] [-6.53152645e-01 4.62284923e-01 1.06470823e+00 ... -9.51309204e-01 5.80890596e-01 1.23040831e+00]] ... [[ 8.98027956e-01 7.44570851e-01 2.39420265e-01 ... -6.65511563e-02 -1.17855859e+00 6.04013428e-02] [ 2.29927540e-01 5.93240142e-01 1.30229115e+00 ... -3.90503526e-01 1.87345184e-02 9.50550854e-01] [ 1.87826961e-01 -4.79090720e-01 8.74404192e-01 ... -9.90510345e-01 -1.61024618e+00 -9.75879610e-01] ... 
[ 2.60639817e-01 -1.85271740e-01 -7.55288005e-02 ... 3.47209871e-01 -2.18848899e-01 -4.84290540e-01] [ 9.20082450e-01 7.09708929e-01 -5.83248496e-01 ... -2.80500889e-01 -1.90626532e-01 2.32673123e-01] [ 7.16265678e-01 6.75645292e-01 -2.49701142e-01 ... 1.13155246e+00 3.93995084e-02 -1.94417462e-01]] [[-5.71530759e-01 7.10468352e-01 1.81621373e-01 ... -1.36080086e-01 -3.29837412e-01 -3.86472136e-01] [ 1.29157737e-01 1.78391963e-01 -1.66532469e+00 ... 2.42296141e-02 -5.55029929e-01 1.06570315e+00] [ 8.91355693e-01 2.16065068e-03 -9.29634571e-01 ... 4.30240422e-01 -1.29938200e-01 -2.25433540e-02] ... [ 6.23322666e-01 -1.74100146e-01 1.36513567e+00 ... 4.88251328e-01 -3.78783382e-02 5.95149815e-01] [-2.09330529e-01 9.61169004e-01 6.42502367e-01 ... -5.56315482e-01 -1.05345599e-01 -3.63305539e-01] [-3.58324856e-01 -9.68563080e-01 -1.33500829e-01 ... 9.41101685e-02 6.27376854e-01 2.42880568e-01]] [[-8.69802475e-01 -9.94953036e-01 1.74416363e+00 ... 8.56002331e-01 -2.33641624e-01 5.24998546e-01] [ 3.11695635e-01 2.80949861e-01 -1.79301232e-01 ... 1.74078798e+00 3.86940874e-02 8.05264652e-01] [-8.30058694e-01 -1.19606905e-01 -3.60052377e-01 ... 6.33280039e-01 1.81726784e-01 7.62275517e-01] ... [ 1.55513007e-02 4.98803258e-01 9.78171885e-01 ... 4.39661503e-01 6.00617051e-01 2.79767334e-01] [-6.24469519e-01 -4.44007725e-01 -2.13411659e-01 ... 2.04978228e-01 1.21713979e-02 4.79506344e-01] [ 1.21650612e+00 9.38823968e-02 1.35899901e-01 ... 7.29446933e-02 9.67586398e-01 1.42529404e+00]]] [[[-1.59431875e-01 -2.19249442e-01 -5.49933761e-02 ... -8.27285051e-02 -7.60570243e-02 -3.37405264e-01] [-5.11619262e-02 -4.93281543e-01 2.51450926e-01 ... 4.54396009e-01 -4.77118969e-01 -6.94979966e-01] [ 2.31588289e-01 -2.96884596e-01 -4.38889354e-01 ... -6.17052056e-02 1.67273566e-01 2.61174500e-01] ... [-1.13888942e-01 4.02885154e-02 2.01561838e-01 ... -7.02930167e-02 -1.41286254e-02 2.57904734e-02] [ 1.61362886e-01 -4.68165636e-01 -1.46335661e-01 ... 
-1.23356096e-01 -2.93079704e-01 1.90501893e-03] [ 5.56258678e-01 -2.35686079e-01 2.41171494e-01 ... -2.85276920e-01 2.70308018e-01 -6.81410730e-03]] [[-2.15308085e-01 2.72571892e-01 5.31795144e-01 ... -6.62545264e-01 -1.00079224e-01 -1.04692176e-01] [ 2.21683502e-01 -1.81352973e-01 -7.80102193e-01 ... 2.62408197e-01 1.33042810e-02 5.37424207e-01] [ 3.90377432e-01 3.34611312e-02 3.59492868e-01 ... -2.70787477e-01 2.92283297e-01 2.99344331e-01] ... [-3.69170904e-02 -7.54258633e-02 1.28097579e-01 ... -4.86588627e-02 1.02510322e-02 1.63317740e-01] [ 4.08140212e-01 -7.10367084e-01 4.67724472e-01 ... -8.68405044e-01 -1.97741002e-01 -2.69349992e-01] [ 3.70175928e-01 -1.96062371e-01 -6.75539672e-01 ... -6.21282458e-02 1.26215130e-01 -2.39707157e-01]] [[ 1.99788839e-01 4.40445006e-01 2.68871486e-01 ... -2.16265023e-01 3.40326697e-01 -2.57080555e-01] [-6.71470702e-01 4.99329567e-02 -5.72520256e-01 ... 2.37949684e-01 3.71859521e-01 5.89637272e-02] [-5.62568009e-01 -2.87528098e-01 1.64865151e-01 ... -6.61922172e-02 -4.06235317e-03 2.04378158e-01] ... [ 6.46366119e-01 -5.15321612e-01 4.07016367e-01 ... 3.05623990e-02 7.34813929e-01 3.24954391e-01] [ 4.73169327e-01 -6.38262570e-01 -2.18789920e-01 ... 1.60017192e-01 1.00547008e-01 -2.41483808e-01] [ 3.97819430e-01 1.56503275e-01 -2.67318152e-02 ... 6.33365452e-01 -3.46826553e-01 -4.51869696e-01]] ... [[ 4.84945208e-01 1.67206749e-01 -7.26598920e-03 ... -5.49631655e-01 9.21380371e-02 2.48145416e-01] [-2.43335828e-01 -8.81121829e-02 -3.43005955e-01 ... -2.63822734e-01 -3.07315618e-01 -1.36577606e-01] [ 3.51924337e-02 9.39735547e-02 3.83394003e-01 ... 1.08806752e-01 2.97624975e-01 -7.63584860e-04] ... [-6.64419085e-02 1.32065624e-01 -3.53297114e-01 ... 1.50680631e-01 -5.08860588e-01 3.90385598e-01] [-1.20474726e-01 2.05926463e-01 -5.98596573e-01 ... -3.93650979e-01 -2.85399765e-01 -4.19766396e-01] [ 1.06748320e-01 7.52719343e-01 -1.22486293e-01 ... 
3.32219809e-01 2.90917084e-02 -2.97256649e-01]] [[-3.08463097e-01 -2.54587620e-01 1.36583567e-01 ... -4.33309972e-01 -1.32563794e-02 -2.80905038e-01] [ 5.28030209e-02 7.88383424e-01 -6.72985792e-01 ... 1.15969986e-01 6.48152223e-03 -2.23463088e-01] [-5.40437968e-03 1.02093415e-02 -2.66899578e-02 ... -2.13421918e-02 5.15556037e-01 -3.52864355e-01] ... [ 3.96945745e-01 -4.38637018e-01 1.33965731e-01 ... 1.58161834e-01 -8.56630981e-01 4.31824267e-01] [-2.27768078e-01 -5.02071306e-02 -5.93037829e-02 ... 4.13649648e-01 1.67062834e-01 -4.49217498e-01] [ 1.61635831e-01 -2.12008685e-01 3.89838994e-01 ... -7.44758993e-02 -4.51045543e-01 -1.17977701e-01]] [[ 6.07792139e-01 2.29895294e-01 -7.23972395e-02 ... 4.00295965e-02 -1.50016516e-01 1.05723165e-01] [-5.43438256e-01 -3.65379989e-01 7.29778111e-02 ... 2.71752208e-01 5.91678023e-01 -7.04015672e-01] [-2.29144201e-01 -3.43075544e-01 -1.30129442e-01 ... -3.03259820e-01 3.22121412e-01 7.16248155e-01] ... [-1.66319460e-02 -8.00284803e-01 -3.84200364e-02 ... -7.90255964e-02 3.36377412e-01 2.82934487e-01] [ 1.13074869e-01 -1.83440074e-01 2.45382011e-01 ... -4.49841499e-01 -2.69199520e-01 -2.11899862e-01] [ 6.88154578e-01 4.72156793e-01 6.00071289e-02 ... 2.94086218e-01 -1.18298167e-02 9.79176164e-01]]] [[[ 7.70581543e-01 1.77735135e-01 1.41866601e+00 ... 1.69659722e+00 2.16968203e+00 -4.45668131e-01] [-3.42064798e-02 -1.61014044e+00 1.02912748e+00 ... 5.35043655e-03 1.60598540e+00 -5.11479437e-01] [ 1.20908272e+00 3.00783181e+00 -2.93733883e+00 ... 2.34214354e+00 2.43164182e+00 6.28090501e-01] ... [-3.11858892e+00 1.07703841e+00 -8.82474899e-01 ... 1.61217821e+00 4.32172008e-02 2.11787558e+00] [-2.21921086e+00 -1.67785370e+00 2.45259738e+00 ... 3.25780582e+00 -1.51807153e+00 -5.24972022e-01] [ 1.00178123e+00 -3.02828431e-01 -1.16869140e+00 ... 1.48711407e+00 -2.52842855e+00 -4.97223884e-02]] [[-2.85025883e+00 -3.32054406e-01 2.04877481e-01 ... 
-2.14402342e+00 3.31158549e-01 -3.57113051e+00] [ 1.39285767e+00 -1.15203118e+00 -2.59376740e+00 ... 2.85467029e-01 -1.28764641e+00 6.00600898e-01] [-1.43276215e+00 -1.25886250e+00 -1.28196263e+00 ... 9.39027071e-01 2.98862100e-01 -1.05968690e+00] ... [-8.65854204e-01 -1.08637106e+00 1.71085969e-01 ... 2.15401387e+00 -4.05410737e-01 1.41527310e-01] [-1.15907836e+00 1.39393076e-01 -2.20253304e-01 ... 8.05894956e-02 6.89670324e-01 -4.25022066e-01] [-1.67844281e-01 1.51650691e+00 -1.11224639e+00 ... -1.68320298e-01 3.47856551e-01 6.98130906e-01]] [[ 2.42096916e-01 -9.31885839e-01 8.08471441e-01 ... -5.17353952e-01 -6.27596259e-01 -1.71063292e+00] [-1.20061147e+00 8.81304264e-01 9.68558729e-01 ... -5.56797743e-01 -1.61867321e+00 -2.22900912e-01] [ 9.03200746e-01 -7.26953566e-01 -3.41659880e+00 ... -1.09199870e+00 -7.19042361e-01 2.54369438e-01] ... [ 1.02006264e-01 1.38945401e+00 6.41875982e-01 ... 1.49062037e+00 -1.22660697e+00 -1.53716898e+00] [-2.48039551e-02 -6.12237692e-01 -2.24013567e-01 ... 1.32863212e+00 -2.21746564e+00 9.60410476e-01] [-2.51187056e-01 -1.00163013e-01 7.55027652e-01 ... 4.67565805e-01 1.77054536e+00 2.22015762e+00]] ... [[-5.76632738e-01 1.76204264e+00 -1.43979341e-01 ... 1.34181723e-01 2.81020969e-01 4.12428901e-02] [-3.50641400e-01 -5.58435261e-01 -3.23539078e-01 ... 9.22872663e-01 -4.55909878e-01 -1.55951774e+00] [ 1.80773652e+00 6.34593964e-01 9.97811258e-02 ... 3.86630774e-01 4.73391682e-01 1.98078978e+00] ... [ 6.67847455e-01 1.86689699e+00 1.19293523e+00 ... -5.79289079e-01 -8.33318308e-02 4.61481422e-01] [-6.14161134e-01 -1.47751987e+00 -2.96152800e-01 ... 5.87960899e-01 2.12120318e+00 -2.80101299e-01] [-7.10480690e-01 -2.36159921e+00 -4.76071024e+00 ... 2.72187173e-01 -9.63416755e-01 -1.02295065e+00]] [[-1.28833425e+00 -8.11499596e-01 -5.40243268e-01 ... -2.09873080e+00 -1.92273688e+00 -1.13278043e+00] [ 5.20224452e-01 -1.25162220e+00 2.89705110e+00 ... 
-2.73371249e-01 -1.94228542e+00 -1.80433166e+00] [ 1.28655434e+00 -2.89973927e+00 -2.99988985e-01 ... -4.33750823e-02 -1.27990949e+00 -9.87472057e-01] ... [ 9.98564303e-01 1.58747450e-01 5.01157284e-01 ... -3.70302498e-01 -1.50691342e+00 -1.07331015e-01] [ 1.98445308e+00 5.54540217e-01 2.22090483e+00 ... -3.10524344e-01 2.90286875e+00 -1.57194972e+00] [ 2.85267138e+00 2.55873346e+00 3.17530179e+00 ... -1.92958713e+00 6.04537308e-01 8.84658575e-01]] [[ 1.82524967e+00 1.76442814e+00 1.41565716e+00 ... -8.00634742e-01 -6.05273724e-01 6.98339581e-01] [ 1.42725229e+00 1.79555789e-01 -2.50670671e+00 ... 2.04550222e-01 -6.18329823e-01 1.02882743e+00] [-1.27949381e+00 9.20664608e-01 -2.37657219e-01 ... 3.10143661e+00 -5.79362273e-01 -2.59338832e+00] ... [ 1.64493299e+00 2.08366722e-01 3.23646218e-01 ... 6.77092560e-03 7.55302370e-01 5.17152488e-01] [ 7.82413006e-01 4.85164344e-01 -1.58217835e+00 ... 2.31022215e+00 3.07104826e+00 9.21375006e-02] [ 1.19053447e+00 2.14882112e+00 -2.15644503e+00 ... 9.37789679e-01 -1.12246573e+00 -4.24451530e-01]]] [[[-1.65070474e-01 1.12326644e-01 2.03846797e-01 ... -6.15581051e-02 2.35294536e-01 2.01008350e-01] [ 5.02530532e-03 2.44545471e-02 -3.38036381e-02 ... 3.35492432e-01 -1.22031413e-01 1.64347604e-01] [ 3.07790227e-02 5.01753628e-01 -8.16379786e-02 ... -4.41538393e-01 -2.95608103e-01 -9.74252671e-02] ... [-2.45586466e-02 9.83708575e-02 2.52714306e-01 ... -1.77405208e-01 -2.45017797e-01 -1.34908453e-01] [-1.06772430e-01 -3.44494842e-02 5.86392999e-01 ... 2.69007772e-01 -1.97292745e-01 -8.41800347e-02] [ 2.34700769e-01 6.93282411e-02 -2.90250611e-02 ... 1.30398825e-01 9.46605355e-02 -1.82367936e-01]] [[-2.67023534e-01 6.16660491e-02 1.62849814e-01 ... 8.22382942e-02 2.40714446e-01 2.07168292e-02] [ 5.49751997e-01 -1.09060563e-01 -3.91067984e-03 ... -2.83626229e-01 2.73644835e-01 9.93169397e-02] [-4.81396884e-01 -4.54439828e-03 -1.44227982e-01 ... -3.36625814e-01 2.90033221e-01 1.62894100e-01] ... 
[-4.32420760e-01 1.07618295e-01 1.78751834e-02 ... -1.72780722e-01 3.74587208e-01 -4.28740263e-01] [ 2.99954772e-01 4.53137636e-01 1.91343904e-01 ... 3.17205071e-01 1.27553046e-01 -1.60105266e-02] [-5.57820573e-02 1.12570748e-01 -2.92372525e-01 ... -1.40197858e-01 -2.26966679e-01 6.35848492e-02]] [[-1.76837504e-01 7.46614859e-02 2.32494056e-01 ... -1.80173256e-02 1.55422419e-01 8.49979818e-02] [ 1.56904697e-01 -4.35012639e-01 4.48407419e-02 ... 1.07911952e-01 -2.91446835e-01 -3.75493705e-01] [-2.82254326e-03 -9.64719802e-02 -7.33183771e-02 ... 1.03604957e-01 3.84890325e-02 4.31257367e-01] ... [-1.90604806e-01 -2.97489196e-01 -2.79399574e-01 ... -2.93010712e-01 -3.94245774e-01 1.60692148e-02] [ 6.80770772e-03 -8.89333189e-02 -1.09365925e-01 ... -3.19233388e-01 -9.74994525e-02 2.72629678e-01] [ 5.77439032e-02 -1.59076378e-02 -7.37109184e-02 ... -2.22425699e-01 4.93648916e-01 5.40437102e-01]] ... [[ 1.93156973e-01 4.62904833e-02 1.27393469e-01 ... 3.29015285e-01 -9.70660374e-02 -1.74556509e-01] [-1.37674987e-01 -1.29399255e-01 2.62777060e-01 ... -3.67240131e-01 -2.18114510e-01 2.45913282e-01] [ 3.13484997e-01 7.49334931e-01 -9.72578228e-02 ... 1.67124733e-01 2.03536347e-01 8.15774053e-02] ... [ 2.43366450e-01 -6.09174408e-02 -3.65064442e-01 ... 4.86467361e-01 1.81309506e-01 3.84405144e-02] [ 5.51322242e-03 3.47260296e-01 1.68197919e-02 ... -2.03730371e-02 -7.47566149e-02 -5.83318584e-02] [-6.33652732e-02 1.31174857e-02 -7.79114664e-02 ... -7.64906257e-02 -2.43284985e-01 5.66713631e-01]] [[ 8.89514387e-02 3.66206765e-02 4.08368111e-01 ... 4.47973423e-02 6.72188252e-02 4.27017324e-02] [ 1.02833346e-01 7.79861510e-02 3.34583819e-01 ... -1.12657942e-01 -2.50199944e-01 -5.12713969e-01] [-4.71466146e-02 -4.37645726e-02 -5.67557383e-03 ... -1.05650667e-02 -1.85142890e-01 -3.02376458e-03] ... [ 3.48158628e-02 1.79173842e-01 -3.22940797e-02 ... 4.98861633e-02 1.02231681e-01 -2.32210562e-01] [-4.31363344e-01 1.85825691e-01 2.91925669e-01 ... 
-2.40182459e-01 -1.46352008e-01 -1.28153220e-01] [ 3.15116942e-01 3.66751812e-02 -4.44002897e-01 ... 1.02097876e-01 8.40323120e-02 3.57122838e-01]] [[ 1.45717099e-01 -1.79024562e-01 -3.03892314e-01 ... 2.58312553e-01 -1.03369370e-01 -1.15868419e-01] [ 3.33716303e-01 1.81383654e-01 1.86365042e-02 ... 3.17014188e-01 2.46520504e-01 3.41624022e-02] [ 2.37450376e-02 -1.67274147e-01 -1.18863001e-01 ... -3.34801704e-01 -1.95987388e-01 1.85464457e-01] ... [-2.79346973e-01 2.61422426e-01 -2.07632706e-01 ... 2.31472418e-01 2.61667848e-01 -1.72576129e-01] [ 1.58168286e-01 -3.87987751e-03 9.48898718e-02 ... 3.47119778e-01 2.98114955e-01 1.79686457e-01] [-2.42132336e-01 9.62397754e-02 -2.12172821e-01 ... 4.57620442e-01 -2.55234957e-01 1.51500732e-01]]] [[[ 4.36480436e-03 -2.12467149e-01 -1.15837288e+00 ... -8.53416860e-01 8.21278453e-01 -1.10305560e+00] [-2.81686842e-01 6.49235845e-01 -1.04316080e+00 ... 1.03880689e-01 -8.01542342e-01 -1.80123878e+00] [-8.13941002e-01 5.20633042e-01 -1.63187325e-01 ... -2.62668282e-01 7.77969003e-01 7.29685843e-01] ... [-9.57676828e-01 -1.77167237e-01 1.67121947e+00 ... 1.26534688e+00 1.37420344e+00 9.50827658e-01] [ 1.26802862e+00 -9.73967612e-02 -3.08486909e-01 ... -7.97311306e-01 -1.30341554e+00 -1.28540111e+00] [ 1.52119637e-01 4.71026033e-01 2.30891541e-01 ... -4.41222757e-01 -9.13750827e-01 -6.82822466e-01]] [[ 1.85349691e+00 4.48927969e-01 1.23641968e+00 ... 9.66562808e-01 -1.04296613e+00 -5.02768695e-01] [-1.31084740e+00 9.90361154e-01 -1.34148264e+00 ... -2.01339698e+00 2.43855014e-01 2.21605420e+00] [-5.50556362e-01 -1.58161879e+00 -1.17168486e+00 ... 9.36092615e-01 -2.18939161e+00 -1.31714094e+00] ... [-9.30343926e-01 1.68411124e+00 -1.00276220e+00 ... 3.83685052e-01 -9.37156379e-01 2.19815183e+00] [ 2.79309928e-01 7.84719646e-01 6.19196184e-02 ... -6.14237487e-01 3.53111327e-01 -1.28857887e+00] [-2.46679619e-01 -3.10756356e-01 -1.12126422e+00 ... 
3.75662357e-01 9.80314374e-01 -5.13885319e-02]] [[ 6.62239552e-01 1.89738536e+00 -2.17800573e-01 ... 5.27771235e-01 1.14397955e+00 4.38592345e-01] [ 2.45458093e-02 8.49622309e-01 5.77251375e-01 ... -1.59605515e+00 -9.98795033e-01 -1.52421677e+00] [ 9.99285877e-01 1.47331285e+00 -3.17164361e-01 ... 3.18824738e-01 7.40505695e-01 5.48373699e-01] ... [-1.66359380e-01 1.01732433e+00 -2.97351629e-01 ... -2.23007965e+00 -6.44750714e-01 3.88004988e-01] [ 5.83638489e-01 9.67752039e-01 -9.40301955e-01 ... 6.02440275e-02 3.60655725e-01 2.11278176e+00] [ 3.17540914e-01 5.79778135e-01 4.31389548e-02 ... -5.02996892e-02 -1.98508859e-01 3.82631242e-01]] ... [[-3.12590562e-02 4.69900012e-01 -7.04543710e-01 ... -7.09869444e-01 1.37021887e+00 2.32519060e-01] [ 3.10666382e-01 -2.11910009e+00 1.27989876e+00 ... -1.81844568e+00 -2.08209082e-01 -7.73922086e-01] [ 6.00300618e-02 9.60201681e-01 7.26013184e-01 ... -2.22366638e-02 3.27173740e-01 2.56231475e+00] ... [ 3.14126700e-01 1.70778072e+00 -9.59370077e-01 ... 1.31304514e+00 4.56385225e-01 -4.00070548e-01] [ 8.39081347e-01 6.59689486e-01 2.03706121e+00 ... 7.50057757e-01 6.49638355e-01 1.13793600e+00] [ 2.83495098e-01 -1.77117693e+00 6.26630783e-02 ... -9.09675002e-01 2.11285558e-02 4.04224806e-02]] [[ 8.17294419e-01 -3.51291388e-01 -3.65445912e-01 ... 6.36911273e-01 5.69926985e-02 2.66376287e-01] [-1.10054977e-01 4.59269673e-01 -4.61433351e-01 ... 2.75217980e-01 -1.60216212e+00 -1.98525417e+00] [ 3.16367793e+00 2.22491932e+00 -4.90177095e-01 ... -1.35691082e+00 1.31888106e-01 3.51138294e-01] ... [-6.04922712e-01 2.61048555e-01 6.01270020e-01 ... 1.14379489e+00 4.08219278e-01 1.01489675e+00] [ 1.21874845e+00 1.14737689e+00 -3.73069704e-01 ... 2.11212173e-01 2.50117207e+00 -2.27633536e-01] [-1.53171408e+00 6.60957396e-02 9.82376277e-01 ... -1.13750152e-01 1.37082028e+00 1.24755621e+00]] [[ 2.26206589e+00 9.35975850e-01 1.66826427e+00 ... -1.17660546e+00 6.41734958e-01 2.93542027e+00] [-2.95798016e+00 -2.02223921e+00 1.58216858e+00 ... 
-1.70104146e-01 2.51744509e-01 -4.83907878e-01] [ 9.43522036e-01 -9.78533566e-01 -3.62555528e+00 ... -3.09584886e-01 -7.34945774e-01 -8.87845337e-01] ... [ 1.45934904e+00 -5.00649869e-01 -2.81032503e-01 ... 1.18694708e-01 -1.03161462e-01 -4.67488647e-01] [-1.12180674e+00 4.95887011e-01 -1.69562846e-01 ... -3.42931837e-01 6.25830173e-01 8.93744051e-01] [ 4.01876748e-01 5.89371324e-01 1.08232427e+00 ... -1.98459625e-01 2.79344946e-01 -6.28291667e-01]]] [[[ 2.17633620e-01 -4.54444408e-01 2.99600273e-01 ... 3.53393018e-01 1.53171003e-01 -2.86146194e-01] [-6.42919719e-01 3.85868311e-01 5.27374685e-01 ... 6.73800826e-01 -1.19675541e+00 -1.09529346e-02] [ 1.72212407e-01 -7.06134081e-01 3.93548235e-02 ... -2.38660008e-01 -6.25356197e-01 -3.29875827e-01] ... [-6.71754360e-01 1.26696670e+00 -6.41566992e-01 ... 5.85919619e-01 9.56734717e-02 -5.59079573e-02] [ 3.46346259e-01 3.78405005e-01 -4.16558295e-01 ... -1.15969919e-01 -2.64204115e-01 2.25850880e-01] [ 7.35167742e-01 1.67103767e-01 -8.63076970e-02 ... 1.10797517e-01 -5.41142583e-01 -9.49245393e-01]] [[-7.75046408e-01 -5.08087814e-01 -3.03395689e-01 ... 5.71862698e-01 -2.91427940e-01 7.38469958e-01] [ 2.15061665e-01 8.71069059e-02 -5.09222686e-01 ... 6.63941085e-01 1.18730620e-01 -2.10184321e-01] [ 7.73735404e-01 -7.89652765e-02 -4.77806538e-01 ... -1.30698800e-01 -3.23821902e-02 -1.93629101e-01] ... [ 5.10970294e-01 -3.50584835e-02 5.99001721e-02 ... 1.39625520e-01 -2.28428230e-01 4.36947405e-01] [-9.72495303e-02 -1.12546653e-01 -9.92849469e-02 ... -6.70555174e-01 3.04648072e-01 7.77896610e-04] [-5.12490988e-01 1.07753277e-01 -8.04305911e-01 ... -4.76332933e-01 1.67258322e-01 -8.63678932e-01]] [[ 5.48703194e-01 5.77797890e-01 1.59142599e-01 ... -4.55721140e-01 -5.87713957e-01 -8.61133516e-01] [-7.96017766e-01 -2.18975291e-01 -1.69147804e-01 ... 2.81562358e-01 -7.38952875e-01 1.07267462e-02] [ 1.69971988e-01 -3.12509567e-01 -5.14571108e-02 ... 3.33152860e-01 -7.32150018e-01 3.54777336e-01] ... 
[-4.53001797e-01 1.00758404e-01 2.71971878e-02 ... -5.33503711e-01 -2.92696297e-01 -8.04876328e-01] [ 5.02865732e-01 1.90450139e-02 -2.41948903e-01 ... 1.89561695e-01 1.75392315e-01 -7.42635429e-01] [-1.00077622e-01 -2.88327038e-01 -8.94426107e-01 ... 7.85725772e-01 3.56332630e-01 -9.98926759e-01]] ... [[ 1.03115171e-01 -6.32048965e-01 6.50606379e-02 ... -2.68941462e-01 1.08022952e+00 -1.24471712e+00] [-1.03125371e-01 -1.15644140e-02 9.35717225e-01 ... -1.15618134e+00 5.15361965e-01 1.04073450e-01] [-3.17775071e-01 -1.01699375e-01 -9.81050551e-01 ... -5.26032984e-01 -1.17196250e-04 -4.65059355e-02] ... [ 5.83714068e-01 3.53053063e-01 -5.77373385e-01 ... -2.18128897e-02 -5.22815526e-01 6.08540058e-01] [-4.38933849e-01 1.07097208e+00 2.28656381e-01 ... 7.34323978e-01 -4.96805102e-01 7.00976513e-03] [ 8.22870806e-02 1.95281282e-01 3.83629575e-02 ... -6.90903068e-02 -7.31465295e-02 -2.07326904e-01]] [[ 4.28501010e-01 6.07506752e-01 4.33931738e-01 ... 5.81294894e-01 2.46807545e-01 -1.06884912e-01] [-1.01627726e-02 -6.13430798e-01 2.64618993e-01 ... -5.22889435e-01 -2.40204066e-01 -6.78600132e-01] [ 6.16706491e-01 -2.22108543e-01 1.17787234e-01 ... -1.25687644e-01 -1.50081813e-01 -5.49756110e-01] ... [-2.90477753e-01 -1.54516578e-01 -1.06482410e+00 ... -2.59798676e-01 -7.39902332e-02 3.33092391e-01] [-9.48503166e-02 -4.17379737e-02 -2.08271697e-01 ... 1.38027906e-01 2.56511778e-01 -1.22091584e-01] [-7.95705676e-01 -5.49477696e-01 -1.05523610e+00 ... -1.14621401e+00 6.25887334e-01 3.06884587e-01]] [[-7.44749382e-02 5.33993393e-02 2.62045205e-01 ... -6.11557007e-01 4.65123624e-01 6.93755865e-01] [-1.06736319e-02 -8.49157214e-01 9.99829590e-01 ... -5.29093027e-01 1.37638986e+00 7.35809267e-01] [ 6.25370026e-01 -7.61519521e-02 2.05305398e-01 ... 7.99856663e-01 -7.58016407e-01 -3.51964086e-01] ... [-1.90809652e-01 2.94450402e-01 -5.09104133e-01 ... -2.02918917e-01 2.55790055e-01 9.82337892e-01] [-6.40051346e-03 1.08393097e+00 4.59389418e-01 ... 
5.96726000e-01 2.88234472e-01 -3.71880352e-01] [ 6.59396887e-01 7.42068112e-01 3.87623549e-01 ... 6.67136386e-02 -6.06516898e-01 1.14472687e-01]]]] [[[[-2.77672559e-01 6.39708400e-01 -3.43093514e-01 ... 2.29386732e-01 5.50697327e-01 5.23450434e-01] [ 1.19065732e-01 2.66178370e-01 -2.70744771e-01 ... -5.28766066e-02 7.29715049e-01 -7.97238946e-01] [ 2.39982963e-01 1.57101440e+00 4.08094317e-01 ... -5.49725533e-01 -3.67277920e-01 3.20467919e-01] ... [-3.20902884e-01 9.93901670e-01 -3.39906514e-01 ... -1.43856451e-01 2.24531904e-01 -3.09095114e-01] [ 1.60847461e+00 -6.89658582e-01 4.05755430e-01 ... 3.87992948e-01 -5.10929227e-01 2.02007008e+00] [-5.22791982e-01 -7.56517723e-02 8.60137641e-01 ... -5.56909144e-01 9.50197995e-01 -8.34793568e-01]] [[-4.99431610e-01 -2.02264294e-01 -5.44148266e-01 ... -6.75826907e-01 1.87699243e-01 1.25053907e-02] [-1.16431087e-01 -1.19741011e+00 7.01476276e-01 ... -1.09660947e+00 -1.10660359e-01 4.06035520e-02] [ 8.62785876e-02 -5.07936180e-01 -9.95080322e-02 ... -6.33425057e-01 -1.31799805e+00 -5.43942630e-01] ... [-2.58201271e-01 -1.22695279e+00 7.05786645e-01 ... -1.61294997e-01 -5.82424819e-01 7.42169991e-02] [-3.33425283e-01 4.79889631e-01 5.92532635e-01 ... 6.22096777e-01 -8.44861567e-01 -3.14485192e-01] [ 1.41941059e+00 6.38976157e-01 9.43444297e-02 ... 1.25263548e+00 -1.68197043e-02 1.13457453e+00]] [[-2.19625756e-01 -2.56532282e-01 1.27626050e+00 ... 3.33175153e-01 -9.15578604e-02 1.07961786e+00] [ 1.62748665e-01 -1.44813967e+00 4.01285350e-01 ... 3.84835154e-01 -4.74922121e-01 -5.74265540e-01] [-6.89387470e-02 -5.27508855e-01 1.07992065e+00 ... -5.16204596e-01 -7.52861619e-01 -5.39135784e-02] ... [ 1.06739676e+00 1.01878112e-02 -6.91498995e-01 ... 7.23752975e-01 1.13564098e+00 4.96281505e-01] [-7.88636744e-01 1.28103852e-01 1.98230430e-01 ... 5.78337610e-01 1.13597417e+00 8.10982525e-01] [ 8.37263227e-01 2.94959009e-01 1.00985259e-01 ... 8.05251226e-02 7.14576185e-01 -1.31502211e-01]] ... 
[[-4.67972070e-01 8.94296348e-01 2.13733286e-01 ... -2.22316638e-01 -4.04757522e-02 -1.67365715e-01] [ 8.05978954e-01 -1.53381124e-01 1.82972625e-02 ... -3.41088086e-01 1.00383446e-01 -7.88878083e-01] [ 1.09483693e-02 2.03031003e-01 9.02836770e-02 ... 1.70065522e+00 1.12329066e+00 -1.01712084e+00] ... [ 6.17700815e-01 -5.53274632e-01 3.86665016e-01 ... 7.04597235e-01 -1.14547394e-01 2.55101204e-01] [ 1.53219908e-01 -7.85984099e-01 -5.52791238e-01 ... 1.00307494e-01 -1.54006884e-01 8.07269931e-01] [ 9.65129912e-01 2.58685857e-01 1.13409579e+00 ... 5.57303667e-01 6.37646973e-01 5.49346030e-01]] [[-1.18267167e+00 1.07799336e-01 7.80107617e-01 ... -6.68919226e-03 1.22174358e+00 -2.23365039e-01] [ 8.72990415e-02 -4.92355675e-01 3.36905718e-02 ... -3.69489670e-01 2.08633430e-02 -1.34904087e+00] [ 9.03037071e-01 4.39100474e-01 1.40561843e+00 ... -4.82963204e-01 8.21698368e-01 -6.63151383e-01] ... [-1.96357235e-01 6.91464424e-01 -3.08381587e-01 ... -8.79393756e-01 -2.48248890e-01 -1.63603079e+00] [-2.92097062e-01 -8.50648522e-01 -1.48259151e+00 ... -9.47466850e-01 3.29000115e-01 -7.63576850e-02] [ 8.00544322e-01 -1.38427174e+00 6.62987351e-01 ... 5.34180440e-02 3.12891603e-01 4.50065613e-01]] [[ 7.94845462e-01 -2.91727394e-01 7.99186587e-01 ... -1.32846117e-01 -2.74461180e-01 -9.96779427e-02] [ 6.50729001e-01 3.59419644e-01 -1.43827784e+00 ... -3.55962813e-01 -6.89254761e-01 -2.75586843e-01] [-8.11550558e-01 -4.38505143e-01 -5.73881447e-01 ... -3.26328129e-01 6.18302763e-01 7.14967251e-02] ... [-3.43519568e-01 1.36924848e-01 3.24816257e-01 ... -2.13649437e-01 5.75800359e-01 9.24043357e-01] [-5.69736548e-02 3.49904597e-01 8.34375992e-02 ... 5.05344093e-01 8.49331200e-01 1.34858161e-01] [-6.93316638e-01 3.78073663e-01 6.21011496e-01 ... 1.72605485e-01 2.42900282e-01 7.89362729e-01]]] [[[ 2.29012549e-01 -1.13255173e-01 -1.07137114e-01 ... -3.13827880e-02 -1.08931027e-01 7.57505745e-02] [ 1.11488864e-01 5.24890423e-01 -9.83830929e-01 ... 
-1.64462984e-01 -1.36274144e-01 -1.74482018e-02] [-6.09447248e-02 -2.47742459e-01 2.71998733e-01 ... -1.46694437e-01 -1.71757400e-01 -5.03729701e-01] ... [ 6.27864480e-01 1.30868265e-02 8.94482881e-02 ... 1.35181665e-01 -5.86862683e-01 1.38824582e-01] [-6.07140064e-01 1.89623848e-01 4.15780604e-01 ... -1.67302877e-01 7.39926100e-01 -1.05547585e-01] [ 2.46968657e-01 -3.04505318e-01 2.94992059e-01 ... 3.26428264e-01 5.10229886e-01 -8.11774313e-01]] [[ 8.61706793e-01 2.97814429e-01 1.24551930e-01 ... 3.89736108e-02 1.61212236e-01 4.89016175e-01] [ 1.77024007e-01 -1.47013202e-01 9.35647339e-02 ... 5.13483644e-01 -5.58606863e-01 2.08144695e-01] [-1.45369157e-01 1.66553244e-01 -3.66732448e-01 ... -3.63113135e-02 -8.60489726e-01 -4.61706340e-01] ... [ 6.49700344e-01 -2.38250151e-01 1.66030943e-01 ... -6.01750255e-01 2.45661125e-01 -2.87714005e-01] [ 6.10295031e-03 -2.36758485e-01 -8.50673951e-03 ... -1.28588751e-02 4.56197038e-02 -3.75858665e-01] [ 5.27819753e-01 6.65639222e-01 -5.35255373e-01 ... 4.74092290e-02 -2.34423969e-02 4.99635875e-01]] [[-1.96795911e-01 2.79497594e-01 2.64847249e-01 ... 3.28401059e-01 -1.32665858e-01 -2.38581046e-01] [-2.42740929e-01 -1.91939175e-01 3.22122276e-01 ... 3.13621044e-01 2.53085673e-01 2.45421946e-01] [-2.72368938e-01 -4.34267551e-01 2.92427335e-02 ... 1.52551442e-01 1.17564864e-01 -2.26296395e-01] ... [-3.23611081e-01 2.10519955e-01 3.34048420e-01 ... 7.24627793e-01 3.46004844e-01 -5.44275999e-01] [ 3.96241963e-01 3.17960262e-01 5.01774251e-02 ... -1.68407604e-01 -6.87446415e-01 -1.71949029e-01] [ 8.88116211e-02 -1.09624416e-01 -3.72573406e-01 ... -5.46532929e-01 -1.10860601e-01 -8.18284079e-02]] ... [[ 2.78728932e-01 -7.79554725e-01 4.82452661e-01 ... 1.53081581e-01 -4.45708185e-01 -2.12503806e-01] [ 7.95741528e-02 -3.35417628e-01 -3.03089231e-01 ... 7.82040894e-01 -2.25860313e-01 3.34799916e-01] [ 2.07716018e-01 3.03578824e-01 3.65652055e-01 ... 3.04660767e-01 -3.69187802e-01 -1.19489968e-01] ... 
[ 3.52096081e-01 1.33652419e-01 -2.78553277e-01 ... -7.06717312e-01 -5.94004802e-02 2.10339010e-01] [-5.15672192e-02 2.48973012e-01 -3.80533278e-01 ... 5.97923160e-01 1.24840848e-01 -2.07205594e-01] [-4.25648898e-01 -5.05090177e-01 -1.13119572e-01 ... -4.74121690e-01 -6.12651408e-02 -3.37671876e-01]] [[-4.26971734e-01 6.46505177e-01 5.46506464e-01 ... 4.69515324e-01 -3.47349942e-01 -3.46630156e-01] [ 8.94764885e-02 -2.68168509e-01 1.65466517e-01 ... -2.75698960e-01 -9.48552638e-02 -7.14042962e-01] [-4.89513934e-01 -7.11251080e-01 1.26499549e-01 ... 2.89935470e-01 6.72169924e-01 -4.44494672e-02] ... [-3.87006029e-02 7.01819241e-01 2.32756764e-01 ... -4.49379325e-01 -5.96442580e-01 6.65836334e-01] [ 1.45201042e-01 -1.98306471e-01 4.52156246e-01 ... -1.38919204e-01 5.44143580e-02 -5.73853916e-03] [ 1.93436384e-01 -2.09547728e-01 3.22321206e-01 ... -1.79459438e-01 -4.77853179e-01 -3.54726374e-01]] [[ 8.43714893e-01 3.83301318e-01 1.29800692e-01 ... -4.65035081e-01 -2.74189383e-01 4.85846668e-01] [-1.01420037e-01 -4.57127750e-01 1.92388058e-01 ... -3.80015612e-01 3.78229827e-01 -5.26684105e-01] [-3.39435279e-01 -4.35110420e-01 -4.25411731e-01 ... 1.52871341e-01 -4.70615298e-01 -4.29764807e-01] ... [-6.86191857e-01 1.33801550e-01 2.62942046e-01 ... -3.57113719e-01 -2.60531455e-01 -6.99296951e-01] [-5.18347807e-02 3.24137300e-01 7.17724800e-01 ... 2.19739690e-01 -2.42430300e-01 4.77945447e-01] [-3.14713903e-02 1.09804854e-01 -6.27936900e-01 ... 1.93281904e-01 5.35086729e-02 9.24191475e-02]]] [[[-1.71063948e+00 1.53819174e-01 -2.39932060e+00 ... 1.11878049e+00 -5.95167875e-01 -3.53688836e-01] [-1.20959640e+00 1.38031796e-01 3.73168617e-01 ... 4.79896367e-01 2.16768456e+00 1.50557637e+00] [ 1.06596528e-02 -1.85382044e+00 1.56923354e+00 ... 5.55467382e-02 1.54409373e+00 1.88430774e+00] ... [ 8.81616473e-02 1.69547641e+00 1.06310058e+00 ... 3.76451433e-01 -3.04618537e-01 3.05342376e-02] [-6.74621046e-01 -3.25079966e+00 5.93771398e-01 ... 
-2.32052291e-03 7.54643261e-01 1.54043067e+00] [ 2.16978550e+00 4.83422019e-02 -3.41529757e-01 ... 1.17135346e+00 -7.42986917e-01 7.79118299e-01]] [[-1.42880595e+00 -4.16241258e-01 3.49631518e-01 ... 1.27013242e+00 -2.11288643e+00 -1.51980746e+00] [-4.88075495e-01 8.11984092e-02 1.30562818e+00 ... -2.04417729e+00 -7.01809466e-01 -1.73653233e+00] [ 1.38158470e-01 -1.42226088e+00 -1.95073235e+00 ... 1.20040500e+00 -5.89701951e-01 -8.56764391e-02] ... [ 5.55705786e-01 -3.00945807e-02 -1.28147888e+00 ... -1.64532948e+00 -4.77816388e-02 8.16465616e-01] [ 9.81884360e-01 -2.92884886e-01 -2.49706674e+00 ... 1.37482774e+00 3.34613502e-01 -3.62207204e-01] [-2.86307192e+00 -2.57648182e+00 -1.81492662e+00 ... 2.33860087e+00 -1.80499291e+00 -9.04453248e-02]] [[-2.26263022e+00 1.56966686e+00 1.75449145e+00 ... -1.82598245e+00 -1.62781703e+00 -7.58828163e-01] [ 3.40582818e-01 1.02900541e+00 1.20778225e-01 ... -1.45591033e+00 3.21944308e+00 2.70817137e+00] [ 4.78158087e-01 1.44992933e-01 -1.53523290e+00 ... -9.68442559e-02 -1.78360987e+00 2.78499901e-01] ... [-3.09578323e+00 -3.30584466e-01 1.14134037e+00 ... 3.06040376e-01 -1.29989064e+00 -3.84350359e-01] [ 2.43878990e-01 1.86122084e+00 3.37044448e-01 ... -6.34221852e-01 9.78358746e-01 1.21222183e-01] [ 9.58323896e-01 1.18578160e+00 1.26949048e+00 ... -8.10999990e-01 1.46652782e+00 -3.54657978e-01]] ... [[-4.49972600e-01 -2.60106236e-01 1.17376626e+00 ... -2.68369436e+00 -1.19371605e+00 1.15827692e+00] [-2.50113577e-01 -2.42965385e-01 -1.15609455e+00 ... 1.63005495e+00 6.18033588e-01 -4.86256123e-01] [-5.07331967e-01 1.95171043e-01 -1.29552734e+00 ... 1.61622119e+00 -7.29467332e-01 -1.44825363e+00] ... [-1.64193416e+00 1.04105771e+00 -1.44169912e-01 ... -8.76446605e-01 2.84837103e+00 -9.57862139e-01] [ 5.88729799e-01 7.99192250e-01 1.73726213e+00 ... -1.26436800e-01 -1.78307247e+00 9.48802352e-01] [-1.14463830e+00 -1.68343997e+00 3.56038928e-01 ... 
1.14958322e+00 -1.65932453e+00 -2.83486795e+00]] [[ 2.33523321e+00 2.90655583e-01 -6.50464118e-01 ... 1.37920868e+00 -1.04131961e+00 -5.86875081e-01] [ 1.13846326e+00 -1.50151443e+00 1.94115114e+00 ... -1.84585667e+00 -4.23292667e-01 -1.66151786e+00] [ 2.62713403e-01 -6.07498944e-01 1.13343143e+00 ... 8.81059766e-01 7.52570093e-01 -5.71695745e-01] ... [-3.76616955e-01 1.96036786e-01 1.14229202e+00 ... -2.37441748e-01 -2.06008196e+00 -1.02120769e+00] [ 7.86751360e-02 -6.73305452e-01 1.13231957e+00 ... 3.68462920e-01 1.01172760e-01 3.70246220e+00] [-2.27455282e+00 -4.36450183e-01 -1.94920862e+00 ... -5.12055218e-01 1.07349360e+00 -6.04101717e-01]] [[ 4.39624339e-01 2.97719836e-01 -2.41662472e-01 ... -4.34263527e-01 3.64766002e-01 5.33238113e-01] [ 3.28208596e-01 9.58259642e-01 -1.14107049e+00 ... 1.74126732e+00 -4.41747487e-01 -3.56751271e-02] [ 4.26537156e-01 -1.09116137e+00 -1.65752709e+00 ... 6.84919536e-01 8.30995440e-01 -3.94450831e+00] ... [ 2.11465740e+00 9.44730341e-01 1.10457230e+00 ... 2.56624699e-01 -4.43275511e-01 2.67304331e-01] [-1.03029144e+00 6.52531803e-01 -5.53631544e-01 ... 2.00542855e+00 -1.35517406e+00 1.91782141e+00] [ 1.02407843e-01 -1.02810764e+00 -1.76478612e+00 ... 7.87964702e-01 8.30140591e-01 -2.81403780e-01]]] [[[-4.08024728e-01 -3.87125462e-01 -2.51971394e-01 ... 2.82232851e-01 2.82570690e-01 1.92793906e-01] [ 1.17573790e-01 -8.18549097e-02 6.84514567e-02 ... -6.72579855e-02 2.76075363e-01 9.42941383e-02] [-3.48375775e-02 3.49919319e-01 2.26508945e-01 ... -1.24009714e-01 3.41390878e-01 1.58083346e-02] ... [ 4.52447906e-02 -6.68110624e-02 -1.14905782e-01 ... 3.40560935e-02 -2.84662366e-01 -1.78436548e-01] [ 2.51298219e-01 -1.26656875e-01 1.34057403e-01 ... 3.37612182e-02 3.43451709e-01 -2.05313414e-01] [-5.94145179e-01 -5.79041123e-01 -8.32490474e-02 ... 3.77075732e-01 7.72874951e-02 -8.70737731e-02]] [[ 7.60218129e-02 9.25818458e-03 1.23216741e-01 ... 
8.95632058e-02 1.07011832e-02 -1.34649858e-01] [-2.51831859e-01 -1.21358847e-02 -9.35848579e-02 ... -4.24112260e-01 4.67486709e-01 -1.24001577e-01] [-1.37755439e-01 2.84793168e-01 -3.54354590e-01 ... -9.86095220e-02 2.55078048e-01 -2.49736346e-02] ... [-2.94448793e-01 -1.83338001e-01 -2.97690570e-01 ... 4.73830879e-01 5.51824331e-01 -2.03264922e-01] [ 2.52102166e-01 -9.57598239e-02 1.54903978e-01 ... 4.38328870e-02 -1.55880705e-01 -1.18959382e-01] [-9.00498107e-02 -5.30175388e-01 -1.70619607e-01 ... 1.99202925e-01 -1.81064531e-01 -3.60048890e-01]] [[ 1.69851586e-01 -3.35340440e-01 -1.76506475e-01 ... 2.49894276e-01 -1.99932948e-01 3.85363191e-01] [ 4.25446481e-02 1.95496678e-01 -4.94737625e-02 ... -9.86632425e-03 -5.00359774e-01 1.51516512e-01] [ 4.05307710e-01 -7.41282329e-02 -1.76288895e-02 ... -4.00163502e-01 2.94788718e-01 -3.91274542e-02] ... [ 2.20804989e-01 2.78028607e-01 -2.40398332e-01 ... -2.90580064e-01 -4.92979199e-01 1.09115474e-01] [ 1.22260682e-01 1.94330916e-01 -2.19550669e-01 ... -1.44851640e-01 -2.13642940e-01 -8.05475283e-04] [-3.37497622e-01 -3.84558499e-01 4.73042279e-01 ... 1.16067350e-01 -1.41293351e-02 3.71695042e-01]] ... [[-2.70075440e-01 1.06377251e-01 -2.86000252e-01 ... -1.86801165e-01 9.51222032e-02 4.63196225e-02] [-2.47395903e-01 1.34827560e-02 -1.43466359e-02 ... 2.06419244e-01 -9.33217034e-02 -2.03653991e-01] [-1.08688302e-01 -2.15336904e-01 -2.25292854e-02 ... 2.96073079e-01 1.04764709e-02 2.33503953e-02] ... [-3.06191444e-01 2.22910941e-01 -3.81330587e-02 ... -1.22758918e-01 1.95578039e-01 2.51618236e-01] [ 8.11948404e-02 -1.80196598e-01 -3.90757993e-02 ... 2.65549012e-02 1.45882279e-01 2.98284709e-01] [-8.15995857e-02 -4.31829304e-01 -8.33490938e-02 ... -2.70937802e-03 2.39121422e-01 2.12020054e-02]] [[-1.31470591e-01 -9.20978412e-02 -2.15897709e-01 ... 3.93583253e-02 1.92385048e-01 -1.42144084e-01] [ 1.04805246e-01 -2.40607917e-01 -4.78967577e-02 ... 
4.33064789e-01 8.82234350e-02 3.15161556e-01] [ 1.59775652e-02 1.05083734e-01 1.82630107e-01 ... 9.49509209e-04 3.63839000e-01 -2.55393326e-01] ... [-1.03362083e-01 -2.06991732e-01 1.00286461e-01 ... 4.85782363e-02 1.23565448e-02 1.63233355e-02] [ 1.99400291e-01 -2.25960821e-01 3.60539049e-01 ... 7.78013319e-02 3.24532501e-02 -1.02306850e-01] [-3.54896039e-02 -3.96793410e-02 3.67949493e-02 ... 4.79893029e-01 -2.28930503e-01 -7.85996467e-02]] [[-2.57795691e-01 -1.03598289e-01 3.42173539e-02 ... -5.00029922e-02 5.43991514e-02 -9.30658206e-02] [ 9.00616422e-02 -3.24079841e-01 1.42169774e-01 ... -9.36586261e-02 -1.42463773e-01 3.07258070e-01] [ 2.35562265e-01 1.58369973e-01 -3.95809067e-03 ... 1.23721577e-01 -2.40369871e-01 -1.53593093e-01] ... [-5.62005192e-02 -2.55224138e-01 4.14895080e-02 ... -1.60011828e-01 -2.41240755e-01 -6.04094844e-03] [-2.27468416e-01 5.73252924e-02 -1.41059950e-01 ... 2.94842795e-02 -4.60136861e-01 3.32586616e-01] [-7.40266144e-02 -1.50883934e-02 3.82521838e-01 ... 4.19352293e-01 1.06346086e-01 1.49409816e-01]]] [[[ 1.75669956e+00 -8.62481296e-01 -5.33592522e-01 ... -1.53763425e+00 -1.94755629e-01 -1.32112789e+00] [ 1.25748563e+00 -2.52320242e+00 -7.63714671e-01 ... 6.22532129e-01 8.21229339e-01 3.16210002e-01] [ 4.68201376e-02 1.18455017e+00 -6.88513398e-01 ... 2.09253216e+00 2.09152341e+00 1.41673893e-01] ... [-8.77462983e-01 9.16417420e-01 6.09135866e-01 ... 1.71691000e+00 5.16879678e-01 -1.02946508e+00] [ 2.72451669e-01 -1.62993327e-01 -6.28886700e-01 ... 1.13353050e+00 -3.71522844e-01 -1.00779271e+00] [ 5.51519096e-01 -4.48229253e-01 5.68269134e-01 ... 1.10474730e+00 -1.31809622e-01 4.97127697e-03]] [[-4.44682986e-01 -4.66826141e-01 -6.02934778e-01 ... -1.00725293e+00 -8.60897839e-01 1.55156684e+00] [ 1.19196212e+00 1.09130728e+00 -1.80234206e+00 ... -5.95359623e-01 6.87514246e-01 -9.27561283e-01] [-4.02566761e-01 8.21565807e-01 -7.25458682e-01 ... 7.40265027e-02 -1.85073480e-01 9.29318666e-01] ... 
[ 1.44704556e+00 -8.21871281e-01 -2.65637457e-01 ... -8.06928992e-01 3.68119061e-01 5.37497401e-01] [-3.83760333e-01 1.90372932e+00 -1.90903676e+00 ... 6.79144442e-01 -5.64957500e-01 -2.25926533e-01] [-1.34302425e+00 -6.98513746e-01 -1.11746108e-02 ... -4.64639753e-01 1.07844651e+00 -5.01480699e-01]] [[ 1.60705733e+00 8.16669017e-02 4.21077013e-01 ... -4.82334137e-01 -1.83961904e+00 -8.56033027e-01] [ 1.87909305e+00 -1.02105474e+00 -2.26658583e+00 ... 1.43982008e-01 9.22659338e-02 1.27091932e+00] [ 3.32923174e-01 1.55911162e-01 1.37561560e+00 ... 1.06581938e+00 8.13192010e-01 1.88305986e+00] ... [-3.98123622e-01 3.29811275e-01 -6.52301848e-01 ... 2.51806951e+00 -7.64161825e-01 7.35283196e-01] [-3.69855344e-01 1.97089210e-01 1.21537888e+00 ... -1.43051553e+00 -7.17739165e-01 9.68679428e-01] [-5.00721812e-01 2.10944581e+00 1.05959558e+00 ... -7.50714362e-01 1.29344797e+00 6.56218588e-01]] ... [[ 5.21281481e-01 -3.43799174e-01 2.56231576e-01 ... -3.48607033e-01 -1.07046080e+00 -5.13396144e-01] [ 8.71485889e-01 1.91477275e+00 1.25587034e+00 ... 4.16313112e-01 5.65402508e-01 2.17808247e+00] [-1.37174439e+00 7.87878931e-01 -1.21377334e-01 ... -5.28400660e-01 -1.21164644e+00 1.12563729e+00] ... [ 5.50135635e-02 -1.90102494e+00 -1.01595712e+00 ... -6.99390590e-01 9.65408087e-01 -1.14510512e+00] [ 1.08520305e+00 -7.60071814e-01 1.75822532e+00 ... -2.95124578e+00 -1.49855256e+00 -6.42722309e-01] [ 7.19092607e-01 6.24727070e-01 -8.51494551e-01 ... 2.77739692e+00 -9.27825809e-01 -8.55122447e-01]] [[-8.19146633e-01 -7.48547792e-01 6.49713039e-01 ... -3.90567571e-01 1.33398354e+00 8.59626830e-02] [ 8.77678156e-01 2.45573449e+00 -9.40273628e-02 ... 2.41132632e-01 8.69923890e-01 -9.94915247e-01] [ 4.79720503e-01 1.05367148e+00 -2.50830024e-01 ... -6.41175449e-01 2.44828224e-01 2.42641091e-01] ... [ 6.34160399e-01 -1.33842063e+00 1.51820457e+00 ... -2.41213411e-01 6.29154861e-01 -5.34619391e-01] [ 1.46292016e-01 6.71157241e-01 -1.07068455e+00 ... 
2.15729809e+00 9.89585578e-01 9.61401165e-01] [ 6.93864226e-01 1.22297120e+00 -2.38679513e-01 ... -4.15191323e-01 4.63741362e-01 3.40985984e-01]] [[-1.51658261e+00 1.01382661e+00 -4.15202975e-01 ... 1.05055630e+00 -1.11345208e+00 6.65827513e-01] [-9.88941729e-01 -6.91530526e-01 -1.54475760e+00 ... -2.01540068e-01 -2.03463387e+00 -1.77940011e+00] [ 5.07549524e-01 -1.15570843e+00 -7.18905330e-02 ... -1.44506419e+00 1.41192639e+00 2.03528070e+00] ... [ 4.14070785e-01 9.21807945e-01 -2.01323628e+00 ... -1.72231328e+00 2.31226063e+00 3.78771615e+00] [ 1.25641599e-01 -1.36923850e+00 6.50609195e-01 ... -2.33100966e-01 9.08312872e-02 3.23220044e-01] [-8.06858599e-01 -2.42630410e+00 4.39408839e-01 ... -5.42224288e-01 4.98390794e-02 -1.65156633e-01]]] [[[-3.60257536e-01 -5.65104425e-01 -7.46717930e-01 ... -2.42542699e-02 3.10198456e-01 -9.96883586e-02] [ 3.88914585e-01 3.37849349e-01 5.66185236e-01 ... -5.47780059e-02 4.63672400e-01 -5.49161077e-01] [-1.31269187e-01 -1.09994662e+00 9.41726148e-01 ... -4.55046266e-01 -5.15715107e-02 -8.00265551e-01] ... [ 5.63878119e-01 -7.27724791e-01 -2.22299412e-01 ... 5.61267734e-01 3.39115679e-01 -1.02113104e+00] [-1.72483250e-01 2.26757392e-01 3.39270979e-01 ... 8.64828154e-02 -4.01207596e-01 1.58794209e-01] [-7.84592628e-01 -1.46588445e-01 -9.30614863e-03 ... -7.12906003e-01 2.20948935e-01 -3.76687646e-01]] [[ 6.76439643e-01 -5.88120036e-02 7.49428011e-03 ... 9.90117490e-01 2.52038985e-01 -6.43533766e-01] [-5.29844701e-01 -1.24066494e-01 6.85566008e-01 ... 4.55701292e-01 -6.38177276e-01 1.09132552e+00] [-3.95669520e-01 1.07245728e-01 -2.78859645e-01 ... -9.50425804e-01 9.36778009e-01 -9.85700309e-01] ... [-1.01234354e-01 -5.56539185e-02 -1.42408878e-01 ... -6.73023164e-01 -1.87925756e-01 -4.69385624e-01] [-5.68546414e-01 -3.54242146e-01 -9.79951397e-02 ... -1.66175470e-01 -8.01459253e-01 9.48800325e-01] [-4.30300862e-01 5.93309760e-01 3.75164539e-01 ... 
4.59346563e-01 -3.40659797e-01 -6.40286267e-01]] [[ 1.12605882e+00 4.02240492e-02 -5.23007959e-02 ... 6.36374474e-01 -4.40409333e-02 -1.74167559e-01] [-1.28744364e-01 -7.63045728e-01 1.06966853e-01 ... 5.23679554e-01 -5.01505196e-01 4.54266131e-01] [-1.04077899e+00 -2.39184260e-01 6.08461380e-01 ... 4.05940831e-01 -8.50235403e-01 7.51709402e-01] ... [-1.62688002e-01 -1.18157104e-01 -3.35487872e-01 ... 6.58555865e-01 3.63258928e-01 1.54928267e-01] [-6.15944639e-02 2.40396768e-01 5.35931904e-03 ... -1.77006170e-01 -7.72847772e-01 5.26062474e-02] [-3.64237130e-01 1.54549569e-01 4.00893390e-01 ... -1.43842161e-01 3.26281860e-02 1.23000935e-01]] ... [[-9.11339879e-01 2.85893619e-01 3.33633095e-01 ... 1.03954025e-01 8.69530618e-01 -5.71203567e-02] [-2.09103629e-01 3.61190557e-01 6.15787446e-01 ... 2.39033118e-01 -1.22500755e-01 3.63115072e-01] [-2.44797394e-01 -4.38741803e-01 4.80575681e-01 ... -4.04105335e-01 6.04839660e-02 7.00644135e-01] ... [-2.65141547e-01 -1.29122913e+00 -2.06779957e-01 ... -5.58106564e-02 -2.09219769e-01 1.65234447e-01] [ 1.19179392e+00 -8.69842291e-01 -8.24571967e-01 ... 1.19308949e+00 -1.70867115e-01 -5.69924414e-01] [-7.66052224e-04 -4.68882710e-01 3.25345367e-01 ... -6.29210651e-01 9.43944156e-02 -3.54202092e-01]] [[-2.79063046e-01 -4.14554566e-01 6.28679156e-01 ... 4.04080525e-02 -1.19231677e+00 1.95660144e-01] [-1.91129446e-01 5.68976641e-01 -3.82562935e-01 ... 1.63099736e-01 -8.82119238e-02 -3.82733457e-02] [-1.86566129e-01 -3.90831113e-01 -4.85576123e-01 ... 4.75134850e-02 8.88172835e-02 2.39447225e-03] ... [ 2.77456772e-02 5.02203286e-01 -7.01255277e-02 ... 1.76019534e-01 2.88192481e-01 1.45729989e-01] [-4.06221747e-01 7.85291135e-01 -5.00335872e-01 ... -3.99543375e-01 -1.59877837e-01 7.14995623e-01] [ 5.50612435e-02 -4.95092906e-02 8.46699625e-02 ... 1.03875625e+00 3.36218238e-01 5.80586255e-01]] [[-5.60469747e-01 -5.79364419e-01 -6.12212300e-01 ... 
4.49076556e-02 -9.17919576e-01 -1.95452660e-01] [ 4.20060635e-01 2.69935161e-01 4.02272344e-01 ... -1.51649547e+00 4.70735013e-01 1.15353012e+00] [-3.91027957e-01 3.70088577e-01 4.05999243e-01 ... -3.63213420e-01 -5.65833785e-02 6.85811415e-02] ... [ 7.78273195e-02 2.26116944e-02 6.34734273e-01 ... -6.83955133e-01 -2.40434125e-01 1.04922004e-01] [-8.02984163e-02 6.49397969e-01 1.47570550e+00 ... -1.17591906e+00 -2.93502480e-01 -2.31039688e-01] [ 8.90828595e-02 -9.22646761e-01 -2.41654322e-01 ... -3.32957655e-01 4.41985250e-01 -1.95395634e-01]]]] [[[[-1.90749392e-01 -9.19500828e-01 -4.14677523e-02 ... 1.30635602e-02 4.67026860e-01 2.53132045e-01] [-7.60285199e-01 -1.42265153e+00 4.94594090e-02 ... -5.15567899e-01 3.57667893e-01 3.06860685e-01] [ 8.24407160e-01 -1.58671588e-01 5.88810623e-01 ... 1.24540019e+00 -2.90619321e-02 -1.40211487e+00] ... [-5.95352650e-01 -4.19624746e-01 -9.30466294e-01 ... 8.54201376e-01 -5.26550293e-01 9.77467299e-01] [ 4.88757975e-02 -8.88624310e-01 1.37230074e+00 ... -1.64613187e-01 5.39602637e-01 -3.05441171e-01] [ 4.26513314e-01 9.97814119e-01 -4.74802911e-01 ... 2.66495675e-01 1.30410776e-01 1.02792573e+00]] [[-5.95277667e-01 1.97274417e-01 -1.82145849e-01 ... -1.51269771e-02 -2.67473347e-02 5.00133038e-01] [ 6.09149277e-01 -8.90223682e-01 1.25455499e+00 ... -9.01473641e-01 6.64345622e-01 5.34930050e-01] [ 5.55661201e-01 -5.58514237e-01 -6.16097413e-02 ... 8.23324174e-02 6.37004435e-01 -1.01887679e+00] ... [ 4.51024547e-02 -4.44468021e-01 3.87058645e-01 ... -1.00668706e-01 -1.22742027e-01 -4.04251292e-02] [ 3.31661731e-01 3.90698701e-01 -6.35809541e-01 ... -1.37118489e-01 -3.30270916e-01 -3.86653066e-01] [-2.29199767e-01 -5.38085066e-02 -1.22530508e+00 ... 3.17282051e-01 8.89602363e-01 5.06741814e-02]] [[-2.66808681e-02 -7.14218974e-01 -1.53342485e-01 ... -9.22529280e-01 8.76025498e-01 3.39582950e-01] [ 5.90938985e-01 7.88404271e-02 6.50768042e-01 ... 
-3.71432483e-01 2.53992945e-01 -8.08650672e-01] [ 5.75894296e-01 -5.47943488e-02 -8.18184137e-01 ... 1.16809286e-01 -6.28711522e-01 -9.14881110e-01] ... [ 8.34051430e-01 3.79237950e-01 1.30761695e+00 ... -3.98558825e-01 -4.46488708e-01 -1.74790278e-01] [-1.14529514e+00 -9.83322918e-01 4.81143236e-01 ... -3.48020136e-01 -3.59996349e-01 2.01262295e-01] [ 3.79853427e-01 8.91673863e-01 -2.56859124e-01 ... 9.74814057e-01 4.86345857e-01 2.55573958e-01]] ... [[ 1.13678610e+00 -6.48788452e-01 -1.37719467e-01 ... -5.13361931e-01 -7.92631924e-01 3.72226685e-01] [ 2.98359077e-02 -1.32253692e-01 1.48405254e+00 ... 4.90371883e-01 -1.62504092e-01 -2.71047205e-01] [-5.19523025e-01 -1.26127407e-01 2.12680444e-01 ... -2.77808607e-01 3.09935570e-01 2.10654989e-01] ... [-1.17845535e+00 -9.85448807e-03 2.09127456e-01 ... 9.31731939e-01 -3.55695397e-01 1.27423167e+00] [ 1.75598096e-02 -1.32166564e+00 -5.41280508e-01 ... -8.05533051e-01 5.15630424e-01 5.09026229e-01] [-4.52156484e-01 -1.39130771e+00 5.55738509e-01 ... -8.97884369e-02 1.68495381e+00 1.47814706e-01]] [[-3.39365959e-01 -1.14878029e-01 6.25031173e-01 ... -5.17956734e-01 -6.64715171e-02 -4.80675876e-01] [-1.25546202e-01 7.81123042e-01 -3.73061001e-01 ... 1.55102384e+00 -5.24326146e-01 -6.18780851e-01] [ 4.44102108e-01 8.70476007e-01 -1.34005201e+00 ... 1.46193278e+00 -8.43131065e-01 1.85188189e-01] ... [ 4.53116626e-01 5.99800229e-01 8.33656564e-02 ... 8.80284786e-01 9.18136686e-02 -1.35521770e+00] [ 3.74481708e-01 -3.60406160e-01 -1.48210073e+00 ... 9.48276222e-02 1.32370543e+00 -3.57498050e-01] [ 7.38236129e-01 5.47064185e-01 1.09966546e-01 ... -1.87274128e-01 -1.34793472e+00 -6.98375285e-01]] [[ 5.01865894e-02 -5.63872993e-01 -8.69446278e-01 ... 3.97434592e-01 -9.25894082e-01 6.87692404e-01] [-6.81731880e-01 4.34851408e-01 4.47283179e-01 ... -1.34545743e-01 9.70058978e-01 1.20869018e-01] [-3.25586885e-01 -2.07316726e-01 5.18455148e-01 ... 5.95517635e-01 -6.60343885e-01 -1.00083160e+00] ... 
[ 3.31300080e-01 4.07281853e-02 3.19138229e-01 ... 3.50433320e-01 5.41047990e-01 4.72751856e-01] [ 8.25906575e-01 -5.64034462e-01 -1.95762947e-01 ... 7.08100796e-01 1.48862258e-01 2.79159904e-01] [-4.83218372e-01 6.02516830e-01 1.50752509e+00 ... -1.22968271e-01 3.32289040e-01 4.09474701e-01]]] [[[-1.61971390e-01 2.16796964e-01 -7.05154315e-02 ... -3.54997933e-01 1.35876499e-02 4.02876467e-01] [-3.59028459e-01 6.95453167e-01 2.09346712e-02 ... 9.17673588e-01 -2.22484112e-01 4.15333137e-02] [-2.81761080e-01 1.70405746e-01 4.45996858e-02 ... 1.13609433e-01 1.24922320e-01 -6.65584981e-01] ... [-1.04051185e+00 -9.77617875e-02 -1.30067915e-01 ... 5.36298677e-02 -8.63752365e-02 -4.10236329e-01] [ 4.59739834e-01 -3.34006131e-01 -8.14706460e-02 ... 2.74456441e-01 -2.48794451e-01 -6.80086374e-01] [-1.62047654e-01 -1.07391894e-01 3.19499254e-01 ... -2.86668450e-01 -1.58274427e-01 1.04577832e-01]] [[-2.14972883e-01 -9.99829099e-02 4.56169277e-01 ... -3.71404171e-01 -3.64928961e-01 3.96111131e-01] [ 2.11824447e-01 -8.59868899e-02 -5.54652870e-01 ... -9.20469701e-01 1.62408035e-02 -1.69356838e-01] [-7.33929798e-02 -4.80662286e-01 -5.24727821e-01 ... 2.23008230e-01 3.21807086e-01 4.87118483e-01] ... [ 2.73337156e-01 1.27793297e-01 -2.52020329e-01 ... 1.09851010e-01 2.45227769e-01 9.98283476e-02] [-2.74321705e-01 1.54598802e-01 4.64231879e-01 ... -1.55590117e-01 6.26985192e-01 2.02284455e-01] [-1.80386052e-01 -5.24632454e-01 -5.16717732e-01 ... -3.18372220e-01 -5.98113239e-01 3.12042654e-01]] [[-8.21225494e-02 1.67741835e-01 -2.25939915e-01 ... 5.88525534e-01 4.91756380e-01 -9.14827362e-02] [ 4.93412167e-01 -1.02502912e-01 -6.68552160e-01 ... 6.67278707e-01 -9.60022435e-02 4.67156231e-01] [ 1.21130690e-01 -5.09668291e-01 -4.07874852e-01 ... 3.60766888e-01 -6.38939664e-02 2.88590461e-01] ... [ 2.45748740e-02 -8.88636056e-03 9.29870605e-01 ... -5.44517398e-01 4.57466900e-01 -1.06444761e-01] [ 3.90319258e-01 2.43806988e-01 -1.44524366e-01 ... 
-3.54280293e-01 -2.29256392e-01 1.52615145e-01] [-1.71435177e-01 -5.72113395e-01 1.28954276e-01 ... 2.70135552e-01 -7.36252442e-02 2.75785655e-01]] ... [[ 1.89526439e-01 3.32430936e-02 -7.29194164e-01 ... -3.95879239e-01 1.21169845e-02 -1.22355506e-01] [ 6.99345767e-01 7.20716938e-02 9.37688425e-02 ... 2.38146007e-01 -2.82539785e-01 5.05336463e-01] [ 1.98939443e-01 -3.41048129e-02 -8.56744591e-03 ... -2.93015331e-01 -4.39298123e-01 -7.92425796e-02] ... [ 5.85135520e-01 4.90442872e-01 1.12188071e-01 ... -3.15415949e-01 7.83617273e-02 -2.38960728e-01] [-3.17737877e-01 -3.63197535e-01 -2.67818838e-01 ... 1.83876202e-01 3.31774205e-01 -3.50400180e-01] [ 1.16734207e-01 -1.28098592e-01 -2.95995027e-01 ... -8.56624991e-02 1.91693693e-01 2.31257468e-01]] [[ 6.22001708e-01 -9.87775028e-02 4.90343302e-01 ... -3.49518508e-01 -1.27396375e-01 -3.44193876e-01] [-9.08496082e-02 5.53794682e-01 -3.53911698e-01 ... -7.18491137e-01 1.07460797e-01 -2.98802853e-01] [-2.67183304e-01 -1.92405805e-01 -2.34069392e-01 ... 1.20930940e-01 -1.94754615e-01 -1.34383425e-01] ... [-3.36559355e-01 -2.88581908e-01 -4.45355982e-01 ... -2.88497746e-01 4.19751227e-01 -2.80964106e-01] [ 1.63763121e-01 1.05120480e+00 6.51378572e-01 ... -4.79796171e-01 -2.65105844e-01 5.21091409e-02] [-6.25385761e-01 4.29166704e-01 -1.61625713e-01 ... -1.42455325e-01 1.10641666e-01 -1.62424296e-01]] [[-4.17732775e-01 3.03391486e-01 2.78583765e-01 ... 2.69908011e-01 2.68540561e-01 -5.50605237e-01] [-4.06189919e-01 1.72780156e-01 6.89046800e-01 ... 1.93670452e-01 2.94245362e-01 -2.83596069e-01] [ 3.31667691e-01 2.47447595e-01 1.36546537e-01 ... 2.71812171e-01 2.28644997e-01 -4.68576364e-02] ... [ 1.64320860e-02 -1.82864025e-01 2.87666798e-01 ... 1.51108623e-01 1.94504738e-01 2.84910828e-01] [-2.66998291e-01 -8.64616856e-02 -5.66064775e-01 ... -4.90466028e-01 4.17999148e-01 2.52668321e-01] [-5.65280318e-01 -9.00049955e-02 2.50615418e-01 ... 
-8.49240366e-03 -2.40158483e-01 -4.10584360e-01]]] [[[-3.73111939e+00 2.62409449e+00 1.46875930e+00 ... -5.10400772e-01 -5.05195379e-01 7.21250117e-01] [-2.08561015e+00 -1.26346397e+00 -1.67758477e+00 ... 1.98647308e+00 -4.92447048e-01 1.51545286e+00] [-2.88195372e-01 -2.87307441e-01 8.26358914e-01 ... -7.09684908e-01 -4.50179070e-01 2.22881198e+00] ... [-2.32020974e-01 1.40808773e+00 -1.70479858e+00 ... 9.35485780e-01 9.44514215e-01 2.38948613e-01] [-1.50331843e+00 1.08601511e+00 -2.51423836e+00 ... 5.68955183e-01 -1.25135994e+00 1.12083542e+00] [ 3.59983349e+00 -2.05453205e+00 2.70821619e+00 ... 7.88011551e-01 1.33495808e+00 -6.08157158e-01]] [[-1.81174588e+00 -1.71455300e+00 -8.77190471e-01 ... -2.82276154e-01 2.26619649e+00 2.76672423e-01] [ 2.83401299e+00 7.33971953e-01 -7.54099667e-01 ... -2.16833782e+00 1.28321421e+00 -9.33581471e-01] [ 1.25394297e+00 -7.16842115e-01 9.14852440e-01 ... 9.99677062e-01 -8.63275588e-01 -1.61841258e-01] ... [ 1.35347009e+00 -2.50108171e+00 1.74870777e+00 ... 1.82677138e+00 -8.13155890e-01 1.87041366e+00] [ 1.63722885e+00 8.46138418e-01 -2.41515660e+00 ... 2.78238273e+00 -1.94312119e+00 -1.42260122e+00] [-1.12439775e+00 1.26051164e+00 -5.92214465e-01 ... -2.00366187e+00 -8.88996065e-01 -4.27103162e-01]] [[ 8.60328019e-01 -3.79685014e-01 2.26184058e+00 ... -5.31331062e-01 2.15487456e+00 -2.43440747e+00] [-7.66227320e-02 -3.93350311e-02 -2.66539741e+00 ... -1.06325853e+00 -4.13824826e-01 -5.82020342e-01] [-1.89283395e+00 -1.16582012e+00 -2.08633924e+00 ... -2.31409371e-01 -1.66471291e+00 1.59856319e+00] ... [-1.59459209e+00 2.40890121e+00 1.51971948e+00 ... -8.80304933e-01 -7.07012773e-01 2.19186211e+00] [-1.07884789e+00 -5.14785051e-01 1.86547804e+00 ... 3.55865479e-01 -1.47030735e+00 -1.65111792e+00] [-1.05174458e+00 1.67840767e+00 1.30981994e+00 ... -2.60448515e-01 -1.96773088e+00 -4.99817014e-01]] ... [[-1.93668032e+00 1.18100977e+00 -6.19678795e-01 ... 
-7.30632961e-01 1.18219674e+00 7.56266117e-01] [ 7.33805835e-01 -1.79454029e+00 -1.01055717e+00 ... -2.40554601e-01 1.04812646e+00 -1.35599184e+00] [-1.39002442e-01 1.30471373e+00 -2.16942811e+00 ... 4.39878672e-01 -1.23414703e-01 1.39368570e+00] ... [ 1.85225475e+00 -1.42321019e-02 7.90392578e-01 ... -3.62534374e-01 -3.52793038e-01 1.72908545e-01] [ 1.26850593e+00 1.20410514e+00 9.76150334e-02 ... -6.63934529e-01 5.01587033e-01 6.61178827e-01] [ 3.16064060e-02 -1.10206819e+00 7.88033783e-01 ... -1.90321654e-01 1.14546359e-01 -1.27471411e+00]] [[-3.21954131e-01 -1.08996439e+00 -3.83229882e-01 ... -6.39044583e-01 3.32729012e-01 8.87923837e-01] [ 1.48683894e+00 -1.93838477e-01 -3.17475885e-01 ... 1.00396693e+00 -1.43065536e+00 1.38267815e+00] [ 2.25037575e+00 2.44400561e-01 -1.12174690e+00 ... -1.21508217e+00 -1.83910024e+00 -1.22953856e+00] ... [ 1.47161078e+00 -2.98462033e+00 5.28880179e-01 ... -5.07023513e-01 -4.24818695e-01 7.37089097e-01] [-2.45421350e-01 1.56860566e+00 -6.27122104e-01 ... 1.69030756e-01 -1.67461526e+00 4.30622607e-01] [-1.10907030e+00 -3.08378339e-01 -7.49510109e-01 ... -1.47216296e+00 -1.03337955e+00 -2.50169325e+00]] [[-3.12608629e-01 -2.94080091e+00 -9.61141050e-01 ... 6.27237976e-01 -4.55112159e-01 -2.59180284e+00] [-3.47690868e+00 -2.19365931e+00 -5.05217731e-01 ... 8.49852264e-01 5.50735652e-01 2.23442960e+00] [ 8.64498317e-01 2.24365544e+00 -6.08743250e-01 ... -1.36359239e+00 3.72451585e-04 2.80112118e-01] ... [-1.63720950e-01 -1.91864967e+00 1.42342949e+00 ... 7.34294772e-01 1.84875762e+00 -6.47070289e-01] [-1.36075854e-01 1.20362210e+00 -3.21895629e-01 ... 8.12804699e-01 -2.09588122e+00 -4.64475691e-01] [ 1.71591341e+00 7.17979908e-01 -1.81925201e+00 ... -1.40384984e+00 2.61165404e+00 9.99382019e-01]]] [[[ 2.99392223e-01 -3.36347729e-01 -1.09192491e-01 ... 6.20411225e-02 -2.92501152e-01 -1.82156369e-01] [ 1.09645668e-02 -8.74591172e-02 -6.25611961e-01 ... 
6.72260225e-02 -2.74023175e-01 -3.27226496e-03] [-8.02493766e-02 -7.71903470e-02 1.15146786e-01 ... 1.29037276e-01 2.05156550e-01 8.16030055e-03] ... [ 1.78149492e-01 -2.21440151e-01 -1.92529913e-02 ... -1.46532625e-01 3.01600188e-01 1.28095478e-01] [-9.93546322e-02 -7.19755366e-02 -1.37463555e-01 ... 4.10652190e-01 2.43085310e-01 -1.96601048e-01] [ 1.79929547e-02 -1.82497844e-01 -4.93685782e-01 ... 5.23134768e-01 -3.45639348e-01 -3.05307567e-01]] [[ 2.28620067e-01 -5.89541852e-01 -3.49831209e-02 ... -3.82087729e-03 -2.57631987e-01 3.14670317e-02] [-2.82876492e-01 2.47137129e-01 2.05173001e-01 ... 6.82838634e-02 -1.42900229e-01 -3.46700966e-01] [-1.82288662e-01 -4.05879617e-01 5.77450506e-02 ... 7.09162734e-04 -2.13395864e-01 3.68152440e-01] ... [-3.55532229e-01 2.76697189e-01 -1.30165055e-01 ... -2.81218320e-01 -2.01414511e-01 3.55053157e-01] [ 1.69963226e-01 -3.30051720e-01 4.04142626e-02 ... 7.34912753e-01 2.22657338e-01 -2.45410413e-01] [-2.23607093e-01 6.55661225e-01 2.85534412e-01 ... 1.29351169e-01 1.72169268e-01 -1.02464028e-01]] [[ 5.33704102e-01 -1.38226179e-02 -1.58840567e-01 ... -2.22310632e-01 -3.19431186e-01 -4.65121776e-01] [ 1.59550205e-01 3.81677568e-01 9.15685073e-02 ... 1.22918956e-01 5.99824131e-01 8.54232013e-02] [-1.36111870e-01 4.10473585e-01 5.94912805e-02 ... 4.25784677e-01 -1.26329586e-01 -2.80081406e-02] ... [-8.96473415e-03 -2.71010976e-02 6.27800465e-01 ... -1.49769142e-01 1.04277454e-01 2.74323761e-01] [-2.28140742e-01 8.54328349e-02 1.04902625e-01 ... 8.62622634e-03 -1.70003563e-01 3.28813523e-01] [-1.89357147e-01 -1.52159989e-01 -3.17253321e-01 ... 1.95053801e-01 1.15683720e-01 1.47994161e-01]] ... [[-3.51982445e-01 -2.34630108e-01 -1.37568176e-01 ... 4.76886392e-01 4.96225923e-01 -1.50846392e-01] [ 6.92698956e-01 -6.46785870e-02 3.26915160e-02 ... 2.53792517e-02 1.92261070e-01 -3.87466222e-01] [ 3.49549413e-01 3.13203722e-01 -5.77338152e-02 ... 3.96897271e-02 -1.91618845e-01 -1.36514857e-01] ... 
[ 5.37144579e-02 2.22473398e-01 1.60432719e-02 ... 6.01342507e-02 -2.23594569e-02 -6.62610471e-01] [ 1.63223162e-01 -1.44555435e-01 2.91658252e-01 ... 3.01032662e-01 -3.77550572e-01 2.13256553e-01] [-4.62150350e-02 -4.73580963e-04 1.52136266e-01 ... -5.65122962e-01 -5.18347956e-02 -3.15894306e-01]] [[-2.11790338e-01 1.71888769e-01 -1.30875096e-01 ... 1.27718270e-01 2.00951561e-01 6.62769079e-02] [ 1.33463383e-01 4.49043393e-01 1.28186554e-01 ... 1.93113774e-01 -1.33321792e-01 2.23701864e-01] [ 8.39515999e-02 -6.12098463e-02 -7.93426186e-02 ... -2.51450296e-02 -2.92124689e-01 2.91611165e-01] ... [-5.55598587e-02 -2.03021049e-01 -3.12955320e-01 ... -1.41632557e-01 3.70947212e-01 -2.11627230e-01] [-1.38560236e-01 5.56769632e-02 -2.73331553e-01 ... 1.15798533e-01 -2.32147470e-01 1.40445158e-01] [-2.60322541e-01 4.44218695e-01 4.04675066e-01 ... 3.99965234e-02 -1.44034997e-02 -1.09719180e-01]] [[ 4.38819051e-01 1.28434598e-01 -3.84416491e-01 ... -1.98376685e-01 1.36782154e-01 2.43733093e-01] [-1.20830843e-02 3.73462260e-01 -1.13149606e-01 ... 1.47220820e-01 1.86314225e-01 -2.41397098e-01] [-2.89568044e-02 -5.02050221e-02 -4.88012284e-02 ... -6.77485690e-02 2.63925821e-01 3.78099456e-02] ... [ 5.11008129e-02 2.05214351e-01 1.73519835e-01 ... 5.54201186e-01 1.53144807e-01 -3.94126534e-01] [-1.65866122e-01 -2.19023302e-01 -3.50153029e-01 ... 1.98568642e-01 5.98348789e-02 -4.12720531e-01] [-3.43436673e-02 3.64522576e-01 8.72343704e-02 ... 3.95622440e-02 -5.31341195e-01 3.56788874e-01]]] [[[ 1.86321473e+00 -1.16186333e+00 1.25482535e+00 ... -6.41765520e-02 1.26767635e+00 1.32364690e-01] [-1.73819757e+00 1.25878167e+00 2.63956875e-01 ... -1.97769612e-01 -7.08405793e-01 1.24164008e-01] [-9.16496396e-01 -1.48537695e-01 8.81690085e-01 ... -2.36038375e+00 -1.33116221e+00 1.06529450e+00] ... [ 2.20494676e+00 -9.35562670e-01 -1.62127554e+00 ... 2.65688121e-01 -8.71573150e-01 9.77211893e-02] [-1.17458510e+00 1.38091254e+00 7.60722756e-01 ... 
-8.42314184e-01 -7.75072694e-01 -1.83129036e+00] [-2.95509279e-01 -8.35684240e-01 -1.33670434e-01 ... 5.89045063e-02 -1.30870119e-01 4.74979460e-01]] [[-2.48626065e+00 2.13578510e+00 -1.69269294e-01 ... 3.06614616e-04 9.06509399e-01 -1.58677900e+00] [ 2.24195272e-01 -9.63824689e-02 -1.23141670e+00 ... 4.91442740e-01 2.11733744e-01 -1.26010459e-02] [ 1.03109264e+00 -4.36616480e-01 7.35703528e-01 ... -4.86469299e-01 -8.69381607e-01 4.48748529e-01] ... [-1.38153696e+00 2.09649825e+00 4.50344265e-01 ... -5.82974195e-01 -4.82949376e-01 -2.67535400e+00] [ 6.05955064e-01 -2.45262042e-01 -7.66163170e-01 ... -1.17294884e+00 -9.67389122e-02 -4.89620209e-01] [ 5.29801697e-02 -1.15599859e+00 -1.00102699e+00 ... -1.26426601e+00 -7.10966527e-01 -5.80453038e-01]] [[ 3.58486205e-01 1.95997179e+00 -1.02989852e+00 ... 7.80552579e-03 -8.48286524e-02 4.32888538e-01] [-3.45307559e-01 -5.92507064e-01 -8.04870665e-01 ... -5.17083228e-01 1.37424457e+00 -1.72979236e+00] [-1.86721057e-01 3.29585731e-01 5.03952324e-01 ... -3.51822406e-01 -7.04667628e-01 -1.03221571e+00] ... [-7.10917711e-01 -1.00600767e+00 -8.19745421e-01 ... 7.83664644e-01 -1.25021923e+00 7.31745422e-01] [ 6.29298270e-01 1.89674687e+00 7.73634434e-01 ... -4.91419509e-02 7.22388253e-02 1.36222690e-01] [-3.66509467e-01 1.15998900e+00 1.26091504e+00 ... -3.32737231e+00 4.98784184e-01 -8.20415318e-01]] ... [[ 1.35434771e+00 1.23124802e+00 -6.68448925e-01 ... 8.65013123e-01 1.17614448e+00 -3.76739055e-01] [-1.25942576e+00 -1.07576048e+00 -5.01960874e-01 ... -1.42219818e+00 2.47844271e-02 1.36414027e+00] [-1.18879175e+00 4.22974646e-01 2.09680006e-01 ... 4.52352494e-01 -9.08443391e-01 3.38807613e-01] ... [ 1.04393899e-01 -7.12477207e-01 -9.22699273e-02 ... 4.38298494e-01 -1.86764348e+00 -9.88506436e-01] [-1.19723928e+00 -1.00415707e+00 -1.42936575e+00 ... -1.45803779e-01 -2.82249421e-01 3.17566931e-01] [ 1.03567171e+00 1.62244827e-01 -2.96700144e+00 ... 
-7.82224655e-01 -1.25265813e+00 -1.53525835e-02]] [[ 3.39940757e-01 -3.27809304e-02 1.43332934e+00 ... 7.15190351e-01 4.48440641e-01 -7.76558042e-01] [ 1.22045159e+00 1.98800075e+00 -4.78155971e-01 ... 5.51939249e-01 -7.68439233e-01 -1.32393312e+00] [ 4.76316363e-01 8.82080734e-01 1.82152164e+00 ... -2.26848558e-01 1.07368088e+00 -4.26158667e-01] ... [-3.04110169e-01 -5.92005968e-01 -1.01510489e+00 ... -1.89817071e-01 -7.70150185e-01 -7.57873595e-01] [ 6.24542534e-01 4.50835824e-01 -2.53929043e+00 ... -3.40314239e-01 1.39087212e+00 -1.10618055e+00] [-5.90584636e-01 5.61974227e-01 -1.48325965e-01 ... 6.35562420e-01 8.91363472e-02 1.63742995e+00]] [[ 7.43187904e-01 -1.59399164e+00 1.77611268e+00 ... 1.12280023e+00 1.61714268e+00 2.16374755e+00] [-1.20071733e+00 7.81484306e-01 1.00422418e-03 ... -1.33977842e+00 -1.26102483e+00 -1.00620210e-01] [ 1.16539073e+00 -5.59094906e-01 -1.31015766e+00 ... -9.87835303e-02 -8.03218961e-01 1.41812599e+00] ... [-5.59079111e-01 4.81665343e-01 -2.17039299e+00 ... -1.63712800e+00 9.83887017e-01 -8.35140347e-01] [-5.98871112e-02 -1.03590202e+00 -4.08949070e-02 ... 7.18594253e-01 2.60867715e-01 4.12890077e-01] [-6.40489221e-01 1.59948778e+00 -1.22974038e+00 ... -1.26052633e-01 8.74688864e-01 -8.41111317e-02]]] [[[-4.90760863e-01 1.01961648e+00 8.59495282e-01 ... -6.52101755e-01 5.17936766e-01 -9.34036970e-01] [ 3.65609407e-01 1.56379819e-01 -2.34951422e-01 ... -2.70280056e-02 6.53776705e-01 1.66046008e-01] [ 1.73628509e-01 -6.08978152e-01 -2.36421525e-01 ... 2.92123437e-01 2.75593817e-01 -9.79798138e-01] ... [-1.99718043e-01 -2.50323005e-02 2.04653949e-01 ... 5.89174747e-01 1.17985262e-02 -8.88560057e-01] [ 1.29891738e-01 -4.15744036e-01 -9.19649959e-01 ... 6.43109918e-01 -8.18966329e-01 4.37960595e-01] [ 5.50603271e-02 8.87200892e-01 7.69214630e-01 ... 3.39788556e-01 -7.04824179e-02 -1.74773067e-01]] [[-2.07134604e-01 1.48252755e-01 3.27214628e-01 ... 
3.52733135e-01 5.73441267e-01 -1.17386349e-01] [ 5.66005051e-01 -1.75937384e-01 -8.89614050e-04 ... -4.54341829e-01 4.38851595e-01 9.17891026e-01] [ 6.71113133e-01 -1.80549193e-02 1.85180053e-01 ... -6.37645960e-01 -2.83972979e-01 8.30826938e-01] ... [-5.38687944e-01 5.38999557e-01 7.47961223e-01 ... -5.01035154e-01 1.43479437e-01 -4.79079306e-01] [ 5.77459820e-02 -1.64335534e-01 -1.83607303e-02 ... -4.48265821e-01 2.93164939e-01 -4.52588737e-01] [-2.58257389e-01 -5.01729608e-01 1.08941901e+00 ... -2.38704041e-01 -3.88477504e-01 -1.70532495e-01]] [[ 7.86130369e-01 5.63426435e-01 -7.55096972e-01 ... -1.74533859e-01 5.90789020e-01 -7.36809850e-01] [ 4.90535021e-01 1.64342970e-01 -1.87394306e-01 ... 2.85852432e-01 -2.72460073e-01 -5.16274512e-01] [ 7.59130716e-01 -6.96371049e-02 3.37856889e-01 ... 6.69871509e-01 4.20988910e-02 6.74121128e-03] ... [-1.15017450e+00 1.64573386e-01 -3.83963317e-01 ... -2.67756402e-01 -7.55673870e-02 4.20846701e-01] [-6.52630091e-01 8.32629859e-01 -3.15245730e-03 ... -2.39228219e-01 -2.26314217e-01 1.88621134e-01] [ 6.97584629e-01 6.61149248e-02 8.64131749e-03 ... 7.20409632e-01 -5.36739409e-01 -3.34836364e-01]] ... [[-1.21806353e-01 4.39494371e-01 6.04285359e-01 ... -1.08406320e-01 1.75294466e-02 -2.98154593e-01] [ 7.93101549e-01 -2.51961678e-01 -8.11456665e-02 ... -1.19409926e-01 -2.14544937e-01 -4.78302121e-01] [ 7.64935255e-01 5.71921766e-02 -1.38955474e-01 ... 4.67908502e-01 9.78825465e-02 -3.68964761e-01] ... [ 4.37675923e-01 5.47380269e-01 -1.88976526e-01 ... -7.18138879e-03 1.22246128e-02 -7.00808406e-01] [-3.29695702e-01 1.07129812e+00 -6.61216378e-01 ... -9.59681094e-01 2.93104649e-01 4.60881740e-01] [ 6.83002099e-02 -3.54569167e-01 -5.49653411e-01 ... 2.89914589e-02 -5.84875531e-02 -3.54226142e-01]] [[-8.40164661e-01 4.70085323e-01 -3.78024951e-03 ... -2.14084223e-01 -2.32624814e-01 -1.10581353e-01] [ 6.63670540e-01 1.06913626e+00 -9.47634697e-01 ... 
-1.26267374e+00 8.00918460e-01 5.06634891e-01] [ 3.25173050e-01 2.39869341e-01 -5.70565641e-01 ... -6.79580092e-01 -3.46641600e-01 5.61372042e-01] ... [-8.04976463e-01 -1.84133828e-01 3.32195729e-01 ... 3.96820545e-01 9.91374999e-02 -3.70418221e-01] [ 3.16797405e-01 -4.23975170e-01 -7.91207135e-01 ... 9.47087854e-02 -1.53509146e-02 1.39518607e+00] [-3.97232383e-01 -1.26125395e-01 1.72205389e-01 ... -2.84978356e-02 -2.90029496e-01 3.41942996e-01]] [[-2.75412738e-01 -6.10550106e-01 6.12334669e-01 ... 2.46807009e-01 1.16919041e-01 -1.68207750e-01] [ 5.88972926e-01 4.13741618e-01 9.28095520e-01 ... -3.84099513e-01 -2.68167436e-01 4.03859377e-01] [-5.50299168e-01 3.54632407e-01 -4.90238816e-02 ... -7.40283430e-01 5.80751121e-01 -4.90855277e-01] ... [ 9.14344713e-02 -7.59423792e-01 -9.76707458e-01 ... 6.79767668e-01 -8.96370113e-02 8.89231801e-01] [-8.40003252e-01 1.44343182e-01 -9.18003440e-01 ... -2.35902607e-01 -5.62560201e-01 4.82829124e-01] [ 2.85809338e-01 -4.12029475e-01 -3.87704462e-01 ... 1.51345745e-01 3.51247229e-02 -5.55166006e-01]]]] ... [[[[-3.10453266e-01 -5.48791945e-01 1.90699428e-01 ... 1.97405398e-01 -6.96315706e-01 6.52595144e-03] [-1.05577171e+00 1.50280857e+00 -1.41372287e+00 ... 9.30876955e-02 -2.40650311e-01 4.29085940e-01] [ 8.11260521e-01 1.76124290e-01 -1.74439073e+00 ... -3.27786177e-01 3.21730644e-01 -1.46728635e-01] ... [ 6.49380386e-01 -6.94187224e-01 -7.88462281e-01 ... 1.27231038e+00 -5.93229592e-01 1.50171769e+00] [-1.62000418e-01 5.65200925e-01 7.27153793e-02 ... -1.07340539e+00 -6.25359118e-01 -6.78446710e-01] [ 3.00133795e-01 5.45407057e-01 9.78396386e-02 ... 5.94848454e-01 -7.11476624e-01 3.35972637e-01]] [[-5.33026099e-01 9.11291063e-01 9.60399508e-01 ... 1.03322908e-01 3.61443341e-01 -1.01944399e+00] [-7.54925787e-01 5.75282037e-01 -8.14694881e-01 ... 3.00388992e-01 7.04101324e-02 7.47444749e-01] [-1.47483742e+00 7.53218889e-01 -3.85990113e-01 ... -3.32033724e-01 -3.08057159e-01 -9.56161320e-01] ... 
[ 7.99408615e-01 7.07253516e-01 -6.88760936e-01 ... 6.68721318e-01 -1.02585125e+00 3.74236345e-01] [ 3.99324566e-01 5.82234383e-01 1.09243274e-01 ... 6.11746490e-01 5.46564758e-01 -4.61131297e-02] [ 4.53375697e-01 4.84091252e-01 -7.96897829e-01 ... 6.61098897e-01 -7.83033192e-01 -8.49344134e-01]] [[ 8.91188800e-01 1.13027729e-01 3.68733585e-01 ... -3.20129305e-01 -4.90244618e-03 -3.21864069e-01] [-7.66619742e-01 5.37928939e-02 6.85585678e-01 ... 3.47635508e-01 5.60297787e-01 -5.55528700e-01] [-1.05764925e+00 6.83428109e-01 1.12075961e+00 ... -1.69260633e+00 4.30935957e-02 -2.61602867e-02] ... [ 9.26339328e-01 -2.93772846e-01 -7.54863262e-01 ... -1.16260850e+00 9.61168408e-01 -1.06565106e+00] [ 1.07234287e+00 -1.12412000e+00 1.07489812e+00 ... -5.95427275e-01 -9.13085192e-02 -1.06537795e+00] [-9.88230109e-02 5.73780417e-01 2.01652557e-01 ... 9.38739836e-01 -1.10993457e+00 -1.39768529e+00]] ... [[-2.89063662e-01 1.98912874e-01 1.26709712e+00 ... -2.48162031e-01 -6.86139822e-01 1.39933813e+00] [ 3.73729646e-01 -1.46195793e+00 -3.52652930e-02 ... -1.06971574e+00 9.00306582e-01 -1.19167435e+00] [ 1.44541562e+00 -1.25104237e+00 4.10424769e-01 ... -3.85387719e-01 -9.44645330e-03 -5.48195481e-01] ... [-1.00356758e-01 1.21940112e+00 1.86542779e-01 ... 1.25147080e+00 -1.00563741e+00 -4.32061285e-01] [ 1.00087523e+00 7.35618234e-01 1.65724599e+00 ... 4.70128804e-01 -1.01038647e+00 4.55130696e-01] [-1.02335647e-01 8.46223235e-01 1.02827594e-01 ... -1.20166540e-01 4.42291588e-01 8.41548622e-01]] [[-1.62754580e-02 5.11693992e-02 3.16740312e-02 ... -2.70872921e-01 2.45798573e-01 -3.72697651e-01] [ 7.69742250e-01 1.23830445e-01 -9.97186482e-01 ... -1.35545659e+00 -5.09894550e-01 -3.41641992e-01] [-4.04268861e-01 4.24347967e-01 -2.48641983e-01 ... -6.22994363e-01 5.45521140e-01 1.81499243e-01] ... [ 2.01570272e-01 6.99234128e-01 8.15549612e-01 ... 2.32073814e-01 9.34439719e-01 2.43735015e-01] [-2.32335865e-01 -6.06133819e-01 -1.95369154e-01 ... 
3.60815048e-01 -4.47417766e-01 1.79660589e-01] [-1.44039941e+00 4.79905099e-01 6.13217771e-01 ... -1.58363506e-01 4.85952906e-02 1.30430982e-01]] [[ 4.99029517e-01 7.26271048e-02 7.13106468e-02 ... -3.92101139e-01 3.92098188e-01 -1.90089449e-01] [-1.52544722e-01 -6.30024970e-01 -3.43190491e-01 ... 1.49966383e+00 2.71631837e-01 -2.74605066e-01] [-9.10213828e-01 -5.14421880e-01 -4.09944445e-01 ... -4.07644272e-01 -5.69482863e-01 1.48407638e+00] ... [-1.11881435e+00 -3.79945412e-02 -1.42929733e+00 ... 9.11756158e-01 -1.27510631e+00 -1.37803435e-01] [ 5.15560746e-01 -5.28826296e-01 -6.93120897e-01 ... -6.95948005e-01 4.13971931e-01 -3.55643392e-01] [-3.54462564e-01 1.47017747e-01 1.07066989e-01 ... -2.69109964e-01 4.94999260e-01 -1.39787328e+00]]] [[[ 3.75414401e-01 5.23906291e-01 -1.52475178e-01 ... 2.92025879e-02 -1.92060933e-01 -5.26091099e-01] [-1.10126987e-01 -4.67858642e-01 -3.39429617e-01 ... -6.83039188e-01 1.99736208e-01 -1.15822487e-01] [ 8.48147646e-02 1.62438005e-01 5.19159555e-01 ... -6.09151125e-01 -2.53775537e-01 5.84709048e-01] ... [-5.78576177e-02 -1.89030200e-01 1.33531106e+00 ... -5.77955425e-01 3.14523548e-01 -6.06750667e-01] [-5.05227089e-01 -7.37433657e-02 -6.20881796e-01 ... -1.56230301e-01 -3.29278767e-01 5.97513244e-02] [ 4.38141078e-01 3.20099384e-01 -6.33884728e-01 ... 6.38842940e-01 -8.86777580e-01 -4.37171489e-01]] [[ 6.88441023e-02 -5.53139865e-01 -2.19803303e-01 ... -8.18792656e-02 3.47937465e-01 -4.32003945e-01] [-1.67598352e-01 -1.77092388e-01 3.68291616e-01 ... 7.12724328e-01 5.83755791e-01 1.12477407e-01] [-3.12550128e-01 3.05286378e-01 9.87684205e-02 ... -2.84135580e-01 -3.24368447e-01 -4.52636808e-01] ... [-1.26018763e-01 7.95530915e-01 -2.92338789e-01 ... 5.12711167e-01 7.68706262e-01 8.43696475e-01] [ 8.06899667e-02 3.54370564e-01 9.21426356e-01 ... 5.33739269e-01 2.40555510e-01 -6.74165487e-02] [-1.01748213e-01 -7.21799314e-01 1.30185381e-01 ... 
1.17241338e-01 3.96111161e-01 2.14405164e-01]] [[-3.12610358e-01 4.62099731e-01 6.01666152e-01 ... -1.41524911e-01 -1.64513230e-01 -3.96560401e-01] [-9.46559086e-02 2.80190229e-01 1.44710094e-01 ... -2.13979378e-01 2.98164934e-01 -3.41211289e-01] [-3.57517838e-01 -2.82432824e-01 -5.97682059e-01 ... 1.72325969e-01 4.45005149e-01 -5.02019286e-01] ... [-6.65723503e-01 -1.84618056e-01 -2.83846259e-01 ... -3.12328227e-02 8.05178583e-01 -4.81094748e-01] [-2.41032794e-01 3.29472087e-02 6.37114525e-01 ... -1.51564432e-02 1.32855237e-01 3.86892617e-01] [ 3.17702413e-01 2.67763644e-01 1.51263028e-01 ... -2.10424900e-01 -1.76546589e-01 1.97189122e-01]] ... [[-3.69418740e-01 5.52214026e-01 4.72789049e-01 ... -1.80113725e-02 7.49394596e-02 1.20022804e-01] [-1.16237864e-01 3.53625357e-01 -2.70915359e-01 ... -3.15778732e-01 6.95571840e-01 3.08389440e-02] [ 2.59625584e-01 -2.42778569e-01 -4.27105725e-01 ... 5.20619750e-01 -1.10879935e-01 2.54053399e-02] ... [-5.60110174e-02 -4.70333453e-03 -6.11231506e-01 ... 4.07966793e-01 -1.09999090e-01 5.15855312e-01] [-4.09194112e-01 -7.40319788e-01 -5.49926832e-02 ... -4.57141131e-01 -3.01117599e-01 4.61157933e-02] [ 1.46133244e-01 5.58215439e-01 -3.53829712e-01 ... 1.03432000e-01 -2.33976349e-01 7.78402984e-02]] [[ 3.75753999e-01 2.51685262e-01 2.79696226e-01 ... -1.87489152e-01 6.43780231e-02 -3.83867502e-01] [-4.63447899e-01 8.21097195e-02 4.99131866e-02 ... -5.89528494e-02 3.25381339e-01 1.85828611e-01] [ 2.29660407e-01 5.60159624e-01 -4.87954736e-01 ... -7.03090057e-03 1.28954902e-01 1.36865810e-01] ... [ 1.14026725e-01 -8.33641067e-02 -5.83186820e-02 ... 5.18932045e-02 -9.10454616e-02 1.14172488e-01] [ 4.78842817e-02 3.95404488e-01 -2.44913980e-01 ... -1.52882442e-01 -1.18114196e-01 1.57963187e-01] [-2.44043231e-01 -4.68990088e-01 -2.04078943e-01 ... 4.70992833e-01 9.84230544e-03 8.83647740e-01]] [[-5.24123967e-01 4.86060977e-01 9.68827546e-01 ... 
-2.40616292e-01 2.54300624e-01 -6.60714805e-01] [ 8.52206424e-02 5.97176664e-02 4.56411451e-01 ... -1.30166471e-01 3.55465114e-01 -2.28036270e-01] [ 9.08595249e-02 -3.96970302e-01 2.37150788e-02 ... 5.51297404e-02 6.35667920e-01 5.42139053e-01] ... [ 1.36340931e-01 6.39667036e-03 2.54499078e-01 ... 2.56707668e-01 -1.20900504e-01 4.12000924e-01] [ 2.64561474e-01 6.57561719e-02 -2.85212457e-01 ... 8.42907667e-01 9.15697142e-02 5.22208214e-03] [ 5.43136835e-01 -5.08487403e-01 5.37227273e-01 ... 6.97965086e-01 2.84546137e-01 3.44155103e-01]]] [[[-1.44200611e+00 3.86409402e-01 -7.92378068e-01 ... 9.93299410e-02 2.00546455e+00 6.60488784e-01] [ 4.76905763e-01 3.68662439e-02 -1.18046248e+00 ... -1.83828449e+00 -6.50543749e-01 5.08519650e-01] [ 1.08089674e+00 -2.54257774e+00 1.09722424e+00 ... 1.72999930e+00 5.77749193e-01 -2.40166974e+00] ... [ 1.17143905e+00 1.05205023e+00 6.57522678e-01 ... -9.98994648e-01 -1.54451168e+00 -4.22503293e-01] [-2.03505683e+00 -2.44757581e+00 1.55542457e+00 ... -3.36376101e-01 8.37456524e-01 -3.08049226e+00] [-5.62487364e-01 1.27167284e+00 3.62739325e-01 ... -2.59226131e+00 -1.20505571e+00 2.85462499e+00]] [[ 1.40163040e+00 1.49309897e+00 -5.12076199e-01 ... 2.63215899e-01 3.36302459e-01 7.59266376e-01] [-1.34839070e+00 -1.40726137e+00 2.50546002e+00 ... -2.28129840e+00 2.97651321e-01 5.74593842e-01] [-8.39830697e-01 4.95383263e-01 -1.38307595e+00 ... 2.24820271e-01 -1.20558286e+00 -1.67338908e+00] ... [-1.18870270e+00 1.18811756e-01 -8.05705011e-01 ... -2.72665691e+00 -9.71080601e-01 -1.54732406e+00] [-6.70842767e-01 2.71038320e-02 -1.97094941e+00 ... -7.30793297e-01 -8.20391953e-01 -4.73147094e-01] [-2.10090876e+00 -8.73414457e-01 1.04510331e+00 ... -3.80425602e-01 7.28919148e-01 2.54796505e-01]] [[-5.82619965e-01 4.35153335e-01 -1.34856606e+00 ... -2.15127692e-02 -2.03316450e+00 -8.05399641e-02] [ 6.76821098e-02 6.88157380e-02 1.94382644e+00 ... 
4.60997880e-01 -1.03517997e+00 -1.27160931e+00] [-7.04615891e-01 -1.45180714e+00 -9.75921005e-02 ... 8.63108099e-01 1.72190046e+00 -1.44264960e+00] ... [-7.94670939e-01 9.52536225e-01 -8.51868689e-01 ... -1.20693088e+00 -4.65409197e-02 -1.70719221e-01] [ 1.84849184e-02 9.10132229e-01 2.06057143e+00 ... 7.93625534e-01 -5.60648322e-01 -1.48012012e-01] [-8.04026544e-01 1.21388130e-01 1.08890720e-01 ... 9.59528863e-01 1.71502221e+00 -1.61287713e+00]] ... [[-9.94788766e-01 9.79564041e-02 -1.39834091e-01 ... 2.09869599e+00 2.96030045e-01 4.70639974e-01] [-1.14040887e+00 -2.28465348e-01 -1.39800000e+00 ... -3.47402841e-01 -9.46882784e-01 -1.61680233e+00] [ 8.45611095e-01 -8.51787031e-02 1.21521270e+00 ... -1.96570456e+00 1.47276625e-01 -9.59703267e-01] ... [-4.76038724e-01 2.87312597e-01 1.43482244e+00 ... -6.98378384e-01 6.76934600e-01 2.83208668e-01] [-4.24119145e-01 1.17101920e+00 7.30881810e-01 ... -6.37798488e-01 -1.41826200e+00 -6.26169443e-01] [-3.37091982e-01 1.32885307e-01 1.18603587e+00 ... 4.82417464e-01 -1.17998791e+00 -8.99210155e-01]] [[ 8.10410678e-01 4.04774010e-01 -9.55643892e-01 ... -4.62764114e-01 -1.11870849e+00 -1.00539422e+00] [ 1.09194684e+00 7.77713299e-01 -3.79944491e+00 ... 6.85698807e-01 5.22893727e-01 1.75884545e+00] [ 2.29416311e-01 2.56829441e-01 -6.76002920e-01 ... -1.69238245e+00 -8.66952464e-02 -1.93719232e+00] ... [ 1.93137333e-01 -3.66472065e-01 1.46950710e+00 ... 6.29408360e-01 8.92501324e-02 -7.10854352e-01] [-3.07941344e-02 2.66661793e-01 -3.47677991e-02 ... -7.46649683e-01 -1.59923851e+00 1.59848595e+00] [-1.13946581e+00 -1.09544146e+00 -1.35885108e+00 ... 9.94031668e-01 -4.74392772e-01 6.69829845e-01]] [[ 1.39479160e+00 6.26292750e-02 1.06358266e+00 ... 1.26908720e+00 7.25575686e-02 9.72209990e-01] [ 8.13927054e-01 1.10354984e+00 -1.41834724e+00 ... -5.37688017e-01 -1.24194607e-01 1.13101184e+00] [-6.32847190e-01 -1.17499065e+00 6.87194765e-01 ... -3.37642163e-01 -1.26875835e-02 -1.97370791e+00] ... 
[ 8.59051049e-01 1.84518051e+00 -9.33155656e-01 ... 8.67963612e-01 3.84471565e-01 2.11786318e+00] [-1.16988921e+00 3.43685776e-01 4.78023738e-01 ... 2.22272035e-02 -4.21816945e-01 -3.69824499e-01] [-5.43445528e-01 7.91525483e-01 7.46062994e-01 ... -1.75955284e+00 -8.76305997e-02 1.36829114e+00]]] [[[-9.55127403e-02 -2.87743956e-01 1.15449294e-01 ... -3.64264965e-01 1.13561273e-01 1.46592502e-02] [ 6.73432574e-02 -2.21708547e-02 8.21200237e-02 ... 4.58510220e-01 2.29412271e-03 -1.73659846e-01] [ 1.30518228e-01 -4.86338837e-03 -1.57443896e-01 ... -1.00732349e-01 -8.57702568e-02 -1.53271317e-01] ... [ 1.04666375e-01 5.37690163e-01 3.74576330e-01 ... 4.54693800e-03 -4.04128611e-01 -3.24518114e-01] [-1.30680040e-01 6.08103424e-02 -1.48339272e-01 ... -2.64900714e-01 -9.13597420e-02 9.14782733e-02] [-2.52153158e-01 -1.61235571e-01 -1.36214733e-01 ... 2.28028342e-01 5.20342171e-01 2.41195545e-01]] [[ 1.66076019e-01 7.58436099e-02 3.81483942e-01 ... 1.43676504e-01 -5.94012067e-02 1.37754846e-02] [-1.58577133e-02 5.02094626e-02 -2.70035286e-02 ... 1.61779802e-02 -3.12482476e-01 2.94276893e-01] [ 3.15587103e-01 -2.86011095e-03 1.78969771e-01 ... 2.22689752e-02 -5.84091097e-02 -8.07272419e-02] ... [-1.93044730e-02 -1.32714868e-01 2.22622789e-02 ... 2.36785322e-01 6.32340431e-01 1.83374599e-01] [-2.83873796e-01 2.11417764e-01 -4.21779841e-01 ... -3.32236364e-02 2.07648620e-01 -4.31039900e-01] [-8.95444527e-02 -1.25643715e-01 -3.34371716e-01 ... 4.84728307e-01 -9.25976783e-02 2.19527498e-01]] [[-1.45251632e-01 -2.86654364e-02 -2.60108747e-02 ... 2.53066290e-02 -7.35870227e-02 -3.50998566e-02] [-1.28417835e-01 2.91073263e-01 -2.65041143e-01 ... -5.07585287e-01 -9.65070128e-02 3.07838261e-01] [-3.42596829e-01 -2.55306214e-01 -7.54933134e-02 ... 6.15688823e-02 -7.28362724e-02 -2.36145690e-01] ... [ 6.47104234e-02 2.25244046e-04 4.10802543e-01 ... -2.46688858e-01 -1.64094910e-01 1.46418393e-01] [-3.89926255e-01 -4.27346289e-01 3.46429020e-01 ... 
2.83959955e-01 -3.32975477e-01 4.60509390e-01] [ 3.55072230e-01 5.06983325e-02 2.06377916e-03 ... -2.11855859e-01 -1.08806334e-01 -1.66232809e-01]] ... [[ 9.68307257e-02 -1.86836332e-01 1.89285606e-01 ... -1.09468490e-01 3.10221225e-01 -2.92705595e-01] [ 3.18290472e-01 -3.61519337e-01 2.10022211e-01 ... 1.05415247e-02 1.34911478e-01 1.73192471e-01] [-2.17933923e-01 1.37757808e-02 7.71225151e-03 ... 9.75890756e-02 -2.63271123e-01 2.58800447e-01] ... [ 6.50727972e-02 4.23695780e-02 7.27447821e-03 ... 1.50151059e-01 -2.34785020e-01 -1.28165781e-01] [-9.54306275e-02 1.02531791e-01 1.66933343e-01 ... 3.92775059e-01 -1.08412415e-01 -4.61229861e-01] [ 2.82899946e-01 -2.31954217e-01 1.80908442e-01 ... 1.71275049e-01 1.59445047e-01 7.72638395e-02]] [[-3.37165385e-01 -2.29885146e-01 -3.47800702e-01 ... 8.08435455e-02 -1.30814789e-02 -2.12120607e-01] [ 4.00163859e-01 -2.09854916e-01 4.31990139e-02 ... -7.19435439e-02 8.29350650e-02 -1.28997028e-01] [-1.82358623e-02 -1.70286164e-01 8.38937908e-02 ... 4.51431036e-01 -1.65644020e-01 3.80656511e-01] ... [-3.10464859e-01 -3.21129501e-01 -2.26929873e-01 ... -1.35071054e-01 -9.92850214e-02 1.92877904e-01] [-4.28824089e-02 -9.84705761e-02 -7.00979903e-02 ... -2.23944187e-01 -2.27798715e-01 1.85819596e-01] [-2.02418029e-01 9.43996012e-02 1.63907796e-01 ... 4.24024649e-02 -1.45595133e-01 -6.90363646e-02]] [[-2.57756412e-02 -2.00717464e-01 -5.89637995e-01 ... 5.23677170e-02 7.74970427e-02 3.64446454e-02] [-1.53733790e-01 -5.00699759e-01 -1.26038074e-01 ... 2.82044083e-01 -4.02830392e-02 9.91226584e-02] [ 5.28191440e-02 1.39243286e-02 1.39613926e-01 ... -5.84523119e-02 -9.96525213e-02 1.84430420e-01] ... [-2.45022029e-01 -3.13385397e-01 -3.28044891e-01 ... -3.39120805e-01 -2.55825277e-02 -1.60070360e-01] [ 2.36537188e-01 -2.18919098e-01 1.69453815e-01 ... -7.82846734e-02 -4.32547322e-03 2.62510702e-02] [ 5.01718581e-01 -2.01240644e-01 -4.34423774e-01 ... 
-2.67762899e-01 8.15743804e-02 3.12538058e-01]]] [[[-9.27671969e-01 -2.09716901e-01 -5.20057380e-01 ... -2.31822014e-01 3.32200825e-01 2.25278735e+00] [ 2.00125024e-01 4.68473464e-01 -2.02138877e+00 ... 1.86923778e+00 -9.34854031e-01 -2.89284915e-01] [-1.15661490e+00 -1.59607959e+00 -1.54405451e+00 ... 1.33650780e+00 1.88320488e-01 -1.41507775e-01] ... [ 1.47114813e+00 1.43414390e+00 -3.79024649e+00 ... -6.99644446e-01 -3.61952960e-01 4.50984448e-01] [-7.11134553e-01 1.72741190e-02 -1.51340151e+00 ... 1.13251901e+00 -2.15955949e+00 1.91047859e+00] [-1.27995682e+00 3.00355375e-01 -2.46021375e-01 ... -5.27611487e-02 -3.44460577e-01 -3.73038948e-01]] [[ 5.29137552e-01 9.32196558e-01 -2.82223523e-01 ... -1.63797629e+00 1.02519107e+00 2.15850338e-01] [-3.08006227e-01 7.26717889e-01 -4.00647461e-01 ... 5.44079185e-01 -1.84705079e-01 -1.80818081e+00] [-1.35745668e+00 -3.90501112e-01 1.68954706e+00 ... -8.92509103e-01 1.57517064e+00 1.24691641e+00] ... [ 1.92724311e+00 3.69611621e-01 1.26762843e+00 ... 1.09913006e-01 -1.01130474e+00 -4.66052830e-01] [ 1.12775671e+00 2.31047168e-01 2.76524472e+00 ... -1.67938840e+00 -2.32081860e-01 2.12591678e-01] [-3.36060286e-01 2.65276504e+00 4.13246185e-01 ... -6.88961506e-01 2.89290667e+00 1.50682116e-02]] [[ 1.69176664e-02 -9.62299466e-01 -2.15284854e-01 ... -9.48201895e-01 6.11004531e-01 -1.15851533e+00] [-5.62927350e-02 -1.01348424e+00 2.21249342e+00 ... -5.08952975e-01 -1.47506022e+00 -1.29328773e-03] [-3.04168105e+00 -1.81419730e+00 -1.50843108e+00 ... -3.49842578e-01 -2.85286099e-01 2.39767957e+00] ... [-2.62511373e-01 6.34884834e-01 9.63125944e-01 ... -1.97440505e+00 -1.41975987e+00 5.30358434e-01] [-2.79326707e-01 -7.83572078e-01 -8.06365609e-01 ... -7.59923697e-01 -4.71635342e-01 -9.77466047e-01] [ 4.23439354e-01 5.59648454e-01 5.16008556e-01 ... -1.99907279e+00 1.10571861e+00 -2.21297359e+00]] ... [[-2.14759991e-01 1.65052474e+00 5.54970622e-01 ... 
-1.14498802e-01 -9.39373255e-01 -6.34360254e-01] [ 2.13935733e+00 1.30691850e+00 -1.45029795e+00 ... -4.75924522e-01 -3.82821470e-01 5.98529637e-01] [ 1.56789482e+00 1.37532270e+00 -9.66621220e-01 ... 1.48556486e-01 -8.22633266e-01 -1.47666186e-01] ... [-1.47328818e+00 1.47113717e+00 9.78247344e-01 ... 5.56185663e-01 1.19159484e+00 6.11007065e-02] [-1.99672592e+00 -1.16196740e+00 2.03184649e-01 ... -2.08736753e+00 -3.61058325e-01 1.63782525e+00] [-7.96285510e-01 -9.15965319e-01 -6.12001061e-01 ... 2.75771022e+00 -1.34024358e+00 -5.22281788e-02]] [[ 2.69585311e-01 -7.92099953e-01 1.38395783e-02 ... 8.57265770e-01 1.01602209e+00 7.24389181e-02] [-2.85170645e-01 5.63392341e-01 1.49397922e+00 ... -2.83025742e+00 -2.06933573e-01 4.04074490e-01] [ 1.16968262e+00 2.02420497e+00 -1.22719753e+00 ... 1.37267500e-01 -7.14418516e-02 1.77116084e+00] ... [-3.08993697e-01 -1.30118698e-01 1.71603382e+00 ... 1.96700320e-01 -7.42900297e-02 -9.27851975e-01] [-6.08863294e-01 -2.70709276e+00 1.31593382e+00 ... 2.54706532e-01 3.69280994e-01 -4.94932771e-01] [-1.50396454e+00 -1.57300711e+00 9.90481526e-02 ... 6.33619547e-01 -5.13483226e-01 -1.24969220e+00]] [[-9.92187023e-01 5.74417353e-01 4.29596096e-01 ... -2.38190818e+00 2.03979111e+00 1.44418240e+00] [ 6.65263116e-01 -9.00648654e-01 -6.12216651e-01 ... 7.15916932e-01 -7.21790910e-01 -1.30249158e-01] [ 8.10858428e-01 1.97651565e+00 -1.76680356e-01 ... -1.14118254e+00 -8.13087225e-01 1.65729916e+00] ... [ 3.13206725e-02 -1.60611010e+00 1.59578168e+00 ... -6.11776352e-01 -1.74696267e-01 5.25799990e-01] [ 1.22886419e+00 -9.11563337e-01 -4.37869877e-01 ... 7.45850384e-01 -3.48987848e-01 -1.30060530e+00] [-3.37038338e-01 -4.94615197e-01 -3.92415434e-01 ... -1.43889815e-01 -1.05983114e+00 -5.60770452e-01]]] [[[-7.11329997e-01 1.86080113e-01 7.82000184e-01 ... 2.73899168e-01 -2.32822716e-01 6.70258701e-01] [ 1.82084329e-02 -2.93831378e-01 7.25081623e-01 ... 
-3.30419570e-01 4.85258400e-01 1.87595353e-01] [-2.73688674e-01 8.10309768e-01 -3.59794557e-01 ... 8.05185735e-01 4.53683764e-01 7.86100566e-01] ... [-7.26399422e-02 -4.97899681e-01 1.67608693e-01 ... 1.51301235e-01 4.95862067e-01 5.21681666e-01] [ 6.60454154e-01 3.06816604e-02 -7.62696117e-02 ... 1.04088974e+00 7.15255976e-01 5.85719287e-01] [-4.07561868e-01 -5.65278709e-01 -5.18618643e-01 ... -6.44931078e-01 7.29407519e-02 -5.68556666e-01]] [[-5.10518312e-01 -2.15138972e-01 6.44694984e-01 ... -4.43986565e-01 -2.42673084e-01 4.14031267e-01] [ 2.36695826e-01 -8.30715656e-01 -3.10797065e-01 ... -1.80749759e-01 -3.50663573e-01 -3.20359379e-01] [ 5.19697785e-01 -7.26099074e-01 -7.13429749e-01 ... 6.29514754e-01 3.30163479e-01 -2.87539363e-01] ... [-6.71338856e-01 -2.45901734e-01 -5.06791711e-01 ... 2.38501787e-01 -6.63792431e-01 3.31495613e-01] [ 1.38548696e+00 1.14375226e-01 5.19525886e-01 ... -1.70949876e-01 5.49672097e-02 -1.96504980e-01] [ 4.03571464e-02 -3.36955428e-01 1.01825273e+00 ... -3.02407324e-01 -4.93963122e-01 1.25653177e-01]] [[ 3.15933079e-01 1.61542282e-01 3.53020310e-01 ... 2.16533393e-01 -1.53112113e-02 2.11377114e-01] [ 5.84462464e-01 -4.16309461e-02 3.00740957e-01 ... -1.31335288e-01 -2.49805450e-01 -1.27698854e-01] [-5.64758420e-01 -4.61038202e-01 -3.83484870e-01 ... 9.48280655e-03 -1.77329317e-01 5.57412684e-01] ... [-1.95417240e-01 2.40693182e-01 7.92135596e-02 ... -6.15499496e-01 -3.82480800e-01 -3.27119753e-02] [ 1.26870096e+00 -2.08474696e-01 -1.29897237e+00 ... 1.05570173e+00 -2.43556157e-01 3.26835901e-01] [ 6.23953640e-01 -8.40066150e-02 -7.20167346e-03 ... -8.80708873e-01 3.81957144e-01 -6.13525391e-01]] ... [[-1.44642681e-01 -1.01390839e+00 -1.28788602e+00 ... -2.56515145e-01 -3.15344743e-02 4.19782609e-01] [-3.20900589e-01 -5.06723642e-01 1.98036999e-01 ... 9.85026777e-01 5.80589056e-01 7.06621349e-01] [-5.67023605e-02 5.87431490e-01 5.41155636e-01 ... 1.93499553e+00 3.33864428e-02 -5.25040105e-02] ... 
[ 1.28828645e-01 -8.99154663e-01 1.70664504e-01 ... -1.71062201e-01 -9.05140042e-02 -3.51286262e-01] [ 8.26082006e-02 -2.34715372e-01 1.68731153e-01 ... 5.51034629e-01 -1.38381675e-01 -3.38958323e-01] [-5.09342372e-01 2.67263353e-01 -2.70451576e-01 ... 2.55748183e-01 3.82951684e-02 3.82323205e-01]] [[ 1.05547178e+00 -2.84878552e-01 4.36262190e-02 ... 8.85575294e-01 -6.97685778e-01 4.93931063e-02] [-2.48154297e-01 -2.65818566e-01 -1.11456044e-01 ... 7.98277736e-01 1.86454102e-01 4.35776532e-01] [ 3.33788842e-01 9.42563474e-01 6.23935796e-02 ... 3.02935809e-01 -6.43046200e-01 -4.69912142e-02] ... [-1.86515585e-01 -8.73308063e-01 -2.77798977e-02 ... 4.63716835e-01 -1.00320064e-01 7.58680701e-01] [ 5.26291609e-01 -1.86240599e-01 -3.06067616e-01 ... 9.78854716e-01 -5.79361141e-01 1.35003412e+00] [ 1.38511136e-01 1.01036298e+00 5.50639145e-02 ... 1.16733484e-01 8.97613466e-02 1.48894414e-01]] [[-6.50616050e-01 4.51049894e-01 6.02095723e-01 ... 1.16703308e+00 -8.67176354e-01 2.51568586e-01] [-8.05755734e-01 1.23297833e-01 -8.25841606e-01 ... 8.50508869e-01 -6.92732751e-01 -5.36918223e-01] [ 8.48084033e-01 -5.95511436e-01 5.16834438e-01 ... 1.20011680e-01 8.27710569e-01 4.60492909e-01] ... [ 3.60370159e-01 -5.35148904e-02 -4.43570882e-01 ... -7.47373700e-01 -1.08823609e+00 1.32496107e+00] [-7.61394873e-02 -6.66230559e-01 -3.71638656e-01 ... -1.74360629e-02 2.67257374e-02 -3.71349066e-01] [-2.95044273e-01 5.57746112e-01 1.90894991e-01 ... -4.49062288e-01 6.31098449e-01 1.11830592e-01]]]] [[[[-1.32554839e-03 -7.48340368e-01 1.31904408e-01 ... 1.17166504e-01 3.86785828e-02 -1.82693213e-01] [ 7.75297284e-01 -5.25197983e-01 8.92103374e-01 ... 7.78181791e-01 -8.26290846e-01 -2.08023954e-02] [-1.19290614e+00 1.59168597e-02 4.88780379e-01 ... 4.68673468e-01 -1.49613526e-02 1.68566898e-01] ... [ 7.01644003e-01 1.47495076e-01 -7.25490674e-02 ... -7.93157518e-01 2.96538800e-01 -1.33093667e+00] [ 4.35759366e-01 2.59290904e-01 8.27863932e-01 ... 
1.45737708e+00 -5.35013974e-01 -6.25216186e-01] [ 6.84548438e-01 1.99493825e+00 -3.39565873e-01 ... -5.26662529e-01 5.52042544e-01 7.24374771e-01]] [[ 6.22150421e-01 -1.81160003e-01 -4.28431988e-01 ... 2.85676777e-01 3.74251992e-01 -9.12753642e-01] [-7.85238683e-01 -2.51016051e-01 1.49411574e-01 ... 9.34293047e-02 -1.65692449e-01 -1.40550828e+00] [ 3.61015685e-02 -3.10905367e-01 -7.93789089e-01 ... 2.55170166e-01 -8.18480909e-01 -1.72744429e+00] ... [ 1.29435015e+00 8.75207007e-01 -7.46071458e-01 ... 4.78497654e-01 -1.28672612e+00 -3.55837554e-01] [ 8.93465817e-01 -8.80060792e-01 -3.52252305e-01 ... -2.18248054e-01 1.15327501e+00 3.64254117e-01] [ 3.05195957e-01 6.00535758e-02 3.16751063e-01 ... 1.03439248e+00 -6.04983151e-01 -4.03564513e-01]] [[-6.32630229e-01 -9.85596538e-01 -1.81327417e-01 ... -4.02274609e-01 -1.19102228e+00 2.74762958e-01] [-4.50807847e-02 3.68107826e-01 3.03430576e-02 ... 8.31393242e-01 -1.54624701e+00 -2.82520592e-01] [ 3.78277659e-01 -2.15601757e-01 -1.04615569e-01 ... -1.36559236e+00 -1.06070030e+00 1.02775797e-01] ... [-8.70421052e-01 -1.19771063e+00 -1.11184537e+00 ... -1.79299748e+00 -4.39111739e-01 1.52377045e+00] [-5.72503090e-01 2.32582420e-01 -9.97658849e-01 ... -1.42274916e+00 1.21440017e+00 -1.04727745e+00] [-4.32238430e-01 5.57094872e-01 5.27518094e-01 ... 7.55000785e-02 1.11951184e+00 -3.83488715e-01]] ... [[-1.99459684e+00 4.61801827e-01 1.14392436e+00 ... -4.47370969e-02 5.42673692e-02 1.06369042e+00] [-1.54670274e+00 1.32364583e+00 5.29812336e-01 ... -7.37558585e-03 -4.96942252e-01 -4.42330807e-01] [ 5.04567623e-01 -1.08463120e+00 -4.00301255e-02 ... 9.51777697e-02 1.68502107e-01 -3.71450871e-01] ... [-9.98766184e-01 -8.02507162e-01 -2.08295807e-01 ... 7.78164983e-01 -8.81714761e-01 -1.26598060e-01] [ 2.13830784e-01 1.44456315e+00 -8.36274251e-02 ... 6.08960509e-01 4.36589569e-01 4.65755254e-01] [-3.67265314e-01 5.35357177e-01 2.59503663e-01 ... 
1.18398893e+00 -9.63719249e-01 6.08676910e-01]] [[ 1.04861677e+00 1.31023169e+00 4.80290726e-02 ... 3.02024603e-01 -7.98197925e-01 7.66304791e-01] [ 4.13054138e-01 4.41883326e-01 1.60131097e+00 ... 3.78717482e-01 2.29638636e-01 5.30722141e-01] [-1.29689527e+00 8.40896964e-02 -4.60823417e-01 ... -1.24222767e+00 -6.50361598e-01 5.69435656e-01] ... [ 2.32847869e-01 -7.81214833e-01 -6.44993842e-01 ... -1.13949406e+00 1.25232935e-01 9.85187888e-01] [ 6.99731886e-01 -1.98832899e-01 4.13182348e-01 ... -1.15985014e-01 1.68761820e-01 -2.04828307e-01] [ 7.88909674e-01 -1.20156385e-01 -5.01313806e-01 ... 6.99482501e-01 -6.47089958e-01 4.86575335e-01]] [[ 1.16045010e+00 -5.21380067e-01 -5.61480999e-01 ... 8.52126122e-01 -2.80105155e-02 6.98917508e-01] [-7.03041196e-01 3.72475088e-01 9.47261930e-01 ... -6.65613590e-03 -6.59334362e-01 -7.12694377e-02] [ 8.77839047e-03 -6.02630198e-01 -2.82142848e-01 ... 2.42671043e-01 1.86689436e-01 4.77818817e-01] ... [ 4.54563469e-01 -7.38670886e-01 -3.48544717e-01 ... 1.01126306e-01 -2.45220542e-01 -2.99910661e-02] [ 2.00251698e+00 -1.14047825e+00 8.79160106e-01 ... 1.44129962e-01 -4.17618722e-01 -5.62755048e-01] [-4.68543261e-01 8.18024695e-01 -8.05734158e-01 ... -6.19037926e-01 -1.10811567e+00 3.93550247e-01]]] [[[-2.50831008e-01 -7.58829266e-02 -6.92912221e-01 ... -1.57167822e-01 -1.47733204e-02 3.80670391e-02] [-3.44350599e-02 -1.62252814e-01 -8.70460212e-01 ... -5.44429660e-01 2.66566947e-02 2.77054220e-01] [ 3.33431512e-02 1.86102018e-01 2.35822886e-01 ... -3.43191952e-01 2.34036535e-01 4.09473479e-01] ... [-5.61652005e-01 2.40521431e-01 -3.84766132e-01 ... 1.16919562e-01 6.97229922e-01 -1.09283030e-01] [-6.22406065e-01 1.73904449e-01 -6.08677328e-01 ... -2.32382163e-01 -4.79317784e-01 -2.53884405e-01] [-1.31976828e-02 2.46608421e-01 1.18349381e-01 ... -6.89584970e-01 -4.58497465e-01 -4.03368384e-01]] [[-2.65500665e-01 2.59455711e-01 -2.01870147e-02 ... 
1.59928545e-01 6.13864623e-02 5.31438179e-03] [-2.02908870e-02 5.49557745e-01 1.24725990e-01 ... -3.36385190e-01 2.13349685e-02 2.61549383e-01] [ 6.15003183e-02 4.68070865e-01 -9.34008509e-02 ... 1.28309011e-01 -2.86405366e-02 -1.14078693e-01] ... [ 5.44907510e-01 -1.20727211e-01 2.80126953e-03 ... 1.04689382e-01 4.02701721e-02 9.33386236e-02] [-7.65701532e-01 9.72956195e-02 -3.66023660e-01 ... 6.00896299e-01 -5.85715830e-01 2.29136750e-01] [-3.22288275e-01 4.13035095e-01 -1.38460785e-01 ... -5.23956239e-01 2.33803894e-02 1.69495150e-01]] [[ 1.82658583e-02 -1.80466667e-01 -5.59813678e-01 ... -2.74499774e-01 1.89790413e-01 2.41001204e-01] [ 2.58131951e-01 1.91744596e-01 -1.57277316e-01 ... 4.35941041e-01 1.87737923e-02 -2.92695165e-01] [-6.76282883e-01 1.15108863e-01 -2.77539313e-01 ... 3.06031436e-01 -1.94176733e-01 -2.03281585e-02] ... [ 6.08268142e-01 6.05135620e-01 -8.49824548e-02 ... 4.27930981e-01 2.75125086e-01 9.52878669e-02] [-8.65075946e-01 3.93501788e-01 6.50659442e-01 ... -6.16243362e-01 -3.27058762e-01 5.18228829e-01] [-1.55624598e-01 -3.60301167e-01 -3.31548423e-01 ... 1.18813366e-01 1.65078148e-01 2.91801333e-01]] ... [[ 2.39957690e-01 3.04721355e-01 2.32662737e-01 ... -2.54510403e-01 -4.77553964e-01 2.14291692e-01] [ 8.81945565e-02 2.26585910e-01 -2.09726170e-01 ... 6.24096394e-01 2.96724945e-01 -2.22963154e-01] [-1.77403897e-01 -1.40022218e-01 -6.70948029e-01 ... -6.02886200e-01 -1.47280693e-01 -6.08986951e-02] ... [ 7.85089850e-01 -5.72985262e-02 -5.14665425e-01 ... -5.54299587e-03 1.23180933e-01 -2.11214676e-01] [-1.68799713e-01 4.15492982e-01 6.83894515e-01 ... 9.66452122e-01 2.16840491e-01 2.23728418e-01] [ 2.00280249e-01 -4.22826290e-01 -7.87462413e-01 ... 2.75442719e-01 -3.94172370e-01 2.59724736e-01]] [[-4.42228287e-01 -4.52341139e-01 2.80607641e-01 ... 1.23183005e-01 -2.77765125e-01 2.56063819e-01] [ 3.08545142e-01 5.15101142e-02 2.58456985e-03 ... 
-6.22640312e-01 4.30872351e-01 2.89259385e-02] [ 1.97402269e-01 -2.12160274e-01 -1.07353508e-01 ... -2.58728620e-02 3.31083506e-01 6.20371044e-01] ... [ 2.98157454e-01 -5.09256124e-01 6.76615596e-01 ... 8.42884406e-02 -5.41929722e-01 1.58875212e-01] [-2.31586322e-02 1.85373455e-01 -5.03604054e-01 ... -3.29136997e-01 6.45119511e-03 3.10276926e-01] [ 1.46929294e-01 -4.05227929e-01 1.54057264e-01 ... 2.24680513e-01 -9.35854688e-02 -1.48058236e-01]] [[-3.23121130e-01 -1.21388070e-01 1.76509514e-01 ... -3.56192201e-01 1.60509378e-01 1.85832813e-01] [-6.37310743e-01 -1.38043821e-01 -1.36082238e-02 ... -1.88235447e-01 3.25550442e-03 4.53889251e-01] [ 4.15219307e-01 1.13945588e-01 5.92262251e-03 ... 2.41360161e-02 1.83007121e-01 -4.32410419e-01] ... [-2.00192854e-01 1.82283357e-01 4.24794704e-01 ... -5.55439666e-02 2.00243503e-01 2.71946549e-01] [ 5.05395770e-01 3.24631393e-01 4.76264358e-01 ... 2.12326616e-01 -2.30138719e-01 -3.47057968e-01] [-1.31345317e-01 6.96469545e-01 -2.74873704e-01 ... -3.06310803e-01 1.42334178e-01 2.27832817e-03]]] [[[ 1.10933125e+00 -1.26179266e+00 3.82085681e-01 ... 2.84687400e-01 2.90680349e-01 3.00178051e-01] [-1.14081550e+00 -9.18906927e-01 -1.10570121e+00 ... 1.16528809e+00 -3.12803268e-01 1.28196430e+00] [ 3.99273694e-01 6.27594173e-01 1.67413577e-01 ... 1.77030778e+00 -1.37243283e+00 2.21429300e+00] ... [-2.69685316e+00 6.47776574e-02 -1.24170661e+00 ... -1.85823166e+00 1.42407691e+00 1.62253428e+00] [ 2.66373587e+00 1.57144082e+00 -1.74696013e-01 ... 9.47195172e-01 -6.80843368e-02 8.41122687e-01] [-8.90721917e-01 -8.76908362e-01 -1.66031146e+00 ... -8.49047899e-01 -1.97617203e-01 -9.79757309e-01]] [[ 1.27555609e+00 -7.77568221e-01 -3.68003416e+00 ... 1.87031913e+00 8.21548104e-01 1.50983906e+00] [-1.45038337e-01 -7.13407755e-01 -8.67639557e-02 ... -3.39380920e-01 7.52031088e-01 1.22612846e+00] [ 8.78199518e-01 3.68936634e+00 1.71810710e+00 ... -1.10546684e+00 1.04430354e+00 -1.67579770e+00] ... 
[ 1.17724335e+00 -1.59664631e+00 -3.26076269e+00 ... 1.72693408e+00 -1.12887430e+00 9.15045559e-01] [-4.20507073e-01 -1.85864651e+00 -4.90131229e-01 ... -1.57812762e+00 1.62153685e+00 9.90016937e-01] [-1.84114766e+00 1.70635831e+00 -1.23551786e+00 ... -1.34529614e+00 4.32869345e-01 8.51912975e-01]] [[-1.05476010e+00 6.91437840e-01 -1.44131613e+00 ... 7.94857025e-01 -1.48300672e+00 -6.46937966e-01] [-3.40270543e+00 -1.85571909e+00 6.41882479e-01 ... -1.55548501e+00 -3.39936972e-01 -1.18578047e-01] [ 3.46683413e-01 5.29195257e-02 -2.18204141e+00 ... 5.22612810e-01 1.29369116e+00 2.12247157e+00] ... [-7.96611756e-02 -2.14100480e+00 2.45751214e+00 ... 5.15165627e-01 1.08798575e+00 -7.17758983e-02] [-1.02910531e+00 -2.55966043e+00 -1.17025518e+00 ... -1.14088106e+00 1.53138697e+00 1.46580637e-01] [-1.91284275e+00 1.08537531e+00 -1.03864622e+00 ... -1.84806037e+00 -5.52929044e-01 4.11008596e-02]] ... [[ 1.71685684e+00 -3.24212730e-01 -1.17559368e-02 ... -1.74017802e-01 7.71992803e-01 -1.22475851e+00] [ 3.42629403e-01 -2.92716175e-01 8.49052146e-02 ... 2.05647016e+00 -4.74180192e-01 5.18053055e-01] [ 1.36483312e+00 3.28732681e+00 1.42873287e-01 ... 2.37650275e+00 -3.05637288e+00 -3.43281507e-01] ... [-2.14501166e+00 -9.07533824e-01 -4.48384106e-01 ... 1.60311949e+00 -6.81007981e-01 -4.22117949e-01] [ 3.76309603e-01 -1.40515709e+00 8.59348893e-01 ... -2.79908538e+00 -9.66876209e-01 2.32641459e+00] [-1.34413338e+00 -1.88419044e+00 -6.22086823e-01 ... -6.76947892e-01 -8.38019371e-01 -7.34555781e-01]] [[-9.49649096e-01 -9.64016438e-01 9.47557509e-01 ... 1.87449300e+00 8.53325307e-01 9.96233106e-01] [ 1.48924923e+00 -6.18931472e-01 -5.49345911e-01 ... -6.56753480e-01 -2.00795150e+00 7.81473279e-01] [ 2.23282552e+00 -1.24488294e+00 -3.34920955e+00 ... -4.89117146e-01 -1.71040642e+00 -1.85758340e+00] ... [ 9.89035904e-01 9.02707577e-01 1.09141648e+00 ... 3.33549351e-01 -9.06166852e-01 -8.15718099e-02] [ 6.29775107e-01 1.24330485e+00 1.65962920e-01 ... 
-8.80943537e-01 -6.00828648e-01 -9.61184680e-01] [ 7.31109202e-01 -3.01931524e+00 1.77887356e+00 ... -1.22179890e+00 2.56510425e+00 1.29340172e+00]] [[-6.24266803e-01 -7.57729173e-01 -1.52600467e+00 ... 1.17697693e-01 -1.03946888e+00 -1.66717276e-01] [-1.62833023e+00 -1.85266531e+00 1.10689151e+00 ... 5.46707511e-01 -1.31302679e+00 2.14129716e-01] [ 8.96695316e-01 -2.28195643e+00 1.20678425e+00 ... -1.41002345e+00 7.90458143e-01 1.72631180e+00] ... [ 4.85450923e-01 -2.61400670e-01 2.17094111e+00 ... 1.09624255e+00 9.97378170e-01 2.20903134e+00] [ 1.35670722e+00 -1.58769358e-02 1.10104430e+00 ... -1.25401378e-01 -2.00270200e+00 -9.36328590e-01] [ 3.64939183e-01 -3.67135495e-01 1.88787782e+00 ... -1.23826563e+00 1.04268178e-01 3.76240045e-01]]] [[[-1.03472359e-02 1.30318373e-01 1.04500696e-01 ... -1.54052615e-01 1.58587709e-01 1.71388850e-01] [ 2.48595402e-02 5.41620791e-01 -2.99521118e-01 ... -1.81623802e-01 -2.69134138e-02 8.05955660e-03] [-2.67948538e-01 4.06069458e-01 1.67834312e-01 ... -2.42497399e-01 -1.72036421e-02 6.31610379e-02] ... [-9.84782204e-02 2.71167755e-01 2.36898482e-01 ... -3.39155495e-02 -1.08732931e-01 1.87159941e-01] [-5.90435863e-02 1.71189472e-01 -2.55504757e-01 ... 3.44946146e-01 -6.07676864e-01 -2.77295232e-01] [-1.65345669e-01 3.17389071e-01 8.78181830e-02 ... -2.96688169e-01 4.32894140e-01 1.07566141e-01]] [[ 1.68671701e-02 -2.61715770e-01 2.82563984e-01 ... 2.67357260e-01 1.99934572e-01 3.16478550e-01] [-2.95396686e-01 5.02997776e-03 -1.31562188e-01 ... 5.43443680e-01 -2.37794500e-02 -3.16899717e-02] [-1.62867829e-01 -2.35275641e-01 2.72959203e-01 ... 2.94116437e-01 9.16702822e-02 2.09236741e-01] ... [ 2.77883142e-01 3.02297860e-01 1.46261662e-01 ... -1.81192622e-01 2.07589529e-02 4.83038336e-01] [-1.63727403e-02 -1.82832271e-01 -1.11135297e-01 ... 1.80218741e-01 2.21861228e-01 4.36575823e-02] [ 1.92469552e-01 3.21142673e-01 -3.34651381e-01 ... 
8.47290531e-02 -2.43112564e-01 -2.40956217e-01]] [[ 3.25167179e-01 6.75271973e-02 -4.73292202e-01 ... 3.07296962e-01 -6.36539936e-01 -3.67819995e-01] [-2.57078689e-02 -1.30402386e-01 3.67179245e-01 ... 4.95980633e-03 -2.53346562e-01 -3.94805878e-01] [ 1.95482880e-01 -3.13124239e-01 1.18769765e-01 ... 4.11755890e-02 1.76364049e-01 7.77108818e-02] ... [ 1.20854147e-01 -2.53491729e-01 -5.96493296e-02 ... -2.84832567e-01 -2.47716993e-01 -6.28933311e-02] [-1.62834153e-01 5.26837306e-03 2.33686477e-01 ... -5.75035810e-02 -9.54933316e-02 1.29060373e-01] [ 1.92523763e-01 3.51670504e-01 4.41459678e-02 ... 2.22163349e-01 -3.27732891e-01 -1.02681182e-01]] ... [[-4.22218405e-02 9.08734053e-02 8.01200606e-03 ... -3.03539094e-02 2.35062778e-01 1.80837259e-01] [ 1.46208867e-01 2.02876166e-01 1.55335048e-03 ... -1.27787068e-01 1.52053699e-01 3.64274085e-02] [ 4.05622602e-01 7.00024962e-02 -1.40557081e-01 ... 1.69327110e-01 -2.30315998e-01 8.59773066e-03] ... [ 2.52866000e-01 4.01248690e-03 2.94845104e-01 ... 7.57227764e-02 5.79716563e-01 4.06958312e-01] [ 3.24550509e-01 2.01556832e-01 3.52643758e-01 ... 4.01475251e-01 5.26902497e-01 4.42281812e-01] [-1.70145407e-02 2.39729941e-01 -6.73738774e-03 ... -1.94634702e-02 -2.70842284e-01 -1.94024891e-01]] [[ 4.40664917e-01 -7.61251301e-02 -3.89734209e-01 ... 2.03416333e-01 -2.51883417e-01 -8.34199414e-02] [ 4.28167522e-01 1.49512172e-01 -1.13413438e-01 ... 1.23547554e-01 -1.54199272e-01 -1.11742668e-01] [ 1.75390076e-02 -3.07820112e-01 1.89163983e-01 ... -2.31213287e-01 -5.35957217e-02 1.08849272e-01] ... [ 3.45530696e-02 -4.74940628e-01 1.93108320e-01 ... -2.51675189e-01 -2.23342702e-02 5.12072220e-02] [-5.27153127e-02 2.36364618e-01 1.93441242e-01 ... -5.75937331e-02 1.58601210e-01 -4.25207883e-01] [ 1.52416527e-01 -5.12752123e-02 1.63671285e-01 ... 1.44545138e-01 1.66654646e-01 8.16366747e-02]] [[-9.32227895e-02 6.47738427e-02 -9.94622707e-02 ... 
1.83989838e-01 -3.98091048e-01 -1.41007915e-01] [ 1.94283620e-01 4.44598258e-01 -1.46681234e-01 ... -1.33389577e-01 2.93091357e-01 -5.60259521e-01] [-3.35728616e-01 -1.23525612e-01 6.21690869e-01 ... -5.35460003e-02 1.33278638e-01 -2.21677385e-02] ... [ 1.47692874e-01 -3.61421108e-02 1.06781654e-01 ... -1.00866869e-01 -3.46729070e-01 -3.80798541e-02] [ 2.05620125e-01 -1.37439771e-02 -1.21216506e-01 ... -1.79254740e-01 -9.92383808e-02 2.06472501e-01] [ 1.15316905e-01 8.79938602e-02 4.52309996e-01 ... 5.66304214e-02 -5.06881252e-02 5.34542426e-02]]] [[[-1.91251552e+00 -1.61684632e-01 1.08225453e+00 ... -4.84613366e-02 5.88776410e-01 -1.65287745e+00] [-1.07937008e-01 -1.37890935e+00 5.35036802e-01 ... 1.08421016e+00 -7.08047748e-01 9.92213011e-01] [ 9.26369607e-01 4.87998039e-01 -1.66924790e-01 ... 7.60580957e-01 -3.56838882e-01 5.27288437e-01] ... [ 1.99099019e-01 6.20746434e-01 -1.42992961e+00 ... -9.00676906e-01 -1.66322008e-01 -1.47183061e+00] [ 7.41157770e-01 -1.15817475e+00 -1.14202321e-01 ... 2.47583985e+00 1.06431353e+00 3.97203803e-01] [ 1.80837250e+00 -1.06253183e+00 -1.75991237e+00 ... 2.98997760e-01 -9.15381908e-01 4.29311126e-01]] [[-1.05006289e+00 -7.74314761e-01 -1.77489609e-01 ... 2.49619126e+00 1.32975423e+00 -9.82531190e-01] [-9.42614794e-01 7.98449576e-01 -2.71069020e-01 ... 3.71869832e-01 1.03930235e+00 7.66190410e-01] [-4.21531528e-01 -9.89497006e-02 -1.18710816e+00 ... -5.08802116e-01 1.21664250e+00 1.82900107e+00] ... [ 6.89782560e-01 -1.13364077e+00 -3.96883935e-01 ... -1.47647119e+00 -8.97639394e-01 1.44554675e+00] [ 1.67574704e+00 -1.04249291e-01 -5.96430480e-01 ... 1.91184819e-01 -2.63885371e-02 1.24193525e+00] [-4.00038302e-01 -1.61444890e+00 5.85172892e-01 ... -8.34893286e-01 -5.45729876e-01 4.52301502e-01]] [[-6.03416800e-01 3.56453180e-01 -3.00854534e-01 ... -1.91775218e-01 -7.34795153e-01 -3.54891449e-01] [ 1.78396654e+00 -9.45259094e-01 -7.01238513e-01 ... 
4.02162910e-01 3.60809833e-01 1.05225003e+00] [ 1.70516685e-01 -5.56536257e-01 -1.57925057e+00 ... -8.23889017e-01 3.01188737e-01 -8.42012942e-01] ... [ 1.24046668e-01 9.52447355e-01 -1.29174620e-01 ... -4.91017520e-01 4.07906145e-01 -3.05261850e-01] [-4.03769374e-01 6.61205888e-01 -1.38507569e+00 ... -2.28509575e-01 -1.55557692e+00 5.84416509e-01] [-1.54473662e+00 1.51562542e-01 2.06563878e+00 ... -1.35723984e+00 -5.29711843e-01 -2.95949697e-01]] ... [[-1.91519350e-01 1.45176375e+00 -7.55541384e-01 ... -1.51763654e+00 4.68734086e-01 -1.03389692e+00] [-1.94183207e+00 -1.37380853e-01 1.87244177e+00 ... -1.00103712e+00 2.82232672e-01 1.50408041e+00] [-1.11803079e+00 -1.75446796e+00 -3.91459018e-01 ... -2.46141219e+00 1.53691426e-01 7.81774342e-01] ... [-8.27124000e-01 -1.11918700e+00 -1.52267098e+00 ... 2.10320517e-01 3.93161297e-01 8.44246387e-01] [-1.08295596e+00 1.19184685e+00 4.71671164e-01 ... 1.03930068e+00 2.27536893e+00 -9.22763228e-01] [-3.15883726e-01 1.11316037e+00 -7.20685482e-01 ... -1.38634109e+00 1.82799041e+00 -2.13458800e+00]] [[ 1.28002799e+00 -7.14600861e-01 -5.71877956e-01 ... 1.75967380e-01 3.93506616e-01 -1.12914288e+00] [ 2.48256132e-01 -2.29797304e-01 -6.23081684e-01 ... 5.36962867e-01 -7.47882724e-01 -6.02130890e-01] [ 1.23544872e+00 1.63596439e+00 1.29304242e+00 ... -3.42518896e-01 -3.45932603e-01 -1.34732831e+00] ... [ 2.33780789e+00 -3.05435956e-01 1.40751374e+00 ... -1.10480177e+00 -2.23251760e-01 -6.20807946e-01] [ 5.50341249e-01 -2.38828212e-01 3.23267043e-01 ... 2.20322698e-01 -8.83879125e-01 3.69352728e-01] [ 1.88473451e+00 -2.18336985e-01 -1.70216084e+00 ... 2.09200931e+00 1.14061125e-02 1.30188632e+00]] [[ 5.16569674e-01 2.52811253e-01 9.59898770e-01 ... 1.19500327e+00 -5.38837969e-01 -7.95222074e-03] [-1.37719846e+00 -6.57766759e-02 1.77519202e+00 ... -1.81680703e+00 1.88658059e+00 -1.19168687e+00] [ 2.37053204e+00 9.85075533e-01 -4.43123251e-01 ... -4.56783652e-01 -1.39422670e-01 -1.88468957e+00] ... 
[-5.50664783e-01 -8.60225677e-01 6.06143117e-01 ... 1.16446920e-01 1.82930148e+00 -3.34137321e-01] [-4.68246266e-03 -4.25460160e-01 -7.88856149e-01 ... 2.43271136e+00 -6.09364569e-01 1.94478594e-02] [-1.68042496e-01 1.78835571e-01 1.11485279e+00 ... 1.68671489e-01 -1.01359129e+00 -1.20413947e+00]]] [[[-2.36784294e-01 6.90716028e-01 -9.20910388e-04 ... -4.91613030e-01 -3.18568647e-01 -1.91597357e-01] [ 2.35872328e-01 1.70037746e-01 4.19006735e-01 ... -1.72087535e-01 -1.36981654e+00 -8.54452431e-01] [ 7.35756755e-01 6.75943434e-01 7.50453696e-02 ... -4.98207182e-01 8.70683432e-01 -6.37827516e-01] ... [ 6.64455056e-01 4.40345883e-01 5.55885546e-02 ... 3.11516583e-01 2.18060091e-01 5.78935564e-01] [ 2.84804642e-01 -3.45149010e-01 -6.05979919e-01 ... -2.34915316e-01 5.77097654e-01 -2.99271911e-01] [-1.07404733e+00 -8.10295880e-01 3.72230411e-01 ... -3.36205900e-01 -4.74265605e-01 -6.48567498e-01]] [[-1.45444632e-01 6.62204742e-01 -1.14153542e-01 ... 1.50639379e+00 -6.18830264e-01 2.01974496e-01] [-4.86427285e-02 6.66468143e-01 -6.11120701e-01 ... 5.85167587e-01 -8.10143173e-01 1.71136141e-01] [-9.26196724e-02 -1.58999592e-01 -1.01828575e-01 ... -5.96448742e-02 -5.60470104e-01 2.18130618e-01] ... [-7.20377922e-01 -2.81508476e-01 4.79254335e-01 ... -7.93543458e-01 3.85343641e-01 7.17483163e-01] [ 3.12232733e-01 5.92826843e-01 -9.46333587e-01 ... 4.52799559e-01 -3.39437015e-02 9.14281130e-01] [-4.73228656e-03 -2.75881797e-01 8.95454288e-01 ... -6.49496019e-02 4.11796331e-01 -1.10095158e-01]] [[ 7.19244540e-01 2.24636253e-02 9.60328132e-02 ... 1.31569135e+00 4.30469960e-01 -2.12861821e-01] [-6.22014217e-02 -2.46669531e-01 4.54455838e-02 ... -1.74883828e-01 -1.88874498e-01 -1.53301910e-01] [-5.46736777e-01 -1.22334242e+00 -5.29035866e-01 ... -1.41008615e+00 -1.52603376e+00 -1.93221271e-01] ... [-2.48759210e-01 3.19939703e-01 -4.47371341e-02 ... 2.63817638e-01 -4.26555723e-02 -9.86890197e-01] [-4.87589329e-01 3.92864197e-01 4.83225256e-01 ... 
-1.84095299e+00 -1.06172955e+00 -1.02768850e+00] [-2.28760824e-01 6.23465180e-01 -9.15383399e-01 ... 4.97388929e-01 -8.20057929e-01 3.03507447e-01]] ... [[-3.21298808e-01 -3.32381018e-02 -8.11252557e-03 ... -3.69985461e-01 6.93422556e-01 3.08961868e-01] [ 2.07885683e-01 1.09239757e-01 1.07968338e-01 ... -3.01928461e-01 7.85511956e-02 -3.43327701e-01] [ 5.41675031e-01 -4.68587816e-01 -1.53817207e-01 ... -2.48201460e-01 3.79092693e-01 4.07919943e-01] ... [-7.21669793e-02 2.87601352e-01 3.78810078e-01 ... -4.33375761e-02 5.09486757e-02 -3.19095179e-02] [ 4.68867838e-01 3.46869946e-01 1.03292957e-01 ... -7.95079529e-01 3.46799761e-01 3.79237473e-01] [-2.21971020e-01 4.87985849e-01 2.86213666e-01 ... 1.01377390e-01 2.19713189e-02 -3.51651579e-01]] [[-4.36448544e-01 1.94763171e-03 -6.92723572e-01 ... -3.55811924e-01 5.27625680e-01 -5.80336988e-01] [ 3.73054892e-01 -8.61061394e-01 3.40466350e-01 ... -2.16470331e-01 4.99598265e-01 9.66598749e-01] [-4.43498015e-01 -1.86521232e-01 -4.28333580e-01 ... -7.54272640e-01 -3.04291040e-01 -3.94098252e-01] ... [ 3.11041921e-01 -7.74904430e-01 5.38074598e-02 ... 1.48303315e-01 9.61756930e-02 1.10266137e+00] [ 4.68393922e-01 -7.47464597e-01 -1.73300788e-01 ... -4.08660501e-01 -2.35688850e-01 6.94238842e-01] [ 7.01693356e-01 3.06052923e-01 1.12581588e-01 ... 2.17836220e-02 1.12642512e-01 4.74310756e-01]] [[ 3.78308803e-01 9.07076895e-01 -3.63808542e-01 ... 5.03342211e-01 8.84873122e-02 4.21993732e-01] [ 5.35067856e-01 -1.06297445e+00 -3.32258314e-01 ... 3.58407408e-01 7.77311683e-01 9.12216723e-01] [-1.05825357e-01 2.28346616e-01 -9.27977920e-01 ... 2.73740664e-02 -7.91189551e-01 -3.89020950e-01] ... [ 4.73306924e-01 -3.78662318e-01 5.91206253e-01 ... -1.58344314e-01 -2.62604356e-01 -1.26980925e+00] [-1.85384721e-01 -4.51539516e-01 -7.61894956e-02 ... -6.33499265e-01 -4.51233298e-01 -7.22205162e-01] [-4.94133949e-01 -2.87668258e-02 -3.01342964e-01 ... 
-4.73179877e-01 1.59588373e+00 -7.68372834e-01]]]] [[[[ 2.51835227e-01 -2.44152635e-01 -8.75444829e-01 ... 1.15719306e+00 9.69014168e-01 6.14267290e-01] [ 1.72638905e+00 -1.29231423e-01 -1.50241241e-01 ... -5.90359867e-01 2.14217246e-01 2.09693238e-01] [ 2.05171004e-01 1.30063891e+00 -2.88413465e-01 ... -5.37185013e-01 1.68260229e+00 7.77679205e-01] ... [-8.67805541e-01 6.82058096e-01 2.64715821e-01 ... 1.55383959e-01 7.22187519e-01 3.41757715e-01] [-3.85528505e-01 9.40042436e-01 -5.52250624e-01 ... 3.65675688e-01 1.73116612e+00 -5.13421178e-01] [-1.02279234e+00 4.03558463e-03 3.12226623e-01 ... 3.06971043e-01 -1.15759957e+00 6.91369399e-02]] [[-8.85918677e-01 -5.77935100e-01 8.60508025e-01 ... -7.05535054e-01 -8.53770912e-01 5.14812648e-01] [ 7.21523106e-01 5.79847991e-01 -1.44949509e-03 ... -6.25524044e-01 -7.85341442e-01 6.55177593e-01] [ 6.11984670e-01 -4.11325186e-01 -3.41226488e-01 ... 2.13709250e-01 2.06108302e-01 -8.88523281e-01] ... [-1.13649763e-01 5.88243246e-01 -3.78887296e-01 ... -4.75619495e-01 5.78481615e-01 3.40971887e-01] [-5.18827260e-01 1.14821804e+00 -1.36459410e-01 ... -2.70537853e-01 -8.81335676e-01 -2.00530533e-02] [-1.37122333e-01 1.99855602e+00 -4.19957727e-01 ... 6.21917248e-01 -1.85702586e+00 1.59173763e+00]] [[-2.19227314e-01 8.33293855e-01 -2.31305405e-01 ... 7.69886151e-02 -9.20067370e-01 1.22276962e+00] [-2.33785704e-01 2.52582312e-01 2.91396491e-02 ... -1.63154185e+00 -5.38352549e-01 4.94743347e-01] [ 9.45384920e-01 7.20750928e-01 8.96963894e-01 ... 9.45076466e-01 1.66058168e-01 -3.98807824e-01] ... [ 7.15973794e-01 7.43337095e-01 -6.26084059e-02 ... -2.65602648e-01 3.76795717e-02 3.97368550e-01] [ 6.83183149e-02 -5.28775632e-01 5.07497489e-01 ... 2.06882119e-01 8.42452981e-03 4.21150059e-01] [ 5.87111831e-01 3.16954732e-01 4.61421341e-01 ... 6.52211964e-01 2.50665516e-01 2.77423620e-01]] ... [[ 2.39458054e-01 -1.64059639e+00 7.92729795e-01 ... 
-5.09764552e-01 -6.66743517e-01 -8.56694460e-01] [-7.66273260e-01 8.16267729e-02 -4.33094233e-01 ... -6.10356152e-01 -5.90383172e-01 1.18916297e+00] [-4.39877063e-02 4.10056651e-01 4.91268992e-01 ... 1.94425106e-01 -6.02959156e-01 -6.61793113e-01] ... [ 4.87415552e-01 -2.41831288e-01 1.02337897e-01 ... -4.43886727e-01 -4.57104504e-01 -5.26371121e-01] [-8.06470037e-01 -1.70067096e+00 1.69744098e+00 ... -1.94717973e-01 -5.37265837e-01 2.70394415e-01] [-1.28232211e-01 1.51061028e-01 4.41895723e-01 ... 2.00930789e-01 -5.77325702e-01 -4.92706805e-01]] [[ 6.74852371e-01 6.06132984e-01 1.08429241e+00 ... 3.10724646e-01 9.62090254e-01 4.86387461e-01] [ 4.51609604e-02 -2.15063006e-01 3.79669130e-01 ... -4.99878556e-01 1.16026783e+00 2.08863035e-01] [-6.24979436e-01 4.98809040e-01 9.08375025e-01 ... -5.01054823e-01 4.34917957e-01 1.24293280e+00] ... [-6.76802039e-01 1.87963441e-01 -5.09393811e-01 ... -2.94707596e-01 -1.40573776e+00 1.23311853e+00] [ 1.90025091e-01 -2.48709083e-01 -6.97623670e-01 ... -1.73286185e-01 -1.43453270e-01 -1.72820115e+00] [ 5.80647886e-01 4.82422322e-01 -2.71547407e-01 ... -7.98105001e-01 4.21134233e-01 -1.03574347e+00]] [[-8.35523248e-01 -6.51849359e-02 5.76231897e-01 ... 8.06254089e-01 -2.08075553e-01 6.47831440e-01] [ 2.70552486e-02 -1.72745377e-01 3.55441302e-01 ... 4.63823915e-01 -7.67496288e-01 1.29693165e-01] [-1.27437383e-01 -5.62079728e-01 -3.21801931e-01 ... 3.90618116e-01 1.50138184e-01 3.19406718e-01] ... [ 5.86251795e-01 -6.97883487e-01 2.94103771e-01 ... 4.65760753e-02 7.72004545e-01 8.80846620e-01] [ 7.93000996e-01 -4.91525859e-01 -8.14618587e-01 ... -7.69472122e-01 -4.19919401e-01 8.99250731e-02] [-7.76671886e-01 1.03602183e+00 -1.08983614e-01 ... 6.22046471e-01 -2.26656094e-01 7.13920057e-01]]] [[[-7.77162135e-01 -7.05852211e-01 3.41760367e-02 ... -2.01644480e-01 -1.55235842e-01 4.48253453e-01] [-5.08248389e-01 9.22906771e-03 -1.99569032e-01 ... 
3.78018826e-01 -4.62587774e-01 -3.37030172e-01] [ 3.39876622e-01 -1.44460037e-01 -1.62157312e-01 ... 3.78182918e-01 -5.89568973e-01 -1.52834356e-01] ... [-9.49088335e-02 -3.37768614e-01 6.23655736e-01 ... -5.59178233e-01 -3.42890322e-01 -2.06070170e-01] [ 7.45485008e-01 1.72005653e-01 1.93968266e-01 ... 2.10229442e-01 -3.24077725e-01 5.54713666e-01] [-3.70098501e-01 2.50856727e-01 2.92379111e-02 ... 2.47973472e-01 4.42274600e-01 -3.88835296e-02]] [[-9.96328220e-02 2.29469081e-03 -4.55421329e-01 ... 2.91656077e-01 -6.29855096e-01 -3.99650902e-01] [-3.20419014e-01 1.04072154e-01 -4.34665471e-01 ... 1.37434006e-01 -3.80839139e-01 3.65755826e-01] [-3.72920595e-02 -2.61704952e-01 6.68015361e-01 ... -2.20444754e-01 -2.25086108e-01 -4.74314183e-01] ... [ 9.80045125e-02 1.47459194e-01 5.09989858e-02 ... 4.80074026e-02 1.34854957e-01 6.75209224e-01] [ 4.02620547e-02 -2.66936719e-01 1.62194043e-01 ... -1.33972719e-01 -3.93772215e-01 -2.63163120e-01] [ 3.14525068e-01 4.78225425e-02 -3.29803169e-01 ... 2.02877447e-01 -5.05527914e-01 -1.90266579e-01]] [[ 2.54418943e-02 3.68527025e-01 -2.07746984e-03 ... -2.63871044e-01 -2.63111502e-01 -5.98752677e-01] [-1.80958822e-01 5.95536493e-02 -4.50747579e-01 ... 3.07614040e-02 2.96839237e-01 2.13240787e-01] [ 2.48166293e-01 -1.06650136e-01 -3.54994267e-01 ... -3.07626784e-01 2.13045508e-01 7.39394069e-01] ... [ 8.80758762e-02 -3.34644884e-01 4.65969771e-01 ... 3.91141444e-01 -2.63400048e-01 1.29648775e-01] [ 1.47588879e-01 2.73315698e-01 3.47203612e-01 ... 1.21838920e-01 -3.29031050e-01 2.18287662e-01] [ 5.86214244e-01 1.18047573e-01 4.43431079e-01 ... 4.36615348e-01 1.85603146e-02 5.52934110e-01]] ... [[ 3.23806912e-01 -6.76309049e-01 3.53780240e-02 ... -1.56176314e-01 -2.71892726e-01 -1.87002942e-01] [-2.30334058e-01 -7.79868215e-02 1.50617510e-01 ... 6.61145270e-01 1.02086395e-01 -5.06243110e-01] [-8.25304165e-02 2.60677710e-02 -1.20517753e-01 ... 4.39076602e-01 2.96384156e-01 -3.77804011e-01] ... 
[-3.01215142e-01 -1.54652596e-01 5.26879668e-01 ... -1.85537919e-01 -2.27013901e-01 1.65238634e-01] [-4.63299602e-01 2.56293535e-01 -1.84753180e-01 ... 1.55760840e-01 2.26170599e-01 3.70263487e-01] [ 4.69962507e-01 -1.42368257e-01 5.17564416e-01 ... 5.64255655e-01 -2.46532843e-01 -1.42941973e-03]] [[-8.95770192e-02 -5.90040565e-01 -3.51770043e-01 ... -8.27381536e-02 2.71011651e-01 -4.56824780e-01] [ 8.84587020e-02 3.08181979e-02 6.02552056e-01 ... -7.54518732e-02 -2.65395135e-01 -2.22949952e-01] [ 2.24129409e-01 -2.83799738e-01 5.29972434e-01 ... -2.57348746e-01 2.76607543e-01 3.50233197e-01] ... [-8.04371089e-02 5.88116646e-01 -3.84357393e-01 ... -2.03479752e-01 1.56631202e-01 -3.70149523e-01] [-3.04196104e-02 4.12148684e-01 4.70096946e-01 ... -4.41587865e-01 2.75330275e-01 2.95099258e-01] [ 3.74085724e-01 4.94724452e-01 -5.56257546e-01 ... 2.21253678e-01 -2.33304560e-01 3.45312834e-01]] [[ 6.49374247e-01 4.16155517e-01 -1.86919376e-01 ... 6.52907133e-01 3.88696492e-01 -2.50770628e-01] [ 3.62307429e-02 4.40141886e-01 4.90950137e-01 ... -3.01434904e-01 1.49739295e-01 4.07189965e-01] [-7.50345215e-02 -2.61520483e-02 -2.00779706e-01 ... -2.90791303e-01 -5.03747284e-01 -7.55439639e-01] ... [-8.15479830e-02 1.93122551e-01 2.26593569e-01 ... 3.43346000e-01 -2.36077294e-01 2.79840022e-01] [ 1.48371875e-01 -3.00560951e-01 -8.74391198e-01 ... -3.19484204e-01 -3.71572018e-01 2.80823469e-01] [-2.20005319e-01 -3.81989926e-01 -1.70589373e-01 ... 5.19877911e-01 1.15259983e-01 -3.49117756e-01]]] [[[-1.43261933e+00 -4.20447022e-01 8.98411453e-01 ... -1.37538576e+00 -3.96610022e-01 1.47746146e+00] [-4.76487517e-01 -6.44986510e-01 -3.31234962e-01 ... -2.22810721e+00 1.42160892e-01 2.16036415e+00] [-3.09471905e-01 -1.27928090e+00 8.60307887e-02 ... -2.48232579e+00 7.11339951e-01 1.07021582e+00] ... [ 1.91371679e-01 -1.93569526e-01 -2.15441632e+00 ... -8.71301770e-01 8.63444328e-01 -2.56006384e+00] [-1.41325438e+00 -1.08155859e+00 -1.71836317e+00 ... 
6.12686872e-01 -1.66468132e+00 -1.61826611e+00] [ 2.08526075e-01 -1.92189455e+00 -2.20873761e+00 ... -1.15368760e+00 2.75733995e+00 9.47126627e-01]] [[-6.55544281e-01 -4.15837795e-01 -1.21420610e+00 ... -3.06918693e+00 1.69128075e-01 6.91294968e-01] [-7.24275231e-01 -2.64903355e+00 -2.35061526e-01 ... -2.53244400e-01 -1.01221061e+00 4.32078034e-01] [-1.00852907e+00 2.37730324e-01 1.12558496e+00 ... 4.57946301e-01 -2.07867837e+00 1.68278265e+00] ... [ 8.44812989e-01 3.63058716e-01 2.15244579e+00 ... -4.36958164e-01 5.81631243e-01 4.33528632e-01] [-1.25088781e-01 8.09690118e-01 -1.29478741e+00 ... 8.00418377e-01 1.53559470e+00 -2.14382505e+00] [-2.13256121e+00 -4.45550174e-01 -1.68987119e+00 ... -1.77706051e+00 -2.16664985e-01 7.06486285e-01]] [[-3.26661682e+00 -3.20005393e+00 2.05128360e+00 ... 1.16154075e+00 -2.21357632e+00 -1.68571758e+00] [-9.88007784e-01 1.04739594e+00 -7.61698246e-01 ... 7.32418597e-01 -5.38459122e-01 2.11328125e+00] [-1.83890648e-02 1.14122534e+00 -8.29255641e-01 ... -8.79081845e-01 -7.05484390e-01 -6.76244318e-01] ... [-1.00911927e+00 -1.74220347e+00 -1.07738614e+00 ... -7.34279394e-01 1.18897569e+00 -7.33379245e-01] [ 2.35159844e-01 3.13950014e+00 -8.78673315e-01 ... 8.05161774e-01 -1.31577528e+00 -2.82445639e-01] [ 5.80413043e-01 -1.08922100e+00 2.74509025e+00 ... -1.18017209e+00 -1.54005194e+00 -1.54925779e-01]] ... [[-5.63299477e-01 2.45354939e+00 1.59443498e+00 ... -1.73638856e+00 1.35499573e+00 1.02646470e+00] [-9.95076597e-01 5.94942868e-01 -8.23383689e-01 ... -5.83631039e-01 1.81502306e+00 2.18937469e+00] [ 5.58433950e-01 -3.34598750e-01 -2.14588732e-01 ... -2.46726465e+00 -4.79258388e-01 2.19864711e-01] ... [ 4.10962820e-01 1.94988823e+00 -5.39962232e-01 ... -1.01258755e+00 1.13171268e+00 -4.12365701e-03] [ 2.34754300e+00 -1.62436438e+00 1.51233280e+00 ... -1.09268323e-01 -2.50160170e+00 2.60287929e+00] [-1.57522798e+00 1.36805356e+00 -1.25191438e+00 ... 
2.74672103e+00 4.15553004e-01 -1.46703124e+00]] [[-2.22255036e-01 9.93114293e-01 1.48396540e+00 ... 3.95579767e+00 -1.30543840e+00 -4.38539863e-01] [ 9.82601404e-01 -1.99088645e+00 -2.08666229e+00 ... -1.93709493e+00 1.41434264e+00 -1.59809804e+00] [ 1.84572196e+00 -1.12413621e+00 9.79591489e-01 ... -3.51378173e-01 -1.05192626e+00 -1.90031314e+00] ... [ 1.63739598e+00 -6.17501378e-01 -8.80462602e-02 ... -2.87190795e-01 1.05958819e+00 -3.30910295e-01] [-1.57036877e+00 -1.09736300e+00 -1.56134415e+00 ... -2.41461515e+00 1.41215801e+00 -1.83641386e+00] [ 8.27257574e-01 -1.05693841e+00 5.92686646e-02 ... 1.45251966e+00 9.62776303e-01 -1.76901650e+00]] [[ 7.02700093e-02 -4.89908904e-02 -9.11570072e-01 ... 9.25316751e-01 2.85557002e-01 1.33164561e+00] [-1.29744971e+00 2.24217027e-02 9.02887285e-01 ... -3.18188405e+00 7.38350034e-01 5.17037809e-01] [-9.18420732e-01 -3.57918471e-01 5.81367016e-01 ... -1.91549242e-01 1.10329270e+00 -1.85328022e-01] ... [ 1.10755384e+00 -1.10871911e+00 -9.82587874e-01 ... 2.16186926e-01 -5.74960887e-01 4.39232975e-01] [-1.50028050e+00 -4.74691652e-02 -1.19707751e+00 ... -1.58571944e-01 -9.73722756e-01 -1.42814982e+00] [-3.40756506e-01 5.94705164e-01 -1.40672731e+00 ... 7.69673705e-01 9.82426703e-01 5.37800014e-01]]] [[[ 8.42769220e-02 1.77187756e-01 -1.80637658e-01 ... 3.85848917e-02 -6.39038831e-02 -9.91982296e-02] [ 1.56193286e-01 -6.04934655e-02 9.57987532e-02 ... 1.30850121e-01 9.75879189e-03 2.11858094e-01] [-1.49348691e-01 6.91656722e-04 -4.33991194e-01 ... -1.64273724e-01 -3.79640758e-02 8.33042040e-02] ... [ 8.37375745e-02 4.67636675e-01 -1.96617901e-01 ... -2.34166101e-01 -5.48445880e-01 -5.56691848e-02] [ 2.78012417e-02 6.43779412e-02 -3.41722579e-03 ... 3.23449969e-01 3.45427185e-01 -4.07602876e-01] [-2.13319331e-01 -7.10894987e-02 -1.89768165e-01 ... -3.37299734e-01 1.82491153e-01 2.28468612e-01]] [[-3.86252373e-01 1.33906439e-01 -1.67986438e-01 ... 
-8.80340636e-02 -1.42706767e-01 2.11940616e-01] [ 3.73583525e-01 3.50759998e-02 3.78906876e-01 ... -5.21099329e-01 9.96436179e-02 -3.20342153e-01] [ 3.82282674e-01 1.06997184e-01 -7.50114843e-02 ... 3.41404490e-02 -2.35978767e-01 2.37903044e-01] ... [ 1.02265991e-01 3.28514218e-01 2.77131796e-01 ... 2.15800285e-01 1.39023170e-01 -2.56014496e-01] [ 3.46286967e-02 3.25525403e-01 -7.61972815e-02 ... 1.38352528e-01 -3.21873367e-01 5.87606281e-02] [-1.80804655e-01 -1.34907171e-01 7.82056376e-02 ... 1.73576418e-02 9.05680433e-02 -1.50154710e-01]] [[-7.94614479e-02 1.62107006e-01 -2.52637459e-04 ... -7.47004524e-02 1.72066435e-01 -6.31533414e-02] [ 9.64798704e-02 -3.69478852e-01 6.55826703e-02 ... 1.51407989e-02 4.64924037e-01 3.12470496e-02] [-2.21663266e-01 1.69493794e-01 3.39786679e-01 ... 2.10412353e-01 2.93121964e-01 -3.37139279e-01] ... [-1.59806594e-01 3.11474115e-01 1.64426602e-02 ... -4.72820967e-01 4.82890338e-01 3.42215419e-01] [ 2.35840958e-02 -3.30845788e-02 -5.88949807e-02 ... -4.51042414e-01 1.62956446e-01 5.47682345e-02] [-1.64587840e-01 -9.12593007e-02 -2.17414752e-01 ... -1.17275603e-02 -1.88494533e-01 -3.09995502e-01]] ... [[-3.63762900e-02 3.07855755e-01 -1.43270656e-01 ... -1.19353514e-02 7.54911453e-02 -2.86639810e-01] [-3.08960807e-02 -3.85148078e-01 -1.71137437e-01 ... 9.11831260e-02 2.81600822e-02 -2.92474270e-01] [ 9.63579416e-02 2.66260952e-01 -8.26171637e-02 ... -3.20116371e-01 -1.33057937e-01 6.37964785e-01] ... [ 4.58096147e-01 -1.94359556e-01 -3.24718326e-01 ... -1.57950044e-01 -1.57076403e-01 1.99328929e-01] [ 8.34587291e-02 1.37252986e-01 3.35569978e-01 ... 5.35833240e-01 -1.49153292e-01 -1.35563672e-01] [-3.12568814e-01 1.60408288e-01 -1.92025900e-01 ... 4.56655473e-01 7.68825263e-02 -3.42712373e-01]] [[ 2.59742975e-01 6.44708797e-02 2.04033166e-01 ... 1.44413263e-01 -1.17381282e-01 1.46824867e-01] [-4.81649250e-01 -1.30942151e-01 -1.24319363e-02 ... 
-6.53021596e-03 -1.69504419e-01 -2.65862904e-02] [-2.79769629e-01 5.20499945e-02 1.80727318e-01 ... 1.37228370e-01 -8.32744986e-02 -3.04282695e-01] ... [-1.64923638e-01 -1.39935151e-01 -4.34328675e-01 ... 6.86012581e-02 -5.24041176e-01 1.13837145e-01] [ 1.92117259e-01 1.95605278e-01 -3.28889757e-01 ... -2.19125375e-01 3.22881192e-02 -7.17098173e-03] [ 3.51917982e-01 -1.28573015e-01 -8.46907124e-02 ... -6.26673251e-02 1.06389381e-01 -9.07378793e-02]] [[ 1.37659773e-01 -2.26192400e-01 6.00349419e-02 ... 1.58927247e-01 -7.62193725e-02 -3.16922396e-01] [-2.53267139e-01 1.37429342e-01 2.18231529e-01 ... 6.88711852e-02 -2.24979997e-01 -2.44148508e-01] [-2.66585916e-01 1.73730135e-01 -6.59183025e-01 ... 1.29918810e-02 9.24462751e-02 2.97183860e-02] ... [ 2.54891291e-02 -7.60642469e-01 1.75860748e-01 ... 2.24644169e-01 -1.47793233e-01 -7.27928281e-02] [-2.06631377e-01 9.48828310e-02 3.99605334e-01 ... -3.29756945e-01 -1.74098730e-01 5.96430421e-01] [ 2.20786572e-01 -5.32458089e-02 -4.57528833e-04 ... -8.67410377e-02 8.23299959e-02 -1.64864540e-01]]] [[[ 7.84628093e-01 -1.65395349e-01 -1.65200007e+00 ... 2.26543024e-02 -1.01756310e+00 9.84153032e-01] [-3.72012973e-01 -1.79918182e+00 1.22863448e+00 ... 2.68323958e-01 1.06794700e-01 5.98534226e-01] [ 1.12416804e+00 -3.41037822e+00 -4.83183414e-01 ... 1.73999548e+00 -1.19862235e+00 -3.75297815e-01] ... [ 3.53568375e-01 6.93512201e-01 1.08261013e+00 ... -1.54339015e+00 1.23647165e+00 1.18688476e+00] [ 1.13948965e+00 2.07328939e+00 -2.20636427e-01 ... -7.05919147e-01 -6.81159943e-02 3.39854211e-01] [-1.29414663e-01 -1.27703118e+00 -4.91832316e-01 ... 1.78599942e+00 7.98388183e-01 -1.59082270e+00]] [[ 2.83401996e-01 1.19666360e-01 -1.32104373e+00 ... -1.62020254e+00 3.41305494e-01 4.75970000e-01] [-1.50670552e+00 -3.63631278e-01 -1.33367789e+00 ... -6.20625079e-01 6.02419674e-01 -4.06206280e-01] [-1.53131127e+00 -8.14855099e-01 -1.31712222e+00 ... 6.02634717e-03 1.68450117e+00 1.29305804e+00] ... 
[ 1.76255211e-01 -8.18060756e-01 -3.17199409e-01 ... 3.25049609e-01 7.09809840e-01 5.71636021e-01] [ 9.26362276e-01 -1.29600310e+00 -2.04126787e+00 ... -1.64606273e+00 1.38712823e+00 -1.41557109e+00] [ 1.79544881e-01 2.39886865e-01 1.15098071e+00 ... 4.90252763e-01 -1.98956573e+00 -2.93778896e-01]] [[-4.47472990e-01 -1.88278377e-01 -2.97008336e-01 ... 1.15575492e+00 4.86983716e-01 1.51290023e+00] [-1.28240192e+00 4.91891205e-01 1.03666377e+00 ... 1.37656510e+00 -1.49799377e-01 -9.74971712e-01] [-5.90228617e-01 -8.31211135e-02 -2.46516109e+00 ... -1.45473337e+00 -8.82286608e-01 2.09044531e-01] ... [ 1.17287703e-01 -1.97421944e+00 -1.22773819e-01 ... 4.17833924e-02 3.12556362e+00 4.34299737e-01] [-1.74477065e+00 -3.80000055e-01 2.07906753e-01 ... 9.25461650e-01 -1.17961061e+00 -7.87437260e-01] [ 3.94208312e-01 1.17612612e+00 1.83249748e+00 ... -4.50436980e-01 -5.64001560e-01 -2.53701031e-01]] ... [[ 1.94049358e+00 3.28903139e-01 -9.55577850e-01 ... 1.27956367e+00 9.42556441e-01 7.50759184e-01] [ 1.54358178e-01 -1.75954819e+00 -4.12179887e-01 ... -4.41847555e-02 1.33279121e+00 2.17845604e-01] [ 2.12114811e-01 5.18870294e-01 -6.20130420e-01 ... -2.26352528e-01 3.15288812e-01 6.88616574e-01] ... [-7.43685603e-01 -1.11384237e+00 -7.42781401e-01 ... 1.57146886e-01 2.47879446e-01 8.40743165e-03] [-1.22744787e+00 5.79136550e-01 -8.13322186e-01 ... -7.56785512e-01 2.83914983e-01 -7.72298276e-02] [-1.16371644e+00 -4.99870032e-01 1.94941509e+00 ... -3.93651187e-01 1.12366414e+00 -2.78464943e-01]] [[ 7.13522494e-01 7.84681812e-02 2.76292229e+00 ... 2.02197820e-01 -2.59300685e+00 7.67486811e-01] [ 1.50999367e-01 -1.30061924e+00 1.04153510e-02 ... -1.36167383e+00 -7.98171222e-01 1.48770571e+00] [ 5.93040407e-01 1.90829241e+00 -1.34930658e+00 ... 5.31516194e-01 1.45581043e+00 1.51727572e-01] ... [-1.24074852e+00 2.46059839e-02 1.73944426e+00 ... -1.04805613e+00 -4.11708541e-02 -9.88275111e-02] [ 2.14750671e+00 1.60985708e+00 -1.36690545e+00 ... 
1.36242318e+00 2.70813018e-01 -1.13121367e+00] [ 3.69784355e-01 -1.49673074e-01 -1.43616736e+00 ... -1.09538078e+00 2.78145909e-01 -2.61879593e-01]] [[-8.93289745e-02 9.06905904e-02 -1.03759766e+00 ... 1.19180596e+00 -5.35822093e-01 4.58747149e-02] [-2.11476028e-01 -1.61462748e+00 -2.93732435e-01 ... 1.11607146e+00 2.30225883e-02 -5.39813876e-01] [-7.44687140e-01 1.52789700e+00 2.80622435e+00 ... 6.54486537e-01 -1.38103127e+00 -2.05812430e+00] ... [ 2.35807610e+00 -1.56290829e+00 -7.44232059e-01 ... -6.30706310e-01 -2.04224324e+00 -3.14724475e-01] [ 7.93839753e-01 -3.02393407e-01 -1.06190667e-01 ... -4.15559947e-01 -4.35181856e-01 2.54193854e+00] [ 3.01711768e-01 -3.81330043e-01 -6.82329535e-02 ... 3.34520757e-01 6.08927369e-01 -5.11031628e-01]]] [[[-3.23224038e-01 1.24287248e-01 -4.59096543e-02 ... 4.06811416e-01 2.36698896e-01 6.95842385e-01] [-3.70659202e-01 9.75161731e-01 5.14476240e-01 ... -3.11999023e-01 3.49939615e-01 -4.02252495e-01] [ 1.18480957e+00 3.00841093e-01 1.02267645e-01 ... -3.68146330e-01 -1.24490762e+00 4.68952805e-02] ... [ 3.67354572e-01 3.94662321e-01 -2.81576127e-01 ... -9.46434677e-01 -7.13892758e-01 2.80372798e-01] [-1.70543924e-01 -7.91906774e-01 -3.71370435e-01 ... -7.70957172e-01 5.31271696e-01 3.56558800e-01] [-1.71690449e-01 -1.91302195e-01 5.97931594e-02 ... -7.30473042e-01 2.33871490e-01 6.07316829e-02]] [[ 8.50353301e-01 4.36305106e-01 -2.83071727e-01 ... 5.53215742e-01 -5.40554374e-02 -2.69479424e-01] [-9.76902723e-01 -4.85432953e-01 -3.16046067e-02 ... 6.17836237e-01 1.03718549e-01 -6.83053255e-01] [-9.77184832e-01 -7.30240822e-01 -2.19695792e-01 ... -4.36043620e-01 -2.75200427e-01 -3.91414434e-01] ... [ 6.50592029e-01 -3.36017385e-02 1.09102869e+00 ... 1.46311939e+00 1.78965524e-01 -7.78208196e-01] [ 8.24356377e-01 4.87347186e-01 1.20437397e-02 ... -4.87377197e-01 -1.16170518e-01 1.56664205e+00] [-3.25128049e-01 -4.68892843e-01 2.31748000e-01 ... 
-3.99292231e-01 -8.67329910e-02 -3.23554069e-01]] [[-6.96420848e-01 -7.83106610e-02 -1.58419681e+00 ... 9.77749825e-01 -4.16567832e-01 1.01194763e+00] [-7.69470930e-01 2.21154124e-01 3.94661129e-01 ... -2.92808950e-01 -4.84674335e-01 -3.72297794e-01] [ 1.77270517e-01 4.32789415e-01 -6.92807257e-01 ... 1.67291746e-01 3.95656437e-01 1.80126145e-01] ... [-8.70320559e-01 -1.51269883e-01 8.81563276e-02 ... -1.31850047e-02 -2.66877919e-01 1.00678325e+00] [-5.62553644e-01 1.66084647e-01 7.58234322e-01 ... -7.76373804e-01 2.72844881e-01 4.18917626e-01] [ 3.83933663e-01 -4.88244653e-01 3.85215014e-01 ... -4.44721393e-02 -7.46305346e-01 5.56831285e-02]] ... [[-8.55863929e-01 -2.31532147e-03 -2.14304492e-01 ... -4.48864400e-01 9.18414176e-01 -3.51232171e-01] [-2.13215038e-01 8.17211270e-01 -2.70827979e-01 ... -6.40858650e-01 -1.29774868e-01 -3.93967360e-01] [ 3.68450493e-01 1.23399734e-01 8.32214832e-01 ... 1.95571214e-01 1.38453215e-01 -3.11423868e-01] ... [-1.32004380e+00 -1.80858634e-02 8.40043187e-01 ... 1.94318011e-01 -1.98028963e-02 -2.33714476e-01] [-6.82554841e-01 7.23812342e-01 -3.35465968e-02 ... -1.64972767e-01 9.03936177e-02 7.10798085e-01] [-1.86019570e-01 -2.35297084e-01 7.50955045e-01 ... 6.20402694e-02 7.76056468e-01 -2.86516547e-01]] [[ 1.02574706e-01 -1.69579312e-01 -6.91681926e-04 ... -7.16256499e-01 -1.12424624e+00 1.15930092e+00] [ 1.58025372e+00 4.17612880e-01 6.04192734e-01 ... -1.72221184e-01 -1.44159257e-01 3.30817342e-01] [ 3.19373757e-01 -1.61624432e+00 3.76157135e-01 ... 9.65238586e-02 -1.19216584e-01 2.96666682e-01] ... [-1.62974775e-01 -4.26693618e-01 -2.58268774e-01 ... -4.49878424e-01 -2.67786086e-01 2.35000178e-01] [-3.67552429e-01 2.97123641e-01 1.93226889e-01 ... 9.76478577e-01 -3.98529202e-01 8.82708311e-01] [-5.18344700e-01 2.12340713e-01 6.82120919e-01 ... -3.79696459e-01 -6.73348904e-01 -1.05296038e-01]] [[-5.56189001e-01 -9.43143815e-02 1.26936644e-01 ... 
7.39232421e-01 -1.04765451e+00 3.55197564e-02] [ 2.54991651e-01 -2.97005445e-01 7.74299204e-01 ... -3.64757292e-02 5.62008619e-01 -5.43986619e-01] [ 5.36342449e-02 -3.73325855e-01 -8.21306035e-02 ... 1.40285045e-01 -6.52210832e-01 7.79664159e-01] ... [-2.82738686e-01 2.43306741e-01 -5.41755259e-01 ... -1.02297045e-01 3.36489111e-01 2.06337035e-01] [ 9.81646240e-01 4.07452434e-01 5.13506591e-01 ... 7.73558438e-01 -7.58197725e-01 3.81810293e-02] [-3.18513006e-01 1.92406431e-01 5.55337593e-02 ... 4.18885440e-01 2.07920253e-01 3.73402536e-01]]]]]; ov_res: [[[[[ 1.65789649e-01 -1.09888695e-01 2.77977258e-01 ... 9.57349539e-01 -2.27555901e-01 2.22250193e-01] [ 1.18320870e+00 -7.09188879e-02 -9.92729515e-02 ... -1.73912323e+00 1.14725447e+00 -4.24209714e-01] [-3.01656902e-01 -1.79531667e-02 3.33219588e-01 ... 7.86144495e-01 7.81559408e-01 9.31669235e-01] ... [ 1.17569923e+00 4.11256403e-01 6.62222803e-01 ... 8.66149068e-01 -3.00537467e-01 1.20657539e+00] [ 1.42817572e-01 2.62826622e-01 -1.03050864e+00 ... -2.43533581e-01 1.25711250e+00 1.15654416e-01] [-1.02708054e+00 1.42100146e-02 -1.41298866e+00 ... -1.05872369e+00 -3.48119438e-01 3.71187227e-04]] [[ 2.81412840e-01 4.96587008e-01 -1.86357349e-01 ... -7.45722532e-01 -1.40632379e+00 3.09296548e-01] [-3.30428392e-01 -5.87576985e-01 -2.55916923e-01 ... 6.86863344e-03 5.61143197e-02 9.52743828e-01] [-6.01332366e-01 -4.91563559e-01 -9.63299870e-02 ... 1.85033157e-01 7.14891076e-01 3.33459616e-01] ... [-3.27612191e-01 1.04859090e+00 2.32126102e-01 ... 1.38105184e-01 -3.80087495e-01 5.10078788e-01] [ 2.72453219e-01 -2.18360215e-01 -3.23300660e-01 ... 2.14633763e-01 -2.66426027e-01 1.62541375e-01] [ 6.65873170e-01 1.83126017e-01 -5.21617606e-02 ... 9.23554003e-02 -9.89757478e-01 -5.13689756e-01]] [[-6.22672319e-01 -1.41087055e+00 1.93531021e-01 ... 3.54572833e-01 1.76517352e-01 -8.05484831e-01] [-9.01860073e-02 -3.31559777e-01 -9.45914239e-02 ... 
6.62365079e-01 1.24184299e+00 -1.82157472e-01] [-3.78793031e-01 -7.72214115e-01 3.97931963e-01 ... -1.83408833e+00 -1.09514415e+00 -5.92513084e-02] ... [ 4.44454759e-01 2.55902827e-01 6.54996336e-01 ... 1.45608395e-01 1.09895241e+00 8.99601758e-01] [-2.90997893e-01 -9.26956475e-01 -3.98696035e-01 ... 4.32820290e-01 -8.13502610e-01 9.34061706e-01] [-6.53152645e-01 4.62284952e-01 1.06470823e+00 ... -9.51309264e-01 5.80890596e-01 1.23040831e+00]] ... [[ 8.98027956e-01 7.44570911e-01 2.39420265e-01 ... -6.65511563e-02 -1.17855859e+00 6.04013465e-02] [ 2.29927540e-01 5.93240201e-01 1.30229127e+00 ... -3.90503556e-01 1.87345184e-02 9.50550854e-01] [ 1.87826961e-01 -4.79090720e-01 8.74404252e-01 ... -9.90510404e-01 -1.61024630e+00 -9.75879669e-01] ... [ 2.60639817e-01 -1.85271755e-01 -7.55288079e-02 ... 3.47209901e-01 -2.18848914e-01 -4.84290570e-01] [ 9.20082450e-01 7.09708989e-01 -5.83248496e-01 ... -2.80500919e-01 -1.90626532e-01 2.32673123e-01] [ 7.16265738e-01 6.75645292e-01 -2.49701142e-01 ... 1.13155246e+00 3.93995084e-02 -1.94417462e-01]] [[-5.71530759e-01 7.10468411e-01 1.81621373e-01 ... -1.36080101e-01 -3.29837441e-01 -3.86472136e-01] [ 1.29157737e-01 1.78391963e-01 -1.66532481e+00 ... 2.42296141e-02 -5.55029929e-01 1.06570315e+00] [ 8.91355753e-01 2.16064975e-03 -9.29634571e-01 ... 4.30240452e-01 -1.29938215e-01 -2.25433558e-02] ... [ 6.23322725e-01 -1.74100131e-01 1.36513567e+00 ... 4.88251358e-01 -3.78783382e-02 5.95149875e-01] [-2.09330544e-01 9.61169064e-01 6.42502427e-01 ... -5.56315482e-01 -1.05345599e-01 -3.63305539e-01] [-3.58324826e-01 -9.68563080e-01 -1.33500844e-01 ... 9.41101685e-02 6.27376914e-01 2.42880583e-01]] [[-8.69802475e-01 -9.94953036e-01 1.74416363e+00 ... 8.56002390e-01 -2.33641624e-01 5.24998605e-01] [ 3.11695635e-01 2.80949861e-01 -1.79301232e-01 ... 1.74078798e+00 3.86940874e-02 8.05264711e-01] [-8.30058694e-01 -1.19606912e-01 -3.60052377e-01 ... 6.33280039e-01 1.81726784e-01 7.62275577e-01] ... 
[ 1.55512998e-02 4.98803258e-01 9.78171945e-01 ... 4.39661562e-01 6.00617051e-01 2.79767334e-01] [-6.24469519e-01 -4.44007695e-01 -2.13411674e-01 ... 2.04978228e-01 1.21713979e-02 4.79506403e-01] [ 1.21650624e+00 9.38823968e-02 1.35899886e-01 ... 7.29446933e-02 9.67586458e-01 1.42529392e+00]]] [[[-1.59431875e-01 -2.19249442e-01 -5.49933761e-02 ... -8.27285051e-02 -7.60570243e-02 -3.37405264e-01] [-5.11619225e-02 -4.93281543e-01 2.51450926e-01 ... 4.54395980e-01 -4.77118999e-01 -6.94979966e-01] [ 2.31588274e-01 -2.96884596e-01 -4.38889325e-01 ... -6.17052019e-02 1.67273566e-01 2.61174470e-01] ... [-1.13888942e-01 4.02885154e-02 2.01561823e-01 ... -7.02930093e-02 -1.41286245e-02 2.57904734e-02] [ 1.61362886e-01 -4.68165636e-01 -1.46335661e-01 ... -1.23356096e-01 -2.93079704e-01 1.90501940e-03] [ 5.56258678e-01 -2.35686094e-01 2.41171494e-01 ... -2.85276920e-01 2.70307988e-01 -6.81410683e-03]] [[-2.15308100e-01 2.72571892e-01 5.31795144e-01 ... -6.62545264e-01 -1.00079231e-01 -1.04692183e-01] [ 2.21683487e-01 -1.81352958e-01 -7.80102193e-01 ... 2.62408197e-01 1.33042820e-02 5.37424207e-01] [ 3.90377432e-01 3.34611312e-02 3.59492868e-01 ... -2.70787507e-01 2.92283297e-01 2.99344331e-01] ... [-3.69170904e-02 -7.54258633e-02 1.28097594e-01 ... -4.86588627e-02 1.02510322e-02 1.63317740e-01] [ 4.08140212e-01 -7.10367084e-01 4.67724442e-01 ... -8.68404925e-01 -1.97741002e-01 -2.69350022e-01] [ 3.70175928e-01 -1.96062371e-01 -6.75539672e-01 ... -6.21282458e-02 1.26215130e-01 -2.39707172e-01]] [[ 1.99788839e-01 4.40445006e-01 2.68871486e-01 ... -2.16265038e-01 3.40326697e-01 -2.57080555e-01] [-6.71470702e-01 4.99329567e-02 -5.72520256e-01 ... 2.37949669e-01 3.71859521e-01 5.89637272e-02] [-5.62568009e-01 -2.87528098e-01 1.64865151e-01 ... -6.61922172e-02 -4.06235270e-03 2.04378158e-01] ... [ 6.46366119e-01 -5.15321672e-01 4.07016367e-01 ... 3.05623990e-02 7.34813988e-01 3.24954391e-01] [ 4.73169327e-01 -6.38262630e-01 -2.18789935e-01 ... 
1.60017207e-01 1.00547008e-01 -2.41483822e-01] [ 3.97819430e-01 1.56503275e-01 -2.67318115e-02 ... 6.33365452e-01 -3.46826583e-01 -4.51869696e-01]] ... [[ 4.84945178e-01 1.67206749e-01 -7.26598920e-03 ... -5.49631655e-01 9.21380371e-02 2.48145416e-01] [-2.43335828e-01 -8.81121755e-02 -3.43005985e-01 ... -2.63822764e-01 -3.07315618e-01 -1.36577606e-01] [ 3.51924337e-02 9.39735547e-02 3.83394003e-01 ... 1.08806759e-01 2.97624975e-01 -7.63584336e-04] ... [-6.64419085e-02 1.32065624e-01 -3.53297144e-01 ... 1.50680631e-01 -5.08860648e-01 3.90385568e-01] [-1.20474726e-01 2.05926448e-01 -5.98596573e-01 ... -3.93651009e-01 -2.85399765e-01 -4.19766426e-01] [ 1.06748320e-01 7.52719402e-01 -1.22486293e-01 ... 3.32219779e-01 2.90917084e-02 -2.97256649e-01]] [[-3.08463097e-01 -2.54587620e-01 1.36583567e-01 ... -4.33310002e-01 -1.32563785e-02 -2.80905038e-01] [ 5.28030246e-02 7.88383484e-01 -6.72985792e-01 ... 1.15969986e-01 6.48152223e-03 -2.23463103e-01] [-5.40437922e-03 1.02093425e-02 -2.66899560e-02 ... -2.13421918e-02 5.15556037e-01 -3.52864355e-01] ... [ 3.96945745e-01 -4.38637018e-01 1.33965731e-01 ... 1.58161834e-01 -8.56630981e-01 4.31824267e-01] [-2.27768078e-01 -5.02071269e-02 -5.93037829e-02 ... 4.13649619e-01 1.67062834e-01 -4.49217528e-01] [ 1.61635831e-01 -2.12008685e-01 3.89838964e-01 ... -7.44758993e-02 -4.51045543e-01 -1.17977694e-01]] [[ 6.07792139e-01 2.29895294e-01 -7.23972321e-02 ... 4.00295965e-02 -1.50016502e-01 1.05723165e-01] [-5.43438315e-01 -3.65379989e-01 7.29778111e-02 ... 2.71752179e-01 5.91678023e-01 -7.04015672e-01] [-2.29144216e-01 -3.43075544e-01 -1.30129442e-01 ... -3.03259820e-01 3.22121412e-01 7.16248155e-01] ... [-1.66319460e-02 -8.00284803e-01 -3.84200327e-02 ... -7.90255964e-02 3.36377412e-01 2.82934487e-01] [ 1.13074869e-01 -1.83440074e-01 2.45382011e-01 ... -4.49841529e-01 -2.69199520e-01 -2.11899877e-01] [ 6.88154578e-01 4.72156793e-01 6.00071289e-02 ... 
2.94086218e-01 -1.18298167e-02 9.79176223e-01]]] [[[ 7.70581484e-01 1.77735135e-01 1.41866589e+00 ... 1.69659710e+00 2.16968179e+00 -4.45668131e-01] [-3.42064761e-02 -1.61014056e+00 1.02912748e+00 ... 5.35043888e-03 1.60598540e+00 -5.11479437e-01] [ 1.20908272e+00 3.00783181e+00 -2.93733859e+00 ... 2.34214354e+00 2.43164182e+00 6.28090501e-01] ... [-3.11858892e+00 1.07703841e+00 -8.82474899e-01 ... 1.61217821e+00 4.32172082e-02 2.11787558e+00] [-2.21921110e+00 -1.67785370e+00 2.45259738e+00 ... 3.25780606e+00 -1.51807153e+00 -5.24972022e-01] [ 1.00178123e+00 -3.02828431e-01 -1.16869140e+00 ... 1.48711395e+00 -2.52842879e+00 -4.97223847e-02]] [[-2.85025883e+00 -3.32054406e-01 2.04877496e-01 ... -2.14402366e+00 3.31158549e-01 -3.57113051e+00] [ 1.39285755e+00 -1.15203118e+00 -2.59376764e+00 ... 2.85467029e-01 -1.28764653e+00 6.00600898e-01] [-1.43276215e+00 -1.25886250e+00 -1.28196275e+00 ... 9.39027011e-01 2.98862129e-01 -1.05968690e+00] ... [-8.65854263e-01 -1.08637106e+00 1.71085969e-01 ... 2.15401387e+00 -4.05410737e-01 1.41527310e-01] [-1.15907848e+00 1.39393091e-01 -2.20253289e-01 ... 8.05894956e-02 6.89670265e-01 -4.25022066e-01] [-1.67844281e-01 1.51650691e+00 -1.11224639e+00 ... -1.68320313e-01 3.47856551e-01 6.98130906e-01]] [[ 2.42096931e-01 -9.31885839e-01 8.08471382e-01 ... -5.17353952e-01 -6.27596259e-01 -1.71063292e+00] [-1.20061147e+00 8.81304204e-01 9.68558729e-01 ... -5.56797743e-01 -1.61867309e+00 -2.22900897e-01] [ 9.03200686e-01 -7.26953626e-01 -3.41659880e+00 ... -1.09199870e+00 -7.19042361e-01 2.54369438e-01] ... [ 1.02006264e-01 1.38945401e+00 6.41875923e-01 ... 1.49062037e+00 -1.22660697e+00 -1.53716898e+00] [-2.48039532e-02 -6.12237692e-01 -2.24013552e-01 ... 1.32863212e+00 -2.21746588e+00 9.60410416e-01] [-2.51187027e-01 -1.00163005e-01 7.55027652e-01 ... 4.67565805e-01 1.77054536e+00 2.22015738e+00]] ... [[-5.76632738e-01 1.76204252e+00 -1.43979341e-01 ... 
1.34181723e-01 2.81020969e-01 4.12428938e-02] [-3.50641400e-01 -5.58435261e-01 -3.23539078e-01 ... 9.22872603e-01 -4.55909848e-01 -1.55951762e+00] [ 1.80773652e+00 6.34593964e-01 9.97811258e-02 ... 3.86630774e-01 4.73391682e-01 1.98078966e+00] ... [ 6.67847455e-01 1.86689699e+00 1.19293511e+00 ... -5.79289079e-01 -8.33318233e-02 4.61481452e-01] [-6.14161134e-01 -1.47751987e+00 -2.96152800e-01 ... 5.87960958e-01 2.12120318e+00 -2.80101299e-01] [-7.10480690e-01 -2.36159945e+00 -4.76071024e+00 ... 2.72187173e-01 -9.63416815e-01 -1.02295065e+00]] [[-1.28833425e+00 -8.11499655e-01 -5.40243268e-01 ... -2.09873080e+00 -1.92273688e+00 -1.13278055e+00] [ 5.20224392e-01 -1.25162220e+00 2.89705133e+00 ... -2.73371249e-01 -1.94228542e+00 -1.80433166e+00] [ 1.28655434e+00 -2.89973927e+00 -2.99988985e-01 ... -4.33750786e-02 -1.27990949e+00 -9.87472057e-01] ... [ 9.98564243e-01 1.58747450e-01 5.01157284e-01 ... -3.70302498e-01 -1.50691354e+00 -1.07331023e-01] [ 1.98445296e+00 5.54540217e-01 2.22090483e+00 ... -3.10524315e-01 2.90286875e+00 -1.57194984e+00] [ 2.85267138e+00 2.55873346e+00 3.17530179e+00 ... -1.92958724e+00 6.04537308e-01 8.84658515e-01]] [[ 1.82524967e+00 1.76442814e+00 1.41565704e+00 ... -8.00634801e-01 -6.05273724e-01 6.98339581e-01] [ 1.42725229e+00 1.79555789e-01 -2.50670671e+00 ... 2.04550236e-01 -6.18329763e-01 1.02882743e+00] [-1.27949393e+00 9.20664549e-01 -2.37657219e-01 ... 3.10143661e+00 -5.79362273e-01 -2.59338832e+00] ... [ 1.64493299e+00 2.08366737e-01 3.23646218e-01 ... 6.77092792e-03 7.55302429e-01 5.17152488e-01] [ 7.82413006e-01 4.85164344e-01 -1.58217847e+00 ... 2.31022215e+00 3.07104850e+00 9.21374932e-02] [ 1.19053447e+00 2.14882112e+00 -2.15644503e+00 ... 9.37789619e-01 -1.12246573e+00 -4.24451500e-01]]] [[[-1.65070459e-01 1.12326629e-01 2.03846782e-01 ... -6.15580976e-02 2.35294506e-01 2.01008335e-01] [ 5.02530392e-03 2.44545434e-02 -3.38036381e-02 ... 
3.35492373e-01 -1.22031391e-01 1.64347574e-01] [ 3.07790171e-02 5.01753628e-01 -8.16379711e-02 ... -4.41538334e-01 -2.95608073e-01 -9.74252522e-02] ... [-2.45586429e-02 9.83708426e-02 2.52714247e-01 ... -1.77405193e-01 -2.45017782e-01 -1.34908438e-01] [-1.06772415e-01 -3.44494805e-02 5.86392999e-01 ... 2.69007742e-01 -1.97292715e-01 -8.41800198e-02] [ 2.34700739e-01 6.93282336e-02 -2.90250573e-02 ... 1.30398810e-01 9.46605280e-02 -1.82367921e-01]] [[-2.67023504e-01 6.16660416e-02 1.62849784e-01 ... 8.22382867e-02 2.40714386e-01 2.07168255e-02] [ 5.49751997e-01 -1.09060541e-01 -3.91068030e-03 ... -2.83626199e-01 2.73644805e-01 9.93169323e-02] [-4.81396794e-01 -4.54439875e-03 -1.44227967e-01 ... -3.36625755e-01 2.90033162e-01 1.62894085e-01] ... [-4.32420731e-01 1.07618280e-01 1.78751815e-02 ... -1.72780707e-01 3.74587148e-01 -4.28740233e-01] [ 2.99954712e-01 4.53137577e-01 1.91343889e-01 ... 3.17205042e-01 1.27553031e-01 -1.60105247e-02] [-5.57820573e-02 1.12570740e-01 -2.92372495e-01 ... -1.40197843e-01 -2.26966649e-01 6.35848418e-02]] [[-1.76837489e-01 7.46614784e-02 2.32494026e-01 ... -1.80173256e-02 1.55422404e-01 8.49979743e-02] [ 1.56904683e-01 -4.35012579e-01 4.48407345e-02 ... 1.07911937e-01 -2.91446805e-01 -3.75493675e-01] [-2.82254349e-03 -9.64719728e-02 -7.33183622e-02 ... 1.03604943e-01 3.84890251e-02 4.31257278e-01] ... [-1.90604791e-01 -2.97489166e-01 -2.79399574e-01 ... -2.93010682e-01 -3.94245744e-01 1.60692129e-02] [ 6.80770632e-03 -8.89333114e-02 -1.09365910e-01 ... -3.19233388e-01 -9.74994376e-02 2.72629648e-01] [ 5.77438921e-02 -1.59076359e-02 -7.37109035e-02 ... -2.22425669e-01 4.93648916e-01 5.40437043e-01]] ... [[ 1.93156958e-01 4.62904759e-02 1.27393469e-01 ... 3.29015225e-01 -9.70660299e-02 -1.74556479e-01] [-1.37674958e-01 -1.29399240e-01 2.62777030e-01 ... -3.67240101e-01 -2.18114495e-01 2.45913237e-01] [ 3.13484937e-01 7.49334872e-01 -9.72578004e-02 ... 1.67124704e-01 2.03536317e-01 8.15773979e-02] ... 
[ 2.43366420e-01 -6.09174296e-02 -3.65064412e-01 ... 4.86467332e-01 1.81309476e-01 3.84405069e-02] [ 5.51322103e-03 3.47260237e-01 1.68197900e-02 ... -2.03730352e-02 -7.47566000e-02 -5.83318509e-02] [-6.33652583e-02 1.31174838e-02 -7.79114515e-02 ... -7.64906183e-02 -2.43284985e-01 5.66713572e-01]] [[ 8.89514312e-02 3.66206691e-02 4.08368051e-01 ... 4.47973348e-02 6.72188178e-02 4.27017249e-02] [ 1.02833338e-01 7.79861435e-02 3.34583789e-01 ... -1.12657927e-01 -2.50199944e-01 -5.12713909e-01] [-4.71466146e-02 -4.37645689e-02 -5.67557383e-03 ... -1.05650667e-02 -1.85142860e-01 -3.02376505e-03] ... [ 3.48158553e-02 1.79173812e-01 -3.22940797e-02 ... 4.98861559e-02 1.02231666e-01 -2.32210532e-01] [-4.31363314e-01 1.85825661e-01 2.91925609e-01 ... -2.40182444e-01 -1.46351978e-01 -1.28153205e-01] [ 3.15116882e-01 3.66751775e-02 -4.44002867e-01 ... 1.02097861e-01 8.40323046e-02 3.57122779e-01]] [[ 1.45717070e-01 -1.79024532e-01 -3.03892314e-01 ... 2.58312523e-01 -1.03369355e-01 -1.15868405e-01] [ 3.33716273e-01 1.81383640e-01 1.86365023e-02 ... 3.17014158e-01 2.46520460e-01 3.41623947e-02] [ 2.37450339e-02 -1.67274132e-01 -1.18862979e-01 ... -3.34801674e-01 -1.95987359e-01 1.85464427e-01] ... [-2.79346973e-01 2.61422396e-01 -2.07632676e-01 ... 2.31472403e-01 2.61667788e-01 -1.72576100e-01] [ 1.58168271e-01 -3.87987751e-03 9.48898569e-02 ... 3.47119719e-01 2.98114896e-01 1.79686442e-01] [-2.42132321e-01 9.62397605e-02 -2.12172791e-01 ... 4.57620382e-01 -2.55234927e-01 1.51500717e-01]]] [[[ 4.36480762e-03 -2.12467104e-01 -1.15837276e+00 ... -8.53416741e-01 8.21278334e-01 -1.10305548e+00] [-2.81686813e-01 6.49235725e-01 -1.04316056e+00 ... 1.03880674e-01 -8.01542163e-01 -1.80123830e+00] [-8.13940942e-01 5.20632923e-01 -1.63187295e-01 ... -2.62668252e-01 7.77968884e-01 7.29685724e-01] ... [-9.57676649e-01 -1.77167207e-01 1.67121923e+00 ... 1.26534665e+00 1.37420332e+00 9.50827479e-01] [ 1.26802850e+00 -9.73967388e-02 -3.08486879e-01 ... 
-7.97311187e-01 -1.30341530e+00 -1.28540087e+00] [ 1.52119607e-01 4.71025944e-01 2.30891526e-01 ... -4.41222697e-01 -9.13750708e-01 -6.82822287e-01]] [[ 1.85349667e+00 4.48927879e-01 1.23641968e+00 ... 9.66562688e-01 -1.04296601e+00 -5.02768636e-01] [-1.31084716e+00 9.90360975e-01 -1.34148240e+00 ... -2.01339650e+00 2.43854985e-01 2.21605372e+00] [-5.50556302e-01 -1.58161843e+00 -1.17168462e+00 ... 9.36092436e-01 -2.18939114e+00 -1.31714070e+00] ... [-9.30343807e-01 1.68411100e+00 -1.00276196e+00 ... 3.83684993e-01 -9.37156200e-01 2.19815159e+00] [ 2.79309899e-01 7.84719467e-01 6.19196109e-02 ... -6.14237428e-01 3.53111267e-01 -1.28857863e+00] [-2.46679559e-01 -3.10756326e-01 -1.12126398e+00 ... 3.75662297e-01 9.80314195e-01 -5.13885207e-02]] [[ 6.62239492e-01 1.89738512e+00 -2.17800513e-01 ... 5.27771115e-01 1.14397955e+00 4.38592285e-01] [ 2.45458093e-02 8.49622190e-01 5.77251256e-01 ... -1.59605479e+00 -9.98794913e-01 -1.52421653e+00] [ 9.99285698e-01 1.47331262e+00 -3.17164332e-01 ... 3.18824708e-01 7.40505576e-01 5.48373580e-01] ... [-1.66359335e-01 1.01732409e+00 -2.97351569e-01 ... -2.23007941e+00 -6.44750655e-01 3.88004929e-01] [ 5.83638370e-01 9.67751920e-01 -9.40301776e-01 ... 6.02440201e-02 3.60655665e-01 2.11278152e+00] [ 3.17540854e-01 5.79778016e-01 4.31389511e-02 ... -5.02996780e-02 -1.98508814e-01 3.82631153e-01]] ... [[-3.12590525e-02 4.69899923e-01 -7.04543650e-01 ... -7.09869325e-01 1.37021875e+00 2.32519031e-01] [ 3.10666323e-01 -2.11909962e+00 1.27989864e+00 ... -1.81844532e+00 -2.08209023e-01 -7.73921967e-01] [ 6.00300543e-02 9.60201502e-01 7.26013064e-01 ... -2.22366564e-02 3.27173680e-01 2.56231451e+00] ... [ 3.14126641e-01 1.70778060e+00 -9.59369957e-01 ... 1.31304502e+00 4.56385136e-01 -4.00070488e-01] [ 8.39081168e-01 6.59689426e-01 2.03706074e+00 ... 7.50057638e-01 6.49638236e-01 1.13793588e+00] [ 2.83495039e-01 -1.77117658e+00 6.26630709e-02 ... 
-9.09674823e-01 2.11285576e-02 4.04224806e-02]] [[ 8.17294240e-01 -3.51291329e-01 -3.65445882e-01 ... 6.36911154e-01 5.69926910e-02 2.66376257e-01] [-1.10054955e-01 4.59269613e-01 -4.61433262e-01 ... 2.75217921e-01 -1.60216177e+00 -1.98525381e+00] [ 3.16367745e+00 2.22491908e+00 -4.90177006e-01 ... -1.35691059e+00 1.31888092e-01 3.51138204e-01] ... [-6.04922593e-01 2.61048526e-01 6.01269901e-01 ... 1.14379478e+00 4.08219218e-01 1.01489651e+00] [ 1.21874833e+00 1.14737678e+00 -3.73069644e-01 ... 2.11212143e-01 2.50117159e+00 -2.27633491e-01] [-1.53171372e+00 6.60957247e-02 9.82376099e-01 ... -1.13750122e-01 1.37082005e+00 1.24755609e+00]] [[ 2.26206541e+00 9.35975671e-01 1.66826403e+00 ... -1.17660522e+00 6.41734898e-01 2.93541956e+00] [-2.95797968e+00 -2.02223873e+00 1.58216834e+00 ... -1.70104116e-01 2.51744479e-01 -4.83907789e-01] [ 9.43521917e-01 -9.78533447e-01 -3.62555480e+00 ... -3.09584856e-01 -7.34945655e-01 -8.87845218e-01] ... [ 1.45934880e+00 -5.00649750e-01 -2.81032443e-01 ... 1.18694693e-01 -1.03161439e-01 -4.67488557e-01] [-1.12180650e+00 4.95886922e-01 -1.69562802e-01 ... -3.42931777e-01 6.25830054e-01 8.93743873e-01] [ 4.01876688e-01 5.89371204e-01 1.08232403e+00 ... -1.98459595e-01 2.79344887e-01 -6.28291547e-01]]] [[[ 2.17633575e-01 -4.54444349e-01 2.99600214e-01 ... 3.53392959e-01 1.53170973e-01 -2.86146164e-01] [-6.42919600e-01 3.85868281e-01 5.27374625e-01 ... 6.73800766e-01 -1.19675541e+00 -1.09529318e-02] [ 1.72212377e-01 -7.06133902e-01 3.93548198e-02 ... -2.38659978e-01 -6.25356078e-01 -3.29875767e-01] ... [-6.71754241e-01 1.26696646e+00 -6.41566813e-01 ... 5.85919559e-01 9.56734642e-02 -5.59079461e-02] [ 3.46346229e-01 3.78404945e-01 -4.16558236e-01 ... -1.15969904e-01 -2.64204085e-01 2.25850850e-01] [ 7.35167682e-01 1.67103738e-01 -8.63076746e-02 ... 1.10797502e-01 -5.41142523e-01 -9.49245214e-01]] [[-7.75046289e-01 -5.08087754e-01 -3.03395659e-01 ... 
5.71862698e-01 -2.91427910e-01 7.38469899e-01] [ 2.15061635e-01 8.71068984e-02 -5.09222567e-01 ... 6.63941026e-01 1.18730612e-01 -2.10184306e-01] [ 7.73735344e-01 -7.89652616e-02 -4.77806479e-01 ... -1.30698785e-01 -3.23821865e-02 -1.93629086e-01] ... [ 5.10970235e-01 -3.50584760e-02 5.99001646e-02 ... 1.39625490e-01 -2.28428200e-01 4.36947346e-01] [-9.72495079e-02 -1.12546630e-01 -9.92849320e-02 ... -6.70555055e-01 3.04648042e-01 7.77898415e-04] [-5.12490928e-01 1.07753269e-01 -8.04305792e-01 ... -4.76332843e-01 1.67258307e-01 -8.63678753e-01]] [[ 5.48703134e-01 5.77797890e-01 1.59142569e-01 ... -4.55721080e-01 -5.87713897e-01 -8.61133397e-01] [-7.96017647e-01 -2.18975276e-01 -1.69147789e-01 ... 2.81562328e-01 -7.38952696e-01 1.07267471e-02] [ 1.69971973e-01 -3.12509507e-01 -5.14571033e-02 ... 3.33152801e-01 -7.32149899e-01 3.54777277e-01] ... [-4.53001738e-01 1.00758396e-01 2.71971878e-02 ... -5.33503592e-01 -2.92696267e-01 -8.04876208e-01] [ 5.02865672e-01 1.90450121e-02 -2.41948873e-01 ... 1.89561665e-01 1.75392285e-01 -7.42635250e-01] [-1.00077599e-01 -2.88327008e-01 -8.94425988e-01 ... 7.85725713e-01 3.56332570e-01 -9.98926640e-01]] ... [[ 1.03115164e-01 -6.32048845e-01 6.50606304e-02 ... -2.68941432e-01 1.08022952e+00 -1.24471700e+00] [-1.03125356e-01 -1.15644094e-02 9.35717165e-01 ... -1.15618110e+00 5.15361905e-01 1.04073443e-01] [-3.17775041e-01 -1.01699352e-01 -9.81050372e-01 ... -5.26032925e-01 -1.17194359e-04 -4.65059243e-02] ... [ 5.83714008e-01 3.53053004e-01 -5.77373266e-01 ... -2.18128841e-02 -5.22815466e-01 6.08539999e-01] [-4.38933790e-01 1.07097197e+00 2.28656352e-01 ... 7.34323859e-01 -4.96805012e-01 7.00976606e-03] [ 8.22870731e-02 1.95281252e-01 3.83629538e-02 ... -6.90902919e-02 -7.31465146e-02 -2.07326874e-01]] [[ 4.28500950e-01 6.07506752e-01 4.33931679e-01 ... 5.81294835e-01 2.46807516e-01 -1.06884889e-01] [-1.01627698e-02 -6.13430679e-01 2.64618963e-01 ... 
-5.22889376e-01 -2.40204036e-01 -6.78599954e-01] [ 6.16706431e-01 -2.22108513e-01 1.17787220e-01 ... -1.25687629e-01 -1.50081798e-01 -5.49755991e-01] ... [-2.90477723e-01 -1.54516563e-01 -1.06482387e+00 ... -2.59798646e-01 -7.39902183e-02 3.33092332e-01] [-9.48503017e-02 -4.17379662e-02 -2.08271682e-01 ... 1.38027892e-01 2.56511748e-01 -1.22091562e-01] [-7.95705497e-01 -5.49477577e-01 -1.05523598e+00 ... -1.14621389e+00 6.25887275e-01 3.06884557e-01]] [[-7.44749159e-02 5.33993356e-02 2.62045145e-01 ... -6.11556888e-01 4.65123534e-01 6.93755805e-01] [-1.06736282e-02 -8.49157095e-01 9.99829531e-01 ... -5.29092968e-01 1.37638962e+00 7.35809267e-01] [ 6.25369966e-01 -7.61519298e-02 2.05305368e-01 ... 7.99856544e-01 -7.58016348e-01 -3.51964056e-01] ... [-1.90809622e-01 2.94450372e-01 -5.09104073e-01 ... -2.02918902e-01 2.55789995e-01 9.82337832e-01] [-6.40051067e-03 1.08393085e+00 4.59389359e-01 ... 5.96725941e-01 2.88234413e-01 -3.71880293e-01] [ 6.59396887e-01 7.42068052e-01 3.87623489e-01 ... 6.67136312e-02 -6.06516778e-01 1.14472672e-01]]]] [[[[-2.77672559e-01 6.39708400e-01 -3.43093544e-01 ... 2.29386732e-01 5.50697327e-01 5.23450434e-01] [ 1.19065732e-01 2.66178370e-01 -2.70744771e-01 ... -5.28766029e-02 7.29715049e-01 -7.97239006e-01] [ 2.39982963e-01 1.57101452e+00 4.08094317e-01 ... -5.49725533e-01 -3.67277920e-01 3.20467919e-01] ... [-3.20902884e-01 9.93901670e-01 -3.39906543e-01 ... -1.43856466e-01 2.24531904e-01 -3.09095114e-01] [ 1.60847473e+00 -6.89658582e-01 4.05755430e-01 ... 3.87992948e-01 -5.10929227e-01 2.02007031e+00] [-5.22791982e-01 -7.56517649e-02 8.60137641e-01 ... -5.56909204e-01 9.50198054e-01 -8.34793568e-01]] [[-4.99431610e-01 -2.02264309e-01 -5.44148326e-01 ... -6.75826907e-01 1.87699243e-01 1.25053944e-02] [-1.16431095e-01 -1.19741023e+00 7.01476276e-01 ... -1.09660959e+00 -1.10660367e-01 4.06035557e-02] [ 8.62785876e-02 -5.07936180e-01 -9.95080397e-02 ... -6.33425057e-01 -1.31799817e+00 -5.43942630e-01] ... 
[-2.58201271e-01 -1.22695279e+00 7.05786645e-01 ... -1.61294997e-01 -5.82424879e-01 7.42170066e-02] [-3.33425313e-01 4.79889631e-01 5.92532635e-01 ... 6.22096777e-01 -8.44861567e-01 -3.14485192e-01] [ 1.41941071e+00 6.38976157e-01 9.43444297e-02 ... 1.25263548e+00 -1.68197006e-02 1.13457453e+00]] [[-2.19625771e-01 -2.56532311e-01 1.27626050e+00 ... 3.33175182e-01 -9.15578604e-02 1.07961786e+00] [ 1.62748665e-01 -1.44813967e+00 4.01285350e-01 ... 3.84835154e-01 -4.74922150e-01 -5.74265540e-01] [-6.89387470e-02 -5.27508855e-01 1.07992065e+00 ... -5.16204596e-01 -7.52861619e-01 -5.39135784e-02] ... [ 1.06739676e+00 1.01878149e-02 -6.91498995e-01 ... 7.23752975e-01 1.13564110e+00 4.96281505e-01] [-7.88636744e-01 1.28103867e-01 1.98230430e-01 ... 5.78337610e-01 1.13597429e+00 8.10982585e-01] [ 8.37263227e-01 2.94959009e-01 1.00985259e-01 ... 8.05251300e-02 7.14576185e-01 -1.31502211e-01]] ... [[-4.67972070e-01 8.94296408e-01 2.13733286e-01 ... -2.22316653e-01 -4.04757522e-02 -1.67365730e-01] [ 8.05978954e-01 -1.53381124e-01 1.82972662e-02 ... -3.41088116e-01 1.00383453e-01 -7.88878143e-01] [ 1.09483730e-02 2.03031003e-01 9.02836844e-02 ... 1.70065546e+00 1.12329066e+00 -1.01712084e+00] ... [ 6.17700815e-01 -5.53274691e-01 3.86665046e-01 ... 7.04597235e-01 -1.14547402e-01 2.55101204e-01] [ 1.53219908e-01 -7.85984159e-01 -5.52791238e-01 ... 1.00307502e-01 -1.54006884e-01 8.07269931e-01] [ 9.65129912e-01 2.58685857e-01 1.13409591e+00 ... 5.57303667e-01 6.37647033e-01 5.49346030e-01]] [[-1.18267179e+00 1.07799336e-01 7.80107558e-01 ... -6.68918947e-03 1.22174358e+00 -2.23365054e-01] [ 8.72990415e-02 -4.92355675e-01 3.36905755e-02 ... -3.69489670e-01 2.08633468e-02 -1.34904087e+00] [ 9.03037131e-01 4.39100444e-01 1.40561855e+00 ... -4.82963204e-01 8.21698427e-01 -6.63151383e-01] ... [-1.96357235e-01 6.91464484e-01 -3.08381587e-01 ... -8.79393816e-01 -2.48248905e-01 -1.63603079e+00] [-2.92097092e-01 -8.50648582e-01 -1.48259151e+00 ... 
-9.47466969e-01 3.29000145e-01 -7.63576850e-02] [ 8.00544381e-01 -1.38427174e+00 6.62987351e-01 ... 5.34180477e-02 3.12891603e-01 4.50065613e-01]] [[ 7.94845462e-01 -2.91727394e-01 7.99186647e-01 ... -1.32846132e-01 -2.74461180e-01 -9.96779501e-02] [ 6.50729001e-01 3.59419674e-01 -1.43827784e+00 ... -3.55962813e-01 -6.89254761e-01 -2.75586843e-01] [-8.11550617e-01 -4.38505143e-01 -5.73881507e-01 ... -3.26328158e-01 6.18302763e-01 7.14967325e-02] ... [-3.43519598e-01 1.36924863e-01 3.24816257e-01 ... -2.13649452e-01 5.75800359e-01 9.24043357e-01] [-5.69736548e-02 3.49904597e-01 8.34376067e-02 ... 5.05344152e-01 8.49331260e-01 1.34858161e-01] [-6.93316698e-01 3.78073692e-01 6.21011496e-01 ... 1.72605500e-01 2.42900297e-01 7.89362729e-01]]] [[[ 2.29012534e-01 -1.13255173e-01 -1.07137114e-01 ... -3.13827880e-02 -1.08931027e-01 7.57505745e-02] [ 1.11488864e-01 5.24890423e-01 -9.83830988e-01 ... -1.64462969e-01 -1.36274144e-01 -1.74482036e-02] [-6.09447211e-02 -2.47742459e-01 2.71998733e-01 ... -1.46694437e-01 -1.71757415e-01 -5.03729701e-01] ... [ 6.27864480e-01 1.30868247e-02 8.94482881e-02 ... 1.35181665e-01 -5.86862683e-01 1.38824582e-01] [-6.07140064e-01 1.89623863e-01 4.15780604e-01 ... -1.67302877e-01 7.39926159e-01 -1.05547585e-01] [ 2.46968672e-01 -3.04505318e-01 2.94992059e-01 ... 3.26428264e-01 5.10229886e-01 -8.11774373e-01]] [[ 8.61706793e-01 2.97814429e-01 1.24551937e-01 ... 3.89736071e-02 1.61212236e-01 4.89016175e-01] [ 1.77024007e-01 -1.47013217e-01 9.35647413e-02 ... 5.13483584e-01 -5.58606863e-01 2.08144695e-01] [-1.45369157e-01 1.66553244e-01 -3.66732448e-01 ... -3.63113135e-02 -8.60489786e-01 -4.61706340e-01] ... [ 6.49700344e-01 -2.38250136e-01 1.66030958e-01 ... -6.01750314e-01 2.45661125e-01 -2.87714005e-01] [ 6.10294938e-03 -2.36758485e-01 -8.50674137e-03 ... -1.28588770e-02 4.56197001e-02 -3.75858694e-01] [ 5.27819753e-01 6.65639222e-01 -5.35255373e-01 ... 
4.74092290e-02 -2.34424006e-02 4.99635875e-01]] [[-1.96795925e-01 2.79497623e-01 2.64847249e-01 ... 3.28401059e-01 -1.32665858e-01 -2.38581046e-01] [-2.42740929e-01 -1.91939175e-01 3.22122306e-01 ... 3.13621044e-01 2.53085673e-01 2.45421946e-01] [-2.72368938e-01 -4.34267551e-01 2.92427316e-02 ... 1.52551442e-01 1.17564864e-01 -2.26296410e-01] ... [-3.23611051e-01 2.10519955e-01 3.34048420e-01 ... 7.24627793e-01 3.46004874e-01 -5.44275999e-01] [ 3.96241933e-01 3.17960262e-01 5.01774251e-02 ... -1.68407604e-01 -6.87446475e-01 -1.71949029e-01] [ 8.88116211e-02 -1.09624416e-01 -3.72573435e-01 ... -5.46532989e-01 -1.10860601e-01 -8.18284079e-02]] ... [[ 2.78728932e-01 -7.79554725e-01 4.82452631e-01 ... 1.53081581e-01 -4.45708215e-01 -2.12503806e-01] [ 7.95741454e-02 -3.35417628e-01 -3.03089231e-01 ... 7.82040834e-01 -2.25860298e-01 3.34799886e-01] [ 2.07716018e-01 3.03578824e-01 3.65652055e-01 ... 3.04660767e-01 -3.69187832e-01 -1.19489968e-01] ... [ 3.52096081e-01 1.33652419e-01 -2.78553277e-01 ... -7.06717312e-01 -5.94004802e-02 2.10339025e-01] [-5.15672192e-02 2.48973012e-01 -3.80533278e-01 ... 5.97923219e-01 1.24840848e-01 -2.07205594e-01] [-4.25648898e-01 -5.05090237e-01 -1.13119580e-01 ... -4.74121690e-01 -6.12651445e-02 -3.37671876e-01]] [[-4.26971763e-01 6.46505177e-01 5.46506464e-01 ... 4.69515324e-01 -3.47349972e-01 -3.46630186e-01] [ 8.94764885e-02 -2.68168509e-01 1.65466532e-01 ... -2.75698960e-01 -9.48552638e-02 -7.14042962e-01] [-4.89513963e-01 -7.11251080e-01 1.26499549e-01 ... 2.89935470e-01 6.72169983e-01 -4.44494672e-02] ... [-3.87006067e-02 7.01819241e-01 2.32756779e-01 ... -4.49379355e-01 -5.96442580e-01 6.65836334e-01] [ 1.45201042e-01 -1.98306486e-01 4.52156246e-01 ... -1.38919219e-01 5.44143543e-02 -5.73854055e-03] [ 1.93436384e-01 -2.09547728e-01 3.22321206e-01 ... -1.79459438e-01 -4.77853149e-01 -3.54726374e-01]] [[ 8.43714893e-01 3.83301347e-01 1.29800707e-01 ... 
-4.65035111e-01 -2.74189383e-01 4.85846698e-01] [-1.01420037e-01 -4.57127750e-01 1.92388058e-01 ... -3.80015612e-01 3.78229797e-01 -5.26684165e-01] [-3.39435279e-01 -4.35110420e-01 -4.25411701e-01 ... 1.52871341e-01 -4.70615298e-01 -4.29764837e-01] ... [-6.86191857e-01 1.33801550e-01 2.62942076e-01 ... -3.57113719e-01 -2.60531455e-01 -6.99296951e-01] [-5.18347807e-02 3.24137330e-01 7.17724800e-01 ... 2.19739690e-01 -2.42430300e-01 4.77945477e-01] [-3.14713903e-02 1.09804854e-01 -6.27936900e-01 ... 1.93281889e-01 5.35086766e-02 9.24191475e-02]]] [[[-1.71063948e+00 1.53819159e-01 -2.39932060e+00 ... 1.11878049e+00 -5.95167875e-01 -3.53688836e-01] [-1.20959640e+00 1.38031781e-01 3.73168617e-01 ... 4.79896367e-01 2.16768456e+00 1.50557625e+00] [ 1.06596481e-02 -1.85382032e+00 1.56923354e+00 ... 5.55467345e-02 1.54409361e+00 1.88430774e+00] ... [ 8.81616399e-02 1.69547641e+00 1.06310058e+00 ... 3.76451433e-01 -3.04618537e-01 3.05342339e-02] [-6.74620986e-01 -3.25079966e+00 5.93771398e-01 ... -2.32052803e-03 7.54643261e-01 1.54043067e+00] [ 2.16978550e+00 4.83421907e-02 -3.41529757e-01 ... 1.17135346e+00 -7.42986858e-01 7.79118240e-01]] [[-1.42880595e+00 -4.16241229e-01 3.49631518e-01 ... 1.27013242e+00 -2.11288643e+00 -1.51980746e+00] [-4.88075495e-01 8.11984017e-02 1.30562818e+00 ... -2.04417729e+00 -7.01809466e-01 -1.73653233e+00] [ 1.38158455e-01 -1.42226088e+00 -1.95073235e+00 ... 1.20040500e+00 -5.89701951e-01 -8.56764391e-02] ... [ 5.55705726e-01 -3.00945863e-02 -1.28147888e+00 ... -1.64532948e+00 -4.77816388e-02 8.16465616e-01] [ 9.81884301e-01 -2.92884886e-01 -2.49706650e+00 ... 1.37482750e+00 3.34613502e-01 -3.62207174e-01] [-2.86307192e+00 -2.57648182e+00 -1.81492651e+00 ... 2.33860087e+00 -1.80499279e+00 -9.04453248e-02]] [[-2.26263022e+00 1.56966674e+00 1.75449145e+00 ... -1.82598245e+00 -1.62781703e+00 -7.58828223e-01] [ 3.40582788e-01 1.02900529e+00 1.20778218e-01 ... 
-1.45591021e+00 3.21944284e+00 2.70817137e+00] [ 4.78158087e-01 1.44992933e-01 -1.53523290e+00 ... -9.68442634e-02 -1.78360987e+00 2.78499901e-01] ... [-3.09578323e+00 -3.30584466e-01 1.14134037e+00 ... 3.06040376e-01 -1.29989076e+00 -3.84350330e-01] [ 2.43878990e-01 1.86122084e+00 3.37044418e-01 ... -6.34221852e-01 9.78358746e-01 1.21222168e-01] [ 9.58323836e-01 1.18578160e+00 1.26949048e+00 ... -8.10999930e-01 1.46652782e+00 -3.54657948e-01]] ... [[-4.49972570e-01 -2.60106236e-01 1.17376626e+00 ... -2.68369436e+00 -1.19371605e+00 1.15827692e+00] [-2.50113547e-01 -2.42965370e-01 -1.15609455e+00 ... 1.63005483e+00 6.18033588e-01 -4.86256093e-01] [-5.07331908e-01 1.95171028e-01 -1.29552722e+00 ... 1.61622107e+00 -7.29467273e-01 -1.44825351e+00] ... [-1.64193404e+00 1.04105771e+00 -1.44169912e-01 ... -8.76446545e-01 2.84837055e+00 -9.57862139e-01] [ 5.88729739e-01 7.99192190e-01 1.73726213e+00 ... -1.26436800e-01 -1.78307247e+00 9.48802352e-01] [-1.14463830e+00 -1.68343997e+00 3.56038928e-01 ... 1.14958322e+00 -1.65932441e+00 -2.83486819e+00]] [[ 2.33523321e+00 2.90655583e-01 -6.50464118e-01 ... 1.37920868e+00 -1.04131961e+00 -5.86875081e-01] [ 1.13846326e+00 -1.50151443e+00 1.94115114e+00 ... -1.84585667e+00 -4.23292637e-01 -1.66151786e+00] [ 2.62713403e-01 -6.07498944e-01 1.13343143e+00 ... 8.81059706e-01 7.52570033e-01 -5.71695685e-01] ... [-3.76616955e-01 1.96036771e-01 1.14229202e+00 ... -2.37441733e-01 -2.06008196e+00 -1.02120769e+00] [ 7.86751285e-02 -6.73305452e-01 1.13231957e+00 ... 3.68462920e-01 1.01172753e-01 3.70246172e+00] [-2.27455258e+00 -4.36450183e-01 -1.94920850e+00 ... -5.12055218e-01 1.07349348e+00 -6.04101658e-01]] [[ 4.39624339e-01 2.97719836e-01 -2.41662458e-01 ... -4.34263498e-01 3.64766002e-01 5.33238113e-01] [ 3.28208596e-01 9.58259583e-01 -1.14107049e+00 ... 1.74126720e+00 -4.41747487e-01 -3.56751308e-02] [ 4.26537156e-01 -1.09116137e+00 -1.65752697e+00 ... 6.84919596e-01 8.30995381e-01 -3.94450831e+00] ... 
[ 2.11465740e+00 9.44730282e-01 1.10457230e+00 ... 2.56624699e-01 -4.43275511e-01 2.67304331e-01] [-1.03029144e+00 6.52531803e-01 -5.53631544e-01 ... 2.00542855e+00 -1.35517406e+00 1.91782141e+00] [ 1.02407835e-01 -1.02810764e+00 -1.76478612e+00 ... 7.87964702e-01 8.30140591e-01 -2.81403780e-01]]] [[[-4.08024728e-01 -3.87125462e-01 -2.51971394e-01 ... 2.82232851e-01 2.82570690e-01 1.92793921e-01] [ 1.17573790e-01 -8.18549097e-02 6.84514642e-02 ... -6.72579780e-02 2.76075363e-01 9.42941457e-02] [-3.48375738e-02 3.49919319e-01 2.26508960e-01 ... -1.24009706e-01 3.41390878e-01 1.58083364e-02] ... [ 4.52447906e-02 -6.68110549e-02 -1.14905782e-01 ... 3.40560935e-02 -2.84662396e-01 -1.78436562e-01] [ 2.51298219e-01 -1.26656860e-01 1.34057418e-01 ... 3.37612182e-02 3.43451709e-01 -2.05313414e-01] [-5.94145238e-01 -5.79041123e-01 -8.32490399e-02 ... 3.77075732e-01 7.72875026e-02 -8.70737657e-02]] [[ 7.60218203e-02 9.25818644e-03 1.23216748e-01 ... 8.95632133e-02 1.07011851e-02 -1.34649843e-01] [-2.51831859e-01 -1.21358838e-02 -9.35848504e-02 ... -4.24112231e-01 4.67486709e-01 -1.24001585e-01] [-1.37755439e-01 2.84793168e-01 -3.54354560e-01 ... -9.86095145e-02 2.55078048e-01 -2.49736346e-02] ... [-2.94448763e-01 -1.83338001e-01 -2.97690570e-01 ... 4.73830879e-01 5.51824331e-01 -2.03264922e-01] [ 2.52102166e-01 -9.57598239e-02 1.54903993e-01 ... 4.38328870e-02 -1.55880705e-01 -1.18959382e-01] [-9.00498033e-02 -5.30175447e-01 -1.70619592e-01 ... 1.99202940e-01 -1.81064531e-01 -3.60048890e-01]] [[ 1.69851601e-01 -3.35340440e-01 -1.76506475e-01 ... 2.49894291e-01 -1.99932933e-01 3.85363221e-01] [ 4.25446481e-02 1.95496678e-01 -4.94737625e-02 ... -9.86632332e-03 -5.00359774e-01 1.51516512e-01] [ 4.05307710e-01 -7.41282329e-02 -1.76288895e-02 ... -4.00163472e-01 2.94788748e-01 -3.91274542e-02] ... [ 2.20805004e-01 2.78028607e-01 -2.40398347e-01 ... -2.90580064e-01 -4.92979199e-01 1.09115481e-01] [ 1.22260690e-01 1.94330916e-01 -2.19550669e-01 ... 
-1.44851640e-01 -2.13642940e-01 -8.05473945e-04] [-3.37497622e-01 -3.84558499e-01 4.73042279e-01 ... 1.16067357e-01 -1.41293341e-02 3.71695042e-01]] ... [[-2.70075411e-01 1.06377259e-01 -2.86000282e-01 ... -1.86801165e-01 9.51222032e-02 4.63196225e-02] [-2.47395903e-01 1.34827578e-02 -1.43466350e-02 ... 2.06419244e-01 -9.33217034e-02 -2.03653976e-01] [-1.08688302e-01 -2.15336889e-01 -2.25292835e-02 ... 2.96073079e-01 1.04764719e-02 2.33503953e-02] ... [-3.06191444e-01 2.22910956e-01 -3.81330587e-02 ... -1.22758918e-01 1.95578039e-01 2.51618236e-01] [ 8.11948404e-02 -1.80196583e-01 -3.90757993e-02 ... 2.65549012e-02 1.45882279e-01 2.98284739e-01] [-8.15995857e-02 -4.31829274e-01 -8.33490938e-02 ... -2.70937686e-03 2.39121422e-01 2.12020054e-02]] [[-1.31470591e-01 -9.20978412e-02 -2.15897694e-01 ... 3.93583216e-02 1.92385063e-01 -1.42144084e-01] [ 1.04805246e-01 -2.40607917e-01 -4.78967577e-02 ... 4.33064789e-01 8.82234424e-02 3.15161556e-01] [ 1.59775671e-02 1.05083734e-01 1.82630122e-01 ... 9.49510606e-04 3.63839000e-01 -2.55393296e-01] ... [-1.03362083e-01 -2.06991732e-01 1.00286469e-01 ... 4.85782400e-02 1.23565458e-02 1.63233373e-02] [ 1.99400291e-01 -2.25960806e-01 3.60539079e-01 ... 7.78013319e-02 3.24532501e-02 -1.02306843e-01] [-3.54896002e-02 -3.96793373e-02 3.67949493e-02 ... 4.79893059e-01 -2.28930503e-01 -7.85996467e-02]] [[-2.57795691e-01 -1.03598289e-01 3.42173539e-02 ... -5.00029922e-02 5.43991514e-02 -9.30658206e-02] [ 9.00616422e-02 -3.24079841e-01 1.42169774e-01 ... -9.36586261e-02 -1.42463773e-01 3.07258070e-01] [ 2.35562280e-01 1.58369973e-01 -3.95808928e-03 ... 1.23721585e-01 -2.40369871e-01 -1.53593078e-01] ... [-5.62005155e-02 -2.55224138e-01 4.14895080e-02 ... -1.60011828e-01 -2.41240740e-01 -6.04094705e-03] [-2.27468401e-01 5.73252961e-02 -1.41059950e-01 ... 2.94842795e-02 -4.60136861e-01 3.32586616e-01] [-7.40266070e-02 -1.50883915e-02 3.82521838e-01 ... 
4.19352293e-01 1.06346093e-01 1.49409831e-01]]] [[[ 1.75669956e+00 -8.62481356e-01 -5.33592582e-01 ... -1.53763437e+00 -1.94755644e-01 -1.32112801e+00] [ 1.25748563e+00 -2.52320266e+00 -7.63714731e-01 ... 6.22532129e-01 8.21229339e-01 3.16210002e-01] [ 4.68201376e-02 1.18455017e+00 -6.88513458e-01 ... 2.09253216e+00 2.09152341e+00 1.41673878e-01] ... [-8.77463102e-01 9.16417480e-01 6.09135866e-01 ... 1.71691012e+00 5.16879678e-01 -1.02946520e+00] [ 2.72451669e-01 -1.62993342e-01 -6.28886700e-01 ... 1.13353050e+00 -3.71522874e-01 -1.00779271e+00] [ 5.51519096e-01 -4.48229283e-01 5.68269074e-01 ... 1.10474730e+00 -1.31809637e-01 4.97127045e-03]] [[-4.44683015e-01 -4.66826171e-01 -6.02934837e-01 ... -1.00725305e+00 -8.60897899e-01 1.55156684e+00] [ 1.19196224e+00 1.09130740e+00 -1.80234206e+00 ... -5.95359623e-01 6.87514305e-01 -9.27561402e-01] [-4.02566820e-01 8.21565807e-01 -7.25458741e-01 ... 7.40265027e-02 -1.85073495e-01 9.29318666e-01] ... [ 1.44704556e+00 -8.21871400e-01 -2.65637457e-01 ... -8.06928992e-01 3.68119031e-01 5.37497401e-01] [-3.83760363e-01 1.90372932e+00 -1.90903687e+00 ... 6.79144442e-01 -5.64957500e-01 -2.25926548e-01] [-1.34302425e+00 -6.98513806e-01 -1.11746183e-02 ... -4.64639783e-01 1.07844651e+00 -5.01480699e-01]] [[ 1.60705745e+00 8.16669017e-02 4.21077043e-01 ... -4.82334167e-01 -1.83961916e+00 -8.56033087e-01] [ 1.87909317e+00 -1.02105474e+00 -2.26658583e+00 ... 1.43982023e-01 9.22659338e-02 1.27091944e+00] [ 3.32923144e-01 1.55911148e-01 1.37561560e+00 ... 1.06581938e+00 8.13192010e-01 1.88305998e+00] ... [-3.98123652e-01 3.29811275e-01 -6.52301908e-01 ... 2.51806951e+00 -7.64161885e-01 7.35283196e-01] [-3.69855374e-01 1.97089225e-01 1.21537888e+00 ... -1.43051553e+00 -7.17739224e-01 9.68679488e-01] [-5.00721872e-01 2.10944605e+00 1.05959558e+00 ... -7.50714362e-01 1.29344809e+00 6.56218588e-01]] ... [[ 5.21281421e-01 -3.43799204e-01 2.56231576e-01 ... 
-3.48607063e-01 -1.07046092e+00 -5.13396204e-01] [ 8.71485889e-01 1.91477287e+00 1.25587046e+00 ... 4.16313112e-01 5.65402448e-01 2.17808270e+00] [-1.37174451e+00 7.87878871e-01 -1.21377341e-01 ... -5.28400719e-01 -1.21164656e+00 1.12563729e+00] ... [ 5.50135635e-02 -1.90102506e+00 -1.01595724e+00 ... -6.99390590e-01 9.65408087e-01 -1.14510524e+00] [ 1.08520305e+00 -7.60071874e-01 1.75822532e+00 ... -2.95124578e+00 -1.49855268e+00 -6.42722368e-01] [ 7.19092667e-01 6.24727070e-01 -8.51494610e-01 ... 2.77739692e+00 -9.27825868e-01 -8.55122507e-01]] [[-8.19146693e-01 -7.48547912e-01 6.49713039e-01 ... -3.90567601e-01 1.33398354e+00 8.59626830e-02] [ 8.77678216e-01 2.45573473e+00 -9.40273702e-02 ... 2.41132647e-01 8.69923949e-01 -9.94915307e-01] [ 4.79720503e-01 1.05367148e+00 -2.50830054e-01 ... -6.41175508e-01 2.44828224e-01 2.42641091e-01] ... [ 6.34160399e-01 -1.33842075e+00 1.51820469e+00 ... -2.41213411e-01 6.29154861e-01 -5.34619451e-01] [ 1.46292001e-01 6.71157241e-01 -1.07068455e+00 ... 2.15729809e+00 9.89585578e-01 9.61401165e-01] [ 6.93864226e-01 1.22297120e+00 -2.38679528e-01 ... -4.15191352e-01 4.63741362e-01 3.40985954e-01]] [[-1.51658261e+00 1.01382661e+00 -4.15203005e-01 ... 1.05055642e+00 -1.11345208e+00 6.65827513e-01] [-9.88941789e-01 -6.91530585e-01 -1.54475772e+00 ... -2.01540083e-01 -2.03463411e+00 -1.77940023e+00] [ 5.07549524e-01 -1.15570855e+00 -7.18905404e-02 ... -1.44506419e+00 1.41192639e+00 2.03528070e+00] ... [ 4.14070755e-01 9.21807945e-01 -2.01323652e+00 ... -1.72231328e+00 2.31226087e+00 3.78771615e+00] [ 1.25641599e-01 -1.36923862e+00 6.50609255e-01 ... -2.33100981e-01 9.08312872e-02 3.23220074e-01] [-8.06858659e-01 -2.42630410e+00 4.39408809e-01 ... -5.42224288e-01 4.98390757e-02 -1.65156633e-01]]] [[[-3.60257536e-01 -5.65104425e-01 -7.46717930e-01 ... -2.42542699e-02 3.10198426e-01 -9.96883586e-02] [ 3.88914555e-01 3.37849319e-01 5.66185236e-01 ... 
-5.47780022e-02 4.63672370e-01 -5.49161017e-01] [-1.31269172e-01 -1.09994650e+00 9.41726148e-01 ... -4.55046266e-01 -5.15715145e-02 -8.00265491e-01] ... [ 5.63878119e-01 -7.27724731e-01 -2.22299412e-01 ... 5.61267674e-01 3.39115649e-01 -1.02113104e+00] [-1.72483250e-01 2.26757362e-01 3.39270949e-01 ... 8.64828154e-02 -4.01207566e-01 1.58794194e-01] [-7.84592628e-01 -1.46588445e-01 -9.30615235e-03 ... -7.12905943e-01 2.20948905e-01 -3.76687646e-01]] [[ 6.76439643e-01 -5.88120036e-02 7.49427639e-03 ... 9.90117431e-01 2.52038956e-01 -6.43533707e-01] [-5.29844701e-01 -1.24066487e-01 6.85565948e-01 ... 4.55701262e-01 -6.38177216e-01 1.09132552e+00] [-3.95669520e-01 1.07245721e-01 -2.78859615e-01 ... -9.50425804e-01 9.36777890e-01 -9.85700309e-01] ... [-1.01234347e-01 -5.56539185e-02 -1.42408863e-01 ... -6.73023164e-01 -1.87925756e-01 -4.69385594e-01] [-5.68546414e-01 -3.54242146e-01 -9.79951397e-02 ... -1.66175485e-01 -8.01459193e-01 9.48800206e-01] [-4.30300862e-01 5.93309700e-01 3.75164509e-01 ... 4.59346533e-01 -3.40659797e-01 -6.40286267e-01]] [[ 1.12605870e+00 4.02240455e-02 -5.23007959e-02 ... 6.36374414e-01 -4.40409333e-02 -1.74167559e-01] [-1.28744364e-01 -7.63045669e-01 1.06966846e-01 ... 5.23679554e-01 -5.01505196e-01 4.54266131e-01] [-1.04077899e+00 -2.39184260e-01 6.08461320e-01 ... 4.05940771e-01 -8.50235343e-01 7.51709342e-01] ... [-1.62688002e-01 -1.18157104e-01 -3.35487872e-01 ... 6.58555806e-01 3.63258898e-01 1.54928252e-01] [-6.15944639e-02 2.40396738e-01 5.35931531e-03 ... -1.77006170e-01 -7.72847712e-01 5.26062399e-02] [-3.64237100e-01 1.54549539e-01 4.00893390e-01 ... -1.43842161e-01 3.26281860e-02 1.23000927e-01]] ... [[-9.11339879e-01 2.85893589e-01 3.33633095e-01 ... 1.03954017e-01 8.69530559e-01 -5.71203604e-02] [-2.09103614e-01 3.61190498e-01 6.15787387e-01 ... 2.39033103e-01 -1.22500747e-01 3.63115013e-01] [-2.44797379e-01 -4.38741773e-01 4.80575651e-01 ... -4.04105335e-01 6.04839586e-02 7.00644076e-01] ... 
[-2.65141547e-01 -1.29122901e+00 -2.06779957e-01 ... -5.58106527e-02 -2.09219784e-01 1.65234417e-01] [ 1.19179392e+00 -8.69842291e-01 -8.24571908e-01 ... 1.19308937e+00 -1.70867115e-01 -5.69924414e-01] [-7.66055426e-04 -4.68882680e-01 3.25345308e-01 ... -6.29210591e-01 9.43944082e-02 -3.54202092e-01]] [[-2.79063046e-01 -4.14554566e-01 6.28679156e-01 ... 4.04080525e-02 -1.19231677e+00 1.95660129e-01] [-1.91129446e-01 5.68976641e-01 -3.82562935e-01 ... 1.63099721e-01 -8.82119238e-02 -3.82733457e-02] [-1.86566129e-01 -3.90831113e-01 -4.85576123e-01 ... 4.75134775e-02 8.88172761e-02 2.39446899e-03] ... [ 2.77456734e-02 5.02203286e-01 -7.01255351e-02 ... 1.76019520e-01 2.88192451e-01 1.45729974e-01] [-4.06221747e-01 7.85291076e-01 -5.00335813e-01 ... -3.99543345e-01 -1.59877837e-01 7.14995563e-01] [ 5.50612397e-02 -4.95092906e-02 8.46699551e-02 ... 1.03875613e+00 3.36218178e-01 5.80586255e-01]] [[-5.60469747e-01 -5.79364359e-01 -6.12212241e-01 ... 4.49076518e-02 -9.17919517e-01 -1.95452660e-01] [ 4.20060605e-01 2.69935131e-01 4.02272314e-01 ... -1.51649547e+00 4.70734984e-01 1.15352988e+00] [-3.91027927e-01 3.70088577e-01 4.05999213e-01 ... -3.63213390e-01 -5.65833785e-02 6.85811341e-02] ... [ 7.78273121e-02 2.26116907e-02 6.34734213e-01 ... -6.83955073e-01 -2.40434140e-01 1.04921997e-01] [-8.02984163e-02 6.49397969e-01 1.47570550e+00 ... -1.17591906e+00 -2.93502450e-01 -2.31039688e-01] [ 8.90828520e-02 -9.22646642e-01 -2.41654322e-01 ... -3.32957655e-01 4.41985220e-01 -1.95395634e-01]]]] [[[[-1.90749407e-01 -9.19500828e-01 -4.14677560e-02 ... 1.30635584e-02 4.67026889e-01 2.53132045e-01] [-7.60285258e-01 -1.42265153e+00 4.94594052e-02 ... -5.15567899e-01 3.57667893e-01 3.06860656e-01] [ 8.24407160e-01 -1.58671588e-01 5.88810682e-01 ... 1.24540019e+00 -2.90619340e-02 -1.40211499e+00] ... [-5.95352650e-01 -4.19624746e-01 -9.30466294e-01 ... 8.54201436e-01 -5.26550233e-01 9.77467299e-01] [ 4.88757975e-02 -8.88624310e-01 1.37230074e+00 ... 
-1.64613187e-01 5.39602637e-01 -3.05441171e-01] [ 4.26513314e-01 9.97814119e-01 -4.74802911e-01 ... 2.66495705e-01 1.30410776e-01 1.02792573e+00]] [[-5.95277607e-01 1.97274417e-01 -1.82145864e-01 ... -1.51269790e-02 -2.67473366e-02 5.00133097e-01] [ 6.09149337e-01 -8.90223682e-01 1.25455499e+00 ... -9.01473641e-01 6.64345682e-01 5.34930050e-01] [ 5.55661201e-01 -5.58514237e-01 -6.16097450e-02 ... 8.23324248e-02 6.37004435e-01 -1.01887679e+00] ... [ 4.51024547e-02 -4.44468021e-01 3.87058645e-01 ... -1.00668713e-01 -1.22742034e-01 -4.04251292e-02] [ 3.31661731e-01 3.90698701e-01 -6.35809541e-01 ... -1.37118503e-01 -3.30270946e-01 -3.86653066e-01] [-2.29199782e-01 -5.38085103e-02 -1.22530508e+00 ... 3.17282081e-01 8.89602423e-01 5.06741814e-02]] [[-2.66808718e-02 -7.14218915e-01 -1.53342500e-01 ... -9.22529280e-01 8.76025558e-01 3.39582950e-01] [ 5.90939045e-01 7.88404271e-02 6.50768101e-01 ... -3.71432483e-01 2.53992945e-01 -8.08650672e-01] [ 5.75894356e-01 -5.47943525e-02 -8.18184137e-01 ... 1.16809279e-01 -6.28711522e-01 -9.14881051e-01] ... [ 8.34051430e-01 3.79237980e-01 1.30761695e+00 ... -3.98558855e-01 -4.46488708e-01 -1.74790278e-01] [-1.14529514e+00 -9.83322978e-01 4.81143236e-01 ... -3.48020107e-01 -3.59996319e-01 2.01262280e-01] [ 3.79853457e-01 8.91673863e-01 -2.56859124e-01 ... 9.74814057e-01 4.86345887e-01 2.55573958e-01]] ... [[ 1.13678622e+00 -6.48788393e-01 -1.37719467e-01 ... -5.13361931e-01 -7.92631924e-01 3.72226715e-01] [ 2.98359059e-02 -1.32253692e-01 1.48405254e+00 ... 4.90371943e-01 -1.62504092e-01 -2.71047205e-01] [-5.19523025e-01 -1.26127422e-01 2.12680444e-01 ... -2.77808607e-01 3.09935570e-01 2.10655004e-01] ... [-1.17845535e+00 -9.85448994e-03 2.09127456e-01 ... 9.31731939e-01 -3.55695397e-01 1.27423167e+00] [ 1.75598077e-02 -1.32166564e+00 -5.41280508e-01 ... -8.05533111e-01 5.15630424e-01 5.09026289e-01] [-4.52156484e-01 -1.39130771e+00 5.55738509e-01 ... 
-8.97884443e-02 1.68495393e+00 1.47814706e-01]] [[-3.39365959e-01 -1.14878036e-01 6.25031173e-01 ... -5.17956734e-01 -6.64715245e-02 -4.80675846e-01] [-1.25546217e-01 7.81123042e-01 -3.73061001e-01 ... 1.55102384e+00 -5.24326086e-01 -6.18780851e-01] [ 4.44102138e-01 8.70476007e-01 -1.34005201e+00 ... 1.46193278e+00 -8.43131065e-01 1.85188189e-01] ... [ 4.53116685e-01 5.99800229e-01 8.33656564e-02 ... 8.80284846e-01 9.18136686e-02 -1.35521770e+00] [ 3.74481708e-01 -3.60406131e-01 -1.48210084e+00 ... 9.48276222e-02 1.32370543e+00 -3.57498050e-01] [ 7.38236129e-01 5.47064245e-01 1.09966546e-01 ... -1.87274128e-01 -1.34793460e+00 -6.98375285e-01]] [[ 5.01865894e-02 -5.63872933e-01 -8.69446278e-01 ... 3.97434622e-01 -9.25894141e-01 6.87692404e-01] [-6.81731880e-01 4.34851438e-01 4.47283179e-01 ... -1.34545758e-01 9.70059037e-01 1.20869018e-01] [-3.25586915e-01 -2.07316726e-01 5.18455207e-01 ... 5.95517635e-01 -6.60343885e-01 -1.00083160e+00] ... [ 3.31300080e-01 4.07281816e-02 3.19138229e-01 ... 3.50433320e-01 5.41047990e-01 4.72751886e-01] [ 8.25906575e-01 -5.64034462e-01 -1.95762962e-01 ... 7.08100796e-01 1.48862258e-01 2.79159904e-01] [-4.83218372e-01 6.02516830e-01 1.50752521e+00 ... -1.22968271e-01 3.32289040e-01 4.09474730e-01]]] [[[-1.61971375e-01 2.16796964e-01 -7.05154240e-02 ... -3.54997933e-01 1.35876508e-02 4.02876437e-01] [-3.59028459e-01 6.95453107e-01 2.09346712e-02 ... 9.17673528e-01 -2.22484112e-01 4.15333137e-02] [-2.81761080e-01 1.70405746e-01 4.45996858e-02 ... 1.13609433e-01 1.24922313e-01 -6.65584981e-01] ... [-1.04051173e+00 -9.77617800e-02 -1.30067900e-01 ... 5.36298715e-02 -8.63752291e-02 -4.10236329e-01] [ 4.59739774e-01 -3.34006131e-01 -8.14706385e-02 ... 2.74456441e-01 -2.48794466e-01 -6.80086374e-01] [-1.62047654e-01 -1.07391886e-01 3.19499224e-01 ... -2.86668420e-01 -1.58274412e-01 1.04577832e-01]] [[-2.14972869e-01 -9.99828950e-02 4.56169218e-01 ... 
-3.71404141e-01 -3.64928961e-01 3.96111131e-01] [ 2.11824447e-01 -8.59868899e-02 -5.54652870e-01 ... -9.20469701e-01 1.62408035e-02 -1.69356823e-01] [-7.33929798e-02 -4.80662286e-01 -5.24727821e-01 ... 2.23008230e-01 3.21807057e-01 4.87118453e-01] ... [ 2.73337126e-01 1.27793297e-01 -2.52020329e-01 ... 1.09851010e-01 2.45227754e-01 9.98283401e-02] [-2.74321675e-01 1.54598802e-01 4.64231849e-01 ... -1.55590102e-01 6.26985133e-01 2.02284440e-01] [-1.80386037e-01 -5.24632394e-01 -5.16717732e-01 ... -3.18372220e-01 -5.98113239e-01 3.12042654e-01]] [[-8.21225420e-02 1.67741835e-01 -2.25939929e-01 ... 5.88525534e-01 4.91756350e-01 -9.14827287e-02] [ 4.93412137e-01 -1.02502912e-01 -6.68552160e-01 ... 6.67278707e-01 -9.60022360e-02 4.67156202e-01] [ 1.21130690e-01 -5.09668231e-01 -4.07874852e-01 ... 3.60766858e-01 -6.38939589e-02 2.88590431e-01] ... [ 2.45748777e-02 -8.88635963e-03 9.29870546e-01 ... -5.44517398e-01 4.57466871e-01 -1.06444754e-01] [ 3.90319198e-01 2.43806973e-01 -1.44524366e-01 ... -3.54280293e-01 -2.29256392e-01 1.52615145e-01] [-1.71435177e-01 -5.72113395e-01 1.28954262e-01 ... 2.70135552e-01 -7.36252367e-02 2.75785655e-01]] ... [[ 1.89526424e-01 3.32430936e-02 -7.29194164e-01 ... -3.95879239e-01 1.21169854e-02 -1.22355498e-01] [ 6.99345708e-01 7.20716938e-02 9.37688425e-02 ... 2.38145992e-01 -2.82539785e-01 5.05336463e-01] [ 1.98939428e-01 -3.41048092e-02 -8.56744498e-03 ... -2.93015331e-01 -4.39298123e-01 -7.92425722e-02] ... [ 5.85135460e-01 4.90442812e-01 1.12188071e-01 ... -3.15415949e-01 7.83617273e-02 -2.38960728e-01] [-3.17737877e-01 -3.63197565e-01 -2.67818838e-01 ... 1.83876187e-01 3.31774205e-01 -3.50400180e-01] [ 1.16734207e-01 -1.28098592e-01 -2.95995027e-01 ... -8.56624916e-02 1.91693678e-01 2.31257454e-01]] [[ 6.22001648e-01 -9.87775028e-02 4.90343273e-01 ... -3.49518508e-01 -1.27396375e-01 -3.44193846e-01] [-9.08496082e-02 5.53794682e-01 -3.53911698e-01 ... 
-7.18491137e-01 1.07460797e-01 -2.98802853e-01] [-2.67183304e-01 -1.92405790e-01 -2.34069392e-01 ... 1.20930932e-01 -1.94754601e-01 -1.34383410e-01] ... [-3.36559355e-01 -2.88581908e-01 -4.45356011e-01 ... -2.88497746e-01 4.19751227e-01 -2.80964106e-01] [ 1.63763121e-01 1.05120480e+00 6.51378512e-01 ... -4.79796171e-01 -2.65105844e-01 5.21091446e-02] [-6.25385702e-01 4.29166704e-01 -1.61625698e-01 ... -1.42455310e-01 1.10641666e-01 -1.62424296e-01]] [[-4.17732745e-01 3.03391457e-01 2.78583735e-01 ... 2.69907981e-01 2.68540531e-01 -5.50605237e-01] [-4.06189919e-01 1.72780156e-01 6.89046741e-01 ... 1.93670437e-01 2.94245332e-01 -2.83596069e-01] [ 3.31667662e-01 2.47447580e-01 1.36546537e-01 ... 2.71812141e-01 2.28644967e-01 -4.68576364e-02] ... [ 1.64320860e-02 -1.82864025e-01 2.87666768e-01 ... 1.51108623e-01 1.94504738e-01 2.84910828e-01] [-2.66998291e-01 -8.64616781e-02 -5.66064775e-01 ... -4.90465999e-01 4.17999119e-01 2.52668291e-01] [-5.65280259e-01 -9.00049880e-02 2.50615418e-01 ... -8.49240273e-03 -2.40158483e-01 -4.10584360e-01]]] [[[-3.73111939e+00 2.62409425e+00 1.46875930e+00 ... -5.10400772e-01 -5.05195379e-01 7.21250117e-01] [-2.08561015e+00 -1.26346397e+00 -1.67758477e+00 ... 1.98647308e+00 -4.92447048e-01 1.51545298e+00] [-2.88195372e-01 -2.87307441e-01 8.26358855e-01 ... -7.09684908e-01 -4.50179070e-01 2.22881198e+00] ... [-2.32020974e-01 1.40808773e+00 -1.70479870e+00 ... 9.35485780e-01 9.44514215e-01 2.38948628e-01] [-1.50331855e+00 1.08601511e+00 -2.51423860e+00 ... 5.68955183e-01 -1.25135994e+00 1.12083530e+00] [ 3.59983373e+00 -2.05453229e+00 2.70821595e+00 ... 7.88011611e-01 1.33495808e+00 -6.08157158e-01]] [[-1.81174600e+00 -1.71455300e+00 -8.77190471e-01 ... -2.82276154e-01 2.26619649e+00 2.76672423e-01] [ 2.83401299e+00 7.33971953e-01 -7.54099727e-01 ... -2.16833806e+00 1.28321421e+00 -9.33581531e-01] [ 1.25394297e+00 -7.16842175e-01 9.14852440e-01 ... 9.99677002e-01 -8.63275588e-01 -1.61841244e-01] ... 
[ 1.35347009e+00 -2.50108171e+00 1.74870765e+00 ... 1.82677126e+00 -8.13155890e-01 1.87041378e+00] [ 1.63722873e+00 8.46138418e-01 -2.41515684e+00 ... 2.78238273e+00 -1.94312131e+00 -1.42260134e+00] [-1.12439787e+00 1.26051164e+00 -5.92214465e-01 ... -2.00366187e+00 -8.88996065e-01 -4.27103162e-01]] [[ 8.60328078e-01 -3.79684985e-01 2.26184058e+00 ... -5.31331062e-01 2.15487480e+00 -2.43440747e+00] [-7.66227320e-02 -3.93350273e-02 -2.66539764e+00 ... -1.06325853e+00 -4.13824826e-01 -5.82020283e-01] [-1.89283407e+00 -1.16582012e+00 -2.08633947e+00 ... -2.31409356e-01 -1.66471303e+00 1.59856319e+00] ... [-1.59459221e+00 2.40890121e+00 1.51971960e+00 ... -8.80304992e-01 -7.07012832e-01 2.19186234e+00] [-1.07884789e+00 -5.14785051e-01 1.86547804e+00 ... 3.55865508e-01 -1.47030735e+00 -1.65111792e+00] [-1.05174470e+00 1.67840767e+00 1.30981994e+00 ... -2.60448515e-01 -1.96773100e+00 -4.99817014e-01]] ... [[-1.93668032e+00 1.18100977e+00 -6.19678795e-01 ... -7.30633020e-01 1.18219674e+00 7.56266177e-01] [ 7.33805835e-01 -1.79454041e+00 -1.01055717e+00 ... -2.40554586e-01 1.04812646e+00 -1.35599184e+00] [-1.39002442e-01 1.30471373e+00 -2.16942835e+00 ... 4.39878672e-01 -1.23414703e-01 1.39368570e+00] ... [ 1.85225475e+00 -1.42320953e-02 7.90392578e-01 ... -3.62534404e-01 -3.52793038e-01 1.72908559e-01] [ 1.26850581e+00 1.20410526e+00 9.76150408e-02 ... -6.63934529e-01 5.01587093e-01 6.61178827e-01] [ 3.16064097e-02 -1.10206819e+00 7.88033783e-01 ... -1.90321639e-01 1.14546366e-01 -1.27471411e+00]] [[-3.21954131e-01 -1.08996451e+00 -3.83229882e-01 ... -6.39044583e-01 3.32729012e-01 8.87923837e-01] [ 1.48683894e+00 -1.93838477e-01 -3.17475855e-01 ... 1.00396693e+00 -1.43065536e+00 1.38267815e+00] [ 2.25037599e+00 2.44400576e-01 -1.12174690e+00 ... -1.21508229e+00 -1.83910036e+00 -1.22953856e+00] ... [ 1.47161078e+00 -2.98462033e+00 5.28880179e-01 ... -5.07023513e-01 -4.24818695e-01 7.37089038e-01] [-2.45421350e-01 1.56860566e+00 -6.27122104e-01 ... 
1.69030756e-01 -1.67461526e+00 4.30622667e-01] [-1.10907030e+00 -3.08378309e-01 -7.49510169e-01 ... -1.47216308e+00 -1.03337967e+00 -2.50169325e+00]] [[-3.12608629e-01 -2.94080091e+00 -9.61141109e-01 ... 6.27237976e-01 -4.55112159e-01 -2.59180284e+00] [-3.47690892e+00 -2.19365931e+00 -5.05217791e-01 ... 8.49852264e-01 5.50735712e-01 2.23442960e+00] [ 8.64498317e-01 2.24365544e+00 -6.08743250e-01 ... -1.36359227e+00 3.72458016e-04 2.80112147e-01] ... [-1.63720950e-01 -1.91864967e+00 1.42342949e+00 ... 7.34294772e-01 1.84875751e+00 -6.47070289e-01] [-1.36075854e-01 1.20362198e+00 -3.21895629e-01 ... 8.12804699e-01 -2.09588122e+00 -4.64475691e-01] [ 1.71591341e+00 7.17979848e-01 -1.81925225e+00 ... -1.40384996e+00 2.61165428e+00 9.99382019e-01]]] [[[ 2.99392253e-01 -3.36347789e-01 -1.09192513e-01 ... 6.20411262e-02 -2.92501181e-01 -1.82156399e-01] [ 1.09645640e-02 -8.74591395e-02 -6.25612020e-01 ... 6.72260225e-02 -2.74023235e-01 -3.27226985e-03] [-8.02493915e-02 -7.71903619e-02 1.15146793e-01 ... 1.29037291e-01 2.05156580e-01 8.16029776e-03] ... [ 1.78149506e-01 -2.21440181e-01 -1.92529988e-02 ... -1.46532655e-01 3.01600218e-01 1.28095493e-01] [-9.93546471e-02 -7.19755515e-02 -1.37463585e-01 ... 4.10652220e-01 2.43085355e-01 -1.96601078e-01] [ 1.79929547e-02 -1.82497889e-01 -4.93685842e-01 ... 5.23134828e-01 -3.45639437e-01 -3.05307597e-01]] [[ 2.28620082e-01 -5.89541972e-01 -3.49831283e-02 ... -3.82088195e-03 -2.57632047e-01 3.14670317e-02] [-2.82876521e-01 2.47137159e-01 2.05173016e-01 ... 6.82838634e-02 -1.42900258e-01 -3.46700996e-01] [-1.82288691e-01 -4.05879676e-01 5.77450544e-02 ... 7.09158543e-04 -2.13395894e-01 3.68152499e-01] ... [-3.55532289e-01 2.76697218e-01 -1.30165070e-01 ... -2.81218350e-01 -2.01414540e-01 3.55053216e-01] [ 1.69963256e-01 -3.30051810e-01 4.04142663e-02 ... 7.34912872e-01 2.22657368e-01 -2.45410457e-01] [-2.23607123e-01 6.55661345e-01 2.85534441e-01 ... 
1.29351184e-01 1.72169283e-01 -1.02464043e-01]] [[ 5.33704221e-01 -1.38226245e-02 -1.58840597e-01 ... -2.22310662e-01 -3.19431216e-01 -4.65121835e-01] [ 1.59550220e-01 3.81677628e-01 9.15685147e-02 ... 1.22918963e-01 5.99824190e-01 8.54232013e-02] [-1.36111885e-01 4.10473645e-01 5.94912842e-02 ... 4.25784737e-01 -1.26329616e-01 -2.80081481e-02] ... [-8.96474067e-03 -2.71011069e-02 6.27800584e-01 ... -1.49769172e-01 1.04277454e-01 2.74323791e-01] [-2.28140771e-01 8.54328424e-02 1.04902633e-01 ... 8.62622261e-03 -1.70003608e-01 3.28813553e-01] [-1.89357176e-01 -1.52160019e-01 -3.17253381e-01 ... 1.95053831e-01 1.15683727e-01 1.47994176e-01]] ... [[-3.51982534e-01 -2.34630153e-01 -1.37568191e-01 ... 4.76886451e-01 4.96225983e-01 -1.50846422e-01] [ 6.92699075e-01 -6.46786019e-02 3.26915197e-02 ... 2.53792498e-02 1.92261085e-01 -3.87466282e-01] [ 3.49549443e-01 3.13203752e-01 -5.77338263e-02 ... 3.96897309e-02 -1.91618875e-01 -1.36514887e-01] ... [ 5.37144616e-02 2.22473413e-01 1.60432719e-02 ... 6.01342507e-02 -2.23594625e-02 -6.62610590e-01] [ 1.63223177e-01 -1.44555464e-01 2.91658282e-01 ... 3.01032722e-01 -3.77550632e-01 2.13256583e-01] [-4.62150425e-02 -4.73585300e-04 1.52136281e-01 ... -5.65123022e-01 -5.18348068e-02 -3.15894365e-01]] [[-2.11790368e-01 1.71888784e-01 -1.30875126e-01 ... 1.27718285e-01 2.00951576e-01 6.62769079e-02] [ 1.33463398e-01 4.49043453e-01 1.28186569e-01 ... 1.93113789e-01 -1.33321807e-01 2.23701894e-01] [ 8.39516073e-02 -6.12098649e-02 -7.93426409e-02 ... -2.51450371e-02 -2.92124718e-01 2.91611195e-01] ... [-5.55598661e-02 -2.03021079e-01 -3.12955350e-01 ... -1.41632587e-01 3.70947242e-01 -2.11627260e-01] [-1.38560265e-01 5.56769706e-02 -2.73331612e-01 ... 1.15798540e-01 -2.32147515e-01 1.40445158e-01] [-2.60322571e-01 4.44218755e-01 4.04675126e-01 ... 3.99965271e-02 -1.44035062e-02 -1.09719194e-01]] [[ 4.38819110e-01 1.28434613e-01 -3.84416550e-01 ... 
-1.98376730e-01 1.36782169e-01 2.43733123e-01] [-1.20830899e-02 3.73462290e-01 -1.13149621e-01 ... 1.47220835e-01 1.86314240e-01 -2.41397128e-01] [-2.89568137e-02 -5.02050333e-02 -4.88012359e-02 ... -6.77485839e-02 2.63925850e-01 3.78099494e-02] ... [ 5.11008166e-02 2.05214366e-01 1.73519850e-01 ... 5.54201245e-01 1.53144822e-01 -3.94126594e-01] [-1.65866151e-01 -2.19023332e-01 -3.50153089e-01 ... 1.98568672e-01 5.98348826e-02 -4.12720591e-01] [-3.43436748e-02 3.64522636e-01 8.72343779e-02 ... 3.95622477e-02 -5.31341255e-01 3.56788933e-01]]] [[[ 1.86321497e+00 -1.16186345e+00 1.25482559e+00 ... -6.41765371e-02 1.26767659e+00 1.32364720e-01] [-1.73819757e+00 1.25878191e+00 2.63956904e-01 ... -1.97769612e-01 -7.08405793e-01 1.24164037e-01] [-9.16496456e-01 -1.48537695e-01 8.81690204e-01 ... -2.36038399e+00 -1.33116221e+00 1.06529462e+00] ... [ 2.20494676e+00 -9.35562730e-01 -1.62127566e+00 ... 2.65688181e-01 -8.71573269e-01 9.77212265e-02] [-1.17458510e+00 1.38091266e+00 7.60722876e-01 ... -8.42314243e-01 -7.75072753e-01 -1.83129048e+00] [-2.95509279e-01 -8.35684299e-01 -1.33670434e-01 ... 5.89045323e-02 -1.30870119e-01 4.74979520e-01]] [[-2.48626113e+00 2.13578534e+00 -1.69269294e-01 ... 3.06634087e-04 9.06509519e-01 -1.58677912e+00] [ 2.24195316e-01 -9.63824615e-02 -1.23141682e+00 ... 4.91442800e-01 2.11733773e-01 -1.26010273e-02] [ 1.03109276e+00 -4.36616480e-01 7.35703647e-01 ... -4.86469299e-01 -8.69381666e-01 4.48748589e-01] ... [-1.38153708e+00 2.09649849e+00 4.50344324e-01 ... -5.82974255e-01 -4.82949406e-01 -2.67535424e+00] [ 6.05955184e-01 -2.45262042e-01 -7.66163170e-01 ... -1.17294896e+00 -9.67389047e-02 -4.89620239e-01] [ 5.29801957e-02 -1.15599871e+00 -1.00102711e+00 ... -1.26426613e+00 -7.10966527e-01 -5.80453038e-01]] [[ 3.58486265e-01 1.95997202e+00 -1.02989864e+00 ... 7.80554628e-03 -8.48286450e-02 4.32888597e-01] [-3.45307559e-01 -5.92507124e-01 -8.04870725e-01 ... 
-5.17083228e-01 1.37424481e+00 -1.72979248e+00] [-1.86721057e-01 3.29585791e-01 5.03952384e-01 ... -3.51822406e-01 -7.04667628e-01 -1.03221571e+00] ... [-7.10917771e-01 -1.00600779e+00 -8.19745481e-01 ... 7.83664703e-01 -1.25021935e+00 7.31745541e-01] [ 6.29298389e-01 1.89674711e+00 7.73634553e-01 ... -4.91419323e-02 7.22388476e-02 1.36222720e-01] [-3.66509467e-01 1.15998924e+00 1.26091516e+00 ... -3.32737279e+00 4.98784274e-01 -8.20415378e-01]] ... [[ 1.35434783e+00 1.23124814e+00 -6.68448985e-01 ... 8.65013182e-01 1.17614460e+00 -3.76739055e-01] [-1.25942588e+00 -1.07576060e+00 -5.01960933e-01 ... -1.42219830e+00 2.47844476e-02 1.36414039e+00] [-1.18879187e+00 4.22974706e-01 2.09680036e-01 ... 4.52352554e-01 -9.08443451e-01 3.38807672e-01] ... [ 1.04393929e-01 -7.12477267e-01 -9.22699124e-02 ... 4.38298553e-01 -1.86764359e+00 -9.88506496e-01] [-1.19723940e+00 -1.00415719e+00 -1.42936575e+00 ... -1.45803779e-01 -2.82249421e-01 3.17566991e-01] [ 1.03567183e+00 1.62244871e-01 -2.96700168e+00 ... -7.82224715e-01 -1.25265825e+00 -1.53525658e-02]] [[ 3.39940786e-01 -3.27809155e-02 1.43332958e+00 ... 7.15190470e-01 4.48440701e-01 -7.76558101e-01] [ 1.22045171e+00 1.98800099e+00 -4.78155971e-01 ... 5.51939309e-01 -7.68439293e-01 -1.32393324e+00] [ 4.76316422e-01 8.82080853e-01 1.82152188e+00 ... -2.26848572e-01 1.07368112e+00 -4.26158637e-01] ... [-3.04110169e-01 -5.92005968e-01 -1.01510501e+00 ... -1.89817071e-01 -7.70150244e-01 -7.57873595e-01] [ 6.24542654e-01 4.50835884e-01 -2.53929067e+00 ... -3.40314239e-01 1.39087236e+00 -1.10618055e+00] [-5.90584695e-01 5.61974287e-01 -1.48325965e-01 ... 6.35562479e-01 8.91363770e-02 1.63743019e+00]] [[ 7.43187964e-01 -1.59399176e+00 1.77611279e+00 ... 1.12280047e+00 1.61714292e+00 2.16374755e+00] [-1.20071745e+00 7.81484365e-01 1.00424374e-03 ... -1.33977854e+00 -1.26102495e+00 -1.00620195e-01] [ 1.16539085e+00 -5.59094965e-01 -1.31015778e+00 ... -9.87835228e-02 -8.03219020e-01 1.41812611e+00] ... 
[-5.59079170e-01 4.81665403e-01 -2.17039323e+00 ... -1.63712811e+00 9.83887136e-01 -8.35140407e-01] [-5.98870926e-02 -1.03590214e+00 -4.08948958e-02 ... 7.18594372e-01 2.60867745e-01 4.12890136e-01] [-6.40489280e-01 1.59948790e+00 -1.22974038e+00 ... -1.26052618e-01 8.74688983e-01 -8.41111168e-02]]] [[[-4.90760922e-01 1.01961660e+00 8.59495401e-01 ... -6.52101815e-01 5.17936826e-01 -9.34037089e-01] [ 3.65609497e-01 1.56379834e-01 -2.34951437e-01 ... -2.70279981e-02 6.53776824e-01 1.66046038e-01] [ 1.73628539e-01 -6.08978271e-01 -2.36421540e-01 ... 2.92123497e-01 2.75593877e-01 -9.79798317e-01] ... [-1.99718058e-01 -2.50322949e-02 2.04653993e-01 ... 5.89174807e-01 1.17985383e-02 -8.88560116e-01] [ 1.29891753e-01 -4.15744096e-01 -9.19650018e-01 ... 6.43109977e-01 -8.18966448e-01 4.37960654e-01] [ 5.50603457e-02 8.87201071e-01 7.69214749e-01 ... 3.39788616e-01 -7.04824254e-02 -1.74773082e-01]] [[-2.07134634e-01 1.48252785e-01 3.27214688e-01 ... 3.52733195e-01 5.73441386e-01 -1.17386363e-01] [ 5.66005170e-01 -1.75937399e-01 -8.89604446e-04 ... -4.54341888e-01 4.38851684e-01 9.17891145e-01] [ 6.71113253e-01 -1.80549119e-02 1.85180098e-01 ... -6.37645960e-01 -2.83973008e-01 8.30826998e-01] ... [-5.38688004e-01 5.38999677e-01 7.47961342e-01 ... -5.01035213e-01 1.43479466e-01 -4.79079336e-01] [ 5.77460006e-02 -1.64335534e-01 -1.83607228e-02 ... -4.48265851e-01 2.93164968e-01 -4.52588767e-01] [-2.58257389e-01 -5.01729667e-01 1.08941925e+00 ... -2.38704041e-01 -3.88477534e-01 -1.70532495e-01]] [[ 7.86130488e-01 5.63426495e-01 -7.55097091e-01 ... -1.74533889e-01 5.90789080e-01 -7.36809909e-01] [ 4.90535110e-01 1.64342999e-01 -1.87394321e-01 ... 2.85852462e-01 -2.72460103e-01 -5.16274571e-01] [ 7.59130836e-01 -6.96371123e-02 3.37856919e-01 ... 6.69871628e-01 4.20989059e-02 6.74122153e-03] ... [-1.15017474e+00 1.64573431e-01 -3.83963346e-01 ... -2.67756432e-01 -7.55673870e-02 4.20846760e-01] [-6.52630150e-01 8.32630038e-01 -3.15244799e-03 ... 
-2.39228234e-01 -2.26314217e-01 1.88621178e-01] [ 6.97584689e-01 6.61149397e-02 8.64132773e-03 ... 7.20409751e-01 -5.36739469e-01 -3.34836423e-01]] ... [[-1.21806361e-01 4.39494461e-01 6.04285479e-01 ... -1.08406328e-01 1.75294578e-02 -2.98154622e-01] [ 7.93101728e-01 -2.51961678e-01 -8.11456665e-02 ... -1.19409934e-01 -2.14544967e-01 -4.78302181e-01] [ 7.64935374e-01 5.71921952e-02 -1.38955474e-01 ... 4.67908591e-01 9.78825688e-02 -3.68964791e-01] ... [ 4.37675983e-01 5.47380328e-01 -1.88976541e-01 ... -7.18137994e-03 1.22246249e-02 -7.00808465e-01] [-3.29695731e-01 1.07129824e+00 -6.61216438e-01 ... -9.59681213e-01 2.93104708e-01 4.60881829e-01] [ 6.83002323e-02 -3.54569197e-01 -5.49653471e-01 ... 2.89914738e-02 -5.84875494e-02 -3.54226172e-01]] [[-8.40164781e-01 4.70085412e-01 -3.78024043e-03 ... -2.14084238e-01 -2.32624829e-01 -1.10581361e-01] [ 6.63670659e-01 1.06913650e+00 -9.47634816e-01 ... -1.26267397e+00 8.00918579e-01 5.06635010e-01] [ 3.25173110e-01 2.39869371e-01 -5.70565701e-01 ... -6.79580152e-01 -3.46641630e-01 5.61372161e-01] ... [-8.04976583e-01 -1.84133843e-01 3.32195789e-01 ... 3.96820605e-01 9.91375148e-02 -3.70418251e-01] [ 3.16797465e-01 -4.23975229e-01 -7.91207194e-01 ... 9.47088078e-02 -1.53509062e-02 1.39518619e+00] [-3.97232413e-01 -1.26125410e-01 1.72205433e-01 ... -2.84978282e-02 -2.90029526e-01 3.41943055e-01]] [[-2.75412768e-01 -6.10550165e-01 6.12334728e-01 ... 2.46807069e-01 1.16919063e-01 -1.68207750e-01] [ 5.88972986e-01 4.13741678e-01 9.28095698e-01 ... -3.84099543e-01 -2.68167436e-01 4.03859437e-01] [-5.50299287e-01 3.54632467e-01 -4.90238816e-02 ... -7.40283549e-01 5.80751181e-01 -4.90855306e-01] ... [ 9.14344937e-02 -7.59423852e-01 -9.76707578e-01 ... 6.79767787e-01 -8.96370187e-02 8.89231980e-01] [-8.40003312e-01 1.44343212e-01 -9.18003500e-01 ... -2.35902637e-01 -5.62560260e-01 4.82829213e-01] [ 2.85809427e-01 -4.12029535e-01 -3.87704492e-01 ... 1.51345775e-01 3.51247378e-02 -5.55166066e-01]]]] ... 
[[[[-3.10453236e-01 -5.48792005e-01 1.90699428e-01 ... 1.97405398e-01 -6.96315706e-01 6.52595377e-03] [-1.05577171e+00 1.50280869e+00 -1.41372287e+00 ... 9.30877030e-02 -2.40650326e-01 4.29085940e-01] [ 8.11260581e-01 1.76124305e-01 -1.74439085e+00 ... -3.27786177e-01 3.21730673e-01 -1.46728635e-01] ... [ 6.49380386e-01 -6.94187164e-01 -7.88462281e-01 ... 1.27231050e+00 -5.93229651e-01 1.50171769e+00] [-1.62000418e-01 5.65200925e-01 7.27153867e-02 ... -1.07340539e+00 -6.25359118e-01 -6.78446770e-01] [ 3.00133824e-01 5.45407057e-01 9.78396460e-02 ... 5.94848514e-01 -7.11476624e-01 3.35972667e-01]] [[-5.33026099e-01 9.11291122e-01 9.60399568e-01 ... 1.03322916e-01 3.61443371e-01 -1.01944399e+00] [-7.54925787e-01 5.75282097e-01 -8.14694881e-01 ... 3.00388992e-01 7.04101324e-02 7.47444808e-01] [-1.47483754e+00 7.53218949e-01 -3.85990113e-01 ... -3.32033753e-01 -3.08057189e-01 -9.56161320e-01] ... [ 7.99408674e-01 7.07253516e-01 -6.88760936e-01 ... 6.68721378e-01 -1.02585125e+00 3.74236375e-01] [ 3.99324596e-01 5.82234442e-01 1.09243274e-01 ... 6.11746490e-01 5.46564758e-01 -4.61131297e-02] [ 4.53375727e-01 4.84091282e-01 -7.96897829e-01 ... 6.61098957e-01 -7.83033192e-01 -8.49344075e-01]] [[ 8.91188860e-01 1.13027737e-01 3.68733585e-01 ... -3.20129335e-01 -4.90244385e-03 -3.21864098e-01] [-7.66619682e-01 5.37928939e-02 6.85585678e-01 ... 3.47635537e-01 5.60297787e-01 -5.55528760e-01] [-1.05764914e+00 6.83428109e-01 1.12075961e+00 ... -1.69260657e+00 4.30935994e-02 -2.61602830e-02] ... [ 9.26339328e-01 -2.93772876e-01 -7.54863203e-01 ... -1.16260850e+00 9.61168468e-01 -1.06565106e+00] [ 1.07234299e+00 -1.12412000e+00 1.07489812e+00 ... -5.95427275e-01 -9.13085267e-02 -1.06537795e+00] [-9.88230109e-02 5.73780417e-01 2.01652572e-01 ... 9.38739955e-01 -1.10993457e+00 -1.39768529e+00]] ... [[-2.89063662e-01 1.98912889e-01 1.26709723e+00 ... -2.48162031e-01 -6.86139822e-01 1.39933813e+00] [ 3.73729646e-01 -1.46195805e+00 -3.52652892e-02 ... 
-1.06971574e+00 9.00306642e-01 -1.19167435e+00] [ 1.44541574e+00 -1.25104225e+00 4.10424769e-01 ... -3.85387719e-01 -9.44645144e-03 -5.48195481e-01] ... [-1.00356765e-01 1.21940124e+00 1.86542794e-01 ... 1.25147092e+00 -1.00563741e+00 -4.32061285e-01] [ 1.00087535e+00 7.35618293e-01 1.65724599e+00 ... 4.70128834e-01 -1.01038647e+00 4.55130696e-01] [-1.02335647e-01 8.46223295e-01 1.02827601e-01 ... -1.20166540e-01 4.42291617e-01 8.41548681e-01]] [[-1.62754562e-02 5.11693992e-02 3.16740349e-02 ... -2.70872921e-01 2.45798573e-01 -3.72697651e-01] [ 7.69742370e-01 1.23830445e-01 -9.97186482e-01 ... -1.35545659e+00 -5.09894550e-01 -3.41642022e-01] [-4.04268861e-01 4.24347997e-01 -2.48641998e-01 ... -6.22994423e-01 5.45521140e-01 1.81499258e-01] ... [ 2.01570272e-01 6.99234188e-01 8.15549672e-01 ... 2.32073829e-01 9.34439778e-01 2.43735015e-01] [-2.32335865e-01 -6.06133878e-01 -1.95369169e-01 ... 3.60815048e-01 -4.47417796e-01 1.79660603e-01] [-1.44039953e+00 4.79905128e-01 6.13217771e-01 ... -1.58363521e-01 4.85952944e-02 1.30430982e-01]] [[ 4.99029517e-01 7.26271123e-02 7.13106468e-02 ... -3.92101139e-01 3.92098188e-01 -1.90089449e-01] [-1.52544722e-01 -6.30024970e-01 -3.43190491e-01 ... 1.49966395e+00 2.71631837e-01 -2.74605095e-01] [-9.10213768e-01 -5.14421880e-01 -4.09944475e-01 ... -4.07644272e-01 -5.69482863e-01 1.48407638e+00] ... [-1.11881435e+00 -3.79945375e-02 -1.42929745e+00 ... 9.11756217e-01 -1.27510631e+00 -1.37803435e-01] [ 5.15560746e-01 -5.28826356e-01 -6.93120837e-01 ... -6.95947945e-01 4.13971961e-01 -3.55643392e-01] [-3.54462564e-01 1.47017747e-01 1.07066996e-01 ... -2.69109994e-01 4.94999260e-01 -1.39787340e+00]]] [[[ 3.75414371e-01 5.23906231e-01 -1.52475178e-01 ... 2.92025860e-02 -1.92060933e-01 -5.26091099e-01] [-1.10126980e-01 -4.67858642e-01 -3.39429587e-01 ... -6.83039188e-01 1.99736208e-01 -1.15822487e-01] [ 8.48147646e-02 1.62438005e-01 5.19159496e-01 ... -6.09151125e-01 -2.53775537e-01 5.84708989e-01] ... 
[-5.78576215e-02 -1.89030200e-01 1.33531094e+00 ... -5.77955425e-01 3.14523548e-01 -6.06750667e-01] [-5.05227089e-01 -7.37433657e-02 -6.20881855e-01 ... -1.56230301e-01 -3.29278767e-01 5.97513244e-02] [ 4.38141048e-01 3.20099384e-01 -6.33884728e-01 ... 6.38842881e-01 -8.86777520e-01 -4.37171489e-01]] [[ 6.88441023e-02 -5.53139865e-01 -2.19803289e-01 ... -8.18792656e-02 3.47937465e-01 -4.32003945e-01] [-1.67598352e-01 -1.77092373e-01 3.68291587e-01 ... 7.12724328e-01 5.83755732e-01 1.12477399e-01] [-3.12550128e-01 3.05286348e-01 9.87684131e-02 ... -2.84135580e-01 -3.24368417e-01 -4.52636808e-01] ... [-1.26018748e-01 7.95530915e-01 -2.92338759e-01 ... 5.12711108e-01 7.68706322e-01 8.43696475e-01] [ 8.06899592e-02 3.54370534e-01 9.21426296e-01 ... 5.33739269e-01 2.40555510e-01 -6.74165413e-02] [-1.01748206e-01 -7.21799314e-01 1.30185381e-01 ... 1.17241338e-01 3.96111131e-01 2.14405164e-01]] [[-3.12610328e-01 4.62099701e-01 6.01666152e-01 ... -1.41524911e-01 -1.64513230e-01 -3.96560431e-01] [-9.46559086e-02 2.80190200e-01 1.44710094e-01 ... -2.13979363e-01 2.98164904e-01 -3.41211289e-01] [-3.57517838e-01 -2.82432824e-01 -5.97682059e-01 ... 1.72325969e-01 4.45005119e-01 -5.02019286e-01] ... [-6.65723503e-01 -1.84618041e-01 -2.83846229e-01 ... -3.12328245e-02 8.05178583e-01 -4.81094778e-01] [-2.41032779e-01 3.29472087e-02 6.37114525e-01 ... -1.51564442e-02 1.32855237e-01 3.86892587e-01] [ 3.17702383e-01 2.67763644e-01 1.51263013e-01 ... -2.10424900e-01 -1.76546589e-01 1.97189108e-01]] ... [[-3.69418740e-01 5.52214026e-01 4.72789019e-01 ... -1.80113744e-02 7.49394596e-02 1.20022796e-01] [-1.16237849e-01 3.53625357e-01 -2.70915359e-01 ... -3.15778732e-01 6.95571840e-01 3.08389422e-02] [ 2.59625584e-01 -2.42778569e-01 -4.27105784e-01 ... 5.20619750e-01 -1.10879928e-01 2.54053380e-02] ... [-5.60110174e-02 -4.70333593e-03 -6.11231446e-01 ... 4.07966793e-01 -1.09999090e-01 5.15855253e-01] [-4.09194142e-01 -7.40319729e-01 -5.49926832e-02 ... 
-4.57141131e-01 -3.01117599e-01 4.61157896e-02] [ 1.46133229e-01 5.58215380e-01 -3.53829682e-01 ... 1.03432000e-01 -2.33976349e-01 7.78402984e-02]] [[ 3.75753969e-01 2.51685262e-01 2.79696226e-01 ... -1.87489152e-01 6.43780231e-02 -3.83867502e-01] [-4.63447899e-01 8.21097195e-02 4.99131829e-02 ... -5.89528494e-02 3.25381309e-01 1.85828581e-01] [ 2.29660407e-01 5.60159624e-01 -4.87954736e-01 ... -7.03090103e-03 1.28954887e-01 1.36865795e-01] ... [ 1.14026718e-01 -8.33641067e-02 -5.83186783e-02 ... 5.18932007e-02 -9.10454616e-02 1.14172481e-01] [ 4.78842780e-02 3.95404428e-01 -2.44913980e-01 ... -1.52882442e-01 -1.18114188e-01 1.57963172e-01] [-2.44043216e-01 -4.68990117e-01 -2.04078928e-01 ... 4.70992804e-01 9.84230358e-03 8.83647740e-01]] [[-5.24124026e-01 4.86060917e-01 9.68827546e-01 ... -2.40616277e-01 2.54300624e-01 -6.60714865e-01] [ 8.52206349e-02 5.97176589e-02 4.56411391e-01 ... -1.30166456e-01 3.55465114e-01 -2.28036270e-01] [ 9.08595175e-02 -3.96970302e-01 2.37150770e-02 ... 5.51297404e-02 6.35667920e-01 5.42139053e-01] ... [ 1.36340931e-01 6.39666850e-03 2.54499048e-01 ... 2.56707668e-01 -1.20900497e-01 4.12000895e-01] [ 2.64561474e-01 6.57561645e-02 -2.85212427e-01 ... 8.42907727e-01 9.15697142e-02 5.22208074e-03] [ 5.43136775e-01 -5.08487403e-01 5.37227213e-01 ... 6.97965026e-01 2.84546137e-01 3.44155103e-01]]] [[[-1.44200623e+00 3.86409432e-01 -7.92378128e-01 ... 9.93299410e-02 2.00546455e+00 6.60488784e-01] [ 4.76905763e-01 3.68662402e-02 -1.18046248e+00 ... -1.83828473e+00 -6.50543809e-01 5.08519650e-01] [ 1.08089674e+00 -2.54257798e+00 1.09722424e+00 ... 1.72999930e+00 5.77749252e-01 -2.40166974e+00] ... [ 1.17143905e+00 1.05205035e+00 6.57522678e-01 ... -9.98994708e-01 -1.54451180e+00 -4.22503293e-01] [-2.03505683e+00 -2.44757581e+00 1.55542457e+00 ... -3.36376101e-01 8.37456524e-01 -3.08049226e+00] [-5.62487364e-01 1.27167284e+00 3.62739325e-01 ... 
-2.59226155e+00 -1.20505571e+00 2.85462523e+00]] [[ 1.40163028e+00 1.49309897e+00 -5.12076259e-01 ... 2.63215929e-01 3.36302489e-01 7.59266376e-01] [-1.34839082e+00 -1.40726149e+00 2.50546002e+00 ... -2.28129864e+00 2.97651350e-01 5.74593842e-01] [-8.39830756e-01 4.95383292e-01 -1.38307607e+00 ... 2.24820286e-01 -1.20558286e+00 -1.67338920e+00] ... [-1.18870282e+00 1.18811764e-01 -8.05705070e-01 ... -2.72665691e+00 -9.71080661e-01 -1.54732418e+00] [-6.70842826e-01 2.71038301e-02 -1.97094965e+00 ... -7.30793357e-01 -8.20391953e-01 -4.73147124e-01] [-2.10090899e+00 -8.73414457e-01 1.04510331e+00 ... -3.80425632e-01 7.28919208e-01 2.54796535e-01]] [[-5.82620025e-01 4.35153365e-01 -1.34856606e+00 ... -2.15127729e-02 -2.03316474e+00 -8.05399790e-02] [ 6.76821098e-02 6.88157305e-02 1.94382632e+00 ... 4.60997909e-01 -1.03517997e+00 -1.27160931e+00] [-7.04615951e-01 -1.45180726e+00 -9.75921080e-02 ... 8.63108099e-01 1.72190034e+00 -1.44264972e+00] ... [-7.94670880e-01 9.52536225e-01 -8.51868689e-01 ... -1.20693088e+00 -4.65409309e-02 -1.70719221e-01] [ 1.84849147e-02 9.10132289e-01 2.06057167e+00 ... 7.93625534e-01 -5.60648322e-01 -1.48012027e-01] [-8.04026604e-01 1.21388137e-01 1.08890712e-01 ... 9.59528863e-01 1.71502233e+00 -1.61287725e+00]] ... [[-9.94788826e-01 9.79564041e-02 -1.39834106e-01 ... 2.09869599e+00 2.96030074e-01 4.70639974e-01] [-1.14040899e+00 -2.28465363e-01 -1.39800012e+00 ... -3.47402871e-01 -9.46882844e-01 -1.61680245e+00] [ 8.45611095e-01 -8.51787105e-02 1.21521270e+00 ... -1.96570468e+00 1.47276625e-01 -9.59703267e-01] ... [-4.76038754e-01 2.87312597e-01 1.43482232e+00 ... -6.98378384e-01 6.76934540e-01 2.83208668e-01] [-4.24119145e-01 1.17101920e+00 7.30881810e-01 ... -6.37798548e-01 -1.41826200e+00 -6.26169443e-01] [-3.37092012e-01 1.32885307e-01 1.18603599e+00 ... 4.82417464e-01 -1.17998791e+00 -8.99210155e-01]] [[ 8.10410738e-01 4.04774010e-01 -9.55643892e-01 ... 
-4.62764144e-01 -1.11870861e+00 -1.00539434e+00] [ 1.09194696e+00 7.77713358e-01 -3.79944491e+00 ... 6.85698748e-01 5.22893727e-01 1.75884545e+00] [ 2.29416311e-01 2.56829441e-01 -6.76002920e-01 ... -1.69238269e+00 -8.66952464e-02 -1.93719244e+00] ... [ 1.93137348e-01 -3.66472095e-01 1.46950698e+00 ... 6.29408360e-01 8.92501324e-02 -7.10854411e-01] [-3.07941400e-02 2.66661793e-01 -3.47678028e-02 ... -7.46649683e-01 -1.59923851e+00 1.59848607e+00] [-1.13946581e+00 -1.09544146e+00 -1.35885119e+00 ... 9.94031668e-01 -4.74392802e-01 6.69829905e-01]] [[ 1.39479160e+00 6.26292676e-02 1.06358266e+00 ... 1.26908720e+00 7.25575611e-02 9.72209990e-01] [ 8.13927054e-01 1.10354996e+00 -1.41834736e+00 ... -5.37688076e-01 -1.24194615e-01 1.13101184e+00] [-6.32847250e-01 -1.17499065e+00 6.87194824e-01 ... -3.37642193e-01 -1.26875890e-02 -1.97370803e+00] ... [ 8.59051108e-01 1.84518063e+00 -9.33155656e-01 ... 8.67963672e-01 3.84471595e-01 2.11786318e+00] [-1.16988933e+00 3.43685806e-01 4.78023767e-01 ... 2.22271997e-02 -4.21816975e-01 -3.69824499e-01] [-5.43445528e-01 7.91525543e-01 7.46063054e-01 ... -1.75955296e+00 -8.76306072e-02 1.36829114e+00]]] [[[-9.55127329e-02 -2.87743926e-01 1.15449302e-01 ... -3.64264965e-01 1.13561280e-01 1.46592511e-02] [ 6.73432574e-02 -2.21708547e-02 8.21200237e-02 ... 4.58510220e-01 2.29412364e-03 -1.73659846e-01] [ 1.30518228e-01 -4.86338744e-03 -1.57443896e-01 ... -1.00732349e-01 -8.57702494e-02 -1.53271332e-01] ... [ 1.04666375e-01 5.37690163e-01 3.74576360e-01 ... 4.54693940e-03 -4.04128611e-01 -3.24518114e-01] [-1.30680040e-01 6.08103499e-02 -1.48339272e-01 ... -2.64900714e-01 -9.13597345e-02 9.14782807e-02] [-2.52153158e-01 -1.61235571e-01 -1.36214733e-01 ... 2.28028327e-01 5.20342171e-01 2.41195530e-01]] [[ 1.66076005e-01 7.58436099e-02 3.81483942e-01 ... 1.43676490e-01 -5.94012067e-02 1.37754856e-02] [-1.58577114e-02 5.02094626e-02 -2.70035286e-02 ... 
1.61779821e-02 -3.12482476e-01 2.94276893e-01] [ 3.15587103e-01 -2.86010979e-03 1.78969756e-01 ... 2.22689770e-02 -5.84091097e-02 -8.07272419e-02] ... [-1.93044711e-02 -1.32714882e-01 2.22622789e-02 ... 2.36785308e-01 6.32340491e-01 1.83374614e-01] [-2.83873796e-01 2.11417764e-01 -4.21779841e-01 ... -3.32236364e-02 2.07648620e-01 -4.31039900e-01] [-8.95444453e-02 -1.25643715e-01 -3.34371686e-01 ... 4.84728307e-01 -9.25976709e-02 2.19527483e-01]] [[-1.45251632e-01 -2.86654364e-02 -2.60108747e-02 ... 2.53066290e-02 -7.35870227e-02 -3.50998528e-02] [-1.28417835e-01 2.91073233e-01 -2.65041143e-01 ... -5.07585227e-01 -9.65070128e-02 3.07838231e-01] [-3.42596829e-01 -2.55306214e-01 -7.54933059e-02 ... 6.15688860e-02 -7.28362650e-02 -2.36145705e-01] ... [ 6.47104234e-02 2.25245094e-04 4.10802543e-01 ... -2.46688873e-01 -1.64094910e-01 1.46418393e-01] [-3.89926255e-01 -4.27346259e-01 3.46428990e-01 ... 2.83959955e-01 -3.32975447e-01 4.60509390e-01] [ 3.55072230e-01 5.06983325e-02 2.06378032e-03 ... -2.11855873e-01 -1.08806327e-01 -1.66232824e-01]] ... [[ 9.68307331e-02 -1.86836347e-01 1.89285606e-01 ... -1.09468490e-01 3.10221195e-01 -2.92705595e-01] [ 3.18290472e-01 -3.61519337e-01 2.10022211e-01 ... 1.05415266e-02 1.34911478e-01 1.73192471e-01] [-2.17933923e-01 1.37757817e-02 7.71225290e-03 ... 9.75890756e-02 -2.63271123e-01 2.58800447e-01] ... [ 6.50727972e-02 4.23695780e-02 7.27447914e-03 ... 1.50151059e-01 -2.34785020e-01 -1.28165781e-01] [-9.54306200e-02 1.02531791e-01 1.66933343e-01 ... 3.92775059e-01 -1.08412407e-01 -4.61229861e-01] [ 2.82899916e-01 -2.31954232e-01 1.80908427e-01 ... 1.71275049e-01 1.59445047e-01 7.72638470e-02]] [[-3.37165415e-01 -2.29885146e-01 -3.47800702e-01 ... 8.08435455e-02 -1.30814770e-02 -2.12120607e-01] [ 4.00163859e-01 -2.09854916e-01 4.31990139e-02 ... -7.19435364e-02 8.29350725e-02 -1.28997028e-01] [-1.82358604e-02 -1.70286164e-01 8.38937983e-02 ... 4.51431036e-01 -1.65644035e-01 3.80656511e-01] ... 
[-3.10464859e-01 -3.21129501e-01 -2.26929858e-01 ... -1.35071069e-01 -9.92850214e-02 1.92877889e-01] [-4.28824089e-02 -9.84705687e-02 -7.00979903e-02 ... -2.23944187e-01 -2.27798715e-01 1.85819581e-01] [-2.02418029e-01 9.43996012e-02 1.63907796e-01 ... 4.24024612e-02 -1.45595148e-01 -6.90363571e-02]] [[-2.57756393e-02 -2.00717479e-01 -5.89637935e-01 ... 5.23677170e-02 7.74970427e-02 3.64446454e-02] [-1.53733790e-01 -5.00699759e-01 -1.26038074e-01 ... 2.82044083e-01 -4.02830392e-02 9.91226658e-02] [ 5.28191440e-02 1.39243305e-02 1.39613926e-01 ... -5.84523119e-02 -9.96525139e-02 1.84430420e-01] ... [-2.45022029e-01 -3.13385397e-01 -3.28044891e-01 ... -3.39120805e-01 -2.55825277e-02 -1.60070360e-01] [ 2.36537188e-01 -2.18919113e-01 1.69453815e-01 ... -7.82846734e-02 -4.32547182e-03 2.62510702e-02] [ 5.01718640e-01 -2.01240644e-01 -4.34423774e-01 ... -2.67762899e-01 8.15743804e-02 3.12538058e-01]]] [[[-9.27672029e-01 -2.09716916e-01 -5.20057440e-01 ... -2.31822014e-01 3.32200825e-01 2.25278711e+00] [ 2.00125024e-01 4.68473464e-01 -2.02138877e+00 ... 1.86923790e+00 -9.34854090e-01 -2.89284915e-01] [-1.15661502e+00 -1.59607959e+00 -1.54405463e+00 ... 1.33650780e+00 1.88320488e-01 -1.41507789e-01] ... [ 1.47114825e+00 1.43414402e+00 -3.79024673e+00 ... -6.99644446e-01 -3.61953020e-01 4.50984478e-01] [-7.11134553e-01 1.72741134e-02 -1.51340163e+00 ... 1.13251901e+00 -2.15955973e+00 1.91047859e+00] [-1.27995682e+00 3.00355375e-01 -2.46021390e-01 ... -5.27611598e-02 -3.44460636e-01 -3.73038977e-01]] [[ 5.29137552e-01 9.32196558e-01 -2.82223552e-01 ... -1.63797629e+00 1.02519119e+00 2.15850338e-01] [-3.08006257e-01 7.26717949e-01 -4.00647491e-01 ... 5.44079185e-01 -1.84705094e-01 -1.80818093e+00] [-1.35745668e+00 -3.90501171e-01 1.68954718e+00 ... -8.92509162e-01 1.57517064e+00 1.24691653e+00] ... [ 1.92724323e+00 3.69611621e-01 1.26762843e+00 ... 1.09913006e-01 -1.01130474e+00 -4.66052860e-01] [ 1.12775671e+00 2.31047168e-01 2.76524472e+00 ... 
-1.67938852e+00 -2.32081860e-01 2.12591678e-01] [-3.36060315e-01 2.65276504e+00 4.13246185e-01 ... -6.88961506e-01 2.89290667e+00 1.50682069e-02]] [[ 1.69176627e-02 -9.62299526e-01 -2.15284854e-01 ... -9.48201895e-01 6.11004591e-01 -1.15851533e+00] [-5.62927425e-02 -1.01348424e+00 2.21249342e+00 ... -5.08952975e-01 -1.47506034e+00 -1.29329273e-03] [-3.04168129e+00 -1.81419742e+00 -1.50843108e+00 ... -3.49842608e-01 -2.85286129e-01 2.39767957e+00] ... [-2.62511373e-01 6.34884894e-01 9.63126004e-01 ... -1.97440517e+00 -1.41975999e+00 5.30358434e-01] [-2.79326737e-01 -7.83572137e-01 -8.06365669e-01 ... -7.59923756e-01 -4.71635371e-01 -9.77466106e-01] [ 4.23439324e-01 5.59648514e-01 5.16008556e-01 ... -1.99907279e+00 1.10571861e+00 -2.21297359e+00]] ... [[-2.14760005e-01 1.65052485e+00 5.54970622e-01 ... -1.14498809e-01 -9.39373255e-01 -6.34360313e-01] [ 2.13935757e+00 1.30691862e+00 -1.45029795e+00 ... -4.75924581e-01 -3.82821530e-01 5.98529637e-01] [ 1.56789482e+00 1.37532270e+00 -9.66621220e-01 ... 1.48556486e-01 -8.22633266e-01 -1.47666201e-01] ... [-1.47328830e+00 1.47113729e+00 9.78247344e-01 ... 5.56185663e-01 1.19159496e+00 6.11007027e-02] [-1.99672604e+00 -1.16196752e+00 2.03184664e-01 ... -2.08736777e+00 -3.61058384e-01 1.63782537e+00] [-7.96285570e-01 -9.15965378e-01 -6.12001121e-01 ... 2.75771022e+00 -1.34024358e+00 -5.22281900e-02]] [[ 2.69585311e-01 -7.92099953e-01 1.38395736e-02 ... 8.57265830e-01 1.01602209e+00 7.24389106e-02] [-2.85170674e-01 5.63392401e-01 1.49397933e+00 ... -2.83025742e+00 -2.06933588e-01 4.04074490e-01] [ 1.16968262e+00 2.02420497e+00 -1.22719765e+00 ... 1.37267500e-01 -7.14418516e-02 1.77116096e+00] ... [-3.08993727e-01 -1.30118713e-01 1.71603382e+00 ... 1.96700335e-01 -7.42900372e-02 -9.27852035e-01] [-6.08863294e-01 -2.70709300e+00 1.31593382e+00 ... 2.54706562e-01 3.69280994e-01 -4.94932801e-01] [-1.50396454e+00 -1.57300723e+00 9.90481600e-02 ... 
6.33619606e-01 -5.13483226e-01 -1.24969220e+00]] [[-9.92187023e-01 5.74417353e-01 4.29596066e-01 ... -2.38190818e+00 2.03979135e+00 1.44418240e+00] [ 6.65263176e-01 -9.00648713e-01 -6.12216592e-01 ... 7.15916991e-01 -7.21790969e-01 -1.30249158e-01] [ 8.10858488e-01 1.97651565e+00 -1.76680356e-01 ... -1.14118254e+00 -8.13087285e-01 1.65729928e+00] ... [ 3.13206688e-02 -1.60611010e+00 1.59578168e+00 ... -6.11776352e-01 -1.74696282e-01 5.25799990e-01] [ 1.22886419e+00 -9.11563337e-01 -4.37869936e-01 ... 7.45850444e-01 -3.48987907e-01 -1.30060530e+00] [-3.37038368e-01 -4.94615257e-01 -3.92415464e-01 ... -1.43889815e-01 -1.05983126e+00 -5.60770452e-01]]] [[[-7.11329937e-01 1.86080113e-01 7.82000244e-01 ... 2.73899138e-01 -2.32822716e-01 6.70258701e-01] [ 1.82084292e-02 -2.93831378e-01 7.25081623e-01 ... -3.30419570e-01 4.85258371e-01 1.87595353e-01] [-2.73688674e-01 8.10309768e-01 -3.59794557e-01 ... 8.05185735e-01 4.53683764e-01 7.86100507e-01] ... [-7.26399422e-02 -4.97899652e-01 1.67608678e-01 ... 1.51301220e-01 4.95862067e-01 5.21681726e-01] [ 6.60454094e-01 3.06816567e-02 -7.62696117e-02 ... 1.04088974e+00 7.15255976e-01 5.85719287e-01] [-4.07561868e-01 -5.65278649e-01 -5.18618643e-01 ... -6.44931078e-01 7.29407519e-02 -5.68556666e-01]] [[-5.10518312e-01 -2.15138972e-01 6.44694984e-01 ... -4.43986565e-01 -2.42673084e-01 4.14031237e-01] [ 2.36695826e-01 -8.30715597e-01 -3.10797065e-01 ... -1.80749759e-01 -3.50663543e-01 -3.20359379e-01] [ 5.19697785e-01 -7.26099074e-01 -7.13429749e-01 ... 6.29514694e-01 3.30163479e-01 -2.87539333e-01] ... [-6.71338856e-01 -2.45901734e-01 -5.06791711e-01 ... 2.38501772e-01 -6.63792372e-01 3.31495613e-01] [ 1.38548684e+00 1.14375219e-01 5.19525886e-01 ... -1.70949891e-01 5.49672097e-02 -1.96504995e-01] [ 4.03571427e-02 -3.36955398e-01 1.01825273e+00 ... -3.02407324e-01 -4.93963093e-01 1.25653163e-01]] [[ 3.15933079e-01 1.61542282e-01 3.53020281e-01 ... 
2.16533378e-01 -1.53112123e-02 2.11377114e-01] [ 5.84462464e-01 -4.16309498e-02 3.00740957e-01 ... -1.31335288e-01 -2.49805465e-01 -1.27698854e-01] [-5.64758360e-01 -4.61038202e-01 -3.83484870e-01 ... 9.48280375e-03 -1.77329332e-01 5.57412624e-01] ... [-1.95417240e-01 2.40693182e-01 7.92135596e-02 ... -6.15499496e-01 -3.82480800e-01 -3.27119790e-02] [ 1.26870096e+00 -2.08474696e-01 -1.29897237e+00 ... 1.05570173e+00 -2.43556172e-01 3.26835901e-01] [ 6.23953640e-01 -8.40066150e-02 -7.20167579e-03 ... -8.80708814e-01 3.81957144e-01 -6.13525331e-01]] ... [[-1.44642696e-01 -1.01390839e+00 -1.28788602e+00 ... -2.56515145e-01 -3.15344743e-02 4.19782579e-01] [-3.20900589e-01 -5.06723642e-01 1.98036984e-01 ... 9.85026777e-01 5.80588996e-01 7.06621349e-01] [-5.67023605e-02 5.87431490e-01 5.41155577e-01 ... 1.93499541e+00 3.33864391e-02 -5.25040142e-02] ... [ 1.28828645e-01 -8.99154603e-01 1.70664504e-01 ... -1.71062216e-01 -9.05139968e-02 -3.51286262e-01] [ 8.26082006e-02 -2.34715387e-01 1.68731123e-01 ... 5.51034629e-01 -1.38381690e-01 -3.38958323e-01] [-5.09342372e-01 2.67263353e-01 -2.70451576e-01 ... 2.55748153e-01 3.82951647e-02 3.82323235e-01]] [[ 1.05547178e+00 -2.84878552e-01 4.36262190e-02 ... 8.85575294e-01 -6.97685719e-01 4.93931063e-02] [-2.48154312e-01 -2.65818566e-01 -1.11456044e-01 ... 7.98277736e-01 1.86454087e-01 4.35776532e-01] [ 3.33788842e-01 9.42563474e-01 6.23935759e-02 ... 3.02935839e-01 -6.43046200e-01 -4.69912179e-02] ... [-1.86515599e-01 -8.73308063e-01 -2.77799014e-02 ... 4.63716835e-01 -1.00320064e-01 7.58680701e-01] [ 5.26291609e-01 -1.86240599e-01 -3.06067586e-01 ... 9.78854716e-01 -5.79361141e-01 1.35003400e+00] [ 1.38511136e-01 1.01036298e+00 5.50639108e-02 ... 1.16733477e-01 8.97613466e-02 1.48894399e-01]] [[-6.50616050e-01 4.51049894e-01 6.02095664e-01 ... 1.16703296e+00 -8.67176354e-01 2.51568586e-01] [-8.05755675e-01 1.23297825e-01 -8.25841546e-01 ... 
8.50508869e-01 -6.92732692e-01 -5.36918223e-01] [ 8.48083973e-01 -5.95511436e-01 5.16834438e-01 ... 1.20011672e-01 8.27710569e-01 4.60492879e-01] ... [ 3.60370159e-01 -5.35148904e-02 -4.43570882e-01 ... -7.47373700e-01 -1.08823609e+00 1.32496095e+00] [-7.61394873e-02 -6.66230559e-01 -3.71638656e-01 ... -1.74360648e-02 2.67257337e-02 -3.71349037e-01] [-2.95044273e-01 5.57746112e-01 1.90894976e-01 ... -4.49062258e-01 6.31098449e-01 1.11830592e-01]]]] [[[[-1.32554967e-03 -7.48340368e-01 1.31904423e-01 ... 1.17166512e-01 3.86785828e-02 -1.82693228e-01] [ 7.75297344e-01 -5.25197983e-01 8.92103493e-01 ... 7.78181851e-01 -8.26290846e-01 -2.08023973e-02] [-1.19290614e+00 1.59168597e-02 4.88780409e-01 ... 4.68673468e-01 -1.49613535e-02 1.68566898e-01] ... [ 7.01644063e-01 1.47495091e-01 -7.25490674e-02 ... -7.93157518e-01 2.96538830e-01 -1.33093667e+00] [ 4.35759395e-01 2.59290904e-01 8.27863932e-01 ... 1.45737720e+00 -5.35013974e-01 -6.25216246e-01] [ 6.84548497e-01 1.99493837e+00 -3.39565873e-01 ... -5.26662529e-01 5.52042544e-01 7.24374831e-01]] [[ 6.22150421e-01 -1.81159988e-01 -4.28431988e-01 ... 2.85676807e-01 3.74252021e-01 -9.12753701e-01] [-7.85238683e-01 -2.51016080e-01 1.49411589e-01 ... 9.34293121e-02 -1.65692449e-01 -1.40550840e+00] [ 3.61015685e-02 -3.10905397e-01 -7.93789089e-01 ... 2.55170137e-01 -8.18480909e-01 -1.72744441e+00] ... [ 1.29435027e+00 8.75207067e-01 -7.46071458e-01 ... 4.78497714e-01 -1.28672624e+00 -3.55837554e-01] [ 8.93465877e-01 -8.80060852e-01 -3.52252305e-01 ... -2.18248069e-01 1.15327513e+00 3.64254117e-01] [ 3.05195957e-01 6.00535795e-02 3.16751063e-01 ... 1.03439248e+00 -6.04983211e-01 -4.03564543e-01]] [[-6.32630229e-01 -9.85596538e-01 -1.81327432e-01 ... -4.02274579e-01 -1.19102228e+00 2.74762958e-01] [-4.50807884e-02 3.68107855e-01 3.03430595e-02 ... 8.31393242e-01 -1.54624701e+00 -2.82520622e-01] [ 3.78277659e-01 -2.15601787e-01 -1.04615569e-01 ... -1.36559236e+00 -1.06070030e+00 1.02775797e-01] ... 
[-8.70421052e-01 -1.19771063e+00 -1.11184525e+00 ... -1.79299748e+00 -4.39111739e-01 1.52377057e+00] [-5.72503090e-01 2.32582435e-01 -9.97658849e-01 ... -1.42274916e+00 1.21440017e+00 -1.04727757e+00] [-4.32238400e-01 5.57094872e-01 5.27518153e-01 ... 7.55000785e-02 1.11951184e+00 -3.83488715e-01]] ... [[-1.99459696e+00 4.61801857e-01 1.14392436e+00 ... -4.47371043e-02 5.42673692e-02 1.06369042e+00] [-1.54670274e+00 1.32364595e+00 5.29812336e-01 ... -7.37558724e-03 -4.96942282e-01 -4.42330837e-01] [ 5.04567683e-01 -1.08463120e+00 -4.00301293e-02 ... 9.51777697e-02 1.68502122e-01 -3.71450871e-01] ... [-9.98766184e-01 -8.02507162e-01 -2.08295822e-01 ... 7.78165042e-01 -8.81714761e-01 -1.26598075e-01] [ 2.13830784e-01 1.44456327e+00 -8.36274251e-02 ... 6.08960569e-01 4.36589599e-01 4.65755284e-01] [-3.67265314e-01 5.35357237e-01 2.59503663e-01 ... 1.18398893e+00 -9.63719308e-01 6.08676910e-01]] [[ 1.04861689e+00 1.31023180e+00 4.80290689e-02 ... 3.02024603e-01 -7.98197925e-01 7.66304791e-01] [ 4.13054168e-01 4.41883385e-01 1.60131097e+00 ... 3.78717512e-01 2.29638636e-01 5.30722141e-01] [-1.29689527e+00 8.40897039e-02 -4.60823447e-01 ... -1.24222767e+00 -6.50361598e-01 5.69435656e-01] ... [ 2.32847869e-01 -7.81214833e-01 -6.44993842e-01 ... -1.13949406e+00 1.25232950e-01 9.85187948e-01] [ 6.99731886e-01 -1.98832914e-01 4.13182378e-01 ... -1.15985021e-01 1.68761835e-01 -2.04828337e-01] [ 7.88909674e-01 -1.20156378e-01 -5.01313806e-01 ... 6.99482620e-01 -6.47090018e-01 4.86575365e-01]] [[ 1.16045022e+00 -5.21380067e-01 -5.61481059e-01 ... 8.52126181e-01 -2.80105174e-02 6.98917508e-01] [-7.03041196e-01 3.72475117e-01 9.47261930e-01 ... -6.65613683e-03 -6.59334362e-01 -7.12694377e-02] [ 8.77838954e-03 -6.02630198e-01 -2.82142878e-01 ... 2.42671043e-01 1.86689436e-01 4.77818847e-01] ... [ 4.54563498e-01 -7.38670945e-01 -3.48544747e-01 ... 1.01126313e-01 -2.45220557e-01 -2.99910679e-02] [ 2.00251698e+00 -1.14047825e+00 8.79160166e-01 ... 
1.44129977e-01 -4.17618722e-01 -5.62755048e-01] [-4.68543261e-01 8.18024695e-01 -8.05734098e-01 ... -6.19037926e-01 -1.10811567e+00 3.93550277e-01]]] [[[-2.50831038e-01 -7.58829340e-02 -6.92912281e-01 ... -1.57167822e-01 -1.47733213e-02 3.80670428e-02] [-3.44350599e-02 -1.62252828e-01 -8.70460272e-01 ... -5.44429660e-01 2.66566947e-02 2.77054220e-01] [ 3.33431512e-02 1.86102018e-01 2.35822886e-01 ... -3.43191952e-01 2.34036535e-01 4.09473509e-01] ... [-5.61652005e-01 2.40521431e-01 -3.84766132e-01 ... 1.16919570e-01 6.97229922e-01 -1.09283037e-01] [-6.22406125e-01 1.73904449e-01 -6.08677328e-01 ... -2.32382178e-01 -4.79317814e-01 -2.53884435e-01] [-1.31976828e-02 2.46608436e-01 1.18349381e-01 ... -6.89584970e-01 -4.58497465e-01 -4.03368413e-01]] [[-2.65500665e-01 2.59455740e-01 -2.01870147e-02 ... 1.59928545e-01 6.13864586e-02 5.31438226e-03] [-2.02908888e-02 5.49557745e-01 1.24725997e-01 ... -3.36385220e-01 2.13349685e-02 2.61549383e-01] [ 6.15003183e-02 4.68070865e-01 -9.34008434e-02 ... 1.28309026e-01 -2.86405347e-02 -1.14078686e-01] ... [ 5.44907510e-01 -1.20727211e-01 2.80126976e-03 ... 1.04689389e-01 4.02701721e-02 9.33386236e-02] [-7.65701532e-01 9.72956270e-02 -3.66023660e-01 ... 6.00896299e-01 -5.85715830e-01 2.29136750e-01] [-3.22288275e-01 4.13035095e-01 -1.38460785e-01 ... -5.23956239e-01 2.33803913e-02 1.69495150e-01]] [[ 1.82658602e-02 -1.80466682e-01 -5.59813678e-01 ... -2.74499774e-01 1.89790428e-01 2.41001189e-01] [ 2.58131951e-01 1.91744611e-01 -1.57277301e-01 ... 4.35941041e-01 1.87737942e-02 -2.92695194e-01] [-6.76282942e-01 1.15108870e-01 -2.77539343e-01 ... 3.06031436e-01 -1.94176748e-01 -2.03281585e-02] ... [ 6.08268201e-01 6.05135620e-01 -8.49824622e-02 ... 4.27930981e-01 2.75125057e-01 9.52878669e-02] [-8.65076005e-01 3.93501818e-01 6.50659442e-01 ... -6.16243362e-01 -3.27058762e-01 5.18228829e-01] [-1.55624598e-01 -3.60301167e-01 -3.31548423e-01 ... 1.18813381e-01 1.65078163e-01 2.91801333e-01]] ... 
[[ 2.39957690e-01 3.04721355e-01 2.32662752e-01 ... -2.54510403e-01 -4.77553993e-01 2.14291692e-01] [ 8.81945565e-02 2.26585910e-01 -2.09726185e-01 ... 6.24096394e-01 2.96724945e-01 -2.22963184e-01] [-1.77403897e-01 -1.40022218e-01 -6.70948088e-01 ... -6.02886260e-01 -1.47280693e-01 -6.08986989e-02] ... [ 7.85089850e-01 -5.72985262e-02 -5.14665425e-01 ... -5.54299587e-03 1.23180941e-01 -2.11214676e-01] [-1.68799728e-01 4.15493011e-01 6.83894515e-01 ... 9.66452122e-01 2.16840491e-01 2.23728418e-01] [ 2.00280249e-01 -4.22826290e-01 -7.87462473e-01 ... 2.75442719e-01 -3.94172400e-01 2.59724736e-01]] [[-4.42228317e-01 -4.52341139e-01 2.80607641e-01 ... 1.23183012e-01 -2.77765155e-01 2.56063819e-01] [ 3.08545142e-01 5.15101179e-02 2.58457032e-03 ... -6.22640312e-01 4.30872351e-01 2.89259385e-02] [ 1.97402269e-01 -2.12160289e-01 -1.07353508e-01 ... -2.58728638e-02 3.31083506e-01 6.20371044e-01] ... [ 2.98157483e-01 -5.09256124e-01 6.76615655e-01 ... 8.42884406e-02 -5.41929722e-01 1.58875227e-01] [-2.31586341e-02 1.85373470e-01 -5.03604054e-01 ... -3.29136997e-01 6.45119650e-03 3.10276926e-01] [ 1.46929309e-01 -4.05227959e-01 1.54057279e-01 ... 2.24680513e-01 -9.35854688e-02 -1.48058236e-01]] [[-3.23121130e-01 -1.21388070e-01 1.76509529e-01 ... -3.56192201e-01 1.60509378e-01 1.85832813e-01] [-6.37310803e-01 -1.38043821e-01 -1.36082238e-02 ... -1.88235462e-01 3.25550488e-03 4.53889281e-01] [ 4.15219307e-01 1.13945603e-01 5.92262251e-03 ... 2.41360180e-02 1.83007121e-01 -4.32410419e-01] ... [-2.00192869e-01 1.82283372e-01 4.24794704e-01 ... -5.55439703e-02 2.00243518e-01 2.71946549e-01] [ 5.05395830e-01 3.24631423e-01 4.76264387e-01 ... 2.12326616e-01 -2.30138734e-01 -3.47057998e-01] [-1.31345317e-01 6.96469545e-01 -2.74873704e-01 ... -3.06310833e-01 1.42334193e-01 2.27832864e-03]]] [[[ 1.10933125e+00 -1.26179278e+00 3.82085741e-01 ... 2.84687430e-01 2.90680349e-01 3.00178081e-01] [-1.14081550e+00 -9.18906987e-01 -1.10570133e+00 ... 
1.16528809e+00 -3.12803268e-01 1.28196442e+00] [ 3.99273753e-01 6.27594173e-01 1.67413577e-01 ... 1.77030778e+00 -1.37243283e+00 2.21429300e+00] ... [-2.69685340e+00 6.47776723e-02 -1.24170673e+00 ... -1.85823190e+00 1.42407703e+00 1.62253439e+00] [ 2.66373587e+00 1.57144094e+00 -1.74696013e-01 ... 9.47195113e-01 -6.80843368e-02 8.41122687e-01] [-8.90721917e-01 -8.76908422e-01 -1.66031146e+00 ... -8.49047959e-01 -1.97617218e-01 -9.79757369e-01]] [[ 1.27555621e+00 -7.77568281e-01 -3.68003440e+00 ... 1.87031913e+00 8.21548164e-01 1.50983906e+00] [-1.45038351e-01 -7.13407815e-01 -8.67639631e-02 ... -3.39380920e-01 7.52031147e-01 1.22612846e+00] [ 8.78199518e-01 3.68936634e+00 1.71810710e+00 ... -1.10546696e+00 1.04430354e+00 -1.67579770e+00] ... [ 1.17724335e+00 -1.59664643e+00 -3.26076293e+00 ... 1.72693419e+00 -1.12887442e+00 9.15045619e-01] [-4.20507044e-01 -1.85864663e+00 -4.90131259e-01 ... -1.57812774e+00 1.62153697e+00 9.90016997e-01] [-1.84114790e+00 1.70635831e+00 -1.23551798e+00 ... -1.34529626e+00 4.32869375e-01 8.51912975e-01]] [[-1.05476010e+00 6.91437900e-01 -1.44131613e+00 ... 7.94857025e-01 -1.48300683e+00 -6.46937966e-01] [-3.40270567e+00 -1.85571921e+00 6.41882539e-01 ... -1.55548513e+00 -3.39936942e-01 -1.18578047e-01] [ 3.46683443e-01 5.29195331e-02 -2.18204165e+00 ... 5.22612870e-01 1.29369116e+00 2.12247157e+00] ... [-7.96611756e-02 -2.14100480e+00 2.45751214e+00 ... 5.15165687e-01 1.08798575e+00 -7.17758983e-02] [-1.02910531e+00 -2.55966067e+00 -1.17025530e+00 ... -1.14088118e+00 1.53138697e+00 1.46580651e-01] [-1.91284287e+00 1.08537543e+00 -1.03864622e+00 ... -1.84806049e+00 -5.52929044e-01 4.11008634e-02]] ... [[ 1.71685696e+00 -3.24212730e-01 -1.17559340e-02 ... -1.74017802e-01 7.71992862e-01 -1.22475863e+00] [ 3.42629433e-01 -2.92716175e-01 8.49052221e-02 ... 2.05647016e+00 -4.74180192e-01 5.18053114e-01] [ 1.36483300e+00 3.28732705e+00 1.42873287e-01 ... 2.37650275e+00 -3.05637312e+00 -3.43281507e-01] ... 
[-2.14501166e+00 -9.07533884e-01 -4.48384106e-01 ... 1.60311949e+00 -6.81008041e-01 -4.22117949e-01] [ 3.76309633e-01 -1.40515709e+00 8.59348953e-01 ... -2.79908538e+00 -9.66876268e-01 2.32641459e+00] [-1.34413350e+00 -1.88419068e+00 -6.22086823e-01 ... -6.76947951e-01 -8.38019431e-01 -7.34555840e-01]] [[-9.49649155e-01 -9.64016497e-01 9.47557509e-01 ... 1.87449300e+00 8.53325367e-01 9.96233165e-01] [ 1.48924923e+00 -6.18931472e-01 -5.49345970e-01 ... -6.56753480e-01 -2.00795174e+00 7.81473279e-01] [ 2.23282576e+00 -1.24488294e+00 -3.34920955e+00 ... -4.89117146e-01 -1.71040654e+00 -1.85758352e+00] ... [ 9.89035964e-01 9.02707577e-01 1.09141648e+00 ... 3.33549351e-01 -9.06166971e-01 -8.15718099e-02] [ 6.29775107e-01 1.24330497e+00 1.65962934e-01 ... -8.80943596e-01 -6.00828648e-01 -9.61184680e-01] [ 7.31109262e-01 -3.01931548e+00 1.77887368e+00 ... -1.22179902e+00 2.56510448e+00 1.29340172e+00]] [[-6.24266803e-01 -7.57729173e-01 -1.52600479e+00 ... 1.17697693e-01 -1.03946900e+00 -1.66717291e-01] [-1.62833035e+00 -1.85266542e+00 1.10689151e+00 ... 5.46707511e-01 -1.31302691e+00 2.14129731e-01] [ 8.96695256e-01 -2.28195643e+00 1.20678425e+00 ... -1.41002357e+00 7.90458202e-01 1.72631180e+00] ... [ 4.85450953e-01 -2.61400670e-01 2.17094135e+00 ... 1.09624255e+00 9.97378170e-01 2.20903134e+00] [ 1.35670733e+00 -1.58769321e-02 1.10104430e+00 ... -1.25401393e-01 -2.00270200e+00 -9.36328650e-01] [ 3.64939183e-01 -3.67135495e-01 1.88787782e+00 ... -1.23826575e+00 1.04268193e-01 3.76240075e-01]]] [[[-1.03472369e-02 1.30318373e-01 1.04500696e-01 ... -1.54052615e-01 1.58587724e-01 1.71388865e-01] [ 2.48595402e-02 5.41620851e-01 -2.99521148e-01 ... -1.81623802e-01 -2.69134138e-02 8.05955566e-03] [-2.67948538e-01 4.06069428e-01 1.67834312e-01 ... -2.42497414e-01 -1.72036439e-02 6.31610379e-02] ... [-9.84782279e-02 2.71167755e-01 2.36898497e-01 ... -3.39155458e-02 -1.08732931e-01 1.87159956e-01] [-5.90435900e-02 1.71189487e-01 -2.55504757e-01 ... 
3.44946176e-01 -6.07676923e-01 -2.77295232e-01] [-1.65345669e-01 3.17389071e-01 8.78181830e-02 ... -2.96688199e-01 4.32894140e-01 1.07566141e-01]] [[ 1.68671701e-02 -2.61715800e-01 2.82563984e-01 ... 2.67357260e-01 1.99934572e-01 3.16478550e-01] [-2.95396715e-01 5.02997683e-03 -1.31562188e-01 ... 5.43443680e-01 -2.37794518e-02 -3.16899717e-02] [-1.62867829e-01 -2.35275641e-01 2.72959203e-01 ... 2.94116437e-01 9.16702747e-02 2.09236756e-01] ... [ 2.77883142e-01 3.02297831e-01 1.46261677e-01 ... -1.81192622e-01 2.07589529e-02 4.83038336e-01] [-1.63727421e-02 -1.82832271e-01 -1.11135297e-01 ... 1.80218756e-01 2.21861243e-01 4.36575823e-02] [ 1.92469567e-01 3.21142673e-01 -3.34651411e-01 ... 8.47290605e-02 -2.43112579e-01 -2.40956202e-01]] [[ 3.25167209e-01 6.75271899e-02 -4.73292232e-01 ... 3.07296962e-01 -6.36539996e-01 -3.67819995e-01] [-2.57078707e-02 -1.30402371e-01 3.67179245e-01 ... 4.95980540e-03 -2.53346592e-01 -3.94805878e-01] [ 1.95482895e-01 -3.13124239e-01 1.18769765e-01 ... 4.11755890e-02 1.76364064e-01 7.77108818e-02] ... [ 1.20854147e-01 -2.53491759e-01 -5.96493334e-02 ... -2.84832597e-01 -2.47717023e-01 -6.28933311e-02] [-1.62834167e-01 5.26837260e-03 2.33686477e-01 ... -5.75035848e-02 -9.54933316e-02 1.29060388e-01] [ 1.92523763e-01 3.51670504e-01 4.41459715e-02 ... 2.22163364e-01 -3.27732921e-01 -1.02681190e-01]] ... [[-4.22218442e-02 9.08734053e-02 8.01200606e-03 ... -3.03539131e-02 2.35062793e-01 1.80837274e-01] [ 1.46208867e-01 2.02876166e-01 1.55334978e-03 ... -1.27787054e-01 1.52053714e-01 3.64274085e-02] [ 4.05622572e-01 7.00024962e-02 -1.40557081e-01 ... 1.69327110e-01 -2.30315983e-01 8.59773066e-03] ... [ 2.52866000e-01 4.01248643e-03 2.94845104e-01 ... 7.57227838e-02 5.79716504e-01 4.06958312e-01] [ 3.24550509e-01 2.01556846e-01 3.52643728e-01 ... 4.01475251e-01 5.26902497e-01 4.42281812e-01] [-1.70145426e-02 2.39729941e-01 -6.73738867e-03 ... 
-1.94634702e-02 -2.70842314e-01 -1.94024891e-01]] [[ 4.40664917e-01 -7.61251375e-02 -3.89734209e-01 ... 2.03416348e-01 -2.51883417e-01 -8.34199488e-02] [ 4.28167522e-01 1.49512172e-01 -1.13413446e-01 ... 1.23547554e-01 -1.54199272e-01 -1.11742668e-01] [ 1.75390076e-02 -3.07820112e-01 1.89163998e-01 ... -2.31213272e-01 -5.35957254e-02 1.08849265e-01] ... [ 3.45530659e-02 -4.74940658e-01 1.93108320e-01 ... -2.51675189e-01 -2.23342720e-02 5.12072220e-02] [-5.27153164e-02 2.36364618e-01 1.93441257e-01 ... -5.75937331e-02 1.58601224e-01 -4.25207913e-01] [ 1.52416542e-01 -5.12752123e-02 1.63671285e-01 ... 1.44545153e-01 1.66654661e-01 8.16366747e-02]] [[-9.32227969e-02 6.47738427e-02 -9.94622707e-02 ... 1.83989853e-01 -3.98091078e-01 -1.41007915e-01] [ 1.94283634e-01 4.44598258e-01 -1.46681234e-01 ... -1.33389577e-01 2.93091327e-01 -5.60259521e-01] [-3.35728645e-01 -1.23525605e-01 6.21690929e-01 ... -5.35460040e-02 1.33278653e-01 -2.21677404e-02] ... [ 1.47692874e-01 -3.61421071e-02 1.06781654e-01 ... -1.00866877e-01 -3.46729070e-01 -3.80798578e-02] [ 2.05620140e-01 -1.37439780e-02 -1.21216513e-01 ... -1.79254740e-01 -9.92383808e-02 2.06472516e-01] [ 1.15316905e-01 8.79938528e-02 4.52309996e-01 ... 5.66304252e-02 -5.06881252e-02 5.34542426e-02]]] [[[-1.91251540e+00 -1.61684632e-01 1.08225453e+00 ... -4.84613292e-02 5.88776410e-01 -1.65287733e+00] [-1.07937001e-01 -1.37890923e+00 5.35036802e-01 ... 1.08421016e+00 -7.08047748e-01 9.92212951e-01] [ 9.26369548e-01 4.87998039e-01 -1.66924790e-01 ... 7.60580957e-01 -3.56838852e-01 5.27288437e-01] ... [ 1.99099034e-01 6.20746374e-01 -1.42992961e+00 ... -9.00676847e-01 -1.66321993e-01 -1.47183061e+00] [ 7.41157770e-01 -1.15817475e+00 -1.14202321e-01 ... 2.47583961e+00 1.06431341e+00 3.97203803e-01] [ 1.80837250e+00 -1.06253183e+00 -1.75991225e+00 ... 2.98997760e-01 -9.15381908e-01 4.29311126e-01]] [[-1.05006289e+00 -7.74314761e-01 -1.77489594e-01 ... 
2.49619126e+00 1.32975435e+00 -9.82531190e-01] [-9.42614794e-01 7.98449576e-01 -2.71069020e-01 ... 3.71869832e-01 1.03930235e+00 7.66190350e-01] [-4.21531498e-01 -9.89496931e-02 -1.18710804e+00 ... -5.08802116e-01 1.21664250e+00 1.82900107e+00] ... [ 6.89782500e-01 -1.13364077e+00 -3.96883935e-01 ... -1.47647107e+00 -8.97639394e-01 1.44554663e+00] [ 1.67574704e+00 -1.04249276e-01 -5.96430480e-01 ... 1.91184834e-01 -2.63885316e-02 1.24193525e+00] [-4.00038272e-01 -1.61444890e+00 5.85172892e-01 ... -8.34893227e-01 -5.45729876e-01 4.52301532e-01]] [[-6.03416800e-01 3.56453210e-01 -3.00854504e-01 ... -1.91775233e-01 -7.34795213e-01 -3.54891449e-01] [ 1.78396666e+00 -9.45259035e-01 -7.01238513e-01 ... 4.02162939e-01 3.60809863e-01 1.05225003e+00] [ 1.70516670e-01 -5.56536257e-01 -1.57925045e+00 ... -8.23889017e-01 3.01188737e-01 -8.42013001e-01] ... [ 1.24046676e-01 9.52447355e-01 -1.29174605e-01 ... -4.91017520e-01 4.07906145e-01 -3.05261850e-01] [-4.03769344e-01 6.61205888e-01 -1.38507569e+00 ... -2.28509560e-01 -1.55557680e+00 5.84416509e-01] [-1.54473650e+00 1.51562542e-01 2.06563878e+00 ... -1.35723972e+00 -5.29711843e-01 -2.95949697e-01]] ... [[-1.91519350e-01 1.45176375e+00 -7.55541384e-01 ... -1.51763654e+00 4.68734086e-01 -1.03389692e+00] [-1.94183207e+00 -1.37380838e-01 1.87244177e+00 ... -1.00103712e+00 2.82232672e-01 1.50408053e+00] [-1.11803067e+00 -1.75446796e+00 -3.91458988e-01 ... -2.46141195e+00 1.53691426e-01 7.81774282e-01] ... [-8.27124000e-01 -1.11918700e+00 -1.52267087e+00 ... 2.10320517e-01 3.93161297e-01 8.44246328e-01] [-1.08295596e+00 1.19184697e+00 4.71671164e-01 ... 1.03930068e+00 2.27536893e+00 -9.22763228e-01] [-3.15883696e-01 1.11316025e+00 -7.20685482e-01 ... -1.38634098e+00 1.82799041e+00 -2.13458800e+00]] [[ 1.28002810e+00 -7.14600861e-01 -5.71877956e-01 ... 1.75967366e-01 3.93506616e-01 -1.12914288e+00] [ 2.48256147e-01 -2.29797289e-01 -6.23081744e-01 ... 
5.36962867e-01 -7.47882724e-01 -6.02130830e-01] [ 1.23544860e+00 1.63596439e+00 1.29304242e+00 ... -3.42518896e-01 -3.45932603e-01 -1.34732819e+00] ... [ 2.33780789e+00 -3.05435926e-01 1.40751374e+00 ... -1.10480177e+00 -2.23251760e-01 -6.20807946e-01] [ 5.50341249e-01 -2.38828227e-01 3.23267072e-01 ... 2.20322713e-01 -8.83879125e-01 3.69352728e-01] [ 1.88473463e+00 -2.18336970e-01 -1.70216072e+00 ... 2.09200931e+00 1.14061171e-02 1.30188632e+00]] [[ 5.16569674e-01 2.52811223e-01 9.59898710e-01 ... 1.19500327e+00 -5.38837969e-01 -7.95221608e-03] [-1.37719846e+00 -6.57766685e-02 1.77519190e+00 ... -1.81680691e+00 1.88658059e+00 -1.19168687e+00] [ 2.37053204e+00 9.85075474e-01 -4.43123221e-01 ... -4.56783623e-01 -1.39422655e-01 -1.88468957e+00] ... [-5.50664783e-01 -8.60225618e-01 6.06143117e-01 ... 1.16446920e-01 1.82930148e+00 -3.34137321e-01] [-4.68245754e-03 -4.25460160e-01 -7.88856149e-01 ... 2.43271136e+00 -6.09364629e-01 1.94478650e-02] [-1.68042481e-01 1.78835571e-01 1.11485267e+00 ... 1.68671489e-01 -1.01359129e+00 -1.20413947e+00]]] [[[-2.36784294e-01 6.90716028e-01 -9.20908176e-04 ... -4.91613060e-01 -3.18568677e-01 -1.91597357e-01] [ 2.35872328e-01 1.70037746e-01 4.19006735e-01 ... -1.72087535e-01 -1.36981654e+00 -8.54452431e-01] [ 7.35756814e-01 6.75943434e-01 7.50453696e-02 ... -4.98207211e-01 8.70683491e-01 -6.37827516e-01] ... [ 6.64455116e-01 4.40345883e-01 5.55885620e-02 ... 3.11516553e-01 2.18060106e-01 5.78935623e-01] [ 2.84804672e-01 -3.45149040e-01 -6.05979860e-01 ... -2.34915331e-01 5.77097654e-01 -2.99271911e-01] [-1.07404733e+00 -8.10295880e-01 3.72230411e-01 ... -3.36205930e-01 -4.74265635e-01 -6.48567498e-01]] [[-1.45444632e-01 6.62204802e-01 -1.14153542e-01 ... 1.50639379e+00 -6.18830264e-01 2.01974496e-01] [-4.86427248e-02 6.66468203e-01 -6.11120641e-01 ... 5.85167706e-01 -8.10143173e-01 1.71136156e-01] [-9.26196724e-02 -1.58999592e-01 -1.01828575e-01 ... -5.96448742e-02 -5.60470104e-01 2.18130633e-01] ... 
[-7.20377982e-01 -2.81508476e-01 4.79254365e-01 ... -7.93543458e-01 3.85343641e-01 7.17483222e-01] [ 3.12232733e-01 5.92826843e-01 -9.46333647e-01 ... 4.52799588e-01 -3.39436978e-02 9.14281130e-01] [-4.73228423e-03 -2.75881797e-01 8.95454347e-01 ... -6.49496019e-02 4.11796361e-01 -1.10095158e-01]] [[ 7.19244599e-01 2.24636272e-02 9.60328132e-02 ... 1.31569135e+00 4.30469960e-01 -2.12861806e-01] [-6.22014217e-02 -2.46669531e-01 4.54455875e-02 ... -1.74883828e-01 -1.88874498e-01 -1.53301895e-01] [-5.46736836e-01 -1.22334242e+00 -5.29035926e-01 ... -1.41008615e+00 -1.52603388e+00 -1.93221256e-01] ... [-2.48759225e-01 3.19939703e-01 -4.47371341e-02 ... 2.63817668e-01 -4.26555723e-02 -9.86890197e-01] [-4.87589359e-01 3.92864197e-01 4.83225256e-01 ... -1.84095299e+00 -1.06172955e+00 -1.02768850e+00] [-2.28760809e-01 6.23465240e-01 -9.15383399e-01 ... 4.97388929e-01 -8.20057929e-01 3.03507477e-01]] ... [[-3.21298838e-01 -3.32381018e-02 -8.11252370e-03 ... -3.69985461e-01 6.93422616e-01 3.08961868e-01] [ 2.07885697e-01 1.09239757e-01 1.07968338e-01 ... -3.01928490e-01 7.85512030e-02 -3.43327731e-01] [ 5.41675031e-01 -4.68587816e-01 -1.53817207e-01 ... -2.48201460e-01 3.79092693e-01 4.07919943e-01] ... [-7.21669793e-02 2.87601352e-01 3.78810048e-01 ... -4.33375724e-02 5.09486832e-02 -3.19095179e-02] [ 4.68867838e-01 3.46869946e-01 1.03292957e-01 ... -7.95079589e-01 3.46799761e-01 3.79237443e-01] [-2.21971035e-01 4.87985849e-01 2.86213666e-01 ... 1.01377390e-01 2.19713226e-02 -3.51651609e-01]] [[-4.36448574e-01 1.94763404e-03 -6.92723572e-01 ... -3.55811954e-01 5.27625680e-01 -5.80336988e-01] [ 3.73054892e-01 -8.61061394e-01 3.40466350e-01 ... -2.16470331e-01 4.99598265e-01 9.66598749e-01] [-4.43498015e-01 -1.86521232e-01 -4.28333610e-01 ... -7.54272640e-01 -3.04291040e-01 -3.94098282e-01] ... [ 3.11041921e-01 -7.74904370e-01 5.38074598e-02 ... 1.48303330e-01 9.61756930e-02 1.10266149e+00] [ 4.68393922e-01 -7.47464657e-01 -1.73300773e-01 ... 
-4.08660531e-01 -2.35688850e-01 6.94238901e-01] [ 7.01693416e-01 3.06052893e-01 1.12581596e-01 ... 2.17836257e-02 1.12642519e-01 4.74310756e-01]] [[ 3.78308803e-01 9.07076895e-01 -3.63808542e-01 ... 5.03342211e-01 8.84873196e-02 4.21993732e-01] [ 5.35067856e-01 -1.06297445e+00 -3.32258373e-01 ... 3.58407408e-01 7.77311683e-01 9.12216783e-01] [-1.05825357e-01 2.28346631e-01 -9.27977920e-01 ... 2.73740701e-02 -7.91189611e-01 -3.89021009e-01] ... [ 4.73306924e-01 -3.78662318e-01 5.91206253e-01 ... -1.58344314e-01 -2.62604356e-01 -1.26980925e+00] [-1.85384721e-01 -4.51539546e-01 -7.61894956e-02 ... -6.33499205e-01 -4.51233327e-01 -7.22205162e-01] [-4.94133979e-01 -2.87668239e-02 -3.01342994e-01 ... -4.73179907e-01 1.59588385e+00 -7.68372834e-01]]]] [[[[ 2.51835227e-01 -2.44152650e-01 -8.75444889e-01 ... 1.15719306e+00 9.69014227e-01 6.14267290e-01] [ 1.72638917e+00 -1.29231423e-01 -1.50241241e-01 ... -5.90359867e-01 2.14217260e-01 2.09693238e-01] [ 2.05171004e-01 1.30063891e+00 -2.88413465e-01 ... -5.37184954e-01 1.68260241e+00 7.77679205e-01] ... [-8.67805600e-01 6.82058156e-01 2.64715821e-01 ... 1.55383974e-01 7.22187519e-01 3.41757745e-01] [-3.85528505e-01 9.40042436e-01 -5.52250624e-01 ... 3.65675718e-01 1.73116624e+00 -5.13421118e-01] [-1.02279234e+00 4.03559068e-03 3.12226623e-01 ... 3.06971073e-01 -1.15759957e+00 6.91369474e-02]] [[-8.85918736e-01 -5.77935100e-01 8.60508025e-01 ... -7.05535114e-01 -8.53770912e-01 5.14812648e-01] [ 7.21523046e-01 5.79847991e-01 -1.44948927e-03 ... -6.25524044e-01 -7.85341501e-01 6.55177593e-01] [ 6.11984670e-01 -4.11325157e-01 -3.41226518e-01 ... 2.13709250e-01 2.06108317e-01 -8.88523281e-01] ... [-1.13649763e-01 5.88243306e-01 -3.78887296e-01 ... -4.75619465e-01 5.78481615e-01 3.40971917e-01] [-5.18827260e-01 1.14821792e+00 -1.36459395e-01 ... -2.70537853e-01 -8.81335676e-01 -2.00530495e-02] [-1.37122333e-01 1.99855614e+00 -4.19957727e-01 ... 
6.21917307e-01 -1.85702586e+00 1.59173775e+00]] [[-2.19227299e-01 8.33293915e-01 -2.31305420e-01 ... 7.69886300e-02 -9.20067430e-01 1.22276974e+00] [-2.33785704e-01 2.52582312e-01 2.91396566e-02 ... -1.63154185e+00 -5.38352549e-01 4.94743377e-01] [ 9.45384920e-01 7.20750928e-01 8.96963894e-01 ... 9.45076466e-01 1.66058168e-01 -3.98807824e-01] ... [ 7.15973794e-01 7.43337095e-01 -6.26084059e-02 ... -2.65602678e-01 3.76795791e-02 3.97368580e-01] [ 6.83183223e-02 -5.28775632e-01 5.07497549e-01 ... 2.06882134e-01 8.42453633e-03 4.21150088e-01] [ 5.87111890e-01 3.16954732e-01 4.61421400e-01 ... 6.52211964e-01 2.50665516e-01 2.77423620e-01]] ... [[ 2.39458069e-01 -1.64059639e+00 7.92729795e-01 ... -5.09764552e-01 -6.66743517e-01 -8.56694460e-01] [-7.66273260e-01 8.16267803e-02 -4.33094203e-01 ... -6.10356152e-01 -5.90383172e-01 1.18916309e+00] [-4.39877063e-02 4.10056680e-01 4.91269022e-01 ... 1.94425121e-01 -6.02959156e-01 -6.61793113e-01] ... [ 4.87415612e-01 -2.41831288e-01 1.02337904e-01 ... -4.43886727e-01 -4.57104474e-01 -5.26371121e-01] [-8.06470037e-01 -1.70067096e+00 1.69744110e+00 ... -1.94717988e-01 -5.37265837e-01 2.70394415e-01] [-1.28232196e-01 1.51061028e-01 4.41895753e-01 ... 2.00930789e-01 -5.77325702e-01 -4.92706776e-01]] [[ 6.74852431e-01 6.06132984e-01 1.08429253e+00 ... 3.10724646e-01 9.62090254e-01 4.86387491e-01] [ 4.51609679e-02 -2.15063006e-01 3.79669189e-01 ... -4.99878526e-01 1.16026783e+00 2.08863050e-01] [-6.24979436e-01 4.98809040e-01 9.08375084e-01 ... -5.01054823e-01 4.34917986e-01 1.24293292e+00] ... [-6.76801980e-01 1.87963456e-01 -5.09393811e-01 ... -2.94707596e-01 -1.40573776e+00 1.23311853e+00] [ 1.90025106e-01 -2.48709068e-01 -6.97623730e-01 ... -1.73286200e-01 -1.43453270e-01 -1.72820115e+00] [ 5.80647886e-01 4.82422352e-01 -2.71547407e-01 ... -7.98105001e-01 4.21134233e-01 -1.03574347e+00]] [[-8.35523307e-01 -6.51849359e-02 5.76231956e-01 ... 
8.06254089e-01 -2.08075553e-01 6.47831500e-01] [ 2.70552561e-02 -1.72745362e-01 3.55441332e-01 ... 4.63823974e-01 -7.67496288e-01 1.29693180e-01] [-1.27437383e-01 -5.62079668e-01 -3.21801931e-01 ... 3.90618145e-01 1.50138184e-01 3.19406748e-01] ... [ 5.86251795e-01 -6.97883546e-01 2.94103801e-01 ... 4.65760827e-02 7.72004545e-01 8.80846679e-01] [ 7.93000996e-01 -4.91525859e-01 -8.14618647e-01 ... -7.69472182e-01 -4.19919372e-01 8.99250805e-02] [-7.76671886e-01 1.03602183e+00 -1.08983614e-01 ... 6.22046471e-01 -2.26656094e-01 7.13920057e-01]]] [[[-7.77162194e-01 -7.05852211e-01 3.41760330e-02 ... -2.01644495e-01 -1.55235842e-01 4.48253483e-01] [-5.08248389e-01 9.22906399e-03 -1.99569046e-01 ... 3.78018826e-01 -4.62587774e-01 -3.37030172e-01] [ 3.39876592e-01 -1.44460037e-01 -1.62157327e-01 ... 3.78182948e-01 -5.89568973e-01 -1.52834341e-01] ... [-9.49088410e-02 -3.37768644e-01 6.23655677e-01 ... -5.59178233e-01 -3.42890322e-01 -2.06070170e-01] [ 7.45485008e-01 1.72005653e-01 1.93968236e-01 ... 2.10229427e-01 -3.24077725e-01 5.54713666e-01] [-3.70098472e-01 2.50856698e-01 2.92379074e-02 ... 2.47973472e-01 4.42274600e-01 -3.88835296e-02]] [[-9.96328220e-02 2.29468779e-03 -4.55421329e-01 ... 2.91656047e-01 -6.29855096e-01 -3.99650872e-01] [-3.20419014e-01 1.04072154e-01 -4.34665471e-01 ... 1.37434006e-01 -3.80839109e-01 3.65755796e-01] [-3.72920632e-02 -2.61704952e-01 6.68015361e-01 ... -2.20444769e-01 -2.25086123e-01 -4.74314183e-01] ... [ 9.80045050e-02 1.47459179e-01 5.09989820e-02 ... 4.80073988e-02 1.34854957e-01 6.75209165e-01] [ 4.02620509e-02 -2.66936749e-01 1.62194028e-01 ... -1.33972719e-01 -3.93772185e-01 -2.63163120e-01] [ 3.14525038e-01 4.78225425e-02 -3.29803169e-01 ... 2.02877417e-01 -5.05527854e-01 -1.90266579e-01]] [[ 2.54418906e-02 3.68527025e-01 -2.07747286e-03 ... -2.63871044e-01 -2.63111532e-01 -5.98752677e-01] [-1.80958822e-01 5.95536493e-02 -4.50747579e-01 ... 
3.07614021e-02 2.96839207e-01 2.13240772e-01] [ 2.48166278e-01 -1.06650136e-01 -3.54994267e-01 ... -3.07626784e-01 2.13045493e-01 7.39394009e-01] ... [ 8.80758762e-02 -3.34644914e-01 4.65969801e-01 ... 3.91141444e-01 -2.63400078e-01 1.29648775e-01] [ 1.47588879e-01 2.73315668e-01 3.47203583e-01 ... 1.21838912e-01 -3.29031050e-01 2.18287647e-01] [ 5.86214244e-01 1.18047565e-01 4.43431079e-01 ... 4.36615348e-01 1.85603108e-02 5.52934110e-01]] ... [[ 3.23806912e-01 -6.76309049e-01 3.53780203e-02 ... -1.56176314e-01 -2.71892726e-01 -1.87002942e-01] [-2.30334088e-01 -7.79868215e-02 1.50617510e-01 ... 6.61145210e-01 1.02086388e-01 -5.06243110e-01] [-8.25304240e-02 2.60677692e-02 -1.20517761e-01 ... 4.39076602e-01 2.96384126e-01 -3.77803952e-01] ... [-3.01215172e-01 -1.54652596e-01 5.26879668e-01 ... -1.85537934e-01 -2.27013916e-01 1.65238619e-01] [-4.63299572e-01 2.56293505e-01 -1.84753165e-01 ... 1.55760840e-01 2.26170570e-01 3.70263487e-01] [ 4.69962507e-01 -1.42368257e-01 5.17564416e-01 ... 5.64255655e-01 -2.46532843e-01 -1.42942276e-03]] [[-8.95770192e-02 -5.90040565e-01 -3.51770073e-01 ... -8.27381611e-02 2.71011621e-01 -4.56824780e-01] [ 8.84587020e-02 3.08181942e-02 6.02552056e-01 ... -7.54518807e-02 -2.65395135e-01 -2.22949967e-01] [ 2.24129409e-01 -2.83799767e-01 5.29972434e-01 ... -2.57348776e-01 2.76607513e-01 3.50233197e-01] ... [-8.04371089e-02 5.88116646e-01 -3.84357393e-01 ... -2.03479767e-01 1.56631202e-01 -3.70149493e-01] [-3.04196123e-02 4.12148714e-01 4.70096976e-01 ... -4.41587865e-01 2.75330275e-01 2.95099229e-01] [ 3.74085695e-01 4.94724452e-01 -5.56257546e-01 ... 2.21253663e-01 -2.33304560e-01 3.45312834e-01]] [[ 6.49374247e-01 4.16155517e-01 -1.86919376e-01 ... 6.52907133e-01 3.88696522e-01 -2.50770628e-01] [ 3.62307392e-02 4.40141886e-01 4.90950137e-01 ... -3.01434904e-01 1.49739295e-01 4.07189965e-01] [-7.50345290e-02 -2.61520501e-02 -2.00779721e-01 ... -2.90791333e-01 -5.03747284e-01 -7.55439639e-01] ... 
[-8.15479830e-02 1.93122521e-01 2.26593554e-01 ... 3.43346000e-01 -2.36077309e-01 2.79840022e-01] [ 1.48371875e-01 -3.00560951e-01 -8.74391258e-01 ... -3.19484204e-01 -3.71571988e-01 2.80823439e-01] [-2.20005333e-01 -3.81989956e-01 -1.70589373e-01 ... 5.19877911e-01 1.15259983e-01 -3.49117756e-01]]] [[[-1.43261933e+00 -4.20447022e-01 8.98411393e-01 ... -1.37538564e+00 -3.96610022e-01 1.47746146e+00] [-4.76487517e-01 -6.44986451e-01 -3.31234962e-01 ... -2.22810721e+00 1.42160878e-01 2.16036391e+00] [-3.09471905e-01 -1.27928102e+00 8.60307813e-02 ... -2.48232579e+00 7.11339891e-01 1.07021582e+00] ... [ 1.91371664e-01 -1.93569541e-01 -2.15441608e+00 ... -8.71301770e-01 8.63444269e-01 -2.56006384e+00] [-1.41325438e+00 -1.08155859e+00 -1.71836305e+00 ... 6.12686872e-01 -1.66468132e+00 -1.61826599e+00] [ 2.08526060e-01 -1.92189443e+00 -2.20873761e+00 ... -1.15368772e+00 2.75733972e+00 9.47126567e-01]] [[-6.55544281e-01 -4.15837795e-01 -1.21420610e+00 ... -3.06918693e+00 1.69128075e-01 6.91294909e-01] [-7.24275231e-01 -2.64903355e+00 -2.35061541e-01 ... -2.53244400e-01 -1.01221061e+00 4.32078034e-01] [-1.00852907e+00 2.37730309e-01 1.12558484e+00 ... 4.57946301e-01 -2.07867837e+00 1.68278265e+00] ... [ 8.44812870e-01 3.63058716e-01 2.15244579e+00 ... -4.36958164e-01 5.81631243e-01 4.33528632e-01] [-1.25088781e-01 8.09690058e-01 -1.29478741e+00 ... 8.00418317e-01 1.53559470e+00 -2.14382505e+00] [-2.13256121e+00 -4.45550174e-01 -1.68987119e+00 ... -1.77706051e+00 -2.16664970e-01 7.06486285e-01]] [[-3.26661682e+00 -3.20005393e+00 2.05128360e+00 ... 1.16154063e+00 -2.21357632e+00 -1.68571758e+00] [-9.88007843e-01 1.04739594e+00 -7.61698246e-01 ... 7.32418537e-01 -5.38459122e-01 2.11328125e+00] [-1.83890760e-02 1.14122534e+00 -8.29255700e-01 ... -8.79081905e-01 -7.05484450e-01 -6.76244378e-01] ... [-1.00911927e+00 -1.74220335e+00 -1.07738614e+00 ... -7.34279394e-01 1.18897557e+00 -7.33379245e-01] [ 2.35159829e-01 3.13949990e+00 -8.78673315e-01 ... 
8.05161715e-01 -1.31577539e+00 -2.82445639e-01] [ 5.80413043e-01 -1.08922100e+00 2.74509001e+00 ... -1.18017209e+00 -1.54005194e+00 -1.54925779e-01]] ... [[-5.63299417e-01 2.45354939e+00 1.59443498e+00 ... -1.73638856e+00 1.35499573e+00 1.02646458e+00] [-9.95076656e-01 5.94942808e-01 -8.23383689e-01 ... -5.83631039e-01 1.81502295e+00 2.18937469e+00] [ 5.58433950e-01 -3.34598750e-01 -2.14588732e-01 ... -2.46726465e+00 -4.79258388e-01 2.19864696e-01] ... [ 4.10962790e-01 1.94988835e+00 -5.39962232e-01 ... -1.01258767e+00 1.13171268e+00 -4.12366772e-03] [ 2.34754300e+00 -1.62436438e+00 1.51233280e+00 ... -1.09268323e-01 -2.50160146e+00 2.60287929e+00] [-1.57522786e+00 1.36805356e+00 -1.25191438e+00 ... 2.74672079e+00 4.15552974e-01 -1.46703112e+00]] [[-2.22255051e-01 9.93114173e-01 1.48396540e+00 ... 3.95579720e+00 -1.30543840e+00 -4.38539863e-01] [ 9.82601285e-01 -1.99088645e+00 -2.08666229e+00 ... -1.93709481e+00 1.41434252e+00 -1.59809792e+00] [ 1.84572196e+00 -1.12413621e+00 9.79591429e-01 ... -3.51378173e-01 -1.05192626e+00 -1.90031302e+00] ... [ 1.63739598e+00 -6.17501378e-01 -8.80462676e-02 ... -2.87190765e-01 1.05958819e+00 -3.30910295e-01] [-1.57036865e+00 -1.09736300e+00 -1.56134403e+00 ... -2.41461492e+00 1.41215801e+00 -1.83641374e+00] [ 8.27257514e-01 -1.05693841e+00 5.92686534e-02 ... 1.45251966e+00 9.62776184e-01 -1.76901639e+00]] [[ 7.02699944e-02 -4.89908978e-02 -9.11570072e-01 ... 9.25316751e-01 2.85557002e-01 1.33164549e+00] [-1.29744971e+00 2.24216897e-02 9.02887225e-01 ... -3.18188429e+00 7.38349915e-01 5.17037749e-01] [-9.18420732e-01 -3.57918471e-01 5.81366956e-01 ... -1.91549242e-01 1.10329270e+00 -1.85328007e-01] ... [ 1.10755384e+00 -1.10871911e+00 -9.82587934e-01 ... 2.16186926e-01 -5.74960887e-01 4.39232975e-01] [-1.50028038e+00 -4.74691726e-02 -1.19707763e+00 ... -1.58571944e-01 -9.73722756e-01 -1.42814970e+00] [-3.40756476e-01 5.94705105e-01 -1.40672731e+00 ... 
7.69673645e-01 9.82426643e-01 5.37800014e-01]]] [[[ 8.42769295e-02 1.77187756e-01 -1.80637658e-01 ... 3.85848917e-02 -6.39038756e-02 -9.91982222e-02] [ 1.56193286e-01 -6.04934655e-02 9.57987532e-02 ... 1.30850136e-01 9.75879095e-03 2.11858094e-01] [-1.49348676e-01 6.91656081e-04 -4.33991164e-01 ... -1.64273724e-01 -3.79640758e-02 8.33042040e-02] ... [ 8.37375745e-02 4.67636704e-01 -1.96617886e-01 ... -2.34166086e-01 -5.48445880e-01 -5.56691848e-02] [ 2.78012399e-02 6.43779412e-02 -3.41722625e-03 ... 3.23449969e-01 3.45427185e-01 -4.07602876e-01] [-2.13319317e-01 -7.10894912e-02 -1.89768165e-01 ... -3.37299734e-01 1.82491153e-01 2.28468612e-01]] [[-3.86252373e-01 1.33906439e-01 -1.67986438e-01 ... -8.80340561e-02 -1.42706767e-01 2.11940616e-01] [ 3.73583525e-01 3.50759961e-02 3.78906846e-01 ... -5.21099329e-01 9.96436179e-02 -3.20342153e-01] [ 3.82282674e-01 1.06997184e-01 -7.50114843e-02 ... 3.41404453e-02 -2.35978752e-01 2.37903029e-01] ... [ 1.02265991e-01 3.28514218e-01 2.77131826e-01 ... 2.15800285e-01 1.39023185e-01 -2.56014466e-01] [ 3.46286967e-02 3.25525403e-01 -7.61972815e-02 ... 1.38352528e-01 -3.21873397e-01 5.87606244e-02] [-1.80804655e-01 -1.34907171e-01 7.82056376e-02 ... 1.73576418e-02 9.05680433e-02 -1.50154710e-01]] [[-7.94614479e-02 1.62107006e-01 -2.52638099e-04 ... -7.47004524e-02 1.72066435e-01 -6.31533340e-02] [ 9.64798704e-02 -3.69478852e-01 6.55826703e-02 ... 1.51407970e-02 4.64924037e-01 3.12470477e-02] [-2.21663252e-01 1.69493809e-01 3.39786649e-01 ... 2.10412353e-01 2.93121964e-01 -3.37139279e-01] ... [-1.59806594e-01 3.11474115e-01 1.64426584e-02 ... -4.72820938e-01 4.82890308e-01 3.42215389e-01] [ 2.35840939e-02 -3.30845788e-02 -5.88949807e-02 ... -4.51042384e-01 1.62956446e-01 5.47682345e-02] [-1.64587840e-01 -9.12592933e-02 -2.17414737e-01 ... -1.17275603e-02 -1.88494518e-01 -3.09995502e-01]] ... [[-3.63762900e-02 3.07855755e-01 -1.43270642e-01 ... 
-1.19353514e-02 7.54911453e-02 -2.86639810e-01] [-3.08960825e-02 -3.85148078e-01 -1.71137437e-01 ... 9.11831260e-02 2.81600803e-02 -2.92474270e-01] [ 9.63579416e-02 2.66260952e-01 -8.26171562e-02 ... -3.20116341e-01 -1.33057922e-01 6.37964785e-01] ... [ 4.58096147e-01 -1.94359556e-01 -3.24718326e-01 ... -1.57950044e-01 -1.57076403e-01 1.99328929e-01] [ 8.34587291e-02 1.37252986e-01 3.35569978e-01 ... 5.35833180e-01 -1.49153292e-01 -1.35563672e-01] [-3.12568784e-01 1.60408288e-01 -1.92025900e-01 ... 4.56655473e-01 7.68825263e-02 -3.42712373e-01]] [[ 2.59742975e-01 6.44708872e-02 2.04033181e-01 ... 1.44413263e-01 -1.17381282e-01 1.46824867e-01] [-4.81649250e-01 -1.30942151e-01 -1.24319363e-02 ... -6.53021643e-03 -1.69504419e-01 -2.65862904e-02] [-2.79769599e-01 5.20499907e-02 1.80727318e-01 ... 1.37228370e-01 -8.32744986e-02 -3.04282695e-01] ... [-1.64923623e-01 -1.39935136e-01 -4.34328675e-01 ... 6.86012581e-02 -5.24041176e-01 1.13837145e-01] [ 1.92117259e-01 1.95605263e-01 -3.28889757e-01 ... -2.19125360e-01 3.22881192e-02 -7.17098173e-03] [ 3.51917982e-01 -1.28573015e-01 -8.46907124e-02 ... -6.26673251e-02 1.06389381e-01 -9.07378793e-02]] [[ 1.37659773e-01 -2.26192385e-01 6.00349419e-02 ... 1.58927232e-01 -7.62193650e-02 -3.16922396e-01] [-2.53267139e-01 1.37429342e-01 2.18231529e-01 ... 6.88711852e-02 -2.24979982e-01 -2.44148508e-01] [-2.66585916e-01 1.73730135e-01 -6.59183025e-01 ... 1.29918801e-02 9.24462751e-02 2.97183841e-02] ... [ 2.54891273e-02 -7.60642469e-01 1.75860748e-01 ... 2.24644169e-01 -1.47793218e-01 -7.27928281e-02] [-2.06631377e-01 9.48828310e-02 3.99605334e-01 ... -3.29756945e-01 -1.74098730e-01 5.96430421e-01] [ 2.20786572e-01 -5.32458089e-02 -4.57529444e-04 ... -8.67410377e-02 8.23299959e-02 -1.64864540e-01]]] [[[ 7.84628093e-01 -1.65395364e-01 -1.65200007e+00 ... 2.26543061e-02 -1.01756310e+00 9.84153032e-01] [-3.72013003e-01 -1.79918182e+00 1.22863448e+00 ... 
2.68323958e-01 1.06794715e-01 5.98534226e-01] [ 1.12416792e+00 -3.41037822e+00 -4.83183444e-01 ... 1.73999548e+00 -1.19862235e+00 -3.75297815e-01] ... [ 3.53568375e-01 6.93512201e-01 1.08261013e+00 ... -1.54339027e+00 1.23647153e+00 1.18688476e+00] [ 1.13948965e+00 2.07328939e+00 -2.20636427e-01 ... -7.05919206e-01 -6.81159943e-02 3.39854211e-01] [-1.29414663e-01 -1.27703118e+00 -4.91832346e-01 ... 1.78599954e+00 7.98388183e-01 -1.59082270e+00]] [[ 2.83401996e-01 1.19666368e-01 -1.32104385e+00 ... -1.62020254e+00 3.41305494e-01 4.75970000e-01] [-1.50670564e+00 -3.63631308e-01 -1.33367789e+00 ... -6.20625079e-01 6.02419734e-01 -4.06206310e-01] [-1.53131139e+00 -8.14855099e-01 -1.31712222e+00 ... 6.02635043e-03 1.68450129e+00 1.29305816e+00] ... [ 1.76255211e-01 -8.18060815e-01 -3.17199439e-01 ... 3.25049609e-01 7.09809780e-01 5.71636021e-01] [ 9.26362276e-01 -1.29600310e+00 -2.04126787e+00 ... -1.64606273e+00 1.38712835e+00 -1.41557109e+00] [ 1.79544896e-01 2.39886865e-01 1.15098071e+00 ... 4.90252763e-01 -1.98956585e+00 -2.93778896e-01]] [[-4.47473019e-01 -1.88278377e-01 -2.97008336e-01 ... 1.15575504e+00 4.86983716e-01 1.51290023e+00] [-1.28240192e+00 4.91891205e-01 1.03666377e+00 ... 1.37656510e+00 -1.49799362e-01 -9.74971771e-01] [-5.90228617e-01 -8.31211135e-02 -2.46516109e+00 ... -1.45473349e+00 -8.82286668e-01 2.09044546e-01] ... [ 1.17287710e-01 -1.97421944e+00 -1.22773819e-01 ... 4.17833924e-02 3.12556362e+00 4.34299737e-01] [-1.74477065e+00 -3.80000085e-01 2.07906768e-01 ... 9.25461650e-01 -1.17961073e+00 -7.87437320e-01] [ 3.94208312e-01 1.17612612e+00 1.83249748e+00 ... -4.50437009e-01 -5.64001501e-01 -2.53701031e-01]] ... [[ 1.94049370e+00 3.28903139e-01 -9.55577850e-01 ... 1.27956367e+00 9.42556500e-01 7.50759184e-01] [ 1.54358178e-01 -1.75954819e+00 -4.12179887e-01 ... -4.41847518e-02 1.33279121e+00 2.17845619e-01] [ 2.12114811e-01 5.18870294e-01 -6.20130420e-01 ... -2.26352513e-01 3.15288782e-01 6.88616574e-01] ... 
[-7.43685663e-01 -1.11384249e+00 -7.42781401e-01 ... 1.57146886e-01 2.47879446e-01 8.40743538e-03] [-1.22744799e+00 5.79136550e-01 -8.13322246e-01 ... -7.56785512e-01 2.83914953e-01 -7.72298276e-02] [-1.16371644e+00 -4.99870062e-01 1.94941509e+00 ... -3.93651217e-01 1.12366414e+00 -2.78464943e-01]] [[ 7.13522494e-01 7.84681812e-02 2.76292229e+00 ... 2.02197835e-01 -2.59300661e+00 7.67486870e-01] [ 1.50999367e-01 -1.30061924e+00 1.04153547e-02 ... -1.36167383e+00 -7.98171282e-01 1.48770571e+00] [ 5.93040466e-01 1.90829241e+00 -1.34930670e+00 ... 5.31516254e-01 1.45581043e+00 1.51727572e-01] ... [-1.24074864e+00 2.46059876e-02 1.73944426e+00 ... -1.04805613e+00 -4.11708504e-02 -9.88275036e-02] [ 2.14750671e+00 1.60985720e+00 -1.36690545e+00 ... 1.36242318e+00 2.70813018e-01 -1.13121355e+00] [ 3.69784355e-01 -1.49673074e-01 -1.43616748e+00 ... -1.09538090e+00 2.78145939e-01 -2.61879593e-01]] [[-8.93289745e-02 9.06905904e-02 -1.03759766e+00 ... 1.19180596e+00 -5.35822093e-01 4.58747149e-02] [-2.11476028e-01 -1.61462760e+00 -2.93732435e-01 ... 1.11607146e+00 2.30225921e-02 -5.39813876e-01] [-7.44687200e-01 1.52789700e+00 2.80622435e+00 ... 6.54486537e-01 -1.38103139e+00 -2.05812430e+00] ... [ 2.35807633e+00 -1.56290829e+00 -7.44232059e-01 ... -6.30706370e-01 -2.04224324e+00 -3.14724505e-01] [ 7.93839812e-01 -3.02393436e-01 -1.06190659e-01 ... -4.15559977e-01 -4.35181886e-01 2.54193854e+00] [ 3.01711738e-01 -3.81330073e-01 -6.82329461e-02 ... 3.34520727e-01 6.08927369e-01 -5.11031687e-01]]] [[[-3.23224038e-01 1.24287248e-01 -4.59096469e-02 ... 4.06811386e-01 2.36698881e-01 6.95842326e-01] [-3.70659202e-01 9.75161672e-01 5.14476240e-01 ... -3.11999023e-01 3.49939585e-01 -4.02252495e-01] [ 1.18480957e+00 3.00841093e-01 1.02267645e-01 ... -3.68146300e-01 -1.24490762e+00 4.68952805e-02] ... [ 3.67354572e-01 3.94662321e-01 -2.81576127e-01 ... -9.46434677e-01 -7.13892698e-01 2.80372798e-01] [-1.70543909e-01 -7.91906774e-01 -3.71370435e-01 ... 
-7.70957112e-01 5.31271696e-01 3.56558770e-01] [-1.71690464e-01 -1.91302195e-01 5.97931631e-02 ... -7.30473042e-01 2.33871475e-01 6.07316829e-02]] [[ 8.50353241e-01 4.36305106e-01 -2.83071727e-01 ... 5.53215742e-01 -5.40554337e-02 -2.69479424e-01] [-9.76902723e-01 -4.85432953e-01 -3.16046029e-02 ... 6.17836237e-01 1.03718549e-01 -6.83053255e-01] [-9.77184832e-01 -7.30240822e-01 -2.19695792e-01 ... -4.36043590e-01 -2.75200427e-01 -3.91414404e-01] ... [ 6.50592029e-01 -3.36017348e-02 1.09102869e+00 ... 1.46311927e+00 1.78965509e-01 -7.78208137e-01] [ 8.24356318e-01 4.87347156e-01 1.20437406e-02 ... -4.87377197e-01 -1.16170511e-01 1.56664205e+00] [-3.25128049e-01 -4.68892843e-01 2.31747985e-01 ... -3.99292231e-01 -8.67329910e-02 -3.23554039e-01]] [[-6.96420789e-01 -7.83106610e-02 -1.58419669e+00 ... 9.77749825e-01 -4.16567832e-01 1.01194763e+00] [-7.69470930e-01 2.21154109e-01 3.94661099e-01 ... -2.92808920e-01 -4.84674305e-01 -3.72297794e-01] [ 1.77270502e-01 4.32789415e-01 -6.92807198e-01 ... 1.67291731e-01 3.95656437e-01 1.80126131e-01] ... [-8.70320559e-01 -1.51269883e-01 8.81563276e-02 ... -1.31850038e-02 -2.66877919e-01 1.00678325e+00] [-5.62553585e-01 1.66084647e-01 7.58234322e-01 ... -7.76373804e-01 2.72844881e-01 4.18917596e-01] [ 3.83933634e-01 -4.88244653e-01 3.85214984e-01 ... -4.44721319e-02 -7.46305346e-01 5.56831323e-02]] ... [[-8.55863929e-01 -2.31531984e-03 -2.14304492e-01 ... -4.48864400e-01 9.18414116e-01 -3.51232171e-01] [-2.13215038e-01 8.17211270e-01 -2.70827979e-01 ... -6.40858650e-01 -1.29774854e-01 -3.93967360e-01] [ 3.68450463e-01 1.23399742e-01 8.32214773e-01 ... 1.95571199e-01 1.38453215e-01 -3.11423868e-01] ... [-1.32004380e+00 -1.80858634e-02 8.40043128e-01 ... 1.94318011e-01 -1.98028926e-02 -2.33714476e-01] [-6.82554841e-01 7.23812401e-01 -3.35465930e-02 ... -1.64972767e-01 9.03936177e-02 7.10798025e-01] [-1.86019555e-01 -2.35297084e-01 7.50955045e-01 ... 
6.20402731e-02 7.76056409e-01 -2.86516547e-01]] [[ 1.02574706e-01 -1.69579312e-01 -6.91680238e-04 ... -7.16256440e-01 -1.12424624e+00 1.15930092e+00] [ 1.58025360e+00 4.17612851e-01 6.04192734e-01 ... -1.72221199e-01 -1.44159257e-01 3.30817312e-01] [ 3.19373727e-01 -1.61624432e+00 3.76157105e-01 ... 9.65238586e-02 -1.19216584e-01 2.96666682e-01] ... [-1.62974790e-01 -4.26693618e-01 -2.58268774e-01 ... -4.49878395e-01 -2.67786056e-01 2.35000163e-01] [-3.67552429e-01 2.97123641e-01 1.93226889e-01 ... 9.76478517e-01 -3.98529232e-01 8.82708251e-01] [-5.18344641e-01 2.12340713e-01 6.82120860e-01 ... -3.79696429e-01 -6.73348844e-01 -1.05296031e-01]] [[-5.56189001e-01 -9.43143815e-02 1.26936659e-01 ... 7.39232361e-01 -1.04765451e+00 3.55197601e-02] [ 2.54991621e-01 -2.97005445e-01 7.74299145e-01 ... -3.64757292e-02 5.62008560e-01 -5.43986619e-01] [ 5.36342449e-02 -3.73325855e-01 -8.21305960e-02 ... 1.40285030e-01 -6.52210832e-01 7.79664099e-01] ... [-2.82738686e-01 2.43306726e-01 -5.41755259e-01 ... -1.02297038e-01 3.36489111e-01 2.06337035e-01] [ 9.81646240e-01 4.07452434e-01 5.13506532e-01 ... 7.73558378e-01 -7.58197725e-01 3.81810330e-02] [-3.18513006e-01 1.92406431e-01 5.55337593e-02 ... 4.18885410e-01 2.07920253e-01 3.73402536e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 5} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': True, 'eps': 0.0} ] | 0.23 | |
|
----------------------------- Captured stdout call ----------------------------- 5 graph(%self : __torch__.test_group_norm.___torch_mangle_4628.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : NoneType = prim::Constant() %4 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %5 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %6 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %7 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=0.]() %self.bias : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.3995 0.8080 1.3828 0.7134 0.3648 0.5625 [ CPUFloatType{6} ]]() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value=-0.2132 1.3857 -0.8916 0.0795 0.2557 -0.8335 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %13 : int = aten::size(%x.1, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %14 : int = aten::mul(%12, %13) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int = aten::floordiv(%14, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %16 : int[] = prim::ListConstruct(%15, %self.n_groups) %17 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::slice(%17, %self.n_groups, %3, %5) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %19 : int[] = aten::list(%18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %20 : int[] = aten::add(%16, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%20, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %22 : int = aten::len(%20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %23 : int = aten::sub(%22, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%23, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %27 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %28 : int = aten::__getitem__(%20, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %28) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %30 : bool = aten::eq(%size_prods, %5) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%30) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %31 : str = aten::format(%6, %20) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%31, %7) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %32 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%32) (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4873691 (Squeeze_4873690[0]:i64[], Constant_4873645[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4873691': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4876631 (Squeeze_4876630[0]:i64[], Constant_4876585[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4876631': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4879429 (Squeeze_4879428[0]:i64[], Constant_4879383[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4879429': Arguments do not have the safw_re: [[[[[ 4.68702912e-01 1.01721548e-01 5.28469563e-01 ... -3.51550803e-02 2.39351645e-01 7.55955398e-01] [ 3.24727744e-01 3.11605960e-01 5.32826960e-01 ... 4.73871768e-01 5.30514479e-01 4.97805506e-01] [ 3.24432969e-01 5.10638297e-01 6.70931458e-01 ... 3.60091448e-01 3.80762070e-01 1.87429130e-01] ... [ 2.12717474e-01 3.31430405e-01 2.94324994e-01 ... 3.85380119e-01 7.34339237e-01 6.86820090e-01] [ 5.01417756e-01 3.58302683e-01 4.10954714e-01 ... 4.82189983e-01 6.39897764e-01 4.62998122e-01] [ 1.82154551e-01 3.52491796e-01 1.40732646e-01 ... 
4.70627159e-01 3.84994149e-01 1.96838886e-01]] [[ 6.43476069e-01 1.93257987e-01 3.42870623e-01 ... 3.21597248e-01 7.25144863e-01 4.52072084e-01] [ 4.44445282e-01 5.27227104e-01 4.31656539e-01 ... 4.29812938e-01 5.30946791e-01 5.23218870e-01] [ 1.35311052e-01 4.31600094e-01 4.04896647e-01 ... 4.60632801e-01 6.18196130e-01 3.22423637e-01] ... [-2.09412217e-01 6.13190234e-01 4.06276703e-01 ... 4.43411589e-01 5.48118174e-01 4.01180506e-01] [ 7.34042168e-01 5.75971425e-01 2.93202728e-01 ... 6.48907721e-01 6.08007967e-01 1.18752137e-01] [ 1.32887229e-01 2.65928179e-01 5.78338563e-01 ... 2.72237062e-01 5.28859973e-01 7.67821133e-01]] [[ 1.78325072e-01 5.44506252e-01 2.60623693e-01 ... 4.35059249e-01 6.11762881e-01 3.73030722e-01] [ 7.31586158e-01 4.46384370e-01 1.18960284e-01 ... 7.24158406e-01 8.92192662e-01 4.80917364e-01] [ 2.71975011e-01 2.94592053e-01 5.27561545e-01 ... 7.67591476e-01 2.91972071e-01 5.33621192e-01] ... [ 3.59263241e-01 5.50890148e-01 6.52684569e-01 ... 5.61425328e-01 -1.52870510e-02 4.72879142e-01] [ 2.96567619e-01 5.30464798e-02 3.73860538e-01 ... 1.64213747e-01 1.61211237e-01 4.24042821e-01] [ 4.54798728e-01 3.34856957e-01 4.49033856e-01 ... 6.64964736e-01 4.19524640e-01 1.95139959e-01]] ... [[ 2.96908170e-01 3.23092580e-01 6.34965062e-01 ... 3.90057594e-01 1.44544229e-01 5.81865609e-01] [ 3.87782007e-01 3.61145735e-02 5.70634782e-01 ... 5.23876965e-01 2.61810690e-01 5.83750486e-01] [ 2.80567646e-01 2.96312451e-01 -2.73648620e-01 ... 1.37376547e-01 3.94192219e-01 5.99966049e-01] ... [ 2.69306839e-01 2.45935112e-01 8.67892444e-01 ... 1.98662162e-01 5.56318879e-01 3.86092752e-01] [ 6.03147566e-01 5.77327669e-01 8.13087225e-01 ... 3.76944929e-01 5.63739538e-01 3.75561059e-01] [ 3.38261239e-02 3.64959985e-01 2.60764152e-01 ... 4.61141348e-01 2.92527765e-01 5.88702202e-01]] [[ 5.08160949e-01 1.33848518e-01 5.81450388e-02 ... 3.42052281e-01 4.28883433e-01 4.42509443e-01] [ 1.09458968e-01 2.61673778e-01 2.98091114e-01 ... 
2.28503451e-01 3.89691293e-01 6.96976066e-01] [ 4.89046752e-01 5.68028212e-01 3.86470973e-01 ... 7.08099842e-01 5.19507468e-01 8.32801104e-01] ... [ 5.87224364e-01 4.38042581e-01 5.12202322e-01 ... 3.22141290e-01 2.83799201e-01 1.64141729e-01] [ 4.88639683e-01 6.65702820e-02 7.59515107e-01 ... 5.90039551e-01 3.49152505e-01 4.52343136e-01] [ 2.80288517e-01 4.83431488e-01 3.97491992e-01 ... 6.09346509e-01 2.95074105e-01 4.36228305e-01]] [[ 2.95254171e-01 4.78034854e-01 3.48819464e-01 ... 4.52705711e-01 2.89485633e-01 4.12739635e-01] [ 5.73918045e-01 2.75201052e-01 4.21147764e-01 ... 7.31868863e-01 5.91974914e-01 1.46693304e-01] [ 2.66985267e-01 1.55220747e-01 5.55276632e-01 ... 3.94393876e-02 2.78511912e-01 5.74410081e-01] ... [ 3.07991385e-01 6.91426516e-01 4.29193348e-01 ... 1.29728511e-01 3.60857606e-01 3.61330770e-02] [ 7.98680902e-01 2.30760291e-01 2.03206509e-01 ... 2.78942496e-01 5.91856003e-01 4.57751036e-01] [ 1.56810358e-01 1.13960192e-01 8.24745953e-01 ... 4.71349895e-01 2.30782881e-01 5.57056427e-01]]] [[[ 1.94060393e-02 1.98005164e+00 -1.07206559e+00 ... 3.73928070e-01 4.10667562e+00 -4.38653082e-01] [ 2.12265635e+00 1.89194489e+00 1.12842035e+00 ... 1.00931454e+00 2.58265543e+00 9.11106646e-01] [ 8.81454051e-01 1.81315172e+00 -1.19821107e+00 ... -1.13123178e-01 -7.84831583e-01 -1.38709234e-04] ... [-9.56466675e-01 -1.98649362e-01 -2.82102525e-02 ... 2.75032711e+00 7.48311996e-01 2.81640553e+00] [ 1.94695127e+00 -8.43594670e-01 1.86332250e+00 ... 8.54615271e-01 -7.39444017e-01 -3.50560308e-01] [ 8.29064012e-01 -6.31342530e-01 2.47784182e-01 ... 4.49210554e-01 1.78011727e+00 3.51218081e+00]] [[ 7.51599550e-01 1.26749086e+00 2.00256729e+00 ... 5.24763882e-01 7.84258693e-02 1.54593289e+00] [ 1.38584471e+00 8.33940446e-01 6.13908350e-01 ... 1.15616369e+00 3.20326462e-02 1.06654930e+00] [-6.06139958e-01 -8.13824654e-01 2.34466624e+00 ... -8.12815845e-01 -1.15251160e+00 3.56254354e-02] ... [ 8.00345123e-01 1.60189688e+00 -1.29818749e+00 ... 
-1.94107264e-01 4.73935097e-01 -9.02002454e-02] [ 4.02188969e+00 -4.06820148e-01 2.09282231e+00 ... 1.70053601e+00 -2.82283753e-01 1.74266911e+00] [ 3.36005807e-01 4.64617682e+00 2.47191215e+00 ... -5.61378717e-01 7.66520381e-01 1.78144860e+00]] [[-9.82569396e-01 1.82486743e-01 -1.68913460e+00 ... 2.24500489e+00 7.96647668e-01 1.88222814e+00] [ 1.11053419e+00 -1.40591466e+00 2.07561398e+00 ... 2.12482262e+00 6.90135300e-01 2.59620118e+00] [ 1.99380410e+00 1.53547263e+00 1.74699283e+00 ... 2.26008415e+00 3.39677882e+00 4.90270472e+00] ... [ 6.96659267e-01 2.19908118e+00 1.88681161e+00 ... 1.50246358e+00 5.42122304e-01 8.38355839e-01] [ 9.70299661e-01 1.45857644e+00 4.63120031e+00 ... -1.43920779e-01 2.70413756e-01 -1.36967468e+00] [ 1.74093992e-01 1.86678684e+00 1.68629003e+00 ... 2.92077041e+00 5.87880313e-02 5.06003976e-01]] ... [[-3.99690598e-01 7.49868274e-01 -6.31911457e-02 ... 3.76706958e-01 2.19262719e+00 -8.13199580e-01] [ 1.06731498e+00 -6.19601727e-01 1.49556887e+00 ... -5.81805110e-01 4.83874530e-01 2.90578771e+00] [ 1.98150849e+00 3.02016473e+00 8.12260747e-01 ... -7.07047284e-01 8.82594943e-01 -1.72166872e+00] ... [ 1.09700572e+00 1.45862305e+00 -6.35904372e-01 ... 1.79438806e+00 6.92631423e-01 2.83095270e-01] [ 2.32592797e+00 1.62999153e+00 1.84932256e+00 ... 1.43421698e+00 -3.08386356e-01 1.32763064e+00] [-4.91997506e-03 1.00581574e+00 6.55813932e-01 ... 1.50111818e+00 2.31285596e+00 3.27478528e+00]] [[-2.05971003e-01 7.13986039e-01 1.32333517e+00 ... 2.10184526e+00 4.51562256e-01 -8.60381901e-01] [ 1.82152939e+00 1.47373044e+00 1.87105274e+00 ... -6.18970878e-02 1.02932477e+00 -6.04133546e-01] [ 1.44198716e+00 2.16785574e+00 2.20439196e+00 ... 1.57613981e+00 -3.60334009e-01 -2.77728939e+00] ... [ 1.82357776e+00 2.20921016e+00 -1.84591568e+00 ... 2.30899620e+00 9.23236012e-01 2.19863272e+00] [-2.10998988e+00 9.19050872e-01 -4.41906631e-01 ... -6.24429770e-02 2.69910741e+00 2.22125202e-01] [-6.07433677e-01 1.18634236e+00 -4.75092351e-01 ... 
-3.04610848e-01 2.85169125e+00 -6.98794127e-01]] [[ 1.99257040e+00 3.23922157e+00 8.60390663e-01 ... 8.86863828e-01 1.04930234e+00 2.85620427e+00] [ 4.94982034e-01 -9.99296963e-01 3.26116562e-01 ... 2.41127062e+00 2.28861642e+00 -3.66765261e-01] [ 1.11334908e+00 -3.19866800e+00 -4.44983184e-01 ... 2.96545482e+00 9.49235499e-01 -9.92853820e-01] ... [ 9.33565557e-01 8.57120454e-01 3.44287306e-01 ... 1.52180982e+00 1.95221233e+00 -4.08455521e-01] [ 1.77038836e+00 3.72061110e+00 7.34955072e-01 ... -9.56092656e-01 8.97078633e-01 8.36762071e-01] [-2.37626955e-01 6.89483523e-01 1.42670035e+00 ... 2.82012844e+00 1.19586015e+00 1.47777259e+00]]] [[[ 1.66460335e+00 -6.93766102e-02 -5.96388757e-01 ... 1.72837365e+00 9.27043974e-01 1.53360033e+00] [ 1.90856802e+00 1.83842170e+00 7.83970296e-01 ... 2.35048938e+00 1.60305274e+00 2.36678672e+00] [ 1.39941323e+00 2.26732230e+00 1.80849493e+00 ... 1.80950582e+00 2.15614128e+00 1.49309945e+00] ... [ 1.63688910e+00 1.43081796e+00 1.87689710e+00 ... 1.32775927e+00 -2.98077930e-02 9.35188413e-01] [ 1.78217542e+00 8.97289738e-02 2.43245649e+00 ... 2.04927397e+00 1.86196291e+00 1.82181501e+00] [ 5.38955092e-01 6.97260559e-01 8.71467650e-01 ... 2.69022489e+00 1.22035813e+00 2.71172047e+00]] [[ 9.10737693e-01 9.52296197e-01 7.11389065e-01 ... 8.41958523e-01 2.43957281e+00 2.59358811e+00] [ 1.67734921e+00 1.78653300e+00 1.60335970e+00 ... 1.47604358e+00 1.48809147e+00 2.15902901e+00] [ 8.42271388e-01 9.44193900e-01 6.06362939e-01 ... 1.86012566e+00 1.24606764e+00 2.47421336e+00] ... [ 2.29802299e+00 1.18367457e+00 1.87445915e+00 ... 1.45483506e+00 9.85832512e-01 1.59895849e+00] [ 8.04081619e-01 2.84676456e+00 -7.38525093e-01 ... 9.46842730e-01 1.68226480e+00 2.68815398e-01] [ 1.12980676e+00 1.11097351e-01 6.42829120e-01 ... 1.11247671e+00 2.61091232e+00 2.05433631e+00]] [[ 2.79889512e+00 -1.84934884e-01 1.83456361e+00 ... 2.33848715e+00 1.79289901e+00 1.49744010e+00] [ 3.22457409e+00 2.02288008e+00 1.48731744e+00 ... 
6.86255395e-01 2.38714886e+00 3.52135807e-01] [ 2.28460684e-01 1.81605649e+00 2.05746984e+00 ... 2.80626798e+00 1.46042848e+00 1.03235030e+00] ... [ 1.02477670e+00 4.05139923e-01 3.07483315e+00 ... 2.05514050e+00 4.54212093e+00 2.04286671e+00] [ 2.31079865e+00 1.13765550e+00 9.15177166e-01 ... 1.25589979e+00 2.84588754e-01 8.83538485e-01] [ 8.99258971e-01 8.53023469e-01 8.11442509e-02 ... 1.90163100e+00 2.98315501e+00 2.07105184e+00]] ... [[ 1.04217792e+00 2.33185196e+00 2.28439522e+00 ... 1.16879213e+00 1.65830284e-01 2.41331053e+00] [ 1.88133335e+00 1.27559507e+00 1.44843781e+00 ... 2.06589603e+00 2.46081686e+00 1.07845330e+00] [ 1.57272458e+00 8.86457145e-01 1.76335466e+00 ... 6.27657622e-02 2.15263486e+00 -7.06780702e-02] ... [ 4.93546873e-01 -3.95463437e-01 1.94543636e+00 ... 7.59157598e-01 5.35611391e-01 1.71913862e+00] [ 1.02956235e+00 1.97120383e-01 1.60813439e+00 ... 1.98578262e+00 1.44116473e+00 1.71276200e+00] [ 2.12284565e+00 2.74933076e+00 2.28441930e+00 ... 1.73491144e+00 2.45292211e+00 2.28152204e+00]] [[ 2.68344665e+00 7.12303519e-01 2.14628839e+00 ... 2.60958344e-01 -1.09162919e-01 9.67291296e-01] [ 2.43103719e+00 2.32876301e+00 2.46121034e-01 ... 3.71164232e-01 1.20922303e+00 3.13818812e+00] [ 1.02343667e+00 2.21268487e+00 4.09686297e-01 ... 1.42517781e+00 2.93254328e+00 1.31593907e+00] ... [ 2.87862241e-01 7.72045970e-01 3.66886199e-01 ... 1.37689590e+00 1.50987744e+00 1.01392932e-01] [ 1.45858967e+00 4.45381075e-01 1.14199638e+00 ... 4.82051671e-01 1.97403848e+00 -4.32077467e-01] [ 2.96076357e-01 2.17049265e+00 1.31270480e+00 ... 9.64168072e-01 1.51808190e+00 1.23074985e+00]] [[ 2.33071589e+00 6.89863861e-01 1.18870401e+00 ... 1.08370638e+00 2.50684285e+00 1.52956760e+00] [ 1.58664024e+00 2.03089595e-01 1.84426022e+00 ... 1.23789501e+00 6.67806208e-01 1.44521236e+00] [ 1.37223685e+00 8.49983752e-01 1.04447055e+00 ... 1.24850535e+00 5.48728824e-01 2.09120202e+00] ... [ 2.54216105e-01 4.49319720e-01 2.15219235e+00 ... 
1.03924239e+00 2.22566199e+00 2.11516094e+00] [ 9.65969801e-01 3.94276977e+00 3.59425426e-01 ... 7.79154956e-01 -1.01361036e-01 9.18947279e-01] [ 7.12427676e-01 1.97726357e+00 1.54981351e+00 ... 1.36347461e+00 8.64567637e-01 7.49119878e-01]]] [[[ 7.52902985e-01 9.33653057e-01 8.24577332e-01 ... 7.51501501e-01 6.65525079e-01 7.60460794e-01] [ 6.49205863e-01 7.12171197e-01 7.49642670e-01 ... 7.06996262e-01 8.57294261e-01 6.46296442e-01] [ 7.85934150e-01 8.67047012e-01 7.83935130e-01 ... 6.61983728e-01 6.80897951e-01 6.32750034e-01] ... [ 7.72619247e-01 5.93817115e-01 6.57173932e-01 ... 8.35437179e-01 7.79299140e-01 7.93354750e-01] [ 6.67889357e-01 5.60499966e-01 5.58889151e-01 ... 6.63753510e-01 7.13371873e-01 5.83773255e-01] [ 6.56019986e-01 7.48616159e-01 6.25597596e-01 ... 7.18255699e-01 5.49246192e-01 7.78516531e-01]] [[ 7.12199330e-01 7.31800497e-01 7.25915372e-01 ... 7.34994471e-01 7.55326390e-01 7.20179498e-01] [ 7.05013037e-01 7.65033901e-01 8.25385690e-01 ... 7.12715626e-01 6.21203125e-01 7.62296319e-01] [ 5.93883693e-01 6.87837183e-01 6.95381522e-01 ... 7.33377874e-01 7.58625031e-01 8.07950497e-01] ... [ 6.08734369e-01 5.48574984e-01 6.71817362e-01 ... 7.62890279e-01 8.33617032e-01 7.93072462e-01] [ 6.69956505e-01 5.49169600e-01 7.99423933e-01 ... 6.80900514e-01 8.78568470e-01 6.57895565e-01] [ 6.59509540e-01 6.45950675e-01 7.75162399e-01 ... 6.55392289e-01 7.66094923e-01 7.72379160e-01]] [[ 6.56246841e-01 7.53057182e-01 8.33060503e-01 ... 8.21636915e-01 7.78818190e-01 7.93276131e-01] [ 7.26986766e-01 8.96280527e-01 6.80760860e-01 ... 7.24984109e-01 7.30795085e-01 6.70206487e-01] [ 7.15278804e-01 8.53062212e-01 8.33234072e-01 ... 6.57062531e-01 6.85951531e-01 8.20934415e-01] ... [ 7.02393830e-01 7.68118739e-01 7.90433526e-01 ... 7.34724164e-01 6.78941786e-01 8.26649606e-01] [ 6.29335821e-01 8.23703408e-01 7.52981603e-01 ... 6.49785161e-01 8.57274830e-01 6.75086677e-01] [ 7.74016798e-01 6.90107763e-01 6.23368740e-01 ... 
5.79516292e-01 5.74612558e-01 5.69153786e-01]] ... [[ 5.64774036e-01 7.09915340e-01 6.16010725e-01 ... 7.25594878e-01 7.49028325e-01 5.76282561e-01] [ 8.15888882e-01 7.10302830e-01 7.52876461e-01 ... 7.27456033e-01 7.33244359e-01 5.97857356e-01] [ 8.09648097e-01 6.45969987e-01 6.65753484e-01 ... 7.63215363e-01 7.33533859e-01 6.51468217e-01] ... [ 7.50051141e-01 6.82893753e-01 6.97169244e-01 ... 4.94173735e-01 6.51446104e-01 7.43587852e-01] [ 9.16738153e-01 7.90855169e-01 6.18027747e-01 ... 6.56189203e-01 6.89354539e-01 7.99154639e-01] [ 8.19255233e-01 7.39713788e-01 6.98804140e-01 ... 6.85941994e-01 7.55938351e-01 8.39984655e-01]] [[ 6.36763752e-01 8.06693554e-01 5.23950815e-01 ... 6.91759288e-01 7.32493758e-01 7.66702831e-01] [ 7.88847446e-01 5.81672847e-01 6.85055971e-01 ... 8.20412755e-01 7.31131852e-01 6.43490791e-01] [ 7.37922549e-01 7.18733490e-01 7.98934698e-01 ... 8.20054650e-01 7.06193984e-01 8.42108667e-01] ... [ 6.53927267e-01 7.43299663e-01 7.77520835e-01 ... 7.87022531e-01 7.89995730e-01 6.26933515e-01] [ 7.03302622e-01 5.11734664e-01 6.74830794e-01 ... 7.84597158e-01 7.82236397e-01 6.61256969e-01] [ 5.88804781e-01 7.91473687e-01 5.06765246e-01 ... 6.71616733e-01 7.10061789e-01 6.64958596e-01]] [[ 8.23049903e-01 7.22534776e-01 7.56531596e-01 ... 7.34712541e-01 8.11234474e-01 7.48653054e-01] [ 6.18227959e-01 8.16395402e-01 7.65955210e-01 ... 6.54659331e-01 5.53853095e-01 5.66884577e-01] [ 5.92369676e-01 6.08860135e-01 6.57892823e-01 ... 7.94368148e-01 7.41447330e-01 7.87310541e-01] ... [ 8.39636624e-01 6.84506893e-01 6.63589478e-01 ... 7.13116825e-01 6.66971803e-01 6.67328656e-01] [ 7.85979867e-01 5.81227422e-01 6.53898656e-01 ... 6.84326887e-01 6.85165346e-01 7.04061866e-01] [ 7.54726946e-01 8.42109203e-01 6.48029149e-01 ... 6.60241723e-01 6.16744220e-01 7.91522205e-01]]] [[[ 3.65405768e-01 5.32548606e-01 6.50851786e-01 ... 3.24810416e-01 4.08883274e-01 5.63345961e-02] [ 2.58032113e-01 4.89127815e-01 2.73249894e-01 ... 
2.55622596e-01 2.63816893e-01 1.94141835e-01] [ 5.34224987e-01 1.90369487e-01 2.66272843e-01 ... -1.40483111e-01 -6.62857154e-03 3.25419933e-01] ... [ 1.64455146e-01 5.13893843e-01 1.72216266e-01 ... 3.64473820e-01 4.17059302e-01 2.68494695e-01] [ 7.60648489e-01 3.46222460e-01 8.50481093e-01 ... 3.34039867e-01 4.37567919e-01 2.33943477e-01] [ 1.34526446e-01 4.65921015e-01 4.19743478e-01 ... 9.79400501e-02 2.04944566e-01 5.15626073e-01]] [[-1.77883618e-02 3.47011000e-01 2.90032439e-02 ... 4.37260538e-01 8.41147542e-01 4.37955782e-02] [ 3.77937168e-01 3.04458171e-01 6.26641393e-01 ... 6.79435804e-02 4.23687875e-01 5.96424580e-01] [ 6.01038277e-01 3.93491745e-01 -2.34416630e-02 ... 2.88250327e-01 3.78206432e-01 -3.76928836e-01] ... [ 6.04013145e-01 5.43826878e-01 7.09607959e-01 ... 3.76020610e-01 9.44534004e-01 3.80668670e-01] [-1.95342615e-01 2.90925115e-01 5.54420240e-03 ... 6.20821714e-01 8.16439390e-02 3.97510052e-01] [ 5.16551495e-01 1.20344367e-02 2.41647035e-01 ... 3.27582181e-01 -7.36784786e-02 6.60208285e-01]] [[ 1.12906170e+00 3.48274767e-01 9.53894615e-01 ... 4.07656804e-02 4.06657994e-01 -4.02694643e-02] [ 5.21471441e-01 1.95079505e-01 1.47755653e-01 ... 3.46890539e-01 8.52531120e-02 4.97434109e-01] [ 2.05018759e-01 -8.59116316e-02 7.44057596e-01 ... 5.78347981e-01 3.40871423e-01 5.33332825e-01] ... [ 2.39683673e-01 6.57764912e-01 2.47911096e-01 ... -2.33742744e-01 4.18254346e-01 -3.83877195e-02] [ 7.85542607e-01 3.97964805e-01 9.04498696e-02 ... 3.54106963e-01 -8.12483281e-02 7.86678076e-01] [ 4.38038021e-01 2.35781074e-01 9.51558471e-01 ... 3.12580168e-01 2.64999986e-01 1.84274703e-01]] ... [[ 7.19161570e-01 5.60432851e-01 6.27797306e-01 ... 2.78563589e-01 1.71257704e-01 5.03410935e-01] [ 4.40345705e-01 1.60998464e-01 1.06820859e-01 ... 5.60126424e-01 6.11109018e-01 5.11668146e-01] [ 2.30219275e-01 3.88721079e-01 1.68040633e-01 ... 6.67393029e-01 -1.36797398e-01 3.89224946e-01] ... [ 4.92853932e-02 5.44279993e-01 2.73888797e-01 ... 
4.90057290e-01 5.94400823e-01 3.28593016e-01] [ 5.35143137e-01 8.51415545e-02 3.62681925e-01 ... 3.12170517e-02 6.87828958e-01 4.28740054e-01] [ 5.72103977e-01 2.07227394e-01 5.54863632e-01 ... 6.99544609e-01 6.15442134e-02 4.20129836e-01]] [[ 4.67659235e-01 2.96475291e-01 2.03037545e-01 ... 4.10894677e-02 6.10445023e-01 7.41091371e-01] [ 3.14232975e-01 5.31443954e-01 6.37278318e-01 ... 4.82207328e-01 6.71567857e-01 4.18981522e-01] [ 8.35147679e-01 4.13913906e-01 5.22797167e-01 ... 5.51851869e-01 5.07266164e-01 4.64926481e-01] ... [ 7.98251688e-01 4.45009708e-01 5.33208072e-01 ... 3.63436900e-02 -3.07562917e-01 -9.32299271e-02] [ 6.21843457e-01 4.82501447e-01 2.01177374e-01 ... 3.02958727e-01 3.53074253e-01 2.21416399e-01] [ 3.82277697e-01 4.22128648e-01 4.82487351e-01 ... 8.93554568e-01 6.03119910e-01 3.53037179e-01]] [[ 6.35055155e-02 6.19999886e-01 3.82071018e-01 ... 5.81578672e-01 4.30134416e-01 6.51376903e-01] [ 6.75337017e-01 2.59113491e-01 4.15761113e-01 ... 5.17099679e-01 4.15822357e-01 5.17859817e-01] [ 7.58247435e-01 4.35397863e-01 2.93978244e-01 ... 4.82633598e-02 4.38956767e-01 -1.20036140e-01] ... [ 2.28209645e-01 4.24703687e-01 9.95877460e-02 ... 1.85832724e-01 2.16356546e-01 2.61935383e-01] [ 5.10771036e-01 7.73721814e-01 6.29019260e-01 ... 2.58869439e-01 4.39227194e-01 1.05140500e-01] [ 7.40358651e-01 3.96758616e-01 3.84101450e-01 ... 9.62103188e-01 5.67437828e-01 6.51409701e-02]]] [[[-3.95988256e-01 -2.30918273e-01 1.29232502e+00 ... -4.70118895e-02 2.29621217e-01 6.58594131e-01] [-1.65124738e+00 7.69711956e-02 -1.34680605e+00 ... 9.62616205e-01 -8.92933607e-02 9.53821480e-01] [ 1.30752397e+00 -5.28068915e-02 -3.99799943e-01 ... 2.80008495e-01 4.08822179e-01 1.05525613e+00] ... [-3.56301889e-02 8.55566621e-01 1.26795363e+00 ... -6.54091775e-01 1.00153637e+00 1.18532062e+00] [-8.78611878e-02 4.01371419e-01 2.45690480e-01 ... 9.60126460e-01 1.82742119e+00 -8.38864565e-01] [ 1.62546579e-02 9.50069726e-01 2.03206822e-01 ... 
7.93203712e-01 -8.16943705e-01 1.95447072e-01]] [[ 5.88944554e-01 1.81855857e+00 -9.57065701e-01 ... 9.55133200e-01 1.68340012e-01 7.14759827e-01] [-3.57854247e-01 1.31401002e+00 6.15411639e-01 ... 1.04696250e+00 1.50623888e-01 8.98844004e-01] [ 7.52191782e-01 -1.70186982e-01 -1.37107521e-01 ... 1.67715859e+00 9.78372097e-01 -8.05198729e-01] ... [-9.58557963e-01 -3.67058180e-02 1.58086646e+00 ... 2.25700364e-01 -4.42830861e-01 3.83443266e-01] [ 8.20990205e-01 7.16583967e-01 4.41686362e-01 ... 1.76836431e+00 1.35190928e+00 6.58599555e-01] [ 1.91628015e+00 1.23088348e+00 7.55543351e-01 ... 1.95849597e+00 1.31950629e+00 -1.01177013e+00]] [[-2.38694414e-01 7.82101870e-01 1.25162482e+00 ... 7.65599787e-01 1.47272015e+00 2.17636013e+00] [ 3.84827793e-01 1.33806026e+00 5.85000873e-01 ... 9.30527210e-01 5.21560609e-01 1.81936598e+00] [ 1.70949149e+00 5.11901379e-01 -3.25256348e-01 ... 7.17163831e-02 7.38629401e-02 -1.81852341e-01] ... [ 1.29418099e+00 9.06866416e-02 1.79913878e-01 ... 5.23404300e-01 6.72231257e-01 1.14406872e+00] [-3.55295300e-01 1.53115225e+00 3.61405402e-01 ... -4.41371620e-01 8.63671482e-01 -6.37933373e-01] [-7.77892843e-02 1.01347923e+00 -8.10930908e-01 ... 1.31530654e+00 1.13793933e+00 1.83083153e+00]] ... [[ 6.16365850e-01 8.16457093e-01 6.02220953e-01 ... -4.27356176e-02 -1.02294612e+00 9.14536476e-01] [-1.02070737e+00 8.87414396e-01 1.24676454e+00 ... 8.93223584e-02 5.65901101e-01 5.95929444e-01] [-7.45813310e-01 -1.41051859e-01 9.61943090e-01 ... 1.62645423e+00 -8.29467624e-02 -5.02535462e-01] ... [ 1.69434404e+00 7.31812596e-01 4.39555831e-02 ... 1.47858620e+00 7.80610859e-01 3.35806608e-01] [ 4.48256254e-01 1.02303183e+00 3.12439799e-01 ... 3.26830119e-01 2.91323423e-01 2.16838375e-01] [ 4.52320695e-01 1.97446615e-01 1.24494076e+00 ... 6.55867696e-01 5.68056583e-01 8.01276147e-01]] [[ 1.43391967e+00 -1.73592478e-01 4.09888178e-02 ... 2.44782612e-01 1.38970041e+00 1.45388019e+00] [ 2.83636302e-01 1.23899472e+00 -1.17771256e+00 ... 
1.04902339e+00 1.28191972e+00 1.20683372e+00] [ 1.16977584e+00 -3.16835754e-02 4.20904428e-01 ... 6.38318837e-01 2.67951012e-01 8.72067392e-01] ... [ 1.55529749e+00 1.02994823e+00 5.84417224e-01 ... -1.18192043e-02 4.24289942e-01 3.71135980e-01] [-8.68641138e-01 4.83533710e-01 6.20458536e-02 ... 8.49862278e-01 9.85335171e-01 3.16284239e-01] [ 3.70174974e-01 -8.78897235e-02 4.12116319e-01 ... -2.90552825e-01 -3.89320925e-02 1.36941159e+00]] [[ 1.30632794e+00 7.10800707e-01 4.51751828e-01 ... -4.47727680e-01 1.06343246e+00 5.22946537e-01] [ 1.84958076e+00 -2.75963873e-01 3.50109488e-01 ... 1.12152362e+00 1.09976208e+00 8.72069895e-01] [ 5.47275841e-01 5.92970312e-01 9.80451345e-01 ... 9.85605717e-01 -1.07648087e+00 1.37229323e+00] ... [ 4.87507954e-02 1.36524513e-01 -1.36976898e-01 ... 3.31158733e+00 1.81745708e+00 3.39570224e-01] [ 3.81453447e-02 9.51677799e-01 1.63484275e-01 ... 8.06509197e-01 5.88234961e-01 3.14512365e-02] [ 5.28461099e-01 1.34127426e+00 7.13026226e-01 ... -6.07236028e-02 -2.25324202e-02 1.00473452e+00]]]] [[[[ 6.31219804e-01 2.58902937e-01 4.65568364e-01 ... 2.71710515e-01 5.40818870e-01 4.02446568e-01] [ 6.59668505e-01 3.54401886e-01 -5.00364192e-02 ... 1.22971438e-01 3.09986115e-01 1.91072181e-01] [ 5.39291799e-01 3.47989500e-01 7.64465749e-01 ... 3.49364430e-01 5.21814525e-01 3.75741750e-01] ... [ 2.79121965e-01 5.14360249e-01 1.41214311e-01 ... 6.44976556e-01 3.66337806e-01 4.45480138e-01] [ 4.10882682e-01 2.82966763e-01 5.70065081e-01 ... 6.29919052e-01 4.96469587e-01 4.77399707e-01] [ 7.99980462e-01 2.73345977e-01 -6.31164089e-02 ... 2.58965671e-01 2.11757258e-01 3.20071429e-01]] [[ 2.96669811e-01 3.35407704e-01 1.42022774e-01 ... 5.74285924e-01 3.61170501e-01 4.48899150e-01] [ 7.94873089e-02 3.48414481e-01 4.56214517e-01 ... 4.77809936e-01 5.23186982e-01 3.37477863e-01] [ 3.12397927e-01 1.35818467e-01 4.60577339e-01 ... 6.08251870e-01 2.30998874e-01 4.95480001e-01] ... [ 6.33771539e-01 8.16245377e-02 2.04091623e-01 ... 
8.36099625e-01 2.69087136e-01 4.86616403e-01] [ 2.55254135e-02 2.89801240e-01 2.08143480e-02 ... 6.09088063e-01 4.51472104e-01 6.64132416e-01] [ 3.46489519e-01 3.96046638e-01 5.88737726e-01 ... 3.55857611e-01 4.78983939e-01 1.68432385e-01]] [[ 4.48439151e-01 5.39563417e-01 3.19423944e-01 ... 3.58931929e-01 4.57315117e-01 5.55252850e-01] [ 6.02510929e-01 4.94579554e-01 5.46810746e-01 ... 1.86018616e-01 -5.67519851e-02 7.96060920e-01] [ 1.92638353e-01 7.54773438e-01 1.72758698e-01 ... 3.86086345e-01 6.65704429e-01 3.79047543e-01] ... [ 5.58685362e-01 6.21197283e-01 3.95295262e-01 ... 3.33028764e-01 7.78522110e-03 2.90610284e-01] [ 2.58119881e-01 5.22861600e-01 5.52498221e-01 ... 2.20393449e-01 1.50976345e-01 5.94027281e-01] [ 5.10999978e-01 3.40924829e-01 3.25022429e-01 ... 4.86717999e-01 4.37725902e-01 3.71181875e-01]] ... [[ 3.16359460e-01 2.76568383e-01 5.17981231e-01 ... 5.10054350e-01 4.93492842e-01 1.60486341e-01] [ 3.18315357e-01 7.15145946e-01 3.84061545e-01 ... 5.44066191e-01 4.31002021e-01 1.96727231e-01] [ 2.83642858e-02 2.42121726e-01 1.75185278e-01 ... 7.18164563e-01 3.80081207e-01 7.56795347e-01] ... [ 6.00851119e-01 7.13345826e-01 1.37770370e-01 ... 5.75485945e-01 4.25680727e-01 3.62896264e-01] [ 3.53410751e-01 3.02653491e-01 2.29047462e-01 ... 4.80213076e-01 1.11971423e-01 3.31088901e-01] [ 6.13380075e-01 5.31318504e-03 6.32691801e-01 ... 2.96093732e-01 5.31639397e-01 9.48384523e-01]] [[ 8.06413770e-01 5.01150608e-01 4.08744395e-01 ... 6.05245471e-01 4.80166316e-01 4.81849223e-01] [ 4.67331141e-01 1.53977415e-02 5.32749414e-01 ... 3.00033808e-01 2.13092774e-01 5.34757435e-01] [ 2.39578888e-01 5.02266049e-01 4.50621158e-01 ... 7.38518266e-03 3.34571004e-01 5.08361518e-01] ... [ 3.88403147e-01 3.43731195e-01 3.93527657e-01 ... 6.18567944e-01 1.35021448e-01 4.47383761e-01] [ 2.11777940e-01 5.19468427e-01 5.74247632e-03 ... 4.46898937e-01 6.73572302e-01 2.97055513e-01] [ 4.00555134e-01 3.25962514e-01 4.13869172e-01 ... 
3.52892607e-01 -1.19100943e-01 -4.09169085e-02]] [[ 1.71648487e-01 4.12767470e-01 4.39303726e-01 ... 4.03235465e-01 3.47271919e-01 -3.23794000e-02] [ 3.80994618e-01 -3.20186801e-02 7.22058773e-01 ... 5.61565518e-01 3.98451000e-01 2.65382022e-01] [ 5.98337233e-01 2.38738239e-01 3.80783260e-01 ... 5.06394446e-01 3.04475695e-01 4.01353762e-02] ... [-4.18353314e-03 6.68063760e-01 6.83437169e-01 ... 6.12830043e-01 1.39739782e-01 5.37067115e-01] [-6.87656105e-02 3.61556262e-01 5.99162757e-01 ... 3.15553814e-01 4.83312935e-01 7.85554290e-01] [ 7.32481897e-01 1.85717687e-01 8.61168027e-01 ... 5.49017012e-01 5.05673647e-01 5.82514167e-01]]] [[[-2.48816037e+00 2.84298901e-02 1.01406562e+00 ... -1.19829810e+00 -1.70555890e-01 4.59705740e-01] [ 6.45676017e-01 9.86559749e-01 1.82843530e+00 ... -8.89309719e-02 1.67086637e+00 -5.61187565e-01] [ 2.18091679e+00 3.13429022e+00 -1.17776656e+00 ... 2.26487541e+00 -2.98416764e-01 1.55487847e+00] ... [-1.52654290e+00 -2.17543960e-01 -7.11718917e-01 ... -1.04722965e+00 2.43548250e+00 -4.73683387e-01] [ 3.21289480e-01 -2.61037922e+00 -1.54484238e-03 ... -3.01025569e-01 2.39330935e+00 2.24637508e+00] [ 2.93194389e+00 1.69298720e+00 3.42448497e+00 ... 1.53304172e+00 1.03328478e+00 2.23574042e+00]] [[ 1.21165955e+00 2.88674116e-01 1.26311734e-01 ... 2.46404719e+00 2.72295761e+00 2.50128460e+00] [-1.07289732e+00 7.65920460e-01 2.77779603e+00 ... 2.63323259e+00 1.70686436e+00 2.81403828e+00] [ 1.20341837e+00 1.30907094e+00 2.47571731e+00 ... 7.68948793e-01 3.91021109e+00 3.45522857e+00] ... [ 1.99765623e+00 -6.82364702e-02 1.06959879e+00 ... 1.90578604e+00 6.49082541e-01 5.79016566e-01] [ 5.04054487e-01 1.51498640e+00 1.84297800e+00 ... 5.49660861e-01 1.34583056e+00 1.37265399e-01] [ 7.20315650e-02 -9.74704474e-02 -1.27040669e-01 ... 1.97862136e+00 1.92950523e+00 1.13825667e+00]] [[ 1.13766259e-02 2.00434566e+00 4.59332848e+00 ... 2.23740077e+00 3.29612017e+00 9.03223813e-01] [ 1.27980983e+00 2.62361646e+00 1.45590484e+00 ... 
-1.22375143e+00 1.38807786e+00 1.26259375e+00] [ 9.59169328e-01 -6.98500097e-01 8.70906770e-01 ... 1.80754614e+00 -1.13252366e+00 8.12451839e-01] ... [ 3.75991344e+00 4.05530781e-01 1.36993754e+00 ... 8.43908846e-01 2.00362706e+00 -6.52649641e-01] [ 1.40639985e+00 -1.87038672e+00 1.27477616e-01 ... 1.03405309e+00 7.17845082e-01 -2.13447526e-01] [ 2.36469507e+00 -2.40490213e-01 1.18628049e+00 ... 4.74373341e-01 1.30343640e+00 2.75393110e-02]] ... [[ 1.67621958e+00 -1.00582707e+00 -1.77361333e+00 ... 1.01013684e+00 1.94187427e+00 -7.71581709e-01] [ 7.43314445e-01 2.76981950e+00 9.98320341e-01 ... 6.69515371e-01 2.82248712e+00 1.42710137e+00] [ 2.67079949e-01 8.33676159e-01 1.02352297e+00 ... 3.86667103e-01 7.50612542e-02 1.63549197e+00] ... [-1.43199384e+00 4.78969961e-01 -1.47708450e-02 ... -1.00014091e+00 2.59976745e+00 9.79523242e-01] [ 6.67678490e-02 3.05088615e+00 1.67156971e+00 ... 6.74855351e-01 -1.01246154e+00 1.05800331e+00] [ 5.23260096e-03 1.65941238e+00 3.35729933e+00 ... 1.67466640e+00 1.25134349e+00 -6.28817260e-01]] [[ 1.75021839e+00 6.22534573e-01 1.41488481e+00 ... 4.30230767e-01 1.42163074e+00 1.47473967e+00] [ 2.56365490e+00 -1.17524290e+00 6.35050356e-01 ... 2.73226857e+00 -4.13667236e-04 1.72681677e+00] [ 5.85244536e-01 1.22795552e-01 2.67763638e+00 ... 1.49554145e+00 2.67042160e+00 1.00948071e+00] ... [-8.35298419e-01 -1.29041806e-01 1.31284118e+00 ... 1.09918332e+00 -1.36270654e+00 3.92844343e+00] [ 2.59130502e+00 -1.12850718e-01 9.02344346e-01 ... 1.43992826e-01 2.00207949e-01 1.53613508e+00] [ 5.26293814e-01 -7.59435654e-01 7.06645966e-01 ... 2.31762505e+00 3.20114732e+00 1.59669936e+00]] [[ 1.50837111e+00 5.14300764e-01 2.08799243e+00 ... 2.06956840e+00 9.77402508e-01 -8.50437641e-01] [ 1.53284156e+00 1.39840186e+00 -1.21846116e+00 ... 4.29030657e-01 2.51312584e-01 4.41676527e-01] [ 3.06657314e+00 1.77273011e+00 1.20233965e+00 ... 6.28912508e-01 -1.36404681e+00 2.26606667e-01] ... [ 1.10049689e+00 2.22948551e+00 -4.29557383e-01 ... 
5.04793346e-01 1.21828604e+00 2.22511554e+00] [ 1.33568716e+00 3.32030445e-01 4.92702216e-01 ... 2.05683446e+00 4.12288237e+00 5.92152059e-01] [-1.06169331e+00 2.15500832e+00 1.86164141e+00 ... 2.46578979e+00 5.45731068e+00 -9.29121152e-02]]] [[[-1.35393396e-01 6.77548707e-01 1.43928027e+00 ... 1.44554949e+00 5.70986509e-01 -7.18978047e-02] [ 7.25196064e-01 2.78246880e+00 2.09682155e+00 ... 1.71929073e+00 1.95150518e+00 8.82206023e-01] [ 8.62613797e-01 1.68301260e+00 6.32327557e-01 ... 8.78053963e-01 1.92160165e+00 1.58111060e+00] ... [ 5.00338912e-01 2.42204213e+00 -1.24946487e+00 ... 1.30160046e+00 5.35766184e-01 1.68039644e+00] [ 1.84597719e+00 1.17562962e+00 1.82020533e+00 ... 2.09165311e+00 1.90387309e+00 1.07789934e+00] [-6.01885468e-03 2.85289717e+00 1.51624453e+00 ... 8.89323831e-01 -4.83973503e-01 4.48465124e-02]] [[ 2.78271580e+00 1.62366343e+00 4.54559088e-01 ... 4.92032290e-01 6.72660768e-01 1.43742001e+00] [ 9.48530316e-01 1.05875993e+00 1.02533472e+00 ... 5.82504451e-01 3.08573556e+00 3.54298472e+00] [ 3.00101900e+00 1.58449101e+00 3.33355993e-01 ... 1.23262608e+00 8.75875652e-01 2.33976698e+00] ... [ 2.61917734e+00 1.41053998e+00 2.10190344e+00 ... 2.41568327e+00 8.97426903e-01 1.71566820e+00] [ 1.22401893e+00 4.68164861e-01 1.58782315e+00 ... 9.80919659e-01 1.66360068e+00 2.37408042e+00] [ 1.67646766e+00 5.61241269e-01 2.46672177e+00 ... 1.43791652e+00 2.76427054e+00 2.39772916e+00]] [[ 3.11132967e-01 1.57294676e-01 7.77791977e-01 ... 2.10553384e+00 1.52764583e+00 3.54671985e-01] [ 2.12517381e-01 6.58154249e-01 -2.33005434e-01 ... -5.36567494e-02 2.43781996e+00 1.53445292e+00] [ 1.82276499e+00 9.61218238e-01 3.58167410e+00 ... 1.00643969e+00 1.29717839e+00 3.13522434e+00] ... [ 1.07665312e+00 1.07090187e+00 3.66628319e-01 ... 1.32939351e+00 2.11407661e+00 1.31441152e+00] [ 8.92756045e-01 3.63233639e-03 2.94135928e+00 ... 1.89614201e+00 1.27066243e+00 2.74487853e+00] [ 4.77125198e-01 1.91999125e+00 2.51779866e+00 ... 
-3.33274640e-02 3.22189283e+00 4.93908763e-01]] ... [[ 2.09187818e+00 6.25100851e-01 9.65499878e-01 ... 3.11938167e+00 1.69909108e+00 1.72321796e+00] [ 1.96859837e+00 -1.65310666e-01 2.16403580e+00 ... 2.37183499e+00 1.31219733e+00 1.17929983e+00] [ 2.96448064e+00 2.25348294e-01 2.39575911e+00 ... 9.31577265e-01 -1.97132863e-02 2.58915687e+00] ... [ 1.98150861e+00 2.57386446e+00 2.45223427e+00 ... 2.23896444e-01 2.11468387e+00 2.17283082e+00] [ 2.42045736e+00 9.85922098e-01 4.53755781e-02 ... 2.21929145e+00 1.67336595e+00 2.29212451e+00] [ 2.10366392e+00 -3.34438503e-01 3.97813886e-01 ... 1.96995568e+00 1.38858867e+00 6.12056017e-01]] [[ 1.04299033e+00 2.53441763e+00 2.95116693e-01 ... 1.39557874e+00 1.38265300e+00 1.77119279e+00] [ 5.23745775e-01 2.67281604e+00 2.91719007e+00 ... 1.82076681e+00 1.39575148e+00 5.76375663e-01] [ 1.80218804e+00 2.28824997e+00 1.64787900e+00 ... 2.21147990e+00 1.54420662e+00 3.35856885e-01] ... [ 1.20770454e+00 1.18654060e+00 1.17967474e+00 ... -9.30078998e-02 1.98061848e+00 1.36160266e+00] [ 1.99713504e+00 2.32923031e+00 2.68129778e+00 ... 8.38168919e-01 8.96698236e-01 1.44439518e+00] [-5.61840832e-01 1.71620727e+00 8.97464633e-01 ... 1.14324903e+00 2.31319118e+00 1.03886163e+00]] [[ 5.85379004e-01 2.24868536e+00 1.53666782e+00 ... 1.59294415e+00 5.41406751e-01 2.41114426e+00] [ 1.00620282e+00 1.87295949e+00 1.84515309e+00 ... 2.23651424e-01 1.36215639e+00 1.22577155e+00] [ 1.12273061e+00 3.86847705e-01 9.89828169e-01 ... 9.36344683e-01 2.50583917e-01 1.93767607e-01] ... [-9.54482853e-01 1.52957487e+00 1.70395231e+00 ... 1.34417629e+00 1.64863682e+00 4.18502986e-01] [ 5.16126096e-01 2.15700364e+00 1.31206071e+00 ... 1.33750582e+00 2.13067532e+00 1.40411139e+00] [ 1.37339759e+00 7.72694051e-01 1.30822361e+00 ... 2.06078196e+00 2.82955736e-01 2.31912327e+00]]] [[[ 7.52736628e-01 7.22057223e-01 7.15931773e-01 ... 7.23496258e-01 8.64645600e-01 7.95314431e-01] [ 8.10344338e-01 7.44469166e-01 8.23101461e-01 ... 
6.53414011e-01 7.38209665e-01 8.59635293e-01] [ 6.92813337e-01 6.81119502e-01 6.54646218e-01 ... 7.43586540e-01 7.97206581e-01 6.58089578e-01] ... [ 7.83139706e-01 5.98567605e-01 7.38164604e-01 ... 7.39526808e-01 5.74763775e-01 6.72067285e-01] [ 8.82703722e-01 6.75900221e-01 6.26064360e-01 ... 6.87538147e-01 6.30866051e-01 6.14108980e-01] [ 8.33262146e-01 6.47922516e-01 8.51360798e-01 ... 7.55790353e-01 7.11113036e-01 7.26899266e-01]] [[ 7.19644606e-01 6.43244505e-01 6.68097496e-01 ... 7.75762081e-01 6.71392620e-01 6.47810280e-01] [ 6.78235710e-01 7.95112729e-01 6.12096190e-01 ... 7.70161331e-01 8.05922568e-01 6.04598224e-01] [ 6.29865527e-01 6.22268915e-01 5.11185884e-01 ... 8.12943339e-01 7.06925690e-01 7.82432020e-01] ... [ 6.88144624e-01 7.33382225e-01 8.12836885e-01 ... 6.97941720e-01 7.73022950e-01 9.56597805e-01] [ 5.72120190e-01 5.92554450e-01 7.71442175e-01 ... 7.54273653e-01 8.20624292e-01 8.21572959e-01] [ 7.86084712e-01 6.59637749e-01 7.54380584e-01 ... 7.58907437e-01 6.18527532e-01 6.54595733e-01]] [[ 7.37107933e-01 7.58657277e-01 5.68072736e-01 ... 6.94541872e-01 7.22899139e-01 7.64358521e-01] [ 8.07839155e-01 8.11229587e-01 7.04454303e-01 ... 8.18023622e-01 6.15438879e-01 8.52015376e-01] [ 7.90697217e-01 6.82290018e-01 8.37251842e-01 ... 6.59682989e-01 6.69222057e-01 7.29287922e-01] ... [ 7.14823067e-01 7.60399580e-01 5.43191254e-01 ... 7.88210690e-01 7.33729124e-01 7.30264962e-01] [ 6.91712320e-01 6.09564006e-01 6.42892718e-01 ... 6.95656002e-01 7.12148964e-01 8.08256745e-01] [ 8.65734935e-01 7.49724150e-01 7.83447623e-01 ... 7.67118156e-01 8.10842395e-01 6.67718410e-01]] ... [[ 6.72567189e-01 6.95549846e-01 6.89025342e-01 ... 7.38387942e-01 7.08513737e-01 6.64623380e-01] [ 6.35782123e-01 8.54863346e-01 6.11792684e-01 ... 5.95885158e-01 7.97088206e-01 7.16900587e-01] [ 6.96932018e-01 5.98570049e-01 7.12746561e-01 ... 7.94389486e-01 6.67599499e-01 7.73180783e-01] ... [ 6.91459894e-01 7.38023102e-01 6.57630384e-01 ... 
6.21349216e-01 7.23834455e-01 6.69878006e-01] [ 6.31018937e-01 6.88048542e-01 6.87020361e-01 ... 7.02203810e-01 5.95553875e-01 6.32395089e-01] [ 8.02045107e-01 4.62322325e-01 6.02849662e-01 ... 6.05328381e-01 6.99741244e-01 5.55849731e-01]] [[ 7.99840927e-01 5.68360507e-01 6.32937491e-01 ... 7.61486530e-01 6.87748909e-01 6.79686546e-01] [ 8.28998625e-01 6.71703398e-01 7.71207929e-01 ... 7.23048806e-01 7.70302534e-01 8.07977736e-01] [ 8.11811805e-01 6.49582505e-01 7.22935796e-01 ... 6.00296021e-01 8.10898542e-01 6.33480430e-01] ... [ 7.70493388e-01 6.20907247e-01 7.55384207e-01 ... 7.71061122e-01 5.78126848e-01 7.27690220e-01] [ 6.67351127e-01 9.53851342e-01 8.25231135e-01 ... 7.42627800e-01 6.71979904e-01 7.21221685e-01] [ 7.41886020e-01 5.23939431e-01 7.25048363e-01 ... 8.38963270e-01 6.31059945e-01 7.34645009e-01]] [[ 7.57678926e-01 6.89228356e-01 7.23696649e-01 ... 7.64705181e-01 7.00900435e-01 6.88637257e-01] [ 8.09466422e-01 7.80677915e-01 8.53460133e-01 ... 7.32176006e-01 7.46952653e-01 7.55534887e-01] [ 5.55869401e-01 8.09636950e-01 8.58854234e-01 ... 5.95734060e-01 7.75180221e-01 6.71059608e-01] ... [ 6.19252741e-01 7.62786329e-01 6.45711005e-01 ... 6.02722645e-01 6.38832927e-01 6.39310956e-01] [ 6.58850551e-01 7.02287078e-01 7.71013200e-01 ... 7.28918910e-01 7.48782456e-01 6.85527802e-01] [ 7.88543344e-01 7.53310263e-01 7.83317327e-01 ... 7.91403532e-01 6.31232142e-01 6.66263282e-01]]] [[[ 1.33434385e-01 1.74936995e-01 3.60153377e-01 ... 5.30495346e-01 6.42855167e-01 3.96933377e-01] [ 5.04755497e-01 4.44918811e-01 3.81301612e-01 ... 2.39196002e-01 1.35360315e-01 5.46061516e-01] [ 9.19919908e-01 3.67078185e-01 4.97788519e-01 ... 3.39093745e-01 5.80856085e-01 7.26531744e-01] ... [ 3.32742333e-01 5.12410402e-01 6.93039775e-01 ... 5.03375828e-01 2.50692219e-01 -7.71991983e-02] [ 4.51752126e-01 7.25156069e-01 -2.52941586e-02 ... 3.36713791e-01 2.80282617e-01 5.61327994e-01] [ 6.84545040e-02 -3.65069285e-02 1.69862106e-01 ... 
6.31028712e-01 1.64703041e-01 6.22369707e-01]] [[ 3.52358699e-01 6.32905722e-01 6.73356771e-01 ... 3.57637405e-01 4.40117300e-01 3.69945496e-01] [ 2.79640853e-01 2.57191062e-01 1.61894143e-01 ... 4.00080025e-01 7.32294023e-01 -6.03896007e-02] [ 8.47594619e-01 3.80739927e-01 3.12179655e-01 ... 1.58345282e-01 2.25143909e-01 8.26277494e-01] ... [ 3.57924074e-01 6.72152638e-01 3.28168452e-01 ... 5.90248108e-01 5.29186070e-01 8.14679563e-02] [ 5.95044494e-01 3.85627538e-01 6.83830500e-01 ... 5.02858877e-01 9.27397050e-03 8.61296594e-01] [ 5.58858335e-01 6.48584843e-01 3.87778848e-01 ... -1.09801572e-02 7.19727695e-01 3.53949100e-01]] [[ 3.63481462e-01 5.92446506e-01 7.80205071e-01 ... 6.12049341e-01 3.81619602e-01 2.04472080e-01] [ 3.33114266e-01 3.68059039e-01 1.93821877e-01 ... 6.21658981e-01 1.06105795e-02 5.71584463e-01] [ 4.25034523e-01 4.82854247e-01 5.74715316e-01 ... 2.80515313e-01 3.52066100e-01 3.81290466e-01] ... [ 5.69167078e-01 5.43586969e-01 2.35134780e-01 ... 3.14875662e-01 6.71814263e-01 1.39516741e-01] [ 5.08673862e-02 3.44637424e-01 3.84345382e-01 ... 5.21859169e-01 8.80345404e-01 5.96370459e-01] [ 3.09743404e-01 2.70555471e-03 1.18438736e-01 ... 3.52481492e-02 3.02975029e-01 1.64150447e-01]] ... [[ 2.85741717e-01 2.34682560e-01 -4.26826626e-01 ... 5.84933341e-01 2.35483468e-01 7.52386153e-02] [ 4.35228735e-01 4.26932782e-01 5.80311976e-02 ... 1.93029061e-01 4.58461583e-01 -1.38037667e-01] [ 4.94049162e-01 9.18892398e-02 3.17573696e-01 ... 6.42581761e-01 1.90520793e-01 4.19959664e-01] ... [ 2.66647041e-01 4.16640639e-01 2.99386710e-01 ... 4.17757362e-01 5.00582337e-01 3.13680440e-01] [ 5.33448458e-01 6.22271523e-02 4.42808092e-01 ... 2.88782030e-01 7.27189004e-01 3.66705924e-01] [ 5.19590974e-01 2.68381219e-02 4.74878222e-01 ... 2.10918169e-02 -1.11036636e-01 2.73398906e-01]] [[ 5.23811400e-01 3.91118020e-01 4.25608195e-02 ... 2.49447316e-01 3.73928457e-01 4.03779626e-01] [-9.58718136e-02 6.60640121e-01 8.68866086e-01 ... 
2.70997643e-01 5.84793329e-01 5.76106966e-01] [-4.01130691e-02 3.97828102e-01 5.19776106e-01 ... 3.36999707e-02 4.68766123e-01 4.38203141e-02] ... [ 2.12835912e-02 2.97082871e-01 7.31941283e-01 ... 3.24430734e-01 4.95302677e-01 2.39212111e-01] [ 3.28266621e-01 -7.42835132e-03 1.26497999e-01 ... 5.67132235e-01 6.01374686e-01 3.63696516e-01] [ 2.81439930e-01 2.46527568e-01 4.52765226e-01 ... 4.08145972e-02 4.25868452e-01 7.58424580e-01]] [[ 3.96053076e-01 5.81412494e-01 1.30912110e-01 ... 2.27846786e-01 4.42233443e-01 2.97610223e-01] [ 6.33721173e-01 4.14243221e-01 4.88417238e-01 ... 7.90182710e-01 5.02843797e-01 6.70101166e-01] [ 1.83124408e-01 1.82399929e-01 4.71412838e-01 ... 6.69709444e-01 5.27031347e-02 8.80638063e-01] ... [ 1.12916507e-01 1.95510119e-01 3.32307845e-01 ... 3.26779634e-01 3.32241952e-01 3.15045178e-01] [ 3.67156476e-01 4.12324876e-01 5.91799974e-01 ... 2.65034765e-01 4.54060525e-01 3.08422327e-01] [-7.26741552e-02 6.17085338e-01 4.91789579e-01 ... 3.91688854e-01 2.72535503e-01 8.42930794e-01]]] [[[ 5.91155708e-01 3.05873770e-02 8.89953732e-01 ... 2.19989967e+00 7.35114396e-01 1.04661904e-01] [ 5.59132695e-01 5.83673120e-01 1.82444179e+00 ... 2.77881086e-01 2.24504280e+00 -5.20947762e-02] [ 6.40767217e-01 -1.03863619e-01 -1.31071448e-01 ... -1.31780759e-01 9.59858179e-01 1.25393987e+00] ... [-6.81624413e-01 -3.73884708e-01 6.29357994e-02 ... 1.01145184e+00 8.25170159e-01 2.43599325e-01] [-7.05086350e-01 3.39001685e-01 3.24633092e-01 ... 5.48314452e-01 2.47453839e-01 1.23902369e+00] [-1.16228616e+00 1.00483024e+00 1.52641499e+00 ... -1.79723114e-01 3.07736188e-01 6.47169888e-01]] [[ 8.38304460e-01 3.69706810e-01 7.30324566e-01 ... -1.49556601e+00 6.88934147e-01 8.23545218e-01] [ 7.33357787e-01 3.31148922e-01 -7.95555636e-02 ... 1.09547079e+00 7.34979689e-01 1.83594382e+00] [ 1.66448855e+00 5.26281953e-01 2.30700940e-01 ... 2.03601074e+00 1.33306348e+00 6.18017837e-02] ... [ 7.80953526e-01 1.86794472e+00 3.95760268e-01 ... 
1.47703505e+00 1.84232450e+00 -1.04157209e+00] [ 1.29979658e+00 -6.68963969e-01 8.94175410e-01 ... 1.63694727e+00 -2.82906562e-01 9.10127699e-01] [ 1.05513126e-01 7.69882977e-01 3.30999106e-01 ... 7.24239707e-01 9.95448709e-01 -5.77312350e-01]] [[ 2.38735557e-01 -6.39321446e-01 1.47072005e+00 ... 1.29243290e+00 4.60950404e-01 1.07563198e+00] [ 1.76515913e+00 -3.22873741e-01 -1.48390865e+00 ... 6.02458298e-01 1.32096391e-02 -3.13997626e-01] [-5.17797589e-01 1.54560208e+00 5.29184997e-01 ... -8.86015892e-02 1.07260501e+00 1.43848169e+00] ... [ 4.60824460e-01 1.80278957e-01 2.25487709e+00 ... 6.44060433e-01 8.50825071e-01 5.15786827e-01] [ 1.00593078e+00 1.54913795e+00 6.08960092e-01 ... 1.06775963e+00 1.94112778e-01 2.03174174e-01] [ 1.06333695e-01 1.02948618e+00 -3.32684308e-01 ... 5.21792114e-01 1.50574327e+00 -3.63781542e-01]] ... [[ 2.21496709e-02 -3.44007373e-01 2.74408877e-01 ... -5.45609117e-01 1.26532435e+00 -1.89825997e-01] [ 1.13436973e+00 4.68778238e-03 6.36705697e-01 ... 1.41718781e+00 -4.47938919e-01 -3.69046628e-01] [ 1.36693668e+00 9.41872418e-01 6.31293952e-01 ... 4.94964749e-01 -8.14345121e-01 -4.89808977e-01] ... [ 1.41210985e+00 4.96413797e-01 2.04232693e-01 ... -3.49389374e-01 -2.75215983e-01 1.26289234e-01] [ 3.59209836e-01 1.44739008e+00 3.78801882e-01 ... -3.15813750e-01 -1.63286138e+00 -1.04263413e+00] [ 1.42120326e+00 9.23959017e-01 7.79671744e-02 ... 9.68929291e-01 8.55960071e-01 1.07406914e+00]] [[ 9.32642639e-01 1.99040338e-01 -8.44825804e-01 ... 3.92458260e-01 6.16296902e-02 1.21736860e+00] [ 7.67253041e-01 8.93823743e-01 5.25793791e-01 ... 1.01977110e+00 2.30639124e+00 1.33412278e+00] [ 2.27206588e-01 2.05501437e+00 1.56539071e+00 ... 1.03557611e+00 8.16210330e-01 9.65272248e-01] ... [ 6.51512086e-01 -1.39260948e-01 1.20823991e+00 ... 9.42295134e-01 -1.82848945e-01 6.88855588e-01] [ 4.19163913e-01 1.12866127e+00 4.23765659e-01 ... -1.04411650e+00 -9.04924333e-01 -2.78010219e-01] [ 1.59398925e+00 6.02365017e-01 1.90504360e+00 ... 
-5.30058265e-01 2.49003649e-01 8.31127405e-01]] [[ 2.49059224e+00 -2.64401168e-01 -1.07063279e-01 ... -5.24480104e-01 2.59699449e-02 1.51553643e+00] [ 6.47981986e-02 1.46763635e+00 8.69496644e-01 ... -2.85580575e-01 5.01921415e-01 -1.20665096e-01] [ 4.99981135e-01 1.93763351e+00 1.70813727e+00 ... 2.38237917e-01 7.60525316e-02 1.25307977e+00] ... [-7.33735919e-01 1.15197659e+00 7.67642438e-01 ... -3.37639600e-01 1.93781406e-01 9.64320779e-01] [ 1.31536627e+00 1.15503705e+00 5.90981841e-01 ... 3.49930197e-01 2.33918977e+00 1.03688931e+00] [ 6.00076079e-01 1.41130781e+00 1.24743235e+00 ... 2.55555797e+00 7.75683045e-01 1.46186376e+00]]]] [[[[ 3.40797871e-01 4.71802473e-01 1.34637386e-01 ... 5.15728652e-01 -9.19184089e-03 3.08641374e-01] [ 9.49502409e-01 3.87067556e-01 3.50192815e-01 ... 2.78278172e-01 2.96805799e-01 2.54619569e-01] [ 3.62725139e-01 -1.55333839e-02 2.12770909e-01 ... 3.50241572e-01 1.84189379e-01 1.93892524e-01] ... [ 3.81619602e-01 1.91781968e-01 2.50611305e-01 ... 3.92281890e-01 4.74302202e-01 2.19348609e-01] [ 3.49562228e-01 6.22815311e-01 4.40946907e-01 ... 5.52883744e-01 5.83737075e-01 4.45611060e-01] [ 3.49980116e-01 1.89228982e-01 3.87203932e-01 ... 5.81546307e-01 6.97467744e-01 2.87371010e-01]] [[ 2.86693066e-01 1.79746717e-01 3.90059471e-01 ... 8.48770082e-01 3.39189708e-01 2.88389653e-01] [ 6.76973403e-01 5.70325375e-01 7.14622080e-01 ... 4.38164234e-01 2.33364299e-01 2.19202548e-01] [ 5.82560301e-01 4.75696146e-01 3.60769838e-01 ... 5.32844067e-02 4.40087944e-01 7.57785976e-01] ... [ 3.21540385e-01 3.69515091e-01 3.61575902e-01 ... 2.21323162e-01 4.05043036e-01 7.56008804e-01] [ 4.91201669e-01 6.79741561e-01 3.94568890e-01 ... 4.94742095e-01 6.28314912e-01 1.53579071e-01] [ 6.34394169e-01 2.33139381e-01 3.65041822e-01 ... -7.40706623e-02 1.73793018e-01 6.44996524e-01]] [[ 5.26367486e-01 9.62477744e-01 7.50902295e-01 ... 2.04539448e-01 2.90066451e-01 6.17570221e-01] [ 1.46009609e-01 3.88600647e-01 3.46976846e-01 ... 
5.43656051e-01 3.99737090e-01 5.00845969e-01] [ 2.30929792e-01 6.66278064e-01 5.44770837e-01 ... 1.68743879e-01 4.34652060e-01 -1.39089599e-01] ... [ 3.03644717e-01 5.37535131e-01 1.39548913e-01 ... 2.11782232e-01 6.56514764e-01 1.41792133e-01] [ 3.37196440e-01 5.34676969e-01 8.54312956e-01 ... 2.19403520e-01 1.45649627e-01 7.62851059e-01] [ 4.20741886e-02 2.92095453e-01 5.59708893e-01 ... 2.63865888e-01 1.34710863e-01 5.11937559e-01]] ... [[ 5.52782178e-01 4.52118248e-01 5.59140861e-01 ... 6.12673044e-01 6.29137993e-01 6.88075125e-01] [ 3.04735005e-01 1.92263886e-01 1.05143718e-01 ... 9.83639508e-02 4.41104412e-01 9.48790610e-01] [ 5.30923903e-01 2.95998633e-01 1.28522426e-01 ... 5.07210791e-01 5.31695724e-01 3.80537361e-01] ... [ 3.54874074e-01 3.58010262e-01 5.19626252e-02 ... 3.05754393e-01 6.99407995e-01 2.54029334e-01] [ 6.08401299e-01 4.76098329e-01 1.59328267e-01 ... 7.11021960e-01 1.46260202e-01 1.15097500e-01] [ 5.32096684e-01 4.13851172e-01 -1.72616675e-01 ... 4.25233364e-01 1.94270030e-01 1.83067217e-01]] [[ 6.91688061e-01 3.36737186e-01 1.93937078e-01 ... 7.14641273e-01 1.20505698e-01 1.27698481e-01] [ 2.15043649e-01 7.06942976e-01 5.38074672e-01 ... 3.83886725e-01 2.08346933e-01 5.16329050e-01] [ 5.25424719e-01 4.00120020e-01 6.78903222e-01 ... 4.48770881e-01 1.91150710e-01 2.40395278e-01] ... [ 6.08544707e-01 4.92979974e-01 5.26724577e-01 ... 3.12827736e-01 5.07771432e-01 4.98329282e-01] [ 7.62639344e-01 6.80673778e-01 5.32937229e-01 ... 6.63606763e-01 2.86463320e-01 3.38881284e-01] [ 3.73572588e-01 4.14351046e-01 3.82079840e-01 ... 5.28006673e-01 4.34731990e-01 4.84627515e-01]] [[ 3.55734289e-01 3.59926730e-01 2.14938030e-01 ... 4.18608487e-01 6.65956855e-01 2.48771727e-01] [ 5.07048965e-02 3.47420901e-01 7.92522550e-01 ... 3.64783943e-01 2.27942005e-01 1.55631065e-01] [ 1.35310754e-01 6.26589894e-01 4.19239104e-01 ... 2.47817308e-01 7.07601666e-01 5.25717020e-01] ... [ 2.79563665e-01 5.52848637e-01 3.50470632e-01 ... 
1.24173231e-01 6.87006652e-01 1.04333319e-01] [ 3.75084192e-01 3.24032724e-01 2.57854223e-01 ... 3.91182423e-01 4.73228723e-01 2.14791015e-01] [ 4.69006538e-01 3.87012482e-01 5.36015749e-01 ... 3.49459559e-01 4.41201717e-01 1.34511694e-01]]] [[[ 1.27574968e+00 1.01792403e-01 -3.16133589e-01 ... 2.32533407e+00 4.71684456e-01 8.00018609e-01] [ 1.68653059e+00 -3.17680311e+00 1.85623682e+00 ... 4.32849109e-01 1.38174206e-01 1.15084994e+00] [ 1.94259569e-01 -3.16766572e+00 1.81176782e+00 ... -4.67128187e-01 6.74685717e-01 -2.20482564e+00] ... [ 1.07642484e+00 2.14332700e+00 9.58480716e-01 ... -4.70409244e-01 -6.53475344e-01 -4.62451309e-01] [ 1.61687338e+00 3.63370037e+00 -7.41414428e-01 ... 1.15474892e+00 -1.41207886e+00 2.60978293e+00] [-1.49821699e+00 -2.78589249e-01 7.90426373e-01 ... 3.26482511e+00 1.54795074e+00 5.38761556e-01]] [[ 1.80589736e+00 7.35328853e-01 4.36728090e-01 ... 4.76244658e-01 2.80794382e+00 2.26136804e+00] [ 7.92443037e-01 4.84426051e-01 2.30516267e+00 ... 4.32050514e+00 1.40902257e+00 1.99193084e+00] [ 1.10953510e+00 -4.70686883e-01 4.12241429e-01 ... 2.99500990e+00 -1.15275645e+00 9.35853720e-02] ... [ 5.31775951e-01 -1.20490801e+00 -1.68606967e-01 ... 2.65769458e+00 1.61553085e+00 -2.04614902e+00] [-3.82980704e-01 2.67806506e+00 -1.21361710e-01 ... -3.78366053e-01 9.79791522e-01 4.41713423e-01] [ 2.77691555e+00 5.61025977e-01 9.86731589e-01 ... 1.54042196e+00 1.03926903e-03 -7.30396137e-02]] [[ 3.09512305e+00 3.55875540e+00 1.76983917e+00 ... 7.56902814e-01 9.04017091e-01 -5.48126251e-02] [-1.38538992e+00 3.02737308e+00 2.41660428e+00 ... -2.37452492e-01 -8.20114017e-01 -1.23738074e+00] [ 6.32518828e-01 -2.08497405e+00 1.56631112e+00 ... 1.80910957e+00 1.87432438e-01 -4.08659270e-03] ... [-3.00454378e-01 -3.25902849e-02 6.82059288e-01 ... 1.09682584e+00 1.67852557e+00 5.31463385e-01] [-8.12516689e-01 -9.17340636e-01 2.94625378e+00 ... 9.63544697e-02 4.28360081e+00 8.29613686e-01] [ 2.58548522e+00 2.10021996e+00 2.47482634e+00 ... 
1.57232857e+00 2.28668928e+00 2.92998338e+00]] ... [[-6.93636179e-01 3.38532138e+00 1.39946073e-01 ... 1.37144434e+00 5.91085494e-01 -2.15324745e-01] [ 1.58453631e+00 1.10691130e+00 -1.17122501e-01 ... 1.99451649e+00 7.60988832e-01 9.91704404e-01] [ 1.25585091e+00 1.13270891e+00 5.72786629e-01 ... 2.16837478e+00 1.06240416e+00 7.67840326e-01] ... [ 4.63220090e-01 -3.55681330e-01 6.54200852e-01 ... 2.07866758e-01 -6.88436091e-01 -1.68191507e-01] [-1.01654100e+00 2.17785001e-01 1.71739054e+00 ... 1.32019329e+00 1.38279974e+00 3.19996983e-01] [ 8.12498212e-01 -2.21365714e+00 -1.09661913e+00 ... 7.10482121e-01 2.95278025e+00 8.10553253e-01]] [[-2.09271625e-01 2.30126333e+00 1.91845894e+00 ... -5.12965381e-01 1.81649339e+00 -4.42542255e-01] [-3.57531965e-01 -3.13316166e-01 8.54861975e-01 ... 3.80171180e+00 8.36825430e-01 6.71991169e-01] [ 1.10421956e+00 3.07726693e+00 8.70821476e-01 ... 2.60542417e+00 1.85457134e+00 1.69643259e+00] ... [ 1.15379846e+00 4.09746885e+00 -3.41725588e-01 ... -1.37672961e-01 -6.12955876e-02 2.93173194e+00] [ 1.44919467e+00 -1.67198777e+00 5.67969024e-01 ... 3.57589185e-01 2.25411272e+00 2.89766240e+00] [ 5.47326863e-01 1.12695718e+00 9.70540404e-01 ... -2.34959602e+00 -5.30600131e-01 -2.18664229e-01]] [[ 1.89051890e+00 -7.03547120e-01 2.12676430e+00 ... 1.61388516e+00 -4.78621632e-01 -1.18503559e+00] [-4.87926126e-01 4.90120322e-01 -8.47806692e-01 ... 3.31823111e+00 1.29451239e+00 1.07946074e+00] [ 2.10830164e+00 4.49734628e-01 3.87806559e+00 ... 1.69539332e+00 8.94473970e-01 2.05687121e-01] ... [ 9.31047261e-01 1.25367570e+00 -3.39684933e-01 ... 1.76556361e+00 -1.38157284e+00 -9.85920012e-01] [ 8.90061796e-01 1.94420540e+00 2.51012301e+00 ... 3.47476816e+00 1.64318120e+00 1.38512254e-01] [-2.02469993e+00 -2.88856000e-01 2.04185891e+00 ... 2.03993392e+00 9.33131650e-02 -1.37786782e+00]]] [[[ 1.26259649e+00 2.19455600e+00 1.08799624e+00 ... 4.81591672e-01 2.03836179e+00 2.55780602e+00] [ 1.34769928e+00 1.83940911e+00 2.76274055e-01 ... 
3.18359226e-01 3.20603609e+00 8.35378706e-01] [ 1.75000262e+00 9.98898149e-01 7.48075485e-01 ... 4.70330834e-01 1.05032086e+00 1.46760583e+00] ... [ 1.36496806e+00 2.24893951e+00 9.82634544e-01 ... 2.75331378e-01 9.22530517e-02 1.93410480e+00] [ 6.32126212e-01 2.90051126e+00 1.54500067e+00 ... 2.95039326e-01 1.01987219e+00 2.25458050e+00] [ 2.11501646e+00 1.65777695e+00 9.71204102e-01 ... 1.83393109e+00 -2.36251429e-01 8.68912101e-01]] [[ 1.95657349e+00 1.16957390e+00 7.87531257e-01 ... 2.29330897e+00 1.32579792e+00 1.18652380e+00] [-8.52475524e-01 1.25415182e+00 8.22114348e-01 ... 2.15335512e+00 2.09600878e+00 -6.66180104e-02] [ 1.16222847e+00 1.19745994e+00 2.64355683e+00 ... 7.41730452e-01 2.63915157e+00 7.21202612e-01] ... [ 1.00217187e+00 1.80511487e+00 2.39452124e+00 ... 1.84302163e+00 2.79479623e+00 1.52677405e+00] [ 3.60225886e-01 -3.63025993e-01 2.88078928e+00 ... 1.67039728e+00 1.61740887e+00 1.14381683e+00] [ 2.31817770e+00 1.11066973e+00 7.80209899e-01 ... 3.20201367e-01 1.04449141e+00 2.34892821e+00]] [[ 1.24343789e+00 -3.33779827e-02 1.48142397e+00 ... 1.94090569e+00 5.65112472e-01 1.11137140e+00] [ 2.09600806e+00 9.55443442e-01 1.18886876e+00 ... 1.64185357e+00 1.97898901e+00 1.45846713e+00] [ 1.31949306e+00 5.44485092e-01 8.73595357e-01 ... -4.26573992e-01 1.88872564e+00 1.74289274e+00] ... [ 4.61066455e-01 7.81245708e-01 1.46630692e+00 ... 2.00698107e-01 2.09772751e-01 1.01362109e+00] [ 2.22359896e+00 6.00802600e-01 2.69385695e+00 ... 2.07585025e+00 1.45674396e+00 8.09472144e-01] [ 2.38858151e+00 1.45454741e+00 9.34138536e-01 ... 2.22199106e+00 2.47528529e+00 7.05508292e-01]] ... [[ 7.30122149e-01 1.79324079e+00 2.44474578e+00 ... 1.98975968e+00 9.47137833e-01 2.17212605e+00] [ 2.13644886e+00 1.35355628e+00 3.85233283e+00 ... 1.98561060e+00 1.90178192e+00 2.89668274e+00] [ 6.31449282e-01 1.32391894e+00 1.70716202e+00 ... 1.40658689e+00 1.63830101e+00 2.22148633e+00] ... [ 5.74819863e-01 1.47555864e+00 2.03734636e+00 ... 
1.89338303e+00 4.01004076e-01 1.04085171e+00] [ 1.53830099e+00 2.54978329e-01 8.21219105e-03 ... 2.45150661e+00 1.72570646e+00 3.08703399e+00] [ 5.49652755e-01 6.71040475e-01 1.59730041e+00 ... 1.28334165e+00 1.17679238e+00 7.08711505e-01]] [[ 1.21933687e+00 1.70731914e+00 1.48706019e+00 ... 3.35117161e-01 2.72637343e+00 4.76039559e-01] [-1.50026709e-01 2.78744054e+00 6.66392028e-01 ... 2.42308569e+00 7.66536236e-01 2.09417820e+00] [ 3.21235418e+00 1.85269976e+00 1.42046535e+00 ... 3.69646579e-01 3.02229905e+00 2.31819272e+00] ... [ 1.67504871e+00 3.21115702e-01 1.70617640e+00 ... 1.74188125e+00 1.46051478e+00 1.44052875e+00] [ 2.06484342e+00 1.51431823e+00 1.84861350e+00 ... 2.16521573e+00 -1.83000728e-01 2.50719452e+00] [ 2.83202672e+00 2.09475470e+00 7.04084575e-01 ... 8.81758332e-01 2.11563611e+00 4.56385225e-01]] [[ 9.42048848e-01 1.21940339e+00 8.56142044e-02 ... 1.63053596e+00 2.01078081e+00 3.58382750e+00] [ 9.27501544e-02 4.59443569e-01 1.51406062e+00 ... 8.98990273e-01 1.46748161e+00 1.08221292e+00] [ 1.85489404e+00 1.98645815e-01 8.55382860e-01 ... 1.94654179e+00 1.27474856e+00 1.94314504e+00] ... [ 1.86292183e+00 1.48487985e+00 2.49110579e+00 ... 1.64083552e+00 1.61947930e+00 3.33139753e+00] [ 1.15218353e+00 2.17648849e-01 2.63641858e+00 ... 1.98359275e+00 1.93445337e+00 2.44395638e+00] [ 2.85708690e+00 1.64122820e+00 2.22045588e+00 ... 2.40692115e+00 5.03245533e-01 1.19771028e+00]]] [[[ 6.28620744e-01 7.34905243e-01 7.74020255e-01 ... 7.85518825e-01 6.96996093e-01 7.24072099e-01] [ 7.61531115e-01 6.50654733e-01 8.29251885e-01 ... 8.21789622e-01 6.35375261e-01 6.34910583e-01] [ 6.64960682e-01 7.88177073e-01 6.94699764e-01 ... 6.88681126e-01 7.64614582e-01 7.22465694e-01] ... [ 7.01351881e-01 7.45017052e-01 6.65108979e-01 ... 7.74595439e-01 5.61214089e-01 6.69693649e-01] [ 6.34180188e-01 6.84380949e-01 7.96541214e-01 ... 6.79707229e-01 7.09469318e-01 6.17529333e-01] [ 8.37391376e-01 7.23074257e-01 6.57316625e-01 ... 
6.49006069e-01 7.50937819e-01 6.52352273e-01]] [[ 9.84758019e-01 7.45868385e-01 7.01266289e-01 ... 6.88469946e-01 6.44076705e-01 7.07541943e-01] [ 6.38440907e-01 6.98314905e-01 8.61265719e-01 ... 8.04011881e-01 7.12312222e-01 5.49420476e-01] [ 7.60506034e-01 6.65912330e-01 8.13176036e-01 ... 6.46852374e-01 6.94817960e-01 6.87473118e-01] ... [ 6.31638825e-01 6.24372900e-01 6.07483208e-01 ... 7.97717333e-01 8.18569541e-01 6.16696835e-01] [ 8.00618112e-01 6.69430673e-01 6.95364058e-01 ... 6.86091900e-01 6.82360113e-01 6.54358447e-01] [ 6.71818435e-01 8.83931756e-01 6.65423989e-01 ... 7.48718679e-01 8.51165652e-01 6.24722719e-01]] [[ 6.51479006e-01 7.35676110e-01 7.20665574e-01 ... 7.11080134e-01 6.62492871e-01 7.87614703e-01] [ 6.34089708e-01 8.20966601e-01 7.17859507e-01 ... 7.38710344e-01 6.68160617e-01 7.18159735e-01] [ 6.64894700e-01 6.74189985e-01 7.25491822e-01 ... 7.28897393e-01 6.92071080e-01 7.60398030e-01] ... [ 5.94439387e-01 6.22546911e-01 6.94926560e-01 ... 6.51955068e-01 8.38915706e-01 8.14350903e-01] [ 7.27007508e-01 7.33832717e-01 6.49766862e-01 ... 5.98780870e-01 6.71234965e-01 8.66290212e-01] [ 6.74007475e-01 7.93818355e-01 6.46779656e-01 ... 7.76129425e-01 6.41316831e-01 6.60185695e-01]] ... [[ 6.29363239e-01 6.32335305e-01 6.72310829e-01 ... 7.02778399e-01 8.09015810e-01 6.73541367e-01] [ 7.61928558e-01 7.47639954e-01 5.49322307e-01 ... 7.24806905e-01 6.69739902e-01 6.92310870e-01] [ 6.41281426e-01 6.82886541e-01 6.77613735e-01 ... 6.73900485e-01 8.19641888e-01 5.78615487e-01] ... [ 7.29061484e-01 7.73861885e-01 7.40674436e-01 ... 7.98092961e-01 6.49332166e-01 6.76703215e-01] [ 6.25165641e-01 6.98280513e-01 5.85721433e-01 ... 6.31621301e-01 7.06899881e-01 7.39799738e-01] [ 7.14176357e-01 5.85115254e-01 8.67622852e-01 ... 7.95609117e-01 8.96036565e-01 8.32984924e-01]] [[ 7.75579393e-01 7.27929413e-01 7.64604151e-01 ... 6.97565734e-01 8.27559531e-01 6.13000691e-01] [ 7.79567361e-01 7.52471924e-01 7.11716831e-01 ... 
6.98518515e-01 5.83006740e-01 8.27582359e-01] [ 7.36434698e-01 5.27224660e-01 6.67163491e-01 ... 6.90677106e-01 6.44624949e-01 6.55472755e-01] ... [ 7.54120111e-01 7.13111281e-01 8.78327072e-01 ... 6.33950472e-01 5.62386870e-01 6.51714325e-01] [ 7.06244886e-01 6.94770336e-01 8.01897764e-01 ... 5.71421802e-01 6.73678279e-01 6.13803804e-01] [ 8.25029731e-01 7.97090411e-01 6.92178726e-01 ... 7.01420069e-01 6.07236087e-01 5.63199103e-01]] [[ 7.58598864e-01 7.06220150e-01 7.73105085e-01 ... 7.91884840e-01 6.88708246e-01 7.49047101e-01] [ 6.67485654e-01 6.48947895e-01 7.44133413e-01 ... 8.17248464e-01 6.57084644e-01 6.30657911e-01] [ 7.33117342e-01 7.82125115e-01 6.10282183e-01 ... 7.07898378e-01 7.02806175e-01 7.14702666e-01] ... [ 7.45066285e-01 7.16511965e-01 7.73750424e-01 ... 5.79861939e-01 7.19299138e-01 8.77593994e-01] [ 6.76165879e-01 7.38654256e-01 7.87194669e-01 ... 6.93884373e-01 7.14255691e-01 7.29932606e-01] [ 7.47143090e-01 7.59089351e-01 6.33418500e-01 ... 7.66091228e-01 6.93635285e-01 7.68180311e-01]]] [[[ 6.21986508e-01 8.81904885e-02 4.24287558e-01 ... 5.32788098e-01 4.68061298e-01 4.33249533e-01] [ 2.12132156e-01 2.03535080e-01 2.07352385e-01 ... 7.83745274e-02 4.44371849e-01 2.48344570e-01] [ 2.60318011e-01 3.25304419e-01 4.76144165e-01 ... 5.56289911e-01 7.94944882e-01 6.95405185e-01] ... [ 2.08639979e-01 7.07171142e-01 6.99011743e-01 ... 6.60074592e-01 5.18649220e-01 4.05083895e-01] [ 3.81293952e-01 2.30056778e-01 5.05522966e-01 ... 2.90985227e-01 4.02403111e-03 3.32233131e-01] [ 6.17803037e-01 4.72858787e-01 2.82577723e-01 ... 6.75497890e-01 7.59347439e-01 5.77763915e-01]] [[ 6.20868087e-01 2.24303901e-01 5.15930414e-01 ... 3.87296915e-01 1.16788425e-01 3.45051467e-01] [-3.22127016e-03 7.33927310e-01 5.77907503e-01 ... 1.67931646e-01 5.92101097e-01 1.37790501e-01] [ 3.24092150e-01 5.08728921e-01 2.79550612e-01 ... 9.64554772e-02 4.41562682e-01 8.87975872e-01] ... [ 1.53113782e-01 1.09575737e+00 4.31615770e-01 ... 
1.29548371e-01 3.03183079e-01 2.84300923e-01] [ 3.18121105e-01 3.09793234e-01 7.77987018e-02 ... 3.67636681e-01 4.71469671e-01 2.98556328e-01] [-9.59725454e-02 4.25003618e-01 1.22159362e-01 ... 1.33329397e-02 4.48766500e-01 3.36286783e-01]] [[ 5.28298497e-01 1.80330753e-01 6.44065142e-02 ... 4.11164373e-01 2.83941299e-01 2.80246258e-01] [ 2.35271752e-01 1.67166039e-01 5.30042827e-01 ... 6.47935688e-01 -2.00112704e-02 6.60022616e-01] [ 4.22608197e-01 4.32262957e-01 4.68653411e-01 ... 4.63851213e-01 -3.16023380e-02 2.73466855e-01] ... [ 5.17023504e-01 3.83666784e-01 6.12476647e-01 ... 2.03798518e-01 6.54182494e-01 2.17477664e-01] [ 4.91480947e-01 2.04632685e-01 1.17938370e-01 ... 7.05125153e-01 4.94723260e-01 4.08047795e-01] [-1.44131199e-01 3.60707581e-01 5.57289183e-01 ... 2.35553473e-01 4.06377286e-01 5.93476117e-01]] ... [[ 2.74716496e-01 1.62002236e-01 4.46182489e-01 ... 6.14586651e-01 7.62940496e-02 3.23994458e-01] [-1.87909216e-01 2.86585480e-01 -7.17414916e-02 ... 6.21613741e-01 4.63934064e-01 2.01083884e-01] [ 7.22870231e-01 3.74787360e-01 7.10455716e-01 ... -7.50311418e-04 2.07986400e-01 9.12630677e-01] ... [-1.74242388e-02 3.49648863e-01 2.09992096e-01 ... 4.19854224e-01 6.57486081e-01 1.82483107e-01] [ 4.07755762e-01 -4.75126952e-02 4.22124892e-01 ... 2.85170346e-01 4.00716096e-01 3.06481987e-01] [ 4.52748626e-01 5.84685564e-01 4.70270932e-01 ... 7.90285289e-01 -4.24796849e-01 6.85768664e-01]] [[ 2.60097116e-01 2.23495126e-01 6.89737976e-01 ... 2.78292179e-01 2.55704373e-01 2.79746115e-01] [ 4.37777698e-01 3.34738761e-01 5.43032289e-01 ... 3.71341974e-01 5.18327475e-01 6.14789724e-01] [ 3.52775604e-01 2.74749160e-01 4.54202294e-01 ... 3.54514569e-01 4.30401564e-01 6.00769281e-01] ... [ 2.68690109e-01 5.88611253e-02 4.75417882e-01 ... 5.12279391e-01 7.64538288e-01 4.43854928e-01] [ 2.65014488e-02 8.33159447e-01 2.67916441e-01 ... 2.06680700e-01 4.10767496e-01 3.38032603e-01] [ 5.39039016e-01 2.73169547e-01 2.00488210e-01 ... 
1.27708882e-01 5.49901538e-02 -4.72131334e-02]] [[ 2.32260421e-01 1.82636499e-01 3.86316061e-01 ... 2.28363629e-02 -4.63064248e-03 1.21293858e-01] [ 5.21365583e-01 3.59946668e-01 8.34123552e-01 ... 3.59381557e-01 5.29324472e-01 2.79143393e-01] [ 5.34401059e-01 1.26546144e-01 8.83465171e-01 ... 5.75273812e-01 5.42459905e-01 2.97795117e-01] ... [ 5.15363872e-01 2.39464298e-01 4.74065661e-01 ... 5.94688892e-01 5.98500669e-01 1.98421348e-02] [ 2.76698023e-01 5.85344136e-01 5.67983836e-02 ... 3.67085077e-02 4.40888643e-01 3.57613325e-01] [ 6.70163631e-01 1.96930781e-01 5.39889634e-01 ... 6.33807659e-01 6.45338178e-01 1.30562842e-01]]] [[[ 1.11392999e+00 4.55592394e-01 1.29730248e+00 ... -1.05116630e+00 -1.49129498e+00 6.94420457e-01] [ 1.84103027e-01 1.54751098e+00 1.17382455e+00 ... 9.17959452e-01 6.98915958e-01 7.49516964e-01] [ 5.00161886e-01 2.18632722e+00 2.91463971e+00 ... -8.36868808e-02 6.47608399e-01 8.75301063e-01] ... [ 1.72883391e+00 4.34211314e-01 3.86155516e-01 ... -1.43836987e+00 7.41249800e-01 -4.37949210e-01] [-2.26768881e-01 1.37481022e+00 -6.48607790e-01 ... 1.14269626e+00 6.52156830e-01 4.55153018e-01] [ 9.73191619e-01 6.10025883e-01 -9.40999508e-01 ... 2.62149666e-02 3.22715700e-01 -2.22588450e-01]] [[ 1.81950963e+00 -4.78080273e-01 2.93286085e-01 ... 9.11726415e-01 1.76592660e+00 3.60598266e-01] [ 1.42046714e+00 -1.10925126e+00 1.61180533e-02 ... 1.52667928e+00 3.19546074e-01 4.29394454e-01] [-3.75353932e-01 1.04067194e+00 -5.91165245e-01 ... 2.64078093e+00 1.44521352e-02 9.24866796e-01] ... [-5.61791837e-01 2.97591776e-01 6.27440214e-01 ... 1.80382431e+00 -7.69093215e-01 1.00732124e+00] [-9.60685134e-01 6.25086010e-01 -3.40346754e-01 ... 7.15093434e-01 -3.21804196e-01 3.75945151e-01] [ 1.09770620e+00 3.88951331e-01 1.05347955e+00 ... 6.86065495e-01 -1.38311595e-01 3.40252280e-01]] [[ 1.12307107e+00 -9.26544845e-01 -1.74287155e-01 ... 3.46257925e-01 -1.81761354e-01 -6.61617890e-02] [ 6.00778222e-01 -6.86570704e-01 -2.14738443e-01 ... 
2.26197648e+00 1.61884081e+00 -3.57158601e-01] [ 9.80523586e-01 1.23171961e+00 1.02046037e+00 ... -6.88328385e-01 1.07088733e+00 -8.15017641e-01] ... [-7.00035170e-02 -5.06226838e-01 -3.64435822e-01 ... -5.31232953e-01 3.82725775e-01 4.10292335e-02] [ 2.79028893e-01 -8.00399005e-01 1.69402862e+00 ... 1.57229078e+00 -2.10155323e-01 5.09441137e-01] [ 9.24075663e-01 4.55619812e-01 5.46280630e-02 ... 2.72764981e-01 1.06803608e+00 6.46181107e-01]] ... [[ 3.89969528e-01 -3.14763606e-01 6.99185073e-01 ... 1.70696557e+00 -4.06348556e-01 2.00476170e-01] [ 2.09311152e+00 1.02972515e-01 1.15774310e+00 ... -5.45252502e-01 1.48302639e+00 6.11091666e-02] [-1.97541371e-01 1.11366892e+00 1.10708153e+00 ... 1.75423634e+00 1.08531415e+00 2.48600721e-01] ... [ 1.84883583e+00 -1.62058258e+00 1.12008333e+00 ... 5.33165216e-01 -5.32751501e-01 1.82657516e+00] [-1.33200717e+00 1.25017154e+00 1.69708693e+00 ... 3.51628065e-01 -2.87358195e-01 3.35388221e-02] [ 2.19615889e+00 8.26268613e-01 7.06891716e-01 ... -6.20870769e-01 1.79680824e+00 -5.06043613e-01]] [[ 1.68417335e+00 1.64452899e+00 8.39396358e-01 ... -1.16564178e+00 -1.98151663e-01 9.45332944e-01] [ 5.12872577e-01 5.50930858e-01 1.00719368e+00 ... 1.99846041e+00 2.25597188e-01 1.34183037e+00] [ 2.14726877e+00 5.68567872e-01 2.69457746e+00 ... -2.66486526e-01 1.15924025e+00 -3.51873308e-01] ... [ 1.48118591e+00 1.06274927e+00 -1.07995577e-01 ... 1.25618792e+00 6.65438175e-01 7.18233049e-01] [ 1.96570456e+00 1.75270841e-01 7.94251919e-01 ... -3.12682427e-02 1.53699446e+00 1.14505279e+00] [ 3.85781914e-01 1.00744896e-01 3.37873608e-01 ... 8.68163407e-01 3.97313416e-01 1.11413431e+00]] [[-9.99204695e-01 -9.19424295e-01 -8.66689906e-02 ... 1.13034344e+00 6.01733685e-01 1.57821989e+00] [-2.45672196e-01 8.15907657e-01 8.64982069e-01 ... -2.62780845e-01 -5.11351451e-02 6.53142154e-01] [ 8.02831829e-01 1.10794127e+00 8.91471982e-01 ... -9.09816504e-01 1.14446545e+00 -2.99077965e-02] ... [ 5.48494160e-01 1.21674931e+00 4.13419247e-01 ... 
7.02152073e-01 -7.91433334e-01 1.31697953e+00] [ 1.42964828e+00 6.46733582e-01 7.31478810e-01 ... 9.91477370e-01 8.28303039e-01 1.84281480e+00] [-1.05267262e+00 2.95877337e-01 4.17473733e-01 ... -4.14975077e-01 1.84523833e+00 1.94074380e+00]]]] ... [[[[ 4.10871208e-01 3.14023554e-01 -2.29505915e-02 ... 6.99279487e-01 4.89635706e-01 2.55506605e-01] [ 6.92063689e-01 6.33940220e-01 5.20153008e-02 ... 4.99758959e-01 1.87251225e-01 9.39203560e-01] [ 5.32464802e-01 2.81684160e-01 4.44811255e-01 ... 3.62844557e-01 6.93526089e-01 7.39512026e-01] ... [ 4.58315760e-01 5.14871776e-01 3.49865019e-01 ... 4.96999860e-01 4.13492262e-01 -2.59153917e-02] [ 4.37345803e-02 1.15810066e-01 1.04217343e-01 ... 6.79096699e-01 6.29158974e-01 3.82042140e-01] [ 3.42796594e-01 6.02389097e-01 4.16865259e-01 ... 3.75667453e-01 4.77950424e-01 8.55973884e-02]] [[ 1.34775624e-01 4.13179904e-01 7.52018929e-01 ... 2.74857640e-01 1.99496776e-01 4.18766230e-01] [ 6.73461258e-01 4.27951545e-01 4.79561836e-01 ... 4.84642476e-01 6.77576125e-01 4.94578481e-01] [ 5.56484878e-01 4.03346896e-01 4.92065638e-01 ... 8.17418873e-01 3.44490349e-01 6.73809111e-01] ... [ 4.73701537e-01 3.09718072e-01 5.69682121e-01 ... 3.20625931e-01 3.45306396e-01 4.10523474e-01] [ 4.75590616e-01 -9.69240256e-03 7.45603740e-01 ... 7.64693737e-01 7.74304867e-01 4.57723349e-01] [ 4.28865373e-01 3.43718708e-01 6.71419859e-01 ... 3.62882167e-01 4.68812943e-01 2.39984408e-01]] [[ 8.90898526e-01 2.19512627e-01 -5.52636161e-02 ... 1.54989183e-01 3.58276874e-01 4.68587637e-01] [ 5.49562931e-01 4.62748796e-01 8.38447690e-01 ... -1.70635596e-01 5.64660788e-01 -1.33104082e-02] [ 4.51987356e-01 7.24879727e-02 4.37537104e-01 ... 3.14563543e-01 3.47727269e-01 2.23907128e-01] ... [ 5.07513940e-01 3.03562194e-01 2.60925531e-01 ... 3.50749433e-01 2.01202467e-01 3.55660647e-01] [ 3.66849393e-01 1.66833803e-01 3.59765381e-01 ... 5.67720413e-01 7.47535050e-01 7.50146091e-01] [ 8.70713890e-01 3.17804784e-01 3.85163724e-01 ... 
2.61474460e-01 1.27704844e-01 3.27015400e-01]] ... [[ 2.96521485e-01 4.61518824e-01 7.94782996e-01 ... 2.06200123e-01 2.12848112e-01 3.73332530e-01] [ 1.81092530e-01 6.23688623e-02 4.17402357e-01 ... 2.69026190e-01 4.42205220e-01 4.80301410e-01] [ 3.56852889e-01 3.25488687e-01 2.24359006e-01 ... 5.10312796e-01 3.65588635e-01 5.80052078e-01] ... [ 2.14884803e-01 1.87804803e-01 6.37434661e-01 ... 4.48056012e-01 4.28527236e-01 3.08145791e-01] [ 2.09745184e-01 9.37056839e-02 3.38882476e-01 ... 4.22198653e-01 5.38170516e-01 6.79767430e-01] [ 3.08260694e-02 4.22337383e-01 2.78565556e-01 ... 5.09859204e-01 3.44029516e-01 1.00331262e-01]] [[-4.07652929e-02 6.67526901e-01 5.13767958e-01 ... 4.65378553e-01 9.11544859e-02 2.33349651e-01] [ 6.40374124e-01 4.73049700e-01 2.73579746e-01 ... 5.70637465e-01 3.48071963e-01 2.39973858e-01] [ 3.60870868e-01 5.85532546e-01 3.72315198e-01 ... 1.25972956e-01 4.44715798e-01 2.43545204e-01] ... [ 3.56365293e-01 1.79176703e-01 2.95184642e-01 ... 3.04512411e-01 2.20589370e-01 4.91596490e-01] [ 3.07200879e-01 5.76272249e-01 7.02446103e-01 ... 4.07007605e-01 4.37519342e-01 4.38073993e-01] [ 3.07201684e-01 7.54073739e-01 4.64183629e-01 ... 5.33943951e-01 4.59175140e-01 5.49797297e-01]] [[ 3.17039967e-01 3.78588259e-01 6.30337000e-01 ... 5.18938243e-01 3.32816094e-01 2.85173804e-01] [ 3.71785074e-01 5.97181618e-01 7.69257396e-02 ... -2.27558916e-03 6.53697550e-01 3.02076578e-01] [ 1.12990066e-01 3.64062637e-01 3.93046111e-01 ... 4.76348728e-01 3.90037864e-01 7.04898000e-01] ... [ 6.78498685e-01 2.42297381e-01 2.58013397e-01 ... -1.54062167e-01 6.16660118e-01 4.71597522e-01] [ 2.62812018e-01 2.85216719e-01 6.06854439e-01 ... 5.88293612e-01 5.79576075e-01 5.04585922e-01] [ 6.85352743e-01 4.81344163e-01 5.54349124e-01 ... -6.69853166e-02 -7.48888357e-03 1.78560555e-01]]] [[[ 1.20295525e-01 1.97836661e+00 3.26546520e-01 ... 9.99269783e-01 -2.78017558e-02 1.89940500e+00] [ 1.03050995e+00 2.20105624e+00 3.49793702e-01 ... 
5.80343246e-01 4.28582668e-01 3.94742787e-01] [ 1.32914960e+00 -4.21278000e-01 -5.88027716e-01 ... -1.24549687e+00 -7.79999197e-01 1.13012302e+00] ... [-2.12068200e+00 1.66291237e+00 1.29582191e+00 ... 3.38935882e-01 4.45235825e+00 3.90043616e+00] [ 2.06443381e+00 -1.46858859e+00 2.67144501e-01 ... 1.01720080e-01 2.30478382e+00 2.15955424e+00] [ 4.00419652e-01 3.27975225e+00 2.96847773e+00 ... 8.62919509e-01 -1.46043599e+00 1.58571339e+00]] [[-1.64910987e-01 -8.27894449e-01 2.61563921e+00 ... -3.63687485e-01 1.91553578e-01 1.84061158e+00] [ 2.06247973e+00 8.11411440e-01 -1.42675996e+00 ... -1.32932031e+00 2.35857797e+00 9.02024880e-02] [ 1.07244682e+00 -8.88622165e-01 2.95557790e-02 ... 1.52099401e-01 -4.97407198e-01 9.45158720e-01] ... [ 1.74228787e+00 2.57283163e+00 9.57575738e-02 ... 1.91076362e+00 1.65846515e+00 -1.26025653e+00] [ 1.01732850e+00 4.40911591e-01 2.54540062e+00 ... 8.76900554e-01 1.64112234e+00 2.30572081e+00] [ 1.99955535e+00 -1.11938345e+00 2.00941467e+00 ... 8.39848220e-01 -1.51306522e+00 -1.31805909e+00]] [[-5.77970505e-01 2.93381906e+00 2.94164896e+00 ... -2.66275406e-01 2.88747096e+00 6.86774552e-01] [-1.73349547e+00 4.58189547e-01 -1.33197224e+00 ... -1.79232061e+00 3.13135982e-01 3.18255568e+00] [ 2.24300766e+00 3.39079827e-01 -1.30475271e+00 ... 9.87292826e-01 -1.02939260e+00 -1.59494412e+00] ... [ 2.61405659e+00 -6.74456179e-01 1.02788043e+00 ... 1.41367960e+00 1.11455882e+00 -1.48873419e-01] [ 1.33489096e+00 8.53960395e-01 1.25727248e+00 ... 3.00238204e+00 1.22384943e-01 1.55909657e+00] [ 4.69313383e-01 8.66014510e-02 -1.32265520e+00 ... -3.78942877e-01 5.68584859e-01 6.62022412e-01]] ... [[-3.28011179e+00 3.44834757e+00 4.02144156e-02 ... 4.75346506e-01 -9.54756677e-01 -4.80338991e-01] [ 3.35184857e-02 2.30508137e+00 2.97576857e+00 ... 1.30745685e+00 5.31764627e-01 2.70165896e+00] [ 7.11022019e-01 -1.65203893e+00 -2.48412743e-01 ... 3.02196711e-01 6.88757956e-01 -1.15145910e+00] ... [-7.09612191e-01 1.24873948e+00 2.88330138e-01 ... 
4.56417531e-01 1.94427705e+00 -5.08087993e-01] [ 1.88464940e+00 1.39567757e+00 -1.23829997e+00 ... 2.27283812e+00 1.56125617e+00 7.34728634e-01] [-2.03383490e-01 1.23302007e+00 1.24644244e+00 ... -1.88822195e-01 -6.51646376e-01 1.12976241e+00]] [[-3.42554562e-02 -8.82796228e-01 4.52892113e+00 ... 4.38519418e-01 2.93859029e+00 1.59496975e+00] [-9.27398980e-01 -1.63879704e+00 -9.77025628e-01 ... 3.03651452e-01 2.20280719e+00 3.38693333e+00] [ 7.82053992e-02 2.09592938e+00 2.34349823e+00 ... 1.43890178e+00 -9.80667830e-01 1.67270303e+00] ... [ 2.14624262e+00 -1.83305573e+00 1.42649949e+00 ... 7.62833297e-01 1.22501627e-01 1.35454822e+00] [ 2.16473508e+00 3.07343459e+00 1.18113649e+00 ... 1.33067071e-01 1.47595370e+00 1.76559538e-02] [ 7.59196937e-01 3.01822853e+00 2.43446589e+00 ... -9.82312620e-01 5.49286425e-01 1.07757223e+00]] [[ 1.23704684e+00 2.83827829e+00 2.91197085e+00 ... 2.70671678e+00 -3.39014828e-01 7.51489818e-01] [-2.32656574e+00 7.88614333e-01 2.51232922e-01 ... 2.97770411e-01 2.57774758e+00 7.62095690e-01] [ 3.47572237e-01 -1.12390840e+00 -4.88095462e-01 ... 1.40317452e+00 1.55239969e-01 5.43294430e-01] ... [ 3.07999182e+00 1.63810766e+00 3.04952890e-01 ... 6.67134106e-01 -6.35330141e-01 2.84158349e+00] [ 3.82012701e+00 9.90330517e-01 1.56149912e+00 ... 2.90377617e+00 8.09848785e-01 1.83925366e+00] [ 3.22919965e-01 1.17558742e+00 -1.58636093e+00 ... -2.85390139e-01 7.55199432e-01 2.91461301e+00]]] [[[ 2.04356432e+00 1.41987407e+00 1.13490272e+00 ... 1.83803582e+00 9.77117777e-01 1.02683830e+00] [ 2.38223553e+00 1.03980255e+00 2.56262839e-01 ... 2.11054325e+00 3.11740160e-01 1.81899881e+00] [ 1.14149773e+00 1.36870706e+00 1.48836792e+00 ... 3.23423684e-01 -3.63665760e-01 1.03389359e+00] ... [ 2.51988029e+00 9.96915281e-01 4.43122625e-01 ... 2.96437645e+00 1.51603198e+00 1.11086488e+00] [ 1.67245197e+00 3.25889921e+00 1.56193113e+00 ... 2.17792392e+00 1.74223471e+00 1.32174265e+00] [ 6.71324134e-01 2.46307230e+00 1.74562645e+00 ... 
1.08166444e+00 9.21842873e-01 1.87247169e+00]] [[ 1.65000522e+00 2.13488626e+00 8.74353290e-01 ... 7.78928518e-01 1.32159197e+00 1.15384412e+00] [ 2.16350722e+00 1.32036042e+00 8.04719627e-01 ... 1.24609494e+00 5.63441336e-01 2.59189534e+00] [ 1.84668601e+00 1.91143584e+00 1.10776484e+00 ... 8.19623291e-01 1.38697982e+00 9.23529446e-01] ... [ 1.11225319e+00 1.21277273e+00 9.21840668e-01 ... 2.60583091e+00 1.37659144e+00 1.68578839e+00] [-9.96790707e-01 1.64589238e+00 8.83043885e-01 ... 1.32817078e+00 9.36409056e-01 1.52177143e+00] [ 2.47140074e+00 1.91842628e+00 2.80417275e+00 ... 9.66460705e-01 2.10739827e+00 2.12493753e+00]] [[ 7.01579869e-01 1.69971573e+00 1.48066139e+00 ... 2.92031670e+00 7.43004024e-01 2.69910169e+00] [ 2.54711598e-01 1.81324005e+00 2.40065646e+00 ... 2.37733173e+00 2.06420493e+00 1.62030590e+00] [ 3.85935783e+00 2.35991049e+00 -4.45117429e-02 ... 1.65567005e+00 3.23360968e+00 1.52551818e+00] ... [ 2.45070457e-02 3.29434067e-01 1.17696428e+00 ... 1.43143785e+00 2.47437310e+00 3.69891673e-01] [ 7.83904314e-01 1.60323739e+00 1.19323897e+00 ... 3.17269236e-01 1.90673733e+00 1.09712541e+00] [ 1.30937016e+00 5.90881228e-01 4.03377533e-01 ... 1.36856973e+00 7.64602661e-01 7.01440454e-01]] ... [[ 1.59387791e+00 9.63588178e-01 1.89046049e+00 ... 2.14440608e+00 2.14651990e+00 1.31046498e+00] [ 8.38153422e-01 3.25342321e+00 2.01110911e+00 ... 2.23386005e-01 2.27416301e+00 1.49835992e+00] [ 4.50674206e-01 2.11723280e+00 2.00367236e+00 ... 1.00776970e+00 1.22338569e+00 1.73032892e+00] ... [ 1.30760980e+00 3.85668659e+00 8.42153430e-01 ... 2.18353105e+00 1.46952009e+00 1.24953032e+00] [ 1.79644156e+00 1.39344251e+00 1.76798427e+00 ... 8.93617392e-01 1.24202275e+00 1.28141797e+00] [ 8.84442210e-01 2.13591623e+00 7.35376120e-01 ... 1.72910616e-01 2.39485884e+00 1.19343650e+00]] [[ 2.71838129e-01 7.97244668e-01 1.97443712e+00 ... 8.65028858e-01 1.32643414e+00 2.77296019e+00] [ 2.06789899e+00 2.06713271e+00 1.59096861e+00 ... 
2.52085090e+00 5.00666082e-01 2.27352723e-01] [ 2.69238329e+00 1.18218029e+00 2.46451497e+00 ... 1.63186097e+00 8.44125092e-01 1.72222281e+00] ... [ 1.05249047e+00 5.70802093e-01 1.68208325e+00 ... 3.87262672e-01 1.61972594e+00 1.19432461e+00] [ 1.51424015e+00 2.18447638e+00 2.67832613e+00 ... 1.03224385e+00 5.53867102e-01 1.77523553e+00] [ 2.62491870e+00 1.93417001e+00 3.54234838e+00 ... 2.11531901e+00 1.24756145e+00 2.12356853e+00]] [[ 2.62380719e-01 1.28072071e+00 4.92344886e-01 ... 1.56825209e+00 1.47723305e+00 8.33045006e-01] [ 1.37981248e+00 9.38363373e-01 1.71390879e+00 ... 2.01771760e+00 1.20313716e+00 1.72650874e+00] [ 2.12104273e+00 3.45281696e+00 2.04253650e+00 ... 2.48597622e+00 8.53569686e-01 5.48161604e-02] ... [ 2.81697273e+00 2.67765373e-01 2.63175368e+00 ... 1.19614255e+00 1.66840601e+00 1.65137124e+00] [ 1.31206727e+00 6.62069917e-01 3.76593441e-01 ... 3.68929774e-01 1.11988842e+00 2.70798445e+00] [ 4.30461198e-01 1.97519481e+00 1.24357104e+00 ... 2.88917518e+00 1.74337983e+00 1.55801797e+00]]] [[[ 6.80797577e-01 7.08747387e-01 7.35337973e-01 ... 5.82821786e-01 7.45709479e-01 7.78182983e-01] [ 6.28371298e-01 7.88029313e-01 6.28866136e-01 ... 7.56352127e-01 8.15633237e-01 8.10989439e-01] [ 5.57129145e-01 6.66559219e-01 7.42827773e-01 ... 6.56625390e-01 6.84417427e-01 6.79266810e-01] ... [ 5.23868501e-01 7.24572062e-01 6.94945037e-01 ... 7.75597811e-01 8.74281943e-01 7.47017443e-01] [ 6.69080257e-01 7.04070389e-01 6.41071260e-01 ... 7.91523576e-01 7.05539584e-01 5.68015158e-01] [ 7.55506814e-01 8.35389912e-01 7.96693802e-01 ... 6.23971283e-01 7.50663459e-01 6.88970864e-01]] [[ 7.58862555e-01 7.60072649e-01 8.14022601e-01 ... 7.55443394e-01 7.33316839e-01 7.17735887e-01] [ 5.76782525e-01 6.01419806e-01 7.26601303e-01 ... 6.20776534e-01 6.10977590e-01 5.42980969e-01] [ 6.65558100e-01 6.33333445e-01 6.51394606e-01 ... 8.07554722e-01 6.34284019e-01 7.38823891e-01] ... [ 6.28714561e-01 5.64593256e-01 7.07553625e-01 ... 
7.34175563e-01 8.19415629e-01 7.26954699e-01] [ 7.46936679e-01 8.25950682e-01 6.39398456e-01 ... 7.47478366e-01 7.03923047e-01 6.81994557e-01] [ 7.36920834e-01 6.58946633e-01 7.27459371e-01 ... 5.84424913e-01 7.41567373e-01 6.94133222e-01]] [[ 6.61664188e-01 6.57368720e-01 6.99838102e-01 ... 8.37798595e-01 7.15659559e-01 6.08637452e-01] [ 7.37585604e-01 6.82575583e-01 7.03461826e-01 ... 7.77115464e-01 7.09965169e-01 6.92575157e-01] [ 6.31549597e-01 5.98472118e-01 7.27439880e-01 ... 7.46113002e-01 6.17257595e-01 7.23207653e-01] ... [ 7.51911283e-01 7.57016361e-01 6.76285148e-01 ... 8.07787776e-01 6.92101836e-01 6.09579325e-01] [ 7.93685377e-01 8.04052413e-01 7.06237614e-01 ... 7.96530485e-01 7.13266671e-01 5.58598280e-01] [ 6.51361227e-01 6.50940418e-01 6.35780692e-01 ... 7.11578190e-01 5.89608788e-01 7.92325497e-01]] ... [[ 7.72886157e-01 6.60687745e-01 7.78189182e-01 ... 7.43026376e-01 8.14925849e-01 6.93395495e-01] [ 7.01489508e-01 5.55025458e-01 6.61477923e-01 ... 7.75618672e-01 7.59427726e-01 7.04217136e-01] [ 6.03339016e-01 6.52740002e-01 5.36655486e-01 ... 5.91606855e-01 7.70326972e-01 7.70422518e-01] ... [ 7.07167447e-01 7.20622420e-01 6.98200583e-01 ... 7.86903322e-01 7.19367266e-01 6.40498877e-01] [ 7.89482176e-01 6.49959922e-01 6.99485123e-01 ... 7.41442621e-01 6.74240708e-01 6.78893626e-01] [ 7.07066059e-01 7.84547925e-01 7.43717253e-01 ... 5.33792377e-01 5.16392469e-01 7.75946796e-01]] [[ 7.54737973e-01 6.71887457e-01 7.72358358e-01 ... 7.54077673e-01 7.98671901e-01 7.18258739e-01] [ 6.08711421e-01 7.99468935e-01 6.47381186e-01 ... 8.17956924e-01 6.63831472e-01 5.35376847e-01] [ 7.02087462e-01 7.06317604e-01 5.75277984e-01 ... 6.25514567e-01 8.29336584e-01 6.10937059e-01] ... [ 7.25191176e-01 6.60001099e-01 7.24881589e-01 ... 7.46925890e-01 7.08758175e-01 8.29337597e-01] [ 8.44056904e-01 8.17685366e-01 6.27750695e-01 ... 8.26767027e-01 6.96253896e-01 6.43377304e-01] [ 8.13015044e-01 7.91221976e-01 6.58847868e-01 ... 
6.12612247e-01 6.26972795e-01 5.88337123e-01]] [[ 7.04710960e-01 6.12707376e-01 6.56311512e-01 ... 7.20335186e-01 7.55992770e-01 8.00684214e-01] [ 7.36817598e-01 5.66988468e-01 6.24186754e-01 ... 6.61392570e-01 6.99528694e-01 5.52517176e-01] [ 6.96657419e-01 6.78274691e-01 7.96440542e-01 ... 6.99854016e-01 7.12751567e-01 7.41068661e-01] ... [ 7.41858065e-01 7.88247943e-01 7.53898621e-01 ... 8.16838682e-01 6.98776603e-01 7.54866421e-01] [ 8.62724125e-01 8.07282269e-01 6.29440248e-01 ... 6.51649714e-01 6.68328881e-01 7.50254571e-01] [ 7.64212370e-01 6.08854711e-01 6.23299181e-01 ... 7.34051406e-01 7.19889224e-01 5.10072052e-01]]] [[[ 3.36136729e-01 3.82125288e-01 5.74510753e-01 ... 3.89020622e-01 3.63115370e-01 2.39665508e-01] [ 5.88873148e-01 3.87382746e-01 4.36795712e-01 ... 6.31639123e-01 3.10196817e-01 5.90159535e-01] [ 3.00770640e-01 2.80969113e-01 3.42570335e-01 ... 4.21263367e-01 3.77633095e-01 3.65256220e-01] ... [ 1.21211655e-01 3.57819170e-01 3.31993014e-01 ... 1.59272641e-01 6.29196703e-01 2.09941864e-01] [ 5.77774167e-01 4.49396878e-01 7.78836071e-01 ... 5.22491693e-01 7.64462948e-01 2.13444293e-01] [ 7.50556231e-01 -1.38907433e-01 7.14087665e-01 ... -1.52809680e-01 3.74799073e-02 5.46922982e-01]] [[ 5.89902520e-01 1.56983119e-02 3.52373719e-01 ... 5.34895718e-01 2.99195617e-01 9.94477212e-01] [ 2.91929185e-01 3.52739841e-01 5.18890500e-01 ... 5.09309053e-01 6.88656330e-01 4.13236059e-02] [ 7.03755915e-01 5.76634645e-01 5.99185050e-01 ... 3.26437980e-01 1.26805127e-01 3.46990526e-01] ... [ 5.97862422e-01 7.73053050e-01 4.28020321e-02 ... 2.92316258e-01 -9.83964652e-02 1.99895963e-01] [ 5.70378840e-01 5.13185203e-01 -4.28567052e-01 ... 6.03256226e-01 4.02705759e-01 2.05525547e-01] [ 5.70166945e-01 4.34822559e-01 1.04764104e-01 ... 1.12326801e-01 7.60022223e-01 4.78245556e-01]] [[-2.32435837e-02 4.61982012e-01 7.67540693e-01 ... 1.49091259e-02 3.33123982e-01 5.62661111e-01] [ 8.74713659e-02 2.33518302e-01 2.76064396e-01 ... 
5.55276453e-01 5.54922640e-01 3.38059850e-02] [ 2.30998397e-01 7.19554842e-01 2.34019488e-01 ... 4.51807767e-01 6.59713745e-01 5.24630666e-01] ... [ 1.07204258e-01 2.03054458e-01 4.38945949e-01 ... 1.91661805e-01 2.10384011e-01 3.09669495e-01] [ 9.97884154e-01 1.50784522e-01 2.94588476e-01 ... 8.36079866e-02 4.05012369e-01 5.57490170e-01] [ 1.06617190e-01 6.78502202e-01 -7.44521096e-02 ... 9.79352370e-02 1.90960467e-01 7.80475795e-01]] ... [[ 3.79364610e-01 -1.80105213e-02 7.03516662e-01 ... 5.67631543e-01 3.13425422e-01 -2.27900043e-01] [ 7.25131035e-01 8.39300096e-01 7.11118057e-02 ... -1.38873816e-01 1.65319517e-01 2.56285816e-01] [ 3.53871047e-01 5.75520217e-01 3.53942156e-01 ... 5.58355927e-01 7.56628513e-01 5.14042616e-01] ... [ 5.68772376e-01 -3.20838168e-02 6.67493224e-01 ... 4.31798220e-01 2.28442162e-01 8.79477561e-01] [ 1.40678570e-01 9.65823829e-01 4.20853376e-01 ... 6.68914676e-01 4.37028497e-01 1.08887725e-01] [ 3.89078826e-01 6.23576283e-01 3.28442186e-01 ... 4.67587680e-01 4.50509727e-01 5.02903759e-01]] [[ 3.43430936e-01 3.19159150e-01 2.08661810e-01 ... 4.49553132e-02 -1.26961404e-02 5.48387527e-01] [ 5.00039756e-01 7.48811543e-01 2.63201654e-01 ... 1.79134995e-01 6.63915932e-01 3.32310110e-01] [ 3.69531155e-01 2.20996737e-01 9.28692520e-01 ... 4.57150722e-03 2.44779721e-01 5.62831223e-01] ... [ 3.82214822e-02 1.31657436e-01 1.26211032e-01 ... 4.82942611e-01 5.57764709e-01 4.99232054e-01] [-1.71026036e-01 5.78907847e-01 5.91566384e-01 ... 3.39516699e-01 8.11491087e-02 7.39380956e-01] [ 2.57911414e-01 5.51373184e-01 3.53580326e-01 ... 1.67082503e-01 5.75797200e-01 5.29064797e-02]] [[ 4.95917410e-01 2.30561107e-01 1.27892196e-01 ... 2.90143251e-01 3.01206082e-01 4.92211133e-01] [-1.13107920e-01 2.17618838e-01 3.90714139e-01 ... 1.39834374e-01 7.07992733e-01 3.05030376e-01] [ 2.91504204e-01 8.89117599e-01 -6.50850609e-02 ... 8.01383257e-01 1.15407025e-02 2.89798677e-01] ... [ 3.45210642e-01 5.63803732e-01 5.56415379e-01 ... 
-4.00746353e-02 1.97686777e-01 7.46026039e-01] [ 2.38750637e-01 7.00301588e-01 -3.53242387e-03 ... 3.06045055e-01 3.58834833e-01 4.59090203e-01] [ 4.23441678e-01 3.77846420e-01 2.24269137e-01 ... 5.01129448e-01 2.70961165e-01 3.05810124e-01]]] [[[ 8.16525638e-01 5.73985338e-01 -4.08733226e-02 ... 6.04069650e-01 1.55963624e+00 5.73187172e-01] [ 3.44274968e-01 1.90495610e+00 -1.51411802e-01 ... 1.07219493e+00 -2.78718919e-01 2.51320958e+00] [ 6.34839773e-01 1.26445794e+00 1.20983219e+00 ... 5.51645041e-01 2.05673194e+00 2.88463742e-01] ... [ 1.88341022e+00 3.58941376e-01 1.10054374e+00 ... 8.89843524e-01 1.27632117e+00 1.20291150e+00] [ 1.92584860e+00 5.33462107e-01 1.33031321e+00 ... -1.15388632e+00 5.54129422e-01 2.07133114e-01] [-4.94037747e-01 5.73606372e-01 3.82287085e-01 ... 1.51612949e+00 -9.28721651e-02 -2.05527604e-01]] [[ 8.61471117e-01 1.11045018e-01 3.85407329e-01 ... 4.21590447e-01 -2.58005112e-01 1.25520813e+00] [ 8.37377608e-01 1.24509466e+00 -2.51216263e-01 ... -4.79350835e-01 7.09937066e-02 -2.13049337e-01] [ 7.77934194e-01 -2.07702085e-01 7.54699945e-01 ... 1.81859982e+00 -2.24541813e-01 9.77953434e-01] ... [ 3.75461042e-01 -7.31543899e-02 9.79687452e-01 ... -3.40846665e-02 2.73088008e-01 2.20321659e-02] [ 5.57770371e-01 2.52960116e-01 1.62085009e+00 ... 7.37383783e-01 -7.13983059e-01 2.72657961e-01] [ 3.35016511e-02 3.70070755e-01 8.63263130e-01 ... 1.67769992e+00 1.19539452e+00 -7.00057924e-01]] [[ 6.06012821e-01 1.49788409e-01 1.15113783e+00 ... 1.43842304e+00 1.79949388e-01 7.65115142e-01] [-6.81322277e-01 1.71395928e-01 -6.95834979e-02 ... 1.51267791e+00 5.19912168e-02 8.48397315e-01] [ 2.78821558e-01 1.18903780e+00 6.61357880e-01 ... 2.75058246e+00 -5.29385358e-02 8.45806658e-01] ... [-7.42121458e-01 1.16769516e+00 1.67436242e+00 ... 1.47457731e+00 9.80993450e-01 6.78598821e-01] [ 5.12083650e-01 8.71133566e-01 1.29125822e+00 ... 5.45781016e-01 8.68669689e-01 1.22354805e-01] [ 7.16204405e-01 2.22826391e-01 7.48227000e-01 ... 
-2.63384730e-01 1.35249600e-01 -2.49218553e-01]] ... [[ 1.01679659e+00 5.92719078e-01 9.70886499e-02 ... 1.20338106e+00 1.56209335e-01 -2.77182251e-01] [-2.39713684e-01 1.91224563e+00 -1.02203715e+00 ... 1.50679469e+00 1.08140695e+00 -1.66398621e+00] [ 3.07396621e-01 9.53682184e-01 -1.15545884e-01 ... 1.14610565e+00 -6.72773838e-01 1.59217334e+00] ... [ 1.24140763e+00 -2.01445967e-02 -8.85935724e-01 ... 7.45822191e-01 -2.90523469e-01 1.04665719e-01] [ 8.33232999e-01 2.82080722e+00 8.37528646e-01 ... 1.73075902e+00 -6.93146110e-01 9.80382442e-01] [-5.19739330e-01 3.60723138e-01 1.08300722e+00 ... 1.05324483e+00 2.67679954e+00 7.67211378e-01]] [[-4.14524645e-01 2.07435703e+00 3.44228715e-01 ... 6.21277213e-01 -4.70855981e-01 4.98555273e-01] [ 1.97624409e+00 9.83555436e-01 8.05224955e-01 ... -6.04638577e-01 7.58263290e-01 6.72450960e-01] [-2.58313417e-01 -2.08143041e-01 3.84834796e-01 ... 3.54828298e-01 1.00837159e+00 -3.58241022e-01] ... [ 1.23235250e+00 1.29846251e+00 -2.60708541e-01 ... 6.69125617e-01 8.70124698e-01 -9.79953051e-01] [-2.29234174e-01 -7.32331157e-01 4.17606950e-01 ... -1.41187072e-01 -8.12882125e-01 -3.33694726e-01] [-3.52460384e-01 1.28008032e+00 2.09661752e-01 ... 1.71467826e-01 -2.49271598e-02 -6.83759972e-02]] [[ 4.51647311e-01 1.95719588e+00 -3.97700012e-01 ... 1.21481502e+00 -7.58766592e-01 1.69437695e+00] [ 1.98696518e+00 8.05360675e-01 1.47145760e+00 ... 2.54149556e-01 4.38856423e-01 7.16316700e-01] [ 8.09807241e-01 1.00487970e-01 1.50564206e+00 ... 1.43685484e+00 -1.17345822e+00 1.47262204e+00] ... [ 1.03492010e+00 -8.96867830e-03 -1.91090256e-01 ... 2.46314518e-02 1.21780312e+00 1.65711451e+00] [-6.53935492e-01 1.07636857e+00 -3.73058170e-01 ... -6.63448155e-01 8.19406033e-01 -4.33564901e-01] [ 1.33999729e+00 1.31026185e+00 2.16326070e+00 ... -4.63437915e-01 -1.77847064e+00 1.88388598e+00]]]] [[[[ 3.27368021e-01 1.77878946e-01 6.97340071e-01 ... 
5.48890173e-01 1.17254592e-01 6.06493771e-01] [ 7.20688283e-01 7.86017358e-01 2.35193685e-01 ... 3.85847449e-01 1.26309320e-01 4.15361375e-01] [ 1.56455278e-01 4.39713478e-01 4.77018207e-01 ... 3.98471564e-01 5.48337936e-01 1.41009733e-01] ... [ 4.82321978e-01 3.75619322e-01 6.71987355e-01 ... 5.28964341e-01 3.17017287e-01 5.52575588e-01] [ 4.90072250e-01 4.43787664e-01 3.54103774e-01 ... 7.68056452e-01 3.66656214e-01 3.02422762e-01] [ 5.46923220e-01 2.99697548e-01 5.44698298e-01 ... 1.65494636e-01 4.37728614e-01 3.28219056e-01]] [[ 7.54602551e-01 1.00014791e-01 3.91022891e-01 ... 4.98303145e-01 2.49235570e-01 2.07272664e-01] [ 5.04741371e-01 6.61281943e-01 6.39123201e-01 ... 6.33957267e-01 3.61665845e-01 5.97906351e-01] [ 3.47752422e-01 2.37135932e-01 4.28238045e-03 ... 4.48683739e-01 2.94292957e-01 7.27352202e-02] ... [ 2.98770934e-01 6.80647910e-01 5.43249309e-01 ... 4.40479308e-01 7.26394236e-01 1.54776961e-01] [ 6.80338919e-01 3.34165663e-01 1.17951773e-01 ... 7.07976699e-01 3.16034645e-01 2.27052987e-01] [ 2.83099681e-01 2.28828713e-01 2.07200333e-01 ... 8.60781789e-01 1.53233945e-01 4.83131289e-01]] [[ 4.83373553e-01 2.09314570e-01 1.96080178e-01 ... 3.09117079e-01 3.52677435e-01 2.59064823e-01] [ 3.04627776e-01 4.87498671e-01 3.17207873e-01 ... 4.44045573e-01 -1.49292842e-01 2.43457451e-01] [ 3.22859675e-01 5.56537330e-01 4.91968662e-01 ... 5.05947173e-01 4.08515334e-01 1.10157192e-01] ... [ 5.54204285e-01 4.10870254e-01 4.20269310e-01 ... 3.41253728e-01 6.19363189e-01 4.18743283e-01] [ 7.34197438e-01 3.71387750e-01 3.99012685e-01 ... 6.60229981e-01 7.08170384e-02 7.29770139e-02] [ 5.75141907e-01 5.20681083e-01 3.44867468e-01 ... 4.35390711e-01 1.18878171e-01 3.22130263e-01]] ... [[ 4.11579549e-01 7.79348761e-02 7.06497490e-01 ... 3.27196032e-01 3.04810107e-01 4.01231378e-01] [ 5.06824911e-01 7.11442232e-01 1.38159066e-01 ... 3.59717816e-01 5.32948315e-01 -4.33832966e-02] [-1.00336066e-02 3.75491321e-01 5.47491491e-01 ... 
5.11156656e-02 3.62521648e-01 5.75362206e-01] ... [ 5.16124129e-01 2.64870793e-01 3.56676608e-01 ... 3.49298939e-02 6.18761599e-01 1.85559243e-01] [ 6.69108272e-01 3.51910949e-01 5.25353074e-01 ... 3.58831018e-01 4.06196937e-02 3.22008163e-01] [ 5.35858870e-01 4.68722761e-01 3.48890841e-01 ... 4.97394651e-01 -4.21429761e-02 1.54228419e-01]] [[ 6.71285093e-01 1.28127515e-01 3.95207435e-01 ... 5.57474136e-01 2.91419178e-01 3.90623271e-01] [ 5.20513594e-01 7.51529574e-01 7.24613965e-01 ... 3.37840080e-01 4.63335782e-01 1.91121146e-01] [ 1.06714174e-01 7.12976992e-01 6.28266335e-01 ... 2.65052021e-01 5.66874087e-01 3.44756871e-01] ... [ 5.32604516e-01 8.71386051e-01 2.44752556e-01 ... 1.83404624e-01 6.51013374e-01 1.55927822e-01] [-1.56348079e-01 5.18863738e-01 2.98396289e-01 ... -5.13727963e-03 5.22583663e-01 6.15270734e-01] [ 3.28620344e-01 7.55785629e-02 4.83730942e-01 ... 3.91070276e-01 4.74881567e-02 -2.74158884e-02]] [[ 6.41178548e-01 2.97180206e-01 5.51992357e-01 ... 5.10239720e-01 5.65384388e-01 2.20082045e-01] [ 2.08515152e-01 6.42303407e-01 2.56228626e-01 ... 9.74861741e-01 5.62882006e-01 3.80812645e-01] [ 1.27717987e-01 2.37785727e-02 4.81942892e-01 ... 7.68349648e-01 5.16266227e-01 2.53227383e-01] ... [ 4.66038287e-01 3.68875384e-01 6.67391539e-01 ... -9.20897350e-02 4.73322451e-01 2.35227332e-01] [ 4.89365667e-01 8.56910288e-01 6.76021934e-01 ... 6.36160254e-01 2.59330690e-01 3.51128340e-01] [ 3.37434143e-01 3.93729389e-01 3.28198701e-01 ... 1.96323276e-01 7.47117460e-01 4.69438165e-01]]] [[[ 1.96495306e+00 8.42151046e-01 -3.51487732e+00 ... 2.12238097e+00 2.27787042e+00 2.43810102e-01] [-1.57175744e+00 1.54555213e+00 1.16333127e+00 ... 1.11437571e+00 1.13939393e+00 2.45748973e+00] [ 1.71226394e+00 2.98069239e-01 1.32975733e+00 ... 8.05488601e-02 2.24795938e-01 -1.03860986e+00] ... [ 1.17939746e+00 3.42823315e+00 1.89734507e+00 ... 8.65805089e-01 1.66223511e-01 -1.71266186e+00] [-1.97434470e-01 -2.23886490e-01 1.57104146e+00 ... 
-1.37241673e+00 1.51467848e+00 1.94487333e+00] [ 2.01960111e+00 1.23603058e+00 4.22549188e-01 ... 4.40899432e-01 2.85931611e+00 -4.69859630e-01]] [[ 7.28227735e-01 1.38817978e+00 -1.28749299e+00 ... -7.42249191e-02 3.49475980e-01 -3.77553254e-02] [ 9.22607005e-01 2.10507488e+00 1.55349314e+00 ... 2.27188796e-01 3.65697742e-01 1.22544003e+00] [ 3.62353921e-01 9.90112841e-01 5.13788164e-01 ... 8.91794562e-01 -1.12596714e+00 3.28030705e+00] ... [ 1.71259522e+00 2.06531191e+00 -1.82329088e-01 ... 3.23864281e-01 9.79995966e-01 9.31200743e-01] [ 5.69048002e-02 -2.68814683e+00 3.46846618e-02 ... 1.78093519e-02 1.22049880e+00 2.73646379e+00] [ 5.21622539e-01 2.30720949e+00 -1.59878552e-01 ... 1.70962071e+00 5.02054334e-01 4.97840673e-01]] [[ 1.12657464e+00 1.33366191e+00 1.51128531e+00 ... 8.85537148e-01 1.48672199e+00 9.89344060e-01] [-1.10961616e+00 2.23173046e+00 1.67261648e+00 ... 1.58517909e+00 -5.04301310e-01 -1.36668980e+00] [-1.49419117e+00 2.29206538e+00 2.42534995e-01 ... 5.64875901e-01 -2.67731160e-01 3.38929033e+00] ... [-7.87754834e-01 1.99720883e+00 -5.54277189e-02 ... -5.45870900e-01 6.34939969e-01 1.28706479e+00] [ 1.03459358e+00 -2.27887750e+00 -5.34965098e-01 ... -1.39705315e-01 1.84749448e+00 -1.68615088e-01] [ 2.00643373e+00 2.11328721e+00 1.57835826e-01 ... 1.51062739e+00 6.57290339e-01 2.50079129e-02]] ... [[ 2.09978056e+00 6.46380708e-02 -1.45692098e+00 ... -1.94079602e+00 3.11817706e-01 7.08470523e-01] [ 2.33543730e+00 2.43429685e+00 6.19279444e-01 ... 3.90623957e-01 2.56820589e-01 -2.00101209e+00] [ 3.96128803e-01 8.71143192e-02 4.40891647e+00 ... 2.93921423e+00 -1.76597726e+00 -5.06541967e-01] ... [ 7.26022065e-01 1.23971915e+00 -9.00857449e-01 ... 2.34126496e+00 9.22815561e-01 3.52283239e-01] [ 3.08140945e+00 -2.97150671e-01 2.06878686e+00 ... -2.66022950e-01 1.96740282e+00 1.14525430e-01] [-1.74872565e+00 2.18999696e+00 2.18191695e+00 ... 1.49465191e+00 3.98708200e+00 9.85963762e-01]] [[ 1.31168246e+00 -1.40999329e+00 -1.47878313e+00 ... 
2.23593450e+00 3.58477026e-01 1.48206198e+00] [ 1.55842578e+00 -8.62100959e-01 3.72857973e-02 ... 7.87115172e-02 -2.75518540e-02 2.53529787e-01] [ 1.43963838e+00 -6.53479457e-01 3.25672269e+00 ... -9.34470668e-02 4.79668051e-01 -5.36914349e-01] ... [ 2.99059242e-01 1.19698954e+00 3.12258720e-01 ... 5.85404336e-01 8.46052051e-01 1.76254570e+00] [-9.67102826e-01 -1.97605431e+00 1.46719360e+00 ... -1.80727065e+00 -4.77140665e-01 -4.68904078e-01] [ 4.68203831e+00 -8.42327416e-01 1.56281722e+00 ... 3.41697478e+00 3.53868753e-01 2.26467475e-01]] [[ 1.07676482e+00 6.64749205e-01 -5.22283390e-02 ... -6.19347811e-01 9.29606438e-01 1.81786311e+00] [ 1.82357013e+00 -8.78909707e-01 2.20377421e+00 ... -9.18340385e-01 6.16772413e-01 1.23884523e+00] [ 7.43739009e-01 5.13375342e-01 1.73014462e+00 ... 8.96976352e-01 1.77534747e+00 1.77368617e+00] ... [ 2.11690331e+00 -1.90823996e+00 2.40953660e+00 ... 1.76104057e+00 1.07354152e+00 1.16845980e-01] [-8.11076071e-03 -1.48392212e+00 2.50069165e+00 ... -9.05273259e-01 2.87829161e-01 1.61098942e-01] [ 1.57658970e+00 1.14515090e+00 -1.46519554e+00 ... 1.40550107e-01 1.95852864e+00 1.01751065e+00]]] [[[ 2.82096577e+00 -2.11845171e-02 9.67415392e-01 ... 1.89853430e+00 4.89175111e-01 2.08968496e+00] [ 2.21113992e+00 1.45616412e+00 -1.05906856e+00 ... 1.34007490e+00 2.66006303e+00 1.02888060e+00] [ 2.09664130e+00 2.05496454e+00 2.15619493e+00 ... 1.43944943e+00 3.10311079e+00 9.68658984e-01] ... [ 1.08962524e+00 1.83414257e+00 9.83401358e-01 ... 1.78594172e+00 2.06184554e+00 1.86322224e+00] [ 2.96462560e+00 9.89588618e-01 1.64420247e+00 ... 1.45425892e+00 1.02173138e+00 1.64922059e+00] [ 8.69074821e-01 2.79349238e-01 2.12694836e+00 ... 7.92037249e-01 1.30813599e+00 4.11548585e-01]] [[ 6.88168466e-01 1.41363168e+00 1.49921119e+00 ... 1.12412727e+00 1.20073986e+00 1.70941186e+00] [ 2.14870262e+00 2.14329123e+00 7.91535676e-01 ... 1.50356948e+00 3.06739390e-01 1.52907956e+00] [ 5.26233494e-01 -2.57565886e-01 1.87983632e+00 ... 
2.20462608e+00 2.25992846e+00 1.38314176e+00] ... [ 3.78536820e-01 4.34288770e-01 1.36076164e+00 ... 1.90425074e+00 1.46258271e+00 2.58497548e+00] [ 6.66818619e-01 8.68672013e-01 1.80938542e+00 ... 1.32377601e+00 1.76171720e+00 2.55087781e+00] [ 1.12676013e+00 9.90534902e-01 5.67952991e-01 ... -1.55697256e-01 1.85849309e+00 1.59067881e+00]] [[ 1.97299528e+00 1.12065172e+00 2.23903513e+00 ... 1.15929401e+00 2.25892019e+00 2.35229540e+00] [ 1.50339544e+00 1.72845876e+00 6.18976712e-01 ... 9.15403366e-01 -1.98980570e-01 1.27488017e+00] [ 1.33158958e+00 1.90410650e+00 1.47417903e+00 ... 2.28062487e+00 1.22617984e+00 2.67818832e+00] ... [ 2.28615642e+00 3.84216636e-01 9.03246164e-01 ... 7.03956306e-01 1.32615733e+00 7.45343447e-01] [ 2.03103423e+00 1.87550700e+00 6.10329628e-01 ... 2.97742367e+00 1.32454705e+00 1.70352906e-02] [-8.01221371e-01 8.59059572e-01 1.44042623e+00 ... 4.74568725e-01 1.70071089e+00 -1.79310918e-01]] ... [[ 7.14635193e-01 1.33282948e+00 1.44616246e+00 ... 1.59666681e+00 1.05954492e+00 2.72673678e+00] [ 1.09325731e+00 1.56927717e+00 -3.31626445e-01 ... 7.75047541e-02 5.34972548e-01 3.41037059e+00] [ 8.76900733e-01 2.12898135e+00 9.67936039e-01 ... 1.31391656e+00 -5.15051186e-01 2.10091591e+00] ... [ 1.05050063e+00 1.17492354e+00 2.15758657e+00 ... 7.42220163e-01 1.04757071e+00 1.60355568e-01] [ 1.15701890e+00 7.48298109e-01 1.44142151e+00 ... -4.34655584e-02 8.22508335e-01 3.26847410e+00] [ 1.05245435e+00 7.70644307e-01 1.50027418e+00 ... 2.51219130e+00 6.31238222e-01 2.42146826e+00]] [[ 6.70318604e-01 2.28315187e+00 6.64080799e-01 ... 1.87459564e+00 1.74261487e+00 1.11525965e+00] [ 6.88493490e-01 1.49189603e+00 1.01112461e+00 ... 1.62014246e+00 1.15559556e-01 2.26688313e+00] [ 2.88295889e+00 1.29251945e+00 2.87259722e+00 ... 2.08430505e+00 1.05378067e+00 9.15273368e-01] ... [ 1.54428983e+00 1.38850462e+00 2.71625161e+00 ... 1.36662149e+00 2.34888554e+00 1.38883150e+00] [-1.66160852e-01 2.99940735e-01 9.12507653e-01 ... 
2.00710130e+00 2.39580941e+00 2.41816115e+00] [ 1.30092412e-01 2.47656727e+00 2.37972474e+00 ... 1.06524682e+00 1.26872158e+00 4.88735259e-01]] [[-1.12060588e-02 4.61535871e-01 1.24152708e+00 ... 3.61321837e-01 1.67462304e-01 2.07076764e+00] [ 3.51578760e+00 1.07348822e-01 9.99011815e-01 ... 5.88127077e-01 1.23071349e+00 2.31850100e+00] [ 7.83256531e-01 6.35098442e-02 9.65579867e-01 ... 9.49374676e-01 1.66332945e-01 1.13289356e+00] ... [ 1.82185733e+00 1.53206420e+00 6.85960054e-01 ... 9.10106778e-01 1.55093217e+00 2.48441863e+00] [ 2.72257638e+00 4.65556890e-01 -4.98991348e-02 ... 4.28084046e-01 1.83316553e+00 4.92774338e-01] [ 2.79504752e+00 5.84288120e-01 2.08395934e+00 ... 6.57169819e-01 7.86584735e-01 2.69994110e-01]]] [[[ 7.69390345e-01 6.87776208e-01 7.28457987e-01 ... 6.72646940e-01 6.92748904e-01 7.15658963e-01] [ 6.12738669e-01 7.16920197e-01 4.65845287e-01 ... 6.72574103e-01 7.64309645e-01 5.89865208e-01] [ 7.39633083e-01 7.47031629e-01 7.08434403e-01 ... 5.80620527e-01 6.68800533e-01 8.35955441e-01] ... [ 6.66900456e-01 7.44995117e-01 6.95569754e-01 ... 8.24285686e-01 5.66076756e-01 6.66623533e-01] [ 6.70428395e-01 6.09513819e-01 7.92217851e-01 ... 7.20686853e-01 8.44556749e-01 6.74438834e-01] [ 7.20782876e-01 7.50852287e-01 8.60818267e-01 ... 7.75634468e-01 8.74460638e-01 8.10769200e-01]] [[ 6.77872777e-01 8.80449057e-01 7.22922325e-01 ... 7.08796024e-01 7.25589156e-01 8.46012235e-01] [ 7.67157793e-01 8.00769687e-01 8.19475770e-01 ... 5.81048787e-01 7.22136676e-01 7.82380342e-01] [ 7.07976162e-01 6.54026031e-01 6.81418180e-01 ... 7.93822885e-01 7.08077312e-01 6.69821441e-01] ... [ 6.62998796e-01 6.77835703e-01 8.20745707e-01 ... 8.12962294e-01 6.67296112e-01 5.75886250e-01] [ 7.57927716e-01 6.75087154e-01 7.45325506e-01 ... 8.25200737e-01 7.03065753e-01 7.14313865e-01] [ 6.69186234e-01 6.97727680e-01 6.48369849e-01 ... 7.87624419e-01 6.79583371e-01 7.25423455e-01]] [[ 7.61457026e-01 7.08926082e-01 8.28614831e-01 ... 
7.33281553e-01 7.02120185e-01 6.44831955e-01] [ 6.47983909e-01 6.37710392e-01 6.87553227e-01 ... 7.45554566e-01 7.41887927e-01 7.80683339e-01] [ 8.07936966e-01 6.47504508e-01 7.29414344e-01 ... 8.24597657e-01 7.35212922e-01 7.19510913e-01] ... [ 6.67085826e-01 6.93672359e-01 7.67780006e-01 ... 7.34355390e-01 7.05326557e-01 6.90852642e-01] [ 7.20899701e-01 7.86560297e-01 7.30819881e-01 ... 8.21866333e-01 7.22333074e-01 7.57447481e-01] [ 7.98670471e-01 6.32749736e-01 8.16814840e-01 ... 7.81943560e-01 5.48756540e-01 5.60540080e-01]] ... [[ 6.50687873e-01 7.66392887e-01 7.45773017e-01 ... 7.25708127e-01 7.49357760e-01 6.01544380e-01] [ 6.85931265e-01 7.43562460e-01 7.47834980e-01 ... 6.76426828e-01 7.45109618e-01 6.24831498e-01] [ 6.66852891e-01 7.91554868e-01 4.76581693e-01 ... 6.76238596e-01 6.79471314e-01 5.12616456e-01] ... [ 5.82279980e-01 6.14188671e-01 6.60124779e-01 ... 7.35055447e-01 7.31370449e-01 8.12208593e-01] [ 7.22714961e-01 7.06146598e-01 6.27084434e-01 ... 7.41949022e-01 6.28637969e-01 7.65488386e-01] [ 7.30167031e-01 7.71082699e-01 8.21108699e-01 ... 6.64757311e-01 8.70429039e-01 8.62174392e-01]] [[ 6.67497277e-01 8.42377067e-01 7.00940609e-01 ... 7.69288599e-01 5.14305472e-01 7.60363936e-01] [ 6.76943123e-01 8.12845945e-01 5.67835152e-01 ... 7.02948451e-01 7.31661081e-01 7.40658462e-01] [ 6.90739810e-01 7.78625011e-01 8.16781700e-01 ... 6.75275803e-01 7.50816762e-01 7.42737055e-01] ... [ 7.19590485e-01 7.21861005e-01 7.45630205e-01 ... 6.67827070e-01 8.10644329e-01 6.62516475e-01] [ 7.88373590e-01 5.96885681e-01 5.96508086e-01 ... 5.15521765e-01 7.27222800e-01 5.83313823e-01] [ 6.16266966e-01 8.20057511e-01 7.03247547e-01 ... 6.09159708e-01 6.18235588e-01 6.18904293e-01]] [[ 6.47652566e-01 7.45201588e-01 7.69569933e-01 ... 5.94072461e-01 4.58921701e-01 7.21006632e-01] [ 7.70813048e-01 7.80461431e-01 6.77341461e-01 ... 7.12740242e-01 6.83862448e-01 6.64204240e-01] [ 7.30493367e-01 7.19445586e-01 7.40395010e-01 ... 
7.78084159e-01 4.85120028e-01 5.73613882e-01] ... [ 6.22104704e-01 5.91074347e-01 6.27439439e-01 ... 6.72384024e-01 6.23225152e-01 7.10419893e-01] [ 6.16640151e-01 7.66896427e-01 7.93001831e-01 ... 7.76709974e-01 7.40046740e-01 8.75234365e-01] [ 8.46760452e-01 8.91023099e-01 5.57947218e-01 ... 7.86755741e-01 9.11426425e-01 6.03637755e-01]]] [[[ 3.36601347e-01 2.62656748e-01 2.91082054e-01 ... 5.17431438e-01 9.17854548e-01 6.85720980e-01] [ 5.89595854e-01 2.53638983e-01 5.29352367e-01 ... 3.71953249e-01 1.98475793e-01 6.37164056e-01] [ 3.97061646e-01 2.03993216e-01 1.19010374e-01 ... 5.67778349e-01 2.03239456e-01 2.63459146e-01] ... [ 8.62319544e-02 6.03790283e-01 5.10532200e-01 ... -4.44476277e-01 4.10578668e-01 4.96400863e-01] [ 2.30891272e-01 1.45028800e-01 2.03727968e-02 ... 2.55515546e-01 4.66031879e-01 1.11838162e-01] [ 7.37962365e-01 2.03052744e-01 3.74729574e-01 ... 3.44600916e-01 6.11953020e-01 2.99477637e-01]] [[ 2.29612753e-01 7.44249582e-01 5.07734954e-01 ... 4.47196588e-02 6.16764843e-01 5.11256993e-01] [ 6.29466116e-01 9.76011157e-01 7.27680385e-01 ... 3.84986997e-01 3.64945382e-01 6.62188709e-01] [-2.24547550e-01 1.71973556e-01 4.40333337e-01 ... 5.74015342e-02 5.06899655e-01 -1.80215240e-01] ... [ 4.65181977e-01 1.88259810e-01 3.34239811e-01 ... 1.49500430e-01 4.85015929e-01 3.61421406e-01] [ 9.89901543e-01 5.74576795e-01 5.32936037e-01 ... 5.51764071e-01 3.48853879e-02 4.80908573e-01] [ 4.35224533e-01 -8.45336914e-02 1.32673889e-01 ... 5.66402435e-01 5.72939038e-01 4.08856347e-02]] [[ 5.53155303e-01 9.19651270e-01 1.71223000e-01 ... 3.73455197e-01 -2.18756534e-02 6.14748240e-01] [ 6.95038617e-01 9.85118032e-01 4.74395037e-01 ... 3.16917807e-01 4.22198147e-01 -1.88359186e-01] [ 6.20541573e-01 6.14183009e-01 -2.65772268e-02 ... 4.52715516e-01 2.83098042e-01 6.33223876e-02] ... [ 5.10988355e-01 1.91049308e-01 1.77688092e-01 ... 6.13216221e-01 8.43562186e-02 9.05111954e-02] [ 1.02065951e-01 4.70485628e-01 4.02718812e-01 ... 
4.39760089e-01 1.08840156e+00 9.70053524e-02] [ 3.63051623e-01 3.63162905e-01 5.97115040e-01 ... 1.13981873e-01 6.06943667e-01 4.89250392e-01]] ... [[ 4.53015089e-01 6.19858205e-01 2.05807075e-01 ... 1.91510066e-01 6.83907807e-01 2.95679599e-01] [ 3.71869713e-01 2.66362727e-01 1.97539121e-01 ... 4.73818742e-02 2.24241704e-01 3.65217566e-01] [ 2.64364123e-01 -3.43310810e-03 4.39331234e-01 ... 1.25865042e-01 5.25337636e-01 3.46924365e-01] ... [ 8.01397085e-01 3.77367347e-01 4.35984761e-01 ... 7.79789269e-01 4.41262096e-01 1.44275308e-01] [ 2.64513999e-01 5.26799083e-01 2.83030331e-01 ... -6.47715554e-02 6.57259643e-01 4.42840308e-01] [ 6.40667379e-01 3.42186064e-01 7.97962368e-01 ... 4.53169107e-01 3.40444893e-01 1.89294130e-01]] [[ 5.92275739e-01 1.80680737e-01 2.29884870e-02 ... 1.56247124e-01 3.94505978e-01 5.55992186e-01] [ 8.35449219e-01 -1.60167709e-01 1.17296115e-01 ... 5.23955166e-01 3.27466339e-01 2.63649940e-01] [ 4.24823552e-01 1.28632277e-01 5.82170248e-01 ... 4.84471858e-01 2.75579959e-01 4.70015764e-01] ... [ 9.18256760e-01 1.77980483e-01 2.87800550e-01 ... 3.18153113e-01 3.17807108e-01 5.13794780e-01] [ 7.88634866e-02 7.35521793e-01 5.84980607e-01 ... 4.19416688e-02 -4.22158092e-02 6.30703866e-01] [ 7.82358885e-01 2.87273258e-01 1.44508600e-01 ... 2.45100990e-01 4.51087594e-01 5.27234316e-01]] [[ 6.65668175e-02 4.74030226e-01 2.49032870e-01 ... -1.40610993e-01 2.86453605e-01 4.09639388e-01] [-1.27735689e-01 4.23547655e-01 6.98941648e-01 ... 3.84521991e-01 8.17207098e-02 3.65057468e-01] [ 5.54781079e-01 5.12517869e-01 3.10826063e-01 ... 6.60297930e-01 1.05920859e-01 3.62429321e-01] ... [ 8.54796588e-01 3.49737883e-01 7.57960200e-01 ... 1.34017825e-01 5.06133139e-01 2.79282272e-01] [-2.20614254e-01 4.89080548e-01 4.84699905e-01 ... 2.37953201e-01 9.46362436e-01 4.13388282e-01] [ 4.08704281e-01 1.46372870e-01 7.55188167e-01 ... 1.44966945e-01 1.15662619e-01 6.94259405e-02]]] [[[-8.04581285e-01 1.03204213e-01 9.53078270e-02 ... 
1.51260436e+00 9.81069505e-01 4.79528636e-01] [-7.83631206e-01 6.39998496e-01 9.18113172e-01 ... 3.74026924e-01 -3.74524117e-01 3.96838248e-01] [ 1.46934772e+00 6.83529258e-01 9.83323336e-01 ... -2.30765402e-01 1.27210808e+00 -1.50779319e+00] ... [ 1.42374051e+00 -4.27923352e-02 -2.13524356e-01 ... 1.42550349e+00 -9.08791482e-01 2.00559080e-01] [ 2.59181987e-02 1.65745258e+00 1.98407853e+00 ... -1.42646320e-02 1.01091242e+00 1.82105982e+00] [ 2.82754421e-01 1.18000114e+00 -1.20388716e-01 ... -2.16390312e-01 1.33088923e+00 8.79484236e-01]] [[ 4.57046449e-01 4.34651971e-01 6.73424304e-01 ... 1.42291224e+00 7.85108447e-01 9.08647031e-02] [ 9.84637260e-01 5.90869367e-01 8.85836124e-01 ... 1.27341545e+00 -7.22867727e-01 3.28488141e-01] [ 1.08456516e+00 -3.19560349e-01 1.75719604e-01 ... 9.00104880e-01 1.32506788e+00 3.35369170e-01] ... [ 1.78367448e+00 -7.35489428e-02 4.24363799e-02 ... 1.34495556e+00 1.23004049e-01 2.89364606e-01] [ 7.10583448e-01 1.62062243e-01 -5.32824099e-01 ... 1.10228062e+00 5.91347992e-01 1.12296236e+00] [ 2.15460360e-01 1.75032780e-01 2.21402144e+00 ... -1.07566953e+00 -3.22672933e-01 1.00291955e+00]] [[ 1.38567841e+00 6.05125248e-01 1.65183353e+00 ... 1.04743505e+00 1.44791257e+00 1.16522753e+00] [ 4.73066479e-01 -7.40545988e-01 6.94108009e-01 ... 1.06559074e+00 1.99952707e-01 2.45230389e+00] [ 9.07429874e-01 1.05948234e+00 8.04542005e-01 ... -1.10154971e-01 3.23339283e-01 -9.33828056e-01] ... [ 5.93928576e-01 4.78471905e-01 -1.19438756e+00 ... 1.70435035e+00 -4.42720614e-02 8.75928521e-01] [-2.50591785e-01 8.02634954e-01 6.58658624e-01 ... 6.39276803e-01 -1.44229621e-01 2.08260751e+00] [ 8.73741448e-01 4.98310655e-01 1.31468666e+00 ... 3.21463943e-01 6.56686664e-01 1.49976790e+00]] ... [[-1.30564570e-01 7.18357384e-01 3.05263519e-01 ... 1.55920839e+00 2.83315301e-01 1.38169599e+00] [ 7.49560237e-01 4.15717959e-01 8.94203842e-01 ... 1.05323195e-01 5.34767687e-01 2.22069025e-01] [-5.45603216e-01 1.23304999e+00 8.58311296e-01 ... 
1.00882518e+00 6.21343195e-01 1.86616123e-01] ... [ 1.16987979e+00 7.72696674e-01 6.47752881e-01 ... -5.72028518e-01 1.49030662e+00 -5.27666152e-01] [ 8.14019263e-01 6.22967422e-01 1.04469740e+00 ... 2.46076733e-01 7.65527904e-01 1.26953375e+00] [ 4.48416561e-01 5.96472859e-01 3.88585955e-01 ... 3.62488657e-01 9.57479358e-01 1.05136549e+00]] [[ 1.41848302e+00 2.12241459e+00 1.08222854e+00 ... -7.30603933e-01 -6.44131601e-01 1.13750231e+00] [ 7.83275723e-01 -1.89233869e-01 2.39493608e+00 ... 1.14160204e+00 8.85605812e-01 1.17725885e+00] [-4.03420925e-01 -7.54913151e-01 1.04861343e+00 ... 4.02031958e-01 1.11935079e+00 7.08856344e-01] ... [-5.57755351e-01 8.34794164e-01 -3.92697126e-01 ... 1.53623652e+00 1.36603582e+00 -6.97666287e-01] [-5.16905427e-01 -7.16893375e-01 6.52524650e-01 ... -5.76085709e-02 -5.96546710e-01 -3.18491161e-01] [ 9.71513510e-01 -5.22411726e-02 5.05552351e-01 ... 7.04111099e-01 8.53547633e-01 8.51814926e-01]] [[ 3.45906943e-01 -8.78587186e-01 1.79392397e-01 ... 1.03738987e+00 1.02964270e+00 7.96619505e-02] [ 5.24361491e-01 1.34690630e+00 1.76577103e+00 ... -8.28073025e-02 3.90398264e-01 9.15463746e-01] [ 1.29765160e-02 7.87597835e-01 -3.09714317e-01 ... -1.39352784e-01 1.54038131e+00 5.77278674e-01] ... [ 5.59113264e-01 6.65323734e-01 1.71786642e+00 ... 6.27018213e-01 1.33684897e+00 2.19479978e-01] [ 1.59380913e+00 5.03318489e-01 -4.03050065e-01 ... 1.12297051e-01 -1.47122338e-01 1.98569417e+00] [-9.93481457e-01 1.50777650e+00 1.00321278e-01 ... -1.13209657e-01 5.86025640e-02 1.15320909e+00]]]] [[[[ 1.22089021e-01 7.11325824e-01 3.27458858e-01 ... 5.86285405e-02 1.65362835e-01 5.14697194e-01] [ 2.53899932e-01 6.91103399e-01 4.54595149e-01 ... 7.35963523e-01 3.21800858e-01 3.25097591e-01] [ 1.13631040e-01 2.63179421e-01 3.41209441e-01 ... 1.66862756e-01 1.89032942e-01 3.14727098e-01] ... [ 4.31738675e-01 9.40045953e-01 5.56351602e-01 ... 4.50219631e-01 5.80477715e-01 8.48273933e-01] [ 8.77933204e-01 5.15423238e-01 2.96532899e-01 ... 
4.62923050e-01 6.79653168e-01 3.99606347e-01] [ 3.01515371e-01 1.46270707e-01 5.45449972e-01 ... 4.18758720e-01 8.93786371e-01 7.68830538e-01]] [[-5.63552938e-02 6.67603970e-01 1.53751418e-01 ... 2.35993434e-02 3.52262646e-01 4.29707080e-01] [ 3.36698145e-01 4.32332963e-01 1.52839541e-01 ... 7.03259647e-01 4.38934475e-01 9.91732955e-01] [ 1.92412704e-01 2.30242223e-01 7.26281166e-01 ... 3.45292985e-01 3.48970115e-01 6.75378859e-01] ... [ 4.06233966e-01 4.20098394e-01 6.10527873e-01 ... 4.00153190e-01 1.82892755e-01 6.69414282e-01] [ 2.96743810e-01 5.05784392e-01 2.81844437e-01 ... 4.71419573e-01 3.84614527e-01 2.50718296e-01] [ 3.71192172e-02 5.70617139e-01 1.10319085e-01 ... 1.16917804e-01 6.46240354e-01 3.31403702e-01]] [[ 6.64016902e-02 3.92022848e-01 3.74597609e-01 ... 1.70882896e-01 2.77917296e-01 3.90654415e-01] [ 6.07911170e-01 3.87858927e-01 2.79388338e-01 ... 5.45826077e-01 4.18440878e-01 3.37852478e-01] [ 2.89542079e-01 3.28259975e-01 3.30906540e-01 ... -2.39360686e-02 4.31859940e-01 2.43217036e-01] ... [ 1.25677902e-02 3.34726810e-01 5.18021844e-02 ... 3.93927872e-01 6.19393468e-01 -1.73172932e-02] [ 3.21741968e-01 2.48505950e-01 7.87935734e-01 ... 2.51238883e-01 4.21856314e-01 -1.19290709e-01] [ 2.71691412e-01 2.70352542e-01 5.29344797e-01 ... 3.80094886e-01 4.77613568e-01 2.48193294e-01]] ... [[ 3.00046891e-01 5.80083847e-01 3.13351184e-01 ... 2.58604646e-01 5.24921238e-01 3.97338986e-01] [ 1.61807209e-01 6.71190381e-01 3.23081076e-01 ... 9.40502658e-02 3.06285471e-01 3.94106656e-01] [ 2.20156893e-01 3.96486640e-01 7.12521493e-01 ... 5.76759219e-01 4.61219281e-01 3.74534428e-01] ... [ 5.70506275e-01 5.44999301e-01 4.46356349e-02 ... 6.00448251e-01 3.76856506e-01 3.81815046e-01] [ 2.90664166e-01 4.80669558e-01 5.43585002e-01 ... 1.16522647e-01 1.39599487e-01 1.71031073e-01] [ 6.75912559e-01 3.02819997e-01 7.32387125e-01 ... 2.94268608e-01 4.43456620e-01 2.63089597e-01]] [[ 5.09653091e-01 7.68324733e-01 6.12742305e-01 ... 
3.43533367e-01 4.45212126e-01 2.39311308e-01] [ 4.85358536e-01 3.33834738e-01 4.48037744e-01 ... 3.73475850e-01 5.86371541e-01 3.23483348e-01] [ 1.87932968e-01 6.43171549e-01 2.87441313e-01 ... 5.72231770e-01 3.86721909e-01 3.50588411e-01] ... [ 2.62811273e-01 1.71102807e-01 6.00238025e-01 ... 3.79765987e-01 5.14467061e-01 4.54903096e-01] [ 4.82886493e-01 4.84462082e-01 1.99343950e-01 ... 3.40365052e-01 1.03269309e-01 2.63315141e-01] [ 1.86028615e-01 1.70448065e-01 5.70040464e-01 ... 1.67860724e-02 4.22623992e-01 4.58037406e-01]] [[ 5.01315117e-01 5.29986739e-01 6.00398064e-01 ... 3.93335402e-01 5.51171958e-01 4.88582328e-02] [ 6.87322199e-01 4.68281209e-01 3.03829134e-01 ... 3.99416447e-01 5.90784848e-01 6.84086800e-01] [ 1.97848275e-01 4.17920798e-01 -2.51033530e-02 ... 2.04727635e-01 1.40343800e-01 3.88485759e-01] ... [ 5.30527115e-01 5.25262713e-01 2.70461049e-02 ... 1.16545939e+00 -7.55296946e-02 3.90846968e-01] [ 4.07817423e-01 6.12700224e-01 6.25080407e-01 ... 2.91925102e-01 1.54881299e-01 2.08959654e-01] [ 2.20468789e-01 4.43086654e-01 3.97701710e-01 ... 9.44936693e-01 5.86674869e-01 6.38351798e-01]]] [[[ 7.78706893e-02 -9.53910172e-01 3.16223431e+00 ... 1.04912865e+00 4.21841592e-01 -8.89668941e-01] [ 7.58561259e-03 8.35507512e-01 3.00147629e+00 ... 1.39931488e+00 1.65135598e+00 2.63110304e+00] [-9.70546126e-01 1.15753181e-01 1.69338667e+00 ... -3.64417195e-01 2.69763947e+00 2.53113937e+00] ... [ 1.52325499e+00 1.47592813e-01 2.99540830e+00 ... -4.32960212e-01 -3.82165074e-01 -1.95479298e+00] [ 1.34940910e+00 1.79437411e+00 1.77759254e+00 ... -7.67319739e-01 1.67836225e+00 1.20959222e+00] [ 2.93431354e+00 2.04190895e-01 3.20251083e+00 ... -1.19748318e+00 1.17347455e+00 5.68235993e-01]] [[ 4.23807055e-01 8.35787237e-01 2.33267903e+00 ... -1.41280845e-01 -1.33965182e+00 1.43380482e-02] [ 6.66418314e-01 3.88999081e+00 2.71984053e+00 ... 9.69776750e-01 8.42559755e-01 2.50125670e+00] [ 4.40116912e-01 1.08135951e+00 5.66733003e-01 ... 
1.45642853e+00 2.81364107e+00 1.06036246e+00] ... [ 1.53608036e+00 1.21806836e+00 1.12154377e+00 ... 8.54034722e-02 -1.50456190e+00 2.20863056e+00] [ 8.66056800e-01 6.93579674e-01 1.10405898e+00 ... -4.29545343e-01 6.34417981e-02 8.49901676e-01] [ 1.86372268e+00 3.54052949e+00 1.81739345e-01 ... 2.07250643e+00 -6.90707192e-02 -4.48778877e-03]] [[-4.77345549e-02 -1.25920549e-01 7.03637123e-01 ... 1.47398245e+00 -3.69941682e-01 3.19022775e-01] [-1.30109238e-02 6.45375609e-01 9.73877072e-01 ... 1.55851901e+00 1.76485431e+00 1.06049788e+00] [-1.20644963e+00 8.41787815e-01 -6.90544367e-01 ... 1.95281720e+00 1.51787579e+00 2.18823314e+00] ... [ 2.60957527e+00 9.62191582e-01 1.52219331e+00 ... 1.38775319e-01 5.47970474e-01 -1.30178189e+00] [ 4.83590454e-01 2.61597228e+00 1.87385154e+00 ... -1.24176085e+00 5.05162239e-01 2.06984997e+00] [-1.82836843e+00 1.07991263e-01 3.73950094e-01 ... 3.60016763e-01 2.38409233e+00 5.18063605e-01]] ... [[ 4.24928725e-01 1.49064481e+00 -5.20821571e-01 ... 8.73193920e-01 2.34319735e+00 -1.84658575e+00] [ 1.50413287e+00 2.53841329e+00 9.07163680e-01 ... 1.17223907e+00 1.97772348e+00 3.42800927e+00] [ 7.98370779e-01 -1.75734353e+00 1.69320750e+00 ... 2.91678667e+00 -5.58581412e-01 -1.51609707e+00] ... [ 1.63255405e+00 3.89637768e-01 2.25476193e+00 ... -6.67838514e-01 1.32573113e-01 6.15276575e-01] [ 6.70698702e-01 2.25979865e-01 -5.00183105e-01 ... 2.10834932e+00 2.47703123e+00 -2.93747485e-01] [ 2.57069683e+00 -2.85790420e+00 7.91170001e-01 ... 2.39389920e+00 1.82387161e+00 4.06092167e+00]] [[ 6.58996582e-01 -5.40556610e-01 3.05097461e-01 ... -2.78985643e+00 2.94582033e+00 3.65883052e-01] [ 1.24869108e+00 -7.02046603e-02 1.81412697e+00 ... 1.97298601e-01 2.10154772e-01 3.09724283e+00] [ 2.89779425e-01 1.68037128e+00 -3.07574332e-01 ... -6.28221571e-01 5.99233150e-01 1.32298004e-02] ... [ 1.56300211e+00 1.41086257e+00 -8.93537223e-01 ... 1.06851196e+00 3.40394783e+00 2.21766129e-01] [-1.03625548e+00 3.55579257e-01 9.70679462e-01 ... 
1.40915662e-01 3.76559764e-01 2.16684437e+00] [-3.28599274e-01 4.40421724e+00 2.33787155e+00 ... -2.91301608e-01 -2.79278636e+00 2.85886335e+00]] [[ 2.73746848e+00 1.42438161e+00 -4.58156228e-01 ... -2.53674954e-01 1.59360528e+00 2.18327832e+00] [ 4.48563248e-01 4.02461815e+00 9.76883113e-01 ... 1.68445218e+00 5.19523501e-01 9.56609964e-01] [-2.06978130e+00 1.75447315e-02 3.17887521e+00 ... 2.64510900e-01 -4.27471548e-02 3.05893242e-01] ... [ 2.25354242e+00 2.61832929e+00 2.39146495e+00 ... 5.62493205e-02 1.82129586e+00 7.94141352e-01] [ 1.19366491e+00 2.62138629e+00 3.05337727e-01 ... 1.38166356e+00 1.63164878e+00 1.42694151e+00] [ 2.58345127e+00 -6.76496774e-02 1.41693890e+00 ... 8.26267377e-02 -4.58553672e-01 -1.46018875e+00]]] [[[ 9.62971926e-01 1.07485962e+00 9.62421834e-01 ... 1.76067495e+00 1.96865332e+00 2.96115685e+00] [-5.15294038e-02 1.29527259e+00 4.73325998e-01 ... 1.75622451e+00 2.52181387e+00 -3.30679178e-01] [ 7.74253607e-01 1.03154731e+00 9.67245102e-01 ... 2.77537560e+00 1.63831365e+00 1.07840872e+00] ... [ 1.94835544e+00 2.78660202e+00 1.22295713e+00 ... 1.78820026e+00 1.31943238e+00 5.56325018e-01] [-1.35377228e-01 9.63286340e-01 1.87318158e+00 ... 6.34911299e-01 1.18361735e+00 1.27827609e+00] [ 1.62462676e+00 8.63552928e-01 1.02188683e+00 ... 2.90550542e+00 -6.89885676e-01 1.35264313e+00]] [[ 1.71529865e+00 3.08444881e+00 -3.02852690e-01 ... 7.26865888e-01 2.04878473e+00 8.34853053e-01] [ 1.79767799e+00 2.55581880e+00 1.80385542e+00 ... 1.24752617e+00 2.06346846e+00 2.61258554e+00] [ 3.08998632e+00 3.49012554e-01 2.30846381e+00 ... 1.91833830e+00 1.48611569e+00 1.32244432e+00] ... [ 1.87843180e+00 2.00141597e+00 4.39853281e-01 ... 7.08061278e-01 2.00782013e+00 7.89933205e-01] [ 1.67801070e+00 3.09945107e-01 2.50772023e+00 ... 7.16278195e-01 2.81840181e+00 1.52100849e+00] [ 2.70637178e+00 2.16637754e+00 1.49208283e+00 ... 1.84586608e+00 -7.47878611e-01 1.09077144e+00]] [[ 8.97006571e-01 2.32156086e+00 2.60013652e+00 ... 
6.46457016e-01 1.55676377e+00 2.12110686e+00] [ 2.38292456e+00 1.26239705e+00 1.53286135e+00 ... 4.09724772e-01 3.35022354e+00 3.45342070e-01] [ 1.04332292e+00 2.13322711e+00 2.49497843e+00 ... 8.95301580e-01 1.66327870e+00 2.52395988e+00] ... [ 1.03594184e+00 1.98939848e+00 1.75328648e+00 ... 9.72164869e-01 2.57932037e-01 2.61577535e+00] [ 1.37518954e+00 7.16521859e-01 1.38598311e+00 ... 2.78588200e+00 1.01982617e+00 1.85380757e+00] [ 2.75156140e+00 1.92031956e+00 2.07187510e+00 ... 9.02220130e-01 -1.43711656e-01 1.07307625e+00]] ... [[ 1.31322014e+00 -6.42710328e-02 1.69052839e-01 ... 2.53968930e+00 3.20949817e+00 9.97891963e-01] [ 2.94875860e+00 1.22162640e+00 2.19451666e+00 ... 2.37869763e+00 2.00386477e+00 2.44133592e-01] [ 1.93293452e+00 2.23315358e+00 1.19920969e+00 ... 2.64489508e+00 5.55187762e-01 1.18315125e+00] ... [ 7.59802163e-01 1.97673202e+00 1.91573274e+00 ... 1.13705182e+00 8.70170712e-01 1.29245031e+00] [ 1.59953010e+00 2.32246208e+00 2.12560391e+00 ... 9.76213753e-01 8.54588687e-01 1.31294453e+00] [ 1.70524967e+00 1.10109687e+00 2.21607184e+00 ... 2.45159173e+00 2.37462950e+00 1.27689719e+00]] [[ 1.63984931e+00 -4.11444098e-01 -9.71715674e-02 ... 2.16022515e+00 1.85049200e+00 1.23261082e+00] [-6.29919052e-01 1.73834753e+00 1.85865909e-01 ... 3.08957744e+00 8.67685303e-03 1.58801591e+00] [ 1.58935165e+00 3.11964929e-01 1.36959350e+00 ... -2.67861456e-01 1.63074565e+00 1.58108521e+00] ... [ 1.32428312e+00 8.39440703e-01 2.51560140e+00 ... 2.93528318e+00 2.09855390e+00 1.07507932e+00] [ 2.23154092e+00 1.22007465e+00 2.97815949e-01 ... 2.81218505e+00 1.62495124e+00 9.41222489e-01] [-7.05891728e-01 -7.63957679e-01 1.42290831e+00 ... 2.70033747e-01 3.99383843e-01 -5.31714439e-01]] [[ 1.40238011e+00 2.69613028e+00 6.12034500e-01 ... 1.97164252e-01 4.51554656e-01 2.30228615e+00] [ 1.38345718e+00 3.72784615e+00 2.89902163e+00 ... 1.46681333e+00 2.62805915e+00 8.33134353e-01] [ 1.82062352e+00 1.09390938e+00 1.61106503e+00 ... 
5.17778397e-01 2.77107477e+00 1.58099091e+00] ... [ 1.13222373e+00 5.64230561e-01 2.66107845e+00 ... 1.78987873e+00 2.35216603e-01 9.02699709e-01] [ 2.84710258e-01 1.13511503e+00 8.48959267e-01 ... 2.57279563e+00 1.36147380e+00 1.72289118e-01] [ 1.25650978e+00 2.19404912e+00 1.36237741e+00 ... 1.55178115e-01 1.42440748e+00 2.32687140e+00]]] [[[ 8.69943202e-01 8.91316473e-01 7.41105378e-01 ... 7.87230492e-01 8.22066605e-01 7.26622164e-01] [ 7.86836147e-01 7.51844764e-01 7.46029556e-01 ... 6.64777577e-01 7.29287505e-01 6.13033473e-01] [ 7.40755200e-01 6.88333452e-01 6.88641131e-01 ... 7.57809162e-01 7.26272643e-01 6.39179647e-01] ... [ 7.44904697e-01 6.91192627e-01 6.45335615e-01 ... 6.80231273e-01 7.08330154e-01 8.64286363e-01] [ 8.75693917e-01 7.32376397e-01 6.69798076e-01 ... 8.26350808e-01 5.97508907e-01 7.06459701e-01] [ 6.60081923e-01 7.74468184e-01 7.07075238e-01 ... 6.77654982e-01 8.27578187e-01 7.06503212e-01]] [[ 6.87136590e-01 6.87335253e-01 6.28790081e-01 ... 6.40814543e-01 7.99534261e-01 6.00147426e-01] [ 6.68286145e-01 6.63178682e-01 7.43773580e-01 ... 6.85579121e-01 6.91717386e-01 6.28189683e-01] [ 6.56688213e-01 7.91047275e-01 5.94950795e-01 ... 7.10126042e-01 7.27371037e-01 7.78964579e-01] ... [ 7.55259812e-01 6.37107134e-01 6.61717713e-01 ... 6.76483929e-01 6.87485039e-01 7.42760301e-01] [ 6.33472502e-01 7.41335630e-01 7.64043748e-01 ... 6.98796630e-01 7.82912731e-01 7.15147793e-01] [ 7.47696757e-01 7.06609249e-01 6.63321137e-01 ... 6.87384009e-01 6.68087304e-01 7.81272352e-01]] [[ 7.55231082e-01 6.89575791e-01 7.23056376e-01 ... 7.33125746e-01 7.11198509e-01 7.82589912e-01] [ 6.48539126e-01 6.70449853e-01 6.43680632e-01 ... 8.13279152e-01 7.71868944e-01 6.35041296e-01] [ 5.60318589e-01 7.51223564e-01 7.87649095e-01 ... 6.36182189e-01 7.60521591e-01 7.63631940e-01] ... [ 7.31702566e-01 6.92305446e-01 7.30379701e-01 ... 8.19621205e-01 7.74202228e-01 7.35728681e-01] [ 6.82429492e-01 7.03475833e-01 7.11993277e-01 ... 
5.99545658e-01 8.37740481e-01 7.32729554e-01] [ 6.34113014e-01 6.79099381e-01 7.95285821e-01 ... 6.60061061e-01 6.50458395e-01 5.88265657e-01]] ... [[ 6.96259022e-01 7.34152317e-01 7.11686194e-01 ... 8.27448726e-01 7.27697015e-01 7.43895650e-01] [ 6.07802153e-01 7.02340007e-01 6.96700931e-01 ... 7.82810986e-01 8.00198793e-01 7.69321859e-01] [ 7.17195451e-01 7.62660921e-01 7.89711356e-01 ... 6.13388240e-01 7.12551177e-01 8.43868315e-01] ... [ 6.86534882e-01 6.53245449e-01 6.95482731e-01 ... 7.13039041e-01 8.03106844e-01 6.34054065e-01] [ 8.03686023e-01 7.40450621e-01 6.70712829e-01 ... 7.69901872e-01 7.53064394e-01 7.87955225e-01] [ 8.06892574e-01 6.42704725e-01 7.23648727e-01 ... 7.68365681e-01 8.55495870e-01 7.86119342e-01]] [[ 5.72657347e-01 6.58827066e-01 7.49243319e-01 ... 6.59742653e-01 8.33129346e-01 6.90964460e-01] [ 7.39739418e-01 6.63172603e-01 6.51732981e-01 ... 7.23360419e-01 5.68915367e-01 7.34623492e-01] [ 6.83190286e-01 6.92384064e-01 8.57326210e-01 ... 6.77960336e-01 6.96968675e-01 7.14992702e-01] ... [ 6.18189752e-01 7.20713913e-01 5.52632153e-01 ... 6.66748106e-01 7.22767711e-01 8.33886802e-01] [ 5.80160856e-01 5.68246245e-01 6.44379914e-01 ... 6.18630588e-01 6.62446916e-01 5.44141829e-01] [ 9.00258660e-01 6.95012569e-01 7.34618247e-01 ... 7.05021560e-01 8.52923393e-01 7.36174464e-01]] [[ 7.55403697e-01 7.26078033e-01 7.41289914e-01 ... 7.76434362e-01 7.52513885e-01 7.19295979e-01] [ 7.63436913e-01 7.13734746e-01 8.17423820e-01 ... 8.03329229e-01 6.64965451e-01 5.71797311e-01] [ 7.85947263e-01 6.17402136e-01 6.70061111e-01 ... 6.62563920e-01 8.14430177e-01 7.32073128e-01] ... [ 6.31543994e-01 9.28301275e-01 6.30414188e-01 ... 6.65492594e-01 7.58445740e-01 7.05401123e-01] [ 7.75638878e-01 5.21405339e-01 7.99423337e-01 ... 7.02392340e-01 7.43392885e-01 7.88090289e-01] [ 7.26318598e-01 6.38772964e-01 6.72602117e-01 ... 8.06247473e-01 5.76109231e-01 7.64289081e-01]]] [[[ 3.52082998e-01 1.93683267e-01 9.12011087e-01 ... 
4.96904582e-01 3.67953300e-01 2.82032996e-01] [ 3.84938896e-01 -2.16463700e-01 -7.41364062e-02 ... 3.23444903e-01 8.76950324e-01 2.34357789e-01] [ 4.37565953e-01 7.43830383e-01 2.02325463e-01 ... 2.09933162e-01 6.17217481e-01 1.84440136e-01] ... [ 3.22284520e-01 1.07881494e-01 3.56892675e-01 ... 7.73710534e-02 8.67163360e-01 -4.77780066e-02] [-8.15812424e-02 4.44058180e-01 1.66893318e-01 ... 1.57621458e-01 -5.89165799e-02 1.92575380e-01] [ 8.23314130e-01 6.57059908e-01 2.59545773e-01 ... 3.62691700e-01 3.94673914e-01 7.40108669e-01]] [[ 3.35359454e-01 2.52090752e-01 6.10744357e-01 ... 6.13728642e-01 1.08544397e+00 2.55991012e-01] [ 2.44725943e-01 5.40745735e-01 4.89011019e-01 ... 6.69453025e-01 3.76620680e-01 6.91961825e-01] [ 1.97121367e-01 4.19595033e-01 1.25748619e-01 ... 4.06139940e-01 1.77257657e-01 1.15437895e-01] ... [ 4.07522887e-01 5.00950336e-01 2.60346055e-01 ... 7.33722746e-01 2.16568828e-01 1.48209944e-01] [ 3.09759885e-01 4.58469361e-01 7.38830805e-01 ... 1.48935750e-01 9.93776694e-02 4.21699375e-01] [ 2.33192399e-01 5.56205869e-01 1.96590021e-01 ... 3.91294569e-01 7.08792746e-01 6.22708380e-01]] [[ 3.67212683e-01 5.63565850e-01 -1.59642905e-01 ... 1.40497878e-01 1.24028482e-01 4.38335985e-01] [ 5.98534107e-01 3.73387098e-01 6.15552723e-01 ... 3.79744828e-01 7.03159332e-01 1.69622451e-01] [ 4.20378417e-01 6.80218101e-01 4.38921750e-01 ... 2.30616882e-01 -8.69707093e-02 1.21668100e-01] ... [ 1.90637320e-01 1.99577451e-01 1.88442469e-01 ... 2.74745762e-01 3.18915248e-01 5.60857773e-01] [-5.70560321e-02 1.97417200e-01 -3.34731229e-02 ... 2.18874142e-01 6.35025382e-01 1.12516470e-01] [ 2.87676692e-01 6.70702696e-01 4.20548320e-02 ... 7.36822963e-01 5.03431737e-01 5.15972078e-01]] ... [[ 3.18777502e-01 3.47560823e-01 4.81407791e-01 ... 2.83160478e-01 5.81609309e-01 1.68571882e-02] [ 4.27112609e-01 8.73758733e-01 2.90353894e-01 ... 5.74446499e-01 -3.46025437e-01 4.92426753e-01] [ 3.14293027e-01 3.13125610e-01 2.47456759e-01 ... 
7.49967024e-02 8.54023337e-01 4.86817420e-01] ... [ 8.30131054e-01 3.10850382e-01 6.66494966e-01 ... 3.81522417e-01 -9.09849778e-02 5.19034386e-01] [-1.25491679e-01 -1.43029198e-01 4.57096219e-01 ... 9.88671109e-02 3.13129753e-01 6.80788457e-01] [ 3.72042239e-01 3.83894295e-02 2.27037594e-01 ... 1.33950785e-01 5.70918739e-01 8.02421570e-01]] [[ 3.85090172e-01 7.94714466e-02 2.10161403e-01 ... 1.10379301e-01 3.55469674e-01 4.99093443e-01] [ 8.14987496e-02 6.42028630e-01 4.04502451e-01 ... 3.38851780e-01 5.67643821e-01 2.05803901e-01] [ 2.57246405e-01 4.40357089e-01 6.18846476e-01 ... 6.81762934e-01 6.84606433e-02 2.91308939e-01] ... [ 3.68308991e-01 1.74773216e-01 2.51300126e-01 ... 6.45999908e-01 -1.02878049e-01 2.53930390e-01] [ 5.31612515e-01 2.77518064e-01 3.03956985e-01 ... 4.62262243e-01 2.06805170e-01 -1.34544462e-01] [ 1.09636523e-02 2.48280510e-01 5.16659498e-01 ... 1.40184224e-01 2.73822993e-01 -3.13013703e-01]] [[ 5.73430419e-01 1.61686718e-01 5.66487551e-01 ... 8.44345570e-01 7.88827300e-01 2.43725702e-01] [ 1.78038985e-01 4.06062067e-01 4.31926280e-01 ... 1.27320871e-01 7.28007555e-01 3.67500991e-01] [ 4.19355184e-03 3.47426236e-02 5.14209211e-01 ... 5.57233989e-01 3.49997044e-01 4.97935504e-01] ... [ 2.68733889e-01 3.06440860e-01 7.31119633e-01 ... 1.86402708e-01 7.67278016e-01 6.94043422e-03] [ 2.32777242e-02 9.94310975e-01 3.08423698e-01 ... 4.17314798e-01 1.20413564e-01 -4.70922619e-01] [ 4.35217470e-01 3.44015688e-01 4.02246714e-01 ... 2.28601456e-01 6.66339159e-01 2.92759717e-01]]] [[[-2.76790679e-01 5.40830076e-01 -1.04375921e-01 ... 1.23558962e+00 3.36819708e-01 1.03562617e+00] [ 1.26533210e+00 1.90535235e+00 1.45173445e-01 ... 9.10345435e-01 1.61606598e+00 6.10685766e-01] [ 2.61460692e-01 -8.09870481e-01 7.14585423e-01 ... 8.98828268e-01 3.09569687e-01 2.23108530e-02] ... [ 1.72990322e-01 -2.93207526e-01 7.62105346e-01 ... -3.61105800e-02 -2.06385106e-02 -8.38818192e-01] [-1.81826246e+00 2.92142868e-01 9.65945303e-01 ... 
1.59094191e+00 -7.12628424e-01 3.37365478e-01] [ 1.83839691e+00 2.69961119e-01 -3.90951484e-01 ... 7.91277111e-01 6.31258965e-01 4.38203752e-01]] [[ 4.95113552e-01 1.12975490e+00 1.90667808e+00 ... 1.07924020e+00 4.29853171e-01 9.94182825e-01] [ 1.27095804e-01 3.79341274e-01 1.91836679e+00 ... -2.00068176e-01 -1.18812644e+00 6.14045382e-01] [ 7.81910717e-01 7.42323458e-01 7.03188360e-01 ... 1.17359471e+00 7.29312420e-01 3.29965502e-01] ... [ 4.68361288e-01 8.55318606e-01 4.56686705e-01 ... 5.89772999e-01 -8.88543785e-01 -3.76201458e-02] [-4.46999460e-01 -4.66007978e-01 -5.56997061e-01 ... -1.19686522e-01 1.28191960e+00 6.74851000e-01] [ 1.24961102e+00 1.02701974e+00 6.45595714e-02 ... -1.32272577e+00 4.88193929e-01 1.71881378e+00]] [[-2.80868173e-01 3.99145067e-01 1.49775052e+00 ... -2.77527422e-01 1.40769041e+00 1.67939091e+00] [ 1.69046021e+00 -1.79197669e+00 -7.37858638e-02 ... 4.97326255e-01 8.74939978e-01 -4.07818228e-01] [-2.79551446e-01 2.19496942e+00 -6.35203421e-02 ... 1.00390446e+00 1.13244045e+00 9.18291628e-01] ... [ 2.15610790e+00 -8.03659186e-02 1.22426605e+00 ... 3.06872070e-01 -7.67241240e-01 1.43182969e+00] [-3.68176028e-02 3.23308349e-01 1.87146318e+00 ... -7.20344603e-01 1.19977646e-01 1.48250389e+00] [ 1.82559133e-01 6.65357769e-01 -1.53866380e-01 ... 2.54608959e-01 1.93556398e-01 2.05450749e+00]] ... [[ 1.27772242e-02 7.44573176e-01 2.00502753e+00 ... -5.08677602e-01 -6.31749630e-02 1.74494088e+00] [-9.71635997e-01 7.05856025e-01 1.50698781e+00 ... 1.81559992e+00 3.86783592e-02 1.12003481e+00] [ 9.77670491e-01 1.53577173e+00 3.80310595e-01 ... 4.21387196e-01 6.65853024e-01 -1.84736565e-01] ... [ 1.51774287e+00 2.24065900e-01 8.74522209e-01 ... 2.89515018e-01 1.64919877e+00 6.28052413e-01] [ 1.23427749e+00 2.24870563e+00 6.92831099e-01 ... 2.74431948e-02 1.38772130e+00 1.44601750e+00] [ 1.29216266e+00 -7.68413782e-01 9.10504878e-01 ... 8.63710940e-01 1.33877927e-02 2.12187380e-01]] [[ 7.32139647e-01 1.75084352e-01 -4.41362739e-01 ... 
-6.52891517e-01 5.98400891e-01 1.05126834e+00] [ 5.82681596e-01 1.28804553e+00 -6.47120118e-01 ... 1.47955716e+00 9.69635367e-01 -5.05836070e-01] [ 1.18075407e+00 1.07875562e+00 1.09368360e+00 ... 8.77737552e-02 -3.01655322e-01 2.80915856e-01] ... [ 1.09358919e+00 4.64870632e-01 -1.06775187e-01 ... 2.27482140e-01 1.26690137e+00 -5.26774108e-01] [ 2.02678967e+00 9.65034008e-01 -2.24661961e-01 ... 1.35453761e+00 1.39437020e+00 1.67825341e+00] [ 2.66137391e-01 2.36656755e-01 8.09099674e-02 ... 1.63820601e+00 1.29010212e+00 -4.67045233e-02]] [[ 2.56940150e+00 9.52784956e-01 -6.92585528e-01 ... 1.66742635e+00 2.13161275e-01 3.10388476e-01] [ 4.33972299e-01 -1.04369497e+00 2.64479965e-01 ... 5.03675640e-01 7.53507376e-01 6.19321287e-01] [ 3.79240125e-01 -1.32989812e+00 -1.06594908e+00 ... 9.33281839e-01 9.67801332e-01 1.13673210e-01] ... [-8.16836774e-01 1.13967049e+00 1.29664016e+00 ... 1.31617153e+00 -1.58494730e-02 1.15798473e+00] [-2.94945002e-01 -2.29129836e-01 1.39241040e+00 ... -4.04833211e-03 2.70818204e-01 1.18746758e+00] [-5.72072044e-02 9.29896712e-01 -1.13734424e+00 ... -1.85781449e-01 -5.90217054e-01 6.61251724e-01]]]]]; ov_res: [[[[[ 4.68702942e-01 1.01721570e-01 5.28469563e-01 ... -3.51550430e-02 2.39351660e-01 7.55955398e-01] [ 3.24727774e-01 3.11605990e-01 5.32826960e-01 ... 4.73871768e-01 5.30514479e-01 4.97805506e-01] [ 3.24432999e-01 5.10638297e-01 6.70931458e-01 ... 3.60091448e-01 3.80762070e-01 1.87429145e-01] ... [ 2.12717474e-01 3.31430435e-01 2.94324994e-01 ... 3.85380149e-01 7.34339237e-01 6.86820090e-01] [ 5.01417816e-01 3.58302683e-01 4.10954714e-01 ... 4.82190013e-01 6.39897704e-01 4.62998122e-01] [ 1.82154581e-01 3.52491826e-01 1.40732661e-01 ... 4.70627159e-01 3.84994149e-01 1.96838900e-01]] [[ 6.43476129e-01 1.93258002e-01 3.42870623e-01 ... 3.21597248e-01 7.25144863e-01 4.52072084e-01] [ 4.44445282e-01 5.27227104e-01 4.31656569e-01 ... 4.29812968e-01 5.30946791e-01 5.23218870e-01] [ 1.35311052e-01 4.31600124e-01 4.04896647e-01 ... 
4.60632801e-01 6.18196130e-01 3.22423637e-01] ... [-2.09412217e-01 6.13190234e-01 4.06276703e-01 ... 4.43411589e-01 5.48118174e-01 4.01180506e-01] [ 7.34042168e-01 5.75971425e-01 2.93202728e-01 ... 6.48907721e-01 6.08007967e-01 1.18752144e-01] [ 1.32887259e-01 2.65928179e-01 5.78338563e-01 ... 2.72237092e-01 5.28859973e-01 7.67821133e-01]] [[ 1.78325087e-01 5.44506252e-01 2.60623723e-01 ... 4.35059249e-01 6.11762881e-01 3.73030722e-01] [ 7.31586158e-01 4.46384370e-01 1.18960313e-01 ... 7.24158347e-01 8.92192662e-01 4.80917394e-01] [ 2.71975011e-01 2.94592083e-01 5.27561605e-01 ... 7.67591476e-01 2.91972071e-01 5.33621192e-01] ... [ 3.59263241e-01 5.50890148e-01 6.52684569e-01 ... 5.61425328e-01 -1.52870277e-02 4.72879171e-01] [ 2.96567619e-01 5.30465059e-02 3.73860568e-01 ... 1.64213747e-01 1.61211267e-01 4.24042851e-01] [ 4.54798728e-01 3.34856987e-01 4.49033886e-01 ... 6.64964736e-01 4.19524670e-01 1.95139974e-01]] ... [[ 2.96908170e-01 3.23092580e-01 6.34965062e-01 ... 3.90057594e-01 1.44544244e-01 5.81865609e-01] [ 3.87782007e-01 3.61145809e-02 5.70634782e-01 ... 5.23876965e-01 2.61810690e-01 5.83750486e-01] [ 2.80567646e-01 2.96312481e-01 -2.73648560e-01 ... 1.37376562e-01 3.94192249e-01 5.99966049e-01] ... [ 2.69306868e-01 2.45935127e-01 8.67892385e-01 ... 1.98662162e-01 5.56318879e-01 3.86092752e-01] [ 6.03147566e-01 5.77327669e-01 8.13087225e-01 ... 3.76944959e-01 5.63739538e-01 3.75561088e-01] [ 3.38261500e-02 3.64960015e-01 2.60764152e-01 ... 4.61141348e-01 2.92527765e-01 5.88702202e-01]] [[ 5.08160949e-01 1.33848533e-01 5.81450649e-02 ... 3.42052281e-01 4.28883433e-01 4.42509443e-01] [ 1.09458983e-01 2.61673808e-01 2.98091114e-01 ... 2.28503466e-01 3.89691293e-01 6.96976066e-01] [ 4.89046752e-01 5.68028212e-01 3.86470973e-01 ... 7.08099842e-01 5.19507468e-01 8.32801104e-01] ... [ 5.87224364e-01 4.38042581e-01 5.12202322e-01 ... 3.22141320e-01 2.83799201e-01 1.64141729e-01] [ 4.88639712e-01 6.65702820e-02 7.59515107e-01 ... 
5.90039551e-01 3.49152505e-01 4.52343136e-01] [ 2.80288547e-01 4.83431488e-01 3.97491992e-01 ... 6.09346509e-01 2.95074105e-01 4.36228305e-01]] [[ 2.95254201e-01 4.78034884e-01 3.48819494e-01 ... 4.52705741e-01 2.89485663e-01 4.12739664e-01] [ 5.73918045e-01 2.75201052e-01 4.21147764e-01 ... 7.31868863e-01 5.91974914e-01 1.46693304e-01] [ 2.66985267e-01 1.55220747e-01 5.55276632e-01 ... 3.94394100e-02 2.78511912e-01 5.74410081e-01] ... [ 3.07991415e-01 6.91426516e-01 4.29193348e-01 ... 1.29728511e-01 3.60857606e-01 3.61331068e-02] [ 7.98680842e-01 2.30760306e-01 2.03206524e-01 ... 2.78942496e-01 5.91856003e-01 4.57751036e-01] [ 1.56810373e-01 1.13960199e-01 8.24745953e-01 ... 4.71349895e-01 2.30782896e-01 5.57056427e-01]]] [[[ 1.94061194e-02 1.98005164e+00 -1.07206547e+00 ... 3.73928070e-01 4.10667562e+00 -4.38653052e-01] [ 2.12265635e+00 1.89194489e+00 1.12842035e+00 ... 1.00931454e+00 2.58265543e+00 9.11106646e-01] [ 8.81454110e-01 1.81315172e+00 -1.19821095e+00 ... -1.13123097e-01 -7.84831524e-01 -1.38657590e-04] ... [-9.56466496e-01 -1.98649272e-01 -2.82101687e-02 ... 2.75032711e+00 7.48312056e-01 2.81640553e+00] [ 1.94695127e+00 -8.43594551e-01 1.86332250e+00 ... 8.54615271e-01 -7.39444017e-01 -3.50560248e-01] [ 8.29064012e-01 -6.31342411e-01 2.47784168e-01 ... 4.49210584e-01 1.78011727e+00 3.51218081e+00]] [[ 7.51599550e-01 1.26749086e+00 2.00256729e+00 ... 5.24763942e-01 7.84259066e-02 1.54593289e+00] [ 1.38584471e+00 8.33940446e-01 6.13908350e-01 ... 1.15616357e+00 3.20327282e-02 1.06654930e+00] [-6.06139898e-01 -8.13824534e-01 2.34466624e+00 ... -8.12815726e-01 -1.15251160e+00 3.56254876e-02] ... [ 8.00345123e-01 1.60189688e+00 -1.29818749e+00 ... -1.94107205e-01 4.73935097e-01 -9.02002156e-02] [ 4.02188921e+00 -4.06820029e-01 2.09282231e+00 ... 1.70053601e+00 -2.82283694e-01 1.74266911e+00] [ 3.36005837e-01 4.64617682e+00 2.47191215e+00 ... -5.61378658e-01 7.66520381e-01 1.78144860e+00]] [[-9.82569277e-01 1.82486758e-01 -1.68913436e+00 ... 
2.24500489e+00 7.96647727e-01 1.88222814e+00] [ 1.11053419e+00 -1.40591455e+00 2.07561398e+00 ... 2.12482262e+00 6.90135300e-01 2.59620118e+00] [ 1.99380410e+00 1.53547263e+00 1.74699283e+00 ... 2.26008415e+00 3.39677882e+00 4.90270472e+00] ... [ 6.96659267e-01 2.19908118e+00 1.88681173e+00 ... 1.50246358e+00 5.42122364e-01 8.38355839e-01] [ 9.70299661e-01 1.45857644e+00 4.63120031e+00 ... -1.43920735e-01 2.70413756e-01 -1.36967468e+00] [ 1.74093992e-01 1.86678684e+00 1.68629003e+00 ... 2.92077041e+00 5.87881319e-02 5.06003976e-01]] ... [[-3.99690509e-01 7.49868333e-01 -6.31910935e-02 ... 3.76706958e-01 2.19262719e+00 -8.13199461e-01] [ 1.06731498e+00 -6.19601607e-01 1.49556875e+00 ... -5.81805110e-01 4.83874559e-01 2.90578771e+00] [ 1.98150861e+00 3.02016473e+00 8.12260747e-01 ... -7.07047224e-01 8.82594943e-01 -1.72166860e+00] ... [ 1.09700572e+00 1.45862305e+00 -6.35904312e-01 ... 1.79438806e+00 6.92631423e-01 2.83095300e-01] [ 2.32592773e+00 1.62999153e+00 1.84932256e+00 ... 1.43421698e+00 -3.08386266e-01 1.32763064e+00] [-4.91991872e-03 1.00581574e+00 6.55813932e-01 ... 1.50111818e+00 2.31285572e+00 3.27478528e+00]] [[-2.05970913e-01 7.13986099e-01 1.32333517e+00 ... 2.10184526e+00 4.51562315e-01 -8.60381722e-01] [ 1.82152939e+00 1.47373044e+00 1.87105274e+00 ... -6.18970394e-02 1.02932477e+00 -6.04133427e-01] [ 1.44198716e+00 2.16785574e+00 2.20439196e+00 ... 1.57613981e+00 -3.60333949e-01 -2.77728915e+00] ... [ 1.82357776e+00 2.20921016e+00 -1.84591568e+00 ... 2.30899620e+00 9.23236012e-01 2.19863272e+00] [-2.10998964e+00 9.19050872e-01 -4.41906512e-01 ... -6.24428876e-02 2.69910741e+00 2.22125217e-01] [-6.07433617e-01 1.18634236e+00 -4.75092292e-01 ... -3.04610729e-01 2.85169125e+00 -6.98793948e-01]] [[ 1.99257052e+00 3.23922157e+00 8.60390663e-01 ... 8.86863828e-01 1.04930234e+00 2.85620427e+00] [ 4.94982064e-01 -9.99296963e-01 3.26116562e-01 ... 2.41127038e+00 2.28861642e+00 -3.66765141e-01] [ 1.11334908e+00 -3.19866800e+00 -4.44983065e-01 ... 
2.96545482e+00 9.49235499e-01 -9.92853820e-01] ... [ 9.33565557e-01 8.57120514e-01 3.44287306e-01 ... 1.52180982e+00 1.95221233e+00 -4.08455461e-01] [ 1.77038836e+00 3.72061110e+00 7.34955072e-01 ... -9.56092536e-01 8.97078633e-01 8.36762071e-01] [-2.37626925e-01 6.89483523e-01 1.42670035e+00 ... 2.82012844e+00 1.19586015e+00 1.47777259e+00]]] [[[ 1.66460335e+00 -6.93766475e-02 -5.96388817e-01 ... 1.72837353e+00 9.27043974e-01 1.53360033e+00] [ 1.90856791e+00 1.83842158e+00 7.83970237e-01 ... 2.35048938e+00 1.60305262e+00 2.36678672e+00] [ 1.39941323e+00 2.26732230e+00 1.80849493e+00 ... 1.80950582e+00 2.15614128e+00 1.49309945e+00] ... [ 1.63688898e+00 1.43081796e+00 1.87689698e+00 ... 1.32775927e+00 -2.98078116e-02 9.35188413e-01] [ 1.78217542e+00 8.97290036e-02 2.43245649e+00 ... 2.04927373e+00 1.86196291e+00 1.82181501e+00] [ 5.38955033e-01 6.97260559e-01 8.71467590e-01 ... 2.69022489e+00 1.22035813e+00 2.71172047e+00]] [[ 9.10737693e-01 9.52296197e-01 7.11389005e-01 ... 8.41958463e-01 2.43957281e+00 2.59358788e+00] [ 1.67734921e+00 1.78653300e+00 1.60335970e+00 ... 1.47604358e+00 1.48809147e+00 2.15902877e+00] [ 8.42271328e-01 9.44193900e-01 6.06362939e-01 ... 1.86012566e+00 1.24606764e+00 2.47421312e+00] ... [ 2.29802299e+00 1.18367457e+00 1.87445903e+00 ... 1.45483494e+00 9.85832512e-01 1.59895849e+00] [ 8.04081619e-01 2.84676456e+00 -7.38525093e-01 ... 9.46842730e-01 1.68226480e+00 2.68815368e-01] [ 1.12980676e+00 1.11097395e-01 6.42829120e-01 ... 1.11247671e+00 2.61091232e+00 2.05433631e+00]] [[ 2.79889512e+00 -1.84934869e-01 1.83456361e+00 ... 2.33848715e+00 1.79289889e+00 1.49744010e+00] [ 3.22457385e+00 2.02288008e+00 1.48731744e+00 ... 6.86255336e-01 2.38714862e+00 3.52135807e-01] [ 2.28460670e-01 1.81605649e+00 2.05746984e+00 ... 2.80626798e+00 1.46042848e+00 1.03235030e+00] ... [ 1.02477670e+00 4.05139893e-01 3.07483315e+00 ... 2.05514026e+00 4.54212046e+00 2.04286671e+00] [ 2.31079865e+00 1.13765550e+00 9.15177107e-01 ... 
1.25589979e+00 2.84588784e-01 8.83538425e-01] [ 8.99258971e-01 8.53023469e-01 8.11442062e-02 ... 1.90163088e+00 2.98315501e+00 2.07105160e+00]] ... [[ 1.04217792e+00 2.33185196e+00 2.28439522e+00 ... 1.16879213e+00 1.65830314e-01 2.41331053e+00] [ 1.88133335e+00 1.27559507e+00 1.44843769e+00 ... 2.06589603e+00 2.46081662e+00 1.07845330e+00] [ 1.57272458e+00 8.86457086e-01 1.76335466e+00 ... 6.27657399e-02 2.15263462e+00 -7.06781149e-02] ... [ 4.93546873e-01 -3.95463496e-01 1.94543636e+00 ... 7.59157598e-01 5.35611391e-01 1.71913862e+00] [ 1.02956235e+00 1.97120354e-01 1.60813439e+00 ... 1.98578262e+00 1.44116473e+00 1.71276200e+00] [ 2.12284565e+00 2.74933052e+00 2.28441906e+00 ... 1.73491144e+00 2.45292211e+00 2.28152204e+00]] [[ 2.68344665e+00 7.12303519e-01 2.14628816e+00 ... 2.60958314e-01 -1.09162949e-01 9.67291296e-01] [ 2.43103719e+00 2.32876301e+00 2.46121079e-01 ... 3.71164232e-01 1.20922303e+00 3.13818789e+00] [ 1.02343667e+00 2.21268487e+00 4.09686238e-01 ... 1.42517781e+00 2.93254328e+00 1.31593907e+00] ... [ 2.87862182e-01 7.72045910e-01 3.66886228e-01 ... 1.37689590e+00 1.50987732e+00 1.01392880e-01] [ 1.45858967e+00 4.45381075e-01 1.14199638e+00 ... 4.82051611e-01 1.97403848e+00 -4.32077497e-01] [ 2.96076357e-01 2.17049241e+00 1.31270480e+00 ... 9.64168072e-01 1.51808190e+00 1.23074985e+00]] [[ 2.33071589e+00 6.89863861e-01 1.18870401e+00 ... 1.08370638e+00 2.50684261e+00 1.52956760e+00] [ 1.58664012e+00 2.03089565e-01 1.84426022e+00 ... 1.23789501e+00 6.67806208e-01 1.44521236e+00] [ 1.37223685e+00 8.49983752e-01 1.04447055e+00 ... 1.24850535e+00 5.48728824e-01 2.09120178e+00] ... [ 2.54216105e-01 4.49319720e-01 2.15219235e+00 ... 1.03924239e+00 2.22566199e+00 2.11516094e+00] [ 9.65969801e-01 3.94276977e+00 3.59425396e-01 ... 7.79154897e-01 -1.01361051e-01 9.18947279e-01] [ 7.12427616e-01 1.97726357e+00 1.54981339e+00 ... 1.36347461e+00 8.64567637e-01 7.49119878e-01]]] [[[ 7.52902985e-01 9.33653057e-01 8.24577272e-01 ... 
7.51501501e-01 6.65525019e-01 7.60460734e-01] [ 6.49205804e-01 7.12171197e-01 7.49642670e-01 ... 7.06996262e-01 8.57294261e-01 6.46296442e-01] [ 7.85934150e-01 8.67047012e-01 7.83935130e-01 ... 6.61983669e-01 6.80897951e-01 6.32750034e-01] ... [ 7.72619188e-01 5.93817115e-01 6.57173932e-01 ... 8.35437179e-01 7.79299080e-01 7.93354750e-01] [ 6.67889357e-01 5.60499907e-01 5.58889151e-01 ... 6.63753510e-01 7.13371813e-01 5.83773196e-01] [ 6.56019926e-01 7.48616159e-01 6.25597596e-01 ... 7.18255699e-01 5.49246132e-01 7.78516471e-01]] [[ 7.12199330e-01 7.31800497e-01 7.25915372e-01 ... 7.34994411e-01 7.55326331e-01 7.20179498e-01] [ 7.05013037e-01 7.65033841e-01 8.25385630e-01 ... 7.12715566e-01 6.21203125e-01 7.62296259e-01] [ 5.93883693e-01 6.87837183e-01 6.95381522e-01 ... 7.33377814e-01 7.58625031e-01 8.07950497e-01] ... [ 6.08734369e-01 5.48574924e-01 6.71817303e-01 ... 7.62890279e-01 8.33617032e-01 7.93072462e-01] [ 6.69956446e-01 5.49169540e-01 7.99423933e-01 ... 6.80900514e-01 8.78568470e-01 6.57895565e-01] [ 6.59509540e-01 6.45950675e-01 7.75162399e-01 ... 6.55392230e-01 7.66094923e-01 7.72379160e-01]] [[ 6.56246841e-01 7.53057122e-01 8.33060503e-01 ... 8.21636915e-01 7.78818190e-01 7.93276131e-01] [ 7.26986766e-01 8.96280527e-01 6.80760801e-01 ... 7.24984109e-01 7.30795085e-01 6.70206487e-01] [ 7.15278804e-01 8.53062153e-01 8.33234072e-01 ... 6.57062531e-01 6.85951531e-01 8.20934355e-01] ... [ 7.02393770e-01 7.68118739e-01 7.90433526e-01 ... 7.34724164e-01 6.78941727e-01 8.26649606e-01] [ 6.29335821e-01 8.23703408e-01 7.52981603e-01 ... 6.49785101e-01 8.57274830e-01 6.75086677e-01] [ 7.74016798e-01 6.90107763e-01 6.23368740e-01 ... 5.79516232e-01 5.74612558e-01 5.69153786e-01]] ... [[ 5.64774036e-01 7.09915340e-01 6.16010725e-01 ... 7.25594878e-01 7.49028325e-01 5.76282501e-01] [ 8.15888882e-01 7.10302830e-01 7.52876461e-01 ... 7.27456033e-01 7.33244300e-01 5.97857296e-01] [ 8.09648097e-01 6.45969987e-01 6.65753484e-01 ... 
7.63215363e-01 7.33533859e-01 6.51468217e-01] ... [ 7.50051141e-01 6.82893753e-01 6.97169244e-01 ... 4.94173706e-01 6.51446044e-01 7.43587852e-01] [ 9.16738153e-01 7.90855169e-01 6.18027747e-01 ... 6.56189203e-01 6.89354539e-01 7.99154639e-01] [ 8.19255233e-01 7.39713788e-01 6.98804140e-01 ... 6.85941994e-01 7.55938351e-01 8.39984655e-01]] [[ 6.36763752e-01 8.06693554e-01 5.23950815e-01 ... 6.91759288e-01 7.32493758e-01 7.66702831e-01] [ 7.88847446e-01 5.81672788e-01 6.85055912e-01 ... 8.20412755e-01 7.31131852e-01 6.43490791e-01] [ 7.37922549e-01 7.18733490e-01 7.98934698e-01 ... 8.20054650e-01 7.06193984e-01 8.42108607e-01] ... [ 6.53927267e-01 7.43299663e-01 7.77520835e-01 ... 7.87022531e-01 7.89995730e-01 6.26933455e-01] [ 7.03302622e-01 5.11734664e-01 6.74830794e-01 ... 7.84597158e-01 7.82236397e-01 6.61256969e-01] [ 5.88804781e-01 7.91473687e-01 5.06765246e-01 ... 6.71616733e-01 7.10061789e-01 6.64958537e-01]] [[ 8.23049903e-01 7.22534716e-01 7.56531596e-01 ... 7.34712541e-01 8.11234415e-01 7.48653054e-01] [ 6.18227959e-01 8.16395342e-01 7.65955210e-01 ... 6.54659331e-01 5.53853095e-01 5.66884577e-01] [ 5.92369616e-01 6.08860135e-01 6.57892823e-01 ... 7.94368088e-01 7.41447330e-01 7.87310541e-01] ... [ 8.39636564e-01 6.84506893e-01 6.63589478e-01 ... 7.13116825e-01 6.66971803e-01 6.67328656e-01] [ 7.85979867e-01 5.81227422e-01 6.53898656e-01 ... 6.84326887e-01 6.85165346e-01 7.04061866e-01] [ 7.54726946e-01 8.42109203e-01 6.48029149e-01 ... 6.60241723e-01 6.16744161e-01 7.91522145e-01]]] [[[ 3.65405738e-01 5.32548606e-01 6.50851786e-01 ... 3.24810416e-01 4.08883274e-01 5.63345701e-02] [ 2.58032113e-01 4.89127815e-01 2.73249894e-01 ... 2.55622596e-01 2.63816893e-01 1.94141835e-01] [ 5.34224987e-01 1.90369487e-01 2.66272843e-01 ... -1.40483156e-01 -6.62858319e-03 3.25419933e-01] ... [ 1.64455146e-01 5.13893843e-01 1.72216251e-01 ... 3.64473820e-01 4.17059302e-01 2.68494695e-01] [ 7.60648489e-01 3.46222460e-01 8.50481093e-01 ... 
3.34039867e-01 4.37567919e-01 2.33943477e-01] [ 1.34526446e-01 4.65921015e-01 4.19743478e-01 ... 9.79400128e-02 2.04944566e-01 5.15626073e-01]] [[-1.77883860e-02 3.47010970e-01 2.90032253e-02 ... 4.37260538e-01 8.41147542e-01 4.37955447e-02] [ 3.77937168e-01 3.04458141e-01 6.26641393e-01 ... 6.79435432e-02 4.23687845e-01 5.96424580e-01] [ 6.01038277e-01 3.93491715e-01 -2.34416761e-02 ... 2.88250327e-01 3.78206432e-01 -3.76928896e-01] ... [ 6.04013145e-01 5.43826878e-01 7.09607959e-01 ... 3.76020610e-01 9.44533944e-01 3.80668670e-01] [-1.95342675e-01 2.90925115e-01 5.54418191e-03 ... 6.20821714e-01 8.16439241e-02 3.97510052e-01] [ 5.16551495e-01 1.20344227e-02 2.41647035e-01 ... 3.27582181e-01 -7.36785159e-02 6.60208285e-01]] [[ 1.12906170e+00 3.48274767e-01 9.53894615e-01 ... 4.07656543e-02 4.06657994e-01 -4.02694941e-02] [ 5.21471441e-01 1.95079505e-01 1.47755653e-01 ... 3.46890539e-01 8.52530897e-02 4.97434109e-01] [ 2.05018774e-01 -8.59116688e-02 7.44057596e-01 ... 5.78347981e-01 3.40871423e-01 5.33332825e-01] ... [ 2.39683673e-01 6.57764852e-01 2.47911096e-01 ... -2.33742788e-01 4.18254346e-01 -3.83877568e-02] [ 7.85542607e-01 3.97964776e-01 9.04498622e-02 ... 3.54106933e-01 -8.12483579e-02 7.86678076e-01] [ 4.38038021e-01 2.35781059e-01 9.51558411e-01 ... 3.12580168e-01 2.64999986e-01 1.84274703e-01]] ... [[ 7.19161570e-01 5.60432851e-01 6.27797306e-01 ... 2.78563589e-01 1.71257704e-01 5.03410935e-01] [ 4.40345705e-01 1.60998464e-01 1.06820822e-01 ... 5.60126424e-01 6.11109018e-01 5.11668146e-01] [ 2.30219260e-01 3.88721049e-01 1.68040633e-01 ... 6.67393029e-01 -1.36797413e-01 3.89224917e-01] ... [ 4.92853858e-02 5.44279993e-01 2.73888797e-01 ... 4.90057290e-01 5.94400823e-01 3.28593016e-01] [ 5.35143137e-01 8.51415172e-02 3.62681895e-01 ... 3.12170181e-02 6.87828898e-01 4.28740054e-01] [ 5.72103977e-01 2.07227394e-01 5.54863632e-01 ... 6.99544609e-01 6.15442023e-02 4.20129836e-01]] [[ 4.67659235e-01 2.96475291e-01 2.03037545e-01 ... 
4.10894491e-02 6.10445023e-01 7.41091311e-01] [ 3.14232945e-01 5.31443954e-01 6.37278259e-01 ... 4.82207328e-01 6.71567857e-01 4.18981522e-01] [ 8.35147679e-01 4.13913906e-01 5.22797167e-01 ... 5.51851869e-01 5.07266223e-01 4.64926481e-01] ... [ 7.98251688e-01 4.45009679e-01 5.33208072e-01 ... 3.63436788e-02 -3.07562917e-01 -9.32299569e-02] [ 6.21843398e-01 4.82501447e-01 2.01177359e-01 ... 3.02958727e-01 3.53074253e-01 2.21416384e-01] [ 3.82277668e-01 4.22128648e-01 4.82487351e-01 ... 8.93554509e-01 6.03119910e-01 3.53037179e-01]] [[ 6.35055006e-02 6.19999886e-01 3.82071018e-01 ... 5.81578672e-01 4.30134416e-01 6.51376903e-01] [ 6.75337017e-01 2.59113491e-01 4.15761083e-01 ... 5.17099679e-01 4.15822357e-01 5.17859757e-01] [ 7.58247435e-01 4.35397863e-01 2.93978244e-01 ... 4.82633375e-02 4.38956767e-01 -1.20036162e-01] ... [ 2.28209630e-01 4.24703687e-01 9.95877311e-02 ... 1.85832724e-01 2.16356531e-01 2.61935383e-01] [ 5.10771036e-01 7.73721814e-01 6.29019201e-01 ... 2.58869439e-01 4.39227194e-01 1.05140492e-01] [ 7.40358651e-01 3.96758616e-01 3.84101450e-01 ... 9.62103188e-01 5.67437828e-01 6.51409328e-02]]] [[[-3.95988137e-01 -2.30918258e-01 1.29232502e+00 ... -4.70118709e-02 2.29621246e-01 6.58594131e-01] [-1.65124726e+00 7.69712254e-02 -1.34680605e+00 ... 9.62616205e-01 -8.92933384e-02 9.53821480e-01] [ 1.30752397e+00 -5.28068319e-02 -3.99799883e-01 ... 2.80008525e-01 4.08822209e-01 1.05525613e+00] ... [-3.56301554e-02 8.55566621e-01 1.26795363e+00 ... -6.54091656e-01 1.00153637e+00 1.18532050e+00] [-8.78611505e-02 4.01371449e-01 2.45690510e-01 ... 9.60126519e-01 1.82742131e+00 -8.38864505e-01] [ 1.62546802e-02 9.50069785e-01 2.03206852e-01 ... 7.93203712e-01 -8.16943586e-01 1.95447102e-01]] [[ 5.88944554e-01 1.81855857e+00 -9.57065582e-01 ... 9.55133200e-01 1.68340042e-01 7.14759827e-01] [-3.57854158e-01 1.31401002e+00 6.15411639e-01 ... 1.04696250e+00 1.50623932e-01 8.98844004e-01] [ 7.52191782e-01 -1.70186952e-01 -1.37107506e-01 ... 
1.67715859e+00 9.78372157e-01 -8.05198610e-01] ... [-9.58557844e-01 -3.67057957e-02 1.58086658e+00 ... 2.25700378e-01 -4.42830801e-01 3.83443296e-01] [ 8.20990205e-01 7.16583967e-01 4.41686362e-01 ... 1.76836431e+00 1.35190928e+00 6.58599555e-01] [ 1.91628015e+00 1.23088348e+00 7.55543351e-01 ... 1.95849597e+00 1.31950629e+00 -1.01177001e+00]] [[-2.38694400e-01 7.82101870e-01 1.25162470e+00 ... 7.65599787e-01 1.47272015e+00 2.17636013e+00] [ 3.84827822e-01 1.33806026e+00 5.85000932e-01 ... 9.30527210e-01 5.21560669e-01 1.81936598e+00] [ 1.70949161e+00 5.11901379e-01 -3.25256228e-01 ... 7.17164204e-02 7.38629848e-02 -1.81852281e-01] ... [ 1.29418099e+00 9.06866938e-02 1.79913893e-01 ... 5.23404360e-01 6.72231257e-01 1.14406872e+00] [-3.55295211e-01 1.53115225e+00 3.61405432e-01 ... -4.41371530e-01 8.63671482e-01 -6.37933314e-01] [-7.77892619e-02 1.01347923e+00 -8.10930789e-01 ... 1.31530654e+00 1.13793933e+00 1.83083165e+00]] ... [[ 6.16365850e-01 8.16457093e-01 6.02221012e-01 ... -4.27355878e-02 -1.02294600e+00 9.14536476e-01] [-1.02070725e+00 8.87414396e-01 1.24676454e+00 ... 8.93223733e-02 5.65901101e-01 5.95929444e-01] [-7.45813251e-01 -1.41051844e-01 9.61943090e-01 ... 1.62645423e+00 -8.29467475e-02 -5.02535343e-01] ... [ 1.69434404e+00 7.31812596e-01 4.39556204e-02 ... 1.47858620e+00 7.80610859e-01 3.35806638e-01] [ 4.48256284e-01 1.02303183e+00 3.12439829e-01 ... 3.26830149e-01 2.91323453e-01 2.16838419e-01] [ 4.52320695e-01 1.97446644e-01 1.24494076e+00 ... 6.55867755e-01 5.68056583e-01 8.01276147e-01]] [[ 1.43391967e+00 -1.73592463e-01 4.09888625e-02 ... 2.44782642e-01 1.38970053e+00 1.45388031e+00] [ 2.83636361e-01 1.23899472e+00 -1.17771256e+00 ... 1.04902339e+00 1.28191960e+00 1.20683372e+00] [ 1.16977584e+00 -3.16835381e-02 4.20904458e-01 ... 6.38318837e-01 2.67951012e-01 8.72067392e-01] ... [ 1.55529749e+00 1.02994823e+00 5.84417224e-01 ... -1.18191494e-02 4.24289972e-01 3.71135980e-01] [-8.68640959e-01 4.83533710e-01 6.20458871e-02 ... 
8.49862337e-01 9.85335171e-01 3.16284299e-01] [ 3.70174974e-01 -8.78896639e-02 4.12116349e-01 ... -2.90552765e-01 -3.89320441e-02 1.36941159e+00]] [[ 1.30632794e+00 7.10800767e-01 4.51751858e-01 ... -4.47727621e-01 1.06343246e+00 5.22946596e-01] [ 1.84958076e+00 -2.75963843e-01 3.50109518e-01 ... 1.12152362e+00 1.09976208e+00 8.72069895e-01] [ 5.47275841e-01 5.92970371e-01 9.80451345e-01 ... 9.85605657e-01 -1.07648075e+00 1.37229323e+00] ... [ 4.87508476e-02 1.36524558e-01 -1.36976853e-01 ... 3.31158757e+00 1.81745708e+00 3.39570254e-01] [ 3.81453596e-02 9.51677799e-01 1.63484290e-01 ... 8.06509197e-01 5.88234961e-01 3.14512663e-02] [ 5.28461099e-01 1.34127426e+00 7.13026226e-01 ... -6.07235655e-02 -2.25323904e-02 1.00473452e+00]]]] [[[[ 6.31219804e-01 2.58902937e-01 4.65568364e-01 ... 2.71710515e-01 5.40818870e-01 4.02446568e-01] [ 6.59668505e-01 3.54401886e-01 -5.00364117e-02 ... 1.22971445e-01 3.09986115e-01 1.91072196e-01] [ 5.39291799e-01 3.47989529e-01 7.64465749e-01 ... 3.49364460e-01 5.21814525e-01 3.75741780e-01] ... [ 2.79121965e-01 5.14360309e-01 1.41214311e-01 ... 6.44976616e-01 3.66337806e-01 4.45480168e-01] [ 4.10882711e-01 2.82966763e-01 5.70065081e-01 ... 6.29919052e-01 4.96469587e-01 4.77399707e-01] [ 7.99980462e-01 2.73345977e-01 -6.31164312e-02 ... 2.58965671e-01 2.11757258e-01 3.20071429e-01]] [[ 2.96669811e-01 3.35407704e-01 1.42022774e-01 ... 5.74285924e-01 3.61170530e-01 4.48899150e-01] [ 7.94873089e-02 3.48414481e-01 4.56214517e-01 ... 4.77809936e-01 5.23186982e-01 3.37477863e-01] [ 3.12397927e-01 1.35818467e-01 4.60577339e-01 ... 6.08251929e-01 2.30998874e-01 4.95480031e-01] ... [ 6.33771598e-01 8.16245452e-02 2.04091638e-01 ... 8.36099625e-01 2.69087136e-01 4.86616403e-01] [ 2.55254209e-02 2.89801270e-01 2.08143592e-02 ... 6.09088063e-01 4.51472104e-01 6.64132416e-01] [ 3.46489519e-01 3.96046638e-01 5.88737726e-01 ... 3.55857611e-01 4.78983939e-01 1.68432400e-01]] [[ 4.48439151e-01 5.39563417e-01 3.19423974e-01 ... 
3.58931929e-01 4.57315117e-01 5.55252850e-01] [ 6.02510989e-01 4.94579554e-01 5.46810806e-01 ... 1.86018631e-01 -5.67519851e-02 7.96060920e-01] [ 1.92638367e-01 7.54773498e-01 1.72758698e-01 ... 3.86086375e-01 6.65704489e-01 3.79047573e-01] ... [ 5.58685362e-01 6.21197283e-01 3.95295292e-01 ... 3.33028764e-01 7.78521504e-03 2.90610284e-01] [ 2.58119881e-01 5.22861600e-01 5.52498221e-01 ... 2.20393464e-01 1.50976360e-01 5.94027281e-01] [ 5.10999978e-01 3.40924829e-01 3.25022429e-01 ... 4.86717999e-01 4.37725902e-01 3.71181875e-01]] ... [[ 3.16359460e-01 2.76568383e-01 5.17981291e-01 ... 5.10054350e-01 4.93492842e-01 1.60486326e-01] [ 3.18315357e-01 7.15145946e-01 3.84061545e-01 ... 5.44066191e-01 4.31002021e-01 1.96727246e-01] [ 2.83642858e-02 2.42121741e-01 1.75185293e-01 ... 7.18164563e-01 3.80081207e-01 7.56795347e-01] ... [ 6.00851178e-01 7.13345826e-01 1.37770385e-01 ... 5.75485945e-01 4.25680727e-01 3.62896264e-01] [ 3.53410751e-01 3.02653491e-01 2.29047477e-01 ... 4.80213106e-01 1.11971438e-01 3.31088901e-01] [ 6.13380075e-01 5.31318644e-03 6.32691860e-01 ... 2.96093732e-01 5.31639457e-01 9.48384583e-01]] [[ 8.06413829e-01 5.01150608e-01 4.08744395e-01 ... 6.05245471e-01 4.80166316e-01 4.81849223e-01] [ 4.67331141e-01 1.53977554e-02 5.32749474e-01 ... 3.00033808e-01 2.13092774e-01 5.34757435e-01] [ 2.39578888e-01 5.02266049e-01 4.50621158e-01 ... 7.38519151e-03 3.34571034e-01 5.08361518e-01] ... [ 3.88403147e-01 3.43731195e-01 3.93527687e-01 ... 6.18567944e-01 1.35021478e-01 4.47383761e-01] [ 2.11777955e-01 5.19468427e-01 5.74248331e-03 ... 4.46898937e-01 6.73572302e-01 2.97055513e-01] [ 4.00555134e-01 3.25962543e-01 4.13869172e-01 ... 3.52892607e-01 -1.19100951e-01 -4.09168825e-02]] [[ 1.71648487e-01 4.12767500e-01 4.39303756e-01 ... 4.03235465e-01 3.47271919e-01 -3.23793739e-02] [ 3.80994618e-01 -3.20186876e-02 7.22058773e-01 ... 5.61565518e-01 3.98451000e-01 2.65382022e-01] [ 5.98337233e-01 2.38738254e-01 3.80783260e-01 ... 
5.06394506e-01 3.04475695e-01 4.01353724e-02] ... [-4.18351963e-03 6.68063760e-01 6.83437169e-01 ... 6.12830043e-01 1.39739797e-01 5.37067115e-01] [-6.87655956e-02 3.61556292e-01 5.99162757e-01 ... 3.15553814e-01 4.83312935e-01 7.85554290e-01] [ 7.32481897e-01 1.85717687e-01 8.61168087e-01 ... 5.49017012e-01 5.05673647e-01 5.82514167e-01]]] [[[-2.48816037e+00 2.84298100e-02 1.01406562e+00 ... -1.19829822e+00 -1.70555964e-01 4.59705710e-01] [ 6.45676017e-01 9.86559749e-01 1.82843530e+00 ... -8.89310241e-02 1.67086637e+00 -5.61187625e-01] [ 2.18091679e+00 3.13429022e+00 -1.17776656e+00 ... 2.26487565e+00 -2.98416823e-01 1.55487847e+00] ... [-1.52654314e+00 -2.17544049e-01 -7.11718976e-01 ... -1.04722965e+00 2.43548250e+00 -4.73683447e-01] [ 3.21289480e-01 -2.61037946e+00 -1.54487195e-03 ... -3.01025659e-01 2.39330935e+00 2.24637532e+00] [ 2.93194389e+00 1.69298720e+00 3.42448497e+00 ... 1.53304172e+00 1.03328478e+00 2.23574042e+00]] [[ 1.21165955e+00 2.88674116e-01 1.26311734e-01 ... 2.46404696e+00 2.72295761e+00 2.50128460e+00] [-1.07289755e+00 7.65920460e-01 2.77779603e+00 ... 2.63323259e+00 1.70686436e+00 2.81403828e+00] [ 1.20341837e+00 1.30907094e+00 2.47571731e+00 ... 7.68948793e-01 3.91021132e+00 3.45522857e+00] ... [ 1.99765623e+00 -6.82365671e-02 1.06959879e+00 ... 1.90578604e+00 6.49082541e-01 5.79016566e-01] [ 5.04054487e-01 1.51498640e+00 1.84297800e+00 ... 5.49660861e-01 1.34583056e+00 1.37265399e-01] [ 7.20315427e-02 -9.74704772e-02 -1.27040699e-01 ... 1.97862136e+00 1.92950511e+00 1.13825667e+00]] [[ 1.13765914e-02 2.00434566e+00 4.59332895e+00 ... 2.23740077e+00 3.29612017e+00 9.03223813e-01] [ 1.27980983e+00 2.62361622e+00 1.45590484e+00 ... -1.22375166e+00 1.38807786e+00 1.26259375e+00] [ 9.59169328e-01 -6.98500276e-01 8.70906770e-01 ... 1.80754602e+00 -1.13252378e+00 8.12451839e-01] ... [ 3.75991368e+00 4.05530751e-01 1.36993754e+00 ... 8.43908846e-01 2.00362706e+00 -6.52649701e-01] [ 1.40639985e+00 -1.87038684e+00 1.27477601e-01 ... 
1.03405309e+00 7.17845082e-01 -2.13447571e-01] [ 2.36469507e+00 -2.40490302e-01 1.18628049e+00 ... 4.74373311e-01 1.30343640e+00 2.75392178e-02]] ... [[ 1.67621958e+00 -1.00582719e+00 -1.77361345e+00 ... 1.01013684e+00 1.94187427e+00 -7.71581769e-01] [ 7.43314445e-01 2.76981950e+00 9.98320341e-01 ... 6.69515371e-01 2.82248712e+00 1.42710137e+00] [ 2.67079949e-01 8.33676159e-01 1.02352297e+00 ... 3.86667073e-01 7.50611871e-02 1.63549197e+00] ... [-1.43199396e+00 4.78969961e-01 -1.47709344e-02 ... -1.00014091e+00 2.59976745e+00 9.79523242e-01] [ 6.67677745e-02 3.05088615e+00 1.67156971e+00 ... 6.74855351e-01 -1.01246166e+00 1.05800331e+00] [ 5.23255905e-03 1.65941238e+00 3.35729933e+00 ... 1.67466629e+00 1.25134349e+00 -6.28817379e-01]] [[ 1.75021839e+00 6.22534573e-01 1.41488481e+00 ... 4.30230767e-01 1.42163086e+00 1.47473967e+00] [ 2.56365490e+00 -1.17524314e+00 6.35050356e-01 ... 2.73226857e+00 -4.13689530e-04 1.72681665e+00] [ 5.85244536e-01 1.22795537e-01 2.67763638e+00 ... 1.49554145e+00 2.67042160e+00 1.00948071e+00] ... [-8.35298479e-01 -1.29041836e-01 1.31284118e+00 ... 1.09918332e+00 -1.36270666e+00 3.92844367e+00] [ 2.59130502e+00 -1.12850793e-01 9.02344286e-01 ... 1.43992826e-01 2.00207949e-01 1.53613520e+00] [ 5.26293814e-01 -7.59435773e-01 7.06645966e-01 ... 2.31762505e+00 3.20114708e+00 1.59669936e+00]] [[ 1.50837111e+00 5.14300764e-01 2.08799243e+00 ... 2.06956840e+00 9.77402508e-01 -8.50437820e-01] [ 1.53284156e+00 1.39840198e+00 -1.21846128e+00 ... 4.29030627e-01 2.51312584e-01 4.41676527e-01] [ 3.06657314e+00 1.77273011e+00 1.20233965e+00 ... 6.28912508e-01 -1.36404693e+00 2.26606637e-01] ... [ 1.10049689e+00 2.22948551e+00 -4.29557413e-01 ... 5.04793346e-01 1.21828604e+00 2.22511554e+00] [ 1.33568716e+00 3.32030416e-01 4.92702186e-01 ... 2.05683446e+00 4.12288237e+00 5.92152059e-01] [-1.06169343e+00 2.15500832e+00 1.86164141e+00 ... 2.46578979e+00 5.45731068e+00 -9.29122120e-02]]] [[[-1.35393471e-01 6.77548707e-01 1.43928015e+00 ... 
1.44554949e+00 5.70986509e-01 -7.18978494e-02] [ 7.25196064e-01 2.78246880e+00 2.09682155e+00 ... 1.71929073e+00 1.95150518e+00 8.82206023e-01] [ 8.62613738e-01 1.68301260e+00 6.32327557e-01 ... 8.78054023e-01 1.92160177e+00 1.58111048e+00] ... [ 5.00338912e-01 2.42204213e+00 -1.24946487e+00 ... 1.30160046e+00 5.35766184e-01 1.68039644e+00] [ 1.84597719e+00 1.17562950e+00 1.82020533e+00 ... 2.09165311e+00 1.90387309e+00 1.07789934e+00] [-6.01887330e-03 2.85289717e+00 1.51624453e+00 ... 8.89323831e-01 -4.83973533e-01 4.48465236e-02]] [[ 2.78271580e+00 1.62366331e+00 4.54559058e-01 ... 4.92032260e-01 6.72660708e-01 1.43742001e+00] [ 9.48530257e-01 1.05875981e+00 1.02533472e+00 ... 5.82504451e-01 3.08573556e+00 3.54298472e+00] [ 3.00101900e+00 1.58449090e+00 3.33355993e-01 ... 1.23262596e+00 8.75875652e-01 2.33976698e+00] ... [ 2.61917734e+00 1.41053998e+00 2.10190344e+00 ... 2.41568327e+00 8.97426963e-01 1.71566820e+00] [ 1.22401893e+00 4.68164891e-01 1.58782303e+00 ... 9.80919659e-01 1.66360068e+00 2.37408042e+00] [ 1.67646766e+00 5.61241210e-01 2.46672177e+00 ... 1.43791640e+00 2.76427054e+00 2.39772916e+00]] [[ 3.11132967e-01 1.57294616e-01 7.77791977e-01 ... 2.10553384e+00 1.52764583e+00 3.54671985e-01] [ 2.12517306e-01 6.58154190e-01 -2.33005494e-01 ... -5.36568016e-02 2.43781996e+00 1.53445280e+00] [ 1.82276499e+00 9.61218178e-01 3.58167434e+00 ... 1.00643969e+00 1.29717839e+00 3.13522434e+00] ... [ 1.07665312e+00 1.07090187e+00 3.66628259e-01 ... 1.32939351e+00 2.11407661e+00 1.31441152e+00] [ 8.92755985e-01 3.63228028e-03 2.94135928e+00 ... 1.89614201e+00 1.27066243e+00 2.74487853e+00] [ 4.77125168e-01 1.91999125e+00 2.51779866e+00 ... -3.33274603e-02 3.22189307e+00 4.93908733e-01]] ... [[ 2.09187818e+00 6.25100851e-01 9.65499818e-01 ... 3.11938167e+00 1.69909108e+00 1.72321796e+00] [ 1.96859848e+00 -1.65310740e-01 2.16403580e+00 ... 2.37183523e+00 1.31219733e+00 1.17929971e+00] [ 2.96448064e+00 2.25348309e-01 2.39575911e+00 ... 
9.31577206e-01 -1.97133422e-02 2.58915687e+00] ... [ 1.98150861e+00 2.57386422e+00 2.45223451e+00 ... 2.23896444e-01 2.11468387e+00 2.17283082e+00] [ 2.42045736e+00 9.85922039e-01 4.53755073e-02 ... 2.21929145e+00 1.67336583e+00 2.29212451e+00] [ 2.10366392e+00 -3.34438473e-01 3.97813827e-01 ... 1.96995580e+00 1.38858867e+00 6.12055957e-01]] [[ 1.04299033e+00 2.53441763e+00 2.95116693e-01 ... 1.39557862e+00 1.38265288e+00 1.77119279e+00] [ 5.23745716e-01 2.67281628e+00 2.91719007e+00 ... 1.82076681e+00 1.39575148e+00 5.76375663e-01] [ 1.80218804e+00 2.28824997e+00 1.64787900e+00 ... 2.21147990e+00 1.54420662e+00 3.35856915e-01] ... [ 1.20770454e+00 1.18654060e+00 1.17967463e+00 ... -9.30079669e-02 1.98061848e+00 1.36160266e+00] [ 1.99713492e+00 2.32923031e+00 2.68129802e+00 ... 8.38168919e-01 8.96698236e-01 1.44439518e+00] [-5.61840892e-01 1.71620727e+00 8.97464693e-01 ... 1.14324903e+00 2.31319118e+00 1.03886163e+00]] [[ 5.85379064e-01 2.24868536e+00 1.53666770e+00 ... 1.59294403e+00 5.41406751e-01 2.41114426e+00] [ 1.00620282e+00 1.87295949e+00 1.84515309e+00 ... 2.23651350e-01 1.36215639e+00 1.22577155e+00] [ 1.12273061e+00 3.86847705e-01 9.89828110e-01 ... 9.36344624e-01 2.50583887e-01 1.93767577e-01] ... [-9.54482913e-01 1.52957487e+00 1.70395231e+00 ... 1.34417629e+00 1.64863682e+00 4.18502957e-01] [ 5.16126096e-01 2.15700364e+00 1.31206071e+00 ... 1.33750582e+00 2.13067532e+00 1.40411127e+00] [ 1.37339747e+00 7.72694111e-01 1.30822349e+00 ... 2.06078196e+00 2.82955706e-01 2.31912327e+00]]] [[[ 7.52736568e-01 7.22057164e-01 7.15931714e-01 ... 7.23496199e-01 8.64645600e-01 7.95314431e-01] [ 8.10344279e-01 7.44469106e-01 8.23101461e-01 ... 6.53414011e-01 7.38209605e-01 8.59635293e-01] [ 6.92813337e-01 6.81119442e-01 6.54646218e-01 ... 7.43586481e-01 7.97206581e-01 6.58089519e-01] ... [ 7.83139706e-01 5.98567545e-01 7.38164544e-01 ... 7.39526808e-01 5.74763715e-01 6.72067225e-01] [ 8.82703662e-01 6.75900161e-01 6.26064301e-01 ... 
6.87538147e-01 6.30866051e-01 6.14108980e-01] [ 8.33262146e-01 6.47922516e-01 8.51360798e-01 ... 7.55790353e-01 7.11113036e-01 7.26899266e-01]] [[ 7.19644606e-01 6.43244505e-01 6.68097436e-01 ... 7.75762022e-01 6.71392560e-01 6.47810221e-01] [ 6.78235710e-01 7.95112729e-01 6.12096131e-01 ... 7.70161331e-01 8.05922508e-01 6.04598165e-01] [ 6.29865468e-01 6.22268915e-01 5.11185825e-01 ... 8.12943280e-01 7.06925690e-01 7.82431960e-01] ... [ 6.88144624e-01 7.33382225e-01 8.12836826e-01 ... 6.97941720e-01 7.73022890e-01 9.56597805e-01] [ 5.72120130e-01 5.92554450e-01 7.71442175e-01 ... 7.54273593e-01 8.20624292e-01 8.21572959e-01] [ 7.86084712e-01 6.59637749e-01 7.54380524e-01 ... 7.58907437e-01 6.18527472e-01 6.54595733e-01]] [[ 7.37107933e-01 7.58657217e-01 5.68072677e-01 ... 6.94541872e-01 7.22899079e-01 7.64358521e-01] [ 8.07839155e-01 8.11229587e-01 7.04454303e-01 ... 8.18023622e-01 6.15438879e-01 8.52015376e-01] [ 7.90697217e-01 6.82290018e-01 8.37251842e-01 ... 6.59682930e-01 6.69222057e-01 7.29287922e-01] ... [ 7.14823067e-01 7.60399520e-01 5.43191254e-01 ... 7.88210630e-01 7.33729124e-01 7.30264962e-01] [ 6.91712260e-01 6.09564006e-01 6.42892659e-01 ... 6.95655942e-01 7.12148905e-01 8.08256745e-01] [ 8.65734935e-01 7.49724150e-01 7.83447623e-01 ... 7.67118156e-01 8.10842335e-01 6.67718410e-01]] ... [[ 6.72567129e-01 6.95549846e-01 6.89025342e-01 ... 7.38387942e-01 7.08513677e-01 6.64623380e-01] [ 6.35782063e-01 8.54863346e-01 6.11792684e-01 ... 5.95885098e-01 7.97088206e-01 7.16900527e-01] [ 6.96932018e-01 5.98569989e-01 7.12746561e-01 ... 7.94389427e-01 6.67599499e-01 7.73180723e-01] ... [ 6.91459835e-01 7.38023043e-01 6.57630384e-01 ... 6.21349156e-01 7.23834455e-01 6.69877946e-01] [ 6.31018877e-01 6.88048482e-01 6.87020302e-01 ... 7.02203810e-01 5.95553875e-01 6.32395029e-01] [ 8.02045047e-01 4.62322265e-01 6.02849662e-01 ... 6.05328321e-01 6.99741244e-01 5.55849671e-01]] [[ 7.99840868e-01 5.68360448e-01 6.32937431e-01 ... 
7.61486530e-01 6.87748849e-01 6.79686487e-01] [ 8.28998625e-01 6.71703339e-01 7.71207869e-01 ... 7.23048747e-01 7.70302474e-01 8.07977736e-01] [ 8.11811745e-01 6.49582505e-01 7.22935796e-01 ... 6.00295961e-01 8.10898483e-01 6.33480430e-01] ... [ 7.70493329e-01 6.20907247e-01 7.55384207e-01 ... 7.71061063e-01 5.78126788e-01 7.27690220e-01] [ 6.67351127e-01 9.53851342e-01 8.25231075e-01 ... 7.42627800e-01 6.71979845e-01 7.21221685e-01] [ 7.41886020e-01 5.23939371e-01 7.25048304e-01 ... 8.38963211e-01 6.31059885e-01 7.34644949e-01]] [[ 7.57678926e-01 6.89228356e-01 7.23696649e-01 ... 7.64705122e-01 7.00900435e-01 6.88637197e-01] [ 8.09466362e-01 7.80677915e-01 8.53460133e-01 ... 7.32175946e-01 7.46952593e-01 7.55534828e-01] [ 5.55869341e-01 8.09636891e-01 8.58854234e-01 ... 5.95734060e-01 7.75180221e-01 6.71059549e-01] ... [ 6.19252682e-01 7.62786329e-01 6.45710945e-01 ... 6.02722645e-01 6.38832867e-01 6.39310956e-01] [ 6.58850491e-01 7.02287018e-01 7.71013200e-01 ... 7.28918850e-01 7.48782396e-01 6.85527802e-01] [ 7.88543284e-01 7.53310263e-01 7.83317268e-01 ... 7.91403532e-01 6.31232083e-01 6.66263223e-01]]] [[[ 1.33434355e-01 1.74936965e-01 3.60153377e-01 ... 5.30495405e-01 6.42855167e-01 3.96933377e-01] [ 5.04755497e-01 4.44918841e-01 3.81301612e-01 ... 2.39195988e-01 1.35360286e-01 5.46061575e-01] [ 9.19919968e-01 3.67078215e-01 4.97788548e-01 ... 3.39093745e-01 5.80856144e-01 7.26531863e-01] ... [ 3.32742333e-01 5.12410402e-01 6.93039834e-01 ... 5.03375888e-01 2.50692219e-01 -7.71992505e-02] [ 4.51752156e-01 7.25156128e-01 -2.52942108e-02 ... 3.36713791e-01 2.80282617e-01 5.61328053e-01] [ 6.84544742e-02 -3.65069956e-02 1.69862092e-01 ... 6.31028771e-01 1.64703012e-01 6.22369707e-01]] [[ 3.52358699e-01 6.32905841e-01 6.73356771e-01 ... 3.57637435e-01 4.40117329e-01 3.69945496e-01] [ 2.79640853e-01 2.57191062e-01 1.61894128e-01 ... 4.00080025e-01 7.32294083e-01 -6.03896640e-02] [ 8.47594738e-01 3.80739927e-01 3.12179655e-01 ... 
1.58345252e-01 2.25143895e-01 8.26277554e-01] ... [ 3.57924074e-01 6.72152698e-01 3.28168452e-01 ... 5.90248108e-01 5.29186130e-01 8.14679116e-02] [ 5.95044494e-01 3.85627538e-01 6.83830559e-01 ... 5.02858937e-01 9.27391089e-03 8.61296713e-01] [ 5.58858395e-01 6.48584902e-01 3.87778878e-01 ... -1.09802037e-02 7.19727755e-01 3.53949130e-01]] [[ 3.63481462e-01 5.92446566e-01 7.80205131e-01 ... 6.12049401e-01 3.81619632e-01 2.04472065e-01] [ 3.33114266e-01 3.68059039e-01 1.93821862e-01 ... 6.21659040e-01 1.06105153e-02 5.71584523e-01] [ 4.25034523e-01 4.82854277e-01 5.74715376e-01 ... 2.80515313e-01 3.52066100e-01 3.81290466e-01] ... [ 5.69167137e-01 5.43587029e-01 2.35134766e-01 ... 3.14875662e-01 6.71814322e-01 1.39516711e-01] [ 5.08673489e-02 3.44637424e-01 3.84345382e-01 ... 5.21859169e-01 8.80345523e-01 5.96370459e-01] [ 3.09743404e-01 2.70551094e-03 1.18438698e-01 ... 3.52481045e-02 3.02975029e-01 1.64150417e-01]] ... [[ 2.85741717e-01 2.34682545e-01 -4.26826715e-01 ... 5.84933341e-01 2.35483468e-01 7.52385780e-02] [ 4.35228735e-01 4.26932812e-01 5.80311492e-02 ... 1.93029046e-01 4.58461612e-01 -1.38037741e-01] [ 4.94049191e-01 9.18891951e-02 3.17573696e-01 ... 6.42581820e-01 1.90520778e-01 4.19959664e-01] ... [ 2.66647041e-01 4.16640669e-01 2.99386680e-01 ... 4.17757362e-01 5.00582397e-01 3.13680440e-01] [ 5.33448517e-01 6.22271225e-02 4.42808121e-01 ... 2.88782001e-01 7.27189064e-01 3.66705954e-01] [ 5.19591033e-01 2.68380847e-02 4.74878252e-01 ... 2.10917592e-02 -1.11036688e-01 2.73398876e-01]] [[ 5.23811460e-01 3.91118020e-01 4.25607860e-02 ... 2.49447316e-01 3.73928457e-01 4.03779656e-01] [-9.58718881e-02 6.60640180e-01 8.68866146e-01 ... 2.70997643e-01 5.84793389e-01 5.76106966e-01] [-4.01131138e-02 3.97828102e-01 5.19776165e-01 ... 3.36999185e-02 4.68766153e-01 4.38202694e-02] ... [ 2.12835297e-02 2.97082871e-01 7.31941342e-01 ... 3.24430734e-01 4.95302707e-01 2.39212096e-01] [ 3.28266621e-01 -7.42840534e-03 1.26497984e-01 ... 
5.67132294e-01 6.01374745e-01 3.63696516e-01] [ 2.81439930e-01 2.46527568e-01 4.52765256e-01 ... 4.08145487e-02 4.25868452e-01 7.58424699e-01]] [[ 3.96053106e-01 5.81412554e-01 1.30912080e-01 ... 2.27846771e-01 4.42233473e-01 2.97610193e-01] [ 6.33721232e-01 4.14243221e-01 4.88417268e-01 ... 7.90182769e-01 5.02843857e-01 6.70101225e-01] [ 1.83124393e-01 1.82399914e-01 4.71412867e-01 ... 6.69709504e-01 5.27030863e-02 8.80638123e-01] ... [ 1.12916470e-01 1.95510089e-01 3.32307845e-01 ... 3.26779634e-01 3.32241952e-01 3.15045178e-01] [ 3.67156476e-01 4.12324876e-01 5.91800034e-01 ... 2.65034765e-01 4.54060525e-01 3.08422327e-01] [-7.26742148e-02 6.17085397e-01 4.91789579e-01 ... 3.91688854e-01 2.72535503e-01 8.42930853e-01]]] [[[ 5.91155708e-01 3.05873062e-02 8.89953852e-01 ... 2.19989991e+00 7.35114396e-01 1.04661837e-01] [ 5.59132695e-01 5.83673120e-01 1.82444191e+00 ... 2.77881086e-01 2.24504304e+00 -5.20948395e-02] [ 6.40767217e-01 -1.03863671e-01 -1.31071508e-01 ... -1.31780863e-01 9.59858239e-01 1.25393987e+00] ... [-6.81624532e-01 -3.73884827e-01 6.29357323e-02 ... 1.01145196e+00 8.25170159e-01 2.43599311e-01] [-7.05086470e-01 3.39001656e-01 3.24633092e-01 ... 5.48314452e-01 2.47453824e-01 1.23902380e+00] [-1.16228628e+00 1.00483024e+00 1.52641511e+00 ... -1.79723203e-01 3.07736188e-01 6.47169888e-01]] [[ 8.38304520e-01 3.69706780e-01 7.30324566e-01 ... -1.49556625e+00 6.88934147e-01 8.23545277e-01] [ 7.33357847e-01 3.31148922e-01 -7.95556307e-02 ... 1.09547079e+00 7.34979689e-01 1.83594394e+00] [ 1.66448879e+00 5.26281953e-01 2.30700880e-01 ... 2.03601074e+00 1.33306360e+00 6.18017167e-02] ... [ 7.80953586e-01 1.86794484e+00 3.95760268e-01 ... 1.47703516e+00 1.84232461e+00 -1.04157233e+00] [ 1.29979670e+00 -6.68964028e-01 8.94175410e-01 ... 1.63694727e+00 -2.82906622e-01 9.10127699e-01] [ 1.05513081e-01 7.69882977e-01 3.30999076e-01 ... 7.24239707e-01 9.95448768e-01 -5.77312469e-01]] [[ 2.38735557e-01 -6.39321625e-01 1.47072017e+00 ... 
1.29243302e+00 4.60950404e-01 1.07563210e+00] [ 1.76515913e+00 -3.22873861e-01 -1.48390877e+00 ... 6.02458298e-01 1.32095795e-02 -3.13997686e-01] [-5.17797709e-01 1.54560220e+00 5.29184997e-01 ... -8.86016637e-02 1.07260501e+00 1.43848181e+00] ... [ 4.60824460e-01 1.80278912e-01 2.25487709e+00 ... 6.44060493e-01 8.50825131e-01 5.15786827e-01] [ 1.00593078e+00 1.54913807e+00 6.08960092e-01 ... 1.06775975e+00 1.94112763e-01 2.03174174e-01] [ 1.06333621e-01 1.02948618e+00 -3.32684398e-01 ... 5.21792114e-01 1.50574338e+00 -3.63781601e-01]] ... [[ 2.21495926e-02 -3.44007432e-01 2.74408847e-01 ... -5.45609176e-01 1.26532447e+00 -1.89826101e-01] [ 1.13436985e+00 4.68771113e-03 6.36705756e-01 ... 1.41718793e+00 -4.47939068e-01 -3.69046748e-01] [ 1.36693668e+00 9.41872537e-01 6.31293952e-01 ... 4.94964749e-01 -8.14345241e-01 -4.89809066e-01] ... [ 1.41210985e+00 4.96413797e-01 2.04232663e-01 ... -3.49389523e-01 -2.75216073e-01 1.26289219e-01] [ 3.59209836e-01 1.44739020e+00 3.78801882e-01 ... -3.15813839e-01 -1.63286161e+00 -1.04263437e+00] [ 1.42120326e+00 9.23959076e-01 7.79671371e-02 ... 9.68929350e-01 8.55960131e-01 1.07406914e+00]] [[ 9.32642698e-01 1.99040294e-01 -8.44825923e-01 ... 3.92458230e-01 6.16296306e-02 1.21736872e+00] [ 7.67253041e-01 8.93823802e-01 5.25793791e-01 ... 1.01977110e+00 2.30639124e+00 1.33412290e+00] [ 2.27206543e-01 2.05501461e+00 1.56539071e+00 ... 1.03557611e+00 8.16210389e-01 9.65272307e-01] ... [ 6.51512146e-01 -1.39261022e-01 1.20823991e+00 ... 9.42295194e-01 -1.82849035e-01 6.88855588e-01] [ 4.19163913e-01 1.12866139e+00 4.23765659e-01 ... -1.04411662e+00 -9.04924512e-01 -2.78010309e-01] [ 1.59398937e+00 6.02365017e-01 1.90504384e+00 ... -5.30058384e-01 2.49003634e-01 8.31127465e-01]] [[ 2.49059248e+00 -2.64401287e-01 -1.07063375e-01 ... -5.24480283e-01 2.59698778e-02 1.51553643e+00] [ 6.47981316e-02 1.46763647e+00 8.69496703e-01 ... -2.85580724e-01 5.01921415e-01 -1.20665178e-01] [ 4.99981135e-01 1.93763363e+00 1.70813739e+00 ... 
2.38237873e-01 7.60524720e-02 1.25307989e+00] ... [-7.33736098e-01 1.15197659e+00 7.67642498e-01 ... -3.37639719e-01 1.93781361e-01 9.64320838e-01] [ 1.31536627e+00 1.15503705e+00 5.90981841e-01 ... 3.49930197e-01 2.33919001e+00 1.03688931e+00] [ 6.00076139e-01 1.41130793e+00 1.24743247e+00 ... 2.55555820e+00 7.75683045e-01 1.46186388e+00]]]] [[[[ 3.40797871e-01 4.71802443e-01 1.34637386e-01 ... 5.15728652e-01 -9.19184182e-03 3.08641374e-01] [ 9.49502409e-01 3.87067556e-01 3.50192815e-01 ... 2.78278172e-01 2.96805769e-01 2.54619539e-01] [ 3.62725139e-01 -1.55333895e-02 2.12770909e-01 ... 3.50241572e-01 1.84189379e-01 1.93892509e-01] ... [ 3.81619602e-01 1.91781968e-01 2.50611275e-01 ... 3.92281890e-01 4.74302202e-01 2.19348595e-01] [ 3.49562198e-01 6.22815311e-01 4.40946907e-01 ... 5.52883744e-01 5.83737075e-01 4.45611060e-01] [ 3.49980116e-01 1.89228967e-01 3.87203932e-01 ... 5.81546307e-01 6.97467744e-01 2.87371010e-01]] [[ 2.86693066e-01 1.79746717e-01 3.90059471e-01 ... 8.48770082e-01 3.39189708e-01 2.88389653e-01] [ 6.76973403e-01 5.70325375e-01 7.14622080e-01 ... 4.38164234e-01 2.33364299e-01 2.19202548e-01] [ 5.82560301e-01 4.75696146e-01 3.60769838e-01 ... 5.32844067e-02 4.40087944e-01 7.57785976e-01] ... [ 3.21540356e-01 3.69515091e-01 3.61575872e-01 ... 2.21323147e-01 4.05043036e-01 7.56008863e-01] [ 4.91201669e-01 6.79741621e-01 3.94568890e-01 ... 4.94742095e-01 6.28314912e-01 1.53579086e-01] [ 6.34394228e-01 2.33139381e-01 3.65041822e-01 ... -7.40706995e-02 1.73793018e-01 6.44996583e-01]] [[ 5.26367486e-01 9.62477803e-01 7.50902355e-01 ... 2.04539448e-01 2.90066421e-01 6.17570221e-01] [ 1.46009609e-01 3.88600647e-01 3.46976817e-01 ... 5.43656051e-01 3.99737090e-01 5.00845969e-01] [ 2.30929792e-01 6.66278064e-01 5.44770837e-01 ... 1.68743879e-01 4.34652060e-01 -1.39089614e-01] ... [ 3.03644717e-01 5.37535071e-01 1.39548913e-01 ... 2.11782217e-01 6.56514764e-01 1.41792133e-01] [ 3.37196410e-01 5.34676969e-01 8.54312956e-01 ... 
2.19403505e-01 1.45649612e-01 7.62851059e-01] [ 4.20741960e-02 2.92095453e-01 5.59708893e-01 ... 2.63865888e-01 1.34710863e-01 5.11937559e-01]] ... [[ 5.52782178e-01 4.52118248e-01 5.59140861e-01 ... 6.12673044e-01 6.29137993e-01 6.88075125e-01] [ 3.04735005e-01 1.92263886e-01 1.05143726e-01 ... 9.83639583e-02 4.41104412e-01 9.48790669e-01] [ 5.30923903e-01 2.95998633e-01 1.28522426e-01 ... 5.07210791e-01 5.31695724e-01 3.80537361e-01] ... [ 3.54874074e-01 3.58010262e-01 5.19626178e-02 ... 3.05754364e-01 6.99408054e-01 2.54029304e-01] [ 6.08401299e-01 4.76098329e-01 1.59328267e-01 ... 7.11021960e-01 1.46260187e-01 1.15097500e-01] [ 5.32096684e-01 4.13851142e-01 -1.72616720e-01 ... 4.25233364e-01 1.94270030e-01 1.83067217e-01]] [[ 6.91688061e-01 3.36737186e-01 1.93937063e-01 ... 7.14641333e-01 1.20505691e-01 1.27698466e-01] [ 2.15043649e-01 7.06943035e-01 5.38074672e-01 ... 3.83886725e-01 2.08346918e-01 5.16329050e-01] [ 5.25424719e-01 4.00120020e-01 6.78903222e-01 ... 4.48770881e-01 1.91150695e-01 2.40395263e-01] ... [ 6.08544707e-01 4.92979974e-01 5.26724517e-01 ... 3.12827736e-01 5.07771432e-01 4.98329282e-01] [ 7.62639344e-01 6.80673778e-01 5.32937169e-01 ... 6.63606763e-01 2.86463320e-01 3.38881284e-01] [ 3.73572558e-01 4.14351016e-01 3.82079840e-01 ... 5.28006673e-01 4.34731990e-01 4.84627515e-01]] [[ 3.55734289e-01 3.59926730e-01 2.14938030e-01 ... 4.18608487e-01 6.65956855e-01 2.48771727e-01] [ 5.07048890e-02 3.47420901e-01 7.92522609e-01 ... 3.64783943e-01 2.27942005e-01 1.55631065e-01] [ 1.35310754e-01 6.26589954e-01 4.19239074e-01 ... 2.47817293e-01 7.07601666e-01 5.25717020e-01] ... [ 2.79563636e-01 5.52848637e-01 3.50470632e-01 ... 1.24173246e-01 6.87006652e-01 1.04333334e-01] [ 3.75084192e-01 3.24032724e-01 2.57854193e-01 ... 3.91182423e-01 4.73228723e-01 2.14791000e-01] [ 4.69006538e-01 3.87012482e-01 5.36015749e-01 ... 3.49459559e-01 4.41201717e-01 1.34511694e-01]]] [[[ 1.27574968e+00 1.01792388e-01 -3.16133589e-01 ... 
2.32533407e+00 4.71684456e-01 8.00018609e-01] [ 1.68653059e+00 -3.17680311e+00 1.85623693e+00 ... 4.32849169e-01 1.38174236e-01 1.15084994e+00] [ 1.94259569e-01 -3.16766548e+00 1.81176782e+00 ... -4.67128187e-01 6.74685717e-01 -2.20482540e+00] ... [ 1.07642484e+00 2.14332700e+00 9.58480716e-01 ... -4.70409244e-01 -6.53475463e-01 -4.62451309e-01] [ 1.61687350e+00 3.63370061e+00 -7.41414487e-01 ... 1.15474904e+00 -1.41207898e+00 2.60978293e+00] [-1.49821711e+00 -2.78589249e-01 7.90426373e-01 ... 3.26482511e+00 1.54795074e+00 5.38761556e-01]] [[ 1.80589736e+00 7.35328853e-01 4.36728150e-01 ... 4.76244658e-01 2.80794382e+00 2.26136804e+00] [ 7.92443097e-01 4.84426051e-01 2.30516243e+00 ... 4.32050514e+00 1.40902257e+00 1.99193084e+00] [ 1.10953510e+00 -4.70686823e-01 4.12241459e-01 ... 2.99500990e+00 -1.15275657e+00 9.35853720e-02] ... [ 5.31775951e-01 -1.20490801e+00 -1.68606952e-01 ... 2.65769458e+00 1.61553085e+00 -2.04614902e+00] [-3.82980645e-01 2.67806530e+00 -1.21361665e-01 ... -3.78366053e-01 9.79791522e-01 4.41713452e-01] [ 2.77691555e+00 5.61025977e-01 9.86731589e-01 ... 1.54042196e+00 1.03927194e-03 -7.30395466e-02]] [[ 3.09512305e+00 3.55875516e+00 1.76983917e+00 ... 7.56902874e-01 9.04017091e-01 -5.48126101e-02] [-1.38538992e+00 3.02737308e+00 2.41660404e+00 ... -2.37452477e-01 -8.20114076e-01 -1.23738086e+00] [ 6.32518828e-01 -2.08497381e+00 1.56631124e+00 ... 1.80910957e+00 1.87432468e-01 -4.08656336e-03] ... [-3.00454378e-01 -3.25902775e-02 6.82059288e-01 ... 1.09682584e+00 1.67852557e+00 5.31463385e-01] [-8.12516749e-01 -9.17340755e-01 2.94625378e+00 ... 9.63545218e-02 4.28360033e+00 8.29613686e-01] [ 2.58548522e+00 2.10021996e+00 2.47482634e+00 ... 1.57232857e+00 2.28668904e+00 2.92998338e+00]] ... [[-6.93636239e-01 3.38532138e+00 1.39946088e-01 ... 1.37144434e+00 5.91085494e-01 -2.15324715e-01] [ 1.58453631e+00 1.10691130e+00 -1.17122456e-01 ... 1.99451649e+00 7.60988891e-01 9.91704404e-01] [ 1.25585091e+00 1.13270891e+00 5.72786629e-01 ... 
2.16837478e+00 1.06240416e+00 7.67840326e-01] ... [ 4.63220119e-01 -3.55681270e-01 6.54200852e-01 ... 2.07866803e-01 -6.88436210e-01 -1.68191507e-01] [-1.01654112e+00 2.17785046e-01 1.71739054e+00 ... 1.32019329e+00 1.38279974e+00 3.19997042e-01] [ 8.12498212e-01 -2.21365714e+00 -1.09661913e+00 ... 7.10482121e-01 2.95278025e+00 8.10553253e-01]] [[-2.09271625e-01 2.30126333e+00 1.91845894e+00 ... -5.12965381e-01 1.81649339e+00 -4.42542225e-01] [-3.57531935e-01 -3.13316107e-01 8.54861975e-01 ... 3.80171180e+00 8.36825430e-01 6.71991169e-01] [ 1.10421956e+00 3.07726693e+00 8.70821476e-01 ... 2.60542417e+00 1.85457134e+00 1.69643259e+00] ... [ 1.15379846e+00 4.09746885e+00 -3.41725588e-01 ... -1.37672961e-01 -6.12956025e-02 2.93173194e+00] [ 1.44919479e+00 -1.67198777e+00 5.67969024e-01 ... 3.57589215e-01 2.25411272e+00 2.89766240e+00] [ 5.47326863e-01 1.12695718e+00 9.70540404e-01 ... -2.34959602e+00 -5.30600131e-01 -2.18664244e-01]] [[ 1.89051890e+00 -7.03547120e-01 2.12676430e+00 ... 1.61388516e+00 -4.78621632e-01 -1.18503571e+00] [-4.87926126e-01 4.90120351e-01 -8.47806752e-01 ... 3.31823111e+00 1.29451239e+00 1.07946074e+00] [ 2.10830164e+00 4.49734628e-01 3.87806559e+00 ... 1.69539332e+00 8.94473970e-01 2.05687150e-01] ... [ 9.31047261e-01 1.25367570e+00 -3.39684904e-01 ... 1.76556361e+00 -1.38157284e+00 -9.85920012e-01] [ 8.90061796e-01 1.94420552e+00 2.51012301e+00 ... 3.47476792e+00 1.64318120e+00 1.38512298e-01] [-2.02469969e+00 -2.88855940e-01 2.04185891e+00 ... 2.03993392e+00 9.33131501e-02 -1.37786794e+00]]] [[[ 1.26259649e+00 2.19455600e+00 1.08799624e+00 ... 4.81591731e-01 2.03836179e+00 2.55780602e+00] [ 1.34769928e+00 1.83940899e+00 2.76274085e-01 ... 3.18359315e-01 3.20603609e+00 8.35378647e-01] [ 1.75000250e+00 9.98898149e-01 7.48075485e-01 ... 4.70330864e-01 1.05032086e+00 1.46760583e+00] ... [ 1.36496806e+00 2.24893928e+00 9.82634485e-01 ... 2.75331438e-01 9.22530666e-02 1.93410480e+00] [ 6.32126212e-01 2.90051150e+00 1.54500067e+00 ... 
2.95039415e-01 1.01987219e+00 2.25458050e+00] [ 2.11501646e+00 1.65777695e+00 9.71204042e-01 ... 1.83393097e+00 -2.36251354e-01 8.68912041e-01]] [[ 1.95657349e+00 1.16957390e+00 7.87531257e-01 ... 2.29330897e+00 1.32579792e+00 1.18652380e+00] [-8.52475524e-01 1.25415182e+00 8.22114348e-01 ... 2.15335488e+00 2.09600878e+00 -6.66178986e-02] [ 1.16222847e+00 1.19745994e+00 2.64355659e+00 ... 7.41730392e-01 2.63915181e+00 7.21202612e-01] ... [ 1.00217175e+00 1.80511475e+00 2.39452124e+00 ... 1.84302151e+00 2.79479647e+00 1.52677405e+00] [ 3.60225946e-01 -3.63025874e-01 2.88078928e+00 ... 1.67039728e+00 1.61740887e+00 1.14381683e+00] [ 2.31817794e+00 1.11066973e+00 7.80209899e-01 ... 3.20201367e-01 1.04449141e+00 2.34892821e+00]] [[ 1.24343789e+00 -3.33779454e-02 1.48142397e+00 ... 1.94090569e+00 5.65112472e-01 1.11137140e+00] [ 2.09600806e+00 9.55443382e-01 1.18886876e+00 ... 1.64185345e+00 1.97898901e+00 1.45846713e+00] [ 1.31949306e+00 5.44485092e-01 8.73595357e-01 ... -4.26574022e-01 1.88872552e+00 1.74289274e+00] ... [ 4.61066484e-01 7.81245708e-01 1.46630692e+00 ... 2.00698137e-01 2.09772795e-01 1.01362109e+00] [ 2.22359896e+00 6.00802600e-01 2.69385695e+00 ... 2.07585025e+00 1.45674396e+00 8.09472144e-01] [ 2.38858151e+00 1.45454741e+00 9.34138477e-01 ... 2.22199106e+00 2.47528553e+00 7.05508232e-01]] ... [[ 7.30122149e-01 1.79324067e+00 2.44474554e+00 ... 1.98975956e+00 9.47137833e-01 2.17212605e+00] [ 2.13644886e+00 1.35355628e+00 3.85233259e+00 ... 1.98561060e+00 1.90178180e+00 2.89668274e+00] [ 6.31449282e-01 1.32391894e+00 1.70716202e+00 ... 1.40658689e+00 1.63830101e+00 2.22148609e+00] ... [ 5.74819803e-01 1.47555864e+00 2.03734612e+00 ... 1.89338303e+00 4.01004106e-01 1.04085171e+00] [ 1.53830099e+00 2.54978389e-01 8.21223110e-03 ... 2.45150661e+00 1.72570646e+00 3.08703399e+00] [ 5.49652755e-01 6.71040475e-01 1.59730041e+00 ... 1.28334153e+00 1.17679238e+00 7.08711505e-01]] [[ 1.21933687e+00 1.70731914e+00 1.48706019e+00 ... 
3.35117251e-01 2.72637343e+00 4.76039588e-01] [-1.50026664e-01 2.78744054e+00 6.66392028e-01 ... 2.42308593e+00 7.66536236e-01 2.09417796e+00] [ 3.21235394e+00 1.85269976e+00 1.42046535e+00 ... 3.69646668e-01 3.02229905e+00 2.31819272e+00] ... [ 1.67504871e+00 3.21115732e-01 1.70617640e+00 ... 1.74188113e+00 1.46051478e+00 1.44052875e+00] [ 2.06484342e+00 1.51431823e+00 1.84861350e+00 ... 2.16521549e+00 -1.83000684e-01 2.50719452e+00] [ 2.83202672e+00 2.09475470e+00 7.04084575e-01 ... 8.81758332e-01 2.11563611e+00 4.56385255e-01]] [[ 9.42048788e-01 1.21940339e+00 8.56142640e-02 ... 1.63053584e+00 2.01078081e+00 3.58382750e+00] [ 9.27501693e-02 4.59443629e-01 1.51406062e+00 ... 8.98990273e-01 1.46748161e+00 1.08221292e+00] [ 1.85489392e+00 1.98645860e-01 8.55382800e-01 ... 1.94654179e+00 1.27474856e+00 1.94314492e+00] ... [ 1.86292183e+00 1.48487985e+00 2.49110579e+00 ... 1.64083552e+00 1.61947918e+00 3.33139729e+00] [ 1.15218353e+00 2.17648879e-01 2.63641858e+00 ... 1.98359275e+00 1.93445325e+00 2.44395638e+00] [ 2.85708666e+00 1.64122808e+00 2.22045588e+00 ... 2.40692115e+00 5.03245473e-01 1.19771028e+00]]] [[[ 6.28620803e-01 7.34905243e-01 7.74020255e-01 ... 7.85518885e-01 6.96996093e-01 7.24072099e-01] [ 7.61531174e-01 6.50654733e-01 8.29251885e-01 ... 8.21789622e-01 6.35375261e-01 6.34910583e-01] [ 6.64960682e-01 7.88177133e-01 6.94699824e-01 ... 6.88681126e-01 7.64614582e-01 7.22465694e-01] ... [ 7.01351881e-01 7.45017052e-01 6.65108979e-01 ... 7.74595439e-01 5.61214089e-01 6.69693649e-01] [ 6.34180188e-01 6.84380949e-01 7.96541274e-01 ... 6.79707229e-01 7.09469318e-01 6.17529333e-01] [ 8.37391376e-01 7.23074257e-01 6.57316625e-01 ... 6.49006069e-01 7.50937879e-01 6.52352273e-01]] [[ 9.84758019e-01 7.45868385e-01 7.01266289e-01 ... 6.88469946e-01 6.44076705e-01 7.07541943e-01] [ 6.38440907e-01 6.98314905e-01 8.61265779e-01 ... 8.04011941e-01 7.12312222e-01 5.49420476e-01] [ 7.60506094e-01 6.65912330e-01 8.13176095e-01 ... 
6.46852374e-01 6.94817960e-01 6.87473118e-01] ... [ 6.31638825e-01 6.24372900e-01 6.07483208e-01 ... 7.97717392e-01 8.18569601e-01 6.16696835e-01] [ 8.00618112e-01 6.69430673e-01 6.95364058e-01 ... 6.86091900e-01 6.82360172e-01 6.54358447e-01] [ 6.71818435e-01 8.83931756e-01 6.65423989e-01 ... 7.48718679e-01 8.51165712e-01 6.24722719e-01]] [[ 6.51479065e-01 7.35676110e-01 7.20665634e-01 ... 7.11080134e-01 6.62492871e-01 7.87614703e-01] [ 6.34089708e-01 8.20966601e-01 7.17859507e-01 ... 7.38710344e-01 6.68160617e-01 7.18159735e-01] [ 6.64894700e-01 6.74189985e-01 7.25491822e-01 ... 7.28897393e-01 6.92071080e-01 7.60398030e-01] ... [ 5.94439387e-01 6.22546911e-01 6.94926560e-01 ... 6.51955068e-01 8.38915706e-01 8.14350963e-01] [ 7.27007508e-01 7.33832717e-01 6.49766862e-01 ... 5.98780870e-01 6.71234965e-01 8.66290212e-01] [ 6.74007475e-01 7.93818355e-01 6.46779656e-01 ... 7.76129425e-01 6.41316831e-01 6.60185754e-01]] ... [[ 6.29363239e-01 6.32335305e-01 6.72310829e-01 ... 7.02778399e-01 8.09015810e-01 6.73541367e-01] [ 7.61928558e-01 7.47639954e-01 5.49322307e-01 ... 7.24806905e-01 6.69739902e-01 6.92310870e-01] [ 6.41281426e-01 6.82886541e-01 6.77613735e-01 ... 6.73900485e-01 8.19641948e-01 5.78615487e-01] ... [ 7.29061484e-01 7.73861885e-01 7.40674436e-01 ... 7.98093021e-01 6.49332166e-01 6.76703215e-01] [ 6.25165641e-01 6.98280513e-01 5.85721433e-01 ... 6.31621301e-01 7.06899881e-01 7.39799738e-01] [ 7.14176357e-01 5.85115254e-01 8.67622912e-01 ... 7.95609176e-01 8.96036565e-01 8.32984984e-01]] [[ 7.75579393e-01 7.27929413e-01 7.64604151e-01 ... 6.97565734e-01 8.27559531e-01 6.13000691e-01] [ 7.79567361e-01 7.52471924e-01 7.11716831e-01 ... 6.98518515e-01 5.83006740e-01 8.27582419e-01] [ 7.36434698e-01 5.27224660e-01 6.67163491e-01 ... 6.90677106e-01 6.44624949e-01 6.55472755e-01] ... [ 7.54120171e-01 7.13111281e-01 8.78327131e-01 ... 6.33950472e-01 5.62386870e-01 6.51714385e-01] [ 7.06244886e-01 6.94770336e-01 8.01897824e-01 ... 
5.71421802e-01 6.73678279e-01 6.13803804e-01] [ 8.25029731e-01 7.97090411e-01 6.92178726e-01 ... 7.01420069e-01 6.07236087e-01 5.63199103e-01]] [[ 7.58598864e-01 7.06220150e-01 7.73105145e-01 ... 7.91884899e-01 6.88708246e-01 7.49047101e-01] [ 6.67485654e-01 6.48947895e-01 7.44133413e-01 ... 8.17248464e-01 6.57084644e-01 6.30657911e-01] [ 7.33117342e-01 7.82125175e-01 6.10282183e-01 ... 7.07898378e-01 7.02806175e-01 7.14702666e-01] ... [ 7.45066285e-01 7.16511965e-01 7.73750424e-01 ... 5.79861939e-01 7.19299138e-01 8.77594054e-01] [ 6.76165879e-01 7.38654256e-01 7.87194669e-01 ... 6.93884373e-01 7.14255691e-01 7.29932606e-01] [ 7.47143090e-01 7.59089410e-01 6.33418500e-01 ... 7.66091228e-01 6.93635285e-01 7.68180370e-01]]] [[[ 6.21986508e-01 8.81904364e-02 4.24287558e-01 ... 5.32788098e-01 4.68061298e-01 4.33249533e-01] [ 2.12132126e-01 2.03535050e-01 2.07352355e-01 ... 7.83744901e-02 4.44371849e-01 2.48344555e-01] [ 2.60318011e-01 3.25304419e-01 4.76144165e-01 ... 5.56289911e-01 7.94944942e-01 6.95405245e-01] ... [ 2.08639964e-01 7.07171202e-01 6.99011743e-01 ... 6.60074592e-01 5.18649280e-01 4.05083895e-01] [ 3.81293923e-01 2.30056763e-01 5.05522966e-01 ... 2.90985197e-01 4.02397709e-03 3.32233101e-01] [ 6.17803097e-01 4.72858787e-01 2.82577693e-01 ... 6.75497890e-01 7.59347498e-01 5.77763975e-01]] [[ 6.20868146e-01 2.24303871e-01 5.15930474e-01 ... 3.87296915e-01 1.16788380e-01 3.45051467e-01] [-3.22133675e-03 7.33927369e-01 5.77907503e-01 ... 1.67931616e-01 5.92101097e-01 1.37790456e-01] [ 3.24092150e-01 5.08728981e-01 2.79550582e-01 ... 9.64554474e-02 4.41562682e-01 8.87975931e-01] ... [ 1.53113738e-01 1.09575748e+00 4.31615770e-01 ... 1.29548341e-01 3.03183049e-01 2.84300894e-01] [ 3.18121076e-01 3.09793234e-01 7.77986571e-02 ... 3.67636681e-01 4.71469671e-01 2.98556298e-01] [-9.59726050e-02 4.25003618e-01 1.22159332e-01 ... 1.33328913e-02 4.48766500e-01 3.36286753e-01]] [[ 5.28298497e-01 1.80330709e-01 6.44064695e-02 ... 
4.11164373e-01 2.83941299e-01 2.80246228e-01] [ 2.35271722e-01 1.67166010e-01 5.30042827e-01 ... 6.47935688e-01 -2.00113244e-02 6.60022676e-01] [ 4.22608197e-01 4.32262957e-01 4.68653411e-01 ... 4.63851213e-01 -3.16024050e-02 2.73466825e-01] ... [ 5.17023504e-01 3.83666784e-01 6.12476707e-01 ... 2.03798488e-01 6.54182494e-01 2.17477635e-01] [ 4.91480947e-01 2.04632670e-01 1.17938332e-01 ... 7.05125153e-01 4.94723260e-01 4.08047795e-01] [-1.44131273e-01 3.60707581e-01 5.57289183e-01 ... 2.35553443e-01 4.06377286e-01 5.93476117e-01]] ... [[ 2.74716467e-01 1.62002206e-01 4.46182489e-01 ... 6.14586711e-01 7.62939975e-02 3.23994428e-01] [-1.87909305e-01 2.86585450e-01 -7.17415735e-02 ... 6.21613801e-01 4.63934064e-01 2.01083854e-01] [ 7.22870290e-01 3.74787360e-01 7.10455716e-01 ... -7.50356296e-04 2.07986370e-01 9.12630737e-01] ... [-1.74242929e-02 3.49648863e-01 2.09992066e-01 ... 4.19854224e-01 6.57486141e-01 1.82483062e-01] [ 4.07755762e-01 -4.75127660e-02 4.22124892e-01 ... 2.85170317e-01 4.00716066e-01 3.06481987e-01] [ 4.52748626e-01 5.84685564e-01 4.70270932e-01 ... 7.90285349e-01 -4.24796969e-01 6.85768723e-01]] [[ 2.60097086e-01 2.23495096e-01 6.89737976e-01 ... 2.78292149e-01 2.55704343e-01 2.79746085e-01] [ 4.37777698e-01 3.34738731e-01 5.43032289e-01 ... 3.71341974e-01 5.18327534e-01 6.14789784e-01] [ 3.52775574e-01 2.74749130e-01 4.54202294e-01 ... 3.54514539e-01 4.30401564e-01 6.00769281e-01] ... [ 2.68690079e-01 5.88610880e-02 4.75417882e-01 ... 5.12279391e-01 7.64538348e-01 4.43854928e-01] [ 2.65013948e-02 8.33159506e-01 2.67916411e-01 ... 2.06680670e-01 4.10767466e-01 3.38032603e-01] [ 5.39039075e-01 2.73169518e-01 2.00488180e-01 ... 1.27708837e-01 5.49901016e-02 -4.72132042e-02]] [[ 2.32260376e-01 1.82636470e-01 3.86316031e-01 ... 2.28363201e-02 -4.63070441e-03 1.21293813e-01] [ 5.21365643e-01 3.59946668e-01 8.34123611e-01 ... 3.59381557e-01 5.29324532e-01 2.79143363e-01] [ 5.34401059e-01 1.26546100e-01 8.83465230e-01 ... 
5.75273812e-01 5.42459905e-01 2.97795087e-01] ... [ 5.15363872e-01 2.39464268e-01 4.74065661e-01 ... 5.94688952e-01 5.98500669e-01 1.98420938e-02] [ 2.76697993e-01 5.85344195e-01 5.67983426e-02 ... 3.67084406e-02 4.40888613e-01 3.57613325e-01] [ 6.70163691e-01 1.96930736e-01 5.39889693e-01 ... 6.33807719e-01 6.45338237e-01 1.30562812e-01]]] [[[ 1.11393011e+00 4.55592394e-01 1.29730260e+00 ... -1.05116653e+00 -1.49129522e+00 6.94420516e-01] [ 1.84103027e-01 1.54751098e+00 1.17382467e+00 ... 9.17959511e-01 6.98916018e-01 7.49517024e-01] [ 5.00161886e-01 2.18632746e+00 2.91464019e+00 ... -8.36869553e-02 6.47608459e-01 8.75301123e-01] ... [ 1.72883403e+00 4.34211314e-01 3.86155516e-01 ... -1.43837023e+00 7.41249800e-01 -4.37949270e-01] [-2.26768941e-01 1.37481034e+00 -6.48607969e-01 ... 1.14269638e+00 6.52156830e-01 4.55153018e-01] [ 9.73191679e-01 6.10025942e-01 -9.40999746e-01 ... 2.62148920e-02 3.22715729e-01 -2.22588539e-01]] [[ 1.81950974e+00 -4.78080362e-01 2.93286085e-01 ... 9.11726475e-01 1.76592684e+00 3.60598266e-01] [ 1.42046738e+00 -1.10925150e+00 1.61180142e-02 ... 1.52667940e+00 3.19546074e-01 4.29394454e-01] [-3.75354052e-01 1.04067206e+00 -5.91165364e-01 ... 2.64078116e+00 1.44520914e-02 9.24866915e-01] ... [-5.61792016e-01 2.97591776e-01 6.27440274e-01 ... 1.80382442e+00 -7.69093394e-01 1.00732124e+00] [-9.60685253e-01 6.25086069e-01 -3.40346813e-01 ... 7.15093493e-01 -3.21804285e-01 3.75945151e-01] [ 1.09770632e+00 3.88951331e-01 1.05347955e+00 ... 6.86065555e-01 -1.38311669e-01 3.40252310e-01]] [[ 1.12307107e+00 -9.26545024e-01 -1.74287230e-01 ... 3.46257925e-01 -1.81761414e-01 -6.61618859e-02] [ 6.00778222e-01 -6.86570823e-01 -2.14738533e-01 ... 2.26197672e+00 1.61884093e+00 -3.57158661e-01] [ 9.80523646e-01 1.23171961e+00 1.02046037e+00 ... -6.88328564e-01 1.07088745e+00 -8.15017760e-01] ... [-7.00035766e-02 -5.06226957e-01 -3.64435971e-01 ... -5.31233072e-01 3.82725745e-01 4.10292037e-02] [ 2.79028893e-01 -8.00399125e-01 1.69402874e+00 ... 
1.57229090e+00 -2.10155427e-01 5.09441197e-01] [ 9.24075723e-01 4.55619812e-01 5.46280257e-02 ... 2.72764981e-01 1.06803620e+00 6.46181107e-01]] ... [[ 3.89969528e-01 -3.14763725e-01 6.99185073e-01 ... 1.70696580e+00 -4.06348705e-01 2.00476140e-01] [ 2.09311175e+00 1.02972463e-01 1.15774310e+00 ... -5.45252562e-01 1.48302650e+00 6.11090958e-02] [-1.97541475e-01 1.11366904e+00 1.10708153e+00 ... 1.75423646e+00 1.08531427e+00 2.48600706e-01] ... [ 1.84883606e+00 -1.62058282e+00 1.12008333e+00 ... 5.33165216e-01 -5.32751679e-01 1.82657540e+00] [-1.33200729e+00 1.25017154e+00 1.69708705e+00 ... 3.51628065e-01 -2.87358314e-01 3.35387625e-02] [ 2.19615912e+00 8.26268673e-01 7.06891716e-01 ... -6.20870888e-01 1.79680836e+00 -5.06043732e-01]] [[ 1.68417346e+00 1.64452910e+00 8.39396417e-01 ... -1.16564178e+00 -1.98151737e-01 9.45333004e-01] [ 5.12872636e-01 5.50930858e-01 1.00719380e+00 ... 1.99846053e+00 2.25597173e-01 1.34183049e+00] [ 2.14726901e+00 5.68567872e-01 2.69457769e+00 ... -2.66486585e-01 1.15924037e+00 -3.51873457e-01] ... [ 1.48118603e+00 1.06274927e+00 -1.07995644e-01 ... 1.25618792e+00 6.65438235e-01 7.18233049e-01] [ 1.96570480e+00 1.75270841e-01 7.94251978e-01 ... -3.12683247e-02 1.53699458e+00 1.14505291e+00] [ 3.85781914e-01 1.00744829e-01 3.37873608e-01 ... 8.68163466e-01 3.97313386e-01 1.11413431e+00]] [[-9.99204874e-01 -9.19424534e-01 -8.66690651e-02 ... 1.13034356e+00 6.01733685e-01 1.57822001e+00] [-2.45672300e-01 8.15907717e-01 8.64982128e-01 ... -2.62780935e-01 -5.11351973e-02 6.53142154e-01] [ 8.02831888e-01 1.10794139e+00 8.91472101e-01 ... -9.09816682e-01 1.14446557e+00 -2.99078748e-02] ... [ 5.48494160e-01 1.21674943e+00 4.13419247e-01 ... 7.02152133e-01 -7.91433513e-01 1.31697965e+00] [ 1.42964828e+00 6.46733642e-01 7.31478810e-01 ... 9.91477370e-01 8.28303039e-01 1.84281492e+00] [-1.05267286e+00 2.95877367e-01 4.17473733e-01 ... -4.14975196e-01 1.84523845e+00 1.94074404e+00]]]] ... [[[[ 4.10871208e-01 3.14023554e-01 -2.29505934e-02 ... 
6.99279487e-01 4.89635706e-01 2.55506605e-01] [ 6.92063630e-01 6.33940220e-01 5.20152934e-02 ... 4.99758929e-01 1.87251225e-01 9.39203560e-01] [ 5.32464802e-01 2.81684160e-01 4.44811255e-01 ... 3.62844557e-01 6.93526089e-01 7.39512026e-01] ... [ 4.58315760e-01 5.14871776e-01 3.49865019e-01 ... 4.96999830e-01 4.13492262e-01 -2.59154215e-02] [ 4.37345654e-02 1.15810059e-01 1.04217328e-01 ... 6.79096639e-01 6.29158974e-01 3.82042110e-01] [ 3.42796564e-01 6.02389097e-01 4.16865230e-01 ... 3.75667453e-01 4.77950424e-01 8.55973884e-02]] [[ 1.34775624e-01 4.13179904e-01 7.52018929e-01 ... 2.74857640e-01 1.99496761e-01 4.18766230e-01] [ 6.73461199e-01 4.27951515e-01 4.79561836e-01 ... 4.84642476e-01 6.77576125e-01 4.94578481e-01] [ 5.56484818e-01 4.03346896e-01 4.92065609e-01 ... 8.17418873e-01 3.44490349e-01 6.73809052e-01] ... [ 4.73701537e-01 3.09718072e-01 5.69682121e-01 ... 3.20625901e-01 3.45306367e-01 4.10523444e-01] [ 4.75590616e-01 -9.69241746e-03 7.45603740e-01 ... 7.64693737e-01 7.74304926e-01 4.57723349e-01] [ 4.28865373e-01 3.43718708e-01 6.71419859e-01 ... 3.62882167e-01 4.68812943e-01 2.39984393e-01]] [[ 8.90898526e-01 2.19512612e-01 -5.52636012e-02 ... 1.54989168e-01 3.58276844e-01 4.68587637e-01] [ 5.49562931e-01 4.62748796e-01 8.38447690e-01 ... -1.70635611e-01 5.64660788e-01 -1.33104157e-02] [ 4.51987356e-01 7.24879727e-02 4.37537104e-01 ... 3.14563513e-01 3.47727269e-01 2.23907113e-01] ... [ 5.07513940e-01 3.03562194e-01 2.60925531e-01 ... 3.50749433e-01 2.01202452e-01 3.55660617e-01] [ 3.66849363e-01 1.66833803e-01 3.59765351e-01 ... 5.67720413e-01 7.47535050e-01 7.50146091e-01] [ 8.70713890e-01 3.17804754e-01 3.85163724e-01 ... 2.61474431e-01 1.27704844e-01 3.27015400e-01]] ... [[ 2.96521485e-01 4.61518794e-01 7.94782996e-01 ... 2.06200108e-01 2.12848097e-01 3.73332500e-01] [ 1.81092516e-01 6.23688661e-02 4.17402327e-01 ... 2.69026160e-01 4.42205220e-01 4.80301380e-01] [ 3.56852889e-01 3.25488657e-01 2.24358991e-01 ... 
5.10312736e-01 3.65588605e-01 5.80052078e-01] ... [ 2.14884788e-01 1.87804803e-01 6.37434661e-01 ... 4.48056012e-01 4.28527206e-01 3.08145791e-01] [ 2.09745169e-01 9.37056765e-02 3.38882446e-01 ... 4.22198653e-01 5.38170516e-01 6.79767430e-01] [ 3.08260750e-02 4.22337383e-01 2.78565526e-01 ... 5.09859204e-01 3.44029486e-01 1.00331269e-01]] [[-4.07652818e-02 6.67526960e-01 5.13767958e-01 ... 4.65378553e-01 9.11544934e-02 2.33349636e-01] [ 6.40374124e-01 4.73049700e-01 2.73579717e-01 ... 5.70637465e-01 3.48071933e-01 2.39973843e-01] [ 3.60870838e-01 5.85532486e-01 3.72315198e-01 ... 1.25972942e-01 4.44715798e-01 2.43545189e-01] ... [ 3.56365263e-01 1.79176718e-01 2.95184612e-01 ... 3.04512382e-01 2.20589355e-01 4.91596460e-01] [ 3.07200879e-01 5.76272249e-01 7.02446103e-01 ... 4.07007605e-01 4.37519312e-01 4.38073963e-01] [ 3.07201684e-01 7.54073739e-01 4.64183599e-01 ... 5.33943951e-01 4.59175140e-01 5.49797297e-01]] [[ 3.17039967e-01 3.78588229e-01 6.30337000e-01 ... 5.18938243e-01 3.32816064e-01 2.85173774e-01] [ 3.71785074e-01 5.97181618e-01 7.69257322e-02 ... -2.27559451e-03 6.53697550e-01 3.02076578e-01] [ 1.12990074e-01 3.64062607e-01 3.93046111e-01 ... 4.76348698e-01 3.90037864e-01 7.04898000e-01] ... [ 6.78498685e-01 2.42297351e-01 2.58013368e-01 ... -1.54062167e-01 6.16660118e-01 4.71597493e-01] [ 2.62812018e-01 2.85216719e-01 6.06854439e-01 ... 5.88293612e-01 5.79576075e-01 5.04585862e-01] [ 6.85352743e-01 4.81344163e-01 5.54349065e-01 ... -6.69853389e-02 -7.48888357e-03 1.78560540e-01]]] [[[ 1.20295517e-01 1.97836661e+00 3.26546550e-01 ... 9.99269783e-01 -2.78017502e-02 1.89940488e+00] [ 1.03050995e+00 2.20105624e+00 3.49793732e-01 ... 5.80343246e-01 4.28582698e-01 3.94742787e-01] [ 1.32914960e+00 -4.21277970e-01 -5.88027775e-01 ... -1.24549687e+00 -7.79999256e-01 1.13012302e+00] ... [-2.12068224e+00 1.66291237e+00 1.29582179e+00 ... 3.38935882e-01 4.45235825e+00 3.90043592e+00] [ 2.06443381e+00 -1.46858871e+00 2.67144531e-01 ... 
1.01720124e-01 2.30478382e+00 2.15955424e+00] [ 4.00419682e-01 3.27975202e+00 2.96847749e+00 ... 8.62919509e-01 -1.46043599e+00 1.58571339e+00]] [[-1.64910957e-01 -8.27894449e-01 2.61563897e+00 ... -3.63687515e-01 1.91553563e-01 1.84061158e+00] [ 2.06247973e+00 8.11411440e-01 -1.42676008e+00 ... -1.32932043e+00 2.35857773e+00 9.02024806e-02] [ 1.07244682e+00 -8.88622165e-01 2.95557883e-02 ... 1.52099401e-01 -4.97407228e-01 9.45158720e-01] ... [ 1.74228787e+00 2.57283163e+00 9.57575440e-02 ... 1.91076362e+00 1.65846503e+00 -1.26025653e+00] [ 1.01732850e+00 4.40911591e-01 2.54540062e+00 ... 8.76900494e-01 1.64112234e+00 2.30572057e+00] [ 1.99955523e+00 -1.11938357e+00 2.00941467e+00 ... 8.39848220e-01 -1.51306522e+00 -1.31805921e+00]] [[-5.77970505e-01 2.93381906e+00 2.94164896e+00 ... -2.66275406e-01 2.88747072e+00 6.86774552e-01] [-1.73349547e+00 4.58189547e-01 -1.33197236e+00 ... -1.79232061e+00 3.13135982e-01 3.18255544e+00] [ 2.24300766e+00 3.39079827e-01 -1.30475271e+00 ... 9.87292826e-01 -1.02939260e+00 -1.59494412e+00] ... [ 2.61405635e+00 -6.74456179e-01 1.02788043e+00 ... 1.41367960e+00 1.11455882e+00 -1.48873374e-01] [ 1.33489096e+00 8.53960395e-01 1.25727248e+00 ... 3.00238180e+00 1.22384973e-01 1.55909657e+00] [ 4.69313383e-01 8.66014585e-02 -1.32265520e+00 ... -3.78942877e-01 5.68584800e-01 6.62022352e-01]] ... [[-3.28011179e+00 3.44834733e+00 4.02143933e-02 ... 4.75346476e-01 -9.54756677e-01 -4.80338961e-01] [ 3.35184783e-02 2.30508113e+00 2.97576833e+00 ... 1.30745685e+00 5.31764627e-01 2.70165873e+00] [ 7.11022019e-01 -1.65203893e+00 -2.48412713e-01 ... 3.02196711e-01 6.88757956e-01 -1.15145898e+00] ... [-7.09612191e-01 1.24873948e+00 2.88330168e-01 ... 4.56417531e-01 1.94427705e+00 -5.08088052e-01] [ 1.88464940e+00 1.39567757e+00 -1.23830009e+00 ... 2.27283812e+00 1.56125617e+00 7.34728634e-01] [-2.03383461e-01 1.23302007e+00 1.24644244e+00 ... -1.88822210e-01 -6.51646554e-01 1.12976229e+00]] [[-3.42554189e-02 -8.82796228e-01 4.52892065e+00 ... 
4.38519448e-01 2.93859029e+00 1.59496975e+00] [-9.27398980e-01 -1.63879716e+00 -9.77025688e-01 ... 3.03651452e-01 2.20280719e+00 3.38693309e+00] [ 7.82053843e-02 2.09592938e+00 2.34349799e+00 ... 1.43890178e+00 -9.80668008e-01 1.67270303e+00] ... [ 2.14624262e+00 -1.83305573e+00 1.42649937e+00 ... 7.62833297e-01 1.22501634e-01 1.35454822e+00] [ 2.16473508e+00 3.07343435e+00 1.18113649e+00 ... 1.33067071e-01 1.47595370e+00 1.76559910e-02] [ 7.59196937e-01 3.01822853e+00 2.43446589e+00 ... -9.82312739e-01 5.49286425e-01 1.07757223e+00]] [[ 1.23704684e+00 2.83827806e+00 2.91197062e+00 ... 2.70671678e+00 -3.39014828e-01 7.51489818e-01] [-2.32656550e+00 7.88614333e-01 2.51232922e-01 ... 2.97770411e-01 2.57774734e+00 7.62095690e-01] [ 3.47572237e-01 -1.12390840e+00 -4.88095433e-01 ... 1.40317452e+00 1.55239969e-01 5.43294430e-01] ... [ 3.07999182e+00 1.63810766e+00 3.04952919e-01 ... 6.67134106e-01 -6.35330141e-01 2.84158325e+00] [ 3.82012701e+00 9.90330517e-01 1.56149900e+00 ... 2.90377593e+00 8.09848785e-01 1.83925366e+00] [ 3.22919965e-01 1.17558730e+00 -1.58636093e+00 ... -2.85390139e-01 7.55199373e-01 2.91461301e+00]]] [[[ 2.04356432e+00 1.41987407e+00 1.13490272e+00 ... 1.83803582e+00 9.77117777e-01 1.02683830e+00] [ 2.38223553e+00 1.03980255e+00 2.56262839e-01 ... 2.11054325e+00 3.11740190e-01 1.81899881e+00] [ 1.14149773e+00 1.36870706e+00 1.48836803e+00 ... 3.23423713e-01 -3.63665730e-01 1.03389359e+00] ... [ 2.51988029e+00 9.96915281e-01 4.43122625e-01 ... 2.96437645e+00 1.51603198e+00 1.11086488e+00] [ 1.67245209e+00 3.25889921e+00 1.56193113e+00 ... 2.17792392e+00 1.74223471e+00 1.32174265e+00] [ 6.71324134e-01 2.46307254e+00 1.74562645e+00 ... 1.08166444e+00 9.21842873e-01 1.87247169e+00]] [[ 1.65000522e+00 2.13488626e+00 8.74353290e-01 ... 7.78928518e-01 1.32159197e+00 1.15384412e+00] [ 2.16350722e+00 1.32036042e+00 8.04719567e-01 ... 1.24609506e+00 5.63441336e-01 2.59189534e+00] [ 1.84668612e+00 1.91143596e+00 1.10776484e+00 ... 
8.19623291e-01 1.38697982e+00 9.23529506e-01] ... [ 1.11225319e+00 1.21277273e+00 9.21840608e-01 ... 2.60583115e+00 1.37659144e+00 1.68578839e+00] [-9.96790707e-01 1.64589238e+00 8.83043826e-01 ... 1.32817078e+00 9.36409056e-01 1.52177143e+00] [ 2.47140074e+00 1.91842628e+00 2.80417299e+00 ... 9.66460705e-01 2.10739827e+00 2.12493753e+00]] [[ 7.01579869e-01 1.69971573e+00 1.48066139e+00 ... 2.92031693e+00 7.43004024e-01 2.69910192e+00] [ 2.54711598e-01 1.81324005e+00 2.40065670e+00 ... 2.37733173e+00 2.06420493e+00 1.62030590e+00] [ 3.85935783e+00 2.35991049e+00 -4.45116758e-02 ... 1.65567005e+00 3.23360968e+00 1.52551818e+00] ... [ 2.45070811e-02 3.29434067e-01 1.17696428e+00 ... 1.43143785e+00 2.47437310e+00 3.69891673e-01] [ 7.83904374e-01 1.60323739e+00 1.19323897e+00 ... 3.17269266e-01 1.90673733e+00 1.09712541e+00] [ 1.30937016e+00 5.90881169e-01 4.03377563e-01 ... 1.36856973e+00 7.64602661e-01 7.01440394e-01]] ... [[ 1.59387791e+00 9.63588238e-01 1.89046049e+00 ... 2.14440632e+00 2.14651990e+00 1.31046498e+00] [ 8.38153422e-01 3.25342345e+00 2.01110911e+00 ... 2.23386064e-01 2.27416301e+00 1.49835992e+00] [ 4.50674206e-01 2.11723280e+00 2.00367236e+00 ... 1.00776970e+00 1.22338569e+00 1.73032892e+00] ... [ 1.30760980e+00 3.85668683e+00 8.42153430e-01 ... 2.18353105e+00 1.46952009e+00 1.24953032e+00] [ 1.79644156e+00 1.39344251e+00 1.76798427e+00 ... 8.93617392e-01 1.24202275e+00 1.28141797e+00] [ 8.84442270e-01 2.13591623e+00 7.35376120e-01 ... 1.72910631e-01 2.39485884e+00 1.19343650e+00]] [[ 2.71838129e-01 7.97244668e-01 1.97443712e+00 ... 8.65028799e-01 1.32643414e+00 2.77296042e+00] [ 2.06789899e+00 2.06713295e+00 1.59096861e+00 ... 2.52085090e+00 5.00666082e-01 2.27352738e-01] [ 2.69238353e+00 1.18218029e+00 2.46451497e+00 ... 1.63186097e+00 8.44125092e-01 1.72222281e+00] ... [ 1.05249047e+00 5.70802093e-01 1.68208325e+00 ... 3.87262762e-01 1.61972606e+00 1.19432461e+00] [ 1.51424015e+00 2.18447638e+00 2.67832613e+00 ... 
1.03224385e+00 5.53867042e-01 1.77523553e+00] [ 2.62491870e+00 1.93417001e+00 3.54234862e+00 ... 2.11531901e+00 1.24756145e+00 2.12356853e+00]] [[ 2.62380749e-01 1.28072071e+00 4.92344886e-01 ... 1.56825209e+00 1.47723317e+00 8.33045006e-01] [ 1.37981248e+00 9.38363314e-01 1.71390879e+00 ... 2.01771760e+00 1.20313716e+00 1.72650874e+00] [ 2.12104273e+00 3.45281696e+00 2.04253650e+00 ... 2.48597622e+00 8.53569686e-01 5.48162162e-02] ... [ 2.81697273e+00 2.67765373e-01 2.63175392e+00 ... 1.19614255e+00 1.66840601e+00 1.65137124e+00] [ 1.31206727e+00 6.62069917e-01 3.76593500e-01 ... 3.68929774e-01 1.11988842e+00 2.70798469e+00] [ 4.30461258e-01 1.97519493e+00 1.24357104e+00 ... 2.88917518e+00 1.74337995e+00 1.55801797e+00]]] [[[ 6.80797517e-01 7.08747327e-01 7.35337973e-01 ... 5.82821727e-01 7.45709479e-01 7.78182983e-01] [ 6.28371239e-01 7.88029253e-01 6.28866136e-01 ... 7.56352127e-01 8.15633237e-01 8.10989439e-01] [ 5.57129145e-01 6.66559160e-01 7.42827773e-01 ... 6.56625390e-01 6.84417367e-01 6.79266810e-01] ... [ 5.23868501e-01 7.24572062e-01 6.94944978e-01 ... 7.75597751e-01 8.74281943e-01 7.47017443e-01] [ 6.69080257e-01 7.04070389e-01 6.41071260e-01 ... 7.91523576e-01 7.05539584e-01 5.68015158e-01] [ 7.55506754e-01 8.35389853e-01 7.96693742e-01 ... 6.23971224e-01 7.50663400e-01 6.88970804e-01]] [[ 7.58862555e-01 7.60072589e-01 8.14022601e-01 ... 7.55443335e-01 7.33316839e-01 7.17735887e-01] [ 5.76782525e-01 6.01419806e-01 7.26601243e-01 ... 6.20776474e-01 6.10977590e-01 5.42980909e-01] [ 6.65558040e-01 6.33333385e-01 6.51394606e-01 ... 8.07554662e-01 6.34283960e-01 7.38823891e-01] ... [ 6.28714561e-01 5.64593196e-01 7.07553625e-01 ... 7.34175563e-01 8.19415629e-01 7.26954699e-01] [ 7.46936679e-01 8.25950682e-01 6.39398396e-01 ... 7.47478306e-01 7.03923047e-01 6.81994557e-01] [ 7.36920774e-01 6.58946633e-01 7.27459371e-01 ... 5.84424913e-01 7.41567314e-01 6.94133222e-01]] [[ 6.61664188e-01 6.57368660e-01 6.99838042e-01 ... 
8.37798595e-01 7.15659559e-01 6.08637452e-01] [ 7.37585604e-01 6.82575524e-01 7.03461826e-01 ... 7.77115464e-01 7.09965169e-01 6.92575157e-01] [ 6.31549597e-01 5.98472118e-01 7.27439880e-01 ... 7.46113002e-01 6.17257595e-01 7.23207593e-01] ... [ 7.51911283e-01 7.57016361e-01 6.76285148e-01 ... 8.07787716e-01 6.92101836e-01 6.09579265e-01] [ 7.93685317e-01 8.04052353e-01 7.06237614e-01 ... 7.96530485e-01 7.13266671e-01 5.58598220e-01] [ 6.51361167e-01 6.50940359e-01 6.35780632e-01 ... 7.11578131e-01 5.89608729e-01 7.92325437e-01]] ... [[ 7.72886097e-01 6.60687745e-01 7.78189182e-01 ... 7.43026316e-01 8.14925849e-01 6.93395495e-01] [ 7.01489449e-01 5.55025399e-01 6.61477923e-01 ... 7.75618672e-01 7.59427667e-01 7.04217076e-01] [ 6.03339016e-01 6.52739942e-01 5.36655426e-01 ... 5.91606796e-01 7.70326972e-01 7.70422459e-01] ... [ 7.07167387e-01 7.20622420e-01 6.98200583e-01 ... 7.86903322e-01 7.19367266e-01 6.40498817e-01] [ 7.89482176e-01 6.49959922e-01 6.99485064e-01 ... 7.41442621e-01 6.74240708e-01 6.78893626e-01] [ 7.07066000e-01 7.84547925e-01 7.43717253e-01 ... 5.33792377e-01 5.16392410e-01 7.75946796e-01]] [[ 7.54737973e-01 6.71887398e-01 7.72358358e-01 ... 7.54077673e-01 7.98671901e-01 7.18258679e-01] [ 6.08711362e-01 7.99468935e-01 6.47381186e-01 ... 8.17956865e-01 6.63831472e-01 5.35376787e-01] [ 7.02087462e-01 7.06317604e-01 5.75277984e-01 ... 6.25514507e-01 8.29336524e-01 6.10937059e-01] ... [ 7.25191176e-01 6.60001099e-01 7.24881589e-01 ... 7.46925890e-01 7.08758175e-01 8.29337597e-01] [ 8.44056904e-01 8.17685306e-01 6.27750635e-01 ... 8.26767027e-01 6.96253896e-01 6.43377304e-01] [ 8.13014984e-01 7.91221976e-01 6.58847809e-01 ... 6.12612247e-01 6.26972795e-01 5.88337123e-01]] [[ 7.04710901e-01 6.12707376e-01 6.56311512e-01 ... 7.20335186e-01 7.55992711e-01 8.00684214e-01] [ 7.36817598e-01 5.66988468e-01 6.24186695e-01 ... 6.61392570e-01 6.99528635e-01 5.52517116e-01] [ 6.96657360e-01 6.78274691e-01 7.96440542e-01 ... 
6.99854016e-01 7.12751567e-01 7.41068661e-01] ... [ 7.41858065e-01 7.88247883e-01 7.53898561e-01 ... 8.16838622e-01 6.98776543e-01 7.54866362e-01] [ 8.62724066e-01 8.07282209e-01 6.29440248e-01 ... 6.51649714e-01 6.68328822e-01 7.50254512e-01] [ 7.64212310e-01 6.08854711e-01 6.23299181e-01 ... 7.34051347e-01 7.19889224e-01 5.10072052e-01]]] [[[ 3.36136758e-01 3.82125288e-01 5.74510753e-01 ... 3.89020652e-01 3.63115370e-01 2.39665523e-01] [ 5.88873148e-01 3.87382776e-01 4.36795712e-01 ... 6.31639063e-01 3.10196817e-01 5.90159535e-01] [ 3.00770640e-01 2.80969143e-01 3.42570335e-01 ... 4.21263367e-01 3.77633095e-01 3.65256250e-01] ... [ 1.21211663e-01 3.57819170e-01 3.31993014e-01 ... 1.59272641e-01 6.29196703e-01 2.09941879e-01] [ 5.77774167e-01 4.49396878e-01 7.78836071e-01 ... 5.22491693e-01 7.64462948e-01 2.13444293e-01] [ 7.50556231e-01 -1.38907462e-01 7.14087665e-01 ... -1.52809680e-01 3.74798849e-02 5.46923041e-01]] [[ 5.89902520e-01 1.56983081e-02 3.52373719e-01 ... 5.34895718e-01 2.99195617e-01 9.94477212e-01] [ 2.91929215e-01 3.52739841e-01 5.18890500e-01 ... 5.09309053e-01 6.88656330e-01 4.13235910e-02] [ 7.03755915e-01 5.76634645e-01 5.99185109e-01 ... 3.26437980e-01 1.26805127e-01 3.46990556e-01] ... [ 5.97862422e-01 7.73053050e-01 4.28020023e-02 ... 2.92316258e-01 -9.83964875e-02 1.99895978e-01] [ 5.70378900e-01 5.13185203e-01 -4.28567082e-01 ... 6.03256226e-01 4.02705759e-01 2.05525547e-01] [ 5.70166945e-01 4.34822589e-01 1.04764082e-01 ... 1.12326801e-01 7.60022223e-01 4.78245586e-01]] [[-2.32435949e-02 4.61982012e-01 7.67540693e-01 ... 1.49090942e-02 3.33123982e-01 5.62661111e-01] [ 8.74713659e-02 2.33518302e-01 2.76064396e-01 ... 5.55276513e-01 5.54922640e-01 3.38059701e-02] [ 2.30998382e-01 7.19554782e-01 2.34019488e-01 ... 4.51807767e-01 6.59713745e-01 5.24630725e-01] ... [ 1.07204244e-01 2.03054473e-01 4.38945949e-01 ... 1.91661805e-01 2.10384011e-01 3.09669495e-01] [ 9.97884154e-01 1.50784522e-01 2.94588476e-01 ... 
8.36079642e-02 4.05012369e-01 5.57490170e-01] [ 1.06617160e-01 6.78502202e-01 -7.44521245e-02 ... 9.79352295e-02 1.90960467e-01 7.80475795e-01]] ... [[ 3.79364610e-01 -1.80105492e-02 7.03516662e-01 ... 5.67631543e-01 3.13425422e-01 -2.27900043e-01] [ 7.25131094e-01 8.39300096e-01 7.11117983e-02 ... -1.38873845e-01 1.65319517e-01 2.56285816e-01] [ 3.53871047e-01 5.75520217e-01 3.53942156e-01 ... 5.58355987e-01 7.56628513e-01 5.14042616e-01] ... [ 5.68772376e-01 -3.20838280e-02 6.67493224e-01 ... 4.31798220e-01 2.28442177e-01 8.79477561e-01] [ 1.40678555e-01 9.65823829e-01 4.20853376e-01 ... 6.68914676e-01 4.37028497e-01 1.08887717e-01] [ 3.89078826e-01 6.23576283e-01 3.28442186e-01 ... 4.67587680e-01 4.50509757e-01 5.02903759e-01]] [[ 3.43430936e-01 3.19159150e-01 2.08661824e-01 ... 4.49552871e-02 -1.26961721e-02 5.48387527e-01] [ 5.00039756e-01 7.48811543e-01 2.63201654e-01 ... 1.79134980e-01 6.63915932e-01 3.32310110e-01] [ 3.69531155e-01 2.20996737e-01 9.28692520e-01 ... 4.57148859e-03 2.44779721e-01 5.62831223e-01] ... [ 3.82214822e-02 1.31657436e-01 1.26211017e-01 ... 4.82942641e-01 5.57764709e-01 4.99232084e-01] [-1.71026036e-01 5.78907847e-01 5.91566443e-01 ... 3.39516699e-01 8.11490864e-02 7.39380956e-01] [ 2.57911444e-01 5.51373184e-01 3.53580326e-01 ... 1.67082489e-01 5.75797200e-01 5.29064722e-02]] [[ 4.95917439e-01 2.30561122e-01 1.27892196e-01 ... 2.90143251e-01 3.01206082e-01 4.92211133e-01] [-1.13107942e-01 2.17618838e-01 3.90714139e-01 ... 1.39834389e-01 7.07992733e-01 3.05030376e-01] [ 2.91504204e-01 8.89117599e-01 -6.50850758e-02 ... 8.01383257e-01 1.15406839e-02 2.89798677e-01] ... [ 3.45210642e-01 5.63803732e-01 5.56415379e-01 ... -4.00746427e-02 1.97686777e-01 7.46026039e-01] [ 2.38750637e-01 7.00301588e-01 -3.53244995e-03 ... 3.06045055e-01 3.58834833e-01 4.59090233e-01] [ 4.23441678e-01 3.77846420e-01 2.24269137e-01 ... 5.01129508e-01 2.70961165e-01 3.05810124e-01]]] [[[ 8.16525638e-01 5.73985338e-01 -4.08733338e-02 ... 
6.04069650e-01 1.55963624e+00 5.73187172e-01] [ 3.44274968e-01 1.90495610e+00 -1.51411816e-01 ... 1.07219493e+00 -2.78718919e-01 2.51320958e+00] [ 6.34839773e-01 1.26445794e+00 1.20983219e+00 ... 5.51645041e-01 2.05673194e+00 2.88463742e-01] ... [ 1.88341022e+00 3.58941376e-01 1.10054374e+00 ... 8.89843524e-01 1.27632117e+00 1.20291150e+00] [ 1.92584860e+00 5.33462107e-01 1.33031321e+00 ... -1.15388632e+00 5.54129422e-01 2.07133099e-01] [-4.94037718e-01 5.73606372e-01 3.82287085e-01 ... 1.51612949e+00 -9.28721800e-02 -2.05527619e-01]] [[ 8.61471117e-01 1.11045040e-01 3.85407329e-01 ... 4.21590447e-01 -2.58005112e-01 1.25520813e+00] [ 8.37377608e-01 1.24509466e+00 -2.51216233e-01 ... -4.79350746e-01 7.09936991e-02 -2.13049307e-01] [ 7.77934194e-01 -2.07702100e-01 7.54699945e-01 ... 1.81859982e+00 -2.24541798e-01 9.77953434e-01] ... [ 3.75461042e-01 -7.31543973e-02 9.79687393e-01 ... -3.40846553e-02 2.73088008e-01 2.20321529e-02] [ 5.57770371e-01 2.52960116e-01 1.62085009e+00 ... 7.37383783e-01 -7.13982999e-01 2.72657961e-01] [ 3.35016511e-02 3.70070755e-01 8.63263130e-01 ... 1.67769992e+00 1.19539452e+00 -7.00057864e-01]] [[ 6.06012821e-01 1.49788424e-01 1.15113783e+00 ... 1.43842304e+00 1.79949388e-01 7.65115142e-01] [-6.81322217e-01 1.71395928e-01 -6.95834979e-02 ... 1.51267791e+00 5.19912355e-02 8.48397315e-01] [ 2.78821558e-01 1.18903780e+00 6.61357880e-01 ... 2.75058246e+00 -5.29385284e-02 8.45806658e-01] ... [-7.42121458e-01 1.16769505e+00 1.67436242e+00 ... 1.47457743e+00 9.80993450e-01 6.78598821e-01] [ 5.12083650e-01 8.71133566e-01 1.29125810e+00 ... 5.45781016e-01 8.68669689e-01 1.22354820e-01] [ 7.16204405e-01 2.22826377e-01 7.48227000e-01 ... -2.63384700e-01 1.35249600e-01 -2.49218568e-01]] ... [[ 1.01679659e+00 5.92719078e-01 9.70886275e-02 ... 1.20338106e+00 1.56209335e-01 -2.77182251e-01] [-2.39713654e-01 1.91224575e+00 -1.02203703e+00 ... 1.50679469e+00 1.08140695e+00 -1.66398621e+00] [ 3.07396621e-01 9.53682184e-01 -1.15545899e-01 ... 
1.14610565e+00 -6.72773778e-01 1.59217334e+00] ... [ 1.24140763e+00 -2.01445874e-02 -8.85935664e-01 ... 7.45822191e-01 -2.90523440e-01 1.04665712e-01] [ 8.33232999e-01 2.82080722e+00 8.37528646e-01 ... 1.73075914e+00 -6.93146110e-01 9.80382442e-01] [-5.19739270e-01 3.60723168e-01 1.08300710e+00 ... 1.05324483e+00 2.67679954e+00 7.67211378e-01]] [[-4.14524615e-01 2.07435703e+00 3.44228715e-01 ... 6.21277213e-01 -4.70855892e-01 4.98555273e-01] [ 1.97624421e+00 9.83555436e-01 8.05224895e-01 ... -6.04638517e-01 7.58263350e-01 6.72450960e-01] [-2.58313417e-01 -2.08143041e-01 3.84834826e-01 ... 3.54828298e-01 1.00837159e+00 -3.58240962e-01] ... [ 1.23235250e+00 1.29846251e+00 -2.60708541e-01 ... 6.69125617e-01 8.70124698e-01 -9.79952931e-01] [-2.29234144e-01 -7.32331097e-01 4.17606950e-01 ... -1.41187057e-01 -8.12882066e-01 -3.33694637e-01] [-3.52460355e-01 1.28008020e+00 2.09661752e-01 ... 1.71467811e-01 -2.49271486e-02 -6.83760121e-02]] [[ 4.51647311e-01 1.95719588e+00 -3.97699982e-01 ... 1.21481490e+00 -7.58766592e-01 1.69437695e+00] [ 1.98696506e+00 8.05360675e-01 1.47145760e+00 ... 2.54149556e-01 4.38856453e-01 7.16316700e-01] [ 8.09807241e-01 1.00487985e-01 1.50564206e+00 ... 1.43685484e+00 -1.17345810e+00 1.47262204e+00] ... [ 1.03492010e+00 -8.96868948e-03 -1.91090271e-01 ... 2.46314369e-02 1.21780312e+00 1.65711451e+00] [-6.53935492e-01 1.07636857e+00 -3.73058081e-01 ... -6.63448155e-01 8.19405973e-01 -4.33564842e-01] [ 1.33999729e+00 1.31026185e+00 2.16326070e+00 ... -4.63437885e-01 -1.77847064e+00 1.88388598e+00]]]] [[[[ 3.27368021e-01 1.77878991e-01 6.97340012e-01 ... 5.48890114e-01 1.17254630e-01 6.06493771e-01] [ 7.20688224e-01 7.86017299e-01 2.35193700e-01 ... 3.85847479e-01 1.26309365e-01 4.15361404e-01] [ 1.56455293e-01 4.39713478e-01 4.77018207e-01 ... 3.98471564e-01 5.48337877e-01 1.41009778e-01] ... [ 4.82321978e-01 3.75619352e-01 6.71987355e-01 ... 5.28964341e-01 3.17017317e-01 5.52575529e-01] [ 4.90072221e-01 4.43787664e-01 3.54103774e-01 ... 
7.68056393e-01 3.66656214e-01 3.02422762e-01] [ 5.46923220e-01 2.99697578e-01 5.44698298e-01 ... 1.65494680e-01 4.37728614e-01 3.28219086e-01]] [[ 7.54602492e-01 1.00014821e-01 3.91022921e-01 ... 4.98303115e-01 2.49235600e-01 2.07272679e-01] [ 5.04741371e-01 6.61281884e-01 6.39123201e-01 ... 6.33957207e-01 3.61665845e-01 5.97906351e-01] [ 3.47752452e-01 2.37135962e-01 4.28243121e-03 ... 4.48683739e-01 2.94292986e-01 7.27352723e-02] ... [ 2.98770934e-01 6.80647850e-01 5.43249309e-01 ... 4.40479308e-01 7.26394176e-01 1.54776990e-01] [ 6.80338860e-01 3.34165692e-01 1.17951818e-01 ... 7.07976699e-01 3.16034675e-01 2.27053002e-01] [ 2.83099681e-01 2.28828728e-01 2.07200378e-01 ... 8.60781729e-01 1.53233975e-01 4.83131289e-01]] [[ 4.83373553e-01 2.09314585e-01 1.96080223e-01 ... 3.09117109e-01 3.52677435e-01 2.59064823e-01] [ 3.04627806e-01 4.87498671e-01 3.17207873e-01 ... 4.44045573e-01 -1.49292737e-01 2.43457466e-01] [ 3.22859704e-01 5.56537330e-01 4.91968662e-01 ... 5.05947113e-01 4.08515334e-01 1.10157229e-01] ... [ 5.54204226e-01 4.10870284e-01 4.20269310e-01 ... 3.41253757e-01 6.19363189e-01 4.18743283e-01] [ 7.34197438e-01 3.71387750e-01 3.99012715e-01 ... 6.60229981e-01 7.08170831e-02 7.29770586e-02] [ 5.75141907e-01 5.20681083e-01 3.44867468e-01 ... 4.35390711e-01 1.18878208e-01 3.22130293e-01]] ... [[ 4.11579579e-01 7.79349133e-02 7.06497431e-01 ... 3.27196062e-01 3.04810107e-01 4.01231378e-01] [ 5.06824911e-01 7.11442232e-01 1.38159096e-01 ... 3.59717816e-01 5.32948256e-01 -4.33831960e-02] [-1.00335628e-02 3.75491321e-01 5.47491491e-01 ... 5.11157140e-02 3.62521678e-01 5.75362206e-01] ... [ 5.16124129e-01 2.64870822e-01 3.56676608e-01 ... 3.49299349e-02 6.18761539e-01 1.85559273e-01] [ 6.69108212e-01 3.51910949e-01 5.25353074e-01 ... 3.58831018e-01 4.06197347e-02 3.22008163e-01] [ 5.35858810e-01 4.68722761e-01 3.48890871e-01 ... 4.97394651e-01 -4.21428718e-02 1.54228434e-01]] [[ 6.71285033e-01 1.28127530e-01 3.95207465e-01 ... 
5.57474136e-01 2.91419208e-01 3.90623271e-01] [ 5.20513594e-01 7.51529515e-01 7.24613905e-01 ... 3.37840080e-01 4.63335782e-01 1.91121191e-01] [ 1.06714211e-01 7.12976933e-01 6.28266335e-01 ... 2.65052050e-01 5.66874087e-01 3.44756871e-01] ... [ 5.32604516e-01 8.71386051e-01 2.44752571e-01 ... 1.83404654e-01 6.51013374e-01 1.55927867e-01] [-1.56347945e-01 5.18863738e-01 2.98396319e-01 ... -5.13722794e-03 5.22583663e-01 6.15270734e-01] [ 3.28620374e-01 7.55786076e-02 4.83730912e-01 ... 3.91070276e-01 4.74882089e-02 -2.74158530e-02]] [[ 6.41178548e-01 2.97180235e-01 5.51992357e-01 ... 5.10239720e-01 5.65384388e-01 2.20082074e-01] [ 2.08515182e-01 6.42303407e-01 2.56228685e-01 ... 9.74861681e-01 5.62882006e-01 3.80812675e-01] [ 1.27718002e-01 2.37786286e-02 4.81942892e-01 ... 7.68349588e-01 5.16266227e-01 2.53227413e-01] ... [ 4.66038287e-01 3.68875384e-01 6.67391539e-01 ... -9.20896381e-02 4.73322451e-01 2.35227346e-01] [ 4.89365637e-01 8.56910229e-01 6.76021874e-01 ... 6.36160195e-01 2.59330720e-01 3.51128370e-01] [ 3.37434173e-01 3.93729389e-01 3.28198701e-01 ... 1.96323305e-01 7.47117460e-01 4.69438136e-01]]] [[[ 1.96495283e+00 8.42151046e-01 -3.51487708e+00 ... 2.12238073e+00 2.27787042e+00 2.43810162e-01] [-1.57175720e+00 1.54555213e+00 1.16333127e+00 ... 1.11437571e+00 1.13939381e+00 2.45748949e+00] [ 1.71226394e+00 2.98069298e-01 1.32975721e+00 ... 8.05489421e-02 2.24796012e-01 -1.03860974e+00] ... [ 1.17939746e+00 3.42823291e+00 1.89734507e+00 ... 8.65805089e-01 1.66223571e-01 -1.71266162e+00] [-1.97434425e-01 -2.23886386e-01 1.57104146e+00 ... -1.37241638e+00 1.51467836e+00 1.94487321e+00] [ 2.01960111e+00 1.23603046e+00 4.22549218e-01 ... 4.40899462e-01 2.85931611e+00 -4.69859511e-01]] [[ 7.28227735e-01 1.38817978e+00 -1.28749275e+00 ... -7.42247477e-02 3.49476039e-01 -3.77551802e-02] [ 9.22607005e-01 2.10507464e+00 1.55349314e+00 ... 2.27188870e-01 3.65697801e-01 1.22544003e+00] [ 3.62353981e-01 9.90112841e-01 5.13788164e-01 ... 
8.91794562e-01 -1.12596679e+00 3.28030705e+00] ... [ 1.71259511e+00 2.06531167e+00 -1.82328984e-01 ... 3.23864371e-01 9.79995966e-01 9.31200743e-01] [ 5.69049492e-02 -2.68814659e+00 3.46847624e-02 ... 1.78094469e-02 1.22049880e+00 2.73646355e+00] [ 5.21622598e-01 2.30720949e+00 -1.59878448e-01 ... 1.70962071e+00 5.02054334e-01 4.97840703e-01]] [[ 1.12657464e+00 1.33366191e+00 1.51128531e+00 ... 8.85537148e-01 1.48672187e+00 9.89344060e-01] [-1.10961604e+00 2.23173046e+00 1.67261636e+00 ... 1.58517897e+00 -5.04301190e-01 -1.36668956e+00] [-1.49419093e+00 2.29206514e+00 2.42535070e-01 ... 5.64875901e-01 -2.67731100e-01 3.38929009e+00] ... [-7.87754655e-01 1.99720871e+00 -5.54276742e-02 ... -5.45870662e-01 6.34939969e-01 1.28706467e+00] [ 1.03459358e+00 -2.27887726e+00 -5.34965038e-01 ... -1.39705136e-01 1.84749436e+00 -1.68614969e-01] [ 2.00643349e+00 2.11328721e+00 1.57835886e-01 ... 1.51062727e+00 6.57290339e-01 2.50080153e-02]] ... [[ 2.09978056e+00 6.46381378e-02 -1.45692074e+00 ... -1.94079566e+00 3.11817765e-01 7.08470523e-01] [ 2.33543706e+00 2.43429661e+00 6.19279444e-01 ... 3.90623987e-01 2.56820679e-01 -2.00101209e+00] [ 3.96128833e-01 8.71143565e-02 4.40891600e+00 ... 2.93921399e+00 -1.76597703e+00 -5.06541908e-01] ... [ 7.26022065e-01 1.23971903e+00 -9.00857210e-01 ... 2.34126496e+00 9.22815561e-01 3.52283299e-01] [ 3.08140922e+00 -2.97150612e-01 2.06878686e+00 ... -2.66022861e-01 1.96740270e+00 1.14525542e-01] [-1.74872530e+00 2.18999696e+00 2.18191671e+00 ... 1.49465191e+00 3.98708153e+00 9.85963762e-01]] [[ 1.31168246e+00 -1.40999305e+00 -1.47878289e+00 ... 2.23593450e+00 3.58477056e-01 1.48206198e+00] [ 1.55842566e+00 -8.62100780e-01 3.72859202e-02 ... 7.87115917e-02 -2.75517423e-02 2.53529876e-01] [ 1.43963838e+00 -6.53479278e-01 3.25672245e+00 ... -9.34469178e-02 4.79668111e-01 -5.36914110e-01] ... [ 2.99059302e-01 1.19698954e+00 3.12258810e-01 ... 5.85404396e-01 8.46052051e-01 1.76254559e+00] [-9.67102528e-01 -1.97605395e+00 1.46719348e+00 ... 
-1.80727041e+00 -4.77140516e-01 -4.68903989e-01] [ 4.68203783e+00 -8.42327237e-01 1.56281722e+00 ... 3.41697454e+00 3.53868812e-01 2.26467550e-01]] [[ 1.07676482e+00 6.64749205e-01 -5.22283018e-02 ... -6.19347692e-01 9.29606438e-01 1.81786311e+00] [ 1.82357013e+00 -8.78909469e-01 2.20377398e+00 ... -9.18340147e-01 6.16772413e-01 1.23884523e+00] [ 7.43739009e-01 5.13375401e-01 1.73014462e+00 ... 8.96976352e-01 1.77534747e+00 1.77368605e+00] ... [ 2.11690331e+00 -1.90823948e+00 2.40953660e+00 ... 1.76104045e+00 1.07354152e+00 1.16846055e-01] [-8.11070204e-03 -1.48392189e+00 2.50069165e+00 ... -9.05273080e-01 2.87829250e-01 1.61099017e-01] [ 1.57658970e+00 1.14515090e+00 -1.46519530e+00 ... 1.40550166e-01 1.95852840e+00 1.01751065e+00]]] [[[ 2.82096553e+00 -2.11844407e-02 9.67415392e-01 ... 1.89853418e+00 4.89175200e-01 2.08968472e+00] [ 2.21113968e+00 1.45616412e+00 -1.05906808e+00 ... 1.34007490e+00 2.66006279e+00 1.02888060e+00] [ 2.09664106e+00 2.05496430e+00 2.15619493e+00 ... 1.43944943e+00 3.10311055e+00 9.68659043e-01] ... [ 1.08962524e+00 1.83414245e+00 9.83401358e-01 ... 1.78594172e+00 2.06184530e+00 1.86322212e+00] [ 2.96462560e+00 9.89588678e-01 1.64420247e+00 ... 1.45425880e+00 1.02173138e+00 1.64922059e+00] [ 8.69074821e-01 2.79349327e-01 2.12694812e+00 ... 7.92037249e-01 1.30813599e+00 4.11548704e-01]] [[ 6.88168466e-01 1.41363168e+00 1.49921107e+00 ... 1.12412727e+00 1.20073986e+00 1.70941174e+00] [ 2.14870238e+00 2.14329100e+00 7.91535735e-01 ... 1.50356936e+00 3.06739450e-01 1.52907944e+00] [ 5.26233613e-01 -2.57565737e-01 1.87983632e+00 ... 2.20462608e+00 2.25992846e+00 1.38314176e+00] ... [ 3.78536940e-01 4.34288800e-01 1.36076164e+00 ... 1.90425074e+00 1.46258271e+00 2.58497524e+00] [ 6.66818678e-01 8.68671954e-01 1.80938530e+00 ... 1.32377589e+00 1.76171708e+00 2.55087757e+00] [ 1.12676013e+00 9.90534902e-01 5.67953110e-01 ... -1.55697092e-01 1.85849297e+00 1.59067881e+00]] [[ 1.97299528e+00 1.12065172e+00 2.23903489e+00 ... 
1.15929401e+00 2.25891995e+00 2.35229540e+00] [ 1.50339532e+00 1.72845864e+00 6.18976772e-01 ... 9.15403366e-01 -1.98980406e-01 1.27488017e+00] [ 1.33158958e+00 1.90410638e+00 1.47417891e+00 ... 2.28062487e+00 1.22617984e+00 2.67818809e+00] ... [ 2.28615618e+00 3.84216726e-01 9.03246164e-01 ... 7.03956366e-01 1.32615733e+00 7.45343506e-01] [ 2.03103423e+00 1.87550700e+00 6.10329688e-01 ... 2.97742343e+00 1.32454705e+00 1.70354191e-02] [-8.01221013e-01 8.59059632e-01 1.44042623e+00 ... 4.74568784e-01 1.70071089e+00 -1.79310769e-01]] ... [[ 7.14635253e-01 1.33282948e+00 1.44616246e+00 ... 1.59666681e+00 1.05954504e+00 2.72673655e+00] [ 1.09325731e+00 1.56927705e+00 -3.31626266e-01 ... 7.75048882e-02 5.34972608e-01 3.41037011e+00] [ 8.76900733e-01 2.12898111e+00 9.67936039e-01 ... 1.31391656e+00 -5.15051007e-01 2.10091591e+00] ... [ 1.05050063e+00 1.17492354e+00 2.15758657e+00 ... 7.42220163e-01 1.04757071e+00 1.60355642e-01] [ 1.15701890e+00 7.48298228e-01 1.44142151e+00 ... -4.34653983e-02 8.22508395e-01 3.26847386e+00] [ 1.05245435e+00 7.70644307e-01 1.50027418e+00 ... 2.51219106e+00 6.31238341e-01 2.42146826e+00]] [[ 6.70318604e-01 2.28315163e+00 6.64080858e-01 ... 1.87459564e+00 1.74261475e+00 1.11525965e+00] [ 6.88493490e-01 1.49189603e+00 1.01112461e+00 ... 1.62014246e+00 1.15559682e-01 2.26688290e+00] [ 2.88295865e+00 1.29251945e+00 2.87259698e+00 ... 2.08430481e+00 1.05378079e+00 9.15273368e-01] ... [ 1.54428971e+00 1.38850462e+00 2.71625113e+00 ... 1.36662138e+00 2.34888530e+00 1.38883150e+00] [-1.66160703e-01 2.99940795e-01 9.12507713e-01 ... 2.00710130e+00 2.39580941e+00 2.41816092e+00] [ 1.30092531e-01 2.47656727e+00 2.37972450e+00 ... 1.06524682e+00 1.26872158e+00 4.88735288e-01]] [[-1.12059275e-02 4.61535960e-01 1.24152708e+00 ... 3.61321867e-01 1.67462423e-01 2.07076740e+00] [ 3.51578736e+00 1.07348911e-01 9.99011874e-01 ... 5.88127196e-01 1.23071349e+00 2.31850076e+00] [ 7.83256531e-01 6.35099486e-02 9.65579867e-01 ... 
9.49374676e-01 1.66333035e-01 1.13289356e+00] ... [ 1.82185733e+00 1.53206420e+00 6.85960114e-01 ... 9.10106778e-01 1.55093205e+00 2.48441839e+00] [ 2.72257614e+00 4.65556949e-01 -4.98989709e-02 ... 4.28084075e-01 1.83316541e+00 4.92774457e-01] [ 2.79504728e+00 5.84288180e-01 2.08395934e+00 ... 6.57169819e-01 7.86584795e-01 2.69994199e-01]]] [[[ 7.69390345e-01 6.87776208e-01 7.28457987e-01 ... 6.72646880e-01 6.92748904e-01 7.15658963e-01] [ 6.12738669e-01 7.16920197e-01 4.65845287e-01 ... 6.72574103e-01 7.64309645e-01 5.89865148e-01] [ 7.39633083e-01 7.47031629e-01 7.08434403e-01 ... 5.80620527e-01 6.68800533e-01 8.35955381e-01] ... [ 6.66900456e-01 7.44995117e-01 6.95569754e-01 ... 8.24285686e-01 5.66076756e-01 6.66623533e-01] [ 6.70428336e-01 6.09513819e-01 7.92217851e-01 ... 7.20686853e-01 8.44556749e-01 6.74438834e-01] [ 7.20782876e-01 7.50852287e-01 8.60818267e-01 ... 7.75634468e-01 8.74460638e-01 8.10769200e-01]] [[ 6.77872777e-01 8.80449057e-01 7.22922266e-01 ... 7.08796024e-01 7.25589156e-01 8.46012235e-01] [ 7.67157793e-01 8.00769687e-01 8.19475770e-01 ... 5.81048787e-01 7.22136676e-01 7.82380342e-01] [ 7.07976162e-01 6.54026031e-01 6.81418180e-01 ... 7.93822885e-01 7.08077312e-01 6.69821441e-01] ... [ 6.62998796e-01 6.77835703e-01 8.20745707e-01 ... 8.12962294e-01 6.67296112e-01 5.75886250e-01] [ 7.57927716e-01 6.75087154e-01 7.45325506e-01 ... 8.25200737e-01 7.03065753e-01 7.14313865e-01] [ 6.69186234e-01 6.97727680e-01 6.48369849e-01 ... 7.87624419e-01 6.79583371e-01 7.25423455e-01]] [[ 7.61457026e-01 7.08926082e-01 8.28614771e-01 ... 7.33281553e-01 7.02120185e-01 6.44831955e-01] [ 6.47983909e-01 6.37710392e-01 6.87553227e-01 ... 7.45554566e-01 7.41887927e-01 7.80683339e-01] [ 8.07936966e-01 6.47504508e-01 7.29414344e-01 ... 8.24597657e-01 7.35212922e-01 7.19510913e-01] ... [ 6.67085826e-01 6.93672359e-01 7.67779946e-01 ... 7.34355390e-01 7.05326557e-01 6.90852642e-01] [ 7.20899701e-01 7.86560297e-01 7.30819881e-01 ... 
8.21866333e-01 7.22333074e-01 7.57447481e-01] [ 7.98670471e-01 6.32749736e-01 8.16814840e-01 ... 7.81943560e-01 5.48756540e-01 5.60540080e-01]] ... [[ 6.50687873e-01 7.66392887e-01 7.45773017e-01 ... 7.25708127e-01 7.49357760e-01 6.01544380e-01] [ 6.85931265e-01 7.43562460e-01 7.47834980e-01 ... 6.76426828e-01 7.45109618e-01 6.24831498e-01] [ 6.66852891e-01 7.91554868e-01 4.76581663e-01 ... 6.76238596e-01 6.79471314e-01 5.12616456e-01] ... [ 5.82279980e-01 6.14188671e-01 6.60124779e-01 ... 7.35055447e-01 7.31370449e-01 8.12208593e-01] [ 7.22714961e-01 7.06146598e-01 6.27084434e-01 ... 7.41949022e-01 6.28637969e-01 7.65488386e-01] [ 7.30167031e-01 7.71082699e-01 8.21108699e-01 ... 6.64757311e-01 8.70429039e-01 8.62174392e-01]] [[ 6.67497218e-01 8.42377067e-01 7.00940609e-01 ... 7.69288599e-01 5.14305472e-01 7.60363936e-01] [ 6.76943123e-01 8.12845945e-01 5.67835152e-01 ... 7.02948451e-01 7.31661081e-01 7.40658462e-01] [ 6.90739810e-01 7.78625011e-01 8.16781700e-01 ... 6.75275803e-01 7.50816762e-01 7.42737055e-01] ... [ 7.19590485e-01 7.21861005e-01 7.45630205e-01 ... 6.67827070e-01 8.10644329e-01 6.62516475e-01] [ 7.88373590e-01 5.96885681e-01 5.96508086e-01 ... 5.15521765e-01 7.27222800e-01 5.83313823e-01] [ 6.16266966e-01 8.20057511e-01 7.03247547e-01 ... 6.09159708e-01 6.18235588e-01 6.18904293e-01]] [[ 6.47652566e-01 7.45201588e-01 7.69569933e-01 ... 5.94072461e-01 4.58921731e-01 7.21006632e-01] [ 7.70813048e-01 7.80461431e-01 6.77341461e-01 ... 7.12740242e-01 6.83862448e-01 6.64204240e-01] [ 7.30493367e-01 7.19445586e-01 7.40395010e-01 ... 7.78084159e-01 4.85120058e-01 5.73613882e-01] ... [ 6.22104704e-01 5.91074347e-01 6.27439439e-01 ... 6.72384024e-01 6.23225152e-01 7.10419893e-01] [ 6.16640151e-01 7.66896427e-01 7.93001831e-01 ... 7.76709974e-01 7.40046740e-01 8.75234425e-01] [ 8.46760452e-01 8.91023099e-01 5.57947218e-01 ... 7.86755741e-01 9.11426425e-01 6.03637755e-01]]] [[[ 3.36601347e-01 2.62656778e-01 2.91082054e-01 ... 
5.17431438e-01 9.17854548e-01 6.85720980e-01] [ 5.89595854e-01 2.53638983e-01 5.29352367e-01 ... 3.71953279e-01 1.98475808e-01 6.37164056e-01] [ 3.97061646e-01 2.03993231e-01 1.19010381e-01 ... 5.67778349e-01 2.03239471e-01 2.63459176e-01] ... [ 8.62319693e-02 6.03790283e-01 5.10532200e-01 ... -4.44476217e-01 4.10578698e-01 4.96400863e-01] [ 2.30891287e-01 1.45028830e-01 2.03728080e-02 ... 2.55515546e-01 4.66031879e-01 1.11838184e-01] [ 7.37962365e-01 2.03052759e-01 3.74729574e-01 ... 3.44600946e-01 6.11953020e-01 2.99477637e-01]] [[ 2.29612768e-01 7.44249582e-01 5.07734954e-01 ... 4.47196811e-02 6.16764843e-01 5.11256993e-01] [ 6.29466116e-01 9.76011217e-01 7.27680385e-01 ... 3.84986997e-01 3.64945412e-01 6.62188709e-01] [-2.24547490e-01 1.71973586e-01 4.40333337e-01 ... 5.74015453e-02 5.06899655e-01 -1.80215180e-01] ... [ 4.65181977e-01 1.88259825e-01 3.34239841e-01 ... 1.49500459e-01 4.85015959e-01 3.61421436e-01] [ 9.89901543e-01 5.74576795e-01 5.32936037e-01 ... 5.51764071e-01 3.48854139e-02 4.80908602e-01] [ 4.35224533e-01 -8.45336691e-02 1.32673904e-01 ... 5.66402495e-01 5.72939038e-01 4.08856645e-02]] [[ 5.53155303e-01 9.19651330e-01 1.71223015e-01 ... 3.73455197e-01 -2.18756255e-02 6.14748240e-01] [ 6.95038617e-01 9.85118032e-01 4.74395037e-01 ... 3.16917837e-01 4.22198147e-01 -1.88359112e-01] [ 6.20541573e-01 6.14183009e-01 -2.65771933e-02 ... 4.52715516e-01 2.83098042e-01 6.33224174e-02] ... [ 5.10988355e-01 1.91049308e-01 1.77688107e-01 ... 6.13216221e-01 8.43562409e-02 9.05112252e-02] [ 1.02065966e-01 4.70485628e-01 4.02718842e-01 ... 4.39760119e-01 1.08840156e+00 9.70053673e-02] [ 3.63051623e-01 3.63162905e-01 5.97115040e-01 ... 1.13981895e-01 6.06943667e-01 4.89250422e-01]] ... [[ 4.53015089e-01 6.19858265e-01 2.05807105e-01 ... 1.91510066e-01 6.83907807e-01 2.95679599e-01] [ 3.71869743e-01 2.66362756e-01 1.97539151e-01 ... 4.73818816e-02 2.24241719e-01 3.65217596e-01] [ 2.64364123e-01 -3.43307434e-03 4.39331234e-01 ... 
1.25865057e-01 5.25337636e-01 3.46924394e-01] ... [ 8.01397085e-01 3.77367347e-01 4.35984790e-01 ... 7.79789329e-01 4.41262096e-01 1.44275323e-01] [ 2.64513999e-01 5.26799083e-01 2.83030361e-01 ... -6.47715256e-02 6.57259643e-01 4.42840338e-01] [ 6.40667379e-01 3.42186064e-01 7.97962368e-01 ... 4.53169137e-01 3.40444922e-01 1.89294145e-01]] [[ 5.92275739e-01 1.80680752e-01 2.29885243e-02 ... 1.56247139e-01 3.94506007e-01 5.55992246e-01] [ 8.35449219e-01 -1.60167620e-01 1.17296137e-01 ... 5.23955166e-01 3.27466369e-01 2.63649970e-01] [ 4.24823582e-01 1.28632307e-01 5.82170248e-01 ... 4.84471858e-01 2.75579959e-01 4.70015764e-01] ... [ 9.18256819e-01 1.77980512e-01 2.87800580e-01 ... 3.18153143e-01 3.17807108e-01 5.13794780e-01] [ 7.88635015e-02 7.35521793e-01 5.84980607e-01 ... 4.19417024e-02 -4.22157943e-02 6.30703866e-01] [ 7.82358885e-01 2.87273258e-01 1.44508615e-01 ... 2.45101005e-01 4.51087594e-01 5.27234316e-01]] [[ 6.65668547e-02 4.74030226e-01 2.49032885e-01 ... -1.40610963e-01 2.86453605e-01 4.09639418e-01] [-1.27735674e-01 4.23547655e-01 6.98941648e-01 ... 3.84521991e-01 8.17207396e-02 3.65057468e-01] [ 5.54781139e-01 5.12517869e-01 3.10826063e-01 ... 6.60297930e-01 1.05920896e-01 3.62429321e-01] ... [ 8.54796588e-01 3.49737912e-01 7.57960200e-01 ... 1.34017840e-01 5.06133139e-01 2.79282302e-01] [-2.20614225e-01 4.89080578e-01 4.84699905e-01 ... 2.37953216e-01 9.46362495e-01 4.13388282e-01] [ 4.08704281e-01 1.46372885e-01 7.55188167e-01 ... 1.44966975e-01 1.15662642e-01 6.94259778e-02]]] [[[-8.04581285e-01 1.03204213e-01 9.53078046e-02 ... 1.51260424e+00 9.81069505e-01 4.79528606e-01] [-7.83631206e-01 6.39998496e-01 9.18113172e-01 ... 3.74026895e-01 -3.74524117e-01 3.96838218e-01] [ 1.46934772e+00 6.83529198e-01 9.83323276e-01 ... -2.30765387e-01 1.27210808e+00 -1.50779331e+00] ... [ 1.42374051e+00 -4.27923687e-02 -2.13524386e-01 ... 1.42550349e+00 -9.08791482e-01 2.00559065e-01] [ 2.59181615e-02 1.65745246e+00 1.98407841e+00 ... 
-1.42646283e-02 1.01091242e+00 1.82105982e+00] [ 2.82754391e-01 1.18000102e+00 -1.20388716e-01 ... -2.16390342e-01 1.33088911e+00 8.79484177e-01]] [[ 4.57046449e-01 4.34651941e-01 6.73424244e-01 ... 1.42291224e+00 7.85108447e-01 9.08646956e-02] [ 9.84637201e-01 5.90869367e-01 8.85836065e-01 ... 1.27341545e+00 -7.22867727e-01 3.28488141e-01] [ 1.08456516e+00 -3.19560379e-01 1.75719589e-01 ... 9.00104880e-01 1.32506788e+00 3.35369140e-01] ... [ 1.78367448e+00 -7.35489428e-02 4.24363501e-02 ... 1.34495556e+00 1.23004019e-01 2.89364576e-01] [ 7.10583448e-01 1.62062243e-01 -5.32824099e-01 ... 1.10228062e+00 5.91347933e-01 1.12296224e+00] [ 2.15460360e-01 1.75032750e-01 2.21402144e+00 ... -1.07566953e+00 -3.22672963e-01 1.00291955e+00]] [[ 1.38567829e+00 6.05125189e-01 1.65183353e+00 ... 1.04743505e+00 1.44791245e+00 1.16522753e+00] [ 4.73066449e-01 -7.40545928e-01 6.94107950e-01 ... 1.06559074e+00 1.99952677e-01 2.45230365e+00] [ 9.07429814e-01 1.05948234e+00 8.04542005e-01 ... -1.10154971e-01 3.23339254e-01 -9.33828115e-01] ... [ 5.93928576e-01 4.78471875e-01 -1.19438767e+00 ... 1.70435035e+00 -4.42720987e-02 8.75928521e-01] [-2.50591785e-01 8.02634954e-01 6.58658624e-01 ... 6.39276743e-01 -1.44229636e-01 2.08260751e+00] [ 8.73741388e-01 4.98310626e-01 1.31468666e+00 ... 3.21463943e-01 6.56686664e-01 1.49976790e+00]] ... [[-1.30564585e-01 7.18357325e-01 3.05263489e-01 ... 1.55920839e+00 2.83315301e-01 1.38169599e+00] [ 7.49560177e-01 4.15717930e-01 8.94203782e-01 ... 1.05323210e-01 5.34767628e-01 2.22069010e-01] [-5.45603216e-01 1.23304999e+00 8.58311236e-01 ... 1.00882518e+00 6.21343136e-01 1.86616108e-01] ... [ 1.16987979e+00 7.72696674e-01 6.47752821e-01 ... -5.72028577e-01 1.49030662e+00 -5.27666152e-01] [ 8.14019203e-01 6.22967422e-01 1.04469740e+00 ... 2.46076718e-01 7.65527844e-01 1.26953375e+00] [ 4.48416501e-01 5.96472859e-01 3.88585925e-01 ... 3.62488627e-01 9.57479298e-01 1.05136549e+00]] [[ 1.41848302e+00 2.12241459e+00 1.08222854e+00 ... 
-7.30603993e-01 -6.44131601e-01 1.13750231e+00] [ 7.83275723e-01 -1.89233884e-01 2.39493585e+00 ... 1.14160204e+00 8.85605812e-01 1.17725885e+00] [-4.03420955e-01 -7.54913211e-01 1.04861331e+00 ... 4.02031928e-01 1.11935067e+00 7.08856344e-01] ... [-5.57755411e-01 8.34794164e-01 -3.92697126e-01 ... 1.53623652e+00 1.36603570e+00 -6.97666287e-01] [-5.16905367e-01 -7.16893375e-01 6.52524590e-01 ... -5.76085672e-02 -5.96546769e-01 -3.18491220e-01] [ 9.71513510e-01 -5.22411913e-02 5.05552351e-01 ... 7.04111040e-01 8.53547573e-01 8.51814926e-01]] [[ 3.45906943e-01 -8.78587186e-01 1.79392368e-01 ... 1.03738987e+00 1.02964258e+00 7.96619207e-02] [ 5.24361432e-01 1.34690630e+00 1.76577103e+00 ... -8.28072950e-02 3.90398234e-01 9.15463686e-01] [ 1.29764872e-02 7.87597835e-01 -3.09714288e-01 ... -1.39352784e-01 1.54038131e+00 5.77278674e-01] ... [ 5.59113204e-01 6.65323734e-01 1.71786642e+00 ... 6.27018154e-01 1.33684897e+00 2.19479978e-01] [ 1.59380913e+00 5.03318429e-01 -4.03050065e-01 ... 1.12297043e-01 -1.47122368e-01 1.98569405e+00] [-9.93481517e-01 1.50777638e+00 1.00321271e-01 ... -1.13209657e-01 5.86025640e-02 1.15320909e+00]]]] [[[[ 1.22089043e-01 7.11325884e-01 3.27458858e-01 ... 5.86285628e-02 1.65362850e-01 5.14697194e-01] [ 2.53899932e-01 6.91103458e-01 4.54595178e-01 ... 7.35963523e-01 3.21800888e-01 3.25097591e-01] [ 1.13631047e-01 2.63179421e-01 3.41209441e-01 ... 1.66862771e-01 1.89032957e-01 3.14727098e-01] ... [ 4.31738675e-01 9.40046012e-01 5.56351602e-01 ... 4.50219631e-01 5.80477715e-01 8.48273933e-01] [ 8.77933264e-01 5.15423238e-01 2.96532929e-01 ... 4.62923080e-01 6.79653168e-01 3.99606347e-01] [ 3.01515400e-01 1.46270707e-01 5.45449972e-01 ... 4.18758750e-01 8.93786371e-01 7.68830597e-01]] [[-5.63552640e-02 6.67604029e-01 1.53751448e-01 ... 2.35993527e-02 3.52262676e-01 4.29707110e-01] [ 3.36698145e-01 4.32332963e-01 1.52839556e-01 ... 7.03259706e-01 4.38934475e-01 9.91733015e-01] [ 1.92412719e-01 2.30242252e-01 7.26281226e-01 ... 
3.45293015e-01 3.48970115e-01 6.75378919e-01] ... [ 4.06233996e-01 4.20098424e-01 6.10527873e-01 ... 4.00153190e-01 1.82892770e-01 6.69414282e-01] [ 2.96743810e-01 5.05784452e-01 2.81844437e-01 ... 4.71419603e-01 3.84614557e-01 2.50718325e-01] [ 3.71192396e-02 5.70617139e-01 1.10319100e-01 ... 1.16917804e-01 6.46240354e-01 3.31403702e-01]] [[ 6.64017051e-02 3.92022848e-01 3.74597609e-01 ... 1.70882925e-01 2.77917325e-01 3.90654415e-01] [ 6.07911229e-01 3.87858957e-01 2.79388338e-01 ... 5.45826137e-01 4.18440908e-01 3.37852478e-01] [ 2.89542079e-01 3.28259975e-01 3.30906570e-01 ... -2.39360649e-02 4.31859940e-01 2.43217051e-01] ... [ 1.25678098e-02 3.34726810e-01 5.18021956e-02 ... 3.93927872e-01 6.19393528e-01 -1.73172820e-02] [ 3.21741998e-01 2.48505980e-01 7.87935793e-01 ... 2.51238883e-01 4.21856344e-01 -1.19290724e-01] [ 2.71691412e-01 2.70352572e-01 5.29344857e-01 ... 3.80094886e-01 4.77613598e-01 2.48193309e-01]] ... [[ 3.00046921e-01 5.80083907e-01 3.13351184e-01 ... 2.58604646e-01 5.24921238e-01 3.97338986e-01] [ 1.61807209e-01 6.71190381e-01 3.23081076e-01 ... 9.40502807e-02 3.06285471e-01 3.94106686e-01] [ 2.20156908e-01 3.96486640e-01 7.12521493e-01 ... 5.76759219e-01 4.61219311e-01 3.74534428e-01] ... [ 5.70506275e-01 5.44999301e-01 4.46356498e-02 ... 6.00448310e-01 3.76856536e-01 3.81815046e-01] [ 2.90664166e-01 4.80669558e-01 5.43585002e-01 ... 1.16522662e-01 1.39599502e-01 1.71031103e-01] [ 6.75912559e-01 3.02820027e-01 7.32387125e-01 ... 2.94268638e-01 4.43456650e-01 2.63089627e-01]] [[ 5.09653091e-01 7.68324733e-01 6.12742305e-01 ... 3.43533397e-01 4.45212156e-01 2.39311337e-01] [ 4.85358566e-01 3.33834738e-01 4.48037744e-01 ... 3.73475879e-01 5.86371541e-01 3.23483348e-01] [ 1.87932983e-01 6.43171549e-01 2.87441313e-01 ... 5.72231829e-01 3.86721939e-01 3.50588411e-01] ... [ 2.62811273e-01 1.71102837e-01 6.00238025e-01 ... 3.79766017e-01 5.14467061e-01 4.54903096e-01] [ 4.82886523e-01 4.84462082e-01 1.99343964e-01 ... 
3.40365082e-01 1.03269316e-01 2.63315171e-01] [ 1.86028644e-01 1.70448095e-01 5.70040524e-01 ... 1.67860724e-02 4.22623992e-01 4.58037436e-01]] [[ 5.01315176e-01 5.29986799e-01 6.00398064e-01 ... 3.93335432e-01 5.51171958e-01 4.88582551e-02] [ 6.87322199e-01 4.68281209e-01 3.03829134e-01 ... 3.99416447e-01 5.90784848e-01 6.84086800e-01] [ 1.97848275e-01 4.17920798e-01 -2.51033548e-02 ... 2.04727650e-01 1.40343830e-01 3.88485759e-01] ... [ 5.30527115e-01 5.25262713e-01 2.70461049e-02 ... 1.16545951e+00 -7.55297095e-02 3.90846997e-01] [ 4.07817453e-01 6.12700224e-01 6.25080466e-01 ... 2.91925102e-01 1.54881299e-01 2.08959684e-01] [ 2.20468819e-01 4.43086684e-01 3.97701710e-01 ... 9.44936752e-01 5.86674929e-01 6.38351858e-01]]] [[[ 7.78707266e-02 -9.53910291e-01 3.16223431e+00 ... 1.04912865e+00 4.21841562e-01 -8.89668941e-01] [ 7.58552784e-03 8.35507452e-01 3.00147605e+00 ... 1.39931488e+00 1.65135598e+00 2.63110280e+00] [-9.70546126e-01 1.15753151e-01 1.69338679e+00 ... -3.64417225e-01 2.69763947e+00 2.53113937e+00] ... [ 1.52325487e+00 1.47592813e-01 2.99540830e+00 ... -4.32960302e-01 -3.82165134e-01 -1.95479298e+00] [ 1.34940898e+00 1.79437411e+00 1.77759254e+00 ... -7.67319679e-01 1.67836225e+00 1.20959222e+00] [ 2.93431354e+00 2.04190865e-01 3.20251083e+00 ... -1.19748318e+00 1.17347455e+00 5.68235993e-01]] [[ 4.23807055e-01 8.35787177e-01 2.33267879e+00 ... -1.41280830e-01 -1.33965194e+00 1.43380156e-02] [ 6.66418314e-01 3.88999081e+00 2.71984053e+00 ... 9.69776690e-01 8.42559695e-01 2.50125647e+00] [ 4.40116912e-01 1.08135939e+00 5.66733003e-01 ... 1.45642853e+00 2.81364083e+00 1.06036246e+00] ... [ 1.53608036e+00 1.21806836e+00 1.12154377e+00 ... 8.54033753e-02 -1.50456190e+00 2.20863032e+00] [ 8.66056740e-01 6.93579614e-01 1.10405898e+00 ... -4.29545283e-01 6.34417906e-02 8.49901676e-01] [ 1.86372268e+00 3.54052949e+00 1.81739345e-01 ... 2.07250643e+00 -6.90707490e-02 -4.48787864e-03]] [[-4.77345437e-02 -1.25920504e-01 7.03637123e-01 ... 
1.47398245e+00 -3.69941652e-01 3.19022745e-01] [-1.30109778e-02 6.45375550e-01 9.73877072e-01 ... 1.55851901e+00 1.76485431e+00 1.06049788e+00] [-1.20644975e+00 8.41787755e-01 -6.90544307e-01 ... 1.95281708e+00 1.51787579e+00 2.18823290e+00] ... [ 2.60957503e+00 9.62191582e-01 1.52219319e+00 ... 1.38775304e-01 5.47970474e-01 -1.30178189e+00] [ 4.83590424e-01 2.61597228e+00 1.87385154e+00 ... -1.24176085e+00 5.05162179e-01 2.06984973e+00] [-1.82836843e+00 1.07991263e-01 3.73950064e-01 ... 3.60016733e-01 2.38409233e+00 5.18063545e-01]] ... [[ 4.24928695e-01 1.49064481e+00 -5.20821512e-01 ... 8.73193920e-01 2.34319735e+00 -1.84658575e+00] [ 1.50413287e+00 2.53841329e+00 9.07163680e-01 ... 1.17223907e+00 1.97772348e+00 3.42800903e+00] [ 7.98370719e-01 -1.75734365e+00 1.69320750e+00 ... 2.91678667e+00 -5.58581412e-01 -1.51609707e+00] ... [ 1.63255405e+00 3.89637709e-01 2.25476170e+00 ... -6.67838573e-01 1.32573083e-01 6.15276515e-01] [ 6.70698643e-01 2.25979835e-01 -5.00183165e-01 ... 2.10834908e+00 2.47703099e+00 -2.93747544e-01] [ 2.57069659e+00 -2.85790420e+00 7.91170001e-01 ... 2.39389896e+00 1.82387161e+00 4.06092167e+00]] [[ 6.58996522e-01 -5.40556610e-01 3.05097431e-01 ... -2.78985643e+00 2.94582033e+00 3.65883052e-01] [ 1.24869108e+00 -7.02047348e-02 1.81412697e+00 ... 1.97298586e-01 2.10154772e-01 3.09724283e+00] [ 2.89779395e-01 1.68037117e+00 -3.07574391e-01 ... -6.28221571e-01 5.99233150e-01 1.32297939e-02] ... [ 1.56300199e+00 1.41086257e+00 -8.93537343e-01 ... 1.06851184e+00 3.40394783e+00 2.21766114e-01] [-1.03625548e+00 3.55579227e-01 9.70679462e-01 ... 1.40915647e-01 3.76559734e-01 2.16684437e+00] [-3.28599304e-01 4.40421724e+00 2.33787131e+00 ... -2.91301668e-01 -2.79278660e+00 2.85886335e+00]] [[ 2.73746848e+00 1.42438161e+00 -4.58156288e-01 ... -2.53674895e-01 1.59360528e+00 2.18327832e+00] [ 4.48563218e-01 4.02461815e+00 9.76883054e-01 ... 1.68445206e+00 5.19523442e-01 9.56609964e-01] [-2.06978130e+00 1.75447389e-02 3.17887521e+00 ... 
2.64510870e-01 -4.27472182e-02 3.05893213e-01] ... [ 2.25354218e+00 2.61832929e+00 2.39146471e+00 ... 5.62492497e-02 1.82129586e+00 7.94141352e-01] [ 1.19366479e+00 2.62138605e+00 3.05337697e-01 ... 1.38166356e+00 1.63164866e+00 1.42694139e+00] [ 2.58345127e+00 -6.76496625e-02 1.41693878e+00 ... 8.26267079e-02 -4.58553642e-01 -1.46018887e+00]]] [[[ 9.62971926e-01 1.07485962e+00 9.62421834e-01 ... 1.76067495e+00 1.96865332e+00 2.96115708e+00] [-5.15293926e-02 1.29527271e+00 4.73325998e-01 ... 1.75622451e+00 2.52181387e+00 -3.30679148e-01] [ 7.74253547e-01 1.03154731e+00 9.67245042e-01 ... 2.77537584e+00 1.63831365e+00 1.07840872e+00] ... [ 1.94835544e+00 2.78660226e+00 1.22295725e+00 ... 1.78820026e+00 1.31943238e+00 5.56325018e-01] [-1.35377213e-01 9.63286340e-01 1.87318158e+00 ... 6.34911299e-01 1.18361735e+00 1.27827609e+00] [ 1.62462676e+00 8.63552928e-01 1.02188683e+00 ... 2.90550542e+00 -6.89885616e-01 1.35264313e+00]] [[ 1.71529865e+00 3.08444905e+00 -3.02852631e-01 ... 7.26865828e-01 2.04878473e+00 8.34853053e-01] [ 1.79767799e+00 2.55581880e+00 1.80385542e+00 ... 1.24752617e+00 2.06346846e+00 2.61258554e+00] [ 3.08998632e+00 3.49012613e-01 2.30846381e+00 ... 1.91833830e+00 1.48611569e+00 1.32244432e+00] ... [ 1.87843180e+00 2.00141621e+00 4.39853281e-01 ... 7.08061278e-01 2.00782013e+00 7.89933264e-01] [ 1.67801070e+00 3.09945136e-01 2.50772023e+00 ... 7.16278195e-01 2.81840181e+00 1.52100861e+00] [ 2.70637178e+00 2.16637754e+00 1.49208283e+00 ... 1.84586608e+00 -7.47878671e-01 1.09077144e+00]] [[ 8.97006571e-01 2.32156110e+00 2.60013652e+00 ... 6.46457016e-01 1.55676377e+00 2.12110686e+00] [ 2.38292456e+00 1.26239705e+00 1.53286135e+00 ... 4.09724832e-01 3.35022354e+00 3.45342040e-01] [ 1.04332292e+00 2.13322711e+00 2.49497843e+00 ... 8.95301580e-01 1.66327870e+00 2.52395988e+00] ... [ 1.03594184e+00 1.98939848e+00 1.75328660e+00 ... 9.72164869e-01 2.57932067e-01 2.61577535e+00] [ 1.37518954e+00 7.16521859e-01 1.38598311e+00 ... 
2.78588200e+00 1.01982617e+00 1.85380769e+00] [ 2.75156140e+00 1.92031968e+00 2.07187510e+00 ... 9.02220130e-01 -1.43711701e-01 1.07307625e+00]] ... [[ 1.31322014e+00 -6.42710105e-02 1.69052884e-01 ... 2.53968954e+00 3.20949817e+00 9.97891903e-01] [ 2.94875884e+00 1.22162640e+00 2.19451666e+00 ... 2.37869763e+00 2.00386477e+00 2.44133636e-01] [ 1.93293452e+00 2.23315358e+00 1.19920969e+00 ... 2.64489508e+00 5.55187702e-01 1.18315125e+00] ... [ 7.59802163e-01 1.97673202e+00 1.91573274e+00 ... 1.13705182e+00 8.70170653e-01 1.29245031e+00] [ 1.59953010e+00 2.32246232e+00 2.12560391e+00 ... 9.76213753e-01 8.54588628e-01 1.31294453e+00] [ 1.70524967e+00 1.10109687e+00 2.21607184e+00 ... 2.45159173e+00 2.37462950e+00 1.27689719e+00]] [[ 1.63984931e+00 -4.11444068e-01 -9.71715972e-02 ... 2.16022515e+00 1.85049200e+00 1.23261082e+00] [-6.29919112e-01 1.73834753e+00 1.85865879e-01 ... 3.08957744e+00 8.67691170e-03 1.58801591e+00] [ 1.58935165e+00 3.11964959e-01 1.36959350e+00 ... -2.67861426e-01 1.63074565e+00 1.58108521e+00] ... [ 1.32428312e+00 8.39440703e-01 2.51560163e+00 ... 2.93528318e+00 2.09855390e+00 1.07507932e+00] [ 2.23154092e+00 1.22007465e+00 2.97815919e-01 ... 2.81218505e+00 1.62495124e+00 9.41222489e-01] [-7.05891609e-01 -7.63957739e-01 1.42290831e+00 ... 2.70033717e-01 3.99383873e-01 -5.31714439e-01]] [[ 1.40238011e+00 2.69613028e+00 6.12034500e-01 ... 1.97164252e-01 4.51554686e-01 2.30228615e+00] [ 1.38345718e+00 3.72784638e+00 2.89902163e+00 ... 1.46681333e+00 2.62805915e+00 8.33134353e-01] [ 1.82062352e+00 1.09390938e+00 1.61106503e+00 ... 5.17778337e-01 2.77107501e+00 1.58099091e+00] ... [ 1.13222373e+00 5.64230561e-01 2.66107845e+00 ... 1.78987885e+00 2.35216603e-01 9.02699709e-01] [ 2.84710288e-01 1.13511503e+00 8.48959267e-01 ... 2.57279563e+00 1.36147380e+00 1.72289178e-01] [ 1.25650978e+00 2.19404912e+00 1.36237752e+00 ... 1.55178159e-01 1.42440748e+00 2.32687163e+00]]] [[[ 8.69943202e-01 8.91316473e-01 7.41105378e-01 ... 
7.87230492e-01 8.22066665e-01 7.26622164e-01] [ 7.86836147e-01 7.51844764e-01 7.46029615e-01 ... 6.64777637e-01 7.29287505e-01 6.13033473e-01] [ 7.40755200e-01 6.88333452e-01 6.88641191e-01 ... 7.57809222e-01 7.26272643e-01 6.39179707e-01] ... [ 7.44904757e-01 6.91192627e-01 6.45335615e-01 ... 6.80231273e-01 7.08330154e-01 8.64286363e-01] [ 8.75693977e-01 7.32376397e-01 6.69798076e-01 ... 8.26350808e-01 5.97508967e-01 7.06459701e-01] [ 6.60081923e-01 7.74468243e-01 7.07075238e-01 ... 6.77655041e-01 8.27578247e-01 7.06503272e-01]] [[ 6.87136590e-01 6.87335253e-01 6.28790081e-01 ... 6.40814543e-01 7.99534261e-01 6.00147426e-01] [ 6.68286145e-01 6.63178742e-01 7.43773580e-01 ... 6.85579121e-01 6.91717446e-01 6.28189743e-01] [ 6.56688213e-01 7.91047275e-01 5.94950795e-01 ... 7.10126042e-01 7.27371037e-01 7.78964579e-01] ... [ 7.55259812e-01 6.37107193e-01 6.61717772e-01 ... 6.76483989e-01 6.87485099e-01 7.42760301e-01] [ 6.33472502e-01 7.41335630e-01 7.64043748e-01 ... 6.98796690e-01 7.82912731e-01 7.15147793e-01] [ 7.47696757e-01 7.06609309e-01 6.63321197e-01 ... 6.87384069e-01 6.68087304e-01 7.81272352e-01]] [[ 7.55231082e-01 6.89575791e-01 7.23056376e-01 ... 7.33125746e-01 7.11198509e-01 7.82589912e-01] [ 6.48539186e-01 6.70449853e-01 6.43680692e-01 ... 8.13279212e-01 7.71869004e-01 6.35041356e-01] [ 5.60318589e-01 7.51223564e-01 7.87649095e-01 ... 6.36182189e-01 7.60521650e-01 7.63631940e-01] ... [ 7.31702626e-01 6.92305505e-01 7.30379701e-01 ... 8.19621265e-01 7.74202228e-01 7.35728741e-01] [ 6.82429492e-01 7.03475833e-01 7.11993277e-01 ... 5.99545658e-01 8.37740541e-01 7.32729614e-01] [ 6.34113014e-01 6.79099381e-01 7.95285881e-01 ... 6.60061121e-01 6.50458395e-01 5.88265717e-01]] ... [[ 6.96259022e-01 7.34152317e-01 7.11686194e-01 ... 8.27448785e-01 7.27697074e-01 7.43895650e-01] [ 6.07802212e-01 7.02340007e-01 6.96700990e-01 ... 7.82810986e-01 8.00198853e-01 7.69321859e-01] [ 7.17195451e-01 7.62660980e-01 7.89711356e-01 ... 
6.13388240e-01 7.12551177e-01 8.43868375e-01] ... [ 6.86534882e-01 6.53245449e-01 6.95482790e-01 ... 7.13039100e-01 8.03106844e-01 6.34054065e-01] [ 8.03686023e-01 7.40450621e-01 6.70712829e-01 ... 7.69901872e-01 7.53064394e-01 7.87955225e-01] [ 8.06892574e-01 6.42704725e-01 7.23648727e-01 ... 7.68365741e-01 8.55495870e-01 7.86119342e-01]] [[ 5.72657347e-01 6.58827066e-01 7.49243319e-01 ... 6.59742653e-01 8.33129406e-01 6.90964460e-01] [ 7.39739478e-01 6.63172603e-01 6.51733041e-01 ... 7.23360419e-01 5.68915427e-01 7.34623492e-01] [ 6.83190286e-01 6.92384124e-01 8.57326210e-01 ... 6.77960336e-01 6.96968675e-01 7.14992762e-01] ... [ 6.18189812e-01 7.20713913e-01 5.52632153e-01 ... 6.66748106e-01 7.22767711e-01 8.33886802e-01] [ 5.80160916e-01 5.68246245e-01 6.44379973e-01 ... 6.18630588e-01 6.62446916e-01 5.44141829e-01] [ 9.00258720e-01 6.95012569e-01 7.34618306e-01 ... 7.05021560e-01 8.52923453e-01 7.36174464e-01]] [[ 7.55403757e-01 7.26078033e-01 7.41289914e-01 ... 7.76434362e-01 7.52513945e-01 7.19295979e-01] [ 7.63436973e-01 7.13734746e-01 8.17423820e-01 ... 8.03329289e-01 6.64965510e-01 5.71797371e-01] [ 7.85947263e-01 6.17402196e-01 6.70061111e-01 ... 6.62563980e-01 8.14430177e-01 7.32073188e-01] ... [ 6.31543994e-01 9.28301275e-01 6.30414248e-01 ... 6.65492594e-01 7.58445740e-01 7.05401123e-01] [ 7.75638878e-01 5.21405339e-01 7.99423397e-01 ... 7.02392340e-01 7.43392944e-01 7.88090348e-01] [ 7.26318598e-01 6.38773024e-01 6.72602117e-01 ... 8.06247532e-01 5.76109290e-01 7.64289081e-01]]] [[[ 3.52082998e-01 1.93683296e-01 9.12011087e-01 ... 4.96904612e-01 3.67953300e-01 2.82033026e-01] [ 3.84938896e-01 -2.16463730e-01 -7.41363764e-02 ... 3.23444933e-01 8.76950324e-01 2.34357804e-01] [ 4.37565953e-01 7.43830442e-01 2.02325478e-01 ... 2.09933177e-01 6.17217541e-01 1.84440136e-01] ... [ 3.22284520e-01 1.07881494e-01 3.56892705e-01 ... 7.73710683e-02 8.67163420e-01 -4.77779992e-02] [-8.15812349e-02 4.44058180e-01 1.66893318e-01 ... 
1.57621473e-01 -5.89165576e-02 1.92575380e-01] [ 8.23314130e-01 6.57059968e-01 2.59545803e-01 ... 3.62691730e-01 3.94673944e-01 7.40108728e-01]] [[ 3.35359454e-01 2.52090782e-01 6.10744357e-01 ... 6.13728642e-01 1.08544397e+00 2.55991042e-01] [ 2.44725958e-01 5.40745735e-01 4.89011049e-01 ... 6.69453084e-01 3.76620680e-01 6.91961884e-01] [ 1.97121367e-01 4.19595033e-01 1.25748634e-01 ... 4.06139970e-01 1.77257672e-01 1.15437903e-01] ... [ 4.07522917e-01 5.00950336e-01 2.60346055e-01 ... 7.33722746e-01 2.16568828e-01 1.48209944e-01] [ 3.09759885e-01 4.58469391e-01 7.38830805e-01 ... 1.48935750e-01 9.93776992e-02 4.21699375e-01] [ 2.33192414e-01 5.56205869e-01 1.96590036e-01 ... 3.91294569e-01 7.08792746e-01 6.22708380e-01]] [[ 3.67212683e-01 5.63565910e-01 -1.59642950e-01 ... 1.40497893e-01 1.24028489e-01 4.38336015e-01] [ 5.98534167e-01 3.73387098e-01 6.15552783e-01 ... 3.79744828e-01 7.03159392e-01 1.69622466e-01] [ 4.20378447e-01 6.80218160e-01 4.38921779e-01 ... 2.30616897e-01 -8.69706869e-02 1.21668108e-01] ... [ 1.90637335e-01 1.99577466e-01 1.88442469e-01 ... 2.74745762e-01 3.18915278e-01 5.60857773e-01] [-5.70560060e-02 1.97417215e-01 -3.34731080e-02 ... 2.18874156e-01 6.35025382e-01 1.12516485e-01] [ 2.87676722e-01 6.70702696e-01 4.20548543e-02 ... 7.36823022e-01 5.03431737e-01 5.15972078e-01]] ... [[ 3.18777502e-01 3.47560823e-01 4.81407821e-01 ... 2.83160508e-01 5.81609309e-01 1.68571938e-02] [ 4.27112639e-01 8.73758733e-01 2.90353924e-01 ... 5.74446559e-01 -3.46025467e-01 4.92426783e-01] [ 3.14293027e-01 3.13125610e-01 2.47456789e-01 ... 7.49967173e-02 8.54023397e-01 4.86817449e-01] ... [ 8.30131114e-01 3.10850412e-01 6.66495025e-01 ... 3.81522447e-01 -9.09849554e-02 5.19034386e-01] [-1.25491679e-01 -1.43029183e-01 4.57096249e-01 ... 9.88671407e-02 3.13129753e-01 6.80788457e-01] [ 3.72042269e-01 3.83894444e-02 2.27037609e-01 ... 1.33950800e-01 5.70918739e-01 8.02421629e-01]] [[ 3.85090202e-01 7.94714615e-02 2.10161403e-01 ... 
1.10379316e-01 3.55469704e-01 4.99093443e-01] [ 8.14987570e-02 6.42028689e-01 4.04502451e-01 ... 3.38851810e-01 5.67643881e-01 2.05803916e-01] [ 2.57246435e-01 4.40357119e-01 6.18846476e-01 ... 6.81762993e-01 6.84606656e-02 2.91308969e-01] ... [ 3.68308991e-01 1.74773231e-01 2.51300126e-01 ... 6.45999908e-01 -1.02878034e-01 2.53930420e-01] [ 5.31612515e-01 2.77518094e-01 3.03957015e-01 ... 4.62262243e-01 2.06805184e-01 -1.34544447e-01] [ 1.09636644e-02 2.48280525e-01 5.16659498e-01 ... 1.40184224e-01 2.73822993e-01 -3.13013703e-01]] [[ 5.73430419e-01 1.61686733e-01 5.66487610e-01 ... 8.44345629e-01 7.88827300e-01 2.43725717e-01] [ 1.78038985e-01 4.06062096e-01 4.31926280e-01 ... 1.27320871e-01 7.28007615e-01 3.67501020e-01] [ 4.19358630e-03 3.47426310e-02 5.14209270e-01 ... 5.57233989e-01 3.49997073e-01 4.97935534e-01] ... [ 2.68733919e-01 3.06440860e-01 7.31119692e-01 ... 1.86402723e-01 7.67278075e-01 6.94044121e-03] [ 2.32777502e-02 9.94311035e-01 3.08423728e-01 ... 4.17314798e-01 1.20413564e-01 -4.70922649e-01] [ 4.35217500e-01 3.44015718e-01 4.02246714e-01 ... 2.28601471e-01 6.66339219e-01 2.92759746e-01]]] [[[-2.76790679e-01 5.40830016e-01 -1.04375958e-01 ... 1.23558962e+00 3.36819649e-01 1.03562617e+00] [ 1.26533210e+00 1.90535223e+00 1.45173401e-01 ... 9.10345376e-01 1.61606586e+00 6.10685706e-01] [ 2.61460632e-01 -8.09870541e-01 7.14585423e-01 ... 8.98828208e-01 3.09569657e-01 2.23108493e-02] ... [ 1.72990292e-01 -2.93207586e-01 7.62105346e-01 ... -3.61105986e-02 -2.06385422e-02 -8.38818192e-01] [-1.81826234e+00 2.92142838e-01 9.65945244e-01 ... 1.59094191e+00 -7.12628484e-01 3.37365448e-01] [ 1.83839679e+00 2.69961059e-01 -3.90951544e-01 ... 7.91277051e-01 6.31258905e-01 4.38203722e-01]] [[ 4.95113522e-01 1.12975490e+00 1.90667808e+00 ... 1.07924020e+00 4.29853141e-01 9.94182825e-01] [ 1.27095804e-01 3.79341245e-01 1.91836667e+00 ... -2.00068191e-01 -1.18812633e+00 6.14045382e-01] [ 7.81910717e-01 7.42323399e-01 7.03188360e-01 ... 
1.17359459e+00 7.29312420e-01 3.29965442e-01] ... [ 4.68361259e-01 8.55318546e-01 4.56686676e-01 ... 5.89772940e-01 -8.88543904e-01 -3.76201794e-02] [-4.46999520e-01 -4.66008067e-01 -5.56997120e-01 ... -1.19686559e-01 1.28191960e+00 6.74851000e-01] [ 1.24961102e+00 1.02701974e+00 6.45595714e-02 ... -1.32272553e+00 4.88193899e-01 1.71881366e+00]] [[-2.80868202e-01 3.99145037e-01 1.49775040e+00 ... -2.77527511e-01 1.40769041e+00 1.67939079e+00] [ 1.69046009e+00 -1.79197657e+00 -7.37858564e-02 ... 4.97326195e-01 8.74939919e-01 -4.07818317e-01] [-2.79551536e-01 2.19496942e+00 -6.35203719e-02 ... 1.00390446e+00 1.13244045e+00 9.18291569e-01] ... [ 2.15610766e+00 -8.03659409e-02 1.22426593e+00 ... 3.06872040e-01 -7.67241359e-01 1.43182957e+00] [-3.68176214e-02 3.23308319e-01 1.87146318e+00 ... -7.20344663e-01 1.19977653e-01 1.48250389e+00] [ 1.82559088e-01 6.65357709e-01 -1.53866380e-01 ... 2.54608899e-01 1.93556368e-01 2.05450749e+00]] ... [[ 1.27772270e-02 7.44573116e-01 2.00502753e+00 ... -5.08677721e-01 -6.31749555e-02 1.74494088e+00] [-9.71636057e-01 7.05855966e-01 1.50698781e+00 ... 1.81559980e+00 3.86783630e-02 1.12003481e+00] [ 9.77670431e-01 1.53577161e+00 3.80310565e-01 ... 4.21387166e-01 6.65852964e-01 -1.84736580e-01] ... [ 1.51774275e+00 2.24065855e-01 8.74522150e-01 ... 2.89514959e-01 1.64919865e+00 6.28052413e-01] [ 1.23427737e+00 2.24870563e+00 6.92831039e-01 ... 2.74431966e-02 1.38772130e+00 1.44601738e+00] [ 1.29216266e+00 -7.68413782e-01 9.10504818e-01 ... 8.63710880e-01 1.33877769e-02 2.12187335e-01]] [[ 7.32139587e-01 1.75084323e-01 -4.41362798e-01 ... -6.52891517e-01 5.98400891e-01 1.05126834e+00] [ 5.82681596e-01 1.28804553e+00 -6.47120178e-01 ... 1.47955716e+00 9.69635308e-01 -5.05836070e-01] [ 1.18075407e+00 1.07875562e+00 1.09368348e+00 ... 8.77737403e-02 -3.01655412e-01 2.80915827e-01] ... [ 1.09358919e+00 4.64870602e-01 -1.06775187e-01 ... 2.27482110e-01 1.26690137e+00 -5.26774108e-01] [ 2.02678967e+00 9.65033948e-01 -2.24661976e-01 ... 
1.35453761e+00 1.39437008e+00 1.67825329e+00] [ 2.66137362e-01 2.36656711e-01 8.09099451e-02 ... 1.63820589e+00 1.29010212e+00 -4.67045605e-02]] [[ 2.56940150e+00 9.52784956e-01 -6.92585528e-01 ... 1.66742623e+00 2.13161230e-01 3.10388416e-01] [ 4.33972269e-01 -1.04369509e+00 2.64479935e-01 ... 5.03675580e-01 7.53507316e-01 6.19321227e-01] [ 3.79240096e-01 -1.32989800e+00 -1.06594920e+00 ... 9.33281779e-01 9.67801273e-01 1.13673188e-01] ... [-8.16836774e-01 1.13967037e+00 1.29664016e+00 ... 1.31617141e+00 -1.58494748e-02 1.15798473e+00] [-2.94945031e-01 -2.29129866e-01 1.39241040e+00 ... -4.04837122e-03 2.70818144e-01 1.18746758e+00] [-5.72072156e-02 9.29896653e-01 -1.13734424e+00 ... -1.85781464e-01 -5.90217113e-01 6.61251724e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_group_norm.py::TestGroupNorm::test_conv2d[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'ndim': 5} - params:{'n_groups': 2, 'weights_shape': (6,), 'bias': False, 'eps': 0.0001} ] | 0.11 | |
|
----------------------------- Captured stdout call ----------------------------- 5 graph(%self : __torch__.test_group_norm.___torch_mangle_4630.aten_group_norm, %x.1 : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:66 %3 : int = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:35 %4 : int = prim::Constant[value=1]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:51 %5 : str = prim::Constant[value="Expected more than 1 value per channel when training, got input size {}"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 %6 : str = prim::Constant[value="builtins.ValueError"]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:14 %self.eps : float = prim::Constant[value=0.0001]() %self.bias : NoneType = prim::Constant() %self.weight : Float(6, strides=[1], requires_grad=0, device=cpu) = prim::Constant[value= 0.6958 1.3602 -0.2702 1.5256 0.1067 -0.5646 [ CPUFloatType{6} ]]() %self.n_groups : int = prim::Constant[value=2]() %11 : int = aten::size(%x.1, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %12 : int = aten::size(%x.1, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:40 %13 : int = aten::mul(%11, %12) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %14 : int = aten::floordiv(%13, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:24 %15 : int[] = prim::ListConstruct(%14, %self.n_groups) %16 : int[] = aten::size(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %17 : int[] = aten::slice(%16, %self.n_groups, %self.bias, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:88 %18 : int[] = aten::list(%17) # 
/home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:83 %19 : int[] = aten::add(%15, %18) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2523:23 %size_prods.1 : int = aten::__getitem__(%19, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2408:17 %21 : int = aten::len(%19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %22 : int = aten::sub(%21, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:19 %size_prods : int = prim::Loop(%22, %2, %size_prods.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2409:4 block0(%i.1 : int, %size_prods.11 : int): %26 : int = aten::add(%i.1, %self.n_groups) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:27 %27 : int = aten::__getitem__(%19, %26) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:22 %size_prods.5 : int = aten::mul(%size_prods.11, %27) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2410:8 -> (%2, %size_prods.5) %29 : bool = aten::eq(%size_prods, %4) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:7 = prim::If(%29) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2411:4 block0(): %30 : str = aten::format(%5, %19) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:25 = prim::RaiseException(%30, %6) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2412:8 -> () block1(): -> () %31 : Tensor = aten::group_norm(%x.1, %self.n_groups, %self.weight, %self.bias, %self.eps, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2524:11 return (%31) fw_re: [[[[[ 5.48740961e-02 -1.03003764e+00 7.90114582e-01 ... 7.61890471e-01 6.92074299e-01 -9.65919942e-02] [-1.52055845e-01 -1.09240329e+00 -3.70866388e-01 ... 
7.58232832e-01 -1.30265212e+00 8.65387246e-02] [ 1.39142132e+00 5.88418126e-01 -4.62018669e-01 ... -2.28898644e-01 1.13441674e-02 1.16274631e+00] ... [-7.96160340e-01 3.69338959e-01 -3.91920328e-01 ... 3.35065871e-01 -8.98882672e-02 -4.67916764e-03] [ 1.58248156e-01 -9.39103365e-02 -9.14998502e-02 ... -7.45657802e-01 -1.04576039e+00 -4.39061224e-01] [-1.11972481e-01 4.20800298e-02 -6.08379602e-01 ... 5.73337317e-01 9.29632187e-02 -8.85032713e-01]] [[-7.29161024e-01 -1.83104485e-01 1.04262376e+00 ... -8.64881635e-01 4.15901467e-02 -1.13962376e+00] [-4.04185027e-01 1.11966097e+00 9.82181251e-01 ... 4.99447167e-01 -4.25464064e-01 -2.75427848e-01] [-1.38348889e+00 1.00546077e-01 -3.20300043e-01 ... 1.50613636e-01 -1.68962610e+00 1.34931779e+00] ... [-7.40701556e-01 -1.03171062e+00 -6.53126165e-02 ... -2.90047646e-01 1.34614110e-01 -4.43956703e-02] [ 3.27452552e-03 -4.04889524e-01 -3.53152186e-01 ... -7.38729239e-01 2.48271182e-01 7.80980706e-01] [ 1.31201744e+00 -4.24449086e-01 3.12088225e-02 ... -5.13336658e-01 5.49670398e-01 -4.04826850e-01]] [[ 5.42328358e-01 1.04041874e-01 -5.88215768e-01 ... -1.68178901e-02 2.71335822e-02 -1.01468217e+00] [-2.82839000e-01 -1.13607335e+00 -3.37497741e-02 ... 1.07736280e-02 -1.49595156e-01 3.65598321e-01] [-7.91419387e-01 -4.94145244e-01 1.11467218e+00 ... 1.22928119e+00 1.02102149e+00 7.24002868e-02] ... [ 2.91627198e-01 -1.55006617e-01 -1.17428303e+00 ... -6.06811821e-01 4.98105317e-01 -4.02042478e-01] [ 5.01829863e-01 3.93137485e-01 -3.40309948e-01 ... -3.42579126e-01 8.63106668e-01 -1.39945114e+00] [-4.79401439e-01 9.40180242e-01 -7.60343596e-02 ... -9.43177879e-01 1.30479360e+00 -4.10210729e-01]] ... [[ 4.71532226e-01 -2.25350827e-01 3.70756648e-02 ... -4.12083805e-01 4.63187814e-01 4.31598544e-01] [-6.05733454e-01 -7.12275267e-01 -9.14006606e-02 ... 1.73547435e+00 9.08006847e-01 1.29248992e-01] [-5.06549120e-01 2.74090409e-01 -7.61445016e-02 ... -2.77480543e-01 -3.65318090e-01 6.09408796e-01] ... 
[ 1.08624351e+00 6.83811426e-01 7.84199178e-01 ... 1.58018079e-02 -1.21670794e+00 -1.89123631e-01] [ 5.36591232e-01 -5.95642626e-01 5.69411516e-01 ... 8.01258147e-01 7.78362870e-01 2.73181587e-01] [ 1.91986516e-01 2.73099333e-01 4.51694399e-01 ... 5.41470230e-01 8.04046988e-01 9.75923538e-02]] [[ 6.37005627e-01 -7.49783397e-01 9.09135580e-01 ... -6.44772232e-01 4.49224770e-01 1.49007595e+00] [-3.05746198e-01 5.92007220e-01 -1.20177388e+00 ... 2.54819185e-01 1.59309313e-01 3.46801102e-01] [ 3.12500566e-01 4.63502668e-02 -1.30667543e+00 ... -9.01332125e-02 -1.58500835e-01 -9.88097370e-01] ... [-9.77428138e-01 -6.17552459e-01 -1.02779579e+00 ... 3.26769620e-01 4.49167937e-01 -1.55442148e-01] [ 3.04534018e-01 -1.20879781e+00 -2.36805782e-01 ... -2.56131530e-01 -1.16437124e-02 4.50194553e-02] [-6.39441848e-01 -1.71686441e-01 4.59207110e-02 ... -8.09699893e-01 1.01719832e+00 -4.48107928e-01]] [[ 7.83408642e-01 9.97021735e-01 -1.86177179e-01 ... -7.01422930e-01 1.45925954e-01 1.59817219e-01] [ 4.64767337e-01 3.46150130e-01 7.06698954e-01 ... 2.87736118e-01 7.72557795e-01 3.10795635e-01] [ 2.89836496e-01 -8.03540945e-01 -9.06295538e-01 ... 8.29219759e-01 -1.71159953e-01 2.98672915e-01] ... [-1.14264302e-01 -4.14720029e-01 4.95848894e-01 ... -1.12590587e+00 7.47435987e-01 -2.69562066e-01] [ 8.32565650e-02 1.89282045e-01 1.32708991e+00 ... 5.73251247e-01 1.50868017e-02 1.99872181e-01] [-6.71626031e-02 3.64225358e-01 -3.23605150e-01 ... 6.49461627e-01 5.74827552e-01 -2.67879903e-01]]] [[[ 1.22238731e+00 7.72255242e-01 1.34488082e+00 ... 1.13056505e+00 -2.03380704e+00 -2.74225414e-01] [ 1.50362718e+00 4.67471331e-02 2.86229819e-01 ... -4.05217320e-01 2.66059971e+00 -1.73962617e+00] [ 1.66801202e+00 7.78133929e-01 -1.95747268e+00 ... -2.31802535e+00 5.72069526e-01 5.43688595e-01] ... [-4.10751438e+00 2.62760580e-01 -2.45875955e-01 ... 4.58140552e-01 -1.11094356e+00 -7.43098676e-01] [ 9.56960678e-01 -1.00110948e-01 -2.82549590e-01 ... 
-1.31934428e+00 -1.22813344e+00 1.11073600e-02] [-2.12453917e-01 -1.87981677e+00 -1.73875237e+00 ... -1.61324784e-01 8.69161189e-01 1.37828016e+00]] [[-8.77575219e-01 -1.06841707e+00 2.19564223e+00 ... 1.46147108e+00 -2.57553816e-01 1.71269178e+00] [ 8.82645965e-01 8.13068092e-01 6.42447233e-01 ... -2.71699023e+00 -9.70784307e-01 -1.77370143e+00] [ 5.97745597e-01 2.52889812e-01 3.57369781e-01 ... -3.13719082e+00 -4.53024834e-01 7.08824635e-01] ... [-8.78344595e-01 -2.11931378e-01 -2.03812575e+00 ... 9.23603952e-01 -2.64580250e+00 -2.08059967e-01] [-2.98350841e-01 -1.47475347e-01 -2.52429545e-01 ... 8.95903349e-01 2.22802296e-01 -5.29572546e-01] [ 1.90212429e+00 -1.48113418e+00 3.94540071e-01 ... -1.57758069e+00 1.19999099e+00 1.06575119e+00]] [[ 3.66304457e-01 5.32238603e-01 -3.54921675e+00 ... 2.77958179e+00 1.19446421e+00 1.85147524e-01] [-3.18491673e+00 -4.55258012e-01 -1.83763170e+00 ... -8.43670368e-01 5.64785063e-01 2.30892444e+00] [-4.41554248e-01 -2.74082989e-01 1.84832096e+00 ... 9.50667024e-01 -1.25692308e+00 7.59283364e-01] ... [ 2.66496754e+00 9.29885387e-01 3.31251335e+00 ... 1.16875339e+00 -4.01147842e-01 -8.86774480e-01] [ 3.00420165e-01 1.80490267e+00 -5.25729835e-01 ... 3.22479439e+00 -3.69730368e-02 2.22826409e+00] [-5.67723215e-02 1.66193330e+00 -2.92113930e-01 ... 6.38720572e-01 -2.28006378e-01 2.47083783e-01]] ... [[ 2.77583456e+00 -2.63996100e+00 1.08738232e+00 ... 1.23671556e+00 -7.24538267e-01 -3.45525742e-01] [-7.79721797e-01 -4.28431779e-02 -1.10576248e+00 ... 1.63044477e+00 -7.01248884e-01 3.89036119e-01] [ 1.30591011e+00 3.59019667e-01 1.23239450e-01 ... -1.30298245e+00 -7.87266791e-01 -1.42296946e+00] ... [-4.03236121e-01 1.18662322e+00 -4.40852433e-01 ... 7.67292798e-01 1.02020729e+00 -3.81467432e-01] [ 7.91455030e-01 8.43489230e-01 1.06326431e-01 ... -2.53602266e+00 -2.28128359e-01 5.51959574e-01] [ 2.00098681e+00 -1.25611424e+00 2.36235690e+00 ... 
7.53172636e-01 1.10479283e+00 -1.73586094e+00]] [[ 8.45910549e-01 4.21526223e-01 4.11108792e-01 ... 1.58087730e+00 9.23292637e-01 -2.05750656e+00] [ 2.32272792e+00 -1.93818247e+00 3.00701857e-02 ... -2.23917723e+00 -1.11092186e+00 -7.32164443e-01] [-2.57892561e+00 -1.48600146e-01 4.52315301e-01 ... 1.51654470e+00 -1.47359565e-01 5.91374516e-01] ... [ 3.28162670e-01 7.90437281e-01 -1.01418710e+00 ... 3.71911734e-01 1.03262472e+00 -6.71230182e-02] [-2.41285396e+00 -3.42160344e-01 1.38165641e+00 ... 2.00986290e+00 1.11366332e+00 -7.55472183e-01] [-1.81652248e+00 1.11842787e+00 -7.40407526e-01 ... -1.28233641e-01 1.35652995e+00 2.70409799e+00]] [[-2.57216763e+00 -7.25552440e-01 5.55998981e-01 ... -2.38002464e-01 1.28395629e+00 1.09288800e+00] [ 1.44945455e+00 3.75631124e-01 8.25990796e-01 ... 2.90932727e+00 2.37427330e+00 8.12892914e-01] [-1.11272752e+00 5.02572358e-02 3.32613111e+00 ... -5.95700741e-01 -5.08736610e-01 2.03559208e+00] ... [-4.11959052e-01 -5.00865161e-01 1.27845919e+00 ... 1.73747575e+00 -1.72405028e+00 7.67310679e-01] [-8.22989643e-01 1.29218554e+00 2.01381397e+00 ... -1.33140075e+00 1.72298148e-01 -2.94629049e+00] [-2.75787425e+00 1.39783895e+00 3.81106168e-01 ... 1.98240781e+00 1.16705322e+00 -3.61531520e+00]]] [[[ 3.82892579e-01 7.53002167e-02 4.79752868e-01 ... 1.21933468e-01 -6.78511858e-02 1.46632135e-01] [-2.67158169e-02 2.51154125e-01 5.01439571e-01 ... 2.43059292e-01 3.88225131e-02 2.03510672e-01] [-7.60510147e-01 -3.39987315e-02 3.21250796e-01 ... -2.36338332e-01 4.15898830e-01 9.37343687e-02] ... [ 7.57103711e-02 4.64493483e-02 2.04705283e-01 ... 5.53481951e-02 -7.85211921e-02 2.87265539e-01] [-4.66346174e-01 1.94210827e-01 3.37468624e-01 ... -2.76712906e-02 5.19814253e-01 -1.91964865e-01] [-6.38453290e-02 -8.80458653e-02 1.35662422e-01 ... -3.65904629e-01 -2.15994388e-01 -4.32988226e-01]] [[-1.69589207e-01 2.39511237e-01 3.91891688e-01 ... -1.18563838e-01 -2.26198863e-02 2.72501707e-01] [-1.69008344e-01 5.75956941e-01 -5.97004175e-01 ... 
2.14268088e-01 -8.92496705e-02 6.24623075e-02] [-3.45767379e-01 -3.08217317e-01 -6.26549721e-02 ... -4.91138771e-02 1.39565930e-01 1.84773415e-01] ... [-5.49829185e-01 -3.88016105e-01 3.03292572e-01 ... 1.28665611e-01 -1.70136198e-01 -8.20388645e-03] [-1.44341600e-03 9.59711298e-02 -7.70632327e-01 ... 1.79422572e-01 3.57118726e-01 -1.34091213e-01] [-2.47835979e-01 7.95347914e-02 -6.47398233e-02 ... 8.97477865e-02 1.25791997e-01 2.56318390e-01]] [[ 6.44904673e-02 2.31257170e-01 1.26162320e-01 ... -2.27116033e-01 3.83924007e-01 -2.48441800e-01] [ 4.94881682e-02 -2.30543032e-01 -1.45941630e-01 ... 5.62687032e-03 9.63111892e-02 -6.22461131e-03] [-2.33831167e-01 -1.18505895e-01 2.15419143e-01 ... 4.89475548e-01 -2.27106929e-01 -9.07874294e-03] ... [-4.52393889e-01 -1.44327298e-01 -1.44735768e-01 ... 1.40811533e-01 3.65924351e-02 3.07775065e-02] [ 4.27989125e-01 -4.37366664e-01 -2.68132478e-01 ... -5.24095520e-02 -1.31554514e-01 -4.59623933e-01] [ 4.24135327e-01 1.25962794e-01 -1.59487531e-01 ... -4.14298847e-03 1.37723133e-01 2.88932413e-01]] ... [[ 1.86235502e-01 -3.64425004e-01 -9.85699967e-02 ... -2.04757988e-01 6.82051331e-02 -5.41020572e-01] [-1.66413814e-01 -9.21817869e-02 3.22510242e-01 ... -9.20442194e-02 -1.88744798e-01 -1.79433107e-01] [-1.24387845e-01 -2.06176370e-01 6.22305453e-01 ... 1.01659996e-02 1.98665932e-01 9.07860324e-02] ... [-7.64215067e-02 1.80000827e-01 -2.17698723e-01 ... 5.41381538e-03 3.34594175e-02 -1.74868613e-01] [ 2.05412865e-01 -4.46936071e-01 6.58239007e-01 ... -1.27339900e-01 2.56723523e-01 -1.33836582e-01] [-1.64206758e-01 -3.41937929e-01 6.73751011e-02 ... -2.04024941e-01 -9.45117101e-02 -1.88894458e-02]] [[-2.01219901e-01 9.84461233e-02 3.32027704e-01 ... -2.57117361e-01 -2.30524629e-01 7.70993754e-02] [-1.65824726e-01 -4.52994317e-01 -6.88510165e-02 ... 1.40166640e-01 2.57131159e-01 -9.47347134e-02] [ 2.10742503e-01 2.45566025e-01 -2.47054584e-02 ... -4.90770042e-02 -1.35694042e-01 -2.53760248e-01] ... 
[-4.00972158e-01 2.07358778e-01 -2.72991061e-01 ... 8.59046802e-02 -7.52266943e-02 -6.83428049e-02] [-2.30747342e-01 4.74287048e-02 1.21250652e-01 ... -3.50968659e-01 -2.97894537e-01 -4.18315768e-01] [ 2.55291343e-01 4.70145583e-01 3.76792014e-01 ... -1.94717512e-01 -2.46305838e-01 -3.52306843e-01]] [[-7.24095523e-01 3.80870968e-01 -4.51752871e-01 ... 1.35657504e-01 -1.21162489e-01 -2.51196653e-01] [-5.31052798e-02 -6.70885921e-01 2.61699826e-01 ... 2.08880857e-01 -3.14093918e-01 -4.16018888e-02] [ 2.33037308e-01 -1.49512105e-02 1.67930812e-01 ... 2.55199492e-01 -2.90466100e-02 7.43122935e-01] ... [ 1.48941755e-01 -5.31779192e-02 8.51897337e-03 ... -4.43745077e-01 -1.76004730e-02 2.06316784e-01] [-2.22004160e-01 1.39137611e-01 -2.18141556e-01 ... -3.81972566e-02 6.38053939e-02 -3.28049451e-01] [-2.78134584e-01 2.97911912e-01 3.60411033e-02 ... 1.40743226e-01 -1.88773170e-01 -2.46024981e-01]]] [[[-1.39336658e+00 1.99206924e+00 -3.62434775e-01 ... -7.43576050e-01 1.06877223e-01 -4.63199876e-02] [-7.03024983e-01 -1.76269388e+00 1.89965680e-01 ... -1.69801342e+00 4.66479629e-01 1.15317988e+00] [-1.64057791e+00 1.12694055e-01 6.14197016e-01 ... -2.71117747e-01 3.73650223e-01 -9.33672488e-01] ... [-1.88282442e+00 1.66135144e+00 6.73106983e-02 ... -2.06008601e+00 1.67775917e+00 -1.47641754e+00] [-1.24348855e+00 3.65524322e-01 -3.90589023e+00 ... -9.96885478e-01 -1.86114442e+00 3.46808344e-01] [ 7.02484310e-01 1.75491184e-01 -6.90013468e-01 ... 2.23358703e+00 2.56732732e-01 9.43297148e-02]] [[-8.36482823e-01 -2.84286642e+00 2.72600389e+00 ... -1.09940290e+00 6.27207518e-01 2.10869148e-01] [-2.47120643e+00 -2.95936084e+00 -5.54259494e-02 ... -7.71213710e-01 6.19159818e-01 7.95670226e-02] [ 4.65181023e-01 1.89473450e+00 -2.26550603e+00 ... 2.19167614e+00 -2.31409013e-01 -1.24592721e+00] ... [-1.12126136e+00 2.64768815e+00 1.59401506e-01 ... 1.14265658e-01 -3.98210931e+00 2.24651098e+00] [ 1.12627470e+00 4.59163666e+00 -1.76847890e-01 ... 
-4.08987856e+00 1.70056283e-01 -5.27441464e-02] [ 1.01319313e+00 1.82688391e+00 -9.63335991e-01 ... -1.22901738e+00 -4.14742994e+00 3.75451267e-01]] [[ 1.18201962e-02 3.54803038e+00 -1.31527877e+00 ... 5.65924287e-01 -2.06763476e-01 -1.49681449e+00] [ 5.86323023e-01 1.12122595e-02 3.37199640e+00 ... -5.98658442e-01 -5.33011973e-01 1.73620129e+00] [-3.79638374e-02 5.82021356e-01 1.64358234e+00 ... 3.53438139e-01 2.88682014e-01 -1.09382033e+00] ... [-2.05866754e-01 3.71236682e-01 -7.89995849e-01 ... 6.15241528e-01 -2.59295225e-01 1.19354653e+00] [-1.09572589e+00 6.86848879e-01 3.06067443e+00 ... 8.50837886e-01 1.68523395e+00 -3.03234506e+00] [-1.49732649e+00 1.44459677e+00 -2.79278183e+00 ... 6.63755774e-01 -1.52282798e+00 -1.46669507e+00]] ... [[-1.97561860e-01 -2.64500737e+00 1.38673234e+00 ... -1.80981624e+00 -1.42761230e+00 1.68639028e+00] [-1.69210267e+00 -1.09541953e+00 -1.33026063e+00 ... 9.81183827e-01 -2.19387269e+00 1.61878741e+00] [ 7.13594794e-01 3.92476726e+00 -9.31198671e-02 ... 1.10706210e+00 3.34967399e+00 -3.31905937e+00] ... [-2.14500204e-01 2.05201936e+00 2.25159621e+00 ... -7.90684760e-01 7.51711428e-01 1.59562576e+00] [ 1.87804073e-01 5.70196688e-01 -6.63821757e-01 ... -2.45679474e+00 -1.80718088e+00 -1.32425320e+00] [-6.95818067e-01 -5.24415016e-01 -6.76124036e-01 ... 1.07016182e+00 2.16221511e-01 -1.58166695e+00]] [[ 1.67678809e+00 -6.44870818e-01 -5.20982221e-02 ... -3.62756371e-01 -1.28641888e-01 4.27623034e-01] [-6.00670278e-01 1.09471440e+00 -5.85822284e-01 ... 3.40187132e-01 1.01723317e-02 -3.19789976e-01] [ 4.12373483e-01 1.06371868e+00 -4.20278043e-01 ... -1.78022957e+00 -5.74704587e-01 3.48038173e+00] ... [-1.50607383e+00 -9.82433259e-01 9.99986157e-02 ... -1.36081767e+00 -2.81768322e+00 1.06946099e+00] [-1.93927133e+00 -2.08788350e-01 5.96331656e-01 ... -8.97175431e-01 -1.21982276e-01 -6.08586490e-01] [ 6.56079650e-01 1.07288027e+00 8.71326864e-01 ... 
2.33078790e+00 -1.85859859e+00 2.18996978e+00]] [[ 3.86019850e+00 2.57737875e+00 4.58134443e-01 ... 8.21972713e-02 5.14955759e-01 2.66074038e+00] [ 4.29863274e-01 9.60948169e-01 -1.27371281e-01 ... -1.75438225e+00 -2.00070667e+00 -5.16863428e-02] [ 1.79163110e+00 -1.65287465e-01 -6.10424459e-01 ... 4.73382995e-02 2.93486476e-01 -2.46806145e+00] ... [-2.59057355e+00 1.28294551e+00 -1.76664218e-01 ... -1.38278675e+00 3.67474604e+00 -7.58620262e-01] [ 3.04592580e-01 -2.72879672e+00 1.83002740e-01 ... -3.15268850e+00 2.00135970e+00 1.19411981e+00] [-9.51030672e-01 4.72935766e-01 6.16782784e-01 ... 9.40708518e-01 -1.65917826e+00 -1.17655063e+00]]] [[[-1.62917838e-01 5.35893179e-02 3.28541175e-02 ... -2.30122581e-02 6.68528229e-02 1.21768080e-01] [ 8.26022029e-02 -2.98320632e-02 6.28049001e-02 ... -8.03983733e-02 8.76913965e-02 -3.92416827e-02] [ 4.87151220e-02 4.86515500e-02 7.91197643e-02 ... 3.00852377e-02 1.52089298e-01 1.45149697e-02] ... [-6.44524535e-03 -1.04156122e-01 -6.84334263e-02 ... 6.05743378e-02 5.16243204e-02 8.23689103e-02] [ 4.40018997e-02 1.72116840e-03 -1.18266448e-01 ... -1.77273096e-03 1.62364051e-01 -1.40607551e-01] [ 1.20589525e-01 -5.72390109e-03 -2.75830855e-03 ... -3.19196507e-02 -1.49305277e-02 8.43003485e-03]] [[-2.54843608e-02 8.03031921e-02 -2.36911383e-02 ... -1.50808413e-02 5.02691194e-02 -4.04744828e-03] [ 1.17115311e-01 -3.07581667e-02 2.20002253e-02 ... -9.17344168e-03 8.29607993e-02 -1.32900709e-02] [ 1.84691809e-02 2.00775713e-01 1.19214416e-01 ... 2.56914552e-02 8.08453485e-02 -1.77189067e-01] ... [ 7.05247521e-02 4.88997810e-02 -1.82231680e-01 ... 9.84973535e-02 -7.77494833e-02 -1.64125264e-01] [-8.81089717e-02 1.46876961e-01 6.88068196e-02 ... -1.95150718e-01 -1.71211079e-01 2.30000652e-02] [ 2.33809520e-02 9.54882577e-02 -7.61942416e-02 ... -3.20922047e-01 -1.18669219e-01 1.85453162e-01]] [[ 8.92912894e-02 1.15316682e-01 1.48082465e-01 ... 
-2.31909603e-01 1.19419508e-01 1.16142988e-01] [-9.14584398e-02 -7.99501408e-03 -1.96452662e-01 ... 5.44003909e-03 -1.01833209e-01 -1.09001640e-02] [-2.68173963e-02 -1.87839679e-02 -1.60857305e-01 ... 4.55100238e-02 2.61999276e-02 -1.52738377e-01] ... [-3.54216658e-02 -1.34118408e-01 -8.41641352e-02 ... -1.49974421e-01 1.79891754e-03 -8.25342909e-02] [-7.58720096e-03 1.42177045e-01 3.15145925e-02 ... 1.87533155e-01 3.88884135e-02 -1.76223621e-01] [ 7.48798251e-02 2.58814972e-02 -6.04631193e-03 ... 8.83210301e-02 -2.25345809e-02 -1.55299097e-01]] ... [[-2.45818384e-02 9.60167497e-02 2.46363506e-02 ... 9.64921787e-02 -1.83830000e-02 8.82110745e-02] [ 1.38787985e-01 8.62004161e-02 -1.16861105e-01 ... 7.36868531e-02 -5.56502193e-02 5.68033084e-02] [ 4.67910878e-02 -3.81716453e-02 5.48161864e-02 ... 4.74806540e-02 1.43417343e-01 -1.09904697e-02] ... [ 2.74699908e-02 7.40603432e-02 -1.00687958e-01 ... -2.73516271e-02 1.16658978e-01 -1.87051684e-01] [-7.03082234e-02 4.37024161e-02 4.37064506e-02 ... -8.84406641e-02 6.55409619e-02 2.23885830e-02] [ 1.08705260e-01 -4.36498336e-02 -8.65843240e-03 ... 3.73850837e-02 1.68955568e-02 -1.10295434e-02]] [[-7.45690707e-03 -1.24860533e-01 5.28541692e-02 ... -3.47341187e-02 1.05597906e-01 -9.88993943e-02] [ 4.81438860e-02 2.43097544e-02 -4.55003157e-02 ... 2.97899172e-02 -1.18436135e-01 1.40046671e-01] [-6.38985485e-02 9.78565663e-02 -8.45256597e-02 ... -6.69223815e-02 4.99512330e-02 -7.83100650e-02] ... [ 6.10416755e-02 6.44918978e-02 -1.71772733e-01 ... -1.22459352e-01 1.36239594e-02 1.14563949e-01] [-1.14679277e-01 -1.49118947e-03 3.22308093e-02 ... 9.33015496e-02 1.19854502e-01 1.52402356e-01] [ 2.00243056e-01 9.20963958e-02 1.03118576e-01 ... -5.64865321e-02 3.36539410e-02 -2.60261800e-02]] [[ 6.98769242e-02 -6.17254302e-02 -9.21203420e-02 ... -5.87595738e-02 -6.45915642e-02 5.98152615e-02] [ 2.70478055e-02 -1.49359358e-02 -8.58905762e-02 ... 
-1.14840798e-01 -1.10192738e-01 -3.44218910e-02] [ 2.54797749e-02 1.46522015e-01 1.41179934e-01 ... 1.27079766e-02 1.34459838e-01 -1.29440099e-01] ... [-2.11820230e-02 1.42673150e-01 3.92944850e-02 ... 1.72573239e-01 -3.31241190e-02 -1.28206924e-01] [-2.16397405e-01 3.24589610e-02 1.60118379e-02 ... 3.16394977e-02 -4.60671484e-02 6.22594170e-02] [-6.05604313e-02 -2.49380004e-02 -1.65813908e-01 ... 7.52291083e-02 3.16906273e-02 2.77742427e-02]]] [[[-1.25093997e-01 8.74148607e-02 -5.29328883e-01 ... 2.67408311e-01 -4.16048735e-01 1.32546782e-01] [-2.63230562e-01 -4.44121003e-01 5.52010477e-01 ... -3.61848712e-01 1.13906907e-02 -3.87159064e-02] [-1.89615324e-01 3.55046056e-02 -2.60932595e-01 ... 1.74966872e-01 5.73049366e-01 -1.75101742e-01] ... [ 4.57658887e-01 9.21764970e-01 6.17757022e-01 ... 3.79514873e-01 -1.19023189e-01 -1.66898370e+00] [ 1.46292159e-02 2.35127341e-02 3.35289061e-01 ... -8.53691816e-01 -1.90202606e+00 -1.63091309e-02] [-1.93779260e-01 -1.06900227e+00 7.62301087e-01 ... -6.91330373e-01 8.34362924e-01 -1.53370261e+00]] [[-7.17084825e-01 -3.02160591e-01 -3.46486092e-01 ... 8.61505747e-01 -1.61728293e-01 2.64061302e-01] [ 7.12566435e-01 6.83967412e-01 -5.21229386e-01 ... -1.69456303e-01 2.36090943e-01 2.07557052e-01] [-6.07173264e-01 3.29726040e-01 2.64022108e-02 ... 3.84799808e-01 -3.44101757e-01 8.47624838e-02] ... [ 3.60781350e-03 4.22744267e-02 1.26991212e-01 ... 8.44236791e-01 1.36537150e-01 -5.01263380e-01] [-6.34900093e-01 -1.60791669e-02 1.20510602e+00 ... 5.62663913e-01 4.14512724e-01 -2.81977028e-01] [ 8.25429261e-01 7.75224745e-01 -1.02391469e+00 ... 4.65087950e-01 2.17137605e-01 6.50471449e-02]] [[ 4.96470511e-01 9.94210839e-01 -7.23447323e-01 ... 1.75264764e+00 3.72457765e-02 -5.89030147e-01] [ 3.91315520e-01 9.67324972e-01 -6.78564459e-02 ... 3.74705017e-01 -4.45766866e-01 -2.43787810e-01] [-3.32867026e-01 2.82305449e-01 3.35908473e-01 ... -2.07845554e-01 -5.13800919e-01 -3.79131466e-01] ... 
[-2.20300287e-01 -6.34784102e-01 7.97229469e-01 ... -8.92999470e-01 -2.30316222e-01 5.76290227e-02] [-5.75949013e-01 3.97816807e-01 2.72549957e-01 ... -4.87614870e-01 7.44701684e-01 2.65689678e-02] [ 3.74259591e-01 -6.17200136e-01 7.72269815e-02 ... -1.25953346e-01 -2.22714216e-01 -4.33170497e-01]] ... [[-1.61645949e-01 -7.36367479e-02 3.99163812e-01 ... -7.62207925e-01 1.70047492e-01 -7.61186242e-01] [ 3.82842034e-01 3.29734827e-03 -8.68098557e-01 ... -5.29870868e-01 9.65322196e-01 5.61356604e-01] [ 7.84522593e-01 2.02390090e-01 4.06176150e-01 ... -1.23394203e+00 -2.41476342e-01 -1.22026436e-01] ... [ 1.40839014e-02 1.48601636e-01 -4.34493691e-01 ... 5.89556038e-01 4.84793097e-01 -7.39839748e-02] [-1.21303864e-01 -2.47372210e-01 8.59144777e-02 ... -2.59638596e-02 4.28798229e-01 6.49575412e-01] [-3.38740945e-01 -6.80448174e-01 -3.13904285e-01 ... 9.84291434e-01 4.40186024e-01 1.27165341e+00]] [[ 1.23672262e-01 6.77804053e-01 -6.61842406e-01 ... -6.04762375e-01 -7.28984028e-02 -5.11426255e-02] [-1.75428420e-01 -1.04007190e-02 -1.03638899e+00 ... -2.05336705e-01 -5.56378007e-01 -4.96982858e-02] [-2.97210515e-01 7.50609115e-02 3.04086834e-01 ... -2.30639540e-02 1.18554211e+00 -1.15950906e+00] ... [ 1.10523060e-01 -6.20640874e-01 8.57319906e-02 ... 6.08190536e-01 -1.48670003e-01 2.46053711e-01] [-1.38048053e+00 8.40454325e-02 -2.88271278e-01 ... -6.99485183e-01 -2.42290705e-01 -6.39471114e-01] [-7.35770404e-01 3.53875518e-01 -9.52517986e-01 ... 2.31636148e-02 -7.51836896e-01 -1.26110047e-01]] [[-2.68103004e-01 1.40415970e-02 2.20726550e-01 ... 1.11115528e-02 -9.18260276e-01 4.79391336e-01] [ 1.06003463e+00 -1.20366447e-01 9.30391967e-01 ... -3.07092011e-01 -7.72034347e-01 5.47582865e-01] [-1.22392893e+00 3.97924483e-01 -2.70578474e-01 ... -6.17744803e-01 6.69032484e-02 5.43021448e-02] ... [-4.94831711e-01 -1.88101694e-01 -1.98502123e-01 ... -5.20151198e-01 7.05964267e-01 -2.05116972e-01] [-1.85217395e-01 -3.74016255e-01 6.01448119e-01 ... 
-1.86892495e-01 -4.51630801e-01 3.99010926e-01] [-1.01580274e+00 2.28503659e-01 2.37418473e-01 ... -9.11211073e-02 4.70570475e-01 -1.84453294e-01]]]] [[[[ 1.89169392e-01 1.85661107e-01 1.17863822e+00 ... -4.99421924e-01 3.50702137e-01 -1.27303690e-01] [ 1.01476169e+00 -6.79304540e-01 -1.28312767e-01 ... 5.38960338e-01 6.02281868e-01 8.30023348e-01] [-1.25510812e-01 6.83656156e-01 1.78058103e-01 ... 3.58768017e-03 2.05868140e-01 3.71879727e-01] ... [ 3.24744016e-01 -3.86780262e-01 -1.33667454e-01 ... 1.58026740e-01 4.26156819e-01 1.24617577e+00] [ 7.19082355e-01 3.03519011e-01 -8.85078013e-01 ... 1.39246237e+00 -5.57024539e-01 4.24753696e-01] [-1.28857946e+00 -3.02202940e-01 2.61816174e-01 ... 7.48349011e-01 -1.45197392e-01 1.72398639e+00]] [[ 9.52725470e-01 1.93430066e-01 -4.90663946e-01 ... -1.14336705e+00 1.10475326e+00 1.40399969e+00] [-2.87305802e-01 1.16822720e+00 -7.09332943e-01 ... -5.79119563e-01 2.26876527e-01 -3.07420492e-01] [ 4.83771116e-01 -8.56532454e-01 -1.09450661e-01 ... -1.64418435e+00 1.08384669e+00 -3.48750889e-01] ... [-7.11393893e-01 -9.55291927e-01 1.82749167e-01 ... 2.22045839e-01 4.67940509e-01 -3.56568515e-01] [ 4.69831139e-01 1.48149550e-01 -1.90595821e-01 ... 4.69275981e-01 4.05083686e-01 2.73843616e-01] [-1.80559933e-01 -9.54467952e-02 3.47357213e-01 ... 7.92821869e-02 -5.57867587e-01 -6.93268955e-01]] [[ 3.10734063e-01 1.68367773e-02 6.21613562e-01 ... 4.87954199e-01 -1.44585752e+00 4.95499879e-01] [ 1.10533595e+00 -1.94343716e-01 7.21099913e-01 ... -4.77193534e-01 -2.54573196e-01 1.93934238e+00] [-8.50700200e-01 1.24524832e+00 -1.62698492e-01 ... 3.96221370e-01 1.20248044e+00 7.87532628e-01] ... [-2.09379077e+00 1.81487262e-01 -1.79422960e-01 ... -1.13613749e+00 -5.90955853e-01 4.50687706e-01] [ 1.66568637e-01 -3.74437541e-01 7.58178771e-01 ... -5.07613242e-01 -6.09486461e-01 -8.70845020e-01] [ 9.49978590e-01 7.46819437e-01 -1.11172831e+00 ... 4.44315255e-01 -1.21864891e+00 2.04100050e-02]] ... 
[[ 2.80620426e-01 -8.10906112e-01 -3.47735226e-01 ... 1.36376500e-01 -5.01174510e-01 -5.75119078e-01] [ 4.11007494e-01 -1.58732128e+00 4.44038570e-01 ... -2.86744535e-01 -9.70100224e-01 -1.47809759e-01] [-1.66920173e+00 -5.22227352e-03 -9.41399872e-01 ... -1.86184204e+00 1.55971634e+00 -2.70797670e-01] ... [-8.56336057e-01 -3.06586444e-01 8.95705104e-01 ... -1.52157164e+00 1.51266849e+00 -1.00735211e+00] [-6.46875143e-01 3.13695192e-01 -1.93573967e-01 ... -1.10034287e-01 -5.50062954e-01 5.46186924e-01] [-2.56586432e-01 2.64039665e-01 5.52995384e-01 ... 2.05065355e-01 8.99621904e-01 -2.18578592e-01]] [[-7.71255076e-01 6.09008908e-01 1.25356030e+00 ... 5.42369723e-01 -1.22344053e+00 -6.06182635e-01] [-6.18700325e-01 1.62163866e+00 -2.40369618e-01 ... 5.57908773e-01 -3.50289017e-01 -1.10224152e+00] [ 4.44035940e-02 6.27177894e-01 -4.55016822e-01 ... 1.80248886e-01 7.40560144e-02 2.08973980e+00] ... [-7.00505257e-01 -2.27949753e-01 -1.26864719e+00 ... -3.75642702e-02 -5.98721981e-01 6.26817703e-01] [-4.84308183e-01 5.78985393e-01 9.69296396e-01 ... 2.36217096e-01 -7.55030870e-01 -9.90136147e-01] [-1.62036970e-01 -6.86639965e-01 6.79027319e-01 ... 4.55226481e-01 2.22232789e-01 5.46190798e-01]] [[ 2.81475872e-01 -1.05543280e+00 1.12872064e-01 ... 6.06187999e-01 6.62081301e-01 7.49458492e-01] [ 1.20181656e+00 8.32071543e-01 5.14193654e-01 ... -4.72910404e-01 -1.90287128e-01 8.23000908e-01] [-3.75004023e-01 4.38156039e-01 -2.27139071e-01 ... 8.06548357e-01 -7.95791924e-01 8.75623763e-01] ... [ 6.02344751e-01 -3.47923450e-02 -4.66467202e-01 ... -2.39942491e-01 -3.11190158e-01 -6.33918643e-01] [ 2.68217236e-01 2.78513640e-01 -8.59387964e-02 ... 8.06636214e-01 4.66030002e-01 -1.93336248e-01] [-1.61497191e-01 3.16062480e-01 -4.97892648e-01 ... 9.75215673e-01 9.84822735e-02 -6.21379793e-01]]] [[[-4.56763476e-01 6.62441730e-01 4.29456294e-01 ... 2.09708929e+00 1.78851366e-01 1.45217788e+00] [ 5.25388956e-01 -1.78076029e-01 7.42555976e-01 ... 
-7.07391143e-01 -2.15586972e+00 3.66705823e+00] [-4.18635756e-02 -2.97647309e+00 -8.04643333e-01 ... 6.49717078e-02 1.01718116e+00 -7.51849890e-01] ... [ 1.49390233e+00 -5.01931906e-01 -1.20246045e-01 ... 2.64090943e+00 2.70011497e+00 -2.35496730e-01] [ 9.30581033e-01 1.82072973e+00 -7.07030535e-01 ... 4.80686933e-01 -2.46492624e+00 -1.34267724e+00] [-1.85889769e+00 1.69883430e+00 -4.97323126e-01 ... 6.26509964e-01 3.11511308e-01 -3.55945110e-01]] [[ 2.49859548e+00 7.85145044e-01 -3.25919271e+00 ... 7.19654739e-01 -1.73949850e+00 -2.45377612e+00] [-1.39258966e-01 1.74703765e+00 -1.90541014e-01 ... 1.09624934e+00 -1.54503000e+00 -2.66783267e-01] [-1.25549972e-01 2.41663170e+00 1.48079526e+00 ... -6.25820458e-01 -3.18993807e+00 2.78761148e-01] ... [-2.62184560e-01 1.86532810e-01 -3.21784019e-01 ... -6.27580881e-01 4.91593957e-01 -2.67305422e+00] [-1.29873706e-02 5.71236014e-01 -1.25188673e+00 ... -9.99734178e-02 1.47307754e+00 -4.34384727e+00] [-1.64998993e-01 1.74058020e+00 3.13640982e-01 ... -1.31669390e+00 1.10091174e+00 7.04483211e-01]] [[ 1.19364858e+00 1.67471564e+00 4.73907322e-01 ... 1.71992195e+00 2.49384785e+00 -5.68875730e-01] [-9.60233331e-01 2.48692083e+00 6.55951977e-01 ... -6.30146086e-01 1.34171891e+00 2.00506663e+00] [-1.06837936e-01 1.90440726e+00 -2.27191091e+00 ... -7.51853526e-01 2.64345616e-01 2.88978726e-01] ... [ 1.34705985e+00 6.53862000e-01 -1.30634773e+00 ... -9.83413875e-01 -2.15444779e+00 -2.14440846e+00] [ 1.20757926e+00 -3.26049834e-01 -5.97616374e-01 ... 9.88478586e-02 1.81817317e+00 -6.39806390e-01] [ 6.19989753e-01 -3.48421884e+00 4.67340559e-01 ... 2.63878679e+00 8.28693926e-01 -1.71657300e+00]] ... [[ 5.12112617e-01 2.17369580e+00 1.76898885e+00 ... 1.71308851e+00 -9.07185435e-01 -1.65660846e+00] [ 1.41439772e+00 1.36388862e+00 -2.63558805e-01 ... 5.80122411e-01 -1.37652290e+00 -4.60953712e-01] [ 3.47924143e-01 2.43440652e+00 6.37560487e-01 ... -5.89392073e-02 1.17829323e+00 -9.68203366e-01] ... 
[ 5.92074156e-01 3.04332614e-01 5.58784194e-02 ... -1.87843010e-01 -2.11124253e+00 1.20696507e-01] [ 4.43173647e-01 -2.12150073e+00 1.09666836e+00 ... 1.26174057e+00 -1.38707805e+00 3.98149550e-01] [ 1.72076821e-01 -2.06731415e+00 -4.86497432e-01 ... 5.38395979e-02 -1.95562017e+00 -1.47104934e-01]] [[ 1.91311026e+00 1.33773482e+00 2.27351278e-01 ... -3.89697462e-01 -2.25627065e-01 3.90341330e+00] [-2.89060283e+00 -1.29614127e+00 2.90503055e-01 ... -3.54705788e-02 3.90793175e-01 1.14322686e+00] [-1.44922614e+00 -2.34862477e-01 5.68504632e-01 ... 8.61400515e-02 -1.91931868e+00 3.93944770e-01] ... [ 3.10894537e+00 6.27565607e-02 1.99449301e+00 ... -1.10278094e+00 2.40891725e-01 6.16872132e-01] [ 1.07912433e+00 -1.16896057e+00 -2.26574111e+00 ... -9.52068388e-01 -1.25858569e+00 -9.42999899e-01] [-3.53330187e-02 1.41267610e+00 1.49651492e+00 ... 3.14042300e-01 -7.57665098e-01 1.99643338e+00]] [[ 5.97356677e-01 8.65566850e-01 1.06242502e+00 ... -1.79400995e-01 -3.45452261e+00 2.42814749e-01] [ 1.66929018e+00 -1.21861160e+00 2.10423708e+00 ... 9.01582778e-01 -2.23138049e-01 1.86969995e+00] [-1.62180662e+00 -1.22624779e+00 1.26422870e+00 ... 2.02191997e+00 2.13491750e+00 -8.46415401e-01] ... [ 2.91081309e+00 -3.45268771e-02 -3.95915173e-02 ... 3.03304434e+00 -7.94602275e-01 2.44257107e-01] [-6.26590431e-01 2.26983142e+00 -2.47463882e-01 ... 1.54372811e+00 -1.83566916e+00 1.43020141e+00] [ 1.30182195e+00 1.42793441e+00 2.17894459e+00 ... 9.10294533e-01 1.24361634e+00 -1.13349748e+00]]] [[[ 1.44063935e-01 -1.01633504e-01 -2.16242433e-01 ... -1.78502068e-01 1.99331462e-01 -2.47384489e-01] [ 6.00258559e-02 3.59700322e-01 -9.37332958e-02 ... 1.46667600e-01 3.94074351e-01 4.19095039e-01] [ 3.54310751e-01 1.24392904e-01 -7.15321302e-03 ... 1.94996715e-01 -3.16470951e-01 -3.08730155e-01] ... [ 1.13117628e-01 3.36588621e-01 2.21097972e-02 ... -2.73780733e-01 -2.09727767e-03 -3.73037726e-01] [ 6.93801463e-01 5.02574891e-02 6.80701137e-01 ... 
2.82870859e-01 -1.81650832e-01 2.65690219e-02] [-1.35811090e-01 -4.55716938e-01 -1.55694420e-02 ... -7.43108094e-02 2.27593228e-01 2.13904992e-01]] [[ 3.40535820e-01 -2.62486190e-01 -2.64614314e-01 ... 4.45298135e-01 -7.65000582e-02 -1.76919792e-02] [-1.83058366e-01 -3.28232884e-01 4.47651595e-01 ... 6.01257682e-01 -2.20208362e-01 3.72190438e-02] [ 1.80874184e-01 -4.20554519e-01 -2.26374060e-01 ... 3.15506347e-02 -1.15403175e-01 1.27633184e-01] ... [ 3.83026719e-01 4.74712923e-02 3.55240613e-01 ... -3.34343016e-01 -3.19597751e-01 2.82238275e-02] [-2.37990752e-01 8.85385498e-02 -5.25180876e-01 ... -2.08295435e-01 2.61431396e-01 2.17019662e-01] [ 3.42619956e-01 -4.68033195e-01 -1.67903528e-01 ... 6.98197633e-02 3.31913650e-01 2.87682414e-01]] [[-1.59285054e-01 1.08023308e-01 -8.45062956e-02 ... -5.17341316e-01 -2.51862794e-01 -1.70523703e-01] [ 8.10078532e-02 5.42287469e-01 -6.48192465e-02 ... 7.73957297e-02 -1.08866625e-01 -5.89728355e-02] [ 2.52064407e-01 3.61402154e-01 -2.34850079e-01 ... 1.77356616e-01 2.22469047e-01 -9.84767824e-02] ... [-2.99714029e-01 -1.16480410e-01 -9.02362466e-02 ... 8.95878077e-02 1.75723270e-01 -2.02340811e-01] [ 4.46220309e-01 -1.36422560e-01 5.12089133e-01 ... -6.96314946e-02 1.20687209e-01 -3.70899797e-03] [ 3.31463695e-01 9.89920124e-02 2.27871891e-02 ... 7.35948086e-02 4.19216812e-01 3.32255840e-01]] ... [[ 4.96724108e-03 -6.25545457e-02 6.02558963e-02 ... -9.48606431e-02 1.72944441e-01 -2.40091998e-02] [-2.91148741e-02 3.74473363e-01 1.99544504e-02 ... 3.00954521e-01 -3.22281003e-01 1.23853371e-01] [ 6.21742785e-01 -9.93969068e-02 -3.45332056e-01 ... -1.13853291e-01 1.69271588e-01 -2.62778133e-01] ... [ 7.12049156e-02 2.24067107e-01 9.40150917e-02 ... 1.60999447e-02 -2.02888802e-01 -1.17709853e-01] [-2.04099640e-02 3.02805789e-02 -5.43773472e-01 ... -1.08647486e-03 7.02602789e-02 1.62537605e-01] [-4.03759778e-01 -9.16442722e-02 4.71493125e-01 ... 
-2.20202848e-01 4.43309136e-02 -1.20194137e-01]] [[-5.75220942e-01 -1.10341594e-01 3.38196158e-02 ... 1.46246478e-01 -1.48814276e-01 -3.06489170e-01] [ 3.36742401e-01 -1.68977544e-01 -5.22846654e-02 ... 3.56085062e-01 1.72617897e-01 2.22939014e-01] [ 4.14268792e-01 2.02363968e-01 6.62931651e-02 ... 5.44377193e-02 -9.23177898e-02 -1.80104464e-01] ... [-5.99887874e-03 -2.29856167e-02 -4.26333368e-01 ... -1.94265619e-01 -1.43195182e-01 -9.09522548e-02] [-5.44846617e-02 -3.39915007e-01 -3.70237231e-01 ... -4.49282438e-01 3.11246943e-02 -5.37195951e-02] [ 1.91088498e-01 -5.85085988e-01 -2.83777658e-02 ... -4.30480629e-01 -2.45981533e-02 1.75935030e-01]] [[-2.11668298e-01 7.94476643e-02 -1.20746106e-01 ... 3.49902421e-01 -3.35600197e-01 -3.78629535e-01] [ 3.61228585e-01 1.78585529e-01 3.42651457e-01 ... -4.49660532e-02 1.46128565e-01 -4.50053178e-02] [ 6.88128993e-02 -2.84435451e-01 2.82573272e-02 ... -1.02630429e-01 1.25684147e-03 -2.21492320e-01] ... [ 1.70271933e-01 -8.16316053e-04 -1.68793008e-01 ... -5.45806408e-01 -1.88032553e-01 -3.32702368e-01] [ 3.49642783e-01 -4.26144361e-01 1.10583849e-01 ... 3.05590834e-02 -2.22658902e-01 1.93421260e-01] [-3.43802661e-01 4.06695046e-02 4.31444138e-01 ... -3.01085562e-01 2.65730694e-02 -1.10587656e-01]]] [[[-2.16513181e+00 -5.38333714e-01 3.01630586e-01 ... -7.78973877e-01 7.75483549e-01 -2.22180343e+00] [-1.06461644e+00 -1.37771845e+00 1.00670421e+00 ... -3.20000410e-01 -2.20907047e-01 1.32433093e+00] [-1.98508060e+00 1.16827822e+00 3.06393683e-01 ... 2.75852847e+00 -8.46975982e-01 1.51804388e-01] ... [-1.95968866e+00 -1.10501063e+00 1.66996765e+00 ... -5.10751188e-01 -2.31307197e+00 1.87362063e+00] [-8.14337909e-01 -2.16031766e+00 -9.67202112e-02 ... 8.50120485e-02 1.15058327e+00 -5.08941352e-01] [-1.84616613e+00 2.64092684e+00 4.60897416e-01 ... -6.76003218e-01 -5.53648710e-01 -1.71297520e-01]] [[ 2.72524692e-02 -1.38604367e+00 1.28888464e+00 ... 
2.41126347e+00 4.74178314e-01 8.63551557e-01] [ 2.50154763e-01 -1.38034058e+00 2.64708138e+00 ... 2.22514287e-01 1.44719768e+00 -8.63195419e-01] [-1.92487344e-01 1.49319375e+00 1.92910624e+00 ... -1.57539904e+00 -4.58341539e-01 -1.79973078e+00] ... [ 1.92419016e+00 -2.30310369e+00 8.77980649e-01 ... -7.46657610e-01 -1.08951318e+00 -1.93858171e+00] [-3.02667356e+00 5.83625734e-01 -9.42788363e-01 ... 7.06132233e-01 -1.15314221e+00 -5.83671927e-01] [-3.65726280e+00 -6.00993216e-01 9.69668090e-01 ... -4.29325581e-01 5.35680115e-01 -1.16975379e+00]] [[-2.78493911e-01 1.57928884e+00 -6.91478774e-02 ... -2.71877003e+00 -3.62555414e-01 -1.15226877e+00] [-8.09864521e-01 4.73974310e-02 -2.13804066e-01 ... 1.83901286e+00 2.89981556e+00 -2.57635295e-01] [ 1.13114893e+00 1.91152304e-01 1.30891371e+00 ... 1.34300244e+00 2.10817528e+00 4.38018501e-01] ... [ 2.09804463e+00 -2.32132792e+00 -1.31233859e+00 ... -2.24773765e-01 -1.53266281e-01 1.11120379e+00] [ 1.25964129e+00 -4.47279453e-01 -7.65471160e-01 ... 1.45245016e+00 1.31338501e+00 -2.75981784e-01] [ 1.88788497e+00 3.78441930e-01 -1.57110488e+00 ... -1.67141449e+00 -9.78417933e-01 4.52178419e-01]] ... [[ 2.56514847e-01 -3.79461825e-01 -1.53477287e+00 ... -1.21050477e+00 5.48166037e-01 -1.04663408e+00] [-1.94668591e+00 1.55953348e+00 1.67865205e+00 ... -1.55492377e+00 1.65243399e+00 -1.51767254e+00] [ 1.11831999e+00 -4.60843071e-02 -2.75323462e+00 ... -1.53511286e+00 8.38581994e-02 2.84057379e+00] ... [-7.45943069e-01 1.03039777e+00 2.84521174e+00 ... -2.45114303e+00 1.70890585e-01 -2.28764415e+00] [ 8.30452085e-01 1.62482715e+00 6.57276511e-01 ... -5.40429831e-01 7.98324645e-01 -1.65877092e+00] [-7.14125216e-01 -1.51202857e+00 -1.81046343e+00 ... 1.02652061e+00 -3.80711126e+00 -1.59300053e+00]] [[ 1.63630688e+00 2.97525978e+00 -9.04742420e-01 ... -1.82159305e-01 2.65327901e-01 -1.46482289e+00] [-1.41004002e+00 1.63140047e+00 1.08156800e+00 ... 
1.79086483e+00 2.27563456e-01 -4.44171485e-04] [ 7.09217370e-01 -1.71582294e+00 7.62163103e-01 ... -6.93672299e-01 3.10976362e+00 -1.20173164e-01] ... [-1.27970231e+00 -5.33394754e-01 6.41667902e-01 ... 7.79537931e-02 8.07312310e-01 -2.83968210e+00] [-2.17024422e+00 -1.84733164e+00 -3.49445850e-01 ... -8.47691119e-01 -7.81425387e-02 -9.02759194e-01] [-3.13088584e+00 -7.97661126e-01 1.43907797e+00 ... -3.05187440e+00 -2.46009365e-01 8.79222572e-01]] [[-3.06321345e-02 6.46191061e-01 -5.12796402e-01 ... -4.28600341e-01 -2.18207741e+00 -3.77272773e+00] [-5.35892010e-01 -7.00637996e-02 -1.70509422e+00 ... 1.32366693e+00 -1.15598595e+00 2.91315532e+00] [ 1.41676009e-01 1.12386477e+00 -1.19911969e+00 ... 7.13164389e-01 1.27060592e-01 3.75702214e+00] ... [ 1.25748551e+00 2.73219109e+00 5.97547770e-01 ... -1.62004137e+00 -6.95735753e-01 -2.18219534e-01] [ 1.09698498e+00 3.64783049e-01 -4.49006587e-01 ... 5.01529910e-02 2.28551602e+00 1.25888443e+00] [-8.45705211e-01 1.38311302e-02 -7.23558366e-02 ... -1.57306254e+00 1.13614869e+00 1.08481562e+00]]] [[[-3.34054028e-04 -1.52650073e-01 -9.06311348e-02 ... 1.40260205e-01 4.84040193e-03 1.00597642e-01] [ 6.98590502e-02 -1.15558840e-01 -1.14099883e-01 ... 1.19551839e-02 -2.11450770e-01 3.60763744e-02] [ 3.61024216e-02 4.66209538e-02 1.53755248e-01 ... -2.80491680e-01 5.13752289e-02 3.99980322e-02] ... [ 4.13546637e-02 3.79620939e-02 -2.13817805e-02 ... 7.16051236e-02 4.48444393e-03 -4.53461148e-02] [-1.26004264e-01 -1.55095279e-01 -1.34644046e-01 ... 9.06271115e-02 7.35357478e-02 1.17776841e-01] [ 1.75145380e-02 7.79606551e-02 4.78481874e-02 ... 1.44403294e-01 1.76168289e-02 1.18299916e-01]] [[ 3.48270551e-05 -2.58148074e-01 7.51243159e-02 ... -8.74551609e-02 -5.02960347e-02 -5.04217744e-02] [-5.73486695e-03 5.69986664e-02 -7.76141062e-02 ... 3.33772339e-02 -2.72054851e-01 -2.33068205e-02] [ 3.00828535e-02 4.94073592e-02 9.75520685e-02 ... -3.48059982e-02 -6.20976724e-02 -5.58517203e-02] ... 
[-3.75535078e-02 6.37973323e-02 -5.81665635e-02 ... 1.45462394e-01 4.32112738e-02 -3.05795491e-01] [ 1.38923734e-01 1.24403790e-01 9.29222722e-03 ... -1.07368574e-01 1.99495792e-01 -2.76008584e-02] [-5.59530109e-02 1.42880355e-03 -5.74393235e-02 ... -6.02829345e-02 3.94136012e-02 7.85720050e-02]] [[-3.85528207e-02 -5.85628599e-02 -4.89194095e-02 ... -1.02351598e-01 1.10378772e-01 -1.09371707e-01] [ 5.70738129e-02 9.03567746e-02 2.44430706e-01 ... 1.74640678e-02 -2.23270699e-01 5.26103899e-02] [ 7.46664777e-02 -3.37941721e-02 2.00929139e-02 ... -1.30508423e-01 -7.91502446e-02 1.23869710e-01] ... [-1.25570297e-01 -1.88674331e-01 -1.07024275e-01 ... -1.10865876e-01 -1.09968953e-01 -1.71325810e-03] [-1.31807581e-01 2.37722978e-01 -2.25529876e-02 ... 2.95846369e-02 6.70812801e-02 8.52691084e-02] [ 1.99242085e-01 1.29321059e-02 3.33939120e-02 ... 1.55520752e-01 5.70190735e-02 -6.29151016e-02]] ... [[ 1.33476630e-01 1.09534547e-01 1.32164523e-01 ... -1.31110802e-01 -1.14717402e-01 -2.26155683e-01] [-1.69763416e-01 -8.46809372e-02 -1.66163236e-01 ... 1.05302714e-01 1.68200269e-01 -1.91596553e-01] [-8.19930509e-02 -3.50717828e-02 2.36057937e-01 ... -1.36920868e-03 3.85621674e-02 -6.02476150e-02] ... [ 6.92601278e-02 -3.00952531e-02 -1.21739032e-02 ... 6.81699514e-02 9.52980965e-02 -6.07430823e-02] [-1.60968021e-01 -8.67457390e-02 -5.63188791e-02 ... 2.44496651e-02 2.41322108e-02 -1.19934738e-01] [ 2.00829148e-01 3.41220759e-02 1.25776112e-01 ... 1.35606781e-01 -9.31733176e-02 6.37714416e-02]] [[-1.01767220e-01 2.44941667e-01 1.58528805e-01 ... 1.29563168e-01 -1.16537973e-01 -1.47779182e-01] [-1.32250011e-01 6.15830086e-02 -9.62545201e-02 ... -1.42105848e-01 8.92149732e-02 -1.51316077e-01] [-1.61799565e-02 -7.18643144e-02 2.42217064e-01 ... 1.03785463e-01 3.07508148e-02 -3.50643545e-02] ... [-6.94057206e-03 1.25685766e-01 1.09886050e-01 ... 2.05076307e-01 2.45806992e-01 -1.41967274e-03] [ 1.92002188e-02 -4.34497409e-02 3.03876996e-02 ... 
-6.63254410e-02 -9.32460837e-03 6.65315315e-02] [-1.59631759e-01 6.17412524e-03 -5.32009266e-02 ... 4.73689958e-02 -1.15444642e-02 9.78977308e-02]] [[-1.41934454e-01 -3.78629491e-02 -6.19562678e-02 ... 2.96499841e-02 -4.54058759e-02 -1.35410437e-02] [ 9.00455713e-02 -1.35219824e-02 -1.40150383e-01 ... -7.60454908e-02 -1.06441684e-01 -5.30322688e-03] [ 1.20739387e-02 -1.51867568e-01 -6.32175282e-02 ... -1.63455516e-01 4.62816693e-02 -5.56575023e-02] ... [ 3.60443071e-02 1.17474109e-01 -1.10971011e-01 ... 1.01003423e-01 1.16889909e-01 -6.92961887e-02] [-2.52339602e-01 1.45644829e-01 1.76649451e-01 ... 6.90576434e-02 -3.47407274e-02 -4.08846922e-02] [ 9.51837003e-02 9.71261263e-02 -1.82140339e-02 ... 1.20569974e-01 7.41439462e-02 7.86901265e-02]]] [[[ 1.66266486e-01 -6.46603286e-01 3.93948019e-01 ... -7.50469685e-01 1.61012024e-01 6.62716508e-01] [-9.34423029e-01 -1.49174184e-01 5.33666983e-02 ... -3.74882221e-01 4.11916226e-01 -3.24860960e-01] [ 7.81416416e-01 8.49715099e-02 2.96036154e-01 ... 7.65703917e-02 3.90906096e-01 -1.18326950e+00] ... [-2.56998748e-01 1.50823325e-01 -7.75286257e-01 ... -2.02483192e-01 -1.03577387e+00 -2.92945385e-01] [ 7.86139071e-01 1.02006406e-01 4.64789085e-02 ... -9.36703801e-01 -7.09777892e-01 4.63499516e-01] [-5.84080935e-01 -1.23386657e+00 -5.66704571e-01 ... 8.90962660e-01 -6.17971309e-02 6.22628689e-01]] [[ 5.20451903e-01 6.13868833e-02 1.10465658e+00 ... 7.31942773e-01 1.94693685e-01 9.79503989e-01] [ 6.36430144e-01 6.53156579e-01 2.15469718e-01 ... -2.67014086e-01 -1.62121892e-01 -1.84598923e-01] [ 8.01114514e-02 -7.51303956e-02 2.66495705e-01 ... -1.63286829e+00 -3.12370688e-01 -3.23930830e-01] ... [-1.01003706e+00 9.04971063e-02 -2.13199973e-01 ... 3.60740185e-01 -3.44437957e-01 -1.55338332e-01] [-6.29360616e-01 6.94066733e-02 -3.25734437e-01 ... 2.11457744e-01 -6.93071723e-01 -3.86501729e-01] [ 7.46034011e-02 2.64967412e-01 -1.51096821e-01 ... 
1.71161592e-01 7.08753407e-01 1.73905179e-01]] [[-6.77212119e-01 4.72485900e-01 6.26043260e-01 ... 3.90644819e-01 1.80753797e-01 7.39085197e-01] [ 6.80166949e-03 5.25715113e-01 -1.12561159e-01 ... 1.11315930e+00 5.99173307e-01 -6.30215228e-01] [ 3.38751882e-01 -6.96350455e-01 4.75606173e-01 ... 2.73238420e-01 1.33214772e-01 4.42913085e-01] ... [ 5.02897263e-01 -5.01234531e-02 9.98179335e-03 ... -7.20418274e-01 5.16406894e-01 5.91816120e-02] [ 1.02421272e+00 -7.49325827e-02 3.05112660e-01 ... -8.38308752e-01 -1.10006094e+00 1.70969784e-01] [ 2.86947876e-01 -1.08773613e+00 1.01910222e+00 ... 8.95995125e-02 -2.29050204e-01 -1.40998378e-01]] ... [[ 3.36035222e-01 -3.85152906e-01 6.47189498e-01 ... -6.15135849e-01 -9.13224876e-01 -5.47440171e-01] [ 2.39053667e-01 -3.48369688e-01 -2.25450639e-02 ... 1.21951036e-01 3.68590087e-01 8.06545317e-01] [ 6.33983672e-01 -1.25580561e+00 1.19168669e-01 ... 7.20297396e-01 -1.95622429e-01 -9.43767548e-01] ... [-7.09457546e-02 -4.73367065e-01 9.80112016e-01 ... -1.80759564e-01 -4.98642713e-01 -1.00908089e+00] [-9.10863876e-01 -2.03323290e-01 1.90120175e-01 ... 2.79281080e-01 5.11391521e-01 7.26735890e-02] [ 5.27893186e-01 4.13033575e-01 9.13793296e-02 ... 1.25080049e-01 -2.46013880e-01 1.28467858e-01]] [[ 2.88891464e-01 9.03148472e-01 -5.53261817e-01 ... 2.07966901e-02 8.86336923e-01 -1.68160945e-02] [-7.94117153e-01 -1.05076838e+00 3.05858791e-01 ... 2.43825182e-01 2.11013943e-01 -1.44220501e-01] [ 1.67966131e-02 -6.34954125e-02 7.40797967e-02 ... -1.90005213e-01 6.23902023e-01 5.22201121e-01] ... [-5.29119790e-01 -1.69753939e-01 7.68420994e-01 ... 2.18671873e-01 5.00670314e-01 6.95433021e-02] [ 1.14866745e+00 1.30047393e+00 -5.30903101e-01 ... 2.07206726e-01 1.58770114e-01 4.82432753e-01] [ 1.13903260e+00 -5.20223975e-01 -1.83755279e-01 ... 5.28842919e-02 5.91740131e-01 -3.90828401e-01]] [[ 7.71741986e-01 5.73775947e-01 -5.22945598e-02 ... 
-1.93002313e-01 -1.10165966e+00 -1.99514821e-01] [-8.49266768e-01 3.98926511e-02 8.86440873e-02 ... 6.95635080e-02 2.60646999e-01 3.78469974e-01] [-6.85994208e-01 4.27575856e-01 1.26749992e-01 ... 8.75743926e-01 -1.80290997e-01 1.10038921e-01] ... [ 6.61052465e-01 -8.87049854e-01 -7.18925074e-02 ... -1.98381290e-01 -4.42129940e-01 3.98821384e-01] [ 8.10302645e-02 -3.60087365e-01 -7.23138869e-01 ... -1.59622061e+00 3.62981886e-01 1.63822219e-01] [-2.21625879e-01 2.24678650e-01 -8.16249073e-01 ... -1.06590867e+00 -4.90218610e-01 -2.21638143e-01]]]] [[[[-7.76867568e-01 -1.49461091e-01 -1.07588422e+00 ... -2.67138869e-01 8.36089671e-01 -1.15704966e+00] [ 4.89121079e-01 -2.44361743e-01 8.94500837e-02 ... -2.17492692e-02 -2.31291413e-01 1.14535570e-01] [ 1.73859689e-02 2.44601464e+00 1.49343455e+00 ... 7.05854475e-01 2.99060911e-01 2.38493457e-01] ... [-8.57911706e-02 -1.35425448e+00 8.82653147e-02 ... -2.94461042e-01 -5.01193702e-01 -2.53514111e-01] [-3.80099118e-01 8.62267852e-01 -5.04650593e-01 ... 5.63585877e-01 -9.72701728e-01 1.21979523e+00] [ 1.19677603e+00 -5.10643125e-01 9.35928285e-01 ... -1.10535061e+00 -2.67434418e-01 -6.11511528e-01]] [[-4.77705836e-01 -2.07125232e-01 -3.63946915e-01 ... 7.90555358e-01 -4.22685057e-01 6.71319306e-01] [ 1.17768180e+00 2.45829448e-02 -1.06232680e-01 ... -5.35898745e-01 3.06365699e-01 -9.50442970e-01] [ 8.90868843e-01 1.56746066e+00 7.91155756e-01 ... 1.23936519e-01 -3.68569076e-01 4.32487071e-01] ... [ 5.85000992e-01 -2.55336851e-01 5.66093996e-02 ... -1.68784663e-01 -6.08632922e-01 9.51686740e-01] [-2.38064647e-01 1.11463852e-02 2.50203490e-01 ... -5.96196532e-01 9.17004198e-02 -4.87988740e-02] [ 2.15022042e-01 -5.91572046e-01 2.30985358e-01 ... -8.03700864e-01 1.67493248e+00 3.51989597e-01]] [[-3.21565002e-01 8.93597007e-01 1.10654950e+00 ... -9.71962392e-01 1.74688354e-01 1.36467636e-01] [-1.80911803e+00 -4.29514572e-02 5.51387429e-01 ... 
-1.60829484e+00 5.42833745e-01 4.14556079e-02] [ 1.29671693e-01 -5.93254745e-01 2.95315713e-01 ... 7.18320906e-01 6.33788049e-01 3.51657212e-01] ... [-4.34409201e-01 2.46696979e-01 -2.31486380e-01 ... 2.28567109e-01 6.85910344e-01 -4.58457500e-01] [-3.90859276e-01 5.95433339e-02 6.95147663e-02 ... 2.47920290e-01 -1.19610138e-01 3.98393929e-01] [-3.83363634e-01 3.62227947e-01 -9.64695692e-01 ... 1.72548503e-01 9.24112916e-01 -1.69019893e-01]] ... [[ 2.19454959e-01 -4.47940677e-01 -4.66558337e-02 ... -6.21395409e-01 6.58307672e-01 -1.95265666e-01] [ 8.74789119e-01 8.46405447e-01 2.61052847e-01 ... 7.89137781e-01 1.13341665e+00 6.76243484e-01] [-1.24559283e-01 1.03617407e-01 -1.46709847e+00 ... -4.19577817e-03 -3.89183342e-01 -1.98779643e-01] ... [-1.48566827e-01 -1.10812819e+00 -7.65262961e-01 ... 1.62674934e-01 3.69245261e-01 -7.98812807e-01] [ 2.11120754e-01 -3.49458069e-01 2.54254192e-01 ... 1.11741386e-01 1.28000820e+00 1.04640335e-01] [-1.27029443e+00 4.64804351e-01 1.53607965e-01 ... -5.23511767e-01 -2.54383624e-01 -2.99692661e-01]] [[-2.45425344e-01 7.52463400e-01 9.33622718e-01 ... 4.12223935e-01 6.36579096e-01 1.00372815e+00] [-1.38197815e+00 2.47014850e-01 -3.31919044e-01 ... 8.47247839e-02 1.13508463e-01 3.24968606e-01] [-3.51036102e-01 7.45609522e-01 -9.85286832e-01 ... -9.49182153e-01 -2.27602571e-02 4.32007521e-01] ... [ 7.32419252e-01 6.67743087e-02 7.09576428e-01 ... -7.02799559e-01 -9.70357835e-01 -3.94935012e-01] [-1.70718837e+00 -1.07662845e+00 8.16854119e-01 ... -4.66416806e-01 -3.42312098e-01 3.55607063e-01] [-8.79023969e-01 -3.42705101e-01 6.51040316e-01 ... -1.41596913e+00 -9.26234245e-01 1.42661408e-01]] [[-2.51076877e-01 -7.25624681e-01 7.49621630e-01 ... -1.04664855e-01 -6.53030217e-01 -1.13134600e-01] [-5.77080727e-01 4.68537003e-01 2.00949478e+00 ... -7.09992826e-01 4.10964161e-01 1.32833958e+00] [-1.09999396e-01 5.42638600e-01 7.77016401e-01 ... 1.19192693e-02 3.16509038e-01 7.11749554e-01] ... 
[ 1.86590242e+00 -6.44665778e-01 -6.57517537e-02 ... -2.54975498e-01 4.00313944e-01 -2.01939523e-01] [ 2.46773168e-01 -5.37134111e-01 9.62298572e-01 ... -7.18434036e-01 1.02950037e-01 -7.75193810e-01] [-2.43370831e-01 -8.98665786e-01 -1.15298554e-01 ... -6.35588840e-02 -2.75173664e-01 1.03019275e-01]]] [[[-1.78314948e+00 9.98818159e-01 -2.98480093e-01 ... -3.24536800e-01 7.29912639e-01 1.20834872e-01] [-1.50641847e+00 -1.77359104e+00 -1.75105834e+00 ... 1.50539443e-01 1.08944952e+00 -9.06717539e-01] [-5.58712423e-01 1.22838545e+00 -1.08823586e+00 ... -1.16717353e-01 1.11508417e+00 3.50661659e+00] ... [ 3.43754172e-01 1.91224003e+00 -8.19253504e-01 ... -2.39889216e+00 -1.88758397e+00 -4.51990873e-01] [ 1.14236593e+00 1.19826007e+00 -1.17279685e+00 ... -1.28239572e+00 2.03599882e+00 -1.89539641e-01] [ 2.26123500e+00 -3.18092853e-01 7.42411315e-02 ... 1.29158640e+00 -6.85318589e-01 5.41498780e-01]] [[-1.68055904e+00 1.42606843e+00 2.56124783e+00 ... -2.33991951e-01 -5.28033435e-01 -1.37223795e-01] [ 7.92315423e-01 -3.40361387e-01 -3.22115135e+00 ... -1.71775639e+00 3.73048455e-01 5.80021143e-01] [-2.30189347e+00 -1.09221792e+00 -6.18986368e-01 ... -4.34471071e-01 -1.03795028e+00 -4.74677563e-01] ... [ 1.53477108e+00 -9.64893997e-01 -3.18374205e+00 ... -1.99303412e+00 -2.34733939e+00 2.40062141e+00] [-7.63603806e-01 7.65720785e-01 3.38677669e+00 ... -5.24873972e-01 -3.99376214e-01 2.56045341e+00] [-1.18732488e+00 -1.62116694e+00 2.20052791e+00 ... -1.37570345e+00 1.65655181e-01 3.14656162e+00]] [[ 1.83158064e+00 2.30900884e+00 -1.19481575e+00 ... 2.33141869e-01 1.29317236e+00 -2.13958716e+00] [-4.50622052e-01 -8.37471843e-01 -1.35557485e+00 ... -2.54881120e+00 -1.33485305e+00 5.98239675e-02] [ 6.80330217e-01 -1.20997286e+00 -2.49597430e+00 ... -2.50849873e-01 9.20336246e-01 1.73218623e-01] ... [ 8.74635994e-01 1.05598509e+00 -1.59638739e+00 ... -2.88325191e+00 3.34351718e-01 -2.38714433e+00] [-6.39548540e-01 -1.94567072e+00 3.33327502e-01 ... 
6.67523324e-01 2.52265066e-01 2.38625574e+00] [ 9.16779995e-01 2.16212702e+00 7.13497162e-01 ... 1.05965614e+00 9.00996268e-01 7.94621766e-01]] ... [[ 2.50445604e+00 1.11142218e+00 -2.24441266e+00 ... 7.18760639e-02 -9.09776568e-01 -1.06953013e+00] [-5.18573999e-01 6.15078032e-01 -1.85085690e+00 ... 4.87722129e-01 3.54127169e-01 -1.26395679e+00] [-1.07548583e+00 8.78594875e-01 -2.11197877e+00 ... -2.23358059e+00 1.49748516e+00 -2.67120719e+00] ... [ 5.62110424e-01 9.00653601e-01 6.88542187e-01 ... 2.86270738e+00 1.63226438e+00 1.67918706e+00] [-4.01795842e-02 -1.57011300e-01 -7.78566599e-01 ... 8.19986641e-01 -1.32313633e+00 6.59597576e-01] [-1.91750157e+00 -1.10250556e+00 1.00099421e+00 ... 5.68027377e-01 4.75338101e-01 1.49590325e+00]] [[ 4.14705336e-01 1.90518892e+00 2.70065141e+00 ... 1.39365983e+00 -1.80714458e-01 1.07106590e+00] [ 4.34212506e-01 -2.12329209e-01 1.80460775e+00 ... -1.55916631e+00 2.70162791e-01 -1.64604187e-02] [ 1.20402777e+00 1.14429677e+00 -3.88963401e-01 ... 6.09308720e-01 5.30881993e-02 -1.26541483e+00] ... [ 1.31944346e+00 -6.18428111e-01 3.38911176e-01 ... -5.45936465e-01 -4.16000396e-01 -3.18822527e+00] [-3.63955784e+00 2.71494448e-01 9.67308819e-01 ... -1.19810033e+00 2.55200416e-01 -1.10805476e+00] [-1.79161811e+00 -8.39388847e-01 -1.35250580e+00 ... 1.96741238e-01 1.80801883e-01 -7.63789356e-01]] [[-8.73093724e-01 -2.22516823e+00 -1.14118242e+00 ... 1.93954980e+00 -1.56094861e+00 -1.14769220e+00] [ 1.69271156e-01 -3.85847718e-01 2.63145542e+00 ... 8.19642365e-01 -1.49374163e+00 -1.19341588e+00] [-1.16966140e+00 -3.34261566e-01 -1.03220809e+00 ... -3.75977099e-01 5.90488732e-01 1.42288005e+00] ... [ 1.43288243e+00 1.99026978e+00 -3.89694214e+00 ... 1.04998246e-01 7.67910004e-01 9.53238547e-01] [-8.31227675e-02 -4.57049131e-01 -3.34159076e-01 ... -1.13460147e+00 -1.56702805e+00 -3.02877892e-02] [ 6.70044661e-01 -1.52124494e-01 4.07396019e-01 ... 
-1.69077623e+00 -1.15806472e+00 4.51743335e-01]]] [[[ 2.94098351e-02 1.91589326e-01 -2.96989322e-01 ... 4.70125943e-01 1.46773711e-01 -1.36219915e-02] [ 2.76664257e-01 4.92190152e-01 -1.24650553e-01 ... -5.46432078e-01 1.28388584e-01 1.52007848e-01] [ 1.68053031e-01 2.88878921e-02 -4.13861156e-01 ... -2.90898144e-01 -1.54301316e-01 5.17385975e-02] ... [-1.65648498e-02 -1.68562740e-01 2.55377918e-01 ... 2.11003318e-01 2.15346932e-01 -2.06655607e-01] [ 1.03213824e-01 5.06564498e-01 2.26805627e-01 ... 1.37622029e-01 1.08240515e-01 -2.62782425e-01] [-6.24991506e-02 1.82014517e-02 -1.89242810e-01 ... -1.84586450e-01 1.79993868e-01 2.59309649e-01]] [[-1.20782427e-01 7.51318410e-02 -4.68435317e-01 ... 2.46702507e-01 9.21606123e-02 4.81316060e-01] [ 1.92485183e-01 -2.00797871e-01 -2.58928724e-02 ... 2.73716331e-01 -3.06637108e-01 -1.52807832e-01] [ 1.78789005e-01 -9.62903127e-02 -3.66178423e-01 ... -8.21818039e-03 -2.27791116e-01 8.09741169e-02] ... [ 4.23147902e-02 -1.17576189e-01 -3.57245177e-01 ... -8.47852677e-02 -4.94807482e-01 3.73229027e-01] [ 7.20436350e-02 8.33536908e-02 -6.45839199e-02 ... 5.65890111e-02 1.69932216e-01 1.02712832e-01] [ 1.60524145e-01 1.25520363e-01 -3.33866388e-01 ... -2.50903547e-01 2.31928855e-01 -6.44841909e-01]] [[-1.52327031e-01 3.78057994e-02 1.00578338e-01 ... -5.08531332e-01 -2.44953930e-01 -4.63741183e-01] [-3.83026391e-01 9.41571891e-02 1.96637139e-02 ... -1.52074888e-01 1.67297229e-01 3.62464413e-02] [ 2.43864749e-02 -9.26407799e-02 -9.43159964e-03 ... 1.78139567e-01 7.95146152e-02 -1.55943260e-01] ... [ 1.98737353e-01 -2.93226272e-01 4.52535123e-01 ... 2.42555663e-01 2.15538189e-01 -2.50489656e-02] [ 7.24198595e-02 1.18202589e-01 2.41055891e-01 ... -7.45422766e-02 -1.11852907e-01 -6.17872411e-03] [ 2.73931593e-01 1.84878260e-01 1.93089768e-02 ... -1.39850259e-01 -2.35567719e-01 1.11657925e-01]] ... [[ 2.32270867e-01 -2.02570319e-01 -2.09338114e-01 ... 
1.33176342e-01 8.69275033e-02 9.61391535e-03] [-1.37629107e-01 1.49825603e-01 -2.81067103e-01 ... -8.87976959e-02 1.38262480e-01 -1.22255161e-01] [-7.58765116e-02 8.83496851e-02 -4.99232769e-01 ... 2.74055630e-01 -4.56156582e-02 2.39828020e-01] ... [-6.79625571e-02 3.67057383e-01 3.50569248e-01 ... -2.48142034e-01 -5.93653210e-02 -3.82668048e-01] [ 1.78984646e-02 -5.12629092e-01 -1.72511414e-01 ... -1.55577898e-01 -1.17491089e-01 -4.98931408e-01] [-7.14997530e-01 8.25742334e-02 1.32996097e-01 ... -1.48261562e-01 -1.37414023e-01 1.94877699e-01]] [[ 9.31531042e-02 -3.88681173e-01 1.68177171e-03 ... 1.18379071e-01 -5.79809621e-02 5.00732541e-01] [ 5.66823602e-01 -2.66514450e-01 -1.69809222e-01 ... -2.14259431e-01 -9.65481326e-02 1.95519608e-02] [-1.64331168e-01 9.00047943e-02 -2.09027514e-01 ... -3.16128552e-01 -8.59802067e-02 4.11561668e-01] ... [-1.98620528e-01 -2.34971344e-01 5.47632873e-02 ... 3.76794249e-01 -3.01761508e-01 -1.76070064e-01] [ 2.31918693e-01 4.34440911e-01 -1.34933934e-01 ... 3.65208387e-02 -1.77101240e-01 1.94101915e-01] [ 2.45236885e-02 1.61342204e-01 -8.09747428e-02 ... -7.99800307e-02 4.62649353e-02 1.18345171e-01]] [[ 3.77479225e-01 2.92015634e-02 -3.49290557e-02 ... 1.71860889e-01 -9.06461701e-02 -1.57667994e-01] [-3.06070358e-01 -5.60902715e-01 -4.40216720e-01 ... 2.14283913e-01 6.70578554e-02 -4.03955400e-01] [ 2.71430999e-01 2.98027880e-03 -7.28340670e-02 ... -2.63167977e-01 -4.05090600e-02 -1.21064499e-01] ... [-3.97233605e-01 1.71863556e-01 -2.62789458e-01 ... -5.93750272e-03 2.20319167e-01 1.09815225e-01] [-1.92245752e-01 -4.24080282e-01 -2.26656824e-01 ... 1.95164993e-01 2.23739147e-02 -2.38748237e-01] [-9.66103841e-03 2.69683540e-01 -5.86703755e-02 ... 4.72625382e-02 3.76965851e-01 3.44458759e-01]]] [[[-3.86929482e-01 -3.57446998e-01 -6.68565333e-01 ... 5.38972259e-01 -4.86992486e-02 -2.67137837e+00] [-3.33412081e-01 -1.11428535e+00 -8.94363761e-01 ... 
-1.59995317e+00 -6.26028776e-01 -1.28785536e-01] [ 1.25646746e+00 2.56127596e-01 -2.28882447e-01 ... -1.91882205e+00 1.40091050e+00 9.78202522e-01] ... [-1.37707198e+00 5.31958222e-01 1.71833944e+00 ... -4.44409370e-01 1.37537205e+00 -1.28962505e+00] [ 1.77890098e+00 1.15931845e+00 -1.00462675e+00 ... 9.49636519e-01 1.84020698e+00 7.43707538e-01] [ 9.43772137e-01 -1.93370536e-01 1.35114825e+00 ... 3.92043233e-01 8.33701249e-03 -9.22987387e-02]] [[ 2.05538511e+00 1.01988316e+00 2.13099980e+00 ... 3.33115816e-01 1.95586526e+00 -6.04080677e-01] [-2.17413321e-01 -1.06194830e+00 5.73072374e-01 ... -1.52075052e+00 1.36679411e+00 -6.27616704e-01] [ 2.06037641e+00 -1.20221710e+00 2.72072583e-01 ... 2.55190492e+00 2.31195283e+00 2.43170452e+00] ... [-4.05376554e-01 1.89395010e+00 6.51670396e-01 ... 1.54453731e+00 3.88439815e-03 -2.10776734e+00] [-1.57168388e+00 -8.41956258e-01 -8.01164448e-01 ... 9.01884615e-01 1.81432891e+00 -2.70556569e+00] [ 8.08299422e-01 1.36041129e+00 -2.89756000e-01 ... -2.15261030e+00 1.67948508e+00 -1.63040161e+00]] [[ 2.29681277e+00 -4.37651348e+00 3.03831387e+00 ... -2.55506754e+00 1.93104103e-01 4.91448402e-01] [ 2.65958929e+00 -7.96608865e-01 2.05380607e+00 ... 1.61720896e+00 6.20008290e-01 4.44934636e-01] [ 6.15815401e-01 1.25614333e+00 5.25817692e-01 ... -6.12013757e-01 -1.31919765e+00 3.26275444e+00] ... [-1.98953569e+00 -1.95102763e+00 -1.91471541e+00 ... -4.76934940e-01 -2.34840706e-01 -6.24769986e-01] [-4.27731752e-01 -7.54327416e-01 -8.09876919e-02 ... 1.51698709e-01 1.58819509e+00 -3.20622492e+00] [-1.14961408e-01 3.19313347e-01 2.47225857e+00 ... -2.50408101e+00 6.64835274e-01 1.18722096e-01]] ... [[ 5.40429652e-01 -6.96916878e-01 -1.19490659e+00 ... 3.02887774e+00 2.07217094e-02 1.84247088e+00] [-7.50599444e-01 2.67277598e+00 7.92810738e-01 ... -1.27754688e+00 2.87734866e-01 9.21985745e-01] [-2.59607267e+00 1.46645916e+00 1.81173956e+00 ... -3.21740746e+00 3.76508087e-01 -1.86444962e+00] ... 
[-1.43353879e+00 -1.25809348e+00 -1.35475039e-01 ... 1.67273426e+00 -7.82627821e-01 3.52898717e-01] [-6.12432957e-01 -1.21518123e+00 2.70503616e+00 ... -3.24324131e-01 -4.01972383e-01 1.45900249e+00] [-1.34404159e+00 7.09837317e-01 -7.28719413e-01 ... -6.71001911e-01 2.70149755e+00 -2.77122211e+00]] [[-4.42784011e-01 1.87361217e+00 -9.65652823e-01 ... -4.22857881e-01 -2.10081315e+00 -4.69917446e-01] [ 1.47852361e+00 -1.09886837e+00 3.95904362e-01 ... 2.21669269e+00 4.71544534e-01 1.14125967e+00] [ 9.32999432e-01 -2.15547904e-01 4.00708497e-01 ... -1.96804571e+00 3.33459735e-01 -2.60155916e-01] ... [-9.57088649e-01 -4.40662354e-01 -1.19853187e+00 ... 1.83163106e+00 2.07066059e+00 3.37579036e+00] [ 3.14370334e-01 1.20550358e+00 2.25893617e+00 ... 1.92195702e+00 -3.45216179e+00 -3.30761194e+00] [-5.85421436e-02 -4.62757528e-01 1.35530376e+00 ... 2.50663257e+00 7.93030798e-01 -7.07589567e-01]] [[-1.83911908e+00 -1.70934653e+00 4.59258944e-01 ... 1.75374126e+00 2.86839068e-01 -7.32659474e-02] [-1.36588955e+00 -1.71299443e-01 -1.86755157e+00 ... -5.08118927e-01 -1.96728313e+00 5.41773081e-01] [ 6.57782137e-01 -2.71483850e+00 -6.96981370e-01 ... -1.01842391e+00 -2.17890501e+00 -4.63283837e-01] ... [-5.86999357e-01 1.86439490e+00 -4.71920109e+00 ... 3.60162449e+00 -1.38140810e+00 5.98553717e-01] [-4.93891425e-02 4.06833351e-01 -1.10283959e+00 ... 1.15447052e-01 8.20586920e-01 -1.85237050e-01] [ 3.04788470e-01 1.05276895e+00 1.23299360e+00 ... 1.79863542e-01 1.09339905e+00 -2.79223859e-01]]] [[[-8.93866457e-03 -1.37773510e-02 -6.85316627e-04 ... 7.74123445e-02 -1.91372961e-01 3.16245072e-02] [ 7.66657218e-02 -2.26702675e-01 1.93075508e-01 ... -1.65395346e-02 -1.40560329e-01 9.84746739e-02] [ 9.27063450e-02 4.36023250e-03 -6.54855445e-02 ... 2.90388502e-02 -4.87240665e-02 -9.80450809e-02] ... [ 1.18377611e-01 2.48734728e-02 -1.28773913e-01 ... -1.49979055e-01 1.47705838e-01 -3.75936367e-02] [ 2.47337166e-02 -2.45660141e-01 -1.68355659e-01 ... 
9.51703414e-02 4.04750481e-02 -8.33810344e-02] [ 3.59365493e-02 1.07693281e-02 1.97940110e-03 ... -1.67483056e-03 1.05617717e-01 -4.84354272e-02]] [[ 1.52950495e-01 5.17333969e-02 -9.80334207e-02 ... -2.92220898e-02 1.08024120e-01 -4.08057272e-02] [-1.26639739e-01 4.72153313e-02 -2.79486626e-02 ... -9.21617076e-02 -1.66588008e-01 -1.04890212e-01] [ 1.57849386e-01 2.45803990e-03 -2.32991632e-02 ... 1.14864051e-01 1.58476681e-01 1.54715583e-01] ... [ 9.97345895e-02 7.64068961e-02 1.91922352e-01 ... 7.30363727e-02 -2.87188520e-03 -6.81949109e-02] [ 3.96970324e-02 -1.04184255e-01 7.82393441e-02 ... 1.41091406e-01 -2.28628397e-01 3.46192151e-01] [-4.99086268e-02 -9.89663526e-02 -1.62245557e-01 ... -6.14501089e-02 3.13290255e-03 9.48592722e-02]] [[-1.96711235e-02 -1.12900153e-01 -6.29289299e-02 ... -2.13857600e-03 -1.26195386e-01 5.94328865e-02] [ 3.89456488e-02 4.75878976e-02 1.38378650e-01 ... -4.53772210e-02 7.40990043e-02 1.39830813e-01] [-6.84027672e-02 6.84893057e-02 -7.09850863e-02 ... -1.25637338e-01 7.24257901e-02 -2.49992087e-02] ... [-1.01385480e-02 -5.55433612e-03 -9.18531641e-02 ... 1.01869926e-01 -7.84856007e-02 1.19147003e-01] [ 8.84983465e-02 1.49281830e-01 8.32920596e-02 ... -6.28944188e-02 -1.07440606e-01 6.05187491e-02] [ 7.90146440e-02 5.34211425e-03 -1.27845019e-01 ... -1.21278174e-01 2.83696465e-02 -1.82269230e-01]] ... [[ 2.75894374e-01 -2.25949332e-01 -6.12119697e-02 ... -5.77558726e-02 1.01448007e-01 1.06429122e-01] [ 1.14281707e-01 2.46490687e-02 3.09365578e-02 ... -6.62272237e-03 -1.48039967e-01 -2.07184106e-01] [ 1.65813118e-01 -1.30624279e-01 1.01615421e-01 ... 3.40023153e-02 -1.60074070e-01 -6.66470453e-02] ... [ 4.82480880e-03 -4.04533297e-02 7.00902566e-02 ... 1.17043778e-01 -1.10619955e-01 1.20018031e-02] [ 7.08357617e-02 6.63595945e-02 -8.18470400e-03 ... -4.43450324e-02 -1.87241107e-01 6.68134391e-02] [-1.25128910e-01 -1.86013266e-01 4.67240438e-02 ... 
6.97219223e-02 -3.11223883e-02 2.42329597e-01]] [[-1.46007612e-01 1.33512676e-01 3.48600112e-02 ... -1.06099271e-03 5.78551143e-02 -8.69328678e-02] [ 1.10615641e-01 2.02721506e-02 -3.40712331e-02 ... -1.16825625e-01 1.55597344e-01 -7.90788978e-02] [ 5.25620691e-02 6.47484064e-02 -2.09898800e-02 ... 9.16771777e-03 5.30855805e-02 -9.97173414e-03] ... [-1.03595972e-01 -1.04464315e-01 3.59987952e-02 ... -3.69104296e-02 -4.57587801e-02 -6.53355047e-02] [-1.86385978e-02 -6.12558015e-02 -1.48006878e-03 ... 6.29262477e-02 -9.15821549e-03 8.53668749e-02] [ 1.01619981e-01 1.82525605e-01 3.66005376e-02 ... -1.87299430e-01 8.53015706e-02 2.85132527e-02]] [[ 4.31426167e-02 -6.44787624e-02 -2.00944141e-01 ... 4.52302098e-02 -1.18651658e-01 5.36266901e-02] [ 7.59569407e-02 -9.31725949e-02 -1.94625691e-01 ... -1.45261064e-01 -1.07412888e-02 1.15880750e-01] [ 4.13313368e-03 -7.60271549e-02 4.28882204e-02 ... 9.75635052e-02 -4.06140909e-02 2.40364894e-02] ... [ 7.80812949e-02 4.19460833e-02 -3.08357924e-02 ... -5.11258990e-02 8.01793020e-03 1.66002020e-01] [ 6.63014203e-02 1.21323191e-01 -9.58480164e-02 ... 6.73524439e-02 -1.04765981e-01 -4.49783579e-02] [-8.10309649e-02 -1.44039407e-01 5.17943762e-02 ... -6.54111290e-03 -2.53617652e-02 -3.43221836e-02]]] [[[-2.08891198e-01 6.71366453e-01 -2.82508314e-01 ... 1.90378316e-02 9.37649533e-02 1.40997577e+00] [ 1.00893760e+00 -1.22178271e-01 -2.45268628e-01 ... 1.62173044e-02 -4.70047891e-01 1.40969366e-01] [ 5.44897735e-01 1.30179107e-01 -2.48160005e-01 ... 2.73233235e-01 5.51689982e-01 -4.34313297e-01] ... [-6.24387026e-01 -1.94753110e-01 4.82210934e-01 ... -8.94434378e-02 7.59026827e-03 -3.81522290e-02] [ 1.13238040e-02 2.14626342e-01 -2.89124697e-01 ... 4.23994750e-01 2.23090360e-03 1.85629755e-01] [ 5.85713148e-01 3.34557056e-01 -7.48931646e-01 ... 3.00260663e-01 -2.06217207e-02 4.67493057e-01]] [[ 5.32419384e-01 -2.00920388e-01 -3.59279960e-01 ... 
1.96019575e-01 -1.05298793e+00 -1.00770183e-02] [ 8.44729066e-01 -3.13501656e-01 -4.76900697e-01 ... -7.78581202e-02 -6.20653201e-03 3.03547502e-01] [-1.43896624e-01 8.33899260e-01 -2.69080758e-01 ... 4.73718166e-01 -3.68114442e-01 3.42326611e-01] ... [-7.64155746e-01 2.20960259e-01 -3.88034955e-02 ... 3.15608472e-01 -6.51255131e-01 -1.98702529e-01] [ 7.24239111e-01 -1.82450950e-01 -5.29806376e-01 ... -9.14466023e-01 -5.33320487e-01 4.34654236e-01] [ 2.50748307e-01 -1.63894072e-01 9.09084268e-03 ... 2.10833132e-01 -9.42352951e-01 -1.11124551e+00]] [[-2.94642597e-01 -5.48475742e-01 -4.87619668e-01 ... 2.25156769e-02 -4.52499747e-01 -1.40062749e+00] [-4.83728617e-01 7.26683617e-01 1.16878033e+00 ... 2.51022965e-01 6.94238782e-01 -6.55608177e-01] [-3.91510546e-01 -5.05713880e-01 1.46246946e+00 ... -4.47421521e-02 -2.41336927e-01 5.50276995e-01] ... [ 8.71088326e-01 1.55128837e-01 -3.33676726e-01 ... -3.73032302e-01 -3.99783067e-02 9.75812018e-01] [-2.63948925e-02 6.17567636e-02 -1.17297733e+00 ... -3.37307513e-01 -5.54986298e-01 3.95045131e-01] [-3.47152531e-01 -6.10535026e-01 -7.28132576e-02 ... 1.61114320e-01 -1.51555538e+00 -1.50799584e-02]] ... [[-4.26982969e-01 8.49263728e-01 -4.95743126e-01 ... -4.68465894e-01 1.01117849e+00 1.18416607e+00] [-1.65309310e-01 2.50386387e-01 7.85425007e-01 ... 1.23188831e-01 -1.15145326e+00 5.58419108e-01] [-3.83739859e-01 -5.90179920e-01 5.67214072e-01 ... 6.91277325e-01 1.78376868e-01 -1.03134379e-01] ... [-2.64624327e-01 -1.13678324e+00 -3.67849410e-01 ... 5.36081254e-01 -8.06381777e-02 3.88745278e-01] [ 2.06894577e-01 -1.64568090e+00 -6.77182223e-04 ... -5.33104718e-01 -2.55266339e-01 -2.79744387e-01] [ 2.03278482e-01 3.69828731e-01 -2.10522637e-01 ... -3.63131464e-01 -9.17987302e-02 1.39733866e-01]] [[ 2.08757758e-01 3.09103101e-01 2.95709223e-01 ... 2.57642984e-01 2.48565421e-01 -8.44764635e-02] [-2.35558406e-01 5.64078569e-01 -1.80534944e-01 ... 
-4.82234992e-02 -1.00679624e+00 3.92330706e-01] [-2.13366851e-01 7.47748375e-01 -1.15880929e-01 ... -6.73408449e-01 6.23259842e-01 -4.97702628e-01] ... [-6.91019058e-01 -4.09110695e-01 -5.72491586e-02 ... 4.72308636e-01 -3.26903433e-01 -4.01551038e-01] [ 2.72236735e-01 -5.18566132e-01 1.70054480e-01 ... -2.61332572e-01 1.58178210e-01 -1.05554104e+00] [-3.59127522e-01 2.86035657e-01 -3.37695152e-01 ... -3.36850017e-01 -1.66605443e-01 -5.29159233e-02]] [[ 1.26536265e-01 3.58109951e-01 -2.18967736e-01 ... -7.44558424e-02 5.27311921e-01 1.46316588e-01] [ 1.03192724e-01 -1.16532290e+00 -5.24007976e-01 ... -1.44401893e-01 -1.14820814e+00 -1.67420670e-01] [-1.09028369e-01 -1.91802159e-01 -1.24004371e-01 ... 2.52157934e-02 -6.15675449e-01 3.17947775e-01] ... [ 5.78665376e-01 -1.00038850e+00 2.40942374e-01 ... -6.98805749e-01 -3.30511630e-01 3.43217462e-01] [ 5.62131941e-01 -3.20558757e-01 1.63892046e-01 ... -9.71476376e-01 3.26781839e-01 6.10655785e-01] [-1.64643809e-01 1.11004844e-01 -2.78921098e-01 ... -7.33437777e-01 -6.38517559e-01 1.39557142e-02]]]] ... [[[[-7.55618453e-01 -9.23248589e-01 -8.22890162e-01 ... -8.03519607e-01 4.26898479e-01 -2.25788563e-01] [-1.87920123e-01 1.41888964e+00 2.93900877e-01 ... 5.16487919e-02 1.17319191e+00 -1.86088547e-01] [-6.49489462e-02 -1.35019898e-01 7.98208416e-01 ... 1.05112441e-01 3.05626001e-02 -4.80227679e-01] ... [ 8.50215405e-02 -1.93621531e-01 -2.23812595e-01 ... -5.63324869e-01 -1.02125144e+00 -5.99050283e-01] [-7.21409142e-01 -1.18786231e-01 -4.13758785e-01 ... -1.00336313e+00 1.31054357e-01 -9.48497236e-01] [-2.51063909e-02 4.07593548e-01 1.76990274e-02 ... 1.32302070e+00 -1.47460163e+00 -8.65641475e-01]] [[ 6.03257060e-01 -6.79800749e-01 -8.19203019e-01 ... -8.40016782e-01 -5.22404075e-01 -1.02892719e-01] [-6.67823404e-02 1.89156306e+00 3.02881777e-01 ... -4.89709675e-01 1.26625910e-01 6.58935010e-02] [-8.12676549e-02 5.79844713e-01 3.95332053e-02 ... 5.55724978e-01 3.12244087e-01 1.46766937e+00] ... 
[-7.77928412e-01 -1.10743141e+00 -1.26710677e+00 ... 2.30861038e-01 -2.35063270e-01 -2.83483773e-01] [-5.25341392e-01 -5.77502996e-02 -3.86598140e-01 ... 5.42439759e-01 6.28881156e-02 3.76478106e-01] [-6.65462971e-01 -1.56929418e-01 -9.84688222e-01 ... -1.86219347e+00 4.60521221e-01 -5.20206809e-01]] [[ 1.42686486e-01 2.85268545e-01 2.52462476e-01 ... 3.33199918e-01 -3.80044699e-01 -4.67007719e-02] [ 1.69267550e-01 1.23620570e+00 1.26534373e-01 ... 1.39554804e-02 -6.37525141e-01 3.02444667e-01] [ 5.46005778e-02 -1.87175330e-02 -1.13207676e-01 ... -8.36980641e-01 2.18016207e-01 -4.09741223e-01] ... [-3.02462608e-01 8.93710792e-01 7.11729944e-01 ... -2.58910865e-01 2.75376141e-01 -1.63334727e-01] [ 3.75548184e-01 -2.20211610e-01 -5.90525508e-01 ... 2.27391168e-01 -5.41837215e-01 -8.23348939e-01] [ 1.67707041e-01 -1.73212722e-01 -4.59282368e-01 ... -3.09660763e-01 1.66562259e+00 -3.70046049e-01]] ... [[-3.46090525e-01 7.34179258e-01 1.40852571e+00 ... 2.35494703e-01 6.36791646e-01 7.73258686e-01] [ 4.63257462e-01 -1.34408462e+00 -5.16688585e-01 ... 1.41838169e+00 2.58464932e-01 3.16423833e-01] [ 3.82128716e-01 1.20089674e+00 -5.57751060e-01 ... -1.71541944e-01 -3.35960463e-02 3.64257425e-01] ... [ 4.71682027e-02 3.34019959e-01 -5.44501722e-01 ... -9.75887775e-01 -4.85758901e-01 4.52324957e-01] [ 1.13544977e+00 2.07807049e-02 4.20839727e-01 ... -2.56504208e-01 1.27728558e+00 -1.03151584e+00] [-2.12757923e-02 -8.92170727e-01 -3.39807928e-01 ... 5.06474614e-01 4.67309430e-02 -1.38169572e-01]] [[-3.72256070e-01 3.70046854e-01 -8.76928627e-01 ... 3.03052276e-01 1.52384901e+00 -7.38328248e-02] [ 8.51795077e-02 1.17294800e+00 -6.86565995e-01 ... 4.06542629e-01 1.04872513e+00 7.28125334e-01] [ 1.85606885e+00 6.71819329e-01 -1.04380774e+00 ... 2.61355817e-01 -3.64650935e-01 1.05786586e+00] ... [-1.31389701e+00 4.07526731e-01 2.16794699e-01 ... 1.08110571e+00 -5.73709846e-01 1.05236697e+00] [-8.55735600e-01 5.45152962e-01 -1.11624885e+00 ... 
-1.25593737e-01 1.02419806e+00 -3.38160962e-01] [-7.54044473e-01 1.19310059e-02 1.07687020e+00 ... 1.83719218e-01 -6.10329360e-02 1.28437921e-01]] [[-3.33991289e-01 -4.27629590e-01 4.48152684e-02 ... -1.03569007e+00 4.77965653e-01 -4.47331011e-01] [ 5.89069843e-01 -7.03567028e-01 1.43334642e-01 ... 1.51796937e+00 -9.53337967e-01 5.70415854e-01] [ 5.01153827e-01 -1.22750366e+00 -3.84748042e-01 ... -6.51919127e-01 -4.99241441e-01 -3.38577598e-01] ... [ 4.44120735e-01 -3.12145144e-01 -8.13750267e-01 ... 4.72610623e-01 -4.49009210e-01 1.23545063e+00] [ 2.96134293e-01 -9.55312192e-01 -2.77307928e-01 ... 8.06435525e-01 1.02125371e+00 3.75770688e-01] [ 6.73655808e-01 1.20424676e+00 -3.99607360e-01 ... -4.89983931e-02 8.03070605e-01 8.30083489e-01]]] [[[ 7.38667771e-02 -3.78540468e+00 2.35126185e+00 ... -3.82623577e+00 9.73714650e-01 1.36230552e+00] [ 6.24461882e-02 1.99917912e+00 -1.25164795e+00 ... 2.06328593e-02 -6.01735830e-01 -1.22989845e+00] [ 6.88219726e-01 1.61091411e+00 3.44284296e-01 ... 1.87765941e-01 -1.32168442e-01 1.52992737e+00] ... [ 1.24681860e-01 -8.39929998e-01 -6.46764100e-01 ... 1.82434216e-01 2.48856282e+00 8.22396040e-01] [-1.77038461e-02 1.76172316e+00 -7.84949303e-01 ... -1.18641400e+00 -2.76076406e-01 -4.43975002e-01] [ 1.47249138e+00 -8.06881309e-01 -5.93477070e-01 ... -2.96217489e+00 -1.19308650e+00 3.54117841e-01]] [[ 4.10192013e-01 -1.46481597e+00 1.40126324e+00 ... -1.98884940e+00 3.20102167e+00 -2.61518621e+00] [-1.83917701e+00 -8.86351585e-01 -1.57057539e-01 ... 1.75266421e+00 1.39475667e+00 8.72646928e-01] [-7.87443146e-02 -1.56314588e+00 -2.31789052e-01 ... -1.06456256e+00 -1.57491282e-01 -9.77598429e-01] ... [-3.72138500e+00 8.16991627e-01 9.74724889e-01 ... 5.07679105e-01 1.37554920e+00 -1.17761016e+00] [ 1.12431383e+00 2.10569000e+00 -8.57155502e-01 ... 1.39616609e+00 -1.54744339e+00 3.24191093e-01] [-5.67652702e-01 1.92674804e+00 -1.34181702e+00 ... 
-8.62866759e-01 -4.58032787e-01 2.22044826e-01]] [[ 5.59241295e-01 -2.74692595e-01 9.90056932e-01 ... 1.73996508e+00 -9.89373401e-02 -1.18437672e+00] [-1.56367850e+00 -6.09847546e-01 -3.02811917e-02 ... -5.36818624e-01 8.59979153e-01 -3.19803149e-01] [-1.47930002e+00 -8.80549192e-01 6.60009205e-01 ... -8.34803879e-01 -6.57685280e-01 -2.74381757e-01] ... [-3.40532482e-01 8.36213410e-01 -9.62965786e-01 ... -2.65488088e-01 1.82883847e+00 -8.03807080e-01] [ 5.45783162e-01 2.17715859e-01 3.33293200e-01 ... -8.59540761e-01 -5.20618618e-01 -9.34877574e-01] [-1.74135375e+00 2.21645549e-01 1.09740803e-02 ... 1.98677981e+00 -1.55726969e+00 -1.82425231e-01]] ... [[ 1.24517274e+00 2.95002878e-01 -1.18448865e+00 ... 1.12119175e-01 1.40604997e+00 -2.40100908e+00] [-3.91338497e-01 7.81192362e-01 8.85948539e-01 ... -1.20616436e-01 1.64446163e+00 3.65724176e-01] [-1.31239498e+00 -1.83535770e-01 -1.44864345e+00 ... -8.07466581e-02 2.81826295e-02 -9.09299314e-01] ... [ 6.14156365e-01 4.34584945e-01 -1.42028630e+00 ... -9.94960845e-01 -1.71975935e+00 -1.15644085e+00] [ 2.44278598e+00 -2.45225644e+00 -1.38029420e+00 ... -2.24066949e+00 4.93979305e-01 3.67888784e+00] [ 2.83257395e-01 1.71264267e+00 8.76824141e-01 ... -1.45582509e+00 -8.74763966e-01 -1.41502440e+00]] [[-4.75270450e-01 6.61137933e-03 -2.67824292e-01 ... -8.33812892e-01 -3.73825401e-01 1.14339912e+00] [ 2.29871243e-01 -1.90978181e+00 -6.42032623e-01 ... 1.32213843e+00 8.67711246e-01 5.46221361e-02] [-1.90152049e+00 -3.40981215e-01 1.54974461e+00 ... -1.51964736e+00 -1.40365648e+00 1.50445849e-01] ... [-2.02113632e-02 3.47648907e+00 -4.07749563e-01 ... 7.72713363e-01 -9.17261660e-01 5.67405641e-01] [ 1.85964668e+00 4.19973344e-01 2.44520259e+00 ... 1.52590060e+00 2.07451272e+00 -1.50023088e-01] [-1.43813181e+00 2.74154614e-03 9.11589384e-01 ... 3.17010462e-01 8.86868834e-01 9.89240050e-01]] [[ 4.58166480e-01 3.31522912e-01 7.90264726e-01 ... 
2.84947246e-01 -1.39718091e+00 -3.47893953e+00] [ 1.14076865e+00 -2.95044684e+00 -3.67727429e-01 ... -4.93087798e-01 1.21237803e+00 1.49474883e+00] [ 1.10877025e+00 1.86543274e+00 3.91834021e+00 ... -7.19183385e-01 -2.10126281e+00 9.42379236e-03] ... [-1.26422906e+00 1.33350343e-01 6.18971288e-01 ... -8.29182863e-01 1.98954499e+00 -4.51458871e-01] [ 1.61643171e+00 7.58022428e-01 -8.49199414e-01 ... 2.42158756e-01 -9.46802020e-01 1.91681635e+00] [-1.90198040e+00 -1.72090840e+00 2.07577443e+00 ... 7.57863939e-01 1.82295668e+00 -1.07718778e+00]]] [[[-3.64643991e-01 -3.48498553e-01 -4.37014885e-02 ... 2.12076202e-01 -8.57694671e-02 1.02934120e-02] [-1.83156118e-01 -1.44008383e-01 2.24963322e-01 ... -1.67375132e-01 -6.31797433e-01 6.31488115e-02] [-3.01728159e-01 4.01858151e-01 -3.34221303e-01 ... 5.21519184e-01 -4.49617237e-01 -4.18866068e-01] ... [ 5.50546825e-01 -1.06571004e-01 8.02426413e-02 ... 1.46258295e-01 8.95638838e-02 -1.80528536e-02] [ 2.14191109e-01 4.07340899e-02 -7.66746700e-02 ... -6.02135435e-02 5.03740087e-02 -9.89275612e-03] [-6.37088194e-02 -1.20462896e-02 -3.56318116e-01 ... -1.63120672e-01 2.54889041e-01 1.44824505e-01]] [[-4.56066042e-01 2.58632571e-01 -5.67334965e-02 ... -8.32205191e-02 -1.57542810e-01 -5.03475428e-01] [-1.78348944e-01 1.29091725e-01 -2.90752083e-01 ... -7.11554810e-02 -3.72548550e-02 -1.83987409e-01] [-2.45101511e-01 2.46100336e-01 -2.96291590e-01 ... 1.02752671e-01 1.45564899e-01 -3.04401722e-02] ... [ 1.29629001e-01 -4.02656376e-01 -3.14451158e-01 ... -2.41355643e-01 -2.20060870e-02 9.11719278e-02] [-1.36040688e-01 2.12738991e-01 1.75022751e-01 ... -1.10519879e-01 -1.18752889e-01 1.78531613e-02] [ 3.40959221e-01 -4.88458462e-02 -1.60826057e-01 ... -2.00414062e-01 9.59108993e-02 1.49472915e-02]] [[ 1.07668750e-01 2.46462196e-01 4.19528298e-02 ... 2.62420714e-01 4.28313494e-01 -2.47992083e-01] [ 4.16534662e-01 1.16441220e-01 -2.86390632e-01 ... 
1.56489000e-01 2.77435094e-01 3.94505143e-01] [-2.80509979e-01 -1.12479463e-01 -3.91544342e-01 ... -1.55270169e-03 -1.34339809e-01 9.73030180e-02] ... [ 1.30779088e-01 2.28031963e-01 -9.92614627e-02 ... 4.42078859e-01 2.89402574e-01 -2.15974569e-01] [ 1.57284871e-01 3.05189967e-01 -1.07919894e-01 ... 6.24705032e-02 1.06039867e-01 3.22415009e-02] [-2.09296659e-01 1.95220098e-01 -8.78762975e-02 ... 1.29068509e-01 -6.83088154e-02 -8.04996770e-03]] ... [[-7.97361732e-02 4.06934440e-01 3.62395542e-04 ... 3.42504501e-01 1.55638993e-01 -6.70248926e-01] [-1.04961328e-01 -3.32975611e-02 -2.39142135e-01 ... -2.30637550e-01 3.21117342e-01 -1.86572850e-01] [-6.11914247e-02 3.11395079e-01 2.22915739e-01 ... -4.79644328e-01 5.89075029e-01 3.93145025e-01] ... [-9.20061320e-02 -4.06235829e-02 -2.10846514e-01 ... -1.42424619e-02 -1.13581493e-02 3.85628194e-01] [ 2.69928247e-01 -3.38067204e-01 -2.99254864e-01 ... 3.78097743e-01 -4.35880631e-01 -1.95051104e-01] [ 3.12850833e-01 -1.79296941e-01 -2.75517493e-01 ... 4.11675610e-02 -8.63048062e-02 3.30198556e-01]] [[ 3.50763410e-01 4.08572378e-03 -2.69470036e-01 ... 8.39042887e-02 3.89782861e-02 -4.32396293e-01] [ 6.78729191e-02 -9.74726677e-02 1.06401883e-01 ... -2.66070981e-02 3.23815882e-01 -2.08175898e-01] [-1.70474768e-01 -1.51786385e-02 -6.03234112e-01 ... -2.01220289e-01 2.33172905e-02 -8.74664634e-02] ... [ 1.60383195e-01 -4.53750342e-01 8.06447789e-02 ... 3.30695957e-02 -2.18700349e-01 -3.03977332e-03] [ 3.72179627e-01 4.94018197e-02 -2.47853786e-01 ... 1.28873870e-01 1.83659747e-01 -1.99205965e-01] [-4.37736124e-01 8.42196345e-02 2.14708686e-01 ... -1.99094221e-01 -2.53618956e-01 2.85574645e-01]] [[ 1.13269128e-01 -3.72192979e-01 2.57394820e-01 ... -6.00337191e-03 -2.93481767e-01 -5.97928837e-02] [-1.51601275e-02 1.97636038e-01 -3.69043231e-01 ... 3.66839886e-01 1.67545393e-01 -1.31223470e-01] [-9.60288718e-02 -4.72896785e-01 7.91135848e-01 ... 6.32942617e-02 -2.52400935e-01 -2.09798533e-02] ... 
[ 4.33698177e-01 3.43346566e-01 2.08121940e-01 ... -4.15568128e-02 1.55037548e-02 4.41676319e-01] [ 5.90181202e-02 -5.33368438e-03 -2.77949989e-01 ... 1.64565176e-01 4.40480858e-01 3.29044372e-01] [-1.51020531e-02 -5.00995815e-02 1.52486756e-01 ... 6.90708682e-02 1.22008575e-02 -1.63411796e-01]]] [[[ 1.10373926e+00 9.79677141e-01 -1.12478137e+00 ... -9.79699612e-01 4.08409739e+00 1.25958681e+00] [ 6.56252503e-01 -8.21145415e-01 2.37282544e-01 ... 2.96707898e-01 -8.83244634e-01 1.88343501e+00] [ 1.50566876e-01 1.23961854e+00 2.04792714e+00 ... -8.67284536e-01 7.82102704e-01 -2.11139560e-01] ... [-2.35349941e+00 3.61515045e-01 -3.89234924e+00 ... 1.77304089e-01 -1.44553185e+00 -2.62296104e+00] [ 8.76949787e-01 1.24452102e+00 -1.11968017e+00 ... -1.00806880e+00 1.43977022e+00 -1.04168318e-01] [-2.86571264e+00 1.07664442e+00 7.29420364e-01 ... -1.79161176e-01 -7.35759914e-01 3.58386636e-01]] [[ 2.17740417e+00 -1.19487382e-01 -1.43571779e-01 ... -1.81628597e+00 -7.79249966e-01 -3.60616326e-01] [-9.04417187e-02 -1.11429095e+00 1.57951045e+00 ... -5.23912311e-01 -1.99219728e+00 -1.47969916e-01] [-7.80837178e-01 1.40301526e+00 1.54424950e-01 ... 3.01510239e+00 -2.55317658e-01 1.75047427e-01] ... [-9.82016698e-02 -1.64976895e-01 -1.22691357e+00 ... 1.42511582e+00 -6.93480849e-01 3.25427055e+00] [ 1.96106529e+00 -4.11214605e-02 -1.19515955e+00 ... 9.86725867e-01 1.07909545e-01 -2.53576517e-01] [-2.21824288e+00 9.75629985e-01 -5.19744158e-01 ... -8.70716333e-01 1.48858547e+00 2.51820350e+00]] [[ 4.82924610e-01 -9.51093853e-01 -1.05365419e+00 ... 3.29816133e-01 3.01369834e+00 -5.17340183e-01] [ 6.34769127e-02 -3.51387024e-01 2.62802434e+00 ... -4.83717620e-01 -4.05844069e+00 -5.04842550e-02] [ 2.44765544e+00 -7.67144740e-01 1.42818129e+00 ... 1.39784396e+00 -2.69352269e+00 -1.26287770e+00] ... [-1.48964608e+00 1.50688326e+00 -2.21674013e+00 ... -3.46682024e+00 -1.08496118e+00 9.01950300e-01] [-1.10095119e+00 -4.04170752e-01 1.25337863e+00 ... 
8.76638889e-01 2.50997305e+00 2.87789607e+00] [ 2.41898584e+00 -1.11733533e-01 1.00310969e+00 ... -2.81956887e+00 1.39005280e+00 1.90874004e+00]] ... [[-3.50763559e+00 1.76227903e+00 2.19305182e+00 ... -2.49898100e+00 -2.02326226e+00 -5.34035146e-01] [ 9.17777538e-01 -5.36070764e-01 -5.16534448e-01 ... -2.33700442e+00 -1.99143723e-01 -1.23119104e+00] [ 1.02685070e+00 1.53921485e-01 1.25805259e+00 ... -3.14192653e+00 -1.06728339e+00 -2.20396757e+00] ... [ 5.36851883e-01 5.25749885e-02 -3.85765386e+00 ... -1.43128201e-01 -1.03211296e+00 8.47332954e-01] [-8.97751987e-01 -6.55920133e-02 -2.38643718e+00 ... -7.84448326e-01 3.85570645e+00 -2.67778611e+00] [ 2.03788805e+00 -1.89078808e-01 -5.64538956e-01 ... -1.45180190e+00 1.95999563e+00 1.16710889e+00]] [[ 1.64448547e+00 1.52814046e-01 -3.24417734e+00 ... -2.23954630e+00 8.02585125e-01 -5.79961121e-01] [-1.13035524e+00 -1.62514985e+00 -2.07499992e-02 ... 1.98390305e+00 -4.64344651e-01 -1.53184235e+00] [ 1.89422834e+00 -1.76792181e+00 3.01787329e+00 ... -8.95982206e-01 2.46522740e-01 7.04462886e-01] ... [ 2.39477468e+00 1.51179075e+00 1.81051910e+00 ... -9.79819655e-01 1.15321207e+00 3.19662452e-01] [-7.94891655e-01 9.91588607e-02 1.76105142e+00 ... -6.12518907e-01 -6.91720188e-01 -1.50498128e+00] [-8.61907840e-01 -4.41338792e-02 -3.02502251e+00 ... 2.36880207e+00 -6.90935791e-01 -7.85242021e-01]] [[-3.22036147e-01 -1.99227154e+00 2.36690307e+00 ... 2.08237708e-01 6.51689693e-02 1.26080215e+00] [-7.97052979e-01 2.34565854e+00 -9.16564688e-02 ... 1.06713273e-01 -3.59313786e-01 -1.09757781e+00] [ 2.66262726e-03 -1.42720580e+00 3.65399808e-01 ... -7.82852888e-01 2.63837308e-01 1.26262105e+00] ... [ 2.51045227e+00 2.07369900e+00 4.75343883e-01 ... 1.81682515e+00 2.71573710e+00 -1.69066596e+00] [ 2.38499260e+00 -7.04146087e-01 -1.09249318e+00 ... 3.58690977e-01 8.38914514e-01 -1.43060207e+00] [-8.62927884e-02 -2.49341702e+00 2.04762101e+00 ... 
-1.03431523e+00 -2.66955853e+00 -1.05859053e+00]]] [[[ 8.15167874e-02 2.34864419e-03 2.59845573e-02 ... 1.02650791e-01 2.79215723e-02 1.11500323e-01] [ 7.92617872e-02 -1.67768523e-01 -3.56228352e-02 ... -8.09677839e-02 -2.03507870e-01 -4.92948778e-02] [ 5.03389835e-02 -1.47995062e-03 3.25043574e-02 ... -5.92390113e-02 6.36653379e-02 -7.02703372e-02] ... [ 9.67480019e-02 1.66331623e-02 1.55356750e-01 ... -1.10660288e-02 -2.78764386e-02 -3.39971967e-02] [ 1.50690996e-03 -4.29894775e-02 1.19164765e-01 ... -2.52000153e-01 1.19999394e-01 -2.78853606e-02] [-2.99409986e-01 -2.75539756e-02 2.09502075e-02 ... 5.12286201e-02 2.88928952e-02 1.65138058e-02]] [[ 3.93651463e-02 -1.27117440e-01 -2.00809743e-02 ... -4.83327024e-02 1.01598255e-01 -6.18033968e-02] [ 7.41760209e-02 -1.86650589e-01 3.85239497e-02 ... -4.58123125e-02 -7.37474710e-02 1.53543979e-01] [-5.38481623e-02 3.63273025e-02 -1.07445970e-01 ... -9.66791287e-02 1.57077923e-01 1.33337542e-01] ... [-5.06620593e-02 1.16239503e-01 -2.16897950e-02 ... -2.56065596e-02 -1.42427787e-01 2.14480475e-01] [ 3.35317887e-02 -3.87314968e-02 -6.78519681e-02 ... -1.27275065e-01 -2.93196887e-01 -1.30456626e-01] [ 7.18977768e-03 6.15807772e-02 1.77924961e-01 ... 1.80897163e-03 1.41804367e-01 -1.52083993e-01]] [[-2.19932348e-02 2.71430910e-02 -2.87878159e-02 ... -2.79435050e-02 -7.63673037e-02 6.06785417e-02] [-9.60460231e-02 -6.40958622e-02 9.21478719e-02 ... 1.67976931e-01 7.07347766e-02 -1.38207272e-01] [ 1.77882329e-01 -4.82474938e-02 -1.71010017e-01 ... 8.30558315e-02 4.64480668e-02 9.29675996e-02] ... [ 6.74893260e-02 1.85811028e-01 2.44925618e-02 ... 1.78841362e-03 5.77898659e-02 -4.41114232e-02] [ 2.65500933e-01 -2.57594381e-02 -9.23687294e-02 ... 1.95950463e-01 1.23088788e-02 -1.38053382e-02] [ 2.73075048e-02 -1.35423124e-01 -1.57774583e-01 ... 1.20315820e-01 1.07524013e-02 2.22373739e-01]] ... [[-1.13619030e-01 1.23216519e-02 1.56004811e-02 ... 
4.99336682e-02 4.15468104e-02 7.72891790e-02] [ 1.18901648e-01 1.13336183e-02 -2.22737521e-01 ... 7.45849907e-02 -4.39212993e-02 1.41289473e-01] [ 2.30906978e-02 9.44628555e-04 -1.34272754e-01 ... -3.13614488e-01 7.09373653e-02 5.74404933e-03] ... [ 3.41386087e-02 1.06520616e-02 -3.41151915e-02 ... -1.35354251e-01 -1.05787382e-01 1.83555111e-01] [ 9.41721126e-02 -2.09150463e-03 -2.20161472e-02 ... -8.41495916e-02 -1.09522147e-02 -1.63020138e-02] [-5.36013432e-02 -2.83704381e-02 5.75895468e-03 ... 7.38243237e-02 9.56263114e-03 -8.28490779e-03]] [[-2.22724453e-02 -3.35512906e-02 6.15676865e-02 ... 3.48179825e-02 1.62286982e-01 8.52621123e-02] [ 1.53133810e-01 8.93517062e-02 -1.19044341e-01 ... -9.41671729e-02 -1.23798467e-01 -3.02572817e-01] [-6.20405301e-02 2.04870880e-01 7.61784092e-02 ... 6.91796020e-02 8.88173804e-02 -1.45750549e-02] ... [-1.19799562e-01 6.20601838e-03 -4.78739478e-02 ... 7.52529204e-02 -1.51891291e-01 2.88868770e-02] [-5.61989062e-02 -1.16652055e-02 1.13706067e-01 ... -5.32981269e-02 -1.28375292e-02 -5.92811219e-02] [ 1.42999038e-01 4.12635021e-02 1.18259564e-02 ... -9.26157832e-02 1.19650150e-02 3.48991640e-02]] [[ 3.47637497e-02 4.75458838e-02 1.33016825e-01 ... -5.72883114e-02 -9.69098583e-02 1.42942563e-01] [ 1.26980692e-01 -1.55091703e-01 -4.04507993e-03 ... -5.12719266e-02 -1.14313267e-01 5.62997125e-02] [ 7.29234070e-02 4.61747646e-02 -8.32808465e-02 ... 1.55355288e-02 -1.86081663e-01 -7.39680305e-02] ... [ 4.01273854e-02 -6.22452423e-02 2.31351435e-01 ... -3.46859545e-02 -8.57281387e-02 8.98180753e-02] [-2.06397027e-02 1.04848109e-01 2.19619080e-01 ... -1.20334122e-02 -1.19356420e-02 1.50391176e-01] [-2.92121321e-01 -8.61798450e-02 5.56793287e-02 ... 8.82008448e-02 1.44350007e-02 9.87436697e-02]]] [[[-3.74949276e-01 3.40930969e-01 -4.75310013e-02 ... 3.74951780e-01 9.24987078e-01 3.18471879e-01] [ 4.04092222e-01 6.00721650e-02 3.30006838e-01 ... 
-5.96353561e-02 8.31755996e-02 6.52780175e-01] [ 7.30214268e-02 5.87283671e-01 -2.53594816e-01 ... -4.39140081e-01 -2.05539718e-01 -1.01644218e+00] ... [-5.26194513e-01 3.83707374e-01 7.03232825e-01 ... 6.23871148e-01 5.57702780e-01 4.99897659e-01] [ 6.60184503e-01 4.55584496e-01 1.16280474e-01 ... 1.07913637e+00 -6.73116604e-03 4.34048384e-01] [-8.35364699e-01 8.33874106e-01 7.94707179e-01 ... 1.98685825e-01 -2.73349583e-01 5.68985522e-01]] [[ 4.05924380e-01 3.51028472e-01 5.18157184e-01 ... 7.79737294e-01 -6.85406983e-01 -1.08200753e+00] [-2.93544263e-01 -3.70104006e-03 8.05689245e-02 ... 3.40081573e-01 -2.60979563e-01 1.00246918e+00] [-3.67728233e-01 -3.35645676e-01 1.39970791e+00 ... 7.07562506e-01 1.70906559e-01 1.35402703e+00] ... [-2.62321085e-01 -8.19921494e-01 -8.54316771e-01 ... -1.66597694e-01 7.52189517e-01 -3.91041100e-01] [ 8.66425455e-01 2.19358042e-01 6.62845135e-01 ... 5.34436405e-01 2.19396263e-01 -1.03057064e-01] [-4.08832341e-01 4.00150657e-01 -1.83503479e-01 ... 1.69783890e-01 1.32455066e-01 4.48304474e-01]] [[ 9.77765471e-02 2.49283373e-01 -1.27463686e+00 ... -2.71902442e-01 -5.73685229e-01 -6.98379874e-01] [ 8.63293037e-02 -1.85175166e-01 1.00443669e-01 ... 4.38387692e-01 -5.79090357e-01 -1.50578821e+00] [ 7.90729880e-01 2.94813842e-01 2.05670536e-01 ... -6.40910789e-02 -7.63342738e-01 -1.23113334e+00] ... [-2.44954363e-01 -6.80777550e-01 -2.71296911e-02 ... -7.94987798e-01 -1.19561458e+00 8.61530662e-01] [ 5.08638501e-01 -2.23852694e-01 -5.09653747e-01 ... -2.50302225e-01 -3.59542429e-01 -9.55689549e-01] [-1.36439264e-01 -5.16460657e-01 3.79107147e-01 ... 3.11942607e-01 4.05716412e-02 -9.70807076e-02]] ... [[ 3.33269656e-01 2.07343847e-01 1.51989889e+00 ... 2.57999957e-01 -3.12532246e-01 7.25861862e-02] [ 1.90532841e-02 -4.95410204e-01 -4.33374941e-01 ... -1.74668700e-01 5.35847396e-02 -4.75116909e-01] [-8.12101305e-01 4.64751661e-01 -2.55028218e-01 ... 8.95951316e-02 -3.83780658e-01 -5.02458274e-01] ... 
[ 8.60443532e-01 2.58745737e-02 1.36132944e+00 ... -6.08508885e-01 1.22518903e-02 -4.24615204e-01] [-4.04889584e-02 5.55401966e-02 8.06988835e-01 ... 3.04518074e-01 7.08008051e-01 9.24007520e-02] [-8.15394744e-02 6.70959353e-01 -6.11808658e-01 ... 2.22056746e-01 -4.16755825e-01 -4.18097407e-01]] [[-4.67036545e-01 7.68941879e-01 -6.37129188e-01 ... 9.43395421e-02 8.85291845e-02 -1.46798480e+00] [ 8.58340859e-01 6.04043782e-01 -3.24861795e-01 ... 1.27027974e-01 -6.12044573e-01 -3.52123111e-01] [ 1.44864655e+00 -8.17241788e-01 -3.18053275e-01 ... -7.35483885e-01 -3.63211989e-01 -1.25476563e+00] ... [-1.70959458e-01 1.77395418e-01 -2.81958371e-01 ... -5.00114858e-01 1.94210425e-01 6.91542149e-01] [ 1.51803801e-02 -6.22966170e-01 -3.35926384e-01 ... -5.56179404e-01 -4.90025848e-01 7.06611201e-02] [ 3.07392385e-02 -2.33175568e-02 4.05135453e-01 ... -5.63636482e-01 6.00164711e-01 -5.35421312e-01]] [[ 1.88051276e-02 -1.11566223e-01 -3.84256005e-01 ... -4.74891156e-01 -4.93882418e-01 -7.09967732e-01] [-3.80076736e-01 6.15339100e-01 -8.69594574e-01 ... 5.80415785e-01 -4.00785863e-01 -6.65831327e-01] [-1.37654424e+00 2.13592678e-01 1.38258010e-01 ... 2.75074035e-01 -3.94892573e-01 -3.31855744e-01] ... [-8.83245289e-01 5.41105390e-01 -8.29254150e-01 ... 5.94899654e-01 3.52532387e-01 -6.95275426e-01] [ 7.79920220e-01 -4.10771370e-03 7.42497668e-02 ... 1.08984828e+00 -9.29733634e-01 -1.17739914e-02] [-2.45054886e-02 -2.68668473e-01 -1.06462729e+00 ... -9.99065280e-01 2.34793678e-01 -1.09895878e-01]]]] [[[[ 5.85264921e-01 9.53027681e-02 -1.63548458e+00 ... 6.96833909e-01 -3.81383717e-01 1.33309484e-01] [-1.50196105e-01 -1.50683498e+00 -3.92742574e-01 ... -6.18802667e-01 6.40951097e-01 -6.32690489e-01] [ 1.08229414e-01 -6.31444871e-01 3.38034481e-01 ... -6.66707873e-01 -4.97537367e-02 4.70455796e-01] ... [ 2.62373269e-01 6.66813226e-03 -3.39322776e-01 ... 3.20444316e-01 -2.15443410e-02 1.02244353e+00] [ 1.08882077e-02 -3.15381318e-01 2.96571523e-01 ... 
-2.94966668e-01 -6.63679898e-01 -4.60506320e-01] [ 2.76036188e-02 3.66429538e-01 3.08423974e-02 ... -1.35682869e+00 6.88817441e-01 -6.70295238e-01]] [[ 9.62293744e-01 -6.65854096e-01 1.69946939e-01 ... -3.13456595e-01 8.34207296e-01 -4.45910320e-02] [-9.63272095e-01 1.26134604e-01 6.07036471e-01 ... 6.26743257e-01 2.41282791e-01 -2.98325658e-01] [-5.80339074e-01 6.01709247e-01 7.43339539e-01 ... 5.83996534e-01 3.34076047e-01 1.39513314e-02] ... [-7.33500183e-01 7.30603635e-01 3.69419366e-01 ... 6.95864737e-01 -1.00989819e+00 1.40584648e-01] [-4.72984284e-01 4.89212632e-01 9.23683882e-01 ... 1.73642814e-01 7.20936060e-01 9.47928149e-03] [-9.48369503e-01 -3.48771662e-01 5.37860468e-02 ... -3.74742448e-01 -4.00849938e-01 -8.89818184e-03]] [[-5.76899827e-01 -1.32392907e+00 -5.21376990e-02 ... 7.06573308e-01 -1.66802669e+00 -6.39317989e-01] [ 8.55970234e-02 -1.09197639e-01 -1.06155407e+00 ... -2.71170378e-01 1.54705274e+00 1.69962382e+00] [-4.90484625e-01 5.43670356e-01 -9.81300697e-02 ... 7.09757805e-01 5.45748174e-01 -3.06338549e-01] ... [ 2.46409208e-01 -5.27404547e-01 -5.14114320e-01 ... 4.66217957e-02 3.71320844e-01 5.42177022e-01] [-1.08634986e-01 -4.43901032e-01 -1.00080049e+00 ... 2.92950183e-01 5.54848254e-01 -2.01788649e-01] [-3.18884403e-01 -5.14303803e-01 -2.70948768e-01 ... -5.12131155e-01 -1.68882847e+00 4.46760982e-01]] ... [[ 6.40265465e-01 6.83874488e-01 -9.44975391e-02 ... 3.90802234e-01 -1.13443661e+00 1.98170722e-01] [ 6.66415691e-01 1.79595029e+00 -2.58329093e-01 ... -4.10671979e-01 -1.69736147e-01 6.97261155e-01] [ 1.34564447e+00 2.21364036e-01 -2.39483938e-01 ... 3.43927354e-01 3.68302315e-01 1.30122495e+00] ... [ 5.21263629e-02 4.47526807e-03 -1.03419542e+00 ... -4.04032707e-01 9.96599317e-01 6.29119337e-01] [ 1.12109214e-01 -1.00548342e-01 5.01440108e-01 ... -1.14563465e+00 -6.41522050e-01 -4.98188436e-02] [ 3.99962544e-01 -1.10838437e+00 2.35642686e-01 ... 
9.39124003e-02 -9.22097504e-01 -1.55049098e+00]] [[-3.51946324e-01 -1.23978496e+00 -8.75105143e-01 ... -1.79473743e-01 -2.36884236e-01 8.35429728e-02] [ 4.49876606e-01 8.31345096e-02 1.55708563e+00 ... 3.08034271e-01 -4.95275080e-01 4.61008877e-01] [ 9.00485694e-01 6.46554232e-02 -3.35503876e-01 ... 1.90225458e+00 -1.06734999e-01 -1.12930246e-01] ... [ 1.57057509e-01 4.72454756e-01 8.11329186e-01 ... 2.95875967e-01 7.96482801e-01 1.84509337e+00] [ 1.04729688e+00 4.84254122e-01 7.52931952e-01 ... 4.22866523e-01 4.93654132e-01 -7.53220856e-01] [ 1.34715199e+00 1.03026986e+00 -6.08518004e-01 ... 7.98999488e-01 -1.90648407e-01 2.13446066e-01]] [[ 6.37741148e-01 1.56222773e+00 -7.61922896e-02 ... -4.43509072e-01 6.82536960e-01 8.48380625e-01] [-1.71955419e+00 -2.41691649e-01 9.70854759e-01 ... -1.31847334e+00 4.43085462e-01 -4.46504056e-01] [ 9.82338428e-01 -4.17302072e-01 -1.74650382e-02 ... 1.04269087e+00 -4.18985076e-02 -5.62230386e-02] ... [-1.25151181e+00 6.66967332e-01 7.07376242e-01 ... -1.36580920e+00 -3.19465727e-01 3.69269013e-01] [-6.13238811e-01 -1.21546245e+00 -1.04668725e+00 ... -1.04448581e-02 9.76251289e-02 -5.71416080e-01] [-2.21256733e-01 3.63177627e-01 9.44120228e-01 ... -1.40401796e-01 -1.11561334e-02 -6.65318549e-01]]] [[[ 2.88627863e+00 1.20457661e+00 1.06497371e+00 ... -3.67529094e-01 -2.56440997e-01 2.24658108e+00] [-1.51470995e+00 8.30153406e-01 -5.91141403e-01 ... 3.81202221e-01 -1.53214443e+00 2.84735501e-01] [-6.38791561e-01 -1.19602099e-01 4.04916912e-01 ... -1.78890035e-01 -1.13445878e-01 -1.97149146e+00] ... [ 7.65056014e-01 -3.44218493e+00 -1.77233994e+00 ... -1.79023433e+00 6.73193574e-01 1.25385332e+00] [-2.10155869e+00 7.11325705e-01 1.98959529e+00 ... -2.29514450e-01 -3.40333009e+00 -2.44293630e-01] [ 5.95097244e-01 6.37990952e-01 3.91991645e-01 ... -6.45577073e-01 4.40476924e-01 2.97719908e+00]] [[ 3.92926145e+00 2.97114253e-02 4.98196661e-01 ... 
3.73569995e-01 -4.65906292e-01 -3.99263650e-02] [-3.73355865e-01 1.41096199e+00 -2.18213815e-02 ... -5.08946180e-01 -1.50107294e-01 1.94222558e+00] [-1.42024171e+00 1.52351364e-01 2.92711091e+00 ... 5.47295332e-01 1.53558707e+00 3.42301130e+00] ... [ 2.13645673e+00 -4.06857193e-01 7.18729377e-01 ... 1.11425269e+00 -8.93642157e-02 1.45645058e+00] [-1.11104417e+00 1.19554842e+00 -3.78880173e-01 ... 9.46431905e-02 -2.96867549e-01 4.21326607e-01] [ 9.29713130e-01 -7.27664292e-01 5.45084178e-01 ... -3.00000727e-01 2.72935891e+00 -2.86955297e-01]] [[-5.73190153e-01 3.03544849e-01 -1.51263440e+00 ... -3.64484126e-03 1.51143566e-01 -6.60540760e-01] [ 4.26647484e-01 -1.48167241e+00 -1.06919825e+00 ... -8.38172972e-01 -8.26846778e-01 1.25934947e+00] [ 8.35998654e-02 -3.70687991e-01 2.14020848e-01 ... -1.36415982e+00 -2.52345490e+00 2.29791951e+00] ... [-1.40954208e+00 4.24949378e-02 -7.50640810e-01 ... -1.08480108e+00 -2.55845737e+00 5.56907296e-01] [-1.68174422e+00 -1.19849026e+00 -6.00689292e-01 ... -8.40320587e-01 3.02278757e+00 7.15129733e-01] [-6.79396212e-01 -6.91726685e-01 -1.81202874e-01 ... 2.37159908e-01 -9.66962755e-01 2.13140512e+00]] ... [[ 7.55259395e-02 2.84892946e-01 1.79055130e+00 ... 1.04406750e+00 2.69136250e-01 -1.10938919e+00] [-5.51880121e-01 -1.06691623e+00 -2.69970357e-01 ... 7.37005711e-01 -8.01716566e-01 2.78332710e+00] [-1.37628877e+00 1.60212743e+00 1.32894897e+00 ... 1.14980710e+00 7.72579968e-01 3.31008047e-01] ... [-2.30726063e-01 -8.48515153e-01 -5.71790814e-01 ... 8.52099657e-02 1.40388083e+00 -5.74066818e-01] [ 4.87921946e-02 -1.60296500e+00 -3.61723304e-01 ... 3.62120599e-01 -1.21483636e+00 -9.42042395e-02] [ 6.46186173e-01 1.09248936e+00 4.31415319e-01 ... -5.98815739e-01 -2.38989949e+00 -7.46117592e-01]] [[ 2.23494768e-01 3.87196690e-01 1.17473316e+00 ... 8.84125173e-01 9.85879540e-01 -7.97757924e-01] [-8.46684575e-01 -1.23378408e+00 2.65745580e-01 ... 
-2.39621431e-01 2.82793373e-01 -1.11672258e+00] [-3.33984399e+00 2.43665981e+00 -1.53826618e+00 ... -2.08435988e+00 -8.54467869e-01 3.46762314e-02] ... [ 7.30363950e-02 -1.65472257e+00 3.54925603e-01 ... -1.96403837e+00 -1.72374293e-01 1.41213953e+00] [ 1.92088723e+00 1.25325799e+00 -3.53540391e-01 ... -1.47511387e+00 -1.44598126e+00 -1.57161474e+00] [ 2.64165497e+00 1.38718462e+00 -1.16462684e+00 ... -1.37639415e+00 8.01481307e-01 -2.13304734e+00]] [[ 1.99851155e-01 -8.10691178e-01 1.14584875e+00 ... -1.34785175e+00 -1.96235144e+00 8.21461201e-01] [-6.79621339e-01 1.60401249e+00 1.21997714e+00 ... -7.89303064e-01 -1.06100571e+00 1.36529732e+00] [ 1.07858062e+00 7.78998017e-01 9.78710115e-01 ... 6.94357455e-01 3.23032618e-01 -6.73273802e-01] ... [-1.50643826e+00 -2.53762722e-01 6.39207363e-01 ... 5.23784518e-01 -2.47078240e-01 8.66184115e-01] [ 5.86683713e-02 2.01111412e+00 -7.55765796e-01 ... 7.94596136e-01 -3.24661779e+00 -1.13505030e+00] [-2.55196953e+00 -4.00725424e-01 5.49224257e-01 ... -5.51730335e-01 6.56325996e-01 7.09898397e-02]]] [[[ 5.22275269e-02 -8.43285546e-02 3.17648649e-01 ... 6.91476643e-01 -3.51135433e-01 -4.02556092e-01] [-3.04882556e-01 -2.44603813e-01 -2.47679546e-01 ... 3.28946650e-01 5.00365019e-01 -1.95792377e-01] [-3.63530368e-01 2.16780156e-01 4.11984295e-01 ... -1.85601145e-01 -1.08401440e-01 -9.48412195e-02] ... [ 1.03327394e-01 9.17709693e-02 -4.47649769e-02 ... 2.79931784e-01 -3.94841224e-01 2.62926277e-02] [-4.42708462e-01 1.67493895e-01 3.52229744e-01 ... -1.97985724e-01 -1.42729238e-01 3.95277828e-01] [-2.68623322e-01 -1.79023191e-01 -2.74486244e-01 ... -3.96214157e-01 -4.10470426e-01 9.88259912e-02]] [[-6.22689947e-02 -2.03621328e-01 1.60196558e-01 ... 2.60470927e-01 4.16817129e-01 1.60100326e-01] [-9.89218131e-02 2.12399304e-01 3.30670148e-01 ... -7.33255967e-02 1.90826148e-01 2.64383882e-01] [-1.97846413e-01 -9.28846076e-02 -3.53362203e-01 ... 2.13807464e-01 -4.50745970e-01 3.00239176e-01] ... 
[-2.57210936e-02 2.62541294e-01 6.46844730e-02 ... -1.71902478e-01 -5.89205930e-03 1.09311976e-01] [ 6.43326864e-02 1.45319879e-01 4.92164046e-02 ... 2.27035861e-02 -3.07069004e-01 2.13082403e-01] [-1.09652236e-01 4.12508190e-01 -1.02265909e-01 ... -4.50742804e-02 -3.87265921e-01 3.61046568e-02]] [[ 5.38829565e-02 -7.33580291e-01 -2.36814499e-01 ... -2.20138624e-01 3.86639327e-01 -3.10218155e-01] [ 4.78746951e-01 2.07726792e-01 2.51085252e-01 ... 2.44369775e-01 -3.40959758e-01 -3.51996690e-01] [ 2.64947653e-01 1.29685760e-01 1.57158598e-01 ... 1.06830999e-01 8.21409225e-02 3.39864254e-01] ... [-2.83249944e-01 4.32639807e-01 3.78819168e-01 ... -2.38019675e-02 -5.83304245e-05 -2.31511910e-02] [ 8.46515372e-02 -1.56572446e-01 -1.32562071e-01 ... 1.57982875e-02 -1.80003673e-01 1.52048334e-01] [-2.17485934e-01 3.87515649e-02 1.82321265e-01 ... 7.42793679e-02 1.20347261e-01 -1.01834610e-01]] ... [[-1.87841132e-01 2.03835994e-01 -6.19857982e-02 ... -3.83804403e-02 2.17837304e-01 -7.44748592e-01] [ 2.47885555e-01 3.64905149e-02 4.58599664e-02 ... -1.20248944e-01 -4.56241995e-01 7.28332400e-01] [ 1.20947152e-01 2.27247030e-01 -1.22278422e-01 ... -1.64983258e-01 2.70873874e-01 4.21746701e-01] ... [-2.82769632e-02 2.74900764e-01 1.25787944e-01 ... 2.78294623e-01 3.80583793e-01 -1.48581907e-01] [ 2.11015776e-01 -3.24750468e-02 2.77321693e-02 ... -9.36103240e-02 1.72032937e-01 2.33593434e-01] [ 1.78005368e-01 3.08300853e-02 -2.27573380e-01 ... -1.63185835e-01 -1.99301243e-01 1.41817881e-02]] [[-9.81629342e-02 1.84106335e-01 -2.11083770e-01 ... -3.46362293e-01 1.52169093e-01 1.22455716e-01] [-8.46653730e-02 4.12694030e-02 -2.10655630e-02 ... -9.36767757e-02 2.41440579e-01 9.57395658e-02] [-5.53612038e-02 -2.78392494e-01 4.72252756e-01 ... -2.89710045e-01 -2.57151395e-01 9.65555906e-02] ... [ 2.66864508e-01 -5.23348190e-02 -1.04628494e-02 ... 2.03654125e-01 2.44647250e-01 3.74826193e-02] [ 5.97650521e-02 -1.16575785e-01 3.40290852e-02 ... 
-1.24446526e-01 3.62196892e-01 -5.74613847e-02] [ 2.15314664e-02 -5.78109741e-01 -1.40460089e-01 ... -3.45660418e-01 -5.72030663e-01 7.09027946e-02]] [[ 3.45818698e-01 1.57288060e-01 1.04677893e-01 ... -4.13188599e-02 2.22467870e-01 -1.70785487e-01] [-4.98546101e-02 -1.32292390e-01 -3.39466244e-01 ... 1.97126180e-01 -9.53900442e-02 2.21385762e-01] [-1.74879491e-01 -6.53768182e-02 -3.54750365e-01 ... 2.40911439e-01 3.63882095e-01 -7.50924885e-01] ... [-1.86677843e-01 8.46037939e-02 -1.84853315e-01 ... 1.30161464e-01 -1.28872812e-01 9.51208547e-02] [-2.18512341e-01 -1.15673274e-01 -3.49305809e-01 ... 1.19567558e-01 -2.27358773e-01 -2.07415611e-01] [-2.47050852e-01 -4.47313815e-01 -6.83887750e-02 ... 5.62740266e-01 -3.35774809e-01 -9.27595645e-02]]] [[[ 8.61556470e-01 2.53962874e-01 5.48189700e-01 ... -1.76108730e+00 -2.09459281e+00 -2.73873389e-01] [-1.53767657e+00 1.18983662e+00 6.84584260e-01 ... -9.59500015e-01 -9.98835027e-01 -2.01248813e+00] [-7.58242309e-01 1.22250473e+00 1.03542936e+00 ... -9.27902162e-01 3.45555156e-01 4.68309075e-01] ... [-1.80588555e+00 -2.03729868e+00 -6.94635391e-01 ... 3.40440691e-01 6.26823604e-02 5.98460376e-01] [-7.36849308e-01 -3.52963781e+00 -7.21317410e-01 ... -2.23911190e+00 6.08238161e-01 -3.04679561e+00] [ 1.72332215e+00 -2.25384045e+00 -1.44575596e+00 ... -1.13600624e+00 1.98000169e+00 -9.32660639e-01]] [[ 1.09654272e+00 -3.04632998e+00 -9.78717566e-01 ... 1.27459967e+00 5.01671806e-02 1.09367050e-01] [-5.96598625e-01 6.47399187e-01 -2.70434093e+00 ... 3.73694211e-01 -3.08987290e-01 4.18992430e-01] [ 1.56024063e+00 1.43584573e+00 3.95558524e+00 ... 6.97562575e-01 -5.66246688e-01 4.57123339e-01] ... [ 3.38668108e+00 7.23706007e-01 -1.47605881e-01 ... 1.64999747e+00 1.50432515e+00 -4.54645967e+00] [-2.55786920e+00 5.58858752e-01 -5.37179232e-01 ... -1.02493858e+00 -1.93862832e+00 -2.23770881e+00] [-1.32959950e+00 1.81471753e+00 1.40772498e+00 ... 
6.19584739e-01 3.79859662e+00 -1.25281584e+00]] [[-1.61525652e-01 -1.15615678e+00 -8.65303576e-02 ... 9.95380759e-01 -5.37240207e-01 -9.70975041e-01] [ 1.11478078e+00 -7.99226463e-01 -3.32915831e+00 ... 1.32699743e-01 9.03220892e-01 -1.27463317e+00] [-2.98344254e+00 -1.26266301e+00 -1.60946637e-01 ... 9.74906266e-01 -2.09651038e-01 3.52983773e-01] ... [-5.22446275e-01 9.89598215e-01 8.80802214e-01 ... -1.45954883e+00 -8.05498138e-02 1.05578208e+00] [-5.84711194e-01 -8.83214891e-01 1.38153565e+00 ... 7.06394315e-01 9.15977597e-01 8.11117962e-02] [-2.46233821e+00 -2.66429257e+00 2.05384231e+00 ... -1.01608217e+00 2.02344799e+00 -5.30085921e-01]] ... [[ 1.11916035e-01 1.64211595e+00 6.33331165e-02 ... 7.30725288e-01 -1.54983079e+00 1.20847428e+00] [-2.36163691e-01 1.69821894e+00 -5.09125553e-02 ... -6.02893829e-01 -1.18587387e+00 9.56475377e-01] [-8.34903359e-01 -3.24530303e-02 -9.78370547e-01 ... 7.41780281e-01 -8.89106274e-01 -1.91566765e-01] ... [-6.11955747e-02 -4.18744296e-01 6.05160415e-01 ... -1.35150111e+00 -8.27550948e-01 -1.87279892e+00] [-5.83819568e-01 -1.48414898e+00 -9.90878701e-01 ... -2.90686727e-01 -5.22250593e-01 -1.93847525e+00] [ 6.83436573e-01 5.83352327e-01 2.93387562e-01 ... 1.14600830e-01 -6.21622145e-01 2.21481252e+00]] [[ 9.41174805e-01 1.67408168e+00 -7.38909841e-01 ... -7.21791089e-01 1.61513579e+00 3.83312523e-01] [ 5.81732988e-01 6.85623586e-01 -1.93896401e+00 ... -7.58036733e-01 3.29684556e-01 -2.98562479e+00] [-1.05200753e-01 -5.08428097e-01 -1.19752467e+00 ... -9.70548868e-01 -2.87580466e+00 3.41104794e+00] ... [-1.55403829e+00 -1.89458914e-02 -1.79593384e+00 ... 6.11720145e-01 1.36561191e+00 -1.10714245e+00] [-4.81831223e-01 -6.73331439e-01 -8.11093226e-02 ... -9.48609114e-01 3.12425399e+00 -1.17435241e+00] [ 1.59155226e+00 -1.39530170e+00 -5.82144797e-01 ... 1.00395095e+00 9.97609258e-01 8.20392668e-01]] [[-2.28435087e+00 -1.08120668e+00 2.84741831e+00 ... 
1.72090614e+00 -2.36548185e+00 -2.63965279e-01] [-2.90812522e-01 -1.71314311e+00 8.82805884e-01 ... 1.48990226e+00 -3.07335901e+00 -3.44211006e+00] [ 1.89176917e+00 1.37054861e-01 -5.57435751e-01 ... -3.27727497e-01 2.79536247e+00 1.54540515e+00] ... [ 1.29369557e+00 -9.38827455e-01 3.02615905e+00 ... 1.57392347e+00 -7.11050093e-01 1.25928128e+00] [ 1.37501612e-01 -9.32922512e-02 -9.70463455e-01 ... -6.48228288e-01 -1.05978990e+00 -4.55330372e-01] [ 7.03273892e-01 1.74166763e+00 4.99235213e-01 ... -1.09722352e+00 2.74732649e-01 5.80790937e-01]]] [[[ 1.06634624e-01 -9.33454111e-02 1.20901518e-01 ... 9.36609805e-02 -4.19285521e-02 1.10683534e-02] [ 1.51666120e-01 1.19671822e-02 6.73556030e-02 ... -7.57535920e-02 -3.78632694e-02 1.21490605e-01] [-9.59543064e-02 6.27004579e-02 -4.11978625e-02 ... 8.95903781e-02 1.30833030e-01 7.21843988e-02] ... [ 1.52936742e-01 -2.72383261e-02 9.40691773e-03 ... -1.05701178e-01 5.76272793e-02 6.59083501e-02] [-8.24590400e-02 -6.16999343e-02 3.83082847e-03 ... -7.15286061e-02 -1.05154432e-01 2.93446779e-02] [ 5.10461302e-03 6.81120381e-02 1.72015391e-02 ... 8.18524510e-02 -1.32888794e-01 2.65759788e-02]] [[-1.10888362e-01 -3.31758112e-02 1.85703650e-01 ... -2.29276314e-01 1.27565533e-01 -8.29385146e-02] [ 9.33543816e-02 4.83148918e-02 1.34798307e-02 ... 4.68604043e-02 -6.32684007e-02 5.74455261e-02] [ 1.83425546e-02 -2.25764997e-02 1.68369085e-01 ... 1.26502469e-01 1.75091922e-01 1.56914860e-01] ... [-2.19554943e-03 -3.76308411e-02 3.50298807e-02 ... 8.81096125e-02 2.87000705e-02 -1.74010359e-02] [ 9.02313665e-02 1.31663820e-02 1.77781358e-02 ... 1.92434132e-01 1.08412482e-01 1.85604423e-01] [ 2.71132112e-01 -1.25546813e-01 -4.44584191e-02 ... 8.33506808e-02 1.14959233e-01 -4.39064056e-02]] [[ 1.34044752e-01 6.10192046e-02 -1.79314628e-01 ... -3.44776213e-02 -7.74046183e-02 -2.18825996e-01] [-5.18025085e-03 3.77812311e-02 6.79116771e-02 ... 
-9.39071085e-03 5.65424897e-02 3.83826554e-01] [-1.70811012e-01 -2.31518477e-01 4.03733030e-02 ... -6.10613339e-02 6.51502833e-02 -2.76629418e-01] ... [ 6.02685995e-02 -7.17154518e-02 5.74485473e-02 ... 8.72057825e-02 4.93839793e-02 -7.35671353e-03] [ 3.96245643e-02 -9.73417014e-02 -2.26971164e-01 ... -1.47202983e-01 -5.30933924e-02 1.06159970e-01] [ 2.58796252e-02 -1.18283398e-01 -1.02132298e-01 ... -2.99378540e-02 6.57106861e-02 2.90367613e-03]] ... [[ 3.96620221e-02 -6.50058985e-02 -3.52418363e-01 ... -1.02333717e-01 -3.52758676e-01 1.04071096e-01] [ 1.17407572e-02 -3.95104252e-02 -6.47515804e-02 ... -2.14732941e-02 -1.61583528e-01 6.35955557e-02] [ 1.28487289e-01 3.19236778e-02 8.58111084e-02 ... -8.41837823e-02 8.33270773e-02 1.54843871e-02] ... [ 2.26747349e-01 -8.59395638e-02 3.32528502e-02 ... 1.85619742e-01 -5.25457896e-02 -1.49518028e-01] [ 1.01800092e-01 9.11171883e-02 1.83640253e-02 ... -6.56480342e-03 -3.72067472e-04 -6.38704142e-03] [-2.16456756e-01 6.33996911e-03 7.61603564e-02 ... -1.78545378e-02 1.41577229e-01 -1.44901231e-01]] [[-1.44283906e-01 1.50874645e-01 7.36048073e-02 ... -6.48515299e-02 9.22761187e-02 1.72259420e-01] [ 4.47104871e-02 -2.95725721e-03 -9.13823172e-02 ... 8.00810382e-03 8.43815729e-02 2.15752438e-01] [-5.38066849e-02 -1.13384776e-01 -9.88877639e-02 ... 5.15325852e-02 -4.41113710e-02 -2.38180719e-02] ... [ 8.47618282e-02 6.47647446e-03 -6.65775388e-02 ... 8.36641043e-02 9.42344964e-02 -6.31571636e-02] [ 3.68110053e-02 8.38889107e-02 4.41114530e-02 ... 5.25944829e-02 6.09960705e-02 -1.88506246e-01] [-9.13412571e-02 -2.42246822e-01 -2.52618850e-03 ... 1.54802561e-01 -1.44619063e-01 4.65857722e-02]] [[-5.92201389e-03 -2.64517348e-02 2.83824420e-03 ... -9.97780543e-03 -8.24433658e-03 -1.03936493e-01] [-1.04546649e-02 2.93625481e-02 1.01102129e-01 ... 1.58437267e-01 5.82828857e-02 6.23578951e-02] [-1.07982054e-01 -5.72998822e-02 3.54101270e-01 ... -2.10255664e-02 2.79608339e-01 -1.77711070e-01] ... 
[-1.28820091e-01 2.04918049e-02 -2.04605274e-02 ... 2.50375327e-02 5.17254770e-02 -3.44369008e-04] [-3.27881868e-03 -1.34234965e-01 1.06953405e-01 ... -6.87781498e-02 -6.40772283e-02 2.24543452e-01] [-6.97420910e-02 -1.17829911e-01 1.15366459e-01 ... 7.26021901e-02 1.09832272e-01 -2.19390867e-03]]] [[[ 1.00620151e+00 3.26634258e-01 -1.51479557e-01 ... 1.04108930e+00 -2.17572227e-01 -5.40534794e-01] [-1.10678434e+00 1.83603525e-01 6.12676084e-01 ... -4.01420653e-01 9.97522354e-01 -4.74322617e-01] [ 2.60281175e-01 -6.08453095e-01 -3.71584922e-01 ... -2.59311646e-01 -6.40251860e-02 8.94156247e-02] ... [-8.42835009e-02 1.92785278e-01 1.12541795e-01 ... 1.38550615e+00 -2.58366555e-01 8.06671605e-02] [-1.81438291e+00 -5.94301522e-01 1.39579847e-01 ... -2.79619783e-01 -6.64127469e-02 5.63338041e-01] [-6.67921901e-02 6.61426187e-01 -4.90286499e-01 ... 1.37954623e-01 -4.55338135e-02 5.31649351e-01]] [[-8.85549784e-01 4.92294692e-03 -4.50951040e-01 ... 5.70996940e-01 -7.97409594e-01 -2.37184837e-01] [ 8.43872905e-01 7.51654148e-01 -1.92033693e-01 ... 4.95138228e-01 3.55227202e-01 1.06440395e-01] [-1.07655108e+00 2.44790569e-01 5.11944771e-01 ... -2.94978440e-01 -5.41079082e-02 -4.05218393e-01] ... [-1.44565642e-01 2.83886403e-01 5.36079586e-01 ... 1.12035051e-02 1.03484738e+00 -4.52681817e-02] [ 5.40983498e-01 4.78618950e-01 -1.76346395e-02 ... 6.99608102e-02 -1.27687082e-01 -5.42609096e-02] [-8.74872208e-02 1.54438749e-01 -1.18613076e+00 ... -1.19398248e+00 -8.74657705e-02 3.30388367e-01]] [[-4.59489703e-01 -7.36522138e-01 4.85642701e-02 ... -7.84754217e-01 -3.35881919e-01 4.89322662e-01] [-1.81237206e-01 -5.79538465e-01 5.56378961e-01 ... -1.99599549e-01 -8.38361204e-01 7.89503038e-01] [-8.95061672e-01 4.01794314e-01 -1.08781368e-01 ... -1.15957034e+00 1.42568439e-01 9.90955085e-02] ... [-8.06917608e-01 -4.90672678e-01 4.41637456e-01 ... 7.33593479e-02 -1.86292425e-01 7.40284920e-02] [ 5.57211041e-01 -5.63851655e-01 -1.14355481e+00 ... 
-1.24307625e-01 -6.17502570e-01 -5.25313437e-01] [-8.70591626e-02 -6.38762832e-01 -4.69773598e-02 ... -3.90713960e-01 1.22462943e-01 1.67980269e-01]] ... [[-5.04248083e-01 3.72590095e-01 7.00669229e-01 ... 2.73841381e-01 -6.69372499e-01 -4.91665363e-01] [-7.01110482e-01 -9.08649385e-01 -3.86098504e-01 ... -3.67179364e-01 9.96331275e-01 -7.64158368e-01] [-1.98505878e-01 -3.71443659e-01 -1.23058982e-01 ... 1.92550689e-01 -3.13241243e-01 3.18110436e-01] ... [-6.44979835e-01 -7.45717645e-01 -3.61518741e-01 ... 9.57747042e-01 -2.66146481e-01 -1.02365696e+00] [ 1.08503866e+00 3.15584958e-01 9.22334611e-01 ... -6.51692972e-02 -4.39639777e-01 -4.25746739e-01] [-1.27320528e-01 -3.24208313e-03 -8.27626824e-01 ... -3.17236781e-01 -4.95051295e-01 -1.30261421e+00]] [[-6.09339535e-01 7.50713050e-01 4.69971389e-01 ... 1.77416354e-01 -4.31270413e-02 5.57754993e-01] [ 9.88409296e-02 8.60757411e-01 -3.90615948e-02 ... -6.80524766e-01 2.62346685e-01 8.05848658e-01] [-2.74416566e-01 1.21812217e-01 9.64426398e-02 ... -7.01870739e-01 -8.25325072e-01 -8.12804699e-02] ... [ 9.47140276e-01 -9.21081841e-01 6.44733191e-01 ... 8.13045323e-01 4.22023535e-01 4.53110993e-01] [-6.15565218e-02 4.00735766e-01 3.87465239e-01 ... 5.76904155e-02 8.10497999e-01 -4.25977409e-01] [ 8.36311579e-02 -7.73237228e-01 1.94600016e-01 ... -5.86229146e-01 -2.24130809e-01 3.07863474e-01]] [[-1.13350540e-01 1.61468804e-01 4.98405360e-02 ... -4.62767839e-01 -6.79004073e-01 2.56883919e-01] [-4.59665731e-02 -9.13846433e-01 -8.48991811e-01 ... 8.59537959e-01 -3.03369164e-01 -3.72437865e-01] [-3.06713521e-01 3.55430484e-01 -1.08350992e+00 ... -5.46084523e-01 -1.06691313e+00 5.26034236e-01] ... [-6.66284263e-02 -4.37523931e-01 4.81427670e-01 ... -1.62090257e-01 -5.79045713e-01 -2.22281143e-01] [-1.01324654e+00 -6.56700805e-02 1.22812577e-01 ... -2.99904048e-01 -4.28146392e-01 1.73785836e-01] [ 6.45881072e-02 -3.05070251e-01 1.41531646e-01 ... 
5.85856020e-01 6.74748719e-01 -4.05901343e-01]]]] [[[[ 9.14591551e-01 8.60337377e-01 -8.50565016e-01 ... 6.47793293e-01 -9.24926460e-01 -5.64092457e-01] [-1.70958355e-01 2.53724635e-01 -2.75524165e-02 ... 1.44071773e-01 2.34288439e-01 8.22374046e-01] [-9.33302402e-01 -1.28272384e-01 -2.59442955e-01 ... -8.99905741e-01 8.57265055e-01 2.52691898e-02] ... [ 8.01408350e-01 -2.17454478e-01 1.46398902e-01 ... -1.52973580e+00 1.61394805e-01 3.52006517e-02] [-4.81910110e-01 -5.44603944e-01 -1.24682570e+00 ... 9.93860483e-01 -5.43681905e-02 4.87554729e-01] [-2.80120168e-02 -3.82463604e-01 -9.95844722e-01 ... -2.14321390e-01 1.01008511e+00 5.08233368e-01]] [[-5.20602278e-02 4.59590197e-01 -1.38953790e-01 ... 8.10557008e-01 -2.93522865e-01 -2.49875829e-01] [-8.50259244e-01 -1.67263821e-01 1.21214676e+00 ... -1.55978847e+00 -1.00119662e+00 -3.92915785e-01] [-9.69024539e-01 1.07190871e+00 -2.97899485e-01 ... 2.69081712e-01 -4.28309292e-01 -8.13587546e-01] ... [-5.33030331e-01 -4.32892561e-01 3.79485011e-01 ... 3.11293453e-01 1.46787450e-01 3.97792086e-02] [-4.88170594e-01 -5.76293349e-01 -1.02033818e+00 ... 9.85545367e-02 4.53874201e-01 -1.62361860e+00] [ 2.07405820e-01 1.55475509e+00 -2.18374934e-02 ... -7.30071902e-01 1.41790345e-01 4.01399940e-01]] [[ 4.85274076e-01 -5.78870058e-01 8.18712175e-01 ... 1.05710126e-01 -3.59101325e-01 1.63212463e-01] [-1.02414954e+00 8.09009969e-01 -7.97297060e-01 ... -7.40692496e-01 -2.73776919e-01 7.56090760e-01] [-1.16302148e-01 -3.32703143e-01 -7.32429981e-01 ... -3.47441971e-01 -3.62638593e-01 1.20030546e+00] ... [-4.74573299e-02 -8.29476178e-01 -1.44149050e-01 ... -5.88705838e-01 -8.19492340e-02 3.80145073e-01] [ 6.05932511e-02 1.45096254e+00 -4.44688827e-01 ... 3.01390439e-01 -1.41038585e+00 2.50906516e-02] [ 6.26982212e-01 6.71656251e-01 5.39382994e-01 ... 1.79278493e+00 2.55161583e-01 -1.09618790e-01]] ... [[-2.12267399e-01 -3.40004772e-01 -4.66806680e-01 ... 
-5.39224386e-01 5.04606627e-02 -2.18527406e-01] [-7.05192149e-01 -3.03390324e-01 -1.54597151e+00 ... 4.42963332e-01 -1.10571015e+00 4.55208980e-02] [ 1.01986682e+00 4.46692854e-01 -1.14902580e+00 ... 1.41086683e-01 -1.66677937e-01 -5.97478710e-02] ... [ 4.39472981e-02 -2.21813321e-01 -2.46782318e-01 ... -3.89644712e-01 1.66537315e-01 -2.70350724e-01] [-1.83671817e-01 -1.34195268e-01 -5.58937967e-01 ... 4.25732285e-01 8.81127834e-01 3.68973948e-02] [ 1.94045439e-01 -2.83894509e-01 -1.33457375e+00 ... 8.36414456e-01 -4.90438491e-01 4.50661778e-01]] [[-8.05592775e-01 -3.70682538e-01 -8.45172331e-02 ... 8.67726922e-01 7.27770984e-01 -3.79305869e-01] [ 8.55085254e-02 -1.71532845e+00 -4.42345470e-01 ... -1.03938341e+00 4.14705485e-01 7.96767414e-01] [-3.49257946e-01 2.25633875e-01 -5.87327927e-02 ... -8.64258528e-01 4.61724311e-01 7.15692341e-02] ... [ 1.74603379e+00 7.44354129e-01 2.32541054e-01 ... -9.21798289e-01 -2.74231702e-01 3.31921130e-01] [ 3.66542339e-01 -6.71168864e-01 4.23781693e-01 ... -1.94467623e-02 6.70166671e-01 -8.80448818e-01] [ 5.76258719e-01 2.17146024e-01 -9.08457875e-01 ... 5.84593892e-01 7.20186889e-01 -6.64189994e-01]] [[ 3.50614756e-01 -6.20809160e-02 4.31690425e-01 ... -8.77209961e-01 -1.38218164e+00 -5.09987891e-01] [ 4.74602878e-01 6.55873775e-01 -5.69829226e-01 ... 2.47951210e-01 7.15641379e-01 -8.97152066e-01] [ 5.38597643e-01 5.30417562e-01 5.41061938e-01 ... -5.44022501e-01 -8.50061998e-02 1.40020460e-01] ... [ 9.57193747e-02 1.27801716e+00 1.03374243e-01 ... -1.20579684e+00 3.15728843e-01 -1.17952383e+00] [-7.90947199e-01 -4.96380985e-01 -9.04852927e-01 ... -8.93466711e-01 4.18538451e-01 7.67516851e-01] [ 2.00065523e-01 -1.16743350e+00 -5.23315132e-01 ... 5.13504863e-01 -2.64918238e-01 9.58748579e-01]]] [[[-8.32442343e-02 8.68704915e-01 1.23439407e+00 ... 4.77323920e-01 3.87976646e+00 1.34895182e+00] [ 2.19453788e+00 -1.33976758e-01 7.11753726e-01 ... 
-7.51335680e-01 2.98347533e-01 -1.78136981e+00] [-3.57227373e+00 1.69132113e+00 1.29048026e+00 ... 1.31450093e+00 -1.32711637e+00 4.10659701e-01] ... [-4.72271442e-01 1.11176562e+00 -1.55954063e+00 ... 3.74636233e-01 1.43195450e+00 1.97844028e+00] [-1.67831194e+00 1.98216581e+00 -5.66906035e-01 ... 9.91923690e-01 2.87571818e-01 -2.35184598e+00] [ 1.83186150e+00 6.34429634e-01 -6.17570400e-01 ... 1.13930094e+00 -2.42770523e-01 1.52797115e+00]] [[ 4.70182717e-01 2.81030893e-01 7.47434080e-01 ... -1.47537661e+00 -2.40043330e+00 1.08283174e+00] [ 1.01436055e+00 -1.85218513e+00 1.86530054e+00 ... 1.15120041e+00 -1.55907845e+00 1.21499276e+00] [-4.12648916e-01 1.24169660e+00 -1.21512502e-01 ... -1.90068185e-01 -1.09511256e+00 2.70682001e+00] ... [ 1.40916836e+00 -1.81502116e+00 7.69947350e-01 ... 6.55951858e-01 8.15383315e-01 -2.73409534e+00] [ 8.02437842e-01 -5.10257006e-01 3.63213867e-01 ... 4.66692030e-01 -1.09889793e+00 9.61095810e-01] [ 1.89131260e+00 -6.90564930e-01 1.82006705e+00 ... 2.17169523e-01 8.18235457e-01 1.70857489e+00]] [[ 2.76961893e-01 -7.63671985e-03 2.65085268e+00 ... -1.43353939e+00 1.64837241e+00 1.56082392e+00] [-1.55602479e+00 -1.70752871e+00 -1.46024740e+00 ... 1.55416584e+00 1.10785329e+00 -1.47324991e+00] [ 2.14880037e+00 6.29932880e-01 -4.44979399e-01 ... 1.82535574e-01 -5.05073726e-01 -3.31733003e-02] ... [-4.18591142e-01 -1.32498372e+00 -3.91068995e-01 ... 2.22515154e+00 -4.63940948e-01 -1.62860954e+00] [-4.92291063e-01 -7.50258803e-01 -1.10457981e+00 ... 1.46610069e+00 1.86481702e+00 -1.08835971e+00] [ 2.04657006e+00 1.33726513e+00 9.20404017e-01 ... -4.81344432e-01 9.94255662e-01 -1.15328562e+00]] ... [[-8.22119042e-02 -8.59722197e-02 2.75984645e+00 ... 7.35604346e-01 2.37772560e+00 1.67883778e+00] [ 1.45920277e+00 8.12292993e-01 5.24242043e-01 ... -1.64162230e+00 -4.62032616e-01 -1.58020735e+00] [ 2.52478385e+00 -1.07143521e+00 2.14582682e+00 ... 1.73347640e+00 -1.56722748e+00 1.92322373e-01] ... 
[ 3.03055912e-01 -6.29976273e-01 5.09633482e-01 ... -9.51435089e-01 3.82258534e-01 -1.66530788e+00] [-6.61371231e-01 -2.97045636e+00 -1.30281556e+00 ... -2.75962637e-03 1.12315500e+00 1.30469203e-01] [-4.88734126e-01 -1.10418804e-01 1.37171054e+00 ... 7.71165788e-01 9.27930653e-01 -9.31557655e-01]] [[ 5.15814424e-01 5.07668138e-01 -3.63710783e-02 ... -5.85517406e-01 -8.56696129e-01 -2.20969772e+00] [-1.14647520e+00 1.25351095e+00 2.42097840e-01 ... -1.11837469e-01 -7.49149203e-01 1.10077786e+00] [ 1.43907666e+00 -1.89014363e+00 -1.98895550e+00 ... -5.50592899e-01 7.79476613e-02 -1.18730068e+00] ... [ 3.14146996e-01 4.90259171e-01 -6.48038507e-01 ... 5.51402628e-01 1.09742093e+00 6.26746953e-01] [ 1.49829507e+00 -5.85735202e-01 6.10443279e-02 ... -1.31187630e+00 -3.85090172e-01 -1.67711103e+00] [-2.49870086e+00 -2.42804027e+00 -9.46894944e-01 ... -6.33871257e-01 3.10847664e+00 -1.33882284e+00]] [[ 2.37772584e+00 9.24064636e-01 1.78169429e+00 ... -2.11266816e-01 -6.40076637e-01 2.23586023e-01] [-3.85210097e-01 4.57516879e-01 3.64721298e+00 ... -2.09285879e+00 6.32338405e-01 2.22059464e+00] [-8.12301338e-01 -1.75651479e+00 -3.32685447e+00 ... 1.84661376e+00 1.06728208e+00 2.04264373e-01] ... [ 1.07593226e+00 -1.00295115e+00 -6.88972294e-01 ... 1.40185595e+00 -5.67552805e-01 -2.76112646e-01] [ 9.89197344e-02 1.50135279e+00 1.47726250e+00 ... -6.15454614e-01 9.81904507e-01 -2.01138422e-01] [-4.95112956e-01 2.50895880e-02 2.65733743e+00 ... 6.16911173e-01 2.65867996e+00 1.02820933e+00]]] [[[-6.07800603e-01 -4.79476340e-02 -1.24900222e-01 ... 2.20787898e-01 2.00503781e-01 -5.64931154e-01] [ 1.74440160e-01 3.05007249e-02 -3.41008939e-02 ... -8.06374177e-02 -3.77787501e-02 6.61734790e-02] [-3.65947753e-01 -1.65634722e-01 2.55403165e-02 ... 9.79346130e-03 -1.90949515e-01 4.42972034e-01] ... [ 5.54786503e-01 8.64122584e-02 3.90754968e-01 ... 3.01436245e-01 5.20009696e-01 -1.82838663e-01] [-1.11832814e-02 -3.20917875e-01 -7.63963209e-03 ... 
2.97801435e-01 -1.75638273e-02 2.45534834e-02] [ 2.54292399e-01 2.61645287e-01 -7.97072053e-02 ... -1.85121343e-01 1.75678059e-01 -3.83132219e-01]] [[-1.30997419e-01 3.48330736e-01 -2.59657234e-01 ... -3.06038201e-01 -2.42587730e-01 3.80679548e-01] [-1.96968645e-01 1.28137290e-01 -7.72407278e-02 ... 3.93360227e-01 -4.57474470e-01 -1.14195131e-01] [ 2.37074986e-01 1.09163309e-02 4.42332804e-01 ... -1.17289066e-01 2.20539704e-01 -2.98686832e-01] ... [ 2.10306704e-01 2.29228199e-01 3.27112705e-01 ... 7.14331642e-02 -3.81923884e-01 2.02842578e-01] [-1.82564020e-01 -3.41758460e-01 1.63215756e-01 ... 8.11538659e-03 2.38171071e-01 2.15123191e-01] [-8.64987727e-03 2.79229522e-01 -9.25289541e-02 ... 4.03741211e-01 -1.05315857e-01 5.38506806e-01]] [[-7.44062245e-01 -2.79823035e-01 2.88980365e-01 ... -9.29464102e-02 -3.12980384e-01 2.73801059e-01] [ 1.04679398e-01 -5.86259179e-02 -1.45673290e-01 ... -1.65478364e-01 2.07456425e-02 -4.66887206e-02] [-1.92593277e-01 6.00529499e-02 -3.03021688e-02 ... 5.94912767e-02 -2.42515340e-01 -4.51380014e-01] ... [-3.28516185e-01 -1.44290790e-01 1.14206307e-01 ... -2.48334587e-01 8.46808776e-02 -8.04994442e-03] [ 1.56433508e-01 -1.44763514e-01 3.08464020e-01 ... -3.74212444e-01 -2.41725132e-01 -6.75014853e-01] [-3.55734788e-02 -2.88058184e-02 4.33794521e-02 ... -4.87989128e-01 -1.73130035e-01 1.35223269e-01]] ... [[-2.48732001e-01 -5.79977930e-02 1.88751668e-01 ... -1.22538336e-01 -3.90428394e-01 -4.68625687e-02] [-4.95352477e-01 -2.70212770e-01 -1.16426155e-01 ... 1.86903346e-02 1.67331815e-01 -8.31666738e-02] [-2.05429038e-03 -2.01547220e-01 1.01664729e-01 ... -2.22272873e-01 -4.28411923e-02 2.23582715e-01] ... [ 4.68710028e-02 4.26389948e-02 1.56534076e-01 ... -2.67714143e-01 -8.66713524e-02 2.42596999e-01] [-2.30881885e-01 -3.92156154e-01 2.29790956e-01 ... -2.04637647e-01 -2.54973710e-01 -2.58141160e-02] [-9.89810377e-02 1.04550146e-01 -3.69900882e-01 ... 
3.13102067e-01 -3.93172652e-02 -2.10958764e-01]] [[-1.55291021e-01 5.69108486e-01 1.97275609e-01 ... -2.99944729e-01 4.70314056e-01 -1.99669808e-01] [-4.57294255e-01 1.42769605e-01 1.81202311e-02 ... 9.82514173e-02 -8.89866501e-02 4.92870957e-02] [ 8.41743574e-02 -1.23807631e-01 -1.36207417e-01 ... 3.41705680e-01 -3.11839640e-01 -6.86304688e-01] ... [-2.74282531e-03 -7.87088368e-03 3.38095665e-01 ... 6.98266737e-03 9.15543810e-02 5.49141727e-02] [ 1.89348295e-01 5.67369573e-02 1.95948362e-01 ... 1.30039053e-02 -3.34018677e-01 -3.33469175e-02] [-1.33325115e-01 -2.23007068e-01 1.68964326e-01 ... -1.72967806e-01 -2.76352048e-01 3.78332399e-02]] [[ 8.33005980e-02 9.11616459e-02 -8.90112817e-02 ... 2.98219919e-01 5.32331727e-02 4.16489720e-01] [ 6.06106341e-01 1.88710973e-01 2.77847975e-01 ... 2.26799235e-01 2.50492811e-01 -9.43688899e-02] [-3.36062402e-01 4.31081429e-02 1.83494925e-01 ... -2.58415192e-02 -2.33575001e-01 -1.19667090e-01] ... [ 2.02700227e-01 9.82791707e-02 1.61350086e-01 ... -4.02405821e-02 -4.88472790e-01 -2.48462200e-01] [ 4.23478693e-01 -7.69076720e-02 2.40299582e-01 ... -2.23844394e-01 -3.01141083e-01 -1.78818122e-01] [ 2.18423650e-01 5.09934500e-02 3.38750422e-01 ... -1.62633255e-01 -1.28692240e-01 -1.44015029e-02]]] [[[ 1.43354845e+00 4.43220329e+00 -9.90782022e-01 ... 2.52737164e+00 -8.90118420e-01 3.11051273e+00] [-6.09543800e-01 3.61683190e-01 1.96662277e-01 ... -5.19476295e-01 -7.38826931e-01 1.38557291e+00] [-1.42820859e+00 3.34418833e-01 -2.26162291e+00 ... 2.12343976e-01 1.78548372e+00 2.71744013e-01] ... [ 1.25683606e+00 2.34983444e+00 -1.62624824e+00 ... 2.18137786e-01 6.35437012e-01 -9.99840274e-02] [-3.00646710e+00 6.13506557e-03 1.13118601e+00 ... -1.13244379e+00 1.16110706e+00 1.00845242e+00] [-7.97085285e-01 -2.94675851e+00 -8.88576150e-01 ... 4.92893219e-01 -8.71418953e-01 1.06760597e+00]] [[-2.26014996e+00 -6.37448132e-01 -2.28777185e-01 ... 
2.78979570e-01 1.55921650e+00 2.21720529e+00] [-4.38994497e-01 7.45777905e-01 2.22202882e-01 ... 1.03256524e+00 7.08017766e-01 1.70612484e-01] [ 1.09866388e-01 -1.53261769e+00 1.31265998e+00 ... -6.13051414e-01 9.40836608e-01 8.76949430e-01] ... [-1.90586126e+00 -2.90401489e-01 1.05829966e+00 ... 4.78685349e-01 9.66731459e-02 -1.75087535e+00] [ 9.56004560e-02 7.13817894e-01 -2.06571078e+00 ... 2.16142088e-02 7.63029397e-01 1.03525825e-01] [-7.14481762e-03 3.94295275e-01 5.19969642e-01 ... -2.08400393e+00 -2.22606301e+00 6.21060729e-01]] [[ 7.30959892e-01 5.47229528e-01 9.47915733e-01 ... -2.62767911e+00 4.61173010e+00 1.46967876e+00] [-6.25972211e-01 1.67519256e-01 -7.03772068e-01 ... 1.73636937e+00 4.79084492e-01 3.62536192e+00] [-7.63482332e-01 1.50155997e+00 -5.49848735e-01 ... 8.27693164e-01 8.40105653e-01 3.13434869e-01] ... [ 1.67975652e+00 -2.70490736e-01 -2.23695993e+00 ... -5.60907900e-01 8.60880136e-01 -3.06160510e-01] [-1.34556735e+00 -1.17024124e+00 -1.25453818e+00 ... 3.03455663e+00 1.15265965e+00 1.17275298e+00] [ 3.08663577e-01 1.80829212e-01 5.19864202e-01 ... 1.61383653e+00 -1.04440415e+00 -9.99980271e-02]] ... [[ 2.92009532e-01 -1.56226158e+00 1.81149757e+00 ... 2.21154594e+00 -3.18833321e-01 9.85058427e-01] [ 4.88851577e-01 1.54005682e+00 -2.29856651e-02 ... 1.96335822e-01 1.04303110e+00 -2.29931545e+00] [ 2.06684589e+00 -1.04909527e+00 3.58061790e-01 ... 2.02248955e+00 -2.55690169e+00 1.55251980e+00] ... [ 9.40112174e-01 -1.26787269e+00 -1.68930650e+00 ... 1.98298499e-01 -2.66920269e-01 -3.18895054e+00] [-1.20287418e+00 1.73136556e+00 1.51389444e+00 ... -1.12085986e+00 7.20133483e-01 -7.13805556e-01] [-5.29767275e-01 8.40430796e-01 -4.25331593e-01 ... -1.87717587e-01 1.10815072e+00 2.99643326e+00]] [[ 9.85741079e-01 3.85430157e-01 -7.07263827e-01 ... -8.25278878e-01 -9.82562304e-01 6.16780639e-01] [-3.37959933e+00 -2.89315075e-01 -1.00542140e+00 ... 
4.01235676e+00 6.56723320e-01 1.78599679e+00] [-1.46426165e+00 -4.74149853e-01 -1.43341649e+00 ... 1.37859002e-01 3.44117618e+00 1.30285466e+00] ... [ 1.13341045e+00 1.81121361e+00 8.37516546e-01 ... 1.59710205e+00 3.51093799e-01 9.42503929e-01] [-6.50245771e-02 -5.15690863e-01 4.20947611e-01 ... 1.14857697e+00 2.69146174e-01 -1.32688880e+00] [ 9.73169804e-01 -2.99088097e+00 2.31377339e+00 ... -3.15316498e-01 1.22362626e+00 -9.92861569e-01]] [[ 5.63052595e-01 2.64268923e+00 -1.30988851e-01 ... 1.04097748e+00 -1.81808615e+00 -1.75522506e+00] [ 4.72873151e-01 1.82048336e-01 1.06938779e+00 ... -3.20526183e-01 -6.64813161e-01 -2.52732456e-01] [ 2.09412575e+00 1.02892077e+00 1.36061060e+00 ... 6.13772154e-01 -4.40734816e+00 8.85508776e-01] ... [-1.35049716e-01 1.32859004e+00 1.68198034e-01 ... -6.00706697e-01 -1.87546146e+00 -6.98248744e-01] [ 2.66169381e+00 2.98011214e-01 -1.83124006e+00 ... 1.56904054e+00 -3.10524893e+00 1.11548686e+00] [-1.33004093e+00 6.99859619e-01 5.93436360e-01 ... -8.63742232e-01 2.40936542e+00 4.24053133e-01]]] [[[-3.49238850e-02 1.54592525e-02 -2.06628069e-01 ... 4.72886348e-03 3.68722677e-02 1.70802418e-02] [-1.64170656e-02 1.74232244e-01 1.94098383e-01 ... 3.22201513e-02 2.00239107e-01 -8.90256166e-02] [-4.42415709e-03 -7.94021338e-02 1.38356984e-01 ... -1.78121701e-02 2.46727057e-02 -1.15170434e-01] ... [-5.53271621e-02 -1.01421684e-01 9.51793045e-02 ... -7.49796107e-02 2.21820444e-01 -8.16659406e-02] [ 7.41782337e-02 8.35430473e-02 -1.03811480e-01 ... -1.95838988e-01 -6.24062121e-02 -8.94966125e-02] [ 5.06630391e-02 5.27331568e-02 -2.51303792e-01 ... -1.36706859e-01 1.15077863e-04 1.98193127e-03]] [[-3.41534093e-02 -3.58056184e-03 2.77104806e-02 ... 5.15639856e-02 1.99311882e-01 3.19749832e-01] [ 1.20738167e-02 -6.63112430e-03 -1.26272023e-01 ... -1.48229033e-01 -4.06634547e-02 -4.43879776e-02] [-1.63729191e-02 -5.61119579e-02 2.87017167e-01 ... 5.50904684e-02 -9.90432426e-02 1.85914516e-01] ... 
[-3.33318934e-02 3.98842469e-02 5.31383492e-02 ... -1.23532571e-01 -1.49547100e-01 1.57483399e-01] [-1.11834094e-01 -2.40466654e-01 1.21873245e-01 ... -4.84183319e-02 -6.98638856e-02 8.56650397e-02] [-7.87563846e-02 6.39734091e-03 3.50816324e-02 ... -8.61507505e-02 -2.68739052e-02 6.73741549e-02]] [[ 4.25100364e-02 5.12281153e-03 -1.49734588e-02 ... 1.39856830e-01 1.08750902e-01 2.24460680e-02] [ 4.88021486e-02 -6.40197769e-02 -1.07611589e-01 ... -1.86903309e-02 -3.13864611e-02 1.56688005e-01] [-9.29016545e-02 6.22441284e-02 -3.48329693e-02 ... -9.04955566e-02 1.87768221e-01 4.62606139e-02] ... [ 1.04517736e-01 1.08945020e-01 -3.41335498e-03 ... -1.39881849e-01 -5.39745130e-02 9.21306163e-02] [-1.08240470e-01 -1.79680184e-01 -2.65124980e-02 ... 7.75468722e-02 1.15065552e-01 9.71174538e-02] [-1.22996494e-01 1.22922204e-01 4.42853160e-02 ... 1.29809901e-02 8.67163464e-02 8.21311101e-02]] ... [[-1.08134426e-01 -1.41485840e-01 -1.18240409e-01 ... 1.44285426e-01 6.09317757e-02 -2.47876998e-02] [ 2.62717932e-01 -1.28373191e-01 9.50184390e-02 ... -3.00984029e-02 1.02454089e-01 2.00085223e-01] [ 1.17916903e-02 -8.84805527e-03 1.31439656e-01 ... 1.14787333e-01 5.83263375e-02 -1.94397178e-02] ... [-4.78044560e-04 2.43146718e-03 -6.00653281e-03 ... -7.87406638e-02 2.31447071e-02 -2.54555885e-02] [ 1.77665785e-01 -2.35733036e-02 3.17853019e-02 ... -1.15327828e-01 8.01672339e-02 5.32055534e-02] [ 1.30356345e-02 -3.65849286e-02 1.09031290e-01 ... 1.32407025e-01 5.16010560e-02 -1.12917356e-01]] [[-1.05700130e-02 2.76481777e-01 2.54034884e-02 ... 3.73484120e-02 -4.61326055e-02 1.66859284e-01] [ 1.04209878e-01 -8.87672752e-02 -1.30603909e-01 ... 5.08003533e-02 -1.78546116e-01 -1.57725930e-01] [-6.37590587e-02 1.62732825e-02 1.01440884e-01 ... -6.87580332e-02 7.50570968e-02 5.92427403e-02] ... [-9.29557011e-02 1.56419590e-01 -3.37619707e-02 ... -1.02337219e-01 -1.18531145e-01 -4.47857603e-02] [ 1.73733328e-02 -4.56067696e-02 -5.84592335e-02 ... 
7.97624514e-02 -6.64208503e-03 9.98810604e-02] [-2.05478184e-02 -4.85266224e-02 -2.70127077e-02 ... -1.69123076e-02 -1.24712639e-01 4.29545939e-02]] [[-1.83899179e-01 -1.50579616e-01 1.84123084e-01 ... 4.69309539e-02 9.41648260e-02 6.00193664e-02] [-9.15537030e-02 5.36005870e-02 -3.00444178e-02 ... 1.65487416e-02 -1.14582665e-01 -1.66669786e-02] [-1.92712843e-02 1.50654390e-02 -5.09620681e-02 ... -4.89085307e-03 9.66705680e-02 1.97001342e-02] ... [-2.01204374e-01 -1.80040658e-01 -8.28729570e-02 ... 3.17295082e-02 6.12724088e-02 -1.80189535e-02] [-3.83186489e-02 -2.56804060e-02 -6.33453280e-02 ... 1.29086390e-01 4.95300740e-02 -4.31751534e-02] [-1.25063553e-01 9.48829353e-02 1.35980457e-01 ... 1.82341874e-01 -6.15967922e-02 2.58814007e-01]]] [[[ 8.38403761e-01 -3.70633662e-01 7.32720613e-01 ... 7.64904261e-01 6.46372080e-01 -2.57140249e-01] [-2.79184043e-01 1.12212622e+00 -3.32394063e-01 ... -2.34777942e-01 -4.45620567e-01 -6.03761852e-01] [-3.81399602e-01 -4.10373837e-01 1.00619622e-01 ... 7.40780592e-01 3.29528823e-02 -5.63702166e-01] ... [-1.60713300e-01 -8.10440302e-01 3.73628616e-01 ... -3.11304443e-03 -4.87707943e-01 -3.46927106e-01] [ 1.69890299e-02 3.88900280e-01 -7.40153015e-01 ... -4.31367874e-01 -3.37909132e-01 1.23272970e-01] [-2.75388539e-01 -8.40744972e-01 4.42835659e-01 ... 3.57372522e-01 4.52199042e-01 -5.08864939e-01]] [[-1.75490171e-01 6.53948411e-02 -1.94122687e-01 ... 8.04685652e-02 1.40443489e-01 2.84549117e-01] [ 6.97060078e-02 -9.62712705e-01 2.03750879e-02 ... -6.54773057e-01 -3.18657637e-01 -7.37201631e-01] [-8.36392820e-01 -4.25279140e-02 -2.23623395e-01 ... 1.73061416e-01 1.47024155e-01 2.52899736e-01] ... [-6.48158014e-01 1.60227641e-01 1.10662386e-01 ... -8.46594691e-01 4.20709223e-01 -2.81353474e-01] [ 4.22665447e-01 -5.72229922e-01 1.74164578e-01 ... -4.84952219e-02 -1.01346684e+00 4.12580043e-01] [-1.02331027e-01 -4.02216852e-01 -5.14475882e-01 ... 
3.93126816e-01 -5.62395453e-01 4.20512170e-01]] [[-3.35055254e-02 -1.16505623e+00 1.65470338e+00 ... -1.64824760e+00 -4.06546742e-01 -1.25076950e-01] [-3.01259398e-01 7.79225379e-02 5.58022261e-02 ... 9.47223976e-03 1.99318171e-01 3.04027259e-01] [ 6.10264778e-01 -4.13419276e-01 -6.77043125e-02 ... -8.07422698e-01 -6.84677184e-01 4.02643949e-01] ... [ 3.45169395e-01 6.40022531e-02 1.75190881e-01 ... 4.84119803e-01 1.81690156e-01 1.33641958e-01] [-4.04494256e-01 -6.02636449e-02 2.53783047e-01 ... 2.74619088e-02 4.97548759e-01 5.26152909e-01] [ 3.16157863e-02 -2.96941549e-01 -6.82904959e-01 ... -1.13721423e-01 5.19461215e-01 5.20644724e-01]] ... [[-1.69272423e-01 -7.63508737e-01 7.85721600e-01 ... -1.28292143e+00 -1.20709315e-01 -2.06531301e-01] [ 1.56353682e-01 1.34552026e+00 -1.95597485e-01 ... 5.42143434e-02 -2.68952847e-01 -3.87625337e-01] [ 2.54685640e-01 3.70954543e-01 1.07016611e+00 ... -4.49180216e-01 -3.80922891e-02 -1.46875940e-02] ... [ 4.17703390e-01 3.76379281e-01 5.42735383e-02 ... 4.17994559e-01 8.17450583e-02 6.39061213e-01] [ 5.21462485e-02 -6.80009723e-01 1.21497214e-01 ... 6.66030705e-01 4.86690283e-01 -1.95286676e-01] [ 1.84758440e-01 -1.34811699e+00 -2.71863222e-01 ... -7.37715364e-01 9.50452387e-01 -1.01772618e+00]] [[ 7.44229019e-01 9.77511168e-01 7.41617024e-01 ... -4.93357241e-01 -6.21984601e-01 1.51064026e+00] [-1.09890413e+00 -1.10473529e-01 1.53179377e-01 ... -4.64707166e-02 -3.37158740e-01 -2.56109506e-01] [ 5.92218578e-01 -6.05235934e-01 2.83771813e-01 ... 3.30214739e-01 7.48242378e-01 7.73617983e-01] ... [ 7.44675100e-01 1.16660953e+00 -3.33296806e-02 ... -1.26042873e-01 -3.20138246e-01 8.84067118e-01] [-4.09392655e-01 5.57759330e-02 -6.79628611e-01 ... 9.33383763e-01 -5.22758424e-01 -8.83603334e-01] [ 2.06135303e-01 -1.94738358e-01 6.41960979e-01 ... 4.08343345e-01 -6.51575148e-01 -2.26247869e-02]] [[-5.02189219e-01 4.51985031e-01 9.97909829e-02 ... 
-7.65127897e-01 6.74436212e-01 -3.40786092e-02] [-3.35610449e-01 1.75176375e-02 -5.45758367e-01 ... -1.14334309e+00 -5.55124938e-01 -6.34282887e-01] [ 7.97586620e-01 5.58700599e-02 -1.06929019e-01 ... -1.37888062e+00 8.37046146e-01 -6.48768246e-01] ... [ 1.88550666e-01 -8.86210501e-01 -9.20418084e-01 ... 3.45078781e-02 1.83731079e-01 -1.16122104e-01] [ 2.02248655e-02 -1.05178797e+00 -2.47555032e-01 ... 7.43763864e-01 4.36234474e-02 4.81755495e-01] [-8.66226435e-01 -9.89751577e-01 -1.17055476e+00 ... -3.08317482e-01 -2.47732121e-02 1.09019053e+00]]]]]; ov_res: [[[[[ 5.48740961e-02 -1.03003764e+00 7.90114641e-01 ... 7.61890471e-01 6.92074299e-01 -9.65920016e-02] [-1.52055860e-01 -1.09240329e+00 -3.70866418e-01 ... 7.58232832e-01 -1.30265212e+00 8.65387246e-02] [ 1.39142132e+00 5.88418126e-01 -4.62018698e-01 ... -2.28898644e-01 1.13441665e-02 1.16274631e+00] ... [-7.96160340e-01 3.69338989e-01 -3.91920358e-01 ... 3.35065901e-01 -8.98882672e-02 -4.67916904e-03] [ 1.58248171e-01 -9.39103439e-02 -9.14998427e-02 ... -7.45657802e-01 -1.04576039e+00 -4.39061224e-01] [-1.11972488e-01 4.20800298e-02 -6.08379662e-01 ... 5.73337317e-01 9.29632187e-02 -8.85032713e-01]] [[-7.29160964e-01 -1.83104500e-01 1.04262376e+00 ... -8.64881635e-01 4.15901467e-02 -1.13962376e+00] [-4.04185057e-01 1.11966109e+00 9.82181251e-01 ... 4.99447167e-01 -4.25464123e-01 -2.75427818e-01] [-1.38348877e+00 1.00546069e-01 -3.20300043e-01 ... 1.50613651e-01 -1.68962610e+00 1.34931779e+00] ... [-7.40701616e-01 -1.03171062e+00 -6.53126165e-02 ... -2.90047675e-01 1.34614110e-01 -4.43956703e-02] [ 3.27452458e-03 -4.04889554e-01 -3.53152215e-01 ... -7.38729239e-01 2.48271182e-01 7.80980766e-01] [ 1.31201756e+00 -4.24449116e-01 3.12088225e-02 ... -5.13336658e-01 5.49670339e-01 -4.04826880e-01]] [[ 5.42328358e-01 1.04041874e-01 -5.88215828e-01 ... -1.68178920e-02 2.71335822e-02 -1.01468217e+00] [-2.82839000e-01 -1.13607335e+00 -3.37497778e-02 ... 
1.07736271e-02 -1.49595171e-01 3.65598321e-01] [-7.91419387e-01 -4.94145274e-01 1.11467230e+00 ... 1.22928119e+00 1.02102149e+00 7.24002868e-02] ... [ 2.91627198e-01 -1.55006617e-01 -1.17428291e+00 ... -6.06811881e-01 4.98105288e-01 -4.02042478e-01] [ 5.01829863e-01 3.93137485e-01 -3.40309948e-01 ... -3.42579126e-01 8.63106728e-01 -1.39945114e+00] [-4.79401469e-01 9.40180302e-01 -7.60343671e-02 ... -9.43177819e-01 1.30479372e+00 -4.10210758e-01]] ... [[ 4.71532226e-01 -2.25350842e-01 3.70756648e-02 ... -4.12083834e-01 4.63187814e-01 4.31598574e-01] [-6.05733454e-01 -7.12275207e-01 -9.14006606e-02 ... 1.73547435e+00 9.08006907e-01 1.29248992e-01] [-5.06549120e-01 2.74090409e-01 -7.61445016e-02 ... -2.77480513e-01 -3.65318120e-01 6.09408796e-01] ... [ 1.08624351e+00 6.83811426e-01 7.84199178e-01 ... 1.58018079e-02 -1.21670806e+00 -1.89123631e-01] [ 5.36591232e-01 -5.95642686e-01 5.69411516e-01 ... 8.01258147e-01 7.78362930e-01 2.73181587e-01] [ 1.91986531e-01 2.73099333e-01 4.51694399e-01 ... 5.41470230e-01 8.04046988e-01 9.75923538e-02]] [[ 6.37005627e-01 -7.49783456e-01 9.09135580e-01 ... -6.44772291e-01 4.49224770e-01 1.49007595e+00] [-3.05746198e-01 5.92007160e-01 -1.20177388e+00 ... 2.54819185e-01 1.59309328e-01 3.46801102e-01] [ 3.12500566e-01 4.63502668e-02 -1.30667543e+00 ... -9.01332051e-02 -1.58500835e-01 -9.88097370e-01] ... [-9.77428138e-01 -6.17552519e-01 -1.02779579e+00 ... 3.26769620e-01 4.49167937e-01 -1.55442148e-01] [ 3.04534048e-01 -1.20879781e+00 -2.36805797e-01 ... -2.56131530e-01 -1.16437133e-02 4.50194515e-02] [-6.39441907e-01 -1.71686441e-01 4.59207073e-02 ... -8.09699953e-01 1.01719832e+00 -4.48107928e-01]] [[ 7.83408701e-01 9.97021854e-01 -1.86177194e-01 ... -7.01422930e-01 1.45925969e-01 1.59817219e-01] [ 4.64767337e-01 3.46150130e-01 7.06699014e-01 ... 2.87736148e-01 7.72557795e-01 3.10795665e-01] [ 2.89836526e-01 -8.03541005e-01 -9.06295478e-01 ... 8.29219818e-01 -1.71159953e-01 2.98672944e-01] ... 
[-1.14264317e-01 -4.14720058e-01 4.95848894e-01 ... -1.12590587e+00 7.47436047e-01 -2.69562066e-01] [ 8.32565725e-02 1.89282060e-01 1.32708991e+00 ... 5.73251247e-01 1.50868008e-02 1.99872196e-01] [-6.71626031e-02 3.64225358e-01 -3.23605150e-01 ... 6.49461627e-01 5.74827552e-01 -2.67879903e-01]]] [[[ 1.22238731e+00 7.72255242e-01 1.34488082e+00 ... 1.13056505e+00 -2.03380704e+00 -2.74225444e-01] [ 1.50362730e+00 4.67471331e-02 2.86229819e-01 ... -4.05217320e-01 2.66059971e+00 -1.73962617e+00] [ 1.66801214e+00 7.78133988e-01 -1.95747280e+00 ... -2.31802535e+00 5.72069585e-01 5.43688655e-01] ... [-4.10751438e+00 2.62760580e-01 -2.45875955e-01 ... 4.58140552e-01 -1.11094356e+00 -7.43098736e-01] [ 9.56960738e-01 -1.00110956e-01 -2.82549590e-01 ... -1.31934440e+00 -1.22813356e+00 1.11073591e-02] [-2.12453917e-01 -1.87981677e+00 -1.73875237e+00 ... -1.61324784e-01 8.69161129e-01 1.37828028e+00]] [[-8.77575278e-01 -1.06841707e+00 2.19564223e+00 ... 1.46147120e+00 -2.57553816e-01 1.71269190e+00] [ 8.82645965e-01 8.13068032e-01 6.42447233e-01 ... -2.71699023e+00 -9.70784366e-01 -1.77370143e+00] [ 5.97745597e-01 2.52889812e-01 3.57369781e-01 ... -3.13719106e+00 -4.53024834e-01 7.08824575e-01] ... [-8.78344655e-01 -2.11931378e-01 -2.03812551e+00 ... 9.23603952e-01 -2.64580250e+00 -2.08059952e-01] [-2.98350871e-01 -1.47475362e-01 -2.52429545e-01 ... 8.95903289e-01 2.22802296e-01 -5.29572546e-01] [ 1.90212440e+00 -1.48113406e+00 3.94540101e-01 ... -1.57758069e+00 1.19999099e+00 1.06575119e+00]] [[ 3.66304487e-01 5.32238603e-01 -3.54921699e+00 ... 2.77958179e+00 1.19446421e+00 1.85147524e-01] [-3.18491673e+00 -4.55257982e-01 -1.83763158e+00 ... -8.43670428e-01 5.64785063e-01 2.30892444e+00] [-4.41554219e-01 -2.74082989e-01 1.84832096e+00 ... 9.50667083e-01 -1.25692320e+00 7.59283364e-01] ... [ 2.66496754e+00 9.29885387e-01 3.31251335e+00 ... 1.16875339e+00 -4.01147813e-01 -8.86774540e-01] [ 3.00420195e-01 1.80490279e+00 -5.25729775e-01 ... 
3.22479439e+00 -3.69730368e-02 2.22826409e+00] [-5.67723215e-02 1.66193342e+00 -2.92113960e-01 ... 6.38720632e-01 -2.28006378e-01 2.47083768e-01]] ... [[ 2.77583456e+00 -2.63996100e+00 1.08738232e+00 ... 1.23671556e+00 -7.24538326e-01 -3.45525771e-01] [-7.79721856e-01 -4.28431816e-02 -1.10576248e+00 ... 1.63044500e+00 -7.01248944e-01 3.89036149e-01] [ 1.30590999e+00 3.59019667e-01 1.23239450e-01 ... -1.30298257e+00 -7.87266850e-01 -1.42296946e+00] ... [-4.03236121e-01 1.18662322e+00 -4.40852463e-01 ... 7.67292738e-01 1.02020729e+00 -3.81467462e-01] [ 7.91454971e-01 8.43489230e-01 1.06326431e-01 ... -2.53602266e+00 -2.28128344e-01 5.51959574e-01] [ 2.00098681e+00 -1.25611436e+00 2.36235690e+00 ... 7.53172576e-01 1.10479283e+00 -1.73586094e+00]] [[ 8.45910609e-01 4.21526253e-01 4.11108792e-01 ... 1.58087730e+00 9.23292577e-01 -2.05750656e+00] [ 2.32272792e+00 -1.93818247e+00 3.00701838e-02 ... -2.23917723e+00 -1.11092198e+00 -7.32164502e-01] [-2.57892561e+00 -1.48600146e-01 4.52315301e-01 ... 1.51654470e+00 -1.47359580e-01 5.91374516e-01] ... [ 3.28162670e-01 7.90437222e-01 -1.01418722e+00 ... 3.71911764e-01 1.03262460e+00 -6.71230257e-02] [-2.41285372e+00 -3.42160374e-01 1.38165653e+00 ... 2.00986290e+00 1.11366320e+00 -7.55472243e-01] [-1.81652236e+00 1.11842787e+00 -7.40407586e-01 ... -1.28233641e-01 1.35653007e+00 2.70409799e+00]] [[-2.57216763e+00 -7.25552499e-01 5.55998981e-01 ... -2.38002464e-01 1.28395629e+00 1.09288800e+00] [ 1.44945467e+00 3.75631124e-01 8.25990856e-01 ... 2.90932727e+00 2.37427330e+00 8.12892914e-01] [-1.11272752e+00 5.02572358e-02 3.32613111e+00 ... -5.95700741e-01 -5.08736610e-01 2.03559208e+00] ... [-4.11959052e-01 -5.00865161e-01 1.27845907e+00 ... 1.73747587e+00 -1.72405028e+00 7.67310679e-01] [-8.22989762e-01 1.29218554e+00 2.01381421e+00 ... -1.33140087e+00 1.72298163e-01 -2.94629049e+00] [-2.75787425e+00 1.39783907e+00 3.81106198e-01 ... 
1.98240793e+00 1.16705322e+00 -3.61531568e+00]]] [[[ 3.82892549e-01 7.53002167e-02 4.79752839e-01 ... 1.21933460e-01 -6.78511858e-02 1.46632135e-01] [-2.67158169e-02 2.51154125e-01 5.01439512e-01 ... 2.43059292e-01 3.88225093e-02 2.03510687e-01] [-7.60510087e-01 -3.39987278e-02 3.21250767e-01 ... -2.36338332e-01 4.15898800e-01 9.37343612e-02] ... [ 7.57103637e-02 4.64493446e-02 2.04705268e-01 ... 5.53481951e-02 -7.85211921e-02 2.87265509e-01] [-4.66346145e-01 1.94210827e-01 3.37468594e-01 ... -2.76712887e-02 5.19814253e-01 -1.91964850e-01] [-6.38453290e-02 -8.80458727e-02 1.35662407e-01 ... -3.65904659e-01 -2.15994388e-01 -4.32988256e-01]] [[-1.69589207e-01 2.39511237e-01 3.91891688e-01 ... -1.18563838e-01 -2.26198863e-02 2.72501707e-01] [-1.69008344e-01 5.75956941e-01 -5.97004116e-01 ... 2.14268088e-01 -8.92496631e-02 6.24623038e-02] [-3.45767379e-01 -3.08217347e-01 -6.26549721e-02 ... -4.91138734e-02 1.39565915e-01 1.84773415e-01] ... [-5.49829185e-01 -3.88016105e-01 3.03292543e-01 ... 1.28665611e-01 -1.70136198e-01 -8.20388552e-03] [-1.44341565e-03 9.59711298e-02 -7.70632267e-01 ... 1.79422572e-01 3.57118696e-01 -1.34091198e-01] [-2.47835964e-01 7.95347914e-02 -6.47398233e-02 ... 8.97477791e-02 1.25791997e-01 2.56318390e-01]] [[ 6.44904599e-02 2.31257170e-01 1.26162320e-01 ... -2.27116019e-01 3.83924007e-01 -2.48441786e-01] [ 4.94881645e-02 -2.30543017e-01 -1.45941615e-01 ... 5.62687032e-03 9.63111818e-02 -6.22461084e-03] [-2.33831152e-01 -1.18505895e-01 2.15419143e-01 ... 4.89475518e-01 -2.27106914e-01 -9.07874200e-03] ... [-4.52393889e-01 -1.44327283e-01 -1.44735754e-01 ... 1.40811533e-01 3.65924314e-02 3.07775065e-02] [ 4.27989095e-01 -4.37366635e-01 -2.68132478e-01 ... -5.24095558e-02 -1.31554514e-01 -4.59623933e-01] [ 4.24135298e-01 1.25962794e-01 -1.59487531e-01 ... -4.14298801e-03 1.37723133e-01 2.88932383e-01]] ... [[ 1.86235502e-01 -3.64425004e-01 -9.85699967e-02 ... 
-2.04757974e-01 6.82051256e-02 -5.41020572e-01] [-1.66413814e-01 -9.21817869e-02 3.22510213e-01 ... -9.20442194e-02 -1.88744798e-01 -1.79433107e-01] [-1.24387838e-01 -2.06176355e-01 6.22305453e-01 ... 1.01659987e-02 1.98665947e-01 9.07860324e-02] ... [-7.64215067e-02 1.80000842e-01 -2.17698708e-01 ... 5.41381584e-03 3.34594175e-02 -1.74868599e-01] [ 2.05412865e-01 -4.46936071e-01 6.58239067e-01 ... -1.27339900e-01 2.56723523e-01 -1.33836582e-01] [-1.64206758e-01 -3.41937929e-01 6.73751011e-02 ... -2.04024926e-01 -9.45117176e-02 -1.88894458e-02]] [[-2.01219887e-01 9.84461159e-02 3.32027674e-01 ... -2.57117361e-01 -2.30524629e-01 7.70993754e-02] [-1.65824726e-01 -4.52994287e-01 -6.88510165e-02 ... 1.40166625e-01 2.57131159e-01 -9.47347134e-02] [ 2.10742503e-01 2.45566025e-01 -2.47054566e-02 ... -4.90770005e-02 -1.35694027e-01 -2.53760248e-01] ... [-4.00972188e-01 2.07358792e-01 -2.72991061e-01 ... 8.59046802e-02 -7.52266943e-02 -6.83428049e-02] [-2.30747342e-01 4.74287011e-02 1.21250652e-01 ... -3.50968659e-01 -2.97894537e-01 -4.18315768e-01] [ 2.55291343e-01 4.70145583e-01 3.76791984e-01 ... -1.94717497e-01 -2.46305823e-01 -3.52306843e-01]] [[-7.24095464e-01 3.80870938e-01 -4.51752841e-01 ... 1.35657504e-01 -1.21162489e-01 -2.51196653e-01] [-5.31052798e-02 -6.70885861e-01 2.61699826e-01 ... 2.08880857e-01 -3.14093947e-01 -4.16018851e-02] [ 2.33037323e-01 -1.49512095e-02 1.67930827e-01 ... 2.55199492e-01 -2.90466100e-02 7.43122935e-01] ... [ 1.48941770e-01 -5.31779155e-02 8.51897337e-03 ... -4.43745047e-01 -1.76004730e-02 2.06316784e-01] [-2.22004145e-01 1.39137611e-01 -2.18141541e-01 ... -3.81972566e-02 6.38053864e-02 -3.28049451e-01] [-2.78134584e-01 2.97911882e-01 3.60411033e-02 ... 1.40743226e-01 -1.88773155e-01 -2.46024966e-01]]] [[[-1.39336669e+00 1.99206936e+00 -3.62434804e-01 ... -7.43576109e-01 1.06877238e-01 -4.63199914e-02] [-7.03025043e-01 -1.76269400e+00 1.89965710e-01 ... 
-1.69801354e+00 4.66479659e-01 1.15318012e+00] [-1.64057815e+00 1.12694062e-01 6.14197075e-01 ... -2.71117747e-01 3.73650253e-01 -9.33672547e-01] ... [-1.88282454e+00 1.66135156e+00 6.73107132e-02 ... -2.06008625e+00 1.67775929e+00 -1.47641766e+00] [-1.24348855e+00 3.65524352e-01 -3.90589070e+00 ... -9.96885538e-01 -1.86114454e+00 3.46808374e-01] [ 7.02484369e-01 1.75491199e-01 -6.90013528e-01 ... 2.23358703e+00 2.56732762e-01 9.43297222e-02]] [[-8.36482882e-01 -2.84286666e+00 2.72600412e+00 ... -1.09940290e+00 6.27207577e-01 2.10869163e-01] [-2.47120690e+00 -2.95936108e+00 -5.54259568e-02 ... -7.71213710e-01 6.19159818e-01 7.95670226e-02] [ 4.65181082e-01 1.89473450e+00 -2.26550627e+00 ... 2.19167638e+00 -2.31409043e-01 -1.24592733e+00] ... [-1.12126148e+00 2.64768839e+00 1.59401506e-01 ... 1.14265665e-01 -3.98210979e+00 2.24651098e+00] [ 1.12627482e+00 4.59163761e+00 -1.76847905e-01 ... -4.08987904e+00 1.70056313e-01 -5.27441539e-02] [ 1.01319325e+00 1.82688415e+00 -9.63336110e-01 ... -1.22901750e+00 -4.14743042e+00 3.75451326e-01]] [[ 1.18201971e-02 3.54803085e+00 -1.31527889e+00 ... 5.65924287e-01 -2.06763506e-01 -1.49681461e+00] [ 5.86323082e-01 1.12122605e-02 3.37199664e+00 ... -5.98658562e-01 -5.33012033e-01 1.73620141e+00] [-3.79638411e-02 5.82021415e-01 1.64358246e+00 ... 3.53438169e-01 2.88682044e-01 -1.09382033e+00] ... [-2.05866769e-01 3.71236712e-01 -7.89995909e-01 ... 6.15241587e-01 -2.59295255e-01 1.19354677e+00] [-1.09572601e+00 6.86848938e-01 3.06067467e+00 ... 8.50837946e-01 1.68523395e+00 -3.03234529e+00] [-1.49732661e+00 1.44459689e+00 -2.79278207e+00 ... 6.63755834e-01 -1.52282810e+00 -1.46669519e+00]] ... [[-1.97561875e-01 -2.64500761e+00 1.38673246e+00 ... -1.80981648e+00 -1.42761242e+00 1.68639052e+00] [-1.69210279e+00 -1.09541965e+00 -1.33026063e+00 ... 9.81184006e-01 -2.19387293e+00 1.61878753e+00] [ 7.13594854e-01 3.92476749e+00 -9.31198746e-02 ... 1.10706234e+00 3.34967446e+00 -3.31905937e+00] ... 
[-2.14500204e-01 2.05201960e+00 2.25159645e+00 ... -7.90684819e-01 7.51711488e-01 1.59562588e+00] [ 1.87804088e-01 5.70196748e-01 -6.63821816e-01 ... -2.45679522e+00 -1.80718100e+00 -1.32425320e+00] [-6.95818126e-01 -5.24415076e-01 -6.76124096e-01 ... 1.07016194e+00 2.16221541e-01 -1.58166718e+00]] [[ 1.67678833e+00 -6.44870877e-01 -5.20982258e-02 ... -3.62756431e-01 -1.28641903e-01 4.27623063e-01] [-6.00670338e-01 1.09471452e+00 -5.85822344e-01 ... 3.40187192e-01 1.01723326e-02 -3.19790035e-01] [ 4.12373543e-01 1.06371891e+00 -4.20278102e-01 ... -1.78022969e+00 -5.74704647e-01 3.48038220e+00] ... [-1.50607383e+00 -9.82433259e-01 9.99986306e-02 ... -1.36081779e+00 -2.81768346e+00 1.06946111e+00] [-1.93927157e+00 -2.08788365e-01 5.96331716e-01 ... -8.97175491e-01 -1.21982299e-01 -6.08586550e-01] [ 6.56079710e-01 1.07288039e+00 8.71326983e-01 ... 2.33078790e+00 -1.85859883e+00 2.18996978e+00]] [[ 3.86019897e+00 2.57737923e+00 4.58134472e-01 ... 8.21972787e-02 5.14955819e-01 2.66074061e+00] [ 4.29863334e-01 9.60948288e-01 -1.27371311e-01 ... -1.75438261e+00 -2.00070691e+00 -5.16863503e-02] [ 1.79163122e+00 -1.65287480e-01 -6.10424519e-01 ... 4.73383069e-02 2.93486506e-01 -2.46806169e+00] ... [-2.59057403e+00 1.28294563e+00 -1.76664233e-01 ... -1.38278687e+00 3.67474651e+00 -7.58620322e-01] [ 3.04592609e-01 -2.72879696e+00 1.83002755e-01 ... -3.15268874e+00 2.00135970e+00 1.19411993e+00] [-9.51030791e-01 4.72935796e-01 6.16782904e-01 ... 9.40708637e-01 -1.65917850e+00 -1.17655075e+00]]] [[[-1.62917852e-01 5.35893254e-02 3.28541212e-02 ... -2.30122618e-02 6.68528304e-02 1.21768087e-01] [ 8.26022178e-02 -2.98320670e-02 6.28049076e-02 ... -8.03983733e-02 8.76914039e-02 -3.92416827e-02] [ 4.87151258e-02 4.86515537e-02 7.91197717e-02 ... 3.00852414e-02 1.52089313e-01 1.45149715e-02] ... [-6.44524582e-03 -1.04156129e-01 -6.84334338e-02 ... 6.05743453e-02 5.16243242e-02 8.23689252e-02] [ 4.40019034e-02 1.72116864e-03 -1.18266463e-01 ... 
-1.77273119e-03 1.62364066e-01 -1.40607566e-01] [ 1.20589525e-01 -5.72390156e-03 -2.75830878e-03 ... -3.19196545e-02 -1.49305295e-02 8.43003578e-03]] [[-2.54843626e-02 8.03031996e-02 -2.36911401e-02 ... -1.50808431e-02 5.02691232e-02 -4.04744875e-03] [ 1.17115319e-01 -3.07581704e-02 2.20002271e-02 ... -9.17344261e-03 8.29608142e-02 -1.32900728e-02] [ 1.84691828e-02 2.00775728e-01 1.19214423e-01 ... 2.56914571e-02 8.08453560e-02 -1.77189082e-01] ... [ 7.05247670e-02 4.88997847e-02 -1.82231709e-01 ... 9.84973609e-02 -7.77494833e-02 -1.64125279e-01] [-8.81089792e-02 1.46876976e-01 6.88068345e-02 ... -1.95150748e-01 -1.71211109e-01 2.30000690e-02] [ 2.33809520e-02 9.54882652e-02 -7.61942416e-02 ... -3.20922077e-01 -1.18669227e-01 1.85453176e-01]] [[ 8.92913043e-02 1.15316689e-01 1.48082465e-01 ... -2.31909633e-01 1.19419508e-01 1.16142988e-01] [-9.14584473e-02 -7.99501501e-03 -1.96452692e-01 ... 5.44003956e-03 -1.01833217e-01 -1.09001650e-02] [-2.68173981e-02 -1.87839698e-02 -1.60857320e-01 ... 4.55100238e-02 2.61999294e-02 -1.52738392e-01] ... [-3.54216695e-02 -1.34118423e-01 -8.41641426e-02 ... -1.49974450e-01 1.79891777e-03 -8.25342983e-02] [-7.58720189e-03 1.42177060e-01 3.15145962e-02 ... 1.87533170e-01 3.88884172e-02 -1.76223651e-01] [ 7.48798326e-02 2.58815009e-02 -6.04631286e-03 ... 8.83210450e-02 -2.25345828e-02 -1.55299127e-01]] ... [[-2.45818421e-02 9.60167646e-02 2.46363524e-02 ... 9.64921936e-02 -1.83830019e-02 8.82110819e-02] [ 1.38787985e-01 8.62004235e-02 -1.16861127e-01 ... 7.36868680e-02 -5.56502193e-02 5.68033159e-02] [ 4.67910916e-02 -3.81716527e-02 5.48161939e-02 ... 4.74806614e-02 1.43417343e-01 -1.09904706e-02] ... [ 2.74699926e-02 7.40603507e-02 -1.00687966e-01 ... -2.73516271e-02 1.16658986e-01 -1.87051699e-01] [-7.03082308e-02 4.37024236e-02 4.37064581e-02 ... -8.84406716e-02 6.55409768e-02 2.23885849e-02] [ 1.08705260e-01 -4.36498374e-02 -8.65843333e-03 ... 
3.73850875e-02 1.68955587e-02 -1.10295443e-02]] [[-7.45690754e-03 -1.24860547e-01 5.28541766e-02 ... -3.47341225e-02 1.05597921e-01 -9.88994092e-02] [ 4.81438898e-02 2.43097562e-02 -4.55003195e-02 ... 2.97899209e-02 -1.18436150e-01 1.40046686e-01] [-6.38985559e-02 9.78565812e-02 -8.45256597e-02 ... -6.69223815e-02 4.99512367e-02 -7.83100650e-02] ... [ 6.10416830e-02 6.44919053e-02 -1.71772748e-01 ... -1.22459359e-01 1.36239603e-02 1.14563964e-01] [-1.14679284e-01 -1.49118947e-03 3.22308131e-02 ... 9.33015570e-02 1.19854517e-01 1.52402371e-01] [ 2.00243071e-01 9.20964107e-02 1.03118591e-01 ... -5.64865358e-02 3.36539447e-02 -2.60261837e-02]] [[ 6.98769316e-02 -6.17254339e-02 -9.21203494e-02 ... -5.87595776e-02 -6.45915717e-02 5.98152727e-02] [ 2.70478074e-02 -1.49359368e-02 -8.58905762e-02 ... -1.14840820e-01 -1.10192753e-01 -3.44218947e-02] [ 2.54797768e-02 1.46522030e-01 1.41179949e-01 ... 1.27079776e-02 1.34459853e-01 -1.29440129e-01] ... [-2.11820249e-02 1.42673165e-01 3.92944850e-02 ... 1.72573254e-01 -3.31241228e-02 -1.28206939e-01] [-2.16397420e-01 3.24589610e-02 1.60118379e-02 ... 3.16395015e-02 -4.60671522e-02 6.22594245e-02] [-6.05604351e-02 -2.49380022e-02 -1.65813923e-01 ... 7.52291158e-02 3.16906273e-02 2.77742445e-02]]] [[[-1.25094011e-01 8.74148682e-02 -5.29328883e-01 ... 2.67408311e-01 -4.16048795e-01 1.32546782e-01] [-2.63230562e-01 -4.44121063e-01 5.52010477e-01 ... -3.61848712e-01 1.13906916e-02 -3.87159064e-02] [-1.89615339e-01 3.55046093e-02 -2.60932624e-01 ... 1.74966887e-01 5.73049426e-01 -1.75101742e-01] ... [ 4.57658917e-01 9.21765089e-01 6.17757082e-01 ... 3.79514903e-01 -1.19023204e-01 -1.66898382e+00] [ 1.46292169e-02 2.35127360e-02 3.35289061e-01 ... -8.53691816e-01 -1.90202618e+00 -1.63091328e-02] [-1.93779260e-01 -1.06900239e+00 7.62301207e-01 ... -6.91330433e-01 8.34362984e-01 -1.53370273e+00]] [[-7.17084825e-01 -3.02160621e-01 -3.46486121e-01 ... 
8.61505806e-01 -1.61728308e-01 2.64061302e-01] [ 7.12566495e-01 6.83967471e-01 -5.21229386e-01 ... -1.69456318e-01 2.36090973e-01 2.07557067e-01] [-6.07173324e-01 3.29726070e-01 2.64022127e-02 ... 3.84799838e-01 -3.44101816e-01 8.47624987e-02] ... [ 3.60781397e-03 4.22744341e-02 1.26991227e-01 ... 8.44236851e-01 1.36537150e-01 -5.01263440e-01] [-6.34900093e-01 -1.60791688e-02 1.20510602e+00 ... 5.62663972e-01 4.14512753e-01 -2.81977057e-01] [ 8.25429261e-01 7.75224805e-01 -1.02391481e+00 ... 4.65087950e-01 2.17137620e-01 6.50471523e-02]] [[ 4.96470511e-01 9.94210958e-01 -7.23447382e-01 ... 1.75264776e+00 3.72457802e-02 -5.89030206e-01] [ 3.91315520e-01 9.67325032e-01 -6.78564534e-02 ... 3.74705017e-01 -4.45766896e-01 -2.43787825e-01] [-3.32867056e-01 2.82305479e-01 3.35908473e-01 ... -2.07845569e-01 -5.13800979e-01 -3.79131496e-01] ... [-2.20300287e-01 -6.34784102e-01 7.97229528e-01 ... -8.92999530e-01 -2.30316237e-01 5.76290302e-02] [-5.75949073e-01 3.97816807e-01 2.72549987e-01 ... -4.87614930e-01 7.44701803e-01 2.65689716e-02] [ 3.74259591e-01 -6.17200196e-01 7.72269890e-02 ... -1.25953346e-01 -2.22714230e-01 -4.33170557e-01]] ... [[-1.61645949e-01 -7.36367553e-02 3.99163812e-01 ... -7.62207985e-01 1.70047507e-01 -7.61186242e-01] [ 3.82842064e-01 3.29734851e-03 -8.68098557e-01 ... -5.29870927e-01 9.65322316e-01 5.61356604e-01] [ 7.84522712e-01 2.02390090e-01 4.06176150e-01 ... -1.23394215e+00 -2.41476357e-01 -1.22026451e-01] ... [ 1.40839024e-02 1.48601651e-01 -4.34493750e-01 ... 5.89556098e-01 4.84793127e-01 -7.39839748e-02] [-1.21303864e-01 -2.47372225e-01 8.59144926e-02 ... -2.59638596e-02 4.28798229e-01 6.49575472e-01] [-3.38741004e-01 -6.80448174e-01 -3.13904345e-01 ... 9.84291553e-01 4.40186054e-01 1.27165341e+00]] [[ 1.23672269e-01 6.77804112e-01 -6.61842465e-01 ... -6.04762375e-01 -7.28984103e-02 -5.11426292e-02] [-1.75428420e-01 -1.04007190e-02 -1.03638899e+00 ... 
-2.05336720e-01 -5.56378067e-01 -4.96982895e-02] [-2.97210544e-01 7.50609189e-02 3.04086834e-01 ... -2.30639540e-02 1.18554211e+00 -1.15950918e+00] ... [ 1.10523067e-01 -6.20640934e-01 8.57319906e-02 ... 6.08190596e-01 -1.48670018e-01 2.46053740e-01] [-1.38048065e+00 8.40454400e-02 -2.88271308e-01 ... -6.99485183e-01 -2.42290720e-01 -6.39471114e-01] [-7.35770464e-01 3.53875548e-01 -9.52517986e-01 ... 2.31636167e-02 -7.51836896e-01 -1.26110047e-01]] [[-2.68103033e-01 1.40415989e-02 2.20726579e-01 ... 1.11115528e-02 -9.18260336e-01 4.79391366e-01] [ 1.06003475e+00 -1.20366454e-01 9.30392027e-01 ... -3.07092041e-01 -7.72034347e-01 5.47582924e-01] [-1.22392905e+00 3.97924483e-01 -2.70578474e-01 ... -6.17744803e-01 6.69032559e-02 5.43021485e-02] ... [-4.94831771e-01 -1.88101709e-01 -1.98502123e-01 ... -5.20151258e-01 7.05964386e-01 -2.05116987e-01] [-1.85217410e-01 -3.74016315e-01 6.01448238e-01 ... -1.86892509e-01 -4.51630861e-01 3.99010926e-01] [-1.01580286e+00 2.28503674e-01 2.37418488e-01 ... -9.11211073e-02 4.70570475e-01 -1.84453294e-01]]]] [[[[ 1.89169392e-01 1.85661107e-01 1.17863822e+00 ... -4.99421895e-01 3.50702107e-01 -1.27303690e-01] [ 1.01476169e+00 -6.79304540e-01 -1.28312767e-01 ... 5.38960338e-01 6.02281868e-01 8.30023348e-01] [-1.25510812e-01 6.83656156e-01 1.78058103e-01 ... 3.58767854e-03 2.05868140e-01 3.71879727e-01] ... [ 3.24744016e-01 -3.86780292e-01 -1.33667454e-01 ... 1.58026740e-01 4.26156789e-01 1.24617577e+00] [ 7.19082296e-01 3.03519011e-01 -8.85078013e-01 ... 1.39246237e+00 -5.57024539e-01 4.24753666e-01] [-1.28857946e+00 -3.02202940e-01 2.61816204e-01 ... 7.48349011e-01 -1.45197392e-01 1.72398639e+00]] [[ 9.52725410e-01 1.93430066e-01 -4.90663946e-01 ... -1.14336705e+00 1.10475326e+00 1.40399969e+00] [-2.87305772e-01 1.16822720e+00 -7.09332943e-01 ... -5.79119623e-01 2.26876512e-01 -3.07420492e-01] [ 4.83771086e-01 -8.56532454e-01 -1.09450661e-01 ... -1.64418435e+00 1.08384681e+00 -3.48750889e-01] ... 
[-7.11393893e-01 -9.55291986e-01 1.82749167e-01 ... 2.22045839e-01 4.67940509e-01 -3.56568545e-01] [ 4.69831109e-01 1.48149550e-01 -1.90595821e-01 ... 4.69275922e-01 4.05083656e-01 2.73843616e-01] [-1.80559948e-01 -9.54467952e-02 3.47357213e-01 ... 7.92821869e-02 -5.57867646e-01 -6.93268955e-01]] [[ 3.10734063e-01 1.68367755e-02 6.21613503e-01 ... 4.87954170e-01 -1.44585752e+00 4.95499879e-01] [ 1.10533595e+00 -1.94343731e-01 7.21099913e-01 ... -4.77193534e-01 -2.54573196e-01 1.93934238e+00] [-8.50700200e-01 1.24524820e+00 -1.62698478e-01 ... 3.96221340e-01 1.20248044e+00 7.87532568e-01] ... [-2.09379077e+00 1.81487262e-01 -1.79422945e-01 ... -1.13613749e+00 -5.90955853e-01 4.50687706e-01] [ 1.66568637e-01 -3.74437541e-01 7.58178771e-01 ... -5.07613242e-01 -6.09486461e-01 -8.70845020e-01] [ 9.49978590e-01 7.46819437e-01 -1.11172831e+00 ... 4.44315225e-01 -1.21864891e+00 2.04100031e-02]] ... [[ 2.80620426e-01 -8.10906112e-01 -3.47735256e-01 ... 1.36376500e-01 -5.01174510e-01 -5.75119078e-01] [ 4.11007494e-01 -1.58732140e+00 4.44038540e-01 ... -2.86744535e-01 -9.70100284e-01 -1.47809759e-01] [-1.66920161e+00 -5.22227539e-03 -9.41399932e-01 ... -1.86184192e+00 1.55971634e+00 -2.70797670e-01] ... [-8.56336117e-01 -3.06586444e-01 8.95705104e-01 ... -1.52157176e+00 1.51266849e+00 -1.00735199e+00] [-6.46875143e-01 3.13695192e-01 -1.93573967e-01 ... -1.10034280e-01 -5.50063014e-01 5.46186864e-01] [-2.56586432e-01 2.64039665e-01 5.52995384e-01 ... 2.05065355e-01 8.99621844e-01 -2.18578592e-01]] [[-7.71255076e-01 6.09008908e-01 1.25356030e+00 ... 5.42369664e-01 -1.22344053e+00 -6.06182635e-01] [-6.18700325e-01 1.62163854e+00 -2.40369618e-01 ... 5.57908714e-01 -3.50289047e-01 -1.10224152e+00] [ 4.44035865e-02 6.27177835e-01 -4.55016851e-01 ... 1.80248871e-01 7.40560070e-02 2.08973980e+00] ... [-7.00505257e-01 -2.27949753e-01 -1.26864719e+00 ... -3.75642702e-02 -5.98722041e-01 6.26817644e-01] [-4.84308183e-01 5.78985393e-01 9.69296396e-01 ... 
2.36217111e-01 -7.55030870e-01 -9.90136147e-01] [-1.62036970e-01 -6.86639965e-01 6.79027259e-01 ... 4.55226451e-01 2.22232789e-01 5.46190739e-01]] [[ 2.81475872e-01 -1.05543280e+00 1.12872072e-01 ... 6.06187940e-01 6.62081301e-01 7.49458432e-01] [ 1.20181656e+00 8.32071424e-01 5.14193594e-01 ... -4.72910434e-01 -1.90287128e-01 8.23000908e-01] [-3.75004023e-01 4.38156009e-01 -2.27139071e-01 ... 8.06548297e-01 -7.95791924e-01 8.75623703e-01] ... [ 6.02344751e-01 -3.47923450e-02 -4.66467202e-01 ... -2.39942491e-01 -3.11190158e-01 -6.33918643e-01] [ 2.68217236e-01 2.78513640e-01 -8.59388039e-02 ... 8.06636214e-01 4.66029972e-01 -1.93336248e-01] [-1.61497191e-01 3.16062480e-01 -4.97892648e-01 ... 9.75215614e-01 9.84822810e-02 -6.21379793e-01]]] [[[-4.56763476e-01 6.62441730e-01 4.29456294e-01 ... 2.09708929e+00 1.78851381e-01 1.45217788e+00] [ 5.25388956e-01 -1.78076014e-01 7.42555976e-01 ... -7.07391143e-01 -2.15586972e+00 3.66705823e+00] [-4.18635830e-02 -2.97647333e+00 -8.04643393e-01 ... 6.49717003e-02 1.01718116e+00 -7.51849949e-01] ... [ 1.49390233e+00 -5.01931906e-01 -1.20246060e-01 ... 2.64090967e+00 2.70011497e+00 -2.35496715e-01] [ 9.30581033e-01 1.82072973e+00 -7.07030535e-01 ... 4.80686933e-01 -2.46492624e+00 -1.34267724e+00] [-1.85889769e+00 1.69883418e+00 -4.97323126e-01 ... 6.26509964e-01 3.11511308e-01 -3.55945110e-01]] [[ 2.49859524e+00 7.85145044e-01 -3.25919271e+00 ... 7.19654739e-01 -1.73949850e+00 -2.45377612e+00] [-1.39258966e-01 1.74703753e+00 -1.90541014e-01 ... 1.09624934e+00 -1.54503000e+00 -2.66783267e-01] [-1.25549987e-01 2.41663170e+00 1.48079526e+00 ... -6.25820458e-01 -3.18993807e+00 2.78761148e-01] ... [-2.62184560e-01 1.86532825e-01 -3.21784019e-01 ... -6.27580881e-01 4.91593957e-01 -2.67305422e+00] [-1.29873743e-02 5.71236014e-01 -1.25188673e+00 ... -9.99734253e-02 1.47307754e+00 -4.34384727e+00] [-1.64999008e-01 1.74058020e+00 3.13640982e-01 ... 
-1.31669390e+00 1.10091174e+00 7.04483092e-01]] [[ 1.19364858e+00 1.67471564e+00 4.73907322e-01 ... 1.71992183e+00 2.49384761e+00 -5.68875730e-01] [-9.60233390e-01 2.48692083e+00 6.55951977e-01 ... -6.30146027e-01 1.34171891e+00 2.00506663e+00] [-1.06837943e-01 1.90440726e+00 -2.27191091e+00 ... -7.51853526e-01 2.64345616e-01 2.88978726e-01] ... [ 1.34705973e+00 6.53862000e-01 -1.30634773e+00 ... -9.83413935e-01 -2.15444779e+00 -2.14440846e+00] [ 1.20757926e+00 -3.26049805e-01 -5.97616315e-01 ... 9.88478512e-02 1.81817317e+00 -6.39806390e-01] [ 6.19989753e-01 -3.48421884e+00 4.67340589e-01 ... 2.63878679e+00 8.28693926e-01 -1.71657300e+00]] ... [[ 5.12112617e-01 2.17369580e+00 1.76898885e+00 ... 1.71308851e+00 -9.07185495e-01 -1.65660858e+00] [ 1.41439772e+00 1.36388862e+00 -2.63558805e-01 ... 5.80122411e-01 -1.37652290e+00 -4.60953683e-01] [ 3.47924143e-01 2.43440628e+00 6.37560487e-01 ... -5.89392148e-02 1.17829323e+00 -9.68203425e-01] ... [ 5.92074156e-01 3.04332614e-01 5.58784157e-02 ... -1.87843010e-01 -2.11124253e+00 1.20696500e-01] [ 4.43173677e-01 -2.12150073e+00 1.09666824e+00 ... 1.26174045e+00 -1.38707805e+00 3.98149580e-01] [ 1.72076836e-01 -2.06731391e+00 -4.86497432e-01 ... 5.38395904e-02 -1.95562029e+00 -1.47104934e-01]] [[ 1.91311014e+00 1.33773482e+00 2.27351278e-01 ... -3.89697462e-01 -2.25627050e-01 3.90341330e+00] [-2.89060283e+00 -1.29614127e+00 2.90503055e-01 ... -3.54705825e-02 3.90793175e-01 1.14322674e+00] [-1.44922614e+00 -2.34862477e-01 5.68504632e-01 ... 8.61400440e-02 -1.91931868e+00 3.93944770e-01] ... [ 3.10894537e+00 6.27565607e-02 1.99449301e+00 ... -1.10278094e+00 2.40891725e-01 6.16872132e-01] [ 1.07912433e+00 -1.16896057e+00 -2.26574111e+00 ... -9.52068388e-01 -1.25858569e+00 -9.42999959e-01] [-3.53330225e-02 1.41267622e+00 1.49651492e+00 ... 3.14042330e-01 -7.57665098e-01 1.99643350e+00]] [[ 5.97356677e-01 8.65566850e-01 1.06242490e+00 ... 
-1.79400995e-01 -3.45452261e+00 2.42814749e-01] [ 1.66929018e+00 -1.21861172e+00 2.10423708e+00 ... 9.01582778e-01 -2.23138049e-01 1.86969995e+00] [-1.62180662e+00 -1.22624791e+00 1.26422858e+00 ... 2.02191997e+00 2.13491750e+00 -8.46415401e-01] ... [ 2.91081309e+00 -3.45268771e-02 -3.95915210e-02 ... 3.03304434e+00 -7.94602334e-01 2.44257107e-01] [-6.26590431e-01 2.26983142e+00 -2.47463882e-01 ... 1.54372799e+00 -1.83566916e+00 1.43020129e+00] [ 1.30182195e+00 1.42793441e+00 2.17894459e+00 ... 9.10294473e-01 1.24361634e+00 -1.13349760e+00]]] [[[ 1.44063935e-01 -1.01633497e-01 -2.16242418e-01 ... -1.78502053e-01 1.99331462e-01 -2.47384489e-01] [ 6.00258522e-02 3.59700322e-01 -9.37332958e-02 ... 1.46667600e-01 3.94074380e-01 4.19095039e-01] [ 3.54310751e-01 1.24392897e-01 -7.15321163e-03 ... 1.94996715e-01 -3.16470921e-01 -3.08730155e-01] ... [ 1.13117628e-01 3.36588621e-01 2.21097972e-02 ... -2.73780704e-01 -2.09727674e-03 -3.73037696e-01] [ 6.93801463e-01 5.02574854e-02 6.80701137e-01 ... 2.82870859e-01 -1.81650817e-01 2.65690237e-02] [-1.35811090e-01 -4.55716908e-01 -1.55694410e-02 ... -7.43108094e-02 2.27593228e-01 2.13904977e-01]] [[ 3.40535820e-01 -2.62486160e-01 -2.64614314e-01 ... 4.45298105e-01 -7.65000507e-02 -1.76919773e-02] [-1.83058351e-01 -3.28232855e-01 4.47651565e-01 ... 6.01257682e-01 -2.20208347e-01 3.72190401e-02] [ 1.80874184e-01 -4.20554489e-01 -2.26374045e-01 ... 3.15506347e-02 -1.15403175e-01 1.27633169e-01] ... [ 3.83026719e-01 4.74712923e-02 3.55240613e-01 ... -3.34342986e-01 -3.19597751e-01 2.82238275e-02] [-2.37990737e-01 8.85385498e-02 -5.25180817e-01 ... -2.08295420e-01 2.61431426e-01 2.17019662e-01] [ 3.42619956e-01 -4.68033195e-01 -1.67903513e-01 ... 6.98197633e-02 3.31913650e-01 2.87682414e-01]] [[-1.59285039e-01 1.08023301e-01 -8.45062882e-02 ... -5.17341316e-01 -2.51862764e-01 -1.70523703e-01] [ 8.10078457e-02 5.42287469e-01 -6.48192465e-02 ... 
7.73957223e-02 -1.08866617e-01 -5.89728355e-02] [ 2.52064407e-01 3.61402154e-01 -2.34850064e-01 ... 1.77356616e-01 2.22469047e-01 -9.84767824e-02] ... [-2.99713999e-01 -1.16480403e-01 -9.02362466e-02 ... 8.95878077e-02 1.75723270e-01 -2.02340797e-01] [ 4.46220279e-01 -1.36422545e-01 5.12089133e-01 ... -6.96314946e-02 1.20687209e-01 -3.70899704e-03] [ 3.31463665e-01 9.89920124e-02 2.27871910e-02 ... 7.35948086e-02 4.19216782e-01 3.32255840e-01]] ... [[ 4.96724155e-03 -6.25545457e-02 6.02558963e-02 ... -9.48606357e-02 1.72944456e-01 -2.40091961e-02] [-2.91148722e-02 3.74473333e-01 1.99544504e-02 ... 3.00954521e-01 -3.22281003e-01 1.23853363e-01] [ 6.21742725e-01 -9.93969068e-02 -3.45332026e-01 ... -1.13853283e-01 1.69271588e-01 -2.62778133e-01] ... [ 7.12049082e-02 2.24067107e-01 9.40150917e-02 ... 1.60999447e-02 -2.02888787e-01 -1.17709853e-01] [-2.04099622e-02 3.02805789e-02 -5.43773472e-01 ... -1.08647393e-03 7.02602714e-02 1.62537605e-01] [-4.03759748e-01 -9.16442648e-02 4.71493125e-01 ... -2.20202848e-01 4.43309061e-02 -1.20194130e-01]] [[-5.75220883e-01 -1.10341594e-01 3.38196158e-02 ... 1.46246478e-01 -1.48814261e-01 -3.06489140e-01] [ 3.36742401e-01 -1.68977529e-01 -5.22846617e-02 ... 3.56085062e-01 1.72617882e-01 2.22939014e-01] [ 4.14268792e-01 2.02363953e-01 6.62931576e-02 ... 5.44377118e-02 -9.23177823e-02 -1.80104449e-01] ... [-5.99887734e-03 -2.29856148e-02 -4.26333338e-01 ... -1.94265619e-01 -1.43195167e-01 -9.09522548e-02] [-5.44846617e-02 -3.39915007e-01 -3.70237201e-01 ... -4.49282438e-01 3.11246943e-02 -5.37195951e-02] [ 1.91088483e-01 -5.85085928e-01 -2.83777621e-02 ... -4.30480629e-01 -2.45981514e-02 1.75935030e-01]] [[-2.11668283e-01 7.94476569e-02 -1.20746098e-01 ... 3.49902391e-01 -3.35600197e-01 -3.78629506e-01] [ 3.61228585e-01 1.78585529e-01 3.42651457e-01 ... -4.49660532e-02 1.46128565e-01 -4.50053178e-02] [ 6.88128993e-02 -2.84435421e-01 2.82573272e-02 ... -1.02630422e-01 1.25684228e-03 -2.21492320e-01] ... 
[ 1.70271933e-01 -8.16315238e-04 -1.68792993e-01 ... -5.45806348e-01 -1.88032538e-01 -3.32702309e-01] [ 3.49642783e-01 -4.26144332e-01 1.10583842e-01 ... 3.05590834e-02 -2.22658888e-01 1.93421245e-01] [-3.43802631e-01 4.06695046e-02 4.31444138e-01 ... -3.01085562e-01 2.65730694e-02 -1.10587649e-01]]] [[[-2.16513181e+00 -5.38333714e-01 3.01630586e-01 ... -7.78973937e-01 7.75483608e-01 -2.22180343e+00] [-1.06461656e+00 -1.37771857e+00 1.00670421e+00 ... -3.20000440e-01 -2.20907077e-01 1.32433093e+00] [-1.98508060e+00 1.16827822e+00 3.06393683e-01 ... 2.75852871e+00 -8.46976042e-01 1.51804402e-01] ... [-1.95968866e+00 -1.10501075e+00 1.66996777e+00 ... -5.10751128e-01 -2.31307197e+00 1.87362075e+00] [-8.14337909e-01 -2.16031790e+00 -9.67202038e-02 ... 8.50120634e-02 1.15058327e+00 -5.08941352e-01] [-1.84616613e+00 2.64092684e+00 4.60897446e-01 ... -6.76003218e-01 -5.53648651e-01 -1.71297520e-01]] [[ 2.72524767e-02 -1.38604379e+00 1.28888464e+00 ... 2.41126347e+00 4.74178374e-01 8.63551557e-01] [ 2.50154763e-01 -1.38034070e+00 2.64708161e+00 ... 2.22514302e-01 1.44719768e+00 -8.63195479e-01] [-1.92487344e-01 1.49319375e+00 1.92910635e+00 ... -1.57539904e+00 -4.58341539e-01 -1.79973078e+00] ... [ 1.92419040e+00 -2.30310369e+00 8.77980590e-01 ... -7.46657610e-01 -1.08951330e+00 -1.93858171e+00] [-3.02667356e+00 5.83625793e-01 -9.42788422e-01 ... 7.06132233e-01 -1.15314233e+00 -5.83671927e-01] [-3.65726280e+00 -6.00993276e-01 9.69668090e-01 ... -4.29325551e-01 5.35680175e-01 -1.16975391e+00]] [[-2.78493881e-01 1.57928884e+00 -6.91478699e-02 ... -2.71877003e+00 -3.62555385e-01 -1.15226877e+00] [-8.09864581e-01 4.73974347e-02 -2.13804051e-01 ... 1.83901298e+00 2.89981580e+00 -2.57635295e-01] [ 1.13114893e+00 1.91152304e-01 1.30891371e+00 ... 1.34300244e+00 2.10817552e+00 4.38018531e-01] ... [ 2.09804487e+00 -2.32132792e+00 -1.31233871e+00 ... -2.24773750e-01 -1.53266281e-01 1.11120379e+00] [ 1.25964129e+00 -4.47279483e-01 -7.65471280e-01 ... 
1.45245016e+00 1.31338501e+00 -2.75981814e-01] [ 1.88788509e+00 3.78441960e-01 -1.57110488e+00 ... -1.67141461e+00 -9.78417993e-01 4.52178448e-01]] ... [[ 2.56514847e-01 -3.79461825e-01 -1.53477275e+00 ... -1.21050489e+00 5.48166037e-01 -1.04663420e+00] [-1.94668591e+00 1.55953360e+00 1.67865229e+00 ... -1.55492377e+00 1.65243411e+00 -1.51767254e+00] [ 1.11831999e+00 -4.60842997e-02 -2.75323462e+00 ... -1.53511286e+00 8.38582069e-02 2.84057403e+00] ... [-7.45943069e-01 1.03039777e+00 2.84521174e+00 ... -2.45114326e+00 1.70890599e-01 -2.28764415e+00] [ 8.30452085e-01 1.62482727e+00 6.57276571e-01 ... -5.40429831e-01 7.98324645e-01 -1.65877092e+00] [-7.14125276e-01 -1.51202857e+00 -1.81046343e+00 ... 1.02652061e+00 -3.80711126e+00 -1.59300065e+00]] [[ 1.63630712e+00 2.97526002e+00 -9.04742479e-01 ... -1.82159305e-01 2.65327901e-01 -1.46482289e+00] [-1.41004002e+00 1.63140059e+00 1.08156788e+00 ... 1.79086494e+00 2.27563441e-01 -4.44165111e-04] [ 7.09217429e-01 -1.71582282e+00 7.62163103e-01 ... -6.93672299e-01 3.10976386e+00 -1.20173156e-01] ... [-1.27970243e+00 -5.33394754e-01 6.41667902e-01 ... 7.79538006e-02 8.07312310e-01 -2.83968210e+00] [-2.17024422e+00 -1.84733152e+00 -3.49445820e-01 ... -8.47691178e-01 -7.81425387e-02 -9.02759314e-01] [-3.13088584e+00 -7.97661185e-01 1.43907809e+00 ... -3.05187416e+00 -2.46009380e-01 8.79222572e-01]] [[-3.06321271e-02 6.46191120e-01 -5.12796402e-01 ... -4.28600341e-01 -2.18207741e+00 -3.77272749e+00] [-5.35892010e-01 -7.00637922e-02 -1.70509410e+00 ... 1.32366693e+00 -1.15598607e+00 2.91315556e+00] [ 1.41676024e-01 1.12386477e+00 -1.19911981e+00 ... 7.13164449e-01 1.27060592e-01 3.75702238e+00] ... [ 1.25748551e+00 2.73219132e+00 5.97547829e-01 ... -1.62004137e+00 -6.95735812e-01 -2.18219534e-01] [ 1.09698498e+00 3.64783078e-01 -4.49006617e-01 ... 5.01530059e-02 2.28551626e+00 1.25888431e+00] [-8.45705330e-01 1.38311377e-02 -7.23558366e-02 ... 
-1.57306254e+00 1.13614869e+00 1.08481562e+00]]] [[[-3.34053591e-04 -1.52650073e-01 -9.06311348e-02 ... 1.40260205e-01 4.84040240e-03 1.00597650e-01] [ 6.98590502e-02 -1.15558833e-01 -1.14099883e-01 ... 1.19551839e-02 -2.11450770e-01 3.60763781e-02] [ 3.61024216e-02 4.66209538e-02 1.53755248e-01 ... -2.80491680e-01 5.13752326e-02 3.99980322e-02] ... [ 4.13546674e-02 3.79620939e-02 -2.13817805e-02 ... 7.16051236e-02 4.48444393e-03 -4.53461148e-02] [-1.26004264e-01 -1.55095279e-01 -1.34644032e-01 ... 9.06271115e-02 7.35357478e-02 1.17776848e-01] [ 1.75145399e-02 7.79606476e-02 4.78481911e-02 ... 1.44403294e-01 1.76168308e-02 1.18299931e-01]] [[ 3.48274843e-05 -2.58148074e-01 7.51243159e-02 ... -8.74551609e-02 -5.02960309e-02 -5.04217744e-02] [-5.73486695e-03 5.69986664e-02 -7.76141062e-02 ... 3.33772339e-02 -2.72054851e-01 -2.33068205e-02] [ 3.00828572e-02 4.94073629e-02 9.75520685e-02 ... -3.48059982e-02 -6.20976724e-02 -5.58517240e-02] ... [-3.75535078e-02 6.37973249e-02 -5.81665635e-02 ... 1.45462409e-01 4.32112776e-02 -3.05795521e-01] [ 1.38923749e-01 1.24403797e-01 9.29222815e-03 ... -1.07368574e-01 1.99495792e-01 -2.76008602e-02] [-5.59530184e-02 1.42880401e-03 -5.74393272e-02 ... -6.02829382e-02 3.94136049e-02 7.85720050e-02]] [[-3.85528170e-02 -5.85628599e-02 -4.89194095e-02 ... -1.02351606e-01 1.10378779e-01 -1.09371714e-01] [ 5.70738129e-02 9.03567746e-02 2.44430706e-01 ... 1.74640678e-02 -2.23270699e-01 5.26103899e-02] [ 7.46664777e-02 -3.37941721e-02 2.00929139e-02 ... -1.30508408e-01 -7.91502520e-02 1.23869710e-01] ... [-1.25570297e-01 -1.88674331e-01 -1.07024275e-01 ... -1.10865876e-01 -1.09968953e-01 -1.71325763e-03] [-1.31807581e-01 2.37723008e-01 -2.25529894e-02 ... 2.95846388e-02 6.70812801e-02 8.52691084e-02] [ 1.99242100e-01 1.29321059e-02 3.33939157e-02 ... 1.55520752e-01 5.70190698e-02 -6.29151016e-02]] ... [[ 1.33476645e-01 1.09534554e-01 1.32164523e-01 ... 
-1.31110787e-01 -1.14717394e-01 -2.26155698e-01] [-1.69763401e-01 -8.46809372e-02 -1.66163236e-01 ... 1.05302714e-01 1.68200284e-01 -1.91596553e-01] [-8.19930509e-02 -3.50717828e-02 2.36057952e-01 ... -1.36920833e-03 3.85621674e-02 -6.02476187e-02] ... [ 6.92601278e-02 -3.00952531e-02 -1.21739032e-02 ... 6.81699589e-02 9.52981040e-02 -6.07430860e-02] [-1.60968035e-01 -8.67457464e-02 -5.63188829e-02 ... 2.44496651e-02 2.41322145e-02 -1.19934738e-01] [ 2.00829148e-01 3.41220759e-02 1.25776112e-01 ... 1.35606781e-01 -9.31733176e-02 6.37714416e-02]] [[-1.01767227e-01 2.44941667e-01 1.58528820e-01 ... 1.29563168e-01 -1.16537966e-01 -1.47779167e-01] [-1.32250026e-01 6.15830123e-02 -9.62545201e-02 ... -1.42105848e-01 8.92149732e-02 -1.51316077e-01] [-1.61799584e-02 -7.18643218e-02 2.42217064e-01 ... 1.03785463e-01 3.07508167e-02 -3.50643545e-02] ... [-6.94057159e-03 1.25685781e-01 1.09886050e-01 ... 2.05076307e-01 2.45807007e-01 -1.41967228e-03] [ 1.92002188e-02 -4.34497409e-02 3.03877033e-02 ... -6.63254410e-02 -9.32460837e-03 6.65315315e-02] [-1.59631759e-01 6.17412571e-03 -5.32009304e-02 ... 4.73689958e-02 -1.15444632e-02 9.78977308e-02]] [[-1.41934454e-01 -3.78629453e-02 -6.19562715e-02 ... 2.96499860e-02 -4.54058722e-02 -1.35410447e-02] [ 9.00455713e-02 -1.35219833e-02 -1.40150383e-01 ... -7.60454908e-02 -1.06441677e-01 -5.30322688e-03] [ 1.20739387e-02 -1.51867583e-01 -6.32175356e-02 ... -1.63455516e-01 4.62816730e-02 -5.56575060e-02] ... [ 3.60443071e-02 1.17474109e-01 -1.10971011e-01 ... 1.01003416e-01 1.16889916e-01 -6.92961887e-02] [-2.52339602e-01 1.45644829e-01 1.76649466e-01 ... 6.90576434e-02 -3.47407237e-02 -4.08846922e-02] [ 9.51837003e-02 9.71261263e-02 -1.82140339e-02 ... 1.20569982e-01 7.41439536e-02 7.86901265e-02]]] [[[ 1.66266471e-01 -6.46603286e-01 3.93947989e-01 ... -7.50469685e-01 1.61012024e-01 6.62716448e-01] [-9.34422970e-01 -1.49174184e-01 5.33666946e-02 ... 
-3.74882191e-01 4.11916196e-01 -3.24860930e-01] [ 7.81416357e-01 8.49715024e-02 2.96036124e-01 ... 7.65703842e-02 3.90906066e-01 -1.18326950e+00] ... [-2.56998748e-01 1.50823325e-01 -7.75286198e-01 ... -2.02483192e-01 -1.03577387e+00 -2.92945355e-01] [ 7.86139071e-01 1.02006406e-01 4.64789011e-02 ... -9.36703742e-01 -7.09777892e-01 4.63499516e-01] [-5.84080875e-01 -1.23386645e+00 -5.66704631e-01 ... 8.90962601e-01 -6.17971346e-02 6.22628629e-01]] [[ 5.20451903e-01 6.13868758e-02 1.10465646e+00 ... 7.31942713e-01 1.94693670e-01 9.79503930e-01] [ 6.36430085e-01 6.53156519e-01 2.15469703e-01 ... -2.67014086e-01 -1.62121892e-01 -1.84598923e-01] [ 8.01114514e-02 -7.51304030e-02 2.66495675e-01 ... -1.63286817e+00 -3.12370658e-01 -3.23930830e-01] ... [-1.01003706e+00 9.04970914e-02 -2.13199973e-01 ... 3.60740185e-01 -3.44437927e-01 -1.55338317e-01] [-6.29360616e-01 6.94066659e-02 -3.25734407e-01 ... 2.11457729e-01 -6.93071663e-01 -3.86501670e-01] [ 7.46033937e-02 2.64967382e-01 -1.51096821e-01 ... 1.71161577e-01 7.08753347e-01 1.73905149e-01]] [[-6.77212119e-01 4.72485870e-01 6.26043260e-01 ... 3.90644819e-01 1.80753782e-01 7.39085138e-01] [ 6.80166669e-03 5.25715053e-01 -1.12561151e-01 ... 1.11315918e+00 5.99173307e-01 -6.30215228e-01] [ 3.38751853e-01 -6.96350396e-01 4.75606173e-01 ... 2.73238420e-01 1.33214772e-01 4.42913085e-01] ... [ 5.02897203e-01 -5.01234531e-02 9.98179149e-03 ... -7.20418274e-01 5.16406894e-01 5.91816083e-02] [ 1.02421260e+00 -7.49325827e-02 3.05112660e-01 ... -8.38308692e-01 -1.10006094e+00 1.70969754e-01] [ 2.86947876e-01 -1.08773613e+00 1.01910210e+00 ... 8.95995051e-02 -2.29050189e-01 -1.40998363e-01]] ... [[ 3.36035222e-01 -3.85152876e-01 6.47189498e-01 ... -6.15135849e-01 -9.13224876e-01 -5.47440112e-01] [ 2.39053637e-01 -3.48369628e-01 -2.25450657e-02 ... 1.21951036e-01 3.68590087e-01 8.06545258e-01] [ 6.33983612e-01 -1.25580561e+00 1.19168654e-01 ... 7.20297277e-01 -1.95622429e-01 -9.43767488e-01] ... 
[-7.09457621e-02 -4.73367035e-01 9.80111957e-01 ... -1.80759564e-01 -4.98642683e-01 -1.00908089e+00] [-9.10863876e-01 -2.03323275e-01 1.90120161e-01 ... 2.79281080e-01 5.11391461e-01 7.26735815e-02] [ 5.27893186e-01 4.13033575e-01 9.13793296e-02 ... 1.25080034e-01 -2.46013865e-01 1.28467843e-01]] [[ 2.88891464e-01 9.03148413e-01 -5.53261817e-01 ... 2.07966864e-02 8.86336863e-01 -1.68160945e-02] [-7.94117153e-01 -1.05076826e+00 3.05858791e-01 ... 2.43825167e-01 2.11013928e-01 -1.44220516e-01] [ 1.67966094e-02 -6.34954125e-02 7.40797967e-02 ... -1.90005213e-01 6.23901963e-01 5.22201121e-01] ... [-5.29119730e-01 -1.69753939e-01 7.68420875e-01 ... 2.18671858e-01 5.00670254e-01 6.95432946e-02] [ 1.14866745e+00 1.30047381e+00 -5.30903041e-01 ... 2.07206696e-01 1.58770099e-01 4.82432723e-01] [ 1.13903260e+00 -5.20223916e-01 -1.83755279e-01 ... 5.28842881e-02 5.91740072e-01 -3.90828371e-01]] [[ 7.71741986e-01 5.73775887e-01 -5.22945598e-02 ... -1.93002313e-01 -1.10165954e+00 -1.99514806e-01] [-8.49266768e-01 3.98926474e-02 8.86440873e-02 ... 6.95635080e-02 2.60646969e-01 3.78469974e-01] [-6.85994208e-01 4.27575827e-01 1.26749992e-01 ... 8.75743866e-01 -1.80290997e-01 1.10038914e-01] ... [ 6.61052346e-01 -8.87049794e-01 -7.18925074e-02 ... -1.98381290e-01 -4.42129880e-01 3.98821384e-01] [ 8.10302496e-02 -3.60087365e-01 -7.23138809e-01 ... -1.59622061e+00 3.62981886e-01 1.63822219e-01] [-2.21625879e-01 2.24678621e-01 -8.16249073e-01 ... -1.06590867e+00 -4.90218580e-01 -2.21638143e-01]]]] [[[[-7.76867568e-01 -1.49461091e-01 -1.07588410e+00 ... -2.67138839e-01 8.36089671e-01 -1.15704966e+00] [ 4.89121050e-01 -2.44361714e-01 8.94500837e-02 ... -2.17492636e-02 -2.31291384e-01 1.14535570e-01] [ 1.73859727e-02 2.44601464e+00 1.49343443e+00 ... 7.05854475e-01 2.99060911e-01 2.38493472e-01] ... [-8.57911631e-02 -1.35425448e+00 8.82653221e-02 ... -2.94461012e-01 -5.01193643e-01 -2.53514111e-01] [-3.80099118e-01 8.62267792e-01 -5.04650533e-01 ... 
5.63585818e-01 -9.72701728e-01 1.21979511e+00] [ 1.19677603e+00 -5.10643125e-01 9.35928285e-01 ... -1.10535061e+00 -2.67434418e-01 -6.11511469e-01]] [[-4.77705836e-01 -2.07125232e-01 -3.63946885e-01 ... 7.90555358e-01 -4.22685087e-01 6.71319246e-01] [ 1.17768180e+00 2.45829504e-02 -1.06232680e-01 ... -5.35898745e-01 3.06365699e-01 -9.50442910e-01] [ 8.90868843e-01 1.56746054e+00 7.91155696e-01 ... 1.23936519e-01 -3.68569106e-01 4.32487041e-01] ... [ 5.85000992e-01 -2.55336851e-01 5.66094033e-02 ... -1.68784648e-01 -6.08632922e-01 9.51686680e-01] [-2.38064632e-01 1.11463889e-02 2.50203490e-01 ... -5.96196532e-01 9.17004272e-02 -4.87988703e-02] [ 2.15022057e-01 -5.91572046e-01 2.30985373e-01 ... -8.03700805e-01 1.67493236e+00 3.51989597e-01]] [[-3.21564972e-01 8.93597007e-01 1.10654938e+00 ... -9.71962392e-01 1.74688354e-01 1.36467636e-01] [-1.80911791e+00 -4.29514535e-02 5.51387370e-01 ... -1.60829484e+00 5.42833745e-01 4.14556116e-02] [ 1.29671693e-01 -5.93254745e-01 2.95315742e-01 ... 7.18320906e-01 6.33788049e-01 3.51657212e-01] ... [-4.34409201e-01 2.46696979e-01 -2.31486365e-01 ... 2.28567123e-01 6.85910344e-01 -4.58457470e-01] [-3.90859276e-01 5.95433339e-02 6.95147663e-02 ... 2.47920290e-01 -1.19610123e-01 3.98393929e-01] [-3.83363634e-01 3.62227947e-01 -9.64695692e-01 ... 1.72548503e-01 9.24112856e-01 -1.69019893e-01]] ... [[ 2.19454974e-01 -4.47940677e-01 -4.66558300e-02 ... -6.21395409e-01 6.58307672e-01 -1.95265636e-01] [ 8.74789059e-01 8.46405447e-01 2.61052847e-01 ... 7.89137721e-01 1.13341665e+00 6.76243424e-01] [-1.24559276e-01 1.03617400e-01 -1.46709836e+00 ... -4.19577304e-03 -3.89183372e-01 -1.98779613e-01] ... [-1.48566827e-01 -1.10812819e+00 -7.65262961e-01 ... 1.62674919e-01 3.69245261e-01 -7.98812807e-01] [ 2.11120754e-01 -3.49458069e-01 2.54254192e-01 ... 1.11741379e-01 1.28000808e+00 1.04640335e-01] [-1.27029431e+00 4.64804322e-01 1.53607965e-01 ... 
-5.23511767e-01 -2.54383624e-01 -2.99692631e-01]] [[-2.45425329e-01 7.52463341e-01 9.33622718e-01 ... 4.12223876e-01 6.36579037e-01 1.00372815e+00] [-1.38197815e+00 2.47014865e-01 -3.31919044e-01 ... 8.47247839e-02 1.13508463e-01 3.24968606e-01] [-3.51036102e-01 7.45609522e-01 -9.85286772e-01 ... -9.49182212e-01 -2.27602497e-02 4.32007492e-01] ... [ 7.32419252e-01 6.67743087e-02 7.09576428e-01 ... -7.02799559e-01 -9.70357835e-01 -3.94935012e-01] [-1.70718825e+00 -1.07662845e+00 8.16854060e-01 ... -4.66416806e-01 -3.42312098e-01 3.55607033e-01] [-8.79023969e-01 -3.42705101e-01 6.51040256e-01 ... -1.41596901e+00 -9.26234186e-01 1.42661408e-01]] [[-2.51076847e-01 -7.25624621e-01 7.49621570e-01 ... -1.04664847e-01 -6.53030217e-01 -1.13134593e-01] [-5.77080727e-01 4.68536973e-01 2.00949478e+00 ... -7.09992826e-01 4.10964131e-01 1.32833946e+00] [-1.09999388e-01 5.42638540e-01 7.77016401e-01 ... 1.19192740e-02 3.16509068e-01 7.11749494e-01] ... [ 1.86590230e+00 -6.44665778e-01 -6.57517388e-02 ... -2.54975468e-01 4.00313914e-01 -2.01939508e-01] [ 2.46773168e-01 -5.37134111e-01 9.62298512e-01 ... -7.18434036e-01 1.02950037e-01 -7.75193751e-01] [-2.43370816e-01 -8.98665786e-01 -1.15298547e-01 ... -6.35588765e-02 -2.75173664e-01 1.03019275e-01]]] [[[-1.78314948e+00 9.98818219e-01 -2.98480093e-01 ... -3.24536800e-01 7.29912639e-01 1.20834872e-01] [-1.50641847e+00 -1.77359104e+00 -1.75105846e+00 ... 1.50539458e-01 1.08944952e+00 -9.06717598e-01] [-5.58712363e-01 1.22838545e+00 -1.08823586e+00 ... -1.16717346e-01 1.11508417e+00 3.50661683e+00] ... [ 3.43754232e-01 1.91223991e+00 -8.19253564e-01 ... -2.39889216e+00 -1.88758409e+00 -4.51990873e-01] [ 1.14236593e+00 1.19826007e+00 -1.17279685e+00 ... -1.28239572e+00 2.03599882e+00 -1.89539626e-01] [ 2.26123500e+00 -3.18092883e-01 7.42411390e-02 ... 1.29158652e+00 -6.85318589e-01 5.41498780e-01]] [[-1.68055904e+00 1.42606831e+00 2.56124783e+00 ... 
-2.33991951e-01 -5.28033495e-01 -1.37223795e-01] [ 7.92315423e-01 -3.40361387e-01 -3.22115159e+00 ... -1.71775639e+00 3.73048455e-01 5.80021143e-01] [-2.30189347e+00 -1.09221804e+00 -6.18986368e-01 ... -4.34471041e-01 -1.03795040e+00 -4.74677533e-01] ... [ 1.53477120e+00 -9.64894056e-01 -3.18374205e+00 ... -1.99303424e+00 -2.34733939e+00 2.40062118e+00] [-7.63603806e-01 7.65720844e-01 3.38677669e+00 ... -5.24873972e-01 -3.99376214e-01 2.56045365e+00] [-1.18732500e+00 -1.62116706e+00 2.20052814e+00 ... -1.37570345e+00 1.65655181e-01 3.14656186e+00]] [[ 1.83158064e+00 2.30900884e+00 -1.19481575e+00 ... 2.33141899e-01 1.29317236e+00 -2.13958716e+00] [-4.50622022e-01 -8.37471902e-01 -1.35557485e+00 ... -2.54881120e+00 -1.33485305e+00 5.98239787e-02] [ 6.80330217e-01 -1.20997286e+00 -2.49597430e+00 ... -2.50849873e-01 9.20336246e-01 1.73218638e-01] ... [ 8.74635935e-01 1.05598509e+00 -1.59638751e+00 ... -2.88325214e+00 3.34351718e-01 -2.38714433e+00] [-6.39548481e-01 -1.94567072e+00 3.33327532e-01 ... 6.67523324e-01 2.52265066e-01 2.38625574e+00] [ 9.16780055e-01 2.16212702e+00 7.13497221e-01 ... 1.05965614e+00 9.00996268e-01 7.94621825e-01]] ... [[ 2.50445604e+00 1.11142218e+00 -2.24441266e+00 ... 7.18760714e-02 -9.09776628e-01 -1.06953025e+00] [-5.18573999e-01 6.15078092e-01 -1.85085702e+00 ... 4.87722158e-01 3.54127198e-01 -1.26395679e+00] [-1.07548583e+00 8.78594875e-01 -2.11197901e+00 ... -2.23358059e+00 1.49748516e+00 -2.67120719e+00] ... [ 5.62110484e-01 9.00653541e-01 6.88542187e-01 ... 2.86270738e+00 1.63226449e+00 1.67918718e+00] [-4.01795767e-02 -1.57011285e-01 -7.78566658e-01 ... 8.19986641e-01 -1.32313633e+00 6.59597576e-01] [-1.91750157e+00 -1.10250568e+00 1.00099421e+00 ... 5.68027437e-01 4.75338161e-01 1.49590325e+00]] [[ 4.14705366e-01 1.90518904e+00 2.70065141e+00 ... 1.39365995e+00 -1.80714443e-01 1.07106590e+00] [ 4.34212536e-01 -2.12329224e-01 1.80460775e+00 ... 
-1.55916631e+00 2.70162791e-01 -1.64604094e-02] [ 1.20402777e+00 1.14429677e+00 -3.88963372e-01 ... 6.09308720e-01 5.30882068e-02 -1.26541495e+00] ... [ 1.31944346e+00 -6.18428051e-01 3.38911206e-01 ... -5.45936406e-01 -4.16000396e-01 -3.18822503e+00] [-3.63955808e+00 2.71494448e-01 9.67308819e-01 ... -1.19810045e+00 2.55200446e-01 -1.10805488e+00] [-1.79161823e+00 -8.39388847e-01 -1.35250580e+00 ... 1.96741253e-01 1.80801898e-01 -7.63789415e-01]] [[-8.73093724e-01 -2.22516823e+00 -1.14118242e+00 ... 1.93954992e+00 -1.56094861e+00 -1.14769220e+00] [ 1.69271171e-01 -3.85847718e-01 2.63145542e+00 ... 8.19642365e-01 -1.49374163e+00 -1.19341588e+00] [-1.16966152e+00 -3.34261566e-01 -1.03220820e+00 ... -3.75977069e-01 5.90488732e-01 1.42288005e+00] ... [ 1.43288243e+00 1.99026978e+00 -3.89694214e+00 ... 1.04998261e-01 7.67909944e-01 9.53238547e-01] [-8.31227526e-02 -4.57049102e-01 -3.34159076e-01 ... -1.13460159e+00 -1.56702805e+00 -3.02877799e-02] [ 6.70044661e-01 -1.52124479e-01 4.07396048e-01 ... -1.69077623e+00 -1.15806484e+00 4.51743394e-01]]] [[[ 2.94098333e-02 1.91589326e-01 -2.96989322e-01 ... 4.70125943e-01 1.46773711e-01 -1.36219924e-02] [ 2.76664257e-01 4.92190152e-01 -1.24650560e-01 ... -5.46432078e-01 1.28388569e-01 1.52007848e-01] [ 1.68053016e-01 2.88878921e-02 -4.13861156e-01 ... -2.90898144e-01 -1.54301316e-01 5.17385937e-02] ... [-1.65648498e-02 -1.68562740e-01 2.55377918e-01 ... 2.11003333e-01 2.15346932e-01 -2.06655607e-01] [ 1.03213817e-01 5.06564498e-01 2.26805642e-01 ... 1.37622029e-01 1.08240515e-01 -2.62782454e-01] [-6.24991506e-02 1.82014499e-02 -1.89242810e-01 ... -1.84586450e-01 1.79993868e-01 2.59309649e-01]] [[-1.20782435e-01 7.51318336e-02 -4.68435287e-01 ... 2.46702492e-01 9.21606049e-02 4.81316060e-01] [ 1.92485169e-01 -2.00797871e-01 -2.58928724e-02 ... 2.73716331e-01 -3.06637079e-01 -1.52807832e-01] [ 1.78788990e-01 -9.62903202e-02 -3.66178423e-01 ... -8.21818318e-03 -2.27791116e-01 8.09741095e-02] ... 
[ 4.23147902e-02 -1.17576189e-01 -3.57245177e-01 ... -8.47852677e-02 -4.94807452e-01 3.73229027e-01] [ 7.20436275e-02 8.33536834e-02 -6.45839125e-02 ... 5.65890111e-02 1.69932231e-01 1.02712825e-01] [ 1.60524160e-01 1.25520349e-01 -3.33866388e-01 ... -2.50903547e-01 2.31928855e-01 -6.44841909e-01]] [[-1.52327016e-01 3.78057957e-02 1.00578330e-01 ... -5.08531332e-01 -2.44953930e-01 -4.63741153e-01] [-3.83026391e-01 9.41571817e-02 1.96637139e-02 ... -1.52074903e-01 1.67297214e-01 3.62464413e-02] [ 2.43864711e-02 -9.26407799e-02 -9.43160057e-03 ... 1.78139567e-01 7.95146078e-02 -1.55943245e-01] ... [ 1.98737353e-01 -2.93226272e-01 4.52535123e-01 ... 2.42555663e-01 2.15538189e-01 -2.50489656e-02] [ 7.24198520e-02 1.18202582e-01 2.41055891e-01 ... -7.45422840e-02 -1.11852907e-01 -6.17872551e-03] [ 2.73931563e-01 1.84878260e-01 1.93089750e-02 ... -1.39850259e-01 -2.35567719e-01 1.11657925e-01]] ... [[ 2.32270882e-01 -2.02570319e-01 -2.09338099e-01 ... 1.33176357e-01 8.69274959e-02 9.61391348e-03] [-1.37629107e-01 1.49825588e-01 -2.81067103e-01 ... -8.87977034e-02 1.38262480e-01 -1.22255161e-01] [-7.58765191e-02 8.83496776e-02 -4.99232769e-01 ... 2.74055630e-01 -4.56156544e-02 2.39828020e-01] ... [-6.79625571e-02 3.67057383e-01 3.50569248e-01 ... -2.48142019e-01 -5.93653210e-02 -3.82668048e-01] [ 1.78984609e-02 -5.12629092e-01 -1.72511414e-01 ... -1.55577898e-01 -1.17491096e-01 -4.98931378e-01] [-7.14997470e-01 8.25742334e-02 1.32996097e-01 ... -1.48261562e-01 -1.37414023e-01 1.94877699e-01]] [[ 9.31531042e-02 -3.88681173e-01 1.68176997e-03 ... 1.18379064e-01 -5.79809658e-02 5.00732541e-01] [ 5.66823602e-01 -2.66514450e-01 -1.69809222e-01 ... -2.14259431e-01 -9.65481400e-02 1.95519589e-02] [-1.64331153e-01 9.00047943e-02 -2.09027499e-01 ... -3.16128522e-01 -8.59802142e-02 4.11561668e-01] ... [-1.98620528e-01 -2.34971330e-01 5.47632910e-02 ... 3.76794249e-01 -3.01761478e-01 -1.76070064e-01] [ 2.31918693e-01 4.34440911e-01 -1.34933934e-01 ... 
3.65208387e-02 -1.77101225e-01 1.94101915e-01] [ 2.45236848e-02 1.61342204e-01 -8.09747353e-02 ... -7.99800381e-02 4.62649353e-02 1.18345171e-01]] [[ 3.77479225e-01 2.92015616e-02 -3.49290594e-02 ... 1.71860889e-01 -9.06461775e-02 -1.57667994e-01] [-3.06070328e-01 -5.60902715e-01 -4.40216690e-01 ... 2.14283913e-01 6.70578480e-02 -4.03955370e-01] [ 2.71430999e-01 2.98027694e-03 -7.28340745e-02 ... -2.63167977e-01 -4.05090600e-02 -1.21064506e-01] ... [-3.97233576e-01 1.71863556e-01 -2.62789458e-01 ... -5.93750458e-03 2.20319167e-01 1.09815218e-01] [-1.92245737e-01 -4.24080253e-01 -2.26656824e-01 ... 1.95165008e-01 2.23739129e-02 -2.38748223e-01] [-9.66104027e-03 2.69683540e-01 -5.86703718e-02 ... 4.72625382e-02 3.76965880e-01 3.44458759e-01]]] [[[-3.86929482e-01 -3.57446969e-01 -6.68565273e-01 ... 5.38972318e-01 -4.86992374e-02 -2.67137837e+00] [-3.33412081e-01 -1.11428547e+00 -8.94363821e-01 ... -1.59995317e+00 -6.26028717e-01 -1.28785536e-01] [ 1.25646734e+00 2.56127596e-01 -2.28882432e-01 ... -1.91882193e+00 1.40091050e+00 9.78202522e-01] ... [-1.37707198e+00 5.31958222e-01 1.71833956e+00 ... -4.44409341e-01 1.37537205e+00 -1.28962505e+00] [ 1.77890122e+00 1.15931845e+00 -1.00462687e+00 ... 9.49636519e-01 1.84020698e+00 7.43707597e-01] [ 9.43772137e-01 -1.93370551e-01 1.35114825e+00 ... 3.92043233e-01 8.33701901e-03 -9.22987387e-02]] [[ 2.05538511e+00 1.01988316e+00 2.13099980e+00 ... 3.33115816e-01 1.95586538e+00 -6.04080617e-01] [-2.17413306e-01 -1.06194830e+00 5.73072374e-01 ... -1.52075052e+00 1.36679411e+00 -6.27616704e-01] [ 2.06037664e+00 -1.20221710e+00 2.72072613e-01 ... 2.55190492e+00 2.31195307e+00 2.43170476e+00] ... [-4.05376554e-01 1.89395034e+00 6.51670456e-01 ... 1.54453731e+00 3.88440420e-03 -2.10776734e+00] [-1.57168376e+00 -8.41956258e-01 -8.01164448e-01 ... 9.01884556e-01 1.81432891e+00 -2.70556569e+00] [ 8.08299363e-01 1.36041129e+00 -2.89756000e-01 ... 
-2.15261006e+00 1.67948520e+00 -1.63040137e+00]] [[ 2.29681277e+00 -4.37651348e+00 3.03831387e+00 ... -2.55506730e+00 1.93104103e-01 4.91448432e-01] [ 2.65958953e+00 -7.96608865e-01 2.05380607e+00 ... 1.61720896e+00 6.20008290e-01 4.44934666e-01] [ 6.15815401e-01 1.25614333e+00 5.25817692e-01 ... -6.12013757e-01 -1.31919765e+00 3.26275444e+00] ... [-1.98953557e+00 -1.95102763e+00 -1.91471529e+00 ... -4.76934910e-01 -2.34840706e-01 -6.24769986e-01] [-4.27731723e-01 -7.54327416e-01 -8.09876844e-02 ... 1.51698709e-01 1.58819520e+00 -3.20622516e+00] [-1.14961401e-01 3.19313347e-01 2.47225857e+00 ... -2.50408101e+00 6.64835274e-01 1.18722096e-01]] ... [[ 5.40429711e-01 -6.96916819e-01 -1.19490659e+00 ... 3.02887797e+00 2.07217149e-02 1.84247088e+00] [-7.50599384e-01 2.67277598e+00 7.92810678e-01 ... -1.27754688e+00 2.87734866e-01 9.21985686e-01] [-2.59607267e+00 1.46645916e+00 1.81173956e+00 ... -3.21740770e+00 3.76508087e-01 -1.86444950e+00] ... [-1.43353879e+00 -1.25809348e+00 -1.35475039e-01 ... 1.67273438e+00 -7.82627821e-01 3.52898747e-01] [-6.12432897e-01 -1.21518123e+00 2.70503616e+00 ... -3.24324131e-01 -4.01972353e-01 1.45900238e+00] [-1.34404159e+00 7.09837377e-01 -7.28719413e-01 ... -6.71001911e-01 2.70149755e+00 -2.77122211e+00]] [[-4.42783982e-01 1.87361228e+00 -9.65652823e-01 ... -4.22857881e-01 -2.10081315e+00 -4.69917417e-01] [ 1.47852361e+00 -1.09886837e+00 3.95904392e-01 ... 2.21669292e+00 4.71544564e-01 1.14125967e+00] [ 9.32999432e-01 -2.15547889e-01 4.00708526e-01 ... -1.96804559e+00 3.33459735e-01 -2.60155886e-01] ... [-9.57088649e-01 -4.40662324e-01 -1.19853187e+00 ... 1.83163130e+00 2.07066059e+00 3.37579060e+00] [ 3.14370334e-01 1.20550358e+00 2.25893617e+00 ... 1.92195714e+00 -3.45216179e+00 -3.30761194e+00] [-5.85421398e-02 -4.62757528e-01 1.35530376e+00 ... 2.50663280e+00 7.93030739e-01 -7.07589567e-01]] [[-1.83911908e+00 -1.70934641e+00 4.59258974e-01 ... 
1.75374126e+00 2.86839068e-01 -7.32659400e-02] [-1.36588955e+00 -1.71299443e-01 -1.86755157e+00 ... -5.08118868e-01 -1.96728301e+00 5.41773140e-01] [ 6.57782137e-01 -2.71483850e+00 -6.96981311e-01 ... -1.01842391e+00 -2.17890501e+00 -4.63283807e-01] ... [-5.86999357e-01 1.86439502e+00 -4.71920109e+00 ... 3.60162473e+00 -1.38140810e+00 5.98553777e-01] [-4.93891388e-02 4.06833410e-01 -1.10283959e+00 ... 1.15447059e-01 8.20586860e-01 -1.85237050e-01] [ 3.04788470e-01 1.05276895e+00 1.23299360e+00 ... 1.79863557e-01 1.09339905e+00 -2.79223859e-01]]] [[[-8.93866457e-03 -1.37773510e-02 -6.85316219e-04 ... 7.74123445e-02 -1.91372946e-01 3.16245109e-02] [ 7.66657293e-02 -2.26702660e-01 1.93075523e-01 ... -1.65395346e-02 -1.40560329e-01 9.84746739e-02] [ 9.27063450e-02 4.36023297e-03 -6.54855445e-02 ... 2.90388539e-02 -4.87240665e-02 -9.80450809e-02] ... [ 1.18377611e-01 2.48734728e-02 -1.28773913e-01 ... -1.49979055e-01 1.47705838e-01 -3.75936367e-02] [ 2.47337166e-02 -2.45660141e-01 -1.68355659e-01 ... 9.51703414e-02 4.04750519e-02 -8.33810344e-02] [ 3.59365530e-02 1.07693290e-02 1.97940134e-03 ... -1.67483022e-03 1.05617717e-01 -4.84354235e-02]] [[ 1.52950495e-01 5.17334007e-02 -9.80334207e-02 ... -2.92220879e-02 1.08024120e-01 -4.08057235e-02] [-1.26639739e-01 4.72153351e-02 -2.79486608e-02 ... -9.21617150e-02 -1.66587994e-01 -1.04890212e-01] [ 1.57849401e-01 2.45804014e-03 -2.32991632e-02 ... 1.14864051e-01 1.58476681e-01 1.54715583e-01] ... [ 9.97345895e-02 7.64068961e-02 1.91922367e-01 ... 7.30363727e-02 -2.87188473e-03 -6.81949034e-02] [ 3.96970324e-02 -1.04184262e-01 7.82393441e-02 ... 1.41091406e-01 -2.28628397e-01 3.46192151e-01] [-4.99086268e-02 -9.89663526e-02 -1.62245542e-01 ... -6.14501089e-02 3.13290278e-03 9.48592722e-02]] [[-1.96711235e-02 -1.12900145e-01 -6.29289299e-02 ... -2.13857554e-03 -1.26195386e-01 5.94328865e-02] [ 3.89456525e-02 4.75878976e-02 1.38378650e-01 ... 
-4.53772172e-02 7.40990043e-02 1.39830813e-01] [-6.84027672e-02 6.84893057e-02 -7.09850863e-02 ... -1.25637338e-01 7.24257901e-02 -2.49992087e-02] ... [-1.01385480e-02 -5.55433566e-03 -9.18531641e-02 ... 1.01869926e-01 -7.84856007e-02 1.19147018e-01] [ 8.84983465e-02 1.49281830e-01 8.32920596e-02 ... -6.28944188e-02 -1.07440606e-01 6.05187453e-02] [ 7.90146366e-02 5.34211472e-03 -1.27845004e-01 ... -1.21278167e-01 2.83696484e-02 -1.82269230e-01]] ... [[ 2.75894403e-01 -2.25949347e-01 -6.12119734e-02 ... -5.77558726e-02 1.01448007e-01 1.06429130e-01] [ 1.14281714e-01 2.46490687e-02 3.09365597e-02 ... -6.62272237e-03 -1.48039967e-01 -2.07184106e-01] [ 1.65813118e-01 -1.30624279e-01 1.01615421e-01 ... 3.40023153e-02 -1.60074070e-01 -6.66470453e-02] ... [ 4.82480973e-03 -4.04533297e-02 7.00902566e-02 ... 1.17043786e-01 -1.10619947e-01 1.20018031e-02] [ 7.08357617e-02 6.63595945e-02 -8.18470400e-03 ... -4.43450324e-02 -1.87241107e-01 6.68134317e-02] [-1.25128910e-01 -1.86013266e-01 4.67240475e-02 ... 6.97219223e-02 -3.11223883e-02 2.42329612e-01]] [[-1.46007612e-01 1.33512691e-01 3.48600149e-02 ... -1.06099236e-03 5.78551181e-02 -8.69328678e-02] [ 1.10615641e-01 2.02721506e-02 -3.40712294e-02 ... -1.16825633e-01 1.55597359e-01 -7.90789053e-02] [ 5.25620729e-02 6.47484064e-02 -2.09898800e-02 ... 9.16771777e-03 5.30855805e-02 -9.97173321e-03] ... [-1.03595972e-01 -1.04464322e-01 3.59987989e-02 ... -3.69104296e-02 -4.57587801e-02 -6.53355047e-02] [-1.86385997e-02 -6.12558015e-02 -1.48006831e-03 ... 6.29262403e-02 -9.15821549e-03 8.53668749e-02] [ 1.01619981e-01 1.82525605e-01 3.66005413e-02 ... -1.87299430e-01 8.53015780e-02 2.85132546e-02]] [[ 4.31426205e-02 -6.44787624e-02 -2.00944141e-01 ... 4.52302136e-02 -1.18651651e-01 5.36266975e-02] [ 7.59569407e-02 -9.31725949e-02 -1.94625691e-01 ... -1.45261064e-01 -1.07412888e-02 1.15880758e-01] [ 4.13313368e-03 -7.60271624e-02 4.28882241e-02 ... 9.75634977e-02 -4.06140909e-02 2.40364913e-02] ... 
[ 7.80813023e-02 4.19460870e-02 -3.08357906e-02 ... -5.11258990e-02 8.01793113e-03 1.66002020e-01] [ 6.63014203e-02 1.21323198e-01 -9.58480239e-02 ... 6.73524439e-02 -1.04765981e-01 -4.49783541e-02] [-8.10309723e-02 -1.44039407e-01 5.17943799e-02 ... -6.54111290e-03 -2.53617652e-02 -3.43221836e-02]]] [[[-2.08891198e-01 6.71366394e-01 -2.82508343e-01 ... 1.90378316e-02 9.37649459e-02 1.40997577e+00] [ 1.00893760e+00 -1.22178271e-01 -2.45268613e-01 ... 1.62173025e-02 -4.70047861e-01 1.40969366e-01] [ 5.44897735e-01 1.30179107e-01 -2.48160020e-01 ... 2.73233235e-01 5.51689982e-01 -4.34313297e-01] ... [-6.24387085e-01 -1.94753125e-01 4.82210934e-01 ... -8.94434378e-02 7.59026641e-03 -3.81522328e-02] [ 1.13238022e-02 2.14626327e-01 -2.89124668e-01 ... 4.23994750e-01 2.23090127e-03 1.85629740e-01] [ 5.85713089e-01 3.34557086e-01 -7.48931646e-01 ... 3.00260663e-01 -2.06217226e-02 4.67493057e-01]] [[ 5.32419384e-01 -2.00920403e-01 -3.59279931e-01 ... 1.96019560e-01 -1.05298793e+00 -1.00770202e-02] [ 8.44729006e-01 -3.13501656e-01 -4.76900667e-01 ... -7.78581202e-02 -6.20653387e-03 3.03547502e-01] [-1.43896624e-01 8.33899260e-01 -2.69080788e-01 ... 4.73718166e-01 -3.68114412e-01 3.42326611e-01] ... [-7.64155746e-01 2.20960245e-01 -3.88034955e-02 ... 3.15608501e-01 -6.51255131e-01 -1.98702544e-01] [ 7.24239051e-01 -1.82450965e-01 -5.29806376e-01 ... -9.14466083e-01 -5.33320427e-01 4.34654236e-01] [ 2.50748277e-01 -1.63894072e-01 9.09084082e-03 ... 2.10833117e-01 -9.42352951e-01 -1.11124551e+00]] [[-2.94642597e-01 -5.48475742e-01 -4.87619638e-01 ... 2.25156751e-02 -4.52499717e-01 -1.40062749e+00] [-4.83728588e-01 7.26683557e-01 1.16878021e+00 ... 2.51022935e-01 6.94238722e-01 -6.55608177e-01] [-3.91510546e-01 -5.05713880e-01 1.46246934e+00 ... -4.47421558e-02 -2.41336927e-01 5.50276995e-01] ... [ 8.71088266e-01 1.55128822e-01 -3.33676726e-01 ... -3.73032272e-01 -3.99783067e-02 9.75811958e-01] [-2.63948925e-02 6.17567636e-02 -1.17297733e+00 ... 
-3.37307513e-01 -5.54986238e-01 3.95045131e-01] [-3.47152501e-01 -6.10535085e-01 -7.28132576e-02 ... 1.61114290e-01 -1.51555538e+00 -1.50799602e-02]] ... [[-4.26982969e-01 8.49263728e-01 -4.95743096e-01 ... -4.68465865e-01 1.01117837e+00 1.18416595e+00] [-1.65309325e-01 2.50386357e-01 7.85424948e-01 ... 1.23188823e-01 -1.15145326e+00 5.58419108e-01] [-3.83739859e-01 -5.90179920e-01 5.67214072e-01 ... 6.91277325e-01 1.78376839e-01 -1.03134379e-01] ... [-2.64624327e-01 -1.13678324e+00 -3.67849410e-01 ... 5.36081254e-01 -8.06381702e-02 3.88745248e-01] [ 2.06894562e-01 -1.64568090e+00 -6.77184493e-04 ... -5.33104658e-01 -2.55266339e-01 -2.79744387e-01] [ 2.03278467e-01 3.69828701e-01 -2.10522652e-01 ... -3.63131434e-01 -9.17987302e-02 1.39733851e-01]] [[ 2.08757728e-01 3.09103072e-01 2.95709223e-01 ... 2.57642984e-01 2.48565391e-01 -8.44764635e-02] [-2.35558420e-01 5.64078569e-01 -1.80534959e-01 ... -4.82234992e-02 -1.00679624e+00 3.92330706e-01] [-2.13366866e-01 7.47748256e-01 -1.15880921e-01 ... -6.73408449e-01 6.23259783e-01 -4.97702628e-01] ... [-6.91019058e-01 -4.09110665e-01 -5.72491586e-02 ... 4.72308636e-01 -3.26903403e-01 -4.01551008e-01] [ 2.72236735e-01 -5.18566072e-01 1.70054480e-01 ... -2.61332572e-01 1.58178195e-01 -1.05554104e+00] [-3.59127522e-01 2.86035657e-01 -3.37695152e-01 ... -3.36850017e-01 -1.66605443e-01 -5.29159270e-02]] [[ 1.26536265e-01 3.58109981e-01 -2.18967736e-01 ... -7.44558424e-02 5.27311921e-01 1.46316573e-01] [ 1.03192717e-01 -1.16532290e+00 -5.24007976e-01 ... -1.44401893e-01 -1.14820814e+00 -1.67420685e-01] [-1.09028377e-01 -1.91802159e-01 -1.24004371e-01 ... 2.52157934e-02 -6.15675449e-01 3.17947775e-01] ... [ 5.78665316e-01 -1.00038850e+00 2.40942359e-01 ... -6.98805749e-01 -3.30511630e-01 3.43217432e-01] [ 5.62131941e-01 -3.20558727e-01 1.63892031e-01 ... -9.71476376e-01 3.26781839e-01 6.10655725e-01] [-1.64643809e-01 1.11004837e-01 -2.78921098e-01 ... -7.33437777e-01 -6.38517499e-01 1.39557123e-02]]]] ... 
[[[[-7.55618453e-01 -9.23248589e-01 -8.22890103e-01 ... -8.03519607e-01 4.26898509e-01 -2.25788578e-01] [-1.87920138e-01 1.41888976e+00 2.93900877e-01 ... 5.16487956e-02 1.17319202e+00 -1.86088562e-01] [-6.49489537e-02 -1.35019884e-01 7.98208475e-01 ... 1.05112441e-01 3.05626001e-02 -4.80227679e-01] ... [ 8.50215405e-02 -1.93621546e-01 -2.23812610e-01 ... -5.63324928e-01 -1.02125144e+00 -5.99050343e-01] [-7.21409082e-01 -1.18786223e-01 -4.13758814e-01 ... -1.00336313e+00 1.31054372e-01 -9.48497176e-01] [-2.51063909e-02 4.07593548e-01 1.76990274e-02 ... 1.32302082e+00 -1.47460151e+00 -8.65641475e-01]] [[ 6.03257060e-01 -6.79800749e-01 -8.19203019e-01 ... -8.40016723e-01 -5.22404134e-01 -1.02892719e-01] [-6.67823404e-02 1.89156306e+00 3.02881777e-01 ... -4.89709705e-01 1.26625925e-01 6.58935085e-02] [-8.12676549e-02 5.79844713e-01 3.95332053e-02 ... 5.55724978e-01 3.12244087e-01 1.46766937e+00] ... [-7.77928412e-01 -1.10743141e+00 -1.26710677e+00 ... 2.30861023e-01 -2.35063270e-01 -2.83483773e-01] [-5.25341392e-01 -5.77503033e-02 -3.86598140e-01 ... 5.42439759e-01 6.28881156e-02 3.76478106e-01] [-6.65462971e-01 -1.56929418e-01 -9.84688163e-01 ... -1.86219347e+00 4.60521221e-01 -5.20206869e-01]] [[ 1.42686501e-01 2.85268545e-01 2.52462476e-01 ... 3.33199918e-01 -3.80044699e-01 -4.67007719e-02] [ 1.69267565e-01 1.23620570e+00 1.26534373e-01 ... 1.39554814e-02 -6.37525141e-01 3.02444667e-01] [ 5.46005778e-02 -1.87175348e-02 -1.13207683e-01 ... -8.36980641e-01 2.18016222e-01 -4.09741223e-01] ... [-3.02462608e-01 8.93710792e-01 7.11730003e-01 ... -2.58910865e-01 2.75376141e-01 -1.63334727e-01] [ 3.75548184e-01 -2.20211625e-01 -5.90525508e-01 ... 2.27391168e-01 -5.41837215e-01 -8.23348939e-01] [ 1.67707056e-01 -1.73212722e-01 -4.59282398e-01 ... -3.09660792e-01 1.66562271e+00 -3.70046049e-01]] ... [[-3.46090525e-01 7.34179318e-01 1.40852571e+00 ... 2.35494703e-01 6.36791706e-01 7.73258746e-01] [ 4.63257432e-01 -1.34408462e+00 -5.16688585e-01 ... 
1.41838169e+00 2.58464932e-01 3.16423833e-01] [ 3.82128716e-01 1.20089686e+00 -5.57751060e-01 ... -1.71541944e-01 -3.35960463e-02 3.64257455e-01] ... [ 4.71682027e-02 3.34019959e-01 -5.44501781e-01 ... -9.75887775e-01 -4.85758930e-01 4.52324927e-01] [ 1.13544989e+00 2.07807049e-02 4.20839757e-01 ... -2.56504238e-01 1.27728570e+00 -1.03151584e+00] [-2.12757941e-02 -8.92170727e-01 -3.39807957e-01 ... 5.06474614e-01 4.67309505e-02 -1.38169587e-01]] [[-3.72256100e-01 3.70046824e-01 -8.76928568e-01 ... 3.03052276e-01 1.52384913e+00 -7.38328248e-02] [ 8.51795152e-02 1.17294800e+00 -6.86565995e-01 ... 4.06542629e-01 1.04872513e+00 7.28125453e-01] [ 1.85606885e+00 6.71819329e-01 -1.04380774e+00 ... 2.61355817e-01 -3.64650905e-01 1.05786598e+00] ... [-1.31389701e+00 4.07526731e-01 2.16794685e-01 ... 1.08110583e+00 -5.73709846e-01 1.05236709e+00] [-8.55735540e-01 5.45153022e-01 -1.11624885e+00 ... -1.25593737e-01 1.02419806e+00 -3.38160962e-01] [-7.54044414e-01 1.19310059e-02 1.07687020e+00 ... 1.83719203e-01 -6.10329397e-02 1.28437921e-01]] [[-3.33991289e-01 -4.27629620e-01 4.48152758e-02 ... -1.03569007e+00 4.77965653e-01 -4.47331041e-01] [ 5.89069843e-01 -7.03567028e-01 1.43334642e-01 ... 1.51796937e+00 -9.53337908e-01 5.70415854e-01] [ 5.01153827e-01 -1.22750366e+00 -3.84748042e-01 ... -6.51919186e-01 -4.99241441e-01 -3.38577628e-01] ... [ 4.44120735e-01 -3.12145174e-01 -8.13750267e-01 ... 4.72610652e-01 -4.49009210e-01 1.23545074e+00] [ 2.96134293e-01 -9.55312192e-01 -2.77307957e-01 ... 8.06435585e-01 1.02125371e+00 3.75770688e-01] [ 6.73655808e-01 1.20424676e+00 -3.99607390e-01 ... -4.89983968e-02 8.03070664e-01 8.30083489e-01]]] [[[ 7.38667771e-02 -3.78540444e+00 2.35126209e+00 ... -3.82623553e+00 9.73714590e-01 1.36230552e+00] [ 6.24461845e-02 1.99917912e+00 -1.25164795e+00 ... 2.06328575e-02 -6.01735830e-01 -1.22989845e+00] [ 6.88219726e-01 1.61091411e+00 3.44284296e-01 ... 1.87765956e-01 -1.32168442e-01 1.52992737e+00] ... 
[ 1.24681853e-01 -8.39929938e-01 -6.46764040e-01 ... 1.82434216e-01 2.48856306e+00 8.22395980e-01] [-1.77038461e-02 1.76172316e+00 -7.84949362e-01 ... -1.18641400e+00 -2.76076406e-01 -4.43975002e-01] [ 1.47249138e+00 -8.06881309e-01 -5.93477070e-01 ... -2.96217489e+00 -1.19308639e+00 3.54117811e-01]] [[ 4.10191983e-01 -1.46481586e+00 1.40126324e+00 ... -1.98884916e+00 3.20102167e+00 -2.61518598e+00] [-1.83917689e+00 -8.86351585e-01 -1.57057539e-01 ... 1.75266421e+00 1.39475667e+00 8.72646928e-01] [-7.87443072e-02 -1.56314576e+00 -2.31789038e-01 ... -1.06456256e+00 -1.57491282e-01 -9.77598429e-01] ... [-3.72138500e+00 8.16991568e-01 9.74724889e-01 ... 5.07679105e-01 1.37554920e+00 -1.17761016e+00] [ 1.12431371e+00 2.10569000e+00 -8.57155442e-01 ... 1.39616621e+00 -1.54744327e+00 3.24191064e-01] [-5.67652702e-01 1.92674804e+00 -1.34181702e+00 ... -8.62866759e-01 -4.58032787e-01 2.22044826e-01]] [[ 5.59241295e-01 -2.74692595e-01 9.90056932e-01 ... 1.73996496e+00 -9.89373401e-02 -1.18437672e+00] [-1.56367838e+00 -6.09847546e-01 -3.02811898e-02 ... -5.36818624e-01 8.59979093e-01 -3.19803119e-01] [-1.47929990e+00 -8.80549192e-01 6.60009205e-01 ... -8.34803820e-01 -6.57685220e-01 -2.74381727e-01] ... [-3.40532482e-01 8.36213410e-01 -9.62965727e-01 ... -2.65488088e-01 1.82883859e+00 -8.03807020e-01] [ 5.45783162e-01 2.17715859e-01 3.33293200e-01 ... -8.59540761e-01 -5.20618558e-01 -9.34877574e-01] [-1.74135363e+00 2.21645549e-01 1.09740794e-02 ... 1.98677981e+00 -1.55726957e+00 -1.82425216e-01]] ... [[ 1.24517274e+00 2.95002878e-01 -1.18448853e+00 ... 1.12119175e-01 1.40605009e+00 -2.40100884e+00] [-3.91338497e-01 7.81192303e-01 8.85948539e-01 ... -1.20616429e-01 1.64446163e+00 3.65724146e-01] [-1.31239498e+00 -1.83535755e-01 -1.44864345e+00 ... -8.07466507e-02 2.81826295e-02 -9.09299314e-01] ... [ 6.14156365e-01 4.34584916e-01 -1.42028618e+00 ... -9.94960785e-01 -1.71975923e+00 -1.15644085e+00] [ 2.44278598e+00 -2.45225620e+00 -1.38029408e+00 ... 
-2.24066925e+00 4.93979275e-01 3.67888784e+00] [ 2.83257395e-01 1.71264279e+00 8.76824141e-01 ... -1.45582509e+00 -8.74763966e-01 -1.41502416e+00]] [[-4.75270450e-01 6.61137886e-03 -2.67824262e-01 ... -8.33812892e-01 -3.73825431e-01 1.14339900e+00] [ 2.29871243e-01 -1.90978169e+00 -6.42032623e-01 ... 1.32213831e+00 8.67711246e-01 5.46221323e-02] [-1.90152025e+00 -3.40981185e-01 1.54974461e+00 ... -1.51964724e+00 -1.40365636e+00 1.50445834e-01] ... [-2.02113613e-02 3.47648883e+00 -4.07749563e-01 ... 7.72713304e-01 -9.17261600e-01 5.67405641e-01] [ 1.85964668e+00 4.19973284e-01 2.44520259e+00 ... 1.52590060e+00 2.07451272e+00 -1.50023088e-01] [-1.43813181e+00 2.74154590e-03 9.11589324e-01 ... 3.17010462e-01 8.86868715e-01 9.89240050e-01]] [[ 4.58166480e-01 3.31522912e-01 7.90264666e-01 ... 2.84947246e-01 -1.39718091e+00 -3.47893929e+00] [ 1.14076865e+00 -2.95044684e+00 -3.67727399e-01 ... -4.93087798e-01 1.21237791e+00 1.49474895e+00] [ 1.10877025e+00 1.86543286e+00 3.91834021e+00 ... -7.19183385e-01 -2.10126281e+00 9.42379236e-03] ... [-1.26422906e+00 1.33350328e-01 6.18971229e-01 ... -8.29182863e-01 1.98954511e+00 -4.51458842e-01] [ 1.61643171e+00 7.58022428e-01 -8.49199414e-01 ... 2.42158756e-01 -9.46802020e-01 1.91681635e+00] [-1.90198028e+00 -1.72090816e+00 2.07577443e+00 ... 7.57863939e-01 1.82295668e+00 -1.07718766e+00]]] [[[-3.64644021e-01 -3.48498583e-01 -4.37014885e-02 ... 2.12076202e-01 -8.57694596e-02 1.02934130e-02] [-1.83156118e-01 -1.44008383e-01 2.24963322e-01 ... -1.67375118e-01 -6.31797493e-01 6.31488115e-02] [-3.01728159e-01 4.01858121e-01 -3.34221303e-01 ... 5.21519184e-01 -4.49617237e-01 -4.18866068e-01] ... [ 5.50546825e-01 -1.06570996e-01 8.02426413e-02 ... 1.46258295e-01 8.95638838e-02 -1.80528536e-02] [ 2.14191109e-01 4.07340862e-02 -7.66746700e-02 ... -6.02135435e-02 5.03740050e-02 -9.89275705e-03] [-6.37088194e-02 -1.20462896e-02 -3.56318116e-01 ... 
-1.63120672e-01 2.54889041e-01 1.44824505e-01]] [[-4.56066042e-01 2.58632571e-01 -5.67334965e-02 ... -8.32205117e-02 -1.57542810e-01 -5.03475428e-01] [-1.78348944e-01 1.29091725e-01 -2.90752083e-01 ... -7.11554736e-02 -3.72548513e-02 -1.83987409e-01] [-2.45101497e-01 2.46100321e-01 -2.96291590e-01 ... 1.02752663e-01 1.45564899e-01 -3.04401703e-02] ... [ 1.29628986e-01 -4.02656406e-01 -3.14451158e-01 ... -2.41355643e-01 -2.20060851e-02 9.11719278e-02] [-1.36040673e-01 2.12738991e-01 1.75022751e-01 ... -1.10519879e-01 -1.18752889e-01 1.78531613e-02] [ 3.40959191e-01 -4.88458462e-02 -1.60826042e-01 ... -2.00414047e-01 9.59108993e-02 1.49472905e-02]] [[ 1.07668743e-01 2.46462196e-01 4.19528261e-02 ... 2.62420714e-01 4.28313464e-01 -2.47992083e-01] [ 4.16534632e-01 1.16441213e-01 -2.86390632e-01 ... 1.56489000e-01 2.77435094e-01 3.94505113e-01] [-2.80509979e-01 -1.12479463e-01 -3.91544342e-01 ... -1.55270169e-03 -1.34339795e-01 9.73030180e-02] ... [ 1.30779088e-01 2.28031963e-01 -9.92614627e-02 ... 4.42078829e-01 2.89402574e-01 -2.15974554e-01] [ 1.57284871e-01 3.05189937e-01 -1.07919887e-01 ... 6.24704994e-02 1.06039867e-01 3.22415009e-02] [-2.09296644e-01 1.95220098e-01 -8.78762975e-02 ... 1.29068509e-01 -6.83088154e-02 -8.04996770e-03]] ... [[-7.97361732e-02 4.06934410e-01 3.62395484e-04 ... 3.42504472e-01 1.55638993e-01 -6.70248926e-01] [-1.04961321e-01 -3.32975611e-02 -2.39142120e-01 ... -2.30637535e-01 3.21117312e-01 -1.86572850e-01] [-6.11914247e-02 3.11395079e-01 2.22915739e-01 ... -4.79644328e-01 5.89074969e-01 3.93144995e-01] ... [-9.20061320e-02 -4.06235866e-02 -2.10846499e-01 ... -1.42424619e-02 -1.13581484e-02 3.85628164e-01] [ 2.69928247e-01 -3.38067204e-01 -2.99254864e-01 ... 3.78097713e-01 -4.35880631e-01 -1.95051104e-01] [ 3.12850803e-01 -1.79296926e-01 -2.75517523e-01 ... 4.11675572e-02 -8.63048062e-02 3.30198526e-01]] [[ 3.50763381e-01 4.08572378e-03 -2.69470036e-01 ... 
8.39042887e-02 3.89782824e-02 -4.32396293e-01] [ 6.78729191e-02 -9.74726677e-02 1.06401876e-01 ... -2.66070981e-02 3.23815882e-01 -2.08175898e-01] [-1.70474768e-01 -1.51786376e-02 -6.03234172e-01 ... -2.01220274e-01 2.33172905e-02 -8.74664560e-02] ... [ 1.60383195e-01 -4.53750342e-01 8.06447864e-02 ... 3.30695957e-02 -2.18700334e-01 -3.03977332e-03] [ 3.72179598e-01 4.94018160e-02 -2.47853771e-01 ... 1.28873870e-01 1.83659747e-01 -1.99205965e-01] [-4.37736124e-01 8.42196345e-02 2.14708686e-01 ... -1.99094221e-01 -2.53618956e-01 2.85574615e-01]] [[ 1.13269128e-01 -3.72193009e-01 2.57394791e-01 ... -6.00337191e-03 -2.93481797e-01 -5.97928874e-02] [-1.51601275e-02 1.97636038e-01 -3.69043231e-01 ... 3.66839886e-01 1.67545393e-01 -1.31223470e-01] [-9.60288718e-02 -4.72896785e-01 7.91135788e-01 ... 6.32942617e-02 -2.52400905e-01 -2.09798533e-02] ... [ 4.33698148e-01 3.43346566e-01 2.08121926e-01 ... -4.15568128e-02 1.55037548e-02 4.41676289e-01] [ 5.90181164e-02 -5.33368438e-03 -2.77949989e-01 ... 1.64565176e-01 4.40480798e-01 3.29044342e-01] [-1.51020521e-02 -5.00995815e-02 1.52486756e-01 ... 6.90708682e-02 1.22008575e-02 -1.63411781e-01]]] [[[ 1.10373926e+00 9.79677081e-01 -1.12478125e+00 ... -9.79699552e-01 4.08409739e+00 1.25958669e+00] [ 6.56252444e-01 -8.21145415e-01 2.37282529e-01 ... 2.96707898e-01 -8.83244634e-01 1.88343489e+00] [ 1.50566876e-01 1.23961854e+00 2.04792714e+00 ... -8.67284477e-01 7.82102764e-01 -2.11139545e-01] ... [-2.35349941e+00 3.61515045e-01 -3.89234924e+00 ... 1.77304074e-01 -1.44553173e+00 -2.62296081e+00] [ 8.76949728e-01 1.24452102e+00 -1.11968005e+00 ... -1.00806868e+00 1.43977022e+00 -1.04168326e-01] [-2.86571240e+00 1.07664430e+00 7.29420304e-01 ... -1.79161176e-01 -7.35759974e-01 3.58386606e-01]] [[ 2.17740393e+00 -1.19487382e-01 -1.43571779e-01 ... -1.81628585e+00 -7.79249907e-01 -3.60616326e-01] [-9.04417112e-02 -1.11429095e+00 1.57951045e+00 ... 
-5.23912370e-01 -1.99219716e+00 -1.47969916e-01] [-7.80837119e-01 1.40301526e+00 1.54424950e-01 ... 3.01510239e+00 -2.55317658e-01 1.75047427e-01] ... [-9.82016698e-02 -1.64976895e-01 -1.22691357e+00 ... 1.42511582e+00 -6.93480849e-01 3.25427055e+00] [ 1.96106529e+00 -4.11214605e-02 -1.19515944e+00 ... 9.86725867e-01 1.07909538e-01 -2.53576487e-01] [-2.21824265e+00 9.75630045e-01 -5.19744158e-01 ... -8.70716333e-01 1.48858547e+00 2.51820326e+00]] [[ 4.82924581e-01 -9.51093793e-01 -1.05365419e+00 ... 3.29816133e-01 3.01369834e+00 -5.17340183e-01] [ 6.34769052e-02 -3.51386994e-01 2.62802434e+00 ... -4.83717620e-01 -4.05844069e+00 -5.04842512e-02] [ 2.44765544e+00 -7.67144680e-01 1.42818129e+00 ... 1.39784396e+00 -2.69352269e+00 -1.26287758e+00] ... [-1.48964596e+00 1.50688326e+00 -2.21674013e+00 ... -3.46682000e+00 -1.08496106e+00 9.01950359e-01] [-1.10095119e+00 -4.04170752e-01 1.25337863e+00 ... 8.76638830e-01 2.50997281e+00 2.87789607e+00] [ 2.41898584e+00 -1.11733541e-01 1.00310981e+00 ... -2.81956863e+00 1.39005280e+00 1.90873992e+00]] ... [[-3.50763535e+00 1.76227903e+00 2.19305182e+00 ... -2.49898076e+00 -2.02326226e+00 -5.34035206e-01] [ 9.17777479e-01 -5.36070764e-01 -5.16534448e-01 ... -2.33700442e+00 -1.99143723e-01 -1.23119104e+00] [ 1.02685058e+00 1.53921470e-01 1.25805259e+00 ... -3.14192653e+00 -1.06728327e+00 -2.20396733e+00] ... [ 5.36851883e-01 5.25749847e-02 -3.85765362e+00 ... -1.43128201e-01 -1.03211284e+00 8.47332954e-01] [-8.97751927e-01 -6.55920133e-02 -2.38643718e+00 ... -7.84448206e-01 3.85570621e+00 -2.67778587e+00] [ 2.03788805e+00 -1.89078808e-01 -5.64538956e-01 ... -1.45180178e+00 1.95999563e+00 1.16710889e+00]] [[ 1.64448547e+00 1.52814031e-01 -3.24417734e+00 ... -2.23954630e+00 8.02585125e-01 -5.79961061e-01] [-1.13035512e+00 -1.62514973e+00 -2.07500011e-02 ... 1.98390305e+00 -4.64344651e-01 -1.53184223e+00] [ 1.89422834e+00 -1.76792169e+00 3.01787305e+00 ... -8.95982206e-01 2.46522740e-01 7.04462886e-01] ... 
[ 2.39477468e+00 1.51179075e+00 1.81051910e+00 ... -9.79819596e-01 1.15321207e+00 3.19662452e-01] [-7.94891596e-01 9.91588533e-02 1.76105142e+00 ... -6.12518847e-01 -6.91720188e-01 -1.50498128e+00] [-8.61907721e-01 -4.41338792e-02 -3.02502227e+00 ... 2.36880207e+00 -6.90935791e-01 -7.85241902e-01]] [[-3.22036117e-01 -1.99227142e+00 2.36690307e+00 ... 2.08237723e-01 6.51689619e-02 1.26080215e+00] [-7.97052920e-01 2.34565830e+00 -9.16564688e-02 ... 1.06713265e-01 -3.59313786e-01 -1.09757781e+00] [ 2.66262470e-03 -1.42720580e+00 3.65399808e-01 ... -7.82852888e-01 2.63837308e-01 1.26262105e+00] ... [ 2.51045227e+00 2.07369900e+00 4.75343883e-01 ... 1.81682503e+00 2.71573710e+00 -1.69066584e+00] [ 2.38499260e+00 -7.04146028e-01 -1.09249318e+00 ... 3.58690947e-01 8.38914454e-01 -1.43060195e+00] [-8.62927884e-02 -2.49341702e+00 2.04762101e+00 ... -1.03431511e+00 -2.66955853e+00 -1.05859053e+00]]] [[[ 8.15167949e-02 2.34864396e-03 2.59845573e-02 ... 1.02650799e-01 2.79215705e-02 1.11500330e-01] [ 7.92617872e-02 -1.67768523e-01 -3.56228389e-02 ... -8.09677765e-02 -2.03507870e-01 -4.92948815e-02] [ 5.03389835e-02 -1.47995085e-03 3.25043574e-02 ... -5.92390113e-02 6.36653379e-02 -7.02703372e-02] ... [ 9.67480019e-02 1.66331641e-02 1.55356750e-01 ... -1.10660288e-02 -2.78764404e-02 -3.39971967e-02] [ 1.50690973e-03 -4.29894775e-02 1.19164772e-01 ... -2.52000153e-01 1.19999394e-01 -2.78853606e-02] [-2.99409986e-01 -2.75539774e-02 2.09502093e-02 ... 5.12286201e-02 2.88928952e-02 1.65138077e-02]] [[ 3.93651463e-02 -1.27117440e-01 -2.00809725e-02 ... -4.83327061e-02 1.01598255e-01 -6.18033968e-02] [ 7.41760209e-02 -1.86650574e-01 3.85239497e-02 ... -4.58123162e-02 -7.37474710e-02 1.53543979e-01] [-5.38481623e-02 3.63272987e-02 -1.07445970e-01 ... -9.66791287e-02 1.57077923e-01 1.33337542e-01] ... [-5.06620593e-02 1.16239503e-01 -2.16897931e-02 ... -2.56065596e-02 -1.42427787e-01 2.14480489e-01] [ 3.35317850e-02 -3.87315005e-02 -6.78519756e-02 ... 
-1.27275065e-01 -2.93196887e-01 -1.30456612e-01] [ 7.18977721e-03 6.15807809e-02 1.77924961e-01 ... 1.80897152e-03 1.41804382e-01 -1.52083993e-01]] [[-2.19932348e-02 2.71430891e-02 -2.87878178e-02 ... -2.79435087e-02 -7.63673037e-02 6.06785454e-02] [-9.60460231e-02 -6.40958622e-02 9.21478719e-02 ... 1.67976931e-01 7.07347840e-02 -1.38207272e-01] [ 1.77882344e-01 -4.82474938e-02 -1.71010017e-01 ... 8.30558389e-02 4.64480631e-02 9.29675996e-02] ... [ 6.74893260e-02 1.85811028e-01 2.44925618e-02 ... 1.78841338e-03 5.77898733e-02 -4.41114269e-02] [ 2.65500933e-01 -2.57594381e-02 -9.23687294e-02 ... 1.95950463e-01 1.23088779e-02 -1.38053391e-02] [ 2.73075029e-02 -1.35423124e-01 -1.57774597e-01 ... 1.20315813e-01 1.07524004e-02 2.22373754e-01]] ... [[-1.13619030e-01 1.23216510e-02 1.56004801e-02 ... 4.99336682e-02 4.15468067e-02 7.72891864e-02] [ 1.18901648e-01 1.13336183e-02 -2.22737521e-01 ... 7.45849907e-02 -4.39212993e-02 1.41289473e-01] [ 2.30906997e-02 9.44628380e-04 -1.34272754e-01 ... -3.13614488e-01 7.09373653e-02 5.74404933e-03] ... [ 3.41386050e-02 1.06520606e-02 -3.41151953e-02 ... -1.35354251e-01 -1.05787382e-01 1.83555126e-01] [ 9.41721126e-02 -2.09150463e-03 -2.20161472e-02 ... -8.41495842e-02 -1.09522147e-02 -1.63020138e-02] [-5.36013432e-02 -2.83704400e-02 5.75895468e-03 ... 7.38243237e-02 9.56263114e-03 -8.28490779e-03]] [[-2.22724453e-02 -3.35512944e-02 6.15676865e-02 ... 3.48179787e-02 1.62286982e-01 8.52621123e-02] [ 1.53133810e-01 8.93517062e-02 -1.19044341e-01 ... -9.41671729e-02 -1.23798467e-01 -3.02572817e-01] [-6.20405301e-02 2.04870880e-01 7.61784092e-02 ... 6.91796020e-02 8.88173878e-02 -1.45750539e-02] ... [-1.19799569e-01 6.20601745e-03 -4.78739478e-02 ... 7.52529278e-02 -1.51891291e-01 2.88868770e-02] [-5.61989024e-02 -1.16652055e-02 1.13706067e-01 ... -5.32981306e-02 -1.28375292e-02 -5.92811182e-02] [ 1.42999053e-01 4.12635058e-02 1.18259564e-02 ... 
-9.26157832e-02 1.19650140e-02 3.48991640e-02]] [[ 3.47637497e-02 4.75458838e-02 1.33016840e-01 ... -5.72883077e-02 -9.69098583e-02 1.42942563e-01] [ 1.26980692e-01 -1.55091703e-01 -4.04507993e-03 ... -5.12719303e-02 -1.14313267e-01 5.62997162e-02] [ 7.29234070e-02 4.61747646e-02 -8.32808465e-02 ... 1.55355288e-02 -1.86081648e-01 -7.39680305e-02] ... [ 4.01273817e-02 -6.22452386e-02 2.31351435e-01 ... -3.46859545e-02 -8.57281387e-02 8.98180753e-02] [-2.06397027e-02 1.04848109e-01 2.19619080e-01 ... -1.20334132e-02 -1.19356429e-02 1.50391176e-01] [-2.92121321e-01 -8.61798376e-02 5.56793287e-02 ... 8.82008448e-02 1.44350016e-02 9.87436771e-02]]] [[[-3.74949276e-01 3.40930969e-01 -4.75309975e-02 ... 3.74951780e-01 9.24987018e-01 3.18471849e-01] [ 4.04092193e-01 6.00721650e-02 3.30006838e-01 ... -5.96353561e-02 8.31755996e-02 6.52780116e-01] [ 7.30214268e-02 5.87283611e-01 -2.53594816e-01 ... -4.39140111e-01 -2.05539718e-01 -1.01644218e+00] ... [-5.26194513e-01 3.83707374e-01 7.03232825e-01 ... 6.23871148e-01 5.57702780e-01 4.99897629e-01] [ 6.60184443e-01 4.55584496e-01 1.16280474e-01 ... 1.07913637e+00 -6.73116464e-03 4.34048384e-01] [-8.35364699e-01 8.33874047e-01 7.94707119e-01 ... 1.98685840e-01 -2.73349583e-01 5.68985522e-01]] [[ 4.05924380e-01 3.51028442e-01 5.18157184e-01 ... 7.79737294e-01 -6.85406983e-01 -1.08200765e+00] [-2.93544263e-01 -3.70103912e-03 8.05689171e-02 ... 3.40081543e-01 -2.60979533e-01 1.00246918e+00] [-3.67728233e-01 -3.35645705e-01 1.39970791e+00 ... 7.07562506e-01 1.70906574e-01 1.35402703e+00] ... [-2.62321085e-01 -8.19921494e-01 -8.54316831e-01 ... -1.66597679e-01 7.52189517e-01 -3.91041100e-01] [ 8.66425455e-01 2.19358057e-01 6.62845135e-01 ... 5.34436405e-01 2.19396278e-01 -1.03057064e-01] [-4.08832341e-01 4.00150687e-01 -1.83503464e-01 ... 1.69783905e-01 1.32455066e-01 4.48304474e-01]] [[ 9.77765396e-02 2.49283388e-01 -1.27463686e+00 ... 
-2.71902412e-01 -5.73685229e-01 -6.98379874e-01] [ 8.63293037e-02 -1.85175151e-01 1.00443661e-01 ... 4.38387662e-01 -5.79090416e-01 -1.50578833e+00] [ 7.90729880e-01 2.94813842e-01 2.05670536e-01 ... -6.40910715e-02 -7.63342738e-01 -1.23113334e+00] ... [-2.44954363e-01 -6.80777550e-01 -2.71296930e-02 ... -7.94987738e-01 -1.19561458e+00 8.61530662e-01] [ 5.08638501e-01 -2.23852694e-01 -5.09653747e-01 ... -2.50302196e-01 -3.59542429e-01 -9.55689549e-01] [-1.36439264e-01 -5.16460657e-01 3.79107147e-01 ... 3.11942607e-01 4.05716412e-02 -9.70807076e-02]] ... [[ 3.33269656e-01 2.07343847e-01 1.51989877e+00 ... 2.57999957e-01 -3.12532276e-01 7.25861788e-02] [ 1.90532841e-02 -4.95410234e-01 -4.33374941e-01 ... -1.74668700e-01 5.35847396e-02 -4.75116938e-01] [-8.12101305e-01 4.64751631e-01 -2.55028218e-01 ... 8.95951316e-02 -3.83780658e-01 -5.02458274e-01] ... [ 8.60443473e-01 2.58745737e-02 1.36132944e+00 ... -6.08508885e-01 1.22518903e-02 -4.24615204e-01] [-4.04889546e-02 5.55401966e-02 8.06988835e-01 ... 3.04518044e-01 7.08007991e-01 9.24007446e-02] [-8.15394744e-02 6.70959353e-01 -6.11808658e-01 ... 2.22056761e-01 -4.16755825e-01 -4.18097436e-01]] [[-4.67036545e-01 7.68941879e-01 -6.37129247e-01 ... 9.43395421e-02 8.85291845e-02 -1.46798480e+00] [ 8.58340859e-01 6.04043722e-01 -3.24861795e-01 ... 1.27027974e-01 -6.12044573e-01 -3.52123082e-01] [ 1.44864666e+00 -8.17241788e-01 -3.18053275e-01 ... -7.35483885e-01 -3.63212019e-01 -1.25476563e+00] ... [-1.70959458e-01 1.77395418e-01 -2.81958371e-01 ... -5.00114918e-01 1.94210425e-01 6.91542089e-01] [ 1.51803810e-02 -6.22966111e-01 -3.35926384e-01 ... -5.56179404e-01 -4.90025878e-01 7.06611276e-02] [ 3.07392366e-02 -2.33175550e-02 4.05135423e-01 ... -5.63636482e-01 6.00164711e-01 -5.35421312e-01]] [[ 1.88051276e-02 -1.11566223e-01 -3.84256035e-01 ... -4.74891156e-01 -4.93882418e-01 -7.09967732e-01] [-3.80076736e-01 6.15339100e-01 -8.69594574e-01 ... 
5.80415726e-01 -4.00785893e-01 -6.65831387e-01] [-1.37654424e+00 2.13592678e-01 1.38258010e-01 ... 2.75074035e-01 -3.94892573e-01 -3.31855744e-01] ... [-8.83245289e-01 5.41105330e-01 -8.29254091e-01 ... 5.94899654e-01 3.52532387e-01 -6.95275426e-01] [ 7.79920161e-01 -4.10771277e-03 7.42497593e-02 ... 1.08984828e+00 -9.29733634e-01 -1.17739905e-02] [-2.45054886e-02 -2.68668443e-01 -1.06462729e+00 ... -9.99065280e-01 2.34793678e-01 -1.09895878e-01]]]] [[[[ 5.85264921e-01 9.53027532e-02 -1.63548458e+00 ... 6.96833849e-01 -3.81383717e-01 1.33309484e-01] [-1.50196090e-01 -1.50683498e+00 -3.92742574e-01 ... -6.18802607e-01 6.40951097e-01 -6.32690489e-01] [ 1.08229414e-01 -6.31444871e-01 3.38034481e-01 ... -6.66707873e-01 -4.97537442e-02 4.70455796e-01] ... [ 2.62373269e-01 6.66812947e-03 -3.39322776e-01 ... 3.20444316e-01 -2.15443447e-02 1.02244341e+00] [ 1.08882040e-02 -3.15381318e-01 2.96571523e-01 ... -2.94966668e-01 -6.63679898e-01 -4.60506320e-01] [ 2.76036151e-02 3.66429538e-01 3.08423955e-02 ... -1.35682857e+00 6.88817382e-01 -6.70295238e-01]] [[ 9.62293744e-01 -6.65854096e-01 1.69946939e-01 ... -3.13456595e-01 8.34207296e-01 -4.45910357e-02] [-9.63272035e-01 1.26134589e-01 6.07036471e-01 ... 6.26743257e-01 2.41282776e-01 -2.98325628e-01] [-5.80339074e-01 6.01709247e-01 7.43339539e-01 ... 5.83996475e-01 3.34076047e-01 1.39513277e-02] ... [-7.33500123e-01 7.30603635e-01 3.69419366e-01 ... 6.95864677e-01 -1.00989830e+00 1.40584648e-01] [-4.72984284e-01 4.89212632e-01 9.23683822e-01 ... 1.73642814e-01 7.20936120e-01 9.47927777e-03] [-9.48369443e-01 -3.48771662e-01 5.37860394e-02 ... -3.74742448e-01 -4.00849938e-01 -8.89818463e-03]] [[-5.76899827e-01 -1.32392895e+00 -5.21376953e-02 ... 7.06573248e-01 -1.66802657e+00 -6.39317989e-01] [ 8.55970234e-02 -1.09197639e-01 -1.06155419e+00 ... -2.71170378e-01 1.54705262e+00 1.69962382e+00] [-4.90484625e-01 5.43670356e-01 -9.81300697e-02 ... 7.09757805e-01 5.45748174e-01 -3.06338549e-01] ... 
[ 2.46409208e-01 -5.27404547e-01 -5.14114320e-01 ... 4.66217883e-02 3.71320844e-01 5.42177022e-01] [-1.08634986e-01 -4.43901002e-01 -1.00080049e+00 ... 2.92950183e-01 5.54848254e-01 -2.01788649e-01] [-3.18884403e-01 -5.14303803e-01 -2.70948768e-01 ... -5.12131155e-01 -1.68882847e+00 4.46760982e-01]] ... [[ 6.40265465e-01 6.83874488e-01 -9.44975391e-02 ... 3.90802234e-01 -1.13443661e+00 1.98170722e-01] [ 6.66415632e-01 1.79595029e+00 -2.58329093e-01 ... -4.10671949e-01 -1.69736147e-01 6.97261155e-01] [ 1.34564447e+00 2.21364021e-01 -2.39483923e-01 ... 3.43927354e-01 3.68302345e-01 1.30122495e+00] ... [ 5.21263592e-02 4.47526528e-03 -1.03419542e+00 ... -4.04032707e-01 9.96599317e-01 6.29119337e-01] [ 1.12109222e-01 -1.00548342e-01 5.01440108e-01 ... -1.14563465e+00 -6.41522050e-01 -4.98188473e-02] [ 3.99962544e-01 -1.10838425e+00 2.35642686e-01 ... 9.39123929e-02 -9.22097445e-01 -1.55049109e+00]] [[-3.51946324e-01 -1.23978496e+00 -8.75105143e-01 ... -1.79473743e-01 -2.36884221e-01 8.35429728e-02] [ 4.49876606e-01 8.31345096e-02 1.55708551e+00 ... 3.08034271e-01 -4.95275080e-01 4.61008877e-01] [ 9.00485694e-01 6.46554232e-02 -3.35503846e-01 ... 1.90225446e+00 -1.06734999e-01 -1.12930246e-01] ... [ 1.57057494e-01 4.72454756e-01 8.11329126e-01 ... 2.95875967e-01 7.96482801e-01 1.84509325e+00] [ 1.04729688e+00 4.84254122e-01 7.52931893e-01 ... 4.22866523e-01 4.93654132e-01 -7.53220856e-01] [ 1.34715199e+00 1.03026986e+00 -6.08518004e-01 ... 7.98999429e-01 -1.90648407e-01 2.13446066e-01]] [[ 6.37741148e-01 1.56222761e+00 -7.61922821e-02 ... -4.43509072e-01 6.82536900e-01 8.48380625e-01] [-1.71955419e+00 -2.41691634e-01 9.70854759e-01 ... -1.31847322e+00 4.43085462e-01 -4.46504027e-01] [ 9.82338369e-01 -4.17302072e-01 -1.74650401e-02 ... 1.04269075e+00 -4.18985076e-02 -5.62230386e-02] ... [-1.25151169e+00 6.66967332e-01 7.07376182e-01 ... -1.36580908e+00 -3.19465727e-01 3.69269013e-01] [-6.13238811e-01 -1.21546245e+00 -1.04668725e+00 ... 
-1.04448609e-02 9.76251289e-02 -5.71416020e-01] [-2.21256748e-01 3.63177627e-01 9.44120228e-01 ... -1.40401796e-01 -1.11561362e-02 -6.65318489e-01]]] [[[ 2.88627863e+00 1.20457661e+00 1.06497371e+00 ... -3.67529094e-01 -2.56440997e-01 2.24658108e+00] [-1.51471007e+00 8.30153406e-01 -5.91141403e-01 ... 3.81202221e-01 -1.53214443e+00 2.84735501e-01] [-6.38791502e-01 -1.19602099e-01 4.04916883e-01 ... -1.78890020e-01 -1.13445878e-01 -1.97149134e+00] ... [ 7.65056014e-01 -3.44218493e+00 -1.77233994e+00 ... -1.79023433e+00 6.73193574e-01 1.25385332e+00] [-2.10155869e+00 7.11325705e-01 1.98959517e+00 ... -2.29514450e-01 -3.40332985e+00 -2.44293630e-01] [ 5.95097244e-01 6.37990952e-01 3.91991645e-01 ... -6.45577073e-01 4.40476924e-01 2.97719908e+00]] [[ 3.92926145e+00 2.97114179e-02 4.98196661e-01 ... 3.73569995e-01 -4.65906262e-01 -3.99263687e-02] [-3.73355895e-01 1.41096199e+00 -2.18213852e-02 ... -5.08946121e-01 -1.50107294e-01 1.94222558e+00] [-1.42024183e+00 1.52351350e-01 2.92711091e+00 ... 5.47295332e-01 1.53558707e+00 3.42301130e+00] ... [ 2.13645673e+00 -4.06857193e-01 7.18729436e-01 ... 1.11425257e+00 -8.93642157e-02 1.45645058e+00] [-1.11104417e+00 1.19554842e+00 -3.78880143e-01 ... 9.46431905e-02 -2.96867549e-01 4.21326607e-01] [ 9.29713130e-01 -7.27664292e-01 5.45084178e-01 ... -3.00000727e-01 2.72935915e+00 -2.86955297e-01]] [[-5.73190153e-01 3.03544849e-01 -1.51263440e+00 ... -3.64484685e-03 1.51143551e-01 -6.60540760e-01] [ 4.26647484e-01 -1.48167241e+00 -1.06919837e+00 ... -8.38172913e-01 -8.26846778e-01 1.25934947e+00] [ 8.35998654e-02 -3.70687962e-01 2.14020833e-01 ... -1.36415982e+00 -2.52345490e+00 2.29791951e+00] ... [-1.40954208e+00 4.24949341e-02 -7.50640750e-01 ... -1.08480120e+00 -2.55845737e+00 5.56907296e-01] [-1.68174422e+00 -1.19849026e+00 -6.00689292e-01 ... -8.40320587e-01 3.02278757e+00 7.15129673e-01] [-6.79396212e-01 -6.91726685e-01 -1.81202874e-01 ... 2.37159893e-01 -9.66962755e-01 2.13140512e+00]] ... 
[[ 7.55259320e-02 2.84892946e-01 1.79055130e+00 ... 1.04406750e+00 2.69136250e-01 -1.10938907e+00] [-5.51880062e-01 -1.06691623e+00 -2.69970328e-01 ... 7.37005711e-01 -8.01716566e-01 2.78332710e+00] [-1.37628877e+00 1.60212731e+00 1.32894897e+00 ... 1.14980710e+00 7.72579968e-01 3.31008047e-01] ... [-2.30726048e-01 -8.48515153e-01 -5.71790814e-01 ... 8.52099508e-02 1.40388083e+00 -5.74066818e-01] [ 4.87921908e-02 -1.60296500e+00 -3.61723304e-01 ... 3.62120569e-01 -1.21483636e+00 -9.42042470e-02] [ 6.46186173e-01 1.09248924e+00 4.31415319e-01 ... -5.98815680e-01 -2.38989949e+00 -7.46117592e-01]] [[ 2.23494753e-01 3.87196720e-01 1.17473304e+00 ... 8.84125113e-01 9.85879540e-01 -7.97757864e-01] [-8.46684575e-01 -1.23378408e+00 2.65745580e-01 ... -2.39621431e-01 2.82793373e-01 -1.11672258e+00] [-3.33984423e+00 2.43665981e+00 -1.53826606e+00 ... -2.08435988e+00 -8.54467869e-01 3.46762240e-02] ... [ 7.30363801e-02 -1.65472257e+00 3.54925573e-01 ... -1.96403825e+00 -1.72374293e-01 1.41213965e+00] [ 1.92088711e+00 1.25325799e+00 -3.53540391e-01 ... -1.47511375e+00 -1.44598126e+00 -1.57161462e+00] [ 2.64165473e+00 1.38718450e+00 -1.16462684e+00 ... -1.37639415e+00 8.01481247e-01 -2.13304734e+00]] [[ 1.99851140e-01 -8.10691118e-01 1.14584875e+00 ... -1.34785163e+00 -1.96235144e+00 8.21461201e-01] [-6.79621398e-01 1.60401249e+00 1.21997714e+00 ... -7.89303064e-01 -1.06100571e+00 1.36529732e+00] [ 1.07858062e+00 7.78998077e-01 9.78710055e-01 ... 6.94357455e-01 3.23032618e-01 -6.73273742e-01] ... [-1.50643826e+00 -2.53762722e-01 6.39207363e-01 ... 5.23784518e-01 -2.47078240e-01 8.66184115e-01] [ 5.86683638e-02 2.01111412e+00 -7.55765796e-01 ... 7.94596136e-01 -3.24661779e+00 -1.13505018e+00] [-2.55196953e+00 -4.00725424e-01 5.49224257e-01 ... -5.51730335e-01 6.56325996e-01 7.09898323e-02]]] [[[ 5.22275306e-02 -8.43285620e-02 3.17648649e-01 ... 6.91476643e-01 -3.51135433e-01 -4.02556121e-01] [-3.04882586e-01 -2.44603813e-01 -2.47679561e-01 ... 
3.28946680e-01 5.00365019e-01 -1.95792392e-01] [-3.63530368e-01 2.16780156e-01 4.11984324e-01 ... -1.85601160e-01 -1.08401448e-01 -9.48412195e-02] ... [ 1.03327401e-01 9.17709693e-02 -4.47649769e-02 ... 2.79931784e-01 -3.94841224e-01 2.62926314e-02] [-4.42708492e-01 1.67493910e-01 3.52229774e-01 ... -1.97985724e-01 -1.42729253e-01 3.95277828e-01] [-2.68623322e-01 -1.79023191e-01 -2.74486244e-01 ... -3.96214157e-01 -4.10470456e-01 9.88259986e-02]] [[-6.22689985e-02 -2.03621343e-01 1.60196558e-01 ... 2.60470927e-01 4.16817129e-01 1.60100341e-01] [-9.89218131e-02 2.12399304e-01 3.30670178e-01 ... -7.33255967e-02 1.90826148e-01 2.64383912e-01] [-1.97846428e-01 -9.28846076e-02 -3.53362203e-01 ... 2.13807479e-01 -4.50745970e-01 3.00239176e-01] ... [-2.57210936e-02 2.62541324e-01 6.46844804e-02 ... -1.71902493e-01 -5.89205837e-03 1.09311983e-01] [ 6.43326938e-02 1.45319879e-01 4.92164046e-02 ... 2.27035880e-02 -3.07069033e-01 2.13082403e-01] [-1.09652244e-01 4.12508190e-01 -1.02265917e-01 ... -4.50742804e-02 -3.87265950e-01 3.61046568e-02]] [[ 5.38829565e-02 -7.33580291e-01 -2.36814514e-01 ... -2.20138624e-01 3.86639357e-01 -3.10218155e-01] [ 4.78746980e-01 2.07726806e-01 2.51085281e-01 ... 2.44369790e-01 -3.40959758e-01 -3.51996690e-01] [ 2.64947653e-01 1.29685760e-01 1.57158613e-01 ... 1.06831007e-01 8.21409225e-02 3.39864254e-01] ... [-2.83249974e-01 4.32639837e-01 3.78819168e-01 ... -2.38019694e-02 -5.83293840e-05 -2.31511928e-02] [ 8.46515372e-02 -1.56572461e-01 -1.32562086e-01 ... 1.57982893e-02 -1.80003688e-01 1.52048334e-01] [-2.17485934e-01 3.87515686e-02 1.82321280e-01 ... 7.42793754e-02 1.20347269e-01 -1.01834610e-01]] ... [[-1.87841132e-01 2.03836009e-01 -6.19857982e-02 ... -3.83804440e-02 2.17837319e-01 -7.44748652e-01] [ 2.47885570e-01 3.64905149e-02 4.58599664e-02 ... -1.20248951e-01 -4.56242025e-01 7.28332400e-01] [ 1.20947160e-01 2.27247044e-01 -1.22278422e-01 ... -1.64983258e-01 2.70873845e-01 4.21746701e-01] ... 
[-2.82769613e-02 2.74900764e-01 1.25787944e-01 ... 2.78294623e-01 3.80583793e-01 -1.48581922e-01] [ 2.11015776e-01 -3.24750505e-02 2.77321693e-02 ... -9.36103314e-02 1.72032952e-01 2.33593449e-01] [ 1.78005382e-01 3.08300871e-02 -2.27573395e-01 ... -1.63185850e-01 -1.99301243e-01 1.41817899e-02]] [[-9.81629342e-02 1.84106350e-01 -2.11083770e-01 ... -3.46362293e-01 1.52169093e-01 1.22455724e-01] [-8.46653730e-02 4.12694030e-02 -2.10655611e-02 ... -9.36767757e-02 2.41440594e-01 9.57395732e-02] [-5.53612038e-02 -2.78392524e-01 4.72252786e-01 ... -2.89710075e-01 -2.57151395e-01 9.65555981e-02] ... [ 2.66864508e-01 -5.23348190e-02 -1.04628485e-02 ... 2.03654140e-01 2.44647250e-01 3.74826230e-02] [ 5.97650521e-02 -1.16575792e-01 3.40290852e-02 ... -1.24446526e-01 3.62196922e-01 -5.74613884e-02] [ 2.15314664e-02 -5.78109741e-01 -1.40460089e-01 ... -3.45660448e-01 -5.72030663e-01 7.09027946e-02]] [[ 3.45818698e-01 1.57288074e-01 1.04677893e-01 ... -4.13188599e-02 2.22467884e-01 -1.70785502e-01] [-4.98546138e-02 -1.32292390e-01 -3.39466244e-01 ... 1.97126180e-01 -9.53900516e-02 2.21385777e-01] [-1.74879491e-01 -6.53768182e-02 -3.54750365e-01 ... 2.40911454e-01 3.63882095e-01 -7.50924945e-01] ... [-1.86677843e-01 8.46037939e-02 -1.84853315e-01 ... 1.30161464e-01 -1.28872812e-01 9.51208547e-02] [-2.18512356e-01 -1.15673281e-01 -3.49305809e-01 ... 1.19567566e-01 -2.27358788e-01 -2.07415611e-01] [-2.47050866e-01 -4.47313845e-01 -6.83887675e-02 ... 5.62740266e-01 -3.35774839e-01 -9.27595720e-02]]] [[[ 8.61556470e-01 2.53962845e-01 5.48189700e-01 ... -1.76108742e+00 -2.09459281e+00 -2.73873389e-01] [-1.53767669e+00 1.18983674e+00 6.84584260e-01 ... -9.59500074e-01 -9.98835027e-01 -2.01248813e+00] [-7.58242249e-01 1.22250485e+00 1.03542936e+00 ... -9.27902162e-01 3.45555156e-01 4.68309104e-01] ... [-1.80588555e+00 -2.03729868e+00 -6.94635391e-01 ... 3.40440691e-01 6.26823530e-02 5.98460436e-01] [-7.36849248e-01 -3.52963805e+00 -7.21317410e-01 ... 
-2.23911190e+00 6.08238161e-01 -3.04679585e+00] [ 1.72332227e+00 -2.25384045e+00 -1.44575596e+00 ... -1.13600612e+00 1.98000169e+00 -9.32660639e-01]] [[ 1.09654272e+00 -3.04632998e+00 -9.78717566e-01 ... 1.27459979e+00 5.01671731e-02 1.09367043e-01] [-5.96598566e-01 6.47399247e-01 -2.70434093e+00 ... 3.73694211e-01 -3.08987290e-01 4.18992460e-01] [ 1.56024075e+00 1.43584585e+00 3.95558500e+00 ... 6.97562575e-01 -5.66246748e-01 4.57123369e-01] ... [ 3.38668084e+00 7.23706007e-01 -1.47605896e-01 ... 1.64999759e+00 1.50432527e+00 -4.54646015e+00] [-2.55786920e+00 5.58858812e-01 -5.37179232e-01 ... -1.02493858e+00 -1.93862832e+00 -2.23770881e+00] [-1.32959950e+00 1.81471753e+00 1.40772498e+00 ... 6.19584799e-01 3.79859638e+00 -1.25281584e+00]] [[-1.61525667e-01 -1.15615678e+00 -8.65303725e-02 ... 9.95380759e-01 -5.37240207e-01 -9.70974982e-01] [ 1.11478078e+00 -7.99226463e-01 -3.32915878e+00 ... 1.32699743e-01 9.03220952e-01 -1.27463317e+00] [-2.98344254e+00 -1.26266301e+00 -1.60946652e-01 ... 9.74906266e-01 -2.09651053e-01 3.52983773e-01] ... [-5.22446275e-01 9.89598274e-01 8.80802214e-01 ... -1.45954883e+00 -8.05498287e-02 1.05578220e+00] [-5.84711194e-01 -8.83214891e-01 1.38153565e+00 ... 7.06394374e-01 9.15977716e-01 8.11117887e-02] [-2.46233821e+00 -2.66429281e+00 2.05384231e+00 ... -1.01608217e+00 2.02344799e+00 -5.30085862e-01]] ... [[ 1.11916021e-01 1.64211607e+00 6.33331090e-02 ... 7.30725288e-01 -1.54983079e+00 1.20847428e+00] [-2.36163720e-01 1.69821894e+00 -5.09125628e-02 ... -6.02893829e-01 -1.18587387e+00 9.56475377e-01] [-8.34903300e-01 -3.24530378e-02 -9.78370547e-01 ... 7.41780281e-01 -8.89106333e-01 -1.91566765e-01] ... [-6.11955822e-02 -4.18744296e-01 6.05160415e-01 ... -1.35150111e+00 -8.27550948e-01 -1.87279880e+00] [-5.83819568e-01 -1.48414898e+00 -9.90878761e-01 ... -2.90686756e-01 -5.22250593e-01 -1.93847537e+00] [ 6.83436573e-01 5.83352327e-01 2.93387562e-01 ... 
1.14600822e-01 -6.21622145e-01 2.21481252e+00]] [[ 9.41174865e-01 1.67408168e+00 -7.38909781e-01 ... -7.21791089e-01 1.61513579e+00 3.83312523e-01] [ 5.81732988e-01 6.85623646e-01 -1.93896401e+00 ... -7.58036733e-01 3.29684556e-01 -2.98562479e+00] [-1.05200768e-01 -5.08428097e-01 -1.19752467e+00 ... -9.70548868e-01 -2.87580466e+00 3.41104794e+00] ... [-1.55403829e+00 -1.89459007e-02 -1.79593384e+00 ... 6.11720145e-01 1.36561191e+00 -1.10714245e+00] [-4.81831223e-01 -6.73331439e-01 -8.11093301e-02 ... -9.48609054e-01 3.12425375e+00 -1.17435241e+00] [ 1.59155214e+00 -1.39530170e+00 -5.82144797e-01 ... 1.00395095e+00 9.97609317e-01 8.20392728e-01]] [[-2.28435087e+00 -1.08120668e+00 2.84741831e+00 ... 1.72090614e+00 -2.36548185e+00 -2.63965309e-01] [-2.90812552e-01 -1.71314323e+00 8.82805943e-01 ... 1.48990238e+00 -3.07335925e+00 -3.44211006e+00] [ 1.89176929e+00 1.37054846e-01 -5.57435751e-01 ... -3.27727497e-01 2.79536247e+00 1.54540527e+00] ... [ 1.29369557e+00 -9.38827455e-01 3.02615905e+00 ... 1.57392359e+00 -7.11050093e-01 1.25928128e+00] [ 1.37501597e-01 -9.32922661e-02 -9.70463455e-01 ... -6.48228288e-01 -1.05978990e+00 -4.55330372e-01] [ 7.03273952e-01 1.74166763e+00 4.99235243e-01 ... -1.09722364e+00 2.74732649e-01 5.80790937e-01]]] [[[ 1.06634624e-01 -9.33454111e-02 1.20901518e-01 ... 9.36609805e-02 -4.19285521e-02 1.10683534e-02] [ 1.51666120e-01 1.19671812e-02 6.73556030e-02 ... -7.57535920e-02 -3.78632657e-02 1.21490598e-01] [-9.59543064e-02 6.27004579e-02 -4.11978625e-02 ... 8.95903781e-02 1.30833030e-01 7.21843988e-02] ... [ 1.52936727e-01 -2.72383280e-02 9.40691680e-03 ... -1.05701178e-01 5.76272793e-02 6.59083501e-02] [-8.24590400e-02 -6.16999343e-02 3.83082801e-03 ... -7.15286061e-02 -1.05154432e-01 2.93446798e-02] [ 5.10461256e-03 6.81120381e-02 1.72015391e-02 ... 8.18524510e-02 -1.32888794e-01 2.65759788e-02]] [[-1.10888362e-01 -3.31758112e-02 1.85703650e-01 ... 
-2.29276329e-01 1.27565533e-01 -8.29385146e-02] [ 9.33543816e-02 4.83148918e-02 1.34798307e-02 ... 4.68604080e-02 -6.32684007e-02 5.74455261e-02] [ 1.83425546e-02 -2.25764997e-02 1.68369085e-01 ... 1.26502469e-01 1.75091922e-01 1.56914860e-01] ... [-2.19554990e-03 -3.76308411e-02 3.50298807e-02 ... 8.81096199e-02 2.87000723e-02 -1.74010377e-02] [ 9.02313665e-02 1.31663820e-02 1.77781358e-02 ... 1.92434132e-01 1.08412482e-01 1.85604423e-01] [ 2.71132112e-01 -1.25546813e-01 -4.44584154e-02 ... 8.33506808e-02 1.14959225e-01 -4.39064056e-02]] [[ 1.34044752e-01 6.10192083e-02 -1.79314628e-01 ... -3.44776213e-02 -7.74046183e-02 -2.18826011e-01] [-5.18025178e-03 3.77812348e-02 6.79116771e-02 ... -9.39071178e-03 5.65424897e-02 3.83826524e-01] [-1.70810997e-01 -2.31518492e-01 4.03733030e-02 ... -6.10613376e-02 6.51502833e-02 -2.76629418e-01] ... [ 6.02686033e-02 -7.17154518e-02 5.74485511e-02 ... 8.72057825e-02 4.93839793e-02 -7.35671399e-03] [ 3.96245681e-02 -9.73417088e-02 -2.26971179e-01 ... -1.47202983e-01 -5.30933887e-02 1.06159970e-01] [ 2.58796252e-02 -1.18283398e-01 -1.02132291e-01 ... -2.99378522e-02 6.57106861e-02 2.90367566e-03]] ... [[ 3.96620221e-02 -6.50058985e-02 -3.52418363e-01 ... -1.02333717e-01 -3.52758676e-01 1.04071096e-01] [ 1.17407572e-02 -3.95104215e-02 -6.47515804e-02 ... -2.14732941e-02 -1.61583528e-01 6.35955632e-02] [ 1.28487274e-01 3.19236778e-02 8.58111158e-02 ... -8.41837749e-02 8.33270773e-02 1.54843861e-02] ... [ 2.26747319e-01 -8.59395638e-02 3.32528502e-02 ... 1.85619757e-01 -5.25457896e-02 -1.49518013e-01] [ 1.01800092e-01 9.11171883e-02 1.83640253e-02 ... -6.56480389e-03 -3.72067996e-04 -6.38704188e-03] [-2.16456756e-01 6.33996865e-03 7.61603564e-02 ... -1.78545378e-02 1.41577214e-01 -1.44901246e-01]] [[-1.44283921e-01 1.50874645e-01 7.36048073e-02 ... -6.48515299e-02 9.22761261e-02 1.72259420e-01] [ 4.47104909e-02 -2.95725768e-03 -9.13823172e-02 ... 
8.00810382e-03 8.43815729e-02 2.15752438e-01] [-5.38066849e-02 -1.13384776e-01 -9.88877639e-02 ... 5.15325889e-02 -4.41113673e-02 -2.38180719e-02] ... [ 8.47618356e-02 6.47647399e-03 -6.65775388e-02 ... 8.36641043e-02 9.42344964e-02 -6.31571636e-02] [ 3.68110053e-02 8.38889107e-02 4.41114530e-02 ... 5.25944866e-02 6.09960705e-02 -1.88506246e-01] [-9.13412571e-02 -2.42246822e-01 -2.52618920e-03 ... 1.54802561e-01 -1.44619063e-01 4.65857759e-02]] [[-5.92201436e-03 -2.64517348e-02 2.83824373e-03 ... -9.97780636e-03 -8.24433658e-03 -1.03936486e-01] [-1.04546649e-02 2.93625500e-02 1.01102129e-01 ... 1.58437282e-01 5.82828894e-02 6.23578951e-02] [-1.07982054e-01 -5.72998822e-02 3.54101270e-01 ... -2.10255664e-02 2.79608339e-01 -1.77711070e-01] ... [-1.28820106e-01 2.04918049e-02 -2.04605274e-02 ... 2.50375327e-02 5.17254807e-02 -3.44369560e-04] [-3.27881938e-03 -1.34234965e-01 1.06953405e-01 ... -6.87781498e-02 -6.40772283e-02 2.24543452e-01] [-6.97420910e-02 -1.17829911e-01 1.15366459e-01 ... 7.26021901e-02 1.09832264e-01 -2.19390937e-03]]] [[[ 1.00620151e+00 3.26634258e-01 -1.51479557e-01 ... 1.04108930e+00 -2.17572227e-01 -5.40534794e-01] [-1.10678434e+00 1.83603525e-01 6.12676084e-01 ... -4.01420653e-01 9.97522354e-01 -4.74322617e-01] [ 2.60281175e-01 -6.08453155e-01 -3.71584922e-01 ... -2.59311676e-01 -6.40251786e-02 8.94156247e-02] ... [-8.42834935e-02 1.92785263e-01 1.12541802e-01 ... 1.38550615e+00 -2.58366555e-01 8.06671679e-02] [-1.81438291e+00 -5.94301462e-01 1.39579862e-01 ... -2.79619783e-01 -6.64127395e-02 5.63338041e-01] [-6.67921901e-02 6.61426127e-01 -4.90286499e-01 ... 1.37954637e-01 -4.55338098e-02 5.31649411e-01]] [[-8.85549784e-01 4.92294971e-03 -4.50951040e-01 ... 5.70996940e-01 -7.97409594e-01 -2.37184837e-01] [ 8.43872905e-01 7.51654148e-01 -1.92033693e-01 ... 4.95138228e-01 3.55227202e-01 1.06440403e-01] [-1.07655108e+00 2.44790554e-01 5.11944771e-01 ... -2.94978440e-01 -5.41079082e-02 -4.05218393e-01] ... 
[-1.44565642e-01 2.83886403e-01 5.36079586e-01 ... 1.12035079e-02 1.03484738e+00 -4.52681780e-02] [ 5.40983498e-01 4.78618950e-01 -1.76346377e-02 ... 6.99608102e-02 -1.27687082e-01 -5.42609096e-02] [-8.74872133e-02 1.54438749e-01 -1.18613076e+00 ... -1.19398248e+00 -8.74657631e-02 3.30388337e-01]] [[-4.59489703e-01 -7.36522138e-01 4.85642739e-02 ... -7.84754217e-01 -3.35881919e-01 4.89322633e-01] [-1.81237206e-01 -5.79538465e-01 5.56378961e-01 ... -1.99599564e-01 -8.38361204e-01 7.89503038e-01] [-8.95061672e-01 4.01794314e-01 -1.08781368e-01 ... -1.15957022e+00 1.42568454e-01 9.90955159e-02] ... [-8.06917608e-01 -4.90672708e-01 4.41637456e-01 ... 7.33593479e-02 -1.86292425e-01 7.40284920e-02] [ 5.57211041e-01 -5.63851655e-01 -1.14355481e+00 ... -1.24307625e-01 -6.17502570e-01 -5.25313437e-01] [-8.70591626e-02 -6.38762891e-01 -4.69773561e-02 ... -3.90713960e-01 1.22462943e-01 1.67980269e-01]] ... [[-5.04248083e-01 3.72590095e-01 7.00669229e-01 ... 2.73841381e-01 -6.69372499e-01 -4.91665363e-01] [-7.01110482e-01 -9.08649385e-01 -3.86098534e-01 ... -3.67179394e-01 9.96331275e-01 -7.64158368e-01] [-1.98505878e-01 -3.71443659e-01 -1.23058982e-01 ... 1.92550689e-01 -3.13241214e-01 3.18110436e-01] ... [-6.44979835e-01 -7.45717704e-01 -3.61518741e-01 ... 9.57747042e-01 -2.66146481e-01 -1.02365685e+00] [ 1.08503866e+00 3.15584958e-01 9.22334611e-01 ... -6.51692972e-02 -4.39639777e-01 -4.25746739e-01] [-1.27320528e-01 -3.24208033e-03 -8.27626765e-01 ... -3.17236781e-01 -4.95051324e-01 -1.30261421e+00]] [[-6.09339535e-01 7.50712991e-01 4.69971359e-01 ... 1.77416354e-01 -4.31270413e-02 5.57754993e-01] [ 9.88409370e-02 8.60757411e-01 -3.90615910e-02 ... -6.80524766e-01 2.62346685e-01 8.05848658e-01] [-2.74416566e-01 1.21812224e-01 9.64426398e-02 ... -7.01870799e-01 -8.25325072e-01 -8.12804699e-02] ... [ 9.47140217e-01 -9.21081841e-01 6.44733131e-01 ... 8.13045323e-01 4.22023505e-01 4.53110963e-01] [-6.15565181e-02 4.00735766e-01 3.87465239e-01 ... 
5.76904155e-02 8.10497999e-01 -4.25977409e-01] [ 8.36311579e-02 -7.73237228e-01 1.94600001e-01 ... -5.86229146e-01 -2.24130809e-01 3.07863444e-01]] [[-1.13350533e-01 1.61468804e-01 4.98405360e-02 ... -4.62767839e-01 -6.79004073e-01 2.56883919e-01] [-4.59665693e-02 -9.13846374e-01 -8.48991811e-01 ... 8.59537899e-01 -3.03369164e-01 -3.72437865e-01] [-3.06713551e-01 3.55430484e-01 -1.08350992e+00 ... -5.46084523e-01 -1.06691313e+00 5.26034176e-01] ... [-6.66284189e-02 -4.37523931e-01 4.81427670e-01 ... -1.62090257e-01 -5.79045773e-01 -2.22281158e-01] [-1.01324654e+00 -6.56700805e-02 1.22812577e-01 ... -2.99904048e-01 -4.28146392e-01 1.73785836e-01] [ 6.45881146e-02 -3.05070251e-01 1.41531646e-01 ... 5.85856020e-01 6.74748778e-01 -4.05901343e-01]]]] [[[[ 9.14591551e-01 8.60337317e-01 -8.50565016e-01 ... 6.47793293e-01 -9.24926519e-01 -5.64092457e-01] [-1.70958355e-01 2.53724635e-01 -2.75524184e-02 ... 1.44071773e-01 2.34288439e-01 8.22374046e-01] [-9.33302343e-01 -1.28272384e-01 -2.59442925e-01 ... -8.99905682e-01 8.57265055e-01 2.52691843e-02] ... [ 8.01408291e-01 -2.17454463e-01 1.46398902e-01 ... -1.52973592e+00 1.61394805e-01 3.52006517e-02] [-4.81910110e-01 -5.44603884e-01 -1.24682570e+00 ... 9.93860483e-01 -5.43681905e-02 4.87554729e-01] [-2.80120168e-02 -3.82463574e-01 -9.95844722e-01 ... -2.14321375e-01 1.01008511e+00 5.08233428e-01]] [[-5.20602278e-02 4.59590197e-01 -1.38953805e-01 ... 8.10556948e-01 -2.93522865e-01 -2.49875814e-01] [-8.50259244e-01 -1.67263821e-01 1.21214676e+00 ... -1.55978847e+00 -1.00119662e+00 -3.92915785e-01] [-9.69024599e-01 1.07190871e+00 -2.97899485e-01 ... 2.69081712e-01 -4.28309262e-01 -8.13587546e-01] ... [-5.33030272e-01 -4.32892561e-01 3.79485011e-01 ... 3.11293453e-01 1.46787450e-01 3.97792086e-02] [-4.88170594e-01 -5.76293290e-01 -1.02033818e+00 ... 9.85545292e-02 4.53874230e-01 -1.62361848e+00] [ 2.07405820e-01 1.55475509e+00 -2.18374971e-02 ... 
-7.30071843e-01 1.41790330e-01 4.01399910e-01]] [[ 4.85274076e-01 -5.78870058e-01 8.18712115e-01 ... 1.05710119e-01 -3.59101355e-01 1.63212463e-01] [-1.02414954e+00 8.09009969e-01 -7.97297001e-01 ... -7.40692496e-01 -2.73776919e-01 7.56090760e-01] [-1.16302155e-01 -3.32703114e-01 -7.32429922e-01 ... -3.47441941e-01 -3.62638563e-01 1.20030546e+00] ... [-4.74573299e-02 -8.29476118e-01 -1.44149050e-01 ... -5.88705838e-01 -8.19492340e-02 3.80145073e-01] [ 6.05932511e-02 1.45096254e+00 -4.44688827e-01 ... 3.01390439e-01 -1.41038573e+00 2.50906497e-02] [ 6.26982212e-01 6.71656251e-01 5.39382994e-01 ... 1.79278493e+00 2.55161583e-01 -1.09618790e-01]] ... [[-2.12267384e-01 -3.40004742e-01 -4.66806680e-01 ... -5.39224386e-01 5.04606627e-02 -2.18527406e-01] [-7.05192149e-01 -3.03390324e-01 -1.54597151e+00 ... 4.42963332e-01 -1.10571015e+00 4.55208942e-02] [ 1.01986682e+00 4.46692824e-01 -1.14902580e+00 ... 1.41086668e-01 -1.66677952e-01 -5.97478747e-02] ... [ 4.39472906e-02 -2.21813306e-01 -2.46782318e-01 ... -3.89644712e-01 1.66537300e-01 -2.70350695e-01] [-1.83671802e-01 -1.34195268e-01 -5.58937907e-01 ... 4.25732285e-01 8.81127775e-01 3.68973874e-02] [ 1.94045439e-01 -2.83894479e-01 -1.33457375e+00 ... 8.36414397e-01 -4.90438491e-01 4.50661778e-01]] [[-8.05592835e-01 -3.70682508e-01 -8.45172331e-02 ... 8.67726862e-01 7.27771044e-01 -3.79305840e-01] [ 8.55085254e-02 -1.71532834e+00 -4.42345440e-01 ... -1.03938341e+00 4.14705485e-01 7.96767354e-01] [-3.49257916e-01 2.25633875e-01 -5.87327927e-02 ... -8.64258468e-01 4.61724311e-01 7.15692267e-02] ... [ 1.74603379e+00 7.44354188e-01 2.32541054e-01 ... -9.21798348e-01 -2.74231702e-01 3.31921130e-01] [ 3.66542369e-01 -6.71168864e-01 4.23781693e-01 ... -1.94467641e-02 6.70166671e-01 -8.80448759e-01] [ 5.76258719e-01 2.17146024e-01 -9.08457816e-01 ... 5.84593892e-01 7.20186889e-01 -6.64189994e-01]] [[ 3.50614756e-01 -6.20809197e-02 4.31690425e-01 ... 
-8.77209961e-01 -1.38218164e+00 -5.09987891e-01] [ 4.74602878e-01 6.55873775e-01 -5.69829166e-01 ... 2.47951210e-01 7.15641379e-01 -8.97152066e-01] [ 5.38597643e-01 5.30417562e-01 5.41061938e-01 ... -5.44022501e-01 -8.50061998e-02 1.40020445e-01] ... [ 9.57193673e-02 1.27801716e+00 1.03374235e-01 ... -1.20579672e+00 3.15728813e-01 -1.17952383e+00] [-7.90947199e-01 -4.96380985e-01 -9.04852927e-01 ... -8.93466711e-01 4.18538451e-01 7.67516851e-01] [ 2.00065523e-01 -1.16743350e+00 -5.23315132e-01 ... 5.13504863e-01 -2.64918238e-01 9.58748579e-01]]] [[[-8.32442418e-02 8.68704975e-01 1.23439407e+00 ... 4.77323949e-01 3.87976646e+00 1.34895182e+00] [ 2.19453788e+00 -1.33976772e-01 7.11753726e-01 ... -7.51335740e-01 2.98347533e-01 -1.78136992e+00] [-3.57227397e+00 1.69132113e+00 1.29048038e+00 ... 1.31450093e+00 -1.32711637e+00 4.10659730e-01] ... [-4.72271472e-01 1.11176574e+00 -1.55954075e+00 ... 3.74636292e-01 1.43195462e+00 1.97844052e+00] [-1.67831194e+00 1.98216581e+00 -5.66906035e-01 ... 9.91923690e-01 2.87571818e-01 -2.35184598e+00] [ 1.83186162e+00 6.34429693e-01 -6.17570400e-01 ... 1.13930094e+00 -2.42770538e-01 1.52797127e+00]] [[ 4.70182717e-01 2.81030923e-01 7.47434139e-01 ... -1.47537673e+00 -2.40043330e+00 1.08283174e+00] [ 1.01436067e+00 -1.85218525e+00 1.86530054e+00 ... 1.15120053e+00 -1.55907845e+00 1.21499288e+00] [-4.12648916e-01 1.24169672e+00 -1.21512517e-01 ... -1.90068185e-01 -1.09511268e+00 2.70682025e+00] ... [ 1.40916848e+00 -1.81502116e+00 7.69947350e-01 ... 6.55951917e-01 8.15383315e-01 -2.73409534e+00] [ 8.02437842e-01 -5.10257006e-01 3.63213867e-01 ... 4.66692060e-01 -1.09889793e+00 9.61095870e-01] [ 1.89131260e+00 -6.90564990e-01 1.82006717e+00 ... 2.17169508e-01 8.18235517e-01 1.70857501e+00]] [[ 2.76961893e-01 -7.63672544e-03 2.65085268e+00 ... -1.43353939e+00 1.64837241e+00 1.56082392e+00] [-1.55602491e+00 -1.70752871e+00 -1.46024752e+00 ... 
1.55416584e+00 1.10785341e+00 -1.47325003e+00] [ 2.14880037e+00 6.29932940e-01 -4.44979429e-01 ... 1.82535559e-01 -5.05073726e-01 -3.31733078e-02] ... [-4.18591142e-01 -1.32498384e+00 -3.91068965e-01 ... 2.22515178e+00 -4.63940978e-01 -1.62860966e+00] [-4.92291063e-01 -7.50258803e-01 -1.10457993e+00 ... 1.46610069e+00 1.86481702e+00 -1.08835971e+00] [ 2.04657006e+00 1.33726525e+00 9.20404017e-01 ... -4.81344432e-01 9.94255722e-01 -1.15328574e+00]] ... [[-8.22119117e-02 -8.59722346e-02 2.75984669e+00 ... 7.35604405e-01 2.37772560e+00 1.67883790e+00] [ 1.45920277e+00 8.12292993e-01 5.24242043e-01 ... -1.64162242e+00 -4.62032646e-01 -1.58020747e+00] [ 2.52478409e+00 -1.07143533e+00 2.14582705e+00 ... 1.73347640e+00 -1.56722748e+00 1.92322358e-01] ... [ 3.03055882e-01 -6.29976273e-01 5.09633541e-01 ... -9.51435089e-01 3.82258564e-01 -1.66530788e+00] [-6.61371231e-01 -2.97045660e+00 -1.30281556e+00 ... -2.75963149e-03 1.12315500e+00 1.30469203e-01] [-4.88734126e-01 -1.10418811e-01 1.37171054e+00 ... 7.71165907e-01 9.27930653e-01 -9.31557655e-01]] [[ 5.15814424e-01 5.07668138e-01 -3.63710858e-02 ... -5.85517466e-01 -8.56696129e-01 -2.20969772e+00] [-1.14647532e+00 1.25351095e+00 2.42097840e-01 ... -1.11837484e-01 -7.49149263e-01 1.10077798e+00] [ 1.43907678e+00 -1.89014375e+00 -1.98895550e+00 ... -5.50592899e-01 7.79476538e-02 -1.18730068e+00] ... [ 3.14146996e-01 4.90259200e-01 -6.48038566e-01 ... 5.51402628e-01 1.09742093e+00 6.26747012e-01] [ 1.49829531e+00 -5.85735202e-01 6.10443279e-02 ... -1.31187642e+00 -3.85090172e-01 -1.67711103e+00] [-2.49870110e+00 -2.42804050e+00 -9.46894944e-01 ... -6.33871317e-01 3.10847664e+00 -1.33882284e+00]] [[ 2.37772608e+00 9.24064755e-01 1.78169429e+00 ... -2.11266831e-01 -6.40076637e-01 2.23586023e-01] [-3.85210127e-01 4.57516909e-01 3.64721322e+00 ... -2.09285879e+00 6.32338464e-01 2.22059464e+00] [-8.12301338e-01 -1.75651491e+00 -3.32685447e+00 ... 1.84661376e+00 1.06728208e+00 2.04264373e-01] ... 
[ 1.07593226e+00 -1.00295126e+00 -6.88972294e-01 ... 1.40185595e+00 -5.67552805e-01 -2.76112676e-01] [ 9.89197344e-02 1.50135291e+00 1.47726250e+00 ... -6.15454674e-01 9.81904566e-01 -2.01138422e-01] [-4.95112985e-01 2.50895862e-02 2.65733743e+00 ... 6.16911173e-01 2.65867996e+00 1.02820945e+00]]] [[[-6.07800603e-01 -4.79476303e-02 -1.24900214e-01 ... 2.20787898e-01 2.00503767e-01 -5.64931154e-01] [ 1.74440160e-01 3.05007249e-02 -3.41008902e-02 ... -8.06374177e-02 -3.77787463e-02 6.61734790e-02] [-3.65947753e-01 -1.65634722e-01 2.55403165e-02 ... 9.79346130e-03 -1.90949500e-01 4.42972004e-01] ... [ 5.54786503e-01 8.64122584e-02 3.90754938e-01 ... 3.01436245e-01 5.20009637e-01 -1.82838649e-01] [-1.11832796e-02 -3.20917875e-01 -7.63963070e-03 ... 2.97801435e-01 -1.75638273e-02 2.45534834e-02] [ 2.54292399e-01 2.61645287e-01 -7.97072053e-02 ... -1.85121343e-01 1.75678059e-01 -3.83132219e-01]] [[-1.30997419e-01 3.48330706e-01 -2.59657234e-01 ... -3.06038201e-01 -2.42587730e-01 3.80679548e-01] [-1.96968645e-01 1.28137290e-01 -7.72407278e-02 ... 3.93360227e-01 -4.57474470e-01 -1.14195123e-01] [ 2.37074971e-01 1.09163309e-02 4.42332804e-01 ... -1.17289066e-01 2.20539704e-01 -2.98686832e-01] ... [ 2.10306704e-01 2.29228199e-01 3.27112705e-01 ... 7.14331642e-02 -3.81923884e-01 2.02842563e-01] [-1.82564020e-01 -3.41758430e-01 1.63215756e-01 ... 8.11538659e-03 2.38171071e-01 2.15123191e-01] [-8.64987634e-03 2.79229492e-01 -9.25289541e-02 ... 4.03741211e-01 -1.05315857e-01 5.38506806e-01]] [[-7.44062245e-01 -2.79823005e-01 2.88980335e-01 ... -9.29464176e-02 -3.12980384e-01 2.73801029e-01] [ 1.04679391e-01 -5.86259142e-02 -1.45673275e-01 ... -1.65478364e-01 2.07456406e-02 -4.66887169e-02] [-1.92593277e-01 6.00529499e-02 -3.03021669e-02 ... 5.94912805e-02 -2.42515340e-01 -4.51380014e-01] ... [-3.28516155e-01 -1.44290775e-01 1.14206307e-01 ... -2.48334572e-01 8.46808702e-02 -8.04994255e-03] [ 1.56433508e-01 -1.44763529e-01 3.08464020e-01 ... 
-3.74212444e-01 -2.41725117e-01 -6.75014853e-01] [-3.55734751e-02 -2.88058165e-02 4.33794521e-02 ... -4.87989128e-01 -1.73130035e-01 1.35223269e-01]] ... [[-2.48731986e-01 -5.79977855e-02 1.88751653e-01 ... -1.22538336e-01 -3.90428364e-01 -4.68625687e-02] [-4.95352477e-01 -2.70212770e-01 -1.16426155e-01 ... 1.86903346e-02 1.67331815e-01 -8.31666738e-02] [-2.05428945e-03 -2.01547220e-01 1.01664722e-01 ... -2.22272873e-01 -4.28411886e-02 2.23582700e-01] ... [ 4.68710028e-02 4.26389948e-02 1.56534076e-01 ... -2.67714143e-01 -8.66713524e-02 2.42596984e-01] [-2.30881885e-01 -3.92156154e-01 2.29790956e-01 ... -2.04637647e-01 -2.54973710e-01 -2.58141141e-02] [-9.89810303e-02 1.04550146e-01 -3.69900882e-01 ... 3.13102037e-01 -3.93172614e-02 -2.10958764e-01]] [[-1.55291021e-01 5.69108486e-01 1.97275594e-01 ... -2.99944729e-01 4.70314056e-01 -1.99669793e-01] [-4.57294255e-01 1.42769590e-01 1.81202311e-02 ... 9.82514098e-02 -8.89866576e-02 4.92870994e-02] [ 8.41743574e-02 -1.23807631e-01 -1.36207402e-01 ... 3.41705680e-01 -3.11839610e-01 -6.86304688e-01] ... [-2.74282438e-03 -7.87088275e-03 3.38095635e-01 ... 6.98266830e-03 9.15543735e-02 5.49141727e-02] [ 1.89348280e-01 5.67369610e-02 1.95948362e-01 ... 1.30039053e-02 -3.34018677e-01 -3.33469175e-02] [-1.33325100e-01 -2.23007068e-01 1.68964311e-01 ... -1.72967806e-01 -2.76352018e-01 3.78332399e-02]] [[ 8.33005905e-02 9.11616459e-02 -8.90112817e-02 ... 2.98219919e-01 5.32331765e-02 4.16489691e-01] [ 6.06106281e-01 1.88710973e-01 2.77847975e-01 ... 2.26799235e-01 2.50492781e-01 -9.43688899e-02] [-3.36062402e-01 4.31081429e-02 1.83494925e-01 ... -2.58415192e-02 -2.33575001e-01 -1.19667083e-01] ... [ 2.02700227e-01 9.82791707e-02 1.61350086e-01 ... -4.02405784e-02 -4.88472790e-01 -2.48462200e-01] [ 4.23478663e-01 -7.69076645e-02 2.40299568e-01 ... -2.23844394e-01 -3.01141083e-01 -1.78818122e-01] [ 2.18423635e-01 5.09934500e-02 3.38750422e-01 ... 
-1.62633255e-01 -1.28692240e-01 -1.44015020e-02]]] [[[ 1.43354845e+00 4.43220329e+00 -9.90782022e-01 ... 2.52737188e+00 -8.90118420e-01 3.11051273e+00] [-6.09543800e-01 3.61683160e-01 1.96662262e-01 ... -5.19476295e-01 -7.38826931e-01 1.38557303e+00] [-1.42820859e+00 3.34418803e-01 -2.26162291e+00 ... 2.12343976e-01 1.78548384e+00 2.71744013e-01] ... [ 1.25683618e+00 2.34983468e+00 -1.62624824e+00 ... 2.18137786e-01 6.35436952e-01 -9.99840200e-02] [-3.00646687e+00 6.13506464e-03 1.13118613e+00 ... -1.13244367e+00 1.16110718e+00 1.00845253e+00] [-7.97085285e-01 -2.94675851e+00 -8.88576090e-01 ... 4.92893249e-01 -8.71418893e-01 1.06760597e+00]] [[-2.26014996e+00 -6.37448132e-01 -2.28777200e-01 ... 2.78979570e-01 1.55921662e+00 2.21720552e+00] [-4.38994467e-01 7.45777905e-01 2.22202867e-01 ... 1.03256536e+00 7.08017766e-01 1.70612484e-01] [ 1.09866396e-01 -1.53261757e+00 1.31265998e+00 ... -6.13051414e-01 9.40836668e-01 8.76949489e-01] ... [-1.90586138e+00 -2.90401489e-01 1.05829978e+00 ... 4.78685319e-01 9.66731459e-02 -1.75087535e+00] [ 9.56004560e-02 7.13817954e-01 -2.06571078e+00 ... 2.16142070e-02 7.63029397e-01 1.03525825e-01] [-7.14481808e-03 3.94295245e-01 5.19969642e-01 ... -2.08400393e+00 -2.22606301e+00 6.21060729e-01]] [[ 7.30959892e-01 5.47229528e-01 9.47915792e-01 ... -2.62767911e+00 4.61173058e+00 1.46967888e+00] [-6.25972211e-01 1.67519256e-01 -7.03772128e-01 ... 1.73636937e+00 4.79084492e-01 3.62536192e+00] [-7.63482332e-01 1.50156009e+00 -5.49848795e-01 ... 8.27693164e-01 8.40105712e-01 3.13434869e-01] ... [ 1.67975652e+00 -2.70490766e-01 -2.23695993e+00 ... -5.60907900e-01 8.60880196e-01 -3.06160510e-01] [-1.34556735e+00 -1.17024124e+00 -1.25453806e+00 ... 3.03455663e+00 1.15265977e+00 1.17275298e+00] [ 3.08663577e-01 1.80829227e-01 5.19864202e-01 ... 1.61383653e+00 -1.04440403e+00 -9.99980271e-02]] ... [[ 2.92009562e-01 -1.56226170e+00 1.81149757e+00 ... 
2.21154594e+00 -3.18833351e-01 9.85058427e-01] [ 4.88851577e-01 1.54005694e+00 -2.29856651e-02 ... 1.96335822e-01 1.04303110e+00 -2.29931569e+00] [ 2.06684589e+00 -1.04909527e+00 3.58061790e-01 ... 2.02248955e+00 -2.55690169e+00 1.55251980e+00] ... [ 9.40112174e-01 -1.26787269e+00 -1.68930638e+00 ... 1.98298469e-01 -2.66920269e-01 -3.18895030e+00] [-1.20287418e+00 1.73136568e+00 1.51389456e+00 ... -1.12085974e+00 7.20133483e-01 -7.13805556e-01] [-5.29767275e-01 8.40430796e-01 -4.25331593e-01 ... -1.87717587e-01 1.10815072e+00 2.99643326e+00]] [[ 9.85741079e-01 3.85430127e-01 -7.07263827e-01 ... -8.25278878e-01 -9.82562304e-01 6.16780639e-01] [-3.37959933e+00 -2.89315104e-01 -1.00542140e+00 ... 4.01235676e+00 6.56723380e-01 1.78599679e+00] [-1.46426165e+00 -4.74149853e-01 -1.43341637e+00 ... 1.37859017e-01 3.44117594e+00 1.30285478e+00] ... [ 1.13341045e+00 1.81121361e+00 8.37516606e-01 ... 1.59710217e+00 3.51093799e-01 9.42503989e-01] [-6.50245845e-02 -5.15690863e-01 4.20947611e-01 ... 1.14857709e+00 2.69146174e-01 -1.32688880e+00] [ 9.73169863e-01 -2.99088097e+00 2.31377339e+00 ... -3.15316498e-01 1.22362626e+00 -9.92861569e-01]] [[ 5.63052654e-01 2.64268923e+00 -1.30988851e-01 ... 1.04097760e+00 -1.81808615e+00 -1.75522494e+00] [ 4.72873122e-01 1.82048336e-01 1.06938779e+00 ... -3.20526183e-01 -6.64813221e-01 -2.52732456e-01] [ 2.09412575e+00 1.02892089e+00 1.36061072e+00 ... 6.13772154e-01 -4.40734816e+00 8.85508835e-01] ... [-1.35049716e-01 1.32859015e+00 1.68198049e-01 ... -6.00706697e-01 -1.87546146e+00 -6.98248744e-01] [ 2.66169381e+00 2.98011184e-01 -1.83124006e+00 ... 1.56904054e+00 -3.10524869e+00 1.11548686e+00] [-1.33004093e+00 6.99859619e-01 5.93436301e-01 ... -8.63742173e-01 2.40936518e+00 4.24053133e-01]]] [[[-3.49238850e-02 1.54592516e-02 -2.06628069e-01 ... 4.72886302e-03 3.68722677e-02 1.70802418e-02] [-1.64170656e-02 1.74232244e-01 1.94098383e-01 ... 
3.22201513e-02 2.00239107e-01 -8.90256166e-02] [-4.42415755e-03 -7.94021264e-02 1.38356984e-01 ... -1.78121701e-02 2.46727038e-02 -1.15170434e-01] ... [-5.53271621e-02 -1.01421684e-01 9.51793045e-02 ... -7.49796033e-02 2.21820444e-01 -8.16659406e-02] [ 7.41782337e-02 8.35430473e-02 -1.03811480e-01 ... -1.95838988e-01 -6.24062084e-02 -8.94966051e-02] [ 5.06630391e-02 5.27331531e-02 -2.51303762e-01 ... -1.36706859e-01 1.15077775e-04 1.98193104e-03]] [[-3.41534093e-02 -3.58056184e-03 2.77104788e-02 ... 5.15639856e-02 1.99311882e-01 3.19749832e-01] [ 1.20738167e-02 -6.63112476e-03 -1.26272023e-01 ... -1.48229033e-01 -4.06634547e-02 -4.43879776e-02] [-1.63729209e-02 -5.61119542e-02 2.87017167e-01 ... 5.50904684e-02 -9.90432426e-02 1.85914516e-01] ... [-3.33318934e-02 3.98842469e-02 5.31383455e-02 ... -1.23532563e-01 -1.49547085e-01 1.57483399e-01] [-1.11834079e-01 -2.40466654e-01 1.21873245e-01 ... -4.84183356e-02 -6.98638856e-02 8.56650397e-02] [-7.87563771e-02 6.39734091e-03 3.50816324e-02 ... -8.61507505e-02 -2.68739052e-02 6.73741549e-02]] [[ 4.25100327e-02 5.12281107e-03 -1.49734598e-02 ... 1.39856830e-01 1.08750902e-01 2.24460661e-02] [ 4.88021486e-02 -6.40197694e-02 -1.07611589e-01 ... -1.86903309e-02 -3.13864611e-02 1.56688005e-01] [-9.29016471e-02 6.22441322e-02 -3.48329693e-02 ... -9.04955491e-02 1.87768221e-01 4.62606139e-02] ... [ 1.04517736e-01 1.08945020e-01 -3.41335498e-03 ... -1.39881834e-01 -5.39745130e-02 9.21306163e-02] [-1.08240470e-01 -1.79680184e-01 -2.65124999e-02 ... 7.75468722e-02 1.15065560e-01 9.71174538e-02] [-1.22996487e-01 1.22922204e-01 4.42853160e-02 ... 1.29809910e-02 8.67163464e-02 8.21311101e-02]] ... [[-1.08134419e-01 -1.41485840e-01 -1.18240401e-01 ... 1.44285426e-01 6.09317794e-02 -2.47876998e-02] [ 2.62717903e-01 -1.28373191e-01 9.50184390e-02 ... -3.00984047e-02 1.02454089e-01 2.00085238e-01] [ 1.17916903e-02 -8.84805527e-03 1.31439656e-01 ... 1.14787340e-01 5.83263375e-02 -1.94397178e-02] ... 
[-4.78044676e-04 2.43146671e-03 -6.00653281e-03 ... -7.87406564e-02 2.31447071e-02 -2.54555885e-02] [ 1.77665770e-01 -2.35733055e-02 3.17853019e-02 ... -1.15327828e-01 8.01672414e-02 5.32055497e-02] [ 1.30356345e-02 -3.65849286e-02 1.09031297e-01 ... 1.32407039e-01 5.16010560e-02 -1.12917356e-01]] [[-1.05700130e-02 2.76481777e-01 2.54034884e-02 ... 3.73484120e-02 -4.61326055e-02 1.66859284e-01] [ 1.04209878e-01 -8.87672752e-02 -1.30603924e-01 ... 5.08003533e-02 -1.78546116e-01 -1.57725915e-01] [-6.37590587e-02 1.62732825e-02 1.01440884e-01 ... -6.87580258e-02 7.50570968e-02 5.92427440e-02] ... [-9.29557011e-02 1.56419590e-01 -3.37619707e-02 ... -1.02337211e-01 -1.18531130e-01 -4.47857603e-02] [ 1.73733328e-02 -4.56067733e-02 -5.84592335e-02 ... 7.97624514e-02 -6.64208550e-03 9.98810604e-02] [-2.05478184e-02 -4.85266224e-02 -2.70127077e-02 ... -1.69123076e-02 -1.24712639e-01 4.29545939e-02]] [[-1.83899179e-01 -1.50579602e-01 1.84123084e-01 ... 4.69309539e-02 9.41648260e-02 6.00193702e-02] [-9.15537030e-02 5.36005870e-02 -3.00444197e-02 ... 1.65487416e-02 -1.14582673e-01 -1.66669786e-02] [-1.92712862e-02 1.50654390e-02 -5.09620681e-02 ... -4.89085307e-03 9.66705680e-02 1.97001342e-02] ... [-2.01204360e-01 -1.80040658e-01 -8.28729570e-02 ... 3.17295082e-02 6.12724125e-02 -1.80189535e-02] [-3.83186489e-02 -2.56804060e-02 -6.33453205e-02 ... 1.29086405e-01 4.95300703e-02 -4.31751534e-02] [-1.25063539e-01 9.48829353e-02 1.35980457e-01 ... 1.82341889e-01 -6.15967847e-02 2.58814007e-01]]] [[[ 8.38403761e-01 -3.70633692e-01 7.32720554e-01 ... 7.64904261e-01 6.46372139e-01 -2.57140249e-01] [-2.79184043e-01 1.12212634e+00 -3.32394063e-01 ... -2.34777942e-01 -4.45620596e-01 -6.03761911e-01] [-3.81399602e-01 -4.10373867e-01 1.00619629e-01 ... 7.40780592e-01 3.29528823e-02 -5.63702226e-01] ... [-1.60713285e-01 -8.10440361e-01 3.73628616e-01 ... -3.11304419e-03 -4.87707943e-01 -3.46927106e-01] [ 1.69890299e-02 3.88900280e-01 -7.40153015e-01 ... 
-4.31367904e-01 -3.37909162e-01 1.23272970e-01] [-2.75388539e-01 -8.40744972e-01 4.42835659e-01 ... 3.57372522e-01 4.52199042e-01 -5.08864939e-01]] [[-1.75490171e-01 6.53948411e-02 -1.94122687e-01 ... 8.04685727e-02 1.40443489e-01 2.84549147e-01] [ 6.97060078e-02 -9.62712765e-01 2.03750897e-02 ... -6.54773116e-01 -3.18657637e-01 -7.37201750e-01] [-8.36392820e-01 -4.25279178e-02 -2.23623410e-01 ... 1.73061430e-01 1.47024155e-01 2.52899766e-01] ... [-6.48158073e-01 1.60227641e-01 1.10662393e-01 ... -8.46594691e-01 4.20709252e-01 -2.81353474e-01] [ 4.22665417e-01 -5.72229981e-01 1.74164593e-01 ... -4.84952256e-02 -1.01346695e+00 4.12580043e-01] [-1.02331027e-01 -4.02216852e-01 -5.14475882e-01 ... 3.93126845e-01 -5.62395453e-01 4.20512140e-01]] [[-3.35055254e-02 -1.16505623e+00 1.65470338e+00 ... -1.64824772e+00 -4.06546772e-01 -1.25076950e-01] [-3.01259428e-01 7.79225454e-02 5.58022261e-02 ... 9.47224163e-03 1.99318185e-01 3.04027259e-01] [ 6.10264778e-01 -4.13419276e-01 -6.77043200e-02 ... -8.07422698e-01 -6.84677184e-01 4.02643949e-01] ... [ 3.45169395e-01 6.40022531e-02 1.75190896e-01 ... 4.84119803e-01 1.81690156e-01 1.33641958e-01] [-4.04494256e-01 -6.02636486e-02 2.53783047e-01 ... 2.74619106e-02 4.97548759e-01 5.26152968e-01] [ 3.16157900e-02 -2.96941549e-01 -6.82905018e-01 ... -1.13721423e-01 5.19461215e-01 5.20644724e-01]] ... [[-1.69272423e-01 -7.63508737e-01 7.85721600e-01 ... -1.28292155e+00 -1.20709315e-01 -2.06531301e-01] [ 1.56353697e-01 1.34552026e+00 -1.95597485e-01 ... 5.42143434e-02 -2.68952847e-01 -3.87625337e-01] [ 2.54685640e-01 3.70954514e-01 1.07016599e+00 ... -4.49180245e-01 -3.80922928e-02 -1.46875940e-02] ... [ 4.17703390e-01 3.76379281e-01 5.42735383e-02 ... 4.17994559e-01 8.17450583e-02 6.39061213e-01] [ 5.21462485e-02 -6.80009782e-01 1.21497229e-01 ... 6.66030705e-01 4.86690283e-01 -1.95286676e-01] [ 1.84758455e-01 -1.34811711e+00 -2.71863222e-01 ... 
-7.37715423e-01 9.50452328e-01 -1.01772630e+00]] [[ 7.44229019e-01 9.77511168e-01 7.41616964e-01 ... -4.93357271e-01 -6.21984661e-01 1.51064014e+00] [-1.09890425e+00 -1.10473529e-01 1.53179392e-01 ... -4.64707203e-02 -3.37158769e-01 -2.56109506e-01] [ 5.92218578e-01 -6.05235934e-01 2.83771843e-01 ... 3.30214739e-01 7.48242438e-01 7.73617983e-01] ... [ 7.44675040e-01 1.16660964e+00 -3.33296768e-02 ... -1.26042873e-01 -3.20138276e-01 8.84067059e-01] [-4.09392685e-01 5.57759330e-02 -6.79628670e-01 ... 9.33383763e-01 -5.22758424e-01 -8.83603334e-01] [ 2.06135318e-01 -1.94738358e-01 6.41960979e-01 ... 4.08343345e-01 -6.51575208e-01 -2.26247851e-02]] [[-5.02189279e-01 4.51985031e-01 9.97909904e-02 ... -7.65127897e-01 6.74436212e-01 -3.40786092e-02] [-3.35610479e-01 1.75176393e-02 -5.45758367e-01 ... -1.14334309e+00 -5.55124998e-01 -6.34282947e-01] [ 7.97586620e-01 5.58700636e-02 -1.06929019e-01 ... -1.37888074e+00 8.37046146e-01 -6.48768246e-01] ... [ 1.88550681e-01 -8.86210561e-01 -9.20418084e-01 ... 3.45078819e-02 1.83731079e-01 -1.16122104e-01] [ 2.02248693e-02 -1.05178809e+00 -2.47555017e-01 ... 7.43763864e-01 4.36234511e-02 4.81755465e-01] [-8.66226494e-01 -9.89751637e-01 -1.17055488e+00 ... -3.08317512e-01 -2.47732121e-02 1.09019053e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4631.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 9.03235078e-01 2.30575395e+00 4.79832351e-01 ... -6.15785778e-01 -1.10066317e-01 5.43274641e-01] [-1.36028156e-01 2.52771109e-01 1.87062070e-01 ... 2.22610757e-01 4.76871312e-01 -1.23140359e+00] [ 1.30970240e+00 1.41191638e+00 -2.06093979e+00 ... 1.16357958e+00 2.65984505e-01 -1.01800716e+00] ... [ 2.23624930e-01 -1.15404403e+00 6.59393370e-01 ... -6.56678379e-02 -7.08252668e-01 -2.16938591e+00] [ 5.10512948e-01 -1.07971442e+00 5.57394326e-01 ... 8.59387994e-01 -6.22883677e-01 3.39641094e-01] [ 3.23478401e-01 -2.73470376e-02 1.06323957e+00 ... -1.46751869e+00 4.33382690e-01 4.24788535e-01]] [[ 2.91526586e-01 9.38496351e-01 1.53144121e+00 ... -1.36931753e+00 5.68833947e-01 -9.81190383e-01] [-4.33434099e-01 6.16993785e-01 -2.07300472e+00 ... -7.91889548e-01 1.31200993e+00 5.77516109e-02] [ 1.12093210e+00 5.74721336e-01 -8.31613064e-01 ... 8.00271690e-01 -5.13255537e-01 -1.85397589e+00] ... [-1.12668693e+00 -3.33212465e-01 6.10848665e-01 ... -2.77474213e+00 -2.60078847e-01 3.66581827e-02] [-1.12454319e+00 2.46207640e-01 2.04895228e-01 ... -4.82114941e-01 4.22047883e-01 1.83311194e-01] [-1.11203277e+00 1.92710891e-01 -1.88142216e+00 ... -1.56642184e-01 1.77445388e+00 1.05470486e-01]] [[ 3.28493565e-01 -1.43685102e+00 -1.71981692e+00 ... 1.18883240e+00 -4.78183746e-01 -3.10527589e-02] [-4.07554299e-01 3.80680829e-01 1.95862889e+00 ... -5.11644185e-01 8.88454020e-01 -1.70661259e+00] [-1.10635363e-01 -1.68001378e+00 9.11407948e-01 ... 
-1.63007557e+00 6.12995207e-01 -1.32114542e+00] ... [-2.76973188e-01 9.95384395e-01 -1.54977009e-01 ... 1.59582436e+00 4.74652320e-01 6.32524788e-01] [-5.15916884e-01 -1.33707106e+00 -4.11956638e-01 ... -1.40791044e-01 -1.72295368e+00 -3.81374449e-01] [-1.14234352e+00 3.00150275e-01 1.64350533e+00 ... -9.44327414e-01 -5.77744424e-01 -8.34570944e-01]] ... [[-3.66120160e-01 8.42275679e-01 -9.18325007e-01 ... 1.03887856e+00 1.38456786e+00 2.69343644e-01] [-8.41290295e-01 -1.46402162e-03 6.34913743e-01 ... 1.26941741e+00 -1.32233584e+00 8.56440663e-01] [-1.45269227e+00 -6.00229859e-01 -2.82165825e-01 ... -1.95293796e+00 8.98150623e-01 -6.06655329e-02] ... [-4.56341892e-01 -1.23622298e+00 3.06831598e-01 ... -3.38504910e-01 6.46682501e-01 -4.70737875e-01] [-3.49656701e-01 -2.54977524e-01 -7.93398798e-01 ... -1.08875215e+00 -2.76114440e+00 1.44481892e-03] [-1.25983134e-01 7.05464333e-02 1.20111442e+00 ... -6.15519166e-01 8.67606252e-02 -9.60287571e-01]] [[ 1.19487524e+00 -1.89913377e-01 -3.54496241e-01 ... -1.23482823e+00 -2.56463081e-01 -5.39101005e-01] [-1.83706746e-01 -7.01571167e-01 4.02783632e-01 ... 5.23336053e-01 -9.15458798e-02 -1.23910022e+00] [ 2.72656918e-01 8.83613348e-01 4.94948089e-01 ... 3.85756679e-02 -7.28420496e-01 3.45359504e-01] ... [-1.00381601e+00 -5.26535273e-01 -2.50007796e+00 ... -8.94154191e-01 -1.77076280e+00 1.70658338e+00] [ 3.41885835e-01 1.58726513e+00 1.07184076e+00 ... -8.88520241e-01 4.39173281e-01 1.45770502e+00] [ 2.68883801e+00 1.41332972e+00 2.95869052e-01 ... -3.05664018e-02 9.88331199e-01 1.55586040e+00]] [[ 3.88610870e-01 1.86300051e+00 -1.50890613e+00 ... 1.83163261e+00 7.79693544e-01 -1.99275732e-01] [ 5.88871419e-01 2.16942668e+00 -1.33732462e+00 ... -2.17197418e-01 -6.18928671e-01 -2.49014091e+00] [ 3.30009162e-02 4.35832679e-01 4.75936681e-01 ... 9.37506914e-01 -8.47365916e-01 4.66264218e-01] ... [ 1.03021666e-01 3.44516963e-01 4.12935466e-01 ... 
-6.62119389e-02 -1.05634201e+00 3.09156680e+00] [ 6.24726295e-01 -1.20247543e-01 -2.60576278e-01 ... 6.97839379e-01 2.95745909e-01 9.13244665e-01] [-7.85852730e-01 2.13443693e-02 -1.18087852e+00 ... 2.74195641e-01 1.82010841e+00 8.74109492e-02]]]; ov_res: [[[ 9.03235078e-01 2.30575395e+00 4.79832351e-01 ... -6.15785778e-01 -1.10066317e-01 5.43274641e-01] [-1.36028156e-01 2.52771109e-01 1.87062070e-01 ... 2.22610757e-01 4.76871312e-01 -1.23140359e+00] [ 1.30970240e+00 1.41191638e+00 -2.06093979e+00 ... 1.16357958e+00 2.65984505e-01 -1.01800716e+00] ... [ 2.23624930e-01 -1.15404403e+00 6.59393370e-01 ... -6.56678379e-02 -7.08252668e-01 -2.16938591e+00] [ 5.10512948e-01 -1.07971442e+00 5.57394326e-01 ... 8.59387994e-01 -6.22883677e-01 3.39641094e-01] [ 3.23478401e-01 -2.73470376e-02 1.06323957e+00 ... -1.46751869e+00 4.33382690e-01 4.24788535e-01]] [[ 2.91526586e-01 9.38496351e-01 1.53144121e+00 ... -1.36931753e+00 5.68833947e-01 -9.81190383e-01] [-4.33434099e-01 6.16993785e-01 -2.07300472e+00 ... -7.91889548e-01 1.31200993e+00 5.77516109e-02] [ 1.12093210e+00 5.74721336e-01 -8.31613064e-01 ... 8.00271690e-01 -5.13255537e-01 -1.85397589e+00] ... [-1.12668693e+00 -3.33212465e-01 6.10848665e-01 ... -2.77474213e+00 -2.60078847e-01 3.66581827e-02] [-1.12454319e+00 2.46207640e-01 2.04895228e-01 ... -4.82114941e-01 4.22047883e-01 1.83311194e-01] [-1.11203277e+00 1.92710891e-01 -1.88142216e+00 ... -1.56642184e-01 1.77445388e+00 1.05470486e-01]] [[ 3.28493565e-01 -1.43685102e+00 -1.71981692e+00 ... 1.18883240e+00 -4.78183746e-01 -3.10527589e-02] [-4.07554299e-01 3.80680829e-01 1.95862889e+00 ... -5.11644185e-01 8.88454020e-01 -1.70661259e+00] [-1.10635363e-01 -1.68001378e+00 9.11407948e-01 ... -1.63007557e+00 6.12995207e-01 -1.32114542e+00] ... [-2.76973188e-01 9.95384395e-01 -1.54977009e-01 ... 1.59582436e+00 4.74652320e-01 6.32524788e-01] [-5.15916884e-01 -1.33707106e+00 -4.11956638e-01 ... 
-1.40791044e-01 -1.72295368e+00 -3.81374449e-01] [-1.14234352e+00 3.00150275e-01 1.64350533e+00 ... -9.44327414e-01 -5.77744424e-01 -8.34570944e-01]] ... [[-3.66120160e-01 8.42275679e-01 -9.18325007e-01 ... 1.03887856e+00 1.38456786e+00 2.69343644e-01] [-8.41290295e-01 -1.46402162e-03 6.34913743e-01 ... 1.26941741e+00 -1.32233584e+00 8.56440663e-01] [-1.45269227e+00 -6.00229859e-01 -2.82165825e-01 ... -1.95293796e+00 8.98150623e-01 -6.06655329e-02] ... [-4.56341892e-01 -1.23622298e+00 3.06831598e-01 ... -3.38504910e-01 6.46682501e-01 -4.70737875e-01] [-3.49656701e-01 -2.54977524e-01 -7.93398798e-01 ... -1.08875215e+00 -2.76114440e+00 1.44481892e-03] [-1.25983134e-01 7.05464333e-02 1.20111442e+00 ... -6.15519166e-01 8.67606252e-02 -9.60287571e-01]] [[ 1.19487524e+00 -1.89913377e-01 -3.54496241e-01 ... -1.23482823e+00 -2.56463081e-01 -5.39101005e-01] [-1.83706746e-01 -7.01571167e-01 4.02783632e-01 ... 5.23336053e-01 -9.15458798e-02 -1.23910022e+00] [ 2.72656918e-01 8.83613348e-01 4.94948089e-01 ... 3.85756679e-02 -7.28420496e-01 3.45359504e-01] ... [-1.00381601e+00 -5.26535273e-01 -2.50007796e+00 ... -8.94154191e-01 -1.77076280e+00 1.70658338e+00] [ 3.41885835e-01 1.58726513e+00 1.07184076e+00 ... -8.88520241e-01 4.39173281e-01 1.45770502e+00] [ 2.68883801e+00 1.41332972e+00 2.95869052e-01 ... -3.05664018e-02 9.88331199e-01 1.55586040e+00]] [[ 3.88610870e-01 1.86300051e+00 -1.50890613e+00 ... 1.83163261e+00 7.79693544e-01 -1.99275732e-01] [ 5.88871419e-01 2.16942668e+00 -1.33732462e+00 ... -2.17197418e-01 -6.18928671e-01 -2.49014091e+00] [ 3.30009162e-02 4.35832679e-01 4.75936681e-01 ... 9.37506914e-01 -8.47365916e-01 4.66264218e-01] ... [ 1.03021666e-01 3.44516963e-01 4.12935466e-01 ... -6.62119389e-02 -1.05634201e+00 3.09156680e+00] [ 6.24726295e-01 -1.20247543e-01 -2.60576278e-01 ... 6.97839379e-01 2.95745909e-01 9.13244665e-01] [-7.85852730e-01 2.13443693e-02 -1.18087852e+00 ... 2.74195641e-01 1.82010841e+00 8.74109492e-02]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:1 - kernel_size:[3, 2] ] | 0.06 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4633.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 1.19243050e+00 2.87711293e-01 -4.25306648e-01 ... -8.04172218e-01 6.90969527e-02 -1.68176461e-02] [-4.01064664e-01 7.92174399e-01 -7.91392028e-01 ... -1.21460831e+00 -2.05885363e+00 -2.78841114e+00] [-4.26536381e-01 -4.18866158e-01 -3.52937132e-01 ... -1.27597129e+00 5.78414440e-01 -6.95363224e-01] ... [ 1.26698822e-01 -1.93408573e+00 -1.08739173e+00 ... 1.64420474e+00 2.01753879e+00 1.35371447e+00] [ 1.30286705e+00 -1.33340132e+00 7.83987045e-01 ... 8.55622292e-02 8.39969695e-01 -1.14702612e-01] [-1.38408974e-01 -8.77366006e-01 -2.99149193e-03 ... 4.50587392e-01 2.70541042e-01 1.80406189e+00]] [[ 2.67466933e-01 -1.24480247e+00 -1.23540235e+00 ... 1.04748547e+00 1.30358386e+00 -4.02743429e-01] [-8.20544958e-01 2.97411382e-01 4.86408859e-01 ... 1.17035151e+00 -2.45336819e+00 -1.82869405e-01] [-1.18681026e+00 2.02016282e+00 2.55656868e-01 ... 2.03132915e+00 -1.34078360e+00 -3.57771248e-01] ... [-1.24107361e+00 -1.03774881e+00 -4.88052458e-01 ... 9.36230898e-01 1.04501820e+00 2.22890681e-04] [ 1.53666914e-01 -1.16374195e+00 -9.08217803e-02 ... 7.43082285e-01 2.68378806e+00 3.06281000e-01] [ 1.26601982e+00 -2.08267045e+00 1.91586792e+00 ... -2.87816137e-01 4.20675099e-01 -1.11097538e+00]] [[ 5.00921190e-01 -2.63233989e-01 8.93354952e-01 ... -9.24043544e-03 2.55778074e-01 1.51279306e+00] [-5.09760864e-02 1.17121398e+00 6.34417355e-01 ... 6.80618703e-01 8.16715658e-01 -4.67615634e-01] [-1.17093873e+00 -5.25395036e-01 1.72323537e+00 ... 
1.06317770e+00 4.20240521e-01 -9.47495937e-01] ... [ 9.50530265e-03 -9.27441001e-01 9.03840125e-01 ... 6.40364110e-01 3.26257825e-01 -4.99405563e-01] [ 7.01220930e-02 2.11875939e+00 -1.52140260e-01 ... 3.56500715e-01 -5.33151984e-01 -4.07008767e-01] [-1.42043793e+00 -4.29710507e-01 -1.31404257e+00 ... 1.91822624e+00 1.04069030e+00 -4.76620235e-02]] ... [[ 1.37230670e+00 8.89409602e-01 8.09168339e-01 ... -9.34768200e-01 -1.36320150e+00 -8.40811312e-01] [-3.72653902e-01 -2.67360479e-01 4.39206600e-01 ... -1.07558954e+00 -6.22710526e-01 6.75726473e-01] [-3.57429117e-01 -1.23294234e+00 -1.07001638e+00 ... 4.97317463e-01 2.11079979e+00 -2.85033673e-01] ... [ 3.43291909e-01 -6.70774281e-01 -8.14102352e-01 ... -5.47065139e-01 -6.62566483e-01 -7.45564044e-01] [ 4.42273289e-01 -1.26351034e-02 8.59673858e-01 ... 1.63998461e+00 1.03974140e+00 -2.48461246e+00] [ 1.03038058e-01 5.11439191e-03 -1.42401946e+00 ... -3.10554123e+00 5.71620405e-01 2.63350010e-01]] [[-5.12999833e-01 -6.55640244e-01 -1.41688621e+00 ... 1.95979571e+00 2.09239557e-01 -6.82388321e-02] [-1.07144959e-01 5.49703479e-01 7.35386491e-01 ... -8.51100124e-03 -1.84420633e+00 -6.22715354e-01] [ 1.52832270e+00 -1.01007961e-01 9.43179578e-02 ... -5.05152404e-01 -1.59221208e+00 -8.59245360e-01] ... [ 1.26937711e+00 -1.46321726e+00 -1.51222539e+00 ... 1.06765187e+00 6.90945327e-01 -1.11211807e-01] [ 8.65002751e-01 -1.08615838e-01 3.17785501e-01 ... 7.79977024e-01 -5.38995087e-01 -9.59493756e-01] [ 1.48944461e+00 -4.96487737e-01 1.60446274e+00 ... -4.96136755e-01 -5.32739282e-01 -2.53643338e-02]] [[ 2.15829611e-02 -3.58516246e-01 5.26811659e-01 ... 1.12234914e+00 1.12201476e+00 1.61493564e+00] [ 1.47175038e+00 -5.45883060e-01 2.29510933e-01 ... 6.93570793e-01 -2.81683683e-01 2.17357397e-01] [ 1.61865997e+00 2.02955294e+00 8.55479315e-02 ... 9.74663615e-01 4.62651253e-01 6.08517468e-01] ... [-1.18761063e+00 1.23267734e+00 1.16872475e-01 ... 
6.74982369e-02 7.81348765e-01 8.54106620e-02] [ 4.61719632e-02 -2.54095018e-01 -5.88155210e-01 ... 1.48937309e+00 8.79688978e-01 4.78702962e-01] [ 8.90026152e-01 -1.11041534e+00 1.72165179e+00 ... 8.12410831e-01 1.07522798e+00 -1.19034863e+00]]]; ov_res: [[[ 1.19243050e+00 2.87711293e-01 -4.25306648e-01 ... -8.04172218e-01 6.90969527e-02 -1.68176461e-02] [-4.01064664e-01 7.92174399e-01 -7.91392028e-01 ... -1.21460831e+00 -2.05885363e+00 -2.78841114e+00] [-4.26536381e-01 -4.18866158e-01 -3.52937132e-01 ... -1.27597129e+00 5.78414440e-01 -6.95363224e-01] ... [ 1.26698822e-01 -1.93408573e+00 -1.08739173e+00 ... 1.64420474e+00 2.01753879e+00 1.35371447e+00] [ 1.30286705e+00 -1.33340132e+00 7.83987045e-01 ... 8.55622292e-02 8.39969695e-01 -1.14702612e-01] [-1.38408974e-01 -8.77366006e-01 -2.99149193e-03 ... 4.50587392e-01 2.70541042e-01 1.80406189e+00]] [[ 2.67466933e-01 -1.24480247e+00 -1.23540235e+00 ... 1.04748547e+00 1.30358386e+00 -4.02743429e-01] [-8.20544958e-01 2.97411382e-01 4.86408859e-01 ... 1.17035151e+00 -2.45336819e+00 -1.82869405e-01] [-1.18681026e+00 2.02016282e+00 2.55656868e-01 ... 2.03132915e+00 -1.34078360e+00 -3.57771248e-01] ... [-1.24107361e+00 -1.03774881e+00 -4.88052458e-01 ... 9.36230898e-01 1.04501820e+00 2.22890681e-04] [ 1.53666914e-01 -1.16374195e+00 -9.08217803e-02 ... 7.43082285e-01 2.68378806e+00 3.06281000e-01] [ 1.26601982e+00 -2.08267045e+00 1.91586792e+00 ... -2.87816137e-01 4.20675099e-01 -1.11097538e+00]] [[ 5.00921190e-01 -2.63233989e-01 8.93354952e-01 ... -9.24043544e-03 2.55778074e-01 1.51279306e+00] [-5.09760864e-02 1.17121398e+00 6.34417355e-01 ... 6.80618703e-01 8.16715658e-01 -4.67615634e-01] [-1.17093873e+00 -5.25395036e-01 1.72323537e+00 ... 1.06317770e+00 4.20240521e-01 -9.47495937e-01] ... [ 9.50530265e-03 -9.27441001e-01 9.03840125e-01 ... 6.40364110e-01 3.26257825e-01 -4.99405563e-01] [ 7.01220930e-02 2.11875939e+00 -1.52140260e-01 ... 
3.56500715e-01 -5.33151984e-01 -4.07008767e-01] [-1.42043793e+00 -4.29710507e-01 -1.31404257e+00 ... 1.91822624e+00 1.04069030e+00 -4.76620235e-02]] ... [[ 1.37230670e+00 8.89409602e-01 8.09168339e-01 ... -9.34768200e-01 -1.36320150e+00 -8.40811312e-01] [-3.72653902e-01 -2.67360479e-01 4.39206600e-01 ... -1.07558954e+00 -6.22710526e-01 6.75726473e-01] [-3.57429117e-01 -1.23294234e+00 -1.07001638e+00 ... 4.97317463e-01 2.11079979e+00 -2.85033673e-01] ... [ 3.43291909e-01 -6.70774281e-01 -8.14102352e-01 ... -5.47065139e-01 -6.62566483e-01 -7.45564044e-01] [ 4.42273289e-01 -1.26351034e-02 8.59673858e-01 ... 1.63998461e+00 1.03974140e+00 -2.48461246e+00] [ 1.03038058e-01 5.11439191e-03 -1.42401946e+00 ... -3.10554123e+00 5.71620405e-01 2.63350010e-01]] [[-5.12999833e-01 -6.55640244e-01 -1.41688621e+00 ... 1.95979571e+00 2.09239557e-01 -6.82388321e-02] [-1.07144959e-01 5.49703479e-01 7.35386491e-01 ... -8.51100124e-03 -1.84420633e+00 -6.22715354e-01] [ 1.52832270e+00 -1.01007961e-01 9.43179578e-02 ... -5.05152404e-01 -1.59221208e+00 -8.59245360e-01] ... [ 1.26937711e+00 -1.46321726e+00 -1.51222539e+00 ... 1.06765187e+00 6.90945327e-01 -1.11211807e-01] [ 8.65002751e-01 -1.08615838e-01 3.17785501e-01 ... 7.79977024e-01 -5.38995087e-01 -9.59493756e-01] [ 1.48944461e+00 -4.96487737e-01 1.60446274e+00 ... -4.96136755e-01 -5.32739282e-01 -2.53643338e-02]] [[ 2.15829611e-02 -3.58516246e-01 5.26811659e-01 ... 1.12234914e+00 1.12201476e+00 1.61493564e+00] [ 1.47175038e+00 -5.45883060e-01 2.29510933e-01 ... 6.93570793e-01 -2.81683683e-01 2.17357397e-01] [ 1.61865997e+00 2.02955294e+00 8.55479315e-02 ... 9.74663615e-01 4.62651253e-01 6.08517468e-01] ... [-1.18761063e+00 1.23267734e+00 1.16872475e-01 ... 6.74982369e-02 7.81348765e-01 8.54106620e-02] [ 4.61719632e-02 -2.54095018e-01 -5.88155210e-01 ... 1.48937309e+00 8.79688978e-01 4.78702962e-01] [ 8.90026152e-01 -1.11041534e+00 1.72165179e+00 ... 8.12410831e-01 1.07522798e+00 -1.19034863e+00]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4635.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0.15936509 0.65346366 0.5676921 ... -0.5950505 0.46651834 -0.50734425] [-0.36892706 0.9034865 -1.2389998 ... 0.22637337 -0.81847125 -1.0898092 ] [-0.5709553 0.9581479 0.18162334 ... -1.4063584 -1.277566 0.9729635 ] ... [ 0.9049924 0.51792246 0.08443525 ... -0.251049 0.22092351 -0.8216969 ] [-0.40829057 0.92341685 -1.3431119 ... 1.2299899 -0.8200249 -1.6966763 ] [ 0.49745613 -0.7090517 -1.3490901 ... -1.94961 -0.850612 -0.51522565]] [[ 0.03284015 -0.3330452 -0.08713314 ... 0.94796354 -1.5458808 0.46599472] [ 1.9725373 0.61478835 -0.6954643 ... 0.24610883 3.116687 0.27158335] [ 1.2283918 -0.6703666 0.70888287 ... 0.89159894 -1.6374139 -2.1766973 ] ... [-1.5124943 0.79176515 -0.36808375 ... 0.7273638 0.28570753 -0.35742652] [ 0.30982754 0.5784311 -0.08307672 ... -0.50937146 2.2531822 -0.185401 ] [ 0.48777443 -0.25850424 -0.8251611 ... 1.7610587 1.0875919 0.37159488]] [[-0.37062362 -0.69081324 -1.036993 ... -0.08556899 1.1951345 0.4009826 ] [-1.6839851 -0.74807066 1.1889645 ... 0.69315207 -0.81362176 -0.6954454 ] [ 1.5606122 -1.421581 -0.6430452 ... -1.3193464 -1.3476084 1.2673614 ] ... [-0.93458027 0.16652189 0.26693293 ... 2.255709 -0.59796757 1.2671413 ] [ 2.643443 -0.3405911 -1.1822565 ... -0.24384515 -2.205586 0.06643754] [ 0.08279272 0.6301586 -0.4770726 ... 0.24260521 -1.4197445 1.4462953 ]] ... [[-0.35806444 -1.1915274 -1.2471844 ... 0.2788159 0.0073461 0.70111597] [-0.58648336 -1.6604459 0.6881403 ... 2.0444598 0.43544155 -1.0206317 ] [-1.1271824 -0.37648863 0.8383788 ... 
-0.48761958 -0.8337417 0.2685829 ] ... [ 1.3035879 1.420588 1.1657965 ... 0.6233478 -1.1913371 -1.8225768 ] [ 1.2388532 0.2944517 -0.736764 ... 0.92567235 -0.29455876 -1.0268626 ] [ 0.73309493 -0.6599379 -0.28534332 ... 0.0575929 0.22605611 0.1616249 ]] [[ 0.5863125 0.19438802 -0.5021577 ... -0.36721745 0.64692813 0.51745516] [-0.88410044 -0.52466565 -1.544086 ... 2.1105435 0.9727411 -1.1009752 ] [ 2.6915865 0.7962717 -1.3721702 ... -0.65113044 0.24216801 -0.17094417] ... [ 1.6382024 0.26546958 0.33727562 ... -0.72587365 0.5992294 -0.5340488 ] [-0.5577667 0.34265333 1.7575256 ... -1.417491 1.5721495 0.5260186 ] [ 1.0142016 0.5382333 -0.43831724 ... 0.33413273 -0.6420932 1.1767008 ]] [[-0.2950905 0.84419495 0.91710746 ... 1.7058219 0.20746255 0.08120254] [ 0.73139435 0.5603599 0.06173339 ... 0.31857082 1.3104795 1.7531403 ] [ 0.00754893 -0.07204037 -1.1076367 ... -0.80310374 -1.9587187 2.2782161 ] ... [-0.63136417 0.9769665 -0.5142114 ... -0.7292561 -0.19164482 1.1891317 ] [ 0.1181152 1.8033969 0.02444558 ... 1.6589124 0.63169664 -1.6671447 ] [ 1.2673992 0.64644694 -1.9102575 ... 0.92038125 -1.3217577 2.5071485 ]]]; ov_res: [[[ 0.15936509 0.65346366 0.5676921 ... -0.5950505 0.46651834 -0.50734425] [-0.36892706 0.9034865 -1.2389998 ... 0.22637337 -0.81847125 -1.0898092 ] [-0.5709553 0.9581479 0.18162334 ... -1.4063584 -1.277566 0.9729635 ] ... [ 0.9049924 0.51792246 0.08443525 ... -0.251049 0.22092351 -0.8216969 ] [-0.40829057 0.92341685 -1.3431119 ... 1.2299899 -0.8200249 -1.6966763 ] [ 0.49745613 -0.7090517 -1.3490901 ... -1.94961 -0.850612 -0.51522565]] [[ 0.03284015 -0.3330452 -0.08713314 ... 0.94796354 -1.5458808 0.46599472] [ 1.9725373 0.61478835 -0.6954643 ... 0.24610883 3.116687 0.27158335] [ 1.2283918 -0.6703666 0.70888287 ... 0.89159894 -1.6374139 -2.1766973 ] ... [-1.5124943 0.79176515 -0.36808375 ... 0.7273638 0.28570753 -0.35742652] [ 0.30982754 0.5784311 -0.08307672 ... -0.50937146 2.2531822 -0.185401 ] [ 0.48777443 -0.25850424 -0.8251611 ... 
1.7610587 1.0875919 0.37159488]] [[-0.37062362 -0.69081324 -1.036993 ... -0.08556899 1.1951345 0.4009826 ] [-1.6839851 -0.74807066 1.1889645 ... 0.69315207 -0.81362176 -0.6954454 ] [ 1.5606122 -1.421581 -0.6430452 ... -1.3193464 -1.3476084 1.2673614 ] ... [-0.93458027 0.16652189 0.26693293 ... 2.255709 -0.59796757 1.2671413 ] [ 2.643443 -0.3405911 -1.1822565 ... -0.24384515 -2.205586 0.06643754] [ 0.08279272 0.6301586 -0.4770726 ... 0.24260521 -1.4197445 1.4462953 ]] ... [[-0.35806444 -1.1915274 -1.2471844 ... 0.2788159 0.0073461 0.70111597] [-0.58648336 -1.6604459 0.6881403 ... 2.0444598 0.43544155 -1.0206317 ] [-1.1271824 -0.37648863 0.8383788 ... -0.48761958 -0.8337417 0.2685829 ] ... [ 1.3035879 1.420588 1.1657965 ... 0.6233478 -1.1913371 -1.8225768 ] [ 1.2388532 0.2944517 -0.736764 ... 0.92567235 -0.29455876 -1.0268626 ] [ 0.73309493 -0.6599379 -0.28534332 ... 0.0575929 0.22605611 0.1616249 ]] [[ 0.5863125 0.19438802 -0.5021577 ... -0.36721745 0.64692813 0.51745516] [-0.88410044 -0.52466565 -1.544086 ... 2.1105435 0.9727411 -1.1009752 ] [ 2.6915865 0.7962717 -1.3721702 ... -0.65113044 0.24216801 -0.17094417] ... [ 1.6382024 0.26546958 0.33727562 ... -0.72587365 0.5992294 -0.5340488 ] [-0.5577667 0.34265333 1.7575256 ... -1.417491 1.5721495 0.5260186 ] [ 1.0142016 0.5382333 -0.43831724 ... 0.33413273 -0.6420932 1.1767008 ]] [[-0.2950905 0.84419495 0.91710746 ... 1.7058219 0.20746255 0.08120254] [ 0.73139435 0.5603599 0.06173339 ... 0.31857082 1.3104795 1.7531403 ] [ 0.00754893 -0.07204037 -1.1076367 ... -0.80310374 -1.9587187 2.2782161 ] ... [-0.63136417 0.9769665 -0.5142114 ... -0.7292561 -0.19164482 1.1891317 ] [ 0.1181152 1.8033969 0.02444558 ... 1.6589124 0.63169664 -1.6671447 ] [ 1.2673992 0.64644694 -1.9102575 ... 0.92038125 -1.3217577 2.5071485 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4637.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[-0.24912623 -1.6844885 0.31105527 ... -1.9513605 1.0258588 0.1375389 ] [ 1.292018 0.3139706 0.24448976 ... -1.6925888 -0.2044606 0.9724415 ] [-0.2268892 -0.22150022 0.6680322 ... -0.95441353 1.407476 -0.01841653] ... [ 0.49913728 0.8469301 -1.6243249 ... 0.03953609 -0.17328215 -0.14080815] [ 0.5544727 -0.8126808 -0.14845249 ... 1.180836 -1.427411 -2.0966709 ] [ 2.0358741 -0.29796016 -1.9534215 ... 0.54709613 1.9354185 -1.1566534 ]] [[-1.1506178 -0.58712655 -0.71977514 ... 1.9068664 0.88767445 -1.4617574 ] [-0.7173897 0.11778476 -0.9601883 ... 0.45719042 0.60872614 0.8716749 ] [-2.3325574 0.11197794 -1.6898043 ... 2.0616395 -0.48075125 -1.6876931 ] ... [ 0.5815599 0.192363 1.0679452 ... 0.7452359 2.077599 0.30721036] [ 0.36246607 2.35092 1.6603997 ... -0.30694395 -0.17749618 -0.25156912] [ 1.6810405 -0.20349784 -0.74211407 ... -0.07638825 -0.53925437 -0.47436377]] [[ 1.2762353 -0.9182017 -0.23652408 ... 0.1500004 -0.8946836 1.2140584 ] [ 0.7856932 -0.96076584 -1.5458721 ... 0.436727 -1.1311452 -0.9507368 ] [-0.6930752 1.2559965 -1.6484594 ... 1.6802055 -0.2806479 0.32210362] ... [-1.8751043 -1.2010902 -0.4010927 ... -0.75570416 0.88258886 -1.83348 ] [-1.0950352 1.1242255 0.99185175 ... -1.8957767 1.4170552 0.19634269] [-0.3055893 -0.954642 -0.26519635 ... -2.006908 -1.5394073 0.5536125 ]] ... [[-0.08804809 -0.22266531 -1.285985 ... 0.00386233 2.1664786 0.7625122 ] [ 0.13470209 -0.42685556 -0.24714856 ... 
-0.5182706 -0.7224727 0.5878147 ] [-0.5973643 -0.26291823 -1.188672 ... 0.41368392 -0.21873151 1.4557374 ] ... [ 0.7120733 -1.3446195 -0.11660579 ... 1.3141677 0.12799443 0.22405255] [ 0.7569969 1.0438159 0.69407195 ... -0.5682061 0.7210166 -1.5429124 ] [ 0.36851278 0.869903 -0.27088693 ... 0.44397134 -0.3158645 0.8079087 ]] [[-0.08689215 0.48089242 1.0362507 ... 1.6377689 -1.2177447 -0.37940046] [-1.5659667 -0.7234195 1.1548004 ... 0.5210578 1.2466097 0.8425461 ] [-0.578916 -0.54006165 -1.5562371 ... -1.4700346 0.22200985 0.17819798] ... [ 0.6059864 -0.11357247 -0.00589272 ... -0.43977737 -0.3607579 0.80471706] [-0.13062784 -0.86366355 -0.08188725 ... -1.0662347 1.6356443 -0.2760506 ] [-0.30678895 -1.2576721 0.18022275 ... 0.93893844 -0.7325326 0.5577439 ]] [[-0.48666793 0.3378072 1.165288 ... 0.67944014 -0.28699148 -0.33911368] [ 0.2761947 -0.45009196 0.06882421 ... 0.16195183 0.135077 1.0351672 ] [ 0.9771284 -0.9765239 -0.10677123 ... 1.6170259 -1.3409142 -0.6666791 ] ... [ 0.03423905 -0.46495396 0.334039 ... -1.1246799 -1.2407265 1.2635047 ] [ 2.2604413 1.0317106 0.50534993 ... -0.5502489 1.5190151 -1.254219 ] [-1.4466723 -0.21559374 -1.2630763 ... -0.21830288 0.90145296 -0.31044027]]]; ov_res: [[[-0.24912623 -1.6844885 0.31105527 ... -1.9513605 1.0258588 0.1375389 ] [ 1.292018 0.3139706 0.24448976 ... -1.6925888 -0.2044606 0.9724415 ] [-0.2268892 -0.22150022 0.6680322 ... -0.95441353 1.407476 -0.01841653] ... [ 0.49913728 0.8469301 -1.6243249 ... 0.03953609 -0.17328215 -0.14080815] [ 0.5544727 -0.8126808 -0.14845249 ... 1.180836 -1.427411 -2.0966709 ] [ 2.0358741 -0.29796016 -1.9534215 ... 0.54709613 1.9354185 -1.1566534 ]] [[-1.1506178 -0.58712655 -0.71977514 ... 1.9068664 0.88767445 -1.4617574 ] [-0.7173897 0.11778476 -0.9601883 ... 0.45719042 0.60872614 0.8716749 ] [-2.3325574 0.11197794 -1.6898043 ... 2.0616395 -0.48075125 -1.6876931 ] ... [ 0.5815599 0.192363 1.0679452 ... 0.7452359 2.077599 0.30721036] [ 0.36246607 2.35092 1.6603997 ... 
-0.30694395 -0.17749618 -0.25156912] [ 1.6810405 -0.20349784 -0.74211407 ... -0.07638825 -0.53925437 -0.47436377]] [[ 1.2762353 -0.9182017 -0.23652408 ... 0.1500004 -0.8946836 1.2140584 ] [ 0.7856932 -0.96076584 -1.5458721 ... 0.436727 -1.1311452 -0.9507368 ] [-0.6930752 1.2559965 -1.6484594 ... 1.6802055 -0.2806479 0.32210362] ... [-1.8751043 -1.2010902 -0.4010927 ... -0.75570416 0.88258886 -1.83348 ] [-1.0950352 1.1242255 0.99185175 ... -1.8957767 1.4170552 0.19634269] [-0.3055893 -0.954642 -0.26519635 ... -2.006908 -1.5394073 0.5536125 ]] ... [[-0.08804809 -0.22266531 -1.285985 ... 0.00386233 2.1664786 0.7625122 ] [ 0.13470209 -0.42685556 -0.24714856 ... -0.5182706 -0.7224727 0.5878147 ] [-0.5973643 -0.26291823 -1.188672 ... 0.41368392 -0.21873151 1.4557374 ] ... [ 0.7120733 -1.3446195 -0.11660579 ... 1.3141677 0.12799443 0.22405255] [ 0.7569969 1.0438159 0.69407195 ... -0.5682061 0.7210166 -1.5429124 ] [ 0.36851278 0.869903 -0.27088693 ... 0.44397134 -0.3158645 0.8079087 ]] [[-0.08689215 0.48089242 1.0362507 ... 1.6377689 -1.2177447 -0.37940046] [-1.5659667 -0.7234195 1.1548004 ... 0.5210578 1.2466097 0.8425461 ] [-0.578916 -0.54006165 -1.5562371 ... -1.4700346 0.22200985 0.17819798] ... [ 0.6059864 -0.11357247 -0.00589272 ... -0.43977737 -0.3607579 0.80471706] [-0.13062784 -0.86366355 -0.08188725 ... -1.0662347 1.6356443 -0.2760506 ] [-0.30678895 -1.2576721 0.18022275 ... 0.93893844 -0.7325326 0.5577439 ]] [[-0.48666793 0.3378072 1.165288 ... 0.67944014 -0.28699148 -0.33911368] [ 0.2761947 -0.45009196 0.06882421 ... 0.16195183 0.135077 1.0351672 ] [ 0.9771284 -0.9765239 -0.10677123 ... 1.6170259 -1.3409142 -0.6666791 ] ... [ 0.03423905 -0.46495396 0.334039 ... -1.1246799 -1.2407265 1.2635047 ] [ 2.2604413 1.0317106 0.50534993 ... -0.5502489 1.5190151 -1.254219 ] [-1.4466723 -0.21559374 -1.2630763 ... -0.21830288 0.90145296 -0.31044027]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:1 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4639.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 1.0136216 0.7769642 -0.5658874 ... 1.4277079 0.39290926 0.4995503 ] [ 0.92236096 0.2704559 -0.37529483 ... 0.1182375 -0.4528573 0.80461234] [-1.3720533 0.79884064 0.38496247 ... -0.38230276 0.32703212 -0.47447276]] [[-0.18727286 0.5139609 0.23273587 ... 0.43545806 -2.5086741 0.04295025] [ 0.82690233 -0.8988926 0.34358203 ... 0.8659305 1.4529116 0.5286747 ] [-1.394024 -1.63587 -2.0323572 ... -0.9872573 1.6627955 0.5564969 ]] [[-1.3170624 -2.1914713 -0.05678767 ... 0.5408372 0.32803747 -0.55224925] [-0.6136352 0.5609545 -0.60671777 ... -0.8244746 1.1969117 1.0410454 ] [ 1.2157927 -1.0634719 1.3368837 ... -2.5441706 1.2270743 -2.447886 ]] ... [[ 0.4614885 -0.5228775 0.17491247 ... 0.24664016 -1.3993455 -1.9617305 ] [ 0.6586598 1.1688825 0.1014086 ... -1.8691628 1.5104367 0.9459306 ] [-1.0507883 -0.78403825 0.09060533 ... 2.159345 0.46816465 -0.8683186 ]] [[-1.3797708 -1.3358241 -0.9122222 ... 1.6292142 0.4167604 0.70678157] [ 0.2013983 0.02970881 -0.8887366 ... -2.2324088 1.1766237 0.8277549 ] [-1.5765148 -1.1812196 -0.6737005 ... -0.09633973 -2.118369 1.4787941 ]] [[-0.9684002 -1.0606445 -0.74958557 ... 0.81535757 1.141359 -1.2509687 ] [-0.6639661 1.0263422 0.31216773 ... 0.6584608 -1.2363724 -0.17711489] [-1.3406131 0.5246712 0.630839 ... 0.05838901 0.69910413 0.9491118 ]]]; ov_res: [[[ 1.0136216 0.7769642 -0.5658874 ... 1.4277079 0.39290926 0.4995503 ] [ 0.92236096 0.2704559 -0.37529483 ... 0.1182375 -0.4528573 0.80461234] [-1.3720533 0.79884064 0.38496247 ... 
-0.38230276 0.32703212 -0.47447276]] [[-0.18727286 0.5139609 0.23273587 ... 0.43545806 -2.5086741 0.04295025] [ 0.82690233 -0.8988926 0.34358203 ... 0.8659305 1.4529116 0.5286747 ] [-1.394024 -1.63587 -2.0323572 ... -0.9872573 1.6627955 0.5564969 ]] [[-1.3170624 -2.1914713 -0.05678767 ... 0.5408372 0.32803747 -0.55224925] [-0.6136352 0.5609545 -0.60671777 ... -0.8244746 1.1969117 1.0410454 ] [ 1.2157927 -1.0634719 1.3368837 ... -2.5441706 1.2270743 -2.447886 ]] ... [[ 0.4614885 -0.5228775 0.17491247 ... 0.24664016 -1.3993455 -1.9617305 ] [ 0.6586598 1.1688825 0.1014086 ... -1.8691628 1.5104367 0.9459306 ] [-1.0507883 -0.78403825 0.09060533 ... 2.159345 0.46816465 -0.8683186 ]] [[-1.3797708 -1.3358241 -0.9122222 ... 1.6292142 0.4167604 0.70678157] [ 0.2013983 0.02970881 -0.8887366 ... -2.2324088 1.1766237 0.8277549 ] [-1.5765148 -1.1812196 -0.6737005 ... -0.09633973 -2.118369 1.4787941 ]] [[-0.9684002 -1.0606445 -0.74958557 ... 0.81535757 1.141359 -1.2509687 ] [-0.6639661 1.0263422 0.31216773 ... 0.6584608 -1.2363724 -0.17711489] [-1.3406131 0.5246712 0.630839 ... 0.05838901 0.69910413 0.9491118 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4641.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.14138466 0.46147388 0.27532178 ... 1.2370262 -0.587406 -0.7046906 ] [ 0.6475193 0.95290047 0.5266572 ... 1.3154384 -0.8531187 -0.2928319 ] [ 1.060566 0.73054427 -1.6844559 ... -0.07461262 -0.34903753 -0.05989174] ... [-0.00768006 -1.1048306 0.486697 ... 1.4682543 1.1519418 -0.5651193 ] [ 0.75263125 0.63756466 -0.81248695 ... -0.27300268 -1.9832468 0.15012191] [-0.34770316 -0.01844406 0.20336398 ... -0.7462206 -1.1793418 -2.3309128 ]] [[-1.2329296 -1.8539525 0.14203493 ... -0.95139366 -1.1391549 1.2055622 ] [-1.270256 1.7743068 -1.8818331 ... 0.35392085 0.05670176 -1.1308998 ] [-0.19430496 -0.88329077 -0.6589012 ... -0.20876724 -0.00327664 1.0503435 ] ... [ 0.01528536 -0.78153974 0.36317533 ... -0.4379915 -0.09200331 -0.7821999 ] [-0.8209277 -0.81545764 -0.35440436 ... -1.7526275 -0.763929 -0.810329 ] [-0.3395724 -0.34686753 -1.9701031 ... 0.52926606 0.02553844 -1.0614188 ]] [[-0.23461588 -0.52148336 -0.668216 ... -0.9254344 -0.35449982 -0.12116414] [-1.395453 0.20565452 -0.28887105 ... -1.9876786 2.04934 0.07048288] [ 0.7282558 -1.2903836 -1.3109113 ... 0.16699375 -1.0846093 0.9193949 ] ... [-1.0999292 0.40570942 0.10126695 ... 0.05863576 0.6627634 -1.0906436 ] [ 2.669234 -0.4824474 -0.11005449 ... -0.8644979 -0.28315744 0.11123417] [-0.8297995 0.303727 -0.46059418 ... -1.340774 -0.8280705 0.23487353]] ... [[ 0.7569165 0.6758206 0.7321753 ... 1.2048095 0.7329082 -0.19450372] [-0.797421 -1.1873937 -0.05875146 ... 
0.09786654 0.4561411 0.3011695 ] [-1.0345205 -1.05137 -0.22197546 ... 1.1433665 -0.3490799 -0.31675455] ... [-0.79763806 -0.15292452 -2.5502427 ... 1.0986105 -0.67237324 -0.83497185] [-2.0751288 -0.28743488 0.2433052 ... 0.39262483 -0.9329891 0.09434114] [ 1.2233698 -0.52695835 -1.6441317 ... 0.05044741 -0.36075497 0.14519061]] [[ 1.0276405 1.4208971 -0.64458954 ... -0.50257576 -1.2494947 0.5402077 ] [ 0.89376396 -0.52997535 0.09386942 ... 0.71680456 -0.22651985 -0.5477775 ] [ 0.3866749 0.36140862 0.79863805 ... -1.14954 -0.36897588 0.16949615] ... [ 0.78900206 -1.1884668 -0.7316476 ... -0.5099147 -0.3094313 -0.9909599 ] [ 1.0638465 0.96944475 -1.0677456 ... -0.5103854 0.7982329 -0.1316321 ] [-1.2123941 0.9700889 -2.1202223 ... -1.411616 -2.2289855 -0.6848746 ]] [[ 1.0784866 -1.4316422 0.23588157 ... -0.79600364 0.27662286 0.9204831 ] [ 0.8625305 1.8216473 -0.6336418 ... 0.39710367 1.9633931 -1.6984793 ] [ 0.13479641 0.43274885 1.8950164 ... 1.1136165 -0.25523385 2.4208746 ] ... [-0.08769172 -0.13485079 0.3973099 ... -1.2349634 -0.18918052 1.0775918 ] [ 0.0620841 2.2541406 0.47282344 ... 1.258493 -0.99446 -0.15982015] [-0.01537954 0.94690925 1.0911494 ... 0.50327516 0.65553737 0.34149775]]]; ov_res: [[[ 0.14138466 0.46147388 0.27532178 ... 1.2370262 -0.587406 -0.7046906 ] [ 0.6475193 0.95290047 0.5266572 ... 1.3154384 -0.8531187 -0.2928319 ] [ 1.060566 0.73054427 -1.6844559 ... -0.07461262 -0.34903753 -0.05989174] ... [-0.00768006 -1.1048306 0.486697 ... 1.4682543 1.1519418 -0.5651193 ] [ 0.75263125 0.63756466 -0.81248695 ... -0.27300268 -1.9832468 0.15012191] [-0.34770316 -0.01844406 0.20336398 ... -0.7462206 -1.1793418 -2.3309128 ]] [[-1.2329296 -1.8539525 0.14203493 ... -0.95139366 -1.1391549 1.2055622 ] [-1.270256 1.7743068 -1.8818331 ... 0.35392085 0.05670176 -1.1308998 ] [-0.19430496 -0.88329077 -0.6589012 ... -0.20876724 -0.00327664 1.0503435 ] ... [ 0.01528536 -0.78153974 0.36317533 ... 
-0.4379915 -0.09200331 -0.7821999 ] [-0.8209277 -0.81545764 -0.35440436 ... -1.7526275 -0.763929 -0.810329 ] [-0.3395724 -0.34686753 -1.9701031 ... 0.52926606 0.02553844 -1.0614188 ]] [[-0.23461588 -0.52148336 -0.668216 ... -0.9254344 -0.35449982 -0.12116414] [-1.395453 0.20565452 -0.28887105 ... -1.9876786 2.04934 0.07048288] [ 0.7282558 -1.2903836 -1.3109113 ... 0.16699375 -1.0846093 0.9193949 ] ... [-1.0999292 0.40570942 0.10126695 ... 0.05863576 0.6627634 -1.0906436 ] [ 2.669234 -0.4824474 -0.11005449 ... -0.8644979 -0.28315744 0.11123417] [-0.8297995 0.303727 -0.46059418 ... -1.340774 -0.8280705 0.23487353]] ... [[ 0.7569165 0.6758206 0.7321753 ... 1.2048095 0.7329082 -0.19450372] [-0.797421 -1.1873937 -0.05875146 ... 0.09786654 0.4561411 0.3011695 ] [-1.0345205 -1.05137 -0.22197546 ... 1.1433665 -0.3490799 -0.31675455] ... [-0.79763806 -0.15292452 -2.5502427 ... 1.0986105 -0.67237324 -0.83497185] [-2.0751288 -0.28743488 0.2433052 ... 0.39262483 -0.9329891 0.09434114] [ 1.2233698 -0.52695835 -1.6441317 ... 0.05044741 -0.36075497 0.14519061]] [[ 1.0276405 1.4208971 -0.64458954 ... -0.50257576 -1.2494947 0.5402077 ] [ 0.89376396 -0.52997535 0.09386942 ... 0.71680456 -0.22651985 -0.5477775 ] [ 0.3866749 0.36140862 0.79863805 ... -1.14954 -0.36897588 0.16949615] ... [ 0.78900206 -1.1884668 -0.7316476 ... -0.5099147 -0.3094313 -0.9909599 ] [ 1.0638465 0.96944475 -1.0677456 ... -0.5103854 0.7982329 -0.1316321 ] [-1.2123941 0.9700889 -2.1202223 ... -1.411616 -2.2289855 -0.6848746 ]] [[ 1.0784866 -1.4316422 0.23588157 ... -0.79600364 0.27662286 0.9204831 ] [ 0.8625305 1.8216473 -0.6336418 ... 0.39710367 1.9633931 -1.6984793 ] [ 0.13479641 0.43274885 1.8950164 ... 1.1136165 -0.25523385 2.4208746 ] ... [-0.08769172 -0.13485079 0.3973099 ... -1.2349634 -0.18918052 1.0775918 ] [ 0.0620841 2.2541406 0.47282344 ... 1.258493 -0.99446 -0.15982015] [-0.01537954 0.94690925 1.0911494 ... 0.50327516 0.65553737 0.34149775]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4643.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[-4.7968480e-01 -1.1985589e+00 5.1092714e-01 ... -1.6893917e-01 6.4930475e-01 -4.8625979e-01] [-4.8269880e-01 5.1215816e-01 1.1645528e+00 ... -9.2681706e-01 -1.8273327e+00 9.4111254e-03] [ 6.2650287e-01 2.4161341e+00 -1.8091685e-01 ... -1.0336978e+00 -4.2301747e-01 -1.2888997e+00] ... [-6.7874469e-02 -1.0567565e+00 5.6969967e-02 ... 2.2054493e-01 -8.6399978e-01 2.1167943e-02] [-9.8999083e-01 -1.4915243e+00 -8.7027007e-01 ... -9.6385205e-01 1.8379632e-01 -1.6322863e+00] [-7.0800340e-01 1.2993258e-01 -5.6284275e-02 ... -1.1726459e+00 -5.9179115e-01 1.8259218e+00]] [[-1.1385306e+00 9.9270868e-01 -4.4359404e-01 ... 4.5690554e-01 6.6092066e-02 -3.8960719e-01] [-7.6583952e-01 1.0662405e+00 -9.9894637e-01 ... 1.2308929e-01 -8.1937099e-01 -7.1995121e-01] [ 7.7543207e-03 -7.9987413e-01 9.8363835e-01 ... 5.1054382e-01 7.5526196e-01 8.3751855e-03] ... [ 1.0349909e+00 -4.2538428e-01 -7.5924343e-01 ... 4.1272022e-02 5.8473778e-01 -1.9698817e+00] [-2.8056985e-01 -5.0210685e-01 -5.1060385e-01 ... -8.7880743e-01 1.4182963e+00 2.3247282e-01] [-4.0584090e-01 4.0643680e-01 5.0299245e-01 ... 4.7730482e-01 1.5991305e-01 3.5732663e+00]] [[-2.0684540e-01 -1.6512675e+00 2.6257701e-02 ... 9.2353725e-01 1.2792078e+00 1.5391387e+00] [-3.8441315e-01 1.9213907e+00 9.5962858e-01 ... 1.1687293e+00 8.9131963e-01 3.5062280e-01] [-8.7570399e-01 -1.1445743e+00 1.2072766e+00 ... 5.8078158e-01 -1.5998112e+00 3.1816787e-01] ... [-2.5032907e+00 3.5181966e-01 -3.5775194e-01 ... 
2.0389563e-01 1.1725203e-01 -1.2611308e+00] [-1.3426036e-01 1.3274802e+00 1.2709080e+00 ... -9.1429549e-01 9.3052578e-01 1.1963648e+00] [-1.3472717e+00 1.4929192e+00 8.7536663e-01 ... -5.0113056e-02 -4.8821297e-01 -2.8589353e-01]] ... [[ 2.7916813e+00 -1.5148982e+00 8.0335802e-01 ... -1.8189022e-01 -1.3262105e+00 -7.5633395e-01] [ 2.1567720e-01 1.4906563e+00 3.3038414e-01 ... 1.0285949e+00 -1.3552286e-01 -2.1865907e+00] [-1.3478909e+00 -1.2417272e+00 -6.6846828e-03 ... -1.7106771e+00 -6.8330896e-01 1.4932187e+00] ... [ 4.0732723e-01 -2.2706862e-01 1.2461802e+00 ... -9.3389988e-01 -8.2544428e-01 -3.4355009e-01] [-4.4965410e-01 -1.5545702e-01 4.1977670e-02 ... -7.9089218e-01 3.2237059e-01 -4.6144763e-01] [-1.3713706e-01 9.8074786e-02 -1.5751184e+00 ... -3.2767421e-01 1.6226743e-01 -9.1146445e-01]] [[ 1.6003170e+00 1.9947329e+00 3.7887087e-01 ... -1.0540909e+00 7.9745770e-01 -1.5662022e+00] [ 2.9133782e-03 4.9800852e-01 -1.1246411e+00 ... -1.1103841e+00 -9.5113117e-01 -1.6477634e-01] [-1.1998475e+00 -3.2130581e-01 6.3880974e-01 ... 3.0628610e-01 2.2125649e+00 8.4985983e-01] ... [ 8.8581449e-01 -5.1000041e-01 -1.7386965e+00 ... -1.3484471e-01 -1.4843796e+00 -6.9702633e-02] [ 7.0835316e-01 6.8061453e-01 2.2756389e-01 ... 3.8996759e-01 -5.7287633e-01 2.0844569e+00] [ 5.2315551e-01 -3.6161464e-01 -4.6466428e-01 ... -2.9818085e-01 4.9704343e-01 5.7347745e-01]] [[ 1.3124967e+00 6.8393856e-02 7.4059296e-01 ... 8.9783949e-01 3.2459247e-01 -6.3619739e-01] [-2.6850384e-01 1.3328476e+00 -5.4886866e-01 ... -1.1127534e-01 -6.2976187e-01 -2.1837363e+00] [-1.1424444e+00 -1.2484471e+00 2.2826467e-01 ... -8.1889832e-01 1.1345001e-01 -1.4341405e-01] ... [ 4.2120644e-01 6.1926126e-01 6.5208387e-01 ... -5.5881709e-01 1.3911874e+00 -1.6363274e+00] [ 1.1234338e+00 -1.2688742e+00 -4.2035246e-01 ... -9.1750562e-01 1.5083764e+00 5.7585365e-01] [-3.0867890e-02 -6.2037832e-01 -4.3435434e-01 ... 
1.1731496e+00 2.5032231e-01 -1.3147334e+00]]]; ov_res: [[[-4.7968480e-01 -1.1985589e+00 5.1092714e-01 ... -1.6893917e-01 6.4930475e-01 -4.8625979e-01] [-4.8269880e-01 5.1215816e-01 1.1645528e+00 ... -9.2681706e-01 -1.8273327e+00 9.4111254e-03] [ 6.2650287e-01 2.4161341e+00 -1.8091685e-01 ... -1.0336978e+00 -4.2301747e-01 -1.2888997e+00] ... [-6.7874469e-02 -1.0567565e+00 5.6969967e-02 ... 2.2054493e-01 -8.6399978e-01 2.1167943e-02] [-9.8999083e-01 -1.4915243e+00 -8.7027007e-01 ... -9.6385205e-01 1.8379632e-01 -1.6322863e+00] [-7.0800340e-01 1.2993258e-01 -5.6284275e-02 ... -1.1726459e+00 -5.9179115e-01 1.8259218e+00]] [[-1.1385306e+00 9.9270868e-01 -4.4359404e-01 ... 4.5690554e-01 6.6092066e-02 -3.8960719e-01] [-7.6583952e-01 1.0662405e+00 -9.9894637e-01 ... 1.2308929e-01 -8.1937099e-01 -7.1995121e-01] [ 7.7543207e-03 -7.9987413e-01 9.8363835e-01 ... 5.1054382e-01 7.5526196e-01 8.3751855e-03] ... [ 1.0349909e+00 -4.2538428e-01 -7.5924343e-01 ... 4.1272022e-02 5.8473778e-01 -1.9698817e+00] [-2.8056985e-01 -5.0210685e-01 -5.1060385e-01 ... -8.7880743e-01 1.4182963e+00 2.3247282e-01] [-4.0584090e-01 4.0643680e-01 5.0299245e-01 ... 4.7730482e-01 1.5991305e-01 3.5732663e+00]] [[-2.0684540e-01 -1.6512675e+00 2.6257701e-02 ... 9.2353725e-01 1.2792078e+00 1.5391387e+00] [-3.8441315e-01 1.9213907e+00 9.5962858e-01 ... 1.1687293e+00 8.9131963e-01 3.5062280e-01] [-8.7570399e-01 -1.1445743e+00 1.2072766e+00 ... 5.8078158e-01 -1.5998112e+00 3.1816787e-01] ... [-2.5032907e+00 3.5181966e-01 -3.5775194e-01 ... 2.0389563e-01 1.1725203e-01 -1.2611308e+00] [-1.3426036e-01 1.3274802e+00 1.2709080e+00 ... -9.1429549e-01 9.3052578e-01 1.1963648e+00] [-1.3472717e+00 1.4929192e+00 8.7536663e-01 ... -5.0113056e-02 -4.8821297e-01 -2.8589353e-01]] ... [[ 2.7916813e+00 -1.5148982e+00 8.0335802e-01 ... -1.8189022e-01 -1.3262105e+00 -7.5633395e-01] [ 2.1567720e-01 1.4906563e+00 3.3038414e-01 ... 1.0285949e+00 -1.3552286e-01 -2.1865907e+00] [-1.3478909e+00 -1.2417272e+00 -6.6846828e-03 ... 
-1.7106771e+00 -6.8330896e-01 1.4932187e+00] ... [ 4.0732723e-01 -2.2706862e-01 1.2461802e+00 ... -9.3389988e-01 -8.2544428e-01 -3.4355009e-01] [-4.4965410e-01 -1.5545702e-01 4.1977670e-02 ... -7.9089218e-01 3.2237059e-01 -4.6144763e-01] [-1.3713706e-01 9.8074786e-02 -1.5751184e+00 ... -3.2767421e-01 1.6226743e-01 -9.1146445e-01]] [[ 1.6003170e+00 1.9947329e+00 3.7887087e-01 ... -1.0540909e+00 7.9745770e-01 -1.5662022e+00] [ 2.9133782e-03 4.9800852e-01 -1.1246411e+00 ... -1.1103841e+00 -9.5113117e-01 -1.6477634e-01] [-1.1998475e+00 -3.2130581e-01 6.3880974e-01 ... 3.0628610e-01 2.2125649e+00 8.4985983e-01] ... [ 8.8581449e-01 -5.1000041e-01 -1.7386965e+00 ... -1.3484471e-01 -1.4843796e+00 -6.9702633e-02] [ 7.0835316e-01 6.8061453e-01 2.2756389e-01 ... 3.8996759e-01 -5.7287633e-01 2.0844569e+00] [ 5.2315551e-01 -3.6161464e-01 -4.6466428e-01 ... -2.9818085e-01 4.9704343e-01 5.7347745e-01]] [[ 1.3124967e+00 6.8393856e-02 7.4059296e-01 ... 8.9783949e-01 3.2459247e-01 -6.3619739e-01] [-2.6850384e-01 1.3328476e+00 -5.4886866e-01 ... -1.1127534e-01 -6.2976187e-01 -2.1837363e+00] [-1.1424444e+00 -1.2484471e+00 2.2826467e-01 ... -8.1889832e-01 1.1345001e-01 -1.4341405e-01] ... [ 4.2120644e-01 6.1926126e-01 6.5208387e-01 ... -5.5881709e-01 1.3911874e+00 -1.6363274e+00] [ 1.1234338e+00 -1.2688742e+00 -4.2035246e-01 ... -9.1750562e-01 1.5083764e+00 5.7585365e-01] [-3.0867890e-02 -6.2037832e-01 -4.3435434e-01 ... 1.1731496e+00 2.5032231e-01 -1.3147334e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4645.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0.40896422 -0.0766992 0.28151542 ... 0.05791558 1.3719046 0.6745905 ] [-1.0031786 -1.9792356 -0.57298064 ... 0.44129804 1.3600336 -2.452891 ] [ 0.18685491 1.5399406 -1.1575716 ... 0.05971938 1.4818134 -1.2197526 ] ... [-1.3623083 1.1679537 0.7033818 ... 0.39117292 -0.1158541 -0.0634171 ] [ 1.2606044 -0.35574517 -0.25930956 ... 0.6214641 -0.0989165 1.0353208 ] [ 0.6376196 0.376063 1.0481846 ... -0.3521707 1.1414015 1.6439283 ]] [[-2.0611804 0.89701355 0.45495707 ... 0.04164238 0.18940124 -0.25643876] [-0.93850356 0.33766326 -2.725166 ... -0.11624324 -1.2492347 0.8622992 ] [ 0.9750966 -2.268877 -0.3762072 ... 1.0700785 1.088431 -1.8660383 ] ... [-0.0856791 2.350661 0.55467474 ... 0.14369912 1.155421 1.3914025 ] [ 0.76322013 0.8462564 0.33585963 ... 0.31730682 0.10943683 -1.0870614 ] [ 0.9677588 0.6324588 0.45555377 ... -0.9776409 0.13439454 -0.10877748]] [[ 0.6409628 -0.90501696 -0.9766896 ... -2.834844 -0.36148036 0.2729842 ] [-0.799143 -0.45759493 0.03951161 ... 0.5857921 1.2135876 -0.16619633] [ 1.1324282 -0.23789665 1.1390615 ... 1.0908308 -0.6755719 -0.9814267 ] ... [-0.9654209 0.5385102 0.21518841 ... -1.2567855 -0.7929578 0.38797292] [-1.115846 -0.0766016 0.18352556 ... 0.5869514 0.12008651 -0.49417248] [-1.7108264 -1.0053241 -0.8684898 ... -1.0157037 -0.49914396 2.3020656 ]] ... [[ 0.42845115 1.9043803 1.130957 ... -0.57412314 0.5051238 1.0655785 ] [-1.0178992 0.95724565 -1.5437177 ... 0.10214615 2.2719622 0.2809383 ] [-2.0981681 -0.17308937 -1.0076503 ... 
0.3821896 -1.1704483 -1.0181974 ] ... [ 0.18681262 1.006549 0.46246144 ... -0.42995802 1.056445 1.1235387 ] [-0.49013805 -0.2787628 0.40695664 ... -1.8167809 -0.07164892 1.8061445 ] [-0.6004015 -1.9178283 0.4008836 ... -1.2237921 1.5957637 0.9173894 ]] [[-2.3856757 1.0509763 0.32639986 ... -0.9737188 0.39860374 0.7918601 ] [-1.9092562 -0.7497477 -0.21066138 ... 0.4730451 -1.9209784 1.3189694 ] [ 1.5502187 -0.7043211 -1.3211923 ... -0.813385 -0.18245162 1.7245047 ] ... [ 1.7188311 0.03532006 -1.1004932 ... -0.48060372 -1.9894059 -0.09185771] [-0.4714721 -0.5660783 0.35151294 ... 0.9077243 0.9729366 -0.60979617] [-0.15336348 0.8825789 -0.67257166 ... -1.0662329 -0.54411775 1.5124452 ]] [[-0.92917883 0.7197797 -0.6878793 ... 0.8749303 -0.06257168 0.27807483] [-1.8492589 1.5558189 -0.20991296 ... -1.2670456 -0.2034782 -0.26124907] [ 0.13401347 -1.3745421 0.63802016 ... 0.7375049 1.209301 1.1113201 ] ... [ 0.6560719 0.43876982 -0.3750575 ... -1.2677399 -1.2320746 0.97664577] [-1.2353141 0.04887404 -0.76903933 ... 0.8976665 0.54678917 -0.201991 ] [-0.16967742 0.87693197 -1.34593 ... 1.1379863 -2.1110823 0.08104109]]]; ov_res: [[[ 0.40896422 -0.0766992 0.28151542 ... 0.05791558 1.3719046 0.6745905 ] [-1.0031786 -1.9792356 -0.57298064 ... 0.44129804 1.3600336 -2.452891 ] [ 0.18685491 1.5399406 -1.1575716 ... 0.05971938 1.4818134 -1.2197526 ] ... [-1.3623083 1.1679537 0.7033818 ... 0.39117292 -0.1158541 -0.0634171 ] [ 1.2606044 -0.35574517 -0.25930956 ... 0.6214641 -0.0989165 1.0353208 ] [ 0.6376196 0.376063 1.0481846 ... -0.3521707 1.1414015 1.6439283 ]] [[-2.0611804 0.89701355 0.45495707 ... 0.04164238 0.18940124 -0.25643876] [-0.93850356 0.33766326 -2.725166 ... -0.11624324 -1.2492347 0.8622992 ] [ 0.9750966 -2.268877 -0.3762072 ... 1.0700785 1.088431 -1.8660383 ] ... [-0.0856791 2.350661 0.55467474 ... 0.14369912 1.155421 1.3914025 ] [ 0.76322013 0.8462564 0.33585963 ... 0.31730682 0.10943683 -1.0870614 ] [ 0.9677588 0.6324588 0.45555377 ... 
-0.9776409 0.13439454 -0.10877748]] [[ 0.6409628 -0.90501696 -0.9766896 ... -2.834844 -0.36148036 0.2729842 ] [-0.799143 -0.45759493 0.03951161 ... 0.5857921 1.2135876 -0.16619633] [ 1.1324282 -0.23789665 1.1390615 ... 1.0908308 -0.6755719 -0.9814267 ] ... [-0.9654209 0.5385102 0.21518841 ... -1.2567855 -0.7929578 0.38797292] [-1.115846 -0.0766016 0.18352556 ... 0.5869514 0.12008651 -0.49417248] [-1.7108264 -1.0053241 -0.8684898 ... -1.0157037 -0.49914396 2.3020656 ]] ... [[ 0.42845115 1.9043803 1.130957 ... -0.57412314 0.5051238 1.0655785 ] [-1.0178992 0.95724565 -1.5437177 ... 0.10214615 2.2719622 0.2809383 ] [-2.0981681 -0.17308937 -1.0076503 ... 0.3821896 -1.1704483 -1.0181974 ] ... [ 0.18681262 1.006549 0.46246144 ... -0.42995802 1.056445 1.1235387 ] [-0.49013805 -0.2787628 0.40695664 ... -1.8167809 -0.07164892 1.8061445 ] [-0.6004015 -1.9178283 0.4008836 ... -1.2237921 1.5957637 0.9173894 ]] [[-2.3856757 1.0509763 0.32639986 ... -0.9737188 0.39860374 0.7918601 ] [-1.9092562 -0.7497477 -0.21066138 ... 0.4730451 -1.9209784 1.3189694 ] [ 1.5502187 -0.7043211 -1.3211923 ... -0.813385 -0.18245162 1.7245047 ] ... [ 1.7188311 0.03532006 -1.1004932 ... -0.48060372 -1.9894059 -0.09185771] [-0.4714721 -0.5660783 0.35151294 ... 0.9077243 0.9729366 -0.60979617] [-0.15336348 0.8825789 -0.67257166 ... -1.0662329 -0.54411775 1.5124452 ]] [[-0.92917883 0.7197797 -0.6878793 ... 0.8749303 -0.06257168 0.27807483] [-1.8492589 1.5558189 -0.20991296 ... -1.2670456 -0.2034782 -0.26124907] [ 0.13401347 -1.3745421 0.63802016 ... 0.7375049 1.209301 1.1113201 ] ... [ 0.6560719 0.43876982 -0.3750575 ... -1.2677399 -1.2320746 0.97664577] [-1.2353141 0.04887404 -0.76903933 ... 0.8976665 0.54678917 -0.201991 ] [-0.16967742 0.87693197 -1.34593 ... 1.1379863 -2.1110823 0.08104109]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4647.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 5.17498612e-01 8.51933420e-01 -4.57606107e-01 ... -9.71463084e-01 1.05810381e-01 -5.25322437e-01] [-2.67361123e-02 2.85438120e-01 -1.46266329e+00 ... -1.21382606e+00 1.40584862e+00 1.62094402e+00] [ 8.77322912e-01 -1.32492828e+00 -6.51082098e-01 ... -4.83474851e-01 1.96236455e+00 1.23731947e+00] ... [-4.24110711e-01 -1.19185209e+00 -6.20729744e-01 ... -9.21446458e-02 -8.26767206e-01 2.35864997e+00] [ 6.97996020e-01 5.30167997e-01 -2.08362192e-01 ... -8.44612479e-01 6.04224950e-03 -9.22783375e-01] [-4.58041549e-01 -3.34762901e-01 -1.59050211e-01 ... -5.59917510e-01 -5.70138752e-01 -1.00968170e+00]] [[ 6.47365093e-01 -7.58912981e-01 -3.50212812e-01 ... -4.70745265e-01 -2.92900771e-01 9.42264974e-01] [-7.83493161e-01 -4.80709076e-01 6.37049258e-01 ... -1.02786851e+00 9.64254856e-01 -8.24503005e-02] [ 1.12470579e+00 4.18563038e-02 -3.61527801e-01 ... 6.32328808e-01 7.28527844e-01 -3.60207893e-02] ... [-7.60918409e-02 4.75366831e-01 7.03951895e-01 ... -5.22817552e-01 1.63724577e+00 2.55693942e-01] [-3.67180943e-01 1.44816905e-01 -6.80629432e-01 ... -8.48946214e-01 -1.06348574e+00 -8.57633129e-02] [-1.41671467e+00 5.00428259e-01 -1.60460591e-01 ... 5.68922400e-01 2.30849177e-01 -2.11871117e-01]] [[-5.94847679e-01 -5.54075420e-01 7.37175524e-01 ... 1.26164779e-01 -9.23310697e-01 -1.62789837e-01] [ 1.28647673e+00 3.00536513e-01 -1.42894721e+00 ... 1.20765328e+00 7.42289066e-01 -1.19654484e-01] [ 1.45158577e+00 7.14295328e-01 8.24198127e-01 ... 
-1.53969347e-01 3.07245314e-01 -8.43583763e-01] ... [ 1.45638740e+00 8.28943789e-01 3.30621272e-01 ... -5.31307697e-01 3.58142465e-01 7.35092819e-01] [ 9.19571936e-01 8.20209563e-01 1.65963292e-01 ... 1.19984305e+00 -4.15463686e-01 -1.47156298e+00] [-5.15744448e-01 -1.89667785e+00 5.52336574e-01 ... -1.76190114e+00 1.79497886e+00 1.27701795e+00]] ... [[ 1.99198020e+00 2.13924602e-01 1.80159581e+00 ... 1.09582268e-01 9.55442309e-01 -5.27249873e-01] [ 7.68889487e-01 4.53864902e-01 -2.19905585e-01 ... 3.56957823e-01 1.13393486e-01 2.30001307e+00] [ 2.92370051e-01 4.09210443e-01 -9.19992998e-02 ... 6.20558321e-01 1.04022110e+00 8.71423781e-01] ... [ 3.71720463e-01 -1.45671535e+00 1.05911303e+00 ... 4.11315858e-01 8.27964425e-01 9.23458934e-01] [-1.63946494e-01 7.46522725e-01 8.38868558e-01 ... 1.36320427e-01 -6.37335718e-01 5.48308948e-03] [ 1.71278286e+00 1.04786384e+00 -3.59847397e-03 ... 9.13971961e-01 1.53203797e+00 1.94743991e-01]] [[-5.75894058e-01 1.42212665e+00 -9.00558174e-01 ... 1.25570905e+00 -1.64580584e+00 -1.17112911e+00] [ 6.09762818e-02 -9.62346196e-01 -2.32071318e-02 ... -3.88070613e-01 -1.16079152e+00 -1.42163181e+00] [-1.51131642e+00 8.70679021e-02 1.27274346e+00 ... 2.14606428e+00 1.03787577e+00 1.34881794e+00] ... [ 1.38335073e+00 -5.24539411e-01 6.49994493e-01 ... 1.25963438e+00 -9.60909724e-01 -6.20890379e-01] [-1.39776695e+00 -6.82233870e-01 4.50074703e-01 ... 4.16189760e-01 5.59125364e-01 -9.39707756e-02] [-8.46129477e-01 7.56743431e-01 3.88131648e-01 ... -1.90611923e+00 -3.91402721e-01 -2.90773487e+00]] [[-3.49199593e-01 4.80787270e-02 -2.24903440e+00 ... 2.04810190e+00 -2.25744176e+00 2.11217189e+00] [ 1.57943177e+00 6.23789847e-01 4.11153495e-01 ... 2.04508567e+00 -4.09498692e-01 -5.15141189e-01] [-1.38753176e+00 1.93941176e+00 -1.32923812e-01 ... 1.95145532e-01 -7.34536529e-01 3.64444077e-01] ... [-1.81601942e+00 -1.10887811e-01 1.04752772e-01 ... 
3.02956611e-01 1.29522693e+00 -1.35740256e+00] [-1.11260474e-01 -1.51972628e+00 3.19515914e-01 ... -1.06353068e+00 -1.37621522e+00 -5.16612470e-01] [ 1.57665670e+00 1.26548740e-03 9.14035499e-01 ... 1.12333095e+00 5.97211301e-01 2.25841546e+00]]]; ov_res: [[[ 5.17498612e-01 8.51933420e-01 -4.57606107e-01 ... -9.71463084e-01 1.05810381e-01 -5.25322437e-01] [-2.67361123e-02 2.85438120e-01 -1.46266329e+00 ... -1.21382606e+00 1.40584862e+00 1.62094402e+00] [ 8.77322912e-01 -1.32492828e+00 -6.51082098e-01 ... -4.83474851e-01 1.96236455e+00 1.23731947e+00] ... [-4.24110711e-01 -1.19185209e+00 -6.20729744e-01 ... -9.21446458e-02 -8.26767206e-01 2.35864997e+00] [ 6.97996020e-01 5.30167997e-01 -2.08362192e-01 ... -8.44612479e-01 6.04224950e-03 -9.22783375e-01] [-4.58041549e-01 -3.34762901e-01 -1.59050211e-01 ... -5.59917510e-01 -5.70138752e-01 -1.00968170e+00]] [[ 6.47365093e-01 -7.58912981e-01 -3.50212812e-01 ... -4.70745265e-01 -2.92900771e-01 9.42264974e-01] [-7.83493161e-01 -4.80709076e-01 6.37049258e-01 ... -1.02786851e+00 9.64254856e-01 -8.24503005e-02] [ 1.12470579e+00 4.18563038e-02 -3.61527801e-01 ... 6.32328808e-01 7.28527844e-01 -3.60207893e-02] ... [-7.60918409e-02 4.75366831e-01 7.03951895e-01 ... -5.22817552e-01 1.63724577e+00 2.55693942e-01] [-3.67180943e-01 1.44816905e-01 -6.80629432e-01 ... -8.48946214e-01 -1.06348574e+00 -8.57633129e-02] [-1.41671467e+00 5.00428259e-01 -1.60460591e-01 ... 5.68922400e-01 2.30849177e-01 -2.11871117e-01]] [[-5.94847679e-01 -5.54075420e-01 7.37175524e-01 ... 1.26164779e-01 -9.23310697e-01 -1.62789837e-01] [ 1.28647673e+00 3.00536513e-01 -1.42894721e+00 ... 1.20765328e+00 7.42289066e-01 -1.19654484e-01] [ 1.45158577e+00 7.14295328e-01 8.24198127e-01 ... -1.53969347e-01 3.07245314e-01 -8.43583763e-01] ... [ 1.45638740e+00 8.28943789e-01 3.30621272e-01 ... -5.31307697e-01 3.58142465e-01 7.35092819e-01] [ 9.19571936e-01 8.20209563e-01 1.65963292e-01 ... 
1.19984305e+00 -4.15463686e-01 -1.47156298e+00] [-5.15744448e-01 -1.89667785e+00 5.52336574e-01 ... -1.76190114e+00 1.79497886e+00 1.27701795e+00]] ... [[ 1.99198020e+00 2.13924602e-01 1.80159581e+00 ... 1.09582268e-01 9.55442309e-01 -5.27249873e-01] [ 7.68889487e-01 4.53864902e-01 -2.19905585e-01 ... 3.56957823e-01 1.13393486e-01 2.30001307e+00] [ 2.92370051e-01 4.09210443e-01 -9.19992998e-02 ... 6.20558321e-01 1.04022110e+00 8.71423781e-01] ... [ 3.71720463e-01 -1.45671535e+00 1.05911303e+00 ... 4.11315858e-01 8.27964425e-01 9.23458934e-01] [-1.63946494e-01 7.46522725e-01 8.38868558e-01 ... 1.36320427e-01 -6.37335718e-01 5.48308948e-03] [ 1.71278286e+00 1.04786384e+00 -3.59847397e-03 ... 9.13971961e-01 1.53203797e+00 1.94743991e-01]] [[-5.75894058e-01 1.42212665e+00 -9.00558174e-01 ... 1.25570905e+00 -1.64580584e+00 -1.17112911e+00] [ 6.09762818e-02 -9.62346196e-01 -2.32071318e-02 ... -3.88070613e-01 -1.16079152e+00 -1.42163181e+00] [-1.51131642e+00 8.70679021e-02 1.27274346e+00 ... 2.14606428e+00 1.03787577e+00 1.34881794e+00] ... [ 1.38335073e+00 -5.24539411e-01 6.49994493e-01 ... 1.25963438e+00 -9.60909724e-01 -6.20890379e-01] [-1.39776695e+00 -6.82233870e-01 4.50074703e-01 ... 4.16189760e-01 5.59125364e-01 -9.39707756e-02] [-8.46129477e-01 7.56743431e-01 3.88131648e-01 ... -1.90611923e+00 -3.91402721e-01 -2.90773487e+00]] [[-3.49199593e-01 4.80787270e-02 -2.24903440e+00 ... 2.04810190e+00 -2.25744176e+00 2.11217189e+00] [ 1.57943177e+00 6.23789847e-01 4.11153495e-01 ... 2.04508567e+00 -4.09498692e-01 -5.15141189e-01] [-1.38753176e+00 1.93941176e+00 -1.32923812e-01 ... 1.95145532e-01 -7.34536529e-01 3.64444077e-01] ... [-1.81601942e+00 -1.10887811e-01 1.04752772e-01 ... 3.02956611e-01 1.29522693e+00 -1.35740256e+00] [-1.11260474e-01 -1.51972628e+00 3.19515914e-01 ... -1.06353068e+00 -1.37621522e+00 -5.16612470e-01] [ 1.57665670e+00 1.26548740e-03 9.14035499e-01 ... 1.12333095e+00 5.97211301e-01 2.25841546e+00]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:2 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4649.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.10487762 -1.6106774 1.371681 ... 0.83345425 0.5687248 -0.2861859 ] [-0.6285852 -0.70105755 0.24835075 ... 1.6441219 -0.4760058 1.6169318 ] [ 0.7324933 -0.07746055 -1.3786876 ... 0.04070047 -0.97700953 -1.2631799 ]] [[-0.6343031 1.2675833 -2.1836896 ... 0.80883944 -0.6613765 0.2545553 ] [-0.2549399 0.48004383 -2.5018737 ... 0.07297678 -0.387719 0.21157522] [-0.88166857 0.2674512 -0.46345997 ... -0.28430715 0.3045981 -0.32874867]] [[-0.23960522 -0.3793871 1.0367433 ... 1.8002352 3.1734526 -0.3640594 ] [-1.0814272 1.8852398 0.24170934 ... -0.05077542 -0.4679785 -0.4918457 ] [ 0.09381199 -0.19225764 1.0028834 ... -0.5399402 -0.74862546 -0.82018995]] ... [[-2.6749635 -1.7656941 -1.1898742 ... 0.71759367 0.2010618 -2.1169949 ] [ 1.3593206 -1.4666288 -0.29576498 ... -0.97578955 -0.2054452 1.1238822 ] [ 1.1929142 -0.01068926 0.11919055 ... -0.58485425 1.0684752 -0.82564735]] [[-0.0334632 0.61703515 2.5010886 ... -0.3301695 0.54116637 1.5006553 ] [-0.9005755 1.699305 -1.2775233 ... -0.5361829 -1.2088228 -0.51818025] [ 1.9328816 -0.57685465 -0.78854877 ... -0.02147226 1.6406112 -1.2845634 ]] [[-0.03019543 -1.0070206 -0.63919395 ... -0.78816825 1.6584163 -0.944577 ] [ 2.0573084 -0.09768949 0.99580234 ... -0.96280223 1.3296413 0.7336871 ] [-0.00887229 0.94353485 -0.08234677 ... -0.18602496 0.41682243 -1.1406099 ]]]; ov_res: [[[ 0.10487762 -1.6106774 1.371681 ... 0.83345425 0.5687248 -0.2861859 ] [-0.6285852 -0.70105755 0.24835075 ... 
1.6441219 -0.4760058 1.6169318 ] [ 0.7324933 -0.07746055 -1.3786876 ... 0.04070047 -0.97700953 -1.2631799 ]] [[-0.6343031 1.2675833 -2.1836896 ... 0.80883944 -0.6613765 0.2545553 ] [-0.2549399 0.48004383 -2.5018737 ... 0.07297678 -0.387719 0.21157522] [-0.88166857 0.2674512 -0.46345997 ... -0.28430715 0.3045981 -0.32874867]] [[-0.23960522 -0.3793871 1.0367433 ... 1.8002352 3.1734526 -0.3640594 ] [-1.0814272 1.8852398 0.24170934 ... -0.05077542 -0.4679785 -0.4918457 ] [ 0.09381199 -0.19225764 1.0028834 ... -0.5399402 -0.74862546 -0.82018995]] ... [[-2.6749635 -1.7656941 -1.1898742 ... 0.71759367 0.2010618 -2.1169949 ] [ 1.3593206 -1.4666288 -0.29576498 ... -0.97578955 -0.2054452 1.1238822 ] [ 1.1929142 -0.01068926 0.11919055 ... -0.58485425 1.0684752 -0.82564735]] [[-0.0334632 0.61703515 2.5010886 ... -0.3301695 0.54116637 1.5006553 ] [-0.9005755 1.699305 -1.2775233 ... -0.5361829 -1.2088228 -0.51818025] [ 1.9328816 -0.57685465 -0.78854877 ... -0.02147226 1.6406112 -1.2845634 ]] [[-0.03019543 -1.0070206 -0.63919395 ... -0.78816825 1.6584163 -0.944577 ] [ 2.0573084 -0.09768949 0.99580234 ... -0.96280223 1.3296413 0.7336871 ] [-0.00887229 0.94353485 -0.08234677 ... -0.18602496 0.41682243 -1.1406099 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:3 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4651.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 1.4637995 -0.77435005 1.5017666 ... -0.49718344 0.65300214 -0.61341786] [-0.77435005 1.5017666 -1.904808 ... 0.65300214 -0.61341786 -0.17079243] [ 1.5017666 -1.904808 -0.7732756 ... -0.61341786 -0.17079243 -0.2617357 ] ... [-1.1384858 -0.63466024 -1.0316449 ... 0.19274019 0.5673797 -0.04363573] [-0.63466024 -1.0316449 -1.0197847 ... 0.5673797 -0.04363573 0.53738976] [-1.0316449 -1.0197847 0.17550007 ... -0.04363573 0.53738976 0.9477986 ]] [[ 0.5991636 -0.79632473 -0.07988203 ... 0.07272699 -0.5341735 0.14046179] [-0.79632473 -0.07988203 0.24895251 ... -0.5341735 0.14046179 -0.3238767 ] [-0.07988203 0.24895251 -1.3361752 ... 0.14046179 -0.3238767 1.0299743 ] ... [-1.7996287 2.409949 0.24419144 ... -1.5421252 -0.2293223 0.5673167 ] [ 2.409949 0.24419144 0.92171884 ... -0.2293223 0.5673167 -1.9616139 ] [ 0.24419144 0.92171884 -0.49688622 ... 0.5673167 -1.9616139 1.1084846 ]] [[-1.2881382 -0.7022532 1.731788 ... 0.6638484 -0.7378081 1.2091323 ] [-0.7022532 1.731788 -0.634328 ... -0.7378081 1.2091323 0.7583466 ] [ 1.731788 -0.634328 -0.68181896 ... 1.2091323 0.7583466 0.77674955] ... [-0.51302874 1.0876005 0.85194767 ... 0.6412429 -0.7329586 2.9108312 ] [ 1.0876005 0.85194767 0.756878 ... -0.7329586 2.9108312 0.5472809 ] [ 0.85194767 0.756878 -0.13043582 ... 2.9108312 0.5472809 0.7001533 ]] ... [[ 1.0194446 -0.10944234 1.4781318 ... 0.22475609 -0.48945427 -0.64707834] [-0.10944234 1.4781318 0.09808259 ... -0.48945427 -0.64707834 0.9915009 ] [ 1.4781318 0.09808259 0.8527859 ... 
-0.64707834 0.9915009 0.9244183 ] ... [ 0.43538988 -0.39453164 -0.3014839 ... -0.23379514 2.1611133 0.46508136] [-0.39453164 -0.3014839 0.7194328 ... 2.1611133 0.46508136 1.0986602 ] [-0.3014839 0.7194328 -0.80349845 ... 0.46508136 1.0986602 -0.90534854]] [[-0.06616415 -0.79647017 -0.7385795 ... 0.85252833 -1.3424976 -1.2112739 ] [-0.79647017 -0.7385795 0.19475564 ... -1.3424976 -1.2112739 -0.4559729 ] [-0.7385795 0.19475564 0.21585949 ... -1.2112739 -0.4559729 -0.26465672] ... [-1.114961 0.09837379 0.25028905 ... 0.5304551 -0.67115533 0.56139094] [ 0.09837379 0.25028905 0.1487069 ... -0.67115533 0.56139094 0.38679093] [ 0.25028905 0.1487069 -0.9924789 ... 0.56139094 0.38679093 -0.01713731]] [[ 1.8094565 -0.5950313 0.48027226 ... 0.6008061 0.5346652 0.18584912] [-0.5950313 0.48027226 1.8949077 ... 0.5346652 0.18584912 0.5047222 ] [ 0.48027226 1.8949077 0.3918507 ... 0.18584912 0.5047222 1.0936074 ] ... [-0.29939678 -0.44923106 0.6234538 ... -0.20551886 -0.15353286 0.3678028 ] [-0.44923106 0.6234538 0.16000924 ... -0.15353286 0.3678028 0.44291228] [ 0.6234538 0.16000924 0.2069164 ... 0.3678028 0.44291228 1.0227342 ]]]; ov_res: [[[ 1.4637995 -0.77435005 1.5017666 ... -0.49718344 0.65300214 -0.61341786] [-0.77435005 1.5017666 -1.904808 ... 0.65300214 -0.61341786 -0.17079243] [ 1.5017666 -1.904808 -0.7732756 ... -0.61341786 -0.17079243 -0.2617357 ] ... [-1.1384858 -0.63466024 -1.0316449 ... 0.19274019 0.5673797 -0.04363573] [-0.63466024 -1.0316449 -1.0197847 ... 0.5673797 -0.04363573 0.53738976] [-1.0316449 -1.0197847 0.17550007 ... -0.04363573 0.53738976 0.9477986 ]] [[ 0.5991636 -0.79632473 -0.07988203 ... 0.07272699 -0.5341735 0.14046179] [-0.79632473 -0.07988203 0.24895251 ... -0.5341735 0.14046179 -0.3238767 ] [-0.07988203 0.24895251 -1.3361752 ... 0.14046179 -0.3238767 1.0299743 ] ... [-1.7996287 2.409949 0.24419144 ... -1.5421252 -0.2293223 0.5673167 ] [ 2.409949 0.24419144 0.92171884 ... -0.2293223 0.5673167 -1.9616139 ] [ 0.24419144 0.92171884 -0.49688622 ... 
0.5673167 -1.9616139 1.1084846 ]] [[-1.2881382 -0.7022532 1.731788 ... 0.6638484 -0.7378081 1.2091323 ] [-0.7022532 1.731788 -0.634328 ... -0.7378081 1.2091323 0.7583466 ] [ 1.731788 -0.634328 -0.68181896 ... 1.2091323 0.7583466 0.77674955] ... [-0.51302874 1.0876005 0.85194767 ... 0.6412429 -0.7329586 2.9108312 ] [ 1.0876005 0.85194767 0.756878 ... -0.7329586 2.9108312 0.5472809 ] [ 0.85194767 0.756878 -0.13043582 ... 2.9108312 0.5472809 0.7001533 ]] ... [[ 1.0194446 -0.10944234 1.4781318 ... 0.22475609 -0.48945427 -0.64707834] [-0.10944234 1.4781318 0.09808259 ... -0.48945427 -0.64707834 0.9915009 ] [ 1.4781318 0.09808259 0.8527859 ... -0.64707834 0.9915009 0.9244183 ] ... [ 0.43538988 -0.39453164 -0.3014839 ... -0.23379514 2.1611133 0.46508136] [-0.39453164 -0.3014839 0.7194328 ... 2.1611133 0.46508136 1.0986602 ] [-0.3014839 0.7194328 -0.80349845 ... 0.46508136 1.0986602 -0.90534854]] [[-0.06616415 -0.79647017 -0.7385795 ... 0.85252833 -1.3424976 -1.2112739 ] [-0.79647017 -0.7385795 0.19475564 ... -1.3424976 -1.2112739 -0.4559729 ] [-0.7385795 0.19475564 0.21585949 ... -1.2112739 -0.4559729 -0.26465672] ... [-1.114961 0.09837379 0.25028905 ... 0.5304551 -0.67115533 0.56139094] [ 0.09837379 0.25028905 0.1487069 ... -0.67115533 0.56139094 0.38679093] [ 0.25028905 0.1487069 -0.9924789 ... 0.56139094 0.38679093 -0.01713731]] [[ 1.8094565 -0.5950313 0.48027226 ... 0.6008061 0.5346652 0.18584912] [-0.5950313 0.48027226 1.8949077 ... 0.5346652 0.18584912 0.5047222 ] [ 0.48027226 1.8949077 0.3918507 ... 0.18584912 0.5047222 1.0936074 ] ... [-0.29939678 -0.44923106 0.6234538 ... -0.20551886 -0.15353286 0.3678028 ] [-0.44923106 0.6234538 0.16000924 ... -0.15353286 0.3678028 0.44291228] [ 0.6234538 0.16000924 0.2069164 ... 0.3678028 0.44291228 1.0227342 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4653.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-0.10171708 0.6648217 1.1033757 ... 0.8981806 -1.1112736 0.69445854] [ 0.6648217 1.1033757 -0.8223848 ... -1.1112736 0.69445854 0.60815185] [ 0.7882759 1.0690461 2.128026 ... -0.6501909 0.1392891 -1.0097013 ] ... [ 1.145309 0.50996405 0.20570213 ... 0.9948111 -0.49401438 -1.5250522 ] [-0.3915813 0.09823872 0.5807413 ... -0.47935957 0.7212558 0.2027566 ] [ 0.09823872 0.5807413 -0.02402155 ... 0.7212558 0.2027566 -1.2352002 ]] [[ 0.75522476 0.6530909 0.87949437 ... -1.0474752 -0.30613923 0.12529708] [ 0.6530909 0.87949437 0.47153923 ... -0.30613923 0.12529708 0.93513364] [ 0.23361461 -0.56105787 -0.08815672 ... 0.42435765 -0.312546 -0.8777622 ] ... [ 0.10165196 -0.41580382 0.7511615 ... 1.1799293 -0.5331547 0.1826328 ] [-0.41174266 1.6925707 0.18607737 ... 0.7828364 -0.9654313 0.31160367] [ 1.6925707 0.18607737 0.1708944 ... -0.9654313 0.31160367 -1.3317577 ]] [[-1.4878736 -0.6775938 0.86569333 ... 0.7874817 -1.0969768 -0.65951663] [-0.6775938 0.86569333 0.73945355 ... -1.0969768 -0.65951663 -0.10977544] [-0.14937834 0.31895044 -1.1954867 ... -1.8862253 -1.3436147 0.27629265] ... [-0.92299676 1.7995965 0.4710957 ... -0.36486462 0.11805689 -0.8322848 ] [-0.2716009 0.8266315 -0.8683188 ... -0.46125048 -0.00377283 -0.40322402] [ 0.8266315 -0.8683188 0.24053174 ... -0.00377283 -0.40322402 -1.6632042 ]] ... [[ 2.3708134 0.8354202 -1.4063042 ... 0.7104878 -2.0729494 1.6887754 ] [ 0.8354202 -1.4063042 0.3523941 ... -2.0729494 1.6887754 0.78755516] [ 0.24917403 -1.4138769 -0.3863646 ... 
-0.8140354 0.36929923 -0.10113842] ... [-0.82390684 0.08480058 0.6085441 ... -0.31964743 1.7947841 -0.9518208 ] [ 1.387159 -0.19511265 1.0449057 ... 1.2433729 0.5328501 -0.2530511 ] [-0.19511265 1.0449057 -1.8271642 ... 0.5328501 -0.2530511 2.3548617 ]] [[-1.11643 -0.41665325 -0.10627431 ... 0.8937024 -0.250876 -0.0460516 ] [-0.41665325 -0.10627431 -0.77957684 ... -0.250876 -0.0460516 -0.33101082] [ 0.27226016 0.3865436 -0.3939107 ... 0.35664833 0.226532 -0.6655948 ] ... [ 0.03938247 0.08340374 0.5438349 ... -0.07515302 2.614346 0.0324381 ] [ 0.83558816 1.5093818 0.59917724 ... 1.0830277 -0.73806924 -0.4513265 ] [ 1.5093818 0.59917724 0.30162692 ... -0.73806924 -0.4513265 -0.94022304]] [[-1.9099534 0.11196996 0.26422963 ... -0.23357709 -1.2097657 -0.11971842] [ 0.11196996 0.26422963 -1.0844445 ... -1.2097657 -0.11971842 -0.6978676 ] [ 0.89694303 -0.5581365 -0.2290196 ... 0.05322915 -0.9848614 -1.8715852 ] ... [-1.2574474 -1.3962301 1.5628488 ... -0.8803441 -0.8926181 0.48810455] [ 1.6556747 -0.7847719 1.3216169 ... 0.9693334 0.36564484 -0.8694009 ] [-0.7847719 1.3216169 -1.360667 ... 0.36564484 -0.8694009 0.06673751]]]; ov_res: [[[-0.10171708 0.6648217 1.1033757 ... 0.8981806 -1.1112736 0.69445854] [ 0.6648217 1.1033757 -0.8223848 ... -1.1112736 0.69445854 0.60815185] [ 0.7882759 1.0690461 2.128026 ... -0.6501909 0.1392891 -1.0097013 ] ... [ 1.145309 0.50996405 0.20570213 ... 0.9948111 -0.49401438 -1.5250522 ] [-0.3915813 0.09823872 0.5807413 ... -0.47935957 0.7212558 0.2027566 ] [ 0.09823872 0.5807413 -0.02402155 ... 0.7212558 0.2027566 -1.2352002 ]] [[ 0.75522476 0.6530909 0.87949437 ... -1.0474752 -0.30613923 0.12529708] [ 0.6530909 0.87949437 0.47153923 ... -0.30613923 0.12529708 0.93513364] [ 0.23361461 -0.56105787 -0.08815672 ... 0.42435765 -0.312546 -0.8777622 ] ... [ 0.10165196 -0.41580382 0.7511615 ... 1.1799293 -0.5331547 0.1826328 ] [-0.41174266 1.6925707 0.18607737 ... 0.7828364 -0.9654313 0.31160367] [ 1.6925707 0.18607737 0.1708944 ... 
-0.9654313 0.31160367 -1.3317577 ]] [[-1.4878736 -0.6775938 0.86569333 ... 0.7874817 -1.0969768 -0.65951663] [-0.6775938 0.86569333 0.73945355 ... -1.0969768 -0.65951663 -0.10977544] [-0.14937834 0.31895044 -1.1954867 ... -1.8862253 -1.3436147 0.27629265] ... [-0.92299676 1.7995965 0.4710957 ... -0.36486462 0.11805689 -0.8322848 ] [-0.2716009 0.8266315 -0.8683188 ... -0.46125048 -0.00377283 -0.40322402] [ 0.8266315 -0.8683188 0.24053174 ... -0.00377283 -0.40322402 -1.6632042 ]] ... [[ 2.3708134 0.8354202 -1.4063042 ... 0.7104878 -2.0729494 1.6887754 ] [ 0.8354202 -1.4063042 0.3523941 ... -2.0729494 1.6887754 0.78755516] [ 0.24917403 -1.4138769 -0.3863646 ... -0.8140354 0.36929923 -0.10113842] ... [-0.82390684 0.08480058 0.6085441 ... -0.31964743 1.7947841 -0.9518208 ] [ 1.387159 -0.19511265 1.0449057 ... 1.2433729 0.5328501 -0.2530511 ] [-0.19511265 1.0449057 -1.8271642 ... 0.5328501 -0.2530511 2.3548617 ]] [[-1.11643 -0.41665325 -0.10627431 ... 0.8937024 -0.250876 -0.0460516 ] [-0.41665325 -0.10627431 -0.77957684 ... -0.250876 -0.0460516 -0.33101082] [ 0.27226016 0.3865436 -0.3939107 ... 0.35664833 0.226532 -0.6655948 ] ... [ 0.03938247 0.08340374 0.5438349 ... -0.07515302 2.614346 0.0324381 ] [ 0.83558816 1.5093818 0.59917724 ... 1.0830277 -0.73806924 -0.4513265 ] [ 1.5093818 0.59917724 0.30162692 ... -0.73806924 -0.4513265 -0.94022304]] [[-1.9099534 0.11196996 0.26422963 ... -0.23357709 -1.2097657 -0.11971842] [ 0.11196996 0.26422963 -1.0844445 ... -1.2097657 -0.11971842 -0.6978676 ] [ 0.89694303 -0.5581365 -0.2290196 ... 0.05322915 -0.9848614 -1.8715852 ] ... [-1.2574474 -1.3962301 1.5628488 ... -0.8803441 -0.8926181 0.48810455] [ 1.6556747 -0.7847719 1.3216169 ... 0.9693334 0.36564484 -0.8694009 ] [-0.7847719 1.3216169 -1.360667 ... 0.36564484 -0.8694009 0.06673751]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4655.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[-0.32587722 0.6565082 0.43090957 ... -0.30133456 0.05544285 -0.7296062 ] [ 0.6565082 0.43090957 0.32269624 ... 0.05544285 -0.7296062 -0.7763409 ] [ 0.43090957 0.32269624 -1.4023143 ... -0.7296062 -0.7763409 -0.27005094] ... [ 0.00685264 0.00486143 0.05246868 ... -0.7999626 1.2257802 -0.29719865] [ 0.00486143 0.05246868 -0.40345943 ... 1.2257802 -0.29719865 0.17304762] [ 0.05246868 -0.40345943 0.3788546 ... -0.29719865 0.17304762 -0.35702392]] [[-1.4556085 -1.3683133 0.4888825 ... -1.6427476 0.86739874 -0.09135999] [-1.3683133 0.4888825 -0.01145571 ... 0.86739874 -0.09135999 0.04088214] [ 0.4888825 -0.01145571 0.55342585 ... -0.09135999 0.04088214 -0.30246252] ... [-0.72163355 -0.5751622 0.76772267 ... -0.25768563 1.8546691 -0.5586015 ] [-0.5751622 0.76772267 -0.35226592 ... 1.8546691 -0.5586015 0.44260255] [ 0.76772267 -0.35226592 0.7070041 ... -0.5586015 0.44260255 0.01303044]] [[ 1.6828641 -1.6447717 2.0590763 ... 0.05750207 -0.56724703 -0.26903442] [-1.6447717 2.0590763 0.3647378 ... -0.56724703 -0.26903442 0.53298426] [ 2.0590763 0.3647378 0.15408792 ... -0.26903442 0.53298426 -2.3783693 ] ... [-0.0599923 -0.5973998 -0.5091465 ... -0.49896908 -0.84059024 0.46475405] [-0.5973998 -0.5091465 1.1328955 ... -0.84059024 0.46475405 -0.43960476] [-0.5091465 1.1328955 -1.0081879 ... 0.46475405 -0.43960476 -0.07077892]] ... [[ 0.11010839 -0.53614205 1.2730595 ... -1.9633496 -0.12128021 -1.6253856 ] [-0.53614205 1.2730595 -1.0440588 ... -0.12128021 -1.6253856 0.5181644 ] [ 1.2730595 -1.0440588 -0.26188177 ... 
-1.6253856 0.5181644 0.15856023] ... [ 0.14551215 0.18125245 -0.45022058 ... -0.2914588 -2.7644346 0.49186057] [ 0.18125245 -0.45022058 -1.2258569 ... -2.7644346 0.49186057 0.8037859 ] [-0.45022058 -1.2258569 -0.7868631 ... 0.49186057 0.8037859 -0.11113294]] [[-1.9600697 -0.36623788 0.8680735 ... -1.1852666 -1.1476641 -1.0314728 ] [-0.36623788 0.8680735 0.4341241 ... -1.1476641 -1.0314728 1.848575 ] [ 0.8680735 0.4341241 1.3739 ... -1.0314728 1.848575 0.3452332 ] ... [ 0.99519104 0.13978365 1.2578604 ... 0.16645998 0.28353947 1.3644027 ] [ 0.13978365 1.2578604 -1.2883741 ... 0.28353947 1.3644027 1.9785362 ] [ 1.2578604 -1.2883741 0.45614934 ... 1.3644027 1.9785362 2.6360173 ]] [[ 1.2667899 -0.24925993 -0.08083848 ... 0.498383 0.88093567 -1.4494036 ] [-0.24925993 -0.08083848 0.91653603 ... 0.88093567 -1.4494036 0.52830046] [-0.08083848 0.91653603 -0.27396587 ... -1.4494036 0.52830046 2.114797 ] ... [ 1.6860298 2.0620956 -0.59197474 ... -0.18021299 1.6226934 -0.14184006] [ 2.0620956 -0.59197474 0.25869268 ... 1.6226934 -0.14184006 -0.18420896] [-0.59197474 0.25869268 0.91588145 ... -0.14184006 -0.18420896 -0.27856985]]]; ov_res: [[[-0.32587722 0.6565082 0.43090957 ... -0.30133456 0.05544285 -0.7296062 ] [ 0.6565082 0.43090957 0.32269624 ... 0.05544285 -0.7296062 -0.7763409 ] [ 0.43090957 0.32269624 -1.4023143 ... -0.7296062 -0.7763409 -0.27005094] ... [ 0.00685264 0.00486143 0.05246868 ... -0.7999626 1.2257802 -0.29719865] [ 0.00486143 0.05246868 -0.40345943 ... 1.2257802 -0.29719865 0.17304762] [ 0.05246868 -0.40345943 0.3788546 ... -0.29719865 0.17304762 -0.35702392]] [[-1.4556085 -1.3683133 0.4888825 ... -1.6427476 0.86739874 -0.09135999] [-1.3683133 0.4888825 -0.01145571 ... 0.86739874 -0.09135999 0.04088214] [ 0.4888825 -0.01145571 0.55342585 ... -0.09135999 0.04088214 -0.30246252] ... [-0.72163355 -0.5751622 0.76772267 ... -0.25768563 1.8546691 -0.5586015 ] [-0.5751622 0.76772267 -0.35226592 ... 
1.8546691 -0.5586015 0.44260255] [ 0.76772267 -0.35226592 0.7070041 ... -0.5586015 0.44260255 0.01303044]] [[ 1.6828641 -1.6447717 2.0590763 ... 0.05750207 -0.56724703 -0.26903442] [-1.6447717 2.0590763 0.3647378 ... -0.56724703 -0.26903442 0.53298426] [ 2.0590763 0.3647378 0.15408792 ... -0.26903442 0.53298426 -2.3783693 ] ... [-0.0599923 -0.5973998 -0.5091465 ... -0.49896908 -0.84059024 0.46475405] [-0.5973998 -0.5091465 1.1328955 ... -0.84059024 0.46475405 -0.43960476] [-0.5091465 1.1328955 -1.0081879 ... 0.46475405 -0.43960476 -0.07077892]] ... [[ 0.11010839 -0.53614205 1.2730595 ... -1.9633496 -0.12128021 -1.6253856 ] [-0.53614205 1.2730595 -1.0440588 ... -0.12128021 -1.6253856 0.5181644 ] [ 1.2730595 -1.0440588 -0.26188177 ... -1.6253856 0.5181644 0.15856023] ... [ 0.14551215 0.18125245 -0.45022058 ... -0.2914588 -2.7644346 0.49186057] [ 0.18125245 -0.45022058 -1.2258569 ... -2.7644346 0.49186057 0.8037859 ] [-0.45022058 -1.2258569 -0.7868631 ... 0.49186057 0.8037859 -0.11113294]] [[-1.9600697 -0.36623788 0.8680735 ... -1.1852666 -1.1476641 -1.0314728 ] [-0.36623788 0.8680735 0.4341241 ... -1.1476641 -1.0314728 1.848575 ] [ 0.8680735 0.4341241 1.3739 ... -1.0314728 1.848575 0.3452332 ] ... [ 0.99519104 0.13978365 1.2578604 ... 0.16645998 0.28353947 1.3644027 ] [ 0.13978365 1.2578604 -1.2883741 ... 0.28353947 1.3644027 1.9785362 ] [ 1.2578604 -1.2883741 0.45614934 ... 1.3644027 1.9785362 2.6360173 ]] [[ 1.2667899 -0.24925993 -0.08083848 ... 0.498383 0.88093567 -1.4494036 ] [-0.24925993 -0.08083848 0.91653603 ... 0.88093567 -1.4494036 0.52830046] [-0.08083848 0.91653603 -0.27396587 ... -1.4494036 0.52830046 2.114797 ] ... [ 1.6860298 2.0620956 -0.59197474 ... -0.18021299 1.6226934 -0.14184006] [ 2.0620956 -0.59197474 0.25869268 ... 1.6226934 -0.14184006 -0.18420896] [-0.59197474 0.25869268 0.91588145 ... -0.14184006 -0.18420896 -0.27856985]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4657.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 1.7972788 0.36714345 -0.79654634 ... -1.4548854 2.1565363 0.08867349] [ 0.36714345 -0.79654634 -1.6163825 ... 2.1565363 0.08867349 0.43676504] [ 0.41107062 0.23950623 1.7858424 ... 0.03819642 -0.7660791 -1.8864359 ] ... [-0.9256157 1.160789 -1.348748 ... 0.5403051 -1.186056 0.69485676] [ 1.182689 0.3760505 -1.3449373 ... -0.881382 0.02354326 -0.6957059 ] [ 0.3760505 -1.3449373 0.8921366 ... 0.02354326 -0.6957059 0.29805958]] [[-0.3461855 -0.1926896 -2.3927758 ... -0.6905059 1.156817 1.4341207 ] [-0.1926896 -2.3927758 0.01492796 ... 1.156817 1.4341207 -0.03116618] [ 0.24683362 0.5974825 -0.48226544 ... -1.2474698 0.36963993 -1.3407267 ] ... [-0.73112917 -1.1974313 1.9754755 ... -0.82888883 -1.1325029 0.72408646] [ 1.2741997 0.45015252 -0.0241439 ... 0.23867524 1.0128455 -1.3204925 ] [ 0.45015252 -0.0241439 -0.8222671 ... 1.0128455 -1.3204925 1.388714 ]] [[ 2.1159768 -0.15922447 0.28237638 ... -1.4160581 -0.32361665 1.5194992 ] [-0.15922447 0.28237638 1.733803 ... -0.32361665 1.5194992 0.09477834] [ 0.31222576 -2.0659335 -0.03530428 ... 1.632782 0.47538835 -0.24690044] ... [-0.01788949 1.7045491 0.03151972 ... -0.981661 -1.0913963 1.744608 ] [ 0.26399156 1.3430667 0.992296 ... -0.80503273 -1.3794491 -1.7115316 ] [ 1.3430667 0.992296 -0.7077527 ... -1.3794491 -1.7115316 -0.58905655]] ... [[-0.92401147 0.0411698 0.45730662 ... -0.2484976 -0.14902559 -0.58385944] [ 0.0411698 0.45730662 0.05717073 ... -0.14902559 -0.58385944 0.45044178] [-0.98738587 -0.38310245 -0.82235336 ... 
-0.7742484 0.1087136 -0.9532184 ] ... [ 0.37424988 0.5673618 -0.8388028 ... -0.41280776 0.5746648 0.18165642] [ 0.31136322 0.36544317 -1.1276542 ... -1.4513124 0.9887397 0.44050875] [ 0.36544317 -1.1276542 -0.5471736 ... 0.9887397 0.44050875 0.09211323]] [[-2.2365038 -0.54988617 0.3534557 ... 0.4272659 -0.1550518 1.4027712 ] [-0.54988617 0.3534557 -0.7044092 ... -0.1550518 1.4027712 0.07128922] [-1.1303529 0.21991026 -0.9249938 ... 0.57643986 0.78592724 1.0149585 ] ... [ 0.64497966 1.2846731 0.67891556 ... -0.13807982 0.43141794 -1.3602984 ] [ 1.2481441 0.39138117 -0.64783245 ... 0.618493 -1.7922188 2.7856624 ] [ 0.39138117 -0.64783245 0.5219921 ... -1.7922188 2.7856624 0.24548076]] [[ 0.02588631 0.6695226 0.47124442 ... -1.289305 0.58789206 1.2750093 ] [ 0.6695226 0.47124442 -0.03621458 ... 0.58789206 1.2750093 -1.7397592 ] [ 0.65100384 -0.72505176 -0.6775781 ... -0.55810004 -1.7295513 0.58796966] ... [ 0.02614074 1.0356513 1.6146374 ... -0.72087854 0.20718808 0.15547927] [-1.9796736 0.10563459 0.5925453 ... 0.4331914 -0.313835 -0.23745565] [ 0.10563459 0.5925453 -0.72239816 ... -0.313835 -0.23745565 -0.06158908]]]; ov_res: [[[ 1.7972788 0.36714345 -0.79654634 ... -1.4548854 2.1565363 0.08867349] [ 0.36714345 -0.79654634 -1.6163825 ... 2.1565363 0.08867349 0.43676504] [ 0.41107062 0.23950623 1.7858424 ... 0.03819642 -0.7660791 -1.8864359 ] ... [-0.9256157 1.160789 -1.348748 ... 0.5403051 -1.186056 0.69485676] [ 1.182689 0.3760505 -1.3449373 ... -0.881382 0.02354326 -0.6957059 ] [ 0.3760505 -1.3449373 0.8921366 ... 0.02354326 -0.6957059 0.29805958]] [[-0.3461855 -0.1926896 -2.3927758 ... -0.6905059 1.156817 1.4341207 ] [-0.1926896 -2.3927758 0.01492796 ... 1.156817 1.4341207 -0.03116618] [ 0.24683362 0.5974825 -0.48226544 ... -1.2474698 0.36963993 -1.3407267 ] ... [-0.73112917 -1.1974313 1.9754755 ... -0.82888883 -1.1325029 0.72408646] [ 1.2741997 0.45015252 -0.0241439 ... 0.23867524 1.0128455 -1.3204925 ] [ 0.45015252 -0.0241439 -0.8222671 ... 
1.0128455 -1.3204925 1.388714 ]] [[ 2.1159768 -0.15922447 0.28237638 ... -1.4160581 -0.32361665 1.5194992 ] [-0.15922447 0.28237638 1.733803 ... -0.32361665 1.5194992 0.09477834] [ 0.31222576 -2.0659335 -0.03530428 ... 1.632782 0.47538835 -0.24690044] ... [-0.01788949 1.7045491 0.03151972 ... -0.981661 -1.0913963 1.744608 ] [ 0.26399156 1.3430667 0.992296 ... -0.80503273 -1.3794491 -1.7115316 ] [ 1.3430667 0.992296 -0.7077527 ... -1.3794491 -1.7115316 -0.58905655]] ... [[-0.92401147 0.0411698 0.45730662 ... -0.2484976 -0.14902559 -0.58385944] [ 0.0411698 0.45730662 0.05717073 ... -0.14902559 -0.58385944 0.45044178] [-0.98738587 -0.38310245 -0.82235336 ... -0.7742484 0.1087136 -0.9532184 ] ... [ 0.37424988 0.5673618 -0.8388028 ... -0.41280776 0.5746648 0.18165642] [ 0.31136322 0.36544317 -1.1276542 ... -1.4513124 0.9887397 0.44050875] [ 0.36544317 -1.1276542 -0.5471736 ... 0.9887397 0.44050875 0.09211323]] [[-2.2365038 -0.54988617 0.3534557 ... 0.4272659 -0.1550518 1.4027712 ] [-0.54988617 0.3534557 -0.7044092 ... -0.1550518 1.4027712 0.07128922] [-1.1303529 0.21991026 -0.9249938 ... 0.57643986 0.78592724 1.0149585 ] ... [ 0.64497966 1.2846731 0.67891556 ... -0.13807982 0.43141794 -1.3602984 ] [ 1.2481441 0.39138117 -0.64783245 ... 0.618493 -1.7922188 2.7856624 ] [ 0.39138117 -0.64783245 0.5219921 ... -1.7922188 2.7856624 0.24548076]] [[ 0.02588631 0.6695226 0.47124442 ... -1.289305 0.58789206 1.2750093 ] [ 0.6695226 0.47124442 -0.03621458 ... 0.58789206 1.2750093 -1.7397592 ] [ 0.65100384 -0.72505176 -0.6775781 ... -0.55810004 -1.7295513 0.58796966] ... [ 0.02614074 1.0356513 1.6146374 ... -0.72087854 0.20718808 0.15547927] [-1.9796736 0.10563459 0.5925453 ... 0.4331914 -0.313835 -0.23745565] [ 0.10563459 0.5925453 -0.72239816 ... -0.313835 -0.23745565 -0.06158908]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:3 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4659.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-1.8909519 -2.0216413 0.78190154 ... 0.3937455 1.4400418 -1.0268419 ] [-0.6661803 0.13272193 0.74295443 ... 0.09337619 -0.4412574 -1.6430262 ] [-1.3040007 0.45240667 -0.86398315 ... 1.4532796 -1.1933831 1.6172534 ]] [[-0.7487323 -0.6546813 -1.9695737 ... -0.2230177 0.88716006 0.23105752] [ 0.7271328 -1.2211803 0.87551934 ... -0.59644884 -0.48242635 0.29525694] [-0.4624361 -0.2484576 0.87919414 ... 0.34189498 0.3891528 -2.258003 ]] [[-0.22844021 -1.047375 -1.5146916 ... -1.4419948 0.7412471 -1.4455372 ] [ 0.02819524 -1.8829846 0.8007661 ... 0.60245734 0.17574748 -0.8991781 ] [ 1.2037107 -1.8493575 -0.6726785 ... -0.52284 -0.22873707 0.06509662]] ... [[-0.75328505 1.4085534 -0.60845304 ... -1.5471007 3.15105 1.4692873 ] [-1.4867417 2.6206913 0.64889795 ... -1.5577166 -0.901918 -2.5989797 ] [ 0.35091138 0.8013729 0.42903176 ... 1.0331417 -0.16342558 -0.9344397 ]] [[-1.6327009 -1.748386 0.90733004 ... 0.5881961 0.1693418 1.3772962 ] [-0.32744196 2.7124891 -0.6729373 ... -1.425319 -1.8226562 1.6450119 ] [ 0.05446078 0.6317363 0.66202176 ... -0.41057116 -2.5448003 -0.03523106]] [[-0.17689277 0.34738618 -0.03575601 ... -0.42032954 -1.10002 0.40551415] [ 0.4263012 -2.0771062 1.9627682 ... 0.06975093 -0.7624272 0.1588185 ] [ 0.17831597 -1.3197671 -0.47329 ... -0.4835938 0.81181294 0.5410862 ]]]; ov_res: [[[-1.8909519 -2.0216413 0.78190154 ... 0.3937455 1.4400418 -1.0268419 ] [-0.6661803 0.13272193 0.74295443 ... 0.09337619 -0.4412574 -1.6430262 ] [-1.3040007 0.45240667 -0.86398315 ... 
1.4532796 -1.1933831 1.6172534 ]] [[-0.7487323 -0.6546813 -1.9695737 ... -0.2230177 0.88716006 0.23105752] [ 0.7271328 -1.2211803 0.87551934 ... -0.59644884 -0.48242635 0.29525694] [-0.4624361 -0.2484576 0.87919414 ... 0.34189498 0.3891528 -2.258003 ]] [[-0.22844021 -1.047375 -1.5146916 ... -1.4419948 0.7412471 -1.4455372 ] [ 0.02819524 -1.8829846 0.8007661 ... 0.60245734 0.17574748 -0.8991781 ] [ 1.2037107 -1.8493575 -0.6726785 ... -0.52284 -0.22873707 0.06509662]] ... [[-0.75328505 1.4085534 -0.60845304 ... -1.5471007 3.15105 1.4692873 ] [-1.4867417 2.6206913 0.64889795 ... -1.5577166 -0.901918 -2.5989797 ] [ 0.35091138 0.8013729 0.42903176 ... 1.0331417 -0.16342558 -0.9344397 ]] [[-1.6327009 -1.748386 0.90733004 ... 0.5881961 0.1693418 1.3772962 ] [-0.32744196 2.7124891 -0.6729373 ... -1.425319 -1.8226562 1.6450119 ] [ 0.05446078 0.6317363 0.66202176 ... -0.41057116 -2.5448003 -0.03523106]] [[-0.17689277 0.34738618 -0.03575601 ... -0.42032954 -1.10002 0.40551415] [ 0.4263012 -2.0771062 1.9627682 ... 0.06975093 -0.7624272 0.1588185 ] [ 0.17831597 -1.3197671 -0.47329 ... -0.4835938 0.81181294 0.5410862 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4661.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 1.26664734e+00 1.36767530e+00 1.26691973e+00 ... 9.84802783e-01 4.25589204e-01 5.40430844e-01] [-6.98685646e-01 -1.65776853e-02 -9.66952622e-01 ... -5.51596045e-01 -1.28502026e-01 1.81922305e+00] [ 7.27395833e-01 1.77678192e+00 5.53821981e-01 ... 1.34212053e+00 1.10188580e+00 1.01605165e+00] ... [ 5.56611359e-01 -9.88807976e-01 -9.88031328e-01 ... -2.62806326e-01 -2.45185047e-01 -1.69676170e-01] [ 1.02559197e+00 1.62857115e+00 -3.17577809e-01 ... 4.30683523e-01 -3.82374167e-01 -7.96537638e-01] [ 6.48957610e-01 1.45681548e+00 -6.55567870e-02 ... -2.08580256e+00 7.71541595e-01 -4.85142648e-01]] [[-9.76414621e-01 -1.97294474e-01 1.03207338e+00 ... -1.68891966e-01 1.69298363e+00 -2.35681868e+00] [-5.83029032e-01 4.55788732e-01 5.38650528e-02 ... 6.11435711e-01 8.69801044e-01 -1.37381458e+00] [ 2.28272915e+00 -4.01924908e-01 3.67371768e-01 ... 1.08842683e+00 7.96821356e-01 -3.55004609e-01] ... [-1.17774105e+00 -2.82908291e-01 6.73328936e-02 ... 8.00609291e-01 -9.98258665e-02 4.11436826e-01] [-6.67144120e-01 -1.64872214e-01 -5.54873526e-01 ... -5.75215936e-01 -6.70035362e-01 -4.68049377e-01] [-1.11140180e+00 5.54933429e-01 -6.22563720e-01 ... 9.19962347e-01 -7.69279063e-01 -1.28682375e-01]] [[ 1.42462984e-01 -1.39636114e-01 -3.03706564e-02 ... -1.41160119e+00 1.30548131e+00 7.83900082e-01] [-1.34582400e-01 2.57173806e-01 3.50376695e-01 ... -7.32809603e-01 7.30995536e-01 1.20232892e+00] [ 5.18290758e-01 5.89644969e-01 -7.85420954e-01 ... 
-1.53138745e+00 -1.10266134e-01 -1.10037136e+00] ... [ 1.92476124e-01 1.45378590e+00 -1.70390666e+00 ... -3.52481425e-01 9.87122476e-01 -1.06235385e+00] [-1.14435220e+00 -1.85531974e+00 1.31143367e+00 ... -1.63963783e+00 1.54963753e-03 -7.93853700e-01] [-5.61621308e-01 -2.81152815e-01 -1.10054171e+00 ... -4.97157313e-02 -2.03162766e+00 1.10318601e+00]] ... [[ 4.86249298e-01 -2.57912135e+00 2.76264668e-01 ... 4.75605391e-02 -8.67270008e-02 -3.29114199e-01] [-4.29556519e-01 -1.63293326e+00 -7.22563505e-01 ... -1.50814104e+00 -1.44399330e-01 -1.93394780e+00] [-6.22253045e-02 2.06917548e+00 1.83263570e-01 ... -4.04232442e-02 -1.41826725e+00 -5.82292639e-02] ... [-6.85521007e-01 -7.58109570e-01 9.40582395e-01 ... -1.84940004e+00 9.98404384e-01 -1.64356694e-01] [-1.26117766e+00 9.88389373e-01 -1.89616561e+00 ... 9.47294235e-01 1.13928366e+00 1.13271475e+00] [ 1.33468330e-01 -9.64548349e-01 1.86852977e-01 ... 1.72918010e+00 1.42457116e+00 4.71054167e-01]] [[ 1.22101367e+00 1.10255110e+00 1.15585017e+00 ... -2.02124262e+00 3.79497975e-01 -7.65909776e-02] [ 1.65256917e+00 -8.94238830e-01 6.09347343e-01 ... 1.38103592e+00 -6.42870545e-01 -1.65380466e+00] [-1.28085518e+00 -1.85729951e-01 1.57387030e+00 ... 1.37007785e+00 2.23207086e-01 -4.46190119e-01] ... [-2.78831863e+00 -5.10533929e-01 8.15175354e-01 ... 8.34519625e-01 3.11115712e-01 -2.95653462e-01] [-4.43403125e-01 -6.65376067e-01 1.39638022e-01 ... -1.44311774e+00 -2.71383643e+00 -1.02926302e+00] [-1.62526536e+00 1.34075511e+00 -3.37659746e-01 ... 1.61832559e+00 4.43115592e-01 3.90380651e-01]] [[ 2.81997859e-01 -4.96460021e-01 -1.23725498e+00 ... 1.28714597e+00 -1.45959601e-01 1.41678035e-01] [-1.85257030e+00 5.81185937e-01 3.72819602e-01 ... 7.28655815e-01 -2.21644950e+00 3.54275525e-01] [-1.79187655e+00 1.61818731e+00 1.00831771e+00 ... 1.84085429e+00 6.91678524e-01 -2.00577450e+00] ... [-1.29798567e+00 3.24355984e+00 2.44053409e-01 ... 
-2.47237131e-01 6.21648192e-01 4.77205008e-01] [ 1.59207821e+00 -1.37935281e+00 -1.34965706e+00 ... 9.60223436e-01 -2.37286115e+00 -3.11937898e-01] [ 1.64540446e+00 8.94545913e-01 1.14489412e+00 ... -3.95372689e-01 2.46279195e-01 4.51161236e-01]]]; ov_res: [[[ 1.26664734e+00 1.36767530e+00 1.26691973e+00 ... 9.84802783e-01 4.25589204e-01 5.40430844e-01] [-6.98685646e-01 -1.65776853e-02 -9.66952622e-01 ... -5.51596045e-01 -1.28502026e-01 1.81922305e+00] [ 7.27395833e-01 1.77678192e+00 5.53821981e-01 ... 1.34212053e+00 1.10188580e+00 1.01605165e+00] ... [ 5.56611359e-01 -9.88807976e-01 -9.88031328e-01 ... -2.62806326e-01 -2.45185047e-01 -1.69676170e-01] [ 1.02559197e+00 1.62857115e+00 -3.17577809e-01 ... 4.30683523e-01 -3.82374167e-01 -7.96537638e-01] [ 6.48957610e-01 1.45681548e+00 -6.55567870e-02 ... -2.08580256e+00 7.71541595e-01 -4.85142648e-01]] [[-9.76414621e-01 -1.97294474e-01 1.03207338e+00 ... -1.68891966e-01 1.69298363e+00 -2.35681868e+00] [-5.83029032e-01 4.55788732e-01 5.38650528e-02 ... 6.11435711e-01 8.69801044e-01 -1.37381458e+00] [ 2.28272915e+00 -4.01924908e-01 3.67371768e-01 ... 1.08842683e+00 7.96821356e-01 -3.55004609e-01] ... [-1.17774105e+00 -2.82908291e-01 6.73328936e-02 ... 8.00609291e-01 -9.98258665e-02 4.11436826e-01] [-6.67144120e-01 -1.64872214e-01 -5.54873526e-01 ... -5.75215936e-01 -6.70035362e-01 -4.68049377e-01] [-1.11140180e+00 5.54933429e-01 -6.22563720e-01 ... 9.19962347e-01 -7.69279063e-01 -1.28682375e-01]] [[ 1.42462984e-01 -1.39636114e-01 -3.03706564e-02 ... -1.41160119e+00 1.30548131e+00 7.83900082e-01] [-1.34582400e-01 2.57173806e-01 3.50376695e-01 ... -7.32809603e-01 7.30995536e-01 1.20232892e+00] [ 5.18290758e-01 5.89644969e-01 -7.85420954e-01 ... -1.53138745e+00 -1.10266134e-01 -1.10037136e+00] ... [ 1.92476124e-01 1.45378590e+00 -1.70390666e+00 ... -3.52481425e-01 9.87122476e-01 -1.06235385e+00] [-1.14435220e+00 -1.85531974e+00 1.31143367e+00 ... 
-1.63963783e+00 1.54963753e-03 -7.93853700e-01] [-5.61621308e-01 -2.81152815e-01 -1.10054171e+00 ... -4.97157313e-02 -2.03162766e+00 1.10318601e+00]] ... [[ 4.86249298e-01 -2.57912135e+00 2.76264668e-01 ... 4.75605391e-02 -8.67270008e-02 -3.29114199e-01] [-4.29556519e-01 -1.63293326e+00 -7.22563505e-01 ... -1.50814104e+00 -1.44399330e-01 -1.93394780e+00] [-6.22253045e-02 2.06917548e+00 1.83263570e-01 ... -4.04232442e-02 -1.41826725e+00 -5.82292639e-02] ... [-6.85521007e-01 -7.58109570e-01 9.40582395e-01 ... -1.84940004e+00 9.98404384e-01 -1.64356694e-01] [-1.26117766e+00 9.88389373e-01 -1.89616561e+00 ... 9.47294235e-01 1.13928366e+00 1.13271475e+00] [ 1.33468330e-01 -9.64548349e-01 1.86852977e-01 ... 1.72918010e+00 1.42457116e+00 4.71054167e-01]] [[ 1.22101367e+00 1.10255110e+00 1.15585017e+00 ... -2.02124262e+00 3.79497975e-01 -7.65909776e-02] [ 1.65256917e+00 -8.94238830e-01 6.09347343e-01 ... 1.38103592e+00 -6.42870545e-01 -1.65380466e+00] [-1.28085518e+00 -1.85729951e-01 1.57387030e+00 ... 1.37007785e+00 2.23207086e-01 -4.46190119e-01] ... [-2.78831863e+00 -5.10533929e-01 8.15175354e-01 ... 8.34519625e-01 3.11115712e-01 -2.95653462e-01] [-4.43403125e-01 -6.65376067e-01 1.39638022e-01 ... -1.44311774e+00 -2.71383643e+00 -1.02926302e+00] [-1.62526536e+00 1.34075511e+00 -3.37659746e-01 ... 1.61832559e+00 4.43115592e-01 3.90380651e-01]] [[ 2.81997859e-01 -4.96460021e-01 -1.23725498e+00 ... 1.28714597e+00 -1.45959601e-01 1.41678035e-01] [-1.85257030e+00 5.81185937e-01 3.72819602e-01 ... 7.28655815e-01 -2.21644950e+00 3.54275525e-01] [-1.79187655e+00 1.61818731e+00 1.00831771e+00 ... 1.84085429e+00 6.91678524e-01 -2.00577450e+00] ... [-1.29798567e+00 3.24355984e+00 2.44053409e-01 ... -2.47237131e-01 6.21648192e-01 4.77205008e-01] [ 1.59207821e+00 -1.37935281e+00 -1.34965706e+00 ... 9.60223436e-01 -2.37286115e+00 -3.11937898e-01] [ 1.64540446e+00 8.94545913e-01 1.14489412e+00 ... -3.95372689e-01 2.46279195e-01 4.51161236e-01]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4663.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 4.25899506e-01 -6.06817424e-01 -1.68511260e+00 ... 7.16739774e-01 -2.22462282e-01 7.90291131e-01] [-3.06154937e-01 2.08710328e-01 -1.11213386e+00 ... -8.58542383e-01 1.58226585e+00 -2.80664489e-02] [-8.10017824e-01 1.34289667e-01 -1.21159196e+00 ... -1.60903096e-01 2.34330177e-01 9.17098820e-01] ... [-1.12109646e-01 6.51191354e-01 3.25001657e-01 ... -1.91481817e+00 -1.34107679e-01 -2.50314689e+00] [-2.14324072e-01 4.09188241e-01 -8.94183993e-01 ... 2.13506743e-01 5.15913218e-02 4.02401954e-01] [-1.16704240e-01 -6.75999224e-01 9.72523540e-02 ... -1.72296536e+00 8.19298849e-02 6.46867812e-01]] [[-1.31476080e+00 1.03891838e+00 9.67130423e-01 ... 1.01176369e+00 -2.72374123e-01 7.00086236e-01] [ 1.67806983e+00 -1.36008275e+00 -2.25687668e-01 ... 9.40693989e-02 5.71431220e-01 -1.88477948e-01] [ 1.11683893e+00 1.60498977e-01 -1.59399778e-01 ... 1.21270251e+00 4.34523463e-01 6.82123244e-01] ... [ 1.38434649e+00 -2.96846223e+00 5.83312571e-01 ... 6.00620471e-02 7.73263454e-01 1.85227799e+00] [-3.98486286e-01 4.80748445e-01 -1.11841452e+00 ... -1.20073247e+00 9.37108457e-01 -1.77164361e-01] [-2.22039366e+00 9.06423211e-01 -3.05945307e-01 ... -3.71327609e-01 2.81988736e-02 -8.85282040e-01]] [[-9.77380425e-02 3.48235399e-01 -9.80625749e-01 ... -7.69096851e-01 -7.27781773e-01 -8.38921607e-01] [-1.26949883e+00 1.11522682e-01 -1.15494919e+00 ... 5.55698872e-01 1.89545441e+00 9.73872724e-04] [-1.45198181e-01 1.79044083e-01 -8.73646319e-01 ... 
1.42869353e+00 -1.53729475e+00 6.94690704e-01] ... [ 9.70326602e-01 -9.57488194e-02 -7.34685585e-02 ... -6.01200461e-01 7.53946722e-01 -9.96921003e-01] [-8.14113498e-01 -1.25494266e+00 1.81681645e+00 ... -4.41686623e-02 -1.81846321e-01 -6.00547075e-01] [ 4.18700874e-01 -6.02769613e-01 -1.24661171e+00 ... -5.51718354e-01 3.22828978e-01 -9.35444713e-01]] ... [[-8.70878175e-02 1.31997907e+00 2.35293508e-01 ... -4.34785277e-01 1.53270692e-01 6.05931878e-01] [ 2.25302434e+00 -1.83827221e+00 2.69919068e-01 ... -2.48415649e-01 -3.98309886e-01 1.49512184e+00] [ 3.66712928e-01 -8.85652661e-01 -2.08140071e-03 ... -2.00973034e+00 -1.44361162e+00 -1.01727629e+00] ... [-2.46193539e-02 1.43807158e-01 6.61772043e-02 ... 4.49563056e-01 1.53426349e+00 8.75214115e-02] [ 4.01820362e-01 -4.77349102e-01 1.31156683e+00 ... 3.90080690e-01 -1.26400375e+00 -1.87481344e+00] [ 6.19594157e-01 -1.26583353e-01 -3.02519709e-01 ... 5.67243159e-01 5.38425446e-01 6.52173340e-01]] [[ 1.53476074e-01 3.33420664e-01 -9.01568949e-01 ... -1.19115496e+00 -4.64229763e-01 3.33491921e-01] [-7.35739693e-02 -1.33745089e-01 8.61121893e-01 ... -9.38853741e-01 -2.50147969e-01 -4.25406486e-01] [-4.48022902e-01 3.39982539e-01 1.64629027e-01 ... -2.26656720e-01 9.66932595e-01 1.92880481e-02] ... [ 3.73544425e-01 -1.25882852e+00 2.05029821e+00 ... -4.73066837e-01 -3.01255643e-01 -5.45813024e-01] [-6.59571707e-01 7.93806165e-02 9.69766855e-01 ... -6.53901279e-01 -1.69289565e+00 6.41240105e-02] [ 3.12069748e-02 5.59906125e-01 3.72213542e-01 ... -5.27643383e-01 -9.51929450e-01 -6.69651851e-02]] [[-1.22038496e+00 -1.44486928e+00 7.07331777e-01 ... 3.90295416e-01 -9.08725023e-01 3.92377198e-01] [-2.15951300e+00 1.52410972e+00 3.98203582e-01 ... 1.53335154e+00 7.07756698e-01 -5.26263118e-01] [ 1.43732226e+00 1.46996284e+00 -2.40369201e+00 ... 1.02863467e+00 1.04134750e+00 -1.05498105e-01] ... [ 9.04743254e-01 4.14114058e-01 1.52474916e+00 ... 
1.54093206e+00 3.89264286e-01 -4.71493781e-01] [ 1.77880332e-01 -1.07961714e+00 6.19263053e-01 ... 7.10514903e-01 -1.00287902e+00 1.57098091e+00] [ 4.15204197e-01 1.21821928e+00 8.94450665e-01 ... 5.90003073e-01 2.30706048e+00 -8.53646457e-01]]]; ov_res: [[[ 4.25899506e-01 -6.06817424e-01 -1.68511260e+00 ... 7.16739774e-01 -2.22462282e-01 7.90291131e-01] [-3.06154937e-01 2.08710328e-01 -1.11213386e+00 ... -8.58542383e-01 1.58226585e+00 -2.80664489e-02] [-8.10017824e-01 1.34289667e-01 -1.21159196e+00 ... -1.60903096e-01 2.34330177e-01 9.17098820e-01] ... [-1.12109646e-01 6.51191354e-01 3.25001657e-01 ... -1.91481817e+00 -1.34107679e-01 -2.50314689e+00] [-2.14324072e-01 4.09188241e-01 -8.94183993e-01 ... 2.13506743e-01 5.15913218e-02 4.02401954e-01] [-1.16704240e-01 -6.75999224e-01 9.72523540e-02 ... -1.72296536e+00 8.19298849e-02 6.46867812e-01]] [[-1.31476080e+00 1.03891838e+00 9.67130423e-01 ... 1.01176369e+00 -2.72374123e-01 7.00086236e-01] [ 1.67806983e+00 -1.36008275e+00 -2.25687668e-01 ... 9.40693989e-02 5.71431220e-01 -1.88477948e-01] [ 1.11683893e+00 1.60498977e-01 -1.59399778e-01 ... 1.21270251e+00 4.34523463e-01 6.82123244e-01] ... [ 1.38434649e+00 -2.96846223e+00 5.83312571e-01 ... 6.00620471e-02 7.73263454e-01 1.85227799e+00] [-3.98486286e-01 4.80748445e-01 -1.11841452e+00 ... -1.20073247e+00 9.37108457e-01 -1.77164361e-01] [-2.22039366e+00 9.06423211e-01 -3.05945307e-01 ... -3.71327609e-01 2.81988736e-02 -8.85282040e-01]] [[-9.77380425e-02 3.48235399e-01 -9.80625749e-01 ... -7.69096851e-01 -7.27781773e-01 -8.38921607e-01] [-1.26949883e+00 1.11522682e-01 -1.15494919e+00 ... 5.55698872e-01 1.89545441e+00 9.73872724e-04] [-1.45198181e-01 1.79044083e-01 -8.73646319e-01 ... 1.42869353e+00 -1.53729475e+00 6.94690704e-01] ... [ 9.70326602e-01 -9.57488194e-02 -7.34685585e-02 ... -6.01200461e-01 7.53946722e-01 -9.96921003e-01] [-8.14113498e-01 -1.25494266e+00 1.81681645e+00 ... 
-4.41686623e-02 -1.81846321e-01 -6.00547075e-01] [ 4.18700874e-01 -6.02769613e-01 -1.24661171e+00 ... -5.51718354e-01 3.22828978e-01 -9.35444713e-01]] ... [[-8.70878175e-02 1.31997907e+00 2.35293508e-01 ... -4.34785277e-01 1.53270692e-01 6.05931878e-01] [ 2.25302434e+00 -1.83827221e+00 2.69919068e-01 ... -2.48415649e-01 -3.98309886e-01 1.49512184e+00] [ 3.66712928e-01 -8.85652661e-01 -2.08140071e-03 ... -2.00973034e+00 -1.44361162e+00 -1.01727629e+00] ... [-2.46193539e-02 1.43807158e-01 6.61772043e-02 ... 4.49563056e-01 1.53426349e+00 8.75214115e-02] [ 4.01820362e-01 -4.77349102e-01 1.31156683e+00 ... 3.90080690e-01 -1.26400375e+00 -1.87481344e+00] [ 6.19594157e-01 -1.26583353e-01 -3.02519709e-01 ... 5.67243159e-01 5.38425446e-01 6.52173340e-01]] [[ 1.53476074e-01 3.33420664e-01 -9.01568949e-01 ... -1.19115496e+00 -4.64229763e-01 3.33491921e-01] [-7.35739693e-02 -1.33745089e-01 8.61121893e-01 ... -9.38853741e-01 -2.50147969e-01 -4.25406486e-01] [-4.48022902e-01 3.39982539e-01 1.64629027e-01 ... -2.26656720e-01 9.66932595e-01 1.92880481e-02] ... [ 3.73544425e-01 -1.25882852e+00 2.05029821e+00 ... -4.73066837e-01 -3.01255643e-01 -5.45813024e-01] [-6.59571707e-01 7.93806165e-02 9.69766855e-01 ... -6.53901279e-01 -1.69289565e+00 6.41240105e-02] [ 3.12069748e-02 5.59906125e-01 3.72213542e-01 ... -5.27643383e-01 -9.51929450e-01 -6.69651851e-02]] [[-1.22038496e+00 -1.44486928e+00 7.07331777e-01 ... 3.90295416e-01 -9.08725023e-01 3.92377198e-01] [-2.15951300e+00 1.52410972e+00 3.98203582e-01 ... 1.53335154e+00 7.07756698e-01 -5.26263118e-01] [ 1.43732226e+00 1.46996284e+00 -2.40369201e+00 ... 1.02863467e+00 1.04134750e+00 -1.05498105e-01] ... [ 9.04743254e-01 4.14114058e-01 1.52474916e+00 ... 1.54093206e+00 3.89264286e-01 -4.71493781e-01] [ 1.77880332e-01 -1.07961714e+00 6.19263053e-01 ... 7.10514903e-01 -1.00287902e+00 1.57098091e+00] [ 4.15204197e-01 1.21821928e+00 8.94450665e-01 ... 5.90003073e-01 2.30706048e+00 -8.53646457e-01]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4665.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-4.4943431e-01 -3.3404842e-01 1.0174289e+00 ... -8.8051535e-02 7.4752891e-01 -2.1973878e-02] [-4.6789047e-01 -2.7672726e-01 1.3799115e-01 ... 9.7442947e-02 -4.5594087e-01 -8.3121336e-01] [-2.6542294e-01 3.8476624e-02 -9.4836217e-01 ... -3.9042167e-02 7.7837062e-01 -1.9637834e-01] ... [-1.0763822e+00 -9.9938788e-02 -1.1319994e-01 ... -1.5257032e+00 -1.3470223e+00 5.1120269e-01] [-8.4647733e-01 1.3258569e-01 2.3636742e+00 ... 5.2565223e-01 -1.5342477e-01 1.3781551e+00] [-6.7059898e-01 -6.3592714e-01 -1.2544460e+00 ... 3.7938324e-01 -7.7260643e-01 -6.1451286e-01]] [[-1.2675991e+00 -4.4427287e-02 4.0793994e-01 ... 4.3740040e-01 -9.2189121e-01 -1.4192613e+00] [ 7.5133437e-01 4.3881595e-01 5.6303102e-01 ... -4.5645210e-01 7.5268793e-01 -4.9032721e-01] [ 9.3117610e-02 -7.1275043e-01 -1.9408756e-01 ... 1.2486106e+00 1.6955483e-01 -6.2309855e-01] ... [ 1.8917174e+00 1.2790884e+00 1.2400805e+00 ... -4.4000769e-01 -2.1017337e+00 8.2723761e-01] [ 3.4581885e-01 -1.8980471e+00 -5.4496157e-01 ... 1.6261369e+00 -3.3861652e-01 9.5209189e-02] [-1.5361741e+00 -1.3209562e-01 -1.2306625e-01 ... -4.2050004e-01 -1.3511053e-01 -1.6029137e-01]] [[-1.1841063e+00 6.6111201e-01 -1.8371016e-03 ... 1.6236269e-01 1.4487710e+00 6.6109686e-03] [-7.1403337e-01 -4.7368839e-01 -1.1994549e-01 ... -1.0280502e-01 -1.3859786e+00 9.6185678e-01] [-1.7933860e+00 6.1621928e-01 1.3936217e+00 ... 9.5809263e-01 9.7211432e-01 -6.0226995e-01] ... [ 9.4434386e-01 4.9980262e-01 2.3624139e-01 ... 
2.4475136e+00 -6.5900856e-01 8.6281812e-01] [ 1.0531520e+00 -4.9297181e-01 1.7134315e+00 ... -2.3747191e+00 2.5272277e-01 1.7153143e+00] [ 6.9207019e-01 1.0088683e+00 -7.1994150e-01 ... 1.5203126e-01 -2.5467643e-01 5.3307760e-01]] ... [[-3.6345291e-01 1.4728318e+00 -6.3466609e-01 ... -7.9743093e-01 1.8707857e+00 6.1344173e-02] [ 5.7772416e-01 9.9344146e-01 1.7327408e+00 ... -3.3381730e-01 4.9220908e-01 -9.9239838e-01] [ 7.5740144e-02 -5.1235032e-01 1.3701360e-01 ... 1.2098191e-01 -1.7458110e+00 -3.0384856e-01] ... [-1.6647929e+00 1.3777953e+00 -1.9725753e-02 ... -9.3400705e-01 2.5795558e-03 -1.4655572e+00] [ 4.9765494e-01 6.7578703e-01 -1.4437732e+00 ... -1.5803241e+00 5.4701447e-01 9.5414829e-01] [-4.8175254e-01 -6.8249211e-02 -9.0876502e-01 ... -5.0924015e-01 1.1023238e+00 2.6420251e-01]] [[ 8.6474448e-01 5.6718534e-01 1.1595573e+00 ... 7.4611403e-02 -1.9651683e-01 4.5848593e-01] [ 9.0805626e-01 9.0139133e-01 -9.9191439e-01 ... -1.6587965e+00 1.3863360e+00 2.1087747e+00] [-8.7863222e-02 2.6255125e-01 9.1426694e-01 ... -3.7589473e-01 7.6026195e-01 -5.1915824e-01] ... [-3.7525544e-01 -3.8475183e-01 -1.0315607e+00 ... -9.6215379e-01 2.6388626e+00 1.1369526e+00] [ 6.3923717e-01 -9.0339875e-01 -4.6024457e-01 ... 1.6982340e+00 -1.0517132e+00 8.0898452e-01] [ 1.2308656e+00 9.0243074e-04 -2.6943231e-01 ... 1.2466581e-01 1.6557226e+00 4.4176173e-01]] [[ 7.5737429e-01 1.0559950e+00 8.7424177e-01 ... -1.8933775e-01 -1.5068991e-01 6.4661801e-01] [ 1.0638101e+00 -1.5875565e-01 4.4859785e-01 ... 3.9650011e-01 2.1557004e+00 -1.4198087e+00] [ 9.2332894e-01 7.2556877e-01 -8.3073342e-01 ... 3.4312677e-01 3.6202452e-01 9.0866745e-01] ... [ 7.7424282e-01 -3.2231751e-01 5.5471729e-02 ... 2.1845791e+00 4.2052850e-02 -8.9674687e-01] [-1.1032794e+00 -5.2442282e-01 -2.7953380e-01 ... 2.3097796e+00 5.7721531e-01 -6.9186950e-01] [ 2.4569723e-01 2.0993776e+00 -1.2339743e+00 ... -9.3869764e-01 -2.7007893e-01 -8.0058414e-01]]]; ov_res: [[[-4.4943431e-01 -3.3404842e-01 1.0174289e+00 ... 
-8.8051535e-02 7.4752891e-01 -2.1973878e-02] [-4.6789047e-01 -2.7672726e-01 1.3799115e-01 ... 9.7442947e-02 -4.5594087e-01 -8.3121336e-01] [-2.6542294e-01 3.8476624e-02 -9.4836217e-01 ... -3.9042167e-02 7.7837062e-01 -1.9637834e-01] ... [-1.0763822e+00 -9.9938788e-02 -1.1319994e-01 ... -1.5257032e+00 -1.3470223e+00 5.1120269e-01] [-8.4647733e-01 1.3258569e-01 2.3636742e+00 ... 5.2565223e-01 -1.5342477e-01 1.3781551e+00] [-6.7059898e-01 -6.3592714e-01 -1.2544460e+00 ... 3.7938324e-01 -7.7260643e-01 -6.1451286e-01]] [[-1.2675991e+00 -4.4427287e-02 4.0793994e-01 ... 4.3740040e-01 -9.2189121e-01 -1.4192613e+00] [ 7.5133437e-01 4.3881595e-01 5.6303102e-01 ... -4.5645210e-01 7.5268793e-01 -4.9032721e-01] [ 9.3117610e-02 -7.1275043e-01 -1.9408756e-01 ... 1.2486106e+00 1.6955483e-01 -6.2309855e-01] ... [ 1.8917174e+00 1.2790884e+00 1.2400805e+00 ... -4.4000769e-01 -2.1017337e+00 8.2723761e-01] [ 3.4581885e-01 -1.8980471e+00 -5.4496157e-01 ... 1.6261369e+00 -3.3861652e-01 9.5209189e-02] [-1.5361741e+00 -1.3209562e-01 -1.2306625e-01 ... -4.2050004e-01 -1.3511053e-01 -1.6029137e-01]] [[-1.1841063e+00 6.6111201e-01 -1.8371016e-03 ... 1.6236269e-01 1.4487710e+00 6.6109686e-03] [-7.1403337e-01 -4.7368839e-01 -1.1994549e-01 ... -1.0280502e-01 -1.3859786e+00 9.6185678e-01] [-1.7933860e+00 6.1621928e-01 1.3936217e+00 ... 9.5809263e-01 9.7211432e-01 -6.0226995e-01] ... [ 9.4434386e-01 4.9980262e-01 2.3624139e-01 ... 2.4475136e+00 -6.5900856e-01 8.6281812e-01] [ 1.0531520e+00 -4.9297181e-01 1.7134315e+00 ... -2.3747191e+00 2.5272277e-01 1.7153143e+00] [ 6.9207019e-01 1.0088683e+00 -7.1994150e-01 ... 1.5203126e-01 -2.5467643e-01 5.3307760e-01]] ... [[-3.6345291e-01 1.4728318e+00 -6.3466609e-01 ... -7.9743093e-01 1.8707857e+00 6.1344173e-02] [ 5.7772416e-01 9.9344146e-01 1.7327408e+00 ... -3.3381730e-01 4.9220908e-01 -9.9239838e-01] [ 7.5740144e-02 -5.1235032e-01 1.3701360e-01 ... 1.2098191e-01 -1.7458110e+00 -3.0384856e-01] ... [-1.6647929e+00 1.3777953e+00 -1.9725753e-02 ... 
-9.3400705e-01 2.5795558e-03 -1.4655572e+00] [ 4.9765494e-01 6.7578703e-01 -1.4437732e+00 ... -1.5803241e+00 5.4701447e-01 9.5414829e-01] [-4.8175254e-01 -6.8249211e-02 -9.0876502e-01 ... -5.0924015e-01 1.1023238e+00 2.6420251e-01]] [[ 8.6474448e-01 5.6718534e-01 1.1595573e+00 ... 7.4611403e-02 -1.9651683e-01 4.5848593e-01] [ 9.0805626e-01 9.0139133e-01 -9.9191439e-01 ... -1.6587965e+00 1.3863360e+00 2.1087747e+00] [-8.7863222e-02 2.6255125e-01 9.1426694e-01 ... -3.7589473e-01 7.6026195e-01 -5.1915824e-01] ... [-3.7525544e-01 -3.8475183e-01 -1.0315607e+00 ... -9.6215379e-01 2.6388626e+00 1.1369526e+00] [ 6.3923717e-01 -9.0339875e-01 -4.6024457e-01 ... 1.6982340e+00 -1.0517132e+00 8.0898452e-01] [ 1.2308656e+00 9.0243074e-04 -2.6943231e-01 ... 1.2466581e-01 1.6557226e+00 4.4176173e-01]] [[ 7.5737429e-01 1.0559950e+00 8.7424177e-01 ... -1.8933775e-01 -1.5068991e-01 6.4661801e-01] [ 1.0638101e+00 -1.5875565e-01 4.4859785e-01 ... 3.9650011e-01 2.1557004e+00 -1.4198087e+00] [ 9.2332894e-01 7.2556877e-01 -8.3073342e-01 ... 3.4312677e-01 3.6202452e-01 9.0866745e-01] ... [ 7.7424282e-01 -3.2231751e-01 5.5471729e-02 ... 2.1845791e+00 4.2052850e-02 -8.9674687e-01] [-1.1032794e+00 -5.2442282e-01 -2.7953380e-01 ... 2.3097796e+00 5.7721531e-01 -6.9186950e-01] [ 2.4569723e-01 2.0993776e+00 -1.2339743e+00 ... -9.3869764e-01 -2.7007893e-01 -8.0058414e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4667.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 9.94814992e-01 -1.14246809e+00 7.10355222e-01 ... -3.86629999e-01 1.06339121e+00 4.15224344e-01] [ 8.34949195e-01 -2.10575032e+00 2.66550183e-01 ... 8.08174968e-01 -1.28137255e+00 3.78413826e-01] [ 2.26858571e-01 -3.13352227e+00 -6.02185011e-01 ... -1.52087021e+00 -5.02816856e-01 -4.21376646e-01] ... [ 3.02846283e-01 5.09573281e-01 -3.62489998e-01 ... 1.39793801e+00 1.02268982e+00 -1.92361438e+00] [ 7.35148966e-01 -6.35726750e-02 -7.10820019e-01 ... 5.55036724e-01 -1.08967268e+00 -6.97567523e-01] [-5.01170009e-02 6.93494380e-01 -1.59424260e-01 ... 8.52253139e-01 -2.12476921e+00 4.21739161e-01]] [[-3.34836036e-01 1.30220854e+00 9.80370998e-01 ... 1.59836662e+00 -5.85067630e-01 -1.22365141e-02] [ 1.12123811e+00 -1.12952268e+00 2.35120043e-01 ... 1.08677804e+00 -5.88397682e-01 -6.44865036e-01] [ 4.34025019e-01 -1.11280870e+00 1.04504144e+00 ... 6.54598415e-01 1.13130486e+00 1.39063835e-01] ... [ 1.14871478e+00 -1.60900319e+00 -6.90688848e-01 ... -5.90913415e-01 2.22329021e+00 -3.44752252e-01] [-2.98597598e+00 -1.56441188e+00 -1.61816072e+00 ... -2.63478309e-01 -5.78832746e-01 -3.97975117e-01] [ 1.09977506e-01 4.87728089e-01 -4.52416569e-01 ... 9.97189939e-01 6.11737490e-01 1.31356072e+00]] [[-1.14621913e+00 7.84251094e-01 -1.24707365e+00 ... 1.20777977e+00 6.11143745e-02 1.21377015e+00] [ 2.63844043e-01 1.28635657e+00 -1.23518038e+00 ... -9.37261999e-01 -3.06079745e-01 -6.12074673e-01] [-1.34773836e-01 1.09486747e+00 -5.48146725e-01 ... 
-6.16825402e-01 4.94047642e-01 -3.55658323e-01] ... [ 1.01122880e+00 -8.15914810e-01 6.06201410e-01 ... 6.37068331e-01 2.11750722e+00 9.02877569e-01] [-2.17587993e-01 1.65238547e+00 -1.38848436e+00 ... 8.60628113e-02 -1.04316735e+00 -1.65002084e+00] [ 1.15705466e+00 -5.62644638e-02 -1.87734008e+00 ... 3.91594060e-02 -1.08813465e+00 1.73373842e+00]] ... [[ 6.48261666e-01 -8.56316984e-01 -1.66520500e+00 ... 1.48940340e-01 1.07057333e+00 -5.63979447e-01] [-8.02453637e-01 -3.56295913e-01 -8.32640350e-01 ... -1.06456876e+00 1.03844941e+00 9.37773645e-01] [ 7.48657465e-01 1.35866022e+00 -4.27982628e-01 ... 5.90593398e-01 2.15084240e-01 -2.95073795e+00] ... [-2.66606122e-01 2.94937164e-01 -4.86189276e-01 ... -2.62415022e-01 -2.58798420e-01 -9.94967341e-01] [ 1.75368398e-01 -1.09802842e+00 1.74603999e-01 ... -1.15944088e+00 -2.13866377e+00 6.47783041e-01] [ 1.29774773e+00 -1.29882002e+00 7.33964682e-01 ... -1.16693832e-01 1.28048027e+00 -4.31953132e-01]] [[ 1.42200637e+00 9.39381778e-01 -1.10917449e+00 ... -1.74332929e+00 -1.23313701e+00 1.12153065e+00] [ 1.62853813e+00 -2.76626199e-01 2.12041646e-01 ... -7.16405585e-02 -4.42130595e-01 -1.46703100e+00] [-8.53099644e-01 -3.28943767e-02 -4.65648502e-01 ... 2.93975258e+00 2.62100428e-01 -1.14825714e+00] ... [-3.57246011e-01 -1.84446204e+00 -6.71019033e-02 ... -4.25678521e-01 4.66154486e-01 1.62464172e-01] [ 5.04158914e-01 -1.84049869e+00 -1.50807095e+00 ... -1.69591331e+00 -1.20345974e+00 5.38329720e-01] [-6.74889445e-01 -1.31547594e+00 -1.32177496e+00 ... 1.15193307e+00 1.20245598e-01 1.68573475e+00]] [[ 3.27235878e-01 1.38150752e-01 -1.31379282e+00 ... -2.73118198e-01 6.68686628e-01 -5.00638008e-01] [-1.46154022e+00 -1.79257125e-01 1.86721742e-01 ... -1.20154011e+00 6.47939622e-01 1.76479924e+00] [ 2.81208605e-01 7.05337077e-02 -3.44988495e-01 ... 2.14157566e-01 -8.26589540e-02 1.90531582e-01] ... [ 1.40102649e+00 3.09030741e-01 1.52643010e-01 ... 
-1.81043971e+00 1.38247240e+00 1.20069897e+00] [-7.26777911e-01 9.13052380e-01 -3.06477421e-03 ... 1.91788599e-01 3.70271474e-01 2.74002880e-01] [ 8.99954855e-01 1.26336619e-01 3.08274865e-01 ... -2.95714438e-01 1.54027212e+00 6.33795440e-01]]]; ov_res: [[[ 9.94814992e-01 -1.14246809e+00 7.10355222e-01 ... -3.86629999e-01 1.06339121e+00 4.15224344e-01] [ 8.34949195e-01 -2.10575032e+00 2.66550183e-01 ... 8.08174968e-01 -1.28137255e+00 3.78413826e-01] [ 2.26858571e-01 -3.13352227e+00 -6.02185011e-01 ... -1.52087021e+00 -5.02816856e-01 -4.21376646e-01] ... [ 3.02846283e-01 5.09573281e-01 -3.62489998e-01 ... 1.39793801e+00 1.02268982e+00 -1.92361438e+00] [ 7.35148966e-01 -6.35726750e-02 -7.10820019e-01 ... 5.55036724e-01 -1.08967268e+00 -6.97567523e-01] [-5.01170009e-02 6.93494380e-01 -1.59424260e-01 ... 8.52253139e-01 -2.12476921e+00 4.21739161e-01]] [[-3.34836036e-01 1.30220854e+00 9.80370998e-01 ... 1.59836662e+00 -5.85067630e-01 -1.22365141e-02] [ 1.12123811e+00 -1.12952268e+00 2.35120043e-01 ... 1.08677804e+00 -5.88397682e-01 -6.44865036e-01] [ 4.34025019e-01 -1.11280870e+00 1.04504144e+00 ... 6.54598415e-01 1.13130486e+00 1.39063835e-01] ... [ 1.14871478e+00 -1.60900319e+00 -6.90688848e-01 ... -5.90913415e-01 2.22329021e+00 -3.44752252e-01] [-2.98597598e+00 -1.56441188e+00 -1.61816072e+00 ... -2.63478309e-01 -5.78832746e-01 -3.97975117e-01] [ 1.09977506e-01 4.87728089e-01 -4.52416569e-01 ... 9.97189939e-01 6.11737490e-01 1.31356072e+00]] [[-1.14621913e+00 7.84251094e-01 -1.24707365e+00 ... 1.20777977e+00 6.11143745e-02 1.21377015e+00] [ 2.63844043e-01 1.28635657e+00 -1.23518038e+00 ... -9.37261999e-01 -3.06079745e-01 -6.12074673e-01] [-1.34773836e-01 1.09486747e+00 -5.48146725e-01 ... -6.16825402e-01 4.94047642e-01 -3.55658323e-01] ... [ 1.01122880e+00 -8.15914810e-01 6.06201410e-01 ... 6.37068331e-01 2.11750722e+00 9.02877569e-01] [-2.17587993e-01 1.65238547e+00 -1.38848436e+00 ... 
8.60628113e-02 -1.04316735e+00 -1.65002084e+00] [ 1.15705466e+00 -5.62644638e-02 -1.87734008e+00 ... 3.91594060e-02 -1.08813465e+00 1.73373842e+00]] ... [[ 6.48261666e-01 -8.56316984e-01 -1.66520500e+00 ... 1.48940340e-01 1.07057333e+00 -5.63979447e-01] [-8.02453637e-01 -3.56295913e-01 -8.32640350e-01 ... -1.06456876e+00 1.03844941e+00 9.37773645e-01] [ 7.48657465e-01 1.35866022e+00 -4.27982628e-01 ... 5.90593398e-01 2.15084240e-01 -2.95073795e+00] ... [-2.66606122e-01 2.94937164e-01 -4.86189276e-01 ... -2.62415022e-01 -2.58798420e-01 -9.94967341e-01] [ 1.75368398e-01 -1.09802842e+00 1.74603999e-01 ... -1.15944088e+00 -2.13866377e+00 6.47783041e-01] [ 1.29774773e+00 -1.29882002e+00 7.33964682e-01 ... -1.16693832e-01 1.28048027e+00 -4.31953132e-01]] [[ 1.42200637e+00 9.39381778e-01 -1.10917449e+00 ... -1.74332929e+00 -1.23313701e+00 1.12153065e+00] [ 1.62853813e+00 -2.76626199e-01 2.12041646e-01 ... -7.16405585e-02 -4.42130595e-01 -1.46703100e+00] [-8.53099644e-01 -3.28943767e-02 -4.65648502e-01 ... 2.93975258e+00 2.62100428e-01 -1.14825714e+00] ... [-3.57246011e-01 -1.84446204e+00 -6.71019033e-02 ... -4.25678521e-01 4.66154486e-01 1.62464172e-01] [ 5.04158914e-01 -1.84049869e+00 -1.50807095e+00 ... -1.69591331e+00 -1.20345974e+00 5.38329720e-01] [-6.74889445e-01 -1.31547594e+00 -1.32177496e+00 ... 1.15193307e+00 1.20245598e-01 1.68573475e+00]] [[ 3.27235878e-01 1.38150752e-01 -1.31379282e+00 ... -2.73118198e-01 6.68686628e-01 -5.00638008e-01] [-1.46154022e+00 -1.79257125e-01 1.86721742e-01 ... -1.20154011e+00 6.47939622e-01 1.76479924e+00] [ 2.81208605e-01 7.05337077e-02 -3.44988495e-01 ... 2.14157566e-01 -8.26589540e-02 1.90531582e-01] ... [ 1.40102649e+00 3.09030741e-01 1.52643010e-01 ... -1.81043971e+00 1.38247240e+00 1.20069897e+00] [-7.26777911e-01 9.13052380e-01 -3.06477421e-03 ... 1.91788599e-01 3.70271474e-01 2.74002880e-01] [ 8.99954855e-01 1.26336619e-01 3.08274865e-01 ... -2.95714438e-01 1.54027212e+00 6.33795440e-01]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:0 - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4669.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.13128313 -1.1427253 -1.3012656 ... -1.0262103 -0.85323524 -0.5864163 ] [-2.0726252 -1.1361545 -1.0416048 ... 1.4785591 -0.51647866 -0.1042372 ] [ 0.23779789 -0.69939554 -0.37066463 ... 0.47289026 0.02994181 -0.3895551 ]] [[ 0.656946 0.60707796 -0.7446868 ... 1.0323821 -0.8452109 -0.85119617] [ 0.11997432 -0.26362014 0.08251822 ... 1.6845746 1.0286994 -1.7417253 ] [-1.6426115 -0.54023254 0.08578567 ... 2.3842282 1.4620914 0.19218475]] [[ 1.2776463 0.9503079 -1.1690912 ... -0.29841188 -2.241213 -1.5060698 ] [-0.14757867 -0.8380483 -0.5307341 ... 0.4810455 1.2383932 -1.3253119 ] [ 0.59024644 -0.6406991 -1.5309726 ... -1.3777035 1.112171 -0.11731469]] ... [[-2.1324317 -0.43223935 0.56216645 ... 0.09810153 0.05103404 2.1522295 ] [ 1.4498924 -0.04905874 0.658686 ... 0.7725712 -1.6026566 0.03450949] [-1.1224283 0.85385746 0.44493556 ... -0.556666 -0.46238375 1.1306045 ]] [[ 0.8767944 0.33873996 1.4170574 ... 0.12934902 0.4654998 -1.651587 ] [ 0.5705911 -0.59519035 1.8647544 ... -1.3166859 1.259184 0.1659386 ] [ 0.39443266 0.09798001 -0.53923225 ... 0.14072287 0.6898719 -1.8760856 ]] [[-1.0389216 1.2010158 0.47855592 ... -0.57521874 -0.1713138 -0.31243432] [ 0.24145289 -1.3448772 0.686309 ... -0.51282233 0.3283814 -0.36423287] [-1.0064018 0.18765827 -0.20337111 ... -1.3873345 -0.7534631 0.76259345]]]; ov_res: [[[ 0.13128313 -1.1427253 -1.3012656 ... -1.0262103 -0.85323524 -0.5864163 ] [-2.0726252 -1.1361545 -1.0416048 ... 
1.4785591 -0.51647866 -0.1042372 ] [ 0.23779789 -0.69939554 -0.37066463 ... 0.47289026 0.02994181 -0.3895551 ]] [[ 0.656946 0.60707796 -0.7446868 ... 1.0323821 -0.8452109 -0.85119617] [ 0.11997432 -0.26362014 0.08251822 ... 1.6845746 1.0286994 -1.7417253 ] [-1.6426115 -0.54023254 0.08578567 ... 2.3842282 1.4620914 0.19218475]] [[ 1.2776463 0.9503079 -1.1690912 ... -0.29841188 -2.241213 -1.5060698 ] [-0.14757867 -0.8380483 -0.5307341 ... 0.4810455 1.2383932 -1.3253119 ] [ 0.59024644 -0.6406991 -1.5309726 ... -1.3777035 1.112171 -0.11731469]] ... [[-2.1324317 -0.43223935 0.56216645 ... 0.09810153 0.05103404 2.1522295 ] [ 1.4498924 -0.04905874 0.658686 ... 0.7725712 -1.6026566 0.03450949] [-1.1224283 0.85385746 0.44493556 ... -0.556666 -0.46238375 1.1306045 ]] [[ 0.8767944 0.33873996 1.4170574 ... 0.12934902 0.4654998 -1.651587 ] [ 0.5705911 -0.59519035 1.8647544 ... -1.3166859 1.259184 0.1659386 ] [ 0.39443266 0.09798001 -0.53923225 ... 0.14072287 0.6898719 -1.8760856 ]] [[-1.0389216 1.2010158 0.47855592 ... -0.57521874 -0.1713138 -0.31243432] [ 0.24145289 -1.3448772 0.686309 ... -0.51282233 0.3283814 -0.36423287] [-1.0064018 0.18765827 -0.20337111 ... -1.3873345 -0.7534631 0.76259345]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4671.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:1 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4673.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4675.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. 
... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4677.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:1 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4679.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4681.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4683.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.5265188 -2.0146446 0. ] [ 0. 0. 0. ... -1.4075298 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.23227262 -0.9386971 0. ] [ 0. 0. 0. ... 0.15085576 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.51054054 0.5760572 0. ] [ 0. 0. 0. ... 0.6536687 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.4758235 0.03410957 0. ] [ 0. 0. 0. ... -1.0113184 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.9198237 -0.60397184 0. ] [ 0. 0. 0. ... -0.32585633 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8038552 -0.23917755 0. ] [ 0. 0. 0. ... 3.2106872 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.5265188 -2.0146446 0. ] [ 0. 0. 0. ... -1.4075298 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.23227262 -0.9386971 0. ] [ 0. 0. 0. ... 0.15085576 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.51054054 0.5760572 0. ] [ 0. 0. 0. ... 0.6536687 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.4758235 0.03410957 0. ] [ 0. 0. 0. ... -1.0113184 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.9198237 -0.60397184 0. ] [ 0. 0. 0. ... -0.32585633 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8038552 -0.23917755 0. ] [ 0. 0. 0. ... 3.2106872 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4685.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.34890577 1.7805778 -0.8018552 ] [ 0. 0. 0. ... 0.68236387 -2.904782 0. ] [ 0. 0. 0. ... -1.4142418 0.6758706 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.8743999 1.7485626 -0.0271602 ] [ 0. 0. 0. ... 0.18827431 -1.3011271 0. ] [ 0. 0. 0. ... -0.38269895 -1.1282583 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2371917 -0.45844844 1.3640758 ] [ 0. 0. 0. ... 0.62451315 0.49203712 0. ] [ 0. 0. 0. ... 1.5522141 0.6726484 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.5676501 -2.0541656 -0.06577651] [ 0. 0. 0. ... -0.63348305 1.4305001 0. ] [ 0. 0. 0. ... -0.10215294 -0.64180434 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.585193 0.0981908 1.9275863 ] [ 0. 0. 0. ... 0.6272324 -0.59796387 0. ] [ 0. 0. 0. ... -1.2258224 -0.9997785 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.19669066 1.1773394 -1.4829727 ] [ 0. 0. 0. ... 0.14550143 -0.19189127 0. ] [ 0. 0. 0. ... 0.98200583 -1.1754847 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.34890577 1.7805778 -0.8018552 ] [ 0. 0. 0. ... 0.68236387 -2.904782 0. ] [ 0. 0. 0. ... -1.4142418 0.6758706 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.8743999 1.7485626 -0.0271602 ] [ 0. 0. 0. ... 0.18827431 -1.3011271 0. ] [ 0. 0. 0. ... -0.38269895 -1.1282583 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2371917 -0.45844844 1.3640758 ] [ 0. 0. 0. ... 0.62451315 0.49203712 0. ] [ 0. 0. 0. ... 1.5522141 0.6726484 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.5676501 -2.0541656 -0.06577651] [ 0. 0. 0. ... -0.63348305 1.4305001 0. ] [ 0. 0. 0. ... -0.10215294 -0.64180434 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.585193 0.0981908 1.9275863 ] [ 0. 0. 0. ... 0.6272324 -0.59796387 0. ] [ 0. 0. 0. ... -1.2258224 -0.9997785 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.19669066 1.1773394 -1.4829727 ] [ 0. 0. 0. ... 0.14550143 -0.19189127 0. ] [ 0. 0. 0. ... 0.98200583 -1.1754847 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4687.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. 
... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:2 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4689.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:3 - kernel_size:[2, 3] ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4691.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 
0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4693.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.0729722 1.9923301 0. ] [ 0. 0. 0. ... 1.9923301 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.3521785 ... 0. 0. 0. ] [ 0. -0.3521785 -0.9192749 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.3392104 -0.4673106 0. ] [ 0. 0. 0. ... -0.4673106 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.1355169 ... 0. 0. 0. ] [ 0. 0.1355169 -0.4453195 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.1577673 1.4285209 0. ] [ 0. 0. 0. ... 1.4285209 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.68499166 ... 0. 0. 0. ] [ 0. -0.68499166 1.0250497 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.42742398 0.5128598 0. ] [ 0. 0. 0. ... 0.5128598 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.2790207 ... 0. 0. 0. ] [ 0. 1.2790207 -0.9434131 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5108318 0.65871775 0. ] [ 0. 0. 0. ... 0.65871775 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.47704917 ... 0. 0. 0. ] [ 0. -0.47704917 1.70702 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.760938 0.56919545 0. ] [ 0. 0. 0. ... 0.56919545 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.8693611 ... 0. 0. 0. ] [ 0. -0.8693611 0.40743834 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.0729722 1.9923301 0. ] [ 0. 0. 0. ... 1.9923301 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.3521785 ... 0. 0. 0. ] [ 0. -0.3521785 -0.9192749 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.3392104 -0.4673106 0. ] [ 0. 0. 0. 
... -0.4673106 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.1355169 ... 0. 0. 0. ] [ 0. 0.1355169 -0.4453195 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.1577673 1.4285209 0. ] [ 0. 0. 0. ... 1.4285209 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.68499166 ... 0. 0. 0. ] [ 0. -0.68499166 1.0250497 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.42742398 0.5128598 0. ] [ 0. 0. 0. ... 0.5128598 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.2790207 ... 0. 0. 0. ] [ 0. 1.2790207 -0.9434131 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5108318 0.65871775 0. ] [ 0. 0. 0. ... 0.65871775 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.47704917 ... 0. 0. 0. ] [ 0. -0.47704917 1.70702 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.760938 0.56919545 0. ] [ 0. 0. 0. ... 0.56919545 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.8693611 ... 0. 0. 0. ] [ 0. -0.8693611 0.40743834 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4695.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... 0.49734384 1.7415187 1.2633575 ] [ 0. 0. 0. ... 1.7415187 1.2633575 0. ] [ 0. 0. 0. ... 1.2633575 0. 0. ] ... [ 0. 0. -0.56990373 ... 0. 0. 0. ] [ 0. -0.56990373 0.08047757 ... 0. 0. 0. ] [-0.56990373 0.08047757 -0.28538573 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.19508423 0.7064715 -0.8001338 ] [ 0. 0. 0. ... 0.7064715 -0.8001338 0. ] [ 0. 0. 0. ... -0.8001338 0. 0. ] ... [ 0. 0. 0.42729887 ... 0. 0. 0. ] [ 0. 0.42729887 0.9917736 ... 0. 0. 0. ] [ 0.42729887 0.9917736 0.6650067 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 2.2951138 -0.13068245 0.3192517 ] [ 0. 0. 0. ... -0.13068245 0.3192517 0. ] [ 0. 0. 0. ... 0.3192517 0. 0. ] ... [ 0. 0. -1.1017286 ... 0. 0. 0. ] [ 0. -1.1017286 0.32385403 ... 0. 0. 0. ] [-1.1017286 0.32385403 -0.7318957 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.76185745 0.36282167 0.16313992] [ 0. 0. 0. ... 0.36282167 0.16313992 0. ] [ 0. 0. 0. ... 0.16313992 0. 0. ] ... [ 0. 0. -0.5494808 ... 0. 0. 0. ] [ 0. -0.5494808 0.7790176 ... 0. 0. 0. ] [-0.5494808 0.7790176 2.0355613 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.2583266 -1.5709788 0.43856624] [ 0. 0. 0. ... -1.5709788 0.43856624 0. ] [ 0. 0. 0. ... 0.43856624 0. 0. ] ... [ 0. 0. -0.3865033 ... 0. 0. 0. ] [ 0. -0.3865033 0.6097033 ... 0. 0. 0. ] [-0.3865033 0.6097033 1.9472796 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.12202374 1.0223784 -1.4440356 ] [ 0. 0. 0. ... 1.0223784 -1.4440356 0. ] [ 0. 0. 0. ... -1.4440356 0. 0. ] ... [ 0. 0. 1.3260336 ... 0. 0. 0. ] [ 0. 1.3260336 0.14208345 ... 0. 0. 0. ] [ 1.3260336 0.14208345 -0.10212737 ... 0. 0. 0. 
]]]; ov_res: [[[ 0. 0. 0. ... 0.49734384 1.7415187 1.2633575 ] [ 0. 0. 0. ... 1.7415187 1.2633575 0. ] [ 0. 0. 0. ... 1.2633575 0. 0. ] ... [ 0. 0. -0.56990373 ... 0. 0. 0. ] [ 0. -0.56990373 0.08047757 ... 0. 0. 0. ] [-0.56990373 0.08047757 -0.28538573 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.19508423 0.7064715 -0.8001338 ] [ 0. 0. 0. ... 0.7064715 -0.8001338 0. ] [ 0. 0. 0. ... -0.8001338 0. 0. ] ... [ 0. 0. 0.42729887 ... 0. 0. 0. ] [ 0. 0.42729887 0.9917736 ... 0. 0. 0. ] [ 0.42729887 0.9917736 0.6650067 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 2.2951138 -0.13068245 0.3192517 ] [ 0. 0. 0. ... -0.13068245 0.3192517 0. ] [ 0. 0. 0. ... 0.3192517 0. 0. ] ... [ 0. 0. -1.1017286 ... 0. 0. 0. ] [ 0. -1.1017286 0.32385403 ... 0. 0. 0. ] [-1.1017286 0.32385403 -0.7318957 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.76185745 0.36282167 0.16313992] [ 0. 0. 0. ... 0.36282167 0.16313992 0. ] [ 0. 0. 0. ... 0.16313992 0. 0. ] ... [ 0. 0. -0.5494808 ... 0. 0. 0. ] [ 0. -0.5494808 0.7790176 ... 0. 0. 0. ] [-0.5494808 0.7790176 2.0355613 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.2583266 -1.5709788 0.43856624] [ 0. 0. 0. ... -1.5709788 0.43856624 0. ] [ 0. 0. 0. ... 0.43856624 0. 0. ] ... [ 0. 0. -0.3865033 ... 0. 0. 0. ] [ 0. -0.3865033 0.6097033 ... 0. 0. 0. ] [-0.3865033 0.6097033 1.9472796 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.12202374 1.0223784 -1.4440356 ] [ 0. 0. 0. ... 1.0223784 -1.4440356 0. ] [ 0. 0. 0. ... -1.4440356 0. 0. ] ... [ 0. 0. 1.3260336 ... 0. 0. 0. ] [ 0. 1.3260336 0.14208345 ... 0. 0. 0. ] [ 1.3260336 0.14208345 -0.10212737 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4697.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 
0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:3 - kernel_size:[1, 1] ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4699.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4701.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.06 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4703.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4705.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. 
... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4707.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:5 - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4709.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4711.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -1.1226971 0.649379 -0.22476384] [ 0. 0. 0. ... -0.17073873 0.34949794 -0.1673986 ] [ 0. 0. 0. ... 0.00843014 0.61917055 1.8118553 ] ... [ 0. 0.6886552 -1.553235 ... 0. 0. 0. ] [-0.11384159 -1.5122254 0.13958016 ... 0. 0. 0. ] [-1.7229817 0.01832735 1.1263512 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3114425 0.3759896 -0.2653003 ] [ 0. 0. 0. ... 0.21286315 -0.04017434 -0.9342115 ] [ 0. 0. 0. ... -1.5764958 0.49378252 -0.975017 ] ... [ 0. 2.4292464 0.11935852 ... 0. 0. 0. ] [ 1.3705462 -0.5010639 0.95652837 ... 0. 0. 0. ] [-1.3434749 0.09338787 -0.15809429 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.9125042 1.5598488 -0.3354238 ] [ 0. 0. 0. ... 0.57817173 -0.60029334 0.38120598] [ 0. 0. 0. ... -1.0719651 0.13605142 -1.3134302 ] ... [ 0. -0.00495598 -1.1586276 ... 0. 0. 0. ] [-1.3594457 -1.0354307 0.37681663 ... 0. 0. 0. ] [ 0.03861941 -0.7446913 -1.5635339 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.8982717 0.14900142 -0.91822535] [ 0. 0. 0. ... -0.04609848 -0.35849124 0.33564547] [ 0. 0. 0. ... 1.0147682 1.1519097 -0.4109567 ] ... [ 0. 0.63283044 -0.20229568 ... 0. 0. 0. ] [ 1.5635637 -1.336333 0.9136355 ... 0. 0. 0. ] [ 1.1270773 -0.1625942 1.725306 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2882746 -1.4974033 0.94635254] [ 0. 0. 0. ... 0.19838002 -0.37550098 0.11131235] [ 0. 0. 0. ... 0.9331011 -1.3273616 -1.498299 ] ... [ 0. 0.67544115 0.77756685 ... 0. 0. 0. ] [-0.6796198 -0.84352744 -0.09241826 ... 0. 0. 0. ] [-0.48082662 -0.689021 -0.77418125 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 
1.1599673 -0.692649 -0.25971106] [ 0. 0. 0. ... 1.211969 -0.3271419 0.71333176] [ 0. 0. 0. ... -2.3297126 0.46340367 0.03180861] ... [ 0. -0.2194655 -0.18670605 ... 0. 0. 0. ] [-1.614877 0.8267732 0.3828866 ... 0. 0. 0. ] [-3.3962011 -0.3413377 -0.2798619 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -1.1226971 0.649379 -0.22476384] [ 0. 0. 0. ... -0.17073873 0.34949794 -0.1673986 ] [ 0. 0. 0. ... 0.00843014 0.61917055 1.8118553 ] ... [ 0. 0.6886552 -1.553235 ... 0. 0. 0. ] [-0.11384159 -1.5122254 0.13958016 ... 0. 0. 0. ] [-1.7229817 0.01832735 1.1263512 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3114425 0.3759896 -0.2653003 ] [ 0. 0. 0. ... 0.21286315 -0.04017434 -0.9342115 ] [ 0. 0. 0. ... -1.5764958 0.49378252 -0.975017 ] ... [ 0. 2.4292464 0.11935852 ... 0. 0. 0. ] [ 1.3705462 -0.5010639 0.95652837 ... 0. 0. 0. ] [-1.3434749 0.09338787 -0.15809429 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.9125042 1.5598488 -0.3354238 ] [ 0. 0. 0. ... 0.57817173 -0.60029334 0.38120598] [ 0. 0. 0. ... -1.0719651 0.13605142 -1.3134302 ] ... [ 0. -0.00495598 -1.1586276 ... 0. 0. 0. ] [-1.3594457 -1.0354307 0.37681663 ... 0. 0. 0. ] [ 0.03861941 -0.7446913 -1.5635339 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.8982717 0.14900142 -0.91822535] [ 0. 0. 0. ... -0.04609848 -0.35849124 0.33564547] [ 0. 0. 0. ... 1.0147682 1.1519097 -0.4109567 ] ... [ 0. 0.63283044 -0.20229568 ... 0. 0. 0. ] [ 1.5635637 -1.336333 0.9136355 ... 0. 0. 0. ] [ 1.1270773 -0.1625942 1.725306 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2882746 -1.4974033 0.94635254] [ 0. 0. 0. ... 0.19838002 -0.37550098 0.11131235] [ 0. 0. 0. ... 0.9331011 -1.3273616 -1.498299 ] ... [ 0. 0.67544115 0.77756685 ... 0. 0. 0. ] [-0.6796198 -0.84352744 -0.09241826 ... 0. 0. 0. ] [-0.48082662 -0.689021 -0.77418125 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1599673 -0.692649 -0.25971106] [ 0. 0. 0. ... 1.211969 -0.3271419 0.71333176] [ 0. 0. 0. ... -2.3297126 0.46340367 0.03180861] ... [ 0. -0.2194655 -0.18670605 ... 0. 0. 0. ] [-1.614877 0.8267732 0.3828866 ... 0. 0. 0. 
] [-3.3962011 -0.3413377 -0.2798619 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:1 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4713.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -1.6909299 -0.07589033 0.31071097] [ 0. 0. 0. ... -0.37956432 2.0874999 0. ] [ 0. -0.3027885 -0.6585485 ... -0.4237926 -1.8524405 -0.09299542] ... [ 0.30627453 -1.2983115 0.59587437 ... -0.5846 -0.82135427 0. ] [ 0. 0.4777867 0.00576121 ... 0.57184494 -0.2961749 0.02864464] [-0.70797473 -0.5284892 0.0659708 ... -1.4779664 1.2316912 0. ]] [[ 0. 0. 0. ... -0.9876433 1.558236 -0.8722787 ] [ 0. 0. 0. ... -0.2185412 1.6550909 0. ] [ 0. 0.9290572 -0.3507685 ... -1.7795905 0.63484114 0.29329258] ... [ 1.4088929 -0.54890794 -0.62143373 ... 0.5484719 1.2315692 0. ] [ 0. -0.44850814 0.01041769 ... 0.15906222 0.07207663 -0.98074996] [ 0.82485926 1.2474102 0.36151195 ... 0.12809873 -1.0123575 0. ]] [[ 0. 0. 0. ... -0.2602722 0.05017627 -0.11080393] [ 0. 0. 0. ... -2.6700954 -1.489132 0. ] [ 0. 0.21267217 0.19798839 ... 0.5344014 -0.3292877 0.3165949 ] ... [-0.66013974 -0.23576482 1.901554 ... 0.09005111 -1.3423588 0. ] [ 0. -0.31506586 -0.87276846 ... -0.42367157 -0.47428378 0.04202919] [ 0.47734877 2.2481441 1.7727664 ... -0.28931963 -0.32361317 0. ]] ... [[ 0. 0. 0. ... -0.34962398 1.7008466 -0.8083331 ] [ 0. 0. 0. ... -0.1578221 1.0599179 0. ] [ 0. -0.09260327 0.49685836 ... -0.86295176 0.26010272 -0.12514786] ... [ 0.38669303 0.85821915 -0.42738032 ... -0.27193046 -0.82913727 0. ] [ 0. -0.10732472 -0.64577466 ... -0.5434288 -1.0515814 -0.16711539] [ 0.45841455 0.8083373 0.4307446 ... -0.6053052 0.2673772 0. ]] [[ 0. 0. 0. ... -0.04133281 1.2114291 -0.67053246] [ 0. 0. 0. ... 
2.2185748 1.0474312 0. ] [ 0. -0.9543483 -0.13150446 ... -0.9966497 0.8105538 0.11985672] ... [-0.55704397 -0.7939988 1.068883 ... 1.5842183 -0.17540868 0. ] [ 0. -0.40814725 0.78808993 ... 0.8126173 0.28212464 0.5276521 ] [ 0.7871101 1.3760844 1.3540038 ... 0.49163386 -0.17825694 0. ]] [[ 0. 0. 0. ... 0.3668396 0.62871134 0.40410587] [ 0. 0. 0. ... 0.8680099 0.02478612 0. ] [ 0. -0.95607823 -2.603029 ... -2.1580043 0.25490215 -0.00552215] ... [-0.32600275 0.28390473 1.3810658 ... -0.2674244 1.3762391 0. ] [ 0. -0.96900576 0.33647108 ... -1.8900533 1.1834152 0.1717693 ] [-1.1122946 0.5979715 -0.6115073 ... 0.31517977 -2.528026 0. ]]]; ov_res: [[[ 0. 0. 0. ... -1.6909299 -0.07589033 0.31071097] [ 0. 0. 0. ... -0.37956432 2.0874999 0. ] [ 0. -0.3027885 -0.6585485 ... -0.4237926 -1.8524405 -0.09299542] ... [ 0.30627453 -1.2983115 0.59587437 ... -0.5846 -0.82135427 0. ] [ 0. 0.4777867 0.00576121 ... 0.57184494 -0.2961749 0.02864464] [-0.70797473 -0.5284892 0.0659708 ... -1.4779664 1.2316912 0. ]] [[ 0. 0. 0. ... -0.9876433 1.558236 -0.8722787 ] [ 0. 0. 0. ... -0.2185412 1.6550909 0. ] [ 0. 0.9290572 -0.3507685 ... -1.7795905 0.63484114 0.29329258] ... [ 1.4088929 -0.54890794 -0.62143373 ... 0.5484719 1.2315692 0. ] [ 0. -0.44850814 0.01041769 ... 0.15906222 0.07207663 -0.98074996] [ 0.82485926 1.2474102 0.36151195 ... 0.12809873 -1.0123575 0. ]] [[ 0. 0. 0. ... -0.2602722 0.05017627 -0.11080393] [ 0. 0. 0. ... -2.6700954 -1.489132 0. ] [ 0. 0.21267217 0.19798839 ... 0.5344014 -0.3292877 0.3165949 ] ... [-0.66013974 -0.23576482 1.901554 ... 0.09005111 -1.3423588 0. ] [ 0. -0.31506586 -0.87276846 ... -0.42367157 -0.47428378 0.04202919] [ 0.47734877 2.2481441 1.7727664 ... -0.28931963 -0.32361317 0. ]] ... [[ 0. 0. 0. ... -0.34962398 1.7008466 -0.8083331 ] [ 0. 0. 0. ... -0.1578221 1.0599179 0. ] [ 0. -0.09260327 0.49685836 ... -0.86295176 0.26010272 -0.12514786] ... [ 0.38669303 0.85821915 -0.42738032 ... -0.27193046 -0.82913727 0. ] [ 0. -0.10732472 -0.64577466 ... 
-0.5434288 -1.0515814 -0.16711539] [ 0.45841455 0.8083373 0.4307446 ... -0.6053052 0.2673772 0. ]] [[ 0. 0. 0. ... -0.04133281 1.2114291 -0.67053246] [ 0. 0. 0. ... 2.2185748 1.0474312 0. ] [ 0. -0.9543483 -0.13150446 ... -0.9966497 0.8105538 0.11985672] ... [-0.55704397 -0.7939988 1.068883 ... 1.5842183 -0.17540868 0. ] [ 0. -0.40814725 0.78808993 ... 0.8126173 0.28212464 0.5276521 ] [ 0.7871101 1.3760844 1.3540038 ... 0.49163386 -0.17825694 0. ]] [[ 0. 0. 0. ... 0.3668396 0.62871134 0.40410587] [ 0. 0. 0. ... 0.8680099 0.02478612 0. ] [ 0. -0.95607823 -2.603029 ... -2.1580043 0.25490215 -0.00552215] ... [-0.32600275 0.28390473 1.3810658 ... -0.2674244 1.3762391 0. ] [ 0. -0.96900576 0.33647108 ... -1.8900533 1.1834152 0.1717693 ] [-1.1122946 0.5979715 -0.6115073 ... 0.31517977 -2.528026 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4715.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... 0.5083762 0.16745147 -0.12890951] [ 0. 0. 0. ... 0.30944827 0.29442877 1.2984551 ] [ 0. 0. 0. ... -0.6583615 0.47338662 0.5137726 ] ... [ 0. -0.59363526 -1.437394 ... -0.8838688 -0.23527718 0.6211573 ] [ 0.5174011 1.1936321 2.21961 ... 0.26679474 -0.43862545 0.08511702] [-0.54157054 -0.09358644 -0.5368773 ... 2.0647156 -0.7187009 0.0187045 ]] [[ 0. 0. 0. ... 0.37196228 -0.10743326 -0.82922155] [ 0. 0. 0. ... 0.6681328 -0.51565856 0.70005876] [ 0. 0. 0. ... -1.9440315 0.7963184 -2.4849672 ] ... [ 0. 0.32643843 1.2219602 ... 0.3401829 0.5597532 0.11608431] [ 0.8077398 -0.7678948 -0.6387603 ... 0.05536496 2.5144672 -1.5668353 ] [ 0.14106123 0.00679241 2.1753998 ... -2.5573716 -1.6777363 0.9984176 ]] [[ 0. 0. 0. ... 0.4654184 0.5645489 -1.3273056 ] [ 0. 0. 0. ... 0.5140653 0.44782522 -1.1034681 ] [ 0. 0. 0. ... -0.68978083 -0.11921572 1.2550789 ] ... [ 0. 1.0116957 -0.37487575 ... -0.44476423 -0.13621075 -1.2905967 ] [ 1.6509103 -0.35688728 -1.7830397 ... 0.84225476 1.1124784 0.8068084 ] [ 1.0312331 0.38189015 2.2341533 ... 0.20825143 0.06395175 -1.5303906 ]] ... [[ 0. 0. 0. ... 0.531676 0.15883395 -0.49736145] [ 0. 0. 0. ... 0.14923991 -0.29286996 -0.9245417 ] [ 0. 0. 0. ... 0.5287076 -0.8473307 -0.06638835] ... [ 0. -0.18427649 1.6072375 ... -0.24580368 0.11673735 -0.24405421] [ 1.1801682 0.04743951 1.7175366 ... -2.120488 0.43924716 -0.62449974] [-0.6286867 -0.07970004 -0.07452822 ... 0.55557746 -0.69522727 -0.6246305 ]] [[ 0. 0. 0. ... 0.07454784 0.7753424 1.0126472 ] [ 0. 0. 0. ... 
-1.1782712 0.96938276 -1.2547561 ] [ 0. 0. 0. ... -0.15868127 -0.77503365 0.2625027 ] ... [ 0. -0.3274228 -0.34144396 ... -0.42698616 -0.2551802 0.07292018] [-0.3061253 0.649777 -0.51050615 ... -0.6617308 1.132234 0.07953931] [-0.8738048 -1.6785315 0.48614585 ... 0.09828311 1.1094403 -0.8434465 ]] [[ 0. 0. 0. ... 0.27776712 0.6550161 -0.9370563 ] [ 0. 0. 0. ... -0.70563346 -0.09705281 -0.07081906] [ 0. 0. 0. ... 0.07174558 1.1815499 -0.9392467 ] ... [ 0. -0.30539685 0.7550724 ... 0.29676962 0.08516771 -1.2608187 ] [ 0.59562206 0.44549567 -0.14268833 ... 0.22791167 0.12121966 -2.8310528 ] [ 0.81579554 1.0974665 -0.59067386 ... 0.17447151 0.14571095 0.6211457 ]]]; ov_res: [[[ 0. 0. 0. ... 0.5083762 0.16745147 -0.12890951] [ 0. 0. 0. ... 0.30944827 0.29442877 1.2984551 ] [ 0. 0. 0. ... -0.6583615 0.47338662 0.5137726 ] ... [ 0. -0.59363526 -1.437394 ... -0.8838688 -0.23527718 0.6211573 ] [ 0.5174011 1.1936321 2.21961 ... 0.26679474 -0.43862545 0.08511702] [-0.54157054 -0.09358644 -0.5368773 ... 2.0647156 -0.7187009 0.0187045 ]] [[ 0. 0. 0. ... 0.37196228 -0.10743326 -0.82922155] [ 0. 0. 0. ... 0.6681328 -0.51565856 0.70005876] [ 0. 0. 0. ... -1.9440315 0.7963184 -2.4849672 ] ... [ 0. 0.32643843 1.2219602 ... 0.3401829 0.5597532 0.11608431] [ 0.8077398 -0.7678948 -0.6387603 ... 0.05536496 2.5144672 -1.5668353 ] [ 0.14106123 0.00679241 2.1753998 ... -2.5573716 -1.6777363 0.9984176 ]] [[ 0. 0. 0. ... 0.4654184 0.5645489 -1.3273056 ] [ 0. 0. 0. ... 0.5140653 0.44782522 -1.1034681 ] [ 0. 0. 0. ... -0.68978083 -0.11921572 1.2550789 ] ... [ 0. 1.0116957 -0.37487575 ... -0.44476423 -0.13621075 -1.2905967 ] [ 1.6509103 -0.35688728 -1.7830397 ... 0.84225476 1.1124784 0.8068084 ] [ 1.0312331 0.38189015 2.2341533 ... 0.20825143 0.06395175 -1.5303906 ]] ... [[ 0. 0. 0. ... 0.531676 0.15883395 -0.49736145] [ 0. 0. 0. ... 0.14923991 -0.29286996 -0.9245417 ] [ 0. 0. 0. ... 0.5287076 -0.8473307 -0.06638835] ... [ 0. -0.18427649 1.6072375 ... 
-0.24580368 0.11673735 -0.24405421] [ 1.1801682 0.04743951 1.7175366 ... -2.120488 0.43924716 -0.62449974] [-0.6286867 -0.07970004 -0.07452822 ... 0.55557746 -0.69522727 -0.6246305 ]] [[ 0. 0. 0. ... 0.07454784 0.7753424 1.0126472 ] [ 0. 0. 0. ... -1.1782712 0.96938276 -1.2547561 ] [ 0. 0. 0. ... -0.15868127 -0.77503365 0.2625027 ] ... [ 0. -0.3274228 -0.34144396 ... -0.42698616 -0.2551802 0.07292018] [-0.3061253 0.649777 -0.51050615 ... -0.6617308 1.132234 0.07953931] [-0.8738048 -1.6785315 0.48614585 ... 0.09828311 1.1094403 -0.8434465 ]] [[ 0. 0. 0. ... 0.27776712 0.6550161 -0.9370563 ] [ 0. 0. 0. ... -0.70563346 -0.09705281 -0.07081906] [ 0. 0. 0. ... 0.07174558 1.1815499 -0.9392467 ] ... [ 0. -0.30539685 0.7550724 ... 0.29676962 0.08516771 -1.2608187 ] [ 0.59562206 0.44549567 -0.14268833 ... 0.22791167 0.12121966 -2.8310528 ] [ 0.81579554 1.0974665 -0.59067386 ... 0.17447151 0.14571095 0.6211457 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4717.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.9029359 1.4394245 0.05021427] [ 0. 0. 0. ... 0.10383995 0.95812273 0. ] [ 0. -1.456268 -1.0437669 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.4380785 -0.21277949 0. ] [ 0. -1.254178 0.51552624 ... 0. 0. 0. ] [ 0.72769743 1.1397408 0.8009184 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2227775 -0.948377 -0.7624396 ] [ 0. 0. 0. ... -0.6505061 -0.00654555 0. ] [ 0. 0.34443116 1.0723946 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.6291053 0.86929303 0. ] [ 0. 0.12657951 0.07594305 ... 0. 0. 0. ] [ 0.10857765 0.8918387 -0.43112725 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.2880401 0.35725382 -1.3717885 ] [ 0. 0. 0. ... 2.229273 -0.1699713 0. ] [ 0. -0.85966206 0.12176581 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.33130756 -0.9738848 0. ] [ 0. -1.7761585 1.338946 ... 0. 0. 0. ] [ 1.5276661 0.15674692 -0.15641317 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.1450874 -0.40980107 -1.3774319 ] [ 0. 0. 0. ... 0.49478042 0.20049277 0. ] [ 0. 0.8164667 -0.39098018 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.30951902 -1.0535697 0. ] [ 0. 0.99326044 -1.3374902 ... 0. 0. 0. ] [-1.0784019 -2.5276985 -0.5109836 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.7541045 0.5356028 0.1818087 ] [ 0. 0. 0. ... -1.1072886 -1.1162385 0. ] [ 0. -1.3853432 0.56807446 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.5355699 -0.45072114 0. ] [ 0. -0.07895107 -0.57720286 ... 0. 0. 0. ] [ 0.40257686 0.6880334 0.8645587 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.98099333 0.8355311 -0.6681108 ] [ 0. 0. 0. ... 0.29835233 1.3907049 0. ] [ 0. 0.3743809 -0.12715593 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 
-0.92362326 -1.9828732 0. ] [ 0. 0.8316742 -1.5898569 ... 0. 0. 0. ] [-1.063959 -0.05192484 0.84097695 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.9029359 1.4394245 0.05021427] [ 0. 0. 0. ... 0.10383995 0.95812273 0. ] [ 0. -1.456268 -1.0437669 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.4380785 -0.21277949 0. ] [ 0. -1.254178 0.51552624 ... 0. 0. 0. ] [ 0.72769743 1.1397408 0.8009184 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2227775 -0.948377 -0.7624396 ] [ 0. 0. 0. ... -0.6505061 -0.00654555 0. ] [ 0. 0.34443116 1.0723946 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.6291053 0.86929303 0. ] [ 0. 0.12657951 0.07594305 ... 0. 0. 0. ] [ 0.10857765 0.8918387 -0.43112725 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.2880401 0.35725382 -1.3717885 ] [ 0. 0. 0. ... 2.229273 -0.1699713 0. ] [ 0. -0.85966206 0.12176581 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.33130756 -0.9738848 0. ] [ 0. -1.7761585 1.338946 ... 0. 0. 0. ] [ 1.5276661 0.15674692 -0.15641317 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.1450874 -0.40980107 -1.3774319 ] [ 0. 0. 0. ... 0.49478042 0.20049277 0. ] [ 0. 0.8164667 -0.39098018 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.30951902 -1.0535697 0. ] [ 0. 0.99326044 -1.3374902 ... 0. 0. 0. ] [-1.0784019 -2.5276985 -0.5109836 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.7541045 0.5356028 0.1818087 ] [ 0. 0. 0. ... -1.1072886 -1.1162385 0. ] [ 0. -1.3853432 0.56807446 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.5355699 -0.45072114 0. ] [ 0. -0.07895107 -0.57720286 ... 0. 0. 0. ] [ 0.40257686 0.6880334 0.8645587 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.98099333 0.8355311 -0.6681108 ] [ 0. 0. 0. ... 0.29835233 1.3907049 0. ] [ 0. 0.3743809 -0.12715593 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.92362326 -1.9828732 0. ] [ 0. 0.8316742 -1.5898569 ... 0. 0. 0. ] [-1.063959 -0.05192484 0.84097695 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:1 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4719.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... -0.7644373 -2.1396735 -1.2312568 ] [ 0. 0. 0. ... -2.164062 -0.74138784 -0.31564113] [ 0. 0. 0. ... 0.17655791 0.62054473 0.7723103 ]] [[ 0. 0. 0. ... -0.65721416 0.5606348 -0.02563669] [ 0. 0. 0. ... -0.6531559 0.7736908 0.91894436] [ 0. 0. 0. ... 0.3713042 0.5859527 0.32036644]] [[ 0. 0. 0. ... 1.1738065 -0.616077 -0.1916241 ] [ 0. 0. 0. ... 0.01918364 -0.76846856 -0.11032903] [ 0. 0. 0. ... -0.42209736 -0.5252834 1.8890938 ]] ... [[ 0. 0. 0. ... 0.73121226 -0.7462621 -0.26233646] [ 0. 0. 0. ... 1.5945905 -1.6567097 0.75176454] [ 0. 0. 0. ... 0.6931175 0.02357887 -1.6274058 ]] [[ 0. 0. 0. ... 1.383138 1.1965139 -1.147804 ] [ 0. 0. 0. ... -0.9089292 0.5615222 0.10297643] [ 0. 0. 0. ... 2.3372865 3.1997368 -1.3742982 ]] [[ 0. 0. 0. ... 0.4461667 -1.2158357 0.06001003] [ 0. 0. 0. ... -0.6065115 -0.57116467 -0.7437115 ] [ 0. 0. 0. ... 1.1267608 0.99582493 -0.66023964]]]; ov_res: [[[ 0. 0. 0. ... -0.7644373 -2.1396735 -1.2312568 ] [ 0. 0. 0. ... -2.164062 -0.74138784 -0.31564113] [ 0. 0. 0. ... 0.17655791 0.62054473 0.7723103 ]] [[ 0. 0. 0. ... -0.65721416 0.5606348 -0.02563669] [ 0. 0. 0. ... -0.6531559 0.7736908 0.91894436] [ 0. 0. 0. ... 0.3713042 0.5859527 0.32036644]] [[ 0. 0. 0. ... 1.1738065 -0.616077 -0.1916241 ] [ 0. 0. 0. ... 0.01918364 -0.76846856 -0.11032903] [ 0. 0. 0. ... -0.42209736 -0.5252834 1.8890938 ]] ... [[ 0. 0. 0. ... 0.73121226 -0.7462621 -0.26233646] [ 0. 0. 0. ... 1.5945905 -1.6567097 0.75176454] [ 0. 0. 0. ... 0.6931175 0.02357887 -1.6274058 ]] [[ 0. 0. 0. ... 
1.383138 1.1965139 -1.147804 ] [ 0. 0. 0. ... -0.9089292 0.5615222 0.10297643] [ 0. 0. 0. ... 2.3372865 3.1997368 -1.3742982 ]] [[ 0. 0. 0. ... 0.4461667 -1.2158357 0.06001003] [ 0. 0. 0. ... -0.6065115 -0.57116467 -0.7437115 ] [ 0. 0. 0. ... 1.1267608 0.99582493 -0.66023964]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4721.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -5.8737403e-01 -1.6751494e-01 1.2584993e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 8.6101854e-01 -1.3098906e+00 -5.8440447e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.4861611e-03 -1.6006361e+00 0.0000000e+00] ... [ 0.0000000e+00 -1.8677820e+00 -2.7696806e-01 ... 3.9218709e-01 8.8468999e-01 -1.4333324e-01] [-8.5664868e-02 -1.0121422e+00 -6.5778874e-02 ... -4.5881009e-01 -1.7920519e-01 1.7666930e+00] [ 6.5750366e-01 3.3404231e-01 3.7492044e-02 ... -1.1662366e-01 3.6353716e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 3.3273149e-02 -1.7195623e-01 -1.7586246e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.1439543e+00 -7.6096433e-01 -8.2123256e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.4992263e+00 -6.6488987e-01 0.0000000e+00] ... [ 0.0000000e+00 -1.7913339e+00 -1.2777187e-01 ... 6.4892304e-01 -1.0674894e+00 1.2671711e-01] [-8.9929342e-01 6.6562402e-01 1.9300642e+00 ... 1.7394383e+00 4.3518910e-01 1.3491397e+00] [ 8.4874105e-01 -8.5260624e-01 1.8189603e-01 ... -3.2691300e-01 -3.8613033e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -4.3208292e-01 -2.7586317e-01 4.2136380e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 6.3687021e-01 -1.0898025e+00 -6.1334062e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.7764497e-01 1.9041272e-01 0.0000000e+00] ... [ 0.0000000e+00 6.5144412e-02 -1.0354991e+00 ... 
-1.2592303e+00 -7.4546766e-01 -8.1901503e-01] [ 1.3942679e+00 5.2959895e-01 1.6359428e-01 ... 5.5063373e-01 -1.2577319e+00 8.4341943e-01] [ 2.1836631e-02 -7.3100333e-03 -1.1925958e+00 ... -5.4072303e-01 -5.7130551e-01 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 7.2050911e-01 2.0583095e-01 1.1098113e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.6852536e+00 1.6709974e+00 -7.1523249e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -9.2435646e-01 -3.7851661e-01 0.0000000e+00] ... [ 0.0000000e+00 4.0795186e-01 2.3552465e+00 ... -1.2050525e-01 4.4427520e-01 8.9915581e-02] [ 1.8518652e+00 -1.1240708e-01 -1.5244133e+00 ... -1.0628601e+00 4.3515965e-01 -1.7390412e+00] [ 4.6357369e-01 -9.1611528e-01 4.0836992e+00 ... -1.3681364e-02 7.5293070e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.8536242e+00 -7.2225279e-01 9.6660770e-02] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.1966420e-01 1.3708921e-01 -3.7550634e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.3809092e-01 2.7200329e-01 0.0000000e+00] ... [ 0.0000000e+00 7.2778022e-01 8.2182169e-01 ... 1.6879354e+00 -1.1449614e+00 4.9392845e-02] [-1.1667954e+00 -2.8641664e-03 1.5273402e+00 ... -8.6704904e-01 1.2730533e+00 7.5113401e-02] [-1.0126674e+00 -1.1085848e+00 4.2320350e-01 ... 2.2939475e+00 1.8288869e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 4.5013568e-01 2.3463751e-01 1.0871900e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -4.9010888e-01 7.0595133e-01 -9.0673941e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -5.5670810e-01 2.5143850e-01 0.0000000e+00] ... [ 0.0000000e+00 -2.7237060e-02 -1.4979914e+00 ... -2.0791562e-01 -6.5434510e-01 -4.0080687e-01] [-1.0887778e+00 5.2332425e-01 -1.1047385e+00 ... -5.4933101e-01 1.4195522e+00 7.7781871e-02] [-1.1454712e+00 -4.5839980e-01 -2.6223434e-02 ... -5.1165336e-01 1.4261440e+00 0.0000000e+00]]]; ov_res: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
-5.8737403e-01 -1.6751494e-01 1.2584993e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 8.6101854e-01 -1.3098906e+00 -5.8440447e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.4861611e-03 -1.6006361e+00 0.0000000e+00] ... [ 0.0000000e+00 -1.8677820e+00 -2.7696806e-01 ... 3.9218709e-01 8.8468999e-01 -1.4333324e-01] [-8.5664868e-02 -1.0121422e+00 -6.5778874e-02 ... -4.5881009e-01 -1.7920519e-01 1.7666930e+00] [ 6.5750366e-01 3.3404231e-01 3.7492044e-02 ... -1.1662366e-01 3.6353716e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 3.3273149e-02 -1.7195623e-01 -1.7586246e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.1439543e+00 -7.6096433e-01 -8.2123256e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.4992263e+00 -6.6488987e-01 0.0000000e+00] ... [ 0.0000000e+00 -1.7913339e+00 -1.2777187e-01 ... 6.4892304e-01 -1.0674894e+00 1.2671711e-01] [-8.9929342e-01 6.6562402e-01 1.9300642e+00 ... 1.7394383e+00 4.3518910e-01 1.3491397e+00] [ 8.4874105e-01 -8.5260624e-01 1.8189603e-01 ... -3.2691300e-01 -3.8613033e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -4.3208292e-01 -2.7586317e-01 4.2136380e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 6.3687021e-01 -1.0898025e+00 -6.1334062e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.7764497e-01 1.9041272e-01 0.0000000e+00] ... [ 0.0000000e+00 6.5144412e-02 -1.0354991e+00 ... -1.2592303e+00 -7.4546766e-01 -8.1901503e-01] [ 1.3942679e+00 5.2959895e-01 1.6359428e-01 ... 5.5063373e-01 -1.2577319e+00 8.4341943e-01] [ 2.1836631e-02 -7.3100333e-03 -1.1925958e+00 ... -5.4072303e-01 -5.7130551e-01 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 7.2050911e-01 2.0583095e-01 1.1098113e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.6852536e+00 1.6709974e+00 -7.1523249e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -9.2435646e-01 -3.7851661e-01 0.0000000e+00] ... [ 0.0000000e+00 4.0795186e-01 2.3552465e+00 ... 
-1.2050525e-01 4.4427520e-01 8.9915581e-02] [ 1.8518652e+00 -1.1240708e-01 -1.5244133e+00 ... -1.0628601e+00 4.3515965e-01 -1.7390412e+00] [ 4.6357369e-01 -9.1611528e-01 4.0836992e+00 ... -1.3681364e-02 7.5293070e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.8536242e+00 -7.2225279e-01 9.6660770e-02] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.1966420e-01 1.3708921e-01 -3.7550634e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.3809092e-01 2.7200329e-01 0.0000000e+00] ... [ 0.0000000e+00 7.2778022e-01 8.2182169e-01 ... 1.6879354e+00 -1.1449614e+00 4.9392845e-02] [-1.1667954e+00 -2.8641664e-03 1.5273402e+00 ... -8.6704904e-01 1.2730533e+00 7.5113401e-02] [-1.0126674e+00 -1.1085848e+00 4.2320350e-01 ... 2.2939475e+00 1.8288869e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 4.5013568e-01 2.3463751e-01 1.0871900e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -4.9010888e-01 7.0595133e-01 -9.0673941e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -5.5670810e-01 2.5143850e-01 0.0000000e+00] ... [ 0.0000000e+00 -2.7237060e-02 -1.4979914e+00 ... -2.0791562e-01 -6.5434510e-01 -4.0080687e-01] [-1.0887778e+00 5.2332425e-01 -1.1047385e+00 ... -5.4933101e-01 1.4195522e+00 7.7781871e-02] [-1.1454712e+00 -4.5839980e-01 -2.6223434e-02 ... -5.1165336e-01 1.4261440e+00 0.0000000e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4723.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -1.9294751 -1.6320676 -1.5684421 ] [ 0. 0. 0. ... 0.48733884 0.47515774 0.358302 ] [ 0. 0.3232193 0.1988691 ... 0.45994288 1.4189825 -1.4726299 ] ... [-0.18286407 -1.2804966 -0.3035587 ... 0.9278248 0.99630755 1.8721833 ] [ 0. 1.1314963 0.26038584 ... 0. 0. 0. ] [-0.5400824 -0.19236434 -1.1975011 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.02968138 -0.93499357 -1.4717495 ] [ 0. 0. 0. ... 0.9146737 -0.5214856 0.45578605] [ 0. 0.13811144 -0.5713154 ... -0.16241242 -0.9369942 0.14649533] ... [ 0.09442836 -1.0379715 -0.27172577 ... 0.6717998 -0.7274005 -0.27328667] [ 0. -0.8088346 1.6911209 ... 0. 0. 0. ] [-0.96672696 1.7926006 2.1574411 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.2352359 0.03973905 -0.7391137 ] [ 0. 0. 0. ... 0.9652599 0.09952743 1.3108947 ] [ 0. -0.12957288 2.1765044 ... 0.6738511 0.00749173 0.8250614 ] ... [ 2.4841144 1.1704121 0.8046082 ... 0.5014775 0.6219201 -0.8747416 ] [ 0. 0.7700381 1.1049418 ... 0. 0. 0. ] [ 0.1664483 0.0705973 -0.6577549 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.1547521 -1.4939067 1.8083671 ] [ 0. 0. 0. ... -0.2049513 1.1076242 -0.5182491 ] [ 0. -1.3077251 -1.1521847 ... 1.0245984 -1.1937642 0.346398 ] ... [ 0.6232465 -0.05947123 0.02947542 ... 0.95743406 2.441608 2.4750073 ] [ 0. 0.4697424 1.0878574 ... 0. 0. 0. ] [-0.61327285 -0.00834641 -0.080222 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.6323172 -0.4752517 -1.0692024 ] [ 0. 0. 0. ... 1.2462462 -1.1730723 -0.69296944] [ 0. 0.53463155 0.83061683 ... 
-0.77098817 -0.19345854 0.07158305] ... [-0.5747834 -1.1728328 -0.7902781 ... -0.61105317 -0.38187143 0.70893997] [ 0. 1.4972267 0.9274307 ... 0. 0. 0. ] [-0.53569406 0.6861604 -0.49506915 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.4979083 -0.6039292 0.31840426] [ 0. 0. 0. ... 1.2150762 -0.24928692 0.766757 ] [ 0. -0.6507464 -0.6153449 ... -0.7147966 0.41062027 -0.67300856] ... [-0.39115894 0.31368318 1.195329 ... -0.34133458 -0.5798183 -0.6054925 ] [ 0. 0.58161277 0.20850012 ... 0. 0. 0. ] [ 1.1803939 -0.8147363 -1.1441303 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -1.9294751 -1.6320676 -1.5684421 ] [ 0. 0. 0. ... 0.48733884 0.47515774 0.358302 ] [ 0. 0.3232193 0.1988691 ... 0.45994288 1.4189825 -1.4726299 ] ... [-0.18286407 -1.2804966 -0.3035587 ... 0.9278248 0.99630755 1.8721833 ] [ 0. 1.1314963 0.26038584 ... 0. 0. 0. ] [-0.5400824 -0.19236434 -1.1975011 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.02968138 -0.93499357 -1.4717495 ] [ 0. 0. 0. ... 0.9146737 -0.5214856 0.45578605] [ 0. 0.13811144 -0.5713154 ... -0.16241242 -0.9369942 0.14649533] ... [ 0.09442836 -1.0379715 -0.27172577 ... 0.6717998 -0.7274005 -0.27328667] [ 0. -0.8088346 1.6911209 ... 0. 0. 0. ] [-0.96672696 1.7926006 2.1574411 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.2352359 0.03973905 -0.7391137 ] [ 0. 0. 0. ... 0.9652599 0.09952743 1.3108947 ] [ 0. -0.12957288 2.1765044 ... 0.6738511 0.00749173 0.8250614 ] ... [ 2.4841144 1.1704121 0.8046082 ... 0.5014775 0.6219201 -0.8747416 ] [ 0. 0.7700381 1.1049418 ... 0. 0. 0. ] [ 0.1664483 0.0705973 -0.6577549 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.1547521 -1.4939067 1.8083671 ] [ 0. 0. 0. ... -0.2049513 1.1076242 -0.5182491 ] [ 0. -1.3077251 -1.1521847 ... 1.0245984 -1.1937642 0.346398 ] ... [ 0.6232465 -0.05947123 0.02947542 ... 0.95743406 2.441608 2.4750073 ] [ 0. 0.4697424 1.0878574 ... 0. 0. 0. ] [-0.61327285 -0.00834641 -0.080222 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.6323172 -0.4752517 -1.0692024 ] [ 0. 0. 0. ... 1.2462462 -1.1730723 -0.69296944] [ 0. 0.53463155 0.83061683 ... 
-0.77098817 -0.19345854 0.07158305] ... [-0.5747834 -1.1728328 -0.7902781 ... -0.61105317 -0.38187143 0.70893997] [ 0. 1.4972267 0.9274307 ... 0. 0. 0. ] [-0.53569406 0.6861604 -0.49506915 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.4979083 -0.6039292 0.31840426] [ 0. 0. 0. ... 1.2150762 -0.24928692 0.766757 ] [ 0. -0.6507464 -0.6153449 ... -0.7147966 0.41062027 -0.67300856] ... [-0.39115894 0.31368318 1.195329 ... -0.34133458 -0.5798183 -0.6054925 ] [ 0. 0.58161277 0.20850012 ... 0. 0. 0. ] [ 1.1803939 -0.8147363 -1.1441303 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4725.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.25716576 -0.7395452 -0.11623523] [ 0. 0. 0. ... 0.7461153 1.2740278 -1.3424642 ] [ 0. 0. 0. ... 1.2024939 -1.1730793 0. ] ... [ 0. 2.2800179 0.99596083 ... 0. 0. 0. ] [ 0.34666985 0.5753187 -0.5768064 ... 0. 0. 0. ] [-0.5025542 -0.22160877 -0.8476715 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.08570863 -1.3713626 0.9387862 ] [ 0. 0. 0. ... -0.48655495 0.9555984 2.0357842 ] [ 0. 0. 0. ... -0.5146046 0.17667325 0. ] ... [ 0. 1.8181206 -1.044174 ... 0. 0. 0. ] [-0.40879744 0.48971412 0.50184554 ... 0. 0. 0. ] [ 0.12460218 -0.19572599 0.4926916 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.04418415 -0.61242473 0.06596559] [ 0. 0. 0. ... 1.8728706 -0.69998914 -1.8382224 ] [ 0. 0. 0. ... -0.68340695 1.2373266 0. ] ... [ 0. 1.1896273 -0.93665737 ... 0. 0. 0. ] [ 0.27349377 -0.79452103 -0.3133703 ... 0. 0. 0. ] [-0.96717477 -0.08635427 0.38179862 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.1718487 1.475679 0.339228 ] [ 0. 0. 0. ... 0.38883817 -0.5613581 0.5087321 ] [ 0. 0. 0. ... 0.96898 -0.34760675 0. ] ... [ 0. 0.29931286 -2.390545 ... 0. 0. 0. ] [-1.2025137 1.1330491 -0.4132978 ... 0. 0. 0. ] [-1.2630048 -1.0540015 -0.29670113 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1439123 -0.99841076 0.72012633] [ 0. 0. 0. ... 0.7405023 0.73030823 -0.25424787] [ 0. 0. 0. ... 1.9987628 -0.04612917 0. ] ... [ 0. 0.1279284 0.01781355 ... 0. 0. 0. ] [ 0.5086954 1.3759782 -0.21645878 ... 0. 0. 0. ] [ 0.24768795 0.21396412 1.2314262 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.9985325 0.6928263 0.74637514] [ 0. 0. 0. ... 
0.01209971 0.19724093 0.9143294 ] [ 0. 0. 0. ... 1.0481097 -2.3600168 0. ] ... [ 0. -0.76737815 1.7793982 ... 0. 0. 0. ] [ 3.4075804 0.13336483 -0.23249276 ... 0. 0. 0. ] [ 0.6508793 2.6935282 -0.7325035 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.25716576 -0.7395452 -0.11623523] [ 0. 0. 0. ... 0.7461153 1.2740278 -1.3424642 ] [ 0. 0. 0. ... 1.2024939 -1.1730793 0. ] ... [ 0. 2.2800179 0.99596083 ... 0. 0. 0. ] [ 0.34666985 0.5753187 -0.5768064 ... 0. 0. 0. ] [-0.5025542 -0.22160877 -0.8476715 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.08570863 -1.3713626 0.9387862 ] [ 0. 0. 0. ... -0.48655495 0.9555984 2.0357842 ] [ 0. 0. 0. ... -0.5146046 0.17667325 0. ] ... [ 0. 1.8181206 -1.044174 ... 0. 0. 0. ] [-0.40879744 0.48971412 0.50184554 ... 0. 0. 0. ] [ 0.12460218 -0.19572599 0.4926916 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.04418415 -0.61242473 0.06596559] [ 0. 0. 0. ... 1.8728706 -0.69998914 -1.8382224 ] [ 0. 0. 0. ... -0.68340695 1.2373266 0. ] ... [ 0. 1.1896273 -0.93665737 ... 0. 0. 0. ] [ 0.27349377 -0.79452103 -0.3133703 ... 0. 0. 0. ] [-0.96717477 -0.08635427 0.38179862 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.1718487 1.475679 0.339228 ] [ 0. 0. 0. ... 0.38883817 -0.5613581 0.5087321 ] [ 0. 0. 0. ... 0.96898 -0.34760675 0. ] ... [ 0. 0.29931286 -2.390545 ... 0. 0. 0. ] [-1.2025137 1.1330491 -0.4132978 ... 0. 0. 0. ] [-1.2630048 -1.0540015 -0.29670113 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1439123 -0.99841076 0.72012633] [ 0. 0. 0. ... 0.7405023 0.73030823 -0.25424787] [ 0. 0. 0. ... 1.9987628 -0.04612917 0. ] ... [ 0. 0.1279284 0.01781355 ... 0. 0. 0. ] [ 0.5086954 1.3759782 -0.21645878 ... 0. 0. 0. ] [ 0.24768795 0.21396412 1.2314262 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.9985325 0.6928263 0.74637514] [ 0. 0. 0. ... 0.01209971 0.19724093 0.9143294 ] [ 0. 0. 0. ... 1.0481097 -2.3600168 0. ] ... [ 0. -0.76737815 1.7793982 ... 0. 0. 0. ] [ 3.4075804 0.13336483 -0.23249276 ... 0. 0. 0. ] [ 0.6508793 2.6935282 -0.7325035 ... 0. 0. 0. ]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4727.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -5.02875745e-01 8.17051351e-01 -5.39885640e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.18408941e-01 3.44503820e-01 4.08641934e-01] [ 0.00000000e+00 9.41208184e-01 1.07104349e+00 ... -2.67200142e-01 -4.83390510e-01 -3.98977906e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 6.58851445e-01 -1.22915849e-01 -8.30218911e-01] [ 0.00000000e+00 -8.14094543e-01 2.19318628e-01 ... 2.27981046e-01 -9.71050084e-01 -3.85774791e-01] [ 2.10021317e-01 8.90877664e-01 2.27320388e-01 ... 2.20972806e-01 -5.65598667e-01 -1.13024735e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 8.07409763e-01 -1.19516265e+00 6.12809777e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 2.24831864e-01 1.33463219e-01 2.62690067e-01] [ 0.00000000e+00 -1.41477776e+00 -4.02616918e-01 ... -6.58463776e-01 3.95455770e-03 -1.95752108e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -2.66326189e-01 2.01856300e-01 -1.89620107e-01] [ 0.00000000e+00 8.44588935e-01 -1.17662513e+00 ... 2.01508403e+00 2.66775101e-01 -7.84962595e-01] [-3.93178523e-01 -1.53923047e+00 -1.50537753e+00 ... -5.90897501e-01 -4.11317468e-01 -2.36926898e-01]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.22610223e+00 -1.33496380e+00 8.27777624e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 3.46221089e-01 -7.96122849e-01 -1.05714738e+00] [ 0.00000000e+00 8.64766359e-01 -1.02614574e-01 ... 
-9.22042012e-01 -2.53279973e-02 -9.75081444e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.15109706e+00 -4.48179126e-01 3.30900103e-01] [ 0.00000000e+00 3.34723175e-01 -4.12621796e-01 ... 1.23229301e+00 8.28810260e-02 -6.08195066e-01] [ 1.57018095e-01 7.17235684e-01 1.18434453e+00 ... -7.05314159e-01 1.58504808e+00 -2.56470710e-01]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 4.28809315e-01 5.31370640e-01 -4.12459612e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.24134094e-01 -1.73889947e+00 -7.40524888e-01] [ 0.00000000e+00 -3.25520694e-01 -1.27888262e+00 ... 1.42547882e+00 1.26659024e+00 1.59887180e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -7.12376952e-01 5.93819320e-01 -7.84500062e-01] [ 0.00000000e+00 -9.99310195e-01 -1.82020470e-01 ... 4.20359552e-01 -2.06235692e-01 -1.14926589e+00] [-6.74699068e-01 5.06684184e-01 -7.11131319e-02 ... -9.12897170e-01 -1.46812320e+00 -1.31489372e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -4.31644380e-01 -1.84473544e-01 2.31048420e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 6.47176862e-01 8.53218973e-01 -3.51620883e-01] [ 0.00000000e+00 3.42239141e-01 1.36720097e+00 ... -1.02158330e-01 1.12753499e+00 4.71558273e-02] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.35200417e+00 -1.39158440e+00 3.97107363e-01] [ 0.00000000e+00 4.82252270e-01 -1.61415622e-01 ... 2.94759214e-01 8.26335251e-01 -4.13036525e-01] [ 1.01696241e+00 3.88561264e-02 1.40140697e-01 ... 1.00359447e-01 -1.92431164e+00 -2.97702760e-01]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 9.02755186e-03 5.30938506e-01 2.89422832e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.36420282e-06 8.59153450e-01 -3.34649205e-01] [ 0.00000000e+00 -6.69420838e-01 -7.02787889e-03 ... 1.00998439e-01 6.01925850e-01 1.70729235e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 
-8.01170528e-01 5.76864362e-01 -1.05534606e-01] [ 0.00000000e+00 -2.06431794e+00 -9.92433250e-01 ... -1.77965879e+00 -2.64876962e-01 5.69734335e-01] [ 1.54001132e-01 1.80581546e+00 -2.46191239e+00 ... -2.46445966e+00 1.88624716e+00 -9.79412675e-01]]]; ov_res: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -5.02875745e-01 8.17051351e-01 -5.39885640e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.18408941e-01 3.44503820e-01 4.08641934e-01] [ 0.00000000e+00 9.41208184e-01 1.07104349e+00 ... -2.67200142e-01 -4.83390510e-01 -3.98977906e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 6.58851445e-01 -1.22915849e-01 -8.30218911e-01] [ 0.00000000e+00 -8.14094543e-01 2.19318628e-01 ... 2.27981046e-01 -9.71050084e-01 -3.85774791e-01] [ 2.10021317e-01 8.90877664e-01 2.27320388e-01 ... 2.20972806e-01 -5.65598667e-01 -1.13024735e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 8.07409763e-01 -1.19516265e+00 6.12809777e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 2.24831864e-01 1.33463219e-01 2.62690067e-01] [ 0.00000000e+00 -1.41477776e+00 -4.02616918e-01 ... -6.58463776e-01 3.95455770e-03 -1.95752108e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -2.66326189e-01 2.01856300e-01 -1.89620107e-01] [ 0.00000000e+00 8.44588935e-01 -1.17662513e+00 ... 2.01508403e+00 2.66775101e-01 -7.84962595e-01] [-3.93178523e-01 -1.53923047e+00 -1.50537753e+00 ... -5.90897501e-01 -4.11317468e-01 -2.36926898e-01]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.22610223e+00 -1.33496380e+00 8.27777624e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 3.46221089e-01 -7.96122849e-01 -1.05714738e+00] [ 0.00000000e+00 8.64766359e-01 -1.02614574e-01 ... -9.22042012e-01 -2.53279973e-02 -9.75081444e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.15109706e+00 -4.48179126e-01 3.30900103e-01] [ 0.00000000e+00 3.34723175e-01 -4.12621796e-01 ... 
1.23229301e+00 8.28810260e-02 -6.08195066e-01] [ 1.57018095e-01 7.17235684e-01 1.18434453e+00 ... -7.05314159e-01 1.58504808e+00 -2.56470710e-01]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 4.28809315e-01 5.31370640e-01 -4.12459612e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.24134094e-01 -1.73889947e+00 -7.40524888e-01] [ 0.00000000e+00 -3.25520694e-01 -1.27888262e+00 ... 1.42547882e+00 1.26659024e+00 1.59887180e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -7.12376952e-01 5.93819320e-01 -7.84500062e-01] [ 0.00000000e+00 -9.99310195e-01 -1.82020470e-01 ... 4.20359552e-01 -2.06235692e-01 -1.14926589e+00] [-6.74699068e-01 5.06684184e-01 -7.11131319e-02 ... -9.12897170e-01 -1.46812320e+00 -1.31489372e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -4.31644380e-01 -1.84473544e-01 2.31048420e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 6.47176862e-01 8.53218973e-01 -3.51620883e-01] [ 0.00000000e+00 3.42239141e-01 1.36720097e+00 ... -1.02158330e-01 1.12753499e+00 4.71558273e-02] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.35200417e+00 -1.39158440e+00 3.97107363e-01] [ 0.00000000e+00 4.82252270e-01 -1.61415622e-01 ... 2.94759214e-01 8.26335251e-01 -4.13036525e-01] [ 1.01696241e+00 3.88561264e-02 1.40140697e-01 ... 1.00359447e-01 -1.92431164e+00 -2.97702760e-01]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 9.02755186e-03 5.30938506e-01 2.89422832e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.36420282e-06 8.59153450e-01 -3.34649205e-01] [ 0.00000000e+00 -6.69420838e-01 -7.02787889e-03 ... 1.00998439e-01 6.01925850e-01 1.70729235e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -8.01170528e-01 5.76864362e-01 -1.05534606e-01] [ 0.00000000e+00 -2.06431794e+00 -9.92433250e-01 ... -1.77965879e+00 -2.64876962e-01 5.69734335e-01] [ 1.54001132e-01 1.80581546e+00 -2.46191239e+00 ... -2.46445966e+00 1.88624716e+00 -9.79412675e-01]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:2 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4729.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -0.07906619 -0.7193245 0.6405496 ] [ 0. 0. 0. ... 0.46259546 0.29306352 -0.84519666] [ 0. 0. 0. ... -0.44018897 -0.02308964 0.01703711]] [[ 0. 0. 0. ... 0.8613632 2.0061877 0.05458238] [ 0. 0. 0. ... 0.7395768 -0.10113891 0.07608224] [ 0. 0. 0. ... -0.3473364 -0.5510785 -1.4302089 ]] [[ 0. 0. 0. ... -1.9018868 0.03719103 0.62358856] [ 0. 0. 0. ... -0.48439944 -0.39808813 2.0426857 ] [ 0. 0. 0. ... 0.10324239 -0.37095496 -0.41255718]] ... [[ 0. 0. 0. ... 1.2303444 0.5661369 -1.0523777 ] [ 0. 0. 0. ... 1.041653 1.416584 1.8234304 ] [ 0. 0. 0. ... -1.0601226 0.58900934 -1.2530034 ]] [[ 0. 0. 0. ... 0.04010836 1.4364661 0.29305005] [ 0. 0. 0. ... 0.5424201 -0.08612147 1.2152653 ] [ 0. 0. 0. ... -0.7407391 1.2616578 0.50080884]] [[ 0. 0. 0. ... -1.0675865 0.13395038 -0.01400451] [ 0. 0. 0. ... -0.17562129 0.76828086 0.04169213] [ 0. 0. 0. ... 0.7136271 -0.13915506 -2.1819565 ]]]; ov_res: [[[ 0. 0. 0. ... -0.07906619 -0.7193245 0.6405496 ] [ 0. 0. 0. ... 0.46259546 0.29306352 -0.84519666] [ 0. 0. 0. ... -0.44018897 -0.02308964 0.01703711]] [[ 0. 0. 0. ... 0.8613632 2.0061877 0.05458238] [ 0. 0. 0. ... 0.7395768 -0.10113891 0.07608224] [ 0. 0. 0. ... -0.3473364 -0.5510785 -1.4302089 ]] [[ 0. 0. 0. ... -1.9018868 0.03719103 0.62358856] [ 0. 0. 0. ... -0.48439944 -0.39808813 2.0426857 ] [ 0. 0. 0. ... 0.10324239 -0.37095496 -0.41255718]] ... [[ 0. 0. 0. ... 1.2303444 0.5661369 -1.0523777 ] [ 0. 0. 0. ... 1.041653 1.416584 1.8234304 ] [ 0. 0. 0. ... 
-1.0601226 0.58900934 -1.2530034 ]] [[ 0. 0. 0. ... 0.04010836 1.4364661 0.29305005] [ 0. 0. 0. ... 0.5424201 -0.08612147 1.2152653 ] [ 0. 0. 0. ... -0.7407391 1.2616578 0.50080884]] [[ 0. 0. 0. ... -1.0675865 0.13395038 -0.01400451] [ 0. 0. 0. ... -0.17562129 0.76828086 0.04169213] [ 0. 0. 0. ... 0.7136271 -0.13915506 -2.1819565 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:3 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4731.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 2.3575163 0.6798164 0.07784263] [ 0. 0. 0. ... 0.6798164 0.07784263 0.31267056] [ 0. 0. 0. ... 0.07784263 0.31267056 0.5786008 ] ... [ 0. 0.04793884 0.66834116 ... -0.42439005 1.8964728 -1.1827525 ] [ 0.04793884 0.66834116 1.0313715 ... 1.8964728 -1.1827525 1.072902 ] [ 0.66834116 1.0313715 -1.3055444 ... -1.1827525 1.072902 -1.7918104 ]] [[ 0. 0. 0. ... -0.7418913 -1.700302 0.7359857 ] [ 0. 0. 0. ... -1.700302 0.7359857 -0.3752979 ] [ 0. 0. 0. ... 0.7359857 -0.3752979 -0.58994526] ... [ 0. -0.13057053 -0.36910945 ... 0.67029834 -0.2805867 -1.5955689 ] [-0.13057053 -0.36910945 0.6406323 ... -0.2805867 -1.5955689 0.4410008 ] [-0.36910945 0.6406323 0.3426883 ... -1.5955689 0.4410008 0.803368 ]] [[ 0. 0. 0. ... -0.5681963 -1.1405748 -0.40987876] [ 0. 0. 0. ... -1.1405748 -0.40987876 0.49783978] [ 0. 0. 0. ... -0.40987876 0.49783978 0.23850945] ... [ 0. -1.7758026 -0.5801485 ... -0.08817441 -0.08436976 0.8431971 ] [-1.7758026 -0.5801485 -0.40160295 ... -0.08436976 0.8431971 0.47721192] [-0.5801485 -0.40160295 -0.4606045 ... 0.8431971 0.47721192 -1.1452689 ]] ... [[ 0. 0. 0. ... -1.1516546 -1.9544843 -0.28331774] [ 0. 0. 0. ... -1.9544843 -0.28331774 -0.962674 ] [ 0. 0. 0. ... -0.28331774 -0.962674 -0.00760379] ... [ 0. -0.08231802 -1.2739886 ... 1.0269877 2.2880228 -0.45774484] [-0.08231802 -1.2739886 -0.9648023 ... 2.2880228 -0.45774484 -0.74643207] [-1.2739886 -0.9648023 -0.1345212 ... -0.45774484 -0.74643207 0.31932852]] [[ 0. 0. 0. ... 2.3828163 -1.4155322 -1.5253565 ] [ 0. 0. 0. 
... -1.4155322 -1.5253565 0.413458 ] [ 0. 0. 0. ... -1.5253565 0.413458 -0.3730563 ] ... [ 0. 1.0978807 0.38441357 ... -0.46190718 0.8347037 0.7792759 ] [ 1.0978807 0.38441357 0.9182924 ... 0.8347037 0.7792759 -0.4568611 ] [ 0.38441357 0.9182924 -0.30383953 ... 0.7792759 -0.4568611 -0.49313688]] [[ 0. 0. 0. ... -0.47420993 -0.07067499 0.43964857] [ 0. 0. 0. ... -0.07067499 0.43964857 -0.08246096] [ 0. 0. 0. ... 0.43964857 -0.08246096 -0.19871715] ... [ 0. 1.2686975 -0.5087809 ... 1.1214843 0.07678851 0.478962 ] [ 1.2686975 -0.5087809 0.46507058 ... 0.07678851 0.478962 1.1800711 ] [-0.5087809 0.46507058 -0.62624145 ... 0.478962 1.1800711 0.8552384 ]]]; ov_res: [[[ 0. 0. 0. ... 2.3575163 0.6798164 0.07784263] [ 0. 0. 0. ... 0.6798164 0.07784263 0.31267056] [ 0. 0. 0. ... 0.07784263 0.31267056 0.5786008 ] ... [ 0. 0.04793884 0.66834116 ... -0.42439005 1.8964728 -1.1827525 ] [ 0.04793884 0.66834116 1.0313715 ... 1.8964728 -1.1827525 1.072902 ] [ 0.66834116 1.0313715 -1.3055444 ... -1.1827525 1.072902 -1.7918104 ]] [[ 0. 0. 0. ... -0.7418913 -1.700302 0.7359857 ] [ 0. 0. 0. ... -1.700302 0.7359857 -0.3752979 ] [ 0. 0. 0. ... 0.7359857 -0.3752979 -0.58994526] ... [ 0. -0.13057053 -0.36910945 ... 0.67029834 -0.2805867 -1.5955689 ] [-0.13057053 -0.36910945 0.6406323 ... -0.2805867 -1.5955689 0.4410008 ] [-0.36910945 0.6406323 0.3426883 ... -1.5955689 0.4410008 0.803368 ]] [[ 0. 0. 0. ... -0.5681963 -1.1405748 -0.40987876] [ 0. 0. 0. ... -1.1405748 -0.40987876 0.49783978] [ 0. 0. 0. ... -0.40987876 0.49783978 0.23850945] ... [ 0. -1.7758026 -0.5801485 ... -0.08817441 -0.08436976 0.8431971 ] [-1.7758026 -0.5801485 -0.40160295 ... -0.08436976 0.8431971 0.47721192] [-0.5801485 -0.40160295 -0.4606045 ... 0.8431971 0.47721192 -1.1452689 ]] ... [[ 0. 0. 0. ... -1.1516546 -1.9544843 -0.28331774] [ 0. 0. 0. ... -1.9544843 -0.28331774 -0.962674 ] [ 0. 0. 0. ... -0.28331774 -0.962674 -0.00760379] ... [ 0. -0.08231802 -1.2739886 ... 
1.0269877 2.2880228 -0.45774484] [-0.08231802 -1.2739886 -0.9648023 ... 2.2880228 -0.45774484 -0.74643207] [-1.2739886 -0.9648023 -0.1345212 ... -0.45774484 -0.74643207 0.31932852]] [[ 0. 0. 0. ... 2.3828163 -1.4155322 -1.5253565 ] [ 0. 0. 0. ... -1.4155322 -1.5253565 0.413458 ] [ 0. 0. 0. ... -1.5253565 0.413458 -0.3730563 ] ... [ 0. 1.0978807 0.38441357 ... -0.46190718 0.8347037 0.7792759 ] [ 1.0978807 0.38441357 0.9182924 ... 0.8347037 0.7792759 -0.4568611 ] [ 0.38441357 0.9182924 -0.30383953 ... 0.7792759 -0.4568611 -0.49313688]] [[ 0. 0. 0. ... -0.47420993 -0.07067499 0.43964857] [ 0. 0. 0. ... -0.07067499 0.43964857 -0.08246096] [ 0. 0. 0. ... 0.43964857 -0.08246096 -0.19871715] ... [ 0. 1.2686975 -0.5087809 ... 1.1214843 0.07678851 0.478962 ] [ 1.2686975 -0.5087809 0.46507058 ... 0.07678851 0.478962 1.1800711 ] [-0.5087809 0.46507058 -0.62624145 ... 0.478962 1.1800711 0.8552384 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4733.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.79331028e-01 -3.19162965e+00 -5.67313313e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.19162965e+00 -5.67313313e-01 -1.28185058e+00] [ 0.00000000e+00 -6.35924563e-02 -3.65575582e-01 ... 2.03468800e-01 9.43716466e-02 4.94762324e-02] ... [ 7.84963608e-01 -1.10647321e+00 -1.19875264e+00 ... 1.03158750e-01 -7.02881575e-01 6.79013729e-01] [ 0.00000000e+00 -2.44285166e-02 -1.32789576e+00 ... -6.21280074e-01 -3.52010541e-02 5.05918920e-01] [-2.44285166e-02 -1.32789576e+00 -2.52216846e-01 ... -3.52010541e-02 5.05918920e-01 1.01018369e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 4.88270789e-01 -1.26341534e+00 1.86935914e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.26341534e+00 1.86935914e+00 1.16021287e+00] [ 0.00000000e+00 -1.66111457e+00 6.92726016e-01 ... -8.21494818e-01 -1.40693617e+00 1.71410882e+00] ... [ 8.85329485e-01 1.61450791e+00 -4.94948119e-01 ... -1.82947159e+00 -5.39558053e-01 -8.06059957e-01] [ 0.00000000e+00 -9.78750825e-01 7.36560151e-02 ... -1.34983405e-01 -5.73107779e-01 3.85142654e-01] [-9.78750825e-01 7.36560151e-02 -1.40241158e+00 ... -5.73107779e-01 3.85142654e-01 4.49249119e-01]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 4.21345919e-01 -1.07326591e+00 -1.01998937e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.07326591e+00 -1.01998937e+00 -4.39889163e-01] [ 0.00000000e+00 -2.12684345e+00 -2.96912372e-01 ... -1.86556530e+00 5.20036936e-01 4.55573469e-01] ... 
[-7.00843036e-01 -3.38650912e-01 1.86875332e-02 ... -1.31181383e+00 -1.12169886e+00 1.64789140e+00] [ 0.00000000e+00 1.28279376e+00 -1.05358803e+00 ... -1.02270022e-03 -2.32770652e-01 1.01787329e+00] [ 1.28279376e+00 -1.05358803e+00 -9.37744319e-01 ... -2.32770652e-01 1.01787329e+00 -4.53485042e-01]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.35600734e-01 -7.88262665e-01 -1.86819899e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -7.88262665e-01 -1.86819899e+00 -1.14828610e+00] [ 0.00000000e+00 2.36084962e+00 1.06563187e+00 ... 4.40220356e-01 -6.62596166e-01 -5.79813898e-01] ... [ 5.42262912e-01 1.72268182e-01 -7.30692506e-01 ... -6.85240686e-01 9.80724633e-01 1.04996061e+00] [ 0.00000000e+00 5.35103559e-01 9.12598968e-01 ... 1.79084194e+00 -5.39051443e-02 4.81998831e-01] [ 5.35103559e-01 9.12598968e-01 1.41148686e-01 ... -5.39051443e-02 4.81998831e-01 1.46593785e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.16255724e+00 7.77804106e-02 1.35408044e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.77804106e-02 1.35408044e+00 1.84637761e+00] [ 0.00000000e+00 -1.67810842e-01 3.13039403e-03 ... 8.97760034e-01 8.03728640e-01 8.93053532e-01] ... [ 6.25239909e-01 1.06720805e-01 2.37955615e-01 ... 1.09800839e+00 -8.03466439e-01 6.38552725e-01] [ 0.00000000e+00 -2.53192000e-02 -5.31060815e-01 ... -4.87086028e-01 -8.39513481e-01 -1.33003378e+00] [-2.53192000e-02 -5.31060815e-01 3.36472481e-01 ... -8.39513481e-01 -1.33003378e+00 -7.27840245e-01]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 6.08272888e-02 2.78347164e-01 1.80060303e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 2.78347164e-01 1.80060303e+00 -2.61092973e+00] [ 0.00000000e+00 2.02701497e+00 -1.96800375e+00 ... -8.80796731e-01 -2.67655551e-01 -1.47055590e+00] ... [-7.45726645e-01 -6.21554315e-01 4.23434645e-01 ... -1.48356080e+00 -2.43460566e-01 -3.14538211e-01] [ 0.00000000e+00 9.98190567e-02 5.41027216e-03 ... 
7.53834024e-02 6.63666949e-02 3.00830543e-01] [ 9.98190567e-02 5.41027216e-03 -1.35095119e+00 ... 6.63666949e-02 3.00830543e-01 1.94504455e-01]]]; ov_res: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.79331028e-01 -3.19162965e+00 -5.67313313e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.19162965e+00 -5.67313313e-01 -1.28185058e+00] [ 0.00000000e+00 -6.35924563e-02 -3.65575582e-01 ... 2.03468800e-01 9.43716466e-02 4.94762324e-02] ... [ 7.84963608e-01 -1.10647321e+00 -1.19875264e+00 ... 1.03158750e-01 -7.02881575e-01 6.79013729e-01] [ 0.00000000e+00 -2.44285166e-02 -1.32789576e+00 ... -6.21280074e-01 -3.52010541e-02 5.05918920e-01] [-2.44285166e-02 -1.32789576e+00 -2.52216846e-01 ... -3.52010541e-02 5.05918920e-01 1.01018369e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 4.88270789e-01 -1.26341534e+00 1.86935914e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.26341534e+00 1.86935914e+00 1.16021287e+00] [ 0.00000000e+00 -1.66111457e+00 6.92726016e-01 ... -8.21494818e-01 -1.40693617e+00 1.71410882e+00] ... [ 8.85329485e-01 1.61450791e+00 -4.94948119e-01 ... -1.82947159e+00 -5.39558053e-01 -8.06059957e-01] [ 0.00000000e+00 -9.78750825e-01 7.36560151e-02 ... -1.34983405e-01 -5.73107779e-01 3.85142654e-01] [-9.78750825e-01 7.36560151e-02 -1.40241158e+00 ... -5.73107779e-01 3.85142654e-01 4.49249119e-01]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 4.21345919e-01 -1.07326591e+00 -1.01998937e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.07326591e+00 -1.01998937e+00 -4.39889163e-01] [ 0.00000000e+00 -2.12684345e+00 -2.96912372e-01 ... -1.86556530e+00 5.20036936e-01 4.55573469e-01] ... [-7.00843036e-01 -3.38650912e-01 1.86875332e-02 ... -1.31181383e+00 -1.12169886e+00 1.64789140e+00] [ 0.00000000e+00 1.28279376e+00 -1.05358803e+00 ... -1.02270022e-03 -2.32770652e-01 1.01787329e+00] [ 1.28279376e+00 -1.05358803e+00 -9.37744319e-01 ... -2.32770652e-01 1.01787329e+00 -4.53485042e-01]] ... 
[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.35600734e-01 -7.88262665e-01 -1.86819899e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -7.88262665e-01 -1.86819899e+00 -1.14828610e+00] [ 0.00000000e+00 2.36084962e+00 1.06563187e+00 ... 4.40220356e-01 -6.62596166e-01 -5.79813898e-01] ... [ 5.42262912e-01 1.72268182e-01 -7.30692506e-01 ... -6.85240686e-01 9.80724633e-01 1.04996061e+00] [ 0.00000000e+00 5.35103559e-01 9.12598968e-01 ... 1.79084194e+00 -5.39051443e-02 4.81998831e-01] [ 5.35103559e-01 9.12598968e-01 1.41148686e-01 ... -5.39051443e-02 4.81998831e-01 1.46593785e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.16255724e+00 7.77804106e-02 1.35408044e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.77804106e-02 1.35408044e+00 1.84637761e+00] [ 0.00000000e+00 -1.67810842e-01 3.13039403e-03 ... 8.97760034e-01 8.03728640e-01 8.93053532e-01] ... [ 6.25239909e-01 1.06720805e-01 2.37955615e-01 ... 1.09800839e+00 -8.03466439e-01 6.38552725e-01] [ 0.00000000e+00 -2.53192000e-02 -5.31060815e-01 ... -4.87086028e-01 -8.39513481e-01 -1.33003378e+00] [-2.53192000e-02 -5.31060815e-01 3.36472481e-01 ... -8.39513481e-01 -1.33003378e+00 -7.27840245e-01]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 6.08272888e-02 2.78347164e-01 1.80060303e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 2.78347164e-01 1.80060303e+00 -2.61092973e+00] [ 0.00000000e+00 2.02701497e+00 -1.96800375e+00 ... -8.80796731e-01 -2.67655551e-01 -1.47055590e+00] ... [-7.45726645e-01 -6.21554315e-01 4.23434645e-01 ... -1.48356080e+00 -2.43460566e-01 -3.14538211e-01] [ 0.00000000e+00 9.98190567e-02 5.41027216e-03 ... 7.53834024e-02 6.63666949e-02 3.00830543e-01] [ 9.98190567e-02 5.41027216e-03 -1.35095119e+00 ... 6.63666949e-02 3.00830543e-01 1.94504455e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4735.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... 0.3460691 -0.44306713 -2.0488672 ] [ 0. 0. 0. ... -0.44306713 -2.0488672 -0.1214214 ] [ 0. 0. 0. ... -2.0488672 -0.1214214 0.34211585] ... [ 0. 1.2155555 1.5751829 ... -0.05477801 -1.3220375 0.19160359] [ 1.2155555 1.5751829 -0.2809952 ... -1.3220375 0.19160359 0.4555798 ] [ 1.5751829 -0.2809952 -1.4095812 ... 0.19160359 0.4555798 -1.9873575 ]] [[ 0. 0. 0. ... -0.20623043 0.39765346 -1.7228471 ] [ 0. 0. 0. ... 0.39765346 -1.7228471 0.9889718 ] [ 0. 0. 0. ... -1.7228471 0.9889718 0.01136832] ... [ 0. 0.32435375 0.27367258 ... 1.2923845 0.18595769 1.8738344 ] [ 0.32435375 0.27367258 0.7396916 ... 0.18595769 1.8738344 0.11819172] [ 0.27367258 0.7396916 -0.04412987 ... 1.8738344 0.11819172 0.29947647]] [[ 0. 0. 0. ... -0.6864027 0.19017527 -0.18891682] [ 0. 0. 0. ... 0.19017527 -0.18891682 0.45546123] [ 0. 0. 0. ... -0.18891682 0.45546123 0.57395595] ... [ 0. 1.0381163 0.35008955 ... -0.62466455 1.5069491 0.25099453] [ 1.0381163 0.35008955 -0.4643075 ... 1.5069491 0.25099453 -0.5270501 ] [ 0.35008955 -0.4643075 -0.05672737 ... 0.25099453 -0.5270501 0.632694 ]] ... [[ 0. 0. 0. ... -0.10492896 0.19871289 0.80161095] [ 0. 0. 0. ... 0.19871289 0.80161095 -0.6500473 ] [ 0. 0. 0. ... 0.80161095 -0.6500473 -1.3812004 ] ... [ 0. -1.2440035 0.08949485 ... -0.13329239 -0.8314015 -0.28157836] [-1.2440035 0.08949485 -0.11764015 ... -0.8314015 -0.28157836 0.9448194 ] [ 0.08949485 -0.11764015 0.4063531 ... -0.28157836 0.9448194 0.24269687]] [[ 0. 0. 0. ... 0.41111866 -0.129836 -0.54189116] [ 0. 0. 0. ... 
-0.129836 -0.54189116 -0.44013643] [ 0. 0. 0. ... -0.54189116 -0.44013643 -1.1856866 ] ... [ 0. 0.6060115 -0.37436303 ... 0.55196744 1.0492378 -0.01494335] [ 0.6060115 -0.37436303 -2.175773 ... 1.0492378 -0.01494335 -0.26777765] [-0.37436303 -2.175773 -0.09372329 ... -0.01494335 -0.26777765 -0.7812111 ]] [[ 0. 0. 0. ... -0.96015733 -0.8578559 -1.2728661 ] [ 0. 0. 0. ... -0.8578559 -1.2728661 2.3167238 ] [ 0. 0. 0. ... -1.2728661 2.3167238 -1.0431563 ] ... [ 0. 0.11357296 -0.25483042 ... -1.1259638 1.2626936 -0.8994387 ] [ 0.11357296 -0.25483042 -0.26473874 ... 1.2626936 -0.8994387 -1.804108 ] [-0.25483042 -0.26473874 -0.06003528 ... -0.8994387 -1.804108 -1.1206363 ]]]; ov_res: [[[ 0. 0. 0. ... 0.3460691 -0.44306713 -2.0488672 ] [ 0. 0. 0. ... -0.44306713 -2.0488672 -0.1214214 ] [ 0. 0. 0. ... -2.0488672 -0.1214214 0.34211585] ... [ 0. 1.2155555 1.5751829 ... -0.05477801 -1.3220375 0.19160359] [ 1.2155555 1.5751829 -0.2809952 ... -1.3220375 0.19160359 0.4555798 ] [ 1.5751829 -0.2809952 -1.4095812 ... 0.19160359 0.4555798 -1.9873575 ]] [[ 0. 0. 0. ... -0.20623043 0.39765346 -1.7228471 ] [ 0. 0. 0. ... 0.39765346 -1.7228471 0.9889718 ] [ 0. 0. 0. ... -1.7228471 0.9889718 0.01136832] ... [ 0. 0.32435375 0.27367258 ... 1.2923845 0.18595769 1.8738344 ] [ 0.32435375 0.27367258 0.7396916 ... 0.18595769 1.8738344 0.11819172] [ 0.27367258 0.7396916 -0.04412987 ... 1.8738344 0.11819172 0.29947647]] [[ 0. 0. 0. ... -0.6864027 0.19017527 -0.18891682] [ 0. 0. 0. ... 0.19017527 -0.18891682 0.45546123] [ 0. 0. 0. ... -0.18891682 0.45546123 0.57395595] ... [ 0. 1.0381163 0.35008955 ... -0.62466455 1.5069491 0.25099453] [ 1.0381163 0.35008955 -0.4643075 ... 1.5069491 0.25099453 -0.5270501 ] [ 0.35008955 -0.4643075 -0.05672737 ... 0.25099453 -0.5270501 0.632694 ]] ... [[ 0. 0. 0. ... -0.10492896 0.19871289 0.80161095] [ 0. 0. 0. ... 0.19871289 0.80161095 -0.6500473 ] [ 0. 0. 0. ... 0.80161095 -0.6500473 -1.3812004 ] ... [ 0. -1.2440035 0.08949485 ... 
-0.13329239 -0.8314015 -0.28157836] [-1.2440035 0.08949485 -0.11764015 ... -0.8314015 -0.28157836 0.9448194 ] [ 0.08949485 -0.11764015 0.4063531 ... -0.28157836 0.9448194 0.24269687]] [[ 0. 0. 0. ... 0.41111866 -0.129836 -0.54189116] [ 0. 0. 0. ... -0.129836 -0.54189116 -0.44013643] [ 0. 0. 0. ... -0.54189116 -0.44013643 -1.1856866 ] ... [ 0. 0.6060115 -0.37436303 ... 0.55196744 1.0492378 -0.01494335] [ 0.6060115 -0.37436303 -2.175773 ... 1.0492378 -0.01494335 -0.26777765] [-0.37436303 -2.175773 -0.09372329 ... -0.01494335 -0.26777765 -0.7812111 ]] [[ 0. 0. 0. ... -0.96015733 -0.8578559 -1.2728661 ] [ 0. 0. 0. ... -0.8578559 -1.2728661 2.3167238 ] [ 0. 0. 0. ... -1.2728661 2.3167238 -1.0431563 ] ... [ 0. 0.11357296 -0.25483042 ... -1.1259638 1.2626936 -0.8994387 ] [ 0.11357296 -0.25483042 -0.26473874 ... 1.2626936 -0.8994387 -1.804108 ] [-0.25483042 -0.26473874 -0.06003528 ... -0.8994387 -1.804108 -1.1206363 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4737.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.02194949 -0.7901948 -1.1810012 ] [ 0. 0. 0. ... -0.7901948 -1.1810012 -1.1522492 ] [ 0. -0.95905685 1.183797 ... -0.7244202 -1.8403139 0.66373146] ... [ 0. 0. 0. ... -0.2574196 -0.41893625 0.94399905] [ 0. -0.03762326 0.03261 ... 1.1793594 -1.3846875 0.26229447] [-0.03762326 0.03261 1.3715295 ... -1.3846875 0.26229447 0.53007275]] [[ 0. 0. 0. ... 0.15642412 0.2520632 0.01829854] [ 0. 0. 0. ... 0.2520632 0.01829854 1.2275976 ] [ 0. -0.33634678 1.5562761 ... 2.019479 -0.641197 -0.81266725] ... [ 0. 0. 0. ... -1.155232 -1.572276 -2.8793917 ] [ 0. 0.65790606 -0.5242015 ... 0.77824366 0.6018166 -0.27505445] [ 0.65790606 -0.5242015 0.57951933 ... 0.6018166 -0.27505445 -2.0493727 ]] [[ 0. 0. 0. ... -2.457336 1.2021053 -1.0539575 ] [ 0. 0. 0. ... 1.2021053 -1.0539575 1.0329996 ] [ 0. -1.2777473 1.3527712 ... 0.8915937 1.039344 0.12878844] ... [ 0. 0. 0. ... -0.45416743 1.0640793 0.9204046 ] [ 0. -0.15896505 -0.93174094 ... -0.6742065 1.283532 0.5230674 ] [-0.15896505 -0.93174094 -3.4594014 ... 1.283532 0.5230674 -0.16887352]] ... [[ 0. 0. 0. ... -0.70165384 0.75849265 -0.49502447] [ 0. 0. 0. ... 0.75849265 -0.49502447 0.75118494] [ 0. -1.5302881 0.94509023 ... 0.23509967 -0.6288275 -1.2151288 ] ... [ 0. 0. 0. ... 0.77416754 -0.53999454 -0.18538678] [ 0. -0.02328355 -1.6295538 ... -1.6359215 -1.877012 -0.60510814] [-0.02328355 -1.6295538 -0.30181655 ... -1.877012 -0.60510814 -1.0785506 ]] [[ 0. 0. 0. ... -0.05195195 2.0953805 0.5573516 ] [ 0. 0. 0. ... 
2.0953805 0.5573516 0.07345936] [ 0. 0.58929044 0.35502142 ... -0.35650745 0.9141548 0.7644477 ] ... [ 0. 0. 0. ... -0.8362089 0.49561536 -0.51528955] [ 0. 0.70332164 -1.1108791 ... 2.496191 -0.17650998 0.8990062 ] [ 0.70332164 -1.1108791 -1.1894704 ... -0.17650998 0.8990062 0.83991396]] [[ 0. 0. 0. ... 0.06405454 0.70448625 0.45588326] [ 0. 0. 0. ... 0.70448625 0.45588326 0.63691396] [ 0. 1.223345 -0.4747743 ... 0.07434312 0.47155476 0.11206497] ... [ 0. 0. 0. ... -1.290897 0.7885285 -0.20297217] [ 0. -0.29407918 -0.9200362 ... 0.1398553 -0.20281379 -0.3664193 ] [-0.29407918 -0.9200362 -1.7896839 ... -0.20281379 -0.3664193 0.38505018]]]; ov_res: [[[ 0. 0. 0. ... 0.02194949 -0.7901948 -1.1810012 ] [ 0. 0. 0. ... -0.7901948 -1.1810012 -1.1522492 ] [ 0. -0.95905685 1.183797 ... -0.7244202 -1.8403139 0.66373146] ... [ 0. 0. 0. ... -0.2574196 -0.41893625 0.94399905] [ 0. -0.03762326 0.03261 ... 1.1793594 -1.3846875 0.26229447] [-0.03762326 0.03261 1.3715295 ... -1.3846875 0.26229447 0.53007275]] [[ 0. 0. 0. ... 0.15642412 0.2520632 0.01829854] [ 0. 0. 0. ... 0.2520632 0.01829854 1.2275976 ] [ 0. -0.33634678 1.5562761 ... 2.019479 -0.641197 -0.81266725] ... [ 0. 0. 0. ... -1.155232 -1.572276 -2.8793917 ] [ 0. 0.65790606 -0.5242015 ... 0.77824366 0.6018166 -0.27505445] [ 0.65790606 -0.5242015 0.57951933 ... 0.6018166 -0.27505445 -2.0493727 ]] [[ 0. 0. 0. ... -2.457336 1.2021053 -1.0539575 ] [ 0. 0. 0. ... 1.2021053 -1.0539575 1.0329996 ] [ 0. -1.2777473 1.3527712 ... 0.8915937 1.039344 0.12878844] ... [ 0. 0. 0. ... -0.45416743 1.0640793 0.9204046 ] [ 0. -0.15896505 -0.93174094 ... -0.6742065 1.283532 0.5230674 ] [-0.15896505 -0.93174094 -3.4594014 ... 1.283532 0.5230674 -0.16887352]] ... [[ 0. 0. 0. ... -0.70165384 0.75849265 -0.49502447] [ 0. 0. 0. ... 0.75849265 -0.49502447 0.75118494] [ 0. -1.5302881 0.94509023 ... 0.23509967 -0.6288275 -1.2151288 ] ... [ 0. 0. 0. ... 0.77416754 -0.53999454 -0.18538678] [ 0. -0.02328355 -1.6295538 ... 
-1.6359215 -1.877012 -0.60510814] [-0.02328355 -1.6295538 -0.30181655 ... -1.877012 -0.60510814 -1.0785506 ]] [[ 0. 0. 0. ... -0.05195195 2.0953805 0.5573516 ] [ 0. 0. 0. ... 2.0953805 0.5573516 0.07345936] [ 0. 0.58929044 0.35502142 ... -0.35650745 0.9141548 0.7644477 ] ... [ 0. 0. 0. ... -0.8362089 0.49561536 -0.51528955] [ 0. 0.70332164 -1.1108791 ... 2.496191 -0.17650998 0.8990062 ] [ 0.70332164 -1.1108791 -1.1894704 ... -0.17650998 0.8990062 0.83991396]] [[ 0. 0. 0. ... 0.06405454 0.70448625 0.45588326] [ 0. 0. 0. ... 0.70448625 0.45588326 0.63691396] [ 0. 1.223345 -0.4747743 ... 0.07434312 0.47155476 0.11206497] ... [ 0. 0. 0. ... -1.290897 0.7885285 -0.20297217] [ 0. -0.29407918 -0.9200362 ... 0.1398553 -0.20281379 -0.3664193 ] [-0.29407918 -0.9200362 -1.7896839 ... -0.20281379 -0.3664193 0.38505018]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:3 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4739.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... 0.74214494 1.7251314 1.3556261 ] [ 0. 0. 0. ... -1.153727 -1.9859636 -1.3687736 ] [ 0. 0. 0. ... -0.4007667 0.49789232 -0.20006572]] [[ 0. 0. 0. ... -0.28384292 0.7780463 -0.5369287 ] [ 0. 0. 0. ... 0.31628016 -0.77776015 -1.1615247 ] [ 0. 0. 0. ... 0.2520586 -0.724727 0.03470474]] [[ 0. 0. 0. ... -1.0650696 0.8243513 -1.0937929 ] [ 0. 0. 0. ... 1.4678601 -0.20762968 -0.8652504 ] [ 0. 0. 0. ... 0.22891073 -0.67577255 -0.20531015]] ... [[ 0. 0. 0. ... -1.0350285 0.11845967 -3.0420356 ] [ 0. 0. 0. ... -0.5720607 -0.9120241 -0.03461317] [ 0. 0. 0. ... -1.3039097 1.4842494 -0.8651465 ]] [[ 0. 0. 0. ... -1.9781713 -1.0480728 0.2101439 ] [ 0. 0. 0. ... 1.8066609 -1.0300034 -1.2500966 ] [ 0. 0. 0. ... -1.1754038 0.86081743 0.48577198]] [[ 0. 0. 0. ... 0.23004204 -0.1288946 1.12431 ] [ 0. 0. 0. ... -1.0812225 1.2319773 -1.9428302 ] [ 0. 0. 0. ... -0.26294345 -1.9892634 0.02051217]]]; ov_res: [[[ 0. 0. 0. ... 0.74214494 1.7251314 1.3556261 ] [ 0. 0. 0. ... -1.153727 -1.9859636 -1.3687736 ] [ 0. 0. 0. ... -0.4007667 0.49789232 -0.20006572]] [[ 0. 0. 0. ... -0.28384292 0.7780463 -0.5369287 ] [ 0. 0. 0. ... 0.31628016 -0.77776015 -1.1615247 ] [ 0. 0. 0. ... 0.2520586 -0.724727 0.03470474]] [[ 0. 0. 0. ... -1.0650696 0.8243513 -1.0937929 ] [ 0. 0. 0. ... 1.4678601 -0.20762968 -0.8652504 ] [ 0. 0. 0. ... 0.22891073 -0.67577255 -0.20531015]] ... [[ 0. 0. 0. ... -1.0350285 0.11845967 -3.0420356 ] [ 0. 0. 0. ... -0.5720607 -0.9120241 -0.03461317] [ 0. 0. 0. ... -1.3039097 1.4842494 -0.8651465 ]] [[ 0. 0. 0. ... 
-1.9781713 -1.0480728 0.2101439 ] [ 0. 0. 0. ... 1.8066609 -1.0300034 -1.2500966 ] [ 0. 0. 0. ... -1.1754038 0.86081743 0.48577198]] [[ 0. 0. 0. ... 0.23004204 -0.1288946 1.12431 ] [ 0. 0. 0. ... -1.0812225 1.2319773 -1.9428302 ] [ 0. 0. 0. ... -0.26294345 -1.9892634 0.02051217]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4741.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.13118039 1.2491803 1.6378891 ] [ 0. 0. 0. ... -0.7346872 0.0054525 -0.4797964 ] [ 0. 0. 0. ... -0.02133026 -0.2824189 0. ] ... [ 0. 0.3628942 0.10380774 ... 0. 0. 0. ] [ 0.13477072 1.0240953 0.45093125 ... 0. 0. 0. ] [ 0.43461007 0.9462278 -1.0113772 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.21335645 -0.46008015 0.9291294 ] [ 0. 0. 0. ... 0.15621161 -0.06670928 -1.1965097 ] [ 0. 0. 0. ... -0.07448895 0.39010578 0. ] ... [ 0. -0.31954017 -1.9520708 ... 0. 0. 0. ] [-0.01291131 0.21110402 -0.21396564 ... 0. 0. 0. ] [-1.2786403 0.46985635 1.592865 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1412438 -0.34835494 0.65187037] [ 0. 0. 0. ... -1.4311298 1.0888178 -0.19288737] [ 0. 0. 0. ... -0.57307637 -1.1082925 0. ] ... [ 0. 1.8416786 -0.56843334 ... 0. 0. 0. ] [-1.3552717 -1.663582 -0.01357241 ... 0. 0. 0. ] [-0.42567396 -0.20043224 1.4262414 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.5599399 -0.34069732 -0.0065207 ] [ 0. 0. 0. ... 0.10574774 -0.2714696 -0.47059 ] [ 0. 0. 0. ... -0.01517463 0.12055198 0. ] ... [ 0. 0.8930158 0.6985976 ... 0. 0. 0. ] [-2.9061406 0.07744466 0.40736595 ... 0. 0. 0. ] [-0.6669996 1.8139917 1.5812236 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.54850656 0.79540354 -0.2270209 ] [ 0. 0. 0. ... 1.3173522 1.2472625 -0.27491453] [ 0. 0. 0. ... -0.49094874 -0.4784003 0. ] ... [ 0. -0.48756948 -0.1859578 ... 0. 0. 0. ] [-0.7398897 0.3541664 -0.84160227 ... 0. 0. 0. ] [ 0.35555518 -0.63817775 1.7271312 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 
0.35659072 -0.80616105 0.2670722 ] [ 0. 0. 0. ... 0.6233795 0.3442944 -0.9108857 ] [ 0. 0. 0. ... 1.464006 -0.8568692 0. ] ... [ 0. -0.4842687 1.0288808 ... 0. 0. 0. ] [ 0.13539399 -1.3763264 0.27306694 ... 0. 0. 0. ] [ 0.61610156 1.0650533 0.35224876 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.13118039 1.2491803 1.6378891 ] [ 0. 0. 0. ... -0.7346872 0.0054525 -0.4797964 ] [ 0. 0. 0. ... -0.02133026 -0.2824189 0. ] ... [ 0. 0.3628942 0.10380774 ... 0. 0. 0. ] [ 0.13477072 1.0240953 0.45093125 ... 0. 0. 0. ] [ 0.43461007 0.9462278 -1.0113772 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.21335645 -0.46008015 0.9291294 ] [ 0. 0. 0. ... 0.15621161 -0.06670928 -1.1965097 ] [ 0. 0. 0. ... -0.07448895 0.39010578 0. ] ... [ 0. -0.31954017 -1.9520708 ... 0. 0. 0. ] [-0.01291131 0.21110402 -0.21396564 ... 0. 0. 0. ] [-1.2786403 0.46985635 1.592865 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1412438 -0.34835494 0.65187037] [ 0. 0. 0. ... -1.4311298 1.0888178 -0.19288737] [ 0. 0. 0. ... -0.57307637 -1.1082925 0. ] ... [ 0. 1.8416786 -0.56843334 ... 0. 0. 0. ] [-1.3552717 -1.663582 -0.01357241 ... 0. 0. 0. ] [-0.42567396 -0.20043224 1.4262414 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.5599399 -0.34069732 -0.0065207 ] [ 0. 0. 0. ... 0.10574774 -0.2714696 -0.47059 ] [ 0. 0. 0. ... -0.01517463 0.12055198 0. ] ... [ 0. 0.8930158 0.6985976 ... 0. 0. 0. ] [-2.9061406 0.07744466 0.40736595 ... 0. 0. 0. ] [-0.6669996 1.8139917 1.5812236 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.54850656 0.79540354 -0.2270209 ] [ 0. 0. 0. ... 1.3173522 1.2472625 -0.27491453] [ 0. 0. 0. ... -0.49094874 -0.4784003 0. ] ... [ 0. -0.48756948 -0.1859578 ... 0. 0. 0. ] [-0.7398897 0.3541664 -0.84160227 ... 0. 0. 0. ] [ 0.35555518 -0.63817775 1.7271312 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.35659072 -0.80616105 0.2670722 ] [ 0. 0. 0. ... 0.6233795 0.3442944 -0.9108857 ] [ 0. 0. 0. ... 1.464006 -0.8568692 0. ] ... [ 0. -0.4842687 1.0288808 ... 0. 0. 0. ] [ 0.13539399 -1.3763264 0.27306694 ... 0. 0. 0. ] [ 0.61610156 1.0650533 0.35224876 ... 0. 0. 
0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4743.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.5201511 -1.7608727 -2.041395 ] [ 0. 0. 0. ... -1.0133898 0.48360234 -0.05323817] [ 0. -0.7271488 -1.6924635 ... -1.3113556 -0.6273713 -1.6616843 ] ... [ 0.7999219 0.870793 -1.7351948 ... -0.47167313 -0.78603625 0.15075445] [ 0. 0.6376022 -0.18878262 ... -1.2183652 0.5357304 1.674911 ] [ 0.43563733 0.2780519 -1.0979415 ... 1.8509741 2.3453174 0.21947534]] [[ 0. 0. 0. ... -1.1564492 0.39863563 0.00738124] [ 0. 0. 0. ... -1.1920094 0.32928282 -0.9265438 ] [ 0. -0.2984721 -0.12342356 ... -1.2864746 1.702199 0.11971793] ... [ 1.6631948 -0.58831257 0.9472117 ... -0.85147023 -1.4088452 0.33882356] [ 0. 0.30737308 1.7126219 ... -0.06600445 0.745206 -0.00846566] [ 0.5554006 -2.1206872 0.2392445 ... -0.15197788 0.60978216 0.33992118]] [[ 0. 0. 0. ... -0.7101473 0.20709425 1.4809451 ] [ 0. 0. 0. ... -0.28753248 -0.42209542 -0.7106084 ] [ 0. 0.4482784 -1.119959 ... -0.55753803 0.47723827 0.29082564] ... [ 0.63610774 0.46050736 0.40066406 ... 2.754055 -0.07164137 0.37869138] [ 0. 1.2115861 -0.3952173 ... -1.0829406 -0.12204187 0.4942357 ] [-0.8347312 0.82972485 -0.2759575 ... 0.8595563 0.47992843 1.8089398 ]] ... [[ 0. 0. 0. ... -0.22034238 -0.7066462 -1.4803432 ] [ 0. 0. 0. ... 0.47382343 -0.14326309 0.33005553] [ 0. 0.6466479 -0.968839 ... -0.35348785 -0.25031078 -0.05564766] ... [ 0.20408772 1.3656203 0.87207067 ... -0.4687098 1.2810091 0.03594057] [ 0. -0.78138053 0.27445516 ... 
-1.1304617 1.3940637 0.8531183 ] [ 0.11316577 1.1841347 -0.1646398 ... 1.1880836 -1.3691554 0.51665026]] [[ 0. 0. 0. ... 0.43059635 -0.0881109 -1.1933656 ] [ 0. 0. 0. ... 0.52063787 0.48390788 0.20271634] [ 0. -1.1195097 -0.2958542 ... -2.1812706 1.0285943 0.45285416] ... [-0.8864714 0.91974723 -0.13295157 ... 1.4220438 0.9668521 -1.0256329 ] [ 0. 1.3174723 0.38121203 ... -0.03943636 -0.08590813 -1.0265262 ] [ 0.4620043 1.3705624 1.0996459 ... -0.06605121 -1.1740966 -0.7371454 ]] [[ 0. 0. 0. ... -1.7545991 1.0065426 0.35432282] [ 0. 0. 0. ... 0.52278495 -0.3916435 -0.11943218] [ 0. 2.4483473 1.3814942 ... 1.5185355 1.1160271 -1.0154123 ] ... [-2.4152117 0.6389659 -1.4645975 ... -0.19351937 0.29091534 0.45189747] [ 0. -0.87282723 1.444816 ... 0.76976174 0.3409517 1.5075433 ] [-0.04565761 -0.18819848 -0.2931799 ... -0.14599615 -1.2115263 0.0983699 ]]]; ov_res: [[[ 0. 0. 0. ... -0.5201511 -1.7608727 -2.041395 ] [ 0. 0. 0. ... -1.0133898 0.48360234 -0.05323817] [ 0. -0.7271488 -1.6924635 ... -1.3113556 -0.6273713 -1.6616843 ] ... [ 0.7999219 0.870793 -1.7351948 ... -0.47167313 -0.78603625 0.15075445] [ 0. 0.6376022 -0.18878262 ... -1.2183652 0.5357304 1.674911 ] [ 0.43563733 0.2780519 -1.0979415 ... 1.8509741 2.3453174 0.21947534]] [[ 0. 0. 0. ... -1.1564492 0.39863563 0.00738124] [ 0. 0. 0. ... -1.1920094 0.32928282 -0.9265438 ] [ 0. -0.2984721 -0.12342356 ... -1.2864746 1.702199 0.11971793] ... [ 1.6631948 -0.58831257 0.9472117 ... -0.85147023 -1.4088452 0.33882356] [ 0. 0.30737308 1.7126219 ... -0.06600445 0.745206 -0.00846566] [ 0.5554006 -2.1206872 0.2392445 ... -0.15197788 0.60978216 0.33992118]] [[ 0. 0. 0. ... -0.7101473 0.20709425 1.4809451 ] [ 0. 0. 0. ... -0.28753248 -0.42209542 -0.7106084 ] [ 0. 0.4482784 -1.119959 ... -0.55753803 0.47723827 0.29082564] ... [ 0.63610774 0.46050736 0.40066406 ... 2.754055 -0.07164137 0.37869138] [ 0. 1.2115861 -0.3952173 ... -1.0829406 -0.12204187 0.4942357 ] [-0.8347312 0.82972485 -0.2759575 ... 
0.8595563 0.47992843 1.8089398 ]] ... [[ 0. 0. 0. ... -0.22034238 -0.7066462 -1.4803432 ] [ 0. 0. 0. ... 0.47382343 -0.14326309 0.33005553] [ 0. 0.6466479 -0.968839 ... -0.35348785 -0.25031078 -0.05564766] ... [ 0.20408772 1.3656203 0.87207067 ... -0.4687098 1.2810091 0.03594057] [ 0. -0.78138053 0.27445516 ... -1.1304617 1.3940637 0.8531183 ] [ 0.11316577 1.1841347 -0.1646398 ... 1.1880836 -1.3691554 0.51665026]] [[ 0. 0. 0. ... 0.43059635 -0.0881109 -1.1933656 ] [ 0. 0. 0. ... 0.52063787 0.48390788 0.20271634] [ 0. -1.1195097 -0.2958542 ... -2.1812706 1.0285943 0.45285416] ... [-0.8864714 0.91974723 -0.13295157 ... 1.4220438 0.9668521 -1.0256329 ] [ 0. 1.3174723 0.38121203 ... -0.03943636 -0.08590813 -1.0265262 ] [ 0.4620043 1.3705624 1.0996459 ... -0.06605121 -1.1740966 -0.7371454 ]] [[ 0. 0. 0. ... -1.7545991 1.0065426 0.35432282] [ 0. 0. 0. ... 0.52278495 -0.3916435 -0.11943218] [ 0. 2.4483473 1.3814942 ... 1.5185355 1.1160271 -1.0154123 ] ... [-2.4152117 0.6389659 -1.4645975 ... -0.19351937 0.29091534 0.45189747] [ 0. -0.87282723 1.444816 ... 0.76976174 0.3409517 1.5075433 ] [-0.04565761 -0.18819848 -0.2931799 ... -0.14599615 -1.2115263 0.0983699 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4745.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.03295407 -1.1425236 0.42604083] [ 0. 0. 0. ... -1.6007018 2.1375597 0.17573877] [ 0. 0. 0. ... 0.41090426 1.5108973 0. ] ... [ 0. -0.56723636 0.9598703 ... -0.25937587 -2.3657355 -0.3717012 ] [-1.4899489 0.09873961 -0.8091282 ... -0.5487101 -0.17146912 1.2052556 ] [-1.0808998 0.70261854 -1.0227779 ... -2.1570716 0.8937106 0. ]] [[ 0. 0. 0. ... 0.6903683 -0.9676918 -0.41217738] [ 0. 0. 0. ... 0.73586816 1.062783 -0.731907 ] [ 0. 0. 0. ... -0.40528226 1.4643289 0. ] ... [ 0. 0.16828324 -0.85393846 ... -0.25430596 -0.3425795 0.06111652] [-0.50504035 0.45560738 -1.3837456 ... 1.216856 0.70452124 0.1261981 ] [ 0.57214403 0.62055 -0.95195466 ... 0.34256226 0.8418892 0. ]] [[ 0. 0. 0. ... 0.01768211 -0.10442887 1.9809033 ] [ 0. 0. 0. ... -0.08485464 -1.195723 -1.304884 ] [ 0. 0. 0. ... -0.4680152 0.54395586 0. ] ... [ 0. -0.18316524 2.0210605 ... 0.35401645 -0.02438085 -3.0869644 ] [ 0.3844647 -1.0868374 -0.46833047 ... 0.7557157 0.281091 0.1992191 ] [ 1.3134674 -0.00984686 -0.8965998 ... 0.4420108 0.721892 0. ]] ... [[ 0. 0. 0. ... -0.01075497 0.46861592 1.0976218 ] [ 0. 0. 0. ... 0.5770641 -0.36148408 1.3235844 ] [ 0. 0. 0. ... -1.3169785 -0.94484043 0. ] ... [ 0. 1.3154296 -1.0356654 ... -0.34059843 1.5829016 0.4941199 ] [-1.8401012 -0.7371215 0.2033294 ... -0.06908569 -1.0850059 -0.36226973] [ 0.3394092 0.31265998 0.6358673 ... 0.33055487 1.8772331 0. ]] [[ 0. 0. 0. ... -1.0006467 0.8918707 -1.230398 ] [ 0. 0. 0. ... 0.24906906 0.1857981 -1.4998794 ] [ 0. 0. 0. ... 
-0.53937495 -0.39900917 0. ] ... [ 0. -0.3367086 0.9569023 ... 0.15899621 0.56399196 -1.1003399 ] [-1.1742804 0.6914465 0.61917025 ... 1.3498815 0.7825775 -0.01903726] [-1.0287011 -0.8123034 -0.518022 ... 0.02339972 0.6284265 0. ]] [[ 0. 0. 0. ... 0.93563795 0.9682434 -0.8725443 ] [ 0. 0. 0. ... 1.0124986 0.78769046 0.36154783] [ 0. 0. 0. ... 0.833484 0.73279554 0. ] ... [ 0. 0.87859684 -0.3111266 ... -1.1311723 0.7991688 0.00448052] [-0.27024755 -1.3864158 0.42643085 ... 0.38178575 -0.9001625 -0.17112963] [ 0.8340837 -0.67452466 0.6013959 ... 0.6808658 -1.7411832 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.03295407 -1.1425236 0.42604083] [ 0. 0. 0. ... -1.6007018 2.1375597 0.17573877] [ 0. 0. 0. ... 0.41090426 1.5108973 0. ] ... [ 0. -0.56723636 0.9598703 ... -0.25937587 -2.3657355 -0.3717012 ] [-1.4899489 0.09873961 -0.8091282 ... -0.5487101 -0.17146912 1.2052556 ] [-1.0808998 0.70261854 -1.0227779 ... -2.1570716 0.8937106 0. ]] [[ 0. 0. 0. ... 0.6903683 -0.9676918 -0.41217738] [ 0. 0. 0. ... 0.73586816 1.062783 -0.731907 ] [ 0. 0. 0. ... -0.40528226 1.4643289 0. ] ... [ 0. 0.16828324 -0.85393846 ... -0.25430596 -0.3425795 0.06111652] [-0.50504035 0.45560738 -1.3837456 ... 1.216856 0.70452124 0.1261981 ] [ 0.57214403 0.62055 -0.95195466 ... 0.34256226 0.8418892 0. ]] [[ 0. 0. 0. ... 0.01768211 -0.10442887 1.9809033 ] [ 0. 0. 0. ... -0.08485464 -1.195723 -1.304884 ] [ 0. 0. 0. ... -0.4680152 0.54395586 0. ] ... [ 0. -0.18316524 2.0210605 ... 0.35401645 -0.02438085 -3.0869644 ] [ 0.3844647 -1.0868374 -0.46833047 ... 0.7557157 0.281091 0.1992191 ] [ 1.3134674 -0.00984686 -0.8965998 ... 0.4420108 0.721892 0. ]] ... [[ 0. 0. 0. ... -0.01075497 0.46861592 1.0976218 ] [ 0. 0. 0. ... 0.5770641 -0.36148408 1.3235844 ] [ 0. 0. 0. ... -1.3169785 -0.94484043 0. ] ... [ 0. 1.3154296 -1.0356654 ... -0.34059843 1.5829016 0.4941199 ] [-1.8401012 -0.7371215 0.2033294 ... -0.06908569 -1.0850059 -0.36226973] [ 0.3394092 0.31265998 0.6358673 ... 0.33055487 1.8772331 0. ]] [[ 0. 0. 0. ... 
-1.0006467 0.8918707 -1.230398 ] [ 0. 0. 0. ... 0.24906906 0.1857981 -1.4998794 ] [ 0. 0. 0. ... -0.53937495 -0.39900917 0. ] ... [ 0. -0.3367086 0.9569023 ... 0.15899621 0.56399196 -1.1003399 ] [-1.1742804 0.6914465 0.61917025 ... 1.3498815 0.7825775 -0.01903726] [-1.0287011 -0.8123034 -0.518022 ... 0.02339972 0.6284265 0. ]] [[ 0. 0. 0. ... 0.93563795 0.9682434 -0.8725443 ] [ 0. 0. 0. ... 1.0124986 0.78769046 0.36154783] [ 0. 0. 0. ... 0.833484 0.73279554 0. ] ... [ 0. 0.87859684 -0.3111266 ... -1.1311723 0.7991688 0.00448052] [-0.27024755 -1.3864158 0.42643085 ... 0.38178575 -0.9001625 -0.17112963] [ 0.8340837 -0.67452466 0.6013959 ... 0.6808658 -1.7411832 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4747.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 1.6168575 0.8129787 0.65067524] [ 0. 0. 0. ... -0.8624563 0.9099055 1.9473166 ] [ 0. -0.21173775 -0.4288586 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.5141839 -0.72227055 -2.0633912 ] [ 0. -0.05474585 0.24844682 ... 0. 0. 0. ] [-0.68759334 0.3711849 -1.0743207 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0677756 1.0329096 1.8127565 ] [ 0. 0. 0. ... -0.65925133 -1.3846318 -2.3299732 ] [ 0. -1.1502414 -0.6301641 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -2.2687943 1.2449551 -0.44433078] [ 0. -1.2292496 0.4566399 ... 0. 0. 0. ] [ 0.38422063 1.7082424 -0.85380363 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.00240627 -0.3698386 -0.5123294 ] [ 0. 0. 0. ... -0.5515043 -1.4877338 1.6517535 ] [ 0. 0.9593551 1.2367884 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.01074258 -0.9629435 -0.26385415] [ 0. -0.27494287 -0.6207717 ... 0. 0. 0. ] [-0.879072 -1.5368675 0.14827207 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.4790176 1.3662485 1.2777885 ] [ 0. 0. 0. ... -1.0851876 -1.0346389 -0.29397616] [ 0. -0.4271456 -0.9631925 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.3316969 0.07549184 0.7266008 ] [ 0. -0.14721867 -0.2893679 ... 0. 0. 0. ] [ 0.525093 -0.232048 -0.28507328 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.5060421 -1.7707902 -1.2795401 ] [ 0. 0. 0. ... 0.5167426 -1.491032 -0.18046868] [ 0. -0.7931983 -0.74527526 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.27732632 -0.36894193 -0.4662288 ] [ 0. 0.02018519 -0.9847286 ... 0. 0. 0. ] [ 1.8245708 0.13318826 -0.06073254 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 
-1.0751125 0.9128535 0.15698649] [ 0. 0. 0. ... -0.69896877 1.3334051 0.6426204 ] [ 0. 0.06910146 0.07258557 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.263737 0.3462475 0.6423248 ] [ 0. 0.7190529 -0.587689 ... 0. 0. 0. ] [-0.92219746 1.3777972 -0.47602507 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 1.6168575 0.8129787 0.65067524] [ 0. 0. 0. ... -0.8624563 0.9099055 1.9473166 ] [ 0. -0.21173775 -0.4288586 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.5141839 -0.72227055 -2.0633912 ] [ 0. -0.05474585 0.24844682 ... 0. 0. 0. ] [-0.68759334 0.3711849 -1.0743207 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0677756 1.0329096 1.8127565 ] [ 0. 0. 0. ... -0.65925133 -1.3846318 -2.3299732 ] [ 0. -1.1502414 -0.6301641 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -2.2687943 1.2449551 -0.44433078] [ 0. -1.2292496 0.4566399 ... 0. 0. 0. ] [ 0.38422063 1.7082424 -0.85380363 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.00240627 -0.3698386 -0.5123294 ] [ 0. 0. 0. ... -0.5515043 -1.4877338 1.6517535 ] [ 0. 0.9593551 1.2367884 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.01074258 -0.9629435 -0.26385415] [ 0. -0.27494287 -0.6207717 ... 0. 0. 0. ] [-0.879072 -1.5368675 0.14827207 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.4790176 1.3662485 1.2777885 ] [ 0. 0. 0. ... -1.0851876 -1.0346389 -0.29397616] [ 0. -0.4271456 -0.9631925 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.3316969 0.07549184 0.7266008 ] [ 0. -0.14721867 -0.2893679 ... 0. 0. 0. ] [ 0.525093 -0.232048 -0.28507328 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.5060421 -1.7707902 -1.2795401 ] [ 0. 0. 0. ... 0.5167426 -1.491032 -0.18046868] [ 0. -0.7931983 -0.74527526 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.27732632 -0.36894193 -0.4662288 ] [ 0. 0.02018519 -0.9847286 ... 0. 0. 0. ] [ 1.8245708 0.13318826 -0.06073254 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.0751125 0.9128535 0.15698649] [ 0. 0. 0. ... -0.69896877 1.3334051 0.6426204 ] [ 0. 0.06910146 0.07258557 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.263737 0.3462475 0.6423248 ] [ 0. 0.7190529 -0.587689 ... 0. 0. 0. ] [-0.92219746 1.3777972 -0.47602507 ... 0. 0. 0. 
]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:1 - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4749.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -1.2988307 -0.39787695 -0.5563723 ] [ 0. 0. 0. ... -0.29456067 -0.16103363 0.51572496] [ 0. 0. 0. ... 0.3757921 -0.4838735 0.9407753 ]] [[ 0. 0. 0. ... 0.67559314 0.82434225 -1.688225 ] [ 0. 0. 0. ... -1.0776697 -1.5531749 2.5754352 ] [ 0. 0. 0. ... 0.09747072 -2.0403652 -1.1314924 ]] [[ 0. 0. 0. ... -1.2456237 -2.5046947 0.26444048] [ 0. 0. 0. ... -2.0209875 -0.37321383 1.433535 ] [ 0. 0. 0. ... 0.62345934 -1.2190919 0.80218047]] ... [[ 0. 0. 0. ... 0.17228548 -1.9141635 0.39553058] [ 0. 0. 0. ... 1.0705557 0.49090442 2.3246634 ] [ 0. 0. 0. ... -0.15651125 0.8720121 -0.5995807 ]] [[ 0. 0. 0. ... 0.4575872 0.11160472 -0.06847325] [ 0. 0. 0. ... 0.05457044 -0.95496583 0.8600704 ] [ 0. 0. 0. ... 0.5171045 0.2620677 0.6797981 ]] [[ 0. 0. 0. ... -0.04960166 1.8962994 0.6535604 ] [ 0. 0. 0. ... -1.2009395 -1.9446994 -1.8055234 ] [ 0. 0. 0. ... 0.2722017 0.37453926 1.1356777 ]]]; ov_res: [[[ 0. 0. 0. ... -1.2988307 -0.39787695 -0.5563723 ] [ 0. 0. 0. ... -0.29456067 -0.16103363 0.51572496] [ 0. 0. 0. ... 0.3757921 -0.4838735 0.9407753 ]] [[ 0. 0. 0. ... 0.67559314 0.82434225 -1.688225 ] [ 0. 0. 0. ... -1.0776697 -1.5531749 2.5754352 ] [ 0. 0. 0. ... 0.09747072 -2.0403652 -1.1314924 ]] [[ 0. 0. 0. ... -1.2456237 -2.5046947 0.26444048] [ 0. 0. 0. ... -2.0209875 -0.37321383 1.433535 ] [ 0. 0. 0. ... 0.62345934 -1.2190919 0.80218047]] ... [[ 0. 0. 0. ... 0.17228548 -1.9141635 0.39553058] [ 0. 0. 0. ... 1.0705557 0.49090442 2.3246634 ] [ 0. 0. 0. ... 
-0.15651125 0.8720121 -0.5995807 ]] [[ 0. 0. 0. ... 0.4575872 0.11160472 -0.06847325] [ 0. 0. 0. ... 0.05457044 -0.95496583 0.8600704 ] [ 0. 0. 0. ... 0.5171045 0.2620677 0.6797981 ]] [[ 0. 0. 0. ... -0.04960166 1.8962994 0.6535604 ] [ 0. 0. 0. ... -1.2009395 -1.9446994 -1.8055234 ] [ 0. 0. 0. ... 0.2722017 0.37453926 1.1356777 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4751.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 1.4029452 0.54964876 0. ] [ 0. 0. 0. ... 0.08964542 0.02143544 0. ] [ 0. 0. 0. ... 1.4452918 2.0170932 0. ] ... [ 0. 0. 0. ... 1.8036255 -0.44887677 0. ] [ 0. 0. 0. ... -1.3495024 -0.57619566 0. ] [ 0. 0. 0. ... 0.03569556 -0.54156524 0. ]] [[ 0. 0. 0. ... -0.39720047 1.4536277 0. ] [ 0. 0. 0. ... -1.5009817 1.0931898 0. ] [ 0. 0. 0. ... -0.40803954 -0.6568582 0. ] ... [ 0. 0. 0. ... 0.22429337 -0.19533834 0. ] [ 0. 0. 0. ... -0.71361893 -0.9736538 0. ] [ 0. 0. 0. ... 0.43858036 0.74042827 0. ]] [[ 0. 0. 0. ... 0.4494799 0.9101338 0. ] [ 0. 0. 0. ... 1.2613562 -0.7109458 0. ] [ 0. 0. 0. ... -0.79089653 0.87531674 0. ] ... [ 0. 0. 0. ... -0.9412816 2.2009609 0. ] [ 0. 0. 0. ... -1.6317337 1.2818742 0. ] [ 0. 0. 0. ... -0.43149772 0.7290475 0. ]] ... [[ 0. 0. 0. ... 1.4198207 -0.90255827 0. ] [ 0. 0. 0. ... -0.64728636 -0.11345772 0. ] [ 0. 0. 0. ... 2.7226965 -0.3346059 0. ] ... [ 0. 0. 0. ... -0.815197 2.282549 0. ] [ 0. 0. 0. ... -0.7804175 -0.4557186 0. ] [ 0. 0. 0. ... 0.17443284 -0.89484596 0. ]] [[ 0. 0. 0. ... -0.2583503 1.5847149 0. ] [ 0. 0. 0. ... 0.69497156 0.10638407 0. ] [ 0. 0. 0. ... 1.6482532 0.420039 0. ] ... [ 0. 0. 0. ... 0.97322595 -1.3639927 0. ] [ 0. 0. 0. ... 1.0418646 0.2492107 0. ] [ 0. 0. 0. ... -1.2841076 -1.0150348 0. ]] [[ 0. 0. 0. ... -0.46984777 -0.19114298 0. ] [ 0. 0. 0. ... 0.60771084 -1.3902742 0. ] [ 0. 0. 0. ... 0.9400103 1.1492887 0. ] ... [ 0. 0. 0. ... -0.24987452 -0.13472858 0. ] [ 0. 0. 0. ... 1.4910278 -1.1872845 0. ] [ 0. 
0. 0. ... 0.2169019 -0.5806239 0. ]]]; ov_res: [[[ 0. 0. 0. ... 1.4029452 0.54964876 0. ] [ 0. 0. 0. ... 0.08964542 0.02143544 0. ] [ 0. 0. 0. ... 1.4452918 2.0170932 0. ] ... [ 0. 0. 0. ... 1.8036255 -0.44887677 0. ] [ 0. 0. 0. ... -1.3495024 -0.57619566 0. ] [ 0. 0. 0. ... 0.03569556 -0.54156524 0. ]] [[ 0. 0. 0. ... -0.39720047 1.4536277 0. ] [ 0. 0. 0. ... -1.5009817 1.0931898 0. ] [ 0. 0. 0. ... -0.40803954 -0.6568582 0. ] ... [ 0. 0. 0. ... 0.22429337 -0.19533834 0. ] [ 0. 0. 0. ... -0.71361893 -0.9736538 0. ] [ 0. 0. 0. ... 0.43858036 0.74042827 0. ]] [[ 0. 0. 0. ... 0.4494799 0.9101338 0. ] [ 0. 0. 0. ... 1.2613562 -0.7109458 0. ] [ 0. 0. 0. ... -0.79089653 0.87531674 0. ] ... [ 0. 0. 0. ... -0.9412816 2.2009609 0. ] [ 0. 0. 0. ... -1.6317337 1.2818742 0. ] [ 0. 0. 0. ... -0.43149772 0.7290475 0. ]] ... [[ 0. 0. 0. ... 1.4198207 -0.90255827 0. ] [ 0. 0. 0. ... -0.64728636 -0.11345772 0. ] [ 0. 0. 0. ... 2.7226965 -0.3346059 0. ] ... [ 0. 0. 0. ... -0.815197 2.282549 0. ] [ 0. 0. 0. ... -0.7804175 -0.4557186 0. ] [ 0. 0. 0. ... 0.17443284 -0.89484596 0. ]] [[ 0. 0. 0. ... -0.2583503 1.5847149 0. ] [ 0. 0. 0. ... 0.69497156 0.10638407 0. ] [ 0. 0. 0. ... 1.6482532 0.420039 0. ] ... [ 0. 0. 0. ... 0.97322595 -1.3639927 0. ] [ 0. 0. 0. ... 1.0418646 0.2492107 0. ] [ 0. 0. 0. ... -1.2841076 -1.0150348 0. ]] [[ 0. 0. 0. ... -0.46984777 -0.19114298 0. ] [ 0. 0. 0. ... 0.60771084 -1.3902742 0. ] [ 0. 0. 0. ... 0.9400103 1.1492887 0. ] ... [ 0. 0. 0. ... -0.24987452 -0.13472858 0. ] [ 0. 0. 0. ... 1.4910278 -1.1872845 0. ] [ 0. 0. 0. ... 0.2169019 -0.5806239 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:1 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4753.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.08045691 0.06291948 0. ] [ 0. 0. 0. ... -0.5940516 -1.2631785 0. ] [ 0. 0. 0. ... -1.532133 -0.5720688 0. ] ... [ 0. 0. 0. ... 0.6789978 1.2077404 0. ] [ 0. 0.6770852 0.7388422 ... 0. 0. 0. ] [ 0. -1.5443239 0.17707337 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.5354389 0.7251377 0. ] [ 0. 0. 0. ... 0.6951343 0.4896069 0. ] [ 0. 0. 0. ... -0.99114394 1.112464 0. ] ... [ 0. 0. 0. ... 0.95684254 -0.6724403 0. ] [ 0. 0.8948598 0.2595254 ... 0. 0. 0. ] [ 0. -1.77132 1.6098996 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.94155455 0.12094406 0. ] [ 0. 0. 0. ... 1.1389883 0.03095252 0. ] [ 0. 0. 0. ... 0.5506773 0.577049 0. ] ... [ 0. 0. 0. ... 1.8585594 0.20097825 0. ] [ 0. 0.00442478 0.8917209 ... 0. 0. 0. ] [ 0. -1.4964343 -0.351749 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.6341306 -0.5019666 0. ] [ 0. 0. 0. ... 2.188839 -0.6966912 0. ] [ 0. 0. 0. ... 0.36860138 -1.260428 0. ] ... [ 0. 0. 0. ... -1.69823 -0.707011 0. ] [ 0. 2.3705995 -0.7214126 ... 0. 0. 0. ] [ 0. -0.66090685 -2.6092224 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.894861 0.5170786 0. ] [ 0. 0. 0. ... -1.4243509 0.00988067 0. ] [ 0. 0. 0. ... -0.22746305 2.0791442 0. ] ... [ 0. 0. 0. ... 0.22994615 -1.5567117 0. ] [ 0. 0.45444095 0.38108227 ... 0. 0. 0. ] [ 0. 0.35999057 -0.53258663 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.41898692 -0.68728846 0. ] [ 0. 0. 0. ... -0.24915156 -0.44265065 0. ] [ 0. 0. 0. ... 0.76591116 -0.24016418 0. ] ... [ 0. 0. 0. ... 0.2555867 0.57469046 0. ] [ 0. 
-1.3475047 -0.2912239 ... 0. 0. 0. ] [ 0. 0.5720565 1.3754889 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.08045691 0.06291948 0. ] [ 0. 0. 0. ... -0.5940516 -1.2631785 0. ] [ 0. 0. 0. ... -1.532133 -0.5720688 0. ] ... [ 0. 0. 0. ... 0.6789978 1.2077404 0. ] [ 0. 0.6770852 0.7388422 ... 0. 0. 0. ] [ 0. -1.5443239 0.17707337 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.5354389 0.7251377 0. ] [ 0. 0. 0. ... 0.6951343 0.4896069 0. ] [ 0. 0. 0. ... -0.99114394 1.112464 0. ] ... [ 0. 0. 0. ... 0.95684254 -0.6724403 0. ] [ 0. 0.8948598 0.2595254 ... 0. 0. 0. ] [ 0. -1.77132 1.6098996 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.94155455 0.12094406 0. ] [ 0. 0. 0. ... 1.1389883 0.03095252 0. ] [ 0. 0. 0. ... 0.5506773 0.577049 0. ] ... [ 0. 0. 0. ... 1.8585594 0.20097825 0. ] [ 0. 0.00442478 0.8917209 ... 0. 0. 0. ] [ 0. -1.4964343 -0.351749 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.6341306 -0.5019666 0. ] [ 0. 0. 0. ... 2.188839 -0.6966912 0. ] [ 0. 0. 0. ... 0.36860138 -1.260428 0. ] ... [ 0. 0. 0. ... -1.69823 -0.707011 0. ] [ 0. 2.3705995 -0.7214126 ... 0. 0. 0. ] [ 0. -0.66090685 -2.6092224 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.894861 0.5170786 0. ] [ 0. 0. 0. ... -1.4243509 0.00988067 0. ] [ 0. 0. 0. ... -0.22746305 2.0791442 0. ] ... [ 0. 0. 0. ... 0.22994615 -1.5567117 0. ] [ 0. 0.45444095 0.38108227 ... 0. 0. 0. ] [ 0. 0.35999057 -0.53258663 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.41898692 -0.68728846 0. ] [ 0. 0. 0. ... -0.24915156 -0.44265065 0. ] [ 0. 0. 0. ... 0.76591116 -0.24016418 0. ] ... [ 0. 0. 0. ... 0.2555867 0.57469046 0. ] [ 0. -1.3475047 -0.2912239 ... 0. 0. 0. ] [ 0. 0.5720565 1.3754889 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4755.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.83842194 -1.7482399 0. ] [ 0. 0. 0. ... 1.3494023 1.9280437 0. ] [ 0. 0. 0. ... -0.6548323 -0.21903731 0. ] ... [ 0. -0.00340255 1.7644551 ... 0. 0. 0. ] [ 0. -0.4646755 -0.00919905 ... 0. 0. 0. ] [ 0. 0.0261784 0.11707386 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.8243959 -0.42325628 0. ] [ 0. 0. 0. ... 0.59805626 -0.6135498 0. ] [ 0. 0. 0. ... 0.3518804 -0.955491 0. ] ... [ 0. 0.1826514 0.6447632 ... 0. 0. 0. ] [ 0. -1.5204009 -0.78208303 ... 0. 0. 0. ] [ 0. 0.09276413 -0.00596103 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 2.625128 -0.5263756 0. ] [ 0. 0. 0. ... 0.83555496 -0.11223289 0. ] [ 0. 0. 0. ... 0.4881513 -0.20627823 0. ] ... [ 0. 0.26445904 1.0178559 ... 0. 0. 0. ] [ 0. 0.8421521 0.22259437 ... 0. 0. 0. ] [ 0. 0.5259829 -0.66407865 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.1221416 0.56511873 0. ] [ 0. 0. 0. ... -0.46386367 0.12480788 0. ] [ 0. 0. 0. ... 0.3665337 0.16956219 0. ] ... [ 0. -0.17964603 1.7124993 ... 0. 0. 0. ] [ 0. -0.7867711 0.61728746 ... 0. 0. 0. ] [ 0. 0.18611445 0.68851733 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.43765914 -0.01975859 0. ] [ 0. 0. 0. ... 0.0068549 1.8958293 0. ] [ 0. 0. 0. ... -0.7862031 0.27764475 0. ] ... [ 0. 0.03696924 -0.07877594 ... 0. 0. 0. ] [ 0. 2.3542309 -1.324322 ... 0. 0. 0. ] [ 0. 0.51067334 1.6109663 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2315322 -0.28129485 0. ] [ 0. 0. 0. ... 0.70941955 0.54547286 0. ] [ 0. 0. 0. ... -1.4756496 -2.0250945 0. ] ... [ 0. 1.1099355 -0.41986942 ... 0. 0. 0. ] [ 0. 
1.1434616 0.67159545 ... 0. 0. 0. ] [ 0. -2.0000582 -0.6767139 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.83842194 -1.7482399 0. ] [ 0. 0. 0. ... 1.3494023 1.9280437 0. ] [ 0. 0. 0. ... -0.6548323 -0.21903731 0. ] ... [ 0. -0.00340255 1.7644551 ... 0. 0. 0. ] [ 0. -0.4646755 -0.00919905 ... 0. 0. 0. ] [ 0. 0.0261784 0.11707386 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.8243959 -0.42325628 0. ] [ 0. 0. 0. ... 0.59805626 -0.6135498 0. ] [ 0. 0. 0. ... 0.3518804 -0.955491 0. ] ... [ 0. 0.1826514 0.6447632 ... 0. 0. 0. ] [ 0. -1.5204009 -0.78208303 ... 0. 0. 0. ] [ 0. 0.09276413 -0.00596103 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 2.625128 -0.5263756 0. ] [ 0. 0. 0. ... 0.83555496 -0.11223289 0. ] [ 0. 0. 0. ... 0.4881513 -0.20627823 0. ] ... [ 0. 0.26445904 1.0178559 ... 0. 0. 0. ] [ 0. 0.8421521 0.22259437 ... 0. 0. 0. ] [ 0. 0.5259829 -0.66407865 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.1221416 0.56511873 0. ] [ 0. 0. 0. ... -0.46386367 0.12480788 0. ] [ 0. 0. 0. ... 0.3665337 0.16956219 0. ] ... [ 0. -0.17964603 1.7124993 ... 0. 0. 0. ] [ 0. -0.7867711 0.61728746 ... 0. 0. 0. ] [ 0. 0.18611445 0.68851733 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.43765914 -0.01975859 0. ] [ 0. 0. 0. ... 0.0068549 1.8958293 0. ] [ 0. 0. 0. ... -0.7862031 0.27764475 0. ] ... [ 0. 0.03696924 -0.07877594 ... 0. 0. 0. ] [ 0. 2.3542309 -1.324322 ... 0. 0. 0. ] [ 0. 0.51067334 1.6109663 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2315322 -0.28129485 0. ] [ 0. 0. 0. ... 0.70941955 0.54547286 0. ] [ 0. 0. 0. ... -1.4756496 -2.0250945 0. ] ... [ 0. 1.1099355 -0.41986942 ... 0. 0. 0. ] [ 0. 1.1434616 0.67159545 ... 0. 0. 0. ] [ 0. -2.0000582 -0.6767139 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4757.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 1.9675106 -1.7309794 0. ] [ 0. 0. 0. ... -0.68851244 -1.1240364 0. ] [ 0. 0. 0. ... -1.285301 -1.3545275 0. ] ... [ 0. 0. 0. ... 1.8718271 -1.8335638 0. ] [ 0. 0. 0. ... 0.33194163 -1.3861486 0. ] [ 0. 0. 0. ... -0.5497421 1.1850287 0. ]] [[ 0. 0. 0. ... 0.418657 -0.6131817 0. ] [ 0. 0. 0. ... -0.60707045 0.8885549 0. ] [ 0. 0. 0. ... -0.44283426 0.561822 0. ] ... [ 0. 0. 0. ... -0.6401054 0.13333116 0. ] [ 0. 0. 0. ... 0.60184443 0.3637031 0. ] [ 0. 0. 0. ... 0.18494248 -0.01746585 0. ]] [[ 0. 0. 0. ... 0.11223868 -2.0936809 0. ] [ 0. 0. 0. ... 1.3328164 -0.1877084 0. ] [ 0. 0. 0. ... -2.4789028 0.50160116 0. ] ... [ 0. 0. 0. ... -0.5271348 -2.1344903 0. ] [ 0. 0. 0. ... 0.79846925 2.1340284 0. ] [ 0. 0. 0. ... 0.26614973 -0.25830528 0. ]] ... [[ 0. 0. 0. ... -0.902019 -0.5860342 0. ] [ 0. 0. 0. ... -0.7071243 0.7249905 0. ] [ 0. 0. 0. ... -0.5518951 0.9414903 0. ] ... [ 0. 0. 0. ... 0.6385873 -1.590175 0. ] [ 0. 0. 0. ... 0.8533672 -0.94622797 0. ] [ 0. 0. 0. ... 1.1771417 1.3473811 0. ]] [[ 0. 0. 0. ... 1.0249006 -0.7145503 0. ] [ 0. 0. 0. ... -1.1074516 -0.12140634 0. ] [ 0. 0. 0. ... -0.7207281 -2.058998 0. ] ... [ 0. 0. 0. ... 1.4645903 0.828358 0. ] [ 0. 0. 0. ... 0.2139355 -1.4179407 0. ] [ 0. 0. 0. ... 0.6993936 -1.7360387 0. ]] [[ 0. 0. 0. ... -0.02945777 -0.57009643 0. ] [ 0. 0. 0. ... -0.31850833 -0.7348128 0. ] [ 0. 0. 0. ... -0.21896312 0.06124297 0. ] ... [ 0. 0. 0. ... 0.305893 1.166005 0. ] [ 0. 0. 
0. ... -3.133592 -2.4211848 0. ] [ 0. 0. 0. ... -0.2515608 -2.179361 0. ]]]; ov_res: [[[ 0. 0. 0. ... 1.9675106 -1.7309794 0. ] [ 0. 0. 0. ... -0.68851244 -1.1240364 0. ] [ 0. 0. 0. ... -1.285301 -1.3545275 0. ] ... [ 0. 0. 0. ... 1.8718271 -1.8335638 0. ] [ 0. 0. 0. ... 0.33194163 -1.3861486 0. ] [ 0. 0. 0. ... -0.5497421 1.1850287 0. ]] [[ 0. 0. 0. ... 0.418657 -0.6131817 0. ] [ 0. 0. 0. ... -0.60707045 0.8885549 0. ] [ 0. 0. 0. ... -0.44283426 0.561822 0. ] ... [ 0. 0. 0. ... -0.6401054 0.13333116 0. ] [ 0. 0. 0. ... 0.60184443 0.3637031 0. ] [ 0. 0. 0. ... 0.18494248 -0.01746585 0. ]] [[ 0. 0. 0. ... 0.11223868 -2.0936809 0. ] [ 0. 0. 0. ... 1.3328164 -0.1877084 0. ] [ 0. 0. 0. ... -2.4789028 0.50160116 0. ] ... [ 0. 0. 0. ... -0.5271348 -2.1344903 0. ] [ 0. 0. 0. ... 0.79846925 2.1340284 0. ] [ 0. 0. 0. ... 0.26614973 -0.25830528 0. ]] ... [[ 0. 0. 0. ... -0.902019 -0.5860342 0. ] [ 0. 0. 0. ... -0.7071243 0.7249905 0. ] [ 0. 0. 0. ... -0.5518951 0.9414903 0. ] ... [ 0. 0. 0. ... 0.6385873 -1.590175 0. ] [ 0. 0. 0. ... 0.8533672 -0.94622797 0. ] [ 0. 0. 0. ... 1.1771417 1.3473811 0. ]] [[ 0. 0. 0. ... 1.0249006 -0.7145503 0. ] [ 0. 0. 0. ... -1.1074516 -0.12140634 0. ] [ 0. 0. 0. ... -0.7207281 -2.058998 0. ] ... [ 0. 0. 0. ... 1.4645903 0.828358 0. ] [ 0. 0. 0. ... 0.2139355 -1.4179407 0. ] [ 0. 0. 0. ... 0.6993936 -1.7360387 0. ]] [[ 0. 0. 0. ... -0.02945777 -0.57009643 0. ] [ 0. 0. 0. ... -0.31850833 -0.7348128 0. ] [ 0. 0. 0. ... -0.21896312 0.06124297 0. ] ... [ 0. 0. 0. ... 0.305893 1.166005 0. ] [ 0. 0. 0. ... -3.133592 -2.4211848 0. ] [ 0. 0. 0. ... -0.2515608 -2.179361 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:1 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4759.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4761.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.0992677e+00 1.0469505e+00 -1.2165273e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.7401958e-01 -8.4781700e-01 6.5760767e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 7.0230377e-01 6.3920325e-01 0.0000000e+00] ... [ 0.0000000e+00 2.0927305e+00 1.1429503e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -8.2584649e-01 -1.3424858e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-1.7017032e+00 1.7590309e+00 -1.7627952e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -9.1565832e-02 -9.4873327e-01 1.7998160e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 8.6157697e-01 -7.5557697e-01 -3.1104264e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.1521358e+00 -1.2465994e+00 0.0000000e+00] ... [ 0.0000000e+00 1.9446222e+00 -3.2938963e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -3.4595305e-01 2.5383849e-02 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-8.4738320e-01 1.6221231e+00 -1.9371191e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.4056149e+00 -3.9613109e-02 3.7909463e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 6.2032694e-01 -1.0956358e+00 3.2119158e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 6.3980162e-01 6.7430300e-01 0.0000000e+00] ... [ 0.0000000e+00 1.7767204e+00 6.5499532e-01 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -2.3036475e+00 6.8539369e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 2.6107016e+00 -3.3887985e-01 -1.0554118e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.8486358e-01 1.5922888e-01 -1.2286851e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.8882525e+00 -5.0229424e-01 -1.4411426e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.9625705e-01 -1.9092813e+00 0.0000000e+00] ... [ 0.0000000e+00 1.8464729e-01 -2.6338387e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 9.7625268e-01 -1.7661023e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-6.0641193e-01 -2.3873322e+00 -1.1236825e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.6729480e-01 1.9860090e+00 2.9533461e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.2723947e+00 -1.2480826e+00 -3.0980214e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.4654680e+00 -7.6059240e-01 0.0000000e+00] ... [ 0.0000000e+00 7.2908109e-01 1.0857077e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -4.1641676e-01 7.5810027e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-8.8342912e-02 4.8151550e-01 -1.2842994e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.3592758e+00 1.6777462e-01 4.3470335e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 3.4917876e-04 -7.7906233e-01 -1.0259328e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.2102687e-01 5.1735330e-01 0.0000000e+00] ... [ 0.0000000e+00 -3.1665605e-01 -1.2471883e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -2.5560817e-01 1.5775459e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-4.8572904e-01 -6.9284350e-01 -1.6121255e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]; ov_res: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
-1.0992677e+00 1.0469505e+00 -1.2165273e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.7401958e-01 -8.4781700e-01 6.5760767e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 7.0230377e-01 6.3920325e-01 0.0000000e+00] ... [ 0.0000000e+00 2.0927305e+00 1.1429503e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -8.2584649e-01 -1.3424858e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-1.7017032e+00 1.7590309e+00 -1.7627952e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -9.1565832e-02 -9.4873327e-01 1.7998160e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 8.6157697e-01 -7.5557697e-01 -3.1104264e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.1521358e+00 -1.2465994e+00 0.0000000e+00] ... [ 0.0000000e+00 1.9446222e+00 -3.2938963e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -3.4595305e-01 2.5383849e-02 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-8.4738320e-01 1.6221231e+00 -1.9371191e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.4056149e+00 -3.9613109e-02 3.7909463e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 6.2032694e-01 -1.0956358e+00 3.2119158e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 6.3980162e-01 6.7430300e-01 0.0000000e+00] ... [ 0.0000000e+00 1.7767204e+00 6.5499532e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -2.3036475e+00 6.8539369e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 2.6107016e+00 -3.3887985e-01 -1.0554118e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.8486358e-01 1.5922888e-01 -1.2286851e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.8882525e+00 -5.0229424e-01 -1.4411426e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.9625705e-01 -1.9092813e+00 0.0000000e+00] ... [ 0.0000000e+00 1.8464729e-01 -2.6338387e-01 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 9.7625268e-01 -1.7661023e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-6.0641193e-01 -2.3873322e+00 -1.1236825e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.6729480e-01 1.9860090e+00 2.9533461e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.2723947e+00 -1.2480826e+00 -3.0980214e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.4654680e+00 -7.6059240e-01 0.0000000e+00] ... [ 0.0000000e+00 7.2908109e-01 1.0857077e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -4.1641676e-01 7.5810027e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-8.8342912e-02 4.8151550e-01 -1.2842994e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.3592758e+00 1.6777462e-01 4.3470335e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 3.4917876e-04 -7.7906233e-01 -1.0259328e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.2102687e-01 5.1735330e-01 0.0000000e+00] ... [ 0.0000000e+00 -3.1665605e-01 -1.2471883e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -2.5560817e-01 1.5775459e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-4.8572904e-01 -6.9284350e-01 -1.6121255e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4763.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.81628692e+00 -8.72904599e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.96120113e-01 7.70738661e-01 0.00000000e+00] [ 0.00000000e+00 1.23848867e+00 2.32611513e+00 ... 1.53965402e+00 4.43613946e-01 0.00000000e+00] ... [ 0.00000000e+00 -6.40989959e-01 -6.88158631e-01 ... 9.77528870e-01 2.47171569e+00 0.00000000e+00] [ 0.00000000e+00 -2.43492007e+00 1.92437112e+00 ... 7.98550010e-01 -9.40812230e-01 0.00000000e+00] [ 0.00000000e+00 -5.02037287e-01 -4.51894671e-01 ... 2.58782983e-01 7.51094282e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.81730169e-01 4.69130099e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 3.66836905e-01 -1.74336523e-01 0.00000000e+00] [ 0.00000000e+00 7.30231166e-01 -1.12330019e-01 ... 9.47219551e-01 -8.76818895e-01 0.00000000e+00] ... [ 0.00000000e+00 -6.37626588e-01 -7.46279061e-02 ... 3.08865875e-01 1.83112861e-03 0.00000000e+00] [ 0.00000000e+00 3.15222144e-01 3.37398291e-01 ... 1.10852337e+00 1.40314496e+00 0.00000000e+00] [ 0.00000000e+00 1.03496432e+00 -2.23197317e+00 ... 2.05195045e+00 3.52667809e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.87827730e-01 -2.47311264e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.33079433e+00 -1.25938785e+00 0.00000000e+00] [ 0.00000000e+00 2.77115852e-01 9.84379053e-01 ... 
-8.92790973e-01 -4.39072192e-01 0.00000000e+00] ... [ 0.00000000e+00 1.70405304e+00 -1.06525302e+00 ... -7.60657907e-01 4.30029541e-01 0.00000000e+00] [ 0.00000000e+00 -8.91139686e-01 -1.63464379e+00 ... -5.03017426e-01 8.67520392e-01 0.00000000e+00] [ 0.00000000e+00 3.42668325e-01 7.39818275e-01 ... 4.09985520e-03 2.05677867e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.86814922e-01 1.61590531e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 9.88840401e-01 -1.17921150e+00 0.00000000e+00] [ 0.00000000e+00 6.27537429e-01 1.30349219e+00 ... -5.33423066e-01 1.93794334e+00 0.00000000e+00] ... [ 0.00000000e+00 3.37550193e-01 -1.18562663e+00 ... -4.68520522e-01 -3.85865241e-01 0.00000000e+00] [ 0.00000000e+00 -3.90600041e-02 5.84465526e-02 ... -2.96530008e-01 -7.90715396e-01 0.00000000e+00] [ 0.00000000e+00 -3.93949211e-01 6.20785296e-01 ... 7.19808578e-01 -1.05476320e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.38335481e-01 8.95351231e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.57011658e-01 -5.34431815e-01 0.00000000e+00] [ 0.00000000e+00 1.39446422e-01 3.96650970e-01 ... -1.09239781e+00 8.96207571e-01 0.00000000e+00] ... [ 0.00000000e+00 1.67991281e+00 -4.29808736e-01 ... 1.24658310e+00 1.22909284e+00 0.00000000e+00] [ 0.00000000e+00 -1.18325794e+00 6.20703220e-01 ... -2.11063576e+00 -9.03348625e-02 0.00000000e+00] [ 0.00000000e+00 -1.14406502e+00 -2.77213156e-01 ... 1.22408986e+00 3.08909208e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 3.34654450e-01 -4.68820244e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.08956909e+00 -1.11020672e+00 0.00000000e+00] [ 0.00000000e+00 1.68915093e+00 1.28536177e+00 ... -2.72877282e-03 1.21895934e-03 0.00000000e+00] ... [ 0.00000000e+00 -1.20354140e+00 3.21789868e-02 ... 
-2.09273100e+00 -4.19650763e-01 0.00000000e+00] [ 0.00000000e+00 8.26632082e-02 -1.19013441e+00 ... 5.27061880e-01 1.08426526e-01 0.00000000e+00] [ 0.00000000e+00 8.47996593e-01 1.08735017e-01 ... -1.39834452e+00 -1.83379579e+00 0.00000000e+00]]]; ov_res: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.81628692e+00 -8.72904599e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.96120113e-01 7.70738661e-01 0.00000000e+00] [ 0.00000000e+00 1.23848867e+00 2.32611513e+00 ... 1.53965402e+00 4.43613946e-01 0.00000000e+00] ... [ 0.00000000e+00 -6.40989959e-01 -6.88158631e-01 ... 9.77528870e-01 2.47171569e+00 0.00000000e+00] [ 0.00000000e+00 -2.43492007e+00 1.92437112e+00 ... 7.98550010e-01 -9.40812230e-01 0.00000000e+00] [ 0.00000000e+00 -5.02037287e-01 -4.51894671e-01 ... 2.58782983e-01 7.51094282e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.81730169e-01 4.69130099e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 3.66836905e-01 -1.74336523e-01 0.00000000e+00] [ 0.00000000e+00 7.30231166e-01 -1.12330019e-01 ... 9.47219551e-01 -8.76818895e-01 0.00000000e+00] ... [ 0.00000000e+00 -6.37626588e-01 -7.46279061e-02 ... 3.08865875e-01 1.83112861e-03 0.00000000e+00] [ 0.00000000e+00 3.15222144e-01 3.37398291e-01 ... 1.10852337e+00 1.40314496e+00 0.00000000e+00] [ 0.00000000e+00 1.03496432e+00 -2.23197317e+00 ... 2.05195045e+00 3.52667809e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.87827730e-01 -2.47311264e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.33079433e+00 -1.25938785e+00 0.00000000e+00] [ 0.00000000e+00 2.77115852e-01 9.84379053e-01 ... -8.92790973e-01 -4.39072192e-01 0.00000000e+00] ... [ 0.00000000e+00 1.70405304e+00 -1.06525302e+00 ... -7.60657907e-01 4.30029541e-01 0.00000000e+00] [ 0.00000000e+00 -8.91139686e-01 -1.63464379e+00 ... -5.03017426e-01 8.67520392e-01 0.00000000e+00] [ 0.00000000e+00 3.42668325e-01 7.39818275e-01 ... 
4.09985520e-03 2.05677867e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.86814922e-01 1.61590531e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 9.88840401e-01 -1.17921150e+00 0.00000000e+00] [ 0.00000000e+00 6.27537429e-01 1.30349219e+00 ... -5.33423066e-01 1.93794334e+00 0.00000000e+00] ... [ 0.00000000e+00 3.37550193e-01 -1.18562663e+00 ... -4.68520522e-01 -3.85865241e-01 0.00000000e+00] [ 0.00000000e+00 -3.90600041e-02 5.84465526e-02 ... -2.96530008e-01 -7.90715396e-01 0.00000000e+00] [ 0.00000000e+00 -3.93949211e-01 6.20785296e-01 ... 7.19808578e-01 -1.05476320e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.38335481e-01 8.95351231e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.57011658e-01 -5.34431815e-01 0.00000000e+00] [ 0.00000000e+00 1.39446422e-01 3.96650970e-01 ... -1.09239781e+00 8.96207571e-01 0.00000000e+00] ... [ 0.00000000e+00 1.67991281e+00 -4.29808736e-01 ... 1.24658310e+00 1.22909284e+00 0.00000000e+00] [ 0.00000000e+00 -1.18325794e+00 6.20703220e-01 ... -2.11063576e+00 -9.03348625e-02 0.00000000e+00] [ 0.00000000e+00 -1.14406502e+00 -2.77213156e-01 ... 1.22408986e+00 3.08909208e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 3.34654450e-01 -4.68820244e-01 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.08956909e+00 -1.11020672e+00 0.00000000e+00] [ 0.00000000e+00 1.68915093e+00 1.28536177e+00 ... -2.72877282e-03 1.21895934e-03 0.00000000e+00] ... [ 0.00000000e+00 -1.20354140e+00 3.21789868e-02 ... -2.09273100e+00 -4.19650763e-01 0.00000000e+00] [ 0.00000000e+00 8.26632082e-02 -1.19013441e+00 ... 5.27061880e-01 1.08426526e-01 0.00000000e+00] [ 0.00000000e+00 8.47996593e-01 1.08735017e-01 ... -1.39834452e+00 -1.83379579e+00 0.00000000e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4765.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.91410303 -0.1666096 -0.26723683] [ 0. 0. 0. ... -0.32091078 -0.261533 1.1086305 ] [ 0. 0. 0. ... 1.5321583 -0.8448036 0. ] ... [ 0. -0.41676307 -1.5214616 ... -0.00542459 -1.3241457 -0.35120568] [ 0. 1.1454597 -2.167051 ... 1.047243 -1.2848209 -0.8826509 ] [ 1.2234608 -1.0824401 1.4272897 ... -0.00861749 -0.24336147 0. ]] [[ 0. 0. 0. ... 1.8702003 -0.27072236 -0.4291368 ] [ 0. 0. 0. ... -0.4357651 1.3315692 -0.12577763] [ 0. 0. 0. ... 1.4184321 -1.3749473 0. ] ... [ 0. -1.2151953 0.80768985 ... -0.93533766 -0.08339638 0.49701142] [ 0. 0.25358632 -1.4355328 ... -0.8897282 -0.05849489 -0.45193213] [-1.677772 0.18372092 -0.22443965 ... -0.8649704 0.3968268 0. ]] [[ 0. 0. 0. ... -0.37899172 -1.0695559 -0.5540048 ] [ 0. 0. 0. ... -0.5374258 1.1680106 -0.74493927] [ 0. 0. 0. ... -0.6883215 0.3106328 0. ] ... [ 0. -0.8927087 2.9572558 ... -1.2144141 -2.2455301 -0.44085568] [ 0. -1.0421498 1.224108 ... 0.96000683 0.3737178 0.7533233 ] [ 1.5694803 1.4064375 0.4184206 ... -0.40501305 -0.36979866 0. ]] ... [[ 0. 0. 0. ... 0.01671443 0.43898523 0.25973344] [ 0. 0. 0. ... -0.7444545 0.4452002 -1.2048368 ] [ 0. 0. 0. ... -0.22984591 -0.7501054 0. ] ... [ 0. 0.08584639 0.88899213 ... -1.0809034 -0.40814477 -0.3564227 ] [ 0. 1.1239359 -0.6379726 ... 0.5882221 0.09259948 2.0319605 ] [ 0.03105574 1.3189639 -1.121282 ... -2.1198277 0.3319074 0. ]] [[ 0. 0. 0. ... 1.5254352 -0.7255684 -1.0051204 ] [ 0. 0. 0. ... 1.4815638 -0.39360857 -0.7908847 ] [ 0. 0. 0. ... 
-0.20854084 0.3298251 0. ] ... [ 0. 0.21548913 -0.18358997 ... 0.0277014 -1.0619575 0.5467259 ] [ 0. 1.6489259 1.6056204 ... 1.6427649 -0.2980344 -0.6874455 ] [-0.01379549 0.39047024 -0.23677859 ... -1.6216385 -1.6170875 0. ]] [[ 0. 0. 0. ... 0.74911994 -0.55094945 -0.07365075] [ 0. 0. 0. ... -0.35253358 -0.10995767 -0.02862837] [ 0. 0. 0. ... -0.6316325 -0.60019165 0. ] ... [ 0. -0.41910607 1.0799823 ... -0.8411942 0.7357887 -1.8710725 ] [ 0. -0.8259664 0.5217107 ... 0.10352737 -1.8686036 0.11671674] [-0.10815145 0.9205366 -1.0622768 ... 1.252836 0.9919251 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.91410303 -0.1666096 -0.26723683] [ 0. 0. 0. ... -0.32091078 -0.261533 1.1086305 ] [ 0. 0. 0. ... 1.5321583 -0.8448036 0. ] ... [ 0. -0.41676307 -1.5214616 ... -0.00542459 -1.3241457 -0.35120568] [ 0. 1.1454597 -2.167051 ... 1.047243 -1.2848209 -0.8826509 ] [ 1.2234608 -1.0824401 1.4272897 ... -0.00861749 -0.24336147 0. ]] [[ 0. 0. 0. ... 1.8702003 -0.27072236 -0.4291368 ] [ 0. 0. 0. ... -0.4357651 1.3315692 -0.12577763] [ 0. 0. 0. ... 1.4184321 -1.3749473 0. ] ... [ 0. -1.2151953 0.80768985 ... -0.93533766 -0.08339638 0.49701142] [ 0. 0.25358632 -1.4355328 ... -0.8897282 -0.05849489 -0.45193213] [-1.677772 0.18372092 -0.22443965 ... -0.8649704 0.3968268 0. ]] [[ 0. 0. 0. ... -0.37899172 -1.0695559 -0.5540048 ] [ 0. 0. 0. ... -0.5374258 1.1680106 -0.74493927] [ 0. 0. 0. ... -0.6883215 0.3106328 0. ] ... [ 0. -0.8927087 2.9572558 ... -1.2144141 -2.2455301 -0.44085568] [ 0. -1.0421498 1.224108 ... 0.96000683 0.3737178 0.7533233 ] [ 1.5694803 1.4064375 0.4184206 ... -0.40501305 -0.36979866 0. ]] ... [[ 0. 0. 0. ... 0.01671443 0.43898523 0.25973344] [ 0. 0. 0. ... -0.7444545 0.4452002 -1.2048368 ] [ 0. 0. 0. ... -0.22984591 -0.7501054 0. ] ... [ 0. 0.08584639 0.88899213 ... -1.0809034 -0.40814477 -0.3564227 ] [ 0. 1.1239359 -0.6379726 ... 0.5882221 0.09259948 2.0319605 ] [ 0.03105574 1.3189639 -1.121282 ... -2.1198277 0.3319074 0. ]] [[ 0. 0. 0. ... 
1.5254352 -0.7255684 -1.0051204 ] [ 0. 0. 0. ... 1.4815638 -0.39360857 -0.7908847 ] [ 0. 0. 0. ... -0.20854084 0.3298251 0. ] ... [ 0. 0.21548913 -0.18358997 ... 0.0277014 -1.0619575 0.5467259 ] [ 0. 1.6489259 1.6056204 ... 1.6427649 -0.2980344 -0.6874455 ] [-0.01379549 0.39047024 -0.23677859 ... -1.6216385 -1.6170875 0. ]] [[ 0. 0. 0. ... 0.74911994 -0.55094945 -0.07365075] [ 0. 0. 0. ... -0.35253358 -0.10995767 -0.02862837] [ 0. 0. 0. ... -0.6316325 -0.60019165 0. ] ... [ 0. -0.41910607 1.0799823 ... -0.8411942 0.7357887 -1.8710725 ] [ 0. -0.8259664 0.5217107 ... 0.10352737 -1.8686036 0.11671674] [-0.10815145 0.9205366 -1.0622768 ... 1.252836 0.9919251 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:2 - kernel_size:[2, 2] ] | 0.07 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4767.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -1.3345381 -1.3245277 0. ] [ 0. 0. 0. ... -1.0486078 -1.8124293 0. ] [ 0. -0.15718351 0.376254 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.3021766 -0.9005899 0. ] [ 0. 0.42510304 0.9194907 ... 0. 0. 0. ] [ 0. -0.05460405 -0.8178112 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.73936635 -0.5742564 0. ] [ 0. 0. 0. ... -0.5887455 1.1830282 0. ] [ 0. 2.1157455 1.2144469 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.7859428 -0.6572708 0. ] [ 0. -0.12407176 -1.0206912 ... 0. 0. 0. ] [ 0. 0.32827863 0.9194227 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.3351845 1.0159037 0. ] [ 0. 0. 0. ... 0.55911446 -0.4414236 0. ] [ 0. 0.32657504 -0.50985986 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -2.4664862 -0.74801016 0. ] [ 0. 0.15588795 0.87469447 ... 0. 0. 0. ] [ 0. -0.8778024 2.0958133 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.2469683 0.4859153 0. ] [ 0. 0. 0. ... -1.2289492 0.4578551 0. ] [ 0. 0.7830677 0.3723837 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.75603473 0.9032738 0. ] [ 0. -0.06533849 0.45355147 ... 0. 0. 0. ] [ 0. -0.33276138 -1.8060178 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.43785602 -0.5958775 0. ] [ 0. 0. 0. ... -0.27494282 -0.33434358 0. ] [ 0. 0.3233017 0.21224265 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.5138698 0.8223452 0. ] [ 0. 2.2687972 -1.0008074 ... 0. 0. 0. ] [ 0. 0.9268584 -1.6294987 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.7443587 0.5794382 0. ] [ 0. 0. 0. ... 0.22763003 -0.7709909 0. ] [ 0. 0.78348887 -0.05308081 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.38507003 -1.1064844 0. ] [ 0. 
-0.32792723 0.6935566 ... 0. 0. 0. ] [ 0. -0.4357316 -0.5743074 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -1.3345381 -1.3245277 0. ] [ 0. 0. 0. ... -1.0486078 -1.8124293 0. ] [ 0. -0.15718351 0.376254 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.3021766 -0.9005899 0. ] [ 0. 0.42510304 0.9194907 ... 0. 0. 0. ] [ 0. -0.05460405 -0.8178112 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.73936635 -0.5742564 0. ] [ 0. 0. 0. ... -0.5887455 1.1830282 0. ] [ 0. 2.1157455 1.2144469 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.7859428 -0.6572708 0. ] [ 0. -0.12407176 -1.0206912 ... 0. 0. 0. ] [ 0. 0.32827863 0.9194227 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.3351845 1.0159037 0. ] [ 0. 0. 0. ... 0.55911446 -0.4414236 0. ] [ 0. 0.32657504 -0.50985986 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -2.4664862 -0.74801016 0. ] [ 0. 0.15588795 0.87469447 ... 0. 0. 0. ] [ 0. -0.8778024 2.0958133 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.2469683 0.4859153 0. ] [ 0. 0. 0. ... -1.2289492 0.4578551 0. ] [ 0. 0.7830677 0.3723837 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.75603473 0.9032738 0. ] [ 0. -0.06533849 0.45355147 ... 0. 0. 0. ] [ 0. -0.33276138 -1.8060178 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.43785602 -0.5958775 0. ] [ 0. 0. 0. ... -0.27494282 -0.33434358 0. ] [ 0. 0.3233017 0.21224265 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.5138698 0.8223452 0. ] [ 0. 2.2687972 -1.0008074 ... 0. 0. 0. ] [ 0. 0.9268584 -1.6294987 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.7443587 0.5794382 0. ] [ 0. 0. 0. ... 0.22763003 -0.7709909 0. ] [ 0. 0.78348887 -0.05308081 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.38507003 -1.1064844 0. ] [ 0. -0.32792723 0.6935566 ... 0. 0. 0. ] [ 0. -0.4357316 -0.5743074 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:2 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4769.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:3 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4771.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... 0.13598281 -0.15503569 1.7961638 ] [ 0. 0. 0. ... -0.15503569 1.7961638 0.07821102] [ 0. 0. 0. ... 1.7961638 0.07821102 0. ] ... [ 0. -1.4524177 -0.64116454 ... 0. 0. 0. ] [-1.4524177 -0.64116454 0.13542672 ... 0. 0. 0. ] [-0.64116454 0.13542672 -0.03934936 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.3534895 -0.14316729 -1.3654449 ] [ 0. 0. 0. ... -0.14316729 -1.3654449 1.1962895 ] [ 0. 0. 0. ... -1.3654449 1.1962895 0. ] ... [ 0. -0.8708684 0.98145586 ... 0. 0. 0. ] [-0.8708684 0.98145586 0.03044327 ... 0. 0. 0. ] [ 0.98145586 0.03044327 -0.87750906 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.4638017 0.23230745 0.8017409 ] [ 0. 0. 0. ... 0.23230745 0.8017409 1.6731049 ] [ 0. 0. 0. ... 0.8017409 1.6731049 0. ] ... [ 0. 0.32853326 1.091393 ... 0. 0. 0. ] [ 0.32853326 1.091393 -0.6202815 ... 0. 0. 0. ] [ 1.091393 -0.6202815 2.4443953 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.6046364 0.04405478 -1.1245456 ] [ 0. 0. 0. ... 0.04405478 -1.1245456 0.10745591] [ 0. 0. 0. ... -1.1245456 0.10745591 0. ] ... [ 0. 0.0826837 0.4746034 ... 0. 0. 0. ] [ 0.0826837 0.4746034 -0.10225607 ... 0. 0. 0. ] [ 0.4746034 -0.10225607 -1.364867 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1682669 0.3091305 0.7248036 ] [ 0. 0. 0. ... 0.3091305 0.7248036 0.7273538 ] [ 0. 0. 0. ... 0.7248036 0.7273538 0. ] ... [ 0. 0.8549284 -1.2545456 ... 0. 0. 0. ] [ 0.8549284 -1.2545456 -2.3395054 ... 0. 0. 0. ] [-1.2545456 -2.3395054 -0.18498862 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.78023046 -1.9439795 0.7345965 ] [ 0. 0. 0. ... -1.9439795 0.7345965 -1.2722238 ] [ 0. 0. 0. ... 
0.7345965 -1.2722238 0. ] ... [ 0. -0.42112368 0.2563582 ... 0. 0. 0. ] [-0.42112368 0.2563582 -2.3893652 ... 0. 0. 0. ] [ 0.2563582 -2.3893652 -0.74190384 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.13598281 -0.15503569 1.7961638 ] [ 0. 0. 0. ... -0.15503569 1.7961638 0.07821102] [ 0. 0. 0. ... 1.7961638 0.07821102 0. ] ... [ 0. -1.4524177 -0.64116454 ... 0. 0. 0. ] [-1.4524177 -0.64116454 0.13542672 ... 0. 0. 0. ] [-0.64116454 0.13542672 -0.03934936 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.3534895 -0.14316729 -1.3654449 ] [ 0. 0. 0. ... -0.14316729 -1.3654449 1.1962895 ] [ 0. 0. 0. ... -1.3654449 1.1962895 0. ] ... [ 0. -0.8708684 0.98145586 ... 0. 0. 0. ] [-0.8708684 0.98145586 0.03044327 ... 0. 0. 0. ] [ 0.98145586 0.03044327 -0.87750906 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.4638017 0.23230745 0.8017409 ] [ 0. 0. 0. ... 0.23230745 0.8017409 1.6731049 ] [ 0. 0. 0. ... 0.8017409 1.6731049 0. ] ... [ 0. 0.32853326 1.091393 ... 0. 0. 0. ] [ 0.32853326 1.091393 -0.6202815 ... 0. 0. 0. ] [ 1.091393 -0.6202815 2.4443953 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.6046364 0.04405478 -1.1245456 ] [ 0. 0. 0. ... 0.04405478 -1.1245456 0.10745591] [ 0. 0. 0. ... -1.1245456 0.10745591 0. ] ... [ 0. 0.0826837 0.4746034 ... 0. 0. 0. ] [ 0.0826837 0.4746034 -0.10225607 ... 0. 0. 0. ] [ 0.4746034 -0.10225607 -1.364867 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1682669 0.3091305 0.7248036 ] [ 0. 0. 0. ... 0.3091305 0.7248036 0.7273538 ] [ 0. 0. 0. ... 0.7248036 0.7273538 0. ] ... [ 0. 0.8549284 -1.2545456 ... 0. 0. 0. ] [ 0.8549284 -1.2545456 -2.3395054 ... 0. 0. 0. ] [-1.2545456 -2.3395054 -0.18498862 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.78023046 -1.9439795 0.7345965 ] [ 0. 0. 0. ... -1.9439795 0.7345965 -1.2722238 ] [ 0. 0. 0. ... 0.7345965 -1.2722238 0. ] ... [ 0. -0.42112368 0.2563582 ... 0. 0. 0. ] [-0.42112368 0.2563582 -2.3893652 ... 0. 0. 0. ] [ 0.2563582 -2.3893652 -0.74190384 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4773.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -2.593224 2.033205 -0.5542049 ] [ 0. 0. 0. ... 2.033205 -0.5542049 0. ] [ 0. 0.7452444 -0.54109037 ... -0.17876394 -1.5190538 0.21261905] ... [-0.7379832 -2.203351 1.3664248 ... -1.0175973 0.3382555 0. ] [ 0. -1.2252128 0.62910026 ... 0. 0. 0. ] [-1.2252128 0.62910026 -0.8967271 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -2.5153883 -0.30285043 1.6369605 ] [ 0. 0. 0. ... -0.30285043 1.6369605 0. ] [ 0. -1.7729377 1.2201589 ... -0.84785485 0.87897617 -0.00669486] ... [-1.2786895 -1.5918025 0.5404905 ... -0.46729094 -0.9957436 0. ] [ 0. 1.7500678 -0.73185295 ... 0. 0. 0. ] [ 1.7500678 -0.73185295 -1.864076 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8795613 0.3046449 0.93656003] [ 0. 0. 0. ... 0.3046449 0.93656003 0. ] [ 0. 1.2644728 0.6430679 ... 0.35354272 -0.45662946 1.6647872 ] ... [-1.6346775 -0.40620446 1.3852612 ... -0.44039083 -1.5397754 0. ] [ 0. 0.5330003 0.10060426 ... 0. 0. 0. ] [ 0.5330003 0.10060426 -0.00350192 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.1986874 1.0133697 0.28575936] [ 0. 0. 0. ... 1.0133697 0.28575936 0. ] [ 0. 0.33446962 0.804989 ... 0.24236232 -1.2436279 -1.0807025 ] ... [ 0.9802791 -0.4974679 -0.14749098 ... 0.34723324 0.16672465 0. ] [ 0. 1.1055676 0.98328525 ... 0. 0. 0. ] [ 1.1055676 0.98328525 -0.60255617 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.58965355 0.2534408 0.37621748] [ 0. 0. 0. ... 0.2534408 0.37621748 0. ] [ 0. -2.2011425 0.3062703 ... 1.7089906 0.28805888 0.53929454] ... [ 0.51397985 -0.7918868 -0.32899866 ... -0.09057377 -1.323107 0. ] [ 0. 
-0.07830427 0.36903775 ... 0. 0. 0. ] [-0.07830427 0.36903775 -1.1266906 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.16961516 -0.9517007 -0.16745627] [ 0. 0. 0. ... -0.9517007 -0.16745627 0. ] [ 0. 0.9256875 0.13806002 ... 1.5443412 -0.86543506 -0.1252042 ] ... [ 0.11739773 -2.0819566 -0.30916476 ... 0.32197592 1.043828 0. ] [ 0. 1.387849 -0.39107007 ... 0. 0. 0. ] [ 1.387849 -0.39107007 0.5744685 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -2.593224 2.033205 -0.5542049 ] [ 0. 0. 0. ... 2.033205 -0.5542049 0. ] [ 0. 0.7452444 -0.54109037 ... -0.17876394 -1.5190538 0.21261905] ... [-0.7379832 -2.203351 1.3664248 ... -1.0175973 0.3382555 0. ] [ 0. -1.2252128 0.62910026 ... 0. 0. 0. ] [-1.2252128 0.62910026 -0.8967271 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -2.5153883 -0.30285043 1.6369605 ] [ 0. 0. 0. ... -0.30285043 1.6369605 0. ] [ 0. -1.7729377 1.2201589 ... -0.84785485 0.87897617 -0.00669486] ... [-1.2786895 -1.5918025 0.5404905 ... -0.46729094 -0.9957436 0. ] [ 0. 1.7500678 -0.73185295 ... 0. 0. 0. ] [ 1.7500678 -0.73185295 -1.864076 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8795613 0.3046449 0.93656003] [ 0. 0. 0. ... 0.3046449 0.93656003 0. ] [ 0. 1.2644728 0.6430679 ... 0.35354272 -0.45662946 1.6647872 ] ... [-1.6346775 -0.40620446 1.3852612 ... -0.44039083 -1.5397754 0. ] [ 0. 0.5330003 0.10060426 ... 0. 0. 0. ] [ 0.5330003 0.10060426 -0.00350192 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.1986874 1.0133697 0.28575936] [ 0. 0. 0. ... 1.0133697 0.28575936 0. ] [ 0. 0.33446962 0.804989 ... 0.24236232 -1.2436279 -1.0807025 ] ... [ 0.9802791 -0.4974679 -0.14749098 ... 0.34723324 0.16672465 0. ] [ 0. 1.1055676 0.98328525 ... 0. 0. 0. ] [ 1.1055676 0.98328525 -0.60255617 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.58965355 0.2534408 0.37621748] [ 0. 0. 0. ... 0.2534408 0.37621748 0. ] [ 0. -2.2011425 0.3062703 ... 1.7089906 0.28805888 0.53929454] ... [ 0.51397985 -0.7918868 -0.32899866 ... -0.09057377 -1.323107 0. ] [ 0. -0.07830427 0.36903775 ... 0. 0. 0. ] [-0.07830427 0.36903775 -1.1266906 ... 0. 0. 
0. ]] [[ 0. 0. 0. ... -0.16961516 -0.9517007 -0.16745627] [ 0. 0. 0. ... -0.9517007 -0.16745627 0. ] [ 0. 0.9256875 0.13806002 ... 1.5443412 -0.86543506 -0.1252042 ] ... [ 0.11739773 -2.0819566 -0.30916476 ... 0.32197592 1.043828 0. ] [ 0. 1.387849 -0.39107007 ... 0. 0. 0. ] [ 1.387849 -0.39107007 0.5744685 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4775.aten_im2col, %x.1 : Tensor): %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... 0.47472802 1.4006855 -0.26781687] [ 0. 0. 0. ... 1.4006855 -0.26781687 0.9957826 ] [ 0. 0. 0. ... -0.26781687 0.9957826 0. ] ... [ 0. 0.13499719 1.1196138 ... 0. 0. 0. ] [ 0.13499719 1.1196138 -0.13277598 ... 0. 0. 0. ] [ 1.1196138 -0.13277598 0.9661243 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.2853508 -0.42859045 1.3566762 ] [ 0. 0. 0. ... -0.42859045 1.3566762 0.35138685] [ 0. 0. 0. ... 1.3566762 0.35138685 0. ] ... [ 0. 0.50378793 -0.72135454 ... 0. 0. 0. ] [ 0.50378793 -0.72135454 -1.5821629 ... 0. 0. 0. ] [-0.72135454 -1.5821629 -0.634016 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.7812243 0.9575449 0.82002413] [ 0. 0. 0. ... 0.9575449 0.82002413 -0.77863145] [ 0. 0. 0. ... 0.82002413 -0.77863145 0. ] ... [ 0. -0.8946488 1.4108789 ... 0. 0. 0. ] [-0.8946488 1.4108789 0.9640152 ... 0. 0. 0. ] [ 1.4108789 0.9640152 0.69756263 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.7457639 -1.4944059 -1.2291865 ] [ 0. 0. 0. ... -1.4944059 -1.2291865 0.49508464] [ 0. 0. 0. ... -1.2291865 0.49508464 0. ] ... [ 0. 0.56902444 0.28023916 ... 0. 0. 0. ] [ 0.56902444 0.28023916 1.2486234 ... 0. 0. 0. ] [ 0.28023916 1.2486234 -0.4934971 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.1176789 0.6840346 -0.55243284] [ 0. 0. 0. ... 0.6840346 -0.55243284 -1.0106584 ] [ 0. 0. 0. ... -0.55243284 -1.0106584 0. ] ... [ 0. 1.200434 0.43654087 ... 0. 0. 0. ] [ 1.200434 0.43654087 1.5526122 ... 0. 0. 0. ] [ 0.43654087 1.5526122 0.50648975 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.81356674 0.6351308 -0.34571022] [ 0. 0. 0. ... 
0.6351308 -0.34571022 -0.03620907] [ 0. 0. 0. ... -0.34571022 -0.03620907 0. ] ... [ 0. -1.4697586 -0.08665514 ... 0. 0. 0. ] [-1.4697586 -0.08665514 -0.08701827 ... 0. 0. 0. ] [-0.08665514 -0.08701827 -0.07535534 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.47472802 1.4006855 -0.26781687] [ 0. 0. 0. ... 1.4006855 -0.26781687 0.9957826 ] [ 0. 0. 0. ... -0.26781687 0.9957826 0. ] ... [ 0. 0.13499719 1.1196138 ... 0. 0. 0. ] [ 0.13499719 1.1196138 -0.13277598 ... 0. 0. 0. ] [ 1.1196138 -0.13277598 0.9661243 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.2853508 -0.42859045 1.3566762 ] [ 0. 0. 0. ... -0.42859045 1.3566762 0.35138685] [ 0. 0. 0. ... 1.3566762 0.35138685 0. ] ... [ 0. 0.50378793 -0.72135454 ... 0. 0. 0. ] [ 0.50378793 -0.72135454 -1.5821629 ... 0. 0. 0. ] [-0.72135454 -1.5821629 -0.634016 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.7812243 0.9575449 0.82002413] [ 0. 0. 0. ... 0.9575449 0.82002413 -0.77863145] [ 0. 0. 0. ... 0.82002413 -0.77863145 0. ] ... [ 0. -0.8946488 1.4108789 ... 0. 0. 0. ] [-0.8946488 1.4108789 0.9640152 ... 0. 0. 0. ] [ 1.4108789 0.9640152 0.69756263 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.7457639 -1.4944059 -1.2291865 ] [ 0. 0. 0. ... -1.4944059 -1.2291865 0.49508464] [ 0. 0. 0. ... -1.2291865 0.49508464 0. ] ... [ 0. 0.56902444 0.28023916 ... 0. 0. 0. ] [ 0.56902444 0.28023916 1.2486234 ... 0. 0. 0. ] [ 0.28023916 1.2486234 -0.4934971 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.1176789 0.6840346 -0.55243284] [ 0. 0. 0. ... 0.6840346 -0.55243284 -1.0106584 ] [ 0. 0. 0. ... -0.55243284 -1.0106584 0. ] ... [ 0. 1.200434 0.43654087 ... 0. 0. 0. ] [ 1.200434 0.43654087 1.5526122 ... 0. 0. 0. ] [ 0.43654087 1.5526122 0.50648975 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.81356674 0.6351308 -0.34571022] [ 0. 0. 0. ... 0.6351308 -0.34571022 -0.03620907] [ 0. 0. 0. ... -0.34571022 -0.03620907 0. ] ... [ 0. -1.4697586 -0.08665514 ... 0. 0. 0. ] [-1.4697586 -0.08665514 -0.08701827 ... 0. 0. 0. ] [-0.08665514 -0.08701827 -0.07535534 ... 0. 0. 0. ]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4777.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.43527684 -1.2248771 1.002887 ] [ 0. 0. 0. ... -1.2248771 1.002887 0. ] [ 0. 1.0950744 0.64025295 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.847498 -0.14394006 0. ] [ 0. 0.20356181 0.68717045 ... 0. 0. 0. ] [ 0.20356181 0.68717045 1.0556474 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.8886809 1.3702407 -0.12354785] [ 0. 0. 0. ... 1.3702407 -0.12354785 0. ] [ 0. 1.2363989 1.4059011 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.35474837 -1.7930807 0. ] [ 0. 1.3910568 -0.36292538 ... 0. 0. 0. ] [ 1.3910568 -0.36292538 -0.0096327 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.00420358 0.79905945 0.8700734 ] [ 0. 0. 0. ... 0.79905945 0.8700734 0. ] [ 0. 0.51130164 0.08242629 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.542327 1.0336555 0. ] [ 0. -0.27427644 0.73464954 ... 0. 0. 0. ] [-0.27427644 0.73464954 -1.0988878 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.01799322 -0.63217634 -0.40025643] [ 0. 0. 0. ... -0.63217634 -0.40025643 0. ] [ 0. -1.0794903 -2.167279 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.3282611 -0.6338962 0. ] [ 0. -0.07853805 -2.0649395 ... 0. 0. 0. ] [-0.07853805 -2.0649395 1.4021015 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.7246837 -0.7522637 -1.3807448 ] [ 0. 0. 0. ... -0.7522637 -1.3807448 0. ] [ 0. 1.8820206 -0.46085465 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 2.271283 0.87016344 0. ] [ 0. -0.7285929 -0.24433179 ... 0. 0. 0. ] [-0.7285929 -0.24433179 -0.68425846 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.4924597 1.7409067 -1.5310124 ] [ 0. 0. 0. ... 1.7409067 -1.5310124 0. ] [ 0. 0.8332943 1.1771218 ... 0. 0. 0. ] ... 
[ 0. 0. 0. ... 0.43641123 -0.97948635 0. ] [ 0. 1.1439561 -0.49237594 ... 0. 0. 0. ] [ 1.1439561 -0.49237594 -0.8101618 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.43527684 -1.2248771 1.002887 ] [ 0. 0. 0. ... -1.2248771 1.002887 0. ] [ 0. 1.0950744 0.64025295 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.847498 -0.14394006 0. ] [ 0. 0.20356181 0.68717045 ... 0. 0. 0. ] [ 0.20356181 0.68717045 1.0556474 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.8886809 1.3702407 -0.12354785] [ 0. 0. 0. ... 1.3702407 -0.12354785 0. ] [ 0. 1.2363989 1.4059011 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.35474837 -1.7930807 0. ] [ 0. 1.3910568 -0.36292538 ... 0. 0. 0. ] [ 1.3910568 -0.36292538 -0.0096327 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.00420358 0.79905945 0.8700734 ] [ 0. 0. 0. ... 0.79905945 0.8700734 0. ] [ 0. 0.51130164 0.08242629 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.542327 1.0336555 0. ] [ 0. -0.27427644 0.73464954 ... 0. 0. 0. ] [-0.27427644 0.73464954 -1.0988878 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.01799322 -0.63217634 -0.40025643] [ 0. 0. 0. ... -0.63217634 -0.40025643 0. ] [ 0. -1.0794903 -2.167279 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.3282611 -0.6338962 0. ] [ 0. -0.07853805 -2.0649395 ... 0. 0. 0. ] [-0.07853805 -2.0649395 1.4021015 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.7246837 -0.7522637 -1.3807448 ] [ 0. 0. 0. ... -0.7522637 -1.3807448 0. ] [ 0. 1.8820206 -0.46085465 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 2.271283 0.87016344 0. ] [ 0. -0.7285929 -0.24433179 ... 0. 0. 0. ] [-0.7285929 -0.24433179 -0.68425846 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.4924597 1.7409067 -1.5310124 ] [ 0. 0. 0. ... 1.7409067 -1.5310124 0. ] [ 0. 0.8332943 1.1771218 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.43641123 -0.97948635 0. ] [ 0. 1.1439561 -0.49237594 ... 0. 0. 0. ] [ 1.1439561 -0.49237594 -0.8101618 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:3 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4779.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4781.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -1.8077337 0.5845573 1.004125 ] [ 0. 0. 0. ... -0.22509117 1.7095007 -1.3081888 ] [ 0. 0. 0. ... 0.5764272 -0.08345748 0. ] ... [ 0. 0. 0. ... -0.23359616 -0.07513674 0.84725225] [ 0. 0. 0. ... -1.1501601 -0.03941115 -0.2432965 ] [ 0. 0. 0. ... 0.9863367 0.42311558 0. ]] [[ 0. 0. 0. ... -0.7789627 0.9469243 0.43512547] [ 0. 0. 0. ... 0.30864084 -0.8079889 1.563545 ] [ 0. 0. 0. ... -1.3026199 -0.775494 0. ] ... [ 0. 0. 0. ... -2.0508726 -2.5423343 0.06516896] [ 0. 0. 0. ... -0.3617163 1.3473818 -0.1547195 ] [ 0. 0. 0. ... -0.738891 1.139373 0. ]] [[ 0. 0. 0. ... -0.82420176 -1.3474047 0.3299887 ] [ 0. 0. 0. ... 0.38529503 -0.5456726 0.07515629] [ 0. 0. 0. ... -0.12141696 -0.19845852 0. ] ... [ 0. 0. 0. ... -0.39850163 1.6467187 -0.7022656 ] [ 0. 0. 0. ... 1.0288521 -0.34046236 -0.3691114 ] [ 0. 0. 0. ... 0.3569002 1.1243037 0. ]] ... [[ 0. 0. 0. ... 0.9376099 -1.8524451 -0.28598416] [ 0. 0. 0. ... 0.01627238 -1.4553396 0.8116807 ] [ 0. 0. 0. ... -0.37611648 -0.30102813 0. ] ... [ 0. 0. 0. ... 0.9046274 1.0908308 -0.88477844] [ 0. 0. 0. ... -0.4885983 0.05443337 -0.6694119 ] [ 0. 0. 0. ... -0.5672779 -0.08248246 0. ]] [[ 0. 0. 0. ... 1.8216707 -0.00334586 -0.05200932] [ 0. 0. 0. ... -0.11456563 -0.15728363 0.5134958 ] [ 0. 0. 0. ... 0.24364237 1.5767632 0. ] ... [ 0. 0. 0. ... 1.9837846 1.0594858 1.1165754 ] [ 0. 0. 0. ... -0.13373421 0.62209845 -0.6352582 ] [ 0. 0. 0. ... -2.28925 2.2879262 0. ]] [[ 0. 0. 0. ... -0.20286635 -0.743292 0.0500922 ] [ 0. 0. 0. ... 
-0.6488293 -0.6785661 -0.48590636] [ 0. 0. 0. ... -0.99022686 -0.77213454 0. ] ... [ 0. 0. 0. ... 1.3108745 0.6174837 0.28394526] [ 0. 0. 0. ... 1.2134002 -1.7040288 0.5951131 ] [ 0. 0. 0. ... 0.64803684 0.41682023 0. ]]]; ov_res: [[[ 0. 0. 0. ... -1.8077337 0.5845573 1.004125 ] [ 0. 0. 0. ... -0.22509117 1.7095007 -1.3081888 ] [ 0. 0. 0. ... 0.5764272 -0.08345748 0. ] ... [ 0. 0. 0. ... -0.23359616 -0.07513674 0.84725225] [ 0. 0. 0. ... -1.1501601 -0.03941115 -0.2432965 ] [ 0. 0. 0. ... 0.9863367 0.42311558 0. ]] [[ 0. 0. 0. ... -0.7789627 0.9469243 0.43512547] [ 0. 0. 0. ... 0.30864084 -0.8079889 1.563545 ] [ 0. 0. 0. ... -1.3026199 -0.775494 0. ] ... [ 0. 0. 0. ... -2.0508726 -2.5423343 0.06516896] [ 0. 0. 0. ... -0.3617163 1.3473818 -0.1547195 ] [ 0. 0. 0. ... -0.738891 1.139373 0. ]] [[ 0. 0. 0. ... -0.82420176 -1.3474047 0.3299887 ] [ 0. 0. 0. ... 0.38529503 -0.5456726 0.07515629] [ 0. 0. 0. ... -0.12141696 -0.19845852 0. ] ... [ 0. 0. 0. ... -0.39850163 1.6467187 -0.7022656 ] [ 0. 0. 0. ... 1.0288521 -0.34046236 -0.3691114 ] [ 0. 0. 0. ... 0.3569002 1.1243037 0. ]] ... [[ 0. 0. 0. ... 0.9376099 -1.8524451 -0.28598416] [ 0. 0. 0. ... 0.01627238 -1.4553396 0.8116807 ] [ 0. 0. 0. ... -0.37611648 -0.30102813 0. ] ... [ 0. 0. 0. ... 0.9046274 1.0908308 -0.88477844] [ 0. 0. 0. ... -0.4885983 0.05443337 -0.6694119 ] [ 0. 0. 0. ... -0.5672779 -0.08248246 0. ]] [[ 0. 0. 0. ... 1.8216707 -0.00334586 -0.05200932] [ 0. 0. 0. ... -0.11456563 -0.15728363 0.5134958 ] [ 0. 0. 0. ... 0.24364237 1.5767632 0. ] ... [ 0. 0. 0. ... 1.9837846 1.0594858 1.1165754 ] [ 0. 0. 0. ... -0.13373421 0.62209845 -0.6352582 ] [ 0. 0. 0. ... -2.28925 2.2879262 0. ]] [[ 0. 0. 0. ... -0.20286635 -0.743292 0.0500922 ] [ 0. 0. 0. ... -0.6488293 -0.6785661 -0.48590636] [ 0. 0. 0. ... -0.99022686 -0.77213454 0. ] ... [ 0. 0. 0. ... 1.3108745 0.6174837 0.28394526] [ 0. 0. 0. ... 1.2134002 -1.7040288 0.5951131 ] [ 0. 0. 0. ... 0.64803684 0.41682023 0. ]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4783.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.6268953 1.527348 0. ] [ 0. 0. 0. ... -0.21821137 -1.0413063 0. ] [ 0. 0. 0. ... -0.0529063 0.20689 0. ] ... [ 0. 0. 0. ... -1.1752759 -1.5788324 0. ] [ 0. 0.20669003 -1.1757157 ... 0. 0. 0. ] [ 0. -0.72098076 -1.7531933 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.81760156 -0.6120433 0. ] [ 0. 0. 0. ... -0.8037491 1.3286234 0. ] [ 0. 0. 0. ... 0.5824169 0.16351208 0. ] ... [ 0. 0. 0. ... 0.32029232 0.87338513 0. ] [ 0. 0.03834468 -1.8090974 ... 0. 0. 0. ] [ 0. 0.02975913 -0.5153194 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.6806479 -0.31492528 0. ] [ 0. 0. 0. ... -0.457862 1.7354704 0. ] [ 0. 0. 0. ... -0.28727525 -0.7204237 0. ] ... [ 0. 0. 0. ... 0.11774536 1.1488346 0. ] [ 0. 0.52958107 0.30403417 ... 0. 0. 0. ] [ 0. 0.7888241 1.2117938 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.8754036 -1.3526326 0. ] [ 0. 0. 0. ... -1.2260569 -0.9485643 0. ] [ 0. 0. 0. ... 0.55595773 -0.2951322 0. ] ... [ 0. 0. 0. ... -1.53005 -0.5291081 0. ] [ 0. -0.4682374 0.46513844 ... 0. 0. 0. ] [ 0. 2.3746789 -0.42810345 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.872356 1.0266424 0. ] [ 0. 0. 0. ... -1.533753 0.4853277 0. ] [ 0. 0. 0. ... -0.71244776 -0.16597962 0. ] ... [ 0. 0. 0. ... -0.7641962 0.21769446 0. ] [ 0. -0.6548002 -0.10064652 ... 0. 0. 0. ] [ 0. -1.2210847 -0.8972209 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.9470995 0.961524 0. ] [ 0. 0. 0. ... -1.063305 0.9146801 0. ] [ 0. 0. 0. ... 1.4015259 1.4110122 0. ] ... [ 0. 0. 0. ... -1.0050706 -0.53713757 0. ] [ 0. 
0.0606935 -0.95232576 ... 0. 0. 0. ] [ 0. 0.3918499 0.7220399 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.6268953 1.527348 0. ] [ 0. 0. 0. ... -0.21821137 -1.0413063 0. ] [ 0. 0. 0. ... -0.0529063 0.20689 0. ] ... [ 0. 0. 0. ... -1.1752759 -1.5788324 0. ] [ 0. 0.20669003 -1.1757157 ... 0. 0. 0. ] [ 0. -0.72098076 -1.7531933 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.81760156 -0.6120433 0. ] [ 0. 0. 0. ... -0.8037491 1.3286234 0. ] [ 0. 0. 0. ... 0.5824169 0.16351208 0. ] ... [ 0. 0. 0. ... 0.32029232 0.87338513 0. ] [ 0. 0.03834468 -1.8090974 ... 0. 0. 0. ] [ 0. 0.02975913 -0.5153194 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.6806479 -0.31492528 0. ] [ 0. 0. 0. ... -0.457862 1.7354704 0. ] [ 0. 0. 0. ... -0.28727525 -0.7204237 0. ] ... [ 0. 0. 0. ... 0.11774536 1.1488346 0. ] [ 0. 0.52958107 0.30403417 ... 0. 0. 0. ] [ 0. 0.7888241 1.2117938 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.8754036 -1.3526326 0. ] [ 0. 0. 0. ... -1.2260569 -0.9485643 0. ] [ 0. 0. 0. ... 0.55595773 -0.2951322 0. ] ... [ 0. 0. 0. ... -1.53005 -0.5291081 0. ] [ 0. -0.4682374 0.46513844 ... 0. 0. 0. ] [ 0. 2.3746789 -0.42810345 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.872356 1.0266424 0. ] [ 0. 0. 0. ... -1.533753 0.4853277 0. ] [ 0. 0. 0. ... -0.71244776 -0.16597962 0. ] ... [ 0. 0. 0. ... -0.7641962 0.21769446 0. ] [ 0. -0.6548002 -0.10064652 ... 0. 0. 0. ] [ 0. -1.2210847 -0.8972209 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.9470995 0.961524 0. ] [ 0. 0. 0. ... -1.063305 0.9146801 0. ] [ 0. 0. 0. ... 1.4015259 1.4110122 0. ] ... [ 0. 0. 0. ... -1.0050706 -0.53713757 0. ] [ 0. 0.0606935 -0.95232576 ... 0. 0. 0. ] [ 0. 0.3918499 0.7220399 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4785.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.69584996 -0.20783795 0.06113558] [ 0. 0. 0. ... -0.02101137 -0.9560643 -0.16607456] [ 0. 0. 0. ... 1.3170733 0.48653996 0. ] ... [ 0. 3.0135732 0.73119307 ... 0. 0. 0. ] [ 0. -0.5962885 1.0040648 ... 0. 0. 0. ] [-0.7373292 -0.21365057 -1.606974 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.07555185 -3.3918233 -0.38569167] [ 0. 0. 0. ... -1.18976 3.947383 0.30787128] [ 0. 0. 0. ... -1.9227976 -1.0791974 0. ] ... [ 0. 0.5610656 -0.74860406 ... 0. 0. 0. ] [ 0. 0.28020272 1.3835028 ... 0. 0. 0. ] [-2.165241 -0.47848332 -1.3272538 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.74356765 0.64887035 0.5384731 ] [ 0. 0. 0. ... 2.1777549 -2.46573 1.4015574 ] [ 0. 0. 0. ... 0.24406044 1.4771179 0. ] ... [ 0. -0.08647512 1.1361495 ... 0. 0. 0. ] [ 0. -0.4491118 0.25255272 ... 0. 0. 0. ] [-0.12525198 -1.0913639 0.3398359 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.8438323 -1.8533621 0.4476527 ] [ 0. 0. 0. ... -0.56322867 0.86656547 -0.97746146] [ 0. 0. 0. ... 0.6807764 -1.1704911 0. ] ... [ 0. 0.8174509 -3.0839305 ... 0. 0. 0. ] [ 0. -1.3541243 0.10756363 ... 0. 0. 0. ] [-1.6701733 0.14724325 0.57469493 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.7829292 0.29360145 -0.22203685] [ 0. 0. 0. ... 0.25956833 1.2065724 -2.574128 ] [ 0. 0. 0. ... -0.9500881 -0.96410733 0. ] ... [ 0. -1.1530453 0.8186773 ... 0. 0. 0. ] [ 0. 0.6635222 -0.26063275 ... 0. 0. 0. ] [-0.30114475 0.33938536 0.86708516 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5364011 -1.7651601 1.4613574 ] [ 0. 0. 0. ... 
-0.01277823 0.01693679 -0.9343268 ] [ 0. 0. 0. ... -0.35498002 1.6601088 0. ] ... [ 0. 0.28199226 0.40244493 ... 0. 0. 0. ] [ 0. 1.0586416 -1.2312411 ... 0. 0. 0. ] [-2.4558642 -0.3649474 -0.7195949 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.69584996 -0.20783795 0.06113558] [ 0. 0. 0. ... -0.02101137 -0.9560643 -0.16607456] [ 0. 0. 0. ... 1.3170733 0.48653996 0. ] ... [ 0. 3.0135732 0.73119307 ... 0. 0. 0. ] [ 0. -0.5962885 1.0040648 ... 0. 0. 0. ] [-0.7373292 -0.21365057 -1.606974 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.07555185 -3.3918233 -0.38569167] [ 0. 0. 0. ... -1.18976 3.947383 0.30787128] [ 0. 0. 0. ... -1.9227976 -1.0791974 0. ] ... [ 0. 0.5610656 -0.74860406 ... 0. 0. 0. ] [ 0. 0.28020272 1.3835028 ... 0. 0. 0. ] [-2.165241 -0.47848332 -1.3272538 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.74356765 0.64887035 0.5384731 ] [ 0. 0. 0. ... 2.1777549 -2.46573 1.4015574 ] [ 0. 0. 0. ... 0.24406044 1.4771179 0. ] ... [ 0. -0.08647512 1.1361495 ... 0. 0. 0. ] [ 0. -0.4491118 0.25255272 ... 0. 0. 0. ] [-0.12525198 -1.0913639 0.3398359 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.8438323 -1.8533621 0.4476527 ] [ 0. 0. 0. ... -0.56322867 0.86656547 -0.97746146] [ 0. 0. 0. ... 0.6807764 -1.1704911 0. ] ... [ 0. 0.8174509 -3.0839305 ... 0. 0. 0. ] [ 0. -1.3541243 0.10756363 ... 0. 0. 0. ] [-1.6701733 0.14724325 0.57469493 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.7829292 0.29360145 -0.22203685] [ 0. 0. 0. ... 0.25956833 1.2065724 -2.574128 ] [ 0. 0. 0. ... -0.9500881 -0.96410733 0. ] ... [ 0. -1.1530453 0.8186773 ... 0. 0. 0. ] [ 0. 0.6635222 -0.26063275 ... 0. 0. 0. ] [-0.30114475 0.33938536 0.86708516 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5364011 -1.7651601 1.4613574 ] [ 0. 0. 0. ... -0.01277823 0.01693679 -0.9343268 ] [ 0. 0. 0. ... -0.35498002 1.6601088 0. ] ... [ 0. 0.28199226 0.40244493 ... 0. 0. 0. ] [ 0. 1.0586416 -1.2312411 ... 0. 0. 0. ] [-2.4558642 -0.3649474 -0.7195949 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4787.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.7242886 -0.3321397 0. ] [ 0. 0. 0. ... -1.0398935 -1.1665686 0. ] [ 0. 0. 0. ... 0.4755221 -1.1320536 0. ] ... [ 0. 0. 0. ... 0.6105999 1.0382206 0. ] [ 0. 0. 0. ... -0.5936054 -0.10168505 0. ] [ 0. 0. 0. ... -0.18813404 0.05289583 0. ]] [[ 0. 0. 0. ... -0.28088805 -0.21693395 0. ] [ 0. 0. 0. ... -1.2347447 0.40049437 0. ] [ 0. 0. 0. ... 0.9614324 1.1922132 0. ] ... [ 0. 0. 0. ... 0.74131125 0.3939308 0. ] [ 0. 0. 0. ... 0.68079656 -1.3217744 0. ] [ 0. 0. 0. ... -1.3335205 0.6152047 0. ]] [[ 0. 0. 0. ... -0.28462002 -0.8461834 0. ] [ 0. 0. 0. ... 0.34687006 -1.8468076 0. ] [ 0. 0. 0. ... 0.3411756 -1.2029812 0. ] ... [ 0. 0. 0. ... -1.1843308 1.2731513 0. ] [ 0. 0. 0. ... -0.41552112 -0.06227789 0. ] [ 0. 0. 0. ... 0.01257436 -1.1798598 0. ]] ... [[ 0. 0. 0. ... 0.2675456 -0.32493532 0. ] [ 0. 0. 0. ... 0.7605147 0.6733322 0. ] [ 0. 0. 0. ... 2.5641785 -1.0807128 0. ] ... [ 0. 0. 0. ... 0.2594086 1.3980163 0. ] [ 0. 0. 0. ... 0.3011654 -1.7340434 0. ] [ 0. 0. 0. ... -0.6576345 -0.32394204 0. ]] [[ 0. 0. 0. ... 1.2479622 2.0839956 0. ] [ 0. 0. 0. ... -1.0946114 -0.72592264 0. ] [ 0. 0. 0. ... 1.5474253 -1.3082551 0. ] ... [ 0. 0. 0. ... 2.7896688 -0.80717325 0. ] [ 0. 0. 0. ... -0.47831634 0.7206129 0. ] [ 0. 0. 0. ... 1.6775109 1.0833477 0. ]] [[ 0. 0. 0. ... 1.1235023 0.27023414 0. ] [ 0. 0. 0. ... -1.2294432 0.06865901 0. ] [ 0. 0. 0. ... 0.30309367 -0.33961505 0. ] ... [ 0. 0. 0. ... -1.0142015 -1.6632701 0. 
] [ 0. 0. 0. ... -0.00409235 -1.9305491 0. ] [ 0. 0. 0. ... -0.43402287 0.36458084 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.7242886 -0.3321397 0. ] [ 0. 0. 0. ... -1.0398935 -1.1665686 0. ] [ 0. 0. 0. ... 0.4755221 -1.1320536 0. ] ... [ 0. 0. 0. ... 0.6105999 1.0382206 0. ] [ 0. 0. 0. ... -0.5936054 -0.10168505 0. ] [ 0. 0. 0. ... -0.18813404 0.05289583 0. ]] [[ 0. 0. 0. ... -0.28088805 -0.21693395 0. ] [ 0. 0. 0. ... -1.2347447 0.40049437 0. ] [ 0. 0. 0. ... 0.9614324 1.1922132 0. ] ... [ 0. 0. 0. ... 0.74131125 0.3939308 0. ] [ 0. 0. 0. ... 0.68079656 -1.3217744 0. ] [ 0. 0. 0. ... -1.3335205 0.6152047 0. ]] [[ 0. 0. 0. ... -0.28462002 -0.8461834 0. ] [ 0. 0. 0. ... 0.34687006 -1.8468076 0. ] [ 0. 0. 0. ... 0.3411756 -1.2029812 0. ] ... [ 0. 0. 0. ... -1.1843308 1.2731513 0. ] [ 0. 0. 0. ... -0.41552112 -0.06227789 0. ] [ 0. 0. 0. ... 0.01257436 -1.1798598 0. ]] ... [[ 0. 0. 0. ... 0.2675456 -0.32493532 0. ] [ 0. 0. 0. ... 0.7605147 0.6733322 0. ] [ 0. 0. 0. ... 2.5641785 -1.0807128 0. ] ... [ 0. 0. 0. ... 0.2594086 1.3980163 0. ] [ 0. 0. 0. ... 0.3011654 -1.7340434 0. ] [ 0. 0. 0. ... -0.6576345 -0.32394204 0. ]] [[ 0. 0. 0. ... 1.2479622 2.0839956 0. ] [ 0. 0. 0. ... -1.0946114 -0.72592264 0. ] [ 0. 0. 0. ... 1.5474253 -1.3082551 0. ] ... [ 0. 0. 0. ... 2.7896688 -0.80717325 0. ] [ 0. 0. 0. ... -0.47831634 0.7206129 0. ] [ 0. 0. 0. ... 1.6775109 1.0833477 0. ]] [[ 0. 0. 0. ... 1.1235023 0.27023414 0. ] [ 0. 0. 0. ... -1.2294432 0.06865901 0. ] [ 0. 0. 0. ... 0.30309367 -0.33961505 0. ] ... [ 0. 0. 0. ... -1.0142015 -1.6632701 0. ] [ 0. 0. 0. ... -0.00409235 -1.9305491 0. ] [ 0. 0. 0. ... -0.43402287 0.36458084 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:3 - padding:[2, 3] - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4789.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4791.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-0.66171646 -2.187137 0.08171018 ... -0.5529804 -1.4079729 -2.2096233 ] [-2.187137 0.08171018 -0.2897196 ... -1.4079729 -2.2096233 1.0653235 ] [ 0.08171018 -0.2897196 0.4709446 ... -2.2096233 1.0653235 0.7915887 ] ... [ 0.36512846 2.3377426 0.5208538 ... -0.6442607 -2.5207596 -0.8092432 ] [ 2.3377426 0.5208538 -0.8789675 ... -2.5207596 -0.8092432 1.5001601 ] [ 0.5208538 -0.8789675 -0.54916996 ... -0.8092432 1.5001601 1.3705878 ]] [[ 0.43900383 -0.2863675 -0.1801236 ... -0.955245 0.02219436 0.20957254] [-0.2863675 -0.1801236 0.76424176 ... 0.02219436 0.20957254 0.68308944] [-0.1801236 0.76424176 1.3100104 ... 0.20957254 0.68308944 -0.6661993 ] ... [-0.9194836 -1.0674279 -1.0904881 ... 0.68267286 -0.15603796 0.41091418] [-1.0674279 -1.0904881 -0.1886928 ... -0.15603796 0.41091418 -1.2058762 ] [-1.0904881 -0.1886928 0.23640665 ... 0.41091418 -1.2058762 0.24480385]] [[-0.988324 -0.39206937 -1.702579 ... -0.29260325 0.84839976 -0.5143391 ] [-0.39206937 -1.702579 0.68815726 ... 0.84839976 -0.5143391 -1.7361184 ] [-1.702579 0.68815726 1.492728 ... -0.5143391 -1.7361184 0.7914463 ] ... [-0.06817564 -1.5039988 -0.9260404 ... 1.4991828 -0.2028641 0.10680804] [-1.5039988 -0.9260404 -0.10514835 ... -0.2028641 0.10680804 0.14546928] [-0.9260404 -0.10514835 0.1629526 ... 0.10680804 0.14546928 -0.36735228]] ... [[ 1.0448766 -0.47098097 -2.0435262 ... 1.4278175 -0.07281113 -0.50573593] [-0.47098097 -2.0435262 -0.90256155 ... -0.07281113 -0.50573593 0.97789174] [-2.0435262 -0.90256155 1.0649209 ... 
-0.50573593 0.97789174 -1.1367658 ] ... [ 1.7140012 -0.5253226 0.5218143 ... -0.8843791 -1.4945574 -0.3377883 ] [-0.5253226 0.5218143 -2.1439 ... -1.4945574 -0.3377883 -1.2449718 ] [ 0.5218143 -2.1439 -0.23741508 ... -0.3377883 -1.2449718 -0.28354675]] [[-0.8050569 0.36045334 -0.0443261 ... 1.1775609 0.6551297 1.5413584 ] [ 0.36045334 -0.0443261 -0.2755827 ... 0.6551297 1.5413584 -1.005411 ] [-0.0443261 -0.2755827 -1.1247199 ... 1.5413584 -1.005411 -0.30610743] ... [-0.82727075 1.0311351 -2.0413992 ... 0.2392935 -0.08346985 -0.44297737] [ 1.0311351 -2.0413992 -0.7953662 ... -0.08346985 -0.44297737 -1.1552683 ] [-2.0413992 -0.7953662 0.04909185 ... -0.44297737 -1.1552683 0.6403981 ]] [[-1.2191604 0.7203982 -1.6281097 ... 0.353669 -1.0019199 0.29440233] [ 0.7203982 -1.6281097 -1.1897258 ... -1.0019199 0.29440233 -0.3147694 ] [-1.6281097 -1.1897258 -1.7421732 ... 0.29440233 -0.3147694 0.5714048 ] ... [-0.26465818 -0.05585304 -1.7591811 ... 1.7455215 0.2065586 0.31975195] [-0.05585304 -1.7591811 -0.8354686 ... 0.2065586 0.31975195 -0.18629843] [-1.7591811 -0.8354686 -2.9817 ... 0.31975195 -0.18629843 -0.65419793]]]; ov_res: [[[-0.66171646 -2.187137 0.08171018 ... -0.5529804 -1.4079729 -2.2096233 ] [-2.187137 0.08171018 -0.2897196 ... -1.4079729 -2.2096233 1.0653235 ] [ 0.08171018 -0.2897196 0.4709446 ... -2.2096233 1.0653235 0.7915887 ] ... [ 0.36512846 2.3377426 0.5208538 ... -0.6442607 -2.5207596 -0.8092432 ] [ 2.3377426 0.5208538 -0.8789675 ... -2.5207596 -0.8092432 1.5001601 ] [ 0.5208538 -0.8789675 -0.54916996 ... -0.8092432 1.5001601 1.3705878 ]] [[ 0.43900383 -0.2863675 -0.1801236 ... -0.955245 0.02219436 0.20957254] [-0.2863675 -0.1801236 0.76424176 ... 0.02219436 0.20957254 0.68308944] [-0.1801236 0.76424176 1.3100104 ... 0.20957254 0.68308944 -0.6661993 ] ... [-0.9194836 -1.0674279 -1.0904881 ... 0.68267286 -0.15603796 0.41091418] [-1.0674279 -1.0904881 -0.1886928 ... -0.15603796 0.41091418 -1.2058762 ] [-1.0904881 -0.1886928 0.23640665 ... 
0.41091418 -1.2058762 0.24480385]] [[-0.988324 -0.39206937 -1.702579 ... -0.29260325 0.84839976 -0.5143391 ] [-0.39206937 -1.702579 0.68815726 ... 0.84839976 -0.5143391 -1.7361184 ] [-1.702579 0.68815726 1.492728 ... -0.5143391 -1.7361184 0.7914463 ] ... [-0.06817564 -1.5039988 -0.9260404 ... 1.4991828 -0.2028641 0.10680804] [-1.5039988 -0.9260404 -0.10514835 ... -0.2028641 0.10680804 0.14546928] [-0.9260404 -0.10514835 0.1629526 ... 0.10680804 0.14546928 -0.36735228]] ... [[ 1.0448766 -0.47098097 -2.0435262 ... 1.4278175 -0.07281113 -0.50573593] [-0.47098097 -2.0435262 -0.90256155 ... -0.07281113 -0.50573593 0.97789174] [-2.0435262 -0.90256155 1.0649209 ... -0.50573593 0.97789174 -1.1367658 ] ... [ 1.7140012 -0.5253226 0.5218143 ... -0.8843791 -1.4945574 -0.3377883 ] [-0.5253226 0.5218143 -2.1439 ... -1.4945574 -0.3377883 -1.2449718 ] [ 0.5218143 -2.1439 -0.23741508 ... -0.3377883 -1.2449718 -0.28354675]] [[-0.8050569 0.36045334 -0.0443261 ... 1.1775609 0.6551297 1.5413584 ] [ 0.36045334 -0.0443261 -0.2755827 ... 0.6551297 1.5413584 -1.005411 ] [-0.0443261 -0.2755827 -1.1247199 ... 1.5413584 -1.005411 -0.30610743] ... [-0.82727075 1.0311351 -2.0413992 ... 0.2392935 -0.08346985 -0.44297737] [ 1.0311351 -2.0413992 -0.7953662 ... -0.08346985 -0.44297737 -1.1552683 ] [-2.0413992 -0.7953662 0.04909185 ... -0.44297737 -1.1552683 0.6403981 ]] [[-1.2191604 0.7203982 -1.6281097 ... 0.353669 -1.0019199 0.29440233] [ 0.7203982 -1.6281097 -1.1897258 ... -1.0019199 0.29440233 -0.3147694 ] [-1.6281097 -1.1897258 -1.7421732 ... 0.29440233 -0.3147694 0.5714048 ] ... [-0.26465818 -0.05585304 -1.7591811 ... 1.7455215 0.2065586 0.31975195] [-0.05585304 -1.7591811 -0.8354686 ... 0.2065586 0.31975195 -0.18629843] [-1.7591811 -0.8354686 -2.9817 ... 0.31975195 -0.18629843 -0.65419793]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:1 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4793.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-1.4585092 -2.0222208 1.3657656 ... -0.32868642 -0.5175568 -0.18671359] [-2.0222208 1.3657656 -0.7934169 ... -0.5175568 -0.18671359 0.13431807] [-0.36760867 3.0180805 2.4793193 ... 0.6509608 -0.52269083 0.1438541 ] ... [ 0.5073896 1.203922 1.1415758 ... -0.43403634 0.1251062 -0.5783253 ] [ 0.07275555 -0.893804 -1.0093766 ... -1.2506764 -1.6499983 1.3208328 ] [-0.893804 -1.0093766 0.52973896 ... -1.6499983 1.3208328 0.50669336]] [[-0.4573977 1.0182445 -0.27173695 ... 0.8338298 1.1809942 -2.5813966 ] [ 1.0182445 -0.27173695 0.11502454 ... 1.1809942 -2.5813966 1.2188212 ] [-0.20728397 -0.27674752 -1.9060874 ... -0.84417 2.7155445 0.28150457] ... [ 0.52733105 0.03219261 0.70366925 ... 1.2623634 1.0184499 -0.1427538 ] [-1.9257149 1.6899675 -0.29199547 ... -1.7147479 0.73163974 1.2059581 ] [ 1.6899675 -0.29199547 -0.53056 ... 0.73163974 1.2059581 1.2850168 ]] [[ 0.10448231 -2.2061918 -0.77019083 ... 0.20407818 -0.49667534 0.5949889 ] [-2.2061918 -0.77019083 -1.2649233 ... -0.49667534 0.5949889 -0.95069826] [-1.1133162 -0.8910201 0.26100376 ... 0.57324123 1.2403207 -0.88664675] ... [-0.92833924 0.11984515 -0.26717612 ... 2.551219 0.08614771 1.6668266 ] [-0.30752015 -0.8332842 0.8702517 ... -0.5095475 0.793865 0.36741754] [-0.8332842 0.8702517 -0.6302787 ... 0.793865 0.36741754 0.03172886]] ... [[-0.3762002 -0.17675245 -0.46713215 ... -1.4197711 -0.22190386 0.39407593] [-0.17675245 -0.46713215 0.09623804 ... -0.22190386 0.39407593 -1.1542301 ] [-0.2609688 -1.7788107 -0.37499303 ... 
-0.43057302 -1.1134611 0.44282228] ... [ 0.76124495 -0.28261924 -0.75268924 ... -0.37146723 -1.2930132 -1.3898064 ] [ 1.760627 -0.66195667 1.7428466 ... 0.5957901 -1.0736336 -0.90816414] [-0.66195667 1.7428466 1.4357342 ... -1.0736336 -0.90816414 0.46818954]] [[ 0.7123914 0.8454102 -0.6296952 ... 0.2771056 1.0966967 0.41601047] [ 0.8454102 -0.6296952 -1.4749316 ... 1.0966967 0.41601047 -0.4901538 ] [ 0.03796798 0.30366054 0.35351732 ... 1.991898 0.6296462 0.5057162 ] ... [-0.7861885 -0.9449402 -1.0845233 ... -0.6917947 1.1566108 -0.23639253] [ 0.63700044 -0.8174056 0.30924958 ... 1.3226073 1.3233399 0.2973574 ] [-0.8174056 0.30924958 -0.41623932 ... 1.3233399 0.2973574 1.1471404 ]] [[-0.97323346 2.3262475 -0.95932424 ... -0.17407064 0.4556278 -0.26078364] [ 2.3262475 -0.95932424 -1.7394983 ... 0.4556278 -0.26078364 -0.22137885] [-1.1897752 -1.1174878 2.2617261 ... -0.28629938 -2.4634778 0.9379707 ] ... [ 0.2013079 -0.26771924 1.1719321 ... -0.24916883 2.0213342 -0.18954879] [-1.098656 0.74146646 -1.1201596 ... -0.1437195 -1.0404408 -0.42098534] [ 0.74146646 -1.1201596 0.37320414 ... -1.0404408 -0.42098534 -1.5254413 ]]]; ov_res: [[[-1.4585092 -2.0222208 1.3657656 ... -0.32868642 -0.5175568 -0.18671359] [-2.0222208 1.3657656 -0.7934169 ... -0.5175568 -0.18671359 0.13431807] [-0.36760867 3.0180805 2.4793193 ... 0.6509608 -0.52269083 0.1438541 ] ... [ 0.5073896 1.203922 1.1415758 ... -0.43403634 0.1251062 -0.5783253 ] [ 0.07275555 -0.893804 -1.0093766 ... -1.2506764 -1.6499983 1.3208328 ] [-0.893804 -1.0093766 0.52973896 ... -1.6499983 1.3208328 0.50669336]] [[-0.4573977 1.0182445 -0.27173695 ... 0.8338298 1.1809942 -2.5813966 ] [ 1.0182445 -0.27173695 0.11502454 ... 1.1809942 -2.5813966 1.2188212 ] [-0.20728397 -0.27674752 -1.9060874 ... -0.84417 2.7155445 0.28150457] ... [ 0.52733105 0.03219261 0.70366925 ... 1.2623634 1.0184499 -0.1427538 ] [-1.9257149 1.6899675 -0.29199547 ... -1.7147479 0.73163974 1.2059581 ] [ 1.6899675 -0.29199547 -0.53056 ... 
0.73163974 1.2059581 1.2850168 ]] [[ 0.10448231 -2.2061918 -0.77019083 ... 0.20407818 -0.49667534 0.5949889 ] [-2.2061918 -0.77019083 -1.2649233 ... -0.49667534 0.5949889 -0.95069826] [-1.1133162 -0.8910201 0.26100376 ... 0.57324123 1.2403207 -0.88664675] ... [-0.92833924 0.11984515 -0.26717612 ... 2.551219 0.08614771 1.6668266 ] [-0.30752015 -0.8332842 0.8702517 ... -0.5095475 0.793865 0.36741754] [-0.8332842 0.8702517 -0.6302787 ... 0.793865 0.36741754 0.03172886]] ... [[-0.3762002 -0.17675245 -0.46713215 ... -1.4197711 -0.22190386 0.39407593] [-0.17675245 -0.46713215 0.09623804 ... -0.22190386 0.39407593 -1.1542301 ] [-0.2609688 -1.7788107 -0.37499303 ... -0.43057302 -1.1134611 0.44282228] ... [ 0.76124495 -0.28261924 -0.75268924 ... -0.37146723 -1.2930132 -1.3898064 ] [ 1.760627 -0.66195667 1.7428466 ... 0.5957901 -1.0736336 -0.90816414] [-0.66195667 1.7428466 1.4357342 ... -1.0736336 -0.90816414 0.46818954]] [[ 0.7123914 0.8454102 -0.6296952 ... 0.2771056 1.0966967 0.41601047] [ 0.8454102 -0.6296952 -1.4749316 ... 1.0966967 0.41601047 -0.4901538 ] [ 0.03796798 0.30366054 0.35351732 ... 1.991898 0.6296462 0.5057162 ] ... [-0.7861885 -0.9449402 -1.0845233 ... -0.6917947 1.1566108 -0.23639253] [ 0.63700044 -0.8174056 0.30924958 ... 1.3226073 1.3233399 0.2973574 ] [-0.8174056 0.30924958 -0.41623932 ... 1.3233399 0.2973574 1.1471404 ]] [[-0.97323346 2.3262475 -0.95932424 ... -0.17407064 0.4556278 -0.26078364] [ 2.3262475 -0.95932424 -1.7394983 ... 0.4556278 -0.26078364 -0.22137885] [-1.1897752 -1.1174878 2.2617261 ... -0.28629938 -2.4634778 0.9379707 ] ... [ 0.2013079 -0.26771924 1.1719321 ... -0.24916883 2.0213342 -0.18954879] [-1.098656 0.74146646 -1.1201596 ... -0.1437195 -1.0404408 -0.42098534] [ 0.74146646 -1.1201596 0.37320414 ... -1.0404408 -0.42098534 -1.5254413 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4795.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-2.79225326e+00 1.51706159e+00 -4.48559731e-01 ... 1.60876632e+00 -5.07571638e-01 -6.82048023e-01] [ 1.51706159e+00 -4.48559731e-01 -1.63946152e-01 ... -5.07571638e-01 -6.82048023e-01 -4.02063914e-02] [-4.48559731e-01 -1.63946152e-01 -4.14822111e-03 ... -6.82048023e-01 -4.02063914e-02 -4.20553952e-01] ... [ 5.67965746e-01 -2.20522061e-01 4.76107687e-01 ... -2.32026458e+00 -8.09742749e-01 -3.00707608e-01] [-2.20522061e-01 4.76107687e-01 1.24401763e-01 ... -8.09742749e-01 -3.00707608e-01 8.33562195e-01] [ 4.76107687e-01 1.24401763e-01 1.00345099e+00 ... -3.00707608e-01 8.33562195e-01 9.12884057e-01]] [[ 8.48852336e-01 1.90077364e-01 -4.12550837e-01 ... -1.97338551e-01 3.53016078e-01 -2.39744082e-01] [ 1.90077364e-01 -4.12550837e-01 -3.84497261e+00 ... 3.53016078e-01 -2.39744082e-01 1.11469162e+00] [-4.12550837e-01 -3.84497261e+00 -8.92499089e-01 ... -2.39744082e-01 1.11469162e+00 5.97810030e-01] ... [ 1.28281224e+00 4.38510329e-02 -2.18165722e-02 ... 1.33202326e+00 7.75295138e-01 -1.17027140e+00] [ 4.38510329e-02 -2.18165722e-02 2.11712718e+00 ... 7.75295138e-01 -1.17027140e+00 1.39391494e+00] [-2.18165722e-02 2.11712718e+00 -9.69662443e-02 ... -1.17027140e+00 1.39391494e+00 1.64052427e+00]] [[ 9.18888152e-01 5.06060004e-01 -1.03091896e+00 ... -6.76055670e-01 -2.02247187e-01 -2.71691799e-01] [ 5.06060004e-01 -1.03091896e+00 -9.46800530e-01 ... -2.02247187e-01 -2.71691799e-01 -1.12520385e+00] [-1.03091896e+00 -9.46800530e-01 -4.36834186e-01 ... -2.71691799e-01 -1.12520385e+00 4.50756997e-02] ... 
[-1.14775300e-01 5.23223042e-01 7.99423456e-01 ... 1.23475301e+00 4.94663686e-01 4.78573889e-02] [ 5.23223042e-01 7.99423456e-01 -1.62671554e+00 ... 4.94663686e-01 4.78573889e-02 1.04309154e+00] [ 7.99423456e-01 -1.62671554e+00 -1.15088248e+00 ... 4.78573889e-02 1.04309154e+00 -1.48852229e+00]] ... [[ 1.01467860e+00 1.25125602e-01 -1.69735074e-01 ... -3.71146828e-01 -7.34136105e-01 -2.00829860e-02] [ 1.25125602e-01 -1.69735074e-01 1.36617863e+00 ... -7.34136105e-01 -2.00829860e-02 3.53100926e-01] [-1.69735074e-01 1.36617863e+00 4.89551455e-01 ... -2.00829860e-02 3.53100926e-01 4.12147313e-01] ... [ 3.89878720e-01 5.17296255e-01 2.03353786e+00 ... -1.01760030e+00 8.28521073e-01 -6.19455338e-01] [ 5.17296255e-01 2.03353786e+00 2.31799200e-01 ... 8.28521073e-01 -6.19455338e-01 5.95472395e-01] [ 2.03353786e+00 2.31799200e-01 4.33013618e-01 ... -6.19455338e-01 5.95472395e-01 8.32244992e-01]] [[-1.11750519e+00 2.42953944e+00 4.00603831e-01 ... -3.82281765e-02 -1.32262474e-02 -7.37145782e-01] [ 2.42953944e+00 4.00603831e-01 7.32003093e-01 ... -1.32262474e-02 -7.37145782e-01 1.06317826e-01] [ 4.00603831e-01 7.32003093e-01 -6.03626788e-01 ... -7.37145782e-01 1.06317826e-01 -8.17418218e-01] ... [-4.71006960e-01 -1.99183360e-01 1.74616933e+00 ... -6.28319457e-02 -1.01574495e-01 -4.69484150e-01] [-1.99183360e-01 1.74616933e+00 1.16325307e+00 ... -1.01574495e-01 -4.69484150e-01 6.95142925e-01] [ 1.74616933e+00 1.16325307e+00 -3.52211952e-01 ... -4.69484150e-01 6.95142925e-01 -7.74325967e-01]] [[ 3.52150458e-03 -5.98499954e-01 3.03087801e-01 ... 2.36601067e+00 -2.47726649e-01 8.79489541e-01] [-5.98499954e-01 3.03087801e-01 5.06109297e-01 ... -2.47726649e-01 8.79489541e-01 1.52494049e+00] [ 3.03087801e-01 5.06109297e-01 -3.97025973e-01 ... 8.79489541e-01 1.52494049e+00 -1.76783967e+00] ... [ 1.16444993e+00 1.95805883e+00 4.77220982e-01 ... -2.01340723e+00 1.57339835e+00 7.15087175e-01] [ 1.95805883e+00 4.77220982e-01 6.02829754e-01 ... 
1.57339835e+00 7.15087175e-01 -1.29300505e-01] [ 4.77220982e-01 6.02829754e-01 7.57127762e-01 ... 7.15087175e-01 -1.29300505e-01 1.48153877e+00]]]; ov_res: [[[-2.79225326e+00 1.51706159e+00 -4.48559731e-01 ... 1.60876632e+00 -5.07571638e-01 -6.82048023e-01] [ 1.51706159e+00 -4.48559731e-01 -1.63946152e-01 ... -5.07571638e-01 -6.82048023e-01 -4.02063914e-02] [-4.48559731e-01 -1.63946152e-01 -4.14822111e-03 ... -6.82048023e-01 -4.02063914e-02 -4.20553952e-01] ... [ 5.67965746e-01 -2.20522061e-01 4.76107687e-01 ... -2.32026458e+00 -8.09742749e-01 -3.00707608e-01] [-2.20522061e-01 4.76107687e-01 1.24401763e-01 ... -8.09742749e-01 -3.00707608e-01 8.33562195e-01] [ 4.76107687e-01 1.24401763e-01 1.00345099e+00 ... -3.00707608e-01 8.33562195e-01 9.12884057e-01]] [[ 8.48852336e-01 1.90077364e-01 -4.12550837e-01 ... -1.97338551e-01 3.53016078e-01 -2.39744082e-01] [ 1.90077364e-01 -4.12550837e-01 -3.84497261e+00 ... 3.53016078e-01 -2.39744082e-01 1.11469162e+00] [-4.12550837e-01 -3.84497261e+00 -8.92499089e-01 ... -2.39744082e-01 1.11469162e+00 5.97810030e-01] ... [ 1.28281224e+00 4.38510329e-02 -2.18165722e-02 ... 1.33202326e+00 7.75295138e-01 -1.17027140e+00] [ 4.38510329e-02 -2.18165722e-02 2.11712718e+00 ... 7.75295138e-01 -1.17027140e+00 1.39391494e+00] [-2.18165722e-02 2.11712718e+00 -9.69662443e-02 ... -1.17027140e+00 1.39391494e+00 1.64052427e+00]] [[ 9.18888152e-01 5.06060004e-01 -1.03091896e+00 ... -6.76055670e-01 -2.02247187e-01 -2.71691799e-01] [ 5.06060004e-01 -1.03091896e+00 -9.46800530e-01 ... -2.02247187e-01 -2.71691799e-01 -1.12520385e+00] [-1.03091896e+00 -9.46800530e-01 -4.36834186e-01 ... -2.71691799e-01 -1.12520385e+00 4.50756997e-02] ... [-1.14775300e-01 5.23223042e-01 7.99423456e-01 ... 1.23475301e+00 4.94663686e-01 4.78573889e-02] [ 5.23223042e-01 7.99423456e-01 -1.62671554e+00 ... 4.94663686e-01 4.78573889e-02 1.04309154e+00] [ 7.99423456e-01 -1.62671554e+00 -1.15088248e+00 ... 4.78573889e-02 1.04309154e+00 -1.48852229e+00]] ... 
[[ 1.01467860e+00 1.25125602e-01 -1.69735074e-01 ... -3.71146828e-01 -7.34136105e-01 -2.00829860e-02] [ 1.25125602e-01 -1.69735074e-01 1.36617863e+00 ... -7.34136105e-01 -2.00829860e-02 3.53100926e-01] [-1.69735074e-01 1.36617863e+00 4.89551455e-01 ... -2.00829860e-02 3.53100926e-01 4.12147313e-01] ... [ 3.89878720e-01 5.17296255e-01 2.03353786e+00 ... -1.01760030e+00 8.28521073e-01 -6.19455338e-01] [ 5.17296255e-01 2.03353786e+00 2.31799200e-01 ... 8.28521073e-01 -6.19455338e-01 5.95472395e-01] [ 2.03353786e+00 2.31799200e-01 4.33013618e-01 ... -6.19455338e-01 5.95472395e-01 8.32244992e-01]] [[-1.11750519e+00 2.42953944e+00 4.00603831e-01 ... -3.82281765e-02 -1.32262474e-02 -7.37145782e-01] [ 2.42953944e+00 4.00603831e-01 7.32003093e-01 ... -1.32262474e-02 -7.37145782e-01 1.06317826e-01] [ 4.00603831e-01 7.32003093e-01 -6.03626788e-01 ... -7.37145782e-01 1.06317826e-01 -8.17418218e-01] ... [-4.71006960e-01 -1.99183360e-01 1.74616933e+00 ... -6.28319457e-02 -1.01574495e-01 -4.69484150e-01] [-1.99183360e-01 1.74616933e+00 1.16325307e+00 ... -1.01574495e-01 -4.69484150e-01 6.95142925e-01] [ 1.74616933e+00 1.16325307e+00 -3.52211952e-01 ... -4.69484150e-01 6.95142925e-01 -7.74325967e-01]] [[ 3.52150458e-03 -5.98499954e-01 3.03087801e-01 ... 2.36601067e+00 -2.47726649e-01 8.79489541e-01] [-5.98499954e-01 3.03087801e-01 5.06109297e-01 ... -2.47726649e-01 8.79489541e-01 1.52494049e+00] [ 3.03087801e-01 5.06109297e-01 -3.97025973e-01 ... 8.79489541e-01 1.52494049e+00 -1.76783967e+00] ... [ 1.16444993e+00 1.95805883e+00 4.77220982e-01 ... -2.01340723e+00 1.57339835e+00 7.15087175e-01] [ 1.95805883e+00 4.77220982e-01 6.02829754e-01 ... 1.57339835e+00 7.15087175e-01 -1.29300505e-01] [ 4.77220982e-01 6.02829754e-01 7.57127762e-01 ... 7.15087175e-01 -1.29300505e-01 1.48153877e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4797.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-0.4202723 -0.8923943 -1.6260427 ... -0.48122743 -2.5695412 -0.7533671 ] [-0.8923943 -1.6260427 1.5139685 ... -2.5695412 -0.7533671 -0.49335912] [-0.57481736 0.82871556 0.09416901 ... -0.55305016 1.006734 -0.26365215] ... [ 0.4689565 0.6873177 0.9098752 ... 0.12084726 -1.3379229 -0.4867612 ] [-0.09313512 0.48089132 -0.44961149 ... 0.08110283 -1.3534302 -0.93285733] [ 0.48089132 -0.44961149 0.7690346 ... -1.3534302 -0.93285733 0.9543381 ]] [[ 1.100349 -0.99284863 0.37661868 ... 1.0296942 1.6530966 0.95667505] [-0.99284863 0.37661868 0.31466788 ... 1.6530966 0.95667505 -0.540729 ] [ 1.2278367 0.9291538 -1.0629954 ... -1.7061695 0.9047292 -0.22590424] ... [-0.19584195 -2.627106 1.0039624 ... -1.233189 -0.3475718 -0.24288829] [-0.7448565 0.33454752 1.4923213 ... 0.22804736 2.209433 -1.9263612 ] [ 0.33454752 1.4923213 0.42424703 ... 2.209433 -1.9263612 0.4877964 ]] [[-0.20242506 0.64801073 0.62861145 ... -1.8002316 -0.58318526 -1.2371832 ] [ 0.64801073 0.62861145 0.5196942 ... -0.58318526 -1.2371832 0.47478324] [ 2.1216316 0.04696924 0.5841529 ... 0.17886028 1.365268 -1.3263743 ] ... [-1.026007 -1.3294133 -0.41219258 ... 1.3526037 0.14507344 1.2895339 ] [-0.48794365 1.6676794 -0.21134505 ... 0.52095354 -0.06215456 0.06622518] [ 1.6676794 -0.21134505 0.21882084 ... -0.06215456 0.06622518 -0.68556 ]] ... [[-0.48852307 0.29926106 -0.2592608 ... 1.7614485 0.45935333 -1.6514728 ] [ 0.29926106 -0.2592608 -0.6493684 ... 0.45935333 -1.6514728 -0.5378809 ] [-0.26657248 0.31553245 0.74042207 ... 
1.0621454 -0.27599168 -0.15653601] ... [-0.12223108 -0.49160293 0.4243004 ... -2.5723033 -0.8057601 -1.2627745 ] [-0.8313427 -0.93241173 -0.94378483 ... 0.2912175 -1.0256513 1.2817854 ] [-0.93241173 -0.94378483 0.29838315 ... -1.0256513 1.2817854 0.11643159]] [[ 0.17156228 2.0689783 0.14123175 ... 2.019269 0.575558 -0.8364278 ] [ 2.0689783 0.14123175 -0.73986346 ... 0.575558 -0.8364278 0.26988474] [ 1.2871873 -0.7358702 -0.4172131 ... -1.2136358 0.01865526 -0.51534635] ... [-1.4895219 -0.73868096 -0.0920032 ... -0.6489403 -1.5311674 -0.7052412 ] [ 0.41207567 2.5522654 -1.5642306 ... -1.2628628 1.2507377 0.76728857] [ 2.5522654 -1.5642306 -0.86562973 ... 1.2507377 0.76728857 -1.0732714 ]] [[ 0.25421184 0.38756996 -0.11433666 ... -0.63567686 -0.47996554 -0.50154305] [ 0.38756996 -0.11433666 -0.4565684 ... -0.47996554 -0.50154305 0.7731295 ] [ 0.8283267 0.85739464 -2.9514432 ... -0.5902048 0.2611258 -0.76582557] ... [ 0.9371834 -0.3904881 -0.464709 ... -0.1337489 0.5855356 0.29784292] [ 0.642031 1.234747 -0.88132715 ... -0.29333976 0.69683534 0.02500268] [ 1.234747 -0.88132715 -0.46793932 ... 0.69683534 0.02500268 0.01162331]]]; ov_res: [[[-0.4202723 -0.8923943 -1.6260427 ... -0.48122743 -2.5695412 -0.7533671 ] [-0.8923943 -1.6260427 1.5139685 ... -2.5695412 -0.7533671 -0.49335912] [-0.57481736 0.82871556 0.09416901 ... -0.55305016 1.006734 -0.26365215] ... [ 0.4689565 0.6873177 0.9098752 ... 0.12084726 -1.3379229 -0.4867612 ] [-0.09313512 0.48089132 -0.44961149 ... 0.08110283 -1.3534302 -0.93285733] [ 0.48089132 -0.44961149 0.7690346 ... -1.3534302 -0.93285733 0.9543381 ]] [[ 1.100349 -0.99284863 0.37661868 ... 1.0296942 1.6530966 0.95667505] [-0.99284863 0.37661868 0.31466788 ... 1.6530966 0.95667505 -0.540729 ] [ 1.2278367 0.9291538 -1.0629954 ... -1.7061695 0.9047292 -0.22590424] ... [-0.19584195 -2.627106 1.0039624 ... -1.233189 -0.3475718 -0.24288829] [-0.7448565 0.33454752 1.4923213 ... 0.22804736 2.209433 -1.9263612 ] [ 0.33454752 1.4923213 0.42424703 ... 
2.209433 -1.9263612 0.4877964 ]] [[-0.20242506 0.64801073 0.62861145 ... -1.8002316 -0.58318526 -1.2371832 ] [ 0.64801073 0.62861145 0.5196942 ... -0.58318526 -1.2371832 0.47478324] [ 2.1216316 0.04696924 0.5841529 ... 0.17886028 1.365268 -1.3263743 ] ... [-1.026007 -1.3294133 -0.41219258 ... 1.3526037 0.14507344 1.2895339 ] [-0.48794365 1.6676794 -0.21134505 ... 0.52095354 -0.06215456 0.06622518] [ 1.6676794 -0.21134505 0.21882084 ... -0.06215456 0.06622518 -0.68556 ]] ... [[-0.48852307 0.29926106 -0.2592608 ... 1.7614485 0.45935333 -1.6514728 ] [ 0.29926106 -0.2592608 -0.6493684 ... 0.45935333 -1.6514728 -0.5378809 ] [-0.26657248 0.31553245 0.74042207 ... 1.0621454 -0.27599168 -0.15653601] ... [-0.12223108 -0.49160293 0.4243004 ... -2.5723033 -0.8057601 -1.2627745 ] [-0.8313427 -0.93241173 -0.94378483 ... 0.2912175 -1.0256513 1.2817854 ] [-0.93241173 -0.94378483 0.29838315 ... -1.0256513 1.2817854 0.11643159]] [[ 0.17156228 2.0689783 0.14123175 ... 2.019269 0.575558 -0.8364278 ] [ 2.0689783 0.14123175 -0.73986346 ... 0.575558 -0.8364278 0.26988474] [ 1.2871873 -0.7358702 -0.4172131 ... -1.2136358 0.01865526 -0.51534635] ... [-1.4895219 -0.73868096 -0.0920032 ... -0.6489403 -1.5311674 -0.7052412 ] [ 0.41207567 2.5522654 -1.5642306 ... -1.2628628 1.2507377 0.76728857] [ 2.5522654 -1.5642306 -0.86562973 ... 1.2507377 0.76728857 -1.0732714 ]] [[ 0.25421184 0.38756996 -0.11433666 ... -0.63567686 -0.47996554 -0.50154305] [ 0.38756996 -0.11433666 -0.4565684 ... -0.47996554 -0.50154305 0.7731295 ] [ 0.8283267 0.85739464 -2.9514432 ... -0.5902048 0.2611258 -0.76582557] ... [ 0.9371834 -0.3904881 -0.464709 ... -0.1337489 0.5855356 0.29784292] [ 0.642031 1.234747 -0.88132715 ... -0.29333976 0.69683534 0.02500268] [ 1.234747 -0.88132715 -0.46793932 ... 0.69683534 0.02500268 0.01162331]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:1 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4799.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 1.0186173 0.31877005 0.8407839 ... -2.6829479 -1.3206346 -0.37316015] [-0.40853056 0.7589101 -0.2439297 ... -0.56667906 0.58634573 1.3842554 ] [-1.1267617 0.06781305 0.9104888 ... -0.22025548 0.68519 0.27928206]] [[-0.29384062 -0.12918536 0.3795892 ... 0.8164419 0.9499998 1.0223737 ] [ 0.49824592 -0.66583836 -0.2614123 ... -1.578637 1.1984953 -0.19433859] [ 1.283483 -0.72129494 1.2895939 ... -0.19236438 -0.5335947 1.0202898 ]] [[-0.4732819 1.1665784 0.78957796 ... -0.22125542 -1.5341858 1.7644669 ] [-0.6452473 1.8965883 0.7455273 ... 1.9645243 0.47891313 -0.53350955] [-0.9036273 1.2149571 -0.5705894 ... -0.2595535 -2.3662813 0.04330488]] ... [[ 0.35338223 0.40030313 0.5665444 ... -0.09165339 -1.0704885 0.27246106] [-0.25595042 0.14383811 -0.09098474 ... 0.85743207 -2.0952036 0.42422256] [-0.1532696 0.43583035 0.5613441 ... 0.7304563 0.5006528 1.7144 ]] [[-1.9233183 0.34745514 0.27741942 ... 0.22419044 -0.49492395 -1.1134684 ] [-0.58943415 0.62503725 -0.08967143 ... -1.6761259 -0.2520034 0.04988958] [-0.14545058 -0.3180448 0.6658762 ... -0.50661516 -0.22736822 -0.1828716 ]] [[ 0.4576307 1.0277748 0.70480883 ... -0.04867031 -2.718044 0.3343162 ] [-0.28157672 1.0804454 -0.20028973 ... -1.0601485 1.5713198 -0.43772066] [-0.6507483 0.8808675 0.4173983 ... 1.2432255 -0.35400063 -0.16483761]]]; ov_res: [[[ 1.0186173 0.31877005 0.8407839 ... -2.6829479 -1.3206346 -0.37316015] [-0.40853056 0.7589101 -0.2439297 ... -0.56667906 0.58634573 1.3842554 ] [-1.1267617 0.06781305 0.9104888 ... 
-0.22025548 0.68519 0.27928206]] [[-0.29384062 -0.12918536 0.3795892 ... 0.8164419 0.9499998 1.0223737 ] [ 0.49824592 -0.66583836 -0.2614123 ... -1.578637 1.1984953 -0.19433859] [ 1.283483 -0.72129494 1.2895939 ... -0.19236438 -0.5335947 1.0202898 ]] [[-0.4732819 1.1665784 0.78957796 ... -0.22125542 -1.5341858 1.7644669 ] [-0.6452473 1.8965883 0.7455273 ... 1.9645243 0.47891313 -0.53350955] [-0.9036273 1.2149571 -0.5705894 ... -0.2595535 -2.3662813 0.04330488]] ... [[ 0.35338223 0.40030313 0.5665444 ... -0.09165339 -1.0704885 0.27246106] [-0.25595042 0.14383811 -0.09098474 ... 0.85743207 -2.0952036 0.42422256] [-0.1532696 0.43583035 0.5613441 ... 0.7304563 0.5006528 1.7144 ]] [[-1.9233183 0.34745514 0.27741942 ... 0.22419044 -0.49492395 -1.1134684 ] [-0.58943415 0.62503725 -0.08967143 ... -1.6761259 -0.2520034 0.04988958] [-0.14545058 -0.3180448 0.6658762 ... -0.50661516 -0.22736822 -0.1828716 ]] [[ 0.4576307 1.0277748 0.70480883 ... -0.04867031 -2.718044 0.3343162 ] [-0.28157672 1.0804454 -0.20028973 ... -1.0601485 1.5713198 -0.43772066] [-0.6507483 0.8808675 0.4173983 ... 1.2432255 -0.35400063 -0.16483761]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4801.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 1.5714957 -0.54509497 -0.27189007 ... -1.2374717 -1.5799592 0.33949924] [-0.27189007 -1.082394 0.02073714 ... 0.33949924 -0.63295734 2.791328 ] [ 0.02073714 0.8657578 1.6310989 ... 2.791328 0.6526906 -0.4357342 ] ... [-0.379108 0.5918857 -0.03989034 ... 0.8690219 0.34875906 0.771741 ] [-0.03989034 0.89279234 -1.0493144 ... 0.771741 -0.7625292 -0.18904085] [-1.0493144 -0.0430609 0.3789363 ... -0.18904085 -0.43104723 -0.32113904]] [[-0.35317215 -1.6459689 0.6736622 ... -0.41738588 0.2814755 -0.46386144] [ 0.6736622 -2.661529 0.7829788 ... -0.46386144 1.9587803 -0.06530789] [ 0.7829788 -1.2225121 0.12973872 ... -0.06530789 -0.9306304 0.8331481 ] ... [ 1.1428727 1.1864661 1.5647074 ... -0.20875326 -0.40921912 -0.5738937 ] [ 1.5647074 1.1877625 -0.5241367 ... -0.5738937 0.9042782 -1.2844963 ] [-0.5241367 0.15221974 0.32387027 ... -1.2844963 0.49847165 0.15637317]] [[ 2.7709675 0.8604849 -0.42906064 ... -1.1193384 1.4876869 0.6357036 ] [-0.42906064 0.923709 -0.80713177 ... 0.6357036 0.48882085 -0.75633454] [-0.80713177 -1.7258288 -0.09627971 ... -0.75633454 -0.90649474 1.4622259 ] ... [-0.31731048 -0.2645512 -1.1671333 ... -0.86851144 -1.069468 0.18563578] [-1.1671333 -0.8048158 0.75056964 ... 0.18563578 0.7231081 0.08334445] [ 0.75056964 -1.32074 0.11714379 ... 0.08334445 1.4654472 -0.45322213]] ... [[-0.22185226 0.01388085 0.35039085 ... -0.02365569 1.0309924 0.20422377] [ 0.35039085 0.1737365 0.65350705 ... 
0.20422377 -0.52903754 0.34837374] [ 0.65350705 -1.0532308 -0.48727554 ... 0.34837374 -0.71958405 -0.05039321] ... [ 0.9446158 -0.9398151 1.284318 ... 1.7404872 -0.76905507 -1.3091226 ] [ 1.284318 -1.3289586 -0.34044763 ... -1.3091226 0.9098279 -1.336696 ] [-0.34044763 -0.3765524 0.32014018 ... -1.336696 0.44068137 -0.06310947]] [[ 0.67756426 1.0918118 -0.09841431 ... 0.5022436 1.1900345 -1.3208642 ] [-0.09841431 -0.5818366 1.3738439 ... -1.3208642 0.4538212 0.5070383 ] [ 1.3738439 -0.34259224 0.37693518 ... 0.5070383 -0.83584607 -0.16894181] ... [ 2.7598016 0.3159516 0.02301816 ... 0.79411894 0.45208633 1.0749482 ] [ 0.02301816 0.5129993 -2.2669885 ... 1.0749482 -1.0271078 0.5475929 ] [-2.2669885 -0.16704181 0.76144135 ... 0.5475929 -0.4393478 1.7231617 ]] [[ 0.5537743 1.2181321 0.69537395 ... 0.4308532 1.7555798 0.4923281 ] [ 0.69537395 0.23141673 0.15052786 ... 0.4923281 0.21973412 -0.05721238] [ 0.15052786 0.3804771 1.2099355 ... -0.05721238 1.1151758 0.7159201 ] ... [-2.6443644 -1.5288723 -0.36901808 ... -0.01568076 0.13249993 -0.93770427] [-0.36901808 -1.1719474 1.2175119 ... -0.93770427 1.124795 0.09280965] [ 1.2175119 0.5150421 -1.2194464 ... 0.09280965 -0.99069196 -0.02921388]]]; ov_res: [[[ 1.5714957 -0.54509497 -0.27189007 ... -1.2374717 -1.5799592 0.33949924] [-0.27189007 -1.082394 0.02073714 ... 0.33949924 -0.63295734 2.791328 ] [ 0.02073714 0.8657578 1.6310989 ... 2.791328 0.6526906 -0.4357342 ] ... [-0.379108 0.5918857 -0.03989034 ... 0.8690219 0.34875906 0.771741 ] [-0.03989034 0.89279234 -1.0493144 ... 0.771741 -0.7625292 -0.18904085] [-1.0493144 -0.0430609 0.3789363 ... -0.18904085 -0.43104723 -0.32113904]] [[-0.35317215 -1.6459689 0.6736622 ... -0.41738588 0.2814755 -0.46386144] [ 0.6736622 -2.661529 0.7829788 ... -0.46386144 1.9587803 -0.06530789] [ 0.7829788 -1.2225121 0.12973872 ... -0.06530789 -0.9306304 0.8331481 ] ... [ 1.1428727 1.1864661 1.5647074 ... -0.20875326 -0.40921912 -0.5738937 ] [ 1.5647074 1.1877625 -0.5241367 ... 
-0.5738937 0.9042782 -1.2844963 ] [-0.5241367 0.15221974 0.32387027 ... -1.2844963 0.49847165 0.15637317]] [[ 2.7709675 0.8604849 -0.42906064 ... -1.1193384 1.4876869 0.6357036 ] [-0.42906064 0.923709 -0.80713177 ... 0.6357036 0.48882085 -0.75633454] [-0.80713177 -1.7258288 -0.09627971 ... -0.75633454 -0.90649474 1.4622259 ] ... [-0.31731048 -0.2645512 -1.1671333 ... -0.86851144 -1.069468 0.18563578] [-1.1671333 -0.8048158 0.75056964 ... 0.18563578 0.7231081 0.08334445] [ 0.75056964 -1.32074 0.11714379 ... 0.08334445 1.4654472 -0.45322213]] ... [[-0.22185226 0.01388085 0.35039085 ... -0.02365569 1.0309924 0.20422377] [ 0.35039085 0.1737365 0.65350705 ... 0.20422377 -0.52903754 0.34837374] [ 0.65350705 -1.0532308 -0.48727554 ... 0.34837374 -0.71958405 -0.05039321] ... [ 0.9446158 -0.9398151 1.284318 ... 1.7404872 -0.76905507 -1.3091226 ] [ 1.284318 -1.3289586 -0.34044763 ... -1.3091226 0.9098279 -1.336696 ] [-0.34044763 -0.3765524 0.32014018 ... -1.336696 0.44068137 -0.06310947]] [[ 0.67756426 1.0918118 -0.09841431 ... 0.5022436 1.1900345 -1.3208642 ] [-0.09841431 -0.5818366 1.3738439 ... -1.3208642 0.4538212 0.5070383 ] [ 1.3738439 -0.34259224 0.37693518 ... 0.5070383 -0.83584607 -0.16894181] ... [ 2.7598016 0.3159516 0.02301816 ... 0.79411894 0.45208633 1.0749482 ] [ 0.02301816 0.5129993 -2.2669885 ... 1.0749482 -1.0271078 0.5475929 ] [-2.2669885 -0.16704181 0.76144135 ... 0.5475929 -0.4393478 1.7231617 ]] [[ 0.5537743 1.2181321 0.69537395 ... 0.4308532 1.7555798 0.4923281 ] [ 0.69537395 0.23141673 0.15052786 ... 0.4923281 0.21973412 -0.05721238] [ 0.15052786 0.3804771 1.2099355 ... -0.05721238 1.1151758 0.7159201 ] ... [-2.6443644 -1.5288723 -0.36901808 ... -0.01568076 0.13249993 -0.93770427] [-0.36901808 -1.1719474 1.2175119 ... -0.93770427 1.124795 0.09280965] [ 1.2175119 0.5150421 -1.2194464 ... 0.09280965 -0.99069196 -0.02921388]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4803.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.46106824 -0.01538486 -1.8807629 ... 0.29932448 -0.09618647 -1.296263 ] [-1.8807629 0.45676103 -0.45533875 ... -1.296263 -0.39901412 0.6208997 ] [-1.9343112 -0.6518746 -0.20808943 ... 0.43702608 -0.46476215 -0.19515532] ... [-1.1475562 -1.1886988 -0.34305218 ... 0.6234891 0.86353326 -0.97248554] [-0.40360263 -1.121232 -0.3330749 ... -0.56174654 -0.5651811 -0.42660844] [-0.3330749 0.4632697 0.9450838 ... -0.42660844 -0.53286433 0.26740524]] [[ 0.3263546 0.659538 -0.7598733 ... 2.1109722 0.29840103 0.8462598 ] [-0.7598733 -1.1460961 0.259264 ... 0.8462598 0.24943277 -0.7335469 ] [ 0.09881662 0.47733635 0.35417843 ... 0.30247957 0.2093989 0.28941226] ... [-0.4932344 -0.6681978 0.1051598 ... -0.20405068 0.37163928 0.22033603] [ 0.96781987 -1.0832436 -0.7221964 ... 0.6109132 0.16074267 0.09779618] [-0.7221964 -0.00530777 -1.4942353 ... 0.09779618 -0.03935515 -0.44172853]] [[-1.4764017 -0.74814546 0.3511957 ... -0.27319628 0.09975304 -0.02941265] [ 0.3511957 -1.2968335 0.2745004 ... -0.02941265 -1.0128963 -1.2098029 ] [-0.15406907 0.5418814 -2.858827 ... -0.73691046 1.8194705 0.70387447] ... [ 0.42929998 -0.43447426 -0.87467366 ... -0.66628313 -2.0558598 1.3673078 ] [-0.3055108 0.3963087 -0.24389686 ... -0.37257868 -2.9821527 2.2516663 ] [-0.24389686 0.5609689 -0.76009625 ... 2.2516663 0.17172411 -0.2893386 ]] ... [[ 0.5582372 -1.0422391 -0.26475346 ... -0.68705016 1.4207386 0.00988352] [-0.26475346 2.5734363 -0.44075468 ... 
0.00988352 1.1248024 0.17437477] [ 1.0228207 -0.80142176 2.5288837 ... -0.78868234 -0.36982715 -0.21496893] ... [-0.11214957 4.060766 -1.3623722 ... 0.6362308 0.09270787 0.7015294 ] [-0.8195983 1.9481889 -1.8873421 ... 0.901091 -1.5364895 -1.5507237 ] [-1.8873421 0.9439464 0.00494667 ... -1.5507237 -0.3256929 -0.33453816]] [[ 1.5353601 1.3942242 -0.99810934 ... -1.0530581 1.297185 -0.12780005] [-0.99810934 -0.97077614 -0.2114215 ... -0.12780005 -0.4668739 0.5700546 ] [ 0.5505727 -0.15198803 -0.99833935 ... 0.19768216 0.17459807 -0.8555372 ] ... [ 0.66940033 0.8630227 0.03307408 ... 1.1915812 -1.6769454 1.6889718 ] [-0.4278467 -0.18475886 -0.96915644 ... 0.11315712 -0.33748558 -0.97947997] [-0.96915644 0.6712094 -0.92313945 ... -0.97947997 2.029094 0.30750814]] [[-1.5841566 -0.98267984 -1.5805933 ... -0.92639095 -0.09352965 -0.8272143 ] [-1.5805933 0.7028821 0.7614496 ... -0.8272143 1.1013646 1.3631363 ] [-0.07828581 0.56013286 0.87759686 ... -0.18685102 -0.49397272 2.1368837 ] ... [ 1.0362219 -0.5917158 -0.32569206 ... -0.6285188 -0.791621 0.96377736] [-1.4852341 -0.6792564 0.7165616 ... 0.77668744 -0.12761177 0.17242664] [ 0.7165616 -1.1594311 0.69704914 ... 0.17242664 0.77061254 -0.549687 ]]]; ov_res: [[[ 0.46106824 -0.01538486 -1.8807629 ... 0.29932448 -0.09618647 -1.296263 ] [-1.8807629 0.45676103 -0.45533875 ... -1.296263 -0.39901412 0.6208997 ] [-1.9343112 -0.6518746 -0.20808943 ... 0.43702608 -0.46476215 -0.19515532] ... [-1.1475562 -1.1886988 -0.34305218 ... 0.6234891 0.86353326 -0.97248554] [-0.40360263 -1.121232 -0.3330749 ... -0.56174654 -0.5651811 -0.42660844] [-0.3330749 0.4632697 0.9450838 ... -0.42660844 -0.53286433 0.26740524]] [[ 0.3263546 0.659538 -0.7598733 ... 2.1109722 0.29840103 0.8462598 ] [-0.7598733 -1.1460961 0.259264 ... 0.8462598 0.24943277 -0.7335469 ] [ 0.09881662 0.47733635 0.35417843 ... 0.30247957 0.2093989 0.28941226] ... [-0.4932344 -0.6681978 0.1051598 ... -0.20405068 0.37163928 0.22033603] [ 0.96781987 -1.0832436 -0.7221964 ... 
0.6109132 0.16074267 0.09779618] [-0.7221964 -0.00530777 -1.4942353 ... 0.09779618 -0.03935515 -0.44172853]] [[-1.4764017 -0.74814546 0.3511957 ... -0.27319628 0.09975304 -0.02941265] [ 0.3511957 -1.2968335 0.2745004 ... -0.02941265 -1.0128963 -1.2098029 ] [-0.15406907 0.5418814 -2.858827 ... -0.73691046 1.8194705 0.70387447] ... [ 0.42929998 -0.43447426 -0.87467366 ... -0.66628313 -2.0558598 1.3673078 ] [-0.3055108 0.3963087 -0.24389686 ... -0.37257868 -2.9821527 2.2516663 ] [-0.24389686 0.5609689 -0.76009625 ... 2.2516663 0.17172411 -0.2893386 ]] ... [[ 0.5582372 -1.0422391 -0.26475346 ... -0.68705016 1.4207386 0.00988352] [-0.26475346 2.5734363 -0.44075468 ... 0.00988352 1.1248024 0.17437477] [ 1.0228207 -0.80142176 2.5288837 ... -0.78868234 -0.36982715 -0.21496893] ... [-0.11214957 4.060766 -1.3623722 ... 0.6362308 0.09270787 0.7015294 ] [-0.8195983 1.9481889 -1.8873421 ... 0.901091 -1.5364895 -1.5507237 ] [-1.8873421 0.9439464 0.00494667 ... -1.5507237 -0.3256929 -0.33453816]] [[ 1.5353601 1.3942242 -0.99810934 ... -1.0530581 1.297185 -0.12780005] [-0.99810934 -0.97077614 -0.2114215 ... -0.12780005 -0.4668739 0.5700546 ] [ 0.5505727 -0.15198803 -0.99833935 ... 0.19768216 0.17459807 -0.8555372 ] ... [ 0.66940033 0.8630227 0.03307408 ... 1.1915812 -1.6769454 1.6889718 ] [-0.4278467 -0.18475886 -0.96915644 ... 0.11315712 -0.33748558 -0.97947997] [-0.96915644 0.6712094 -0.92313945 ... -0.97947997 2.029094 0.30750814]] [[-1.5841566 -0.98267984 -1.5805933 ... -0.92639095 -0.09352965 -0.8272143 ] [-1.5805933 0.7028821 0.7614496 ... -0.8272143 1.1013646 1.3631363 ] [-0.07828581 0.56013286 0.87759686 ... -0.18685102 -0.49397272 2.1368837 ] ... [ 1.0362219 -0.5917158 -0.32569206 ... -0.6285188 -0.791621 0.96377736] [-1.4852341 -0.6792564 0.7165616 ... 0.77668744 -0.12761177 0.17242664] [ 0.7165616 -1.1594311 0.69704914 ... 0.17242664 0.77061254 -0.549687 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4805.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[-0.19987261 1.9864688 1.1190584 ... -0.39012533 -1.3553696 0.5556754 ] [ 1.1190584 1.2256091 1.2219167 ... 0.5556754 0.56464946 1.0004 ] [ 1.2219167 0.69306314 0.02742806 ... 1.0004 0.9125532 -1.5203592 ] ... [ 0.43191877 -0.5446676 0.33797878 ... -1.4750979 -1.2755274 -0.9345061 ] [ 0.33797878 1.6187074 0.15358973 ... -0.9345061 -0.20381078 0.7128431 ] [ 0.15358973 -0.5139688 0.02745358 ... 0.7128431 -0.80084395 -0.8648952 ]] [[-0.36924872 -1.0197216 0.6455332 ... 0.21384099 -2.5490415 -0.10571592] [ 0.6455332 -0.9405784 0.04604859 ... -0.10571592 1.1434305 -0.04841842] [ 0.04604859 1.3256259 0.8424611 ... -0.04841842 -0.1139994 -0.64480036] ... [-1.0542487 0.10721919 -0.8435679 ... 0.43541133 1.0113013 -3.412976 ] [-0.8435679 -1.2174647 1.9638443 ... -3.412976 0.05547379 0.6657944 ] [ 1.9638443 -0.31033635 -0.18305737 ... 0.6657944 1.3258327 1.080231 ]] [[ 1.3212547 -0.02835174 0.64501685 ... -0.33316013 -0.85594434 1.0804311 ] [ 0.64501685 0.3015347 0.27947408 ... 1.0804311 0.16026613 -1.008218 ] [ 0.27947408 -2.0846674 -0.70164686 ... -1.008218 -0.8442317 0.05336511] ... [-1.068912 -0.3640082 -0.19467859 ... -0.3510772 1.8672917 0.08906434] [-0.19467859 -0.43799478 -1.936891 ... 0.08906434 -0.09224888 -0.31289938] [-1.936891 -1.2655877 -0.47682413 ... -0.31289938 -0.22498727 0.33225372]] ... [[-1.903217 0.13080908 2.3691163 ... 0.19734849 0.31605512 0.34593117] [ 2.3691163 -1.681484 1.1175296 ... 
0.34593117 -2.6959376 0.7841883 ] [ 1.1175296 1.1609088 0.95781726 ... 0.7841883 0.48798183 -0.23759213] ... [-0.40306333 0.47023702 1.304931 ... -0.11992206 -2.3815289 -1.3079904 ] [ 1.304931 0.18890467 -1.127838 ... -1.3079904 -0.18176864 -0.6871341 ] [-1.127838 -0.28289458 -0.53055745 ... -0.6871341 2.2905107 0.46213895]] [[-0.7066179 0.2166437 0.51952845 ... 0.59218043 0.21972892 2.904673 ] [ 0.51952845 -0.42610645 2.2953832 ... 2.904673 -0.8512948 0.13284625] [ 2.2953832 1.2006997 0.43485728 ... 0.13284625 -0.85112673 -1.0873572 ] ... [-0.9245766 -0.64645004 -1.0280676 ... -1.0725716 0.74460536 1.7468544 ] [-1.0280676 0.94521326 0.09748071 ... 1.7468544 -1.3696529 0.3410923 ] [ 0.09748071 -2.0932255 0.38454846 ... 0.3410923 -0.2534584 -0.6450946 ]] [[ 0.6298398 -0.42457712 0.6116369 ... -1.41384 -0.7835758 -4.206542 ] [ 0.6116369 0.683032 0.15429136 ... -4.206542 0.45611328 -0.26348972] [ 0.15429136 -0.5128725 -0.98800313 ... -0.26348972 0.43307137 0.4006075 ] ... [ 1.3352604 -0.02820586 0.15463936 ... 0.46092913 -0.15778543 -1.2152612 ] [ 0.15463936 1.7315344 -0.09092534 ... -1.2152612 0.6004696 -1.0296942 ] [-0.09092534 2.4320798 1.292228 ... -1.0296942 0.6672558 1.5587709 ]]]; ov_res: [[[-0.19987261 1.9864688 1.1190584 ... -0.39012533 -1.3553696 0.5556754 ] [ 1.1190584 1.2256091 1.2219167 ... 0.5556754 0.56464946 1.0004 ] [ 1.2219167 0.69306314 0.02742806 ... 1.0004 0.9125532 -1.5203592 ] ... [ 0.43191877 -0.5446676 0.33797878 ... -1.4750979 -1.2755274 -0.9345061 ] [ 0.33797878 1.6187074 0.15358973 ... -0.9345061 -0.20381078 0.7128431 ] [ 0.15358973 -0.5139688 0.02745358 ... 0.7128431 -0.80084395 -0.8648952 ]] [[-0.36924872 -1.0197216 0.6455332 ... 0.21384099 -2.5490415 -0.10571592] [ 0.6455332 -0.9405784 0.04604859 ... -0.10571592 1.1434305 -0.04841842] [ 0.04604859 1.3256259 0.8424611 ... -0.04841842 -0.1139994 -0.64480036] ... [-1.0542487 0.10721919 -0.8435679 ... 0.43541133 1.0113013 -3.412976 ] [-0.8435679 -1.2174647 1.9638443 ... 
-3.412976 0.05547379 0.6657944 ] [ 1.9638443 -0.31033635 -0.18305737 ... 0.6657944 1.3258327 1.080231 ]] [[ 1.3212547 -0.02835174 0.64501685 ... -0.33316013 -0.85594434 1.0804311 ] [ 0.64501685 0.3015347 0.27947408 ... 1.0804311 0.16026613 -1.008218 ] [ 0.27947408 -2.0846674 -0.70164686 ... -1.008218 -0.8442317 0.05336511] ... [-1.068912 -0.3640082 -0.19467859 ... -0.3510772 1.8672917 0.08906434] [-0.19467859 -0.43799478 -1.936891 ... 0.08906434 -0.09224888 -0.31289938] [-1.936891 -1.2655877 -0.47682413 ... -0.31289938 -0.22498727 0.33225372]] ... [[-1.903217 0.13080908 2.3691163 ... 0.19734849 0.31605512 0.34593117] [ 2.3691163 -1.681484 1.1175296 ... 0.34593117 -2.6959376 0.7841883 ] [ 1.1175296 1.1609088 0.95781726 ... 0.7841883 0.48798183 -0.23759213] ... [-0.40306333 0.47023702 1.304931 ... -0.11992206 -2.3815289 -1.3079904 ] [ 1.304931 0.18890467 -1.127838 ... -1.3079904 -0.18176864 -0.6871341 ] [-1.127838 -0.28289458 -0.53055745 ... -0.6871341 2.2905107 0.46213895]] [[-0.7066179 0.2166437 0.51952845 ... 0.59218043 0.21972892 2.904673 ] [ 0.51952845 -0.42610645 2.2953832 ... 2.904673 -0.8512948 0.13284625] [ 2.2953832 1.2006997 0.43485728 ... 0.13284625 -0.85112673 -1.0873572 ] ... [-0.9245766 -0.64645004 -1.0280676 ... -1.0725716 0.74460536 1.7468544 ] [-1.0280676 0.94521326 0.09748071 ... 1.7468544 -1.3696529 0.3410923 ] [ 0.09748071 -2.0932255 0.38454846 ... 0.3410923 -0.2534584 -0.6450946 ]] [[ 0.6298398 -0.42457712 0.6116369 ... -1.41384 -0.7835758 -4.206542 ] [ 0.6116369 0.683032 0.15429136 ... -4.206542 0.45611328 -0.26348972] [ 0.15429136 -0.5128725 -0.98800313 ... -0.26348972 0.43307137 0.4006075 ] ... [ 1.3352604 -0.02820586 0.15463936 ... 0.46092913 -0.15778543 -1.2152612 ] [ 0.15463936 1.7315344 -0.09092534 ... -1.2152612 0.6004696 -1.0296942 ] [-0.09092534 2.4320798 1.292228 ... -1.0296942 0.6672558 1.5587709 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4807.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-0.33602944 -0.49426827 -0.45835447 ... -0.2254321 0.49909985 -1.0546304 ] [-0.45835447 -0.49933323 1.8379257 ... -1.0546304 -0.7999435 -0.5841949 ] [ 1.0784189 -0.31901008 -0.6234071 ... 1.5265168 -0.26348695 -0.5586178 ] ... [ 0.61901253 -1.3982258 -2.010327 ... -1.082363 -1.2804791 0.20078598] [-0.9488568 -0.43467844 1.359769 ... 1.1143262 0.06521016 -1.1240889 ] [ 1.359769 -0.2560024 0.16174644 ... -1.1240889 1.100898 0.17287493]] [[-0.01168511 -0.03636038 -0.67465013 ... 1.4452791 -0.01181558 0.34948188] [-0.67465013 -0.3075735 -0.11346538 ... 0.34948188 1.4560348 0.62863314] [-0.7745614 3.3074806 2.3239605 ... 1.1693183 -1.2641137 0.6537496 ] ... [-1.219329 0.39051726 0.33373183 ... -0.82539207 0.57049793 0.56342113] [-0.54204166 -0.32973534 -3.8863387 ... -0.36445135 -3.2555833 0.6727128 ] [-3.8863387 -0.67554057 -0.00800513 ... 0.6727128 -2.1160758 0.07538541]] [[ 0.16884625 -1.3183596 0.25047442 ... -1.5353031 -0.6996439 0.7309567 ] [ 0.25047442 1.5265205 0.9643244 ... 0.7309567 -1.5577348 0.17845735] [ 1.2211492 0.3098869 -0.22049938 ... -1.2219328 1.4021484 0.9881835 ] ... [ 1.3447809 2.2200816 0.84627527 ... -0.40742484 -0.13765764 1.1963074 ] [ 0.6376283 -0.81218505 1.2396404 ... 0.144014 -0.35782787 -0.3762518 ] [ 1.2396404 -0.13723499 1.5929621 ... -0.3762518 -0.59058064 -0.01586792]] ... [[ 0.26447037 -0.68091005 1.5552936 ... 0.21183509 -0.80837315 0.41796976] [ 1.5552936 0.01105257 -0.37585974 ... 
0.41796976 -0.15576798 0.21636866] [ 0.89729255 1.2004881 1.2134343 ... -0.9965614 -0.64850867 -0.8747683 ] ... [ 0.47455126 -0.14111711 -2.0390983 ... 0.5524587 2.9665082 -0.3411215 ] [-0.01780475 0.6030616 0.2322285 ... -0.02735466 -0.14743349 0.04742667] [ 0.2322285 1.3209373 -1.4664999 ... 0.04742667 -0.5091313 -0.48117864]] [[ 0.38583967 -0.67958826 0.82292706 ... -0.79779625 -0.03868283 0.799497 ] [ 0.82292706 0.02630185 -0.07942594 ... 0.799497 -0.14916192 -0.9657981 ] [-0.24381371 -1.5469724 -0.9967729 ... -0.56036866 -0.6922617 -1.9358014 ] ... [-1.4990658 0.10084801 -1.8524042 ... 0.13391319 0.23657148 -1.3334885 ] [-1.5866497 -1.1696073 0.84320754 ... 1.1058964 1.6185566 0.5478967 ] [ 0.84320754 0.6546881 -1.5079603 ... 0.5478967 -1.3812318 -0.3236868 ]] [[-0.53695583 -0.08458997 0.9585046 ... -0.82188684 -0.5154437 -0.04812523] [ 0.9585046 -2.017554 -1.3403207 ... -0.04812523 0.52863353 -2.3241498 ] [-0.35642797 0.0691993 0.70401025 ... -0.5039212 -1.6128373 1.4983528 ] ... [-1.0823956 0.4568879 -0.72598255 ... 0.92395484 -0.22963236 -0.58163375] [ 1.2407268 0.846776 0.2287664 ... 0.10778047 -0.5464675 -0.69077307] [ 0.2287664 -0.61428833 -0.44096157 ... -0.69077307 0.60148156 -0.07327399]]]; ov_res: [[[-0.33602944 -0.49426827 -0.45835447 ... -0.2254321 0.49909985 -1.0546304 ] [-0.45835447 -0.49933323 1.8379257 ... -1.0546304 -0.7999435 -0.5841949 ] [ 1.0784189 -0.31901008 -0.6234071 ... 1.5265168 -0.26348695 -0.5586178 ] ... [ 0.61901253 -1.3982258 -2.010327 ... -1.082363 -1.2804791 0.20078598] [-0.9488568 -0.43467844 1.359769 ... 1.1143262 0.06521016 -1.1240889 ] [ 1.359769 -0.2560024 0.16174644 ... -1.1240889 1.100898 0.17287493]] [[-0.01168511 -0.03636038 -0.67465013 ... 1.4452791 -0.01181558 0.34948188] [-0.67465013 -0.3075735 -0.11346538 ... 0.34948188 1.4560348 0.62863314] [-0.7745614 3.3074806 2.3239605 ... 1.1693183 -1.2641137 0.6537496 ] ... [-1.219329 0.39051726 0.33373183 ... 
-0.82539207 0.57049793 0.56342113] [-0.54204166 -0.32973534 -3.8863387 ... -0.36445135 -3.2555833 0.6727128 ] [-3.8863387 -0.67554057 -0.00800513 ... 0.6727128 -2.1160758 0.07538541]] [[ 0.16884625 -1.3183596 0.25047442 ... -1.5353031 -0.6996439 0.7309567 ] [ 0.25047442 1.5265205 0.9643244 ... 0.7309567 -1.5577348 0.17845735] [ 1.2211492 0.3098869 -0.22049938 ... -1.2219328 1.4021484 0.9881835 ] ... [ 1.3447809 2.2200816 0.84627527 ... -0.40742484 -0.13765764 1.1963074 ] [ 0.6376283 -0.81218505 1.2396404 ... 0.144014 -0.35782787 -0.3762518 ] [ 1.2396404 -0.13723499 1.5929621 ... -0.3762518 -0.59058064 -0.01586792]] ... [[ 0.26447037 -0.68091005 1.5552936 ... 0.21183509 -0.80837315 0.41796976] [ 1.5552936 0.01105257 -0.37585974 ... 0.41796976 -0.15576798 0.21636866] [ 0.89729255 1.2004881 1.2134343 ... -0.9965614 -0.64850867 -0.8747683 ] ... [ 0.47455126 -0.14111711 -2.0390983 ... 0.5524587 2.9665082 -0.3411215 ] [-0.01780475 0.6030616 0.2322285 ... -0.02735466 -0.14743349 0.04742667] [ 0.2322285 1.3209373 -1.4664999 ... 0.04742667 -0.5091313 -0.48117864]] [[ 0.38583967 -0.67958826 0.82292706 ... -0.79779625 -0.03868283 0.799497 ] [ 0.82292706 0.02630185 -0.07942594 ... 0.799497 -0.14916192 -0.9657981 ] [-0.24381371 -1.5469724 -0.9967729 ... -0.56036866 -0.6922617 -1.9358014 ] ... [-1.4990658 0.10084801 -1.8524042 ... 0.13391319 0.23657148 -1.3334885 ] [-1.5866497 -1.1696073 0.84320754 ... 1.1058964 1.6185566 0.5478967 ] [ 0.84320754 0.6546881 -1.5079603 ... 0.5478967 -1.3812318 -0.3236868 ]] [[-0.53695583 -0.08458997 0.9585046 ... -0.82188684 -0.5154437 -0.04812523] [ 0.9585046 -2.017554 -1.3403207 ... -0.04812523 0.52863353 -2.3241498 ] [-0.35642797 0.0691993 0.70401025 ... -0.5039212 -1.6128373 1.4983528 ] ... [-1.0823956 0.4568879 -0.72598255 ... 0.92395484 -0.22963236 -0.58163375] [ 1.2407268 0.846776 0.2287664 ... 0.10778047 -0.5464675 -0.69077307] [ 0.2287664 -0.61428833 -0.44096157 ... -0.69077307 0.60148156 -0.07327399]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:2 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4809.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 1.5176632 -0.3069285 -0.7328745 ... -0.21293136 -0.12960048 0.07675961] [ 0.60989577 -1.35249 1.5237883 ... 1.4403629 -0.2840517 -0.57026917] [-1.8290263 1.222256 -1.4895017 ... -0.44483572 -0.06425752 1.3522148 ]] [[ 0.4559146 -0.30070448 -0.10262454 ... 0.8443323 0.13846277 -0.9777724 ] [-0.4254791 1.2896007 -1.3529043 ... 1.8926889 -0.34882736 0.4180904 ] [-0.68277436 -0.3395249 1.0799493 ... -1.5636655 1.0839214 -1.7434931 ]] [[-1.9145207 1.0388612 -0.4019461 ... -0.47502536 -0.14628805 -0.3638012 ] [-0.3300967 -1.5080935 0.06437703 ... 1.5140918 0.11457472 -0.809781 ] [ 0.47922787 0.3623648 1.7270175 ... 0.19717892 0.18297513 -2.026712 ]] ... [[-0.43227673 1.7208987 -0.16076112 ... -1.4019907 0.07462497 -0.28719756] [ 0.04211153 1.3651818 -0.68495715 ... -0.53081775 -1.7213316 0.34615457] [-0.6905946 0.41183344 -0.18175241 ... -0.14049771 -2.2384357 -0.78443676]] [[-0.99830514 -1.269013 0.91013855 ... -0.5323396 -0.46499705 -1.1232586 ] [ 0.00266234 -1.244209 0.7963687 ... -0.45195258 0.2046365 0.46282893] [-1.1226077 0.91302854 -0.07558884 ... -1.8660319 -0.34976384 0.46257606]] [[-0.5618538 -1.3751656 -0.26814795 ... 0.47276002 -1.0296254 0.21218322] [-0.7821195 -1.8567502 -1.8037372 ... 0.5855407 -1.1931771 2.4692938 ] [ 0.9169222 1.3864347 1.4356816 ... 0.4422661 0.7646198 -0.18100554]]]; ov_res: [[[ 1.5176632 -0.3069285 -0.7328745 ... -0.21293136 -0.12960048 0.07675961] [ 0.60989577 -1.35249 1.5237883 ... 
1.4403629 -0.2840517 -0.57026917] [-1.8290263 1.222256 -1.4895017 ... -0.44483572 -0.06425752 1.3522148 ]] [[ 0.4559146 -0.30070448 -0.10262454 ... 0.8443323 0.13846277 -0.9777724 ] [-0.4254791 1.2896007 -1.3529043 ... 1.8926889 -0.34882736 0.4180904 ] [-0.68277436 -0.3395249 1.0799493 ... -1.5636655 1.0839214 -1.7434931 ]] [[-1.9145207 1.0388612 -0.4019461 ... -0.47502536 -0.14628805 -0.3638012 ] [-0.3300967 -1.5080935 0.06437703 ... 1.5140918 0.11457472 -0.809781 ] [ 0.47922787 0.3623648 1.7270175 ... 0.19717892 0.18297513 -2.026712 ]] ... [[-0.43227673 1.7208987 -0.16076112 ... -1.4019907 0.07462497 -0.28719756] [ 0.04211153 1.3651818 -0.68495715 ... -0.53081775 -1.7213316 0.34615457] [-0.6905946 0.41183344 -0.18175241 ... -0.14049771 -2.2384357 -0.78443676]] [[-0.99830514 -1.269013 0.91013855 ... -0.5323396 -0.46499705 -1.1232586 ] [ 0.00266234 -1.244209 0.7963687 ... -0.45195258 0.2046365 0.46282893] [-1.1226077 0.91302854 -0.07558884 ... -1.8660319 -0.34976384 0.46257606]] [[-0.5618538 -1.3751656 -0.26814795 ... 0.47276002 -1.0296254 0.21218322] [-0.7821195 -1.8567502 -1.8037372 ... 0.5855407 -1.1931771 2.4692938 ] [ 0.9169222 1.3864347 1.4356816 ... 0.4422661 0.7646198 -0.18100554]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:3 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4811.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[-6.87704533e-02 2.30506912e-01 4.97328848e-01 ... -1.01270592e+00 -4.09851134e-01 -1.16449714e+00] [ 1.12156618e+00 -3.67380977e-01 1.72056139e-01 ... 3.12060434e-02 1.39453903e-01 9.75497305e-01] [-4.32217598e-01 5.38547575e-01 1.13217449e+00 ... 1.60580468e+00 -1.59739733e+00 -2.45131469e+00] ... [-5.54435253e-01 -1.77913356e+00 -2.10260987e-01 ... 3.68011206e-01 7.57276535e-01 6.68663979e-01] [ 8.57946336e-01 -3.58921051e-01 -8.76724124e-01 ... -1.35664594e+00 -2.59386599e-02 -1.81750166e+00] [-6.61756575e-01 3.13313723e-01 2.20060587e-01 ... 1.71512163e+00 -4.65223342e-01 -2.13133305e-01]] [[-2.15408134e+00 -7.94899940e-01 8.85038853e-01 ... -1.90807605e+00 -6.19390786e-01 1.42598057e+00] [ 5.90582013e-01 -1.25675833e+00 1.02442950e-01 ... -3.42375666e-01 1.45745146e+00 9.33976412e-01] [-1.42077363e+00 4.61379327e-02 2.63523483e+00 ... -5.60966551e-01 6.00370884e-01 -1.08566499e+00] ... [-3.02419186e-01 1.85547316e+00 -2.14513588e+00 ... -5.45931756e-01 1.10738672e-01 -7.28180885e-01] [ 3.85618806e-01 -9.15463090e-01 -8.67649019e-02 ... 7.40379155e-01 5.96985295e-02 8.43084931e-01] [-3.12969722e-02 -7.84981772e-02 6.92220330e-01 ... -1.58560193e+00 -4.12044525e-01 7.67239749e-01]] [[ 1.40430480e-01 -5.48824012e-01 -2.02843472e-02 ... 1.13796389e+00 6.51349604e-01 -2.71931440e-01] [ 6.79398835e-01 -2.63937682e-01 -7.06644416e-01 ... -1.74319100e+00 1.64383367e-01 6.11979485e-01] [-1.89882249e-01 1.72582135e-01 1.08250308e+00 ... 
-3.32695812e-01 4.81702000e-01 5.92162549e-01] ... [-3.84310722e-01 -6.08949661e-01 2.90864259e-01 ... -1.93912983e-01 -1.24686098e+00 1.35466945e+00] [-1.55076754e+00 4.70055073e-01 2.10423303e+00 ... 1.50747156e+00 -2.82152504e-01 2.69412130e-01] [ 2.14487338e+00 -1.20170675e-01 1.38977766e+00 ... 3.78669560e-01 4.66694087e-02 -3.98370802e-01]] ... [[ 8.11182201e-01 4.74589258e-01 -2.38098785e-01 ... -1.21865320e+00 -1.05743551e+00 -1.98799288e+00] [-1.76727879e+00 -3.09779257e-01 1.22220218e+00 ... -3.35213375e+00 2.51862496e-01 3.75177443e-01] [ 4.72805083e-01 -6.01569176e-01 6.16167843e-01 ... -1.21312551e-01 5.42734340e-02 -4.82335240e-01] ... [ 4.75742668e-01 -1.26833653e+00 -1.66644776e+00 ... -4.49747682e-01 9.40056264e-01 -4.64154840e-01] [-2.27445197e+00 -3.36946547e-01 -1.05853963e+00 ... 9.71475523e-03 3.70406061e-02 7.24125147e-01] [-1.71173525e+00 1.56116456e-01 3.22842896e-01 ... -8.85578573e-01 -2.36393854e-01 -2.21314609e-01]] [[-1.23934841e+00 -1.17705369e+00 -8.96069467e-01 ... -4.09790367e-01 -1.11939676e-01 -5.45955122e-01] [-1.28255391e+00 2.76219875e-01 4.51466113e-01 ... -8.14711154e-01 -1.11608171e+00 -1.17997766e+00] [-1.45260561e+00 -1.15831709e+00 -1.22980964e+00 ... -9.05731142e-01 -1.06363082e+00 -7.31089771e-01] ... [ 1.24226660e-01 -2.30739027e-01 1.50596738e-01 ... 1.05666947e+00 1.76115775e+00 5.79263866e-01] [ 1.37802765e-01 -2.01823282e+00 -7.70389140e-01 ... -1.91807830e+00 -4.30310369e-01 2.69435763e-01] [ 3.00374001e-01 2.43055243e-02 -3.33596826e-01 ... -1.41321993e+00 -4.19267118e-01 -5.66286385e-01]] [[-5.79066873e-01 2.57796586e-01 -5.35598159e-01 ... 5.17575264e-01 -6.81628644e-01 2.42234802e+00] [-8.00777435e-01 -4.03186709e-01 9.48776424e-01 ... 1.78836787e+00 -6.46526098e-01 1.19756544e+00] [ 5.90731800e-01 2.23644328e+00 -1.70337403e+00 ... 4.49018091e-01 8.11691344e-01 -2.50055432e-01] ... [ 7.06039250e-01 8.05364788e-01 1.17363095e+00 ... 
-1.27882615e-01 4.02428895e-01 7.35244036e-01] [-9.41820323e-01 -5.62097907e-01 1.48251444e-01 ... 4.91021089e-02 -4.50872600e-01 1.55983293e+00] [-1.24247171e-01 1.59187424e+00 1.50354087e+00 ... 4.28849638e-01 -3.30007030e-03 7.83175826e-02]]]; ov_res: [[[-6.87704533e-02 2.30506912e-01 4.97328848e-01 ... -1.01270592e+00 -4.09851134e-01 -1.16449714e+00] [ 1.12156618e+00 -3.67380977e-01 1.72056139e-01 ... 3.12060434e-02 1.39453903e-01 9.75497305e-01] [-4.32217598e-01 5.38547575e-01 1.13217449e+00 ... 1.60580468e+00 -1.59739733e+00 -2.45131469e+00] ... [-5.54435253e-01 -1.77913356e+00 -2.10260987e-01 ... 3.68011206e-01 7.57276535e-01 6.68663979e-01] [ 8.57946336e-01 -3.58921051e-01 -8.76724124e-01 ... -1.35664594e+00 -2.59386599e-02 -1.81750166e+00] [-6.61756575e-01 3.13313723e-01 2.20060587e-01 ... 1.71512163e+00 -4.65223342e-01 -2.13133305e-01]] [[-2.15408134e+00 -7.94899940e-01 8.85038853e-01 ... -1.90807605e+00 -6.19390786e-01 1.42598057e+00] [ 5.90582013e-01 -1.25675833e+00 1.02442950e-01 ... -3.42375666e-01 1.45745146e+00 9.33976412e-01] [-1.42077363e+00 4.61379327e-02 2.63523483e+00 ... -5.60966551e-01 6.00370884e-01 -1.08566499e+00] ... [-3.02419186e-01 1.85547316e+00 -2.14513588e+00 ... -5.45931756e-01 1.10738672e-01 -7.28180885e-01] [ 3.85618806e-01 -9.15463090e-01 -8.67649019e-02 ... 7.40379155e-01 5.96985295e-02 8.43084931e-01] [-3.12969722e-02 -7.84981772e-02 6.92220330e-01 ... -1.58560193e+00 -4.12044525e-01 7.67239749e-01]] [[ 1.40430480e-01 -5.48824012e-01 -2.02843472e-02 ... 1.13796389e+00 6.51349604e-01 -2.71931440e-01] [ 6.79398835e-01 -2.63937682e-01 -7.06644416e-01 ... -1.74319100e+00 1.64383367e-01 6.11979485e-01] [-1.89882249e-01 1.72582135e-01 1.08250308e+00 ... -3.32695812e-01 4.81702000e-01 5.92162549e-01] ... [-3.84310722e-01 -6.08949661e-01 2.90864259e-01 ... -1.93912983e-01 -1.24686098e+00 1.35466945e+00] [-1.55076754e+00 4.70055073e-01 2.10423303e+00 ... 
1.50747156e+00 -2.82152504e-01 2.69412130e-01] [ 2.14487338e+00 -1.20170675e-01 1.38977766e+00 ... 3.78669560e-01 4.66694087e-02 -3.98370802e-01]] ... [[ 8.11182201e-01 4.74589258e-01 -2.38098785e-01 ... -1.21865320e+00 -1.05743551e+00 -1.98799288e+00] [-1.76727879e+00 -3.09779257e-01 1.22220218e+00 ... -3.35213375e+00 2.51862496e-01 3.75177443e-01] [ 4.72805083e-01 -6.01569176e-01 6.16167843e-01 ... -1.21312551e-01 5.42734340e-02 -4.82335240e-01] ... [ 4.75742668e-01 -1.26833653e+00 -1.66644776e+00 ... -4.49747682e-01 9.40056264e-01 -4.64154840e-01] [-2.27445197e+00 -3.36946547e-01 -1.05853963e+00 ... 9.71475523e-03 3.70406061e-02 7.24125147e-01] [-1.71173525e+00 1.56116456e-01 3.22842896e-01 ... -8.85578573e-01 -2.36393854e-01 -2.21314609e-01]] [[-1.23934841e+00 -1.17705369e+00 -8.96069467e-01 ... -4.09790367e-01 -1.11939676e-01 -5.45955122e-01] [-1.28255391e+00 2.76219875e-01 4.51466113e-01 ... -8.14711154e-01 -1.11608171e+00 -1.17997766e+00] [-1.45260561e+00 -1.15831709e+00 -1.22980964e+00 ... -9.05731142e-01 -1.06363082e+00 -7.31089771e-01] ... [ 1.24226660e-01 -2.30739027e-01 1.50596738e-01 ... 1.05666947e+00 1.76115775e+00 5.79263866e-01] [ 1.37802765e-01 -2.01823282e+00 -7.70389140e-01 ... -1.91807830e+00 -4.30310369e-01 2.69435763e-01] [ 3.00374001e-01 2.43055243e-02 -3.33596826e-01 ... -1.41321993e+00 -4.19267118e-01 -5.66286385e-01]] [[-5.79066873e-01 2.57796586e-01 -5.35598159e-01 ... 5.17575264e-01 -6.81628644e-01 2.42234802e+00] [-8.00777435e-01 -4.03186709e-01 9.48776424e-01 ... 1.78836787e+00 -6.46526098e-01 1.19756544e+00] [ 5.90731800e-01 2.23644328e+00 -1.70337403e+00 ... 4.49018091e-01 8.11691344e-01 -2.50055432e-01] ... [ 7.06039250e-01 8.05364788e-01 1.17363095e+00 ... -1.27882615e-01 4.02428895e-01 7.35244036e-01] [-9.41820323e-01 -5.62097907e-01 1.48251444e-01 ... 4.91021089e-02 -4.50872600e-01 1.55983293e+00] [-1.24247171e-01 1.59187424e+00 1.50354087e+00 ... 4.28849638e-01 -3.30007030e-03 7.83175826e-02]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4813.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.03633688 0.8628996 -1.6779214 ... -0.3302475 0.08197331 1.0565659 ] [-0.2537048 0.8426449 -1.4121873 ... -0.7568591 1.8776693 -0.52525675] [ 0.1801897 0.69552696 0.8024265 ... -0.33690646 1.8493044 0.4817143 ] ... [ 1.5183744 -0.8506321 1.0761634 ... -0.39242113 1.1331497 1.376647 ] [-0.05692248 -1.8988767 -1.0298613 ... 0.54354215 0.13983822 -0.82563114] [-1.9803492 1.381132 1.762511 ... 1.8585508 -1.1662843 -0.20135735]] [[-0.35820282 0.76068586 1.0921742 ... -0.3476266 1.2500333 -0.0646111 ] [ 0.47956893 -0.6909083 1.4455326 ... -0.8217122 0.67896146 -1.2887226 ] [-0.14273107 -0.71559817 -0.8813999 ... -0.7948217 0.27395722 -0.88344556] ... [-1.2113028 -0.53198737 0.14284183 ... -0.42770022 -1.0195397 0.30800995] [ 0.68658954 -0.6936696 -0.13354972 ... -0.7189759 0.3433241 -0.26580855] [-1.2855325 -0.49261376 -0.9741812 ... -0.5456657 0.78525794 0.6687624 ]] [[-2.7827234 0.38189122 1.8310126 ... -0.6216843 1.320955 -0.3540029 ] [-0.19416508 -1.8258588 -0.25547606 ... -0.16466473 0.0732372 -1.8869686 ] [-1.0864236 -0.40400484 -0.7857032 ... -0.8915779 1.2923039 0.4241138 ] ... [-0.06925923 1.6154015 1.15112 ... -1.5249363 -1.9384378 -0.49228904] [-1.0537382 -2.2949233 -0.33480954 ... -1.3271589 -0.67108697 0.90593284] [-0.13351439 -0.25975475 -2.491153 ... -1.1385083 -0.22365865 0.986184 ]] ... [[-1.7575657 0.82340235 0.5282818 ... 0.315628 0.24910788 -1.6737157 ] [-0.45195028 -0.6303937 -0.42992046 ... 
-0.58279055 -1.6213357 -1.0091739 ] [ 1.2088282 -2.2385159 1.3086374 ... 1.0372181 -2.147782 0.8343207 ] ... [-1.1493917 -1.5493675 -2.160047 ... -0.7146968 0.364565 -1.1443156 ] [-0.9408961 1.2070343 -0.05406069 ... -0.2958144 -0.02490649 0.07969399] [ 0.35124123 -2.413191 -1.1477374 ... 0.1557584 0.4538457 0.87931204]] [[ 0.8308434 0.11184308 -0.6373052 ... 1.3922002 -1.8671618 -0.34056258] [ 1.571927 -0.6337503 -0.03748717 ... -0.785527 0.64100593 0.37242576] [ 0.65456337 0.71004564 1.0907019 ... -1.386203 -0.04992704 0.69954437] ... [-0.6814733 -0.12692389 -0.80632585 ... -2.576286 0.5191352 -1.2192087 ] [-0.09369315 1.239931 1.4082626 ... -0.20200932 -0.14555715 -0.10283282] [ 0.3715855 0.3852941 -0.38449526 ... 1.1568513 -0.6217576 -0.2170222 ]] [[-2.121914 -0.79116887 -0.11356554 ... 1.1507577 -0.9479155 0.02505153] [ 1.1952298 -0.5612174 0.68771845 ... 1.491643 -0.25817502 -1.646571 ] [-0.02669161 -0.87330896 -0.5096048 ... -0.97613543 -1.3475091 0.90505034] ... [-1.4181126 -0.26215556 -0.8197149 ... 0.9570646 -1.118527 -0.01815463] [ 0.47363383 0.18706337 -1.5405977 ... 1.9373162 -0.5550863 0.38044426] [ 1.5562621 1.1232134 2.814614 ... 1.2420726 -0.68545175 0.6137284 ]]]; ov_res: [[[ 0.03633688 0.8628996 -1.6779214 ... -0.3302475 0.08197331 1.0565659 ] [-0.2537048 0.8426449 -1.4121873 ... -0.7568591 1.8776693 -0.52525675] [ 0.1801897 0.69552696 0.8024265 ... -0.33690646 1.8493044 0.4817143 ] ... [ 1.5183744 -0.8506321 1.0761634 ... -0.39242113 1.1331497 1.376647 ] [-0.05692248 -1.8988767 -1.0298613 ... 0.54354215 0.13983822 -0.82563114] [-1.9803492 1.381132 1.762511 ... 1.8585508 -1.1662843 -0.20135735]] [[-0.35820282 0.76068586 1.0921742 ... -0.3476266 1.2500333 -0.0646111 ] [ 0.47956893 -0.6909083 1.4455326 ... -0.8217122 0.67896146 -1.2887226 ] [-0.14273107 -0.71559817 -0.8813999 ... -0.7948217 0.27395722 -0.88344556] ... [-1.2113028 -0.53198737 0.14284183 ... -0.42770022 -1.0195397 0.30800995] [ 0.68658954 -0.6936696 -0.13354972 ... 
-0.7189759 0.3433241 -0.26580855] [-1.2855325 -0.49261376 -0.9741812 ... -0.5456657 0.78525794 0.6687624 ]] [[-2.7827234 0.38189122 1.8310126 ... -0.6216843 1.320955 -0.3540029 ] [-0.19416508 -1.8258588 -0.25547606 ... -0.16466473 0.0732372 -1.8869686 ] [-1.0864236 -0.40400484 -0.7857032 ... -0.8915779 1.2923039 0.4241138 ] ... [-0.06925923 1.6154015 1.15112 ... -1.5249363 -1.9384378 -0.49228904] [-1.0537382 -2.2949233 -0.33480954 ... -1.3271589 -0.67108697 0.90593284] [-0.13351439 -0.25975475 -2.491153 ... -1.1385083 -0.22365865 0.986184 ]] ... [[-1.7575657 0.82340235 0.5282818 ... 0.315628 0.24910788 -1.6737157 ] [-0.45195028 -0.6303937 -0.42992046 ... -0.58279055 -1.6213357 -1.0091739 ] [ 1.2088282 -2.2385159 1.3086374 ... 1.0372181 -2.147782 0.8343207 ] ... [-1.1493917 -1.5493675 -2.160047 ... -0.7146968 0.364565 -1.1443156 ] [-0.9408961 1.2070343 -0.05406069 ... -0.2958144 -0.02490649 0.07969399] [ 0.35124123 -2.413191 -1.1477374 ... 0.1557584 0.4538457 0.87931204]] [[ 0.8308434 0.11184308 -0.6373052 ... 1.3922002 -1.8671618 -0.34056258] [ 1.571927 -0.6337503 -0.03748717 ... -0.785527 0.64100593 0.37242576] [ 0.65456337 0.71004564 1.0907019 ... -1.386203 -0.04992704 0.69954437] ... [-0.6814733 -0.12692389 -0.80632585 ... -2.576286 0.5191352 -1.2192087 ] [-0.09369315 1.239931 1.4082626 ... -0.20200932 -0.14555715 -0.10283282] [ 0.3715855 0.3852941 -0.38449526 ... 1.1568513 -0.6217576 -0.2170222 ]] [[-2.121914 -0.79116887 -0.11356554 ... 1.1507577 -0.9479155 0.02505153] [ 1.1952298 -0.5612174 0.68771845 ... 1.491643 -0.25817502 -1.646571 ] [-0.02669161 -0.87330896 -0.5096048 ... -0.97613543 -1.3475091 0.90505034] ... [-1.4181126 -0.26215556 -0.8197149 ... 0.9570646 -1.118527 -0.01815463] [ 0.47363383 0.18706337 -1.5405977 ... 1.9373162 -0.5550863 0.38044426] [ 1.5562621 1.1232134 2.814614 ... 1.2420726 -0.68545175 0.6137284 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4815.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0.31231117 -0.54632336 0.03222322 ... 0.6976871 0.7211465 -1.9658282 ] [ 0.33777365 0.6555079 -1.0950792 ... 0.18428385 -0.33868775 0.55375206] [ 0.98971605 -1.4108086 0.2659959 ... -3.5501304 -1.6713747 0.13005637] ... [ 1.1019018 0.3050785 -1.4000247 ... -0.17855935 -2.3299527 -0.96375555] [ 0.04715297 -0.50235724 -1.2706338 ... 0.32472005 0.4666407 -1.2355958 ] [-0.1032918 0.40184438 -0.8316015 ... -0.65811425 0.82080066 1.072752 ]] [[ 0.9200238 -0.1113705 -0.5201533 ... 1.0989149 -1.3181148 -0.77444994] [-0.70561534 0.28530315 -0.7153932 ... 0.68090117 0.27307266 -1.4080861 ] [-0.8081489 0.24615884 0.36186144 ... -0.19681147 -0.9402034 0.36606893] ... [ 0.8610984 -0.8099769 0.455647 ... 0.8881559 -1.2257974 0.7284083 ] [-0.79427564 -0.47254366 -0.9541119 ... 0.33188498 -0.47979 -1.2553718 ] [ 0.652789 1.2818543 2.4588733 ... -0.16964294 1.6123779 0.18829575]] [[ 0.649342 -0.28654206 0.1695606 ... 0.51928425 0.95939875 -0.61587465] [ 0.28854522 -0.24073349 -0.7239092 ... 0.64311737 1.5497533 -0.07397084] [ 0.29954925 0.3511979 0.05163781 ... 0.16796164 0.03725388 -0.59852827] ... [ 1.7532817 0.12733033 0.43521118 ... 0.7137269 -0.4227071 0.04761374] [ 1.0797478 -0.47916937 -0.34816206 ... -0.7198395 0.35825554 -1.8299568 ] [ 0.05052305 -0.7235888 -0.8587586 ... -0.19704871 -0.3818501 0.02277446]] ... [[-1.5731297 1.8596792 1.0354992 ... -0.4747383 -1.3174456 0.1949805 ] [-0.3346382 0.7719989 -0.24570619 ... 
0.72373354 -0.2294267 -0.15872778] [-0.56318325 -0.70265317 0.83303714 ... 0.38141593 -0.3547264 -1.2793008 ] ... [ 0.07266575 -0.10164364 -0.18033324 ... 0.17872709 0.13892849 -0.92555165] [ 0.2544812 0.94355005 -1.1459423 ... -0.986384 0.7091061 -1.2757336 ] [ 0.20882526 0.42658153 -1.2197216 ... -0.30970457 -0.04491751 -0.8876437 ]] [[-0.12332027 -1.1092104 0.42493767 ... 0.59117883 -0.6255509 0.30039388] [-0.54054034 0.55673015 0.9400062 ... 0.07419314 0.10907436 -0.6037121 ] [ 0.10602205 -0.7541422 1.2706444 ... -0.5906942 -1.7317511 0.9090013 ] ... [-0.3051811 -0.27340567 1.9295539 ... 0.14220764 1.1075547 -0.61312777] [ 0.3154518 1.9065673 -0.2367173 ... 0.06811719 -0.05767337 0.822141 ] [ 1.6544045 0.07873486 0.11821831 ... -0.20256272 0.28438443 0.3461589 ]] [[-0.31181264 0.23684317 0.610979 ... -0.53554964 0.17714839 2.9594429 ] [-1.3972346 0.9704593 1.3777306 ... 0.8714293 -1.3576584 -0.39738804] [-0.3294436 -1.2859262 -1.4349848 ... 0.6845335 0.70986044 -0.310094 ] ... [ 2.0561833 1.2122205 0.15793276 ... -0.7283714 0.17286806 1.7729069 ] [ 0.55420893 0.31049502 -0.667122 ... -1.1040384 0.7576551 1.1657128 ] [ 1.8645858 0.3263537 1.484133 ... 1.3386028 -0.65359086 -0.3067376 ]]]; ov_res: [[[ 0.31231117 -0.54632336 0.03222322 ... 0.6976871 0.7211465 -1.9658282 ] [ 0.33777365 0.6555079 -1.0950792 ... 0.18428385 -0.33868775 0.55375206] [ 0.98971605 -1.4108086 0.2659959 ... -3.5501304 -1.6713747 0.13005637] ... [ 1.1019018 0.3050785 -1.4000247 ... -0.17855935 -2.3299527 -0.96375555] [ 0.04715297 -0.50235724 -1.2706338 ... 0.32472005 0.4666407 -1.2355958 ] [-0.1032918 0.40184438 -0.8316015 ... -0.65811425 0.82080066 1.072752 ]] [[ 0.9200238 -0.1113705 -0.5201533 ... 1.0989149 -1.3181148 -0.77444994] [-0.70561534 0.28530315 -0.7153932 ... 0.68090117 0.27307266 -1.4080861 ] [-0.8081489 0.24615884 0.36186144 ... -0.19681147 -0.9402034 0.36606893] ... [ 0.8610984 -0.8099769 0.455647 ... 0.8881559 -1.2257974 0.7284083 ] [-0.79427564 -0.47254366 -0.9541119 ... 
0.33188498 -0.47979 -1.2553718 ] [ 0.652789 1.2818543 2.4588733 ... -0.16964294 1.6123779 0.18829575]] [[ 0.649342 -0.28654206 0.1695606 ... 0.51928425 0.95939875 -0.61587465] [ 0.28854522 -0.24073349 -0.7239092 ... 0.64311737 1.5497533 -0.07397084] [ 0.29954925 0.3511979 0.05163781 ... 0.16796164 0.03725388 -0.59852827] ... [ 1.7532817 0.12733033 0.43521118 ... 0.7137269 -0.4227071 0.04761374] [ 1.0797478 -0.47916937 -0.34816206 ... -0.7198395 0.35825554 -1.8299568 ] [ 0.05052305 -0.7235888 -0.8587586 ... -0.19704871 -0.3818501 0.02277446]] ... [[-1.5731297 1.8596792 1.0354992 ... -0.4747383 -1.3174456 0.1949805 ] [-0.3346382 0.7719989 -0.24570619 ... 0.72373354 -0.2294267 -0.15872778] [-0.56318325 -0.70265317 0.83303714 ... 0.38141593 -0.3547264 -1.2793008 ] ... [ 0.07266575 -0.10164364 -0.18033324 ... 0.17872709 0.13892849 -0.92555165] [ 0.2544812 0.94355005 -1.1459423 ... -0.986384 0.7091061 -1.2757336 ] [ 0.20882526 0.42658153 -1.2197216 ... -0.30970457 -0.04491751 -0.8876437 ]] [[-0.12332027 -1.1092104 0.42493767 ... 0.59117883 -0.6255509 0.30039388] [-0.54054034 0.55673015 0.9400062 ... 0.07419314 0.10907436 -0.6037121 ] [ 0.10602205 -0.7541422 1.2706444 ... -0.5906942 -1.7317511 0.9090013 ] ... [-0.3051811 -0.27340567 1.9295539 ... 0.14220764 1.1075547 -0.61312777] [ 0.3154518 1.9065673 -0.2367173 ... 0.06811719 -0.05767337 0.822141 ] [ 1.6544045 0.07873486 0.11821831 ... -0.20256272 0.28438443 0.3461589 ]] [[-0.31181264 0.23684317 0.610979 ... -0.53554964 0.17714839 2.9594429 ] [-1.3972346 0.9704593 1.3777306 ... 0.8714293 -1.3576584 -0.39738804] [-0.3294436 -1.2859262 -1.4349848 ... 0.6845335 0.70986044 -0.310094 ] ... [ 2.0561833 1.2122205 0.15793276 ... -0.7283714 0.17286806 1.7729069 ] [ 0.55420893 0.31049502 -0.667122 ... -1.1040384 0.7576551 1.1657128 ] [ 1.8645858 0.3263537 1.484133 ... 1.3386028 -0.65359086 -0.3067376 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4817.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.6278148 0.3455384 0.8818925 ... -0.32212314 -1.4509151 1.4818567 ] [ 0.4295622 1.4397101 0.3804839 ... -1.216569 0.36468565 0.14743002] [-0.6233493 0.5594321 -0.07268189 ... 0.14156297 -0.08289406 -0.2633871 ] ... [-0.21604514 0.39135808 0.7460531 ... -1.4567627 -0.38767695 1.3582937 ] [ 0.1518736 -1.4947901 0.16116753 ... 0.25156426 -0.79238 -1.2115693 ] [-1.0263569 -0.33005866 0.34560636 ... -0.38735884 1.405397 -0.7192924 ]] [[-1.8705561 -1.9777198 -0.14007765 ... 0.53163075 0.44087982 -0.8364469 ] [-0.7317596 1.1398492 -0.6121129 ... 0.5649704 0.8735613 -1.3723207 ] [-0.05658985 0.6204239 -0.35715157 ... -0.6938187 0.440685 -0.02111184] ... [-1.0202655 -0.22400552 0.17964458 ... 0.45295215 0.6500551 0.04816708] [ 1.8238236 -0.777123 -0.9433019 ... -0.629697 0.06269679 -0.62949485] [-0.04459485 0.85370076 -1.316314 ... 0.9806737 0.6620196 -0.389318 ]] [[-1.5443357 -0.8111117 0.15222844 ... 1.0319414 -0.4739741 -0.05301999] [ 0.90077996 1.3678805 -0.11905893 ... -0.70619607 0.6993906 1.4475125 ] [-0.8397936 -0.5037705 0.76159775 ... -0.30726206 0.9176196 -0.51446074] ... [ 0.15260836 -0.34248063 -0.86510956 ... 0.15193796 0.63764465 -1.4618134 ] [ 0.17728369 0.275036 0.06167925 ... -0.17256673 -0.65169156 0.73277456] [-0.04270249 -1.3273733 -1.1982508 ... 3.1551154 0.37180418 0.51250637]] ... [[-1.1957835 -0.13568379 0.20830943 ... -0.24932961 -0.8543307 0.4284277 ] [ 1.2974786 -0.52318454 0.5083416 ... 
-0.83982736 -1.2318561 -2.3563442 ] [-2.3439155 0.22447628 1.7577847 ... -0.61102 2.6339438 0.8286081 ] ... [-0.32845235 -0.09420809 -0.9758644 ... 0.92239445 1.7011108 0.31994855] [-0.5631571 -0.9143312 -0.72871286 ... 0.07540538 -0.13480337 0.88012844] [ 0.5610668 1.4858807 0.01567365 ... 1.7126477 -0.20746893 1.1800364 ]] [[ 0.97168434 -0.65460885 0.17309119 ... -0.17228796 1.1654959 1.062721 ] [ 0.25646847 0.98662484 -0.76993835 ... 0.890915 0.11749789 -0.92628145] [ 0.74654037 -0.83593464 0.86074257 ... -0.34646326 -0.6465744 -1.1587362 ] ... [ 0.27379307 0.14749606 -0.1831037 ... -2.562446 0.21391721 -1.0613191 ] [-0.2422654 -0.16881283 -0.1426548 ... -1.0475714 2.185206 0.26298413] [ 1.5837461 -0.32018328 -0.9233221 ... 1.149553 0.0839385 -0.47505352]] [[ 0.62491006 -0.9386286 0.3036424 ... 0.10992681 -1.433799 -0.82150865] [ 0.74040955 -0.48135877 -0.15185076 ... 0.2828942 1.6211897 -0.347248 ] [-1.2601783 1.4867876 -0.12225588 ... 0.13077913 -0.7168251 -0.21361329] ... [-0.9801931 0.29615963 -0.82200795 ... -0.5623916 0.6392218 -0.1835847 ] [ 1.8461349 2.4843233 -0.17255561 ... -0.48694146 0.1428874 0.80259645] [-1.4513512 0.5283355 -1.6852539 ... 0.93908036 1.5993549 0.41866633]]]; ov_res: [[[ 0.6278148 0.3455384 0.8818925 ... -0.32212314 -1.4509151 1.4818567 ] [ 0.4295622 1.4397101 0.3804839 ... -1.216569 0.36468565 0.14743002] [-0.6233493 0.5594321 -0.07268189 ... 0.14156297 -0.08289406 -0.2633871 ] ... [-0.21604514 0.39135808 0.7460531 ... -1.4567627 -0.38767695 1.3582937 ] [ 0.1518736 -1.4947901 0.16116753 ... 0.25156426 -0.79238 -1.2115693 ] [-1.0263569 -0.33005866 0.34560636 ... -0.38735884 1.405397 -0.7192924 ]] [[-1.8705561 -1.9777198 -0.14007765 ... 0.53163075 0.44087982 -0.8364469 ] [-0.7317596 1.1398492 -0.6121129 ... 0.5649704 0.8735613 -1.3723207 ] [-0.05658985 0.6204239 -0.35715157 ... -0.6938187 0.440685 -0.02111184] ... [-1.0202655 -0.22400552 0.17964458 ... 0.45295215 0.6500551 0.04816708] [ 1.8238236 -0.777123 -0.9433019 ... 
-0.629697 0.06269679 -0.62949485] [-0.04459485 0.85370076 -1.316314 ... 0.9806737 0.6620196 -0.389318 ]] [[-1.5443357 -0.8111117 0.15222844 ... 1.0319414 -0.4739741 -0.05301999] [ 0.90077996 1.3678805 -0.11905893 ... -0.70619607 0.6993906 1.4475125 ] [-0.8397936 -0.5037705 0.76159775 ... -0.30726206 0.9176196 -0.51446074] ... [ 0.15260836 -0.34248063 -0.86510956 ... 0.15193796 0.63764465 -1.4618134 ] [ 0.17728369 0.275036 0.06167925 ... -0.17256673 -0.65169156 0.73277456] [-0.04270249 -1.3273733 -1.1982508 ... 3.1551154 0.37180418 0.51250637]] ... [[-1.1957835 -0.13568379 0.20830943 ... -0.24932961 -0.8543307 0.4284277 ] [ 1.2974786 -0.52318454 0.5083416 ... -0.83982736 -1.2318561 -2.3563442 ] [-2.3439155 0.22447628 1.7577847 ... -0.61102 2.6339438 0.8286081 ] ... [-0.32845235 -0.09420809 -0.9758644 ... 0.92239445 1.7011108 0.31994855] [-0.5631571 -0.9143312 -0.72871286 ... 0.07540538 -0.13480337 0.88012844] [ 0.5610668 1.4858807 0.01567365 ... 1.7126477 -0.20746893 1.1800364 ]] [[ 0.97168434 -0.65460885 0.17309119 ... -0.17228796 1.1654959 1.062721 ] [ 0.25646847 0.98662484 -0.76993835 ... 0.890915 0.11749789 -0.92628145] [ 0.74654037 -0.83593464 0.86074257 ... -0.34646326 -0.6465744 -1.1587362 ] ... [ 0.27379307 0.14749606 -0.1831037 ... -2.562446 0.21391721 -1.0613191 ] [-0.2422654 -0.16881283 -0.1426548 ... -1.0475714 2.185206 0.26298413] [ 1.5837461 -0.32018328 -0.9233221 ... 1.149553 0.0839385 -0.47505352]] [[ 0.62491006 -0.9386286 0.3036424 ... 0.10992681 -1.433799 -0.82150865] [ 0.74040955 -0.48135877 -0.15185076 ... 0.2828942 1.6211897 -0.347248 ] [-1.2601783 1.4867876 -0.12225588 ... 0.13077913 -0.7168251 -0.21361329] ... [-0.9801931 0.29615963 -0.82200795 ... -0.5623916 0.6392218 -0.1835847 ] [ 1.8461349 2.4843233 -0.17255561 ... -0.48694146 0.1428874 0.80259645] [-1.4513512 0.5283355 -1.6852539 ... 0.93908036 1.5993549 0.41866633]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:3 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4819.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-2.9055517e+00 4.8089299e-01 1.5469433e+00 ... -8.7185577e-02 2.5845912e-01 7.1006501e-01] [-1.2678682e+00 -5.2094442e-01 -1.2704287e-01 ... -1.5531251e-01 1.5546330e+00 6.0644686e-01] [-9.5355141e-01 -1.3002586e+00 9.1428286e-01 ... -4.7745103e-01 1.0130888e+00 8.0306244e-01]] [[ 2.0410016e+00 -1.9166919e+00 2.5061974e-01 ... -1.0872937e+00 -8.8594675e-01 -9.6980810e-01] [-1.3369261e+00 -2.5211694e+00 1.6591356e+00 ... -2.4918331e-01 -4.3614358e-01 1.1021455e-01] [-7.8046429e-01 -3.5625342e-02 1.1200911e+00 ... 1.7414925e-01 7.5238341e-01 -1.3614693e-01]] [[ 2.3105575e-01 -9.5500660e-01 -1.2621620e+00 ... -8.2018673e-01 8.9173955e-01 3.0919945e-01] [-2.5063202e+00 5.7118688e-02 1.7632359e+00 ... -1.2304281e+00 -3.5412902e-01 -9.0028845e-02] [-7.3096305e-01 3.6451645e-02 -2.5240245e-01 ... 4.6603423e-02 7.5753337e-01 -1.1465845e+00]] ... [[ 6.4677411e-01 4.3133327e-01 7.1501476e-01 ... 1.1127114e+00 -7.3572278e-01 9.0191704e-01] [-5.9441662e-01 -8.9083439e-01 1.5394296e-01 ... 1.4367590e+00 -1.3219123e+00 8.1617214e-02] [ 9.5454967e-01 4.8433056e-01 4.6597016e-01 ... 1.1272695e+00 -2.1833303e+00 -1.5966814e+00]] [[-7.0764810e-01 -6.1876559e-01 2.3434480e-01 ... -1.2144219e+00 -2.0370512e+00 9.0235054e-02] [-1.0605627e+00 1.3024983e+00 -1.7018471e+00 ... 1.0319570e+00 9.7464556e-01 6.4952451e-01] [ 9.3140751e-02 8.7883365e-01 -7.7816254e-01 ... 1.1241716e+00 5.5103272e-01 6.0649633e-01]] [[ 8.0815887e-01 6.7153478e-01 4.2089787e-01 ... 
-2.4844010e+00 7.8020817e-01 1.4082954e+00] [ 8.4708118e-01 2.1724936e-03 -1.7088426e+00 ... -7.3174226e-01 -2.6019245e-01 -3.4486800e-01] [-1.0204347e+00 -1.0061507e+00 1.1468241e+00 ... -4.9200386e-01 -8.0322886e-01 1.3622950e+00]]]; ov_res: [[[-2.9055517e+00 4.8089299e-01 1.5469433e+00 ... -8.7185577e-02 2.5845912e-01 7.1006501e-01] [-1.2678682e+00 -5.2094442e-01 -1.2704287e-01 ... -1.5531251e-01 1.5546330e+00 6.0644686e-01] [-9.5355141e-01 -1.3002586e+00 9.1428286e-01 ... -4.7745103e-01 1.0130888e+00 8.0306244e-01]] [[ 2.0410016e+00 -1.9166919e+00 2.5061974e-01 ... -1.0872937e+00 -8.8594675e-01 -9.6980810e-01] [-1.3369261e+00 -2.5211694e+00 1.6591356e+00 ... -2.4918331e-01 -4.3614358e-01 1.1021455e-01] [-7.8046429e-01 -3.5625342e-02 1.1200911e+00 ... 1.7414925e-01 7.5238341e-01 -1.3614693e-01]] [[ 2.3105575e-01 -9.5500660e-01 -1.2621620e+00 ... -8.2018673e-01 8.9173955e-01 3.0919945e-01] [-2.5063202e+00 5.7118688e-02 1.7632359e+00 ... -1.2304281e+00 -3.5412902e-01 -9.0028845e-02] [-7.3096305e-01 3.6451645e-02 -2.5240245e-01 ... 4.6603423e-02 7.5753337e-01 -1.1465845e+00]] ... [[ 6.4677411e-01 4.3133327e-01 7.1501476e-01 ... 1.1127114e+00 -7.3572278e-01 9.0191704e-01] [-5.9441662e-01 -8.9083439e-01 1.5394296e-01 ... 1.4367590e+00 -1.3219123e+00 8.1617214e-02] [ 9.5454967e-01 4.8433056e-01 4.6597016e-01 ... 1.1272695e+00 -2.1833303e+00 -1.5966814e+00]] [[-7.0764810e-01 -6.1876559e-01 2.3434480e-01 ... -1.2144219e+00 -2.0370512e+00 9.0235054e-02] [-1.0605627e+00 1.3024983e+00 -1.7018471e+00 ... 1.0319570e+00 9.7464556e-01 6.4952451e-01] [ 9.3140751e-02 8.7883365e-01 -7.7816254e-01 ... 1.1241716e+00 5.5103272e-01 6.0649633e-01]] [[ 8.0815887e-01 6.7153478e-01 4.2089787e-01 ... -2.4844010e+00 7.8020817e-01 1.4082954e+00] [ 8.4708118e-01 2.1724936e-03 -1.7088426e+00 ... -7.3174226e-01 -2.6019245e-01 -3.4486800e-01] [-1.0204347e+00 -1.0061507e+00 1.1468241e+00 ... -4.9200386e-01 -8.0322886e-01 1.3622950e+00]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4821.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 3.53179604e-01 -1.61589563e+00 -1.73362231e+00 ... 1.06571400e+00 -8.66696477e-01 7.83149675e-02] [-1.73362231e+00 -2.73860514e-01 1.10808432e-01 ... 7.83149675e-02 -2.85817027e-01 -1.65465391e+00] [ 1.10808432e-01 -1.02025703e-01 1.58206213e+00 ... -1.65465391e+00 2.52304733e-01 5.29168725e-01] ... [ 1.90828967e+00 1.14066088e+00 1.12602353e+00 ... -1.27385151e+00 1.50762749e+00 -1.39448547e+00] [ 1.12602353e+00 5.98224282e-01 2.18600389e-02 ... -1.39448547e+00 5.49253941e-01 -1.15470636e+00] [ 2.18600389e-02 9.98055816e-01 2.37127209e+00 ... -1.15470636e+00 -6.41748011e-02 1.11783135e+00]] [[ 1.27380729e+00 -4.50464152e-02 5.82209289e-01 ... -1.40352237e+00 5.10310471e-01 2.28399801e+00] [ 5.82209289e-01 2.72558093e-01 -1.29961598e+00 ... 2.28399801e+00 1.73771694e-01 -7.19680429e-01] [-1.29961598e+00 -6.33286417e-01 7.22390294e-01 ... -7.19680429e-01 4.38331306e-01 -8.15443873e-01] ... [-1.68647373e+00 1.58421159e+00 -4.94264811e-01 ... -1.24498166e-01 -1.24748874e+00 3.25958371e-01] [-4.94264811e-01 3.78492147e-01 2.40737647e-01 ... 3.25958371e-01 1.89393437e+00 2.63512909e-01] [ 2.40737647e-01 -6.62723482e-01 -8.27791810e-01 ... 2.63512909e-01 1.81969225e+00 1.46281242e+00]] [[ 1.18219346e-01 9.22520161e-01 -9.40306365e-01 ... 1.16667427e-01 9.90903020e-01 1.83177114e-01] [-9.40306365e-01 1.46656942e+00 7.85878241e-01 ... 1.83177114e-01 2.48433247e-01 -6.97109342e-01] [ 7.85878241e-01 1.09667635e+00 -1.51704812e+00 ... 
-6.97109342e-01 1.26881599e-01 1.64145744e+00] ... [ 3.74607742e-01 -9.07837927e-01 -2.91504574e+00 ... 5.30619919e-01 -5.96620440e-01 2.44174957e-01] [-2.91504574e+00 -3.44266564e-01 1.13372803e-01 ... 2.44174957e-01 -7.78054669e-02 2.74556756e-01] [ 1.13372803e-01 -1.79035634e-01 2.63907492e-01 ... 2.74556756e-01 3.31218004e-01 4.06499777e-04]] ... [[ 1.42318797e+00 -9.40215766e-01 1.02046758e-01 ... 9.83729839e-01 -1.97598964e-01 4.40013051e-01] [ 1.02046758e-01 1.47836924e+00 1.27850199e+00 ... 4.40013051e-01 -2.07101658e-01 2.26187438e-01] [ 1.27850199e+00 7.65605092e-01 -1.39720833e+00 ... 2.26187438e-01 1.22664511e+00 -3.70629609e-01] ... [ 1.08012056e+00 2.58045048e-01 -2.14164376e-01 ... 1.93893421e+00 1.74915910e-01 2.21614623e+00] [-2.14164376e-01 -5.25046647e-01 7.75819361e-01 ... 2.21614623e+00 -8.25105071e-01 5.35050869e-01] [ 7.75819361e-01 -9.36163545e-01 -8.08988333e-01 ... 5.35050869e-01 -2.04706860e+00 -8.33577812e-01]] [[-3.61876190e-01 9.24325824e-01 -1.44363129e+00 ... 1.24301910e+00 1.16181576e+00 -3.27711463e-01] [-1.44363129e+00 5.65110803e-01 -2.74444729e-01 ... -3.27711463e-01 1.53344381e+00 2.62484813e+00] [-2.74444729e-01 1.49406791e+00 -9.72195089e-01 ... 2.62484813e+00 3.42236459e-01 1.23263454e+00] ... [ 4.75753158e-01 1.25049472e+00 3.21535140e-01 ... 5.02439618e-01 -2.30597353e+00 2.07809830e+00] [ 3.21535140e-01 -4.01746690e-01 -1.20061564e+00 ... 2.07809830e+00 -1.96480021e-01 1.83407950e+00] [-1.20061564e+00 -3.49546708e-02 2.25609154e-01 ... 1.83407950e+00 -1.57625926e+00 1.31274283e+00]] [[ 1.68099272e+00 -9.37915802e-01 -3.95665079e-01 ... -1.07696772e+00 -1.45034954e-01 8.73614848e-01] [-3.95665079e-01 7.28223994e-02 1.64370835e-01 ... 8.73614848e-01 6.34867609e-01 -5.89130163e-01] [ 1.64370835e-01 -1.94535398e+00 -3.96220058e-01 ... -5.89130163e-01 -4.44104075e-01 2.26571530e-01] ... [ 5.06201327e-01 6.80662632e-01 1.24616468e+00 ... 
4.21907634e-01 -1.36618778e-01 3.20716598e-03] [ 1.24616468e+00 4.91294652e-01 -5.91094136e-01 ... 3.20716598e-03 -1.34538507e+00 -2.13236427e+00] [-5.91094136e-01 5.73487282e-02 3.00627351e-01 ... -2.13236427e+00 -5.02570748e-01 6.59370780e-01]]]; ov_res: [[[ 3.53179604e-01 -1.61589563e+00 -1.73362231e+00 ... 1.06571400e+00 -8.66696477e-01 7.83149675e-02] [-1.73362231e+00 -2.73860514e-01 1.10808432e-01 ... 7.83149675e-02 -2.85817027e-01 -1.65465391e+00] [ 1.10808432e-01 -1.02025703e-01 1.58206213e+00 ... -1.65465391e+00 2.52304733e-01 5.29168725e-01] ... [ 1.90828967e+00 1.14066088e+00 1.12602353e+00 ... -1.27385151e+00 1.50762749e+00 -1.39448547e+00] [ 1.12602353e+00 5.98224282e-01 2.18600389e-02 ... -1.39448547e+00 5.49253941e-01 -1.15470636e+00] [ 2.18600389e-02 9.98055816e-01 2.37127209e+00 ... -1.15470636e+00 -6.41748011e-02 1.11783135e+00]] [[ 1.27380729e+00 -4.50464152e-02 5.82209289e-01 ... -1.40352237e+00 5.10310471e-01 2.28399801e+00] [ 5.82209289e-01 2.72558093e-01 -1.29961598e+00 ... 2.28399801e+00 1.73771694e-01 -7.19680429e-01] [-1.29961598e+00 -6.33286417e-01 7.22390294e-01 ... -7.19680429e-01 4.38331306e-01 -8.15443873e-01] ... [-1.68647373e+00 1.58421159e+00 -4.94264811e-01 ... -1.24498166e-01 -1.24748874e+00 3.25958371e-01] [-4.94264811e-01 3.78492147e-01 2.40737647e-01 ... 3.25958371e-01 1.89393437e+00 2.63512909e-01] [ 2.40737647e-01 -6.62723482e-01 -8.27791810e-01 ... 2.63512909e-01 1.81969225e+00 1.46281242e+00]] [[ 1.18219346e-01 9.22520161e-01 -9.40306365e-01 ... 1.16667427e-01 9.90903020e-01 1.83177114e-01] [-9.40306365e-01 1.46656942e+00 7.85878241e-01 ... 1.83177114e-01 2.48433247e-01 -6.97109342e-01] [ 7.85878241e-01 1.09667635e+00 -1.51704812e+00 ... -6.97109342e-01 1.26881599e-01 1.64145744e+00] ... [ 3.74607742e-01 -9.07837927e-01 -2.91504574e+00 ... 5.30619919e-01 -5.96620440e-01 2.44174957e-01] [-2.91504574e+00 -3.44266564e-01 1.13372803e-01 ... 
2.44174957e-01 -7.78054669e-02 2.74556756e-01] [ 1.13372803e-01 -1.79035634e-01 2.63907492e-01 ... 2.74556756e-01 3.31218004e-01 4.06499777e-04]] ... [[ 1.42318797e+00 -9.40215766e-01 1.02046758e-01 ... 9.83729839e-01 -1.97598964e-01 4.40013051e-01] [ 1.02046758e-01 1.47836924e+00 1.27850199e+00 ... 4.40013051e-01 -2.07101658e-01 2.26187438e-01] [ 1.27850199e+00 7.65605092e-01 -1.39720833e+00 ... 2.26187438e-01 1.22664511e+00 -3.70629609e-01] ... [ 1.08012056e+00 2.58045048e-01 -2.14164376e-01 ... 1.93893421e+00 1.74915910e-01 2.21614623e+00] [-2.14164376e-01 -5.25046647e-01 7.75819361e-01 ... 2.21614623e+00 -8.25105071e-01 5.35050869e-01] [ 7.75819361e-01 -9.36163545e-01 -8.08988333e-01 ... 5.35050869e-01 -2.04706860e+00 -8.33577812e-01]] [[-3.61876190e-01 9.24325824e-01 -1.44363129e+00 ... 1.24301910e+00 1.16181576e+00 -3.27711463e-01] [-1.44363129e+00 5.65110803e-01 -2.74444729e-01 ... -3.27711463e-01 1.53344381e+00 2.62484813e+00] [-2.74444729e-01 1.49406791e+00 -9.72195089e-01 ... 2.62484813e+00 3.42236459e-01 1.23263454e+00] ... [ 4.75753158e-01 1.25049472e+00 3.21535140e-01 ... 5.02439618e-01 -2.30597353e+00 2.07809830e+00] [ 3.21535140e-01 -4.01746690e-01 -1.20061564e+00 ... 2.07809830e+00 -1.96480021e-01 1.83407950e+00] [-1.20061564e+00 -3.49546708e-02 2.25609154e-01 ... 1.83407950e+00 -1.57625926e+00 1.31274283e+00]] [[ 1.68099272e+00 -9.37915802e-01 -3.95665079e-01 ... -1.07696772e+00 -1.45034954e-01 8.73614848e-01] [-3.95665079e-01 7.28223994e-02 1.64370835e-01 ... 8.73614848e-01 6.34867609e-01 -5.89130163e-01] [ 1.64370835e-01 -1.94535398e+00 -3.96220058e-01 ... -5.89130163e-01 -4.44104075e-01 2.26571530e-01] ... [ 5.06201327e-01 6.80662632e-01 1.24616468e+00 ... 4.21907634e-01 -1.36618778e-01 3.20716598e-03] [ 1.24616468e+00 4.91294652e-01 -5.91094136e-01 ... 3.20716598e-03 -1.34538507e+00 -2.13236427e+00] [-5.91094136e-01 5.73487282e-02 3.00627351e-01 ... -2.13236427e+00 -5.02570748e-01 6.59370780e-01]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4823.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 7.27620482e-01 5.08741558e-01 -1.11863768e+00 ... -5.91820836e-01 -1.25936759e+00 1.42015815e+00] [-1.11863768e+00 -8.87243867e-01 2.86392033e-01 ... 1.42015815e+00 1.77448916e+00 2.17598557e+00] [-2.10482121e-01 -1.21076278e-01 4.14116085e-01 ... 4.43945587e-01 3.14261943e-01 -3.43748361e-01] ... [ 9.44739163e-01 -1.05703640e+00 -2.49820903e-01 ... 2.13254407e-01 -2.13147298e-01 -5.04679739e-01] [-4.98096734e-01 -9.77697298e-02 8.07192445e-01 ... -1.54486132e+00 8.50378692e-01 5.27359724e-01] [ 8.07192445e-01 6.24231637e-01 3.80291283e-01 ... 5.27359724e-01 2.78921694e-01 5.77439368e-02]] [[-5.49154997e-01 6.62259996e-01 -1.91131270e+00 ... 1.25250506e+00 6.74470603e-01 1.63172746e+00] [-1.91131270e+00 1.03818335e-01 -2.79777437e-01 ... 1.63172746e+00 2.74318314e+00 -1.06215847e+00] [ 7.92850256e-02 -1.78696439e-02 -9.31066990e-01 ... 8.03592622e-01 1.27712822e+00 -2.25575924e-01] ... [-9.69646275e-01 -4.22976941e-01 -1.46881425e+00 ... 3.96248460e-01 1.45365655e-01 1.05418539e+00] [ 1.25078344e+00 1.70142436e+00 -4.33732808e-01 ... 4.08383518e-01 2.16142997e-01 2.43406877e-01] [-4.33732808e-01 -5.09449363e-01 4.32606228e-02 ... 2.43406877e-01 -5.61603189e-01 -3.89033526e-01]] [[-1.26383531e+00 8.98939312e-01 -4.23934937e-01 ... -2.21121907e+00 -8.01235735e-01 -6.60625339e-01] [-4.23934937e-01 -1.06610990e+00 -1.31234527e+00 ... -6.60625339e-01 1.09688604e+00 -7.52476871e-01] [-1.20705418e-01 1.16650701e+00 1.17322063e+00 ... 
-6.14907332e-02 -4.12302732e-01 1.25318944e+00] ... [-4.37428117e-01 1.55020341e-01 -9.74218249e-01 ... -4.35990602e-01 -7.96783388e-01 -5.94912529e-01] [-1.53290009e+00 -1.70041263e+00 8.58676678e-04 ... -9.34235454e-01 -9.88041088e-02 -9.26097482e-02] [ 8.58676678e-04 1.48868310e+00 1.12584424e+00 ... -9.26097482e-02 8.75203311e-01 4.82554227e-01]] ... [[-9.06937756e-03 1.59039199e+00 1.16974699e+00 ... 1.64171290e+00 -9.97499466e-01 7.74362907e-02] [ 1.16974699e+00 2.70203900e+00 5.63920200e-01 ... 7.74362907e-02 8.68325233e-01 -1.38351166e+00] [ 1.08080864e+00 1.25849473e+00 1.87009537e+00 ... -4.08180892e-01 6.17070317e-01 8.72656822e-01] ... [ 1.39965165e+00 6.41509116e-01 2.99302757e-01 ... -4.15887386e-01 2.17314541e-01 9.60884750e-01] [-1.36758828e+00 -3.78634721e-01 4.73092943e-01 ... 1.75100970e+00 -1.04566562e+00 1.59316242e+00] [ 4.73092943e-01 -5.77995777e-01 -1.16001356e+00 ... 1.59316242e+00 6.01407111e-01 -1.11742675e+00]] [[-6.88978910e-01 -1.91143048e+00 1.63406229e+00 ... 1.84628665e-01 1.19489884e+00 -6.68471754e-01] [ 1.63406229e+00 -4.94832486e-01 -2.07855606e+00 ... -6.68471754e-01 -2.62417823e-01 -3.11749965e-01] [-1.45198166e-01 7.70474195e-01 -7.64142931e-01 ... 5.58925986e-01 8.38362873e-01 7.85778344e-01] ... [-6.73239231e-01 1.01972520e+00 -1.44567263e+00 ... 8.48384853e-03 1.21575963e+00 -8.45860481e-01] [-1.10786483e-01 -8.90618980e-01 -2.46835694e-01 ... 4.23347116e-01 6.44664288e-01 3.73969764e-01] [-2.46835694e-01 -1.09243739e+00 1.92242369e-01 ... 3.73969764e-01 9.11256909e-01 -1.39437273e-01]] [[-1.13256454e+00 5.12623906e-01 -5.64524472e-01 ... -7.04238653e-01 -2.70046383e-01 -8.49396467e-01] [-5.64524472e-01 -1.95356929e+00 1.84267759e-01 ... -8.49396467e-01 3.95948559e-01 6.80953026e-01] [-2.56347090e-01 9.42803085e-01 2.96459645e-01 ... 8.68591547e-01 7.05898046e-01 -7.03510761e-01] ... [-1.87620509e+00 -5.55534720e-01 3.42028022e-01 ... 
-1.45400774e+00 -2.91961908e-01 1.31469452e+00] [-2.45155573e+00 -5.28311031e-03 8.66445526e-03 ... -1.02496994e+00 -2.50271583e+00 -4.69617903e-01] [ 8.66445526e-03 1.97491258e-01 1.50564063e+00 ... -4.69617903e-01 -2.90471733e-01 2.02072620e+00]]]; ov_res: [[[ 7.27620482e-01 5.08741558e-01 -1.11863768e+00 ... -5.91820836e-01 -1.25936759e+00 1.42015815e+00] [-1.11863768e+00 -8.87243867e-01 2.86392033e-01 ... 1.42015815e+00 1.77448916e+00 2.17598557e+00] [-2.10482121e-01 -1.21076278e-01 4.14116085e-01 ... 4.43945587e-01 3.14261943e-01 -3.43748361e-01] ... [ 9.44739163e-01 -1.05703640e+00 -2.49820903e-01 ... 2.13254407e-01 -2.13147298e-01 -5.04679739e-01] [-4.98096734e-01 -9.77697298e-02 8.07192445e-01 ... -1.54486132e+00 8.50378692e-01 5.27359724e-01] [ 8.07192445e-01 6.24231637e-01 3.80291283e-01 ... 5.27359724e-01 2.78921694e-01 5.77439368e-02]] [[-5.49154997e-01 6.62259996e-01 -1.91131270e+00 ... 1.25250506e+00 6.74470603e-01 1.63172746e+00] [-1.91131270e+00 1.03818335e-01 -2.79777437e-01 ... 1.63172746e+00 2.74318314e+00 -1.06215847e+00] [ 7.92850256e-02 -1.78696439e-02 -9.31066990e-01 ... 8.03592622e-01 1.27712822e+00 -2.25575924e-01] ... [-9.69646275e-01 -4.22976941e-01 -1.46881425e+00 ... 3.96248460e-01 1.45365655e-01 1.05418539e+00] [ 1.25078344e+00 1.70142436e+00 -4.33732808e-01 ... 4.08383518e-01 2.16142997e-01 2.43406877e-01] [-4.33732808e-01 -5.09449363e-01 4.32606228e-02 ... 2.43406877e-01 -5.61603189e-01 -3.89033526e-01]] [[-1.26383531e+00 8.98939312e-01 -4.23934937e-01 ... -2.21121907e+00 -8.01235735e-01 -6.60625339e-01] [-4.23934937e-01 -1.06610990e+00 -1.31234527e+00 ... -6.60625339e-01 1.09688604e+00 -7.52476871e-01] [-1.20705418e-01 1.16650701e+00 1.17322063e+00 ... -6.14907332e-02 -4.12302732e-01 1.25318944e+00] ... [-4.37428117e-01 1.55020341e-01 -9.74218249e-01 ... -4.35990602e-01 -7.96783388e-01 -5.94912529e-01] [-1.53290009e+00 -1.70041263e+00 8.58676678e-04 ... 
-9.34235454e-01 -9.88041088e-02 -9.26097482e-02] [ 8.58676678e-04 1.48868310e+00 1.12584424e+00 ... -9.26097482e-02 8.75203311e-01 4.82554227e-01]] ... [[-9.06937756e-03 1.59039199e+00 1.16974699e+00 ... 1.64171290e+00 -9.97499466e-01 7.74362907e-02] [ 1.16974699e+00 2.70203900e+00 5.63920200e-01 ... 7.74362907e-02 8.68325233e-01 -1.38351166e+00] [ 1.08080864e+00 1.25849473e+00 1.87009537e+00 ... -4.08180892e-01 6.17070317e-01 8.72656822e-01] ... [ 1.39965165e+00 6.41509116e-01 2.99302757e-01 ... -4.15887386e-01 2.17314541e-01 9.60884750e-01] [-1.36758828e+00 -3.78634721e-01 4.73092943e-01 ... 1.75100970e+00 -1.04566562e+00 1.59316242e+00] [ 4.73092943e-01 -5.77995777e-01 -1.16001356e+00 ... 1.59316242e+00 6.01407111e-01 -1.11742675e+00]] [[-6.88978910e-01 -1.91143048e+00 1.63406229e+00 ... 1.84628665e-01 1.19489884e+00 -6.68471754e-01] [ 1.63406229e+00 -4.94832486e-01 -2.07855606e+00 ... -6.68471754e-01 -2.62417823e-01 -3.11749965e-01] [-1.45198166e-01 7.70474195e-01 -7.64142931e-01 ... 5.58925986e-01 8.38362873e-01 7.85778344e-01] ... [-6.73239231e-01 1.01972520e+00 -1.44567263e+00 ... 8.48384853e-03 1.21575963e+00 -8.45860481e-01] [-1.10786483e-01 -8.90618980e-01 -2.46835694e-01 ... 4.23347116e-01 6.44664288e-01 3.73969764e-01] [-2.46835694e-01 -1.09243739e+00 1.92242369e-01 ... 3.73969764e-01 9.11256909e-01 -1.39437273e-01]] [[-1.13256454e+00 5.12623906e-01 -5.64524472e-01 ... -7.04238653e-01 -2.70046383e-01 -8.49396467e-01] [-5.64524472e-01 -1.95356929e+00 1.84267759e-01 ... -8.49396467e-01 3.95948559e-01 6.80953026e-01] [-2.56347090e-01 9.42803085e-01 2.96459645e-01 ... 8.68591547e-01 7.05898046e-01 -7.03510761e-01] ... [-1.87620509e+00 -5.55534720e-01 3.42028022e-01 ... -1.45400774e+00 -2.91961908e-01 1.31469452e+00] [-2.45155573e+00 -5.28311031e-03 8.66445526e-03 ... -1.02496994e+00 -2.50271583e+00 -4.69617903e-01] [ 8.66445526e-03 1.97491258e-01 1.50564063e+00 ... -4.69617903e-01 -2.90471733e-01 2.02072620e+00]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4825.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[-1.31907690e+00 2.40164861e-01 1.23127446e-01 ... -3.97889763e-01 -3.02950692e+00 3.04011375e-01] [ 1.23127446e-01 -9.29686666e-01 2.77075231e-01 ... 3.04011375e-01 5.85244715e-01 -5.84541857e-01] [ 2.77075231e-01 8.36509317e-02 -3.75161916e-01 ... -5.84541857e-01 -1.66347432e+00 4.56040412e-01] ... [-2.36930907e-01 -9.90626097e-01 9.75502670e-01 ... 2.85278291e-01 -5.83810866e-01 -2.43493244e-02] [ 9.75502670e-01 -7.99527705e-01 -2.93417364e-01 ... -2.43493244e-02 3.12585402e+00 2.89148390e-01] [-2.93417364e-01 2.04943871e+00 -3.96985173e-01 ... 2.89148390e-01 9.44620788e-01 -7.40670040e-02]] [[ 4.45524752e-01 9.84533191e-01 7.89102018e-02 ... -4.40124631e-01 -7.33552754e-01 3.63486856e-01] [ 7.89102018e-02 -2.19003367e+00 -1.99064583e-01 ... 3.63486856e-01 -1.85911155e+00 1.09425938e+00] [-1.99064583e-01 1.60544884e+00 1.53111589e+00 ... 1.09425938e+00 3.60290349e-01 -7.80701101e-01] ... [-3.34481120e-01 9.09576416e-01 8.06525648e-01 ... 1.71405900e+00 -1.12683201e+00 3.14025551e-01] [ 8.06525648e-01 5.07830620e-01 -7.72686899e-01 ... 3.14025551e-01 1.02131927e+00 1.82061398e-04] [-7.72686899e-01 -1.51664579e+00 -9.16003108e-01 ... 1.82061398e-04 2.06775570e+00 3.55937362e-01]] [[ 1.36346757e+00 1.15301955e+00 -1.14430821e+00 ... 1.54043269e+00 2.19811052e-01 1.37541854e+00] [-1.14430821e+00 -3.98790002e-01 -6.36335373e-01 ... 1.37541854e+00 -1.04224324e+00 9.78094041e-01] [-6.36335373e-01 4.85279143e-01 1.00350201e+00 ... 
9.78094041e-01 -2.19271138e-01 1.39681780e+00] ... [-8.93873751e-01 9.21965539e-01 4.21597928e-01 ... -1.95544392e-01 -1.01312423e+00 7.48873293e-01] [ 4.21597928e-01 4.57559109e-01 -1.33158755e+00 ... 7.48873293e-01 -8.90521705e-01 -9.19759691e-01] [-1.33158755e+00 1.08688486e+00 4.40340012e-01 ... -9.19759691e-01 5.90767086e-01 7.86182731e-02]] ... [[ 9.31477368e-01 -5.02925776e-02 -1.83513391e+00 ... -9.80033755e-01 2.10914403e-01 1.22275949e+00] [-1.83513391e+00 -1.53160155e+00 -4.40003425e-01 ... 1.22275949e+00 1.45638883e+00 -2.74648041e-01] [-4.40003425e-01 6.55310094e-01 -1.20032899e-01 ... -2.74648041e-01 2.38380218e+00 -1.07163889e-03] ... [ 2.19294810e+00 -3.32485378e-01 -4.06264305e-01 ... 4.85054888e-02 1.41256523e+00 1.10745348e-01] [-4.06264305e-01 -2.70799398e-01 -2.41882825e+00 ... 1.10745348e-01 -7.50676394e-01 -1.19881237e+00] [-2.41882825e+00 3.22362155e-01 3.44588548e-01 ... -1.19881237e+00 9.08317804e-01 -6.10540174e-02]] [[ 2.91035205e-01 -2.67840648e+00 1.80559945e+00 ... 1.24953055e+00 -3.17993701e-01 -7.82329440e-02] [ 1.80559945e+00 1.47177196e+00 7.55581260e-01 ... -7.82329440e-02 -1.17103136e+00 -1.60914564e+00] [ 7.55581260e-01 -1.50381219e+00 1.80174842e-01 ... -1.60914564e+00 6.48028433e-01 2.53046572e-01] ... [-3.57004762e-01 -3.06822360e-01 2.42437649e+00 ... -9.06840488e-02 1.01157928e+00 -7.29247928e-01] [ 2.42437649e+00 -6.14268601e-01 4.87350613e-01 ... -7.29247928e-01 9.97228563e-01 -4.09533143e-01] [ 4.87350613e-01 1.34753585e+00 -7.38248110e-01 ... -4.09533143e-01 4.17480677e-01 -2.94448078e-01]] [[-1.53093621e-01 -1.44833699e-01 7.39574671e-01 ... -2.30085224e-01 -1.38476312e+00 5.81036508e-01] [ 7.39574671e-01 -2.94509418e-02 1.34950474e-01 ... 5.81036508e-01 9.16305929e-02 1.00250971e+00] [ 1.34950474e-01 -1.68747461e+00 3.18379819e-01 ... 1.00250971e+00 -1.25617588e+00 -1.95894673e-01] ... [ 1.37718928e+00 7.34388769e-01 1.01286426e-01 ... 
-9.46447194e-01 -1.99269071e-01 -2.36209941e+00] [ 1.01286426e-01 9.00999904e-01 1.30732328e-01 ... -2.36209941e+00 1.10325134e+00 1.11433482e+00] [ 1.30732328e-01 -3.17656845e-01 -3.96548420e-01 ... 1.11433482e+00 -6.72563255e-01 -1.81349397e+00]]]; ov_res: [[[-1.31907690e+00 2.40164861e-01 1.23127446e-01 ... -3.97889763e-01 -3.02950692e+00 3.04011375e-01] [ 1.23127446e-01 -9.29686666e-01 2.77075231e-01 ... 3.04011375e-01 5.85244715e-01 -5.84541857e-01] [ 2.77075231e-01 8.36509317e-02 -3.75161916e-01 ... -5.84541857e-01 -1.66347432e+00 4.56040412e-01] ... [-2.36930907e-01 -9.90626097e-01 9.75502670e-01 ... 2.85278291e-01 -5.83810866e-01 -2.43493244e-02] [ 9.75502670e-01 -7.99527705e-01 -2.93417364e-01 ... -2.43493244e-02 3.12585402e+00 2.89148390e-01] [-2.93417364e-01 2.04943871e+00 -3.96985173e-01 ... 2.89148390e-01 9.44620788e-01 -7.40670040e-02]] [[ 4.45524752e-01 9.84533191e-01 7.89102018e-02 ... -4.40124631e-01 -7.33552754e-01 3.63486856e-01] [ 7.89102018e-02 -2.19003367e+00 -1.99064583e-01 ... 3.63486856e-01 -1.85911155e+00 1.09425938e+00] [-1.99064583e-01 1.60544884e+00 1.53111589e+00 ... 1.09425938e+00 3.60290349e-01 -7.80701101e-01] ... [-3.34481120e-01 9.09576416e-01 8.06525648e-01 ... 1.71405900e+00 -1.12683201e+00 3.14025551e-01] [ 8.06525648e-01 5.07830620e-01 -7.72686899e-01 ... 3.14025551e-01 1.02131927e+00 1.82061398e-04] [-7.72686899e-01 -1.51664579e+00 -9.16003108e-01 ... 1.82061398e-04 2.06775570e+00 3.55937362e-01]] [[ 1.36346757e+00 1.15301955e+00 -1.14430821e+00 ... 1.54043269e+00 2.19811052e-01 1.37541854e+00] [-1.14430821e+00 -3.98790002e-01 -6.36335373e-01 ... 1.37541854e+00 -1.04224324e+00 9.78094041e-01] [-6.36335373e-01 4.85279143e-01 1.00350201e+00 ... 9.78094041e-01 -2.19271138e-01 1.39681780e+00] ... [-8.93873751e-01 9.21965539e-01 4.21597928e-01 ... -1.95544392e-01 -1.01312423e+00 7.48873293e-01] [ 4.21597928e-01 4.57559109e-01 -1.33158755e+00 ... 
7.48873293e-01 -8.90521705e-01 -9.19759691e-01] [-1.33158755e+00 1.08688486e+00 4.40340012e-01 ... -9.19759691e-01 5.90767086e-01 7.86182731e-02]] ... [[ 9.31477368e-01 -5.02925776e-02 -1.83513391e+00 ... -9.80033755e-01 2.10914403e-01 1.22275949e+00] [-1.83513391e+00 -1.53160155e+00 -4.40003425e-01 ... 1.22275949e+00 1.45638883e+00 -2.74648041e-01] [-4.40003425e-01 6.55310094e-01 -1.20032899e-01 ... -2.74648041e-01 2.38380218e+00 -1.07163889e-03] ... [ 2.19294810e+00 -3.32485378e-01 -4.06264305e-01 ... 4.85054888e-02 1.41256523e+00 1.10745348e-01] [-4.06264305e-01 -2.70799398e-01 -2.41882825e+00 ... 1.10745348e-01 -7.50676394e-01 -1.19881237e+00] [-2.41882825e+00 3.22362155e-01 3.44588548e-01 ... -1.19881237e+00 9.08317804e-01 -6.10540174e-02]] [[ 2.91035205e-01 -2.67840648e+00 1.80559945e+00 ... 1.24953055e+00 -3.17993701e-01 -7.82329440e-02] [ 1.80559945e+00 1.47177196e+00 7.55581260e-01 ... -7.82329440e-02 -1.17103136e+00 -1.60914564e+00] [ 7.55581260e-01 -1.50381219e+00 1.80174842e-01 ... -1.60914564e+00 6.48028433e-01 2.53046572e-01] ... [-3.57004762e-01 -3.06822360e-01 2.42437649e+00 ... -9.06840488e-02 1.01157928e+00 -7.29247928e-01] [ 2.42437649e+00 -6.14268601e-01 4.87350613e-01 ... -7.29247928e-01 9.97228563e-01 -4.09533143e-01] [ 4.87350613e-01 1.34753585e+00 -7.38248110e-01 ... -4.09533143e-01 4.17480677e-01 -2.94448078e-01]] [[-1.53093621e-01 -1.44833699e-01 7.39574671e-01 ... -2.30085224e-01 -1.38476312e+00 5.81036508e-01] [ 7.39574671e-01 -2.94509418e-02 1.34950474e-01 ... 5.81036508e-01 9.16305929e-02 1.00250971e+00] [ 1.34950474e-01 -1.68747461e+00 3.18379819e-01 ... 1.00250971e+00 -1.25617588e+00 -1.95894673e-01] ... [ 1.37718928e+00 7.34388769e-01 1.01286426e-01 ... -9.46447194e-01 -1.99269071e-01 -2.36209941e+00] [ 1.01286426e-01 9.00999904e-01 1.30732328e-01 ... -2.36209941e+00 1.10325134e+00 1.11433482e+00] [ 1.30732328e-01 -3.17656845e-01 -3.96548420e-01 ... 1.11433482e+00 -6.72563255e-01 -1.81349397e+00]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4827.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[-4.82802778e-01 5.74193895e-01 3.79128493e-02 ... -1.44113339e-02 4.68665361e-01 -1.41276801e+00] [ 3.79128493e-02 8.87242615e-01 -7.79247642e-01 ... -1.41276801e+00 -1.77926660e-01 -1.10013449e+00] [-6.92011297e-01 -9.45089385e-02 8.57291102e-01 ... -4.86854047e-01 -8.36217344e-01 -6.91663861e-01] ... [ 2.12439686e-01 2.35030100e-01 -1.52381524e-01 ... 2.17962384e-01 1.75090933e+00 9.12463367e-02] [-1.30931318e+00 7.96281040e-01 4.45054382e-01 ... 6.84787184e-02 -1.98452517e-01 -8.92854705e-02] [ 4.45054382e-01 -1.76977706e+00 2.00734806e+00 ... -8.92854705e-02 -7.93197036e-01 1.18983400e+00]] [[-9.49279144e-02 -6.45921975e-02 1.14956403e+00 ... -8.17462265e-01 -1.29299748e+00 1.59115958e+00] [ 1.14956403e+00 9.87550989e-02 1.85568109e-01 ... 1.59115958e+00 6.19447291e-01 -1.71138465e+00] [ 2.50069529e-01 -2.16625724e-02 2.79932339e-02 ... 5.74190199e-01 2.34807774e-01 3.41502070e-01] ... [ 3.48065734e-01 2.52440363e-01 2.24730921e+00 ... -1.25038648e+00 4.87887174e-01 9.93420780e-01] [ 4.78791110e-02 -7.21537232e-01 2.27167495e-02 ... -5.77081330e-02 1.19383347e+00 7.35965371e-01] [ 2.27167495e-02 2.33033538e+00 -1.34728357e-01 ... 7.35965371e-01 -4.95022386e-01 2.51852304e-01]] [[-3.26218843e-01 -8.29607606e-01 -1.07977509e+00 ... 1.73908412e+00 5.95886528e-01 7.70858526e-01] [-1.07977509e+00 -8.69558156e-01 -7.73968995e-01 ... 7.70858526e-01 2.85438716e-01 -4.09938008e-01] [ 1.15974426e+00 2.74569929e-01 3.79404455e-01 ... 
4.74993944e-01 -1.19959557e+00 4.46168274e-01] ... [ 4.55406696e-01 2.46710467e+00 -4.48129505e-01 ... 5.35881400e-01 1.65781856e+00 -7.96004653e-01] [ 1.11699641e+00 -3.19924623e-01 -9.11750078e-01 ... 5.83327532e-01 -1.48237288e-01 2.06151843e+00] [-9.11750078e-01 -6.32092878e-02 2.59292815e-02 ... 2.06151843e+00 -1.78798330e+00 -1.06845528e-03]] ... [[-5.09045303e-01 4.56577778e-01 -1.00773942e+00 ... -2.78790498e+00 -7.43374586e-01 -8.66819978e-01] [-1.00773942e+00 3.74423116e-01 -4.59838659e-01 ... -8.66819978e-01 -7.05408752e-01 1.05388403e+00] [ 8.09745848e-01 -6.39072418e-01 1.09078526e+00 ... 1.08421493e+00 -1.53378391e+00 1.92873228e+00] ... [ 8.11167598e-01 3.07953924e-01 4.54114109e-01 ... -9.71836388e-01 9.33053732e-01 8.48706961e-02] [ 9.72000778e-01 -1.05854559e+00 -1.86697352e+00 ... 2.92582840e-01 -2.13884735e+00 1.27079642e+00] [-1.86697352e+00 7.02749848e-01 1.72650263e-01 ... 1.27079642e+00 1.07885849e+00 -4.74364817e-01]] [[-1.94526345e-01 -9.05462980e-01 1.36314309e+00 ... 5.06061137e-01 -1.11026734e-01 -1.38001919e+00] [ 1.36314309e+00 1.16886251e-01 1.38487756e+00 ... -1.38001919e+00 3.79152358e-01 8.82641852e-01] [ 4.68325287e-01 6.10442936e-01 -3.51126164e-01 ... -8.19757044e-01 -2.54225910e-01 5.16811490e-01] ... [-1.54031411e-01 2.23132655e-01 8.33405375e-01 ... -5.38240671e-01 -4.53367770e-01 7.74550617e-01] [ 1.31348670e+00 5.52893221e-01 -3.04671240e+00 ... 1.51463509e+00 1.12915015e+00 -1.51367271e+00] [-3.04671240e+00 -7.60382175e-01 1.83671987e+00 ... -1.51367271e+00 1.27647316e+00 -3.56149673e-02]] [[-1.03224790e+00 1.11013043e+00 -3.70934010e-01 ... 1.88612151e+00 -3.46652359e-01 -5.42954266e-01] [-3.70934010e-01 -1.80744934e+00 1.16251957e+00 ... -5.42954266e-01 2.33940387e+00 -2.07462478e+00] [ 7.50584543e-01 -4.73671347e-01 -2.04529858e+00 ... -8.75937164e-01 9.60954368e-01 -6.88883245e-01] ... [ 1.07752836e+00 5.51164389e-01 -1.53761888e+00 ... 
1.88865095e-01 2.97922087e+00 6.34063065e-01] [ 1.09877169e+00 -1.36541152e+00 6.21817589e-01 ... 2.70784348e-01 1.71449631e-01 1.29000366e-01] [ 6.21817589e-01 -1.51247728e+00 8.75651777e-01 ... 1.29000366e-01 -6.84319615e-01 7.06760883e-01]]]; ov_res: [[[-4.82802778e-01 5.74193895e-01 3.79128493e-02 ... -1.44113339e-02 4.68665361e-01 -1.41276801e+00] [ 3.79128493e-02 8.87242615e-01 -7.79247642e-01 ... -1.41276801e+00 -1.77926660e-01 -1.10013449e+00] [-6.92011297e-01 -9.45089385e-02 8.57291102e-01 ... -4.86854047e-01 -8.36217344e-01 -6.91663861e-01] ... [ 2.12439686e-01 2.35030100e-01 -1.52381524e-01 ... 2.17962384e-01 1.75090933e+00 9.12463367e-02] [-1.30931318e+00 7.96281040e-01 4.45054382e-01 ... 6.84787184e-02 -1.98452517e-01 -8.92854705e-02] [ 4.45054382e-01 -1.76977706e+00 2.00734806e+00 ... -8.92854705e-02 -7.93197036e-01 1.18983400e+00]] [[-9.49279144e-02 -6.45921975e-02 1.14956403e+00 ... -8.17462265e-01 -1.29299748e+00 1.59115958e+00] [ 1.14956403e+00 9.87550989e-02 1.85568109e-01 ... 1.59115958e+00 6.19447291e-01 -1.71138465e+00] [ 2.50069529e-01 -2.16625724e-02 2.79932339e-02 ... 5.74190199e-01 2.34807774e-01 3.41502070e-01] ... [ 3.48065734e-01 2.52440363e-01 2.24730921e+00 ... -1.25038648e+00 4.87887174e-01 9.93420780e-01] [ 4.78791110e-02 -7.21537232e-01 2.27167495e-02 ... -5.77081330e-02 1.19383347e+00 7.35965371e-01] [ 2.27167495e-02 2.33033538e+00 -1.34728357e-01 ... 7.35965371e-01 -4.95022386e-01 2.51852304e-01]] [[-3.26218843e-01 -8.29607606e-01 -1.07977509e+00 ... 1.73908412e+00 5.95886528e-01 7.70858526e-01] [-1.07977509e+00 -8.69558156e-01 -7.73968995e-01 ... 7.70858526e-01 2.85438716e-01 -4.09938008e-01] [ 1.15974426e+00 2.74569929e-01 3.79404455e-01 ... 4.74993944e-01 -1.19959557e+00 4.46168274e-01] ... [ 4.55406696e-01 2.46710467e+00 -4.48129505e-01 ... 5.35881400e-01 1.65781856e+00 -7.96004653e-01] [ 1.11699641e+00 -3.19924623e-01 -9.11750078e-01 ... 
5.83327532e-01 -1.48237288e-01 2.06151843e+00] [-9.11750078e-01 -6.32092878e-02 2.59292815e-02 ... 2.06151843e+00 -1.78798330e+00 -1.06845528e-03]] ... [[-5.09045303e-01 4.56577778e-01 -1.00773942e+00 ... -2.78790498e+00 -7.43374586e-01 -8.66819978e-01] [-1.00773942e+00 3.74423116e-01 -4.59838659e-01 ... -8.66819978e-01 -7.05408752e-01 1.05388403e+00] [ 8.09745848e-01 -6.39072418e-01 1.09078526e+00 ... 1.08421493e+00 -1.53378391e+00 1.92873228e+00] ... [ 8.11167598e-01 3.07953924e-01 4.54114109e-01 ... -9.71836388e-01 9.33053732e-01 8.48706961e-02] [ 9.72000778e-01 -1.05854559e+00 -1.86697352e+00 ... 2.92582840e-01 -2.13884735e+00 1.27079642e+00] [-1.86697352e+00 7.02749848e-01 1.72650263e-01 ... 1.27079642e+00 1.07885849e+00 -4.74364817e-01]] [[-1.94526345e-01 -9.05462980e-01 1.36314309e+00 ... 5.06061137e-01 -1.11026734e-01 -1.38001919e+00] [ 1.36314309e+00 1.16886251e-01 1.38487756e+00 ... -1.38001919e+00 3.79152358e-01 8.82641852e-01] [ 4.68325287e-01 6.10442936e-01 -3.51126164e-01 ... -8.19757044e-01 -2.54225910e-01 5.16811490e-01] ... [-1.54031411e-01 2.23132655e-01 8.33405375e-01 ... -5.38240671e-01 -4.53367770e-01 7.74550617e-01] [ 1.31348670e+00 5.52893221e-01 -3.04671240e+00 ... 1.51463509e+00 1.12915015e+00 -1.51367271e+00] [-3.04671240e+00 -7.60382175e-01 1.83671987e+00 ... -1.51367271e+00 1.27647316e+00 -3.56149673e-02]] [[-1.03224790e+00 1.11013043e+00 -3.70934010e-01 ... 1.88612151e+00 -3.46652359e-01 -5.42954266e-01] [-3.70934010e-01 -1.80744934e+00 1.16251957e+00 ... -5.42954266e-01 2.33940387e+00 -2.07462478e+00] [ 7.50584543e-01 -4.73671347e-01 -2.04529858e+00 ... -8.75937164e-01 9.60954368e-01 -6.88883245e-01] ... [ 1.07752836e+00 5.51164389e-01 -1.53761888e+00 ... 1.88865095e-01 2.97922087e+00 6.34063065e-01] [ 1.09877169e+00 -1.36541152e+00 6.21817589e-01 ... 2.70784348e-01 1.71449631e-01 1.29000366e-01] [ 6.21817589e-01 -1.51247728e+00 8.75651777e-01 ... 1.29000366e-01 -6.84319615e-01 7.06760883e-01]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:0 - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4829.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-1.35154235e+00 1.31340992e+00 1.25678539e+00 ... 9.64829087e-01 -4.42984968e-01 6.01098895e-01] [ 4.03726339e-01 1.17227495e+00 -1.67604521e-01 ... -1.86248139e-01 2.35851452e-01 1.95598360e-02] [ 1.05445111e+00 -8.74182463e-01 2.33904533e-02 ... -4.34159786e-01 -2.38454998e-01 -9.93247092e-01]] [[-9.97629762e-01 -7.27950752e-01 9.26700175e-01 ... 4.19714183e-01 1.63674247e+00 -9.62276459e-01] [ 2.96777129e-01 -2.18697116e-01 -1.20094764e+00 ... 1.38512456e+00 3.57851654e-01 3.10842037e-01] [ 3.69066387e-01 -1.08668828e+00 7.60637105e-01 ... 1.17383905e-01 -6.33194089e-01 4.48774844e-01]] [[-3.87923509e-01 1.17977905e+00 3.29845369e-01 ... 1.59611166e-01 -1.25392795e+00 1.15616906e+00] [ 2.14256316e-01 3.76804978e-01 7.14366794e-01 ... 1.37430325e-01 8.74678791e-01 6.23788357e-01] [-6.67758882e-01 -3.26732583e-02 -6.20179474e-01 ... -1.28277135e+00 4.52883214e-01 -1.56570399e+00]] ... [[-5.86646259e-01 1.47511590e+00 1.30765867e+00 ... -1.53054237e+00 8.41757059e-01 -2.44586021e-01] [ 1.83567524e+00 -1.06729436e+00 1.92749709e-01 ... -1.45706952e-01 1.07969820e+00 -1.55206060e+00] [-8.66653249e-02 1.39514017e+00 -5.58875985e-02 ... 1.96966276e-01 6.34606183e-01 1.39410353e+00]] [[ 1.92923820e+00 -4.69154060e-01 1.23350763e+00 ... 6.09373629e-01 4.40416873e-01 -2.89346039e-01] [-3.51010740e-01 -4.43914294e-01 1.46405172e+00 ... 1.56436026e+00 6.71329796e-01 -8.24432135e-01] [ 1.12517715e+00 -1.18705976e+00 -3.40857238e-01 ... 
3.72375101e-01 3.87068361e-01 -2.81480551e-01]] [[ 6.24840111e-02 -6.41406655e-01 -2.08510494e+00 ... -1.17852175e+00 -1.00527036e+00 -1.91843212e+00] [-4.26081806e-01 -7.40055859e-01 -5.06631255e-01 ... 2.04606032e+00 -8.63494098e-01 9.34450388e-01] [ 1.08348274e+00 1.65836111e-01 -7.94199705e-01 ... 1.91437639e-03 -7.63691068e-02 -2.54099995e-01]]]; ov_res: [[[-1.35154235e+00 1.31340992e+00 1.25678539e+00 ... 9.64829087e-01 -4.42984968e-01 6.01098895e-01] [ 4.03726339e-01 1.17227495e+00 -1.67604521e-01 ... -1.86248139e-01 2.35851452e-01 1.95598360e-02] [ 1.05445111e+00 -8.74182463e-01 2.33904533e-02 ... -4.34159786e-01 -2.38454998e-01 -9.93247092e-01]] [[-9.97629762e-01 -7.27950752e-01 9.26700175e-01 ... 4.19714183e-01 1.63674247e+00 -9.62276459e-01] [ 2.96777129e-01 -2.18697116e-01 -1.20094764e+00 ... 1.38512456e+00 3.57851654e-01 3.10842037e-01] [ 3.69066387e-01 -1.08668828e+00 7.60637105e-01 ... 1.17383905e-01 -6.33194089e-01 4.48774844e-01]] [[-3.87923509e-01 1.17977905e+00 3.29845369e-01 ... 1.59611166e-01 -1.25392795e+00 1.15616906e+00] [ 2.14256316e-01 3.76804978e-01 7.14366794e-01 ... 1.37430325e-01 8.74678791e-01 6.23788357e-01] [-6.67758882e-01 -3.26732583e-02 -6.20179474e-01 ... -1.28277135e+00 4.52883214e-01 -1.56570399e+00]] ... [[-5.86646259e-01 1.47511590e+00 1.30765867e+00 ... -1.53054237e+00 8.41757059e-01 -2.44586021e-01] [ 1.83567524e+00 -1.06729436e+00 1.92749709e-01 ... -1.45706952e-01 1.07969820e+00 -1.55206060e+00] [-8.66653249e-02 1.39514017e+00 -5.58875985e-02 ... 1.96966276e-01 6.34606183e-01 1.39410353e+00]] [[ 1.92923820e+00 -4.69154060e-01 1.23350763e+00 ... 6.09373629e-01 4.40416873e-01 -2.89346039e-01] [-3.51010740e-01 -4.43914294e-01 1.46405172e+00 ... 1.56436026e+00 6.71329796e-01 -8.24432135e-01] [ 1.12517715e+00 -1.18705976e+00 -3.40857238e-01 ... 3.72375101e-01 3.87068361e-01 -2.81480551e-01]] [[ 6.24840111e-02 -6.41406655e-01 -2.08510494e+00 ... 
-1.17852175e+00 -1.00527036e+00 -1.91843212e+00] [-4.26081806e-01 -7.40055859e-01 -5.06631255e-01 ... 2.04606032e+00 -8.63494098e-01 9.34450388e-01] [ 1.08348274e+00 1.65836111e-01 -7.94199705e-01 ... 1.91437639e-03 -7.63691068e-02 -2.54099995e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4831.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 
0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:1 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4833.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 
0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4835.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 
0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4837.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %3) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 
0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:1 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4839.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4841.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4843.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4845.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4847.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. 
... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:2 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4849.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:3 - kernel_size:[2, 3] ] | 0.06 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4851.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4853.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.12916003 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.66413134 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.06346143 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.0540682 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.897786 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.6317622 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.3078471 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.9679655 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.47094607 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.13068017 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.25569987 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.3081443 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.12916003 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.66413134 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.06346143 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.0540682 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.897786 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.6317622 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.3078471 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.9679655 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.47094607 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.13068017 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.25569987 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.3081443 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4855.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -1.177304 -1.9528567 -0.46622506] [ 0. 0. 0. ... 0.9689442 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.5716789 ... 0. 0. 0. ] [-0.55743355 0.35802993 1.0419668 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.7266854 -0.7943912 -0.09625207] [ 0. 0. 0. ... 0.03106904 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.67888033 ... 0. 0. 0. ] [-0.52784026 2.209494 0.24493957 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.4178608 -1.7307675 1.3442789 ] [ 0. 0. 0. ... 0.7724351 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.7797626 ... 0. 0. 0. ] [-0.74262136 0.06686899 -0.33908334 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.1765817 -1.003877 -0.08385875] [ 0. 0. 0. ... -0.630746 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.25814602 ... 0. 0. 0. ] [ 0.3381139 0.2681557 -1.4625902 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.03014801 -0.20807362 -0.72561234] [ 0. 0. 0. ... 1.1755296 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -2.0601795 ... 0. 0. 0. ] [-0.7210528 -2.06094 -0.2397074 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.1978815 1.539669 -0.05022189] [ 0. 0. 0. ... 1.1668831 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.2411805 ... 0. 0. 0. ] [ 1.4790597 -1.4127163 1.9523143 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -1.177304 -1.9528567 -0.46622506] [ 0. 0. 0. ... 0.9689442 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 
-1.5716789 ... 0. 0. 0. ] [-0.55743355 0.35802993 1.0419668 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.7266854 -0.7943912 -0.09625207] [ 0. 0. 0. ... 0.03106904 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.67888033 ... 0. 0. 0. ] [-0.52784026 2.209494 0.24493957 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.4178608 -1.7307675 1.3442789 ] [ 0. 0. 0. ... 0.7724351 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.7797626 ... 0. 0. 0. ] [-0.74262136 0.06686899 -0.33908334 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.1765817 -1.003877 -0.08385875] [ 0. 0. 0. ... -0.630746 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.25814602 ... 0. 0. 0. ] [ 0.3381139 0.2681557 -1.4625902 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.03014801 -0.20807362 -0.72561234] [ 0. 0. 0. ... 1.1755296 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -2.0601795 ... 0. 0. 0. ] [-0.7210528 -2.06094 -0.2397074 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.1978815 1.539669 -0.05022189] [ 0. 0. 0. ... 1.1668831 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.2411805 ... 0. 0. 0. ] [ 1.4790597 -1.4127163 1.9523143 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4857.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:3 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4859.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4861.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4863.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4865.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4867.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[5, 5]]() %4 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:5 - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4869.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4871.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... 1.0373985 2.8558576 0.74367136] [ 0. 0. 0. ... 2.8558576 0.74367136 1.1762701 ] [ 0. 0. 0. ... 0.74367136 1.1762701 0. ] ... [ 0. 1.9766247 0.14858891 ... 0. 0. 0. ] [ 1.9766247 0.14858891 -0.5302482 ... 0. 0. 0. ] [ 0.14858891 -0.5302482 -0.15987416 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 2.0914433 1.2572625 0.1636578 ] [ 0. 0. 0. ... 1.2572625 0.1636578 0.49561474] [ 0. 0. 0. ... 0.1636578 0.49561474 0. ] ... [ 0. -0.15052116 -0.26118425 ... 0. 0. 0. ] [-0.15052116 -0.26118425 -1.756615 ... 0. 0. 0. ] [-0.26118425 -1.756615 0.05221101 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5051786 2.6851346 -1.4928856 ] [ 0. 0. 0. ... 2.6851346 -1.4928856 -0.2288189 ] [ 0. 0. 0. ... -1.4928856 -0.2288189 0. ] ... [ 0. 0.9415277 -0.1090537 ... 0. 0. 0. ] [ 0.9415277 -0.1090537 -0.7864294 ... 0. 0. 0. ] [-0.1090537 -0.7864294 -0.07488614 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.0147479 -1.0024097 -0.3251674 ] [ 0. 0. 0. ... -1.0024097 -0.3251674 0.68482417] [ 0. 0. 0. ... -0.3251674 0.68482417 0. ] ... [ 0. 0.8172423 -0.37342313 ... 0. 0. 0. ] [ 0.8172423 -0.37342313 0.45068425 ... 0. 0. 0. ] [-0.37342313 0.45068425 -0.20963651 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.432783 -0.08091032 -0.94694203] [ 0. 0. 0. ... -0.08091032 -0.94694203 -0.03404969] [ 0. 0. 0. ... -0.94694203 -0.03404969 0. ] ... [ 0. 1.1046116 -0.2524655 ... 0. 0. 0. ] [ 1.1046116 -0.2524655 0.8645029 ... 0. 0. 0. ] [-0.2524655 0.8645029 0.25396994 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.6080029 0.1394255 -1.5674431 ] [ 0. 0. 0. ... 0.1394255 -1.5674431 0.6828474 ] [ 0. 0. 0. ... 
-1.5674431 0.6828474 0. ] ... [ 0. 1.6702328 0.55298406 ... 0. 0. 0. ] [ 1.6702328 0.55298406 -1.4148036 ... 0. 0. 0. ] [ 0.55298406 -1.4148036 2.1874611 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 1.0373985 2.8558576 0.74367136] [ 0. 0. 0. ... 2.8558576 0.74367136 1.1762701 ] [ 0. 0. 0. ... 0.74367136 1.1762701 0. ] ... [ 0. 1.9766247 0.14858891 ... 0. 0. 0. ] [ 1.9766247 0.14858891 -0.5302482 ... 0. 0. 0. ] [ 0.14858891 -0.5302482 -0.15987416 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 2.0914433 1.2572625 0.1636578 ] [ 0. 0. 0. ... 1.2572625 0.1636578 0.49561474] [ 0. 0. 0. ... 0.1636578 0.49561474 0. ] ... [ 0. -0.15052116 -0.26118425 ... 0. 0. 0. ] [-0.15052116 -0.26118425 -1.756615 ... 0. 0. 0. ] [-0.26118425 -1.756615 0.05221101 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5051786 2.6851346 -1.4928856 ] [ 0. 0. 0. ... 2.6851346 -1.4928856 -0.2288189 ] [ 0. 0. 0. ... -1.4928856 -0.2288189 0. ] ... [ 0. 0.9415277 -0.1090537 ... 0. 0. 0. ] [ 0.9415277 -0.1090537 -0.7864294 ... 0. 0. 0. ] [-0.1090537 -0.7864294 -0.07488614 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.0147479 -1.0024097 -0.3251674 ] [ 0. 0. 0. ... -1.0024097 -0.3251674 0.68482417] [ 0. 0. 0. ... -0.3251674 0.68482417 0. ] ... [ 0. 0.8172423 -0.37342313 ... 0. 0. 0. ] [ 0.8172423 -0.37342313 0.45068425 ... 0. 0. 0. ] [-0.37342313 0.45068425 -0.20963651 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.432783 -0.08091032 -0.94694203] [ 0. 0. 0. ... -0.08091032 -0.94694203 -0.03404969] [ 0. 0. 0. ... -0.94694203 -0.03404969 0. ] ... [ 0. 1.1046116 -0.2524655 ... 0. 0. 0. ] [ 1.1046116 -0.2524655 0.8645029 ... 0. 0. 0. ] [-0.2524655 0.8645029 0.25396994 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.6080029 0.1394255 -1.5674431 ] [ 0. 0. 0. ... 0.1394255 -1.5674431 0.6828474 ] [ 0. 0. 0. ... -1.5674431 0.6828474 0. ] ... [ 0. 1.6702328 0.55298406 ... 0. 0. 0. ] [ 1.6702328 0.55298406 -1.4148036 ... 0. 0. 0. ] [ 0.55298406 -1.4148036 2.1874611 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:1 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4873.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... -0.12099122 -0.6622846 -1.597134 ] [ 0. 0. 0. ... -0.6622846 -1.597134 0. ] [ 0. -1.0796292 -0.27487707 ... 0.6582951 -0.5136934 0.32208067] ... [-0.7262247 0.5508627 0.4560262 ... 1.8780721 -0.33272022 0. ] [ 0. -0.44824162 2.3720813 ... 0. 0. 0. ] [-0.44824162 2.3720813 -0.3666324 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.2609399 0.41308063 -1.0999702 ] [ 0. 0. 0. ... 0.41308063 -1.0999702 0. ] [ 0. 2.0872483 -0.81622964 ... -0.23725657 -0.37650996 0.2736441 ] ... [-0.43579307 -0.73347974 -0.44222674 ... 2.8426077 1.066029 0. ] [ 0. -0.7887889 0.21035016 ... 0. 0. 0. ] [-0.7887889 0.21035016 0.5138273 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.683569 -0.3068418 0.5024669 ] [ 0. 0. 0. ... -0.3068418 0.5024669 0. ] [ 0. 1.3572077 -0.2781152 ... -0.16002612 -1.3462995 0.34162828] ... [-1.0376017 -0.85304844 -0.7483452 ... -0.85991365 -1.2626873 0. ] [ 0. 1.4693433 0.29379734 ... 0. 0. 0. ] [ 1.4693433 0.29379734 0.9722297 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.15105608 0.03713595 0.9806244 ] [ 0. 0. 0. ... 0.03713595 0.9806244 0. ] [ 0. 1.2921288 1.6191843 ... 0.09747229 -1.3921278 0.78791285] ... [ 1.2610835 0.69166607 0.5490362 ... -0.63366663 1.4606187 0. ] [ 0. 1.519652 0.02760172 ... 0. 0. 0. ] [ 1.519652 0.02760172 -0.30579582 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.03249056 -0.778351 0.00510979] [ 0. 0. 0. ... -0.778351 0.00510979 0. ] [ 0. 0.9039405 0.69688255 ... -0.72901666 -0.06853853 0.5991705 ] ... [-1.1452736 -1.7529099 -0.03184241 ... 0.7688549 1.7844642 0. ] [ 0. 0.25801677 -0.7690982 ... 0. 0. 0. 
] [ 0.25801677 -0.7690982 1.4634074 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.9162791 -0.4326152 -0.88956267] [ 0. 0. 0. ... -0.4326152 -0.88956267 0. ] [ 0. 0.4913678 0.1577364 ... -0.10554967 -0.68640095 0.9328218 ] ... [-2.1894462 1.1949581 -1.4363835 ... -0.8558875 -0.14429703 0. ] [ 0. -0.1746718 1.2640516 ... 0. 0. 0. ] [-0.1746718 1.2640516 -1.2534952 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.12099122 -0.6622846 -1.597134 ] [ 0. 0. 0. ... -0.6622846 -1.597134 0. ] [ 0. -1.0796292 -0.27487707 ... 0.6582951 -0.5136934 0.32208067] ... [-0.7262247 0.5508627 0.4560262 ... 1.8780721 -0.33272022 0. ] [ 0. -0.44824162 2.3720813 ... 0. 0. 0. ] [-0.44824162 2.3720813 -0.3666324 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.2609399 0.41308063 -1.0999702 ] [ 0. 0. 0. ... 0.41308063 -1.0999702 0. ] [ 0. 2.0872483 -0.81622964 ... -0.23725657 -0.37650996 0.2736441 ] ... [-0.43579307 -0.73347974 -0.44222674 ... 2.8426077 1.066029 0. ] [ 0. -0.7887889 0.21035016 ... 0. 0. 0. ] [-0.7887889 0.21035016 0.5138273 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.683569 -0.3068418 0.5024669 ] [ 0. 0. 0. ... -0.3068418 0.5024669 0. ] [ 0. 1.3572077 -0.2781152 ... -0.16002612 -1.3462995 0.34162828] ... [-1.0376017 -0.85304844 -0.7483452 ... -0.85991365 -1.2626873 0. ] [ 0. 1.4693433 0.29379734 ... 0. 0. 0. ] [ 1.4693433 0.29379734 0.9722297 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.15105608 0.03713595 0.9806244 ] [ 0. 0. 0. ... 0.03713595 0.9806244 0. ] [ 0. 1.2921288 1.6191843 ... 0.09747229 -1.3921278 0.78791285] ... [ 1.2610835 0.69166607 0.5490362 ... -0.63366663 1.4606187 0. ] [ 0. 1.519652 0.02760172 ... 0. 0. 0. ] [ 1.519652 0.02760172 -0.30579582 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.03249056 -0.778351 0.00510979] [ 0. 0. 0. ... -0.778351 0.00510979 0. ] [ 0. 0.9039405 0.69688255 ... -0.72901666 -0.06853853 0.5991705 ] ... [-1.1452736 -1.7529099 -0.03184241 ... 0.7688549 1.7844642 0. ] [ 0. 0.25801677 -0.7690982 ... 0. 0. 0. ] [ 0.25801677 -0.7690982 1.4634074 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 
-1.9162791 -0.4326152 -0.88956267] [ 0. 0. 0. ... -0.4326152 -0.88956267 0. ] [ 0. 0.4913678 0.1577364 ... -0.10554967 -0.68640095 0.9328218 ] ... [-2.1894462 1.1949581 -1.4363835 ... -0.8558875 -0.14429703 0. ] [ 0. -0.1746718 1.2640516 ... 0. 0. 0. ] [-0.1746718 1.2640516 -1.2534952 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4875.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... -0.23891988 -0.28222618 0.6510425 ] [ 0. 0. 0. ... -0.28222618 0.6510425 0.7408757 ] [ 0. 0. 0. ... 0.6510425 0.7408757 0. ] ... [ 0. -0.7537704 0.8213026 ... 0. 0. 0. ] [-0.7537704 0.8213026 0.93675697 ... 0. 0. 0. ] [ 0.8213026 0.93675697 -1.3771104 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5836493 0.97446746 0.67852885] [ 0. 0. 0. ... 0.97446746 0.67852885 -0.5341752 ] [ 0. 0. 0. ... 0.67852885 -0.5341752 0. ] ... [ 0. 1.5934043 0.4461654 ... 0. 0. 0. ] [ 1.5934043 0.4461654 0.98200613 ... 0. 0. 0. ] [ 0.4461654 0.98200613 -0.69511896 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -2.915328 0.79496557 -0.15204974] [ 0. 0. 0. ... 0.79496557 -0.15204974 -0.4985746 ] [ 0. 0. 0. ... -0.15204974 -0.4985746 0. ] ... [ 0. 0.5735745 0.41495115 ... 0. 0. 0. ] [ 0.5735745 0.41495115 0.16665298 ... 0. 0. 0. ] [ 0.41495115 0.16665298 1.2548821 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.47702122 2.105768 -1.0150077 ] [ 0. 0. 0. ... 2.105768 -1.0150077 0.74664074] [ 0. 0. 0. ... -1.0150077 0.74664074 0. ] ... [ 0. -0.28492042 0.66014314 ... 0. 0. 0. ] [-0.28492042 0.66014314 2.591619 ... 0. 0. 0. ] [ 0.66014314 2.591619 -0.29309767 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.02242788 -1.5749811 0.75439715] [ 0. 0. 0. ... -1.5749811 0.75439715 -1.0384889 ] [ 0. 0. 0. ... 0.75439715 -1.0384889 0. ] ... [ 0. -2.1214564 -1.0490141 ... 0. 0. 0. ] [-2.1214564 -1.0490141 0.63067526 ... 0. 0. 0. ] [-1.0490141 0.63067526 -0.37890252 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1919285 -0.12857097 -0.80829465] [ 0. 0. 0. ... -0.12857097 -0.80829465 0.57765925] [ 0. 0. 0. ... 
-0.80829465 0.57765925 0. ] ... [ 0. 0.16123596 -1.1505604 ... 0. 0. 0. ] [ 0.16123596 -1.1505604 1.088326 ... 0. 0. 0. ] [-1.1505604 1.088326 1.0820093 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.23891988 -0.28222618 0.6510425 ] [ 0. 0. 0. ... -0.28222618 0.6510425 0.7408757 ] [ 0. 0. 0. ... 0.6510425 0.7408757 0. ] ... [ 0. -0.7537704 0.8213026 ... 0. 0. 0. ] [-0.7537704 0.8213026 0.93675697 ... 0. 0. 0. ] [ 0.8213026 0.93675697 -1.3771104 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5836493 0.97446746 0.67852885] [ 0. 0. 0. ... 0.97446746 0.67852885 -0.5341752 ] [ 0. 0. 0. ... 0.67852885 -0.5341752 0. ] ... [ 0. 1.5934043 0.4461654 ... 0. 0. 0. ] [ 1.5934043 0.4461654 0.98200613 ... 0. 0. 0. ] [ 0.4461654 0.98200613 -0.69511896 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -2.915328 0.79496557 -0.15204974] [ 0. 0. 0. ... 0.79496557 -0.15204974 -0.4985746 ] [ 0. 0. 0. ... -0.15204974 -0.4985746 0. ] ... [ 0. 0.5735745 0.41495115 ... 0. 0. 0. ] [ 0.5735745 0.41495115 0.16665298 ... 0. 0. 0. ] [ 0.41495115 0.16665298 1.2548821 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.47702122 2.105768 -1.0150077 ] [ 0. 0. 0. ... 2.105768 -1.0150077 0.74664074] [ 0. 0. 0. ... -1.0150077 0.74664074 0. ] ... [ 0. -0.28492042 0.66014314 ... 0. 0. 0. ] [-0.28492042 0.66014314 2.591619 ... 0. 0. 0. ] [ 0.66014314 2.591619 -0.29309767 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.02242788 -1.5749811 0.75439715] [ 0. 0. 0. ... -1.5749811 0.75439715 -1.0384889 ] [ 0. 0. 0. ... 0.75439715 -1.0384889 0. ] ... [ 0. -2.1214564 -1.0490141 ... 0. 0. 0. ] [-2.1214564 -1.0490141 0.63067526 ... 0. 0. 0. ] [-1.0490141 0.63067526 -0.37890252 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1919285 -0.12857097 -0.80829465] [ 0. 0. 0. ... -0.12857097 -0.80829465 0.57765925] [ 0. 0. 0. ... -0.80829465 0.57765925 0. ] ... [ 0. 0.16123596 -1.1505604 ... 0. 0. 0. ] [ 0.16123596 -1.1505604 1.088326 ... 0. 0. 0. ] [-1.1505604 1.088326 1.0820093 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4877.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.6085043e+00 -5.5363840e-01 -6.9537628e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -5.5363840e-01 -6.9537628e-01 0.0000000e+00] [ 0.0000000e+00 -3.3152711e-01 1.9578660e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.2799368e+00 -1.2088741e+00 0.0000000e+00] [ 0.0000000e+00 -6.2942237e-01 5.4156566e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-6.2942237e-01 5.4156566e-01 1.7778168e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -5.9631252e-01 -8.0634281e-02 -2.6672548e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -8.0634281e-02 -2.6672548e-01 0.0000000e+00] [ 0.0000000e+00 1.2789315e+00 3.6809969e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 4.5853403e-01 -3.9705080e-01 0.0000000e+00] [ 0.0000000e+00 3.8760775e-01 8.3263326e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 3.8760775e-01 8.3263326e-01 4.4989830e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.6748536e-01 1.8892330e+00 1.0428897e-03] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.8892330e+00 1.0428897e-03 0.0000000e+00] [ 0.0000000e+00 6.6631228e-02 9.7291447e-02 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.2311703e+00 -4.9380594e-01 0.0000000e+00] [ 0.0000000e+00 2.0067970e-01 1.2808233e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 2.0067970e-01 1.2808233e+00 -7.1397561e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.0565710e+00 4.8272413e-01 -9.6049011e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 4.8272413e-01 -9.6049011e-01 0.0000000e+00] [ 0.0000000e+00 -7.5619310e-01 5.4218543e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.0677024e+00 -7.5940651e-01 0.0000000e+00] [ 0.0000000e+00 -4.5990396e-01 2.5941830e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-4.5990396e-01 2.5941830e+00 -1.7555152e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -4.7935352e-01 -7.3169351e-01 -1.8125364e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -7.3169351e-01 -1.8125364e-01 0.0000000e+00] [ 0.0000000e+00 -1.2182770e+00 7.2168928e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.5349880e+00 1.0814928e+00 0.0000000e+00] [ 0.0000000e+00 6.7283928e-01 -5.5115223e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 6.7283928e-01 -5.5115223e-01 1.2718054e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.7760553e-01 1.6778405e-01 7.8884494e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.6778405e-01 7.8884494e-01 0.0000000e+00] [ 0.0000000e+00 -4.6064287e-02 1.7957001e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 5.4534835e-01 -1.2298023e+00 0.0000000e+00] [ 0.0000000e+00 -3.7129569e-01 -2.3734937e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-3.7129569e-01 -2.3734937e+00 -1.1198821e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]; ov_res: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.6085043e+00 -5.5363840e-01 -6.9537628e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
-5.5363840e-01 -6.9537628e-01 0.0000000e+00] [ 0.0000000e+00 -3.3152711e-01 1.9578660e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.2799368e+00 -1.2088741e+00 0.0000000e+00] [ 0.0000000e+00 -6.2942237e-01 5.4156566e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-6.2942237e-01 5.4156566e-01 1.7778168e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -5.9631252e-01 -8.0634281e-02 -2.6672548e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -8.0634281e-02 -2.6672548e-01 0.0000000e+00] [ 0.0000000e+00 1.2789315e+00 3.6809969e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 4.5853403e-01 -3.9705080e-01 0.0000000e+00] [ 0.0000000e+00 3.8760775e-01 8.3263326e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 3.8760775e-01 8.3263326e-01 4.4989830e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.6748536e-01 1.8892330e+00 1.0428897e-03] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.8892330e+00 1.0428897e-03 0.0000000e+00] [ 0.0000000e+00 6.6631228e-02 9.7291447e-02 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.2311703e+00 -4.9380594e-01 0.0000000e+00] [ 0.0000000e+00 2.0067970e-01 1.2808233e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 2.0067970e-01 1.2808233e+00 -7.1397561e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.0565710e+00 4.8272413e-01 -9.6049011e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 4.8272413e-01 -9.6049011e-01 0.0000000e+00] [ 0.0000000e+00 -7.5619310e-01 5.4218543e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.0677024e+00 -7.5940651e-01 0.0000000e+00] [ 0.0000000e+00 -4.5990396e-01 2.5941830e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [-4.5990396e-01 2.5941830e+00 -1.7555152e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -4.7935352e-01 -7.3169351e-01 -1.8125364e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -7.3169351e-01 -1.8125364e-01 0.0000000e+00] [ 0.0000000e+00 -1.2182770e+00 7.2168928e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.5349880e+00 1.0814928e+00 0.0000000e+00] [ 0.0000000e+00 6.7283928e-01 -5.5115223e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 6.7283928e-01 -5.5115223e-01 1.2718054e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.7760553e-01 1.6778405e-01 7.8884494e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.6778405e-01 7.8884494e-01 0.0000000e+00] [ 0.0000000e+00 -4.6064287e-02 1.7957001e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 5.4534835e-01 -1.2298023e+00 0.0000000e+00] [ 0.0000000e+00 -3.7129569e-01 -2.3734937e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-3.7129569e-01 -2.3734937e+00 -1.1198821e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:1 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4879.aten_im2col, %x.1 : Tensor): %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %3 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.kernel_size, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%3) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4881.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.7614189 0.8542057 0.18709852] [ 0. 0. 0. ... 0.18709852 -0.02371008 -0.67624605] [ 0. 0. 0. ... -0.67624605 0.07484375 0. ] ... [ 0. -1.4372414 1.2162904 ... 0. 0. 0. ] [ 1.2162904 -0.32706457 -0.4785626 ... 0. 0. 0. ] [-0.4785626 -2.4569068 0.8267459 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2050256 0.41788867 -1.9328455 ] [ 0. 0. 0. ... -1.9328455 0.31364974 -1.6965426 ] [ 0. 0. 0. ... -1.6965426 0.92866886 0. ] ... [ 0. 1.3374472 0.24341394 ... 0. 0. 0. ] [ 0.24341394 -0.7546148 0.06917214 ... 0. 0. 0. ] [ 0.06917214 -1.3609289 -0.16194564 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.7844062 -0.11236695 -0.21645541] [ 0. 0. 0. ... -0.21645541 0.71266556 -0.7636408 ] [ 0. 0. 0. ... -0.7636408 0.99281526 0. ] ... [ 0. 0.34793887 0.44316357 ... 0. 0. 0. ] [ 0.44316357 1.6636351 0.08393309 ... 0. 0. 0. ] [ 0.08393309 -1.3364848 0.78193176 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.06353197 -0.9583197 -1.0053204 ] [ 0. 0. 0. ... -1.0053204 -0.11722957 -1.4961983 ] [ 0. 0. 0. ... -1.4961983 0.40508705 0. ] ... [ 0. -0.76244664 1.2132884 ... 0. 0. 0. ] [ 1.2132884 0.9162152 1.1450447 ... 0. 0. 0. ] [ 1.1450447 0.93067473 0.07757743 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.9657633 -0.8024648 0.48372567] [ 0. 0. 0. ... 0.48372567 -1.0442206 0.31959733] [ 0. 0. 0. ... 0.31959733 0.61333686 0. ] ... [ 0. 0.12682576 0.36194745 ... 0. 0. 0. ] [ 0.36194745 1.0713581 0.49198177 ... 0. 0. 0. ] [ 0.49198177 -0.6575004 -0.12342319 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.21543778 -0.9082729 -1.0882697 ] [ 0. 0. 0. ... 
-1.0882697 0.8503331 -0.03640468] [ 0. 0. 0. ... -0.03640468 -1.3472911 0. ] ... [ 0. 0.85923576 0.10254404 ... 0. 0. 0. ] [ 0.10254404 -2.1771705 1.1996585 ... 0. 0. 0. ] [ 1.1996585 -0.3277225 -2.0449624 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.7614189 0.8542057 0.18709852] [ 0. 0. 0. ... 0.18709852 -0.02371008 -0.67624605] [ 0. 0. 0. ... -0.67624605 0.07484375 0. ] ... [ 0. -1.4372414 1.2162904 ... 0. 0. 0. ] [ 1.2162904 -0.32706457 -0.4785626 ... 0. 0. 0. ] [-0.4785626 -2.4569068 0.8267459 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2050256 0.41788867 -1.9328455 ] [ 0. 0. 0. ... -1.9328455 0.31364974 -1.6965426 ] [ 0. 0. 0. ... -1.6965426 0.92866886 0. ] ... [ 0. 1.3374472 0.24341394 ... 0. 0. 0. ] [ 0.24341394 -0.7546148 0.06917214 ... 0. 0. 0. ] [ 0.06917214 -1.3609289 -0.16194564 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.7844062 -0.11236695 -0.21645541] [ 0. 0. 0. ... -0.21645541 0.71266556 -0.7636408 ] [ 0. 0. 0. ... -0.7636408 0.99281526 0. ] ... [ 0. 0.34793887 0.44316357 ... 0. 0. 0. ] [ 0.44316357 1.6636351 0.08393309 ... 0. 0. 0. ] [ 0.08393309 -1.3364848 0.78193176 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.06353197 -0.9583197 -1.0053204 ] [ 0. 0. 0. ... -1.0053204 -0.11722957 -1.4961983 ] [ 0. 0. 0. ... -1.4961983 0.40508705 0. ] ... [ 0. -0.76244664 1.2132884 ... 0. 0. 0. ] [ 1.2132884 0.9162152 1.1450447 ... 0. 0. 0. ] [ 1.1450447 0.93067473 0.07757743 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.9657633 -0.8024648 0.48372567] [ 0. 0. 0. ... 0.48372567 -1.0442206 0.31959733] [ 0. 0. 0. ... 0.31959733 0.61333686 0. ] ... [ 0. 0.12682576 0.36194745 ... 0. 0. 0. ] [ 0.36194745 1.0713581 0.49198177 ... 0. 0. 0. ] [ 0.49198177 -0.6575004 -0.12342319 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.21543778 -0.9082729 -1.0882697 ] [ 0. 0. 0. ... -1.0882697 0.8503331 -0.03640468] [ 0. 0. 0. ... -0.03640468 -1.3472911 0. ] ... [ 0. 0.85923576 0.10254404 ... 0. 0. 0. ] [ 0.10254404 -2.1771705 1.1996585 ... 0. 0. 0. ] [ 1.1996585 -0.3277225 -2.0449624 ... 0. 0. 0. ]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4883.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.23671278 -0.923697 -0.07274005] [ 0. 0. 0. ... -0.07274005 0.00786723 0. ] [ 0. -1.6060233 -0.7166812 ... 0.97831637 -1.3624477 0.1592239 ] ... [-0.79833466 -0.93587244 -0.34274328 ... -0.7971257 -0.45349807 0. ] [ 0. 0.36609113 0.9686592 ... 0. 0. 0. ] [ 0.9686592 -2.0836937 0.9439815 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.60486025 1.0889943 0.7446419 ] [ 0. 0. 0. ... 0.7446419 -0.40849933 0. ] [ 0. 0.7390239 -0.05268309 ... 1.4843872 0.84502697 0.8564056 ] ... [ 0.00860305 1.7158759 -1.1256739 ... 1.0533352 -1.2078722 0. ] [ 0. -0.6151401 0.22282681 ... 0. 0. 0. ] [ 0.22282681 -0.61525136 0.9750001 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1243324 0.8768891 0.29019308] [ 0. 0. 0. ... 0.29019308 1.8587426 0. ] [ 0. -0.00321818 -0.1307784 ... 1.1705253 -0.7510216 -0.3746802 ] ... [ 1.0180869 1.3808919 -0.60681653 ... 1.22154 -0.9274962 0. ] [ 0. -0.9251686 1.575559 ... 0. 0. 0. ] [ 1.575559 0.16124108 0.7237798 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.883481 -1.304236 0.53564084] [ 0. 0. 0. ... 0.53564084 -0.69986767 0. ] [ 0. 0.8230743 0.14674826 ... -0.50554734 -1.3624028 -0.18332534] ... [-0.14111312 -0.39475864 -0.21059282 ... -0.253444 2.4444726 0. ] [ 0. -0.86669624 -0.5396607 ... 0. 0. 0. ] [-0.5396607 -0.14895144 0.72596323 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.68503124 -0.5281415 0.23834899] [ 0. 0. 0. ... 0.23834899 -0.97322243 0. ] [ 0. -0.30099115 2.2985291 ... 0.5910837 0.20494375 -0.46726394] ... [ 0.8841496 0.6030888 0.6153313 ... -0.28162813 0.36211577 0. ] [ 0. 
-0.16041172 0.02698892 ... 0. 0. 0. ] [ 0.02698892 -0.506699 -0.60374725 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.62647593 0.14557384 0.01126881] [ 0. 0. 0. ... 0.01126881 -0.30131513 0. ] [ 0. -3.120433 0.32616073 ... -1.3052115 -1.1087432 1.0758787 ] ... [-0.25355965 1.2266036 -2.2559435 ... -0.37844074 0.23703787 0. ] [ 0. -0.2287657 -0.8328775 ... 0. 0. 0. ] [-0.8328775 0.8174421 -0.30582488 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.23671278 -0.923697 -0.07274005] [ 0. 0. 0. ... -0.07274005 0.00786723 0. ] [ 0. -1.6060233 -0.7166812 ... 0.97831637 -1.3624477 0.1592239 ] ... [-0.79833466 -0.93587244 -0.34274328 ... -0.7971257 -0.45349807 0. ] [ 0. 0.36609113 0.9686592 ... 0. 0. 0. ] [ 0.9686592 -2.0836937 0.9439815 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.60486025 1.0889943 0.7446419 ] [ 0. 0. 0. ... 0.7446419 -0.40849933 0. ] [ 0. 0.7390239 -0.05268309 ... 1.4843872 0.84502697 0.8564056 ] ... [ 0.00860305 1.7158759 -1.1256739 ... 1.0533352 -1.2078722 0. ] [ 0. -0.6151401 0.22282681 ... 0. 0. 0. ] [ 0.22282681 -0.61525136 0.9750001 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1243324 0.8768891 0.29019308] [ 0. 0. 0. ... 0.29019308 1.8587426 0. ] [ 0. -0.00321818 -0.1307784 ... 1.1705253 -0.7510216 -0.3746802 ] ... [ 1.0180869 1.3808919 -0.60681653 ... 1.22154 -0.9274962 0. ] [ 0. -0.9251686 1.575559 ... 0. 0. 0. ] [ 1.575559 0.16124108 0.7237798 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.883481 -1.304236 0.53564084] [ 0. 0. 0. ... 0.53564084 -0.69986767 0. ] [ 0. 0.8230743 0.14674826 ... -0.50554734 -1.3624028 -0.18332534] ... [-0.14111312 -0.39475864 -0.21059282 ... -0.253444 2.4444726 0. ] [ 0. -0.86669624 -0.5396607 ... 0. 0. 0. ] [-0.5396607 -0.14895144 0.72596323 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.68503124 -0.5281415 0.23834899] [ 0. 0. 0. ... 0.23834899 -0.97322243 0. ] [ 0. -0.30099115 2.2985291 ... 0.5910837 0.20494375 -0.46726394] ... [ 0.8841496 0.6030888 0.6153313 ... -0.28162813 0.36211577 0. ] [ 0. -0.16041172 0.02698892 ... 0. 0. 0. ] [ 0.02698892 -0.506699 -0.60374725 ... 
0. 0. 0. ]] [[ 0. 0. 0. ... 0.62647593 0.14557384 0.01126881] [ 0. 0. 0. ... 0.01126881 -0.30131513 0. ] [ 0. -3.120433 0.32616073 ... -1.3052115 -1.1087432 1.0758787 ] ... [-0.25355965 1.2266036 -2.2559435 ... -0.37844074 0.23703787 0. ] [ 0. -0.2287657 -0.8328775 ... 0. 0. 0. ] [-0.8328775 0.8174421 -0.30582488 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4885.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.40463746 -0.5126314 -0.8863888 ] [ 0. 0. 0. ... -0.8863888 -0.02505034 -0.8932832 ] [ 0. 0. 0. ... -0.8932832 0.25806466 0. ] ... [ 0. -1.6442294 1.4390157 ... 0. 0. 0. ] [ 1.4390157 0.10728465 2.073041 ... 0. 0. 0. ] [ 2.073041 -1.9458724 0.90728104 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.0635375 -0.31967223 0.19037892] [ 0. 0. 0. ... 0.19037892 1.0796602 -0.96011543] [ 0. 0. 0. ... -0.96011543 0.46795118 0. ] ... [ 0. -0.29972842 0.14811604 ... 0. 0. 0. ] [ 0.14811604 0.6500129 -0.27754167 ... 0. 0. 0. ] [-0.27754167 -0.96640766 0.6677615 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5178683 -0.31445342 0.17406003] [ 0. 0. 0. ... 0.17406003 -0.24933025 -0.84154737] [ 0. 0. 0. ... -0.84154737 -1.5237435 0. ] ... [ 0. -1.5425211 -0.55988264 ... 0. 0. 0. ] [-0.55988264 -0.3625643 1.1677883 ... 0. 0. 0. ] [ 1.1677883 -0.72264475 0.89018434 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.17052498 -1.1640629 -0.11024924] [ 0. 0. 0. ... -0.11024924 -1.3476552 0.7489408 ] [ 0. 0. 0. ... 0.7489408 1.5599744 0. ] ... [ 0. 0.15369105 0.89991486 ... 0. 0. 0. ] [ 0.89991486 -0.59251404 -0.9837312 ... 0. 0. 0. ] [-0.9837312 0.06125179 0.2619524 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.79768807 1.1538608 -1.2029908 ] [ 0. 0. 0. ... -1.2029908 1.8241705 -0.8105824 ] [ 0. 0. 0. ... -0.8105824 -0.3597514 0. ] ... [ 0. -0.19186361 -0.8489461 ... 0. 0. 0. ] [-0.8489461 0.49536037 2.2206686 ... 0. 0. 0. ] [ 2.2206686 0.39887634 0.389144 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.4211551 -0.30039936 1.1854347 ] [ 0. 0. 0. ... 
1.1854347 1.5516446 1.1819139 ] [ 0. 0. 0. ... 1.1819139 -0.16459948 0. ] ... [ 0. 0.24856518 0.1354337 ... 0. 0. 0. ] [ 0.1354337 0.20280865 0.6098232 ... 0. 0. 0. ] [ 0.6098232 0.679429 0.03604316 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.40463746 -0.5126314 -0.8863888 ] [ 0. 0. 0. ... -0.8863888 -0.02505034 -0.8932832 ] [ 0. 0. 0. ... -0.8932832 0.25806466 0. ] ... [ 0. -1.6442294 1.4390157 ... 0. 0. 0. ] [ 1.4390157 0.10728465 2.073041 ... 0. 0. 0. ] [ 2.073041 -1.9458724 0.90728104 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.0635375 -0.31967223 0.19037892] [ 0. 0. 0. ... 0.19037892 1.0796602 -0.96011543] [ 0. 0. 0. ... -0.96011543 0.46795118 0. ] ... [ 0. -0.29972842 0.14811604 ... 0. 0. 0. ] [ 0.14811604 0.6500129 -0.27754167 ... 0. 0. 0. ] [-0.27754167 -0.96640766 0.6677615 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5178683 -0.31445342 0.17406003] [ 0. 0. 0. ... 0.17406003 -0.24933025 -0.84154737] [ 0. 0. 0. ... -0.84154737 -1.5237435 0. ] ... [ 0. -1.5425211 -0.55988264 ... 0. 0. 0. ] [-0.55988264 -0.3625643 1.1677883 ... 0. 0. 0. ] [ 1.1677883 -0.72264475 0.89018434 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.17052498 -1.1640629 -0.11024924] [ 0. 0. 0. ... -0.11024924 -1.3476552 0.7489408 ] [ 0. 0. 0. ... 0.7489408 1.5599744 0. ] ... [ 0. 0.15369105 0.89991486 ... 0. 0. 0. ] [ 0.89991486 -0.59251404 -0.9837312 ... 0. 0. 0. ] [-0.9837312 0.06125179 0.2619524 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.79768807 1.1538608 -1.2029908 ] [ 0. 0. 0. ... -1.2029908 1.8241705 -0.8105824 ] [ 0. 0. 0. ... -0.8105824 -0.3597514 0. ] ... [ 0. -0.19186361 -0.8489461 ... 0. 0. 0. ] [-0.8489461 0.49536037 2.2206686 ... 0. 0. 0. ] [ 2.2206686 0.39887634 0.389144 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.4211551 -0.30039936 1.1854347 ] [ 0. 0. 0. ... 1.1854347 1.5516446 1.1819139 ] [ 0. 0. 0. ... 1.1819139 -0.16459948 0. ] ... [ 0. 0.24856518 0.1354337 ... 0. 0. 0. ] [ 0.1354337 0.20280865 0.6098232 ... 0. 0. 0. ] [ 0.6098232 0.679429 0.03604316 ... 0. 0. 0. ]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4887.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... 0.4441495 0.23510598 -0.6344172 ] [ 0. 0. 0. ... -0.6344172 -0.662732 0. ] [ 0. -0.10114077 0.07945114 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.5463727 0.07925831 0. ] [ 0. -0.79631984 -0.14659178 ... 0. 0. 0. ] [-0.14659178 -1.5739895 -1.3294251 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -2.3762743 2.0347824 -0.3467332 ] [ 0. 0. 0. ... -0.3467332 -0.24401392 0. ] [ 0. -0.7224575 -0.12688075 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 2.05223 -0.600568 0. ] [ 0. -0.789503 -1.0796409 ... 0. 0. 0. ] [-1.0796409 0.08875021 0.9530752 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.06457319 0.1309637 -0.32645416] [ 0. 0. 0. ... -0.32645416 -1.0822166 0. ] [ 0. 0.10941271 0.00855581 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.9860402 0.62267953 0. ] [ 0. 2.060113 -0.12006095 ... 0. 0. 0. ] [-0.12006095 2.3175316 0.19193518 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.5632527 -0.13201104 0.76419175] [ 0. 0. 0. ... 0.76419175 0.02793859 0. ] [ 0. 0.8638998 0.06058845 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.3717956 -0.7024048 0. ] [ 0. -1.2977403 -0.2886151 ... 0. 0. 0. ] [-0.2886151 1.2781835 -0.69035584 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.14669906 0.05945312 0.8136339 ] [ 0. 0. 0. ... 0.8136339 -0.3901577 0. ] [ 0. -0.11562391 -1.1381656 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.4437772 0.43979692 0. ] [ 0. 0.51412094 -0.8341854 ... 0. 0. 0. ] [-0.8341854 0.8761545 0.28179178 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.25344232 -1.3781158 -0.01678996] [ 0. 0. 0. ... -0.01678996 -0.33646506 0. ] [ 0. -1.4182737 0.6486692 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.74956286 0.94399387 0. ] [ 0. 
0.12581997 -0.25331044 ... 0. 0. 0. ] [-0.25331044 0.14713398 -0.58704925 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.4441495 0.23510598 -0.6344172 ] [ 0. 0. 0. ... -0.6344172 -0.662732 0. ] [ 0. -0.10114077 0.07945114 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.5463727 0.07925831 0. ] [ 0. -0.79631984 -0.14659178 ... 0. 0. 0. ] [-0.14659178 -1.5739895 -1.3294251 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -2.3762743 2.0347824 -0.3467332 ] [ 0. 0. 0. ... -0.3467332 -0.24401392 0. ] [ 0. -0.7224575 -0.12688075 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 2.05223 -0.600568 0. ] [ 0. -0.789503 -1.0796409 ... 0. 0. 0. ] [-1.0796409 0.08875021 0.9530752 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.06457319 0.1309637 -0.32645416] [ 0. 0. 0. ... -0.32645416 -1.0822166 0. ] [ 0. 0.10941271 0.00855581 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.9860402 0.62267953 0. ] [ 0. 2.060113 -0.12006095 ... 0. 0. 0. ] [-0.12006095 2.3175316 0.19193518 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.5632527 -0.13201104 0.76419175] [ 0. 0. 0. ... 0.76419175 0.02793859 0. ] [ 0. 0.8638998 0.06058845 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.3717956 -0.7024048 0. ] [ 0. -1.2977403 -0.2886151 ... 0. 0. 0. ] [-0.2886151 1.2781835 -0.69035584 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.14669906 0.05945312 0.8136339 ] [ 0. 0. 0. ... 0.8136339 -0.3901577 0. ] [ 0. -0.11562391 -1.1381656 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.4437772 0.43979692 0. ] [ 0. 0.51412094 -0.8341854 ... 0. 0. 0. ] [-0.8341854 0.8761545 0.28179178 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.25344232 -1.3781158 -0.01678996] [ 0. 0. 0. ... -0.01678996 -0.33646506 0. ] [ 0. -1.4182737 0.6486692 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.74956286 0.94399387 0. ] [ 0. 0.12581997 -0.25331044 ... 0. 0. 0. ] [-0.25331044 0.14713398 -0.58704925 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:2 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4889.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:3 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4891.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.8746738 -1.0077097 0.16356678] [ 0. 0. 0. ... 0.33785805 -0.8398111 -0.27809274] [ 0. 0. 0. ... -1.7302505 0.62352085 0. ] ... [ 0. 0.8340305 0.8325235 ... 0. 0. 0. ] [-1.0008365 0.16767702 -1.059392 ... 0. 0. 0. ] [ 0.90702057 1.0261557 0.5641695 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8904692 1.6768363 2.1870468 ] [ 0. 0. 0. ... 0.26495138 -0.5597518 -1.5438714 ] [ 0. 0. 0. ... -1.206501 0.32310364 0. ] ... [ 0. 0.40611422 0.22940955 ... 0. 0. 0. ] [-0.3701179 0.3853885 -0.9129175 ... 0. 0. 0. ] [-0.43844694 0.12535898 0.8166948 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0765738 -0.3672194 -1.4601264 ] [ 0. 0. 0. ... -0.6329635 -0.8922483 0.37132436] [ 0. 0. 0. ... -0.7457421 3.4503868 0. ] ... [ 0. -1.0711529 -0.9860042 ... 0. 0. 0. ] [ 0.6290166 -0.76990813 1.644998 ... 0. 0. 0. ] [ 0.08912694 -0.65765905 -1.2151085 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.3422998 0.52495456 -0.6795167 ] [ 0. 0. 0. ... 0.05942726 0.14557804 0.53840035] [ 0. 0. 0. ... 0.450478 0.19224185 0. ] ... [ 0. 0.6299496 -0.08758992 ... 0. 0. 0. ] [-0.35541657 0.40386784 1.6347606 ... 0. 0. 0. ] [ 0.8939481 0.87199265 0.7329253 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.9909997 -0.7233175 -1.6177284 ] [ 0. 0. 0. ... -0.35985532 0.21236409 -1.6314558 ] [ 0. 0. 0. ... 0.72547936 1.1583663 0. ] ... [ 0. -0.44172528 -1.6729652 ... 0. 0. 0. ] [-0.11734355 -0.36919937 0.6895903 ... 0. 0. 0. ] [ 0.3458075 0.33406878 0.845722 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.39204907 0.18534306 -0.83812946] [ 0. 0. 0. ... 
0.5384241 1.2621619 0.7027789 ] [ 0. 0. 0. ... 1.2240002 -0.97773254 0. ] ... [ 0. 0.54024357 -0.4064224 ... 0. 0. 0. ] [-0.2626472 -0.3391012 1.1442945 ... 0. 0. 0. ] [ 0.85579544 0.03258834 -1.2315401 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.8746738 -1.0077097 0.16356678] [ 0. 0. 0. ... 0.33785805 -0.8398111 -0.27809274] [ 0. 0. 0. ... -1.7302505 0.62352085 0. ] ... [ 0. 0.8340305 0.8325235 ... 0. 0. 0. ] [-1.0008365 0.16767702 -1.059392 ... 0. 0. 0. ] [ 0.90702057 1.0261557 0.5641695 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8904692 1.6768363 2.1870468 ] [ 0. 0. 0. ... 0.26495138 -0.5597518 -1.5438714 ] [ 0. 0. 0. ... -1.206501 0.32310364 0. ] ... [ 0. 0.40611422 0.22940955 ... 0. 0. 0. ] [-0.3701179 0.3853885 -0.9129175 ... 0. 0. 0. ] [-0.43844694 0.12535898 0.8166948 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0765738 -0.3672194 -1.4601264 ] [ 0. 0. 0. ... -0.6329635 -0.8922483 0.37132436] [ 0. 0. 0. ... -0.7457421 3.4503868 0. ] ... [ 0. -1.0711529 -0.9860042 ... 0. 0. 0. ] [ 0.6290166 -0.76990813 1.644998 ... 0. 0. 0. ] [ 0.08912694 -0.65765905 -1.2151085 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.3422998 0.52495456 -0.6795167 ] [ 0. 0. 0. ... 0.05942726 0.14557804 0.53840035] [ 0. 0. 0. ... 0.450478 0.19224185 0. ] ... [ 0. 0.6299496 -0.08758992 ... 0. 0. 0. ] [-0.35541657 0.40386784 1.6347606 ... 0. 0. 0. ] [ 0.8939481 0.87199265 0.7329253 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.9909997 -0.7233175 -1.6177284 ] [ 0. 0. 0. ... -0.35985532 0.21236409 -1.6314558 ] [ 0. 0. 0. ... 0.72547936 1.1583663 0. ] ... [ 0. -0.44172528 -1.6729652 ... 0. 0. 0. ] [-0.11734355 -0.36919937 0.6895903 ... 0. 0. 0. ] [ 0.3458075 0.33406878 0.845722 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.39204907 0.18534306 -0.83812946] [ 0. 0. 0. ... 0.5384241 1.2621619 0.7027789 ] [ 0. 0. 0. ... 1.2240002 -0.97773254 0. ] ... [ 0. 0.54024357 -0.4064224 ... 0. 0. 0. ] [-0.2626472 -0.3391012 1.1442945 ... 0. 0. 0. ] [ 0.85579544 0.03258834 -1.2315401 ... 0. 0. 0. ]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4893.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -3.032898 1.5677481 -2.2106082 ] [ 0. 0. 0. ... 0.10115737 0.4395798 0. ] [ 0. 0.9018351 0.20427051 ... 0.581774 0.3705359 1.1467865 ] ... [-0.09511935 -1.6570686 -0.38048232 ... -0.36468646 -0.61626357 0. ] [ 0. -1.2978927 0.699161 ... 0. 0. 0. ] [-0.9818085 -1.1188774 0.04789798 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.60289145 -0.10103851 -0.23100568] [ 0. 0. 0. ... -0.19015266 -0.01764922 0. ] [ 0. 0.93514395 1.6923748 ... 0.27499664 -1.8724163 -0.46043727] ... [-0.13082612 0.7887162 0.21995233 ... 1.8840632 -0.01279736 0. ] [ 0. -0.7628518 0.17356072 ... 0. 0. 0. ] [-0.27047378 0.29734367 0.36242208 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.91150016 0.5546306 -1.6458547 ] [ 0. 0. 0. ... 0.20135087 0.8437032 0. ] [ 0. -1.6481757 -0.08325905 ... -1.0528836 1.615097 -2.437884 ] ... [-0.7403982 -1.088492 2.0240498 ... 0.8224521 1.9917028 0. ] [ 0. -0.25266644 0.6523288 ... 0. 0. 0. ] [-1.7244122 0.18180111 0.25250456 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.0650035 -1.187195 0.64922464] [ 0. 0. 0. ... -0.5740793 1.3470154 0. ] [ 0. -0.2339913 0.08995053 ... -0.3999259 -0.9223885 -0.641385 ] ... [-1.851133 0.23821591 -3.4762046 ... 0.64341795 -0.19048111 0. ] [ 0. 0.18030715 -0.48022148 ... 0. 0. 0. ] [-1.974032 0.5293028 0.38858008 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.08298334 -1.3809806 0.21857262] [ 0. 0. 0. ... 1.038807 -0.29044986 0. ] [ 0. -0.10657962 -1.277559 ... 1.2576073 -0.09475421 0.87905884] ... [-0.66995156 1.0364423 -1.4278904 ... -0.25690663 -2.109485 0. ] [ 0. 
0.9300212 -1.5959644 ... 0. 0. 0. ] [ 1.153787 -0.60054284 -1.8457441 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5882121 -0.9602807 -1.5437613 ] [ 0. 0. 0. ... 1.6683418 -0.24111095 0. ] [ 0. -0.33079886 -0.554081 ... 0.04345322 -1.5412453 -1.3862247 ] ... [ 2.2732732 1.1334631 -0.98861784 ... -2.1091025 1.1305498 0. ] [ 0. -1.042733 -0.31259423 ... 0. 0. 0. ] [ 1.6281607 -0.8139481 -2.4847488 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -3.032898 1.5677481 -2.2106082 ] [ 0. 0. 0. ... 0.10115737 0.4395798 0. ] [ 0. 0.9018351 0.20427051 ... 0.581774 0.3705359 1.1467865 ] ... [-0.09511935 -1.6570686 -0.38048232 ... -0.36468646 -0.61626357 0. ] [ 0. -1.2978927 0.699161 ... 0. 0. 0. ] [-0.9818085 -1.1188774 0.04789798 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.60289145 -0.10103851 -0.23100568] [ 0. 0. 0. ... -0.19015266 -0.01764922 0. ] [ 0. 0.93514395 1.6923748 ... 0.27499664 -1.8724163 -0.46043727] ... [-0.13082612 0.7887162 0.21995233 ... 1.8840632 -0.01279736 0. ] [ 0. -0.7628518 0.17356072 ... 0. 0. 0. ] [-0.27047378 0.29734367 0.36242208 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.91150016 0.5546306 -1.6458547 ] [ 0. 0. 0. ... 0.20135087 0.8437032 0. ] [ 0. -1.6481757 -0.08325905 ... -1.0528836 1.615097 -2.437884 ] ... [-0.7403982 -1.088492 2.0240498 ... 0.8224521 1.9917028 0. ] [ 0. -0.25266644 0.6523288 ... 0. 0. 0. ] [-1.7244122 0.18180111 0.25250456 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.0650035 -1.187195 0.64922464] [ 0. 0. 0. ... -0.5740793 1.3470154 0. ] [ 0. -0.2339913 0.08995053 ... -0.3999259 -0.9223885 -0.641385 ] ... [-1.851133 0.23821591 -3.4762046 ... 0.64341795 -0.19048111 0. ] [ 0. 0.18030715 -0.48022148 ... 0. 0. 0. ] [-1.974032 0.5293028 0.38858008 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.08298334 -1.3809806 0.21857262] [ 0. 0. 0. ... 1.038807 -0.29044986 0. ] [ 0. -0.10657962 -1.277559 ... 1.2576073 -0.09475421 0.87905884] ... [-0.66995156 1.0364423 -1.4278904 ... -0.25690663 -2.109485 0. ] [ 0. 0.9300212 -1.5959644 ... 0. 0. 0. ] [ 1.153787 -0.60054284 -1.8457441 ... 0. 0. 0. 
]] [[ 0. 0. 0. ... 0.5882121 -0.9602807 -1.5437613 ] [ 0. 0. 0. ... 1.6683418 -0.24111095 0. ] [ 0. -0.33079886 -0.554081 ... 0.04345322 -1.5412453 -1.3862247 ] ... [ 2.2732732 1.1334631 -0.98861784 ... -2.1091025 1.1305498 0. ] [ 0. -1.042733 -0.31259423 ... 0. 0. 0. ] [ 1.6281607 -0.8139481 -2.4847488 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4895.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... 0.32538417 -2.4843524 1.3782264 ] [ 0. 0. 0. ... 0.3380877 0.6293452 0.9752551 ] [ 0. 0. 0. ... 0.51221746 0.74436486 0. ] ... [ 0. 0.27764162 0.7672514 ... 0. 0. 0. ] [ 0.6861277 -0.14564136 -0.6204904 ... 0. 0. 0. ] [-0.10138852 0.58376527 -0.15653801 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.16320463 -0.28000748 -0.45039785] [ 0. 0. 0. ... 0.3593508 0.62488705 -0.58488387] [ 0. 0. 0. ... 0.89897114 0.7076641 0. ] ... [ 0. -1.2541403 -1.4097061 ... 0. 0. 0. ] [ 0.39352736 -0.1696099 0.47788244 ... 0. 0. 0. ] [-0.35000816 0.5722898 0.09590184 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.918381 -1.726399 -0.65596515] [ 0. 0. 0. ... 0.37256324 -2.6302118 0.04392974] [ 0. 0. 0. ... 2.0086026 -0.29227918 0. ] ... [ 0. -0.6773165 0.1249105 ... 0. 0. 0. ] [-1.8878218 -0.9930513 -0.05003165 ... 0. 0. 0. ] [-1.64135 -0.5894439 0.01932117 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.5387864 1.9467746 0.40768763] [ 0. 0. 0. ... 0.5060506 -0.0654415 -0.70128185] [ 0. 0. 0. ... -0.36974084 1.1836241 0. ] ... [ 0. -1.04862 0.18291171 ... 0. 0. 0. ] [ 1.3865869 -0.50781196 -1.2351983 ... 0. 0. 0. ] [-1.4631431 0.36386323 -1.6506205 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -2.29946 -1.0698738 -0.8199306 ] [ 0. 0. 0. ... -1.2759322 0.21685122 -1.2800331 ] [ 0. 0. 0. ... -1.2571461 -1.9904687 0. ] ... [ 0. -0.92539215 -0.6292827 ... 0. 0. 0. ] [ 0.09932544 0.88288856 -0.3087336 ... 0. 0. 0. ] [-0.86149645 0.95344204 0.36603284 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.74954087 2.0629888 -1.2701608 ] [ 0. 0. 0. ... 0.3191715 0.43961808 0.687608 ] [ 0. 0. 0. ... 
-1.4106901 1.2316707 0. ] ... [ 0. -3.0051298 -3.2344313 ... 0. 0. 0. ] [ 1.9326895 0.19904748 0.21444881 ... 0. 0. 0. ] [-0.65174216 -1.6186124 -0.17713045 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.32538417 -2.4843524 1.3782264 ] [ 0. 0. 0. ... 0.3380877 0.6293452 0.9752551 ] [ 0. 0. 0. ... 0.51221746 0.74436486 0. ] ... [ 0. 0.27764162 0.7672514 ... 0. 0. 0. ] [ 0.6861277 -0.14564136 -0.6204904 ... 0. 0. 0. ] [-0.10138852 0.58376527 -0.15653801 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.16320463 -0.28000748 -0.45039785] [ 0. 0. 0. ... 0.3593508 0.62488705 -0.58488387] [ 0. 0. 0. ... 0.89897114 0.7076641 0. ] ... [ 0. -1.2541403 -1.4097061 ... 0. 0. 0. ] [ 0.39352736 -0.1696099 0.47788244 ... 0. 0. 0. ] [-0.35000816 0.5722898 0.09590184 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.918381 -1.726399 -0.65596515] [ 0. 0. 0. ... 0.37256324 -2.6302118 0.04392974] [ 0. 0. 0. ... 2.0086026 -0.29227918 0. ] ... [ 0. -0.6773165 0.1249105 ... 0. 0. 0. ] [-1.8878218 -0.9930513 -0.05003165 ... 0. 0. 0. ] [-1.64135 -0.5894439 0.01932117 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.5387864 1.9467746 0.40768763] [ 0. 0. 0. ... 0.5060506 -0.0654415 -0.70128185] [ 0. 0. 0. ... -0.36974084 1.1836241 0. ] ... [ 0. -1.04862 0.18291171 ... 0. 0. 0. ] [ 1.3865869 -0.50781196 -1.2351983 ... 0. 0. 0. ] [-1.4631431 0.36386323 -1.6506205 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -2.29946 -1.0698738 -0.8199306 ] [ 0. 0. 0. ... -1.2759322 0.21685122 -1.2800331 ] [ 0. 0. 0. ... -1.2571461 -1.9904687 0. ] ... [ 0. -0.92539215 -0.6292827 ... 0. 0. 0. ] [ 0.09932544 0.88288856 -0.3087336 ... 0. 0. 0. ] [-0.86149645 0.95344204 0.36603284 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.74954087 2.0629888 -1.2701608 ] [ 0. 0. 0. ... 0.3191715 0.43961808 0.687608 ] [ 0. 0. 0. ... -1.4106901 1.2316707 0. ] ... [ 0. -3.0051298 -3.2344313 ... 0. 0. 0. ] [ 1.9326895 0.19904748 0.21444881 ... 0. 0. 0. ] [-0.65174216 -1.6186124 -0.17713045 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:3 - kernel_size:[2, 2] ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4897.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -0.00636096 -0.5684979 0.7094764 ] [ 0. 0. 0. ... 1.9917936 -0.8967335 0. ] [ 0. -0.24869326 -0.8144341 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.23856352 -0.36231127 0. ] [ 0. -0.7714192 0.1209847 ... 0. 0. 0. ] [ 0.5636277 0.78829765 0.03293815 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9710023 -0.4976785 0.21549152] [ 0. 0. 0. ... -1.8725424 1.511087 0. ] [ 0. 0.6098278 -2.321181 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.6263537 1.7349354 0. ] [ 0. -0.19408268 2.7676225 ... 0. 0. 0. ] [-0.7980846 1.6581168 -0.9822925 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1014812 0.4810812 -0.4894641 ] [ 0. 0. 0. ... 0.35151112 -0.2791967 0. ] [ 0. -0.5287681 0.50770473 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.599889 -0.23129904 0. ] [ 0. -0.66356975 1.697735 ... 0. 0. 0. ] [ 0.7999811 0.4303333 1.0318741 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.1354422 -0.5457797 -0.65125966] [ 0. 0. 0. ... -2.3609595 0.75391877 0. ] [ 0. -0.48282495 0.9921195 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.6576078 1.2166209 0. ] [ 0. -2.9532044 -1.0872144 ... 0. 0. 0. ] [ 0.9910582 -0.2787036 0.62586313 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.1706235 1.0160664 -0.381202 ] [ 0. 0. 0. ... 0.28055918 0.8352112 0. ] [ 0. -0.39211932 1.575465 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.1387336 0.03661685 0. ] [ 0. 3.090117 -0.280984 ... 0. 0. 0. ] [ 0.7332225 1.4632212 -0.6564924 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.5191532 -0.5231061 -0.7178865 ] [ 0. 0. 0. ... 0.06256198 2.1435854 0. ] [ 0. 0.54843944 -0.16824661 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 
0.18943775 -2.3423998 0. ] [ 0. -0.168459 0.10930232 ... 0. 0. 0. ] [ 0.0805525 0.05369484 0.65161127 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.00636096 -0.5684979 0.7094764 ] [ 0. 0. 0. ... 1.9917936 -0.8967335 0. ] [ 0. -0.24869326 -0.8144341 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.23856352 -0.36231127 0. ] [ 0. -0.7714192 0.1209847 ... 0. 0. 0. ] [ 0.5636277 0.78829765 0.03293815 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9710023 -0.4976785 0.21549152] [ 0. 0. 0. ... -1.8725424 1.511087 0. ] [ 0. 0.6098278 -2.321181 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.6263537 1.7349354 0. ] [ 0. -0.19408268 2.7676225 ... 0. 0. 0. ] [-0.7980846 1.6581168 -0.9822925 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1014812 0.4810812 -0.4894641 ] [ 0. 0. 0. ... 0.35151112 -0.2791967 0. ] [ 0. -0.5287681 0.50770473 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.599889 -0.23129904 0. ] [ 0. -0.66356975 1.697735 ... 0. 0. 0. ] [ 0.7999811 0.4303333 1.0318741 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.1354422 -0.5457797 -0.65125966] [ 0. 0. 0. ... -2.3609595 0.75391877 0. ] [ 0. -0.48282495 0.9921195 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.6576078 1.2166209 0. ] [ 0. -2.9532044 -1.0872144 ... 0. 0. 0. ] [ 0.9910582 -0.2787036 0.62586313 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.1706235 1.0160664 -0.381202 ] [ 0. 0. 0. ... 0.28055918 0.8352112 0. ] [ 0. -0.39211932 1.575465 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.1387336 0.03661685 0. ] [ 0. 3.090117 -0.280984 ... 0. 0. 0. ] [ 0.7332225 1.4632212 -0.6564924 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.5191532 -0.5231061 -0.7178865 ] [ 0. 0. 0. ... 0.06256198 2.1435854 0. ] [ 0. 0.54843944 -0.16824661 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.18943775 -2.3423998 0. ] [ 0. -0.168459 0.10930232 ... 0. 0. 0. ] [ 0.0805525 0.05369484 0.65161127 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:3 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4899.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4901.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -1.8528109 2.1650229 0.59327143] [ 0. 0. 0. ... 0.59327143 0.7978476 -1.2751656 ] [ 0. 0. 0. ... -1.2751656 1.0128828 0. ] ... [ 0. 0.04276755 0.55414444 ... 0. 0. 0. ] [ 0.55414444 0.21123141 0.7816238 ... 0. 0. 0. ] [ 0.7816238 0.140491 -1.907948 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.08630027 0.5817643 -0.5643199 ] [ 0. 0. 0. ... -0.5643199 -1.2575662 -0.6233186 ] [ 0. 0. 0. ... -0.6233186 0.42510763 0. ] ... [ 0. 2.036844 -1.1421317 ... 0. 0. 0. ] [-1.1421317 0.31064466 -1.4814978 ... 0. 0. 0. ] [-1.4814978 -1.3016567 -0.6333452 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.7477448 -0.5048904 -1.9257483 ] [ 0. 0. 0. ... -1.9257483 1.620893 -1.4617783 ] [ 0. 0. 0. ... -1.4617783 -0.6842516 0. ] ... [ 0. -1.6257751 -0.34662727 ... 0. 0. 0. ] [-0.34662727 0.16864753 -0.31367457 ... 0. 0. 0. ] [-0.31367457 0.6172413 -1.2383572 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.95743436 0.3510371 0.03162887] [ 0. 0. 0. ... 0.03162887 -0.29440448 0.21956964] [ 0. 0. 0. ... 0.21956964 -1.5248544 0. ] ... [ 0. 0.9057154 0.23391755 ... 0. 0. 0. ] [ 0.23391755 -0.774624 -0.5745455 ... 0. 0. 0. ] [-0.5745455 0.9785512 -2.3690403 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.61265 0.03245492 2.3721943 ] [ 0. 0. 0. ... 2.3721943 0.40597963 0.84835434] [ 0. 0. 0. ... 0.84835434 -0.48813242 0. ] ... [ 0. 2.729338 0.12221704 ... 0. 0. 0. ] [ 0.12221704 -1.5490885 -1.042671 ... 0. 0. 0. ] [-1.042671 1.0965708 -1.1756029 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.81518394 -0.34436482 0.77291256] [ 0. 0. 0. ... 
0.77291256 0.2548023 -0.15452415] [ 0. 0. 0. ... -0.15452415 0.21740389 0. ] ... [ 0. 1.6584578 -0.03783372 ... 0. 0. 0. ] [-0.03783372 -1.3676089 0.16945232 ... 0. 0. 0. ] [ 0.16945232 -0.9337081 -0.5880663 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -1.8528109 2.1650229 0.59327143] [ 0. 0. 0. ... 0.59327143 0.7978476 -1.2751656 ] [ 0. 0. 0. ... -1.2751656 1.0128828 0. ] ... [ 0. 0.04276755 0.55414444 ... 0. 0. 0. ] [ 0.55414444 0.21123141 0.7816238 ... 0. 0. 0. ] [ 0.7816238 0.140491 -1.907948 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.08630027 0.5817643 -0.5643199 ] [ 0. 0. 0. ... -0.5643199 -1.2575662 -0.6233186 ] [ 0. 0. 0. ... -0.6233186 0.42510763 0. ] ... [ 0. 2.036844 -1.1421317 ... 0. 0. 0. ] [-1.1421317 0.31064466 -1.4814978 ... 0. 0. 0. ] [-1.4814978 -1.3016567 -0.6333452 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.7477448 -0.5048904 -1.9257483 ] [ 0. 0. 0. ... -1.9257483 1.620893 -1.4617783 ] [ 0. 0. 0. ... -1.4617783 -0.6842516 0. ] ... [ 0. -1.6257751 -0.34662727 ... 0. 0. 0. ] [-0.34662727 0.16864753 -0.31367457 ... 0. 0. 0. ] [-0.31367457 0.6172413 -1.2383572 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.95743436 0.3510371 0.03162887] [ 0. 0. 0. ... 0.03162887 -0.29440448 0.21956964] [ 0. 0. 0. ... 0.21956964 -1.5248544 0. ] ... [ 0. 0.9057154 0.23391755 ... 0. 0. 0. ] [ 0.23391755 -0.774624 -0.5745455 ... 0. 0. 0. ] [-0.5745455 0.9785512 -2.3690403 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.61265 0.03245492 2.3721943 ] [ 0. 0. 0. ... 2.3721943 0.40597963 0.84835434] [ 0. 0. 0. ... 0.84835434 -0.48813242 0. ] ... [ 0. 2.729338 0.12221704 ... 0. 0. 0. ] [ 0.12221704 -1.5490885 -1.042671 ... 0. 0. 0. ] [-1.042671 1.0965708 -1.1756029 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.81518394 -0.34436482 0.77291256] [ 0. 0. 0. ... 0.77291256 0.2548023 -0.15452415] [ 0. 0. 0. ... -0.15452415 0.21740389 0. ] ... [ 0. 1.6584578 -0.03783372 ... 0. 0. 0. ] [-0.03783372 -1.3676089 0.16945232 ... 0. 0. 0. ] [ 0.16945232 -0.9337081 -0.5880663 ... 0. 0. 0. ]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4903.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -0.00341358 -0.5021211 0.23236592] [ 0. 0. 0. ... 0.23236592 1.7070892 0. ] [ 0. 2.5868506 -1.872054 ... -1.6602117 0.04664996 0.7247288 ] ... [-1.1434155 0.65108144 0.14904663 ... -0.64789474 1.6512737 0. ] [ 0. -1.9019927 0.5300233 ... 0. 0. 0. ] [ 0.5300233 1.111113 -1.6784087 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0226552 -0.9826192 1.5862905 ] [ 0. 0. 0. ... 1.5862905 -1.6659365 0. ] [ 0. -0.2695388 0.9282932 ... -1.6537197 0.67090493 -1.8506048 ] ... [ 0.6456519 -0.5178023 0.3844322 ... 0.53500396 1.5732808 0. ] [ 0. -1.1952492 1.8418217 ... 0. 0. 0. ] [ 1.8418217 1.7754494 -0.21281551 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.76129967 0.07770304 -1.937773 ] [ 0. 0. 0. ... -1.937773 -0.01942031 0. ] [ 0. 1.6267334 -1.1769881 ... 0.3204001 -0.41038114 -0.43758437] ... [ 0.35615996 0.4811575 -0.76375 ... -0.8195697 0.5951264 0. ] [ 0. 0.3592062 0.3711204 ... 0. 0. 0. ] [ 0.3711204 1.1162149 -1.4531025 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.4185832 0.5738607 -0.2643229 ] [ 0. 0. 0. ... -0.2643229 0.8777675 0. ] [ 0. -1.6705436 -1.9364096 ... -0.5411642 0.47974703 1.0661999 ] ... [-1.4934535 0.09370349 -1.2676163 ... -0.42062324 0.06363906 0. ] [ 0. -0.0837035 -2.72518 ... 0. 0. 0. ] [-2.72518 -2.7477362 -1.4652607 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.77693856 -0.12416076 -0.1624398 ] [ 0. 0. 0. ... -0.1624398 0.34205562 0. ] [ 0. 0.21603994 0.40226647 ... 0.14565225 -0.08324302 -0.19219218] ... [ 2.724302 -0.13466837 0.27308673 ... 0.24622466 -2.4728646 0. ] [ 0. 2.2611494 0.09867378 ... 0. 
0. 0. ] [ 0.09867378 1.5716599 -0.9859066 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.7846705 -0.96698487 -0.06476786] [ 0. 0. 0. ... -0.06476786 1.0669603 0. ] [ 0. -0.43606943 0.56436783 ... 0.26166764 0.29339737 0.97334594] ... [-2.5125518 -0.8339316 -0.43219432 ... 1.518344 -0.15254967 0. ] [ 0. -0.5137227 0.07578558 ... 0. 0. 0. ] [ 0.07578558 -2.7144465 0.7494586 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.00341358 -0.5021211 0.23236592] [ 0. 0. 0. ... 0.23236592 1.7070892 0. ] [ 0. 2.5868506 -1.872054 ... -1.6602117 0.04664996 0.7247288 ] ... [-1.1434155 0.65108144 0.14904663 ... -0.64789474 1.6512737 0. ] [ 0. -1.9019927 0.5300233 ... 0. 0. 0. ] [ 0.5300233 1.111113 -1.6784087 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0226552 -0.9826192 1.5862905 ] [ 0. 0. 0. ... 1.5862905 -1.6659365 0. ] [ 0. -0.2695388 0.9282932 ... -1.6537197 0.67090493 -1.8506048 ] ... [ 0.6456519 -0.5178023 0.3844322 ... 0.53500396 1.5732808 0. ] [ 0. -1.1952492 1.8418217 ... 0. 0. 0. ] [ 1.8418217 1.7754494 -0.21281551 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.76129967 0.07770304 -1.937773 ] [ 0. 0. 0. ... -1.937773 -0.01942031 0. ] [ 0. 1.6267334 -1.1769881 ... 0.3204001 -0.41038114 -0.43758437] ... [ 0.35615996 0.4811575 -0.76375 ... -0.8195697 0.5951264 0. ] [ 0. 0.3592062 0.3711204 ... 0. 0. 0. ] [ 0.3711204 1.1162149 -1.4531025 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.4185832 0.5738607 -0.2643229 ] [ 0. 0. 0. ... -0.2643229 0.8777675 0. ] [ 0. -1.6705436 -1.9364096 ... -0.5411642 0.47974703 1.0661999 ] ... [-1.4934535 0.09370349 -1.2676163 ... -0.42062324 0.06363906 0. ] [ 0. -0.0837035 -2.72518 ... 0. 0. 0. ] [-2.72518 -2.7477362 -1.4652607 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.77693856 -0.12416076 -0.1624398 ] [ 0. 0. 0. ... -0.1624398 0.34205562 0. ] [ 0. 0.21603994 0.40226647 ... 0.14565225 -0.08324302 -0.19219218] ... [ 2.724302 -0.13466837 0.27308673 ... 0.24622466 -2.4728646 0. ] [ 0. 2.2611494 0.09867378 ... 0. 0. 0. ] [ 0.09867378 1.5716599 -0.9859066 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 
-1.7846705 -0.96698487 -0.06476786] [ 0. 0. 0. ... -0.06476786 1.0669603 0. ] [ 0. -0.43606943 0.56436783 ... 0.26166764 0.29339737 0.97334594] ... [-2.5125518 -0.8339316 -0.43219432 ... 1.518344 -0.15254967 0. ] [ 0. -0.5137227 0.07578558 ... 0. 0. 0. ] [ 0.07578558 -2.7144465 0.7494586 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4905.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 1.6624103 0.7212661 -1.916216 ] [ 0. 0. 0. ... -1.916216 -1.2632474 0.7964301 ] [ 0. 0. 0. ... 0.7964301 0.6841872 0. ] ... [ 0. -0.4775448 1.4864187 ... 0. 0. 0. ] [ 1.4864187 0.23527814 1.2108463 ... 0. 0. 0. ] [ 1.2108463 -0.61211705 -1.7351367 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3628467 -1.0900855 1.1461062 ] [ 0. 0. 0. ... 1.1461062 -2.080988 0.1816426 ] [ 0. 0. 0. ... 0.1816426 -0.06641634 0. ] ... [ 0. 0.69753784 1.1286036 ... 0. 0. 0. ] [ 1.1286036 -0.9210289 -0.6663919 ... 0. 0. 0. ] [-0.6663919 -0.9823084 -0.51634824 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.10383349 0.9409631 -0.68672144] [ 0. 0. 0. ... -0.68672144 0.00859373 0.40255156] [ 0. 0. 0. ... 0.40255156 0.8693182 0. ] ... [ 0. 1.7464868 -0.11617753 ... 0. 0. 0. ] [-0.11617753 0.97965324 -0.04807737 ... 0. 0. 0. ] [-0.04807737 -0.16597508 -0.9033915 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.53785414 -1.367047 -0.42098054] [ 0. 0. 0. ... -0.42098054 -0.94487405 -0.1743433 ] [ 0. 0. 0. ... -0.1743433 -0.11290216 0. ] ... [ 0. 0.6430975 -1.0186594 ... 0. 0. 0. ] [-1.0186594 -1.2623641 -0.7234699 ... 0. 0. 0. ] [-0.7234699 -0.02728211 1.185255 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.81961155 0.11717358 1.1964471 ] [ 0. 0. 0. ... 1.1964471 -2.3634973 0.49275148] [ 0. 0. 0. ... 0.49275148 -0.74061406 0. ] ... [ 0. 0.22190215 -0.954719 ... 0. 0. 0. ] [-0.954719 0.06912989 1.4176359 ... 0. 0. 0. ] [ 1.4176359 2.1152165 -0.53172463 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.59815574 -0.8321398 0.2501158 ] [ 0. 0. 0. ... 
0.2501158 -0.6366223 0.12375165] [ 0. 0. 0. ... 0.12375165 0.12816174 0. ] ... [ 0. -1.1244295 -0.6538739 ... 0. 0. 0. ] [-0.6538739 -0.17081025 1.3721584 ... 0. 0. 0. ] [ 1.3721584 0.5890986 0.3292515 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 1.6624103 0.7212661 -1.916216 ] [ 0. 0. 0. ... -1.916216 -1.2632474 0.7964301 ] [ 0. 0. 0. ... 0.7964301 0.6841872 0. ] ... [ 0. -0.4775448 1.4864187 ... 0. 0. 0. ] [ 1.4864187 0.23527814 1.2108463 ... 0. 0. 0. ] [ 1.2108463 -0.61211705 -1.7351367 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3628467 -1.0900855 1.1461062 ] [ 0. 0. 0. ... 1.1461062 -2.080988 0.1816426 ] [ 0. 0. 0. ... 0.1816426 -0.06641634 0. ] ... [ 0. 0.69753784 1.1286036 ... 0. 0. 0. ] [ 1.1286036 -0.9210289 -0.6663919 ... 0. 0. 0. ] [-0.6663919 -0.9823084 -0.51634824 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.10383349 0.9409631 -0.68672144] [ 0. 0. 0. ... -0.68672144 0.00859373 0.40255156] [ 0. 0. 0. ... 0.40255156 0.8693182 0. ] ... [ 0. 1.7464868 -0.11617753 ... 0. 0. 0. ] [-0.11617753 0.97965324 -0.04807737 ... 0. 0. 0. ] [-0.04807737 -0.16597508 -0.9033915 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.53785414 -1.367047 -0.42098054] [ 0. 0. 0. ... -0.42098054 -0.94487405 -0.1743433 ] [ 0. 0. 0. ... -0.1743433 -0.11290216 0. ] ... [ 0. 0.6430975 -1.0186594 ... 0. 0. 0. ] [-1.0186594 -1.2623641 -0.7234699 ... 0. 0. 0. ] [-0.7234699 -0.02728211 1.185255 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.81961155 0.11717358 1.1964471 ] [ 0. 0. 0. ... 1.1964471 -2.3634973 0.49275148] [ 0. 0. 0. ... 0.49275148 -0.74061406 0. ] ... [ 0. 0.22190215 -0.954719 ... 0. 0. 0. ] [-0.954719 0.06912989 1.4176359 ... 0. 0. 0. ] [ 1.4176359 2.1152165 -0.53172463 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.59815574 -0.8321398 0.2501158 ] [ 0. 0. 0. ... 0.2501158 -0.6366223 0.12375165] [ 0. 0. 0. ... 0.12375165 0.12816174 0. ] ... [ 0. -1.1244295 -0.6538739 ... 0. 0. 0. ] [-0.6538739 -0.17081025 1.3721584 ... 0. 0. 0. ] [ 1.3721584 0.5890986 0.3292515 ... 0. 0. 0. ]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4907.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -1.4154595 0.3351776 0.61366266] [ 0. 0. 0. ... 0.61366266 0.35971305 0. ] [ 0. 1.6722608 -1.7662102 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.12870935 -0.36510524 0. ] [ 0. 0.6113887 -0.02664976 ... 0. 0. 0. ] [-0.02664976 -0.77698684 -0.33562854 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9953195 1.4070488 1.0261096 ] [ 0. 0. 0. ... 1.0261096 0.15900744 0. ] [ 0. -0.2678659 -0.3442753 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.8084756 -1.1117109 0. ] [ 0. -0.79906666 -1.9929854 ... 0. 0. 0. ] [-1.9929854 -1.6241642 -0.14437324 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.5596417 -0.50864756 1.5845304 ] [ 0. 0. 0. ... 1.5845304 0.24013455 0. ] [ 0. 0.11632372 -0.15190542 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.8909646 1.1655546 0. ] [ 0. -0.5279942 -1.2202988 ... 0. 0. 0. ] [-1.2202988 -1.6632191 -0.09858897 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.1022625 0.60728395 -1.8312682 ] [ 0. 0. 0. ... -1.8312682 1.3013707 0. ] [ 0. 1.9652644 -0.12387125 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.61000997 0.2289305 0. ] [ 0. -2.6378727 -1.0075538 ... 0. 0. 0. ] [-1.0075538 2.929047 1.8503802 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.34345266 0.18985847 -0.03155541] [ 0. 0. 0. ... -0.03155541 0.5938122 0. ] [ 0. 2.4668736 0.71069235 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.74329096 1.0888395 0. ] [ 0. -0.51705617 -0.5693484 ... 0. 0. 0. ] [-0.5693484 1.250087 -0.35952672 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.20805056 -0.71608293 -1.518252 ] [ 0. 0. 0. ... -1.518252 -0.08940642 0. ] [ 0. 0.25011876 0.10207061 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 
-0.6768561 -0.43547127 0. ] [ 0. -0.05554872 -0.1715761 ... 0. 0. 0. ] [-0.1715761 1.2295815 1.0661867 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -1.4154595 0.3351776 0.61366266] [ 0. 0. 0. ... 0.61366266 0.35971305 0. ] [ 0. 1.6722608 -1.7662102 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.12870935 -0.36510524 0. ] [ 0. 0.6113887 -0.02664976 ... 0. 0. 0. ] [-0.02664976 -0.77698684 -0.33562854 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9953195 1.4070488 1.0261096 ] [ 0. 0. 0. ... 1.0261096 0.15900744 0. ] [ 0. -0.2678659 -0.3442753 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.8084756 -1.1117109 0. ] [ 0. -0.79906666 -1.9929854 ... 0. 0. 0. ] [-1.9929854 -1.6241642 -0.14437324 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.5596417 -0.50864756 1.5845304 ] [ 0. 0. 0. ... 1.5845304 0.24013455 0. ] [ 0. 0.11632372 -0.15190542 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.8909646 1.1655546 0. ] [ 0. -0.5279942 -1.2202988 ... 0. 0. 0. ] [-1.2202988 -1.6632191 -0.09858897 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.1022625 0.60728395 -1.8312682 ] [ 0. 0. 0. ... -1.8312682 1.3013707 0. ] [ 0. 1.9652644 -0.12387125 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.61000997 0.2289305 0. ] [ 0. -2.6378727 -1.0075538 ... 0. 0. 0. ] [-1.0075538 2.929047 1.8503802 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.34345266 0.18985847 -0.03155541] [ 0. 0. 0. ... -0.03155541 0.5938122 0. ] [ 0. 2.4668736 0.71069235 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.74329096 1.0888395 0. ] [ 0. -0.51705617 -0.5693484 ... 0. 0. 0. ] [-0.5693484 1.250087 -0.35952672 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.20805056 -0.71608293 -1.518252 ] [ 0. 0. 0. ... -1.518252 -0.08940642 0. ] [ 0. 0.25011876 0.10207061 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.6768561 -0.43547127 0. ] [ 0. -0.05554872 -0.1715761 ... 0. 0. 0. ] [-0.1715761 1.2295815 1.0661867 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:1 - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4909.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4911.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 
0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:1 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4913.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.39543083 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.53916794 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.2215399 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.3990966 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.8664573 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.46911496 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.131678 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.1185421 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.67408806 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.87373143 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.31995365 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.1137525 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.39543083 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.53916794 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.2215399 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.3990966 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.8664573 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.46911496 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.131678 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.1185421 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.67408806 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.87373143 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.31995365 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.1137525 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4915.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 1.6173464 0.8751681 0. ] [ 0. 0. 0. ... 0.8751681 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.16217686 ... 0. 0. 0. ] [ 0. 0.16217686 0.23010729 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.6761124 -0.03585571 0. ] [ 0. 0. 0. ... -0.03585571 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 2.1612155 ... 0. 0. 0. ] [ 0. 2.1612155 0.0069361 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.22230867 -0.33547738 0. ] [ 0. 0. 0. ... -0.33547738 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.42914623 ... 0. 0. 0. ] [ 0. 0.42914623 1.7601591 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.4442217 0.02882297 0. ] [ 0. 0. 0. ... 0.02882297 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.70424247 ... 0. 0. 0. ] [ 0. -0.70424247 0.4674033 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.3479866 -0.6669371 0. ] [ 0. 0. 0. ... -0.6669371 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.95072997 ... 0. 0. 0. ] [ 0. -0.95072997 1.2383728 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.59655863 -0.48938867 0. ] [ 0. 0. 0. ... -0.48938867 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -2.169098 ... 0. 0. 0. ] [ 0. -2.169098 0.96611506 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 1.6173464 0.8751681 0. ] [ 0. 0. 0. ... 0.8751681 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.16217686 ... 0. 0. 0. ] [ 0. 0.16217686 0.23010729 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 
0.6761124 -0.03585571 0. ] [ 0. 0. 0. ... -0.03585571 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 2.1612155 ... 0. 0. 0. ] [ 0. 2.1612155 0.0069361 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.22230867 -0.33547738 0. ] [ 0. 0. 0. ... -0.33547738 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.42914623 ... 0. 0. 0. ] [ 0. 0.42914623 1.7601591 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.4442217 0.02882297 0. ] [ 0. 0. 0. ... 0.02882297 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.70424247 ... 0. 0. 0. ] [ 0. -0.70424247 0.4674033 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.3479866 -0.6669371 0. ] [ 0. 0. 0. ... -0.6669371 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.95072997 ... 0. 0. 0. ] [ 0. -0.95072997 1.2383728 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.59655863 -0.48938867 0. ] [ 0. 0. 0. ... -0.48938867 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -2.169098 ... 0. 0. 0. ] [ 0. -2.169098 0.96611506 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4917.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 
0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:1 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4919.aten_im2col, %x.1 : Tensor): %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4921.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.4640520e+00 -1.1755840e+00 9.8312736e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 9.8312736e-01 -1.8123903e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -1.6211483e-01 -8.7798774e-02 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-8.7798774e-02 2.1146290e+00 6.1851490e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.1721945e-01 -2.9105399e+00 -2.5170112e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.5170112e+00 -2.1407871e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 7.6017100e-01 1.0366577e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 1.0366577e+00 -9.2388070e-01 6.4700019e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 4.9703013e-02 -4.0675172e-01 -1.0241708e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.0241708e+00 -2.0185331e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -1.2365615e-01 -2.6265347e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-2.6265347e+00 8.1300616e-01 -3.6889094e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.4202119e-04 1.3153286e+00 -1.1828961e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.1828961e+00 2.4401733e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 7.8561473e-01 -3.0301663e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-3.0301663e-01 6.9867879e-01 -1.8496320e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.3549964e+00 8.0561042e-01 -1.2957136e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.2957136e-01 -1.1196421e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 1.1873431e+00 1.0188559e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 1.0188559e+00 -3.6303324e-01 -2.5359404e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.0589416e-01 2.3154095e-01 4.9966493e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 4.9966493e-01 -9.2223138e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -1.3992443e+00 -8.6611694e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-8.6611694e-01 3.2498971e-01 4.8893833e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]; ov_res: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
1.4640520e+00 -1.1755840e+00 9.8312736e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 9.8312736e-01 -1.8123903e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -1.6211483e-01 -8.7798774e-02 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-8.7798774e-02 2.1146290e+00 6.1851490e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.1721945e-01 -2.9105399e+00 -2.5170112e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.5170112e+00 -2.1407871e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 7.6017100e-01 1.0366577e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 1.0366577e+00 -9.2388070e-01 6.4700019e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 4.9703013e-02 -4.0675172e-01 -1.0241708e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.0241708e+00 -2.0185331e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -1.2365615e-01 -2.6265347e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-2.6265347e+00 8.1300616e-01 -3.6889094e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.4202119e-04 1.3153286e+00 -1.1828961e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.1828961e+00 2.4401733e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 7.8561473e-01 -3.0301663e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-3.0301663e-01 6.9867879e-01 -1.8496320e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.3549964e+00 8.0561042e-01 -1.2957136e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.2957136e-01 -1.1196421e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 1.1873431e+00 1.0188559e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 1.0188559e+00 -3.6303324e-01 -2.5359404e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.0589416e-01 2.3154095e-01 4.9966493e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 4.9966493e-01 -9.2223138e-01 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 -1.3992443e+00 -8.6611694e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [-8.6611694e-01 3.2498971e-01 4.8893833e-01 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4923.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.40017885 -0.81946725 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.6056755 0.11445758 0. ] ... [ 0. 1.0323038 1.7793754 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.33218294 2.0814688 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.8062673 -0.4462455 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.08000419 -0.15526733 0. ] ... [ 0. -1.3588326 -0.65991867 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.498211 0.41460088 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.89704627 -0.34613943 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.5714337 1.5478636 0. ] ... [ 0. 0.93603015 1.0200146 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.5731412 -1.3926092 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.28386542 -0.8999506 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.0273846 0.38493285 0. ] ... [ 0. 0.77705306 -0.05187486 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.06295905 -0.06438628 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.65875393 -0.998467 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.3494905 0.80465347 0. ] ... [ 0. -0.913092 -1.5792031 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.1883059 -0.58053035 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.39928326 -0.38734543 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.05364836 0.4743899 0. ] ... [ 0. 1.2893578 -0.27834943 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.5954159 1.7946389 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.40017885 -0.81946725 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 
-1.6056755 0.11445758 0. ] ... [ 0. 1.0323038 1.7793754 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.33218294 2.0814688 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.8062673 -0.4462455 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.08000419 -0.15526733 0. ] ... [ 0. -1.3588326 -0.65991867 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.498211 0.41460088 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.89704627 -0.34613943 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.5714337 1.5478636 0. ] ... [ 0. 0.93603015 1.0200146 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.5731412 -1.3926092 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.28386542 -0.8999506 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.0273846 0.38493285 0. ] ... [ 0. 0.77705306 -0.05187486 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.06295905 -0.06438628 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.65875393 -0.998467 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.3494905 0.80465347 0. ] ... [ 0. -0.913092 -1.5792031 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.1883059 -0.58053035 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.39928326 -0.38734543 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.05364836 0.4743899 0. ] ... [ 0. 1.2893578 -0.27834943 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.5954159 1.7946389 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4925.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.87391865 0.33759832 0.62251306] [ 0. 0. 0. ... 0.62251306 -1.3021468 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.5113749 0.70093256 ... 0. 0. 0. ] [ 0.70093256 -0.0599867 -0.35418993 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.01474631 0.04191769 -0.39160913] [ 0. 0. 0. ... -0.39160913 -0.05910046 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.543601 0.6315586 ... 0. 0. 0. ] [ 0.6315586 -1.0412877 -0.86636245 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.27898657 0.78133917 -0.60309464] [ 0. 0. 0. ... -0.60309464 -0.22606456 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.5020429 -0.48677433 ... 0. 0. 0. ] [-0.48677433 0.2969468 -1.6942409 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.6555101 1.0552266 0.33549333] [ 0. 0. 0. ... 0.33549333 -2.3103943 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.0400869 -0.9731367 ... 0. 0. 0. ] [-0.9731367 0.19586219 0.38466588 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.8600798 -0.65545464 2.409591 ] [ 0. 0. 0. ... 2.409591 -0.7502963 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.26877683 0.89223284 ... 0. 0. 0. ] [ 0.89223284 -0.20813552 0.5120065 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.09855431 -0.51643836 0.13226612] [ 0. 0. 0. ... 0.13226612 -0.37753382 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.839673 -0.9891635 ... 0. 0. 0. ] [-0.9891635 0.09696251 0.71661824 ... 0. 0. 0. 
]]]; ov_res: [[[ 0. 0. 0. ... -0.87391865 0.33759832 0.62251306] [ 0. 0. 0. ... 0.62251306 -1.3021468 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.5113749 0.70093256 ... 0. 0. 0. ] [ 0.70093256 -0.0599867 -0.35418993 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.01474631 0.04191769 -0.39160913] [ 0. 0. 0. ... -0.39160913 -0.05910046 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.543601 0.6315586 ... 0. 0. 0. ] [ 0.6315586 -1.0412877 -0.86636245 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.27898657 0.78133917 -0.60309464] [ 0. 0. 0. ... -0.60309464 -0.22606456 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.5020429 -0.48677433 ... 0. 0. 0. ] [-0.48677433 0.2969468 -1.6942409 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.6555101 1.0552266 0.33549333] [ 0. 0. 0. ... 0.33549333 -2.3103943 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.0400869 -0.9731367 ... 0. 0. 0. ] [-0.9731367 0.19586219 0.38466588 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.8600798 -0.65545464 2.409591 ] [ 0. 0. 0. ... 2.409591 -0.7502963 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.26877683 0.89223284 ... 0. 0. 0. ] [ 0.89223284 -0.20813552 0.5120065 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.09855431 -0.51643836 0.13226612] [ 0. 0. 0. ... 0.13226612 -0.37753382 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.839673 -0.9891635 ... 0. 0. 0. ] [-0.9891635 0.09696251 0.71661824 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4927.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -0.2610203 1.9417568 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.48204967 -0.00973463 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.43347612 -0.44264433 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.92415726 -0.0988564 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.3699169 1.0600013 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.507541 -0.85747665 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.6020003 0.21693015 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.5143867 -0.17606206 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5828402 -0.48138928 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.18841101 1.3760154 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5127003 0.28828853 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.0600048 0.21397154 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.2610203 1.9417568 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.48204967 -0.00973463 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.43347612 -0.44264433 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. 
... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.92415726 -0.0988564 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.3699169 1.0600013 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.507541 -0.85747665 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.6020003 0.21693015 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.5143867 -0.17606206 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5828402 -0.48138928 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.18841101 1.3760154 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5127003 0.28828853 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.0600048 0.21397154 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:2 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4929.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:3 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4931.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -0.73752016 -0.2729276 0.3408066 ] [ 0. 0. 0. ... -2.09514 -0.46048242 0.2633494 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-2.4934924 -0.23758973 -0.74295044 ... 0. 0. 0. ] [ 0.39987493 0.39431837 0.5416722 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.258384 1.2171364 -0.3443739 ] [ 0. 0. 0. ... 0.14188957 -1.5574 0.87057984] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 1.0811485 0.94999015 -0.05278429 ... 0. 0. 0. ] [ 0.1391199 -0.81377894 -0.08566851 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5324041 -0.39395714 0.6156149 ] [ 0. 0. 0. ... 1.7671436 0.36893487 -0.1941767 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.587415 -0.169093 0.20703828 ... 0. 0. 0. ] [-0.21306194 -1.6937635 -1.0585572 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.99603003 1.2974787 -0.17352805] [ 0. 0. 0. ... -0.03681799 0.9183677 -0.16002922] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-2.1743412 0.22876981 0.09911341 ... 0. 0. 0. ] [-0.41642305 -0.8192158 0.35457662 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.48889032 -0.31764743 -0.44936153] [ 0. 0. 0. ... 0.2535415 -2.8326988 1.5255771 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0.00318213 0.44883254 -1.5919601 ... 0. 0. 0. ] [-0.06433496 -1.8179007 2.376864 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.46570495 0.17540847 -1.3952353 ] [ 0. 0. 0. ... -0.62894577 -0.14428769 -0.49610513] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.2979951 0.84906983 -1.4712089 ... 0. 0. 0. 
] [-0.8633117 -0.3193356 0.8715603 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.73752016 -0.2729276 0.3408066 ] [ 0. 0. 0. ... -2.09514 -0.46048242 0.2633494 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-2.4934924 -0.23758973 -0.74295044 ... 0. 0. 0. ] [ 0.39987493 0.39431837 0.5416722 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.258384 1.2171364 -0.3443739 ] [ 0. 0. 0. ... 0.14188957 -1.5574 0.87057984] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 1.0811485 0.94999015 -0.05278429 ... 0. 0. 0. ] [ 0.1391199 -0.81377894 -0.08566851 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.5324041 -0.39395714 0.6156149 ] [ 0. 0. 0. ... 1.7671436 0.36893487 -0.1941767 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.587415 -0.169093 0.20703828 ... 0. 0. 0. ] [-0.21306194 -1.6937635 -1.0585572 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.99603003 1.2974787 -0.17352805] [ 0. 0. 0. ... -0.03681799 0.9183677 -0.16002922] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-2.1743412 0.22876981 0.09911341 ... 0. 0. 0. ] [-0.41642305 -0.8192158 0.35457662 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.48889032 -0.31764743 -0.44936153] [ 0. 0. 0. ... 0.2535415 -2.8326988 1.5255771 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0.00318213 0.44883254 -1.5919601 ... 0. 0. 0. ] [-0.06433496 -1.8179007 2.376864 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.46570495 0.17540847 -1.3952353 ] [ 0. 0. 0. ... -0.62894577 -0.14428769 -0.49610513] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.2979951 0.84906983 -1.4712089 ... 0. 0. 0. ] [-0.8633117 -0.3193356 0.8715603 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4933.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.06524219 0.6009023 0.80833447] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.3957961 0.9727021 0.58989704] ... [-0.31169006 0.2681314 -1.5762227 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.7197374 -0.44494215 0.541453 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.53432095 -0.06620303 -0.40337208] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.079143 0.05958058 -0.0820929 ] ... [-0.5788784 0.2564902 -1.210726 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.9948749 0.12926783 -1.2417747 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.12968574 0.16016649 -0.40428984] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.3541773 0.23547919 0.83961457] ... [ 1.6059059 -0.15991157 0.55727583 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.03573173 0.93220556 2.3256118 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.33136454 0.4437659 -1.126466 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.6184351 0.802246 -0.11316872] ... [-1.3929588 -0.7066179 -0.02057945 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.7327144 0.41473913 -0.8730354 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5287077 -0.55939037 1.6041725 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.2997447 0.86875385 1.3097711 ] ... [-2.2902348 -0.5021999 -0.3225112 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.7310549 1.1986395 0.32095653 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.16679345 -0.02696226 0.42491928] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.0549895 0.235094 -0.17632197] ... [ 1.4473805 -0.6521998 1.1876653 ... 0. 0. 0. ] [ 0. 
0. 0. ... 0. 0. 0. ] [-0.72728395 -0.6453245 1.222644 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.06524219 0.6009023 0.80833447] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.3957961 0.9727021 0.58989704] ... [-0.31169006 0.2681314 -1.5762227 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.7197374 -0.44494215 0.541453 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.53432095 -0.06620303 -0.40337208] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.079143 0.05958058 -0.0820929 ] ... [-0.5788784 0.2564902 -1.210726 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.9948749 0.12926783 -1.2417747 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.12968574 0.16016649 -0.40428984] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.3541773 0.23547919 0.83961457] ... [ 1.6059059 -0.15991157 0.55727583 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.03573173 0.93220556 2.3256118 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.33136454 0.4437659 -1.126466 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.6184351 0.802246 -0.11316872] ... [-1.3929588 -0.7066179 -0.02057945 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.7327144 0.41473913 -0.8730354 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5287077 -0.55939037 1.6041725 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.2997447 0.86875385 1.3097711 ] ... [-2.2902348 -0.5021999 -0.3225112 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.7310549 1.1986395 0.32095653 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.16679345 -0.02696226 0.42491928] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.0549895 0.235094 -0.17632197] ... [ 1.4473805 -0.6521998 1.1876653 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.72728395 -0.6453245 1.222644 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4935.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.81187224 -1.0354681 1.504153 ] [ 0. 0. 0. ... 0.09493084 -0.36993024 1.3821275 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.9366537 0.5353813 -0.24599811 ... 0. 0. 0. ] [-0.3183115 -1.1230791 0.02516758 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.3666981 0.2545975 0.08134596] [ 0. 0. 0. ... -1.2599999 -0.07896805 -0.07168912] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.52490944 0.4194568 -0.64940935 ... 0. 0. 0. ] [-0.5452211 0.44971383 0.14580834 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.35515893 -0.11077044 1.5771054 ] [ 0. 0. 0. ... -0.85144687 -0.25431877 -0.74701315] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-1.0198158 1.5351671 -0.93088245 ... 0. 0. 0. ] [-0.6815183 0.27915147 -0.11632608 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.01494399 0.12379611 0.07278243] [ 0. 0. 0. ... 0.9972317 0.9210951 -0.16077694] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-2.3493993 0.14706779 -0.36468667 ... 0. 0. 0. ] [-0.38104764 -1.1367215 2.2108023 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8857249 0.0965866 -1.2769269 ] [ 0. 0. 0. ... 0.1783968 1.2695105 0.48086885] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.7707311 -1.7771068 0.6486258 ... 0. 0. 0. ] [-1.471176 0.9492024 1.0276953 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9982954 0.34709957 -1.21569 ] [ 0. 0. 0. ... -1.5175273 0.95388806 -0.318771 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.70928043 0.17885862 0.40143257 ... 0. 0. 0. 
] [ 0.10541849 1.4296532 1.0256836 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.81187224 -1.0354681 1.504153 ] [ 0. 0. 0. ... 0.09493084 -0.36993024 1.3821275 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.9366537 0.5353813 -0.24599811 ... 0. 0. 0. ] [-0.3183115 -1.1230791 0.02516758 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.3666981 0.2545975 0.08134596] [ 0. 0. 0. ... -1.2599999 -0.07896805 -0.07168912] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.52490944 0.4194568 -0.64940935 ... 0. 0. 0. ] [-0.5452211 0.44971383 0.14580834 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.35515893 -0.11077044 1.5771054 ] [ 0. 0. 0. ... -0.85144687 -0.25431877 -0.74701315] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-1.0198158 1.5351671 -0.93088245 ... 0. 0. 0. ] [-0.6815183 0.27915147 -0.11632608 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.01494399 0.12379611 0.07278243] [ 0. 0. 0. ... 0.9972317 0.9210951 -0.16077694] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-2.3493993 0.14706779 -0.36468667 ... 0. 0. 0. ] [-0.38104764 -1.1367215 2.2108023 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8857249 0.0965866 -1.2769269 ] [ 0. 0. 0. ... 0.1783968 1.2695105 0.48086885] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.7707311 -1.7771068 0.6486258 ... 0. 0. 0. ] [-1.471176 0.9492024 1.0276953 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9982954 0.34709957 -1.21569 ] [ 0. 0. 0. ... -1.5175273 0.95388806 -0.318771 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.70928043 0.17885862 0.40143257 ... 0. 0. 0. ] [ 0.10541849 1.4296532 1.0256836 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4937.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -1.6159035 1.0198348 0.37834513] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.40835494 -0.23514259 -0.29858884 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.27704367 -1.24961 -2.1382124 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.04235527 -0.36428607 -0.53028995 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.3823921 -0.01257316 -0.0812205 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.88900787 -0.49860078 -0.6988516 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.31813332 -0.4252883 -0.19133566] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.59336483 0.69444597 0.74758846 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.07883218 1.2602472 1.2885642 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.9866283 1.927289 -0.988775 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.90527177 1.5513605 0.43767092] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-1.2825894 -0.13178816 1.052727 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -1.6159035 1.0198348 0.37834513] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [-0.40835494 -0.23514259 -0.29858884 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.27704367 -1.24961 -2.1382124 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.04235527 -0.36428607 -0.53028995 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.3823921 -0.01257316 -0.0812205 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.88900787 -0.49860078 -0.6988516 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.31813332 -0.4252883 -0.19133566] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.59336483 0.69444597 0.74758846 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.07883218 1.2602472 1.2885642 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.9866283 1.927289 -0.988775 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.90527177 1.5513605 0.43767092] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-1.2825894 -0.13178816 1.052727 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:3 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4939.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4941.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.kernel_size, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. 
... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4943.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.8383833 1.0898054 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.0646462 1.4630002 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.4122628 -1.6160597 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.3046378 1.2784528 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.22556633 -0.20920914 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.95284784 1.4861015 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.6012883 -0.43006784 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.6923978 -1.2764055 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.4198623 0.02128283 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.1763325 0.47517842 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.75991946 -2.1716151 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.03742524 -1.1783429 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.8383833 1.0898054 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.0646462 1.4630002 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.4122628 -1.6160597 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.3046378 1.2784528 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.22556633 -0.20920914 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.95284784 1.4861015 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.6012883 -0.43006784 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.6923978 -1.2764055 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.4198623 0.02128283 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.1763325 0.47517842 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.75991946 -2.1716151 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.03742524 -1.1783429 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4945.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.07672108 1.5380661 0.20397273] [ 0. 0. 0. ... 0.20397273 0.84366596 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.45856488 -0.79213667 ... 0. 0. 0. ] [-0.79213667 -0.15130132 -1.4030491 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.3998642 -1.1031325 -1.5013553 ] [ 0. 0. 0. ... -1.5013553 0.34917745 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.34586146 -0.7446619 ... 0. 0. 0. ] [-0.7446619 1.1576868 0.51756024 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.43021455 -0.38661015 1.177036 ] [ 0. 0. 0. ... 1.177036 -1.1733416 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.7902175 -1.8998173 ... 0. 0. 0. ] [-1.8998173 -0.40739104 1.4525834 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.78398407 -0.65748775 0.9364371 ] [ 0. 0. 0. ... 0.9364371 0.3975073 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.2764437 -0.20154694 ... 0. 0. 0. ] [-0.20154694 -0.8513195 0.9695766 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.2425289 -0.25378138 -0.23431687] [ 0. 0. 0. ... -0.23431687 -0.8850989 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.2974746 0.8133519 ... 0. 0. 0. ] [ 0.8133519 -0.40764552 0.39020768 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.37178162 0.27789745 1.3775345 ] [ 0. 0. 0. ... 1.3775345 1.6145736 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.25868696 -0.99526966 ... 0. 0. 0. ] [-0.99526966 0.98107386 -1.8434091 ... 0. 0. 0. 
]]]; ov_res: [[[ 0. 0. 0. ... 0.07672108 1.5380661 0.20397273] [ 0. 0. 0. ... 0.20397273 0.84366596 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.45856488 -0.79213667 ... 0. 0. 0. ] [-0.79213667 -0.15130132 -1.4030491 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.3998642 -1.1031325 -1.5013553 ] [ 0. 0. 0. ... -1.5013553 0.34917745 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.34586146 -0.7446619 ... 0. 0. 0. ] [-0.7446619 1.1576868 0.51756024 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.43021455 -0.38661015 1.177036 ] [ 0. 0. 0. ... 1.177036 -1.1733416 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.7902175 -1.8998173 ... 0. 0. 0. ] [-1.8998173 -0.40739104 1.4525834 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.78398407 -0.65748775 0.9364371 ] [ 0. 0. 0. ... 0.9364371 0.3975073 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.2764437 -0.20154694 ... 0. 0. 0. ] [-0.20154694 -0.8513195 0.9695766 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.2425289 -0.25378138 -0.23431687] [ 0. 0. 0. ... -0.23431687 -0.8850989 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.2974746 0.8133519 ... 0. 0. 0. ] [ 0.8133519 -0.40764552 0.39020768 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.37178162 0.27789745 1.3775345 ] [ 0. 0. 0. ... 1.3775345 1.6145736 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.25868696 -0.99526966 ... 0. 0. 0. ] [-0.99526966 0.98107386 -1.8434091 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4947.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %self.padding, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 
0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:1 - padding:[2, 3] - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4949.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.kernel_size) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4951.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[-7.9874420e-01 3.4051280e+00 -4.6768001e-01 ... 1.3023930e+00 -4.1775525e-01 5.9601980e-01] [ 3.4051280e+00 -4.6768001e-01 6.7912586e-02 ... -4.1775525e-01 5.9601980e-01 -1.4061295e+00] [-4.6768001e-01 6.7912586e-02 -5.6462437e-01 ... 5.9601980e-01 -1.4061295e+00 -1.5936713e+00] ... [ 4.8141816e-01 7.9212391e-01 4.1724280e-01 ... -8.1390351e-01 -4.1592920e-01 1.6152266e-01] [ 7.9212391e-01 4.1724280e-01 -9.9449563e-01 ... -4.1592920e-01 1.6152266e-01 -6.2512249e-01] [ 4.1724280e-01 -9.9449563e-01 -4.1902384e-01 ... 1.6152266e-01 -6.2512249e-01 -2.3491833e-01]] [[-6.7929572e-01 1.7456442e-01 4.0208563e-01 ... 1.0614710e+00 1.1406655e-01 -9.2234099e-01] [ 1.7456442e-01 4.0208563e-01 1.2498984e-01 ... 1.1406655e-01 -9.2234099e-01 -8.9669988e-02] [ 4.0208563e-01 1.2498984e-01 -1.3824443e+00 ... -9.2234099e-01 -8.9669988e-02 1.4045351e+00] ... [ 3.0988285e-01 2.9467893e-01 -8.3445124e-02 ... -6.4824927e-01 3.9469174e-01 1.3758968e-01] [ 2.9467893e-01 -8.3445124e-02 3.7355587e-01 ... 3.9469174e-01 1.3758968e-01 1.6383395e-01] [-8.3445124e-02 3.7355587e-01 -1.8176142e-02 ... 1.3758968e-01 1.6383395e-01 -1.1797111e+00]] [[ 3.7156469e-01 1.2371210e+00 1.7193491e+00 ... 6.3430202e-01 -1.1518072e+00 -3.3645201e-01] [ 1.2371210e+00 1.7193491e+00 -8.9547580e-01 ... -1.1518072e+00 -3.3645201e-01 -4.4202995e-01] [ 1.7193491e+00 -8.9547580e-01 -1.2867140e+00 ... -3.3645201e-01 -4.4202995e-01 -1.2867318e-01] ... 
[-8.4481396e-02 4.2960075e-01 2.4253228e-03 ... 3.3456632e-01 4.7095525e-01 -1.7869842e+00] [ 4.2960075e-01 2.4253228e-03 7.1295965e-01 ... 4.7095525e-01 -1.7869842e+00 -1.0108768e+00] [ 2.4253228e-03 7.1295965e-01 1.6528468e+00 ... -1.7869842e+00 -1.0108768e+00 -1.3036488e-01]] ... [[ 7.4536741e-01 -1.2737566e+00 -1.6646753e-01 ... -1.5981399e-01 8.2977533e-02 -9.4472492e-01] [-1.2737566e+00 -1.6646753e-01 -5.5460012e-01 ... 8.2977533e-02 -9.4472492e-01 4.1148522e-01] [-1.6646753e-01 -5.5460012e-01 -4.1727170e-01 ... -9.4472492e-01 4.1148522e-01 1.3714983e-01] ... [-8.5548475e-02 4.5108786e-01 8.4543300e-01 ... 7.6424700e-01 4.3744826e-01 -7.4853593e-01] [ 4.5108786e-01 8.4543300e-01 2.0958900e+00 ... 4.3744826e-01 -7.4853593e-01 -9.2216623e-01] [ 8.4543300e-01 2.0958900e+00 4.8765090e-01 ... -7.4853593e-01 -9.2216623e-01 1.1610657e-01]] [[-1.0071460e-01 1.8486351e+00 8.5562921e-01 ... -5.6553441e-01 2.6711924e-02 -1.3652717e+00] [ 1.8486351e+00 8.5562921e-01 -8.9328647e-01 ... 2.6711924e-02 -1.3652717e+00 2.4139197e-01] [ 8.5562921e-01 -8.9328647e-01 1.8342364e+00 ... -1.3652717e+00 2.4139197e-01 -1.4035566e-01] ... [ 7.3442852e-01 2.8933102e-01 1.0874927e+00 ... 6.8675943e-02 6.4943933e-01 -1.9553800e+00] [ 2.8933102e-01 1.0874927e+00 -2.6042771e-01 ... 6.4943933e-01 -1.9553800e+00 -2.7467287e-01] [ 1.0874927e+00 -2.6042771e-01 4.8290941e-01 ... -1.9553800e+00 -2.7467287e-01 -4.7167334e-01]] [[ 5.3984374e-01 6.6580653e-02 -2.3115246e-01 ... -5.7363594e-01 -4.6723300e-01 5.7879919e-01] [ 6.6580653e-02 -2.3115246e-01 -1.9370869e+00 ... -4.6723300e-01 5.7879919e-01 1.9181715e+00] [-2.3115246e-01 -1.9370869e+00 9.1723275e-01 ... 5.7879919e-01 1.9181715e+00 2.2295365e+00] ... [-5.6192654e-01 -8.6735815e-01 -7.3165409e-02 ... -6.0502905e-01 7.5239277e-01 -9.4001926e-03] [-8.6735815e-01 -7.3165409e-02 -5.0944912e-01 ... 7.5239277e-01 -9.4001926e-03 -5.7519448e-01] [-7.3165409e-02 -5.0944912e-01 -1.3975589e-01 ... 
-9.4001926e-03 -5.7519448e-01 -4.3612143e-01]]]; ov_res: [[[-7.9874420e-01 3.4051280e+00 -4.6768001e-01 ... 1.3023930e+00 -4.1775525e-01 5.9601980e-01] [ 3.4051280e+00 -4.6768001e-01 6.7912586e-02 ... -4.1775525e-01 5.9601980e-01 -1.4061295e+00] [-4.6768001e-01 6.7912586e-02 -5.6462437e-01 ... 5.9601980e-01 -1.4061295e+00 -1.5936713e+00] ... [ 4.8141816e-01 7.9212391e-01 4.1724280e-01 ... -8.1390351e-01 -4.1592920e-01 1.6152266e-01] [ 7.9212391e-01 4.1724280e-01 -9.9449563e-01 ... -4.1592920e-01 1.6152266e-01 -6.2512249e-01] [ 4.1724280e-01 -9.9449563e-01 -4.1902384e-01 ... 1.6152266e-01 -6.2512249e-01 -2.3491833e-01]] [[-6.7929572e-01 1.7456442e-01 4.0208563e-01 ... 1.0614710e+00 1.1406655e-01 -9.2234099e-01] [ 1.7456442e-01 4.0208563e-01 1.2498984e-01 ... 1.1406655e-01 -9.2234099e-01 -8.9669988e-02] [ 4.0208563e-01 1.2498984e-01 -1.3824443e+00 ... -9.2234099e-01 -8.9669988e-02 1.4045351e+00] ... [ 3.0988285e-01 2.9467893e-01 -8.3445124e-02 ... -6.4824927e-01 3.9469174e-01 1.3758968e-01] [ 2.9467893e-01 -8.3445124e-02 3.7355587e-01 ... 3.9469174e-01 1.3758968e-01 1.6383395e-01] [-8.3445124e-02 3.7355587e-01 -1.8176142e-02 ... 1.3758968e-01 1.6383395e-01 -1.1797111e+00]] [[ 3.7156469e-01 1.2371210e+00 1.7193491e+00 ... 6.3430202e-01 -1.1518072e+00 -3.3645201e-01] [ 1.2371210e+00 1.7193491e+00 -8.9547580e-01 ... -1.1518072e+00 -3.3645201e-01 -4.4202995e-01] [ 1.7193491e+00 -8.9547580e-01 -1.2867140e+00 ... -3.3645201e-01 -4.4202995e-01 -1.2867318e-01] ... [-8.4481396e-02 4.2960075e-01 2.4253228e-03 ... 3.3456632e-01 4.7095525e-01 -1.7869842e+00] [ 4.2960075e-01 2.4253228e-03 7.1295965e-01 ... 4.7095525e-01 -1.7869842e+00 -1.0108768e+00] [ 2.4253228e-03 7.1295965e-01 1.6528468e+00 ... -1.7869842e+00 -1.0108768e+00 -1.3036488e-01]] ... [[ 7.4536741e-01 -1.2737566e+00 -1.6646753e-01 ... -1.5981399e-01 8.2977533e-02 -9.4472492e-01] [-1.2737566e+00 -1.6646753e-01 -5.5460012e-01 ... 
8.2977533e-02 -9.4472492e-01 4.1148522e-01] [-1.6646753e-01 -5.5460012e-01 -4.1727170e-01 ... -9.4472492e-01 4.1148522e-01 1.3714983e-01] ... [-8.5548475e-02 4.5108786e-01 8.4543300e-01 ... 7.6424700e-01 4.3744826e-01 -7.4853593e-01] [ 4.5108786e-01 8.4543300e-01 2.0958900e+00 ... 4.3744826e-01 -7.4853593e-01 -9.2216623e-01] [ 8.4543300e-01 2.0958900e+00 4.8765090e-01 ... -7.4853593e-01 -9.2216623e-01 1.1610657e-01]] [[-1.0071460e-01 1.8486351e+00 8.5562921e-01 ... -5.6553441e-01 2.6711924e-02 -1.3652717e+00] [ 1.8486351e+00 8.5562921e-01 -8.9328647e-01 ... 2.6711924e-02 -1.3652717e+00 2.4139197e-01] [ 8.5562921e-01 -8.9328647e-01 1.8342364e+00 ... -1.3652717e+00 2.4139197e-01 -1.4035566e-01] ... [ 7.3442852e-01 2.8933102e-01 1.0874927e+00 ... 6.8675943e-02 6.4943933e-01 -1.9553800e+00] [ 2.8933102e-01 1.0874927e+00 -2.6042771e-01 ... 6.4943933e-01 -1.9553800e+00 -2.7467287e-01] [ 1.0874927e+00 -2.6042771e-01 4.8290941e-01 ... -1.9553800e+00 -2.7467287e-01 -4.7167334e-01]] [[ 5.3984374e-01 6.6580653e-02 -2.3115246e-01 ... -5.7363594e-01 -4.6723300e-01 5.7879919e-01] [ 6.6580653e-02 -2.3115246e-01 -1.9370869e+00 ... -4.6723300e-01 5.7879919e-01 1.9181715e+00] [-2.3115246e-01 -1.9370869e+00 9.1723275e-01 ... 5.7879919e-01 1.9181715e+00 2.2295365e+00] ... [-5.6192654e-01 -8.6735815e-01 -7.3165409e-02 ... -6.0502905e-01 7.5239277e-01 -9.4001926e-03] [-8.6735815e-01 -7.3165409e-02 -5.0944912e-01 ... 7.5239277e-01 -9.4001926e-03 -5.7519448e-01] [-7.3165409e-02 -5.0944912e-01 -1.3975589e-01 ... -9.4001926e-03 -5.7519448e-01 -4.3612143e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:1 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4953.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[-0.17102072 -1.4858087 1.3672577 ... 0.4392718 1.2774082 -2.4395776 ] [-1.4858087 1.3672577 0.9645663 ... 1.2774082 -2.4395776 1.3133773 ] [ 0.9015107 1.4086969 0.932836 ... 0.851296 0.6957388 -0.7358765 ] ... [-0.83242977 -0.11605601 -0.27986342 ... -0.24764927 -0.19872795 0.9237676 ] [-0.49340814 -0.602067 -0.12853329 ... 0.20077032 0.6448642 1.033749 ] [-0.602067 -0.12853329 -1.1445274 ... 0.6448642 1.033749 -0.8990981 ]] [[ 1.3861086 0.4602721 -2.613788 ... 0.19717401 -0.1862808 -0.89123297] [ 0.4602721 -2.613788 -2.2062244 ... -0.1862808 -0.89123297 0.3081011 ] [ 0.32090083 -1.2420402 2.531699 ... 0.21026355 2.0560827 0.28687152] ... [-0.23709615 -1.1868316 0.77646387 ... -0.24858367 1.2574445 0.8867583 ] [-0.9578384 1.0751009 -0.02991073 ... 0.01574345 -0.9989643 1.0904342 ] [ 1.0751009 -0.02991073 -0.9170688 ... -0.9989643 1.0904342 -0.67447793]] [[ 0.5413455 -0.57909536 0.01034448 ... 0.5445512 -0.5723087 -0.6825664 ] [-0.57909536 0.01034448 0.92785656 ... -0.5723087 -0.6825664 1.2328783 ] [-0.9827988 -0.93162304 -0.08847576 ... -0.37111557 -0.6555878 2.3429413 ] ... [ 0.67945415 -0.1316628 -2.9246714 ... -0.7417198 1.3491606 0.94346124] [ 0.3074734 -0.18140554 -0.15330543 ... -1.2298881 -0.23833323 0.06158933] [-0.18140554 -0.15330543 0.35204694 ... -0.23833323 0.06158933 1.3875837 ]] ... [[-0.6289409 -1.150141 -0.9946157 ... -1.9617821 1.4738125 -1.1236391 ] [-1.150141 -0.9946157 0.1767962 ... 
1.4738125 -1.1236391 -0.08045597] [-1.8425571 0.7096923 0.04897838 ... 0.7158934 0.843137 0.72912276] ... [ 0.32007456 1.5087141 -0.57912165 ... 0.62333494 0.9864269 -0.978918 ] [-0.07592272 -1.0899429 -0.08293354 ... -0.50505716 2.3159173 -0.33274594] [-1.0899429 -0.08293354 -1.6060609 ... 2.3159173 -0.33274594 1.3697708 ]] [[ 0.98132724 0.34627292 -2.038976 ... 0.59709686 -1.5170192 -0.96140146] [ 0.34627292 -2.038976 -0.98696244 ... -1.5170192 -0.96140146 -0.7908393 ] [-0.2621394 0.50274485 0.8223171 ... -0.91062844 0.38275203 -1.4809593 ] ... [ 0.17601283 1.6386292 -0.30624107 ... 1.4872605 -0.83126754 0.17788926] [-0.21644285 -1.5950924 1.0283298 ... 1.6012568 0.00563313 1.9258581 ] [-1.5950924 1.0283298 0.03271774 ... 0.00563313 1.9258581 -0.42306703]] [[-0.9437143 0.8362489 -0.36679116 ... 0.1513071 1.7910544 2.431035 ] [ 0.8362489 -0.36679116 -0.11348347 ... 1.7910544 2.431035 2.1839025 ] [-1.762251 -0.2430973 0.09176814 ... 0.95595056 0.87234783 -0.1836465 ] ... [-0.2855829 0.9187188 -0.05553925 ... 2.366393 -0.4325564 -0.94198984] [-0.07085466 0.8723347 0.36612487 ... -0.32211822 0.43508607 0.11028602] [ 0.8723347 0.36612487 0.03761308 ... 0.43508607 0.11028602 1.0289316 ]]]; ov_res: [[[-0.17102072 -1.4858087 1.3672577 ... 0.4392718 1.2774082 -2.4395776 ] [-1.4858087 1.3672577 0.9645663 ... 1.2774082 -2.4395776 1.3133773 ] [ 0.9015107 1.4086969 0.932836 ... 0.851296 0.6957388 -0.7358765 ] ... [-0.83242977 -0.11605601 -0.27986342 ... -0.24764927 -0.19872795 0.9237676 ] [-0.49340814 -0.602067 -0.12853329 ... 0.20077032 0.6448642 1.033749 ] [-0.602067 -0.12853329 -1.1445274 ... 0.6448642 1.033749 -0.8990981 ]] [[ 1.3861086 0.4602721 -2.613788 ... 0.19717401 -0.1862808 -0.89123297] [ 0.4602721 -2.613788 -2.2062244 ... -0.1862808 -0.89123297 0.3081011 ] [ 0.32090083 -1.2420402 2.531699 ... 0.21026355 2.0560827 0.28687152] ... [-0.23709615 -1.1868316 0.77646387 ... -0.24858367 1.2574445 0.8867583 ] [-0.9578384 1.0751009 -0.02991073 ... 
0.01574345 -0.9989643 1.0904342 ] [ 1.0751009 -0.02991073 -0.9170688 ... -0.9989643 1.0904342 -0.67447793]] [[ 0.5413455 -0.57909536 0.01034448 ... 0.5445512 -0.5723087 -0.6825664 ] [-0.57909536 0.01034448 0.92785656 ... -0.5723087 -0.6825664 1.2328783 ] [-0.9827988 -0.93162304 -0.08847576 ... -0.37111557 -0.6555878 2.3429413 ] ... [ 0.67945415 -0.1316628 -2.9246714 ... -0.7417198 1.3491606 0.94346124] [ 0.3074734 -0.18140554 -0.15330543 ... -1.2298881 -0.23833323 0.06158933] [-0.18140554 -0.15330543 0.35204694 ... -0.23833323 0.06158933 1.3875837 ]] ... [[-0.6289409 -1.150141 -0.9946157 ... -1.9617821 1.4738125 -1.1236391 ] [-1.150141 -0.9946157 0.1767962 ... 1.4738125 -1.1236391 -0.08045597] [-1.8425571 0.7096923 0.04897838 ... 0.7158934 0.843137 0.72912276] ... [ 0.32007456 1.5087141 -0.57912165 ... 0.62333494 0.9864269 -0.978918 ] [-0.07592272 -1.0899429 -0.08293354 ... -0.50505716 2.3159173 -0.33274594] [-1.0899429 -0.08293354 -1.6060609 ... 2.3159173 -0.33274594 1.3697708 ]] [[ 0.98132724 0.34627292 -2.038976 ... 0.59709686 -1.5170192 -0.96140146] [ 0.34627292 -2.038976 -0.98696244 ... -1.5170192 -0.96140146 -0.7908393 ] [-0.2621394 0.50274485 0.8223171 ... -0.91062844 0.38275203 -1.4809593 ] ... [ 0.17601283 1.6386292 -0.30624107 ... 1.4872605 -0.83126754 0.17788926] [-0.21644285 -1.5950924 1.0283298 ... 1.6012568 0.00563313 1.9258581 ] [-1.5950924 1.0283298 0.03271774 ... 0.00563313 1.9258581 -0.42306703]] [[-0.9437143 0.8362489 -0.36679116 ... 0.1513071 1.7910544 2.431035 ] [ 0.8362489 -0.36679116 -0.11348347 ... 1.7910544 2.431035 2.1839025 ] [-1.762251 -0.2430973 0.09176814 ... 0.95595056 0.87234783 -0.1836465 ] ... [-0.2855829 0.9187188 -0.05553925 ... 2.366393 -0.4325564 -0.94198984] [-0.07085466 0.8723347 0.36612487 ... -0.32211822 0.43508607 0.11028602] [ 0.8723347 0.36612487 0.03761308 ... 0.43508607 0.11028602 1.0289316 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4955.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.07212894 0.21583694 0.14699882 ... 0.96265686 -0.12166836 1.2898217 ] [ 0.21583694 0.14699882 0.6047915 ... -0.12166836 1.2898217 1.1276282 ] [ 0.14699882 0.6047915 -0.96713805 ... 1.2898217 1.1276282 -0.69190216] ... [ 1.1830126 -1.1935558 -0.19242504 ... 0.38080737 -0.2766079 -0.34371164] [-1.1935558 -0.19242504 0.4416198 ... -0.2766079 -0.34371164 0.09957639] [-0.19242504 0.4416198 -0.36594367 ... -0.34371164 0.09957639 -0.21805479]] [[-1.7140626 -0.2280401 -0.57989687 ... 0.43392923 0.14441462 2.2022464 ] [-0.2280401 -0.57989687 1.2070084 ... 0.14441462 2.2022464 0.16857134] [-0.57989687 1.2070084 -2.0017188 ... 2.2022464 0.16857134 -0.67736477] ... [-1.0684553 -0.61074543 1.7861078 ... 0.69569516 -0.9939448 0.0668291 ] [-0.61074543 1.7861078 0.00756952 ... -0.9939448 0.0668291 0.07889169] [ 1.7861078 0.00756952 -1.0464485 ... 0.0668291 0.07889169 -1.5920008 ]] [[ 0.48269418 0.37140658 -0.7634714 ... 0.48321 -0.38485488 -0.5699907 ] [ 0.37140658 -0.7634714 -0.8117466 ... -0.38485488 -0.5699907 -0.83082414] [-0.7634714 -0.8117466 -0.6402229 ... -0.5699907 -0.83082414 -1.245173 ] ... [ 2.7766106 -0.18243282 1.7452294 ... 0.60784894 -0.6588677 0.5753772 ] [-0.18243282 1.7452294 1.1994524 ... -0.6588677 0.5753772 0.5766717 ] [ 1.7452294 1.1994524 -0.6619743 ... 0.5753772 0.5766717 -0.00354976]] ... [[-0.9174989 0.03546348 -1.1656678 ... -0.9765013 2.4453042 -1.391632 ] [ 0.03546348 -1.1656678 0.36415002 ... 
2.4453042 -1.391632 0.8564916 ] [-1.1656678 0.36415002 -0.725725 ... -1.391632 0.8564916 -0.7228539 ] ... [ 0.64925784 0.88777375 -1.8495451 ... 0.24366699 -2.8578796 -1.8399286 ] [ 0.88777375 -1.8495451 -1.203988 ... -2.8578796 -1.8399286 -0.95989764] [-1.8495451 -1.203988 -1.4390044 ... -1.8399286 -0.95989764 -1.907791 ]] [[ 1.1000326 -1.3118861 -0.7371555 ... 0.03117028 -1.4009789 1.2275517 ] [-1.3118861 -0.7371555 -0.49219617 ... -1.4009789 1.2275517 -0.24354845] [-0.7371555 -0.49219617 0.97694886 ... 1.2275517 -0.24354845 -0.49361575] ... [ 1.8427081 -0.58319753 -0.8803978 ... 0.33451614 -1.0067806 -1.8938638 ] [-0.58319753 -0.8803978 -0.39995807 ... -1.0067806 -1.8938638 -1.5978249 ] [-0.8803978 -0.39995807 0.35201046 ... -1.8938638 -1.5978249 0.63613266]] [[ 0.64795893 0.6894816 0.51692975 ... 0.5091713 1.3016319 0.14869264] [ 0.6894816 0.51692975 0.9549788 ... 1.3016319 0.14869264 0.16435717] [ 0.51692975 0.9549788 -0.30691183 ... 0.14869264 0.16435717 1.3737372 ] ... [-1.6170206 -2.5660727 0.9674366 ... 0.8183309 -2.278001 -0.39167866] [-2.5660727 0.9674366 1.7216414 ... -2.278001 -0.39167866 -0.48059267] [ 0.9674366 1.7216414 -0.12484074 ... -0.39167866 -0.48059267 -1.3005664 ]]]; ov_res: [[[ 0.07212894 0.21583694 0.14699882 ... 0.96265686 -0.12166836 1.2898217 ] [ 0.21583694 0.14699882 0.6047915 ... -0.12166836 1.2898217 1.1276282 ] [ 0.14699882 0.6047915 -0.96713805 ... 1.2898217 1.1276282 -0.69190216] ... [ 1.1830126 -1.1935558 -0.19242504 ... 0.38080737 -0.2766079 -0.34371164] [-1.1935558 -0.19242504 0.4416198 ... -0.2766079 -0.34371164 0.09957639] [-0.19242504 0.4416198 -0.36594367 ... -0.34371164 0.09957639 -0.21805479]] [[-1.7140626 -0.2280401 -0.57989687 ... 0.43392923 0.14441462 2.2022464 ] [-0.2280401 -0.57989687 1.2070084 ... 0.14441462 2.2022464 0.16857134] [-0.57989687 1.2070084 -2.0017188 ... 2.2022464 0.16857134 -0.67736477] ... [-1.0684553 -0.61074543 1.7861078 ... 0.69569516 -0.9939448 0.0668291 ] [-0.61074543 1.7861078 0.00756952 ... 
-0.9939448 0.0668291 0.07889169] [ 1.7861078 0.00756952 -1.0464485 ... 0.0668291 0.07889169 -1.5920008 ]] [[ 0.48269418 0.37140658 -0.7634714 ... 0.48321 -0.38485488 -0.5699907 ] [ 0.37140658 -0.7634714 -0.8117466 ... -0.38485488 -0.5699907 -0.83082414] [-0.7634714 -0.8117466 -0.6402229 ... -0.5699907 -0.83082414 -1.245173 ] ... [ 2.7766106 -0.18243282 1.7452294 ... 0.60784894 -0.6588677 0.5753772 ] [-0.18243282 1.7452294 1.1994524 ... -0.6588677 0.5753772 0.5766717 ] [ 1.7452294 1.1994524 -0.6619743 ... 0.5753772 0.5766717 -0.00354976]] ... [[-0.9174989 0.03546348 -1.1656678 ... -0.9765013 2.4453042 -1.391632 ] [ 0.03546348 -1.1656678 0.36415002 ... 2.4453042 -1.391632 0.8564916 ] [-1.1656678 0.36415002 -0.725725 ... -1.391632 0.8564916 -0.7228539 ] ... [ 0.64925784 0.88777375 -1.8495451 ... 0.24366699 -2.8578796 -1.8399286 ] [ 0.88777375 -1.8495451 -1.203988 ... -2.8578796 -1.8399286 -0.95989764] [-1.8495451 -1.203988 -1.4390044 ... -1.8399286 -0.95989764 -1.907791 ]] [[ 1.1000326 -1.3118861 -0.7371555 ... 0.03117028 -1.4009789 1.2275517 ] [-1.3118861 -0.7371555 -0.49219617 ... -1.4009789 1.2275517 -0.24354845] [-0.7371555 -0.49219617 0.97694886 ... 1.2275517 -0.24354845 -0.49361575] ... [ 1.8427081 -0.58319753 -0.8803978 ... 0.33451614 -1.0067806 -1.8938638 ] [-0.58319753 -0.8803978 -0.39995807 ... -1.0067806 -1.8938638 -1.5978249 ] [-0.8803978 -0.39995807 0.35201046 ... -1.8938638 -1.5978249 0.63613266]] [[ 0.64795893 0.6894816 0.51692975 ... 0.5091713 1.3016319 0.14869264] [ 0.6894816 0.51692975 0.9549788 ... 1.3016319 0.14869264 0.16435717] [ 0.51692975 0.9549788 -0.30691183 ... 0.14869264 0.16435717 1.3737372 ] ... [-1.6170206 -2.5660727 0.9674366 ... 0.8183309 -2.278001 -0.39167866] [-2.5660727 0.9674366 1.7216414 ... -2.278001 -0.39167866 -0.48059267] [ 0.9674366 1.7216414 -0.12484074 ... -0.39167866 -0.48059267 -1.3005664 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4957.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 7.4085039e-01 -9.1666055e-01 5.5458838e-01 ... 9.1255736e-01 -1.3620551e+00 -1.3532894e+00] [-9.1666055e-01 5.5458838e-01 8.7280631e-01 ... -1.3620551e+00 -1.3532894e+00 -1.7668967e+00] [-1.1040537e+00 3.2839417e-03 1.0279386e+00 ... -1.8982561e+00 -5.1728451e-01 -1.5394950e+00] ... [ 4.6048105e-01 1.1708941e-01 6.1943299e-01 ... -3.0733889e-01 -9.8107749e-01 -8.4101641e-01] [-6.5990502e-01 3.4073043e+00 -3.2146871e-01 ... -4.7794878e-01 6.8210703e-01 -6.9852376e-01] [ 3.4073043e+00 -3.2146871e-01 -6.6191755e-02 ... 6.8210703e-01 -6.9852376e-01 -1.3938470e-01]] [[-4.4756708e-01 -8.9727873e-01 -4.6195939e-01 ... -5.9351504e-01 1.5419006e+00 -5.5059016e-01] [-8.9727873e-01 -4.6195939e-01 3.1982604e-01 ... 1.5419006e+00 -5.5059016e-01 -5.5952674e-01] [ 1.5020081e-01 -3.0887078e-02 -1.3673416e+00 ... -6.2795800e-01 1.7363117e+00 8.4381986e-01] ... [ 1.1153847e-01 8.9644110e-01 -1.2658418e+00 ... 5.0207496e-01 -3.6287895e-01 7.0016995e-02] [ 3.3282992e-01 -6.0478127e-01 5.0484252e-01 ... -1.8203777e+00 -1.1756539e+00 -3.7481481e-01] [-6.0478127e-01 5.0484252e-01 3.3061522e-01 ... -1.1756539e+00 -3.7481481e-01 -9.1125762e-01]] [[-1.7070926e+00 -1.1120814e+00 2.6262522e-01 ... 5.3458977e-01 2.6916930e-01 -3.5363516e-01] [-1.1120814e+00 2.6262522e-01 1.6258473e+00 ... 2.6916930e-01 -3.5363516e-01 1.5746248e+00] [ 5.1992428e-01 7.1115631e-01 -1.8692713e+00 ... 1.3356780e-01 -1.5330943e+00 -1.4529486e+00] ... 
[ 1.8650120e-01 5.7779336e-01 -4.3946046e-01 ... -2.1732037e-01 -1.2949568e+00 -9.8894048e-01] [ 8.7931031e-01 1.4308075e+00 -8.1147307e-01 ... -4.7335908e-01 -8.9594311e-01 8.0674961e-02] [ 1.4308075e+00 -8.1147307e-01 4.8015767e-01 ... -8.9594311e-01 8.0674961e-02 8.6263739e-02]] ... [[-3.5133311e-01 1.7623996e+00 -1.6448389e+00 ... 4.1983375e-01 3.6997390e-01 -9.2016029e-01] [ 1.7623996e+00 -1.6448389e+00 1.6151831e-01 ... 3.6997390e-01 -9.2016029e-01 -9.2239571e-01] [ 2.8702998e-01 -9.7911507e-01 -1.4495280e+00 ... -1.8803477e-01 -7.1535671e-01 -4.5531762e-01] ... [ 2.0755050e+00 -4.1730937e-01 2.0412710e+00 ... -7.8876173e-01 -7.2038442e-01 -1.2019420e+00] [-1.8965799e-01 -4.4218203e-01 3.3578956e-01 ... -9.2191088e-01 -8.7457287e-01 5.8070958e-01] [-4.4218203e-01 3.3578956e-01 2.3947597e-01 ... -8.7457287e-01 5.8070958e-01 7.3498189e-02]] [[ 4.8332497e-01 8.2612127e-01 9.1954052e-01 ... 2.2986224e+00 -1.4580703e+00 2.7091110e-01] [ 8.2612127e-01 9.1954052e-01 2.1432879e+00 ... -1.4580703e+00 2.7091110e-01 -8.8884306e-01] [-6.0033566e-01 -1.3375460e+00 5.1189411e-01 ... -4.5123550e-01 8.1851196e-01 -1.0722148e+00] ... [ 9.3260372e-01 4.0205985e-01 1.1008847e+00 ... -1.2038165e+00 1.8288856e+00 -5.7681376e-01] [-1.4840829e+00 -6.4066440e-01 1.5638013e-01 ... 5.6785691e-01 -1.0668191e+00 4.2730701e-01] [-6.4066440e-01 1.5638013e-01 7.3248094e-01 ... -1.0668191e+00 4.2730701e-01 -1.2331424e+00]] [[ 1.2685263e+00 5.3064090e-01 -1.3024765e+00 ... 1.4146312e+00 1.2817371e+00 8.7139851e-01] [ 5.3064090e-01 -1.3024765e+00 2.0095749e+00 ... 1.2817371e+00 8.7139851e-01 5.2873737e-01] [-1.3722217e+00 2.5596714e-01 -9.5282480e-02 ... 6.6496074e-01 2.9938683e-01 2.9140282e-01] ... [ 7.5699061e-01 -1.0926505e+00 -1.8121757e-01 ... -2.0004723e+00 5.7373065e-01 2.2331285e-01] [ 9.8641711e-01 -9.2183411e-01 7.6289856e-01 ... -8.9481461e-01 -1.9850950e+00 -4.4053262e-01] [-9.2183411e-01 7.6289856e-01 5.8099043e-01 ... 
-1.9850950e+00 -4.4053262e-01 -1.3126029e+00]]]; ov_res: [[[ 7.4085039e-01 -9.1666055e-01 5.5458838e-01 ... 9.1255736e-01 -1.3620551e+00 -1.3532894e+00] [-9.1666055e-01 5.5458838e-01 8.7280631e-01 ... -1.3620551e+00 -1.3532894e+00 -1.7668967e+00] [-1.1040537e+00 3.2839417e-03 1.0279386e+00 ... -1.8982561e+00 -5.1728451e-01 -1.5394950e+00] ... [ 4.6048105e-01 1.1708941e-01 6.1943299e-01 ... -3.0733889e-01 -9.8107749e-01 -8.4101641e-01] [-6.5990502e-01 3.4073043e+00 -3.2146871e-01 ... -4.7794878e-01 6.8210703e-01 -6.9852376e-01] [ 3.4073043e+00 -3.2146871e-01 -6.6191755e-02 ... 6.8210703e-01 -6.9852376e-01 -1.3938470e-01]] [[-4.4756708e-01 -8.9727873e-01 -4.6195939e-01 ... -5.9351504e-01 1.5419006e+00 -5.5059016e-01] [-8.9727873e-01 -4.6195939e-01 3.1982604e-01 ... 1.5419006e+00 -5.5059016e-01 -5.5952674e-01] [ 1.5020081e-01 -3.0887078e-02 -1.3673416e+00 ... -6.2795800e-01 1.7363117e+00 8.4381986e-01] ... [ 1.1153847e-01 8.9644110e-01 -1.2658418e+00 ... 5.0207496e-01 -3.6287895e-01 7.0016995e-02] [ 3.3282992e-01 -6.0478127e-01 5.0484252e-01 ... -1.8203777e+00 -1.1756539e+00 -3.7481481e-01] [-6.0478127e-01 5.0484252e-01 3.3061522e-01 ... -1.1756539e+00 -3.7481481e-01 -9.1125762e-01]] [[-1.7070926e+00 -1.1120814e+00 2.6262522e-01 ... 5.3458977e-01 2.6916930e-01 -3.5363516e-01] [-1.1120814e+00 2.6262522e-01 1.6258473e+00 ... 2.6916930e-01 -3.5363516e-01 1.5746248e+00] [ 5.1992428e-01 7.1115631e-01 -1.8692713e+00 ... 1.3356780e-01 -1.5330943e+00 -1.4529486e+00] ... [ 1.8650120e-01 5.7779336e-01 -4.3946046e-01 ... -2.1732037e-01 -1.2949568e+00 -9.8894048e-01] [ 8.7931031e-01 1.4308075e+00 -8.1147307e-01 ... -4.7335908e-01 -8.9594311e-01 8.0674961e-02] [ 1.4308075e+00 -8.1147307e-01 4.8015767e-01 ... -8.9594311e-01 8.0674961e-02 8.6263739e-02]] ... [[-3.5133311e-01 1.7623996e+00 -1.6448389e+00 ... 4.1983375e-01 3.6997390e-01 -9.2016029e-01] [ 1.7623996e+00 -1.6448389e+00 1.6151831e-01 ... 
3.6997390e-01 -9.2016029e-01 -9.2239571e-01] [ 2.8702998e-01 -9.7911507e-01 -1.4495280e+00 ... -1.8803477e-01 -7.1535671e-01 -4.5531762e-01] ... [ 2.0755050e+00 -4.1730937e-01 2.0412710e+00 ... -7.8876173e-01 -7.2038442e-01 -1.2019420e+00] [-1.8965799e-01 -4.4218203e-01 3.3578956e-01 ... -9.2191088e-01 -8.7457287e-01 5.8070958e-01] [-4.4218203e-01 3.3578956e-01 2.3947597e-01 ... -8.7457287e-01 5.8070958e-01 7.3498189e-02]] [[ 4.8332497e-01 8.2612127e-01 9.1954052e-01 ... 2.2986224e+00 -1.4580703e+00 2.7091110e-01] [ 8.2612127e-01 9.1954052e-01 2.1432879e+00 ... -1.4580703e+00 2.7091110e-01 -8.8884306e-01] [-6.0033566e-01 -1.3375460e+00 5.1189411e-01 ... -4.5123550e-01 8.1851196e-01 -1.0722148e+00] ... [ 9.3260372e-01 4.0205985e-01 1.1008847e+00 ... -1.2038165e+00 1.8288856e+00 -5.7681376e-01] [-1.4840829e+00 -6.4066440e-01 1.5638013e-01 ... 5.6785691e-01 -1.0668191e+00 4.2730701e-01] [-6.4066440e-01 1.5638013e-01 7.3248094e-01 ... -1.0668191e+00 4.2730701e-01 -1.2331424e+00]] [[ 1.2685263e+00 5.3064090e-01 -1.3024765e+00 ... 1.4146312e+00 1.2817371e+00 8.7139851e-01] [ 5.3064090e-01 -1.3024765e+00 2.0095749e+00 ... 1.2817371e+00 8.7139851e-01 5.2873737e-01] [-1.3722217e+00 2.5596714e-01 -9.5282480e-02 ... 6.6496074e-01 2.9938683e-01 2.9140282e-01] ... [ 7.5699061e-01 -1.0926505e+00 -1.8121757e-01 ... -2.0004723e+00 5.7373065e-01 2.2331285e-01] [ 9.8641711e-01 -9.2183411e-01 7.6289856e-01 ... -8.9481461e-01 -1.9850950e+00 -4.4053262e-01] [-9.2183411e-01 7.6289856e-01 5.8099043e-01 ... -1.9850950e+00 -4.4053262e-01 -1.3126029e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:1 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4959.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0.30221727 -0.35105038 -0.40407178 ... -0.8730791 -0.9824828 1.5381266 ] [-1.6336061 -0.73436964 2.5307677 ... 0.5273406 0.5492827 0.5829589 ] [ 1.3121245 1.2122002 0.9836749 ... -0.9932275 1.8278993 -0.22034638]] [[-1.082224 0.81185967 0.24473414 ... -0.10226846 1.922512 0.897301 ] [ 0.16411686 -2.072486 0.3975054 ... -1.92528 -0.05089485 -0.40130195] [ 0.40868777 -0.20866679 0.7625214 ... -1.2618991 0.95990986 1.898148 ]] [[ 1.4522939 -0.81442547 1.7621547 ... -1.3874818 -1.1076398 -0.5124668 ] [-0.6575461 0.15576579 -0.04642632 ... -1.6971409 1.1667696 0.8117573 ] [ 0.0505389 -0.85387075 0.32597768 ... 1.7330853 -0.67210895 0.5446661 ]] ... [[ 0.36985585 -1.2024937 -0.21334429 ... -0.6872563 0.9453142 -1.0574334 ] [ 0.2757106 0.01455951 -0.4791507 ... 0.3344301 -0.10569782 0.90960807] [-0.88843024 -0.8908963 1.0242136 ... -0.90302026 -0.41215387 1.3685757 ]] [[-0.42437962 0.16280292 -0.13806388 ... 2.1642048 -0.70736355 -0.9618889 ] [ 0.645097 1.1069391 0.21925767 ... 2.136168 0.47149533 -0.516467 ] [ 1.5192492 0.00655388 0.00605713 ... 1.1763091 0.61181307 1.8524536 ]] [[ 0.0933329 -0.2009296 -1.454624 ... 1.1554279 1.1485733 0.6454802 ] [ 0.16015883 0.86548674 -0.6974598 ... 0.4771716 -0.03742743 -1.0887936 ] [ 1.7497492 -0.6467734 -0.7093186 ... 0.05703708 0.7183074 -0.34075192]]]; ov_res: [[[ 0.30221727 -0.35105038 -0.40407178 ... -0.8730791 -0.9824828 1.5381266 ] [-1.6336061 -0.73436964 2.5307677 ... 
0.5273406 0.5492827 0.5829589 ] [ 1.3121245 1.2122002 0.9836749 ... -0.9932275 1.8278993 -0.22034638]] [[-1.082224 0.81185967 0.24473414 ... -0.10226846 1.922512 0.897301 ] [ 0.16411686 -2.072486 0.3975054 ... -1.92528 -0.05089485 -0.40130195] [ 0.40868777 -0.20866679 0.7625214 ... -1.2618991 0.95990986 1.898148 ]] [[ 1.4522939 -0.81442547 1.7621547 ... -1.3874818 -1.1076398 -0.5124668 ] [-0.6575461 0.15576579 -0.04642632 ... -1.6971409 1.1667696 0.8117573 ] [ 0.0505389 -0.85387075 0.32597768 ... 1.7330853 -0.67210895 0.5446661 ]] ... [[ 0.36985585 -1.2024937 -0.21334429 ... -0.6872563 0.9453142 -1.0574334 ] [ 0.2757106 0.01455951 -0.4791507 ... 0.3344301 -0.10569782 0.90960807] [-0.88843024 -0.8908963 1.0242136 ... -0.90302026 -0.41215387 1.3685757 ]] [[-0.42437962 0.16280292 -0.13806388 ... 2.1642048 -0.70736355 -0.9618889 ] [ 0.645097 1.1069391 0.21925767 ... 2.136168 0.47149533 -0.516467 ] [ 1.5192492 0.00655388 0.00605713 ... 1.1763091 0.61181307 1.8524536 ]] [[ 0.0933329 -0.2009296 -1.454624 ... 1.1554279 1.1485733 0.6454802 ] [ 0.16015883 0.86548674 -0.6974598 ... 0.4771716 -0.03742743 -1.0887936 ] [ 1.7497492 -0.6467734 -0.7093186 ... 0.05703708 0.7183074 -0.34075192]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4961.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.6723842 -0.9854965 -0.10559052 ... -0.10319707 -0.61240256 -1.6901659 ] [-0.10559052 -1.0656424 -0.26642317 ... -1.6901659 -0.23853867 0.4623469 ] [-0.26642317 0.690105 -0.46186343 ... 0.4623469 -0.83830404 -1.7762842 ] ... [ 0.71921843 0.04677568 -0.6274345 ... -0.2514054 -0.24081336 0.3281854 ] [-0.6274345 0.39837036 0.19129972 ... 0.3281854 1.3159769 -2.2451785 ] [ 0.19129972 -0.40293056 0.38928747 ... -2.2451785 0.4264768 0.15777393]] [[-0.38963708 -1.0575083 1.1061475 ... 0.9960759 0.35777417 0.82114345] [ 1.1061475 0.44018456 -0.540767 ... 0.82114345 -1.08214 -1.6472534 ] [-0.540767 1.1219057 1.0185136 ... -1.6472534 -0.60404 -1.493498 ] ... [ 0.08807723 -0.14489008 -0.84037054 ... -0.1344595 -1.8015596 0.05502091] [-0.84037054 -1.3348608 -0.7195495 ... 0.05502091 0.10295556 -0.5570994 ] [-0.7195495 -1.117804 0.633231 ... -0.5570994 -0.33843392 0.11421622]] [[-0.69546586 0.66166025 -0.6616935 ... 1.4830449 -0.05051669 -0.42412865] [-0.6616935 -0.17920104 -0.09758941 ... -0.42412865 0.15546834 0.58310884] [-0.09758941 0.6935229 -1.38272 ... 0.58310884 -1.1887798 -0.33868885] ... [-0.66976094 -0.37979156 -0.36272946 ... 0.84197426 1.0426478 -0.5053469 ] [-0.36272946 0.44285864 0.7777156 ... -0.5053469 -0.5936573 -0.37013802] [ 0.7777156 -0.9680913 -1.30069 ... -0.37013802 1.7132424 -1.7250531 ]] ... [[-1.1139299 0.83393365 0.27526444 ... -0.3829555 1.336584 -0.03249897] [ 0.27526444 1.078629 -0.82615376 ... 
-0.03249897 0.25870898 1.5918349 ] [-0.82615376 -1.4981561 0.2414436 ... 1.5918349 -0.38976395 0.89579934] ... [-0.13593708 -0.572832 -0.19826682 ... 0.59577215 2.1194632 -0.56693166] [-0.19826682 0.6671209 -0.33315164 ... -0.56693166 -0.33686668 -1.4978099 ] [-0.33315164 0.8532734 -1.3144222 ... -1.4978099 -1.357779 1.5296619 ]] [[ 0.25660646 -0.6173252 1.7211581 ... 2.4990127 -1.3738352 -0.05003061] [ 1.7211581 -0.431757 1.8901843 ... -0.05003061 0.23296073 0.06071333] [ 1.8901843 -0.16543053 0.8110955 ... 0.06071333 -0.10732684 -0.92777026] ... [ 0.22618715 0.803674 1.5031321 ... -0.46322197 0.62619203 2.6913407 ] [ 1.5031321 0.53721416 0.4241908 ... 2.6913407 0.70911705 -0.94372207] [ 0.4241908 0.05120123 0.50909674 ... -0.94372207 -0.22560522 0.3521428 ]] [[-1.2048144 -1.1596938 -0.15782239 ... -1.7703812 -0.5116326 -0.7747262 ] [-0.15782239 -0.86503595 -0.14593033 ... -0.7747262 1.7333068 -1.7866192 ] [-0.14593033 -0.11486339 -0.905393 ... -1.7866192 -0.87651986 0.42632484] ... [-0.95391744 1.9895208 0.2969904 ... 1.4678273 -1.1460682 1.2294494 ] [ 0.2969904 0.32534483 -0.62250024 ... 1.2294494 -1.5209877 1.0933036 ] [-0.62250024 0.6731703 1.034695 ... 1.0933036 1.0918268 -2.0030427 ]]]; ov_res: [[[ 0.6723842 -0.9854965 -0.10559052 ... -0.10319707 -0.61240256 -1.6901659 ] [-0.10559052 -1.0656424 -0.26642317 ... -1.6901659 -0.23853867 0.4623469 ] [-0.26642317 0.690105 -0.46186343 ... 0.4623469 -0.83830404 -1.7762842 ] ... [ 0.71921843 0.04677568 -0.6274345 ... -0.2514054 -0.24081336 0.3281854 ] [-0.6274345 0.39837036 0.19129972 ... 0.3281854 1.3159769 -2.2451785 ] [ 0.19129972 -0.40293056 0.38928747 ... -2.2451785 0.4264768 0.15777393]] [[-0.38963708 -1.0575083 1.1061475 ... 0.9960759 0.35777417 0.82114345] [ 1.1061475 0.44018456 -0.540767 ... 0.82114345 -1.08214 -1.6472534 ] [-0.540767 1.1219057 1.0185136 ... -1.6472534 -0.60404 -1.493498 ] ... [ 0.08807723 -0.14489008 -0.84037054 ... -0.1344595 -1.8015596 0.05502091] [-0.84037054 -1.3348608 -0.7195495 ... 
0.05502091 0.10295556 -0.5570994 ] [-0.7195495 -1.117804 0.633231 ... -0.5570994 -0.33843392 0.11421622]] [[-0.69546586 0.66166025 -0.6616935 ... 1.4830449 -0.05051669 -0.42412865] [-0.6616935 -0.17920104 -0.09758941 ... -0.42412865 0.15546834 0.58310884] [-0.09758941 0.6935229 -1.38272 ... 0.58310884 -1.1887798 -0.33868885] ... [-0.66976094 -0.37979156 -0.36272946 ... 0.84197426 1.0426478 -0.5053469 ] [-0.36272946 0.44285864 0.7777156 ... -0.5053469 -0.5936573 -0.37013802] [ 0.7777156 -0.9680913 -1.30069 ... -0.37013802 1.7132424 -1.7250531 ]] ... [[-1.1139299 0.83393365 0.27526444 ... -0.3829555 1.336584 -0.03249897] [ 0.27526444 1.078629 -0.82615376 ... -0.03249897 0.25870898 1.5918349 ] [-0.82615376 -1.4981561 0.2414436 ... 1.5918349 -0.38976395 0.89579934] ... [-0.13593708 -0.572832 -0.19826682 ... 0.59577215 2.1194632 -0.56693166] [-0.19826682 0.6671209 -0.33315164 ... -0.56693166 -0.33686668 -1.4978099 ] [-0.33315164 0.8532734 -1.3144222 ... -1.4978099 -1.357779 1.5296619 ]] [[ 0.25660646 -0.6173252 1.7211581 ... 2.4990127 -1.3738352 -0.05003061] [ 1.7211581 -0.431757 1.8901843 ... -0.05003061 0.23296073 0.06071333] [ 1.8901843 -0.16543053 0.8110955 ... 0.06071333 -0.10732684 -0.92777026] ... [ 0.22618715 0.803674 1.5031321 ... -0.46322197 0.62619203 2.6913407 ] [ 1.5031321 0.53721416 0.4241908 ... 2.6913407 0.70911705 -0.94372207] [ 0.4241908 0.05120123 0.50909674 ... -0.94372207 -0.22560522 0.3521428 ]] [[-1.2048144 -1.1596938 -0.15782239 ... -1.7703812 -0.5116326 -0.7747262 ] [-0.15782239 -0.86503595 -0.14593033 ... -0.7747262 1.7333068 -1.7866192 ] [-0.14593033 -0.11486339 -0.905393 ... -1.7866192 -0.87651986 0.42632484] ... [-0.95391744 1.9895208 0.2969904 ... 1.4678273 -1.1460682 1.2294494 ] [ 0.2969904 0.32534483 -0.62250024 ... 1.2294494 -1.5209877 1.0933036 ] [-0.62250024 0.6731703 1.034695 ... 1.0933036 1.0918268 -2.0030427 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4963.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.9473739 -0.06414369 -0.9720383 ... -0.39230415 -1.2113413 -1.5625736 ] [-0.9720383 0.41648683 -1.0863631 ... -1.5625736 0.56076074 -0.15179978] [-1.6691123 -0.24124113 -0.96627295 ... -0.01049839 -0.0994521 0.51643556] ... [ 0.53655416 0.12263483 -0.43061176 ... -0.09565381 1.9727529 0.73390186] [ 1.173698 -2.0186894 0.9630798 ... -1.2973304 1.7404431 0.6678405 ] [ 0.9630798 -0.10145495 0.75847596 ... 0.6678405 -0.68544745 0.1451181 ]] [[ 0.10696506 -0.45178926 -0.39229026 ... 1.562153 -0.29539752 0.6389584 ] [-0.39229026 -0.45505878 -0.2975939 ... 0.6389584 -1.1253453 -0.39013466] [-0.7901176 -1.2683946 1.5141631 ... -0.59524405 -0.21237575 1.3303307 ] ... [-2.1818068 0.46753374 0.3431004 ... 0.515319 2.1174974 -1.5501511 ] [ 0.6287763 0.6569649 2.0882108 ... -2.6759648 0.10918107 -1.0876783 ] [ 2.0882108 -1.6220148 0.24608447 ... -1.0876783 0.15446563 0.41285107]] [[ 1.5417582 1.75737 -3.0070953 ... 1.0556866 2.368981 -0.6985008 ] [-3.0070953 1.0720166 0.45616764 ... -0.6985008 0.1867082 -1.4465518 ] [ 0.09099521 0.33846486 0.12309623 ... -0.20034173 -1.109428 -0.6796337 ] ... [ 0.81373495 -0.41456914 -2.2304006 ... 0.30459878 -0.015598 0.30114782] [-0.92620057 -2.3941066 2.1045313 ... 0.87610173 1.0969323 -0.48355138] [ 2.1045313 -0.32917556 -0.04754185 ... -0.48355138 0.00518768 0.24098025]] ... [[ 0.3983058 -0.36226353 2.5241554 ... -0.13491194 -0.00325259 0.53247565] [ 2.5241554 0.2936862 -0.35671294 ... 
0.53247565 0.20427568 0.17102589] [-2.8586352 -0.39399722 1.4952917 ... -1.5798452 0.20366046 -0.48282477] ... [ 0.50144273 -0.24419397 -0.41718832 ... 0.10422101 -1.5846092 1.4153572 ] [ 0.32482353 -0.11520638 0.4972015 ... -0.6443822 -0.74069005 2.781402 ] [ 0.4972015 0.5366649 -0.69545907 ... 2.781402 0.29554537 -0.7652479 ]] [[-1.0739993 -1.3327042 -1.4280794 ... -0.78335404 -2.0794957 0.5417694 ] [-1.4280794 0.7443849 0.4091878 ... 0.5417694 0.87559384 1.2630342 ] [-0.50540835 -2.4078379 1.3524967 ... -0.43922538 0.06196406 1.104222 ] ... [-1.886541 0.02100582 0.23910025 ... 0.02890321 0.6755948 -1.1379826 ] [-0.59089756 -1.908764 0.4422642 ... -2.428964 1.0147299 -1.0462269 ] [ 0.4422642 -2.1044834 0.6917412 ... -1.0462269 -0.06937607 -0.49915475]] [[ 1.8506058 0.24246961 -1.1521258 ... -0.24033517 0.46007785 -1.2489132 ] [-1.1521258 -1.2123201 -0.43262365 ... -1.2489132 0.16338699 -0.3414641 ] [ 1.65474 0.5129636 -0.18689449 ... 0.9822014 -0.45707482 -0.9874472 ] ... [-1.4883271 0.9166501 -0.43493256 ... 0.60494167 0.56430995 -0.8470431 ] [-2.5879314 0.1546385 -0.5272203 ... 0.177749 0.47096363 0.39329475] [-0.5272203 -0.6349179 0.5247071 ... 0.39329475 0.17874238 0.10244427]]]; ov_res: [[[ 0.9473739 -0.06414369 -0.9720383 ... -0.39230415 -1.2113413 -1.5625736 ] [-0.9720383 0.41648683 -1.0863631 ... -1.5625736 0.56076074 -0.15179978] [-1.6691123 -0.24124113 -0.96627295 ... -0.01049839 -0.0994521 0.51643556] ... [ 0.53655416 0.12263483 -0.43061176 ... -0.09565381 1.9727529 0.73390186] [ 1.173698 -2.0186894 0.9630798 ... -1.2973304 1.7404431 0.6678405 ] [ 0.9630798 -0.10145495 0.75847596 ... 0.6678405 -0.68544745 0.1451181 ]] [[ 0.10696506 -0.45178926 -0.39229026 ... 1.562153 -0.29539752 0.6389584 ] [-0.39229026 -0.45505878 -0.2975939 ... 0.6389584 -1.1253453 -0.39013466] [-0.7901176 -1.2683946 1.5141631 ... -0.59524405 -0.21237575 1.3303307 ] ... [-2.1818068 0.46753374 0.3431004 ... 0.515319 2.1174974 -1.5501511 ] [ 0.6287763 0.6569649 2.0882108 ... 
-2.6759648 0.10918107 -1.0876783 ] [ 2.0882108 -1.6220148 0.24608447 ... -1.0876783 0.15446563 0.41285107]] [[ 1.5417582 1.75737 -3.0070953 ... 1.0556866 2.368981 -0.6985008 ] [-3.0070953 1.0720166 0.45616764 ... -0.6985008 0.1867082 -1.4465518 ] [ 0.09099521 0.33846486 0.12309623 ... -0.20034173 -1.109428 -0.6796337 ] ... [ 0.81373495 -0.41456914 -2.2304006 ... 0.30459878 -0.015598 0.30114782] [-0.92620057 -2.3941066 2.1045313 ... 0.87610173 1.0969323 -0.48355138] [ 2.1045313 -0.32917556 -0.04754185 ... -0.48355138 0.00518768 0.24098025]] ... [[ 0.3983058 -0.36226353 2.5241554 ... -0.13491194 -0.00325259 0.53247565] [ 2.5241554 0.2936862 -0.35671294 ... 0.53247565 0.20427568 0.17102589] [-2.8586352 -0.39399722 1.4952917 ... -1.5798452 0.20366046 -0.48282477] ... [ 0.50144273 -0.24419397 -0.41718832 ... 0.10422101 -1.5846092 1.4153572 ] [ 0.32482353 -0.11520638 0.4972015 ... -0.6443822 -0.74069005 2.781402 ] [ 0.4972015 0.5366649 -0.69545907 ... 2.781402 0.29554537 -0.7652479 ]] [[-1.0739993 -1.3327042 -1.4280794 ... -0.78335404 -2.0794957 0.5417694 ] [-1.4280794 0.7443849 0.4091878 ... 0.5417694 0.87559384 1.2630342 ] [-0.50540835 -2.4078379 1.3524967 ... -0.43922538 0.06196406 1.104222 ] ... [-1.886541 0.02100582 0.23910025 ... 0.02890321 0.6755948 -1.1379826 ] [-0.59089756 -1.908764 0.4422642 ... -2.428964 1.0147299 -1.0462269 ] [ 0.4422642 -2.1044834 0.6917412 ... -1.0462269 -0.06937607 -0.49915475]] [[ 1.8506058 0.24246961 -1.1521258 ... -0.24033517 0.46007785 -1.2489132 ] [-1.1521258 -1.2123201 -0.43262365 ... -1.2489132 0.16338699 -0.3414641 ] [ 1.65474 0.5129636 -0.18689449 ... 0.9822014 -0.45707482 -0.9874472 ] ... [-1.4883271 0.9166501 -0.43493256 ... 0.60494167 0.56430995 -0.8470431 ] [-2.5879314 0.1546385 -0.5272203 ... 0.177749 0.47096363 0.39329475] [-0.5272203 -0.6349179 0.5247071 ... 0.39329475 0.17874238 0.10244427]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4965.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.34233657 -1.7116073 -1.5230821 ... 0.22929697 -0.2050462 -0.3191237 ] [-1.5230821 -0.15596485 -0.11639588 ... -0.3191237 1.0078433 1.283009 ] [-0.11639588 -0.08660305 0.9198345 ... 1.283009 0.10239774 0.30285245] ... [-0.708239 0.18077196 1.0872232 ... 0.04467303 -0.22691882 -0.08823843] [ 1.0872232 -1.6470542 -1.9418707 ... -0.08823843 0.55009025 -0.14409202] [-1.9418707 0.20547974 -1.1145543 ... -0.14409202 -1.1611527 0.88557523]] [[ 1.7616978 -0.5727129 -0.18714601 ... 1.0332603 -1.0234406 0.6093478 ] [-0.18714601 1.2161335 1.2951994 ... 0.6093478 -0.18875456 1.2239231 ] [ 1.2951994 -1.3587015 0.2951637 ... 1.2239231 0.11110409 2.158077 ] ... [-0.06755953 1.34386 -0.11919714 ... -0.12471816 1.9099108 1.6954869 ] [-0.11919714 2.7464938 0.12982188 ... 1.6954869 -1.4064046 0.96136355] [ 0.12982188 -1.0263126 0.7177826 ... 0.96136355 -0.90888923 1.140624 ]] [[-0.47068894 0.23628184 -1.130879 ... 0.25806433 0.9326492 0.2899263 ] [-1.130879 -0.35334763 -0.5242572 ... 0.2899263 -0.4361475 -0.8464905 ] [-0.5242572 -0.20314904 -1.6990129 ... -0.8464905 1.0161933 1.4434074 ] ... [-0.1705761 0.3623396 1.0155947 ... -0.86022174 -0.7413915 0.9326427 ] [ 1.0155947 -0.88678944 0.07796621 ... 0.9326427 -0.965245 -0.00709289] [ 0.07796621 1.2832623 -1.2297924 ... -0.00709289 0.42192602 0.7473718 ]] ... [[-0.590058 -0.13499163 0.48579866 ... 0.04918304 0.66864467 -0.52479684] [ 0.48579866 1.2733691 1.8418626 ... 
-0.52479684 -0.9518259 -0.9122601 ] [ 1.8418626 -0.57427686 -0.35595125 ... -0.9122601 -0.9101079 1.6728946 ] ... [-0.69601846 1.811838 0.9348581 ... 2.2411628 -0.12292171 -0.8979624 ] [ 0.9348581 -0.3007331 -0.7696403 ... -0.8979624 0.20824282 -1.6233982 ] [-0.7696403 -2.2624307 0.03022962 ... -1.6233982 1.802828 -0.18994412]] [[-0.18471116 -1.6215196 -0.63911295 ... -0.5286356 0.29849908 -1.5147657 ] [-0.63911295 0.10944694 -0.75663775 ... -1.5147657 -0.06308924 -0.4091529 ] [-0.75663775 -1.772572 -1.1362604 ... -0.4091529 -0.5539459 -1.2730349 ] ... [ 2.0621452 0.2887894 1.2724665 ... -1.7664832 -0.01587737 -0.29978234] [ 1.2724665 -0.20030211 0.3276311 ... -0.29978234 -1.1194941 0.34585384] [ 0.3276311 0.15231279 -0.39225823 ... 0.34585384 -1.9829489 0.6523388 ]] [[ 0.57100505 0.5957695 -0.5810845 ... 0.5414386 -0.80028147 -0.9955882 ] [-0.5810845 0.17101464 0.03355015 ... -0.9955882 -1.3625686 -0.13918073] [ 0.03355015 1.1986287 -0.9968473 ... -0.13918073 0.7676506 -2.2854924 ] ... [ 0.69896775 -0.05518048 0.38560507 ... 0.9050934 -1.1560807 -1.0922865 ] [ 0.38560507 -0.6290401 -1.6124071 ... -1.0922865 0.54757583 0.82186013] [-1.6124071 0.7343024 2.8767855 ... 0.82186013 0.81755924 -0.3438481 ]]]; ov_res: [[[ 0.34233657 -1.7116073 -1.5230821 ... 0.22929697 -0.2050462 -0.3191237 ] [-1.5230821 -0.15596485 -0.11639588 ... -0.3191237 1.0078433 1.283009 ] [-0.11639588 -0.08660305 0.9198345 ... 1.283009 0.10239774 0.30285245] ... [-0.708239 0.18077196 1.0872232 ... 0.04467303 -0.22691882 -0.08823843] [ 1.0872232 -1.6470542 -1.9418707 ... -0.08823843 0.55009025 -0.14409202] [-1.9418707 0.20547974 -1.1145543 ... -0.14409202 -1.1611527 0.88557523]] [[ 1.7616978 -0.5727129 -0.18714601 ... 1.0332603 -1.0234406 0.6093478 ] [-0.18714601 1.2161335 1.2951994 ... 0.6093478 -0.18875456 1.2239231 ] [ 1.2951994 -1.3587015 0.2951637 ... 1.2239231 0.11110409 2.158077 ] ... [-0.06755953 1.34386 -0.11919714 ... -0.12471816 1.9099108 1.6954869 ] [-0.11919714 2.7464938 0.12982188 ... 
1.6954869 -1.4064046 0.96136355] [ 0.12982188 -1.0263126 0.7177826 ... 0.96136355 -0.90888923 1.140624 ]] [[-0.47068894 0.23628184 -1.130879 ... 0.25806433 0.9326492 0.2899263 ] [-1.130879 -0.35334763 -0.5242572 ... 0.2899263 -0.4361475 -0.8464905 ] [-0.5242572 -0.20314904 -1.6990129 ... -0.8464905 1.0161933 1.4434074 ] ... [-0.1705761 0.3623396 1.0155947 ... -0.86022174 -0.7413915 0.9326427 ] [ 1.0155947 -0.88678944 0.07796621 ... 0.9326427 -0.965245 -0.00709289] [ 0.07796621 1.2832623 -1.2297924 ... -0.00709289 0.42192602 0.7473718 ]] ... [[-0.590058 -0.13499163 0.48579866 ... 0.04918304 0.66864467 -0.52479684] [ 0.48579866 1.2733691 1.8418626 ... -0.52479684 -0.9518259 -0.9122601 ] [ 1.8418626 -0.57427686 -0.35595125 ... -0.9122601 -0.9101079 1.6728946 ] ... [-0.69601846 1.811838 0.9348581 ... 2.2411628 -0.12292171 -0.8979624 ] [ 0.9348581 -0.3007331 -0.7696403 ... -0.8979624 0.20824282 -1.6233982 ] [-0.7696403 -2.2624307 0.03022962 ... -1.6233982 1.802828 -0.18994412]] [[-0.18471116 -1.6215196 -0.63911295 ... -0.5286356 0.29849908 -1.5147657 ] [-0.63911295 0.10944694 -0.75663775 ... -1.5147657 -0.06308924 -0.4091529 ] [-0.75663775 -1.772572 -1.1362604 ... -0.4091529 -0.5539459 -1.2730349 ] ... [ 2.0621452 0.2887894 1.2724665 ... -1.7664832 -0.01587737 -0.29978234] [ 1.2724665 -0.20030211 0.3276311 ... -0.29978234 -1.1194941 0.34585384] [ 0.3276311 0.15231279 -0.39225823 ... 0.34585384 -1.9829489 0.6523388 ]] [[ 0.57100505 0.5957695 -0.5810845 ... 0.5414386 -0.80028147 -0.9955882 ] [-0.5810845 0.17101464 0.03355015 ... -0.9955882 -1.3625686 -0.13918073] [ 0.03355015 1.1986287 -0.9968473 ... -0.13918073 0.7676506 -2.2854924 ] ... [ 0.69896775 -0.05518048 0.38560507 ... 0.9050934 -1.1560807 -1.0922865 ] [ 0.38560507 -0.6290401 -1.6124071 ... -1.0922865 0.54757583 0.82186013] [-1.6124071 0.7343024 2.8767855 ... 0.82186013 0.81755924 -0.3438481 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4967.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-0.07246574 -0.7092706 -0.69897896 ... 0.0258447 -1.3360996 0.27158922] [-0.69897896 0.73338425 -1.0282727 ... 0.27158922 0.931948 0.6594108 ] [-0.87370723 0.79564804 -0.24085884 ... 0.6975144 0.7933135 0.0808959 ] ... [-1.2362652 0.72926927 -0.5104067 ... 0.17489012 -2.5240488 1.2976278 ] [-0.70754945 -0.7446181 1.653214 ... 1.5300311 -1.4322075 0.84873074] [ 1.653214 0.003 0.03103115 ... 0.84873074 1.1656181 -0.1551995 ]] [[ 1.3563637 0.12020236 0.91812193 ... -1.2085283 0.87025106 -0.48031145] [ 0.91812193 -1.7764293 0.5663214 ... -0.48031145 1.880692 0.3125099 ] [-0.18119858 -0.6846733 0.41379407 ... 1.1512276 -2.3705018 0.43506163] ... [-0.38538733 -1.3067563 -0.5393294 ... 0.0113222 -1.1007617 -0.44443822] [ 1.1417519 0.03159084 -1.0507418 ... 0.72489774 -0.02390707 -2.1863146 ] [-1.0507418 -0.7008569 0.70811796 ... -2.1863146 -0.6776968 -0.41613895]] [[-0.13936614 0.16436549 0.8633178 ... -1.0334946 -0.1480869 1.5943226 ] [ 0.8633178 -1.3783265 -0.6789572 ... 1.5943226 0.46066132 -1.4464202 ] [ 0.94935507 -0.83539623 1.395502 ... -0.29005688 0.68897504 -0.21615022] ... [ 0.6065119 1.880884 -0.96801484 ... 0.7378263 1.4280736 1.6550643 ] [-1.2877686 -1.5726777 2.1268353 ... -1.1432298 -1.0767058 -0.05900037] [ 2.1268353 0.20614196 1.2593865 ... -0.05900037 0.896885 1.198901 ]] ... [[ 0.2938037 -0.25406533 0.10255814 ... 0.34958634 0.23087105 0.3460028 ] [ 0.10255814 -1.1995845 -1.351807 ... 
0.3460028 0.5415654 1.1142215 ] [ 0.17619203 1.54813 -0.8810198 ... 0.28088525 -2.2551026 0.74126446] ... [-1.2960871 -0.9774543 -0.5879752 ... 0.06247829 1.2286997 -0.85353136] [-1.1075947 0.6940425 0.48061895 ... 0.18550669 -1.5968993 -0.42848796] [ 0.48061895 0.2537706 -0.19807875 ... -0.42848796 -1.6904231 0.5854699 ]] [[ 0.93312794 -0.19421649 1.1972115 ... 0.37070218 1.7743862 -1.6982065 ] [ 1.1972115 -0.48660296 0.13454919 ... -1.6982065 2.371608 -0.82425636] [-0.67300224 -1.1546668 -0.09577114 ... -0.16921592 1.6704699 -0.801441 ] ... [-0.18487665 -0.36264387 1.4575123 ... 1.0195866 0.46317762 -1.3541877 ] [ 1.6967571 -1.5400462 -0.82376015 ... 0.1338474 0.8831384 0.12845646] [-0.82376015 -0.09249491 -0.66215444 ... 0.12845646 0.9595863 -0.4929975 ]] [[-0.12322783 -0.06189476 -0.952084 ... 0.8426278 0.93259245 -2.432827 ] [-0.952084 -0.46218607 0.18108755 ... -2.432827 -1.0339172 0.6802322 ] [ 0.33120826 -1.2621851 -1.0596392 ... 1.2961565 1.4877031 0.98770654] ... [-0.22071497 -0.47092396 0.7273272 ... -0.10427538 1.2399606 0.46394077] [-1.2071787 0.15708607 -0.9058556 ... 0.0701296 1.1497471 0.21687251] [-0.9058556 -0.69828784 -0.7582294 ... 0.21687251 -1.1034355 0.23164155]]]; ov_res: [[[-0.07246574 -0.7092706 -0.69897896 ... 0.0258447 -1.3360996 0.27158922] [-0.69897896 0.73338425 -1.0282727 ... 0.27158922 0.931948 0.6594108 ] [-0.87370723 0.79564804 -0.24085884 ... 0.6975144 0.7933135 0.0808959 ] ... [-1.2362652 0.72926927 -0.5104067 ... 0.17489012 -2.5240488 1.2976278 ] [-0.70754945 -0.7446181 1.653214 ... 1.5300311 -1.4322075 0.84873074] [ 1.653214 0.003 0.03103115 ... 0.84873074 1.1656181 -0.1551995 ]] [[ 1.3563637 0.12020236 0.91812193 ... -1.2085283 0.87025106 -0.48031145] [ 0.91812193 -1.7764293 0.5663214 ... -0.48031145 1.880692 0.3125099 ] [-0.18119858 -0.6846733 0.41379407 ... 1.1512276 -2.3705018 0.43506163] ... [-0.38538733 -1.3067563 -0.5393294 ... 0.0113222 -1.1007617 -0.44443822] [ 1.1417519 0.03159084 -1.0507418 ... 
0.72489774 -0.02390707 -2.1863146 ] [-1.0507418 -0.7008569 0.70811796 ... -2.1863146 -0.6776968 -0.41613895]] [[-0.13936614 0.16436549 0.8633178 ... -1.0334946 -0.1480869 1.5943226 ] [ 0.8633178 -1.3783265 -0.6789572 ... 1.5943226 0.46066132 -1.4464202 ] [ 0.94935507 -0.83539623 1.395502 ... -0.29005688 0.68897504 -0.21615022] ... [ 0.6065119 1.880884 -0.96801484 ... 0.7378263 1.4280736 1.6550643 ] [-1.2877686 -1.5726777 2.1268353 ... -1.1432298 -1.0767058 -0.05900037] [ 2.1268353 0.20614196 1.2593865 ... -0.05900037 0.896885 1.198901 ]] ... [[ 0.2938037 -0.25406533 0.10255814 ... 0.34958634 0.23087105 0.3460028 ] [ 0.10255814 -1.1995845 -1.351807 ... 0.3460028 0.5415654 1.1142215 ] [ 0.17619203 1.54813 -0.8810198 ... 0.28088525 -2.2551026 0.74126446] ... [-1.2960871 -0.9774543 -0.5879752 ... 0.06247829 1.2286997 -0.85353136] [-1.1075947 0.6940425 0.48061895 ... 0.18550669 -1.5968993 -0.42848796] [ 0.48061895 0.2537706 -0.19807875 ... -0.42848796 -1.6904231 0.5854699 ]] [[ 0.93312794 -0.19421649 1.1972115 ... 0.37070218 1.7743862 -1.6982065 ] [ 1.1972115 -0.48660296 0.13454919 ... -1.6982065 2.371608 -0.82425636] [-0.67300224 -1.1546668 -0.09577114 ... -0.16921592 1.6704699 -0.801441 ] ... [-0.18487665 -0.36264387 1.4575123 ... 1.0195866 0.46317762 -1.3541877 ] [ 1.6967571 -1.5400462 -0.82376015 ... 0.1338474 0.8831384 0.12845646] [-0.82376015 -0.09249491 -0.66215444 ... 0.12845646 0.9595863 -0.4929975 ]] [[-0.12322783 -0.06189476 -0.952084 ... 0.8426278 0.93259245 -2.432827 ] [-0.952084 -0.46218607 0.18108755 ... -2.432827 -1.0339172 0.6802322 ] [ 0.33120826 -1.2621851 -1.0596392 ... 1.2961565 1.4877031 0.98770654] ... [-0.22071497 -0.47092396 0.7273272 ... -0.10427538 1.2399606 0.46394077] [-1.2071787 0.15708607 -0.9058556 ... 0.0701296 1.1497471 0.21687251] [-0.9058556 -0.69828784 -0.7582294 ... 0.21687251 -1.1034355 0.23164155]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:2 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4969.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[-1.0902418 1.7898567 -2.490854 ... 0.97082424 -0.4785176 1.4664131 ] [ 1.9367353 -1.3418605 -0.9645178 ... 2.1903555 1.1444589 -0.33524692] [ 1.4582604 0.680369 1.4024715 ... -1.1557126 1.0644403 0.85217345]] [[ 0.21230218 0.05831675 1.2798167 ... -1.3621309 -0.04110005 -0.23833723] [-2.0837061 -0.6289747 0.0078813 ... -0.06189821 0.4830987 1.4425081 ] [ 1.2289189 0.12175772 0.15330318 ... -1.3027369 0.5164379 -0.7632142 ]] [[ 0.40806314 1.5758716 0.17342246 ... 0.5341164 0.05305786 0.97038174] [-1.0237317 -0.8922228 0.8153246 ... 1.516181 -0.6241423 -0.7132526 ] [-1.3973253 -0.6173398 0.5602629 ... 0.5846084 0.05913257 1.2473867 ]] ... [[ 0.8045611 -1.5001601 -0.9556555 ... -0.84737355 0.5588003 0.6420415 ] [-0.42083994 0.21288489 0.19506276 ... -0.21700472 -0.18258563 -0.63296205] [ 1.0784953 0.18022269 0.43334028 ... 0.40083867 0.10065556 -0.04787018]] [[ 1.1085253 -0.16090634 0.38433275 ... 1.1471897 0.51021194 0.4629827 ] [ 0.31634042 0.09329771 -1.0502286 ... 1.4664606 0.05736823 1.0349437 ] [ 1.7346389 0.17741767 -1.8519844 ... -0.28750935 -1.1326326 -0.9941032 ]] [[ 2.080943 -0.42394018 0.35382336 ... -0.64696616 0.1821415 0.52943116] [ 0.18292919 1.9341733 1.7832348 ... -0.8924463 1.727512 1.4276546 ] [ 1.2438724 -0.5079882 -0.2800052 ... 1.0213569 -0.5015091 -0.5060222 ]]]; ov_res: [[[-1.0902418 1.7898567 -2.490854 ... 0.97082424 -0.4785176 1.4664131 ] [ 1.9367353 -1.3418605 -0.9645178 ... 
2.1903555 1.1444589 -0.33524692] [ 1.4582604 0.680369 1.4024715 ... -1.1557126 1.0644403 0.85217345]] [[ 0.21230218 0.05831675 1.2798167 ... -1.3621309 -0.04110005 -0.23833723] [-2.0837061 -0.6289747 0.0078813 ... -0.06189821 0.4830987 1.4425081 ] [ 1.2289189 0.12175772 0.15330318 ... -1.3027369 0.5164379 -0.7632142 ]] [[ 0.40806314 1.5758716 0.17342246 ... 0.5341164 0.05305786 0.97038174] [-1.0237317 -0.8922228 0.8153246 ... 1.516181 -0.6241423 -0.7132526 ] [-1.3973253 -0.6173398 0.5602629 ... 0.5846084 0.05913257 1.2473867 ]] ... [[ 0.8045611 -1.5001601 -0.9556555 ... -0.84737355 0.5588003 0.6420415 ] [-0.42083994 0.21288489 0.19506276 ... -0.21700472 -0.18258563 -0.63296205] [ 1.0784953 0.18022269 0.43334028 ... 0.40083867 0.10065556 -0.04787018]] [[ 1.1085253 -0.16090634 0.38433275 ... 1.1471897 0.51021194 0.4629827 ] [ 0.31634042 0.09329771 -1.0502286 ... 1.4664606 0.05736823 1.0349437 ] [ 1.7346389 0.17741767 -1.8519844 ... -0.28750935 -1.1326326 -0.9941032 ]] [[ 2.080943 -0.42394018 0.35382336 ... -0.64696616 0.1821415 0.52943116] [ 0.18292919 1.9341733 1.7832348 ... -0.8924463 1.727512 1.4276546 ] [ 1.2438724 -0.5079882 -0.2800052 ... 1.0213569 -0.5015091 -0.5060222 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:3 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4971.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.07734871 -1.3979814 -0.44674546 ... 0.1134345 -1.2921444 0.20656681] [ 0.7140184 0.74113196 -1.3184355 ... 0.19976495 2.272581 -0.44597435] [ 0.8961167 1.7109604 0.16495185 ... 0.52836263 -0.22514169 0.89334697] ... [-0.33914334 -0.6218829 -0.8621942 ... 0.8192885 -0.4477185 0.04590631] [ 0.31299704 -0.5804796 -1.0123205 ... -1.0566263 -1.5515094 -0.6497624 ] [-1.3867003 0.5302223 0.53550637 ... -0.2615809 0.8610795 -0.20257422]] [[-0.7152651 -0.20790412 0.29267883 ... 0.11739424 -1.4637583 -0.3496863 ] [ 0.8985287 1.1348486 -0.7359524 ... 0.8096596 0.3184608 -0.4389291 ] [-0.4455896 -1.1315687 1.7285051 ... -0.27512228 -0.6872383 0.6684255 ] ... [ 0.24183677 -0.24823624 -0.07542001 ... -0.53827757 0.05933688 0.49491113] [-1.0483259 1.7823757 0.694772 ... -0.49713585 -0.26493925 -0.82895446] [-1.272179 0.86555207 -0.42446342 ... -0.8599418 1.5747913 0.07303612]] [[-0.21590526 -0.28032514 -0.60160655 ... 0.39205387 1.1541682 -0.01216047] [-0.31069866 0.45019552 0.9534395 ... 2.147604 0.5117505 0.8449118 ] [-0.23828249 0.4340353 -1.5037804 ... 0.05299897 -1.1519074 -0.09891265] ... [ 1.3367234 -1.2774612 0.8717086 ... -1.9876462 -0.07957415 -1.8182926 ] [-0.43239248 -0.37861505 2.322939 ... 1.1193161 -0.14996871 0.73764175] [-1.3217587 -1.2388347 0.01547762 ... 2.0109696 -2.823549 0.8684532 ]] ... [[ 1.2431936 -0.05177369 -0.2961997 ... -1.2417605 -2.1615674 2.0779552 ] [-0.28836048 0.49459007 0.17158997 ... 
-0.38136452 -0.42546016 0.04490958] [ 1.5159683 0.91484034 0.72303987 ... -0.26087713 0.36435175 0.39536116] ... [-0.3224316 -1.7031158 0.05211259 ... -1.5736611 0.7916563 -0.03556132] [-0.9720512 2.322246 -0.78232133 ... -1.08931 1.0878738 -1.5615463 ] [ 0.10967451 -0.38801396 -0.32637224 ... 1.5164182 -0.22836414 -3.5861275 ]] [[-0.4278075 -1.7288674 -0.8160481 ... 0.71400034 0.3635423 -1.2946748 ] [ 1.183643 1.3881387 -1.2757688 ... 0.15559179 0.44293836 -0.6127477 ] [-1.9436774 -2.6988344 0.71228224 ... 0.08157711 -0.57442397 -0.30154622] ... [-2.1061578 0.8453629 -0.9652311 ... 0.47244558 1.295967 0.8695126 ] [ 0.9570572 -0.7027864 -0.54545456 ... 1.6892022 0.5119025 -0.22283234] [-0.22877365 1.3511685 1.2940782 ... -1.9727243 2.0373168 1.0592475 ]] [[-0.7402318 2.360196 -0.37538898 ... -1.7026788 -0.00962272 0.45963922] [-0.9230657 -0.8988707 -0.2551647 ... 1.9047668 -0.7826755 -0.5256123 ] [-0.72013605 -1.606129 1.2776657 ... 0.719683 0.23554648 -0.2508931 ] ... [-0.48482543 0.6144617 -1.1338154 ... -0.6006696 -0.79479927 -0.28749713] [ 0.37093988 -1.249623 0.6762239 ... 1.0309471 -0.7410038 0.43058324] [ 1.5726668 0.11843237 -1.533743 ... -0.14631051 0.6429395 -0.00911162]]]; ov_res: [[[ 0.07734871 -1.3979814 -0.44674546 ... 0.1134345 -1.2921444 0.20656681] [ 0.7140184 0.74113196 -1.3184355 ... 0.19976495 2.272581 -0.44597435] [ 0.8961167 1.7109604 0.16495185 ... 0.52836263 -0.22514169 0.89334697] ... [-0.33914334 -0.6218829 -0.8621942 ... 0.8192885 -0.4477185 0.04590631] [ 0.31299704 -0.5804796 -1.0123205 ... -1.0566263 -1.5515094 -0.6497624 ] [-1.3867003 0.5302223 0.53550637 ... -0.2615809 0.8610795 -0.20257422]] [[-0.7152651 -0.20790412 0.29267883 ... 0.11739424 -1.4637583 -0.3496863 ] [ 0.8985287 1.1348486 -0.7359524 ... 0.8096596 0.3184608 -0.4389291 ] [-0.4455896 -1.1315687 1.7285051 ... -0.27512228 -0.6872383 0.6684255 ] ... [ 0.24183677 -0.24823624 -0.07542001 ... -0.53827757 0.05933688 0.49491113] [-1.0483259 1.7823757 0.694772 ... 
-0.49713585 -0.26493925 -0.82895446] [-1.272179 0.86555207 -0.42446342 ... -0.8599418 1.5747913 0.07303612]] [[-0.21590526 -0.28032514 -0.60160655 ... 0.39205387 1.1541682 -0.01216047] [-0.31069866 0.45019552 0.9534395 ... 2.147604 0.5117505 0.8449118 ] [-0.23828249 0.4340353 -1.5037804 ... 0.05299897 -1.1519074 -0.09891265] ... [ 1.3367234 -1.2774612 0.8717086 ... -1.9876462 -0.07957415 -1.8182926 ] [-0.43239248 -0.37861505 2.322939 ... 1.1193161 -0.14996871 0.73764175] [-1.3217587 -1.2388347 0.01547762 ... 2.0109696 -2.823549 0.8684532 ]] ... [[ 1.2431936 -0.05177369 -0.2961997 ... -1.2417605 -2.1615674 2.0779552 ] [-0.28836048 0.49459007 0.17158997 ... -0.38136452 -0.42546016 0.04490958] [ 1.5159683 0.91484034 0.72303987 ... -0.26087713 0.36435175 0.39536116] ... [-0.3224316 -1.7031158 0.05211259 ... -1.5736611 0.7916563 -0.03556132] [-0.9720512 2.322246 -0.78232133 ... -1.08931 1.0878738 -1.5615463 ] [ 0.10967451 -0.38801396 -0.32637224 ... 1.5164182 -0.22836414 -3.5861275 ]] [[-0.4278075 -1.7288674 -0.8160481 ... 0.71400034 0.3635423 -1.2946748 ] [ 1.183643 1.3881387 -1.2757688 ... 0.15559179 0.44293836 -0.6127477 ] [-1.9436774 -2.6988344 0.71228224 ... 0.08157711 -0.57442397 -0.30154622] ... [-2.1061578 0.8453629 -0.9652311 ... 0.47244558 1.295967 0.8695126 ] [ 0.9570572 -0.7027864 -0.54545456 ... 1.6892022 0.5119025 -0.22283234] [-0.22877365 1.3511685 1.2940782 ... -1.9727243 2.0373168 1.0592475 ]] [[-0.7402318 2.360196 -0.37538898 ... -1.7026788 -0.00962272 0.45963922] [-0.9230657 -0.8988707 -0.2551647 ... 1.9047668 -0.7826755 -0.5256123 ] [-0.72013605 -1.606129 1.2776657 ... 0.719683 0.23554648 -0.2508931 ] ... [-0.48482543 0.6144617 -1.1338154 ... -0.6006696 -0.79479927 -0.28749713] [ 0.37093988 -1.249623 0.6762239 ... 1.0309471 -0.7410038 0.43058324] [ 1.5726668 0.11843237 -1.533743 ... -0.14631051 0.6429395 -0.00911162]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4973.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.5629381 -0.76424485 0.9471149 ... 0.5632685 0.09737215 0.02273862] [-0.88928986 -0.7818165 -0.13132972 ... -1.2044046 -1.1125863 -2.6627426 ] [ 0.2635684 -1.7861406 -1.0769002 ... -1.2842851 -1.0116166 -0.66655153] ... [ 0.6253444 0.94525653 0.82225347 ... -0.04351464 0.05261859 -1.232138 ] [-0.03632417 -2.3747227 0.05498036 ... -0.01241963 0.06215951 -1.3741509 ] [ 2.1823761 -0.6500797 0.54434866 ... -0.5830087 0.862787 1.0526521 ]] [[ 0.18933648 1.7183133 -2.4433684 ... 1.0373892 -0.7272297 -1.4505792 ] [ 0.06765129 -1.1572909 -0.5597541 ... -1.3062682 -0.9283263 0.8730543 ] [-0.86665714 0.93902445 0.89347786 ... 0.28380054 0.7242467 0.12984768] ... [ 1.7744449 0.8270532 0.3699107 ... -0.4846846 0.10147469 0.2711564 ] [ 2.036469 -0.25145036 -1.0769601 ... 1.1625737 -0.7223995 0.549935 ] [-0.575554 0.527492 -0.6200457 ... 0.7940692 0.503075 0.43054658]] [[ 2.090548 -0.39475784 0.9713457 ... -1.1699077 -0.99652255 1.875845 ] [ 0.02335294 -0.19159146 0.6632463 ... -0.9743166 0.2699431 1.4310132 ] [-0.6950892 1.4298568 1.0273073 ... 0.6820073 -0.09656284 -0.7249827 ] ... [ 0.16629034 0.77563065 -0.2606071 ... -0.10260668 1.5501076 -0.7530198 ] [-0.45063287 -0.4322077 3.2081537 ... -0.7673487 -0.64315116 -2.0092542 ] [ 1.26831 0.05059065 -1.3655562 ... 1.3714265 0.2682852 -0.10615129]] ... [[ 0.78402835 -1.5331513 0.20583017 ... 0.4334135 0.9026263 0.61116356] [ 0.7316068 -0.47555912 0.8583974 ... 
-1.9037759 0.98908883 -0.5889873 ] [ 1.4517039 0.32945815 -1.911691 ... -1.3050967 1.2487024 0.66816306] ... [-0.12132267 0.62086254 -0.46804363 ... -1.5398214 -0.9545762 1.0602851 ] [-2.383914 -0.676265 -0.37979057 ... 0.51874393 0.20270208 0.29308927] [-0.05409325 0.1412999 -1.850782 ... 0.08716452 1.4909992 -0.5933839 ]] [[ 1.1092871 0.02237948 0.15270405 ... -0.2687541 -0.1077793 -1.2212762 ] [-0.5912679 0.03517582 0.34874284 ... -2.4764411 -0.46767935 -0.90912247] [-0.05140019 -0.5868929 0.95073384 ... 0.35790613 -1.9517769 0.48483396] ... [-1.2610348 -1.3473642 -0.22612569 ... -0.4633469 -1.0282967 1.1821392 ] [-1.823723 -0.9273282 1.0348356 ... -0.70504177 0.04691122 -1.8606715 ] [ 1.351597 1.580196 -0.6624277 ... 0.13575324 -0.19974484 0.6608475 ]] [[ 0.53059167 -0.86002046 0.0230087 ... 0.438995 0.32807645 -1.6296142 ] [ 1.2367345 0.29424322 -0.154183 ... -1.7828352 0.27949718 0.11276986] [ 0.3320986 0.03564622 0.78090644 ... -0.15915944 1.4188657 -1.5337563 ] ... [-1.8164601 -0.98856604 -0.02044614 ... 1.1603276 -1.7825295 1.3383386 ] [ 0.23414314 -0.8739195 -0.8774257 ... 0.19926448 -2.023235 -1.5520687 ] [ 0.01445351 0.5159129 -0.79093885 ... -0.86882 1.2119755 1.7910292 ]]]; ov_res: [[[ 0.5629381 -0.76424485 0.9471149 ... 0.5632685 0.09737215 0.02273862] [-0.88928986 -0.7818165 -0.13132972 ... -1.2044046 -1.1125863 -2.6627426 ] [ 0.2635684 -1.7861406 -1.0769002 ... -1.2842851 -1.0116166 -0.66655153] ... [ 0.6253444 0.94525653 0.82225347 ... -0.04351464 0.05261859 -1.232138 ] [-0.03632417 -2.3747227 0.05498036 ... -0.01241963 0.06215951 -1.3741509 ] [ 2.1823761 -0.6500797 0.54434866 ... -0.5830087 0.862787 1.0526521 ]] [[ 0.18933648 1.7183133 -2.4433684 ... 1.0373892 -0.7272297 -1.4505792 ] [ 0.06765129 -1.1572909 -0.5597541 ... -1.3062682 -0.9283263 0.8730543 ] [-0.86665714 0.93902445 0.89347786 ... 0.28380054 0.7242467 0.12984768] ... [ 1.7744449 0.8270532 0.3699107 ... -0.4846846 0.10147469 0.2711564 ] [ 2.036469 -0.25145036 -1.0769601 ... 
1.1625737 -0.7223995 0.549935 ] [-0.575554 0.527492 -0.6200457 ... 0.7940692 0.503075 0.43054658]] [[ 2.090548 -0.39475784 0.9713457 ... -1.1699077 -0.99652255 1.875845 ] [ 0.02335294 -0.19159146 0.6632463 ... -0.9743166 0.2699431 1.4310132 ] [-0.6950892 1.4298568 1.0273073 ... 0.6820073 -0.09656284 -0.7249827 ] ... [ 0.16629034 0.77563065 -0.2606071 ... -0.10260668 1.5501076 -0.7530198 ] [-0.45063287 -0.4322077 3.2081537 ... -0.7673487 -0.64315116 -2.0092542 ] [ 1.26831 0.05059065 -1.3655562 ... 1.3714265 0.2682852 -0.10615129]] ... [[ 0.78402835 -1.5331513 0.20583017 ... 0.4334135 0.9026263 0.61116356] [ 0.7316068 -0.47555912 0.8583974 ... -1.9037759 0.98908883 -0.5889873 ] [ 1.4517039 0.32945815 -1.911691 ... -1.3050967 1.2487024 0.66816306] ... [-0.12132267 0.62086254 -0.46804363 ... -1.5398214 -0.9545762 1.0602851 ] [-2.383914 -0.676265 -0.37979057 ... 0.51874393 0.20270208 0.29308927] [-0.05409325 0.1412999 -1.850782 ... 0.08716452 1.4909992 -0.5933839 ]] [[ 1.1092871 0.02237948 0.15270405 ... -0.2687541 -0.1077793 -1.2212762 ] [-0.5912679 0.03517582 0.34874284 ... -2.4764411 -0.46767935 -0.90912247] [-0.05140019 -0.5868929 0.95073384 ... 0.35790613 -1.9517769 0.48483396] ... [-1.2610348 -1.3473642 -0.22612569 ... -0.4633469 -1.0282967 1.1821392 ] [-1.823723 -0.9273282 1.0348356 ... -0.70504177 0.04691122 -1.8606715 ] [ 1.351597 1.580196 -0.6624277 ... 0.13575324 -0.19974484 0.6608475 ]] [[ 0.53059167 -0.86002046 0.0230087 ... 0.438995 0.32807645 -1.6296142 ] [ 1.2367345 0.29424322 -0.154183 ... -1.7828352 0.27949718 0.11276986] [ 0.3320986 0.03564622 0.78090644 ... -0.15915944 1.4188657 -1.5337563 ] ... [-1.8164601 -0.98856604 -0.02044614 ... 1.1603276 -1.7825295 1.3383386 ] [ 0.23414314 -0.8739195 -0.8774257 ... 0.19926448 -2.023235 -1.5520687 ] [ 0.01445351 0.5159129 -0.79093885 ... -0.86882 1.2119755 1.7910292 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4975.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[-0.12650461 -0.13839239 -0.6731954 ... 2.2754543 -0.45588547 -1.2295289 ] [-2.1970294 -0.0594475 -1.6907835 ... 0.5531754 1.2648013 -0.7394088 ] [-0.63494354 -0.12559517 -0.23132591 ... 0.3901409 0.57289654 1.2399766 ] ... [ 0.9183648 -0.23791519 -1.6044465 ... -0.24854235 -0.1050123 -0.02345959] [ 0.16235767 -1.8201393 -0.6765091 ... 1.1303169 -0.7786678 -1.0299553 ] [-1.0946319 -0.61018825 -0.55465144 ... 0.17703366 2.0671203 0.5256546 ]] [[ 0.2569798 -0.67348576 -0.47054362 ... 0.1287623 1.6904281 -1.9131172 ] [ 2.141449 -0.49460292 0.91379184 ... 0.06186906 0.17326698 2.8587947 ] [-0.17409113 -0.18295056 -0.1874493 ... -1.2218536 0.5129065 0.04294794] ... [-1.3183285 -1.637334 0.5784333 ... -0.35630724 1.3541574 2.2223961 ] [ 0.60770667 -2.323832 -0.47876722 ... -0.09761363 0.33714467 -0.51834464] [ 1.0068496 1.1307539 0.36098638 ... -0.6895213 0.43957442 0.2004585 ]] [[ 0.0701377 -0.11975856 0.7888995 ... 0.19245428 0.15671079 -0.4210791 ] [-0.40413308 -0.8907878 0.02418127 ... -0.39095002 -1.32488 -0.77040064] [-2.1920648 -0.29207295 0.33507535 ... -2.2417092 -0.6359802 -0.36299226] ... [ 1.0168045 -1.2063268 1.155212 ... 0.09250864 -1.2892267 0.91112655] [-0.24718902 -0.68057925 -0.33307433 ... -0.09999055 0.64470416 0.46722576] [-0.55076027 -1.7505028 0.94905335 ... -1.1479996 0.14404981 -0.2104254 ]] ... [[-0.20793465 0.01058944 -1.1949445 ... 0.32502678 -1.5401008 -1.3610758 ] [ 0.6191585 1.4594136 0.9335094 ... 
1.2065306 -0.5457354 -0.99367344] [ 0.28378004 1.3114539 0.69699514 ... 0.44533542 -2.1595063 -0.70475435] ... [ 0.5796028 0.6951825 -0.2689565 ... -0.3623419 -0.16103643 1.3193291 ] [ 0.13217379 -2.3590121 -2.1657565 ... 0.04269763 2.2250311 0.5678989 ] [ 0.05796735 -1.9017259 0.00588796 ... -0.9687546 0.6886885 -0.17825544]] [[ 2.081573 0.40876323 -0.9974222 ... -0.6918657 1.4569744 -0.07005452] [ 1.5659477 0.25071844 -0.82428515 ... -1.220558 -0.07964353 -1.0260035 ] [-1.1708276 -1.3261516 0.13668475 ... 0.79735416 -0.44172534 -0.12511773] ... [ 0.03162726 1.0670257 -0.65783715 ... 0.661266 1.841147 -0.45893222] [ 0.7570551 -0.73522455 -0.4252506 ... -0.09759267 1.5612191 -1.0828358 ] [-0.45848298 -0.11916005 -0.8002805 ... -1.4741766 -1.9907049 0.6118854 ]] [[ 0.8571996 -0.6210864 0.13213427 ... -1.0504128 -0.95777303 2.0591412 ] [-1.531359 0.34609097 0.05005882 ... -0.97002876 1.7834883 0.39962578] [ 0.71704394 -0.95251393 -0.25593856 ... 0.93943065 -0.59921956 -0.84018904] ... [-1.6912152 -0.52781206 0.13116343 ... -0.5625409 -0.32070866 1.2923478 ] [-0.45509535 0.9916502 0.81283617 ... 0.91905534 -0.2753711 2.9046628 ] [ 0.36206415 1.6892296 -0.714004 ... -0.70590323 0.37519336 0.87501544]]]; ov_res: [[[-0.12650461 -0.13839239 -0.6731954 ... 2.2754543 -0.45588547 -1.2295289 ] [-2.1970294 -0.0594475 -1.6907835 ... 0.5531754 1.2648013 -0.7394088 ] [-0.63494354 -0.12559517 -0.23132591 ... 0.3901409 0.57289654 1.2399766 ] ... [ 0.9183648 -0.23791519 -1.6044465 ... -0.24854235 -0.1050123 -0.02345959] [ 0.16235767 -1.8201393 -0.6765091 ... 1.1303169 -0.7786678 -1.0299553 ] [-1.0946319 -0.61018825 -0.55465144 ... 0.17703366 2.0671203 0.5256546 ]] [[ 0.2569798 -0.67348576 -0.47054362 ... 0.1287623 1.6904281 -1.9131172 ] [ 2.141449 -0.49460292 0.91379184 ... 0.06186906 0.17326698 2.8587947 ] [-0.17409113 -0.18295056 -0.1874493 ... -1.2218536 0.5129065 0.04294794] ... [-1.3183285 -1.637334 0.5784333 ... 
-0.35630724 1.3541574 2.2223961 ] [ 0.60770667 -2.323832 -0.47876722 ... -0.09761363 0.33714467 -0.51834464] [ 1.0068496 1.1307539 0.36098638 ... -0.6895213 0.43957442 0.2004585 ]] [[ 0.0701377 -0.11975856 0.7888995 ... 0.19245428 0.15671079 -0.4210791 ] [-0.40413308 -0.8907878 0.02418127 ... -0.39095002 -1.32488 -0.77040064] [-2.1920648 -0.29207295 0.33507535 ... -2.2417092 -0.6359802 -0.36299226] ... [ 1.0168045 -1.2063268 1.155212 ... 0.09250864 -1.2892267 0.91112655] [-0.24718902 -0.68057925 -0.33307433 ... -0.09999055 0.64470416 0.46722576] [-0.55076027 -1.7505028 0.94905335 ... -1.1479996 0.14404981 -0.2104254 ]] ... [[-0.20793465 0.01058944 -1.1949445 ... 0.32502678 -1.5401008 -1.3610758 ] [ 0.6191585 1.4594136 0.9335094 ... 1.2065306 -0.5457354 -0.99367344] [ 0.28378004 1.3114539 0.69699514 ... 0.44533542 -2.1595063 -0.70475435] ... [ 0.5796028 0.6951825 -0.2689565 ... -0.3623419 -0.16103643 1.3193291 ] [ 0.13217379 -2.3590121 -2.1657565 ... 0.04269763 2.2250311 0.5678989 ] [ 0.05796735 -1.9017259 0.00588796 ... -0.9687546 0.6886885 -0.17825544]] [[ 2.081573 0.40876323 -0.9974222 ... -0.6918657 1.4569744 -0.07005452] [ 1.5659477 0.25071844 -0.82428515 ... -1.220558 -0.07964353 -1.0260035 ] [-1.1708276 -1.3261516 0.13668475 ... 0.79735416 -0.44172534 -0.12511773] ... [ 0.03162726 1.0670257 -0.65783715 ... 0.661266 1.841147 -0.45893222] [ 0.7570551 -0.73522455 -0.4252506 ... -0.09759267 1.5612191 -1.0828358 ] [-0.45848298 -0.11916005 -0.8002805 ... -1.4741766 -1.9907049 0.6118854 ]] [[ 0.8571996 -0.6210864 0.13213427 ... -1.0504128 -0.95777303 2.0591412 ] [-1.531359 0.34609097 0.05005882 ... -0.97002876 1.7834883 0.39962578] [ 0.71704394 -0.95251393 -0.25593856 ... 0.93943065 -0.59921956 -0.84018904] ... [-1.6912152 -0.52781206 0.13116343 ... -0.5625409 -0.32070866 1.2923478 ] [-0.45509535 0.9916502 0.81283617 ... 0.91905534 -0.2753711 2.9046628 ] [ 0.36206415 1.6892296 -0.714004 ... -0.70590323 0.37519336 0.87501544]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4977.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 9.59031582e-02 3.49130511e-01 1.93599558e+00 ... -1.39409840e+00 -3.10306907e-01 3.47704440e-02] [-9.37823355e-01 8.26939270e-02 -1.37461531e+00 ... 1.97188258e-01 1.67762673e+00 -4.95952904e-01] [-3.02854490e+00 -2.24442288e-01 -9.45931911e-01 ... 2.73897797e-01 -9.03262377e-01 7.45539010e-01] ... [ 2.57981926e-01 -2.78675437e-01 -1.12820804e+00 ... 3.59265924e-01 1.81609905e+00 -1.30992556e+00] [ 5.34585238e-01 -1.56382024e+00 1.39180338e+00 ... 4.15858448e-01 3.40671062e-01 2.68630743e-01] [-4.25667763e-01 1.10958087e+00 -2.32858464e-01 ... -1.31682515e+00 6.51739538e-01 -1.00110650e+00]] [[-1.46225882e+00 1.64218116e+00 -3.63068730e-01 ... 2.93358028e-01 5.26870310e-01 3.56386393e-01] [ 7.63613870e-03 -1.37530375e+00 -2.19296312e+00 ... 2.80150485e+00 5.56340098e-01 -6.12011850e-01] [ 1.02620041e+00 9.36523139e-01 3.11310828e-01 ... -3.36466372e-01 2.97221273e-01 -2.19860578e+00] ... [ 6.89348459e-01 2.81280851e+00 8.46767366e-01 ... 1.46548831e+00 2.17184916e-01 4.06431913e-01] [-2.51436383e-01 -2.07370305e+00 -1.98132917e-01 ... -5.20149052e-01 -8.07099819e-01 -1.38965952e+00] [ 5.22114515e-01 -1.31336403e+00 1.22629797e+00 ... -3.15335751e-01 -1.28077805e+00 1.22377729e+00]] [[ 1.15507424e+00 8.34897399e-01 -1.69199252e+00 ... -6.23509645e-01 -4.00051683e-01 1.39094234e-01] [ 9.91069853e-01 -3.91985506e-01 5.64209461e-01 ... 9.30851161e-01 -5.07111073e-01 1.86683595e+00] [-4.42532390e-01 5.56101024e-01 2.74107993e-01 ... 
8.65584075e-01 5.53808868e-01 -7.60113537e-01] ... [ 1.12708640e+00 7.50619546e-02 -1.48959160e+00 ... -1.35031203e-03 5.19101143e-01 1.03570461e+00] [-1.44736373e+00 -7.07233906e-01 4.74984378e-01 ... 1.48017013e+00 1.91240296e-01 5.64161956e-01] [ 1.05333045e-01 1.02994967e+00 -1.18491423e+00 ... -8.48927021e-01 1.87737152e-01 2.03025013e-01]] ... [[-1.33211219e+00 -3.65925372e-01 7.17532098e-01 ... -1.06589890e+00 1.91713542e-01 -1.79287267e+00] [ 9.78036702e-01 4.84539121e-01 -1.84763587e+00 ... 1.29063857e+00 -1.71264553e+00 -2.25890255e+00] [ 1.30323517e+00 8.90278280e-01 8.15069020e-01 ... -1.52902257e+00 8.19122136e-01 1.19564645e-01] ... [-2.42644101e-02 1.02742934e+00 2.47807875e-01 ... -4.98506099e-01 -1.21077287e+00 2.23941550e-01] [ 2.46188736e+00 -2.58925796e+00 -1.75303295e-01 ... -6.63905919e-01 -6.56322956e-01 1.37965727e+00] [ 8.93443346e-01 1.62290514e-01 5.45490794e-02 ... 8.75742495e-01 6.28558338e-01 -1.54153037e+00]] [[ 3.70731056e-01 9.96617973e-02 -8.32244635e-01 ... -1.69440281e+00 4.95102614e-01 -5.45864999e-02] [ 1.52584687e-01 5.85941933e-02 -3.36760640e-01 ... 1.05344224e+00 -1.34544790e+00 -6.12674415e-01] [-6.02517799e-02 1.10716331e+00 7.13895857e-01 ... 8.13007236e-01 7.19100058e-01 8.13153863e-01] ... [-6.37422442e-01 -1.40831590e+00 3.61420631e-01 ... -3.25468302e+00 -6.71412885e-01 1.05778778e+00] [ 1.69526115e-01 1.22058403e+00 -2.48774499e-01 ... -3.90540779e-01 2.82025069e-01 -2.66956592e+00] [-1.92080989e-01 -3.34949046e-01 3.45296234e-01 ... -1.87000081e-01 -7.17754185e-01 4.60261881e-01]] [[ 1.07350743e+00 2.00343871e+00 1.34995019e+00 ... 2.48182446e-01 1.95272374e+00 4.13932294e-01] [-2.48597771e-01 -1.79799783e+00 9.54168856e-01 ... -1.48573709e+00 7.60850489e-01 -4.05470699e-01] [ 1.30525678e-01 -7.30601788e-01 1.92488229e+00 ... -1.13861358e+00 4.03015882e-01 1.64893717e-01] ... [ 5.70630372e-01 -2.73204774e-01 -9.21030998e-01 ... 
-1.63723278e+00 8.73651654e-02 1.87308073e+00] [ 1.95413566e+00 3.70718360e-01 -1.90538123e-01 ... 1.83884859e-01 5.57750881e-01 -6.94317043e-01] [-4.69321936e-01 7.12658167e-01 3.53039026e-01 ... 3.42949599e-01 -1.81223407e-01 -1.94523901e-01]]]; ov_res: [[[ 9.59031582e-02 3.49130511e-01 1.93599558e+00 ... -1.39409840e+00 -3.10306907e-01 3.47704440e-02] [-9.37823355e-01 8.26939270e-02 -1.37461531e+00 ... 1.97188258e-01 1.67762673e+00 -4.95952904e-01] [-3.02854490e+00 -2.24442288e-01 -9.45931911e-01 ... 2.73897797e-01 -9.03262377e-01 7.45539010e-01] ... [ 2.57981926e-01 -2.78675437e-01 -1.12820804e+00 ... 3.59265924e-01 1.81609905e+00 -1.30992556e+00] [ 5.34585238e-01 -1.56382024e+00 1.39180338e+00 ... 4.15858448e-01 3.40671062e-01 2.68630743e-01] [-4.25667763e-01 1.10958087e+00 -2.32858464e-01 ... -1.31682515e+00 6.51739538e-01 -1.00110650e+00]] [[-1.46225882e+00 1.64218116e+00 -3.63068730e-01 ... 2.93358028e-01 5.26870310e-01 3.56386393e-01] [ 7.63613870e-03 -1.37530375e+00 -2.19296312e+00 ... 2.80150485e+00 5.56340098e-01 -6.12011850e-01] [ 1.02620041e+00 9.36523139e-01 3.11310828e-01 ... -3.36466372e-01 2.97221273e-01 -2.19860578e+00] ... [ 6.89348459e-01 2.81280851e+00 8.46767366e-01 ... 1.46548831e+00 2.17184916e-01 4.06431913e-01] [-2.51436383e-01 -2.07370305e+00 -1.98132917e-01 ... -5.20149052e-01 -8.07099819e-01 -1.38965952e+00] [ 5.22114515e-01 -1.31336403e+00 1.22629797e+00 ... -3.15335751e-01 -1.28077805e+00 1.22377729e+00]] [[ 1.15507424e+00 8.34897399e-01 -1.69199252e+00 ... -6.23509645e-01 -4.00051683e-01 1.39094234e-01] [ 9.91069853e-01 -3.91985506e-01 5.64209461e-01 ... 9.30851161e-01 -5.07111073e-01 1.86683595e+00] [-4.42532390e-01 5.56101024e-01 2.74107993e-01 ... 8.65584075e-01 5.53808868e-01 -7.60113537e-01] ... [ 1.12708640e+00 7.50619546e-02 -1.48959160e+00 ... -1.35031203e-03 5.19101143e-01 1.03570461e+00] [-1.44736373e+00 -7.07233906e-01 4.74984378e-01 ... 
1.48017013e+00 1.91240296e-01 5.64161956e-01] [ 1.05333045e-01 1.02994967e+00 -1.18491423e+00 ... -8.48927021e-01 1.87737152e-01 2.03025013e-01]] ... [[-1.33211219e+00 -3.65925372e-01 7.17532098e-01 ... -1.06589890e+00 1.91713542e-01 -1.79287267e+00] [ 9.78036702e-01 4.84539121e-01 -1.84763587e+00 ... 1.29063857e+00 -1.71264553e+00 -2.25890255e+00] [ 1.30323517e+00 8.90278280e-01 8.15069020e-01 ... -1.52902257e+00 8.19122136e-01 1.19564645e-01] ... [-2.42644101e-02 1.02742934e+00 2.47807875e-01 ... -4.98506099e-01 -1.21077287e+00 2.23941550e-01] [ 2.46188736e+00 -2.58925796e+00 -1.75303295e-01 ... -6.63905919e-01 -6.56322956e-01 1.37965727e+00] [ 8.93443346e-01 1.62290514e-01 5.45490794e-02 ... 8.75742495e-01 6.28558338e-01 -1.54153037e+00]] [[ 3.70731056e-01 9.96617973e-02 -8.32244635e-01 ... -1.69440281e+00 4.95102614e-01 -5.45864999e-02] [ 1.52584687e-01 5.85941933e-02 -3.36760640e-01 ... 1.05344224e+00 -1.34544790e+00 -6.12674415e-01] [-6.02517799e-02 1.10716331e+00 7.13895857e-01 ... 8.13007236e-01 7.19100058e-01 8.13153863e-01] ... [-6.37422442e-01 -1.40831590e+00 3.61420631e-01 ... -3.25468302e+00 -6.71412885e-01 1.05778778e+00] [ 1.69526115e-01 1.22058403e+00 -2.48774499e-01 ... -3.90540779e-01 2.82025069e-01 -2.66956592e+00] [-1.92080989e-01 -3.34949046e-01 3.45296234e-01 ... -1.87000081e-01 -7.17754185e-01 4.60261881e-01]] [[ 1.07350743e+00 2.00343871e+00 1.34995019e+00 ... 2.48182446e-01 1.95272374e+00 4.13932294e-01] [-2.48597771e-01 -1.79799783e+00 9.54168856e-01 ... -1.48573709e+00 7.60850489e-01 -4.05470699e-01] [ 1.30525678e-01 -7.30601788e-01 1.92488229e+00 ... -1.13861358e+00 4.03015882e-01 1.64893717e-01] ... [ 5.70630372e-01 -2.73204774e-01 -9.21030998e-01 ... -1.63723278e+00 8.73651654e-02 1.87308073e+00] [ 1.95413566e+00 3.70718360e-01 -1.90538123e-01 ... 1.83884859e-01 5.57750881e-01 -6.94317043e-01] [-4.69321936e-01 7.12658167e-01 3.53039026e-01 ... 3.42949599e-01 -1.81223407e-01 -1.94523901e-01]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:3 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4979.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[-0.79404396 0.54161465 -0.6285841 ... -0.2363154 -0.32669792 -0.93045753] [ 0.16158399 -0.50954556 -0.13824633 ... 0.53572416 1.801528 -1.0150875 ] [ 0.9175462 0.24925819 -0.4363321 ... 0.44960338 1.2783659 0.80806303]] [[-0.5769136 0.00744421 2.302625 ... -0.26195365 0.96377444 -0.1808715 ] [-1.525181 0.46681812 -0.45576066 ... 1.3094032 -0.48036727 0.18032452] [-0.5061021 0.13810508 -0.4999058 ... 1.2050865 0.39752725 -1.9242563 ]] [[ 1.5594752 1.101779 0.44815055 ... 0.5746326 1.0403888 -1.0487365 ] [-1.149149 -1.5827659 -0.2749014 ... -0.7416549 -0.6947197 -1.1031713 ] [ 0.72273576 -1.2212155 -0.27067 ... -0.21088323 0.8559855 0.8564858 ]] ... [[ 0.34230265 1.5386927 0.89333767 ... -0.4321573 0.23987886 -1.6519647 ] [ 1.3059146 -0.932169 0.41939485 ... 1.6048687 1.8345603 1.1368834 ] [-0.14726989 2.8084955 1.1249352 ... -0.5667668 1.2763233 -0.71495205]] [[ 0.78965116 0.6792047 -0.39443308 ... 0.10746834 0.7551986 -0.14067763] [-0.98287797 -0.9921384 -0.5135011 ... 2.4765596 -0.24054386 0.6159126 ] [ 1.2606798 1.5786841 0.31298193 ... -0.74926776 -0.03256427 -0.19754474]] [[-1.8875254 1.9066039 0.23746127 ... -0.3739929 0.2817023 -2.71988 ] [ 0.7012036 1.3984172 1.0140195 ... 0.16816777 0.21321142 0.6004948 ] [ 1.0158774 -0.26904133 -0.4221113 ... 0.89495206 -0.88364077 -1.1545498 ]]]; ov_res: [[[-0.79404396 0.54161465 -0.6285841 ... -0.2363154 -0.32669792 -0.93045753] [ 0.16158399 -0.50954556 -0.13824633 ... 
0.53572416 1.801528 -1.0150875 ] [ 0.9175462 0.24925819 -0.4363321 ... 0.44960338 1.2783659 0.80806303]] [[-0.5769136 0.00744421 2.302625 ... -0.26195365 0.96377444 -0.1808715 ] [-1.525181 0.46681812 -0.45576066 ... 1.3094032 -0.48036727 0.18032452] [-0.5061021 0.13810508 -0.4999058 ... 1.2050865 0.39752725 -1.9242563 ]] [[ 1.5594752 1.101779 0.44815055 ... 0.5746326 1.0403888 -1.0487365 ] [-1.149149 -1.5827659 -0.2749014 ... -0.7416549 -0.6947197 -1.1031713 ] [ 0.72273576 -1.2212155 -0.27067 ... -0.21088323 0.8559855 0.8564858 ]] ... [[ 0.34230265 1.5386927 0.89333767 ... -0.4321573 0.23987886 -1.6519647 ] [ 1.3059146 -0.932169 0.41939485 ... 1.6048687 1.8345603 1.1368834 ] [-0.14726989 2.8084955 1.1249352 ... -0.5667668 1.2763233 -0.71495205]] [[ 0.78965116 0.6792047 -0.39443308 ... 0.10746834 0.7551986 -0.14067763] [-0.98287797 -0.9921384 -0.5135011 ... 2.4765596 -0.24054386 0.6159126 ] [ 1.2606798 1.5786841 0.31298193 ... -0.74926776 -0.03256427 -0.19754474]] [[-1.8875254 1.9066039 0.23746127 ... -0.3739929 0.2817023 -2.71988 ] [ 0.7012036 1.3984172 1.0140195 ... 0.16816777 0.21321142 0.6004948 ] [ 1.0158774 -0.26904133 -0.4221113 ... 0.89495206 -0.88364077 -1.1545498 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4981.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 5.24796508e-02 -3.06305766e-01 2.48906717e-01 ... -1.08509994e+00 -2.02081037e+00 -9.43352938e-01] [ 2.48906717e-01 8.92743647e-01 3.60877132e+00 ... -9.43352938e-01 -6.37829542e-01 -6.29644811e-01] [ 3.60877132e+00 -2.08766192e-01 -4.20933306e-01 ... -6.29644811e-01 -1.30289912e+00 -9.38736916e-01] ... [-5.08125901e-01 -4.76863608e-02 2.73858517e-01 ... -1.82748288e-01 -3.75488490e-01 -7.79669642e-01] [ 2.73858517e-01 -7.59643674e-01 -3.47153336e-01 ... -7.79669642e-01 6.92752481e-01 -8.19780707e-01] [-3.47153336e-01 -1.84647173e-01 -5.22961438e-01 ... -8.19780707e-01 -4.00361083e-02 4.96381849e-01]] [[ 7.58561850e-01 -8.50418031e-01 6.78390741e-01 ... 9.50829148e-01 1.14459515e+00 3.95514727e-01] [ 6.78390741e-01 -2.16609240e+00 1.16256404e+00 ... 3.95514727e-01 -1.40721953e+00 -1.63620079e+00] [ 1.16256404e+00 1.53376365e+00 6.12386107e-01 ... -1.63620079e+00 -1.08601105e+00 -2.96423793e-01] ... [-1.36397886e+00 -1.43587902e-01 -9.99464214e-01 ... 3.86373013e-01 -6.55355096e-01 5.65040410e-01] [-9.99464214e-01 3.40578586e-01 1.47795022e+00 ... 5.65040410e-01 1.10984349e-03 1.21408415e+00] [ 1.47795022e+00 -1.20276630e-01 5.74674189e-01 ... 1.21408415e+00 -8.45791161e-01 1.03179955e+00]] [[-1.39699447e+00 3.84752959e-01 1.90264672e-01 ... 6.52130485e-01 -5.26464820e-01 -9.17923748e-01] [ 1.90264672e-01 2.30261907e-01 -2.27868184e-01 ... 
-9.17923748e-01 1.63007414e+00 3.28381732e-02] [-2.27868184e-01 1.49308002e+00 1.35002601e+00 ... 3.28381732e-02 1.08785951e+00 1.12700129e+00] ... [-1.13396913e-01 -1.29896140e+00 -6.33416951e-01 ... 8.17480862e-01 -2.82763332e-01 6.82920516e-01] [-6.33416951e-01 5.22149265e-01 -1.15034485e+00 ... 6.82920516e-01 1.34575695e-01 1.85528135e+00] [-1.15034485e+00 4.80003804e-02 4.13306862e-01 ... 1.85528135e+00 -4.34704095e-01 -9.90547687e-02]] ... [[-1.02746034e+00 -9.77227151e-01 6.75217092e-01 ... -9.99489427e-01 1.24278259e+00 -5.27326882e-01] [ 6.75217092e-01 7.60962248e-01 2.76086122e-01 ... -5.27326882e-01 -4.84778494e-01 5.44491172e-01] [ 2.76086122e-01 1.18532252e+00 -9.34043452e-02 ... 5.44491172e-01 -6.50857389e-01 1.00706792e+00] ... [-1.13018006e-01 1.49700642e+00 -5.24965405e-01 ... -3.29089105e-01 3.93882811e-01 -7.09866405e-01] [-5.24965405e-01 1.32078364e-01 2.32752010e-01 ... -7.09866405e-01 -7.38151431e-01 2.88734198e+00] [ 2.32752010e-01 -5.14460325e-01 1.62820148e+00 ... 2.88734198e+00 5.27226269e-01 -7.28460550e-01]] [[-1.10799789e+00 -1.00369596e+00 2.47248411e+00 ... -1.64045084e+00 6.31150901e-01 4.90915775e-02] [ 2.47248411e+00 -4.53280598e-01 -2.55174071e-01 ... 4.90915775e-02 -1.63996056e-01 1.31466165e-01] [-2.55174071e-01 -1.49957240e+00 6.46215752e-02 ... 1.31466165e-01 1.54489326e+00 3.24509263e-01] ... [ 5.71568966e-01 -1.70148253e+00 9.90155861e-02 ... -8.79724741e-01 8.21144521e-01 -2.56372362e-01] [ 9.90155861e-02 -3.23929459e-01 -1.95358440e-01 ... -2.56372362e-01 -1.90855935e-02 1.35507977e+00] [-1.95358440e-01 -1.72743931e-01 -5.51902950e-01 ... 1.35507977e+00 -1.19112122e+00 1.20908439e+00]] [[-2.15671110e+00 2.80422747e-01 -1.15042305e+00 ... -2.97909617e-01 -2.53686994e-01 -1.99746990e+00] [-1.15042305e+00 -2.25714803e-01 5.00501990e-01 ... -1.99746990e+00 5.48265874e-01 -1.59153193e-01] [ 5.00501990e-01 -5.86516321e-01 6.34089947e-01 ... -1.59153193e-01 6.95511937e-01 1.58696365e+00] ... 
[ 1.15817897e-01 8.16365108e-02 -2.01765680e+00 ... 2.26879299e-01 1.46403825e+00 -1.47340447e-01] [-2.01765680e+00 -1.75847077e+00 -6.31075799e-01 ... -1.47340447e-01 -5.65342069e-01 -4.97933567e-01] [-6.31075799e-01 1.79578543e+00 3.53191525e-01 ... -4.97933567e-01 1.23679917e-02 1.01755095e+00]]]; ov_res: [[[ 5.24796508e-02 -3.06305766e-01 2.48906717e-01 ... -1.08509994e+00 -2.02081037e+00 -9.43352938e-01] [ 2.48906717e-01 8.92743647e-01 3.60877132e+00 ... -9.43352938e-01 -6.37829542e-01 -6.29644811e-01] [ 3.60877132e+00 -2.08766192e-01 -4.20933306e-01 ... -6.29644811e-01 -1.30289912e+00 -9.38736916e-01] ... [-5.08125901e-01 -4.76863608e-02 2.73858517e-01 ... -1.82748288e-01 -3.75488490e-01 -7.79669642e-01] [ 2.73858517e-01 -7.59643674e-01 -3.47153336e-01 ... -7.79669642e-01 6.92752481e-01 -8.19780707e-01] [-3.47153336e-01 -1.84647173e-01 -5.22961438e-01 ... -8.19780707e-01 -4.00361083e-02 4.96381849e-01]] [[ 7.58561850e-01 -8.50418031e-01 6.78390741e-01 ... 9.50829148e-01 1.14459515e+00 3.95514727e-01] [ 6.78390741e-01 -2.16609240e+00 1.16256404e+00 ... 3.95514727e-01 -1.40721953e+00 -1.63620079e+00] [ 1.16256404e+00 1.53376365e+00 6.12386107e-01 ... -1.63620079e+00 -1.08601105e+00 -2.96423793e-01] ... [-1.36397886e+00 -1.43587902e-01 -9.99464214e-01 ... 3.86373013e-01 -6.55355096e-01 5.65040410e-01] [-9.99464214e-01 3.40578586e-01 1.47795022e+00 ... 5.65040410e-01 1.10984349e-03 1.21408415e+00] [ 1.47795022e+00 -1.20276630e-01 5.74674189e-01 ... 1.21408415e+00 -8.45791161e-01 1.03179955e+00]] [[-1.39699447e+00 3.84752959e-01 1.90264672e-01 ... 6.52130485e-01 -5.26464820e-01 -9.17923748e-01] [ 1.90264672e-01 2.30261907e-01 -2.27868184e-01 ... -9.17923748e-01 1.63007414e+00 3.28381732e-02] [-2.27868184e-01 1.49308002e+00 1.35002601e+00 ... 3.28381732e-02 1.08785951e+00 1.12700129e+00] ... [-1.13396913e-01 -1.29896140e+00 -6.33416951e-01 ... 8.17480862e-01 -2.82763332e-01 6.82920516e-01] [-6.33416951e-01 5.22149265e-01 -1.15034485e+00 ... 
6.82920516e-01 1.34575695e-01 1.85528135e+00] [-1.15034485e+00 4.80003804e-02 4.13306862e-01 ... 1.85528135e+00 -4.34704095e-01 -9.90547687e-02]] ... [[-1.02746034e+00 -9.77227151e-01 6.75217092e-01 ... -9.99489427e-01 1.24278259e+00 -5.27326882e-01] [ 6.75217092e-01 7.60962248e-01 2.76086122e-01 ... -5.27326882e-01 -4.84778494e-01 5.44491172e-01] [ 2.76086122e-01 1.18532252e+00 -9.34043452e-02 ... 5.44491172e-01 -6.50857389e-01 1.00706792e+00] ... [-1.13018006e-01 1.49700642e+00 -5.24965405e-01 ... -3.29089105e-01 3.93882811e-01 -7.09866405e-01] [-5.24965405e-01 1.32078364e-01 2.32752010e-01 ... -7.09866405e-01 -7.38151431e-01 2.88734198e+00] [ 2.32752010e-01 -5.14460325e-01 1.62820148e+00 ... 2.88734198e+00 5.27226269e-01 -7.28460550e-01]] [[-1.10799789e+00 -1.00369596e+00 2.47248411e+00 ... -1.64045084e+00 6.31150901e-01 4.90915775e-02] [ 2.47248411e+00 -4.53280598e-01 -2.55174071e-01 ... 4.90915775e-02 -1.63996056e-01 1.31466165e-01] [-2.55174071e-01 -1.49957240e+00 6.46215752e-02 ... 1.31466165e-01 1.54489326e+00 3.24509263e-01] ... [ 5.71568966e-01 -1.70148253e+00 9.90155861e-02 ... -8.79724741e-01 8.21144521e-01 -2.56372362e-01] [ 9.90155861e-02 -3.23929459e-01 -1.95358440e-01 ... -2.56372362e-01 -1.90855935e-02 1.35507977e+00] [-1.95358440e-01 -1.72743931e-01 -5.51902950e-01 ... 1.35507977e+00 -1.19112122e+00 1.20908439e+00]] [[-2.15671110e+00 2.80422747e-01 -1.15042305e+00 ... -2.97909617e-01 -2.53686994e-01 -1.99746990e+00] [-1.15042305e+00 -2.25714803e-01 5.00501990e-01 ... -1.99746990e+00 5.48265874e-01 -1.59153193e-01] [ 5.00501990e-01 -5.86516321e-01 6.34089947e-01 ... -1.59153193e-01 6.95511937e-01 1.58696365e+00] ... [ 1.15817897e-01 8.16365108e-02 -2.01765680e+00 ... 2.26879299e-01 1.46403825e+00 -1.47340447e-01] [-2.01765680e+00 -1.75847077e+00 -6.31075799e-01 ... -1.47340447e-01 -5.65342069e-01 -4.97933567e-01] [-6.31075799e-01 1.79578543e+00 3.53191525e-01 ... -4.97933567e-01 1.23679917e-02 1.01755095e+00]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4983.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.920731 0.5948415 0.9463033 ... 0.03399991 0.4054676 -1.6540688 ] [ 0.9463033 -0.29414836 -0.16774003 ... -1.6540688 1.4294314 -2.620883 ] [-0.05859214 0.3304264 0.41599947 ... 0.74241847 0.7623587 0.4585257 ] ... [-1.5590478 0.9591591 -1.0023133 ... 0.88622195 -0.45211494 -0.43018815] [-1.6540086 -0.54965794 -0.29768643 ... -0.15587594 0.18416534 0.97648484] [-0.29768643 -0.70803523 -0.43468538 ... 0.97648484 -0.3462473 0.21830255]] [[ 0.14119738 -0.89687455 1.1104983 ... -0.23154017 0.2910118 2.5144832 ] [ 1.1104983 0.92906994 0.6643875 ... 2.5144832 -1.6598918 -0.04986656] [-0.40681994 -0.60255814 3.2653952 ... 1.4394912 1.4390607 0.10538386] ... [ 0.00332205 0.01559868 -1.5002811 ... -0.90717024 -0.77373564 -0.278628 ] [ 0.9424533 -1.5070919 -0.7142754 ... 0.15395074 -0.7615467 1.8983519 ] [-0.7142754 2.0836658 -0.4324982 ... 1.8983519 0.10751439 -0.8271239 ]] [[-0.5437175 -0.966763 -0.43850508 ... 0.5816864 -0.05938639 -0.03860055] [-0.43850508 -0.60753214 -0.5504455 ... -0.03860055 0.35220423 -1.048419 ] [ 0.7827646 -0.69625854 1.404248 ... 1.1021241 0.597566 -0.21494684] ... [ 0.32831788 -0.2330791 1.2511852 ... -0.18756239 -1.0254675 -0.78346723] [-0.89133537 0.6799287 -1.3505613 ... -1.7507923 -0.7777883 -0.42309216] [-1.3505613 1.0469649 0.07745126 ... -0.42309216 -1.500361 0.65553087]] ... [[ 3.1326625 -0.9727402 -1.8010484 ... -1.7810259 1.737741 -0.7532556 ] [-1.8010484 0.710881 -0.48346287 ... 
-0.7532556 2.1782103 -0.6494109 ] [ 0.3461205 -0.5850541 -0.7005868 ... 0.05231464 1.062843 0.83346474] ... [ 0.40777382 0.22697131 0.18142691 ... 0.8615845 -0.45180777 1.4810495 ] [-0.8265184 -1.6171519 -0.7669641 ... -1.3883524 0.5808718 0.2002957 ] [-0.7669641 -0.01803151 0.91053575 ... 0.2002957 0.24739432 0.57534033]] [[ 2.508717 0.37173864 1.0284655 ... -0.48576313 -0.17912716 -0.19952007] [ 1.0284655 -0.23937048 -0.1739497 ... -0.19952007 -0.4616762 0.61086494] [ 0.291114 -1.0093095 0.08492031 ... 1.4563067 0.5436502 -1.5503607 ] ... [ 1.244337 -1.1553477 -0.9032898 ... 1.4144351 -1.0226402 -0.29448867] [-0.80701256 2.0085602 1.5881494 ... -0.2800398 -0.9949453 -0.04810128] [ 1.5881494 2.241201 -0.20356593 ... -0.04810128 0.280472 1.435115 ]] [[-1.4856329 -0.585291 -1.5998857 ... 0.23982151 -0.24945727 1.7482246 ] [-1.5998857 2.2492127 -0.6497743 ... 1.7482246 2.8404222 -0.20672785] [-0.84728956 0.1358616 -0.68045163 ... -0.45879325 -1.1468104 -1.0274957 ] ... [-0.9156291 1.7329403 0.95356154 ... -1.2019722 0.26047912 -0.11372469] [-0.9112263 -0.06049756 0.5732266 ... 1.761133 -1.4739687 -0.7495466 ] [ 0.5732266 0.26366016 -0.30187535 ... -0.7495466 -0.4015001 -0.5056152 ]]]; ov_res: [[[ 0.920731 0.5948415 0.9463033 ... 0.03399991 0.4054676 -1.6540688 ] [ 0.9463033 -0.29414836 -0.16774003 ... -1.6540688 1.4294314 -2.620883 ] [-0.05859214 0.3304264 0.41599947 ... 0.74241847 0.7623587 0.4585257 ] ... [-1.5590478 0.9591591 -1.0023133 ... 0.88622195 -0.45211494 -0.43018815] [-1.6540086 -0.54965794 -0.29768643 ... -0.15587594 0.18416534 0.97648484] [-0.29768643 -0.70803523 -0.43468538 ... 0.97648484 -0.3462473 0.21830255]] [[ 0.14119738 -0.89687455 1.1104983 ... -0.23154017 0.2910118 2.5144832 ] [ 1.1104983 0.92906994 0.6643875 ... 2.5144832 -1.6598918 -0.04986656] [-0.40681994 -0.60255814 3.2653952 ... 1.4394912 1.4390607 0.10538386] ... [ 0.00332205 0.01559868 -1.5002811 ... -0.90717024 -0.77373564 -0.278628 ] [ 0.9424533 -1.5070919 -0.7142754 ... 
0.15395074 -0.7615467 1.8983519 ] [-0.7142754 2.0836658 -0.4324982 ... 1.8983519 0.10751439 -0.8271239 ]] [[-0.5437175 -0.966763 -0.43850508 ... 0.5816864 -0.05938639 -0.03860055] [-0.43850508 -0.60753214 -0.5504455 ... -0.03860055 0.35220423 -1.048419 ] [ 0.7827646 -0.69625854 1.404248 ... 1.1021241 0.597566 -0.21494684] ... [ 0.32831788 -0.2330791 1.2511852 ... -0.18756239 -1.0254675 -0.78346723] [-0.89133537 0.6799287 -1.3505613 ... -1.7507923 -0.7777883 -0.42309216] [-1.3505613 1.0469649 0.07745126 ... -0.42309216 -1.500361 0.65553087]] ... [[ 3.1326625 -0.9727402 -1.8010484 ... -1.7810259 1.737741 -0.7532556 ] [-1.8010484 0.710881 -0.48346287 ... -0.7532556 2.1782103 -0.6494109 ] [ 0.3461205 -0.5850541 -0.7005868 ... 0.05231464 1.062843 0.83346474] ... [ 0.40777382 0.22697131 0.18142691 ... 0.8615845 -0.45180777 1.4810495 ] [-0.8265184 -1.6171519 -0.7669641 ... -1.3883524 0.5808718 0.2002957 ] [-0.7669641 -0.01803151 0.91053575 ... 0.2002957 0.24739432 0.57534033]] [[ 2.508717 0.37173864 1.0284655 ... -0.48576313 -0.17912716 -0.19952007] [ 1.0284655 -0.23937048 -0.1739497 ... -0.19952007 -0.4616762 0.61086494] [ 0.291114 -1.0093095 0.08492031 ... 1.4563067 0.5436502 -1.5503607 ] ... [ 1.244337 -1.1553477 -0.9032898 ... 1.4144351 -1.0226402 -0.29448867] [-0.80701256 2.0085602 1.5881494 ... -0.2800398 -0.9949453 -0.04810128] [ 1.5881494 2.241201 -0.20356593 ... -0.04810128 0.280472 1.435115 ]] [[-1.4856329 -0.585291 -1.5998857 ... 0.23982151 -0.24945727 1.7482246 ] [-1.5998857 2.2492127 -0.6497743 ... 1.7482246 2.8404222 -0.20672785] [-0.84728956 0.1358616 -0.68045163 ... -0.45879325 -1.1468104 -1.0274957 ] ... [-0.9156291 1.7329403 0.95356154 ... -1.2019722 0.26047912 -0.11372469] [-0.9112263 -0.06049756 0.5732266 ... 1.761133 -1.4739687 -0.7495466 ] [ 0.5732266 0.26366016 -0.30187535 ... -0.7495466 -0.4015001 -0.5056152 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4985.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.51398945 -0.572692 0.7759537 ... 0.2837917 -0.6147102 0.3813197 ] [ 0.7759537 -1.3871052 -1.450135 ... 0.3813197 0.9163534 0.64531124] [-1.450135 0.27341557 -0.9835122 ... 0.64531124 -1.2388567 -1.0426834 ] ... [-1.3257427 -0.14438568 -1.744836 ... -0.5272845 -1.5204382 -0.84345764] [-1.744836 -0.10067423 1.4136537 ... -0.84345764 1.6694134 -1.7442251 ] [ 1.4136537 1.18541 -1.5187289 ... -1.7442251 0.04939116 -0.8746488 ]] [[ 1.1199502 0.7453573 1.3439679 ... 0.46442962 -0.5395563 1.0814164 ] [ 1.3439679 -0.24910101 -0.5619352 ... 1.0814164 0.20281473 0.23246229] [-0.5619352 0.40481588 -0.24124804 ... 0.23246229 -0.784327 0.99340665] ... [-0.4403658 -0.69566953 0.6203669 ... -0.18643893 -1.1827601 0.58414584] [ 0.6203669 1.0065272 -1.7029265 ... 0.58414584 0.50303924 -0.168509 ] [-1.7029265 -0.07356355 0.533028 ... -0.168509 1.5055258 2.244243 ]] [[ 0.3041259 -0.5780669 -1.5326132 ... 1.0698267 0.44297546 -0.7398877 ] [-1.5326132 -0.40479353 0.57656795 ... -0.7398877 1.8050419 -1.4233878 ] [ 0.57656795 -1.1105989 0.73401415 ... -1.4233878 -1.1903689 -0.25188413] ... [-1.3241979 1.1873771 0.82312906 ... 0.4047307 0.05293545 1.1017963 ] [ 0.82312906 1.1283295 0.1968093 ... 1.1017963 1.3513006 0.47599787] [ 0.1968093 -2.1342092 -0.5242419 ... 0.47599787 -1.056634 -1.4126366 ]] ... [[ 0.08617662 0.6007923 1.9557656 ... -0.79097354 0.3264993 0.7304546 ] [ 1.9557656 -0.51987016 -0.55849004 ... 
0.7304546 -0.53800714 -0.79167485] [-0.55849004 -0.40757066 -0.4576975 ... -0.79167485 -1.2706778 0.5564204 ] ... [ 1.1612197 -1.305691 -0.25194776 ... -2.3722792 0.21668248 -0.21693124] [-0.25194776 -1.6139748 0.46095896 ... -0.21693124 0.7837338 0.59595144] [ 0.46095896 0.68781465 1.2920169 ... 0.59595144 -0.92927426 0.24520753]] [[-0.4193792 -0.38040712 -0.9813608 ... -0.68117005 -0.14021482 -1.8597454 ] [-0.9813608 0.42077684 1.0669576 ... -1.8597454 1.7665215 0.6774126 ] [ 1.0669576 -0.37847468 -1.0475864 ... 0.6774126 -0.5300751 -0.35700452] ... [ 0.47627667 0.36842975 -2.2223597 ... 1.7742398 0.23744361 -3.0447497 ] [-2.2223597 -1.0417887 -0.14340581 ... -3.0447497 -0.12091509 -1.3122034 ] [-0.14340581 0.8400176 -1.7357298 ... -1.3122034 -0.5442587 -1.6677922 ]] [[-0.3270208 -0.3326506 0.21841457 ... 0.18094851 -0.30731824 -0.21732928] [ 0.21841457 0.47590923 -0.5801797 ... -0.21732928 -0.11027091 0.14891024] [-0.5801797 -0.03564442 1.0547366 ... 0.14891024 0.12961252 -0.6176247 ] ... [-1.1297221 -0.74076414 -0.10918897 ... 0.7327156 -0.30478132 -0.41422686] [-0.10918897 -0.89050317 0.11464072 ... -0.41422686 1.2488011 0.8209875 ] [ 0.11464072 0.7653762 1.1091734 ... 0.8209875 0.80606776 -0.76864105]]]; ov_res: [[[ 0.51398945 -0.572692 0.7759537 ... 0.2837917 -0.6147102 0.3813197 ] [ 0.7759537 -1.3871052 -1.450135 ... 0.3813197 0.9163534 0.64531124] [-1.450135 0.27341557 -0.9835122 ... 0.64531124 -1.2388567 -1.0426834 ] ... [-1.3257427 -0.14438568 -1.744836 ... -0.5272845 -1.5204382 -0.84345764] [-1.744836 -0.10067423 1.4136537 ... -0.84345764 1.6694134 -1.7442251 ] [ 1.4136537 1.18541 -1.5187289 ... -1.7442251 0.04939116 -0.8746488 ]] [[ 1.1199502 0.7453573 1.3439679 ... 0.46442962 -0.5395563 1.0814164 ] [ 1.3439679 -0.24910101 -0.5619352 ... 1.0814164 0.20281473 0.23246229] [-0.5619352 0.40481588 -0.24124804 ... 0.23246229 -0.784327 0.99340665] ... [-0.4403658 -0.69566953 0.6203669 ... -0.18643893 -1.1827601 0.58414584] [ 0.6203669 1.0065272 -1.7029265 ... 
0.58414584 0.50303924 -0.168509 ] [-1.7029265 -0.07356355 0.533028 ... -0.168509 1.5055258 2.244243 ]] [[ 0.3041259 -0.5780669 -1.5326132 ... 1.0698267 0.44297546 -0.7398877 ] [-1.5326132 -0.40479353 0.57656795 ... -0.7398877 1.8050419 -1.4233878 ] [ 0.57656795 -1.1105989 0.73401415 ... -1.4233878 -1.1903689 -0.25188413] ... [-1.3241979 1.1873771 0.82312906 ... 0.4047307 0.05293545 1.1017963 ] [ 0.82312906 1.1283295 0.1968093 ... 1.1017963 1.3513006 0.47599787] [ 0.1968093 -2.1342092 -0.5242419 ... 0.47599787 -1.056634 -1.4126366 ]] ... [[ 0.08617662 0.6007923 1.9557656 ... -0.79097354 0.3264993 0.7304546 ] [ 1.9557656 -0.51987016 -0.55849004 ... 0.7304546 -0.53800714 -0.79167485] [-0.55849004 -0.40757066 -0.4576975 ... -0.79167485 -1.2706778 0.5564204 ] ... [ 1.1612197 -1.305691 -0.25194776 ... -2.3722792 0.21668248 -0.21693124] [-0.25194776 -1.6139748 0.46095896 ... -0.21693124 0.7837338 0.59595144] [ 0.46095896 0.68781465 1.2920169 ... 0.59595144 -0.92927426 0.24520753]] [[-0.4193792 -0.38040712 -0.9813608 ... -0.68117005 -0.14021482 -1.8597454 ] [-0.9813608 0.42077684 1.0669576 ... -1.8597454 1.7665215 0.6774126 ] [ 1.0669576 -0.37847468 -1.0475864 ... 0.6774126 -0.5300751 -0.35700452] ... [ 0.47627667 0.36842975 -2.2223597 ... 1.7742398 0.23744361 -3.0447497 ] [-2.2223597 -1.0417887 -0.14340581 ... -3.0447497 -0.12091509 -1.3122034 ] [-0.14340581 0.8400176 -1.7357298 ... -1.3122034 -0.5442587 -1.6677922 ]] [[-0.3270208 -0.3326506 0.21841457 ... 0.18094851 -0.30731824 -0.21732928] [ 0.21841457 0.47590923 -0.5801797 ... -0.21732928 -0.11027091 0.14891024] [-0.5801797 -0.03564442 1.0547366 ... 0.14891024 0.12961252 -0.6176247 ] ... [-1.1297221 -0.74076414 -0.10918897 ... 0.7327156 -0.30478132 -0.41422686] [-0.10918897 -0.89050317 0.11464072 ... -0.41422686 1.2488011 0.8209875 ] [ 0.11464072 0.7653762 1.1091734 ... 0.8209875 0.80606776 -0.76864105]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4987.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 1.58923060e-01 -3.94973606e-01 1.12916946e+00 ... -5.16051590e-01 -9.27205265e-01 -7.10096180e-01] [ 1.12916946e+00 2.66118217e+00 2.54342842e+00 ... -7.10096180e-01 -1.30953982e-01 1.03395808e+00] [-3.69294196e-01 -4.31170940e-01 3.69008005e-01 ... 3.62103194e-01 -6.73072815e-01 1.10624015e+00] ... [ 6.88472614e-02 -8.84762466e-01 1.88532221e+00 ... 1.51079714e-01 -1.17147245e-01 4.44229871e-01] [ 7.64235109e-02 2.91513592e-01 3.78558785e-01 ... -8.75505090e-01 4.30543035e-01 -5.41431308e-01] [ 3.78558785e-01 6.53091967e-01 1.98931038e+00 ... -5.41431308e-01 3.88198823e-01 -5.71638644e-01]] [[ 4.86211419e-01 1.14547384e+00 -3.81458759e-01 ... -6.38530850e-01 4.94442135e-01 -1.53223062e+00] [-3.81458759e-01 6.96907640e-01 4.15414006e-01 ... -1.53223062e+00 -9.84194458e-01 -1.15004134e+00] [-1.71934938e+00 -1.06480849e+00 -6.65364027e-01 ... 1.26300797e-01 -2.00020719e+00 7.63807297e-01] ... [-2.82255793e+00 -3.96547198e-01 -1.68632698e+00 ... -1.40997279e+00 8.76234174e-01 -1.46940306e-01] [-1.58116829e+00 -4.72922653e-01 1.24200888e-01 ... 2.61403829e-01 4.91401941e-01 1.00869440e-01] [ 1.24200888e-01 -4.53890145e-01 -1.42256415e+00 ... 1.00869440e-01 4.75829899e-01 1.47220230e+00]] [[-3.38092670e-02 3.26558799e-01 -1.20185673e+00 ... -1.04295516e+00 4.68989521e-01 -2.10021162e+00] [-1.20185673e+00 1.39630520e+00 7.37257302e-01 ... -2.10021162e+00 2.19153690e+00 3.66269886e-01] [ 2.12451243e+00 -1.05919969e+00 8.71492982e-01 ... 
-3.04782987e-01 6.58105075e-01 3.28774713e-02] ... [ 6.11679971e-01 -4.13464457e-01 1.33075893e+00 ... -6.30780339e-01 2.20531095e-02 -5.64091504e-01] [-4.51902896e-01 -4.37259972e-02 2.01997423e+00 ... -6.50612473e-01 -1.69175923e+00 3.43582630e-01] [ 2.01997423e+00 5.63202858e-01 -1.22766864e+00 ... 3.43582630e-01 1.09465182e+00 1.73878801e+00]] ... [[-1.25267163e-01 -3.44938278e-01 3.80666032e-02 ... 4.89046484e-01 -1.74202584e-02 -1.46429145e+00] [ 3.80666032e-02 -8.99257958e-02 5.11965692e-01 ... -1.46429145e+00 -1.36162722e+00 9.77191627e-01] [-1.05117786e+00 -2.26269633e-01 4.01973605e-01 ... -1.25494277e+00 -6.69514000e-01 -2.67116904e-01] ... [-1.23681629e+00 -1.10296857e+00 9.37589884e-01 ... 1.89416599e+00 -8.60188544e-01 -1.20200031e-03] [ 2.36888218e+00 -1.20778179e+00 2.35238123e+00 ... 1.53697401e-01 3.04558724e-01 3.33370775e-01] [ 2.35238123e+00 -1.27122271e+00 -1.17284000e+00 ... 3.33370775e-01 -2.56689399e-01 1.82122901e-01]] [[ 7.79773533e-01 -5.04204584e-03 -9.07114267e-01 ... -2.09308892e-01 5.15973389e-01 -2.29169741e-01] [-9.07114267e-01 -7.98408687e-02 -1.23479009e+00 ... -2.29169741e-01 5.28070152e-01 4.25788522e-01] [ 1.98458743e+00 4.86803830e-01 -3.26377094e-01 ... 1.33555746e+00 -1.31226540e+00 -1.16138160e+00] ... [ 1.05130625e+00 -4.09354061e-01 -7.92450964e-01 ... 1.94101942e+00 9.10071850e-01 -1.39275706e+00] [ 3.23856026e-01 -1.61707008e+00 -4.59052473e-02 ... 4.28225905e-01 4.29282337e-01 -1.84631801e+00] [-4.59052473e-02 -6.11286871e-02 4.83777493e-01 ... -1.84631801e+00 2.02985501e+00 -7.59693086e-01]] [[-6.68294668e-01 5.64732134e-01 4.52630132e-01 ... 2.84090281e-01 -6.77163064e-01 -1.09468186e+00] [ 4.52630132e-01 8.86563540e-01 7.32186377e-01 ... -1.09468186e+00 3.51769179e-01 2.88334894e+00] [-1.52093041e+00 5.28545380e-02 -1.37467873e+00 ... -5.88819265e-01 -1.57452011e+00 -1.44068408e+00] ... [ 1.86961353e+00 -2.87201554e-01 3.05727214e-01 ... 
-1.03141534e+00 -7.38897860e-01 1.03668439e+00] [-1.24846447e+00 -3.89652252e-01 -1.69951105e+00 ... -6.16051972e-01 8.63506868e-02 1.02416551e+00] [-1.69951105e+00 1.14440334e+00 5.96719645e-02 ... 1.02416551e+00 -1.91632524e-01 5.24380088e-01]]]; ov_res: [[[ 1.58923060e-01 -3.94973606e-01 1.12916946e+00 ... -5.16051590e-01 -9.27205265e-01 -7.10096180e-01] [ 1.12916946e+00 2.66118217e+00 2.54342842e+00 ... -7.10096180e-01 -1.30953982e-01 1.03395808e+00] [-3.69294196e-01 -4.31170940e-01 3.69008005e-01 ... 3.62103194e-01 -6.73072815e-01 1.10624015e+00] ... [ 6.88472614e-02 -8.84762466e-01 1.88532221e+00 ... 1.51079714e-01 -1.17147245e-01 4.44229871e-01] [ 7.64235109e-02 2.91513592e-01 3.78558785e-01 ... -8.75505090e-01 4.30543035e-01 -5.41431308e-01] [ 3.78558785e-01 6.53091967e-01 1.98931038e+00 ... -5.41431308e-01 3.88198823e-01 -5.71638644e-01]] [[ 4.86211419e-01 1.14547384e+00 -3.81458759e-01 ... -6.38530850e-01 4.94442135e-01 -1.53223062e+00] [-3.81458759e-01 6.96907640e-01 4.15414006e-01 ... -1.53223062e+00 -9.84194458e-01 -1.15004134e+00] [-1.71934938e+00 -1.06480849e+00 -6.65364027e-01 ... 1.26300797e-01 -2.00020719e+00 7.63807297e-01] ... [-2.82255793e+00 -3.96547198e-01 -1.68632698e+00 ... -1.40997279e+00 8.76234174e-01 -1.46940306e-01] [-1.58116829e+00 -4.72922653e-01 1.24200888e-01 ... 2.61403829e-01 4.91401941e-01 1.00869440e-01] [ 1.24200888e-01 -4.53890145e-01 -1.42256415e+00 ... 1.00869440e-01 4.75829899e-01 1.47220230e+00]] [[-3.38092670e-02 3.26558799e-01 -1.20185673e+00 ... -1.04295516e+00 4.68989521e-01 -2.10021162e+00] [-1.20185673e+00 1.39630520e+00 7.37257302e-01 ... -2.10021162e+00 2.19153690e+00 3.66269886e-01] [ 2.12451243e+00 -1.05919969e+00 8.71492982e-01 ... -3.04782987e-01 6.58105075e-01 3.28774713e-02] ... [ 6.11679971e-01 -4.13464457e-01 1.33075893e+00 ... -6.30780339e-01 2.20531095e-02 -5.64091504e-01] [-4.51902896e-01 -4.37259972e-02 2.01997423e+00 ... 
-6.50612473e-01 -1.69175923e+00 3.43582630e-01] [ 2.01997423e+00 5.63202858e-01 -1.22766864e+00 ... 3.43582630e-01 1.09465182e+00 1.73878801e+00]] ... [[-1.25267163e-01 -3.44938278e-01 3.80666032e-02 ... 4.89046484e-01 -1.74202584e-02 -1.46429145e+00] [ 3.80666032e-02 -8.99257958e-02 5.11965692e-01 ... -1.46429145e+00 -1.36162722e+00 9.77191627e-01] [-1.05117786e+00 -2.26269633e-01 4.01973605e-01 ... -1.25494277e+00 -6.69514000e-01 -2.67116904e-01] ... [-1.23681629e+00 -1.10296857e+00 9.37589884e-01 ... 1.89416599e+00 -8.60188544e-01 -1.20200031e-03] [ 2.36888218e+00 -1.20778179e+00 2.35238123e+00 ... 1.53697401e-01 3.04558724e-01 3.33370775e-01] [ 2.35238123e+00 -1.27122271e+00 -1.17284000e+00 ... 3.33370775e-01 -2.56689399e-01 1.82122901e-01]] [[ 7.79773533e-01 -5.04204584e-03 -9.07114267e-01 ... -2.09308892e-01 5.15973389e-01 -2.29169741e-01] [-9.07114267e-01 -7.98408687e-02 -1.23479009e+00 ... -2.29169741e-01 5.28070152e-01 4.25788522e-01] [ 1.98458743e+00 4.86803830e-01 -3.26377094e-01 ... 1.33555746e+00 -1.31226540e+00 -1.16138160e+00] ... [ 1.05130625e+00 -4.09354061e-01 -7.92450964e-01 ... 1.94101942e+00 9.10071850e-01 -1.39275706e+00] [ 3.23856026e-01 -1.61707008e+00 -4.59052473e-02 ... 4.28225905e-01 4.29282337e-01 -1.84631801e+00] [-4.59052473e-02 -6.11286871e-02 4.83777493e-01 ... -1.84631801e+00 2.02985501e+00 -7.59693086e-01]] [[-6.68294668e-01 5.64732134e-01 4.52630132e-01 ... 2.84090281e-01 -6.77163064e-01 -1.09468186e+00] [ 4.52630132e-01 8.86563540e-01 7.32186377e-01 ... -1.09468186e+00 3.51769179e-01 2.88334894e+00] [-1.52093041e+00 5.28545380e-02 -1.37467873e+00 ... -5.88819265e-01 -1.57452011e+00 -1.44068408e+00] ... [ 1.86961353e+00 -2.87201554e-01 3.05727214e-01 ... -1.03141534e+00 -7.38897860e-01 1.03668439e+00] [-1.24846447e+00 -3.89652252e-01 -1.69951105e+00 ... -6.16051972e-01 8.63506868e-02 1.02416551e+00] [-1.69951105e+00 1.14440334e+00 5.96719645e-02 ... 1.02416551e+00 -1.91632524e-01 5.24380088e-01]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:0 - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4989.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 6.58109725e-01 -1.42007828e+00 1.66364264e+00 ... 8.27395678e-01 1.69296837e+00 -8.87697995e-01] [ 8.33843350e-01 1.76846468e+00 -6.24526381e-01 ... -5.08555360e-02 -3.25643122e-01 4.95274723e-01] [ 7.07731485e-01 -2.31483087e-01 -1.12978005e+00 ... 8.89061928e-01 -1.86664343e+00 5.39363205e-01]] [[-2.42580906e-01 1.07290365e-01 4.36181307e-01 ... -1.21591163e+00 -1.93528092e+00 -1.03339553e+00] [ 1.34357326e-02 5.38609803e-01 -4.12545323e-01 ... 5.87162614e-01 -9.75531265e-02 1.53592199e-01] [-1.40773857e+00 4.80161250e-01 -3.35549206e-01 ... -3.94586980e-01 -9.30161119e-01 -1.78369439e+00]] [[ 5.78318238e-01 5.03343046e-01 4.02300745e-01 ... 1.36517495e-01 -5.35605252e-01 1.19090796e+00] [ 1.22745681e+00 -1.93985295e+00 3.87628376e-01 ... -3.44129838e-02 -6.10282004e-01 -4.92330104e-01] [-5.95856607e-01 1.00229800e+00 2.21991467e+00 ... -5.96353486e-02 1.14925039e+00 1.80968690e+00]] ... [[-1.08329773e+00 8.59066188e-01 -6.81598663e-01 ... -1.74550343e+00 3.12724859e-01 -1.63270032e+00] [-3.27579796e-01 1.47626805e+00 -8.89293373e-01 ... -1.15986669e+00 7.26709068e-01 1.94503176e+00] [ 3.29219788e-01 -9.20751929e-01 1.21717310e+00 ... 4.13711786e-01 -4.70590621e-01 5.39282501e-01]] [[-1.86161622e-01 2.81356603e-01 -4.32606608e-01 ... -1.65835962e-01 7.04566599e-04 2.19145823e+00] [-3.58524978e-01 -4.90862817e-01 -3.59196126e-01 ... 
-2.33265668e-01 7.62386322e-01 1.14512599e+00] [ 1.71540290e-01 1.78096667e-01 -9.43621337e-01 ... -6.09418571e-01 6.52673721e-01 -1.39887556e-01]] [[-1.56718969e+00 -4.46679860e-01 1.74736585e-02 ... 2.67136544e-01 2.87528515e-01 -1.34108186e+00] [-6.99642837e-01 5.91784358e-01 6.49700105e-01 ... 8.79125655e-01 1.52320099e+00 7.28719056e-01] [ 1.46456078e-01 2.52622217e-01 -2.77429700e-01 ... 8.04059803e-01 -2.62199497e+00 -1.78583249e-01]]]; ov_res: [[[ 6.58109725e-01 -1.42007828e+00 1.66364264e+00 ... 8.27395678e-01 1.69296837e+00 -8.87697995e-01] [ 8.33843350e-01 1.76846468e+00 -6.24526381e-01 ... -5.08555360e-02 -3.25643122e-01 4.95274723e-01] [ 7.07731485e-01 -2.31483087e-01 -1.12978005e+00 ... 8.89061928e-01 -1.86664343e+00 5.39363205e-01]] [[-2.42580906e-01 1.07290365e-01 4.36181307e-01 ... -1.21591163e+00 -1.93528092e+00 -1.03339553e+00] [ 1.34357326e-02 5.38609803e-01 -4.12545323e-01 ... 5.87162614e-01 -9.75531265e-02 1.53592199e-01] [-1.40773857e+00 4.80161250e-01 -3.35549206e-01 ... -3.94586980e-01 -9.30161119e-01 -1.78369439e+00]] [[ 5.78318238e-01 5.03343046e-01 4.02300745e-01 ... 1.36517495e-01 -5.35605252e-01 1.19090796e+00] [ 1.22745681e+00 -1.93985295e+00 3.87628376e-01 ... -3.44129838e-02 -6.10282004e-01 -4.92330104e-01] [-5.95856607e-01 1.00229800e+00 2.21991467e+00 ... -5.96353486e-02 1.14925039e+00 1.80968690e+00]] ... [[-1.08329773e+00 8.59066188e-01 -6.81598663e-01 ... -1.74550343e+00 3.12724859e-01 -1.63270032e+00] [-3.27579796e-01 1.47626805e+00 -8.89293373e-01 ... -1.15986669e+00 7.26709068e-01 1.94503176e+00] [ 3.29219788e-01 -9.20751929e-01 1.21717310e+00 ... 4.13711786e-01 -4.70590621e-01 5.39282501e-01]] [[-1.86161622e-01 2.81356603e-01 -4.32606608e-01 ... -1.65835962e-01 7.04566599e-04 2.19145823e+00] [-3.58524978e-01 -4.90862817e-01 -3.59196126e-01 ... -2.33265668e-01 7.62386322e-01 1.14512599e+00] [ 1.71540290e-01 1.78096667e-01 -9.43621337e-01 ... 
-6.09418571e-01 6.52673721e-01 -1.39887556e-01]] [[-1.56718969e+00 -4.46679860e-01 1.74736585e-02 ... 2.67136544e-01 2.87528515e-01 -1.34108186e+00] [-6.99642837e-01 5.91784358e-01 6.49700105e-01 ... 8.79125655e-01 1.52320099e+00 7.28719056e-01] [ 1.46456078e-01 2.52622217e-01 -2.77429700e-01 ... 8.04059803e-01 -2.62199497e+00 -1.78583249e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4991.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:1 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4993.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4995.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4997.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:1 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_4999.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5001.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5003.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5005.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 2.2989132 0.25082994 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3270713 1.8186646 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3079216 -1.334822 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.2287189 0.38519284 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.901871 -1.1892194 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.8291551 -0.15714231 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 2.2989132 0.25082994 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3270713 1.8186646 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 
-1.3079216 -1.334822 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.2287189 0.38519284 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.901871 -1.1892194 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.8291551 -0.15714231 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5007.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 
0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:2 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5009.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:3 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5011.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5013.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.25721171 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.64749825 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.39661834 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.06622082 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.4255828 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.27022114 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.2637339 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.22784828 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1359887 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.1900612 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1676517 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.7078958 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.25721171 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.64749825 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.39661834 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.06622082 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 
1.4255828 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.27022114 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.2637339 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.22784828 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1359887 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.1900612 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1676517 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.7078958 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5015.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.73567903 -0.4019346 0.21831049] [ 0. 0. 0. ... -0.28947884 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.30313557 ... 0. 0. 0. ] [-1.3783603 0.17540012 1.3327183 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.8688409 0.40351543 0.717189 ] [ 0. 0. 0. ... 0.2164444 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.731516 ... 0. 0. 0. ] [ 0.2974996 -0.8145993 0.24319105 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.9546698 -0.2954068 0.22419651] [ 0. 0. 0. ... 1.8548822 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.43372622 ... 0. 0. 0. ] [ 1.8603704 1.0292681 0.9375486 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.4793142 1.3085461 -1.1809874 ] [ 0. 0. 0. ... -0.99652827 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.3203648 ... 0. 0. 0. ] [-1.5736041 0.4833533 -0.04014001 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.6113785 2.6286278 0.03498986] [ 0. 0. 0. ... 0.4448786 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.8844963 ... 0. 0. 0. ] [ 1.0772343 -0.44554114 1.8656102 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.68689424 1.0860312 1.0667317 ] [ 0. 0. 0. ... 0.51952535 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.00942573 ... 0. 0. 0. ] [-0.48651946 0.6289563 0.25578272 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.73567903 -0.4019346 0.21831049] [ 0. 0. 0. ... -0.28947884 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 
0. ] [ 0. 0. -0.30313557 ... 0. 0. 0. ] [-1.3783603 0.17540012 1.3327183 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.8688409 0.40351543 0.717189 ] [ 0. 0. 0. ... 0.2164444 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.731516 ... 0. 0. 0. ] [ 0.2974996 -0.8145993 0.24319105 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.9546698 -0.2954068 0.22419651] [ 0. 0. 0. ... 1.8548822 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.43372622 ... 0. 0. 0. ] [ 1.8603704 1.0292681 0.9375486 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.4793142 1.3085461 -1.1809874 ] [ 0. 0. 0. ... -0.99652827 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.3203648 ... 0. 0. 0. ] [-1.5736041 0.4833533 -0.04014001 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.6113785 2.6286278 0.03498986] [ 0. 0. 0. ... 0.4448786 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.8844963 ... 0. 0. 0. ] [ 1.0772343 -0.44554114 1.8656102 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.68689424 1.0860312 1.0667317 ] [ 0. 0. 0. ... 0.51952535 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.00942573 ... 0. 0. 0. ] [-0.48651946 0.6289563 0.25578272 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5017.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:3 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5019.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5021.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5023.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5025.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5027.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. 
... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:5 - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5029.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[5, 5]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5031.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.6760767 -1.0717388 -0.15625297] [ 0. 0. 0. ... -1.0717388 -0.15625297 1.0217298 ] [ 0. 0. 0. ... -0.15625297 1.0217298 0. ] ... [ 0. -0.3550628 0.9492359 ... 0. 0. 0. ] [-0.3550628 0.9492359 0.43776792 ... 0. 0. 0. ] [ 0.9492359 0.43776792 -0.7152519 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8447805 0.19061688 0.84343755] [ 0. 0. 0. ... 0.19061688 0.84343755 -0.9285758 ] [ 0. 0. 0. ... 0.84343755 -0.9285758 0. ] ... [ 0. -1.2517009 0.9694825 ... 0. 0. 0. ] [-1.2517009 0.9694825 -0.68732077 ... 0. 0. 0. ] [ 0.9694825 -0.68732077 0.5581022 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.5262693 -2.1116772 -0.7478085 ] [ 0. 0. 0. ... -2.1116772 -0.7478085 2.1194506 ] [ 0. 0. 0. ... -0.7478085 2.1194506 0. ] ... [ 0. -0.9869972 -1.2716534 ... 0. 0. 0. ] [-0.9869972 -1.2716534 -0.84663165 ... 0. 0. 0. ] [-1.2716534 -0.84663165 1.2557075 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.07182746 -0.3898652 -2.181576 ] [ 0. 0. 0. ... -0.3898652 -2.181576 -0.48036614] [ 0. 0. 0. ... -2.181576 -0.48036614 0. ] ... [ 0. -1.2594516 -0.31276676 ... 0. 0. 0. ] [-1.2594516 -0.31276676 -0.77583355 ... 0. 0. 0. ] [-0.31276676 -0.77583355 -1.1924425 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.5197253 0.7461275 -0.5126659 ] [ 0. 0. 0. ... 0.7461275 -0.5126659 1.0641441 ] [ 0. 0. 0. ... -0.5126659 1.0641441 0. ] ... [ 0. -0.25620094 -1.3066825 ... 0. 0. 0. ] [-0.25620094 -1.3066825 -2.1971915 ... 0. 0. 0. ] [-1.3066825 -2.1971915 -0.7135568 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9251304 -1.2153085 0.67112595] [ 0. 0. 0. 
... -1.2153085 0.67112595 0.04755305] [ 0. 0. 0. ... 0.67112595 0.04755305 0. ] ... [ 0. 0.35909665 0.37394896 ... 0. 0. 0. ] [ 0.35909665 0.37394896 -0.7197218 ... 0. 0. 0. ] [ 0.37394896 -0.7197218 -0.51994616 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.6760767 -1.0717388 -0.15625297] [ 0. 0. 0. ... -1.0717388 -0.15625297 1.0217298 ] [ 0. 0. 0. ... -0.15625297 1.0217298 0. ] ... [ 0. -0.3550628 0.9492359 ... 0. 0. 0. ] [-0.3550628 0.9492359 0.43776792 ... 0. 0. 0. ] [ 0.9492359 0.43776792 -0.7152519 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8447805 0.19061688 0.84343755] [ 0. 0. 0. ... 0.19061688 0.84343755 -0.9285758 ] [ 0. 0. 0. ... 0.84343755 -0.9285758 0. ] ... [ 0. -1.2517009 0.9694825 ... 0. 0. 0. ] [-1.2517009 0.9694825 -0.68732077 ... 0. 0. 0. ] [ 0.9694825 -0.68732077 0.5581022 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.5262693 -2.1116772 -0.7478085 ] [ 0. 0. 0. ... -2.1116772 -0.7478085 2.1194506 ] [ 0. 0. 0. ... -0.7478085 2.1194506 0. ] ... [ 0. -0.9869972 -1.2716534 ... 0. 0. 0. ] [-0.9869972 -1.2716534 -0.84663165 ... 0. 0. 0. ] [-1.2716534 -0.84663165 1.2557075 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.07182746 -0.3898652 -2.181576 ] [ 0. 0. 0. ... -0.3898652 -2.181576 -0.48036614] [ 0. 0. 0. ... -2.181576 -0.48036614 0. ] ... [ 0. -1.2594516 -0.31276676 ... 0. 0. 0. ] [-1.2594516 -0.31276676 -0.77583355 ... 0. 0. 0. ] [-0.31276676 -0.77583355 -1.1924425 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.5197253 0.7461275 -0.5126659 ] [ 0. 0. 0. ... 0.7461275 -0.5126659 1.0641441 ] [ 0. 0. 0. ... -0.5126659 1.0641441 0. ] ... [ 0. -0.25620094 -1.3066825 ... 0. 0. 0. ] [-0.25620094 -1.3066825 -2.1971915 ... 0. 0. 0. ] [-1.3066825 -2.1971915 -0.7135568 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9251304 -1.2153085 0.67112595] [ 0. 0. 0. ... -1.2153085 0.67112595 0.04755305] [ 0. 0. 0. ... 0.67112595 0.04755305 0. ] ... [ 0. 0.35909665 0.37394896 ... 0. 0. 0. ] [ 0.35909665 0.37394896 -0.7197218 ... 0. 0. 0. ] [ 0.37394896 -0.7197218 -0.51994616 ... 0. 0. 0. ]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:1 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5033.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.53646171e+00 7.99387693e-01 -8.11137795e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.99387693e-01 -8.11137795e-01 0.00000000e+00] [ 0.00000000e+00 8.16942096e-01 -3.40340167e-01 ... -1.35351956e+00 1.14951682e+00 1.42020309e+00] ... [ 5.12483478e-01 -9.54082251e-01 -3.37830842e-01 ... 7.75903165e-01 -1.25540197e+00 0.00000000e+00] [ 0.00000000e+00 -6.72590435e-01 -1.00288248e+00 ... 1.94470191e+00 1.46763253e+00 4.90777344e-01] [-6.72590435e-01 -1.00288248e+00 -5.15380561e-01 ... 1.46763253e+00 4.90777344e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.41277611e-01 1.20887232e+00 8.48828107e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.20887232e+00 8.48828107e-02 0.00000000e+00] [ 0.00000000e+00 -2.20969006e-01 -7.38093197e-01 ... -1.03836143e+00 2.62289977e+00 2.90112585e-01] ... [-1.20544934e+00 5.04268825e-01 1.24261558e+00 ... -2.76495636e-01 -4.93249774e-01 0.00000000e+00] [ 0.00000000e+00 1.14343214e+00 -4.28128988e-01 ... -9.80989158e-01 2.56297216e-02 -1.08168483e-01] [ 1.14343214e+00 -4.28128988e-01 -6.03609145e-01 ... 2.56297216e-02 -1.08168483e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -4.82991308e-01 2.18282080e+00 -2.32516661e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 2.18282080e+00 -2.32516661e-01 0.00000000e+00] [ 0.00000000e+00 -1.48652661e+00 1.50556183e+00 ... -4.91992235e-01 7.80783772e-01 1.04431823e-01] ... 
[-1.10517716e+00 3.55552405e-01 1.00724983e+00 ... -1.43294871e-01 -6.25593901e-01 0.00000000e+00] [ 0.00000000e+00 -6.29523933e-01 -6.87501490e-01 ... 1.86227083e+00 1.21653783e+00 -1.45917571e+00] [-6.29523933e-01 -6.87501490e-01 -6.78820550e-01 ... 1.21653783e+00 -1.45917571e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.60922545e-01 4.00041848e-01 -3.71850848e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 4.00041848e-01 -3.71850848e-01 0.00000000e+00] [ 0.00000000e+00 7.63407469e-01 -9.60475624e-01 ... 9.45352018e-01 1.09800589e+00 -3.53914052e-02] ... [-1.46920037e+00 4.07750398e-01 5.65066040e-01 ... 3.03878605e-01 -8.59799213e-04 0.00000000e+00] [ 0.00000000e+00 3.11387450e-01 -1.57172635e-01 ... -1.05716184e-01 -5.56074858e-01 8.20289254e-01] [ 3.11387450e-01 -1.57172635e-01 -1.47489175e-01 ... -5.56074858e-01 8.20289254e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.02124178e-01 -5.59300520e-02 -1.72421068e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -5.59300520e-02 -1.72421068e-01 0.00000000e+00] [ 0.00000000e+00 -2.67117709e-01 2.17047501e-02 ... -7.31269896e-01 -3.91233623e-01 -5.16696334e-01] ... [ 9.74908993e-02 -2.53010511e-01 -2.36235738e-01 ... 7.65643895e-01 1.49592400e-01 0.00000000e+00] [ 0.00000000e+00 -2.11072117e-01 6.55410409e-01 ... -5.45469940e-01 5.64185739e-01 -1.48881805e+00] [-2.11072117e-01 6.55410409e-01 -6.89268827e-01 ... 5.64185739e-01 -1.48881805e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.70516729e-01 6.70470893e-01 3.25553924e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 6.70470893e-01 3.25553924e-01 0.00000000e+00] [ 0.00000000e+00 -3.77857387e-01 3.91929924e-01 ... 9.96310472e-01 -2.33799934e-01 -6.84301078e-01] ... [-1.19816089e+00 -2.08876729e+00 -8.49107385e-01 ... 1.14862156e+00 -3.86952817e-01 0.00000000e+00] [ 0.00000000e+00 1.94411606e-01 -5.15144527e-01 ... 
-3.40335518e-01 -1.07220006e+00 3.22355151e-01] [ 1.94411606e-01 -5.15144527e-01 -7.59003386e-02 ... -1.07220006e+00 3.22355151e-01 0.00000000e+00]]]; ov_res: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.53646171e+00 7.99387693e-01 -8.11137795e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.99387693e-01 -8.11137795e-01 0.00000000e+00] [ 0.00000000e+00 8.16942096e-01 -3.40340167e-01 ... -1.35351956e+00 1.14951682e+00 1.42020309e+00] ... [ 5.12483478e-01 -9.54082251e-01 -3.37830842e-01 ... 7.75903165e-01 -1.25540197e+00 0.00000000e+00] [ 0.00000000e+00 -6.72590435e-01 -1.00288248e+00 ... 1.94470191e+00 1.46763253e+00 4.90777344e-01] [-6.72590435e-01 -1.00288248e+00 -5.15380561e-01 ... 1.46763253e+00 4.90777344e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.41277611e-01 1.20887232e+00 8.48828107e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.20887232e+00 8.48828107e-02 0.00000000e+00] [ 0.00000000e+00 -2.20969006e-01 -7.38093197e-01 ... -1.03836143e+00 2.62289977e+00 2.90112585e-01] ... [-1.20544934e+00 5.04268825e-01 1.24261558e+00 ... -2.76495636e-01 -4.93249774e-01 0.00000000e+00] [ 0.00000000e+00 1.14343214e+00 -4.28128988e-01 ... -9.80989158e-01 2.56297216e-02 -1.08168483e-01] [ 1.14343214e+00 -4.28128988e-01 -6.03609145e-01 ... 2.56297216e-02 -1.08168483e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -4.82991308e-01 2.18282080e+00 -2.32516661e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 2.18282080e+00 -2.32516661e-01 0.00000000e+00] [ 0.00000000e+00 -1.48652661e+00 1.50556183e+00 ... -4.91992235e-01 7.80783772e-01 1.04431823e-01] ... [-1.10517716e+00 3.55552405e-01 1.00724983e+00 ... -1.43294871e-01 -6.25593901e-01 0.00000000e+00] [ 0.00000000e+00 -6.29523933e-01 -6.87501490e-01 ... 1.86227083e+00 1.21653783e+00 -1.45917571e+00] [-6.29523933e-01 -6.87501490e-01 -6.78820550e-01 ... 1.21653783e+00 -1.45917571e+00 0.00000000e+00]] ... 
[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.60922545e-01 4.00041848e-01 -3.71850848e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 4.00041848e-01 -3.71850848e-01 0.00000000e+00] [ 0.00000000e+00 7.63407469e-01 -9.60475624e-01 ... 9.45352018e-01 1.09800589e+00 -3.53914052e-02] ... [-1.46920037e+00 4.07750398e-01 5.65066040e-01 ... 3.03878605e-01 -8.59799213e-04 0.00000000e+00] [ 0.00000000e+00 3.11387450e-01 -1.57172635e-01 ... -1.05716184e-01 -5.56074858e-01 8.20289254e-01] [ 3.11387450e-01 -1.57172635e-01 -1.47489175e-01 ... -5.56074858e-01 8.20289254e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.02124178e-01 -5.59300520e-02 -1.72421068e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -5.59300520e-02 -1.72421068e-01 0.00000000e+00] [ 0.00000000e+00 -2.67117709e-01 2.17047501e-02 ... -7.31269896e-01 -3.91233623e-01 -5.16696334e-01] ... [ 9.74908993e-02 -2.53010511e-01 -2.36235738e-01 ... 7.65643895e-01 1.49592400e-01 0.00000000e+00] [ 0.00000000e+00 -2.11072117e-01 6.55410409e-01 ... -5.45469940e-01 5.64185739e-01 -1.48881805e+00] [-2.11072117e-01 6.55410409e-01 -6.89268827e-01 ... 5.64185739e-01 -1.48881805e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.70516729e-01 6.70470893e-01 3.25553924e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 6.70470893e-01 3.25553924e-01 0.00000000e+00] [ 0.00000000e+00 -3.77857387e-01 3.91929924e-01 ... 9.96310472e-01 -2.33799934e-01 -6.84301078e-01] ... [-1.19816089e+00 -2.08876729e+00 -8.49107385e-01 ... 1.14862156e+00 -3.86952817e-01 0.00000000e+00] [ 0.00000000e+00 1.94411606e-01 -5.15144527e-01 ... -3.40335518e-01 -1.07220006e+00 3.22355151e-01] [ 1.94411606e-01 -5.15144527e-01 -7.59003386e-02 ... -1.07220006e+00 3.22355151e-01 0.00000000e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5035.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.28488103 -0.18864608 1.5088915 ] [ 0. 0. 0. ... -0.18864608 1.5088915 0.24965528] [ 0. 0. 0. ... 1.5088915 0.24965528 0. ] ... [ 0. 0.17808884 0.5971841 ... -1.9731292 -2.6103852 0.32927424] [ 0.17808884 0.5971841 -0.22636485 ... -2.6103852 0.32927424 0.54237336] [ 0.5971841 -0.22636485 -1.2373132 ... 0.32927424 0.54237336 0. ]] [[ 0. 0. 0. ... 0.26977834 -1.3016822 -0.07861534] [ 0. 0. 0. ... -1.3016822 -0.07861534 -0.93600935] [ 0. 0. 0. ... -0.07861534 -0.93600935 0. ] ... [ 0. -0.18908104 1.7980562 ... 0.5487986 -0.92892927 0.6357218 ] [-0.18908104 1.7980562 -2.0445266 ... -0.92892927 0.6357218 0.8169412 ] [ 1.7980562 -2.0445266 -0.11318019 ... 0.6357218 0.8169412 0. ]] [[ 0. 0. 0. ... 0.2506952 -0.33862996 0.76349664] [ 0. 0. 0. ... -0.33862996 0.76349664 -0.23074658] [ 0. 0. 0. ... 0.76349664 -0.23074658 0. ] ... [ 0. 0.1503534 -0.7358333 ... 0.7296258 -1.4636544 -0.58261156] [ 0.1503534 -0.7358333 0.117637 ... -1.4636544 -0.58261156 0.55166644] [-0.7358333 0.117637 0.06309684 ... -0.58261156 0.55166644 0. ]] ... [[ 0. 0. 0. ... -0.60179675 -2.5052466 0.20148507] [ 0. 0. 0. ... -2.5052466 0.20148507 -0.71989495] [ 0. 0. 0. ... 0.20148507 -0.71989495 0. ] ... [ 0. 0.11232372 0.65671194 ... -0.26623526 -0.29114276 -0.6182923 ] [ 0.11232372 0.65671194 0.09072948 ... -0.29114276 -0.6182923 0.5977931 ] [ 0.65671194 0.09072948 1.1711992 ... -0.6182923 0.5977931 0. ]] [[ 0. 0. 0. ... 1.7118922 0.86600256 -1.6024096 ] [ 0. 0. 0. ... 
0.86600256 -1.6024096 0.19152905] [ 0. 0. 0. ... -1.6024096 0.19152905 0. ] ... [ 0. 0.17701791 -0.00956861 ... 0.2788712 -0.2477385 -3.1925097 ] [ 0.17701791 -0.00956861 0.03693669 ... -0.2477385 -3.1925097 -0.46909058] [-0.00956861 0.03693669 2.3355613 ... -3.1925097 -0.46909058 0. ]] [[ 0. 0. 0. ... 0.27268967 -0.44339374 0.37877288] [ 0. 0. 0. ... -0.44339374 0.37877288 1.1816372 ] [ 0. 0. 0. ... 0.37877288 1.1816372 0. ] ... [ 0. 0.51316476 0.22625846 ... 0.05845491 0.92773175 0.59832644] [ 0.51316476 0.22625846 -0.30850536 ... 0.92773175 0.59832644 0.85134125] [ 0.22625846 -0.30850536 -0.694073 ... 0.59832644 0.85134125 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.28488103 -0.18864608 1.5088915 ] [ 0. 0. 0. ... -0.18864608 1.5088915 0.24965528] [ 0. 0. 0. ... 1.5088915 0.24965528 0. ] ... [ 0. 0.17808884 0.5971841 ... -1.9731292 -2.6103852 0.32927424] [ 0.17808884 0.5971841 -0.22636485 ... -2.6103852 0.32927424 0.54237336] [ 0.5971841 -0.22636485 -1.2373132 ... 0.32927424 0.54237336 0. ]] [[ 0. 0. 0. ... 0.26977834 -1.3016822 -0.07861534] [ 0. 0. 0. ... -1.3016822 -0.07861534 -0.93600935] [ 0. 0. 0. ... -0.07861534 -0.93600935 0. ] ... [ 0. -0.18908104 1.7980562 ... 0.5487986 -0.92892927 0.6357218 ] [-0.18908104 1.7980562 -2.0445266 ... -0.92892927 0.6357218 0.8169412 ] [ 1.7980562 -2.0445266 -0.11318019 ... 0.6357218 0.8169412 0. ]] [[ 0. 0. 0. ... 0.2506952 -0.33862996 0.76349664] [ 0. 0. 0. ... -0.33862996 0.76349664 -0.23074658] [ 0. 0. 0. ... 0.76349664 -0.23074658 0. ] ... [ 0. 0.1503534 -0.7358333 ... 0.7296258 -1.4636544 -0.58261156] [ 0.1503534 -0.7358333 0.117637 ... -1.4636544 -0.58261156 0.55166644] [-0.7358333 0.117637 0.06309684 ... -0.58261156 0.55166644 0. ]] ... [[ 0. 0. 0. ... -0.60179675 -2.5052466 0.20148507] [ 0. 0. 0. ... -2.5052466 0.20148507 -0.71989495] [ 0. 0. 0. ... 0.20148507 -0.71989495 0. ] ... [ 0. 0.11232372 0.65671194 ... -0.26623526 -0.29114276 -0.6182923 ] [ 0.11232372 0.65671194 0.09072948 ... 
-0.29114276 -0.6182923 0.5977931 ] [ 0.65671194 0.09072948 1.1711992 ... -0.6182923 0.5977931 0. ]] [[ 0. 0. 0. ... 1.7118922 0.86600256 -1.6024096 ] [ 0. 0. 0. ... 0.86600256 -1.6024096 0.19152905] [ 0. 0. 0. ... -1.6024096 0.19152905 0. ] ... [ 0. 0.17701791 -0.00956861 ... 0.2788712 -0.2477385 -3.1925097 ] [ 0.17701791 -0.00956861 0.03693669 ... -0.2477385 -3.1925097 -0.46909058] [-0.00956861 0.03693669 2.3355613 ... -3.1925097 -0.46909058 0. ]] [[ 0. 0. 0. ... 0.27268967 -0.44339374 0.37877288] [ 0. 0. 0. ... -0.44339374 0.37877288 1.1816372 ] [ 0. 0. 0. ... 0.37877288 1.1816372 0. ] ... [ 0. 0.51316476 0.22625846 ... 0.05845491 0.92773175 0.59832644] [ 0.51316476 0.22625846 -0.30850536 ... 0.92773175 0.59832644 0.85134125] [ 0.22625846 -0.30850536 -0.694073 ... 0.59832644 0.85134125 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5037.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -3.1801586 1.0441405 0.05543513] [ 0. 0. 0. ... 1.0441405 0.05543513 0. ] [ 0. -1.1937906 -0.98381567 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.3363615 1.2215315 0. ] [ 0. 0.9072542 -0.41473454 ... 0. 0. 0. ] [ 0.9072542 -0.41473454 0.22809331 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.33627895 1.0953465 -0.01544886] [ 0. 0. 0. ... 1.0953465 -0.01544886 0. ] [ 0. -0.12809008 -2.3711846 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.12061691 -2.4612105 0. ] [ 0. 1.5932642 0.33662388 ... 0. 0. 0. ] [ 1.5932642 0.33662388 0.5385103 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.6469947 -0.35777107 -0.30696255] [ 0. 0. 0. ... -0.35777107 -0.30696255 0. ] [ 0. 1.5053304 -0.62004435 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.3148316 -0.16742627 0. ] [ 0. 0.6412718 0.6928425 ... 0. 0. 0. ] [ 0.6412718 0.6928425 1.6654233 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 2.2138627 -0.40516445 -0.39404643] [ 0. 0. 0. ... -0.40516445 -0.39404643 0. ] [ 0. -0.01879056 0.11475803 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.818961 0.06694897 0. ] [ 0. 0.8854915 1.1740688 ... 0. 0. 0. ] [ 0.8854915 1.1740688 1.3500214 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.51342255 -0.24195768 -1.4259971 ] [ 0. 0. 0. ... -0.24195768 -1.4259971 0. ] [ 0. -0.3734279 -1.5376021 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.2824297 -0.84045416 0. ] [ 0. 0.26873243 0.6592872 ... 0. 0. 0. ] [ 0.26873243 0.6592872 -0.40914163 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.58303213 0.4659447 -0.1256341 ] [ 0. 0. 0. ... 0.4659447 -0.1256341 0. ] [ 0. -0.6870089 0.22310258 ... 0. 0. 0. 
] ... [ 0. 0. 0. ... 1.4475721 -0.5010366 0. ] [ 0. -0.807934 1.8715574 ... 0. 0. 0. ] [-0.807934 1.8715574 2.0029702 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -3.1801586 1.0441405 0.05543513] [ 0. 0. 0. ... 1.0441405 0.05543513 0. ] [ 0. -1.1937906 -0.98381567 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.3363615 1.2215315 0. ] [ 0. 0.9072542 -0.41473454 ... 0. 0. 0. ] [ 0.9072542 -0.41473454 0.22809331 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.33627895 1.0953465 -0.01544886] [ 0. 0. 0. ... 1.0953465 -0.01544886 0. ] [ 0. -0.12809008 -2.3711846 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.12061691 -2.4612105 0. ] [ 0. 1.5932642 0.33662388 ... 0. 0. 0. ] [ 1.5932642 0.33662388 0.5385103 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.6469947 -0.35777107 -0.30696255] [ 0. 0. 0. ... -0.35777107 -0.30696255 0. ] [ 0. 1.5053304 -0.62004435 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.3148316 -0.16742627 0. ] [ 0. 0.6412718 0.6928425 ... 0. 0. 0. ] [ 0.6412718 0.6928425 1.6654233 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 2.2138627 -0.40516445 -0.39404643] [ 0. 0. 0. ... -0.40516445 -0.39404643 0. ] [ 0. -0.01879056 0.11475803 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.818961 0.06694897 0. ] [ 0. 0.8854915 1.1740688 ... 0. 0. 0. ] [ 0.8854915 1.1740688 1.3500214 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.51342255 -0.24195768 -1.4259971 ] [ 0. 0. 0. ... -0.24195768 -1.4259971 0. ] [ 0. -0.3734279 -1.5376021 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.2824297 -0.84045416 0. ] [ 0. 0.26873243 0.6592872 ... 0. 0. 0. ] [ 0.26873243 0.6592872 -0.40914163 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.58303213 0.4659447 -0.1256341 ] [ 0. 0. 0. ... 0.4659447 -0.1256341 0. ] [ 0. -0.6870089 0.22310258 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 1.4475721 -0.5010366 0. ] [ 0. -0.807934 1.8715574 ... 0. 0. 0. ] [-0.807934 1.8715574 2.0029702 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:1 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5039.aten_im2col, %x.1 : Tensor): %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %4 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.kernel_size, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%4) fw_re: [[[ 0. 0. 0. ... 0.632125 -0.18875168 0. ] [ 0. 0. 0. ... 1.599349 1.1777707 0. ] [ 0. 0. 0. ... 0.56528443 0.09483068 0. ]] [[ 0. 0. 0. ... 0.2979776 1.9660289 0. ] [ 0. 0. 0. ... 0.62927556 0.3181126 0. ] [ 0. 0. 0. ... -0.03597675 -0.26311168 0. ]] [[ 0. 0. 0. ... -0.22632374 -0.28625533 0. ] [ 0. 0. 0. ... 0.26674902 0.7080371 0. ] [ 0. 0. 0. ... 0.720268 1.6096983 0. ]] ... [[ 0. 0. 0. ... -1.2052382 -0.31466267 0. ] [ 0. 0. 0. ... 0.8395053 -0.2015866 0. ] [ 0. 0. 0. ... 3.3329654 0.11468441 0. ]] [[ 0. 0. 0. ... -0.26360726 -0.59134066 0. ] [ 0. 0. 0. ... -0.5991588 0.40602446 0. ] [ 0. 0. 0. ... -1.5704162 -1.5028565 0. ]] [[ 0. 0. 0. ... 0.5306096 1.2388062 0. ] [ 0. 0. 0. ... 0.37439987 -0.05517103 0. ] [ 0. 0. 0. ... -1.5792288 -0.10272767 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.632125 -0.18875168 0. ] [ 0. 0. 0. ... 1.599349 1.1777707 0. ] [ 0. 0. 0. ... 0.56528443 0.09483068 0. ]] [[ 0. 0. 0. ... 0.2979776 1.9660289 0. ] [ 0. 0. 0. ... 0.62927556 0.3181126 0. ] [ 0. 0. 0. ... -0.03597675 -0.26311168 0. ]] [[ 0. 0. 0. ... -0.22632374 -0.28625533 0. ] [ 0. 0. 0. ... 0.26674902 0.7080371 0. ] [ 0. 0. 0. ... 0.720268 1.6096983 0. ]] ... [[ 0. 0. 0. ... -1.2052382 -0.31466267 0. ] [ 0. 0. 0. ... 0.8395053 -0.2015866 0. ] [ 0. 0. 0. ... 3.3329654 0.11468441 0. ]] [[ 0. 0. 0. ... -0.26360726 -0.59134066 0. ] [ 0. 0. 0. ... -0.5991588 0.40602446 0. ] [ 0. 0. 0. ... -1.5704162 -1.5028565 0. ]] [[ 0. 0. 0. ... 0.5306096 1.2388062 0. ] [ 0. 0. 0. ... 0.37439987 -0.05517103 0. ] [ 0. 0. 0. ... 
-1.5792288 -0.10272767 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5041.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.64973205 -0.27716574 0.17459854] [ 0. 0. 0. ... 0.17459854 -0.11607744 -0.05685032] [ 0. 0. 0. ... -0.05685032 1.3510001 0. ] ... [ 0. -0.8047059 -0.28939715 ... 1.003319 0.6633713 0.29240015] [-0.28939715 -0.03622815 -0.56223863 ... 0.29240015 0.18410191 -0.561879 ] [-0.56223863 -0.7773895 2.9298933 ... -0.561879 0.06662084 0. ]] [[ 0. 0. 0. ... 0.7066765 0.12240765 -1.762369 ] [ 0. 0. 0. ... -1.762369 -0.50338906 1.6056492 ] [ 0. 0. 0. ... 1.6056492 1.1335428 0. ] ... [ 0. 1.5564443 -0.21774597 ... -1.4063649 0.971067 1.3161143 ] [-0.21774597 2.1167624 -0.4545191 ... 1.3161143 0.5863057 -1.1971922 ] [-0.4545191 0.6973282 1.0422112 ... -1.1971922 0.6236572 0. ]] [[ 0. 0. 0. ... -1.6725051 0.44754404 -1.078467 ] [ 0. 0. 0. ... -1.078467 -0.54954946 0.49715567] [ 0. 0. 0. ... 0.49715567 0.11943179 0. ] ... [ 0. 1.3022473 1.0193697 ... 0.31316707 -1.1232289 1.7526695 ] [ 1.0193697 -0.29598957 -0.95839936 ... 1.7526695 -1.3922192 -0.9736767 ] [-0.95839936 0.7501799 0.04006871 ... -0.9736767 -1.5573728 0. ]] ... [[ 0. 0. 0. ... 2.0765848 -0.31650087 -0.17244455] [ 0. 0. 0. ... -0.17244455 -0.09564234 -0.219031 ] [ 0. 0. 0. ... -0.219031 -0.86976075 0. ] ... [ 0. -0.42247075 -0.38393345 ... -0.20830062 -0.8251838 0.63324 ] [-0.38393345 0.58608335 0.11642385 ... 0.63324 0.41608492 -0.6706323 ] [ 0.11642385 0.24716277 0.6545903 ... -0.6706323 0.03182904 0. ]] [[ 0. 0. 0. ... 1.1634185 -0.22898489 -0.0064049 ] [ 0. 0. 0. 
... -0.0064049 -0.8101833 -1.127361 ] [ 0. 0. 0. ... -1.127361 1.610641 0. ] ... [ 0. -0.15985379 -0.78980905 ... -0.6463203 -0.6321527 1.0659502 ] [-0.78980905 1.0632429 -1.6495486 ... 1.0659502 -1.0679998 -0.6081737 ] [-1.6495486 1.1478306 -0.42657247 ... -0.6081737 -0.21460037 0. ]] [[ 0. 0. 0. ... -0.9872481 -0.8822205 -1.2185231 ] [ 0. 0. 0. ... -1.2185231 -0.9612865 0.5055848 ] [ 0. 0. 0. ... 0.5055848 2.2164207 0. ] ... [ 0. 0.80998105 -0.6866605 ... -0.76720774 -1.2617289 2.840531 ] [-0.6866605 -0.6588716 -1.3437093 ... 2.840531 0.97031 -0.17457023] [-1.3437093 -0.20664768 0.40857252 ... -0.17457023 1.6413358 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.64973205 -0.27716574 0.17459854] [ 0. 0. 0. ... 0.17459854 -0.11607744 -0.05685032] [ 0. 0. 0. ... -0.05685032 1.3510001 0. ] ... [ 0. -0.8047059 -0.28939715 ... 1.003319 0.6633713 0.29240015] [-0.28939715 -0.03622815 -0.56223863 ... 0.29240015 0.18410191 -0.561879 ] [-0.56223863 -0.7773895 2.9298933 ... -0.561879 0.06662084 0. ]] [[ 0. 0. 0. ... 0.7066765 0.12240765 -1.762369 ] [ 0. 0. 0. ... -1.762369 -0.50338906 1.6056492 ] [ 0. 0. 0. ... 1.6056492 1.1335428 0. ] ... [ 0. 1.5564443 -0.21774597 ... -1.4063649 0.971067 1.3161143 ] [-0.21774597 2.1167624 -0.4545191 ... 1.3161143 0.5863057 -1.1971922 ] [-0.4545191 0.6973282 1.0422112 ... -1.1971922 0.6236572 0. ]] [[ 0. 0. 0. ... -1.6725051 0.44754404 -1.078467 ] [ 0. 0. 0. ... -1.078467 -0.54954946 0.49715567] [ 0. 0. 0. ... 0.49715567 0.11943179 0. ] ... [ 0. 1.3022473 1.0193697 ... 0.31316707 -1.1232289 1.7526695 ] [ 1.0193697 -0.29598957 -0.95839936 ... 1.7526695 -1.3922192 -0.9736767 ] [-0.95839936 0.7501799 0.04006871 ... -0.9736767 -1.5573728 0. ]] ... [[ 0. 0. 0. ... 2.0765848 -0.31650087 -0.17244455] [ 0. 0. 0. ... -0.17244455 -0.09564234 -0.219031 ] [ 0. 0. 0. ... -0.219031 -0.86976075 0. ] ... [ 0. -0.42247075 -0.38393345 ... -0.20830062 -0.8251838 0.63324 ] [-0.38393345 0.58608335 0.11642385 ... 
0.63324 0.41608492 -0.6706323 ] [ 0.11642385 0.24716277 0.6545903 ... -0.6706323 0.03182904 0. ]] [[ 0. 0. 0. ... 1.1634185 -0.22898489 -0.0064049 ] [ 0. 0. 0. ... -0.0064049 -0.8101833 -1.127361 ] [ 0. 0. 0. ... -1.127361 1.610641 0. ] ... [ 0. -0.15985379 -0.78980905 ... -0.6463203 -0.6321527 1.0659502 ] [-0.78980905 1.0632429 -1.6495486 ... 1.0659502 -1.0679998 -0.6081737 ] [-1.6495486 1.1478306 -0.42657247 ... -0.6081737 -0.21460037 0. ]] [[ 0. 0. 0. ... -0.9872481 -0.8822205 -1.2185231 ] [ 0. 0. 0. ... -1.2185231 -0.9612865 0.5055848 ] [ 0. 0. 0. ... 0.5055848 2.2164207 0. ] ... [ 0. 0.80998105 -0.6866605 ... -0.76720774 -1.2617289 2.840531 ] [-0.6866605 -0.6588716 -1.3437093 ... 2.840531 0.97031 -0.17457023] [-1.3437093 -0.20664768 0.40857252 ... -0.17457023 1.6413358 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5043.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.26615593 -0.462052 -1.0498095 ] [ 0. 0. 0. ... -1.0498095 -2.4453862 0. ] [ 0. 1.9839742 0.5935734 ... -0.5626871 0.4044637 0.5248373 ] ... [-0.81602186 0.50320584 0.99526775 ... 0.19170648 0.7376679 0. ] [ 0. 0.5821365 -1.1629308 ... 0.4133235 0.8389125 0.07211459] [-1.1629308 -1.4487438 0.10886475 ... 0.07211459 0.37106258 0. ]] [[ 0. 0. 0. ... -0.3499903 0.1197098 -0.3086755 ] [ 0. 0. 0. ... -0.3086755 1.879153 0. ] [ 0. -1.1364399 0.38214746 ... 0.05931022 -0.81383544 1.1081095 ] ... [ 0.38384604 2.6826715 1.4368373 ... -1.0109357 1.5588948 0. ] [ 0. 0.89526504 2.184282 ... -1.1176299 0.22360481 -1.777265 ] [ 2.184282 -1.4925916 -0.11739898 ... -1.777265 0.27828446 0. ]] [[ 0. 0. 0. ... 0.40741846 1.1917396 2.0003436 ] [ 0. 0. 0. ... 2.0003436 -0.06018327 0. ] [ 0. 0.14201593 -0.70193195 ... -0.06026362 -0.96564466 1.3475428 ] ... [-0.15145615 0.86535966 -1.00476 ... 0.07858864 1.2355726 0. ] [ 0. -0.9892913 1.4617437 ... 0.0853509 -1.2988883 0.9767869 ] [ 1.4617437 1.1819719 -1.4737607 ... 0.9767869 -1.2569995 0. ]] ... [[ 0. 0. 0. ... 1.0657406 0.7511433 -0.8193329 ] [ 0. 0. 0. ... -0.8193329 -0.1360911 0. ] [ 0. 0.7303265 1.1514689 ... 1.700382 0.27476096 -1.5925341 ] ... [ 0.44422168 0.11551108 1.1650032 ... 0.3719129 0.95084256 0. ] [ 0. -0.21199888 0.7994715 ... 0.9708771 -0.05868831 2.113385 ] [ 0.7994715 -1.1652337 -0.7801526 ... 2.113385 0.9402439 0. ]] [[ 0. 0. 0. ... 
0.95421195 -1.7551236 -0.6870886 ] [ 0. 0. 0. ... -0.6870886 -1.1052812 0. ] [ 0. -0.2855908 -0.3412239 ... 0.08678132 -1.031746 -0.06732917] ... [ 1.7322726 0.67505944 2.0085168 ... 0.22581366 -0.00519433 0. ] [ 0. -0.8347179 -1.3643047 ... -0.75262165 -0.05462843 0.20654655] [-1.3643047 0.6635368 -0.19099188 ... 0.20654655 -0.07658069 0. ]] [[ 0. 0. 0. ... -0.28028473 1.4756432 -0.16083094] [ 0. 0. 0. ... -0.16083094 0.03496349 0. ] [ 0. -0.0320334 -0.21255346 ... -1.8402741 0.64746803 0.45426413] ... [-0.9063964 -0.19206214 -1.4803907 ... 2.1916628 0.6217226 0. ] [ 0. 1.3130785 -0.7928715 ... 1.2663572 0.4304424 -2.3776226 ] [-0.7928715 0.3296288 -0.4896926 ... -2.3776226 -0.49242935 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.26615593 -0.462052 -1.0498095 ] [ 0. 0. 0. ... -1.0498095 -2.4453862 0. ] [ 0. 1.9839742 0.5935734 ... -0.5626871 0.4044637 0.5248373 ] ... [-0.81602186 0.50320584 0.99526775 ... 0.19170648 0.7376679 0. ] [ 0. 0.5821365 -1.1629308 ... 0.4133235 0.8389125 0.07211459] [-1.1629308 -1.4487438 0.10886475 ... 0.07211459 0.37106258 0. ]] [[ 0. 0. 0. ... -0.3499903 0.1197098 -0.3086755 ] [ 0. 0. 0. ... -0.3086755 1.879153 0. ] [ 0. -1.1364399 0.38214746 ... 0.05931022 -0.81383544 1.1081095 ] ... [ 0.38384604 2.6826715 1.4368373 ... -1.0109357 1.5588948 0. ] [ 0. 0.89526504 2.184282 ... -1.1176299 0.22360481 -1.777265 ] [ 2.184282 -1.4925916 -0.11739898 ... -1.777265 0.27828446 0. ]] [[ 0. 0. 0. ... 0.40741846 1.1917396 2.0003436 ] [ 0. 0. 0. ... 2.0003436 -0.06018327 0. ] [ 0. 0.14201593 -0.70193195 ... -0.06026362 -0.96564466 1.3475428 ] ... [-0.15145615 0.86535966 -1.00476 ... 0.07858864 1.2355726 0. ] [ 0. -0.9892913 1.4617437 ... 0.0853509 -1.2988883 0.9767869 ] [ 1.4617437 1.1819719 -1.4737607 ... 0.9767869 -1.2569995 0. ]] ... [[ 0. 0. 0. ... 1.0657406 0.7511433 -0.8193329 ] [ 0. 0. 0. ... -0.8193329 -0.1360911 0. ] [ 0. 0.7303265 1.1514689 ... 1.700382 0.27476096 -1.5925341 ] ... [ 0.44422168 0.11551108 1.1650032 ... 0.3719129 0.95084256 0. ] [ 0. 
-0.21199888 0.7994715 ... 0.9708771 -0.05868831 2.113385 ] [ 0.7994715 -1.1652337 -0.7801526 ... 2.113385 0.9402439 0. ]] [[ 0. 0. 0. ... 0.95421195 -1.7551236 -0.6870886 ] [ 0. 0. 0. ... -0.6870886 -1.1052812 0. ] [ 0. -0.2855908 -0.3412239 ... 0.08678132 -1.031746 -0.06732917] ... [ 1.7322726 0.67505944 2.0085168 ... 0.22581366 -0.00519433 0. ] [ 0. -0.8347179 -1.3643047 ... -0.75262165 -0.05462843 0.20654655] [-1.3643047 0.6635368 -0.19099188 ... 0.20654655 -0.07658069 0. ]] [[ 0. 0. 0. ... -0.28028473 1.4756432 -0.16083094] [ 0. 0. 0. ... -0.16083094 0.03496349 0. ] [ 0. -0.0320334 -0.21255346 ... -1.8402741 0.64746803 0.45426413] ... [-0.9063964 -0.19206214 -1.4803907 ... 2.1916628 0.6217226 0. ] [ 0. 1.3130785 -0.7928715 ... 1.2663572 0.4304424 -2.3776226 ] [-0.7928715 0.3296288 -0.4896926 ... -2.3776226 -0.49242935 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5045.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 1.5716718 -0.49343738 -0.03264422] [ 0. 0. 0. ... -0.03264422 -1.3901601 1.282564 ] [ 0. 0. 0. ... 1.282564 -1.2621553 0. ] ... [ 0. 0.6289808 1.2440022 ... -0.550951 1.891521 0.41140297] [ 1.2440022 -1.9157286 -1.8437706 ... 0.41140297 0.02562747 0.25155056] [-1.8437706 -0.11303795 0.3162236 ... 0.25155056 2.3201988 0. ]] [[ 0. 0. 0. ... 1.5940644 -1.1117692 1.3581593 ] [ 0. 0. 0. ... 1.3581593 -0.32100886 0.57821465] [ 0. 0. 0. ... 0.57821465 -0.3574114 0. ] ... [ 0. -0.57571876 -0.57389575 ... 0.8375274 -0.9416065 0.21328391] [-0.57389575 0.29883525 1.509827 ... 0.21328391 0.3868156 1.2310958 ] [ 1.509827 1.1978304 0.64540917 ... 1.2310958 -1.2909658 0. ]] [[ 0. 0. 0. ... -0.24414906 0.41861135 0.30083635] [ 0. 0. 0. ... 0.30083635 1.0381459 0.34431922] [ 0. 0. 0. ... 0.34431922 0.22667588 0. ] ... [ 0. 0.0780418 0.6697554 ... -1.2876824 0.35960197 -0.1744606 ] [ 0.6697554 -0.44036084 -0.21210407 ... -0.1744606 -0.88935024 -0.7984177 ] [-0.21210407 -1.0177246 -0.45506415 ... -0.7984177 -0.5473659 0. ]] ... [[ 0. 0. 0. ... -0.9590408 0.06943256 1.257557 ] [ 0. 0. 0. ... 1.257557 -1.133546 1.0539838 ] [ 0. 0. 0. ... 1.0539838 -0.3365275 0. ] ... [ 0. 1.7565514 -2.0121899 ... 0.1381928 -0.6897539 0.4454237 ] [-2.0121899 0.7696518 -0.62699115 ... 0.4454237 -0.8285326 -0.53213245] [-0.62699115 0.78292304 1.5445218 ... -0.53213245 0.753705 0. ]] [[ 0. 0. 0. ... -2.1842558 -0.2009801 0.25618374] [ 0. 0. 0. ... 
0.25618374 0.16090873 -0.03470557] [ 0. 0. 0. ... -0.03470557 0.5005841 0. ] ... [ 0. -0.44435468 -1.6150547 ... 1.1844299 -0.05244408 0.490525 ] [-1.6150547 -0.6216209 0.29396096 ... 0.490525 -0.19723424 0.01487899] [ 0.29396096 0.35171914 0.6253493 ... 0.01487899 -1.3645748 0. ]] [[ 0. 0. 0. ... 0.05740187 -1.1038575 1.1929 ] [ 0. 0. 0. ... 1.1929 -0.4438451 -0.95351183] [ 0. 0. 0. ... -0.95351183 2.6567311 0. ] ... [ 0. 0.5933374 -0.06405547 ... 0.17411913 0.21140964 1.3303552 ] [-0.06405547 -1.830939 -0.5160087 ... 1.3303552 -0.7602942 0.331226 ] [-0.5160087 -0.9592831 -0.72932374 ... 0.331226 -0.96789956 0. ]]]; ov_res: [[[ 0. 0. 0. ... 1.5716718 -0.49343738 -0.03264422] [ 0. 0. 0. ... -0.03264422 -1.3901601 1.282564 ] [ 0. 0. 0. ... 1.282564 -1.2621553 0. ] ... [ 0. 0.6289808 1.2440022 ... -0.550951 1.891521 0.41140297] [ 1.2440022 -1.9157286 -1.8437706 ... 0.41140297 0.02562747 0.25155056] [-1.8437706 -0.11303795 0.3162236 ... 0.25155056 2.3201988 0. ]] [[ 0. 0. 0. ... 1.5940644 -1.1117692 1.3581593 ] [ 0. 0. 0. ... 1.3581593 -0.32100886 0.57821465] [ 0. 0. 0. ... 0.57821465 -0.3574114 0. ] ... [ 0. -0.57571876 -0.57389575 ... 0.8375274 -0.9416065 0.21328391] [-0.57389575 0.29883525 1.509827 ... 0.21328391 0.3868156 1.2310958 ] [ 1.509827 1.1978304 0.64540917 ... 1.2310958 -1.2909658 0. ]] [[ 0. 0. 0. ... -0.24414906 0.41861135 0.30083635] [ 0. 0. 0. ... 0.30083635 1.0381459 0.34431922] [ 0. 0. 0. ... 0.34431922 0.22667588 0. ] ... [ 0. 0.0780418 0.6697554 ... -1.2876824 0.35960197 -0.1744606 ] [ 0.6697554 -0.44036084 -0.21210407 ... -0.1744606 -0.88935024 -0.7984177 ] [-0.21210407 -1.0177246 -0.45506415 ... -0.7984177 -0.5473659 0. ]] ... [[ 0. 0. 0. ... -0.9590408 0.06943256 1.257557 ] [ 0. 0. 0. ... 1.257557 -1.133546 1.0539838 ] [ 0. 0. 0. ... 1.0539838 -0.3365275 0. ] ... [ 0. 1.7565514 -2.0121899 ... 0.1381928 -0.6897539 0.4454237 ] [-2.0121899 0.7696518 -0.62699115 ... 0.4454237 -0.8285326 -0.53213245] [-0.62699115 0.78292304 1.5445218 ... 
-0.53213245 0.753705 0. ]] [[ 0. 0. 0. ... -2.1842558 -0.2009801 0.25618374] [ 0. 0. 0. ... 0.25618374 0.16090873 -0.03470557] [ 0. 0. 0. ... -0.03470557 0.5005841 0. ] ... [ 0. -0.44435468 -1.6150547 ... 1.1844299 -0.05244408 0.490525 ] [-1.6150547 -0.6216209 0.29396096 ... 0.490525 -0.19723424 0.01487899] [ 0.29396096 0.35171914 0.6253493 ... 0.01487899 -1.3645748 0. ]] [[ 0. 0. 0. ... 0.05740187 -1.1038575 1.1929 ] [ 0. 0. 0. ... 1.1929 -0.4438451 -0.95351183] [ 0. 0. 0. ... -0.95351183 2.6567311 0. ] ... [ 0. 0.5933374 -0.06405547 ... 0.17411913 0.21140964 1.3303552 ] [-0.06405547 -1.830939 -0.5160087 ... 1.3303552 -0.7602942 0.331226 ] [-0.5160087 -0.9592831 -0.72932374 ... 0.331226 -0.96789956 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5047.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 8.59287083e-01 8.98106873e-01 5.56290388e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 5.56290388e-01 1.77221060e+00 0.00000000e+00] [ 0.00000000e+00 -5.38086355e-01 1.06019413e+00 ... 7.99550414e-02 -7.21612990e-01 -4.27009463e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -5.08133136e-02 2.81950347e-02 0.00000000e+00] [ 0.00000000e+00 2.61472136e-01 -6.71439946e-01 ... 1.76107633e+00 -1.62248120e-01 -4.70638275e-01] [-6.71439946e-01 -6.53057754e-01 1.15425444e+00 ... -4.70638275e-01 -8.26135933e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.62070990e-01 9.24341142e-01 1.86757779e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.86757779e+00 -1.73892066e-01 0.00000000e+00] [ 0.00000000e+00 -2.18646789e+00 -4.26641166e-01 ... 2.18841958e+00 -1.41678989e+00 1.53610075e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 9.44954097e-01 2.05421686e+00 0.00000000e+00] [ 0.00000000e+00 2.41528884e-01 9.44752097e-01 ... 4.34407175e-01 6.90404713e-01 9.53205347e-01] [ 9.44752097e-01 1.44751060e+00 -1.26032794e+00 ... 9.53205347e-01 5.63774645e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -7.32214004e-03 8.87731910e-01 2.99842268e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 2.99842268e-01 1.14289010e+00 0.00000000e+00] [ 0.00000000e+00 -3.43323290e-01 -6.39356971e-01 ... 
-1.83362380e-01 -2.30395412e+00 7.10130334e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.97441995e-01 -4.42691952e-01 0.00000000e+00] [ 0.00000000e+00 -2.08369806e-01 9.82816160e-01 ... -7.17969894e-01 9.03757140e-02 7.69354522e-01] [ 9.82816160e-01 1.75245798e+00 -1.55739510e+00 ... 7.69354522e-01 -1.99119478e-01 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -8.24273288e-01 1.54281994e-02 -6.83921278e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.83921278e-02 1.70119762e+00 0.00000000e+00] [ 0.00000000e+00 -1.29175365e+00 2.08412600e+00 ... -1.01859081e+00 -2.06147000e-01 -1.39343619e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 2.97768593e-01 -1.37296247e+00 0.00000000e+00] [ 0.00000000e+00 -3.87331605e-01 -1.14520884e+00 ... -1.04352558e+00 2.64545202e-01 -9.38625276e-01] [-1.14520884e+00 1.98125792e+00 -1.78967714e-01 ... -9.38625276e-01 -1.49920881e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.85203063e+00 -1.94920048e-01 -7.17712998e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -7.17712998e-01 4.18797791e-01 0.00000000e+00] [ 0.00000000e+00 1.45008028e+00 -2.49289230e-01 ... -5.85304433e-03 1.01466991e-01 -8.76418173e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -8.31266105e-01 -2.03099787e-01 0.00000000e+00] [ 0.00000000e+00 -1.80795467e+00 -1.95069890e-03 ... -7.66269088e-01 8.93390298e-01 9.54170153e-02] [-1.95069890e-03 3.47070880e-02 -3.22705388e-01 ... 9.54170153e-02 2.57344812e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.60762340e-02 -1.17120039e+00 8.15946639e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 8.15946639e-01 5.14595032e-01 0.00000000e+00] [ 0.00000000e+00 -4.90905225e-01 -1.59200537e+00 ... 2.43690085e+00 2.74509430e-01 1.64204133e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 
7.27643251e-01 8.10945213e-01 0.00000000e+00] [ 0.00000000e+00 -2.24856451e-01 -1.05839744e-01 ... -1.61378157e+00 -5.03516674e-01 -1.51539874e+00] [-1.05839744e-01 1.36186612e+00 1.04310080e-01 ... -1.51539874e+00 6.46211028e-01 0.00000000e+00]]]; ov_res: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 8.59287083e-01 8.98106873e-01 5.56290388e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 5.56290388e-01 1.77221060e+00 0.00000000e+00] [ 0.00000000e+00 -5.38086355e-01 1.06019413e+00 ... 7.99550414e-02 -7.21612990e-01 -4.27009463e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -5.08133136e-02 2.81950347e-02 0.00000000e+00] [ 0.00000000e+00 2.61472136e-01 -6.71439946e-01 ... 1.76107633e+00 -1.62248120e-01 -4.70638275e-01] [-6.71439946e-01 -6.53057754e-01 1.15425444e+00 ... -4.70638275e-01 -8.26135933e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.62070990e-01 9.24341142e-01 1.86757779e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.86757779e+00 -1.73892066e-01 0.00000000e+00] [ 0.00000000e+00 -2.18646789e+00 -4.26641166e-01 ... 2.18841958e+00 -1.41678989e+00 1.53610075e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 9.44954097e-01 2.05421686e+00 0.00000000e+00] [ 0.00000000e+00 2.41528884e-01 9.44752097e-01 ... 4.34407175e-01 6.90404713e-01 9.53205347e-01] [ 9.44752097e-01 1.44751060e+00 -1.26032794e+00 ... 9.53205347e-01 5.63774645e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -7.32214004e-03 8.87731910e-01 2.99842268e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 2.99842268e-01 1.14289010e+00 0.00000000e+00] [ 0.00000000e+00 -3.43323290e-01 -6.39356971e-01 ... -1.83362380e-01 -2.30395412e+00 7.10130334e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.97441995e-01 -4.42691952e-01 0.00000000e+00] [ 0.00000000e+00 -2.08369806e-01 9.82816160e-01 ... 
-7.17969894e-01 9.03757140e-02 7.69354522e-01] [ 9.82816160e-01 1.75245798e+00 -1.55739510e+00 ... 7.69354522e-01 -1.99119478e-01 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -8.24273288e-01 1.54281994e-02 -6.83921278e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.83921278e-02 1.70119762e+00 0.00000000e+00] [ 0.00000000e+00 -1.29175365e+00 2.08412600e+00 ... -1.01859081e+00 -2.06147000e-01 -1.39343619e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 2.97768593e-01 -1.37296247e+00 0.00000000e+00] [ 0.00000000e+00 -3.87331605e-01 -1.14520884e+00 ... -1.04352558e+00 2.64545202e-01 -9.38625276e-01] [-1.14520884e+00 1.98125792e+00 -1.78967714e-01 ... -9.38625276e-01 -1.49920881e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.85203063e+00 -1.94920048e-01 -7.17712998e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -7.17712998e-01 4.18797791e-01 0.00000000e+00] [ 0.00000000e+00 1.45008028e+00 -2.49289230e-01 ... -5.85304433e-03 1.01466991e-01 -8.76418173e-01] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -8.31266105e-01 -2.03099787e-01 0.00000000e+00] [ 0.00000000e+00 -1.80795467e+00 -1.95069890e-03 ... -7.66269088e-01 8.93390298e-01 9.54170153e-02] [-1.95069890e-03 3.47070880e-02 -3.22705388e-01 ... 9.54170153e-02 2.57344812e-01 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.60762340e-02 -1.17120039e+00 8.15946639e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 8.15946639e-01 5.14595032e-01 0.00000000e+00] [ 0.00000000e+00 -4.90905225e-01 -1.59200537e+00 ... 2.43690085e+00 2.74509430e-01 1.64204133e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.27643251e-01 8.10945213e-01 0.00000000e+00] [ 0.00000000e+00 -2.24856451e-01 -1.05839744e-01 ... -1.61378157e+00 -5.03516674e-01 -1.51539874e+00] [-1.05839744e-01 1.36186612e+00 1.04310080e-01 ... -1.51539874e+00 6.46211028e-01 0.00000000e+00]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:2 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5049.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.08971988 -0.15629645 0. ] [ 0. 0. 0. ... 1.0103261 -2.1395657 0. ] [ 0. 0. 0. ... 1.9500151 1.3758967 0. ]] [[ 0. 0. 0. ... -0.23886229 -0.1220815 0. ] [ 0. 0. 0. ... 0.07866313 -0.5220155 0. ] [ 0. 0. 0. ... 0.24307008 0.03654374 0. ]] [[ 0. 0. 0. ... -2.857818 0.7657866 0. ] [ 0. 0. 0. ... 0.14583017 0.05023201 0. ] [ 0. 0. 0. ... 0.08710185 2.093296 0. ]] ... [[ 0. 0. 0. ... 0.18080056 1.675752 0. ] [ 0. 0. 0. ... -0.18997133 -0.4687862 0. ] [ 0. 0. 0. ... 0.0065356 1.0050882 0. ]] [[ 0. 0. 0. ... 0.6217555 0.56963676 0. ] [ 0. 0. 0. ... -0.53273886 -0.14027835 0. ] [ 0. 0. 0. ... 0.34603366 -1.5606211 0. ]] [[ 0. 0. 0. ... -0.4878729 -0.1427475 0. ] [ 0. 0. 0. ... 1.0879983 -1.4942125 0. ] [ 0. 0. 0. ... -0.6831185 0.31504124 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.08971988 -0.15629645 0. ] [ 0. 0. 0. ... 1.0103261 -2.1395657 0. ] [ 0. 0. 0. ... 1.9500151 1.3758967 0. ]] [[ 0. 0. 0. ... -0.23886229 -0.1220815 0. ] [ 0. 0. 0. ... 0.07866313 -0.5220155 0. ] [ 0. 0. 0. ... 0.24307008 0.03654374 0. ]] [[ 0. 0. 0. ... -2.857818 0.7657866 0. ] [ 0. 0. 0. ... 0.14583017 0.05023201 0. ] [ 0. 0. 0. ... 0.08710185 2.093296 0. ]] ... [[ 0. 0. 0. ... 0.18080056 1.675752 0. ] [ 0. 0. 0. ... -0.18997133 -0.4687862 0. ] [ 0. 0. 0. ... 0.0065356 1.0050882 0. ]] [[ 0. 0. 0. ... 0.6217555 0.56963676 0. ] [ 0. 0. 0. ... -0.53273886 -0.14027835 0. ] [ 0. 0. 0. ... 0.34603366 -1.5606211 0. ]] [[ 0. 0. 0. ... -0.4878729 -0.1427475 0. ] [ 0. 0. 0. ... 
1.0879983 -1.4942125 0. ] [ 0. 0. 0. ... -0.6831185 0.31504124 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:3 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5051.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -9.05548632e-02 1.70705926e+00 -1.11064458e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.06902766e-01 1.81094900e-01 -3.99955243e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.19315505e+00 7.72201777e-01 0.00000000e+00] ... [ 0.00000000e+00 1.57627606e+00 -7.54771888e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-1.18881529e-02 4.68169779e-01 -5.12795299e-02 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-9.31678534e-01 -7.31451750e-01 -1.05161555e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -9.55985188e-01 1.07047963e+00 -9.38277423e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -5.73357880e-01 3.46312374e-02 -3.32049191e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -5.13392627e-01 -8.09990048e-01 0.00000000e+00] ... [ 0.00000000e+00 -3.42817247e-01 5.46521366e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.11188957e-01 -5.51464856e-01 -9.81854796e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-1.19703138e+00 -6.37364835e-02 1.96600839e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -9.67790365e-01 -7.31401265e-01 3.37304264e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.43800932e-03 -2.76192307e-01 5.06556034e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 
1.08606291e+00 4.14025962e-01 0.00000000e+00] ... [ 0.00000000e+00 -1.40474904e+00 -1.66934681e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-3.98787409e-02 4.95233089e-02 -1.60475659e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 3.61007899e-01 1.31847575e-01 1.65502891e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.40636432e-01 9.37405109e-01 9.30600464e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.82095408e-01 6.29610360e-01 -4.63372558e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 3.86057317e-01 -1.52168572e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.91903979e-01 2.57508039e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-4.40365411e-02 -1.93400013e+00 -9.46816921e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-1.29223609e+00 -2.07190573e-01 1.94356352e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.30647594e-02 2.40969944e+00 -8.45156431e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.90777779e-01 -1.77024350e-01 -1.10418618e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -7.43626475e-01 1.00450778e+00 0.00000000e+00] ... [ 0.00000000e+00 2.11263657e+00 5.56878187e-02 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-6.67223394e-01 -4.89723176e-01 7.78117537e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.07670629e-01 1.69221610e-01 1.22943270e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.47551942e+00 -1.30042887e+00 2.50541687e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 3.20560902e-01 1.22820020e+00 -7.25138187e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.46824241e-01 2.65988201e-01 0.00000000e+00] ... [ 0.00000000e+00 6.81476951e-01 1.07036650e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-9.84866977e-01 -1.46396816e+00 -1.18638766e+00 ... 
0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 1.18649983e+00 -3.92001390e-01 5.41489720e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]]; ov_res: [[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -9.05548632e-02 1.70705926e+00 -1.11064458e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.06902766e-01 1.81094900e-01 -3.99955243e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.19315505e+00 7.72201777e-01 0.00000000e+00] ... [ 0.00000000e+00 1.57627606e+00 -7.54771888e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-1.18881529e-02 4.68169779e-01 -5.12795299e-02 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-9.31678534e-01 -7.31451750e-01 -1.05161555e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -9.55985188e-01 1.07047963e+00 -9.38277423e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -5.73357880e-01 3.46312374e-02 -3.32049191e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -5.13392627e-01 -8.09990048e-01 0.00000000e+00] ... [ 0.00000000e+00 -3.42817247e-01 5.46521366e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.11188957e-01 -5.51464856e-01 -9.81854796e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-1.19703138e+00 -6.37364835e-02 1.96600839e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -9.67790365e-01 -7.31401265e-01 3.37304264e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.43800932e-03 -2.76192307e-01 5.06556034e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.08606291e+00 4.14025962e-01 0.00000000e+00] ... [ 0.00000000e+00 -1.40474904e+00 -1.66934681e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-3.98787409e-02 4.95233089e-02 -1.60475659e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 3.61007899e-01 1.31847575e-01 1.65502891e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] ... 
[[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -3.40636432e-01 9.37405109e-01 9.30600464e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.82095408e-01 6.29610360e-01 -4.63372558e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 3.86057317e-01 -1.52168572e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.91903979e-01 2.57508039e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-4.40365411e-02 -1.93400013e+00 -9.46816921e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-1.29223609e+00 -2.07190573e-01 1.94356352e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 7.30647594e-02 2.40969944e+00 -8.45156431e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -1.90777779e-01 -1.77024350e-01 -1.10418618e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -7.43626475e-01 1.00450778e+00 0.00000000e+00] ... [ 0.00000000e+00 2.11263657e+00 5.56878187e-02 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-6.67223394e-01 -4.89723176e-01 7.78117537e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-2.07670629e-01 1.69221610e-01 1.22943270e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 1.47551942e+00 -1.30042887e+00 2.50541687e-01] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 3.20560902e-01 1.22820020e+00 -7.25138187e-02] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -6.46824241e-01 2.65988201e-01 0.00000000e+00] ... [ 0.00000000e+00 6.81476951e-01 1.07036650e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [-9.84866977e-01 -1.46396816e+00 -1.18638766e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 1.18649983e+00 -3.92001390e-01 5.41489720e-01 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5053.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 2.5097546e-01 -6.1306798e-01 1.2594527e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -3.0429193e-01 6.6917270e-01 0.0000000e+00] [ 0.0000000e+00 -1.6619096e+00 -1.5539296e+00 ... 1.3019531e+00 1.6913712e+00 3.1354332e-01] ... [ 1.9005820e-01 -1.3007424e+00 5.4624099e-01 ... 5.1955503e-01 -2.4328305e-01 0.0000000e+00] [ 0.0000000e+00 1.2229027e+00 1.6307468e+00 ... 9.8101825e-01 7.6933759e-01 -8.8774383e-01] [ 1.0564417e-01 -2.0274061e-01 -9.6494138e-01 ... 8.1850374e-01 -1.3623677e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 5.7432705e-01 1.5404081e-01 1.3374364e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -7.1460235e-01 2.9295015e-01 0.0000000e+00] [ 0.0000000e+00 -4.7096536e-02 -3.4529203e-01 ... 1.6537905e-01 5.4553366e-01 -2.6142055e-01] ... [-5.6087929e-01 -1.8062972e+00 6.7071974e-01 ... -1.7947340e-01 -5.2731272e-02 0.0000000e+00] [ 0.0000000e+00 1.0167818e+00 -1.2154316e+00 ... 4.6100527e-02 1.6417387e+00 9.0338057e-01] [-2.6754931e-01 1.4766200e+00 6.6538554e-01 ... -3.5707268e-01 1.4993044e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.0760348e+00 5.1150131e-01 -2.6293874e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.8142635e-03 9.0944625e-02 0.0000000e+00] [ 0.0000000e+00 -1.4922148e+00 4.0796673e-01 ... -2.0135658e+00 -1.0880820e+00 -1.6964105e+00] ... [-2.3628578e-01 -1.6929455e+00 1.3759504e-01 ... 
-4.5353466e-01 -7.6956153e-02 0.0000000e+00] [ 0.0000000e+00 -4.9404234e-01 1.3295341e+00 ... 2.8536947e+00 2.9054949e-01 -4.2159158e-01] [-9.4705231e-02 1.3437937e-01 -9.9463129e-01 ... -5.1144350e-01 2.0417912e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.1995473e+00 7.8225476e-01 1.4929081e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -3.9917958e-01 9.9964571e-01 0.0000000e+00] [ 0.0000000e+00 -1.2898576e+00 2.6259437e-01 ... 1.2201724e+00 -4.6613035e-01 -2.8190625e-01] ... [-1.0584414e+00 -9.6546823e-01 1.8985852e+00 ... -4.5467049e-01 -1.0395660e+00 0.0000000e+00] [ 0.0000000e+00 3.9131591e-01 9.6971798e-01 ... -4.3625537e-01 8.4147829e-01 1.0760480e+00] [ 1.0998268e+00 -4.6914943e-02 1.0889063e+00 ... 4.2296040e-01 1.0382156e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.1134512e+00 -2.5873685e-01 -1.5380816e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 2.7071824e+00 -9.3096602e-01 0.0000000e+00] [ 0.0000000e+00 3.1207746e-01 5.4596257e-01 ... -5.2007461e-01 -3.5040438e-02 1.6374236e-01] ... [ 7.9106271e-01 1.5597740e-01 -3.6408311e-01 ... 4.5637915e-01 6.8948507e-01 0.0000000e+00] [ 0.0000000e+00 7.3552042e-01 -6.0935032e-01 ... 1.7381306e-01 -2.5721446e-01 -1.4538016e+00] [-1.5296239e-01 1.2372462e+00 -8.9183480e-01 ... -1.3913295e+00 -5.7195783e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 6.2175041e-01 1.1798488e+00 8.2254303e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -7.5066054e-01 -1.0977758e+00 0.0000000e+00] [ 0.0000000e+00 -2.2265899e+00 2.7688387e-01 ... 1.5317495e-01 -9.7168112e-01 6.8151385e-01] ... [-1.2859658e+00 8.8979608e-01 -6.3097364e-01 ... -4.1091582e-01 2.8364241e-01 0.0000000e+00] [ 0.0000000e+00 2.1177897e-01 1.4281293e+00 ... -9.0563260e-02 6.6191316e-01 -1.0736104e+00] [-3.6706045e-01 -3.4542853e-01 1.6506488e+00 ... -2.2336723e-01 -5.2955461e-01 0.0000000e+00]]]; ov_res: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
2.5097546e-01 -6.1306798e-01 1.2594527e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -3.0429193e-01 6.6917270e-01 0.0000000e+00] [ 0.0000000e+00 -1.6619096e+00 -1.5539296e+00 ... 1.3019531e+00 1.6913712e+00 3.1354332e-01] ... [ 1.9005820e-01 -1.3007424e+00 5.4624099e-01 ... 5.1955503e-01 -2.4328305e-01 0.0000000e+00] [ 0.0000000e+00 1.2229027e+00 1.6307468e+00 ... 9.8101825e-01 7.6933759e-01 -8.8774383e-01] [ 1.0564417e-01 -2.0274061e-01 -9.6494138e-01 ... 8.1850374e-01 -1.3623677e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 5.7432705e-01 1.5404081e-01 1.3374364e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -7.1460235e-01 2.9295015e-01 0.0000000e+00] [ 0.0000000e+00 -4.7096536e-02 -3.4529203e-01 ... 1.6537905e-01 5.4553366e-01 -2.6142055e-01] ... [-5.6087929e-01 -1.8062972e+00 6.7071974e-01 ... -1.7947340e-01 -5.2731272e-02 0.0000000e+00] [ 0.0000000e+00 1.0167818e+00 -1.2154316e+00 ... 4.6100527e-02 1.6417387e+00 9.0338057e-01] [-2.6754931e-01 1.4766200e+00 6.6538554e-01 ... -3.5707268e-01 1.4993044e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.0760348e+00 5.1150131e-01 -2.6293874e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.8142635e-03 9.0944625e-02 0.0000000e+00] [ 0.0000000e+00 -1.4922148e+00 4.0796673e-01 ... -2.0135658e+00 -1.0880820e+00 -1.6964105e+00] ... [-2.3628578e-01 -1.6929455e+00 1.3759504e-01 ... -4.5353466e-01 -7.6956153e-02 0.0000000e+00] [ 0.0000000e+00 -4.9404234e-01 1.3295341e+00 ... 2.8536947e+00 2.9054949e-01 -4.2159158e-01] [-9.4705231e-02 1.3437937e-01 -9.9463129e-01 ... -5.1144350e-01 2.0417912e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.1995473e+00 7.8225476e-01 1.4929081e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -3.9917958e-01 9.9964571e-01 0.0000000e+00] [ 0.0000000e+00 -1.2898576e+00 2.6259437e-01 ... 1.2201724e+00 -4.6613035e-01 -2.8190625e-01] ... [-1.0584414e+00 -9.6546823e-01 1.8985852e+00 ... 
-4.5467049e-01 -1.0395660e+00 0.0000000e+00] [ 0.0000000e+00 3.9131591e-01 9.6971798e-01 ... -4.3625537e-01 8.4147829e-01 1.0760480e+00] [ 1.0998268e+00 -4.6914943e-02 1.0889063e+00 ... 4.2296040e-01 1.0382156e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.1134512e+00 -2.5873685e-01 -1.5380816e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 2.7071824e+00 -9.3096602e-01 0.0000000e+00] [ 0.0000000e+00 3.1207746e-01 5.4596257e-01 ... -5.2007461e-01 -3.5040438e-02 1.6374236e-01] ... [ 7.9106271e-01 1.5597740e-01 -3.6408311e-01 ... 4.5637915e-01 6.8948507e-01 0.0000000e+00] [ 0.0000000e+00 7.3552042e-01 -6.0935032e-01 ... 1.7381306e-01 -2.5721446e-01 -1.4538016e+00] [-1.5296239e-01 1.2372462e+00 -8.9183480e-01 ... -1.3913295e+00 -5.7195783e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 6.2175041e-01 1.1798488e+00 8.2254303e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -7.5066054e-01 -1.0977758e+00 0.0000000e+00] [ 0.0000000e+00 -2.2265899e+00 2.7688387e-01 ... 1.5317495e-01 -9.7168112e-01 6.8151385e-01] ... [-1.2859658e+00 8.8979608e-01 -6.3097364e-01 ... -4.1091582e-01 2.8364241e-01 0.0000000e+00] [ 0.0000000e+00 2.1177897e-01 1.4281293e+00 ... -9.0563260e-02 6.6191316e-01 -1.0736104e+00] [-3.6706045e-01 -3.4542853e-01 1.6506488e+00 ... -2.2336723e-01 -5.2955461e-01 0.0000000e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5055.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.24222156 0.08432967 -1.1846315 ] [ 0. 0. 0. ... 1.4054147 0.27298748 -2.6896305 ] [ 0. 0. 0. ... -1.712302 1.0664195 0. ] ... [ 0. 0.27178484 -1.4829171 ... -0.51444906 2.321316 0.9074426 ] [-0.49966818 -0.49152437 -0.47161606 ... 0.11124562 -1.3815562 -0.79475737] [-0.9745913 1.3219326 -0.6896441 ... -0.08570059 -0.6761008 0. ]] [[ 0. 0. 0. ... 0.96892023 -1.3888546 0.767963 ] [ 0. 0. 0. ... 0.81424975 -0.5054464 0.07146639] [ 0. 0. 0. ... -0.04665108 0.7026896 0. ] ... [ 0. -0.25608167 -0.02301596 ... 0.45565736 1.1279706 -0.3058411 ] [-2.2062628 -0.66462666 1.3891623 ... -1.0055736 -0.09296476 0.60385317] [-0.3214893 -0.4189641 0.5103964 ... -0.38965565 -0.7749123 0. ]] [[ 0. 0. 0. ... -1.9245379 -0.15984152 -0.47610512] [ 0. 0. 0. ... 1.4484749 0.52058405 -0.90534633] [ 0. 0. 0. ... -0.5053743 0.51414174 0. ] ... [ 0. -1.8714322 -0.3724594 ... 0.38887796 -0.6370807 -0.47006562] [-0.20419736 -0.10686521 -1.1291775 ... 0.0475103 0.6012523 0.64169323] [-1.1980951 -0.50448686 1.7919098 ... 0.4087644 0.7714138 0. ]] ... [[ 0. 0. 0. ... -0.6083269 1.4581974 -0.28997996] [ 0. 0. 0. ... -0.4794525 -0.6307645 0.76671076] [ 0. 0. 0. ... -0.34746525 0.00812196 0. ] ... [ 0. 0.14778684 1.2152257 ... -1.5371746 -0.96018636 -0.6895781 ] [-0.23999178 0.7506097 -0.26370007 ... 2.1363757 0.15534708 0.38532785] [-0.8511534 -1.5604502 0.98248243 ... -0.38986796 0.81928694 0. ]] [[ 0. 0. 0. ... 0.33603603 0.23135225 0.32036126] [ 0. 0. 0. ... 
-1.1766847 -0.22489075 -0.23568924] [ 0. 0. 0. ... 0.38679665 -0.9109268 0. ] ... [ 0. 1.9799086 0.5835803 ... -0.13277777 -0.8469664 0.5825423 ] [ 0.48817635 -1.0934582 -0.62062323 ... 1.5089334 -0.06616324 1.2756312 ] [-1.7512022 -0.46870986 0.9041371 ... -0.2489023 1.6291277 0. ]] [[ 0. 0. 0. ... 0.11431329 -1.1692775 0.15513156] [ 0. 0. 0. ... -2.1706192 2.6324165 -0.56694686] [ 0. 0. 0. ... 1.8905448 0.6153068 0. ] ... [ 0. -0.19635911 0.71422917 ... 0.9553934 2.1772664 -0.66077054] [-1.4410311 -0.40134135 1.1749767 ... 0.81792647 -0.6797507 -0.6821906 ] [-0.78031576 -0.8483781 -0.05934443 ... -0.81470793 -1.0320133 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.24222156 0.08432967 -1.1846315 ] [ 0. 0. 0. ... 1.4054147 0.27298748 -2.6896305 ] [ 0. 0. 0. ... -1.712302 1.0664195 0. ] ... [ 0. 0.27178484 -1.4829171 ... -0.51444906 2.321316 0.9074426 ] [-0.49966818 -0.49152437 -0.47161606 ... 0.11124562 -1.3815562 -0.79475737] [-0.9745913 1.3219326 -0.6896441 ... -0.08570059 -0.6761008 0. ]] [[ 0. 0. 0. ... 0.96892023 -1.3888546 0.767963 ] [ 0. 0. 0. ... 0.81424975 -0.5054464 0.07146639] [ 0. 0. 0. ... -0.04665108 0.7026896 0. ] ... [ 0. -0.25608167 -0.02301596 ... 0.45565736 1.1279706 -0.3058411 ] [-2.2062628 -0.66462666 1.3891623 ... -1.0055736 -0.09296476 0.60385317] [-0.3214893 -0.4189641 0.5103964 ... -0.38965565 -0.7749123 0. ]] [[ 0. 0. 0. ... -1.9245379 -0.15984152 -0.47610512] [ 0. 0. 0. ... 1.4484749 0.52058405 -0.90534633] [ 0. 0. 0. ... -0.5053743 0.51414174 0. ] ... [ 0. -1.8714322 -0.3724594 ... 0.38887796 -0.6370807 -0.47006562] [-0.20419736 -0.10686521 -1.1291775 ... 0.0475103 0.6012523 0.64169323] [-1.1980951 -0.50448686 1.7919098 ... 0.4087644 0.7714138 0. ]] ... [[ 0. 0. 0. ... -0.6083269 1.4581974 -0.28997996] [ 0. 0. 0. ... -0.4794525 -0.6307645 0.76671076] [ 0. 0. 0. ... -0.34746525 0.00812196 0. ] ... [ 0. 0.14778684 1.2152257 ... -1.5371746 -0.96018636 -0.6895781 ] [-0.23999178 0.7506097 -0.26370007 ... 
2.1363757 0.15534708 0.38532785] [-0.8511534 -1.5604502 0.98248243 ... -0.38986796 0.81928694 0. ]] [[ 0. 0. 0. ... 0.33603603 0.23135225 0.32036126] [ 0. 0. 0. ... -1.1766847 -0.22489075 -0.23568924] [ 0. 0. 0. ... 0.38679665 -0.9109268 0. ] ... [ 0. 1.9799086 0.5835803 ... -0.13277777 -0.8469664 0.5825423 ] [ 0.48817635 -1.0934582 -0.62062323 ... 1.5089334 -0.06616324 1.2756312 ] [-1.7512022 -0.46870986 0.9041371 ... -0.2489023 1.6291277 0. ]] [[ 0. 0. 0. ... 0.11431329 -1.1692775 0.15513156] [ 0. 0. 0. ... -2.1706192 2.6324165 -0.56694686] [ 0. 0. 0. ... 1.8905448 0.6153068 0. ] ... [ 0. -0.19635911 0.71422917 ... 0.9553934 2.1772664 -0.66077054] [-1.4410311 -0.40134135 1.1749767 ... 0.81792647 -0.6797507 -0.6821906 ] [-0.78031576 -0.8483781 -0.05934443 ... -0.81470793 -1.0320133 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5057.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.97093475 -1.3049785 0.361057 ] [ 0. 0. 0. ... 0.526769 0.6068467 0. ] [ 0. 1.4139013 0.07146417 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.5933584 -0.73105884 0. ] [ 0. -0.52118725 -0.4202436 ... 0. 0. 0. ] [-0.36834547 1.6820492 -1.4056002 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.2677736 1.1584741 -1.1770383 ] [ 0. 0. 0. ... -0.08742805 0.16543123 0. ] [ 0. 0.34201485 -0.44415733 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.0760086 1.1235163 0. ] [ 0. 0.53015095 -2.5745592 ... 0. 0. 0. ] [ 0.15512179 0.8365165 2.1796334 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.19921537 -0.37600175 -1.0671259 ] [ 0. 0. 0. ... -1.0907313 0.39279574 0. ] [ 0. -0.05166651 0.80169314 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.05270171 1.5103636 0. ] [ 0. 1.2478629 0.87086195 ... 0. 0. 0. ] [-0.9999616 0.6272658 0.41443312 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.514161 -1.189016 0.9507783 ] [ 0. 0. 0. ... 0.34153038 -0.31027713 0. ] [ 0. -0.9300976 -0.4912418 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -2.6316183 -1.3134234 0. ] [ 0. -1.0044689 -1.074523 ... 0. 0. 0. ] [-0.6783901 -0.5373347 1.3915557 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.523001 1.617318 0.53680545] [ 0. 0. 0. ... -0.57501984 0.14177161 0. ] [ 0. 0.03180387 -0.0842149 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.9321907 0.56741357 0. ] [ 0. 0.83762586 -0.344089 ... 0. 0. 0. ] [-1.1781293 -1.1207601 -0.93887293 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.40044162 1.093365 0.5610768 ] [ 0. 0. 0. ... 1.2077304 0.6836373 0. ] [ 0. 
-1.2041878 0.8564684 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.2940395 -2.7528007 0. ] [ 0. 2.294262 0.6447787 ... 0. 0. 0. ] [-0.0113301 -0.1149842 0.6566903 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.97093475 -1.3049785 0.361057 ] [ 0. 0. 0. ... 0.526769 0.6068467 0. ] [ 0. 1.4139013 0.07146417 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.5933584 -0.73105884 0. ] [ 0. -0.52118725 -0.4202436 ... 0. 0. 0. ] [-0.36834547 1.6820492 -1.4056002 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.2677736 1.1584741 -1.1770383 ] [ 0. 0. 0. ... -0.08742805 0.16543123 0. ] [ 0. 0.34201485 -0.44415733 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.0760086 1.1235163 0. ] [ 0. 0.53015095 -2.5745592 ... 0. 0. 0. ] [ 0.15512179 0.8365165 2.1796334 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.19921537 -0.37600175 -1.0671259 ] [ 0. 0. 0. ... -1.0907313 0.39279574 0. ] [ 0. -0.05166651 0.80169314 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.05270171 1.5103636 0. ] [ 0. 1.2478629 0.87086195 ... 0. 0. 0. ] [-0.9999616 0.6272658 0.41443312 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.514161 -1.189016 0.9507783 ] [ 0. 0. 0. ... 0.34153038 -0.31027713 0. ] [ 0. -0.9300976 -0.4912418 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -2.6316183 -1.3134234 0. ] [ 0. -1.0044689 -1.074523 ... 0. 0. 0. ] [-0.6783901 -0.5373347 1.3915557 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.523001 1.617318 0.53680545] [ 0. 0. 0. ... -0.57501984 0.14177161 0. ] [ 0. 0.03180387 -0.0842149 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.9321907 0.56741357 0. ] [ 0. 0.83762586 -0.344089 ... 0. 0. 0. ] [-1.1781293 -1.1207601 -0.93887293 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.40044162 1.093365 0.5610768 ] [ 0. 0. 0. ... 1.2077304 0.6836373 0. ] [ 0. -1.2041878 0.8564684 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.2940395 -2.7528007 0. ] [ 0. 2.294262 0.6447787 ... 0. 0. 0. ] [-0.0113301 -0.1149842 0.6566903 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:3 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5059.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.37335932 0.7289722 0. ] [ 0. 0. 0. ... -0.7147323 0.3256217 0. ] [ 0. 0. 0. ... 0.63835984 1.7605501 0. ]] [[ 0. 0. 0. ... 0.48141706 1.499938 0. ] [ 0. 0. 0. ... 2.0230296 -0.7098804 0. ] [ 0. 0. 0. ... 0.84893847 0.91900706 0. ]] [[ 0. 0. 0. ... 0.83230096 0.7790362 0. ] [ 0. 0. 0. ... -0.9085537 0.04703433 0. ] [ 0. 0. 0. ... -0.8665829 -0.20767112 0. ]] ... [[ 0. 0. 0. ... -1.23356 -1.4774172 0. ] [ 0. 0. 0. ... -0.30485278 -0.65385455 0. ] [ 0. 0. 0. ... 0.5647107 -1.403398 0. ]] [[ 0. 0. 0. ... 2.3430126 -0.6305226 0. ] [ 0. 0. 0. ... 0.14343876 0.11043935 0. ] [ 0. 0. 0. ... -0.2991415 0.24410367 0. ]] [[ 0. 0. 0. ... 1.1057289 -1.405847 0. ] [ 0. 0. 0. ... 1.7718327 -0.25918043 0. ] [ 0. 0. 0. ... 0.70324105 -1.0523849 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.37335932 0.7289722 0. ] [ 0. 0. 0. ... -0.7147323 0.3256217 0. ] [ 0. 0. 0. ... 0.63835984 1.7605501 0. ]] [[ 0. 0. 0. ... 0.48141706 1.499938 0. ] [ 0. 0. 0. ... 2.0230296 -0.7098804 0. ] [ 0. 0. 0. ... 0.84893847 0.91900706 0. ]] [[ 0. 0. 0. ... 0.83230096 0.7790362 0. ] [ 0. 0. 0. ... -0.9085537 0.04703433 0. ] [ 0. 0. 0. ... -0.8665829 -0.20767112 0. ]] ... [[ 0. 0. 0. ... -1.23356 -1.4774172 0. ] [ 0. 0. 0. ... -0.30485278 -0.65385455 0. ] [ 0. 0. 0. ... 0.5647107 -1.403398 0. ]] [[ 0. 0. 0. ... 2.3430126 -0.6305226 0. ] [ 0. 0. 0. ... 0.14343876 0.11043935 0. ] [ 0. 0. 0. ... -0.2991415 0.24410367 0. ]] [[ 0. 0. 0. ... 1.1057289 -1.405847 0. ] [ 0. 0. 0. ... 
1.7718327 -0.25918043 0. ] [ 0. 0. 0. ... 0.70324105 -1.0523849 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5061.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.5493114 -0.11195666 1.1588674 ] [ 0. 0. 0. ... 1.1588674 -0.09987027 -0.17966636] [ 0. 0. 0. ... -0.17966636 -1.7849333 0. ] ... [ 0. 0.44331244 0.04404882 ... 0. 0. 0. ] [ 0.04404882 -1.5911822 -1.3437638 ... 0. 0. 0. ] [-1.3437638 0.00449867 0.5360823 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.2737064 -0.5368909 -0.25844783] [ 0. 0. 0. ... -0.25844783 0.38952136 1.3670008 ] [ 0. 0. 0. ... 1.3670008 0.62075955 0. ] ... [ 0. 1.3277102 -0.23902276 ... 0. 0. 0. ] [-0.23902276 -1.4384339 1.7140602 ... 0. 0. 0. ] [ 1.7140602 -1.6632186 2.7503593 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.817577 0.6345944 -0.37589222] [ 0. 0. 0. ... -0.37589222 0.56607234 0.25742948] [ 0. 0. 0. ... 0.25742948 1.2427003 0. ] ... [ 0. 1.092001 1.4779665 ... 0. 0. 0. ] [ 1.4779665 -1.188812 -0.03657719 ... 0. 0. 0. ] [-0.03657719 0.20318778 -0.9872432 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.6441982 1.0098007 -0.06634019] [ 0. 0. 0. ... -0.06634019 -0.30891094 -1.0547324 ] [ 0. 0. 0. ... -1.0547324 -0.65515834 0. ] ... [ 0. -0.40519175 0.6893786 ... 0. 0. 0. ] [ 0.6893786 -0.9912578 0.04228549 ... 0. 0. 0. ] [ 0.04228549 0.8339388 0.10639974 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.38741565 -0.261103 1.35609 ] [ 0. 0. 0. ... 1.35609 -0.9248312 -0.07515367] [ 0. 0. 0. ... -0.07515367 -1.7629021 0. ] ... [ 0. 0.4279328 -0.5176556 ... 0. 0. 0. ] [-0.5176556 -0.147655 -1.8038614 ... 0. 0. 0. ] [-1.8038614 -0.5297378 0.12675926 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 
0.11620239 -0.9409866 -1.7479073 ] [ 0. 0. 0. ... -1.7479073 0.13362259 -0.9437994 ] [ 0. 0. 0. ... -0.9437994 0.1514113 0. ] ... [ 0. 1.6087319 -2.8075397 ... 0. 0. 0. ] [-2.8075397 0.9089654 -0.3902841 ... 0. 0. 0. ] [-0.3902841 0.7394315 0.02157745 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.5493114 -0.11195666 1.1588674 ] [ 0. 0. 0. ... 1.1588674 -0.09987027 -0.17966636] [ 0. 0. 0. ... -0.17966636 -1.7849333 0. ] ... [ 0. 0.44331244 0.04404882 ... 0. 0. 0. ] [ 0.04404882 -1.5911822 -1.3437638 ... 0. 0. 0. ] [-1.3437638 0.00449867 0.5360823 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.2737064 -0.5368909 -0.25844783] [ 0. 0. 0. ... -0.25844783 0.38952136 1.3670008 ] [ 0. 0. 0. ... 1.3670008 0.62075955 0. ] ... [ 0. 1.3277102 -0.23902276 ... 0. 0. 0. ] [-0.23902276 -1.4384339 1.7140602 ... 0. 0. 0. ] [ 1.7140602 -1.6632186 2.7503593 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.817577 0.6345944 -0.37589222] [ 0. 0. 0. ... -0.37589222 0.56607234 0.25742948] [ 0. 0. 0. ... 0.25742948 1.2427003 0. ] ... [ 0. 1.092001 1.4779665 ... 0. 0. 0. ] [ 1.4779665 -1.188812 -0.03657719 ... 0. 0. 0. ] [-0.03657719 0.20318778 -0.9872432 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.6441982 1.0098007 -0.06634019] [ 0. 0. 0. ... -0.06634019 -0.30891094 -1.0547324 ] [ 0. 0. 0. ... -1.0547324 -0.65515834 0. ] ... [ 0. -0.40519175 0.6893786 ... 0. 0. 0. ] [ 0.6893786 -0.9912578 0.04228549 ... 0. 0. 0. ] [ 0.04228549 0.8339388 0.10639974 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.38741565 -0.261103 1.35609 ] [ 0. 0. 0. ... 1.35609 -0.9248312 -0.07515367] [ 0. 0. 0. ... -0.07515367 -1.7629021 0. ] ... [ 0. 0.4279328 -0.5176556 ... 0. 0. 0. ] [-0.5176556 -0.147655 -1.8038614 ... 0. 0. 0. ] [-1.8038614 -0.5297378 0.12675926 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.11620239 -0.9409866 -1.7479073 ] [ 0. 0. 0. ... -1.7479073 0.13362259 -0.9437994 ] [ 0. 0. 0. ... -0.9437994 0.1514113 0. ] ... [ 0. 1.6087319 -2.8075397 ... 0. 0. 0. ] [-2.8075397 0.9089654 -0.3902841 ... 0. 0. 0. ] [-0.3902841 0.7394315 0.02157745 ... 0. 0. 0. 
]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5063.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.6761821 -0.03894885 0.02244349] [ 0. 0. 0. ... 0.02244349 -0.63316756 0. ] [ 0. 0.48462886 0.7080468 ... 2.1619368 1.7439725 -0.7623464 ] ... [ 0.05825724 -1.1012923 0.49850276 ... -1.7995691 1.1407679 0. ] [ 0. 0.5512793 -0.50469124 ... -0.66654795 0.18016154 0.03040788] [-0.50469124 1.1941338 -1.4365898 ... 0.03040788 -1.368356 0. ]] [[ 0. 0. 0. ... -0.30867994 0.9999781 1.9019378 ] [ 0. 0. 0. ... 1.9019378 1.3066318 0. ] [ 0. -0.9583416 0.7698229 ... -0.33686393 -1.1682583 0.6960502 ] ... [-0.9167501 -0.18289255 -1.8448125 ... 0.81424147 1.0519773 0. ] [ 0. 0.31673717 0.9053784 ... -0.5463587 0.6568763 -0.19078186] [ 0.9053784 0.3814039 -0.87629163 ... -0.19078186 -0.7814929 0. ]] [[ 0. 0. 0. ... 0.09032682 -0.01195386 1.674262 ] [ 0. 0. 0. ... 1.674262 -0.03624921 0. ] [ 0. 0.9564168 -0.52578396 ... 0.4513945 0.12330854 -1.3240945 ] ... [ 0.30129123 -0.04293605 1.8819243 ... 0.8000553 -0.55613464 0. ] [ 0. -0.27891523 1.0845971 ... 0.10957114 -0.03136873 0.54305774] [ 1.0845971 -1.4109132 -1.060531 ... 0.54305774 0.23644535 0. ]] ... [[ 0. 0. 0. ... -1.0084788 0.3625402 1.3618885 ] [ 0. 0. 0. ... 1.3618885 -3.1512513 0. ] [ 0. 0.0205522 0.5095193 ... -1.1676455 0.8709791 0.74054396] ... [ 0.84497213 -1.9261278 0.78242254 ... 1.9666386 0.55950195 0. ] [ 0. 0.19906327 -1.1574043 ... 1.1673387 0.40209708 -0.29043144] [-1.1574043 1.6159073 -1.6171709 ... -0.29043144 0.08401709 0. ]] [[ 0. 0. 0. ... 
1.6138645 1.3294665 -0.6989605 ] [ 0. 0. 0. ... -0.6989605 -0.20162904 0. ] [ 0. -0.06897448 -1.0869288 ... -2.438474 1.7067611 -0.9646592 ] ... [ 0.31059423 0.18901625 0.7271322 ... 0.98370206 -1.0822258 0. ] [ 0. -0.29384243 0.3557569 ... -0.6755331 0.28375223 1.7681923 ] [ 0.3557569 0.12513049 0.22388569 ... 1.7681923 2.287639 0. ]] [[ 0. 0. 0. ... 0.44926924 -1.1789296 -1.122816 ] [ 0. 0. 0. ... -1.122816 -0.3977387 0. ] [ 0. 0.96552795 -1.5785567 ... 1.5321528 -0.5048739 0.5711808 ] ... [ 1.2238655 1.2137853 0.43027118 ... -0.9119398 1.3175642 0. ] [ 0. 2.9312198 -0.04936021 ... -0.72741157 0.64315176 -0.22000225] [-0.04936021 -0.12771566 0.38936788 ... -0.22000225 -0.3492259 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.6761821 -0.03894885 0.02244349] [ 0. 0. 0. ... 0.02244349 -0.63316756 0. ] [ 0. 0.48462886 0.7080468 ... 2.1619368 1.7439725 -0.7623464 ] ... [ 0.05825724 -1.1012923 0.49850276 ... -1.7995691 1.1407679 0. ] [ 0. 0.5512793 -0.50469124 ... -0.66654795 0.18016154 0.03040788] [-0.50469124 1.1941338 -1.4365898 ... 0.03040788 -1.368356 0. ]] [[ 0. 0. 0. ... -0.30867994 0.9999781 1.9019378 ] [ 0. 0. 0. ... 1.9019378 1.3066318 0. ] [ 0. -0.9583416 0.7698229 ... -0.33686393 -1.1682583 0.6960502 ] ... [-0.9167501 -0.18289255 -1.8448125 ... 0.81424147 1.0519773 0. ] [ 0. 0.31673717 0.9053784 ... -0.5463587 0.6568763 -0.19078186] [ 0.9053784 0.3814039 -0.87629163 ... -0.19078186 -0.7814929 0. ]] [[ 0. 0. 0. ... 0.09032682 -0.01195386 1.674262 ] [ 0. 0. 0. ... 1.674262 -0.03624921 0. ] [ 0. 0.9564168 -0.52578396 ... 0.4513945 0.12330854 -1.3240945 ] ... [ 0.30129123 -0.04293605 1.8819243 ... 0.8000553 -0.55613464 0. ] [ 0. -0.27891523 1.0845971 ... 0.10957114 -0.03136873 0.54305774] [ 1.0845971 -1.4109132 -1.060531 ... 0.54305774 0.23644535 0. ]] ... [[ 0. 0. 0. ... -1.0084788 0.3625402 1.3618885 ] [ 0. 0. 0. ... 1.3618885 -3.1512513 0. ] [ 0. 0.0205522 0.5095193 ... -1.1676455 0.8709791 0.74054396] ... [ 0.84497213 -1.9261278 0.78242254 ... 1.9666386 0.55950195 0. 
] [ 0. 0.19906327 -1.1574043 ... 1.1673387 0.40209708 -0.29043144] [-1.1574043 1.6159073 -1.6171709 ... -0.29043144 0.08401709 0. ]] [[ 0. 0. 0. ... 1.6138645 1.3294665 -0.6989605 ] [ 0. 0. 0. ... -0.6989605 -0.20162904 0. ] [ 0. -0.06897448 -1.0869288 ... -2.438474 1.7067611 -0.9646592 ] ... [ 0.31059423 0.18901625 0.7271322 ... 0.98370206 -1.0822258 0. ] [ 0. -0.29384243 0.3557569 ... -0.6755331 0.28375223 1.7681923 ] [ 0.3557569 0.12513049 0.22388569 ... 1.7681923 2.287639 0. ]] [[ 0. 0. 0. ... 0.44926924 -1.1789296 -1.122816 ] [ 0. 0. 0. ... -1.122816 -0.3977387 0. ] [ 0. 0.96552795 -1.5785567 ... 1.5321528 -0.5048739 0.5711808 ] ... [ 1.2238655 1.2137853 0.43027118 ... -0.9119398 1.3175642 0. ] [ 0. 2.9312198 -0.04936021 ... -0.72741157 0.64315176 -0.22000225] [-0.04936021 -0.12771566 0.38936788 ... -0.22000225 -0.3492259 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5065.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.6665967e+00 -9.5954913e-01 7.9398966e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 7.9398966e-01 1.7102422e-01 -1.3854275e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.3854275e+00 -4.1284114e-01 0.0000000e+00] ... [ 0.0000000e+00 8.5199213e-01 6.6140151e-01 ... 1.9099504e-02 -2.1407107e-01 -6.6342777e-01] [ 6.6140151e-01 1.4852437e+00 9.5303074e-02 ... -6.6342777e-01 3.3720934e-01 1.6461102e+00] [ 9.5303074e-02 1.8839390e+00 -5.2967733e-01 ... 1.6461102e+00 3.4799418e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.2919799e+00 -1.8327726e-01 9.3959022e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 9.3959022e-01 1.3225594e+00 5.9002560e-02] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 5.9002560e-02 1.0622801e+00 0.0000000e+00] ... [ 0.0000000e+00 7.3374355e-01 -1.2385529e+00 ... -4.7525868e-01 -6.1799645e-01 4.0269038e-01] [-1.2385529e+00 1.9026716e+00 -1.2836509e+00 ... 4.0269038e-01 1.5221467e+00 6.7538875e-01] [-1.2836509e+00 6.0623366e-01 -5.7383776e-01 ... 6.7538875e-01 -5.0944906e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 8.6474234e-01 8.9163530e-01 1.3380118e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.3380118e-01 2.2319212e+00 -6.8914843e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.8914843e-01 -2.2322111e-01 0.0000000e+00] ... [ 0.0000000e+00 -3.0900377e-01 -1.5940204e+00 ... 
6.2843703e-02 -2.8422121e-02 -9.1094893e-01] [-1.5940204e+00 -1.2395049e+00 3.2981053e-01 ... -9.1094893e-01 1.2008631e+00 8.4416676e-01] [ 3.2981053e-01 6.7336017e-01 7.4609715e-01 ... 8.4416676e-01 3.9226702e-01 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.1000079e-01 1.4257394e+00 1.0198901e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.0198901e+00 -1.8372632e+00 1.1926752e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.1926752e+00 1.3292161e+00 0.0000000e+00] ... [ 0.0000000e+00 -6.3002002e-01 -3.9439008e-02 ... 2.9101741e-01 -3.4304013e+00 -6.9448572e-01] [-3.9439008e-02 -2.5783524e-02 -9.8957807e-01 ... -6.9448572e-01 5.8460319e-01 -2.1280618e+00] [-9.8957807e-01 -1.1526549e+00 2.5546429e-01 ... -2.1280618e+00 -2.2560349e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.9551085e-01 -6.9460863e-01 1.7400429e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.7400429e+00 -1.6282256e-01 -3.1957570e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -3.1957570e-01 -4.6064985e-01 0.0000000e+00] ... [ 0.0000000e+00 1.0831593e+00 -8.7374294e-01 ... -7.5426924e-01 -1.1698837e-01 1.6665111e-01] [-8.7374294e-01 5.9807066e-02 -4.4148764e-01 ... 1.6665111e-01 1.0790441e+00 -1.9630569e-01] [-4.4148764e-01 -1.3854911e+00 -1.1500406e+00 ... -1.9630569e-01 5.9061980e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 9.6837711e-01 -9.7268492e-01 -2.5391680e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.5391680e-01 3.9353698e-02 1.9195409e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.9195409e+00 9.7946882e-01 0.0000000e+00] ... [ 0.0000000e+00 3.1194687e+00 -1.6485147e+00 ... -3.3998270e-02 6.1884445e-01 8.7101734e-01] [-1.6485147e+00 9.3598342e-01 -2.2718392e-01 ... 8.7101734e-01 -4.2704520e-01 2.3551061e+00] [-2.2718392e-01 -2.7917318e+00 3.4477004e-01 ... 2.3551061e+00 -2.8005848e-04 0.0000000e+00]]]; ov_res: [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
-2.6665967e+00 -9.5954913e-01 7.9398966e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 7.9398966e-01 1.7102422e-01 -1.3854275e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.3854275e+00 -4.1284114e-01 0.0000000e+00] ... [ 0.0000000e+00 8.5199213e-01 6.6140151e-01 ... 1.9099504e-02 -2.1407107e-01 -6.6342777e-01] [ 6.6140151e-01 1.4852437e+00 9.5303074e-02 ... -6.6342777e-01 3.3720934e-01 1.6461102e+00] [ 9.5303074e-02 1.8839390e+00 -5.2967733e-01 ... 1.6461102e+00 3.4799418e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.2919799e+00 -1.8327726e-01 9.3959022e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 9.3959022e-01 1.3225594e+00 5.9002560e-02] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 5.9002560e-02 1.0622801e+00 0.0000000e+00] ... [ 0.0000000e+00 7.3374355e-01 -1.2385529e+00 ... -4.7525868e-01 -6.1799645e-01 4.0269038e-01] [-1.2385529e+00 1.9026716e+00 -1.2836509e+00 ... 4.0269038e-01 1.5221467e+00 6.7538875e-01] [-1.2836509e+00 6.0623366e-01 -5.7383776e-01 ... 6.7538875e-01 -5.0944906e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 8.6474234e-01 8.9163530e-01 1.3380118e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.3380118e-01 2.2319212e+00 -6.8914843e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -6.8914843e-01 -2.2322111e-01 0.0000000e+00] ... [ 0.0000000e+00 -3.0900377e-01 -1.5940204e+00 ... 6.2843703e-02 -2.8422121e-02 -9.1094893e-01] [-1.5940204e+00 -1.2395049e+00 3.2981053e-01 ... -9.1094893e-01 1.2008631e+00 8.4416676e-01] [ 3.2981053e-01 6.7336017e-01 7.4609715e-01 ... 8.4416676e-01 3.9226702e-01 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.1000079e-01 1.4257394e+00 1.0198901e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.0198901e+00 -1.8372632e+00 1.1926752e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.1926752e+00 1.3292161e+00 0.0000000e+00] ... [ 0.0000000e+00 -6.3002002e-01 -3.9439008e-02 ... 
2.9101741e-01 -3.4304013e+00 -6.9448572e-01] [-3.9439008e-02 -2.5783524e-02 -9.8957807e-01 ... -6.9448572e-01 5.8460319e-01 -2.1280618e+00] [-9.8957807e-01 -1.1526549e+00 2.5546429e-01 ... -2.1280618e+00 -2.2560349e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -1.9551085e-01 -6.9460863e-01 1.7400429e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.7400429e+00 -1.6282256e-01 -3.1957570e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -3.1957570e-01 -4.6064985e-01 0.0000000e+00] ... [ 0.0000000e+00 1.0831593e+00 -8.7374294e-01 ... -7.5426924e-01 -1.1698837e-01 1.6665111e-01] [-8.7374294e-01 5.9807066e-02 -4.4148764e-01 ... 1.6665111e-01 1.0790441e+00 -1.9630569e-01] [-4.4148764e-01 -1.3854911e+00 -1.1500406e+00 ... -1.9630569e-01 5.9061980e-01 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 9.6837711e-01 -9.7268492e-01 -2.5391680e-01] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... -2.5391680e-01 3.9353698e-02 1.9195409e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 1.9195409e+00 9.7946882e-01 0.0000000e+00] ... [ 0.0000000e+00 3.1194687e+00 -1.6485147e+00 ... -3.3998270e-02 6.1884445e-01 8.7101734e-01] [-1.6485147e+00 9.3598342e-01 -2.2718392e-01 ... 8.7101734e-01 -4.2704520e-01 2.3551061e+00] [-2.2718392e-01 -2.7917318e+00 3.4477004e-01 ... 2.3551061e+00 -2.8005848e-04 0.0000000e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5067.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %3, %2, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.14192277 -1.2346324 0.09692186] [ 0. 0. 0. ... 0.09692186 -2.093493 0. ] [ 0. -1.8882383 0.42620283 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.14198737 1.8584645 0. ] [ 0. 0.18682832 -0.41852352 ... 0. 0. 0. ] [-0.41852352 -1.3783604 -2.162825 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1364732 -0.03722928 0.7323683 ] [ 0. 0. 0. ... 0.7323683 -0.5060551 0. ] [ 0. 1.8731207 -0.08922175 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.1617289 1.3024527 0. ] [ 0. 0.09331683 0.5114853 ... 0. 0. 0. ] [ 0.5114853 0.67121327 1.3449243 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.2915188 0.33237186 -2.2574277 ] [ 0. 0. 0. ... -2.2574277 -0.8922245 0. ] [ 0. 1.3396327 -2.1208577 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.6563237 0.9162827 0. ] [ 0. -1.076609 -1.1594971 ... 0. 0. 0. ] [-1.1594971 -0.05329916 0.21206975 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.2463614 0.1268134 -0.22289835] [ 0. 0. 0. ... -0.22289835 1.0843177 0. ] [ 0. 0.16517255 0.03627713 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.6981213 -1.1790298 0. ] [ 0. -0.55264443 -0.47097293 ... 0. 0. 0. ] [-0.47097293 -0.14773709 0.31641597 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1867077 1.4759333 -0.09806182] [ 0. 0. 0. ... -0.09806182 0.39772728 0. ] [ 0. 0.9207625 0.12238379 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.4619012 -1.0312293 0. ] [ 0. 0.32901058 0.21206605 ... 0. 0. 0. ] [ 0.21206605 0.04803167 0.2855795 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.3245252 -0.44521847 0.94463515] [ 0. 0. 0. ... 0.94463515 0.5493857 0. ] [ 0. 
-1.140358 1.7854317 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.1506134 -0.6436726 0. ] [ 0. 0.13916376 -0.8457549 ... 0. 0. 0. ] [-0.8457549 2.3000395 -0.2054677 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.14192277 -1.2346324 0.09692186] [ 0. 0. 0. ... 0.09692186 -2.093493 0. ] [ 0. -1.8882383 0.42620283 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.14198737 1.8584645 0. ] [ 0. 0.18682832 -0.41852352 ... 0. 0. 0. ] [-0.41852352 -1.3783604 -2.162825 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1364732 -0.03722928 0.7323683 ] [ 0. 0. 0. ... 0.7323683 -0.5060551 0. ] [ 0. 1.8731207 -0.08922175 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.1617289 1.3024527 0. ] [ 0. 0.09331683 0.5114853 ... 0. 0. 0. ] [ 0.5114853 0.67121327 1.3449243 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.2915188 0.33237186 -2.2574277 ] [ 0. 0. 0. ... -2.2574277 -0.8922245 0. ] [ 0. 1.3396327 -2.1208577 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.6563237 0.9162827 0. ] [ 0. -1.076609 -1.1594971 ... 0. 0. 0. ] [-1.1594971 -0.05329916 0.21206975 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.2463614 0.1268134 -0.22289835] [ 0. 0. 0. ... -0.22289835 1.0843177 0. ] [ 0. 0.16517255 0.03627713 ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0.6981213 -1.1790298 0. ] [ 0. -0.55264443 -0.47097293 ... 0. 0. 0. ] [-0.47097293 -0.14773709 0.31641597 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.1867077 1.4759333 -0.09806182] [ 0. 0. 0. ... -0.09806182 0.39772728 0. ] [ 0. 0.9207625 0.12238379 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -0.4619012 -1.0312293 0. ] [ 0. 0.32901058 0.21206605 ... 0. 0. 0. ] [ 0.21206605 0.04803167 0.2855795 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.3245252 -0.44521847 0.94463515] [ 0. 0. 0. ... 0.94463515 0.5493857 0. ] [ 0. -1.140358 1.7854317 ... 0. 0. 0. ] ... [ 0. 0. 0. ... -1.1506134 -0.6436726 0. ] [ 0. 0.13916376 -0.8457549 ... 0. 0. 0. ] [-0.8457549 2.3000395 -0.2054677 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:1 - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5069.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.8207499 -0.7039162 0. ] [ 0. 0. 0. ... -1.1420805 0.3690603 0. ] [ 0. 0. 0. ... -0.474193 -1.0463082 0. ]] [[ 0. 0. 0. ... -0.8079391 -0.4002071 0. ] [ 0. 0. 0. ... -1.5894715 -0.28847814 0. ] [ 0. 0. 0. ... 0.00936903 0.20865977 0. ]] [[ 0. 0. 0. ... -0.23087525 -0.43749353 0. ] [ 0. 0. 0. ... 1.6260561 -0.42484897 0. ] [ 0. 0. 0. ... 1.4332497 0.41297153 0. ]] ... [[ 0. 0. 0. ... 1.4460721 1.4870292 0. ] [ 0. 0. 0. ... 0.21219322 0.21299027 0. ] [ 0. 0. 0. ... 0.5726987 -0.73320764 0. ]] [[ 0. 0. 0. ... -0.49043396 0.49081302 0. ] [ 0. 0. 0. ... 0.6497869 0.03392568 0. ] [ 0. 0. 0. ... 0.3283151 1.5797222 0. ]] [[ 0. 0. 0. ... -1.6313359 -1.3480948 0. ] [ 0. 0. 0. ... 1.7541565 2.0566401 0. ] [ 0. 0. 0. ... 1.4845175 -0.14359294 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.8207499 -0.7039162 0. ] [ 0. 0. 0. ... -1.1420805 0.3690603 0. ] [ 0. 0. 0. ... -0.474193 -1.0463082 0. ]] [[ 0. 0. 0. ... -0.8079391 -0.4002071 0. ] [ 0. 0. 0. ... -1.5894715 -0.28847814 0. ] [ 0. 0. 0. ... 0.00936903 0.20865977 0. ]] [[ 0. 0. 0. ... -0.23087525 -0.43749353 0. ] [ 0. 0. 0. ... 1.6260561 -0.42484897 0. ] [ 0. 0. 0. ... 1.4332497 0.41297153 0. ]] ... [[ 0. 0. 0. ... 1.4460721 1.4870292 0. ] [ 0. 0. 0. ... 0.21219322 0.21299027 0. ] [ 0. 0. 0. ... 0.5726987 -0.73320764 0. ]] [[ 0. 0. 0. ... -0.49043396 0.49081302 0. ] [ 0. 0. 0. ... 0.6497869 0.03392568 0. ] [ 0. 0. 0. ... 0.3283151 1.5797222 0. ]] [[ 0. 0. 0. ... -1.6313359 -1.3480948 0. ] [ 0. 0. 0. ... 
1.7541565 2.0566401 0. ] [ 0. 0. 0. ... 1.4845175 -0.14359294 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:1 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5071.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 
0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:1 - kernel_size:[3, 2] ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5073.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.52318245 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.54291636 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.63397413 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.91248584 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.81298035 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.36126435 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.39725763 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.22036727 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.1305396 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.6230792 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.94535494 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.34556255 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.02029576 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 2.0843313 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.200854 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.81152314 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.809312 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.3436154 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.52318245 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.54291636 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.63397413 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.91248584 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.81298035 0. 0. ] ... [ 0. 0. 0. ... 
0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.36126435 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.39725763 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.22036727 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.1305396 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.6230792 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.94535494 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.34556255 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.02029576 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 2.0843313 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -1.200854 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.81152314 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.809312 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.3436154 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:1 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5075.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 1.5498399 -0.35198474 0. ] [ 0. 0. 0. ... -0.35198474 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.98705834 ... 0. 0. 0. ] [ 0. -0.98705834 -0.3678706 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9154312 -1.3780274 0. ] [ 0. 0. 0. ... -1.3780274 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.51151145 ... 0. 0. 0. ] [ 0. 0.51151145 -1.3205787 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.35494506 1.6057167 0. ] [ 0. 0. 0. ... 1.6057167 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.8198524 ... 0. 0. 0. ] [ 0. 1.8198524 -0.7150016 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.22707117 0.09878182 0. ] [ 0. 0. 0. ... 0.09878182 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.13081694 ... 0. 0. 0. ] [ 0. 0.13081694 0.88014895 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.18927859 1.0325663 0. ] [ 0. 0. 0. ... 1.0325663 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.204608 ... 0. 0. 0. ] [ 0. 1.204608 -2.7595906 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.16074705 -2.1281195 0. ] [ 0. 0. 0. ... -2.1281195 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -2.024685 ... 0. 0. 0. ] [ 0. -2.024685 0.5404511 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 1.5498399 -0.35198474 0. ] [ 0. 0. 0. ... -0.35198474 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -0.98705834 ... 0. 0. 0. 
] [ 0. -0.98705834 -0.3678706 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9154312 -1.3780274 0. ] [ 0. 0. 0. ... -1.3780274 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.51151145 ... 0. 0. 0. ] [ 0. 0.51151145 -1.3205787 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.35494506 1.6057167 0. ] [ 0. 0. 0. ... 1.6057167 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.8198524 ... 0. 0. 0. ] [ 0. 1.8198524 -0.7150016 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.22707117 0.09878182 0. ] [ 0. 0. 0. ... 0.09878182 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0.13081694 ... 0. 0. 0. ] [ 0. 0.13081694 0.88014895 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.18927859 1.0325663 0. ] [ 0. 0. 0. ... 1.0325663 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 1.204608 ... 0. 0. 0. ] [ 0. 1.204608 -2.7595906 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.16074705 -2.1281195 0. ] [ 0. 0. 0. ... -2.1281195 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. -2.024685 ... 0. 0. 0. ] [ 0. -2.024685 0.5404511 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:1 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5077.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 
0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:1 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5079.aten_im2col, %x.1 : Tensor): %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:2 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5081.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -0.13802195 -0.2661181 -1.1728506 ] [ 0. 0. 0. ... -1.1728506 -0.81510866 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.0935729 0.26190683 ... 0. 0. 0. ] [ 0.26190683 0.15343064 -0.53281647 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3297298 -0.2540551 -1.9456952 ] [ 0. 0. 0. ... -1.9456952 0.6802879 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.3691456 0.09213562 ... 0. 0. 0. ] [ 0.09213562 -0.39818007 -2.4951596 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.20244911 -0.52735585 -1.0675808 ] [ 0. 0. 0. ... -1.0675808 0.14540215 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.7890045 0.25993812 ... 0. 0. 0. ] [ 0.25993812 0.4236173 0.6138397 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.03684345 0.13843839 1.1035206 ] [ 0. 0. 0. ... 1.1035206 0.20591815 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.137627 0.10187209 ... 0. 0. 0. ] [ 0.10187209 1.4625561 -1.0531389 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2324928 -0.4252385 1.0569752 ] [ 0. 0. 0. ... 1.0569752 -0.19180728 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.20441304 0.78730524 ... 0. 0. 0. ] [ 0.78730524 0.8090231 -0.03869765 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0222049 0.9893795 0.7664482 ] [ 0. 0. 0. ... 0.7664482 -1.618688 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.5372604 0.64902633 ... 0. 0. 0. ] [ 0.64902633 1.1867595 -0.6784558 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 
-0.13802195 -0.2661181 -1.1728506 ] [ 0. 0. 0. ... -1.1728506 -0.81510866 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.0935729 0.26190683 ... 0. 0. 0. ] [ 0.26190683 0.15343064 -0.53281647 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3297298 -0.2540551 -1.9456952 ] [ 0. 0. 0. ... -1.9456952 0.6802879 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.3691456 0.09213562 ... 0. 0. 0. ] [ 0.09213562 -0.39818007 -2.4951596 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.20244911 -0.52735585 -1.0675808 ] [ 0. 0. 0. ... -1.0675808 0.14540215 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.7890045 0.25993812 ... 0. 0. 0. ] [ 0.25993812 0.4236173 0.6138397 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.03684345 0.13843839 1.1035206 ] [ 0. 0. 0. ... 1.1035206 0.20591815 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.137627 0.10187209 ... 0. 0. 0. ] [ 0.10187209 1.4625561 -1.0531389 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2324928 -0.4252385 1.0569752 ] [ 0. 0. 0. ... 1.0569752 -0.19180728 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.20441304 0.78730524 ... 0. 0. 0. ] [ 0.78730524 0.8090231 -0.03869765 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0222049 0.9893795 0.7664482 ] [ 0. 0. 0. ... 0.7664482 -1.618688 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.5372604 0.64902633 ... 0. 0. 0. ] [ 0.64902633 1.1867595 -0.6784558 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:2 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5083.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.9274315 0.38631812 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.4785857 0.8023749 0. ] ... [ 0. 0.48699152 -0.2577169 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.7357057 -1.3728269 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.84286904 1.5274957 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.43043146 0.78844887 0. ] ... [ 0. 0.6667014 0.81786084 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.89756733 1.1690542 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0808213 0.90919703 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.7941285 -0.11850775 0. ] ... [ 0. 1.6083392 -0.51055855 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.1649194 -1.6879596 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.0302087 1.7830575 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.1372455 0.832703 0. ] ... [ 0. -0.15791725 -0.32117778 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.6717023 -1.2935276 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.25252837 -0.02737998 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.31268215 0.56063855 0. ] ... [ 0. 1.2261096 0.2997541 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.9359806 0.11240189 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5905275 0.3460479 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.41099313 0.10497786 0. ] ... [ 0. 2.073849 1.5084481 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.23804957 -0.04225207 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.9274315 0.38631812 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 
0. ... -0.4785857 0.8023749 0. ] ... [ 0. 0.48699152 -0.2577169 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.7357057 -1.3728269 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.84286904 1.5274957 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.43043146 0.78844887 0. ] ... [ 0. 0.6667014 0.81786084 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.89756733 1.1690542 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0808213 0.90919703 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.7941285 -0.11850775 0. ] ... [ 0. 1.6083392 -0.51055855 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.1649194 -1.6879596 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 1.0302087 1.7830575 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.1372455 0.832703 0. ] ... [ 0. -0.15791725 -0.32117778 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.6717023 -1.2935276 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.25252837 -0.02737998 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.31268215 0.56063855 0. ] ... [ 0. 1.2261096 0.2997541 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.9359806 0.11240189 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5905275 0.3460479 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.41099313 0.10497786 0. ] ... [ 0. 2.073849 1.5084481 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.23804957 -0.04225207 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:2 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5085.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -1.2784889 0.7667114 0.7466939 ] [ 0. 0. 0. ... 0.7466939 -0.3614247 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.51985997 -0.9194881 ... 0. 0. 0. ] [-0.9194881 -0.49891475 -0.8986873 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.9362123 1.0021629 0.7122939 ] [ 0. 0. 0. ... 0.7122939 0.20367983 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.4455172 1.5811181 ... 0. 0. 0. ] [ 1.5811181 -0.44271463 -0.17883055 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2798861 0.5280091 2.0946262 ] [ 0. 0. 0. ... 2.0946262 -0.48936465 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.96449834 -1.63946 ... 0. 0. 0. ] [-1.63946 0.9364837 0.20792432 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.25808448 -1.3568946 0.323143 ] [ 0. 0. 0. ... 0.323143 0.5575932 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.1986965 -0.15305758 ... 0. 0. 0. ] [-0.15305758 -0.4120076 -0.61690223 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 2.0089335 0.5869679 0.6108396 ] [ 0. 0. 0. ... 0.6108396 -0.1770951 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.43509668 0.29016906 ... 0. 0. 0. ] [ 0.29016906 0.18052615 -0.1838965 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8189993 0.9998223 -0.67964906] [ 0. 0. 0. ... -0.67964906 1.3657961 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.167226 0.11598685 ... 0. 0. 0. ] [ 0.11598685 0.6449223 0.5084248 ... 0. 0. 0. 
]]]; ov_res: [[[ 0. 0. 0. ... -1.2784889 0.7667114 0.7466939 ] [ 0. 0. 0. ... 0.7466939 -0.3614247 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.51985997 -0.9194881 ... 0. 0. 0. ] [-0.9194881 -0.49891475 -0.8986873 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.9362123 1.0021629 0.7122939 ] [ 0. 0. 0. ... 0.7122939 0.20367983 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.4455172 1.5811181 ... 0. 0. 0. ] [ 1.5811181 -0.44271463 -0.17883055 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.2798861 0.5280091 2.0946262 ] [ 0. 0. 0. ... 2.0946262 -0.48936465 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.96449834 -1.63946 ... 0. 0. 0. ] [-1.63946 0.9364837 0.20792432 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.25808448 -1.3568946 0.323143 ] [ 0. 0. 0. ... 0.323143 0.5575932 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.1986965 -0.15305758 ... 0. 0. 0. ] [-0.15305758 -0.4120076 -0.61690223 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 2.0089335 0.5869679 0.6108396 ] [ 0. 0. 0. ... 0.6108396 -0.1770951 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.43509668 0.29016906 ... 0. 0. 0. ] [ 0.29016906 0.18052615 -0.1838965 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.8189993 0.9998223 -0.67964906] [ 0. 0. 0. ... -0.67964906 1.3657961 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.167226 0.11598685 ... 0. 0. 0. ] [ 0.11598685 0.6449223 0.5084248 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:2 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5087.aten_im2col, %x.1 : Tensor): %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... -0.2615568 -0.15540251 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.4513242 -2.2410588 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.7539856 -0.3639968 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.4419099 0.4174573 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.1324698 2.0442686 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.20244835 0.7812689 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.2514459 -0.6063599 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.56590676 1.3103961 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9132886 1.2444885 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.2139937 0.98552454 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.12625337 -2.406505 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.1708978 1.1191508 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.2615568 -0.15540251 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -2.4513242 -2.2410588 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.7539856 -0.3639968 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... 
[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.4419099 0.4174573 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.1324698 2.0442686 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.20244835 0.7812689 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -1.2514459 -0.6063599 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.56590676 1.3103961 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.9132886 1.2444885 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.2139937 0.98552454 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.12625337 -2.406505 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.1708978 1.1191508 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:2 - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5089.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:3 - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5091.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 1.7543558 -1.6118144 -0.16799441] [ 0. 0. 0. ... 0.69778967 -0.8246467 -1.5873674 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0.8013091 0.9989136 0.4970745 ... 0. 0. 0. ] [ 0.13853018 -0.57421887 -0.6300564 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.0368178 -1.6919662 -0.8948792 ] [ 0. 0. 0. ... 0.20331034 -0.38858277 -0.11489645] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0.37459105 -1.252807 0.38011184 ... 0. 0. 0. ] [ 0.02116052 0.4198091 1.4672663 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.12365235 -0.6228583 -0.5770569 ] [ 0. 0. 0. ... 1.5515118 -0.99424547 -1.2764903 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 1.3159621 0.77142173 1.2293277 ... 0. 0. 0. ] [-0.6548759 0.48410156 -0.65040934 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.03503199 1.5849628 1.5073429 ] [ 0. 0. 0. ... 0.81905615 -0.6885407 0.8282651 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 1.2368094 0.04685405 0.03387033 ... 0. 0. 0. ] [ 0.49283952 0.406973 0.67091787 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.2681549 -1.4511195 1.1217728 ] [ 0. 0. 0. ... -0.62538475 0.9536632 0.5229682 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0.05600283 1.0188484 -1.3582932 ... 0. 0. 0. ] [ 2.5348544 0.5284187 -0.5130533 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.32626125 -0.08080105 -2.3021264 ] [ 0. 0. 0. ... 0.35670617 1.1141925 0.56936985] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-3.0611784 -0.53418976 0.54426825 ... 0. 0. 0. 
] [-0.47951135 -0.27132735 0.34880626 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 1.7543558 -1.6118144 -0.16799441] [ 0. 0. 0. ... 0.69778967 -0.8246467 -1.5873674 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0.8013091 0.9989136 0.4970745 ... 0. 0. 0. ] [ 0.13853018 -0.57421887 -0.6300564 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.0368178 -1.6919662 -0.8948792 ] [ 0. 0. 0. ... 0.20331034 -0.38858277 -0.11489645] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0.37459105 -1.252807 0.38011184 ... 0. 0. 0. ] [ 0.02116052 0.4198091 1.4672663 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.12365235 -0.6228583 -0.5770569 ] [ 0. 0. 0. ... 1.5515118 -0.99424547 -1.2764903 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 1.3159621 0.77142173 1.2293277 ... 0. 0. 0. ] [-0.6548759 0.48410156 -0.65040934 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.03503199 1.5849628 1.5073429 ] [ 0. 0. 0. ... 0.81905615 -0.6885407 0.8282651 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 1.2368094 0.04685405 0.03387033 ... 0. 0. 0. ] [ 0.49283952 0.406973 0.67091787 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.2681549 -1.4511195 1.1217728 ] [ 0. 0. 0. ... -0.62538475 0.9536632 0.5229682 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0.05600283 1.0188484 -1.3582932 ... 0. 0. 0. ] [ 2.5348544 0.5284187 -0.5130533 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.32626125 -0.08080105 -2.3021264 ] [ 0. 0. 0. ... 0.35670617 1.1141925 0.56936985] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-3.0611784 -0.53418976 0.54426825 ... 0. 0. 0. ] [-0.47951135 -0.27132735 0.34880626 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:3 - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5093.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.7162986 0.4434278 -0.93017256] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.6006854 -0.5893637 -0.6472101 ] ... [ 1.5959252 0.8690969 1.9080057 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 3.2002301 -0.4574024 -1.6467155 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.1304326 0.36716416 0.86881673] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.5546317 0.9196536 -0.17918473] ... [-1.335065 0.39625505 2.2448368 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.5309055 -0.606177 0.73924965 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.07083796 0.73445946 0.609323 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.2626952 -0.15921037 -0.92678607] ... [ 2.149496 -0.15288582 -0.60161483 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.41306317 -1.034186 1.2377096 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.72753674 0.04052955 -0.37914532] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.34955814 -1.4477124 -0.3233851 ] ... [ 1.3260875 1.8421061 -0.6224078 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.88186526 0.15332094 0.262322 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5737452 1.7437664 -1.5316526 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.46490964 -2.5773168 0.41020662] ... [ 0.9094566 2.415936 -1.6574032 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.4322591 0.86868167 0.86040413 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1202494 -0.9993059 -1.510685 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.760096 -1.2130481 0.2647964 ] ... [-0.1838622 0.3313941 0.70991236 ... 0. 
0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.6881177 0.5457857 -0.6504217 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.7162986 0.4434278 -0.93017256] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.6006854 -0.5893637 -0.6472101 ] ... [ 1.5959252 0.8690969 1.9080057 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 3.2002301 -0.4574024 -1.6467155 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.1304326 0.36716416 0.86881673] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.5546317 0.9196536 -0.17918473] ... [-1.335065 0.39625505 2.2448368 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.5309055 -0.606177 0.73924965 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.07083796 0.73445946 0.609323 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.2626952 -0.15921037 -0.92678607] ... [ 2.149496 -0.15288582 -0.60161483 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.41306317 -1.034186 1.2377096 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.72753674 0.04052955 -0.37914532] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.34955814 -1.4477124 -0.3233851 ] ... [ 1.3260875 1.8421061 -0.6224078 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.88186526 0.15332094 0.262322 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.5737452 1.7437664 -1.5316526 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.46490964 -2.5773168 0.41020662] ... [ 0.9094566 2.415936 -1.6574032 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0.4322591 0.86868167 0.86040413 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1202494 -0.9993059 -1.510685 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.760096 -1.2130481 0.2647964 ] ... [-0.1838622 0.3313941 0.70991236 ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.6881177 0.5457857 -0.6504217 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:3 - kernel_size:[3, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5095.aten_im2col, %x.1 : Tensor): %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[ 0. 0. 0. ... 0.806467 -0.6339346 0.57434916] [ 0. 0. 0. ... -0.45138058 -1.1634473 0.2908347 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.57228714 -0.82800835 -0.37637758 ... 0. 0. 0. ] [-1.6285143 -1.3682957 0.9695185 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -2.0619206 -0.07573604 0.6412011 ] [ 0. 0. 0. ... -0.09890698 -0.7353894 -0.9822657 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.21035907 1.2410775 -1.1224325 ... 0. 0. 0. ] [-0.56644404 1.3126222 1.0478548 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.4407728 0.08057009 1.248249 ] [ 0. 0. 0. ... -2.0834396 0.064173 0.08701258] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 1.1820229 -0.62046283 -0.34248158 ... 0. 0. 0. ] [-0.9449143 0.8998541 0.68878907 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.73281467 -1.7992184 -0.6060374 ] [ 0. 0. 0. ... -0.6172875 -0.2973829 0.27956605] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-3.5492811 -2.0201483 0.9458115 ... 0. 0. 0. ] [ 0.05501327 -0.9826542 0.6260961 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.86627895 1.588714 -0.34389 ] [ 0. 0. 0. ... 0.4725513 -0.3231402 -0.24716066] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.41809672 0.3016761 1.435625 ... 0. 0. 0. ] [-0.7130497 0.8957318 0.21745679 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.09025527 0.87358254 -0.6002401 ] [ 0. 0. 0. ... 2.0405369 -0.5895578 0.40082306] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.4568476 1.3213418 -0.3216194 ... 0. 
0. 0. ] [ 1.2950406 -0.16445431 0.69475865 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.806467 -0.6339346 0.57434916] [ 0. 0. 0. ... -0.45138058 -1.1634473 0.2908347 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.57228714 -0.82800835 -0.37637758 ... 0. 0. 0. ] [-1.6285143 -1.3682957 0.9695185 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -2.0619206 -0.07573604 0.6412011 ] [ 0. 0. 0. ... -0.09890698 -0.7353894 -0.9822657 ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.21035907 1.2410775 -1.1224325 ... 0. 0. 0. ] [-0.56644404 1.3126222 1.0478548 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.4407728 0.08057009 1.248249 ] [ 0. 0. 0. ... -2.0834396 0.064173 0.08701258] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 1.1820229 -0.62046283 -0.34248158 ... 0. 0. 0. ] [-0.9449143 0.8998541 0.68878907 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.73281467 -1.7992184 -0.6060374 ] [ 0. 0. 0. ... -0.6172875 -0.2973829 0.27956605] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-3.5492811 -2.0201483 0.9458115 ... 0. 0. 0. ] [ 0.05501327 -0.9826542 0.6260961 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.86627895 1.588714 -0.34389 ] [ 0. 0. 0. ... 0.4725513 -0.3231402 -0.24716066] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.41809672 0.3016761 1.435625 ... 0. 0. 0. ] [-0.7130497 0.8957318 0.21745679 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.09025527 0.87358254 -0.6002401 ] [ 0. 0. 0. ... 2.0405369 -0.5895578 0.40082306] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [-0.4568476 1.3213418 -0.3216194 ... 0. 0. 0. ] [ 1.2950406 -0.16445431 0.69475865 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:3 - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5097.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.09087519 -1.5189912 0.68318325] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.00436006 1.8578352 -1.6269258 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.48731512 -0.95593196 -0.47370794] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.362385 -1.9011122 -1.1403137 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.16243584 0.07254899 -0.82028055] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-1.0816343 -1.5492471 1.7626618 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.6995589 -0.9618278 -0.2494043 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.04981332 -2.1091123 -2.2925398 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3116982 -0.7135817 -0.36157814] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.3916436 -0.44507474 -1.6514112 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1011109 -0.2430083 0.3984959 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-2.9648848 -0.86241627 -0.82489234 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.09087519 -1.5189912 0.68318325] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [-0.00436006 1.8578352 -1.6269258 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.48731512 -0.95593196 -0.47370794] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.362385 -1.9011122 -1.1403137 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.16243584 0.07254899 -0.82028055] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-1.0816343 -1.5492471 1.7626618 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.6995589 -0.9618278 -0.2494043 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-0.04981332 -2.1091123 -2.2925398 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -1.3116982 -0.7135817 -0.36157814] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 1.3916436 -0.44507474 -1.6514112 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.1011109 -0.2430083 0.3984959 ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [-2.9648848 -0.86241627 -0.82489234 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:3 - kernel_size:[1, 1] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5099.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 3]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:(1, 2) - kernel_size:[2, 3] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5101.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[2, 3]]() %5 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.kernel_size, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%5) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 
0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:(1, 2) - kernel_size:[3, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5103.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... -0.20091623 -0.10780425 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.3828567 1.1151991 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.3032087 -0.68592 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.60097814 -0.95478207 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.61351013 1.5078766 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.879161 0.24066927 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.8392508 -2.489913 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.4576932 0.23404221 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.3819952 -0.10700807 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.41742477 -0.62765956 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.47526145 0.31042543 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.0048974 -0.02184688 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.54139525 -1.5396962 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.18186943 0.17603296 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.09434244 -0.62899977 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0092697 1.0977743 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.1685281 -0.74623257 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.6285172 0.6904314 ... 0. 0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... -0.20091623 -0.10780425 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.3828567 1.1151991 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 1.3032087 -0.68592 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.60097814 -0.95478207 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -0.61351013 1.5078766 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.879161 0.24066927 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.8392508 -2.489913 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 1.4576932 0.23404221 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.3819952 -0.10700807 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... -0.41742477 -0.62765956 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.47526145 0.31042543 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 2.0048974 -0.02184688 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.54139525 -1.5396962 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0.18186943 0.17603296 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.09434244 -0.62899977 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 1.0092697 1.0977743 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... -1.1685281 -0.74623257 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.6285172 0.6904314 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:(1, 2) - kernel_size:[3, 3] ] | 0.07 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5105.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[ 0. 0. 0. ... 0.37848797 -1.1214086 -0.50551665] [ 0. 0. 0. ... -0.50551665 -1.8928607 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.3979858 1.8304679 ... 0. 0. 0. ] [ 1.8304679 -0.57767576 -0.60982496 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.04501449 -0.8250175 -0.76410425] [ 0. 0. 0. ... -0.76410425 -0.47605973 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.2949703 -1.1369501 ... 0. 0. 0. ] [-1.1369501 -1.7961245 -0.65855604 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.61925966 2.1149185 0.02297417] [ 0. 0. 0. ... 0.02297417 1.0567582 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.33494234 0.16863973 ... 0. 0. 0. ] [ 0.16863973 -1.462342 -0.49084854 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.5205077 -0.41373277 1.2857643 ] [ 0. 0. 0. ... 1.2857643 0.59352636 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.8609174 -2.6622207 ... 0. 0. 0. ] [-2.6622207 0.6961238 -1.4698905 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.09363911 0.83952713 0.80509657] [ 0. 0. 0. ... 0.80509657 0.0479145 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.34811503 -1.2444037 ... 0. 0. 0. ] [-1.2444037 -1.4703797 -0.1424295 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.07718255 -0.3170328 0.4707816 ] [ 0. 0. 0. ... 0.4707816 1.8995788 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.2929415 1.3040571 ... 0. 0. 0. ] [ 1.3040571 0.87048775 -0.6226541 ... 0. 
0. 0. ]]]; ov_res: [[[ 0. 0. 0. ... 0.37848797 -1.1214086 -0.50551665] [ 0. 0. 0. ... -0.50551665 -1.8928607 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.3979858 1.8304679 ... 0. 0. 0. ] [ 1.8304679 -0.57767576 -0.60982496 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.04501449 -0.8250175 -0.76410425] [ 0. 0. 0. ... -0.76410425 -0.47605973 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 1.2949703 -1.1369501 ... 0. 0. 0. ] [-1.1369501 -1.7961245 -0.65855604 ... 0. 0. 0. ]] [[ 0. 0. 0. ... -0.61925966 2.1149185 0.02297417] [ 0. 0. 0. ... 0.02297417 1.0567582 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.33494234 0.16863973 ... 0. 0. 0. ] [ 0.16863973 -1.462342 -0.49084854 ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0.5205077 -0.41373277 1.2857643 ] [ 0. 0. 0. ... 1.2857643 0.59352636 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -0.8609174 -2.6622207 ... 0. 0. 0. ] [-2.6622207 0.6961238 -1.4698905 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.09363911 0.83952713 0.80509657] [ 0. 0. 0. ... 0.80509657 0.0479145 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0.34811503 -1.2444037 ... 0. 0. 0. ] [-1.2444037 -1.4703797 -0.1424295 ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0.07718255 -0.3170328 0.4707816 ] [ 0. 0. 0. ... 0.4707816 1.8995788 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. -1.2929415 1.3040571 ... 0. 0. 0. ] [ 1.3040571 0.87048775 -0.6226541 ... 0. 0. 0. ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:(1, 2) - kernel_size:[2, 2] ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5107.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[2, 2]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 
0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_im2col.py::TestIm2Col::test_exp[ ie_device:CPU - precision:FP32 - stride:[2, 1] - padding:[2, 3] - dilation:(1, 2) - kernel_size:[1, 1] ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_im2col.___torch_mangle_5109.aten_im2col, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2]]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.padding : int[] = prim::Constant[value=[2, 3]]() %self.kernel_size : int[] = prim::Constant[value=[1, 1]]() %6 : Tensor = aten::im2col(%x.1, %self.kernel_size, %2, %self.padding, %self.stride) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:4684:11 return (%6) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_leaky_relu.py::TestLeakyRelu::test_leaky_relu[ ie_device:CPU - precision:FP32 - alpha:0.01 - inplace:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_leaky_relu.___torch_mangle_5110.aten_leaky_relu, %x.1 : Tensor): %self.alpha : float = prim::Constant[value=0.01]() %result.1 : Tensor = aten::leaky_relu_(%x.1, %self.alpha) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1630:17 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %result.1) return (%4) fw_re: [[[[ 1.11850941e+00 -3.27152293e-03 3.61491814e-02 ... 2.19191745e-01 2.47935152e+00 7.85289407e-01] [-1.60854235e-02 -1.21935373e-02 4.32446599e-01 ... -3.92998615e-03 1.75794923e+00 9.98155892e-01] [-2.04444805e-04 2.98317164e-01 1.82727778e+00 ... -1.29075609e-02 1.15748322e+00 -6.95643807e-03] ... [-5.50848432e-03 -2.37757731e-02 6.48331285e-01 ... 3.62602383e-01 -6.09723059e-03 -3.80180054e-03] [-1.89834908e-02 1.43148601e+00 1.08573472e+00 ... -4.53679170e-03 -9.87356529e-03 -2.27107257e-02] [ 7.55768955e-01 -8.99801310e-03 -1.15003465e-02 ... -4.95330198e-03 -2.67834030e-02 2.27185041e-01]] [[ 2.70491004e-01 -6.16760366e-03 -1.29085360e-02 ... 1.48099884e-01 -1.33055020e-02 9.96538162e-01] [-8.68123170e-05 -1.99245885e-02 2.60260791e-01 ... -9.65441484e-03 -1.32776760e-02 -1.17552578e-02] [-3.15289991e-03 -1.48640033e-02 2.78126687e-01 ... -8.50853231e-03 -1.99949089e-02 5.90247333e-01] ... [ 1.29873359e+00 -1.95044391e-02 1.13962901e+00 ... -5.15960623e-03 1.97842140e-02 1.41015232e-01] [-1.62725188e-02 -5.24846464e-03 -1.08131357e-02 ... -6.71551377e-03 1.65260410e+00 -1.24940639e-02] [ 5.26754111e-02 -6.61562476e-03 8.02669764e-01 ... -8.92548065e-04 1.16699204e-01 -3.09571042e-03]] [[-1.63306110e-02 -4.96165315e-03 1.33802259e+00 ... -8.18220712e-03 1.68971491e+00 -1.04543241e-02] [ 2.78187752e-01 -5.06650237e-03 -6.04863791e-03 ... -1.25930365e-03 1.86198056e-01 1.37151504e+00] [ 1.78842103e+00 -1.05760321e-02 5.95458865e-01 ... 9.66803551e-01 1.53719556e+00 -7.85424840e-03] ... 
[ 1.28513336e+00 5.47080457e-01 1.19756527e-01 ... 2.63954431e-01 1.61462620e-01 3.22312385e-01] [ 2.17763782e-01 -7.87085854e-03 -1.18728112e-02 ... 4.22630668e-01 1.44374683e-01 5.71207106e-01] [-1.00669796e-02 1.37132370e+00 -5.76200848e-03 ... 1.00051856e+00 -6.53833710e-03 2.28728056e+00]]]]; ov_res: [[[[ 1.11850941e+00 -3.27152293e-03 3.61491814e-02 ... 2.19191745e-01 2.47935152e+00 7.85289407e-01] [-1.60854235e-02 -1.21935373e-02 4.32446599e-01 ... -3.92998615e-03 1.75794923e+00 9.98155892e-01] [-2.04444805e-04 2.98317164e-01 1.82727778e+00 ... -1.29075609e-02 1.15748322e+00 -6.95643807e-03] ... [-5.50848432e-03 -2.37757731e-02 6.48331285e-01 ... 3.62602383e-01 -6.09723059e-03 -3.80180054e-03] [-1.89834908e-02 1.43148601e+00 1.08573472e+00 ... -4.53679170e-03 -9.87356529e-03 -2.27107257e-02] [ 7.55768955e-01 -8.99801310e-03 -1.15003465e-02 ... -4.95330198e-03 -2.67834030e-02 2.27185041e-01]] [[ 2.70491004e-01 -6.16760366e-03 -1.29085360e-02 ... 1.48099884e-01 -1.33055020e-02 9.96538162e-01] [-8.68123170e-05 -1.99245885e-02 2.60260791e-01 ... -9.65441484e-03 -1.32776760e-02 -1.17552578e-02] [-3.15289991e-03 -1.48640033e-02 2.78126687e-01 ... -8.50853231e-03 -1.99949089e-02 5.90247333e-01] ... [ 1.29873359e+00 -1.95044391e-02 1.13962901e+00 ... -5.15960623e-03 1.97842140e-02 1.41015232e-01] [-1.62725188e-02 -5.24846464e-03 -1.08131357e-02 ... -6.71551377e-03 1.65260410e+00 -1.24940639e-02] [ 5.26754111e-02 -6.61562476e-03 8.02669764e-01 ... -8.92548065e-04 1.16699204e-01 -3.09571042e-03]] [[-1.63306110e-02 -4.96165315e-03 1.33802259e+00 ... -8.18220712e-03 1.68971491e+00 -1.04543241e-02] [ 2.78187752e-01 -5.06650237e-03 -6.04863791e-03 ... -1.25930365e-03 1.86198056e-01 1.37151504e+00] [ 1.78842103e+00 -1.05760321e-02 5.95458865e-01 ... 9.66803551e-01 1.53719556e+00 -7.85424840e-03] ... [ 1.28513336e+00 5.47080457e-01 1.19756527e-01 ... 2.63954431e-01 1.61462620e-01 3.22312385e-01] [ 2.17763782e-01 -7.87085854e-03 -1.18728112e-02 ... 
4.22630668e-01 1.44374683e-01 5.71207106e-01] [-1.00669796e-02 1.37132370e+00 -5.76200848e-03 ... 1.00051856e+00 -6.53833710e-03 2.28728056e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[ 1.11850941e+00 -3.27152293e-03 3.61491814e-02 ... 2.19191745e-01 2.47935152e+00 7.85289407e-01] [-1.60854235e-02 -1.21935373e-02 4.32446599e-01 ... -3.92998615e-03 1.75794923e+00 9.98155892e-01] [-2.04444805e-04 2.98317164e-01 1.82727778e+00 ... -1.29075609e-02 1.15748322e+00 -6.95643807e-03] ... [-5.50848432e-03 -2.37757731e-02 6.48331285e-01 ... 3.62602383e-01 -6.09723059e-03 -3.80180054e-03] [-1.89834908e-02 1.43148601e+00 1.08573472e+00 ... -4.53679170e-03 -9.87356529e-03 -2.27107257e-02] [ 7.55768955e-01 -8.99801310e-03 -1.15003465e-02 ... -4.95330198e-03 -2.67834030e-02 2.27185041e-01]] [[ 2.70491004e-01 -6.16760366e-03 -1.29085360e-02 ... 1.48099884e-01 -1.33055020e-02 9.96538162e-01] [-8.68123170e-05 -1.99245885e-02 2.60260791e-01 ... -9.65441484e-03 -1.32776760e-02 -1.17552578e-02] [-3.15289991e-03 -1.48640033e-02 2.78126687e-01 ... -8.50853231e-03 -1.99949089e-02 5.90247333e-01] ... [ 1.29873359e+00 -1.95044391e-02 1.13962901e+00 ... -5.15960623e-03 1.97842140e-02 1.41015232e-01] [-1.62725188e-02 -5.24846464e-03 -1.08131357e-02 ... -6.71551377e-03 1.65260410e+00 -1.24940639e-02] [ 5.26754111e-02 -6.61562476e-03 8.02669764e-01 ... -8.92548065e-04 1.16699204e-01 -3.09571042e-03]] [[-1.63306110e-02 -4.96165315e-03 1.33802259e+00 ... -8.18220712e-03 1.68971491e+00 -1.04543241e-02] [ 2.78187752e-01 -5.06650237e-03 -6.04863791e-03 ... -1.25930365e-03 1.86198056e-01 1.37151504e+00] [ 1.78842103e+00 -1.05760321e-02 5.95458865e-01 ... 9.66803551e-01 1.53719556e+00 -7.85424840e-03] ... [ 1.28513336e+00 5.47080457e-01 1.19756527e-01 ... 2.63954431e-01 1.61462620e-01 3.22312385e-01] [ 2.17763782e-01 -7.87085854e-03 -1.18728112e-02 ... 4.22630668e-01 1.44374683e-01 5.71207106e-01] [-1.00669796e-02 1.37132370e+00 -5.76200848e-03 ... 
1.00051856e+00 -6.53833710e-03 2.28728056e+00]]]]; ov_res: [[[[ 1.11850941e+00 -3.27152293e-03 3.61491814e-02 ... 2.19191745e-01 2.47935152e+00 7.85289407e-01] [-1.60854235e-02 -1.21935373e-02 4.32446599e-01 ... -3.92998615e-03 1.75794923e+00 9.98155892e-01] [-2.04444805e-04 2.98317164e-01 1.82727778e+00 ... -1.29075609e-02 1.15748322e+00 -6.95643807e-03] ... [-5.50848432e-03 -2.37757731e-02 6.48331285e-01 ... 3.62602383e-01 -6.09723059e-03 -3.80180054e-03] [-1.89834908e-02 1.43148601e+00 1.08573472e+00 ... -4.53679170e-03 -9.87356529e-03 -2.27107257e-02] [ 7.55768955e-01 -8.99801310e-03 -1.15003465e-02 ... -4.95330198e-03 -2.67834030e-02 2.27185041e-01]] [[ 2.70491004e-01 -6.16760366e-03 -1.29085360e-02 ... 1.48099884e-01 -1.33055020e-02 9.96538162e-01] [-8.68123170e-05 -1.99245885e-02 2.60260791e-01 ... -9.65441484e-03 -1.32776760e-02 -1.17552578e-02] [-3.15289991e-03 -1.48640033e-02 2.78126687e-01 ... -8.50853231e-03 -1.99949089e-02 5.90247333e-01] ... [ 1.29873359e+00 -1.95044391e-02 1.13962901e+00 ... -5.15960623e-03 1.97842140e-02 1.41015232e-01] [-1.62725188e-02 -5.24846464e-03 -1.08131357e-02 ... -6.71551377e-03 1.65260410e+00 -1.24940639e-02] [ 5.26754111e-02 -6.61562476e-03 8.02669764e-01 ... -8.92548065e-04 1.16699204e-01 -3.09571042e-03]] [[-1.63306110e-02 -4.96165315e-03 1.33802259e+00 ... -8.18220712e-03 1.68971491e+00 -1.04543241e-02] [ 2.78187752e-01 -5.06650237e-03 -6.04863791e-03 ... -1.25930365e-03 1.86198056e-01 1.37151504e+00] [ 1.78842103e+00 -1.05760321e-02 5.95458865e-01 ... 9.66803551e-01 1.53719556e+00 -7.85424840e-03] ... [ 1.28513336e+00 5.47080457e-01 1.19756527e-01 ... 2.63954431e-01 1.61462620e-01 3.22312385e-01] [ 2.17763782e-01 -7.87085854e-03 -1.18728112e-02 ... 4.22630668e-01 1.44374683e-01 5.71207106e-01] [-1.00669796e-02 1.37132370e+00 -5.76200848e-03 ... 1.00051856e+00 -6.53833710e-03 2.28728056e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_leaky_relu.py::TestLeakyRelu::test_leaky_relu[ ie_device:CPU - precision:FP32 - alpha:0.01 - inplace:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_leaky_relu.___torch_mangle_5112.aten_leaky_relu, %x.1 : Tensor): %self.alpha : float = prim::Constant[value=0.01]() %result.3 : Tensor = aten::leaky_relu(%x.1, %self.alpha) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1632:17 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %result.3) return (%4) fw_re: [[[[-1.5088596 0.03549724 0.0361061 ... -0.79462355 1.2306486 -1.3230587 ] [-1.5956339 -0.22640415 0.5218878 ... -0.07247655 -0.25630483 -1.9930087 ] [ 0.5703228 -0.03127352 0.27045506 ... -1.7203767 -0.37283802 -0.71506745] ... [-0.11566922 -1.3187945 0.15579905 ... 2.7781496 0.6039674 -0.34041172] [ 0.02057694 0.62200046 -0.9897953 ... -0.27932414 0.38242444 -0.70956904] [ 1.2272859 0.78319937 0.8160368 ... -0.46221197 2.6835322 0.793059 ]] [[-0.1802453 -0.5260908 0.24681073 ... -0.11403719 -0.10625203 -1.9478928 ] [-0.60540456 1.647337 -1.3317158 ... 0.870215 -0.61532587 0.72242373] [ 1.3321723 -0.31109244 2.2034767 ... 1.0192895 -0.31492797 -0.04219573] ... [ 0.08885022 1.3689336 0.20784654 ... 1.5195394 -0.6631605 1.7627937 ] [ 2.2902353 -0.07035511 0.88508356 ... -1.3754097 -0.0670174 -0.9013046 ] [ 0.81109303 -0.14683288 0.2755328 ... 0.52171135 1.1399925 2.6874576 ]] [[-1.2785981 0.1892032 -1.0150249 ... -1.2405503 -0.43208313 -0.04561521] [ 0.24004002 1.4305433 0.73419136 ... -0.8060464 -0.1383916 -0.794406 ] [ 0.36581352 0.6467103 -0.48655888 ... 0.5289864 -1.2387872 -1.6354208 ] ... [ 1.1140423 0.81817615 0.13653432 ... -0.43098143 0.9839512 -0.6527501 ] [-1.3397118 0.7582223 0.8360733 ... -1.3992414 0.24575774 -0.765719 ] [-0.6914258 0.13689353 -0.40724128 ... -1.6440421 -0.32932436 -0.11652038]]]]; ov_res: [[[[-1.5088596 0.03549724 0.0361061 ... -0.79462355 1.2306486 -1.3230587 ] [-1.5956339 -0.22640415 0.5218878 ... -0.07247655 -0.25630483 -1.9930087 ] [ 0.5703228 -0.03127352 0.27045506 ... 
-1.7203767 -0.37283802 -0.71506745] ... [-0.11566922 -1.3187945 0.15579905 ... 2.7781496 0.6039674 -0.34041172] [ 0.02057694 0.62200046 -0.9897953 ... -0.27932414 0.38242444 -0.70956904] [ 1.2272859 0.78319937 0.8160368 ... -0.46221197 2.6835322 0.793059 ]] [[-0.1802453 -0.5260908 0.24681073 ... -0.11403719 -0.10625203 -1.9478928 ] [-0.60540456 1.647337 -1.3317158 ... 0.870215 -0.61532587 0.72242373] [ 1.3321723 -0.31109244 2.2034767 ... 1.0192895 -0.31492797 -0.04219573] ... [ 0.08885022 1.3689336 0.20784654 ... 1.5195394 -0.6631605 1.7627937 ] [ 2.2902353 -0.07035511 0.88508356 ... -1.3754097 -0.0670174 -0.9013046 ] [ 0.81109303 -0.14683288 0.2755328 ... 0.52171135 1.1399925 2.6874576 ]] [[-1.2785981 0.1892032 -1.0150249 ... -1.2405503 -0.43208313 -0.04561521] [ 0.24004002 1.4305433 0.73419136 ... -0.8060464 -0.1383916 -0.794406 ] [ 0.36581352 0.6467103 -0.48655888 ... 0.5289864 -1.2387872 -1.6354208 ] ... [ 1.1140423 0.81817615 0.13653432 ... -0.43098143 0.9839512 -0.6527501 ] [-1.3397118 0.7582223 0.8360733 ... -1.3992414 0.24575774 -0.765719 ] [-0.6914258 0.13689353 -0.40724128 ... -1.6440421 -0.32932436 -0.11652038]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[-1.50885964e-02 3.54972407e-02 3.61061022e-02 ... -7.94623513e-03 1.23064864e+00 -1.32305874e-02] [-1.59563385e-02 -2.26404145e-03 5.21887779e-01 ... -7.24765472e-04 -2.56304815e-03 -1.99300870e-02] [ 5.70322812e-01 -3.12735210e-04 2.70455062e-01 ... -1.72037669e-02 -3.72838019e-03 -7.15067424e-03] ... [-1.15669216e-03 -1.31879449e-02 1.55799046e-01 ... 2.77814960e+00 6.03967428e-01 -3.40411719e-03] [ 2.05769408e-02 6.22000456e-01 -9.89795290e-03 ... -2.79324129e-03 3.82424444e-01 -7.09569035e-03] [ 1.22728586e+00 7.83199370e-01 8.16036820e-01 ... -4.62211948e-03 2.68353224e+00 7.93058991e-01]] [[-1.80245296e-03 -5.26090804e-03 2.46810734e-01 ... -1.14037190e-03 -1.06252031e-03 -1.94789283e-02] [-6.05404563e-03 1.64733696e+00 -1.33171575e-02 ... 
8.70214999e-01 -6.15325850e-03 7.22423732e-01] [ 1.33217227e+00 -3.11092427e-03 2.20347667e+00 ... 1.01928949e+00 -3.14927963e-03 -4.21957317e-04] ... [ 8.88502151e-02 1.36893356e+00 2.07846537e-01 ... 1.51953936e+00 -6.63160486e-03 1.76279366e+00] [ 2.29023528e+00 -7.03551108e-04 8.85083556e-01 ... -1.37540968e-02 -6.70173962e-04 -9.01304558e-03] [ 8.11093032e-01 -1.46832876e-03 2.75532812e-01 ... 5.21711349e-01 1.13999248e+00 2.68745756e+00]] [[-1.27859805e-02 1.89203203e-01 -1.01502491e-02 ... -1.24055026e-02 -4.32083104e-03 -4.56152135e-04] [ 2.40040019e-01 1.43054330e+00 7.34191358e-01 ... -8.06046370e-03 -1.38391601e-03 -7.94405956e-03] [ 3.65813524e-01 6.46710277e-01 -4.86558862e-03 ... 5.28986394e-01 -1.23878717e-02 -1.63542069e-02] ... [ 1.11404228e+00 8.18176150e-01 1.36534318e-01 ... -4.30981396e-03 9.83951211e-01 -6.52750069e-03] [-1.33971171e-02 7.58222282e-01 8.36073279e-01 ... -1.39924139e-02 2.45757744e-01 -7.65718985e-03] [-6.91425800e-03 1.36893526e-01 -4.07241285e-03 ... -1.64404213e-02 -3.29324347e-03 -1.16520375e-03]]]]; ov_res: [[[[-1.50885964e-02 3.54972407e-02 3.61061022e-02 ... -7.94623513e-03 1.23064864e+00 -1.32305874e-02] [-1.59563385e-02 -2.26404145e-03 5.21887779e-01 ... -7.24765472e-04 -2.56304815e-03 -1.99300870e-02] [ 5.70322812e-01 -3.12735210e-04 2.70455062e-01 ... -1.72037669e-02 -3.72838019e-03 -7.15067424e-03] ... [-1.15669216e-03 -1.31879449e-02 1.55799046e-01 ... 2.77814960e+00 6.03967428e-01 -3.40411719e-03] [ 2.05769408e-02 6.22000456e-01 -9.89795290e-03 ... -2.79324129e-03 3.82424444e-01 -7.09569035e-03] [ 1.22728586e+00 7.83199370e-01 8.16036820e-01 ... -4.62211948e-03 2.68353224e+00 7.93058991e-01]] [[-1.80245296e-03 -5.26090804e-03 2.46810734e-01 ... -1.14037190e-03 -1.06252031e-03 -1.94789283e-02] [-6.05404563e-03 1.64733696e+00 -1.33171575e-02 ... 8.70214999e-01 -6.15325850e-03 7.22423732e-01] [ 1.33217227e+00 -3.11092427e-03 2.20347667e+00 ... 1.01928949e+00 -3.14927963e-03 -4.21957317e-04] ... 
[ 8.88502151e-02 1.36893356e+00 2.07846537e-01 ... 1.51953936e+00 -6.63160486e-03 1.76279366e+00] [ 2.29023528e+00 -7.03551108e-04 8.85083556e-01 ... -1.37540968e-02 -6.70173962e-04 -9.01304558e-03] [ 8.11093032e-01 -1.46832876e-03 2.75532812e-01 ... 5.21711349e-01 1.13999248e+00 2.68745756e+00]] [[-1.27859805e-02 1.89203203e-01 -1.01502491e-02 ... -1.24055026e-02 -4.32083104e-03 -4.56152135e-04] [ 2.40040019e-01 1.43054330e+00 7.34191358e-01 ... -8.06046370e-03 -1.38391601e-03 -7.94405956e-03] [ 3.65813524e-01 6.46710277e-01 -4.86558862e-03 ... 5.28986394e-01 -1.23878717e-02 -1.63542069e-02] ... [ 1.11404228e+00 8.18176150e-01 1.36534318e-01 ... -4.30981396e-03 9.83951211e-01 -6.52750069e-03] [-1.33971171e-02 7.58222282e-01 8.36073279e-01 ... -1.39924139e-02 2.45757744e-01 -7.65718985e-03] [-6.91425800e-03 1.36893526e-01 -4.07241285e-03 ... -1.64404213e-02 -3.29324347e-03 -1.16520375e-03]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_leaky_relu.py::TestLeakyRelu::test_leaky_relu[ ie_device:CPU - precision:FP32 - alpha:1.01 - inplace:True ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_leaky_relu.___torch_mangle_5114.aten_leaky_relu, %x.1 : Tensor): %self.alpha : float = prim::Constant[value=1.01]() %result.1 : Tensor = aten::leaky_relu_(%x.1, %self.alpha) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1630:17 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %result.1) return (%4) fw_re: [[[[ 0.23410341 -0.9640842 -0.37237814 ... 0.42666408 -0.6809244 0.7466108 ] [ 1.2350857 1.5273594 0.15635216 ... -0.06958083 -0.23393668 -0.68600905] [-0.14780225 0.12021655 0.67211413 ... -1.6039166 0.08873843 0.11826338] ... [ 0.30478737 -1.3685012 1.4183187 ... 0.27365947 -0.83884233 -2.1312573 ] [-1.4619834 -0.06515878 0.5661273 ... 1.2779597 -0.8095502 2.6392567 ] [ 0.02415166 -1.129245 -1.893654 ... -0.80648315 -0.03597467 -0.5103142 ]] [[-1.2328252 1.6834463 0.5671512 ... -0.45625117 0.3200503 0.16406621] [ 0.3648536 -1.3615556 1.4841983 ... 1.0427841 -0.71918315 -0.8653167 ] [-1.3484619 1.8399317 -0.38812083 ... 0.09774932 0.2516324 -1.7163125 ] ... [-0.8770955 0.20746265 0.65257394 ... -1.2283261 0.44199002 1.2894418 ] [ 1.8303384 0.8628543 0.53683364 ... -0.08255189 0.00658084 -0.22062474] [ 1.2810572 1.5849446 -1.2936805 ... 0.74983037 -0.3653419 0.16207549]] [[ 1.26727 1.1138923 2.05559 ... 0.7237224 0.06277566 -0.29170793] [-0.19611105 -0.16799538 -0.88609725 ... -0.48099875 -0.10464839 -0.06638424] [ 0.8234816 -1.0485896 1.224896 ... 0.03045608 -0.4468702 -1.4646142 ] ... [-0.09719062 -0.6814784 0.00586805 ... 1.4180044 -0.27022296 -1.0957694 ] [-0.19078997 0.803492 0.7134604 ... 1.2052083 0.5484376 -0.13300595] [-0.74636567 -1.5726634 2.010238 ... -0.5713386 -1.0642576 -0.3733156 ]]]]; ov_res: [[[[ 0.23410341 -0.9640842 -0.37237814 ... 0.42666408 -0.6809244 0.7466108 ] [ 1.2350857 1.5273594 0.15635216 ... -0.06958083 -0.23393668 -0.68600905] [-0.14780225 0.12021655 0.67211413 ... 
-1.6039166 0.08873843 0.11826338] ... [ 0.30478737 -1.3685012 1.4183187 ... 0.27365947 -0.83884233 -2.1312573 ] [-1.4619834 -0.06515878 0.5661273 ... 1.2779597 -0.8095502 2.6392567 ] [ 0.02415166 -1.129245 -1.893654 ... -0.80648315 -0.03597467 -0.5103142 ]] [[-1.2328252 1.6834463 0.5671512 ... -0.45625117 0.3200503 0.16406621] [ 0.3648536 -1.3615556 1.4841983 ... 1.0427841 -0.71918315 -0.8653167 ] [-1.3484619 1.8399317 -0.38812083 ... 0.09774932 0.2516324 -1.7163125 ] ... [-0.8770955 0.20746265 0.65257394 ... -1.2283261 0.44199002 1.2894418 ] [ 1.8303384 0.8628543 0.53683364 ... -0.08255189 0.00658084 -0.22062474] [ 1.2810572 1.5849446 -1.2936805 ... 0.74983037 -0.3653419 0.16207549]] [[ 1.26727 1.1138923 2.05559 ... 0.7237224 0.06277566 -0.29170793] [-0.19611105 -0.16799538 -0.88609725 ... -0.48099875 -0.10464839 -0.06638424] [ 0.8234816 -1.0485896 1.224896 ... 0.03045608 -0.4468702 -1.4646142 ] ... [-0.09719062 -0.6814784 0.00586805 ... 1.4180044 -0.27022296 -1.0957694 ] [-0.19078997 0.803492 0.7134604 ... 1.2052083 0.5484376 -0.13300595] [-0.74636567 -1.5726634 2.010238 ... -0.5713386 -1.0642576 -0.3733156 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[ 0.23410341 -0.9640842 -0.37237814 ... 0.42666408 -0.6809244 0.7466108 ] [ 1.2350857 1.5273594 0.15635216 ... -0.06958083 -0.23393668 -0.68600905] [-0.14780225 0.12021655 0.67211413 ... -1.6039166 0.08873843 0.11826338] ... [ 0.30478737 -1.3685012 1.4183187 ... 0.27365947 -0.83884233 -2.1312573 ] [-1.4619834 -0.06515878 0.5661273 ... 1.2779597 -0.8095502 2.6392567 ] [ 0.02415166 -1.129245 -1.893654 ... -0.80648315 -0.03597467 -0.5103142 ]] [[-1.2328252 1.6834463 0.5671512 ... -0.45625117 0.3200503 0.16406621] [ 0.3648536 -1.3615556 1.4841983 ... 1.0427841 -0.71918315 -0.8653167 ] [-1.3484619 1.8399317 -0.38812083 ... 0.09774932 0.2516324 -1.7163125 ] ... [-0.8770955 0.20746265 0.65257394 ... -1.2283261 0.44199002 1.2894418 ] [ 1.8303384 0.8628543 0.53683364 ... 
-0.08255189 0.00658084 -0.22062474] [ 1.2810572 1.5849446 -1.2936805 ... 0.74983037 -0.3653419 0.16207549]] [[ 1.26727 1.1138923 2.05559 ... 0.7237224 0.06277566 -0.29170793] [-0.19611105 -0.16799538 -0.88609725 ... -0.48099875 -0.10464839 -0.06638424] [ 0.8234816 -1.0485896 1.224896 ... 0.03045608 -0.4468702 -1.4646142 ] ... [-0.09719062 -0.6814784 0.00586805 ... 1.4180044 -0.27022296 -1.0957694 ] [-0.19078997 0.803492 0.7134604 ... 1.2052083 0.5484376 -0.13300595] [-0.74636567 -1.5726634 2.010238 ... -0.5713386 -1.0642576 -0.3733156 ]]]]; ov_res: [[[[ 0.23410341 -0.9640842 -0.37237814 ... 0.42666408 -0.6809244 0.7466108 ] [ 1.2350857 1.5273594 0.15635216 ... -0.06958083 -0.23393668 -0.68600905] [-0.14780225 0.12021655 0.67211413 ... -1.6039166 0.08873843 0.11826338] ... [ 0.30478737 -1.3685012 1.4183187 ... 0.27365947 -0.83884233 -2.1312573 ] [-1.4619834 -0.06515878 0.5661273 ... 1.2779597 -0.8095502 2.6392567 ] [ 0.02415166 -1.129245 -1.893654 ... -0.80648315 -0.03597467 -0.5103142 ]] [[-1.2328252 1.6834463 0.5671512 ... -0.45625117 0.3200503 0.16406621] [ 0.3648536 -1.3615556 1.4841983 ... 1.0427841 -0.71918315 -0.8653167 ] [-1.3484619 1.8399317 -0.38812083 ... 0.09774932 0.2516324 -1.7163125 ] ... [-0.8770955 0.20746265 0.65257394 ... -1.2283261 0.44199002 1.2894418 ] [ 1.8303384 0.8628543 0.53683364 ... -0.08255189 0.00658084 -0.22062474] [ 1.2810572 1.5849446 -1.2936805 ... 0.74983037 -0.3653419 0.16207549]] [[ 1.26727 1.1138923 2.05559 ... 0.7237224 0.06277566 -0.29170793] [-0.19611105 -0.16799538 -0.88609725 ... -0.48099875 -0.10464839 -0.06638424] [ 0.8234816 -1.0485896 1.224896 ... 0.03045608 -0.4468702 -1.4646142 ] ... [-0.09719062 -0.6814784 0.00586805 ... 1.4180044 -0.27022296 -1.0957694 ] [-0.19078997 0.803492 0.7134604 ... 1.2052083 0.5484376 -0.13300595] [-0.74636567 -1.5726634 2.010238 ... -0.5713386 -1.0642576 -0.3733156 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_leaky_relu.py::TestLeakyRelu::test_leaky_relu[ ie_device:CPU - precision:FP32 - alpha:1.01 - inplace:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_leaky_relu.___torch_mangle_5116.aten_leaky_relu, %x.1 : Tensor): %self.alpha : float = prim::Constant[value=1.01]() %result.3 : Tensor = aten::leaky_relu(%x.1, %self.alpha) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1632:17 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %result.3) return (%4) fw_re: [[[[-0.35513118 -0.32193828 -2.1327279 ... 1.2787849 -1.4490405 0.306319 ] [-1.3728881 1.0914028 -0.7158303 ... -1.5740443 -1.9678074 -0.4730043 ] [-0.6553364 0.08650982 0.5752655 ... 0.7390083 0.8809935 0.4307825 ] ... [-0.2721261 -0.93092006 0.12195991 ... -0.04502021 1.1879746 3.0452948 ] [-0.93584967 0.81035036 0.6820208 ... 2.4582741 -0.11156155 -1.3571504 ] [ 0.13485898 1.022868 0.60114557 ... 0.23022708 -0.46871644 -0.13994363]] [[ 0.861205 -1.2589588 -0.19506302 ... 0.6263832 -0.12097166 -1.8078071 ] [-0.61789143 -0.7512981 -0.78380096 ... 2.1920118 2.4826143 -0.73307014] [-0.89351267 0.3430641 0.59294236 ... 1.3873479 0.55338025 -2.0848994 ] ... [ 1.230942 -1.2491179 -0.24127544 ... -0.5315895 1.8024616 0.02408877] [ 1.1155515 -0.27715254 1.8064488 ... -2.0085316 -2.4575694 -1.6003939 ] [ 0.58461875 -0.18676996 -0.32185844 ... 0.14665169 -1.2753829 -1.3959903 ]] [[-0.62385565 0.91512483 -0.62984633 ... 1.3946333 -0.12330785 -0.18895996] [-2.0934732 0.51082546 -1.1402273 ... -1.4956465 1.9249473 -0.60635066] [-0.99134666 -1.7932726 0.51525396 ... -0.02568694 1.3316205 -1.2273062 ] ... [ 0.2742354 0.6925013 -0.47803208 ... 0.03444891 0.29647395 -0.7595408 ] [ 0.6463475 0.7719854 -0.11420435 ... 0.59793675 1.4415928 -0.23026606] [ 0.17337343 0.28330708 -0.20805658 ... -0.20012495 -0.4429335 -0.95347846]]]]; ov_res: [[[[-0.35513118 -0.32193828 -2.1327279 ... 1.2787849 -1.4490405 0.306319 ] [-1.3728881 1.0914028 -0.7158303 ... -1.5740443 -1.9678074 -0.4730043 ] [-0.6553364 0.08650982 0.5752655 ... 
0.7390083 0.8809935 0.4307825 ] ... [-0.2721261 -0.93092006 0.12195991 ... -0.04502021 1.1879746 3.0452948 ] [-0.93584967 0.81035036 0.6820208 ... 2.4582741 -0.11156155 -1.3571504 ] [ 0.13485898 1.022868 0.60114557 ... 0.23022708 -0.46871644 -0.13994363]] [[ 0.861205 -1.2589588 -0.19506302 ... 0.6263832 -0.12097166 -1.8078071 ] [-0.61789143 -0.7512981 -0.78380096 ... 2.1920118 2.4826143 -0.73307014] [-0.89351267 0.3430641 0.59294236 ... 1.3873479 0.55338025 -2.0848994 ] ... [ 1.230942 -1.2491179 -0.24127544 ... -0.5315895 1.8024616 0.02408877] [ 1.1155515 -0.27715254 1.8064488 ... -2.0085316 -2.4575694 -1.6003939 ] [ 0.58461875 -0.18676996 -0.32185844 ... 0.14665169 -1.2753829 -1.3959903 ]] [[-0.62385565 0.91512483 -0.62984633 ... 1.3946333 -0.12330785 -0.18895996] [-2.0934732 0.51082546 -1.1402273 ... -1.4956465 1.9249473 -0.60635066] [-0.99134666 -1.7932726 0.51525396 ... -0.02568694 1.3316205 -1.2273062 ] ... [ 0.2742354 0.6925013 -0.47803208 ... 0.03444891 0.29647395 -0.7595408 ] [ 0.6463475 0.7719854 -0.11420435 ... 0.59793675 1.4415928 -0.23026606] [ 0.17337343 0.28330708 -0.20805658 ... -0.20012495 -0.4429335 -0.95347846]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[-0.35868248 -0.32515764 -2.154055 ... 1.2787849 -1.4635309 0.306319 ] [-1.386617 1.0914028 -0.7229886 ... -1.5897847 -1.9874854 -0.47773436] [-0.66188973 0.08650982 0.5752655 ... 0.7390083 0.8809935 0.4307825 ] ... [-0.27484736 -0.94022924 0.12195991 ... -0.04547041 1.1879746 3.0452948 ] [-0.94520813 0.81035036 0.6820208 ... 2.4582741 -0.11267716 -1.3707219 ] [ 0.13485898 1.022868 0.60114557 ... 0.23022708 -0.4734036 -0.14134306]] [[ 0.861205 -1.2715484 -0.19701365 ... 0.6263832 -0.12218137 -1.8258852 ] [-0.62407035 -0.7588111 -0.791639 ... 2.1920118 2.4826143 -0.74040085] [-0.90244776 0.3430641 0.59294236 ... 1.3873479 0.55338025 -2.1057484 ] ... [ 1.230942 -1.2616091 -0.2436882 ... -0.5369054 1.8024616 0.02408877] [ 1.1155515 -0.27992406 1.8064488 ... 
-2.028617 -2.482145 -1.6163979 ] [ 0.58461875 -0.18863766 -0.32507703 ... 0.14665169 -1.2881367 -1.4099501 ]] [[-0.63009423 0.91512483 -0.6361448 ... 1.3946333 -0.12454093 -0.19084956] [-2.114408 0.51082546 -1.1516296 ... -1.510603 1.9249473 -0.6124142 ] [-1.0012602 -1.8112053 0.51525396 ... -0.02594381 1.3316205 -1.2395793 ] ... [ 0.2742354 0.6925013 -0.4828124 ... 0.03444891 0.29647395 -0.7671362 ] [ 0.6463475 0.7719854 -0.11534639 ... 0.59793675 1.4415928 -0.23256873] [ 0.17337343 0.28330708 -0.21013714 ... -0.20212619 -0.44736284 -0.96301323]]]]; ov_res: [[[[-0.35868248 -0.32515764 -2.154055 ... 1.2787849 -1.4635309 0.306319 ] [-1.386617 1.0914028 -0.7229886 ... -1.5897847 -1.9874854 -0.47773436] [-0.66188973 0.08650982 0.5752655 ... 0.7390083 0.8809935 0.4307825 ] ... [-0.27484736 -0.94022924 0.12195991 ... -0.04547041 1.1879746 3.0452948 ] [-0.94520813 0.81035036 0.6820208 ... 2.4582741 -0.11267716 -1.3707219 ] [ 0.13485898 1.022868 0.60114557 ... 0.23022708 -0.4734036 -0.14134306]] [[ 0.861205 -1.2715484 -0.19701365 ... 0.6263832 -0.12218137 -1.8258852 ] [-0.62407035 -0.7588111 -0.791639 ... 2.1920118 2.4826143 -0.74040085] [-0.90244776 0.3430641 0.59294236 ... 1.3873479 0.55338025 -2.1057484 ] ... [ 1.230942 -1.2616091 -0.2436882 ... -0.5369054 1.8024616 0.02408877] [ 1.1155515 -0.27992406 1.8064488 ... -2.028617 -2.482145 -1.6163979 ] [ 0.58461875 -0.18863766 -0.32507703 ... 0.14665169 -1.2881367 -1.4099501 ]] [[-0.63009423 0.91512483 -0.6361448 ... 1.3946333 -0.12454093 -0.19084956] [-2.114408 0.51082546 -1.1516296 ... -1.510603 1.9249473 -0.6124142 ] [-1.0012602 -1.8112053 0.51525396 ... -0.02594381 1.3316205 -1.2395793 ] ... [ 0.2742354 0.6925013 -0.4828124 ... 0.03444891 0.29647395 -0.7671362 ] [ 0.6463475 0.7719854 -0.11534639 ... 0.59793675 1.4415928 -0.23256873] [ 0.17337343 0.28330708 -0.21013714 ... -0.20212619 -0.44736284 -0.96301323]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_leaky_relu.py::TestLeakyRelu::test_leaky_relu[ ie_device:CPU - precision:FP32 - alpha:-0.01 - inplace:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_leaky_relu.___torch_mangle_5118.aten_leaky_relu, %x.1 : Tensor): %self.alpha : float = prim::Constant[value=-0.01]() %result.1 : Tensor = aten::leaky_relu_(%x.1, %self.alpha) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1630:17 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %result.1) return (%4) fw_re: [[[[1.76178478e-02 1.60244899e-03 2.43443608e-01 ... 8.35821498e-03 6.88192725e-01 6.45691872e-01] [9.69713554e-03 4.30111913e-03 2.21328363e-01 ... 1.01038795e-02 2.18524635e-02 2.12335531e-02] [2.70995800e-03 4.05924208e-03 1.57694519e-02 ... 8.47344398e-01 8.67957532e-01 2.66623916e-03] ... [6.06395543e-01 4.13173974e-01 6.04683340e-01 ... 3.74157041e-01 8.79757106e-01 1.82791566e-03] [2.20119720e-03 2.72383128e-04 1.94613740e-01 ... 1.48705852e+00 3.05632681e-01 1.70668233e-02] [5.00407934e-01 5.20735420e-03 8.17653374e-04 ... 6.63787313e-03 2.91260355e-03 1.60414010e-01]] [[1.04218245e+00 5.07074874e-03 2.39359937e-03 ... 4.04660910e-01 3.32420379e-01 1.30384499e-02] [2.20146164e-01 5.18198848e-01 6.84060574e-01 ... 2.34726653e-03 1.79760605e-01 8.39679688e-02] [3.48631246e-03 8.27344786e-03 1.93017721e-02 ... 3.02822795e-03 1.99706759e-03 1.46221220e-02] ... [1.32097030e+00 6.61952654e-03 1.37387104e-02 ... 3.86254728e-01 1.20776044e-02 1.61721849e+00] [4.65912908e-01 8.41634214e-01 6.39128923e-01 ... 8.31258576e-03 1.23257124e+00 2.83453753e-03] [1.67742949e-02 1.13619402e-01 4.84268321e-03 ... 7.32070068e-03 2.43311912e-01 5.34177711e-03]] [[8.01613554e-04 6.22305989e-01 1.29862893e-02 ... 8.86963157e-04 3.05554713e-03 4.17927420e-03] [3.90707612e-01 1.43264604e+00 1.16882278e-02 ... 5.14419973e-01 2.41597128e+00 6.76326677e-02] [6.94858900e-05 2.12698895e-03 1.58182049e+00 ... 1.15477573e-02 1.80236679e-02 9.12816644e-01] ... [5.77500165e-01 1.53216279e+00 7.26055936e-04 ... 
1.68128848e+00 4.61562485e-01 2.50506103e-02] [5.83235323e-01 1.11623425e-02 1.60958292e-03 ... 1.14851510e-02 2.79578916e-03 1.14795042e-03] [1.19347209e-02 1.75711441e+00 3.37977111e-01 ... 5.87176671e-03 4.56836641e-01 4.49721247e-01]]]]; ov_res: [[[[1.76178478e-02 1.60244899e-03 2.43443608e-01 ... 8.35821498e-03 6.88192725e-01 6.45691872e-01] [9.69713554e-03 4.30111913e-03 2.21328363e-01 ... 1.01038795e-02 2.18524635e-02 2.12335531e-02] [2.70995800e-03 4.05924208e-03 1.57694519e-02 ... 8.47344398e-01 8.67957532e-01 2.66623916e-03] ... [6.06395543e-01 4.13173974e-01 6.04683340e-01 ... 3.74157041e-01 8.79757106e-01 1.82791566e-03] [2.20119720e-03 2.72383128e-04 1.94613740e-01 ... 1.48705852e+00 3.05632681e-01 1.70668233e-02] [5.00407934e-01 5.20735420e-03 8.17653374e-04 ... 6.63787313e-03 2.91260355e-03 1.60414010e-01]] [[1.04218245e+00 5.07074874e-03 2.39359937e-03 ... 4.04660910e-01 3.32420379e-01 1.30384499e-02] [2.20146164e-01 5.18198848e-01 6.84060574e-01 ... 2.34726653e-03 1.79760605e-01 8.39679688e-02] [3.48631246e-03 8.27344786e-03 1.93017721e-02 ... 3.02822795e-03 1.99706759e-03 1.46221220e-02] ... [1.32097030e+00 6.61952654e-03 1.37387104e-02 ... 3.86254728e-01 1.20776044e-02 1.61721849e+00] [4.65912908e-01 8.41634214e-01 6.39128923e-01 ... 8.31258576e-03 1.23257124e+00 2.83453753e-03] [1.67742949e-02 1.13619402e-01 4.84268321e-03 ... 7.32070068e-03 2.43311912e-01 5.34177711e-03]] [[8.01613554e-04 6.22305989e-01 1.29862893e-02 ... 8.86963157e-04 3.05554713e-03 4.17927420e-03] [3.90707612e-01 1.43264604e+00 1.16882278e-02 ... 5.14419973e-01 2.41597128e+00 6.76326677e-02] [6.94858900e-05 2.12698895e-03 1.58182049e+00 ... 1.15477573e-02 1.80236679e-02 9.12816644e-01] ... [5.77500165e-01 1.53216279e+00 7.26055936e-04 ... 1.68128848e+00 4.61562485e-01 2.50506103e-02] [5.83235323e-01 1.11623425e-02 1.60958292e-03 ... 1.14851510e-02 2.79578916e-03 1.14795042e-03] [1.19347209e-02 1.75711441e+00 3.37977111e-01 ... 
5.87176671e-03 4.56836641e-01 4.49721247e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[1.76178478e-02 1.60244899e-03 2.43443608e-01 ... 8.35821498e-03 6.88192725e-01 6.45691872e-01] [9.69713554e-03 4.30111913e-03 2.21328363e-01 ... 1.01038795e-02 2.18524635e-02 2.12335531e-02] [2.70995800e-03 4.05924208e-03 1.57694519e-02 ... 8.47344398e-01 8.67957532e-01 2.66623916e-03] ... [6.06395543e-01 4.13173974e-01 6.04683340e-01 ... 3.74157041e-01 8.79757106e-01 1.82791566e-03] [2.20119720e-03 2.72383128e-04 1.94613740e-01 ... 1.48705852e+00 3.05632681e-01 1.70668233e-02] [5.00407934e-01 5.20735420e-03 8.17653374e-04 ... 6.63787313e-03 2.91260355e-03 1.60414010e-01]] [[1.04218245e+00 5.07074874e-03 2.39359937e-03 ... 4.04660910e-01 3.32420379e-01 1.30384499e-02] [2.20146164e-01 5.18198848e-01 6.84060574e-01 ... 2.34726653e-03 1.79760605e-01 8.39679688e-02] [3.48631246e-03 8.27344786e-03 1.93017721e-02 ... 3.02822795e-03 1.99706759e-03 1.46221220e-02] ... [1.32097030e+00 6.61952654e-03 1.37387104e-02 ... 3.86254728e-01 1.20776044e-02 1.61721849e+00] [4.65912908e-01 8.41634214e-01 6.39128923e-01 ... 8.31258576e-03 1.23257124e+00 2.83453753e-03] [1.67742949e-02 1.13619402e-01 4.84268321e-03 ... 7.32070068e-03 2.43311912e-01 5.34177711e-03]] [[8.01613554e-04 6.22305989e-01 1.29862893e-02 ... 8.86963157e-04 3.05554713e-03 4.17927420e-03] [3.90707612e-01 1.43264604e+00 1.16882278e-02 ... 5.14419973e-01 2.41597128e+00 6.76326677e-02] [6.94858900e-05 2.12698895e-03 1.58182049e+00 ... 1.15477573e-02 1.80236679e-02 9.12816644e-01] ... [5.77500165e-01 1.53216279e+00 7.26055936e-04 ... 1.68128848e+00 4.61562485e-01 2.50506103e-02] [5.83235323e-01 1.11623425e-02 1.60958292e-03 ... 1.14851510e-02 2.79578916e-03 1.14795042e-03] [1.19347209e-02 1.75711441e+00 3.37977111e-01 ... 5.87176671e-03 4.56836641e-01 4.49721247e-01]]]]; ov_res: [[[[1.76178478e-02 1.60244899e-03 2.43443608e-01 ... 
8.35821498e-03 6.88192725e-01 6.45691872e-01] [9.69713554e-03 4.30111913e-03 2.21328363e-01 ... 1.01038795e-02 2.18524635e-02 2.12335531e-02] [2.70995800e-03 4.05924208e-03 1.57694519e-02 ... 8.47344398e-01 8.67957532e-01 2.66623916e-03] ... [6.06395543e-01 4.13173974e-01 6.04683340e-01 ... 3.74157041e-01 8.79757106e-01 1.82791566e-03] [2.20119720e-03 2.72383128e-04 1.94613740e-01 ... 1.48705852e+00 3.05632681e-01 1.70668233e-02] [5.00407934e-01 5.20735420e-03 8.17653374e-04 ... 6.63787313e-03 2.91260355e-03 1.60414010e-01]] [[1.04218245e+00 5.07074874e-03 2.39359937e-03 ... 4.04660910e-01 3.32420379e-01 1.30384499e-02] [2.20146164e-01 5.18198848e-01 6.84060574e-01 ... 2.34726653e-03 1.79760605e-01 8.39679688e-02] [3.48631246e-03 8.27344786e-03 1.93017721e-02 ... 3.02822795e-03 1.99706759e-03 1.46221220e-02] ... [1.32097030e+00 6.61952654e-03 1.37387104e-02 ... 3.86254728e-01 1.20776044e-02 1.61721849e+00] [4.65912908e-01 8.41634214e-01 6.39128923e-01 ... 8.31258576e-03 1.23257124e+00 2.83453753e-03] [1.67742949e-02 1.13619402e-01 4.84268321e-03 ... 7.32070068e-03 2.43311912e-01 5.34177711e-03]] [[8.01613554e-04 6.22305989e-01 1.29862893e-02 ... 8.86963157e-04 3.05554713e-03 4.17927420e-03] [3.90707612e-01 1.43264604e+00 1.16882278e-02 ... 5.14419973e-01 2.41597128e+00 6.76326677e-02] [6.94858900e-05 2.12698895e-03 1.58182049e+00 ... 1.15477573e-02 1.80236679e-02 9.12816644e-01] ... [5.77500165e-01 1.53216279e+00 7.26055936e-04 ... 1.68128848e+00 4.61562485e-01 2.50506103e-02] [5.83235323e-01 1.11623425e-02 1.60958292e-03 ... 1.14851510e-02 2.79578916e-03 1.14795042e-03] [1.19347209e-02 1.75711441e+00 3.37977111e-01 ... 5.87176671e-03 4.56836641e-01 4.49721247e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_leaky_relu.py::TestLeakyRelu::test_leaky_relu[ ie_device:CPU - precision:FP32 - alpha:-0.01 - inplace:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_leaky_relu.___torch_mangle_5120.aten_leaky_relu, %x.1 : Tensor): %self.alpha : float = prim::Constant[value=-0.01]() %result.3 : Tensor = aten::leaky_relu(%x.1, %self.alpha) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1632:17 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %result.3) return (%4) fw_re: [[[[ 1.5108132 0.6858161 -1.7883751 ... -1.2584157 -1.4061724 0.07987817] [ 1.4371663 -0.08566468 -0.46846375 ... -0.46052596 0.7809926 0.22686833] [ 0.6420929 -0.8628518 -1.840062 ... -0.95544255 -0.94418746 0.17269188] ... [ 0.72068435 0.6663556 0.6049185 ... 0.5202854 0.01405525 0.9014554 ] [ 0.19344673 -0.2482347 0.6764132 ... 0.24178335 0.6486238 -0.95036864] [-0.09008888 0.31445372 1.1166236 ... 0.20480095 0.02888461 -0.20762801]] [[ 1.1976264 0.13763185 0.74800205 ... 0.53211975 0.4812634 0.32885283] [ 0.22641619 -1.0203128 1.6869396 ... -0.37231228 1.3954164 0.01295578] [-0.10929577 -0.06265934 0.3908004 ... 0.5996763 0.1899604 -0.60171217] ... [ 0.1704131 0.07508913 0.9371625 ... -0.0511395 -1.0090747 0.12826951] [ 0.07446185 -0.23820215 -0.15164192 ... 0.9068208 -0.02332108 2.2561839 ] [ 0.5253896 -1.0019497 -0.40879583 ... -0.66718054 -1.7635018 0.74572915]] [[ 0.24454196 -0.83547884 -1.1094278 ... 0.41198128 -0.81581885 -0.14701168] [ 0.5500192 0.549894 -0.823709 ... -0.10654493 -0.7862035 -1.9249108 ] [-0.640246 0.0673662 2.5706 ... -0.6722124 0.33218658 0.06479143] ... [ 0.8396004 -0.28342795 1.1317565 ... 1.2868038 -1.53134 0.9402699 ] [ 0.71377236 0.6134811 -0.8359797 ... 0.03570747 -0.07638454 0.17160407] [ 0.8471375 0.79775715 1.0996326 ... 0.7591254 -0.72596353 -0.3847596 ]]]]; ov_res: [[[[ 1.5108132 0.6858161 -1.7883751 ... -1.2584157 -1.4061724 0.07987817] [ 1.4371663 -0.08566468 -0.46846375 ... -0.46052596 0.7809926 0.22686833] [ 0.6420929 -0.8628518 -1.840062 ... -0.95544255 -0.94418746 0.17269188] ... 
[ 0.72068435 0.6663556 0.6049185 ... 0.5202854 0.01405525 0.9014554 ] [ 0.19344673 -0.2482347 0.6764132 ... 0.24178335 0.6486238 -0.95036864] [-0.09008888 0.31445372 1.1166236 ... 0.20480095 0.02888461 -0.20762801]] [[ 1.1976264 0.13763185 0.74800205 ... 0.53211975 0.4812634 0.32885283] [ 0.22641619 -1.0203128 1.6869396 ... -0.37231228 1.3954164 0.01295578] [-0.10929577 -0.06265934 0.3908004 ... 0.5996763 0.1899604 -0.60171217] ... [ 0.1704131 0.07508913 0.9371625 ... -0.0511395 -1.0090747 0.12826951] [ 0.07446185 -0.23820215 -0.15164192 ... 0.9068208 -0.02332108 2.2561839 ] [ 0.5253896 -1.0019497 -0.40879583 ... -0.66718054 -1.7635018 0.74572915]] [[ 0.24454196 -0.83547884 -1.1094278 ... 0.41198128 -0.81581885 -0.14701168] [ 0.5500192 0.549894 -0.823709 ... -0.10654493 -0.7862035 -1.9249108 ] [-0.640246 0.0673662 2.5706 ... -0.6722124 0.33218658 0.06479143] ... [ 0.8396004 -0.28342795 1.1317565 ... 1.2868038 -1.53134 0.9402699 ] [ 0.71377236 0.6134811 -0.8359797 ... 0.03570747 -0.07638454 0.17160407] [ 0.8471375 0.79775715 1.0996326 ... 0.7591254 -0.72596353 -0.3847596 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[1.5108132e+00 6.8581611e-01 1.7883752e-02 ... 1.2584156e-02 1.4061724e-02 7.9878174e-02] [1.4371663e+00 8.5664680e-04 4.6846373e-03 ... 4.6052593e-03 7.8099263e-01 2.2686833e-01] [6.4209288e-01 8.6285174e-03 1.8400621e-02 ... 9.5544253e-03 9.4418740e-03 1.7269188e-01] ... [7.2068435e-01 6.6635561e-01 6.0491848e-01 ... 5.2028543e-01 1.4055246e-02 9.0145540e-01] [1.9344673e-01 2.4823470e-03 6.7641318e-01 ... 2.4178335e-01 6.4862382e-01 9.5036859e-03] [9.0088882e-04 3.1445372e-01 1.1166236e+00 ... 2.0480095e-01 2.8884608e-02 2.0762801e-03]] [[1.1976264e+00 1.3763185e-01 7.4800205e-01 ... 5.3211975e-01 4.8126340e-01 3.2885283e-01] [2.2641619e-01 1.0203128e-02 1.6869396e+00 ... 3.7231226e-03 1.3954164e+00 1.2955779e-02] [1.0929577e-03 6.2659336e-04 3.9080039e-01 ... 5.9967631e-01 1.8996041e-01 6.0171215e-03] ... 
[1.7041311e-01 7.5089134e-02 9.3716252e-01 ... 5.1139505e-04 1.0090747e-02 1.2826951e-01] [7.4461855e-02 2.3820214e-03 1.5164191e-03 ... 9.0682077e-01 2.3321083e-04 2.2561839e+00] [5.2538961e-01 1.0019496e-02 4.0879580e-03 ... 6.6718054e-03 1.7635018e-02 7.4572915e-01]] [[2.4454196e-01 8.3547886e-03 1.1094278e-02 ... 4.1198128e-01 8.1581883e-03 1.4701168e-03] [5.5001920e-01 5.4989398e-01 8.2370900e-03 ... 1.0654493e-03 7.8620352e-03 1.9249108e-02] [6.4024595e-03 6.7366198e-02 2.5706000e+00 ... 6.7221243e-03 3.3218658e-01 6.4791426e-02] ... [8.3960038e-01 2.8342796e-03 1.1317565e+00 ... 1.2868038e+00 1.5313400e-02 9.4026989e-01] [7.1377236e-01 6.1348110e-01 8.3597964e-03 ... 3.5707474e-02 7.6384546e-04 1.7160407e-01] [8.4713751e-01 7.9775715e-01 1.0996326e+00 ... 7.5912541e-01 7.2596353e-03 3.8475960e-03]]]]; ov_res: [[[[1.5108132e+00 6.8581611e-01 1.7883752e-02 ... 1.2584156e-02 1.4061724e-02 7.9878174e-02] [1.4371663e+00 8.5664680e-04 4.6846373e-03 ... 4.6052593e-03 7.8099263e-01 2.2686833e-01] [6.4209288e-01 8.6285174e-03 1.8400621e-02 ... 9.5544253e-03 9.4418740e-03 1.7269188e-01] ... [7.2068435e-01 6.6635561e-01 6.0491848e-01 ... 5.2028543e-01 1.4055246e-02 9.0145540e-01] [1.9344673e-01 2.4823470e-03 6.7641318e-01 ... 2.4178335e-01 6.4862382e-01 9.5036859e-03] [9.0088882e-04 3.1445372e-01 1.1166236e+00 ... 2.0480095e-01 2.8884608e-02 2.0762801e-03]] [[1.1976264e+00 1.3763185e-01 7.4800205e-01 ... 5.3211975e-01 4.8126340e-01 3.2885283e-01] [2.2641619e-01 1.0203128e-02 1.6869396e+00 ... 3.7231226e-03 1.3954164e+00 1.2955779e-02] [1.0929577e-03 6.2659336e-04 3.9080039e-01 ... 5.9967631e-01 1.8996041e-01 6.0171215e-03] ... [1.7041311e-01 7.5089134e-02 9.3716252e-01 ... 5.1139505e-04 1.0090747e-02 1.2826951e-01] [7.4461855e-02 2.3820214e-03 1.5164191e-03 ... 9.0682077e-01 2.3321083e-04 2.2561839e+00] [5.2538961e-01 1.0019496e-02 4.0879580e-03 ... 6.6718054e-03 1.7635018e-02 7.4572915e-01]] [[2.4454196e-01 8.3547886e-03 1.1094278e-02 ... 
4.1198128e-01 8.1581883e-03 1.4701168e-03] [5.5001920e-01 5.4989398e-01 8.2370900e-03 ... 1.0654493e-03 7.8620352e-03 1.9249108e-02] [6.4024595e-03 6.7366198e-02 2.5706000e+00 ... 6.7221243e-03 3.3218658e-01 6.4791426e-02] ... [8.3960038e-01 2.8342796e-03 1.1317565e+00 ... 1.2868038e+00 1.5313400e-02 9.4026989e-01] [7.1377236e-01 6.1348110e-01 8.3597964e-03 ... 3.5707474e-02 7.6384546e-04 1.7160407e-01] [8.4713751e-01 7.9775715e-01 1.0996326e+00 ... 7.5912541e-01 7.2596353e-03 3.8475960e-03]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_len.py::TestLen::test_len[ ie_device:CPU - precision:FP32 - input_tensor:[[[ 1.87341085 0.30255743 -0.77057812]] [[-1.24344635 0.2835964 -0.09897041]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_len.___torch_mangle_5121.aten_len, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=3]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:22:64 %3 : NoneType = prim::Constant() %4 : int = aten::len(%input_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:22:39 %5 : Tensor = aten::as_tensor(%4, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:22:23 return (%5) fw_re: 2; ov_res: 2 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_len.py::TestLen::test_len[ ie_device:CPU - precision:FP32 - input_tensor:[[ 0.32545942 0.2570968 1.28733124 -0.28706076 -1.05374277 -0.15764736 0.14828123] [-0.55279985 -1.08682619 1.53170283 -1.29100306 0.21290948 0.07822279 -1.35352489] [ 0.06325296 -0.58688324 0.43343826 -0.89361142 0.84023224 0.35538187 0.16821865]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_len.___torch_mangle_5123.aten_len, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=3]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:22:64 %3 : NoneType = prim::Constant() %4 : int = aten::len(%input_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:22:39 %5 : Tensor = aten::as_tensor(%4, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:22:23 return (%5) fw_re: 3; ov_res: 3 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_len.py::TestLen::test_len[ ie_device:CPU - precision:FP32 - input_tensor:[[[[-0.28386523 1.37981696 -0.66200744 -0.51981429] [ 1.50888993 -1.75520774 0.22348406 -1.56365592] [-2.36062258 0.14785357 0.39727004 1.09246027] [-1.770553 0.88299208 -0.53810216 0.27611971]]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_len.___torch_mangle_5125.aten_len, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=3]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:22:64 %3 : NoneType = prim::Constant() %4 : int = aten::len(%input_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:22:39 %5 : Tensor = aten::as_tensor(%4, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:22:23 return (%5) fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_len.py::TestLen::test_len_int_list[ ie_device:CPU - precision:FP32 - input_tensor:[[[ 1.87341085 0.30255743 -0.77057812]] [[-1.24344635 0.2835964 -0.09897041]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_len.___torch_mangle_5127.aten_len, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=3]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:33:60 %3 : NoneType = prim::Constant() %int_list.1 : int[] = aten::size(%input_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:32:27 %5 : int = aten::len(%int_list.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:33:39 %6 : Tensor = aten::as_tensor(%5, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:33:23 return (%6) me element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::sub with schema: aten::sub.int(int a, int b) -> int: Check 'element::Type::merge(result_et, node->get_input_element_type(0), node->get_input_element_type(1))' failed at src/core/src/op/util/elementwise_args.cpp:19: While validating node 'v1::Subtract Subtract_4882369 (Squeeze_4882368[0]:i64[], 
Constant_4882323[0]:i32[]) -> (dynamic[...])' with friendly_name 'Subtract_4882369': Arguments do not have the same element type (arg0 element type: i64, arg1 element type: i32). Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. 
Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (fw_re: 3; ov_res: 3 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_len.py::TestLen::test_len_int_list[ ie_device:CPU - precision:FP32 - input_tensor:[[ 0.32545942 0.2570968 1.28733124 -0.28706076 -1.05374277 -0.15764736 0.14828123] [-0.55279985 -1.08682619 1.53170283 -1.29100306 0.21290948 0.07822279 -1.35352489] [ 0.06325296 -0.58688324 0.43343826 -0.89361142 0.84023224 0.35538187 0.16821865]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_len.___torch_mangle_5129.aten_len, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=3]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:33:60 %3 : NoneType = prim::Constant() %int_list.1 : int[] = aten::size(%input_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:32:27 %5 : int = aten::len(%int_list.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:33:39 %6 : Tensor = aten::as_tensor(%5, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:33:23 return (%6) fw_re: 2; ov_res: 2 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_len.py::TestLen::test_len_int_list[ ie_device:CPU - precision:FP32 - input_tensor:[[[[-0.28386523 1.37981696 -0.66200744 -0.51981429] [ 1.50888993 -1.75520774 0.22348406 -1.56365592] [-2.36062258 0.14785357 0.39727004 1.09246027] [-1.770553 0.88299208 -0.53810216 0.27611971]]]] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_len.___torch_mangle_5131.aten_len, %input_tensor.1 : Tensor): %2 : int = prim::Constant[value=3]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:33:60 %3 : NoneType = prim::Constant() %int_list.1 : int[] = aten::size(%input_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:32:27 %5 : int = aten::len(%int_list.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:33:39 %6 : Tensor = aten::as_tensor(%5, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_len.py:33:23 return (%6) fw_re: 4; ov_res: 4 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_size_listunpack[ ie_device:CPU - precision:FP32 ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5132.prim_listunpack, %in1.1 : Tensor, %in2 : Tensor, %in3 : Tensor, %in4 : Tensor): %5 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:25:29 %a.1 : int, %b.1 : int, %c.1 : int, %d.1 : int = prim::ListUnpack(%5) %10 : (int, int, int, int) = prim::TupleConstruct(%a.1, %b.1, %c.1, %d.1) return (%10) fw_re: 8; ov_res: 8 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 3; ov_res: 3 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 512; ov_res: 512 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 512; ov_res: 512 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_size_slice_listunpack[ ie_device:CPU - precision:FP32 - slices:(0, 2, 1) ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5134.prim_listunpack, %in1.1 : Tensor, %in2 : Tensor, %in3 : Tensor, %in4 : Tensor): %self.step : int = prim::Constant[value=1]() %self.stop : int = prim::Constant[value=2]() %self.start : int = prim::Constant[value=0]() %8 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:45:23 %9 : int[] = aten::slice(%8, %self.start, %self.stop, %self.step) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:45:23 %a.1 : int, %b.1 : int = prim::ListUnpack(%9) %12 : (int, int) = prim::TupleConstruct(%a.1, %b.1) return (%12) fw_re: 8; ov_res: 8 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 3; ov_res: 3 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_size_slice_listunpack[ ie_device:CPU - precision:FP32 - slices:(0, 4, 2) ] | 0.23 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5136.prim_listunpack, %in1.1 : Tensor, %in2 : Tensor, %in3 : Tensor, %in4 : Tensor): %self.step : int = prim::Constant[value=2]() %self.stop : int = prim::Constant[value=4]() %self.start : int = prim::Constant[value=0]() %8 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:45:23 %9 : int[] = aten::slice(%8, %self.start, %self.stop, %self.step) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:45:23 %a.1 : int, %b.1 : int = prim::ListUnpack(%9) %12 : (int, int) = prim::TupleConstruct(%a.1, %b.1) return (%12) fw_re: 8; ov_res: 8 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 512; ov_res: 512 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_size_slice_listunpack[ ie_device:CPU - precision:FP32 - slices:(-1, -3, -1) ] | 0.23 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5138.prim_listunpack, %in1.1 : Tensor, %in2 : Tensor, %in3 : Tensor, %in4 : Tensor): %self.stop : int = prim::Constant[value=-3]() %self.start : int = prim::Constant[value=-1]() %7 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:45:23 %8 : int[] = aten::slice(%7, %self.start, %self.stop, %self.start) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:45:23 %a.1 : int, %b.1 : int = prim::ListUnpack(%8) %11 : (int, int) = prim::TupleConstruct(%a.1, %b.1) return (%11) fw_re: 512; ov_res: 512 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 512; ov_res: 512 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_size_slice_listunpack[ ie_device:CPU - precision:FP32 - slices:(-3, -1, 1) ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5140.prim_listunpack, %in1.1 : Tensor, %in2 : Tensor, %in3 : Tensor, %in4 : Tensor): %self.step : int = prim::Constant[value=1]() %self.stop : int = prim::Constant[value=-1]() %self.start : int = prim::Constant[value=-3]() %8 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:45:23 %9 : int[] = aten::slice(%8, %self.start, %self.stop, %self.step) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:45:23 %a.1 : int, %b.1 : int = prim::ListUnpack(%9) %12 : (int, int) = prim::TupleConstruct(%a.1, %b.1) return (%12) fw_re: 3; ov_res: 3 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 512; ov_res: 512 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_listconstruct_append_listunpack[ ie_device:CPU - precision:FP32 ] | 0.38 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5142.prim_listunpack, %in1.1 : Tensor, %in2.1 : Tensor, %in3.1 : Tensor, %in4.1 : Tensor): %in_list.1 : Tensor[] = prim::ListConstruct(%in1.1, %in2.1) %6 : Tensor[] = aten::append(%in_list.1, %in3.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:56:16 %7 : Tensor[] = aten::append(%in_list.1, %in4.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:57:16 %a.1 : Tensor, %b.1 : Tensor, %c.1 : Tensor, %d.1 : Tensor = prim::ListUnpack(%in_list.1) %12 : (Tensor, Tensor, Tensor, Tensor) = prim::TupleConstruct(%a.1, %b.1, %c.1, %d.1) return (%12) fw_re: [[[[ 9.14817843e-02 3.62751890e-01 -4.37591635e-01 ... 4.97788105e-01 -1.27459223e-01 -3.20915350e-02] [-6.09229495e-01 -6.56859259e-01 -2.06867765e+00 ... 2.74544532e+00 -1.12620328e-01 -1.11117577e+00] [ 7.19168834e-01 -1.02051720e+00 2.23016969e-01 ... 8.76222805e-01 -1.10244541e+00 1.77565017e-01] ... [-5.71272563e-01 -1.66800928e-01 7.37502597e-01 ... 4.99892038e-01 -1.89348991e-01 -1.02743311e+00] [-2.82302550e-01 -2.18532773e-01 1.06142666e-01 ... -4.05698942e-01 -9.94876560e-01 1.95198028e+00] [-2.47500042e-02 8.04114333e-02 1.00336851e+00 ... 3.64600033e-02 3.01421425e-01 1.26280938e+00]] [[-2.29755319e-01 3.02789422e-01 -5.89771995e-01 ... 7.08101952e-01 1.14469866e+00 -4.74453324e-01] [-1.42852795e+00 3.72247731e-01 -5.57619315e-01 ... -2.78605899e+00 -2.89644094e-01 -9.73407189e-01] [-1.82095079e-01 -1.16309291e+00 -1.03589887e+00 ... 2.22634636e-01 6.34684736e-01 -1.12393611e+00] ... [ 2.18605502e+00 -3.34729204e-01 6.67692928e-01 ... 2.19273011e-01 1.49846963e+00 -4.86560867e-01] [-3.29372267e-01 -7.77315261e-02 1.89884799e-01 ... 1.76006140e+00 -7.83286194e-01 1.92751825e+00] [ 1.16083965e+00 -1.54500614e-01 -2.06579290e-01 ... 
-2.55619302e-01 -6.36843989e-01 9.44730321e-01]] [[ 4.00744151e-01 6.40586476e-01 -6.47583546e-02 ... -2.07422270e-01 9.04223725e-01 -1.04827263e+00] [ 1.42332803e+00 -1.78284551e+00 2.55517477e+00 ... -1.81404650e-02 1.99691166e+00 9.18704431e-01] [ 6.49077950e-01 7.56320312e-01 -7.89039573e-01 ... -1.28487224e+00 -1.00974739e-01 -2.22338237e-01] ... [-3.04321051e-01 4.86550545e-01 5.07209006e-01 ... -1.67234981e+00 -6.27986997e-01 -1.72404288e-01] [ 7.33940877e-01 1.80081741e+00 2.52564872e+00 ... -3.85853992e-01 1.08044346e+00 7.75862999e-01] [-7.63076593e-01 2.20551235e+00 2.45804553e-01 ... 1.89656054e+00 -4.02644066e-01 -5.25951603e-01]]] [[[-7.72175081e-01 -6.81089826e-01 1.00787369e+00 ... -2.01534420e+00 -9.47799692e-02 -3.81412241e-01] [-1.74380683e-01 -5.00871327e-01 9.52956480e-01 ... 1.12116595e+00 -1.78676163e+00 3.81194960e-02] [ 1.05756576e+00 -1.40335717e+00 5.41594078e-01 ... -2.76689690e-01 -1.18548402e+00 6.73877880e-01] ... [-1.02384219e+00 -1.37464899e+00 2.05047883e-01 ... 3.54891910e-01 -2.23882463e-01 1.11236946e-01] [-2.89438837e-01 -3.96880790e-01 1.34381951e+00 ... -1.20479938e-01 2.83994720e-01 -2.97808299e-01] [-9.23101125e-02 2.45250581e+00 -6.11466437e-01 ... 1.45147256e-01 -6.99981283e-01 4.90133798e-02]] [[ 1.33098130e+00 1.28528898e+00 -1.38974410e+00 ... 3.71264619e-01 8.05539910e-01 2.65910873e-01] [-7.29334993e-01 1.46274523e-02 -1.09788546e-02 ... 2.74796806e+00 8.12273070e-01 -1.07113704e+00] [-2.87335489e-01 3.57770374e-01 5.03766078e-02 ... 5.24621332e-01 4.46971564e-01 1.87730417e+00] ... [-8.65221134e-01 1.22264138e-01 1.24517411e-01 ... -3.01200853e+00 8.96986754e-01 -3.50996712e-01] [-1.27286978e+00 3.40438231e-01 -1.64632404e-01 ... -5.30210109e-01 -2.04137117e-01 6.83457135e-01] [-1.85047656e+00 -1.17618463e+00 -6.91097367e-01 ... -1.59510578e+00 -1.33155628e-01 -5.05686322e-01]] [[ 6.36052131e-01 1.83504053e-01 5.40429512e-01 ... 
4.10793788e-01 6.94860523e-01 -1.31893245e-02] [ 7.25364294e-01 -8.96770572e-02 -6.14215178e-01 ... 9.11313867e-01 1.48188481e+00 4.96311475e-01] [ 5.11178995e-01 -7.88777590e-01 6.23687930e-01 ... 1.86957072e+00 3.21463203e-01 5.23986584e-01] ... [-9.80799974e-01 1.88859709e+00 -2.65267606e-01 ... -7.57076932e-02 -5.03675159e-01 -6.56903652e-01] [-4.00597023e-01 -1.59264889e+00 9.84150500e-01 ... -9.52369637e-01 -7.76207361e-01 9.65877281e-01] [-1.34130118e+00 1.28062551e+00 4.11226828e-01 ... 1.14881036e+00 -2.56394730e-02 -7.23540657e-02]]] [[[ 3.85137076e-02 -2.13609948e-02 -1.76026343e-01 ... -6.13985020e-01 1.65157080e+00 2.23798235e+00] [ 6.63516295e-02 -1.89882921e+00 5.23964345e-01 ... 2.43459703e-01 -9.70577368e-01 -1.18866283e+00] [-1.73766999e+00 3.88047792e-01 1.57170632e+00 ... -9.12452371e-03 7.68752900e-01 -4.43673491e-01] ... [-6.90659123e-01 -5.86833745e-01 6.05632887e-01 ... 4.45489002e-01 3.13957617e-01 7.77773836e-01] [ 2.14074470e+00 1.63743933e-02 3.42116229e-02 ... 1.58301968e+00 -2.70572108e+00 1.09641666e+00] [ 3.60721952e-01 9.66192627e-01 3.14911812e-01 ... 1.45658880e+00 -8.01512898e-01 3.51535755e-01]] [[-4.85446996e-01 9.73983524e-02 1.37305838e-01 ... 9.67441484e-01 4.70133183e-01 -8.78575376e-01] [ 1.78259612e+00 -1.23052268e+00 -1.60093129e+00 ... -6.09976613e-01 -8.56203133e-01 1.86345473e+00] [ 5.92193728e-01 -1.86689097e-01 -1.06401316e+00 ... -1.20251031e+00 -1.46422694e-01 -1.34357805e-01] ... [ 1.25584080e+00 5.16976331e-01 -9.69966188e-02 ... 3.80030683e-01 6.20665484e-01 1.86228812e-01] [ 2.63394572e+00 7.59061487e-02 -3.87690059e-01 ... -3.92984504e-01 4.43055931e-01 -3.54096475e-01] [ 8.53331854e-01 4.62756855e-01 -7.23929151e-01 ... -2.37620398e+00 1.31882565e-01 -1.35016527e+00]] [[-4.39043279e-01 -3.37702566e-01 -1.21142843e+00 ... -1.11090100e+00 -1.83551358e+00 -8.74523864e-01] [-1.13847903e+00 3.11382081e-01 6.44591357e-01 ... 
3.85642057e-01 -7.85599307e-02 2.93090635e-01] [-9.18289945e-01 3.23822099e-01 -8.67651610e-01 ... 6.22765135e-03 2.00956516e-02 2.88252208e-01] ... [-3.54561169e-01 6.59511528e-01 -6.74815569e-01 ... -9.89565722e-01 6.33973403e-01 -3.09555738e-01] [-3.03119889e-01 -2.83914246e+00 3.56024717e-01 ... -5.74320057e-01 1.52006185e+00 -1.05737246e+00] [-2.16524629e-01 9.64205891e-01 -6.24369771e-01 ... 7.24269554e-03 -2.29966525e-01 3.20560216e-01]]] ... [[[ 1.14515799e-01 -1.29225921e-01 9.68505700e-01 ... 4.29670933e-01 9.85097397e-01 9.18946122e-01] [ 3.88498437e-02 1.22901045e+00 -4.59876260e-01 ... -1.16026467e+00 -3.40934317e-01 1.04289750e-01] [-1.29821854e-01 1.37464820e+00 6.90640107e-01 ... 1.13149268e+00 8.78982184e-01 -2.72018171e-01] ... [-7.07355000e-01 1.64576147e-01 -2.29223804e-03 ... -1.85720763e-01 6.96544718e-01 4.36683767e-01] [-7.10696261e-01 -1.68527498e+00 -2.05903193e-01 ... -3.77703900e-01 -7.46584485e-01 3.51087898e-01] [ 8.94496339e-01 7.95135862e-01 1.55974417e+00 ... 9.24916681e-01 -8.98564415e-01 -2.54150942e-01]] [[-3.51595502e-01 -3.10387824e-01 -2.37714698e-01 ... 4.41085416e-01 -7.05426726e-01 -3.45147221e-01] [ 2.84416721e-01 -1.45587736e-01 2.31149486e+00 ... -1.22526697e+00 -4.76882773e-01 1.58974248e+00] [-2.67281129e-01 5.65166636e-02 -2.06759171e-02 ... -7.33732470e-01 1.81873018e-01 3.37868943e-01] ... [ 5.78118595e-01 -1.34351288e-01 9.83445202e-01 ... 1.43917947e-01 -4.88795910e-01 -1.03797774e+00] [ 7.16375902e-01 -3.76716990e-01 1.40990867e+00 ... -4.79095824e-01 1.25494757e-01 3.53891311e-03] [-3.88491341e-01 1.55875112e-01 -1.18697660e-01 ... 1.73349853e+00 -1.98026809e+00 1.38395912e+00]] [[ 1.87982538e-01 1.38500374e-01 3.25837148e-01 ... -7.41427598e-01 -5.00391402e-01 -3.04704915e-01] [ 1.75330752e+00 1.53795941e+00 1.42582675e+00 ... 1.95175021e-02 1.48820989e-01 1.25617713e+00] [-1.02003625e+00 -1.19386640e+00 1.22362951e+00 ... -5.45726765e-01 -4.50409671e-01 1.27360897e+00] ... 
[ 8.14187287e-01 -1.80418726e-01 -5.78711517e-01 ... -6.83080969e-01 1.73217173e+00 1.42961016e+00] [-5.29276382e-02 -1.51114444e+00 -9.39253916e-01 ... -7.87018777e-01 3.18448702e-02 2.18295855e+00] [ 3.36735093e-01 3.69479766e-01 -2.15137709e-01 ... -1.25960800e+00 2.17461798e-01 1.14950301e-01]]] [[[-1.27226165e+00 -1.93346560e-01 7.30943334e-02 ... 7.68498373e-01 1.08849683e+00 1.03045962e+00] [ 2.46309912e-01 -3.38415946e-01 8.79910588e-01 ... 8.63046294e-01 4.83276224e-01 1.77560051e+00] [ 1.09071643e+00 3.05009126e-01 6.80824691e-01 ... -1.69956031e+00 7.90186249e-01 1.77393628e-01] ... [ 5.87693214e-01 5.00790447e-01 -1.33785124e+00 ... -9.27909481e-01 -5.00944075e-01 -9.68448272e-01] [-6.49227726e-01 -4.99150629e-01 1.94181975e+00 ... -1.05418496e+00 -7.37484657e-01 1.37200108e+00] [ 9.41375310e-01 3.32341515e-01 -1.68938035e+00 ... 6.76475010e-01 1.90572489e+00 -7.16757599e-02]] [[-2.69315213e+00 4.57230820e-01 -3.89949367e-01 ... 3.07056758e-01 1.26415269e+00 7.13029418e-01] [ 6.33911925e-01 5.80470837e-01 2.20564695e+00 ... 7.30015698e-01 -1.12264429e+00 4.75341519e-02] [-1.07649506e-01 2.57104447e-01 -7.37967733e-01 ... 4.73414290e-01 -2.35354299e-01 -3.92469387e-01] ... [-1.84170827e+00 -2.20553948e-01 4.63784432e-01 ... 3.41030492e-01 9.15783414e-01 4.29192726e-02] [-6.32694182e-01 -1.46633081e+00 -1.01556612e+00 ... -4.37983232e-01 6.06358962e-01 9.74618902e-01] [ 6.70734080e-01 -2.70488954e-01 2.46888030e+00 ... 6.63703804e-01 -3.47363355e-01 -4.60869574e-01]] [[-3.40421860e-01 5.74515354e-01 -4.86601778e-01 ... 1.42738512e+00 -8.13290139e-01 -1.40080755e+00] [ 1.47856584e+00 1.19483670e+00 3.37493898e-02 ... 1.30485649e+00 -3.69403013e-01 -4.90620529e-01] [-5.89392613e-01 -3.55194174e-02 -3.76568762e-01 ... -4.39148294e-01 8.79888296e-01 -6.67679440e-01] ... [ 8.14381543e-02 -1.91458527e+00 2.62715364e-01 ... 1.79623392e+00 -3.20493079e-01 -3.66419134e-01] [-1.85415474e+00 -1.88679588e+00 2.01889008e-01 ... 
1.41195407e+00 6.54359588e-01 1.04746144e-01] [-1.18878203e+00 2.57648808e+00 -1.26327283e+00 ... 1.17869245e+00 2.51933955e-01 -5.60358455e-01]]] [[[-3.88807126e-02 -8.27491097e-01 -5.13800787e-01 ... -7.30792191e-01 -1.31989865e+00 -1.02405017e+00] [-4.24713205e-01 -2.50823464e-02 -1.24935198e+00 ... -2.01893350e-01 1.98426473e+00 2.17775470e-01] [-2.01467346e-01 4.26088811e-01 -1.62511230e+00 ... -2.81299306e+00 -4.38193508e-01 -1.08676551e+00] ... [ 6.54333083e-01 1.60288166e+00 2.71735300e-01 ... 5.13127372e-01 2.11298370e-01 -1.20696256e+00] [ 1.78382838e+00 -1.48304809e+00 -5.33821414e-01 ... 5.17328714e-01 -7.31184064e-01 -2.36813323e-02] [ 1.62255022e+00 -1.57183528e+00 1.01581388e+00 ... -5.76196813e-01 -1.25310541e+00 1.81575047e+00]] [[-2.80085093e-01 1.49783503e+00 -1.37314977e+00 ... 1.90502301e+00 1.40794714e-01 6.50481192e-01] [ 1.55254962e+00 1.43689561e+00 -6.71553666e-01 ... -5.76736190e-01 -7.77737723e-01 1.39040496e-01] [-3.91540322e-01 1.49333642e+00 -6.29163671e-01 ... -1.47580262e+00 -5.45174010e-02 8.97034070e-01] ... [-7.99493800e-02 -5.27121927e-01 4.68916750e-01 ... -7.74129254e-01 -3.17323600e-01 1.30310111e+00] [-1.54685088e+00 -3.21760146e-01 -5.04653885e-02 ... 9.44888008e-01 -1.47199465e+00 -8.58025795e-01] [ 9.44220133e-02 3.47557437e-01 -7.91530050e-01 ... -1.62097800e+00 7.37421801e-01 -7.04084967e-01]] [[-1.58823854e+00 -1.17456746e+00 1.42102144e+00 ... -2.58901253e+00 1.06950579e-01 -1.94986951e+00] [-7.94800121e-01 5.91680984e-01 -1.71116785e+00 ... 7.56254804e-01 -3.14555091e-01 2.70716379e+00] [ 2.45821299e-01 -4.77095382e-01 -9.32361112e-02 ... -2.88645027e-01 -1.06306581e+00 -1.26590046e+00] ... [-1.24150212e+00 1.92610641e+00 3.39584315e-01 ... 1.84185751e+00 7.81012228e-01 -7.79188328e-01] [ 5.69903635e-01 2.11962170e-02 7.51469516e-01 ... 4.09521553e-01 -6.49473282e-01 2.77835065e-01] [-1.18578967e+00 2.27491956e+00 -7.10595538e-01 ... 
8.79162986e-01 1.10065987e-01 -2.42011719e-01]]]]; ov_res: [[[[ 9.14817825e-02 3.62751901e-01 -4.37591642e-01 ... 4.97788101e-01 -1.27459228e-01 -3.20915356e-02] [-6.09229505e-01 -6.56859279e-01 -2.06867766e+00 ... 2.74544525e+00 -1.12620331e-01 -1.11117578e+00] [ 7.19168842e-01 -1.02051723e+00 2.23016962e-01 ... 8.76222789e-01 -1.10244536e+00 1.77565023e-01] ... [-5.71272552e-01 -1.66800931e-01 7.37502575e-01 ... 4.99892026e-01 -1.89348996e-01 -1.02743316e+00] [-2.82302558e-01 -2.18532771e-01 1.06142670e-01 ... -4.05698955e-01 -9.94876564e-01 1.95198023e+00] [-2.47500036e-02 8.04114342e-02 1.00336850e+00 ... 3.64600047e-02 3.01421434e-01 1.26280940e+00]] [[-2.29755312e-01 3.02789420e-01 -5.89771986e-01 ... 7.08101928e-01 1.14469862e+00 -4.74453330e-01] [-1.42852795e+00 3.72247726e-01 -5.57619333e-01 ... -2.78605890e+00 -2.89644092e-01 -9.73407209e-01] [-1.82095081e-01 -1.16309285e+00 -1.03589892e+00 ... 2.22634643e-01 6.34684741e-01 -1.12393606e+00] ... [ 2.18605494e+00 -3.34729195e-01 6.67692900e-01 ... 2.19273016e-01 1.49846959e+00 -4.86560881e-01] [-3.29372257e-01 -7.77315274e-02 1.89884797e-01 ... 1.76006138e+00 -7.83286214e-01 1.92751825e+00] [ 1.16083968e+00 -1.54500619e-01 -2.06579283e-01 ... -2.55619287e-01 -6.36843979e-01 9.44730341e-01]] [[ 4.00744140e-01 6.40586495e-01 -6.47583529e-02 ... -2.07422271e-01 9.04223740e-01 -1.04827261e+00] [ 1.42332804e+00 -1.78284550e+00 2.55517483e+00 ... -1.81404650e-02 1.99691164e+00 9.18704450e-01] [ 6.49077952e-01 7.56320298e-01 -7.89039552e-01 ... -1.28487229e+00 -1.00974739e-01 -2.22338229e-01] ... [-3.04321051e-01 4.86550540e-01 5.07209003e-01 ... -1.67234981e+00 -6.27986968e-01 -1.72404289e-01] [ 7.33940899e-01 1.80081737e+00 2.52564883e+00 ... -3.85854006e-01 1.08044350e+00 7.75862992e-01] [-7.63076603e-01 2.20551229e+00 2.45804548e-01 ... 1.89656055e+00 -4.02644068e-01 -5.25951624e-01]]] [[[-7.72175074e-01 -6.81089818e-01 1.00787365e+00 ... 
-2.01534414e+00 -9.47799683e-02 -3.81412238e-01] [-1.74380690e-01 -5.00871301e-01 9.52956498e-01 ... 1.12116599e+00 -1.78676164e+00 3.81194949e-02] [ 1.05756581e+00 -1.40335715e+00 5.41594088e-01 ... -2.76689678e-01 -1.18548405e+00 6.73877895e-01] ... [-1.02384222e+00 -1.37464893e+00 2.05047876e-01 ... 3.54891896e-01 -2.23882467e-01 1.11236945e-01] [-2.89438844e-01 -3.96880776e-01 1.34381950e+00 ... -1.20479941e-01 2.83994734e-01 -2.97808290e-01] [-9.23101157e-02 2.45250583e+00 -6.11466408e-01 ... 1.45147249e-01 -6.99981272e-01 4.90133800e-02]] [[ 1.33098125e+00 1.28528893e+00 -1.38974404e+00 ... 3.71264607e-01 8.05539906e-01 2.65910864e-01] [-7.29335010e-01 1.46274520e-02 -1.09788543e-02 ... 2.74796796e+00 8.12273085e-01 -1.07113707e+00] [-2.87335485e-01 3.57770383e-01 5.03766090e-02 ... 5.24621308e-01 4.46971565e-01 1.87730420e+00] ... [-8.65221143e-01 1.22264139e-01 1.24517411e-01 ... -3.01200843e+00 8.96986783e-01 -3.50996703e-01] [-1.27286983e+00 3.40438217e-01 -1.64632410e-01 ... -5.30210137e-01 -2.04137117e-01 6.83457136e-01] [-1.85047650e+00 -1.17618465e+00 -6.91097379e-01 ... -1.59510577e+00 -1.33155629e-01 -5.05686343e-01]] [[ 6.36052132e-01 1.83504060e-01 5.40429533e-01 ... 4.10793781e-01 6.94860518e-01 -1.31893242e-02] [ 7.25364268e-01 -8.96770582e-02 -6.14215195e-01 ... 9.11313891e-01 1.48188484e+00 4.96311486e-01] [ 5.11178970e-01 -7.88777590e-01 6.23687923e-01 ... 1.86957073e+00 3.21463197e-01 5.23986578e-01] ... [-9.80799973e-01 1.88859713e+00 -2.65267611e-01 ... -7.57076964e-02 -5.03675163e-01 -6.56903625e-01] [-4.00597036e-01 -1.59264886e+00 9.84150529e-01 ... -9.52369630e-01 -7.76207387e-01 9.65877295e-01] [-1.34130120e+00 1.28062546e+00 4.11226839e-01 ... 1.14881039e+00 -2.56394725e-02 -7.23540634e-02]]] [[[ 3.85137089e-02 -2.13609952e-02 -1.76026344e-01 ... -6.13985002e-01 1.65157080e+00 2.23798227e+00] [ 6.63516298e-02 -1.89882922e+00 5.23964345e-01 ... 
2.43459702e-01 -9.70577359e-01 -1.18866289e+00] [-1.73766994e+00 3.88047785e-01 1.57170630e+00 ... -9.12452396e-03 7.68752873e-01 -4.43673491e-01] ... [-6.90659106e-01 -5.86833775e-01 6.05632901e-01 ... 4.45488989e-01 3.13957602e-01 7.77773857e-01] [ 2.14074469e+00 1.63743924e-02 3.42116244e-02 ... 1.58301973e+00 -2.70572114e+00 1.09641671e+00] [ 3.60721946e-01 9.66192603e-01 3.14911813e-01 ... 1.45658875e+00 -8.01512897e-01 3.51535767e-01]] [[-4.85446990e-01 9.73983556e-02 1.37305841e-01 ... 9.67441499e-01 4.70133185e-01 -8.78575385e-01] [ 1.78259611e+00 -1.23052263e+00 -1.60093129e+00 ... -6.09976590e-01 -8.56203139e-01 1.86345470e+00] [ 5.92193723e-01 -1.86689094e-01 -1.06401312e+00 ... -1.20251036e+00 -1.46422699e-01 -1.34357810e-01] ... [ 1.25584078e+00 5.16976357e-01 -9.69966203e-02 ... 3.80030692e-01 6.20665491e-01 1.86228812e-01] [ 2.63394570e+00 7.59061500e-02 -3.87690067e-01 ... -3.92984509e-01 4.43055928e-01 -3.54096472e-01] [ 8.53331864e-01 4.62756842e-01 -7.23929167e-01 ... -2.37620401e+00 1.31882563e-01 -1.35016525e+00]] [[-4.39043283e-01 -3.37702572e-01 -1.21142840e+00 ... -1.11090100e+00 -1.83551359e+00 -8.74523878e-01] [-1.13847899e+00 3.11382085e-01 6.44591331e-01 ... 3.85642052e-01 -7.85599276e-02 2.93090641e-01] [-9.18289959e-01 3.23822111e-01 -8.67651582e-01 ... 6.22765115e-03 2.00956520e-02 2.88252205e-01] ... [-3.54561180e-01 6.59511507e-01 -6.74815595e-01 ... -9.89565730e-01 6.33973420e-01 -3.09555739e-01] [-3.03119898e-01 -2.83914256e+00 3.56024712e-01 ... -5.74320078e-01 1.52006185e+00 -1.05737245e+00] [-2.16524631e-01 9.64205861e-01 -6.24369800e-01 ... 7.24269543e-03 -2.29966521e-01 3.20560217e-01]]] ... [[[ 1.14515796e-01 -1.29225925e-01 9.68505681e-01 ... 4.29670930e-01 9.85097408e-01 9.18946147e-01] [ 3.88498455e-02 1.22901046e+00 -4.59876269e-01 ... -1.16026473e+00 -3.40934306e-01 1.04289748e-01] [-1.29821852e-01 1.37464821e+00 6.90640092e-01 ... 1.13149273e+00 8.78982186e-01 -2.72018164e-01] ... 
[-7.07355022e-01 1.64576143e-01 -2.29223794e-03 ... -1.85720757e-01 6.96544707e-01 4.36683774e-01] [-7.10696280e-01 -1.68527496e+00 -2.05903187e-01 ... -3.77703905e-01 -7.46584475e-01 3.51087898e-01] [ 8.94496322e-01 7.95135856e-01 1.55974412e+00 ... 9.24916685e-01 -8.98564398e-01 -2.54150927e-01]] [[-3.51595491e-01 -3.10387820e-01 -2.37714693e-01 ... 4.41085428e-01 -7.05426753e-01 -3.45147222e-01] [ 2.84416735e-01 -1.45587742e-01 2.31149483e+00 ... -1.22526693e+00 -4.76882786e-01 1.58974242e+00] [-2.67281115e-01 5.65166622e-02 -2.06759162e-02 ... -7.33732462e-01 1.81873024e-01 3.37868929e-01] ... [ 5.78118622e-01 -1.34351283e-01 9.83445227e-01 ... 1.43917948e-01 -4.88795906e-01 -1.03797770e+00] [ 7.16375887e-01 -3.76717001e-01 1.40990865e+00 ... -4.79095817e-01 1.25494763e-01 3.53891309e-03] [-3.88491333e-01 1.55875117e-01 -1.18697658e-01 ... 1.73349857e+00 -1.98026812e+00 1.38395917e+00]] [[ 1.87982544e-01 1.38500378e-01 3.25837135e-01 ... -7.41427600e-01 -5.00391424e-01 -3.04704905e-01] [ 1.75330758e+00 1.53795946e+00 1.42582679e+00 ... 1.95175018e-02 1.48820996e-01 1.25617719e+00] [-1.02003622e+00 -1.19386637e+00 1.22362947e+00 ... -5.45726776e-01 -4.50409681e-01 1.27360892e+00] ... [ 8.14187288e-01 -1.80418730e-01 -5.78711510e-01 ... -6.83080971e-01 1.73217177e+00 1.42961013e+00] [-5.29276393e-02 -1.51114440e+00 -9.39253926e-01 ... -7.87018776e-01 3.18448693e-02 2.18295860e+00] [ 3.36735100e-01 3.69479775e-01 -2.15137705e-01 ... -1.25960803e+00 2.17461795e-01 1.14950299e-01]]] [[[-1.27226162e+00 -1.93346560e-01 7.30943307e-02 ... 7.68498361e-01 1.08849680e+00 1.03045964e+00] [ 2.46309906e-01 -3.38415951e-01 8.79910588e-01 ... 8.63046288e-01 4.83276218e-01 1.77560055e+00] [ 1.09071648e+00 3.05009127e-01 6.80824697e-01 ... -1.69956028e+00 7.90186226e-01 1.77393630e-01] ... [ 5.87693214e-01 5.00790477e-01 -1.33785129e+00 ... -9.27909493e-01 -5.00944078e-01 -9.68448281e-01] [-6.49227738e-01 -4.99150634e-01 1.94181979e+00 ... 
-1.05418491e+00 -7.37484634e-01 1.37200105e+00] [ 9.41375315e-01 3.32341522e-01 -1.68938029e+00 ... 6.76474988e-01 1.90572488e+00 -7.16757625e-02]] [[-2.69315219e+00 4.57230806e-01 -3.89949381e-01 ... 3.07056755e-01 1.26415265e+00 7.13029444e-01] [ 6.33911908e-01 5.80470860e-01 2.20564699e+00 ... 7.30015695e-01 -1.12264431e+00 4.75341529e-02] [-1.07649505e-01 2.57104456e-01 -7.37967730e-01 ... 4.73414302e-01 -2.35354304e-01 -3.92469376e-01] ... [-1.84170830e+00 -2.20553949e-01 4.63784426e-01 ... 3.41030478e-01 9.15783405e-01 4.29192744e-02] [-6.32694185e-01 -1.46633077e+00 -1.01556611e+00 ... -4.37983245e-01 6.06358945e-01 9.74618912e-01] [ 6.70734107e-01 -2.70488948e-01 2.46888041e+00 ... 6.63703799e-01 -3.47363353e-01 -4.60869581e-01]] [[-3.40421855e-01 5.74515343e-01 -4.86601770e-01 ... 1.42738509e+00 -8.13290119e-01 -1.40080750e+00] [ 1.47856581e+00 1.19483674e+00 3.37493904e-02 ... 1.30485654e+00 -3.69403005e-01 -4.90620524e-01] [-5.89392602e-01 -3.55194174e-02 -3.76568764e-01 ... -4.39148307e-01 8.79888296e-01 -6.67679429e-01] ... [ 8.14381540e-02 -1.91458523e+00 2.62715369e-01 ... 1.79623389e+00 -3.20493072e-01 -3.66419137e-01] [-1.85415471e+00 -1.88679588e+00 2.01889008e-01 ... 1.41195405e+00 6.54359579e-01 1.04746141e-01] [-1.18878198e+00 2.57648802e+00 -1.26327288e+00 ... 1.17869246e+00 2.51933962e-01 -5.60358465e-01]]] [[[-3.88807133e-02 -8.27491105e-01 -5.13800800e-01 ... -7.30792165e-01 -1.31989861e+00 -1.02405012e+00] [-4.24713194e-01 -2.50823461e-02 -1.24935198e+00 ... -2.01893345e-01 1.98426473e+00 2.17775464e-01] [-2.01467350e-01 4.26088810e-01 -1.62511230e+00 ... -2.81299305e+00 -4.38193500e-01 -1.08676553e+00] ... [ 6.54333055e-01 1.60288167e+00 2.71735311e-01 ... 5.13127387e-01 2.11298376e-01 -1.20696259e+00] [ 1.78382838e+00 -1.48304808e+00 -5.33821404e-01 ... 5.17328739e-01 -7.31184065e-01 -2.36813314e-02] [ 1.62255025e+00 -1.57183528e+00 1.01581383e+00 ... 
-5.76196790e-01 -1.25310540e+00 1.81575048e+00]] [[-2.80085087e-01 1.49783504e+00 -1.37314975e+00 ... 1.90502298e+00 1.40794709e-01 6.50481164e-01] [ 1.55254960e+00 1.43689561e+00 -6.71553671e-01 ... -5.76736212e-01 -7.77737737e-01 1.39040500e-01] [-3.91540319e-01 1.49333644e+00 -6.29163682e-01 ... -1.47580266e+00 -5.45173995e-02 8.97034049e-01] ... [-7.99493790e-02 -5.27121902e-01 4.68916744e-01 ... -7.74129272e-01 -3.17323595e-01 1.30310106e+00] [-1.54685092e+00 -3.21760148e-01 -5.04653901e-02 ... 9.44887996e-01 -1.47199464e+00 -8.58025789e-01] [ 9.44220126e-02 3.47557425e-01 -7.91530073e-01 ... -1.62097800e+00 7.37421811e-01 -7.04084992e-01]] [[-1.58823860e+00 -1.17456746e+00 1.42102146e+00 ... -2.58901262e+00 1.06950581e-01 -1.94986951e+00] [-7.94800103e-01 5.91681004e-01 -1.71116781e+00 ... 7.56254792e-01 -3.14555079e-01 2.70716381e+00] [ 2.45821297e-01 -4.77095395e-01 -9.32361111e-02 ... -2.88645029e-01 -1.06306577e+00 -1.26590049e+00] ... [-1.24150217e+00 1.92610645e+00 3.39584321e-01 ... 1.84185755e+00 7.81012237e-01 -7.79188335e-01] [ 5.69903612e-01 2.11962163e-02 7.51469493e-01 ... 4.09521550e-01 -6.49473310e-01 2.77835071e-01] [-1.18578970e+00 2.27491951e+00 -7.10595548e-01 ... 8.79162967e-01 1.10065989e-01 -2.42011726e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[ 0.53493549 -0.5951042 -2.20901687 ... -1.08016477 -1.73006171 0.70994129] [-0.75423411 -1.39155161 -1.08859734 ... -0.93458553 0.7341208 1.42507165] [ 2.22993452 -0.01869875 0.67808982 ... -0.07189269 -0.74804616 -1.31087308] ... [-0.38383194 -0.47399273 0.32409407 ... -1.25189227 -0.66107112 0.72367155] [ 0.26347406 0.62378504 1.16397846 ... 0.95862986 0.7009993 -0.70161119] [ 2.24646643 -0.25415207 0.48622426 ... -1.60858031 -0.69345564 2.2535186 ]] [[-1.39016093 0.61847846 0.67654399 ... -0.82940308 -0.26366051 -0.40278314] [ 0.03527654 -0.83208742 1.29227727 ... 0.59911924 -2.45560827 -1.72161383] [-0.47954965 -0.75990719 -1.39124757 ... 
-0.94338458 1.58363384 0.38430825] ... [ 1.18697137 -0.98068476 -1.29876913 ... 0.28985266 -0.7354163 0.45277922] [-0.74352545 -1.4006145 0.34083133 ... 0.50919432 -0.41736798 -1.23623265] [ 0.02414526 0.71989531 -0.51664876 ... 0.66349807 0.49271857 1.39272149]] [[ 0.12297843 -0.31275469 0.09904106 ... -1.19512433 -0.54137891 2.33659468] [-0.36820117 -0.23824612 2.74572739 ... -0.70781086 -0.63274501 -1.20881237] [-0.26971979 0.03517976 -0.23321564 ... 0.32832732 -0.33199841 0.12278999] ... [ 0.51540911 0.96964229 -0.2516309 ... -0.34275565 1.31557211 0.67269402] [ 1.89684101 2.35083348 0.45208812 ... -0.00608907 -0.10139603 -0.15424164] [-0.90197589 -0.71230685 0.54565672 ... 0.57022755 1.42160479 -0.57728106]]]]; ov_res: [[[[ 0.53493547 -0.59510422 -2.2090168 ... -1.08016479 -1.73006165 0.70994127] [-0.75423414 -1.39155161 -1.0885973 ... -0.93458551 0.73412079 1.4250716 ] [ 2.22993445 -0.01869875 0.6780898 ... -0.07189269 -0.74804616 -1.31087303] ... [-0.38383195 -0.47399274 0.32409406 ... -1.25189221 -0.66107112 0.72367156] [ 0.26347405 0.62378502 1.16397846 ... 0.95862985 0.70099932 -0.70161116] [ 2.2464664 -0.25415206 0.48622426 ... -1.60858035 -0.69345564 2.25351858]] [[-1.39016092 0.61847848 0.67654401 ... -0.8294031 -0.26366052 -0.40278313] [ 0.03527654 -0.8320874 1.29227722 ... 0.59911925 -2.45560837 -1.72161388] [-0.47954965 -0.75990719 -1.39124751 ... -0.94338459 1.5836339 0.38430825] ... [ 1.18697143 -0.98068476 -1.29876912 ... 0.28985265 -0.73541629 0.45277923] [-0.74352545 -1.4006145 0.34083134 ... 0.50919431 -0.41736796 -1.23623264] [ 0.02414526 0.7198953 -0.51664877 ... 0.66349804 0.49271858 1.39272153]] [[ 0.12297843 -0.31275469 0.09904106 ... -1.19512439 -0.54137892 2.33659458] [-0.36820117 -0.23824613 2.7457273 ... -0.70781088 -0.63274503 -1.20881236] [-0.26971978 0.03517976 -0.23321563 ... 0.32832733 -0.33199841 0.12278999] ... [ 0.51540911 0.96964228 -0.2516309 ... -0.34275565 1.31557214 0.67269403] [ 1.89684105 2.35083342 0.45208812 ... 
-0.00608907 -0.10139603 -0.15424164] [-0.90197587 -0.71230686 0.54565674 ... 0.57022756 1.42160475 -0.57728106]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[-2.45627645e-01 -3.29163835e-01 1.18306393e-01 -9.10419255e-01 2.37514832e+00 1.19983112e+00 4.82359612e-01 -2.13582788e+00] [ 1.30910321e+00 3.16469201e-01 1.38631279e+00 -1.09332393e+00 -6.16964652e-01 -4.36262787e-01 6.96347066e-01 1.32975613e+00] [-1.08245823e+00 -5.00273585e-01 -1.33223720e-01 1.13867427e+00 1.86371968e-01 -1.91332193e+00 -1.39799579e+00 1.73154606e-01] [ 3.51554803e-01 1.38820707e-01 -3.24899803e-01 -7.94829200e-01 -8.65267366e-01 7.13508416e-01 5.29061273e-01 4.54519939e-01] [-1.52702537e+00 -8.57184302e-01 -2.80170280e-01 -2.91678978e-01 1.86770438e+00 -1.15016978e-01 -8.31365916e-01 -1.27736148e-01] [-2.96322726e-01 -9.52617318e-01 -1.72908597e+00 -4.16819518e-01 -6.89985943e-01 -5.41126819e-01 -6.73645446e-01 -3.67225891e-01] [ 1.07736157e+00 7.72034066e-01 2.14371345e-01 -4.93276265e-01 -1.34336383e+00 1.31759506e+00 6.63408867e-01 -2.50928463e-01] [ 7.34389290e-01 -4.90486784e-01 9.05674887e-01 -1.57015984e+00 7.29569746e-01 -1.83433848e+00 2.29893580e+00 1.19202624e+00]]] [[[-2.09369208e-01 7.14207708e-01 -1.78161311e+00 -3.73802426e-01 -1.29829128e+00 -1.33978434e+00 -1.99708465e+00 2.86357891e-01] [ 1.51739053e+00 4.89870786e-01 -7.65505889e-03 9.31440650e-02 -1.76917080e+00 -5.50810317e-01 -2.17838501e-01 6.25223358e-01] [-1.24233983e+00 1.63214553e+00 1.06497294e-01 -1.30772025e-01 7.67011569e-01 8.00595532e-01 -1.05048866e+00 1.51405277e+00] [ 1.23551322e+00 -3.87581474e-01 -6.56249178e-01 1.61875622e-01 -3.85464442e-01 -5.41196994e-01 -1.12675505e+00 3.53854445e-01] [ 7.30452436e-01 -5.52734144e-01 1.21168760e+00 1.72040105e-01 -1.70931847e-01 1.33980888e+00 8.64499405e-01 7.81329798e-01] [ 1.02262441e+00 1.10941218e+00 8.48828640e-01 5.32944094e-01 3.27486837e-01 4.99897181e-02 1.43766815e+00 1.05845558e+00] [-1.03562800e+00 
-5.29370633e-02 9.09811895e-01 -6.96596447e-01 -1.37070656e+00 8.81399210e-02 -9.37534359e-01 -5.14215585e-01] [-1.16557370e-01 3.27602160e-01 -7.49796254e-01 2.49109420e+00 7.44042878e-01 -2.63213836e-02 -1.83745774e+00 -1.34094172e+00]]] [[[ 1.85354856e+00 -4.02129011e-01 -5.53310169e-01 -1.57435043e+00 1.36664885e+00 -1.08608295e+00 6.65799320e-02 1.27913411e+00] [-9.30072588e-02 1.67238178e+00 3.81906301e-01 -8.11461522e-01 1.13205467e+00 -1.41152561e-01 1.12114471e+00 9.81218907e-01] [ 9.87467045e-02 6.18707309e-01 -3.67830545e-01 -1.82440737e+00 -3.46659638e-01 -7.45980180e-01 8.24786591e-01 1.98747052e-01] [ 9.67205071e-01 -6.39475996e-01 1.32109624e+00 6.29754211e-01 2.49496452e-01 -1.02129760e+00 -1.20233569e+00 -7.67817386e-02] [-1.46116408e-02 -9.47453705e-01 -5.11620638e-01 6.37014447e-01 3.79631960e-01 1.96060751e-01 4.57464546e-01 -1.42989348e-01] [-2.44759189e-02 -3.32540668e-02 -5.59439402e-01 1.29650447e+00 -1.47738591e-01 -3.80859380e-01 -7.86718792e-02 -1.77103629e-01] [ 2.44756697e-01 -1.06976218e+00 -7.88347630e-01 1.81758737e+00 -2.48227656e-02 -4.40375909e-01 5.59682550e-01 8.98269867e-01] [-1.60588261e+00 -8.58222228e-01 -3.96775653e-01 8.24598399e-01 9.92656945e-01 4.93864496e-01 2.11903947e+00 -1.52318630e-01]]] [[[-3.32035625e-01 4.84895742e-01 9.57214576e-01 7.88153945e-01 1.96782898e+00 -4.48941808e-01 7.91104074e-01 1.32320233e+00] [ 1.43711823e+00 8.63778186e-02 -1.05308748e+00 5.34323350e-01 -1.81131052e+00 1.05800815e-01 1.22931079e+00 1.19164167e+00] [-1.03040898e+00 7.12480073e-01 -1.71786943e+00 -1.00930928e+00 -1.26705023e-01 -1.97274021e+00 -1.10547329e-01 1.79537861e+00] [ 5.73977142e-01 -4.34162583e-01 2.46613121e+00 -1.09758407e+00 -3.17723665e-01 -1.27198383e+00 1.28633670e+00 5.80944928e-02] [-1.19393208e+00 -1.90516061e-01 7.00108077e-01 -2.41620084e-01 -1.21095584e+00 4.75500720e-01 -1.39825253e+00 -1.39610314e+00] [-1.46078128e+00 8.49514131e-01 3.03863492e-01 1.32531991e+00 2.92904724e+00 6.51683992e-01 -9.30853311e-01 
9.98977214e-01] [ 1.53027613e+00 -2.31608692e-01 -1.18351930e+00 -6.70168279e-02 2.39202839e+00 -1.43815683e+00 8.19264594e-01 -9.77686788e-01] [ 1.39765308e+00 -3.28980964e+00 1.69708236e+00 2.13487629e+00 8.82417761e-01 -1.38473702e+00 1.20804232e+00 5.31890301e-01]]] [[[-8.32657451e-01 -8.87813474e-01 -1.58673464e+00 8.53611238e-02 1.04568982e-01 1.51964251e+00 4.70782990e-01 4.55139337e-01] [ 3.91876071e-01 -1.75574794e+00 9.32997202e-01 -1.08602958e+00 6.69968308e-01 -3.91882476e-01 1.41108645e+00 3.47921466e-01] [-9.73376522e-01 -8.44925502e-01 -1.86572656e-01 9.87475324e-02 4.80190624e-01 1.03350214e+00 2.26694159e-01 -3.28269411e-01] [ 1.82462589e+00 -8.76490878e-03 2.56590659e-02 -3.81139627e-01 -2.59168284e-01 1.67531354e-01 -2.44507263e-02 -1.00587927e-01] [ 7.45640029e-01 3.27525568e-01 3.02969496e+00 -1.00581392e+00 -4.10246495e-01 1.94922389e-01 -1.18798053e+00 -2.88222227e-01] [-2.30572006e-01 -3.91231739e-01 1.56885860e+00 8.04135801e-01 3.91661301e-01 7.60760766e-01 -1.21384195e+00 -1.04120469e+00] [-8.22891014e-01 -1.31538403e+00 -1.19927039e+00 -6.17039028e-01 2.64562114e-01 -1.85332378e+00 -9.02371972e-01 5.02366947e-03] [ 6.85285709e-01 -3.05361311e-01 5.23684428e-01 -4.35041947e-01 -1.11823191e+00 1.35274610e+00 4.55143058e-01 5.83819500e-01]]] [[[-5.42812235e-01 8.31868271e-01 -1.22770602e+00 1.07087341e+00 -1.38532206e+00 5.29790766e-01 -7.60567386e-01 -1.32247808e-01] [-4.62144129e-02 2.15623569e+00 -1.61838033e+00 -1.54395113e+00 7.79917931e-01 1.12517011e-01 1.63905302e+00 -7.75802120e-01] [-6.75728749e-01 3.67268841e-01 -1.56595958e+00 1.73433326e-02 6.33439148e-01 -1.53445201e-01 -6.07332696e-01 -1.17649606e+00] [-1.07678374e+00 4.79242805e-02 -1.49876117e-01 3.11002248e-01 -4.98202383e-02 2.10393675e+00 -2.48593800e-01 6.91652805e-01] [ 1.42783659e+00 2.06161475e+00 -9.78208609e-01 3.96673548e-01 1.16907091e+00 -1.10965008e-01 -3.46907947e-02 3.34997575e-01] [-5.51498614e-01 7.67340895e-01 1.07683298e+00 1.89179285e-01 2.06629458e-01 
1.43103845e+00 1.26149942e+00 -6.91981182e-01] [ 1.61159321e+00 -1.00499241e-01 -1.03893737e+00 1.09288130e+00 1.39820153e+00 -2.71559625e-01 -9.88142679e-01 1.51170055e+00] [-2.41601784e+00 2.66416950e-02 -2.10471196e+00 1.03853274e+00 -3.36236474e-02 1.09238303e+00 -2.23155257e+00 1.37645863e-01]]] [[[ 7.51291378e-01 3.19592678e-01 3.96886090e-01 1.29242085e+00 -3.44222172e-01 1.30186208e-01 5.48861008e-01 -2.37374150e+00] [-1.26087594e-03 -2.70474259e-01 2.65982593e-01 -2.11142985e-01 7.42364210e-01 -1.65902810e-01 5.38762051e-01 8.67505847e-01] [ 7.69767547e-03 1.66669974e+00 -1.95310980e+00 2.79179904e-01 1.47741409e+00 -3.20197514e-01 4.40635199e-01 1.57522191e-01] [-1.81295432e+00 2.06535522e+00 8.43777565e-01 5.08935591e-01 1.62802777e-01 1.25702481e+00 1.46404651e+00 1.57186972e+00] [-8.50359064e-01 4.08137066e-01 1.51281361e-01 -9.17396683e-01 1.49106334e+00 -1.19760094e+00 7.44905195e-01 -8.08570130e-01] [-1.36874149e+00 2.73081616e-01 1.59823903e+00 1.06163461e+00 -1.78059553e+00 -7.15261143e-01 -8.87666449e-01 -6.26569859e-01] [-5.16558446e-01 -1.26258702e+00 1.19343988e+00 -4.39633731e-01 -2.44125387e+00 -1.94413742e+00 -4.22959031e-01 -9.84241311e-01] [-1.09813616e+00 -6.61311797e-01 9.31741679e-01 2.69407406e-01 -9.44411202e-01 -1.27619276e+00 1.11282611e+00 6.50701249e-01]]] [[[ 6.58134109e-01 5.02049344e-01 1.39927793e+00 -4.43384728e-01 -1.68183470e+00 1.69087798e+00 6.17217929e-01 1.43823931e-01] [-1.05222540e+00 1.35533539e+00 -5.95707624e-01 3.99724952e-01 2.75812258e-02 -5.74959325e-01 4.91318296e-01 -6.51044715e-01] [-2.91450419e-01 -3.93096747e-01 -5.25047169e-01 2.64463860e+00 -2.64729153e+00 -4.76290739e-01 -1.16355373e+00 2.38453431e+00] [ 1.41838150e+00 2.05237762e-02 -1.83716569e+00 9.52137158e-01 -3.08384226e-01 -8.35067271e-01 5.28201771e-01 -1.80585252e-01] [-9.17435594e-01 1.64698153e+00 -5.03255265e-01 -4.58362582e-01 -5.56416474e-01 5.55269450e-01 -8.68225677e-01 -9.86678786e-01] [ 1.09798487e-01 9.71315785e-02 -3.77364845e-01 
-8.03692229e-01 -4.08755404e-03 7.88375774e-01 9.21184214e-01 3.22547694e-01] [ 1.40847018e-01 -1.86527702e+00 1.28265663e+00 -3.98232590e-01 -3.50214341e-01 2.60639781e-01 -1.63743078e-01 -1.14161382e+00] [-1.34177109e+00 -1.52911634e+00 3.34591521e-01 -9.22938396e-01 -2.12761595e-01 -1.18023298e+00 -1.24874765e+00 -2.13902502e+00]]] [[[ 1.69866787e+00 1.77098123e-01 -7.69667531e-01 -3.76421375e-01 -1.67592136e+00 9.38605121e-01 8.31400706e-01 1.24727746e+00] [ 1.67976773e+00 -2.06788445e-01 -5.01958731e-01 -3.35544259e-01 3.98731479e-01 8.30769762e-01 1.80702736e-02 -3.56766415e-01] [-8.20576458e-02 1.03230317e+00 -1.64598793e-01 -6.05759806e-01 1.50232704e+00 5.02350554e-02 -6.49381472e-02 3.86991469e-01] [ 2.38280793e-01 1.92750141e-01 2.92683790e-01 1.16700712e+00 -6.25969893e-01 1.24889108e+00 7.92458911e-01 -9.71026016e-01] [ 1.61987899e+00 -8.68915957e-02 4.09040152e-01 1.52954975e+00 -2.59352856e+00 -1.31991316e+00 -1.40994956e+00 -1.23161539e+00] [ 7.10510758e-01 -2.44877886e+00 9.89498569e-01 1.27698448e+00 5.01231463e-01 3.73621936e-01 -1.63901446e-01 1.09278349e+00] [ 4.59724425e-01 -4.38041767e-01 -1.13109660e+00 8.65892366e-02 6.81984016e-01 -5.86936979e-01 -1.14652999e+00 1.87936987e-01] [ 1.39707095e-01 1.26445267e+00 1.00054815e+00 -3.76634902e-01 -3.48176144e-01 -8.91595535e-01 -7.59011211e-01 -6.39411261e-01]]] [[[-4.04680791e-01 9.11461271e-02 3.49790552e-01 -1.17990502e+00 2.81346150e+00 7.24297932e-01 -5.40166152e-01 9.55666680e-01] [-1.12183997e-01 -1.98494438e+00 6.20888170e-01 1.54131653e+00 -1.32243632e+00 5.28643274e-01 -2.60727243e+00 4.56019121e-02] [ 8.34988163e-02 -1.00228244e+00 -1.70884491e+00 -1.64334627e+00 -1.24388313e-02 1.14880300e+00 -6.45903298e-01 4.69127908e-01] [ 7.25519550e-02 2.04572980e-02 -2.00347396e-01 -1.37710117e-01 -5.04425152e-01 1.75964287e-02 7.35432708e-01 -6.05848983e-01] [-3.09297182e-01 -1.11642040e+00 -9.20019831e-01 6.27400512e-01 -2.98703203e-01 -1.06985841e+00 6.56824184e-01 6.97417200e-01] 
[-8.94288182e-01 6.05450659e-02 1.98477041e-01 1.95490387e-01 -1.33330946e+00 -5.08255918e-01 8.77966798e-01 -1.49189607e-01] [-5.09238068e-01 -2.82534697e-01 -1.52308070e+00 2.41668113e+00 -3.88310489e-02 -6.97644645e-02 -9.70527975e-01 1.21595800e+00] [-4.04441426e-01 1.52802983e+00 7.70333715e-01 -7.65137876e-01 -3.99330887e-01 -3.05113029e-01 8.96503549e-01 -1.36419564e+00]]]]; ov_res: [[[[-2.45627642e-01 -3.29163849e-01 1.18306391e-01 -9.10419226e-01 2.37514830e+00 1.19983113e+00 4.82359618e-01 -2.13582778e+00] [ 1.30910325e+00 3.16469193e-01 1.38631284e+00 -1.09332395e+00 -6.16964638e-01 -4.36262786e-01 6.96347058e-01 1.32975614e+00] [-1.08245826e+00 -5.00273585e-01 -1.33223727e-01 1.13867426e+00 1.86371967e-01 -1.91332197e+00 -1.39799583e+00 1.73154607e-01] [ 3.51554811e-01 1.38820708e-01 -3.24899793e-01 -7.94829190e-01 -8.65267336e-01 7.13508427e-01 5.29061258e-01 4.54519928e-01] [-1.52702534e+00 -8.57184291e-01 -2.80170292e-01 -2.91678965e-01 1.86770439e+00 -1.15016982e-01 -8.31365943e-01 -1.27736151e-01] [-2.96322733e-01 -9.52617347e-01 -1.72908592e+00 -4.16819513e-01 -6.89985931e-01 -5.41126847e-01 -6.73645437e-01 -3.67225885e-01] [ 1.07736158e+00 7.72034049e-01 2.14371338e-01 -4.93276268e-01 -1.34336388e+00 1.31759501e+00 6.63408875e-01 -2.50928462e-01] [ 7.34389305e-01 -4.90486771e-01 9.05674875e-01 -1.57015979e+00 7.29569733e-01 -1.83433843e+00 2.29893589e+00 1.19202626e+00]]] [[[-2.09369212e-01 7.14207709e-01 -1.78161311e+00 -3.73802423e-01 -1.29829133e+00 -1.33978438e+00 -1.99708462e+00 2.86357880e-01] [ 1.51739049e+00 4.89870787e-01 -7.65505899e-03 9.31440666e-02 -1.76917076e+00 -5.50810337e-01 -2.17838496e-01 6.25223339e-01] [-1.24233985e+00 1.63214552e+00 1.06497295e-01 -1.30772024e-01 7.67011583e-01 8.00595522e-01 -1.05048871e+00 1.51405275e+00] [ 1.23551321e+00 -3.87581468e-01 -6.56249166e-01 1.61875620e-01 -3.85464430e-01 -5.41197002e-01 -1.12675500e+00 3.53854448e-01] [ 7.30452418e-01 -5.52734137e-01 1.21168756e+00 1.72040105e-01 
-1.70931846e-01 1.33980894e+00 8.64499390e-01 7.81329811e-01] [ 1.02262437e+00 1.10941219e+00 8.48828614e-01 5.32944083e-01 3.27486843e-01 4.99897189e-02 1.43766820e+00 1.05845559e+00] [-1.03562796e+00 -5.29370643e-02 9.09811914e-01 -6.96596444e-01 -1.37070656e+00 8.81399214e-02 -9.37534332e-01 -5.14215589e-01] [-1.16557367e-01 3.27602148e-01 -7.49796271e-01 2.49109411e+00 7.44042873e-01 -2.63213832e-02 -1.83745778e+00 -1.34094167e+00]]] [[[ 1.85354853e+00 -4.02129024e-01 -5.53310156e-01 -1.57435048e+00 1.36664891e+00 -1.08608294e+00 6.65799305e-02 1.27913415e+00] [-9.30072591e-02 1.67238176e+00 3.81906301e-01 -8.11461508e-01 1.13205469e+00 -1.41152561e-01 1.12114465e+00 9.81218934e-01] [ 9.87467021e-02 6.18707299e-01 -3.67830545e-01 -1.82440734e+00 -3.46659631e-01 -7.45980203e-01 8.24786603e-01 1.98747054e-01] [ 9.67205048e-01 -6.39476001e-01 1.32109618e+00 6.29754186e-01 2.49496445e-01 -1.02129757e+00 -1.20233572e+00 -7.67817423e-02] [-1.46116409e-02 -9.47453678e-01 -5.11620641e-01 6.37014449e-01 3.79631966e-01 1.96060747e-01 4.57464546e-01 -1.42989352e-01] [-2.44759191e-02 -3.32540683e-02 -5.59439421e-01 1.29650450e+00 -1.47738591e-01 -3.80859375e-01 -7.86718801e-02 -1.77103624e-01] [ 2.44756699e-01 -1.06976223e+00 -7.88347602e-01 1.81758738e+00 -2.48227660e-02 -4.40375894e-01 5.59682548e-01 8.98269892e-01] [-1.60588264e+00 -8.58222246e-01 -3.96775663e-01 8.24598372e-01 9.92656946e-01 4.93864506e-01 2.11903954e+00 -1.52318627e-01]]] [[[-3.32035631e-01 4.84895736e-01 9.57214594e-01 7.88153946e-01 1.96782899e+00 -4.48941797e-01 7.91104078e-01 1.32320237e+00] [ 1.43711817e+00 8.63778219e-02 -1.05308747e+00 5.34323335e-01 -1.81131053e+00 1.05800815e-01 1.22931075e+00 1.19164169e+00] [-1.03040898e+00 7.12480068e-01 -1.71786940e+00 -1.00930929e+00 -1.26705021e-01 -1.97274017e+00 -1.10547327e-01 1.79537857e+00] [ 5.73977113e-01 -4.34162587e-01 2.46613121e+00 -1.09758413e+00 -3.17723662e-01 -1.27198386e+00 1.28633666e+00 5.80944940e-02] [-1.19393206e+00 -1.90516055e-01 
7.00108051e-01 -2.41620079e-01 -1.21095586e+00 4.75500733e-01 -1.39825249e+00 -1.39610314e+00] [-1.46078134e+00 8.49514127e-01 3.03863496e-01 1.32531989e+00 2.92904735e+00 6.51683986e-01 -9.30853307e-01 9.98977244e-01] [ 1.53027618e+00 -2.31608689e-01 -1.18351936e+00 -6.70168251e-02 2.39202833e+00 -1.43815684e+00 8.19264591e-01 -9.77686763e-01] [ 1.39765310e+00 -3.28980970e+00 1.69708240e+00 2.13487625e+00 8.82417738e-01 -1.38473701e+00 1.20804238e+00 5.31890273e-01]]] [[[-8.32657456e-01 -8.87813449e-01 -1.58673465e+00 8.53611231e-02 1.04568981e-01 1.51964247e+00 4.70782995e-01 4.55139339e-01] [ 3.91876072e-01 -1.75574791e+00 9.32997227e-01 -1.08602953e+00 6.69968307e-01 -3.91882479e-01 1.41108644e+00 3.47921461e-01] [-9.73376513e-01 -8.44925523e-01 -1.86572656e-01 9.87475291e-02 4.80190635e-01 1.03350210e+00 2.26694152e-01 -3.28269422e-01] [ 1.82462585e+00 -8.76490865e-03 2.56590657e-02 -3.81139636e-01 -2.59168297e-01 1.67531356e-01 -2.44507268e-02 -1.00587927e-01] [ 7.45640039e-01 3.27525556e-01 3.02969503e+00 -1.00581396e+00 -4.10246491e-01 1.94922388e-01 -1.18798053e+00 -2.88222224e-01] [-2.30572000e-01 -3.91231745e-01 1.56885862e+00 8.04135799e-01 3.91661316e-01 7.60760784e-01 -1.21384192e+00 -1.04120469e+00] [-8.22890997e-01 -1.31538403e+00 -1.19927037e+00 -6.17039025e-01 2.64562100e-01 -1.85332382e+00 -9.02371943e-01 5.02366945e-03] [ 6.85285687e-01 -3.05361301e-01 5.23684442e-01 -4.35041934e-01 -1.11823189e+00 1.35274613e+00 4.55143064e-01 5.83819509e-01]]] [[[-5.42812228e-01 8.31868291e-01 -1.22770607e+00 1.07087338e+00 -1.38532209e+00 5.29790759e-01 -7.60567367e-01 -1.32247806e-01] [-4.62144129e-02 2.15623569e+00 -1.61838031e+00 -1.54395115e+00 7.79917955e-01 1.12517014e-01 1.63905299e+00 -7.75802135e-01] [-6.75728738e-01 3.67268831e-01 -1.56595957e+00 1.73433330e-02 6.33439124e-01 -1.53445199e-01 -6.07332706e-01 -1.17649603e+00] [-1.07678378e+00 4.79242802e-02 -1.49876118e-01 3.11002254e-01 -4.98202369e-02 2.10393667e+00 -2.48593807e-01 6.91652834e-01] [ 
1.42783654e+00 2.06161475e+00 -9.78208601e-01 3.96673560e-01 1.16907096e+00 -1.10965006e-01 -3.46907936e-02 3.34997565e-01] [-5.51498592e-01 7.67340899e-01 1.07683301e+00 1.89179286e-01 2.06629455e-01 1.43103850e+00 1.26149940e+00 -6.91981196e-01] [ 1.61159325e+00 -1.00499243e-01 -1.03893733e+00 1.09288132e+00 1.39820158e+00 -2.71559626e-01 -9.88142669e-01 1.51170051e+00] [-2.41601777e+00 2.66416948e-02 -2.10471201e+00 1.03853273e+00 -3.36236469e-02 1.09238303e+00 -2.23155260e+00 1.37645870e-01]]] [[[ 7.51291394e-01 3.19592685e-01 3.96886081e-01 1.29242086e+00 -3.44222158e-01 1.30186215e-01 5.48861027e-01 -2.37374139e+00] [-1.26087596e-03 -2.70474255e-01 2.65982598e-01 -2.11142987e-01 7.42364228e-01 -1.65902808e-01 5.38762033e-01 8.67505848e-01] [ 7.69767538e-03 1.66669977e+00 -1.95310974e+00 2.79179901e-01 1.47741413e+00 -3.20197523e-01 4.40635204e-01 1.57522187e-01] [-1.81295431e+00 2.06535530e+00 8.43777537e-01 5.08935571e-01 1.62802771e-01 1.25702477e+00 1.46404648e+00 1.57186973e+00] [-8.50359082e-01 4.08137053e-01 1.51281357e-01 -9.17396665e-01 1.49106336e+00 -1.19760096e+00 7.44905174e-01 -8.08570147e-01] [-1.36874151e+00 2.73081630e-01 1.59823906e+00 1.06163466e+00 -1.78059554e+00 -7.15261161e-01 -8.87666464e-01 -6.26569867e-01] [-5.16558468e-01 -1.26258707e+00 1.19343984e+00 -4.39633727e-01 -2.44125390e+00 -1.94413745e+00 -4.22959030e-01 -9.84241307e-01] [-1.09813619e+00 -6.61311805e-01 9.31741655e-01 2.69407392e-01 -9.44411218e-01 -1.27619278e+00 1.11282611e+00 6.50701225e-01]]] [[[ 6.58134103e-01 5.02049327e-01 1.39927793e+00 -4.43384737e-01 -1.68183470e+00 1.69087803e+00 6.17217958e-01 1.43823937e-01] [-1.05222535e+00 1.35533535e+00 -5.95707595e-01 3.99724960e-01 2.75812261e-02 -5.74959338e-01 4.91318285e-01 -6.51044726e-01] [-2.91450411e-01 -3.93096745e-01 -5.25047183e-01 2.64463854e+00 -2.64729142e+00 -4.76290733e-01 -1.16355371e+00 2.38453436e+00] [ 1.41838145e+00 2.05237754e-02 -1.83716571e+00 9.52137172e-01 -3.08384240e-01 -8.35067272e-01 
5.28201759e-01 -1.80585250e-01] [-9.17435586e-01 1.64698148e+00 -5.03255248e-01 -4.58362579e-01 -5.56416452e-01 5.55269420e-01 -8.68225694e-01 -9.86678779e-01] [ 1.09798491e-01 9.71315801e-02 -3.77364844e-01 -8.03692222e-01 -4.08755383e-03 7.88375795e-01 9.21184242e-01 3.22547704e-01] [ 1.40847012e-01 -1.86527705e+00 1.28265667e+00 -3.98232579e-01 -3.50214332e-01 2.60639787e-01 -1.63743079e-01 -1.14161384e+00] [-1.34177113e+00 -1.52911639e+00 3.34591508e-01 -9.22938406e-01 -2.12761596e-01 -1.18023300e+00 -1.24874771e+00 -2.13902497e+00]]] [[[ 1.69866788e+00 1.77098125e-01 -7.69667506e-01 -3.76421362e-01 -1.67592132e+00 9.38605130e-01 8.31400692e-01 1.24727750e+00] [ 1.67976773e+00 -2.06788450e-01 -5.01958728e-01 -3.35544258e-01 3.98731470e-01 8.30769777e-01 1.80702731e-02 -3.56766403e-01] [-8.20576474e-02 1.03230321e+00 -1.64598793e-01 -6.05759799e-01 1.50232708e+00 5.02350554e-02 -6.49381503e-02 3.86991471e-01] [ 2.38280788e-01 1.92750141e-01 2.92683780e-01 1.16700709e+00 -6.25969887e-01 1.24889112e+00 7.92458892e-01 -9.71026003e-01] [ 1.61987901e+00 -8.68915990e-02 4.09040153e-01 1.52954972e+00 -2.59352851e+00 -1.31991315e+00 -1.40994954e+00 -1.23161542e+00] [ 7.10510731e-01 -2.44877887e+00 9.89498556e-01 1.27698445e+00 5.01231492e-01 3.73621941e-01 -1.63901448e-01 1.09278345e+00] [ 4.59724426e-01 -4.38041776e-01 -1.13109660e+00 8.65892395e-02 6.81984007e-01 -5.86936951e-01 -1.14653003e+00 1.87936991e-01] [ 1.39707088e-01 1.26445270e+00 1.00054812e+00 -3.76634896e-01 -3.48176152e-01 -8.91595542e-01 -7.59011209e-01 -6.39411271e-01]]] [[[-4.04680789e-01 9.11461264e-02 3.49790543e-01 -1.17990506e+00 2.81346154e+00 7.24297941e-01 -5.40166140e-01 9.55666661e-01] [-1.12183996e-01 -1.98494434e+00 6.20888174e-01 1.54131651e+00 -1.32243633e+00 5.28643250e-01 -2.60727239e+00 4.56019118e-02] [ 8.34988132e-02 -1.00228250e+00 -1.70884490e+00 -1.64334631e+00 -1.24388309e-02 1.14880300e+00 -6.45903289e-01 4.69127893e-01] [ 7.25519583e-02 2.04572976e-02 -2.00347394e-01 
-1.37710124e-01 -5.04425168e-01 1.75964292e-02 7.35432684e-01 -6.05848968e-01] [-3.09297174e-01 -1.11642039e+00 -9.20019805e-01 6.27400517e-01 -2.98703194e-01 -1.06985843e+00 6.56824172e-01 6.97417200e-01] [-8.94288182e-01 6.05450645e-02 1.98477045e-01 1.95490390e-01 -1.33330941e+00 -5.08255899e-01 8.77966821e-01 -1.49189606e-01] [-5.09238064e-01 -2.82534689e-01 -1.52308071e+00 2.41668105e+00 -3.88310477e-02 -6.97644651e-02 -9.70527947e-01 1.21595800e+00] [-4.04441416e-01 1.52802980e+00 7.70333707e-01 -7.65137851e-01 -3.99330884e-01 -3.05113018e-01 8.96503568e-01 -1.36419559e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[0.991854]]]]; ov_res: [[[[0.99185401]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_listconstruct_getitem_listunpack[ ie_device:CPU - precision:FP32 - idx:-4 ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5144.prim_listunpack, %in1.1 : Tensor, %in2.1 : Tensor, %in3.1 : Tensor, %in4.1 : Tensor): %self.idx : int = prim::Constant[value=-4]() %items.1 : int[][] = prim::ListConstruct() %7 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:29 %8 : int[][] = aten::append(%items.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:16 %9 : int[] = aten::size(%in2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:29 %10 : int[][] = aten::append(%items.1, %9) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:16 %11 : int[] = aten::size(%in3.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:29 %12 : int[][] = aten::append(%items.1, %11) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:16 %13 : int[] = aten::size(%in4.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:29 %14 : int[][] = aten::append(%items.1, %13) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:16 %getitem_0.1 : int[] = aten::__getitem__(%items.1, %self.idx) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:77:28 %a.1 : int, %b.1 : int, %c.1 : int, %d.1 : int = prim::ListUnpack(%getitem_0.1) %20 : (int, int, int, int) = prim::TupleConstruct(%a.1, %b.1, %c.1, %d.1) return (%20) fw_re: 8; ov_res: 8 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 3; ov_res: 3 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 512; ov_res: 512 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 512; ov_res: 512 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_listconstruct_getitem_listunpack[ ie_device:CPU - precision:FP32 - idx:-3 ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5146.prim_listunpack, %in1.1 : Tensor, %in2.1 : Tensor, %in3.1 : Tensor, %in4.1 : Tensor): %self.idx : int = prim::Constant[value=-3]() %items.1 : int[][] = prim::ListConstruct() %7 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:29 %8 : int[][] = aten::append(%items.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:16 %9 : int[] = aten::size(%in2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:29 %10 : int[][] = aten::append(%items.1, %9) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:16 %11 : int[] = aten::size(%in3.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:29 %12 : int[][] = aten::append(%items.1, %11) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:16 %13 : int[] = aten::size(%in4.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:29 %14 : int[][] = aten::append(%items.1, %13) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:16 %getitem_0.1 : int[] = aten::__getitem__(%items.1, %self.idx) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:77:28 %a.1 : int, %b.1 : int, %c.1 : int, %d.1 : int = prim::ListUnpack(%getitem_0.1) %20 : (int, int, int, int) = prim::TupleConstruct(%a.1, %b.1, %c.1, %d.1) return (%20) fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 3; ov_res: 3 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 224; ov_res: 224 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 224; ov_res: 224 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_listconstruct_getitem_listunpack[ ie_device:CPU - precision:FP32 - idx:-2 ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5148.prim_listunpack, %in1.1 : Tensor, %in2.1 : Tensor, %in3.1 : Tensor, %in4.1 : Tensor): %self.idx : int = prim::Constant[value=-2]() %items.1 : int[][] = prim::ListConstruct() %7 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:29 %8 : int[][] = aten::append(%items.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:16 %9 : int[] = aten::size(%in2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:29 %10 : int[][] = aten::append(%items.1, %9) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:16 %11 : int[] = aten::size(%in3.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:29 %12 : int[][] = aten::append(%items.1, %11) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:16 %13 : int[] = aten::size(%in4.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:29 %14 : int[][] = aten::append(%items.1, %13) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:16 %getitem_0.1 : int[] = aten::__getitem__(%items.1, %self.idx) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:77:28 %a.1 : int, %b.1 : int, %c.1 : int, %d.1 : int = prim::ListUnpack(%getitem_0.1) %20 : (int, int, int, int) = prim::TupleConstruct(%a.1, %b.1, %c.1, %d.1) return (%20) no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no 
schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. 
Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'get_input_size() >= 1' failed at src/core/src/op/concat.cpp:33: While validating node 'v0::Concat Concat_6847492 () -> (dynamic[...])' with friendly_name 'Concat_6847492': At least one argument required. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'get_input_size() >= 1' failed at src/core/src/op/concat.cpp:33: While validating node 'v0::Concat Concat_6849550 () -> (dynamic[...])' with friendly_name 'Concat_6849550': At least one argument required. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. 
Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'get_input_size() >= 1' failfw_re: 10; ov_res: 10 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 8; ov_res: 8 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 8; ov_res: 8 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_listconstruct_getitem_listunpack[ ie_device:CPU - precision:FP32 - idx:-1 ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5150.prim_listunpack, %in1.1 : Tensor, %in2.1 : Tensor, %in3.1 : Tensor, %in4.1 : Tensor): %self.idx : int = prim::Constant[value=-1]() %items.1 : int[][] = prim::ListConstruct() %7 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:29 %8 : int[][] = aten::append(%items.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:16 %9 : int[] = aten::size(%in2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:29 %10 : int[][] = aten::append(%items.1, %9) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:16 %11 : int[] = aten::size(%in3.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:29 %12 : int[][] = aten::append(%items.1, %11) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:16 %13 : int[] = aten::size(%in4.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:29 %14 : int[][] = aten::append(%items.1, %13) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:16 %getitem_0.1 : int[] = aten::__getitem__(%items.1, %self.idx) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:77:28 %a.1 : int, %b.1 : int, %c.1 : int, %d.1 : int = prim::ListUnpack(%getitem_0.1) %20 : (int, int, int, int) = prim::TupleConstruct(%a.1, %b.1, %c.1, %d.1) return (%20) fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_listconstruct_getitem_listunpack[ ie_device:CPU - precision:FP32 - idx:0 ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5152.prim_listunpack, %in1.1 : Tensor, %in2.1 : Tensor, %in3.1 : Tensor, %in4.1 : Tensor): %self.idx : int = prim::Constant[value=0]() %items.1 : int[][] = prim::ListConstruct() %7 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:29 %8 : int[][] = aten::append(%items.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:16 %9 : int[] = aten::size(%in2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:29 %10 : int[][] = aten::append(%items.1, %9) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:16 %11 : int[] = aten::size(%in3.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:29 %12 : int[][] = aten::append(%items.1, %11) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:16 %13 : int[] = aten::size(%in4.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:29 %14 : int[][] = aten::append(%items.1, %13) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:16 %getitem_0.1 : int[] = aten::__getitem__(%items.1, %self.idx) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:77:28 %a.1 : int, %b.1 : int, %c.1 : int, %d.1 : int = prim::ListUnpack(%getitem_0.1) %20 : (int, int, int, int) = prim::TupleConstruct(%a.1, %b.1, %c.1, %d.1) return (%20) fw_re: 8; ov_res: 8 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 3; ov_res: 3 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 512; ov_res: 512 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 512; ov_res: 512 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_listconstruct_getitem_listunpack[ ie_device:CPU - precision:FP32 - idx:1 ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5154.prim_listunpack, %in1.1 : Tensor, %in2.1 : Tensor, %in3.1 : Tensor, %in4.1 : Tensor): %self.idx : int = prim::Constant[value=1]() %items.1 : int[][] = prim::ListConstruct() %7 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:29 %8 : int[][] = aten::append(%items.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:16 %9 : int[] = aten::size(%in2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:29 %10 : int[][] = aten::append(%items.1, %9) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:16 %11 : int[] = aten::size(%in3.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:29 %12 : int[][] = aten::append(%items.1, %11) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:16 %13 : int[] = aten::size(%in4.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:29 %14 : int[][] = aten::append(%items.1, %13) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:16 %getitem_0.1 : int[] = aten::__getitem__(%items.1, %self.idx) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:77:28 %a.1 : int, %b.1 : int, %c.1 : int, %d.1 : int = prim::ListUnpack(%getitem_0.1) %20 : (int, int, int, int) = prim::TupleConstruct(%a.1, %b.1, %c.1, %d.1) return (%20) fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 3; ov_res: 3 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 224; ov_res: 224 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 224; ov_res: 224 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_listconstruct_getitem_listunpack[ ie_device:CPU - precision:FP32 - idx:2 ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5156.prim_listunpack, %in1.1 : Tensor, %in2.1 : Tensor, %in3.1 : Tensor, %in4.1 : Tensor): %self.idx : int = prim::Constant[value=2]() %items.1 : int[][] = prim::ListConstruct() %7 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:29 %8 : int[][] = aten::append(%items.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:16 %9 : int[] = aten::size(%in2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:29 %10 : int[][] = aten::append(%items.1, %9) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:16 %11 : int[] = aten::size(%in3.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:29 %12 : int[][] = aten::append(%items.1, %11) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:16 %13 : int[] = aten::size(%in4.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:29 %14 : int[][] = aten::append(%items.1, %13) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:16 %getitem_0.1 : int[] = aten::__getitem__(%items.1, %self.idx) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:77:28 %a.1 : int, %b.1 : int, %c.1 : int, %d.1 : int = prim::ListUnpack(%getitem_0.1) %20 : (int, int, int, int) = prim::TupleConstruct(%a.1, %b.1, %c.1, %d.1) return (%20) fw_re: 10; ov_res: 10 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 8; ov_res: 8 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 8; ov_res: 8 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_listunpack.py::TestListUnpack::test_listconstruct_getitem_listunpack[ ie_device:CPU - precision:FP32 - idx:3 ] | 0.24 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_listunpack.___torch_mangle_5158.prim_listunpack, %in1.1 : Tensor, %in2.1 : Tensor, %in3.1 : Tensor, %in4.1 : Tensor): %self.idx : int = prim::Constant[value=3]() %items.1 : int[][] = prim::ListConstruct() %7 : int[] = aten::size(%in1.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:29 %8 : int[][] = aten::append(%items.1, %7) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:73:16 %9 : int[] = aten::size(%in2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:29 %10 : int[][] = aten::append(%items.1, %9) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:74:16 %11 : int[] = aten::size(%in3.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:29 %12 : int[][] = aten::append(%items.1, %11) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:75:16 %13 : int[] = aten::size(%in4.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:29 %14 : int[][] = aten::append(%items.1, %13) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:76:16 %getitem_0.1 : int[] = aten::__getitem__(%items.1, %self.idx) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_listunpack.py:77:28 %a.1 : int, %b.1 : int, %c.1 : int, %d.1 : int = prim::ListUnpack(%getitem_0.1) %20 : (int, int, int, int) = prim::TupleConstruct(%a.1, %b.1, %c.1, %d.1) return (%20) fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:zeros - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5159.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[-0.14406025 0.8109343 -0.4032445 0.0694655 -0.6433752 0.6880602 0.59370816 -0.1207324 -1.2506006 0.6443521 ]]; ov_res: [[-0.14406025 0.8109343 -0.4032445 0.0694655 -0.6433752 0.6880602 0.59370816 -0.1207324 -1.2506006 0.6443521 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:zeros - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5161.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[ 1.3525857 0.6900942 1.0347186 -0.6155348 0.5847593 -0.8695658 -0.56022555 -1.2715751 0.99243695 0.13239633]]; ov_res: [[ 1.3525857 0.6900942 1.0347186 -0.6155348 0.5847593 -0.8695658 -0.56022555 -1.2715751 0.99243695 0.13239633]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:zeros - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5163.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[ 0.29330236 -1.8719321 1.250778 -0.9002679 -0.2542306 -0.93021446 2.176217 -0.33254528 -0.33299264 -0.95456344]]; ov_res: [[ 0.29330236 -1.8719321 1.250778 -0.9002679 -0.2542306 -0.93021446 2.176217 -0.33254528 -0.33299264 -0.95456344]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:ones - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5165.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]; ov_res: [[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:ones - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5167.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]]; ov_res: [[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:ones - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5169.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[-1. -1. -1. -1. -1. -1. -1. -1. -1. -1.]]; ov_res: [[-1. -1. -1. -1. -1. -1. -1. -1. -1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:random - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5171.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[ 0. 0. 0. 0.20581497 -0.13562797 -2.2459924 0.22016257 -0.8186167 -1.5247488 0. ]]; ov_res: [[ 0. 0. 0. 0.20581497 -0.13562797 -2.2459924 0.22016257 -0.8186167 -1.5247488 0. ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:random - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5173.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[ 1. -0.40938836 1. 1.6661388 1.4808108 0.84983176 -1.4542159 -0.48848966 1. 0.15306942]]; ov_res: [[ 1. -0.40938836 1. 1.6661388 1.4808108 0.84983176 -1.4542159 -0.48848966 1. 0.15306942]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:random - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5175.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[-1.2024939 0.00226903 -1. -1. -0.3912889 0.5496873 -1. 0.561585 -1. -1. ]]; ov_res: [[-1.2024939 0.00226903 -1. -1. -0.3912889 0.5496873 -1. 0.561585 -1. -1. ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'bool'> - mask_fill:zeros - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5177.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[ 1.6243879 0.1837359 -1.6283046 0.9257092 1.4242377 -0.79899424 -0.4400513 -0.1834438 1.8278509 -0.43185902]]; ov_res: [[ 1.6243879 0.1837359 -1.6283046 0.9257092 1.4242377 -0.79899424 -0.4400513 -0.1834438 1.8278509 -0.43185902]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'bool'> - mask_fill:zeros - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5179.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[-1.7397819 1.1490154 1.5395066 -1.8391877 -0.7493073 -1.0603964 -0.34350327 -0.13933049 -0.32751372 -1.4406532 ]]; ov_res: [[-1.7397819 1.1490154 1.5395066 -1.8391877 -0.7493073 -1.0603964 -0.34350327 -0.13933049 -0.32751372 -1.4406532 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'bool'> - mask_fill:zeros - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5181.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[ 1.0418688 0.15350871 0.05008386 0.24163963 -0.06122663 0.27818647 0.01007866 1.5071068 0.09702311 -0.01527343]]; ov_res: [[ 1.0418688 0.15350871 0.05008386 0.24163963 -0.06122663 0.27818647 0.01007866 1.5071068 0.09702311 -0.01527343]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'bool'> - mask_fill:ones - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5183.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]; ov_res: [[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'bool'> - mask_fill:ones - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5185.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]]; ov_res: [[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'bool'> - mask_fill:ones - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5187.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[-1. -1. -1. -1. -1. -1. -1. -1. -1. -1.]]; ov_res: [[-1. -1. -1. -1. -1. -1. -1. -1. -1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'bool'> - mask_fill:random - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5189.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[ 0. -0.1213816 0.55713195 0. 0. 0.49461478 1.0542898 0.7626093 0. 0. ]]; ov_res: [[ 0. -0.1213816 0.55713195 0. 0. 0.49461478 1.0542898 0.7626093 0. 0. ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'bool'> - mask_fill:random - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5191.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[ 1.2153133 1.3160398 1.1285734 -0.6015922 0.24224405 -0.97650015 0.64650816 1. 1. 1. ]]; ov_res: [[ 1.2153133 1.3160398 1.1285734 -0.6015922 0.24224405 -0.97650015 0.64650816 1. 1. 1. ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:True - mask_dtype:<class 'bool'> - mask_fill:random - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5193.aten_masked_fill_, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill_(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:35:23 return (%4) fw_re: [[-1.0000000e+00 -1.0000000e+00 1.6487005e+00 2.3901570e+00 -1.0000000e+00 6.2060964e-01 -1.0000000e+00 4.9527478e-01 -3.0527051e-04 8.1427723e-01]]; ov_res: [[-1.0000000e+00 -1.0000000e+00 1.6487005e+00 2.3901570e+00 -1.0000000e+00 6.2060964e-01 -1.0000000e+00 4.9527478e-01 -3.0527051e-04 8.1427723e-01]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:zeros - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5194.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[ 0.69233996 0.97062576 -0.64711136 1.1470989 -0.00319868 -0.49326068 1.5897552 -0.8614529 -0.29765123 -0.3719139 ]]; ov_res: [[ 0.69233996 0.97062576 -0.64711136 1.1470989 -0.00319868 -0.49326068 1.5897552 -0.8614529 -0.29765123 -0.3719139 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:zeros - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5196.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[ 0.8422102 -0.7166524 1.7411183 1.4288961 -0.52423435 -2.3542922 -0.94151056 0.49172223 -1.5408214 0.7936472 ]]; ov_res: [[ 0.8422102 -0.7166524 1.7411183 1.4288961 -0.52423435 -2.3542922 -0.94151056 0.49172223 -1.5408214 0.7936472 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:zeros - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5198.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[ 0.23896167 0.628503 -1.4383811 -0.6423127 -1.0957981 -2.0675151 -0.4247243 0.8206563 -0.17866205 -0.3599194 ]]; ov_res: [[ 0.23896167 0.628503 -1.4383811 -0.6423127 -1.0957981 -2.0675151 -0.4247243 0.8206563 -0.17866205 -0.3599194 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:ones - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5200.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]; ov_res: [[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:ones - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5202.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]]; ov_res: [[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:ones - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5204.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[-1. -1. -1. -1. -1. -1. -1. -1. -1. -1.]]; ov_res: [[-1. -1. -1. -1. -1. -1. -1. -1. -1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:random - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5206.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[-0.13507597 -0.25679067 0.39954183 0. -1.2897344 0. 0. -0.22437383 -0.94456017 0. ]]; ov_res: [[-0.13507597 -0.25679067 0.39954183 0. -1.2897344 0. 0. -0.22437383 -0.94456017 0. ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:random - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5208.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[ 1. 1. -0.69278276 -0.85621077 -0.15256497 -0.12042563 -0.75345707 0.5072333 1. 1. ]]; ov_res: [[ 1. 1. -0.69278276 -0.85621077 -0.15256497 -0.12042563 -0.75345707 0.5072333 1. 1. ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:random - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5210.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[-0.5928306 -1.0172781 -0.45207387 0.8321809 -1. 1.0957458 -1. -1. -1.6615019 -1. ]]; ov_res: [[-0.5928306 -1.0172781 -0.45207387 0.8321809 -1. 1.0957458 -1. -1. -1.6615019 -1. ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'bool'> - mask_fill:zeros - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5212.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[ 0.84154 0.6468004 -0.54106027 0.64129716 -1.9196135 0.98148215 0.11973006 0.11198533 -0.26944086 -0.5041878 ]]; ov_res: [[ 0.84154 0.6468004 -0.54106027 0.64129716 -1.9196135 0.98148215 0.11973006 0.11198533 -0.26944086 -0.5041878 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'bool'> - mask_fill:zeros - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5214.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[-2.1697197 -0.25209412 -0.08780307 1.339164 -1.9388283 -0.94114405 -0.97482485 -0.39556617 -1.0659348 0.52396166]]; ov_res: [[-2.1697197 -0.25209412 -0.08780307 1.339164 -1.9388283 -0.94114405 -0.97482485 -0.39556617 -1.0659348 0.52396166]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'bool'> - mask_fill:zeros - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5216.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[ 0.35764587 -0.16514422 1.5012441 -1.793895 -1.249402 -0.55804414 0.5265367 1.8669045 -0.4684698 -0.47168452]]; ov_res: [[ 0.35764587 -0.16514422 1.5012441 -1.793895 -1.249402 -0.55804414 0.5265367 1.8669045 -0.4684698 -0.47168452]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'bool'> - mask_fill:ones - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5218.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]; ov_res: [[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'bool'> - mask_fill:ones - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5220.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]]; ov_res: [[1. 1. 1. 1. 1. 1. 1. 1. 1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'bool'> - mask_fill:ones - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5222.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[-1. -1. -1. -1. -1. -1. -1. -1. -1. -1.]]; ov_res: [[-1. -1. -1. -1. -1. -1. -1. -1. -1. -1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'bool'> - mask_fill:random - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5224.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=0.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[-0.79449266 1.1986614 0. 0. 0. -1.0264828 0. 0.2979191 -1.4174436 -1.6697022 ]]; ov_res: [[-0.79449266 1.1986614 0. 0. 0. -1.0264828 0. 0.2979191 -1.4174436 -1.6697022 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'bool'> - mask_fill:random - value:1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5226.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[-0.5218648 0.33053893 1. -1.6632289 -1.5903193 1. 1. -0.16750887 1. 0.35914317]]; ov_res: [[-0.5218648 0.33053893 1. -1.6632289 -1.5903193 1. 1. -0.16750887 1. 0.35914317]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_masked_fill.py::TestMaskedFill::test_masked_fill[ ie_device:CPU - precision:FP32 - inplace:False - mask_dtype:<class 'bool'> - mask_fill:random - value:-1.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_masked_fill.___torch_mangle_5228.aten_masked_fill, %x.1 : Tensor, %mask.1 : Tensor): %self.value : float = prim::Constant[value=-1.]() %4 : Tensor = aten::masked_fill(%x.1, %mask.1, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_masked_fill.py:27:23 return (%4) fw_re: [[ 0.50061345 -1. -1.4748032 -1. -0.22157034 2.1871836 -0.31513873 -1. -1. -2.0563762 ]]; ov_res: [[ 0.50061345 -1. -1.4748032 -1. -0.22157034 2.1871836 -0.31513873 -1. -1. -2.0563762 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_reduce_min_max[ ie_device:CPU - precision:FP32 - op_type:min - axes:None - keep_dims:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5229.aten_min_max, %x.1 : Tensor): %2 : Tensor = aten::min(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:28:23 return (%2) fw_re: -3.0406761169433594; ov_res: -3.0406761169433594 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_reduce_min_max[ ie_device:CPU - precision:FP32 - op_type:min - axes:1 - keep_dims:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5230.aten_min_max_3args, %x.1 : Tensor): %self.keep_dims : bool = prim::Constant[value=0]() %self.axes : int = prim::Constant[value=1]() %4 : Tensor, %5 : Tensor = aten::min(%x.1, %self.axes, %self.keep_dims) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:38:23 %6 : NamedTuple(values : Tensor, indices : Tensor) = prim::TupleConstruct(%4, %5) return (%6) fw_re: [[[-0.7423569 -1.77787 -0.76274484 -0.46802184 -1.8255299 -0.06480574 -2.2238002 -0.18589538 -1.018509 -1.1444001 ] [-1.085384 -0.42892298 -0.62059927 -1.4757586 -0.25288206 -0.06613322 -0.8425839 -0.9971594 -0.73247445 -1.0915611 ] [-0.51593643 -1.8697422 -1.056149 -0.55491316 -2.276371 -0.8969867 -0.997742 -0.93602896 -0.97330093 -1.2435875 ] [-1.6874094 0.22630565 -0.7850089 -0.6420992 -0.21034724 -0.6066769 -0.0386085 -0.15011911 -1.07987 -0.3652378 ] [-0.5537793 -0.0887104 -0.3337089 -2.0193493 -0.9897633 -0.03784892 -1.5305123 -0.8562131 -0.72696143 -0.43696785] [ 0.00584361 -0.38543263 -1.6833887 -1.9606318 -2.707503 -0.14370193 -0.01768273 -0.8814114 -1.779414 -1.2644968 ] [-0.09104536 -1.7165796 0.63991255 0.6122143 -1.2939413 0.02129135 -1.2990098 -2.0096126 -0.8873795 -0.39387006] [-0.97500306 -2.1637294 -1.7805766 0.87402165 -0.370087 -0.35826007 -1.1437635 -1.4121773 -0.61812085 -2.732775 ] [-0.6693555 -1.1357557 -0.70336944 -1.2040519 -1.008382 -2.451088 -1.8778447 -0.13703455 -1.1687598 -2.8011534 ] [-0.80495024 -2.2741473 -0.44260606 0.4402206 0.23612806 -0.37154728 -1.2146937 -0.964274 -0.9745447 -0.65720165]]]; ov_res: [[[-0.7423569 -1.77787 -0.76274484 -0.46802184 -1.8255299 -0.06480574 -2.2238002 -0.18589538 -1.018509 -1.1444001 ] [-1.085384 -0.42892298 -0.62059927 -1.4757586 -0.25288206 -0.06613322 -0.8425839 -0.9971594 -0.73247445 -1.0915611 ] [-0.51593643 -1.8697422 -1.056149 -0.55491316 -2.276371 -0.8969867 -0.997742 -0.93602896 
-0.97330093 -1.2435875 ] [-1.6874094 0.22630565 -0.7850089 -0.6420992 -0.21034724 -0.6066769 -0.0386085 -0.15011911 -1.07987 -0.3652378 ] [-0.5537793 -0.0887104 -0.3337089 -2.0193493 -0.9897633 -0.03784892 -1.5305123 -0.8562131 -0.72696143 -0.43696785] [ 0.00584361 -0.38543263 -1.6833887 -1.9606318 -2.707503 -0.14370193 -0.01768273 -0.8814114 -1.779414 -1.2644968 ] [-0.09104536 -1.7165796 0.63991255 0.6122143 -1.2939413 0.02129135 -1.2990098 -2.0096126 -0.8873795 -0.39387006] [-0.97500306 -2.1637294 -1.7805766 0.87402165 -0.370087 -0.35826007 -1.1437635 -1.4121773 -0.61812085 -2.732775 ] [-0.6693555 -1.1357557 -0.70336944 -1.2040519 -1.008382 -2.451088 -1.8778447 -0.13703455 -1.1687598 -2.8011534 ] [-0.80495024 -2.2741473 -0.44260606 0.4402206 0.23612806 -0.37154728 -1.2146937 -0.964274 -0.9745447 -0.65720165]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[2 1 0 1 2 1 0 2 2 0] [1 0 1 1 0 2 2 2 2 0] [0 0 1 2 1 2 2 1 0 0] [0 1 0 0 0 2 0 0 2 0] [0 2 1 1 0 2 0 1 0 1] [1 2 2 2 2 1 1 0 0 0] [1 0 1 0 2 0 2 0 0 2] [0 1 2 2 2 0 0 2 2 2] [1 0 1 2 2 1 2 2 0 2] [2 0 1 1 1 2 0 1 2 0]]]; ov_res: [[[2 1 0 1 2 1 0 2 2 0] [1 0 1 1 0 2 2 2 2 0] [0 0 1 2 1 2 2 1 0 0] [0 1 0 0 0 2 0 0 2 0] [0 2 1 1 0 2 0 1 0 1] [1 2 2 2 2 1 1 0 0 0] [1 0 1 0 2 0 2 0 0 2] [0 1 2 2 2 0 0 2 2 2] [1 0 1 2 2 1 2 2 0 2] [2 0 1 1 1 2 0 1 2 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_reduce_min_max[ ie_device:CPU - precision:FP32 - op_type:min - axes:1 - keep_dims:True ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5232.aten_min_max_3args, %x.1 : Tensor): %self.keep_dims : bool = prim::Constant[value=1]() %self.axes : int = prim::Constant[value=1]() %4 : Tensor, %5 : Tensor = aten::min(%x.1, %self.axes, %self.keep_dims) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:38:23 %6 : NamedTuple(values : Tensor, indices : Tensor) = prim::TupleConstruct(%4, %5) return (%6) ed at src/core/src/op/concat.cpp:33: While validating node 'v0::Concat Concat_6851608 () -> (dynamic[...])' with friendly_name 'Concat_6851608': At least one argument required. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'get_input_size() >= 1' failed at src/core/src/op/concat.cpp:33: While validating node 'v0::Concat Concat_6853666 () -> (dynamic[...])' with friendly_name 'Concat_6853666': At least one argument required. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'get_input_size() >= 1' failed at src/core/src/op/concat.cpp:33: While validating node 'v0::Concat Concat_6855724 () -> (dynamic[...])' with friendly_name 'Concat_6855724': At least one argument required. 
Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'get_input_size() >= 1' failed at src/core/src/op/concat.cpp:33: While validating node 'v0::Concat Concat_6857782 () -> (dynamic[...])' with friendly_name 'Concat_6857782': At least one argument required. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'get_input_size() >= 1' failed at src/core/src/op/concat.cpp:33: While validating node 'v0::Concat Concat_6859840 () -> (dynamic[...])' with friendly_name 'Concat_6859840': At least one argument required. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'get_input_size() >= 1' failed at src/core/src/op/concat.cpp:33: While validating node 'v0::Concat Concat_6861898 () -> (dynamic[...])' with friendly_name 'Concat_6861898': At least one argument required. 
Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API fw_re: [[[[-0.29844978 -0.8843821 -1.7865462 -1.0906185 -0.4608394 -0.76993096 -0.1220537 -1.726557 -1.7823101 -0.9943917 ] [-0.2991807 0.42760217 -2.3050976 -0.38272238 -1.1174713 -1.3386827 -0.55806494 -0.83844805 -2.4196527 -1.4326291 ] [ 0.3308991 -0.710504 -0.8574063 -1.0177592 -0.072662 -0.7500252 -0.2643266 -1.2914367 -0.47538713 -0.6847732 ] [-0.57459044 -0.35579568 -1.3122288 -0.6637138 -1.7081133 0.65951806 -0.728519 -0.73597604 -1.6960002 -0.64186096] [-1.0547148 -0.00846962 -0.22296844 -1.7098881 -1.9768515 -1.5452507 -0.348847 -0.48890796 -0.24943979 0.7115979 ] [-0.8515192 -0.45016587 0.8604937 -0.282929 -3.3560245 -2.0008895 -0.6692392 -0.6035586 -2.0691316 -1.367427 ] [-1.3565197 -0.7452039 -0.92381185 -0.84152573 -1.7981396 -0.53896695 -0.06256934 -2.1878319 -1.8559241 0.5964465 ] [-0.93833554 0.65064216 -0.5309055 -1.1150976 -0.7424978 -1.8924723 -1.5136029 0.22215664 0.3247934 0.38409612] [-2.3307345 -0.5651044 -2.2786999 -1.378814 -1.0248437 -1.2104664 -2.5719535 -0.98822737 -0.5938065 -0.6094458 ] [-1.3395756 0.26961118 -0.46028468 -0.8355574 -0.93237066 -1.1600465 -1.0821413 -0.23095196 -1.0864817 -0.47457853]]]]; ov_res: [[[[-0.29844978 
-0.8843821 -1.7865462 -1.0906185 -0.4608394 -0.76993096 -0.1220537 -1.726557 -1.7823101 -0.9943917 ] [-0.2991807 0.42760217 -2.3050976 -0.38272238 -1.1174713 -1.3386827 -0.55806494 -0.83844805 -2.4196527 -1.4326291 ] [ 0.3308991 -0.710504 -0.8574063 -1.0177592 -0.072662 -0.7500252 -0.2643266 -1.2914367 -0.47538713 -0.6847732 ] [-0.57459044 -0.35579568 -1.3122288 -0.6637138 -1.7081133 0.65951806 -0.728519 -0.73597604 -1.6960002 -0.64186096] [-1.0547148 -0.00846962 -0.22296844 -1.7098881 -1.9768515 -1.5452507 -0.348847 -0.48890796 -0.24943979 0.7115979 ] [-0.8515192 -0.45016587 0.8604937 -0.282929 -3.3560245 -2.0008895 -0.6692392 -0.6035586 -2.0691316 -1.367427 ] [-1.3565197 -0.7452039 -0.92381185 -0.84152573 -1.7981396 -0.53896695 -0.06256934 -2.1878319 -1.8559241 0.5964465 ] [-0.93833554 0.65064216 -0.5309055 -1.1150976 -0.7424978 -1.8924723 -1.5136029 0.22215664 0.3247934 0.38409612] [-2.3307345 -0.5651044 -2.2786999 -1.378814 -1.0248437 -1.2104664 -2.5719535 -0.98822737 -0.5938065 -0.6094458 ] [-1.3395756 0.26961118 -0.46028468 -0.8355574 -0.93237066 -1.1600465 -1.0821413 -0.23095196 -1.0864817 -0.47457853]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[0 0 2 0 1 1 0 0 0 0] [2 1 1 1 0 1 1 2 0 2] [2 2 1 1 0 2 1 0 1 2] [0 2 1 0 2 2 0 1 0 2] [1 2 0 2 0 2 2 1 2 0] [0 1 1 1 1 0 1 1 2 1] [2 1 1 2 2 1 0 0 1 2] [2 1 2 2 0 0 1 1 1 0] [1 2 2 1 1 2 1 0 2 2] [1 2 0 2 2 2 1 0 2 1]]]]; ov_res: [[[[0 0 2 0 1 1 0 0 0 0] [2 1 1 1 0 1 1 2 0 2] [2 2 1 1 0 2 1 0 1 2] [0 2 1 0 2 2 0 1 0 2] [1 2 0 2 0 2 2 1 2 0] [0 1 1 1 1 0 1 1 2 1] [2 1 1 2 2 1 0 0 1 2] [2 1 2 2 0 0 1 1 1 0] [1 2 2 1 1 2 1 0 2 2] [1 2 0 2 2 2 1 0 2 1]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_reduce_min_max[ ie_device:CPU - precision:FP32 - op_type:min - axes:-1 - keep_dims:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5234.aten_min_max_3args, %x.1 : Tensor): %self.keep_dims : bool = prim::Constant[value=0]() %self.axes : int = prim::Constant[value=-1]() %4 : Tensor, %5 : Tensor = aten::min(%x.1, %self.axes, %self.keep_dims) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:38:23 %6 : NamedTuple(values : Tensor, indices : Tensor) = prim::TupleConstruct(%4, %5) return (%6) fw_re: [[[-0.9098722 -1.1840612 -1.6050318 -1.5607437 -1.2380223 -0.76350486 -1.0845602 -1.8221415 -1.6995057 -1.6464171 ] [-1.3251878 -1.8789217 -0.76642793 -1.7081424 -2.1198485 -1.4927024 -1.6330302 -1.3744179 -1.515565 -1.231363 ] [-1.418786 -1.090459 -1.041695 -1.7337053 -2.186909 -1.1787997 -1.4737065 -0.5467786 -1.1227006 -1.48003 ]]]; ov_res: [[[-0.9098722 -1.1840612 -1.6050318 -1.5607437 -1.2380223 -0.76350486 -1.0845602 -1.8221415 -1.6995057 -1.6464171 ] [-1.3251878 -1.8789217 -0.76642793 -1.7081424 -2.1198485 -1.4927024 -1.6330302 -1.3744179 -1.515565 -1.231363 ] [-1.418786 -1.090459 -1.041695 -1.7337053 -2.186909 -1.1787997 -1.4737065 -0.5467786 -1.1227006 -1.48003 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[8 7 3 7 8 9 6 6 2 4] [8 5 7 8 7 5 2 1 4 6] [2 5 9 2 2 4 2 3 6 0]]]; ov_res: [[[8 7 3 7 8 9 6 6 2 4] [8 5 7 8 7 5 2 1 4 6] [2 5 9 2 2 4 2 3 6 0]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_reduce_min_max[ ie_device:CPU - precision:FP32 - op_type:min - axes:-1 - keep_dims:True ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5236.aten_min_max_3args, %x.1 : Tensor): %self.keep_dims : bool = prim::Constant[value=1]() %self.axes : int = prim::Constant[value=-1]() %4 : Tensor, %5 : Tensor = aten::min(%x.1, %self.axes, %self.keep_dims) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:38:23 %6 : NamedTuple(values : Tensor, indices : Tensor) = prim::TupleConstruct(%4, %5) return (%6) fw_re: [[[[-1.9820544 ] [-1.9312581 ] [-1.5092715 ] [-0.8499087 ] [-3.2891746 ] [-2.3336785 ] [-1.3246617 ] [-1.0706518 ] [-1.5818152 ] [-1.3941523 ]] [[-0.62509686] [-1.9945856 ] [-2.1966743 ] [-1.0952215 ] [-2.1037416 ] [-1.4352088 ] [-2.8547533 ] [-1.6958628 ] [-0.65905565] [-2.9853039 ]] [[-0.36896494] [-0.8992543 ] [-0.8415009 ] [-1.2166823 ] [-1.5177317 ] [-1.4365498 ] [-1.9943204 ] [-0.62037677] [-1.776337 ] [-0.9634128 ]]]]; ov_res: [[[[-1.9820544 ] [-1.9312581 ] [-1.5092715 ] [-0.8499087 ] [-3.2891746 ] [-2.3336785 ] [-1.3246617 ] [-1.0706518 ] [-1.5818152 ] [-1.3941523 ]] [[-0.62509686] [-1.9945856 ] [-2.1966743 ] [-1.0952215 ] [-2.1037416 ] [-1.4352088 ] [-2.8547533 ] [-1.6958628 ] [-0.65905565] [-2.9853039 ]] [[-0.36896494] [-0.8992543 ] [-0.8415009 ] [-1.2166823 ] [-1.5177317 ] [-1.4365498 ] [-1.9943204 ] [-0.62037677] [-1.776337 ] [-0.9634128 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[9] [6] [9] [3] [8] [8] [3] [0] [1] [0]] [[9] [0] [8] [9] [2] [0] [4] [3] [8] [2]] [[4] [1] [1] [9] [5] [1] [0] [6] [4] [8]]]]; ov_res: [[[[9] [6] [9] [3] [8] [8] [3] [0] [1] [0]] [[9] [0] [8] [9] [2] [0] [4] [3] [8] [2]] [[4] [1] [1] [9] [5] [1] [0] [6] [4] [8]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_reduce_min_max[ ie_device:CPU - precision:FP32 - op_type:max - axes:None - keep_dims:None ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5238.aten_min_max, %x.1 : Tensor): %2 : Tensor = aten::max(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:28:23 return (%2) fw_re: 2.512571334838867; ov_res: 2.512571334838867 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_reduce_min_max[ ie_device:CPU - precision:FP32 - op_type:max - axes:1 - keep_dims:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5240.aten_min_max_3args, %x.1 : Tensor): %self.keep_dims : bool = prim::Constant[value=0]() %self.axes : int = prim::Constant[value=1]() %4 : Tensor, %5 : Tensor = aten::max(%x.1, %self.axes, %self.keep_dims) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:38:23 %6 : NamedTuple(values : Tensor, indices : Tensor) = prim::TupleConstruct(%4, %5) return (%6) fw_re: [[[-0.34047806 0.18201485 1.866523 0.34920734 0.7853315 1.2168177 0.5921109 3.0395198 1.4705051 0.8082995 ] [ 1.6903192 1.1203822 1.1852956 1.3097323 0.9082322 0.98221856 0.25790703 2.2182543 0.58945346 0.8342965 ] [ 0.860183 -0.8237237 -0.11693794 1.3598489 1.3040257 0.33353314 0.25737077 0.1649482 0.93189317 -0.08528721] [ 1.8315524 1.4723258 1.4325104 0.63087785 0.9736269 1.2983644 1.4012349 0.8322789 2.3303313 1.2570696 ] [ 0.76035833 -0.21951222 0.15968725 1.3305548 1.0274798 -0.55149114 1.0326784 1.7014174 0.9203922 1.0489179 ] [ 2.0933452 0.8274614 -0.05383935 0.75519484 1.0936028 1.7958621 0.09450777 1.3806499 -0.23575999 1.0023804 ] [ 1.5190202 0.82023925 -0.10333627 0.66851777 0.66810334 0.84889627 0.93303555 0.3318228 1.7139839 0.0855725 ] [ 0.37705457 1.6670722 0.96996677 0.5013285 -0.11604399 1.5900593 1.0356551 0.9232869 1.9794356 0.05130078] [ 1.2645633 1.9565984 1.3105242 1.0886178 1.242537 0.25758502 2.0325499 0.43176273 -0.48024982 0.5851792 ] [-0.44604656 1.5352457 0.79151136 -0.7328946 0.11421508 1.5868797 0.8653789 2.0843003 0.25383428 1.3158398 ]]]; ov_res: [[[-0.34047806 0.18201485 1.866523 0.34920734 0.7853315 1.2168177 0.5921109 3.0395198 1.4705051 0.8082995 ] [ 1.6903192 1.1203822 1.1852956 1.3097323 0.9082322 0.98221856 0.25790703 2.2182543 0.58945346 0.8342965 ] [ 0.860183 -0.8237237 -0.11693794 1.3598489 1.3040257 0.33353314 0.25737077 0.1649482 0.93189317 -0.08528721] [ 1.8315524 1.4723258 1.4325104 0.63087785 0.9736269 
1.2983644 1.4012349 0.8322789 2.3303313 1.2570696 ] [ 0.76035833 -0.21951222 0.15968725 1.3305548 1.0274798 -0.55149114 1.0326784 1.7014174 0.9203922 1.0489179 ] [ 2.0933452 0.8274614 -0.05383935 0.75519484 1.0936028 1.7958621 0.09450777 1.3806499 -0.23575999 1.0023804 ] [ 1.5190202 0.82023925 -0.10333627 0.66851777 0.66810334 0.84889627 0.93303555 0.3318228 1.7139839 0.0855725 ] [ 0.37705457 1.6670722 0.96996677 0.5013285 -0.11604399 1.5900593 1.0356551 0.9232869 1.9794356 0.05130078] [ 1.2645633 1.9565984 1.3105242 1.0886178 1.242537 0.25758502 2.0325499 0.43176273 -0.48024982 0.5851792 ] [-0.44604656 1.5352457 0.79151136 -0.7328946 0.11421508 1.5868797 0.8653789 2.0843003 0.25383428 1.3158398 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[1 2 2 1 2 2 2 2 2 0] [0 0 0 0 1 0 1 0 2 0] [2 1 1 0 1 2 1 2 1 1] [2 0 2 0 1 2 1 0 2 1] [0 2 0 0 1 0 2 1 0 1] [2 2 2 0 0 0 1 2 0 2] [0 1 0 2 1 1 0 1 1 2] [2 1 0 1 2 0 2 2 0 0] [2 1 2 0 0 0 0 2 1 2] [1 0 0 1 1 2 1 2 1 2]]]; ov_res: [[[1 2 2 1 2 2 2 2 2 0] [0 0 0 0 1 0 1 0 2 0] [2 1 1 0 1 2 1 2 1 1] [2 0 2 0 1 2 1 0 2 1] [0 2 0 0 1 0 2 1 0 1] [2 2 2 0 0 0 1 2 0 2] [0 1 0 2 1 1 0 1 1 2] [2 1 0 1 2 0 2 2 0 0] [2 1 2 0 0 0 0 2 1 2] [1 0 0 1 1 2 1 2 1 2]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_reduce_min_max[ ie_device:CPU - precision:FP32 - op_type:max - axes:1 - keep_dims:True ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5242.aten_min_max_3args, %x.1 : Tensor): %self.keep_dims : bool = prim::Constant[value=1]() %self.axes : int = prim::Constant[value=1]() %4 : Tensor, %5 : Tensor = aten::max(%x.1, %self.axes, %self.keep_dims) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:38:23 %6 : NamedTuple(values : Tensor, indices : Tensor) = prim::TupleConstruct(%4, %5) return (%6) fw_re: [[[[ 1.395328 0.6436694 0.4882743 0.9014166 0.1072718 -0.2235669 1.0284787 2.208726 0.7152703 0.5798029 ] [ 0.03898196 0.69149774 1.0895764 1.1773549 0.86546624 0.8622565 -0.04554618 0.9942447 0.7099738 2.332613 ] [ 2.8369272 1.2322748 1.7022661 0.94195545 1.814994 0.9119123 0.4725625 0.24469194 0.4034035 0.131492 ] [-0.08391801 0.9872851 2.3945568 0.865783 0.8933622 2.3360665 2.7333434 1.4722697 1.3249811 -0.08229084] [ 1.4248956 1.5703173 1.1630336 0.6217973 1.3316207 0.882269 1.4216709 1.330372 -0.20914884 1.4210073 ] [ 1.4208289 0.55191046 1.3857993 0.13261418 0.7879588 0.28103834 0.5178324 -0.6211334 -0.1103908 0.8380002 ] [ 1.1326501 1.0818206 -0.03246906 -0.17391822 0.9478829 0.5763633 0.1273148 1.4194356 0.838832 0.6894567 ] [ 0.1906269 2.0757618 0.46610704 0.7990927 1.7287471 0.9947167 1.2417111 2.564059 0.9351813 1.3156329 ] [ 0.3286119 0.67551637 1.274484 0.7989829 1.715697 1.2619 -0.25428253 -1.1098441 1.4615232 2.0408072 ] [ 1.4004501 1.40874 1.0823175 1.4748031 0.40536198 4.123074 0.76160073 1.0822035 0.50825655 0.36531574]]]]; ov_res: [[[[ 1.395328 0.6436694 0.4882743 0.9014166 0.1072718 -0.2235669 1.0284787 2.208726 0.7152703 0.5798029 ] [ 0.03898196 0.69149774 1.0895764 1.1773549 0.86546624 0.8622565 -0.04554618 0.9942447 0.7099738 2.332613 ] [ 2.8369272 1.2322748 1.7022661 0.94195545 1.814994 0.9119123 0.4725625 0.24469194 0.4034035 0.131492 ] [-0.08391801 0.9872851 2.3945568 0.865783 0.8933622 2.3360665 2.7333434 1.4722697 1.3249811 
-0.08229084] [ 1.4248956 1.5703173 1.1630336 0.6217973 1.3316207 0.882269 1.4216709 1.330372 -0.20914884 1.4210073 ] [ 1.4208289 0.55191046 1.3857993 0.13261418 0.7879588 0.28103834 0.5178324 -0.6211334 -0.1103908 0.8380002 ] [ 1.1326501 1.0818206 -0.03246906 -0.17391822 0.9478829 0.5763633 0.1273148 1.4194356 0.838832 0.6894567 ] [ 0.1906269 2.0757618 0.46610704 0.7990927 1.7287471 0.9947167 1.2417111 2.564059 0.9351813 1.3156329 ] [ 0.3286119 0.67551637 1.274484 0.7989829 1.715697 1.2619 -0.25428253 -1.1098441 1.4615232 2.0408072 ] [ 1.4004501 1.40874 1.0823175 1.4748031 0.40536198 4.123074 0.76160073 1.0822035 0.50825655 0.36531574]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[0 0 2 0 0 2 0 0 0 2] [1 1 1 2 0 0 1 0 1 0] [2 2 2 2 2 1 2 1 0 2] [0 1 2 2 2 2 2 2 2 1] [0 0 1 2 0 0 1 1 2 0] [1 0 1 0 1 2 1 1 1 2] [0 1 2 1 2 1 0 1 2 1] [1 0 1 1 2 2 0 2 0 2] [1 2 2 2 0 0 1 1 2 1] [2 2 0 0 2 1 1 2 2 2]]]]; ov_res: [[[[0 0 2 0 0 2 0 0 0 2] [1 1 1 2 0 0 1 0 1 0] [2 2 2 2 2 1 2 1 0 2] [0 1 2 2 2 2 2 2 2 1] [0 0 1 2 0 0 1 1 2 0] [1 0 1 0 1 2 1 1 1 2] [0 1 2 1 2 1 0 1 2 1] [1 0 1 1 2 2 0 2 0 2] [1 2 2 2 0 0 1 1 2 1] [2 2 0 0 2 1 1 2 2 2]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_reduce_min_max[ ie_device:CPU - precision:FP32 - op_type:max - axes:-1 - keep_dims:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5244.aten_min_max_3args, %x.1 : Tensor): %self.keep_dims : bool = prim::Constant[value=0]() %self.axes : int = prim::Constant[value=-1]() %4 : Tensor, %5 : Tensor = aten::max(%x.1, %self.axes, %self.keep_dims) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:38:23 %6 : NamedTuple(values : Tensor, indices : Tensor) = prim::TupleConstruct(%4, %5) return (%6) fw_re: [[[1.8713628 0.9533122 0.89795625 0.67119807 1.6678112 2.0991042 1.4358383 2.2315538 0.68902737 1.2346643 ] [1.6983098 1.2067374 0.48432693 1.2327381 1.5829078 1.7891499 1.6065043 1.2679406 1.4730879 1.7320906 ] [1.5287377 0.7764036 1.0351485 1.3059434 1.9706545 2.1357627 2.496419 1.749228 2.500306 1.9553483 ]]]; ov_res: [[[1.8713628 0.9533122 0.89795625 0.67119807 1.6678112 2.0991042 1.4358383 2.2315538 0.68902737 1.2346643 ] [1.6983098 1.2067374 0.48432693 1.2327381 1.5829078 1.7891499 1.6065043 1.2679406 1.4730879 1.7320906 ] [1.5287377 0.7764036 1.0351485 1.3059434 1.9706545 2.1357627 2.496419 1.749228 2.500306 1.9553483 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[7 2 5 4 5 7 7 9 1 7] [9 6 1 5 7 9 1 5 5 2] [0 2 5 6 9 3 5 9 6 2]]]; ov_res: [[[7 2 5 4 5 7 7 9 1 7] [9 6 1 5 7 9 1 5 5 2] [0 2 5 6 9 3 5 9 6 2]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_reduce_min_max[ ie_device:CPU - precision:FP32 - op_type:max - axes:-1 - keep_dims:True ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5246.aten_min_max_3args, %x.1 : Tensor): %self.keep_dims : bool = prim::Constant[value=1]() %self.axes : int = prim::Constant[value=-1]() %4 : Tensor, %5 : Tensor = aten::max(%x.1, %self.axes, %self.keep_dims) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:38:23 %6 : NamedTuple(values : Tensor, indices : Tensor) = prim::TupleConstruct(%4, %5) return (%6) fw_re: [[[[2.0227544 ] [1.5260413 ] [1.9807647 ] [2.052346 ] [1.3309333 ] [2.0613317 ] [2.1251662 ] [0.93907183] [0.65452534] [1.7485021 ]] [[1.3953869 ] [1.5069278 ] [1.578905 ] [1.6719363 ] [1.6512107 ] [1.8924365 ] [1.9422247 ] [2.7245753 ] [0.7888489 ] [1.5911633 ]] [[1.2507362 ] [2.0029206 ] [1.9211497 ] [1.8347168 ] [0.44919977] [1.9298865 ] [0.66314596] [0.7858519 ] [1.8404275 ] [2.4954417 ]]]]; ov_res: [[[[2.0227544 ] [1.5260413 ] [1.9807647 ] [2.052346 ] [1.3309333 ] [2.0613317 ] [2.1251662 ] [0.93907183] [0.65452534] [1.7485021 ]] [[1.3953869 ] [1.5069278 ] [1.578905 ] [1.6719363 ] [1.6512107 ] [1.8924365 ] [1.9422247 ] [2.7245753 ] [0.7888489 ] [1.5911633 ]] [[1.2507362 ] [2.0029206 ] [1.9211497 ] [1.8347168 ] [0.44919977] [1.9298865 ] [0.66314596] [0.7858519 ] [1.8404275 ] [2.4954417 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[8] [1] [4] [3] [5] [2] [4] [4] [7] [2]] [[6] [8] [7] [6] [2] [9] [9] [1] [3] [6]] [[4] [2] [8] [8] [6] [3] [1] [5] [1] [7]]]]; ov_res: [[[[8] [1] [4] [3] [5] [2] [4] [4] [7] [2]] [[6] [8] [7] [6] [2] [9] [9] [1] [3] [6]] [[4] [2] [8] [8] [6] [3] [1] [5] [1] [7]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_min_max[ ie_device:CPU - precision:FP32 - op_type:min ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5247.aten_min_max_2args, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::min(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:46:23 return (%3) fw_re: [[[[-3.19343098e-02 5.09604335e-01 -7.34794497e-01 -3.25883567e-01 2.74695396e-01 -7.38798141e-01 -2.15904212e+00 -9.22318995e-01 -5.26199102e-01 -6.14829063e-01] [-7.49491155e-01 -9.39334214e-01 -6.71031237e-01 2.51954406e-01 2.11330608e-01 -1.66403413e+00 4.43065226e-01 -2.23871517e+00 -1.40997231e+00 -1.83241630e+00] [ 4.68331754e-01 -1.43350148e+00 -7.91202843e-01 -1.37574780e+00 -3.58474493e-01 -5.59547544e-01 -1.36652052e-01 -1.91192663e+00 -4.28677559e-01 9.66510028e-02] [-1.84025204e+00 -1.55278099e+00 -4.77906205e-02 8.64490271e-02 -1.08646095e+00 -1.42995834e+00 -2.07575035e+00 -2.48731077e-01 -1.04300308e+00 -9.40800369e-01] [-2.24802941e-01 -9.39776227e-02 -1.06533873e+00 8.76576602e-01 2.62200743e-01 -6.50497854e-01 -2.96193222e-03 -4.06441808e-01 -2.20154357e+00 5.96271306e-02] [-3.63938618e+00 3.67033541e-01 4.82249826e-01 -4.93202299e-01 -1.42120171e+00 -9.34501231e-01 -9.27910984e-01 -3.55006452e-03 -1.31261349e+00 -8.94137025e-01] [ 4.38144058e-01 4.95640367e-01 -1.23145413e+00 -1.88457882e+00 -1.13899875e+00 1.91495359e-01 -9.08023357e-01 -9.48576331e-01 3.10755223e-01 -7.41226554e-01] [ 7.88830101e-01 -6.37238204e-01 -5.38828671e-01 3.21347773e-01 -1.45068860e+00 3.92486870e-01 1.14761961e+00 -1.22310400e+00 -1.66949153e+00 2.14510217e-01] [ 1.30069637e+00 -1.54007292e+00 -1.44103920e+00 -1.05004442e+00 -3.40182036e-01 -8.44215333e-01 -1.73903787e+00 -6.85814917e-01 9.69943523e-01 -1.02372013e-01] [ 1.09667277e+00 -2.77974939e+00 -2.34447145e+00 1.40643105e-01 -1.56184971e+00 -1.43811250e+00 -4.32874709e-01 -1.35392696e-01 -1.04529202e+00 -5.94743825e-02]] [[-1.48408866e+00 8.54366273e-02 2.80607432e-01 -6.63237154e-01 
2.63594866e-01 -2.78031882e-02 -7.26801157e-01 4.21983391e-01 3.73984516e-01 -2.03803730e+00] [-1.20035923e+00 -1.39254141e+00 4.25513871e-02 -2.13258266e+00 -6.90082163e-02 -7.61447608e-01 5.83435744e-02 4.39918637e-01 1.43815324e-01 -5.53594291e-01] [-1.14598620e+00 -1.52855015e+00 -3.42261642e-01 1.26366287e-01 -8.78153324e-01 -4.03965056e-01 -7.71108270e-01 -1.07616889e+00 -1.04758823e+00 3.95363182e-01] [-2.07884207e-01 1.93922257e+00 -3.59902173e-01 2.13967621e-01 -1.70994306e+00 2.69874215e-01 -2.16303515e+00 -4.56243157e-01 4.26900506e-01 -6.22899473e-01] [-5.49744189e-01 -1.07503325e-01 -1.41614139e+00 4.44492027e-02 -1.01082170e+00 -2.46124315e+00 -7.98860863e-02 -2.07102463e-01 -1.53255725e+00 -8.89882028e-01] [-3.34878594e-01 -1.02174222e+00 -1.08892262e+00 -2.34839749e+00 -1.99823424e-01 -6.75488472e-01 -1.01053762e+00 2.18045786e-01 -2.06866741e-01 -1.66152167e+00] [-1.86143506e+00 -4.50084895e-01 -5.43270946e-01 -4.63301748e-01 -1.18182600e+00 8.35659802e-01 -1.15616632e+00 -6.17440790e-02 2.31336188e-02 -1.64635018e-01] [-9.50327158e-01 -1.63757634e+00 -1.35825217e-01 6.12182140e-01 6.64990395e-02 1.70220494e-01 -8.09330583e-01 8.80807579e-01 8.13584402e-02 -8.36345732e-01] [-3.58539522e-02 -3.49897414e-01 -1.15225244e+00 -6.63183033e-01 -6.82743907e-01 -1.02043760e+00 -1.53008103e+00 -1.48578739e+00 3.85506749e-01 -1.10415566e+00] [-2.34963560e+00 -6.23995781e-01 -3.10965598e-01 -7.03588247e-01 -3.99996936e-01 -1.31914878e+00 -1.35929787e+00 -3.78868878e-01 -1.07864940e+00 -1.23740029e+00]] [[-1.55850554e+00 -1.24458051e+00 -5.91810763e-01 -1.59853175e-01 -2.49894485e-01 3.87838393e-01 2.78702468e-01 -1.59821987e+00 -7.43849218e-01 3.51721756e-02] [ 1.40364730e+00 4.20140058e-01 -5.17742038e-01 1.85956046e-01 -9.01171863e-01 2.98472971e-01 -1.27506495e+00 2.81123459e-01 -1.73996902e+00 6.76386893e-01] [-1.30603802e+00 -2.25011539e+00 1.01707125e+00 6.29513383e-01 4.54875112e-01 -2.57322162e-01 -1.06445539e+00 -9.25068080e-01 1.74212649e-01 
-9.19032037e-01] [-9.34594631e-01 -4.69981432e-01 1.53332126e+00 -1.08656085e+00 -1.96472478e+00 -9.87270474e-01 2.50553578e-01 -2.09393576e-01 -5.37287593e-01 5.53674549e-02] [-9.70405817e-01 -3.17895442e-01 1.02968431e+00 -1.55211997e+00 1.77802429e-01 9.53145683e-01 3.32260162e-01 -2.24821970e-01 -1.24135472e-01 5.05732417e-01] [-1.70507538e+00 -2.24550977e-01 -1.18092632e+00 4.69829857e-01 -1.31802738e+00 -1.73886681e+00 -1.15728652e+00 5.67284942e-01 -2.67407566e-01 -1.36550403e+00] [ 1.72933236e-01 -9.96633589e-01 -2.30547681e-01 7.32014060e-01 -9.54475284e-01 1.77726102e+00 -1.16158032e+00 5.17801233e-02 -6.11170769e-01 -1.17685974e+00] [-8.76227915e-01 5.28367877e-01 -4.66865987e-01 -2.97213137e-01 -4.47301537e-01 -1.53867126e+00 -4.43168759e-01 -1.76459804e-01 -1.30472183e+00 3.77327710e-01] [ 5.33567145e-02 -7.56700337e-02 -1.23992455e+00 1.04844809e+00 -8.56227279e-01 -2.19756532e+00 5.28413177e-01 -1.70643181e-01 5.62417090e-01 7.47072875e-01] [ 5.42423390e-02 1.33926859e-02 -3.82720423e+00 -2.36876154e+00 4.21030581e-01 -1.72082794e+00 -1.53982639e+00 -2.35291791e+00 -2.97033876e-01 1.36154437e+00]]]]; ov_res: [[[[-3.19343098e-02 5.09604335e-01 -7.34794497e-01 -3.25883567e-01 2.74695396e-01 -7.38798141e-01 -2.15904212e+00 -9.22318995e-01 -5.26199102e-01 -6.14829063e-01] [-7.49491155e-01 -9.39334214e-01 -6.71031237e-01 2.51954406e-01 2.11330608e-01 -1.66403413e+00 4.43065226e-01 -2.23871517e+00 -1.40997231e+00 -1.83241630e+00] [ 4.68331754e-01 -1.43350148e+00 -7.91202843e-01 -1.37574780e+00 -3.58474493e-01 -5.59547544e-01 -1.36652052e-01 -1.91192663e+00 -4.28677559e-01 9.66510028e-02] [-1.84025204e+00 -1.55278099e+00 -4.77906205e-02 8.64490271e-02 -1.08646095e+00 -1.42995834e+00 -2.07575035e+00 -2.48731077e-01 -1.04300308e+00 -9.40800369e-01] [-2.24802941e-01 -9.39776227e-02 -1.06533873e+00 8.76576602e-01 2.62200743e-01 -6.50497854e-01 -2.96193222e-03 -4.06441808e-01 -2.20154357e+00 5.96271306e-02] [-3.63938618e+00 3.67033541e-01 4.82249826e-01 
-4.93202299e-01 -1.42120171e+00 -9.34501231e-01 -9.27910984e-01 -3.55006452e-03 -1.31261349e+00 -8.94137025e-01] [ 4.38144058e-01 4.95640367e-01 -1.23145413e+00 -1.88457882e+00 -1.13899875e+00 1.91495359e-01 -9.08023357e-01 -9.48576331e-01 3.10755223e-01 -7.41226554e-01] [ 7.88830101e-01 -6.37238204e-01 -5.38828671e-01 3.21347773e-01 -1.45068860e+00 3.92486870e-01 1.14761961e+00 -1.22310400e+00 -1.66949153e+00 2.14510217e-01] [ 1.30069637e+00 -1.54007292e+00 -1.44103920e+00 -1.05004442e+00 -3.40182036e-01 -8.44215333e-01 -1.73903787e+00 -6.85814917e-01 9.69943523e-01 -1.02372013e-01] [ 1.09667277e+00 -2.77974939e+00 -2.34447145e+00 1.40643105e-01 -1.56184971e+00 -1.43811250e+00 -4.32874709e-01 -1.35392696e-01 -1.04529202e+00 -5.94743825e-02]] [[-1.48408866e+00 8.54366273e-02 2.80607432e-01 -6.63237154e-01 2.63594866e-01 -2.78031882e-02 -7.26801157e-01 4.21983391e-01 3.73984516e-01 -2.03803730e+00] [-1.20035923e+00 -1.39254141e+00 4.25513871e-02 -2.13258266e+00 -6.90082163e-02 -7.61447608e-01 5.83435744e-02 4.39918637e-01 1.43815324e-01 -5.53594291e-01] [-1.14598620e+00 -1.52855015e+00 -3.42261642e-01 1.26366287e-01 -8.78153324e-01 -4.03965056e-01 -7.71108270e-01 -1.07616889e+00 -1.04758823e+00 3.95363182e-01] [-2.07884207e-01 1.93922257e+00 -3.59902173e-01 2.13967621e-01 -1.70994306e+00 2.69874215e-01 -2.16303515e+00 -4.56243157e-01 4.26900506e-01 -6.22899473e-01] [-5.49744189e-01 -1.07503325e-01 -1.41614139e+00 4.44492027e-02 -1.01082170e+00 -2.46124315e+00 -7.98860863e-02 -2.07102463e-01 -1.53255725e+00 -8.89882028e-01] [-3.34878594e-01 -1.02174222e+00 -1.08892262e+00 -2.34839749e+00 -1.99823424e-01 -6.75488472e-01 -1.01053762e+00 2.18045786e-01 -2.06866741e-01 -1.66152167e+00] [-1.86143506e+00 -4.50084895e-01 -5.43270946e-01 -4.63301748e-01 -1.18182600e+00 8.35659802e-01 -1.15616632e+00 -6.17440790e-02 2.31336188e-02 -1.64635018e-01] [-9.50327158e-01 -1.63757634e+00 -1.35825217e-01 6.12182140e-01 6.64990395e-02 1.70220494e-01 -8.09330583e-01 8.80807579e-01 
8.13584402e-02 -8.36345732e-01] [-3.58539522e-02 -3.49897414e-01 -1.15225244e+00 -6.63183033e-01 -6.82743907e-01 -1.02043760e+00 -1.53008103e+00 -1.48578739e+00 3.85506749e-01 -1.10415566e+00] [-2.34963560e+00 -6.23995781e-01 -3.10965598e-01 -7.03588247e-01 -3.99996936e-01 -1.31914878e+00 -1.35929787e+00 -3.78868878e-01 -1.07864940e+00 -1.23740029e+00]] [[-1.55850554e+00 -1.24458051e+00 -5.91810763e-01 -1.59853175e-01 -2.49894485e-01 3.87838393e-01 2.78702468e-01 -1.59821987e+00 -7.43849218e-01 3.51721756e-02] [ 1.40364730e+00 4.20140058e-01 -5.17742038e-01 1.85956046e-01 -9.01171863e-01 2.98472971e-01 -1.27506495e+00 2.81123459e-01 -1.73996902e+00 6.76386893e-01] [-1.30603802e+00 -2.25011539e+00 1.01707125e+00 6.29513383e-01 4.54875112e-01 -2.57322162e-01 -1.06445539e+00 -9.25068080e-01 1.74212649e-01 -9.19032037e-01] [-9.34594631e-01 -4.69981432e-01 1.53332126e+00 -1.08656085e+00 -1.96472478e+00 -9.87270474e-01 2.50553578e-01 -2.09393576e-01 -5.37287593e-01 5.53674549e-02] [-9.70405817e-01 -3.17895442e-01 1.02968431e+00 -1.55211997e+00 1.77802429e-01 9.53145683e-01 3.32260162e-01 -2.24821970e-01 -1.24135472e-01 5.05732417e-01] [-1.70507538e+00 -2.24550977e-01 -1.18092632e+00 4.69829857e-01 -1.31802738e+00 -1.73886681e+00 -1.15728652e+00 5.67284942e-01 -2.67407566e-01 -1.36550403e+00] [ 1.72933236e-01 -9.96633589e-01 -2.30547681e-01 7.32014060e-01 -9.54475284e-01 1.77726102e+00 -1.16158032e+00 5.17801233e-02 -6.11170769e-01 -1.17685974e+00] [-8.76227915e-01 5.28367877e-01 -4.66865987e-01 -2.97213137e-01 -4.47301537e-01 -1.53867126e+00 -4.43168759e-01 -1.76459804e-01 -1.30472183e+00 3.77327710e-01] [ 5.33567145e-02 -7.56700337e-02 -1.23992455e+00 1.04844809e+00 -8.56227279e-01 -2.19756532e+00 5.28413177e-01 -1.70643181e-01 5.62417090e-01 7.47072875e-01] [ 5.42423390e-02 1.33926859e-02 -3.82720423e+00 -2.36876154e+00 4.21030581e-01 -1.72082794e+00 -1.53982639e+00 -2.35291791e+00 -2.97033876e-01 1.36154437e+00]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestMinMax::test_min_max[ ie_device:CPU - precision:FP32 - op_type:max ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5249.aten_min_max_2args, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::max(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:46:23 return (%3) fw_re: [[[[ 2.84699130e+00 7.36669779e-01 -7.18401313e-01 3.59180234e-02 -3.37138891e-01 1.22159791e+00 9.17654335e-01 -5.85902512e-01 1.88591754e+00 -3.84849161e-01] [-4.78707701e-01 -1.44763604e-01 -3.82371873e-01 2.05902398e-01 -2.67847657e-01 8.21586788e-01 7.40739405e-01 4.42101717e-01 6.51018620e-01 2.27600503e+00] [ 2.52463520e-01 1.76007003e-01 -8.65728632e-02 -1.30265737e+00 -5.93936801e-01 4.84061033e-01 -1.42896676e+00 -5.98988459e-02 1.37343630e-01 1.21067226e+00] [ 6.03276908e-01 9.72472370e-01 -9.11514163e-02 7.29182780e-01 1.64025337e-01 6.66211426e-01 1.50618330e-01 1.81386054e+00 -2.06359124e+00 6.02752984e-01] [ 9.88931954e-01 2.30648220e-01 1.76064923e-01 1.31543291e+00 6.34463072e-01 -2.90074438e-01 -1.15532708e+00 -8.06106210e-01 1.50207710e+00 1.59566700e-01] [ 1.45325351e+00 1.33616674e+00 8.66280377e-01 5.10190904e-01 2.24041796e+00 4.38067466e-01 9.32110310e-01 1.13401473e+00 -9.39970091e-02 3.88455927e-01] [ 1.73629665e+00 2.30352139e+00 -6.39236569e-01 1.67683077e+00 -1.70997763e+00 1.63976848e+00 8.82502854e-01 6.65708780e-01 3.29373598e+00 -1.50420845e-01] [ 3.95860761e-01 4.47338015e-01 4.64975655e-01 6.95356369e-01 1.16148210e+00 1.16073036e+00 4.47730184e-01 1.03997636e+00 1.32724226e+00 8.08213890e-01] [-1.94405109e-01 2.00262642e+00 1.58683765e+00 1.36072803e+00 1.05938613e+00 -1.52274087e-01 1.78518906e-01 1.03049076e+00 1.77824152e+00 -1.01017261e+00] [-1.25576150e+00 -1.16353929e-01 9.04955089e-01 5.84901154e-01 1.01040423e+00 -1.09864242e-01 6.80016458e-01 6.24496788e-02 7.98074484e-01 1.61025798e+00]] [[ 6.06221616e-01 1.03675127e+00 -5.24104059e-01 4.94211942e-01 1.15689728e-02 3.00594783e+00 3.35025400e-01 
1.67730069e+00 7.40465701e-01 -7.57807791e-01] [ 1.24450815e+00 1.30090490e-01 1.82300821e-01 9.19294596e-01 -3.66427571e-01 3.28457177e-01 2.03350258e+00 -8.32201958e-01 1.12638152e+00 1.65647876e+00] [ 1.04685950e+00 -1.97513655e-01 7.60806918e-01 9.42738175e-01 -1.04158545e+00 3.12285453e-01 2.85104066e-01 3.38045925e-01 1.91335511e+00 -6.11301899e-01] [ 1.29888141e+00 -5.58487773e-01 7.61464000e-01 5.66488564e-01 8.81452203e-01 1.54365146e+00 -6.96913674e-02 2.32666230e+00 -2.67014563e-01 2.11738259e-01] [ 6.74712300e-01 6.57403886e-01 -9.97188315e-02 5.87770104e-01 1.82112980e+00 7.74246931e-01 2.61982620e-01 1.03603089e+00 -7.21693218e-01 -2.23008916e-01] [ 1.00211167e+00 1.33420551e+00 -9.32305977e-02 3.61283839e-01 1.85641015e+00 1.50982156e-01 1.06388792e-01 -2.45566919e-01 2.06381381e-01 -5.92817485e-01] [ 1.19267926e-01 1.01780558e+00 9.05607820e-01 -1.15374815e+00 1.94254726e-01 3.54984522e-01 6.87094867e-01 9.02314007e-01 5.62812030e-01 -3.08295697e-01] [ 1.48290133e+00 -4.36093807e-02 2.17923307e+00 8.23513746e-01 6.14503026e-01 6.94389105e-01 6.58437669e-01 1.09335554e+00 2.15004519e-01 8.09951544e-01] [ 1.84043422e-02 8.17865252e-01 3.67426187e-01 1.20126116e+00 -5.81423044e-02 -5.09105563e-01 1.40220678e+00 2.13862762e-01 6.76042795e-01 1.89599764e+00] [ 4.68303561e-01 -4.15439665e-01 1.29413092e+00 2.07533717e+00 -6.96051657e-01 1.12270296e+00 1.12738657e+00 -4.22072232e-01 7.09439293e-02 2.50894642e+00]] [[ 1.33661997e+00 7.64397502e-01 -6.13400996e-01 3.72645676e-01 1.46076798e+00 8.11819673e-01 8.45817804e-01 4.34123695e-01 8.83914471e-01 -4.15982783e-01] [ 1.00532234e+00 1.33539939e+00 -1.44914091e+00 1.76636851e+00 -1.06696808e+00 1.45809388e+00 4.11236361e-02 2.99342051e-02 8.89058053e-01 2.07323655e-01] [-1.65826356e+00 1.66834509e+00 1.33803725e+00 1.10267878e+00 1.11302841e+00 1.28667974e+00 9.97097373e-01 1.98883995e-01 1.46123254e+00 -3.98915172e-01] [ 1.54588318e+00 -2.64812587e-03 -6.37596965e-01 2.53894478e-01 6.47516251e-01 
1.75396726e-01 -8.92536193e-02 -4.03284550e-01 6.73587978e-01 6.65396154e-01] [ 4.77617621e-01 1.34780395e+00 8.49585474e-01 -1.24928392e-01 -1.12522900e-01 5.64160407e-01 -2.69455165e-01 -2.49739468e-01 6.29787862e-01 4.23830658e-01] [ 1.45139349e+00 2.25710161e-02 4.44705486e-01 -3.41257483e-01 2.82590270e-01 1.59612131e+00 1.29152858e+00 2.48913717e+00 1.82592785e+00 6.93964064e-01] [ 2.52439022e-01 1.46232557e+00 -4.47655797e-01 2.37927198e+00 2.40589842e-01 2.49113068e-01 5.93232274e-01 9.18190420e-01 1.73530430e-01 1.48511934e+00] [-6.62864223e-02 -1.17946649e+00 4.00338531e-01 2.67788863e+00 1.42790866e+00 -3.22126359e-01 -8.26720834e-01 3.77452433e-01 -6.11838579e-01 6.95124149e-01] [ 1.23628736e+00 7.62880370e-02 1.81377351e+00 1.20242774e+00 1.20460880e+00 1.51429260e+00 5.23504436e-01 -8.05694282e-01 1.48997092e+00 8.78531158e-01] [ 5.72673440e-01 1.44799042e+00 -5.20510674e-02 2.17245057e-01 -1.07590973e-01 -4.28125083e-01 1.27159035e+00 4.23665166e-01 3.11885685e-01 1.25581062e+00]]]]; ov_res: [[[[ 2.84699130e+00 7.36669779e-01 -7.18401313e-01 3.59180234e-02 -3.37138891e-01 1.22159791e+00 9.17654335e-01 -5.85902512e-01 1.88591754e+00 -3.84849161e-01] [-4.78707701e-01 -1.44763604e-01 -3.82371873e-01 2.05902398e-01 -2.67847657e-01 8.21586788e-01 7.40739405e-01 4.42101717e-01 6.51018620e-01 2.27600503e+00] [ 2.52463520e-01 1.76007003e-01 -8.65728632e-02 -1.30265737e+00 -5.93936801e-01 4.84061033e-01 -1.42896676e+00 -5.98988459e-02 1.37343630e-01 1.21067226e+00] [ 6.03276908e-01 9.72472370e-01 -9.11514163e-02 7.29182780e-01 1.64025337e-01 6.66211426e-01 1.50618330e-01 1.81386054e+00 -2.06359124e+00 6.02752984e-01] [ 9.88931954e-01 2.30648220e-01 1.76064923e-01 1.31543291e+00 6.34463072e-01 -2.90074438e-01 -1.15532708e+00 -8.06106210e-01 1.50207710e+00 1.59566700e-01] [ 1.45325351e+00 1.33616674e+00 8.66280377e-01 5.10190904e-01 2.24041796e+00 4.38067466e-01 9.32110310e-01 1.13401473e+00 -9.39970091e-02 3.88455927e-01] [ 1.73629665e+00 2.30352139e+00 
-6.39236569e-01 1.67683077e+00 -1.70997763e+00 1.63976848e+00 8.82502854e-01 6.65708780e-01 3.29373598e+00 -1.50420845e-01] [ 3.95860761e-01 4.47338015e-01 4.64975655e-01 6.95356369e-01 1.16148210e+00 1.16073036e+00 4.47730184e-01 1.03997636e+00 1.32724226e+00 8.08213890e-01] [-1.94405109e-01 2.00262642e+00 1.58683765e+00 1.36072803e+00 1.05938613e+00 -1.52274087e-01 1.78518906e-01 1.03049076e+00 1.77824152e+00 -1.01017261e+00] [-1.25576150e+00 -1.16353929e-01 9.04955089e-01 5.84901154e-01 1.01040423e+00 -1.09864242e-01 6.80016458e-01 6.24496788e-02 7.98074484e-01 1.61025798e+00]] [[ 6.06221616e-01 1.03675127e+00 -5.24104059e-01 4.94211942e-01 1.15689728e-02 3.00594783e+00 3.35025400e-01 1.67730069e+00 7.40465701e-01 -7.57807791e-01] [ 1.24450815e+00 1.30090490e-01 1.82300821e-01 9.19294596e-01 -3.66427571e-01 3.28457177e-01 2.03350258e+00 -8.32201958e-01 1.12638152e+00 1.65647876e+00] [ 1.04685950e+00 -1.97513655e-01 7.60806918e-01 9.42738175e-01 -1.04158545e+00 3.12285453e-01 2.85104066e-01 3.38045925e-01 1.91335511e+00 -6.11301899e-01] [ 1.29888141e+00 -5.58487773e-01 7.61464000e-01 5.66488564e-01 8.81452203e-01 1.54365146e+00 -6.96913674e-02 2.32666230e+00 -2.67014563e-01 2.11738259e-01] [ 6.74712300e-01 6.57403886e-01 -9.97188315e-02 5.87770104e-01 1.82112980e+00 7.74246931e-01 2.61982620e-01 1.03603089e+00 -7.21693218e-01 -2.23008916e-01] [ 1.00211167e+00 1.33420551e+00 -9.32305977e-02 3.61283839e-01 1.85641015e+00 1.50982156e-01 1.06388792e-01 -2.45566919e-01 2.06381381e-01 -5.92817485e-01] [ 1.19267926e-01 1.01780558e+00 9.05607820e-01 -1.15374815e+00 1.94254726e-01 3.54984522e-01 6.87094867e-01 9.02314007e-01 5.62812030e-01 -3.08295697e-01] [ 1.48290133e+00 -4.36093807e-02 2.17923307e+00 8.23513746e-01 6.14503026e-01 6.94389105e-01 6.58437669e-01 1.09335554e+00 2.15004519e-01 8.09951544e-01] [ 1.84043422e-02 8.17865252e-01 3.67426187e-01 1.20126116e+00 -5.81423044e-02 -5.09105563e-01 1.40220678e+00 2.13862762e-01 6.76042795e-01 1.89599764e+00] [ 
4.68303561e-01 -4.15439665e-01 1.29413092e+00 2.07533717e+00 -6.96051657e-01 1.12270296e+00 1.12738657e+00 -4.22072232e-01 7.09439293e-02 2.50894642e+00]] [[ 1.33661997e+00 7.64397502e-01 -6.13400996e-01 3.72645676e-01 1.46076798e+00 8.11819673e-01 8.45817804e-01 4.34123695e-01 8.83914471e-01 -4.15982783e-01] [ 1.00532234e+00 1.33539939e+00 -1.44914091e+00 1.76636851e+00 -1.06696808e+00 1.45809388e+00 4.11236361e-02 2.99342051e-02 8.89058053e-01 2.07323655e-01] [-1.65826356e+00 1.66834509e+00 1.33803725e+00 1.10267878e+00 1.11302841e+00 1.28667974e+00 9.97097373e-01 1.98883995e-01 1.46123254e+00 -3.98915172e-01] [ 1.54588318e+00 -2.64812587e-03 -6.37596965e-01 2.53894478e-01 6.47516251e-01 1.75396726e-01 -8.92536193e-02 -4.03284550e-01 6.73587978e-01 6.65396154e-01] [ 4.77617621e-01 1.34780395e+00 8.49585474e-01 -1.24928392e-01 -1.12522900e-01 5.64160407e-01 -2.69455165e-01 -2.49739468e-01 6.29787862e-01 4.23830658e-01] [ 1.45139349e+00 2.25710161e-02 4.44705486e-01 -3.41257483e-01 2.82590270e-01 1.59612131e+00 1.29152858e+00 2.48913717e+00 1.82592785e+00 6.93964064e-01] [ 2.52439022e-01 1.46232557e+00 -4.47655797e-01 2.37927198e+00 2.40589842e-01 2.49113068e-01 5.93232274e-01 9.18190420e-01 1.73530430e-01 1.48511934e+00] [-6.62864223e-02 -1.17946649e+00 4.00338531e-01 2.67788863e+00 1.42790866e+00 -3.22126359e-01 -8.26720834e-01 3.77452433e-01 -6.11838579e-01 6.95124149e-01] [ 1.23628736e+00 7.62880370e-02 1.81377351e+00 1.20242774e+00 1.20460880e+00 1.51429260e+00 5.23504436e-01 -8.05694282e-01 1.48997092e+00 8.78531158e-01] [ 5.72673440e-01 1.44799042e+00 -5.20510674e-02 2.17245057e-01 -1.07590973e-01 -4.28125083e-01 1.27159035e+00 4.23665166e-01 3.11885685e-01 1.25581062e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 0, 'second_input': 1, 'dtype': 'float'} - case:2_values ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5250.prim_max_2_values, %x.1 : float, %y.1 : float): %3 : float = prim::max(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:86:23 return (%3) fw_re: 1.0; ov_res: 1.0 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 0, 'second_input': 1, 'dtype': 'float'} - case:2_list_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5251.prim_max_2_list_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:31 %4 : float[] = prim::ListConstruct(%x.1, %3) %5 : float = aten::sub(%y.1, %x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:41 %6 : float[] = prim::ListConstruct(%y.1, %5) %7 : float[] = prim::max(%4, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:23 return (%7) fw_re: [1.0, 1.0]; ov_res: [1. 1.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 0, 'second_input': 1, 'dtype': 'float'} - case:list_several_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5252.prim_max_1list_several_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:34 %4 : float[] = prim::ListConstruct(%x.1, %y.1, %3) %5 : float = prim::max(%4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:23 return (%5) fw_re: 1.0; ov_res: 1.0 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 0, 'second_input': 1, 'dtype': 'float'} - case:one_value ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5253.prim_max_one_value, %x.1 : float, %y : float): %3 : float[] = prim::ListConstruct(%x.1) %4 : float = prim::max(%3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:99:23 return (%4) fw_re: 0.0; ov_res: 0.0 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 1, 'second_input': 1, 'dtype': 'float'} - case:2_values ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5255.prim_max_2_values, %x.1 : float, %y.1 : float): %3 : float = prim::max(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:86:23 return (%3) fw_re: 1.0; ov_res: 1.0 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 1, 'second_input': 1, 'dtype': 'float'} - case:2_list_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5257.prim_max_2_list_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:31 %4 : float[] = prim::ListConstruct(%x.1, %3) %5 : float = aten::sub(%y.1, %x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:41 %6 : float[] = prim::ListConstruct(%y.1, %5) %7 : float[] = prim::max(%4, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:23 return (%7) fw_re: [1.0, 2.0]; ov_res: [1. 2.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 1, 'second_input': 1, 'dtype': 'float'} - case:list_several_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5259.prim_max_1list_several_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:34 %4 : float[] = prim::ListConstruct(%x.1, %y.1, %3) %5 : float = prim::max(%4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:23 return (%5) fw_re: 2.0; ov_res: 2.0 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 1, 'second_input': 1, 'dtype': 'float'} - case:one_value ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5261.prim_max_one_value, %x.1 : float, %y : float): %3 : float[] = prim::ListConstruct(%x.1) %4 : float = prim::max(%3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:99:23 return (%4) failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. 
Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for 
prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_fw_re: 1.0; ov_res: 1.0 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 2, 'second_input': 1, 'dtype': 'float'} - case:2_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5263.prim_max_2_values, %x.1 : float, %y.1 : float): %3 : float = prim::max(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:86:23 return (%3) fw_re: 2.0; ov_res: 2.0 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 2, 'second_input': 1, 'dtype': 'float'} - case:2_list_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5265.prim_max_2_list_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:31 %4 : float[] = prim::ListConstruct(%x.1, %3) %5 : float = aten::sub(%y.1, %x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:41 %6 : float[] = prim::ListConstruct(%y.1, %5) %7 : float[] = prim::max(%4, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:23 return (%7) fw_re: [2.0, 3.0]; ov_res: [2. 3.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 2, 'second_input': 1, 'dtype': 'float'} - case:list_several_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5267.prim_max_1list_several_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:34 %4 : float[] = prim::ListConstruct(%x.1, %y.1, %3) %5 : float = prim::max(%4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:23 return (%5) fw_re: 3.0; ov_res: 3.0 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 2, 'second_input': 1, 'dtype': 'float'} - case:one_value ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5269.prim_max_one_value, %x.1 : float, %y : float): %3 : float[] = prim::ListConstruct(%x.1) %4 : float = prim::max(%3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:99:23 return (%4) fw_re: 2.0; ov_res: 2.0 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 0, 'second_input': 1, 'dtype': 'int'} - case:2_values ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5271.prim_max_2_values, %x.1 : float, %y.1 : float): %3 : float = prim::max(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:86:23 return (%3) fw_re: 1.0; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 0, 'second_input': 1, 'dtype': 'int'} - case:2_list_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5273.prim_max_2_list_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:31 %4 : float[] = prim::ListConstruct(%x.1, %3) %5 : float = aten::sub(%y.1, %x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:41 %6 : float[] = prim::ListConstruct(%y.1, %5) %7 : float[] = prim::max(%4, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:23 return (%7) fw_re: [1.0, 1.0]; ov_res: [1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 0, 'second_input': 1, 'dtype': 'int'} - case:list_several_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5275.prim_max_1list_several_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:34 %4 : float[] = prim::ListConstruct(%x.1, %y.1, %3) %5 : float = prim::max(%4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:23 return (%5) fw_re: 1.0; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 0, 'second_input': 1, 'dtype': 'int'} - case:one_value ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5277.prim_max_one_value, %x.1 : float, %y : float): %3 : float[] = prim::ListConstruct(%x.1) %4 : float = prim::max(%3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:99:23 return (%4) fw_re: 0.0; ov_res: 0 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 1, 'second_input': 1, 'dtype': 'int'} - case:2_values ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5279.prim_max_2_values, %x.1 : float, %y.1 : float): %3 : float = prim::max(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:86:23 return (%3) fw_re: 1.0; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 1, 'second_input': 1, 'dtype': 'int'} - case:2_list_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5281.prim_max_2_list_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:31 %4 : float[] = prim::ListConstruct(%x.1, %3) %5 : float = aten::sub(%y.1, %x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:41 %6 : float[] = prim::ListConstruct(%y.1, %5) %7 : float[] = prim::max(%4, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:23 return (%7) fw_re: [1.0, 2.0]; ov_res: [1 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 1, 'second_input': 1, 'dtype': 'int'} - case:list_several_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5283.prim_max_1list_several_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:34 %4 : float[] = prim::ListConstruct(%x.1, %y.1, %3) %5 : float = prim::max(%4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:23 return (%5) fw_re: 2.0; ov_res: 2 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 1, 'second_input': 1, 'dtype': 'int'} - case:one_value ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5285.prim_max_one_value, %x.1 : float, %y : float): %3 : float[] = prim::ListConstruct(%x.1) %4 : float = prim::max(%3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:99:23 return (%4) fw_re: 1.0; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 2, 'second_input': 1, 'dtype': 'int'} - case:2_values ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5287.prim_max_2_values, %x.1 : float, %y.1 : float): %3 : float = prim::max(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:86:23 return (%3) fw_re: 2.0; ov_res: 2 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 2, 'second_input': 1, 'dtype': 'int'} - case:2_list_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5289.prim_max_2_list_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:31 %4 : float[] = prim::ListConstruct(%x.1, %3) %5 : float = aten::sub(%y.1, %x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:41 %6 : float[] = prim::ListConstruct(%y.1, %5) %7 : float[] = prim::max(%4, %6) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:90:23 return (%7) node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct 
with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 
'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support onfw_re: [2.0, 3.0]; ov_res: [2 3] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 2, 'second_input': 1, 'dtype': 'int'} - case:list_several_values ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5291.prim_max_1list_several_values, %x.1 : float, %y.1 : float): %3 : float = aten::add(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:34 %4 : float[] = prim::ListConstruct(%x.1, %y.1, %3) %5 : float = prim::max(%4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:95:23 return (%5) fw_re: 3.0; ov_res: 3 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_min_max.py::TestPrimMax::test_min_max[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'first_input': 2, 'second_input': 1, 'dtype': 'int'} - case:one_value ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_min_max.___torch_mangle_5293.prim_max_one_value, %x.1 : float, %y : float): %3 : float[] = prim::ListConstruct(%x.1) %4 : float = prim::max(%3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_min_max.py:99:23 return (%4) fw_re: 2.0; ov_res: 2 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_mm[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5294.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::mm(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[-0.26581863 4.0535936 0.3156717 ] [-0.10903627 -0.08998211 0.7160304 ] [-0.02426662 -1.9934936 -3.4541361 ]]; ov_res: [[-0.26581863 4.0535936 0.3156717 ] [-0.10903627 -0.08998211 0.7160304 ] [-0.02426662 -1.9934936 -3.4541361 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_mm[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5296.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::mm(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[ 0.8749262 1.5338732 ] [-0.60344714 2.2578425 ]]; ov_res: [[ 0.8749262 1.5338732 ] [-0.60344714 2.2578425 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_mm[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5298.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::mm(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[ 2.7982347] [ 3.2544103] [ 4.710468 ] [ 4.046497 ] [-4.781559 ] [ 6.8963327] [ 3.3033686] [ 1.3662587] [-1.2231907] [ 1.4026085]]; ov_res: [[ 2.798235 ] [ 3.25441 ] [ 4.710468 ] [ 4.046497 ] [-4.7815595] [ 6.896333 ] [ 3.3033686] [ 1.3662589] [-1.2231907] [ 1.4026085]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_mm[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5300.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::mm(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[ 0.43298057 -3.0932636 ]]; ov_res: [[ 0.43298057 -3.0932636 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_mm[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5302.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::mm(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[-1.9418402]]; ov_res: [[-1.9418402]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_bmm[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (10, 3, 3), 'matrix2_shape': (10, 3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5304.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::bmm(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 2.2671390e+00 -6.5063775e-01 -2.1873355e+00] [-6.0506070e-01 7.2390103e-01 1.2311622e+00] [-1.4524704e-01 7.8232139e-01 -1.0024736e+00]] [[-1.1016188e+00 -1.4602987e+00 6.5820587e-01] [ 2.5223415e+00 4.5511436e-01 -1.7196602e-01] [ 2.6574557e+00 8.9074850e-01 3.2937074e-01]] [[ 1.2133895e+00 1.9783103e-01 -2.7237535e-03] [ 7.2732443e-01 -7.9133409e-01 2.2573195e-01] [ 1.3024886e+00 -7.5447112e-01 -7.5264013e-01]] [[ 2.1438599e-02 3.0268435e+00 8.3160800e-01] [-7.1849978e-01 2.7883596e+00 1.8697896e+00] [ 2.3302121e+00 -3.7319517e-01 2.7388999e+00]] [[-9.8456490e-01 -2.1530393e-01 8.5299551e-02] [-5.5087203e-01 2.2526845e-01 3.3097357e-01] [ 1.5426074e+00 1.9533522e-01 -1.0460715e+00]] [[ 1.0476735e+00 -2.4397578e+00 6.8010330e-01] [-4.0323198e-01 -1.8670440e-02 -1.0327566e+00] [-5.9466702e-01 1.9210054e+00 -4.2713377e-01]] [[-1.5358437e+00 -9.8486358e-01 -6.7669028e-01] [ 3.2204261e+00 2.0879560e+00 4.4373369e+00] [-2.0802987e-01 -2.1377467e-01 -1.6183382e+00]] [[ 3.1596488e-01 1.5212064e+00 1.1633126e+00] [ 2.5873411e+00 1.8981451e+00 2.1755435e+00] [-1.9336548e+00 1.2004349e+00 2.5673223e+00]] [[ 6.1915004e-01 1.7770882e-01 1.3287458e-01] [ 2.0119340e+00 -5.9750044e-01 3.4325557e+00] [ 7.4393630e-01 2.7072766e-01 -2.1359360e-01]] [[ 7.3138738e-01 -8.7443030e-01 -2.7350631e-01] [-9.3033737e-01 1.5840635e+00 -2.6722476e+00] [-2.9402040e-02 1.3229954e-01 -1.6652799e+00]]]; ov_res: [[[ 2.2671390e+00 -6.5063769e-01 -2.1873355e+00] [-6.0506070e-01 7.2390109e-01 1.2311622e+00] [-1.4524706e-01 7.8232139e-01 -1.0024736e+00]] [[-1.1016189e+00 -1.4602987e+00 6.5820593e-01] [ 2.5223415e+00 4.5511431e-01 -1.7196602e-01] [ 2.6574557e+00 8.9074850e-01 3.2937077e-01]] [[ 
1.2133895e+00 1.9783103e-01 -2.7237516e-03] [ 7.2732443e-01 -7.9133409e-01 2.2573192e-01] [ 1.3024887e+00 -7.5447118e-01 -7.5264013e-01]] [[ 2.1438576e-02 3.0268435e+00 8.3160800e-01] [-7.1849972e-01 2.7883596e+00 1.8697896e+00] [ 2.3302121e+00 -3.7319517e-01 2.7388999e+00]] [[-9.8456490e-01 -2.1530394e-01 8.5299559e-02] [-5.5087203e-01 2.2526845e-01 3.3097360e-01] [ 1.5426073e+00 1.9533522e-01 -1.0460715e+00]] [[ 1.0476736e+00 -2.4397578e+00 6.8010324e-01] [-4.0323195e-01 -1.8670417e-02 -1.0327564e+00] [-5.9466702e-01 1.9210054e+00 -4.2713377e-01]] [[-1.5358437e+00 -9.8486358e-01 -6.7669028e-01] [ 3.2204261e+00 2.0879560e+00 4.4373369e+00] [-2.0802984e-01 -2.1377470e-01 -1.6183382e+00]] [[ 3.1596488e-01 1.5212065e+00 1.1633126e+00] [ 2.5873411e+00 1.8981450e+00 2.1755435e+00] [-1.9336549e+00 1.2004349e+00 2.5673223e+00]] [[ 6.1915004e-01 1.7770882e-01 1.3287459e-01] [ 2.0119340e+00 -5.9750044e-01 3.4325559e+00] [ 7.4393630e-01 2.7072760e-01 -2.1359362e-01]] [[ 7.3138738e-01 -8.7443030e-01 -2.7350634e-01] [-9.3033731e-01 1.5840635e+00 -2.6722476e+00] [-2.9402038e-02 1.3229954e-01 -1.6652799e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_bmm[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (1, 2, 3), 'matrix2_shape': (1, 3, 2)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5306.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::bmm(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 0.9956863 -1.0933805 ] [-0.68933403 2.0011246 ]]]; ov_res: [[[ 0.9956863 -1.0933805 ] [-0.68933403 2.0011246 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_bmm[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (2, 10, 5), 'matrix2_shape': (2, 5, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5308.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::bmm(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 1.4981035 ] [ 1.4648793 ] [ 0.62478405] [-2.0971735 ] [-3.1621256 ] [ 1.4989445 ] [ 0.27445605] [-2.4854903 ] [-0.93047225] [-1.8489813 ]] [[ 1.166269 ] [ 0.1059808 ] [ 1.4410111 ] [ 2.8120997 ] [ 1.2645983 ] [-2.5079591 ] [ 1.026717 ] [ 0.76649004] [-2.1111572 ] [ 0.9597215 ]]]; ov_res: [[[ 1.4981035 ] [ 1.4648793 ] [ 0.624784 ] [-2.0971735 ] [-3.1621253 ] [ 1.4989445 ] [ 0.27445602] [-2.4854903 ] [-0.93047225] [-1.8489813 ]] [[ 1.166269 ] [ 0.10598084] [ 1.4410111 ] [ 2.8121 ] [ 1.2645983 ] [-2.507959 ] [ 1.026717 ] [ 0.76649004] [-2.1111572 ] [ 0.9597215 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_bmm[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (3, 1, 10), 'matrix2_shape': (3, 10, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5310.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::bmm(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 2.5730891 -2.0472486 ]] [[-3.155521 -3.5706294 ]] [[ 2.4223166 -0.19001377]]]; ov_res: [[[ 2.573089 -2.0472486]] [[-3.155521 -3.5706294]] [[ 2.4223166 -0.1900139]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_bmm[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (4, 1, 10), 'matrix2_shape': (4, 10, 1)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5312.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::bmm(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 2.684389 ]] [[-5.665759 ]] [[-1.5955205 ]] [[ 0.68823713]]]; ov_res: [[[ 2.684389 ]] [[-5.6657596 ]] [[-1.5955204 ]] [[ 0.68823725]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (10, 3, 3), 'matrix2_shape': (10, 3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5314.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[-1.6444653e+00 -1.1444532e+00 1.4685405e+00] [ 1.2247200e+00 -6.1322203e+00 2.7697129e+00] [-2.0145400e-01 5.6136549e-03 2.3209147e-01]] [[-8.3220935e-01 3.2378716e+00 7.9191995e-01] [ 1.4993370e-01 -1.1387067e+00 -7.4706036e-01] [-1.5713519e+00 3.0955925e+00 -5.3731465e-01]] [[ 2.5417495e+00 1.9387200e+00 4.9602452e-01] [ 5.7664909e+00 2.5201468e+00 -2.9689223e-01] [-1.3617402e+00 -7.4016505e-01 -1.3838542e+00]] [[ 1.3692781e+00 -1.4147943e+00 -5.0891984e-01] [ 4.2756113e-01 -1.3003967e+00 -4.2915964e-01] [ 3.2831686e+00 1.5595762e+00 4.7863939e-01]] [[-1.0749104e+00 1.3404313e+00 1.1245906e+00] [-4.4902653e-01 2.4098217e-01 6.6233408e-01] [-6.6135085e-01 1.4872475e+00 2.8694594e-01]] [[-4.2428264e-01 3.2461691e-01 -2.5743681e-01] [-1.7933521e+00 4.9936742e-01 -1.0556279e+00] [ 2.3585093e-01 4.5849428e-01 3.2284909e-01]] [[-1.5174675e-01 -2.0337915e+00 7.5022817e-02] [-3.0817208e+00 -3.3769149e-02 -2.6250455e-01] [ 3.0575452e+00 -2.0852089e-02 1.1138792e+00]] [[-7.2568059e-01 1.6578391e+00 -8.8115543e-01] [-5.8526003e-01 1.0652366e+00 -1.5098032e+00] [-4.7278827e-01 1.1375941e+00 -5.6134176e-01]] [[-1.8596569e+00 3.0670059e+00 -1.3371980e-01] [-2.1795371e+00 9.2689574e-01 -1.2758350e+00] [-2.2501631e+00 -2.6209283e+00 -1.5829141e+00]] [[-4.9311715e-01 -1.1065817e+00 -2.0809982e+00] [ 3.8528484e-01 -6.5840602e-01 -2.8757970e+00] [-8.1089085e-01 -2.5168353e-01 -5.0643808e-01]]]; ov_res: [[[-1.6444652e+00 -1.1444532e+00 1.4685404e+00] [ 1.2247200e+00 -6.1322203e+00 2.7697127e+00] [-2.0145403e-01 5.6136637e-03 2.3209147e-01]] [[-8.3220947e-01 3.2378716e+00 7.9191995e-01] [ 1.4993374e-01 -1.1387067e+00 -7.4706036e-01] [-1.5713519e+00 3.0955925e+00 
-5.3731459e-01]] [[ 2.5417495e+00 1.9387200e+00 4.9602449e-01] [ 5.7664909e+00 2.5201471e+00 -2.9689234e-01] [-1.3617402e+00 -7.4016500e-01 -1.3838543e+00]] [[ 1.3692781e+00 -1.4147942e+00 -5.0891984e-01] [ 4.2756113e-01 -1.3003966e+00 -4.2915964e-01] [ 3.2831686e+00 1.5595762e+00 4.7863942e-01]] [[-1.0749104e+00 1.3404313e+00 1.1245906e+00] [-4.4902653e-01 2.4098217e-01 6.6233408e-01] [-6.6135085e-01 1.4872476e+00 2.8694594e-01]] [[-4.2428270e-01 3.2461691e-01 -2.5743681e-01] [-1.7933521e+00 4.9936745e-01 -1.0556278e+00] [ 2.3585096e-01 4.5849428e-01 3.2284912e-01]] [[-1.5174679e-01 -2.0337913e+00 7.5022817e-02] [-3.0817208e+00 -3.3769127e-02 -2.6250455e-01] [ 3.0575452e+00 -2.0852119e-02 1.1138791e+00]] [[-7.2568047e-01 1.6578391e+00 -8.8115543e-01] [-5.8526003e-01 1.0652366e+00 -1.5098032e+00] [-4.7278824e-01 1.1375941e+00 -5.6134176e-01]] [[-1.8596569e+00 3.0670059e+00 -1.3371979e-01] [-2.1795371e+00 9.2689574e-01 -1.2758350e+00] [-2.2501631e+00 -2.6209280e+00 -1.5829142e+00]] [[-4.9311712e-01 -1.1065818e+00 -2.0809982e+00] [ 3.8528484e-01 -6.5840608e-01 -2.8757970e+00] [-8.1089085e-01 -2.5168356e-01 -5.0643808e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (1, 2, 3), 'matrix2_shape': (1, 3, 2)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5316.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 0.38435942 0.9373764 ] [ 0.8257624 -0.5420202 ]]]; ov_res: [[[ 0.38435945 0.93737644] [ 0.82576245 -0.5420202 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (2, 10, 5), 'matrix2_shape': (2, 5, 1)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5318.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 2.1132495 ] [-2.9667397 ] [ 1.5435677 ] [ 1.6353008 ] [-0.15439841] [ 5.995303 ] [ 0.14489797] [-6.226624 ] [-2.4585202 ] [-2.2546418 ]] [[-0.84398586] [-0.48075712] [-1.2251942 ] [-1.0328872 ] [ 1.4616547 ] [ 0.12442636] [-1.981599 ] [-2.1527107 ] [-1.2019448 ] [-1.2853024 ]]]; ov_res: [[[ 2.1132495 ] [-2.9667397 ] [ 1.5435675 ] [ 1.6353008 ] [-0.15439852] [ 5.995303 ] [ 0.14489798] [-6.2266245 ] [-2.4585202 ] [-2.2546418 ]] [[-0.8439858 ] [-0.48075712] [-1.2251943 ] [-1.0328872 ] [ 1.4616545 ] [ 0.12442635] [-1.981599 ] [-2.1527107 ] [-1.2019448 ] [-1.2853024 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (3, 1, 10), 'matrix2_shape': (3, 10, 2)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5320.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[-0.42151177 -1.5451934 ]] [[-3.3228111 -1.2609062 ]] [[-3.598831 0.5264642 ]]]; ov_res: [[[-0.42151174 -1.5451938 ]] [[-3.3228111 -1.2609061 ]] [[-3.598831 0.52646446]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (4, 1, 10), 'matrix2_shape': (4, 10, 1)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5322.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 6.8340573 ]] [[ 0.02524692]] [[-0.858567 ]] [[-3.7339098 ]]]; ov_res: [[[ 6.8340573 ]] [[ 0.02524677]] [[-0.85856694]] [[-3.7339094 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (3, 3), 'matrix2_shape': (3, 3)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5324.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[ 0.01849457 1.1955614 -0.23159237] [ 0.5413837 -0.5232298 0.5385349 ] [ 0.1170884 1.9147172 -0.35258535]]; ov_res: [[ 0.01849457 1.1955614 -0.23159237] [ 0.5413837 -0.5232298 0.5385349 ] [ 0.1170884 1.9147172 -0.35258535]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (2, 3), 'matrix2_shape': (3, 2)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5326.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[ 2.4666753 -1.3496094] [-3.5075245 -1.0226514]]; ov_res: [[ 2.4666753 -1.3496094] [-3.5075245 -1.0226514]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (10, 5), 'matrix2_shape': (5, 1)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5328.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[ 0.4121704 ] [ 0.531702 ] [-0.1441648 ] [-0.18345024] [ 1.5466527 ] [-1.1776263 ] [-0.4739641 ] [ 0.9317543 ] [ 0.79069597] [ 1.1988057 ]]; ov_res: [[ 0.41217047] [ 0.531702 ] [-0.14416479] [-0.18345019] [ 1.5466527 ] [-1.1776261 ] [-0.47396407] [ 0.9317543 ] [ 0.7906959 ] [ 1.1988057 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (1, 10), 'matrix2_shape': (10, 2)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5330.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[0.4516673 1.6269141]]; ov_res: [[0.4516673 1.6269141]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (1, 10), 'matrix2_shape': (10, 1)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5332.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[-0.82785916]]; ov_res: [[-0.82785916]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (10, 3, 3), 'matrix2_shape': (3, 3)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5334.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 0.29909262 -1.5913869 -3.7420518 ] [-1.35765 1.0221689 2.9064631 ] [ 0.8226998 -1.7196171 -3.7150445 ]] [[ 1.4022243 -0.76284623 0.7473552 ] [ 0.7544171 -0.778387 -1.2608756 ] [ 0.26633814 1.3703414 -0.07170192]] [[ 2.1121023 -3.5732741 -4.9190297 ] [-0.11628772 -5.688205 -3.6717913 ] [-0.18165433 -0.5396713 -0.57851756]] [[ 0.75273263 -0.45641044 -0.0534527 ] [ 1.6030378 -2.3427005 -2.5363216 ] [-0.45550755 2.504985 1.8177607 ]] [[ 0.4768868 1.0008775 3.547991 ] [ 0.5897961 -0.81390715 -1.3953544 ] [ 1.4906266 -2.6899755 -5.495038 ]] [[ 0.9517302 -0.96226317 -2.990506 ] [-0.9658078 3.4021292 1.4108912 ] [ 1.2836287 -1.6073327 -0.5755237 ]] [[ 0.7962534 3.112682 1.514178 ] [ 0.5855125 -0.3869931 0.91454625] [-0.7899803 1.2346034 4.114533 ]] [[-1.5169587 0.6697628 2.7246978 ] [-0.19486937 -0.37924713 -1.3132181 ] [ 0.0738199 2.1494532 0.6053487 ]] [[-0.7690227 -0.6473265 0.832851 ] [ 0.41315708 -0.5687174 -3.4733517 ] [ 1.6063288 -1.6750435 -5.445607 ]] [[-0.54561406 2.6473315 -0.29672882] [-1.6100911 3.409994 1.1565799 ] [-1.1575232 4.4142675 1.0965353 ]]]; ov_res: [[[ 0.29909262 -1.5913869 -3.7420518 ] [-1.35765 1.0221689 2.9064631 ] [ 0.8226998 -1.7196171 -3.7150445 ]] [[ 1.4022243 -0.76284623 0.7473552 ] [ 0.7544171 -0.778387 -1.2608756 ] [ 0.26633814 1.3703414 -0.07170192]] [[ 2.1121023 -3.5732741 -4.9190297 ] [-0.11628772 -5.688205 -3.6717913 ] [-0.18165433 -0.5396713 -0.57851756]] [[ 0.75273263 -0.45641044 -0.0534527 ] [ 1.6030378 -2.3427005 -2.5363216 ] [-0.45550755 2.504985 1.8177607 ]] [[ 0.4768868 1.0008775 3.547991 ] [ 0.5897961 -0.81390715 -1.3953544 ] [ 1.4906266 -2.6899755 -5.495038 ]] [[ 0.9517302 -0.96226317 -2.990506 ] [-0.9658078 3.4021292 
1.4108912 ] [ 1.2836287 -1.6073327 -0.5755237 ]] [[ 0.7962534 3.112682 1.514178 ] [ 0.5855125 -0.3869931 0.91454625] [-0.7899803 1.2346034 4.114533 ]] [[-1.5169587 0.6697628 2.7246978 ] [-0.19486937 -0.37924713 -1.3132181 ] [ 0.0738199 2.1494532 0.6053487 ]] [[-0.7690227 -0.6473265 0.832851 ] [ 0.41315708 -0.5687174 -3.4733517 ] [ 1.6063288 -1.6750435 -5.445607 ]] [[-0.54561406 2.6473315 -0.29672882] [-1.6100911 3.409994 1.1565799 ] [-1.1575232 4.4142675 1.0965353 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (2, 3), 'matrix2_shape': (10, 3, 2)} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5336.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[-2.7025554 2.827416 ] [-1.843725 0.7196632 ]] [[-0.01696543 0.27072087] [-1.3263984 1.8504399 ]] [[ 5.4240813 -7.612092 ] [-1.8056529 0.62888944]] [[ 4.295726 1.8768644 ] [ 1.8663824 -3.9991097 ]] [[-3.2597594 -2.1979046 ] [ 0.42730635 2.2275715 ]] [[ 0.4074253 -1.3400294 ] [ 0.81426054 -1.3809254 ]] [[ 1.5037439 4.232337 ] [-0.9482372 1.1513075 ]] [[ 3.8121274 3.191624 ] [-0.5080571 -1.0975336 ]] [[ 0.09944833 0.1927667 ] [ 1.3767745 -1.7563637 ]] [[-1.0847868 -1.0544161 ] [-1.455079 -0.75688845]]]; ov_res: [[[-2.7025554 2.827416 ] [-1.843725 0.7196632 ]] [[-0.01696543 0.27072087] [-1.3263984 1.8504399 ]] [[ 5.4240813 -7.612092 ] [-1.8056529 0.62888944]] [[ 4.295726 1.8768644 ] [ 1.8663824 -3.9991097 ]] [[-3.2597594 -2.1979046 ] [ 0.42730635 2.2275715 ]] [[ 0.4074253 -1.3400294 ] [ 0.81426054 -1.3809254 ]] [[ 1.5037439 4.232337 ] [-0.9482372 1.1513075 ]] [[ 3.8121274 3.191624 ] [-0.5080571 -1.0975336 ]] [[ 0.09944833 0.1927667 ] [ 1.3767745 -1.7563637 ]] [[-1.0847868 -1.0544161 ] [-1.455079 -0.75688845]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (1, 10, 5), 'matrix2_shape': (5, 1)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5338.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 0.37186933] [-0.5917471 ] [ 1.8693321 ] [ 0.6744555 ] [ 2.7791886 ] [ 6.051592 ] [ 2.2894535 ] [-1.8631572 ] [ 0.57621944] [ 2.815628 ]]]; ov_res: [[[ 0.37186936] [-0.5917472 ] [ 1.8693322 ] [ 0.67445546] [ 2.7791884 ] [ 6.0515924 ] [ 2.2894535 ] [-1.8631573 ] [ 0.57621944] [ 2.815628 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (5, 1, 10), 'matrix2_shape': (10, 2)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5340.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[ 1.9118679 0.2636701]] [[-4.070513 -5.192945 ]] [[-3.034898 -3.122269 ]] [[-3.6485844 -3.090615 ]] [[ 5.549267 4.156562 ]]]; ov_res: [[[ 1.9118679 0.2636701]] [[-4.070513 -5.192945 ]] [[-3.034898 -3.122269 ]] [[-3.6485844 -3.090615 ]] [[ 5.549267 4.156562 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (1, 10), 'matrix2_shape': (4, 10, 2)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5342.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[-1.9705873 -4.1897707 ]] [[ 2.7983298 2.6548095 ]] [[ 0.6760658 -1.5832536 ]] [[ 3.1288652 0.16178465]]]; ov_res: [[[-1.9705871 -4.1897707 ]] [[ 2.7983298 2.6548095 ]] [[ 0.67606574 -1.5832536 ]] [[ 3.128865 0.16178462]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (2, 1, 10), 'matrix2_shape': (10, 1)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5344.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[-1.5790977 ]] [[-0.77748924]]]; ov_res: [[[-1.5790976 ]] [[-0.77748954]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_mm.py::TestMatMul::test_matmul[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'matrix1_shape': (1, 10), 'matrix2_shape': (2, 10, 1)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_mm.___torch_mangle_5346.aten_mm, %m1.1 : Tensor, %m2.1 : Tensor): %3 : Tensor = aten::matmul(%m1.1, %m2.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_mm.py:28:23 return (%3) fw_re: [[[0.21374118]] [[0.16947234]]]; ov_res: [[[0.21374117]] [[0.16947249]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:zeros ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5347.aten_nonzero, %cond.1 : Tensor): %2 : Tensor = aten::nonzero(%cond.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:23:23 return (%2) fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:ones ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5349.aten_nonzero, %cond.1 : Tensor): %2 : Tensor = aten::nonzero(%cond.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:23:23 return (%2) fw_re: [[0 0 0] [0 0 1] [0 1 0] [0 1 1] [0 2 0] [0 2 1] [0 3 0] [0 3 1] [0 4 0] [0 4 1] [0 5 0] [0 5 1] [0 6 0] [0 6 1] [0 7 0] [0 7 1] [0 8 0] [0 8 1] [0 9 0] [0 9 1] [1 0 0] [1 0 1] [1 1 0] [1 1 1] [1 2 0] [1 2 1] [1 3 0] [1 3 1] [1 4 0] [1 4 1] [1 5 0] [1 5 1] [1 6 0] [1 6 1] [1 7 0] [1 7 1] [1 8 0] [1 8 1] [1 9 0] [1 9 1]]; ov_res: [[0 0 0] [0 0 1] [0 1 0] [0 1 1] [0 2 0] [0 2 1] [0 3 0] [0 3 1] [0 4 0] [0 4 1] [0 5 0] [0 5 1] [0 6 0] [0 6 1] [0 7 0] [0 7 1] [0 8 0] [0 8 1] [0 9 0] [0 9 1] [1 0 0] [1 0 1] [1 1 0] [1 1 1] [1 2 0] [1 2 1] [1 3 0] [1 3 1] [1 4 0] [1 4 1] [1 5 0] [1 5 1] [1 6 0] [1 6 1] [1 7 0] [1 7 1] [1 8 0] [1 8 1] [1 9 0] [1 9 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:False - mask_dtype:<class 'numpy.uint8'> - mask_fill:random ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5351.aten_nonzero, %cond.1 : Tensor): %2 : Tensor = aten::nonzero(%cond.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:23:23 return (%2) fw_re: [[0 1 1] [0 4 1] [0 6 1] [0 9 1] [1 1 1] [1 4 1] [1 6 1] [1 9 1]]; ov_res: [[0 1 1] [0 4 1] [0 6 1] [0 9 1] [1 1 1] [1 4 1] [1 6 1] [1 9 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:False - mask_dtype:<class 'bool'> - mask_fill:zeros ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5353.aten_nonzero, %cond.1 : Tensor): %2 : Tensor = aten::nonzero(%cond.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:23:23 return (%2) fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:False - mask_dtype:<class 'bool'> - mask_fill:ones ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5355.aten_nonzero, %cond.1 : Tensor): %2 : Tensor = aten::nonzero(%cond.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:23:23 return (%2) fw_re: [[0 0 0] [0 0 1] [0 1 0] [0 1 1] [0 2 0] [0 2 1] [0 3 0] [0 3 1] [0 4 0] [0 4 1] [0 5 0] [0 5 1] [0 6 0] [0 6 1] [0 7 0] [0 7 1] [0 8 0] [0 8 1] [0 9 0] [0 9 1] [1 0 0] [1 0 1] [1 1 0] [1 1 1] [1 2 0] [1 2 1] [1 3 0] [1 3 1] [1 4 0] [1 4 1] [1 5 0] [1 5 1] [1 6 0] [1 6 1] [1 7 0] [1 7 1] [1 8 0] [1 8 1] [1 9 0] [1 9 1]]; ov_res: [[0 0 0] [0 0 1] [0 1 0] [0 1 1] [0 2 0] [0 2 1] [0 3 0] [0 3 1] [0 4 0] [0 4 1] [0 5 0] [0 5 1] [0 6 0] [0 6 1] [0 7 0] [0 7 1] [0 8 0] [0 8 1] [0 9 0] [0 9 1] [1 0 0] [1 0 1] [1 1 0] [1 1 1] [1 2 0] [1 2 1] [1 3 0] [1 3 1] [1 4 0] [1 4 1] [1 5 0] [1 5 1] [1 6 0] [1 6 1] [1 7 0] [1 7 1] [1 8 0] [1 8 1] [1 9 0] [1 9 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:False - mask_dtype:<class 'bool'> - mask_fill:random ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5357.aten_nonzero, %cond.1 : Tensor): %2 : Tensor = aten::nonzero(%cond.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:23:23 return (%2) fw_re: [[0 2 1] [0 4 1] [0 6 1] [0 7 1] [1 2 1] [1 4 1] [1 6 1] [1 7 1]]; ov_res: [[0 2 1] [0 4 1] [0 6 1] [0 7 1] [1 2 1] [1 4 1] [1 6 1] [1 7 1]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:zeros ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5359.aten_nonzero_numpy, %cond : Tensor): %2 : Tensor[] = aten::nonzero_numpy(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:28:0 %3 : Tensor, %4 : Tensor, %5 : Tensor = prim::ListUnpack(%2) %6 : (Long(0, strides=[1], requires_grad=0, device=cpu), Long(0, strides=[1], requires_grad=0, device=cpu), Long(0, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4, %5) return (%6) fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:ones ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5362.aten_nonzero_numpy, %cond : Tensor): %2 : Tensor[] = aten::nonzero_numpy(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:28:0 %3 : Tensor, %4 : Tensor, %5 : Tensor = prim::ListUnpack(%2) %6 : (Long(40, strides=[1], requires_grad=0, device=cpu), Long(40, strides=[1], requires_grad=0, device=cpu), Long(40, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4, %5) return (%6) fw_re: [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1]; ov_res: [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9]; ov_res: [0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1]; ov_res: [0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:True - mask_dtype:<class 'numpy.uint8'> - mask_fill:random ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5365.aten_nonzero_numpy, %cond : Tensor): %2 : Tensor[] = aten::nonzero_numpy(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:28:0 %3 : Tensor, %4 : Tensor, %5 : Tensor = prim::ListUnpack(%2) %6 : (Long(10, strides=[1], requires_grad=0, device=cpu), Long(10, strides=[1], requires_grad=0, device=cpu), Long(10, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4, %5) return (%6) fw_re: [0 0 0 0 0 1 1 1 1 1]; ov_res: [0 0 0 0 0 1 1 1 1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [2 3 5 6 8 2 3 5 6 8]; ov_res: [2 3 5 6 8 2 3 5 6 8] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [1 1 1 1 1 1 1 1 1 1]; ov_res: [1 1 1 1 1 1 1 1 1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:True - mask_dtype:<class 'bool'> - mask_fill:zeros ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5368.aten_nonzero_numpy, %cond : Tensor): %2 : Tensor[] = aten::nonzero_numpy(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:28:0 %3 : Tensor, %4 : Tensor, %5 : Tensor = prim::ListUnpack(%2) %6 : (Long(0, strides=[1], requires_grad=0, device=cpu), Long(0, strides=[1], requires_grad=0, device=cpu), Long(0, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4, %5) return (%6) fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:True - mask_dtype:<class 'bool'> - mask_fill:ones ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5371.aten_nonzero_numpy, %cond : Tensor): %2 : Tensor[] = aten::nonzero_numpy(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:28:0 %3 : Tensor, %4 : Tensor, %5 : Tensor = prim::ListUnpack(%2) %6 : (Long(40, strides=[1], requires_grad=0, device=cpu), Long(40, strides=[1], requires_grad=0, device=cpu), Long(40, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4, %5) return (%6) fw_re: [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1]; ov_res: [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9]; ov_res: [0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1]; ov_res: [0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_nonzero.py::TestNonZero::test_nonzero[ ie_device:CPU - precision:FP32 - as_tuple:True - mask_dtype:<class 'bool'> - mask_fill:random ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_nonzero.___torch_mangle_5374.aten_nonzero_numpy, %cond : Tensor): %2 : Tensor[] = aten::nonzero_numpy(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_nonzero.py:28:0 %3 : Tensor, %4 : Tensor, %5 : Tensor = prim::ListUnpack(%2) %6 : (Long(8, strides=[1], requires_grad=0, device=cpu), Long(8, strides=[1], requires_grad=0, device=cpu), Long(8, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4, %5) return (%6) fw_re: [0 0 0 0 1 1 1 1]; ov_res: [0 0 0 0 1 1 1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [3 5 6 8 3 5 6 8]; ov_res: [3 5 6 8 3 5 6 8] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [1 1 1 1 1 1 1 1]; ov_res: [1 1 1 1 1 1 1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0] - p:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5375.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : int = prim::Constant[value=-2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[2.29173688e-01 9.77392340e-02 5.00845920e-01 7.32410423e-02 3.82578993e-01] [4.97164459e-01 2.84922023e-01 1.28697956e+00 6.69826694e-01 1.22877542e+00] [3.51387227e-02 2.70749142e-01 4.31665506e-01 1.46340265e-01 2.46748597e-01] [3.60315210e-01 8.60363476e-01 1.35969040e+00 8.87162717e-02 1.04818545e-01]] [[1.19239895e-01 5.16314047e-01 1.11432445e+00 1.91106434e-01 1.37842184e-01] [8.44146525e-02 3.51403396e-01 5.75999286e-01 1.72615102e-01 8.96349726e-01] [7.66518160e-01 7.24847614e-01 8.15891758e-01 3.38733203e-04 7.30725706e-01] [1.01714240e-01 4.68527313e-01 3.50640192e-01 2.73441258e-01 7.49389665e-02]] [[8.87369460e-01 5.02074017e-01 2.47106815e-01 1.89031867e-01 1.66460089e-01] [2.99197218e-01 2.37204856e-01 7.15759432e-01 5.47087832e-02 7.06136846e-01] [6.42920345e-01 3.87255779e-01 8.11734239e-02 5.17218411e-01 8.28142927e-02] [3.32044647e-01 2.64550934e-01 9.54651069e-01 8.49880911e-02 2.38887242e-01]]]]; ov_res: [[[[2.29173690e-01 9.77392420e-02 5.00845909e-01 7.32410476e-02 3.82578969e-01] [4.97164458e-01 2.84922034e-01 1.28697968e+00 6.69826686e-01 1.22877538e+00] [3.51387225e-02 2.70749122e-01 4.31665480e-01 1.46340251e-01 2.46748582e-01] [3.60315204e-01 8.60363424e-01 1.35969043e+00 8.87162685e-02 1.04818545e-01]] [[1.19239897e-01 5.16314030e-01 1.11432445e+00 1.91106439e-01 1.37842178e-01] [8.44146535e-02 3.51403415e-01 5.75999260e-01 1.72615096e-01 8.96349728e-01] [7.66518176e-01 7.24847615e-01 8.15891743e-01 3.38733196e-04 7.30725765e-01] [1.01714246e-01 4.68527287e-01 
3.50640237e-01 2.73441255e-01 7.49389678e-02]] [[8.87369454e-01 5.02074063e-01 2.47106805e-01 1.89031869e-01 1.66460097e-01] [2.99197227e-01 2.37204880e-01 7.15759397e-01 5.47087900e-02 7.06136882e-01] [6.42920375e-01 3.87255788e-01 8.11734200e-02 5.17218411e-01 8.28142911e-02] [3.32044661e-01 2.64550924e-01 9.54650998e-01 8.49880874e-02 2.38887221e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0] - p:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5377.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : int = prim::Constant[value=-1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[0.22896449 0.29394815 0.3909654 0.37795654 0.49215805] [0.30255067 0.34545711 0.60411094 0.28090505 0.54346774] [0.23756423 0.10688857 0.70708909 0.39420753 0.48979065] [0.08075567 0.31345674 1.13251055 0.0679246 0.09441653]] [[0.3501177 0.43507202 0.2996214 0.15986678 0.03525722] [0.3033189 0.07934022 0.32937164 0.08565083 0.01303259] [0.36178178 0.13912824 0.54179222 0.96941146 0.1690637 ] [0.69814954 0.12089103 0.36513951 0.67879321 0.49251124]] [[0.33219614 0.48230114 0.61271324 0.0437309 0.06365236] [0.19287956 0.25618952 0.1791015 0.11269426 0.58813402] [0.2681016 0.5551921 0.81192365 0.22640703 0.26283397] [0.21671696 0.17181973 0.44981739 0.09065112 0.39524671]]]]; ov_res: [[[[0.22896449 0.29394817 0.39096537 0.37795651 0.49215803] [0.30255067 0.34545708 0.6041109 0.28090504 0.5434677 ] [0.23756424 0.10688857 0.70708913 0.39420754 0.48979065] [0.08075567 0.31345674 1.13251066 0.0679246 0.09441654]] [[0.35011768 0.43507203 0.2996214 0.15986678 0.03525722] [0.30331889 0.07934022 0.32937163 0.08565082 0.01303259] [0.36178178 0.13912824 0.54179221 0.96941143 0.1690637 ] [0.69814956 0.12089104 0.36513951 0.67879319 0.49251124]] [[0.33219615 0.48230115 0.61271328 0.0437309 0.06365236] [0.19287956 0.25618953 0.1791015 0.11269427 0.58813399] [0.2681016 0.55519205 0.81192362 0.22640701 0.26283398] [0.21671696 0.17181973 0.44981736 0.09065112 0.39524671]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0] - p:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5379.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : int = prim::Constant[value=0]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]] [[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]] [[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]]]]; ov_res: [[[[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]] [[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]] [[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0] - p:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5381.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : int = prim::Constant[value=1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[2.22401216 0.36109661 2.7245604 3.49602843 2.42201685] [1.15890285 1.26023815 1.71043009 1.1472849 2.18840222] [1.95581777 0.34295256 2.75840833 0.58986836 0.91506391] [0.36906881 1.67425921 0.78269757 1.54921376 1.22197481]] [[0.44969504 3.8038739 0.79146724 1.3368479 0.76768953] [1.34444581 0.11207874 1.25698903 1.2059369 1.73744124] [1.875783 2.60771414 1.59816812 1.37120302 1.38336676] [1.31063095 0.87497463 3.03427136 1.0650418 0.66084693]] [[2.70296498 2.13529267 1.49805413 2.02209285 0.60891925] [0.38947145 0.67514236 2.12179551 1.66385587 1.69426765] [0.30267275 1.55557221 1.26194397 2.92304158 1.55811413] [0.65128097 1.62641427 1.68788696 1.52539174 0.84717377]]]]; ov_res: [[[[2.22401214 0.36109662 2.7245605 3.49602842 2.42201686] [1.15890288 1.26023817 1.71043003 1.14728487 2.18840218] [1.9558177 0.34295255 2.75840855 0.58986837 0.91506392] [0.3690688 1.67425919 0.78269756 1.54921365 1.22197485]] [[0.44969505 3.80387402 0.79146725 1.3368479 0.76768953] [1.34444582 0.11207874 1.256989 1.20593691 1.7374413 ] [1.87578297 2.60771418 1.59816813 1.37120295 1.3833667 ] [1.31063104 0.87497461 3.03427124 1.06504178 0.66084695]] [[2.70296502 2.13529253 1.49805403 2.02209282 0.6089192 ] [0.38947147 0.67514241 2.12179542 1.66385579 1.69426763] [0.30267274 1.55557227 1.26194394 2.92304158 1.55811405] [0.65128094 1.6264143 1.68788695 1.5253917 0.84717375]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0] - p:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5383.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : int = prim::Constant[value=2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[1.80977224 1.55579364 1.50931814 1.12320154 2.77940657] [0.86419948 1.68558022 1.68223067 0.70277455 1.59481273] [1.33792271 0.64545789 0.36069269 0.12900332 1.93316556] [1.30257057 2.18277968 1.06220286 1.04031894 0.51856181]] [[1.07083705 1.54068869 0.55552142 0.24284332 0.55299353] [1.1331669 0.39499954 0.22588549 1.45304349 0.63135809] [0.93094568 2.54593345 0.58553041 1.00069889 2.38133284] [1.14642267 1.84889766 1.21751321 2.92850306 0.71389627]] [[1.56130162 0.17055085 0.39288966 0.4867952 0.87106772] [1.43277057 2.5487705 0.47270525 0.42231118 0.75199187] [0.71489784 2.48092636 0.40724962 1.68537669 2.01869176] [1.13532202 0.75424054 1.82063842 1.22104876 1.80219488]]]]; ov_res: [[[[1.80977225 1.55579376 1.50931811 1.12320149 2.77940655] [0.86419946 1.68558025 1.68223071 0.70277458 1.59481287] [1.33792269 0.64545786 0.36069271 0.12900333 1.93316555] [1.30257058 2.18277979 1.06220281 1.04031897 0.51856184]] [[1.07083714 1.54068863 0.55552143 0.24284332 0.55299354] [1.13316691 0.39499956 0.22588548 1.45304346 0.63135809] [0.93094569 2.54593349 0.5855304 1.00069892 2.38133287] [1.14642274 1.84889758 1.2175132 2.92850327 0.71389621]] [[1.56130159 0.17055085 0.39288968 0.48679519 0.87106776] [1.43277061 2.54877067 0.47270525 0.42231119 0.75199187] [0.71489787 2.48092651 0.40724963 1.68537664 2.01869178] [1.13532197 0.75424051 1.82063842 1.22104883 1.80219483]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0] - p:2.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5385.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : float = prim::Constant[value=2.5]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[1.34811738 0.72072313 2.3106746 2.235855 1.19856502] [2.02586366 0.68200498 1.07647359 1.25333718 1.19688924] [0.72634735 1.89855979 0.99578791 1.78096054 1.43041812] [0.15006524 1.38486871 1.18279124 1.69241891 1.07098175]] [[0.8416951 0.39954144 0.55549235 1.70312126 1.67057366] [1.63773885 1.23624357 0.32893572 1.03485628 0.80010728] [0.23330159 1.75122472 2.07401661 1.2706129 2.62370985] [1.13623794 1.59778356 1.39719313 1.32159835 0.28357372]] [[0.58948599 1.42924856 0.74030164 1.27826906 1.13410384] [1.45814046 1.26315897 1.56122145 2.11293221 1.24460064] [1.26171634 1.41490015 1.91309911 0.53266621 0.5002238 ] [1.08159099 0.96711589 0.90976232 1.07913659 1.15460023]]]]; ov_res: [[[[1.34811735 0.72072315 2.31067467 2.23585486 1.19856501] [2.02586365 0.68200499 1.07647359 1.25333714 1.19688928] [0.72634733 1.89855981 0.99578792 1.78096056 1.43041813] [0.15006523 1.38486874 1.18279123 1.69241893 1.07098174]] [[0.84169513 0.39954144 0.5554924 1.7031213 1.67057371] [1.63773894 1.23624349 0.32893571 1.03485632 0.8001073 ] [0.23330158 1.75122464 2.07401657 1.27061296 2.62370992] [1.13623786 1.59778357 1.39719319 1.32159841 0.28357372]] [[0.589486 1.42924857 0.74030161 1.27826905 1.13410378] [1.45814049 1.26315904 1.56122148 2.11293221 1.24460065] [1.26171637 1.41490018 1.91309917 0.53266621 0.50022382] [1.08159101 0.96711588 0.90976232 1.07913661 1.15460014]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0] - p:inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5387.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : float = prim::Constant[value=inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[1.50485637 1.26071255 0.61363873 1.2460675 2.34041922] [1.32124771 0.12473587 0.26490877 0.87834461 1.84858147] [0.96356908 1.03274844 0.2608275 0.19277334 0.66887413] [0.69885387 0.25768138 1.66817915 2.910187 2.53454039]] [[1.71930305 0.23008068 0.52438655 1.32186775 1.98816001] [1.90247589 0.81039136 1.05422265 1.79704548 0.89118013] [0.60563778 1.5373397 1.06967706 0.36969562 1.90837469] [0.11005978 0.59668721 0.5520443 0.5738612 1.27155425]] [[0.77837235 0.81224556 0.94797911 0.82858509 1.83488546] [0.58071204 1.52240292 1.02849354 0.78559326 0.19536136] [2.1149544 1.04144803 1.96703738 0.99682909 1.50221589] [1.31815858 0.23239612 1.78425384 1.07237308 2.17672294]]]]; ov_res: [[[[1.50485635 1.2607125 0.61363876 1.24606752 2.34041929] [1.3212477 0.12473588 0.26490876 0.8783446 1.84858143] [0.9635691 1.03274846 0.26082751 0.19277334 0.66887414] [0.69885385 0.25768137 1.66817915 2.91018701 2.53454041]] [[1.71930301 0.23008068 0.52438653 1.3218677 1.98816001] [1.90247583 0.81039137 1.0542227 1.79704547 0.89118016] [0.60563779 1.53733969 1.06967711 0.3696956 1.90837467] [0.11005978 0.5966872 0.55204433 0.57386118 1.27155423]] [[0.77837235 0.81224555 0.94797909 0.82858509 1.83488548] [0.58071202 1.52240288 1.02849352 0.78559327 0.19536136] [2.11495447 1.041448 1.96703732 0.99682909 1.50221586] [1.31815863 0.23239611 1.78425384 1.07237303 2.176723 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0] - p:-inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5389.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : float = prim::Constant[value=-inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[0.86446559 0.73636125 1.02257022 0.55304947 0.29887811] [0.21535728 0.04943831 0.52198951 0.56689721 0.73338246] [0.45789877 0.67095493 0.47643942 0.33872897 0.26108025] [1.05561563 0.46391917 0.25576239 0.6218082 0.27161512]] [[0.65071831 0.09690498 0.84933238 0.49812319 0.08920333] [0.04569124 0.5943144 0.88082294 0.34644127 0.57396155] [0.25193716 0.97651069 0.48737042 0.22029527 0.03991495] [0.18578165 0.23027911 0.20248918 0.04464311 0.48498014]] [[0.07078531 2.09865217 0.15639132 0.34125082 0.82969171] [0.49089413 0.39911137 0.08772965 0.68654386 0.7656212 ] [0.56299696 0.62605272 0.67102089 0.02574346 0.04749459] [0.46999547 0.27144445 0.4268565 0.08104553 0.42937645]]]]; ov_res: [[[[0.86446559 0.73636127 1.02257025 0.55304945 0.2988781 ] [0.21535727 0.04943831 0.52198952 0.56689721 0.73338246] [0.45789877 0.67095494 0.47643942 0.33872896 0.26108024] [1.05561566 0.46391916 0.2557624 0.62180823 0.27161512]] [[0.65071833 0.09690499 0.84933239 0.4981232 0.08920333] [0.04569124 0.5943144 0.88082296 0.34644127 0.57396156] [0.25193715 0.9765107 0.48737043 0.22029527 0.03991495] [0.18578164 0.23027912 0.20248918 0.04464311 0.48498014]] [[0.07078531 2.09865212 0.15639132 0.34125081 0.82969171] [0.49089414 0.39911136 0.08772966 0.68654388 0.76562119] [0.56299698 0.62605274 0.67102087 0.02574346 0.04749459] [0.46999547 0.27144444 0.42685649 0.08104553 0.42937645]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1] - p:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5391.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : int = prim::Constant[value=-2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[0.11991141 0.00866825 0.13250585 0.26492638 0.02694323] [0.0865987 0.15183298 0.45359166 0.0585135 0.21113009] [0.00274705 0.12617795 0.05786455 0.08065507 0.07286058] [0.09227937 0.05294308 0.25242049 0.17934581 0.2282209 ]]]]; ov_res: [[[[0.11991141 0.00866825 0.13250585 0.2649264 0.02694323] [0.0865987 0.15183298 0.45359164 0.0585135 0.21113008] [0.00274705 0.12617797 0.05786455 0.08065507 0.07286058] [0.09227937 0.05294308 0.25242049 0.1793458 0.22822091]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1] - p:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5393.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : int = prim::Constant[value=-1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[0.03014162 0.03709587 0.00328846 0.1534117 0.02926017] [0.18306793 0.02057857 0.05628518 0.0230655 0.10866876] [0.03119424 0.02275645 0.16625734 0.1431112 0.06701972] [0.13392554 0.07366069 0.13213001 0.03007712 0.02464413]]]]; ov_res: [[[[0.03014162 0.03709587 0.00328846 0.1534117 0.02926017] [0.18306795 0.02057857 0.05628518 0.0230655 0.10866877] [0.03119424 0.02275645 0.16625734 0.1431112 0.06701972] [0.13392554 0.07366069 0.13213001 0.03007712 0.02464413]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1] - p:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5395.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : int = prim::Constant[value=0]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.]]]]; ov_res: [[[[6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1] - p:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5397.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : int = prim::Constant[value=1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[1.99440126 5.34053434 5.30391037 5.09151409 4.35092666] [6.1063092 4.0394502 4.57116432 3.11940184 3.69480404] [3.96422223 6.23274189 2.72942622 5.50634947 4.18710528] [3.73529074 6.21797547 7.55709206 5.29746048 6.3723469 ]]]]; ov_res: [[[[1.99440122 5.34053469 5.30391073 5.09151411 4.3509264 ] [6.10630894 4.03945017 4.57116413 3.11940169 3.69480395] [3.96422243 6.23274231 2.72942615 5.50634909 4.18710518] [3.73529077 6.21797562 7.55709219 5.29746056 6.37234735]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1] - p:2 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5399.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : int = prim::Constant[value=2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[1.18963924 1.70671175 2.76386059 2.48357387 2.66921098] [1.94146575 2.39892741 2.38262265 2.8904087 1.96234928] [2.6057693 1.95640392 2.10780058 1.87888828 2.53340226] [1.90611436 2.74701127 2.20328329 1.04704833 2.0419531 ]]]]; ov_res: [[[[1.18963933 1.70671177 2.7638607 2.48357391 2.66921091] [1.94146585 2.39892745 2.38262272 2.89040875 1.9623493 ] [2.60576916 1.95640385 2.10780072 1.87888825 2.53340244] [1.90611434 2.74701118 2.20328331 1.04704833 2.04195309]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1] - p:2.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5401.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : float = prim::Constant[value=2.5]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[2.59438903 1.32873218 1.72078497 2.29687727 2.08397554] [1.68176551 2.84209662 0.54407284 2.90358951 2.04380115] [1.34337938 1.79274149 1.98497053 1.60784926 2.79231482] [1.10648061 2.80790269 1.4849075 2.53887537 1.65569947]]]]; ov_res: [[[[2.59438896 1.32873213 1.72078502 2.29687738 2.08397555] [1.68176544 2.84209681 0.54407287 2.90358949 2.04380131] [1.34337938 1.79274154 1.98497057 1.60784924 2.79231477] [1.1064806 2.80790281 1.48490763 2.53887558 1.65569949]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1] - p:inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5403.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : float = prim::Constant[value=inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[1.42979713 1.54636728 1.1581754 2.75526435 0.81290944] [1.48951896 1.37820981 2.33737375 1.42948363 1.12918378] [1.91289584 2.36577329 1.52875827 1.10228714 1.29997354] [1.18205685 2.65029694 2.13771936 1.57709379 1.95177861]]]]; ov_res: [[[[1.42979717 1.54636729 1.15817535 2.75526428 0.81290942] [1.489519 1.37820983 2.33737373 1.42948365 1.12918377] [1.9128958 2.3657732 1.52875829 1.10228717 1.29997349] [1.1820569 2.65029693 2.13771939 1.57709384 1.95177865]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1] - p:-inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5405.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : float = prim::Constant[value=-inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[0.70865461 0.02009751 0.13434894 0.08567844 0.04096045] [0.42800787 0.32310523 0.10461986 0.08744046 0.08062406] [0.05588718 0.16840334 0.33113858 0.0200214 0.30195416] [0.01517182 0.21250793 0.08591852 0.34035988 0.22603692]]]]; ov_res: [[[[0.70865458 0.02009751 0.13434894 0.08567844 0.04096045] [0.42800787 0.32310525 0.10461986 0.08744045 0.08062406] [0.05588718 0.16840334 0.33113858 0.0200214 0.30195415] [0.01517182 0.21250793 0.08591852 0.3403599 0.22603692]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1, 2] - p:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5407.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : int = prim::Constant[value=-2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[0.06009987 0.00185456 0.01924142 0.0408789 0.00384363]]]]; ov_res: [[[[0.06009987 0.00185456 0.01924142 0.04087891 0.00384363]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1, 2] - p:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5409.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : int = prim::Constant[value=-1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[0.00023349 0.01458754 0.00075127 0.02123725 0.00848455]]]]; ov_res: [[[[0.00023349 0.01458754 0.00075127 0.02123725 0.00848455]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1, 2] - p:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5411.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : int = prim::Constant[value=0]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[24. 24. 24. 24. 24.]]]]; ov_res: [[[[24. 24. 24. 24. 24.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1, 2] - p:1 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5413.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : int = prim::Constant[value=1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[22.27212627 14.61744911 16.413864 21.12939623 20.63252978]]]]; ov_res: [[[[22.27212524 14.6174469 16.41386414 21.12939835 20.63253021]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1, 2] - p:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5415.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : int = prim::Constant[value=2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[4.25214535 5.80633602 4.33119536 5.17888364 5.01087592]]]]; ov_res: [[[[4.25214529 5.80633593 4.33119488 5.17888355 5.0108757 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1, 2] - p:2.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5417.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : float = prim::Constant[value=2.5]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[4.09909648 4.49841932 4.70943468 2.62917631 3.83982792]]]]; ov_res: [[[[4.0990963 4.49841976 4.70943451 2.62917638 3.83982778]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1, 2] - p:inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5419.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : float = prim::Constant[value=inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[2.07991598 2.34204217 2.344327 2.76988383 2.07790135]]]]; ov_res: [[[[2.079916 2.34204221 2.34432697 2.76988387 2.07790136]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:True - dim:[0, 1, 2] - p:-inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5421.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : float = prim::Constant[value=-inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[[0.01282121 0.04815511 0.03126582 0.04690045 0.00376997]]]]; ov_res: [[[[0.01282121 0.04815511 0.03126582 0.04690045 0.00376997]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0] - p:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5423.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : int = prim::Constant[value=-2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[0.3329111 0.45396386 0.56896455 0.15559216 0.33427269] [0.34572929 0.83034438 0.41655168 0.94990454 0.53484262] [0.26370316 0.2368233 0.05855749 0.13142268 0.34483894] [0.88023139 0.26073926 0.84205474 0.2287835 0.30991301]] [[0.22710999 0.796054 0.44497829 0.05726894 0.12451449] [0.38310876 0.48302081 0.30971613 0.01152923 0.3694579 ] [0.15205223 0.22575376 0.36639492 0.20305619 0.21860065] [0.57018708 0.58697367 0.84712559 0.12311747 0.24717096]] [[0.36167452 0.54403499 0.55716751 0.93018983 0.46232221] [0.41299785 0.40437763 0.19059151 0.36441346 0.34499135] [0.07846467 0.41710605 0.00490666 0.29183386 0.45196521] [0.89999054 1.27583977 0.50922356 0.06389121 0.1443216 ]]]; ov_res: [[[0.3329111 0.45396388 0.56896454 0.15559216 0.33427268] [0.34572932 0.83034438 0.41655171 0.94990456 0.53484261] [0.26370317 0.23682329 0.05855748 0.13142268 0.34483895] [0.88023144 0.2607393 0.84205478 0.2287835 0.30991301]] [[0.22710998 0.79605395 0.4449783 0.05726894 0.12451448] [0.38310874 0.48302081 0.30971614 0.01152923 0.3694579 ] [0.15205224 0.22575377 0.36639491 0.2030562 0.21860065] [0.57018703 0.58697367 0.84712559 0.12311747 0.24717094]] [[0.36167452 0.54403496 0.55716747 0.93018979 0.46232221] [0.41299784 0.40437764 0.19059151 0.36441344 0.34499136] [0.07846467 0.41710603 0.00490666 0.29183388 0.45196521] [0.8999905 1.27583969 0.50922358 0.06389122 0.14432159]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0] - p:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5425.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : int = prim::Constant[value=-1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[0.02298519 0.41862176 0.15881173 0.02815981 0.15381112] [0.38791243 0.38736813 0.1733142 1.28377975 0.28564825] [0.49519717 0.06539419 0.12520554 0.30396854 0.77618328] [0.3271705 0.59511948 0.32001063 0.19785369 0.43257994]] [[0.45794024 0.34438957 0.45479457 0.1602385 0.24130867] [0.43465342 0.31006456 0.04452897 0.08280554 0.19436734] [0.28959398 0.34469578 0.70921565 0.34211875 0.29881856] [0.05404651 0.11180716 0.12729279 0.63462491 0.42904649]] [[0.32445409 0.1800295 0.64955009 0.20654666 0.26745146] [0.1874409 0.17272297 0.41979623 0.48201596 0.12687924] [0.61621001 0.23490795 0.21516851 0.18037661 0.22953234] [0.06475165 0.33360463 0.01799024 0.14976583 0.58773429]]]; ov_res: [[[0.02298519 0.41862178 0.15881172 0.02815982 0.15381111] [0.38791242 0.38736814 0.1733142 1.28377974 0.28564826] [0.49519715 0.06539418 0.12520553 0.30396852 0.77618325] [0.32717052 0.59511948 0.32001063 0.1978537 0.43257993]] [[0.45794025 0.34438956 0.45479459 0.16023849 0.24130866] [0.43465343 0.31006455 0.04452896 0.08280554 0.19436733] [0.28959396 0.34469581 0.70921564 0.34211874 0.29881856] [0.05404651 0.11180715 0.12729278 0.6346249 0.42904648]] [[0.3244541 0.1800295 0.64955008 0.20654665 0.26745147] [0.1874409 0.17272298 0.4197962 0.48201594 0.12687925] [0.61620998 0.23490795 0.21516852 0.18037662 0.22953233] [0.06475165 0.3336046 0.01799024 0.14976583 0.58773428]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0] - p:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5427.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : int = prim::Constant[value=0]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]] [[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]] [[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]]]; ov_res: [[[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]] [[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]] [[2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.] [2. 2. 2. 2. 2.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0] - p:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5429.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : int = prim::Constant[value=1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[0.37897977 2.31003726 1.88119839 1.0883812 0.75463528] [2.33716496 0.90924724 1.57234936 1.10849712 1.2272296 ] [1.50477341 2.82187761 0.54960481 0.47070389 1.86143428] [0.62475854 0.73687873 1.3027976 1.0004605 0.94622562]] [[1.13248884 1.03551153 2.2264836 0.22751027 2.90756647] [2.90701372 1.09733454 0.79033516 1.15544012 1.35469731] [1.07603959 1.6656362 1.87347309 1.58106477 0.9204107 ] [0.83034932 3.27349724 1.066409 0.57292578 0.70983503]] [[1.84397735 1.26065106 2.72272884 1.59353604 1.33697973] [1.31175582 1.53831509 1.92998995 2.72657108 0.97443386] [1.30378958 1.72782311 1.63160643 0.77810741 0.83108667] [2.98035208 1.83697026 2.47160766 0.28582582 1.73791032]]]; ov_res: [[[0.37897977 2.31003714 1.88119841 1.08838129 0.75463527] [2.33716488 0.90924728 1.57234931 1.10849714 1.2272296 ] [1.50477338 2.82187748 0.54960483 0.4707039 1.86143422] [0.62475854 0.73687875 1.30279756 1.00046051 0.94622564]] [[1.13248885 1.03551149 2.22648358 0.22751027 2.90756655] [2.90701365 1.09733462 0.79033518 1.15544009 1.35469735] [1.07603955 1.66563618 1.87347317 1.58106482 0.92041075] [0.83034933 3.2734971 1.06640899 0.57292581 0.70983505]] [[1.84397745 1.26065111 2.72272873 1.59353602 1.33697975] [1.3117559 1.53831506 1.92998993 2.72657108 0.9744339 ] [1.30378962 1.72782314 1.63160634 0.7781074 0.83108664] [2.98035192 1.83697021 2.47160745 0.28582582 1.73791027]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0] - p:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5431.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : int = prim::Constant[value=2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[1.67927189 0.51087304 0.61048438 1.71571033 1.96063506] [0.137796 1.64108911 1.7609021 0.90430845 1.26932649] [1.28718957 0.63149763 0.34022357 0.61102461 1.13081663] [1.48542052 3.77182222 0.89792223 0.39016152 1.1988546 ]] [[1.52457231 0.71387688 1.02685158 1.93371317 1.93067726] [2.32727088 1.18773858 1.65607771 1.84168658 0.73103043] [0.42695847 1.2795563 0.44423167 0.56846419 0.76355215] [0.40898865 1.62704158 0.51741544 1.89716259 0.75589895]] [[0.66048166 0.58076901 0.8851373 0.73357188 1.61063821] [1.60392066 3.18879927 2.05483908 0.35353416 0.99559989] [1.29630801 0.29390633 1.03172308 1.05619951 0.91662778] [1.37349538 0.8676686 0.16537354 1.40809174 2.09892223]]]; ov_res: [[[1.67927194 0.51087308 0.61048436 1.71571028 1.96063507] [0.137796 1.64108908 1.76090205 0.9043085 1.26932645] [1.2871896 0.63149762 0.34022358 0.61102462 1.13081658] [1.48542058 3.77182221 0.89792222 0.39016151 1.19885457]] [[1.52457237 0.7138769 1.02685153 1.9337132 1.93067729] [2.32727098 1.18773854 1.65607762 1.84168661 0.7310304 ] [0.42695847 1.27955627 0.44423166 0.56846422 0.76355213] [0.40898862 1.62704158 0.51741546 1.89716256 0.75589895]] [[0.66048163 0.580769 0.88513732 0.73357183 1.61063826] [1.6039207 3.18879938 2.05483913 0.35353416 0.99559993] [1.29630804 0.29390633 1.03172314 1.05619955 0.91662776] [1.37349534 0.86766863 0.16537353 1.40809178 2.09892225]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0] - p:2.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5433.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : float = prim::Constant[value=2.5]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[1.46764985 0.23070119 0.51139497 1.39948094 0.24559274] [1.24908766 2.2031217 0.45190323 1.25388305 0.20568704] [1.74252745 0.68707867 1.97788482 0.28568366 1.61375942] [0.72953559 0.93989908 2.22625437 2.89127536 1.25054295]] [[0.1533417 1.40889834 1.20995121 0.78553871 1.12390736] [0.94087346 1.85477373 1.39079515 1.08791861 1.91015309] [0.94171922 1.18310209 1.12560112 1.64199128 0.70390122] [1.84528878 1.1608512 0.56850773 2.29621055 0.99975039]] [[0.35983777 1.55904029 1.44570188 0.52288803 0.4392289 ] [0.90478046 1.36550139 1.01444655 0.85857093 1.3544768 ] [1.10872347 0.92678302 2.17571804 0.85288326 1.35043887] [1.406959 2.1361147 1.00553951 0.52336459 0.88821983]]]; ov_res: [[[1.46764982 0.23070118 0.51139498 1.39948106 0.24559273] [1.24908769 2.20312166 0.45190322 1.25388312 0.20568703] [1.74252748 0.68707865 1.97788489 0.28568366 1.6137594 ] [0.72953558 0.93989909 2.22625446 2.89127541 1.250543 ]] [[0.15334171 1.40889835 1.20995128 0.78553867 1.12390745] [0.9408735 1.85477376 1.39079511 1.08791864 1.91015315] [0.94171917 1.18310213 1.12560117 1.64199126 0.70390123] [1.84528887 1.16085124 0.56850773 2.29621053 0.99975038]] [[0.35983777 1.55904031 1.44570196 0.522888 0.43922889] [0.90478045 1.3655014 1.0144465 0.85857093 1.35447681] [1.10872352 0.92678303 2.17571783 0.85288322 1.35043883] [1.40695894 2.1361146 1.00553954 0.5233646 0.88821983]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0] - p:inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5435.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : float = prim::Constant[value=inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[0.6661233 0.69859139 1.0273157 2.16794834 1.32578469] [1.99128545 0.43442189 1.29797911 0.21200658 0.91648528] [0.48779065 0.34372632 0.71152868 2.27776792 0.81024531] [1.76006551 1.29595894 0.12422802 0.74603182 1.44669905]] [[1.46027443 1.26362805 1.01674857 1.06768753 1.01197375] [1.71859183 1.58910554 0.79947347 1.80845773 1.63562018] [0.98092256 1.64562227 0.63646266 1.58237098 2.48876339] [1.1510121 1.2563912 0.81056638 2.35766453 1.82922681]] [[0.89533946 0.84947366 1.14305155 0.98910383 1.04367905] [2.87809765 0.94100504 1.88625481 0.90150727 0.63974715] [0.41166388 0.69201121 0.31718493 0.91815269 0.2514484 ] [1.97083758 2.17949253 2.12623854 1.09119963 0.52088386]]]; ov_res: [[[0.66612327 0.69859141 1.02731574 2.16794825 1.32578468] [1.99128544 0.4344219 1.29797912 0.21200658 0.91648531] [0.48779064 0.34372631 0.71152866 2.2777679 0.81024534] [1.76006556 1.295959 0.12422802 0.74603182 1.44669902]] [[1.46027446 1.26362801 1.01674855 1.06768751 1.01197374] [1.71859181 1.58910549 0.79947346 1.80845773 1.63562024] [0.98092258 1.64562225 0.63646263 1.582371 2.48876333] [1.15101206 1.25639117 0.81056637 2.35766459 1.82922685]] [[0.89533949 0.84947366 1.14305151 0.98910385 1.043679 ] [2.87809753 0.94100505 1.88625479 0.90150726 0.63974714] [0.41166389 0.69201124 0.31718493 0.91815269 0.25144839] [1.97083759 2.17949247 2.12623858 1.09119964 0.52088386]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0] - p:-inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5437.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0]]() %self.p : float = prim::Constant[value=-inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[[1.1391495 1.12190524 0.18276879 0.12213999 0.7191152 ] [0.18194103 0.28545346 1.13354706 0.15297392 0.48696863] [0.99313576 0.02159001 0.53422682 0.80710397 0.31899649] [0.27103654 0.95752468 0.10477157 0.89304975 0.24541079]] [[0.61690792 0.17881338 0.24346888 0.85319655 1.12427185] [0.02059032 0.48460411 0.24928164 0.36789825 1.35482474] [0.57463474 0.49971776 0.16686499 0.20166068 0.91230491] [0.33959941 0.4031571 0.59945695 0.16322258 0.11019924]] [[0.74724728 0.29795802 0.06661043 0.80153369 0.20252958] [0.43596642 0.0539483 0.35456786 0.20123588 0.38212459] [0.12405911 0.22114538 0.67967006 0.79476902 0.25015725] [0.35198507 0.47040103 0.07442913 0.09084789 0.20102126]]]; ov_res: [[[1.13914955 1.12190521 0.18276879 0.12213999 0.7191152 ] [0.18194103 0.28545347 1.13354707 0.15297392 0.48696864] [0.99313575 0.02159001 0.53422683 0.80710399 0.31899649] [0.27103654 0.95752466 0.10477156 0.89304978 0.24541079]] [[0.61690789 0.17881338 0.24346888 0.85319656 1.12427187] [0.02059031 0.48460412 0.24928164 0.36789826 1.35482478] [0.57463473 0.49971777 0.16686499 0.20166068 0.91230494] [0.3395994 0.40315711 0.59945697 0.16322258 0.11019924]] [[0.74724728 0.29795802 0.06661043 0.8015337 0.20252958] [0.4359664 0.0539483 0.35456786 0.20123589 0.3821246 ] [0.12405911 0.22114539 0.67967004 0.79476905 0.25015724] [0.35198507 0.47040102 0.07442913 0.09084789 0.20102127]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1] - p:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5439.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : int = prim::Constant[value=-2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[0.02517511 0.14518935 0.00540006 0.14216228 0.06345134] [0.30954531 0.0037204 0.20321997 0.26334594 0.38485064] [0.06732045 0.17584843 0.1021396 0.0804108 0.04196831] [0.13982435 0.07760028 0.03353529 0.1257094 0.07907515]]; ov_res: [[0.02517511 0.14518934 0.00540006 0.14216228 0.06345134] [0.30954531 0.0037204 0.20321997 0.26334593 0.38485065] [0.06732045 0.17584842 0.10213961 0.08041079 0.04196831] [0.13982435 0.07760028 0.03353529 0.1257094 0.07907515]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1] - p:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5441.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : int = prim::Constant[value=-1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[0.06998578 0.07558715 0.03712557 0.08149374 0.16537539] [0.04192415 0.13652835 0.0455508 0.09498295 0.07792143] [0.11648784 0.12929263 0.07542557 0.10856331 0.05610985] [0.04883711 0.17761649 0.01980247 0.00786448 0.15097464]]; ov_res: [[0.06998578 0.07558715 0.03712557 0.08149374 0.16537538] [0.04192415 0.13652836 0.0455508 0.09498294 0.07792143] [0.11648784 0.12929262 0.07542557 0.10856331 0.05610985] [0.0488371 0.17761649 0.01980247 0.00786448 0.15097463]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1] - p:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5443.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : int = prim::Constant[value=0]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.]]; ov_res: [[6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.] [6. 6. 6. 6. 6.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1] - p:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5445.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : int = prim::Constant[value=1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[7.16959943 4.77840075 6.89933443 4.71679044 5.61593459] [3.09658901 2.65817114 3.28422966 3.78720016 4.64073553] [5.08125019 5.49656593 3.52341288 5.38089179 3.2136447 ] [5.64826725 5.27002429 7.53480382 3.9915297 5.93653317]]; ov_res: [[7.16959953 4.77840042 6.89933443 4.71679068 5.61593485] [3.09658909 2.65817142 3.28422952 3.78720021 4.64073563] [5.08125019 5.4965663 3.52341294 5.3808918 3.21364474] [5.64826775 5.27002382 7.53480339 3.9915297 5.93653297]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1] - p:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5447.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : int = prim::Constant[value=2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[2.34490549 2.44057342 2.29967629 1.76710652 1.7928802 ] [1.27398154 1.81738817 2.40089442 2.71591023 2.17692318] [1.82411981 3.27643105 2.55695319 2.88555263 1.77147277] [3.16589023 2.88088898 1.93870164 2.15515345 2.09058378]]; ov_res: [[2.34490561 2.44057345 2.29967618 1.76710641 1.7928803 ] [1.27398157 1.81738806 2.4008944 2.7159102 2.17692327] [1.82411981 3.27643085 2.55695319 2.88555264 1.77147281] [3.16589022 2.88088894 1.93870163 2.15515351 2.0905838 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1] - p:2.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5449.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : float = prim::Constant[value=2.5]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[2.53146764 1.08679805 2.42281874 2.64565723 2.60239872] [2.17842676 2.41131024 2.28685687 3.54192266 1.30397621] [1.09429965 2.10854286 2.01014514 2.95661019 1.38717506] [3.07396589 2.15967543 1.62157055 2.60818154 1.66850878]]; ov_res: [[2.53146768 1.08679807 2.4228189 2.6456573 2.60239887] [2.17842674 2.4113102 2.28685689 3.54192305 1.30397618] [1.09429967 2.10854292 2.01014519 2.9566102 1.38717508] [3.07396603 2.15967536 1.62157047 2.60818148 1.66850877]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1] - p:inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5451.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : float = prim::Constant[value=inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[1.38401938 1.44678312 1.35039185 2.24238488 1.52957446] [2.50103637 1.98107621 3.19838502 2.16301678 1.67210913] [1.77159642 1.19301578 1.47810845 2.09195735 1.35114919] [1.8470473 2.00380314 1.24097961 0.92574537 1.62410955]]; ov_res: [[1.38401937 1.44678307 1.35039186 2.24238491 1.52957451] [2.50103641 1.98107624 3.198385 2.1630168 1.67210913] [1.77159643 1.19301581 1.47810841 2.09195733 1.3511492 ] [1.84704733 2.00380325 1.24097967 0.92574537 1.62410951]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1] - p:-inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5453.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1]]() %self.p : float = prim::Constant[value=-inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [[0.0019314 0.25277062 0.04069968 0.26570642 0.00055889] [0.12854104 0.0250628 0.05691976 0.17647575 0.18137897] [0.13009382 0.3033684 0.19724819 0.14645086 0.09314407] [0.20645948 0.1704012 0.23805823 0.25103259 0.23189074]]; ov_res: [[0.0019314 0.25277063 0.04069968 0.26570642 0.00055889] [0.12854104 0.0250628 0.05691976 0.17647575 0.18137898] [0.13009383 0.30336839 0.19724819 0.14645086 0.09314407] [0.20645948 0.1704012 0.23805822 0.25103259 0.23189074]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1, 2] - p:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5455.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : int = prim::Constant[value=-2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [0.07441165 0.04975879 0.02775787 0.04794681 0.03773319]; ov_res: [0.07441165 0.04975879 0.02775787 0.0479468 0.03773319] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1, 2] - p:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5457.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : int = prim::Constant[value=-1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [0.0201868 0.01088257 0.00814356 0.00916355 0.00572518]; ov_res: [0.0201868 0.01088257 0.00814356 0.00916355 0.00572518] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1, 2] - p:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5459.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : int = prim::Constant[value=0]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [24. 24. 24. 24. 24.]; ov_res: [24. 24. 24. 24. 24.] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1, 2] - p:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5461.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : int = prim::Constant[value=1]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [19.25510792 16.81447106 21.01305725 20.59430087 23.041054 ]; ov_res: [19.25510788 16.81446838 21.01305771 20.59430313 23.04105377] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1, 2] - p:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5463.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : int = prim::Constant[value=2]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [5.6002901 5.39753985 4.78662251 4.81172378 5.38934215]; ov_res: [5.6002903 5.39753962 4.78662252 4.81172371 5.38934231] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1, 2] - p:2.5 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5465.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : float = prim::Constant[value=2.5]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [3.91585521 5.09384237 4.47965179 4.93858107 4.83247093]; ov_res: [3.91585541 5.09384251 4.47965193 4.93858147 4.83247137] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1, 2] - p:inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5467.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : float = prim::Constant[value=inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [1.98680324 2.99254963 1.82264133 1.87170989 1.48640513]; ov_res: [1.98680329 2.99254966 1.82264137 1.87170994 1.48640513] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_norm.py::TestNorm::test_norm[ ie_device:CPU - precision:FP32 - keepdim:False - dim:[0, 1, 2] - p:-inf ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_norm.___torch_mangle_5469.aten_norm, %input_data.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.dim : int[] = prim::Constant[value=[0, 1, 2]]() %self.p : float = prim::Constant[value=-inf]() %5 : Tensor = aten::norm(%input_data.1, %self.p, %self.dim, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_norm.py:28:23 return (%5) fw_re: [0.0162941 0.03145886 0.00902935 0.14777172 0.0287179 ]; ov_res: [0.0162941 0.03145886 0.00902935 0.14777172 0.0287179 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_numel.py::TestNumel::test_numel[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': (1,)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_numel.___torch_mangle_5470.aten_numel, %x.1 : Tensor): %2 : int = aten::numel(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_numel.py:19:23 return (%2) fw_re: 1; ov_res: 1 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_numel.py::TestNumel::test_numel[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': (2,)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_numel.___torch_mangle_5472.aten_numel, %x.1 : Tensor): %2 : int = aten::numel(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_numel.py:19:23 return (%2) fw_re: 2; ov_res: 2 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_numel.py::TestNumel::test_numel[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': (2, 3)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_numel.___torch_mangle_5474.aten_numel, %x.1 : Tensor): %2 : int = aten::numel(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_numel.py:19:23 return (%2) fw_re: 6; ov_res: 6 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_numel.py::TestNumel::test_numel[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': (3, 4, 5)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_numel.___torch_mangle_5476.aten_numel, %x.1 : Tensor): %2 : int = aten::numel(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_numel.py:19:23 return (%2) fw_re: 60; ov_res: 60 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_numel.py::TestNumel::test_numel[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': (1, 2, 3, 4)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_numel.___torch_mangle_5478.aten_numel, %x.1 : Tensor): %2 : int = aten::numel(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_numel.py:19:23 return (%2) fw_re: 24; ov_res: 24 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_numel.py::TestNumel::test_numel[ ie_device:CPU - precision:FP32 - kwargs_to_prepare_input:{'input_shape': (1, 2, 3, 4, 5)} ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_numel.___torch_mangle_5480.aten_numel, %x.1 : Tensor): %2 : int = aten::numel(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_numel.py:19:23 return (%2) fw_re: 120; ov_res: 120 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:reflect - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5481.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="reflect"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 0.14634764 -0.23439011 0.14634764 ... -0.21704979 -0.51735353 -0.43286556] [-0.29533237 0.9695825 -0.29533237 ... -0.26253295 -0.91870505 -0.3880471 ] [ 0.16306075 0.25322238 0.16306075 ... 0.73696977 0.80745 -2.1692996 ] ... [ 1.1789907 2.0941777 1.1789907 ... 0.24838746 0.2847272 -0.12465814] [-1.5594862 -1.5945091 -1.5594862 ... -0.4584395 -1.1389787 1.8340807 ] [ 0.37542754 0.4870207 0.37542754 ... -0.9895482 0.8859911 -0.9536032 ]] [[-2.081408 -1.316685 -2.081408 ... -2.0937195 -1.827661 -0.47460398] [ 0.58771646 -0.9909495 0.58771646 ... -0.80511147 -0.55863416 -2.3184323 ] [-0.7800607 0.70402944 -0.7800607 ... -0.5117541 -1.3234658 -0.04410945] ... [-0.09603491 0.29480088 -0.09603491 ... -0.07164803 0.47837463 -0.46666777] [-0.6039238 0.21586743 -0.6039238 ... -0.4255962 0.12803005 0.32318255] [-2.2466595 -0.38074204 -2.2466595 ... -1.1607692 -0.16240576 -0.9039414 ]] [[ 0.96167785 1.3137045 0.96167785 ... -0.06887972 0.4503332 -0.3284728 ] [-0.7414074 -1.9291614 -0.7414074 ... 0.9520762 -0.90595585 -1.25957 ] [ 0.73876387 -0.24312739 0.73876387 ... -0.08230343 -0.7702607 -1.590291 ] ... [-0.8068745 0.47958577 -0.8068745 ... -0.24835628 -0.05499271 0.44314143] [-1.6674759 -0.05894565 -1.6674759 ... 1.2166779 -0.8717895 0.6139609 ] [ 0.5880938 -0.84939045 0.5880938 ... -1.7351391 1.3790659 1.3689663 ]]]]; ov_res: [[[[ 0.14634764 -0.23439011 0.14634764 ... -0.21704979 -0.51735353 -0.43286556] [-0.29533237 0.9695825 -0.29533237 ... -0.26253295 -0.91870505 -0.3880471 ] [ 0.16306075 0.25322238 0.16306075 ... 
0.73696977 0.80745 -2.1692996 ] ... [ 1.1789907 2.0941777 1.1789907 ... 0.24838746 0.2847272 -0.12465814] [-1.5594862 -1.5945091 -1.5594862 ... -0.4584395 -1.1389787 1.8340807 ] [ 0.37542754 0.4870207 0.37542754 ... -0.9895482 0.8859911 -0.9536032 ]] [[-2.081408 -1.316685 -2.081408 ... -2.0937195 -1.827661 -0.47460398] [ 0.58771646 -0.9909495 0.58771646 ... -0.80511147 -0.55863416 -2.3184323 ] [-0.7800607 0.70402944 -0.7800607 ... -0.5117541 -1.3234658 -0.04410945] ... [-0.09603491 0.29480088 -0.09603491 ... -0.07164803 0.47837463 -0.46666777] [-0.6039238 0.21586743 -0.6039238 ... -0.4255962 0.12803005 0.32318255] [-2.2466595 -0.38074204 -2.2466595 ... -1.1607692 -0.16240576 -0.9039414 ]] [[ 0.96167785 1.3137045 0.96167785 ... -0.06887972 0.4503332 -0.3284728 ] [-0.7414074 -1.9291614 -0.7414074 ... 0.9520762 -0.90595585 -1.25957 ] [ 0.73876387 -0.24312739 0.73876387 ... -0.08230343 -0.7702607 -1.590291 ] ... [-0.8068745 0.47958577 -0.8068745 ... -0.24835628 -0.05499271 0.44314143] [-1.6674759 -0.05894565 -1.6674759 ... 1.2166779 -0.8717895 0.6139609 ] [ 0.5880938 -0.84939045 0.5880938 ... -1.7351391 1.3790659 1.3689663 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 1) - mode:reflect - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5483.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 1]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="reflect"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 2.13841772e+00 -9.68492448e-01 2.13841772e+00 1.74526870e+00 -3.67558509e-01 9.47779655e-01 1.83396399e+00 5.72154284e-01 2.36977026e-01 1.59464821e-01 4.14178103e-01 -5.80949187e-01 1.58569053e-01 -2.11508595e-03 1.75057435e+00] [ 5.90465605e-01 3.70869219e-01 5.90465605e-01 5.79063237e-01 6.73419952e-01 -5.61178088e-01 4.71312761e-01 -8.42422664e-01 -1.17317152e+00 2.62960124e+00 1.85510015e+00 1.05906022e+00 3.58604163e-01 1.38157558e+00 -3.78412336e-01] [ 9.43405032e-01 -2.85733175e+00 9.43405032e-01 -2.64259994e-01 1.41886175e+00 1.85088031e-02 4.77601945e-01 7.63861239e-01 -3.68032932e-01 -7.40372121e-01 1.35637903e+00 1.32248187e+00 -9.90998805e-01 -1.42035067e+00 5.43224551e-02] [ 2.83120781e-01 -7.53794432e-01 2.83120781e-01 5.25268674e-01 1.15931761e+00 9.16124806e-02 1.29886675e+00 -8.39177072e-01 8.67603958e-01 -6.55565858e-01 -8.45397353e-01 5.63629687e-01 -6.95459366e-01 -4.33169067e-01 9.68138516e-01] [ 7.44133592e-01 -8.91824543e-01 7.44133592e-01 -6.92620456e-01 -7.69083500e-01 -1.12294900e+00 -1.07463825e+00 2.76517291e-02 2.89096087e-01 -1.54784179e+00 -6.93502545e-01 3.07832807e-01 6.29808366e-01 9.60122406e-01 1.83529973e-01] [-1.91509438e+00 1.03229916e+00 -1.91509438e+00 9.78125155e-01 1.19815075e+00 -3.00147146e-01 1.59942567e-01 -7.11438656e-01 2.65293813e+00 6.74227238e-01 4.31245893e-01 -5.65099530e-02 -8.99325967e-01 -3.98193806e-01 1.98956579e-01] [ 1.25898421e+00 -5.28382003e-01 1.25898421e+00 6.02507219e-02 1.34894538e+00 -1.38990402e-01 -1.22146815e-01 8.20932388e-01 
-4.86597359e-01 -1.08109200e+00 -1.78409469e+00 -1.75974917e+00 -2.64031053e-01 -1.08358169e+00 -1.16318798e+00] [-7.22677886e-01 5.62311351e-01 -7.22677886e-01 -6.32877648e-01 -5.39523900e-01 -5.94553596e-04 5.16096950e-01 -9.66646433e-01 -9.96716380e-01 -3.57796550e-02 -9.27969292e-02 1.25942275e-01 1.74223512e-01 7.73131788e-01 -1.49675742e-01] [ 1.98656976e-01 -2.22404957e+00 1.98656976e-01 8.34570289e-01 -1.16680048e-01 7.67457068e-01 1.79486036e+00 3.71287853e-01 2.76091874e-01 5.87297864e-02 -3.85834798e-02 1.81774211e+00 -2.09582657e-01 1.42891073e+00 -4.46418554e-01] [ 5.99292278e-01 2.53681850e+00 5.99292278e-01 4.41799194e-01 -1.20582536e-01 -1.11207746e-01 1.88840437e+00 -1.49213409e+00 -7.46476352e-01 3.92365187e-01 2.25581026e+00 -5.00337362e-01 3.52291077e-01 -1.38365954e-01 -1.73157954e+00] [-2.13312328e-01 1.54190540e-01 -2.13312328e-01 7.28264302e-02 -1.13188314e+00 2.54273772e-01 1.51948416e+00 -4.39997435e-01 -8.99492443e-01 6.71977252e-02 6.18167758e-01 2.12131754e-01 4.60679047e-02 -7.43985951e-01 -2.04097748e+00] [-1.17041922e+00 2.46065974e+00 -1.17041922e+00 -1.55222386e-01 1.48926809e-01 7.57902861e-01 -1.99827874e+00 1.31877944e-01 1.54823124e+00 -8.01176190e-01 -2.35251278e-01 1.62701929e+00 -5.21557033e-01 -5.35300933e-02 -1.96570411e-01] [ 4.93126810e-01 -1.83524942e+00 4.93126810e-01 2.82869697e-01 7.28020251e-01 -1.14448023e+00 -1.63531885e-01 8.43430340e-01 5.41749299e-01 -2.94278920e-01 -6.67443156e-01 -2.53602684e-01 -1.62657607e+00 -9.48339701e-01 1.26287985e+00] [ 4.69144255e-01 -5.88819087e-01 4.69144255e-01 8.33893955e-01 -1.43623435e+00 -1.29353094e+00 3.46833616e-01 -1.06064785e+00 3.96710694e-01 9.87841859e-02 4.63278800e-01 1.72304952e+00 1.29212892e+00 6.01491272e-01 9.50960934e-01]] [[ 1.49877891e-01 7.12410212e-01 1.49877891e-01 -3.29045385e-01 -1.45243430e+00 2.99094409e-01 -8.22554767e-01 -3.18535596e-01 6.61947310e-01 2.76902728e-02 -4.47365135e-01 4.34339255e-01 5.96993268e-01 -9.01793182e-01 -7.07484901e-01] [ 
1.17315099e-01 3.91491167e-02 1.17315099e-01 5.13585865e-01 -9.62218791e-02 -1.12403110e-01 -1.97713935e+00 -4.72781032e-01 -3.79616082e-01 -1.12760484e+00 1.70032930e+00 -4.34237748e-01 -5.65056801e-01 5.03584802e-01 -1.42674923e+00] [ 2.96519905e-01 2.86592305e-01 2.96519905e-01 -1.51550007e+00 1.87581182e-01 -3.85843247e-01 1.03648966e-02 -7.61078775e-01 4.25523072e-01 4.23174649e-02 -8.16691697e-01 3.44269395e-01 1.74444124e-01 5.36040902e-01 7.27767348e-02] [ 1.43845588e-01 4.32238787e-01 1.43845588e-01 -1.16077352e+00 -2.46185452e-01 -5.14349282e-01 1.04514587e+00 -2.12502789e+00 -7.33494818e-01 -2.09315205e+00 -3.50759268e-01 -1.80561793e+00 -5.52775800e-01 7.63216838e-02 -4.99807000e-01] [ 9.00506973e-01 3.30853045e-01 9.00506973e-01 3.51783745e-02 7.26018012e-01 4.96271819e-01 3.95331502e-01 -7.88365722e-01 -7.38560796e-01 7.52654731e-01 6.74173474e-01 -4.84213799e-01 4.74586636e-01 1.80792093e+00 -4.25703049e-01] [-4.31665361e-01 -1.67877525e-01 -4.31665361e-01 8.95127118e-01 1.08351815e+00 1.06193376e+00 -1.88183591e-01 4.36433077e-01 -8.97826254e-01 2.57605463e-01 -1.19054634e-02 6.05156645e-02 5.34210838e-02 -1.18723202e+00 1.10778797e+00] [-7.08942235e-01 -3.28930378e-01 -7.08942235e-01 2.03350589e-01 -2.27657989e-01 -9.93534803e-01 9.51556921e-01 -1.67272830e+00 7.48361886e-01 -3.43299687e-01 7.80787349e-01 7.96621144e-01 -1.56598437e+00 4.42155868e-01 -3.07887316e-01] [ 1.15388489e+00 7.95100480e-02 1.15388489e+00 7.60657549e-01 5.77798486e-02 -9.44557428e-01 1.51188254e+00 -1.34713471e+00 1.32225549e+00 6.22094631e-01 -9.14600313e-01 -2.02419564e-01 2.18233347e+00 1.42133808e+00 1.04015636e+00] [ 6.90246880e-01 -5.88914216e-01 6.90246880e-01 1.44772923e+00 1.11935593e-01 -1.20459571e-02 -4.96090621e-01 2.66178727e-01 -4.48443890e-02 -1.53733298e-01 -3.68175626e-01 -2.03820229e-01 -9.23157930e-01 -7.61227250e-01 -6.38714619e-03] [ 1.19146369e-01 -6.46453202e-01 1.19146369e-01 -1.74726439e+00 -3.61849755e-01 1.82706010e+00 -6.67344689e-01 
-4.21911269e-01 -1.27338106e-02 -1.22180128e+00 7.77929962e-01 -9.85205531e-01 1.00532448e+00 2.45392346e+00 -9.46897268e-01] [-1.37323821e+00 -6.64169788e-01 -1.37323821e+00 3.92367244e-01 -3.75547409e-01 -3.26726347e-01 4.36854005e-01 4.44042832e-01 -3.33126664e-01 1.09389508e+00 -9.93327618e-01 1.36424696e+00 -5.97330630e-01 -6.72124267e-01 7.95120776e-01] [-8.84150445e-01 2.04773688e+00 -8.84150445e-01 1.53427052e+00 2.97155399e-02 -3.74238819e-01 9.35957551e-01 3.22421610e-01 1.16006672e-01 -2.54543447e+00 -7.77171254e-01 -5.03235340e-01 -1.65524840e+00 -6.69581024e-03 -7.01994359e-01] [ 5.28669022e-02 -3.81532252e-01 5.28669022e-02 3.42411935e-01 -8.84334683e-01 -1.45826602e+00 8.90980244e-01 -2.11300164e-01 -7.11422414e-02 1.18978417e+00 -1.56341147e+00 -4.19093728e-01 6.21785998e-01 9.23397914e-02 4.57854480e-01] [-2.23858077e-02 7.53004849e-01 -2.23858077e-02 -7.71708846e-01 4.82098013e-01 3.93637210e-01 -7.77989089e-01 -3.25445756e-02 -2.54843663e-02 6.51919067e-01 1.15433717e+00 -2.96049565e-01 -1.37522793e+00 -1.40114439e+00 1.74613222e-01]] [[ 9.48091805e-01 5.27679384e-01 9.48091805e-01 1.08874512e+00 1.37740862e+00 1.86266100e+00 -7.13591993e-01 1.37209463e+00 -1.11804008e+00 1.32721543e+00 -1.01480258e+00 3.08136880e-01 1.26580328e-01 -1.39462936e+00 -1.13846588e+00] [-1.65363395e+00 6.56242192e-01 -1.65363395e+00 -2.09380299e-01 -2.99463034e-01 3.75192761e-01 -3.98861825e-01 9.64295447e-01 3.38728964e-01 -2.53470212e-01 -2.22833470e-01 -6.61431134e-01 -7.07330883e-01 -1.35227513e+00 1.06668182e-01] [ 6.89598560e-01 1.45669937e+00 6.89598560e-01 -1.09585953e+00 -1.33986628e+00 2.00647497e+00 -3.92910570e-01 -1.30132049e-01 -3.03727299e-01 -5.28511941e-01 4.33472306e-01 1.11183310e+00 -8.40389431e-01 -8.79488647e-01 5.72759509e-01] [-4.73058939e-01 -1.15968361e-01 -4.73058939e-01 -3.83457869e-01 2.65565372e+00 7.30620265e-01 1.28984153e+00 -4.96392637e-01 2.38405481e-01 1.76370308e-01 -3.21188778e-01 1.09192705e+00 -1.28952123e-03 -6.34585619e-01 
-5.67992032e-01] [-1.34085131e+00 -1.03406131e-01 -1.34085131e+00 3.99203375e-02 4.98879164e-01 6.40232921e-01 -1.21459186e+00 3.68245333e-01 -1.50410429e-01 6.62467629e-02 -7.60973871e-01 -1.91109478e+00 -1.85677755e+00 1.82420528e+00 6.60147905e-01] [ 6.56478047e-01 7.77184963e-01 6.56478047e-01 1.75265163e-01 1.28609002e-01 5.21608889e-01 1.62978554e+00 4.79664244e-02 -2.27947593e-01 -1.40867245e+00 1.62934110e-01 3.88476938e-01 7.77943075e-01 -8.28478456e-01 -7.80160248e-01] [ 1.92613864e+00 8.01430363e-03 1.92613864e+00 1.00252084e-01 -3.74423742e-01 6.69326246e-01 -1.28138924e+00 9.89280939e-01 -8.90224397e-01 -1.34668124e+00 -7.48710871e-01 7.06282377e-01 3.68867934e-01 2.07389212e+00 -4.14263755e-01] [ 1.78489640e-01 -1.07571438e-01 1.78489640e-01 -2.34951138e-01 1.52965367e-01 1.06321729e-03 3.49766672e-01 -1.04579484e+00 4.72350597e-01 -8.49782377e-02 -1.68350771e-01 9.18159187e-01 -8.91107559e-01 -1.95725597e-02 7.35495210e-01] [ 8.34035277e-02 -7.68186986e-01 8.34035277e-02 1.48458314e+00 6.44947350e-01 7.85290003e-01 -4.30476546e-01 8.89632463e-01 -5.73266387e-01 3.16703796e-01 2.81710029e-01 -3.39995891e-01 6.49989992e-02 1.13991237e+00 -3.05028141e-01] [ 1.73011720e+00 -6.03942633e-01 1.73011720e+00 5.25771737e-01 1.26778674e+00 -1.43946618e-01 3.36239457e-01 1.02917027e+00 1.09263623e+00 -4.44781542e-01 1.12308279e-01 -1.97255373e+00 -7.69784987e-01 -8.75329152e-02 2.57874846e-01] [-8.05329561e-01 -1.77658927e+00 -8.05329561e-01 -1.44747865e+00 -2.08029062e-01 -2.13217044e+00 -1.00736368e+00 -6.41048908e-01 -4.47042972e-01 4.49953564e-02 -6.55861616e-01 1.04282815e-02 -2.26152226e-01 1.64458215e-01 -6.17774844e-01] [ 1.67136490e+00 -1.28950608e+00 1.67136490e+00 1.43237722e+00 1.55843389e+00 1.08591795e+00 -1.25778711e+00 -1.56179261e+00 -3.94140273e-01 -5.47203779e-01 4.91783708e-01 7.88421929e-01 -7.91543961e-01 -8.14261675e-01 -1.03876814e-01] [-8.04965854e-01 -1.68425369e+00 -8.04965854e-01 6.83704317e-01 -8.60297441e-01 -3.47288758e-01 
-1.39058876e+00 6.32131457e-01 -4.43263613e-02 -1.70387900e+00 -7.57112384e-01 2.37808490e+00 -2.85592258e-01 3.00718009e-01 1.43118131e+00] [ 1.28905511e+00 -3.72418761e-01 1.28905511e+00 1.15142894e+00 -1.38823020e+00 -1.41385749e-01 -1.70335412e+00 8.04437459e-01 -1.89046419e+00 2.49644265e-01 7.08830595e-01 -7.97348619e-02 -2.15404010e+00 1.59834161e-01 -1.64572883e+00]] [[ 1.49877891e-01 7.12410212e-01 1.49877891e-01 -3.29045385e-01 -1.45243430e+00 2.99094409e-01 -8.22554767e-01 -3.18535596e-01 6.61947310e-01 2.76902728e-02 -4.47365135e-01 4.34339255e-01 5.96993268e-01 -9.01793182e-01 -7.07484901e-01] [ 1.17315099e-01 3.91491167e-02 1.17315099e-01 5.13585865e-01 -9.62218791e-02 -1.12403110e-01 -1.97713935e+00 -4.72781032e-01 -3.79616082e-01 -1.12760484e+00 1.70032930e+00 -4.34237748e-01 -5.65056801e-01 5.03584802e-01 -1.42674923e+00] [ 2.96519905e-01 2.86592305e-01 2.96519905e-01 -1.51550007e+00 1.87581182e-01 -3.85843247e-01 1.03648966e-02 -7.61078775e-01 4.25523072e-01 4.23174649e-02 -8.16691697e-01 3.44269395e-01 1.74444124e-01 5.36040902e-01 7.27767348e-02] [ 1.43845588e-01 4.32238787e-01 1.43845588e-01 -1.16077352e+00 -2.46185452e-01 -5.14349282e-01 1.04514587e+00 -2.12502789e+00 -7.33494818e-01 -2.09315205e+00 -3.50759268e-01 -1.80561793e+00 -5.52775800e-01 7.63216838e-02 -4.99807000e-01] [ 9.00506973e-01 3.30853045e-01 9.00506973e-01 3.51783745e-02 7.26018012e-01 4.96271819e-01 3.95331502e-01 -7.88365722e-01 -7.38560796e-01 7.52654731e-01 6.74173474e-01 -4.84213799e-01 4.74586636e-01 1.80792093e+00 -4.25703049e-01] [-4.31665361e-01 -1.67877525e-01 -4.31665361e-01 8.95127118e-01 1.08351815e+00 1.06193376e+00 -1.88183591e-01 4.36433077e-01 -8.97826254e-01 2.57605463e-01 -1.19054634e-02 6.05156645e-02 5.34210838e-02 -1.18723202e+00 1.10778797e+00] [-7.08942235e-01 -3.28930378e-01 -7.08942235e-01 2.03350589e-01 -2.27657989e-01 -9.93534803e-01 9.51556921e-01 -1.67272830e+00 7.48361886e-01 -3.43299687e-01 7.80787349e-01 7.96621144e-01 -1.56598437e+00 
4.42155868e-01 -3.07887316e-01] [ 1.15388489e+00 7.95100480e-02 1.15388489e+00 7.60657549e-01 5.77798486e-02 -9.44557428e-01 1.51188254e+00 -1.34713471e+00 1.32225549e+00 6.22094631e-01 -9.14600313e-01 -2.02419564e-01 2.18233347e+00 1.42133808e+00 1.04015636e+00] [ 6.90246880e-01 -5.88914216e-01 6.90246880e-01 1.44772923e+00 1.11935593e-01 -1.20459571e-02 -4.96090621e-01 2.66178727e-01 -4.48443890e-02 -1.53733298e-01 -3.68175626e-01 -2.03820229e-01 -9.23157930e-01 -7.61227250e-01 -6.38714619e-03] [ 1.19146369e-01 -6.46453202e-01 1.19146369e-01 -1.74726439e+00 -3.61849755e-01 1.82706010e+00 -6.67344689e-01 -4.21911269e-01 -1.27338106e-02 -1.22180128e+00 7.77929962e-01 -9.85205531e-01 1.00532448e+00 2.45392346e+00 -9.46897268e-01] [-1.37323821e+00 -6.64169788e-01 -1.37323821e+00 3.92367244e-01 -3.75547409e-01 -3.26726347e-01 4.36854005e-01 4.44042832e-01 -3.33126664e-01 1.09389508e+00 -9.93327618e-01 1.36424696e+00 -5.97330630e-01 -6.72124267e-01 7.95120776e-01] [-8.84150445e-01 2.04773688e+00 -8.84150445e-01 1.53427052e+00 2.97155399e-02 -3.74238819e-01 9.35957551e-01 3.22421610e-01 1.16006672e-01 -2.54543447e+00 -7.77171254e-01 -5.03235340e-01 -1.65524840e+00 -6.69581024e-03 -7.01994359e-01] [ 5.28669022e-02 -3.81532252e-01 5.28669022e-02 3.42411935e-01 -8.84334683e-01 -1.45826602e+00 8.90980244e-01 -2.11300164e-01 -7.11422414e-02 1.18978417e+00 -1.56341147e+00 -4.19093728e-01 6.21785998e-01 9.23397914e-02 4.57854480e-01] [-2.23858077e-02 7.53004849e-01 -2.23858077e-02 -7.71708846e-01 4.82098013e-01 3.93637210e-01 -7.77989089e-01 -3.25445756e-02 -2.54843663e-02 6.51919067e-01 1.15433717e+00 -2.96049565e-01 -1.37522793e+00 -1.40114439e+00 1.74613222e-01]]]]; ov_res: [[[[ 2.13841772e+00 -9.68492448e-01 2.13841772e+00 1.74526870e+00 -3.67558509e-01 9.47779655e-01 1.83396399e+00 5.72154284e-01 2.36977026e-01 1.59464821e-01 4.14178103e-01 -5.80949187e-01 1.58569053e-01 -2.11508595e-03 1.75057435e+00] [ 5.90465605e-01 3.70869219e-01 5.90465605e-01 5.79063237e-01 
6.73419952e-01 -5.61178088e-01 4.71312761e-01 -8.42422664e-01 -1.17317152e+00 2.62960124e+00 1.85510015e+00 1.05906022e+00 3.58604163e-01 1.38157558e+00 -3.78412336e-01] [ 9.43405032e-01 -2.85733175e+00 9.43405032e-01 -2.64259994e-01 1.41886175e+00 1.85088031e-02 4.77601945e-01 7.63861239e-01 -3.68032932e-01 -7.40372121e-01 1.35637903e+00 1.32248187e+00 -9.90998805e-01 -1.42035067e+00 5.43224551e-02] [ 2.83120781e-01 -7.53794432e-01 2.83120781e-01 5.25268674e-01 1.15931761e+00 9.16124806e-02 1.29886675e+00 -8.39177072e-01 8.67603958e-01 -6.55565858e-01 -8.45397353e-01 5.63629687e-01 -6.95459366e-01 -4.33169067e-01 9.68138516e-01] [ 7.44133592e-01 -8.91824543e-01 7.44133592e-01 -6.92620456e-01 -7.69083500e-01 -1.12294900e+00 -1.07463825e+00 2.76517291e-02 2.89096087e-01 -1.54784179e+00 -6.93502545e-01 3.07832807e-01 6.29808366e-01 9.60122406e-01 1.83529973e-01] [-1.91509438e+00 1.03229916e+00 -1.91509438e+00 9.78125155e-01 1.19815075e+00 -3.00147146e-01 1.59942567e-01 -7.11438656e-01 2.65293813e+00 6.74227238e-01 4.31245893e-01 -5.65099530e-02 -8.99325967e-01 -3.98193806e-01 1.98956579e-01] [ 1.25898421e+00 -5.28382003e-01 1.25898421e+00 6.02507219e-02 1.34894538e+00 -1.38990402e-01 -1.22146815e-01 8.20932388e-01 -4.86597359e-01 -1.08109200e+00 -1.78409469e+00 -1.75974917e+00 -2.64031053e-01 -1.08358169e+00 -1.16318798e+00] [-7.22677886e-01 5.62311351e-01 -7.22677886e-01 -6.32877648e-01 -5.39523900e-01 -5.94553596e-04 5.16096950e-01 -9.66646433e-01 -9.96716380e-01 -3.57796550e-02 -9.27969292e-02 1.25942275e-01 1.74223512e-01 7.73131788e-01 -1.49675742e-01] [ 1.98656976e-01 -2.22404957e+00 1.98656976e-01 8.34570289e-01 -1.16680048e-01 7.67457068e-01 1.79486036e+00 3.71287853e-01 2.76091874e-01 5.87297864e-02 -3.85834798e-02 1.81774211e+00 -2.09582657e-01 1.42891073e+00 -4.46418554e-01] [ 5.99292278e-01 2.53681850e+00 5.99292278e-01 4.41799194e-01 -1.20582536e-01 -1.11207746e-01 1.88840437e+00 -1.49213409e+00 -7.46476352e-01 3.92365187e-01 2.25581026e+00 
-5.00337362e-01 3.52291077e-01 -1.38365954e-01 -1.73157954e+00] [-2.13312328e-01 1.54190540e-01 -2.13312328e-01 7.28264302e-02 -1.13188314e+00 2.54273772e-01 1.51948416e+00 -4.39997435e-01 -8.99492443e-01 6.71977252e-02 6.18167758e-01 2.12131754e-01 4.60679047e-02 -7.43985951e-01 -2.04097748e+00] [-1.17041922e+00 2.46065974e+00 -1.17041922e+00 -1.55222386e-01 1.48926809e-01 7.57902861e-01 -1.99827874e+00 1.31877944e-01 1.54823124e+00 -8.01176190e-01 -2.35251278e-01 1.62701929e+00 -5.21557033e-01 -5.35300933e-02 -1.96570411e-01] [ 4.93126810e-01 -1.83524942e+00 4.93126810e-01 2.82869697e-01 7.28020251e-01 -1.14448023e+00 -1.63531885e-01 8.43430340e-01 5.41749299e-01 -2.94278920e-01 -6.67443156e-01 -2.53602684e-01 -1.62657607e+00 -9.48339701e-01 1.26287985e+00] [ 4.69144255e-01 -5.88819087e-01 4.69144255e-01 8.33893955e-01 -1.43623435e+00 -1.29353094e+00 3.46833616e-01 -1.06064785e+00 3.96710694e-01 9.87841859e-02 4.63278800e-01 1.72304952e+00 1.29212892e+00 6.01491272e-01 9.50960934e-01]] [[ 1.49877891e-01 7.12410212e-01 1.49877891e-01 -3.29045385e-01 -1.45243430e+00 2.99094409e-01 -8.22554767e-01 -3.18535596e-01 6.61947310e-01 2.76902728e-02 -4.47365135e-01 4.34339255e-01 5.96993268e-01 -9.01793182e-01 -7.07484901e-01] [ 1.17315099e-01 3.91491167e-02 1.17315099e-01 5.13585865e-01 -9.62218791e-02 -1.12403110e-01 -1.97713935e+00 -4.72781032e-01 -3.79616082e-01 -1.12760484e+00 1.70032930e+00 -4.34237748e-01 -5.65056801e-01 5.03584802e-01 -1.42674923e+00] [ 2.96519905e-01 2.86592305e-01 2.96519905e-01 -1.51550007e+00 1.87581182e-01 -3.85843247e-01 1.03648966e-02 -7.61078775e-01 4.25523072e-01 4.23174649e-02 -8.16691697e-01 3.44269395e-01 1.74444124e-01 5.36040902e-01 7.27767348e-02] [ 1.43845588e-01 4.32238787e-01 1.43845588e-01 -1.16077352e+00 -2.46185452e-01 -5.14349282e-01 1.04514587e+00 -2.12502789e+00 -7.33494818e-01 -2.09315205e+00 -3.50759268e-01 -1.80561793e+00 -5.52775800e-01 7.63216838e-02 -4.99807000e-01] [ 9.00506973e-01 3.30853045e-01 9.00506973e-01 
3.51783745e-02 7.26018012e-01 4.96271819e-01 3.95331502e-01 -7.88365722e-01 -7.38560796e-01 7.52654731e-01 6.74173474e-01 -4.84213799e-01 4.74586636e-01 1.80792093e+00 -4.25703049e-01] [-4.31665361e-01 -1.67877525e-01 -4.31665361e-01 8.95127118e-01 1.08351815e+00 1.06193376e+00 -1.88183591e-01 4.36433077e-01 -8.97826254e-01 2.57605463e-01 -1.19054634e-02 6.05156645e-02 5.34210838e-02 -1.18723202e+00 1.10778797e+00] [-7.08942235e-01 -3.28930378e-01 -7.08942235e-01 2.03350589e-01 -2.27657989e-01 -9.93534803e-01 9.51556921e-01 -1.67272830e+00 7.48361886e-01 -3.43299687e-01 7.80787349e-01 7.96621144e-01 -1.56598437e+00 4.42155868e-01 -3.07887316e-01] [ 1.15388489e+00 7.95100480e-02 1.15388489e+00 7.60657549e-01 5.77798486e-02 -9.44557428e-01 1.51188254e+00 -1.34713471e+00 1.32225549e+00 6.22094631e-01 -9.14600313e-01 -2.02419564e-01 2.18233347e+00 1.42133808e+00 1.04015636e+00] [ 6.90246880e-01 -5.88914216e-01 6.90246880e-01 1.44772923e+00 1.11935593e-01 -1.20459571e-02 -4.96090621e-01 2.66178727e-01 -4.48443890e-02 -1.53733298e-01 -3.68175626e-01 -2.03820229e-01 -9.23157930e-01 -7.61227250e-01 -6.38714619e-03] [ 1.19146369e-01 -6.46453202e-01 1.19146369e-01 -1.74726439e+00 -3.61849755e-01 1.82706010e+00 -6.67344689e-01 -4.21911269e-01 -1.27338106e-02 -1.22180128e+00 7.77929962e-01 -9.85205531e-01 1.00532448e+00 2.45392346e+00 -9.46897268e-01] [-1.37323821e+00 -6.64169788e-01 -1.37323821e+00 3.92367244e-01 -3.75547409e-01 -3.26726347e-01 4.36854005e-01 4.44042832e-01 -3.33126664e-01 1.09389508e+00 -9.93327618e-01 1.36424696e+00 -5.97330630e-01 -6.72124267e-01 7.95120776e-01] [-8.84150445e-01 2.04773688e+00 -8.84150445e-01 1.53427052e+00 2.97155399e-02 -3.74238819e-01 9.35957551e-01 3.22421610e-01 1.16006672e-01 -2.54543447e+00 -7.77171254e-01 -5.03235340e-01 -1.65524840e+00 -6.69581024e-03 -7.01994359e-01] [ 5.28669022e-02 -3.81532252e-01 5.28669022e-02 3.42411935e-01 -8.84334683e-01 -1.45826602e+00 8.90980244e-01 -2.11300164e-01 -7.11422414e-02 1.18978417e+00 
-1.56341147e+00 -4.19093728e-01 6.21785998e-01 9.23397914e-02 4.57854480e-01] [-2.23858077e-02 7.53004849e-01 -2.23858077e-02 -7.71708846e-01 4.82098013e-01 3.93637210e-01 -7.77989089e-01 -3.25445756e-02 -2.54843663e-02 6.51919067e-01 1.15433717e+00 -2.96049565e-01 -1.37522793e+00 -1.40114439e+00 1.74613222e-01]] [[ 9.48091805e-01 5.27679384e-01 9.48091805e-01 1.08874512e+00 1.37740862e+00 1.86266100e+00 -7.13591993e-01 1.37209463e+00 -1.11804008e+00 1.32721543e+00 -1.01480258e+00 3.08136880e-01 1.26580328e-01 -1.39462936e+00 -1.13846588e+00] [-1.65363395e+00 6.56242192e-01 -1.65363395e+00 -2.09380299e-01 -2.99463034e-01 3.75192761e-01 -3.98861825e-01 9.64295447e-01 3.38728964e-01 -2.53470212e-01 -2.22833470e-01 -6.61431134e-01 -7.07330883e-01 -1.35227513e+00 1.06668182e-01] [ 6.89598560e-01 1.45669937e+00 6.89598560e-01 -1.09585953e+00 -1.33986628e+00 2.00647497e+00 -3.92910570e-01 -1.30132049e-01 -3.03727299e-01 -5.28511941e-01 4.33472306e-01 1.11183310e+00 -8.40389431e-01 -8.79488647e-01 5.72759509e-01] [-4.73058939e-01 -1.15968361e-01 -4.73058939e-01 -3.83457869e-01 2.65565372e+00 7.30620265e-01 1.28984153e+00 -4.96392637e-01 2.38405481e-01 1.76370308e-01 -3.21188778e-01 1.09192705e+00 -1.28952123e-03 -6.34585619e-01 -5.67992032e-01] [-1.34085131e+00 -1.03406131e-01 -1.34085131e+00 3.99203375e-02 4.98879164e-01 6.40232921e-01 -1.21459186e+00 3.68245333e-01 -1.50410429e-01 6.62467629e-02 -7.60973871e-01 -1.91109478e+00 -1.85677755e+00 1.82420528e+00 6.60147905e-01] [ 6.56478047e-01 7.77184963e-01 6.56478047e-01 1.75265163e-01 1.28609002e-01 5.21608889e-01 1.62978554e+00 4.79664244e-02 -2.27947593e-01 -1.40867245e+00 1.62934110e-01 3.88476938e-01 7.77943075e-01 -8.28478456e-01 -7.80160248e-01] [ 1.92613864e+00 8.01430363e-03 1.92613864e+00 1.00252084e-01 -3.74423742e-01 6.69326246e-01 -1.28138924e+00 9.89280939e-01 -8.90224397e-01 -1.34668124e+00 -7.48710871e-01 7.06282377e-01 3.68867934e-01 2.07389212e+00 -4.14263755e-01] [ 1.78489640e-01 -1.07571438e-01 
1.78489640e-01 -2.34951138e-01 1.52965367e-01 1.06321729e-03 3.49766672e-01 -1.04579484e+00 4.72350597e-01 -8.49782377e-02 -1.68350771e-01 9.18159187e-01 -8.91107559e-01 -1.95725597e-02 7.35495210e-01] [ 8.34035277e-02 -7.68186986e-01 8.34035277e-02 1.48458314e+00 6.44947350e-01 7.85290003e-01 -4.30476546e-01 8.89632463e-01 -5.73266387e-01 3.16703796e-01 2.81710029e-01 -3.39995891e-01 6.49989992e-02 1.13991237e+00 -3.05028141e-01] [ 1.73011720e+00 -6.03942633e-01 1.73011720e+00 5.25771737e-01 1.26778674e+00 -1.43946618e-01 3.36239457e-01 1.02917027e+00 1.09263623e+00 -4.44781542e-01 1.12308279e-01 -1.97255373e+00 -7.69784987e-01 -8.75329152e-02 2.57874846e-01] [-8.05329561e-01 -1.77658927e+00 -8.05329561e-01 -1.44747865e+00 -2.08029062e-01 -2.13217044e+00 -1.00736368e+00 -6.41048908e-01 -4.47042972e-01 4.49953564e-02 -6.55861616e-01 1.04282815e-02 -2.26152226e-01 1.64458215e-01 -6.17774844e-01] [ 1.67136490e+00 -1.28950608e+00 1.67136490e+00 1.43237722e+00 1.55843389e+00 1.08591795e+00 -1.25778711e+00 -1.56179261e+00 -3.94140273e-01 -5.47203779e-01 4.91783708e-01 7.88421929e-01 -7.91543961e-01 -8.14261675e-01 -1.03876814e-01] [-8.04965854e-01 -1.68425369e+00 -8.04965854e-01 6.83704317e-01 -8.60297441e-01 -3.47288758e-01 -1.39058876e+00 6.32131457e-01 -4.43263613e-02 -1.70387900e+00 -7.57112384e-01 2.37808490e+00 -2.85592258e-01 3.00718009e-01 1.43118131e+00] [ 1.28905511e+00 -3.72418761e-01 1.28905511e+00 1.15142894e+00 -1.38823020e+00 -1.41385749e-01 -1.70335412e+00 8.04437459e-01 -1.89046419e+00 2.49644265e-01 7.08830595e-01 -7.97348619e-02 -2.15404010e+00 1.59834161e-01 -1.64572883e+00]] [[ 1.49877891e-01 7.12410212e-01 1.49877891e-01 -3.29045385e-01 -1.45243430e+00 2.99094409e-01 -8.22554767e-01 -3.18535596e-01 6.61947310e-01 2.76902728e-02 -4.47365135e-01 4.34339255e-01 5.96993268e-01 -9.01793182e-01 -7.07484901e-01] [ 1.17315099e-01 3.91491167e-02 1.17315099e-01 5.13585865e-01 -9.62218791e-02 -1.12403110e-01 -1.97713935e+00 -4.72781032e-01 -3.79616082e-01 
-1.12760484e+00 1.70032930e+00 -4.34237748e-01 -5.65056801e-01 5.03584802e-01 -1.42674923e+00] [ 2.96519905e-01 2.86592305e-01 2.96519905e-01 -1.51550007e+00 1.87581182e-01 -3.85843247e-01 1.03648966e-02 -7.61078775e-01 4.25523072e-01 4.23174649e-02 -8.16691697e-01 3.44269395e-01 1.74444124e-01 5.36040902e-01 7.27767348e-02] [ 1.43845588e-01 4.32238787e-01 1.43845588e-01 -1.16077352e+00 -2.46185452e-01 -5.14349282e-01 1.04514587e+00 -2.12502789e+00 -7.33494818e-01 -2.09315205e+00 -3.50759268e-01 -1.80561793e+00 -5.52775800e-01 7.63216838e-02 -4.99807000e-01] [ 9.00506973e-01 3.30853045e-01 9.00506973e-01 3.51783745e-02 7.26018012e-01 4.96271819e-01 3.95331502e-01 -7.88365722e-01 -7.38560796e-01 7.52654731e-01 6.74173474e-01 -4.84213799e-01 4.74586636e-01 1.80792093e+00 -4.25703049e-01] [-4.31665361e-01 -1.67877525e-01 -4.31665361e-01 8.95127118e-01 1.08351815e+00 1.06193376e+00 -1.88183591e-01 4.36433077e-01 -8.97826254e-01 2.57605463e-01 -1.19054634e-02 6.05156645e-02 5.34210838e-02 -1.18723202e+00 1.10778797e+00] [-7.08942235e-01 -3.28930378e-01 -7.08942235e-01 2.03350589e-01 -2.27657989e-01 -9.93534803e-01 9.51556921e-01 -1.67272830e+00 7.48361886e-01 -3.43299687e-01 7.80787349e-01 7.96621144e-01 -1.56598437e+00 4.42155868e-01 -3.07887316e-01] [ 1.15388489e+00 7.95100480e-02 1.15388489e+00 7.60657549e-01 5.77798486e-02 -9.44557428e-01 1.51188254e+00 -1.34713471e+00 1.32225549e+00 6.22094631e-01 -9.14600313e-01 -2.02419564e-01 2.18233347e+00 1.42133808e+00 1.04015636e+00] [ 6.90246880e-01 -5.88914216e-01 6.90246880e-01 1.44772923e+00 1.11935593e-01 -1.20459571e-02 -4.96090621e-01 2.66178727e-01 -4.48443890e-02 -1.53733298e-01 -3.68175626e-01 -2.03820229e-01 -9.23157930e-01 -7.61227250e-01 -6.38714619e-03] [ 1.19146369e-01 -6.46453202e-01 1.19146369e-01 -1.74726439e+00 -3.61849755e-01 1.82706010e+00 -6.67344689e-01 -4.21911269e-01 -1.27338106e-02 -1.22180128e+00 7.77929962e-01 -9.85205531e-01 1.00532448e+00 2.45392346e+00 -9.46897268e-01] [-1.37323821e+00 
-6.64169788e-01 -1.37323821e+00 3.92367244e-01 -3.75547409e-01 -3.26726347e-01 4.36854005e-01 4.44042832e-01 -3.33126664e-01 1.09389508e+00 -9.93327618e-01 1.36424696e+00 -5.97330630e-01 -6.72124267e-01 7.95120776e-01] [-8.84150445e-01 2.04773688e+00 -8.84150445e-01 1.53427052e+00 2.97155399e-02 -3.74238819e-01 9.35957551e-01 3.22421610e-01 1.16006672e-01 -2.54543447e+00 -7.77171254e-01 -5.03235340e-01 -1.65524840e+00 -6.69581024e-03 -7.01994359e-01] [ 5.28669022e-02 -3.81532252e-01 5.28669022e-02 3.42411935e-01 -8.84334683e-01 -1.45826602e+00 8.90980244e-01 -2.11300164e-01 -7.11422414e-02 1.18978417e+00 -1.56341147e+00 -4.19093728e-01 6.21785998e-01 9.23397914e-02 4.57854480e-01] [-2.23858077e-02 7.53004849e-01 -2.23858077e-02 -7.71708846e-01 4.82098013e-01 3.93637210e-01 -7.77989089e-01 -3.25445756e-02 -2.54843663e-02 6.51919067e-01 1.15433717e+00 -2.96049565e-01 -1.37522793e+00 -1.40114439e+00 1.74613222e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(0, 0, 0, 0, 0, 0) - mode:reflect - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5485.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0, 0, 0, 0, 0]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="reflect"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[-4.75784928e-01 6.29138291e-01 1.04549313e+00 1.66967034e+00 -6.39285207e-01 -7.61120617e-02 -1.55317128e+00 -1.33492208e+00 1.25792825e+00 -6.98363125e-01 -2.54940003e-01 -2.34801745e+00 -7.56072819e-01 -2.99929202e-01] [ 1.24255829e-01 -9.08955991e-01 3.46494108e-01 -3.89134258e-01 2.19691753e-01 9.26803708e-01 6.74441993e-01 -1.23440433e+00 2.20540476e+00 -1.31205904e+00 9.53925371e-01 3.89973015e-01 -7.40675479e-02 2.35846877e+00] [-9.93423283e-01 -3.61170530e-01 3.36480707e-01 -8.14734817e-01 -7.87277699e-01 -6.68797791e-01 -7.66381681e-01 8.02170694e-01 -8.94693375e-01 7.58720934e-01 -7.79411554e-01 -1.05614532e-02 -1.27499509e+00 2.41881415e-01] [-1.12518024e+00 4.00148034e-01 -9.33108747e-01 -5.68121850e-01 -3.66395891e-01 -4.67640847e-01 1.77968368e-01 -1.11316419e+00 -2.31123161e+00 -9.00905609e-01 1.68835211e+00 8.78264308e-01 -1.41281986e+00 -2.13467979e+00] [-6.72031105e-01 -1.50202167e+00 -1.15928566e+00 6.84937418e-01 -1.49203435e-01 7.83025920e-02 -9.45458651e-01 -1.48114547e-01 -1.93102932e+00 9.84994352e-01 5.05493255e-03 -1.95365977e+00 -5.71791887e-01 2.19593692e+00] [-1.22617602e-01 1.36504602e+00 -2.79002994e-01 -7.69757926e-01 1.15425363e-01 6.19285822e-01 -2.57888412e+00 -4.58012968e-01 -2.07191873e+00 1.03272545e+00 -1.02945495e+00 -5.33569515e-01 -5.91169596e-01 -2.16787243e+00] [-4.67695221e-02 4.64605212e-01 4.88115966e-01 1.13702261e+00 -7.96538830e-01 -1.53018379e+00 3.75032485e-01 -2.37470984e+00 -1.68163633e+00 2.46027160e+00 7.01982319e-01 -3.37171674e-01 1.24005474e-01 
1.26305258e+00] [-6.39632940e-01 -1.36894882e+00 -1.43521476e+00 7.92701617e-02 1.75883308e-01 -9.76368368e-01 -4.26510066e-01 -5.78984916e-01 3.06483507e-01 1.07975769e+00 -1.42334390e+00 -9.14856017e-01 1.99890628e-01 5.63475072e-01] [-7.99786210e-01 8.75662923e-01 -6.65841818e-01 3.46697211e-01 -2.10740507e-01 5.70128858e-01 4.64656875e-02 -2.16591334e+00 -1.50931728e+00 1.80662483e-01 3.14113915e-01 -7.82258749e-01 1.84814423e-01 -1.42821538e+00] [ 7.62629807e-02 3.25739831e-01 -8.44347239e-01 -1.84908211e-02 3.55702162e-01 -3.51978272e-01 2.17296720e+00 1.06682919e-01 -1.43080616e+00 6.56124592e-01 -2.04439902e+00 3.14094663e-01 -1.15974867e+00 -8.01288486e-02] [ 7.78496265e-02 -1.67683184e+00 -1.09806347e+00 -4.64809872e-02 -1.62417412e-01 -3.82756740e-01 -9.55981433e-01 -3.62244934e-01 6.78861141e-01 1.25371587e+00 5.81050634e-01 -9.97353673e-01 6.89637899e-01 4.29654658e-01] [ 1.52798951e-01 5.54223359e-01 -1.55227304e+00 -1.94894516e+00 5.51464796e-01 3.16392899e-01 -1.30342221e+00 1.25780308e+00 -1.66597575e-01 -1.18274093e+00 -3.49919140e-01 7.73252010e-01 1.60696375e+00 -7.07434177e-01] [-2.84322444e-02 1.40070796e+00 1.02763915e+00 -1.76264465e+00 2.53861010e-01 1.97849125e-01 6.42377973e-01 1.37170112e+00 -1.67114723e+00 -8.72637331e-01 -1.85541058e+00 1.63892412e+00 -1.81039944e-01 -3.92965496e-01] [ 7.95512378e-01 1.06482995e+00 -3.61426055e-01 4.24869746e-01 -4.21821177e-01 -1.02182591e+00 -2.72556454e-01 1.29449511e+00 -5.69045655e-02 1.22853410e+00 3.44083697e-01 1.34012654e-01 4.29585487e-01 -9.26899433e-01]] [[-3.61465782e-01 -6.29723608e-01 8.93101934e-03 -2.79608443e-02 -1.01971078e+00 1.22306299e+00 1.90649951e+00 1.15705216e+00 -1.79687068e-02 -2.36005425e-01 2.78526568e+00 3.03032458e-01 3.42757910e-01 -6.81788564e-01] [-3.47257912e-01 -1.32159144e-01 1.28738558e+00 -2.54553580e+00 -2.51040435e+00 2.91896481e-02 -1.12075710e+00 8.88411850e-02 -4.96359020e-01 1.25485826e+00 1.08551227e-01 8.98223102e-01 4.19248175e-03 -7.09632784e-02] [ 
2.80213654e-02 -1.38708159e-01 -4.13897842e-01 -5.75334251e-01 7.23815441e-01 1.01112294e+00 1.81420493e+00 6.47269309e-01 -1.61534876e-01 -2.79188734e-02 1.42231774e+00 -1.59700572e-01 5.68285346e-01 8.45158756e-01] [ 1.94316000e-01 -3.16220552e-01 6.04407899e-02 1.48434627e+00 5.37686706e-01 -4.99061823e-01 6.36968687e-02 1.65705977e-03 3.33319336e-01 -5.87811530e-01 6.94710463e-02 -2.57263392e-01 -6.62981033e-01 1.60316014e+00] [ 9.23178494e-02 1.60738516e+00 -5.90534091e-01 1.40647566e+00 1.52554750e+00 -1.05449355e+00 -1.50093091e+00 -1.05376244e+00 3.95516694e-01 1.11951387e+00 1.67906368e+00 1.47106433e+00 -2.23376095e-01 1.80219805e+00] [-1.16999459e+00 1.80579233e+00 -1.74376622e-01 5.06210506e-01 1.25011086e+00 -4.68114823e-01 8.70677590e-01 1.93320364e-02 -1.38284117e-01 4.76378530e-01 7.57348478e-01 1.19945395e+00 6.56021953e-01 7.31848538e-01] [ 1.37756371e+00 1.15770742e-01 -2.98404753e-01 -1.20519087e-01 -7.77339339e-01 -1.30587661e+00 5.12601793e-01 2.03522444e-01 1.06239522e+00 8.17875266e-01 -1.68713284e+00 -5.95797122e-01 1.08575368e+00 2.45775402e-01] [ 6.27881944e-01 1.06092192e-01 5.04812241e-01 -3.04960966e-01 -7.07863510e-01 -4.62514967e-01 -1.49566785e-01 9.91081595e-01 1.43798971e+00 3.22156549e-01 -1.04388213e+00 1.21226346e+00 8.84459913e-01 2.64431655e-01] [-8.13871622e-01 5.98652363e-01 5.97728372e-01 1.28411546e-01 -1.52251482e+00 3.83709610e-01 1.58797443e-01 -6.00545883e-01 9.35585737e-01 -3.06362510e+00 9.85094666e-01 -2.82742023e-01 1.45685637e+00 1.35277450e+00] [-4.44151103e-01 1.05390942e+00 1.07836530e-01 9.98072326e-01 -1.10478330e+00 -5.55822909e-01 -5.39197028e-01 -5.87926626e-01 2.01631486e-01 -1.40470541e+00 -6.67488202e-02 -7.54788637e-01 -5.36825657e-01 1.00832248e+00] [-1.96625561e-01 6.67626500e-01 1.76631749e+00 2.06943822e+00 -5.30458033e-01 1.98445046e+00 3.39242190e-01 3.65577519e-01 1.44365299e+00 8.87197435e-01 -1.16427743e+00 -9.30313230e-01 6.94640756e-01 3.47125560e-01] [ 1.64887094e+00 8.51930201e-01 
-5.99679053e-01 -1.03742671e+00 9.59690094e-01 -3.19617599e-01 -1.65289909e-01 -6.26522124e-01 1.03620398e+00 -1.17866230e+00 -6.35855138e-01 -7.14941442e-01 4.36290175e-01 -2.83828437e-01] [ 1.75147489e-01 -1.88281983e-01 1.98594177e+00 1.69434980e-01 -1.45570129e-01 3.10295522e-01 4.62997139e-01 7.31379911e-02 1.27584592e-01 1.67326069e+00 -2.28269291e+00 -8.09751391e-01 9.28295255e-01 -1.15240479e+00] [ 8.81747436e-03 1.07140493e+00 -7.24908531e-01 1.51877373e-01 7.90061235e-01 1.14864439e-01 -1.42008686e+00 -3.37175056e-02 6.73547924e-01 1.30606127e+00 8.00350547e-01 4.62330014e-01 1.48442507e+00 1.23465931e+00]] [[-3.15731496e-01 8.55346024e-01 -2.13594627e+00 6.14394188e-01 8.91971231e-01 1.03322172e+00 1.52455759e+00 1.28766322e+00 1.21174908e+00 -3.25456709e-01 4.70025182e-01 1.49304950e+00 -1.94713265e-01 1.07355797e+00] [ 3.73194933e-01 5.75620830e-01 -3.78411025e-01 -1.74708053e-01 -6.74301565e-01 2.88526952e-01 1.62296593e+00 9.22996998e-01 5.22734523e-01 -3.41668040e-01 -7.80516326e-01 5.74857891e-01 -7.74898333e-03 2.26644173e-01] [-1.01081395e+00 4.31125835e-02 3.34992379e-01 -8.90078187e-01 -1.48613620e+00 5.93768418e-01 4.98177201e-01 -9.46449816e-01 2.46809316e+00 6.07459486e-01 1.83462083e-01 1.06546342e+00 7.63221681e-01 -5.37832975e-01] [-9.33923244e-01 2.76682496e-01 -4.46554542e-01 1.05396354e+00 1.93627918e+00 -5.04998505e-01 5.54984510e-01 1.09215450e+00 -3.34439129e-02 6.76081240e-01 -1.02954257e+00 1.97560191e+00 9.89266157e-01 -4.17920500e-01] [-9.72506106e-02 -1.10901378e-01 1.23222399e+00 1.07099020e+00 -3.18890721e-01 2.61908263e-01 -5.68041563e-01 -2.32044220e-01 -2.64845181e+00 1.83546275e-01 -9.49931264e-01 4.61023003e-01 -5.51855624e-01 6.83355570e-01] [-3.11239314e+00 -1.86408639e-01 3.32808904e-02 8.93398225e-01 2.47493386e-01 1.02824934e-01 -1.65345359e+00 7.27236748e-01 -2.34108472e+00 1.81338966e+00 -1.12578642e+00 -1.29310656e+00 -6.18774772e-01 -2.30594262e-01] [-1.19606149e+00 -1.03004241e+00 7.10591078e-01 8.33089471e-01 
6.70648515e-01 -2.16119051e+00 -1.08115792e+00 5.08610606e-01 5.31939268e-01 4.45750743e-01 -1.32287011e-01 6.69908702e-01 -2.10960007e+00 -1.70906508e+00] [-1.32650983e+00 -7.68538296e-01 -1.00560308e+00 -1.33991158e+00 1.38692951e+00 1.77615154e+00 5.81916511e-01 -1.33665133e+00 9.66661811e-01 1.24389935e+00 -1.51926994e-01 -5.47234595e-01 -9.32601571e-01 2.21485043e+00] [-1.95028067e-01 1.81367978e-01 -2.18591666e+00 -2.14159489e+00 5.44602811e-01 5.13688564e-01 -5.69414616e-01 1.26925957e+00 -2.14776969e+00 -1.04219683e-01 3.08578551e-01 -1.25210273e+00 1.19393814e+00 1.52756125e-01] [-1.50878346e+00 1.92502832e+00 5.23293316e-01 1.51349461e+00 -1.01293993e+00 2.09910497e-01 -5.04129410e-01 -1.15482844e-02 5.78794479e-01 7.95299888e-01 -1.23164690e+00 2.20740747e+00 -3.83224338e-01 2.86540836e-02] [ 1.27527165e+00 1.29503322e+00 2.50275880e-01 -4.98218030e-01 -1.62285626e+00 -2.85787791e-01 -6.80771887e-01 1.09852898e+00 -7.44265467e-02 4.15245593e-01 1.52319825e+00 9.46282566e-01 2.52033293e-01 9.22591150e-01] [ 2.10472479e-01 -3.49673569e-01 8.52338731e-01 -2.98251081e-02 6.21017694e-01 7.85361707e-01 -9.56152320e-01 -2.28171766e-01 3.63763213e-01 -2.78236955e-01 -9.76868093e-01 1.83242035e+00 -1.40926823e-01 -7.18773186e-01] [ 1.13403440e+00 2.21540976e+00 -3.88655812e-01 1.51303694e-01 9.38374400e-01 6.42084032e-02 -8.09331715e-01 9.68070507e-01 -7.89877832e-01 8.13986510e-02 4.76090550e-01 3.77398357e-02 1.21547294e+00 -1.03749931e+00] [ 2.86823511e-01 8.52787614e-01 -5.87918818e-01 -9.76120174e-01 1.70768785e+00 -5.39012492e-01 -5.22714436e-01 1.30913651e+00 -8.41136098e-01 2.69338071e-01 -6.42236173e-01 -3.53340507e-01 1.38911891e+00 -1.12699318e+00]]]]; ov_res: [[[[-4.75784928e-01 6.29138291e-01 1.04549313e+00 1.66967034e+00 -6.39285207e-01 -7.61120617e-02 -1.55317128e+00 -1.33492208e+00 1.25792825e+00 -6.98363125e-01 -2.54940003e-01 -2.34801745e+00 -7.56072819e-01 -2.99929202e-01] [ 1.24255829e-01 -9.08955991e-01 3.46494108e-01 -3.89134258e-01 
2.19691753e-01 9.26803708e-01 6.74441993e-01 -1.23440433e+00 2.20540476e+00 -1.31205904e+00 9.53925371e-01 3.89973015e-01 -7.40675479e-02 2.35846877e+00] [-9.93423283e-01 -3.61170530e-01 3.36480707e-01 -8.14734817e-01 -7.87277699e-01 -6.68797791e-01 -7.66381681e-01 8.02170694e-01 -8.94693375e-01 7.58720934e-01 -7.79411554e-01 -1.05614532e-02 -1.27499509e+00 2.41881415e-01] [-1.12518024e+00 4.00148034e-01 -9.33108747e-01 -5.68121850e-01 -3.66395891e-01 -4.67640847e-01 1.77968368e-01 -1.11316419e+00 -2.31123161e+00 -9.00905609e-01 1.68835211e+00 8.78264308e-01 -1.41281986e+00 -2.13467979e+00] [-6.72031105e-01 -1.50202167e+00 -1.15928566e+00 6.84937418e-01 -1.49203435e-01 7.83025920e-02 -9.45458651e-01 -1.48114547e-01 -1.93102932e+00 9.84994352e-01 5.05493255e-03 -1.95365977e+00 -5.71791887e-01 2.19593692e+00] [-1.22617602e-01 1.36504602e+00 -2.79002994e-01 -7.69757926e-01 1.15425363e-01 6.19285822e-01 -2.57888412e+00 -4.58012968e-01 -2.07191873e+00 1.03272545e+00 -1.02945495e+00 -5.33569515e-01 -5.91169596e-01 -2.16787243e+00] [-4.67695221e-02 4.64605212e-01 4.88115966e-01 1.13702261e+00 -7.96538830e-01 -1.53018379e+00 3.75032485e-01 -2.37470984e+00 -1.68163633e+00 2.46027160e+00 7.01982319e-01 -3.37171674e-01 1.24005474e-01 1.26305258e+00] [-6.39632940e-01 -1.36894882e+00 -1.43521476e+00 7.92701617e-02 1.75883308e-01 -9.76368368e-01 -4.26510066e-01 -5.78984916e-01 3.06483507e-01 1.07975769e+00 -1.42334390e+00 -9.14856017e-01 1.99890628e-01 5.63475072e-01] [-7.99786210e-01 8.75662923e-01 -6.65841818e-01 3.46697211e-01 -2.10740507e-01 5.70128858e-01 4.64656875e-02 -2.16591334e+00 -1.50931728e+00 1.80662483e-01 3.14113915e-01 -7.82258749e-01 1.84814423e-01 -1.42821538e+00] [ 7.62629807e-02 3.25739831e-01 -8.44347239e-01 -1.84908211e-02 3.55702162e-01 -3.51978272e-01 2.17296720e+00 1.06682919e-01 -1.43080616e+00 6.56124592e-01 -2.04439902e+00 3.14094663e-01 -1.15974867e+00 -8.01288486e-02] [ 7.78496265e-02 -1.67683184e+00 -1.09806347e+00 -4.64809872e-02 -1.62417412e-01 
-3.82756740e-01 -9.55981433e-01 -3.62244934e-01 6.78861141e-01 1.25371587e+00 5.81050634e-01 -9.97353673e-01 6.89637899e-01 4.29654658e-01] [ 1.52798951e-01 5.54223359e-01 -1.55227304e+00 -1.94894516e+00 5.51464796e-01 3.16392899e-01 -1.30342221e+00 1.25780308e+00 -1.66597575e-01 -1.18274093e+00 -3.49919140e-01 7.73252010e-01 1.60696375e+00 -7.07434177e-01] [-2.84322444e-02 1.40070796e+00 1.02763915e+00 -1.76264465e+00 2.53861010e-01 1.97849125e-01 6.42377973e-01 1.37170112e+00 -1.67114723e+00 -8.72637331e-01 -1.85541058e+00 1.63892412e+00 -1.81039944e-01 -3.92965496e-01] [ 7.95512378e-01 1.06482995e+00 -3.61426055e-01 4.24869746e-01 -4.21821177e-01 -1.02182591e+00 -2.72556454e-01 1.29449511e+00 -5.69045655e-02 1.22853410e+00 3.44083697e-01 1.34012654e-01 4.29585487e-01 -9.26899433e-01]] [[-3.61465782e-01 -6.29723608e-01 8.93101934e-03 -2.79608443e-02 -1.01971078e+00 1.22306299e+00 1.90649951e+00 1.15705216e+00 -1.79687068e-02 -2.36005425e-01 2.78526568e+00 3.03032458e-01 3.42757910e-01 -6.81788564e-01] [-3.47257912e-01 -1.32159144e-01 1.28738558e+00 -2.54553580e+00 -2.51040435e+00 2.91896481e-02 -1.12075710e+00 8.88411850e-02 -4.96359020e-01 1.25485826e+00 1.08551227e-01 8.98223102e-01 4.19248175e-03 -7.09632784e-02] [ 2.80213654e-02 -1.38708159e-01 -4.13897842e-01 -5.75334251e-01 7.23815441e-01 1.01112294e+00 1.81420493e+00 6.47269309e-01 -1.61534876e-01 -2.79188734e-02 1.42231774e+00 -1.59700572e-01 5.68285346e-01 8.45158756e-01] [ 1.94316000e-01 -3.16220552e-01 6.04407899e-02 1.48434627e+00 5.37686706e-01 -4.99061823e-01 6.36968687e-02 1.65705977e-03 3.33319336e-01 -5.87811530e-01 6.94710463e-02 -2.57263392e-01 -6.62981033e-01 1.60316014e+00] [ 9.23178494e-02 1.60738516e+00 -5.90534091e-01 1.40647566e+00 1.52554750e+00 -1.05449355e+00 -1.50093091e+00 -1.05376244e+00 3.95516694e-01 1.11951387e+00 1.67906368e+00 1.47106433e+00 -2.23376095e-01 1.80219805e+00] [-1.16999459e+00 1.80579233e+00 -1.74376622e-01 5.06210506e-01 1.25011086e+00 -4.68114823e-01 
8.70677590e-01 1.93320364e-02 -1.38284117e-01 4.76378530e-01 7.57348478e-01 1.19945395e+00 6.56021953e-01 7.31848538e-01] [ 1.37756371e+00 1.15770742e-01 -2.98404753e-01 -1.20519087e-01 -7.77339339e-01 -1.30587661e+00 5.12601793e-01 2.03522444e-01 1.06239522e+00 8.17875266e-01 -1.68713284e+00 -5.95797122e-01 1.08575368e+00 2.45775402e-01] [ 6.27881944e-01 1.06092192e-01 5.04812241e-01 -3.04960966e-01 -7.07863510e-01 -4.62514967e-01 -1.49566785e-01 9.91081595e-01 1.43798971e+00 3.22156549e-01 -1.04388213e+00 1.21226346e+00 8.84459913e-01 2.64431655e-01] [-8.13871622e-01 5.98652363e-01 5.97728372e-01 1.28411546e-01 -1.52251482e+00 3.83709610e-01 1.58797443e-01 -6.00545883e-01 9.35585737e-01 -3.06362510e+00 9.85094666e-01 -2.82742023e-01 1.45685637e+00 1.35277450e+00] [-4.44151103e-01 1.05390942e+00 1.07836530e-01 9.98072326e-01 -1.10478330e+00 -5.55822909e-01 -5.39197028e-01 -5.87926626e-01 2.01631486e-01 -1.40470541e+00 -6.67488202e-02 -7.54788637e-01 -5.36825657e-01 1.00832248e+00] [-1.96625561e-01 6.67626500e-01 1.76631749e+00 2.06943822e+00 -5.30458033e-01 1.98445046e+00 3.39242190e-01 3.65577519e-01 1.44365299e+00 8.87197435e-01 -1.16427743e+00 -9.30313230e-01 6.94640756e-01 3.47125560e-01] [ 1.64887094e+00 8.51930201e-01 -5.99679053e-01 -1.03742671e+00 9.59690094e-01 -3.19617599e-01 -1.65289909e-01 -6.26522124e-01 1.03620398e+00 -1.17866230e+00 -6.35855138e-01 -7.14941442e-01 4.36290175e-01 -2.83828437e-01] [ 1.75147489e-01 -1.88281983e-01 1.98594177e+00 1.69434980e-01 -1.45570129e-01 3.10295522e-01 4.62997139e-01 7.31379911e-02 1.27584592e-01 1.67326069e+00 -2.28269291e+00 -8.09751391e-01 9.28295255e-01 -1.15240479e+00] [ 8.81747436e-03 1.07140493e+00 -7.24908531e-01 1.51877373e-01 7.90061235e-01 1.14864439e-01 -1.42008686e+00 -3.37175056e-02 6.73547924e-01 1.30606127e+00 8.00350547e-01 4.62330014e-01 1.48442507e+00 1.23465931e+00]] [[-3.15731496e-01 8.55346024e-01 -2.13594627e+00 6.14394188e-01 8.91971231e-01 1.03322172e+00 1.52455759e+00 1.28766322e+00 
1.21174908e+00 -3.25456709e-01 4.70025182e-01 1.49304950e+00 -1.94713265e-01 1.07355797e+00] [ 3.73194933e-01 5.75620830e-01 -3.78411025e-01 -1.74708053e-01 -6.74301565e-01 2.88526952e-01 1.62296593e+00 9.22996998e-01 5.22734523e-01 -3.41668040e-01 -7.80516326e-01 5.74857891e-01 -7.74898333e-03 2.26644173e-01] [-1.01081395e+00 4.31125835e-02 3.34992379e-01 -8.90078187e-01 -1.48613620e+00 5.93768418e-01 4.98177201e-01 -9.46449816e-01 2.46809316e+00 6.07459486e-01 1.83462083e-01 1.06546342e+00 7.63221681e-01 -5.37832975e-01] [-9.33923244e-01 2.76682496e-01 -4.46554542e-01 1.05396354e+00 1.93627918e+00 -5.04998505e-01 5.54984510e-01 1.09215450e+00 -3.34439129e-02 6.76081240e-01 -1.02954257e+00 1.97560191e+00 9.89266157e-01 -4.17920500e-01] [-9.72506106e-02 -1.10901378e-01 1.23222399e+00 1.07099020e+00 -3.18890721e-01 2.61908263e-01 -5.68041563e-01 -2.32044220e-01 -2.64845181e+00 1.83546275e-01 -9.49931264e-01 4.61023003e-01 -5.51855624e-01 6.83355570e-01] [-3.11239314e+00 -1.86408639e-01 3.32808904e-02 8.93398225e-01 2.47493386e-01 1.02824934e-01 -1.65345359e+00 7.27236748e-01 -2.34108472e+00 1.81338966e+00 -1.12578642e+00 -1.29310656e+00 -6.18774772e-01 -2.30594262e-01] [-1.19606149e+00 -1.03004241e+00 7.10591078e-01 8.33089471e-01 6.70648515e-01 -2.16119051e+00 -1.08115792e+00 5.08610606e-01 5.31939268e-01 4.45750743e-01 -1.32287011e-01 6.69908702e-01 -2.10960007e+00 -1.70906508e+00] [-1.32650983e+00 -7.68538296e-01 -1.00560308e+00 -1.33991158e+00 1.38692951e+00 1.77615154e+00 5.81916511e-01 -1.33665133e+00 9.66661811e-01 1.24389935e+00 -1.51926994e-01 -5.47234595e-01 -9.32601571e-01 2.21485043e+00] [-1.95028067e-01 1.81367978e-01 -2.18591666e+00 -2.14159489e+00 5.44602811e-01 5.13688564e-01 -5.69414616e-01 1.26925957e+00 -2.14776969e+00 -1.04219683e-01 3.08578551e-01 -1.25210273e+00 1.19393814e+00 1.52756125e-01] [-1.50878346e+00 1.92502832e+00 5.23293316e-01 1.51349461e+00 -1.01293993e+00 2.09910497e-01 -5.04129410e-01 -1.15482844e-02 5.78794479e-01 7.95299888e-01 
-1.23164690e+00 2.20740747e+00 -3.83224338e-01 2.86540836e-02] [ 1.27527165e+00 1.29503322e+00 2.50275880e-01 -4.98218030e-01 -1.62285626e+00 -2.85787791e-01 -6.80771887e-01 1.09852898e+00 -7.44265467e-02 4.15245593e-01 1.52319825e+00 9.46282566e-01 2.52033293e-01 9.22591150e-01] [ 2.10472479e-01 -3.49673569e-01 8.52338731e-01 -2.98251081e-02 6.21017694e-01 7.85361707e-01 -9.56152320e-01 -2.28171766e-01 3.63763213e-01 -2.78236955e-01 -9.76868093e-01 1.83242035e+00 -1.40926823e-01 -7.18773186e-01] [ 1.13403440e+00 2.21540976e+00 -3.88655812e-01 1.51303694e-01 9.38374400e-01 6.42084032e-02 -8.09331715e-01 9.68070507e-01 -7.89877832e-01 8.13986510e-02 4.76090550e-01 3.77398357e-02 1.21547294e+00 -1.03749931e+00] [ 2.86823511e-01 8.52787614e-01 -5.87918818e-01 -9.76120174e-01 1.70768785e+00 -5.39012492e-01 -5.22714436e-01 1.30913651e+00 -8.41136098e-01 2.69338071e-01 -6.42236173e-01 -3.53340507e-01 1.38911891e+00 -1.12699318e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:replicate - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5487.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="replicate"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) ly constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. 
Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception 
happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_Efw_re: [[[[-1.2644385 -1.2644385 1.1176217 ... 1.3698542 1.3698542 1.3698542 ] [-1.2644385 -1.2644385 1.1176217 ... 1.3698542 1.3698542 1.3698542 ] [-1.2644385 -1.2644385 1.1176217 ... 1.3698542 1.3698542 1.3698542 ] ... [-0.21716015 -0.21716015 -2.9310286 ... 0.58252084 0.58252084 0.58252084] [-0.21716015 -0.21716015 -2.9310286 ... 0.58252084 0.58252084 0.58252084] [-0.21716015 -0.21716015 -2.9310286 ... 0.58252084 0.58252084 0.58252084]] [[-1.014646 -1.014646 -0.22211613 ... 0.0269084 0.0269084 0.0269084 ] [-1.014646 -1.014646 -0.22211613 ... 0.0269084 0.0269084 0.0269084 ] [-1.014646 -1.014646 -0.22211613 ... 0.0269084 0.0269084 0.0269084 ] ... [-1.1664683 -1.1664683 -1.3190101 ... 0.69813997 0.69813997 0.69813997] [-1.1664683 -1.1664683 -1.3190101 ... 0.69813997 0.69813997 0.69813997] [-1.1664683 -1.1664683 -1.3190101 ... 0.69813997 0.69813997 0.69813997]] [[-0.76754445 -0.76754445 -0.5179781 ... -0.23370557 -0.23370557 -0.23370557] [-0.76754445 -0.76754445 -0.5179781 ... -0.23370557 -0.23370557 -0.23370557] [-0.76754445 -0.76754445 -0.5179781 ... -0.23370557 -0.23370557 -0.23370557] ... [-0.45834827 -0.45834827 -2.383465 ... 0.34414938 0.34414938 0.34414938] [-0.45834827 -0.45834827 -2.383465 ... 0.34414938 0.34414938 0.34414938] [-0.45834827 -0.45834827 -2.383465 ... 0.34414938 0.34414938 0.34414938]]]]; ov_res: [[[[-1.2644385 -1.2644385 1.1176217 ... 
1.3698542 1.3698542 1.3698542 ] [-1.2644385 -1.2644385 1.1176217 ... 1.3698542 1.3698542 1.3698542 ] [-1.2644385 -1.2644385 1.1176217 ... 1.3698542 1.3698542 1.3698542 ] ... [-0.21716015 -0.21716015 -2.9310286 ... 0.58252084 0.58252084 0.58252084] [-0.21716015 -0.21716015 -2.9310286 ... 0.58252084 0.58252084 0.58252084] [-0.21716015 -0.21716015 -2.9310286 ... 0.58252084 0.58252084 0.58252084]] [[-1.014646 -1.014646 -0.22211613 ... 0.0269084 0.0269084 0.0269084 ] [-1.014646 -1.014646 -0.22211613 ... 0.0269084 0.0269084 0.0269084 ] [-1.014646 -1.014646 -0.22211613 ... 0.0269084 0.0269084 0.0269084 ] ... [-1.1664683 -1.1664683 -1.3190101 ... 0.69813997 0.69813997 0.69813997] [-1.1664683 -1.1664683 -1.3190101 ... 0.69813997 0.69813997 0.69813997] [-1.1664683 -1.1664683 -1.3190101 ... 0.69813997 0.69813997 0.69813997]] [[-0.76754445 -0.76754445 -0.5179781 ... -0.23370557 -0.23370557 -0.23370557] [-0.76754445 -0.76754445 -0.5179781 ... -0.23370557 -0.23370557 -0.23370557] [-0.76754445 -0.76754445 -0.5179781 ... -0.23370557 -0.23370557 -0.23370557] ... [-0.45834827 -0.45834827 -2.383465 ... 0.34414938 0.34414938 0.34414938] [-0.45834827 -0.45834827 -2.383465 ... 0.34414938 0.34414938 0.34414938] [-0.45834827 -0.45834827 -2.383465 ... 0.34414938 0.34414938 0.34414938]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 0) - mode:replicate - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5489.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 0]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="replicate"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[-5.45171499e-01 -5.45171499e-01 2.79488504e-01 -1.23897457e+00 1.20319915e+00 -7.44195461e-01 -1.83645234e-01 4.53313030e-02 -2.33635917e-01 9.89033580e-01 -1.63260967e-01 -6.12292409e-01 -2.12720561e+00 1.01457250e+00 -7.26451337e-01] [-5.80314815e-01 -5.80314815e-01 1.78083706e+00 1.24060512e+00 1.82386780e+00 -7.53486633e-01 1.80652559e-01 4.23742682e-01 -6.38895631e-02 -2.09563851e+00 1.62929285e+00 1.56635547e+00 4.35199231e-01 -1.22930482e-01 5.64476430e-01] [ 1.56724668e+00 1.56724668e+00 -1.15675473e+00 -3.93979400e-01 -9.79601622e-01 4.95002151e-01 1.35406470e+00 -2.08497047e+00 1.32882154e+00 -8.56592000e-01 2.20690346e+00 5.23568869e-01 -9.69765306e-01 1.92620504e+00 -7.19614923e-01] [-5.70080578e-01 -5.70080578e-01 1.15989335e-01 -1.45428824e+00 -1.59299421e+00 -1.94392693e+00 4.26936120e-01 1.46224535e+00 -5.98102033e-01 6.01413310e-01 -1.48573034e-02 9.27560091e-01 1.78803161e-01 2.54988503e+00 2.78209448e+00] [ 7.53018618e-01 7.53018618e-01 8.51718247e-01 7.99187958e-01 7.62526929e-01 -3.32919925e-01 1.84494570e-01 9.74511802e-01 2.43187502e-01 -8.00072193e-01 1.17082393e+00 -9.91965592e-01 -9.27590251e-01 9.00432765e-01 -6.79557621e-01] [ 5.40782928e-01 5.40782928e-01 -4.55580473e-01 9.66161489e-01 -1.13290596e+00 6.30861104e-01 -1.30035949e+00 4.48358327e-01 -7.17418730e-01 -1.53873876e-01 2.80213535e-01 4.94762599e-01 -7.70760477e-01 2.90985137e-01 7.72827029e-01] [-1.87604475e+00 -1.87604475e+00 2.44354695e-01 -6.28955066e-02 5.99871218e-01 2.32270792e-01 -3.18964720e-01 2.00468957e-01 
-3.58284503e-01 -1.92009851e-01 1.51688293e-01 -8.70790124e-01 -9.42577049e-02 -5.82284033e-01 4.35300261e-01] [-1.65280718e-02 -1.65280718e-02 1.27189851e+00 -3.91009331e-01 4.66295451e-01 8.94187152e-01 -1.17137933e+00 3.04740816e-01 1.79396674e-01 3.29931617e-01 -1.83743763e+00 -1.70649320e-01 1.23171973e+00 -6.59348428e-01 -1.06625235e+00] [-3.34688872e-01 -3.34688872e-01 -1.47053075e+00 -1.40038228e+00 1.73985100e+00 3.50634366e-01 -4.53833014e-01 3.33404613e+00 7.92986527e-02 3.27636927e-01 -1.40797281e+00 2.74972260e-01 -1.29031932e+00 -7.34619856e-01 -1.41652679e+00] [-1.66814342e-01 -1.66814342e-01 4.85318452e-01 1.05856609e+00 2.45919712e-02 2.45931327e-01 -1.18808162e+00 1.05649984e+00 5.91904163e-01 7.94849932e-01 -4.46582913e-01 -2.80977916e-02 -7.48165786e-01 1.21042287e+00 -7.89415777e-01] [-1.46072614e+00 -1.46072614e+00 3.69028211e-01 3.98390263e-01 -1.84919193e-01 8.62150788e-01 -1.20342112e+00 5.76345325e-01 -7.37858236e-01 5.39483488e-01 -2.06521358e-02 2.29528499e+00 -1.33064792e-01 6.81715310e-01 -6.05323434e-01] [-3.46115202e-01 -3.46115202e-01 -6.77138567e-01 1.22100222e+00 2.61526853e-01 -3.49902719e-01 -1.70279574e+00 8.50128531e-01 2.74960905e-01 4.81659383e-01 -1.55723476e+00 -2.57912844e-01 -6.43332541e-01 -3.73258144e-01 -1.19878376e+00] [-5.94534278e-01 -5.94534278e-01 -8.44127387e-02 -5.82812786e-01 7.75428891e-01 2.50034857e+00 9.12532955e-02 3.48555714e-01 -9.64706361e-01 -8.65299225e-01 7.62105048e-01 1.58334804e+00 5.39693296e-01 -6.55077338e-01 -1.46765001e-02] [-8.65001559e-01 -8.65001559e-01 -1.14080548e-01 -2.93804914e-01 -9.55689430e-01 6.97275579e-01 -9.31193471e-01 -1.12130374e-01 3.32467735e-01 -2.22688460e+00 5.12918115e-01 -6.07385159e-01 1.80751666e-01 -1.06524855e-01 -1.34413266e+00]] [[-1.64278948e+00 -1.64278948e+00 -8.44019830e-01 1.28267419e+00 -1.43383515e+00 -1.01158524e+00 -8.46402824e-01 -5.73155344e-01 -2.98660398e-01 -1.02951191e-02 -8.76939118e-01 8.34776998e-01 -1.05782914e+00 -8.14449966e-01 
-1.44441581e+00] [-4.92081076e-01 -4.92081076e-01 1.19882452e+00 -7.22323954e-01 1.46432316e+00 -6.44380987e-01 -6.10694468e-01 7.01192021e-01 6.32083118e-02 1.16495185e-01 7.21838593e-01 -1.32176673e+00 1.33253205e+00 -1.76526338e-01 2.11250949e+00] [-1.08630344e-01 -1.08630344e-01 8.26039553e-01 -5.26679397e-01 -1.44541943e+00 -5.57638466e-01 8.30779016e-01 -3.67366374e-01 4.82910164e-02 -1.74822164e+00 1.07809842e+00 -7.54291654e-01 -7.89110661e-01 -9.41229463e-01 9.88659799e-01] [ 1.41464481e-02 1.41464481e-02 7.00812101e-01 -9.41345811e-01 -1.27064094e-01 -6.94369137e-01 1.29972064e+00 4.31904495e-01 -6.88474953e-01 7.38641262e-01 -8.85329396e-02 -5.28297544e-01 -1.34902573e+00 1.34765625e+00 -1.18910909e+00] [ 5.11410832e-01 5.11410832e-01 1.50476754e+00 1.82750475e+00 1.34487450e+00 -1.06027448e+00 5.19792251e-02 -6.45608187e-01 -6.71169087e-02 -2.13744894e-01 -2.21980348e-01 -4.40258801e-01 -1.16224313e+00 3.72820720e-02 -2.04171324e+00] [-7.75407404e-02 -7.75407404e-02 -4.83821392e-01 -6.65556267e-02 7.90707350e-01 4.94060665e-01 -8.28653038e-01 -1.09620333e+00 2.48676345e-01 1.11814749e+00 2.06639588e-01 -3.07526916e-01 -5.18553436e-01 6.39872402e-02 -7.27481484e-01] [-6.06080115e-01 -6.06080115e-01 -5.30808032e-01 4.33092833e-01 -5.26187360e-01 -7.25803375e-01 1.78772080e+00 -4.72519159e-01 1.54715646e-02 -5.03385305e-01 -6.28429830e-01 1.61082542e+00 -1.23526889e-03 -2.44996399e-01 6.94029868e-01] [-4.50420946e-01 -4.50420946e-01 -1.76446819e+00 1.66449130e+00 9.90758717e-01 -5.05785227e-01 2.65439630e-01 -2.40759134e+00 2.22834349e-01 4.69663501e-01 5.44863284e-01 1.36934614e+00 -6.62678897e-01 8.22624862e-01 -4.07958716e-01] [-6.93847120e-01 -6.93847120e-01 4.85637486e-01 -1.09683609e+00 -9.56972018e-02 -8.20834562e-02 -2.11365707e-02 -2.01597005e-01 1.72820855e-02 1.25916258e-01 1.07639349e+00 -1.69195399e-01 2.43419930e-01 -1.12247717e+00 1.60589278e-01] [ 1.08535874e+00 1.08535874e+00 -5.74587286e-01 7.21058995e-03 8.67643237e-01 1.45351028e+00 
6.77735150e-01 -9.07772601e-01 -1.31003547e+00 1.75631487e+00 2.10707933e-02 1.48513401e+00 -1.44258499e+00 5.95715761e-01 -1.83914936e+00] [-1.57933486e+00 -1.57933486e+00 -2.87475884e-01 -3.17998767e-01 1.05524766e+00 -4.16055262e-01 9.12051141e-01 4.57852669e-02 3.67394775e-01 -1.01972926e+00 1.45061576e+00 -2.05087042e+00 -4.35690969e-01 -3.50684151e-02 1.31872284e+00] [ 7.21875489e-01 7.21875489e-01 -6.25485420e-01 2.18300849e-01 6.17592454e-01 2.92147517e-01 -3.13090324e-01 6.57271147e-01 1.34197259e+00 -3.68428677e-01 1.95231959e-01 1.92646050e+00 1.22428095e+00 4.08536375e-01 -1.78489551e-01] [-7.77145147e-01 -7.77145147e-01 -2.06921005e+00 -6.56368256e-01 7.73375690e-01 1.92181730e+00 -1.15018797e+00 -7.82430828e-01 -3.32441539e-01 1.33547470e-01 2.17704281e-01 4.70290780e-01 -7.44208634e-01 4.73606944e-01 -7.34922588e-01] [-8.61371815e-01 -8.61371815e-01 -1.27073973e-01 -1.86949337e+00 6.20614827e-01 -8.31839979e-01 -1.05495811e+00 -9.69781935e-01 1.55331967e-02 -3.77075464e-01 -7.08472669e-01 -2.63954606e-02 9.15735438e-02 3.30565929e-01 3.77681881e-01]] [[-2.77241230e-01 -2.77241230e-01 6.35602355e-01 1.63380265e+00 8.27962995e-01 1.93546510e+00 -2.18757105e+00 -5.44341981e-01 4.48592752e-01 -1.00589836e+00 1.40529931e+00 8.04867506e-01 1.56117952e+00 -4.71120588e-02 1.21767029e-01] [-2.55672902e-01 -2.55672902e-01 9.16782260e-01 -1.16523271e-02 -1.32740289e-01 1.06701767e+00 -6.52436972e-01 -6.71288908e-01 6.39430210e-02 6.15689158e-02 -1.88496679e-01 9.48445618e-01 -5.05608678e-01 -7.07643807e-01 -1.42299283e+00] [ 4.70275909e-01 4.70275909e-01 -2.23834544e-01 -4.19813901e-01 3.39677662e-01 7.34301448e-01 8.06780457e-01 -4.20690067e-02 6.01392567e-01 6.68389425e-02 -1.78570616e+00 -5.94026268e-01 1.53496742e+00 -8.10099766e-03 -1.42689741e+00] [-4.92421359e-01 -4.92421359e-01 1.64386773e+00 3.70286942e-01 1.75419450e+00 -1.71841311e+00 2.01900870e-01 -4.96788919e-01 1.89171803e+00 -2.12652254e+00 -9.30940449e-01 -1.22269344e+00 1.44410968e-01 
-3.78861368e-01 -4.42200929e-01] [-7.22789690e-02 -7.22789690e-02 1.70083010e+00 -5.20537049e-02 -3.91670391e-02 1.11733520e+00 -1.00207162e+00 1.03972507e+00 9.43626046e-01 1.27483523e+00 -8.26914787e-01 5.97545803e-01 9.47878003e-01 -7.27642596e-01 1.23235357e+00] [ 2.19149411e-01 2.19149411e-01 1.28920168e-01 -1.35564161e-02 -2.18494630e+00 9.21078920e-01 7.97052264e-01 6.74875081e-02 4.59295511e-01 -8.17855537e-01 9.42472816e-01 -5.72807610e-01 1.37977982e+00 7.53593802e-01 -1.07159472e+00] [ 4.39269662e-01 4.39269662e-01 -1.30036759e+00 6.67866617e-02 -1.78607547e+00 9.85034645e-01 1.34165823e-01 -8.19084167e-01 -1.73909649e-01 4.61354136e-01 -2.77551979e-01 1.64515510e-01 -1.52842075e-01 -1.06365597e+00 1.14946222e+00] [ 7.05107331e-01 7.05107331e-01 -1.42385387e+00 1.29654551e+00 1.37099934e+00 -1.78469455e+00 2.68965453e-01 -1.06808436e+00 1.60818672e+00 6.22579694e-01 -5.29809773e-01 9.47215378e-01 2.83539343e+00 -5.94027579e-01 -3.54500175e-01] [-4.12421495e-01 -4.12421495e-01 -5.82753658e-01 -6.47701859e-01 6.18892252e-01 1.33305490e+00 -8.45802009e-01 1.70371509e+00 1.38616771e-01 8.87700617e-01 1.76143789e+00 1.90552860e-01 2.30214429e+00 -6.08403862e-01 -1.49164557e+00] [ 9.39346850e-01 9.39346850e-01 -2.08513245e-01 -3.75067025e-01 5.67298353e-01 1.67828059e+00 1.87786925e+00 6.67543888e-01 7.83109844e-01 -5.72595298e-01 9.16476130e-01 7.70867050e-01 -1.68666184e-01 4.90714550e-01 1.61735141e+00] [-4.88977104e-01 -4.88977104e-01 2.10027605e-01 -4.05826956e-01 -2.27230340e-01 8.97038639e-01 -1.61409211e+00 9.13201332e-01 6.29962802e-01 8.56105387e-01 -1.49307191e-01 -1.79742551e+00 9.45828632e-02 -1.00771880e+00 4.99737784e-02] [ 2.01261520e+00 2.01261520e+00 -9.52759743e-01 -2.49322832e-01 7.22602248e-01 3.52487378e-02 -1.30472088e+00 -2.23492429e-01 1.34392214e+00 4.34808195e-01 9.75404605e-02 7.08771944e-01 8.81257236e-01 1.75451502e-01 4.96468037e-01] [ 5.64341009e-01 5.64341009e-01 9.13012743e-01 1.88503551e+00 4.61653382e-01 -2.22645855e+00 
-3.20265353e-01 2.40833834e-01 -5.15505612e-01 -7.23150790e-01 2.99254566e-01 -1.53185889e-01 8.21335316e-01 -3.84428352e-01 -2.18834747e-02] [ 9.84251320e-01 9.84251320e-01 -1.09508425e-01 -1.49945712e+00 -2.88514167e-01 6.24225080e-01 1.55872977e+00 -4.61833119e-01 2.00881839e+00 4.11294341e-01 1.23791683e+00 2.72644609e-01 -4.88423020e-01 -7.93853045e-01 -1.39974451e+00]]]]; ov_res: [[[[-5.45171499e-01 -5.45171499e-01 2.79488504e-01 -1.23897457e+00 1.20319915e+00 -7.44195461e-01 -1.83645234e-01 4.53313030e-02 -2.33635917e-01 9.89033580e-01 -1.63260967e-01 -6.12292409e-01 -2.12720561e+00 1.01457250e+00 -7.26451337e-01] [-5.80314815e-01 -5.80314815e-01 1.78083706e+00 1.24060512e+00 1.82386780e+00 -7.53486633e-01 1.80652559e-01 4.23742682e-01 -6.38895631e-02 -2.09563851e+00 1.62929285e+00 1.56635547e+00 4.35199231e-01 -1.22930482e-01 5.64476430e-01] [ 1.56724668e+00 1.56724668e+00 -1.15675473e+00 -3.93979400e-01 -9.79601622e-01 4.95002151e-01 1.35406470e+00 -2.08497047e+00 1.32882154e+00 -8.56592000e-01 2.20690346e+00 5.23568869e-01 -9.69765306e-01 1.92620504e+00 -7.19614923e-01] [-5.70080578e-01 -5.70080578e-01 1.15989335e-01 -1.45428824e+00 -1.59299421e+00 -1.94392693e+00 4.26936120e-01 1.46224535e+00 -5.98102033e-01 6.01413310e-01 -1.48573034e-02 9.27560091e-01 1.78803161e-01 2.54988503e+00 2.78209448e+00] [ 7.53018618e-01 7.53018618e-01 8.51718247e-01 7.99187958e-01 7.62526929e-01 -3.32919925e-01 1.84494570e-01 9.74511802e-01 2.43187502e-01 -8.00072193e-01 1.17082393e+00 -9.91965592e-01 -9.27590251e-01 9.00432765e-01 -6.79557621e-01] [ 5.40782928e-01 5.40782928e-01 -4.55580473e-01 9.66161489e-01 -1.13290596e+00 6.30861104e-01 -1.30035949e+00 4.48358327e-01 -7.17418730e-01 -1.53873876e-01 2.80213535e-01 4.94762599e-01 -7.70760477e-01 2.90985137e-01 7.72827029e-01] [-1.87604475e+00 -1.87604475e+00 2.44354695e-01 -6.28955066e-02 5.99871218e-01 2.32270792e-01 -3.18964720e-01 2.00468957e-01 -3.58284503e-01 -1.92009851e-01 1.51688293e-01 -8.70790124e-01 
-9.42577049e-02 -5.82284033e-01 4.35300261e-01] [-1.65280718e-02 -1.65280718e-02 1.27189851e+00 -3.91009331e-01 4.66295451e-01 8.94187152e-01 -1.17137933e+00 3.04740816e-01 1.79396674e-01 3.29931617e-01 -1.83743763e+00 -1.70649320e-01 1.23171973e+00 -6.59348428e-01 -1.06625235e+00] [-3.34688872e-01 -3.34688872e-01 -1.47053075e+00 -1.40038228e+00 1.73985100e+00 3.50634366e-01 -4.53833014e-01 3.33404613e+00 7.92986527e-02 3.27636927e-01 -1.40797281e+00 2.74972260e-01 -1.29031932e+00 -7.34619856e-01 -1.41652679e+00] [-1.66814342e-01 -1.66814342e-01 4.85318452e-01 1.05856609e+00 2.45919712e-02 2.45931327e-01 -1.18808162e+00 1.05649984e+00 5.91904163e-01 7.94849932e-01 -4.46582913e-01 -2.80977916e-02 -7.48165786e-01 1.21042287e+00 -7.89415777e-01] [-1.46072614e+00 -1.46072614e+00 3.69028211e-01 3.98390263e-01 -1.84919193e-01 8.62150788e-01 -1.20342112e+00 5.76345325e-01 -7.37858236e-01 5.39483488e-01 -2.06521358e-02 2.29528499e+00 -1.33064792e-01 6.81715310e-01 -6.05323434e-01] [-3.46115202e-01 -3.46115202e-01 -6.77138567e-01 1.22100222e+00 2.61526853e-01 -3.49902719e-01 -1.70279574e+00 8.50128531e-01 2.74960905e-01 4.81659383e-01 -1.55723476e+00 -2.57912844e-01 -6.43332541e-01 -3.73258144e-01 -1.19878376e+00] [-5.94534278e-01 -5.94534278e-01 -8.44127387e-02 -5.82812786e-01 7.75428891e-01 2.50034857e+00 9.12532955e-02 3.48555714e-01 -9.64706361e-01 -8.65299225e-01 7.62105048e-01 1.58334804e+00 5.39693296e-01 -6.55077338e-01 -1.46765001e-02] [-8.65001559e-01 -8.65001559e-01 -1.14080548e-01 -2.93804914e-01 -9.55689430e-01 6.97275579e-01 -9.31193471e-01 -1.12130374e-01 3.32467735e-01 -2.22688460e+00 5.12918115e-01 -6.07385159e-01 1.80751666e-01 -1.06524855e-01 -1.34413266e+00]] [[-1.64278948e+00 -1.64278948e+00 -8.44019830e-01 1.28267419e+00 -1.43383515e+00 -1.01158524e+00 -8.46402824e-01 -5.73155344e-01 -2.98660398e-01 -1.02951191e-02 -8.76939118e-01 8.34776998e-01 -1.05782914e+00 -8.14449966e-01 -1.44441581e+00] [-4.92081076e-01 -4.92081076e-01 1.19882452e+00 
-7.22323954e-01 1.46432316e+00 -6.44380987e-01 -6.10694468e-01 7.01192021e-01 6.32083118e-02 1.16495185e-01 7.21838593e-01 -1.32176673e+00 1.33253205e+00 -1.76526338e-01 2.11250949e+00] [-1.08630344e-01 -1.08630344e-01 8.26039553e-01 -5.26679397e-01 -1.44541943e+00 -5.57638466e-01 8.30779016e-01 -3.67366374e-01 4.82910164e-02 -1.74822164e+00 1.07809842e+00 -7.54291654e-01 -7.89110661e-01 -9.41229463e-01 9.88659799e-01] [ 1.41464481e-02 1.41464481e-02 7.00812101e-01 -9.41345811e-01 -1.27064094e-01 -6.94369137e-01 1.29972064e+00 4.31904495e-01 -6.88474953e-01 7.38641262e-01 -8.85329396e-02 -5.28297544e-01 -1.34902573e+00 1.34765625e+00 -1.18910909e+00] [ 5.11410832e-01 5.11410832e-01 1.50476754e+00 1.82750475e+00 1.34487450e+00 -1.06027448e+00 5.19792251e-02 -6.45608187e-01 -6.71169087e-02 -2.13744894e-01 -2.21980348e-01 -4.40258801e-01 -1.16224313e+00 3.72820720e-02 -2.04171324e+00] [-7.75407404e-02 -7.75407404e-02 -4.83821392e-01 -6.65556267e-02 7.90707350e-01 4.94060665e-01 -8.28653038e-01 -1.09620333e+00 2.48676345e-01 1.11814749e+00 2.06639588e-01 -3.07526916e-01 -5.18553436e-01 6.39872402e-02 -7.27481484e-01] [-6.06080115e-01 -6.06080115e-01 -5.30808032e-01 4.33092833e-01 -5.26187360e-01 -7.25803375e-01 1.78772080e+00 -4.72519159e-01 1.54715646e-02 -5.03385305e-01 -6.28429830e-01 1.61082542e+00 -1.23526889e-03 -2.44996399e-01 6.94029868e-01] [-4.50420946e-01 -4.50420946e-01 -1.76446819e+00 1.66449130e+00 9.90758717e-01 -5.05785227e-01 2.65439630e-01 -2.40759134e+00 2.22834349e-01 4.69663501e-01 5.44863284e-01 1.36934614e+00 -6.62678897e-01 8.22624862e-01 -4.07958716e-01] [-6.93847120e-01 -6.93847120e-01 4.85637486e-01 -1.09683609e+00 -9.56972018e-02 -8.20834562e-02 -2.11365707e-02 -2.01597005e-01 1.72820855e-02 1.25916258e-01 1.07639349e+00 -1.69195399e-01 2.43419930e-01 -1.12247717e+00 1.60589278e-01] [ 1.08535874e+00 1.08535874e+00 -5.74587286e-01 7.21058995e-03 8.67643237e-01 1.45351028e+00 6.77735150e-01 -9.07772601e-01 -1.31003547e+00 1.75631487e+00 
2.10707933e-02 1.48513401e+00 -1.44258499e+00 5.95715761e-01 -1.83914936e+00] [-1.57933486e+00 -1.57933486e+00 -2.87475884e-01 -3.17998767e-01 1.05524766e+00 -4.16055262e-01 9.12051141e-01 4.57852669e-02 3.67394775e-01 -1.01972926e+00 1.45061576e+00 -2.05087042e+00 -4.35690969e-01 -3.50684151e-02 1.31872284e+00] [ 7.21875489e-01 7.21875489e-01 -6.25485420e-01 2.18300849e-01 6.17592454e-01 2.92147517e-01 -3.13090324e-01 6.57271147e-01 1.34197259e+00 -3.68428677e-01 1.95231959e-01 1.92646050e+00 1.22428095e+00 4.08536375e-01 -1.78489551e-01] [-7.77145147e-01 -7.77145147e-01 -2.06921005e+00 -6.56368256e-01 7.73375690e-01 1.92181730e+00 -1.15018797e+00 -7.82430828e-01 -3.32441539e-01 1.33547470e-01 2.17704281e-01 4.70290780e-01 -7.44208634e-01 4.73606944e-01 -7.34922588e-01] [-8.61371815e-01 -8.61371815e-01 -1.27073973e-01 -1.86949337e+00 6.20614827e-01 -8.31839979e-01 -1.05495811e+00 -9.69781935e-01 1.55331967e-02 -3.77075464e-01 -7.08472669e-01 -2.63954606e-02 9.15735438e-02 3.30565929e-01 3.77681881e-01]] [[-2.77241230e-01 -2.77241230e-01 6.35602355e-01 1.63380265e+00 8.27962995e-01 1.93546510e+00 -2.18757105e+00 -5.44341981e-01 4.48592752e-01 -1.00589836e+00 1.40529931e+00 8.04867506e-01 1.56117952e+00 -4.71120588e-02 1.21767029e-01] [-2.55672902e-01 -2.55672902e-01 9.16782260e-01 -1.16523271e-02 -1.32740289e-01 1.06701767e+00 -6.52436972e-01 -6.71288908e-01 6.39430210e-02 6.15689158e-02 -1.88496679e-01 9.48445618e-01 -5.05608678e-01 -7.07643807e-01 -1.42299283e+00] [ 4.70275909e-01 4.70275909e-01 -2.23834544e-01 -4.19813901e-01 3.39677662e-01 7.34301448e-01 8.06780457e-01 -4.20690067e-02 6.01392567e-01 6.68389425e-02 -1.78570616e+00 -5.94026268e-01 1.53496742e+00 -8.10099766e-03 -1.42689741e+00] [-4.92421359e-01 -4.92421359e-01 1.64386773e+00 3.70286942e-01 1.75419450e+00 -1.71841311e+00 2.01900870e-01 -4.96788919e-01 1.89171803e+00 -2.12652254e+00 -9.30940449e-01 -1.22269344e+00 1.44410968e-01 -3.78861368e-01 -4.42200929e-01] [-7.22789690e-02 -7.22789690e-02 
1.70083010e+00 -5.20537049e-02 -3.91670391e-02 1.11733520e+00 -1.00207162e+00 1.03972507e+00 9.43626046e-01 1.27483523e+00 -8.26914787e-01 5.97545803e-01 9.47878003e-01 -7.27642596e-01 1.23235357e+00] [ 2.19149411e-01 2.19149411e-01 1.28920168e-01 -1.35564161e-02 -2.18494630e+00 9.21078920e-01 7.97052264e-01 6.74875081e-02 4.59295511e-01 -8.17855537e-01 9.42472816e-01 -5.72807610e-01 1.37977982e+00 7.53593802e-01 -1.07159472e+00] [ 4.39269662e-01 4.39269662e-01 -1.30036759e+00 6.67866617e-02 -1.78607547e+00 9.85034645e-01 1.34165823e-01 -8.19084167e-01 -1.73909649e-01 4.61354136e-01 -2.77551979e-01 1.64515510e-01 -1.52842075e-01 -1.06365597e+00 1.14946222e+00] [ 7.05107331e-01 7.05107331e-01 -1.42385387e+00 1.29654551e+00 1.37099934e+00 -1.78469455e+00 2.68965453e-01 -1.06808436e+00 1.60818672e+00 6.22579694e-01 -5.29809773e-01 9.47215378e-01 2.83539343e+00 -5.94027579e-01 -3.54500175e-01] [-4.12421495e-01 -4.12421495e-01 -5.82753658e-01 -6.47701859e-01 6.18892252e-01 1.33305490e+00 -8.45802009e-01 1.70371509e+00 1.38616771e-01 8.87700617e-01 1.76143789e+00 1.90552860e-01 2.30214429e+00 -6.08403862e-01 -1.49164557e+00] [ 9.39346850e-01 9.39346850e-01 -2.08513245e-01 -3.75067025e-01 5.67298353e-01 1.67828059e+00 1.87786925e+00 6.67543888e-01 7.83109844e-01 -5.72595298e-01 9.16476130e-01 7.70867050e-01 -1.68666184e-01 4.90714550e-01 1.61735141e+00] [-4.88977104e-01 -4.88977104e-01 2.10027605e-01 -4.05826956e-01 -2.27230340e-01 8.97038639e-01 -1.61409211e+00 9.13201332e-01 6.29962802e-01 8.56105387e-01 -1.49307191e-01 -1.79742551e+00 9.45828632e-02 -1.00771880e+00 4.99737784e-02] [ 2.01261520e+00 2.01261520e+00 -9.52759743e-01 -2.49322832e-01 7.22602248e-01 3.52487378e-02 -1.30472088e+00 -2.23492429e-01 1.34392214e+00 4.34808195e-01 9.75404605e-02 7.08771944e-01 8.81257236e-01 1.75451502e-01 4.96468037e-01] [ 5.64341009e-01 5.64341009e-01 9.13012743e-01 1.88503551e+00 4.61653382e-01 -2.22645855e+00 -3.20265353e-01 2.40833834e-01 -5.15505612e-01 -7.23150790e-01 
2.99254566e-01 -1.53185889e-01 8.21335316e-01 -3.84428352e-01 -2.18834747e-02] [ 9.84251320e-01 9.84251320e-01 -1.09508425e-01 -1.49945712e+00 -2.88514167e-01 6.24225080e-01 1.55872977e+00 -4.61833119e-01 2.00881839e+00 4.11294341e-01 1.23791683e+00 2.72644609e-01 -4.88423020e-01 -7.93853045e-01 -1.39974451e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 1) - mode:replicate - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5491.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 1]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="replicate"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 1.85846400e+00 1.85846400e+00 2.28694177e+00 6.62641168e-01 1.40224412e-01 -5.35668194e-01 -2.59748876e-01 -3.37152123e-01 -1.98108420e-01 4.51730154e-02 -3.97378504e-01 1.13467371e+00 1.27687168e+00 -1.70145929e+00 1.85702336e+00] [ 9.46191419e-03 9.46191419e-03 -7.00047731e-01 -8.17364752e-01 9.16555166e-01 1.12986457e+00 1.93429744e+00 1.04095721e+00 9.33815360e-01 8.61143231e-01 -1.14370096e+00 -2.30814174e-01 1.03399849e+00 1.73801675e-01 2.46382773e-01] [-4.95055169e-02 -4.95055169e-02 -2.89810956e-01 -4.48993951e-01 -1.64123547e+00 -1.07720935e+00 1.60011500e-01 7.23367870e-01 -1.91623592e+00 9.22111630e-01 -1.78870887e-01 1.62888014e+00 3.86201680e-01 3.82887483e-01 7.20653892e-01] [-2.79733360e-01 -2.79733360e-01 4.84626263e-01 -5.65503836e-02 -7.87618756e-03 3.87568116e-01 -8.53876472e-02 1.70734024e+00 -1.83327585e-01 -6.82658195e-01 -2.20436037e-01 1.36714923e+00 6.29327238e-01 -8.58345151e-01 -1.03891683e+00] [-2.21274900e+00 -2.21274900e+00 5.15011907e-01 4.99742568e-01 1.34956932e+00 6.05715871e-01 -3.90720844e-01 2.04315353e-02 -2.12761566e-01 6.12653136e-01 -2.61515766e-01 -8.09877157e-01 -7.23450780e-02 8.07661265e-02 1.07121181e+00] [ 1.21629894e+00 1.21629894e+00 2.71967024e-01 -8.27855527e-01 2.61172295e-01 6.74932659e-01 9.99992847e-01 3.94154936e-01 -1.26151264e-01 3.67118716e-01 -3.27029645e-01 -5.47213435e-01 -3.62318486e-01 5.69130778e-01 8.65485907e-01] [ 5.30089855e-01 5.30089855e-01 9.77215707e-01 -1.93057442e+00 -1.51394701e+00 -7.20630944e-01 2.68324757e+00 6.51284516e-01 
-3.52682441e-01 -9.94830966e-01 -1.26194382e+00 -6.92242026e-01 -5.57329178e-01 -1.63262689e+00 -3.10761958e-01] [-3.27417515e-02 -3.27417515e-02 -8.75744760e-01 -5.61513186e-01 -7.49801755e-01 -1.22454524e+00 -6.59612298e-01 1.03664994e-01 -2.42914343e+00 7.48705447e-01 6.98602021e-01 -2.12263376e-01 -2.92139024e-01 -2.05111519e-01 -3.40124480e-02] [ 1.09216321e+00 1.09216321e+00 1.27731292e-02 -1.76914012e+00 7.35742509e-01 1.00924361e+00 -6.19224906e-01 -4.61307466e-01 -1.41465211e+00 5.18248864e-02 -1.41937268e+00 1.71399355e+00 1.42897093e+00 1.93766737e+00 5.64390957e-01] [ 2.90318519e-01 2.90318519e-01 -4.70579892e-01 1.39740217e+00 -2.18228340e+00 1.78394526e-01 3.78001705e-02 1.99400270e+00 -7.38690376e-01 4.44344059e-02 -4.21037585e-01 -7.02174366e-01 1.46828842e+00 -5.41994810e-01 -6.88867504e-03] [ 2.57323027e-01 2.57323027e-01 9.51496422e-01 8.07342649e-01 -2.52806139e+00 4.54237223e-01 -6.44495040e-02 -5.76472759e-01 1.35414565e+00 8.78620863e-01 -6.47538364e-01 -1.82952511e+00 2.21056208e-01 6.66954517e-01 1.39855158e+00] [ 5.20859599e-01 5.20859599e-01 -4.37727392e-01 5.33788502e-02 5.06125271e-01 9.59994912e-01 -1.20212865e+00 -1.24051952e+00 2.03090644e+00 -1.86541891e+00 -7.39835262e-01 -1.10603416e+00 2.08492446e+00 -6.43390238e-01 -7.27825105e-01] [ 9.17268455e-01 9.17268455e-01 7.49187112e-01 -4.68787014e-01 -6.04881287e-01 2.79796600e-01 -1.12536490e+00 1.40085721e+00 3.65720630e-01 1.25322044e+00 -6.08995080e-01 1.28277576e+00 -2.35888076e+00 -1.23578250e+00 -4.60872173e-01] [-1.19919457e-01 -1.19919457e-01 -8.01555753e-01 -1.70989946e-01 3.68705541e-01 2.76498973e-01 -2.66461492e-01 1.23389995e+00 -6.83183789e-01 -3.07721496e+00 -1.07270658e+00 2.02895433e-01 1.25983381e+00 5.71837962e-01 -3.50048184e-01]] [[-9.30638134e-01 -9.30638134e-01 7.79502988e-02 2.03326821e-01 1.27096224e+00 -7.26113141e-01 -1.07670403e+00 1.43327788e-01 1.76481760e+00 -5.49723506e-01 7.03281701e-01 1.41260874e+00 1.46600699e+00 1.66027832e+00 -8.03055882e-01] [ 
5.90500057e-01 5.90500057e-01 -9.86007094e-01 -1.11192775e+00 1.82941592e+00 1.24109101e+00 -3.44878912e-01 8.91525030e-01 -9.52307463e-01 -2.81142592e-01 -3.88807446e-01 -1.66039932e+00 3.21956843e-01 -1.14852524e+00 4.91483361e-01] [ 1.03761542e+00 1.03761542e+00 -8.89018527e-04 -1.99485287e-01 2.11463857e+00 2.06836149e-01 4.87662822e-01 9.43898261e-01 3.80620301e-01 6.60819292e-01 9.12609100e-02 3.86948884e-01 -7.19335005e-02 2.86029267e+00 1.27123201e+00] [-2.29660749e-01 -2.29660749e-01 5.12749195e-01 3.29985112e-01 1.16899729e+00 -8.31585824e-01 -1.02214360e+00 4.54910211e-02 4.17095304e-01 -4.33888137e-02 -1.43305928e-01 -1.08530748e+00 9.59667265e-01 3.99231315e-01 -1.38501823e+00] [ 2.89190173e-01 2.89190173e-01 9.55932975e-01 4.78348225e-01 1.03507400e+00 -4.51573163e-01 5.10023832e-01 6.66929126e-01 6.56857848e-01 -2.65824747e+00 -1.04265302e-01 -7.03363597e-01 -3.85696888e-01 7.96690732e-02 2.32176828e+00] [ 9.99859869e-01 9.99859869e-01 2.38085121e-01 -2.87303895e-01 -3.83658975e-01 -2.09629202e+00 -1.05732262e+00 -4.41875130e-01 -1.19196653e+00 7.44683072e-02 -1.60944450e+00 3.68965894e-01 -2.27712199e-01 -1.08507586e+00 1.83381271e+00] [ 1.70765053e-02 1.70765053e-02 1.30334711e+00 1.72719955e-02 9.22375843e-02 3.69767547e-01 3.30672592e-01 1.53551519e-01 -3.38284165e-01 1.64088368e+00 8.59427750e-01 -1.35219419e+00 1.50516748e-01 -1.83341168e-02 1.15178621e+00] [-2.76732653e-01 -2.76732653e-01 -5.85962951e-01 -1.84600401e+00 1.28081584e+00 6.37114227e-01 -1.15084136e+00 1.56292811e-01 3.56955558e-01 -7.89962292e-01 -5.42953193e-01 1.02348256e+00 6.33320138e-02 -5.12090385e-01 -1.28531468e+00] [-2.58722246e-01 -2.58722246e-01 -1.04386759e+00 -8.12266693e-02 -1.05133426e+00 -4.55088824e-01 1.46150672e+00 -8.12214687e-02 6.66138887e-01 -5.48853993e-01 -1.13803101e+00 4.68457937e-01 1.32481337e+00 -5.23850441e-01 -4.13167804e-01] [ 2.87870497e-01 2.87870497e-01 -7.26105645e-02 -2.25785255e-01 1.26498795e+00 2.94547796e-01 5.02706707e-01 -6.44807398e-01 
-2.42798049e-02 -1.18587308e-01 5.05252518e-02 -2.37339064e-01 -4.99664307e-01 6.48385435e-02 -1.16173553e+00] [-5.42046130e-01 -5.42046130e-01 -3.17010313e-01 1.50010431e+00 -1.52710593e+00 -6.78590059e-01 -1.21770769e-01 4.03297991e-01 -6.62299395e-01 1.33437085e+00 6.52991235e-01 -1.73209751e+00 1.22772940e-01 1.41860828e-01 -7.10764587e-01] [ 3.61131281e-01 3.61131281e-01 -2.86840582e+00 -8.32575381e-01 3.93167697e-02 -2.14497559e-02 -1.40391409e+00 1.13223732e+00 3.76652420e-01 6.07949615e-01 6.48595870e-01 -6.97976291e-01 -3.48420978e-01 5.97987354e-01 1.82049108e+00] [-1.81227887e+00 -1.81227887e+00 1.32953537e+00 2.66537428e+00 9.40917850e-01 -2.09634566e+00 1.55190313e+00 -1.49255991e+00 -9.07792211e-01 -9.09512997e-01 -7.07280517e-01 -5.20950973e-01 3.45402241e-01 -1.06650603e+00 5.30565321e-01] [-7.40257323e-01 -7.40257323e-01 -3.37198913e-01 6.98469400e-01 5.56178130e-02 -1.48039639e+00 -1.00272846e+00 -8.11246317e-03 -2.14252830e-03 -1.06250435e-01 1.15118432e+00 1.36648506e-01 -8.43003154e-01 -8.92096758e-01 -6.95053995e-01]] [[-1.69232368e-01 -1.69232368e-01 5.50947726e-01 -2.12103948e-01 3.22090417e-01 6.30465865e-01 2.25081027e-01 4.36277717e-01 1.66636944e-01 6.01921082e-01 -7.09678650e-01 2.26386237e+00 -5.02142847e-01 1.68596220e+00 4.74659801e-01] [ 1.26110530e+00 1.26110530e+00 -5.13189197e-01 5.24306893e-01 1.45680988e+00 4.36027348e-01 -5.04858792e-01 4.87893194e-01 -6.46470368e-01 3.65833163e-01 1.65005493e+00 -1.62163234e+00 2.05931887e-01 1.68283030e-01 5.11609614e-01] [ 3.01973164e-01 3.01973164e-01 -6.67988181e-01 -2.26984903e-01 1.67775917e+00 -1.32162869e+00 -1.65747595e+00 -9.81210589e-01 -2.32270503e+00 9.99701738e-01 -3.51871341e-01 -6.35269165e-01 8.22300732e-01 8.55928212e-02 1.02471900e+00] [-2.59602487e-01 -2.59602487e-01 1.94107354e-01 -8.15540254e-01 -4.11585569e-01 3.90159935e-01 4.90965664e-01 1.11396432e+00 2.29970515e-01 -1.04020230e-01 -1.18532844e-01 -3.39584529e-01 -1.02020800e-01 3.63462046e-02 1.22879839e+00] 
[-1.16588807e+00 -1.16588807e+00 8.04931641e-01 -5.19155681e-01 -3.77498478e-01 2.87719220e-01 -1.58386898e+00 -1.41537726e+00 4.28883374e-01 1.71877235e-01 -4.05139387e-01 -2.34142274e-01 1.37994099e+00 8.87932107e-02 1.97340751e+00] [-1.61549139e+00 -1.61549139e+00 6.98910177e-01 3.95247370e-01 9.98534858e-01 -2.77890921e-01 5.09553075e-01 2.08675727e-01 -4.81842875e-01 4.71514374e-01 -3.85375500e-01 -3.64785492e-01 2.23388135e-01 -2.21145928e-01 -1.96418852e-01] [-9.91838127e-02 -9.91838127e-02 3.46604586e-01 7.67581999e-01 -1.18273664e+00 1.04935706e+00 5.11997521e-01 -1.47289693e+00 8.25906575e-01 -1.95114180e-01 -1.10275543e+00 2.29153827e-01 -6.83164537e-01 9.01331723e-01 9.06898260e-01] [-3.87570441e-01 -3.87570441e-01 -1.69471216e+00 9.54446673e-01 -1.27457058e+00 -6.30191922e-01 -2.63563573e-01 -6.50861561e-01 -1.09317374e+00 3.44041698e-02 -4.19741690e-01 -6.99203730e-01 -3.14581108e+00 -3.89207184e-01 -1.33030176e+00] [-6.78564787e-01 -6.78564787e-01 -4.92266744e-01 -1.11362112e+00 -1.29834935e-01 1.17809200e+00 8.40084255e-01 -5.40674806e-01 2.58864492e-01 8.06644440e-01 3.94640058e-01 -4.30417880e-02 1.69964835e-01 6.40770018e-01 -1.32059395e+00] [ 3.75220865e-01 3.75220865e-01 5.73554933e-01 -7.55645692e-01 -7.47553110e-01 8.53855133e-01 -1.12371814e+00 -1.11456323e+00 -2.41685688e-01 -2.90638149e-01 -1.03732014e+00 -8.19100499e-01 -3.20301962e+00 4.43014711e-01 1.37198687e+00] [-3.23798209e-01 -3.23798209e-01 -9.23423111e-01 1.36457038e+00 1.17140961e+00 -1.09876370e+00 1.50946808e+00 2.60108948e+00 1.68257445e-01 5.59990168e-01 6.79463446e-01 2.66654223e-01 6.20249622e-02 7.66603589e-01 -1.67200220e+00] [-3.25736821e-01 -3.25736821e-01 2.77188867e-01 1.04350102e+00 4.66627032e-01 -1.11400485e+00 -2.48189971e-01 -1.23966956e+00 -2.23265573e-01 -6.24147892e-01 2.18629479e+00 7.96088994e-01 -1.28528583e+00 -6.20582759e-01 6.37751043e-01] [-3.62185866e-01 -3.62185866e-01 -6.44482851e-01 -1.68741846e+00 -6.89650834e-01 -2.91265249e-01 4.41005856e-01 
-5.41451991e-01 4.71562475e-01 1.81137204e-01 -6.44340634e-01 2.46725023e-01 -1.92187503e-01 -1.70529091e+00 1.41354012e+00] [-1.28892076e+00 -1.28892076e+00 -8.00857604e-01 7.77622581e-01 7.77060509e-01 -8.95922929e-02 1.12238109e+00 -2.28318140e-01 1.18872844e-01 -4.71152395e-01 3.60368103e-01 -1.72973633e+00 9.39558268e-01 -9.74934697e-01 7.89400041e-02]] [[-1.69232368e-01 -1.69232368e-01 5.50947726e-01 -2.12103948e-01 3.22090417e-01 6.30465865e-01 2.25081027e-01 4.36277717e-01 1.66636944e-01 6.01921082e-01 -7.09678650e-01 2.26386237e+00 -5.02142847e-01 1.68596220e+00 4.74659801e-01] [ 1.26110530e+00 1.26110530e+00 -5.13189197e-01 5.24306893e-01 1.45680988e+00 4.36027348e-01 -5.04858792e-01 4.87893194e-01 -6.46470368e-01 3.65833163e-01 1.65005493e+00 -1.62163234e+00 2.05931887e-01 1.68283030e-01 5.11609614e-01] [ 3.01973164e-01 3.01973164e-01 -6.67988181e-01 -2.26984903e-01 1.67775917e+00 -1.32162869e+00 -1.65747595e+00 -9.81210589e-01 -2.32270503e+00 9.99701738e-01 -3.51871341e-01 -6.35269165e-01 8.22300732e-01 8.55928212e-02 1.02471900e+00] [-2.59602487e-01 -2.59602487e-01 1.94107354e-01 -8.15540254e-01 -4.11585569e-01 3.90159935e-01 4.90965664e-01 1.11396432e+00 2.29970515e-01 -1.04020230e-01 -1.18532844e-01 -3.39584529e-01 -1.02020800e-01 3.63462046e-02 1.22879839e+00] [-1.16588807e+00 -1.16588807e+00 8.04931641e-01 -5.19155681e-01 -3.77498478e-01 2.87719220e-01 -1.58386898e+00 -1.41537726e+00 4.28883374e-01 1.71877235e-01 -4.05139387e-01 -2.34142274e-01 1.37994099e+00 8.87932107e-02 1.97340751e+00] [-1.61549139e+00 -1.61549139e+00 6.98910177e-01 3.95247370e-01 9.98534858e-01 -2.77890921e-01 5.09553075e-01 2.08675727e-01 -4.81842875e-01 4.71514374e-01 -3.85375500e-01 -3.64785492e-01 2.23388135e-01 -2.21145928e-01 -1.96418852e-01] [-9.91838127e-02 -9.91838127e-02 3.46604586e-01 7.67581999e-01 -1.18273664e+00 1.04935706e+00 5.11997521e-01 -1.47289693e+00 8.25906575e-01 -1.95114180e-01 -1.10275543e+00 2.29153827e-01 -6.83164537e-01 9.01331723e-01 
9.06898260e-01] [-3.87570441e-01 -3.87570441e-01 -1.69471216e+00 9.54446673e-01 -1.27457058e+00 -6.30191922e-01 -2.63563573e-01 -6.50861561e-01 -1.09317374e+00 3.44041698e-02 -4.19741690e-01 -6.99203730e-01 -3.14581108e+00 -3.89207184e-01 -1.33030176e+00] [-6.78564787e-01 -6.78564787e-01 -4.92266744e-01 -1.11362112e+00 -1.29834935e-01 1.17809200e+00 8.40084255e-01 -5.40674806e-01 2.58864492e-01 8.06644440e-01 3.94640058e-01 -4.30417880e-02 1.69964835e-01 6.40770018e-01 -1.32059395e+00] [ 3.75220865e-01 3.75220865e-01 5.73554933e-01 -7.55645692e-01 -7.47553110e-01 8.53855133e-01 -1.12371814e+00 -1.11456323e+00 -2.41685688e-01 -2.90638149e-01 -1.03732014e+00 -8.19100499e-01 -3.20301962e+00 4.43014711e-01 1.37198687e+00] [-3.23798209e-01 -3.23798209e-01 -9.23423111e-01 1.36457038e+00 1.17140961e+00 -1.09876370e+00 1.50946808e+00 2.60108948e+00 1.68257445e-01 5.59990168e-01 6.79463446e-01 2.66654223e-01 6.20249622e-02 7.66603589e-01 -1.67200220e+00] [-3.25736821e-01 -3.25736821e-01 2.77188867e-01 1.04350102e+00 4.66627032e-01 -1.11400485e+00 -2.48189971e-01 -1.23966956e+00 -2.23265573e-01 -6.24147892e-01 2.18629479e+00 7.96088994e-01 -1.28528583e+00 -6.20582759e-01 6.37751043e-01] [-3.62185866e-01 -3.62185866e-01 -6.44482851e-01 -1.68741846e+00 -6.89650834e-01 -2.91265249e-01 4.41005856e-01 -5.41451991e-01 4.71562475e-01 1.81137204e-01 -6.44340634e-01 2.46725023e-01 -1.92187503e-01 -1.70529091e+00 1.41354012e+00] [-1.28892076e+00 -1.28892076e+00 -8.00857604e-01 7.77622581e-01 7.77060509e-01 -8.95922929e-02 1.12238109e+00 -2.28318140e-01 1.18872844e-01 -4.71152395e-01 3.60368103e-01 -1.72973633e+00 9.39558268e-01 -9.74934697e-01 7.89400041e-02]]]]; ov_res: [[[[ 1.85846400e+00 1.85846400e+00 2.28694177e+00 6.62641168e-01 1.40224412e-01 -5.35668194e-01 -2.59748876e-01 -3.37152123e-01 -1.98108420e-01 4.51730154e-02 -3.97378504e-01 1.13467371e+00 1.27687168e+00 -1.70145929e+00 1.85702336e+00] [ 9.46191419e-03 9.46191419e-03 -7.00047731e-01 -8.17364752e-01 9.16555166e-01 
1.12986457e+00 1.93429744e+00 1.04095721e+00 9.33815360e-01 8.61143231e-01 -1.14370096e+00 -2.30814174e-01 1.03399849e+00 1.73801675e-01 2.46382773e-01] [-4.95055169e-02 -4.95055169e-02 -2.89810956e-01 -4.48993951e-01 -1.64123547e+00 -1.07720935e+00 1.60011500e-01 7.23367870e-01 -1.91623592e+00 9.22111630e-01 -1.78870887e-01 1.62888014e+00 3.86201680e-01 3.82887483e-01 7.20653892e-01] [-2.79733360e-01 -2.79733360e-01 4.84626263e-01 -5.65503836e-02 -7.87618756e-03 3.87568116e-01 -8.53876472e-02 1.70734024e+00 -1.83327585e-01 -6.82658195e-01 -2.20436037e-01 1.36714923e+00 6.29327238e-01 -8.58345151e-01 -1.03891683e+00] [-2.21274900e+00 -2.21274900e+00 5.15011907e-01 4.99742568e-01 1.34956932e+00 6.05715871e-01 -3.90720844e-01 2.04315353e-02 -2.12761566e-01 6.12653136e-01 -2.61515766e-01 -8.09877157e-01 -7.23450780e-02 8.07661265e-02 1.07121181e+00] [ 1.21629894e+00 1.21629894e+00 2.71967024e-01 -8.27855527e-01 2.61172295e-01 6.74932659e-01 9.99992847e-01 3.94154936e-01 -1.26151264e-01 3.67118716e-01 -3.27029645e-01 -5.47213435e-01 -3.62318486e-01 5.69130778e-01 8.65485907e-01] [ 5.30089855e-01 5.30089855e-01 9.77215707e-01 -1.93057442e+00 -1.51394701e+00 -7.20630944e-01 2.68324757e+00 6.51284516e-01 -3.52682441e-01 -9.94830966e-01 -1.26194382e+00 -6.92242026e-01 -5.57329178e-01 -1.63262689e+00 -3.10761958e-01] [-3.27417515e-02 -3.27417515e-02 -8.75744760e-01 -5.61513186e-01 -7.49801755e-01 -1.22454524e+00 -6.59612298e-01 1.03664994e-01 -2.42914343e+00 7.48705447e-01 6.98602021e-01 -2.12263376e-01 -2.92139024e-01 -2.05111519e-01 -3.40124480e-02] [ 1.09216321e+00 1.09216321e+00 1.27731292e-02 -1.76914012e+00 7.35742509e-01 1.00924361e+00 -6.19224906e-01 -4.61307466e-01 -1.41465211e+00 5.18248864e-02 -1.41937268e+00 1.71399355e+00 1.42897093e+00 1.93766737e+00 5.64390957e-01] [ 2.90318519e-01 2.90318519e-01 -4.70579892e-01 1.39740217e+00 -2.18228340e+00 1.78394526e-01 3.78001705e-02 1.99400270e+00 -7.38690376e-01 4.44344059e-02 -4.21037585e-01 -7.02174366e-01 
1.46828842e+00 -5.41994810e-01 -6.88867504e-03] [ 2.57323027e-01 2.57323027e-01 9.51496422e-01 8.07342649e-01 -2.52806139e+00 4.54237223e-01 -6.44495040e-02 -5.76472759e-01 1.35414565e+00 8.78620863e-01 -6.47538364e-01 -1.82952511e+00 2.21056208e-01 6.66954517e-01 1.39855158e+00] [ 5.20859599e-01 5.20859599e-01 -4.37727392e-01 5.33788502e-02 5.06125271e-01 9.59994912e-01 -1.20212865e+00 -1.24051952e+00 2.03090644e+00 -1.86541891e+00 -7.39835262e-01 -1.10603416e+00 2.08492446e+00 -6.43390238e-01 -7.27825105e-01] [ 9.17268455e-01 9.17268455e-01 7.49187112e-01 -4.68787014e-01 -6.04881287e-01 2.79796600e-01 -1.12536490e+00 1.40085721e+00 3.65720630e-01 1.25322044e+00 -6.08995080e-01 1.28277576e+00 -2.35888076e+00 -1.23578250e+00 -4.60872173e-01] [-1.19919457e-01 -1.19919457e-01 -8.01555753e-01 -1.70989946e-01 3.68705541e-01 2.76498973e-01 -2.66461492e-01 1.23389995e+00 -6.83183789e-01 -3.07721496e+00 -1.07270658e+00 2.02895433e-01 1.25983381e+00 5.71837962e-01 -3.50048184e-01]] [[-9.30638134e-01 -9.30638134e-01 7.79502988e-02 2.03326821e-01 1.27096224e+00 -7.26113141e-01 -1.07670403e+00 1.43327788e-01 1.76481760e+00 -5.49723506e-01 7.03281701e-01 1.41260874e+00 1.46600699e+00 1.66027832e+00 -8.03055882e-01] [ 5.90500057e-01 5.90500057e-01 -9.86007094e-01 -1.11192775e+00 1.82941592e+00 1.24109101e+00 -3.44878912e-01 8.91525030e-01 -9.52307463e-01 -2.81142592e-01 -3.88807446e-01 -1.66039932e+00 3.21956843e-01 -1.14852524e+00 4.91483361e-01] [ 1.03761542e+00 1.03761542e+00 -8.89018527e-04 -1.99485287e-01 2.11463857e+00 2.06836149e-01 4.87662822e-01 9.43898261e-01 3.80620301e-01 6.60819292e-01 9.12609100e-02 3.86948884e-01 -7.19335005e-02 2.86029267e+00 1.27123201e+00] [-2.29660749e-01 -2.29660749e-01 5.12749195e-01 3.29985112e-01 1.16899729e+00 -8.31585824e-01 -1.02214360e+00 4.54910211e-02 4.17095304e-01 -4.33888137e-02 -1.43305928e-01 -1.08530748e+00 9.59667265e-01 3.99231315e-01 -1.38501823e+00] [ 2.89190173e-01 2.89190173e-01 9.55932975e-01 4.78348225e-01 
1.03507400e+00 -4.51573163e-01 5.10023832e-01 6.66929126e-01 6.56857848e-01 -2.65824747e+00 -1.04265302e-01 -7.03363597e-01 -3.85696888e-01 7.96690732e-02 2.32176828e+00] [ 9.99859869e-01 9.99859869e-01 2.38085121e-01 -2.87303895e-01 -3.83658975e-01 -2.09629202e+00 -1.05732262e+00 -4.41875130e-01 -1.19196653e+00 7.44683072e-02 -1.60944450e+00 3.68965894e-01 -2.27712199e-01 -1.08507586e+00 1.83381271e+00] [ 1.70765053e-02 1.70765053e-02 1.30334711e+00 1.72719955e-02 9.22375843e-02 3.69767547e-01 3.30672592e-01 1.53551519e-01 -3.38284165e-01 1.64088368e+00 8.59427750e-01 -1.35219419e+00 1.50516748e-01 -1.83341168e-02 1.15178621e+00] [-2.76732653e-01 -2.76732653e-01 -5.85962951e-01 -1.84600401e+00 1.28081584e+00 6.37114227e-01 -1.15084136e+00 1.56292811e-01 3.56955558e-01 -7.89962292e-01 -5.42953193e-01 1.02348256e+00 6.33320138e-02 -5.12090385e-01 -1.28531468e+00] [-2.58722246e-01 -2.58722246e-01 -1.04386759e+00 -8.12266693e-02 -1.05133426e+00 -4.55088824e-01 1.46150672e+00 -8.12214687e-02 6.66138887e-01 -5.48853993e-01 -1.13803101e+00 4.68457937e-01 1.32481337e+00 -5.23850441e-01 -4.13167804e-01] [ 2.87870497e-01 2.87870497e-01 -7.26105645e-02 -2.25785255e-01 1.26498795e+00 2.94547796e-01 5.02706707e-01 -6.44807398e-01 -2.42798049e-02 -1.18587308e-01 5.05252518e-02 -2.37339064e-01 -4.99664307e-01 6.48385435e-02 -1.16173553e+00] [-5.42046130e-01 -5.42046130e-01 -3.17010313e-01 1.50010431e+00 -1.52710593e+00 -6.78590059e-01 -1.21770769e-01 4.03297991e-01 -6.62299395e-01 1.33437085e+00 6.52991235e-01 -1.73209751e+00 1.22772940e-01 1.41860828e-01 -7.10764587e-01] [ 3.61131281e-01 3.61131281e-01 -2.86840582e+00 -8.32575381e-01 3.93167697e-02 -2.14497559e-02 -1.40391409e+00 1.13223732e+00 3.76652420e-01 6.07949615e-01 6.48595870e-01 -6.97976291e-01 -3.48420978e-01 5.97987354e-01 1.82049108e+00] [-1.81227887e+00 -1.81227887e+00 1.32953537e+00 2.66537428e+00 9.40917850e-01 -2.09634566e+00 1.55190313e+00 -1.49255991e+00 -9.07792211e-01 -9.09512997e-01 -7.07280517e-01 
-5.20950973e-01 3.45402241e-01 -1.06650603e+00 5.30565321e-01] [-7.40257323e-01 -7.40257323e-01 -3.37198913e-01 6.98469400e-01 5.56178130e-02 -1.48039639e+00 -1.00272846e+00 -8.11246317e-03 -2.14252830e-03 -1.06250435e-01 1.15118432e+00 1.36648506e-01 -8.43003154e-01 -8.92096758e-01 -6.95053995e-01]] [[-1.69232368e-01 -1.69232368e-01 5.50947726e-01 -2.12103948e-01 3.22090417e-01 6.30465865e-01 2.25081027e-01 4.36277717e-01 1.66636944e-01 6.01921082e-01 -7.09678650e-01 2.26386237e+00 -5.02142847e-01 1.68596220e+00 4.74659801e-01] [ 1.26110530e+00 1.26110530e+00 -5.13189197e-01 5.24306893e-01 1.45680988e+00 4.36027348e-01 -5.04858792e-01 4.87893194e-01 -6.46470368e-01 3.65833163e-01 1.65005493e+00 -1.62163234e+00 2.05931887e-01 1.68283030e-01 5.11609614e-01] [ 3.01973164e-01 3.01973164e-01 -6.67988181e-01 -2.26984903e-01 1.67775917e+00 -1.32162869e+00 -1.65747595e+00 -9.81210589e-01 -2.32270503e+00 9.99701738e-01 -3.51871341e-01 -6.35269165e-01 8.22300732e-01 8.55928212e-02 1.02471900e+00] [-2.59602487e-01 -2.59602487e-01 1.94107354e-01 -8.15540254e-01 -4.11585569e-01 3.90159935e-01 4.90965664e-01 1.11396432e+00 2.29970515e-01 -1.04020230e-01 -1.18532844e-01 -3.39584529e-01 -1.02020800e-01 3.63462046e-02 1.22879839e+00] [-1.16588807e+00 -1.16588807e+00 8.04931641e-01 -5.19155681e-01 -3.77498478e-01 2.87719220e-01 -1.58386898e+00 -1.41537726e+00 4.28883374e-01 1.71877235e-01 -4.05139387e-01 -2.34142274e-01 1.37994099e+00 8.87932107e-02 1.97340751e+00] [-1.61549139e+00 -1.61549139e+00 6.98910177e-01 3.95247370e-01 9.98534858e-01 -2.77890921e-01 5.09553075e-01 2.08675727e-01 -4.81842875e-01 4.71514374e-01 -3.85375500e-01 -3.64785492e-01 2.23388135e-01 -2.21145928e-01 -1.96418852e-01] [-9.91838127e-02 -9.91838127e-02 3.46604586e-01 7.67581999e-01 -1.18273664e+00 1.04935706e+00 5.11997521e-01 -1.47289693e+00 8.25906575e-01 -1.95114180e-01 -1.10275543e+00 2.29153827e-01 -6.83164537e-01 9.01331723e-01 9.06898260e-01] [-3.87570441e-01 -3.87570441e-01 -1.69471216e+00 
9.54446673e-01 -1.27457058e+00 -6.30191922e-01 -2.63563573e-01 -6.50861561e-01 -1.09317374e+00 3.44041698e-02 -4.19741690e-01 -6.99203730e-01 -3.14581108e+00 -3.89207184e-01 -1.33030176e+00] [-6.78564787e-01 -6.78564787e-01 -4.92266744e-01 -1.11362112e+00 -1.29834935e-01 1.17809200e+00 8.40084255e-01 -5.40674806e-01 2.58864492e-01 8.06644440e-01 3.94640058e-01 -4.30417880e-02 1.69964835e-01 6.40770018e-01 -1.32059395e+00] [ 3.75220865e-01 3.75220865e-01 5.73554933e-01 -7.55645692e-01 -7.47553110e-01 8.53855133e-01 -1.12371814e+00 -1.11456323e+00 -2.41685688e-01 -2.90638149e-01 -1.03732014e+00 -8.19100499e-01 -3.20301962e+00 4.43014711e-01 1.37198687e+00] [-3.23798209e-01 -3.23798209e-01 -9.23423111e-01 1.36457038e+00 1.17140961e+00 -1.09876370e+00 1.50946808e+00 2.60108948e+00 1.68257445e-01 5.59990168e-01 6.79463446e-01 2.66654223e-01 6.20249622e-02 7.66603589e-01 -1.67200220e+00] [-3.25736821e-01 -3.25736821e-01 2.77188867e-01 1.04350102e+00 4.66627032e-01 -1.11400485e+00 -2.48189971e-01 -1.23966956e+00 -2.23265573e-01 -6.24147892e-01 2.18629479e+00 7.96088994e-01 -1.28528583e+00 -6.20582759e-01 6.37751043e-01] [-3.62185866e-01 -3.62185866e-01 -6.44482851e-01 -1.68741846e+00 -6.89650834e-01 -2.91265249e-01 4.41005856e-01 -5.41451991e-01 4.71562475e-01 1.81137204e-01 -6.44340634e-01 2.46725023e-01 -1.92187503e-01 -1.70529091e+00 1.41354012e+00] [-1.28892076e+00 -1.28892076e+00 -8.00857604e-01 7.77622581e-01 7.77060509e-01 -8.95922929e-02 1.12238109e+00 -2.28318140e-01 1.18872844e-01 -4.71152395e-01 3.60368103e-01 -1.72973633e+00 9.39558268e-01 -9.74934697e-01 7.89400041e-02]] [[-1.69232368e-01 -1.69232368e-01 5.50947726e-01 -2.12103948e-01 3.22090417e-01 6.30465865e-01 2.25081027e-01 4.36277717e-01 1.66636944e-01 6.01921082e-01 -7.09678650e-01 2.26386237e+00 -5.02142847e-01 1.68596220e+00 4.74659801e-01] [ 1.26110530e+00 1.26110530e+00 -5.13189197e-01 5.24306893e-01 1.45680988e+00 4.36027348e-01 -5.04858792e-01 4.87893194e-01 -6.46470368e-01 3.65833163e-01 
1.65005493e+00 -1.62163234e+00 2.05931887e-01 1.68283030e-01 5.11609614e-01] [ 3.01973164e-01 3.01973164e-01 -6.67988181e-01 -2.26984903e-01 1.67775917e+00 -1.32162869e+00 -1.65747595e+00 -9.81210589e-01 -2.32270503e+00 9.99701738e-01 -3.51871341e-01 -6.35269165e-01 8.22300732e-01 8.55928212e-02 1.02471900e+00] [-2.59602487e-01 -2.59602487e-01 1.94107354e-01 -8.15540254e-01 -4.11585569e-01 3.90159935e-01 4.90965664e-01 1.11396432e+00 2.29970515e-01 -1.04020230e-01 -1.18532844e-01 -3.39584529e-01 -1.02020800e-01 3.63462046e-02 1.22879839e+00] [-1.16588807e+00 -1.16588807e+00 8.04931641e-01 -5.19155681e-01 -3.77498478e-01 2.87719220e-01 -1.58386898e+00 -1.41537726e+00 4.28883374e-01 1.71877235e-01 -4.05139387e-01 -2.34142274e-01 1.37994099e+00 8.87932107e-02 1.97340751e+00] [-1.61549139e+00 -1.61549139e+00 6.98910177e-01 3.95247370e-01 9.98534858e-01 -2.77890921e-01 5.09553075e-01 2.08675727e-01 -4.81842875e-01 4.71514374e-01 -3.85375500e-01 -3.64785492e-01 2.23388135e-01 -2.21145928e-01 -1.96418852e-01] [-9.91838127e-02 -9.91838127e-02 3.46604586e-01 7.67581999e-01 -1.18273664e+00 1.04935706e+00 5.11997521e-01 -1.47289693e+00 8.25906575e-01 -1.95114180e-01 -1.10275543e+00 2.29153827e-01 -6.83164537e-01 9.01331723e-01 9.06898260e-01] [-3.87570441e-01 -3.87570441e-01 -1.69471216e+00 9.54446673e-01 -1.27457058e+00 -6.30191922e-01 -2.63563573e-01 -6.50861561e-01 -1.09317374e+00 3.44041698e-02 -4.19741690e-01 -6.99203730e-01 -3.14581108e+00 -3.89207184e-01 -1.33030176e+00] [-6.78564787e-01 -6.78564787e-01 -4.92266744e-01 -1.11362112e+00 -1.29834935e-01 1.17809200e+00 8.40084255e-01 -5.40674806e-01 2.58864492e-01 8.06644440e-01 3.94640058e-01 -4.30417880e-02 1.69964835e-01 6.40770018e-01 -1.32059395e+00] [ 3.75220865e-01 3.75220865e-01 5.73554933e-01 -7.55645692e-01 -7.47553110e-01 8.53855133e-01 -1.12371814e+00 -1.11456323e+00 -2.41685688e-01 -2.90638149e-01 -1.03732014e+00 -8.19100499e-01 -3.20301962e+00 4.43014711e-01 1.37198687e+00] [-3.23798209e-01 -3.23798209e-01 
-9.23423111e-01 1.36457038e+00 1.17140961e+00 -1.09876370e+00 1.50946808e+00 2.60108948e+00 1.68257445e-01 5.59990168e-01 6.79463446e-01 2.66654223e-01 6.20249622e-02 7.66603589e-01 -1.67200220e+00] [-3.25736821e-01 -3.25736821e-01 2.77188867e-01 1.04350102e+00 4.66627032e-01 -1.11400485e+00 -2.48189971e-01 -1.23966956e+00 -2.23265573e-01 -6.24147892e-01 2.18629479e+00 7.96088994e-01 -1.28528583e+00 -6.20582759e-01 6.37751043e-01] [-3.62185866e-01 -3.62185866e-01 -6.44482851e-01 -1.68741846e+00 -6.89650834e-01 -2.91265249e-01 4.41005856e-01 -5.41451991e-01 4.71562475e-01 1.81137204e-01 -6.44340634e-01 2.46725023e-01 -1.92187503e-01 -1.70529091e+00 1.41354012e+00] [-1.28892076e+00 -1.28892076e+00 -8.00857604e-01 7.77622581e-01 7.77060509e-01 -8.95922929e-02 1.12238109e+00 -2.28318140e-01 1.18872844e-01 -4.71152395e-01 3.60368103e-01 -1.72973633e+00 9.39558268e-01 -9.74934697e-01 7.89400041e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(0, 0, 0, 0, 0, 0) - mode:replicate - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5493.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0, 0, 0, 0, 0]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="replicate"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 3.66389960e-01 3.04442048e-01 6.59496903e-01 -2.74981052e-01 3.06667149e-01 4.61497605e-01 1.16879642e+00 1.38208866e-01 -3.03362906e-01 2.01282889e-01 1.54676273e-01 5.05593181e-01 -1.11625183e+00 1.23169087e-01] [ 1.83808535e-01 -3.15759003e-01 8.10418129e-02 1.00575483e+00 -1.05000329e+00 -6.14527285e-01 2.27644712e-01 1.54873526e+00 2.43262202e-01 3.42542410e-01 9.10101175e-01 -7.24414647e-01 -6.44587398e-01 6.54800534e-01] [ 5.22096694e-01 -4.02223706e-01 -8.03955793e-01 -4.60379124e-01 -1.15595222e+00 -1.59298003e+00 -1.88921824e-01 -1.80069834e-01 1.50561953e+00 -7.43077636e-01 8.46103668e-01 -8.51529837e-01 6.29012883e-01 2.77232826e-01] [-1.86511827e+00 -5.81100583e-01 1.09850943e+00 1.27697015e+00 4.97980267e-01 -4.95979190e-02 -8.84653449e-01 -1.67112470e-01 -6.63207829e-01 -9.58279908e-01 4.53759670e-01 4.81827915e-01 -3.19007754e-01 4.80778694e-01] [-1.85119420e-01 6.50018573e-01 4.01976943e-01 -2.72938430e-01 2.80642271e-01 -1.80564976e+00 9.92286682e-01 -5.42335212e-01 -1.76565838e+00 7.46097088e-01 -2.21528653e-02 1.35591912e+00 -1.84579027e+00 -7.13537991e-01] [ 7.50030696e-01 3.35448712e-01 -9.39858332e-02 -2.08206773e+00 8.35610807e-01 7.23332703e-01 -2.67854273e-01 -6.08420134e-01 1.41060781e+00 -9.91586506e-01 -8.27032551e-02 1.64370012e+00 9.76855755e-01 -6.21535540e-01] [ 4.48132336e-01 -2.88595706e-01 -2.19692421e+00 -9.99234378e-01 -6.00893676e-01 -4.41341013e-01 -2.19677711e+00 2.38641486e-01 -1.06008463e-01 6.70828462e-01 2.29566002e+00 7.27540374e-01 -2.26630640e+00 
-1.14824630e-01] [-5.78962028e-01 1.72482088e-01 2.68817043e+00 -6.60151124e-01 6.46796674e-02 9.23289835e-01 7.47504771e-01 -1.42989650e-01 -9.07308996e-01 4.96918917e-01 -1.33697271e+00 2.04257756e-01 -5.66144645e-01 9.00796056e-01] [-1.02951765e+00 -6.67399883e-01 -8.94161820e-01 -1.09240973e+00 -3.84434879e-01 -4.62885052e-02 -2.21483573e-01 -6.31784022e-01 -2.51124799e-01 6.18747056e-01 -9.34256375e-01 -1.87351274e+00 -7.67921031e-01 -4.99892265e-01] [-1.14026070e+00 5.08316934e-01 1.17408764e+00 1.02642381e+00 3.27022582e-01 4.67455000e-01 -2.15728775e-01 -1.35844111e+00 2.49346361e-01 2.38364816e+00 1.49382794e+00 3.54467720e-01 1.06020415e+00 1.70846713e+00] [ 6.85113251e-01 5.52351713e-01 -1.98330402e+00 8.86842370e-01 1.02460361e+00 4.72613163e-02 -4.78474438e-01 3.35458040e-01 2.96648890e-01 -6.08667135e-01 -1.99103698e-01 -1.18572287e-01 -5.77104032e-01 2.51709521e-01] [-1.29042193e-01 -1.10533603e-01 2.51455498e+00 7.87486043e-03 -1.98639119e+00 -8.33429396e-01 -3.67953181e-01 -8.45422328e-01 -5.46397746e-01 2.41958022e+00 8.54175389e-01 -6.78350091e-01 2.30592704e+00 -2.37884715e-01] [-4.79527265e-01 -6.85055256e-01 1.93501979e-01 3.78749639e-01 -1.25757623e+00 -2.00805140e+00 -1.61103892e+00 1.73911512e+00 5.30844927e-01 7.07512915e-01 8.15623045e-01 4.12008613e-01 2.69649714e-01 -6.14328861e-01] [-1.19625282e+00 1.12329438e-01 3.78241055e-02 1.29821360e-01 -3.50714982e-01 -8.99472833e-01 6.79736853e-01 2.82673001e-01 6.23803973e-01 -5.45790195e-01 1.06468332e+00 -1.45262766e+00 -4.38290149e-01 -1.34909511e+00]] [[ 6.21571124e-01 -1.30728292e+00 -4.26684350e-01 -1.60116696e+00 3.05923283e-01 -2.72192299e-01 -1.14589989e-01 8.14671278e-01 8.00067961e-01 1.87687588e+00 -1.52595258e+00 -3.98683250e-01 -6.99418306e-01 1.07761073e+00] [ 9.49335545e-02 2.63621271e-01 -1.07798681e-01 2.08798099e+00 -1.79209149e+00 1.01685786e+00 3.33527893e-01 2.24104702e-01 6.65342808e-01 6.36420667e-01 -1.22644462e-01 3.13488424e-01 4.38422352e-01 6.53814256e-01] 
[-9.27173942e-02 8.49888384e-01 1.30710161e+00 -1.54272187e+00 -1.69105291e+00 -9.25311968e-02 1.47546935e+00 -6.65217042e-02 -1.27316678e+00 -7.37458527e-01 -2.21398282e+00 6.12559497e-01 9.51523602e-01 6.75924361e-01] [-3.86069894e-01 2.22872639e+00 -1.04157865e+00 2.54801583e+00 -4.22372371e-01 1.48756117e-01 -1.51726890e+00 4.79070783e-01 2.48683071e+00 2.23807693e+00 -1.89874041e+00 8.11234713e-02 -2.38925529e+00 2.87946016e-01] [-9.37309504e-01 -5.13382614e-01 -8.66076469e-01 5.78510165e-01 -2.22879693e-01 8.53850842e-01 -2.65875868e-02 -1.35434330e-01 -4.53529477e-01 -8.53473186e-01 1.18748236e+00 -5.97530782e-01 -6.59573615e-01 -9.78151500e-01] [ 1.15410481e-02 2.67998111e-02 -9.98081863e-01 1.24788129e+00 -1.20132041e+00 -2.88409263e-01 1.24296117e+00 1.37621924e-01 5.74122667e-01 6.08084023e-01 -1.79546446e-01 3.75825286e-01 1.26034665e+00 -5.35635054e-01] [-4.08788741e-01 1.96453333e+00 -1.08816540e+00 2.29267597e+00 1.47983134e+00 4.20867831e-01 5.68939567e-01 1.68184876e+00 -4.68713075e-01 9.02676806e-02 6.86964393e-01 -1.05973148e+00 1.36423182e+00 5.92619181e-01] [-1.38227606e+00 1.16684234e+00 -4.86907721e-01 1.16904385e-01 1.82413906e-01 -4.18701351e-01 1.06245005e+00 9.25013423e-01 -8.25087488e-01 -2.80143857e-01 1.98401764e-01 -4.08165604e-02 -1.89591372e+00 -4.55731183e-01] [ 1.53896427e+00 9.75089967e-02 5.63876867e-01 7.04607129e-01 1.25636089e+00 2.50446856e-01 -5.02449274e-01 -1.17210138e+00 3.34213406e-01 1.56844747e+00 1.94038153e+00 -4.50253814e-01 -1.48847020e+00 -2.31754079e-01] [-1.40606880e-01 -8.72317076e-01 -3.44132781e-01 -1.36872172e+00 3.51550281e-01 1.33281600e+00 2.62447357e-01 -4.11615103e-01 -1.86262178e+00 2.82469511e+00 -1.35567200e+00 -1.25862539e+00 6.06329143e-01 1.25952911e+00] [-8.89675692e-02 1.25032330e+00 -2.25929804e-02 -1.61416903e-01 -1.49656427e+00 -4.58281666e-01 1.03622627e+00 -9.09209013e-01 1.06419516e+00 6.27669573e-01 -6.00408196e-01 -1.50187120e-01 -5.24645567e-01 -5.64296663e-01] [-3.32479149e-01 
-5.69789588e-01 -3.63684505e-01 6.08096063e-01 1.38755488e+00 -3.02491367e-01 -5.27885914e-01 -1.57143176e-01 -1.15514445e+00 6.34751320e-01 -1.05687857e+00 8.73947024e-01 -1.41225708e+00 7.56015003e-01] [ 1.64908159e+00 3.89938727e-02 -1.50700748e+00 6.99353576e-01 3.30101371e-01 9.90607858e-01 2.01177031e-01 -5.14312804e-01 2.51252145e-01 -1.04832697e+00 -9.09449995e-01 1.25321060e-01 2.56362697e-03 -5.62089272e-02] [-8.92481744e-01 1.38211459e-01 -1.75726044e+00 2.62629986e-01 8.39589357e-01 -2.10451677e-01 -1.06407419e-01 1.07348084e-01 5.72786748e-01 1.99396729e-01 -7.26931989e-01 -2.65057315e-03 -1.93475854e+00 -1.72557867e+00]] [[-1.01670361e+00 9.72085536e-01 -1.61863267e+00 1.90988392e-01 6.66197002e-01 1.04078269e+00 9.27372456e-01 -1.65800035e+00 7.41414070e-01 -1.49015427e+00 -8.47029313e-02 -1.69975966e-01 -2.72993326e-01 1.03117061e+00] [-9.62679923e-01 -5.58086038e-01 3.14355850e-01 7.58967936e-01 5.17558932e-01 -1.34311152e+00 1.11341345e+00 -1.93163490e+00 6.51739001e-01 9.50103581e-01 -3.04786265e-01 -4.15749907e-01 -1.41798162e+00 -3.04232687e-01] [ 2.89149672e-01 2.76577353e-01 -2.84479320e-01 1.38203037e+00 -3.18425298e-01 1.32422727e-02 2.18964314e+00 -2.75317550e+00 1.00928879e+00 1.83140319e-02 -1.67882228e+00 -8.23145986e-01 5.83945930e-01 -1.14139545e+00] [-1.66109240e+00 -5.13782144e-01 1.67849749e-01 -4.63918179e-01 1.25320864e+00 -7.91297257e-01 3.61405849e-01 9.42833841e-01 -1.63680986e-01 1.72123507e-01 5.49101233e-01 -1.44439673e+00 3.68545681e-01 -1.18076289e+00] [-7.38842607e-01 -2.28884473e-01 -2.11103296e+00 4.51702476e-01 2.19418788e+00 3.55515748e-01 -1.10214978e-01 -1.23268557e+00 5.18119812e-01 6.70290053e-01 -7.08857059e-01 -1.08133328e+00 3.02141219e-01 1.68747675e+00] [-4.70842749e-01 -8.10106337e-01 9.59910452e-01 1.35290098e+00 -4.94936764e-01 -5.73688447e-01 -8.69315445e-01 -1.24163099e-01 -2.93002337e-01 -1.43755674e+00 3.55893254e-01 9.64680493e-01 3.95168722e-01 -1.40741539e+00] [-9.89052534e-01 -1.66646302e+00 
8.91612470e-01 2.66614348e-01 -1.00017428e+00 -2.25018114e-01 -2.53566235e-01 2.13078886e-01 -8.90347540e-01 1.26974487e+00 2.04841658e-01 1.00859141e+00 -5.98620117e-01 -1.87795073e-01] [-4.49090332e-01 -9.09542859e-01 -3.88763577e-01 9.52376485e-01 -4.83154684e-01 -1.95186259e-03 -1.69183028e+00 7.82500505e-01 -9.33989525e-01 2.07612395e+00 3.37344587e-01 8.19434285e-01 -1.26888788e+00 -1.83668122e-01] [ 9.54106450e-01 -3.63908410e-02 3.53585303e-01 -9.34690237e-01 1.54449284e+00 9.93585885e-01 -8.74292433e-01 -6.09790027e-01 4.70323473e-01 -5.80163538e-01 4.81328070e-01 7.98014164e-01 -6.49012685e-01 -2.13277981e-01] [ 2.93814808e-01 7.37012178e-02 -1.10900629e+00 3.89792547e-02 -4.57337111e-01 8.59941244e-01 1.24928474e+00 -9.68488336e-01 -5.45563757e-01 -2.06972569e-01 1.21012378e+00 -5.49868941e-02 8.81380260e-01 1.82340240e+00] [-1.16933227e+00 -6.05248548e-02 -1.17598641e+00 5.91334626e-02 1.68756866e+00 7.30193704e-02 1.67583990e+00 -1.73944309e-01 1.65048584e-01 -4.61474329e-01 -8.27643454e-01 -2.78831512e-01 1.16951799e+00 -6.76866412e-01] [ 7.86132589e-02 9.00452793e-01 -1.06169596e-01 1.43959686e-01 1.33865654e-01 6.64523482e-01 -1.18590963e+00 4.28325325e-01 8.62918437e-01 3.28288883e-01 -9.74990845e-01 6.59181923e-02 6.49562716e-01 -2.87616730e-01] [ 1.09501719e+00 1.30263138e+00 2.86742032e-01 6.28689229e-01 -1.96534917e-01 -8.96891594e-01 1.21992931e-01 -2.11322024e-01 -9.23774600e-01 -1.40498266e-01 -9.66386348e-02 5.78823924e-01 -1.70307291e+00 -5.16831577e-01] [ 9.78412181e-02 -9.19753611e-01 -5.53380251e-01 -2.36804962e+00 4.50731337e-01 -1.63804996e+00 -3.18275779e-01 3.04706216e-01 -1.32368445e-01 2.94365495e-01 6.41859531e-01 -3.35874796e-01 9.23693836e-01 7.44882345e-01]]]]; ov_res: [[[[ 3.66389960e-01 3.04442048e-01 6.59496903e-01 -2.74981052e-01 3.06667149e-01 4.61497605e-01 1.16879642e+00 1.38208866e-01 -3.03362906e-01 2.01282889e-01 1.54676273e-01 5.05593181e-01 -1.11625183e+00 1.23169087e-01] [ 1.83808535e-01 -3.15759003e-01 
8.10418129e-02 1.00575483e+00 -1.05000329e+00 -6.14527285e-01 2.27644712e-01 1.54873526e+00 2.43262202e-01 3.42542410e-01 9.10101175e-01 -7.24414647e-01 -6.44587398e-01 6.54800534e-01] [ 5.22096694e-01 -4.02223706e-01 -8.03955793e-01 -4.60379124e-01 -1.15595222e+00 -1.59298003e+00 -1.88921824e-01 -1.80069834e-01 1.50561953e+00 -7.43077636e-01 8.46103668e-01 -8.51529837e-01 6.29012883e-01 2.77232826e-01] [-1.86511827e+00 -5.81100583e-01 1.09850943e+00 1.27697015e+00 4.97980267e-01 -4.95979190e-02 -8.84653449e-01 -1.67112470e-01 -6.63207829e-01 -9.58279908e-01 4.53759670e-01 4.81827915e-01 -3.19007754e-01 4.80778694e-01] [-1.85119420e-01 6.50018573e-01 4.01976943e-01 -2.72938430e-01 2.80642271e-01 -1.80564976e+00 9.92286682e-01 -5.42335212e-01 -1.76565838e+00 7.46097088e-01 -2.21528653e-02 1.35591912e+00 -1.84579027e+00 -7.13537991e-01] [ 7.50030696e-01 3.35448712e-01 -9.39858332e-02 -2.08206773e+00 8.35610807e-01 7.23332703e-01 -2.67854273e-01 -6.08420134e-01 1.41060781e+00 -9.91586506e-01 -8.27032551e-02 1.64370012e+00 9.76855755e-01 -6.21535540e-01] [ 4.48132336e-01 -2.88595706e-01 -2.19692421e+00 -9.99234378e-01 -6.00893676e-01 -4.41341013e-01 -2.19677711e+00 2.38641486e-01 -1.06008463e-01 6.70828462e-01 2.29566002e+00 7.27540374e-01 -2.26630640e+00 -1.14824630e-01] [-5.78962028e-01 1.72482088e-01 2.68817043e+00 -6.60151124e-01 6.46796674e-02 9.23289835e-01 7.47504771e-01 -1.42989650e-01 -9.07308996e-01 4.96918917e-01 -1.33697271e+00 2.04257756e-01 -5.66144645e-01 9.00796056e-01] [-1.02951765e+00 -6.67399883e-01 -8.94161820e-01 -1.09240973e+00 -3.84434879e-01 -4.62885052e-02 -2.21483573e-01 -6.31784022e-01 -2.51124799e-01 6.18747056e-01 -9.34256375e-01 -1.87351274e+00 -7.67921031e-01 -4.99892265e-01] [-1.14026070e+00 5.08316934e-01 1.17408764e+00 1.02642381e+00 3.27022582e-01 4.67455000e-01 -2.15728775e-01 -1.35844111e+00 2.49346361e-01 2.38364816e+00 1.49382794e+00 3.54467720e-01 1.06020415e+00 1.70846713e+00] [ 6.85113251e-01 5.52351713e-01 -1.98330402e+00 
8.86842370e-01 1.02460361e+00 4.72613163e-02 -4.78474438e-01 3.35458040e-01 2.96648890e-01 -6.08667135e-01 -1.99103698e-01 -1.18572287e-01 -5.77104032e-01 2.51709521e-01] [-1.29042193e-01 -1.10533603e-01 2.51455498e+00 7.87486043e-03 -1.98639119e+00 -8.33429396e-01 -3.67953181e-01 -8.45422328e-01 -5.46397746e-01 2.41958022e+00 8.54175389e-01 -6.78350091e-01 2.30592704e+00 -2.37884715e-01] [-4.79527265e-01 -6.85055256e-01 1.93501979e-01 3.78749639e-01 -1.25757623e+00 -2.00805140e+00 -1.61103892e+00 1.73911512e+00 5.30844927e-01 7.07512915e-01 8.15623045e-01 4.12008613e-01 2.69649714e-01 -6.14328861e-01] [-1.19625282e+00 1.12329438e-01 3.78241055e-02 1.29821360e-01 -3.50714982e-01 -8.99472833e-01 6.79736853e-01 2.82673001e-01 6.23803973e-01 -5.45790195e-01 1.06468332e+00 -1.45262766e+00 -4.38290149e-01 -1.34909511e+00]] [[ 6.21571124e-01 -1.30728292e+00 -4.26684350e-01 -1.60116696e+00 3.05923283e-01 -2.72192299e-01 -1.14589989e-01 8.14671278e-01 8.00067961e-01 1.87687588e+00 -1.52595258e+00 -3.98683250e-01 -6.99418306e-01 1.07761073e+00] [ 9.49335545e-02 2.63621271e-01 -1.07798681e-01 2.08798099e+00 -1.79209149e+00 1.01685786e+00 3.33527893e-01 2.24104702e-01 6.65342808e-01 6.36420667e-01 -1.22644462e-01 3.13488424e-01 4.38422352e-01 6.53814256e-01] [-9.27173942e-02 8.49888384e-01 1.30710161e+00 -1.54272187e+00 -1.69105291e+00 -9.25311968e-02 1.47546935e+00 -6.65217042e-02 -1.27316678e+00 -7.37458527e-01 -2.21398282e+00 6.12559497e-01 9.51523602e-01 6.75924361e-01] [-3.86069894e-01 2.22872639e+00 -1.04157865e+00 2.54801583e+00 -4.22372371e-01 1.48756117e-01 -1.51726890e+00 4.79070783e-01 2.48683071e+00 2.23807693e+00 -1.89874041e+00 8.11234713e-02 -2.38925529e+00 2.87946016e-01] [-9.37309504e-01 -5.13382614e-01 -8.66076469e-01 5.78510165e-01 -2.22879693e-01 8.53850842e-01 -2.65875868e-02 -1.35434330e-01 -4.53529477e-01 -8.53473186e-01 1.18748236e+00 -5.97530782e-01 -6.59573615e-01 -9.78151500e-01] [ 1.15410481e-02 2.67998111e-02 -9.98081863e-01 1.24788129e+00 
-1.20132041e+00 -2.88409263e-01 1.24296117e+00 1.37621924e-01 5.74122667e-01 6.08084023e-01 -1.79546446e-01 3.75825286e-01 1.26034665e+00 -5.35635054e-01] [-4.08788741e-01 1.96453333e+00 -1.08816540e+00 2.29267597e+00 1.47983134e+00 4.20867831e-01 5.68939567e-01 1.68184876e+00 -4.68713075e-01 9.02676806e-02 6.86964393e-01 -1.05973148e+00 1.36423182e+00 5.92619181e-01] [-1.38227606e+00 1.16684234e+00 -4.86907721e-01 1.16904385e-01 1.82413906e-01 -4.18701351e-01 1.06245005e+00 9.25013423e-01 -8.25087488e-01 -2.80143857e-01 1.98401764e-01 -4.08165604e-02 -1.89591372e+00 -4.55731183e-01] [ 1.53896427e+00 9.75089967e-02 5.63876867e-01 7.04607129e-01 1.25636089e+00 2.50446856e-01 -5.02449274e-01 -1.17210138e+00 3.34213406e-01 1.56844747e+00 1.94038153e+00 -4.50253814e-01 -1.48847020e+00 -2.31754079e-01] [-1.40606880e-01 -8.72317076e-01 -3.44132781e-01 -1.36872172e+00 3.51550281e-01 1.33281600e+00 2.62447357e-01 -4.11615103e-01 -1.86262178e+00 2.82469511e+00 -1.35567200e+00 -1.25862539e+00 6.06329143e-01 1.25952911e+00] [-8.89675692e-02 1.25032330e+00 -2.25929804e-02 -1.61416903e-01 -1.49656427e+00 -4.58281666e-01 1.03622627e+00 -9.09209013e-01 1.06419516e+00 6.27669573e-01 -6.00408196e-01 -1.50187120e-01 -5.24645567e-01 -5.64296663e-01] [-3.32479149e-01 -5.69789588e-01 -3.63684505e-01 6.08096063e-01 1.38755488e+00 -3.02491367e-01 -5.27885914e-01 -1.57143176e-01 -1.15514445e+00 6.34751320e-01 -1.05687857e+00 8.73947024e-01 -1.41225708e+00 7.56015003e-01] [ 1.64908159e+00 3.89938727e-02 -1.50700748e+00 6.99353576e-01 3.30101371e-01 9.90607858e-01 2.01177031e-01 -5.14312804e-01 2.51252145e-01 -1.04832697e+00 -9.09449995e-01 1.25321060e-01 2.56362697e-03 -5.62089272e-02] [-8.92481744e-01 1.38211459e-01 -1.75726044e+00 2.62629986e-01 8.39589357e-01 -2.10451677e-01 -1.06407419e-01 1.07348084e-01 5.72786748e-01 1.99396729e-01 -7.26931989e-01 -2.65057315e-03 -1.93475854e+00 -1.72557867e+00]] [[-1.01670361e+00 9.72085536e-01 -1.61863267e+00 1.90988392e-01 6.66197002e-01 
1.04078269e+00 9.27372456e-01 -1.65800035e+00 7.41414070e-01 -1.49015427e+00 -8.47029313e-02 -1.69975966e-01 -2.72993326e-01 1.03117061e+00] [-9.62679923e-01 -5.58086038e-01 3.14355850e-01 7.58967936e-01 5.17558932e-01 -1.34311152e+00 1.11341345e+00 -1.93163490e+00 6.51739001e-01 9.50103581e-01 -3.04786265e-01 -4.15749907e-01 -1.41798162e+00 -3.04232687e-01] [ 2.89149672e-01 2.76577353e-01 -2.84479320e-01 1.38203037e+00 -3.18425298e-01 1.32422727e-02 2.18964314e+00 -2.75317550e+00 1.00928879e+00 1.83140319e-02 -1.67882228e+00 -8.23145986e-01 5.83945930e-01 -1.14139545e+00] [-1.66109240e+00 -5.13782144e-01 1.67849749e-01 -4.63918179e-01 1.25320864e+00 -7.91297257e-01 3.61405849e-01 9.42833841e-01 -1.63680986e-01 1.72123507e-01 5.49101233e-01 -1.44439673e+00 3.68545681e-01 -1.18076289e+00] [-7.38842607e-01 -2.28884473e-01 -2.11103296e+00 4.51702476e-01 2.19418788e+00 3.55515748e-01 -1.10214978e-01 -1.23268557e+00 5.18119812e-01 6.70290053e-01 -7.08857059e-01 -1.08133328e+00 3.02141219e-01 1.68747675e+00] [-4.70842749e-01 -8.10106337e-01 9.59910452e-01 1.35290098e+00 -4.94936764e-01 -5.73688447e-01 -8.69315445e-01 -1.24163099e-01 -2.93002337e-01 -1.43755674e+00 3.55893254e-01 9.64680493e-01 3.95168722e-01 -1.40741539e+00] [-9.89052534e-01 -1.66646302e+00 8.91612470e-01 2.66614348e-01 -1.00017428e+00 -2.25018114e-01 -2.53566235e-01 2.13078886e-01 -8.90347540e-01 1.26974487e+00 2.04841658e-01 1.00859141e+00 -5.98620117e-01 -1.87795073e-01] [-4.49090332e-01 -9.09542859e-01 -3.88763577e-01 9.52376485e-01 -4.83154684e-01 -1.95186259e-03 -1.69183028e+00 7.82500505e-01 -9.33989525e-01 2.07612395e+00 3.37344587e-01 8.19434285e-01 -1.26888788e+00 -1.83668122e-01] [ 9.54106450e-01 -3.63908410e-02 3.53585303e-01 -9.34690237e-01 1.54449284e+00 9.93585885e-01 -8.74292433e-01 -6.09790027e-01 4.70323473e-01 -5.80163538e-01 4.81328070e-01 7.98014164e-01 -6.49012685e-01 -2.13277981e-01] [ 2.93814808e-01 7.37012178e-02 -1.10900629e+00 3.89792547e-02 -4.57337111e-01 8.59941244e-01 
1.24928474e+00 -9.68488336e-01 -5.45563757e-01 -2.06972569e-01 1.21012378e+00 -5.49868941e-02 8.81380260e-01 1.82340240e+00] [-1.16933227e+00 -6.05248548e-02 -1.17598641e+00 5.91334626e-02 1.68756866e+00 7.30193704e-02 1.67583990e+00 -1.73944309e-01 1.65048584e-01 -4.61474329e-01 -8.27643454e-01 -2.78831512e-01 1.16951799e+00 -6.76866412e-01] [ 7.86132589e-02 9.00452793e-01 -1.06169596e-01 1.43959686e-01 1.33865654e-01 6.64523482e-01 -1.18590963e+00 4.28325325e-01 8.62918437e-01 3.28288883e-01 -9.74990845e-01 6.59181923e-02 6.49562716e-01 -2.87616730e-01] [ 1.09501719e+00 1.30263138e+00 2.86742032e-01 6.28689229e-01 -1.96534917e-01 -8.96891594e-01 1.21992931e-01 -2.11322024e-01 -9.23774600e-01 -1.40498266e-01 -9.66386348e-02 5.78823924e-01 -1.70307291e+00 -5.16831577e-01] [ 9.78412181e-02 -9.19753611e-01 -5.53380251e-01 -2.36804962e+00 4.50731337e-01 -1.63804996e+00 -3.18275779e-01 3.04706216e-01 -1.32368445e-01 2.94365495e-01 6.41859531e-01 -3.35874796e-01 9.23693836e-01 7.44882345e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:constant - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5495.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="constant"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:constant - value:42.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5497.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %self.value : float = prim::Constant[value=42.]() %self.mode : str = prim::Constant[value="constant"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]]]]; ov_res: [[[[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:constant - value:-0.57 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5499.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %self.value : float = prim::Constant[value=-0.56999999999999995]() %self.mode : str = prim::Constant[value="constant"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]]]]; ov_res: [[[[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... 
-0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 2) - mode:constant - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5501.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="constant"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 0.00000000e+00 4.89888459e-01 -1.49401700e+00 2.44544578e+00 -4.54092056e-01 -8.96857917e-01 2.60066748e-01 -2.46348786e+00 2.09728289e+00 1.14846277e+00 9.10861671e-01 8.45503151e-01 -1.70933202e-01 -3.07622373e-01 -1.20616460e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.55823553e+00 2.16635108e+00 -1.65346014e+00 -8.64239395e-01 -9.96767998e-01 -1.00808191e+00 -1.06541264e+00 2.30489343e-01 3.69133204e-01 -2.49639437e-01 -8.67662966e-01 -7.38668740e-01 -1.15652573e+00 8.75263631e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -9.80547309e-01 -3.91231626e-02 1.35306728e+00 7.04672873e-01 -3.14720869e-01 -1.17840931e-01 1.19680691e+00 2.69632936e-01 1.53996277e+00 -2.97575384e-01 -8.22415709e-01 -1.56678930e-02 -1.16154122e+00 -6.14178538e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -8.10694098e-01 -7.29878604e-01 4.79254425e-01 -4.30669308e-01 1.93783268e-01 7.95567513e-01 -9.16150630e-01 1.94924998e+00 -1.25073805e-01 8.11114788e-01 -8.23930204e-01 8.41395974e-01 1.43877470e+00 7.55868316e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.62200385e-01 -1.87233400e+00 4.53787625e-01 1.00316751e+00 1.64663732e+00 -1.07024562e+00 2.92432338e-01 -1.05833995e+00 1.52690804e+00 1.16974151e+00 4.06883806e-01 3.90216261e-01 1.16394542e-01 1.55070019e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -4.84253079e-01 -2.45327687e+00 6.43336833e-01 -9.30749893e-01 2.69229561e-01 1.41158164e+00 -1.80832505e-01 -9.79015410e-01 4.63582128e-01 -6.69572830e-01 -2.85838306e-01 9.77785170e-01 
-2.38107634e+00 2.79439658e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -9.05276537e-01 -1.11365962e+00 6.23449564e-01 5.22044241e-01 5.10660410e-01 1.73469067e-01 1.38883817e+00 -6.77871227e-01 1.11681022e-01 1.63368738e+00 -9.16290998e-01 4.23208028e-01 -1.35673058e+00 -1.00929797e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.31010134e-02 -1.43544388e+00 -1.38544476e+00 2.44009900e+00 -1.98149037e+00 -9.64720249e-01 -2.24388584e-01 7.09511817e-01 9.84048188e-01 6.24904990e-01 -1.39073104e-01 5.35643995e-01 7.53800645e-02 8.67748439e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -7.01391399e-01 -2.75175065e-01 -1.84166670e+00 1.90147889e+00 -6.30283535e-01 5.34460247e-01 -4.38966185e-01 1.02563143e+00 -1.80918729e+00 -9.38383996e-01 -7.80107304e-02 3.15416813e-01 3.97417426e-01 -9.02010858e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.12338352e+00 2.10757279e+00 -6.43864214e-01 -4.63875264e-01 -9.78059590e-01 8.75082254e-01 -9.00361836e-01 1.62306750e+00 1.98731124e-01 -2.27786207e+00 6.61343336e-01 1.29155427e-01 -1.33210135e+00 -6.31490946e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.27905318e-01 1.90543801e-01 -7.39872575e-01 -1.77089047e+00 -2.90803641e-01 -4.29232061e-01 -1.15127003e+00 -9.71090794e-01 5.97548485e-01 7.17094779e-01 -1.28402674e+00 -7.41293907e-01 -4.12520051e-01 -8.82559791e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 8.77539217e-01 8.05516303e-01 3.04383308e-01 -1.25394368e+00 -2.86723138e-03 7.87421405e-01 4.95499462e-01 6.47853494e-01 2.12926134e-01 1.60093650e-01 -1.16027248e+00 1.05996037e+00 2.70116746e-01 -3.00051153e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.02145100e+00 -1.79592133e+00 1.90630627e+00 -3.40205766e-02 6.24325931e-01 -5.36508799e-01 4.10228848e-01 -4.38137501e-01 -3.45057398e-01 6.47863567e-01 3.79038900e-01 -9.41264570e-01 -9.10648227e-01 -6.23597264e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.62892246e-01 6.65591121e-01 -1.59739363e+00 4.79532480e-01 
1.64886042e-01 4.00240391e-01 -8.61407697e-01 -1.41995203e+00 -8.94199967e-01 -2.41688299e+00 1.69412637e+00 -1.23178400e-01 -1.30593193e+00 -2.10761523e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 5.91267273e-03 7.80467987e-01 -2.28217706e-01 1.19768989e+00 -1.28559172e+00 -1.41364121e+00 2.09703967e-01 1.18375027e+00 1.23185337e+00 -1.26662776e-01 -1.12294745e+00 -1.63106251e+00 1.39218199e+00 -5.08975327e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.89949691e-01 1.44722319e+00 -8.90938461e-01 7.33991206e-01 2.28973210e-01 -1.66626680e+00 -1.41155469e+00 4.71577466e-01 1.92475581e+00 -2.74974972e-01 -3.17125261e-01 -1.96006656e-01 -2.49039388e+00 6.71337247e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.32786357e+00 -1.05682087e+00 -1.40834916e+00 8.48057628e-01 -4.43975568e-01 9.03514445e-01 -5.17602742e-01 -1.07932293e+00 1.09120095e+00 -1.55732167e+00 -1.83968902e+00 1.42692792e+00 4.41069514e-01 -7.92079091e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.75254297e-01 -2.02928185e+00 1.24421716e+00 -4.50980961e-01 1.44409680e+00 -1.28401434e+00 -8.43001425e-01 -7.70803630e-01 6.39694631e-02 1.58880845e-01 -1.50272161e-01 7.19525874e-01 3.43876690e-01 4.46078479e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.14138460e+00 2.89183766e-01 4.23849732e-01 1.65932262e+00 -1.08999515e+00 -1.44612205e+00 -1.29705513e+00 -9.76344720e-02 8.80647242e-01 8.78436044e-02 6.97128415e-01 7.73306549e-01 2.48612106e-01 -1.03465021e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.77824628e-01 3.23457122e-01 -2.54264385e-01 -6.39713228e-01 2.45286807e-01 6.50690138e-01 -1.76907229e+00 1.54856610e+00 -3.88164252e-01 1.59333348e+00 8.22925210e-01 2.46634051e-01 5.39220154e-01 1.21098757e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -5.16941488e-01 1.47256184e+00 -1.49286449e+00 5.14655650e-01 -1.00469887e+00 1.21348664e-01 -5.31774819e-01 -6.64436162e-01 1.10550058e+00 -2.13884011e-01 1.14949691e+00 -4.97694820e-01 -9.61705565e-01 
-1.72763908e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 9.11422074e-01 2.23660439e-01 -1.71702063e+00 -3.82503390e-01 1.36365205e-01 -1.64975166e-01 1.23062515e+00 7.56814361e-01 3.73626918e-01 2.50435412e-01 -1.43390679e+00 1.01853311e+00 -9.88438904e-01 -4.07702774e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.66199625e+00 -2.79850334e-01 1.41750240e+00 1.65774536e+00 -5.30001521e-01 2.38089848e+00 6.63898528e-01 -1.63879111e-01 3.54732007e-01 8.37352097e-01 4.93119210e-01 1.27680326e+00 -6.03431165e-01 1.62044597e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.95603833e-01 4.50659603e-01 3.00415844e-01 2.20252536e-02 1.24074662e+00 4.45326865e-01 -5.83492517e-01 9.94478703e-01 2.15946734e-01 -4.22853202e-01 -4.89148609e-02 1.59658766e+00 -1.44995525e-01 1.39805520e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -3.04333836e-01 1.18228459e+00 1.84751534e+00 1.74509566e-02 -6.55816048e-02 -7.35439599e-01 -3.54950935e-01 3.99400055e-01 -1.68051600e-01 -1.61539698e+00 1.66633630e+00 8.95910978e-01 1.62670648e+00 -2.30572894e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 9.08729434e-01 -1.15825593e+00 -9.35008943e-01 9.23337221e-01 -1.33500338e-01 -8.20165098e-01 -5.58674455e-01 3.16276455e+00 7.07879663e-01 -1.16201425e+00 -9.97616172e-01 7.14102527e-03 -3.05610001e-01 -1.06747456e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -4.34137255e-01 -3.04550678e-01 -1.24419630e+00 8.84384930e-01 -1.01766276e+00 1.10703158e+00 -4.90237534e-01 1.16465986e+00 9.19038117e-01 3.01652759e-01 -5.34521699e-01 -4.47198264e-02 4.68929172e-01 8.18897784e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.07329583e+00 2.27698907e-01 -7.96763301e-01 -3.23971123e-01 -1.60508946e-01 -1.06078124e+00 -7.51665771e-01 -1.21483719e+00 1.14732051e+00 1.39348507e+00 1.13443363e+00 7.32521296e-01 -9.99651611e-01 1.27017367e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -9.02473807e-01 9.04589415e-01 1.01488960e+00 1.18047905e+00 -1.29493737e+00 
5.98063052e-01 2.62482651e-02 -1.71678379e-01 -5.79548717e-01 7.82891829e-03 -1.72890615e+00 1.26595438e+00 4.20242429e-01 -1.04121757e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.84310281e+00 5.04619479e-01 6.32658899e-01 6.32414043e-01 9.93012011e-01 6.43068314e-01 2.32955098e-01 2.14105010e+00 2.38276899e-01 5.72015047e-01 1.03331125e+00 6.01168811e-01 3.51318717e-01 -5.51054716e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.89415449e-01 -7.02224076e-01 4.54940856e-01 -3.80914420e-01 1.57884169e+00 -8.42851400e-01 -3.33518147e-01 -1.51699948e+00 -4.41655010e-01 4.65468913e-01 -3.19689780e-01 1.34322569e-01 -1.53192377e+00 4.68471684e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.21858323e+00 -1.14117838e-01 6.46009862e-01 2.96585206e-02 1.84057325e-01 -8.68615627e-01 3.14829618e-01 -2.62932122e-01 2.71481693e-01 -6.49539530e-01 3.14756542e-01 -1.45180082e+00 2.52574825e+00 -5.88044703e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -6.92400873e-01 1.21275413e+00 9.62430518e-03 -6.45034075e-01 7.48862743e-01 9.22550559e-01 -4.34905022e-01 6.79903328e-02 8.82537842e-01 -2.26068169e-01 1.11839771e+00 -5.95926680e-02 -1.57466304e+00 -8.19418669e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.99632806e-01 -8.69604409e-01 -7.47091889e-01 -4.42629933e-01 1.51818320e-01 -9.83802020e-04 2.83349883e-02 -2.27568722e+00 -4.03548419e-01 1.21130741e+00 1.33145666e+00 -1.71992564e+00 -4.06654239e-01 -8.96862149e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -8.64683837e-02 -9.43442285e-01 2.44899678e+00 2.96383232e-01 -6.42664731e-01 -1.19986153e+00 -1.43977553e-01 4.36956286e-02 4.42858636e-01 1.94233894e+00 -2.51239109e+00 -1.84596598e+00 -8.58398974e-01 -4.09244329e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 9.08897698e-01 -1.48433483e+00 -1.09720159e+00 -4.65062559e-01 -1.14645910e+00 -9.40232515e-01 -2.82462716e-01 7.55703270e-01 1.32825053e+00 -3.30255032e-01 7.43159175e-01 -2.51113915e+00 -3.02159041e-01 8.06962699e-02 
0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -5.44604301e-01 -3.73563379e-01 1.26336360e+00 6.51365280e-01 -1.04823685e+00 1.70569837e+00 -9.31521654e-01 -3.47674698e-01 7.85419941e-01 -2.38041073e-01 -2.09867954e-01 8.19787458e-02 1.81391573e+00 1.25258243e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.30723619e+00 -1.11780751e+00 9.95713830e-01 4.51185495e-01 9.42672431e-01 -1.70200968e+00 -2.46926355e+00 1.00427353e+00 -9.49132740e-02 2.07138002e-01 -4.23716515e-01 6.17859483e-01 1.44650573e-02 -7.98362195e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.42429435e-01 3.04275870e-01 -1.66804910e+00 -3.31586689e-01 -1.94264281e+00 1.61600202e-01 5.46104312e-01 -1.46806037e+00 2.90114909e-01 1.42487001e+00 7.32493475e-02 9.07652617e-01 -1.78299248e+00 2.73791254e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -3.53618383e-01 -2.80948937e-01 8.17072988e-01 -2.21144772e+00 1.98002732e+00 -1.02106772e-01 -1.03189456e+00 2.25131059e+00 5.27276933e-01 1.10850358e+00 -5.89605093e-01 7.54604816e-01 1.84061944e+00 8.49943876e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.20140475e-01 -1.33727002e+00 4.86051798e-01 -4.28736061e-01 -1.54220474e+00 -3.83798271e-01 -5.43394089e-01 1.08465314e+00 -1.19380188e+00 1.14532173e+00 1.86112430e-02 -5.84822357e-01 -1.60354090e+00 -3.68558377e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.77371204e-01 -8.89382303e-01 -3.07882816e-01 -1.30260670e+00 1.10091019e+00 -2.99714065e+00 8.03817332e-01 1.04407036e+00 -6.03538454e-01 -5.70548058e-01 2.72217059e+00 4.08010393e-01 7.76778162e-01 -1.40411153e-01 0.00000000e+00 0.00000000e+00]]]]; ov_res: [[[[ 0.00000000e+00 4.89888459e-01 -1.49401700e+00 2.44544578e+00 -4.54092056e-01 -8.96857917e-01 2.60066748e-01 -2.46348786e+00 2.09728289e+00 1.14846277e+00 9.10861671e-01 8.45503151e-01 -1.70933202e-01 -3.07622373e-01 -1.20616460e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.55823553e+00 2.16635108e+00 -1.65346014e+00 -8.64239395e-01 -9.96767998e-01 
-1.00808191e+00 -1.06541264e+00 2.30489343e-01 3.69133204e-01 -2.49639437e-01 -8.67662966e-01 -7.38668740e-01 -1.15652573e+00 8.75263631e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -9.80547309e-01 -3.91231626e-02 1.35306728e+00 7.04672873e-01 -3.14720869e-01 -1.17840931e-01 1.19680691e+00 2.69632936e-01 1.53996277e+00 -2.97575384e-01 -8.22415709e-01 -1.56678930e-02 -1.16154122e+00 -6.14178538e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -8.10694098e-01 -7.29878604e-01 4.79254425e-01 -4.30669308e-01 1.93783268e-01 7.95567513e-01 -9.16150630e-01 1.94924998e+00 -1.25073805e-01 8.11114788e-01 -8.23930204e-01 8.41395974e-01 1.43877470e+00 7.55868316e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.62200385e-01 -1.87233400e+00 4.53787625e-01 1.00316751e+00 1.64663732e+00 -1.07024562e+00 2.92432338e-01 -1.05833995e+00 1.52690804e+00 1.16974151e+00 4.06883806e-01 3.90216261e-01 1.16394542e-01 1.55070019e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -4.84253079e-01 -2.45327687e+00 6.43336833e-01 -9.30749893e-01 2.69229561e-01 1.41158164e+00 -1.80832505e-01 -9.79015410e-01 4.63582128e-01 -6.69572830e-01 -2.85838306e-01 9.77785170e-01 -2.38107634e+00 2.79439658e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -9.05276537e-01 -1.11365962e+00 6.23449564e-01 5.22044241e-01 5.10660410e-01 1.73469067e-01 1.38883817e+00 -6.77871227e-01 1.11681022e-01 1.63368738e+00 -9.16290998e-01 4.23208028e-01 -1.35673058e+00 -1.00929797e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.31010134e-02 -1.43544388e+00 -1.38544476e+00 2.44009900e+00 -1.98149037e+00 -9.64720249e-01 -2.24388584e-01 7.09511817e-01 9.84048188e-01 6.24904990e-01 -1.39073104e-01 5.35643995e-01 7.53800645e-02 8.67748439e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -7.01391399e-01 -2.75175065e-01 -1.84166670e+00 1.90147889e+00 -6.30283535e-01 5.34460247e-01 -4.38966185e-01 1.02563143e+00 -1.80918729e+00 -9.38383996e-01 -7.80107304e-02 3.15416813e-01 3.97417426e-01 -9.02010858e-01 
0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.12338352e+00 2.10757279e+00 -6.43864214e-01 -4.63875264e-01 -9.78059590e-01 8.75082254e-01 -9.00361836e-01 1.62306750e+00 1.98731124e-01 -2.27786207e+00 6.61343336e-01 1.29155427e-01 -1.33210135e+00 -6.31490946e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.27905318e-01 1.90543801e-01 -7.39872575e-01 -1.77089047e+00 -2.90803641e-01 -4.29232061e-01 -1.15127003e+00 -9.71090794e-01 5.97548485e-01 7.17094779e-01 -1.28402674e+00 -7.41293907e-01 -4.12520051e-01 -8.82559791e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 8.77539217e-01 8.05516303e-01 3.04383308e-01 -1.25394368e+00 -2.86723138e-03 7.87421405e-01 4.95499462e-01 6.47853494e-01 2.12926134e-01 1.60093650e-01 -1.16027248e+00 1.05996037e+00 2.70116746e-01 -3.00051153e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.02145100e+00 -1.79592133e+00 1.90630627e+00 -3.40205766e-02 6.24325931e-01 -5.36508799e-01 4.10228848e-01 -4.38137501e-01 -3.45057398e-01 6.47863567e-01 3.79038900e-01 -9.41264570e-01 -9.10648227e-01 -6.23597264e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.62892246e-01 6.65591121e-01 -1.59739363e+00 4.79532480e-01 1.64886042e-01 4.00240391e-01 -8.61407697e-01 -1.41995203e+00 -8.94199967e-01 -2.41688299e+00 1.69412637e+00 -1.23178400e-01 -1.30593193e+00 -2.10761523e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 5.91267273e-03 7.80467987e-01 -2.28217706e-01 1.19768989e+00 -1.28559172e+00 -1.41364121e+00 2.09703967e-01 1.18375027e+00 1.23185337e+00 -1.26662776e-01 -1.12294745e+00 -1.63106251e+00 1.39218199e+00 -5.08975327e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.89949691e-01 1.44722319e+00 -8.90938461e-01 7.33991206e-01 2.28973210e-01 -1.66626680e+00 -1.41155469e+00 4.71577466e-01 1.92475581e+00 -2.74974972e-01 -3.17125261e-01 -1.96006656e-01 -2.49039388e+00 6.71337247e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.32786357e+00 -1.05682087e+00 -1.40834916e+00 8.48057628e-01 -4.43975568e-01 
9.03514445e-01 -5.17602742e-01 -1.07932293e+00 1.09120095e+00 -1.55732167e+00 -1.83968902e+00 1.42692792e+00 4.41069514e-01 -7.92079091e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.75254297e-01 -2.02928185e+00 1.24421716e+00 -4.50980961e-01 1.44409680e+00 -1.28401434e+00 -8.43001425e-01 -7.70803630e-01 6.39694631e-02 1.58880845e-01 -1.50272161e-01 7.19525874e-01 3.43876690e-01 4.46078479e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.14138460e+00 2.89183766e-01 4.23849732e-01 1.65932262e+00 -1.08999515e+00 -1.44612205e+00 -1.29705513e+00 -9.76344720e-02 8.80647242e-01 8.78436044e-02 6.97128415e-01 7.73306549e-01 2.48612106e-01 -1.03465021e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.77824628e-01 3.23457122e-01 -2.54264385e-01 -6.39713228e-01 2.45286807e-01 6.50690138e-01 -1.76907229e+00 1.54856610e+00 -3.88164252e-01 1.59333348e+00 8.22925210e-01 2.46634051e-01 5.39220154e-01 1.21098757e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -5.16941488e-01 1.47256184e+00 -1.49286449e+00 5.14655650e-01 -1.00469887e+00 1.21348664e-01 -5.31774819e-01 -6.64436162e-01 1.10550058e+00 -2.13884011e-01 1.14949691e+00 -4.97694820e-01 -9.61705565e-01 -1.72763908e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 9.11422074e-01 2.23660439e-01 -1.71702063e+00 -3.82503390e-01 1.36365205e-01 -1.64975166e-01 1.23062515e+00 7.56814361e-01 3.73626918e-01 2.50435412e-01 -1.43390679e+00 1.01853311e+00 -9.88438904e-01 -4.07702774e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.66199625e+00 -2.79850334e-01 1.41750240e+00 1.65774536e+00 -5.30001521e-01 2.38089848e+00 6.63898528e-01 -1.63879111e-01 3.54732007e-01 8.37352097e-01 4.93119210e-01 1.27680326e+00 -6.03431165e-01 1.62044597e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.95603833e-01 4.50659603e-01 3.00415844e-01 2.20252536e-02 1.24074662e+00 4.45326865e-01 -5.83492517e-01 9.94478703e-01 2.15946734e-01 -4.22853202e-01 -4.89148609e-02 1.59658766e+00 -1.44995525e-01 1.39805520e+00 0.00000000e+00 
0.00000000e+00] [ 0.00000000e+00 -3.04333836e-01 1.18228459e+00 1.84751534e+00 1.74509566e-02 -6.55816048e-02 -7.35439599e-01 -3.54950935e-01 3.99400055e-01 -1.68051600e-01 -1.61539698e+00 1.66633630e+00 8.95910978e-01 1.62670648e+00 -2.30572894e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 9.08729434e-01 -1.15825593e+00 -9.35008943e-01 9.23337221e-01 -1.33500338e-01 -8.20165098e-01 -5.58674455e-01 3.16276455e+00 7.07879663e-01 -1.16201425e+00 -9.97616172e-01 7.14102527e-03 -3.05610001e-01 -1.06747456e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -4.34137255e-01 -3.04550678e-01 -1.24419630e+00 8.84384930e-01 -1.01766276e+00 1.10703158e+00 -4.90237534e-01 1.16465986e+00 9.19038117e-01 3.01652759e-01 -5.34521699e-01 -4.47198264e-02 4.68929172e-01 8.18897784e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.07329583e+00 2.27698907e-01 -7.96763301e-01 -3.23971123e-01 -1.60508946e-01 -1.06078124e+00 -7.51665771e-01 -1.21483719e+00 1.14732051e+00 1.39348507e+00 1.13443363e+00 7.32521296e-01 -9.99651611e-01 1.27017367e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -9.02473807e-01 9.04589415e-01 1.01488960e+00 1.18047905e+00 -1.29493737e+00 5.98063052e-01 2.62482651e-02 -1.71678379e-01 -5.79548717e-01 7.82891829e-03 -1.72890615e+00 1.26595438e+00 4.20242429e-01 -1.04121757e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.84310281e+00 5.04619479e-01 6.32658899e-01 6.32414043e-01 9.93012011e-01 6.43068314e-01 2.32955098e-01 2.14105010e+00 2.38276899e-01 5.72015047e-01 1.03331125e+00 6.01168811e-01 3.51318717e-01 -5.51054716e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.89415449e-01 -7.02224076e-01 4.54940856e-01 -3.80914420e-01 1.57884169e+00 -8.42851400e-01 -3.33518147e-01 -1.51699948e+00 -4.41655010e-01 4.65468913e-01 -3.19689780e-01 1.34322569e-01 -1.53192377e+00 4.68471684e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.21858323e+00 -1.14117838e-01 6.46009862e-01 2.96585206e-02 1.84057325e-01 -8.68615627e-01 3.14829618e-01 
-2.62932122e-01 2.71481693e-01 -6.49539530e-01 3.14756542e-01 -1.45180082e+00 2.52574825e+00 -5.88044703e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -6.92400873e-01 1.21275413e+00 9.62430518e-03 -6.45034075e-01 7.48862743e-01 9.22550559e-01 -4.34905022e-01 6.79903328e-02 8.82537842e-01 -2.26068169e-01 1.11839771e+00 -5.95926680e-02 -1.57466304e+00 -8.19418669e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.99632806e-01 -8.69604409e-01 -7.47091889e-01 -4.42629933e-01 1.51818320e-01 -9.83802020e-04 2.83349883e-02 -2.27568722e+00 -4.03548419e-01 1.21130741e+00 1.33145666e+00 -1.71992564e+00 -4.06654239e-01 -8.96862149e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -8.64683837e-02 -9.43442285e-01 2.44899678e+00 2.96383232e-01 -6.42664731e-01 -1.19986153e+00 -1.43977553e-01 4.36956286e-02 4.42858636e-01 1.94233894e+00 -2.51239109e+00 -1.84596598e+00 -8.58398974e-01 -4.09244329e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 9.08897698e-01 -1.48433483e+00 -1.09720159e+00 -4.65062559e-01 -1.14645910e+00 -9.40232515e-01 -2.82462716e-01 7.55703270e-01 1.32825053e+00 -3.30255032e-01 7.43159175e-01 -2.51113915e+00 -3.02159041e-01 8.06962699e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -5.44604301e-01 -3.73563379e-01 1.26336360e+00 6.51365280e-01 -1.04823685e+00 1.70569837e+00 -9.31521654e-01 -3.47674698e-01 7.85419941e-01 -2.38041073e-01 -2.09867954e-01 8.19787458e-02 1.81391573e+00 1.25258243e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.30723619e+00 -1.11780751e+00 9.95713830e-01 4.51185495e-01 9.42672431e-01 -1.70200968e+00 -2.46926355e+00 1.00427353e+00 -9.49132740e-02 2.07138002e-01 -4.23716515e-01 6.17859483e-01 1.44650573e-02 -7.98362195e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.42429435e-01 3.04275870e-01 -1.66804910e+00 -3.31586689e-01 -1.94264281e+00 1.61600202e-01 5.46104312e-01 -1.46806037e+00 2.90114909e-01 1.42487001e+00 7.32493475e-02 9.07652617e-01 -1.78299248e+00 2.73791254e-01 0.00000000e+00 0.00000000e+00] 
[ 0.00000000e+00 -3.53618383e-01 -2.80948937e-01 8.17072988e-01 -2.21144772e+00 1.98002732e+00 -1.02106772e-01 -1.03189456e+00 2.25131059e+00 5.27276933e-01 1.10850358e+00 -5.89605093e-01 7.54604816e-01 1.84061944e+00 8.49943876e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.20140475e-01 -1.33727002e+00 4.86051798e-01 -4.28736061e-01 -1.54220474e+00 -3.83798271e-01 -5.43394089e-01 1.08465314e+00 -1.19380188e+00 1.14532173e+00 1.86112430e-02 -5.84822357e-01 -1.60354090e+00 -3.68558377e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.77371204e-01 -8.89382303e-01 -3.07882816e-01 -1.30260670e+00 1.10091019e+00 -2.99714065e+00 8.03817332e-01 1.04407036e+00 -6.03538454e-01 -5.70548058e-01 2.72217059e+00 4.08010393e-01 7.76778162e-01 -1.40411153e-01 0.00000000e+00 0.00000000e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 1) - mode:constant - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5503.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 1]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="constant"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 0. 0.2681688 -2.0864158 -1.5079944 0.5603066 -0.6582971 0.83087593 0.27808794 0.84234715 0.4605277 -1.0101421 0.38001823 3.6569831 0.27042636 0.6971312 ] [ 0. 0.6854182 0.13878486 -0.28487048 1.9545991 -0.4692962 -0.7292422 0.08067049 0.8019348 0.36000964 0.62570584 -1.2334414 0.12901603 0.23758689 -0.02864466] [ 0. 0.21845432 -1.3556356 0.5853109 0.7924248 1.1835178 -0.22693846 1.9906721 -0.95960927 1.2046278 -0.46556827 -1.2073234 -0.7737045 0.74790484 1.48025 ] [ 0. -0.02720316 0.23418573 -0.36853334 -0.17768489 -1.335209 -0.9195721 1.2319279 1.4845985 -0.9771285 -0.60484505 -0.2844431 0.48049417 0.93093044 -0.21863128] [ 0. -0.9893057 -0.09298092 -0.9674527 2.1967986 0.04016247 -0.89350635 0.8269577 0.1776547 -0.6631764 0.9926471 0.73453623 -0.18325388 -1.5409533 -1.792041 ] [ 0. 1.1504337 -0.10811359 0.08843325 -0.11421485 1.5639246 -0.21165729 -0.4987735 0.5896544 0.10326385 0.8431825 1.0535432 0.6879772 0.88017887 0.95732915] [ 0. 1.0616626 -0.23538291 1.5517838 0.6180071 2.726564 -0.45774403 -1.2573543 -0.4653936 1.0776371 0.90412056 0.652619 -0.25142476 -1.1488783 -0.60398203] [ 0. -2.0671277 -0.35470277 -0.15491037 1.0584832 -0.3879281 -0.64413565 -0.06609146 1.0475584 0.5056724 -1.0522909 -0.75420594 -0.8871519 -0.38563353 0.11009352] [ 0. 0.02722269 -0.16407712 0.75302607 -0.22751233 1.4120477 0.78190666 1.0993272 -1.1706746 -0.9810139 -1.3836789 0.8470399 -1.3028706 0.24249342 -0.9999402 ] [ 0. 
-0.51699674 -0.85140675 -0.07542821 0.70831645 0.5774712 -0.28891096 -0.4700357 0.4243731 -0.77920526 -0.6629186 -0.07623233 0.8533474 0.09785195 -0.20054466] [ 0. 0.28944668 1.7438891 0.15419802 -0.30327627 0.6467274 -1.1313975 -1.5296482 -0.0088079 -0.94336253 0.895014 -0.28170732 1.3605129 0.02384619 1.4542251 ] [ 0. 1.5469245 1.2893528 -1.5662823 0.03016344 0.0320187 -1.5588787 0.19422393 0.15632842 -0.28337568 -1.3561732 -1.164319 0.7703341 -0.24461207 0.656065 ] [ 0. 2.350791 0.98478115 -0.37849107 1.8685058 -0.544081 0.3270849 -0.15501331 0.9658629 -0.7723956 2.0023513 -1.5211393 -0.1636027 0.317666 -1.3044375 ] [ 0. 1.5619183 -0.7484324 1.0999684 -0.9905051 0.6037011 0.74430054 -1.8362428 0.9797166 -0.56297183 -0.1587544 -0.09796321 -0.72033215 -0.73573804 -0.65585345]] [[ 0. -0.74786335 -0.5752026 0.78447783 -0.41426915 0.34111908 2.0479722 1.1782807 0.22581574 0.09942677 -0.73315734 1.457728 0.6137981 1.6543455 0.5208509 ] [ 0. -1.0501112 -0.15199876 0.09726692 -1.3436233 0.662364 0.97864664 -0.7367297 -1.1360149 -1.6689408 -0.09760271 -0.98832047 0.03106857 0.9399902 -0.1709865 ] [ 0. 0.68106663 0.9758076 -0.47114983 0.30536896 0.60936284 -0.16492651 -0.49894974 -1.5727228 -0.5783932 0.18661349 -2.0916367 0.40800473 1.2522589 -0.47074643] [ 0. -0.8405646 0.37266427 -0.5849461 1.478017 0.4049136 0.5870216 -0.78782856 -0.3003468 -0.21523418 0.83864504 0.6673946 0.6367911 0.30150872 0.08137815] [ 0. 1.0121547 0.41252917 1.0334375 -1.7465838 0.78838134 -0.08377871 1.5503973 0.6271581 0.2504139 1.4316274 -0.74416083 -0.6637609 -1.188649 1.2087846 ] [ 0. 1.9489179 -0.0775462 0.95192826 -1.0509391 0.47374517 0.13236363 1.9702811 -0.3944504 -0.5750061 -0.12261299 1.7739868 1.0866107 -1.0479643 -0.5296081 ] [ 0. -0.42148763 0.6397655 -0.37319475 -0.3499873 -1.548777 -0.7440557 -0.3380441 1.1040802 0.92361057 -0.10725069 -0.65979856 1.250226 -0.448129 -1.1888498 ] [ 0. 
0.58531487 0.19067292 0.06286708 -0.7904113 0.34894106 -0.62172955 -0.32550552 0.479228 0.66629535 -1.709109 0.6648221 0.6204596 1.0607486 0.34422696] [ 0. -0.17695743 -1.5270165 -0.26372415 -0.56906664 -0.11124353 0.154072 1.2265079 -0.9894464 0.9310982 1.8916898 -0.9689668 -1.1958864 0.5027611 -0.0794358 ] [ 0. -0.37860057 0.5224926 0.99702173 1.380551 -0.54061717 0.6519071 -0.11370388 -1.4588331 0.6227354 0.9045593 0.19302437 -0.0255883 -1.2821271 0.45421284] [ 0. 0.5838735 1.1038617 0.86151886 0.8896699 -0.44535238 1.9187636 -0.12480813 -0.1016189 -3.1313431 -0.4499923 0.128651 0.04349481 1.0431969 -0.29787096] [ 0. -0.22582367 0.15990858 1.2068303 -0.15585533 0.7447838 0.6973592 0.62597394 -0.2587711 0.8692876 1.3497621 -0.40978074 0.97644067 0.36697888 -0.9222965 ] [ 0. 0.5514538 0.0327612 1.417036 -1.9980855 0.08555703 -0.21491523 -1.0567092 0.74767417 -0.28942785 -1.5248805 -0.81538665 -1.9108461 0.2794841 -0.31058112] [ 0. 0.1938246 -0.7332862 1.0128379 -1.5487605 -0.25862065 1.3704208 0.07380289 -0.03527166 0.78880143 -0.75061786 0.4808961 -0.77533215 -1.2685288 -1.797083 ]] [[ 0. -0.4264178 0.26657185 -0.6025924 1.1993114 0.8230856 0.6325385 -1.9049908 0.86753106 -0.9268914 1.7690926 1.7288723 -0.02566173 1.2777745 0.3919997 ] [ 0. 0.6399123 0.29252857 -0.4428081 0.8417309 -0.24515085 -0.2872941 2.5788467 -1.4801204 0.7931841 0.96881074 -0.3413466 -1.4952563 -1.1413448 2.0841014 ] [ 0. -0.6111452 0.7471116 0.22333023 -0.7973106 -0.17371787 1.1483765 0.00460664 0.38799822 0.8006236 0.40789902 0.36957902 -0.28332183 2.1266768 0.58784926] [ 0. -0.9919902 -0.3947772 0.5211881 -0.40132865 -0.93431336 0.4438441 0.28278112 1.0661706 -2.852921 -0.6584899 -0.37189686 1.1984626 -0.15729699 -1.3220352 ] [ 0. 0.08801148 0.28149268 -0.8621402 -0.26473436 -1.4925221 1.0714544 -0.2580411 0.21090376 0.21891887 -2.609881 -0.16506858 0.2155284 -0.60700756 0.15273176] [ 0. 
0.17506064 0.05319844 0.6648481 0.5687574 0.7568433 -0.46302855 -0.74954385 0.4342282 0.74849075 -0.43779847 0.2797872 0.44586793 0.29851988 0.14571203] [ 0. 0.5442646 -1.0398034 0.3751528 0.9032874 -1.7347002 0.8309773 0.78518975 -0.3668353 0.48488146 1.4279552 0.5125858 -1.4215028 0.72942847 -0.32601193] [ 0. -0.48392653 -0.28979415 -0.7204723 -0.40437892 1.4885218 -1.1359897 0.7411284 -2.3111053 -1.0730627 -1.9707998 -1.0757043 0.3451482 -1.5850554 0.2549279 ] [ 0. 0.4268266 0.60488075 -0.5682541 0.75525606 0.4870442 -0.4974016 -0.16820478 0.94899577 -0.09546255 -0.11765 -1.4117794 0.92054653 -0.61069894 0.84855133] [ 0. -0.21615528 0.00412421 -0.82176876 0.10886092 0.69131225 -0.49165484 2.3211484 -0.63722634 -0.17584814 -1.8289018 0.3758914 0.13941559 -0.89650124 1.0032903 ] [ 0. 0.04412008 -0.230525 -0.67114425 0.01931887 -1.8166274 -0.8531235 -0.924525 -0.7660868 -0.09350353 0.54705656 1.1347072 0.27708313 -0.71803534 0.20895351] [ 0. -1.3626231 -0.6341901 0.9518484 1.6334739 -0.6077719 -0.14529349 -1.1530817 0.77574414 -0.9089948 -1.5156014 0.28601107 0.06078949 1.990262 -1.6213417 ] [ 0. -0.21467151 0.8116412 0.4499767 0.97115153 -0.14842914 0.7590392 0.6590174 0.19766553 -0.5971469 0.41562518 -0.7278762 -1.0634499 -0.35252917 0.9377429 ] [ 0. 3.0346632 1.0787499 2.0036075 -0.3433968 -0.0392866 0.0979479 0.11629106 0.88566667 0.09674838 0.01896703 -0.7139513 -0.3769667 0.9546918 0.7712543 ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 
0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]]]; ov_res: [[[[ 0. 0.2681688 -2.0864158 -1.5079944 0.5603066 -0.6582971 0.83087593 0.27808794 0.84234715 0.4605277 -1.0101421 0.38001823 3.6569831 0.27042636 0.6971312 ] [ 0. 0.6854182 0.13878486 -0.28487048 1.9545991 -0.4692962 -0.7292422 0.08067049 0.8019348 0.36000964 0.62570584 -1.2334414 0.12901603 0.23758689 -0.02864466] [ 0. 0.21845432 -1.3556356 0.5853109 0.7924248 1.1835178 -0.22693846 1.9906721 -0.95960927 1.2046278 -0.46556827 -1.2073234 -0.7737045 0.74790484 1.48025 ] [ 0. -0.02720316 0.23418573 -0.36853334 -0.17768489 -1.335209 -0.9195721 1.2319279 1.4845985 -0.9771285 -0.60484505 -0.2844431 0.48049417 0.93093044 -0.21863128] [ 0. -0.9893057 -0.09298092 -0.9674527 2.1967986 0.04016247 -0.89350635 0.8269577 0.1776547 -0.6631764 0.9926471 0.73453623 -0.18325388 -1.5409533 -1.792041 ] [ 0. 1.1504337 -0.10811359 0.08843325 -0.11421485 1.5639246 -0.21165729 -0.4987735 0.5896544 0.10326385 0.8431825 1.0535432 0.6879772 0.88017887 0.95732915] [ 0. 1.0616626 -0.23538291 1.5517838 0.6180071 2.726564 -0.45774403 -1.2573543 -0.4653936 1.0776371 0.90412056 0.652619 -0.25142476 -1.1488783 -0.60398203] [ 0. -2.0671277 -0.35470277 -0.15491037 1.0584832 -0.3879281 -0.64413565 -0.06609146 1.0475584 0.5056724 -1.0522909 -0.75420594 -0.8871519 -0.38563353 0.11009352] [ 0. 0.02722269 -0.16407712 0.75302607 -0.22751233 1.4120477 0.78190666 1.0993272 -1.1706746 -0.9810139 -1.3836789 0.8470399 -1.3028706 0.24249342 -0.9999402 ] [ 0. -0.51699674 -0.85140675 -0.07542821 0.70831645 0.5774712 -0.28891096 -0.4700357 0.4243731 -0.77920526 -0.6629186 -0.07623233 0.8533474 0.09785195 -0.20054466] [ 0. 0.28944668 1.7438891 0.15419802 -0.30327627 0.6467274 -1.1313975 -1.5296482 -0.0088079 -0.94336253 0.895014 -0.28170732 1.3605129 0.02384619 1.4542251 ] [ 0. 
1.5469245 1.2893528 -1.5662823 0.03016344 0.0320187 -1.5588787 0.19422393 0.15632842 -0.28337568 -1.3561732 -1.164319 0.7703341 -0.24461207 0.656065 ] [ 0. 2.350791 0.98478115 -0.37849107 1.8685058 -0.544081 0.3270849 -0.15501331 0.9658629 -0.7723956 2.0023513 -1.5211393 -0.1636027 0.317666 -1.3044375 ] [ 0. 1.5619183 -0.7484324 1.0999684 -0.9905051 0.6037011 0.74430054 -1.8362428 0.9797166 -0.56297183 -0.1587544 -0.09796321 -0.72033215 -0.73573804 -0.65585345]] [[ 0. -0.74786335 -0.5752026 0.78447783 -0.41426915 0.34111908 2.0479722 1.1782807 0.22581574 0.09942677 -0.73315734 1.457728 0.6137981 1.6543455 0.5208509 ] [ 0. -1.0501112 -0.15199876 0.09726692 -1.3436233 0.662364 0.97864664 -0.7367297 -1.1360149 -1.6689408 -0.09760271 -0.98832047 0.03106857 0.9399902 -0.1709865 ] [ 0. 0.68106663 0.9758076 -0.47114983 0.30536896 0.60936284 -0.16492651 -0.49894974 -1.5727228 -0.5783932 0.18661349 -2.0916367 0.40800473 1.2522589 -0.47074643] [ 0. -0.8405646 0.37266427 -0.5849461 1.478017 0.4049136 0.5870216 -0.78782856 -0.3003468 -0.21523418 0.83864504 0.6673946 0.6367911 0.30150872 0.08137815] [ 0. 1.0121547 0.41252917 1.0334375 -1.7465838 0.78838134 -0.08377871 1.5503973 0.6271581 0.2504139 1.4316274 -0.74416083 -0.6637609 -1.188649 1.2087846 ] [ 0. 1.9489179 -0.0775462 0.95192826 -1.0509391 0.47374517 0.13236363 1.9702811 -0.3944504 -0.5750061 -0.12261299 1.7739868 1.0866107 -1.0479643 -0.5296081 ] [ 0. -0.42148763 0.6397655 -0.37319475 -0.3499873 -1.548777 -0.7440557 -0.3380441 1.1040802 0.92361057 -0.10725069 -0.65979856 1.250226 -0.448129 -1.1888498 ] [ 0. 0.58531487 0.19067292 0.06286708 -0.7904113 0.34894106 -0.62172955 -0.32550552 0.479228 0.66629535 -1.709109 0.6648221 0.6204596 1.0607486 0.34422696] [ 0. -0.17695743 -1.5270165 -0.26372415 -0.56906664 -0.11124353 0.154072 1.2265079 -0.9894464 0.9310982 1.8916898 -0.9689668 -1.1958864 0.5027611 -0.0794358 ] [ 0. 
-0.37860057 0.5224926 0.99702173 1.380551 -0.54061717 0.6519071 -0.11370388 -1.4588331 0.6227354 0.9045593 0.19302437 -0.0255883 -1.2821271 0.45421284] [ 0. 0.5838735 1.1038617 0.86151886 0.8896699 -0.44535238 1.9187636 -0.12480813 -0.1016189 -3.1313431 -0.4499923 0.128651 0.04349481 1.0431969 -0.29787096] [ 0. -0.22582367 0.15990858 1.2068303 -0.15585533 0.7447838 0.6973592 0.62597394 -0.2587711 0.8692876 1.3497621 -0.40978074 0.97644067 0.36697888 -0.9222965 ] [ 0. 0.5514538 0.0327612 1.417036 -1.9980855 0.08555703 -0.21491523 -1.0567092 0.74767417 -0.28942785 -1.5248805 -0.81538665 -1.9108461 0.2794841 -0.31058112] [ 0. 0.1938246 -0.7332862 1.0128379 -1.5487605 -0.25862065 1.3704208 0.07380289 -0.03527166 0.78880143 -0.75061786 0.4808961 -0.77533215 -1.2685288 -1.797083 ]] [[ 0. -0.4264178 0.26657185 -0.6025924 1.1993114 0.8230856 0.6325385 -1.9049908 0.86753106 -0.9268914 1.7690926 1.7288723 -0.02566173 1.2777745 0.3919997 ] [ 0. 0.6399123 0.29252857 -0.4428081 0.8417309 -0.24515085 -0.2872941 2.5788467 -1.4801204 0.7931841 0.96881074 -0.3413466 -1.4952563 -1.1413448 2.0841014 ] [ 0. -0.6111452 0.7471116 0.22333023 -0.7973106 -0.17371787 1.1483765 0.00460664 0.38799822 0.8006236 0.40789902 0.36957902 -0.28332183 2.1266768 0.58784926] [ 0. -0.9919902 -0.3947772 0.5211881 -0.40132865 -0.93431336 0.4438441 0.28278112 1.0661706 -2.852921 -0.6584899 -0.37189686 1.1984626 -0.15729699 -1.3220352 ] [ 0. 0.08801148 0.28149268 -0.8621402 -0.26473436 -1.4925221 1.0714544 -0.2580411 0.21090376 0.21891887 -2.609881 -0.16506858 0.2155284 -0.60700756 0.15273176] [ 0. 0.17506064 0.05319844 0.6648481 0.5687574 0.7568433 -0.46302855 -0.74954385 0.4342282 0.74849075 -0.43779847 0.2797872 0.44586793 0.29851988 0.14571203] [ 0. 0.5442646 -1.0398034 0.3751528 0.9032874 -1.7347002 0.8309773 0.78518975 -0.3668353 0.48488146 1.4279552 0.5125858 -1.4215028 0.72942847 -0.32601193] [ 0. 
-0.48392653 -0.28979415 -0.7204723 -0.40437892 1.4885218 -1.1359897 0.7411284 -2.3111053 -1.0730627 -1.9707998 -1.0757043 0.3451482 -1.5850554 0.2549279 ] [ 0. 0.4268266 0.60488075 -0.5682541 0.75525606 0.4870442 -0.4974016 -0.16820478 0.94899577 -0.09546255 -0.11765 -1.4117794 0.92054653 -0.61069894 0.84855133] [ 0. -0.21615528 0.00412421 -0.82176876 0.10886092 0.69131225 -0.49165484 2.3211484 -0.63722634 -0.17584814 -1.8289018 0.3758914 0.13941559 -0.89650124 1.0032903 ] [ 0. 0.04412008 -0.230525 -0.67114425 0.01931887 -1.8166274 -0.8531235 -0.924525 -0.7660868 -0.09350353 0.54705656 1.1347072 0.27708313 -0.71803534 0.20895351] [ 0. -1.3626231 -0.6341901 0.9518484 1.6334739 -0.6077719 -0.14529349 -1.1530817 0.77574414 -0.9089948 -1.5156014 0.28601107 0.06078949 1.990262 -1.6213417 ] [ 0. -0.21467151 0.8116412 0.4499767 0.97115153 -0.14842914 0.7590392 0.6590174 0.19766553 -0.5971469 0.41562518 -0.7278762 -1.0634499 -0.35252917 0.9377429 ] [ 0. 3.0346632 1.0787499 2.0036075 -0.3433968 -0.0392866 0.0979479 0.11629106 0.88566667 0.09674838 0.01896703 -0.7139513 -0.3769667 0.9546918 0.7712543 ]] [[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ] [ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(0, 0, 0, 0, 0, 0) - mode:constant - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5505.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0, 0, 0, 0, 0]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="constant"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[-2.12596670e-01 3.26207846e-01 6.48725703e-02 9.33299065e-01 2.18284106e+00 8.54737818e-01 9.41645503e-01 2.43733693e-02 5.77178776e-01 -8.69983137e-01 8.05537164e-01 -4.48620290e-01 3.32677871e-01 5.34006596e-01] [-1.05654812e+00 -2.59164870e-01 -4.84505922e-01 9.23203170e-01 -4.75087643e-01 1.41803658e+00 2.68104911e-01 -1.03751206e+00 4.56641316e-01 -1.38156223e+00 2.78223872e-01 5.91355860e-01 8.45177412e-01 -4.89252666e-03] [ 1.76166201e+00 2.97959328e-01 1.88361967e+00 -1.19531691e+00 -5.35414219e-01 -1.17766631e+00 1.78578389e+00 2.27835074e-01 -8.69018078e-01 7.98105061e-01 -1.70170128e+00 7.81964421e-01 -1.47162008e+00 2.26243973e-01] [-7.45061815e-01 -4.03469771e-01 1.83047056e+00 -3.50428551e-01 -1.49098039e+00 5.25264323e-01 6.70586288e-01 -9.16205227e-01 1.37512550e-01 -1.59483528e+00 -3.78453955e-02 -3.33128333e-01 -8.20357054e-02 -1.16111100e+00] [-1.48394632e+00 -1.62316814e-01 -6.99731827e-01 -2.91338176e-01 -9.98800159e-01 -5.16960621e-01 8.01830530e-01 7.52222061e-01 -1.11652148e+00 -7.73182750e-01 -3.48630518e-01 -4.76694614e-01 -5.77073216e-01 4.61333513e-01] [-8.09675362e-03 6.52916789e-01 1.45731187e+00 -7.29538441e-01 1.08953810e+00 2.06426885e-02 1.05410206e+00 5.68225026e-01 1.04787970e+00 8.82413805e-01 -4.65176672e-01 4.14363772e-01 1.19150972e+00 -1.85819244e+00] [-1.44933736e+00 -1.13995385e+00 -1.54176199e+00 -1.47135460e+00 9.63614821e-01 -3.44447017e-01 8.02480578e-01 1.53677797e+00 -1.84288692e+00 8.28075111e-01 1.04607272e+00 6.63510203e-01 -1.68756932e-01 
7.62334168e-01] [ 1.86072075e+00 -7.63725713e-02 -9.53556120e-01 -1.81270325e+00 1.76228702e+00 -2.38559699e+00 -1.51398122e+00 2.49155864e-01 2.24591303e+00 -6.58771098e-01 -2.65524507e+00 2.52535552e-01 -5.17326713e-01 9.01462615e-01] [-2.05862701e-01 1.25784910e+00 -3.97591352e-01 1.11025703e+00 5.79244912e-01 -9.32497308e-02 6.96706772e-01 -1.57887840e+00 1.24510384e+00 1.36045861e+00 -4.53342289e-01 -1.87237307e-01 -3.84501576e-01 7.69082725e-01] [-1.05609429e+00 -8.57789755e-01 5.40152311e-01 1.46952271e-01 1.01144290e+00 4.92313057e-01 8.41783285e-02 1.36437178e+00 -8.79432261e-01 -7.45688438e-01 -5.31634927e-01 -7.88994074e-01 6.14715159e-01 -6.31217957e-01] [ 4.65961695e-01 -2.75597453e-01 9.52432752e-01 -9.08218145e-01 -1.42434752e+00 8.91921639e-01 1.22350091e-02 1.93674195e+00 -4.03977960e-01 -1.94734752e-01 2.06703639e+00 1.40374291e+00 -9.83123362e-01 7.04047918e-01] [-1.16175199e+00 -1.15088141e+00 -3.76354098e-01 9.98970389e-01 6.66161358e-01 -1.06403434e+00 2.21145630e-01 2.02431127e-01 9.70177829e-01 5.79903066e-01 6.39905632e-01 -8.01171899e-01 -2.35493469e+00 -3.14033866e-01] [ 1.03410745e+00 6.45389333e-02 5.27704060e-01 -1.53841957e-01 -1.20436139e-01 2.89265901e-01 -3.52039278e-01 -5.08282006e-01 -8.09489667e-01 -1.31232178e+00 1.24198627e+00 -5.62894084e-02 9.41075861e-01 -1.23808764e-01] [-4.77683246e-01 1.57676864e+00 -4.40803260e-01 -1.78621382e-01 9.00411546e-01 1.74195513e-01 3.05304192e-02 -7.81131566e-01 -3.73915136e-01 -1.20100759e-01 -1.22830796e+00 4.55087453e-01 1.58561873e+00 -9.05239880e-01]] [[-1.55010998e+00 -7.44999796e-02 -2.62496531e-01 -9.71382797e-01 5.73468089e-01 -1.46491483e-01 3.91816884e-01 -3.20131630e-01 1.09289348e+00 -3.63215357e-02 2.32015159e-02 1.36518085e+00 3.89026910e-01 -1.50754178e+00] [ 1.94587171e-01 6.75983203e-04 -1.37717497e+00 5.73258519e-01 9.14350033e-01 -1.18030870e+00 1.42409766e+00 -3.16761225e-01 -1.68386257e+00 -6.39599144e-01 1.31148887e+00 5.90072870e-01 8.29672754e-01 -1.09841800e+00] 
[-1.25284517e+00 5.69902718e-01 -5.88833869e-01 6.22560918e-01 -4.37699437e-01 6.12448394e-01 -3.56075615e-01 -1.48139274e+00 9.46891129e-01 -2.98296392e-01 4.87886399e-01 6.52861735e-03 -1.58241880e+00 -6.84939265e-01] [-8.47692609e-01 -2.11096972e-01 -2.07947516e+00 1.57387704e-01 1.21682368e-01 -1.81154981e-01 2.66890168e-01 -5.37272513e-01 -9.11143959e-01 -5.27459562e-01 -1.12065196e+00 -2.50193886e-02 4.71028775e-01 -2.21358865e-01] [-3.71740945e-02 2.33064845e-01 3.01441047e-02 4.75290447e-01 8.58843684e-01 -3.68777394e-01 -1.02266932e+00 -6.45396233e-01 -7.16819707e-03 -1.40446472e+00 1.28442144e+00 -5.08253038e-01 -6.62830353e-01 -7.54325092e-01] [ 8.81220758e-01 4.11885232e-01 -6.43553972e-01 1.71933460e+00 4.61637139e-01 1.30915797e+00 8.05611849e-01 2.31854752e-01 6.54659033e-01 1.18687952e+00 -1.50354600e+00 -2.65009493e-01 -8.47687244e-01 -4.98178035e-01] [-1.46586370e+00 -1.58429846e-01 2.42381081e-01 9.74811494e-01 -1.23173344e+00 2.25847542e-01 5.18340692e-02 2.14852557e-01 1.19876528e+00 3.28307390e-01 -1.36018288e+00 2.88530827e-01 -1.11665416e+00 -1.50820839e+00] [ 1.99917877e+00 1.16042304e+00 -5.01740456e-01 -4.22702819e-01 1.06471753e+00 -1.24278677e+00 1.92585856e-01 4.87687290e-01 -1.53372213e-01 7.53899992e-01 3.01597625e-01 3.43547463e-01 5.81197858e-01 -1.88116074e+00] [ 8.18544984e-01 -1.37041056e+00 -8.09439003e-01 -9.57380891e-01 -4.74440813e-01 -4.40769374e-01 1.98932958e+00 -1.02313928e-01 -1.38439488e+00 -5.31598389e-01 -7.18507990e-02 -1.07084170e-01 7.79906392e-01 -6.38470948e-01] [-5.63920975e-01 -8.54285061e-01 -5.05210042e-01 2.57283628e-01 -6.22508407e-01 5.44536114e-01 6.67510808e-01 5.27256906e-01 2.30794862e-01 2.25850433e-01 7.16198832e-02 2.26753011e-01 9.00824368e-01 1.57408631e+00] [-3.39380652e-01 1.18969810e+00 7.88716674e-01 5.41197836e-01 1.07868469e+00 1.41897428e+00 1.55178770e-01 2.98198313e-01 -7.33188391e-02 1.33063328e+00 -3.86659741e-01 4.77597952e-01 -2.53007507e+00 4.06810194e-01] [-5.84520698e-01 
1.44982627e-02 1.15313673e+00 1.55429995e+00 -1.92225173e-01 -2.05804482e-01 3.03001493e-01 -1.03797317e+00 1.44625843e+00 9.93073165e-01 -1.12961888e+00 9.52344358e-01 1.60160577e+00 1.38857186e+00] [ 7.18469739e-01 -8.68730471e-02 -1.52852309e+00 1.97013900e-01 1.71421930e-01 -2.70574391e-01 5.12267232e-01 1.33223903e+00 -5.10407805e-01 -1.36308396e+00 -1.11735845e+00 6.79814100e-01 -1.36078441e+00 9.10019219e-01] [ 1.71870422e+00 1.16929471e+00 -5.27685881e-01 -2.66788781e-01 9.23928916e-01 1.82692397e+00 6.44217074e-01 1.95895100e+00 -6.74480677e-01 -8.27207923e-01 8.89325440e-01 5.52744210e-01 -3.31164569e-01 -4.24526423e-01]] [[ 1.05402195e+00 -3.97824526e-01 1.54896784e+00 -1.20016778e+00 -1.80552912e+00 3.68212044e-01 -1.35274518e+00 2.95134187e-02 -3.68292034e-02 2.19342589e-01 1.43517077e-01 -3.34017754e-01 -9.58720028e-01 3.70201826e-01] [-4.84284401e-01 -5.78566313e-01 2.09838796e+00 6.96304977e-01 6.33477643e-02 8.85202050e-01 2.11396623e+00 7.54720211e-01 -2.34350562e-01 6.71832204e-01 1.18850991e-01 -5.59067607e-01 2.34302372e-01 -8.11954975e-01] [ 8.83488506e-02 4.27198619e-01 -3.81869853e-01 -8.21760371e-02 1.02731287e+00 2.29866225e-02 6.25261664e-01 2.50524044e-01 -1.25016618e+00 1.03563416e+00 -2.03405380e-01 1.12857664e+00 -6.12906396e-01 -8.20708275e-01] [ 2.09028006e+00 -1.16767430e+00 8.94640505e-01 4.14825827e-01 4.63711649e-01 -1.29014745e-01 1.06313097e+00 1.22431636e+00 -3.15622449e+00 -2.98717022e-01 -1.19783930e-01 -9.06145155e-01 7.17999637e-01 -1.23032769e-02] [ 9.61648464e-01 -2.01340094e-01 -3.36709730e-02 -1.78548491e+00 1.10224950e+00 9.09631133e-01 1.16973782e+00 -2.23484588e+00 3.13589883e+00 -1.05205023e+00 -4.11232501e-01 4.83627111e-01 7.35164881e-01 -3.88200700e-01] [ 1.55106628e+00 -3.04784596e-01 -1.17508554e+00 -6.58779740e-01 1.04302406e+00 -1.10459521e-01 6.00516140e-01 1.40020680e+00 -5.27853608e-01 -1.97732699e+00 -1.42613792e+00 2.33197045e+00 6.40745521e-01 7.02328503e-01] [ 5.84364772e-01 9.81090367e-01 
9.80693698e-01 -1.14686084e+00 -1.65259898e-01 -5.24063230e-01 1.98270544e-01 -1.23529303e+00 -1.26927555e+00 1.04210041e-01 -7.13664889e-01 -2.30484828e-01 -6.00701213e-01 -3.92730653e-01] [-9.08275366e-01 1.47741127e+00 -1.94450033e+00 -9.63775396e-01 -6.83212951e-02 1.04865301e-02 -8.67307127e-01 -1.31344628e+00 9.10370708e-01 -1.34100711e+00 -1.10114372e+00 1.75522506e+00 4.19047564e-01 2.47434139e-01] [ 1.17525494e+00 1.82714331e+00 6.59609497e-01 1.94210351e-01 1.41382301e+00 6.96225226e-01 5.35192601e-02 5.94695926e-01 2.17015243e+00 7.46517718e-01 1.39198303e+00 1.10237062e+00 3.19233358e-01 -1.64252162e+00] [-6.34719580e-02 -1.35278761e+00 -7.98065245e-01 -4.52280194e-01 2.19242290e-01 -1.62663734e+00 -5.26787996e-01 -3.95923071e-02 1.55713809e+00 5.60481310e-01 -7.39467800e-01 8.09302509e-01 -7.75770426e-01 -2.21683955e+00] [ 2.50755250e-01 5.08418567e-02 -1.25584090e+00 1.90895751e-01 1.12696993e+00 5.56898952e-01 -2.95204312e-01 1.45311803e-01 -6.41054809e-01 -3.10465366e-01 -4.87016797e-01 -2.13380837e+00 -8.74060094e-01 -1.01809621e+00] [-4.80024666e-02 -1.18059194e+00 6.11937582e-01 9.73330498e-01 -1.19136302e-02 -8.40465352e-02 1.30076611e+00 6.79315746e-01 1.47643828e+00 5.72273731e-01 1.36874342e+00 1.55495834e+00 -4.50123101e-01 -1.90594599e-01] [ 7.65726864e-01 -4.42233443e-01 1.35805833e+00 -8.42890203e-01 5.77536523e-01 4.45708960e-01 -1.92623362e-01 -1.13036680e+00 2.61501580e-01 1.88763595e+00 -9.21907485e-01 1.22605228e+00 -3.78129810e-01 1.29678702e+00] [ 1.28166127e+00 2.12603733e-01 -5.05679697e-02 -8.76933277e-01 -6.09093249e-01 -8.54233086e-01 4.98180278e-02 7.99702667e-03 -6.94191575e-01 -3.98348927e-01 4.32763398e-01 2.89808452e-01 -7.42359042e-01 -1.32224038e-01]]]]; ov_res: [[[[-2.12596670e-01 3.26207846e-01 6.48725703e-02 9.33299065e-01 2.18284106e+00 8.54737818e-01 9.41645503e-01 2.43733693e-02 5.77178776e-01 -8.69983137e-01 8.05537164e-01 -4.48620290e-01 3.32677871e-01 5.34006596e-01] [-1.05654812e+00 -2.59164870e-01 
-4.84505922e-01 9.23203170e-01 -4.75087643e-01 1.41803658e+00 2.68104911e-01 -1.03751206e+00 4.56641316e-01 -1.38156223e+00 2.78223872e-01 5.91355860e-01 8.45177412e-01 -4.89252666e-03] [ 1.76166201e+00 2.97959328e-01 1.88361967e+00 -1.19531691e+00 -5.35414219e-01 -1.17766631e+00 1.78578389e+00 2.27835074e-01 -8.69018078e-01 7.98105061e-01 -1.70170128e+00 7.81964421e-01 -1.47162008e+00 2.26243973e-01] [-7.45061815e-01 -4.03469771e-01 1.83047056e+00 -3.50428551e-01 -1.49098039e+00 5.25264323e-01 6.70586288e-01 -9.16205227e-01 1.37512550e-01 -1.59483528e+00 -3.78453955e-02 -3.33128333e-01 -8.20357054e-02 -1.16111100e+00] [-1.48394632e+00 -1.62316814e-01 -6.99731827e-01 -2.91338176e-01 -9.98800159e-01 -5.16960621e-01 8.01830530e-01 7.52222061e-01 -1.11652148e+00 -7.73182750e-01 -3.48630518e-01 -4.76694614e-01 -5.77073216e-01 4.61333513e-01] [-8.09675362e-03 6.52916789e-01 1.45731187e+00 -7.29538441e-01 1.08953810e+00 2.06426885e-02 1.05410206e+00 5.68225026e-01 1.04787970e+00 8.82413805e-01 -4.65176672e-01 4.14363772e-01 1.19150972e+00 -1.85819244e+00] [-1.44933736e+00 -1.13995385e+00 -1.54176199e+00 -1.47135460e+00 9.63614821e-01 -3.44447017e-01 8.02480578e-01 1.53677797e+00 -1.84288692e+00 8.28075111e-01 1.04607272e+00 6.63510203e-01 -1.68756932e-01 7.62334168e-01] [ 1.86072075e+00 -7.63725713e-02 -9.53556120e-01 -1.81270325e+00 1.76228702e+00 -2.38559699e+00 -1.51398122e+00 2.49155864e-01 2.24591303e+00 -6.58771098e-01 -2.65524507e+00 2.52535552e-01 -5.17326713e-01 9.01462615e-01] [-2.05862701e-01 1.25784910e+00 -3.97591352e-01 1.11025703e+00 5.79244912e-01 -9.32497308e-02 6.96706772e-01 -1.57887840e+00 1.24510384e+00 1.36045861e+00 -4.53342289e-01 -1.87237307e-01 -3.84501576e-01 7.69082725e-01] [-1.05609429e+00 -8.57789755e-01 5.40152311e-01 1.46952271e-01 1.01144290e+00 4.92313057e-01 8.41783285e-02 1.36437178e+00 -8.79432261e-01 -7.45688438e-01 -5.31634927e-01 -7.88994074e-01 6.14715159e-01 -6.31217957e-01] [ 4.65961695e-01 -2.75597453e-01 9.52432752e-01 
-9.08218145e-01 -1.42434752e+00 8.91921639e-01 1.22350091e-02 1.93674195e+00 -4.03977960e-01 -1.94734752e-01 2.06703639e+00 1.40374291e+00 -9.83123362e-01 7.04047918e-01] [-1.16175199e+00 -1.15088141e+00 -3.76354098e-01 9.98970389e-01 6.66161358e-01 -1.06403434e+00 2.21145630e-01 2.02431127e-01 9.70177829e-01 5.79903066e-01 6.39905632e-01 -8.01171899e-01 -2.35493469e+00 -3.14033866e-01] [ 1.03410745e+00 6.45389333e-02 5.27704060e-01 -1.53841957e-01 -1.20436139e-01 2.89265901e-01 -3.52039278e-01 -5.08282006e-01 -8.09489667e-01 -1.31232178e+00 1.24198627e+00 -5.62894084e-02 9.41075861e-01 -1.23808764e-01] [-4.77683246e-01 1.57676864e+00 -4.40803260e-01 -1.78621382e-01 9.00411546e-01 1.74195513e-01 3.05304192e-02 -7.81131566e-01 -3.73915136e-01 -1.20100759e-01 -1.22830796e+00 4.55087453e-01 1.58561873e+00 -9.05239880e-01]] [[-1.55010998e+00 -7.44999796e-02 -2.62496531e-01 -9.71382797e-01 5.73468089e-01 -1.46491483e-01 3.91816884e-01 -3.20131630e-01 1.09289348e+00 -3.63215357e-02 2.32015159e-02 1.36518085e+00 3.89026910e-01 -1.50754178e+00] [ 1.94587171e-01 6.75983203e-04 -1.37717497e+00 5.73258519e-01 9.14350033e-01 -1.18030870e+00 1.42409766e+00 -3.16761225e-01 -1.68386257e+00 -6.39599144e-01 1.31148887e+00 5.90072870e-01 8.29672754e-01 -1.09841800e+00] [-1.25284517e+00 5.69902718e-01 -5.88833869e-01 6.22560918e-01 -4.37699437e-01 6.12448394e-01 -3.56075615e-01 -1.48139274e+00 9.46891129e-01 -2.98296392e-01 4.87886399e-01 6.52861735e-03 -1.58241880e+00 -6.84939265e-01] [-8.47692609e-01 -2.11096972e-01 -2.07947516e+00 1.57387704e-01 1.21682368e-01 -1.81154981e-01 2.66890168e-01 -5.37272513e-01 -9.11143959e-01 -5.27459562e-01 -1.12065196e+00 -2.50193886e-02 4.71028775e-01 -2.21358865e-01] [-3.71740945e-02 2.33064845e-01 3.01441047e-02 4.75290447e-01 8.58843684e-01 -3.68777394e-01 -1.02266932e+00 -6.45396233e-01 -7.16819707e-03 -1.40446472e+00 1.28442144e+00 -5.08253038e-01 -6.62830353e-01 -7.54325092e-01] [ 8.81220758e-01 4.11885232e-01 -6.43553972e-01 1.71933460e+00 
4.61637139e-01 1.30915797e+00 8.05611849e-01 2.31854752e-01 6.54659033e-01 1.18687952e+00 -1.50354600e+00 -2.65009493e-01 -8.47687244e-01 -4.98178035e-01] [-1.46586370e+00 -1.58429846e-01 2.42381081e-01 9.74811494e-01 -1.23173344e+00 2.25847542e-01 5.18340692e-02 2.14852557e-01 1.19876528e+00 3.28307390e-01 -1.36018288e+00 2.88530827e-01 -1.11665416e+00 -1.50820839e+00] [ 1.99917877e+00 1.16042304e+00 -5.01740456e-01 -4.22702819e-01 1.06471753e+00 -1.24278677e+00 1.92585856e-01 4.87687290e-01 -1.53372213e-01 7.53899992e-01 3.01597625e-01 3.43547463e-01 5.81197858e-01 -1.88116074e+00] [ 8.18544984e-01 -1.37041056e+00 -8.09439003e-01 -9.57380891e-01 -4.74440813e-01 -4.40769374e-01 1.98932958e+00 -1.02313928e-01 -1.38439488e+00 -5.31598389e-01 -7.18507990e-02 -1.07084170e-01 7.79906392e-01 -6.38470948e-01] [-5.63920975e-01 -8.54285061e-01 -5.05210042e-01 2.57283628e-01 -6.22508407e-01 5.44536114e-01 6.67510808e-01 5.27256906e-01 2.30794862e-01 2.25850433e-01 7.16198832e-02 2.26753011e-01 9.00824368e-01 1.57408631e+00] [-3.39380652e-01 1.18969810e+00 7.88716674e-01 5.41197836e-01 1.07868469e+00 1.41897428e+00 1.55178770e-01 2.98198313e-01 -7.33188391e-02 1.33063328e+00 -3.86659741e-01 4.77597952e-01 -2.53007507e+00 4.06810194e-01] [-5.84520698e-01 1.44982627e-02 1.15313673e+00 1.55429995e+00 -1.92225173e-01 -2.05804482e-01 3.03001493e-01 -1.03797317e+00 1.44625843e+00 9.93073165e-01 -1.12961888e+00 9.52344358e-01 1.60160577e+00 1.38857186e+00] [ 7.18469739e-01 -8.68730471e-02 -1.52852309e+00 1.97013900e-01 1.71421930e-01 -2.70574391e-01 5.12267232e-01 1.33223903e+00 -5.10407805e-01 -1.36308396e+00 -1.11735845e+00 6.79814100e-01 -1.36078441e+00 9.10019219e-01] [ 1.71870422e+00 1.16929471e+00 -5.27685881e-01 -2.66788781e-01 9.23928916e-01 1.82692397e+00 6.44217074e-01 1.95895100e+00 -6.74480677e-01 -8.27207923e-01 8.89325440e-01 5.52744210e-01 -3.31164569e-01 -4.24526423e-01]] [[ 1.05402195e+00 -3.97824526e-01 1.54896784e+00 -1.20016778e+00 -1.80552912e+00 3.68212044e-01 
-1.35274518e+00 2.95134187e-02 -3.68292034e-02 2.19342589e-01 1.43517077e-01 -3.34017754e-01 -9.58720028e-01 3.70201826e-01] [-4.84284401e-01 -5.78566313e-01 2.09838796e+00 6.96304977e-01 6.33477643e-02 8.85202050e-01 2.11396623e+00 7.54720211e-01 -2.34350562e-01 6.71832204e-01 1.18850991e-01 -5.59067607e-01 2.34302372e-01 -8.11954975e-01] [ 8.83488506e-02 4.27198619e-01 -3.81869853e-01 -8.21760371e-02 1.02731287e+00 2.29866225e-02 6.25261664e-01 2.50524044e-01 -1.25016618e+00 1.03563416e+00 -2.03405380e-01 1.12857664e+00 -6.12906396e-01 -8.20708275e-01] [ 2.09028006e+00 -1.16767430e+00 8.94640505e-01 4.14825827e-01 4.63711649e-01 -1.29014745e-01 1.06313097e+00 1.22431636e+00 -3.15622449e+00 -2.98717022e-01 -1.19783930e-01 -9.06145155e-01 7.17999637e-01 -1.23032769e-02] [ 9.61648464e-01 -2.01340094e-01 -3.36709730e-02 -1.78548491e+00 1.10224950e+00 9.09631133e-01 1.16973782e+00 -2.23484588e+00 3.13589883e+00 -1.05205023e+00 -4.11232501e-01 4.83627111e-01 7.35164881e-01 -3.88200700e-01] [ 1.55106628e+00 -3.04784596e-01 -1.17508554e+00 -6.58779740e-01 1.04302406e+00 -1.10459521e-01 6.00516140e-01 1.40020680e+00 -5.27853608e-01 -1.97732699e+00 -1.42613792e+00 2.33197045e+00 6.40745521e-01 7.02328503e-01] [ 5.84364772e-01 9.81090367e-01 9.80693698e-01 -1.14686084e+00 -1.65259898e-01 -5.24063230e-01 1.98270544e-01 -1.23529303e+00 -1.26927555e+00 1.04210041e-01 -7.13664889e-01 -2.30484828e-01 -6.00701213e-01 -3.92730653e-01] [-9.08275366e-01 1.47741127e+00 -1.94450033e+00 -9.63775396e-01 -6.83212951e-02 1.04865301e-02 -8.67307127e-01 -1.31344628e+00 9.10370708e-01 -1.34100711e+00 -1.10114372e+00 1.75522506e+00 4.19047564e-01 2.47434139e-01] [ 1.17525494e+00 1.82714331e+00 6.59609497e-01 1.94210351e-01 1.41382301e+00 6.96225226e-01 5.35192601e-02 5.94695926e-01 2.17015243e+00 7.46517718e-01 1.39198303e+00 1.10237062e+00 3.19233358e-01 -1.64252162e+00] [-6.34719580e-02 -1.35278761e+00 -7.98065245e-01 -4.52280194e-01 2.19242290e-01 -1.62663734e+00 -5.26787996e-01 
-3.95923071e-02 1.55713809e+00 5.60481310e-01 -7.39467800e-01 8.09302509e-01 -7.75770426e-01 -2.21683955e+00] [ 2.50755250e-01 5.08418567e-02 -1.25584090e+00 1.90895751e-01 1.12696993e+00 5.56898952e-01 -2.95204312e-01 1.45311803e-01 -6.41054809e-01 -3.10465366e-01 -4.87016797e-01 -2.13380837e+00 -8.74060094e-01 -1.01809621e+00] [-4.80024666e-02 -1.18059194e+00 6.11937582e-01 9.73330498e-01 -1.19136302e-02 -8.40465352e-02 1.30076611e+00 6.79315746e-01 1.47643828e+00 5.72273731e-01 1.36874342e+00 1.55495834e+00 -4.50123101e-01 -1.90594599e-01] [ 7.65726864e-01 -4.42233443e-01 1.35805833e+00 -8.42890203e-01 5.77536523e-01 4.45708960e-01 -1.92623362e-01 -1.13036680e+00 2.61501580e-01 1.88763595e+00 -9.21907485e-01 1.22605228e+00 -3.78129810e-01 1.29678702e+00] [ 1.28166127e+00 2.12603733e-01 -5.05679697e-02 -8.76933277e-01 -6.09093249e-01 -8.54233086e-01 4.98180278e-02 7.99702667e-03 -6.94191575e-01 -3.98348927e-01 4.32763398e-01 2.89808452e-01 -7.42359042e-01 -1.32224038e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 1, 1, 2) - mode:constant - value:0.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5507.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 1, 1, 2]]() %self.value : float = prim::Constant[value=0.]() %self.mode : str = prim::Constant[value="constant"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 -2.4606110e-01 2.9957077e-01 ... 8.9258027e-01 8.3581269e-02 9.7610319e-01] [ 0.0000000e+00 -2.0526106e+00 -3.3197737e-01 ... 1.2073824e+00 2.2797821e-01 4.0871730e-01] [ 0.0000000e+00 -1.6452593e+00 -8.5930490e-01 ... 1.2793629e-01 1.8788176e+00 1.3883166e+00] ... [ 0.0000000e+00 4.9177462e-01 1.0408791e+00 ... -7.9453373e-01 -1.3485655e+00 9.6606815e-01] [ 0.0000000e+00 5.5092752e-01 -3.4421015e-01 ... 1.6908674e+00 8.1519651e-01 9.5061082e-01] [ 0.0000000e+00 -4.6238129e-04 3.9480367e-01 ... -8.4018558e-01 4.3006942e-01 -1.0600933e+00]] [[ 0.0000000e+00 6.4181471e-01 4.8007518e-01 ... 1.3327233e-01 -4.7107485e-01 7.0092535e-01] [ 0.0000000e+00 1.4835463e+00 -7.9783618e-01 ... -1.5586082e+00 6.0074246e-01 -9.9614248e-02] [ 0.0000000e+00 -6.4910722e-01 -6.2203389e-01 ... 2.0780680e-01 6.2719959e-01 -4.8540249e-01] ... [ 0.0000000e+00 4.2721044e-02 -1.7021714e-01 ... -4.7399905e-01 -1.2240992e-01 -5.5280894e-01] [ 0.0000000e+00 8.9115465e-01 2.8583345e-01 ... -1.4294492e-01 1.4435453e+00 -2.0853068e-01] [ 0.0000000e+00 -3.4408316e-01 1.7080543e+00 ... 3.1197286e-01 -1.2391326e+00 1.0399725e+00]] [[ 0.0000000e+00 -4.1386077e-01 5.6390464e-01 ... 
-3.4529665e-01 2.4570200e+00 -1.2325243e+00] [ 0.0000000e+00 -1.1235503e+00 -1.3007325e-01 ... 4.0766549e-01 -5.3219569e-01 3.1468552e-01] [ 0.0000000e+00 7.7490084e-02 2.1136892e+00 ... 4.1432205e-01 2.6939332e-01 1.9173047e-01] ... [ 0.0000000e+00 1.1352855e+00 -1.3131259e-02 ... 2.1600406e-01 1.0218056e+00 2.4047668e+00] [ 0.0000000e+00 9.0930110e-01 2.1282762e-01 ... -3.4004912e-01 -2.1469810e+00 -3.4974661e-01] [ 0.0000000e+00 -2.9397967e-01 9.8786658e-01 ... -6.6966832e-01 -2.9887817e+00 2.1972188e-01]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]]; ov_res: [[[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 -2.4606110e-01 2.9957077e-01 ... 
8.9258027e-01 8.3581269e-02 9.7610319e-01] [ 0.0000000e+00 -2.0526106e+00 -3.3197737e-01 ... 1.2073824e+00 2.2797821e-01 4.0871730e-01] [ 0.0000000e+00 -1.6452593e+00 -8.5930490e-01 ... 1.2793629e-01 1.8788176e+00 1.3883166e+00] ... [ 0.0000000e+00 4.9177462e-01 1.0408791e+00 ... -7.9453373e-01 -1.3485655e+00 9.6606815e-01] [ 0.0000000e+00 5.5092752e-01 -3.4421015e-01 ... 1.6908674e+00 8.1519651e-01 9.5061082e-01] [ 0.0000000e+00 -4.6238129e-04 3.9480367e-01 ... -8.4018558e-01 4.3006942e-01 -1.0600933e+00]] [[ 0.0000000e+00 6.4181471e-01 4.8007518e-01 ... 1.3327233e-01 -4.7107485e-01 7.0092535e-01] [ 0.0000000e+00 1.4835463e+00 -7.9783618e-01 ... -1.5586082e+00 6.0074246e-01 -9.9614248e-02] [ 0.0000000e+00 -6.4910722e-01 -6.2203389e-01 ... 2.0780680e-01 6.2719959e-01 -4.8540249e-01] ... [ 0.0000000e+00 4.2721044e-02 -1.7021714e-01 ... -4.7399905e-01 -1.2240992e-01 -5.5280894e-01] [ 0.0000000e+00 8.9115465e-01 2.8583345e-01 ... -1.4294492e-01 1.4435453e+00 -2.0853068e-01] [ 0.0000000e+00 -3.4408316e-01 1.7080543e+00 ... 3.1197286e-01 -1.2391326e+00 1.0399725e+00]] [[ 0.0000000e+00 -4.1386077e-01 5.6390464e-01 ... -3.4529665e-01 2.4570200e+00 -1.2325243e+00] [ 0.0000000e+00 -1.1235503e+00 -1.3007325e-01 ... 4.0766549e-01 -5.3219569e-01 3.1468552e-01] [ 0.0000000e+00 7.7490084e-02 2.1136892e+00 ... 4.1432205e-01 2.6939332e-01 1.9173047e-01] ... [ 0.0000000e+00 1.1352855e+00 -1.3131259e-02 ... 2.1600406e-01 1.0218056e+00 2.4047668e+00] [ 0.0000000e+00 9.0930110e-01 2.1282762e-01 ... -3.4004912e-01 -2.1469810e+00 -3.4974661e-01] [ 0.0000000e+00 -2.9397967e-01 9.8786658e-01 ... -6.6966832e-01 -2.9887817e+00 2.1972188e-01]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 0, 0) - mode:circular - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5509.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 0, 0]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="circular"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 3.58778298e-01 1.06465131e-01 1.26758850e+00 1.41758180e+00 5.59242725e-01 2.27567211e-01 -2.05621219e+00 -7.76028454e-01 -7.10386515e-01 8.09882939e-01 2.10594726e+00 -1.41730353e-01 -1.47965407e+00 8.19435656e-01 3.58778298e-01 1.06465131e-01 1.26758850e+00] [-4.30690914e-01 4.59170014e-01 -1.61179125e-01 8.33408460e-02 1.92406669e-01 -1.80836946e-01 -6.29349768e-01 -1.97202611e+00 -1.93644953e+00 4.65014935e-01 -8.90552878e-01 -1.81330585e+00 4.51260805e-01 3.84217620e-01 -4.30690914e-01 4.59170014e-01 -1.61179125e-01] [-3.61029834e-01 7.47457266e-01 1.06863379e+00 -1.00407064e+00 -6.60303354e-01 -5.43209791e-01 -2.84293979e-01 2.43663645e+00 1.56584418e+00 -2.13533378e+00 4.28341627e-02 1.42546773e+00 -1.35543704e+00 -1.56520534e+00 -3.61029834e-01 7.47457266e-01 1.06863379e+00] [ 7.89828300e-01 -4.66064900e-01 6.73340857e-01 5.59733927e-01 4.20073211e-01 -3.34459603e-01 6.27168596e-01 -1.66461408e+00 8.49651814e-01 -2.15898603e-01 -1.27507401e+00 -9.19703208e-03 -7.25752294e-01 -1.70802510e+00 7.89828300e-01 -4.66064900e-01 6.73340857e-01] [-1.94531322e+00 7.08883643e-01 1.26355693e-01 -6.75627232e-01 9.96714890e-01 8.09278071e-01 1.00100458e+00 -5.98102987e-01 -9.48424935e-01 4.14355487e-01 2.91303873e-01 1.73028147e+00 -1.20595086e+00 -1.55765986e+00 -1.94531322e+00 7.08883643e-01 1.26355693e-01] [-3.83092254e-01 4.62589338e-02 1.23444152e+00 -9.99678612e-01 -1.29471052e+00 1.35929453e+00 6.88320398e-01 1.08713484e+00 7.53535986e-01 9.11371708e-01 8.27397406e-01 -1.39902079e+00 8.48163188e-01 
8.07169974e-02 -3.83092254e-01 4.62589338e-02 1.23444152e+00] [-1.57884014e+00 -6.17775731e-02 9.77229416e-01 3.08387399e-01 1.80067599e-01 4.54209775e-01 -1.99438560e+00 -1.45159113e+00 -1.39061606e+00 -3.60637814e-01 2.41402864e+00 1.65152550e+00 1.49546897e+00 6.57010674e-02 -1.57884014e+00 -6.17775731e-02 9.77229416e-01] [ 4.56212908e-01 -2.31809586e-01 3.29876125e-01 9.47052836e-01 1.54944527e+00 2.20865822e+00 -3.90570968e-01 8.37386608e-01 1.38504654e-01 -2.14570236e+00 1.20308304e+00 -7.37108409e-01 -2.32507199e-01 6.62967801e-01 4.56212908e-01 -2.31809586e-01 3.29876125e-01] [ 1.89441586e+00 -7.73058057e-01 1.90183985e+00 4.93773282e-01 2.61560380e-01 1.57526386e+00 6.26632750e-01 -3.82519007e-01 2.24235971e-02 1.09149170e+00 1.83958864e+00 9.30818081e-01 4.42335382e-02 1.97214559e-01 1.89441586e+00 -7.73058057e-01 1.90183985e+00] [ 1.91331789e-01 7.18062222e-01 -1.47946000e+00 -9.35544074e-01 4.75607246e-01 1.35699403e+00 -4.69378620e-01 1.17003247e-01 -7.00175107e-01 -3.73630337e-02 3.88151854e-01 -4.09235507e-01 -4.68394399e-01 1.50154936e+00 1.91331789e-01 7.18062222e-01 -1.47946000e+00] [-1.95796752e+00 -2.17580780e-01 -1.41114235e+00 -9.38788235e-01 -1.08307146e-01 -2.22256735e-01 -1.55872077e-01 -7.52958477e-01 1.11310029e+00 3.19655240e-01 -1.70114005e+00 3.64944339e-02 1.09272182e+00 1.34929240e-01 -1.95796752e+00 -2.17580780e-01 -1.41114235e+00] [ 6.68982565e-01 -2.95057744e-01 1.94027126e+00 1.34224534e-01 -1.76496601e+00 -1.82699129e-01 1.51442456e+00 -2.15026259e-01 -5.58720887e-01 2.16598296e+00 1.03620839e+00 1.75288826e-01 -6.23505771e-01 9.96723711e-01 6.68982565e-01 -2.95057744e-01 1.94027126e+00] [ 1.11481857e+00 1.05810791e-01 7.70550907e-01 -6.22583687e-01 2.33763620e-01 -2.43055677e+00 3.11627835e-01 1.75725949e+00 4.46929365e-01 -4.97883648e-01 4.39157844e-01 -6.15307331e-01 -3.10086161e-01 1.55958220e-01 1.11481857e+00 1.05810791e-01 7.70550907e-01] [-5.42861104e-01 2.38428358e-02 -7.92192340e-01 -1.43860012e-01 -1.52911320e-01 
1.84805763e+00 8.56421649e-01 -1.89236909e-01 9.85304594e-01 -2.71681875e-01 9.65274498e-03 1.13334846e+00 -9.15921152e-01 3.97765577e-01 -5.42861104e-01 2.38428358e-02 -7.92192340e-01]] [[-8.32665265e-01 -1.04152477e+00 9.15636778e-01 2.16566220e-01 1.00690103e+00 1.72505271e+00 1.44498587e-01 7.02280700e-01 -1.55195284e+00 2.36171675e+00 -8.43054652e-02 -5.45895696e-01 -4.72414136e-01 -3.85706306e-01 -8.32665265e-01 -1.04152477e+00 9.15636778e-01] [ 3.57533008e-01 2.79269934e+00 -1.82549387e-01 -3.43464129e-02 1.38683283e+00 -1.49332654e+00 3.12424213e-01 1.26009977e+00 -2.12761354e+00 2.02394676e+00 3.09298873e-01 -6.44759953e-01 9.12788212e-01 1.81272253e-01 3.57533008e-01 2.79269934e+00 -1.82549387e-01] [ 6.93117678e-02 -4.19461757e-01 -4.98069935e-02 -9.58259642e-01 -8.82345319e-01 -2.24607420e+00 1.04293644e+00 -6.23462558e-01 1.44502664e+00 -1.08338153e+00 1.66961741e+00 5.24336338e-01 1.90605307e+00 2.06462550e+00 6.93117678e-02 -4.19461757e-01 -4.98069935e-02] [ 9.64652419e-01 -1.17807090e-01 -9.06715512e-01 1.85695505e+00 4.51053113e-01 -5.11363924e-01 -8.64563882e-01 3.39142621e-01 7.39631355e-01 -2.50275898e+00 -8.48900899e-03 2.55631119e-01 1.24344444e+00 1.16444528e+00 9.64652419e-01 -1.17807090e-01 -9.06715512e-01] [ 4.63968426e-01 -1.50099480e+00 1.12555039e+00 2.29765847e-01 -9.86615792e-02 6.14835382e-01 -3.85210127e-01 -1.83501053e+00 2.62524664e-01 1.02855945e+00 -4.83282536e-01 -1.20722383e-01 2.30315067e-02 4.58410025e-01 4.63968426e-01 -1.50099480e+00 1.12555039e+00] [-1.59643471e+00 3.04301709e-01 -1.75447392e+00 1.09476662e+00 -8.49455833e-01 5.97074628e-01 8.75951707e-01 -2.57533640e-01 -2.87970513e-01 -8.78678858e-01 -1.53265685e-01 4.90771115e-01 1.45718837e+00 -2.15391383e-01 -1.59643471e+00 3.04301709e-01 -1.75447392e+00] [-7.32444406e-01 1.02022338e+00 1.02095090e-01 3.08657914e-01 -1.84886372e+00 1.20948963e-01 8.71928811e-01 1.07424414e+00 -2.32669520e+00 -3.57781023e-01 1.44329917e+00 -1.01433396e+00 1.54229969e-01 -1.25918365e+00 
-7.32444406e-01 1.02022338e+00 1.02095090e-01] [ 1.02590942e+00 7.52255082e-01 -2.64759034e-01 4.95354265e-01 3.22712213e-01 -2.75636822e-01 -5.34415543e-01 1.79099545e-01 6.23582065e-01 -9.78090286e-01 -1.48041499e+00 1.05486274e+00 6.32284760e-01 9.96239960e-01 1.02590942e+00 7.52255082e-01 -2.64759034e-01] [ 1.22871876e+00 -1.15288544e+00 1.12624788e+00 -1.66452929e-01 -1.12315333e+00 -9.24865603e-01 -1.63933706e+00 1.37582171e+00 -2.56167579e+00 1.06934023e+00 -9.23630118e-01 -2.08898807e+00 1.04533541e+00 7.17065990e-01 1.22871876e+00 -1.15288544e+00 1.12624788e+00] [-5.77979267e-01 8.53357553e-01 9.44107354e-01 5.86353898e-01 -4.35048252e-01 8.84462819e-02 1.31733954e-01 -2.26795316e+00 -9.28731337e-02 -2.56907821e-01 -1.59438872e+00 6.52280599e-02 9.84098613e-01 -1.76633447e-01 -5.77979267e-01 8.53357553e-01 9.44107354e-01] [-2.87389737e-02 -7.08457083e-02 -5.59022546e-01 5.65682888e-01 -1.46955979e+00 6.29836917e-01 2.04867482e+00 2.83408117e+00 -8.05304796e-02 5.10361716e-02 -7.81214237e-01 1.53125596e+00 7.32531428e-01 -1.20978427e+00 -2.87389737e-02 -7.08457083e-02 -5.59022546e-01] [-1.04174912e+00 -4.99248832e-01 -5.72162390e-01 -1.54889524e+00 -1.29991025e-02 -5.82960010e-01 -6.33727133e-01 -9.93989632e-02 6.49132192e-01 4.54229325e-01 -7.20163345e-01 8.72479320e-01 -1.61293638e+00 -1.50401962e+00 -1.04174912e+00 -4.99248832e-01 -5.72162390e-01] [ 7.00685203e-01 -1.61572921e+00 -1.11251883e-01 -9.30019438e-01 1.08554459e+00 -5.69391727e-01 4.72748354e-02 7.09284782e-01 -1.13008356e+00 -1.04236901e+00 1.02160001e+00 5.53236127e-01 1.28972208e+00 1.33043563e+00 7.00685203e-01 -1.61572921e+00 -1.11251883e-01] [ 1.64380562e+00 5.25244415e-01 -7.45745659e-01 -1.64908743e+00 5.01601934e-01 7.34036684e-01 -5.14867604e-01 -3.90207469e-01 -1.82522464e+00 1.01942420e+00 9.77638662e-01 1.64596510e+00 5.93017936e-01 1.08675730e+00 1.64380562e+00 5.25244415e-01 -7.45745659e-01]] [[-1.99326500e-01 -1.54720807e+00 -5.44335067e-01 -3.91592175e-01 -2.47767711e+00 
2.50261277e-01 -1.08524525e+00 -2.94621140e-01 1.01648450e+00 1.17854393e+00 -7.68009722e-01 9.27458525e-01 4.57905978e-01 1.34600353e+00 -1.99326500e-01 -1.54720807e+00 -5.44335067e-01] [-2.21869767e-01 3.33599716e-01 -8.04617465e-01 4.83833075e-01 -1.34821761e+00 -7.86430836e-01 1.52022853e-01 -1.43707231e-01 4.39120501e-01 1.21588558e-01 -8.92370403e-01 -1.24387527e+00 7.23652840e-01 -3.91274780e-01 -2.21869767e-01 3.33599716e-01 -8.04617465e-01] [ 5.92505991e-01 1.41920936e+00 -2.13895273e+00 2.47748137e-01 -9.42124128e-01 5.93091428e-01 5.21953762e-01 4.14823033e-02 6.72357678e-01 -1.04671657e+00 -1.05418539e+00 6.67380273e-01 -9.72805500e-01 1.88889480e+00 5.92505991e-01 1.41920936e+00 -2.13895273e+00] [-1.17595875e+00 -2.14593157e-01 -1.90701139e+00 9.51526046e-01 5.05395710e-01 -1.04671001e+00 1.16115555e-01 -4.34897184e-01 -1.77460587e+00 -1.24790430e+00 8.37264180e-01 -6.45714819e-01 -1.09225459e-01 1.77338108e-01 -1.17595875e+00 -2.14593157e-01 -1.90701139e+00] [-2.78645307e-01 1.15060949e+00 -1.38137266e-01 1.51987445e+00 -2.06636176e-01 1.40715212e-01 1.04960203e+00 8.96047592e-01 2.08539057e+00 -1.05234230e+00 1.45945454e+00 1.14301383e+00 -9.61417556e-01 -1.51009810e+00 -2.78645307e-01 1.15060949e+00 -1.38137266e-01] [ 8.35648596e-01 2.91451156e-01 2.72162352e-02 9.92237568e-01 1.65749812e+00 -4.53040719e-01 -8.93059075e-01 1.80661932e-01 -1.13476372e+00 1.93282509e+00 1.35306692e+00 -5.40032268e-01 7.04316795e-01 -6.19966626e-01 8.35648596e-01 2.91451156e-01 2.72162352e-02] [ 1.50059056e+00 -2.30486751e+00 3.72600168e-01 -1.35647321e+00 -2.22946689e-01 -6.06218100e-01 -3.10345054e-01 1.72872841e+00 -1.37690425e+00 -1.32333612e+00 -2.64690965e-01 -5.43963909e-01 4.32884067e-01 -1.62156665e+00 1.50059056e+00 -2.30486751e+00 3.72600168e-01] [ 2.12581545e-01 -7.39331782e-01 -1.25003994e+00 3.85027826e-01 1.61132312e+00 -2.15790004e-01 -2.46970356e-01 1.28887427e+00 1.32922840e+00 7.39124537e-01 -4.75630701e-01 -7.08771646e-01 -2.60058910e-01 
-8.68854076e-02 2.12581545e-01 -7.39331782e-01 -1.25003994e+00] [-5.20892560e-01 5.60231209e-01 -6.79765224e-01 -3.13190147e-02 5.48365176e-01 -9.16587859e-02 -3.81302834e-01 -7.19923735e-01 -7.60051310e-01 -5.39819956e-01 -6.24524534e-01 1.29583314e-01 4.76857394e-01 -1.97585511e+00 -5.20892560e-01 5.60231209e-01 -6.79765224e-01] [-1.04260755e+00 -1.16445601e-03 -7.14884996e-01 -3.11576962e-01 1.09037590e+00 -1.40488192e-01 -3.04036945e-01 2.29058176e-01 6.12643540e-01 -8.48582566e-01 1.41692626e+00 1.08761716e+00 -2.91889142e-02 5.27363300e-01 -1.04260755e+00 -1.16445601e-03 -7.14884996e-01] [ 4.68823612e-01 -1.48702592e-01 -2.54026413e+00 7.51422226e-01 -3.99727911e-01 -1.47452075e-02 -1.28427279e+00 -1.73451853e+00 -1.43543386e+00 1.30041949e-02 -7.94933975e-01 1.58608720e-01 -7.59680629e-01 3.18717360e-01 4.68823612e-01 -1.48702592e-01 -2.54026413e+00] [-6.48708522e-01 1.02830338e+00 1.30212021e+00 1.35431659e+00 9.66977835e-01 1.99833140e-01 6.61360621e-01 -2.94869453e-01 -7.63966203e-01 1.19205475e-01 -5.91964483e-01 1.76656699e+00 -1.13831949e+00 -7.11755872e-01 -6.48708522e-01 1.02830338e+00 1.30212021e+00] [-2.95086503e-01 5.63571811e-01 -1.70496404e+00 -7.79853642e-01 -2.22545713e-02 -2.37866551e-01 4.00389880e-01 4.96311262e-02 -1.35945988e+00 4.79384065e-01 1.03211761e+00 6.55505419e-01 3.18053752e-01 -1.07791936e+00 -2.95086503e-01 5.63571811e-01 -1.70496404e+00] [-3.74364525e-01 -1.00016129e+00 1.81064236e+00 1.31217837e-01 2.47883081e+00 5.64432204e-01 5.37223756e-01 -8.88252318e-01 -7.43793070e-01 -3.44247296e-02 3.78810883e-01 -1.47375071e+00 -9.28483129e-01 -1.21624756e+00 -3.74364525e-01 -1.00016129e+00 1.81064236e+00]]]]; ov_res: [[[[ 3.58778298e-01 1.06465131e-01 1.26758850e+00 1.41758180e+00 5.59242725e-01 2.27567211e-01 -2.05621219e+00 -7.76028454e-01 -7.10386515e-01 8.09882939e-01 2.10594726e+00 -1.41730353e-01 -1.47965407e+00 8.19435656e-01 3.58778298e-01 1.06465131e-01 1.26758850e+00] [-4.30690914e-01 4.59170014e-01 -1.61179125e-01 
8.33408460e-02 1.92406669e-01 -1.80836946e-01 -6.29349768e-01 -1.97202611e+00 -1.93644953e+00 4.65014935e-01 -8.90552878e-01 -1.81330585e+00 4.51260805e-01 3.84217620e-01 -4.30690914e-01 4.59170014e-01 -1.61179125e-01] [-3.61029834e-01 7.47457266e-01 1.06863379e+00 -1.00407064e+00 -6.60303354e-01 -5.43209791e-01 -2.84293979e-01 2.43663645e+00 1.56584418e+00 -2.13533378e+00 4.28341627e-02 1.42546773e+00 -1.35543704e+00 -1.56520534e+00 -3.61029834e-01 7.47457266e-01 1.06863379e+00] [ 7.89828300e-01 -4.66064900e-01 6.73340857e-01 5.59733927e-01 4.20073211e-01 -3.34459603e-01 6.27168596e-01 -1.66461408e+00 8.49651814e-01 -2.15898603e-01 -1.27507401e+00 -9.19703208e-03 -7.25752294e-01 -1.70802510e+00 7.89828300e-01 -4.66064900e-01 6.73340857e-01] [-1.94531322e+00 7.08883643e-01 1.26355693e-01 -6.75627232e-01 9.96714890e-01 8.09278071e-01 1.00100458e+00 -5.98102987e-01 -9.48424935e-01 4.14355487e-01 2.91303873e-01 1.73028147e+00 -1.20595086e+00 -1.55765986e+00 -1.94531322e+00 7.08883643e-01 1.26355693e-01] [-3.83092254e-01 4.62589338e-02 1.23444152e+00 -9.99678612e-01 -1.29471052e+00 1.35929453e+00 6.88320398e-01 1.08713484e+00 7.53535986e-01 9.11371708e-01 8.27397406e-01 -1.39902079e+00 8.48163188e-01 8.07169974e-02 -3.83092254e-01 4.62589338e-02 1.23444152e+00] [-1.57884014e+00 -6.17775731e-02 9.77229416e-01 3.08387399e-01 1.80067599e-01 4.54209775e-01 -1.99438560e+00 -1.45159113e+00 -1.39061606e+00 -3.60637814e-01 2.41402864e+00 1.65152550e+00 1.49546897e+00 6.57010674e-02 -1.57884014e+00 -6.17775731e-02 9.77229416e-01] [ 4.56212908e-01 -2.31809586e-01 3.29876125e-01 9.47052836e-01 1.54944527e+00 2.20865822e+00 -3.90570968e-01 8.37386608e-01 1.38504654e-01 -2.14570236e+00 1.20308304e+00 -7.37108409e-01 -2.32507199e-01 6.62967801e-01 4.56212908e-01 -2.31809586e-01 3.29876125e-01] [ 1.89441586e+00 -7.73058057e-01 1.90183985e+00 4.93773282e-01 2.61560380e-01 1.57526386e+00 6.26632750e-01 -3.82519007e-01 2.24235971e-02 1.09149170e+00 1.83958864e+00 9.30818081e-01 
4.42335382e-02 1.97214559e-01 1.89441586e+00 -7.73058057e-01 1.90183985e+00] [ 1.91331789e-01 7.18062222e-01 -1.47946000e+00 -9.35544074e-01 4.75607246e-01 1.35699403e+00 -4.69378620e-01 1.17003247e-01 -7.00175107e-01 -3.73630337e-02 3.88151854e-01 -4.09235507e-01 -4.68394399e-01 1.50154936e+00 1.91331789e-01 7.18062222e-01 -1.47946000e+00] [-1.95796752e+00 -2.17580780e-01 -1.41114235e+00 -9.38788235e-01 -1.08307146e-01 -2.22256735e-01 -1.55872077e-01 -7.52958477e-01 1.11310029e+00 3.19655240e-01 -1.70114005e+00 3.64944339e-02 1.09272182e+00 1.34929240e-01 -1.95796752e+00 -2.17580780e-01 -1.41114235e+00] [ 6.68982565e-01 -2.95057744e-01 1.94027126e+00 1.34224534e-01 -1.76496601e+00 -1.82699129e-01 1.51442456e+00 -2.15026259e-01 -5.58720887e-01 2.16598296e+00 1.03620839e+00 1.75288826e-01 -6.23505771e-01 9.96723711e-01 6.68982565e-01 -2.95057744e-01 1.94027126e+00] [ 1.11481857e+00 1.05810791e-01 7.70550907e-01 -6.22583687e-01 2.33763620e-01 -2.43055677e+00 3.11627835e-01 1.75725949e+00 4.46929365e-01 -4.97883648e-01 4.39157844e-01 -6.15307331e-01 -3.10086161e-01 1.55958220e-01 1.11481857e+00 1.05810791e-01 7.70550907e-01] [-5.42861104e-01 2.38428358e-02 -7.92192340e-01 -1.43860012e-01 -1.52911320e-01 1.84805763e+00 8.56421649e-01 -1.89236909e-01 9.85304594e-01 -2.71681875e-01 9.65274498e-03 1.13334846e+00 -9.15921152e-01 3.97765577e-01 -5.42861104e-01 2.38428358e-02 -7.92192340e-01]] [[-8.32665265e-01 -1.04152477e+00 9.15636778e-01 2.16566220e-01 1.00690103e+00 1.72505271e+00 1.44498587e-01 7.02280700e-01 -1.55195284e+00 2.36171675e+00 -8.43054652e-02 -5.45895696e-01 -4.72414136e-01 -3.85706306e-01 -8.32665265e-01 -1.04152477e+00 9.15636778e-01] [ 3.57533008e-01 2.79269934e+00 -1.82549387e-01 -3.43464129e-02 1.38683283e+00 -1.49332654e+00 3.12424213e-01 1.26009977e+00 -2.12761354e+00 2.02394676e+00 3.09298873e-01 -6.44759953e-01 9.12788212e-01 1.81272253e-01 3.57533008e-01 2.79269934e+00 -1.82549387e-01] [ 6.93117678e-02 -4.19461757e-01 -4.98069935e-02 
-9.58259642e-01 -8.82345319e-01 -2.24607420e+00 1.04293644e+00 -6.23462558e-01 1.44502664e+00 -1.08338153e+00 1.66961741e+00 5.24336338e-01 1.90605307e+00 2.06462550e+00 6.93117678e-02 -4.19461757e-01 -4.98069935e-02] [ 9.64652419e-01 -1.17807090e-01 -9.06715512e-01 1.85695505e+00 4.51053113e-01 -5.11363924e-01 -8.64563882e-01 3.39142621e-01 7.39631355e-01 -2.50275898e+00 -8.48900899e-03 2.55631119e-01 1.24344444e+00 1.16444528e+00 9.64652419e-01 -1.17807090e-01 -9.06715512e-01] [ 4.63968426e-01 -1.50099480e+00 1.12555039e+00 2.29765847e-01 -9.86615792e-02 6.14835382e-01 -3.85210127e-01 -1.83501053e+00 2.62524664e-01 1.02855945e+00 -4.83282536e-01 -1.20722383e-01 2.30315067e-02 4.58410025e-01 4.63968426e-01 -1.50099480e+00 1.12555039e+00] [-1.59643471e+00 3.04301709e-01 -1.75447392e+00 1.09476662e+00 -8.49455833e-01 5.97074628e-01 8.75951707e-01 -2.57533640e-01 -2.87970513e-01 -8.78678858e-01 -1.53265685e-01 4.90771115e-01 1.45718837e+00 -2.15391383e-01 -1.59643471e+00 3.04301709e-01 -1.75447392e+00] [-7.32444406e-01 1.02022338e+00 1.02095090e-01 3.08657914e-01 -1.84886372e+00 1.20948963e-01 8.71928811e-01 1.07424414e+00 -2.32669520e+00 -3.57781023e-01 1.44329917e+00 -1.01433396e+00 1.54229969e-01 -1.25918365e+00 -7.32444406e-01 1.02022338e+00 1.02095090e-01] [ 1.02590942e+00 7.52255082e-01 -2.64759034e-01 4.95354265e-01 3.22712213e-01 -2.75636822e-01 -5.34415543e-01 1.79099545e-01 6.23582065e-01 -9.78090286e-01 -1.48041499e+00 1.05486274e+00 6.32284760e-01 9.96239960e-01 1.02590942e+00 7.52255082e-01 -2.64759034e-01] [ 1.22871876e+00 -1.15288544e+00 1.12624788e+00 -1.66452929e-01 -1.12315333e+00 -9.24865603e-01 -1.63933706e+00 1.37582171e+00 -2.56167579e+00 1.06934023e+00 -9.23630118e-01 -2.08898807e+00 1.04533541e+00 7.17065990e-01 1.22871876e+00 -1.15288544e+00 1.12624788e+00] [-5.77979267e-01 8.53357553e-01 9.44107354e-01 5.86353898e-01 -4.35048252e-01 8.84462819e-02 1.31733954e-01 -2.26795316e+00 -9.28731337e-02 -2.56907821e-01 -1.59438872e+00 6.52280599e-02 
9.84098613e-01 -1.76633447e-01 -5.77979267e-01 8.53357553e-01 9.44107354e-01] [-2.87389737e-02 -7.08457083e-02 -5.59022546e-01 5.65682888e-01 -1.46955979e+00 6.29836917e-01 2.04867482e+00 2.83408117e+00 -8.05304796e-02 5.10361716e-02 -7.81214237e-01 1.53125596e+00 7.32531428e-01 -1.20978427e+00 -2.87389737e-02 -7.08457083e-02 -5.59022546e-01] [-1.04174912e+00 -4.99248832e-01 -5.72162390e-01 -1.54889524e+00 -1.29991025e-02 -5.82960010e-01 -6.33727133e-01 -9.93989632e-02 6.49132192e-01 4.54229325e-01 -7.20163345e-01 8.72479320e-01 -1.61293638e+00 -1.50401962e+00 -1.04174912e+00 -4.99248832e-01 -5.72162390e-01] [ 7.00685203e-01 -1.61572921e+00 -1.11251883e-01 -9.30019438e-01 1.08554459e+00 -5.69391727e-01 4.72748354e-02 7.09284782e-01 -1.13008356e+00 -1.04236901e+00 1.02160001e+00 5.53236127e-01 1.28972208e+00 1.33043563e+00 7.00685203e-01 -1.61572921e+00 -1.11251883e-01] [ 1.64380562e+00 5.25244415e-01 -7.45745659e-01 -1.64908743e+00 5.01601934e-01 7.34036684e-01 -5.14867604e-01 -3.90207469e-01 -1.82522464e+00 1.01942420e+00 9.77638662e-01 1.64596510e+00 5.93017936e-01 1.08675730e+00 1.64380562e+00 5.25244415e-01 -7.45745659e-01]] [[-1.99326500e-01 -1.54720807e+00 -5.44335067e-01 -3.91592175e-01 -2.47767711e+00 2.50261277e-01 -1.08524525e+00 -2.94621140e-01 1.01648450e+00 1.17854393e+00 -7.68009722e-01 9.27458525e-01 4.57905978e-01 1.34600353e+00 -1.99326500e-01 -1.54720807e+00 -5.44335067e-01] [-2.21869767e-01 3.33599716e-01 -8.04617465e-01 4.83833075e-01 -1.34821761e+00 -7.86430836e-01 1.52022853e-01 -1.43707231e-01 4.39120501e-01 1.21588558e-01 -8.92370403e-01 -1.24387527e+00 7.23652840e-01 -3.91274780e-01 -2.21869767e-01 3.33599716e-01 -8.04617465e-01] [ 5.92505991e-01 1.41920936e+00 -2.13895273e+00 2.47748137e-01 -9.42124128e-01 5.93091428e-01 5.21953762e-01 4.14823033e-02 6.72357678e-01 -1.04671657e+00 -1.05418539e+00 6.67380273e-01 -9.72805500e-01 1.88889480e+00 5.92505991e-01 1.41920936e+00 -2.13895273e+00] [-1.17595875e+00 -2.14593157e-01 -1.90701139e+00 
9.51526046e-01 5.05395710e-01 -1.04671001e+00 1.16115555e-01 -4.34897184e-01 -1.77460587e+00 -1.24790430e+00 8.37264180e-01 -6.45714819e-01 -1.09225459e-01 1.77338108e-01 -1.17595875e+00 -2.14593157e-01 -1.90701139e+00] [-2.78645307e-01 1.15060949e+00 -1.38137266e-01 1.51987445e+00 -2.06636176e-01 1.40715212e-01 1.04960203e+00 8.96047592e-01 2.08539057e+00 -1.05234230e+00 1.45945454e+00 1.14301383e+00 -9.61417556e-01 -1.51009810e+00 -2.78645307e-01 1.15060949e+00 -1.38137266e-01] [ 8.35648596e-01 2.91451156e-01 2.72162352e-02 9.92237568e-01 1.65749812e+00 -4.53040719e-01 -8.93059075e-01 1.80661932e-01 -1.13476372e+00 1.93282509e+00 1.35306692e+00 -5.40032268e-01 7.04316795e-01 -6.19966626e-01 8.35648596e-01 2.91451156e-01 2.72162352e-02] [ 1.50059056e+00 -2.30486751e+00 3.72600168e-01 -1.35647321e+00 -2.22946689e-01 -6.06218100e-01 -3.10345054e-01 1.72872841e+00 -1.37690425e+00 -1.32333612e+00 -2.64690965e-01 -5.43963909e-01 4.32884067e-01 -1.62156665e+00 1.50059056e+00 -2.30486751e+00 3.72600168e-01] [ 2.12581545e-01 -7.39331782e-01 -1.25003994e+00 3.85027826e-01 1.61132312e+00 -2.15790004e-01 -2.46970356e-01 1.28887427e+00 1.32922840e+00 7.39124537e-01 -4.75630701e-01 -7.08771646e-01 -2.60058910e-01 -8.68854076e-02 2.12581545e-01 -7.39331782e-01 -1.25003994e+00] [-5.20892560e-01 5.60231209e-01 -6.79765224e-01 -3.13190147e-02 5.48365176e-01 -9.16587859e-02 -3.81302834e-01 -7.19923735e-01 -7.60051310e-01 -5.39819956e-01 -6.24524534e-01 1.29583314e-01 4.76857394e-01 -1.97585511e+00 -5.20892560e-01 5.60231209e-01 -6.79765224e-01] [-1.04260755e+00 -1.16445601e-03 -7.14884996e-01 -3.11576962e-01 1.09037590e+00 -1.40488192e-01 -3.04036945e-01 2.29058176e-01 6.12643540e-01 -8.48582566e-01 1.41692626e+00 1.08761716e+00 -2.91889142e-02 5.27363300e-01 -1.04260755e+00 -1.16445601e-03 -7.14884996e-01] [ 4.68823612e-01 -1.48702592e-01 -2.54026413e+00 7.51422226e-01 -3.99727911e-01 -1.47452075e-02 -1.28427279e+00 -1.73451853e+00 -1.43543386e+00 1.30041949e-02 -7.94933975e-01 
1.58608720e-01 -7.59680629e-01 3.18717360e-01 4.68823612e-01 -1.48702592e-01 -2.54026413e+00] [-6.48708522e-01 1.02830338e+00 1.30212021e+00 1.35431659e+00 9.66977835e-01 1.99833140e-01 6.61360621e-01 -2.94869453e-01 -7.63966203e-01 1.19205475e-01 -5.91964483e-01 1.76656699e+00 -1.13831949e+00 -7.11755872e-01 -6.48708522e-01 1.02830338e+00 1.30212021e+00] [-2.95086503e-01 5.63571811e-01 -1.70496404e+00 -7.79853642e-01 -2.22545713e-02 -2.37866551e-01 4.00389880e-01 4.96311262e-02 -1.35945988e+00 4.79384065e-01 1.03211761e+00 6.55505419e-01 3.18053752e-01 -1.07791936e+00 -2.95086503e-01 5.63571811e-01 -1.70496404e+00] [-3.74364525e-01 -1.00016129e+00 1.81064236e+00 1.31217837e-01 2.47883081e+00 5.64432204e-01 5.37223756e-01 -8.88252318e-01 -7.43793070e-01 -3.44247296e-02 3.78810883e-01 -1.47375071e+00 -9.28483129e-01 -1.21624756e+00 -3.74364525e-01 -1.00016129e+00 1.81064236e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:circular - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5511.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="circular"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) RROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of 
op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable fw_re: [[[[-0.29881185 -1.9794276 0.20135231 ... -0.29881185 -1.9794276 0.20135231] [ 1.1601851 0.03632373 -0.5453932 ... 1.1601851 0.03632373 -0.5453932 ] [ 0.4425289 -0.3988952 1.7002814 ... 0.4425289 -0.3988952 1.7002814 ] ... [-0.4916553 1.3853023 1.0968648 ... -0.4916553 1.3853023 1.0968648 ] [-0.3683561 0.26638812 -0.6228417 ... -0.3683561 0.26638812 -0.6228417 ] [-1.6230471 1.0511518 0.04886806 ... -1.6230471 1.0511518 0.04886806]] [[ 0.6724049 -0.9659377 0.31281954 ... 0.6724049 -0.9659377 0.31281954] [-0.10061713 1.6848716 -0.07611062 ... -0.10061713 1.6848716 -0.07611062] [-0.7152942 -0.9318856 0.69423693 ... -0.7152942 -0.9318856 0.69423693] ... [ 0.11607347 -0.7506933 0.8979936 ... 0.11607347 -0.7506933 0.8979936 ] [-1.1923485 -0.17303294 -1.5734375 ... -1.1923485 -0.17303294 -1.5734375 ] [ 0.54107577 1.2773758 1.036774 ... 0.54107577 1.2773758 1.036774 ]] [[ 1.6179159 -0.7170625 1.2202168 ... 1.6179159 -0.7170625 1.2202168 ] [-1.9010291 1.0613115 0.5343422 ... -1.9010291 1.0613115 0.5343422 ] [-2.4705038 -1.3664743 1.0179995 ... -2.4705038 -1.3664743 1.0179995 ] ... [ 0.33534554 -0.6772276 1.4084271 ... 0.33534554 -0.6772276 1.4084271 ] [-0.93249416 0.24506088 0.5086248 ... -0.93249416 0.24506088 0.5086248 ] [-0.4082402 -1.0331651 -0.03903661 ... -0.4082402 -1.0331651 -0.03903661]]]]; ov_res: [[[[-0.29881185 -1.9794276 0.20135231 ... 
-0.29881185 -1.9794276 0.20135231] [ 1.1601851 0.03632373 -0.5453932 ... 1.1601851 0.03632373 -0.5453932 ] [ 0.4425289 -0.3988952 1.7002814 ... 0.4425289 -0.3988952 1.7002814 ] ... [-0.4916553 1.3853023 1.0968648 ... -0.4916553 1.3853023 1.0968648 ] [-0.3683561 0.26638812 -0.6228417 ... -0.3683561 0.26638812 -0.6228417 ] [-1.6230471 1.0511518 0.04886806 ... -1.6230471 1.0511518 0.04886806]] [[ 0.6724049 -0.9659377 0.31281954 ... 0.6724049 -0.9659377 0.31281954] [-0.10061713 1.6848716 -0.07611062 ... -0.10061713 1.6848716 -0.07611062] [-0.7152942 -0.9318856 0.69423693 ... -0.7152942 -0.9318856 0.69423693] ... [ 0.11607347 -0.7506933 0.8979936 ... 0.11607347 -0.7506933 0.8979936 ] [-1.1923485 -0.17303294 -1.5734375 ... -1.1923485 -0.17303294 -1.5734375 ] [ 0.54107577 1.2773758 1.036774 ... 0.54107577 1.2773758 1.036774 ]] [[ 1.6179159 -0.7170625 1.2202168 ... 1.6179159 -0.7170625 1.2202168 ] [-1.9010291 1.0613115 0.5343422 ... -1.9010291 1.0613115 0.5343422 ] [-2.4705038 -1.3664743 1.0179995 ... -2.4705038 -1.3664743 1.0179995 ] ... [ 0.33534554 -0.6772276 1.4084271 ... 0.33534554 -0.6772276 1.4084271 ] [-0.93249416 0.24506088 0.5086248 ... -0.93249416 0.24506088 0.5086248 ] [-0.4082402 -1.0331651 -0.03903661 ... -0.4082402 -1.0331651 -0.03903661]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(0, 1, 0, 0) - mode:circular - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5513.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1, 0, 0]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="circular"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 5.23746550e-01 -4.63810861e-01 2.30439901e+00 5.89861691e-01 3.45612615e-01 -8.23541433e-02 1.39126623e+00 1.47179651e+00 -3.96760106e-01 -1.24288440e-01 3.07619953e+00 6.45588696e-01 9.28852499e-01 -1.79789916e-01 5.23746550e-01] [ 1.33914673e+00 1.07871139e+00 -1.55885887e+00 8.99995446e-01 -1.28552449e+00 -4.41618949e-01 -1.83197927e+00 1.33226824e+00 7.77711928e-01 8.15699100e-01 1.68758082e+00 -1.73231721e+00 -4.38368291e-01 -6.67294562e-01 1.33914673e+00] [ 5.46387613e-01 -2.70581037e-01 2.70993620e-01 1.88768566e+00 3.83503824e-01 -8.67029309e-01 -3.60407978e-01 -5.14939070e-01 -1.45901906e+00 -1.02767515e+00 -4.20688391e-01 5.84266365e-01 7.64415801e-01 -2.34445501e-02 5.46387613e-01] [-6.18958414e-01 6.53440654e-01 1.23407376e+00 5.99669158e-01 1.10355592e+00 1.00266314e+00 2.51141095e+00 -5.22668839e-01 -1.72989583e+00 7.90073454e-01 6.34665430e-01 9.05705929e-01 -4.53734010e-01 -6.88203812e-01 -6.18958414e-01] [ 9.64688480e-01 9.31597948e-01 1.08486247e+00 6.24221861e-01 6.94327056e-01 6.85904503e-01 1.73853323e-01 1.74207640e+00 5.31201899e-01 4.51556981e-01 -1.35330665e+00 -3.32158506e-01 -2.03060299e-01 -9.17915463e-01 9.64688480e-01] [ 5.22745699e-02 -9.05622184e-01 -3.51637065e-01 8.63333225e-01 -2.00577617e-01 -1.59834480e+00 -5.63798428e-01 -1.49569869e-01 -1.23405266e+00 -2.71980190e+00 -2.36988068e-02 8.47618163e-01 -8.07583511e-01 -1.07454501e-01 5.22745699e-02] [ 1.07365203e+00 7.91710734e-01 -3.57353330e-01 1.84995696e-01 2.05407932e-01 -8.55828106e-01 -4.36346978e-01 -1.47174335e+00 
-3.97939324e-01 -9.18561459e-01 -6.10356569e-01 -1.21335733e+00 1.00018394e+00 1.13483191e+00 1.07365203e+00] [ 1.08012962e+00 9.54833925e-01 -1.16706111e-01 -6.50342524e-01 3.52815211e-01 1.64964974e-01 -4.02193457e-01 8.03421199e-01 1.57926662e-03 1.12703252e+00 -3.42107505e-01 -4.28353429e-01 3.80270571e-01 1.87895566e-01 1.08012962e+00] [-1.84266940e-02 -9.55541849e-01 1.18704617e+00 -1.74317971e-01 -4.05575126e-01 -2.76461393e-01 -9.53954279e-01 -6.81949317e-01 1.04518223e+00 1.90571988e+00 4.73636240e-01 -2.66791970e-01 -5.76114118e-01 6.50941312e-01 -1.84266940e-02] [ 4.47854996e-01 4.45380002e-01 -1.05992389e+00 -6.76022843e-04 -1.08664572e+00 1.93143889e-01 9.37415540e-01 -6.47986591e-01 -1.69453990e+00 8.59004736e-01 1.42810082e+00 2.67203748e-01 -2.49004215e-01 -5.67115068e-01 4.47854996e-01] [ 1.23367679e+00 -2.76134157e+00 1.20192313e+00 5.49410760e-01 -7.38286555e-01 5.15873492e-01 -6.10539457e-03 5.61117768e-01 2.00486198e-01 8.97710845e-02 2.05665734e-02 8.88918877e-01 -7.71151543e-01 6.20182455e-01 1.23367679e+00] [-4.11023080e-01 -1.66601613e-02 3.52856696e-01 -7.49306977e-01 2.01672149e+00 -1.69345677e+00 -2.54692703e-01 7.69220233e-01 -8.03436100e-01 -2.72283077e+00 2.87378073e-01 -4.70836349e-02 -8.28026474e-01 -3.08010995e-01 -4.11023080e-01] [ 2.29570532e+00 -1.03109908e+00 -2.14678669e+00 1.40488803e+00 -6.15648031e-01 3.10112238e-01 -2.37611197e-02 -6.35684848e-01 1.09313071e+00 -2.71095604e-01 4.73081231e-01 -6.66444302e-01 -3.09567183e-01 5.87509274e-01 2.29570532e+00] [ 1.09575319e+00 4.77805585e-01 1.69594979e+00 -1.43675125e+00 -9.33663920e-02 -1.96939623e+00 -1.97915685e+00 -1.22555745e+00 -4.06820327e-03 3.47511351e-01 1.21708596e+00 5.33390701e-01 -1.74744129e+00 -1.61267757e-01 1.09575319e+00]] [[-2.39220679e-01 5.98391652e-01 -1.15223622e+00 1.84485269e+00 -1.60806882e+00 5.12829065e-01 -9.20215428e-01 3.70659232e-01 3.56150866e-01 -1.87663585e-02 -2.81938434e-01 1.39624286e+00 3.61809254e-01 8.25420260e-01 -2.39220679e-01] 
[-9.92255092e-01 -2.44378671e-01 -4.51386213e-01 7.91653216e-01 9.05913353e-01 -8.70050430e-01 -4.50565398e-01 1.60081935e+00 -4.61084425e-01 -1.66860676e+00 2.24775314e+00 -2.51416731e+00 4.33095306e-01 -7.79057682e-01 -9.92255092e-01] [-1.81106821e-01 1.16872323e+00 -1.07249880e+00 6.64526284e-01 5.37250042e-01 5.87462068e-01 5.87370336e-01 3.99041504e-01 1.79431164e+00 -1.37367356e+00 -1.36488283e+00 -5.30443192e-01 -6.06424391e-01 6.21780097e-01 -1.81106821e-01] [-3.56463403e-01 8.71435583e-01 -1.48889780e+00 -2.60367244e-01 8.06329668e-01 -4.54715282e-01 -6.57686472e-01 -1.60337663e+00 -8.28416944e-01 -4.21174794e-01 5.55733562e-01 -1.26360074e-01 5.49417853e-01 -4.23902035e-01 -3.56463403e-01] [-8.28063563e-02 -1.43626297e+00 -6.45406961e-01 -1.10126185e+00 -4.59037632e-01 -6.02654099e-01 4.35544997e-01 4.83292609e-01 -6.06028914e-01 -8.18469524e-02 -5.78619719e-01 2.38971448e+00 1.56072363e-01 2.64038235e-01 -8.28063563e-02] [-1.30561149e+00 1.21082890e+00 1.22044392e-01 -2.97091663e-01 4.44839716e-01 -3.90256159e-02 -6.64284527e-01 -2.24174142e-01 7.18562663e-01 -7.62328982e-01 -1.12206376e+00 -5.30404389e-01 7.50259757e-01 2.28262730e-02 -1.30561149e+00] [ 5.33667132e-02 -3.25167269e-01 -2.19558764e+00 1.81538146e-02 2.20922559e-01 3.85272317e-02 1.13871896e+00 3.96112708e-04 2.62883115e+00 -7.29304671e-01 1.56138659e-01 3.33322823e-01 2.58687067e+00 2.06907535e+00 5.33667132e-02] [-2.06415606e+00 -1.03705442e+00 -1.26363361e+00 -1.47410882e+00 -8.31544325e-02 1.73568413e-01 2.50813413e+00 1.43209267e+00 5.35110831e-01 9.38050330e-01 2.57995367e-01 6.96464628e-02 1.25643551e+00 1.80577308e-01 -2.06415606e+00] [ 4.77117270e-01 8.27557147e-01 -8.99910808e-01 -1.04820609e+00 8.81151936e-04 2.01543665e+00 1.26518881e+00 -1.71475852e+00 1.80733073e+00 1.44364548e+00 -1.39262879e+00 3.87161303e+00 -2.50283003e-01 9.58021805e-02 4.77117270e-01] [ 9.03720737e-01 -8.34896386e-01 1.39531028e+00 -1.60246873e+00 1.66834369e-01 2.87325001e+00 -5.30460179e-01 
1.96136790e-03 3.94645005e-01 -1.30498812e-01 5.66183984e-01 -1.77299881e+00 2.04620987e-01 1.08927643e+00 9.03720737e-01] [ 2.22750813e-01 6.92727745e-01 5.91461360e-01 -1.46506295e-01 8.25874507e-02 -1.07713819e+00 2.26236001e-01 1.88273203e+00 8.30188930e-01 8.69956911e-01 -1.37967420e+00 2.10329056e+00 -9.46408287e-02 5.77155314e-03 2.22750813e-01] [ 1.36289215e+00 1.81325600e-01 -1.12988746e+00 4.62102383e-01 1.08619440e+00 -1.39524055e+00 3.29262018e-01 1.56208611e+00 1.96171999e+00 -1.56820977e+00 1.12642892e-01 -8.85509908e-01 1.95869756e+00 9.45017695e-01 1.36289215e+00] [ 1.26598012e+00 1.54213476e+00 -1.49303496e+00 -2.95611531e-01 7.03908861e-01 -6.42472088e-01 1.39547515e+00 -7.21408725e-01 6.51517034e-01 -4.69485492e-01 5.39963245e-01 -9.66434032e-02 1.37687266e-01 3.62412721e-01 1.26598012e+00] [ 4.63436007e-01 -1.79944730e+00 2.84316897e-01 1.19894767e+00 -9.63467658e-01 -9.80056047e-01 4.33289319e-01 -8.43866542e-02 8.76335561e-01 -9.83571172e-01 4.32896227e-01 3.90580326e-01 -5.76370843e-02 1.54338157e+00 4.63436007e-01]] [[ 3.77330959e-01 -2.99483180e-01 1.02411306e+00 1.04768455e+00 -7.75735378e-01 -1.29651988e+00 1.45694181e-01 3.38203423e-02 -6.11015856e-02 -1.40045106e+00 2.03010011e+00 5.97509205e-01 4.55006838e-01 1.33899257e-01 3.77330959e-01] [ 8.26900959e-01 -4.54876989e-01 5.65377176e-01 1.83592665e+00 2.96882296e+00 -1.27966154e+00 1.07962064e-01 1.60351896e+00 3.15893978e-01 1.07072793e-01 2.13961110e-01 8.25035393e-01 -1.90358609e-01 8.45744193e-01 8.26900959e-01] [-1.60988426e+00 9.48803842e-01 -1.06812811e+00 1.27024281e+00 -2.23684263e+00 -7.30140269e-01 -7.36495480e-02 -8.69807780e-01 2.26818156e+00 7.90116906e-01 1.16192532e+00 -4.84704167e-01 5.09866595e-01 -1.84875357e+00 -1.60988426e+00] [ 2.67929482e+00 -3.10174763e-01 -4.83424276e-01 -1.34183943e+00 3.08422542e+00 -6.94694221e-01 1.59412158e+00 1.74885169e-02 -3.78639530e-03 9.81650174e-01 5.78341186e-01 -2.23159850e-01 -1.02068853e+00 2.11066529e-01 2.67929482e+00] 
[-1.09919381e+00 -1.00323176e+00 -9.93022561e-01 -6.85484171e-01 3.17089349e-01 -2.25266382e-01 -6.70095906e-02 -8.28291923e-02 5.02121866e-01 7.18047678e-01 -1.52974653e+00 7.44979620e-01 -1.37577415e-01 -1.77399898e+00 -1.09919381e+00] [-1.31519806e+00 -7.72459269e-01 -1.81165338e+00 1.02249694e+00 -1.38035536e+00 -3.10454816e-02 -1.87539518e+00 1.80400014e+00 -5.39507508e-01 -1.73881149e+00 -7.14431763e-01 1.56583154e+00 -7.16096699e-01 -1.35660005e+00 -1.31519806e+00] [ 9.35552359e-01 4.94104892e-01 8.97020519e-01 -7.09945917e-01 -8.40221167e-01 1.42407119e+00 -1.59025431e+00 -1.24893141e+00 -6.76839292e-01 3.31273109e-01 -1.35433948e+00 3.99922907e-01 1.86286080e+00 -1.80455279e+00 9.35552359e-01] [ 1.39182717e-01 -2.39503551e+00 -8.80323529e-01 1.28534961e+00 5.26979804e-01 -9.13671911e-01 -5.98444223e-01 6.12331510e-01 1.85748565e+00 5.49353838e-01 -2.01373696e+00 1.59767044e+00 -8.06961238e-01 8.65161359e-01 1.39182717e-01] [ 1.70608256e-02 -3.51744406e-02 1.00750339e+00 -1.07932568e+00 -2.32974812e-01 -4.21299696e-01 -6.81770861e-01 -2.02316475e+00 -2.00610423e+00 -4.26145852e-01 1.45929587e+00 3.57594371e-01 6.02733374e-01 9.97496426e-01 1.70608256e-02] [-1.39170754e+00 1.23918200e+00 2.71916777e-01 1.00808942e+00 -7.02316165e-01 -5.43580353e-01 6.97243929e-01 1.17778994e-01 8.85381401e-01 -4.07945484e-01 2.58337110e-01 9.29814875e-01 -1.28881502e+00 8.03892851e-01 -1.39170754e+00] [ 2.10928440e-01 4.36867923e-01 6.11527145e-01 4.22269464e-01 -9.14700508e-01 -1.08912003e+00 7.93503046e-01 2.50260502e-01 1.80162168e+00 8.16864073e-01 4.53081340e-01 -1.57196558e+00 -1.00033939e+00 -1.07301331e+00 2.10928440e-01] [ 3.44112247e-01 -1.02904236e+00 -1.05523515e+00 -3.62470269e-01 -8.38515088e-02 -6.34002626e-01 -1.76029706e+00 -6.09342635e-01 1.45686579e+00 -3.75291556e-01 -7.54684091e-01 -4.36687946e-01 -1.15395570e+00 -3.21533680e-01 3.44112247e-01] [-1.06647110e+00 1.91555396e-01 6.00823641e-01 3.67125988e-01 -1.51683375e-01 -1.58292544e+00 -2.42030159e-01 
-1.41284966e+00 -9.95963752e-01 -6.37133896e-01 9.46766853e-01 -1.18445861e+00 -8.54894280e-01 -1.90658614e-01 -1.06647110e+00] [ 3.60173583e-01 -2.35512387e-03 -9.49850738e-01 -1.50651395e-01 2.55424321e-01 -9.83326435e-02 -1.72966182e+00 -1.24329865e+00 -2.77237266e-01 -6.32154226e-01 -1.06014037e+00 -1.90377071e-01 3.22139740e-01 -1.37373433e-01 3.60173583e-01]]]]; ov_res: [[[[ 5.23746550e-01 -4.63810861e-01 2.30439901e+00 5.89861691e-01 3.45612615e-01 -8.23541433e-02 1.39126623e+00 1.47179651e+00 -3.96760106e-01 -1.24288440e-01 3.07619953e+00 6.45588696e-01 9.28852499e-01 -1.79789916e-01 5.23746550e-01] [ 1.33914673e+00 1.07871139e+00 -1.55885887e+00 8.99995446e-01 -1.28552449e+00 -4.41618949e-01 -1.83197927e+00 1.33226824e+00 7.77711928e-01 8.15699100e-01 1.68758082e+00 -1.73231721e+00 -4.38368291e-01 -6.67294562e-01 1.33914673e+00] [ 5.46387613e-01 -2.70581037e-01 2.70993620e-01 1.88768566e+00 3.83503824e-01 -8.67029309e-01 -3.60407978e-01 -5.14939070e-01 -1.45901906e+00 -1.02767515e+00 -4.20688391e-01 5.84266365e-01 7.64415801e-01 -2.34445501e-02 5.46387613e-01] [-6.18958414e-01 6.53440654e-01 1.23407376e+00 5.99669158e-01 1.10355592e+00 1.00266314e+00 2.51141095e+00 -5.22668839e-01 -1.72989583e+00 7.90073454e-01 6.34665430e-01 9.05705929e-01 -4.53734010e-01 -6.88203812e-01 -6.18958414e-01] [ 9.64688480e-01 9.31597948e-01 1.08486247e+00 6.24221861e-01 6.94327056e-01 6.85904503e-01 1.73853323e-01 1.74207640e+00 5.31201899e-01 4.51556981e-01 -1.35330665e+00 -3.32158506e-01 -2.03060299e-01 -9.17915463e-01 9.64688480e-01] [ 5.22745699e-02 -9.05622184e-01 -3.51637065e-01 8.63333225e-01 -2.00577617e-01 -1.59834480e+00 -5.63798428e-01 -1.49569869e-01 -1.23405266e+00 -2.71980190e+00 -2.36988068e-02 8.47618163e-01 -8.07583511e-01 -1.07454501e-01 5.22745699e-02] [ 1.07365203e+00 7.91710734e-01 -3.57353330e-01 1.84995696e-01 2.05407932e-01 -8.55828106e-01 -4.36346978e-01 -1.47174335e+00 -3.97939324e-01 -9.18561459e-01 -6.10356569e-01 -1.21335733e+00 1.00018394e+00 
1.13483191e+00 1.07365203e+00] [ 1.08012962e+00 9.54833925e-01 -1.16706111e-01 -6.50342524e-01 3.52815211e-01 1.64964974e-01 -4.02193457e-01 8.03421199e-01 1.57926662e-03 1.12703252e+00 -3.42107505e-01 -4.28353429e-01 3.80270571e-01 1.87895566e-01 1.08012962e+00] [-1.84266940e-02 -9.55541849e-01 1.18704617e+00 -1.74317971e-01 -4.05575126e-01 -2.76461393e-01 -9.53954279e-01 -6.81949317e-01 1.04518223e+00 1.90571988e+00 4.73636240e-01 -2.66791970e-01 -5.76114118e-01 6.50941312e-01 -1.84266940e-02] [ 4.47854996e-01 4.45380002e-01 -1.05992389e+00 -6.76022843e-04 -1.08664572e+00 1.93143889e-01 9.37415540e-01 -6.47986591e-01 -1.69453990e+00 8.59004736e-01 1.42810082e+00 2.67203748e-01 -2.49004215e-01 -5.67115068e-01 4.47854996e-01] [ 1.23367679e+00 -2.76134157e+00 1.20192313e+00 5.49410760e-01 -7.38286555e-01 5.15873492e-01 -6.10539457e-03 5.61117768e-01 2.00486198e-01 8.97710845e-02 2.05665734e-02 8.88918877e-01 -7.71151543e-01 6.20182455e-01 1.23367679e+00] [-4.11023080e-01 -1.66601613e-02 3.52856696e-01 -7.49306977e-01 2.01672149e+00 -1.69345677e+00 -2.54692703e-01 7.69220233e-01 -8.03436100e-01 -2.72283077e+00 2.87378073e-01 -4.70836349e-02 -8.28026474e-01 -3.08010995e-01 -4.11023080e-01] [ 2.29570532e+00 -1.03109908e+00 -2.14678669e+00 1.40488803e+00 -6.15648031e-01 3.10112238e-01 -2.37611197e-02 -6.35684848e-01 1.09313071e+00 -2.71095604e-01 4.73081231e-01 -6.66444302e-01 -3.09567183e-01 5.87509274e-01 2.29570532e+00] [ 1.09575319e+00 4.77805585e-01 1.69594979e+00 -1.43675125e+00 -9.33663920e-02 -1.96939623e+00 -1.97915685e+00 -1.22555745e+00 -4.06820327e-03 3.47511351e-01 1.21708596e+00 5.33390701e-01 -1.74744129e+00 -1.61267757e-01 1.09575319e+00]] [[-2.39220679e-01 5.98391652e-01 -1.15223622e+00 1.84485269e+00 -1.60806882e+00 5.12829065e-01 -9.20215428e-01 3.70659232e-01 3.56150866e-01 -1.87663585e-02 -2.81938434e-01 1.39624286e+00 3.61809254e-01 8.25420260e-01 -2.39220679e-01] [-9.92255092e-01 -2.44378671e-01 -4.51386213e-01 7.91653216e-01 9.05913353e-01 
-8.70050430e-01 -4.50565398e-01 1.60081935e+00 -4.61084425e-01 -1.66860676e+00 2.24775314e+00 -2.51416731e+00 4.33095306e-01 -7.79057682e-01 -9.92255092e-01] [-1.81106821e-01 1.16872323e+00 -1.07249880e+00 6.64526284e-01 5.37250042e-01 5.87462068e-01 5.87370336e-01 3.99041504e-01 1.79431164e+00 -1.37367356e+00 -1.36488283e+00 -5.30443192e-01 -6.06424391e-01 6.21780097e-01 -1.81106821e-01] [-3.56463403e-01 8.71435583e-01 -1.48889780e+00 -2.60367244e-01 8.06329668e-01 -4.54715282e-01 -6.57686472e-01 -1.60337663e+00 -8.28416944e-01 -4.21174794e-01 5.55733562e-01 -1.26360074e-01 5.49417853e-01 -4.23902035e-01 -3.56463403e-01] [-8.28063563e-02 -1.43626297e+00 -6.45406961e-01 -1.10126185e+00 -4.59037632e-01 -6.02654099e-01 4.35544997e-01 4.83292609e-01 -6.06028914e-01 -8.18469524e-02 -5.78619719e-01 2.38971448e+00 1.56072363e-01 2.64038235e-01 -8.28063563e-02] [-1.30561149e+00 1.21082890e+00 1.22044392e-01 -2.97091663e-01 4.44839716e-01 -3.90256159e-02 -6.64284527e-01 -2.24174142e-01 7.18562663e-01 -7.62328982e-01 -1.12206376e+00 -5.30404389e-01 7.50259757e-01 2.28262730e-02 -1.30561149e+00] [ 5.33667132e-02 -3.25167269e-01 -2.19558764e+00 1.81538146e-02 2.20922559e-01 3.85272317e-02 1.13871896e+00 3.96112708e-04 2.62883115e+00 -7.29304671e-01 1.56138659e-01 3.33322823e-01 2.58687067e+00 2.06907535e+00 5.33667132e-02] [-2.06415606e+00 -1.03705442e+00 -1.26363361e+00 -1.47410882e+00 -8.31544325e-02 1.73568413e-01 2.50813413e+00 1.43209267e+00 5.35110831e-01 9.38050330e-01 2.57995367e-01 6.96464628e-02 1.25643551e+00 1.80577308e-01 -2.06415606e+00] [ 4.77117270e-01 8.27557147e-01 -8.99910808e-01 -1.04820609e+00 8.81151936e-04 2.01543665e+00 1.26518881e+00 -1.71475852e+00 1.80733073e+00 1.44364548e+00 -1.39262879e+00 3.87161303e+00 -2.50283003e-01 9.58021805e-02 4.77117270e-01] [ 9.03720737e-01 -8.34896386e-01 1.39531028e+00 -1.60246873e+00 1.66834369e-01 2.87325001e+00 -5.30460179e-01 1.96136790e-03 3.94645005e-01 -1.30498812e-01 5.66183984e-01 -1.77299881e+00 
2.04620987e-01 1.08927643e+00 9.03720737e-01] [ 2.22750813e-01 6.92727745e-01 5.91461360e-01 -1.46506295e-01 8.25874507e-02 -1.07713819e+00 2.26236001e-01 1.88273203e+00 8.30188930e-01 8.69956911e-01 -1.37967420e+00 2.10329056e+00 -9.46408287e-02 5.77155314e-03 2.22750813e-01] [ 1.36289215e+00 1.81325600e-01 -1.12988746e+00 4.62102383e-01 1.08619440e+00 -1.39524055e+00 3.29262018e-01 1.56208611e+00 1.96171999e+00 -1.56820977e+00 1.12642892e-01 -8.85509908e-01 1.95869756e+00 9.45017695e-01 1.36289215e+00] [ 1.26598012e+00 1.54213476e+00 -1.49303496e+00 -2.95611531e-01 7.03908861e-01 -6.42472088e-01 1.39547515e+00 -7.21408725e-01 6.51517034e-01 -4.69485492e-01 5.39963245e-01 -9.66434032e-02 1.37687266e-01 3.62412721e-01 1.26598012e+00] [ 4.63436007e-01 -1.79944730e+00 2.84316897e-01 1.19894767e+00 -9.63467658e-01 -9.80056047e-01 4.33289319e-01 -8.43866542e-02 8.76335561e-01 -9.83571172e-01 4.32896227e-01 3.90580326e-01 -5.76370843e-02 1.54338157e+00 4.63436007e-01]] [[ 3.77330959e-01 -2.99483180e-01 1.02411306e+00 1.04768455e+00 -7.75735378e-01 -1.29651988e+00 1.45694181e-01 3.38203423e-02 -6.11015856e-02 -1.40045106e+00 2.03010011e+00 5.97509205e-01 4.55006838e-01 1.33899257e-01 3.77330959e-01] [ 8.26900959e-01 -4.54876989e-01 5.65377176e-01 1.83592665e+00 2.96882296e+00 -1.27966154e+00 1.07962064e-01 1.60351896e+00 3.15893978e-01 1.07072793e-01 2.13961110e-01 8.25035393e-01 -1.90358609e-01 8.45744193e-01 8.26900959e-01] [-1.60988426e+00 9.48803842e-01 -1.06812811e+00 1.27024281e+00 -2.23684263e+00 -7.30140269e-01 -7.36495480e-02 -8.69807780e-01 2.26818156e+00 7.90116906e-01 1.16192532e+00 -4.84704167e-01 5.09866595e-01 -1.84875357e+00 -1.60988426e+00] [ 2.67929482e+00 -3.10174763e-01 -4.83424276e-01 -1.34183943e+00 3.08422542e+00 -6.94694221e-01 1.59412158e+00 1.74885169e-02 -3.78639530e-03 9.81650174e-01 5.78341186e-01 -2.23159850e-01 -1.02068853e+00 2.11066529e-01 2.67929482e+00] [-1.09919381e+00 -1.00323176e+00 -9.93022561e-01 -6.85484171e-01 3.17089349e-01 
-2.25266382e-01 -6.70095906e-02 -8.28291923e-02 5.02121866e-01 7.18047678e-01 -1.52974653e+00 7.44979620e-01 -1.37577415e-01 -1.77399898e+00 -1.09919381e+00] [-1.31519806e+00 -7.72459269e-01 -1.81165338e+00 1.02249694e+00 -1.38035536e+00 -3.10454816e-02 -1.87539518e+00 1.80400014e+00 -5.39507508e-01 -1.73881149e+00 -7.14431763e-01 1.56583154e+00 -7.16096699e-01 -1.35660005e+00 -1.31519806e+00] [ 9.35552359e-01 4.94104892e-01 8.97020519e-01 -7.09945917e-01 -8.40221167e-01 1.42407119e+00 -1.59025431e+00 -1.24893141e+00 -6.76839292e-01 3.31273109e-01 -1.35433948e+00 3.99922907e-01 1.86286080e+00 -1.80455279e+00 9.35552359e-01] [ 1.39182717e-01 -2.39503551e+00 -8.80323529e-01 1.28534961e+00 5.26979804e-01 -9.13671911e-01 -5.98444223e-01 6.12331510e-01 1.85748565e+00 5.49353838e-01 -2.01373696e+00 1.59767044e+00 -8.06961238e-01 8.65161359e-01 1.39182717e-01] [ 1.70608256e-02 -3.51744406e-02 1.00750339e+00 -1.07932568e+00 -2.32974812e-01 -4.21299696e-01 -6.81770861e-01 -2.02316475e+00 -2.00610423e+00 -4.26145852e-01 1.45929587e+00 3.57594371e-01 6.02733374e-01 9.97496426e-01 1.70608256e-02] [-1.39170754e+00 1.23918200e+00 2.71916777e-01 1.00808942e+00 -7.02316165e-01 -5.43580353e-01 6.97243929e-01 1.17778994e-01 8.85381401e-01 -4.07945484e-01 2.58337110e-01 9.29814875e-01 -1.28881502e+00 8.03892851e-01 -1.39170754e+00] [ 2.10928440e-01 4.36867923e-01 6.11527145e-01 4.22269464e-01 -9.14700508e-01 -1.08912003e+00 7.93503046e-01 2.50260502e-01 1.80162168e+00 8.16864073e-01 4.53081340e-01 -1.57196558e+00 -1.00033939e+00 -1.07301331e+00 2.10928440e-01] [ 3.44112247e-01 -1.02904236e+00 -1.05523515e+00 -3.62470269e-01 -8.38515088e-02 -6.34002626e-01 -1.76029706e+00 -6.09342635e-01 1.45686579e+00 -3.75291556e-01 -7.54684091e-01 -4.36687946e-01 -1.15395570e+00 -3.21533680e-01 3.44112247e-01] [-1.06647110e+00 1.91555396e-01 6.00823641e-01 3.67125988e-01 -1.51683375e-01 -1.58292544e+00 -2.42030159e-01 -1.41284966e+00 -9.95963752e-01 -6.37133896e-01 9.46766853e-01 -1.18445861e+00 
-8.54894280e-01 -1.90658614e-01 -1.06647110e+00] [ 3.60173583e-01 -2.35512387e-03 -9.49850738e-01 -1.50651395e-01 2.55424321e-01 -9.83326435e-02 -1.72966182e+00 -1.24329865e+00 -2.77237266e-01 -6.32154226e-01 -1.06014037e+00 -1.90377071e-01 3.22139740e-01 -1.37373433e-01 3.60173583e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(0, 0, 0, 0) - mode:circular - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5515.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0, 0, 0]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="circular"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 2.37157964e-03 -1.64424908e+00 -1.41071165e+00 1.25786260e-01 -1.05186725e+00 4.49540138e-01 -3.41551256e+00 -3.89624983e-01 1.55643427e+00 1.56279609e-01 6.72772944e-01 -9.47986543e-02 1.98707029e-01 -2.89885312e-01] [-1.81281924e+00 2.40699363e+00 1.07945335e+00 -8.56304526e-01 -2.34523326e-01 2.42352471e-01 3.32247943e-01 -6.92154288e-01 -1.81719184e+00 -5.74054897e-01 4.87720639e-01 -3.01927894e-01 1.28412044e+00 2.20050097e-01] [-1.85315812e+00 -1.48285067e+00 -1.43691331e-01 -6.27910972e-01 3.85183394e-01 4.00841802e-01 -3.91098298e-02 5.11378467e-01 -6.18715659e-02 7.48770893e-01 -8.62950265e-01 3.98848295e-01 -3.25171314e-02 -1.60522008e+00] [ 9.52048659e-01 9.67232436e-02 -3.64153147e-01 5.25882363e-01 -4.98472214e-01 4.87178303e-02 1.06988490e+00 2.46530080e+00 6.41945004e-02 -2.20341384e-01 1.76306689e+00 1.94188988e+00 8.89445901e-01 -1.00177467e+00] [ 1.32938659e+00 8.62863123e-01 -2.17508376e-01 7.01363981e-01 -5.07254124e-01 7.27312028e-01 -7.64327347e-02 4.59253788e-01 -2.40613706e-02 9.65659320e-02 1.99466392e-01 6.01653010e-02 -2.93613970e-01 -1.17280507e+00] [-1.29033399e+00 -4.18206275e-01 -8.19523454e-01 -4.15444583e-01 -5.01268566e-01 -6.53920710e-01 -1.31371462e+00 8.83944631e-01 -8.80640864e-01 -1.82227933e+00 1.09132338e+00 2.53515983e+00 -3.76271427e-01 7.94301406e-02] [-1.81619060e+00 -9.21029389e-01 1.51664186e+00 -8.53310227e-01 1.07222438e+00 5.23092806e-01 2.61589080e-01 1.83374688e-01 -5.04504085e-01 7.96479523e-01 -5.81981897e-01 -1.87684917e+00 -2.63796121e-01 7.27881014e-01] 
[ 2.20725679e+00 -7.84049332e-01 5.37199557e-01 9.50693011e-01 3.15021574e-01 8.55536401e-01 -1.13080718e-01 -1.74498236e+00 -9.53751802e-01 8.23906302e-01 -8.39477554e-02 -1.61424220e+00 5.67731559e-01 -1.26940206e-01] [-2.09436655e+00 -6.47063434e-01 1.39459145e+00 1.16387844e+00 -3.04136008e-01 2.32672468e-02 -1.02894962e+00 -3.01596105e-01 -2.70463582e-02 1.03969765e+00 -6.59469962e-01 3.00305188e-01 -7.34492719e-01 -8.78176808e-01] [-9.08472657e-01 -2.83889979e-01 2.71913558e-01 -4.63975698e-01 -6.54022574e-01 -1.04478908e+00 1.63290834e+00 -1.29060006e+00 1.54820549e+00 -3.76476608e-02 -7.53042817e-01 3.52028728e-01 1.96951473e+00 -6.94825873e-02] [ 9.50417995e-01 -1.22674298e+00 5.32342076e-01 4.62358326e-01 2.10121702e-02 2.24034637e-01 1.06777906e+00 1.35800636e+00 -1.58680511e+00 -9.47951555e-01 5.56898937e-02 1.11115074e+00 -2.91997820e-01 -1.26076198e+00] [-1.24077034e+00 -1.20901547e-01 -3.61588508e-01 -5.35470784e-01 -1.55611467e+00 3.33257526e-01 -5.03126383e-02 3.79982084e-01 -5.40117562e-01 9.23020840e-01 9.73320603e-01 2.44219586e-01 -1.65313199e-01 4.07020271e-01] [ 7.12122977e-01 1.63146067e+00 4.95212764e-01 -1.59939110e+00 1.04959339e-01 6.23462021e-01 4.44849879e-01 -1.13950446e-01 7.03601599e-01 -1.57271159e+00 -3.78027022e-01 -3.01736206e-01 9.42422986e-01 1.16937959e+00] [-7.85277724e-01 8.46106946e-01 -1.08349693e+00 1.65612829e+00 8.12256217e-01 -3.53511572e-01 -5.48123121e-01 -4.29916978e-01 5.76481879e-01 -7.85534322e-01 4.93710518e-01 5.88361859e-01 7.58408964e-01 7.98884705e-02]] [[-2.22638384e-01 2.76407689e-01 -8.87626410e-01 -5.16555786e-01 -4.52379346e-01 -1.96143723e+00 -3.08904082e-01 -6.48234725e-01 3.32795709e-01 7.43842050e-02 -5.54643348e-02 -1.24475360e+00 -1.17054904e+00 1.48434389e+00] [-1.84747851e+00 -3.48415017e-01 -2.25824282e-01 1.45265222e-01 1.18027508e+00 1.90198135e+00 1.38189006e+00 -1.08670313e-02 -2.61324906e+00 -7.16185749e-01 6.04838580e-02 -1.03044641e+00 1.71526885e+00 -2.30824947e-01] [ 9.69349965e-02 
1.26042056e+00 4.49427992e-01 2.16939658e-01 2.06340134e-01 -6.82751656e-01 7.65476167e-01 5.33626556e-01 7.51842618e-01 -1.07725358e+00 -5.96097887e-01 -1.38717282e+00 9.40990388e-01 5.81349492e-01] [-2.31468916e+00 -1.66635271e-02 4.59224612e-01 -1.72256902e-01 5.48780739e-01 1.92956761e-01 -3.66653129e-02 -2.63910592e-01 -3.14243942e-01 1.27358067e+00 1.43492505e-01 1.59970379e+00 -4.58889157e-01 -1.28667319e+00] [-2.40407139e-01 1.09438074e+00 -8.32264006e-01 3.13845038e-01 -7.36943781e-01 -8.20791841e-01 -1.16802537e+00 9.43289250e-02 -3.53726238e-01 -9.78594363e-01 -1.00245774e+00 5.26302040e-01 2.49728784e-01 7.62646377e-01] [ 4.24282342e-01 2.16927576e+00 -1.35215908e-01 -8.69945347e-01 1.74066758e+00 1.68632284e-01 -2.70510614e-02 -2.90434897e-01 4.44074154e-01 -1.75622895e-01 1.18814385e+00 5.48755586e-01 2.94697434e-01 -8.45539093e-01] [-2.54665703e-01 9.51208472e-01 6.48218691e-01 5.31849265e-02 -6.42900169e-01 3.53217512e-01 -1.32233226e+00 -4.05352980e-01 7.43683219e-01 1.86889720e+00 6.23661280e-01 -4.29055095e-01 3.20586234e-01 5.24499476e-01] [ 1.09570515e+00 -5.14017284e-01 8.76936167e-02 -3.16010624e-01 1.69813776e+00 1.45825934e+00 -4.14205670e-01 2.70861745e-01 -8.78154099e-01 7.86681294e-01 1.48255336e+00 1.57488513e+00 -1.37653840e+00 1.50716662e-01] [-7.73553312e-01 -1.52849090e+00 -1.70331895e+00 1.74881995e+00 -2.72738159e-01 -7.93392479e-01 -4.70606893e-01 1.03340602e+00 6.51148200e-01 4.14899960e-02 -6.69491664e-02 -4.92844880e-01 -1.84033394e+00 -6.42998740e-02] [-6.84412420e-01 1.48487434e-01 6.02196418e-02 1.21738993e-01 -2.27467477e-01 -1.57461375e-01 -1.92742968e+00 -1.43586791e+00 3.29016000e-01 -8.34019542e-01 1.75839543e-01 7.76158273e-01 1.77018508e-01 -1.13507819e+00] [ 1.08058369e+00 2.24634960e-01 8.24699521e-01 1.72983155e-01 1.35479939e+00 -6.73637211e-01 -4.57778782e-01 7.12544322e-01 6.41990602e-02 -2.55707169e+00 -4.96654630e-01 -6.48118794e-01 1.85144889e+00 5.41450977e-01] [ 6.26132905e-01 5.55117965e-01 1.02083719e+00 
1.06672347e+00 8.49809647e-02 -5.27237616e-02 -5.01256883e-01 -9.21835363e-01 -1.62036344e-01 9.32082653e-01 5.63120902e-01 -2.55501628e-01 -1.47013831e+00 2.10222459e+00] [-9.55565050e-02 6.71144605e-01 1.16236269e+00 -3.68023425e-01 -3.22685778e-01 7.86635101e-01 -7.20774770e-01 -2.66975313e-01 -1.33855867e+00 1.12747645e+00 9.04981196e-01 -2.84727484e-01 -5.87886989e-01 1.75683415e+00] [-1.52756190e+00 6.22283101e-01 -5.08719325e-01 4.20720540e-02 1.46378672e+00 -3.35809380e-01 -7.44464278e-01 -6.89134121e-01 -3.31666283e-02 3.21663648e-01 1.52116156e+00 5.71516097e-01 -4.01971787e-01 1.39049745e+00]] [[ 2.64094323e-02 -1.26748037e+00 -7.17011511e-01 -1.07325353e-01 -2.80352179e-02 1.91897064e-01 -3.44933361e-01 -7.74184108e-01 -3.84414345e-01 -1.47863364e+00 5.77387452e-01 1.40699172e+00 -1.09307194e+00 1.50271308e+00] [ 7.81623185e-01 -2.52860457e-01 5.69590449e-01 9.40338433e-01 -9.64632213e-01 4.56188828e-01 2.52876937e-01 5.35167515e-01 -1.28675473e+00 -3.35314780e-01 1.03203106e+00 -2.31314301e-01 -3.39900434e-01 -1.84026766e+00] [ 1.12131679e+00 3.15199420e-02 1.93458572e-01 -2.43290797e-01 1.12316310e+00 1.81988072e+00 6.29111588e-01 9.03104305e-01 6.98892698e-02 -3.75077188e-01 8.57243121e-01 -1.06672406e+00 -9.46751058e-01 6.01041913e-02] [-1.83877930e-01 -2.20730233e+00 -1.16252410e+00 1.95429832e-01 -1.83601773e+00 -8.03895652e-01 1.12192142e+00 -2.21306038e+00 -4.35911387e-01 -4.75775361e-01 -7.83298790e-01 1.66797149e+00 7.53927380e-02 -9.55456972e-01] [-9.84371454e-02 3.11739385e-01 -7.56915808e-01 -1.95011938e+00 1.33185589e+00 3.18536592e+00 -5.72022140e-01 8.24606597e-01 7.63622403e-01 -4.36802179e-01 -1.41539544e-01 2.01019835e+00 -5.31813145e-01 9.69211400e-01] [-3.19583863e-01 2.60608464e-01 -2.59969592e-01 -8.22921932e-01 1.19261526e-01 1.74398494e+00 -1.00896001e+00 -4.88133550e-01 1.82143107e-01 -1.05142839e-01 4.53451604e-01 2.91919082e-01 -1.04145527e+00 1.43094587e+00] [-6.16656803e-02 5.50320387e-01 2.55609369e+00 -1.65737176e+00 
-2.59132296e-01 9.36683834e-01 -5.12472272e-01 -7.47756362e-01 -2.24742222e+00 8.88105452e-01 2.97033519e-01 -5.22808373e-01 7.84100354e-01 -3.83535802e-01] [ 2.56386685e+00 2.02033803e-01 -4.02711004e-01 -1.36404447e-02 -1.42225170e+00 -9.28147972e-01 -6.25654235e-02 -1.53445542e-01 2.20118165e-01 8.89425695e-01 5.22191167e-01 -1.14985633e+00 -3.23009156e-02 -5.93707263e-01] [ 8.51011932e-01 -6.66928768e-01 6.95878625e-01 6.27077937e-01 1.33879447e+00 -1.83948410e+00 -3.43251646e-01 1.02304173e+00 -1.27243865e+00 1.09295940e+00 -1.14612699e+00 1.42431903e+00 -2.01602840e+00 -7.81672835e-01] [-3.32225710e-01 -1.23146963e+00 -7.73949444e-01 1.37708533e+00 2.33647138e-01 -1.07393034e-01 -1.87011993e+00 1.69050884e+00 7.83228874e-01 5.69395781e-01 -7.05265939e-01 1.58673251e+00 -8.86726141e-01 4.01444793e-01] [-1.68138653e-01 6.99473843e-02 5.29995263e-01 -9.66210186e-01 -6.33744061e-01 5.67632020e-01 -1.60295677e+00 -7.67949164e-01 1.21063896e-01 1.61247814e+00 -9.88808125e-02 5.16960442e-01 1.59022665e+00 7.62841642e-01] [-3.59955370e-01 7.47095048e-01 -3.99601549e-01 -1.95682317e-01 -7.04123139e-01 -5.37345745e-02 -8.11483487e-02 1.14861257e-01 -3.74451637e-01 1.47877288e+00 5.72936714e-01 2.17239738e+00 3.29595536e-01 9.48578775e-01] [-2.38601661e+00 -6.37392700e-01 2.34470367e-01 -3.98364007e-01 6.19688451e-01 -5.78225613e-01 9.26510334e-01 9.39872384e-01 -8.44654381e-01 6.60870075e-01 -5.18262565e-01 -7.76809514e-01 9.04569387e-01 -4.06799674e-01] [-1.30924344e+00 -1.10970032e+00 7.34382749e-01 6.00074708e-01 -8.02966654e-01 -1.65878642e+00 9.95336115e-01 3.59265029e-01 -6.79243982e-01 7.50566721e-01 2.04521585e+00 4.14748430e-01 1.03983140e+00 3.70122731e-01]]]]; ov_res: [[[[ 2.37157964e-03 -1.64424908e+00 -1.41071165e+00 1.25786260e-01 -1.05186725e+00 4.49540138e-01 -3.41551256e+00 -3.89624983e-01 1.55643427e+00 1.56279609e-01 6.72772944e-01 -9.47986543e-02 1.98707029e-01 -2.89885312e-01] [-1.81281924e+00 2.40699363e+00 1.07945335e+00 -8.56304526e-01 
-2.34523326e-01 2.42352471e-01 3.32247943e-01 -6.92154288e-01 -1.81719184e+00 -5.74054897e-01 4.87720639e-01 -3.01927894e-01 1.28412044e+00 2.20050097e-01] [-1.85315812e+00 -1.48285067e+00 -1.43691331e-01 -6.27910972e-01 3.85183394e-01 4.00841802e-01 -3.91098298e-02 5.11378467e-01 -6.18715659e-02 7.48770893e-01 -8.62950265e-01 3.98848295e-01 -3.25171314e-02 -1.60522008e+00] [ 9.52048659e-01 9.67232436e-02 -3.64153147e-01 5.25882363e-01 -4.98472214e-01 4.87178303e-02 1.06988490e+00 2.46530080e+00 6.41945004e-02 -2.20341384e-01 1.76306689e+00 1.94188988e+00 8.89445901e-01 -1.00177467e+00] [ 1.32938659e+00 8.62863123e-01 -2.17508376e-01 7.01363981e-01 -5.07254124e-01 7.27312028e-01 -7.64327347e-02 4.59253788e-01 -2.40613706e-02 9.65659320e-02 1.99466392e-01 6.01653010e-02 -2.93613970e-01 -1.17280507e+00] [-1.29033399e+00 -4.18206275e-01 -8.19523454e-01 -4.15444583e-01 -5.01268566e-01 -6.53920710e-01 -1.31371462e+00 8.83944631e-01 -8.80640864e-01 -1.82227933e+00 1.09132338e+00 2.53515983e+00 -3.76271427e-01 7.94301406e-02] [-1.81619060e+00 -9.21029389e-01 1.51664186e+00 -8.53310227e-01 1.07222438e+00 5.23092806e-01 2.61589080e-01 1.83374688e-01 -5.04504085e-01 7.96479523e-01 -5.81981897e-01 -1.87684917e+00 -2.63796121e-01 7.27881014e-01] [ 2.20725679e+00 -7.84049332e-01 5.37199557e-01 9.50693011e-01 3.15021574e-01 8.55536401e-01 -1.13080718e-01 -1.74498236e+00 -9.53751802e-01 8.23906302e-01 -8.39477554e-02 -1.61424220e+00 5.67731559e-01 -1.26940206e-01] [-2.09436655e+00 -6.47063434e-01 1.39459145e+00 1.16387844e+00 -3.04136008e-01 2.32672468e-02 -1.02894962e+00 -3.01596105e-01 -2.70463582e-02 1.03969765e+00 -6.59469962e-01 3.00305188e-01 -7.34492719e-01 -8.78176808e-01] [-9.08472657e-01 -2.83889979e-01 2.71913558e-01 -4.63975698e-01 -6.54022574e-01 -1.04478908e+00 1.63290834e+00 -1.29060006e+00 1.54820549e+00 -3.76476608e-02 -7.53042817e-01 3.52028728e-01 1.96951473e+00 -6.94825873e-02] [ 9.50417995e-01 -1.22674298e+00 5.32342076e-01 4.62358326e-01 2.10121702e-02 
2.24034637e-01 1.06777906e+00 1.35800636e+00 -1.58680511e+00 -9.47951555e-01 5.56898937e-02 1.11115074e+00 -2.91997820e-01 -1.26076198e+00] [-1.24077034e+00 -1.20901547e-01 -3.61588508e-01 -5.35470784e-01 -1.55611467e+00 3.33257526e-01 -5.03126383e-02 3.79982084e-01 -5.40117562e-01 9.23020840e-01 9.73320603e-01 2.44219586e-01 -1.65313199e-01 4.07020271e-01] [ 7.12122977e-01 1.63146067e+00 4.95212764e-01 -1.59939110e+00 1.04959339e-01 6.23462021e-01 4.44849879e-01 -1.13950446e-01 7.03601599e-01 -1.57271159e+00 -3.78027022e-01 -3.01736206e-01 9.42422986e-01 1.16937959e+00] [-7.85277724e-01 8.46106946e-01 -1.08349693e+00 1.65612829e+00 8.12256217e-01 -3.53511572e-01 -5.48123121e-01 -4.29916978e-01 5.76481879e-01 -7.85534322e-01 4.93710518e-01 5.88361859e-01 7.58408964e-01 7.98884705e-02]] [[-2.22638384e-01 2.76407689e-01 -8.87626410e-01 -5.16555786e-01 -4.52379346e-01 -1.96143723e+00 -3.08904082e-01 -6.48234725e-01 3.32795709e-01 7.43842050e-02 -5.54643348e-02 -1.24475360e+00 -1.17054904e+00 1.48434389e+00] [-1.84747851e+00 -3.48415017e-01 -2.25824282e-01 1.45265222e-01 1.18027508e+00 1.90198135e+00 1.38189006e+00 -1.08670313e-02 -2.61324906e+00 -7.16185749e-01 6.04838580e-02 -1.03044641e+00 1.71526885e+00 -2.30824947e-01] [ 9.69349965e-02 1.26042056e+00 4.49427992e-01 2.16939658e-01 2.06340134e-01 -6.82751656e-01 7.65476167e-01 5.33626556e-01 7.51842618e-01 -1.07725358e+00 -5.96097887e-01 -1.38717282e+00 9.40990388e-01 5.81349492e-01] [-2.31468916e+00 -1.66635271e-02 4.59224612e-01 -1.72256902e-01 5.48780739e-01 1.92956761e-01 -3.66653129e-02 -2.63910592e-01 -3.14243942e-01 1.27358067e+00 1.43492505e-01 1.59970379e+00 -4.58889157e-01 -1.28667319e+00] [-2.40407139e-01 1.09438074e+00 -8.32264006e-01 3.13845038e-01 -7.36943781e-01 -8.20791841e-01 -1.16802537e+00 9.43289250e-02 -3.53726238e-01 -9.78594363e-01 -1.00245774e+00 5.26302040e-01 2.49728784e-01 7.62646377e-01] [ 4.24282342e-01 2.16927576e+00 -1.35215908e-01 -8.69945347e-01 1.74066758e+00 1.68632284e-01 
-2.70510614e-02 -2.90434897e-01 4.44074154e-01 -1.75622895e-01 1.18814385e+00 5.48755586e-01 2.94697434e-01 -8.45539093e-01] [-2.54665703e-01 9.51208472e-01 6.48218691e-01 5.31849265e-02 -6.42900169e-01 3.53217512e-01 -1.32233226e+00 -4.05352980e-01 7.43683219e-01 1.86889720e+00 6.23661280e-01 -4.29055095e-01 3.20586234e-01 5.24499476e-01] [ 1.09570515e+00 -5.14017284e-01 8.76936167e-02 -3.16010624e-01 1.69813776e+00 1.45825934e+00 -4.14205670e-01 2.70861745e-01 -8.78154099e-01 7.86681294e-01 1.48255336e+00 1.57488513e+00 -1.37653840e+00 1.50716662e-01] [-7.73553312e-01 -1.52849090e+00 -1.70331895e+00 1.74881995e+00 -2.72738159e-01 -7.93392479e-01 -4.70606893e-01 1.03340602e+00 6.51148200e-01 4.14899960e-02 -6.69491664e-02 -4.92844880e-01 -1.84033394e+00 -6.42998740e-02] [-6.84412420e-01 1.48487434e-01 6.02196418e-02 1.21738993e-01 -2.27467477e-01 -1.57461375e-01 -1.92742968e+00 -1.43586791e+00 3.29016000e-01 -8.34019542e-01 1.75839543e-01 7.76158273e-01 1.77018508e-01 -1.13507819e+00] [ 1.08058369e+00 2.24634960e-01 8.24699521e-01 1.72983155e-01 1.35479939e+00 -6.73637211e-01 -4.57778782e-01 7.12544322e-01 6.41990602e-02 -2.55707169e+00 -4.96654630e-01 -6.48118794e-01 1.85144889e+00 5.41450977e-01] [ 6.26132905e-01 5.55117965e-01 1.02083719e+00 1.06672347e+00 8.49809647e-02 -5.27237616e-02 -5.01256883e-01 -9.21835363e-01 -1.62036344e-01 9.32082653e-01 5.63120902e-01 -2.55501628e-01 -1.47013831e+00 2.10222459e+00] [-9.55565050e-02 6.71144605e-01 1.16236269e+00 -3.68023425e-01 -3.22685778e-01 7.86635101e-01 -7.20774770e-01 -2.66975313e-01 -1.33855867e+00 1.12747645e+00 9.04981196e-01 -2.84727484e-01 -5.87886989e-01 1.75683415e+00] [-1.52756190e+00 6.22283101e-01 -5.08719325e-01 4.20720540e-02 1.46378672e+00 -3.35809380e-01 -7.44464278e-01 -6.89134121e-01 -3.31666283e-02 3.21663648e-01 1.52116156e+00 5.71516097e-01 -4.01971787e-01 1.39049745e+00]] [[ 2.64094323e-02 -1.26748037e+00 -7.17011511e-01 -1.07325353e-01 -2.80352179e-02 1.91897064e-01 -3.44933361e-01 
-7.74184108e-01 -3.84414345e-01 -1.47863364e+00 5.77387452e-01 1.40699172e+00 -1.09307194e+00 1.50271308e+00] [ 7.81623185e-01 -2.52860457e-01 5.69590449e-01 9.40338433e-01 -9.64632213e-01 4.56188828e-01 2.52876937e-01 5.35167515e-01 -1.28675473e+00 -3.35314780e-01 1.03203106e+00 -2.31314301e-01 -3.39900434e-01 -1.84026766e+00] [ 1.12131679e+00 3.15199420e-02 1.93458572e-01 -2.43290797e-01 1.12316310e+00 1.81988072e+00 6.29111588e-01 9.03104305e-01 6.98892698e-02 -3.75077188e-01 8.57243121e-01 -1.06672406e+00 -9.46751058e-01 6.01041913e-02] [-1.83877930e-01 -2.20730233e+00 -1.16252410e+00 1.95429832e-01 -1.83601773e+00 -8.03895652e-01 1.12192142e+00 -2.21306038e+00 -4.35911387e-01 -4.75775361e-01 -7.83298790e-01 1.66797149e+00 7.53927380e-02 -9.55456972e-01] [-9.84371454e-02 3.11739385e-01 -7.56915808e-01 -1.95011938e+00 1.33185589e+00 3.18536592e+00 -5.72022140e-01 8.24606597e-01 7.63622403e-01 -4.36802179e-01 -1.41539544e-01 2.01019835e+00 -5.31813145e-01 9.69211400e-01] [-3.19583863e-01 2.60608464e-01 -2.59969592e-01 -8.22921932e-01 1.19261526e-01 1.74398494e+00 -1.00896001e+00 -4.88133550e-01 1.82143107e-01 -1.05142839e-01 4.53451604e-01 2.91919082e-01 -1.04145527e+00 1.43094587e+00] [-6.16656803e-02 5.50320387e-01 2.55609369e+00 -1.65737176e+00 -2.59132296e-01 9.36683834e-01 -5.12472272e-01 -7.47756362e-01 -2.24742222e+00 8.88105452e-01 2.97033519e-01 -5.22808373e-01 7.84100354e-01 -3.83535802e-01] [ 2.56386685e+00 2.02033803e-01 -4.02711004e-01 -1.36404447e-02 -1.42225170e+00 -9.28147972e-01 -6.25654235e-02 -1.53445542e-01 2.20118165e-01 8.89425695e-01 5.22191167e-01 -1.14985633e+00 -3.23009156e-02 -5.93707263e-01] [ 8.51011932e-01 -6.66928768e-01 6.95878625e-01 6.27077937e-01 1.33879447e+00 -1.83948410e+00 -3.43251646e-01 1.02304173e+00 -1.27243865e+00 1.09295940e+00 -1.14612699e+00 1.42431903e+00 -2.01602840e+00 -7.81672835e-01] [-3.32225710e-01 -1.23146963e+00 -7.73949444e-01 1.37708533e+00 2.33647138e-01 -1.07393034e-01 -1.87011993e+00 1.69050884e+00 
7.83228874e-01 5.69395781e-01 -7.05265939e-01 1.58673251e+00 -8.86726141e-01 4.01444793e-01] [-1.68138653e-01 6.99473843e-02 5.29995263e-01 -9.66210186e-01 -6.33744061e-01 5.67632020e-01 -1.60295677e+00 -7.67949164e-01 1.21063896e-01 1.61247814e+00 -9.88808125e-02 5.16960442e-01 1.59022665e+00 7.62841642e-01] [-3.59955370e-01 7.47095048e-01 -3.99601549e-01 -1.95682317e-01 -7.04123139e-01 -5.37345745e-02 -8.11483487e-02 1.14861257e-01 -3.74451637e-01 1.47877288e+00 5.72936714e-01 2.17239738e+00 3.29595536e-01 9.48578775e-01] [-2.38601661e+00 -6.37392700e-01 2.34470367e-01 -3.98364007e-01 6.19688451e-01 -5.78225613e-01 9.26510334e-01 9.39872384e-01 -8.44654381e-01 6.60870075e-01 -5.18262565e-01 -7.76809514e-01 9.04569387e-01 -4.06799674e-01] [-1.30924344e+00 -1.10970032e+00 7.34382749e-01 6.00074708e-01 -8.02966654e-01 -1.65878642e+00 9.95336115e-01 3.59265029e-01 -6.79243982e-01 7.50566721e-01 2.04521585e+00 4.14748430e-01 1.03983140e+00 3.70122731e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(0, 0, -1, -2) - mode:circular - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5517.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0, -1, -2]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="circular"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 4.69505012e-01 8.59974474e-02 -3.96644831e-01 -6.64303184e-01 6.24609649e-01 1.52422652e-01 -2.66525447e-02 1.08674335e+00 5.65453649e-01 1.47440448e-01 9.19126391e-01 4.25991118e-01 1.14049840e+00 4.51506317e-01] [ 6.94322526e-01 -3.38831335e-01 -1.82311490e-01 -5.69043934e-01 -1.46304563e-01 -6.03110433e-01 -4.76869017e-01 -6.87873065e-01 -1.83785546e+00 1.96722341e+00 4.01104271e-01 -1.01135504e+00 1.66983202e-01 -1.30314124e+00] [-3.85186858e-02 2.53967345e-01 7.51824975e-01 -5.78948021e-01 1.45086861e+00 8.30518305e-01 3.50448221e-01 1.40841782e-01 -7.40659535e-01 7.51228988e-01 8.02076101e-01 -9.92348194e-01 -2.05701637e+00 8.13717425e-01] [-2.91386008e-01 1.94494426e-01 1.07313609e+00 -9.85603273e-01 -6.84308231e-01 -1.84664145e-01 -2.06308866e+00 1.80411503e-01 2.18396807e+00 9.84248698e-01 -2.34169340e+00 -6.13039494e-01 1.40466362e-01 -2.23683923e-01] [-2.35004163e+00 -1.54063904e+00 -1.45429955e-03 -7.49024451e-01 -2.32246447e+00 1.14070296e+00 8.94510329e-01 -8.48067522e-01 1.02555954e+00 -3.35217953e-01 1.08819008e+00 2.45382413e-01 -1.24088310e-01 -5.31834662e-01] [ 1.63080907e+00 -1.76989958e-01 8.51047099e-01 9.83609498e-01 -1.27697957e+00 4.08975422e-01 3.20046008e-01 2.76366770e-01 -1.95244694e+00 2.20228910e-01 -5.05389392e-01 5.49397096e-02 -1.64516494e-01 -1.80899465e+00] [ 6.46208465e-01 8.42527568e-01 -1.84572265e-02 -2.98573971e-01 7.22265020e-02 -6.84920788e-01 6.92286730e-01 -6.67637587e-02 -4.35746722e-02 -5.94618499e-01 -8.03616703e-01 4.50859725e-01 2.28951144e+00 6.86748326e-01] 
[-1.25083315e+00 1.22150815e+00 -8.92011702e-01 -1.98931217e+00 -1.57400107e+00 -2.02023715e-01 -1.13431466e+00 -4.38367993e-01 -5.61948180e-01 -4.37335968e-01 -4.70317066e-01 6.50009692e-01 5.74628711e-01 -8.31678927e-01] [ 1.12770784e+00 1.19140577e+00 4.31439318e-02 -1.12466559e-01 -1.51687294e-01 1.15105035e-02 -9.73006546e-01 -1.14878190e+00 1.77928448e+00 -9.58918214e-01 1.03110564e+00 -2.10772896e+00 -1.99725837e-01 -1.37079060e+00] [ 1.05793405e+00 7.31803477e-01 -9.43310022e-01 -1.94434047e+00 6.06206059e-01 -4.11691576e-01 1.90778181e-01 5.35744071e-01 2.45924383e-01 -5.11577027e-03 9.57660735e-01 1.54072547e+00 2.42873639e-01 -1.10849202e-01] [-1.22707522e+00 1.70360351e+00 9.34603512e-01 8.99126649e-01 -9.17283058e-01 1.57123303e+00 8.93618464e-01 -2.18176290e-01 7.41793931e-01 -1.05367911e+00 3.06849480e-01 -5.01234949e-01 -1.13028567e-02 5.71550541e-02]] [[-1.25047064e+00 -2.15236831e+00 -2.77451873e-01 -2.62507766e-01 4.35362250e-01 -2.50564098e+00 -5.91055274e-01 -1.96321577e-01 -1.42171073e+00 6.53188646e-01 1.75402296e+00 -1.68848503e+00 1.46679521e+00 4.27976608e-01] [ 5.02156854e-01 -1.63061464e+00 -2.41993219e-01 1.39450514e+00 4.74127382e-01 8.10002759e-02 -6.28560483e-01 -4.21088219e-01 6.51980102e-01 1.71584296e+00 8.27946961e-01 -3.16600055e-01 4.90223289e-01 3.37932885e-01] [-1.85762358e+00 -2.06613302e+00 -1.25430501e+00 3.16877246e-01 3.20663512e-01 -8.79520416e-01 3.55930716e-01 1.42878473e+00 8.57075036e-01 -1.03804275e-01 1.30493975e+00 3.63840520e-01 1.26486564e+00 2.96247005e-01] [ 1.41781080e+00 -4.05068398e-02 -1.74764663e-01 -9.43775296e-01 -1.20655596e+00 -2.33781123e+00 -1.50433862e+00 7.36829221e-01 3.23582917e-01 -4.83584870e-03 1.93553936e+00 -1.47684860e+00 -2.42863274e+00 -1.44906330e+00] [-1.73524320e+00 6.55371547e-01 1.21014655e+00 -1.13970959e+00 -2.95906793e-02 -3.00231516e-01 -2.90952921e-01 -2.47065857e-01 -3.60380560e-01 7.35502064e-01 4.04229671e-01 -9.44991410e-01 -1.11600578e+00 9.88561451e-01] [ 7.51861215e-01 
1.24282932e+00 -8.12013865e-01 1.42715442e+00 1.42069310e-01 -2.94583291e-01 -1.74128151e+00 -3.10356230e-01 -1.12147224e+00 3.28981400e-01 6.75112724e-01 -3.58896106e-01 6.94444418e-01 -1.54273319e+00] [ 5.11475317e-02 2.45801955e-01 7.01294839e-01 -5.89377359e-02 -1.39538288e+00 1.21477568e+00 -5.67519069e-01 6.53785050e-01 2.74390429e-02 -1.39887393e+00 -3.43011051e-01 -4.00705427e-01 -3.39135379e-01 9.01652992e-01] [-3.79503846e-01 5.09282589e-01 -1.24019945e+00 1.55963421e+00 4.01276976e-01 -4.31945235e-01 4.74238366e-01 -1.20959923e-01 8.05296123e-01 1.19783431e-01 -9.98820543e-01 6.94525123e-01 7.53273487e-01 9.69241679e-01] [ 3.76632959e-01 -1.87252557e+00 5.73700905e-01 1.33399045e+00 -5.17821983e-02 -6.12896144e-01 -2.68087178e-01 -1.62649465e+00 -1.44680834e+00 8.16994905e-01 -8.54995787e-01 -1.09226632e+00 -1.82971224e-01 3.82593244e-01] [ 5.24232201e-02 7.77992725e-01 -8.01642597e-01 1.29061997e+00 -5.23134351e-01 5.05466044e-01 -3.18603754e-01 -1.21178567e+00 7.30587840e-01 -1.32551014e+00 -8.60599399e-01 1.67145312e+00 1.25403666e+00 -1.05528378e+00] [-6.28896654e-01 8.14867198e-01 8.55593622e-01 1.70704877e+00 3.04610521e-01 -3.25239748e-01 -3.51042819e+00 -5.73407292e-01 1.40476108e+00 -2.06154323e+00 2.15522766e-01 2.97384840e-02 -4.56787437e-01 1.34526801e+00]] [[ 8.66611674e-03 -1.22466362e+00 5.58867216e-01 5.55430412e-01 -1.11762929e+00 -1.51726842e+00 -4.21046108e-01 1.42323303e+00 -1.01576984e+00 -1.30026117e-01 -5.85382879e-01 1.12044132e+00 -6.76356852e-01 5.56646645e-01] [-3.13861489e-01 8.58072102e-01 1.48040438e+00 1.00085604e+00 -7.69278884e-01 -4.37143952e-01 -4.20600027e-01 -1.94404638e+00 -4.82879244e-02 -5.65611005e-01 -2.37997961e+00 7.08807170e-01 -4.09187615e-01 5.56666195e-01] [ 2.40138620e-02 -2.79880494e-01 -3.28595847e-01 5.99106729e-01 -1.26799333e+00 -1.49747849e-01 1.32922128e-01 -1.63733768e+00 1.23630333e+00 -6.12869978e-01 -3.79229188e-01 7.54318163e-02 4.73711133e-01 -4.73835898e-05] [-2.03685522e+00 6.27258793e-02 
-9.39755142e-01 -1.72962859e-01 -5.18165171e-01 -1.01263262e-01 -3.49753916e-01 4.70944524e-01 -1.14098239e+00 1.09710658e+00 -1.08675277e+00 -6.58214092e-01 1.87262893e-01 -1.07654250e+00] [ 5.27525604e-01 -1.00839043e+00 2.28416994e-02 -9.24721360e-01 -9.66193914e-01 1.07655966e+00 1.73763132e+00 1.62657285e+00 -7.74721086e-01 2.05585212e-01 -2.53855348e-01 1.53234541e-01 3.62032741e-01 9.07369792e-01] [ 6.12830758e-01 7.90424585e-01 -7.90515542e-02 -8.17674473e-02 1.79826963e+00 1.29910693e-01 1.17578638e+00 -2.41117048e+00 -2.02799201e+00 1.11101024e-01 -7.65214264e-01 5.59256561e-02 -2.70415545e-01 -1.19512685e-01] [-1.26760888e+00 -9.99293700e-02 2.53274113e-01 2.30767936e-01 2.03038430e+00 1.21396220e+00 -8.61441255e-01 -7.99722016e-01 6.99484825e-01 9.79290843e-01 1.23661494e+00 -1.43664944e+00 -1.16377902e+00 -8.61240685e-01] [ 4.84618992e-01 1.40170383e+00 -2.32612595e-01 1.86584961e+00 6.95930719e-02 -1.06116366e+00 1.12879395e+00 -5.49083352e-01 3.21208924e-01 1.72385371e+00 -1.52019426e-01 9.93964255e-01 -1.49812743e-01 9.00010228e-01] [-5.48996210e-01 -7.50718951e-01 1.39644548e-01 -1.13811362e+00 -2.78324127e-01 -1.61255765e+00 -1.89552569e+00 -4.96096730e-01 -5.88337421e-01 1.02152681e+00 7.61770725e-01 8.30240846e-01 1.44384396e+00 -4.10005808e-01] [-3.53672922e-01 -4.01443481e-01 1.43347418e+00 5.87392449e-01 6.65542006e-01 8.15380871e-01 2.63590837e+00 4.83664781e-01 -8.47147331e-02 1.15040624e+00 1.42888486e-01 1.32137567e-01 -7.86268413e-01 7.51313686e-01] [-2.58389139e+00 -1.01101410e+00 -3.45599025e-01 -7.31560230e-01 -1.39619195e+00 -2.06923485e+00 -1.25089288e-01 1.91537654e+00 1.61736941e+00 -1.24762261e+00 1.60448313e+00 -3.65259707e-01 4.76255953e-01 6.55545235e-01]]]]; ov_res: [[[[ 4.69505012e-01 8.59974474e-02 -3.96644831e-01 -6.64303184e-01 6.24609649e-01 1.52422652e-01 -2.66525447e-02 1.08674335e+00 5.65453649e-01 1.47440448e-01 9.19126391e-01 4.25991118e-01 1.14049840e+00 4.51506317e-01] [ 6.94322526e-01 -3.38831335e-01 
-1.82311490e-01 -5.69043934e-01 -1.46304563e-01 -6.03110433e-01 -4.76869017e-01 -6.87873065e-01 -1.83785546e+00 1.96722341e+00 4.01104271e-01 -1.01135504e+00 1.66983202e-01 -1.30314124e+00] [-3.85186858e-02 2.53967345e-01 7.51824975e-01 -5.78948021e-01 1.45086861e+00 8.30518305e-01 3.50448221e-01 1.40841782e-01 -7.40659535e-01 7.51228988e-01 8.02076101e-01 -9.92348194e-01 -2.05701637e+00 8.13717425e-01] [-2.91386008e-01 1.94494426e-01 1.07313609e+00 -9.85603273e-01 -6.84308231e-01 -1.84664145e-01 -2.06308866e+00 1.80411503e-01 2.18396807e+00 9.84248698e-01 -2.34169340e+00 -6.13039494e-01 1.40466362e-01 -2.23683923e-01] [-2.35004163e+00 -1.54063904e+00 -1.45429955e-03 -7.49024451e-01 -2.32246447e+00 1.14070296e+00 8.94510329e-01 -8.48067522e-01 1.02555954e+00 -3.35217953e-01 1.08819008e+00 2.45382413e-01 -1.24088310e-01 -5.31834662e-01] [ 1.63080907e+00 -1.76989958e-01 8.51047099e-01 9.83609498e-01 -1.27697957e+00 4.08975422e-01 3.20046008e-01 2.76366770e-01 -1.95244694e+00 2.20228910e-01 -5.05389392e-01 5.49397096e-02 -1.64516494e-01 -1.80899465e+00] [ 6.46208465e-01 8.42527568e-01 -1.84572265e-02 -2.98573971e-01 7.22265020e-02 -6.84920788e-01 6.92286730e-01 -6.67637587e-02 -4.35746722e-02 -5.94618499e-01 -8.03616703e-01 4.50859725e-01 2.28951144e+00 6.86748326e-01] [-1.25083315e+00 1.22150815e+00 -8.92011702e-01 -1.98931217e+00 -1.57400107e+00 -2.02023715e-01 -1.13431466e+00 -4.38367993e-01 -5.61948180e-01 -4.37335968e-01 -4.70317066e-01 6.50009692e-01 5.74628711e-01 -8.31678927e-01] [ 1.12770784e+00 1.19140577e+00 4.31439318e-02 -1.12466559e-01 -1.51687294e-01 1.15105035e-02 -9.73006546e-01 -1.14878190e+00 1.77928448e+00 -9.58918214e-01 1.03110564e+00 -2.10772896e+00 -1.99725837e-01 -1.37079060e+00] [ 1.05793405e+00 7.31803477e-01 -9.43310022e-01 -1.94434047e+00 6.06206059e-01 -4.11691576e-01 1.90778181e-01 5.35744071e-01 2.45924383e-01 -5.11577027e-03 9.57660735e-01 1.54072547e+00 2.42873639e-01 -1.10849202e-01] [-1.22707522e+00 1.70360351e+00 9.34603512e-01 
8.99126649e-01 -9.17283058e-01 1.57123303e+00 8.93618464e-01 -2.18176290e-01 7.41793931e-01 -1.05367911e+00 3.06849480e-01 -5.01234949e-01 -1.13028567e-02 5.71550541e-02]] [[-1.25047064e+00 -2.15236831e+00 -2.77451873e-01 -2.62507766e-01 4.35362250e-01 -2.50564098e+00 -5.91055274e-01 -1.96321577e-01 -1.42171073e+00 6.53188646e-01 1.75402296e+00 -1.68848503e+00 1.46679521e+00 4.27976608e-01] [ 5.02156854e-01 -1.63061464e+00 -2.41993219e-01 1.39450514e+00 4.74127382e-01 8.10002759e-02 -6.28560483e-01 -4.21088219e-01 6.51980102e-01 1.71584296e+00 8.27946961e-01 -3.16600055e-01 4.90223289e-01 3.37932885e-01] [-1.85762358e+00 -2.06613302e+00 -1.25430501e+00 3.16877246e-01 3.20663512e-01 -8.79520416e-01 3.55930716e-01 1.42878473e+00 8.57075036e-01 -1.03804275e-01 1.30493975e+00 3.63840520e-01 1.26486564e+00 2.96247005e-01] [ 1.41781080e+00 -4.05068398e-02 -1.74764663e-01 -9.43775296e-01 -1.20655596e+00 -2.33781123e+00 -1.50433862e+00 7.36829221e-01 3.23582917e-01 -4.83584870e-03 1.93553936e+00 -1.47684860e+00 -2.42863274e+00 -1.44906330e+00] [-1.73524320e+00 6.55371547e-01 1.21014655e+00 -1.13970959e+00 -2.95906793e-02 -3.00231516e-01 -2.90952921e-01 -2.47065857e-01 -3.60380560e-01 7.35502064e-01 4.04229671e-01 -9.44991410e-01 -1.11600578e+00 9.88561451e-01] [ 7.51861215e-01 1.24282932e+00 -8.12013865e-01 1.42715442e+00 1.42069310e-01 -2.94583291e-01 -1.74128151e+00 -3.10356230e-01 -1.12147224e+00 3.28981400e-01 6.75112724e-01 -3.58896106e-01 6.94444418e-01 -1.54273319e+00] [ 5.11475317e-02 2.45801955e-01 7.01294839e-01 -5.89377359e-02 -1.39538288e+00 1.21477568e+00 -5.67519069e-01 6.53785050e-01 2.74390429e-02 -1.39887393e+00 -3.43011051e-01 -4.00705427e-01 -3.39135379e-01 9.01652992e-01] [-3.79503846e-01 5.09282589e-01 -1.24019945e+00 1.55963421e+00 4.01276976e-01 -4.31945235e-01 4.74238366e-01 -1.20959923e-01 8.05296123e-01 1.19783431e-01 -9.98820543e-01 6.94525123e-01 7.53273487e-01 9.69241679e-01] [ 3.76632959e-01 -1.87252557e+00 5.73700905e-01 1.33399045e+00 
-5.17821983e-02 -6.12896144e-01 -2.68087178e-01 -1.62649465e+00 -1.44680834e+00 8.16994905e-01 -8.54995787e-01 -1.09226632e+00 -1.82971224e-01 3.82593244e-01] [ 5.24232201e-02 7.77992725e-01 -8.01642597e-01 1.29061997e+00 -5.23134351e-01 5.05466044e-01 -3.18603754e-01 -1.21178567e+00 7.30587840e-01 -1.32551014e+00 -8.60599399e-01 1.67145312e+00 1.25403666e+00 -1.05528378e+00] [-6.28896654e-01 8.14867198e-01 8.55593622e-01 1.70704877e+00 3.04610521e-01 -3.25239748e-01 -3.51042819e+00 -5.73407292e-01 1.40476108e+00 -2.06154323e+00 2.15522766e-01 2.97384840e-02 -4.56787437e-01 1.34526801e+00]] [[ 8.66611674e-03 -1.22466362e+00 5.58867216e-01 5.55430412e-01 -1.11762929e+00 -1.51726842e+00 -4.21046108e-01 1.42323303e+00 -1.01576984e+00 -1.30026117e-01 -5.85382879e-01 1.12044132e+00 -6.76356852e-01 5.56646645e-01] [-3.13861489e-01 8.58072102e-01 1.48040438e+00 1.00085604e+00 -7.69278884e-01 -4.37143952e-01 -4.20600027e-01 -1.94404638e+00 -4.82879244e-02 -5.65611005e-01 -2.37997961e+00 7.08807170e-01 -4.09187615e-01 5.56666195e-01] [ 2.40138620e-02 -2.79880494e-01 -3.28595847e-01 5.99106729e-01 -1.26799333e+00 -1.49747849e-01 1.32922128e-01 -1.63733768e+00 1.23630333e+00 -6.12869978e-01 -3.79229188e-01 7.54318163e-02 4.73711133e-01 -4.73835898e-05] [-2.03685522e+00 6.27258793e-02 -9.39755142e-01 -1.72962859e-01 -5.18165171e-01 -1.01263262e-01 -3.49753916e-01 4.70944524e-01 -1.14098239e+00 1.09710658e+00 -1.08675277e+00 -6.58214092e-01 1.87262893e-01 -1.07654250e+00] [ 5.27525604e-01 -1.00839043e+00 2.28416994e-02 -9.24721360e-01 -9.66193914e-01 1.07655966e+00 1.73763132e+00 1.62657285e+00 -7.74721086e-01 2.05585212e-01 -2.53855348e-01 1.53234541e-01 3.62032741e-01 9.07369792e-01] [ 6.12830758e-01 7.90424585e-01 -7.90515542e-02 -8.17674473e-02 1.79826963e+00 1.29910693e-01 1.17578638e+00 -2.41117048e+00 -2.02799201e+00 1.11101024e-01 -7.65214264e-01 5.59256561e-02 -2.70415545e-01 -1.19512685e-01] [-1.26760888e+00 -9.99293700e-02 2.53274113e-01 2.30767936e-01 2.03038430e+00 
1.21396220e+00 -8.61441255e-01 -7.99722016e-01 6.99484825e-01 9.79290843e-01 1.23661494e+00 -1.43664944e+00 -1.16377902e+00 -8.61240685e-01] [ 4.84618992e-01 1.40170383e+00 -2.32612595e-01 1.86584961e+00 6.95930719e-02 -1.06116366e+00 1.12879395e+00 -5.49083352e-01 3.21208924e-01 1.72385371e+00 -1.52019426e-01 9.93964255e-01 -1.49812743e-01 9.00010228e-01] [-5.48996210e-01 -7.50718951e-01 1.39644548e-01 -1.13811362e+00 -2.78324127e-01 -1.61255765e+00 -1.89552569e+00 -4.96096730e-01 -5.88337421e-01 1.02152681e+00 7.61770725e-01 8.30240846e-01 1.44384396e+00 -4.10005808e-01] [-3.53672922e-01 -4.01443481e-01 1.43347418e+00 5.87392449e-01 6.65542006e-01 8.15380871e-01 2.63590837e+00 4.83664781e-01 -8.47147331e-02 1.15040624e+00 1.42888486e-01 1.32137567e-01 -7.86268413e-01 7.51313686e-01] [-2.58389139e+00 -1.01101410e+00 -3.45599025e-01 -7.31560230e-01 -1.39619195e+00 -2.06923485e+00 -1.25089288e-01 1.91537654e+00 1.61736941e+00 -1.24762261e+00 1.60448313e+00 -3.65259707e-01 4.76255953e-01 6.55545235e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(-1, -2, -1, -2) - mode:circular - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5519.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1, -2, -1, -2]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="circular"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 4.90994066e-01 1.20706213e+00 -1.08344889e+00 -1.29976705e-01 -1.00210369e+00 -1.07969023e-01 -4.39945996e-01 1.30696070e+00 6.00602441e-02 8.25190067e-01 -9.35746372e-01] [ 1.03229558e+00 -1.01318157e+00 -3.72036338e-01 -2.00130120e-01 5.30364215e-02 7.74288401e-02 -3.25481951e-01 -1.33462608e+00 1.48279762e+00 -8.94884109e-01 7.87493169e-01] [-7.90866077e-01 5.37738144e-01 -8.52699816e-01 1.48546898e+00 -2.39331508e+00 2.75232881e-01 -7.16769218e-01 2.40840003e-01 7.93628335e-01 9.89696801e-01 -3.99686426e-01] [-2.34936309e+00 -1.23713648e+00 8.22545290e-01 -2.95772135e-01 -2.84524146e-03 1.24139285e+00 8.78930449e-01 3.65681171e-01 -1.58055902e+00 -3.11283422e+00 -4.47814465e-01] [ 6.37932539e-01 2.12694690e-01 1.57479718e-01 8.65571022e-01 5.61472595e-01 -1.06246364e+00 -1.19704700e+00 -6.94713175e-01 2.69381666e+00 -5.67179084e-01 1.61703873e+00] [-4.32470709e-01 9.41303149e-02 -6.91912830e-01 5.47544777e-01 -7.00103462e-01 -4.51086283e-01 6.66721761e-01 -1.38513911e+00 1.53810889e-01 1.97817755e+00 3.30140263e-01] [ 6.43646196e-02 2.20994115e+00 -2.15081549e+00 4.03293252e-01 -6.16197772e-02 -9.85078871e-01 -1.93221748e-01 7.37547278e-01 1.55268061e+00 1.00933754e+00 -4.31766987e-01] [ 1.29516236e-02 -1.15182555e+00 -7.50036538e-02 -2.23889375e+00 -1.54036686e-01 3.75058442e-01 -4.90698248e-01 1.10686243e+00 -2.92987712e-02 4.06309217e-01 -1.27686608e+00] [-2.32479715e+00 -3.51009220e-01 -1.37234831e+00 -1.73345363e+00 -1.27325997e-01 1.01949859e+00 1.11567652e+00 -1.90199959e+00 2.44282261e-01 
1.30211914e+00 -7.68326700e-01] [ 3.41225237e-01 2.03681692e-01 2.02515149e+00 -5.08946478e-01 -1.85882294e+00 8.83507788e-01 5.27913749e-01 -1.19269454e+00 -2.29523087e+00 1.28817630e+00 -5.19367695e-01] [-6.61233425e-01 2.42925048e-01 1.30085754e+00 9.49640930e-01 1.30755246e+00 1.20107003e-01 -7.09256589e-01 -2.20917463e+00 7.59857237e-01 1.92028582e-01 -7.39719644e-02]] [[-2.00122976e+00 7.21299291e-01 1.53853402e-01 7.26243377e-01 8.94637764e-01 1.27225626e+00 -2.17471212e-01 9.79628384e-01 -1.53751647e+00 -2.26927757e+00 7.28200749e-02] [-1.30751312e+00 3.98453116e-01 5.93571365e-01 -3.25619936e-01 -2.01126024e-01 5.27287960e-01 1.17934905e-01 -1.25559199e+00 -1.46171048e-01 -4.01767403e-01 -5.98898828e-01] [-1.57028747e+00 5.11456728e-01 9.74718392e-01 4.71351901e-03 -5.55856526e-01 3.12817484e-01 4.55433547e-01 -8.29073321e-03 -3.04389149e-01 -1.29785526e+00 4.73769695e-01] [ 3.23625028e-01 -1.92890897e-01 -9.49673653e-01 -1.78119791e+00 -2.94606256e+00 2.06923246e+00 3.83226871e-01 -1.42189229e+00 -3.14331084e-01 -1.25732899e+00 -2.00210527e-01] [ 3.08383018e-01 2.51220882e-01 1.31266427e+00 2.43048096e+00 4.93450522e-01 8.62949848e-01 -4.52872366e-01 1.22618079e+00 -6.71616733e-01 8.31930637e-01 -1.64916539e+00] [-6.65715158e-01 -1.96145225e+00 1.69142711e+00 5.31471789e-01 1.14207244e+00 -1.53886306e+00 2.30666709e+00 -3.77875745e-01 1.37327552e+00 1.92692026e-01 -1.34520054e+00] [-1.75204292e-01 1.65679008e-02 -9.00276423e-01 -5.75610876e-01 -1.25070167e+00 -4.29689512e-02 -7.57195294e-01 1.16052639e+00 -4.33052987e-01 5.34866989e-01 -3.20514500e-01] [-2.10055336e-01 -2.48051554e-01 2.33142331e-01 7.06082284e-01 -1.05251944e+00 -1.20538378e+00 -1.14630449e+00 8.79504383e-01 -8.95888746e-01 5.58809221e-01 -1.46033123e-01] [-1.18870258e-01 -3.22590470e-01 -2.94598162e-01 -3.37891608e-01 -7.08368838e-01 7.89433420e-01 3.01966574e-02 -8.98900777e-02 2.81290356e-02 3.97489458e-01 -4.93828505e-02] [-1.54640734e-01 -2.21872002e-01 1.25840080e+00 1.74924982e+00 
-3.76379013e-01 -6.33297861e-01 1.04604736e-01 2.36620203e-01 -2.00744763e-01 -1.62539256e+00 -4.80162561e-01] [-5.01763463e-01 -3.85974705e-01 6.86160266e-01 7.82446265e-01 -1.88719973e-01 -3.17676735e+00 1.62176285e-02 7.86819637e-01 -7.30089918e-02 1.03411281e+00 1.41967547e+00]] [[ 4.70337003e-01 -4.28671062e-01 -9.00781751e-01 5.77487826e-01 -2.04089165e-01 2.45537996e+00 -9.27872539e-01 3.58133018e-02 -1.47820675e+00 -5.01321971e-01 -1.24739218e+00] [ 1.02758682e+00 8.33941460e-01 -1.33616477e-02 -1.04795706e+00 1.85589242e+00 -8.26158524e-01 5.18634737e-01 -8.14744711e-01 -3.17227542e-01 9.82217848e-01 -2.53776979e+00] [ 1.36568272e+00 -4.54397589e-01 -1.93619990e+00 1.70864195e-01 1.02311514e-01 1.56236827e-01 5.47335982e-01 -6.78191006e-01 1.47020197e+00 -2.96873188e+00 -1.17872977e+00] [ 6.61988318e-01 -9.62535962e-02 2.15592340e-01 2.00850040e-01 -9.68952835e-01 4.74824131e-01 2.21399665e+00 -6.20354176e-01 -2.41875902e-01 -6.49875104e-01 -6.84942663e-01] [-1.94757611e-01 -7.99084365e-01 8.23607862e-01 -1.09839892e+00 9.98907566e-01 2.20565237e-02 1.01671612e+00 -1.21532822e+00 5.91808498e-01 1.61929762e+00 -9.17157710e-01] [-8.42365265e-01 1.80837476e+00 1.37574661e+00 -3.70170951e-01 -7.74804875e-02 1.35725176e+00 -7.39904270e-02 2.11940333e-01 9.64561701e-01 -2.73220271e-01 -1.74234378e+00] [-8.43890905e-01 -8.41062546e-01 1.89213598e+00 2.23333740e+00 8.85922074e-01 3.18459660e-01 5.57383895e-01 3.04460317e-01 1.29009891e+00 2.49759865e+00 2.30511141e+00] [ 5.36864758e-01 2.68660545e-01 2.53151935e-02 3.11131746e-01 -9.85425115e-01 1.68579233e+00 2.08438826e+00 -9.10907745e-01 9.63310003e-01 1.56894076e+00 1.09312582e+00] [ 6.71944022e-01 9.96068120e-01 -2.23755527e+00 1.08179927e-01 1.34418797e+00 1.72143805e+00 9.61479768e-02 -1.61046818e-01 1.56989670e+00 1.74470282e+00 1.18445301e+00] [-4.37643886e-01 -7.81949818e-01 -6.53029203e-01 -1.03811228e+00 -9.31201160e-01 4.98788357e-02 1.40962958e+00 -6.65256798e-01 -5.30301630e-01 3.74975264e-01 
-4.06616718e-01] [-5.88635206e-01 -1.75198331e-01 -1.46236777e-01 -1.53637397e+00 1.59321332e+00 -5.13533456e-03 1.13094819e+00 -7.57815063e-01 5.56205511e-01 -2.33327413e+00 7.19118059e-01]]]]; ov_res: [[[[ 4.90994066e-01 1.20706213e+00 -1.08344889e+00 -1.29976705e-01 -1.00210369e+00 -1.07969023e-01 -4.39945996e-01 1.30696070e+00 6.00602441e-02 8.25190067e-01 -9.35746372e-01] [ 1.03229558e+00 -1.01318157e+00 -3.72036338e-01 -2.00130120e-01 5.30364215e-02 7.74288401e-02 -3.25481951e-01 -1.33462608e+00 1.48279762e+00 -8.94884109e-01 7.87493169e-01] [-7.90866077e-01 5.37738144e-01 -8.52699816e-01 1.48546898e+00 -2.39331508e+00 2.75232881e-01 -7.16769218e-01 2.40840003e-01 7.93628335e-01 9.89696801e-01 -3.99686426e-01] [-2.34936309e+00 -1.23713648e+00 8.22545290e-01 -2.95772135e-01 -2.84524146e-03 1.24139285e+00 8.78930449e-01 3.65681171e-01 -1.58055902e+00 -3.11283422e+00 -4.47814465e-01] [ 6.37932539e-01 2.12694690e-01 1.57479718e-01 8.65571022e-01 5.61472595e-01 -1.06246364e+00 -1.19704700e+00 -6.94713175e-01 2.69381666e+00 -5.67179084e-01 1.61703873e+00] [-4.32470709e-01 9.41303149e-02 -6.91912830e-01 5.47544777e-01 -7.00103462e-01 -4.51086283e-01 6.66721761e-01 -1.38513911e+00 1.53810889e-01 1.97817755e+00 3.30140263e-01] [ 6.43646196e-02 2.20994115e+00 -2.15081549e+00 4.03293252e-01 -6.16197772e-02 -9.85078871e-01 -1.93221748e-01 7.37547278e-01 1.55268061e+00 1.00933754e+00 -4.31766987e-01] [ 1.29516236e-02 -1.15182555e+00 -7.50036538e-02 -2.23889375e+00 -1.54036686e-01 3.75058442e-01 -4.90698248e-01 1.10686243e+00 -2.92987712e-02 4.06309217e-01 -1.27686608e+00] [-2.32479715e+00 -3.51009220e-01 -1.37234831e+00 -1.73345363e+00 -1.27325997e-01 1.01949859e+00 1.11567652e+00 -1.90199959e+00 2.44282261e-01 1.30211914e+00 -7.68326700e-01] [ 3.41225237e-01 2.03681692e-01 2.02515149e+00 -5.08946478e-01 -1.85882294e+00 8.83507788e-01 5.27913749e-01 -1.19269454e+00 -2.29523087e+00 1.28817630e+00 -5.19367695e-01] [-6.61233425e-01 2.42925048e-01 1.30085754e+00 
9.49640930e-01 1.30755246e+00 1.20107003e-01 -7.09256589e-01 -2.20917463e+00 7.59857237e-01 1.92028582e-01 -7.39719644e-02]] [[-2.00122976e+00 7.21299291e-01 1.53853402e-01 7.26243377e-01 8.94637764e-01 1.27225626e+00 -2.17471212e-01 9.79628384e-01 -1.53751647e+00 -2.26927757e+00 7.28200749e-02] [-1.30751312e+00 3.98453116e-01 5.93571365e-01 -3.25619936e-01 -2.01126024e-01 5.27287960e-01 1.17934905e-01 -1.25559199e+00 -1.46171048e-01 -4.01767403e-01 -5.98898828e-01] [-1.57028747e+00 5.11456728e-01 9.74718392e-01 4.71351901e-03 -5.55856526e-01 3.12817484e-01 4.55433547e-01 -8.29073321e-03 -3.04389149e-01 -1.29785526e+00 4.73769695e-01] [ 3.23625028e-01 -1.92890897e-01 -9.49673653e-01 -1.78119791e+00 -2.94606256e+00 2.06923246e+00 3.83226871e-01 -1.42189229e+00 -3.14331084e-01 -1.25732899e+00 -2.00210527e-01] [ 3.08383018e-01 2.51220882e-01 1.31266427e+00 2.43048096e+00 4.93450522e-01 8.62949848e-01 -4.52872366e-01 1.22618079e+00 -6.71616733e-01 8.31930637e-01 -1.64916539e+00] [-6.65715158e-01 -1.96145225e+00 1.69142711e+00 5.31471789e-01 1.14207244e+00 -1.53886306e+00 2.30666709e+00 -3.77875745e-01 1.37327552e+00 1.92692026e-01 -1.34520054e+00] [-1.75204292e-01 1.65679008e-02 -9.00276423e-01 -5.75610876e-01 -1.25070167e+00 -4.29689512e-02 -7.57195294e-01 1.16052639e+00 -4.33052987e-01 5.34866989e-01 -3.20514500e-01] [-2.10055336e-01 -2.48051554e-01 2.33142331e-01 7.06082284e-01 -1.05251944e+00 -1.20538378e+00 -1.14630449e+00 8.79504383e-01 -8.95888746e-01 5.58809221e-01 -1.46033123e-01] [-1.18870258e-01 -3.22590470e-01 -2.94598162e-01 -3.37891608e-01 -7.08368838e-01 7.89433420e-01 3.01966574e-02 -8.98900777e-02 2.81290356e-02 3.97489458e-01 -4.93828505e-02] [-1.54640734e-01 -2.21872002e-01 1.25840080e+00 1.74924982e+00 -3.76379013e-01 -6.33297861e-01 1.04604736e-01 2.36620203e-01 -2.00744763e-01 -1.62539256e+00 -4.80162561e-01] [-5.01763463e-01 -3.85974705e-01 6.86160266e-01 7.82446265e-01 -1.88719973e-01 -3.17676735e+00 1.62176285e-02 7.86819637e-01 -7.30089918e-02 
1.03411281e+00 1.41967547e+00]] [[ 4.70337003e-01 -4.28671062e-01 -9.00781751e-01 5.77487826e-01 -2.04089165e-01 2.45537996e+00 -9.27872539e-01 3.58133018e-02 -1.47820675e+00 -5.01321971e-01 -1.24739218e+00] [ 1.02758682e+00 8.33941460e-01 -1.33616477e-02 -1.04795706e+00 1.85589242e+00 -8.26158524e-01 5.18634737e-01 -8.14744711e-01 -3.17227542e-01 9.82217848e-01 -2.53776979e+00] [ 1.36568272e+00 -4.54397589e-01 -1.93619990e+00 1.70864195e-01 1.02311514e-01 1.56236827e-01 5.47335982e-01 -6.78191006e-01 1.47020197e+00 -2.96873188e+00 -1.17872977e+00] [ 6.61988318e-01 -9.62535962e-02 2.15592340e-01 2.00850040e-01 -9.68952835e-01 4.74824131e-01 2.21399665e+00 -6.20354176e-01 -2.41875902e-01 -6.49875104e-01 -6.84942663e-01] [-1.94757611e-01 -7.99084365e-01 8.23607862e-01 -1.09839892e+00 9.98907566e-01 2.20565237e-02 1.01671612e+00 -1.21532822e+00 5.91808498e-01 1.61929762e+00 -9.17157710e-01] [-8.42365265e-01 1.80837476e+00 1.37574661e+00 -3.70170951e-01 -7.74804875e-02 1.35725176e+00 -7.39904270e-02 2.11940333e-01 9.64561701e-01 -2.73220271e-01 -1.74234378e+00] [-8.43890905e-01 -8.41062546e-01 1.89213598e+00 2.23333740e+00 8.85922074e-01 3.18459660e-01 5.57383895e-01 3.04460317e-01 1.29009891e+00 2.49759865e+00 2.30511141e+00] [ 5.36864758e-01 2.68660545e-01 2.53151935e-02 3.11131746e-01 -9.85425115e-01 1.68579233e+00 2.08438826e+00 -9.10907745e-01 9.63310003e-01 1.56894076e+00 1.09312582e+00] [ 6.71944022e-01 9.96068120e-01 -2.23755527e+00 1.08179927e-01 1.34418797e+00 1.72143805e+00 9.61479768e-02 -1.61046818e-01 1.56989670e+00 1.74470282e+00 1.18445301e+00] [-4.37643886e-01 -7.81949818e-01 -6.53029203e-01 -1.03811228e+00 -9.31201160e-01 4.98788357e-02 1.40962958e+00 -6.65256798e-01 -5.30301630e-01 3.74975264e-01 -4.06616718e-01] [-5.88635206e-01 -1.75198331e-01 -1.46236777e-01 -1.53637397e+00 1.59321332e+00 -5.13533456e-03 1.13094819e+00 -7.57815063e-01 5.56205511e-01 -2.33327413e+00 7.19118059e-01]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad4d[ ie_device:CPU - precision:FP32 - pads:(-5, -8, 0, 0) - mode:circular - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5521.aten_pad, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-5, -8, 0, 0]]() %self.value : NoneType = prim::Constant() %self.mode : str = prim::Constant[value="circular"]() %5 : Tensor = aten::pad(%x.1, %2, %self.mode, %self.value) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:23 return (%5) fw_re: [[[[ 0.37792856] [-0.43736795] [ 1.1396664 ] [ 1.8035803 ] [ 0.5884567 ] [-0.72878283] [-1.1188047 ] [ 0.8631539 ] [ 0.10500188] [ 0.8647555 ] [-2.1768932 ] [-0.7617271 ] [ 2.2998085 ] [-0.3308042 ]] [[-0.10202249] [-0.211866 ] [-0.24666454] [-0.36853164] [ 0.08592405] [ 0.54460007] [-1.3597872 ] [ 0.6313373 ] [-1.0768083 ] [-0.34932294] [ 0.07079557] [-2.3158486 ] [ 0.00711034] [ 0.0067766 ]] [[-1.5525919 ] [-0.02786177] [-0.8440236 ] [-1.466947 ] [-0.8386557 ] [-0.8245226 ] [ 0.6824187 ] [ 2.2680156 ] [ 0.447467 ] [-0.15848498] [ 0.953767 ] [-0.7502552 ] [-1.0809566 ] [ 1.4185214 ]]]]; ov_res: [[[[ 0.37792856] [-0.43736795] [ 1.1396664 ] [ 1.8035803 ] [ 0.5884567 ] [-0.72878283] [-1.1188047 ] [ 0.8631539 ] [ 0.10500188] [ 0.8647555 ] [-2.1768932 ] [-0.7617271 ] [ 2.2998085 ] [-0.3308042 ]] [[-0.10202249] [-0.211866 ] [-0.24666454] [-0.36853164] [ 0.08592405] [ 0.54460007] [-1.3597872 ] [ 0.6313373 ] [-1.0768083 ] [-0.34932294] [ 0.07079557] [-2.3158486 ] [ 0.00711034] [ 0.0067766 ]] [[-1.5525919 ] [-0.02786177] [-0.8440236 ] [-1.466947 ] [-0.8386557 ] [-0.8245226 ] [ 0.6824187 ] [ 2.2680156 ] [ 0.447467 ] [-0.15848498] [ 0.953767 ] [-0.7502552 ] [-1.0809566 ] [ 1.4185214 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4, 5, 6) - mode:reflect - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5524.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="reflect"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2, 3, 4, 5, 6]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 5.90430021e-01 -6.60768449e-01 5.90430021e-01 ... -5.38980603e-01 9.01276805e-03 -3.12323608e-02] [-1.58472151e-01 7.44761705e-01 -1.58472151e-01 ... -1.57872367e+00 1.62295055e+00 2.23033234e-01] [ 1.37479913e+00 -7.84031451e-01 1.37479913e+00 ... 5.09402156e-01 -1.97941169e-01 8.45418572e-01] ... [ 2.14383110e-01 7.94326127e-01 2.14383110e-01 ... -7.00347483e-01 -1.02119458e+00 -1.77597627e-01] [-4.23849851e-01 -1.41846597e-01 -4.23849851e-01 ... -2.31672215e+00 1.14651334e+00 1.30357280e-01] [ 1.18052876e+00 -2.50755596e+00 1.18052876e+00 ... 1.89621538e-01 5.49218774e-01 -1.66440949e-01]] [[-1.32824779e+00 4.93711568e-02 -1.32824779e+00 ... 2.32976526e-01 -5.87069631e-01 -3.41650128e-01] [-7.74359643e-01 1.26387620e+00 -7.74359643e-01 ... 7.03546226e-01 -9.85639453e-01 -2.60299355e-01] [ 1.42028940e+00 -4.60950822e-01 1.42028940e+00 ... 1.66335534e-02 9.37290609e-01 5.42787731e-01] ... [ 5.12947559e-01 -3.69276851e-02 5.12947559e-01 ... 1.54848814e-01 -1.20562482e+00 3.01036417e-01] [ 3.66714760e-03 1.26073331e-01 3.66714760e-03 ... 5.78624308e-02 -4.63332742e-01 -6.91592872e-01] [ 1.38065958e+00 3.07986345e-02 1.38065958e+00 ... 1.52422786e+00 -3.44130635e-01 1.07636988e+00]] [[-4.75055099e-01 8.29482853e-01 -4.75055099e-01 ... 1.23488462e+00 -1.91361165e+00 2.39532813e-02] [-7.01215744e-01 -1.30694973e+00 -7.01215744e-01 ... -8.42687964e-01 -1.48589206e+00 -1.96372318e+00] [-3.18449408e-01 2.31872725e+00 -3.18449408e-01 ... 
-9.31562483e-02 2.40559745e+00 1.12684691e+00] ... [ 1.52828538e+00 -1.08873677e+00 1.52828538e+00 ... -8.08505177e-01 -7.33914316e-01 -1.05497205e+00] [-1.73865974e-01 -3.85686755e-01 -1.73865974e-01 ... -3.78936231e-02 -2.47770858e+00 -1.62310287e-01] [ 2.80886024e-01 -3.84189427e-01 2.80886024e-01 ... -9.56642747e-01 8.27925682e-01 8.09201479e-01]] ... [[-6.70356035e-01 -6.39287114e-01 -6.70356035e-01 ... -4.51424986e-01 5.05484603e-02 -1.79031873e+00] [ 1.04704750e+00 2.56294340e-01 1.04704750e+00 ... -1.93002665e+00 -3.00021499e-01 -1.58757374e-01] [ 4.39426273e-01 -1.37551403e+00 4.39426273e-01 ... 9.12957370e-01 -9.26889658e-01 4.35718924e-01] ... [ 3.03820282e-01 1.03263986e+00 3.03820282e-01 ... 3.32241058e-01 1.00747752e+00 -2.83827513e-01] [-2.41752553e+00 -9.17380333e-01 -2.41752553e+00 ... -3.82416129e-01 -1.22723162e+00 5.97765684e-01] [ 6.75051689e-01 4.68922332e-02 6.75051689e-01 ... -1.16037078e-01 -8.09431136e-01 9.29027140e-01]] [[ 6.86739922e-01 -2.76417613e-01 6.86739922e-01 ... 4.09941494e-01 -4.10991997e-01 1.76543534e+00] [-2.28450134e-01 7.82285631e-02 -2.28450134e-01 ... 3.67497116e-01 2.23007703e+00 -1.05718344e-01] [-1.02775657e+00 -1.21841514e+00 -1.02775657e+00 ... -1.69607258e+00 -6.04521155e-01 3.66914034e-01] ... [-8.73684049e-01 -1.27641153e+00 -8.73684049e-01 ... 1.08641706e-01 -3.93416323e-02 1.74252892e+00] [ 2.13959157e-01 9.57485199e-01 2.13959157e-01 ... -5.78915894e-01 6.33423924e-01 5.37211411e-02] [ 5.83706498e-01 -2.77564740e+00 5.83706498e-01 ... 1.98284769e+00 -7.43598819e-01 -1.10665560e+00]] [[-4.66517299e-01 -8.07597861e-03 -4.66517299e-01 ... -1.08869958e+00 1.02569342e+00 -4.23331410e-01] [ 3.98332566e-01 1.28276694e+00 3.98332566e-01 ... 6.18538797e-01 -6.17311358e-01 -9.87808704e-01] [-9.83294368e-01 1.65654913e-01 -9.83294368e-01 ... -1.57351708e+00 -1.92800057e+00 -3.72316986e-01] ... [-2.00472927e+00 1.70102906e+00 -2.00472927e+00 ... 
-3.71601075e-01 1.00291109e+00 1.81876957e+00] [ 7.41045117e-01 5.28961062e-01 7.41045117e-01 ... 6.82548732e-02 1.49636519e+00 1.63388610e+00] [-4.37215745e-01 1.18215442e-01 -4.37215745e-01 ... -1.64227486e+00 8.80724609e-01 -9.51730430e-01]]] [[[ 3.08275253e-01 -7.91383684e-01 3.08275253e-01 ... -2.14403182e-01 1.75508231e-01 -4.59423542e-01] [ 1.20403767e+00 1.05619681e+00 1.20403767e+00 ... 1.84047371e-01 -9.74338055e-02 2.33654425e-01] [ 3.15345228e-01 1.84274340e+00 3.15345228e-01 ... 4.67218429e-01 -1.87874222e+00 1.38899553e+00] ... [ 5.45120418e-01 -8.48649085e-01 5.45120418e-01 ... 4.86300766e-01 6.98191643e-01 1.00181711e+00] [ 5.97356915e-01 -7.95288205e-01 5.97356915e-01 ... 3.82434726e-01 4.25436348e-01 -1.62953508e+00] [ 7.04789400e-01 7.58360773e-02 7.04789400e-01 ... -2.42323828e+00 -1.14465475e+00 -1.94593060e+00]] [[-1.35589945e+00 -7.60873914e-01 -1.35589945e+00 ... -4.87646759e-01 -1.49193513e+00 -2.67149180e-01] [-4.39163685e-01 6.05530143e-01 -4.39163685e-01 ... -5.37831485e-01 -7.81744897e-01 -5.97120881e-01] [-1.66116953e+00 1.48703873e+00 -1.66116953e+00 ... -7.05928683e-01 1.92580569e+00 1.67235482e+00] ... [-1.10470533e+00 4.75878537e-01 -1.10470533e+00 ... 4.84249681e-01 1.60586727e+00 -1.08706340e-01] [ 9.31197762e-01 -6.74250782e-01 9.31197762e-01 ... 6.07172668e-01 -1.13075221e+00 5.61780989e-01] [-6.97036624e-01 -1.45788717e+00 -6.97036624e-01 ... -4.93353546e-01 3.34437519e-01 -3.34660001e-02]] [[ 3.73114973e-01 1.92085409e+00 3.73114973e-01 ... -4.78693813e-01 -1.42756855e+00 6.53865993e-01] [ 1.01177931e+00 4.27327871e-01 1.01177931e+00 ... -1.99826908e+00 -1.87938750e-01 -4.22228903e-01] [-2.75098264e-01 3.54842603e-01 -2.75098264e-01 ... 2.45925993e-01 -1.64845824e+00 9.34724092e-01] ... [ 6.73458576e-01 6.21581711e-02 6.73458576e-01 ... -1.96150196e+00 -2.98888385e-01 4.97484475e-01] [-7.64664590e-01 8.00670758e-02 -7.64664590e-01 ... 
-1.11056924e+00 -5.30995965e-01 1.09445798e+00] [ 4.50285047e-01 4.57151771e-01 4.50285047e-01 ... -1.33861661e+00 -1.18578684e+00 -1.07678878e+00]] ... [[ 1.61359459e-01 1.51169682e+00 1.61359459e-01 ... 6.46684766e-01 1.76266551e-01 7.82390594e-01] [-6.52749717e-01 -3.00603479e-01 -6.52749717e-01 ... -1.57885119e-01 3.75609159e-01 1.15223479e+00] [-1.43690463e-02 -4.21069890e-01 -1.43690463e-02 ... 8.10388684e-01 1.19067717e+00 -2.17380196e-01] ... [-8.55826497e-01 -2.25913823e-02 -8.55826497e-01 ... 4.99177814e-01 -7.65931070e-01 -1.32043660e-01] [-1.63861394e-01 3.18659991e-01 -1.63861394e-01 ... -1.53676772e+00 -3.41394305e-01 8.53159606e-01] [ 2.79169768e-01 -3.66167814e-01 2.79169768e-01 ... -8.53148282e-01 -1.46408796e+00 -5.41332603e-01]] [[ 5.59209585e-01 8.19067597e-01 5.59209585e-01 ... -6.67557836e-01 3.22049528e-01 4.62268710e-01] [ 1.08015168e+00 1.54836679e+00 1.08015168e+00 ... -1.00308990e+00 -4.41766113e-01 -1.02006590e+00] [-1.34874916e+00 2.08421850e+00 -1.34874916e+00 ... 2.32853103e+00 2.11266780e+00 -5.55474341e-01] ... [ 1.51627183e+00 -1.89912856e+00 1.51627183e+00 ... 6.12574637e-01 -1.74724746e+00 6.87923491e-01] [-2.68701148e+00 -2.43110806e-01 -2.68701148e+00 ... -3.79552126e-01 6.22222543e-01 -5.65558262e-02] [ 1.08694530e+00 -6.54302418e-01 1.08694530e+00 ... 4.22721505e-01 -7.52802193e-01 4.56981599e-01]] [[ 8.34531784e-01 9.01838005e-01 8.34531784e-01 ... 4.54701841e-01 -1.67243314e+00 -1.83284092e+00] [-2.13269889e-01 -8.19865167e-02 -2.13269889e-01 ... -2.31770962e-01 1.11898124e+00 -2.68234879e-01] [-2.97950327e-01 -1.54923820e+00 -2.97950327e-01 ... -1.79819405e+00 -7.52046466e-01 1.20163888e-01] ... [-1.59320164e+00 -1.01424432e+00 -1.59320164e+00 ... -3.84295911e-01 -4.10990953e-01 5.62361419e-01] [ 7.88556874e-01 1.04680634e+00 7.88556874e-01 ... 1.30900073e+00 1.12487495e+00 1.13218367e+00] [ 7.47339249e-01 1.17802048e+00 7.47339249e-01 ... 
1.66470266e-03 -2.49897552e+00 -2.56018877e-01]]] [[[-1.53653121e+00 -1.62762091e-01 -1.53653121e+00 ... 4.03263181e-01 -3.85646641e-01 -1.48018169e+00] [-4.16148007e-01 1.99938014e-01 -4.16148007e-01 ... 8.55712414e-01 7.85628200e-01 1.21794724e+00] [-6.42577708e-01 -3.46928686e-01 -6.42577708e-01 ... 1.89402890e+00 9.42328095e-01 -1.92137551e+00] ... [ 1.11819887e+00 -1.42248166e+00 1.11819887e+00 ... -1.38084590e+00 8.95166457e-01 -2.40175515e-01] [ 1.26251805e+00 -3.53772849e-01 1.26251805e+00 ... 5.41212596e-02 -8.95303607e-01 6.84945211e-02] [-2.10206985e+00 -1.79553843e+00 -2.10206985e+00 ... 1.83086562e+00 -7.03664362e-01 -1.64601833e-01]] [[ 1.02703679e+00 -1.05179465e+00 1.02703679e+00 ... -2.45917127e-01 -7.47384131e-01 2.61873901e-01] [ 1.05337179e+00 -5.63938200e-01 1.05337179e+00 ... -5.19510806e-01 -1.64418411e+00 -5.05617917e-01] [ 7.82193542e-01 5.29428303e-01 7.82193542e-01 ... -1.05210125e+00 -6.24011099e-01 -1.61681843e+00] ... [-5.43355823e-01 1.25843465e+00 -5.43355823e-01 ... -7.02676356e-01 -8.63993704e-01 -7.67434716e-01] [-6.35058701e-01 5.37349880e-01 -6.35058701e-01 ... -9.41115692e-02 -1.28301501e+00 5.74394643e-01] [ 9.91331041e-01 9.62644935e-01 9.91331041e-01 ... 4.26167920e-02 6.12776518e-01 -1.99893928e+00]] [[-4.31457937e-01 -7.82538474e-01 -4.31457937e-01 ... -3.74524623e-01 3.45068544e-01 5.64618766e-01] [ 4.70247306e-03 -8.43211949e-01 4.70247306e-03 ... -1.45734817e-01 -3.76759380e-01 -1.60641026e+00] [ 1.16915606e-01 3.48915279e-01 1.16915606e-01 ... -6.93470120e-01 6.73472703e-01 -3.05065095e-01] ... [-2.12872863e+00 8.28550875e-01 -2.12872863e+00 ... -1.90303528e+00 1.13269913e+00 7.83548713e-01] [-2.20599604e+00 1.22490895e+00 -2.20599604e+00 ... 1.22954071e+00 -1.51980832e-01 1.12872708e+00] [-6.19704783e-01 1.69484407e-01 -6.19704783e-01 ... 7.15544283e-01 8.82810295e-01 -2.99725741e-01]] ... [[ 4.26994324e-01 -3.09997463e+00 4.26994324e-01 ... 
-8.33460748e-01 2.93753886e+00 4.31463122e-02] [ 3.42291802e-01 5.97816706e-01 3.42291802e-01 ... -1.05288839e+00 -1.15382767e+00 -1.00671925e-01] [-7.45214164e-01 -1.75781476e+00 -7.45214164e-01 ... 1.52931118e+00 -6.35024607e-01 -1.28589475e+00] ... [ 1.61777663e+00 1.74534857e-01 1.61777663e+00 ... 3.54840994e-01 7.42407501e-01 9.24402535e-01] [-7.23517239e-01 9.16274548e-01 -7.23517239e-01 ... 1.25346863e+00 7.47803628e-01 -1.35392380e+00] [-1.21061432e+00 -8.64645839e-01 -1.21061432e+00 ... 5.33985913e-01 -2.31300521e+00 -1.86064804e+00]] [[-2.90804267e-01 -1.93863428e+00 -2.90804267e-01 ... -2.88732201e-02 3.16224605e-01 -5.74911237e-01] [ 1.42451036e+00 5.43226779e-01 1.42451036e+00 ... -2.81161714e+00 1.38405454e+00 -1.19166839e+00] [-9.28780735e-01 -2.79675037e-01 -9.28780735e-01 ... -9.75541055e-01 5.63830376e-01 1.92866004e+00] ... [ 3.76307517e-01 -1.08866131e+00 3.76307517e-01 ... -7.48566091e-01 1.00668776e+00 -2.60251224e-01] [-4.02160645e-01 -7.70142674e-01 -4.02160645e-01 ... 9.43632722e-01 1.22387290e+00 1.11661673e+00] [-5.02340972e-01 -2.21505195e-01 -5.02340972e-01 ... -5.54639578e-01 1.15321541e+00 -1.18803120e+00]] [[ 3.01351130e-01 -5.04731834e-02 3.01351130e-01 ... -1.04451843e-01 3.23279709e-01 4.54094410e-01] [-2.42051077e+00 2.76230663e-01 -2.42051077e+00 ... 3.01597655e-01 5.35066687e-02 -1.43297002e-01] [-1.45861816e+00 6.46507561e-01 -1.45861816e+00 ... 1.16652071e+00 -1.25509763e+00 1.22652903e-01] ... [-9.14821550e-02 1.56081796e+00 -9.14821550e-02 ... 1.00408900e+00 1.16840437e-01 1.13813967e-01] [-1.25785232e+00 9.36377466e-01 -1.25785232e+00 ... -2.12063909e-01 -4.96568233e-01 4.84086633e-01] [-7.31057823e-01 6.71212792e-01 -7.31057823e-01 ... -4.01360869e-01 -4.09861207e-01 3.88687283e-01]]]]]; ov_res: [[[[[ 5.90430021e-01 -6.60768449e-01 5.90430021e-01 ... -5.38980603e-01 9.01276805e-03 -3.12323608e-02] [-1.58472151e-01 7.44761705e-01 -1.58472151e-01 ... 
-1.57872367e+00 1.62295055e+00 2.23033234e-01] [ 1.37479913e+00 -7.84031451e-01 1.37479913e+00 ... 5.09402156e-01 -1.97941169e-01 8.45418572e-01] ... [ 2.14383110e-01 7.94326127e-01 2.14383110e-01 ... -7.00347483e-01 -1.02119458e+00 -1.77597627e-01] [-4.23849851e-01 -1.41846597e-01 -4.23849851e-01 ... -2.31672215e+00 1.14651334e+00 1.30357280e-01] [ 1.18052876e+00 -2.50755596e+00 1.18052876e+00 ... 1.89621538e-01 5.49218774e-01 -1.66440949e-01]] [[-1.32824779e+00 4.93711568e-02 -1.32824779e+00 ... 2.32976526e-01 -5.87069631e-01 -3.41650128e-01] [-7.74359643e-01 1.26387620e+00 -7.74359643e-01 ... 7.03546226e-01 -9.85639453e-01 -2.60299355e-01] [ 1.42028940e+00 -4.60950822e-01 1.42028940e+00 ... 1.66335534e-02 9.37290609e-01 5.42787731e-01] ... [ 5.12947559e-01 -3.69276851e-02 5.12947559e-01 ... 1.54848814e-01 -1.20562482e+00 3.01036417e-01] [ 3.66714760e-03 1.26073331e-01 3.66714760e-03 ... 5.78624308e-02 -4.63332742e-01 -6.91592872e-01] [ 1.38065958e+00 3.07986345e-02 1.38065958e+00 ... 1.52422786e+00 -3.44130635e-01 1.07636988e+00]] [[-4.75055099e-01 8.29482853e-01 -4.75055099e-01 ... 1.23488462e+00 -1.91361165e+00 2.39532813e-02] [-7.01215744e-01 -1.30694973e+00 -7.01215744e-01 ... -8.42687964e-01 -1.48589206e+00 -1.96372318e+00] [-3.18449408e-01 2.31872725e+00 -3.18449408e-01 ... -9.31562483e-02 2.40559745e+00 1.12684691e+00] ... [ 1.52828538e+00 -1.08873677e+00 1.52828538e+00 ... -8.08505177e-01 -7.33914316e-01 -1.05497205e+00] [-1.73865974e-01 -3.85686755e-01 -1.73865974e-01 ... -3.78936231e-02 -2.47770858e+00 -1.62310287e-01] [ 2.80886024e-01 -3.84189427e-01 2.80886024e-01 ... -9.56642747e-01 8.27925682e-01 8.09201479e-01]] ... [[-6.70356035e-01 -6.39287114e-01 -6.70356035e-01 ... -4.51424986e-01 5.05484603e-02 -1.79031873e+00] [ 1.04704750e+00 2.56294340e-01 1.04704750e+00 ... -1.93002665e+00 -3.00021499e-01 -1.58757374e-01] [ 4.39426273e-01 -1.37551403e+00 4.39426273e-01 ... 9.12957370e-01 -9.26889658e-01 4.35718924e-01] ... 
[ 3.03820282e-01 1.03263986e+00 3.03820282e-01 ... 3.32241058e-01 1.00747752e+00 -2.83827513e-01] [-2.41752553e+00 -9.17380333e-01 -2.41752553e+00 ... -3.82416129e-01 -1.22723162e+00 5.97765684e-01] [ 6.75051689e-01 4.68922332e-02 6.75051689e-01 ... -1.16037078e-01 -8.09431136e-01 9.29027140e-01]] [[ 6.86739922e-01 -2.76417613e-01 6.86739922e-01 ... 4.09941494e-01 -4.10991997e-01 1.76543534e+00] [-2.28450134e-01 7.82285631e-02 -2.28450134e-01 ... 3.67497116e-01 2.23007703e+00 -1.05718344e-01] [-1.02775657e+00 -1.21841514e+00 -1.02775657e+00 ... -1.69607258e+00 -6.04521155e-01 3.66914034e-01] ... [-8.73684049e-01 -1.27641153e+00 -8.73684049e-01 ... 1.08641706e-01 -3.93416323e-02 1.74252892e+00] [ 2.13959157e-01 9.57485199e-01 2.13959157e-01 ... -5.78915894e-01 6.33423924e-01 5.37211411e-02] [ 5.83706498e-01 -2.77564740e+00 5.83706498e-01 ... 1.98284769e+00 -7.43598819e-01 -1.10665560e+00]] [[-4.66517299e-01 -8.07597861e-03 -4.66517299e-01 ... -1.08869958e+00 1.02569342e+00 -4.23331410e-01] [ 3.98332566e-01 1.28276694e+00 3.98332566e-01 ... 6.18538797e-01 -6.17311358e-01 -9.87808704e-01] [-9.83294368e-01 1.65654913e-01 -9.83294368e-01 ... -1.57351708e+00 -1.92800057e+00 -3.72316986e-01] ... [-2.00472927e+00 1.70102906e+00 -2.00472927e+00 ... -3.71601075e-01 1.00291109e+00 1.81876957e+00] [ 7.41045117e-01 5.28961062e-01 7.41045117e-01 ... 6.82548732e-02 1.49636519e+00 1.63388610e+00] [-4.37215745e-01 1.18215442e-01 -4.37215745e-01 ... -1.64227486e+00 8.80724609e-01 -9.51730430e-01]]] [[[ 3.08275253e-01 -7.91383684e-01 3.08275253e-01 ... -2.14403182e-01 1.75508231e-01 -4.59423542e-01] [ 1.20403767e+00 1.05619681e+00 1.20403767e+00 ... 1.84047371e-01 -9.74338055e-02 2.33654425e-01] [ 3.15345228e-01 1.84274340e+00 3.15345228e-01 ... 4.67218429e-01 -1.87874222e+00 1.38899553e+00] ... [ 5.45120418e-01 -8.48649085e-01 5.45120418e-01 ... 4.86300766e-01 6.98191643e-01 1.00181711e+00] [ 5.97356915e-01 -7.95288205e-01 5.97356915e-01 ... 
3.82434726e-01 4.25436348e-01 -1.62953508e+00] [ 7.04789400e-01 7.58360773e-02 7.04789400e-01 ... -2.42323828e+00 -1.14465475e+00 -1.94593060e+00]] [[-1.35589945e+00 -7.60873914e-01 -1.35589945e+00 ... -4.87646759e-01 -1.49193513e+00 -2.67149180e-01] [-4.39163685e-01 6.05530143e-01 -4.39163685e-01 ... -5.37831485e-01 -7.81744897e-01 -5.97120881e-01] [-1.66116953e+00 1.48703873e+00 -1.66116953e+00 ... -7.05928683e-01 1.92580569e+00 1.67235482e+00] ... [-1.10470533e+00 4.75878537e-01 -1.10470533e+00 ... 4.84249681e-01 1.60586727e+00 -1.08706340e-01] [ 9.31197762e-01 -6.74250782e-01 9.31197762e-01 ... 6.07172668e-01 -1.13075221e+00 5.61780989e-01] [-6.97036624e-01 -1.45788717e+00 -6.97036624e-01 ... -4.93353546e-01 3.34437519e-01 -3.34660001e-02]] [[ 3.73114973e-01 1.92085409e+00 3.73114973e-01 ... -4.78693813e-01 -1.42756855e+00 6.53865993e-01] [ 1.01177931e+00 4.27327871e-01 1.01177931e+00 ... -1.99826908e+00 -1.87938750e-01 -4.22228903e-01] [-2.75098264e-01 3.54842603e-01 -2.75098264e-01 ... 2.45925993e-01 -1.64845824e+00 9.34724092e-01] ... [ 6.73458576e-01 6.21581711e-02 6.73458576e-01 ... -1.96150196e+00 -2.98888385e-01 4.97484475e-01] [-7.64664590e-01 8.00670758e-02 -7.64664590e-01 ... -1.11056924e+00 -5.30995965e-01 1.09445798e+00] [ 4.50285047e-01 4.57151771e-01 4.50285047e-01 ... -1.33861661e+00 -1.18578684e+00 -1.07678878e+00]] ... [[ 1.61359459e-01 1.51169682e+00 1.61359459e-01 ... 6.46684766e-01 1.76266551e-01 7.82390594e-01] [-6.52749717e-01 -3.00603479e-01 -6.52749717e-01 ... -1.57885119e-01 3.75609159e-01 1.15223479e+00] [-1.43690463e-02 -4.21069890e-01 -1.43690463e-02 ... 8.10388684e-01 1.19067717e+00 -2.17380196e-01] ... [-8.55826497e-01 -2.25913823e-02 -8.55826497e-01 ... 4.99177814e-01 -7.65931070e-01 -1.32043660e-01] [-1.63861394e-01 3.18659991e-01 -1.63861394e-01 ... -1.53676772e+00 -3.41394305e-01 8.53159606e-01] [ 2.79169768e-01 -3.66167814e-01 2.79169768e-01 ... 
-8.53148282e-01 -1.46408796e+00 -5.41332603e-01]] [[ 5.59209585e-01 8.19067597e-01 5.59209585e-01 ... -6.67557836e-01 3.22049528e-01 4.62268710e-01] [ 1.08015168e+00 1.54836679e+00 1.08015168e+00 ... -1.00308990e+00 -4.41766113e-01 -1.02006590e+00] [-1.34874916e+00 2.08421850e+00 -1.34874916e+00 ... 2.32853103e+00 2.11266780e+00 -5.55474341e-01] ... [ 1.51627183e+00 -1.89912856e+00 1.51627183e+00 ... 6.12574637e-01 -1.74724746e+00 6.87923491e-01] [-2.68701148e+00 -2.43110806e-01 -2.68701148e+00 ... -3.79552126e-01 6.22222543e-01 -5.65558262e-02] [ 1.08694530e+00 -6.54302418e-01 1.08694530e+00 ... 4.22721505e-01 -7.52802193e-01 4.56981599e-01]] [[ 8.34531784e-01 9.01838005e-01 8.34531784e-01 ... 4.54701841e-01 -1.67243314e+00 -1.83284092e+00] [-2.13269889e-01 -8.19865167e-02 -2.13269889e-01 ... -2.31770962e-01 1.11898124e+00 -2.68234879e-01] [-2.97950327e-01 -1.54923820e+00 -2.97950327e-01 ... -1.79819405e+00 -7.52046466e-01 1.20163888e-01] ... [-1.59320164e+00 -1.01424432e+00 -1.59320164e+00 ... -3.84295911e-01 -4.10990953e-01 5.62361419e-01] [ 7.88556874e-01 1.04680634e+00 7.88556874e-01 ... 1.30900073e+00 1.12487495e+00 1.13218367e+00] [ 7.47339249e-01 1.17802048e+00 7.47339249e-01 ... 1.66470266e-03 -2.49897552e+00 -2.56018877e-01]]] [[[-1.53653121e+00 -1.62762091e-01 -1.53653121e+00 ... 4.03263181e-01 -3.85646641e-01 -1.48018169e+00] [-4.16148007e-01 1.99938014e-01 -4.16148007e-01 ... 8.55712414e-01 7.85628200e-01 1.21794724e+00] [-6.42577708e-01 -3.46928686e-01 -6.42577708e-01 ... 1.89402890e+00 9.42328095e-01 -1.92137551e+00] ... [ 1.11819887e+00 -1.42248166e+00 1.11819887e+00 ... -1.38084590e+00 8.95166457e-01 -2.40175515e-01] [ 1.26251805e+00 -3.53772849e-01 1.26251805e+00 ... 5.41212596e-02 -8.95303607e-01 6.84945211e-02] [-2.10206985e+00 -1.79553843e+00 -2.10206985e+00 ... 1.83086562e+00 -7.03664362e-01 -1.64601833e-01]] [[ 1.02703679e+00 -1.05179465e+00 1.02703679e+00 ... 
-2.45917127e-01 -7.47384131e-01 2.61873901e-01] [ 1.05337179e+00 -5.63938200e-01 1.05337179e+00 ... -5.19510806e-01 -1.64418411e+00 -5.05617917e-01] [ 7.82193542e-01 5.29428303e-01 7.82193542e-01 ... -1.05210125e+00 -6.24011099e-01 -1.61681843e+00] ... [-5.43355823e-01 1.25843465e+00 -5.43355823e-01 ... -7.02676356e-01 -8.63993704e-01 -7.67434716e-01] [-6.35058701e-01 5.37349880e-01 -6.35058701e-01 ... -9.41115692e-02 -1.28301501e+00 5.74394643e-01] [ 9.91331041e-01 9.62644935e-01 9.91331041e-01 ... 4.26167920e-02 6.12776518e-01 -1.99893928e+00]] [[-4.31457937e-01 -7.82538474e-01 -4.31457937e-01 ... -3.74524623e-01 3.45068544e-01 5.64618766e-01] [ 4.70247306e-03 -8.43211949e-01 4.70247306e-03 ... -1.45734817e-01 -3.76759380e-01 -1.60641026e+00] [ 1.16915606e-01 3.48915279e-01 1.16915606e-01 ... -6.93470120e-01 6.73472703e-01 -3.05065095e-01] ... [-2.12872863e+00 8.28550875e-01 -2.12872863e+00 ... -1.90303528e+00 1.13269913e+00 7.83548713e-01] [-2.20599604e+00 1.22490895e+00 -2.20599604e+00 ... 1.22954071e+00 -1.51980832e-01 1.12872708e+00] [-6.19704783e-01 1.69484407e-01 -6.19704783e-01 ... 7.15544283e-01 8.82810295e-01 -2.99725741e-01]] ... [[ 4.26994324e-01 -3.09997463e+00 4.26994324e-01 ... -8.33460748e-01 2.93753886e+00 4.31463122e-02] [ 3.42291802e-01 5.97816706e-01 3.42291802e-01 ... -1.05288839e+00 -1.15382767e+00 -1.00671925e-01] [-7.45214164e-01 -1.75781476e+00 -7.45214164e-01 ... 1.52931118e+00 -6.35024607e-01 -1.28589475e+00] ... [ 1.61777663e+00 1.74534857e-01 1.61777663e+00 ... 3.54840994e-01 7.42407501e-01 9.24402535e-01] [-7.23517239e-01 9.16274548e-01 -7.23517239e-01 ... 1.25346863e+00 7.47803628e-01 -1.35392380e+00] [-1.21061432e+00 -8.64645839e-01 -1.21061432e+00 ... 5.33985913e-01 -2.31300521e+00 -1.86064804e+00]] [[-2.90804267e-01 -1.93863428e+00 -2.90804267e-01 ... -2.88732201e-02 3.16224605e-01 -5.74911237e-01] [ 1.42451036e+00 5.43226779e-01 1.42451036e+00 ... 
-2.81161714e+00 1.38405454e+00 -1.19166839e+00] [-9.28780735e-01 -2.79675037e-01 -9.28780735e-01 ... -9.75541055e-01 5.63830376e-01 1.92866004e+00] ... [ 3.76307517e-01 -1.08866131e+00 3.76307517e-01 ... -7.48566091e-01 1.00668776e+00 -2.60251224e-01] [-4.02160645e-01 -7.70142674e-01 -4.02160645e-01 ... 9.43632722e-01 1.22387290e+00 1.11661673e+00] [-5.02340972e-01 -2.21505195e-01 -5.02340972e-01 ... -5.54639578e-01 1.15321541e+00 -1.18803120e+00]] [[ 3.01351130e-01 -5.04731834e-02 3.01351130e-01 ... -1.04451843e-01 3.23279709e-01 4.54094410e-01] [-2.42051077e+00 2.76230663e-01 -2.42051077e+00 ... 3.01597655e-01 5.35066687e-02 -1.43297002e-01] [-1.45861816e+00 6.46507561e-01 -1.45861816e+00 ... 1.16652071e+00 -1.25509763e+00 1.22652903e-01] ... [-9.14821550e-02 1.56081796e+00 -9.14821550e-02 ... 1.00408900e+00 1.16840437e-01 1.13813967e-01] [-1.25785232e+00 9.36377466e-01 -1.25785232e+00 ... -2.12063909e-01 -4.96568233e-01 4.84086633e-01] [-7.31057823e-01 6.71212792e-01 -7.31057823e-01 ... -4.01360869e-01 -4.09861207e-01 3.88687283e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 1) - mode:reflect - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5527.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="reflect"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 1]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[-1.47188321e-01 -1.66945457e-01 -1.47188321e-01 ... -4.88983452e-01 -1.33425033e+00 -2.70702153e-01] [ 1.54545188e+00 1.13510048e+00 1.54545188e+00 ... -2.20520003e-03 -2.88504601e-01 -1.34180057e+00] [ 1.00956297e+00 1.20543158e+00 1.00956297e+00 ... 2.69057488e+00 1.93126619e-01 3.81356746e-01] ... [-4.97660637e-01 -7.94765532e-01 -4.97660637e-01 ... 1.33041844e-01 9.32778060e-01 2.48217836e-01] [-9.95480895e-01 6.62713110e-01 -9.95480895e-01 ... 1.57732368e-01 -3.40432763e-01 9.05307174e-01] [ 1.60174239e+00 -6.43368721e-01 1.60174239e+00 ... 1.17042136e+00 -8.00008535e-01 -1.97453007e-01]] [[-6.65337980e-01 -1.24771215e-01 -6.65337980e-01 ... -5.36349893e-01 -2.31685966e-01 2.02818942e+00] [ 3.07738215e-01 -1.40513599e+00 3.07738215e-01 ... 5.14957786e-01 -1.08660960e+00 4.20325488e-01] [-5.96348405e-01 8.18587601e-01 -5.96348405e-01 ... 4.66556340e-01 -1.98553845e-01 -6.58998430e-01] ... [ 1.59980786e+00 4.49787855e-01 1.59980786e+00 ... 3.52072954e-01 2.16828704e+00 -1.52682102e+00] [-1.01488363e-03 5.56319237e-01 -1.01488363e-03 ... 3.71853530e-01 1.85283411e+00 5.38294554e-01] [-1.32992402e-01 3.68285090e-01 -1.32992402e-01 ... -3.90799850e-01 -8.35082114e-01 2.17920616e-02]] [[-1.51918638e+00 -4.08979058e-01 -1.51918638e+00 ... -9.05651510e-01 8.70753288e-01 -3.75763208e-01] [-1.77891469e+00 1.92327094e+00 -1.77891469e+00 ... -6.04209185e-01 7.16586828e-01 7.88764417e-01] [-6.52075469e-01 1.28386915e+00 -6.52075469e-01 ... 
5.22495151e-01 -1.63052887e-01 7.65316546e-01] ... [ 1.85334116e-01 -6.17592335e-02 1.85334116e-01 ... -4.73767787e-01 6.84902012e-01 -1.48660347e-01] [-1.23572695e+00 -1.40800983e-01 -1.23572695e+00 ... 1.10024124e-01 -2.31061864e+00 1.59499556e-01] [ 9.14313555e-01 -6.67587817e-01 9.14313555e-01 ... -1.02415323e-01 -3.91286284e-01 -7.47204423e-01]] ... [[-9.70376551e-01 -5.60074091e-01 -9.70376551e-01 ... -1.09832251e+00 -2.46047512e-01 -5.93870617e-02] [ 1.35513425e-01 -6.13211870e-01 1.35513425e-01 ... 3.82441938e-01 -1.19128931e+00 -2.54003525e-01] [ 2.42301207e-02 3.59171122e-01 2.42301207e-02 ... 2.77318209e-01 -4.27301228e-01 -1.98775515e-01] ... [-1.46816814e+00 -1.27640867e+00 -1.46816814e+00 ... -1.02506816e-01 -3.06720793e-01 1.20696020e+00] [-8.47724438e-01 -6.84521720e-02 -8.47724438e-01 ... -2.37099096e-01 6.11708462e-02 1.21034145e+00] [ 2.15253448e+00 4.12306517e-01 2.15253448e+00 ... 3.57662648e-01 4.09441233e-01 2.17818403e+00]] [[-4.06557590e-01 -1.16688919e+00 -4.06557590e-01 ... -7.02719986e-01 3.35824013e-01 -7.77237356e-01] [-2.12527052e-01 -9.45453763e-01 -2.12527052e-01 ... -3.71330649e-01 5.70772663e-02 4.89175349e-01] [-4.06701654e-01 -4.59734768e-01 -4.06701654e-01 ... 7.09533274e-01 -1.25188363e+00 -3.49009573e-01] ... [ 9.66002762e-01 -3.93965334e-01 9.66002762e-01 ... -4.00855422e-01 -3.12895566e-01 4.22539413e-01] [ 1.20515454e+00 -1.31149399e+00 1.20515454e+00 ... -3.63181829e-02 1.70675302e+00 -2.87737936e-01] [ 8.19736838e-01 -5.00868440e-01 8.19736838e-01 ... -6.42697692e-01 5.00603437e-01 1.98157817e-01]] [[-9.70376551e-01 -5.60074091e-01 -9.70376551e-01 ... -1.09832251e+00 -2.46047512e-01 -5.93870617e-02] [ 1.35513425e-01 -6.13211870e-01 1.35513425e-01 ... 3.82441938e-01 -1.19128931e+00 -2.54003525e-01] [ 2.42301207e-02 3.59171122e-01 2.42301207e-02 ... 2.77318209e-01 -4.27301228e-01 -1.98775515e-01] ... [-1.46816814e+00 -1.27640867e+00 -1.46816814e+00 ... 
-1.02506816e-01 -3.06720793e-01 1.20696020e+00] [-8.47724438e-01 -6.84521720e-02 -8.47724438e-01 ... -2.37099096e-01 6.11708462e-02 1.21034145e+00] [ 2.15253448e+00 4.12306517e-01 2.15253448e+00 ... 3.57662648e-01 4.09441233e-01 2.17818403e+00]]] [[[-9.34877813e-01 -8.41643691e-01 -9.34877813e-01 ... 4.70130205e-01 -3.16743284e-01 -2.13066995e-01] [ 1.84923005e+00 1.68673861e+00 1.84923005e+00 ... -1.00859534e-02 1.09674573e+00 -1.74954295e+00] [-2.45245248e-01 5.01003087e-01 -2.45245248e-01 ... 1.64081848e+00 1.50183082e-01 -4.85350966e-01] ... [-7.92203307e-01 2.35744461e-01 -7.92203307e-01 ... 2.41190754e-02 2.57645220e-01 1.78015530e-01] [-2.43789852e-02 -2.77109563e-01 -2.43789852e-02 ... -1.61362007e-01 -1.30500913e+00 1.46279305e-01] [ 6.57590091e-01 -7.03583002e-01 6.57590091e-01 ... 1.51915801e+00 -8.56233299e-01 -8.52802992e-01]] [[-9.77730677e-02 1.22554815e+00 -9.77730677e-02 ... -4.71067905e-01 -1.00530243e+00 -1.03375030e+00] [-3.78436267e-01 -4.02009785e-02 -3.78436267e-01 ... -7.04241931e-01 2.20100284e+00 1.52784991e+00] [-1.13683701e+00 1.69636655e+00 -1.13683701e+00 ... -2.41500903e-02 8.88019621e-01 -9.66293573e-01] ... [ 1.12256145e+00 -8.04287136e-01 1.12256145e+00 ... -1.25510025e+00 1.77548155e-01 -6.46944940e-01] [ 1.06152885e-01 -4.85962220e-02 1.06152885e-01 ... -8.60911667e-01 3.91751587e-01 -6.60007954e-01] [-7.10775375e-01 2.10568404e+00 -7.10775375e-01 ... 2.51718760e+00 -1.60629702e+00 -6.72821462e-01]] [[ 2.29263306e+00 3.04729909e-01 2.29263306e+00 ... -6.63128018e-01 1.27920508e+00 1.03909230e+00] [ 1.01694965e+00 1.61423850e+00 1.01694965e+00 ... 7.47584999e-01 1.74963665e+00 1.42315641e-01] [-1.11535692e+00 -2.75271082e+00 -1.11535692e+00 ... -2.03610882e-02 -1.47189939e+00 3.70597601e-01] ... [ 5.12988418e-02 1.29765165e+00 5.12988418e-02 ... 6.31418288e-01 -7.77890086e-01 -3.45129311e-01] [ 9.16801766e-02 -1.30947280e+00 9.16801766e-02 ... 
1.35135603e+00 1.97794288e-01 1.47484345e-02] [-1.43529510e+00 -8.70132372e-02 -1.43529510e+00 ... 1.23534489e+00 -2.34918213e+00 -4.13227350e-01]] ... [[-8.96850586e-01 6.97639763e-01 -8.96850586e-01 ... -1.58582664e+00 8.84036541e-01 4.75292712e-01] [-7.39408731e-01 4.52384710e-01 -7.39408731e-01 ... 5.46401381e-01 -5.61013818e-01 6.91380501e-02] [ 1.15919483e+00 -1.55724585e+00 1.15919483e+00 ... 9.72625852e-01 -6.20099064e-03 -7.50205338e-01] ... [-7.10781634e-01 3.34642053e-01 -7.10781634e-01 ... -8.04562211e-01 6.94828257e-02 -9.90864158e-01] [ 3.23351949e-01 -9.68392611e-01 3.23351949e-01 ... -4.41131800e-01 1.25634408e+00 4.84728217e-01] [ 8.62644613e-01 -7.16466606e-01 8.62644613e-01 ... -1.61649418e+00 -1.83126378e+00 -9.27514493e-01]] [[ 1.01263654e+00 -5.95349312e-01 1.01263654e+00 ... 1.30484331e+00 -1.62235188e+00 1.66546535e-02] [ 4.13280874e-01 5.31982780e-01 4.13280874e-01 ... -2.21598521e-01 -2.15622693e-01 -3.83587122e-01] [-4.79850322e-01 -2.51430011e+00 -4.79850322e-01 ... 1.08805187e-01 4.72383410e-01 -2.32509923e+00] ... [ 1.78662610e+00 3.52254212e-01 1.78662610e+00 ... 2.05653206e-01 1.52100354e-01 -4.77723598e-01] [ 1.79128063e+00 2.68831348e+00 1.79128063e+00 ... -7.54521638e-02 5.83166718e-01 -1.36900532e+00] [ 1.21804667e+00 -1.30123413e+00 1.21804667e+00 ... 9.50513422e-01 1.08955300e+00 -8.35651755e-01]] [[-8.96850586e-01 6.97639763e-01 -8.96850586e-01 ... -1.58582664e+00 8.84036541e-01 4.75292712e-01] [-7.39408731e-01 4.52384710e-01 -7.39408731e-01 ... 5.46401381e-01 -5.61013818e-01 6.91380501e-02] [ 1.15919483e+00 -1.55724585e+00 1.15919483e+00 ... 9.72625852e-01 -6.20099064e-03 -7.50205338e-01] ... [-7.10781634e-01 3.34642053e-01 -7.10781634e-01 ... -8.04562211e-01 6.94828257e-02 -9.90864158e-01] [ 3.23351949e-01 -9.68392611e-01 3.23351949e-01 ... -4.41131800e-01 1.25634408e+00 4.84728217e-01] [ 8.62644613e-01 -7.16466606e-01 8.62644613e-01 ... 
-1.61649418e+00 -1.83126378e+00 -9.27514493e-01]]] [[[ 1.81109524e+00 -1.46945906e+00 1.81109524e+00 ... 4.13699478e-01 -7.88736045e-01 -7.72856772e-02] [-5.24725378e-01 -1.11278701e+00 -5.24725378e-01 ... 4.25350994e-01 3.21106642e-01 3.52655858e-01] [ 3.23565394e-01 3.89664143e-01 3.23565394e-01 ... -2.09209061e+00 7.72990525e-01 1.65524256e+00] ... [-1.36002815e+00 3.29685479e-01 -1.36002815e+00 ... -1.51520759e-01 9.10692990e-01 -5.37766874e-01] [-3.71627152e-01 7.32975483e-01 -3.71627152e-01 ... 1.69492733e+00 -1.47331692e-02 -5.13505161e-01] [-7.58343399e-01 -4.32967544e-01 -7.58343399e-01 ... 1.52235293e+00 2.36535382e+00 -6.08226538e-01]] [[-8.52423728e-01 -1.33317137e+00 -8.52423728e-01 ... 2.26084888e-02 -1.38402152e+00 -3.40870857e-01] [ 1.02179050e+00 -3.19863343e+00 1.02179050e+00 ... 1.59661210e+00 -6.05385363e-01 2.70663291e-01] [ 4.59582657e-01 -4.45258379e-01 4.59582657e-01 ... 1.68618226e+00 -1.18550174e-01 -2.70794004e-01] ... [-8.80621612e-01 -1.15824319e-01 -8.80621612e-01 ... 1.56795010e-01 -2.43275076e-01 -4.63926971e-01] [ 9.64994669e-01 -9.35393691e-01 9.64994669e-01 ... -1.96018219e+00 -9.76115048e-01 -6.16661668e-01] [-3.95603836e-01 4.99087781e-01 -3.95603836e-01 ... 1.80354667e+00 5.81244707e-01 1.20823073e+00]] [[-1.35891736e+00 1.41674191e-01 -1.35891736e+00 ... -2.16674781e+00 -2.21436638e-02 9.45373058e-01] [ 1.41585279e+00 -1.77804410e+00 1.41585279e+00 ... 1.09908020e+00 3.23908567e-01 -1.04035830e+00] [ 1.39518380e+00 4.43184704e-01 1.39518380e+00 ... 1.45864439e+00 5.47947586e-02 2.69040674e-01] ... [ 1.16484618e+00 5.18044472e-01 1.16484618e+00 ... 1.65967596e+00 -5.45458496e-02 -9.46412012e-02] [-1.09208560e+00 -1.93238109e-01 -1.09208560e+00 ... 9.61017668e-01 -2.42941737e+00 -1.09686148e+00] [ 2.18564689e-01 -1.44472504e+00 2.18564689e-01 ... 4.36218947e-01 1.51634169e+00 5.53735077e-01]] ... [[-1.83543658e+00 1.47955668e+00 -1.83543658e+00 ... 
8.86311084e-02 7.09581137e-01 2.77132779e-01] [-4.40929502e-01 8.60776067e-01 -4.40929502e-01 ... -1.78810164e-01 7.76706040e-01 1.17084968e+00] [-2.98728019e-01 1.56141505e-01 -2.98728019e-01 ... -8.75531793e-01 -1.02732646e+00 7.65034854e-01] ... [ 2.34254766e+00 -2.18215153e-01 2.34254766e+00 ... 7.18544364e-01 2.69701868e-01 -1.02277660e+00] [-1.78162825e+00 -6.59296930e-01 -1.78162825e+00 ... -2.27119103e-01 -7.10893571e-01 4.01055545e-01] [-1.04602742e+00 -7.13216245e-01 -1.04602742e+00 ... -8.28261733e-01 3.96421582e-01 7.41328001e-01]] [[ 6.45119488e-01 1.33029974e+00 6.45119488e-01 ... -2.27748108e+00 7.27276564e-01 1.54754043e+00] [ 9.91001129e-02 -6.04383469e-01 9.91001129e-02 ... -1.17714798e+00 4.20028627e-01 9.06972229e-01] [ 8.69384944e-01 -9.93166149e-01 8.69384944e-01 ... -9.97973084e-02 -7.28455961e-01 7.21291065e-01] ... [ 1.60998476e+00 -3.45223188e-01 1.60998476e+00 ... 1.37801266e+00 -7.13959396e-01 7.13104725e-01] [-1.98117837e-01 -7.82433867e-01 -1.98117837e-01 ... -1.21621597e+00 -8.89712989e-01 1.70681703e+00] [ 4.21966374e-01 8.73217762e-01 4.21966374e-01 ... -1.08253348e+00 -1.06378055e+00 8.17631125e-01]] [[-1.83543658e+00 1.47955668e+00 -1.83543658e+00 ... 8.86311084e-02 7.09581137e-01 2.77132779e-01] [-4.40929502e-01 8.60776067e-01 -4.40929502e-01 ... -1.78810164e-01 7.76706040e-01 1.17084968e+00] [-2.98728019e-01 1.56141505e-01 -2.98728019e-01 ... -8.75531793e-01 -1.02732646e+00 7.65034854e-01] ... [ 2.34254766e+00 -2.18215153e-01 2.34254766e+00 ... 7.18544364e-01 2.69701868e-01 -1.02277660e+00] [-1.78162825e+00 -6.59296930e-01 -1.78162825e+00 ... -2.27119103e-01 -7.10893571e-01 4.01055545e-01] [-1.04602742e+00 -7.13216245e-01 -1.04602742e+00 ... -8.28261733e-01 3.96421582e-01 7.41328001e-01]]]]]; ov_res: [[[[[-1.47188321e-01 -1.66945457e-01 -1.47188321e-01 ... -4.88983452e-01 -1.33425033e+00 -2.70702153e-01] [ 1.54545188e+00 1.13510048e+00 1.54545188e+00 ... 
-2.20520003e-03 -2.88504601e-01 -1.34180057e+00] [ 1.00956297e+00 1.20543158e+00 1.00956297e+00 ... 2.69057488e+00 1.93126619e-01 3.81356746e-01] ... [-4.97660637e-01 -7.94765532e-01 -4.97660637e-01 ... 1.33041844e-01 9.32778060e-01 2.48217836e-01] [-9.95480895e-01 6.62713110e-01 -9.95480895e-01 ... 1.57732368e-01 -3.40432763e-01 9.05307174e-01] [ 1.60174239e+00 -6.43368721e-01 1.60174239e+00 ... 1.17042136e+00 -8.00008535e-01 -1.97453007e-01]] [[-6.65337980e-01 -1.24771215e-01 -6.65337980e-01 ... -5.36349893e-01 -2.31685966e-01 2.02818942e+00] [ 3.07738215e-01 -1.40513599e+00 3.07738215e-01 ... 5.14957786e-01 -1.08660960e+00 4.20325488e-01] [-5.96348405e-01 8.18587601e-01 -5.96348405e-01 ... 4.66556340e-01 -1.98553845e-01 -6.58998430e-01] ... [ 1.59980786e+00 4.49787855e-01 1.59980786e+00 ... 3.52072954e-01 2.16828704e+00 -1.52682102e+00] [-1.01488363e-03 5.56319237e-01 -1.01488363e-03 ... 3.71853530e-01 1.85283411e+00 5.38294554e-01] [-1.32992402e-01 3.68285090e-01 -1.32992402e-01 ... -3.90799850e-01 -8.35082114e-01 2.17920616e-02]] [[-1.51918638e+00 -4.08979058e-01 -1.51918638e+00 ... -9.05651510e-01 8.70753288e-01 -3.75763208e-01] [-1.77891469e+00 1.92327094e+00 -1.77891469e+00 ... -6.04209185e-01 7.16586828e-01 7.88764417e-01] [-6.52075469e-01 1.28386915e+00 -6.52075469e-01 ... 5.22495151e-01 -1.63052887e-01 7.65316546e-01] ... [ 1.85334116e-01 -6.17592335e-02 1.85334116e-01 ... -4.73767787e-01 6.84902012e-01 -1.48660347e-01] [-1.23572695e+00 -1.40800983e-01 -1.23572695e+00 ... 1.10024124e-01 -2.31061864e+00 1.59499556e-01] [ 9.14313555e-01 -6.67587817e-01 9.14313555e-01 ... -1.02415323e-01 -3.91286284e-01 -7.47204423e-01]] ... [[-9.70376551e-01 -5.60074091e-01 -9.70376551e-01 ... -1.09832251e+00 -2.46047512e-01 -5.93870617e-02] [ 1.35513425e-01 -6.13211870e-01 1.35513425e-01 ... 3.82441938e-01 -1.19128931e+00 -2.54003525e-01] [ 2.42301207e-02 3.59171122e-01 2.42301207e-02 ... 2.77318209e-01 -4.27301228e-01 -1.98775515e-01] ... 
[-1.46816814e+00 -1.27640867e+00 -1.46816814e+00 ... -1.02506816e-01 -3.06720793e-01 1.20696020e+00] [-8.47724438e-01 -6.84521720e-02 -8.47724438e-01 ... -2.37099096e-01 6.11708462e-02 1.21034145e+00] [ 2.15253448e+00 4.12306517e-01 2.15253448e+00 ... 3.57662648e-01 4.09441233e-01 2.17818403e+00]] [[-4.06557590e-01 -1.16688919e+00 -4.06557590e-01 ... -7.02719986e-01 3.35824013e-01 -7.77237356e-01] [-2.12527052e-01 -9.45453763e-01 -2.12527052e-01 ... -3.71330649e-01 5.70772663e-02 4.89175349e-01] [-4.06701654e-01 -4.59734768e-01 -4.06701654e-01 ... 7.09533274e-01 -1.25188363e+00 -3.49009573e-01] ... [ 9.66002762e-01 -3.93965334e-01 9.66002762e-01 ... -4.00855422e-01 -3.12895566e-01 4.22539413e-01] [ 1.20515454e+00 -1.31149399e+00 1.20515454e+00 ... -3.63181829e-02 1.70675302e+00 -2.87737936e-01] [ 8.19736838e-01 -5.00868440e-01 8.19736838e-01 ... -6.42697692e-01 5.00603437e-01 1.98157817e-01]] [[-9.70376551e-01 -5.60074091e-01 -9.70376551e-01 ... -1.09832251e+00 -2.46047512e-01 -5.93870617e-02] [ 1.35513425e-01 -6.13211870e-01 1.35513425e-01 ... 3.82441938e-01 -1.19128931e+00 -2.54003525e-01] [ 2.42301207e-02 3.59171122e-01 2.42301207e-02 ... 2.77318209e-01 -4.27301228e-01 -1.98775515e-01] ... [-1.46816814e+00 -1.27640867e+00 -1.46816814e+00 ... -1.02506816e-01 -3.06720793e-01 1.20696020e+00] [-8.47724438e-01 -6.84521720e-02 -8.47724438e-01 ... -2.37099096e-01 6.11708462e-02 1.21034145e+00] [ 2.15253448e+00 4.12306517e-01 2.15253448e+00 ... 3.57662648e-01 4.09441233e-01 2.17818403e+00]]] [[[-9.34877813e-01 -8.41643691e-01 -9.34877813e-01 ... 4.70130205e-01 -3.16743284e-01 -2.13066995e-01] [ 1.84923005e+00 1.68673861e+00 1.84923005e+00 ... -1.00859534e-02 1.09674573e+00 -1.74954295e+00] [-2.45245248e-01 5.01003087e-01 -2.45245248e-01 ... 1.64081848e+00 1.50183082e-01 -4.85350966e-01] ... [-7.92203307e-01 2.35744461e-01 -7.92203307e-01 ... 2.41190754e-02 2.57645220e-01 1.78015530e-01] [-2.43789852e-02 -2.77109563e-01 -2.43789852e-02 ... 
-1.61362007e-01 -1.30500913e+00 1.46279305e-01] [ 6.57590091e-01 -7.03583002e-01 6.57590091e-01 ... 1.51915801e+00 -8.56233299e-01 -8.52802992e-01]] [[-9.77730677e-02 1.22554815e+00 -9.77730677e-02 ... -4.71067905e-01 -1.00530243e+00 -1.03375030e+00] [-3.78436267e-01 -4.02009785e-02 -3.78436267e-01 ... -7.04241931e-01 2.20100284e+00 1.52784991e+00] [-1.13683701e+00 1.69636655e+00 -1.13683701e+00 ... -2.41500903e-02 8.88019621e-01 -9.66293573e-01] ... [ 1.12256145e+00 -8.04287136e-01 1.12256145e+00 ... -1.25510025e+00 1.77548155e-01 -6.46944940e-01] [ 1.06152885e-01 -4.85962220e-02 1.06152885e-01 ... -8.60911667e-01 3.91751587e-01 -6.60007954e-01] [-7.10775375e-01 2.10568404e+00 -7.10775375e-01 ... 2.51718760e+00 -1.60629702e+00 -6.72821462e-01]] [[ 2.29263306e+00 3.04729909e-01 2.29263306e+00 ... -6.63128018e-01 1.27920508e+00 1.03909230e+00] [ 1.01694965e+00 1.61423850e+00 1.01694965e+00 ... 7.47584999e-01 1.74963665e+00 1.42315641e-01] [-1.11535692e+00 -2.75271082e+00 -1.11535692e+00 ... -2.03610882e-02 -1.47189939e+00 3.70597601e-01] ... [ 5.12988418e-02 1.29765165e+00 5.12988418e-02 ... 6.31418288e-01 -7.77890086e-01 -3.45129311e-01] [ 9.16801766e-02 -1.30947280e+00 9.16801766e-02 ... 1.35135603e+00 1.97794288e-01 1.47484345e-02] [-1.43529510e+00 -8.70132372e-02 -1.43529510e+00 ... 1.23534489e+00 -2.34918213e+00 -4.13227350e-01]] ... [[-8.96850586e-01 6.97639763e-01 -8.96850586e-01 ... -1.58582664e+00 8.84036541e-01 4.75292712e-01] [-7.39408731e-01 4.52384710e-01 -7.39408731e-01 ... 5.46401381e-01 -5.61013818e-01 6.91380501e-02] [ 1.15919483e+00 -1.55724585e+00 1.15919483e+00 ... 9.72625852e-01 -6.20099064e-03 -7.50205338e-01] ... [-7.10781634e-01 3.34642053e-01 -7.10781634e-01 ... -8.04562211e-01 6.94828257e-02 -9.90864158e-01] [ 3.23351949e-01 -9.68392611e-01 3.23351949e-01 ... -4.41131800e-01 1.25634408e+00 4.84728217e-01] [ 8.62644613e-01 -7.16466606e-01 8.62644613e-01 ... 
-1.61649418e+00 -1.83126378e+00 -9.27514493e-01]] [[ 1.01263654e+00 -5.95349312e-01 1.01263654e+00 ... 1.30484331e+00 -1.62235188e+00 1.66546535e-02] [ 4.13280874e-01 5.31982780e-01 4.13280874e-01 ... -2.21598521e-01 -2.15622693e-01 -3.83587122e-01] [-4.79850322e-01 -2.51430011e+00 -4.79850322e-01 ... 1.08805187e-01 4.72383410e-01 -2.32509923e+00] ... [ 1.78662610e+00 3.52254212e-01 1.78662610e+00 ... 2.05653206e-01 1.52100354e-01 -4.77723598e-01] [ 1.79128063e+00 2.68831348e+00 1.79128063e+00 ... -7.54521638e-02 5.83166718e-01 -1.36900532e+00] [ 1.21804667e+00 -1.30123413e+00 1.21804667e+00 ... 9.50513422e-01 1.08955300e+00 -8.35651755e-01]] [[-8.96850586e-01 6.97639763e-01 -8.96850586e-01 ... -1.58582664e+00 8.84036541e-01 4.75292712e-01] [-7.39408731e-01 4.52384710e-01 -7.39408731e-01 ... 5.46401381e-01 -5.61013818e-01 6.91380501e-02] [ 1.15919483e+00 -1.55724585e+00 1.15919483e+00 ... 9.72625852e-01 -6.20099064e-03 -7.50205338e-01] ... [-7.10781634e-01 3.34642053e-01 -7.10781634e-01 ... -8.04562211e-01 6.94828257e-02 -9.90864158e-01] [ 3.23351949e-01 -9.68392611e-01 3.23351949e-01 ... -4.41131800e-01 1.25634408e+00 4.84728217e-01] [ 8.62644613e-01 -7.16466606e-01 8.62644613e-01 ... -1.61649418e+00 -1.83126378e+00 -9.27514493e-01]]] [[[ 1.81109524e+00 -1.46945906e+00 1.81109524e+00 ... 4.13699478e-01 -7.88736045e-01 -7.72856772e-02] [-5.24725378e-01 -1.11278701e+00 -5.24725378e-01 ... 4.25350994e-01 3.21106642e-01 3.52655858e-01] [ 3.23565394e-01 3.89664143e-01 3.23565394e-01 ... -2.09209061e+00 7.72990525e-01 1.65524256e+00] ... [-1.36002815e+00 3.29685479e-01 -1.36002815e+00 ... -1.51520759e-01 9.10692990e-01 -5.37766874e-01] [-3.71627152e-01 7.32975483e-01 -3.71627152e-01 ... 1.69492733e+00 -1.47331692e-02 -5.13505161e-01] [-7.58343399e-01 -4.32967544e-01 -7.58343399e-01 ... 1.52235293e+00 2.36535382e+00 -6.08226538e-01]] [[-8.52423728e-01 -1.33317137e+00 -8.52423728e-01 ... 
2.26084888e-02 -1.38402152e+00 -3.40870857e-01] [ 1.02179050e+00 -3.19863343e+00 1.02179050e+00 ... 1.59661210e+00 -6.05385363e-01 2.70663291e-01] [ 4.59582657e-01 -4.45258379e-01 4.59582657e-01 ... 1.68618226e+00 -1.18550174e-01 -2.70794004e-01] ... [-8.80621612e-01 -1.15824319e-01 -8.80621612e-01 ... 1.56795010e-01 -2.43275076e-01 -4.63926971e-01] [ 9.64994669e-01 -9.35393691e-01 9.64994669e-01 ... -1.96018219e+00 -9.76115048e-01 -6.16661668e-01] [-3.95603836e-01 4.99087781e-01 -3.95603836e-01 ... 1.80354667e+00 5.81244707e-01 1.20823073e+00]] [[-1.35891736e+00 1.41674191e-01 -1.35891736e+00 ... -2.16674781e+00 -2.21436638e-02 9.45373058e-01] [ 1.41585279e+00 -1.77804410e+00 1.41585279e+00 ... 1.09908020e+00 3.23908567e-01 -1.04035830e+00] [ 1.39518380e+00 4.43184704e-01 1.39518380e+00 ... 1.45864439e+00 5.47947586e-02 2.69040674e-01] ... [ 1.16484618e+00 5.18044472e-01 1.16484618e+00 ... 1.65967596e+00 -5.45458496e-02 -9.46412012e-02] [-1.09208560e+00 -1.93238109e-01 -1.09208560e+00 ... 9.61017668e-01 -2.42941737e+00 -1.09686148e+00] [ 2.18564689e-01 -1.44472504e+00 2.18564689e-01 ... 4.36218947e-01 1.51634169e+00 5.53735077e-01]] ... [[-1.83543658e+00 1.47955668e+00 -1.83543658e+00 ... 8.86311084e-02 7.09581137e-01 2.77132779e-01] [-4.40929502e-01 8.60776067e-01 -4.40929502e-01 ... -1.78810164e-01 7.76706040e-01 1.17084968e+00] [-2.98728019e-01 1.56141505e-01 -2.98728019e-01 ... -8.75531793e-01 -1.02732646e+00 7.65034854e-01] ... [ 2.34254766e+00 -2.18215153e-01 2.34254766e+00 ... 7.18544364e-01 2.69701868e-01 -1.02277660e+00] [-1.78162825e+00 -6.59296930e-01 -1.78162825e+00 ... -2.27119103e-01 -7.10893571e-01 4.01055545e-01] [-1.04602742e+00 -7.13216245e-01 -1.04602742e+00 ... -8.28261733e-01 3.96421582e-01 7.41328001e-01]] [[ 6.45119488e-01 1.33029974e+00 6.45119488e-01 ... -2.27748108e+00 7.27276564e-01 1.54754043e+00] [ 9.91001129e-02 -6.04383469e-01 9.91001129e-02 ... 
-1.17714798e+00 4.20028627e-01 9.06972229e-01] [ 8.69384944e-01 -9.93166149e-01 8.69384944e-01 ... -9.97973084e-02 -7.28455961e-01 7.21291065e-01] ... [ 1.60998476e+00 -3.45223188e-01 1.60998476e+00 ... 1.37801266e+00 -7.13959396e-01 7.13104725e-01] [-1.98117837e-01 -7.82433867e-01 -1.98117837e-01 ... -1.21621597e+00 -8.89712989e-01 1.70681703e+00] [ 4.21966374e-01 8.73217762e-01 4.21966374e-01 ... -1.08253348e+00 -1.06378055e+00 8.17631125e-01]] [[-1.83543658e+00 1.47955668e+00 -1.83543658e+00 ... 8.86311084e-02 7.09581137e-01 2.77132779e-01] [-4.40929502e-01 8.60776067e-01 -4.40929502e-01 ... -1.78810164e-01 7.76706040e-01 1.17084968e+00] [-2.98728019e-01 1.56141505e-01 -2.98728019e-01 ... -8.75531793e-01 -1.02732646e+00 7.65034854e-01] ... [ 2.34254766e+00 -2.18215153e-01 2.34254766e+00 ... 7.18544364e-01 2.69701868e-01 -1.02277660e+00] [-1.78162825e+00 -6.59296930e-01 -1.78162825e+00 ... -2.27119103e-01 -7.10893571e-01 4.01055545e-01] [-1.04602742e+00 -7.13216245e-01 -1.04602742e+00 ... -8.28261733e-01 3.96421582e-01 7.41328001e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 0) - mode:reflect - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5530.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="reflect"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[-1.3175386 0.6254664 -1.3175386 ... 1.1121497 -1.7860328 -0.99207276] [-0.4260405 0.85655373 -0.4260405 ... 0.3094832 0.46321455 -1.0973772 ] [-0.43526596 1.7113899 -0.43526596 ... 0.07893115 0.54713637 1.1850607 ] ... [ 1.7352276 0.27792287 1.7352276 ... 0.35581785 0.40807688 0.10423648] [ 0.7972566 -0.15787284 0.7972566 ... 0.539443 -0.743617 -1.0207502 ] [ 1.3456227 -0.702141 1.3456227 ... 0.7169761 0.2169123 -0.19745857]] [[-0.574363 -0.5003574 -0.574363 ... 2.087466 0.6164399 0.72725195] [ 1.3870074 0.5723431 1.3870074 ... 1.7346897 0.63486195 -1.8438555 ] [-0.05888663 0.06797386 -0.05888663 ... 0.47282588 -0.6823699 -0.76984566] ... [-0.22208935 0.20572777 -0.22208935 ... -1.2068931 0.65665054 -1.4799885 ] [-1.2042181 1.1423504 -1.2042181 ... 0.6721103 -0.5408168 0.6232742 ] [ 0.28238744 0.08764236 0.28238744 ... -1.7169046 1.2970108 2.4356582 ]] [[ 0.3417956 1.0382404 0.3417956 ... 2.1563592 0.48465228 -0.7019827 ] [ 0.39190736 0.9513708 0.39190736 ... 0.1325555 -3.016997 -0.3944253 ] [ 0.9382923 1.553821 0.9382923 ... 0.20455885 0.7606612 0.30447152] ... [ 2.5916827 0.957956 2.5916827 ... 0.59638846 1.3938668 0.91760665] [ 0.04120364 0.30679888 0.04120364 ... -2.4913406 0.14204477 -0.90599793] [ 0.5537778 0.9678094 0.5537778 ... 0.05612843 -0.21687548 0.24454723]] ... [[-0.40318432 0.36531186 -0.40318432 ... -0.15932775 -2.9908755 -1.9936311 ] [-0.32851487 1.5655094 -0.32851487 ... 0.0898739 1.6323246 -0.27700645] [ 1.1225823 -1.4512 1.1225823 ... 
0.58401847 -1.331896 -1.5427623 ] ... [ 0.60383403 -1.2351633 0.60383403 ... 1.0600681 -1.7219338 0.7294034 ] [-0.32689172 0.61056656 -0.32689172 ... -0.31247014 0.05578963 -0.48573995] [ 0.01580561 -0.80225223 0.01580561 ... 0.6740077 0.7538255 -1.7048078 ]] [[-1.6423227 1.3886114 -1.6423227 ... -0.14881495 -1.6438491 -0.7842549 ] [-0.37215906 0.07672508 -0.37215906 ... 0.293751 0.16701378 -0.6886567 ] [ 0.3748987 1.3340317 0.3748987 ... 1.2807795 -0.00429249 -1.8360698 ] ... [-1.1219563 -1.3068019 -1.1219563 ... 0.6360993 2.2461312 -0.42811042] [-0.7906762 0.29761803 -0.7906762 ... -0.08864368 -1.6119674 0.633945 ] [-1.3318995 0.68074185 -1.3318995 ... -0.20188615 -1.1859419 0.38841718]] [[-0.31808645 1.0163912 -0.31808645 ... -1.8431102 0.03216331 -0.12346006] [ 1.8700914 -0.8420262 1.8700914 ... -0.6499623 0.3134242 0.42680764] [-0.39480114 1.1718972 -0.39480114 ... -1.0075241 -0.9887401 -1.476626 ] ... [-0.50384414 -1.2582815 -0.50384414 ... -1.3505331 -1.0205898 1.4192386 ] [-0.36075664 0.9835888 -0.36075664 ... 0.45242736 -0.592431 -0.6022177 ] [-0.45064932 0.70744157 -0.45064932 ... 0.08996528 -0.6338575 -0.41599932]]] [[[ 1.8716457 0.7499405 1.8716457 ... -0.92059094 0.2264629 -1.0652237 ] [-1.9114059 -0.7690558 -1.9114059 ... -0.57738155 0.08076323 -1.1906462 ] [-1.136623 -1.6371182 -1.136623 ... 2.3841226 -2.1010375 0.08454936] ... [ 1.2541902 -0.6948551 1.2541902 ... 0.9058279 -0.52053463 0.39004272] [-1.7913032 1.0197023 -1.7913032 ... 0.47661653 -0.41683936 0.45426017] [-0.41034225 -0.6400945 -0.41034225 ... 0.11233196 -0.29310256 -1.1105584 ]] [[ 1.3218007 -0.5446374 1.3218007 ... -1.9319308 -1.1074201 0.3225124 ] [-0.63814104 0.5876441 -0.63814104 ... -0.08650747 -0.7292565 -0.42957905] [ 0.8834363 0.20299147 0.8834363 ... -1.9598515 0.11569387 -1.3092935 ] ... [ 0.95133835 0.6112289 0.95133835 ... 0.13511136 0.35905823 -0.8106637 ] [-0.8029831 -1.904934 -0.8029831 ... -0.07357137 -0.204313 -0.42496395] [ 0.8869196 0.5873203 0.8869196 ... 
-1.4428743 -0.73152846 0.37681895]] [[ 0.9077303 -0.93819106 0.9077303 ... 1.8743831 1.5263505 0.4639685 ] [ 2.3361838 -1.001436 2.3361838 ... 1.0423688 1.7484272 0.45400542] [-0.5974388 0.2736702 -0.5974388 ... -0.20400213 0.71815985 1.6229124 ] ... [ 0.34344164 -2.0023108 0.34344164 ... -2.0228016 -0.5806433 0.5357282 ] [-0.01484027 0.90311563 -0.01484027 ... 2.4126651 -0.9496385 -0.20402852] [ 0.17472735 1.0461437 0.17472735 ... 0.3872344 1.6690521 -0.772569 ]] ... [[-1.648966 0.95564127 -1.648966 ... -0.44746977 -2.9424555 0.2976121 ] [-1.1088065 -0.2941382 -1.1088065 ... 0.99079585 -0.246226 0.5467239 ] [ 0.46811864 -0.00907813 0.46811864 ... -1.546797 -1.3807653 -1.5414348 ] ... [ 2.3752651 -1.5221704 2.3752651 ... -0.41907865 0.08934841 -0.6262398 ] [-1.6347438 1.1508292 -1.6347438 ... -1.0606366 -1.562111 0.15024348] [ 1.1453595 -2.2862887 1.1453595 ... -0.73089504 -0.98179865 -1.4595068 ]] [[-1.8357702 0.08795539 -1.8357702 ... 0.19524372 -1.2672575 1.4268703 ] [ 1.5293896 -0.91914994 1.5293896 ... 1.773915 1.0117719 0.9206362 ] [-0.42167768 0.2168307 -0.42167768 ... -1.4767019 -0.27822128 0.5690863 ] ... [ 0.96553624 1.5275747 0.96553624 ... -0.33577317 0.20576926 0.7611405 ] [-0.6900169 2.048188 -0.6900169 ... -0.32667303 -1.4650869 0.6305381 ] [ 0.52474505 -0.52812946 0.52474505 ... -0.718533 -1.0141122 0.56568366]] [[-1.350073 1.9114188 -1.350073 ... 0.94231117 2.3700216 -0.7980447 ] [-0.34750277 0.9411464 -0.34750277 ... -1.3260719 -0.7352507 -0.96581477] [-0.00421535 -0.9962365 -0.00421535 ... 1.413337 -0.4833838 -0.36471838] ... [-1.1779015 -0.11819034 -1.1779015 ... -0.57160544 -1.5915323 1.4334973 ] [ 0.11660581 1.2089453 0.11660581 ... 0.29538867 0.585567 -2.018931 ] [-1.1426165 -0.31516856 -1.1426165 ... 0.44286036 -0.4012159 0.7462441 ]]] [[[-0.94142693 -0.8602972 -0.94142693 ... -0.18766694 1.8013213 -0.2821273 ] [-0.8028188 -1.0075405 -0.8028188 ... -0.9069884 0.5019006 -0.30637664] [-1.4029225 0.09817919 -1.4029225 ... 
-1.456534 -1.1640862 -1.5772921 ] ... [ 0.96130335 -0.29276577 0.96130335 ... 0.92211616 -1.4287899 0.26194492] [-1.0795319 0.42968032 -1.0795319 ... 0.2829522 1.9289951 0.73359895] [ 0.31414953 -0.2761825 0.31414953 ... 1.0531104 -1.6450702 -1.7518606 ]] [[ 0.28405532 -1.7927605 0.28405532 ... 0.15664078 0.2473489 0.97367 ] [ 0.70260954 -0.9139364 0.70260954 ... 0.5235489 -0.24821357 0.49973807] [ 0.31062838 0.5974198 0.31062838 ... 1.1541163 0.45509493 0.9368674 ] ... [-0.822655 -0.91343 -0.822655 ... -0.7824954 0.12398225 -0.4101491 ] [-1.5259434 0.9033628 -1.5259434 ... -0.84556234 -0.1489783 -1.5939993 ] [-0.8295245 -0.28756437 -0.8295245 ... -1.2465311 0.14331612 1.484193 ]] [[-0.76048344 -0.912999 -0.76048344 ... 0.64473146 0.22981627 -1.4657636 ] [-1.0835541 2.0336907 -1.0835541 ... -0.66520256 -1.943814 1.08309 ] [-0.17049313 -0.28617316 -0.17049313 ... 0.665908 -0.16956216 -0.13770524] ... [-0.13803342 0.6266507 -0.13803342 ... 0.31370527 -0.53411925 0.32349965] [-0.8724551 -0.0399984 -0.8724551 ... 1.6567522 -2.1495502 -0.26309112] [ 0.43420547 -0.40678257 0.43420547 ... 0.7692864 1.424314 -0.24066283]] ... [[ 0.08187737 0.32896635 0.08187737 ... -0.06537623 0.07496519 -1.544992 ] [ 1.65458 -0.15787475 1.65458 ... -0.5665541 2.31064 0.22172807] [ 0.6442191 -2.194293 0.6442191 ... -0.9896043 -1.0202714 -0.02006808] ... [ 1.8173333 1.082636 1.8173333 ... -0.04148509 -0.2443006 -1.8779455 ] [ 0.2731613 0.1467718 0.2731613 ... -1.156887 -0.3556696 -1.3951178 ] [-0.60448444 -0.27324373 -0.60448444 ... 0.5439297 -0.7819049 -0.2460171 ]] [[ 1.162872 -0.4562795 1.162872 ... 0.80216455 -0.6286099 1.5325638 ] [ 1.0710046 -1.15889 1.0710046 ... 0.06284533 -1.2041 -1.1397092 ] [-0.5012116 -0.69238234 -0.5012116 ... 1.4628007 1.3008159 -1.3846965 ] ... [-0.49540365 0.08774041 -0.49540365 ... -0.74516475 -0.214779 -0.8046489 ] [-0.15136293 -2.1042864 -0.15136293 ... -1.7771648 1.8463498 -1.1504188 ] [-0.21050781 -0.94656163 -0.21050781 ... 
-0.01485992 -2.2009788 0.09744668]] [[-1.4145713 -0.77709204 -1.4145713 ... 1.041287 -0.30048218 -1.373593 ] [-0.65722805 -0.6979848 -0.65722805 ... 0.06472369 0.9565341 -0.27881476] [ 1.366632 1.77086 1.366632 ... -0.5778094 0.58781165 0.15115373] ... [-0.02098376 -0.76798683 -0.02098376 ... 0.583936 -0.8394106 0.9317197 ] [-1.8170156 1.4639851 -1.8170156 ... -1.3590287 1.3696195 -0.28061315] [ 0.56576025 -0.56743944 0.56576025 ... -0.7430904 3.161161 -0.5014649 ]]]]]; ov_res: [[[[[-1.3175386 0.6254664 -1.3175386 ... 1.1121497 -1.7860328 -0.99207276] [-0.4260405 0.85655373 -0.4260405 ... 0.3094832 0.46321455 -1.0973772 ] [-0.43526596 1.7113899 -0.43526596 ... 0.07893115 0.54713637 1.1850607 ] ... [ 1.7352276 0.27792287 1.7352276 ... 0.35581785 0.40807688 0.10423648] [ 0.7972566 -0.15787284 0.7972566 ... 0.539443 -0.743617 -1.0207502 ] [ 1.3456227 -0.702141 1.3456227 ... 0.7169761 0.2169123 -0.19745857]] [[-0.574363 -0.5003574 -0.574363 ... 2.087466 0.6164399 0.72725195] [ 1.3870074 0.5723431 1.3870074 ... 1.7346897 0.63486195 -1.8438555 ] [-0.05888663 0.06797386 -0.05888663 ... 0.47282588 -0.6823699 -0.76984566] ... [-0.22208935 0.20572777 -0.22208935 ... -1.2068931 0.65665054 -1.4799885 ] [-1.2042181 1.1423504 -1.2042181 ... 0.6721103 -0.5408168 0.6232742 ] [ 0.28238744 0.08764236 0.28238744 ... -1.7169046 1.2970108 2.4356582 ]] [[ 0.3417956 1.0382404 0.3417956 ... 2.1563592 0.48465228 -0.7019827 ] [ 0.39190736 0.9513708 0.39190736 ... 0.1325555 -3.016997 -0.3944253 ] [ 0.9382923 1.553821 0.9382923 ... 0.20455885 0.7606612 0.30447152] ... [ 2.5916827 0.957956 2.5916827 ... 0.59638846 1.3938668 0.91760665] [ 0.04120364 0.30679888 0.04120364 ... -2.4913406 0.14204477 -0.90599793] [ 0.5537778 0.9678094 0.5537778 ... 0.05612843 -0.21687548 0.24454723]] ... [[-0.40318432 0.36531186 -0.40318432 ... -0.15932775 -2.9908755 -1.9936311 ] [-0.32851487 1.5655094 -0.32851487 ... 0.0898739 1.6323246 -0.27700645] [ 1.1225823 -1.4512 1.1225823 ... 
0.58401847 -1.331896 -1.5427623 ] ... [ 0.60383403 -1.2351633 0.60383403 ... 1.0600681 -1.7219338 0.7294034 ] [-0.32689172 0.61056656 -0.32689172 ... -0.31247014 0.05578963 -0.48573995] [ 0.01580561 -0.80225223 0.01580561 ... 0.6740077 0.7538255 -1.7048078 ]] [[-1.6423227 1.3886114 -1.6423227 ... -0.14881495 -1.6438491 -0.7842549 ] [-0.37215906 0.07672508 -0.37215906 ... 0.293751 0.16701378 -0.6886567 ] [ 0.3748987 1.3340317 0.3748987 ... 1.2807795 -0.00429249 -1.8360698 ] ... [-1.1219563 -1.3068019 -1.1219563 ... 0.6360993 2.2461312 -0.42811042] [-0.7906762 0.29761803 -0.7906762 ... -0.08864368 -1.6119674 0.633945 ] [-1.3318995 0.68074185 -1.3318995 ... -0.20188615 -1.1859419 0.38841718]] [[-0.31808645 1.0163912 -0.31808645 ... -1.8431102 0.03216331 -0.12346006] [ 1.8700914 -0.8420262 1.8700914 ... -0.6499623 0.3134242 0.42680764] [-0.39480114 1.1718972 -0.39480114 ... -1.0075241 -0.9887401 -1.476626 ] ... [-0.50384414 -1.2582815 -0.50384414 ... -1.3505331 -1.0205898 1.4192386 ] [-0.36075664 0.9835888 -0.36075664 ... 0.45242736 -0.592431 -0.6022177 ] [-0.45064932 0.70744157 -0.45064932 ... 0.08996528 -0.6338575 -0.41599932]]] [[[ 1.8716457 0.7499405 1.8716457 ... -0.92059094 0.2264629 -1.0652237 ] [-1.9114059 -0.7690558 -1.9114059 ... -0.57738155 0.08076323 -1.1906462 ] [-1.136623 -1.6371182 -1.136623 ... 2.3841226 -2.1010375 0.08454936] ... [ 1.2541902 -0.6948551 1.2541902 ... 0.9058279 -0.52053463 0.39004272] [-1.7913032 1.0197023 -1.7913032 ... 0.47661653 -0.41683936 0.45426017] [-0.41034225 -0.6400945 -0.41034225 ... 0.11233196 -0.29310256 -1.1105584 ]] [[ 1.3218007 -0.5446374 1.3218007 ... -1.9319308 -1.1074201 0.3225124 ] [-0.63814104 0.5876441 -0.63814104 ... -0.08650747 -0.7292565 -0.42957905] [ 0.8834363 0.20299147 0.8834363 ... -1.9598515 0.11569387 -1.3092935 ] ... [ 0.95133835 0.6112289 0.95133835 ... 0.13511136 0.35905823 -0.8106637 ] [-0.8029831 -1.904934 -0.8029831 ... -0.07357137 -0.204313 -0.42496395] [ 0.8869196 0.5873203 0.8869196 ... 
-1.4428743 -0.73152846 0.37681895]] [[ 0.9077303 -0.93819106 0.9077303 ... 1.8743831 1.5263505 0.4639685 ] [ 2.3361838 -1.001436 2.3361838 ... 1.0423688 1.7484272 0.45400542] [-0.5974388 0.2736702 -0.5974388 ... -0.20400213 0.71815985 1.6229124 ] ... [ 0.34344164 -2.0023108 0.34344164 ... -2.0228016 -0.5806433 0.5357282 ] [-0.01484027 0.90311563 -0.01484027 ... 2.4126651 -0.9496385 -0.20402852] [ 0.17472735 1.0461437 0.17472735 ... 0.3872344 1.6690521 -0.772569 ]] ... [[-1.648966 0.95564127 -1.648966 ... -0.44746977 -2.9424555 0.2976121 ] [-1.1088065 -0.2941382 -1.1088065 ... 0.99079585 -0.246226 0.5467239 ] [ 0.46811864 -0.00907813 0.46811864 ... -1.546797 -1.3807653 -1.5414348 ] ... [ 2.3752651 -1.5221704 2.3752651 ... -0.41907865 0.08934841 -0.6262398 ] [-1.6347438 1.1508292 -1.6347438 ... -1.0606366 -1.562111 0.15024348] [ 1.1453595 -2.2862887 1.1453595 ... -0.73089504 -0.98179865 -1.4595068 ]] [[-1.8357702 0.08795539 -1.8357702 ... 0.19524372 -1.2672575 1.4268703 ] [ 1.5293896 -0.91914994 1.5293896 ... 1.773915 1.0117719 0.9206362 ] [-0.42167768 0.2168307 -0.42167768 ... -1.4767019 -0.27822128 0.5690863 ] ... [ 0.96553624 1.5275747 0.96553624 ... -0.33577317 0.20576926 0.7611405 ] [-0.6900169 2.048188 -0.6900169 ... -0.32667303 -1.4650869 0.6305381 ] [ 0.52474505 -0.52812946 0.52474505 ... -0.718533 -1.0141122 0.56568366]] [[-1.350073 1.9114188 -1.350073 ... 0.94231117 2.3700216 -0.7980447 ] [-0.34750277 0.9411464 -0.34750277 ... -1.3260719 -0.7352507 -0.96581477] [-0.00421535 -0.9962365 -0.00421535 ... 1.413337 -0.4833838 -0.36471838] ... [-1.1779015 -0.11819034 -1.1779015 ... -0.57160544 -1.5915323 1.4334973 ] [ 0.11660581 1.2089453 0.11660581 ... 0.29538867 0.585567 -2.018931 ] [-1.1426165 -0.31516856 -1.1426165 ... 0.44286036 -0.4012159 0.7462441 ]]] [[[-0.94142693 -0.8602972 -0.94142693 ... -0.18766694 1.8013213 -0.2821273 ] [-0.8028188 -1.0075405 -0.8028188 ... -0.9069884 0.5019006 -0.30637664] [-1.4029225 0.09817919 -1.4029225 ... 
-1.456534 -1.1640862 -1.5772921 ] ... [ 0.96130335 -0.29276577 0.96130335 ... 0.92211616 -1.4287899 0.26194492] [-1.0795319 0.42968032 -1.0795319 ... 0.2829522 1.9289951 0.73359895] [ 0.31414953 -0.2761825 0.31414953 ... 1.0531104 -1.6450702 -1.7518606 ]] [[ 0.28405532 -1.7927605 0.28405532 ... 0.15664078 0.2473489 0.97367 ] [ 0.70260954 -0.9139364 0.70260954 ... 0.5235489 -0.24821357 0.49973807] [ 0.31062838 0.5974198 0.31062838 ... 1.1541163 0.45509493 0.9368674 ] ... [-0.822655 -0.91343 -0.822655 ... -0.7824954 0.12398225 -0.4101491 ] [-1.5259434 0.9033628 -1.5259434 ... -0.84556234 -0.1489783 -1.5939993 ] [-0.8295245 -0.28756437 -0.8295245 ... -1.2465311 0.14331612 1.484193 ]] [[-0.76048344 -0.912999 -0.76048344 ... 0.64473146 0.22981627 -1.4657636 ] [-1.0835541 2.0336907 -1.0835541 ... -0.66520256 -1.943814 1.08309 ] [-0.17049313 -0.28617316 -0.17049313 ... 0.665908 -0.16956216 -0.13770524] ... [-0.13803342 0.6266507 -0.13803342 ... 0.31370527 -0.53411925 0.32349965] [-0.8724551 -0.0399984 -0.8724551 ... 1.6567522 -2.1495502 -0.26309112] [ 0.43420547 -0.40678257 0.43420547 ... 0.7692864 1.424314 -0.24066283]] ... [[ 0.08187737 0.32896635 0.08187737 ... -0.06537623 0.07496519 -1.544992 ] [ 1.65458 -0.15787475 1.65458 ... -0.5665541 2.31064 0.22172807] [ 0.6442191 -2.194293 0.6442191 ... -0.9896043 -1.0202714 -0.02006808] ... [ 1.8173333 1.082636 1.8173333 ... -0.04148509 -0.2443006 -1.8779455 ] [ 0.2731613 0.1467718 0.2731613 ... -1.156887 -0.3556696 -1.3951178 ] [-0.60448444 -0.27324373 -0.60448444 ... 0.5439297 -0.7819049 -0.2460171 ]] [[ 1.162872 -0.4562795 1.162872 ... 0.80216455 -0.6286099 1.5325638 ] [ 1.0710046 -1.15889 1.0710046 ... 0.06284533 -1.2041 -1.1397092 ] [-0.5012116 -0.69238234 -0.5012116 ... 1.4628007 1.3008159 -1.3846965 ] ... [-0.49540365 0.08774041 -0.49540365 ... -0.74516475 -0.214779 -0.8046489 ] [-0.15136293 -2.1042864 -0.15136293 ... -1.7771648 1.8463498 -1.1504188 ] [-0.21050781 -0.94656163 -0.21050781 ... 
-0.01485992 -2.2009788 0.09744668]] [[-1.4145713 -0.77709204 -1.4145713 ... 1.041287 -0.30048218 -1.373593 ] [-0.65722805 -0.6979848 -0.65722805 ... 0.06472369 0.9565341 -0.27881476] [ 1.366632 1.77086 1.366632 ... -0.5778094 0.58781165 0.15115373] ... [-0.02098376 -0.76798683 -0.02098376 ... 0.583936 -0.8394106 0.9317197 ] [-1.8170156 1.4639851 -1.8170156 ... -1.3590287 1.3696195 -0.28061315] [ 0.56576025 -0.56743944 0.56576025 ... -0.7430904 3.161161 -0.5014649 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(0, 0, 0, 0, 0, 0) - mode:reflect - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5533.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="reflect"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[0, 0, 0, 0, 0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 0.02205282 -0.6212775 -0.1621549 ... -0.3245551 0.2851067 -0.46912053] [ 1.5229394 0.32784915 -0.8009884 ... -1.289486 0.4332118 -0.04403728] [ 0.58691144 1.2109429 0.54310113 ... -0.5735843 0.45342836 -0.65624326] ... [ 0.7156483 0.39571768 0.49379042 ... -2.3795383 -0.88468343 2.7471652 ] [-0.628024 2.1331217 -1.9500699 ... -1.4140955 0.12119365 2.0553415 ] [-0.8053978 -0.42887318 -1.1597219 ... -1.2178833 0.38165075 0.833857 ]] [[ 0.22955129 0.7008824 0.07011477 ... -0.15870687 0.50442874 3.0888445 ] [-0.0505615 1.0629256 -0.39274105 ... 0.82708627 0.5913351 -1.1975839 ] [ 1.388087 -0.7969127 0.35643712 ... 0.1402281 0.5712005 -0.3566168 ] ... [-0.05407041 1.935888 0.9541659 ... 0.59355056 1.315469 -0.75977486] [-1.2631477 -1.0454719 0.34341246 ... 0.19034071 -1.2141063 -0.3579548 ] [ 0.44216388 0.06497159 0.22962604 ... -0.7926063 -0.0540443 1.7269771 ]] [[ 0.31589338 1.2946234 -0.7373233 ... 0.31550944 -0.5906682 2.0114546 ] [-0.04033015 0.28969777 0.4489232 ... 0.21627681 2.6486678 0.33698913] [-0.32275164 1.2300502 -0.6242539 ... 0.09078202 1.2322582 1.4114579 ] ... [-1.2429186 -1.0438098 -1.8725357 ... -0.7375276 0.3296259 -0.7174487 ] [ 0.23147473 -0.04069976 -0.4307629 ... -0.13738322 -0.76046866 -0.09831341] [ 0.5083336 0.92859083 0.68330216 ... -0.43358967 1.6749628 -0.5581488 ]] ... [[ 0.5792496 0.6347134 2.8066194 ... -1.3432544 0.3950501 0.9032266 ] [-0.9135863 0.94901866 0.2915126 ... 
-0.0375851 -0.76542187 -0.17842548] [ 0.04294497 -0.23080689 0.8593805 ... 0.1982478 0.92875916 0.16847686] ... [-1.5189937 1.249523 1.7391804 ... -0.29543883 -1.2298019 -0.3068198 ] [ 0.2004927 -0.7794374 -0.00760346 ... -0.04062232 -0.16879481 1.7837298 ] [-0.6074645 -0.3748282 -0.8734993 ... -0.22637191 0.5584948 -0.80340296]] [[-0.60794663 0.9286486 0.43437517 ... 0.51428103 0.10921264 0.3286438 ] [ 0.41357696 -0.63595647 1.7806475 ... 2.0067828 -0.37729552 -0.97545433] [ 0.17804107 0.01498763 -0.4778618 ... -0.7101426 -0.3590327 -0.59166735] ... [-1.1662915 -0.2919406 -0.04629185 ... 0.9072391 -0.88617784 -0.12669028] [-1.1627328 0.8129392 -1.57537 ... -0.71181047 1.3514402 -0.15124893] [ 2.921003 0.31206954 -0.6110642 ... -1.7939268 -0.84192383 0.4860742 ]] [[-0.15966372 -0.6529619 0.7068642 ... -0.8988833 -0.00881412 -0.60369956] [ 1.3089229 1.045295 0.7980282 ... 1.5272547 -0.63374925 0.8662913 ] [ 0.5627091 -0.21479475 -0.3013664 ... -0.58085674 -0.2856365 1.8750381 ] ... [ 0.15670343 0.6907899 1.080957 ... -0.08457727 -0.602868 0.95211816] [-0.24224135 0.925279 -1.2647505 ... 0.29728925 0.61091286 -1.7900565 ] [ 0.06410812 1.3307519 -0.22464631 ... 0.41774285 -0.3816462 -0.71176434]]] [[[-0.5689347 -0.502523 1.6707458 ... -0.23034962 -1.0103589 -0.31596705] [ 0.70772046 1.661389 -1.0238581 ... -0.769771 0.86867255 0.4771379 ] [-1.107654 0.48524985 -1.7437234 ... -0.5294894 1.0468849 -0.05861264] ... [-2.7127042 2.7865183 0.38639423 ... 0.01646743 0.52535284 -0.09760617] [ 0.5356017 -0.28234288 0.21900816 ... 0.41965044 -0.43870696 0.918383 ] [ 0.22485664 0.9600687 -0.49216855 ... -0.73559535 -0.02530494 -0.3585667 ]] [[ 0.3768464 -0.6794991 -0.5791224 ... 2.1860628 0.8974064 -1.2941225 ] [ 0.60561216 0.25435844 0.6196432 ... 0.2969153 -0.32024246 0.10079979] [ 1.1949741 0.43127155 0.83229023 ... 0.9851699 0.93126 -0.00566062] ... [-1.0505261 -0.52356267 1.1024593 ... 1.6362306 -0.4191644 1.2708164 ] [-0.9403861 0.72757787 -0.17315213 ... 
-0.76333386 0.4816934 0.05376362] [-0.40093824 0.28533444 -0.115711 ... 1.071186 0.8879564 -1.3183985 ]] [[-1.7577852 0.7565658 -0.10900172 ... -0.37718084 0.6259866 0.41088805] [-0.3720033 -1.0365971 -1.1518794 ... 0.06933604 -0.62285984 0.02767771] [-1.0897158 1.2307227 -0.5168734 ... -0.88282156 -0.9914261 -0.69925475] ... [-0.73721886 -0.723219 0.27213508 ... -0.23061657 1.2279797 -0.56576955] [-1.1056055 0.5009212 -0.9189098 ... 0.15541103 1.769364 0.54065466] [-1.1733968 -0.5794676 0.01537082 ... 0.5679569 -0.39802417 0.3454619 ]] ... [[ 1.8631716 -1.7232155 0.7839451 ... -1.1062733 0.13010044 0.5674451 ] [ 1.153927 0.41517416 -1.1707793 ... 1.3107445 -0.86562943 -0.25189734] [-0.16566007 -0.8881029 -0.45164967 ... -0.44881305 0.48229682 -0.3923623 ] ... [-0.5143631 -1.3054422 -0.25133356 ... -0.21043259 0.89461035 -1.8450613 ] [ 0.39974064 -0.7855033 -0.10066222 ... 1.2888312 -1.9765925 -0.63214594] [-0.57470256 2.1647427 0.5012384 ... -0.37391636 -0.8495466 1.4384099 ]] [[-0.20298785 0.09043577 1.0737529 ... 0.8428718 -1.5859954 0.03129538] [-1.0445274 -0.02802191 -2.0207446 ... 1.2770773 0.357159 1.3059765 ] [-0.4719153 -0.25868934 -0.26705095 ... 1.2540237 0.60921174 2.1343186 ] ... [ 0.09596765 0.85907394 1.8256398 ... 0.01860699 0.10923391 -1.3131977 ] [ 0.54171073 -1.76188 -3.1117249 ... -2.3422925 0.81752706 0.94342786] [ 1.7705327 1.6097596 -0.13612686 ... -1.405668 -2.0475657 0.94202113]] [[-0.12028761 -1.3806943 1.6795713 ... 1.2037734 2.044555 -1.5957499 ] [ 0.11585703 -1.0834403 -0.27309728 ... 2.5651772 -1.2401851 -2.5278196 ] [ 1.7264055 1.3352485 -0.75815016 ... 0.89463 -1.4768598 0.81388324] ... [ 0.7812744 -0.4485062 -1.0701215 ... 0.61324245 2.1579154 0.97574043] [-0.20454833 -0.08256809 -1.1248732 ... 0.31260988 0.7926804 0.43821386] [ 0.8935376 0.6277509 1.4018432 ... -0.53848666 1.6297376 0.62079316]]] [[[-0.9842114 0.76932544 -2.0399954 ... 0.8458361 -0.29441956 -0.82905847] [ 0.8389231 0.6448237 -0.32713237 ... 
-0.80525035 -0.5050083 -0.22055551] [ 0.19752307 -0.4792974 -0.37943676 ... -1.3668975 0.6242512 1.0631912 ] ... [ 0.47511458 -0.969085 0.6427878 ... -0.9454787 0.49767876 1.8510072 ] [-0.41541117 -0.36021265 0.66990495 ... 2.1530116 -0.33208776 -0.9993341 ] [-0.42496383 -0.04061733 1.576663 ... 1.865553 2.1474435 0.44263527]] [[-0.6560635 0.2361292 -1.4376775 ... 0.5225981 0.80032575 -1.8220745 ] [ 1.9758351 -0.6882551 0.03986887 ... -0.44066995 0.9011751 -0.09149702] [-1.2630674 -1.3458403 0.47536543 ... -2.8559139 1.042361 0.96197766] ... [ 2.170581 0.34460786 -1.9630523 ... 0.32237834 0.43326733 -0.7973209 ] [ 0.1069649 -0.589283 -0.2902853 ... 0.9413547 0.44219097 1.0424887 ] [ 0.22423775 1.3216048 -0.31567505 ... 0.30046064 1.5112132 0.05155411]] [[-0.33658454 2.0547411 0.19214281 ... -0.8518987 -0.9350933 -0.391418 ] [-0.7258112 -0.81618196 0.03665226 ... -2.81886 0.83022225 1.4333014 ] [ 2.067453 -0.59254044 -0.08703218 ... -1.452996 -0.65012217 1.4913007 ] ... [ 0.8138213 2.1784365 0.29597405 ... 1.0201794 0.10485898 -0.06547429] [-0.02808571 -0.4559368 1.4346813 ... 0.45350847 1.8744643 -0.39543104] [-1.1113094 0.66388637 1.1318697 ... 0.33653054 -1.4114041 1.078493 ]] ... [[ 0.39057085 -0.6921353 -0.41187155 ... 0.8474395 -0.18090542 0.794471 ] [-0.4197677 -0.85777456 -0.9445221 ... -0.61476463 0.7739116 -1.0229045 ] [ 0.0686442 -0.8940967 0.22762686 ... 0.8921071 -0.3899228 -1.2064861 ] ... [ 0.34737462 0.3125766 1.0790967 ... -2.8386767 0.510496 2.163456 ] [ 0.08582344 -1.1945279 0.01372897 ... -2.5721657 0.14869307 0.06857813] [ 0.81729496 -0.39953014 1.7032152 ... 0.96770066 0.97393775 0.4099856 ]] [[-0.7754996 -0.01206773 1.6638699 ... 1.4946121 1.1957558 -1.2913922 ] [-0.66612726 -0.4846004 -0.64598835 ... 1.5571811 -0.5931279 -0.9768201 ] [-0.46144655 0.5537884 -0.01078537 ... -0.4232965 2.3976767 -0.3988725 ] ... [ 0.7905513 0.7248115 1.2249002 ... -0.03822948 0.08263204 0.72337824] [ 1.5003867 0.05819308 0.01280229 ... 
-0.02662294 -0.10576688 0.13692367] [-0.76309 -0.8697184 -1.1275414 ... 0.2556223 0.07755467 1.1582322 ]] [[-0.16229782 -0.804219 0.10689672 ... -1.6434877 -1.1166719 2.2821834 ] [ 0.40045205 -0.52863663 0.27093074 ... 2.0210545 -0.6629579 1.6662438 ] [ 1.7692405 0.9712191 -0.8286488 ... -1.3211602 0.6461295 -0.3560136 ] ... [ 0.6982522 -0.36618525 2.6794207 ... 2.4598062 0.6220151 0.06731309] [-2.1066859 -1.916825 0.534041 ... 0.10809253 1.4500283 -0.925113 ] [ 0.5384869 -0.18806323 0.85608226 ... 0.44397882 0.5544081 1.1764468 ]]]]]; ov_res: [[[[[ 0.02205282 -0.6212775 -0.1621549 ... -0.3245551 0.2851067 -0.46912053] [ 1.5229394 0.32784915 -0.8009884 ... -1.289486 0.4332118 -0.04403728] [ 0.58691144 1.2109429 0.54310113 ... -0.5735843 0.45342836 -0.65624326] ... [ 0.7156483 0.39571768 0.49379042 ... -2.3795383 -0.88468343 2.7471652 ] [-0.628024 2.1331217 -1.9500699 ... -1.4140955 0.12119365 2.0553415 ] [-0.8053978 -0.42887318 -1.1597219 ... -1.2178833 0.38165075 0.833857 ]] [[ 0.22955129 0.7008824 0.07011477 ... -0.15870687 0.50442874 3.0888445 ] [-0.0505615 1.0629256 -0.39274105 ... 0.82708627 0.5913351 -1.1975839 ] [ 1.388087 -0.7969127 0.35643712 ... 0.1402281 0.5712005 -0.3566168 ] ... [-0.05407041 1.935888 0.9541659 ... 0.59355056 1.315469 -0.75977486] [-1.2631477 -1.0454719 0.34341246 ... 0.19034071 -1.2141063 -0.3579548 ] [ 0.44216388 0.06497159 0.22962604 ... -0.7926063 -0.0540443 1.7269771 ]] [[ 0.31589338 1.2946234 -0.7373233 ... 0.31550944 -0.5906682 2.0114546 ] [-0.04033015 0.28969777 0.4489232 ... 0.21627681 2.6486678 0.33698913] [-0.32275164 1.2300502 -0.6242539 ... 0.09078202 1.2322582 1.4114579 ] ... [-1.2429186 -1.0438098 -1.8725357 ... -0.7375276 0.3296259 -0.7174487 ] [ 0.23147473 -0.04069976 -0.4307629 ... -0.13738322 -0.76046866 -0.09831341] [ 0.5083336 0.92859083 0.68330216 ... -0.43358967 1.6749628 -0.5581488 ]] ... [[ 0.5792496 0.6347134 2.8066194 ... -1.3432544 0.3950501 0.9032266 ] [-0.9135863 0.94901866 0.2915126 ... 
-0.0375851 -0.76542187 -0.17842548] [ 0.04294497 -0.23080689 0.8593805 ... 0.1982478 0.92875916 0.16847686] ... [-1.5189937 1.249523 1.7391804 ... -0.29543883 -1.2298019 -0.3068198 ] [ 0.2004927 -0.7794374 -0.00760346 ... -0.04062232 -0.16879481 1.7837298 ] [-0.6074645 -0.3748282 -0.8734993 ... -0.22637191 0.5584948 -0.80340296]] [[-0.60794663 0.9286486 0.43437517 ... 0.51428103 0.10921264 0.3286438 ] [ 0.41357696 -0.63595647 1.7806475 ... 2.0067828 -0.37729552 -0.97545433] [ 0.17804107 0.01498763 -0.4778618 ... -0.7101426 -0.3590327 -0.59166735] ... [-1.1662915 -0.2919406 -0.04629185 ... 0.9072391 -0.88617784 -0.12669028] [-1.1627328 0.8129392 -1.57537 ... -0.71181047 1.3514402 -0.15124893] [ 2.921003 0.31206954 -0.6110642 ... -1.7939268 -0.84192383 0.4860742 ]] [[-0.15966372 -0.6529619 0.7068642 ... -0.8988833 -0.00881412 -0.60369956] [ 1.3089229 1.045295 0.7980282 ... 1.5272547 -0.63374925 0.8662913 ] [ 0.5627091 -0.21479475 -0.3013664 ... -0.58085674 -0.2856365 1.8750381 ] ... [ 0.15670343 0.6907899 1.080957 ... -0.08457727 -0.602868 0.95211816] [-0.24224135 0.925279 -1.2647505 ... 0.29728925 0.61091286 -1.7900565 ] [ 0.06410812 1.3307519 -0.22464631 ... 0.41774285 -0.3816462 -0.71176434]]] [[[-0.5689347 -0.502523 1.6707458 ... -0.23034962 -1.0103589 -0.31596705] [ 0.70772046 1.661389 -1.0238581 ... -0.769771 0.86867255 0.4771379 ] [-1.107654 0.48524985 -1.7437234 ... -0.5294894 1.0468849 -0.05861264] ... [-2.7127042 2.7865183 0.38639423 ... 0.01646743 0.52535284 -0.09760617] [ 0.5356017 -0.28234288 0.21900816 ... 0.41965044 -0.43870696 0.918383 ] [ 0.22485664 0.9600687 -0.49216855 ... -0.73559535 -0.02530494 -0.3585667 ]] [[ 0.3768464 -0.6794991 -0.5791224 ... 2.1860628 0.8974064 -1.2941225 ] [ 0.60561216 0.25435844 0.6196432 ... 0.2969153 -0.32024246 0.10079979] [ 1.1949741 0.43127155 0.83229023 ... 0.9851699 0.93126 -0.00566062] ... [-1.0505261 -0.52356267 1.1024593 ... 1.6362306 -0.4191644 1.2708164 ] [-0.9403861 0.72757787 -0.17315213 ... 
-0.76333386 0.4816934 0.05376362] [-0.40093824 0.28533444 -0.115711 ... 1.071186 0.8879564 -1.3183985 ]] [[-1.7577852 0.7565658 -0.10900172 ... -0.37718084 0.6259866 0.41088805] [-0.3720033 -1.0365971 -1.1518794 ... 0.06933604 -0.62285984 0.02767771] [-1.0897158 1.2307227 -0.5168734 ... -0.88282156 -0.9914261 -0.69925475] ... [-0.73721886 -0.723219 0.27213508 ... -0.23061657 1.2279797 -0.56576955] [-1.1056055 0.5009212 -0.9189098 ... 0.15541103 1.769364 0.54065466] [-1.1733968 -0.5794676 0.01537082 ... 0.5679569 -0.39802417 0.3454619 ]] ... [[ 1.8631716 -1.7232155 0.7839451 ... -1.1062733 0.13010044 0.5674451 ] [ 1.153927 0.41517416 -1.1707793 ... 1.3107445 -0.86562943 -0.25189734] [-0.16566007 -0.8881029 -0.45164967 ... -0.44881305 0.48229682 -0.3923623 ] ... [-0.5143631 -1.3054422 -0.25133356 ... -0.21043259 0.89461035 -1.8450613 ] [ 0.39974064 -0.7855033 -0.10066222 ... 1.2888312 -1.9765925 -0.63214594] [-0.57470256 2.1647427 0.5012384 ... -0.37391636 -0.8495466 1.4384099 ]] [[-0.20298785 0.09043577 1.0737529 ... 0.8428718 -1.5859954 0.03129538] [-1.0445274 -0.02802191 -2.0207446 ... 1.2770773 0.357159 1.3059765 ] [-0.4719153 -0.25868934 -0.26705095 ... 1.2540237 0.60921174 2.1343186 ] ... [ 0.09596765 0.85907394 1.8256398 ... 0.01860699 0.10923391 -1.3131977 ] [ 0.54171073 -1.76188 -3.1117249 ... -2.3422925 0.81752706 0.94342786] [ 1.7705327 1.6097596 -0.13612686 ... -1.405668 -2.0475657 0.94202113]] [[-0.12028761 -1.3806943 1.6795713 ... 1.2037734 2.044555 -1.5957499 ] [ 0.11585703 -1.0834403 -0.27309728 ... 2.5651772 -1.2401851 -2.5278196 ] [ 1.7264055 1.3352485 -0.75815016 ... 0.89463 -1.4768598 0.81388324] ... [ 0.7812744 -0.4485062 -1.0701215 ... 0.61324245 2.1579154 0.97574043] [-0.20454833 -0.08256809 -1.1248732 ... 0.31260988 0.7926804 0.43821386] [ 0.8935376 0.6277509 1.4018432 ... -0.53848666 1.6297376 0.62079316]]] [[[-0.9842114 0.76932544 -2.0399954 ... 0.8458361 -0.29441956 -0.82905847] [ 0.8389231 0.6448237 -0.32713237 ... 
-0.80525035 -0.5050083 -0.22055551] [ 0.19752307 -0.4792974 -0.37943676 ... -1.3668975 0.6242512 1.0631912 ] ... [ 0.47511458 -0.969085 0.6427878 ... -0.9454787 0.49767876 1.8510072 ] [-0.41541117 -0.36021265 0.66990495 ... 2.1530116 -0.33208776 -0.9993341 ] [-0.42496383 -0.04061733 1.576663 ... 1.865553 2.1474435 0.44263527]] [[-0.6560635 0.2361292 -1.4376775 ... 0.5225981 0.80032575 -1.8220745 ] [ 1.9758351 -0.6882551 0.03986887 ... -0.44066995 0.9011751 -0.09149702] [-1.2630674 -1.3458403 0.47536543 ... -2.8559139 1.042361 0.96197766] ... [ 2.170581 0.34460786 -1.9630523 ... 0.32237834 0.43326733 -0.7973209 ] [ 0.1069649 -0.589283 -0.2902853 ... 0.9413547 0.44219097 1.0424887 ] [ 0.22423775 1.3216048 -0.31567505 ... 0.30046064 1.5112132 0.05155411]] [[-0.33658454 2.0547411 0.19214281 ... -0.8518987 -0.9350933 -0.391418 ] [-0.7258112 -0.81618196 0.03665226 ... -2.81886 0.83022225 1.4333014 ] [ 2.067453 -0.59254044 -0.08703218 ... -1.452996 -0.65012217 1.4913007 ] ... [ 0.8138213 2.1784365 0.29597405 ... 1.0201794 0.10485898 -0.06547429] [-0.02808571 -0.4559368 1.4346813 ... 0.45350847 1.8744643 -0.39543104] [-1.1113094 0.66388637 1.1318697 ... 0.33653054 -1.4114041 1.078493 ]] ... [[ 0.39057085 -0.6921353 -0.41187155 ... 0.8474395 -0.18090542 0.794471 ] [-0.4197677 -0.85777456 -0.9445221 ... -0.61476463 0.7739116 -1.0229045 ] [ 0.0686442 -0.8940967 0.22762686 ... 0.8921071 -0.3899228 -1.2064861 ] ... [ 0.34737462 0.3125766 1.0790967 ... -2.8386767 0.510496 2.163456 ] [ 0.08582344 -1.1945279 0.01372897 ... -2.5721657 0.14869307 0.06857813] [ 0.81729496 -0.39953014 1.7032152 ... 0.96770066 0.97393775 0.4099856 ]] [[-0.7754996 -0.01206773 1.6638699 ... 1.4946121 1.1957558 -1.2913922 ] [-0.66612726 -0.4846004 -0.64598835 ... 1.5571811 -0.5931279 -0.9768201 ] [-0.46144655 0.5537884 -0.01078537 ... -0.4232965 2.3976767 -0.3988725 ] ... [ 0.7905513 0.7248115 1.2249002 ... -0.03822948 0.08263204 0.72337824] [ 1.5003867 0.05819308 0.01280229 ... 
-0.02662294 -0.10576688 0.13692367] [-0.76309 -0.8697184 -1.1275414 ... 0.2556223 0.07755467 1.1582322 ]] [[-0.16229782 -0.804219 0.10689672 ... -1.6434877 -1.1166719 2.2821834 ] [ 0.40045205 -0.52863663 0.27093074 ... 2.0210545 -0.6629579 1.6662438 ] [ 1.7692405 0.9712191 -0.8286488 ... -1.3211602 0.6461295 -0.3560136 ] ... [ 0.6982522 -0.36618525 2.6794207 ... 2.4598062 0.6220151 0.06731309] [-2.1066859 -1.916825 0.534041 ... 0.10809253 1.4500283 -0.925113 ] [ 0.5384869 -0.18806323 0.85608226 ... 0.44397882 0.5544081 1.1764468 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4, 5, 6) - mode:replicate - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5536.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="replicate"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2, 3, 4, 5, 6]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during cofw_re: [[[[[ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] ... [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147]] [[ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] ... [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147]] [[ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] ... [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147]] ... [[ 0.7401392 0.7401392 -0.5743026 ... 
0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] ... [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ]] [[ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] ... [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ]] [[ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] ... [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ]]] [[[-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] ... [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397]] [[-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] ... [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 
0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397]] [[-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] ... [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397]] ... [[-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] ... [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ]] [[-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] ... [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ]] [[-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] ... [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ]]] [[[-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... 
-1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] ... [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ]] [[-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] ... [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ]] [[-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] ... [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ]] ... [[-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] ... [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ]] [[-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] ... [-0.14088479 -0.14088479 -1.7698824 ... 
-0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ]] [[-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] ... [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ]]]]]; ov_res: [[[[[ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] ... [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147]] [[ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] ... [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147]] [[ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] [ 1.6222848 1.6222848 0.12266236 ... -2.0921605 -2.0921605 -2.0921605 ] ... [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147] [-0.2814335 -0.2814335 1.1666198 ... -0.26585147 -0.26585147 -0.26585147]] ... 
[[ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] ... [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ]] [[ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] ... [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ]] [[ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] [ 0.7401392 0.7401392 -0.5743026 ... 0.19980386 0.19980386 0.19980386] ... [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ] [ 1.5498694 1.5498694 1.4721467 ... 0.711778 0.711778 0.711778 ]]] [[[-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] ... [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397]] [[-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] ... [-1.7006596 -1.7006596 0.05088438 ... 
0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397]] [[-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] [-0.24134272 -0.24134272 0.13020295 ... 1.3434247 1.3434247 1.3434247 ] ... [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397] [-1.7006596 -1.7006596 0.05088438 ... 0.81880397 0.81880397 0.81880397]] ... [[-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] ... [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ]] [[-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] ... [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ]] [[-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] [-0.2621964 -0.2621964 -0.5856054 ... 0.5014624 0.5014624 0.5014624 ] ... [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ] [-0.9197754 -0.9197754 -2.4243822 ... -0.6568007 -0.6568007 -0.6568007 ]]] [[[-0.12974086 -0.12974086 -0.5505309 ... 
-1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] ... [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ]] [[-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] ... [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ]] [[-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] [-0.12974086 -0.12974086 -0.5505309 ... -1.7066338 -1.7066338 -1.7066338 ] ... [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ] [-0.55013734 -0.55013734 0.42117637 ... 1.2875004 1.2875004 1.2875004 ]] ... [[-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] ... [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ]] [[-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] ... 
[-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ]] [[-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] [-0.5123022 -0.5123022 0.42898646 ... -0.24282555 -0.24282555 -0.24282555] ... [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ] [-0.14088479 -0.14088479 -1.7698824 ... -0.8135088 -0.8135088 -0.8135088 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 0) - mode:replicate - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5539.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="replicate"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 8.77367198e-01 8.77367198e-01 7.70096421e-01 ... 7.81811059e-01 5.10397077e-01 -1.56334674e+00] [ 2.58935243e-01 2.58935243e-01 1.19699323e+00 ... 4.40105833e-02 3.24798711e-02 1.22149038e+00] [-4.40292388e-01 -4.40292388e-01 1.19546548e-01 ... -3.88542339e-02 2.75991797e-01 -1.35757875e+00] ... [ 1.01921237e+00 1.01921237e+00 1.78898478e+00 ... 1.26652479e+00 2.67565608e+00 -2.25373045e-01] [-2.08806694e-02 -2.08806694e-02 2.37687898e+00 ... 1.54210675e+00 -8.90685439e-01 -2.56616343e-02] [-2.64300734e-01 -2.64300734e-01 7.19157219e-01 ... 1.03303385e+00 1.36690497e+00 -1.38504612e+00]] [[-6.39942408e-01 -6.39942408e-01 -1.37025043e-01 ... -4.39236611e-01 -4.89789285e-02 1.56824934e+00] [-8.97995114e-01 -8.97995114e-01 -9.15376127e-01 ... 8.71785581e-01 -5.59273541e-01 6.05688572e-01] [-1.38148367e+00 -1.38148367e+00 6.11496806e-01 ... -1.95968199e+00 1.39853072e+00 -8.80135894e-01] ... [ 5.57266474e-01 5.57266474e-01 -1.19286871e+00 ... 8.68249655e-01 -1.08790743e+00 -1.03246796e+00] [ 1.43480802e+00 1.43480802e+00 7.41684735e-01 ... -4.54877555e-01 6.20418135e-03 9.00342345e-01] [-1.01811826e+00 -1.01811826e+00 1.01777136e+00 ... 5.71442485e-01 4.47654836e-02 -1.06507552e+00]] [[ 1.34969544e+00 1.34969544e+00 1.33351982e+00 ... -1.17445242e+00 1.00243092e+00 -1.12532449e+00] [ 7.71251976e-01 7.71251976e-01 -1.02534902e+00 ... 2.25845838e+00 -1.08591139e+00 -1.02595317e+00] [-1.98992085e+00 -1.98992085e+00 -2.40145534e-01 ... 
2.17135236e-01 4.16793853e-01 -1.75954473e+00] ... [ 7.14974284e-01 7.14974284e-01 -1.09714150e+00 ... 2.47793508e+00 3.79330724e-01 -1.26099622e+00] [ 3.35916847e-01 3.35916847e-01 -1.09708399e-01 ... 3.48901749e-01 -8.49027038e-01 1.59249461e+00] [-5.22795141e-01 -5.22795141e-01 -1.11887264e+00 ... -9.42747891e-01 1.77728653e+00 -3.50076497e-01]] ... [[-4.72621143e-01 -4.72621143e-01 3.37448031e-01 ... 2.42499322e-01 -1.35233498e+00 -1.47532463e+00] [ 5.60540855e-01 5.60540855e-01 -1.06750584e+00 ... -5.22649825e-01 -6.68960392e-01 1.21184444e+00] [ 3.53716344e-01 3.53716344e-01 -1.83270082e-01 ... -1.09238005e+00 -9.79465902e-01 -1.74400234e+00] ... [ 6.22410774e-01 6.22410774e-01 2.33086899e-01 ... -1.10347998e+00 -2.47034177e-01 -2.60874367e+00] [-1.59756327e+00 -1.59756327e+00 1.05927706e+00 ... 1.39174879e+00 -2.27737993e-01 -4.99991864e-01] [ 1.15471947e+00 1.15471947e+00 3.30197304e-01 ... 5.29863596e-01 -2.16538191e-01 -1.18886375e+00]] [[-1.43981719e+00 -1.43981719e+00 4.13797140e-01 ... 1.02405834e+00 -8.19712460e-01 -7.79873610e-01] [ 1.07162213e+00 1.07162213e+00 -1.13042307e+00 ... 1.42058396e+00 1.05555105e+00 -3.46283019e-01] [ 9.97886598e-01 9.97886598e-01 -1.53227603e+00 ... -1.10520899e+00 -9.20195460e-01 -5.45205593e-01] ... [ 1.04447150e+00 1.04447150e+00 2.05277777e+00 ... -9.00495276e-02 -4.68126923e-01 -2.47208548e+00] [-2.40601107e-01 -2.40601107e-01 1.72451723e+00 ... 1.11855805e+00 -1.16048254e-01 4.96101946e-01] [-1.58546960e+00 -1.58546960e+00 -7.35570416e-02 ... -1.18116617e+00 -1.53881073e+00 1.19152200e+00]] [[-2.01020032e-01 -2.01020032e-01 6.73840940e-01 ... 3.75157595e-01 -1.48093075e-01 -4.98848945e-01] [-1.64205015e-01 -1.64205015e-01 -3.14526632e-02 ... 3.99696440e-01 -1.48219812e+00 1.46881306e+00] [ 9.83266532e-01 9.83266532e-01 -2.17037249e+00 ... -8.36024582e-01 -1.22277045e+00 1.05881453e+00] ... [ 1.95093453e-01 1.95093453e-01 1.90482378e+00 ... 
3.36639643e-01 1.64788976e-01 -7.37572789e-01] [-9.35498238e-01 -9.35498238e-01 7.33035445e-01 ... 1.50964403e+00 4.28241313e-01 -3.15694243e-01] [-1.64752030e+00 -1.64752030e+00 -9.75739479e-01 ... -3.76592577e-01 -1.59089172e+00 7.88529336e-01]]] [[[-1.52968794e-01 -1.52968794e-01 4.68020469e-01 ... 7.71235585e-01 6.49846271e-02 1.86733747e+00] [ 1.11251032e+00 1.11251032e+00 -8.86430562e-01 ... -5.05851150e-01 -1.44614065e+00 2.48269234e-02] [-5.45065701e-01 -5.45065701e-01 -2.09825754e+00 ... 1.92536592e-01 1.67424262e+00 -3.23582083e-01] ... [-1.02754436e-01 -1.02754436e-01 1.97170424e+00 ... -4.70494926e-01 5.94955310e-02 1.20739305e+00] [ 3.94042313e-01 3.94042313e-01 -5.90536177e-01 ... -8.55789483e-02 -1.65499032e-01 -1.26024497e+00] [-5.59428334e-01 -5.59428334e-01 5.27815104e-01 ... -4.80210502e-03 -1.57644182e-01 -2.77185440e+00]] [[ 5.74709028e-02 5.74709028e-02 -4.04730654e+00 ... 7.07836211e-01 1.16049564e+00 -3.20318937e-01] [-8.00534070e-01 -8.00534070e-01 6.75312340e-01 ... -1.44931853e+00 -6.95501745e-01 7.42951930e-01] [ 4.98892516e-01 4.98892516e-01 -1.15702164e+00 ... 2.23989296e+00 -8.53541791e-01 -3.49561751e-01] ... [ 1.10040593e+00 1.10040593e+00 2.44015053e-01 ... 1.87664688e+00 -8.21946263e-01 -1.06133270e+00] [ 1.91567206e+00 1.91567206e+00 -4.03199285e-01 ... -2.43707463e-01 -1.39000714e+00 6.76032186e-01] [-9.70672369e-01 -9.70672369e-01 9.56295073e-01 ... -2.18618378e-01 9.97150600e-01 4.18348491e-01]] [[ 1.08406462e-01 1.08406462e-01 -1.04116952e+00 ... 1.24517453e+00 -2.45153353e-01 2.20708990e+00] [-1.88724494e+00 -1.88724494e+00 -2.13030076e+00 ... 3.22007053e-02 -3.23702246e-01 -8.07265490e-02] [ 1.51769102e+00 1.51769102e+00 4.82574970e-01 ... -1.69822842e-01 -5.67826450e-01 -5.04081070e-01] ... [ 7.73419499e-01 7.73419499e-01 -5.00598431e-01 ... -2.54977524e-01 -1.22032203e-01 -9.65427995e-01] [-5.80740571e-01 -5.80740571e-01 -8.43654215e-01 ... 
-1.35738778e+00 -1.83388913e+00 -7.64366269e-01] [-4.33893651e-01 -4.33893651e-01 -4.72210348e-01 ... 8.57168734e-01 2.95149863e-01 -9.18849051e-01]] ... [[ 7.81676352e-01 7.81676352e-01 9.01864231e-01 ... -1.77125072e+00 -4.29606736e-01 -2.99944878e-01] [-1.56264198e+00 -1.56264198e+00 3.89530212e-02 ... -2.11206651e+00 3.25834602e-01 1.48971856e+00] [ 3.16625625e-01 3.16625625e-01 8.55190277e-01 ... 2.22636044e-01 2.01751560e-01 -1.16075277e+00] ... [-1.57105887e+00 -1.57105887e+00 1.75894395e-01 ... 1.46658361e+00 9.42662835e-01 1.85446758e-02] [ 5.11527002e-01 5.11527002e-01 1.33341998e-01 ... 4.85263705e-01 -6.28486335e-01 8.03044379e-01] [ 1.14589453e+00 1.14589453e+00 1.94124949e+00 ... 4.11890775e-01 -8.24552655e-01 -1.10375917e+00]] [[ 9.50455070e-01 9.50455070e-01 -3.28000486e-01 ... 2.20412701e-01 1.38418508e+00 4.90789115e-01] [ 9.13005546e-02 9.13005546e-02 6.71297371e-01 ... -3.02223086e-01 -3.40153337e-01 -4.73824412e-01] [ 5.80370486e-01 5.80370486e-01 -1.48948634e+00 ... -7.68425584e-01 -1.37887609e+00 -4.48106527e-01] ... [ 1.80168879e+00 1.80168879e+00 8.51397097e-01 ... 6.18706703e-01 -8.07068050e-01 1.12825520e-01] [-3.49231005e-01 -3.49231005e-01 9.54547584e-01 ... 1.55480063e+00 -1.13307393e+00 -2.85919815e-01] [-4.88669932e-01 -4.88669932e-01 6.73343893e-03 ... -1.35005009e+00 -9.07847285e-01 -7.13229656e-01]] [[ 1.93376452e-01 1.93376452e-01 3.88196468e-01 ... -1.46274757e+00 -3.40259403e-01 1.81143057e+00] [-5.46497703e-01 -5.46497703e-01 3.20994228e-01 ... -4.20322865e-01 -8.70715737e-01 -1.59519970e+00] [-1.96914017e+00 -1.96914017e+00 -1.26100159e+00 ... -4.82174940e-03 6.79716229e-01 -1.32739949e+00] ... [-6.81075096e-01 -6.81075096e-01 -8.36243927e-01 ... -1.34891212e+00 -3.58783394e-01 -2.48827368e-01] [-1.83694988e-01 -1.83694988e-01 -3.22528221e-02 ... 6.69865310e-03 6.27802461e-02 -3.50795567e-01] [ 2.24981099e-01 2.24981099e-01 -4.43600506e-01 ... 
3.89186382e-01 8.08200598e-01 3.16413850e-01]]] [[[-2.04096293e+00 -2.04096293e+00 5.68879783e-01 ... 2.87760317e-01 1.63435745e+00 -1.26513028e+00] [-1.27704871e+00 -1.27704871e+00 -5.05832195e-01 ... 5.06441295e-01 -8.15598518e-02 7.74310529e-02] [-5.57862878e-01 -5.57862878e-01 3.74359816e-01 ... -2.70229667e-01 1.67246640e+00 2.72158772e-01] ... [-1.45120478e+00 -1.45120478e+00 -9.55922008e-01 ... -4.53865081e-01 -4.58779484e-01 -2.85129786e-01] [-1.02585793e+00 -1.02585793e+00 2.35568523e+00 ... 6.70680523e-01 -6.77094698e-01 -2.30351016e-01] [ 7.84409523e-01 7.84409523e-01 -5.10833681e-01 ... 1.04640281e+00 -1.91130936e+00 9.31096077e-02]] [[ 7.27183938e-01 7.27183938e-01 1.80291247e+00 ... -2.13097438e-01 -5.66231348e-02 1.18239236e+00] [-1.74299204e+00 -1.74299204e+00 3.59348685e-01 ... 1.21313073e-01 6.31376505e-01 -6.83053851e-01] [ 3.31123918e-01 3.31123918e-01 5.40368080e-01 ... -7.37323344e-01 7.77297840e-03 -4.35873777e-01] ... [ 1.75625718e+00 1.75625718e+00 -7.16121852e-01 ... 1.31993222e+00 -1.20367372e+00 -5.73807836e-01] [ 1.37266278e+00 1.37266278e+00 2.51940012e-01 ... 9.54452574e-01 -4.71066833e-01 6.24729335e-01] [-9.70836639e-01 -9.70836639e-01 6.79176509e-01 ... 3.40905577e-01 -1.34153259e+00 -2.92474580e+00]] [[ 5.06649494e-01 5.06649494e-01 -2.21667409e-01 ... -5.47491968e-01 1.22853577e+00 -7.87359655e-01] [ 2.50160336e-01 2.50160336e-01 6.27817631e-01 ... 6.96876198e-02 6.15394533e-01 -1.70317852e+00] [-8.47815648e-02 -8.47815648e-02 1.04090893e+00 ... 7.28195786e-01 1.09426367e+00 4.53213938e-02] ... [-7.57006049e-01 -7.57006049e-01 2.42416143e-01 ... -6.53472841e-01 -1.90803373e+00 7.03179538e-01] [-1.59955323e+00 -1.59955323e+00 -1.35691792e-01 ... -2.41238058e-01 6.67563200e-01 -5.23901761e-01] [-9.85993445e-01 -9.85993445e-01 7.45353580e-01 ... 8.54058266e-01 -5.78350902e-01 -1.78144622e+00]] ... [[ 1.85314786e+00 1.85314786e+00 -2.88734108e-01 ... 
-9.35059667e-01 5.35938859e-01 2.60103941e+00] [ 1.41970825e+00 1.41970825e+00 7.83002019e-01 ... -6.99785233e-01 -1.59976006e+00 -3.04561555e-01] [-3.24916956e-03 -3.24916956e-03 1.13553596e+00 ... 3.28653723e-01 -8.24634075e-01 -1.45086575e+00] ... [ 4.73396897e-01 4.73396897e-01 1.09461355e+00 ... -4.73111212e-01 2.77566838e+00 1.03208154e-01] [-4.99379069e-01 -4.99379069e-01 5.41379035e-01 ... -9.38993514e-01 -1.40970811e-01 8.22493970e-01] [-6.38523936e-01 -6.38523936e-01 8.47472191e-01 ... -8.14395607e-01 7.02176809e-01 3.09059560e-01]] [[ 9.20418501e-01 9.20418501e-01 -2.53307670e-01 ... 1.13374805e+00 4.41949725e-01 5.60184300e-01] [-9.54909801e-01 -9.54909801e-01 -3.01345065e-02 ... 1.94942939e+00 1.12966728e+00 -7.44067907e-01] [ 1.04540634e+00 1.04540634e+00 -9.68654037e-01 ... 2.59895593e-01 -6.23776674e-01 5.97070992e-01] ... [-4.57689315e-02 -4.57689315e-02 -2.21447420e+00 ... -1.80379260e+00 4.39651400e-01 2.20226735e-01] [-1.76163122e-01 -1.76163122e-01 -1.66607714e+00 ... -5.19318879e-01 2.57273346e-01 -7.16177523e-02] [ 2.78412366e+00 2.78412366e+00 1.75924599e+00 ... -7.79150069e-01 1.38403261e+00 -4.12648916e-01]] [[-1.92623675e-01 -1.92623675e-01 4.98297453e-01 ... 2.83328295e-01 -4.69655633e-01 2.46782079e-01] [-1.44243538e+00 -1.44243538e+00 1.90307885e-01 ... -3.68323833e-01 1.12706900e+00 -4.28983271e-01] [-1.81076854e-01 -1.81076854e-01 -2.98193932e-01 ... -1.97306171e-01 3.23522282e+00 -1.22972202e+00] ... [ 7.91544378e-01 7.91544378e-01 -4.96141702e-01 ... 1.36188293e+00 -8.08390509e-03 -1.49577260e+00] [ 2.47173882e+00 2.47173882e+00 -9.52951372e-01 ... 5.29814482e-01 1.08139050e+00 9.38956022e-01] [-6.30067289e-01 -6.30067289e-01 -1.46152604e+00 ... -5.29764220e-02 2.50352889e-01 -1.43064845e+00]]]]]; ov_res: [[[[[ 8.77367198e-01 8.77367198e-01 7.70096421e-01 ... 7.81811059e-01 5.10397077e-01 -1.56334674e+00] [ 2.58935243e-01 2.58935243e-01 1.19699323e+00 ... 
4.40105833e-02 3.24798711e-02 1.22149038e+00] [-4.40292388e-01 -4.40292388e-01 1.19546548e-01 ... -3.88542339e-02 2.75991797e-01 -1.35757875e+00] ... [ 1.01921237e+00 1.01921237e+00 1.78898478e+00 ... 1.26652479e+00 2.67565608e+00 -2.25373045e-01] [-2.08806694e-02 -2.08806694e-02 2.37687898e+00 ... 1.54210675e+00 -8.90685439e-01 -2.56616343e-02] [-2.64300734e-01 -2.64300734e-01 7.19157219e-01 ... 1.03303385e+00 1.36690497e+00 -1.38504612e+00]] [[-6.39942408e-01 -6.39942408e-01 -1.37025043e-01 ... -4.39236611e-01 -4.89789285e-02 1.56824934e+00] [-8.97995114e-01 -8.97995114e-01 -9.15376127e-01 ... 8.71785581e-01 -5.59273541e-01 6.05688572e-01] [-1.38148367e+00 -1.38148367e+00 6.11496806e-01 ... -1.95968199e+00 1.39853072e+00 -8.80135894e-01] ... [ 5.57266474e-01 5.57266474e-01 -1.19286871e+00 ... 8.68249655e-01 -1.08790743e+00 -1.03246796e+00] [ 1.43480802e+00 1.43480802e+00 7.41684735e-01 ... -4.54877555e-01 6.20418135e-03 9.00342345e-01] [-1.01811826e+00 -1.01811826e+00 1.01777136e+00 ... 5.71442485e-01 4.47654836e-02 -1.06507552e+00]] [[ 1.34969544e+00 1.34969544e+00 1.33351982e+00 ... -1.17445242e+00 1.00243092e+00 -1.12532449e+00] [ 7.71251976e-01 7.71251976e-01 -1.02534902e+00 ... 2.25845838e+00 -1.08591139e+00 -1.02595317e+00] [-1.98992085e+00 -1.98992085e+00 -2.40145534e-01 ... 2.17135236e-01 4.16793853e-01 -1.75954473e+00] ... [ 7.14974284e-01 7.14974284e-01 -1.09714150e+00 ... 2.47793508e+00 3.79330724e-01 -1.26099622e+00] [ 3.35916847e-01 3.35916847e-01 -1.09708399e-01 ... 3.48901749e-01 -8.49027038e-01 1.59249461e+00] [-5.22795141e-01 -5.22795141e-01 -1.11887264e+00 ... -9.42747891e-01 1.77728653e+00 -3.50076497e-01]] ... [[-4.72621143e-01 -4.72621143e-01 3.37448031e-01 ... 2.42499322e-01 -1.35233498e+00 -1.47532463e+00] [ 5.60540855e-01 5.60540855e-01 -1.06750584e+00 ... -5.22649825e-01 -6.68960392e-01 1.21184444e+00] [ 3.53716344e-01 3.53716344e-01 -1.83270082e-01 ... -1.09238005e+00 -9.79465902e-01 -1.74400234e+00] ... 
[ 6.22410774e-01 6.22410774e-01 2.33086899e-01 ... -1.10347998e+00 -2.47034177e-01 -2.60874367e+00] [-1.59756327e+00 -1.59756327e+00 1.05927706e+00 ... 1.39174879e+00 -2.27737993e-01 -4.99991864e-01] [ 1.15471947e+00 1.15471947e+00 3.30197304e-01 ... 5.29863596e-01 -2.16538191e-01 -1.18886375e+00]] [[-1.43981719e+00 -1.43981719e+00 4.13797140e-01 ... 1.02405834e+00 -8.19712460e-01 -7.79873610e-01] [ 1.07162213e+00 1.07162213e+00 -1.13042307e+00 ... 1.42058396e+00 1.05555105e+00 -3.46283019e-01] [ 9.97886598e-01 9.97886598e-01 -1.53227603e+00 ... -1.10520899e+00 -9.20195460e-01 -5.45205593e-01] ... [ 1.04447150e+00 1.04447150e+00 2.05277777e+00 ... -9.00495276e-02 -4.68126923e-01 -2.47208548e+00] [-2.40601107e-01 -2.40601107e-01 1.72451723e+00 ... 1.11855805e+00 -1.16048254e-01 4.96101946e-01] [-1.58546960e+00 -1.58546960e+00 -7.35570416e-02 ... -1.18116617e+00 -1.53881073e+00 1.19152200e+00]] [[-2.01020032e-01 -2.01020032e-01 6.73840940e-01 ... 3.75157595e-01 -1.48093075e-01 -4.98848945e-01] [-1.64205015e-01 -1.64205015e-01 -3.14526632e-02 ... 3.99696440e-01 -1.48219812e+00 1.46881306e+00] [ 9.83266532e-01 9.83266532e-01 -2.17037249e+00 ... -8.36024582e-01 -1.22277045e+00 1.05881453e+00] ... [ 1.95093453e-01 1.95093453e-01 1.90482378e+00 ... 3.36639643e-01 1.64788976e-01 -7.37572789e-01] [-9.35498238e-01 -9.35498238e-01 7.33035445e-01 ... 1.50964403e+00 4.28241313e-01 -3.15694243e-01] [-1.64752030e+00 -1.64752030e+00 -9.75739479e-01 ... -3.76592577e-01 -1.59089172e+00 7.88529336e-01]]] [[[-1.52968794e-01 -1.52968794e-01 4.68020469e-01 ... 7.71235585e-01 6.49846271e-02 1.86733747e+00] [ 1.11251032e+00 1.11251032e+00 -8.86430562e-01 ... -5.05851150e-01 -1.44614065e+00 2.48269234e-02] [-5.45065701e-01 -5.45065701e-01 -2.09825754e+00 ... 1.92536592e-01 1.67424262e+00 -3.23582083e-01] ... [-1.02754436e-01 -1.02754436e-01 1.97170424e+00 ... -4.70494926e-01 5.94955310e-02 1.20739305e+00] [ 3.94042313e-01 3.94042313e-01 -5.90536177e-01 ... 
-8.55789483e-02 -1.65499032e-01 -1.26024497e+00] [-5.59428334e-01 -5.59428334e-01 5.27815104e-01 ... -4.80210502e-03 -1.57644182e-01 -2.77185440e+00]] [[ 5.74709028e-02 5.74709028e-02 -4.04730654e+00 ... 7.07836211e-01 1.16049564e+00 -3.20318937e-01] [-8.00534070e-01 -8.00534070e-01 6.75312340e-01 ... -1.44931853e+00 -6.95501745e-01 7.42951930e-01] [ 4.98892516e-01 4.98892516e-01 -1.15702164e+00 ... 2.23989296e+00 -8.53541791e-01 -3.49561751e-01] ... [ 1.10040593e+00 1.10040593e+00 2.44015053e-01 ... 1.87664688e+00 -8.21946263e-01 -1.06133270e+00] [ 1.91567206e+00 1.91567206e+00 -4.03199285e-01 ... -2.43707463e-01 -1.39000714e+00 6.76032186e-01] [-9.70672369e-01 -9.70672369e-01 9.56295073e-01 ... -2.18618378e-01 9.97150600e-01 4.18348491e-01]] [[ 1.08406462e-01 1.08406462e-01 -1.04116952e+00 ... 1.24517453e+00 -2.45153353e-01 2.20708990e+00] [-1.88724494e+00 -1.88724494e+00 -2.13030076e+00 ... 3.22007053e-02 -3.23702246e-01 -8.07265490e-02] [ 1.51769102e+00 1.51769102e+00 4.82574970e-01 ... -1.69822842e-01 -5.67826450e-01 -5.04081070e-01] ... [ 7.73419499e-01 7.73419499e-01 -5.00598431e-01 ... -2.54977524e-01 -1.22032203e-01 -9.65427995e-01] [-5.80740571e-01 -5.80740571e-01 -8.43654215e-01 ... -1.35738778e+00 -1.83388913e+00 -7.64366269e-01] [-4.33893651e-01 -4.33893651e-01 -4.72210348e-01 ... 8.57168734e-01 2.95149863e-01 -9.18849051e-01]] ... [[ 7.81676352e-01 7.81676352e-01 9.01864231e-01 ... -1.77125072e+00 -4.29606736e-01 -2.99944878e-01] [-1.56264198e+00 -1.56264198e+00 3.89530212e-02 ... -2.11206651e+00 3.25834602e-01 1.48971856e+00] [ 3.16625625e-01 3.16625625e-01 8.55190277e-01 ... 2.22636044e-01 2.01751560e-01 -1.16075277e+00] ... [-1.57105887e+00 -1.57105887e+00 1.75894395e-01 ... 1.46658361e+00 9.42662835e-01 1.85446758e-02] [ 5.11527002e-01 5.11527002e-01 1.33341998e-01 ... 4.85263705e-01 -6.28486335e-01 8.03044379e-01] [ 1.14589453e+00 1.14589453e+00 1.94124949e+00 ... 
4.11890775e-01 -8.24552655e-01 -1.10375917e+00]] [[ 9.50455070e-01 9.50455070e-01 -3.28000486e-01 ... 2.20412701e-01 1.38418508e+00 4.90789115e-01] [ 9.13005546e-02 9.13005546e-02 6.71297371e-01 ... -3.02223086e-01 -3.40153337e-01 -4.73824412e-01] [ 5.80370486e-01 5.80370486e-01 -1.48948634e+00 ... -7.68425584e-01 -1.37887609e+00 -4.48106527e-01] ... [ 1.80168879e+00 1.80168879e+00 8.51397097e-01 ... 6.18706703e-01 -8.07068050e-01 1.12825520e-01] [-3.49231005e-01 -3.49231005e-01 9.54547584e-01 ... 1.55480063e+00 -1.13307393e+00 -2.85919815e-01] [-4.88669932e-01 -4.88669932e-01 6.73343893e-03 ... -1.35005009e+00 -9.07847285e-01 -7.13229656e-01]] [[ 1.93376452e-01 1.93376452e-01 3.88196468e-01 ... -1.46274757e+00 -3.40259403e-01 1.81143057e+00] [-5.46497703e-01 -5.46497703e-01 3.20994228e-01 ... -4.20322865e-01 -8.70715737e-01 -1.59519970e+00] [-1.96914017e+00 -1.96914017e+00 -1.26100159e+00 ... -4.82174940e-03 6.79716229e-01 -1.32739949e+00] ... [-6.81075096e-01 -6.81075096e-01 -8.36243927e-01 ... -1.34891212e+00 -3.58783394e-01 -2.48827368e-01] [-1.83694988e-01 -1.83694988e-01 -3.22528221e-02 ... 6.69865310e-03 6.27802461e-02 -3.50795567e-01] [ 2.24981099e-01 2.24981099e-01 -4.43600506e-01 ... 3.89186382e-01 8.08200598e-01 3.16413850e-01]]] [[[-2.04096293e+00 -2.04096293e+00 5.68879783e-01 ... 2.87760317e-01 1.63435745e+00 -1.26513028e+00] [-1.27704871e+00 -1.27704871e+00 -5.05832195e-01 ... 5.06441295e-01 -8.15598518e-02 7.74310529e-02] [-5.57862878e-01 -5.57862878e-01 3.74359816e-01 ... -2.70229667e-01 1.67246640e+00 2.72158772e-01] ... [-1.45120478e+00 -1.45120478e+00 -9.55922008e-01 ... -4.53865081e-01 -4.58779484e-01 -2.85129786e-01] [-1.02585793e+00 -1.02585793e+00 2.35568523e+00 ... 6.70680523e-01 -6.77094698e-01 -2.30351016e-01] [ 7.84409523e-01 7.84409523e-01 -5.10833681e-01 ... 1.04640281e+00 -1.91130936e+00 9.31096077e-02]] [[ 7.27183938e-01 7.27183938e-01 1.80291247e+00 ... 
-2.13097438e-01 -5.66231348e-02 1.18239236e+00] [-1.74299204e+00 -1.74299204e+00 3.59348685e-01 ... 1.21313073e-01 6.31376505e-01 -6.83053851e-01] [ 3.31123918e-01 3.31123918e-01 5.40368080e-01 ... -7.37323344e-01 7.77297840e-03 -4.35873777e-01] ... [ 1.75625718e+00 1.75625718e+00 -7.16121852e-01 ... 1.31993222e+00 -1.20367372e+00 -5.73807836e-01] [ 1.37266278e+00 1.37266278e+00 2.51940012e-01 ... 9.54452574e-01 -4.71066833e-01 6.24729335e-01] [-9.70836639e-01 -9.70836639e-01 6.79176509e-01 ... 3.40905577e-01 -1.34153259e+00 -2.92474580e+00]] [[ 5.06649494e-01 5.06649494e-01 -2.21667409e-01 ... -5.47491968e-01 1.22853577e+00 -7.87359655e-01] [ 2.50160336e-01 2.50160336e-01 6.27817631e-01 ... 6.96876198e-02 6.15394533e-01 -1.70317852e+00] [-8.47815648e-02 -8.47815648e-02 1.04090893e+00 ... 7.28195786e-01 1.09426367e+00 4.53213938e-02] ... [-7.57006049e-01 -7.57006049e-01 2.42416143e-01 ... -6.53472841e-01 -1.90803373e+00 7.03179538e-01] [-1.59955323e+00 -1.59955323e+00 -1.35691792e-01 ... -2.41238058e-01 6.67563200e-01 -5.23901761e-01] [-9.85993445e-01 -9.85993445e-01 7.45353580e-01 ... 8.54058266e-01 -5.78350902e-01 -1.78144622e+00]] ... [[ 1.85314786e+00 1.85314786e+00 -2.88734108e-01 ... -9.35059667e-01 5.35938859e-01 2.60103941e+00] [ 1.41970825e+00 1.41970825e+00 7.83002019e-01 ... -6.99785233e-01 -1.59976006e+00 -3.04561555e-01] [-3.24916956e-03 -3.24916956e-03 1.13553596e+00 ... 3.28653723e-01 -8.24634075e-01 -1.45086575e+00] ... [ 4.73396897e-01 4.73396897e-01 1.09461355e+00 ... -4.73111212e-01 2.77566838e+00 1.03208154e-01] [-4.99379069e-01 -4.99379069e-01 5.41379035e-01 ... -9.38993514e-01 -1.40970811e-01 8.22493970e-01] [-6.38523936e-01 -6.38523936e-01 8.47472191e-01 ... -8.14395607e-01 7.02176809e-01 3.09059560e-01]] [[ 9.20418501e-01 9.20418501e-01 -2.53307670e-01 ... 1.13374805e+00 4.41949725e-01 5.60184300e-01] [-9.54909801e-01 -9.54909801e-01 -3.01345065e-02 ... 
1.94942939e+00 1.12966728e+00 -7.44067907e-01] [ 1.04540634e+00 1.04540634e+00 -9.68654037e-01 ... 2.59895593e-01 -6.23776674e-01 5.97070992e-01] ... [-4.57689315e-02 -4.57689315e-02 -2.21447420e+00 ... -1.80379260e+00 4.39651400e-01 2.20226735e-01] [-1.76163122e-01 -1.76163122e-01 -1.66607714e+00 ... -5.19318879e-01 2.57273346e-01 -7.16177523e-02] [ 2.78412366e+00 2.78412366e+00 1.75924599e+00 ... -7.79150069e-01 1.38403261e+00 -4.12648916e-01]] [[-1.92623675e-01 -1.92623675e-01 4.98297453e-01 ... 2.83328295e-01 -4.69655633e-01 2.46782079e-01] [-1.44243538e+00 -1.44243538e+00 1.90307885e-01 ... -3.68323833e-01 1.12706900e+00 -4.28983271e-01] [-1.81076854e-01 -1.81076854e-01 -2.98193932e-01 ... -1.97306171e-01 3.23522282e+00 -1.22972202e+00] ... [ 7.91544378e-01 7.91544378e-01 -4.96141702e-01 ... 1.36188293e+00 -8.08390509e-03 -1.49577260e+00] [ 2.47173882e+00 2.47173882e+00 -9.52951372e-01 ... 5.29814482e-01 1.08139050e+00 9.38956022e-01] [-6.30067289e-01 -6.30067289e-01 -1.46152604e+00 ... -5.29764220e-02 2.50352889e-01 -1.43064845e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 1) - mode:replicate - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5542.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="replicate"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 1]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[-9.62066591e-01 -9.62066591e-01 -4.56744224e-01 ... -4.01221484e-01 -9.60377872e-01 -1.29066384e+00] [-1.71264362e+00 -1.71264362e+00 1.87137306e+00 ... 1.09262073e+00 -1.02166152e+00 4.66686785e-01] [ 1.13494301e+00 1.13494301e+00 5.41887343e-01 ... -1.19305980e+00 6.98778033e-01 1.49727571e+00] ... [-9.94040072e-01 -9.94040072e-01 4.15009886e-01 ... 3.91916841e-01 1.58399157e-02 -3.46505731e-01] [ 6.10194206e-01 6.10194206e-01 1.77044511e+00 ... 1.32562876e+00 3.65296155e-01 5.32554209e-01] [-6.04438603e-01 -6.04438603e-01 2.00707074e-02 ... 2.88732857e-01 -2.04519844e+00 -7.51059473e-01]] [[ 1.17350137e+00 1.17350137e+00 7.74219394e-01 ... -5.84060073e-01 5.25615752e-01 6.88801944e-01] [ 1.11556184e+00 1.11556184e+00 3.55729938e-01 ... 7.75856152e-02 6.15247823e-02 -8.80105421e-03] [ 6.14276171e-01 6.14276171e-01 9.58927274e-01 ... 2.15391070e-01 2.67684609e-01 1.83678582e-01] ... [-4.33031678e-01 -4.33031678e-01 5.01983520e-03 ... -1.22635975e-01 -1.22409117e+00 -3.10405165e-01] [-3.24396700e-01 -3.24396700e-01 1.46456826e+00 ... -5.38503900e-02 5.07284284e-01 8.03090110e-02] [ 2.27925628e-01 2.27925628e-01 -1.06618130e+00 ... 1.01973164e+00 -5.93939602e-01 -8.15131187e-01]] [[ 4.82822299e-01 4.82822299e-01 -1.01659620e+00 ... 1.03291404e+00 2.65439343e+00 -1.19574273e+00] [ 3.40289980e-01 3.40289980e-01 -3.72530878e-01 ... -2.89780814e-02 -1.29235375e+00 -1.97262275e+00] [ 4.82891977e-01 4.82891977e-01 2.39784169e+00 ... 
-6.25152230e-01 5.38257184e-04 3.09938997e-01] ... [ 8.03835094e-01 8.03835094e-01 -6.86416402e-02 ... 5.84531784e-01 -3.74834478e-01 1.06573975e+00] [-4.75995451e-01 -4.75995451e-01 9.58739638e-01 ... 8.92595351e-01 -1.23221040e+00 3.28504443e-01] [-1.89120376e+00 -1.89120376e+00 -1.10856545e+00 ... 4.30638017e-03 -6.10289946e-02 -1.96731651e+00]] ... [[ 5.94225883e-01 5.94225883e-01 -2.39276975e-01 ... -7.96687305e-01 9.69988823e-01 -6.39257550e-01] [ 6.87435150e-01 6.87435150e-01 3.78267914e-01 ... -1.03712857e+00 -9.11985874e-01 -3.43551069e-01] [-4.33008701e-01 -4.33008701e-01 -9.91427362e-01 ... 2.32202148e+00 1.10154286e-01 -2.58635491e-01] ... [-3.40505131e-02 -3.40505131e-02 6.69617131e-02 ... 6.42748535e-01 -8.85336995e-01 9.37225163e-01] [-1.00409722e+00 -1.00409722e+00 -4.94264066e-01 ... -1.18928933e+00 -1.16826808e+00 1.67108452e+00] [-2.09255075e+00 -2.09255075e+00 5.11289775e-01 ... -4.46443379e-01 6.34968996e-01 -8.35771620e-01]] [[ 1.78645158e+00 1.78645158e+00 8.96628022e-01 ... 1.35162994e-01 1.92892849e+00 9.71880734e-01] [ 9.89611268e-01 9.89611268e-01 -9.10737574e-01 ... -1.41936827e+00 -6.49016857e-01 9.59445179e-01] [ 6.76008463e-01 6.76008463e-01 1.92157280e+00 ... 2.01332068e+00 5.85332029e-02 -2.52270311e-01] ... [ 3.60711336e-01 3.60711336e-01 -1.58970188e-02 ... 2.74708509e-01 -3.48219156e-01 -1.32323182e+00] [-6.10755682e-01 -6.10755682e-01 5.84180355e-01 ... -1.65315485e+00 1.80075061e+00 -2.03529406e+00] [-3.92888010e-01 -3.92888010e-01 6.76379561e-01 ... 3.37589788e+00 2.29087758e+00 -1.81755280e+00]] [[ 1.78645158e+00 1.78645158e+00 8.96628022e-01 ... 1.35162994e-01 1.92892849e+00 9.71880734e-01] [ 9.89611268e-01 9.89611268e-01 -9.10737574e-01 ... -1.41936827e+00 -6.49016857e-01 9.59445179e-01] [ 6.76008463e-01 6.76008463e-01 1.92157280e+00 ... 2.01332068e+00 5.85332029e-02 -2.52270311e-01] ... [ 3.60711336e-01 3.60711336e-01 -1.58970188e-02 ... 
2.74708509e-01 -3.48219156e-01 -1.32323182e+00] [-6.10755682e-01 -6.10755682e-01 5.84180355e-01 ... -1.65315485e+00 1.80075061e+00 -2.03529406e+00] [-3.92888010e-01 -3.92888010e-01 6.76379561e-01 ... 3.37589788e+00 2.29087758e+00 -1.81755280e+00]]] [[[ 3.38796288e-01 3.38796288e-01 9.56254601e-01 ... -5.38189709e-01 4.08502489e-01 5.90214469e-02] [-5.03961504e-01 -5.03961504e-01 -4.41271871e-01 ... 5.07317126e-01 9.88135397e-01 -3.72951180e-01] [-1.05027902e+00 -1.05027902e+00 5.49466848e-01 ... -5.99119723e-01 1.15297878e+00 -2.30346060e+00] ... [-5.13270319e-01 -5.13270319e-01 -2.65661757e-02 ... 1.31728101e+00 -1.47539818e+00 2.76933998e-01] [-3.11295465e-02 -3.11295465e-02 -8.50046396e-01 ... -5.99368513e-01 2.18084645e+00 8.04135621e-01] [-1.55050918e-01 -1.55050918e-01 3.18780750e-01 ... -1.42172456e+00 -1.70891881e-02 2.05893829e-01]] [[ 7.80488491e-01 7.80488491e-01 6.58674717e-01 ... 1.01631010e+00 1.34311235e+00 -5.40662944e-01] [-1.07740760e+00 -1.07740760e+00 -1.23462033e+00 ... -1.20205641e+00 2.63601597e-02 1.84155703e+00] [-1.75236344e+00 -1.75236344e+00 -6.74170792e-01 ... -3.30133885e-01 -1.64799368e+00 1.59480488e+00] ... [-4.94487524e-01 -4.94487524e-01 -9.20529757e-03 ... 3.54861915e-02 1.32744193e+00 -7.37382770e-01] [ 5.19604743e-01 5.19604743e-01 1.92078561e-01 ... 8.68592039e-02 -1.52456570e+00 -1.43442118e+00] [ 1.51321387e+00 1.51321387e+00 6.82536185e-01 ... -1.50398874e+00 -3.33479375e-01 -2.60039140e-02]] [[-7.86015272e-01 -7.86015272e-01 5.88888466e-01 ... -1.04248069e-01 -1.17730081e+00 1.94380713e+00] [-2.91042328e-01 -2.91042328e-01 -1.04167295e+00 ... 4.81916487e-01 -2.05878094e-02 -8.28415573e-01] [-7.43586838e-01 -7.43586838e-01 -5.66332936e-01 ... 1.30471253e+00 1.13783360e+00 -7.60181665e-01] ... [-3.26847024e-02 -3.26847024e-02 1.71974862e+00 ... -1.00235391e+00 -1.22087765e+00 -1.41164148e+00] [-9.11579877e-02 -9.11579877e-02 -1.94133914e+00 ... 
3.37319411e-02 -8.78544092e-01 -8.81833792e-01] [-1.01333499e+00 -1.01333499e+00 -3.27547282e-01 ... 8.10904562e-01 7.01891035e-02 4.30951923e-01]] ... [[ 1.66634583e+00 1.66634583e+00 -9.22069013e-01 ... 1.20924950e+00 1.29425704e+00 -1.26287150e+00] [ 4.11407426e-02 4.11407426e-02 8.62896442e-01 ... -9.75408927e-02 1.56047791e-01 -1.12446785e+00] [-7.71086931e-01 -7.71086931e-01 5.19353151e-01 ... -1.05361426e+00 1.19960690e+00 6.97331131e-02] ... [-1.03279158e-01 -1.03279158e-01 -2.15111911e-01 ... 1.26123488e+00 4.40332294e-01 -4.80511300e-02] [-1.53829828e-01 -1.53829828e-01 -7.95179844e-01 ... -7.65310466e-01 5.16364157e-01 1.00423777e+00] [ 7.52188265e-02 7.52188265e-02 -3.29384416e-01 ... 7.18575597e-01 3.73832136e-01 8.06731641e-01]] [[-1.07295144e+00 -1.07295144e+00 -1.68617833e+00 ... -1.14483392e+00 -1.06209147e+00 1.33708751e+00] [-8.95875454e-01 -8.95875454e-01 3.27932924e-01 ... -6.51293457e-01 -1.52349567e+00 1.59513760e+00] [ 1.01166725e+00 1.01166725e+00 -3.04903239e-01 ... -1.09872270e+00 1.64449251e+00 -7.25180060e-02] ... [ 2.01470280e+00 2.01470280e+00 -7.68397033e-01 ... -5.77815950e-01 7.21319318e-01 -3.63184869e-01] [ 2.10034639e-01 2.10034639e-01 -1.54786301e+00 ... 1.22839965e-01 -3.89335930e-01 2.30619967e-01] [-1.83713686e+00 -1.83713686e+00 7.42995083e-01 ... 5.34902632e-01 2.53656477e-01 -2.14368358e-01]] [[-1.07295144e+00 -1.07295144e+00 -1.68617833e+00 ... -1.14483392e+00 -1.06209147e+00 1.33708751e+00] [-8.95875454e-01 -8.95875454e-01 3.27932924e-01 ... -6.51293457e-01 -1.52349567e+00 1.59513760e+00] [ 1.01166725e+00 1.01166725e+00 -3.04903239e-01 ... -1.09872270e+00 1.64449251e+00 -7.25180060e-02] ... [ 2.01470280e+00 2.01470280e+00 -7.68397033e-01 ... -5.77815950e-01 7.21319318e-01 -3.63184869e-01] [ 2.10034639e-01 2.10034639e-01 -1.54786301e+00 ... 1.22839965e-01 -3.89335930e-01 2.30619967e-01] [-1.83713686e+00 -1.83713686e+00 7.42995083e-01 ... 
5.34902632e-01 2.53656477e-01 -2.14368358e-01]]] [[[-2.13589907e-01 -2.13589907e-01 -1.72925338e-01 ... 4.48168993e-01 6.81733131e-01 1.63054633e+00] [-3.71055514e-01 -3.71055514e-01 -9.78755653e-01 ... 4.99048591e-01 1.48056611e-01 1.62547219e+00] [-2.39118600e+00 -2.39118600e+00 9.36460316e-01 ... 2.02407479e+00 1.05459476e+00 6.35936260e-01] ... [-9.36563015e-01 -9.36563015e-01 2.11175039e-01 ... 9.47013348e-02 -1.48539543e+00 8.57263923e-01] [ 9.21808302e-01 9.21808302e-01 5.87090671e-01 ... 3.22801918e-01 -2.35898042e+00 -7.46092618e-01] [ 1.59992769e-01 1.59992769e-01 1.53230160e-01 ... -4.47238177e-01 -1.57924429e-01 1.56142998e+00]] [[ 2.76878506e-01 2.76878506e-01 4.72143501e-01 ... -4.76695560e-02 -7.63285041e-01 5.32722712e-01] [-1.70101368e+00 -1.70101368e+00 8.23657885e-02 ... 8.15988898e-01 -2.15980673e+00 9.57667351e-01] [-2.92615592e-01 -2.92615592e-01 -1.57156444e+00 ... 9.68107224e-01 1.64072186e-01 -5.90083778e-01] ... [ 1.09423685e+00 1.09423685e+00 -1.61283001e-01 ... -1.20298100e+00 -1.02614510e+00 1.23529518e+00] [ 8.78604174e-01 8.78604174e-01 1.74982393e+00 ... -1.58427989e+00 -4.90612090e-01 5.81207611e-02] [-1.52252048e-01 -1.52252048e-01 2.47510761e-01 ... -8.52711260e-01 -5.88680089e-01 1.14889479e+00]] [[-1.00252819e+00 -1.00252819e+00 1.30779386e+00 ... 2.17610136e-01 8.97640109e-01 1.58692881e-01] [-1.23278081e+00 -1.23278081e+00 2.10962677e+00 ... 4.59006429e-01 3.26287709e-02 -1.89230591e-01] [ 2.27789164e+00 2.27789164e+00 3.59154791e-01 ... 1.78627002e+00 2.31942630e+00 1.11321306e+00] ... [ 3.74896049e-01 3.74896049e-01 -6.53156698e-01 ... -1.02398014e+00 -2.50922132e+00 5.36526740e-01] [ 7.22380579e-01 7.22380579e-01 -8.58281851e-01 ... -4.40778583e-01 -9.73274887e-01 7.36803472e-01] [ 8.72808397e-01 8.72808397e-01 -9.63619351e-01 ... -1.25470614e+00 1.12731993e+00 3.55639309e-01]] ... [[ 2.05557442e+00 2.05557442e+00 -7.07092404e-01 ... 
1.22174799e+00 -2.09055394e-01 2.08171502e-01] [-4.63743567e-01 -4.63743567e-01 2.16038942e+00 ... -1.44300568e+00 -1.65006292e+00 6.02312982e-01] [ 3.50656956e-01 3.50656956e-01 -2.17130825e-01 ... 1.34712726e-01 1.49281573e+00 1.70373940e+00] ... [ 2.66389072e-01 2.66389072e-01 5.73056102e-01 ... 1.38991654e+00 -2.02308702e+00 -2.87250340e-01] [-1.16132510e+00 -1.16132510e+00 8.30167294e-01 ... 1.16596687e+00 2.81645626e-01 -1.11050355e+00] [-9.38117683e-01 -9.38117683e-01 -5.60900092e-01 ... 5.58493435e-01 1.31114209e+00 -8.89330208e-01]] [[-1.10558951e+00 -1.10558951e+00 1.12805557e+00 ... -5.89391053e-01 1.20009506e+00 -1.79727221e+00] [-6.69285506e-02 -6.69285506e-02 9.89965916e-01 ... -2.11712614e-01 1.33740082e-01 -6.87736273e-01] [-8.27056289e-01 -8.27056289e-01 3.40934426e-01 ... 9.91447210e-01 5.21641493e-01 5.49283564e-01] ... [-1.72174060e+00 -1.72174060e+00 1.69143867e+00 ... -9.19850409e-01 -1.65136009e-01 -2.41531059e-01] [ 7.32774198e-01 7.32774198e-01 -3.04123729e-01 ... -4.10821646e-01 1.85835615e-01 1.37942231e+00] [ 5.19725442e-01 5.19725442e-01 -1.70097542e+00 ... 8.69647086e-01 4.74541187e-01 -1.06954545e-01]] [[-1.10558951e+00 -1.10558951e+00 1.12805557e+00 ... -5.89391053e-01 1.20009506e+00 -1.79727221e+00] [-6.69285506e-02 -6.69285506e-02 9.89965916e-01 ... -2.11712614e-01 1.33740082e-01 -6.87736273e-01] [-8.27056289e-01 -8.27056289e-01 3.40934426e-01 ... 9.91447210e-01 5.21641493e-01 5.49283564e-01] ... [-1.72174060e+00 -1.72174060e+00 1.69143867e+00 ... -9.19850409e-01 -1.65136009e-01 -2.41531059e-01] [ 7.32774198e-01 7.32774198e-01 -3.04123729e-01 ... -4.10821646e-01 1.85835615e-01 1.37942231e+00] [ 5.19725442e-01 5.19725442e-01 -1.70097542e+00 ... 8.69647086e-01 4.74541187e-01 -1.06954545e-01]]]]]; ov_res: [[[[[-9.62066591e-01 -9.62066591e-01 -4.56744224e-01 ... -4.01221484e-01 -9.60377872e-01 -1.29066384e+00] [-1.71264362e+00 -1.71264362e+00 1.87137306e+00 ... 
1.09262073e+00 -1.02166152e+00 4.66686785e-01] [ 1.13494301e+00 1.13494301e+00 5.41887343e-01 ... -1.19305980e+00 6.98778033e-01 1.49727571e+00] ... [-9.94040072e-01 -9.94040072e-01 4.15009886e-01 ... 3.91916841e-01 1.58399157e-02 -3.46505731e-01] [ 6.10194206e-01 6.10194206e-01 1.77044511e+00 ... 1.32562876e+00 3.65296155e-01 5.32554209e-01] [-6.04438603e-01 -6.04438603e-01 2.00707074e-02 ... 2.88732857e-01 -2.04519844e+00 -7.51059473e-01]] [[ 1.17350137e+00 1.17350137e+00 7.74219394e-01 ... -5.84060073e-01 5.25615752e-01 6.88801944e-01] [ 1.11556184e+00 1.11556184e+00 3.55729938e-01 ... 7.75856152e-02 6.15247823e-02 -8.80105421e-03] [ 6.14276171e-01 6.14276171e-01 9.58927274e-01 ... 2.15391070e-01 2.67684609e-01 1.83678582e-01] ... [-4.33031678e-01 -4.33031678e-01 5.01983520e-03 ... -1.22635975e-01 -1.22409117e+00 -3.10405165e-01] [-3.24396700e-01 -3.24396700e-01 1.46456826e+00 ... -5.38503900e-02 5.07284284e-01 8.03090110e-02] [ 2.27925628e-01 2.27925628e-01 -1.06618130e+00 ... 1.01973164e+00 -5.93939602e-01 -8.15131187e-01]] [[ 4.82822299e-01 4.82822299e-01 -1.01659620e+00 ... 1.03291404e+00 2.65439343e+00 -1.19574273e+00] [ 3.40289980e-01 3.40289980e-01 -3.72530878e-01 ... -2.89780814e-02 -1.29235375e+00 -1.97262275e+00] [ 4.82891977e-01 4.82891977e-01 2.39784169e+00 ... -6.25152230e-01 5.38257184e-04 3.09938997e-01] ... [ 8.03835094e-01 8.03835094e-01 -6.86416402e-02 ... 5.84531784e-01 -3.74834478e-01 1.06573975e+00] [-4.75995451e-01 -4.75995451e-01 9.58739638e-01 ... 8.92595351e-01 -1.23221040e+00 3.28504443e-01] [-1.89120376e+00 -1.89120376e+00 -1.10856545e+00 ... 4.30638017e-03 -6.10289946e-02 -1.96731651e+00]] ... [[ 5.94225883e-01 5.94225883e-01 -2.39276975e-01 ... -7.96687305e-01 9.69988823e-01 -6.39257550e-01] [ 6.87435150e-01 6.87435150e-01 3.78267914e-01 ... -1.03712857e+00 -9.11985874e-01 -3.43551069e-01] [-4.33008701e-01 -4.33008701e-01 -9.91427362e-01 ... 2.32202148e+00 1.10154286e-01 -2.58635491e-01] ... 
[-3.40505131e-02 -3.40505131e-02 6.69617131e-02 ... 6.42748535e-01 -8.85336995e-01 9.37225163e-01] [-1.00409722e+00 -1.00409722e+00 -4.94264066e-01 ... -1.18928933e+00 -1.16826808e+00 1.67108452e+00] [-2.09255075e+00 -2.09255075e+00 5.11289775e-01 ... -4.46443379e-01 6.34968996e-01 -8.35771620e-01]] [[ 1.78645158e+00 1.78645158e+00 8.96628022e-01 ... 1.35162994e-01 1.92892849e+00 9.71880734e-01] [ 9.89611268e-01 9.89611268e-01 -9.10737574e-01 ... -1.41936827e+00 -6.49016857e-01 9.59445179e-01] [ 6.76008463e-01 6.76008463e-01 1.92157280e+00 ... 2.01332068e+00 5.85332029e-02 -2.52270311e-01] ... [ 3.60711336e-01 3.60711336e-01 -1.58970188e-02 ... 2.74708509e-01 -3.48219156e-01 -1.32323182e+00] [-6.10755682e-01 -6.10755682e-01 5.84180355e-01 ... -1.65315485e+00 1.80075061e+00 -2.03529406e+00] [-3.92888010e-01 -3.92888010e-01 6.76379561e-01 ... 3.37589788e+00 2.29087758e+00 -1.81755280e+00]] [[ 1.78645158e+00 1.78645158e+00 8.96628022e-01 ... 1.35162994e-01 1.92892849e+00 9.71880734e-01] [ 9.89611268e-01 9.89611268e-01 -9.10737574e-01 ... -1.41936827e+00 -6.49016857e-01 9.59445179e-01] [ 6.76008463e-01 6.76008463e-01 1.92157280e+00 ... 2.01332068e+00 5.85332029e-02 -2.52270311e-01] ... [ 3.60711336e-01 3.60711336e-01 -1.58970188e-02 ... 2.74708509e-01 -3.48219156e-01 -1.32323182e+00] [-6.10755682e-01 -6.10755682e-01 5.84180355e-01 ... -1.65315485e+00 1.80075061e+00 -2.03529406e+00] [-3.92888010e-01 -3.92888010e-01 6.76379561e-01 ... 3.37589788e+00 2.29087758e+00 -1.81755280e+00]]] [[[ 3.38796288e-01 3.38796288e-01 9.56254601e-01 ... -5.38189709e-01 4.08502489e-01 5.90214469e-02] [-5.03961504e-01 -5.03961504e-01 -4.41271871e-01 ... 5.07317126e-01 9.88135397e-01 -3.72951180e-01] [-1.05027902e+00 -1.05027902e+00 5.49466848e-01 ... -5.99119723e-01 1.15297878e+00 -2.30346060e+00] ... [-5.13270319e-01 -5.13270319e-01 -2.65661757e-02 ... 1.31728101e+00 -1.47539818e+00 2.76933998e-01] [-3.11295465e-02 -3.11295465e-02 -8.50046396e-01 ... 
-5.99368513e-01 2.18084645e+00 8.04135621e-01] [-1.55050918e-01 -1.55050918e-01 3.18780750e-01 ... -1.42172456e+00 -1.70891881e-02 2.05893829e-01]] [[ 7.80488491e-01 7.80488491e-01 6.58674717e-01 ... 1.01631010e+00 1.34311235e+00 -5.40662944e-01] [-1.07740760e+00 -1.07740760e+00 -1.23462033e+00 ... -1.20205641e+00 2.63601597e-02 1.84155703e+00] [-1.75236344e+00 -1.75236344e+00 -6.74170792e-01 ... -3.30133885e-01 -1.64799368e+00 1.59480488e+00] ... [-4.94487524e-01 -4.94487524e-01 -9.20529757e-03 ... 3.54861915e-02 1.32744193e+00 -7.37382770e-01] [ 5.19604743e-01 5.19604743e-01 1.92078561e-01 ... 8.68592039e-02 -1.52456570e+00 -1.43442118e+00] [ 1.51321387e+00 1.51321387e+00 6.82536185e-01 ... -1.50398874e+00 -3.33479375e-01 -2.60039140e-02]] [[-7.86015272e-01 -7.86015272e-01 5.88888466e-01 ... -1.04248069e-01 -1.17730081e+00 1.94380713e+00] [-2.91042328e-01 -2.91042328e-01 -1.04167295e+00 ... 4.81916487e-01 -2.05878094e-02 -8.28415573e-01] [-7.43586838e-01 -7.43586838e-01 -5.66332936e-01 ... 1.30471253e+00 1.13783360e+00 -7.60181665e-01] ... [-3.26847024e-02 -3.26847024e-02 1.71974862e+00 ... -1.00235391e+00 -1.22087765e+00 -1.41164148e+00] [-9.11579877e-02 -9.11579877e-02 -1.94133914e+00 ... 3.37319411e-02 -8.78544092e-01 -8.81833792e-01] [-1.01333499e+00 -1.01333499e+00 -3.27547282e-01 ... 8.10904562e-01 7.01891035e-02 4.30951923e-01]] ... [[ 1.66634583e+00 1.66634583e+00 -9.22069013e-01 ... 1.20924950e+00 1.29425704e+00 -1.26287150e+00] [ 4.11407426e-02 4.11407426e-02 8.62896442e-01 ... -9.75408927e-02 1.56047791e-01 -1.12446785e+00] [-7.71086931e-01 -7.71086931e-01 5.19353151e-01 ... -1.05361426e+00 1.19960690e+00 6.97331131e-02] ... [-1.03279158e-01 -1.03279158e-01 -2.15111911e-01 ... 1.26123488e+00 4.40332294e-01 -4.80511300e-02] [-1.53829828e-01 -1.53829828e-01 -7.95179844e-01 ... -7.65310466e-01 5.16364157e-01 1.00423777e+00] [ 7.52188265e-02 7.52188265e-02 -3.29384416e-01 ... 
7.18575597e-01 3.73832136e-01 8.06731641e-01]] [[-1.07295144e+00 -1.07295144e+00 -1.68617833e+00 ... -1.14483392e+00 -1.06209147e+00 1.33708751e+00] [-8.95875454e-01 -8.95875454e-01 3.27932924e-01 ... -6.51293457e-01 -1.52349567e+00 1.59513760e+00] [ 1.01166725e+00 1.01166725e+00 -3.04903239e-01 ... -1.09872270e+00 1.64449251e+00 -7.25180060e-02] ... [ 2.01470280e+00 2.01470280e+00 -7.68397033e-01 ... -5.77815950e-01 7.21319318e-01 -3.63184869e-01] [ 2.10034639e-01 2.10034639e-01 -1.54786301e+00 ... 1.22839965e-01 -3.89335930e-01 2.30619967e-01] [-1.83713686e+00 -1.83713686e+00 7.42995083e-01 ... 5.34902632e-01 2.53656477e-01 -2.14368358e-01]] [[-1.07295144e+00 -1.07295144e+00 -1.68617833e+00 ... -1.14483392e+00 -1.06209147e+00 1.33708751e+00] [-8.95875454e-01 -8.95875454e-01 3.27932924e-01 ... -6.51293457e-01 -1.52349567e+00 1.59513760e+00] [ 1.01166725e+00 1.01166725e+00 -3.04903239e-01 ... -1.09872270e+00 1.64449251e+00 -7.25180060e-02] ... [ 2.01470280e+00 2.01470280e+00 -7.68397033e-01 ... -5.77815950e-01 7.21319318e-01 -3.63184869e-01] [ 2.10034639e-01 2.10034639e-01 -1.54786301e+00 ... 1.22839965e-01 -3.89335930e-01 2.30619967e-01] [-1.83713686e+00 -1.83713686e+00 7.42995083e-01 ... 5.34902632e-01 2.53656477e-01 -2.14368358e-01]]] [[[-2.13589907e-01 -2.13589907e-01 -1.72925338e-01 ... 4.48168993e-01 6.81733131e-01 1.63054633e+00] [-3.71055514e-01 -3.71055514e-01 -9.78755653e-01 ... 4.99048591e-01 1.48056611e-01 1.62547219e+00] [-2.39118600e+00 -2.39118600e+00 9.36460316e-01 ... 2.02407479e+00 1.05459476e+00 6.35936260e-01] ... [-9.36563015e-01 -9.36563015e-01 2.11175039e-01 ... 9.47013348e-02 -1.48539543e+00 8.57263923e-01] [ 9.21808302e-01 9.21808302e-01 5.87090671e-01 ... 3.22801918e-01 -2.35898042e+00 -7.46092618e-01] [ 1.59992769e-01 1.59992769e-01 1.53230160e-01 ... -4.47238177e-01 -1.57924429e-01 1.56142998e+00]] [[ 2.76878506e-01 2.76878506e-01 4.72143501e-01 ... 
-4.76695560e-02 -7.63285041e-01 5.32722712e-01] [-1.70101368e+00 -1.70101368e+00 8.23657885e-02 ... 8.15988898e-01 -2.15980673e+00 9.57667351e-01] [-2.92615592e-01 -2.92615592e-01 -1.57156444e+00 ... 9.68107224e-01 1.64072186e-01 -5.90083778e-01] ... [ 1.09423685e+00 1.09423685e+00 -1.61283001e-01 ... -1.20298100e+00 -1.02614510e+00 1.23529518e+00] [ 8.78604174e-01 8.78604174e-01 1.74982393e+00 ... -1.58427989e+00 -4.90612090e-01 5.81207611e-02] [-1.52252048e-01 -1.52252048e-01 2.47510761e-01 ... -8.52711260e-01 -5.88680089e-01 1.14889479e+00]] [[-1.00252819e+00 -1.00252819e+00 1.30779386e+00 ... 2.17610136e-01 8.97640109e-01 1.58692881e-01] [-1.23278081e+00 -1.23278081e+00 2.10962677e+00 ... 4.59006429e-01 3.26287709e-02 -1.89230591e-01] [ 2.27789164e+00 2.27789164e+00 3.59154791e-01 ... 1.78627002e+00 2.31942630e+00 1.11321306e+00] ... [ 3.74896049e-01 3.74896049e-01 -6.53156698e-01 ... -1.02398014e+00 -2.50922132e+00 5.36526740e-01] [ 7.22380579e-01 7.22380579e-01 -8.58281851e-01 ... -4.40778583e-01 -9.73274887e-01 7.36803472e-01] [ 8.72808397e-01 8.72808397e-01 -9.63619351e-01 ... -1.25470614e+00 1.12731993e+00 3.55639309e-01]] ... [[ 2.05557442e+00 2.05557442e+00 -7.07092404e-01 ... 1.22174799e+00 -2.09055394e-01 2.08171502e-01] [-4.63743567e-01 -4.63743567e-01 2.16038942e+00 ... -1.44300568e+00 -1.65006292e+00 6.02312982e-01] [ 3.50656956e-01 3.50656956e-01 -2.17130825e-01 ... 1.34712726e-01 1.49281573e+00 1.70373940e+00] ... [ 2.66389072e-01 2.66389072e-01 5.73056102e-01 ... 1.38991654e+00 -2.02308702e+00 -2.87250340e-01] [-1.16132510e+00 -1.16132510e+00 8.30167294e-01 ... 1.16596687e+00 2.81645626e-01 -1.11050355e+00] [-9.38117683e-01 -9.38117683e-01 -5.60900092e-01 ... 5.58493435e-01 1.31114209e+00 -8.89330208e-01]] [[-1.10558951e+00 -1.10558951e+00 1.12805557e+00 ... -5.89391053e-01 1.20009506e+00 -1.79727221e+00] [-6.69285506e-02 -6.69285506e-02 9.89965916e-01 ... 
-2.11712614e-01 1.33740082e-01 -6.87736273e-01] [-8.27056289e-01 -8.27056289e-01 3.40934426e-01 ... 9.91447210e-01 5.21641493e-01 5.49283564e-01] ... [-1.72174060e+00 -1.72174060e+00 1.69143867e+00 ... -9.19850409e-01 -1.65136009e-01 -2.41531059e-01] [ 7.32774198e-01 7.32774198e-01 -3.04123729e-01 ... -4.10821646e-01 1.85835615e-01 1.37942231e+00] [ 5.19725442e-01 5.19725442e-01 -1.70097542e+00 ... 8.69647086e-01 4.74541187e-01 -1.06954545e-01]] [[-1.10558951e+00 -1.10558951e+00 1.12805557e+00 ... -5.89391053e-01 1.20009506e+00 -1.79727221e+00] [-6.69285506e-02 -6.69285506e-02 9.89965916e-01 ... -2.11712614e-01 1.33740082e-01 -6.87736273e-01] [-8.27056289e-01 -8.27056289e-01 3.40934426e-01 ... 9.91447210e-01 5.21641493e-01 5.49283564e-01] ... [-1.72174060e+00 -1.72174060e+00 1.69143867e+00 ... -9.19850409e-01 -1.65136009e-01 -2.41531059e-01] [ 7.32774198e-01 7.32774198e-01 -3.04123729e-01 ... -4.10821646e-01 1.85835615e-01 1.37942231e+00] [ 5.19725442e-01 5.19725442e-01 -1.70097542e+00 ... 8.69647086e-01 4.74541187e-01 -1.06954545e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(0, 0, 0, 0, 0, 0) - mode:replicate - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5545.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="replicate"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[0, 0, 0, 0, 0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 3.1418738 -1.3175095 -1.047034 ... 2.155303 0.29799286 1.4163041 ] [ 2.1820717 1.8803716 1.0326782 ... -1.6461991 1.917684 1.0649378 ] [ 0.2689755 0.1223876 -1.8526192 ... -1.8383996 0.08764891 1.780883 ] ... [-1.4854432 0.82906437 1.2671956 ... 0.25771254 0.92916334 0.6158258 ] [-1.5769964 1.7853898 0.99328864 ... 0.6740336 -0.35933733 0.8133617 ] [-0.4919036 -0.7635366 0.42586598 ... 1.6036384 0.38292953 -0.72885615]] [[-0.19668616 -1.1195916 -0.55970764 ... -1.2767522 -0.13027155 2.3114617 ] [ 0.38772994 -0.01475831 -0.26203004 ... 0.10162561 0.39511764 -1.601416 ] [-0.19303866 0.9140088 0.3924392 ... 0.39076927 1.8409806 0.23313002] ... [-0.6248132 -0.37991336 -0.04006537 ... -0.9250941 1.728728 -1.579141 ] [ 1.0368147 1.4550552 1.2165592 ... 0.2360074 1.2289985 -0.48011565] [-1.9308715 -1.4591707 0.18970649 ... 0.5698642 1.1904445 -1.0383216 ]] [[-0.5555358 -1.1048006 0.26963514 ... -0.0702633 0.29734766 -0.6887827 ] [-0.27039906 0.8330966 -2.2475407 ... 1.0203098 0.6296482 -0.33441666] [-0.65776724 -1.6237019 0.12536068 ... 1.8310089 -0.00864617 1.0054361 ] ... [ 0.2401581 0.32741112 -0.28476864 ... -0.38740417 -0.30253905 1.9389338 ] [-1.3459362 -2.184247 -0.34441528 ... 0.8183341 0.0707744 -1.1237729 ] [ 0.52232254 -0.17325643 0.7801552 ... -0.29711393 -0.7029025 0.6257331 ]] ... [[ 0.0808095 -1.7047862 -0.700875 ... 0.541986 0.5450709 -0.7279213 ] [ 2.2747755 -2.2074502 0.29203457 ... -1.1165669 0.09858319 0.13514295] [ 0.24301691 0.5215226 0.81906724 ... 
0.9225812 0.36550725 0.68271494] ... [ 0.32157692 0.7119395 -0.436981 ... 0.7585001 -0.7272282 -1.3786187 ] [ 1.3109374 -0.05922608 0.5599012 ... 0.23317112 -1.8243809 1.319219 ] [ 0.0095611 0.49668366 -1.155145 ... -1.1675617 -1.9377241 2.0459847 ]] [[ 0.5922907 -0.27841103 1.7244313 ... -0.16651045 0.3083695 -0.49205548] [ 0.3922765 0.23110527 0.92845124 ... -0.12540108 -0.7223349 0.47668737] [ 0.7240861 0.30024087 0.13734587 ... 0.24533755 -0.8753795 -0.21233764] ... [ 1.1375058 2.4578843 0.33732674 ... 0.49473733 -1.3981476 -0.97141606] [ 0.9262893 1.8921933 -1.6231215 ... -0.67520934 -0.26338843 -0.0312997 ] [-1.7107064 0.61265105 -1.7651387 ... -0.15947443 0.10210667 -0.5028807 ]] [[-2.2209368 0.51955956 -1.5030717 ... -1.7374318 -0.29894263 0.31123206] [-2.0254805 -1.6456374 0.47785714 ... 0.41061562 -0.42820597 -1.235421 ] [-0.7318013 1.0894586 -0.6747377 ... 1.6941864 -0.32382807 0.27049547] ... [ 0.09760158 0.08894959 0.62075925 ... 0.3657809 -0.6831801 0.18831484] [ 0.35891077 -1.3708715 0.5163805 ... -1.0944906 -1.7060127 0.08600013] [ 1.7886573 -1.6124107 -0.32742158 ... -0.6750325 -0.7989052 -1.41221 ]]] [[[ 0.16429727 0.17010106 0.88401455 ... -0.28774172 0.05264647 0.6557171 ] [ 1.0195539 2.3399963 0.15432788 ... 1.6068356 -1.8044453 -0.21396117] [-0.5577954 -0.7093825 -1.0240896 ... -1.0532614 0.7474384 0.1828565 ] ... [ 1.9940367 0.8342226 -1.2629186 ... -0.17690861 0.26937732 -0.22714746] [-2.2023592 0.70691717 1.5145702 ... -0.56947863 -0.15684453 -0.71337277] [-1.5169106 -0.7489712 0.59743947 ... -0.9493592 -0.12236647 0.5514883 ]] [[ 0.7210629 1.378698 1.3823566 ... 1.4236826 -2.4528775 0.20696023] [ 0.09645406 1.5693719 -1.1542523 ... 0.29119676 -0.10501087 -2.1981387 ] [ 0.33367562 1.0811425 0.5939868 ... 0.23601149 -0.05152496 0.5196303 ] ... [ 0.96835697 -0.6120273 0.79444927 ... -1.2670403 -1.3268825 0.33678582] [ 1.2034379 -0.7522281 -0.8954499 ... -0.6279073 -1.3641592 -0.19408704] [-1.0329311 1.6157067 -1.0762005 ... 
1.431555 0.2145567 -0.7484894 ]] [[ 0.8547042 -0.83864236 0.5511606 ... 0.23071586 0.1372105 2.1706936 ] [-0.40814915 -0.04162762 0.6056227 ... -1.2681447 -1.1816568 0.44070348] [ 1.3858541 -0.4882301 -1.0578473 ... 1.156612 0.21018466 1.2356355 ] ... [-1.4458086 0.01789526 -0.12601365 ... -0.02989009 0.5618795 0.36559385] [ 0.392692 -0.10436978 0.68273526 ... -0.9614022 -0.12765019 2.0933628 ] [ 0.7165524 0.5435171 -1.4009128 ... -0.21717572 -0.6231054 0.7427859 ]] ... [[-1.1069522 1.2687044 -1.1322261 ... -0.26483902 -0.15867327 -0.5201746 ] [ 2.1064012 0.08967715 -0.56143683 ... 0.7005792 -0.10928418 0.9959893 ] [-2.5842154 -0.06604864 0.47474724 ... 1.2844337 -0.9493862 0.00824513] ... [ 0.24907213 -0.47046918 0.37412938 ... -1.8868943 0.13674888 0.6361798 ] [ 0.24953602 -0.75784206 -1.3043473 ... 1.0117178 0.15730922 0.10131124] [-0.53351486 -0.10458355 0.0784675 ... 0.02763698 0.21292959 -1.1517522 ]] [[-0.01439395 -1.2779303 -0.21255974 ... 0.30149227 0.2379293 0.5879997 ] [-0.06558529 0.06323406 0.30920407 ... -1.9793835 1.3040535 -0.7363337 ] [ 0.14101835 1.3297198 -0.14876145 ... 1.8800498 0.09660137 0.9816979 ] ... [ 0.7313291 0.76365906 -0.5675193 ... -0.25941437 -0.4032061 2.4963112 ] [-0.75277066 -0.3137196 -0.06569705 ... 0.10655798 -1.6009846 -0.45391077] [-0.07764955 0.21298082 0.78397816 ... -0.19775127 0.3576738 -0.10120405]] [[ 1.1724193 -1.4201112 -1.5338064 ... 0.8024434 -0.89461124 0.5492112 ] [-1.3998606 -0.27104282 -0.88245565 ... 0.50052094 -0.11639076 0.1657852 ] [ 0.637965 -0.2421304 1.6467538 ... -0.5747477 0.36161754 -0.17435949] ... [-0.06140063 -0.15572768 1.0082256 ... -0.647367 0.34032086 -0.10356621] [-0.31117183 -1.4029742 -0.92450094 ... 0.677415 0.49514183 0.02188687] [-0.21323116 -0.30198684 2.4149735 ... 0.8310857 1.4760542 -0.48816326]]] [[[-1.2996657 -1.5160073 -0.8133221 ... 0.3559736 -1.2665026 0.78673357] [ 0.9982421 1.0980304 1.0949264 ... 1.2244965 -1.1058625 -1.1218708 ] [ 0.05614098 -0.69517046 -0.5758544 ... 
2.8567054 0.23400857 0.5835535 ] ... [ 0.01159595 2.4025726 -0.7007786 ... -0.07608225 -0.46650973 -1.2968043 ] [-2.2277699 2.0305364 -1.5111084 ... 0.15066893 -0.36694166 0.69543624] [ 0.31148103 -1.9032723 -0.5694718 ... -0.6909957 -0.5619407 0.7396623 ]] [[-0.659777 -1.3691978 0.47468317 ... -0.7664174 0.39651918 1.890478 ] [-1.1847482 -0.43611127 -0.6457182 ... -0.4458186 -0.97610503 0.27884883] [-1.1730884 -0.780059 -0.4031795 ... 0.02873391 -0.10667942 -0.06869755] ... [ 0.24287072 1.0388231 0.2848249 ... 0.2450907 1.5414912 -0.36041963] [ 1.0231389 0.48627 -0.20749182 ... -0.2949488 -0.6362992 1.3425323 ] [ 0.31608012 -1.2194831 0.29719245 ... -0.6465133 0.21553174 -1.6381875 ]] [[-1.1276141 -0.17793459 1.5292544 ... -0.9763032 1.2663729 -1.9543262 ] [ 2.0882995 0.26528177 -0.18414873 ... 1.1721586 0.80961645 0.45919955] [-0.29256377 -0.21251334 -0.33133447 ... 0.710937 0.36876243 0.3209454 ] ... [-0.5589898 0.08976896 1.1578516 ... 0.24941434 1.0112439 -1.9782038 ] [-1.3433837 -0.0382057 0.4784365 ... 0.6428831 -0.70054084 -0.18131752] [-0.9631765 -0.08959469 -1.7329339 ... -0.8753555 -0.68851864 0.6890935 ]] ... [[-0.23686692 0.05616917 -1.7014786 ... -0.68544877 0.8406561 1.9984665 ] [-0.04751188 -1.0779969 1.0651693 ... -1.7350372 0.43861026 -1.7451715 ] [-0.85853356 -1.2716181 -0.30749327 ... -0.5272517 -1.5051942 0.33370772] ... [-0.21825054 -0.59926456 0.61179 ... -0.9895142 -0.6633571 -0.31064042] [ 0.70814794 -0.35823643 0.58957034 ... -0.1419883 0.6707083 -1.2221881 ] [-0.773839 0.9053046 0.605524 ... -0.58054364 -0.02183245 1.1070977 ]] [[-0.53024805 -1.8470933 0.16887864 ... 1.2740176 -0.8614189 -0.01199526] [-0.03897613 1.344567 -0.0511247 ... -0.11155405 1.602127 0.17930599] [-1.0359143 1.448485 0.60438746 ... 0.38447413 0.7963709 -1.4891137 ] ... [-0.23964325 0.9729904 0.23382936 ... -0.74882126 -0.8162557 1.0613286 ] [-0.3205649 -1.1788342 1.1082114 ... -1.2409213 -1.8519272 0.6735319 ] [ 1.1005946 0.0328756 1.3225912 ... 
0.4793315 -0.9344725 -0.86073816]] [[-0.8249059 -0.37055922 -0.03870867 ... -2.269758 -0.6642474 -0.40933916] [-0.45586443 -0.49318692 0.345093 ... 0.14260797 -0.4766371 0.04086022] [ 0.16087015 0.57996905 -0.01665049 ... 0.40510806 -1.393764 0.76457477] ... [-0.52252907 -0.20002092 0.4496966 ... 1.4227171 -0.3230355 0.6397099 ] [-0.11585315 0.33967364 0.37625057 ... 2.321446 1.1596006 0.6718823 ] [-0.13961257 -0.59377456 0.6531288 ... 0.40337387 -0.7867019 -0.37737188]]]]]; ov_res: [[[[[ 3.1418738 -1.3175095 -1.047034 ... 2.155303 0.29799286 1.4163041 ] [ 2.1820717 1.8803716 1.0326782 ... -1.6461991 1.917684 1.0649378 ] [ 0.2689755 0.1223876 -1.8526192 ... -1.8383996 0.08764891 1.780883 ] ... [-1.4854432 0.82906437 1.2671956 ... 0.25771254 0.92916334 0.6158258 ] [-1.5769964 1.7853898 0.99328864 ... 0.6740336 -0.35933733 0.8133617 ] [-0.4919036 -0.7635366 0.42586598 ... 1.6036384 0.38292953 -0.72885615]] [[-0.19668616 -1.1195916 -0.55970764 ... -1.2767522 -0.13027155 2.3114617 ] [ 0.38772994 -0.01475831 -0.26203004 ... 0.10162561 0.39511764 -1.601416 ] [-0.19303866 0.9140088 0.3924392 ... 0.39076927 1.8409806 0.23313002] ... [-0.6248132 -0.37991336 -0.04006537 ... -0.9250941 1.728728 -1.579141 ] [ 1.0368147 1.4550552 1.2165592 ... 0.2360074 1.2289985 -0.48011565] [-1.9308715 -1.4591707 0.18970649 ... 0.5698642 1.1904445 -1.0383216 ]] [[-0.5555358 -1.1048006 0.26963514 ... -0.0702633 0.29734766 -0.6887827 ] [-0.27039906 0.8330966 -2.2475407 ... 1.0203098 0.6296482 -0.33441666] [-0.65776724 -1.6237019 0.12536068 ... 1.8310089 -0.00864617 1.0054361 ] ... [ 0.2401581 0.32741112 -0.28476864 ... -0.38740417 -0.30253905 1.9389338 ] [-1.3459362 -2.184247 -0.34441528 ... 0.8183341 0.0707744 -1.1237729 ] [ 0.52232254 -0.17325643 0.7801552 ... -0.29711393 -0.7029025 0.6257331 ]] ... [[ 0.0808095 -1.7047862 -0.700875 ... 0.541986 0.5450709 -0.7279213 ] [ 2.2747755 -2.2074502 0.29203457 ... -1.1165669 0.09858319 0.13514295] [ 0.24301691 0.5215226 0.81906724 ... 
0.9225812 0.36550725 0.68271494] ... [ 0.32157692 0.7119395 -0.436981 ... 0.7585001 -0.7272282 -1.3786187 ] [ 1.3109374 -0.05922608 0.5599012 ... 0.23317112 -1.8243809 1.319219 ] [ 0.0095611 0.49668366 -1.155145 ... -1.1675617 -1.9377241 2.0459847 ]] [[ 0.5922907 -0.27841103 1.7244313 ... -0.16651045 0.3083695 -0.49205548] [ 0.3922765 0.23110527 0.92845124 ... -0.12540108 -0.7223349 0.47668737] [ 0.7240861 0.30024087 0.13734587 ... 0.24533755 -0.8753795 -0.21233764] ... [ 1.1375058 2.4578843 0.33732674 ... 0.49473733 -1.3981476 -0.97141606] [ 0.9262893 1.8921933 -1.6231215 ... -0.67520934 -0.26338843 -0.0312997 ] [-1.7107064 0.61265105 -1.7651387 ... -0.15947443 0.10210667 -0.5028807 ]] [[-2.2209368 0.51955956 -1.5030717 ... -1.7374318 -0.29894263 0.31123206] [-2.0254805 -1.6456374 0.47785714 ... 0.41061562 -0.42820597 -1.235421 ] [-0.7318013 1.0894586 -0.6747377 ... 1.6941864 -0.32382807 0.27049547] ... [ 0.09760158 0.08894959 0.62075925 ... 0.3657809 -0.6831801 0.18831484] [ 0.35891077 -1.3708715 0.5163805 ... -1.0944906 -1.7060127 0.08600013] [ 1.7886573 -1.6124107 -0.32742158 ... -0.6750325 -0.7989052 -1.41221 ]]] [[[ 0.16429727 0.17010106 0.88401455 ... -0.28774172 0.05264647 0.6557171 ] [ 1.0195539 2.3399963 0.15432788 ... 1.6068356 -1.8044453 -0.21396117] [-0.5577954 -0.7093825 -1.0240896 ... -1.0532614 0.7474384 0.1828565 ] ... [ 1.9940367 0.8342226 -1.2629186 ... -0.17690861 0.26937732 -0.22714746] [-2.2023592 0.70691717 1.5145702 ... -0.56947863 -0.15684453 -0.71337277] [-1.5169106 -0.7489712 0.59743947 ... -0.9493592 -0.12236647 0.5514883 ]] [[ 0.7210629 1.378698 1.3823566 ... 1.4236826 -2.4528775 0.20696023] [ 0.09645406 1.5693719 -1.1542523 ... 0.29119676 -0.10501087 -2.1981387 ] [ 0.33367562 1.0811425 0.5939868 ... 0.23601149 -0.05152496 0.5196303 ] ... [ 0.96835697 -0.6120273 0.79444927 ... -1.2670403 -1.3268825 0.33678582] [ 1.2034379 -0.7522281 -0.8954499 ... -0.6279073 -1.3641592 -0.19408704] [-1.0329311 1.6157067 -1.0762005 ... 
1.431555 0.2145567 -0.7484894 ]] [[ 0.8547042 -0.83864236 0.5511606 ... 0.23071586 0.1372105 2.1706936 ] [-0.40814915 -0.04162762 0.6056227 ... -1.2681447 -1.1816568 0.44070348] [ 1.3858541 -0.4882301 -1.0578473 ... 1.156612 0.21018466 1.2356355 ] ... [-1.4458086 0.01789526 -0.12601365 ... -0.02989009 0.5618795 0.36559385] [ 0.392692 -0.10436978 0.68273526 ... -0.9614022 -0.12765019 2.0933628 ] [ 0.7165524 0.5435171 -1.4009128 ... -0.21717572 -0.6231054 0.7427859 ]] ... [[-1.1069522 1.2687044 -1.1322261 ... -0.26483902 -0.15867327 -0.5201746 ] [ 2.1064012 0.08967715 -0.56143683 ... 0.7005792 -0.10928418 0.9959893 ] [-2.5842154 -0.06604864 0.47474724 ... 1.2844337 -0.9493862 0.00824513] ... [ 0.24907213 -0.47046918 0.37412938 ... -1.8868943 0.13674888 0.6361798 ] [ 0.24953602 -0.75784206 -1.3043473 ... 1.0117178 0.15730922 0.10131124] [-0.53351486 -0.10458355 0.0784675 ... 0.02763698 0.21292959 -1.1517522 ]] [[-0.01439395 -1.2779303 -0.21255974 ... 0.30149227 0.2379293 0.5879997 ] [-0.06558529 0.06323406 0.30920407 ... -1.9793835 1.3040535 -0.7363337 ] [ 0.14101835 1.3297198 -0.14876145 ... 1.8800498 0.09660137 0.9816979 ] ... [ 0.7313291 0.76365906 -0.5675193 ... -0.25941437 -0.4032061 2.4963112 ] [-0.75277066 -0.3137196 -0.06569705 ... 0.10655798 -1.6009846 -0.45391077] [-0.07764955 0.21298082 0.78397816 ... -0.19775127 0.3576738 -0.10120405]] [[ 1.1724193 -1.4201112 -1.5338064 ... 0.8024434 -0.89461124 0.5492112 ] [-1.3998606 -0.27104282 -0.88245565 ... 0.50052094 -0.11639076 0.1657852 ] [ 0.637965 -0.2421304 1.6467538 ... -0.5747477 0.36161754 -0.17435949] ... [-0.06140063 -0.15572768 1.0082256 ... -0.647367 0.34032086 -0.10356621] [-0.31117183 -1.4029742 -0.92450094 ... 0.677415 0.49514183 0.02188687] [-0.21323116 -0.30198684 2.4149735 ... 0.8310857 1.4760542 -0.48816326]]] [[[-1.2996657 -1.5160073 -0.8133221 ... 0.3559736 -1.2665026 0.78673357] [ 0.9982421 1.0980304 1.0949264 ... 1.2244965 -1.1058625 -1.1218708 ] [ 0.05614098 -0.69517046 -0.5758544 ... 
2.8567054 0.23400857 0.5835535 ] ... [ 0.01159595 2.4025726 -0.7007786 ... -0.07608225 -0.46650973 -1.2968043 ] [-2.2277699 2.0305364 -1.5111084 ... 0.15066893 -0.36694166 0.69543624] [ 0.31148103 -1.9032723 -0.5694718 ... -0.6909957 -0.5619407 0.7396623 ]] [[-0.659777 -1.3691978 0.47468317 ... -0.7664174 0.39651918 1.890478 ] [-1.1847482 -0.43611127 -0.6457182 ... -0.4458186 -0.97610503 0.27884883] [-1.1730884 -0.780059 -0.4031795 ... 0.02873391 -0.10667942 -0.06869755] ... [ 0.24287072 1.0388231 0.2848249 ... 0.2450907 1.5414912 -0.36041963] [ 1.0231389 0.48627 -0.20749182 ... -0.2949488 -0.6362992 1.3425323 ] [ 0.31608012 -1.2194831 0.29719245 ... -0.6465133 0.21553174 -1.6381875 ]] [[-1.1276141 -0.17793459 1.5292544 ... -0.9763032 1.2663729 -1.9543262 ] [ 2.0882995 0.26528177 -0.18414873 ... 1.1721586 0.80961645 0.45919955] [-0.29256377 -0.21251334 -0.33133447 ... 0.710937 0.36876243 0.3209454 ] ... [-0.5589898 0.08976896 1.1578516 ... 0.24941434 1.0112439 -1.9782038 ] [-1.3433837 -0.0382057 0.4784365 ... 0.6428831 -0.70054084 -0.18131752] [-0.9631765 -0.08959469 -1.7329339 ... -0.8753555 -0.68851864 0.6890935 ]] ... [[-0.23686692 0.05616917 -1.7014786 ... -0.68544877 0.8406561 1.9984665 ] [-0.04751188 -1.0779969 1.0651693 ... -1.7350372 0.43861026 -1.7451715 ] [-0.85853356 -1.2716181 -0.30749327 ... -0.5272517 -1.5051942 0.33370772] ... [-0.21825054 -0.59926456 0.61179 ... -0.9895142 -0.6633571 -0.31064042] [ 0.70814794 -0.35823643 0.58957034 ... -0.1419883 0.6707083 -1.2221881 ] [-0.773839 0.9053046 0.605524 ... -0.58054364 -0.02183245 1.1070977 ]] [[-0.53024805 -1.8470933 0.16887864 ... 1.2740176 -0.8614189 -0.01199526] [-0.03897613 1.344567 -0.0511247 ... -0.11155405 1.602127 0.17930599] [-1.0359143 1.448485 0.60438746 ... 0.38447413 0.7963709 -1.4891137 ] ... [-0.23964325 0.9729904 0.23382936 ... -0.74882126 -0.8162557 1.0613286 ] [-0.3205649 -1.1788342 1.1082114 ... -1.2409213 -1.8519272 0.6735319 ] [ 1.1005946 0.0328756 1.3225912 ... 
0.4793315 -0.9344725 -0.86073816]] [[-0.8249059 -0.37055922 -0.03870867 ... -2.269758 -0.6642474 -0.40933916] [-0.45586443 -0.49318692 0.345093 ... 0.14260797 -0.4766371 0.04086022] [ 0.16087015 0.57996905 -0.01665049 ... 0.40510806 -1.393764 0.76457477] ... [-0.52252907 -0.20002092 0.4496966 ... 1.4227171 -0.3230355 0.6397099 ] [-0.11585315 0.33967364 0.37625057 ... 2.321446 1.1596006 0.6718823 ] [-0.13961257 -0.59377456 0.6531288 ... 0.40337387 -0.7867019 -0.37737188]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:constant - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5548.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 
0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]]; ov_res: [[[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 
0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] ... [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:constant - value:42.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5551.aten_pad, %x : Tensor): %2 : float = prim::Constant[value=42.]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] ... [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]]] [[[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 
42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] ... [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]]] [[[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] ... [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]]]]]; ov_res: [[[[[42. 42. 42. ... 42. 42. 42.] [42. 42. 
42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] ... [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]]] [[[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] ... [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. 
... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]]] [[[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] ... [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]] [[42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] ... [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.] [42. 42. 42. ... 42. 42. 42.]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:constant - value:-0.57 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5554.aten_pad, %x : Tensor): %2 : float = prim::Constant[value=-0.56999999999999995]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] ... [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... 
-0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]]] [[[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] ... [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]]] [[[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... 
-0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] ... [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]]]]]; ov_res: [[[[[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... 
-0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] ... [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]]] [[[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] ... [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... 
-0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]]] [[[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] ... [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]] [[-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] ... [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57] [-0.57 -0.57 -0.57 ... -0.57 -0.57 -0.57]]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 2) - mode:constant - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5557.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 0.00000000e+00 -4.77883369e-01 1.19429147e+00 ... 4.67827767e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -4.94068302e-02 1.37681520e+00 ... -1.05780447e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.09610128e+00 -7.12875873e-02 ... -3.31666142e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 1.75397980e+00 8.75514895e-02 ... -1.33127558e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.24204743e+00 1.17219603e+00 ... -5.80627806e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -6.98197246e-01 7.93605566e-01 ... 1.53159779e-02 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -4.28726971e-01 -1.40487862e+00 ... 5.19039929e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -8.81079316e-01 1.09071028e+00 ... 2.41508588e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.82343483e-01 -3.37027431e-01 ... -1.01240385e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 9.84510601e-01 -7.84889758e-01 ... -2.99659818e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.17889786e+00 -1.62343800e+00 ... -1.21512973e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.21254098e+00 -2.69464850e+00 ... -2.20650854e-03 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -9.22245443e-01 -4.43050358e-03 ... -2.77584642e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.43621254e+00 -3.34801525e-01 ... -1.85581779e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -6.16483204e-02 5.22110581e-01 ... -8.32294583e-01 0.00000000e+00 0.00000000e+00] ... 
[ 0.00000000e+00 -6.62549913e-01 -1.22516167e+00 ... -8.09027851e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 5.99283457e-01 3.73173773e-01 ... 5.72687745e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.38107252e+00 6.09993637e-01 ... 6.67063415e-01 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 -5.23917615e-01 -5.83365381e-01 ... -5.00656247e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.31507826e+00 2.54294246e-01 ... 4.34500724e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.35543251e+00 1.17422783e+00 ... 7.38608956e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 1.51277138e-02 1.40693855e+00 ... 9.55484211e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -4.24756587e-01 2.11752295e+00 ... -1.08198905e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.51092386e+00 -8.43763530e-01 ... 6.20547593e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 9.65139389e-01 -1.22652853e+00 ... -7.82874882e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.04716897e+00 3.10355544e-01 ... 7.68879335e-03 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.76477619e-02 1.09627414e+00 ... -7.49740124e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 2.41403192e-01 5.29627269e-03 ... 4.87882584e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.34652564e-01 2.80382168e-02 ... -1.67806536e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -3.16926450e-01 -7.82583296e-01 ... -1.08043879e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 1.31069756e+00 7.28390694e-01 ... 3.08747739e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.24416012e-01 3.63085747e-01 ... -6.24224186e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.86739051e-01 -2.06239033e+00 ... 1.52666914e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.67917705e+00 2.43406355e-01 ... 4.73175496e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.05051339e-01 -5.79413831e-01 ... 
-1.17608976e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.40251005e+00 -1.64235616e+00 ... 1.19672632e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 -1.03656971e+00 -6.20090663e-01 ... 1.76031184e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.01000917e+00 -1.12383401e+00 ... 3.33103202e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.23073304e-01 1.43818069e+00 ... -5.53019404e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 1.16171765e+00 7.18374252e-02 ... -1.61555421e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.86669338e-01 5.49058616e-02 ... -3.22300136e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.13993037e+00 -4.72910792e-01 ... 2.98703492e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 1.20280397e+00 1.08560944e+00 ... 1.35338902e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -4.39918190e-01 8.60011101e-01 ... -1.01917350e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.05650567e-01 8.02738726e-01 ... 1.01298794e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 5.65544844e-01 5.31665564e-01 ... 7.95922816e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.80575097e-01 1.85686320e-01 ... -7.11816728e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.15634513e+00 -2.32694459e+00 ... -3.00776839e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -1.07749641e+00 4.87467825e-01 ... 4.67387885e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.15572453e+00 4.59467590e-01 ... 7.77464926e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.42288136e+00 1.53608963e-01 ... -1.30755436e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 4.07140434e-01 -2.63790525e-02 ... 1.04786366e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -3.29342037e-01 -1.00579977e-01 ... 2.39640772e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.30950046e+00 -3.74761283e-01 ... 1.42547399e-01 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 -9.25652325e-01 -3.99343759e-01 ... 
3.70112777e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 5.16981721e-01 1.31408286e+00 ... 3.50822508e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.80010998e+00 -1.05438185e+00 ... -2.40922973e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.33101022e+00 1.78745556e+00 ... -6.26299083e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.95642352e-01 -8.53206635e-01 ... -9.48002696e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.25539517e+00 -8.41926098e-01 ... -2.88260400e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -3.48406971e-01 -9.52431381e-01 ... 1.86924946e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -3.82717013e-01 -4.08787102e-01 ... 6.21855259e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.25706393e-01 1.46760595e+00 ... 2.55890012e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.38968730e+00 9.42866147e-01 ... -2.80963033e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.71033606e-01 -6.82482064e-01 ... -6.49179995e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.46112394e-01 -8.74766037e-02 ... 9.10738826e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -1.59501684e+00 6.14658780e-02 ... 1.04577267e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.71563283e-01 -1.21011734e+00 ... 1.06618190e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 5.05857944e-01 -2.94472188e-01 ... 8.71536195e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 1.02146074e-01 1.37280667e+00 ... -3.52308333e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.33848953e-01 3.06119978e-01 ... 1.37984347e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.49865592e+00 -5.63642919e-01 ... 6.88054621e-01 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 -7.73462832e-01 -1.72699392e+00 ... -7.20711231e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.30306447e-01 8.51281643e-01 ... -1.53296161e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -6.12223268e-01 -1.46745995e-01 ... 
9.14226472e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -7.93220520e-01 -6.78544760e-01 ... -8.65797102e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.80279493e+00 -2.44733262e+00 ... 7.66892970e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -7.44347200e-02 1.03261518e+00 ... 1.69288695e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 1.17632771e+00 -2.91119814e-01 ... 1.12658727e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.24543369e+00 -1.50721240e+00 ... 3.58355284e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.65501249e+00 7.14818120e-01 ... 6.02578342e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 1.30911386e+00 -2.20279360e+00 ... -1.06544483e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.54267268e-02 -4.33859617e-01 ... -8.01590502e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.23464979e-01 -9.06306326e-01 ... 1.19503796e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -1.01125348e+00 3.68274540e-01 ... 2.35438299e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.14344835e+00 -1.30568540e+00 ... -1.54276717e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.04650480e-01 -3.32942337e-01 ... 7.47505069e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -2.22496450e-01 2.12168074e+00 ... 1.07199097e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.85214841e-01 1.07693911e+00 ... 1.26753068e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.11160505e+00 -3.46903443e-01 ... 2.31093216e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 -1.10253908e-01 -1.59392521e-01 ... 7.14776516e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.47360468e+00 4.88604724e-01 ... -2.25737238e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.82518208e-01 2.56959081e-01 ... -3.48583370e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -3.54351282e-01 -2.76719600e-01 ... 5.23550272e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.65429413e+00 -1.28075063e+00 ... 
9.54635561e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -8.83535743e-01 1.82266986e+00 ... 4.85611111e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -8.33620548e-01 4.77618814e-01 ... 1.30867827e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.53682745e+00 -1.15612999e-01 ... 7.26736486e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.45593622e-01 -2.04396889e-01 ... -6.11256948e-03 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 4.71694887e-01 5.62070131e-01 ... -1.85664630e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.59226489e-01 1.60230839e+00 ... -4.70751941e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.68936425e-01 -2.68813103e-01 ... 1.63266137e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -8.71673942e-01 -1.96253687e-01 ... -1.79754972e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -6.57257438e-01 -1.68526486e-01 ... 4.93780911e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.16040313e+00 -4.11583304e-01 ... -4.36484724e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 5.48310101e-01 -5.39274454e-01 ... -3.29103023e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.21670985e-02 4.25663203e-01 ... -2.45947576e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.03832459e+00 9.49683607e-01 ... 7.63359427e-01 0.00000000e+00 0.00000000e+00]]]]]; ov_res: [[[[[ 0.00000000e+00 -4.77883369e-01 1.19429147e+00 ... 4.67827767e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -4.94068302e-02 1.37681520e+00 ... -1.05780447e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.09610128e+00 -7.12875873e-02 ... -3.31666142e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 1.75397980e+00 8.75514895e-02 ... -1.33127558e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.24204743e+00 1.17219603e+00 ... -5.80627806e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -6.98197246e-01 7.93605566e-01 ... 
1.53159779e-02 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -4.28726971e-01 -1.40487862e+00 ... 5.19039929e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -8.81079316e-01 1.09071028e+00 ... 2.41508588e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.82343483e-01 -3.37027431e-01 ... -1.01240385e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 9.84510601e-01 -7.84889758e-01 ... -2.99659818e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.17889786e+00 -1.62343800e+00 ... -1.21512973e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.21254098e+00 -2.69464850e+00 ... -2.20650854e-03 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -9.22245443e-01 -4.43050358e-03 ... -2.77584642e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.43621254e+00 -3.34801525e-01 ... -1.85581779e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -6.16483204e-02 5.22110581e-01 ... -8.32294583e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -6.62549913e-01 -1.22516167e+00 ... -8.09027851e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 5.99283457e-01 3.73173773e-01 ... 5.72687745e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.38107252e+00 6.09993637e-01 ... 6.67063415e-01 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 -5.23917615e-01 -5.83365381e-01 ... -5.00656247e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.31507826e+00 2.54294246e-01 ... 4.34500724e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.35543251e+00 1.17422783e+00 ... 7.38608956e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 1.51277138e-02 1.40693855e+00 ... 9.55484211e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -4.24756587e-01 2.11752295e+00 ... -1.08198905e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.51092386e+00 -8.43763530e-01 ... 6.20547593e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 9.65139389e-01 -1.22652853e+00 ... -7.82874882e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.04716897e+00 3.10355544e-01 ... 
7.68879335e-03 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.76477619e-02 1.09627414e+00 ... -7.49740124e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 2.41403192e-01 5.29627269e-03 ... 4.87882584e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.34652564e-01 2.80382168e-02 ... -1.67806536e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -3.16926450e-01 -7.82583296e-01 ... -1.08043879e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 1.31069756e+00 7.28390694e-01 ... 3.08747739e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.24416012e-01 3.63085747e-01 ... -6.24224186e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.86739051e-01 -2.06239033e+00 ... 1.52666914e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.67917705e+00 2.43406355e-01 ... 4.73175496e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.05051339e-01 -5.79413831e-01 ... -1.17608976e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.40251005e+00 -1.64235616e+00 ... 1.19672632e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 -1.03656971e+00 -6.20090663e-01 ... 1.76031184e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.01000917e+00 -1.12383401e+00 ... 3.33103202e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.23073304e-01 1.43818069e+00 ... -5.53019404e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 1.16171765e+00 7.18374252e-02 ... -1.61555421e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.86669338e-01 5.49058616e-02 ... -3.22300136e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.13993037e+00 -4.72910792e-01 ... 2.98703492e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 1.20280397e+00 1.08560944e+00 ... 1.35338902e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -4.39918190e-01 8.60011101e-01 ... -1.01917350e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.05650567e-01 8.02738726e-01 ... 1.01298794e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 5.65544844e-01 5.31665564e-01 ... 
7.95922816e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.80575097e-01 1.85686320e-01 ... -7.11816728e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.15634513e+00 -2.32694459e+00 ... -3.00776839e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -1.07749641e+00 4.87467825e-01 ... 4.67387885e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.15572453e+00 4.59467590e-01 ... 7.77464926e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.42288136e+00 1.53608963e-01 ... -1.30755436e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 4.07140434e-01 -2.63790525e-02 ... 1.04786366e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -3.29342037e-01 -1.00579977e-01 ... 2.39640772e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.30950046e+00 -3.74761283e-01 ... 1.42547399e-01 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 -9.25652325e-01 -3.99343759e-01 ... 3.70112777e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 5.16981721e-01 1.31408286e+00 ... 3.50822508e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.80010998e+00 -1.05438185e+00 ... -2.40922973e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.33101022e+00 1.78745556e+00 ... -6.26299083e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 6.95642352e-01 -8.53206635e-01 ... -9.48002696e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.25539517e+00 -8.41926098e-01 ... -2.88260400e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -3.48406971e-01 -9.52431381e-01 ... 1.86924946e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -3.82717013e-01 -4.08787102e-01 ... 6.21855259e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.25706393e-01 1.46760595e+00 ... 2.55890012e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -1.38968730e+00 9.42866147e-01 ... -2.80963033e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.71033606e-01 -6.82482064e-01 ... -6.49179995e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.46112394e-01 -8.74766037e-02 ... 
9.10738826e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -1.59501684e+00 6.14658780e-02 ... 1.04577267e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.71563283e-01 -1.21011734e+00 ... 1.06618190e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 5.05857944e-01 -2.94472188e-01 ... 8.71536195e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 1.02146074e-01 1.37280667e+00 ... -3.52308333e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 3.33848953e-01 3.06119978e-01 ... 1.37984347e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.49865592e+00 -5.63642919e-01 ... 6.88054621e-01 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 -7.73462832e-01 -1.72699392e+00 ... -7.20711231e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.30306447e-01 8.51281643e-01 ... -1.53296161e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -6.12223268e-01 -1.46745995e-01 ... 9.14226472e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -7.93220520e-01 -6.78544760e-01 ... -8.65797102e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.80279493e+00 -2.44733262e+00 ... 7.66892970e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -7.44347200e-02 1.03261518e+00 ... 1.69288695e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 1.17632771e+00 -2.91119814e-01 ... 1.12658727e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.24543369e+00 -1.50721240e+00 ... 3.58355284e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.65501249e+00 7.14818120e-01 ... 6.02578342e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 1.30911386e+00 -2.20279360e+00 ... -1.06544483e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.54267268e-02 -4.33859617e-01 ... -8.01590502e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.23464979e-01 -9.06306326e-01 ... 1.19503796e+00 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -1.01125348e+00 3.68274540e-01 ... 2.35438299e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -1.14344835e+00 -1.30568540e+00 ... 
-1.54276717e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.04650480e-01 -3.32942337e-01 ... 7.47505069e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -2.22496450e-01 2.12168074e+00 ... 1.07199097e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.85214841e-01 1.07693911e+00 ... 1.26753068e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.11160505e+00 -3.46903443e-01 ... 2.31093216e+00 0.00000000e+00 0.00000000e+00]] ... [[ 0.00000000e+00 -1.10253908e-01 -1.59392521e-01 ... 7.14776516e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.47360468e+00 4.88604724e-01 ... -2.25737238e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.82518208e-01 2.56959081e-01 ... -3.48583370e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 -3.54351282e-01 -2.76719600e-01 ... 5.23550272e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.65429413e+00 -1.28075063e+00 ... 9.54635561e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -8.83535743e-01 1.82266986e+00 ... 4.85611111e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -8.33620548e-01 4.77618814e-01 ... 1.30867827e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.53682745e+00 -1.15612999e-01 ... 7.26736486e-02 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.45593622e-01 -2.04396889e-01 ... -6.11256948e-03 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 4.71694887e-01 5.62070131e-01 ... -1.85664630e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 4.59226489e-01 1.60230839e+00 ... -4.70751941e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 2.68936425e-01 -2.68813103e-01 ... 1.63266137e-01 0.00000000e+00 0.00000000e+00]] [[ 0.00000000e+00 -8.71673942e-01 -1.96253687e-01 ... -1.79754972e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -6.57257438e-01 -1.68526486e-01 ... 4.93780911e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 1.16040313e+00 -4.11583304e-01 ... -4.36484724e-01 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 5.48310101e-01 -5.39274454e-01 ... 
-3.29103023e-01 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 7.21670985e-02 4.25663203e-01 ... -2.45947576e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 -2.03832459e+00 9.49683607e-01 ... 7.63359427e-01 0.00000000e+00 0.00000000e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 1) - mode:constant - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5560.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 1]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 0.00000000e+00 -2.04232782e-01 -4.16802347e-01 ... 2.28904724e+00 -1.22339940e+00 -1.09298122e+00] [ 0.00000000e+00 6.79713726e-01 4.71942514e-01 ... 5.79860985e-01 1.50251940e-01 7.46613681e-01] [ 0.00000000e+00 1.97423017e+00 4.20454174e-01 ... 1.46091104e-01 8.86630297e-01 7.84063935e-02] ... [ 0.00000000e+00 8.33200037e-01 1.35342300e+00 ... 6.29384160e-01 3.89832616e-01 -1.56489956e+00] [ 0.00000000e+00 -3.35309803e-01 4.87307161e-01 ... -1.80819541e-01 5.74572146e-01 -1.47165215e+00] [ 0.00000000e+00 4.77980882e-01 1.54415047e+00 ... -4.55638409e-01 -2.22389624e-01 1.39728069e+00]] [[ 0.00000000e+00 1.51884913e+00 1.88363150e-01 ... 9.38106235e-03 2.33437978e-02 -6.36989057e-01] [ 0.00000000e+00 -3.52497905e-01 1.32028890e+00 ... -4.98401761e-01 -2.85083503e-01 4.35080618e-01] [ 0.00000000e+00 -1.17789757e+00 -4.03411388e-01 ... -2.44166183e+00 -5.80975413e-02 1.03821509e-01] ... [ 0.00000000e+00 1.43154192e+00 -2.72591924e-03 ... 8.83417964e-01 -1.34066358e-01 -3.66346925e-01] [ 0.00000000e+00 -1.43747425e+00 3.11114997e-01 ... -6.96311712e-01 -1.00332546e+00 -1.93061456e-01] [ 0.00000000e+00 1.91537809e+00 -1.22058451e+00 ... 1.41974437e+00 -1.28740835e+00 -1.48060691e+00]] [[ 0.00000000e+00 5.95852673e-01 -5.86114228e-01 ... -1.64476955e+00 -1.87327564e+00 8.42200518e-01] [ 0.00000000e+00 -6.38751149e-01 4.93189692e-01 ... 8.84060621e-01 1.23005581e+00 3.93446892e-01] [ 0.00000000e+00 5.36835864e-02 -9.42061305e-01 ... 
-5.20507991e-01 -2.16693595e-01 -1.77116239e+00] ... [ 0.00000000e+00 -6.11471772e-01 1.55974776e-01 ... 4.19441462e-01 -1.49527460e-01 -1.97518736e-01] [ 0.00000000e+00 9.66095984e-01 1.64801717e+00 ... 1.27548575e+00 1.27193284e+00 5.16422153e-01] [ 0.00000000e+00 -4.78966504e-01 2.57324249e-01 ... -5.16557753e-01 5.20827413e-01 1.75049508e+00]] ... [[ 0.00000000e+00 -4.88907814e-01 8.34670722e-01 ... -9.35950458e-01 9.87649202e-01 -9.79350030e-01] [ 0.00000000e+00 4.17448953e-02 -4.79195081e-02 ... 1.39413893e-01 -2.00055289e+00 1.14292346e-01] [ 0.00000000e+00 8.25709224e-01 -1.23573554e+00 ... 6.33295119e-01 4.23174173e-01 7.74935961e-01] ... [ 0.00000000e+00 1.96939543e-01 1.79116440e+00 ... 1.30474102e+00 -9.00627896e-02 1.90715992e+00] [ 0.00000000e+00 -3.03872496e-01 9.64276791e-01 ... 1.74727058e+00 -3.37470844e-02 8.60324800e-02] [ 0.00000000e+00 1.67783290e-01 -1.11120629e+00 ... -6.96037650e-01 1.86161771e-01 -9.17318404e-01]] [[ 0.00000000e+00 6.20564580e-01 -1.24569941e+00 ... -1.38434395e-01 -9.91616189e-01 -1.01903784e+00] [ 0.00000000e+00 8.21906388e-01 -6.86327219e-01 ... 7.32056499e-01 4.32705104e-01 -8.21590722e-01] [ 0.00000000e+00 1.49316084e+00 5.13833821e-01 ... -9.24392045e-01 1.32527962e-01 -4.65293616e-01] ... [ 0.00000000e+00 -5.51894546e-01 -4.64964777e-01 ... -8.22216928e-01 -1.93599030e-01 5.65953791e-01] [ 0.00000000e+00 7.01473832e-01 -8.72339487e-01 ... 1.33524859e+00 7.44034946e-01 4.85351324e-01] [ 0.00000000e+00 -5.91184080e-01 -1.85451531e+00 ... 4.64487016e-01 -1.23960483e+00 1.56678367e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 
0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 2.37804458e-01 2.19654489e+00 ... 2.05423877e-01 1.13507974e+00 -1.43705294e-01] [ 0.00000000e+00 -7.71904528e-01 -2.66945601e-01 ... 1.17074656e+00 2.15673178e-01 -2.85071045e-01] [ 0.00000000e+00 -1.86668181e+00 -1.24698460e+00 ... 6.78132176e-01 1.41701603e+00 2.44384572e-01] ... [ 0.00000000e+00 7.34161198e-01 2.40694380e+00 ... 5.46285987e-01 8.96063805e-01 -7.97895432e-01] [ 0.00000000e+00 7.21424103e-01 1.79274008e-03 ... 4.87035483e-01 9.32861269e-01 8.47414136e-01] [ 0.00000000e+00 5.01070142e-01 -2.92771131e-01 ... -4.81736362e-02 2.27975041e-01 -2.22571874e+00]] [[ 0.00000000e+00 3.31336737e-01 5.44080377e-01 ... 3.57892513e-01 -8.04747880e-01 1.07795966e+00] [ 0.00000000e+00 -1.59082413e+00 -1.95957005e+00 ... -2.04587594e-01 1.19223714e+00 -7.04689741e-01] [ 0.00000000e+00 1.47481894e+00 2.66471386e-01 ... 7.45602250e-01 6.70270264e-01 2.00960469e+00] ... [ 0.00000000e+00 -2.54838288e-01 7.63264149e-02 ... -6.00547075e-01 -1.34025142e-01 -1.00452578e+00] [ 0.00000000e+00 8.79952967e-01 1.10266864e+00 ... -8.91204655e-01 4.86541271e-01 1.70359612e-02] [ 0.00000000e+00 -1.68945193e-01 -3.35590929e-01 ... 1.38149168e-02 1.82539773e+00 8.29409420e-01]] [[ 0.00000000e+00 -1.16535127e+00 -3.96321744e-01 ... -2.21912098e+00 -5.17081916e-01 4.47574496e-01] [ 0.00000000e+00 5.15115023e-01 6.18337691e-02 ... -7.43421733e-01 -3.67352188e-01 4.10474330e-01] [ 0.00000000e+00 4.45192277e-01 1.45531714e-01 ... -1.26933825e+00 -1.95063436e+00 1.76114053e-01] ... [ 0.00000000e+00 -8.35023522e-01 -6.05440378e-01 ... 3.43833894e-01 -3.49532105e-02 -4.10314173e-01] [ 0.00000000e+00 -1.68995595e+00 5.18116593e-01 ... 1.86676967e+00 1.78168929e+00 1.15328515e+00] [ 0.00000000e+00 2.29485288e-01 -1.09604442e+00 ... 
-1.20996726e+00 -8.21396589e-01 -1.51291263e+00]] ... [[ 0.00000000e+00 1.38231564e+00 -1.26818156e+00 ... 1.73030913e+00 -4.97181535e-01 8.08000326e-01] [ 0.00000000e+00 -1.08061588e+00 8.25195014e-01 ... 8.02772641e-01 3.71997058e-01 -8.69160056e-01] [ 0.00000000e+00 -5.78826368e-01 5.94998837e-01 ... 6.69309258e-01 5.10562845e-02 -2.00470114e+00] ... [ 0.00000000e+00 1.49305451e+00 -5.17792583e-01 ... -1.35593951e+00 -6.52402878e-01 -2.51943380e-01] [ 0.00000000e+00 -3.95897120e-01 1.91280305e-01 ... -4.64010656e-01 6.33939087e-01 -2.05606431e-01] [ 0.00000000e+00 -4.80406910e-01 9.31016207e-01 ... -1.04683185e+00 -1.18132138e+00 4.49456722e-01]] [[ 0.00000000e+00 -9.34214294e-01 -1.52126217e+00 ... -5.09738684e-01 1.61994621e-01 -1.68829158e-01] [ 0.00000000e+00 -1.66444802e+00 2.01439619e-01 ... -1.78526378e+00 3.29062715e-02 1.70554709e+00] [ 0.00000000e+00 6.37322128e-01 9.43702608e-02 ... 1.11242726e-01 -1.15001917e+00 -2.56670475e-01] ... [ 0.00000000e+00 -7.47318983e-01 1.44280863e+00 ... 5.96014738e-01 1.16271400e+00 -1.36231399e+00] [ 0.00000000e+00 8.74365568e-01 4.33696210e-02 ... -4.95497435e-02 -3.25628817e-01 -3.82012308e-01] [ 0.00000000e+00 1.43267214e-01 -1.04184473e+00 ... 4.41399544e-01 -5.10216773e-01 5.42793348e-02]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 7.26661906e-02 -9.71867859e-01 ... 
5.16154170e-01 4.81224179e-01 1.24410188e+00] [ 0.00000000e+00 -4.69834834e-01 1.22681284e+00 ... 1.01672912e+00 -1.02253854e+00 -1.55870354e+00] [ 0.00000000e+00 1.06880903e+00 7.93708980e-01 ... -2.30127168e+00 2.05798000e-01 1.97914639e-03] ... [ 0.00000000e+00 2.86290616e-01 -2.53428251e-01 ... 7.49894142e-01 -1.65486884e+00 -8.88874054e-01] [ 0.00000000e+00 -4.05900925e-01 3.13953549e-01 ... -1.89169919e+00 3.75663750e-02 8.57378900e-01] [ 0.00000000e+00 -1.91347804e-02 3.04713726e-01 ... -3.21551830e-01 1.48716643e-01 1.84510380e-01]] [[ 0.00000000e+00 -1.26790512e+00 6.24870479e-01 ... 7.09037542e-01 -1.35651898e+00 1.44133091e+00] [ 0.00000000e+00 -1.75645006e+00 -3.85051519e-01 ... -2.45924458e-01 -7.49516726e-01 8.63667011e-01] [ 0.00000000e+00 1.17186822e-01 -7.78596640e-01 ... 1.33669829e+00 1.56149483e+00 1.13563025e+00] ... [ 0.00000000e+00 -6.39721036e-01 -8.11072409e-01 ... 1.12076187e+00 1.43101835e+00 6.47340953e-01] [ 0.00000000e+00 -1.07419312e+00 2.59325647e+00 ... -5.24040520e-01 -2.28046581e-01 -2.69455582e-01] [ 0.00000000e+00 -3.72187018e-01 -2.96384722e-01 ... 8.87530267e-01 1.04611552e+00 7.17563629e-01]] [[ 0.00000000e+00 -7.82779336e-01 -2.73158848e-01 ... -9.93914425e-01 -1.31199300e+00 3.34415466e-01] [ 0.00000000e+00 9.22786236e-01 -2.48870879e-01 ... 2.96501676e-03 1.74611926e-01 3.23351681e-01] [ 0.00000000e+00 -1.96342647e-01 1.01222026e+00 ... -1.85535872e+00 -1.14993680e+00 1.77691853e+00] ... [ 0.00000000e+00 2.38282382e-01 -3.24056409e-02 ... 8.31847966e-01 -5.87866008e-01 2.38097000e+00] [ 0.00000000e+00 -8.64278257e-01 -5.76813459e-01 ... 1.25955963e+00 -7.89078534e-01 6.56063035e-02] [ 0.00000000e+00 1.33979142e+00 -8.80636573e-01 ... 2.00277710e+00 9.03273582e-01 -1.81358844e-01]] ... [[ 0.00000000e+00 -1.93163708e-01 4.41709191e-01 ... 2.00631547e+00 -1.98365208e-02 -8.77661467e-01] [ 0.00000000e+00 -3.50408219e-02 1.96940348e-01 ... 
1.67676735e+00 -7.43350863e-01 -3.46706748e-01] [ 0.00000000e+00 4.73498106e-01 -4.38460499e-01 ... 6.60864949e-01 -1.41403437e+00 -1.08472192e+00] ... [ 0.00000000e+00 4.53273058e-01 7.85058022e-01 ... 8.27733278e-01 -7.60938406e-01 -3.74037147e-01] [ 0.00000000e+00 1.10761416e+00 -2.15355730e+00 ... 5.35614908e-01 1.69296503e-01 -1.14803827e+00] [ 0.00000000e+00 1.17314172e+00 -1.59157491e+00 ... 1.70476747e+00 1.82460304e-02 2.73379874e+00]] [[ 0.00000000e+00 -1.00192332e+00 1.26360357e+00 ... 1.89344957e-01 -7.50696719e-01 -5.33679008e-01] [ 0.00000000e+00 7.53090799e-01 -6.52683794e-01 ... 2.18721464e-01 1.48104966e+00 -7.84882069e-01] [ 0.00000000e+00 -3.51494402e-01 -9.19351697e-01 ... 1.27689862e+00 1.84469461e-01 7.76178062e-01] ... [ 0.00000000e+00 -1.50012314e+00 -5.09015560e-01 ... -1.10082231e-01 2.65037045e-02 -8.83157909e-01] [ 0.00000000e+00 -5.86726487e-01 1.26392066e+00 ... -5.96274376e-01 2.21305251e-01 -7.26549029e-01] [ 0.00000000e+00 6.63627446e-01 -4.18707192e-01 ... 6.10160828e-02 7.05015063e-01 6.57753825e-01]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]]]]; ov_res: [[[[[ 0.00000000e+00 -2.04232782e-01 -4.16802347e-01 ... 2.28904724e+00 -1.22339940e+00 -1.09298122e+00] [ 0.00000000e+00 6.79713726e-01 4.71942514e-01 ... 5.79860985e-01 1.50251940e-01 7.46613681e-01] [ 0.00000000e+00 1.97423017e+00 4.20454174e-01 ... 1.46091104e-01 8.86630297e-01 7.84063935e-02] ... 
[ 0.00000000e+00 8.33200037e-01 1.35342300e+00 ... 6.29384160e-01 3.89832616e-01 -1.56489956e+00] [ 0.00000000e+00 -3.35309803e-01 4.87307161e-01 ... -1.80819541e-01 5.74572146e-01 -1.47165215e+00] [ 0.00000000e+00 4.77980882e-01 1.54415047e+00 ... -4.55638409e-01 -2.22389624e-01 1.39728069e+00]] [[ 0.00000000e+00 1.51884913e+00 1.88363150e-01 ... 9.38106235e-03 2.33437978e-02 -6.36989057e-01] [ 0.00000000e+00 -3.52497905e-01 1.32028890e+00 ... -4.98401761e-01 -2.85083503e-01 4.35080618e-01] [ 0.00000000e+00 -1.17789757e+00 -4.03411388e-01 ... -2.44166183e+00 -5.80975413e-02 1.03821509e-01] ... [ 0.00000000e+00 1.43154192e+00 -2.72591924e-03 ... 8.83417964e-01 -1.34066358e-01 -3.66346925e-01] [ 0.00000000e+00 -1.43747425e+00 3.11114997e-01 ... -6.96311712e-01 -1.00332546e+00 -1.93061456e-01] [ 0.00000000e+00 1.91537809e+00 -1.22058451e+00 ... 1.41974437e+00 -1.28740835e+00 -1.48060691e+00]] [[ 0.00000000e+00 5.95852673e-01 -5.86114228e-01 ... -1.64476955e+00 -1.87327564e+00 8.42200518e-01] [ 0.00000000e+00 -6.38751149e-01 4.93189692e-01 ... 8.84060621e-01 1.23005581e+00 3.93446892e-01] [ 0.00000000e+00 5.36835864e-02 -9.42061305e-01 ... -5.20507991e-01 -2.16693595e-01 -1.77116239e+00] ... [ 0.00000000e+00 -6.11471772e-01 1.55974776e-01 ... 4.19441462e-01 -1.49527460e-01 -1.97518736e-01] [ 0.00000000e+00 9.66095984e-01 1.64801717e+00 ... 1.27548575e+00 1.27193284e+00 5.16422153e-01] [ 0.00000000e+00 -4.78966504e-01 2.57324249e-01 ... -5.16557753e-01 5.20827413e-01 1.75049508e+00]] ... [[ 0.00000000e+00 -4.88907814e-01 8.34670722e-01 ... -9.35950458e-01 9.87649202e-01 -9.79350030e-01] [ 0.00000000e+00 4.17448953e-02 -4.79195081e-02 ... 1.39413893e-01 -2.00055289e+00 1.14292346e-01] [ 0.00000000e+00 8.25709224e-01 -1.23573554e+00 ... 6.33295119e-01 4.23174173e-01 7.74935961e-01] ... [ 0.00000000e+00 1.96939543e-01 1.79116440e+00 ... 1.30474102e+00 -9.00627896e-02 1.90715992e+00] [ 0.00000000e+00 -3.03872496e-01 9.64276791e-01 ... 
1.74727058e+00 -3.37470844e-02 8.60324800e-02] [ 0.00000000e+00 1.67783290e-01 -1.11120629e+00 ... -6.96037650e-01 1.86161771e-01 -9.17318404e-01]] [[ 0.00000000e+00 6.20564580e-01 -1.24569941e+00 ... -1.38434395e-01 -9.91616189e-01 -1.01903784e+00] [ 0.00000000e+00 8.21906388e-01 -6.86327219e-01 ... 7.32056499e-01 4.32705104e-01 -8.21590722e-01] [ 0.00000000e+00 1.49316084e+00 5.13833821e-01 ... -9.24392045e-01 1.32527962e-01 -4.65293616e-01] ... [ 0.00000000e+00 -5.51894546e-01 -4.64964777e-01 ... -8.22216928e-01 -1.93599030e-01 5.65953791e-01] [ 0.00000000e+00 7.01473832e-01 -8.72339487e-01 ... 1.33524859e+00 7.44034946e-01 4.85351324e-01] [ 0.00000000e+00 -5.91184080e-01 -1.85451531e+00 ... 4.64487016e-01 -1.23960483e+00 1.56678367e+00]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 2.37804458e-01 2.19654489e+00 ... 2.05423877e-01 1.13507974e+00 -1.43705294e-01] [ 0.00000000e+00 -7.71904528e-01 -2.66945601e-01 ... 1.17074656e+00 2.15673178e-01 -2.85071045e-01] [ 0.00000000e+00 -1.86668181e+00 -1.24698460e+00 ... 6.78132176e-01 1.41701603e+00 2.44384572e-01] ... [ 0.00000000e+00 7.34161198e-01 2.40694380e+00 ... 5.46285987e-01 8.96063805e-01 -7.97895432e-01] [ 0.00000000e+00 7.21424103e-01 1.79274008e-03 ... 4.87035483e-01 9.32861269e-01 8.47414136e-01] [ 0.00000000e+00 5.01070142e-01 -2.92771131e-01 ... -4.81736362e-02 2.27975041e-01 -2.22571874e+00]] [[ 0.00000000e+00 3.31336737e-01 5.44080377e-01 ... 
3.57892513e-01 -8.04747880e-01 1.07795966e+00] [ 0.00000000e+00 -1.59082413e+00 -1.95957005e+00 ... -2.04587594e-01 1.19223714e+00 -7.04689741e-01] [ 0.00000000e+00 1.47481894e+00 2.66471386e-01 ... 7.45602250e-01 6.70270264e-01 2.00960469e+00] ... [ 0.00000000e+00 -2.54838288e-01 7.63264149e-02 ... -6.00547075e-01 -1.34025142e-01 -1.00452578e+00] [ 0.00000000e+00 8.79952967e-01 1.10266864e+00 ... -8.91204655e-01 4.86541271e-01 1.70359612e-02] [ 0.00000000e+00 -1.68945193e-01 -3.35590929e-01 ... 1.38149168e-02 1.82539773e+00 8.29409420e-01]] [[ 0.00000000e+00 -1.16535127e+00 -3.96321744e-01 ... -2.21912098e+00 -5.17081916e-01 4.47574496e-01] [ 0.00000000e+00 5.15115023e-01 6.18337691e-02 ... -7.43421733e-01 -3.67352188e-01 4.10474330e-01] [ 0.00000000e+00 4.45192277e-01 1.45531714e-01 ... -1.26933825e+00 -1.95063436e+00 1.76114053e-01] ... [ 0.00000000e+00 -8.35023522e-01 -6.05440378e-01 ... 3.43833894e-01 -3.49532105e-02 -4.10314173e-01] [ 0.00000000e+00 -1.68995595e+00 5.18116593e-01 ... 1.86676967e+00 1.78168929e+00 1.15328515e+00] [ 0.00000000e+00 2.29485288e-01 -1.09604442e+00 ... -1.20996726e+00 -8.21396589e-01 -1.51291263e+00]] ... [[ 0.00000000e+00 1.38231564e+00 -1.26818156e+00 ... 1.73030913e+00 -4.97181535e-01 8.08000326e-01] [ 0.00000000e+00 -1.08061588e+00 8.25195014e-01 ... 8.02772641e-01 3.71997058e-01 -8.69160056e-01] [ 0.00000000e+00 -5.78826368e-01 5.94998837e-01 ... 6.69309258e-01 5.10562845e-02 -2.00470114e+00] ... [ 0.00000000e+00 1.49305451e+00 -5.17792583e-01 ... -1.35593951e+00 -6.52402878e-01 -2.51943380e-01] [ 0.00000000e+00 -3.95897120e-01 1.91280305e-01 ... -4.64010656e-01 6.33939087e-01 -2.05606431e-01] [ 0.00000000e+00 -4.80406910e-01 9.31016207e-01 ... -1.04683185e+00 -1.18132138e+00 4.49456722e-01]] [[ 0.00000000e+00 -9.34214294e-01 -1.52126217e+00 ... -5.09738684e-01 1.61994621e-01 -1.68829158e-01] [ 0.00000000e+00 -1.66444802e+00 2.01439619e-01 ... 
-1.78526378e+00 3.29062715e-02 1.70554709e+00] [ 0.00000000e+00 6.37322128e-01 9.43702608e-02 ... 1.11242726e-01 -1.15001917e+00 -2.56670475e-01] ... [ 0.00000000e+00 -7.47318983e-01 1.44280863e+00 ... 5.96014738e-01 1.16271400e+00 -1.36231399e+00] [ 0.00000000e+00 8.74365568e-01 4.33696210e-02 ... -4.95497435e-02 -3.25628817e-01 -3.82012308e-01] [ 0.00000000e+00 1.43267214e-01 -1.04184473e+00 ... 4.41399544e-01 -5.10216773e-01 5.42793348e-02]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]] [[[ 0.00000000e+00 7.26661906e-02 -9.71867859e-01 ... 5.16154170e-01 4.81224179e-01 1.24410188e+00] [ 0.00000000e+00 -4.69834834e-01 1.22681284e+00 ... 1.01672912e+00 -1.02253854e+00 -1.55870354e+00] [ 0.00000000e+00 1.06880903e+00 7.93708980e-01 ... -2.30127168e+00 2.05798000e-01 1.97914639e-03] ... [ 0.00000000e+00 2.86290616e-01 -2.53428251e-01 ... 7.49894142e-01 -1.65486884e+00 -8.88874054e-01] [ 0.00000000e+00 -4.05900925e-01 3.13953549e-01 ... -1.89169919e+00 3.75663750e-02 8.57378900e-01] [ 0.00000000e+00 -1.91347804e-02 3.04713726e-01 ... -3.21551830e-01 1.48716643e-01 1.84510380e-01]] [[ 0.00000000e+00 -1.26790512e+00 6.24870479e-01 ... 7.09037542e-01 -1.35651898e+00 1.44133091e+00] [ 0.00000000e+00 -1.75645006e+00 -3.85051519e-01 ... -2.45924458e-01 -7.49516726e-01 8.63667011e-01] [ 0.00000000e+00 1.17186822e-01 -7.78596640e-01 ... 1.33669829e+00 1.56149483e+00 1.13563025e+00] ... [ 0.00000000e+00 -6.39721036e-01 -8.11072409e-01 ... 
1.12076187e+00 1.43101835e+00 6.47340953e-01] [ 0.00000000e+00 -1.07419312e+00 2.59325647e+00 ... -5.24040520e-01 -2.28046581e-01 -2.69455582e-01] [ 0.00000000e+00 -3.72187018e-01 -2.96384722e-01 ... 8.87530267e-01 1.04611552e+00 7.17563629e-01]] [[ 0.00000000e+00 -7.82779336e-01 -2.73158848e-01 ... -9.93914425e-01 -1.31199300e+00 3.34415466e-01] [ 0.00000000e+00 9.22786236e-01 -2.48870879e-01 ... 2.96501676e-03 1.74611926e-01 3.23351681e-01] [ 0.00000000e+00 -1.96342647e-01 1.01222026e+00 ... -1.85535872e+00 -1.14993680e+00 1.77691853e+00] ... [ 0.00000000e+00 2.38282382e-01 -3.24056409e-02 ... 8.31847966e-01 -5.87866008e-01 2.38097000e+00] [ 0.00000000e+00 -8.64278257e-01 -5.76813459e-01 ... 1.25955963e+00 -7.89078534e-01 6.56063035e-02] [ 0.00000000e+00 1.33979142e+00 -8.80636573e-01 ... 2.00277710e+00 9.03273582e-01 -1.81358844e-01]] ... [[ 0.00000000e+00 -1.93163708e-01 4.41709191e-01 ... 2.00631547e+00 -1.98365208e-02 -8.77661467e-01] [ 0.00000000e+00 -3.50408219e-02 1.96940348e-01 ... 1.67676735e+00 -7.43350863e-01 -3.46706748e-01] [ 0.00000000e+00 4.73498106e-01 -4.38460499e-01 ... 6.60864949e-01 -1.41403437e+00 -1.08472192e+00] ... [ 0.00000000e+00 4.53273058e-01 7.85058022e-01 ... 8.27733278e-01 -7.60938406e-01 -3.74037147e-01] [ 0.00000000e+00 1.10761416e+00 -2.15355730e+00 ... 5.35614908e-01 1.69296503e-01 -1.14803827e+00] [ 0.00000000e+00 1.17314172e+00 -1.59157491e+00 ... 1.70476747e+00 1.82460304e-02 2.73379874e+00]] [[ 0.00000000e+00 -1.00192332e+00 1.26360357e+00 ... 1.89344957e-01 -7.50696719e-01 -5.33679008e-01] [ 0.00000000e+00 7.53090799e-01 -6.52683794e-01 ... 2.18721464e-01 1.48104966e+00 -7.84882069e-01] [ 0.00000000e+00 -3.51494402e-01 -9.19351697e-01 ... 1.27689862e+00 1.84469461e-01 7.76178062e-01] ... [ 0.00000000e+00 -1.50012314e+00 -5.09015560e-01 ... -1.10082231e-01 2.65037045e-02 -8.83157909e-01] [ 0.00000000e+00 -5.86726487e-01 1.26392066e+00 ... 
-5.96274376e-01 2.21305251e-01 -7.26549029e-01] [ 0.00000000e+00 6.63627446e-01 -4.18707192e-01 ... 6.10160828e-02 7.05015063e-01 6.57753825e-01]] [[ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] ... [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00] [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... 0.00000000e+00 0.00000000e+00 0.00000000e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(0, 0, 0, 0, 0, 0) - mode:constant - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5563.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[0, 0, 0, 0, 0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 1.00755942e+00 1.66014242e+00 -1.63126254e+00 ... -1.02633345e+00 1.07736087e+00 4.02702808e-01] [-5.32595634e-01 1.27609980e+00 -2.71368712e-01 ... -4.51277167e-01 -3.55931997e-01 1.40452683e+00] [ 1.20782003e-01 8.28970313e-01 7.20620871e-01 ... -5.00288606e-01 9.42667574e-02 1.08040914e-01] ... [-4.84598726e-01 -8.12482238e-01 -1.44752598e+00 ... -1.59558403e+00 2.64021397e-01 5.08374810e-01] [-1.47956520e-01 -5.52438557e-01 7.65976667e-01 ... -6.76651537e-01 7.48074651e-02 -1.14313841e+00] [-1.10336256e+00 3.55167091e-01 -2.40168348e-01 ... -4.25328873e-02 -5.93180656e-02 6.46627784e-01]] [[ 1.04685508e-01 4.45008546e-01 4.85629916e-01 ... -3.44173938e-01 2.81713605e-01 -1.49939704e+00] [-4.86100703e-01 -4.91713256e-01 3.65749568e-01 ... 1.61815143e+00 5.34296215e-01 9.14010823e-01] [-1.95215535e+00 4.00163829e-01 1.37209690e+00 ... -1.26838648e+00 -3.12935263e-02 -4.34938312e-01] ... [-1.50025654e+00 -3.64260167e-01 5.69313645e-01 ... 8.86327386e-01 -9.60960567e-01 -1.65403783e-02] [ 1.40096307e-01 -1.33800650e+00 -9.40349996e-02 ... -1.50824094e+00 2.52866417e-01 4.69395250e-01] [-9.32727158e-02 1.90390146e+00 2.08329725e+00 ... 9.90886867e-01 9.94693220e-01 -5.29147983e-01]] [[-9.15948033e-01 -1.57149315e-01 -1.51481908e-02 ... 4.28802162e-01 1.04060328e+00 4.16308403e-01] [-2.60406911e-01 -3.56300265e-01 9.79742765e-01 ... 1.38839436e+00 -7.76195228e-02 9.26269174e-01] [ 2.60678474e-02 -2.11536169e-01 -3.65209460e-01 ... 
1.18747878e+00 -7.00549781e-01 1.70573783e+00] ... [-2.22361282e-01 1.26139736e+00 -5.81436083e-02 ... -1.23998213e+00 -1.07632756e+00 6.66565657e-01] [-4.89502758e-01 1.58717716e+00 -3.83774936e-01 ... -4.52468723e-01 2.44048834e+00 -9.79644835e-01] [ 8.59178543e-01 7.82504678e-01 1.31954384e+00 ... -4.10202503e-01 1.34843791e+00 1.65893936e+00]] ... [[-2.83852726e-01 -2.19532456e-02 1.60225078e-01 ... 6.80439830e-01 -5.97363114e-01 -1.12351477e+00] [-3.00775915e-01 8.28543067e-01 -3.18236142e-01 ... 3.37709218e-01 1.27963519e+00 8.55589509e-01] [ 8.08923423e-01 7.04122707e-02 1.07648659e+00 ... 1.38579738e+00 -7.71293104e-01 1.23314545e-01] ... [-1.18983722e+00 -1.82498419e+00 -1.23172648e-01 ... 1.04063344e+00 6.84294939e-01 7.20672607e-01] [ 2.76438922e-01 3.03183705e-01 -6.05018139e-01 ... 1.05280447e+00 1.41784537e+00 2.47075319e-01] [ 4.19647574e-01 1.75389588e+00 -4.78782564e-01 ... 1.24064863e+00 9.20666754e-01 -7.17927635e-01]] [[ 2.03539014e-01 3.05615634e-01 -5.18328846e-01 ... 8.17601621e-01 -1.73908114e-01 2.14944139e-01] [-1.04272321e-01 1.68180621e+00 1.02198935e+00 ... 1.91688493e-01 -3.57150733e-01 -2.05267400e-01] [-7.78932214e-01 7.09244549e-01 5.56076109e-01 ... -1.51933146e+00 -2.64060050e-01 -8.51639569e-01] ... [-1.18389797e+00 -9.00416315e-01 1.12689567e+00 ... 7.24269807e-01 2.30550423e-01 1.33328843e+00] [-9.13921058e-01 -3.23866487e-01 -1.86985111e+00 ... -5.14508821e-02 7.87571549e-01 1.20375700e-01] [ 1.89200246e+00 -2.60015464e+00 6.75732732e-01 ... 4.22869027e-02 5.24438880e-02 -6.19820178e-01]] [[-4.83032644e-01 -1.69418454e-01 -1.41743863e+00 ... 2.09057236e+00 9.15969670e-01 1.30432463e+00] [-8.55805278e-02 1.33335948e+00 -3.55748981e-02 ... -8.76124024e-01 -6.55216634e-01 -2.58109182e-01] [-7.10870266e-01 -1.96513689e+00 8.50457251e-01 ... 5.53005576e-01 5.09282112e-01 -7.11410999e-01] ... [ 7.39160538e-01 6.88753963e-01 8.53156745e-01 ... 
-1.89453170e-01 -4.25973088e-01 1.34145707e-01] [-4.89394128e-01 7.64180869e-02 9.27618742e-01 ... -6.65677249e-01 -9.54354942e-01 2.04563409e-01] [ 1.43330371e+00 -1.94150150e+00 -2.41329461e-01 ... -4.13743764e-01 5.73221743e-01 6.44689918e-01]]] [[[ 5.47026634e-01 -5.91944337e-01 -1.30950940e+00 ... 6.91323757e-01 6.75075054e-01 -1.85474043e-03] [-5.92196658e-02 -1.51919648e-01 -6.03494793e-02 ... -1.61035210e-01 -5.31015933e-01 1.65581405e+00] [ 1.22341835e+00 -1.16038525e+00 2.79134482e-01 ... 1.77640736e+00 -1.41391551e+00 -8.37872982e-01] ... [-1.57504559e+00 -1.19864099e-01 1.00293899e+00 ... 2.17270565e+00 -7.24202037e-01 -1.03269613e+00] [ 6.07173562e-01 2.73976851e+00 1.17823434e+00 ... -1.79432169e-01 -5.41707516e-01 -1.00811183e+00] [ 8.27700019e-01 -1.65057313e+00 -1.39671326e+00 ... -2.70397421e-02 -3.50742280e-01 1.51178602e-03]] [[-3.35378218e+00 6.24631286e-01 -8.53095427e-02 ... 9.98921037e-01 -7.11164236e-01 8.90612185e-01] [-2.89884597e-01 1.07356989e+00 -1.03581202e+00 ... 1.99900365e+00 9.39949751e-01 1.92223489e+00] [ 1.47440970e-01 1.51014578e+00 6.29835129e-01 ... -2.13835418e-01 9.57976162e-01 5.66377901e-02] ... [ 1.29448414e-01 -3.27363670e-01 1.07019156e-01 ... -4.15385187e-01 -4.74888265e-01 1.34583759e+00] [-2.38847971e+00 1.54958159e-01 1.28230476e+00 ... -6.21719480e-01 7.36114502e-01 -4.58617061e-01] [ 2.61264771e-01 8.30785453e-01 1.71468154e-01 ... 1.46345878e+00 1.13054514e+00 -1.62704915e-01]] [[-3.02822798e-01 -2.64900923e-01 -1.02858734e+00 ... 1.98078513e+00 -3.44477922e-01 6.84052646e-01] [-5.81806660e-01 2.29037955e-01 1.06233621e+00 ... 3.66806872e-02 -9.18051898e-01 -1.08461249e+00] [-1.78227139e+00 -6.09567225e-01 7.11183250e-01 ... 1.47010648e+00 7.60153890e-01 -1.41326451e+00] ... [ 5.37121296e-01 1.31856114e-01 7.82638907e-01 ... -8.36055160e-01 -1.34995055e+00 -1.48057286e-02] [ 6.39025211e-01 -1.17461868e-01 7.35940814e-01 ... 
-1.61482728e+00 -1.77017644e-01 -3.14952843e-02] [-1.21617770e+00 9.20152783e-01 7.51728892e-01 ... -4.67901006e-02 7.01544404e-01 6.65586650e-01]] ... [[-1.29066885e+00 -1.17314875e+00 -1.15033495e+00 ... -1.68745375e+00 -3.62305939e-01 -8.86429548e-01] [ 2.96560347e-01 1.58020115e+00 1.22637093e+00 ... -1.18769872e+00 1.05249977e+00 9.65273023e-01] [ 1.26793951e-01 -3.99497986e-01 2.94922173e-01 ... -8.86293173e-01 -1.98438227e-01 8.62276912e-01] ... [-6.07599616e-01 4.05795038e-01 -2.43846560e+00 ... -4.33277994e-01 -3.59512627e-01 -5.27078271e-01] [ 2.51120687e-01 -1.60681680e-01 4.56575274e-01 ... 6.59439415e-02 -5.20463765e-01 3.47899139e-01] [ 7.27215648e-01 5.10024011e-01 -3.62536818e-01 ... 1.22702849e+00 1.92255783e+00 -2.28608280e-01]] [[ 1.61071420e-02 -1.73028386e+00 3.25640708e-01 ... 1.37040782e+00 -1.32590219e-01 -7.58705437e-01] [-2.71595716e+00 1.93351164e-01 2.21641231e+00 ... 2.89050993e-02 -3.56696725e-01 1.02076399e+00] [ 1.12083912e+00 1.12247336e+00 -8.20438862e-01 ... 1.37629354e+00 1.18326402e+00 -1.58864588e-01] ... [-1.02314186e+00 1.94866374e-01 1.84736478e+00 ... 6.09820366e-01 -8.19610246e-03 1.88308284e-01] [-7.25101113e-01 -1.38610333e-01 -3.31988446e-02 ... 3.44358444e-01 -8.58276546e-01 -7.19661891e-01] [-1.09276056e+00 1.64785016e+00 5.82385123e-01 ... 6.02435656e-02 -8.12467560e-02 9.72998679e-01]] [[-1.31660253e-01 -4.37399626e-01 1.29136562e+00 ... -7.81228304e-01 -8.02643061e-01 -8.41289043e-01] [-1.53133774e+00 4.24752176e-01 9.40432325e-02 ... 7.73641169e-01 -4.14141357e-01 -1.78787911e+00] [ 6.76758960e-02 8.15273464e-01 1.33207011e+00 ... 1.14249170e+00 -9.57478583e-01 2.93669254e-01] ... [ 1.38638341e+00 -1.69980109e+00 -1.08357787e-01 ... -5.25444090e-01 2.08436728e+00 1.55384684e+00] [-2.61434126e+00 -3.73167515e-01 -1.68114853e+00 ... -1.52606830e-01 3.56609344e-01 -1.92635819e-01] [-1.88142347e+00 -1.66516349e-01 1.30866623e+00 ... 
-2.49236703e+00 -5.85635662e-01 -1.09846139e+00]]] [[[-1.64386666e+00 -8.58665824e-01 -2.38100678e-01 ... -7.37223983e-01 -9.16150868e-01 -3.58989149e-01] [ 1.53864574e+00 -3.21174353e-01 -8.89506996e-01 ... 2.83740520e-01 -2.94249117e-01 -1.21499789e+00] [ 1.44486165e+00 1.35096824e-02 -1.12162316e+00 ... -3.84206444e-01 -5.00759363e-01 1.79737818e+00] ... [ 7.89129078e-01 4.52371240e-01 -1.16349459e+00 ... -5.57990849e-01 -2.02463412e+00 1.00666486e-01] [ 5.35059452e-01 -1.18701167e-01 1.65098238e+00 ... 4.03870732e-01 9.14396420e-02 -2.51580894e-01] [-7.61219501e-01 1.79827094e-01 1.21430492e+00 ... -1.96441188e-01 1.45509720e+00 -1.08363879e+00]] [[-1.82074949e-01 -1.86742401e+00 1.13229156e+00 ... -2.53282756e-01 -7.75195479e-01 -4.01122309e-02] [-6.63199663e-01 -8.39238346e-01 -1.31229532e+00 ... -1.65545344e-01 -7.03561246e-01 1.12183189e+00] [ 1.61222875e-01 1.24028695e+00 7.46098280e-01 ... -3.82140785e-01 -3.50253321e-02 5.10938406e-01] ... [ 2.28864956e+00 2.96268642e-01 -1.02949095e+00 ... -1.62713623e+00 9.97626722e-01 2.25944743e-01] [ 2.50486588e+00 -1.15116596e+00 8.91469479e-01 ... -7.32627511e-02 4.61919993e-01 5.49138427e-01] [-1.21274924e+00 1.09681439e+00 1.85449943e-01 ... -1.19930327e+00 -5.00563323e-01 -3.52358758e-01]] [[ 5.68384111e-01 -6.34346753e-02 9.39240217e-01 ... -9.61764872e-01 -4.78418142e-01 6.21784389e-01] [-1.60004056e+00 3.71054858e-01 3.79607767e-01 ... -7.48230934e-01 1.05211377e+00 1.52798116e-01] [ 4.11555082e-01 7.51019835e-01 2.74865717e-01 ... -2.44417500e+00 4.65139389e-01 1.77164346e-01] ... [-1.09907568e+00 8.52803349e-01 1.89339936e+00 ... -8.49580467e-01 1.44324288e-01 4.97839808e-01] [ 6.64741158e-01 6.90945331e-03 -6.38066769e-01 ... 4.64352906e-01 -3.05993035e-02 -6.77436829e-01] [ 8.58356953e-01 1.52532232e+00 5.91556072e-01 ... 1.75031826e-01 -8.86122644e-01 4.46800500e-01]] ... [[ 2.94965021e-02 1.85877907e+00 -3.50832748e+00 ... 
-7.27508664e-01 3.98107529e-01 -8.20279181e-01] [ 1.92283377e-01 4.86626357e-01 -1.92273796e+00 ... 9.15605903e-01 6.06390774e-01 1.06906402e+00] [-2.43685484e-01 -3.58570099e-01 -9.25298214e-01 ... -9.08734679e-01 -2.43209779e-01 -8.11499774e-01] ... [-1.29942036e+00 3.47354412e-02 4.45157290e-01 ... 1.19245493e+00 -5.93851209e-01 -2.81930596e-01] [ 2.09720954e-02 1.76737130e+00 -2.53829551e+00 ... -5.84479198e-02 5.56745827e-01 9.49017644e-01] [ 8.36369216e-01 1.39951682e+00 1.26707268e+00 ... -5.39776146e-01 -9.35421288e-01 5.61763406e-01]] [[ 6.90181971e-01 5.56803942e-01 2.13502064e-01 ... 1.32493067e+00 6.25797391e-01 3.61929625e-01] [-1.71433866e-01 7.64788508e-01 7.35021889e-01 ... 2.95723152e+00 1.25247562e+00 -5.05898952e-01] [-8.39249849e-01 2.07492754e-01 1.10263038e+00 ... -1.15431976e+00 -2.21006617e-01 -5.16136825e-01] ... [-1.17317557e+00 7.74864137e-01 3.82078588e-01 ... 5.22879899e-01 1.96530730e-01 4.20331419e-01] [-5.15064597e-01 -1.30448115e+00 -1.40147877e+00 ... -6.89220786e-01 -1.50552952e+00 -5.64407706e-01] [ 3.03611493e+00 1.23709750e+00 1.62196493e+00 ... -2.37191725e+00 -4.16162252e-01 1.00243592e+00]] [[-2.02415794e-01 2.51263356e+00 1.09504759e+00 ... 3.78587008e-01 3.23090732e-01 1.37123287e+00] [ 1.55182973e-01 4.54872668e-01 -6.81728721e-01 ... -1.66738048e-01 8.07074130e-01 1.28390446e-01] [ 1.67434072e+00 -5.52439988e-01 7.26600349e-01 ... 9.48922753e-01 3.34673822e-02 1.43715215e+00] ... [-2.24341297e+00 4.08387125e-01 -6.17512524e-01 ... 1.60515559e+00 3.46558034e-01 1.82102546e-02] [ 1.17878997e+00 3.00386459e-01 -3.88495743e-01 ... 4.55716580e-01 -1.53499150e+00 -7.20186651e-01] [ 8.75487804e-01 -3.83184880e-01 5.92967153e-01 ... 2.70315617e-01 -6.75004601e-01 -2.21068040e-01]]]]]; ov_res: [[[[[ 1.00755942e+00 1.66014242e+00 -1.63126254e+00 ... -1.02633345e+00 1.07736087e+00 4.02702808e-01] [-5.32595634e-01 1.27609980e+00 -2.71368712e-01 ... 
-4.51277167e-01 -3.55931997e-01 1.40452683e+00] [ 1.20782003e-01 8.28970313e-01 7.20620871e-01 ... -5.00288606e-01 9.42667574e-02 1.08040914e-01] ... [-4.84598726e-01 -8.12482238e-01 -1.44752598e+00 ... -1.59558403e+00 2.64021397e-01 5.08374810e-01] [-1.47956520e-01 -5.52438557e-01 7.65976667e-01 ... -6.76651537e-01 7.48074651e-02 -1.14313841e+00] [-1.10336256e+00 3.55167091e-01 -2.40168348e-01 ... -4.25328873e-02 -5.93180656e-02 6.46627784e-01]] [[ 1.04685508e-01 4.45008546e-01 4.85629916e-01 ... -3.44173938e-01 2.81713605e-01 -1.49939704e+00] [-4.86100703e-01 -4.91713256e-01 3.65749568e-01 ... 1.61815143e+00 5.34296215e-01 9.14010823e-01] [-1.95215535e+00 4.00163829e-01 1.37209690e+00 ... -1.26838648e+00 -3.12935263e-02 -4.34938312e-01] ... [-1.50025654e+00 -3.64260167e-01 5.69313645e-01 ... 8.86327386e-01 -9.60960567e-01 -1.65403783e-02] [ 1.40096307e-01 -1.33800650e+00 -9.40349996e-02 ... -1.50824094e+00 2.52866417e-01 4.69395250e-01] [-9.32727158e-02 1.90390146e+00 2.08329725e+00 ... 9.90886867e-01 9.94693220e-01 -5.29147983e-01]] [[-9.15948033e-01 -1.57149315e-01 -1.51481908e-02 ... 4.28802162e-01 1.04060328e+00 4.16308403e-01] [-2.60406911e-01 -3.56300265e-01 9.79742765e-01 ... 1.38839436e+00 -7.76195228e-02 9.26269174e-01] [ 2.60678474e-02 -2.11536169e-01 -3.65209460e-01 ... 1.18747878e+00 -7.00549781e-01 1.70573783e+00] ... [-2.22361282e-01 1.26139736e+00 -5.81436083e-02 ... -1.23998213e+00 -1.07632756e+00 6.66565657e-01] [-4.89502758e-01 1.58717716e+00 -3.83774936e-01 ... -4.52468723e-01 2.44048834e+00 -9.79644835e-01] [ 8.59178543e-01 7.82504678e-01 1.31954384e+00 ... -4.10202503e-01 1.34843791e+00 1.65893936e+00]] ... [[-2.83852726e-01 -2.19532456e-02 1.60225078e-01 ... 6.80439830e-01 -5.97363114e-01 -1.12351477e+00] [-3.00775915e-01 8.28543067e-01 -3.18236142e-01 ... 3.37709218e-01 1.27963519e+00 8.55589509e-01] [ 8.08923423e-01 7.04122707e-02 1.07648659e+00 ... 1.38579738e+00 -7.71293104e-01 1.23314545e-01] ... 
[-1.18983722e+00 -1.82498419e+00 -1.23172648e-01 ... 1.04063344e+00 6.84294939e-01 7.20672607e-01] [ 2.76438922e-01 3.03183705e-01 -6.05018139e-01 ... 1.05280447e+00 1.41784537e+00 2.47075319e-01] [ 4.19647574e-01 1.75389588e+00 -4.78782564e-01 ... 1.24064863e+00 9.20666754e-01 -7.17927635e-01]] [[ 2.03539014e-01 3.05615634e-01 -5.18328846e-01 ... 8.17601621e-01 -1.73908114e-01 2.14944139e-01] [-1.04272321e-01 1.68180621e+00 1.02198935e+00 ... 1.91688493e-01 -3.57150733e-01 -2.05267400e-01] [-7.78932214e-01 7.09244549e-01 5.56076109e-01 ... -1.51933146e+00 -2.64060050e-01 -8.51639569e-01] ... [-1.18389797e+00 -9.00416315e-01 1.12689567e+00 ... 7.24269807e-01 2.30550423e-01 1.33328843e+00] [-9.13921058e-01 -3.23866487e-01 -1.86985111e+00 ... -5.14508821e-02 7.87571549e-01 1.20375700e-01] [ 1.89200246e+00 -2.60015464e+00 6.75732732e-01 ... 4.22869027e-02 5.24438880e-02 -6.19820178e-01]] [[-4.83032644e-01 -1.69418454e-01 -1.41743863e+00 ... 2.09057236e+00 9.15969670e-01 1.30432463e+00] [-8.55805278e-02 1.33335948e+00 -3.55748981e-02 ... -8.76124024e-01 -6.55216634e-01 -2.58109182e-01] [-7.10870266e-01 -1.96513689e+00 8.50457251e-01 ... 5.53005576e-01 5.09282112e-01 -7.11410999e-01] ... [ 7.39160538e-01 6.88753963e-01 8.53156745e-01 ... -1.89453170e-01 -4.25973088e-01 1.34145707e-01] [-4.89394128e-01 7.64180869e-02 9.27618742e-01 ... -6.65677249e-01 -9.54354942e-01 2.04563409e-01] [ 1.43330371e+00 -1.94150150e+00 -2.41329461e-01 ... -4.13743764e-01 5.73221743e-01 6.44689918e-01]]] [[[ 5.47026634e-01 -5.91944337e-01 -1.30950940e+00 ... 6.91323757e-01 6.75075054e-01 -1.85474043e-03] [-5.92196658e-02 -1.51919648e-01 -6.03494793e-02 ... -1.61035210e-01 -5.31015933e-01 1.65581405e+00] [ 1.22341835e+00 -1.16038525e+00 2.79134482e-01 ... 1.77640736e+00 -1.41391551e+00 -8.37872982e-01] ... [-1.57504559e+00 -1.19864099e-01 1.00293899e+00 ... 2.17270565e+00 -7.24202037e-01 -1.03269613e+00] [ 6.07173562e-01 2.73976851e+00 1.17823434e+00 ... 
-1.79432169e-01 -5.41707516e-01 -1.00811183e+00] [ 8.27700019e-01 -1.65057313e+00 -1.39671326e+00 ... -2.70397421e-02 -3.50742280e-01 1.51178602e-03]] [[-3.35378218e+00 6.24631286e-01 -8.53095427e-02 ... 9.98921037e-01 -7.11164236e-01 8.90612185e-01] [-2.89884597e-01 1.07356989e+00 -1.03581202e+00 ... 1.99900365e+00 9.39949751e-01 1.92223489e+00] [ 1.47440970e-01 1.51014578e+00 6.29835129e-01 ... -2.13835418e-01 9.57976162e-01 5.66377901e-02] ... [ 1.29448414e-01 -3.27363670e-01 1.07019156e-01 ... -4.15385187e-01 -4.74888265e-01 1.34583759e+00] [-2.38847971e+00 1.54958159e-01 1.28230476e+00 ... -6.21719480e-01 7.36114502e-01 -4.58617061e-01] [ 2.61264771e-01 8.30785453e-01 1.71468154e-01 ... 1.46345878e+00 1.13054514e+00 -1.62704915e-01]] [[-3.02822798e-01 -2.64900923e-01 -1.02858734e+00 ... 1.98078513e+00 -3.44477922e-01 6.84052646e-01] [-5.81806660e-01 2.29037955e-01 1.06233621e+00 ... 3.66806872e-02 -9.18051898e-01 -1.08461249e+00] [-1.78227139e+00 -6.09567225e-01 7.11183250e-01 ... 1.47010648e+00 7.60153890e-01 -1.41326451e+00] ... [ 5.37121296e-01 1.31856114e-01 7.82638907e-01 ... -8.36055160e-01 -1.34995055e+00 -1.48057286e-02] [ 6.39025211e-01 -1.17461868e-01 7.35940814e-01 ... -1.61482728e+00 -1.77017644e-01 -3.14952843e-02] [-1.21617770e+00 9.20152783e-01 7.51728892e-01 ... -4.67901006e-02 7.01544404e-01 6.65586650e-01]] ... [[-1.29066885e+00 -1.17314875e+00 -1.15033495e+00 ... -1.68745375e+00 -3.62305939e-01 -8.86429548e-01] [ 2.96560347e-01 1.58020115e+00 1.22637093e+00 ... -1.18769872e+00 1.05249977e+00 9.65273023e-01] [ 1.26793951e-01 -3.99497986e-01 2.94922173e-01 ... -8.86293173e-01 -1.98438227e-01 8.62276912e-01] ... [-6.07599616e-01 4.05795038e-01 -2.43846560e+00 ... -4.33277994e-01 -3.59512627e-01 -5.27078271e-01] [ 2.51120687e-01 -1.60681680e-01 4.56575274e-01 ... 6.59439415e-02 -5.20463765e-01 3.47899139e-01] [ 7.27215648e-01 5.10024011e-01 -3.62536818e-01 ... 
1.22702849e+00 1.92255783e+00 -2.28608280e-01]] [[ 1.61071420e-02 -1.73028386e+00 3.25640708e-01 ... 1.37040782e+00 -1.32590219e-01 -7.58705437e-01] [-2.71595716e+00 1.93351164e-01 2.21641231e+00 ... 2.89050993e-02 -3.56696725e-01 1.02076399e+00] [ 1.12083912e+00 1.12247336e+00 -8.20438862e-01 ... 1.37629354e+00 1.18326402e+00 -1.58864588e-01] ... [-1.02314186e+00 1.94866374e-01 1.84736478e+00 ... 6.09820366e-01 -8.19610246e-03 1.88308284e-01] [-7.25101113e-01 -1.38610333e-01 -3.31988446e-02 ... 3.44358444e-01 -8.58276546e-01 -7.19661891e-01] [-1.09276056e+00 1.64785016e+00 5.82385123e-01 ... 6.02435656e-02 -8.12467560e-02 9.72998679e-01]] [[-1.31660253e-01 -4.37399626e-01 1.29136562e+00 ... -7.81228304e-01 -8.02643061e-01 -8.41289043e-01] [-1.53133774e+00 4.24752176e-01 9.40432325e-02 ... 7.73641169e-01 -4.14141357e-01 -1.78787911e+00] [ 6.76758960e-02 8.15273464e-01 1.33207011e+00 ... 1.14249170e+00 -9.57478583e-01 2.93669254e-01] ... [ 1.38638341e+00 -1.69980109e+00 -1.08357787e-01 ... -5.25444090e-01 2.08436728e+00 1.55384684e+00] [-2.61434126e+00 -3.73167515e-01 -1.68114853e+00 ... -1.52606830e-01 3.56609344e-01 -1.92635819e-01] [-1.88142347e+00 -1.66516349e-01 1.30866623e+00 ... -2.49236703e+00 -5.85635662e-01 -1.09846139e+00]]] [[[-1.64386666e+00 -8.58665824e-01 -2.38100678e-01 ... -7.37223983e-01 -9.16150868e-01 -3.58989149e-01] [ 1.53864574e+00 -3.21174353e-01 -8.89506996e-01 ... 2.83740520e-01 -2.94249117e-01 -1.21499789e+00] [ 1.44486165e+00 1.35096824e-02 -1.12162316e+00 ... -3.84206444e-01 -5.00759363e-01 1.79737818e+00] ... [ 7.89129078e-01 4.52371240e-01 -1.16349459e+00 ... -5.57990849e-01 -2.02463412e+00 1.00666486e-01] [ 5.35059452e-01 -1.18701167e-01 1.65098238e+00 ... 4.03870732e-01 9.14396420e-02 -2.51580894e-01] [-7.61219501e-01 1.79827094e-01 1.21430492e+00 ... -1.96441188e-01 1.45509720e+00 -1.08363879e+00]] [[-1.82074949e-01 -1.86742401e+00 1.13229156e+00 ... 
-2.53282756e-01 -7.75195479e-01 -4.01122309e-02] [-6.63199663e-01 -8.39238346e-01 -1.31229532e+00 ... -1.65545344e-01 -7.03561246e-01 1.12183189e+00] [ 1.61222875e-01 1.24028695e+00 7.46098280e-01 ... -3.82140785e-01 -3.50253321e-02 5.10938406e-01] ... [ 2.28864956e+00 2.96268642e-01 -1.02949095e+00 ... -1.62713623e+00 9.97626722e-01 2.25944743e-01] [ 2.50486588e+00 -1.15116596e+00 8.91469479e-01 ... -7.32627511e-02 4.61919993e-01 5.49138427e-01] [-1.21274924e+00 1.09681439e+00 1.85449943e-01 ... -1.19930327e+00 -5.00563323e-01 -3.52358758e-01]] [[ 5.68384111e-01 -6.34346753e-02 9.39240217e-01 ... -9.61764872e-01 -4.78418142e-01 6.21784389e-01] [-1.60004056e+00 3.71054858e-01 3.79607767e-01 ... -7.48230934e-01 1.05211377e+00 1.52798116e-01] [ 4.11555082e-01 7.51019835e-01 2.74865717e-01 ... -2.44417500e+00 4.65139389e-01 1.77164346e-01] ... [-1.09907568e+00 8.52803349e-01 1.89339936e+00 ... -8.49580467e-01 1.44324288e-01 4.97839808e-01] [ 6.64741158e-01 6.90945331e-03 -6.38066769e-01 ... 4.64352906e-01 -3.05993035e-02 -6.77436829e-01] [ 8.58356953e-01 1.52532232e+00 5.91556072e-01 ... 1.75031826e-01 -8.86122644e-01 4.46800500e-01]] ... [[ 2.94965021e-02 1.85877907e+00 -3.50832748e+00 ... -7.27508664e-01 3.98107529e-01 -8.20279181e-01] [ 1.92283377e-01 4.86626357e-01 -1.92273796e+00 ... 9.15605903e-01 6.06390774e-01 1.06906402e+00] [-2.43685484e-01 -3.58570099e-01 -9.25298214e-01 ... -9.08734679e-01 -2.43209779e-01 -8.11499774e-01] ... [-1.29942036e+00 3.47354412e-02 4.45157290e-01 ... 1.19245493e+00 -5.93851209e-01 -2.81930596e-01] [ 2.09720954e-02 1.76737130e+00 -2.53829551e+00 ... -5.84479198e-02 5.56745827e-01 9.49017644e-01] [ 8.36369216e-01 1.39951682e+00 1.26707268e+00 ... -5.39776146e-01 -9.35421288e-01 5.61763406e-01]] [[ 6.90181971e-01 5.56803942e-01 2.13502064e-01 ... 1.32493067e+00 6.25797391e-01 3.61929625e-01] [-1.71433866e-01 7.64788508e-01 7.35021889e-01 ... 
2.95723152e+00 1.25247562e+00 -5.05898952e-01] [-8.39249849e-01 2.07492754e-01 1.10263038e+00 ... -1.15431976e+00 -2.21006617e-01 -5.16136825e-01] ... [-1.17317557e+00 7.74864137e-01 3.82078588e-01 ... 5.22879899e-01 1.96530730e-01 4.20331419e-01] [-5.15064597e-01 -1.30448115e+00 -1.40147877e+00 ... -6.89220786e-01 -1.50552952e+00 -5.64407706e-01] [ 3.03611493e+00 1.23709750e+00 1.62196493e+00 ... -2.37191725e+00 -4.16162252e-01 1.00243592e+00]] [[-2.02415794e-01 2.51263356e+00 1.09504759e+00 ... 3.78587008e-01 3.23090732e-01 1.37123287e+00] [ 1.55182973e-01 4.54872668e-01 -6.81728721e-01 ... -1.66738048e-01 8.07074130e-01 1.28390446e-01] [ 1.67434072e+00 -5.52439988e-01 7.26600349e-01 ... 9.48922753e-01 3.34673822e-02 1.43715215e+00] ... [-2.24341297e+00 4.08387125e-01 -6.17512524e-01 ... 1.60515559e+00 3.46558034e-01 1.82102546e-02] [ 1.17878997e+00 3.00386459e-01 -3.88495743e-01 ... 4.55716580e-01 -1.53499150e+00 -7.20186651e-01] [ 8.75487804e-01 -3.83184880e-01 5.92967153e-01 ... 2.70315617e-01 -6.75004601e-01 -2.21068040e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 1, 1, 2) - mode:constant - value:0.0 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5566.aten_pad, %x : Tensor): %2 : float = prim::Constant[value=0.]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 1, 1, 2]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... 
[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 -5.8855295e-01 1.1342857e+00 ... 
-8.3425349e-01 -1.8250477e+00 5.8109713e-01] [ 0.0000000e+00 -9.2663598e-01 1.2533876e+00 ... -9.1905719e-01 1.4528890e+00 1.2749554e-01] [ 0.0000000e+00 -2.3143642e-02 -1.3113747e+00 ... -4.8327917e-01 4.8284215e-01 1.0031334e+00] ... [ 0.0000000e+00 -1.2524511e+00 -9.1590978e-02 ... 4.1897959e-01 1.9622393e+00 7.3948777e-01] [ 0.0000000e+00 -7.9944891e-01 1.6096040e+00 ... 4.4810569e-01 -3.2550830e-01 3.5676302e-04] [ 0.0000000e+00 1.0017999e+00 1.7567215e+00 ... -7.4646342e-01 -1.3482828e+00 4.6431124e-01]] [[ 0.0000000e+00 -7.1618950e-01 3.0234417e-01 ... 6.0266125e-01 2.8634512e-01 -3.6467344e-01] [ 0.0000000e+00 -7.6441504e-02 -2.8693068e-01 ... 3.9366764e-01 -5.1526922e-01 1.6629693e-01] [ 0.0000000e+00 8.2041152e-02 -2.9805174e-01 ... 4.8473111e-01 1.4675957e+00 -1.7530812e+00] ... [ 0.0000000e+00 8.9643203e-02 5.3764790e-01 ... 3.1783201e-02 3.4223238e-01 1.9442687e+00] [ 0.0000000e+00 -1.1500468e+00 1.5901309e-01 ... -6.6876173e-01 -1.5980815e+00 4.6776506e-01] [ 0.0000000e+00 1.0283356e+00 9.1916002e-02 ... 5.4705089e-01 -6.0711336e-01 -4.3797660e-01]] [[ 0.0000000e+00 -1.4753206e+00 8.0069911e-01 ... 4.4338951e-01 -5.9227150e-02 2.2328594e+00] [ 0.0000000e+00 -1.9109769e-01 9.8979306e-01 ... -2.3056946e+00 1.5459774e+00 9.5600921e-01] [ 0.0000000e+00 9.2880890e-02 5.2406067e-01 ... -7.9296428e-01 -3.0995700e-01 1.2264770e+00] ... [ 0.0000000e+00 -1.7317992e-01 -1.4451952e+00 ... 8.6352122e-01 6.3740182e-01 6.8851435e-01] [ 0.0000000e+00 1.2265077e+00 -3.9939484e-01 ... -1.2641711e+00 -5.6710315e-01 1.3744707e+00] [ 0.0000000e+00 7.1635050e-01 -8.1600195e-01 ... 2.8757015e-01 -2.5362166e-02 1.0414636e-01]] ... [[ 0.0000000e+00 -1.1617081e+00 1.9190537e+00 ... 1.5318531e+00 7.4451619e-01 -4.6616521e-01] [ 0.0000000e+00 4.6408787e-02 2.6380283e-01 ... 1.0438284e+00 1.6396316e+00 9.7334588e-01] [ 0.0000000e+00 -1.9923035e+00 -1.0010234e+00 ... -7.7844632e-01 3.3836791e-01 -4.7948495e-02] ... [ 0.0000000e+00 1.7000952e+00 -1.5522912e+00 ... 
-2.1259332e-01 3.4021798e-01 5.8775860e-01] [ 0.0000000e+00 9.5975190e-02 1.5388426e+00 ... -6.3354087e-01 -3.4101412e-01 -6.3075221e-01] [ 0.0000000e+00 1.4843374e+00 1.1105913e+00 ... 1.7635390e+00 1.9630567e+00 1.3601624e+00]] [[ 0.0000000e+00 1.0613106e+00 1.4554064e+00 ... 3.8825196e-01 -1.0094625e+00 1.4214274e-01] [ 0.0000000e+00 -1.9393632e-01 8.3552852e-02 ... 4.0043509e-01 -2.1224091e+00 1.9054255e-01] [ 0.0000000e+00 3.2196757e-01 2.4914698e-01 ... 1.2392192e+00 -1.0142993e+00 1.0253861e-01] ... [ 0.0000000e+00 -8.1431711e-01 -2.6955838e+00 ... -4.7161379e-01 1.8822445e-01 1.7268427e-01] [ 0.0000000e+00 -4.7738644e-01 9.1132395e-02 ... 4.8604077e-01 -1.4050214e+00 6.0572106e-01] [ 0.0000000e+00 2.2403891e-01 1.7579172e+00 ... -8.4478617e-01 2.7335769e-01 -1.4215554e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 1.0036784e+00 4.8569278e-03 ... -1.8255958e-01 -6.7592913e-01 -1.7323807e+00] [ 0.0000000e+00 1.6388274e+00 -1.1709701e+00 ... 5.9728581e-01 1.0502770e+00 5.1493559e-02] [ 0.0000000e+00 9.8558933e-02 -1.2518966e+00 ... -8.6610299e-01 -7.0434296e-01 -8.3245957e-01] ... [ 0.0000000e+00 -1.4837657e+00 -1.5627369e+00 ... 1.1433624e+00 1.0666535e+00 6.7656296e-01] [ 0.0000000e+00 -1.6963568e+00 -1.6851821e-03 ... -8.5152483e-01 -3.0873266e-01 2.3032211e-01] [ 0.0000000e+00 7.5532812e-01 5.2944171e-01 ... 1.5786053e+00 -7.4011439e-01 2.6671796e+00]] [[ 0.0000000e+00 -1.0738843e-01 7.3145640e-01 ... 
-5.2037859e-01 7.5254440e-01 -3.7401199e-02] [ 0.0000000e+00 8.0531895e-01 -6.0467958e-01 ... -1.8897637e+00 -6.1249197e-01 -4.6195826e-01] [ 0.0000000e+00 -4.3795374e-01 1.5264089e+00 ... -1.0494937e+00 -1.6683424e-01 1.3086051e+00] ... [ 0.0000000e+00 1.3090872e+00 8.7604197e-03 ... -1.7807397e-01 1.7638414e-01 -1.3228995e+00] [ 0.0000000e+00 2.5543597e-01 7.1786672e-01 ... -6.6744411e-01 7.9275888e-01 5.6653678e-01] [ 0.0000000e+00 2.3849165e+00 1.8840715e+00 ... 6.3950604e-01 -8.2557893e-01 -6.2428665e-01]] [[ 0.0000000e+00 -3.9208177e-01 1.7318085e+00 ... 1.7871454e+00 -1.5506799e-02 1.1161274e+00] [ 0.0000000e+00 -5.1623762e-01 -2.6443318e-01 ... -6.2610477e-01 9.8647630e-01 1.5688722e+00] [ 0.0000000e+00 -8.8725460e-01 -7.0950449e-01 ... 1.3874562e+00 5.9320611e-01 -1.2145624e+00] ... [ 0.0000000e+00 -7.6556563e-01 7.8044176e-01 ... -7.4337971e-01 -1.4101711e-01 1.2221649e+00] [ 0.0000000e+00 -1.6862490e+00 3.8534337e-01 ... -1.6333941e+00 1.5752891e+00 1.0445504e-01] [ 0.0000000e+00 -1.1675633e+00 -1.5145099e-01 ... 2.4146716e-01 -3.3462283e-01 1.6269379e+00]] ... [[ 0.0000000e+00 6.7720056e-01 4.5828575e-01 ... 3.7454897e-01 -1.8483332e+00 -1.7727994e+00] [ 0.0000000e+00 -7.3097557e-01 -6.5339303e-01 ... -3.3006752e-01 1.3076939e-01 -1.1785733e+00] [ 0.0000000e+00 -2.3559709e+00 9.9967414e-01 ... 1.1795950e-01 -9.3069643e-01 1.0696635e+00] ... [ 0.0000000e+00 6.6322666e-01 -2.4137607e-02 ... -1.7988831e+00 8.1743175e-01 1.6856875e+00] [ 0.0000000e+00 6.7998779e-01 -4.9182680e-01 ... 6.5748990e-02 2.9495651e-02 -7.2069331e-03] [ 0.0000000e+00 9.8612368e-01 6.3415027e-01 ... -1.5184883e+00 -7.4085206e-01 -3.0767673e-01]] [[ 0.0000000e+00 -6.4289486e-01 -7.6767176e-01 ... 2.1420231e+00 6.2088937e-01 8.2943463e-01] [ 0.0000000e+00 1.1794649e-02 6.5464532e-01 ... -9.5385098e-01 4.2100835e-01 8.3039087e-01] [ 0.0000000e+00 -1.8931469e+00 8.9121449e-01 ... -2.5649792e-01 -8.4952962e-01 3.2354817e+00] ... [ 0.0000000e+00 9.6318692e-02 -1.2929562e-01 ... 
-1.0654184e-02 -1.6337937e+00 1.1089036e+00] [ 0.0000000e+00 1.1146662e+00 -7.2575516e-01 ... 1.4502167e+00 1.8454012e-01 1.1334883e+00] [ 0.0000000e+00 -1.3548181e+00 4.0021762e-01 ... -1.3972667e+00 3.9743137e-01 1.2990197e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 4.9004719e-01 -8.2073778e-02 ... -1.2019348e+00 -1.5072669e+00 5.2629334e-01] [ 0.0000000e+00 5.0797015e-01 -1.3118091e-01 ... -1.7310140e+00 -1.1997039e+00 4.4332597e-02] [ 0.0000000e+00 3.3865666e-01 4.7520357e-01 ... -5.9043914e-01 6.7382914e-01 -2.2212019e+00] ... [ 0.0000000e+00 -5.2702123e-01 1.0565220e+00 ... -3.2938939e-01 -1.2406294e+00 -5.0031155e-01] [ 0.0000000e+00 3.8588282e-01 -1.7173052e+00 ... 1.7934972e-01 -6.3974851e-01 1.2570865e+00] [ 0.0000000e+00 -5.0316012e-01 8.8010275e-01 ... -1.7473732e-01 9.6983820e-02 1.7965397e+00]] [[ 0.0000000e+00 1.7813052e+00 -1.2006077e+00 ... 1.4098842e-01 -5.9058803e-01 1.5235351e+00] [ 0.0000000e+00 -1.8193056e-01 8.0418569e-01 ... -8.6299640e-01 -8.5251033e-01 -8.6044812e-01] [ 0.0000000e+00 7.0306903e-01 -6.1515117e-01 ... -8.4805018e-01 2.3864985e-02 -9.8544264e-01] ... [ 0.0000000e+00 1.1128098e+00 9.0748996e-01 ... -3.5865599e-01 -9.4310331e-01 1.0329765e+00] [ 0.0000000e+00 -7.4287075e-01 -4.5402083e-01 ... 2.1868671e-01 -3.1550670e-01 -1.5282968e+00] [ 0.0000000e+00 2.6118159e-01 8.7383074e-01 ... 8.2960203e-02 -1.4529885e+00 1.0343031e+00]] [[ 0.0000000e+00 -3.8344371e-01 2.1481205e-02 ... 
-1.2861491e+00 1.6694078e+00 -1.1282825e+00] [ 0.0000000e+00 -1.3104060e+00 5.1861459e-01 ... 3.3919290e-01 -1.1420765e+00 -2.6898429e-02] [ 0.0000000e+00 3.8348041e-03 7.0863478e-02 ... 2.4539623e+00 7.4822420e-01 8.8625354e-01] ... [ 0.0000000e+00 -4.1841879e-01 1.1482532e+00 ... 3.8958961e-01 8.8380438e-01 1.0887886e+00] [ 0.0000000e+00 4.2425945e-01 -2.6286843e+00 ... -7.7100348e-01 -8.4073734e-01 -7.4204570e-01] [ 0.0000000e+00 1.0655113e+00 5.4414189e-01 ... -1.9968123e+00 -2.7413123e+00 -1.6855191e-01]] ... [[ 0.0000000e+00 2.2178641e-01 2.4773876e-01 ... -4.0038642e-01 1.0892955e+00 4.0876535e-01] [ 0.0000000e+00 1.6996045e+00 -6.6573757e-01 ... 7.1804106e-01 -6.5049189e-03 -1.0580341e+00] [ 0.0000000e+00 -8.3698964e-01 -6.7014235e-01 ... 8.9761108e-01 -1.0526447e-01 -1.1537281e+00] ... [ 0.0000000e+00 8.0601357e-02 -8.8608193e-01 ... -6.2741512e-01 1.9527826e-01 9.0099961e-01] [ 0.0000000e+00 -8.7658507e-01 4.7531578e-01 ... 1.0437676e+00 -4.7988495e-01 -8.2398593e-01] [ 0.0000000e+00 3.1585355e+00 -5.4294892e-02 ... 8.6602032e-01 3.5777673e-01 1.3869772e+00]] [[ 0.0000000e+00 1.3914248e+00 -1.6745129e+00 ... 1.6259028e-02 2.9297858e-01 7.0705897e-01] [ 0.0000000e+00 -1.6793687e+00 9.3034518e-01 ... -6.9379413e-01 1.2600207e+00 -7.2065949e-01] [ 0.0000000e+00 1.2283627e+00 7.3231292e-01 ... -1.9317992e-01 -1.0616207e+00 6.3202012e-01] ... [ 0.0000000e+00 2.0832284e-01 -4.6279415e-01 ... 1.7301700e+00 2.5095825e+00 -1.0596238e+00] [ 0.0000000e+00 7.3186648e-01 -5.5473137e-01 ... 9.7094625e-01 -1.3446549e-01 1.9327534e+00] [ 0.0000000e+00 9.1119313e-01 6.6980135e-01 ... -2.1458833e+00 -9.2933613e-01 1.7764518e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]]]; ov_res: [[[[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 -5.8855295e-01 1.1342857e+00 ... -8.3425349e-01 -1.8250477e+00 5.8109713e-01] [ 0.0000000e+00 -9.2663598e-01 1.2533876e+00 ... -9.1905719e-01 1.4528890e+00 1.2749554e-01] [ 0.0000000e+00 -2.3143642e-02 -1.3113747e+00 ... -4.8327917e-01 4.8284215e-01 1.0031334e+00] ... [ 0.0000000e+00 -1.2524511e+00 -9.1590978e-02 ... 4.1897959e-01 1.9622393e+00 7.3948777e-01] [ 0.0000000e+00 -7.9944891e-01 1.6096040e+00 ... 4.4810569e-01 -3.2550830e-01 3.5676302e-04] [ 0.0000000e+00 1.0017999e+00 1.7567215e+00 ... -7.4646342e-01 -1.3482828e+00 4.6431124e-01]] [[ 0.0000000e+00 -7.1618950e-01 3.0234417e-01 ... 6.0266125e-01 2.8634512e-01 -3.6467344e-01] [ 0.0000000e+00 -7.6441504e-02 -2.8693068e-01 ... 3.9366764e-01 -5.1526922e-01 1.6629693e-01] [ 0.0000000e+00 8.2041152e-02 -2.9805174e-01 ... 4.8473111e-01 1.4675957e+00 -1.7530812e+00] ... [ 0.0000000e+00 8.9643203e-02 5.3764790e-01 ... 3.1783201e-02 3.4223238e-01 1.9442687e+00] [ 0.0000000e+00 -1.1500468e+00 1.5901309e-01 ... -6.6876173e-01 -1.5980815e+00 4.6776506e-01] [ 0.0000000e+00 1.0283356e+00 9.1916002e-02 ... 5.4705089e-01 -6.0711336e-01 -4.3797660e-01]] [[ 0.0000000e+00 -1.4753206e+00 8.0069911e-01 ... 4.4338951e-01 -5.9227150e-02 2.2328594e+00] [ 0.0000000e+00 -1.9109769e-01 9.8979306e-01 ... -2.3056946e+00 1.5459774e+00 9.5600921e-01] [ 0.0000000e+00 9.2880890e-02 5.2406067e-01 ... -7.9296428e-01 -3.0995700e-01 1.2264770e+00] ... [ 0.0000000e+00 -1.7317992e-01 -1.4451952e+00 ... 
8.6352122e-01 6.3740182e-01 6.8851435e-01] [ 0.0000000e+00 1.2265077e+00 -3.9939484e-01 ... -1.2641711e+00 -5.6710315e-01 1.3744707e+00] [ 0.0000000e+00 7.1635050e-01 -8.1600195e-01 ... 2.8757015e-01 -2.5362166e-02 1.0414636e-01]] ... [[ 0.0000000e+00 -1.1617081e+00 1.9190537e+00 ... 1.5318531e+00 7.4451619e-01 -4.6616521e-01] [ 0.0000000e+00 4.6408787e-02 2.6380283e-01 ... 1.0438284e+00 1.6396316e+00 9.7334588e-01] [ 0.0000000e+00 -1.9923035e+00 -1.0010234e+00 ... -7.7844632e-01 3.3836791e-01 -4.7948495e-02] ... [ 0.0000000e+00 1.7000952e+00 -1.5522912e+00 ... -2.1259332e-01 3.4021798e-01 5.8775860e-01] [ 0.0000000e+00 9.5975190e-02 1.5388426e+00 ... -6.3354087e-01 -3.4101412e-01 -6.3075221e-01] [ 0.0000000e+00 1.4843374e+00 1.1105913e+00 ... 1.7635390e+00 1.9630567e+00 1.3601624e+00]] [[ 0.0000000e+00 1.0613106e+00 1.4554064e+00 ... 3.8825196e-01 -1.0094625e+00 1.4214274e-01] [ 0.0000000e+00 -1.9393632e-01 8.3552852e-02 ... 4.0043509e-01 -2.1224091e+00 1.9054255e-01] [ 0.0000000e+00 3.2196757e-01 2.4914698e-01 ... 1.2392192e+00 -1.0142993e+00 1.0253861e-01] ... [ 0.0000000e+00 -8.1431711e-01 -2.6955838e+00 ... -4.7161379e-01 1.8822445e-01 1.7268427e-01] [ 0.0000000e+00 -4.7738644e-01 9.1132395e-02 ... 4.8604077e-01 -1.4050214e+00 6.0572106e-01] [ 0.0000000e+00 2.2403891e-01 1.7579172e+00 ... -8.4478617e-01 2.7335769e-01 -1.4215554e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 1.0036784e+00 4.8569278e-03 ... 
-1.8255958e-01 -6.7592913e-01 -1.7323807e+00] [ 0.0000000e+00 1.6388274e+00 -1.1709701e+00 ... 5.9728581e-01 1.0502770e+00 5.1493559e-02] [ 0.0000000e+00 9.8558933e-02 -1.2518966e+00 ... -8.6610299e-01 -7.0434296e-01 -8.3245957e-01] ... [ 0.0000000e+00 -1.4837657e+00 -1.5627369e+00 ... 1.1433624e+00 1.0666535e+00 6.7656296e-01] [ 0.0000000e+00 -1.6963568e+00 -1.6851821e-03 ... -8.5152483e-01 -3.0873266e-01 2.3032211e-01] [ 0.0000000e+00 7.5532812e-01 5.2944171e-01 ... 1.5786053e+00 -7.4011439e-01 2.6671796e+00]] [[ 0.0000000e+00 -1.0738843e-01 7.3145640e-01 ... -5.2037859e-01 7.5254440e-01 -3.7401199e-02] [ 0.0000000e+00 8.0531895e-01 -6.0467958e-01 ... -1.8897637e+00 -6.1249197e-01 -4.6195826e-01] [ 0.0000000e+00 -4.3795374e-01 1.5264089e+00 ... -1.0494937e+00 -1.6683424e-01 1.3086051e+00] ... [ 0.0000000e+00 1.3090872e+00 8.7604197e-03 ... -1.7807397e-01 1.7638414e-01 -1.3228995e+00] [ 0.0000000e+00 2.5543597e-01 7.1786672e-01 ... -6.6744411e-01 7.9275888e-01 5.6653678e-01] [ 0.0000000e+00 2.3849165e+00 1.8840715e+00 ... 6.3950604e-01 -8.2557893e-01 -6.2428665e-01]] [[ 0.0000000e+00 -3.9208177e-01 1.7318085e+00 ... 1.7871454e+00 -1.5506799e-02 1.1161274e+00] [ 0.0000000e+00 -5.1623762e-01 -2.6443318e-01 ... -6.2610477e-01 9.8647630e-01 1.5688722e+00] [ 0.0000000e+00 -8.8725460e-01 -7.0950449e-01 ... 1.3874562e+00 5.9320611e-01 -1.2145624e+00] ... [ 0.0000000e+00 -7.6556563e-01 7.8044176e-01 ... -7.4337971e-01 -1.4101711e-01 1.2221649e+00] [ 0.0000000e+00 -1.6862490e+00 3.8534337e-01 ... -1.6333941e+00 1.5752891e+00 1.0445504e-01] [ 0.0000000e+00 -1.1675633e+00 -1.5145099e-01 ... 2.4146716e-01 -3.3462283e-01 1.6269379e+00]] ... [[ 0.0000000e+00 6.7720056e-01 4.5828575e-01 ... 3.7454897e-01 -1.8483332e+00 -1.7727994e+00] [ 0.0000000e+00 -7.3097557e-01 -6.5339303e-01 ... -3.3006752e-01 1.3076939e-01 -1.1785733e+00] [ 0.0000000e+00 -2.3559709e+00 9.9967414e-01 ... 1.1795950e-01 -9.3069643e-01 1.0696635e+00] ... [ 0.0000000e+00 6.6322666e-01 -2.4137607e-02 ... 
-1.7988831e+00 8.1743175e-01 1.6856875e+00] [ 0.0000000e+00 6.7998779e-01 -4.9182680e-01 ... 6.5748990e-02 2.9495651e-02 -7.2069331e-03] [ 0.0000000e+00 9.8612368e-01 6.3415027e-01 ... -1.5184883e+00 -7.4085206e-01 -3.0767673e-01]] [[ 0.0000000e+00 -6.4289486e-01 -7.6767176e-01 ... 2.1420231e+00 6.2088937e-01 8.2943463e-01] [ 0.0000000e+00 1.1794649e-02 6.5464532e-01 ... -9.5385098e-01 4.2100835e-01 8.3039087e-01] [ 0.0000000e+00 -1.8931469e+00 8.9121449e-01 ... -2.5649792e-01 -8.4952962e-01 3.2354817e+00] ... [ 0.0000000e+00 9.6318692e-02 -1.2929562e-01 ... -1.0654184e-02 -1.6337937e+00 1.1089036e+00] [ 0.0000000e+00 1.1146662e+00 -7.2575516e-01 ... 1.4502167e+00 1.8454012e-01 1.1334883e+00] [ 0.0000000e+00 -1.3548181e+00 4.0021762e-01 ... -1.3972667e+00 3.9743137e-01 1.2990197e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 4.9004719e-01 -8.2073778e-02 ... -1.2019348e+00 -1.5072669e+00 5.2629334e-01] [ 0.0000000e+00 5.0797015e-01 -1.3118091e-01 ... -1.7310140e+00 -1.1997039e+00 4.4332597e-02] [ 0.0000000e+00 3.3865666e-01 4.7520357e-01 ... -5.9043914e-01 6.7382914e-01 -2.2212019e+00] ... [ 0.0000000e+00 -5.2702123e-01 1.0565220e+00 ... -3.2938939e-01 -1.2406294e+00 -5.0031155e-01] [ 0.0000000e+00 3.8588282e-01 -1.7173052e+00 ... 1.7934972e-01 -6.3974851e-01 1.2570865e+00] [ 0.0000000e+00 -5.0316012e-01 8.8010275e-01 ... -1.7473732e-01 9.6983820e-02 1.7965397e+00]] [[ 0.0000000e+00 1.7813052e+00 -1.2006077e+00 ... 
1.4098842e-01 -5.9058803e-01 1.5235351e+00] [ 0.0000000e+00 -1.8193056e-01 8.0418569e-01 ... -8.6299640e-01 -8.5251033e-01 -8.6044812e-01] [ 0.0000000e+00 7.0306903e-01 -6.1515117e-01 ... -8.4805018e-01 2.3864985e-02 -9.8544264e-01] ... [ 0.0000000e+00 1.1128098e+00 9.0748996e-01 ... -3.5865599e-01 -9.4310331e-01 1.0329765e+00] [ 0.0000000e+00 -7.4287075e-01 -4.5402083e-01 ... 2.1868671e-01 -3.1550670e-01 -1.5282968e+00] [ 0.0000000e+00 2.6118159e-01 8.7383074e-01 ... 8.2960203e-02 -1.4529885e+00 1.0343031e+00]] [[ 0.0000000e+00 -3.8344371e-01 2.1481205e-02 ... -1.2861491e+00 1.6694078e+00 -1.1282825e+00] [ 0.0000000e+00 -1.3104060e+00 5.1861459e-01 ... 3.3919290e-01 -1.1420765e+00 -2.6898429e-02] [ 0.0000000e+00 3.8348041e-03 7.0863478e-02 ... 2.4539623e+00 7.4822420e-01 8.8625354e-01] ... [ 0.0000000e+00 -4.1841879e-01 1.1482532e+00 ... 3.8958961e-01 8.8380438e-01 1.0887886e+00] [ 0.0000000e+00 4.2425945e-01 -2.6286843e+00 ... -7.7100348e-01 -8.4073734e-01 -7.4204570e-01] [ 0.0000000e+00 1.0655113e+00 5.4414189e-01 ... -1.9968123e+00 -2.7413123e+00 -1.6855191e-01]] ... [[ 0.0000000e+00 2.2178641e-01 2.4773876e-01 ... -4.0038642e-01 1.0892955e+00 4.0876535e-01] [ 0.0000000e+00 1.6996045e+00 -6.6573757e-01 ... 7.1804106e-01 -6.5049189e-03 -1.0580341e+00] [ 0.0000000e+00 -8.3698964e-01 -6.7014235e-01 ... 8.9761108e-01 -1.0526447e-01 -1.1537281e+00] ... [ 0.0000000e+00 8.0601357e-02 -8.8608193e-01 ... -6.2741512e-01 1.9527826e-01 9.0099961e-01] [ 0.0000000e+00 -8.7658507e-01 4.7531578e-01 ... 1.0437676e+00 -4.7988495e-01 -8.2398593e-01] [ 0.0000000e+00 3.1585355e+00 -5.4294892e-02 ... 8.6602032e-01 3.5777673e-01 1.3869772e+00]] [[ 0.0000000e+00 1.3914248e+00 -1.6745129e+00 ... 1.6259028e-02 2.9297858e-01 7.0705897e-01] [ 0.0000000e+00 -1.6793687e+00 9.3034518e-01 ... -6.9379413e-01 1.2600207e+00 -7.2065949e-01] [ 0.0000000e+00 1.2283627e+00 7.3231292e-01 ... -1.9317992e-01 -1.0616207e+00 6.3202012e-01] ... [ 0.0000000e+00 2.0832284e-01 -4.6279415e-01 ... 
1.7301700e+00 2.5095825e+00 -1.0596238e+00] [ 0.0000000e+00 7.3186648e-01 -5.5473137e-01 ... 9.7094625e-01 -1.3446549e-01 1.9327534e+00] [ 0.0000000e+00 9.1119313e-01 6.6980135e-01 ... -2.1458833e+00 -9.2933613e-01 1.7764518e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]] [[[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] ... [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 
0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]] [[ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] ... [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00] [ 0.0000000e+00 0.0000000e+00 0.0000000e+00 ... 0.0000000e+00 0.0000000e+00 0.0000000e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 0, 0, 0, 0, 1, 1, 2, 2, 3) - mode:constant - value:0.0 ] | 0.22 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5569.aten_pad, %x : Tensor): %2 : float = prim::Constant[value=0.]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0, 0, 0, 0, 1, 1, 2, 2, 3]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) nversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode 
for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python 
instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES orfw_re: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. 
... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]] [[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 
0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 
0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]] [[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0.8227891 -1.0119064 ... -0.9386463 0.63800275 0.3863003 ] [ 0. -1.1446784 -1.1824858 ... 0.93911016 0.260379 1.1839291 ] [ 0. -0.27926108 0.28807172 ... -1.1439561 -0.5849089 -1.0896434 ] ... [ 0. -1.0776547 -0.14874053 ... -1.1509743 -0.13808537 0.22910231] [ 0. 1.8411679 -0.33165637 ... 0.73915565 0.39518163 1.9672662 ] [ 0. 1.285953 0.29667866 ... -1.0023015 0.3003238 -0.338694 ]] [[ 0. -0.23638585 1.6199658 ... 0.32726058 0.175522 -0.28776595] [ 0. -0.639544 -0.9825682 ... -1.0464767 -0.11897838 -0.14620475] [ 0. 1.0407361 -0.8625769 ... 
1.0139103 0.03504014 -0.5311946 ] ... [ 0. -0.24884047 -1.0822502 ... 1.5944604 -0.18364425 -1.6073047 ] [ 0. 0.445545 -1.3289703 ... -0.1175478 -0.46784875 1.6740576 ] [ 0. 0.11523513 -0.0300019 ... 0.5482693 0.7140044 -1.7578294 ]] [[ 0. -0.42596006 -1.0129732 ... 0.7394097 -1.815711 0.48150063] [ 0. 1.0346748 -0.5533011 ... -1.2797343 -0.5967356 -0.81590956] [ 0. 0.80149007 -0.5076447 ... 0.72013134 -1.6639107 1.1491297 ] ... [ 0. -0.24821378 -0.76500285 ... -0.32885152 -0.46385965 -0.03930042] [ 0. 1.2053242 -0.03890674 ... -0.98893046 -0.6038855 -0.5303928 ] [ 0. -1.2911031 -0.0892669 ... -0.36805153 -0.35931593 0.6025956 ]] ... [[ 0. -1.7308394 0.4726906 ... 0.75082636 1.5265762 -1.6431116 ] [ 0. 0.2127212 -0.61887085 ... -2.784372 -1.2069482 -0.41476193] [ 0. -0.18230216 -1.1027248 ... -1.5364416 -0.80649006 -0.53380156] ... [ 0. -1.0249461 -1.0367062 ... -1.3887318 0.4872847 -0.70316 ] [ 0. -0.40894502 -0.8406417 ... -1.1011422 0.20240907 0.9603591 ] [ 0. 0.49116454 0.6294219 ... 1.9993337 0.1194666 -1.0655444 ]] [[ 0. -1.4082378 -0.13391253 ... -0.06432655 -0.57196784 -0.352144 ] [ 0. -0.44902676 0.58604 ... 0.5907772 0.41881675 0.88273746] [ 0. 0.3011009 0.90438956 ... -0.8371917 0.2461919 0.73779523] ... [ 0. -1.3912774 0.5562762 ... -0.5024922 0.08988459 1.6525071 ] [ 0. -0.5522653 0.61601305 ... -0.18481238 -0.71344304 0.5900941 ] [ 0. 0.14890511 1.7316835 ... -0.10484893 1.0168315 1.4528438 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. -0.7567314 -0.9875606 ... 1.5321256 0.31919816 -0.23209336] [ 0. -0.32612064 1.3718381 ... 0.40835342 0.6581352 -0.53416324] [ 0. -0.13216996 -0.5370303 ... -0.48336375 -1.3794862 -0.7555238 ] ... [ 0. -0.57489127 -0.87465143 ... 0.04152123 -0.22963798 0.5798923 ] [ 0. 1.0350264 -1.7737327 ... 0.41177088 0.69385374 0.5382795 ] [ 0. 0.70875835 0.06395172 ... 
0.23081514 0.22579683 1.2911501 ]] [[ 0. -0.5367375 0.80061835 ... -0.32845688 -0.41141343 0.8121104 ] [ 0. -0.11751816 -0.27615714 ... -0.24377438 0.02242486 -0.47990477] [ 0. -1.3509011 1.6848096 ... -0.50539327 0.75709385 -0.12018029] ... [ 0. -0.29014063 -0.46975353 ... -1.0385728 -1.398235 -0.75972307] [ 0. -1.2941768 0.62373775 ... 0.56696415 0.13663082 -0.4035028 ] [ 0. 1.1268858 0.441896 ... -0.9544422 1.8142692 2.2357066 ]] [[ 0. -0.78152925 0.1688861 ... -0.41847038 -0.3052229 1.7293253 ] [ 0. -2.3391078 -1.036522 ... -1.2837858 -0.5397261 -1.9154513 ] [ 0. -0.82042795 0.89010185 ... 0.04198258 -0.02111089 1.3163941 ] ... [ 0. 0.24569225 -0.48138243 ... 0.5388556 -0.89407533 1.4419591 ] [ 0. 4.4408264 0.0386154 ... -1.4940623 1.3499408 2.140777 ] [ 0. -0.13406783 0.37928516 ... 0.37934458 1.5307786 0.1597057 ]] ... [[ 0. 0.5237357 0.07896925 ... -0.32464367 1.4231013 0.8916023 ] [ 0. -0.83956075 -0.03336907 ... 0.8726019 0.24320306 -0.57455564] [ 0. -0.19737805 -1.2374685 ... 0.8560932 -0.31925225 -0.5290642 ] ... [ 0. -1.6124327 1.0449421 ... 1.5872861 -0.31339008 -1.3527124 ] [ 0. -0.98905236 -1.2006316 ... -1.0032617 -2.1062703 1.5646186 ] [ 0. -0.86115056 1.3807733 ... 0.38965845 1.0481931 -1.2541605 ]] [[ 0. -0.09187569 -0.30332804 ... -1.205141 1.1423156 -0.52770376] [ 0. 0.63906044 1.5362554 ... -0.2688495 -2.037426 0.52920955] [ 0. -0.23650852 -1.8895943 ... 0.2830821 -0.86107385 0.03142321] ... [ 0. -1.0492363 1.1041381 ... -0.05761085 0.64037687 0.26385486] [ 0. 0.896886 -1.3371445 ... -0.47789034 -0.07184077 -1.4096917 ] [ 0. 0.92537713 -0.06521158 ... 0.20908825 0.679139 -1.07862 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. -0.99871117 0.62959105 ... -0.7244826 1.335015 0.38327354] [ 0. -0.5733132 1.9060724 ... 0.01193488 1.0520442 -0.12743382] [ 0. 0.7028737 0.03611442 ... -0.28717706 1.1007929 -0.21515748] ... [ 0. 
0.9401633 -1.3947947 ... -0.09884536 1.0907136 0.64498544] [ 0. -1.0666945 -0.36577606 ... -0.54367596 -0.07225692 -0.5917129 ] [ 0. 0.25162435 0.779592 ... 0.24019824 -0.96945244 -0.55638224]] [[ 0. -0.36560735 -0.26567012 ... 1.037793 -0.08835927 0.7199538 ] [ 0. -0.87977964 1.1664658 ... 0.15817028 -0.72560525 -1.0257304 ] [ 0. -0.06756358 2.0566711 ... -0.10022449 -0.33081123 0.8868938 ] ... [ 0. 0.3320302 -0.6988527 ... 2.32524 -0.7451462 -1.3963264 ] [ 0. 0.2529663 0.9640044 ... -0.90108675 0.11339589 0.5819532 ] [ 0. 0.71279913 -0.05085556 ... -1.899242 0.21507262 0.59861934]] [[ 0. 0.80314445 -0.03043877 ... 0.22667837 -0.68732256 -1.086719 ] [ 0. -1.1362377 -0.3779478 ... 0.27403256 0.4041196 -0.13689436] [ 0. 0.43114737 -1.116583 ... -0.45982486 -0.92093194 0.38511336] ... [ 0. -0.29164678 1.8562819 ... 1.6071416 -0.14652105 0.8805798 ] [ 0. -1.5299354 -0.12763578 ... 0.34512156 0.93738234 -1.7092347 ] [ 0. -1.7739009 -2.0611439 ... -0.34279332 -0.59088445 0.92536837]] ... [[ 0. -0.2986876 -0.8545024 ... -0.9432697 -0.55279464 -0.375199 ] [ 0. 1.6014408 -0.271981 ... -0.53382486 0.67767596 -2.636116 ] [ 0. -0.04290132 1.7951608 ... 0.4694867 1.5018433 0.2858557 ] ... [ 0. -1.2838807 1.02909 ... -0.05898803 -0.44845343 -0.3267768 ] [ 0. 0.5740919 -1.9077723 ... 0.63440233 -0.86865586 0.9127479 ] [ 0. 0.7272764 -0.05741662 ... 1.4903816 0.84881955 -0.32324985]] [[ 0. -0.8688747 0.70986724 ... 1.5203545 2.0830743 -0.802229 ] [ 0. -0.22640532 0.41434842 ... 0.10585175 -0.63973415 -0.95681876] [ 0. -1.3147616 0.786661 ... -0.8588489 -0.93758345 0.5938595 ] ... [ 0. 0.3513225 -0.24761349 ... -0.945746 0.02773631 -0.9122767 ] [ 0. -0.6679614 1.0076507 ... -1.3785394 0.17677605 0.69851315] [ 0. -1.7695014 0.08301241 ... 0.22120976 0.29998258 0.48985186]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 
0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]] [[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 
0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 
0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. 
... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]] [[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. 
... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]] [[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. 
... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. 
... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]]]; ov_res: [[[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 
0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... 
[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 
0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]] [[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... 
[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 
0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 
0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]] [[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0.8227891 -1.0119064 ... -0.9386463 0.63800275 0.3863003 ] [ 0. -1.1446784 -1.1824858 ... 0.93911016 0.260379 1.1839291 ] [ 0. -0.27926108 0.28807172 ... -1.1439561 -0.5849089 -1.0896434 ] ... [ 0. -1.0776547 -0.14874053 ... -1.1509743 -0.13808537 0.22910231] [ 0. 1.8411679 -0.33165637 ... 0.73915565 0.39518163 1.9672662 ] [ 0. 1.285953 0.29667866 ... -1.0023015 0.3003238 -0.338694 ]] [[ 0. -0.23638585 1.6199658 ... 0.32726058 0.175522 -0.28776595] [ 0. -0.639544 -0.9825682 ... -1.0464767 -0.11897838 -0.14620475] [ 0. 1.0407361 -0.8625769 ... 1.0139103 0.03504014 -0.5311946 ] ... [ 0. -0.24884047 -1.0822502 ... 1.5944604 -0.18364425 -1.6073047 ] [ 0. 0.445545 -1.3289703 ... -0.1175478 -0.46784875 1.6740576 ] [ 0. 0.11523513 -0.0300019 ... 0.5482693 0.7140044 -1.7578294 ]] [[ 0. -0.42596006 -1.0129732 ... 0.7394097 -1.815711 0.48150063] [ 0. 1.0346748 -0.5533011 ... -1.2797343 -0.5967356 -0.81590956] [ 0. 0.80149007 -0.5076447 ... 0.72013134 -1.6639107 1.1491297 ] ... [ 0. -0.24821378 -0.76500285 ... -0.32885152 -0.46385965 -0.03930042] [ 0. 1.2053242 -0.03890674 ... -0.98893046 -0.6038855 -0.5303928 ] [ 0. -1.2911031 -0.0892669 ... -0.36805153 -0.35931593 0.6025956 ]] ... [[ 0. -1.7308394 0.4726906 ... 0.75082636 1.5265762 -1.6431116 ] [ 0. 0.2127212 -0.61887085 ... -2.784372 -1.2069482 -0.41476193] [ 0. -0.18230216 -1.1027248 ... -1.5364416 -0.80649006 -0.53380156] ... [ 0. -1.0249461 -1.0367062 ... -1.3887318 0.4872847 -0.70316 ] [ 0. -0.40894502 -0.8406417 ... -1.1011422 0.20240907 0.9603591 ] [ 0. 0.49116454 0.6294219 ... 1.9993337 0.1194666 -1.0655444 ]] [[ 0. -1.4082378 -0.13391253 ... -0.06432655 -0.57196784 -0.352144 ] [ 0. -0.44902676 0.58604 ... 
0.5907772 0.41881675 0.88273746] [ 0. 0.3011009 0.90438956 ... -0.8371917 0.2461919 0.73779523] ... [ 0. -1.3912774 0.5562762 ... -0.5024922 0.08988459 1.6525071 ] [ 0. -0.5522653 0.61601305 ... -0.18481238 -0.71344304 0.5900941 ] [ 0. 0.14890511 1.7316835 ... -0.10484893 1.0168315 1.4528438 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. -0.7567314 -0.9875606 ... 1.5321256 0.31919816 -0.23209336] [ 0. -0.32612064 1.3718381 ... 0.40835342 0.6581352 -0.53416324] [ 0. -0.13216996 -0.5370303 ... -0.48336375 -1.3794862 -0.7555238 ] ... [ 0. -0.57489127 -0.87465143 ... 0.04152123 -0.22963798 0.5798923 ] [ 0. 1.0350264 -1.7737327 ... 0.41177088 0.69385374 0.5382795 ] [ 0. 0.70875835 0.06395172 ... 0.23081514 0.22579683 1.2911501 ]] [[ 0. -0.5367375 0.80061835 ... -0.32845688 -0.41141343 0.8121104 ] [ 0. -0.11751816 -0.27615714 ... -0.24377438 0.02242486 -0.47990477] [ 0. -1.3509011 1.6848096 ... -0.50539327 0.75709385 -0.12018029] ... [ 0. -0.29014063 -0.46975353 ... -1.0385728 -1.398235 -0.75972307] [ 0. -1.2941768 0.62373775 ... 0.56696415 0.13663082 -0.4035028 ] [ 0. 1.1268858 0.441896 ... -0.9544422 1.8142692 2.2357066 ]] [[ 0. -0.78152925 0.1688861 ... -0.41847038 -0.3052229 1.7293253 ] [ 0. -2.3391078 -1.036522 ... -1.2837858 -0.5397261 -1.9154513 ] [ 0. -0.82042795 0.89010185 ... 0.04198258 -0.02111089 1.3163941 ] ... [ 0. 0.24569225 -0.48138243 ... 0.5388556 -0.89407533 1.4419591 ] [ 0. 4.4408264 0.0386154 ... -1.4940623 1.3499408 2.140777 ] [ 0. -0.13406783 0.37928516 ... 0.37934458 1.5307786 0.1597057 ]] ... [[ 0. 0.5237357 0.07896925 ... -0.32464367 1.4231013 0.8916023 ] [ 0. -0.83956075 -0.03336907 ... 0.8726019 0.24320306 -0.57455564] [ 0. -0.19737805 -1.2374685 ... 0.8560932 -0.31925225 -0.5290642 ] ... [ 0. -1.6124327 1.0449421 ... 1.5872861 -0.31339008 -1.3527124 ] [ 0. -0.98905236 -1.2006316 ... 
-1.0032617 -2.1062703 1.5646186 ] [ 0. -0.86115056 1.3807733 ... 0.38965845 1.0481931 -1.2541605 ]] [[ 0. -0.09187569 -0.30332804 ... -1.205141 1.1423156 -0.52770376] [ 0. 0.63906044 1.5362554 ... -0.2688495 -2.037426 0.52920955] [ 0. -0.23650852 -1.8895943 ... 0.2830821 -0.86107385 0.03142321] ... [ 0. -1.0492363 1.1041381 ... -0.05761085 0.64037687 0.26385486] [ 0. 0.896886 -1.3371445 ... -0.47789034 -0.07184077 -1.4096917 ] [ 0. 0.92537713 -0.06521158 ... 0.20908825 0.679139 -1.07862 ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. -0.99871117 0.62959105 ... -0.7244826 1.335015 0.38327354] [ 0. -0.5733132 1.9060724 ... 0.01193488 1.0520442 -0.12743382] [ 0. 0.7028737 0.03611442 ... -0.28717706 1.1007929 -0.21515748] ... [ 0. 0.9401633 -1.3947947 ... -0.09884536 1.0907136 0.64498544] [ 0. -1.0666945 -0.36577606 ... -0.54367596 -0.07225692 -0.5917129 ] [ 0. 0.25162435 0.779592 ... 0.24019824 -0.96945244 -0.55638224]] [[ 0. -0.36560735 -0.26567012 ... 1.037793 -0.08835927 0.7199538 ] [ 0. -0.87977964 1.1664658 ... 0.15817028 -0.72560525 -1.0257304 ] [ 0. -0.06756358 2.0566711 ... -0.10022449 -0.33081123 0.8868938 ] ... [ 0. 0.3320302 -0.6988527 ... 2.32524 -0.7451462 -1.3963264 ] [ 0. 0.2529663 0.9640044 ... -0.90108675 0.11339589 0.5819532 ] [ 0. 0.71279913 -0.05085556 ... -1.899242 0.21507262 0.59861934]] [[ 0. 0.80314445 -0.03043877 ... 0.22667837 -0.68732256 -1.086719 ] [ 0. -1.1362377 -0.3779478 ... 0.27403256 0.4041196 -0.13689436] [ 0. 0.43114737 -1.116583 ... -0.45982486 -0.92093194 0.38511336] ... [ 0. -0.29164678 1.8562819 ... 1.6071416 -0.14652105 0.8805798 ] [ 0. -1.5299354 -0.12763578 ... 0.34512156 0.93738234 -1.7092347 ] [ 0. -1.7739009 -2.0611439 ... -0.34279332 -0.59088445 0.92536837]] ... [[ 0. -0.2986876 -0.8545024 ... -0.9432697 -0.55279464 -0.375199 ] [ 0. 1.6014408 -0.271981 ... -0.53382486 0.67767596 -2.636116 ] [ 0. 
-0.04290132 1.7951608 ... 0.4694867 1.5018433 0.2858557 ] ... [ 0. -1.2838807 1.02909 ... -0.05898803 -0.44845343 -0.3267768 ] [ 0. 0.5740919 -1.9077723 ... 0.63440233 -0.86865586 0.9127479 ] [ 0. 0.7272764 -0.05741662 ... 1.4903816 0.84881955 -0.32324985]] [[ 0. -0.8688747 0.70986724 ... 1.5203545 2.0830743 -0.802229 ] [ 0. -0.22640532 0.41434842 ... 0.10585175 -0.63973415 -0.95681876] [ 0. -1.3147616 0.786661 ... -0.8588489 -0.93758345 0.5938595 ] ... [ 0. 0.3513225 -0.24761349 ... -0.945746 0.02773631 -0.9122767 ] [ 0. -0.6679614 1.0076507 ... -1.3785394 0.17677605 0.69851315] [ 0. -1.7695014 0.08301241 ... 0.22120976 0.29998258 0.48985186]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 
0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]] [[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... 
[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 
0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 
0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]] [[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. 
... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]] [[[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 
0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. 
] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]] [[[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] ... [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. 
... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]] [[ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] ... [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ] [ 0. 0. 0. ... 0. 0. 0. ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 0, 0, 0, 0) - mode:circular - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5572.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="circular"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2, 0, 0, 0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 0.37880868 0.68068624 0.20739508 ... 0.37880868 0.68068624 0.20739508] [ 0.46038613 1.8444563 -0.43442428 ... 0.46038613 1.8444563 -0.43442428] [ 0.5721077 -1.089023 -0.3100213 ... 0.5721077 -1.089023 -0.3100213 ] ... [ 1.4804151 0.5283898 0.08027624 ... 1.4804151 0.5283898 0.08027624] [ 2.156444 0.09984811 -0.44980535 ... 2.156444 0.09984811 -0.44980535] [ 0.8008472 -1.2087048 0.02053211 ... 0.8008472 -1.2087048 0.02053211]] [[ 0.17388685 2.0306172 -0.8451224 ... 0.17388685 2.0306172 -0.8451224 ] [-0.64041424 1.7967048 -0.57674754 ... -0.64041424 1.7967048 -0.57674754] [-0.11374393 -1.0127447 1.2083054 ... -0.11374393 -1.0127447 1.2083054 ] ... [-1.2745898 1.2588923 -0.14162144 ... -1.2745898 1.2588923 -0.14162144] [ 0.37750295 -0.89436424 -0.98394793 ... 0.37750295 -0.89436424 -0.98394793] [-0.96554285 0.4123174 0.57025224 ... -0.96554285 0.4123174 0.57025224]] [[-1.1848755 -0.29693067 -1.3679104 ... -1.1848755 -0.29693067 -1.3679104 ] [-0.04422021 -0.55168957 -1.3301035 ... -0.04422021 -0.55168957 -1.3301035 ] [-0.10404997 1.1893184 -1.7356697 ... -0.10404997 1.1893184 -1.7356697 ] ... [-0.9873826 0.39145553 -1.0327325 ... -0.9873826 0.39145553 -1.0327325 ] [-1.7291402 -1.1087108 -0.1157877 ... -1.7291402 -1.1087108 -0.1157877 ] [-0.15310076 -0.00560331 -0.5974042 ... -0.15310076 -0.00560331 -0.5974042 ]] ... [[-0.7006434 -0.43552458 -0.3892966 ... -0.7006434 -0.43552458 -0.3892966 ] [ 1.3908916 0.5717682 1.4962746 ... 
1.3908916 0.5717682 1.4962746 ] [ 0.7075992 -0.61640704 1.2245787 ... 0.7075992 -0.61640704 1.2245787 ] ... [-1.9000585 -0.8607623 1.9526489 ... -1.9000585 -0.8607623 1.9526489 ] [ 0.5470385 -2.093405 0.21795756 ... 0.5470385 -2.093405 0.21795756] [ 1.2853317 -1.4743141 -0.38593102 ... 1.2853317 -1.4743141 -0.38593102]] [[-0.03085194 -0.50679344 -1.792975 ... -0.03085194 -0.50679344 -1.792975 ] [-1.2874058 0.5504852 0.38791415 ... -1.2874058 0.5504852 0.38791415] [ 0.4463067 1.9871719 2.3352432 ... 0.4463067 1.9871719 2.3352432 ] ... [ 0.69877625 0.7973796 -0.84530854 ... 0.69877625 0.7973796 -0.84530854] [ 1.3860857 -0.33079532 -0.25064355 ... 1.3860857 -0.33079532 -0.25064355] [-0.1430469 -0.8351438 0.42488796 ... -0.1430469 -0.8351438 0.42488796]] [[-0.5969188 1.3164581 0.09340956 ... -0.5969188 1.3164581 0.09340956] [ 1.1999185 1.2315145 -0.26621202 ... 1.1999185 1.2315145 -0.26621202] [-0.11620613 1.5850155 0.43739724 ... -0.11620613 1.5850155 0.43739724] ... [ 0.5252224 -1.2872858 0.10607541 ... 0.5252224 -1.2872858 0.10607541] [-1.5987161 -0.43482527 0.72248566 ... -1.5987161 -0.43482527 0.72248566] [-0.36184546 -1.0349845 1.9108465 ... -0.36184546 -1.0349845 1.9108465 ]]] [[[-0.6981376 -2.044954 -0.05241985 ... -0.6981376 -2.044954 -0.05241985] [-0.0878995 -2.187573 0.62577164 ... -0.0878995 -2.187573 0.62577164] [ 1.6767776 0.67645776 0.9220449 ... 1.6767776 0.67645776 0.9220449 ] ... [ 1.3849611 0.13095629 -0.43503556 ... 1.3849611 0.13095629 -0.43503556] [-0.6570841 0.6184004 -1.688058 ... -0.6570841 0.6184004 -1.688058 ] [ 2.0628467 0.8074335 -0.7168977 ... 2.0628467 0.8074335 -0.7168977 ]] [[ 0.32454747 0.3092792 0.08441489 ... 0.32454747 0.3092792 0.08441489] [ 0.28228313 -1.2729142 1.6220675 ... 0.28228313 -1.2729142 1.6220675 ] [ 1.0414094 -0.5268106 -0.01821892 ... 1.0414094 -0.5268106 -0.01821892] ... [-0.79223794 -0.87088925 0.20517097 ... -0.79223794 -0.87088925 0.20517097] [ 0.57638425 -0.6677807 1.0828387 ... 
0.57638425 -0.6677807 1.0828387 ] [ 1.3398017 -0.6161207 0.8623166 ... 1.3398017 -0.6161207 0.8623166 ]] [[ 0.5249033 1.1844852 -0.31726977 ... 0.5249033 1.1844852 -0.31726977] [-0.30353165 -1.6469265 -0.07666128 ... -0.30353165 -1.6469265 -0.07666128] [-0.7995077 0.5731622 1.104548 ... -0.7995077 0.5731622 1.104548 ] ... [ 1.7998573 0.40578827 -1.167384 ... 1.7998573 0.40578827 -1.167384 ] [ 0.92719597 -0.75663126 -1.1286675 ... 0.92719597 -0.75663126 -1.1286675 ] [-0.4420877 1.8273338 -0.93068516 ... -0.4420877 1.8273338 -0.93068516]] ... [[ 0.70352954 -2.0735166 -1.4280672 ... 0.70352954 -2.0735166 -1.4280672 ] [-0.28703842 0.4824239 -0.16645561 ... -0.28703842 0.4824239 -0.16645561] [ 0.38952866 -1.5939039 0.50527877 ... 0.38952866 -1.5939039 0.50527877] ... [ 0.28312355 1.0942973 0.8105058 ... 0.28312355 1.0942973 0.8105058 ] [-0.913275 -0.3653751 1.0227369 ... -0.913275 -0.3653751 1.0227369 ] [ 0.09195846 -1.3137109 -1.8855957 ... 0.09195846 -1.3137109 -1.8855957 ]] [[-0.8642954 -1.7998 0.41130847 ... -0.8642954 -1.7998 0.41130847] [-1.1969925 0.7044658 -2.1638296 ... -1.1969925 0.7044658 -2.1638296 ] [ 0.69200945 -0.336006 -1.1668826 ... 0.69200945 -0.336006 -1.1668826 ] ... [-0.74087703 0.22479698 -0.31348804 ... -0.74087703 0.22479698 -0.31348804] [-0.2833197 1.3127662 -0.36898154 ... -0.2833197 1.3127662 -0.36898154] [-1.2718474 -0.7223546 0.73771054 ... -1.2718474 -0.7223546 0.73771054]] [[-0.32817665 0.4851381 -1.3344971 ... -0.32817665 0.4851381 -1.3344971 ] [ 0.17124724 2.1228495 0.12618174 ... 0.17124724 2.1228495 0.12618174] [-0.40790373 0.07780166 0.6957799 ... -0.40790373 0.07780166 0.6957799 ] ... [ 0.70223105 -1.660918 0.49169537 ... 0.70223105 -1.660918 0.49169537] [-0.28257784 -1.5773414 -0.3110427 ... -0.28257784 -1.5773414 -0.3110427 ] [-0.93417174 -0.19959962 0.22750509 ... -0.93417174 -0.19959962 0.22750509]]] [[[-0.00620431 -1.089953 0.82193285 ... -0.00620431 -1.089953 0.82193285] [-0.17148332 -0.49323457 0.62542546 ... 
-0.17148332 -0.49323457 0.62542546] [ 0.18209642 -0.5200535 0.09000982 ... 0.18209642 -0.5200535 0.09000982] ... [-0.21439753 0.06518424 -1.7417033 ... -0.21439753 0.06518424 -1.7417033 ] [-0.84903 1.1839484 -0.01706216 ... -0.84903 1.1839484 -0.01706216] [-2.3082209 0.08648819 -2.561133 ... -2.3082209 0.08648819 -2.561133 ]] [[ 1.475626 -0.50792855 0.9678747 ... 1.475626 -0.50792855 0.9678747 ] [-0.19563305 -1.2430443 -1.5510662 ... -0.19563305 -1.2430443 -1.5510662 ] [ 2.2137988 -2.7965646 0.42875153 ... 2.2137988 -2.7965646 0.42875153] ... [-0.75005686 -2.489735 -1.0167009 ... -0.75005686 -2.489735 -1.0167009 ] [ 1.8027607 0.2761783 -0.04934733 ... 1.8027607 0.2761783 -0.04934733] [ 0.85281754 -0.9509976 0.8118288 ... 0.85281754 -0.9509976 0.8118288 ]] [[ 0.20514312 1.018995 1.8963223 ... 0.20514312 1.018995 1.8963223 ] [-0.39306268 0.4288733 -0.9816849 ... -0.39306268 0.4288733 -0.9816849 ] [ 1.7898958 -1.1389562 1.3550067 ... 1.7898958 -1.1389562 1.3550067 ] ... [-0.4256133 1.0565443 -1.2404082 ... -0.4256133 1.0565443 -1.2404082 ] [ 0.32947922 -0.19581005 -0.31142458 ... 0.32947922 -0.19581005 -0.31142458] [-1.5021989 -0.21550588 0.03274842 ... -1.5021989 -0.21550588 0.03274842]] ... [[-1.3883338 0.34843957 -2.4501412 ... -1.3883338 0.34843957 -2.4501412 ] [-0.85926646 -0.20627466 1.3201345 ... -0.85926646 -0.20627466 1.3201345 ] [ 0.06788076 -0.79724044 -1.0917828 ... 0.06788076 -0.79724044 -1.0917828 ] ... [-1.2005075 -0.39841363 1.9459845 ... -1.2005075 -0.39841363 1.9459845 ] [-1.0370952 -1.5649813 -0.45054772 ... -1.0370952 -1.5649813 -0.45054772] [-0.6158574 0.42224297 -0.2817957 ... -0.6158574 0.42224297 -0.2817957 ]] [[-1.2376848 1.105126 -1.2532109 ... -1.2376848 1.105126 -1.2532109 ] [ 0.14429538 -1.3255552 -0.35241976 ... 0.14429538 -1.3255552 -0.35241976] [ 1.7997409 0.84060866 1.110865 ... 1.7997409 0.84060866 1.110865 ] ... [ 0.8103374 -1.8383076 0.80557615 ... 0.8103374 -1.8383076 0.80557615] [ 0.40039083 -0.58082944 -0.92203814 ... 
0.40039083 -0.58082944 -0.92203814] [ 0.4740357 -0.1047878 -0.30480373 ... 0.4740357 -0.1047878 -0.30480373]] [[-0.96613836 -0.7452124 -0.00674435 ... -0.96613836 -0.7452124 -0.00674435] [ 0.20816733 -1.8178861 -0.9788213 ... 0.20816733 -1.8178861 -0.9788213 ] [ 0.8059445 -0.50662565 -0.6121895 ... 0.8059445 -0.50662565 -0.6121895 ] ... [-0.2577334 1.2533808 -0.02123976 ... -0.2577334 1.2533808 -0.02123976] [-0.97440535 0.02293926 -0.96537316 ... -0.97440535 0.02293926 -0.96537316] [ 0.32909718 1.0485084 0.68162864 ... 0.32909718 1.0485084 0.68162864]]]]]; ov_res: [[[[[ 0.37880868 0.68068624 0.20739508 ... 0.37880868 0.68068624 0.20739508] [ 0.46038613 1.8444563 -0.43442428 ... 0.46038613 1.8444563 -0.43442428] [ 0.5721077 -1.089023 -0.3100213 ... 0.5721077 -1.089023 -0.3100213 ] ... [ 1.4804151 0.5283898 0.08027624 ... 1.4804151 0.5283898 0.08027624] [ 2.156444 0.09984811 -0.44980535 ... 2.156444 0.09984811 -0.44980535] [ 0.8008472 -1.2087048 0.02053211 ... 0.8008472 -1.2087048 0.02053211]] [[ 0.17388685 2.0306172 -0.8451224 ... 0.17388685 2.0306172 -0.8451224 ] [-0.64041424 1.7967048 -0.57674754 ... -0.64041424 1.7967048 -0.57674754] [-0.11374393 -1.0127447 1.2083054 ... -0.11374393 -1.0127447 1.2083054 ] ... [-1.2745898 1.2588923 -0.14162144 ... -1.2745898 1.2588923 -0.14162144] [ 0.37750295 -0.89436424 -0.98394793 ... 0.37750295 -0.89436424 -0.98394793] [-0.96554285 0.4123174 0.57025224 ... -0.96554285 0.4123174 0.57025224]] [[-1.1848755 -0.29693067 -1.3679104 ... -1.1848755 -0.29693067 -1.3679104 ] [-0.04422021 -0.55168957 -1.3301035 ... -0.04422021 -0.55168957 -1.3301035 ] [-0.10404997 1.1893184 -1.7356697 ... -0.10404997 1.1893184 -1.7356697 ] ... [-0.9873826 0.39145553 -1.0327325 ... -0.9873826 0.39145553 -1.0327325 ] [-1.7291402 -1.1087108 -0.1157877 ... -1.7291402 -1.1087108 -0.1157877 ] [-0.15310076 -0.00560331 -0.5974042 ... -0.15310076 -0.00560331 -0.5974042 ]] ... [[-0.7006434 -0.43552458 -0.3892966 ... 
-0.7006434 -0.43552458 -0.3892966 ] [ 1.3908916 0.5717682 1.4962746 ... 1.3908916 0.5717682 1.4962746 ] [ 0.7075992 -0.61640704 1.2245787 ... 0.7075992 -0.61640704 1.2245787 ] ... [-1.9000585 -0.8607623 1.9526489 ... -1.9000585 -0.8607623 1.9526489 ] [ 0.5470385 -2.093405 0.21795756 ... 0.5470385 -2.093405 0.21795756] [ 1.2853317 -1.4743141 -0.38593102 ... 1.2853317 -1.4743141 -0.38593102]] [[-0.03085194 -0.50679344 -1.792975 ... -0.03085194 -0.50679344 -1.792975 ] [-1.2874058 0.5504852 0.38791415 ... -1.2874058 0.5504852 0.38791415] [ 0.4463067 1.9871719 2.3352432 ... 0.4463067 1.9871719 2.3352432 ] ... [ 0.69877625 0.7973796 -0.84530854 ... 0.69877625 0.7973796 -0.84530854] [ 1.3860857 -0.33079532 -0.25064355 ... 1.3860857 -0.33079532 -0.25064355] [-0.1430469 -0.8351438 0.42488796 ... -0.1430469 -0.8351438 0.42488796]] [[-0.5969188 1.3164581 0.09340956 ... -0.5969188 1.3164581 0.09340956] [ 1.1999185 1.2315145 -0.26621202 ... 1.1999185 1.2315145 -0.26621202] [-0.11620613 1.5850155 0.43739724 ... -0.11620613 1.5850155 0.43739724] ... [ 0.5252224 -1.2872858 0.10607541 ... 0.5252224 -1.2872858 0.10607541] [-1.5987161 -0.43482527 0.72248566 ... -1.5987161 -0.43482527 0.72248566] [-0.36184546 -1.0349845 1.9108465 ... -0.36184546 -1.0349845 1.9108465 ]]] [[[-0.6981376 -2.044954 -0.05241985 ... -0.6981376 -2.044954 -0.05241985] [-0.0878995 -2.187573 0.62577164 ... -0.0878995 -2.187573 0.62577164] [ 1.6767776 0.67645776 0.9220449 ... 1.6767776 0.67645776 0.9220449 ] ... [ 1.3849611 0.13095629 -0.43503556 ... 1.3849611 0.13095629 -0.43503556] [-0.6570841 0.6184004 -1.688058 ... -0.6570841 0.6184004 -1.688058 ] [ 2.0628467 0.8074335 -0.7168977 ... 2.0628467 0.8074335 -0.7168977 ]] [[ 0.32454747 0.3092792 0.08441489 ... 0.32454747 0.3092792 0.08441489] [ 0.28228313 -1.2729142 1.6220675 ... 0.28228313 -1.2729142 1.6220675 ] [ 1.0414094 -0.5268106 -0.01821892 ... 1.0414094 -0.5268106 -0.01821892] ... [-0.79223794 -0.87088925 0.20517097 ... 
-0.79223794 -0.87088925 0.20517097] [ 0.57638425 -0.6677807 1.0828387 ... 0.57638425 -0.6677807 1.0828387 ] [ 1.3398017 -0.6161207 0.8623166 ... 1.3398017 -0.6161207 0.8623166 ]] [[ 0.5249033 1.1844852 -0.31726977 ... 0.5249033 1.1844852 -0.31726977] [-0.30353165 -1.6469265 -0.07666128 ... -0.30353165 -1.6469265 -0.07666128] [-0.7995077 0.5731622 1.104548 ... -0.7995077 0.5731622 1.104548 ] ... [ 1.7998573 0.40578827 -1.167384 ... 1.7998573 0.40578827 -1.167384 ] [ 0.92719597 -0.75663126 -1.1286675 ... 0.92719597 -0.75663126 -1.1286675 ] [-0.4420877 1.8273338 -0.93068516 ... -0.4420877 1.8273338 -0.93068516]] ... [[ 0.70352954 -2.0735166 -1.4280672 ... 0.70352954 -2.0735166 -1.4280672 ] [-0.28703842 0.4824239 -0.16645561 ... -0.28703842 0.4824239 -0.16645561] [ 0.38952866 -1.5939039 0.50527877 ... 0.38952866 -1.5939039 0.50527877] ... [ 0.28312355 1.0942973 0.8105058 ... 0.28312355 1.0942973 0.8105058 ] [-0.913275 -0.3653751 1.0227369 ... -0.913275 -0.3653751 1.0227369 ] [ 0.09195846 -1.3137109 -1.8855957 ... 0.09195846 -1.3137109 -1.8855957 ]] [[-0.8642954 -1.7998 0.41130847 ... -0.8642954 -1.7998 0.41130847] [-1.1969925 0.7044658 -2.1638296 ... -1.1969925 0.7044658 -2.1638296 ] [ 0.69200945 -0.336006 -1.1668826 ... 0.69200945 -0.336006 -1.1668826 ] ... [-0.74087703 0.22479698 -0.31348804 ... -0.74087703 0.22479698 -0.31348804] [-0.2833197 1.3127662 -0.36898154 ... -0.2833197 1.3127662 -0.36898154] [-1.2718474 -0.7223546 0.73771054 ... -1.2718474 -0.7223546 0.73771054]] [[-0.32817665 0.4851381 -1.3344971 ... -0.32817665 0.4851381 -1.3344971 ] [ 0.17124724 2.1228495 0.12618174 ... 0.17124724 2.1228495 0.12618174] [-0.40790373 0.07780166 0.6957799 ... -0.40790373 0.07780166 0.6957799 ] ... [ 0.70223105 -1.660918 0.49169537 ... 0.70223105 -1.660918 0.49169537] [-0.28257784 -1.5773414 -0.3110427 ... -0.28257784 -1.5773414 -0.3110427 ] [-0.93417174 -0.19959962 0.22750509 ... -0.93417174 -0.19959962 0.22750509]]] [[[-0.00620431 -1.089953 0.82193285 ... 
-0.00620431 -1.089953 0.82193285] [-0.17148332 -0.49323457 0.62542546 ... -0.17148332 -0.49323457 0.62542546] [ 0.18209642 -0.5200535 0.09000982 ... 0.18209642 -0.5200535 0.09000982] ... [-0.21439753 0.06518424 -1.7417033 ... -0.21439753 0.06518424 -1.7417033 ] [-0.84903 1.1839484 -0.01706216 ... -0.84903 1.1839484 -0.01706216] [-2.3082209 0.08648819 -2.561133 ... -2.3082209 0.08648819 -2.561133 ]] [[ 1.475626 -0.50792855 0.9678747 ... 1.475626 -0.50792855 0.9678747 ] [-0.19563305 -1.2430443 -1.5510662 ... -0.19563305 -1.2430443 -1.5510662 ] [ 2.2137988 -2.7965646 0.42875153 ... 2.2137988 -2.7965646 0.42875153] ... [-0.75005686 -2.489735 -1.0167009 ... -0.75005686 -2.489735 -1.0167009 ] [ 1.8027607 0.2761783 -0.04934733 ... 1.8027607 0.2761783 -0.04934733] [ 0.85281754 -0.9509976 0.8118288 ... 0.85281754 -0.9509976 0.8118288 ]] [[ 0.20514312 1.018995 1.8963223 ... 0.20514312 1.018995 1.8963223 ] [-0.39306268 0.4288733 -0.9816849 ... -0.39306268 0.4288733 -0.9816849 ] [ 1.7898958 -1.1389562 1.3550067 ... 1.7898958 -1.1389562 1.3550067 ] ... [-0.4256133 1.0565443 -1.2404082 ... -0.4256133 1.0565443 -1.2404082 ] [ 0.32947922 -0.19581005 -0.31142458 ... 0.32947922 -0.19581005 -0.31142458] [-1.5021989 -0.21550588 0.03274842 ... -1.5021989 -0.21550588 0.03274842]] ... [[-1.3883338 0.34843957 -2.4501412 ... -1.3883338 0.34843957 -2.4501412 ] [-0.85926646 -0.20627466 1.3201345 ... -0.85926646 -0.20627466 1.3201345 ] [ 0.06788076 -0.79724044 -1.0917828 ... 0.06788076 -0.79724044 -1.0917828 ] ... [-1.2005075 -0.39841363 1.9459845 ... -1.2005075 -0.39841363 1.9459845 ] [-1.0370952 -1.5649813 -0.45054772 ... -1.0370952 -1.5649813 -0.45054772] [-0.6158574 0.42224297 -0.2817957 ... -0.6158574 0.42224297 -0.2817957 ]] [[-1.2376848 1.105126 -1.2532109 ... -1.2376848 1.105126 -1.2532109 ] [ 0.14429538 -1.3255552 -0.35241976 ... 0.14429538 -1.3255552 -0.35241976] [ 1.7997409 0.84060866 1.110865 ... 1.7997409 0.84060866 1.110865 ] ... [ 0.8103374 -1.8383076 0.80557615 ... 
0.8103374 -1.8383076 0.80557615] [ 0.40039083 -0.58082944 -0.92203814 ... 0.40039083 -0.58082944 -0.92203814] [ 0.4740357 -0.1047878 -0.30480373 ... 0.4740357 -0.1047878 -0.30480373]] [[-0.96613836 -0.7452124 -0.00674435 ... -0.96613836 -0.7452124 -0.00674435] [ 0.20816733 -1.8178861 -0.9788213 ... 0.20816733 -1.8178861 -0.9788213 ] [ 0.8059445 -0.50662565 -0.6121895 ... 0.8059445 -0.50662565 -0.6121895 ] ... [-0.2577334 1.2533808 -0.02123976 ... -0.2577334 1.2533808 -0.02123976] [-0.97440535 0.02293926 -0.96537316 ... -0.97440535 0.02293926 -0.96537316] [ 0.32909718 1.0485084 0.68162864 ... 0.32909718 1.0485084 0.68162864]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4, 5, 6) - mode:circular - value:None ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5575.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="circular"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2, 3, 4, 5, 6]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[-5.53810179e-01 -3.35653782e-01 -4.41951036e-01 ... -5.53810179e-01 -3.35653782e-01 -4.41951036e-01] [ 4.26710658e-02 1.38215244e-01 2.80739903e-01 ... 4.26710658e-02 1.38215244e-01 2.80739903e-01] [ 1.48893726e+00 8.71097505e-01 5.20508289e-01 ... 1.48893726e+00 8.71097505e-01 5.20508289e-01] ... [ 1.10206090e-01 -8.99109125e-01 2.69632316e+00 ... 1.10206090e-01 -8.99109125e-01 2.69632316e+00] [ 4.19339955e-01 -8.89167428e-01 1.22228551e+00 ... 4.19339955e-01 -8.89167428e-01 1.22228551e+00] [-1.53784990e+00 1.13165617e+00 1.63614526e-02 ... -1.53784990e+00 1.13165617e+00 1.63614526e-02]] [[ 7.08623767e-01 -3.67839575e-01 1.20065451e+00 ... 7.08623767e-01 -3.67839575e-01 1.20065451e+00] [-6.14120185e-01 7.70440996e-01 -6.02273166e-01 ... -6.14120185e-01 7.70440996e-01 -6.02273166e-01] [-3.84867191e-01 5.27938783e-01 -1.38635814e+00 ... -3.84867191e-01 5.27938783e-01 -1.38635814e+00] ... [ 1.83500275e-01 8.32151234e-01 3.98182005e-01 ... 1.83500275e-01 8.32151234e-01 3.98182005e-01] [-4.76032704e-01 1.42752945e-01 8.61519456e-01 ... -4.76032704e-01 1.42752945e-01 8.61519456e-01] [ 4.16439027e-01 -6.41110003e-01 -1.96456945e+00 ... 4.16439027e-01 -6.41110003e-01 -1.96456945e+00]] [[-4.46101546e-01 -1.47212133e-01 2.05463037e-01 ... -4.46101546e-01 -1.47212133e-01 2.05463037e-01] [ 5.30359089e-01 -2.39112824e-02 -1.39535701e+00 ... 5.30359089e-01 -2.39112824e-02 -1.39535701e+00] [-1.56895137e+00 -1.27823639e+00 3.17921162e-01 ... 
-1.56895137e+00 -1.27823639e+00 3.17921162e-01] ... [ 1.98210168e+00 -1.19721973e+00 3.78609389e-01 ... 1.98210168e+00 -1.19721973e+00 3.78609389e-01] [ 3.91592503e-01 -2.45806843e-01 8.55446279e-01 ... 3.91592503e-01 -2.45806843e-01 8.55446279e-01] [ 1.46099234e+00 7.03478932e-01 1.91950709e-01 ... 1.46099234e+00 7.03478932e-01 1.91950709e-01]] ... [[-1.69019258e+00 5.64639866e-01 6.19974494e-01 ... -1.69019258e+00 5.64639866e-01 6.19974494e-01] [-4.10598963e-01 1.48990989e+00 4.75591809e-01 ... -4.10598963e-01 1.48990989e+00 4.75591809e-01] [ 5.52183509e-01 -2.45235741e-01 3.62448335e-01 ... 5.52183509e-01 -2.45235741e-01 3.62448335e-01] ... [ 7.52554610e-02 -3.29592079e-01 7.81781793e-01 ... 7.52554610e-02 -3.29592079e-01 7.81781793e-01] [-2.98086673e-01 4.59039330e-01 9.50652212e-02 ... -2.98086673e-01 4.59039330e-01 9.50652212e-02] [-1.58297694e+00 -6.25060499e-01 -2.17163041e-01 ... -1.58297694e+00 -6.25060499e-01 -2.17163041e-01]] [[-8.67935121e-02 -7.63063073e-01 -1.91505563e+00 ... -8.67935121e-02 -7.63063073e-01 -1.91505563e+00] [-8.58444750e-01 -4.40798551e-01 3.07928562e-01 ... -8.58444750e-01 -4.40798551e-01 3.07928562e-01] [ 9.24098372e-01 1.18905866e+00 -1.16279221e+00 ... 9.24098372e-01 1.18905866e+00 -1.16279221e+00] ... [-5.97966015e-01 -1.70915246e+00 3.65120232e-01 ... -5.97966015e-01 -1.70915246e+00 3.65120232e-01] [ 2.19353363e-02 2.78805524e-01 6.88627601e-01 ... 2.19353363e-02 2.78805524e-01 6.88627601e-01] [ 1.67381823e+00 1.99443042e+00 5.83513021e-01 ... 1.67381823e+00 1.99443042e+00 5.83513021e-01]] [[ 2.85938680e-01 -4.64276522e-02 -7.42582798e-01 ... 2.85938680e-01 -4.64276522e-02 -7.42582798e-01] [-9.82019126e-01 1.50899541e+00 7.40528405e-01 ... -9.82019126e-01 1.50899541e+00 7.40528405e-01] [-7.62559950e-01 -2.84079313e-01 -8.99489582e-01 ... -7.62559950e-01 -2.84079313e-01 -8.99489582e-01] ... [ 3.62559646e-01 -9.20100689e-01 1.25418901e+00 ... 
3.62559646e-01 -9.20100689e-01 1.25418901e+00] [-7.21807539e-01 3.01095098e-01 1.12959135e+00 ... -7.21807539e-01 3.01095098e-01 1.12959135e+00] [ 1.69223678e+00 -1.29378068e+00 -8.06537747e-01 ... 1.69223678e+00 -1.29378068e+00 -8.06537747e-01]]] [[[-8.05147946e-01 -6.05183071e-04 -5.11953533e-01 ... -8.05147946e-01 -6.05183071e-04 -5.11953533e-01] [ 2.05480027e+00 -1.49634898e-01 4.07393724e-02 ... 2.05480027e+00 -1.49634898e-01 4.07393724e-02] [ 1.40852773e+00 -1.34248292e+00 -4.75500494e-01 ... 1.40852773e+00 -1.34248292e+00 -4.75500494e-01] ... [ 1.28205955e+00 -4.94769737e-02 -2.15073615e-01 ... 1.28205955e+00 -4.94769737e-02 -2.15073615e-01] [-6.21946454e-01 1.82031238e+00 6.84965402e-02 ... -6.21946454e-01 1.82031238e+00 6.84965402e-02] [-1.18781471e+00 -2.22336698e+00 -1.29813647e+00 ... -1.18781471e+00 -2.22336698e+00 -1.29813647e+00]] [[ 4.99563515e-01 3.89574766e-02 -1.04971492e+00 ... 4.99563515e-01 3.89574766e-02 -1.04971492e+00] [-1.21588075e+00 3.02931428e-01 2.68964827e-01 ... -1.21588075e+00 3.02931428e-01 2.68964827e-01] [ 7.32467324e-02 4.32019264e-01 -2.27137327e-01 ... 7.32467324e-02 4.32019264e-01 -2.27137327e-01] ... [ 1.66795599e+00 -2.24787593e+00 -3.63243341e-01 ... 1.66795599e+00 -2.24787593e+00 -3.63243341e-01] [ 3.42073023e-01 -5.51085353e-01 1.47408172e-01 ... 3.42073023e-01 -5.51085353e-01 1.47408172e-01] [ 1.16796899e+00 1.08816779e+00 9.33801174e-01 ... 1.16796899e+00 1.08816779e+00 9.33801174e-01]] [[-9.36535239e-01 6.79753780e-01 6.07898772e-01 ... -9.36535239e-01 6.79753780e-01 6.07898772e-01] [-7.17329144e-01 1.42339122e+00 6.18601561e-01 ... -7.17329144e-01 1.42339122e+00 6.18601561e-01] [ 1.00778472e+00 6.87917769e-01 8.99764001e-01 ... 1.00778472e+00 6.87917769e-01 8.99764001e-01] ... [-1.12441793e-01 7.36056924e-01 -1.48291814e+00 ... -1.12441793e-01 7.36056924e-01 -1.48291814e+00] [-7.26198480e-02 1.39841366e+00 -1.24544501e+00 ... 
-7.26198480e-02 1.39841366e+00 -1.24544501e+00] [-6.01458907e-01 6.90050542e-01 -1.11756720e-01 ... -6.01458907e-01 6.90050542e-01 -1.11756720e-01]] ... [[-1.75859526e-01 1.22513700e+00 -9.14518535e-01 ... -1.75859526e-01 1.22513700e+00 -9.14518535e-01] [-5.99226177e-01 -6.44664109e-01 6.87800825e-01 ... -5.99226177e-01 -6.44664109e-01 6.87800825e-01] [-2.80946910e-01 -8.35155621e-02 -6.58860803e-01 ... -2.80946910e-01 -8.35155621e-02 -6.58860803e-01] ... [ 2.55187392e-01 1.16733408e+00 3.59439373e-01 ... 2.55187392e-01 1.16733408e+00 3.59439373e-01] [ 1.06129384e+00 1.05583835e+00 3.11881542e-01 ... 1.06129384e+00 1.05583835e+00 3.11881542e-01] [-2.70070374e-01 8.28375757e-01 9.47435439e-01 ... -2.70070374e-01 8.28375757e-01 9.47435439e-01]] [[-4.15991634e-01 4.92718935e-01 1.13888955e+00 ... -4.15991634e-01 4.92718935e-01 1.13888955e+00] [-5.61158419e-01 9.58302617e-01 1.21929479e+00 ... -5.61158419e-01 9.58302617e-01 1.21929479e+00] [-8.88628244e-01 4.05902088e-01 -4.36787382e-02 ... -8.88628244e-01 4.05902088e-01 -4.36787382e-02] ... [ 1.30089152e+00 -1.22141707e+00 3.79727408e-02 ... 1.30089152e+00 -1.22141707e+00 3.79727408e-02] [-3.57260972e-01 -4.95758533e-01 3.19062471e-01 ... -3.57260972e-01 -4.95758533e-01 3.19062471e-01] [-5.11062801e-01 -7.33035088e-01 -9.84214902e-01 ... -5.11062801e-01 -7.33035088e-01 -9.84214902e-01]] [[ 1.75547376e-01 5.15375659e-02 -7.90852070e-01 ... 1.75547376e-01 5.15375659e-02 -7.90852070e-01] [ 4.52420801e-01 -3.16172630e-01 -8.90386283e-01 ... 4.52420801e-01 -3.16172630e-01 -8.90386283e-01] [-5.75229704e-01 1.99326944e+00 1.92085373e+00 ... -5.75229704e-01 1.99326944e+00 1.92085373e+00] ... [ 1.18155563e+00 1.45604134e+00 -1.22744715e+00 ... 1.18155563e+00 1.45604134e+00 -1.22744715e+00] [ 1.53016543e+00 -3.28193069e-01 3.60781163e-01 ... 1.53016543e+00 -3.28193069e-01 3.60781163e-01] [ 7.83646226e-01 5.28270125e-01 1.26775831e-01 ... 
7.83646226e-01 5.28270125e-01 1.26775831e-01]]] [[[-2.72486955e-01 1.33899665e+00 4.58378881e-01 ... -2.72486955e-01 1.33899665e+00 4.58378881e-01] [-2.48939320e-01 8.05515587e-01 2.04483613e-01 ... -2.48939320e-01 8.05515587e-01 2.04483613e-01] [ 9.01713252e-01 -5.30264556e-01 -2.41495371e-01 ... 9.01713252e-01 -5.30264556e-01 -2.41495371e-01] ... [-3.44979018e-01 -1.03338289e+00 -1.20811939e+00 ... -3.44979018e-01 -1.03338289e+00 -1.20811939e+00] [ 2.02522588e+00 -8.49229336e-01 9.36323941e-01 ... 2.02522588e+00 -8.49229336e-01 9.36323941e-01] [ 8.31573963e-01 -3.66648696e-02 8.70341718e-01 ... 8.31573963e-01 -3.66648696e-02 8.70341718e-01]] [[-8.29488695e-01 5.95309675e-01 -5.99623546e-02 ... -8.29488695e-01 5.95309675e-01 -5.99623546e-02] [ 1.01443768e+00 1.19470108e+00 -3.58936250e-01 ... 1.01443768e+00 1.19470108e+00 -3.58936250e-01] [ 1.53500295e+00 7.52551973e-01 4.85836774e-01 ... 1.53500295e+00 7.52551973e-01 4.85836774e-01] ... [ 5.48323750e-01 -6.89547539e-01 -3.75357181e-01 ... 5.48323750e-01 -6.89547539e-01 -3.75357181e-01] [ 7.80939937e-01 8.60180318e-01 1.60528600e+00 ... 7.80939937e-01 8.60180318e-01 1.60528600e+00] [-3.60939130e-02 -2.06124067e+00 -1.46990347e+00 ... -3.60939130e-02 -2.06124067e+00 -1.46990347e+00]] [[-1.09113485e-01 -3.05660218e-01 -1.65706441e-01 ... -1.09113485e-01 -3.05660218e-01 -1.65706441e-01] [ 2.34170914e-01 1.40430629e+00 1.31235194e+00 ... 2.34170914e-01 1.40430629e+00 1.31235194e+00] [-5.71696222e-01 -1.72669545e-01 1.02576542e+00 ... -5.71696222e-01 -1.72669545e-01 1.02576542e+00] ... [-1.95896506e+00 4.77245241e-01 -5.81702828e-01 ... -1.95896506e+00 4.77245241e-01 -5.81702828e-01] [ 6.19397581e-01 -4.94797409e-01 -2.07208490e+00 ... 6.19397581e-01 -4.94797409e-01 -2.07208490e+00] [-1.11160922e+00 -9.06410575e-01 1.54039130e-01 ... -1.11160922e+00 -9.06410575e-01 1.54039130e-01]] ... [[-1.49483180e+00 2.92727649e-01 8.86060148e-02 ... 
-1.49483180e+00 2.92727649e-01 8.86060148e-02] [-1.18901026e+00 1.52811289e+00 5.63905120e-01 ... -1.18901026e+00 1.52811289e+00 5.63905120e-01] [ 3.68918568e-01 9.14249867e-02 -8.86555612e-01 ... 3.68918568e-01 9.14249867e-02 -8.86555612e-01] ... [-1.14619148e+00 -1.20715225e+00 -1.26925302e+00 ... -1.14619148e+00 -1.20715225e+00 -1.26925302e+00] [-2.03307033e+00 4.17899549e-01 3.28402579e-01 ... -2.03307033e+00 4.17899549e-01 3.28402579e-01] [ 2.44877174e-01 -5.36948502e-01 5.15861034e-01 ... 2.44877174e-01 -5.36948502e-01 5.15861034e-01]] [[-3.28920619e-03 -3.26162308e-01 2.97416508e-01 ... -3.28920619e-03 -3.26162308e-01 2.97416508e-01] [-9.13482368e-01 -1.70522499e+00 8.92648175e-02 ... -9.13482368e-01 -1.70522499e+00 8.92648175e-02] [-1.12421381e+00 -7.05458522e-01 2.18222648e-01 ... -1.12421381e+00 -7.05458522e-01 2.18222648e-01] ... [ 2.62749910e-01 1.06277831e-01 -6.01452105e-02 ... 2.62749910e-01 1.06277831e-01 -6.01452105e-02] [ 6.38593614e-01 -1.45725656e+00 -5.80943942e-01 ... 6.38593614e-01 -1.45725656e+00 -5.80943942e-01] [ 4.62338865e-01 2.95711756e-02 1.64198768e+00 ... 4.62338865e-01 2.95711756e-02 1.64198768e+00]] [[ 8.74271274e-01 -5.34758687e-01 1.32457986e-01 ... 8.74271274e-01 -5.34758687e-01 1.32457986e-01] [-5.76043546e-01 -2.88803554e+00 7.33709112e-02 ... -5.76043546e-01 -2.88803554e+00 7.33709112e-02] [-3.59183878e-01 -6.86723769e-01 2.88011670e+00 ... -3.59183878e-01 -6.86723769e-01 2.88011670e+00] ... [-1.78030640e-01 -8.69245827e-01 6.49546027e-01 ... -1.78030640e-01 -8.69245827e-01 6.49546027e-01] [ 4.21278507e-01 -2.58107334e-01 -1.34169567e+00 ... 4.21278507e-01 -2.58107334e-01 -1.34169567e+00] [-1.71795323e-01 -8.90804470e-01 3.11312735e-01 ... -1.71795323e-01 -8.90804470e-01 3.11312735e-01]]]]]; ov_res: [[[[[-5.53810179e-01 -3.35653782e-01 -4.41951036e-01 ... -5.53810179e-01 -3.35653782e-01 -4.41951036e-01] [ 4.26710658e-02 1.38215244e-01 2.80739903e-01 ... 
4.26710658e-02 1.38215244e-01 2.80739903e-01] [ 1.48893726e+00 8.71097505e-01 5.20508289e-01 ... 1.48893726e+00 8.71097505e-01 5.20508289e-01] ... [ 1.10206090e-01 -8.99109125e-01 2.69632316e+00 ... 1.10206090e-01 -8.99109125e-01 2.69632316e+00] [ 4.19339955e-01 -8.89167428e-01 1.22228551e+00 ... 4.19339955e-01 -8.89167428e-01 1.22228551e+00] [-1.53784990e+00 1.13165617e+00 1.63614526e-02 ... -1.53784990e+00 1.13165617e+00 1.63614526e-02]] [[ 7.08623767e-01 -3.67839575e-01 1.20065451e+00 ... 7.08623767e-01 -3.67839575e-01 1.20065451e+00] [-6.14120185e-01 7.70440996e-01 -6.02273166e-01 ... -6.14120185e-01 7.70440996e-01 -6.02273166e-01] [-3.84867191e-01 5.27938783e-01 -1.38635814e+00 ... -3.84867191e-01 5.27938783e-01 -1.38635814e+00] ... [ 1.83500275e-01 8.32151234e-01 3.98182005e-01 ... 1.83500275e-01 8.32151234e-01 3.98182005e-01] [-4.76032704e-01 1.42752945e-01 8.61519456e-01 ... -4.76032704e-01 1.42752945e-01 8.61519456e-01] [ 4.16439027e-01 -6.41110003e-01 -1.96456945e+00 ... 4.16439027e-01 -6.41110003e-01 -1.96456945e+00]] [[-4.46101546e-01 -1.47212133e-01 2.05463037e-01 ... -4.46101546e-01 -1.47212133e-01 2.05463037e-01] [ 5.30359089e-01 -2.39112824e-02 -1.39535701e+00 ... 5.30359089e-01 -2.39112824e-02 -1.39535701e+00] [-1.56895137e+00 -1.27823639e+00 3.17921162e-01 ... -1.56895137e+00 -1.27823639e+00 3.17921162e-01] ... [ 1.98210168e+00 -1.19721973e+00 3.78609389e-01 ... 1.98210168e+00 -1.19721973e+00 3.78609389e-01] [ 3.91592503e-01 -2.45806843e-01 8.55446279e-01 ... 3.91592503e-01 -2.45806843e-01 8.55446279e-01] [ 1.46099234e+00 7.03478932e-01 1.91950709e-01 ... 1.46099234e+00 7.03478932e-01 1.91950709e-01]] ... [[-1.69019258e+00 5.64639866e-01 6.19974494e-01 ... -1.69019258e+00 5.64639866e-01 6.19974494e-01] [-4.10598963e-01 1.48990989e+00 4.75591809e-01 ... -4.10598963e-01 1.48990989e+00 4.75591809e-01] [ 5.52183509e-01 -2.45235741e-01 3.62448335e-01 ... 5.52183509e-01 -2.45235741e-01 3.62448335e-01] ... 
[ 7.52554610e-02 -3.29592079e-01 7.81781793e-01 ... 7.52554610e-02 -3.29592079e-01 7.81781793e-01] [-2.98086673e-01 4.59039330e-01 9.50652212e-02 ... -2.98086673e-01 4.59039330e-01 9.50652212e-02] [-1.58297694e+00 -6.25060499e-01 -2.17163041e-01 ... -1.58297694e+00 -6.25060499e-01 -2.17163041e-01]] [[-8.67935121e-02 -7.63063073e-01 -1.91505563e+00 ... -8.67935121e-02 -7.63063073e-01 -1.91505563e+00] [-8.58444750e-01 -4.40798551e-01 3.07928562e-01 ... -8.58444750e-01 -4.40798551e-01 3.07928562e-01] [ 9.24098372e-01 1.18905866e+00 -1.16279221e+00 ... 9.24098372e-01 1.18905866e+00 -1.16279221e+00] ... [-5.97966015e-01 -1.70915246e+00 3.65120232e-01 ... -5.97966015e-01 -1.70915246e+00 3.65120232e-01] [ 2.19353363e-02 2.78805524e-01 6.88627601e-01 ... 2.19353363e-02 2.78805524e-01 6.88627601e-01] [ 1.67381823e+00 1.99443042e+00 5.83513021e-01 ... 1.67381823e+00 1.99443042e+00 5.83513021e-01]] [[ 2.85938680e-01 -4.64276522e-02 -7.42582798e-01 ... 2.85938680e-01 -4.64276522e-02 -7.42582798e-01] [-9.82019126e-01 1.50899541e+00 7.40528405e-01 ... -9.82019126e-01 1.50899541e+00 7.40528405e-01] [-7.62559950e-01 -2.84079313e-01 -8.99489582e-01 ... -7.62559950e-01 -2.84079313e-01 -8.99489582e-01] ... [ 3.62559646e-01 -9.20100689e-01 1.25418901e+00 ... 3.62559646e-01 -9.20100689e-01 1.25418901e+00] [-7.21807539e-01 3.01095098e-01 1.12959135e+00 ... -7.21807539e-01 3.01095098e-01 1.12959135e+00] [ 1.69223678e+00 -1.29378068e+00 -8.06537747e-01 ... 1.69223678e+00 -1.29378068e+00 -8.06537747e-01]]] [[[-8.05147946e-01 -6.05183071e-04 -5.11953533e-01 ... -8.05147946e-01 -6.05183071e-04 -5.11953533e-01] [ 2.05480027e+00 -1.49634898e-01 4.07393724e-02 ... 2.05480027e+00 -1.49634898e-01 4.07393724e-02] [ 1.40852773e+00 -1.34248292e+00 -4.75500494e-01 ... 1.40852773e+00 -1.34248292e+00 -4.75500494e-01] ... [ 1.28205955e+00 -4.94769737e-02 -2.15073615e-01 ... 1.28205955e+00 -4.94769737e-02 -2.15073615e-01] [-6.21946454e-01 1.82031238e+00 6.84965402e-02 ... 
-6.21946454e-01 1.82031238e+00 6.84965402e-02] [-1.18781471e+00 -2.22336698e+00 -1.29813647e+00 ... -1.18781471e+00 -2.22336698e+00 -1.29813647e+00]] [[ 4.99563515e-01 3.89574766e-02 -1.04971492e+00 ... 4.99563515e-01 3.89574766e-02 -1.04971492e+00] [-1.21588075e+00 3.02931428e-01 2.68964827e-01 ... -1.21588075e+00 3.02931428e-01 2.68964827e-01] [ 7.32467324e-02 4.32019264e-01 -2.27137327e-01 ... 7.32467324e-02 4.32019264e-01 -2.27137327e-01] ... [ 1.66795599e+00 -2.24787593e+00 -3.63243341e-01 ... 1.66795599e+00 -2.24787593e+00 -3.63243341e-01] [ 3.42073023e-01 -5.51085353e-01 1.47408172e-01 ... 3.42073023e-01 -5.51085353e-01 1.47408172e-01] [ 1.16796899e+00 1.08816779e+00 9.33801174e-01 ... 1.16796899e+00 1.08816779e+00 9.33801174e-01]] [[-9.36535239e-01 6.79753780e-01 6.07898772e-01 ... -9.36535239e-01 6.79753780e-01 6.07898772e-01] [-7.17329144e-01 1.42339122e+00 6.18601561e-01 ... -7.17329144e-01 1.42339122e+00 6.18601561e-01] [ 1.00778472e+00 6.87917769e-01 8.99764001e-01 ... 1.00778472e+00 6.87917769e-01 8.99764001e-01] ... [-1.12441793e-01 7.36056924e-01 -1.48291814e+00 ... -1.12441793e-01 7.36056924e-01 -1.48291814e+00] [-7.26198480e-02 1.39841366e+00 -1.24544501e+00 ... -7.26198480e-02 1.39841366e+00 -1.24544501e+00] [-6.01458907e-01 6.90050542e-01 -1.11756720e-01 ... -6.01458907e-01 6.90050542e-01 -1.11756720e-01]] ... [[-1.75859526e-01 1.22513700e+00 -9.14518535e-01 ... -1.75859526e-01 1.22513700e+00 -9.14518535e-01] [-5.99226177e-01 -6.44664109e-01 6.87800825e-01 ... -5.99226177e-01 -6.44664109e-01 6.87800825e-01] [-2.80946910e-01 -8.35155621e-02 -6.58860803e-01 ... -2.80946910e-01 -8.35155621e-02 -6.58860803e-01] ... [ 2.55187392e-01 1.16733408e+00 3.59439373e-01 ... 2.55187392e-01 1.16733408e+00 3.59439373e-01] [ 1.06129384e+00 1.05583835e+00 3.11881542e-01 ... 1.06129384e+00 1.05583835e+00 3.11881542e-01] [-2.70070374e-01 8.28375757e-01 9.47435439e-01 ... 
-2.70070374e-01 8.28375757e-01 9.47435439e-01]] [[-4.15991634e-01 4.92718935e-01 1.13888955e+00 ... -4.15991634e-01 4.92718935e-01 1.13888955e+00] [-5.61158419e-01 9.58302617e-01 1.21929479e+00 ... -5.61158419e-01 9.58302617e-01 1.21929479e+00] [-8.88628244e-01 4.05902088e-01 -4.36787382e-02 ... -8.88628244e-01 4.05902088e-01 -4.36787382e-02] ... [ 1.30089152e+00 -1.22141707e+00 3.79727408e-02 ... 1.30089152e+00 -1.22141707e+00 3.79727408e-02] [-3.57260972e-01 -4.95758533e-01 3.19062471e-01 ... -3.57260972e-01 -4.95758533e-01 3.19062471e-01] [-5.11062801e-01 -7.33035088e-01 -9.84214902e-01 ... -5.11062801e-01 -7.33035088e-01 -9.84214902e-01]] [[ 1.75547376e-01 5.15375659e-02 -7.90852070e-01 ... 1.75547376e-01 5.15375659e-02 -7.90852070e-01] [ 4.52420801e-01 -3.16172630e-01 -8.90386283e-01 ... 4.52420801e-01 -3.16172630e-01 -8.90386283e-01] [-5.75229704e-01 1.99326944e+00 1.92085373e+00 ... -5.75229704e-01 1.99326944e+00 1.92085373e+00] ... [ 1.18155563e+00 1.45604134e+00 -1.22744715e+00 ... 1.18155563e+00 1.45604134e+00 -1.22744715e+00] [ 1.53016543e+00 -3.28193069e-01 3.60781163e-01 ... 1.53016543e+00 -3.28193069e-01 3.60781163e-01] [ 7.83646226e-01 5.28270125e-01 1.26775831e-01 ... 7.83646226e-01 5.28270125e-01 1.26775831e-01]]] [[[-2.72486955e-01 1.33899665e+00 4.58378881e-01 ... -2.72486955e-01 1.33899665e+00 4.58378881e-01] [-2.48939320e-01 8.05515587e-01 2.04483613e-01 ... -2.48939320e-01 8.05515587e-01 2.04483613e-01] [ 9.01713252e-01 -5.30264556e-01 -2.41495371e-01 ... 9.01713252e-01 -5.30264556e-01 -2.41495371e-01] ... [-3.44979018e-01 -1.03338289e+00 -1.20811939e+00 ... -3.44979018e-01 -1.03338289e+00 -1.20811939e+00] [ 2.02522588e+00 -8.49229336e-01 9.36323941e-01 ... 2.02522588e+00 -8.49229336e-01 9.36323941e-01] [ 8.31573963e-01 -3.66648696e-02 8.70341718e-01 ... 8.31573963e-01 -3.66648696e-02 8.70341718e-01]] [[-8.29488695e-01 5.95309675e-01 -5.99623546e-02 ... 
-8.29488695e-01 5.95309675e-01 -5.99623546e-02] [ 1.01443768e+00 1.19470108e+00 -3.58936250e-01 ... 1.01443768e+00 1.19470108e+00 -3.58936250e-01] [ 1.53500295e+00 7.52551973e-01 4.85836774e-01 ... 1.53500295e+00 7.52551973e-01 4.85836774e-01] ... [ 5.48323750e-01 -6.89547539e-01 -3.75357181e-01 ... 5.48323750e-01 -6.89547539e-01 -3.75357181e-01] [ 7.80939937e-01 8.60180318e-01 1.60528600e+00 ... 7.80939937e-01 8.60180318e-01 1.60528600e+00] [-3.60939130e-02 -2.06124067e+00 -1.46990347e+00 ... -3.60939130e-02 -2.06124067e+00 -1.46990347e+00]] [[-1.09113485e-01 -3.05660218e-01 -1.65706441e-01 ... -1.09113485e-01 -3.05660218e-01 -1.65706441e-01] [ 2.34170914e-01 1.40430629e+00 1.31235194e+00 ... 2.34170914e-01 1.40430629e+00 1.31235194e+00] [-5.71696222e-01 -1.72669545e-01 1.02576542e+00 ... -5.71696222e-01 -1.72669545e-01 1.02576542e+00] ... [-1.95896506e+00 4.77245241e-01 -5.81702828e-01 ... -1.95896506e+00 4.77245241e-01 -5.81702828e-01] [ 6.19397581e-01 -4.94797409e-01 -2.07208490e+00 ... 6.19397581e-01 -4.94797409e-01 -2.07208490e+00] [-1.11160922e+00 -9.06410575e-01 1.54039130e-01 ... -1.11160922e+00 -9.06410575e-01 1.54039130e-01]] ... [[-1.49483180e+00 2.92727649e-01 8.86060148e-02 ... -1.49483180e+00 2.92727649e-01 8.86060148e-02] [-1.18901026e+00 1.52811289e+00 5.63905120e-01 ... -1.18901026e+00 1.52811289e+00 5.63905120e-01] [ 3.68918568e-01 9.14249867e-02 -8.86555612e-01 ... 3.68918568e-01 9.14249867e-02 -8.86555612e-01] ... [-1.14619148e+00 -1.20715225e+00 -1.26925302e+00 ... -1.14619148e+00 -1.20715225e+00 -1.26925302e+00] [-2.03307033e+00 4.17899549e-01 3.28402579e-01 ... -2.03307033e+00 4.17899549e-01 3.28402579e-01] [ 2.44877174e-01 -5.36948502e-01 5.15861034e-01 ... 2.44877174e-01 -5.36948502e-01 5.15861034e-01]] [[-3.28920619e-03 -3.26162308e-01 2.97416508e-01 ... -3.28920619e-03 -3.26162308e-01 2.97416508e-01] [-9.13482368e-01 -1.70522499e+00 8.92648175e-02 ... 
-9.13482368e-01 -1.70522499e+00 8.92648175e-02] [-1.12421381e+00 -7.05458522e-01 2.18222648e-01 ... -1.12421381e+00 -7.05458522e-01 2.18222648e-01] ... [ 2.62749910e-01 1.06277831e-01 -6.01452105e-02 ... 2.62749910e-01 1.06277831e-01 -6.01452105e-02] [ 6.38593614e-01 -1.45725656e+00 -5.80943942e-01 ... 6.38593614e-01 -1.45725656e+00 -5.80943942e-01] [ 4.62338865e-01 2.95711756e-02 1.64198768e+00 ... 4.62338865e-01 2.95711756e-02 1.64198768e+00]] [[ 8.74271274e-01 -5.34758687e-01 1.32457986e-01 ... 8.74271274e-01 -5.34758687e-01 1.32457986e-01] [-5.76043546e-01 -2.88803554e+00 7.33709112e-02 ... -5.76043546e-01 -2.88803554e+00 7.33709112e-02] [-3.59183878e-01 -6.86723769e-01 2.88011670e+00 ... -3.59183878e-01 -6.86723769e-01 2.88011670e+00] ... [-1.78030640e-01 -8.69245827e-01 6.49546027e-01 ... -1.78030640e-01 -8.69245827e-01 6.49546027e-01] [ 4.21278507e-01 -2.58107334e-01 -1.34169567e+00 ... 4.21278507e-01 -2.58107334e-01 -1.34169567e+00] [-1.71795323e-01 -8.90804470e-01 3.11312735e-01 ... -1.71795323e-01 -8.90804470e-01 3.11312735e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(0, 1, 0, 0, 0, 0) - mode:circular - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5578.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="circular"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[0, 1, 0, 0, 0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 2.9544582 -2.3451567 -0.25342935 ... 0.26655716 -0.03969112 2.9544582 ] [ 1.0326438 0.4286878 0.3291906 ... -0.10477369 0.7765916 1.0326438 ] [ 2.4097316 -0.6307006 0.68076515 ... -0.00365907 1.0422746 2.4097316 ] ... [ 0.20040318 -1.0811416 0.14582898 ... 0.39640865 1.9822179 0.20040318] [ 0.9307957 1.1577147 -0.8764332 ... 1.6850064 0.8926486 0.9307957 ] [-1.182925 -0.58469266 -0.02476222 ... -2.2345262 0.18849486 -1.182925 ]] [[ 0.29260927 -0.7963618 0.6035037 ... 0.3494048 -0.05007969 0.29260927] [-0.18502323 0.29812875 -1.2482929 ... 0.767694 0.7306798 -0.18502323] [-1.1759554 -0.14869697 -0.6941967 ... -1.1932188 -1.8127112 -1.1759554 ] ... [-1.1112239 0.33001485 -0.5476582 ... 1.4171863 -0.24951541 -1.1112239 ] [-0.01263343 0.7022486 -0.08353008 ... -0.57647425 1.2192564 -0.01263343] [ 0.43383837 0.38833615 -1.0809352 ... -0.9333281 0.5467515 0.43383837]] [[ 1.5062984 0.48221856 0.89108765 ... 0.9545603 -1.1357208 1.5062984 ] [ 0.21867542 0.00918808 0.7169139 ... -0.11699312 -1.367709 0.21867542] [-0.47389978 0.4936241 0.38662094 ... 0.26705503 -0.31950092 -0.47389978] ... [-0.39243093 -0.44039342 0.3175709 ... 0.03724869 -0.73614526 -0.39243093] [ 0.48399076 -1.3511398 -1.1662832 ... 0.22297426 -1.3437177 0.48399076] [-0.5386648 0.12856835 -0.06572443 ... -0.21083939 -0.53675956 -0.5386648 ]] ... [[-1.1510673 0.0667518 -0.0832122 ... -0.3171126 1.2559799 -1.1510673 ] [ 1.0630392 -0.8385509 0.05791361 ... 
0.32159895 -2.4463024 1.0630392 ] [-0.3041595 -1.9321058 1.2324252 ... 0.9786767 -0.13761668 -0.3041595 ] ... [-1.4132031 -0.00449519 0.75946826 ... 1.1990882 0.802658 -1.4132031 ] [ 0.25793266 0.26861662 -1.1029536 ... 0.57110953 -0.34060684 0.25793266] [-0.14495529 0.7309745 -0.8868516 ... -0.9367909 -0.8206119 -0.14495529]] [[ 0.27182028 1.0642929 -0.95413125 ... 1.5343529 -1.703313 0.27182028] [ 2.025195 -0.74661124 -0.15558857 ... 1.5689735 1.4120054 2.025195 ] [-2.7215183 -1.7604768 -1.7753283 ... -0.12456827 -1.0948945 -2.7215183 ] ... [-1.2583426 -2.4129803 -0.3249461 ... -0.93908304 -0.719401 -1.2583426 ] [-0.7500573 -0.61892927 2.6387494 ... 1.9468234 1.1144578 -0.7500573 ] [ 1.2940673 1.2704928 -1.6048537 ... 0.7712219 0.20963655 1.2940673 ]] [[-0.673588 0.69387984 0.5965634 ... 2.4164045 1.324819 -0.673588 ] [ 1.4162824 0.32108593 -0.6791421 ... -0.72887594 -0.7187818 1.4162824 ] [ 0.88121074 -0.5697592 -0.48404437 ... 0.21656097 0.25025842 0.88121074] ... [ 0.35785487 -0.6537948 -1.3161038 ... -2.4028034 -0.68477654 0.35785487] [-0.8965843 1.5659382 0.46377087 ... 0.48221594 0.01659334 -0.8965843 ] [ 1.7830195 -1.3500911 0.02313311 ... -1.4656537 -0.14243932 1.7830195 ]]] [[[-1.248059 0.89252806 -0.6963907 ... -0.57173985 1.26742 -1.248059 ] [-1.7706133 0.39489597 0.4549135 ... -0.10884949 -1.088247 -1.7706133 ] [ 0.44570538 -1.2197593 1.3582985 ... -0.13486566 -0.3365147 0.44570538] ... [ 1.0178407 -0.9927334 0.6797063 ... 0.24592741 1.5748651 1.0178407 ] [ 0.7219536 -0.6354008 -0.5080237 ... -0.22740558 -0.73242885 0.7219536 ] [-0.72453845 -0.70461386 -0.44868076 ... 0.8515714 -1.6891692 -0.72453845]] [[ 1.1231258 -0.11509645 -0.4748177 ... -0.03096791 0.40029493 1.1231258 ] [ 0.24420443 -0.32065618 -0.34928703 ... 0.64252335 0.7850551 0.24420443] [ 0.86112225 -0.05878349 0.45930153 ... -0.02157838 -0.01108851 0.86112225] ... [ 0.74018586 -1.5689728 -0.5066574 ... -1.3238503 1.5389544 0.74018586] [-0.14564323 -1.0950285 1.379167 ... 
0.89259464 -0.53081626 -0.14564323] [-0.98471004 1.1413529 -1.5065997 ... -0.6195425 -0.43039316 -0.98471004]] [[ 0.06114148 1.1736755 -0.2445479 ... 0.49401808 -0.6399458 0.06114148] [ 0.84458613 -0.5839837 -0.17829686 ... 2.178064 -1.4455736 0.84458613] [ 0.76570195 0.25272608 0.7943601 ... 1.2891601 -0.6839333 0.76570195] ... [ 0.6568774 2.6469052 0.20338747 ... 0.73228115 -0.63470834 0.6568774 ] [ 0.10321555 0.6965618 -1.6742302 ... 0.7436394 0.35449132 0.10321555] [-0.12424581 0.22498071 0.95643145 ... -0.75820845 0.6496397 -0.12424581]] ... [[-0.11832167 -1.458683 -0.50317943 ... -0.39177564 -0.3056423 -0.11832167] [-0.27824506 0.7155809 -1.0758417 ... -1.3257874 1.000492 -0.27824506] [ 1.4087583 0.74999523 0.28770697 ... -0.5354333 1.235926 1.4087583 ] ... [-1.3963909 0.8523025 0.02471784 ... 1.438756 0.9956029 -1.3963909 ] [-0.3776076 1.11448 -0.3164451 ... 0.11120027 0.34835866 -0.3776076 ] [ 0.41759792 -1.2348098 0.43409765 ... -1.485596 -0.51130664 0.41759792]] [[ 1.489757 -0.21768941 1.6222727 ... -1.7511854 0.54377854 1.489757 ] [ 0.2470566 -2.2591517 -0.64420927 ... 0.9575781 0.6536824 0.2470566 ] [-1.4358186 -1.5995959 0.03001295 ... 0.9754912 0.24332102 -1.4358186 ] ... [ 0.9746696 1.744437 -1.4162089 ... -0.20951371 0.160977 0.9746696 ] [ 0.27314544 -1.4943835 -0.45307037 ... 0.08190107 -0.5158239 0.27314544] [-0.36111817 -0.21111861 1.6035999 ... -1.3144585 -0.1863733 -0.36111817]] [[ 0.8824567 -2.2097328 -0.1354561 ... 0.26622623 1.0304254 0.8824567 ] [-0.7314423 -0.08815398 -1.7031668 ... 0.76376307 -0.9288409 -0.7314423 ] [-1.5247177 -1.5797455 -0.6499829 ... 0.45977116 -0.27576128 -1.5247177 ] ... [-1.5387702 -0.04354078 -0.13705684 ... 1.0597806 0.5067979 -1.5387702 ] [ 0.8227683 0.5718945 -1.2180554 ... 1.5773551 1.3326044 0.8227683 ] [ 0.5410944 -0.4937862 0.54521126 ... -0.31882074 1.8569024 0.5410944 ]]] [[[ 0.10002748 1.9235946 -0.90015966 ... 0.3890773 -0.6844513 0.10002748] [-0.8364027 1.5829692 0.31537542 ... 
0.33634317 -0.5228524 -0.8364027 ] [-0.08212159 -0.41950208 -0.33328107 ... 0.16591208 -0.56963253 -0.08212159] ... [-0.7390672 -0.17226066 0.86367655 ... -1.3890752 1.3685229 -0.7390672 ] [-2.1591427 -0.85131186 -0.16421162 ... -1.4498284 -0.7989568 -2.1591427 ] [-0.04054871 -0.9130551 -1.3030058 ... 1.7255195 -0.57926464 -0.04054871]] [[ 1.0149506 0.5764245 0.3156643 ... 0.22782315 1.5669694 1.0149506 ] [-0.1371942 0.26839882 -0.07176815 ... -0.15979062 -2.4830594 -0.1371942 ] [-0.3440671 0.04818569 0.19636826 ... -0.02851189 -0.6495891 -0.3440671 ] ... [-1.5440996 -0.07085963 0.16903286 ... 0.26065502 0.35793197 -1.5440996 ] [-0.1837759 0.27826613 0.89792305 ... 0.29124755 -0.37264743 -0.1837759 ] [ 0.07046597 -0.08505538 0.37785348 ... 1.2720371 -0.8534702 0.07046597]] [[ 1.2365699 0.70817846 0.7193288 ... -0.31895754 -0.03783023 1.2365699 ] [ 0.67151916 1.0209118 0.65304774 ... 1.4718318 0.37488478 0.67151916] [ 0.05232966 0.14050485 -0.44517314 ... 0.22773519 0.8010145 0.05232966] ... [-0.33836624 -0.04003892 0.26349407 ... 1.0020757 0.30217537 -0.33836624] [-1.251923 1.1351265 0.89315104 ... -0.6978142 -0.41233194 -1.251923 ] [ 0.04683488 0.44003546 -1.015462 ... 1.2253623 -1.7193031 0.04683488]] ... [[-0.28958672 -2.4640844 0.52814835 ... 0.84010565 0.44476324 -0.28958672] [-0.7107291 -0.61210346 0.13262983 ... -0.9194148 0.27947584 -0.7107291 ] [-0.81909126 1.863913 -0.66692257 ... -0.01981807 1.2494838 -0.81909126] ... [ 0.40860462 -0.83140534 -0.4324551 ... 0.1332685 -0.20263302 0.40860462] [ 0.833733 1.0364411 -0.29831937 ... -2.32787 0.17959683 0.833733 ] [-0.04587726 0.79751587 -0.31522575 ... 1.0029124 0.02995016 -0.04587726]] [[ 0.6464259 0.2538281 1.7620877 ... -0.18445592 -1.5227866 0.6464259 ] [ 0.9051208 0.7592575 0.02612517 ... 0.02915361 1.4238701 0.9051208 ] [ 1.1028283 0.50589335 -0.2429033 ... 0.3177915 -0.2650154 1.1028283 ] ... [-0.28786615 0.6984217 -1.2919027 ... 0.77631766 -1.8733301 -0.28786615] [ 0.48514464 1.5900375 -1.5805665 ... 
-1.0798934 0.46320274 0.48514464] [ 0.18134269 -1.7438844 -1.8433322 ... 0.3626853 -0.657898 0.18134269]] [[-0.13968784 0.2334697 0.48938507 ... 1.6173153 -1.3294802 -0.13968784] [-0.42145574 0.2584755 -0.05223909 ... -0.25725344 1.2005156 -0.42145574] [ 0.4441575 2.01379 0.25835615 ... 2.0953913 -1.092773 0.4441575 ] ... [-1.0344262 -0.7320847 0.68487823 ... -0.4384164 -1.3674802 -1.0344262 ] [ 0.5496018 -1.3766265 -0.6461222 ... 0.37149587 1.4355063 0.5496018 ] [ 1.4552076 -0.751053 -1.2855443 ... 0.23136902 -0.5203496 1.4552076 ]]]]]; ov_res: [[[[[ 2.9544582 -2.3451567 -0.25342935 ... 0.26655716 -0.03969112 2.9544582 ] [ 1.0326438 0.4286878 0.3291906 ... -0.10477369 0.7765916 1.0326438 ] [ 2.4097316 -0.6307006 0.68076515 ... -0.00365907 1.0422746 2.4097316 ] ... [ 0.20040318 -1.0811416 0.14582898 ... 0.39640865 1.9822179 0.20040318] [ 0.9307957 1.1577147 -0.8764332 ... 1.6850064 0.8926486 0.9307957 ] [-1.182925 -0.58469266 -0.02476222 ... -2.2345262 0.18849486 -1.182925 ]] [[ 0.29260927 -0.7963618 0.6035037 ... 0.3494048 -0.05007969 0.29260927] [-0.18502323 0.29812875 -1.2482929 ... 0.767694 0.7306798 -0.18502323] [-1.1759554 -0.14869697 -0.6941967 ... -1.1932188 -1.8127112 -1.1759554 ] ... [-1.1112239 0.33001485 -0.5476582 ... 1.4171863 -0.24951541 -1.1112239 ] [-0.01263343 0.7022486 -0.08353008 ... -0.57647425 1.2192564 -0.01263343] [ 0.43383837 0.38833615 -1.0809352 ... -0.9333281 0.5467515 0.43383837]] [[ 1.5062984 0.48221856 0.89108765 ... 0.9545603 -1.1357208 1.5062984 ] [ 0.21867542 0.00918808 0.7169139 ... -0.11699312 -1.367709 0.21867542] [-0.47389978 0.4936241 0.38662094 ... 0.26705503 -0.31950092 -0.47389978] ... [-0.39243093 -0.44039342 0.3175709 ... 0.03724869 -0.73614526 -0.39243093] [ 0.48399076 -1.3511398 -1.1662832 ... 0.22297426 -1.3437177 0.48399076] [-0.5386648 0.12856835 -0.06572443 ... -0.21083939 -0.53675956 -0.5386648 ]] ... [[-1.1510673 0.0667518 -0.0832122 ... -0.3171126 1.2559799 -1.1510673 ] [ 1.0630392 -0.8385509 0.05791361 ... 
0.32159895 -2.4463024 1.0630392 ] [-0.3041595 -1.9321058 1.2324252 ... 0.9786767 -0.13761668 -0.3041595 ] ... [-1.4132031 -0.00449519 0.75946826 ... 1.1990882 0.802658 -1.4132031 ] [ 0.25793266 0.26861662 -1.1029536 ... 0.57110953 -0.34060684 0.25793266] [-0.14495529 0.7309745 -0.8868516 ... -0.9367909 -0.8206119 -0.14495529]] [[ 0.27182028 1.0642929 -0.95413125 ... 1.5343529 -1.703313 0.27182028] [ 2.025195 -0.74661124 -0.15558857 ... 1.5689735 1.4120054 2.025195 ] [-2.7215183 -1.7604768 -1.7753283 ... -0.12456827 -1.0948945 -2.7215183 ] ... [-1.2583426 -2.4129803 -0.3249461 ... -0.93908304 -0.719401 -1.2583426 ] [-0.7500573 -0.61892927 2.6387494 ... 1.9468234 1.1144578 -0.7500573 ] [ 1.2940673 1.2704928 -1.6048537 ... 0.7712219 0.20963655 1.2940673 ]] [[-0.673588 0.69387984 0.5965634 ... 2.4164045 1.324819 -0.673588 ] [ 1.4162824 0.32108593 -0.6791421 ... -0.72887594 -0.7187818 1.4162824 ] [ 0.88121074 -0.5697592 -0.48404437 ... 0.21656097 0.25025842 0.88121074] ... [ 0.35785487 -0.6537948 -1.3161038 ... -2.4028034 -0.68477654 0.35785487] [-0.8965843 1.5659382 0.46377087 ... 0.48221594 0.01659334 -0.8965843 ] [ 1.7830195 -1.3500911 0.02313311 ... -1.4656537 -0.14243932 1.7830195 ]]] [[[-1.248059 0.89252806 -0.6963907 ... -0.57173985 1.26742 -1.248059 ] [-1.7706133 0.39489597 0.4549135 ... -0.10884949 -1.088247 -1.7706133 ] [ 0.44570538 -1.2197593 1.3582985 ... -0.13486566 -0.3365147 0.44570538] ... [ 1.0178407 -0.9927334 0.6797063 ... 0.24592741 1.5748651 1.0178407 ] [ 0.7219536 -0.6354008 -0.5080237 ... -0.22740558 -0.73242885 0.7219536 ] [-0.72453845 -0.70461386 -0.44868076 ... 0.8515714 -1.6891692 -0.72453845]] [[ 1.1231258 -0.11509645 -0.4748177 ... -0.03096791 0.40029493 1.1231258 ] [ 0.24420443 -0.32065618 -0.34928703 ... 0.64252335 0.7850551 0.24420443] [ 0.86112225 -0.05878349 0.45930153 ... -0.02157838 -0.01108851 0.86112225] ... [ 0.74018586 -1.5689728 -0.5066574 ... -1.3238503 1.5389544 0.74018586] [-0.14564323 -1.0950285 1.379167 ... 
0.89259464 -0.53081626 -0.14564323] [-0.98471004 1.1413529 -1.5065997 ... -0.6195425 -0.43039316 -0.98471004]] [[ 0.06114148 1.1736755 -0.2445479 ... 0.49401808 -0.6399458 0.06114148] [ 0.84458613 -0.5839837 -0.17829686 ... 2.178064 -1.4455736 0.84458613] [ 0.76570195 0.25272608 0.7943601 ... 1.2891601 -0.6839333 0.76570195] ... [ 0.6568774 2.6469052 0.20338747 ... 0.73228115 -0.63470834 0.6568774 ] [ 0.10321555 0.6965618 -1.6742302 ... 0.7436394 0.35449132 0.10321555] [-0.12424581 0.22498071 0.95643145 ... -0.75820845 0.6496397 -0.12424581]] ... [[-0.11832167 -1.458683 -0.50317943 ... -0.39177564 -0.3056423 -0.11832167] [-0.27824506 0.7155809 -1.0758417 ... -1.3257874 1.000492 -0.27824506] [ 1.4087583 0.74999523 0.28770697 ... -0.5354333 1.235926 1.4087583 ] ... [-1.3963909 0.8523025 0.02471784 ... 1.438756 0.9956029 -1.3963909 ] [-0.3776076 1.11448 -0.3164451 ... 0.11120027 0.34835866 -0.3776076 ] [ 0.41759792 -1.2348098 0.43409765 ... -1.485596 -0.51130664 0.41759792]] [[ 1.489757 -0.21768941 1.6222727 ... -1.7511854 0.54377854 1.489757 ] [ 0.2470566 -2.2591517 -0.64420927 ... 0.9575781 0.6536824 0.2470566 ] [-1.4358186 -1.5995959 0.03001295 ... 0.9754912 0.24332102 -1.4358186 ] ... [ 0.9746696 1.744437 -1.4162089 ... -0.20951371 0.160977 0.9746696 ] [ 0.27314544 -1.4943835 -0.45307037 ... 0.08190107 -0.5158239 0.27314544] [-0.36111817 -0.21111861 1.6035999 ... -1.3144585 -0.1863733 -0.36111817]] [[ 0.8824567 -2.2097328 -0.1354561 ... 0.26622623 1.0304254 0.8824567 ] [-0.7314423 -0.08815398 -1.7031668 ... 0.76376307 -0.9288409 -0.7314423 ] [-1.5247177 -1.5797455 -0.6499829 ... 0.45977116 -0.27576128 -1.5247177 ] ... [-1.5387702 -0.04354078 -0.13705684 ... 1.0597806 0.5067979 -1.5387702 ] [ 0.8227683 0.5718945 -1.2180554 ... 1.5773551 1.3326044 0.8227683 ] [ 0.5410944 -0.4937862 0.54521126 ... -0.31882074 1.8569024 0.5410944 ]]] [[[ 0.10002748 1.9235946 -0.90015966 ... 0.3890773 -0.6844513 0.10002748] [-0.8364027 1.5829692 0.31537542 ... 
0.33634317 -0.5228524 -0.8364027 ] [-0.08212159 -0.41950208 -0.33328107 ... 0.16591208 -0.56963253 -0.08212159] ... [-0.7390672 -0.17226066 0.86367655 ... -1.3890752 1.3685229 -0.7390672 ] [-2.1591427 -0.85131186 -0.16421162 ... -1.4498284 -0.7989568 -2.1591427 ] [-0.04054871 -0.9130551 -1.3030058 ... 1.7255195 -0.57926464 -0.04054871]] [[ 1.0149506 0.5764245 0.3156643 ... 0.22782315 1.5669694 1.0149506 ] [-0.1371942 0.26839882 -0.07176815 ... -0.15979062 -2.4830594 -0.1371942 ] [-0.3440671 0.04818569 0.19636826 ... -0.02851189 -0.6495891 -0.3440671 ] ... [-1.5440996 -0.07085963 0.16903286 ... 0.26065502 0.35793197 -1.5440996 ] [-0.1837759 0.27826613 0.89792305 ... 0.29124755 -0.37264743 -0.1837759 ] [ 0.07046597 -0.08505538 0.37785348 ... 1.2720371 -0.8534702 0.07046597]] [[ 1.2365699 0.70817846 0.7193288 ... -0.31895754 -0.03783023 1.2365699 ] [ 0.67151916 1.0209118 0.65304774 ... 1.4718318 0.37488478 0.67151916] [ 0.05232966 0.14050485 -0.44517314 ... 0.22773519 0.8010145 0.05232966] ... [-0.33836624 -0.04003892 0.26349407 ... 1.0020757 0.30217537 -0.33836624] [-1.251923 1.1351265 0.89315104 ... -0.6978142 -0.41233194 -1.251923 ] [ 0.04683488 0.44003546 -1.015462 ... 1.2253623 -1.7193031 0.04683488]] ... [[-0.28958672 -2.4640844 0.52814835 ... 0.84010565 0.44476324 -0.28958672] [-0.7107291 -0.61210346 0.13262983 ... -0.9194148 0.27947584 -0.7107291 ] [-0.81909126 1.863913 -0.66692257 ... -0.01981807 1.2494838 -0.81909126] ... [ 0.40860462 -0.83140534 -0.4324551 ... 0.1332685 -0.20263302 0.40860462] [ 0.833733 1.0364411 -0.29831937 ... -2.32787 0.17959683 0.833733 ] [-0.04587726 0.79751587 -0.31522575 ... 1.0029124 0.02995016 -0.04587726]] [[ 0.6464259 0.2538281 1.7620877 ... -0.18445592 -1.5227866 0.6464259 ] [ 0.9051208 0.7592575 0.02612517 ... 0.02915361 1.4238701 0.9051208 ] [ 1.1028283 0.50589335 -0.2429033 ... 0.3177915 -0.2650154 1.1028283 ] ... [-0.28786615 0.6984217 -1.2919027 ... 0.77631766 -1.8733301 -0.28786615] [ 0.48514464 1.5900375 -1.5805665 ... 
-1.0798934 0.46320274 0.48514464] [ 0.18134269 -1.7438844 -1.8433322 ... 0.3626853 -0.657898 0.18134269]] [[-0.13968784 0.2334697 0.48938507 ... 1.6173153 -1.3294802 -0.13968784] [-0.42145574 0.2584755 -0.05223909 ... -0.25725344 1.2005156 -0.42145574] [ 0.4441575 2.01379 0.25835615 ... 2.0953913 -1.092773 0.4441575 ] ... [-1.0344262 -0.7320847 0.68487823 ... -0.4384164 -1.3674802 -1.0344262 ] [ 0.5496018 -1.3766265 -0.6461222 ... 0.37149587 1.4355063 0.5496018 ] [ 1.4552076 -0.751053 -1.2855443 ... 0.23136902 -0.5203496 1.4552076 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(0, 0, 0, 0, 0, 0) - mode:circular - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5581.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="circular"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[0, 0, 0, 0, 0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[-7.86556125e-01 1.61165297e-02 1.55708730e-01 ... 5.34119308e-01 -2.07152948e-01 1.40357530e+00] [ 1.70643938e+00 -8.62871349e-01 1.90306294e+00 ... -3.38140070e-01 -9.99904513e-01 -8.24561119e-01] [ 1.42607653e+00 -9.54729855e-01 -4.90347922e-01 ... -3.80269587e-01 3.74048576e-02 5.25581300e-01] ... [-1.36141324e+00 -1.50619054e+00 -8.67787361e-01 ... 1.82080865e-01 1.30290174e+00 -4.70124722e-01] [-1.16339672e+00 -5.04574537e-01 5.90756297e-01 ... -6.28376842e-01 -5.24784863e-01 -1.00507450e+00] [ 4.32638405e-03 -3.89702499e-01 -1.65517008e+00 ... -2.49730676e-01 1.12992680e+00 6.36589408e-01]] [[-1.29209971e+00 5.48749804e-01 -9.51487347e-02 ... 1.69586316e-02 9.49732900e-01 -2.60048360e-01] [ 1.93358970e+00 9.38344240e-01 -1.50723732e+00 ... -9.60099280e-01 4.64940131e-01 9.39166471e-02] [ 1.00067161e-01 9.40396130e-01 8.40672314e-01 ... -2.94083804e-01 7.27708101e-01 -7.79959977e-01] ... [ 6.62074506e-01 3.58591735e-01 -6.04921997e-01 ... -2.05020517e-01 5.06780386e-01 -1.73109889e-01] [ 1.99274421e-01 -5.38457990e-01 -7.17106879e-01 ... 1.04557908e+00 3.40881288e-01 -1.67820823e+00] [ 8.28450263e-01 -8.06369483e-02 1.87777865e+00 ... 9.35941517e-01 2.10300833e-03 -9.60662067e-01]] [[ 9.24945056e-01 1.42549682e+00 -8.29555690e-01 ... 5.40566407e-02 -8.30822527e-01 4.66292530e-01] [ 6.84924901e-01 -6.23363972e-01 -1.95833445e-01 ... 3.95173490e-01 2.90122628e-02 2.49106616e-01] [-9.84170914e-01 4.64445919e-01 4.02613014e-01 ... 
8.95803630e-01 -2.02077603e+00 -9.42169309e-01] ... [ 2.22496271e+00 -4.42888796e-01 2.78571397e-01 ... -2.84407288e-01 -2.81980801e-02 7.02812970e-01] [-5.04592597e-01 -1.37659883e+00 -2.45408511e+00 ... 2.57270741e+00 1.57080102e+00 -6.89459741e-01] [-8.36829066e-01 4.38464880e-01 -4.11267012e-01 ... -9.05655622e-01 -7.52072632e-01 8.42113912e-01]] ... [[ 6.89077914e-01 -8.19120705e-01 -1.89959180e+00 ... 3.33666533e-01 -5.91709197e-01 5.92373073e-01] [ 1.44250542e-01 -1.03278959e+00 8.21247280e-01 ... -1.07324493e+00 -4.32070643e-01 -8.18977594e-01] [-1.44798860e-01 -7.14679480e-01 9.61739600e-01 ... -1.88104689e-01 -9.93189096e-01 4.72808212e-01] ... [ 2.04258847e+00 2.06054044e+00 -6.58275127e-01 ... 1.31328940e+00 1.33369386e+00 4.34238613e-01] [ 1.58046842e+00 -5.03930032e-01 -3.92408133e-01 ... -5.91681361e-01 -2.57759213e+00 -5.26952624e-01] [-1.05939746e+00 5.39327227e-02 -1.25002742e+00 ... 1.94295347e-01 2.94548571e-01 9.77309525e-01]] [[-7.48547137e-01 2.39458621e-01 -8.15076768e-01 ... 1.77232051e+00 7.75645852e-01 4.45229620e-01] [ 7.06702709e-01 -8.54489744e-01 -2.88598061e-01 ... 2.63796449e-01 -1.74073614e-02 -4.89512771e-01] [-1.22176719e+00 1.07906199e+00 2.16535234e+00 ... 2.48051509e-01 1.85536420e+00 -4.68232371e-02] ... [ 1.09340882e+00 1.29962516e+00 -5.03880501e-01 ... -8.47023427e-01 1.58611089e-01 6.59955680e-01] [ 1.67656735e-01 8.04923177e-01 -8.41115415e-02 ... 1.11262396e-01 1.72916603e+00 1.67123866e+00] [-7.47302234e-01 -7.52290308e-01 -7.77159572e-01 ... -6.64910197e-01 7.87916005e-01 1.81860864e+00]] [[-3.50888729e-01 3.38648677e-01 -8.28426242e-01 ... 8.63652349e-01 6.59217536e-01 1.63964367e+00] [-8.33804011e-01 2.12394190e+00 1.91558208e-02 ... -1.82362771e+00 -8.25596035e-01 7.95177400e-01] [-5.99093854e-01 1.61646736e+00 -7.58356676e-02 ... 4.06340241e-01 4.44120258e-01 2.41832757e+00] ... [-2.60824275e+00 -8.67479920e-01 -1.35458493e+00 ... 
-1.27626851e-01 4.95758027e-01 -2.54149944e-01] [-1.61932147e+00 -1.76029086e-01 1.07011294e+00 ... -1.76047659e+00 -2.90465713e-01 -1.00990176e+00] [-7.84183979e-01 -1.98852408e+00 -6.03355229e-01 ... 1.02847308e-01 1.37848830e+00 -4.83153909e-01]]] [[[ 1.43018937e+00 -1.82030571e+00 -1.35562134e+00 ... 1.24056125e+00 -3.80950421e-01 7.31903553e-01] [-1.75627100e+00 5.23657441e-01 3.81468167e-03 ... -1.66375148e+00 -2.95599680e-02 -3.20882618e-01] [-5.95475137e-01 1.33011734e+00 1.18636250e+00 ... 4.05743331e-01 -7.27061391e-01 6.33009136e-01] ... [-2.29637086e-01 -7.46025085e-01 2.03268600e+00 ... -3.82427692e-01 -1.37033331e+00 6.77025139e-01] [ 9.70316648e-01 1.30120659e+00 1.24377036e+00 ... -6.45261526e-01 4.18244004e-01 2.83430606e-01] [-9.61371779e-01 -1.07130063e+00 3.23287398e-01 ... -5.36996603e-01 1.31869221e+00 2.33711526e-02]] [[-4.10966337e-01 5.81006289e-01 3.79894286e-01 ... 4.79943752e-01 -9.84796464e-01 -2.58901089e-01] [-1.35042453e+00 5.39207876e-01 -7.67343700e-01 ... 6.44743919e-01 2.80232102e-01 1.24073470e+00] [ 2.13378072e+00 3.13303083e-01 -1.43156841e-01 ... 5.37399709e-01 1.45729280e+00 1.02355413e-01] ... [ 1.68804765e+00 8.32311690e-01 -1.26938331e+00 ... 1.68507481e+00 7.67222226e-01 -2.78249174e-01] [ 4.33979362e-01 6.42188251e-01 -7.57597983e-01 ... 8.17420006e-01 -1.38072145e+00 -7.25783333e-02] [ 2.46119404e+00 -2.22812425e-02 1.83343160e+00 ... 3.25380951e-01 3.23234260e-01 -1.37353909e+00]] [[ 5.87210834e-01 -8.38221610e-01 -8.36409271e-01 ... 5.37503421e-01 -4.84713107e-01 -9.17085886e-01] [ 1.45276725e+00 -7.62004331e-02 -9.54371452e-01 ... 9.98587608e-01 -3.21744233e-01 -7.08417952e-01] [ 4.63948756e-01 -4.31512266e-01 9.53833222e-01 ... -2.23010834e-02 6.11085832e-01 -1.79457843e-01] ... [ 5.56398511e-01 -8.69394660e-01 8.45552921e-01 ... 4.57503527e-01 1.68103158e-01 1.25174618e+00] [-5.31190395e-01 3.71885657e-01 6.50744319e-01 ... 
1.40176272e+00 -8.87723744e-01 -1.08029163e+00] [ 7.24526107e-01 6.49136722e-01 -1.28850028e-01 ... 7.63696551e-01 6.95734441e-01 5.28240085e-01]] ... [[ 8.93066376e-02 -3.73473093e-02 7.27857172e-01 ... 6.72741771e-01 4.63725477e-01 -2.06802154e+00] [ 7.09740520e-01 -4.80426550e-01 -4.35818464e-01 ... 5.24435341e-01 1.50767016e+00 6.00239635e-02] [ 2.25884095e-01 8.80894735e-02 -1.23103309e+00 ... -1.62525523e+00 1.61649740e+00 1.74077660e-01] ... [-5.85692763e-01 -3.64115417e-01 2.92825699e-01 ... 2.38498664e+00 1.27759099e+00 -7.42927074e-01] [ 5.79509079e-01 9.29005444e-01 6.08236253e-01 ... 2.92864472e-01 3.87220562e-01 -6.97670996e-01] [-4.44258213e-01 6.74790025e-01 5.30088544e-01 ... -1.41155767e+00 -4.45125252e-01 -3.43250260e-02]] [[ 1.95045788e-02 -2.02142310e+00 2.21873727e-02 ... 7.98458278e-01 2.55842030e-01 8.30599666e-01] [ 3.66558790e-01 2.73508906e-01 -8.06366503e-01 ... -2.70349029e-02 -6.20991111e-01 -4.21687722e-01] [-1.26884794e+00 1.36552787e+00 -1.53840184e+00 ... 1.08074749e+00 -2.49306655e+00 -1.97293890e+00] ... [ 6.13766074e-01 6.21775806e-01 -5.32576859e-01 ... 5.14054596e-01 -9.79541123e-01 9.02524531e-01] [ 1.04652703e+00 4.89375144e-01 2.89730012e-01 ... 5.09908915e-01 1.60226607e+00 -1.13940310e+00] [ 4.88568097e-01 1.21055877e+00 -5.16898394e-01 ... 7.49058723e-02 -1.00877032e-01 -1.40306020e+00]] [[ 1.70571685e+00 -1.30952522e-01 -9.40839946e-01 ... 1.71914026e-01 -4.17501658e-01 1.37135661e+00] [ 9.63182509e-01 -7.28522599e-01 9.71937716e-01 ... -7.67421722e-01 -4.04137820e-01 -7.21702337e-01] [-1.78975689e+00 8.66965890e-01 4.07055169e-01 ... 2.53849745e+00 2.75691271e-01 2.24099725e-01] ... [ 2.62251168e-01 -7.66102552e-01 6.23767860e-02 ... 3.93181324e-01 -6.63086593e-01 -9.89710748e-01] [ 1.10322404e+00 1.01336324e+00 -7.87130930e-03 ... -2.22823858e+00 4.55564588e-01 1.61843407e+00] [ 6.29245102e-01 1.49895415e-01 1.30717492e+00 ... 
2.70538568e-01 1.84866142e+00 9.52670515e-01]]] [[[ 1.29363704e+00 8.15338254e-01 -1.04367244e+00 ... 2.72514999e-01 -2.08336785e-02 -1.54236150e+00] [ 7.20737934e-01 6.57787025e-01 2.70248383e-01 ... 8.08131248e-02 1.47625649e+00 1.05257618e+00] [-4.00279045e-01 -1.01260364e+00 -1.47264552e+00 ... 1.05899377e-02 -7.93783009e-01 7.49893069e-01] ... [-1.00833364e-01 -6.00916505e-01 -2.26766944e+00 ... 1.94764304e+00 -5.96458800e-02 4.04672712e-01] [ 3.52617830e-01 4.58657265e-01 -4.62140560e-01 ... 1.65403044e+00 -1.89369947e-01 -7.45322049e-01] [ 8.20566714e-01 -3.67413878e-01 1.49026930e+00 ... 1.02270007e+00 1.88690424e+00 -9.19551849e-01]] [[ 1.11459875e+00 -9.46903944e-01 -1.37623549e+00 ... -6.95697917e-03 3.38427216e-01 7.64698327e-01] [ 6.60345614e-01 -2.35648346e+00 1.62999189e+00 ... -9.75130945e-02 2.85683662e-01 1.49303782e+00] [-5.00719070e-01 -2.33480319e-01 -9.63451713e-02 ... -1.58815515e+00 -1.74544916e-01 1.41421318e+00] ... [ 7.17406511e-01 -5.79761624e-01 6.80699170e-01 ... -1.96427964e-02 -2.20664293e-01 -1.98516712e-01] [-2.26670694e+00 -1.29819727e+00 -4.33763385e-01 ... -4.05150540e-02 -1.81395069e-01 -7.53308237e-01] [-5.54665148e-01 4.11563933e-01 2.70424604e-01 ... -1.84780598e+00 8.23118687e-01 -4.96387005e-01]] [[ 4.52065617e-01 -1.88642454e+00 -9.58587050e-01 ... 5.64168632e-01 6.67724609e-01 2.29195461e-01] [ 3.27884667e-02 1.63464040e-01 3.23519200e-01 ... 5.50788939e-01 5.59607983e-01 3.55661929e-01] [-4.32989717e-01 2.42539573e+00 1.11095214e+00 ... 9.62026894e-01 -5.19736648e-01 2.20224515e-01] ... [-1.99145466e-01 6.66054666e-01 -2.00181222e+00 ... 1.29902649e+00 4.40619975e-01 8.60702515e-01] [ 8.40001822e-01 1.29472995e+00 9.24070120e-01 ... -2.28099898e-01 -8.33334863e-01 -9.44100022e-01] [-2.15727285e-01 5.27142584e-01 1.90071654e+00 ... -1.46317172e+00 1.16552539e-01 1.09706879e+00]] ... [[ 1.56999752e-01 1.52347434e+00 -3.07016931e-02 ... 
-7.69110262e-01 -7.73995638e-01 -2.40422070e-01] [-1.06943655e+00 -2.01511431e+00 -1.44540393e+00 ... 9.16359425e-01 1.80891275e-01 -2.82407969e-01] [-9.53423679e-01 -2.79240346e+00 2.78287768e-01 ... 5.59297323e-01 8.87969255e-01 -1.77818680e+00] ... [ 1.17172694e+00 -7.98103690e-01 -1.08151078e+00 ... 1.67014897e-01 -3.78715694e-01 -6.04815543e-01] [ 1.34875566e-01 2.79019564e-01 -1.59786955e-01 ... 1.43701291e+00 -9.64088976e-01 6.87612474e-01] [-1.35108814e-01 -1.13416302e+00 1.34983826e+00 ... 1.62183452e+00 -1.20259613e-01 -1.06710100e+00]] [[ 1.66294813e+00 1.08601165e+00 -1.10209250e+00 ... -1.83298945e+00 1.43004525e+00 1.03245270e+00] [-1.25668883e+00 4.43986356e-01 -1.18062985e+00 ... -1.19115913e+00 2.91888744e-01 1.42725036e-01] [-3.09770077e-01 1.14021981e+00 8.45863581e-01 ... -4.96498883e-01 1.07425904e+00 -8.68761837e-01] ... [ 6.17933989e-01 2.15292782e-01 2.77178288e-01 ... -2.07718045e-01 -5.33715904e-01 -1.24851525e-01] [ 7.90646553e-01 -6.04182303e-01 -1.09602392e+00 ... -3.43677670e-01 2.66534954e-01 1.28701246e+00] [ 6.64421141e-01 1.66176045e+00 3.29559520e-02 ... 6.50866389e-01 -3.35042626e-01 -1.11043358e+00]] [[ 1.13754332e+00 -1.20386219e+00 9.33536351e-01 ... -4.09186512e-01 -9.33257759e-01 1.63993454e+00] [ 8.03960800e-01 4.28639501e-01 -7.77555645e-01 ... -1.90586194e-01 -1.45239770e-01 9.13886011e-01] [ 2.60155112e-01 -1.01701505e-01 5.06015420e-01 ... 8.31554651e-01 6.42604157e-02 -2.55037338e-01] ... [ 5.98528981e-01 -2.33138490e+00 3.51260543e-01 ... -9.32964623e-01 5.99550366e-01 -1.30496216e+00] [ 1.52295482e+00 2.42373690e-01 -8.70826468e-03 ... 1.16386190e-02 -9.60271716e-01 1.82498050e+00] [ 1.37364006e+00 -3.95514429e-01 -7.11600006e-01 ... -8.98171544e-01 5.06575406e-01 2.04608488e+00]]]]]; ov_res: [[[[[-7.86556125e-01 1.61165297e-02 1.55708730e-01 ... 5.34119308e-01 -2.07152948e-01 1.40357530e+00] [ 1.70643938e+00 -8.62871349e-01 1.90306294e+00 ... 
-3.38140070e-01 -9.99904513e-01 -8.24561119e-01] [ 1.42607653e+00 -9.54729855e-01 -4.90347922e-01 ... -3.80269587e-01 3.74048576e-02 5.25581300e-01] ... [-1.36141324e+00 -1.50619054e+00 -8.67787361e-01 ... 1.82080865e-01 1.30290174e+00 -4.70124722e-01] [-1.16339672e+00 -5.04574537e-01 5.90756297e-01 ... -6.28376842e-01 -5.24784863e-01 -1.00507450e+00] [ 4.32638405e-03 -3.89702499e-01 -1.65517008e+00 ... -2.49730676e-01 1.12992680e+00 6.36589408e-01]] [[-1.29209971e+00 5.48749804e-01 -9.51487347e-02 ... 1.69586316e-02 9.49732900e-01 -2.60048360e-01] [ 1.93358970e+00 9.38344240e-01 -1.50723732e+00 ... -9.60099280e-01 4.64940131e-01 9.39166471e-02] [ 1.00067161e-01 9.40396130e-01 8.40672314e-01 ... -2.94083804e-01 7.27708101e-01 -7.79959977e-01] ... [ 6.62074506e-01 3.58591735e-01 -6.04921997e-01 ... -2.05020517e-01 5.06780386e-01 -1.73109889e-01] [ 1.99274421e-01 -5.38457990e-01 -7.17106879e-01 ... 1.04557908e+00 3.40881288e-01 -1.67820823e+00] [ 8.28450263e-01 -8.06369483e-02 1.87777865e+00 ... 9.35941517e-01 2.10300833e-03 -9.60662067e-01]] [[ 9.24945056e-01 1.42549682e+00 -8.29555690e-01 ... 5.40566407e-02 -8.30822527e-01 4.66292530e-01] [ 6.84924901e-01 -6.23363972e-01 -1.95833445e-01 ... 3.95173490e-01 2.90122628e-02 2.49106616e-01] [-9.84170914e-01 4.64445919e-01 4.02613014e-01 ... 8.95803630e-01 -2.02077603e+00 -9.42169309e-01] ... [ 2.22496271e+00 -4.42888796e-01 2.78571397e-01 ... -2.84407288e-01 -2.81980801e-02 7.02812970e-01] [-5.04592597e-01 -1.37659883e+00 -2.45408511e+00 ... 2.57270741e+00 1.57080102e+00 -6.89459741e-01] [-8.36829066e-01 4.38464880e-01 -4.11267012e-01 ... -9.05655622e-01 -7.52072632e-01 8.42113912e-01]] ... [[ 6.89077914e-01 -8.19120705e-01 -1.89959180e+00 ... 3.33666533e-01 -5.91709197e-01 5.92373073e-01] [ 1.44250542e-01 -1.03278959e+00 8.21247280e-01 ... -1.07324493e+00 -4.32070643e-01 -8.18977594e-01] [-1.44798860e-01 -7.14679480e-01 9.61739600e-01 ... -1.88104689e-01 -9.93189096e-01 4.72808212e-01] ... 
[ 2.04258847e+00 2.06054044e+00 -6.58275127e-01 ... 1.31328940e+00 1.33369386e+00 4.34238613e-01] [ 1.58046842e+00 -5.03930032e-01 -3.92408133e-01 ... -5.91681361e-01 -2.57759213e+00 -5.26952624e-01] [-1.05939746e+00 5.39327227e-02 -1.25002742e+00 ... 1.94295347e-01 2.94548571e-01 9.77309525e-01]] [[-7.48547137e-01 2.39458621e-01 -8.15076768e-01 ... 1.77232051e+00 7.75645852e-01 4.45229620e-01] [ 7.06702709e-01 -8.54489744e-01 -2.88598061e-01 ... 2.63796449e-01 -1.74073614e-02 -4.89512771e-01] [-1.22176719e+00 1.07906199e+00 2.16535234e+00 ... 2.48051509e-01 1.85536420e+00 -4.68232371e-02] ... [ 1.09340882e+00 1.29962516e+00 -5.03880501e-01 ... -8.47023427e-01 1.58611089e-01 6.59955680e-01] [ 1.67656735e-01 8.04923177e-01 -8.41115415e-02 ... 1.11262396e-01 1.72916603e+00 1.67123866e+00] [-7.47302234e-01 -7.52290308e-01 -7.77159572e-01 ... -6.64910197e-01 7.87916005e-01 1.81860864e+00]] [[-3.50888729e-01 3.38648677e-01 -8.28426242e-01 ... 8.63652349e-01 6.59217536e-01 1.63964367e+00] [-8.33804011e-01 2.12394190e+00 1.91558208e-02 ... -1.82362771e+00 -8.25596035e-01 7.95177400e-01] [-5.99093854e-01 1.61646736e+00 -7.58356676e-02 ... 4.06340241e-01 4.44120258e-01 2.41832757e+00] ... [-2.60824275e+00 -8.67479920e-01 -1.35458493e+00 ... -1.27626851e-01 4.95758027e-01 -2.54149944e-01] [-1.61932147e+00 -1.76029086e-01 1.07011294e+00 ... -1.76047659e+00 -2.90465713e-01 -1.00990176e+00] [-7.84183979e-01 -1.98852408e+00 -6.03355229e-01 ... 1.02847308e-01 1.37848830e+00 -4.83153909e-01]]] [[[ 1.43018937e+00 -1.82030571e+00 -1.35562134e+00 ... 1.24056125e+00 -3.80950421e-01 7.31903553e-01] [-1.75627100e+00 5.23657441e-01 3.81468167e-03 ... -1.66375148e+00 -2.95599680e-02 -3.20882618e-01] [-5.95475137e-01 1.33011734e+00 1.18636250e+00 ... 4.05743331e-01 -7.27061391e-01 6.33009136e-01] ... [-2.29637086e-01 -7.46025085e-01 2.03268600e+00 ... -3.82427692e-01 -1.37033331e+00 6.77025139e-01] [ 9.70316648e-01 1.30120659e+00 1.24377036e+00 ... 
-6.45261526e-01 4.18244004e-01 2.83430606e-01] [-9.61371779e-01 -1.07130063e+00 3.23287398e-01 ... -5.36996603e-01 1.31869221e+00 2.33711526e-02]] [[-4.10966337e-01 5.81006289e-01 3.79894286e-01 ... 4.79943752e-01 -9.84796464e-01 -2.58901089e-01] [-1.35042453e+00 5.39207876e-01 -7.67343700e-01 ... 6.44743919e-01 2.80232102e-01 1.24073470e+00] [ 2.13378072e+00 3.13303083e-01 -1.43156841e-01 ... 5.37399709e-01 1.45729280e+00 1.02355413e-01] ... [ 1.68804765e+00 8.32311690e-01 -1.26938331e+00 ... 1.68507481e+00 7.67222226e-01 -2.78249174e-01] [ 4.33979362e-01 6.42188251e-01 -7.57597983e-01 ... 8.17420006e-01 -1.38072145e+00 -7.25783333e-02] [ 2.46119404e+00 -2.22812425e-02 1.83343160e+00 ... 3.25380951e-01 3.23234260e-01 -1.37353909e+00]] [[ 5.87210834e-01 -8.38221610e-01 -8.36409271e-01 ... 5.37503421e-01 -4.84713107e-01 -9.17085886e-01] [ 1.45276725e+00 -7.62004331e-02 -9.54371452e-01 ... 9.98587608e-01 -3.21744233e-01 -7.08417952e-01] [ 4.63948756e-01 -4.31512266e-01 9.53833222e-01 ... -2.23010834e-02 6.11085832e-01 -1.79457843e-01] ... [ 5.56398511e-01 -8.69394660e-01 8.45552921e-01 ... 4.57503527e-01 1.68103158e-01 1.25174618e+00] [-5.31190395e-01 3.71885657e-01 6.50744319e-01 ... 1.40176272e+00 -8.87723744e-01 -1.08029163e+00] [ 7.24526107e-01 6.49136722e-01 -1.28850028e-01 ... 7.63696551e-01 6.95734441e-01 5.28240085e-01]] ... [[ 8.93066376e-02 -3.73473093e-02 7.27857172e-01 ... 6.72741771e-01 4.63725477e-01 -2.06802154e+00] [ 7.09740520e-01 -4.80426550e-01 -4.35818464e-01 ... 5.24435341e-01 1.50767016e+00 6.00239635e-02] [ 2.25884095e-01 8.80894735e-02 -1.23103309e+00 ... -1.62525523e+00 1.61649740e+00 1.74077660e-01] ... [-5.85692763e-01 -3.64115417e-01 2.92825699e-01 ... 2.38498664e+00 1.27759099e+00 -7.42927074e-01] [ 5.79509079e-01 9.29005444e-01 6.08236253e-01 ... 2.92864472e-01 3.87220562e-01 -6.97670996e-01] [-4.44258213e-01 6.74790025e-01 5.30088544e-01 ... 
-1.41155767e+00 -4.45125252e-01 -3.43250260e-02]] [[ 1.95045788e-02 -2.02142310e+00 2.21873727e-02 ... 7.98458278e-01 2.55842030e-01 8.30599666e-01] [ 3.66558790e-01 2.73508906e-01 -8.06366503e-01 ... -2.70349029e-02 -6.20991111e-01 -4.21687722e-01] [-1.26884794e+00 1.36552787e+00 -1.53840184e+00 ... 1.08074749e+00 -2.49306655e+00 -1.97293890e+00] ... [ 6.13766074e-01 6.21775806e-01 -5.32576859e-01 ... 5.14054596e-01 -9.79541123e-01 9.02524531e-01] [ 1.04652703e+00 4.89375144e-01 2.89730012e-01 ... 5.09908915e-01 1.60226607e+00 -1.13940310e+00] [ 4.88568097e-01 1.21055877e+00 -5.16898394e-01 ... 7.49058723e-02 -1.00877032e-01 -1.40306020e+00]] [[ 1.70571685e+00 -1.30952522e-01 -9.40839946e-01 ... 1.71914026e-01 -4.17501658e-01 1.37135661e+00] [ 9.63182509e-01 -7.28522599e-01 9.71937716e-01 ... -7.67421722e-01 -4.04137820e-01 -7.21702337e-01] [-1.78975689e+00 8.66965890e-01 4.07055169e-01 ... 2.53849745e+00 2.75691271e-01 2.24099725e-01] ... [ 2.62251168e-01 -7.66102552e-01 6.23767860e-02 ... 3.93181324e-01 -6.63086593e-01 -9.89710748e-01] [ 1.10322404e+00 1.01336324e+00 -7.87130930e-03 ... -2.22823858e+00 4.55564588e-01 1.61843407e+00] [ 6.29245102e-01 1.49895415e-01 1.30717492e+00 ... 2.70538568e-01 1.84866142e+00 9.52670515e-01]]] [[[ 1.29363704e+00 8.15338254e-01 -1.04367244e+00 ... 2.72514999e-01 -2.08336785e-02 -1.54236150e+00] [ 7.20737934e-01 6.57787025e-01 2.70248383e-01 ... 8.08131248e-02 1.47625649e+00 1.05257618e+00] [-4.00279045e-01 -1.01260364e+00 -1.47264552e+00 ... 1.05899377e-02 -7.93783009e-01 7.49893069e-01] ... [-1.00833364e-01 -6.00916505e-01 -2.26766944e+00 ... 1.94764304e+00 -5.96458800e-02 4.04672712e-01] [ 3.52617830e-01 4.58657265e-01 -4.62140560e-01 ... 1.65403044e+00 -1.89369947e-01 -7.45322049e-01] [ 8.20566714e-01 -3.67413878e-01 1.49026930e+00 ... 1.02270007e+00 1.88690424e+00 -9.19551849e-01]] [[ 1.11459875e+00 -9.46903944e-01 -1.37623549e+00 ... 
-6.95697917e-03 3.38427216e-01 7.64698327e-01] [ 6.60345614e-01 -2.35648346e+00 1.62999189e+00 ... -9.75130945e-02 2.85683662e-01 1.49303782e+00] [-5.00719070e-01 -2.33480319e-01 -9.63451713e-02 ... -1.58815515e+00 -1.74544916e-01 1.41421318e+00] ... [ 7.17406511e-01 -5.79761624e-01 6.80699170e-01 ... -1.96427964e-02 -2.20664293e-01 -1.98516712e-01] [-2.26670694e+00 -1.29819727e+00 -4.33763385e-01 ... -4.05150540e-02 -1.81395069e-01 -7.53308237e-01] [-5.54665148e-01 4.11563933e-01 2.70424604e-01 ... -1.84780598e+00 8.23118687e-01 -4.96387005e-01]] [[ 4.52065617e-01 -1.88642454e+00 -9.58587050e-01 ... 5.64168632e-01 6.67724609e-01 2.29195461e-01] [ 3.27884667e-02 1.63464040e-01 3.23519200e-01 ... 5.50788939e-01 5.59607983e-01 3.55661929e-01] [-4.32989717e-01 2.42539573e+00 1.11095214e+00 ... 9.62026894e-01 -5.19736648e-01 2.20224515e-01] ... [-1.99145466e-01 6.66054666e-01 -2.00181222e+00 ... 1.29902649e+00 4.40619975e-01 8.60702515e-01] [ 8.40001822e-01 1.29472995e+00 9.24070120e-01 ... -2.28099898e-01 -8.33334863e-01 -9.44100022e-01] [-2.15727285e-01 5.27142584e-01 1.90071654e+00 ... -1.46317172e+00 1.16552539e-01 1.09706879e+00]] ... [[ 1.56999752e-01 1.52347434e+00 -3.07016931e-02 ... -7.69110262e-01 -7.73995638e-01 -2.40422070e-01] [-1.06943655e+00 -2.01511431e+00 -1.44540393e+00 ... 9.16359425e-01 1.80891275e-01 -2.82407969e-01] [-9.53423679e-01 -2.79240346e+00 2.78287768e-01 ... 5.59297323e-01 8.87969255e-01 -1.77818680e+00] ... [ 1.17172694e+00 -7.98103690e-01 -1.08151078e+00 ... 1.67014897e-01 -3.78715694e-01 -6.04815543e-01] [ 1.34875566e-01 2.79019564e-01 -1.59786955e-01 ... 1.43701291e+00 -9.64088976e-01 6.87612474e-01] [-1.35108814e-01 -1.13416302e+00 1.34983826e+00 ... 1.62183452e+00 -1.20259613e-01 -1.06710100e+00]] [[ 1.66294813e+00 1.08601165e+00 -1.10209250e+00 ... -1.83298945e+00 1.43004525e+00 1.03245270e+00] [-1.25668883e+00 4.43986356e-01 -1.18062985e+00 ... 
-1.19115913e+00 2.91888744e-01 1.42725036e-01] [-3.09770077e-01 1.14021981e+00 8.45863581e-01 ... -4.96498883e-01 1.07425904e+00 -8.68761837e-01] ... [ 6.17933989e-01 2.15292782e-01 2.77178288e-01 ... -2.07718045e-01 -5.33715904e-01 -1.24851525e-01] [ 7.90646553e-01 -6.04182303e-01 -1.09602392e+00 ... -3.43677670e-01 2.66534954e-01 1.28701246e+00] [ 6.64421141e-01 1.66176045e+00 3.29559520e-02 ... 6.50866389e-01 -3.35042626e-01 -1.11043358e+00]] [[ 1.13754332e+00 -1.20386219e+00 9.33536351e-01 ... -4.09186512e-01 -9.33257759e-01 1.63993454e+00] [ 8.03960800e-01 4.28639501e-01 -7.77555645e-01 ... -1.90586194e-01 -1.45239770e-01 9.13886011e-01] [ 2.60155112e-01 -1.01701505e-01 5.06015420e-01 ... 8.31554651e-01 6.42604157e-02 -2.55037338e-01] ... [ 5.98528981e-01 -2.33138490e+00 3.51260543e-01 ... -9.32964623e-01 5.99550366e-01 -1.30496216e+00] [ 1.52295482e+00 2.42373690e-01 -8.70826468e-03 ... 1.16386190e-02 -9.60271716e-01 1.82498050e+00] [ 1.37364006e+00 -3.95514429e-01 -7.11600006e-01 ... -8.98171544e-01 5.06575406e-01 2.04608488e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(0, 0, -1, -2, 0, 0) - mode:circular - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5584.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="circular"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[0, 0, -1, -2, 0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[-2.86036432e-01 -1.07301378e+00 -6.73609257e-01 ... 9.62049723e-01 4.31815207e-01 -2.70735264e-01] [ 1.20714474e+00 1.36485589e+00 -2.96295375e-01 ... 1.47553611e+00 -4.74835306e-01 1.99355707e-01] [ 7.46628582e-01 -1.11519158e-01 4.09376025e-01 ... -1.79094446e+00 8.54500458e-02 1.64577806e+00] ... [-1.86705732e+00 -1.55761689e-01 5.47306597e-01 ... -2.97169805e-01 -4.79737312e-01 -2.46489763e-01] [-5.65173090e-01 -1.95608184e-01 -1.10015476e+00 ... -3.22875053e-01 5.68982542e-01 7.73634493e-01] [-5.85130692e-01 -5.74029386e-01 1.69174647e+00 ... 5.75565577e-01 1.58759260e+00 1.96521163e+00]] [[ 1.02867472e+00 2.77301431e+00 -6.83626056e-01 ... 3.77100945e-01 -7.53734529e-01 9.32176352e-01] [-9.56607223e-01 -6.42532349e-01 -2.46915311e-01 ... -1.09435368e+00 -1.02244473e+00 1.79446328e+00] [-9.72106829e-02 -6.13369107e-01 2.94237494e-01 ... -4.42182600e-01 4.95113969e-01 -9.35632646e-01] ... [ 2.83481210e-01 1.46704602e+00 1.11376154e+00 ... -1.88827229e+00 1.29357648e+00 5.57028921e-03] [-1.12552392e+00 2.36399031e+00 -1.10048521e+00 ... -1.10701501e+00 -1.05620432e+00 6.64827526e-01] [-6.70180023e-01 9.95330155e-01 -1.29375041e-01 ... 1.17582016e-01 -2.54537702e-01 3.77369821e-01]] [[ 3.22631866e-01 -1.46481168e+00 -1.76814830e+00 ... -5.54854870e-02 5.89945734e-01 -8.27878594e-01] [-4.19734836e-01 -1.24579811e+00 1.03664808e-01 ... -1.79704025e-01 2.80698031e-01 -1.16046286e+00] [ 1.12947129e-01 -2.05132023e-01 6.14297509e-01 ... 
9.06736493e-01 -4.76455599e-01 1.98522002e-01] ... [ 5.27285576e-01 -1.10497868e+00 -2.14693308e+00 ... -2.22456336e+00 -7.14259207e-01 -2.10178241e-01] [ 3.22171736e+00 -9.21389103e-01 -1.21291630e-01 ... 9.28033829e-01 -6.14227951e-01 6.14592314e-01] [ 1.57039237e+00 -1.13453412e+00 8.06318521e-02 ... 1.87243557e+00 -1.11315274e+00 4.20902252e-01]] ... [[-5.96962869e-01 -2.27886036e-01 -1.54516503e-01 ... 2.12438092e-01 9.44968164e-02 -7.41364658e-01] [ 1.18241596e+00 6.71187118e-02 1.19379830e+00 ... 1.91486895e+00 -9.63450074e-01 1.07960798e-01] [ 5.21764994e-01 -2.04939589e-01 -4.14099753e-01 ... 2.85988390e-01 7.82539323e-02 5.97586989e-01] ... [-9.90724683e-01 3.53133619e-01 6.88469052e-01 ... 1.89673305e-01 1.06393611e+00 -5.06681502e-01] [ 1.78824055e+00 -2.00260594e-01 2.00100017e+00 ... -1.35883141e+00 -2.22603992e-01 1.19483805e+00] [-2.20283651e+00 9.53411758e-01 2.69133002e-01 ... -1.80413795e+00 6.93684161e-01 -3.03684354e-01]] [[ 1.70684743e+00 -9.52357173e-01 -6.33570790e-01 ... -1.13224208e+00 -2.27742791e-02 -1.69246256e+00] [ 4.78577524e-01 2.34041786e+00 2.20383000e+00 ... -1.42446756e+00 -9.40360785e-01 -2.88194448e-01] [-3.70334923e-01 8.98911893e-01 -7.74456978e-01 ... 3.67252231e-02 1.60036600e+00 -6.61739707e-01] ... [ 1.46254086e+00 -1.67715108e+00 -1.59083748e+00 ... -5.51908493e-01 -5.00013053e-01 -2.16096640e+00] [-8.86064172e-01 -2.04157448e+00 -4.44428951e-01 ... -6.84265077e-01 -5.90144396e-01 -1.49958873e+00] [ 1.03786141e-01 -4.14615542e-01 -1.30323970e+00 ... -9.42940056e-01 1.09167171e+00 2.16807628e+00]] [[ 9.02133584e-01 -5.38778961e-01 -2.12112442e-01 ... -8.74217629e-01 -4.81113642e-01 1.16191351e+00] [ 6.06592298e-01 1.95115542e+00 1.42312098e+00 ... -1.53346539e+00 -2.24048108e-01 2.22663060e-01] [ 8.66154671e-01 7.05235079e-02 -1.02311647e+00 ... -8.53488088e-01 -2.36245587e-01 5.57124376e-01] ... [ 9.79719937e-01 4.15632397e-01 2.45352006e+00 ... 
1.03188396e+00 8.41557264e-01 -1.03061788e-01] [ 7.43667305e-01 -2.32493713e-01 -7.07414925e-01 ... -1.58151972e+00 -6.99891806e-01 -4.97260332e-01] [-2.06574798e+00 2.17475510e+00 8.90542746e-01 ... 1.60401538e-01 -2.12591553e+00 -5.73091924e-01]]] [[[-8.10773551e-01 -1.74334550e+00 5.83185792e-01 ... -1.16778779e+00 -7.66352832e-01 1.71967137e+00] [-8.64291847e-01 -4.04465795e-01 6.14443347e-02 ... 9.99422491e-01 -1.53555489e+00 -5.83630741e-01] [ 4.03624177e-01 5.44712961e-01 -2.21113533e-01 ... -1.92188874e-01 -9.47219312e-01 -7.99512714e-02] ... [-7.77190089e-01 -1.24722695e+00 1.52982759e+00 ... 1.06141761e-01 -2.37960890e-02 1.32678998e+00] [-9.13021207e-01 1.94690478e+00 6.92754090e-01 ... 2.16389465e+00 1.14502227e+00 -4.88063432e-02] [-2.98436582e-01 8.00872743e-02 7.20398366e-01 ... 1.89097524e-02 -8.64540637e-01 6.47618413e-01]] [[-2.60029364e+00 -2.63195664e-01 -1.50223136e+00 ... 4.74073648e-01 6.34558201e-02 -5.46014309e-01] [ 1.74168622e+00 -4.30040024e-02 -1.12004852e+00 ... -1.26600516e+00 -4.37347084e-01 -1.31270796e-01] [-7.41674960e-01 -1.17459345e+00 4.61504340e-01 ... -1.74790776e+00 -7.88105428e-01 -4.50408250e-01] ... [ 1.81482518e+00 -6.62690401e-01 -6.09070182e-01 ... 1.06798744e+00 -5.57682633e-01 1.82821512e-01] [-6.79490805e-01 -3.69503908e-02 1.33323148e-01 ... -1.25932753e+00 8.15875232e-01 5.03592372e-01] [-9.63275880e-02 -5.36824055e-02 8.01825225e-01 ... -2.94007540e-01 -1.62832594e+00 -1.03256893e+00]] [[-3.80447149e-01 -2.96887428e-01 1.32175875e+00 ... -1.21502113e+00 1.80664968e+00 -1.46597373e+00] [-1.94311991e-01 2.04347491e+00 1.23682094e+00 ... 3.75149995e-01 -6.68637693e-01 -8.08435977e-01] [-6.76824972e-02 2.84686893e-01 -7.56676137e-01 ... -7.35293508e-01 -7.75203586e-01 -5.48606575e-01] ... [ 8.85005116e-01 2.19778657e+00 7.06823409e-01 ... 4.68509704e-01 -1.93277597e-02 -6.24910355e-01] [ 2.30414346e-01 3.05445820e-01 -1.13252550e-02 ... 
-1.42866805e-01 8.42905641e-01 2.48700663e-01] [-2.11048335e-01 -3.29672962e-01 1.16231453e+00 ... 1.33240327e-01 1.59354579e+00 -1.17188787e+00]] ... [[-3.05940002e-01 -1.87303871e-01 -4.42571729e-01 ... 9.79165018e-01 3.11300009e-02 5.86066991e-02] [-1.28231883e+00 5.99440217e-01 1.26651490e+00 ... -2.34196961e-01 -8.03557038e-01 6.20638072e-01] [-6.93396986e-01 1.16372287e+00 -6.93250358e-01 ... 9.31130886e-01 -6.44429386e-01 -1.67862177e-01] ... [ 1.09886336e+00 9.04390991e-01 4.72295970e-01 ... 2.11524069e-01 9.35064703e-02 4.50418323e-01] [-7.92417109e-01 -1.44530579e-01 1.01016057e+00 ... -1.37442541e+00 3.23018730e-01 1.25723314e+00] [ 1.05508542e+00 7.48783529e-01 2.09345508e+00 ... -1.90271467e-01 -1.35956419e+00 -1.40528083e+00]] [[ 1.01285768e+00 1.66658640e+00 -3.01766324e+00 ... 1.29072058e+00 -2.60253195e-02 2.51023459e+00] [ 1.26300550e+00 1.27054632e+00 8.82796824e-01 ... -1.64861584e+00 7.90198594e-02 4.82630074e-01] [ 1.01216435e+00 -5.81163764e-01 1.35362506e-01 ... 1.51143539e+00 3.57200801e-01 7.88131416e-01] ... [ 1.93141267e-01 -1.65971351e+00 -1.44804597e+00 ... 1.20709836e+00 -8.01256299e-03 -1.03530741e+00] [ 5.50837209e-03 4.61833775e-02 1.35461104e+00 ... -4.01843458e-01 -8.77135038e-01 1.07602179e+00] [ 6.28116846e-01 -2.30578914e-01 9.75959957e-01 ... 6.44052327e-01 1.63265184e-01 -1.08959508e+00]] [[ 1.42250335e+00 1.78964674e-01 -6.98594078e-02 ... -3.80159885e-01 -1.81225324e+00 -1.67913580e+00] [ 8.97933096e-02 -2.04194784e-01 -8.78491759e-01 ... -4.43284929e-01 3.19176793e-01 -1.08503759e+00] [-1.67575330e-01 -7.30000913e-01 -2.12901950e-01 ... 1.09667182e+00 -1.31099081e+00 -5.30105829e-01] ... [-8.66825879e-01 -5.58736920e-01 -9.80353177e-01 ... 3.57797116e-01 -8.81479144e-01 -1.25903964e+00] [ 1.93965435e+00 8.68296206e-01 -8.64883900e-01 ... 1.38001156e+00 -3.88631582e-01 -3.13614644e-02] [ 7.76769400e-01 6.80356398e-02 -3.80604565e-01 ... 
-1.11104584e+00 -2.41955653e-01 -3.86807591e-01]]] [[[-3.17366743e+00 8.69847596e-01 -1.35458696e+00 ... 4.99398649e-01 5.78078568e-01 8.23611915e-01] [ 8.99429083e-01 -2.65862137e-01 2.44418859e+00 ... -4.65292558e-02 5.83107769e-01 -2.28784055e-01] [ 1.12739071e-01 -1.68933201e+00 -5.15018106e-01 ... 6.40295744e-01 -1.08338463e+00 1.40234268e+00] ... [-6.99801803e-01 4.87120867e-01 9.89766181e-01 ... -3.42454910e-01 -1.06769815e-01 2.56047368e+00] [ 3.50723386e-01 2.22033310e+00 3.85933548e-01 ... -1.15205896e+00 6.09553277e-01 5.82607508e-01] [-2.34973192e+00 6.22817390e-02 5.92610836e-01 ... 2.93438256e-01 -5.95322609e-01 7.59549141e-01]] [[-3.64855945e-01 -1.07763624e+00 -1.46278873e-01 ... -1.40954971e+00 -8.79866302e-01 -7.47937411e-02] [-1.43947387e+00 7.11702257e-02 -7.54325628e-01 ... -5.97416878e-01 4.98962440e-02 8.72590303e-01] [-9.71475661e-01 1.30516708e+00 1.08604097e+00 ... -3.49725820e-02 2.26297212e+00 -5.78802764e-01] ... [-1.39037859e+00 -1.26303172e+00 6.73211515e-02 ... 1.31695777e-01 -1.34179854e+00 1.06571567e+00] [ 2.71750718e-01 -5.77212453e-01 1.41323662e+00 ... 6.12908125e-01 1.53151584e+00 1.11520159e+00] [-8.91993046e-01 5.07101119e-01 -2.31622800e-01 ... -1.55437660e+00 6.59652889e-01 1.20028198e+00]] [[-1.75098169e+00 -1.15219104e+00 2.89881408e-01 ... -9.07303870e-01 -1.20246530e+00 -6.01795435e-01] [-3.87511909e-01 1.20590672e-01 -6.55733705e-01 ... 2.89169371e-01 1.35711980e+00 1.55476257e-01] [-2.66044378e-01 2.22427800e-01 -2.84923781e-02 ... 5.66553295e-01 -1.88787669e-01 1.84478593e+00] ... [ 3.33847851e-01 1.31177485e+00 2.51004577e-01 ... -1.27202898e-01 -7.36369491e-01 -3.02575026e-02] [ 4.44799066e-01 1.35951769e-03 1.03122687e+00 ... 1.55632591e+00 -2.51825690e-01 5.83875775e-01] [ 1.60688847e-01 1.52936339e+00 -9.22485590e-01 ... 1.59432137e+00 -3.64925027e-01 -7.96032310e-01]] ... [[ 3.68462682e-01 -1.14521444e+00 3.00521970e-01 ... 
-3.04093838e-01 1.88273895e+00 4.34827685e-01] [ 1.77802312e+00 -2.20462918e+00 5.05542040e-01 ... 5.81298232e-01 1.21269429e+00 -1.52670896e+00] [-2.40325332e+00 -4.81323361e-01 -1.09610820e+00 ... 1.62751937e+00 1.03204119e+00 1.45545959e+00] ... [-2.66372800e-01 -1.06717157e+00 -1.26201797e+00 ... -6.31291866e-01 -9.62706089e-01 -1.63440549e+00] [-1.40142298e+00 -6.58424973e-01 -1.62601507e+00 ... -5.81512749e-01 4.79079604e-01 -2.80041248e-01] [-1.90986717e+00 -6.71061635e-01 -1.15226650e+00 ... 1.87011397e+00 2.13032216e-01 -1.17697823e+00]] [[-4.06193197e-01 4.87235188e-01 -1.35959852e+00 ... 3.65721643e-01 7.15191126e-01 8.16011131e-01] [ 5.67916334e-01 6.01769499e-02 -2.38973916e-01 ... 1.51148558e+00 8.07598531e-01 -8.22554708e-01] [-4.36571389e-01 -6.74805716e-02 -6.97288036e-01 ... 7.57833719e-01 5.79468429e-01 8.09559107e-01] ... [ 8.06519628e-01 -1.16724372e+00 7.12055743e-01 ... 4.67114709e-03 -1.65556109e+00 1.99691153e+00] [ 2.24125671e+00 2.73457915e-01 -4.82058018e-01 ... -1.07015252e+00 -2.48211932e+00 -2.31964976e-01] [-2.11991683e-01 1.05780828e+00 1.42054808e+00 ... 5.57921708e-01 -9.35131967e-01 -5.57480037e-01]] [[ 8.00637364e-01 3.63053203e-01 4.21252280e-01 ... 2.89902210e-01 5.79993069e-01 -7.58254409e-01] [ 1.23710120e+00 -7.44409978e-01 1.17744637e+00 ... 1.81964934e+00 -1.23401558e+00 -5.31042106e-02] [-1.37761462e+00 -1.74757689e-01 -6.33580744e-01 ... -1.03628290e+00 9.06038523e-01 1.17145514e+00] ... [-9.37404931e-01 3.22969198e-01 1.57508802e+00 ... 1.92798018e+00 2.69249797e-01 4.90770280e-01] [-1.23435684e-01 2.22771838e-01 2.25358412e-01 ... 1.30625272e+00 2.56698638e-01 1.15686381e+00] [-7.22263977e-02 3.16266298e-01 -8.52926731e-01 ... -1.56471550e-01 -7.25811303e-01 -1.38195896e+00]]]]]; ov_res: [[[[[-2.86036432e-01 -1.07301378e+00 -6.73609257e-01 ... 9.62049723e-01 4.31815207e-01 -2.70735264e-01] [ 1.20714474e+00 1.36485589e+00 -2.96295375e-01 ... 
1.47553611e+00 -4.74835306e-01 1.99355707e-01] [ 7.46628582e-01 -1.11519158e-01 4.09376025e-01 ... -1.79094446e+00 8.54500458e-02 1.64577806e+00] ... [-1.86705732e+00 -1.55761689e-01 5.47306597e-01 ... -2.97169805e-01 -4.79737312e-01 -2.46489763e-01] [-5.65173090e-01 -1.95608184e-01 -1.10015476e+00 ... -3.22875053e-01 5.68982542e-01 7.73634493e-01] [-5.85130692e-01 -5.74029386e-01 1.69174647e+00 ... 5.75565577e-01 1.58759260e+00 1.96521163e+00]] [[ 1.02867472e+00 2.77301431e+00 -6.83626056e-01 ... 3.77100945e-01 -7.53734529e-01 9.32176352e-01] [-9.56607223e-01 -6.42532349e-01 -2.46915311e-01 ... -1.09435368e+00 -1.02244473e+00 1.79446328e+00] [-9.72106829e-02 -6.13369107e-01 2.94237494e-01 ... -4.42182600e-01 4.95113969e-01 -9.35632646e-01] ... [ 2.83481210e-01 1.46704602e+00 1.11376154e+00 ... -1.88827229e+00 1.29357648e+00 5.57028921e-03] [-1.12552392e+00 2.36399031e+00 -1.10048521e+00 ... -1.10701501e+00 -1.05620432e+00 6.64827526e-01] [-6.70180023e-01 9.95330155e-01 -1.29375041e-01 ... 1.17582016e-01 -2.54537702e-01 3.77369821e-01]] [[ 3.22631866e-01 -1.46481168e+00 -1.76814830e+00 ... -5.54854870e-02 5.89945734e-01 -8.27878594e-01] [-4.19734836e-01 -1.24579811e+00 1.03664808e-01 ... -1.79704025e-01 2.80698031e-01 -1.16046286e+00] [ 1.12947129e-01 -2.05132023e-01 6.14297509e-01 ... 9.06736493e-01 -4.76455599e-01 1.98522002e-01] ... [ 5.27285576e-01 -1.10497868e+00 -2.14693308e+00 ... -2.22456336e+00 -7.14259207e-01 -2.10178241e-01] [ 3.22171736e+00 -9.21389103e-01 -1.21291630e-01 ... 9.28033829e-01 -6.14227951e-01 6.14592314e-01] [ 1.57039237e+00 -1.13453412e+00 8.06318521e-02 ... 1.87243557e+00 -1.11315274e+00 4.20902252e-01]] ... [[-5.96962869e-01 -2.27886036e-01 -1.54516503e-01 ... 2.12438092e-01 9.44968164e-02 -7.41364658e-01] [ 1.18241596e+00 6.71187118e-02 1.19379830e+00 ... 1.91486895e+00 -9.63450074e-01 1.07960798e-01] [ 5.21764994e-01 -2.04939589e-01 -4.14099753e-01 ... 2.85988390e-01 7.82539323e-02 5.97586989e-01] ... 
[-9.90724683e-01 3.53133619e-01 6.88469052e-01 ... 1.89673305e-01 1.06393611e+00 -5.06681502e-01] [ 1.78824055e+00 -2.00260594e-01 2.00100017e+00 ... -1.35883141e+00 -2.22603992e-01 1.19483805e+00] [-2.20283651e+00 9.53411758e-01 2.69133002e-01 ... -1.80413795e+00 6.93684161e-01 -3.03684354e-01]] [[ 1.70684743e+00 -9.52357173e-01 -6.33570790e-01 ... -1.13224208e+00 -2.27742791e-02 -1.69246256e+00] [ 4.78577524e-01 2.34041786e+00 2.20383000e+00 ... -1.42446756e+00 -9.40360785e-01 -2.88194448e-01] [-3.70334923e-01 8.98911893e-01 -7.74456978e-01 ... 3.67252231e-02 1.60036600e+00 -6.61739707e-01] ... [ 1.46254086e+00 -1.67715108e+00 -1.59083748e+00 ... -5.51908493e-01 -5.00013053e-01 -2.16096640e+00] [-8.86064172e-01 -2.04157448e+00 -4.44428951e-01 ... -6.84265077e-01 -5.90144396e-01 -1.49958873e+00] [ 1.03786141e-01 -4.14615542e-01 -1.30323970e+00 ... -9.42940056e-01 1.09167171e+00 2.16807628e+00]] [[ 9.02133584e-01 -5.38778961e-01 -2.12112442e-01 ... -8.74217629e-01 -4.81113642e-01 1.16191351e+00] [ 6.06592298e-01 1.95115542e+00 1.42312098e+00 ... -1.53346539e+00 -2.24048108e-01 2.22663060e-01] [ 8.66154671e-01 7.05235079e-02 -1.02311647e+00 ... -8.53488088e-01 -2.36245587e-01 5.57124376e-01] ... [ 9.79719937e-01 4.15632397e-01 2.45352006e+00 ... 1.03188396e+00 8.41557264e-01 -1.03061788e-01] [ 7.43667305e-01 -2.32493713e-01 -7.07414925e-01 ... -1.58151972e+00 -6.99891806e-01 -4.97260332e-01] [-2.06574798e+00 2.17475510e+00 8.90542746e-01 ... 1.60401538e-01 -2.12591553e+00 -5.73091924e-01]]] [[[-8.10773551e-01 -1.74334550e+00 5.83185792e-01 ... -1.16778779e+00 -7.66352832e-01 1.71967137e+00] [-8.64291847e-01 -4.04465795e-01 6.14443347e-02 ... 9.99422491e-01 -1.53555489e+00 -5.83630741e-01] [ 4.03624177e-01 5.44712961e-01 -2.21113533e-01 ... -1.92188874e-01 -9.47219312e-01 -7.99512714e-02] ... [-7.77190089e-01 -1.24722695e+00 1.52982759e+00 ... 1.06141761e-01 -2.37960890e-02 1.32678998e+00] [-9.13021207e-01 1.94690478e+00 6.92754090e-01 ... 
2.16389465e+00 1.14502227e+00 -4.88063432e-02] [-2.98436582e-01 8.00872743e-02 7.20398366e-01 ... 1.89097524e-02 -8.64540637e-01 6.47618413e-01]] [[-2.60029364e+00 -2.63195664e-01 -1.50223136e+00 ... 4.74073648e-01 6.34558201e-02 -5.46014309e-01] [ 1.74168622e+00 -4.30040024e-02 -1.12004852e+00 ... -1.26600516e+00 -4.37347084e-01 -1.31270796e-01] [-7.41674960e-01 -1.17459345e+00 4.61504340e-01 ... -1.74790776e+00 -7.88105428e-01 -4.50408250e-01] ... [ 1.81482518e+00 -6.62690401e-01 -6.09070182e-01 ... 1.06798744e+00 -5.57682633e-01 1.82821512e-01] [-6.79490805e-01 -3.69503908e-02 1.33323148e-01 ... -1.25932753e+00 8.15875232e-01 5.03592372e-01] [-9.63275880e-02 -5.36824055e-02 8.01825225e-01 ... -2.94007540e-01 -1.62832594e+00 -1.03256893e+00]] [[-3.80447149e-01 -2.96887428e-01 1.32175875e+00 ... -1.21502113e+00 1.80664968e+00 -1.46597373e+00] [-1.94311991e-01 2.04347491e+00 1.23682094e+00 ... 3.75149995e-01 -6.68637693e-01 -8.08435977e-01] [-6.76824972e-02 2.84686893e-01 -7.56676137e-01 ... -7.35293508e-01 -7.75203586e-01 -5.48606575e-01] ... [ 8.85005116e-01 2.19778657e+00 7.06823409e-01 ... 4.68509704e-01 -1.93277597e-02 -6.24910355e-01] [ 2.30414346e-01 3.05445820e-01 -1.13252550e-02 ... -1.42866805e-01 8.42905641e-01 2.48700663e-01] [-2.11048335e-01 -3.29672962e-01 1.16231453e+00 ... 1.33240327e-01 1.59354579e+00 -1.17188787e+00]] ... [[-3.05940002e-01 -1.87303871e-01 -4.42571729e-01 ... 9.79165018e-01 3.11300009e-02 5.86066991e-02] [-1.28231883e+00 5.99440217e-01 1.26651490e+00 ... -2.34196961e-01 -8.03557038e-01 6.20638072e-01] [-6.93396986e-01 1.16372287e+00 -6.93250358e-01 ... 9.31130886e-01 -6.44429386e-01 -1.67862177e-01] ... [ 1.09886336e+00 9.04390991e-01 4.72295970e-01 ... 2.11524069e-01 9.35064703e-02 4.50418323e-01] [-7.92417109e-01 -1.44530579e-01 1.01016057e+00 ... -1.37442541e+00 3.23018730e-01 1.25723314e+00] [ 1.05508542e+00 7.48783529e-01 2.09345508e+00 ... 
-1.90271467e-01 -1.35956419e+00 -1.40528083e+00]] [[ 1.01285768e+00 1.66658640e+00 -3.01766324e+00 ... 1.29072058e+00 -2.60253195e-02 2.51023459e+00] [ 1.26300550e+00 1.27054632e+00 8.82796824e-01 ... -1.64861584e+00 7.90198594e-02 4.82630074e-01] [ 1.01216435e+00 -5.81163764e-01 1.35362506e-01 ... 1.51143539e+00 3.57200801e-01 7.88131416e-01] ... [ 1.93141267e-01 -1.65971351e+00 -1.44804597e+00 ... 1.20709836e+00 -8.01256299e-03 -1.03530741e+00] [ 5.50837209e-03 4.61833775e-02 1.35461104e+00 ... -4.01843458e-01 -8.77135038e-01 1.07602179e+00] [ 6.28116846e-01 -2.30578914e-01 9.75959957e-01 ... 6.44052327e-01 1.63265184e-01 -1.08959508e+00]] [[ 1.42250335e+00 1.78964674e-01 -6.98594078e-02 ... -3.80159885e-01 -1.81225324e+00 -1.67913580e+00] [ 8.97933096e-02 -2.04194784e-01 -8.78491759e-01 ... -4.43284929e-01 3.19176793e-01 -1.08503759e+00] [-1.67575330e-01 -7.30000913e-01 -2.12901950e-01 ... 1.09667182e+00 -1.31099081e+00 -5.30105829e-01] ... [-8.66825879e-01 -5.58736920e-01 -9.80353177e-01 ... 3.57797116e-01 -8.81479144e-01 -1.25903964e+00] [ 1.93965435e+00 8.68296206e-01 -8.64883900e-01 ... 1.38001156e+00 -3.88631582e-01 -3.13614644e-02] [ 7.76769400e-01 6.80356398e-02 -3.80604565e-01 ... -1.11104584e+00 -2.41955653e-01 -3.86807591e-01]]] [[[-3.17366743e+00 8.69847596e-01 -1.35458696e+00 ... 4.99398649e-01 5.78078568e-01 8.23611915e-01] [ 8.99429083e-01 -2.65862137e-01 2.44418859e+00 ... -4.65292558e-02 5.83107769e-01 -2.28784055e-01] [ 1.12739071e-01 -1.68933201e+00 -5.15018106e-01 ... 6.40295744e-01 -1.08338463e+00 1.40234268e+00] ... [-6.99801803e-01 4.87120867e-01 9.89766181e-01 ... -3.42454910e-01 -1.06769815e-01 2.56047368e+00] [ 3.50723386e-01 2.22033310e+00 3.85933548e-01 ... -1.15205896e+00 6.09553277e-01 5.82607508e-01] [-2.34973192e+00 6.22817390e-02 5.92610836e-01 ... 2.93438256e-01 -5.95322609e-01 7.59549141e-01]] [[-3.64855945e-01 -1.07763624e+00 -1.46278873e-01 ... 
-1.40954971e+00 -8.79866302e-01 -7.47937411e-02] [-1.43947387e+00 7.11702257e-02 -7.54325628e-01 ... -5.97416878e-01 4.98962440e-02 8.72590303e-01] [-9.71475661e-01 1.30516708e+00 1.08604097e+00 ... -3.49725820e-02 2.26297212e+00 -5.78802764e-01] ... [-1.39037859e+00 -1.26303172e+00 6.73211515e-02 ... 1.31695777e-01 -1.34179854e+00 1.06571567e+00] [ 2.71750718e-01 -5.77212453e-01 1.41323662e+00 ... 6.12908125e-01 1.53151584e+00 1.11520159e+00] [-8.91993046e-01 5.07101119e-01 -2.31622800e-01 ... -1.55437660e+00 6.59652889e-01 1.20028198e+00]] [[-1.75098169e+00 -1.15219104e+00 2.89881408e-01 ... -9.07303870e-01 -1.20246530e+00 -6.01795435e-01] [-3.87511909e-01 1.20590672e-01 -6.55733705e-01 ... 2.89169371e-01 1.35711980e+00 1.55476257e-01] [-2.66044378e-01 2.22427800e-01 -2.84923781e-02 ... 5.66553295e-01 -1.88787669e-01 1.84478593e+00] ... [ 3.33847851e-01 1.31177485e+00 2.51004577e-01 ... -1.27202898e-01 -7.36369491e-01 -3.02575026e-02] [ 4.44799066e-01 1.35951769e-03 1.03122687e+00 ... 1.55632591e+00 -2.51825690e-01 5.83875775e-01] [ 1.60688847e-01 1.52936339e+00 -9.22485590e-01 ... 1.59432137e+00 -3.64925027e-01 -7.96032310e-01]] ... [[ 3.68462682e-01 -1.14521444e+00 3.00521970e-01 ... -3.04093838e-01 1.88273895e+00 4.34827685e-01] [ 1.77802312e+00 -2.20462918e+00 5.05542040e-01 ... 5.81298232e-01 1.21269429e+00 -1.52670896e+00] [-2.40325332e+00 -4.81323361e-01 -1.09610820e+00 ... 1.62751937e+00 1.03204119e+00 1.45545959e+00] ... [-2.66372800e-01 -1.06717157e+00 -1.26201797e+00 ... -6.31291866e-01 -9.62706089e-01 -1.63440549e+00] [-1.40142298e+00 -6.58424973e-01 -1.62601507e+00 ... -5.81512749e-01 4.79079604e-01 -2.80041248e-01] [-1.90986717e+00 -6.71061635e-01 -1.15226650e+00 ... 1.87011397e+00 2.13032216e-01 -1.17697823e+00]] [[-4.06193197e-01 4.87235188e-01 -1.35959852e+00 ... 3.65721643e-01 7.15191126e-01 8.16011131e-01] [ 5.67916334e-01 6.01769499e-02 -2.38973916e-01 ... 
1.51148558e+00 8.07598531e-01 -8.22554708e-01] [-4.36571389e-01 -6.74805716e-02 -6.97288036e-01 ... 7.57833719e-01 5.79468429e-01 8.09559107e-01] ... [ 8.06519628e-01 -1.16724372e+00 7.12055743e-01 ... 4.67114709e-03 -1.65556109e+00 1.99691153e+00] [ 2.24125671e+00 2.73457915e-01 -4.82058018e-01 ... -1.07015252e+00 -2.48211932e+00 -2.31964976e-01] [-2.11991683e-01 1.05780828e+00 1.42054808e+00 ... 5.57921708e-01 -9.35131967e-01 -5.57480037e-01]] [[ 8.00637364e-01 3.63053203e-01 4.21252280e-01 ... 2.89902210e-01 5.79993069e-01 -7.58254409e-01] [ 1.23710120e+00 -7.44409978e-01 1.17744637e+00 ... 1.81964934e+00 -1.23401558e+00 -5.31042106e-02] [-1.37761462e+00 -1.74757689e-01 -6.33580744e-01 ... -1.03628290e+00 9.06038523e-01 1.17145514e+00] ... [-9.37404931e-01 3.22969198e-01 1.57508802e+00 ... 1.92798018e+00 2.69249797e-01 4.90770280e-01] [-1.23435684e-01 2.22771838e-01 2.25358412e-01 ... 1.30625272e+00 2.56698638e-01 1.15686381e+00] [-7.22263977e-02 3.16266298e-01 -8.52926731e-01 ... -1.56471550e-01 -7.25811303e-01 -1.38195896e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(-1, -2, -1, -2, -1, -2) - mode:circular - value:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5587.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="circular"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[-1, -2, -1, -2, -1, -2]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[ 1.88392252e-01 -3.73001724e-01 -1.11531079e+00 ... 9.78975296e-01 -2.29745895e-01 -6.62822425e-01] [-6.63368404e-01 -9.52759027e-01 -3.53962444e-02 ... -1.82713962e+00 1.18488634e+00 -2.61516850e-02] [-3.75958443e-01 -1.12654187e-01 -3.44170541e-01 ... 1.93064463e+00 9.33024809e-02 5.06873369e-01] ... [ 3.00080813e-02 2.92076856e-01 3.23204815e-01 ... -2.01442361e-01 -1.39193904e+00 -1.24779367e+00] [ 8.27962235e-02 6.83466733e-01 -1.02294791e+00 ... 1.83045113e+00 1.39227521e+00 -1.41005173e-01] [-7.78749585e-01 -1.39141321e-01 -1.45158291e+00 ... 4.06992361e-02 -2.33290642e-01 -8.78689364e-02]] [[ 1.47915125e+00 2.47398749e-01 1.76615143e+00 ... -2.47725055e-01 -1.65257883e+00 2.29818836e-01] [-1.19801119e-01 8.82438481e-01 9.79171932e-01 ... -9.50569808e-01 -8.97225916e-01 8.54423285e-01] [-1.24918175e+00 1.10623074e+00 9.28316057e-01 ... -1.05835104e+00 1.23946285e+00 6.53971374e-01] ... [ 2.76146173e+00 -5.26645958e-01 -4.55190837e-01 ... -6.05456047e-02 -2.53850490e-01 -1.23538804e+00] [ 1.13175921e-01 -8.15308094e-02 1.78763568e+00 ... 5.39321423e-01 -3.90816361e-01 -5.28077930e-02] [ 3.91066790e-01 1.46620661e-01 -5.37511587e-01 ... -3.97892565e-01 -3.35366696e-01 -1.45512283e+00]] [[ 3.98002654e-01 -1.78229868e+00 -1.01158485e-01 ... -2.08762813e+00 -1.23761629e-03 4.54705954e-01] [ 2.33178616e+00 1.99075115e+00 2.09553540e-01 ... -7.12199926e-01 2.03732312e-01 1.24502206e+00] [ 7.44365513e-01 2.16594309e-01 1.02389663e-01 ... 
-6.76889777e-01 2.21763611e+00 7.48748839e-01] ... [-1.37230888e-01 7.63812721e-01 3.50016451e+00 ... 1.70095587e+00 3.17459196e-01 -4.65620905e-01] [ 1.58388272e-01 -5.27530789e-01 1.17213023e+00 ... 1.43578172e-01 2.53036022e-01 4.82551336e-01] [ 2.85976291e-01 1.68248308e+00 6.83246195e-01 ... 1.27211046e+00 8.79993439e-01 -3.15667272e-01]] ... [[ 8.58122289e-01 -7.39015102e-01 -2.65985101e-01 ... -9.52644527e-01 4.40574624e-02 -6.37733579e-01] [ 6.73581421e-01 -1.11397803e+00 1.74527019e-02 ... 1.05710208e+00 1.22631438e-01 -3.86754364e-01] [ 4.63661075e-01 -1.45883191e+00 2.53405499e+00 ... 1.47201371e+00 3.47255796e-01 -1.23511899e+00] ... [-3.02891433e-01 -5.14682174e-01 1.07311070e+00 ... 1.36080682e+00 3.86158079e-01 1.58698857e+00] [-4.73967612e-01 2.23557413e-01 1.36155891e+00 ... 2.48465210e-01 -1.13649619e+00 4.75837618e-01] [ 5.08910775e-01 2.66659832e+00 1.24085891e+00 ... 7.65940398e-02 1.08249240e-01 -1.27880156e-01]] [[-1.14610517e+00 -7.17715919e-01 8.26370299e-01 ... -4.57538038e-01 -5.44704616e-01 1.16706407e+00] [-4.81794089e-01 5.90620376e-02 -9.17419136e-01 ... 1.15781522e+00 3.78349781e-01 9.02343869e-01] [ 1.44294202e+00 -1.53971091e-01 9.24503148e-01 ... -8.44514310e-01 -4.63772923e-01 -8.42678726e-01] ... [ 1.02153623e+00 7.48907208e-01 -1.29871309e+00 ... 6.11260355e-01 7.03002632e-01 8.34902048e-01] [ 4.69396025e-01 -1.98980063e-01 1.50906372e+00 ... -8.88581097e-01 -8.61924827e-01 -1.17365432e+00] [ 1.00418948e-01 -3.71414870e-01 -6.76550806e-01 ... -1.32532215e+00 9.04200077e-01 -8.57140869e-02]] [[-6.11189663e-01 -4.54026252e-01 1.05434442e+00 ... 7.96110868e-01 1.79954737e-01 2.63999534e+00] [-7.70260811e-01 -5.30801356e-01 7.17496634e-01 ... -5.50724626e-01 9.66273129e-01 -8.20988953e-01] [ 1.45341325e+00 -1.43758237e-01 2.59762764e-01 ... -3.39735210e-01 1.05974853e+00 8.62763941e-01] ... [-4.93358880e-01 -1.23851490e+00 -1.34973437e-01 ... 
2.06754226e-02 -5.19818306e-01 -4.42578597e-03] [ 1.61837757e-01 -1.34594411e-01 -8.28956068e-01 ... 1.82602275e-02 -1.34709907e+00 3.80028844e-01] [-8.20355237e-01 -1.20161963e+00 6.07701302e-01 ... -1.02822721e+00 3.09288740e-01 1.11748910e+00]]] [[[ 1.76515710e+00 -1.51544893e+00 1.75883496e+00 ... 2.94348001e-01 3.52778643e-01 1.39733911e+00] [-1.05391026e+00 -1.25519478e+00 -1.58325219e+00 ... 1.00263500e+00 3.19731593e-01 -8.10961366e-01] [-2.25175768e-01 -2.74775382e-02 2.38682151e-01 ... 1.56930074e-01 -4.54760879e-01 -2.11201501e+00] ... [ 1.35797286e+00 -3.54664326e-01 -8.89436305e-01 ... -8.52169394e-01 -2.51105782e-02 -1.22515452e+00] [ 1.27659664e-01 3.94568771e-01 -1.57612562e+00 ... 9.76278931e-02 -9.38515902e-01 -4.29011196e-01] [-5.05788922e-01 -9.20873940e-01 4.09559876e-01 ... -3.90024126e-01 2.65232116e-01 1.27020586e+00]] [[-1.08645582e+00 1.31683397e+00 -9.42592502e-01 ... 4.85135555e-01 -9.68692958e-01 -6.73352480e-02] [-1.49664849e-01 1.29958892e+00 7.44144917e-01 ... -7.58426309e-01 1.49147499e+00 2.85338074e-01] [ 9.43801224e-01 -2.13898063e-01 -1.13581920e+00 ... 9.65719163e-01 6.39937460e-01 1.12366247e+00] ... [ 6.08830154e-01 -2.78887540e-01 -5.09772897e-01 ... -9.33825731e-01 9.81034636e-01 1.01718640e+00] [-4.07474220e-01 -7.72100985e-01 -9.41959545e-02 ... -1.14714444e+00 -1.17913997e+00 1.66564727e+00] [-1.66861498e+00 -6.86633825e-01 -4.94136125e-01 ... -5.18535852e-01 9.69296455e-01 -9.47469354e-01]] [[ 6.68039024e-01 -5.98501623e-01 -7.07689345e-01 ... 6.03845082e-02 1.29259253e+00 -5.36549985e-01] [-1.07850432e+00 5.34827530e-01 -2.30283931e-01 ... 1.14793487e-01 1.29685831e+00 2.10230398e+00] [-8.87938380e-01 -4.91999477e-01 -1.50829196e-01 ... -3.52035612e-01 9.07132685e-01 -6.64238274e-01] ... [-1.40077543e+00 1.13455641e+00 1.33501545e-01 ... -5.70984557e-03 9.53192472e-01 -1.73339796e+00] [-2.50922889e-01 -9.53581750e-01 3.66726875e-01 ... 
6.33392274e-01 1.15570712e+00 1.53491199e+00] [ 1.71803534e+00 1.32396951e-01 -7.44588435e-01 ... 5.95553219e-02 5.94336212e-01 -7.72126973e-01]] ... [[-1.19610155e+00 -1.02066267e+00 -1.42395806e+00 ... -4.54238951e-01 1.26941606e-01 8.74389052e-01] [-5.27985156e-01 3.03681225e-01 -1.70612311e+00 ... -1.97607863e+00 -1.65270710e+00 3.92849684e-01] [ 1.08807814e+00 -9.51066613e-01 5.91001511e-01 ... 4.29818898e-01 -6.47805333e-01 -5.73763132e-01] ... [ 1.01012278e+00 -1.34378338e+00 -1.36774421e+00 ... 2.43556201e-02 7.90060997e-01 1.38430274e+00] [-3.73419859e-02 -4.09576595e-02 -2.07978272e+00 ... -3.20023596e-01 1.56907952e+00 -9.94402617e-02] [-5.20134449e-01 -1.15314209e+00 -2.63221450e-02 ... -1.79640722e+00 1.19375646e+00 -1.84282899e-01]] [[ 1.41795814e-01 7.83008814e-01 2.24038029e+00 ... 3.21328312e-01 9.59752738e-01 -6.61115825e-01] [-2.49458337e-03 -9.16328281e-02 2.45347142e+00 ... -1.26231802e+00 -8.10291469e-01 -9.24403191e-01] [-1.05708957e+00 5.23318470e-01 2.00288105e+00 ... -8.04735184e-01 -6.24837399e-01 -4.32783999e-02] ... [ 2.53596187e-01 -3.15698886e+00 -9.66919065e-01 ... -2.45140895e-01 -3.31440598e-01 -1.15061510e+00] [ 1.35300243e+00 1.45834193e-01 8.87590349e-01 ... 4.86367345e-01 9.98962164e-01 1.13858092e+00] [-8.23576331e-01 1.30533993e+00 2.98812658e-01 ... -7.14774489e-01 4.76879567e-01 -3.47575992e-01]] [[ 1.01924276e+00 -4.39314693e-02 -1.69010293e+00 ... 6.86097860e-01 7.08691299e-01 3.96718442e-01] [-5.81203341e-01 2.93991178e-01 -2.64174044e-01 ... 8.15492868e-01 -6.37702882e-01 -8.90421808e-01] [ 1.30034462e-01 -1.03115952e+00 -1.42839432e+00 ... 9.81084704e-02 -8.41434062e-01 -3.36067259e-01] ... [-5.99394262e-01 8.42353463e-01 4.44201261e-01 ... 1.15474534e+00 -3.28266084e-01 8.49898577e-01] [ 1.43965352e+00 -1.10769105e+00 1.09255147e+00 ... -6.16963983e-01 -1.42091250e+00 4.09264900e-02] [-1.37130630e+00 2.44432896e-01 -1.76576781e+00 ... 
-7.76203871e-01 1.93163663e-01 1.25521612e+00]]] [[[ 1.98054742e-02 -2.93884367e-01 9.40672696e-01 ... -5.78646481e-01 -3.84966403e-01 8.26142669e-01] [-6.97473884e-01 -4.66963887e-01 -1.16758227e+00 ... 1.04534459e+00 1.49308693e+00 -6.51774824e-01] [-5.36168754e-01 1.14380322e-01 -9.96134400e-01 ... -8.05121183e-01 -1.10058522e+00 -1.20180234e-01] ... [-6.31159067e-01 2.17795300e+00 3.15952361e-01 ... -1.14690161e+00 2.12630853e-02 -1.23892832e+00] [-4.23218101e-01 -6.56120777e-01 1.41693699e+00 ... 4.20460254e-01 -9.81788814e-01 1.04922724e+00] [ 1.20829952e+00 9.40771341e-01 -1.47122025e-01 ... -1.13116264e+00 -4.57199253e-02 2.41803932e+00]] [[-4.34448630e-01 5.76181412e-01 1.01845808e-01 ... 4.14387207e-04 -3.88836674e-02 -5.71905553e-01] [ 1.13435137e+00 -9.55371141e-01 7.98688293e-01 ... 6.11653924e-01 9.17137623e-01 6.42723083e-01] [ 6.78676426e-01 1.02606535e+00 -1.79341242e-01 ... -9.51250851e-01 -4.15296555e-02 -4.43374813e-01] ... [-7.32451200e-01 -1.24944501e-01 -7.30930567e-01 ... -7.08581388e-01 1.62808228e+00 -1.82207692e+00] [ 4.54640836e-02 5.88726640e-01 1.64933932e+00 ... 9.49590683e-01 8.53334725e-01 -5.52649081e-01] [ 3.35206568e-01 -3.22852194e-01 2.10696054e+00 ... 1.76031399e+00 -3.64724815e-01 7.64604628e-01]] [[ 8.80379498e-01 -1.08244598e+00 -3.09960663e-01 ... -2.05862331e+00 -6.03025146e-02 1.19711554e+00] [ 2.01754236e+00 -1.18837667e+00 1.76302516e+00 ... 3.90910536e-01 2.10580897e+00 -1.46913874e+00] [-1.25493026e+00 5.37375450e-01 4.79142100e-01 ... -6.11069560e-01 -3.14976245e-01 9.08745825e-01] ... [-7.20178962e-01 9.63372350e-01 1.54454231e+00 ... 2.14175820e-01 1.04581618e+00 1.74709439e-01] [-2.04608607e+00 -7.54724324e-01 3.37663084e-01 ... 9.40498412e-01 3.40174615e-01 -5.17291784e-01] [-1.67643547e-01 1.65725291e+00 2.65952259e-01 ... 1.65144593e-01 -8.76836956e-01 8.49443793e-01]] ... [[ 4.15232301e-01 -4.73329961e-01 -8.06379855e-01 ... 
1.98592663e+00 -7.68161118e-01 7.36086845e-01] [-1.30251944e-01 5.24920821e-01 -5.75232863e-01 ... -2.70017564e-01 -9.83580470e-01 7.53591120e-01] [-8.06067526e-01 1.18359670e-01 7.51761734e-01 ... 4.50438619e-01 -2.42924437e-01 -2.66704440e-01] ... [ 7.67868102e-01 -6.48061752e-01 -1.04777527e+00 ... -6.95170224e-01 6.33562088e-01 -1.53019512e-02] [ 2.02407658e-01 -1.23321402e+00 -1.46143937e+00 ... 7.75261998e-01 6.18404627e-01 -5.49730122e-01] [ 2.00280237e+00 1.42474437e+00 -4.59197536e-02 ... 1.28479636e+00 -1.10105467e+00 1.90393209e+00]] [[-1.66193449e+00 2.37685585e+00 1.62642252e+00 ... 1.59434862e-02 2.68212914e-01 4.84448403e-01] [ 1.78408280e-01 2.20958328e+00 -1.11606205e+00 ... 8.75588477e-01 -6.83471143e-01 -1.20190048e+00] [-1.81829739e+00 1.17698765e+00 7.14023769e-01 ... -2.50313729e-01 -1.09872736e-01 -1.85544324e+00] ... [-7.32166886e-01 9.33625042e-01 1.00683451e-01 ... 1.56458330e+00 6.94639027e-01 1.69649303e+00] [ 3.07446778e-01 4.16125894e-01 -1.46320355e+00 ... 5.41788377e-02 9.60561633e-01 -1.15810680e+00] [-5.66702150e-02 1.81801128e+00 5.63364804e-01 ... 1.10766053e+00 8.40765774e-01 5.44643700e-01]] [[ 6.32323444e-01 2.93476820e-01 -1.34360924e-01 ... -5.98613918e-01 1.38074800e-01 -5.20491421e-01] [ 2.32183859e-01 2.93123126e-02 1.26225305e+00 ... 2.96551764e-01 2.96491832e-01 9.13049877e-01] [-2.32652202e-01 -6.17414057e-01 -9.17938352e-02 ... 1.03628731e+00 1.76198140e-01 -8.98573875e-01] ... [ 1.28463495e+00 2.34438330e-02 -2.07348347e+00 ... -4.57384676e-01 -3.75568643e-02 7.55075812e-01] [-1.20199311e+00 -4.90679592e-01 -9.44445014e-01 ... 9.02846694e-01 6.23171926e-01 1.93703187e+00] [-1.51990354e-01 -2.91078061e-01 -6.88783884e-01 ... 1.47546268e+00 -7.44072616e-01 8.60391259e-01]]]]]; ov_res: [[[[[ 1.88392252e-01 -3.73001724e-01 -1.11531079e+00 ... 9.78975296e-01 -2.29745895e-01 -6.62822425e-01] [-6.63368404e-01 -9.52759027e-01 -3.53962444e-02 ... 
-1.82713962e+00 1.18488634e+00 -2.61516850e-02] [-3.75958443e-01 -1.12654187e-01 -3.44170541e-01 ... 1.93064463e+00 9.33024809e-02 5.06873369e-01] ... [ 3.00080813e-02 2.92076856e-01 3.23204815e-01 ... -2.01442361e-01 -1.39193904e+00 -1.24779367e+00] [ 8.27962235e-02 6.83466733e-01 -1.02294791e+00 ... 1.83045113e+00 1.39227521e+00 -1.41005173e-01] [-7.78749585e-01 -1.39141321e-01 -1.45158291e+00 ... 4.06992361e-02 -2.33290642e-01 -8.78689364e-02]] [[ 1.47915125e+00 2.47398749e-01 1.76615143e+00 ... -2.47725055e-01 -1.65257883e+00 2.29818836e-01] [-1.19801119e-01 8.82438481e-01 9.79171932e-01 ... -9.50569808e-01 -8.97225916e-01 8.54423285e-01] [-1.24918175e+00 1.10623074e+00 9.28316057e-01 ... -1.05835104e+00 1.23946285e+00 6.53971374e-01] ... [ 2.76146173e+00 -5.26645958e-01 -4.55190837e-01 ... -6.05456047e-02 -2.53850490e-01 -1.23538804e+00] [ 1.13175921e-01 -8.15308094e-02 1.78763568e+00 ... 5.39321423e-01 -3.90816361e-01 -5.28077930e-02] [ 3.91066790e-01 1.46620661e-01 -5.37511587e-01 ... -3.97892565e-01 -3.35366696e-01 -1.45512283e+00]] [[ 3.98002654e-01 -1.78229868e+00 -1.01158485e-01 ... -2.08762813e+00 -1.23761629e-03 4.54705954e-01] [ 2.33178616e+00 1.99075115e+00 2.09553540e-01 ... -7.12199926e-01 2.03732312e-01 1.24502206e+00] [ 7.44365513e-01 2.16594309e-01 1.02389663e-01 ... -6.76889777e-01 2.21763611e+00 7.48748839e-01] ... [-1.37230888e-01 7.63812721e-01 3.50016451e+00 ... 1.70095587e+00 3.17459196e-01 -4.65620905e-01] [ 1.58388272e-01 -5.27530789e-01 1.17213023e+00 ... 1.43578172e-01 2.53036022e-01 4.82551336e-01] [ 2.85976291e-01 1.68248308e+00 6.83246195e-01 ... 1.27211046e+00 8.79993439e-01 -3.15667272e-01]] ... [[ 8.58122289e-01 -7.39015102e-01 -2.65985101e-01 ... -9.52644527e-01 4.40574624e-02 -6.37733579e-01] [ 6.73581421e-01 -1.11397803e+00 1.74527019e-02 ... 1.05710208e+00 1.22631438e-01 -3.86754364e-01] [ 4.63661075e-01 -1.45883191e+00 2.53405499e+00 ... 1.47201371e+00 3.47255796e-01 -1.23511899e+00] ... 
[-3.02891433e-01 -5.14682174e-01 1.07311070e+00 ... 1.36080682e+00 3.86158079e-01 1.58698857e+00] [-4.73967612e-01 2.23557413e-01 1.36155891e+00 ... 2.48465210e-01 -1.13649619e+00 4.75837618e-01] [ 5.08910775e-01 2.66659832e+00 1.24085891e+00 ... 7.65940398e-02 1.08249240e-01 -1.27880156e-01]] [[-1.14610517e+00 -7.17715919e-01 8.26370299e-01 ... -4.57538038e-01 -5.44704616e-01 1.16706407e+00] [-4.81794089e-01 5.90620376e-02 -9.17419136e-01 ... 1.15781522e+00 3.78349781e-01 9.02343869e-01] [ 1.44294202e+00 -1.53971091e-01 9.24503148e-01 ... -8.44514310e-01 -4.63772923e-01 -8.42678726e-01] ... [ 1.02153623e+00 7.48907208e-01 -1.29871309e+00 ... 6.11260355e-01 7.03002632e-01 8.34902048e-01] [ 4.69396025e-01 -1.98980063e-01 1.50906372e+00 ... -8.88581097e-01 -8.61924827e-01 -1.17365432e+00] [ 1.00418948e-01 -3.71414870e-01 -6.76550806e-01 ... -1.32532215e+00 9.04200077e-01 -8.57140869e-02]] [[-6.11189663e-01 -4.54026252e-01 1.05434442e+00 ... 7.96110868e-01 1.79954737e-01 2.63999534e+00] [-7.70260811e-01 -5.30801356e-01 7.17496634e-01 ... -5.50724626e-01 9.66273129e-01 -8.20988953e-01] [ 1.45341325e+00 -1.43758237e-01 2.59762764e-01 ... -3.39735210e-01 1.05974853e+00 8.62763941e-01] ... [-4.93358880e-01 -1.23851490e+00 -1.34973437e-01 ... 2.06754226e-02 -5.19818306e-01 -4.42578597e-03] [ 1.61837757e-01 -1.34594411e-01 -8.28956068e-01 ... 1.82602275e-02 -1.34709907e+00 3.80028844e-01] [-8.20355237e-01 -1.20161963e+00 6.07701302e-01 ... -1.02822721e+00 3.09288740e-01 1.11748910e+00]]] [[[ 1.76515710e+00 -1.51544893e+00 1.75883496e+00 ... 2.94348001e-01 3.52778643e-01 1.39733911e+00] [-1.05391026e+00 -1.25519478e+00 -1.58325219e+00 ... 1.00263500e+00 3.19731593e-01 -8.10961366e-01] [-2.25175768e-01 -2.74775382e-02 2.38682151e-01 ... 1.56930074e-01 -4.54760879e-01 -2.11201501e+00] ... [ 1.35797286e+00 -3.54664326e-01 -8.89436305e-01 ... -8.52169394e-01 -2.51105782e-02 -1.22515452e+00] [ 1.27659664e-01 3.94568771e-01 -1.57612562e+00 ... 
9.76278931e-02 -9.38515902e-01 -4.29011196e-01] [-5.05788922e-01 -9.20873940e-01 4.09559876e-01 ... -3.90024126e-01 2.65232116e-01 1.27020586e+00]] [[-1.08645582e+00 1.31683397e+00 -9.42592502e-01 ... 4.85135555e-01 -9.68692958e-01 -6.73352480e-02] [-1.49664849e-01 1.29958892e+00 7.44144917e-01 ... -7.58426309e-01 1.49147499e+00 2.85338074e-01] [ 9.43801224e-01 -2.13898063e-01 -1.13581920e+00 ... 9.65719163e-01 6.39937460e-01 1.12366247e+00] ... [ 6.08830154e-01 -2.78887540e-01 -5.09772897e-01 ... -9.33825731e-01 9.81034636e-01 1.01718640e+00] [-4.07474220e-01 -7.72100985e-01 -9.41959545e-02 ... -1.14714444e+00 -1.17913997e+00 1.66564727e+00] [-1.66861498e+00 -6.86633825e-01 -4.94136125e-01 ... -5.18535852e-01 9.69296455e-01 -9.47469354e-01]] [[ 6.68039024e-01 -5.98501623e-01 -7.07689345e-01 ... 6.03845082e-02 1.29259253e+00 -5.36549985e-01] [-1.07850432e+00 5.34827530e-01 -2.30283931e-01 ... 1.14793487e-01 1.29685831e+00 2.10230398e+00] [-8.87938380e-01 -4.91999477e-01 -1.50829196e-01 ... -3.52035612e-01 9.07132685e-01 -6.64238274e-01] ... [-1.40077543e+00 1.13455641e+00 1.33501545e-01 ... -5.70984557e-03 9.53192472e-01 -1.73339796e+00] [-2.50922889e-01 -9.53581750e-01 3.66726875e-01 ... 6.33392274e-01 1.15570712e+00 1.53491199e+00] [ 1.71803534e+00 1.32396951e-01 -7.44588435e-01 ... 5.95553219e-02 5.94336212e-01 -7.72126973e-01]] ... [[-1.19610155e+00 -1.02066267e+00 -1.42395806e+00 ... -4.54238951e-01 1.26941606e-01 8.74389052e-01] [-5.27985156e-01 3.03681225e-01 -1.70612311e+00 ... -1.97607863e+00 -1.65270710e+00 3.92849684e-01] [ 1.08807814e+00 -9.51066613e-01 5.91001511e-01 ... 4.29818898e-01 -6.47805333e-01 -5.73763132e-01] ... [ 1.01012278e+00 -1.34378338e+00 -1.36774421e+00 ... 2.43556201e-02 7.90060997e-01 1.38430274e+00] [-3.73419859e-02 -4.09576595e-02 -2.07978272e+00 ... -3.20023596e-01 1.56907952e+00 -9.94402617e-02] [-5.20134449e-01 -1.15314209e+00 -2.63221450e-02 ... 
-1.79640722e+00 1.19375646e+00 -1.84282899e-01]] [[ 1.41795814e-01 7.83008814e-01 2.24038029e+00 ... 3.21328312e-01 9.59752738e-01 -6.61115825e-01] [-2.49458337e-03 -9.16328281e-02 2.45347142e+00 ... -1.26231802e+00 -8.10291469e-01 -9.24403191e-01] [-1.05708957e+00 5.23318470e-01 2.00288105e+00 ... -8.04735184e-01 -6.24837399e-01 -4.32783999e-02] ... [ 2.53596187e-01 -3.15698886e+00 -9.66919065e-01 ... -2.45140895e-01 -3.31440598e-01 -1.15061510e+00] [ 1.35300243e+00 1.45834193e-01 8.87590349e-01 ... 4.86367345e-01 9.98962164e-01 1.13858092e+00] [-8.23576331e-01 1.30533993e+00 2.98812658e-01 ... -7.14774489e-01 4.76879567e-01 -3.47575992e-01]] [[ 1.01924276e+00 -4.39314693e-02 -1.69010293e+00 ... 6.86097860e-01 7.08691299e-01 3.96718442e-01] [-5.81203341e-01 2.93991178e-01 -2.64174044e-01 ... 8.15492868e-01 -6.37702882e-01 -8.90421808e-01] [ 1.30034462e-01 -1.03115952e+00 -1.42839432e+00 ... 9.81084704e-02 -8.41434062e-01 -3.36067259e-01] ... [-5.99394262e-01 8.42353463e-01 4.44201261e-01 ... 1.15474534e+00 -3.28266084e-01 8.49898577e-01] [ 1.43965352e+00 -1.10769105e+00 1.09255147e+00 ... -6.16963983e-01 -1.42091250e+00 4.09264900e-02] [-1.37130630e+00 2.44432896e-01 -1.76576781e+00 ... -7.76203871e-01 1.93163663e-01 1.25521612e+00]]] [[[ 1.98054742e-02 -2.93884367e-01 9.40672696e-01 ... -5.78646481e-01 -3.84966403e-01 8.26142669e-01] [-6.97473884e-01 -4.66963887e-01 -1.16758227e+00 ... 1.04534459e+00 1.49308693e+00 -6.51774824e-01] [-5.36168754e-01 1.14380322e-01 -9.96134400e-01 ... -8.05121183e-01 -1.10058522e+00 -1.20180234e-01] ... [-6.31159067e-01 2.17795300e+00 3.15952361e-01 ... -1.14690161e+00 2.12630853e-02 -1.23892832e+00] [-4.23218101e-01 -6.56120777e-01 1.41693699e+00 ... 4.20460254e-01 -9.81788814e-01 1.04922724e+00] [ 1.20829952e+00 9.40771341e-01 -1.47122025e-01 ... -1.13116264e+00 -4.57199253e-02 2.41803932e+00]] [[-4.34448630e-01 5.76181412e-01 1.01845808e-01 ... 
4.14387207e-04 -3.88836674e-02 -5.71905553e-01] [ 1.13435137e+00 -9.55371141e-01 7.98688293e-01 ... 6.11653924e-01 9.17137623e-01 6.42723083e-01] [ 6.78676426e-01 1.02606535e+00 -1.79341242e-01 ... -9.51250851e-01 -4.15296555e-02 -4.43374813e-01] ... [-7.32451200e-01 -1.24944501e-01 -7.30930567e-01 ... -7.08581388e-01 1.62808228e+00 -1.82207692e+00] [ 4.54640836e-02 5.88726640e-01 1.64933932e+00 ... 9.49590683e-01 8.53334725e-01 -5.52649081e-01] [ 3.35206568e-01 -3.22852194e-01 2.10696054e+00 ... 1.76031399e+00 -3.64724815e-01 7.64604628e-01]] [[ 8.80379498e-01 -1.08244598e+00 -3.09960663e-01 ... -2.05862331e+00 -6.03025146e-02 1.19711554e+00] [ 2.01754236e+00 -1.18837667e+00 1.76302516e+00 ... 3.90910536e-01 2.10580897e+00 -1.46913874e+00] [-1.25493026e+00 5.37375450e-01 4.79142100e-01 ... -6.11069560e-01 -3.14976245e-01 9.08745825e-01] ... [-7.20178962e-01 9.63372350e-01 1.54454231e+00 ... 2.14175820e-01 1.04581618e+00 1.74709439e-01] [-2.04608607e+00 -7.54724324e-01 3.37663084e-01 ... 9.40498412e-01 3.40174615e-01 -5.17291784e-01] [-1.67643547e-01 1.65725291e+00 2.65952259e-01 ... 1.65144593e-01 -8.76836956e-01 8.49443793e-01]] ... [[ 4.15232301e-01 -4.73329961e-01 -8.06379855e-01 ... 1.98592663e+00 -7.68161118e-01 7.36086845e-01] [-1.30251944e-01 5.24920821e-01 -5.75232863e-01 ... -2.70017564e-01 -9.83580470e-01 7.53591120e-01] [-8.06067526e-01 1.18359670e-01 7.51761734e-01 ... 4.50438619e-01 -2.42924437e-01 -2.66704440e-01] ... [ 7.67868102e-01 -6.48061752e-01 -1.04777527e+00 ... -6.95170224e-01 6.33562088e-01 -1.53019512e-02] [ 2.02407658e-01 -1.23321402e+00 -1.46143937e+00 ... 7.75261998e-01 6.18404627e-01 -5.49730122e-01] [ 2.00280237e+00 1.42474437e+00 -4.59197536e-02 ... 1.28479636e+00 -1.10105467e+00 1.90393209e+00]] [[-1.66193449e+00 2.37685585e+00 1.62642252e+00 ... 1.59434862e-02 2.68212914e-01 4.84448403e-01] [ 1.78408280e-01 2.20958328e+00 -1.11606205e+00 ... 
8.75588477e-01 -6.83471143e-01 -1.20190048e+00] [-1.81829739e+00 1.17698765e+00 7.14023769e-01 ... -2.50313729e-01 -1.09872736e-01 -1.85544324e+00] ... [-7.32166886e-01 9.33625042e-01 1.00683451e-01 ... 1.56458330e+00 6.94639027e-01 1.69649303e+00] [ 3.07446778e-01 4.16125894e-01 -1.46320355e+00 ... 5.41788377e-02 9.60561633e-01 -1.15810680e+00] [-5.66702150e-02 1.81801128e+00 5.63364804e-01 ... 1.10766053e+00 8.40765774e-01 5.44643700e-01]] [[ 6.32323444e-01 2.93476820e-01 -1.34360924e-01 ... -5.98613918e-01 1.38074800e-01 -5.20491421e-01] [ 2.32183859e-01 2.93123126e-02 1.26225305e+00 ... 2.96551764e-01 2.96491832e-01 9.13049877e-01] [-2.32652202e-01 -6.17414057e-01 -9.17938352e-02 ... 1.03628731e+00 1.76198140e-01 -8.98573875e-01] ... [ 1.28463495e+00 2.34438330e-02 -2.07348347e+00 ... -4.57384676e-01 -3.75568643e-02 7.55075812e-01] [-1.20199311e+00 -4.90679592e-01 -9.44445014e-01 ... 9.02846694e-01 6.23171926e-01 1.93703187e+00] [-1.51990354e-01 -2.91078061e-01 -6.88783884e-01 ... 1.47546268e+00 -7.44072616e-01 8.60391259e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(-5, -8, 0, 0, 0, 0) - mode:circular - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5590.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="circular"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[-5, -8, 0, 0, 0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[-5.78900814e-01 -1.77209353e+00 2.76522458e-01 -1.95033920e+00 6.94850147e-01] [ 1.84361088e+00 4.87173468e-01 1.09487331e+00 6.62527740e-01 -6.94730878e-01] [-7.49500632e-01 1.25648642e+00 -3.15683991e-01 -2.30499431e-01 1.02408922e+00] ... [-8.59691739e-01 7.19830930e-01 -9.27382767e-01 -1.06578910e+00 6.93651199e-01] [ 1.10732138e+00 2.26630616e+00 1.80020437e-01 1.73362151e-01 -8.98729682e-01] [-1.67683125e+00 1.80875659e-01 7.43123531e-01 1.87048361e-01 -1.14911127e+00]] [[ 1.06742275e+00 1.65337287e-02 2.04156533e-01 2.69335198e+00 5.38290918e-01] [ 1.23313034e+00 8.78996789e-01 -3.71024758e-01 1.08220838e-01 -6.59087241e-01] [ 2.94586513e-02 8.11012685e-01 -3.18983465e-01 -4.81824428e-01 4.46440168e-02] ... [-2.85004663e+00 7.66313970e-01 1.18217766e+00 -9.64762330e-01 -1.12061411e-01] [-3.12169611e-01 -1.86643507e-02 -1.53507698e+00 -8.49974573e-01 8.34199727e-01] [-1.25618517e+00 -1.93023884e+00 -8.78406823e-01 -7.99892545e-01 7.36909032e-01]] [[ 3.68816972e-01 2.02063155e+00 -1.07897782e+00 8.85298073e-01 3.15788478e-01] [-2.43884057e-01 2.11501420e-01 1.21042705e+00 1.59504876e-01 1.67019999e+00] [ 3.85274261e-01 4.59155023e-01 1.96798861e-01 6.62208423e-02 -4.71127748e-01] ... [ 3.82349908e-01 4.46128584e-02 6.12955034e-01 -9.82080847e-02 1.19200671e+00] [ 7.18082607e-01 9.03140843e-01 2.38721654e-01 1.08519506e+00 -6.05389997e-02] [ 4.19043988e-01 7.20073134e-02 2.05724567e-01 -1.48677921e+00 8.56503174e-02]] ... 
[[-1.75520390e-01 6.29888892e-01 -3.69071215e-01 -1.94389477e-01 2.49553591e-01] [-1.36074722e+00 3.92892957e-01 -2.54895613e-02 -3.11476678e-01 2.62472034e-01] [ 4.97366369e-01 -4.21428561e-01 1.55197918e+00 2.53223091e-01 -8.25713336e-01] ... [-9.10641134e-01 -4.58295107e-01 4.92602944e-01 8.48717749e-01 -6.87998906e-02] [ 1.46121287e+00 6.01742387e-01 1.76750338e+00 6.84259295e-01 -1.45395923e+00] [ 1.94443560e+00 1.53593600e+00 2.09277821e+00 -1.36479235e+00 1.79193258e-01]] [[-2.19357657e+00 -1.00977540e-01 1.54883194e+00 3.62728089e-01 8.58164668e-01] [ 8.59792173e-01 -9.73874092e-01 -1.09596074e+00 -8.35216939e-01 7.44514883e-01] [-1.50281847e+00 1.79606915e+00 -1.23392034e+00 -4.89249587e-01 2.57841974e-01] ... [-6.22519553e-01 5.58140457e-01 -3.50849956e-01 1.98258686e+00 3.47969621e-01] [ 1.02670169e+00 -1.53975415e+00 -2.30544305e+00 2.92099230e-02 -1.15694594e+00] [-1.15774393e+00 1.39476681e+00 -8.51850688e-01 -1.86126602e+00 9.95251119e-01]] [[ 2.65017915e+00 1.43985599e-01 6.39401600e-02 -4.99938190e-01 -1.50135469e+00] [ 2.66270846e-01 4.70827632e-02 -2.35753119e-01 -6.82787523e-02 -2.41184711e+00] [ 1.71421552e+00 -1.46029317e+00 -1.20458388e+00 -1.61650944e+00 -1.46197617e+00] ... [-2.46224833e+00 -1.16120704e-01 -1.08602989e+00 -3.75677556e-01 1.57952929e+00] [-1.17370641e+00 1.69807989e-02 -1.54979670e+00 1.02924705e-01 -7.17515126e-02] [-3.22330266e-01 1.35303438e+00 -1.79663181e+00 -3.57923418e-01 -2.61087680e+00]]] [[[ 8.81967366e-01 -7.71652222e-01 5.37295163e-01 2.39247791e-02 -7.49939740e-01] [-2.89045662e-01 -1.41384557e-01 -6.35200739e-01 -1.30723381e+00 -3.37991826e-02] [ 4.16740239e-01 7.35510707e-01 2.80239433e-01 8.00219476e-01 -1.98080987e-01] ... 
[-9.99294817e-01 -4.91953820e-01 9.43906963e-01 9.12997425e-01 -6.61332130e-01] [-2.91659594e-01 -5.77813864e-01 4.05527689e-02 1.35983229e+00 1.94468307e+00] [ 3.27016801e-01 4.21371371e-01 6.53757572e-01 -2.52394617e-01 1.90591887e-02]] [[ 4.09549803e-01 4.74183410e-01 2.45950389e+00 1.65425754e+00 -1.26746047e+00] [-3.42975229e-01 -1.07159400e+00 7.84874916e-01 -2.81572074e-01 9.32239473e-01] [-1.30699289e+00 6.40191734e-01 8.98781717e-01 -1.38495159e+00 1.02411687e+00] ... [-9.43339407e-01 -5.28133214e-01 -4.16003168e-01 -1.11453235e-01 -8.37879956e-01] [-2.85844326e-01 1.90903753e-01 1.76593006e-01 -9.99962270e-01 1.50172329e+00] [ 1.77375877e+00 -3.46796572e-01 -5.61960116e-02 7.20430553e-01 -4.42525178e-01]] [[-1.41127729e+00 -2.86691993e-01 1.12031974e-01 -1.67971241e+00 -9.75263417e-01] [ 1.04190767e+00 -1.34564400e+00 6.13993108e-01 7.37122238e-01 2.74318337e-01] [-3.37776244e-01 -1.41663706e+00 -8.82723391e-01 -8.26849461e-01 -1.38166571e+00] ... [ 1.34584427e+00 -2.44301334e-01 7.70643413e-01 -1.38570678e+00 3.11201692e-01] [-2.29423419e-01 1.05187452e+00 -8.61379057e-02 -9.66621697e-01 1.47429740e+00] [-2.78593445e+00 2.21441194e-01 1.38981092e+00 1.18421447e+00 -3.65240604e-01]] ... [[ 3.69310170e-01 -1.31309402e+00 -8.95118713e-01 -8.44254047e-02 3.29805285e-01] [ 1.06657040e+00 6.76431417e-01 6.35346532e-01 1.55635834e+00 2.99251348e-01] [-2.23544151e-01 9.04008269e-01 8.05516422e-01 1.59378183e+00 -2.12657166e+00] ... [ 3.69045019e-01 1.59354782e+00 -1.49256146e+00 -8.95534456e-01 5.78603446e-01] [ 9.07464087e-01 -1.65160406e+00 -6.97496176e-01 1.97685465e-01 1.59770757e-01] [-9.31771278e-01 8.56188416e-01 2.96783781e+00 7.21208274e-01 -2.60851800e-01]] [[ 2.93092996e-01 -6.47949576e-01 -1.09183812e+00 -7.26120591e-01 5.50175309e-01] [ 1.48120534e+00 -2.61966538e+00 1.55071008e+00 -1.12430465e+00 1.01472414e+00] [ 1.82459486e+00 6.08211517e-01 3.37336719e-01 1.73219550e+00 -1.94602326e-01] ... 
[ 7.80899644e-01 5.94907068e-02 -8.23344290e-01 -1.30595374e+00 -4.13493097e-01] [-1.48541951e+00 -5.26594460e-01 -3.40305185e+00 1.90517589e-01 -6.37099206e-01] [ 1.39177179e+00 -8.31199050e-01 1.64563406e+00 1.44268215e+00 -3.89986128e-01]] [[-9.30610538e-01 2.12314749e+00 1.37436807e+00 1.07389104e+00 6.53245151e-01] [-4.93026376e-01 -5.62602758e-01 -2.29620647e+00 6.41113222e-01 -4.59027022e-01] [-6.06351256e-01 1.17597973e+00 -8.91682863e-01 -5.16485989e-01 1.44465402e-01] ... [-3.18819433e-01 2.04522282e-01 -2.04087162e+00 -3.03826600e-01 1.47218037e+00] [-1.11115539e+00 2.69857258e-01 5.55794001e-01 6.74141049e-01 7.13855028e-01] [-2.58584678e-01 -8.83552909e-01 3.76332104e-01 3.86981696e-01 7.37703860e-01]]] [[[ 6.53011382e-01 -3.82206172e-01 -3.98789138e-01 5.72662532e-01 -1.49785066e+00] [-5.10860741e-01 -1.88746870e+00 -1.53589857e+00 3.64183187e-01 3.68092090e-01] [ 2.00444412e+00 -2.14711934e-01 1.95046616e+00 5.16099274e-01 2.28295207e-01] ... [-2.18873286e+00 1.18902862e+00 7.08142459e-01 -6.33832395e-01 9.48906481e-01] [ 9.65379119e-01 -7.49924839e-01 5.38661033e-02 -2.03453034e-01 1.10946703e+00] [ 4.48469967e-01 -5.44217110e-01 -9.65495110e-01 -4.40294333e-02 -9.14582491e-01]] [[ 6.90900147e-01 5.63925147e-01 -1.06851697e+00 -3.88958156e-01 -2.51238036e+00] [-1.99334645e+00 2.84038752e-01 1.81734741e-01 7.56055295e-01 -2.83308119e-01] [-3.53682905e-01 4.14142847e-01 -2.11198473e+00 5.42559683e-01 7.28860795e-01] ... [ 1.19159269e+00 3.66346329e-01 -1.12699246e+00 -1.34500146e+00 -1.29127300e+00] [ 2.89160176e-03 1.28613889e-01 -5.97287714e-01 -7.39718318e-01 5.36443770e-01] [ 6.14295721e-01 6.26652176e-03 4.53325331e-01 8.02242100e-01 3.84278685e-01]] [[ 5.06427467e-01 -3.75757933e-01 -3.75966728e-01 -5.73771298e-01 -1.89294547e-01] [-8.96200538e-01 -3.22784334e-01 1.31362751e-01 -1.53636485e-01 -1.42830479e+00] [ 6.19663596e-01 -6.45358622e-01 -1.55622983e+00 -6.00564599e-01 -7.19763115e-02] ... 
[ 9.94091034e-01 7.56787479e-01 1.32070184e+00 -9.59755301e-01 7.86436379e-01] [-3.55302393e-01 -3.02720778e-02 -9.36984122e-01 9.85365659e-02 1.82519838e-01] [-6.71854019e-01 -8.77862517e-03 -1.74029723e-01 7.00598583e-02 7.62587428e-01]] ... [[ 1.25305879e+00 -3.94663125e-01 -9.70700920e-01 1.05428588e+00 1.51205793e-01] [ 9.58199441e-01 8.60884666e-01 1.72130942e-01 -8.71370256e-01 7.91581869e-01] [ 4.67133284e-01 -1.15358186e+00 -4.62464869e-01 1.47333130e-01 3.01610172e-01] ... [-4.46630791e-02 -1.35150269e-01 -6.38623178e-01 7.09315985e-02 -7.04937950e-02] [ 5.84717989e-01 1.39778984e+00 2.07371521e+00 1.78367245e+00 -1.67122558e-01] [-8.31343532e-01 -7.87919521e-01 -2.22474873e-01 7.95415714e-02 -7.05168962e-01]] [[ 3.07426006e-01 1.20909882e+00 -4.46330607e-01 2.33780131e-01 -7.71714151e-01] [-2.94381738e-01 1.47701359e+00 9.61315811e-01 1.90667880e+00 2.36678883e-01] [-9.04892087e-01 1.07440710e+00 2.04043493e-01 1.40930867e+00 -4.04471278e-01] ... [ 8.43913674e-01 -5.69059312e-01 -2.05637097e+00 -2.93329000e-01 4.21181679e-01] [ 4.22098756e-01 1.10013402e+00 4.46287811e-01 -9.97139633e-01 -2.36499101e-01] [-2.91428328e-01 6.50845110e-01 -5.36397517e-01 -1.36356831e+00 -1.73559368e-01]] [[ 9.30906653e-01 -1.75503910e-01 -3.84099871e-01 -1.86668873e-01 7.83934593e-01] [ 2.39012074e-02 8.88308227e-01 -1.48107576e+00 -2.78293699e-01 3.93387288e-01] [-1.18444026e+00 -2.60171533e+00 -7.87797630e-01 7.25134388e-02 7.92640090e-01] ... [-3.03050548e-01 -5.98680377e-01 4.37725186e-01 2.08067465e+00 2.04058170e+00] [-1.64426339e+00 3.33492190e-01 1.22455038e-01 1.41779557e-01 -3.63090575e-01] [-1.78438485e+00 1.27733409e+00 -1.64691225e-01 1.17231321e+00 1.01332676e+00]]]]]; ov_res: [[[[[-5.78900814e-01 -1.77209353e+00 2.76522458e-01 -1.95033920e+00 6.94850147e-01] [ 1.84361088e+00 4.87173468e-01 1.09487331e+00 6.62527740e-01 -6.94730878e-01] [-7.49500632e-01 1.25648642e+00 -3.15683991e-01 -2.30499431e-01 1.02408922e+00] ... 
[-8.59691739e-01 7.19830930e-01 -9.27382767e-01 -1.06578910e+00 6.93651199e-01] [ 1.10732138e+00 2.26630616e+00 1.80020437e-01 1.73362151e-01 -8.98729682e-01] [-1.67683125e+00 1.80875659e-01 7.43123531e-01 1.87048361e-01 -1.14911127e+00]] [[ 1.06742275e+00 1.65337287e-02 2.04156533e-01 2.69335198e+00 5.38290918e-01] [ 1.23313034e+00 8.78996789e-01 -3.71024758e-01 1.08220838e-01 -6.59087241e-01] [ 2.94586513e-02 8.11012685e-01 -3.18983465e-01 -4.81824428e-01 4.46440168e-02] ... [-2.85004663e+00 7.66313970e-01 1.18217766e+00 -9.64762330e-01 -1.12061411e-01] [-3.12169611e-01 -1.86643507e-02 -1.53507698e+00 -8.49974573e-01 8.34199727e-01] [-1.25618517e+00 -1.93023884e+00 -8.78406823e-01 -7.99892545e-01 7.36909032e-01]] [[ 3.68816972e-01 2.02063155e+00 -1.07897782e+00 8.85298073e-01 3.15788478e-01] [-2.43884057e-01 2.11501420e-01 1.21042705e+00 1.59504876e-01 1.67019999e+00] [ 3.85274261e-01 4.59155023e-01 1.96798861e-01 6.62208423e-02 -4.71127748e-01] ... [ 3.82349908e-01 4.46128584e-02 6.12955034e-01 -9.82080847e-02 1.19200671e+00] [ 7.18082607e-01 9.03140843e-01 2.38721654e-01 1.08519506e+00 -6.05389997e-02] [ 4.19043988e-01 7.20073134e-02 2.05724567e-01 -1.48677921e+00 8.56503174e-02]] ... [[-1.75520390e-01 6.29888892e-01 -3.69071215e-01 -1.94389477e-01 2.49553591e-01] [-1.36074722e+00 3.92892957e-01 -2.54895613e-02 -3.11476678e-01 2.62472034e-01] [ 4.97366369e-01 -4.21428561e-01 1.55197918e+00 2.53223091e-01 -8.25713336e-01] ... [-9.10641134e-01 -4.58295107e-01 4.92602944e-01 8.48717749e-01 -6.87998906e-02] [ 1.46121287e+00 6.01742387e-01 1.76750338e+00 6.84259295e-01 -1.45395923e+00] [ 1.94443560e+00 1.53593600e+00 2.09277821e+00 -1.36479235e+00 1.79193258e-01]] [[-2.19357657e+00 -1.00977540e-01 1.54883194e+00 3.62728089e-01 8.58164668e-01] [ 8.59792173e-01 -9.73874092e-01 -1.09596074e+00 -8.35216939e-01 7.44514883e-01] [-1.50281847e+00 1.79606915e+00 -1.23392034e+00 -4.89249587e-01 2.57841974e-01] ... 
[-6.22519553e-01 5.58140457e-01 -3.50849956e-01 1.98258686e+00 3.47969621e-01] [ 1.02670169e+00 -1.53975415e+00 -2.30544305e+00 2.92099230e-02 -1.15694594e+00] [-1.15774393e+00 1.39476681e+00 -8.51850688e-01 -1.86126602e+00 9.95251119e-01]] [[ 2.65017915e+00 1.43985599e-01 6.39401600e-02 -4.99938190e-01 -1.50135469e+00] [ 2.66270846e-01 4.70827632e-02 -2.35753119e-01 -6.82787523e-02 -2.41184711e+00] [ 1.71421552e+00 -1.46029317e+00 -1.20458388e+00 -1.61650944e+00 -1.46197617e+00] ... [-2.46224833e+00 -1.16120704e-01 -1.08602989e+00 -3.75677556e-01 1.57952929e+00] [-1.17370641e+00 1.69807989e-02 -1.54979670e+00 1.02924705e-01 -7.17515126e-02] [-3.22330266e-01 1.35303438e+00 -1.79663181e+00 -3.57923418e-01 -2.61087680e+00]]] [[[ 8.81967366e-01 -7.71652222e-01 5.37295163e-01 2.39247791e-02 -7.49939740e-01] [-2.89045662e-01 -1.41384557e-01 -6.35200739e-01 -1.30723381e+00 -3.37991826e-02] [ 4.16740239e-01 7.35510707e-01 2.80239433e-01 8.00219476e-01 -1.98080987e-01] ... [-9.99294817e-01 -4.91953820e-01 9.43906963e-01 9.12997425e-01 -6.61332130e-01] [-2.91659594e-01 -5.77813864e-01 4.05527689e-02 1.35983229e+00 1.94468307e+00] [ 3.27016801e-01 4.21371371e-01 6.53757572e-01 -2.52394617e-01 1.90591887e-02]] [[ 4.09549803e-01 4.74183410e-01 2.45950389e+00 1.65425754e+00 -1.26746047e+00] [-3.42975229e-01 -1.07159400e+00 7.84874916e-01 -2.81572074e-01 9.32239473e-01] [-1.30699289e+00 6.40191734e-01 8.98781717e-01 -1.38495159e+00 1.02411687e+00] ... [-9.43339407e-01 -5.28133214e-01 -4.16003168e-01 -1.11453235e-01 -8.37879956e-01] [-2.85844326e-01 1.90903753e-01 1.76593006e-01 -9.99962270e-01 1.50172329e+00] [ 1.77375877e+00 -3.46796572e-01 -5.61960116e-02 7.20430553e-01 -4.42525178e-01]] [[-1.41127729e+00 -2.86691993e-01 1.12031974e-01 -1.67971241e+00 -9.75263417e-01] [ 1.04190767e+00 -1.34564400e+00 6.13993108e-01 7.37122238e-01 2.74318337e-01] [-3.37776244e-01 -1.41663706e+00 -8.82723391e-01 -8.26849461e-01 -1.38166571e+00] ... 
[ 1.34584427e+00 -2.44301334e-01 7.70643413e-01 -1.38570678e+00 3.11201692e-01] [-2.29423419e-01 1.05187452e+00 -8.61379057e-02 -9.66621697e-01 1.47429740e+00] [-2.78593445e+00 2.21441194e-01 1.38981092e+00 1.18421447e+00 -3.65240604e-01]] ... [[ 3.69310170e-01 -1.31309402e+00 -8.95118713e-01 -8.44254047e-02 3.29805285e-01] [ 1.06657040e+00 6.76431417e-01 6.35346532e-01 1.55635834e+00 2.99251348e-01] [-2.23544151e-01 9.04008269e-01 8.05516422e-01 1.59378183e+00 -2.12657166e+00] ... [ 3.69045019e-01 1.59354782e+00 -1.49256146e+00 -8.95534456e-01 5.78603446e-01] [ 9.07464087e-01 -1.65160406e+00 -6.97496176e-01 1.97685465e-01 1.59770757e-01] [-9.31771278e-01 8.56188416e-01 2.96783781e+00 7.21208274e-01 -2.60851800e-01]] [[ 2.93092996e-01 -6.47949576e-01 -1.09183812e+00 -7.26120591e-01 5.50175309e-01] [ 1.48120534e+00 -2.61966538e+00 1.55071008e+00 -1.12430465e+00 1.01472414e+00] [ 1.82459486e+00 6.08211517e-01 3.37336719e-01 1.73219550e+00 -1.94602326e-01] ... [ 7.80899644e-01 5.94907068e-02 -8.23344290e-01 -1.30595374e+00 -4.13493097e-01] [-1.48541951e+00 -5.26594460e-01 -3.40305185e+00 1.90517589e-01 -6.37099206e-01] [ 1.39177179e+00 -8.31199050e-01 1.64563406e+00 1.44268215e+00 -3.89986128e-01]] [[-9.30610538e-01 2.12314749e+00 1.37436807e+00 1.07389104e+00 6.53245151e-01] [-4.93026376e-01 -5.62602758e-01 -2.29620647e+00 6.41113222e-01 -4.59027022e-01] [-6.06351256e-01 1.17597973e+00 -8.91682863e-01 -5.16485989e-01 1.44465402e-01] ... [-3.18819433e-01 2.04522282e-01 -2.04087162e+00 -3.03826600e-01 1.47218037e+00] [-1.11115539e+00 2.69857258e-01 5.55794001e-01 6.74141049e-01 7.13855028e-01] [-2.58584678e-01 -8.83552909e-01 3.76332104e-01 3.86981696e-01 7.37703860e-01]]] [[[ 6.53011382e-01 -3.82206172e-01 -3.98789138e-01 5.72662532e-01 -1.49785066e+00] [-5.10860741e-01 -1.88746870e+00 -1.53589857e+00 3.64183187e-01 3.68092090e-01] [ 2.00444412e+00 -2.14711934e-01 1.95046616e+00 5.16099274e-01 2.28295207e-01] ... 
[-2.18873286e+00 1.18902862e+00 7.08142459e-01 -6.33832395e-01 9.48906481e-01] [ 9.65379119e-01 -7.49924839e-01 5.38661033e-02 -2.03453034e-01 1.10946703e+00] [ 4.48469967e-01 -5.44217110e-01 -9.65495110e-01 -4.40294333e-02 -9.14582491e-01]] [[ 6.90900147e-01 5.63925147e-01 -1.06851697e+00 -3.88958156e-01 -2.51238036e+00] [-1.99334645e+00 2.84038752e-01 1.81734741e-01 7.56055295e-01 -2.83308119e-01] [-3.53682905e-01 4.14142847e-01 -2.11198473e+00 5.42559683e-01 7.28860795e-01] ... [ 1.19159269e+00 3.66346329e-01 -1.12699246e+00 -1.34500146e+00 -1.29127300e+00] [ 2.89160176e-03 1.28613889e-01 -5.97287714e-01 -7.39718318e-01 5.36443770e-01] [ 6.14295721e-01 6.26652176e-03 4.53325331e-01 8.02242100e-01 3.84278685e-01]] [[ 5.06427467e-01 -3.75757933e-01 -3.75966728e-01 -5.73771298e-01 -1.89294547e-01] [-8.96200538e-01 -3.22784334e-01 1.31362751e-01 -1.53636485e-01 -1.42830479e+00] [ 6.19663596e-01 -6.45358622e-01 -1.55622983e+00 -6.00564599e-01 -7.19763115e-02] ... [ 9.94091034e-01 7.56787479e-01 1.32070184e+00 -9.59755301e-01 7.86436379e-01] [-3.55302393e-01 -3.02720778e-02 -9.36984122e-01 9.85365659e-02 1.82519838e-01] [-6.71854019e-01 -8.77862517e-03 -1.74029723e-01 7.00598583e-02 7.62587428e-01]] ... [[ 1.25305879e+00 -3.94663125e-01 -9.70700920e-01 1.05428588e+00 1.51205793e-01] [ 9.58199441e-01 8.60884666e-01 1.72130942e-01 -8.71370256e-01 7.91581869e-01] [ 4.67133284e-01 -1.15358186e+00 -4.62464869e-01 1.47333130e-01 3.01610172e-01] ... [-4.46630791e-02 -1.35150269e-01 -6.38623178e-01 7.09315985e-02 -7.04937950e-02] [ 5.84717989e-01 1.39778984e+00 2.07371521e+00 1.78367245e+00 -1.67122558e-01] [-8.31343532e-01 -7.87919521e-01 -2.22474873e-01 7.95415714e-02 -7.05168962e-01]] [[ 3.07426006e-01 1.20909882e+00 -4.46330607e-01 2.33780131e-01 -7.71714151e-01] [-2.94381738e-01 1.47701359e+00 9.61315811e-01 1.90667880e+00 2.36678883e-01] [-9.04892087e-01 1.07440710e+00 2.04043493e-01 1.40930867e+00 -4.04471278e-01] ... 
[ 8.43913674e-01 -5.69059312e-01 -2.05637097e+00 -2.93329000e-01 4.21181679e-01] [ 4.22098756e-01 1.10013402e+00 4.46287811e-01 -9.97139633e-01 -2.36499101e-01] [-2.91428328e-01 6.50845110e-01 -5.36397517e-01 -1.36356831e+00 -1.73559368e-01]] [[ 9.30906653e-01 -1.75503910e-01 -3.84099871e-01 -1.86668873e-01 7.83934593e-01] [ 2.39012074e-02 8.88308227e-01 -1.48107576e+00 -2.78293699e-01 3.93387288e-01] [-1.18444026e+00 -2.60171533e+00 -7.87797630e-01 7.25134388e-02 7.92640090e-01] ... [-3.03050548e-01 -5.98680377e-01 4.37725186e-01 2.08067465e+00 2.04058170e+00] [-1.64426339e+00 3.33492190e-01 1.22455038e-01 1.41779557e-01 -3.63090575e-01] [-1.78438485e+00 1.27733409e+00 -1.64691225e-01 1.17231321e+00 1.01332676e+00]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad5d[ ie_device:CPU - precision:FP32 - pads:(10, 10, 10, 10, 10, 10) - mode:circular - value:None ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5593.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="circular"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[10, 10, 10, 10, 10, 10]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[[[[-2.84210891e-01 5.72862923e-01 -1.33169639e+00 ... -8.51189733e-01 -2.84210891e-01 5.72862923e-01] [-1.23556137e+00 5.29008657e-02 2.27717578e-01 ... -1.05256939e+00 -1.23556137e+00 5.29008657e-02] [-2.35669389e-01 -4.54510748e-01 -1.30596906e-01 ... 1.46095395e+00 -2.35669389e-01 -4.54510748e-01] ... [ 8.57746363e-01 1.39959872e+00 -6.34138763e-01 ... 9.59161341e-01 8.57746363e-01 1.39959872e+00] [ 1.53815418e-01 2.14499146e-01 1.63932920e+00 ... 1.50189257e+00 1.53815418e-01 2.14499146e-01] [-4.35871542e-01 -1.39888096e+00 -4.70158428e-01 ... 8.02909791e-01 -4.35871542e-01 -1.39888096e+00]] [[-8.23822439e-01 1.41715184e-01 -1.26837468e+00 ... -8.16439927e-01 -8.23822439e-01 1.41715184e-01] [ 4.93282944e-01 1.07355499e+00 -5.31120837e-01 ... -3.20631295e-01 4.93282944e-01 1.07355499e+00] [-6.20880760e-02 -1.24031448e+00 -1.02147007e+00 ... 4.69264567e-01 -6.20880760e-02 -1.24031448e+00] ... [-1.21688592e+00 -6.37315631e-01 1.74168396e+00 ... -6.37943089e-01 -1.21688592e+00 -6.37315631e-01] [ 2.43156642e-01 -1.08095789e+00 -4.50175047e-01 ... 2.66389698e-01 2.43156642e-01 -1.08095789e+00] [ 8.54839742e-01 -4.67825346e-02 1.38159740e+00 ... -1.40421307e+00 8.54839742e-01 -4.67825346e-02]] [[ 9.48036790e-01 6.62540138e-01 2.89598703e-01 ... 7.52143681e-01 9.48036790e-01 6.62540138e-01] [ 8.55293572e-01 -9.65022802e-01 2.89658248e-01 ... -5.92309892e-01 8.55293572e-01 -9.65022802e-01] [ 1.59636283e+00 1.06462002e+00 5.26945710e-01 ... 
1.51727676e-01 1.59636283e+00 1.06462002e+00] ... [-8.33149195e-01 -9.97714221e-01 -1.18972874e+00 ... 9.37344551e-01 -8.33149195e-01 -9.97714221e-01] [-8.88220429e-01 1.65724432e+00 1.29243064e+00 ... -5.00108302e-01 -8.88220429e-01 1.65724432e+00] [-1.85594797e+00 1.88037086e+00 2.54206777e-01 ... -1.07193530e+00 -1.85594797e+00 1.88037086e+00]] ... [[-4.78810705e-02 -1.21394622e+00 -2.39533842e-01 ... 3.52396369e-01 -4.78810705e-02 -1.21394622e+00] [-3.04217428e-01 -6.37311220e-01 -4.86617070e-03 ... -5.69432259e-01 -3.04217428e-01 -6.37311220e-01] [-7.15587616e-01 8.24457645e-01 8.02882195e-01 ... 2.66772985e-01 -7.15587616e-01 8.24457645e-01] ... [ 1.26304221e+00 2.95693636e-01 -7.13777959e-01 ... 3.46386015e-01 1.26304221e+00 2.95693636e-01] [-1.04496706e+00 7.45998695e-02 5.15561521e-01 ... 2.70035148e-01 -1.04496706e+00 7.45998695e-02] [-5.33394627e-02 1.29002762e+00 3.30412030e-01 ... 3.45896035e-01 -5.33394627e-02 1.29002762e+00]] [[ 1.33343413e-01 -7.99835622e-01 -4.20503646e-01 ... 2.00789928e+00 1.33343413e-01 -7.99835622e-01] [ 1.40933120e+00 2.80540973e-01 5.71999311e-01 ... -1.31500995e+00 1.40933120e+00 2.80540973e-01] [-1.74563563e+00 3.00819665e-01 -1.07114470e+00 ... 4.16031331e-01 -1.74563563e+00 3.00819665e-01] ... [ 9.07796621e-01 1.07446587e+00 1.11126041e+00 ... -1.20693123e+00 9.07796621e-01 1.07446587e+00] [ 1.16508436e+00 3.28968018e-01 -8.15427721e-01 ... -2.18585044e-01 1.16508436e+00 3.28968018e-01] [ 7.50102460e-01 -2.17280626e-01 1.56658873e-01 ... -1.32657871e-01 7.50102460e-01 -2.17280626e-01]] [[ 1.19632971e+00 -1.02837622e+00 -6.90222383e-01 ... 1.99033475e+00 1.19632971e+00 -1.02837622e+00] [-4.88363087e-01 -2.99056061e-03 -6.97202459e-02 ... 9.43263412e-01 -4.88363087e-01 -2.99056061e-03] [-2.18368649e+00 -9.52330410e-01 -1.24147928e+00 ... -4.99307752e-01 -2.18368649e+00 -9.52330410e-01] ... [-1.40617594e-01 -4.44298595e-01 -4.72062111e-01 ... 
5.88813901e-01 -1.40617594e-01 -4.44298595e-01] [ 8.49026561e-01 -2.32565120e-01 -2.52724320e-01 ... 1.63529527e+00 8.49026561e-01 -2.32565120e-01] [ 1.33710849e+00 4.91619706e-01 -7.85124898e-02 ... -1.52732238e-01 1.33710849e+00 4.91619706e-01]]] [[[-1.29428482e+00 -5.94233990e-01 -2.66335994e-01 ... -1.44190919e+00 -1.29428482e+00 -5.94233990e-01] [-1.86723083e-01 2.55718797e-01 4.82033715e-02 ... -5.83084345e-01 -1.86723083e-01 2.55718797e-01] [ 8.62592816e-01 -9.96300936e-01 -3.78520161e-01 ... 1.47939846e-01 8.62592816e-01 -9.96300936e-01] ... [-1.10084355e-01 -1.26313174e+00 1.18558562e+00 ... 1.67597866e+00 -1.10084355e-01 -1.26313174e+00] [-5.55514455e-01 1.70025289e+00 2.84570396e-01 ... -3.11999351e-01 -5.55514455e-01 1.70025289e+00] [-1.40945792e-01 -3.02406371e-01 -6.53684258e-01 ... 1.74598849e+00 -1.40945792e-01 -3.02406371e-01]] [[ 2.04154179e-01 -1.17971075e+00 -1.09280431e+00 ... -7.87079334e-01 2.04154179e-01 -1.17971075e+00] [-1.96839988e-01 -8.19411457e-01 -9.90005314e-01 ... -2.34360173e-01 -1.96839988e-01 -8.19411457e-01] [-1.24315605e-01 1.41979098e+00 -1.12277174e+00 ... 7.64411747e-01 -1.24315605e-01 1.41979098e+00] ... [ 2.38526791e-01 -2.00835133e+00 -1.72541693e-01 ... -6.90440774e-01 2.38526791e-01 -2.00835133e+00] [ 6.10244513e-01 -3.22939545e-01 2.39831582e-01 ... -4.12026405e-01 6.10244513e-01 -3.22939545e-01] [ 6.29179835e-01 1.84746441e-02 1.01718974e+00 ... 4.69587356e-01 6.29179835e-01 1.84746441e-02]] [[-1.10101891e+00 -8.35820138e-01 7.44156659e-01 ... -8.74187946e-01 -1.10101891e+00 -8.35820138e-01] [-7.28008151e-01 -1.31533384e+00 6.46833837e-01 ... 2.11053967e-01 -7.28008151e-01 -1.31533384e+00] [ 8.17534804e-01 1.84062049e-01 -3.98342937e-01 ... -2.85641909e-01 8.17534804e-01 1.84062049e-01] ... [-2.96546876e-01 -4.00889784e-01 8.89023602e-01 ... 1.36262760e-01 -2.96546876e-01 -4.00889784e-01] [ 6.64342344e-01 6.04535878e-01 -5.46581805e-01 ... 
1.86577463e+00 6.64342344e-01 6.04535878e-01] [ 7.42781281e-01 -9.34874833e-01 1.01087594e+00 ... -1.18025792e+00 7.42781281e-01 -9.34874833e-01]] ... [[-5.77772319e-01 1.08069611e+00 -1.13243842e+00 ... 1.14801219e-02 -5.77772319e-01 1.08069611e+00] [-7.17500508e-01 1.00439680e+00 -4.58276063e-01 ... 1.20063472e+00 -7.17500508e-01 1.00439680e+00] [-1.22549498e+00 -8.02201852e-02 1.83723962e+00 ... 9.29495096e-02 -1.22549498e+00 -8.02201852e-02] ... [ 5.03282487e-01 3.26622081e+00 6.28055155e-01 ... 1.00680816e+00 5.03282487e-01 3.26622081e+00] [-3.24932486e-01 4.71072674e-01 -2.19488904e-01 ... -1.90015841e+00 -3.24932486e-01 4.71072674e-01] [-4.66854423e-01 -2.07768750e+00 -1.78432852e-01 ... 2.15850830e+00 -4.66854423e-01 -2.07768750e+00]] [[ 1.08430386e+00 -2.04125881e+00 -8.98090839e-01 ... -5.14514625e-01 1.08430386e+00 -2.04125881e+00] [-5.46028689e-02 -2.86561579e-01 -4.12961066e-01 ... -1.13833058e+00 -5.46028689e-02 -2.86561579e-01] [ 1.68714178e+00 5.54913394e-02 -2.13489652e+00 ... -5.30547261e-01 1.68714178e+00 5.54913394e-02] ... [-1.14100516e+00 1.20659292e+00 8.74830246e-01 ... 1.30407691e+00 -1.14100516e+00 1.20659292e+00] [-1.10210288e+00 1.26299635e-01 1.39879668e+00 ... -1.18293178e+00 -1.10210288e+00 1.26299635e-01] [ 2.58830738e+00 1.14178562e+00 2.06762142e-02 ... -4.88860905e-01 2.58830738e+00 1.14178562e+00]] [[-3.68220627e-01 1.20104611e-01 6.41119182e-02 ... -6.20215595e-01 -3.68220627e-01 1.20104611e-01] [ 3.98155674e-02 -1.11906683e+00 -6.23164117e-01 ... 8.13054502e-01 3.98155674e-02 -1.11906683e+00] [-5.34965098e-02 -3.65240753e-01 -1.96984887e+00 ... 8.66729200e-01 -5.34965098e-02 -3.65240753e-01] ... [ 3.81697357e-01 1.07152112e-01 -1.08104479e+00 ... -8.67051005e-01 3.81697357e-01 1.07152112e-01] [-5.26965261e-01 1.81114465e-01 -1.38693607e+00 ... 8.13282728e-01 -5.26965261e-01 1.81114465e-01] [-1.16338742e+00 -4.97184843e-01 1.27700317e+00 ... 
8.97791982e-01 -1.16338742e+00 -4.97184843e-01]]] [[[ 6.94106460e-01 -1.55366927e-01 -5.26511312e-01 ... -6.81260154e-02 6.94106460e-01 -1.55366927e-01] [ 3.93885858e-02 1.00598323e+00 2.45106667e-01 ... -3.93682599e-01 3.93885858e-02 1.00598323e+00] [-1.20020926e+00 -9.49211061e-01 6.34113193e-01 ... -8.59831631e-01 -1.20020926e+00 -9.49211061e-01] ... [ 6.22717381e-01 5.18537700e-01 1.83767334e-01 ... 3.29971015e-01 6.22717381e-01 5.18537700e-01] [ 1.31682312e+00 -1.01872408e+00 3.52154598e-02 ... -1.53331304e+00 1.31682312e+00 -1.01872408e+00] [ 1.30800462e+00 -1.71422458e+00 -1.18796217e+00 ... 9.05023158e-01 1.30800462e+00 -1.71422458e+00]] [[-5.19504488e-01 -1.48188937e+00 6.13800228e-01 ... -3.15626949e-01 -5.19504488e-01 -1.48188937e+00] [-1.50928617e-01 -7.34287426e-02 -4.12354678e-01 ... -2.91629463e-01 -1.50928617e-01 -7.34287426e-02] [ 1.07661867e+00 5.47689199e-01 -6.05148748e-02 ... -5.19485474e-01 1.07661867e+00 5.47689199e-01] ... [-5.59018731e-01 8.28360796e-01 -6.24975502e-01 ... 7.09649146e-01 -5.59018731e-01 8.28360796e-01] [ 5.28370678e-01 9.02384818e-01 -8.06354642e-01 ... 2.93753833e-01 5.28370678e-01 9.02384818e-01] [ 7.00411618e-01 -5.49118400e-01 -1.29427731e-01 ... 2.45229781e-01 7.00411618e-01 -5.49118400e-01]] [[-1.40102112e+00 9.90473509e-01 1.02610672e+00 ... 1.39695215e+00 -1.40102112e+00 9.90473509e-01] [ 1.39234783e-02 8.18847477e-01 -1.02521086e+00 ... 1.01075363e+00 1.39234783e-02 8.18847477e-01] [ 4.91217673e-01 8.72260034e-01 -8.18652511e-01 ... 1.13976151e-01 4.91217673e-01 8.72260034e-01] ... [-1.23796248e+00 -1.70546114e+00 1.55882943e+00 ... -7.25101411e-01 -1.23796248e+00 -1.70546114e+00] [-1.72584802e-02 -9.90423411e-02 -1.22625566e+00 ... 1.15064085e+00 -1.72584802e-02 -9.90423411e-02] [-5.45480438e-02 -6.75024271e-01 -2.36692524e+00 ... 9.74982083e-01 -5.45480438e-02 -6.75024271e-01]] ... [[ 1.23541772e+00 1.22550786e+00 5.44963777e-01 ... 
1.85640407e+00 1.23541772e+00 1.22550786e+00] [-5.46439171e-01 4.08262871e-02 -1.34369206e+00 ... 9.06471491e-01 -5.46439171e-01 4.08262871e-02] [-6.89928055e-01 1.06456602e+00 -1.06503320e+00 ... -3.06077182e-01 -6.89928055e-01 1.06456602e+00] ... [ 1.89773113e-01 -1.46112192e+00 9.30014908e-01 ... 3.82948548e-01 1.89773113e-01 -1.46112192e+00] [-5.87500334e-01 -9.22881067e-01 9.17157888e-01 ... -1.88058600e-01 -5.87500334e-01 -9.22881067e-01] [-7.23220348e-01 -1.66680849e+00 2.15187013e-01 ... -5.33863716e-03 -7.23220348e-01 -1.66680849e+00]] [[-5.88871658e-01 -5.30136585e-01 1.79361737e+00 ... -6.49358273e-01 -5.88871658e-01 -5.30136585e-01] [-4.13437456e-01 -7.11153746e-01 -1.17066431e+00 ... 1.68382645e-01 -4.13437456e-01 -7.11153746e-01] [ 7.80652583e-01 -6.77876592e-01 -1.09997904e+00 ... -1.95924079e+00 7.80652583e-01 -6.77876592e-01] ... [ 4.21896994e-01 -7.20263541e-01 9.24752295e-01 ... 7.17137754e-01 4.21896994e-01 -7.20263541e-01] [-5.68506718e-01 -2.07439750e-01 -1.60721648e+00 ... 5.56763887e-01 -5.68506718e-01 -2.07439750e-01] [-1.13082564e+00 -2.21147269e-01 -5.65604210e-01 ... 5.67601085e-01 -1.13082564e+00 -2.21147269e-01]] [[ 4.36655730e-01 1.06894696e+00 8.86160731e-01 ... -3.33689362e-01 4.36655730e-01 1.06894696e+00] [-3.84403795e-01 -1.49195477e-01 6.28936172e-01 ... 3.85652453e-01 -3.84403795e-01 -1.49195477e-01] [ 2.72296071e-01 1.33190211e-02 2.70319730e-01 ... -1.60184777e+00 2.72296071e-01 1.33190211e-02] ... [-4.56557453e-01 2.70648539e-01 1.10610366e+00 ... 5.52576184e-01 -4.56557453e-01 2.70648539e-01] [-3.39502424e-01 1.07221194e-01 -5.46053588e-01 ... 2.39773914e-01 -3.39502424e-01 1.07221194e-01] [ 1.07068002e+00 1.36011556e-01 5.26998639e-01 ... 1.47742316e-01 1.07068002e+00 1.36011556e-01]]]]]; ov_res: [[[[[-2.84210891e-01 5.72862923e-01 -1.33169639e+00 ... -8.51189733e-01 -2.84210891e-01 5.72862923e-01] [-1.23556137e+00 5.29008657e-02 2.27717578e-01 ... 
-1.05256939e+00 -1.23556137e+00 5.29008657e-02] [-2.35669389e-01 -4.54510748e-01 -1.30596906e-01 ... 1.46095395e+00 -2.35669389e-01 -4.54510748e-01] ... [ 8.57746363e-01 1.39959872e+00 -6.34138763e-01 ... 9.59161341e-01 8.57746363e-01 1.39959872e+00] [ 1.53815418e-01 2.14499146e-01 1.63932920e+00 ... 1.50189257e+00 1.53815418e-01 2.14499146e-01] [-4.35871542e-01 -1.39888096e+00 -4.70158428e-01 ... 8.02909791e-01 -4.35871542e-01 -1.39888096e+00]] [[-8.23822439e-01 1.41715184e-01 -1.26837468e+00 ... -8.16439927e-01 -8.23822439e-01 1.41715184e-01] [ 4.93282944e-01 1.07355499e+00 -5.31120837e-01 ... -3.20631295e-01 4.93282944e-01 1.07355499e+00] [-6.20880760e-02 -1.24031448e+00 -1.02147007e+00 ... 4.69264567e-01 -6.20880760e-02 -1.24031448e+00] ... [-1.21688592e+00 -6.37315631e-01 1.74168396e+00 ... -6.37943089e-01 -1.21688592e+00 -6.37315631e-01] [ 2.43156642e-01 -1.08095789e+00 -4.50175047e-01 ... 2.66389698e-01 2.43156642e-01 -1.08095789e+00] [ 8.54839742e-01 -4.67825346e-02 1.38159740e+00 ... -1.40421307e+00 8.54839742e-01 -4.67825346e-02]] [[ 9.48036790e-01 6.62540138e-01 2.89598703e-01 ... 7.52143681e-01 9.48036790e-01 6.62540138e-01] [ 8.55293572e-01 -9.65022802e-01 2.89658248e-01 ... -5.92309892e-01 8.55293572e-01 -9.65022802e-01] [ 1.59636283e+00 1.06462002e+00 5.26945710e-01 ... 1.51727676e-01 1.59636283e+00 1.06462002e+00] ... [-8.33149195e-01 -9.97714221e-01 -1.18972874e+00 ... 9.37344551e-01 -8.33149195e-01 -9.97714221e-01] [-8.88220429e-01 1.65724432e+00 1.29243064e+00 ... -5.00108302e-01 -8.88220429e-01 1.65724432e+00] [-1.85594797e+00 1.88037086e+00 2.54206777e-01 ... -1.07193530e+00 -1.85594797e+00 1.88037086e+00]] ... [[-4.78810705e-02 -1.21394622e+00 -2.39533842e-01 ... 3.52396369e-01 -4.78810705e-02 -1.21394622e+00] [-3.04217428e-01 -6.37311220e-01 -4.86617070e-03 ... -5.69432259e-01 -3.04217428e-01 -6.37311220e-01] [-7.15587616e-01 8.24457645e-01 8.02882195e-01 ... 2.66772985e-01 -7.15587616e-01 8.24457645e-01] ... 
[ 1.26304221e+00 2.95693636e-01 -7.13777959e-01 ... 3.46386015e-01 1.26304221e+00 2.95693636e-01] [-1.04496706e+00 7.45998695e-02 5.15561521e-01 ... 2.70035148e-01 -1.04496706e+00 7.45998695e-02] [-5.33394627e-02 1.29002762e+00 3.30412030e-01 ... 3.45896035e-01 -5.33394627e-02 1.29002762e+00]] [[ 1.33343413e-01 -7.99835622e-01 -4.20503646e-01 ... 2.00789928e+00 1.33343413e-01 -7.99835622e-01] [ 1.40933120e+00 2.80540973e-01 5.71999311e-01 ... -1.31500995e+00 1.40933120e+00 2.80540973e-01] [-1.74563563e+00 3.00819665e-01 -1.07114470e+00 ... 4.16031331e-01 -1.74563563e+00 3.00819665e-01] ... [ 9.07796621e-01 1.07446587e+00 1.11126041e+00 ... -1.20693123e+00 9.07796621e-01 1.07446587e+00] [ 1.16508436e+00 3.28968018e-01 -8.15427721e-01 ... -2.18585044e-01 1.16508436e+00 3.28968018e-01] [ 7.50102460e-01 -2.17280626e-01 1.56658873e-01 ... -1.32657871e-01 7.50102460e-01 -2.17280626e-01]] [[ 1.19632971e+00 -1.02837622e+00 -6.90222383e-01 ... 1.99033475e+00 1.19632971e+00 -1.02837622e+00] [-4.88363087e-01 -2.99056061e-03 -6.97202459e-02 ... 9.43263412e-01 -4.88363087e-01 -2.99056061e-03] [-2.18368649e+00 -9.52330410e-01 -1.24147928e+00 ... -4.99307752e-01 -2.18368649e+00 -9.52330410e-01] ... [-1.40617594e-01 -4.44298595e-01 -4.72062111e-01 ... 5.88813901e-01 -1.40617594e-01 -4.44298595e-01] [ 8.49026561e-01 -2.32565120e-01 -2.52724320e-01 ... 1.63529527e+00 8.49026561e-01 -2.32565120e-01] [ 1.33710849e+00 4.91619706e-01 -7.85124898e-02 ... -1.52732238e-01 1.33710849e+00 4.91619706e-01]]] [[[-1.29428482e+00 -5.94233990e-01 -2.66335994e-01 ... -1.44190919e+00 -1.29428482e+00 -5.94233990e-01] [-1.86723083e-01 2.55718797e-01 4.82033715e-02 ... -5.83084345e-01 -1.86723083e-01 2.55718797e-01] [ 8.62592816e-01 -9.96300936e-01 -3.78520161e-01 ... 1.47939846e-01 8.62592816e-01 -9.96300936e-01] ... [-1.10084355e-01 -1.26313174e+00 1.18558562e+00 ... 1.67597866e+00 -1.10084355e-01 -1.26313174e+00] [-5.55514455e-01 1.70025289e+00 2.84570396e-01 ... 
-3.11999351e-01 -5.55514455e-01 1.70025289e+00] [-1.40945792e-01 -3.02406371e-01 -6.53684258e-01 ... 1.74598849e+00 -1.40945792e-01 -3.02406371e-01]] [[ 2.04154179e-01 -1.17971075e+00 -1.09280431e+00 ... -7.87079334e-01 2.04154179e-01 -1.17971075e+00] [-1.96839988e-01 -8.19411457e-01 -9.90005314e-01 ... -2.34360173e-01 -1.96839988e-01 -8.19411457e-01] [-1.24315605e-01 1.41979098e+00 -1.12277174e+00 ... 7.64411747e-01 -1.24315605e-01 1.41979098e+00] ... [ 2.38526791e-01 -2.00835133e+00 -1.72541693e-01 ... -6.90440774e-01 2.38526791e-01 -2.00835133e+00] [ 6.10244513e-01 -3.22939545e-01 2.39831582e-01 ... -4.12026405e-01 6.10244513e-01 -3.22939545e-01] [ 6.29179835e-01 1.84746441e-02 1.01718974e+00 ... 4.69587356e-01 6.29179835e-01 1.84746441e-02]] [[-1.10101891e+00 -8.35820138e-01 7.44156659e-01 ... -8.74187946e-01 -1.10101891e+00 -8.35820138e-01] [-7.28008151e-01 -1.31533384e+00 6.46833837e-01 ... 2.11053967e-01 -7.28008151e-01 -1.31533384e+00] [ 8.17534804e-01 1.84062049e-01 -3.98342937e-01 ... -2.85641909e-01 8.17534804e-01 1.84062049e-01] ... [-2.96546876e-01 -4.00889784e-01 8.89023602e-01 ... 1.36262760e-01 -2.96546876e-01 -4.00889784e-01] [ 6.64342344e-01 6.04535878e-01 -5.46581805e-01 ... 1.86577463e+00 6.64342344e-01 6.04535878e-01] [ 7.42781281e-01 -9.34874833e-01 1.01087594e+00 ... -1.18025792e+00 7.42781281e-01 -9.34874833e-01]] ... [[-5.77772319e-01 1.08069611e+00 -1.13243842e+00 ... 1.14801219e-02 -5.77772319e-01 1.08069611e+00] [-7.17500508e-01 1.00439680e+00 -4.58276063e-01 ... 1.20063472e+00 -7.17500508e-01 1.00439680e+00] [-1.22549498e+00 -8.02201852e-02 1.83723962e+00 ... 9.29495096e-02 -1.22549498e+00 -8.02201852e-02] ... [ 5.03282487e-01 3.26622081e+00 6.28055155e-01 ... 1.00680816e+00 5.03282487e-01 3.26622081e+00] [-3.24932486e-01 4.71072674e-01 -2.19488904e-01 ... -1.90015841e+00 -3.24932486e-01 4.71072674e-01] [-4.66854423e-01 -2.07768750e+00 -1.78432852e-01 ... 
2.15850830e+00 -4.66854423e-01 -2.07768750e+00]] [[ 1.08430386e+00 -2.04125881e+00 -8.98090839e-01 ... -5.14514625e-01 1.08430386e+00 -2.04125881e+00] [-5.46028689e-02 -2.86561579e-01 -4.12961066e-01 ... -1.13833058e+00 -5.46028689e-02 -2.86561579e-01] [ 1.68714178e+00 5.54913394e-02 -2.13489652e+00 ... -5.30547261e-01 1.68714178e+00 5.54913394e-02] ... [-1.14100516e+00 1.20659292e+00 8.74830246e-01 ... 1.30407691e+00 -1.14100516e+00 1.20659292e+00] [-1.10210288e+00 1.26299635e-01 1.39879668e+00 ... -1.18293178e+00 -1.10210288e+00 1.26299635e-01] [ 2.58830738e+00 1.14178562e+00 2.06762142e-02 ... -4.88860905e-01 2.58830738e+00 1.14178562e+00]] [[-3.68220627e-01 1.20104611e-01 6.41119182e-02 ... -6.20215595e-01 -3.68220627e-01 1.20104611e-01] [ 3.98155674e-02 -1.11906683e+00 -6.23164117e-01 ... 8.13054502e-01 3.98155674e-02 -1.11906683e+00] [-5.34965098e-02 -3.65240753e-01 -1.96984887e+00 ... 8.66729200e-01 -5.34965098e-02 -3.65240753e-01] ... [ 3.81697357e-01 1.07152112e-01 -1.08104479e+00 ... -8.67051005e-01 3.81697357e-01 1.07152112e-01] [-5.26965261e-01 1.81114465e-01 -1.38693607e+00 ... 8.13282728e-01 -5.26965261e-01 1.81114465e-01] [-1.16338742e+00 -4.97184843e-01 1.27700317e+00 ... 8.97791982e-01 -1.16338742e+00 -4.97184843e-01]]] [[[ 6.94106460e-01 -1.55366927e-01 -5.26511312e-01 ... -6.81260154e-02 6.94106460e-01 -1.55366927e-01] [ 3.93885858e-02 1.00598323e+00 2.45106667e-01 ... -3.93682599e-01 3.93885858e-02 1.00598323e+00] [-1.20020926e+00 -9.49211061e-01 6.34113193e-01 ... -8.59831631e-01 -1.20020926e+00 -9.49211061e-01] ... [ 6.22717381e-01 5.18537700e-01 1.83767334e-01 ... 3.29971015e-01 6.22717381e-01 5.18537700e-01] [ 1.31682312e+00 -1.01872408e+00 3.52154598e-02 ... -1.53331304e+00 1.31682312e+00 -1.01872408e+00] [ 1.30800462e+00 -1.71422458e+00 -1.18796217e+00 ... 9.05023158e-01 1.30800462e+00 -1.71422458e+00]] [[-5.19504488e-01 -1.48188937e+00 6.13800228e-01 ... 
-3.15626949e-01 -5.19504488e-01 -1.48188937e+00] [-1.50928617e-01 -7.34287426e-02 -4.12354678e-01 ... -2.91629463e-01 -1.50928617e-01 -7.34287426e-02] [ 1.07661867e+00 5.47689199e-01 -6.05148748e-02 ... -5.19485474e-01 1.07661867e+00 5.47689199e-01] ... [-5.59018731e-01 8.28360796e-01 -6.24975502e-01 ... 7.09649146e-01 -5.59018731e-01 8.28360796e-01] [ 5.28370678e-01 9.02384818e-01 -8.06354642e-01 ... 2.93753833e-01 5.28370678e-01 9.02384818e-01] [ 7.00411618e-01 -5.49118400e-01 -1.29427731e-01 ... 2.45229781e-01 7.00411618e-01 -5.49118400e-01]] [[-1.40102112e+00 9.90473509e-01 1.02610672e+00 ... 1.39695215e+00 -1.40102112e+00 9.90473509e-01] [ 1.39234783e-02 8.18847477e-01 -1.02521086e+00 ... 1.01075363e+00 1.39234783e-02 8.18847477e-01] [ 4.91217673e-01 8.72260034e-01 -8.18652511e-01 ... 1.13976151e-01 4.91217673e-01 8.72260034e-01] ... [-1.23796248e+00 -1.70546114e+00 1.55882943e+00 ... -7.25101411e-01 -1.23796248e+00 -1.70546114e+00] [-1.72584802e-02 -9.90423411e-02 -1.22625566e+00 ... 1.15064085e+00 -1.72584802e-02 -9.90423411e-02] [-5.45480438e-02 -6.75024271e-01 -2.36692524e+00 ... 9.74982083e-01 -5.45480438e-02 -6.75024271e-01]] ... [[ 1.23541772e+00 1.22550786e+00 5.44963777e-01 ... 1.85640407e+00 1.23541772e+00 1.22550786e+00] [-5.46439171e-01 4.08262871e-02 -1.34369206e+00 ... 9.06471491e-01 -5.46439171e-01 4.08262871e-02] [-6.89928055e-01 1.06456602e+00 -1.06503320e+00 ... -3.06077182e-01 -6.89928055e-01 1.06456602e+00] ... [ 1.89773113e-01 -1.46112192e+00 9.30014908e-01 ... 3.82948548e-01 1.89773113e-01 -1.46112192e+00] [-5.87500334e-01 -9.22881067e-01 9.17157888e-01 ... -1.88058600e-01 -5.87500334e-01 -9.22881067e-01] [-7.23220348e-01 -1.66680849e+00 2.15187013e-01 ... -5.33863716e-03 -7.23220348e-01 -1.66680849e+00]] [[-5.88871658e-01 -5.30136585e-01 1.79361737e+00 ... -6.49358273e-01 -5.88871658e-01 -5.30136585e-01] [-4.13437456e-01 -7.11153746e-01 -1.17066431e+00 ... 
1.68382645e-01 -4.13437456e-01 -7.11153746e-01] [ 7.80652583e-01 -6.77876592e-01 -1.09997904e+00 ... -1.95924079e+00 7.80652583e-01 -6.77876592e-01] ... [ 4.21896994e-01 -7.20263541e-01 9.24752295e-01 ... 7.17137754e-01 4.21896994e-01 -7.20263541e-01] [-5.68506718e-01 -2.07439750e-01 -1.60721648e+00 ... 5.56763887e-01 -5.68506718e-01 -2.07439750e-01] [-1.13082564e+00 -2.21147269e-01 -5.65604210e-01 ... 5.67601085e-01 -1.13082564e+00 -2.21147269e-01]] [[ 4.36655730e-01 1.06894696e+00 8.86160731e-01 ... -3.33689362e-01 4.36655730e-01 1.06894696e+00] [-3.84403795e-01 -1.49195477e-01 6.28936172e-01 ... 3.85652453e-01 -3.84403795e-01 -1.49195477e-01] [ 2.72296071e-01 1.33190211e-02 2.70319730e-01 ... -1.60184777e+00 2.72296071e-01 1.33190211e-02] ... [-4.56557453e-01 2.70648539e-01 1.10610366e+00 ... 5.52576184e-01 -4.56557453e-01 2.70648539e-01] [-3.39502424e-01 1.07221194e-01 -5.46053588e-01 ... 2.39773914e-01 -3.39502424e-01 1.07221194e-01] [ 1.07068002e+00 1.36011556e-01 5.26998639e-01 ... 1.47742316e-01 1.07068002e+00 1.36011556e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(1, 2) - mode:reflect - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5596.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="reflect"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[-1.7323955 -1.9109805 -1.7323955 1.1207411 -1.7323955 -1.9109805]]; ov_res: [[-1.7323955 -1.9109805 -1.7323955 1.1207411 -1.7323955 -1.9109805]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(1, 0) - mode:reflect - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5599.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="reflect"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during 
conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) fw_re: [[-0.21222626 -0.64991254 -0.21222626 0.3406815 ]]; ov_res: [[-0.21222626 -0.64991254 -0.21222626 0.3406815 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(0, 0) - mode:reflect - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5602.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="reflect"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[0.71069217 1.2920853 0.6847766 ]]; ov_res: [[0.71069217 1.2920853 0.6847766 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(1, 2) - mode:replicate - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5605.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="replicate"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[-0.07221095 -0.07221095 -0.4515396 0.19058776 0.19058776 0.19058776]]; ov_res: [[-0.07221095 -0.07221095 -0.4515396 0.19058776 0.19058776 0.19058776]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(1, 0) - mode:replicate - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5608.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="replicate"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[ 0.15278213 0.15278213 0.12621453 -0.3845173 ]]; ov_res: [[ 0.15278213 0.15278213 0.12621453 -0.3845173 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(0, 0) - mode:replicate - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5611.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="replicate"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[0, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[ 3.4714723 -0.6122373 1.2500603]]; ov_res: [[ 3.4714723 -0.6122373 1.2500603]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(1, 0) - mode:constant - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5614.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[ 0. -0.01539529 -0.5437806 0.02612558]]; ov_res: [[ 0. -0.01539529 -0.5437806 0.02612558]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(1, 0) - mode:constant - value:42.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5617.aten_pad, %x : Tensor): %2 : float = prim::Constant[value=42.]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[42. -0.11215626 0.18274243 -0.507818 ]]; ov_res: [[42. -0.11215626 0.18274243 -0.507818 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(1, 0) - mode:constant - value:-0.57 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5620.aten_pad, %x : Tensor): %2 : float = prim::Constant[value=-0.56999999999999995]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 0]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[-0.57 0.9019545 0.61769193 -0.15876332]]; ov_res: [[-0.57 0.9019545 0.61769193 -0.15876332]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:constant - value:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5623.aten_pad, %x : Tensor): %2 : NoneType = prim::Constant() %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[0. 0. 0. 0. 0. 0. ] [0. 0. 0. 0. 0. 0. ] [0. 0. 0. 0. 0. 0. ] [0. 0.598145 0.14164649 0.3069018 0. 0. ] [0. 0. 0. 0. 0. 0. ] [0. 0. 0. 0. 0. 0. ] [0. 0. 0. 0. 0. 0. ] [0. 0. 0. 0. 0. 0. ]]; ov_res: [[0. 0. 0. 0. 0. 0. ] [0. 0. 0. 0. 0. 0. ] [0. 0. 0. 0. 0. 0. ] [0. 0.598145 0.14164649 0.3069018 0. 0. ] [0. 0. 0. 0. 0. 0. ] [0. 0. 0. 0. 0. 0. ] [0. 0. 0. 0. 0. 0. ] [0. 0. 0. 0. 0. 0. ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:constant - value:42.0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5626.aten_pad, %x : Tensor): %2 : float = prim::Constant[value=42.]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 -1.4611648e+00 -7.9427272e-01 -2.2658363e-02 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01]]; ov_res: [[ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 -1.4611648e+00 -7.9427272e-01 -2.2658363e-02 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01] [ 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01 4.2000000e+01]] Accuracy 
validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pad.py::TestPad::test_pad2d[ ie_device:CPU - precision:FP32 - pads:(1, 2, 3, 4) - mode:constant - value:-0.57 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pad.___torch_mangle_5629.aten_pad, %x : Tensor): %2 : float = prim::Constant[value=-0.56999999999999995]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %3 : str = prim::Constant[value="constant"]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 %4 : int[] = prim::Constant[value=[1, 2, 3, 4]]() %5 : Tensor = aten::pad(%x, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pad.py:27:0 return (%5) fw_re: [[-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 0.3659503 0.41370144 0.6684393 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ]]; ov_res: [[-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 0.3659503 0.41370144 0.6684393 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ] [-0.57 -0.57 -0.57 -0.57 -0.57 -0.57 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_permute.py::TestPermute::test_relu[ ie_device:CPU - precision:FP32 - order:[0, 2, 3, 1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_permute.___torch_mangle_5630.aten_permute, %x.1 : Tensor): %self.order : int[] = prim::Constant[value=[0, 2, 3, 1]]() %3 : Tensor = aten::permute(%x.1, %self.order) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_permute.py:23:23 return (%3) fw_re: [[[[-6.9604719e-01 4.6413693e-01 1.6947992e+00] [ 1.5398988e+00 -2.2726727e+00 -8.2865320e-02] [-1.6287259e-03 1.6887653e+00 9.0944946e-02] ... [ 1.2626233e+00 -1.5051019e+00 -1.6052759e+00] [-1.9920792e-01 1.7785825e-01 -2.5692293e-01] [-2.1293776e-01 1.5202984e+00 5.4849255e-01]] [[-7.4250412e-01 6.1783457e-01 -1.4205168e+00] [ 1.1632117e-01 -9.3856895e-01 2.0462699e+00] [-6.4190227e-01 1.4186677e+00 2.7933728e-02] ... [-2.1606627e-01 6.3199943e-01 4.6718836e-02] [-6.8589753e-01 -6.2889516e-01 -6.9366235e-01] [ 1.2509753e+00 1.9088057e-01 -3.2307353e+00]] [[-1.9738246e+00 -1.3647138e+00 -1.5045580e+00] [ 1.2831013e-01 9.3264592e-01 -4.9202818e-01] [ 7.7572578e-01 -2.9222497e-01 3.6919737e-01] ... [ 1.3641043e-01 -7.0604485e-01 5.2691531e-01] [-7.1617681e-01 1.2789779e+00 4.9630597e-01] [-1.4191041e+00 -8.2923299e-01 1.6216580e+00]] ... [[-1.6659249e+00 5.8394253e-01 4.6260303e-01] [-9.2406732e-01 9.1724706e-01 -5.2956268e-02] [ 7.4470490e-01 -8.1341624e-01 9.6190119e-01] ... [-2.1172450e+00 3.8143227e-01 5.0493455e-01] [ 1.1676944e+00 -1.2422695e+00 -2.0078270e-01] [-2.8131029e-01 -1.4741436e-01 8.1153858e-01]] [[ 6.6492432e-01 -3.9439204e-01 4.1959363e-01] [ 2.2695479e-01 5.2906352e-01 6.3506252e-01] [-5.5217689e-01 -2.6804537e-01 -6.4402503e-01] ... [-1.5257391e+00 -4.4977340e-01 -1.5491198e+00] [ 2.3474720e+00 1.7036100e-01 1.2068599e+00] [ 2.6226291e-01 3.2322857e-01 -6.7701423e-01]] [[-2.4751988e-01 1.5691198e+00 -1.9709428e-01] [-7.1910334e-01 -1.9643635e+00 -1.8232235e-01] [ 2.9791081e-01 -9.4596781e-03 3.5982516e-01] ... 
[ 2.8683266e-01 -8.3969545e-01 -5.4501522e-01] [-1.6844401e+00 -2.8272420e-01 5.5386043e-01] [-4.1233790e-01 -1.4910175e+00 -4.7581983e-01]]]]; ov_res: [[[[-6.9604719e-01 4.6413693e-01 1.6947992e+00] [ 1.5398988e+00 -2.2726727e+00 -8.2865320e-02] [-1.6287259e-03 1.6887653e+00 9.0944946e-02] ... [ 1.2626233e+00 -1.5051019e+00 -1.6052759e+00] [-1.9920792e-01 1.7785825e-01 -2.5692293e-01] [-2.1293776e-01 1.5202984e+00 5.4849255e-01]] [[-7.4250412e-01 6.1783457e-01 -1.4205168e+00] [ 1.1632117e-01 -9.3856895e-01 2.0462699e+00] [-6.4190227e-01 1.4186677e+00 2.7933728e-02] ... [-2.1606627e-01 6.3199943e-01 4.6718836e-02] [-6.8589753e-01 -6.2889516e-01 -6.9366235e-01] [ 1.2509753e+00 1.9088057e-01 -3.2307353e+00]] [[-1.9738246e+00 -1.3647138e+00 -1.5045580e+00] [ 1.2831013e-01 9.3264592e-01 -4.9202818e-01] [ 7.7572578e-01 -2.9222497e-01 3.6919737e-01] ... [ 1.3641043e-01 -7.0604485e-01 5.2691531e-01] [-7.1617681e-01 1.2789779e+00 4.9630597e-01] [-1.4191041e+00 -8.2923299e-01 1.6216580e+00]] ... [[-1.6659249e+00 5.8394253e-01 4.6260303e-01] [-9.2406732e-01 9.1724706e-01 -5.2956268e-02] [ 7.4470490e-01 -8.1341624e-01 9.6190119e-01] ... [-2.1172450e+00 3.8143227e-01 5.0493455e-01] [ 1.1676944e+00 -1.2422695e+00 -2.0078270e-01] [-2.8131029e-01 -1.4741436e-01 8.1153858e-01]] [[ 6.6492432e-01 -3.9439204e-01 4.1959363e-01] [ 2.2695479e-01 5.2906352e-01 6.3506252e-01] [-5.5217689e-01 -2.6804537e-01 -6.4402503e-01] ... [-1.5257391e+00 -4.4977340e-01 -1.5491198e+00] [ 2.3474720e+00 1.7036100e-01 1.2068599e+00] [ 2.6226291e-01 3.2322857e-01 -6.7701423e-01]] [[-2.4751988e-01 1.5691198e+00 -1.9709428e-01] [-7.1910334e-01 -1.9643635e+00 -1.8232235e-01] [ 2.9791081e-01 -9.4596781e-03 3.5982516e-01] ... [ 2.8683266e-01 -8.3969545e-01 -5.4501522e-01] [-1.6844401e+00 -2.8272420e-01 5.5386043e-01] [-4.1233790e-01 -1.4910175e+00 -4.7581983e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_permute.py::TestPermute::test_relu[ ie_device:CPU - precision:FP32 - order:[0, 3, 1, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_permute.___torch_mangle_5632.aten_permute, %x.1 : Tensor): %self.order : int[] = prim::Constant[value=[0, 3, 1, 2]]() %3 : Tensor = aten::permute(%x.1, %self.order) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_permute.py:23:23 return (%3) fw_re: [[[[-2.4948409 1.1171721 0.74684733 ... -0.65800166 -0.31304663 -0.25403133] [-0.39400735 -0.48187217 0.3487587 ... -0.61997485 0.5890117 0.0179284 ] [ 0.15693904 -1.6401372 0.05002936 ... -0.22093132 0.50058055 -1.3950236 ]] [[ 0.4881952 -0.6788761 -1.1973807 ... 0.6367392 -1.4805099 0.09015796] [ 1.331554 -0.08440714 0.12834103 ... -0.16273814 0.7610562 -0.23440278] [-1.4785392 -1.1035255 -0.9220587 ... 0.40242463 0.61062026 0.60533804]] [[ 1.9560744 -0.49924675 -0.666268 ... 1.1294703 0.60182256 0.2758521 ] [ 1.2856199 -0.43870988 1.1755028 ... -0.41072336 0.04291403 0.2887329 ] [-0.85289776 1.2727702 1.3172703 ... -1.0789585 0.8572614 -0.6209573 ]] ... [[ 0.4468476 -0.8600769 -0.776209 ... 0.11535221 -0.4707523 -0.23058805] [ 1.1131928 -1.5240668 -0.1673143 ... -0.75044715 0.32341024 0.43430826] [ 0.4942816 1.0456901 0.19368862 ... -0.7050093 0.07690819 -0.02387831]] [[ 0.6306144 -0.45487782 -0.13369869 ... -0.32790676 0.30918926 -0.89431745] [-0.10514683 0.7686865 1.387178 ... 0.8088889 0.8075415 0.990138 ] [ 0.7371517 -0.9249867 0.7500539 ... -0.7910252 -1.2871352 0.39563692]] [[ 1.2565204 0.24577436 -0.32506865 ... 1.0341055 -1.9225953 0.69032085] [ 1.2359858 1.6149471 -0.3855528 ... 0.41507646 2.2615783 0.00713706] [-1.3311545 -0.21988338 0.27092907 ... 0.25886518 1.6992313 0.07803385]]]]; ov_res: [[[[-2.4948409 1.1171721 0.74684733 ... -0.65800166 -0.31304663 -0.25403133] [-0.39400735 -0.48187217 0.3487587 ... -0.61997485 0.5890117 0.0179284 ] [ 0.15693904 -1.6401372 0.05002936 ... -0.22093132 0.50058055 -1.3950236 ]] [[ 0.4881952 -0.6788761 -1.1973807 ... 
0.6367392 -1.4805099 0.09015796] [ 1.331554 -0.08440714 0.12834103 ... -0.16273814 0.7610562 -0.23440278] [-1.4785392 -1.1035255 -0.9220587 ... 0.40242463 0.61062026 0.60533804]] [[ 1.9560744 -0.49924675 -0.666268 ... 1.1294703 0.60182256 0.2758521 ] [ 1.2856199 -0.43870988 1.1755028 ... -0.41072336 0.04291403 0.2887329 ] [-0.85289776 1.2727702 1.3172703 ... -1.0789585 0.8572614 -0.6209573 ]] ... [[ 0.4468476 -0.8600769 -0.776209 ... 0.11535221 -0.4707523 -0.23058805] [ 1.1131928 -1.5240668 -0.1673143 ... -0.75044715 0.32341024 0.43430826] [ 0.4942816 1.0456901 0.19368862 ... -0.7050093 0.07690819 -0.02387831]] [[ 0.6306144 -0.45487782 -0.13369869 ... -0.32790676 0.30918926 -0.89431745] [-0.10514683 0.7686865 1.387178 ... 0.8088889 0.8075415 0.990138 ] [ 0.7371517 -0.9249867 0.7500539 ... -0.7910252 -1.2871352 0.39563692]] [[ 1.2565204 0.24577436 -0.32506865 ... 1.0341055 -1.9225953 0.69032085] [ 1.2359858 1.6149471 -0.3855528 ... 0.41507646 2.2615783 0.00713706] [-1.3311545 -0.21988338 0.27092907 ... 0.25886518 1.6992313 0.07803385]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5634.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[0, 0]]() %5 : int[] = prim::Constant[value=[1, 1]]() %6 : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) fw_re: [[[[-2.22815797e-02 -4.57232259e-02 9.05878246e-02 -7.86619037e-02 6.38030171e-02 -2.62701750e-01 3.56059941e-03 2.99644291e-01 4.85594600e-01 1.41443431e-01 -4.85458642e-01 -3.90120447e-01 -1.54843815e-02] [ 1.56522170e-01 1.70713458e-02 1.25688136e-01 2.40465671e-01 1.82158977e-01 -2.99297452e-01 -7.03624338e-02 2.79246688e-01 5.75432837e-01 -1.66708916e-01 -3.51425052e-01 -3.85733098e-01 6.10473566e-02] [-1.26936368e-03 -2.25210056e-01 9.76936519e-02 3.61381918e-01 2.65455604e-01 -1.80443719e-01 -4.26729536e-03 3.55187863e-01 4.60164189e-01 -1.39078215e-01 -3.60855192e-01 -3.76822859e-01 -2.53703535e-01] [ 5.17776251e-01 4.02743220e-02 1.84139982e-01 2.72100300e-01 4.88781452e-01 3.58459145e-01 4.89843518e-01 7.24333525e-01 7.50595510e-01 5.60370028e-01 4.31736052e-01 4.28157091e-01 1.24697059e-01] [ 4.05394018e-01 1.07047558e-02 2.38629177e-01 1.35131538e-01 5.75059414e-01 4.96339798e-01 4.79973912e-01 4.48361039e-01 3.71555209e-01 2.83400416e-01 2.50349969e-01 1.99309900e-01 -1.77366529e-02] [ 4.05112833e-01 1.39742330e-01 1.02935940e-01 -1.63573787e-01 2.03553677e-01 1.94812313e-01 9.85684022e-02 2.47300789e-01 2.86650747e-01 2.85417169e-01 2.66594768e-01 1.57739855e-02 -3.64909954e-02] [ 5.02280951e-01 3.30552578e-01 3.52198035e-01 -1.24032035e-01 1.68457642e-01 -1.26948878e-01 -8.29697307e-03 -1.14230953e-01 -2.36056700e-01 -4.63688642e-01 -4.20491844e-01 
-3.96121591e-01 -1.78544998e-01] [ 3.21835101e-01 2.54096657e-01 2.26201639e-01 -2.55233318e-01 -9.80069265e-02 -2.61499822e-01 -1.23989664e-01 -7.64115378e-02 -3.33037972e-01 -2.58675575e-01 -3.44067931e-01 -3.48679692e-01 -1.82591230e-01] [-1.50569513e-01 -1.82617698e-02 -6.57049119e-02 -4.37240750e-01 -6.49411753e-02 4.19904105e-03 2.00362489e-01 -2.03881204e-01 -6.42705917e-01 -5.16195238e-01 -5.72362900e-01 -3.68194371e-01 -5.55270433e-01] [-5.85064709e-01 -1.44558489e-01 -8.05657208e-02 -2.74630427e-01 -4.43066597e-01 -3.17023426e-01 -1.93790972e-01 -2.44501173e-01 -2.97696918e-01 -5.55951476e-01 -3.07612419e-01 -4.13012177e-01 -3.85667115e-01] [-8.67876470e-01 -2.41720945e-01 -6.17497484e-04 5.54494932e-02 -3.74037117e-01 -2.38114655e-01 -2.29304865e-01 -2.88276345e-01 -4.02014703e-01 -6.89761400e-01 -3.00832301e-01 -1.42487124e-01 -2.52855361e-01] [-5.07194161e-01 -2.69434124e-01 1.32012114e-01 3.35560143e-01 -8.77952203e-02 -5.93768507e-02 7.81727135e-02 4.00567986e-02 6.31847754e-02 -4.37058806e-01 1.64649040e-01 -2.08092332e-02 2.08205670e-01] [-1.99231356e-01 -1.90669835e-01 -4.46448401e-02 -1.73008107e-02 -2.82686651e-01 -1.16349511e-01 2.66919553e-01 3.04851681e-01 1.16370721e-02 -9.54359919e-02 3.66492063e-01 5.09855032e-01 4.52965885e-01]] [[ 6.63144048e-03 2.78071016e-01 6.59572333e-02 -1.57894403e-01 2.97892746e-03 2.07217053e-01 1.26695141e-01 3.79504897e-02 -4.97614920e-01 -3.41806054e-01 -8.76414701e-02 3.98840636e-01 8.07792366e-01] [-3.24096173e-01 5.77796847e-02 1.98337182e-01 1.37445763e-01 3.01968843e-01 3.18702430e-01 6.93945944e-01 3.84921849e-01 -5.85621148e-02 -3.30187827e-01 -9.13635865e-02 6.12574041e-01 8.69933844e-01] [ 8.64566714e-02 1.02666721e-01 2.84886241e-01 3.94876271e-01 1.51175901e-01 -4.78123352e-02 1.54237390e-01 -4.37973328e-02 -3.87856625e-02 -2.31561080e-01 1.40859718e-02 2.92147070e-01 3.03400785e-01] [-6.39705881e-02 -1.72018588e-01 -2.12672334e-02 4.18668725e-02 -1.55977607e-01 -5.35808742e-01 -3.55469078e-01 
-2.88242012e-01 2.46236056e-01 1.60628140e-01 1.79736957e-01 1.55116573e-01 1.14072822e-02] [ 1.39823645e-01 -4.74833660e-02 -1.16007909e-01 -4.84854072e-01 -7.84797251e-01 -8.40175867e-01 -9.12640333e-01 -6.42501950e-01 -9.12731364e-02 3.79415125e-01 3.81577551e-01 1.14015667e-02 -2.85692841e-01] [ 2.32921019e-01 1.63885280e-02 -2.43180618e-01 -5.78656614e-01 -4.89052981e-01 -4.66583192e-01 -4.03012067e-01 -3.47789019e-01 8.71706307e-02 3.43035430e-01 2.51727104e-01 -1.28808782e-01 -1.84145905e-02] [ 2.04875603e-01 1.22908555e-01 -2.42236927e-01 -2.13475287e-01 -2.79969245e-01 -1.30880579e-01 -1.43784687e-01 -1.32735595e-01 -2.02304468e-01 -3.29677999e-01 -5.92756510e-01 -6.48192286e-01 -3.11547309e-01] [ 3.95338058e-01 2.66325206e-01 -1.34070814e-01 3.58976036e-01 5.01518786e-01 6.04072988e-01 2.78840184e-01 1.90187559e-01 -2.59970009e-01 -3.60174060e-01 -5.05233288e-01 -2.58703083e-01 -5.29044271e-02] [-1.20428056e-01 -1.51870519e-01 2.60567535e-02 4.25296575e-01 3.71429801e-01 2.01736018e-01 -1.81117356e-01 1.05663262e-01 -1.12109132e-01 1.00248322e-01 -2.09115803e-01 3.78609225e-02 -1.65984243e-01] [-2.21371949e-01 -3.53898942e-01 6.02716282e-02 2.38116503e-01 2.09682226e-01 1.14358082e-01 -2.89349675e-01 1.50309071e-01 1.18568555e-01 5.54063857e-01 3.81301641e-01 2.10525066e-01 -1.93883762e-01] [-3.57831120e-01 -4.50813979e-01 5.50637301e-03 1.68340400e-01 -1.32852241e-01 -2.94177890e-01 -7.15631723e-01 -1.56305760e-01 1.59017220e-02 3.41357827e-01 5.86345606e-02 -5.97730353e-02 -1.77967288e-02] [-9.83725339e-02 -3.10065418e-01 1.81226283e-01 2.58203655e-01 -6.15644865e-02 -3.68540972e-01 -9.24510300e-01 -5.64384043e-01 -4.87897396e-01 -6.10134006e-02 4.96480130e-02 -3.25300023e-02 2.71232635e-01] [ 4.52203155e-01 2.41992772e-01 3.13293219e-01 2.82707810e-01 -8.35160315e-02 -3.57517928e-01 -6.65412605e-01 -2.92038977e-01 -1.67441398e-01 -2.58121155e-02 9.32726078e-03 3.00759166e-01 5.56860864e-01]] [[-2.70038992e-01 -2.89812386e-01 -4.00872588e-01 
-1.33357093e-01 -3.29626948e-02 -5.96726090e-02 -2.12418605e-02 2.10051715e-01 6.05216809e-02 -5.74466400e-02 -4.95255589e-01 -4.55918640e-01 -4.77179319e-01] [-6.21603541e-02 -2.73685634e-01 -4.19183522e-01 -1.35084569e-01 6.95120394e-02 2.36080781e-01 2.55627155e-01 4.00724530e-01 2.68116325e-01 1.58222944e-01 7.76189342e-02 3.49688791e-02 -2.11469650e-01] [ 5.19311190e-01 9.16111618e-02 -1.81964576e-01 -2.71921009e-01 2.17777714e-01 3.75514835e-01 4.62496340e-01 5.53090453e-01 2.77155787e-01 3.80975276e-01 3.74110669e-01 4.17469114e-01 -1.52475223e-01] [ 2.33000919e-01 -9.53533221e-03 2.89476991e-01 5.12868643e-01 7.67547369e-01 5.14297843e-01 2.18395770e-01 4.48258609e-01 2.17523068e-01 1.81759983e-01 6.49601892e-02 1.58385709e-01 -1.95544183e-01] [ 5.10569513e-01 3.80054504e-01 6.29829168e-01 7.37652659e-01 1.05638301e+00 4.47492272e-01 1.50091559e-01 4.72567081e-02 1.31174857e-02 -1.40997529e-01 -1.34990746e-02 -3.53634395e-02 7.76690245e-03] [-1.05075307e-01 -4.82924581e-02 2.97564834e-01 8.48259628e-01 1.07158887e+00 6.94553137e-01 1.00076050e-01 -3.39134000e-02 -8.24147165e-02 -2.93609858e-01 -3.32721174e-01 -5.26872613e-02 1.46080464e-01] [ 3.80794168e-01 4.22725827e-01 1.20538659e-01 3.19033623e-01 5.41323423e-01 5.78281820e-01 4.46392208e-01 1.80352181e-01 1.53819874e-01 -2.40275621e-01 -1.82554439e-01 2.02132203e-02 2.46177852e-01] [ 1.82084844e-01 3.14367294e-01 -9.91330668e-02 1.53811216e-01 2.28511870e-01 7.48663664e-01 6.26126289e-01 7.33178318e-01 1.82974547e-01 -1.30383506e-01 -4.60603833e-01 -1.26387805e-01 5.89633510e-02] [ 9.70753431e-02 6.18314683e-01 3.79117042e-01 3.26283336e-01 -1.26639903e-01 2.57603347e-01 2.59650916e-01 5.30713379e-01 2.00384259e-01 -1.08140193e-01 -2.74351507e-01 -2.89351016e-01 -1.42914742e-01] [-1.54898107e-01 3.50999981e-02 -2.31266439e-01 -2.63973445e-01 -5.14186025e-01 -2.61412784e-02 2.01410845e-01 3.74083072e-01 4.28348631e-02 -2.24654883e-01 -1.76479280e-01 -3.06750983e-01 -1.69854641e-01] [-1.48604676e-01 
-1.89724684e-01 -2.20035210e-01 -3.45237464e-01 -6.89403296e-01 -3.57747853e-01 -1.08404875e-01 2.43001297e-01 -9.59262848e-02 -4.26488668e-01 -3.29243928e-01 -2.58918226e-01 -1.43342495e-01] [-1.18599333e-01 -6.17241383e-01 -9.27545965e-01 -8.76081109e-01 -4.07063693e-01 -4.56111990e-02 5.03296494e-01 3.92844766e-01 1.64702758e-01 -3.37254286e-01 -3.44775349e-01 -9.37254131e-02 2.07419515e-01] [-2.84365386e-01 -7.93650568e-01 -6.43596530e-01 -4.78926986e-01 2.02992950e-02 1.95654184e-01 3.64655495e-01 3.04822952e-01 1.01896137e-01 -1.76046535e-01 -3.18750441e-01 -4.33928818e-02 6.32738471e-02]]]]; ov_res: [[[[-2.22815797e-02 -4.57232259e-02 9.05878246e-02 -7.86619037e-02 6.38030171e-02 -2.62701750e-01 3.56059941e-03 2.99644291e-01 4.85594600e-01 1.41443431e-01 -4.85458642e-01 -3.90120447e-01 -1.54843815e-02] [ 1.56522170e-01 1.70713458e-02 1.25688136e-01 2.40465671e-01 1.82158977e-01 -2.99297452e-01 -7.03624338e-02 2.79246688e-01 5.75432837e-01 -1.66708916e-01 -3.51425052e-01 -3.85733098e-01 6.10473566e-02] [-1.26936368e-03 -2.25210056e-01 9.76936519e-02 3.61381918e-01 2.65455604e-01 -1.80443719e-01 -4.26729536e-03 3.55187863e-01 4.60164189e-01 -1.39078215e-01 -3.60855192e-01 -3.76822859e-01 -2.53703535e-01] [ 5.17776251e-01 4.02743220e-02 1.84139982e-01 2.72100300e-01 4.88781452e-01 3.58459145e-01 4.89843518e-01 7.24333525e-01 7.50595510e-01 5.60370028e-01 4.31736052e-01 4.28157091e-01 1.24697059e-01] [ 4.05394018e-01 1.07047558e-02 2.38629177e-01 1.35131538e-01 5.75059414e-01 4.96339798e-01 4.79973912e-01 4.48361039e-01 3.71555209e-01 2.83400416e-01 2.50349969e-01 1.99309900e-01 -1.77366529e-02] [ 4.05112833e-01 1.39742330e-01 1.02935940e-01 -1.63573787e-01 2.03553677e-01 1.94812313e-01 9.85684022e-02 2.47300789e-01 2.86650747e-01 2.85417169e-01 2.66594768e-01 1.57739855e-02 -3.64909954e-02] [ 5.02280951e-01 3.30552578e-01 3.52198035e-01 -1.24032035e-01 1.68457642e-01 -1.26948878e-01 -8.29697307e-03 -1.14230953e-01 -2.36056700e-01 -4.63688642e-01 -4.20491844e-01 
-3.96121591e-01 -1.78544998e-01] [ 3.21835101e-01 2.54096657e-01 2.26201639e-01 -2.55233318e-01 -9.80069265e-02 -2.61499822e-01 -1.23989664e-01 -7.64115378e-02 -3.33037972e-01 -2.58675575e-01 -3.44067931e-01 -3.48679692e-01 -1.82591230e-01] [-1.50569513e-01 -1.82617698e-02 -6.57049119e-02 -4.37240750e-01 -6.49411753e-02 4.19904105e-03 2.00362489e-01 -2.03881204e-01 -6.42705917e-01 -5.16195238e-01 -5.72362900e-01 -3.68194371e-01 -5.55270433e-01] [-5.85064709e-01 -1.44558489e-01 -8.05657208e-02 -2.74630427e-01 -4.43066597e-01 -3.17023426e-01 -1.93790972e-01 -2.44501173e-01 -2.97696918e-01 -5.55951476e-01 -3.07612419e-01 -4.13012177e-01 -3.85667115e-01] [-8.67876470e-01 -2.41720945e-01 -6.17497484e-04 5.54494932e-02 -3.74037117e-01 -2.38114655e-01 -2.29304865e-01 -2.88276345e-01 -4.02014703e-01 -6.89761400e-01 -3.00832301e-01 -1.42487124e-01 -2.52855361e-01] [-5.07194161e-01 -2.69434124e-01 1.32012114e-01 3.35560143e-01 -8.77952203e-02 -5.93768507e-02 7.81727135e-02 4.00567986e-02 6.31847754e-02 -4.37058806e-01 1.64649040e-01 -2.08092332e-02 2.08205670e-01] [-1.99231356e-01 -1.90669835e-01 -4.46448401e-02 -1.73008107e-02 -2.82686651e-01 -1.16349511e-01 2.66919553e-01 3.04851681e-01 1.16370721e-02 -9.54359919e-02 3.66492063e-01 5.09855032e-01 4.52965885e-01]] [[ 6.63144048e-03 2.78071016e-01 6.59572333e-02 -1.57894403e-01 2.97892746e-03 2.07217053e-01 1.26695141e-01 3.79504897e-02 -4.97614920e-01 -3.41806054e-01 -8.76414701e-02 3.98840636e-01 8.07792366e-01] [-3.24096173e-01 5.77796847e-02 1.98337182e-01 1.37445763e-01 3.01968843e-01 3.18702430e-01 6.93945944e-01 3.84921849e-01 -5.85621148e-02 -3.30187827e-01 -9.13635865e-02 6.12574041e-01 8.69933844e-01] [ 8.64566714e-02 1.02666721e-01 2.84886241e-01 3.94876271e-01 1.51175901e-01 -4.78123352e-02 1.54237390e-01 -4.37973328e-02 -3.87856625e-02 -2.31561080e-01 1.40859718e-02 2.92147070e-01 3.03400785e-01] [-6.39705881e-02 -1.72018588e-01 -2.12672334e-02 4.18668725e-02 -1.55977607e-01 -5.35808742e-01 -3.55469078e-01 
-2.88242012e-01 2.46236056e-01 1.60628140e-01 1.79736957e-01 1.55116573e-01 1.14072822e-02] [ 1.39823645e-01 -4.74833660e-02 -1.16007909e-01 -4.84854072e-01 -7.84797251e-01 -8.40175867e-01 -9.12640333e-01 -6.42501950e-01 -9.12731364e-02 3.79415125e-01 3.81577551e-01 1.14015667e-02 -2.85692841e-01] [ 2.32921019e-01 1.63885280e-02 -2.43180618e-01 -5.78656614e-01 -4.89052981e-01 -4.66583192e-01 -4.03012067e-01 -3.47789019e-01 8.71706307e-02 3.43035430e-01 2.51727104e-01 -1.28808782e-01 -1.84145905e-02] [ 2.04875603e-01 1.22908555e-01 -2.42236927e-01 -2.13475287e-01 -2.79969245e-01 -1.30880579e-01 -1.43784687e-01 -1.32735595e-01 -2.02304468e-01 -3.29677999e-01 -5.92756510e-01 -6.48192286e-01 -3.11547309e-01] [ 3.95338058e-01 2.66325206e-01 -1.34070814e-01 3.58976036e-01 5.01518786e-01 6.04072988e-01 2.78840184e-01 1.90187559e-01 -2.59970009e-01 -3.60174060e-01 -5.05233288e-01 -2.58703083e-01 -5.29044271e-02] [-1.20428056e-01 -1.51870519e-01 2.60567535e-02 4.25296575e-01 3.71429801e-01 2.01736018e-01 -1.81117356e-01 1.05663262e-01 -1.12109132e-01 1.00248322e-01 -2.09115803e-01 3.78609225e-02 -1.65984243e-01] [-2.21371949e-01 -3.53898942e-01 6.02716282e-02 2.38116503e-01 2.09682226e-01 1.14358082e-01 -2.89349675e-01 1.50309071e-01 1.18568555e-01 5.54063857e-01 3.81301641e-01 2.10525066e-01 -1.93883762e-01] [-3.57831120e-01 -4.50813979e-01 5.50637301e-03 1.68340400e-01 -1.32852241e-01 -2.94177890e-01 -7.15631723e-01 -1.56305760e-01 1.59017220e-02 3.41357827e-01 5.86345606e-02 -5.97730353e-02 -1.77967288e-02] [-9.83725339e-02 -3.10065418e-01 1.81226283e-01 2.58203655e-01 -6.15644865e-02 -3.68540972e-01 -9.24510300e-01 -5.64384043e-01 -4.87897396e-01 -6.10134006e-02 4.96480130e-02 -3.25300023e-02 2.71232635e-01] [ 4.52203155e-01 2.41992772e-01 3.13293219e-01 2.82707810e-01 -8.35160315e-02 -3.57517928e-01 -6.65412605e-01 -2.92038977e-01 -1.67441398e-01 -2.58121155e-02 9.32726078e-03 3.00759166e-01 5.56860864e-01]] [[-2.70038992e-01 -2.89812386e-01 -4.00872588e-01 
-1.33357093e-01 -3.29626948e-02 -5.96726090e-02 -2.12418605e-02 2.10051715e-01 6.05216809e-02 -5.74466400e-02 -4.95255589e-01 -4.55918640e-01 -4.77179319e-01] [-6.21603541e-02 -2.73685634e-01 -4.19183522e-01 -1.35084569e-01 6.95120394e-02 2.36080781e-01 2.55627155e-01 4.00724530e-01 2.68116325e-01 1.58222944e-01 7.76189342e-02 3.49688791e-02 -2.11469650e-01] [ 5.19311190e-01 9.16111618e-02 -1.81964576e-01 -2.71921009e-01 2.17777714e-01 3.75514835e-01 4.62496340e-01 5.53090453e-01 2.77155787e-01 3.80975276e-01 3.74110669e-01 4.17469114e-01 -1.52475223e-01] [ 2.33000919e-01 -9.53533221e-03 2.89476991e-01 5.12868643e-01 7.67547369e-01 5.14297843e-01 2.18395770e-01 4.48258609e-01 2.17523068e-01 1.81759983e-01 6.49601892e-02 1.58385709e-01 -1.95544183e-01] [ 5.10569513e-01 3.80054504e-01 6.29829168e-01 7.37652659e-01 1.05638301e+00 4.47492272e-01 1.50091559e-01 4.72567081e-02 1.31174857e-02 -1.40997529e-01 -1.34990746e-02 -3.53634395e-02 7.76690245e-03] [-1.05075307e-01 -4.82924581e-02 2.97564834e-01 8.48259628e-01 1.07158887e+00 6.94553137e-01 1.00076050e-01 -3.39134000e-02 -8.24147165e-02 -2.93609858e-01 -3.32721174e-01 -5.26872613e-02 1.46080464e-01] [ 3.80794168e-01 4.22725827e-01 1.20538659e-01 3.19033623e-01 5.41323423e-01 5.78281820e-01 4.46392208e-01 1.80352181e-01 1.53819874e-01 -2.40275621e-01 -1.82554439e-01 2.02132203e-02 2.46177852e-01] [ 1.82084844e-01 3.14367294e-01 -9.91330668e-02 1.53811216e-01 2.28511870e-01 7.48663664e-01 6.26126289e-01 7.33178318e-01 1.82974547e-01 -1.30383506e-01 -4.60603833e-01 -1.26387805e-01 5.89633510e-02] [ 9.70753431e-02 6.18314683e-01 3.79117042e-01 3.26283336e-01 -1.26639903e-01 2.57603347e-01 2.59650916e-01 5.30713379e-01 2.00384259e-01 -1.08140193e-01 -2.74351507e-01 -2.89351016e-01 -1.42914742e-01] [-1.54898107e-01 3.50999981e-02 -2.31266439e-01 -2.63973445e-01 -5.14186025e-01 -2.61412784e-02 2.01410845e-01 3.74083072e-01 4.28348631e-02 -2.24654883e-01 -1.76479280e-01 -3.06750983e-01 -1.69854641e-01] [-1.48604676e-01 
-1.89724684e-01 -2.20035210e-01 -3.45237464e-01 -6.89403296e-01 -3.57747853e-01 -1.08404875e-01 2.43001297e-01 -9.59262848e-02 -4.26488668e-01 -3.29243928e-01 -2.58918226e-01 -1.43342495e-01] [-1.18599333e-01 -6.17241383e-01 -9.27545965e-01 -8.76081109e-01 -4.07063693e-01 -4.56111990e-02 5.03296494e-01 3.92844766e-01 1.64702758e-01 -3.37254286e-01 -3.44775349e-01 -9.37254131e-02 2.07419515e-01] [-2.84365386e-01 -7.93650568e-01 -6.43596530e-01 -4.78926986e-01 2.02992950e-02 1.95654184e-01 3.64655495e-01 3.04822952e-01 1.01896137e-01 -1.76046535e-01 -3.18750441e-01 -4.33928818e-02 6.32738471e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5637.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[1, 1]]() %5 : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::avg_pool2d(%x, %5, %4, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%6) fw_re: [[[[-1.27023011e-01 -1.42125696e-01 2.49811541e-02 -1.55741587e-01 -1.21235915e-01 -2.91000903e-01 -1.92427412e-01 -3.00504893e-01 -2.54322425e-03 7.72515833e-02 2.47517526e-02 -4.39647101e-02 -2.10497454e-01 -1.35964692e-01 -1.55505598e-01] [-1.83561072e-01 -3.66910875e-01 -3.26828063e-01 -3.65041405e-01 -2.72136688e-01 -2.85723329e-01 -3.57556999e-01 -4.07151878e-01 -1.62916794e-01 -2.06854213e-02 7.14555755e-02 -1.55060798e-01 -4.04331267e-01 -5.26887774e-01 -3.75264406e-01] [-1.70440778e-01 -5.22071540e-01 -2.82127798e-01 -3.89908142e-02 1.73946008e-01 -1.03645414e-01 -2.03042313e-01 1.14982529e-02 1.80921793e-01 2.04275951e-01 2.37250701e-01 5.58561422e-02 -2.95851648e-01 -6.65936232e-01 -5.25549233e-01] [-2.85376102e-01 -6.21836901e-01 -7.55762458e-01 -1.44121781e-01 6.95374906e-02 1.97021112e-01 -1.80595741e-01 1.54685318e-01 -8.85044634e-02 6.09408543e-02 6.76429421e-02 2.15943873e-01 -1.75269902e-01 -6.32995486e-01 -5.52553415e-01] [-5.19056201e-01 -6.77353203e-01 -6.47339404e-01 1.80870950e-01 3.28099877e-01 3.21136743e-01 -2.15642061e-02 2.59211600e-01 1.12282299e-02 8.10350999e-02 -1.88923389e-01 2.28129700e-01 -1.77982613e-01 -1.89325169e-01 -1.90861329e-01] [-6.34639919e-01 -5.78481376e-01 -7.72225142e-01 -1.22696824e-01 -1.91478670e-01 2.01206267e-01 -6.76242262e-02 1.14748798e-01 -1.20975509e-01 -1.32420659e-02 -2.85394639e-01 -1.29943475e-01 -3.33398044e-01 -3.50031525e-01 -2.61600167e-01] 
[-5.26439250e-01 -3.63476545e-01 -3.62544715e-01 -3.17858867e-02 -1.59984857e-01 1.11952489e-02 -1.05006300e-01 3.11005842e-02 3.93055417e-02 6.18258044e-02 -3.28141540e-01 -2.46300489e-01 -3.80967200e-01 -2.06277221e-01 -3.20606865e-02] [-4.64888930e-01 -3.25424701e-01 -3.34993422e-01 -9.56256092e-02 -2.71694660e-01 -2.07212523e-01 -3.05085540e-01 -3.71513702e-02 1.32833987e-01 2.23155469e-01 -1.27678722e-01 9.27752927e-02 4.57383655e-02 8.91060010e-02 -6.51417449e-02] [-3.00604194e-01 -3.30425918e-01 -4.91156317e-02 3.06114294e-02 6.37400150e-02 -5.00289984e-02 -4.69248220e-02 1.75525695e-01 3.91731650e-01 4.03354883e-01 2.43145362e-01 5.78114808e-01 6.68677807e-01 8.42992187e-01 4.53856885e-01] [-7.72783160e-03 -2.46184483e-01 4.22750041e-02 2.28045166e-01 1.69865400e-01 -1.55835092e-01 -4.54610676e-01 1.22933760e-01 1.39836371e-01 3.51231605e-01 1.24951564e-01 6.63490057e-01 7.17571199e-01 9.93208170e-01 4.67815727e-01] [ 5.90123951e-01 2.05077752e-01 8.11951980e-02 -3.00117210e-02 1.12626635e-01 3.35778557e-02 -3.97132456e-01 -1.65595293e-01 -1.75332889e-01 7.06733614e-02 6.63684607e-02 1.21498972e-01 3.94297719e-01 4.85975146e-01 3.42304826e-01] [ 4.77364421e-01 -3.81448679e-02 -1.73361912e-01 -8.11261013e-02 2.14915171e-01 -6.15808889e-02 -4.61524010e-01 -2.36364663e-01 -4.06826109e-01 -2.19501436e-01 -5.02504349e-01 -2.44843036e-01 -3.14224511e-01 -8.26644599e-02 -1.07145764e-01] [ 5.93708515e-01 1.56486034e-01 -2.13080585e-01 -3.59550834e-01 1.26216993e-01 6.19183369e-02 -2.64255792e-01 -4.10476893e-01 -5.01369238e-01 -1.50015146e-01 -1.53759003e-01 -1.95617050e-01 -3.08371127e-01 -3.22550029e-01 -1.56932592e-01] [ 1.49466142e-01 -1.47457138e-01 -1.23924389e-01 -2.89462328e-01 -1.31026208e-01 -1.94641620e-01 -2.15592667e-01 1.12620488e-01 -2.31597796e-01 6.01235218e-02 5.24711758e-02 8.86391401e-02 -4.05678719e-01 -2.98947215e-01 -9.91716087e-02] [ 1.77155793e-01 4.30113822e-02 -1.16713695e-01 -2.77288973e-01 -3.24666977e-01 -1.89988926e-01 -2.89947301e-01 
-1.36700077e-02 -2.43712321e-01 7.06577003e-02 2.93928742e-01 1.67302787e-01 -1.21807896e-01 -2.30842218e-01 -6.40720781e-03]] [[-3.44997235e-02 -1.02017988e-02 2.77651787e-01 5.52417457e-01 5.96588075e-01 3.54658455e-01 2.67101169e-01 8.88078511e-02 7.28029460e-02 2.55602062e-01 2.06498042e-01 1.48876265e-01 -5.70440479e-02 1.16839066e-01 1.75710708e-01] [ 1.26705039e-02 -1.43656537e-01 2.41314784e-01 2.03676417e-01 3.30341011e-01 1.20339684e-01 1.13900304e-01 -4.14645076e-02 -2.08073437e-01 1.79932639e-02 -7.78698400e-02 -1.32006615e-01 -1.24937348e-01 1.55940339e-01 2.88499027e-01] [ 2.19048202e-01 -6.57563331e-03 3.66462201e-01 1.19854875e-01 1.16490006e-01 -3.83004308e-01 -6.60074532e-01 -6.15294635e-01 -5.66672504e-01 -4.20625210e-01 -5.75708807e-01 -6.97419882e-01 -5.18742740e-01 -1.76379129e-01 7.73527920e-02] [ 1.20325483e-01 -8.67235661e-03 3.92506093e-01 -1.13221236e-01 -1.95205063e-01 -7.59910166e-01 -7.03837574e-01 -5.92472553e-01 -3.36314678e-01 -4.02008444e-01 -6.79066956e-01 -1.03643274e+00 -5.91771901e-01 -8.72373208e-02 2.97843337e-01] [ 1.11379780e-01 2.36872137e-01 4.20437574e-01 1.43398955e-01 3.81190106e-02 -5.26010811e-01 -5.54073155e-01 -5.89349866e-01 -1.39707029e-01 8.78548175e-02 -1.44321844e-01 -6.62929833e-01 -6.30515635e-01 -1.96974501e-01 2.03174278e-01] [ 7.95459375e-02 1.71144456e-01 8.94716829e-02 -3.10570627e-01 -3.78692091e-01 -4.60699618e-01 -1.37965068e-01 -2.70278245e-01 5.98602146e-02 2.53089145e-02 4.51109596e-02 -5.34200013e-01 -4.15541470e-01 -1.19264394e-01 2.98238307e-01] [-3.25868116e-03 1.25853002e-01 -3.21528204e-02 -1.48316383e-01 -1.33525312e-01 -1.77204341e-01 3.34527344e-02 -2.15412885e-01 1.15885325e-01 2.62591541e-02 4.60859925e-01 1.08645469e-01 3.09696257e-01 1.58069268e-01 3.66846025e-01] [-1.59802437e-02 1.60131395e-01 -2.19654646e-02 -7.24605173e-02 -1.08095199e-01 -9.84658375e-02 8.70082900e-02 -1.87006727e-01 3.03659290e-01 -7.53116794e-03 4.63876307e-01 -1.66286632e-01 2.09509775e-01 1.25707328e-01 
4.58135724e-01] [-4.77638990e-02 1.36679068e-01 -7.07570612e-02 -1.07839838e-01 6.01343587e-02 2.64723569e-01 5.56257069e-01 2.17182636e-01 4.45839405e-01 3.41669679e-01 4.69748825e-01 -4.33090553e-02 1.38429254e-01 1.72243997e-01 4.17013049e-01] [ 3.97274554e-01 5.89153051e-01 1.73390761e-01 -6.23878762e-02 -2.53626883e-01 5.29950187e-02 3.58574271e-01 3.01022083e-01 3.93376112e-01 3.68434787e-01 3.43546122e-01 -2.89132267e-01 -4.74999845e-01 -3.77907485e-01 -9.00329128e-02] [ 2.44820908e-01 7.99681842e-02 -8.29889923e-02 -4.87093776e-02 7.97046050e-02 2.75292993e-01 5.88510513e-01 7.12429285e-01 4.26911592e-01 2.45326698e-01 8.46587121e-02 -1.00142255e-01 -4.37603265e-01 -4.91242290e-01 -3.73578310e-01] [ 2.50020832e-01 2.17302352e-01 3.18389297e-01 4.57440585e-01 2.73804903e-01 8.85080248e-02 -1.64030254e-01 1.68716162e-01 2.42485687e-01 3.04328293e-01 3.35884005e-01 1.28785208e-01 -1.03687488e-01 -1.87432989e-01 -1.69951141e-01] [-2.41501093e-01 -4.53911155e-01 -1.19669223e-02 3.57238412e-01 4.11190897e-01 3.79958034e-01 -1.70663908e-01 2.21475393e-01 7.20517188e-02 4.00586486e-01 1.47457570e-01 1.63143072e-02 -2.18281671e-01 -3.19922622e-03 7.19821230e-02] [-3.85780931e-02 1.73008636e-01 4.43303406e-01 5.03879964e-01 5.42622060e-02 2.32153490e-01 -2.05697775e-01 2.09935084e-01 1.55975401e-01 5.58734834e-01 2.64723480e-01 -1.49228990e-01 -3.66434723e-01 -1.31057203e-01 7.14408755e-02] [-1.18242994e-01 -2.89516989e-03 1.74543694e-01 2.19218180e-01 -3.19246389e-02 3.03442001e-01 1.91689074e-01 4.71694797e-01 2.29369268e-01 4.60734218e-01 1.19717412e-01 -2.53556252e-01 -6.26469254e-01 -4.69654679e-01 -2.06795737e-01]] [[ 1.14428312e-01 -3.77730206e-02 -3.05003524e-01 -4.96257454e-01 -8.12849700e-01 -9.82534945e-01 -2.87319541e-01 1.70582056e-01 2.49762908e-01 -4.25964296e-01 -2.38653123e-01 -1.53054610e-01 2.18027160e-02 -4.44264188e-02 -4.27151881e-02] [ 1.18418671e-02 -3.40821385e-01 -5.86172640e-01 -6.40556395e-01 -9.33061481e-01 -1.15611649e+00 -3.74347478e-01 
4.56337720e-01 4.98007894e-01 -1.44872248e-01 -8.16990137e-02 -6.24878854e-02 1.63875610e-01 1.24129854e-01 2.23956645e-01] [ 3.82844657e-01 -8.93905014e-02 -5.22867560e-01 -5.09673417e-01 -3.37845147e-01 -3.58323991e-01 2.67369837e-01 6.32237673e-01 9.32389259e-01 2.50602245e-01 6.21241689e-01 3.58646840e-01 7.15579271e-01 4.20697570e-01 3.72115940e-01] [ 3.35759670e-01 -1.64453506e-01 -2.27172539e-01 -4.00197715e-01 9.84222889e-02 -9.06793475e-02 3.59058857e-01 5.46136916e-01 8.15511703e-01 2.51694530e-01 3.90264273e-01 3.64171356e-01 5.50133944e-01 6.84242249e-02 -4.33187746e-02] [ 5.89791000e-01 3.92272741e-01 2.14777365e-01 9.45630744e-02 5.54799855e-01 3.77951443e-01 5.13334334e-01 9.63446200e-02 4.99819547e-01 -2.00259149e-01 3.49437743e-01 2.75142968e-01 4.58078384e-01 -2.13875189e-01 -3.97904873e-01] [ 3.78926635e-01 3.33804578e-01 1.98979825e-01 2.60382354e-01 4.01872575e-01 1.68292701e-01 6.76111206e-02 -1.00864440e-01 2.35756323e-01 -1.88749328e-01 -1.29990712e-01 -1.38529047e-01 -2.43406937e-01 -4.54964280e-01 -5.83234608e-01] [ 3.18785757e-02 1.01033427e-01 2.49104965e-02 3.58906060e-01 2.43478984e-01 2.32575685e-01 -8.97722542e-02 -3.71077657e-01 -2.55817771e-01 -2.00756416e-01 1.28482163e-01 -9.20441747e-03 -4.57067907e-01 -4.90702629e-01 -5.29893041e-01] [ 4.46510725e-02 1.51857167e-01 3.90844233e-03 5.13318516e-02 -1.67619631e-01 -1.19731240e-01 -1.85618535e-01 -1.13598533e-01 -6.24378119e-03 3.43394689e-02 1.24502312e-02 4.87219021e-02 -4.71976757e-01 -4.49229866e-01 -5.79140007e-01] [-3.37550431e-01 -2.66547263e-01 -2.35198766e-01 -1.15909949e-01 -2.52566755e-01 -1.07366718e-01 -1.99559152e-01 1.80322193e-02 1.55535161e-01 1.15196787e-01 8.73542652e-02 -6.38770908e-02 -2.46045277e-01 -7.40228415e-01 -7.33859062e-01] [-1.73340291e-01 6.11964194e-03 -2.67608523e-01 -3.61754708e-02 -1.68374658e-01 -8.33706707e-02 1.68015346e-01 4.15332437e-01 5.34844279e-01 1.32567942e-01 -2.09101945e-01 -3.55269969e-01 -3.29813033e-01 -3.64796460e-01 
-1.73426226e-01] [-2.33342782e-01 -2.68326223e-01 -5.47690153e-01 -1.91624373e-01 -1.75360680e-01 5.36666363e-02 2.26059809e-01 3.24082494e-01 9.49620530e-02 -6.66401116e-03 -3.46398383e-01 -5.39693117e-01 -5.79260886e-01 -2.66570866e-01 1.18877292e-01] [-1.61168072e-02 -9.26917791e-02 -3.37325305e-01 -2.97042821e-02 3.00038904e-01 3.84750962e-01 6.51524603e-01 4.89457726e-01 6.74809143e-02 -1.28219705e-02 -3.37949395e-01 -3.14809889e-01 -4.99749035e-01 -2.26959914e-01 1.19481392e-01] [ 2.17990484e-02 -3.44867527e-01 -7.36951113e-01 -4.87136841e-01 -1.58799425e-01 1.46780565e-01 3.68890017e-01 5.32314599e-01 6.14908000e-04 -3.26503634e-01 -4.01152343e-01 -2.84525633e-01 -3.22512120e-01 -3.62511665e-01 -1.29014075e-01] [-4.37479019e-02 -2.55059570e-01 -7.95678139e-01 -3.00618887e-01 -1.17927589e-01 1.16023712e-01 1.84902892e-01 4.99780655e-01 6.29898831e-02 -2.14129865e-01 -4.27352786e-01 8.75590928e-03 -9.55027714e-03 -2.04344854e-01 -2.72976100e-01] [-5.51626412e-03 -1.00757711e-01 -6.47745371e-01 -2.03106537e-01 -3.70735168e-01 -1.03440858e-01 -1.54748559e-01 2.46509761e-01 -1.16097540e-01 -2.45759547e-01 -3.61921996e-01 6.18879795e-02 7.53025189e-02 1.31043285e-01 -2.03119293e-02]]]]; ov_res: [[[[-1.27023011e-01 -1.42125696e-01 2.49811541e-02 -1.55741587e-01 -1.21235915e-01 -2.91000903e-01 -1.92427412e-01 -3.00504893e-01 -2.54322425e-03 7.72515833e-02 2.47517526e-02 -4.39647101e-02 -2.10497454e-01 -1.35964692e-01 -1.55505598e-01] [-1.83561072e-01 -3.66910875e-01 -3.26828063e-01 -3.65041405e-01 -2.72136688e-01 -2.85723329e-01 -3.57556999e-01 -4.07151878e-01 -1.62916794e-01 -2.06854213e-02 7.14555755e-02 -1.55060798e-01 -4.04331267e-01 -5.26887774e-01 -3.75264406e-01] [-1.70440778e-01 -5.22071540e-01 -2.82127798e-01 -3.89908142e-02 1.73946008e-01 -1.03645414e-01 -2.03042313e-01 1.14982529e-02 1.80921793e-01 2.04275951e-01 2.37250701e-01 5.58561422e-02 -2.95851648e-01 -6.65936232e-01 -5.25549233e-01] [-2.85376102e-01 -6.21836901e-01 -7.55762458e-01 -1.44121781e-01 
6.95374906e-02 1.97021112e-01 -1.80595741e-01 1.54685318e-01 -8.85044634e-02 6.09408543e-02 6.76429421e-02 2.15943873e-01 -1.75269902e-01 -6.32995486e-01 -5.52553415e-01] [-5.19056201e-01 -6.77353203e-01 -6.47339404e-01 1.80870950e-01 3.28099877e-01 3.21136743e-01 -2.15642061e-02 2.59211600e-01 1.12282299e-02 8.10350999e-02 -1.88923389e-01 2.28129700e-01 -1.77982613e-01 -1.89325169e-01 -1.90861329e-01] [-6.34639919e-01 -5.78481376e-01 -7.72225142e-01 -1.22696824e-01 -1.91478670e-01 2.01206267e-01 -6.76242262e-02 1.14748798e-01 -1.20975509e-01 -1.32420659e-02 -2.85394639e-01 -1.29943475e-01 -3.33398044e-01 -3.50031525e-01 -2.61600167e-01] [-5.26439250e-01 -3.63476545e-01 -3.62544715e-01 -3.17858867e-02 -1.59984857e-01 1.11952489e-02 -1.05006300e-01 3.11005842e-02 3.93055417e-02 6.18258044e-02 -3.28141540e-01 -2.46300489e-01 -3.80967200e-01 -2.06277221e-01 -3.20606865e-02] [-4.64888930e-01 -3.25424701e-01 -3.34993422e-01 -9.56256092e-02 -2.71694660e-01 -2.07212523e-01 -3.05085540e-01 -3.71513702e-02 1.32833987e-01 2.23155469e-01 -1.27678722e-01 9.27752927e-02 4.57383655e-02 8.91060010e-02 -6.51417449e-02] [-3.00604194e-01 -3.30425918e-01 -4.91156317e-02 3.06114294e-02 6.37400150e-02 -5.00289984e-02 -4.69248220e-02 1.75525695e-01 3.91731650e-01 4.03354883e-01 2.43145362e-01 5.78114808e-01 6.68677807e-01 8.42992187e-01 4.53856885e-01] [-7.72783160e-03 -2.46184483e-01 4.22750041e-02 2.28045166e-01 1.69865400e-01 -1.55835092e-01 -4.54610676e-01 1.22933760e-01 1.39836371e-01 3.51231605e-01 1.24951564e-01 6.63490057e-01 7.17571199e-01 9.93208170e-01 4.67815727e-01] [ 5.90123951e-01 2.05077752e-01 8.11951980e-02 -3.00117210e-02 1.12626635e-01 3.35778557e-02 -3.97132456e-01 -1.65595293e-01 -1.75332889e-01 7.06733614e-02 6.63684607e-02 1.21498972e-01 3.94297719e-01 4.85975146e-01 3.42304826e-01] [ 4.77364421e-01 -3.81448679e-02 -1.73361912e-01 -8.11261013e-02 2.14915171e-01 -6.15808889e-02 -4.61524010e-01 -2.36364663e-01 -4.06826109e-01 -2.19501436e-01 -5.02504349e-01 
-2.44843036e-01 -3.14224511e-01 -8.26644599e-02 -1.07145764e-01] [ 5.93708515e-01 1.56486034e-01 -2.13080585e-01 -3.59550834e-01 1.26216993e-01 6.19183369e-02 -2.64255792e-01 -4.10476893e-01 -5.01369238e-01 -1.50015146e-01 -1.53759003e-01 -1.95617050e-01 -3.08371127e-01 -3.22550029e-01 -1.56932592e-01] [ 1.49466142e-01 -1.47457138e-01 -1.23924389e-01 -2.89462328e-01 -1.31026208e-01 -1.94641620e-01 -2.15592667e-01 1.12620488e-01 -2.31597796e-01 6.01235218e-02 5.24711758e-02 8.86391401e-02 -4.05678719e-01 -2.98947215e-01 -9.91716087e-02] [ 1.77155793e-01 4.30113822e-02 -1.16713695e-01 -2.77288973e-01 -3.24666977e-01 -1.89988926e-01 -2.89947301e-01 -1.36700077e-02 -2.43712321e-01 7.06577003e-02 2.93928742e-01 1.67302787e-01 -1.21807896e-01 -2.30842218e-01 -6.40720781e-03]] [[-3.44997235e-02 -1.02017988e-02 2.77651787e-01 5.52417457e-01 5.96588075e-01 3.54658455e-01 2.67101169e-01 8.88078511e-02 7.28029460e-02 2.55602062e-01 2.06498042e-01 1.48876265e-01 -5.70440479e-02 1.16839066e-01 1.75710708e-01] [ 1.26705039e-02 -1.43656537e-01 2.41314784e-01 2.03676417e-01 3.30341011e-01 1.20339684e-01 1.13900304e-01 -4.14645076e-02 -2.08073437e-01 1.79932639e-02 -7.78698400e-02 -1.32006615e-01 -1.24937348e-01 1.55940339e-01 2.88499027e-01] [ 2.19048202e-01 -6.57563331e-03 3.66462201e-01 1.19854875e-01 1.16490006e-01 -3.83004308e-01 -6.60074532e-01 -6.15294635e-01 -5.66672504e-01 -4.20625210e-01 -5.75708807e-01 -6.97419882e-01 -5.18742740e-01 -1.76379129e-01 7.73527920e-02] [ 1.20325483e-01 -8.67235661e-03 3.92506093e-01 -1.13221236e-01 -1.95205063e-01 -7.59910166e-01 -7.03837574e-01 -5.92472553e-01 -3.36314678e-01 -4.02008444e-01 -6.79066956e-01 -1.03643274e+00 -5.91771901e-01 -8.72373208e-02 2.97843337e-01] [ 1.11379780e-01 2.36872137e-01 4.20437574e-01 1.43398955e-01 3.81190106e-02 -5.26010811e-01 -5.54073155e-01 -5.89349866e-01 -1.39707029e-01 8.78548175e-02 -1.44321844e-01 -6.62929833e-01 -6.30515635e-01 -1.96974501e-01 2.03174278e-01] [ 7.95459375e-02 1.71144456e-01 
8.94716829e-02 -3.10570627e-01 -3.78692091e-01 -4.60699618e-01 -1.37965068e-01 -2.70278245e-01 5.98602146e-02 2.53089145e-02 4.51109596e-02 -5.34200013e-01 -4.15541470e-01 -1.19264394e-01 2.98238307e-01] [-3.25868116e-03 1.25853002e-01 -3.21528204e-02 -1.48316383e-01 -1.33525312e-01 -1.77204341e-01 3.34527344e-02 -2.15412885e-01 1.15885325e-01 2.62591541e-02 4.60859925e-01 1.08645469e-01 3.09696257e-01 1.58069268e-01 3.66846025e-01] [-1.59802437e-02 1.60131395e-01 -2.19654646e-02 -7.24605173e-02 -1.08095199e-01 -9.84658375e-02 8.70082900e-02 -1.87006727e-01 3.03659290e-01 -7.53116794e-03 4.63876307e-01 -1.66286632e-01 2.09509775e-01 1.25707328e-01 4.58135724e-01] [-4.77638990e-02 1.36679068e-01 -7.07570612e-02 -1.07839838e-01 6.01343587e-02 2.64723569e-01 5.56257069e-01 2.17182636e-01 4.45839405e-01 3.41669679e-01 4.69748825e-01 -4.33090553e-02 1.38429254e-01 1.72243997e-01 4.17013049e-01] [ 3.97274554e-01 5.89153051e-01 1.73390761e-01 -6.23878762e-02 -2.53626883e-01 5.29950187e-02 3.58574271e-01 3.01022083e-01 3.93376112e-01 3.68434787e-01 3.43546122e-01 -2.89132267e-01 -4.74999845e-01 -3.77907485e-01 -9.00329128e-02] [ 2.44820908e-01 7.99681842e-02 -8.29889923e-02 -4.87093776e-02 7.97046050e-02 2.75292993e-01 5.88510513e-01 7.12429285e-01 4.26911592e-01 2.45326698e-01 8.46587121e-02 -1.00142255e-01 -4.37603265e-01 -4.91242290e-01 -3.73578310e-01] [ 2.50020832e-01 2.17302352e-01 3.18389297e-01 4.57440585e-01 2.73804903e-01 8.85080248e-02 -1.64030254e-01 1.68716162e-01 2.42485687e-01 3.04328293e-01 3.35884005e-01 1.28785208e-01 -1.03687488e-01 -1.87432989e-01 -1.69951141e-01] [-2.41501093e-01 -4.53911155e-01 -1.19669223e-02 3.57238412e-01 4.11190897e-01 3.79958034e-01 -1.70663908e-01 2.21475393e-01 7.20517188e-02 4.00586486e-01 1.47457570e-01 1.63143072e-02 -2.18281671e-01 -3.19922622e-03 7.19821230e-02] [-3.85780931e-02 1.73008636e-01 4.43303406e-01 5.03879964e-01 5.42622060e-02 2.32153490e-01 -2.05697775e-01 2.09935084e-01 1.55975401e-01 5.58734834e-01 
2.64723480e-01 -1.49228990e-01 -3.66434723e-01 -1.31057203e-01 7.14408755e-02] [-1.18242994e-01 -2.89516989e-03 1.74543694e-01 2.19218180e-01 -3.19246389e-02 3.03442001e-01 1.91689074e-01 4.71694797e-01 2.29369268e-01 4.60734218e-01 1.19717412e-01 -2.53556252e-01 -6.26469254e-01 -4.69654679e-01 -2.06795737e-01]] [[ 1.14428312e-01 -3.77730206e-02 -3.05003524e-01 -4.96257454e-01 -8.12849700e-01 -9.82534945e-01 -2.87319541e-01 1.70582056e-01 2.49762908e-01 -4.25964296e-01 -2.38653123e-01 -1.53054610e-01 2.18027160e-02 -4.44264188e-02 -4.27151881e-02] [ 1.18418671e-02 -3.40821385e-01 -5.86172640e-01 -6.40556395e-01 -9.33061481e-01 -1.15611649e+00 -3.74347478e-01 4.56337720e-01 4.98007894e-01 -1.44872248e-01 -8.16990137e-02 -6.24878854e-02 1.63875610e-01 1.24129854e-01 2.23956645e-01] [ 3.82844657e-01 -8.93905014e-02 -5.22867560e-01 -5.09673417e-01 -3.37845147e-01 -3.58323991e-01 2.67369837e-01 6.32237673e-01 9.32389259e-01 2.50602245e-01 6.21241689e-01 3.58646840e-01 7.15579271e-01 4.20697570e-01 3.72115940e-01] [ 3.35759670e-01 -1.64453506e-01 -2.27172539e-01 -4.00197715e-01 9.84222889e-02 -9.06793475e-02 3.59058857e-01 5.46136916e-01 8.15511703e-01 2.51694530e-01 3.90264273e-01 3.64171356e-01 5.50133944e-01 6.84242249e-02 -4.33187746e-02] [ 5.89791000e-01 3.92272741e-01 2.14777365e-01 9.45630744e-02 5.54799855e-01 3.77951443e-01 5.13334334e-01 9.63446200e-02 4.99819547e-01 -2.00259149e-01 3.49437743e-01 2.75142968e-01 4.58078384e-01 -2.13875189e-01 -3.97904873e-01] [ 3.78926635e-01 3.33804578e-01 1.98979825e-01 2.60382354e-01 4.01872575e-01 1.68292701e-01 6.76111206e-02 -1.00864440e-01 2.35756323e-01 -1.88749328e-01 -1.29990712e-01 -1.38529047e-01 -2.43406937e-01 -4.54964280e-01 -5.83234608e-01] [ 3.18785757e-02 1.01033427e-01 2.49104965e-02 3.58906060e-01 2.43478984e-01 2.32575685e-01 -8.97722542e-02 -3.71077657e-01 -2.55817771e-01 -2.00756416e-01 1.28482163e-01 -9.20441747e-03 -4.57067907e-01 -4.90702629e-01 -5.29893041e-01] [ 4.46510725e-02 1.51857167e-01 
3.90844233e-03 5.13318516e-02 -1.67619631e-01 -1.19731240e-01 -1.85618535e-01 -1.13598533e-01 -6.24378119e-03 3.43394689e-02 1.24502312e-02 4.87219021e-02 -4.71976757e-01 -4.49229866e-01 -5.79140007e-01] [-3.37550431e-01 -2.66547263e-01 -2.35198766e-01 -1.15909949e-01 -2.52566755e-01 -1.07366718e-01 -1.99559152e-01 1.80322193e-02 1.55535161e-01 1.15196787e-01 8.73542652e-02 -6.38770908e-02 -2.46045277e-01 -7.40228415e-01 -7.33859062e-01] [-1.73340291e-01 6.11964194e-03 -2.67608523e-01 -3.61754708e-02 -1.68374658e-01 -8.33706707e-02 1.68015346e-01 4.15332437e-01 5.34844279e-01 1.32567942e-01 -2.09101945e-01 -3.55269969e-01 -3.29813033e-01 -3.64796460e-01 -1.73426226e-01] [-2.33342782e-01 -2.68326223e-01 -5.47690153e-01 -1.91624373e-01 -1.75360680e-01 5.36666363e-02 2.26059809e-01 3.24082494e-01 9.49620530e-02 -6.66401116e-03 -3.46398383e-01 -5.39693117e-01 -5.79260886e-01 -2.66570866e-01 1.18877292e-01] [-1.61168072e-02 -9.26917791e-02 -3.37325305e-01 -2.97042821e-02 3.00038904e-01 3.84750962e-01 6.51524603e-01 4.89457726e-01 6.74809143e-02 -1.28219705e-02 -3.37949395e-01 -3.14809889e-01 -4.99749035e-01 -2.26959914e-01 1.19481392e-01] [ 2.17990484e-02 -3.44867527e-01 -7.36951113e-01 -4.87136841e-01 -1.58799425e-01 1.46780565e-01 3.68890017e-01 5.32314599e-01 6.14908000e-04 -3.26503634e-01 -4.01152343e-01 -2.84525633e-01 -3.22512120e-01 -3.62511665e-01 -1.29014075e-01] [-4.37479019e-02 -2.55059570e-01 -7.95678139e-01 -3.00618887e-01 -1.17927589e-01 1.16023712e-01 1.84902892e-01 4.99780655e-01 6.29898831e-02 -2.14129865e-01 -4.27352786e-01 8.75590928e-03 -9.55027714e-03 -2.04344854e-01 -2.72976100e-01] [-5.51626412e-03 -1.00757711e-01 -6.47745371e-01 -2.03106537e-01 -3.70735168e-01 -1.03440858e-01 -1.54748559e-01 2.46509761e-01 -1.16097540e-01 -2.45759547e-01 -3.61921996e-01 6.18879795e-02 7.53025189e-02 1.31043285e-01 -2.03119293e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [0, 1]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5640.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[0, 1]]() %5 : int[] = prim::Constant[value=[1, 1]]() %6 : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) fw_re: [[[[ 0.1318339 0.05328061 -0.1006558 -0.3637917 -0.4007425 -0.27008054 -0.09272828 0.15257846 0.51868796 0.7232417 0.42432404 -0.03492179 -0.37377965 -0.27592772 -0.14684683] [-0.05546411 -0.24508034 -0.09590436 -0.21948041 -0.2861794 -0.2911967 -0.11516818 0.10725575 0.36377728 0.3484499 0.29549074 -0.05712178 -0.4244395 -0.19663128 -0.10223944] [-0.34529215 -0.47074795 -0.21943074 -0.35753196 -0.3065012 -0.0678859 0.30397666 0.1551297 0.04937398 -0.24627835 -0.3987635 -0.5225668 -0.5954198 -0.03519123 0.04418346] [-0.1781346 -0.4088455 -0.2589759 -0.5827667 -0.2334528 0.03163526 0.5565712 0.18983538 -0.01690993 -0.3935282 -0.5342064 -0.29873288 -0.3177786 0.29884207 0.11202386] [-0.38639018 -0.13838053 0.19145557 -0.21910077 -0.2790811 -0.05013834 0.81987673 0.1971311 -0.00581212 -0.55672944 -0.6163194 -0.4498398 -0.2771844 0.6659968 0.47956297] [-0.32291374 -0.09900787 0.31094512 -0.08423501 -0.01686718 0.3006544 0.98590004 0.29717955 -0.10471811 -0.25669894 -0.341993 -0.29520735 -0.45778704 0.348336 0.24791712] [-0.00953982 0.19132726 0.08016547 -0.18318243 -0.3241247 0.16260175 0.57196045 0.21177332 -0.41625786 -0.36940712 -0.52876246 -0.2924403 -0.7940746 -0.26226687 -0.28595525] [ 0.02742448 -0.09940721 -0.25347978 -0.5990985 -0.48549685 0.10097804 0.28503785 0.37530747 -0.40891722 -0.4027155 -0.706131 -0.21714902 -0.47947678 -0.2738191 -0.32722342] [ 0.08804549 
-0.03676718 -0.59743017 -0.48725706 -0.3302463 0.12918442 0.03442058 0.12839715 -0.18714494 -0.31526205 -0.52419305 -0.2594279 -0.08066836 -0.14973964 -0.22267863] [-0.32896268 -0.21952292 -0.14090773 0.1045196 0.05416826 0.26532057 0.38619652 0.3456542 0.2918487 -0.03588843 -0.07384035 -0.0716953 0.5488512 0.35526985 0.17517248] [-0.29478982 -0.11268537 -0.19446266 0.2645329 0.22194116 0.33179745 0.20836264 0.01972631 0.12267942 0.19271567 0.0604305 0.00768526 0.40227312 0.33153442 0.088241 ] [-0.28677756 0.01879887 0.28807834 0.6960801 0.57671857 0.4253306 0.20892413 0.06736049 0.22847578 0.25974286 0.04694445 0.11346664 0.36932346 0.20110536 -0.08795913] [-0.17715764 0.08800735 0.22224225 0.41259775 0.34781894 0.24865277 -0.07186271 -0.19384736 -0.00913376 0.05897975 -0.16142543 -0.01320112 -0.08029163 -0.01397875 -0.26416543]] [[-0.51049006 -0.41708693 -0.13247071 0.11168163 0.2993821 0.2766803 -0.2670206 -0.5562886 -0.40507266 -0.06447072 -0.11524516 -0.24302475 0.42592943 0.84259367 0.811524 ] [ 0.02192266 0.21833366 -0.0696311 -0.1158897 -0.07660844 0.14253688 -0.21597439 -0.436018 -0.5314653 -0.00317196 -0.22652046 -0.29424506 -0.2426131 0.3547795 0.42711353] [-0.22178677 -0.11802498 -0.39085338 -0.20166665 -0.33503968 -0.12089863 -0.45233 -0.30990154 -0.28094488 0.30331486 0.16002753 -0.03204819 -0.36196527 -0.30030438 -0.10882732] [-0.02648665 0.02443888 -0.43871817 -0.01091592 -0.10392486 0.11930057 -0.04887301 0.23279534 0.24314833 0.3203915 0.09482468 0.03888604 -0.30612 -0.44024903 -0.37354884] [-0.14117424 0.00999277 -0.38985258 0.12341854 -0.16556141 -0.07690436 -0.13329516 0.04125109 0.41011688 0.38049924 0.19480759 -0.04738623 -0.30227116 -0.3099684 -0.27206123] [-0.09606932 0.29894105 0.06139652 0.30941123 -0.18336977 0.0118461 0.00970186 0.18840045 0.4042823 0.44866064 -0.03833525 -0.20528378 -0.524358 -0.2455634 -0.30301383] [-0.18317157 0.01231391 -0.00659464 0.1203064 -0.3849646 -0.07712504 -0.25242686 0.01475949 0.394558 0.48765102 
0.01402484 -0.5989554 -0.8314241 -0.1400655 0.03810111] [-0.09894948 -0.14350885 -0.20328656 0.08702789 -0.07179746 0.3148529 -0.04725552 0.34773624 0.6801249 0.63390243 0.15473573 -0.33430597 -0.11756007 0.2907858 0.279014 ] [ 0.26767698 0.06415287 -0.03225536 0.2614442 0.37661642 0.66319877 0.17976004 0.45675814 0.6952419 0.6053963 0.6253455 0.35287932 0.5390944 0.4870386 0.33174142] [ 0.51893586 0.3598577 -0.05501792 0.03391255 0.37168857 0.6380864 0.31789684 0.41266388 0.47455454 0.23366468 0.12479753 0.4237651 0.6511078 0.6389099 0.17172529] [ 0.5399032 0.6331449 0.16051728 0.00673113 0.29709092 0.47029126 0.27994618 -0.05052434 0.14457464 0.08857708 0.26121947 0.62518084 0.6139636 0.6234454 0.11047843] [ 0.28571114 0.38989335 0.16632268 -0.19451469 -0.27959684 -0.07224187 -0.1017223 -0.20046969 -0.14176716 -0.32454786 -0.2041251 0.01093905 0.12163501 0.2285716 -0.0399209 ] [ 0.09653708 0.13207613 0.14207979 0.04050206 -0.07979335 0.03930822 -0.03801934 -0.14440976 -0.18725042 -0.2357801 0.04466981 -0.02563886 0.13587515 0.12445859 0.10407741]] [[-0.19501954 -0.3459488 -0.20789388 -0.23921289 -0.02893639 -0.31444037 -0.42373273 -0.5083489 -0.34588087 -0.00540452 -0.14329974 -0.09513892 -0.10520656 -0.21344456 -0.23197079] [ 0.04275423 -0.0276261 -0.19074665 -0.2168876 0.2009911 0.0959457 0.05857314 -0.45097712 -0.06043808 0.05828387 0.01055352 -0.23772417 -0.27181274 -0.09417653 -0.03858537] [-0.01626821 -0.03611416 0.08875424 0.12166868 0.4660157 0.09530538 0.0526838 -0.23578337 -0.1221894 0.0430603 -0.05344038 0.02347567 0.05285938 0.22967643 0.15897927] [-0.1557465 -0.20107853 0.01182644 -0.1191773 0.11993631 -0.0127612 0.08058506 -0.25451735 -0.23693755 -0.10800105 -0.04850533 0.004617 -0.09968184 0.0457446 0.00471608] [-0.27898276 -0.30430076 0.15193227 0.42214975 0.40345472 0.24111445 0.29339975 0.26419994 -0.25209865 -0.14292194 -0.03180097 0.16891056 -0.2038749 -0.20299649 -0.23824641] [-0.26819712 -0.33262926 -0.10167353 0.11520754 0.1378816 
0.16981928 0.3177147 0.15829319 -0.2342795 -0.06316862 0.27514815 0.3301374 -0.12721997 -0.19478111 -0.20747101] [ 0.05132944 -0.07710583 -0.21462539 0.18745147 0.34248015 0.51268685 0.32323933 0.20095736 -0.13140988 -0.14930707 0.15647869 0.0302848 0.13213995 -0.11606863 -0.11370022] [ 0.37185746 0.29868332 0.0916283 0.05756373 0.1152204 0.43675053 0.19103773 0.23722093 -0.14800937 -0.26605415 -0.21701103 -0.13564819 0.23797396 -0.21612303 -0.28194827] [ 0.43588242 0.40084273 0.44075754 0.31903815 0.12842867 0.29445162 -0.10109672 0.0681844 -0.07861253 0.03684466 0.34983575 0.25135392 0.3419639 -0.19189537 -0.2069686 ] [ 0.19163895 0.30217814 0.59869796 0.423485 0.2214866 -0.05900595 -0.26818404 -0.05669256 0.13228914 0.45410988 0.4454602 0.38937446 0.23607863 -0.05211661 -0.0923453 ] [ 0.09078048 0.32140738 0.7671912 0.47501394 0.20376486 -0.22199957 -0.27526808 -0.16349933 0.06004806 0.3634669 0.49074936 0.43710035 0.06731934 -0.05273284 -0.03690312] [ 0.06044002 0.2787348 0.5753957 -0.03504417 -0.30163604 -0.6106805 -0.52602077 -0.3321562 -0.04420156 0.19029732 -0.21978293 -0.36558336 -0.5012118 -0.44381672 -0.33326736] [ 0.40092543 0.5269893 0.5626536 -0.19131303 -0.48088613 -0.52982783 -0.2475881 -0.20688067 -0.17218637 -0.24085996 -0.5481761 -0.6996839 -0.7148268 -0.39111194 -0.10788742]]]]; ov_res: [[[[ 0.1318339 0.05328061 -0.1006558 -0.3637917 -0.4007425 -0.27008054 -0.09272828 0.15257846 0.51868796 0.7232417 0.42432404 -0.03492179 -0.37377965 -0.27592772 -0.14684683] [-0.05546411 -0.24508034 -0.09590436 -0.21948041 -0.2861794 -0.2911967 -0.11516818 0.10725575 0.36377728 0.3484499 0.29549074 -0.05712178 -0.4244395 -0.19663128 -0.10223944] [-0.34529215 -0.47074795 -0.21943074 -0.35753196 -0.3065012 -0.0678859 0.30397666 0.1551297 0.04937398 -0.24627835 -0.3987635 -0.5225668 -0.5954198 -0.03519123 0.04418346] [-0.1781346 -0.4088455 -0.2589759 -0.5827667 -0.2334528 0.03163526 0.5565712 0.18983538 -0.01690993 -0.3935282 -0.5342064 -0.29873288 -0.3177786 
0.29884207 0.11202386] [-0.38639018 -0.13838053 0.19145557 -0.21910077 -0.2790811 -0.05013834 0.81987673 0.1971311 -0.00581212 -0.55672944 -0.6163194 -0.4498398 -0.2771844 0.6659968 0.47956297] [-0.32291374 -0.09900787 0.31094512 -0.08423501 -0.01686718 0.3006544 0.98590004 0.29717955 -0.10471811 -0.25669894 -0.341993 -0.29520735 -0.45778704 0.348336 0.24791712] [-0.00953982 0.19132726 0.08016547 -0.18318243 -0.3241247 0.16260175 0.57196045 0.21177332 -0.41625786 -0.36940712 -0.52876246 -0.2924403 -0.7940746 -0.26226687 -0.28595525] [ 0.02742448 -0.09940721 -0.25347978 -0.5990985 -0.48549685 0.10097804 0.28503785 0.37530747 -0.40891722 -0.4027155 -0.706131 -0.21714902 -0.47947678 -0.2738191 -0.32722342] [ 0.08804549 -0.03676718 -0.59743017 -0.48725706 -0.3302463 0.12918442 0.03442058 0.12839715 -0.18714494 -0.31526205 -0.52419305 -0.2594279 -0.08066836 -0.14973964 -0.22267863] [-0.32896268 -0.21952292 -0.14090773 0.1045196 0.05416826 0.26532057 0.38619652 0.3456542 0.2918487 -0.03588843 -0.07384035 -0.0716953 0.5488512 0.35526985 0.17517248] [-0.29478982 -0.11268537 -0.19446266 0.2645329 0.22194116 0.33179745 0.20836264 0.01972631 0.12267942 0.19271567 0.0604305 0.00768526 0.40227312 0.33153442 0.088241 ] [-0.28677756 0.01879887 0.28807834 0.6960801 0.57671857 0.4253306 0.20892413 0.06736049 0.22847578 0.25974286 0.04694445 0.11346664 0.36932346 0.20110536 -0.08795913] [-0.17715764 0.08800735 0.22224225 0.41259775 0.34781894 0.24865277 -0.07186271 -0.19384736 -0.00913376 0.05897975 -0.16142543 -0.01320112 -0.08029163 -0.01397875 -0.26416543]] [[-0.51049006 -0.41708693 -0.13247071 0.11168163 0.2993821 0.2766803 -0.2670206 -0.5562886 -0.40507266 -0.06447072 -0.11524516 -0.24302475 0.42592943 0.84259367 0.811524 ] [ 0.02192266 0.21833366 -0.0696311 -0.1158897 -0.07660844 0.14253688 -0.21597439 -0.436018 -0.5314653 -0.00317196 -0.22652046 -0.29424506 -0.2426131 0.3547795 0.42711353] [-0.22178677 -0.11802498 -0.39085338 -0.20166665 -0.33503968 -0.12089863 -0.45233 
-0.30990154 -0.28094488 0.30331486 0.16002753 -0.03204819 -0.36196527 -0.30030438 -0.10882732] [-0.02648665 0.02443888 -0.43871817 -0.01091592 -0.10392486 0.11930057 -0.04887301 0.23279534 0.24314833 0.3203915 0.09482468 0.03888604 -0.30612 -0.44024903 -0.37354884] [-0.14117424 0.00999277 -0.38985258 0.12341854 -0.16556141 -0.07690436 -0.13329516 0.04125109 0.41011688 0.38049924 0.19480759 -0.04738623 -0.30227116 -0.3099684 -0.27206123] [-0.09606932 0.29894105 0.06139652 0.30941123 -0.18336977 0.0118461 0.00970186 0.18840045 0.4042823 0.44866064 -0.03833525 -0.20528378 -0.524358 -0.2455634 -0.30301383] [-0.18317157 0.01231391 -0.00659464 0.1203064 -0.3849646 -0.07712504 -0.25242686 0.01475949 0.394558 0.48765102 0.01402484 -0.5989554 -0.8314241 -0.1400655 0.03810111] [-0.09894948 -0.14350885 -0.20328656 0.08702789 -0.07179746 0.3148529 -0.04725552 0.34773624 0.6801249 0.63390243 0.15473573 -0.33430597 -0.11756007 0.2907858 0.279014 ] [ 0.26767698 0.06415287 -0.03225536 0.2614442 0.37661642 0.66319877 0.17976004 0.45675814 0.6952419 0.6053963 0.6253455 0.35287932 0.5390944 0.4870386 0.33174142] [ 0.51893586 0.3598577 -0.05501792 0.03391255 0.37168857 0.6380864 0.31789684 0.41266388 0.47455454 0.23366468 0.12479753 0.4237651 0.6511078 0.6389099 0.17172529] [ 0.5399032 0.6331449 0.16051728 0.00673113 0.29709092 0.47029126 0.27994618 -0.05052434 0.14457464 0.08857708 0.26121947 0.62518084 0.6139636 0.6234454 0.11047843] [ 0.28571114 0.38989335 0.16632268 -0.19451469 -0.27959684 -0.07224187 -0.1017223 -0.20046969 -0.14176716 -0.32454786 -0.2041251 0.01093905 0.12163501 0.2285716 -0.0399209 ] [ 0.09653708 0.13207613 0.14207979 0.04050206 -0.07979335 0.03930822 -0.03801934 -0.14440976 -0.18725042 -0.2357801 0.04466981 -0.02563886 0.13587515 0.12445859 0.10407741]] [[-0.19501954 -0.3459488 -0.20789388 -0.23921289 -0.02893639 -0.31444037 -0.42373273 -0.5083489 -0.34588087 -0.00540452 -0.14329974 -0.09513892 -0.10520656 -0.21344456 -0.23197079] [ 0.04275423 -0.0276261 
-0.19074665 -0.2168876 0.2009911 0.0959457 0.05857314 -0.45097712 -0.06043808 0.05828387 0.01055352 -0.23772417 -0.27181274 -0.09417653 -0.03858537] [-0.01626821 -0.03611416 0.08875424 0.12166868 0.4660157 0.09530538 0.0526838 -0.23578337 -0.1221894 0.0430603 -0.05344038 0.02347567 0.05285938 0.22967643 0.15897927] [-0.1557465 -0.20107853 0.01182644 -0.1191773 0.11993631 -0.0127612 0.08058506 -0.25451735 -0.23693755 -0.10800105 -0.04850533 0.004617 -0.09968184 0.0457446 0.00471608] [-0.27898276 -0.30430076 0.15193227 0.42214975 0.40345472 0.24111445 0.29339975 0.26419994 -0.25209865 -0.14292194 -0.03180097 0.16891056 -0.2038749 -0.20299649 -0.23824641] [-0.26819712 -0.33262926 -0.10167353 0.11520754 0.1378816 0.16981928 0.3177147 0.15829319 -0.2342795 -0.06316862 0.27514815 0.3301374 -0.12721997 -0.19478111 -0.20747101] [ 0.05132944 -0.07710583 -0.21462539 0.18745147 0.34248015 0.51268685 0.32323933 0.20095736 -0.13140988 -0.14930707 0.15647869 0.0302848 0.13213995 -0.11606863 -0.11370022] [ 0.37185746 0.29868332 0.0916283 0.05756373 0.1152204 0.43675053 0.19103773 0.23722093 -0.14800937 -0.26605415 -0.21701103 -0.13564819 0.23797396 -0.21612303 -0.28194827] [ 0.43588242 0.40084273 0.44075754 0.31903815 0.12842867 0.29445162 -0.10109672 0.0681844 -0.07861253 0.03684466 0.34983575 0.25135392 0.3419639 -0.19189537 -0.2069686 ] [ 0.19163895 0.30217814 0.59869796 0.423485 0.2214866 -0.05900595 -0.26818404 -0.05669256 0.13228914 0.45410988 0.4454602 0.38937446 0.23607863 -0.05211661 -0.0923453 ] [ 0.09078048 0.32140738 0.7671912 0.47501394 0.20376486 -0.22199957 -0.27526808 -0.16349933 0.06004806 0.3634669 0.49074936 0.43710035 0.06731934 -0.05273284 -0.03690312] [ 0.06044002 0.2787348 0.5753957 -0.03504417 -0.30163604 -0.6106805 -0.52602077 -0.3321562 -0.04420156 0.19029732 -0.21978293 -0.36558336 -0.5012118 -0.44381672 -0.33326736] [ 0.40092543 0.5269893 0.5626536 -0.19131303 -0.48088613 -0.52982783 -0.2475881 -0.20688067 -0.17218637 -0.24085996 -0.5481761 -0.6996839 
-0.7148268 -0.39111194 -0.10788742]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [1, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5643.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[1, 0]]() %5 : int[] = prim::Constant[value=[1, 1]]() %6 : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) nstance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES 
or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: fw_re: [[[[ 4.56552297e-01 2.98359036e-01 1.14757471e-01 3.50324780e-01 1.97153330e-01 1.09432861e-01 -1.98773876e-01 -6.18590638e-02 1.02749757e-01 5.86928278e-02 -9.10640433e-02 -5.69037557e-01 -5.23011565e-01] [ 2.93223739e-01 3.13099474e-01 1.80207297e-01 2.14826509e-01 2.14160994e-01 6.07473701e-02 -3.47243412e-03 -2.72007585e-02 2.94117033e-01 1.27857387e-01 1.12766236e-01 -4.34324294e-01 -5.84592283e-01] [ 1.19839959e-01 2.46152237e-01 5.04628271e-02 -1.36740416e-01 -5.27385622e-02 1.45383671e-01 -1.25462310e-02 -1.86287582e-01 -1.06092401e-01 9.44891125e-02 2.51638740e-01 1.16949715e-01 -6.72788993e-02] [-1.25692546e-01 8.65742937e-02 1.34253010e-01 -2.96812624e-01 -1.65377140e-01 -9.22691822e-03 3.55093002e-01 3.63462090e-01 3.32922399e-01 2.62549996e-01 3.39920253e-01 4.43188399e-01 2.81511903e-01] [-3.00130785e-01 -6.56128749e-02 5.80661632e-02 -2.36272678e-01 -5.02500236e-01 -3.52820039e-01 5.09449765e-02 4.36811864e-01 3.75351697e-01 3.19374293e-01 2.10082456e-01 3.51757050e-01 6.44388735e-01] [-6.19100153e-01 -3.37259173e-01 -1.32732466e-01 -5.30399866e-02 -2.12380111e-01 -2.23444059e-01 -8.04600567e-02 2.50186771e-01 9.20133367e-02 -5.94071532e-03 -2.30852291e-01 -2.14512810e-01 4.40001160e-01] [-7.48917758e-01 -4.02559847e-01 -3.71215582e-01 -3.22663933e-02 
-1.02223501e-01 -1.58393443e-01 -2.82657474e-01 -1.07061595e-01 2.09183082e-01 3.07770580e-01 3.91587853e-01 1.38598561e-01 5.41336775e-01] [-1.05488390e-01 2.78867990e-01 -7.58044347e-02 2.15604603e-01 1.40209615e-01 2.94144154e-01 -1.17635161e-01 -1.23449244e-01 1.51991338e-01 1.86549917e-01 3.78406316e-01 1.52260596e-02 2.68756926e-01] [ 2.04967424e-01 6.55711412e-01 -2.33972743e-02 2.13416651e-01 6.17198162e-02 1.44987881e-01 -2.48628333e-02 -1.17737003e-01 3.78364325e-01 1.55821696e-01 5.22356689e-01 3.40707719e-01 2.16621459e-01] [-6.02089018e-02 3.56821746e-01 -2.18990266e-01 -9.93929580e-02 -1.87348425e-01 -6.02923371e-02 -2.13390812e-01 -4.42750186e-01 -1.90115273e-01 -2.80560344e-01 -1.34197921e-01 -1.39525998e-02 1.72849894e-01] [-1.29023492e-01 -6.69299662e-02 -2.49628246e-01 -3.27366889e-02 1.89434692e-01 -6.07531331e-02 -2.06609845e-01 -7.17136502e-01 -4.91616368e-01 -4.00060743e-01 -9.09799039e-02 2.30026647e-01 3.55878294e-01] [-5.05713701e-01 -2.52611965e-01 -1.29513010e-01 9.74324644e-02 2.08222955e-01 -3.69727798e-02 -2.96439528e-01 -5.73200464e-01 -2.26486787e-01 -3.41719985e-02 -2.93866862e-02 1.56338006e-01 1.43160209e-01] [-8.22816603e-03 8.37364718e-02 3.97080153e-01 4.73110616e-01 3.85822892e-01 1.66264549e-02 -1.98661257e-02 4.39338163e-02 1.21771656e-01 7.43701011e-02 -7.77295679e-02 2.22452804e-01 8.80006477e-02] [-2.49639571e-01 -2.96921194e-01 -1.02556884e-01 -3.58412452e-02 8.55215043e-02 -4.88449037e-02 -3.85546722e-02 3.19841862e-01 4.65938449e-01 2.66288370e-01 -1.87532321e-01 -2.05435619e-01 -4.33393627e-01] [ 1.10623054e-01 -2.17682794e-01 -8.40425268e-02 -2.13844135e-01 -4.81028482e-02 -5.98466545e-02 1.08941898e-01 3.79909992e-01 2.45465860e-01 -8.74883612e-04 -2.76456773e-01 -2.63488680e-01 -2.43068591e-01]] [[ 2.12012500e-01 3.18744868e-01 4.49315399e-01 -7.80312940e-02 -7.68515691e-02 -3.39636803e-01 1.01569660e-01 -1.45632133e-01 3.21876854e-01 3.45283359e-01 5.08579949e-04 1.42338589e-01 -7.34512657e-02] [ 7.48638809e-01 
7.55822420e-01 7.15687513e-01 -1.84516385e-02 4.15849015e-02 7.69406855e-02 4.13644522e-01 7.28902146e-02 6.11028910e-01 4.27919269e-01 4.47442569e-02 -9.34595764e-02 -1.32503569e-01] [ 7.23382771e-01 6.58910811e-01 6.75281048e-01 2.94189658e-02 4.97216173e-03 1.37231722e-01 2.84276724e-01 1.36073425e-01 5.39975286e-01 2.17245817e-01 -2.55559348e-02 -1.84323132e-01 5.33372425e-02] [ 2.23403454e-01 1.60484508e-01 1.75998479e-01 -9.00562182e-02 7.29360199e-03 3.71372938e-01 2.79871136e-01 3.36847454e-01 3.14765364e-01 -4.86096665e-02 -3.08989167e-01 -4.90406990e-01 -3.55477519e-02] [-6.84635818e-01 -8.54252100e-01 -5.82208693e-01 -5.44513166e-01 -1.46819770e-01 -4.32475582e-02 -3.12218405e-02 5.82189299e-02 -1.09362401e-01 8.86640325e-03 -2.25559950e-01 -4.57211845e-02 3.36079687e-01] [-8.51879776e-01 -1.05093062e+00 -7.25950360e-01 -7.08854616e-01 -3.39722455e-01 -1.96871087e-01 4.71478142e-02 5.24578094e-02 -1.95804343e-01 -1.06035747e-01 -2.32423171e-01 4.16449495e-02 4.58678126e-01] [-4.15240020e-01 -8.21932554e-01 -7.39483356e-01 -8.41998756e-01 -4.03477073e-01 -2.31462240e-01 1.33337289e-01 1.76016837e-01 1.50310457e-01 2.69973606e-01 -8.39236826e-02 2.13718534e-01 6.76008940e-01] [-2.57359892e-02 -9.79560241e-02 1.20912381e-01 -1.17705345e-01 -3.32131684e-01 -2.64723361e-01 9.20101106e-02 2.91815877e-01 3.68911564e-01 3.11071724e-01 -1.17741756e-01 1.18350692e-01 2.24439383e-01] [ 8.96505937e-02 9.58970785e-02 4.08044785e-01 8.20219666e-02 -1.04112580e-01 -2.74395198e-01 -1.98387221e-01 -6.23356774e-02 2.09989384e-01 2.10180193e-01 7.41509497e-02 1.88219577e-01 2.62210518e-01] [ 9.09166709e-02 2.28910863e-01 4.47736412e-01 1.02674156e-01 -2.37659141e-01 -1.72015324e-01 -1.80342048e-01 -2.16667235e-01 7.33575150e-02 1.65592134e-01 4.05955583e-01 2.97880828e-01 1.88075200e-01] [ 2.70789325e-01 3.13637406e-01 3.13682377e-01 -1.58944473e-01 -5.36629140e-01 -2.52683371e-01 -1.43170938e-01 -1.61548302e-01 -5.65584749e-02 -1.30261198e-01 1.69459462e-01 3.99178453e-02 
2.71025509e-01] [ 5.31534910e-01 6.13765359e-01 5.85147858e-01 -1.88260466e-01 -4.11767781e-01 -1.20005637e-01 2.45339841e-01 1.74624085e-01 -1.69098377e-03 -2.79879332e-01 -2.55685359e-01 -3.52343619e-01 -4.63908687e-02] [ 4.26063687e-01 3.03761333e-01 3.59906137e-01 -9.27189663e-02 -2.70630062e-01 -3.03628236e-01 -7.31277764e-02 -1.13432661e-01 -3.04964900e-01 -5.52446485e-01 -3.67675036e-01 -5.47767758e-01 -1.68767825e-01] [ 1.64167479e-01 1.55828059e-01 2.41683647e-01 -4.77854125e-02 1.85983045e-05 -2.90151685e-01 -2.08096534e-01 -3.96742910e-01 -4.05551791e-01 -3.33870620e-01 -5.37728854e-02 -1.66176166e-02 1.48281649e-01] [-1.29209518e-01 -1.44417256e-01 -1.48491830e-01 4.72129136e-02 -4.94417623e-02 -2.49547064e-01 -3.81448239e-01 -4.16546613e-01 -2.94027686e-01 3.17754485e-02 2.89617896e-01 3.35852355e-01 2.20064953e-01]] [[-5.10978177e-02 6.41341656e-02 -5.54774523e-01 -4.89843041e-01 -5.92736840e-01 -4.30223912e-01 -3.70560139e-01 4.74930257e-02 2.95997500e-01 3.28566432e-01 1.48956001e-01 3.50941777e-01 2.30506957e-01] [-1.67624712e-01 2.79334426e-01 -7.22550631e-01 -4.58919585e-01 -7.62918055e-01 -2.63415605e-01 -4.05764401e-01 -6.73634037e-02 -6.61334842e-02 1.57530963e-01 5.62430099e-02 4.02748317e-01 2.72303998e-01] [-4.43236470e-01 7.66447838e-03 -7.04526782e-01 -1.62618518e-01 -4.45070803e-01 5.34456633e-02 -9.81789902e-02 9.60437357e-02 -5.77432588e-02 8.21628422e-02 -2.70550549e-01 -2.25607365e-01 -7.75124505e-02] [-1.84626430e-02 2.00286210e-01 -2.56311268e-01 9.41079259e-02 2.47048531e-02 4.49681401e-01 1.76093206e-01 1.24127544e-01 -3.39316279e-01 -2.48754561e-01 -4.56014693e-01 -1.56151474e-01 2.12308511e-01] [-7.43579566e-02 -4.06694412e-03 6.43383637e-02 3.32682610e-01 2.72550195e-01 3.10126662e-01 1.89922035e-01 2.42546976e-01 -7.13943765e-02 -1.96616471e-01 -5.11474013e-01 -3.57750058e-01 5.46995066e-02] [ 5.00461876e-01 3.00451387e-02 1.58249184e-01 2.83933699e-01 5.87263465e-01 5.44429481e-01 3.42060596e-01 2.99638867e-01 
-4.94826473e-02 -8.28806460e-02 -2.50600338e-01 -1.06657594e-01 -1.10954069e-01] [ 1.39424905e-01 -3.41656022e-02 3.47466558e-01 3.94479662e-01 6.50686145e-01 7.26907015e-01 5.93881428e-01 4.81962740e-01 -1.56774055e-02 -2.11183116e-01 -4.09876764e-01 -4.44619805e-01 -3.13140213e-01] [ 3.09907198e-01 -9.77481306e-02 1.80653274e-01 3.72779399e-01 8.65484118e-01 9.10702586e-01 7.23619878e-01 5.04796028e-01 8.88096392e-02 -3.40505958e-01 -4.97296453e-01 -4.48479652e-01 -1.62824273e-01] [-3.97457667e-02 -9.07554254e-02 -1.04889143e-02 4.54721659e-01 4.89735931e-01 6.97654963e-01 5.19635081e-01 4.16572571e-01 1.38567552e-01 -2.30883062e-01 -2.37430841e-01 6.07442185e-02 3.72016072e-01] [ 2.61674658e-03 -7.56233037e-02 -6.03737645e-02 5.90981305e-01 4.22947526e-01 5.56314290e-01 5.04341662e-01 5.05456209e-01 4.18308765e-01 -2.04848871e-02 4.83595990e-02 4.34509486e-01 4.29667532e-01] [ 1.51756346e-01 -1.50978193e-01 -1.04832083e-01 -7.58060738e-02 9.26732197e-02 2.66915649e-01 4.44096088e-01 3.25706095e-01 4.55470771e-01 2.26055309e-01 4.07831252e-01 5.18481851e-01 3.74663800e-01] [-3.41160968e-02 -2.37397775e-01 1.00164205e-01 8.89216959e-02 3.60845804e-01 3.70653957e-01 3.89409631e-01 8.32305923e-02 2.04993486e-01 2.48705700e-01 4.62599605e-01 2.39349097e-01 -1.49542302e-01] [ 1.64051712e-01 -3.22893053e-01 -2.05878884e-01 -4.54340786e-01 -1.16951279e-01 -8.09406862e-02 -4.06185575e-02 -1.61899343e-01 -1.92689169e-02 1.29479334e-01 4.56071436e-01 5.89953810e-02 -1.99567363e-01] [ 3.54599237e-01 -1.92233801e-01 -1.98061422e-01 -1.90488219e-01 5.35956174e-02 6.12049773e-02 -1.92300677e-02 -8.17815363e-02 2.40565669e-02 2.44569704e-01 4.35680628e-01 2.15037286e-01 1.58904325e-02] [ 4.83372629e-01 -9.37969144e-03 -1.69561207e-01 -3.15243214e-01 -9.61973965e-02 -9.80330165e-03 2.78335419e-02 -3.09375729e-02 -2.10753009e-02 -3.33709791e-02 1.12247147e-01 7.64017403e-02 2.01875851e-01]]]]; ov_res: [[[[ 4.56552297e-01 2.98359036e-01 1.14757471e-01 3.50324780e-01 1.97153330e-01 
1.09432861e-01 -1.98773876e-01 -6.18590638e-02 1.02749757e-01 5.86928278e-02 -9.10640433e-02 -5.69037557e-01 -5.23011565e-01] [ 2.93223739e-01 3.13099474e-01 1.80207297e-01 2.14826509e-01 2.14160994e-01 6.07473701e-02 -3.47243412e-03 -2.72007585e-02 2.94117033e-01 1.27857387e-01 1.12766236e-01 -4.34324294e-01 -5.84592283e-01] [ 1.19839959e-01 2.46152237e-01 5.04628271e-02 -1.36740416e-01 -5.27385622e-02 1.45383671e-01 -1.25462310e-02 -1.86287582e-01 -1.06092401e-01 9.44891125e-02 2.51638740e-01 1.16949715e-01 -6.72788993e-02] [-1.25692546e-01 8.65742937e-02 1.34253010e-01 -2.96812624e-01 -1.65377140e-01 -9.22691822e-03 3.55093002e-01 3.63462090e-01 3.32922399e-01 2.62549996e-01 3.39920253e-01 4.43188399e-01 2.81511903e-01] [-3.00130785e-01 -6.56128749e-02 5.80661632e-02 -2.36272678e-01 -5.02500236e-01 -3.52820039e-01 5.09449765e-02 4.36811864e-01 3.75351697e-01 3.19374293e-01 2.10082456e-01 3.51757050e-01 6.44388735e-01] [-6.19100153e-01 -3.37259173e-01 -1.32732466e-01 -5.30399866e-02 -2.12380111e-01 -2.23444059e-01 -8.04600567e-02 2.50186771e-01 9.20133367e-02 -5.94071532e-03 -2.30852291e-01 -2.14512810e-01 4.40001160e-01] [-7.48917758e-01 -4.02559847e-01 -3.71215582e-01 -3.22663933e-02 -1.02223501e-01 -1.58393443e-01 -2.82657474e-01 -1.07061595e-01 2.09183082e-01 3.07770580e-01 3.91587853e-01 1.38598561e-01 5.41336775e-01] [-1.05488390e-01 2.78867990e-01 -7.58044347e-02 2.15604603e-01 1.40209615e-01 2.94144154e-01 -1.17635161e-01 -1.23449244e-01 1.51991338e-01 1.86549917e-01 3.78406316e-01 1.52260596e-02 2.68756926e-01] [ 2.04967424e-01 6.55711412e-01 -2.33972743e-02 2.13416651e-01 6.17198162e-02 1.44987881e-01 -2.48628333e-02 -1.17737003e-01 3.78364325e-01 1.55821696e-01 5.22356689e-01 3.40707719e-01 2.16621459e-01] [-6.02089018e-02 3.56821746e-01 -2.18990266e-01 -9.93929580e-02 -1.87348425e-01 -6.02923371e-02 -2.13390812e-01 -4.42750186e-01 -1.90115273e-01 -2.80560344e-01 -1.34197921e-01 -1.39525998e-02 1.72849894e-01] [-1.29023492e-01 -6.69299662e-02 
-2.49628246e-01 -3.27366889e-02 1.89434692e-01 -6.07531331e-02 -2.06609845e-01 -7.17136502e-01 -4.91616368e-01 -4.00060743e-01 -9.09799039e-02 2.30026647e-01 3.55878294e-01] [-5.05713701e-01 -2.52611965e-01 -1.29513010e-01 9.74324644e-02 2.08222955e-01 -3.69727798e-02 -2.96439528e-01 -5.73200464e-01 -2.26486787e-01 -3.41719985e-02 -2.93866862e-02 1.56338006e-01 1.43160209e-01] [-8.22816603e-03 8.37364718e-02 3.97080153e-01 4.73110616e-01 3.85822892e-01 1.66264549e-02 -1.98661257e-02 4.39338163e-02 1.21771656e-01 7.43701011e-02 -7.77295679e-02 2.22452804e-01 8.80006477e-02] [-2.49639571e-01 -2.96921194e-01 -1.02556884e-01 -3.58412452e-02 8.55215043e-02 -4.88449037e-02 -3.85546722e-02 3.19841862e-01 4.65938449e-01 2.66288370e-01 -1.87532321e-01 -2.05435619e-01 -4.33393627e-01] [ 1.10623054e-01 -2.17682794e-01 -8.40425268e-02 -2.13844135e-01 -4.81028482e-02 -5.98466545e-02 1.08941898e-01 3.79909992e-01 2.45465860e-01 -8.74883612e-04 -2.76456773e-01 -2.63488680e-01 -2.43068591e-01]] [[ 2.12012500e-01 3.18744868e-01 4.49315399e-01 -7.80312940e-02 -7.68515691e-02 -3.39636803e-01 1.01569660e-01 -1.45632133e-01 3.21876854e-01 3.45283359e-01 5.08579949e-04 1.42338589e-01 -7.34512657e-02] [ 7.48638809e-01 7.55822420e-01 7.15687513e-01 -1.84516385e-02 4.15849015e-02 7.69406855e-02 4.13644522e-01 7.28902146e-02 6.11028910e-01 4.27919269e-01 4.47442569e-02 -9.34595764e-02 -1.32503569e-01] [ 7.23382771e-01 6.58910811e-01 6.75281048e-01 2.94189658e-02 4.97216173e-03 1.37231722e-01 2.84276724e-01 1.36073425e-01 5.39975286e-01 2.17245817e-01 -2.55559348e-02 -1.84323132e-01 5.33372425e-02] [ 2.23403454e-01 1.60484508e-01 1.75998479e-01 -9.00562182e-02 7.29360199e-03 3.71372938e-01 2.79871136e-01 3.36847454e-01 3.14765364e-01 -4.86096665e-02 -3.08989167e-01 -4.90406990e-01 -3.55477519e-02] [-6.84635818e-01 -8.54252100e-01 -5.82208693e-01 -5.44513166e-01 -1.46819770e-01 -4.32475582e-02 -3.12218405e-02 5.82189299e-02 -1.09362401e-01 8.86640325e-03 -2.25559950e-01 -4.57211845e-02 
3.36079687e-01] [-8.51879776e-01 -1.05093062e+00 -7.25950360e-01 -7.08854616e-01 -3.39722455e-01 -1.96871087e-01 4.71478142e-02 5.24578094e-02 -1.95804343e-01 -1.06035747e-01 -2.32423171e-01 4.16449495e-02 4.58678126e-01] [-4.15240020e-01 -8.21932554e-01 -7.39483356e-01 -8.41998756e-01 -4.03477073e-01 -2.31462240e-01 1.33337289e-01 1.76016837e-01 1.50310457e-01 2.69973606e-01 -8.39236826e-02 2.13718534e-01 6.76008940e-01] [-2.57359892e-02 -9.79560241e-02 1.20912381e-01 -1.17705345e-01 -3.32131684e-01 -2.64723361e-01 9.20101106e-02 2.91815877e-01 3.68911564e-01 3.11071724e-01 -1.17741756e-01 1.18350692e-01 2.24439383e-01] [ 8.96505937e-02 9.58970785e-02 4.08044785e-01 8.20219666e-02 -1.04112580e-01 -2.74395198e-01 -1.98387221e-01 -6.23356774e-02 2.09989384e-01 2.10180193e-01 7.41509497e-02 1.88219577e-01 2.62210518e-01] [ 9.09166709e-02 2.28910863e-01 4.47736412e-01 1.02674156e-01 -2.37659141e-01 -1.72015324e-01 -1.80342048e-01 -2.16667235e-01 7.33575150e-02 1.65592134e-01 4.05955583e-01 2.97880828e-01 1.88075200e-01] [ 2.70789325e-01 3.13637406e-01 3.13682377e-01 -1.58944473e-01 -5.36629140e-01 -2.52683371e-01 -1.43170938e-01 -1.61548302e-01 -5.65584749e-02 -1.30261198e-01 1.69459462e-01 3.99178453e-02 2.71025509e-01] [ 5.31534910e-01 6.13765359e-01 5.85147858e-01 -1.88260466e-01 -4.11767781e-01 -1.20005637e-01 2.45339841e-01 1.74624085e-01 -1.69098377e-03 -2.79879332e-01 -2.55685359e-01 -3.52343619e-01 -4.63908687e-02] [ 4.26063687e-01 3.03761333e-01 3.59906137e-01 -9.27189663e-02 -2.70630062e-01 -3.03628236e-01 -7.31277764e-02 -1.13432661e-01 -3.04964900e-01 -5.52446485e-01 -3.67675036e-01 -5.47767758e-01 -1.68767825e-01] [ 1.64167479e-01 1.55828059e-01 2.41683647e-01 -4.77854125e-02 1.85983045e-05 -2.90151685e-01 -2.08096534e-01 -3.96742910e-01 -4.05551791e-01 -3.33870620e-01 -5.37728854e-02 -1.66176166e-02 1.48281649e-01] [-1.29209518e-01 -1.44417256e-01 -1.48491830e-01 4.72129136e-02 -4.94417623e-02 -2.49547064e-01 -3.81448239e-01 -4.16546613e-01 
-2.94027686e-01 3.17754485e-02 2.89617896e-01 3.35852355e-01 2.20064953e-01]] [[-5.10978177e-02 6.41341656e-02 -5.54774523e-01 -4.89843041e-01 -5.92736840e-01 -4.30223912e-01 -3.70560139e-01 4.74930257e-02 2.95997500e-01 3.28566432e-01 1.48956001e-01 3.50941777e-01 2.30506957e-01] [-1.67624712e-01 2.79334426e-01 -7.22550631e-01 -4.58919585e-01 -7.62918055e-01 -2.63415605e-01 -4.05764401e-01 -6.73634037e-02 -6.61334842e-02 1.57530963e-01 5.62430099e-02 4.02748317e-01 2.72303998e-01] [-4.43236470e-01 7.66447838e-03 -7.04526782e-01 -1.62618518e-01 -4.45070803e-01 5.34456633e-02 -9.81789902e-02 9.60437357e-02 -5.77432588e-02 8.21628422e-02 -2.70550549e-01 -2.25607365e-01 -7.75124505e-02] [-1.84626430e-02 2.00286210e-01 -2.56311268e-01 9.41079259e-02 2.47048531e-02 4.49681401e-01 1.76093206e-01 1.24127544e-01 -3.39316279e-01 -2.48754561e-01 -4.56014693e-01 -1.56151474e-01 2.12308511e-01] [-7.43579566e-02 -4.06694412e-03 6.43383637e-02 3.32682610e-01 2.72550195e-01 3.10126662e-01 1.89922035e-01 2.42546976e-01 -7.13943765e-02 -1.96616471e-01 -5.11474013e-01 -3.57750058e-01 5.46995066e-02] [ 5.00461876e-01 3.00451387e-02 1.58249184e-01 2.83933699e-01 5.87263465e-01 5.44429481e-01 3.42060596e-01 2.99638867e-01 -4.94826473e-02 -8.28806460e-02 -2.50600338e-01 -1.06657594e-01 -1.10954069e-01] [ 1.39424905e-01 -3.41656022e-02 3.47466558e-01 3.94479662e-01 6.50686145e-01 7.26907015e-01 5.93881428e-01 4.81962740e-01 -1.56774055e-02 -2.11183116e-01 -4.09876764e-01 -4.44619805e-01 -3.13140213e-01] [ 3.09907198e-01 -9.77481306e-02 1.80653274e-01 3.72779399e-01 8.65484118e-01 9.10702586e-01 7.23619878e-01 5.04796028e-01 8.88096392e-02 -3.40505958e-01 -4.97296453e-01 -4.48479652e-01 -1.62824273e-01] [-3.97457667e-02 -9.07554254e-02 -1.04889143e-02 4.54721659e-01 4.89735931e-01 6.97654963e-01 5.19635081e-01 4.16572571e-01 1.38567552e-01 -2.30883062e-01 -2.37430841e-01 6.07442185e-02 3.72016072e-01] [ 2.61674658e-03 -7.56233037e-02 -6.03737645e-02 5.90981305e-01 4.22947526e-01 
5.56314290e-01 5.04341662e-01 5.05456209e-01 4.18308765e-01 -2.04848871e-02 4.83595990e-02 4.34509486e-01 4.29667532e-01] [ 1.51756346e-01 -1.50978193e-01 -1.04832083e-01 -7.58060738e-02 9.26732197e-02 2.66915649e-01 4.44096088e-01 3.25706095e-01 4.55470771e-01 2.26055309e-01 4.07831252e-01 5.18481851e-01 3.74663800e-01] [-3.41160968e-02 -2.37397775e-01 1.00164205e-01 8.89216959e-02 3.60845804e-01 3.70653957e-01 3.89409631e-01 8.32305923e-02 2.04993486e-01 2.48705700e-01 4.62599605e-01 2.39349097e-01 -1.49542302e-01] [ 1.64051712e-01 -3.22893053e-01 -2.05878884e-01 -4.54340786e-01 -1.16951279e-01 -8.09406862e-02 -4.06185575e-02 -1.61899343e-01 -1.92689169e-02 1.29479334e-01 4.56071436e-01 5.89953810e-02 -1.99567363e-01] [ 3.54599237e-01 -1.92233801e-01 -1.98061422e-01 -1.90488219e-01 5.35956174e-02 6.12049773e-02 -1.92300677e-02 -8.17815363e-02 2.40565669e-02 2.44569704e-01 4.35680628e-01 2.15037286e-01 1.58904325e-02] [ 4.83372629e-01 -9.37969144e-03 -1.69561207e-01 -3.15243214e-01 -9.61973965e-02 -9.80330165e-03 2.78335419e-02 -3.09375729e-02 -2.10753009e-02 -3.33709791e-02 1.12247147e-01 7.64017403e-02 2.01875851e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5646.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[0, 0]]() %5 : int[] = prim::Constant[value=[2, 1]]() %6 : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) fw_re: [[[[-0.20706731 -0.3781587 -0.3836926 -0.5403765 -0.18194427 -0.46877554 -0.3604129 -0.36211735 -0.06915186 0.06966457 -0.01706649 -0.03359546 -0.224302 ] [ 0.25209534 0.3637734 0.5465757 0.23371786 0.01961968 -0.13959244 0.01029338 0.18585604 0.08768331 0.01696164 -0.40811774 -0.16754536 -0.25021303] [ 0.1396576 0.5940648 0.31316048 0.08111086 -0.33789536 -0.06957188 0.18742377 0.44717565 0.37293753 0.0861589 -0.31552038 -0.29618952 0.10349764] [-0.30656064 -0.26517975 0.07961976 -0.07247721 0.01685116 -0.04834245 0.1477727 0.22099261 0.18612406 0.25463766 0.1818993 -0.18893252 -0.19372587] [ 0.16268244 -0.27537188 0.1262575 0.22023389 0.37591043 0.04278804 0.30436173 0.24498545 0.26650187 -0.25764623 0.02941008 0.03215659 0.19056408] [-0.20609692 -0.3515298 0.1069024 -0.09675694 0.558429 0.26230717 0.67580044 0.08846757 -0.01143665 -0.5497348 -0.14822374 -0.12308434 0.17708145] [ 0.12216686 -0.05915929 -0.19571728 0.08328092 0.31967807 0.38095573 0.38448402 0.21740283 0.56487846 0.40393656 0.52851206 0.04887239 -0.10246351]] [[ 0.27250278 0.27394712 0.0144852 0.2400701 0.04360435 -0.36845684 -0.29471785 -0.4661075 0.11123709 0.05880046 0.5569741 0.25825632 0.15721747] [ 0.09768811 -0.07755871 -0.09498734 -0.4308316 -0.427568 -0.61568224 -0.16214393 -0.2271158 -0.04286633 0.10898727 0.36656392 0.12222518 -0.2189719 ] [-0.16019267 -0.37675816 -0.34726885 -0.65706426 
-0.42310655 -0.45613924 0.28881016 0.1513647 -0.0490596 -0.3241374 -0.5343907 -0.4136646 -0.9715887 ] [ 0.06569925 0.2187416 0.05934493 -0.18135156 -0.27808917 0.25043952 0.56462723 0.7953331 0.6035556 0.4214205 -0.06841426 -0.36332723 -0.29057726] [ 0.23101956 0.25716257 0.4127321 0.41316423 0.0597238 0.01776857 -0.09695342 0.15351184 0.25934064 0.574834 0.40332687 -0.41687867 -0.9340446 ] [ 0.28959373 0.10535868 0.0998387 0.08364026 -0.15427311 -0.19128036 -0.3863419 -0.29577458 -0.30352095 -0.36705387 -0.1630066 -0.24248709 -0.4604879 ] [-0.10572162 -0.14830598 -0.03921908 -0.15290575 -0.24175617 -0.30987057 -0.14704989 -0.13670065 -0.16587642 -0.1573044 0.16001624 0.3633803 -0.1547986 ]] [[ 0.44727293 -0.04524085 -0.24355125 -0.2123608 -0.17787464 0.15325913 -0.02653003 0.02639445 0.11250803 0.2268158 0.32514128 -0.02140488 0.09205402] [ 0.39810887 0.17268775 0.04325918 -0.13187593 -0.17085892 -0.06436813 -0.00689491 -0.12229516 -0.15905908 0.12287384 0.2602423 0.09791074 -0.31344363] [ 0.25269967 -0.20926784 -0.3660794 -0.08755795 0.17106448 0.15987891 -0.37475798 -0.6517021 -0.83834225 -0.16596058 0.04294351 0.26599562 -0.19601637] [ 0.30145994 0.31336027 0.5596023 0.6881873 0.4316946 0.02767736 -0.22834094 0.00925393 -0.01575704 0.14545263 -0.08371788 0.06347835 -0.45279884] [ 0.1535888 0.17721416 0.3904464 0.20076242 -0.03151764 -0.18508556 -0.06612663 0.15284604 0.03486567 -0.10308906 -0.21305294 0.06505094 -0.14665681] [-0.20344113 0.07871083 0.29946995 0.54279715 0.2821486 -0.08643007 -0.41871357 -0.29739842 -0.02765681 0.01837727 0.02914264 0.09447047 -0.16038924] [-0.13283272 -0.07022773 -0.13712056 0.02549744 0.0392977 0.19985196 0.16186571 -0.02600106 0.19384915 0.02237141 0.35616457 0.31886455 0.34398732]]]]; ov_res: [[[[-0.20706731 -0.3781587 -0.3836926 -0.5403765 -0.18194427 -0.46877554 -0.3604129 -0.36211735 -0.06915186 0.06966457 -0.01706649 -0.03359546 -0.224302 ] [ 0.25209534 0.3637734 0.5465757 0.23371786 0.01961968 -0.13959244 0.01029338 
0.18585604 0.08768331 0.01696164 -0.40811774 -0.16754536 -0.25021303] [ 0.1396576 0.5940648 0.31316048 0.08111086 -0.33789536 -0.06957188 0.18742377 0.44717565 0.37293753 0.0861589 -0.31552038 -0.29618952 0.10349764] [-0.30656064 -0.26517975 0.07961976 -0.07247721 0.01685116 -0.04834245 0.1477727 0.22099261 0.18612406 0.25463766 0.1818993 -0.18893252 -0.19372587] [ 0.16268244 -0.27537188 0.1262575 0.22023389 0.37591043 0.04278804 0.30436173 0.24498545 0.26650187 -0.25764623 0.02941008 0.03215659 0.19056408] [-0.20609692 -0.3515298 0.1069024 -0.09675694 0.558429 0.26230717 0.67580044 0.08846757 -0.01143665 -0.5497348 -0.14822374 -0.12308434 0.17708145] [ 0.12216686 -0.05915929 -0.19571728 0.08328092 0.31967807 0.38095573 0.38448402 0.21740283 0.56487846 0.40393656 0.52851206 0.04887239 -0.10246351]] [[ 0.27250278 0.27394712 0.0144852 0.2400701 0.04360435 -0.36845684 -0.29471785 -0.4661075 0.11123709 0.05880046 0.5569741 0.25825632 0.15721747] [ 0.09768811 -0.07755871 -0.09498734 -0.4308316 -0.427568 -0.61568224 -0.16214393 -0.2271158 -0.04286633 0.10898727 0.36656392 0.12222518 -0.2189719 ] [-0.16019267 -0.37675816 -0.34726885 -0.65706426 -0.42310655 -0.45613924 0.28881016 0.1513647 -0.0490596 -0.3241374 -0.5343907 -0.4136646 -0.9715887 ] [ 0.06569925 0.2187416 0.05934493 -0.18135156 -0.27808917 0.25043952 0.56462723 0.7953331 0.6035556 0.4214205 -0.06841426 -0.36332723 -0.29057726] [ 0.23101956 0.25716257 0.4127321 0.41316423 0.0597238 0.01776857 -0.09695342 0.15351184 0.25934064 0.574834 0.40332687 -0.41687867 -0.9340446 ] [ 0.28959373 0.10535868 0.0998387 0.08364026 -0.15427311 -0.19128036 -0.3863419 -0.29577458 -0.30352095 -0.36705387 -0.1630066 -0.24248709 -0.4604879 ] [-0.10572162 -0.14830598 -0.03921908 -0.15290575 -0.24175617 -0.30987057 -0.14704989 -0.13670065 -0.16587642 -0.1573044 0.16001624 0.3633803 -0.1547986 ]] [[ 0.44727293 -0.04524085 -0.24355125 -0.2123608 -0.17787464 0.15325913 -0.02653003 0.02639445 0.11250803 0.2268158 0.32514128 -0.02140488 
0.09205402] [ 0.39810887 0.17268775 0.04325918 -0.13187593 -0.17085892 -0.06436813 -0.00689491 -0.12229516 -0.15905908 0.12287384 0.2602423 0.09791074 -0.31344363] [ 0.25269967 -0.20926784 -0.3660794 -0.08755795 0.17106448 0.15987891 -0.37475798 -0.6517021 -0.83834225 -0.16596058 0.04294351 0.26599562 -0.19601637] [ 0.30145994 0.31336027 0.5596023 0.6881873 0.4316946 0.02767736 -0.22834094 0.00925393 -0.01575704 0.14545263 -0.08371788 0.06347835 -0.45279884] [ 0.1535888 0.17721416 0.3904464 0.20076242 -0.03151764 -0.18508556 -0.06612663 0.15284604 0.03486567 -0.10308906 -0.21305294 0.06505094 -0.14665681] [-0.20344113 0.07871083 0.29946995 0.54279715 0.2821486 -0.08643007 -0.41871357 -0.29739842 -0.02765681 0.01837727 0.02914264 0.09447047 -0.16038924] [-0.13283272 -0.07022773 -0.13712056 0.02549744 0.0392977 0.19985196 0.16186571 -0.02600106 0.19384915 0.02237141 0.35616457 0.31886455 0.34398732]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': [2, 1], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5649.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[0, 0]]() %5 : int[] = prim::Constant[value=[2, 1]]() %6 : Tensor = aten::avg_pool2d(%x, %5, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%6) fw_re: [[[[-5.55789024e-02 -1.16562247e-01 -4.43920940e-01 5.01609862e-01 7.20785379e-01 -1.85693741e-01 3.41749758e-01 -1.66990608e-01 2.48525545e-01 -2.00952888e-02 -2.23878801e-01 6.71293318e-01 -6.16957664e-01 2.83298343e-01 6.47793353e-01] [ 3.11509967e-01 5.71935713e-01 4.63970661e-01 -5.16431212e-01 -3.80684957e-02 5.28732538e-02 -4.25071090e-01 2.42557064e-01 9.59957123e-01 -9.15855050e-01 -7.58527756e-01 9.10064995e-01 1.14340389e+00 -4.19629574e-01 7.53353238e-02] [ 1.07647657e+00 -1.69449985e-01 6.67822242e-01 -1.72492057e-01 -6.97658300e-01 -9.53829765e-01 4.31862473e-01 1.79583430e-01 5.06352067e-01 2.40759730e-01 4.49626982e-01 4.41266030e-01 1.54755950e+00 9.82414484e-01 3.03598374e-01] [ 1.20609581e+00 -9.80740786e-02 -7.49395251e-01 2.80971467e-01 4.02195156e-01 -7.00266212e-02 3.72357070e-01 -8.94671440e-01 -2.61412233e-01 7.85118222e-01 5.44692427e-02 4.63149250e-01 -2.17000544e-01 4.67981696e-01 -6.73415005e-01] [-1.08441025e-01 5.07602453e-01 7.26716995e-01 1.57279417e-01 -3.30270827e-01 -4.32820439e-01 9.10093725e-01 3.05315137e-01 6.37543559e-01 9.50787067e-01 -4.28052336e-01 6.37908220e-01 -3.39642376e-01 1.03962529e+00 -4.07855362e-01] [ 6.19547665e-01 -3.34610522e-01 2.43116587e-01 7.67747521e-01 -9.92782831e-01 7.78103769e-01 -3.07316124e-01 -1.67925358e-01 2.63864815e-01 -5.02420962e-02 -5.21884561e-02 -7.00840354e-01 5.86171031e-01 9.98184085e-02 3.29793870e-01] [ 3.82842600e-01 
-1.53814927e-02 6.97222888e-01 6.78150713e-01 -2.92815864e-01 -1.05979078e-01 1.42351484e+00 -9.90823865e-01 1.04611218e-01 -8.47309589e-01 1.00619829e+00 -5.05264699e-01 -3.72711778e-01 5.37074804e-02 1.74973845e-01] [-3.93264472e-01 1.25459731e+00 1.38403821e+00 2.04976425e-01 5.12356460e-01 -1.09181643e+00 2.07586837e+00 -4.84144330e-01 1.52311444e-01 2.00656366e+00 -5.04700184e-01 -1.41933548e+00 1.20375955e+00 1.42595458e+00 -5.31087399e-01]] [[-9.57429290e-01 -7.58644938e-01 -2.21078038e-01 6.94953322e-01 -3.69246155e-01 -2.89265215e-01 -8.11443388e-01 1.58676374e+00 4.33060229e-01 -5.36852598e-01 -4.98166144e-01 3.23243797e-01 7.17176437e-01 -4.80695724e-01 -1.06258243e-02] [-7.79311419e-01 -1.43698323e+00 3.41046423e-01 5.90686381e-01 3.98317903e-01 -4.43867445e-02 3.23983908e-01 7.08749354e-01 -4.13228333e-01 1.90789771e+00 -2.37337738e-01 6.16330266e-01 8.70518267e-01 -9.93622482e-01 9.15270686e-01] [ 1.16001177e+00 -6.10403180e-01 8.18542242e-01 7.80874372e-01 -1.10953856e+00 -1.06976151e+00 -9.37355638e-01 3.75965327e-01 7.73355067e-01 1.39967471e-01 7.15675116e-01 -3.06694627e-01 2.37972543e-01 5.69435537e-01 8.05714279e-02] [ 2.22430050e-01 3.39472890e-01 8.88883650e-01 8.64019036e-01 -1.20024085e-02 -1.70958710e+00 1.78926730e+00 -1.38021559e-01 5.14310598e-02 -6.19119942e-01 -4.41186100e-01 -1.34452426e+00 -8.62755477e-02 1.00606084e+00 -6.68175519e-02] [ 2.46588275e-01 5.54440737e-01 9.86338854e-01 1.68866843e-01 2.31044561e-01 1.78111464e-01 -8.79051089e-02 -1.73618197e+00 -2.70679712e-01 -1.48930550e-01 4.60515022e-02 -1.91151798e-01 1.64888337e-01 -2.16568291e-01 -2.09392279e-01] [-4.83331472e-01 -5.56390584e-01 1.41486511e-01 -5.62622547e-01 6.54797554e-01 -5.09312272e-01 1.25576067e+00 1.79255277e-01 8.97549927e-01 3.25798690e-01 -3.00569922e-01 -9.41400886e-01 7.11513340e-01 1.06430626e+00 9.32130516e-01] [ 1.50019094e-01 1.69257283e+00 3.50093693e-01 6.07038379e-01 -1.11084282e-01 9.32405651e-01 -3.62742990e-01 -2.83835411e-01 
-1.30031157e+00 -8.26111138e-01 2.89599806e-01 6.23894930e-02 1.12850845e+00 -1.12926805e+00 -1.46831363e-01] [ 6.45432353e-01 6.33468151e-01 7.01363564e-01 -1.08422196e+00 -1.06823218e+00 -5.93268752e-01 -3.79114121e-01 -2.55555749e-01 4.65202570e-01 -2.76369870e-01 -7.48670399e-01 -1.05939484e+00 -1.41977167e+00 -2.20527554e+00 -1.17499578e+00]] [[ 7.54239619e-01 2.69707710e-01 1.94872171e-01 1.03482939e-02 -4.18179989e-01 6.22813582e-01 -6.80063486e-01 5.70987523e-01 -7.19323814e-01 -4.22434837e-01 1.14870048e+00 1.48884058e-02 -1.33065403e+00 1.30129740e-01 8.69977355e-01] [ 3.89657199e-01 -1.08000183e+00 -1.24609888e+00 -9.59370017e-01 -7.18712807e-03 6.87033415e-01 4.31401968e-01 9.57095742e-01 -6.27985239e-01 8.23333859e-01 2.87134290e-01 6.15328550e-01 -6.24169707e-01 -3.19123089e-01 -1.04395437e+00] [-4.72114593e-01 -2.06346691e-01 -9.00182605e-01 -1.08118105e+00 -1.14452195e+00 -2.64300227e-01 1.65945888e-02 2.15963304e-01 -3.14390421e-01 -8.60715508e-02 3.07369322e-01 -2.91692419e-03 3.16646874e-01 -6.96986794e-01 4.07045931e-01] [ 5.12882322e-02 -2.88659275e-01 -5.88163495e-01 3.63646686e-01 1.75693810e-01 1.00626135e+00 -6.34966850e-01 3.03826123e-01 1.19622087e+00 1.10881209e+00 -4.90308404e-01 1.55249071e+00 1.02925527e+00 -1.97868729e+00 8.10052216e-01] [ 9.40224767e-01 2.71759331e-01 -7.25975990e-01 -2.45524377e-01 -1.88838661e-01 -5.74318945e-01 2.12049633e-02 1.34512544e-01 2.60769576e-01 9.00572300e-01 2.66293168e-01 -1.58589587e-01 -1.30792367e+00 7.12131977e-01 -9.47637856e-02] [ 6.81848347e-01 4.25959229e-02 -1.72922671e-01 -4.15201545e-01 7.46254086e-01 -6.42868340e-01 8.68393064e-01 -2.47813568e-01 2.71351159e-01 1.98172003e-01 9.40674663e-01 -1.27173930e-01 -1.32754087e-01 -1.55815113e+00 -7.08175302e-02] [ 5.42220250e-02 -4.08098727e-01 9.47115779e-01 8.30317318e-01 -5.79441428e-01 -2.25760430e-01 5.70547462e-01 -3.62574756e-01 9.53691065e-01 -7.60745168e-01 2.40383577e+00 8.69191885e-02 2.75604874e-01 5.33351064e-01 1.96425855e-01] [ 
1.23910737e+00 -5.37759900e-01 2.31030792e-01 8.18832144e-02 -1.47068799e+00 1.11245036e+00 -6.51218057e-01 2.40788102e-01 -1.05059111e+00 8.08673501e-01 -7.40608647e-02 -4.27942425e-02 2.21047950e+00 -1.26214576e+00 2.92671204e+00]]]]; ov_res: [[[[-5.55789024e-02 -1.16562247e-01 -4.43920940e-01 5.01609862e-01 7.20785379e-01 -1.85693741e-01 3.41749758e-01 -1.66990608e-01 2.48525545e-01 -2.00952888e-02 -2.23878801e-01 6.71293318e-01 -6.16957664e-01 2.83298343e-01 6.47793353e-01] [ 3.11509967e-01 5.71935713e-01 4.63970661e-01 -5.16431212e-01 -3.80684957e-02 5.28732538e-02 -4.25071090e-01 2.42557064e-01 9.59957123e-01 -9.15855050e-01 -7.58527756e-01 9.10064995e-01 1.14340389e+00 -4.19629574e-01 7.53353238e-02] [ 1.07647657e+00 -1.69449985e-01 6.67822242e-01 -1.72492057e-01 -6.97658300e-01 -9.53829765e-01 4.31862473e-01 1.79583430e-01 5.06352067e-01 2.40759730e-01 4.49626982e-01 4.41266030e-01 1.54755950e+00 9.82414484e-01 3.03598374e-01] [ 1.20609581e+00 -9.80740786e-02 -7.49395251e-01 2.80971467e-01 4.02195156e-01 -7.00266212e-02 3.72357070e-01 -8.94671440e-01 -2.61412233e-01 7.85118222e-01 5.44692427e-02 4.63149250e-01 -2.17000544e-01 4.67981696e-01 -6.73415005e-01] [-1.08441025e-01 5.07602453e-01 7.26716995e-01 1.57279417e-01 -3.30270827e-01 -4.32820439e-01 9.10093725e-01 3.05315137e-01 6.37543559e-01 9.50787067e-01 -4.28052336e-01 6.37908220e-01 -3.39642376e-01 1.03962529e+00 -4.07855362e-01] [ 6.19547665e-01 -3.34610522e-01 2.43116587e-01 7.67747521e-01 -9.92782831e-01 7.78103769e-01 -3.07316124e-01 -1.67925358e-01 2.63864815e-01 -5.02420962e-02 -5.21884561e-02 -7.00840354e-01 5.86171031e-01 9.98184085e-02 3.29793870e-01] [ 3.82842600e-01 -1.53814927e-02 6.97222888e-01 6.78150713e-01 -2.92815864e-01 -1.05979078e-01 1.42351484e+00 -9.90823865e-01 1.04611218e-01 -8.47309589e-01 1.00619829e+00 -5.05264699e-01 -3.72711778e-01 5.37074804e-02 1.74973845e-01] [-3.93264472e-01 1.25459731e+00 1.38403821e+00 2.04976425e-01 5.12356460e-01 -1.09181643e+00 2.07586837e+00 
-4.84144330e-01 1.52311444e-01 2.00656366e+00 -5.04700184e-01 -1.41933548e+00 1.20375955e+00 1.42595458e+00 -5.31087399e-01]] [[-9.57429290e-01 -7.58644938e-01 -2.21078038e-01 6.94953322e-01 -3.69246155e-01 -2.89265215e-01 -8.11443388e-01 1.58676374e+00 4.33060229e-01 -5.36852598e-01 -4.98166144e-01 3.23243797e-01 7.17176437e-01 -4.80695724e-01 -1.06258243e-02] [-7.79311419e-01 -1.43698323e+00 3.41046423e-01 5.90686381e-01 3.98317903e-01 -4.43867445e-02 3.23983908e-01 7.08749354e-01 -4.13228333e-01 1.90789771e+00 -2.37337738e-01 6.16330266e-01 8.70518267e-01 -9.93622482e-01 9.15270686e-01] [ 1.16001177e+00 -6.10403180e-01 8.18542242e-01 7.80874372e-01 -1.10953856e+00 -1.06976151e+00 -9.37355638e-01 3.75965327e-01 7.73355067e-01 1.39967471e-01 7.15675116e-01 -3.06694627e-01 2.37972543e-01 5.69435537e-01 8.05714279e-02] [ 2.22430050e-01 3.39472890e-01 8.88883650e-01 8.64019036e-01 -1.20024085e-02 -1.70958710e+00 1.78926730e+00 -1.38021559e-01 5.14310598e-02 -6.19119942e-01 -4.41186100e-01 -1.34452426e+00 -8.62755477e-02 1.00606084e+00 -6.68175519e-02] [ 2.46588275e-01 5.54440737e-01 9.86338854e-01 1.68866843e-01 2.31044561e-01 1.78111464e-01 -8.79051089e-02 -1.73618197e+00 -2.70679712e-01 -1.48930550e-01 4.60515022e-02 -1.91151798e-01 1.64888337e-01 -2.16568291e-01 -2.09392279e-01] [-4.83331472e-01 -5.56390584e-01 1.41486511e-01 -5.62622547e-01 6.54797554e-01 -5.09312272e-01 1.25576067e+00 1.79255277e-01 8.97549927e-01 3.25798690e-01 -3.00569922e-01 -9.41400886e-01 7.11513340e-01 1.06430626e+00 9.32130516e-01] [ 1.50019094e-01 1.69257283e+00 3.50093693e-01 6.07038379e-01 -1.11084282e-01 9.32405651e-01 -3.62742990e-01 -2.83835411e-01 -1.30031157e+00 -8.26111138e-01 2.89599806e-01 6.23894930e-02 1.12850845e+00 -1.12926805e+00 -1.46831363e-01] [ 6.45432353e-01 6.33468151e-01 7.01363564e-01 -1.08422196e+00 -1.06823218e+00 -5.93268752e-01 -3.79114121e-01 -2.55555749e-01 4.65202570e-01 -2.76369870e-01 -7.48670399e-01 -1.05939484e+00 -1.41977167e+00 -2.20527554e+00 
-1.17499578e+00]] [[ 7.54239619e-01 2.69707710e-01 1.94872171e-01 1.03482939e-02 -4.18179989e-01 6.22813582e-01 -6.80063486e-01 5.70987523e-01 -7.19323814e-01 -4.22434837e-01 1.14870048e+00 1.48884058e-02 -1.33065403e+00 1.30129740e-01 8.69977355e-01] [ 3.89657199e-01 -1.08000183e+00 -1.24609888e+00 -9.59370017e-01 -7.18712807e-03 6.87033415e-01 4.31401968e-01 9.57095742e-01 -6.27985239e-01 8.23333859e-01 2.87134290e-01 6.15328550e-01 -6.24169707e-01 -3.19123089e-01 -1.04395437e+00] [-4.72114593e-01 -2.06346691e-01 -9.00182605e-01 -1.08118105e+00 -1.14452195e+00 -2.64300227e-01 1.65945888e-02 2.15963304e-01 -3.14390421e-01 -8.60715508e-02 3.07369322e-01 -2.91692419e-03 3.16646874e-01 -6.96986794e-01 4.07045931e-01] [ 5.12882322e-02 -2.88659275e-01 -5.88163495e-01 3.63646686e-01 1.75693810e-01 1.00626135e+00 -6.34966850e-01 3.03826123e-01 1.19622087e+00 1.10881209e+00 -4.90308404e-01 1.55249071e+00 1.02925527e+00 -1.97868729e+00 8.10052216e-01] [ 9.40224767e-01 2.71759331e-01 -7.25975990e-01 -2.45524377e-01 -1.88838661e-01 -5.74318945e-01 2.12049633e-02 1.34512544e-01 2.60769576e-01 9.00572300e-01 2.66293168e-01 -1.58589587e-01 -1.30792367e+00 7.12131977e-01 -9.47637856e-02] [ 6.81848347e-01 4.25959229e-02 -1.72922671e-01 -4.15201545e-01 7.46254086e-01 -6.42868340e-01 8.68393064e-01 -2.47813568e-01 2.71351159e-01 1.98172003e-01 9.40674663e-01 -1.27173930e-01 -1.32754087e-01 -1.55815113e+00 -7.08175302e-02] [ 5.42220250e-02 -4.08098727e-01 9.47115779e-01 8.30317318e-01 -5.79441428e-01 -2.25760430e-01 5.70547462e-01 -3.62574756e-01 9.53691065e-01 -7.60745168e-01 2.40383577e+00 8.69191885e-02 2.75604874e-01 5.33351064e-01 1.96425855e-01] [ 1.23910737e+00 -5.37759900e-01 2.31030792e-01 8.18832144e-02 -1.47068799e+00 1.11245036e+00 -6.51218057e-01 2.40788102e-01 -1.05059111e+00 8.08673501e-01 -7.40608647e-02 -4.27942425e-02 2.21047950e+00 -1.26214576e+00 2.92671204e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5652.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[0, 0]]() %6 : int[] = prim::Constant[value=[1, 1]]() %7 : int[] = prim::Constant[value=[3, 3]]() %8 : Tensor = aten::avg_pool2d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%8) fw_re: [[[[ 3.30589205e-01 9.16643161e-03 2.19847441e-01 -1.84024170e-01 -7.65526295e-02 -3.35191518e-01 1.30473793e-01 -1.09527307e-02 -8.17700922e-02 -3.76102179e-01 -5.02443612e-01 -4.68096197e-01 -3.18509042e-01] [ 7.63068259e-01 3.62516493e-01 3.46392810e-01 -1.37788951e-01 -1.77081242e-01 -5.61310351e-01 -9.70613062e-02 -1.83795750e-01 1.80013791e-01 1.75055802e-01 4.74132419e-01 1.52242869e-01 -1.05531253e-01] [ 5.09255707e-01 1.90632030e-01 -2.33790744e-02 -2.26015300e-01 -3.37618470e-01 -5.19106627e-01 -2.63294578e-01 -1.05873518e-01 3.79529119e-01 4.35143322e-01 7.02654183e-01 4.17221218e-01 2.33920991e-01] [ 6.28995180e-01 1.99011788e-01 -4.37979251e-02 1.38769537e-01 -4.18459922e-02 -1.77430227e-01 -4.94752526e-01 -2.58676201e-01 1.70243636e-01 6.79426491e-01 1.01387727e+00 5.86951852e-01 3.32743704e-01] [-3.04670632e-02 -6.44518137e-02 -2.02948570e-01 3.12078856e-02 1.02839291e-01 4.49117161e-02 -2.94562221e-01 -2.33345091e-01 -7.41682425e-02 2.13121340e-01 1.44793987e-02 -2.91305706e-02 1.63115248e-01] [-6.00815471e-03 -1.31436408e-01 -2.20787466e-01 2.37554703e-02 -1.10668883e-01 5.62158041e-02 -3.49724352e-01 -3.74141008e-01 -1.73566505e-01 -4.65872623e-02 2.71367226e-02 -4.29628432e-01 -8.55569318e-02] [ 2.70575546e-02 1.60818338e-01 9.58996359e-03 
7.92377293e-02 7.75884688e-02 2.88954884e-01 -4.57559712e-02 -1.11128911e-01 2.86938623e-02 1.72377646e-01 3.81232612e-02 -3.36692542e-01 -2.67133743e-01] [ 3.52737367e-01 2.87118256e-01 -1.86618298e-01 -3.95782709e-01 -3.32851291e-01 2.51344800e-01 2.72361875e-01 4.92085040e-01 4.38676000e-01 4.09765005e-01 3.43993425e-01 9.14900005e-02 -2.40780115e-02] [ 3.05922359e-01 4.18914586e-01 3.77514632e-03 -4.76411611e-01 -3.63690287e-01 2.32054234e-01 4.74174291e-01 4.55467582e-01 3.04639369e-01 4.22662467e-01 3.25662524e-01 1.11207522e-01 1.18781514e-01] [ 6.35244429e-01 7.32124567e-01 -1.09607033e-01 -6.87490404e-01 -6.70027494e-01 -1.89257897e-02 3.68609220e-01 2.59946674e-01 -1.32832229e-01 -4.13570523e-01 -4.89427149e-01 -2.75734127e-01 1.73958391e-01] [ 3.57163578e-01 6.36436522e-01 1.65541563e-02 -9.25286189e-02 -2.57305682e-01 -1.99458420e-01 -1.19977765e-01 -4.63056028e-01 -4.29889560e-01 -3.25515956e-01 -1.30072549e-01 -1.21149626e-02 1.92616269e-01] [ 7.05642939e-01 4.88451332e-01 2.44616941e-02 2.14147210e-01 3.98539066e-01 1.46379620e-01 1.39222547e-01 -1.63036406e-01 -1.14089757e-01 1.80147756e-02 4.90331091e-02 2.34072551e-01 1.24357432e-01] [ 1.50126398e-01 1.05598308e-02 -1.39819250e-01 4.69029307e-01 7.71533430e-01 3.36093575e-01 1.01909041e-01 -3.39994490e-01 -3.46120335e-02 1.49481803e-01 2.98064649e-01 3.49041730e-01 9.76252928e-02]] [[-1.57779500e-01 -1.23141557e-01 -1.37640744e-01 1.71575233e-01 1.06609732e-01 1.74806044e-01 -1.16997957e-01 -6.07734397e-02 2.39451468e-01 6.52260900e-01 6.01928353e-01 6.35783076e-01 1.89403474e-01] [-4.99460697e-02 2.56606251e-01 2.71117061e-01 4.21323895e-01 3.49411577e-01 1.68870986e-01 -2.21871510e-01 -3.44325662e-01 -6.65064752e-02 4.20566887e-01 4.72338080e-01 3.76690716e-01 1.13467626e-01] [-4.68727946e-02 2.34625004e-02 2.07724839e-01 4.00620818e-01 5.60199954e-02 -1.29429623e-01 -4.08281237e-01 -3.78308564e-01 -2.44569913e-01 2.51289129e-01 4.84323084e-01 3.03247690e-01 3.52914073e-02] [ 1.21937722e-01 
1.11486465e-01 -7.52746984e-02 -1.78581133e-01 -1.81032166e-01 -2.95854390e-01 1.90957393e-02 3.91674936e-02 2.12098420e-01 1.36538997e-01 3.63425851e-01 1.27162844e-01 1.67796463e-01] [ 4.78800088e-02 -1.64614781e-03 2.07921062e-02 -7.11345375e-02 -8.56377929e-02 -2.05594674e-01 1.54658124e-01 1.50396764e-01 2.09038392e-01 1.29818171e-01 1.73036069e-01 1.08801439e-01 2.71122038e-01] [ 5.75664826e-02 -1.54636167e-02 -6.41815588e-02 -1.32889748e-02 3.65733862e-01 2.61182398e-01 3.77405614e-01 2.89102376e-01 3.89533788e-01 3.91666889e-01 -4.62508984e-02 -3.04775298e-01 -2.49423981e-01] [ 3.25923324e-01 2.39015266e-01 1.57801807e-01 1.79147720e-02 5.86543605e-02 -6.58330321e-02 -1.35128096e-01 -5.11554740e-02 1.02058977e-01 3.68611544e-01 3.29079218e-02 -4.60558206e-01 -4.68666822e-01] [ 1.04331464e-01 -1.07016116e-01 1.09791964e-01 -8.20842832e-02 5.10741211e-02 -1.53296605e-01 -1.03005357e-01 1.46798685e-01 3.21237445e-01 5.15236735e-01 2.19127238e-01 -2.91648537e-01 -1.98469818e-01] [-5.74248098e-02 -1.27745822e-01 2.40807846e-01 -4.58677439e-03 -2.98919737e-01 -4.08659369e-01 -2.27655351e-01 -6.07533306e-02 -2.11867794e-01 -6.60069799e-03 2.52783060e-01 2.65665233e-01 2.96841830e-01] [ 6.96651125e-03 6.61331788e-02 3.38833481e-01 3.06080699e-01 -1.99817985e-01 -2.70779490e-01 -1.38241008e-01 4.35801923e-01 4.44463901e-02 -1.87766880e-01 -1.71217814e-01 1.63031399e-01 4.75769162e-01] [ 5.43050766e-01 3.02889138e-01 -1.61358640e-02 -1.79483965e-01 -5.62492132e-01 -3.54421079e-01 -2.36946657e-01 2.55624205e-01 -8.99809077e-02 -2.76289165e-01 -4.46924806e-01 -2.17991367e-01 -1.37571111e-01] [ 4.13569301e-01 2.42678106e-01 1.84881955e-01 8.40600729e-02 -1.99256167e-01 -1.09009415e-01 -6.51892349e-02 2.00562015e-01 6.32699803e-02 -1.95490465e-01 -3.77609611e-01 -4.22793657e-01 -2.75072634e-01] [ 3.10004409e-03 -6.27847090e-02 1.68397501e-01 6.94387853e-02 -2.75734335e-01 -9.88508165e-02 -1.68881014e-01 -1.90085564e-02 -3.24105024e-01 -4.20896947e-01 -4.74322200e-01 
-6.71008885e-01 -4.69524115e-01]] [[ 3.70845854e-01 1.29734427e-01 4.59525995e-02 -1.00636296e-01 4.18301970e-01 1.39599547e-01 2.81845301e-01 1.45057008e-01 2.72385299e-01 3.42562944e-01 2.08283350e-01 1.75335854e-01 -3.97565097e-01] [ 5.63833356e-01 5.05297184e-01 1.99449956e-01 -3.40146989e-01 -2.05213472e-01 -1.27109155e-01 7.39127249e-02 1.39828026e-01 2.57283062e-01 4.26710099e-01 4.31550622e-01 9.61472020e-02 -3.17316413e-01] [-1.29908854e-02 1.72753185e-01 2.48910263e-01 -3.02170906e-02 -4.00241137e-01 -6.48991823e-01 -4.22747195e-01 2.15988308e-01 6.54284120e-01 8.02918077e-01 5.23850679e-01 -4.60926555e-02 -3.09422910e-01] [-3.93488258e-01 -2.54768252e-01 -4.21372980e-01 -3.59502345e-01 -6.81477606e-01 -6.55839562e-01 -3.46666336e-01 4.20879006e-01 9.75270152e-01 9.96661723e-01 3.72959405e-01 -1.59710035e-01 -2.86233783e-01] [-6.78640842e-01 -3.72715443e-01 -4.58723128e-01 -2.61938542e-01 -4.13305551e-01 -5.92044950e-01 -4.15963024e-01 1.20597877e-01 4.10471767e-01 5.12496710e-01 -1.68281421e-01 -1.84187800e-01 -3.12880576e-01] [ 3.75616364e-02 2.04196826e-01 -7.30336159e-02 -5.08823479e-03 -2.13403910e-01 -2.17124537e-01 -3.24248165e-01 -2.34106436e-01 -1.82976983e-02 6.54795095e-02 -1.75122187e-01 -5.08271679e-02 -2.88059711e-01] [ 3.18246990e-01 3.48075271e-01 4.14448053e-01 1.70630947e-01 -7.60298073e-02 -4.23763543e-01 -7.28439152e-01 -5.98298609e-01 -4.77523744e-01 -1.31558076e-01 -4.73632544e-01 -2.64845695e-02 -1.99754313e-01] [ 2.03064263e-01 -8.34379569e-02 2.57594019e-01 2.84845755e-02 -2.04744354e-01 -5.34906805e-01 -6.78205967e-01 -7.79882967e-01 -4.96032089e-01 -7.10131079e-02 -1.76893622e-02 -5.92500903e-02 -5.67743242e-01] [-1.01435192e-01 -4.51660305e-02 1.87482327e-01 1.53541252e-01 5.00096241e-04 -2.06239715e-01 -1.91479266e-01 -5.28805912e-01 -5.79315543e-01 -3.37821126e-01 -1.23735704e-01 1.85935106e-02 -1.72780842e-01] [-2.56756514e-01 -4.66028005e-02 1.97562963e-01 2.95237303e-01 1.63699344e-01 -3.83976363e-02 5.70670426e-01 
-5.03101870e-02 -1.72279775e-01 -5.34570694e-01 1.05498284e-01 1.29759274e-02 -1.48996994e-01] [ 2.02020228e-01 5.68593562e-01 5.95143855e-01 2.44898051e-01 3.01976264e-01 2.57132649e-01 5.92536509e-01 -9.11406279e-02 -4.04297292e-01 -6.44373178e-01 -3.10906947e-01 -2.94120431e-01 -2.31363028e-01] [ 4.02349502e-01 4.60074544e-01 5.97670197e-01 1.31201535e-01 1.85604766e-01 1.10071532e-01 2.44688094e-01 -1.65584758e-01 -6.40237629e-02 -2.89340168e-01 -9.11916569e-02 -3.50344628e-01 -3.23162615e-01] [ 9.71047059e-02 -2.99956929e-02 -6.79578930e-02 -1.85756579e-01 1.11571789e-01 2.08510816e-01 -2.10907057e-01 -4.36639100e-01 -1.54636651e-01 -1.04243830e-01 -1.29578874e-01 -4.61096972e-01 -4.89957273e-01]]]]; ov_res: [[[[ 3.30589205e-01 9.16643161e-03 2.19847441e-01 -1.84024170e-01 -7.65526295e-02 -3.35191518e-01 1.30473793e-01 -1.09527307e-02 -8.17700922e-02 -3.76102179e-01 -5.02443612e-01 -4.68096197e-01 -3.18509042e-01] [ 7.63068259e-01 3.62516493e-01 3.46392810e-01 -1.37788951e-01 -1.77081242e-01 -5.61310351e-01 -9.70613062e-02 -1.83795750e-01 1.80013791e-01 1.75055802e-01 4.74132419e-01 1.52242869e-01 -1.05531253e-01] [ 5.09255707e-01 1.90632030e-01 -2.33790744e-02 -2.26015300e-01 -3.37618470e-01 -5.19106627e-01 -2.63294578e-01 -1.05873518e-01 3.79529119e-01 4.35143322e-01 7.02654183e-01 4.17221218e-01 2.33920991e-01] [ 6.28995180e-01 1.99011788e-01 -4.37979251e-02 1.38769537e-01 -4.18459922e-02 -1.77430227e-01 -4.94752526e-01 -2.58676201e-01 1.70243636e-01 6.79426491e-01 1.01387727e+00 5.86951852e-01 3.32743704e-01] [-3.04670632e-02 -6.44518137e-02 -2.02948570e-01 3.12078856e-02 1.02839291e-01 4.49117161e-02 -2.94562221e-01 -2.33345091e-01 -7.41682425e-02 2.13121340e-01 1.44793987e-02 -2.91305706e-02 1.63115248e-01] [-6.00815471e-03 -1.31436408e-01 -2.20787466e-01 2.37554703e-02 -1.10668883e-01 5.62158041e-02 -3.49724352e-01 -3.74141008e-01 -1.73566505e-01 -4.65872623e-02 2.71367226e-02 -4.29628432e-01 -8.55569318e-02] [ 2.70575546e-02 1.60818338e-01 
9.58996359e-03 7.92377293e-02 7.75884688e-02 2.88954884e-01 -4.57559712e-02 -1.11128911e-01 2.86938623e-02 1.72377646e-01 3.81232612e-02 -3.36692542e-01 -2.67133743e-01] [ 3.52737367e-01 2.87118256e-01 -1.86618298e-01 -3.95782709e-01 -3.32851291e-01 2.51344800e-01 2.72361875e-01 4.92085040e-01 4.38676000e-01 4.09765005e-01 3.43993425e-01 9.14900005e-02 -2.40780115e-02] [ 3.05922359e-01 4.18914586e-01 3.77514632e-03 -4.76411611e-01 -3.63690287e-01 2.32054234e-01 4.74174291e-01 4.55467582e-01 3.04639369e-01 4.22662467e-01 3.25662524e-01 1.11207522e-01 1.18781514e-01] [ 6.35244429e-01 7.32124567e-01 -1.09607033e-01 -6.87490404e-01 -6.70027494e-01 -1.89257897e-02 3.68609220e-01 2.59946674e-01 -1.32832229e-01 -4.13570523e-01 -4.89427149e-01 -2.75734127e-01 1.73958391e-01] [ 3.57163578e-01 6.36436522e-01 1.65541563e-02 -9.25286189e-02 -2.57305682e-01 -1.99458420e-01 -1.19977765e-01 -4.63056028e-01 -4.29889560e-01 -3.25515956e-01 -1.30072549e-01 -1.21149626e-02 1.92616269e-01] [ 7.05642939e-01 4.88451332e-01 2.44616941e-02 2.14147210e-01 3.98539066e-01 1.46379620e-01 1.39222547e-01 -1.63036406e-01 -1.14089757e-01 1.80147756e-02 4.90331091e-02 2.34072551e-01 1.24357432e-01] [ 1.50126398e-01 1.05598308e-02 -1.39819250e-01 4.69029307e-01 7.71533430e-01 3.36093575e-01 1.01909041e-01 -3.39994490e-01 -3.46120335e-02 1.49481803e-01 2.98064649e-01 3.49041730e-01 9.76252928e-02]] [[-1.57779500e-01 -1.23141557e-01 -1.37640744e-01 1.71575233e-01 1.06609732e-01 1.74806044e-01 -1.16997957e-01 -6.07734397e-02 2.39451468e-01 6.52260900e-01 6.01928353e-01 6.35783076e-01 1.89403474e-01] [-4.99460697e-02 2.56606251e-01 2.71117061e-01 4.21323895e-01 3.49411577e-01 1.68870986e-01 -2.21871510e-01 -3.44325662e-01 -6.65064752e-02 4.20566887e-01 4.72338080e-01 3.76690716e-01 1.13467626e-01] [-4.68727946e-02 2.34625004e-02 2.07724839e-01 4.00620818e-01 5.60199954e-02 -1.29429623e-01 -4.08281237e-01 -3.78308564e-01 -2.44569913e-01 2.51289129e-01 4.84323084e-01 3.03247690e-01 3.52914073e-02] [ 
1.21937722e-01 1.11486465e-01 -7.52746984e-02 -1.78581133e-01 -1.81032166e-01 -2.95854390e-01 1.90957393e-02 3.91674936e-02 2.12098420e-01 1.36538997e-01 3.63425851e-01 1.27162844e-01 1.67796463e-01] [ 4.78800088e-02 -1.64614781e-03 2.07921062e-02 -7.11345375e-02 -8.56377929e-02 -2.05594674e-01 1.54658124e-01 1.50396764e-01 2.09038392e-01 1.29818171e-01 1.73036069e-01 1.08801439e-01 2.71122038e-01] [ 5.75664826e-02 -1.54636167e-02 -6.41815588e-02 -1.32889748e-02 3.65733862e-01 2.61182398e-01 3.77405614e-01 2.89102376e-01 3.89533788e-01 3.91666889e-01 -4.62508984e-02 -3.04775298e-01 -2.49423981e-01] [ 3.25923324e-01 2.39015266e-01 1.57801807e-01 1.79147720e-02 5.86543605e-02 -6.58330321e-02 -1.35128096e-01 -5.11554740e-02 1.02058977e-01 3.68611544e-01 3.29079218e-02 -4.60558206e-01 -4.68666822e-01] [ 1.04331464e-01 -1.07016116e-01 1.09791964e-01 -8.20842832e-02 5.10741211e-02 -1.53296605e-01 -1.03005357e-01 1.46798685e-01 3.21237445e-01 5.15236735e-01 2.19127238e-01 -2.91648537e-01 -1.98469818e-01] [-5.74248098e-02 -1.27745822e-01 2.40807846e-01 -4.58677439e-03 -2.98919737e-01 -4.08659369e-01 -2.27655351e-01 -6.07533306e-02 -2.11867794e-01 -6.60069799e-03 2.52783060e-01 2.65665233e-01 2.96841830e-01] [ 6.96651125e-03 6.61331788e-02 3.38833481e-01 3.06080699e-01 -1.99817985e-01 -2.70779490e-01 -1.38241008e-01 4.35801923e-01 4.44463901e-02 -1.87766880e-01 -1.71217814e-01 1.63031399e-01 4.75769162e-01] [ 5.43050766e-01 3.02889138e-01 -1.61358640e-02 -1.79483965e-01 -5.62492132e-01 -3.54421079e-01 -2.36946657e-01 2.55624205e-01 -8.99809077e-02 -2.76289165e-01 -4.46924806e-01 -2.17991367e-01 -1.37571111e-01] [ 4.13569301e-01 2.42678106e-01 1.84881955e-01 8.40600729e-02 -1.99256167e-01 -1.09009415e-01 -6.51892349e-02 2.00562015e-01 6.32699803e-02 -1.95490465e-01 -3.77609611e-01 -4.22793657e-01 -2.75072634e-01] [ 3.10004409e-03 -6.27847090e-02 1.68397501e-01 6.94387853e-02 -2.75734335e-01 -9.88508165e-02 -1.68881014e-01 -1.90085564e-02 -3.24105024e-01 -4.20896947e-01 
-4.74322200e-01 -6.71008885e-01 -4.69524115e-01]] [[ 3.70845854e-01 1.29734427e-01 4.59525995e-02 -1.00636296e-01 4.18301970e-01 1.39599547e-01 2.81845301e-01 1.45057008e-01 2.72385299e-01 3.42562944e-01 2.08283350e-01 1.75335854e-01 -3.97565097e-01] [ 5.63833356e-01 5.05297184e-01 1.99449956e-01 -3.40146989e-01 -2.05213472e-01 -1.27109155e-01 7.39127249e-02 1.39828026e-01 2.57283062e-01 4.26710099e-01 4.31550622e-01 9.61472020e-02 -3.17316413e-01] [-1.29908854e-02 1.72753185e-01 2.48910263e-01 -3.02170906e-02 -4.00241137e-01 -6.48991823e-01 -4.22747195e-01 2.15988308e-01 6.54284120e-01 8.02918077e-01 5.23850679e-01 -4.60926555e-02 -3.09422910e-01] [-3.93488258e-01 -2.54768252e-01 -4.21372980e-01 -3.59502345e-01 -6.81477606e-01 -6.55839562e-01 -3.46666336e-01 4.20879006e-01 9.75270152e-01 9.96661723e-01 3.72959405e-01 -1.59710035e-01 -2.86233783e-01] [-6.78640842e-01 -3.72715443e-01 -4.58723128e-01 -2.61938542e-01 -4.13305551e-01 -5.92044950e-01 -4.15963024e-01 1.20597877e-01 4.10471767e-01 5.12496710e-01 -1.68281421e-01 -1.84187800e-01 -3.12880576e-01] [ 3.75616364e-02 2.04196826e-01 -7.30336159e-02 -5.08823479e-03 -2.13403910e-01 -2.17124537e-01 -3.24248165e-01 -2.34106436e-01 -1.82976983e-02 6.54795095e-02 -1.75122187e-01 -5.08271679e-02 -2.88059711e-01] [ 3.18246990e-01 3.48075271e-01 4.14448053e-01 1.70630947e-01 -7.60298073e-02 -4.23763543e-01 -7.28439152e-01 -5.98298609e-01 -4.77523744e-01 -1.31558076e-01 -4.73632544e-01 -2.64845695e-02 -1.99754313e-01] [ 2.03064263e-01 -8.34379569e-02 2.57594019e-01 2.84845755e-02 -2.04744354e-01 -5.34906805e-01 -6.78205967e-01 -7.79882967e-01 -4.96032089e-01 -7.10131079e-02 -1.76893622e-02 -5.92500903e-02 -5.67743242e-01] [-1.01435192e-01 -4.51660305e-02 1.87482327e-01 1.53541252e-01 5.00096241e-04 -2.06239715e-01 -1.91479266e-01 -5.28805912e-01 -5.79315543e-01 -3.37821126e-01 -1.23735704e-01 1.85935106e-02 -1.72780842e-01] [-2.56756514e-01 -4.66028005e-02 1.97562963e-01 2.95237303e-01 1.63699344e-01 -3.83976363e-02 
5.70670426e-01 -5.03101870e-02 -1.72279775e-01 -5.34570694e-01 1.05498284e-01 1.29759274e-02 -1.48996994e-01] [ 2.02020228e-01 5.68593562e-01 5.95143855e-01 2.44898051e-01 3.01976264e-01 2.57132649e-01 5.92536509e-01 -9.11406279e-02 -4.04297292e-01 -6.44373178e-01 -3.10906947e-01 -2.94120431e-01 -2.31363028e-01] [ 4.02349502e-01 4.60074544e-01 5.97670197e-01 1.31201535e-01 1.85604766e-01 1.10071532e-01 2.44688094e-01 -1.65584758e-01 -6.40237629e-02 -2.89340168e-01 -9.11916569e-02 -3.50344628e-01 -3.23162615e-01] [ 9.71047059e-02 -2.99956929e-02 -6.79578930e-02 -1.85756579e-01 1.11571789e-01 2.08510816e-01 -2.10907057e-01 -4.36639100e-01 -1.54636651e-01 -1.04243830e-01 -1.29578874e-01 -4.61096972e-01 -4.89957273e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5655.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[1, 1]]() %6 : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %5, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) fw_re: [[[[-0.22928566 -0.24200922 0.23596567 0.40569603 0.5437289 0.40849012 0.15733343 0.07585856 0.08438125 0.23342401 -0.11906806 -0.54989254 -0.83014786 -0.66344094 -0.3499414 ] [-0.33952332 -0.3124693 0.07828274 0.1646542 0.20186673 0.10421921 -0.09521076 -0.12862414 0.09654578 0.3681036 -0.03197113 -0.60738724 -0.81126744 -0.5753148 -0.2843753 ] [-0.06932044 -0.12692602 0.07631082 0.00453989 0.18049382 -0.12510824 -0.12479867 -0.30285507 -0.08726803 -0.2683327 -0.31417271 -0.40527624 -0.5767859 -0.8063736 -0.7573006 ] [-0.458802 -0.43636382 -0.36280784 -0.23389217 -0.2342298 -0.26703015 -0.11653005 -0.19472902 0.10571909 -0.13778107 -0.04300535 -0.17059726 -0.0564369 -0.12035877 -0.21610914] [-0.17381744 -0.26933682 -0.4263145 -0.10911078 0.16639604 0.35764164 0.10414158 -0.06647919 -0.15927869 -0.4359935 -0.2809932 -0.2577597 -0.03442742 -0.10486995 -0.1068621 ] [-0.09360582 -0.26064873 -0.72045845 -0.3448168 0.07084624 0.6384863 0.40157154 0.07338928 -0.13343394 -0.15635303 -0.15874274 -0.21607696 -0.14178763 0.10871015 0.09937676] [ 0.06459416 -0.06722508 -0.66229796 -0.41392422 -0.13639912 0.34558332 0.1668396 0.13293107 -0.04623924 -0.19282787 -0.5290566 -0.47577122 -0.31605038 -0.03192545 -0.03863784] [-0.09805749 -0.06870605 -0.635623 -0.4298563 -0.33862972 0.16063352 0.21835856 0.17293964 
0.1760691 0.09149008 -0.17257962 -0.12388415 -0.12040138 -0.21064588 -0.3141116 ] [-0.15219074 0.14993459 -0.24401763 -0.15771827 -0.40457916 -0.0280354 0.18611433 0.16587538 0.14469433 0.0029898 -0.06535062 -0.23321396 -0.15330765 -0.37475765 -0.16811436] [ 0.42438614 0.49655378 0.05754046 0.00555881 0.08311241 0.17298836 0.07889148 0.01887377 0.06617529 0.13259453 0.26002333 0.265065 0.34114885 -0.13656902 -0.03440901] [ 0.6662071 0.6739222 0.15915102 0.08173175 0.06108704 0.09736514 -0.12621579 -0.053202 0.05926198 0.28789145 0.21272093 -0.07804254 -0.3478332 -0.36873657 -0.07351037] [ 0.9010116 0.7103924 0.3270819 -0.13658313 -0.16755205 -0.33181837 -0.3289079 0.04255642 0.17490558 0.24024598 -0.23247106 -0.39627582 -0.5630446 -0.2154221 -0.1121637 ] [ 0.72886634 0.7729492 0.10181745 0.06764428 -0.31594414 -0.01987516 -0.16716902 0.30780622 0.3769936 0.33837113 -0.20199841 -0.8246712 -1.0036585 -0.53351915 -0.20028271] [ 0.38336226 0.39566556 0.03559358 0.17438598 -0.18217403 -0.11914032 -0.13528886 0.3969749 0.6155495 0.45783642 0.10929924 -0.5447958 -0.5617325 -0.30993378 -0.07153812] [ 0.04433791 0.12469967 -0.3246194 0.22595567 -0.08123949 0.1874438 -0.18069199 0.28994566 0.5147903 0.646204 0.531677 -0.05054994 -0.16409265 -0.23591344 -0.04420038]] [[-0.14473552 -0.3229701 -0.09943587 -0.24274053 0.08019128 -0.19504148 0.00433404 -0.13150604 0.01672189 0.03144683 -0.0268188 0.0969353 -0.1503265 0.00661539 -0.03901451] [-0.26921332 -0.31279337 -0.14518827 -0.37761933 -0.32485843 -0.55746466 -0.4794551 -0.510897 -0.3510726 0.10315225 0.2329269 0.03214576 -0.05164514 -0.04026772 0.21823616] [-0.19088212 -0.00184458 0.04367334 -0.37889618 -0.6094771 -0.26846993 -0.2494656 0.01481469 -0.12295614 0.1816093 0.01766953 0.01798455 0.24069098 0.03900898 0.04246385] [-0.07267797 0.20493881 0.32173708 -0.1920521 -0.6248653 -0.46982962 -0.39397836 -0.22306688 -0.2365498 0.03992577 -0.02925109 -0.21543656 0.14653644 0.10056669 0.1738425 ] [ 0.0478895 0.19140707 0.1595276 
-0.37877977 -0.41124815 -0.02671473 0.25653428 0.12448553 -0.01837577 -0.20401222 -0.45995796 -0.2193008 -0.00688993 0.20717354 -0.00889833] [-0.05672662 0.08711958 0.09097072 -0.25166115 -0.28600848 0.04090034 0.12858374 0.04911955 -0.11680447 0.09753085 -0.0738626 -0.22727543 -0.37633842 -0.06852659 0.14732844] [-0.458849 -0.1764914 -0.3227525 -0.10589191 -0.38183647 0.15471077 0.06847135 0.05189036 -0.30325836 -0.01333747 0.02022293 -0.04517288 -0.4019072 -0.1888879 0.06164532] [-0.4119484 -0.05921258 0.02672989 0.3076036 -0.07884979 0.18580431 0.03161208 0.01175929 -0.18097286 0.19104078 0.3536099 0.1916349 -0.03313422 -0.08873776 0.03571338] [-0.26455677 -0.07178717 -0.02736973 -0.01160249 -0.46356094 -0.28623855 -0.1840432 0.02740156 0.07962096 0.13199939 0.13661936 0.06846922 0.1287448 0.21140952 0.13182554] [-0.05598627 0.24766451 0.6785879 0.22440079 -0.32295597 -0.31807163 -0.16013734 0.09766911 0.1407898 0.26028693 0.15464434 -0.01027496 0.01920033 0.06608573 0.06406441] [ 0.23348504 0.53366333 0.71320057 0.2699973 -0.21673532 -0.47448 -0.23172076 0.14851047 0.34536678 0.18938549 0.13525571 -0.16919602 -0.24384838 -0.27124614 -0.05764489] [ 0.13730992 0.5904151 0.7404659 0.5788644 -0.05484223 -0.35022467 -0.12266857 0.05586205 0.08206318 -0.01112978 0.2795667 0.12441771 -0.01141253 -0.28077608 -0.10362574] [ 0.48644602 0.6064604 0.30383313 -0.02466883 0.03416406 -0.368523 0.1331577 0.10483423 0.1554682 0.0081779 0.20665604 0.04440934 -0.1212517 -0.35659933 -0.11182022] [ 0.18144679 0.14729339 0.12140486 -0.26369986 -0.17001766 -0.495962 0.21638605 0.23993567 0.09361321 -0.03679806 -0.00232401 -0.01939278 -0.14476699 -0.0373102 0.1458637 ] [ 0.19611444 0.0480094 0.02877157 -0.34661034 0.03007603 -0.23988527 0.2891119 0.23104636 0.14998575 0.04540147 -0.12837082 -0.18078095 -0.2728171 -0.09045483 0.0639247 ]] [[-0.05419416 0.11876375 0.07787242 -0.05105833 -0.2369762 -0.1543633 -0.27160558 -0.45988587 -0.35373256 -0.24675618 0.16873969 0.05281411 
0.04117295 -0.2095841 -0.10940506] [-0.12566549 0.08204488 0.1577486 0.24653919 -0.03382478 0.0381913 -0.39956728 -0.5960689 -0.7907612 -0.5533906 -0.1024712 -0.02183583 0.13426822 0.0734007 0.2129227 ] [ 0.10990135 0.20497313 0.25378904 -0.00290332 -0.2629987 -0.04918447 -0.3298412 -0.43052125 -0.780214 -0.7796369 -0.66517097 -0.24584569 0.09083752 0.2360407 0.10356519] [-0.10462618 -0.21595971 -0.00715704 0.34027374 0.4110439 0.52743745 -0.0720379 -0.14997874 -0.31110615 -0.500284 -0.6772409 -0.27437475 0.08542016 0.37283787 0.08744354] [ 0.02353185 -0.12109677 -0.00343146 -0.06954519 0.4045476 0.46195543 0.3412579 0.02105604 0.39553702 -0.08311219 -0.5041708 -0.5236593 -0.16577026 0.15974675 -0.07014378] [-0.15656422 -0.29910976 -0.17226662 0.26021063 0.9074478 1.0510833 0.62325424 0.28477314 0.7676434 0.43916935 0.09590863 -0.6694388 -0.65361565 -0.49841028 -0.17693013] [-0.17363419 -0.22136116 -0.26703203 -0.13130853 0.34638494 0.2577112 0.18049397 -0.09984414 0.30857345 -0.09435043 -0.24404328 -0.839854 -0.5875275 -0.48179913 -0.01361521] [-0.49599558 -0.6682658 -0.6399588 -0.0586165 0.17969199 0.02873286 -0.5115178 -0.65765643 -0.16549641 -0.10518948 0.03919174 -0.489507 -0.53844154 -0.61229634 -0.28828087] [-0.26110527 -0.32647297 -0.45378146 -0.11760434 -0.06147426 -0.4757541 -0.9219089 -1.1636672 -0.41675702 -0.1177206 0.13183725 -0.18962634 -0.30175647 -0.46756268 -0.3375777 ] [-0.2878412 -0.32503325 -0.68527895 -0.13983801 -0.1132466 -0.136458 -0.84233856 -0.9168715 -0.43864372 0.16352473 0.07615261 -0.18895118 -0.5251333 -0.673243 -0.623299 ] [-0.06641783 -0.08690968 -0.56167006 -0.13650024 0.04512674 0.17795332 -0.26792526 -0.0723607 0.10287846 0.41650444 0.15274848 0.07364512 -0.12177612 -0.30220065 -0.2626612 ] [-0.3068457 -0.5320907 -1.0493329 -0.32198566 0.09291665 0.76573324 0.34052345 0.3696139 0.09116246 0.22379126 0.16810195 0.34888056 0.160971 0.16437086 -0.00568561] [-0.30908772 -0.40085503 -0.51988226 0.06314258 0.09512709 0.6070803 
0.22264402 0.48308712 0.15559418 0.10536864 0.21996352 0.29450747 0.1419824 0.26825607 0.23994017] [-0.03774326 0.04066223 -0.2934327 0.13402373 -0.10132753 0.34232494 0.01075824 0.13961422 -0.07630082 -0.13648687 -0.02153687 -0.04023693 -0.19707742 0.01538507 0.11656761] [ 0.17767437 0.33181974 0.24449492 0.27077097 -0.06090884 -0.11309593 -0.31037748 0.06212819 0.02838437 -0.00646475 -0.16347612 -0.1992965 -0.17643014 0.01137961 0.15401751]]]]; ov_res: [[[[-0.22928566 -0.24200922 0.23596567 0.40569603 0.5437289 0.40849012 0.15733343 0.07585856 0.08438125 0.23342401 -0.11906806 -0.54989254 -0.83014786 -0.66344094 -0.3499414 ] [-0.33952332 -0.3124693 0.07828274 0.1646542 0.20186673 0.10421921 -0.09521076 -0.12862414 0.09654578 0.3681036 -0.03197113 -0.60738724 -0.81126744 -0.5753148 -0.2843753 ] [-0.06932044 -0.12692602 0.07631082 0.00453989 0.18049382 -0.12510824 -0.12479867 -0.30285507 -0.08726803 -0.2683327 -0.31417271 -0.40527624 -0.5767859 -0.8063736 -0.7573006 ] [-0.458802 -0.43636382 -0.36280784 -0.23389217 -0.2342298 -0.26703015 -0.11653005 -0.19472902 0.10571909 -0.13778107 -0.04300535 -0.17059726 -0.0564369 -0.12035877 -0.21610914] [-0.17381744 -0.26933682 -0.4263145 -0.10911078 0.16639604 0.35764164 0.10414158 -0.06647919 -0.15927869 -0.4359935 -0.2809932 -0.2577597 -0.03442742 -0.10486995 -0.1068621 ] [-0.09360582 -0.26064873 -0.72045845 -0.3448168 0.07084624 0.6384863 0.40157154 0.07338928 -0.13343394 -0.15635303 -0.15874274 -0.21607696 -0.14178763 0.10871015 0.09937676] [ 0.06459416 -0.06722508 -0.66229796 -0.41392422 -0.13639912 0.34558332 0.1668396 0.13293107 -0.04623924 -0.19282787 -0.5290566 -0.47577122 -0.31605038 -0.03192545 -0.03863784] [-0.09805749 -0.06870605 -0.635623 -0.4298563 -0.33862972 0.16063352 0.21835856 0.17293964 0.1760691 0.09149008 -0.17257962 -0.12388415 -0.12040138 -0.21064588 -0.3141116 ] [-0.15219074 0.14993459 -0.24401763 -0.15771827 -0.40457916 -0.0280354 0.18611433 0.16587538 0.14469433 0.0029898 -0.06535062 -0.23321396 
-0.15330765 -0.37475765 -0.16811436] [ 0.42438614 0.49655378 0.05754046 0.00555881 0.08311241 0.17298836 0.07889148 0.01887377 0.06617529 0.13259453 0.26002333 0.265065 0.34114885 -0.13656902 -0.03440901] [ 0.6662071 0.6739222 0.15915102 0.08173175 0.06108704 0.09736514 -0.12621579 -0.053202 0.05926198 0.28789145 0.21272093 -0.07804254 -0.3478332 -0.36873657 -0.07351037] [ 0.9010116 0.7103924 0.3270819 -0.13658313 -0.16755205 -0.33181837 -0.3289079 0.04255642 0.17490558 0.24024598 -0.23247106 -0.39627582 -0.5630446 -0.2154221 -0.1121637 ] [ 0.72886634 0.7729492 0.10181745 0.06764428 -0.31594414 -0.01987516 -0.16716902 0.30780622 0.3769936 0.33837113 -0.20199841 -0.8246712 -1.0036585 -0.53351915 -0.20028271] [ 0.38336226 0.39566556 0.03559358 0.17438598 -0.18217403 -0.11914032 -0.13528886 0.3969749 0.6155495 0.45783642 0.10929924 -0.5447958 -0.5617325 -0.30993378 -0.07153812] [ 0.04433791 0.12469967 -0.3246194 0.22595567 -0.08123949 0.1874438 -0.18069199 0.28994566 0.5147903 0.646204 0.531677 -0.05054994 -0.16409265 -0.23591344 -0.04420038]] [[-0.14473552 -0.3229701 -0.09943587 -0.24274053 0.08019128 -0.19504148 0.00433404 -0.13150604 0.01672189 0.03144683 -0.0268188 0.0969353 -0.1503265 0.00661539 -0.03901451] [-0.26921332 -0.31279337 -0.14518827 -0.37761933 -0.32485843 -0.55746466 -0.4794551 -0.510897 -0.3510726 0.10315225 0.2329269 0.03214576 -0.05164514 -0.04026772 0.21823616] [-0.19088212 -0.00184458 0.04367334 -0.37889618 -0.6094771 -0.26846993 -0.2494656 0.01481469 -0.12295614 0.1816093 0.01766953 0.01798455 0.24069098 0.03900898 0.04246385] [-0.07267797 0.20493881 0.32173708 -0.1920521 -0.6248653 -0.46982962 -0.39397836 -0.22306688 -0.2365498 0.03992577 -0.02925109 -0.21543656 0.14653644 0.10056669 0.1738425 ] [ 0.0478895 0.19140707 0.1595276 -0.37877977 -0.41124815 -0.02671473 0.25653428 0.12448553 -0.01837577 -0.20401222 -0.45995796 -0.2193008 -0.00688993 0.20717354 -0.00889833] [-0.05672662 0.08711958 0.09097072 -0.25166115 -0.28600848 0.04090034 
0.12858374 0.04911955 -0.11680447 0.09753085 -0.0738626 -0.22727543 -0.37633842 -0.06852659 0.14732844] [-0.458849 -0.1764914 -0.3227525 -0.10589191 -0.38183647 0.15471077 0.06847135 0.05189036 -0.30325836 -0.01333747 0.02022293 -0.04517288 -0.4019072 -0.1888879 0.06164532] [-0.4119484 -0.05921258 0.02672989 0.3076036 -0.07884979 0.18580431 0.03161208 0.01175929 -0.18097286 0.19104078 0.3536099 0.1916349 -0.03313422 -0.08873776 0.03571338] [-0.26455677 -0.07178717 -0.02736973 -0.01160249 -0.46356094 -0.28623855 -0.1840432 0.02740156 0.07962096 0.13199939 0.13661936 0.06846922 0.1287448 0.21140952 0.13182554] [-0.05598627 0.24766451 0.6785879 0.22440079 -0.32295597 -0.31807163 -0.16013734 0.09766911 0.1407898 0.26028693 0.15464434 -0.01027496 0.01920033 0.06608573 0.06406441] [ 0.23348504 0.53366333 0.71320057 0.2699973 -0.21673532 -0.47448 -0.23172076 0.14851047 0.34536678 0.18938549 0.13525571 -0.16919602 -0.24384838 -0.27124614 -0.05764489] [ 0.13730992 0.5904151 0.7404659 0.5788644 -0.05484223 -0.35022467 -0.12266857 0.05586205 0.08206318 -0.01112978 0.2795667 0.12441771 -0.01141253 -0.28077608 -0.10362574] [ 0.48644602 0.6064604 0.30383313 -0.02466883 0.03416406 -0.368523 0.1331577 0.10483423 0.1554682 0.0081779 0.20665604 0.04440934 -0.1212517 -0.35659933 -0.11182022] [ 0.18144679 0.14729339 0.12140486 -0.26369986 -0.17001766 -0.495962 0.21638605 0.23993567 0.09361321 -0.03679806 -0.00232401 -0.01939278 -0.14476699 -0.0373102 0.1458637 ] [ 0.19611444 0.0480094 0.02877157 -0.34661034 0.03007603 -0.23988527 0.2891119 0.23104636 0.14998575 0.04540147 -0.12837082 -0.18078095 -0.2728171 -0.09045483 0.0639247 ]] [[-0.05419416 0.11876375 0.07787242 -0.05105833 -0.2369762 -0.1543633 -0.27160558 -0.45988587 -0.35373256 -0.24675618 0.16873969 0.05281411 0.04117295 -0.2095841 -0.10940506] [-0.12566549 0.08204488 0.1577486 0.24653919 -0.03382478 0.0381913 -0.39956728 -0.5960689 -0.7907612 -0.5533906 -0.1024712 -0.02183583 0.13426822 0.0734007 0.2129227 ] [ 0.10990135 
0.20497313 0.25378904 -0.00290332 -0.2629987 -0.04918447 -0.3298412 -0.43052125 -0.780214 -0.7796369 -0.66517097 -0.24584569 0.09083752 0.2360407 0.10356519] [-0.10462618 -0.21595971 -0.00715704 0.34027374 0.4110439 0.52743745 -0.0720379 -0.14997874 -0.31110615 -0.500284 -0.6772409 -0.27437475 0.08542016 0.37283787 0.08744354] [ 0.02353185 -0.12109677 -0.00343146 -0.06954519 0.4045476 0.46195543 0.3412579 0.02105604 0.39553702 -0.08311219 -0.5041708 -0.5236593 -0.16577026 0.15974675 -0.07014378] [-0.15656422 -0.29910976 -0.17226662 0.26021063 0.9074478 1.0510833 0.62325424 0.28477314 0.7676434 0.43916935 0.09590863 -0.6694388 -0.65361565 -0.49841028 -0.17693013] [-0.17363419 -0.22136116 -0.26703203 -0.13130853 0.34638494 0.2577112 0.18049397 -0.09984414 0.30857345 -0.09435043 -0.24404328 -0.839854 -0.5875275 -0.48179913 -0.01361521] [-0.49599558 -0.6682658 -0.6399588 -0.0586165 0.17969199 0.02873286 -0.5115178 -0.65765643 -0.16549641 -0.10518948 0.03919174 -0.489507 -0.53844154 -0.61229634 -0.28828087] [-0.26110527 -0.32647297 -0.45378146 -0.11760434 -0.06147426 -0.4757541 -0.9219089 -1.1636672 -0.41675702 -0.1177206 0.13183725 -0.18962634 -0.30175647 -0.46756268 -0.3375777 ] [-0.2878412 -0.32503325 -0.68527895 -0.13983801 -0.1132466 -0.136458 -0.84233856 -0.9168715 -0.43864372 0.16352473 0.07615261 -0.18895118 -0.5251333 -0.673243 -0.623299 ] [-0.06641783 -0.08690968 -0.56167006 -0.13650024 0.04512674 0.17795332 -0.26792526 -0.0723607 0.10287846 0.41650444 0.15274848 0.07364512 -0.12177612 -0.30220065 -0.2626612 ] [-0.3068457 -0.5320907 -1.0493329 -0.32198566 0.09291665 0.76573324 0.34052345 0.3696139 0.09116246 0.22379126 0.16810195 0.34888056 0.160971 0.16437086 -0.00568561] [-0.30908772 -0.40085503 -0.51988226 0.06314258 0.09512709 0.6070803 0.22264402 0.48308712 0.15559418 0.10536864 0.21996352 0.29450747 0.1419824 0.26825607 0.23994017] [-0.03774326 0.04066223 -0.2934327 0.13402373 -0.10132753 0.34232494 0.01075824 0.13961422 -0.07630082 -0.13648687 
-0.02153687 -0.04023693 -0.19707742 0.01538507 0.11656761] [ 0.17767437 0.33181974 0.24449492 0.27077097 -0.06090884 -0.11309593 -0.31037748 0.06212819 0.02838437 -0.00646475 -0.16347612 -0.1992965 -0.17643014 0.01137961 0.15401751]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [0, 1]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5658.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[0, 1]]() %6 : int[] = prim::Constant[value=[1, 1]]() %7 : int[] = prim::Constant[value=[3, 3]]() %8 : Tensor = aten::avg_pool2d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%8) fw_re: [[[[-1.72672287e-01 -7.34571591e-02 4.10483144e-02 1.81218505e-01 1.08629065e-02 -5.88137321e-02 6.63317069e-02 -2.21120920e-02 2.42759258e-01 1.05573021e-01 5.91478705e-01 3.46537650e-01 4.07521307e-01 1.26741678e-01 3.32379080e-02] [-6.14159293e-02 1.95407182e-01 2.42887184e-01 3.71651590e-01 2.21857011e-01 -1.00488864e-01 4.11312506e-02 -4.85060066e-02 8.38522553e-01 6.03339612e-01 5.82759857e-01 -4.54431266e-01 -3.19740802e-01 -3.26223701e-01 6.84484392e-02] [-1.01429924e-01 -1.42248869e-02 1.70221068e-02 1.60955638e-01 9.46696699e-02 -3.59043144e-02 -2.97675692e-02 -2.16343328e-02 6.38751447e-01 4.06198442e-01 2.41930649e-01 -4.85986590e-01 -3.35128844e-01 -2.97004193e-01 -2.13475036e-03] [-8.95169526e-02 -3.20668906e-01 -4.80688423e-01 -1.34807780e-01 3.43634456e-01 3.69003683e-01 8.86223763e-02 -1.10132433e-01 4.95317668e-01 4.53987956e-01 1.86520949e-01 -4.89023536e-01 -3.33737910e-01 2.16619655e-01 5.50967216e-01] [ 2.26025179e-01 -5.55356890e-02 -2.65578896e-01 -8.72339681e-02 -2.34258976e-02 2.47659624e-01 -6.82782456e-02 1.55196583e-03 4.96255001e-04 4.14101072e-02 1.53890317e-02 -2.10003540e-01 -1.57651380e-02 4.76007611e-01 6.75315738e-01] [ 7.63166845e-02 -1.06850661e-01 3.00457012e-02 8.93450454e-02 2.83424258e-01 4.61642861e-01 2.81242520e-01 
3.83713515e-03 -1.75049171e-01 2.82141492e-02 2.94102967e-01 -3.63326669e-02 -2.46191695e-02 3.58657002e-01 6.99677825e-01] [ 3.72601449e-01 4.40199345e-01 5.48732996e-01 4.13183779e-01 2.75287509e-01 4.11057442e-01 1.64976627e-01 -1.27600446e-01 -2.12227404e-01 -6.34269416e-02 4.15684760e-01 5.62124141e-03 1.40881300e-01 2.93627203e-01 4.54651892e-01] [-5.40437289e-02 -2.15096354e-01 -1.10077694e-01 -2.72107601e-01 -3.93378083e-03 1.21032000e-01 3.37598592e-01 -1.03977598e-01 -1.94664285e-01 -1.08599082e-01 5.02848446e-01 3.76912951e-01 1.53194115e-01 5.34001179e-02 3.11747398e-02] [-3.11005916e-02 -2.14915246e-01 -4.98926759e-01 -3.39228660e-01 -3.47090781e-01 1.97550729e-02 3.56349349e-02 8.37927163e-02 4.07492518e-02 8.64679515e-02 4.02864754e-01 4.54808444e-01 3.08612615e-01 2.88427323e-01 -7.16505433e-03] [-3.89148295e-01 -7.55705178e-01 -1.09417689e+00 -8.54866862e-01 -6.66417658e-01 -1.64720699e-01 -2.20927909e-01 -2.86075115e-01 -5.20868719e-01 -3.57711732e-01 -3.01996730e-02 3.91469687e-01 8.31525996e-02 4.77922820e-02 -2.52320856e-01] [-3.69452924e-01 -4.54130590e-01 -6.78391635e-01 -2.69771218e-01 -2.91995913e-01 2.02777028e-01 -3.85475963e-01 -7.00130165e-01 -7.21494377e-01 -3.36416095e-01 9.18085426e-02 4.50109005e-01 3.62923980e-01 3.70329499e-01 -1.20180584e-02] [-9.10760835e-02 -1.65337145e-01 -3.19628790e-02 -2.78718341e-02 7.85778090e-02 2.89618939e-01 -2.76826978e-01 -7.31943786e-01 -9.04198110e-01 -7.82342374e-01 -2.67420620e-01 6.25186488e-02 8.05663541e-02 -2.81494021e-01 -4.02472764e-01] [-9.57252681e-02 -2.16944888e-02 1.69012964e-01 2.56110102e-01 4.48637068e-01 6.87255204e-01 1.94691569e-01 -2.87343174e-01 -3.52154613e-01 -2.63505489e-01 1.78311467e-02 1.88704040e-02 -1.32433906e-01 -2.16057479e-01 -2.19674408e-01]] [[-9.84936655e-02 1.79637790e-01 5.85018247e-02 1.27183259e-01 -1.55791305e-02 -3.33529681e-01 -3.14556152e-01 -6.38379455e-01 -1.61797196e-01 -2.86404192e-01 1.43283978e-01 2.25729048e-01 2.06263259e-01 -2.04520732e-01 
-3.79194140e-01] [-2.89747030e-01 1.72282830e-01 2.22853929e-01 7.70965815e-01 6.68519258e-01 2.86809236e-01 -2.76955307e-01 -5.55864930e-01 -1.11864261e-01 -4.46402580e-02 -5.56184053e-02 1.37777761e-01 1.92348629e-01 -4.36620526e-02 -3.29011321e-01] [-2.87437774e-02 3.55547786e-01 2.17927024e-01 6.68836474e-01 7.24001169e-01 5.07506907e-01 -3.23919177e-01 -7.32763648e-01 -4.18721884e-01 -1.25898510e-01 -1.03297710e-01 -5.24589941e-02 1.61233425e-01 -7.36526772e-02 -1.78077042e-01] [ 1.18077837e-01 5.06523848e-01 4.63300437e-01 6.19646490e-01 3.93968940e-01 5.11605322e-01 -9.28690284e-02 -1.23624720e-01 -1.18527874e-01 3.18768770e-02 -2.71504130e-02 -8.17353353e-02 2.22267047e-01 1.87332388e-02 7.06556253e-03] [ 3.32202941e-01 4.75389212e-01 2.96277523e-01 3.44472617e-01 4.83417302e-01 8.96868289e-01 3.44654083e-01 1.86689213e-01 9.38220788e-03 3.53261143e-01 3.60600621e-01 7.41986930e-02 2.85502583e-01 5.87103292e-02 2.01156199e-01] [ 1.66471958e-01 3.84427816e-01 3.36969912e-01 3.11777145e-01 3.64121079e-01 7.55035818e-01 4.51473594e-01 3.57246459e-01 -5.15676662e-02 4.00751740e-01 2.35615492e-01 1.50363922e-01 7.33270124e-02 -1.30362526e-01 -9.73306224e-02] [ 2.18935341e-01 3.28329235e-01 1.36765659e-01 2.36510307e-01 5.54286182e-01 8.68375123e-01 6.58218443e-01 3.32349062e-01 -1.30788431e-01 1.47474110e-01 -1.72598317e-01 -2.23901778e-01 -3.14194232e-01 -2.65499264e-01 -1.30967066e-01] [-4.25669141e-02 6.64560199e-02 9.24417302e-02 3.14659387e-01 4.30406660e-01 8.12069178e-01 8.02929640e-01 6.06123745e-01 -2.02117741e-01 -3.68727297e-01 -5.28542340e-01 -1.75873205e-01 -1.02603501e-02 -3.76837030e-02 -7.12497011e-02] [-4.80622128e-02 2.07010249e-04 1.29410848e-01 3.54632765e-01 3.98000360e-01 7.24781156e-01 6.84848845e-01 5.25601149e-01 -2.31864722e-03 -2.09713593e-01 -3.35600704e-01 -1.06380850e-01 1.53630689e-01 2.24285841e-01 1.40420914e-01] [ 1.82706252e-01 2.49441564e-01 4.80671138e-01 4.96740133e-01 6.15194917e-01 8.92867625e-01 7.93582678e-01 
6.67981446e-01 7.41753057e-02 9.80612710e-02 4.21200246e-02 4.03357089e-01 5.04979432e-01 3.91279787e-01 1.63493842e-01] [ 1.86833143e-01 6.78256601e-02 4.65301841e-01 7.45300055e-02 4.52762395e-01 2.95025557e-01 2.21339285e-01 6.17838837e-02 -3.68740767e-01 1.08282462e-01 3.82141938e-04 3.29873919e-01 1.94898352e-01 1.70507431e-01 1.28547996e-01] [ 2.21610591e-01 -1.78312585e-02 2.42258862e-01 -1.41742334e-01 2.60405689e-01 2.51775414e-01 2.98486441e-01 3.91700476e-01 -1.56767130e-01 7.27668181e-02 -3.02822441e-01 8.96419585e-02 6.70926273e-02 2.46753037e-01 2.40334064e-01] [-7.35957623e-02 -1.72336921e-01 1.04751244e-01 -3.16503823e-01 -1.67644426e-01 -9.05665383e-03 2.00242788e-01 4.05614734e-01 3.66203822e-02 4.02653962e-01 -6.80106953e-02 -1.09867483e-01 -5.74326634e-01 -3.20513368e-01 -6.09344058e-02]] [[-8.65493640e-02 -3.31348091e-01 -3.35277647e-01 -4.48532961e-02 -7.22898021e-02 -5.35850883e-01 -3.57365787e-01 9.12004034e-04 7.67629206e-01 5.91049850e-01 7.93231726e-01 4.18035060e-01 2.03692675e-01 -4.84323576e-02 -2.78159250e-02] [ 8.59611928e-02 -7.55237341e-02 -3.33882958e-01 -7.32587948e-02 -1.31919369e-01 -2.34706223e-01 -3.26898843e-02 4.31383014e-01 8.25758696e-01 7.71341980e-01 1.00823903e+00 1.06000018e+00 6.73290014e-01 1.48938596e-01 -7.72058442e-02] [-6.82103932e-02 -3.93130854e-02 -3.61523867e-01 -1.59900323e-01 -1.53598204e-01 -4.99449857e-03 2.73000803e-02 3.47222596e-01 5.02179682e-01 6.76893353e-01 5.47684610e-01 3.01598966e-01 2.74069935e-01 -1.52615860e-01 -4.50378284e-02] [-3.41692343e-02 7.64475763e-02 -4.27176327e-01 -4.40994203e-01 -8.60115170e-01 -3.82594347e-01 -2.83460230e-01 9.72350538e-02 4.57550809e-02 2.90838540e-01 1.70779049e-01 -2.03625504e-02 -2.10676298e-01 -3.38702857e-01 -2.12454602e-01] [-9.13093388e-02 1.25553712e-01 5.38287163e-02 -1.41467661e-01 -5.81107557e-01 -2.08623275e-01 -1.79969579e-01 -2.52176046e-01 -5.15733600e-01 -4.30096000e-01 -2.25790709e-01 -4.80583191e-01 -2.88088441e-01 -2.89256841e-01 
-7.58666173e-02] [-2.30643302e-01 -2.14570895e-01 -5.16748875e-02 -3.03684950e-01 -5.41488290e-01 -1.25591591e-01 -4.10613492e-02 -2.26734743e-01 -4.89326358e-01 -7.44114637e-01 -4.33844060e-01 -5.83913505e-01 -3.34881246e-01 -9.06657130e-02 1.58283971e-02] [-1.72329590e-01 -1.85472757e-01 3.13750505e-02 -3.21612388e-01 -3.18919003e-01 1.53751642e-01 1.40081644e-01 -2.05319583e-01 -6.39990628e-01 -6.23936176e-01 -2.20740393e-01 -2.48908445e-01 1.20023318e-01 3.04640263e-01 3.65502924e-01] [-2.40944043e-01 -4.63179469e-01 -2.56042093e-01 -5.55817902e-01 -2.64867187e-01 -9.80906487e-02 1.12895884e-01 1.06705971e-01 2.71237046e-01 1.49391098e-02 4.76904726e-03 -4.61118549e-01 -3.77742440e-01 -1.05289377e-01 2.38578349e-01] [-2.58642137e-01 -1.99635357e-01 -1.43136203e-01 -4.08408970e-01 -4.24161404e-01 -2.27426827e-01 3.52225378e-02 2.40211189e-01 3.67855966e-01 5.84712699e-02 -7.04258233e-02 -3.56029533e-02 -8.39873180e-02 9.28745791e-02 3.55505943e-02] [-5.64363152e-02 3.01277876e-01 4.11432415e-01 5.48269987e-01 2.85395384e-01 -1.00864254e-01 -3.43287915e-01 -1.38840824e-01 3.15505207e-01 3.53183091e-01 1.21887773e-01 1.36378378e-01 -2.66621947e-01 -1.51188135e-01 -2.60190636e-01] [ 3.81591097e-02 3.63043249e-01 1.06934071e-01 4.04515654e-01 2.26437643e-01 1.42951757e-01 -4.00275767e-01 -4.53892350e-01 -2.41683185e-01 3.89179379e-01 3.38571846e-01 5.71801424e-01 1.16281062e-01 4.64595966e-02 -2.31048211e-01] [ 2.54949957e-01 5.07464290e-01 3.39961737e-01 6.65609896e-01 5.63294291e-01 1.15533367e-01 -5.64114630e-01 -5.62678993e-01 -2.21189871e-01 5.36231339e-01 4.14691150e-01 6.10875905e-01 2.50604272e-01 2.07769468e-01 -3.92325707e-02] [-3.15251023e-01 -1.27906784e-01 -5.40341251e-03 4.96755004e-01 3.26310515e-01 8.41088071e-02 -1.18345909e-01 5.67250606e-03 7.59485587e-02 2.13694379e-01 2.18051225e-01 2.51980126e-01 1.15390912e-01 1.88159883e-01 1.35864794e-01]]]]; ov_res: [[[[-1.72672287e-01 -7.34571591e-02 4.10483144e-02 1.81218505e-01 1.08629065e-02 
-5.88137321e-02 6.63317069e-02 -2.21120920e-02 2.42759258e-01 1.05573021e-01 5.91478705e-01 3.46537650e-01 4.07521307e-01 1.26741678e-01 3.32379080e-02] [-6.14159293e-02 1.95407182e-01 2.42887184e-01 3.71651590e-01 2.21857011e-01 -1.00488864e-01 4.11312506e-02 -4.85060066e-02 8.38522553e-01 6.03339612e-01 5.82759857e-01 -4.54431266e-01 -3.19740802e-01 -3.26223701e-01 6.84484392e-02] [-1.01429924e-01 -1.42248869e-02 1.70221068e-02 1.60955638e-01 9.46696699e-02 -3.59043144e-02 -2.97675692e-02 -2.16343328e-02 6.38751447e-01 4.06198442e-01 2.41930649e-01 -4.85986590e-01 -3.35128844e-01 -2.97004193e-01 -2.13475036e-03] [-8.95169526e-02 -3.20668906e-01 -4.80688423e-01 -1.34807780e-01 3.43634456e-01 3.69003683e-01 8.86223763e-02 -1.10132433e-01 4.95317668e-01 4.53987956e-01 1.86520949e-01 -4.89023536e-01 -3.33737910e-01 2.16619655e-01 5.50967216e-01] [ 2.26025179e-01 -5.55356890e-02 -2.65578896e-01 -8.72339681e-02 -2.34258976e-02 2.47659624e-01 -6.82782456e-02 1.55196583e-03 4.96255001e-04 4.14101072e-02 1.53890317e-02 -2.10003540e-01 -1.57651380e-02 4.76007611e-01 6.75315738e-01] [ 7.63166845e-02 -1.06850661e-01 3.00457012e-02 8.93450454e-02 2.83424258e-01 4.61642861e-01 2.81242520e-01 3.83713515e-03 -1.75049171e-01 2.82141492e-02 2.94102967e-01 -3.63326669e-02 -2.46191695e-02 3.58657002e-01 6.99677825e-01] [ 3.72601449e-01 4.40199345e-01 5.48732996e-01 4.13183779e-01 2.75287509e-01 4.11057442e-01 1.64976627e-01 -1.27600446e-01 -2.12227404e-01 -6.34269416e-02 4.15684760e-01 5.62124141e-03 1.40881300e-01 2.93627203e-01 4.54651892e-01] [-5.40437289e-02 -2.15096354e-01 -1.10077694e-01 -2.72107601e-01 -3.93378083e-03 1.21032000e-01 3.37598592e-01 -1.03977598e-01 -1.94664285e-01 -1.08599082e-01 5.02848446e-01 3.76912951e-01 1.53194115e-01 5.34001179e-02 3.11747398e-02] [-3.11005916e-02 -2.14915246e-01 -4.98926759e-01 -3.39228660e-01 -3.47090781e-01 1.97550729e-02 3.56349349e-02 8.37927163e-02 4.07492518e-02 8.64679515e-02 4.02864754e-01 4.54808444e-01 3.08612615e-01 
2.88427323e-01 -7.16505433e-03] [-3.89148295e-01 -7.55705178e-01 -1.09417689e+00 -8.54866862e-01 -6.66417658e-01 -1.64720699e-01 -2.20927909e-01 -2.86075115e-01 -5.20868719e-01 -3.57711732e-01 -3.01996730e-02 3.91469687e-01 8.31525996e-02 4.77922820e-02 -2.52320856e-01] [-3.69452924e-01 -4.54130590e-01 -6.78391635e-01 -2.69771218e-01 -2.91995913e-01 2.02777028e-01 -3.85475963e-01 -7.00130165e-01 -7.21494377e-01 -3.36416095e-01 9.18085426e-02 4.50109005e-01 3.62923980e-01 3.70329499e-01 -1.20180584e-02] [-9.10760835e-02 -1.65337145e-01 -3.19628790e-02 -2.78718341e-02 7.85778090e-02 2.89618939e-01 -2.76826978e-01 -7.31943786e-01 -9.04198110e-01 -7.82342374e-01 -2.67420620e-01 6.25186488e-02 8.05663541e-02 -2.81494021e-01 -4.02472764e-01] [-9.57252681e-02 -2.16944888e-02 1.69012964e-01 2.56110102e-01 4.48637068e-01 6.87255204e-01 1.94691569e-01 -2.87343174e-01 -3.52154613e-01 -2.63505489e-01 1.78311467e-02 1.88704040e-02 -1.32433906e-01 -2.16057479e-01 -2.19674408e-01]] [[-9.84936655e-02 1.79637790e-01 5.85018247e-02 1.27183259e-01 -1.55791305e-02 -3.33529681e-01 -3.14556152e-01 -6.38379455e-01 -1.61797196e-01 -2.86404192e-01 1.43283978e-01 2.25729048e-01 2.06263259e-01 -2.04520732e-01 -3.79194140e-01] [-2.89747030e-01 1.72282830e-01 2.22853929e-01 7.70965815e-01 6.68519258e-01 2.86809236e-01 -2.76955307e-01 -5.55864930e-01 -1.11864261e-01 -4.46402580e-02 -5.56184053e-02 1.37777761e-01 1.92348629e-01 -4.36620526e-02 -3.29011321e-01] [-2.87437774e-02 3.55547786e-01 2.17927024e-01 6.68836474e-01 7.24001169e-01 5.07506907e-01 -3.23919177e-01 -7.32763648e-01 -4.18721884e-01 -1.25898510e-01 -1.03297710e-01 -5.24589941e-02 1.61233425e-01 -7.36526772e-02 -1.78077042e-01] [ 1.18077837e-01 5.06523848e-01 4.63300437e-01 6.19646490e-01 3.93968940e-01 5.11605322e-01 -9.28690284e-02 -1.23624720e-01 -1.18527874e-01 3.18768770e-02 -2.71504130e-02 -8.17353353e-02 2.22267047e-01 1.87332388e-02 7.06556253e-03] [ 3.32202941e-01 4.75389212e-01 2.96277523e-01 3.44472617e-01 4.83417302e-01 
8.96868289e-01 3.44654083e-01 1.86689213e-01 9.38220788e-03 3.53261143e-01 3.60600621e-01 7.41986930e-02 2.85502583e-01 5.87103292e-02 2.01156199e-01] [ 1.66471958e-01 3.84427816e-01 3.36969912e-01 3.11777145e-01 3.64121079e-01 7.55035818e-01 4.51473594e-01 3.57246459e-01 -5.15676662e-02 4.00751740e-01 2.35615492e-01 1.50363922e-01 7.33270124e-02 -1.30362526e-01 -9.73306224e-02] [ 2.18935341e-01 3.28329235e-01 1.36765659e-01 2.36510307e-01 5.54286182e-01 8.68375123e-01 6.58218443e-01 3.32349062e-01 -1.30788431e-01 1.47474110e-01 -1.72598317e-01 -2.23901778e-01 -3.14194232e-01 -2.65499264e-01 -1.30967066e-01] [-4.25669141e-02 6.64560199e-02 9.24417302e-02 3.14659387e-01 4.30406660e-01 8.12069178e-01 8.02929640e-01 6.06123745e-01 -2.02117741e-01 -3.68727297e-01 -5.28542340e-01 -1.75873205e-01 -1.02603501e-02 -3.76837030e-02 -7.12497011e-02] [-4.80622128e-02 2.07010249e-04 1.29410848e-01 3.54632765e-01 3.98000360e-01 7.24781156e-01 6.84848845e-01 5.25601149e-01 -2.31864722e-03 -2.09713593e-01 -3.35600704e-01 -1.06380850e-01 1.53630689e-01 2.24285841e-01 1.40420914e-01] [ 1.82706252e-01 2.49441564e-01 4.80671138e-01 4.96740133e-01 6.15194917e-01 8.92867625e-01 7.93582678e-01 6.67981446e-01 7.41753057e-02 9.80612710e-02 4.21200246e-02 4.03357089e-01 5.04979432e-01 3.91279787e-01 1.63493842e-01] [ 1.86833143e-01 6.78256601e-02 4.65301841e-01 7.45300055e-02 4.52762395e-01 2.95025557e-01 2.21339285e-01 6.17838837e-02 -3.68740767e-01 1.08282462e-01 3.82141938e-04 3.29873919e-01 1.94898352e-01 1.70507431e-01 1.28547996e-01] [ 2.21610591e-01 -1.78312585e-02 2.42258862e-01 -1.41742334e-01 2.60405689e-01 2.51775414e-01 2.98486441e-01 3.91700476e-01 -1.56767130e-01 7.27668181e-02 -3.02822441e-01 8.96419585e-02 6.70926273e-02 2.46753037e-01 2.40334064e-01] [-7.35957623e-02 -1.72336921e-01 1.04751244e-01 -3.16503823e-01 -1.67644426e-01 -9.05665383e-03 2.00242788e-01 4.05614734e-01 3.66203822e-02 4.02653962e-01 -6.80106953e-02 -1.09867483e-01 -5.74326634e-01 -3.20513368e-01 
-6.09344058e-02]] [[-8.65493640e-02 -3.31348091e-01 -3.35277647e-01 -4.48532961e-02 -7.22898021e-02 -5.35850883e-01 -3.57365787e-01 9.12004034e-04 7.67629206e-01 5.91049850e-01 7.93231726e-01 4.18035060e-01 2.03692675e-01 -4.84323576e-02 -2.78159250e-02] [ 8.59611928e-02 -7.55237341e-02 -3.33882958e-01 -7.32587948e-02 -1.31919369e-01 -2.34706223e-01 -3.26898843e-02 4.31383014e-01 8.25758696e-01 7.71341980e-01 1.00823903e+00 1.06000018e+00 6.73290014e-01 1.48938596e-01 -7.72058442e-02] [-6.82103932e-02 -3.93130854e-02 -3.61523867e-01 -1.59900323e-01 -1.53598204e-01 -4.99449857e-03 2.73000803e-02 3.47222596e-01 5.02179682e-01 6.76893353e-01 5.47684610e-01 3.01598966e-01 2.74069935e-01 -1.52615860e-01 -4.50378284e-02] [-3.41692343e-02 7.64475763e-02 -4.27176327e-01 -4.40994203e-01 -8.60115170e-01 -3.82594347e-01 -2.83460230e-01 9.72350538e-02 4.57550809e-02 2.90838540e-01 1.70779049e-01 -2.03625504e-02 -2.10676298e-01 -3.38702857e-01 -2.12454602e-01] [-9.13093388e-02 1.25553712e-01 5.38287163e-02 -1.41467661e-01 -5.81107557e-01 -2.08623275e-01 -1.79969579e-01 -2.52176046e-01 -5.15733600e-01 -4.30096000e-01 -2.25790709e-01 -4.80583191e-01 -2.88088441e-01 -2.89256841e-01 -7.58666173e-02] [-2.30643302e-01 -2.14570895e-01 -5.16748875e-02 -3.03684950e-01 -5.41488290e-01 -1.25591591e-01 -4.10613492e-02 -2.26734743e-01 -4.89326358e-01 -7.44114637e-01 -4.33844060e-01 -5.83913505e-01 -3.34881246e-01 -9.06657130e-02 1.58283971e-02] [-1.72329590e-01 -1.85472757e-01 3.13750505e-02 -3.21612388e-01 -3.18919003e-01 1.53751642e-01 1.40081644e-01 -2.05319583e-01 -6.39990628e-01 -6.23936176e-01 -2.20740393e-01 -2.48908445e-01 1.20023318e-01 3.04640263e-01 3.65502924e-01] [-2.40944043e-01 -4.63179469e-01 -2.56042093e-01 -5.55817902e-01 -2.64867187e-01 -9.80906487e-02 1.12895884e-01 1.06705971e-01 2.71237046e-01 1.49391098e-02 4.76904726e-03 -4.61118549e-01 -3.77742440e-01 -1.05289377e-01 2.38578349e-01] [-2.58642137e-01 -1.99635357e-01 -1.43136203e-01 -4.08408970e-01 -4.24161404e-01 
-2.27426827e-01 3.52225378e-02 2.40211189e-01 3.67855966e-01 5.84712699e-02 -7.04258233e-02 -3.56029533e-02 -8.39873180e-02 9.28745791e-02 3.55505943e-02] [-5.64363152e-02 3.01277876e-01 4.11432415e-01 5.48269987e-01 2.85395384e-01 -1.00864254e-01 -3.43287915e-01 -1.38840824e-01 3.15505207e-01 3.53183091e-01 1.21887773e-01 1.36378378e-01 -2.66621947e-01 -1.51188135e-01 -2.60190636e-01] [ 3.81591097e-02 3.63043249e-01 1.06934071e-01 4.04515654e-01 2.26437643e-01 1.42951757e-01 -4.00275767e-01 -4.53892350e-01 -2.41683185e-01 3.89179379e-01 3.38571846e-01 5.71801424e-01 1.16281062e-01 4.64595966e-02 -2.31048211e-01] [ 2.54949957e-01 5.07464290e-01 3.39961737e-01 6.65609896e-01 5.63294291e-01 1.15533367e-01 -5.64114630e-01 -5.62678993e-01 -2.21189871e-01 5.36231339e-01 4.14691150e-01 6.10875905e-01 2.50604272e-01 2.07769468e-01 -3.92325707e-02] [-3.15251023e-01 -1.27906784e-01 -5.40341251e-03 4.96755004e-01 3.26310515e-01 8.41088071e-02 -1.18345909e-01 5.67250606e-03 7.59485587e-02 2.13694379e-01 2.18051225e-01 2.51980126e-01 1.15390912e-01 1.88159883e-01 1.35864794e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [1, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5661.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[1, 0]]() %6 : int[] = prim::Constant[value=[1, 1]]() %7 : int[] = prim::Constant[value=[3, 3]]() %8 : Tensor = aten::avg_pool2d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%8) fw_re: [[[[-4.11390454e-01 -3.84872884e-01 -3.79446208e-01 -4.17636931e-01 -2.52873540e-01 -2.57279664e-01 -2.54374504e-01 -3.49674106e-01 -6.90029487e-02 -1.35049239e-01 -9.17189792e-02 -3.87122005e-01 -3.87216546e-02] [-3.53212804e-01 -4.25334185e-01 -3.81002873e-01 -3.90538901e-01 -1.66714355e-01 -2.24297926e-01 -4.67091233e-01 -2.81951755e-01 -1.21531166e-01 5.01770042e-02 -1.43587455e-01 -5.10214448e-01 -1.67966247e-01] [-9.28897709e-02 -2.91149259e-01 -2.79982090e-01 -1.42751396e-01 -1.99802712e-01 -5.87809145e-01 -9.16075408e-01 -4.96544838e-01 -8.54853690e-02 4.09265190e-01 4.84490842e-02 -9.45784748e-02 -4.57353075e-04] [ 1.51221141e-01 -3.42917778e-02 2.62481775e-02 -3.76353562e-02 -5.41473590e-02 -2.21304879e-01 -4.71282601e-01 -7.21371323e-02 8.76375958e-02 3.78972471e-01 3.63499112e-02 -2.53162980e-01 -2.42437094e-01] [ 5.83449267e-02 -2.45065019e-02 6.34194463e-02 -3.13693136e-02 -2.50139356e-01 -3.87982517e-01 -2.39330828e-01 -1.75338447e-01 2.38352299e-01 2.26742163e-01 4.61184308e-02 -2.17821389e-01 -6.96657524e-02] [ 1.58856422e-01 2.20503986e-01 2.38287032e-01 -4.73132469e-02 -2.50864297e-01 -1.73114926e-01 2.34159783e-01 2.20456436e-01 2.98812121e-01 -2.34334208e-02 -1.06187589e-01 -1.16845556e-01 1.19322419e-01] [ 1.29336804e-01 5.29450849e-02 
1.39453799e-01 -1.50402814e-01 -5.21101892e-01 -5.20559132e-01 -3.37605894e-01 -1.18076071e-01 2.46012341e-02 -4.31608744e-02 -1.68281756e-02 -1.89214870e-01 -2.18572348e-01] [ 1.82962894e-01 -1.94187105e-01 -3.03172588e-01 -4.37138855e-01 -3.40690285e-01 -7.55365342e-02 -2.13954628e-01 -1.68323647e-02 -3.44534889e-02 2.33725131e-01 2.85900563e-01 2.17145622e-01 -2.48309210e-01] [ 9.80482399e-02 -2.29916945e-01 -3.18786263e-01 -4.61109519e-01 -5.59806585e-01 6.07993873e-03 -2.47206047e-01 -8.87700021e-02 -3.93371791e-01 1.31162614e-01 4.34148252e-01 3.34195018e-01 -8.72434489e-03] [ 2.29778871e-01 -1.52821586e-01 -9.19568911e-02 -2.89156824e-01 -4.60720688e-01 -4.68788221e-02 7.93714523e-02 2.24031270e-01 -4.95251268e-02 5.17816544e-01 4.97495234e-01 7.24544644e-01 4.25243497e-01] [ 3.33459586e-01 8.91274214e-02 1.27036661e-01 -2.66050696e-01 -7.48908579e-01 -4.47996736e-01 -1.62168398e-01 3.28338057e-01 1.64909899e-01 2.63103127e-01 1.79653630e-01 2.73964196e-01 4.82653409e-01] [ 2.86662698e-01 2.00912103e-01 7.61716962e-02 8.68535489e-02 -1.46517754e-01 -3.50345343e-01 -2.08513647e-01 1.38241574e-01 6.65647149e-01 5.15758753e-01 4.23986763e-01 9.78832766e-02 1.63301621e-02] [ 4.69733566e-01 5.21358371e-01 -5.53292781e-02 -1.28834754e-01 -2.47600794e-01 -2.07610190e-01 1.06757030e-01 3.68928403e-01 7.93322265e-01 5.32189250e-01 7.43974447e-01 3.40318233e-01 1.84205711e-01] [-4.50527258e-02 1.01305116e-02 -1.80621222e-01 -2.45271713e-01 -4.71004248e-02 -5.86601347e-02 2.93281674e-01 1.67828873e-01 4.25209194e-01 5.07307053e-01 7.56563842e-01 5.32917321e-01 -4.87336190e-03] [ 4.73653600e-02 -1.24762759e-01 -1.29313529e-01 -3.93787950e-01 -1.94625527e-01 -8.22954997e-02 2.71834612e-01 1.77281648e-01 7.31139928e-02 1.89419329e-01 3.37438375e-01 4.58850592e-01 1.21154837e-01]] [[ 2.09666118e-01 1.49260983e-01 -2.69612018e-03 -1.40152723e-02 -5.05860269e-01 -4.37257499e-01 -2.65144408e-01 -6.42075837e-02 -7.99430758e-02 -1.75356254e-01 -4.54320908e-02 -1.47992983e-01 
-9.54802409e-02] [ 5.30340552e-01 5.59712589e-01 3.85294169e-01 2.30353326e-01 -1.79466754e-01 -2.64574289e-01 -9.60478932e-02 -9.37062353e-02 -7.29798600e-02 -3.87979925e-01 -3.04974407e-01 -3.30108136e-01 -3.87324952e-02] [ 3.14271092e-01 1.29004508e-01 1.75792798e-01 3.18124384e-01 4.55235630e-01 3.99428099e-01 3.63128811e-01 4.79765050e-03 -1.29348725e-01 -4.52150702e-01 -3.14873874e-01 -1.16214938e-01 9.88126993e-02] [ 3.83245826e-01 2.37902373e-01 3.10699493e-01 2.38431484e-01 6.08988762e-01 4.16350901e-01 3.11825812e-01 -4.93711643e-02 -1.12975821e-01 -4.22913879e-01 -5.18195331e-01 -1.16581172e-01 9.95328128e-02] [ 7.11970985e-01 3.68345141e-01 1.99794829e-01 2.84655225e-02 9.97817144e-02 -1.59327183e-02 3.47074680e-02 -3.84012703e-03 -2.28808001e-01 -4.37774420e-01 -5.20041704e-01 1.03355989e-01 3.12575698e-02] [ 9.00183022e-01 6.99525356e-01 4.14420605e-01 1.84438989e-01 4.81312945e-02 -1.83265075e-01 7.95222633e-03 1.68194577e-01 2.60325015e-01 4.92682606e-02 -5.97316027e-03 2.28253782e-01 7.71559104e-02] [ 7.68039644e-01 7.27251351e-01 2.73335546e-01 1.67064488e-01 7.63958097e-02 2.08343238e-01 3.35013419e-01 4.91717935e-01 3.79647523e-01 3.85997355e-01 2.87307143e-01 5.56844115e-01 3.42483699e-01] [ 2.76139647e-01 4.08749253e-01 3.68214130e-01 4.81198430e-01 5.54051280e-01 5.36207080e-01 4.44622040e-01 3.46872211e-01 4.68523234e-01 3.81902099e-01 3.19406241e-01 1.95457369e-01 2.35185519e-01] [ 1.77915215e-01 2.90444255e-01 4.65206087e-01 3.34151924e-01 2.82441139e-01 -6.65056407e-02 1.19665571e-01 -1.94572374e-01 -1.18351914e-02 -2.37547770e-01 -2.00919017e-01 -1.55711591e-01 1.34884253e-01] [ 2.58772582e-01 4.90980506e-01 8.13057482e-01 4.24219728e-01 3.28927964e-01 -3.56194824e-01 3.72930765e-02 -4.20009345e-01 -2.68777013e-02 -4.44669098e-01 -3.62965643e-01 -4.74900663e-01 -1.60554543e-01] [ 1.01902336e-01 3.59459758e-01 6.18776500e-01 2.70884216e-01 1.03035374e-02 -4.34264302e-01 1.46674514e-02 -2.84090012e-01 -1.94151372e-01 -9.05752182e-02 
1.31707430e-01 -3.27787459e-01 -4.41970050e-01] [ 7.19126761e-02 3.48524034e-01 4.54271942e-01 4.22997355e-01 5.32642663e-01 3.31424683e-01 4.10190701e-01 -3.00543189e-01 -2.07357913e-01 2.64501840e-01 6.60180151e-01 5.72463460e-02 -3.18774730e-01] [ 3.76115948e-01 -1.88569892e-02 -4.26273942e-02 -9.29160044e-02 6.30254030e-01 7.53588140e-01 8.49644005e-01 -6.42145798e-02 -3.33166480e-01 3.57376456e-01 7.51540661e-01 3.86586994e-01 -1.17966257e-01] [ 6.45487130e-01 1.98449828e-02 9.92842540e-02 3.11664473e-02 1.13727665e+00 9.66571569e-01 9.19957399e-01 -1.38735667e-01 -2.83709198e-01 1.51586458e-01 4.43093836e-01 5.82373202e-01 1.54601544e-01] [ 6.38350248e-01 5.91093898e-02 1.18433185e-01 -1.01830639e-01 6.48917437e-01 5.98059297e-01 5.66807985e-01 -3.16774100e-02 -3.49474311e-01 -5.87960221e-02 9.12887603e-02 3.05309147e-01 -4.12626863e-02]] [[ 5.35814762e-02 -4.59576070e-01 -5.50886929e-01 -1.65414169e-01 7.30161294e-02 3.04623246e-01 1.05571188e-01 1.45029604e-01 3.20337236e-01 2.84432262e-01 5.75981915e-01 3.02146405e-01 3.76672208e-01] [ 8.64057541e-02 -4.22524989e-01 -5.49075723e-01 -1.32485464e-01 2.57890858e-02 1.18120722e-01 1.81638062e-01 2.82386065e-01 5.50880969e-01 3.19101483e-01 4.40787166e-01 1.21254049e-01 1.92751139e-01] [ 4.57913250e-01 1.56485677e-01 -1.72294170e-01 -2.75736570e-01 -3.19477081e-01 -1.17362633e-01 2.55174667e-01 3.51895541e-01 4.41514313e-01 1.99178353e-01 2.88350940e-01 -3.58058065e-02 4.64164354e-02] [ 5.05874097e-01 5.03070593e-01 -1.27010215e-02 -1.57751106e-02 -3.46727967e-01 -2.49542654e-01 -2.97145993e-01 -3.85813653e-01 -3.67419183e-01 -2.84915358e-01 -1.01992667e-01 3.05197500e-02 1.78363666e-01] [ 1.05515945e+00 5.35283327e-01 -1.22247236e-02 -3.24011892e-01 -4.56610471e-01 -4.08426762e-01 -7.35135853e-01 -7.55538046e-01 -6.34333134e-01 -3.40283245e-01 1.19094420e-02 2.69647390e-01 5.43089151e-01] [ 1.01654744e+00 5.11439323e-01 -4.49890345e-02 -1.97209224e-01 -4.66049522e-01 -5.67858517e-01 -9.09546196e-01 
-5.99107981e-01 -3.76232177e-01 -2.56687015e-01 -1.67843342e-01 7.57040679e-02 3.99195313e-01] [ 8.46442759e-01 3.19835335e-01 1.11779824e-01 -3.91633481e-01 -3.74972701e-01 -5.09004593e-01 -4.90292341e-01 -9.45191383e-02 1.97267737e-02 -2.08916008e-01 -3.81820142e-01 -3.63024890e-01 -6.74100146e-02] [ 4.94805813e-01 6.86684847e-01 4.06476945e-01 1.66716501e-01 -7.89566934e-02 -1.25495940e-01 -1.65644154e-01 3.34003538e-01 2.29951084e-01 6.44149911e-03 -4.44038570e-01 -3.17578822e-01 -1.15138412e-01] [ 4.98926014e-01 8.35077941e-01 5.10062099e-01 1.42780408e-01 -1.42778248e-01 -2.09277213e-01 -2.84394711e-01 1.90069184e-01 2.06835017e-01 1.76394865e-01 -1.63782835e-01 -5.04979417e-02 2.28499442e-01] [ 6.20498002e-01 7.95290053e-01 3.98336053e-01 2.76677042e-01 1.30603477e-01 -6.20713979e-02 -6.10394888e-02 2.63372302e-01 3.45009565e-01 4.39844340e-01 3.37376237e-01 5.46972632e-01 7.57858396e-01] [ 1.00495525e-01 4.02853079e-02 -2.47908533e-01 -3.09811741e-01 -4.07248616e-01 -3.23369622e-01 -2.25600272e-01 7.93753490e-02 -5.43173291e-02 5.65060042e-02 2.56797969e-01 4.48635161e-01 5.21841168e-01] [ 7.31195435e-02 -9.56242979e-02 -4.36770320e-01 -1.45012796e-01 -6.97290450e-02 4.29144576e-02 -2.37214968e-01 -2.13301525e-01 -5.07935405e-01 -1.74034730e-01 2.39524171e-02 1.61170185e-01 -9.52244624e-02] [-5.46705723e-01 -4.55632061e-01 -5.21013498e-01 -4.63122055e-02 -4.51655269e-01 -2.88651705e-01 -5.59310198e-01 -3.49522114e-01 -5.60190260e-01 -4.27552342e-01 -1.42829549e-02 -8.51097852e-02 -4.29820657e-01] [-3.43326747e-01 -2.80618399e-01 -3.50901634e-01 -7.99908265e-02 -4.13171649e-01 -6.20550960e-02 -8.75102952e-02 1.00047126e-01 -1.94482982e-01 -2.67887145e-01 -3.64118330e-02 -4.00192112e-01 -6.87716186e-01] [-4.02213633e-01 -3.55005890e-01 -2.21288085e-01 -2.09998846e-01 -4.48387355e-01 -1.41449869e-01 2.13779196e-01 3.92459780e-01 1.56541079e-01 -2.07499191e-01 3.37053798e-02 -2.19555780e-01 -3.65143418e-01]]]]; ov_res: [[[[-4.11390454e-01 -3.84872884e-01 
-3.79446208e-01 -4.17636931e-01 -2.52873540e-01 -2.57279664e-01 -2.54374504e-01 -3.49674106e-01 -6.90029487e-02 -1.35049239e-01 -9.17189792e-02 -3.87122005e-01 -3.87216546e-02] [-3.53212804e-01 -4.25334185e-01 -3.81002873e-01 -3.90538901e-01 -1.66714355e-01 -2.24297926e-01 -4.67091233e-01 -2.81951755e-01 -1.21531166e-01 5.01770042e-02 -1.43587455e-01 -5.10214448e-01 -1.67966247e-01] [-9.28897709e-02 -2.91149259e-01 -2.79982090e-01 -1.42751396e-01 -1.99802712e-01 -5.87809145e-01 -9.16075408e-01 -4.96544838e-01 -8.54853690e-02 4.09265190e-01 4.84490842e-02 -9.45784748e-02 -4.57353075e-04] [ 1.51221141e-01 -3.42917778e-02 2.62481775e-02 -3.76353562e-02 -5.41473590e-02 -2.21304879e-01 -4.71282601e-01 -7.21371323e-02 8.76375958e-02 3.78972471e-01 3.63499112e-02 -2.53162980e-01 -2.42437094e-01] [ 5.83449267e-02 -2.45065019e-02 6.34194463e-02 -3.13693136e-02 -2.50139356e-01 -3.87982517e-01 -2.39330828e-01 -1.75338447e-01 2.38352299e-01 2.26742163e-01 4.61184308e-02 -2.17821389e-01 -6.96657524e-02] [ 1.58856422e-01 2.20503986e-01 2.38287032e-01 -4.73132469e-02 -2.50864297e-01 -1.73114926e-01 2.34159783e-01 2.20456436e-01 2.98812121e-01 -2.34334208e-02 -1.06187589e-01 -1.16845556e-01 1.19322419e-01] [ 1.29336804e-01 5.29450849e-02 1.39453799e-01 -1.50402814e-01 -5.21101892e-01 -5.20559132e-01 -3.37605894e-01 -1.18076071e-01 2.46012341e-02 -4.31608744e-02 -1.68281756e-02 -1.89214870e-01 -2.18572348e-01] [ 1.82962894e-01 -1.94187105e-01 -3.03172588e-01 -4.37138855e-01 -3.40690285e-01 -7.55365342e-02 -2.13954628e-01 -1.68323647e-02 -3.44534889e-02 2.33725131e-01 2.85900563e-01 2.17145622e-01 -2.48309210e-01] [ 9.80482399e-02 -2.29916945e-01 -3.18786263e-01 -4.61109519e-01 -5.59806585e-01 6.07993873e-03 -2.47206047e-01 -8.87700021e-02 -3.93371791e-01 1.31162614e-01 4.34148252e-01 3.34195018e-01 -8.72434489e-03] [ 2.29778871e-01 -1.52821586e-01 -9.19568911e-02 -2.89156824e-01 -4.60720688e-01 -4.68788221e-02 7.93714523e-02 2.24031270e-01 -4.95251268e-02 5.17816544e-01 
4.97495234e-01 7.24544644e-01 4.25243497e-01] [ 3.33459586e-01 8.91274214e-02 1.27036661e-01 -2.66050696e-01 -7.48908579e-01 -4.47996736e-01 -1.62168398e-01 3.28338057e-01 1.64909899e-01 2.63103127e-01 1.79653630e-01 2.73964196e-01 4.82653409e-01] [ 2.86662698e-01 2.00912103e-01 7.61716962e-02 8.68535489e-02 -1.46517754e-01 -3.50345343e-01 -2.08513647e-01 1.38241574e-01 6.65647149e-01 5.15758753e-01 4.23986763e-01 9.78832766e-02 1.63301621e-02] [ 4.69733566e-01 5.21358371e-01 -5.53292781e-02 -1.28834754e-01 -2.47600794e-01 -2.07610190e-01 1.06757030e-01 3.68928403e-01 7.93322265e-01 5.32189250e-01 7.43974447e-01 3.40318233e-01 1.84205711e-01] [-4.50527258e-02 1.01305116e-02 -1.80621222e-01 -2.45271713e-01 -4.71004248e-02 -5.86601347e-02 2.93281674e-01 1.67828873e-01 4.25209194e-01 5.07307053e-01 7.56563842e-01 5.32917321e-01 -4.87336190e-03] [ 4.73653600e-02 -1.24762759e-01 -1.29313529e-01 -3.93787950e-01 -1.94625527e-01 -8.22954997e-02 2.71834612e-01 1.77281648e-01 7.31139928e-02 1.89419329e-01 3.37438375e-01 4.58850592e-01 1.21154837e-01]] [[ 2.09666118e-01 1.49260983e-01 -2.69612018e-03 -1.40152723e-02 -5.05860269e-01 -4.37257499e-01 -2.65144408e-01 -6.42075837e-02 -7.99430758e-02 -1.75356254e-01 -4.54320908e-02 -1.47992983e-01 -9.54802409e-02] [ 5.30340552e-01 5.59712589e-01 3.85294169e-01 2.30353326e-01 -1.79466754e-01 -2.64574289e-01 -9.60478932e-02 -9.37062353e-02 -7.29798600e-02 -3.87979925e-01 -3.04974407e-01 -3.30108136e-01 -3.87324952e-02] [ 3.14271092e-01 1.29004508e-01 1.75792798e-01 3.18124384e-01 4.55235630e-01 3.99428099e-01 3.63128811e-01 4.79765050e-03 -1.29348725e-01 -4.52150702e-01 -3.14873874e-01 -1.16214938e-01 9.88126993e-02] [ 3.83245826e-01 2.37902373e-01 3.10699493e-01 2.38431484e-01 6.08988762e-01 4.16350901e-01 3.11825812e-01 -4.93711643e-02 -1.12975821e-01 -4.22913879e-01 -5.18195331e-01 -1.16581172e-01 9.95328128e-02] [ 7.11970985e-01 3.68345141e-01 1.99794829e-01 2.84655225e-02 9.97817144e-02 -1.59327183e-02 3.47074680e-02 
-3.84012703e-03 -2.28808001e-01 -4.37774420e-01 -5.20041704e-01 1.03355989e-01 3.12575698e-02] [ 9.00183022e-01 6.99525356e-01 4.14420605e-01 1.84438989e-01 4.81312945e-02 -1.83265075e-01 7.95222633e-03 1.68194577e-01 2.60325015e-01 4.92682606e-02 -5.97316027e-03 2.28253782e-01 7.71559104e-02] [ 7.68039644e-01 7.27251351e-01 2.73335546e-01 1.67064488e-01 7.63958097e-02 2.08343238e-01 3.35013419e-01 4.91717935e-01 3.79647523e-01 3.85997355e-01 2.87307143e-01 5.56844115e-01 3.42483699e-01] [ 2.76139647e-01 4.08749253e-01 3.68214130e-01 4.81198430e-01 5.54051280e-01 5.36207080e-01 4.44622040e-01 3.46872211e-01 4.68523234e-01 3.81902099e-01 3.19406241e-01 1.95457369e-01 2.35185519e-01] [ 1.77915215e-01 2.90444255e-01 4.65206087e-01 3.34151924e-01 2.82441139e-01 -6.65056407e-02 1.19665571e-01 -1.94572374e-01 -1.18351914e-02 -2.37547770e-01 -2.00919017e-01 -1.55711591e-01 1.34884253e-01] [ 2.58772582e-01 4.90980506e-01 8.13057482e-01 4.24219728e-01 3.28927964e-01 -3.56194824e-01 3.72930765e-02 -4.20009345e-01 -2.68777013e-02 -4.44669098e-01 -3.62965643e-01 -4.74900663e-01 -1.60554543e-01] [ 1.01902336e-01 3.59459758e-01 6.18776500e-01 2.70884216e-01 1.03035374e-02 -4.34264302e-01 1.46674514e-02 -2.84090012e-01 -1.94151372e-01 -9.05752182e-02 1.31707430e-01 -3.27787459e-01 -4.41970050e-01] [ 7.19126761e-02 3.48524034e-01 4.54271942e-01 4.22997355e-01 5.32642663e-01 3.31424683e-01 4.10190701e-01 -3.00543189e-01 -2.07357913e-01 2.64501840e-01 6.60180151e-01 5.72463460e-02 -3.18774730e-01] [ 3.76115948e-01 -1.88569892e-02 -4.26273942e-02 -9.29160044e-02 6.30254030e-01 7.53588140e-01 8.49644005e-01 -6.42145798e-02 -3.33166480e-01 3.57376456e-01 7.51540661e-01 3.86586994e-01 -1.17966257e-01] [ 6.45487130e-01 1.98449828e-02 9.92842540e-02 3.11664473e-02 1.13727665e+00 9.66571569e-01 9.19957399e-01 -1.38735667e-01 -2.83709198e-01 1.51586458e-01 4.43093836e-01 5.82373202e-01 1.54601544e-01] [ 6.38350248e-01 5.91093898e-02 1.18433185e-01 -1.01830639e-01 6.48917437e-01 
5.98059297e-01 5.66807985e-01 -3.16774100e-02 -3.49474311e-01 -5.87960221e-02 9.12887603e-02 3.05309147e-01 -4.12626863e-02]] [[ 5.35814762e-02 -4.59576070e-01 -5.50886929e-01 -1.65414169e-01 7.30161294e-02 3.04623246e-01 1.05571188e-01 1.45029604e-01 3.20337236e-01 2.84432262e-01 5.75981915e-01 3.02146405e-01 3.76672208e-01] [ 8.64057541e-02 -4.22524989e-01 -5.49075723e-01 -1.32485464e-01 2.57890858e-02 1.18120722e-01 1.81638062e-01 2.82386065e-01 5.50880969e-01 3.19101483e-01 4.40787166e-01 1.21254049e-01 1.92751139e-01] [ 4.57913250e-01 1.56485677e-01 -1.72294170e-01 -2.75736570e-01 -3.19477081e-01 -1.17362633e-01 2.55174667e-01 3.51895541e-01 4.41514313e-01 1.99178353e-01 2.88350940e-01 -3.58058065e-02 4.64164354e-02] [ 5.05874097e-01 5.03070593e-01 -1.27010215e-02 -1.57751106e-02 -3.46727967e-01 -2.49542654e-01 -2.97145993e-01 -3.85813653e-01 -3.67419183e-01 -2.84915358e-01 -1.01992667e-01 3.05197500e-02 1.78363666e-01] [ 1.05515945e+00 5.35283327e-01 -1.22247236e-02 -3.24011892e-01 -4.56610471e-01 -4.08426762e-01 -7.35135853e-01 -7.55538046e-01 -6.34333134e-01 -3.40283245e-01 1.19094420e-02 2.69647390e-01 5.43089151e-01] [ 1.01654744e+00 5.11439323e-01 -4.49890345e-02 -1.97209224e-01 -4.66049522e-01 -5.67858517e-01 -9.09546196e-01 -5.99107981e-01 -3.76232177e-01 -2.56687015e-01 -1.67843342e-01 7.57040679e-02 3.99195313e-01] [ 8.46442759e-01 3.19835335e-01 1.11779824e-01 -3.91633481e-01 -3.74972701e-01 -5.09004593e-01 -4.90292341e-01 -9.45191383e-02 1.97267737e-02 -2.08916008e-01 -3.81820142e-01 -3.63024890e-01 -6.74100146e-02] [ 4.94805813e-01 6.86684847e-01 4.06476945e-01 1.66716501e-01 -7.89566934e-02 -1.25495940e-01 -1.65644154e-01 3.34003538e-01 2.29951084e-01 6.44149911e-03 -4.44038570e-01 -3.17578822e-01 -1.15138412e-01] [ 4.98926014e-01 8.35077941e-01 5.10062099e-01 1.42780408e-01 -1.42778248e-01 -2.09277213e-01 -2.84394711e-01 1.90069184e-01 2.06835017e-01 1.76394865e-01 -1.63782835e-01 -5.04979417e-02 2.28499442e-01] [ 6.20498002e-01 7.95290053e-01 
3.98336053e-01 2.76677042e-01 1.30603477e-01 -6.20713979e-02 -6.10394888e-02 2.63372302e-01 3.45009565e-01 4.39844340e-01 3.37376237e-01 5.46972632e-01 7.57858396e-01] [ 1.00495525e-01 4.02853079e-02 -2.47908533e-01 -3.09811741e-01 -4.07248616e-01 -3.23369622e-01 -2.25600272e-01 7.93753490e-02 -5.43173291e-02 5.65060042e-02 2.56797969e-01 4.48635161e-01 5.21841168e-01] [ 7.31195435e-02 -9.56242979e-02 -4.36770320e-01 -1.45012796e-01 -6.97290450e-02 4.29144576e-02 -2.37214968e-01 -2.13301525e-01 -5.07935405e-01 -1.74034730e-01 2.39524171e-02 1.61170185e-01 -9.52244624e-02] [-5.46705723e-01 -4.55632061e-01 -5.21013498e-01 -4.63122055e-02 -4.51655269e-01 -2.88651705e-01 -5.59310198e-01 -3.49522114e-01 -5.60190260e-01 -4.27552342e-01 -1.42829549e-02 -8.51097852e-02 -4.29820657e-01] [-3.43326747e-01 -2.80618399e-01 -3.50901634e-01 -7.99908265e-02 -4.13171649e-01 -6.20550960e-02 -8.75102952e-02 1.00047126e-01 -1.94482982e-01 -2.67887145e-01 -3.64118330e-02 -4.00192112e-01 -6.87716186e-01] [-4.02213633e-01 -3.55005890e-01 -2.21288085e-01 -2.09998846e-01 -4.48387355e-01 -1.41449869e-01 2.13779196e-01 3.92459780e-01 1.56541079e-01 -2.07499191e-01 3.37053798e-02 -2.19555780e-01 -3.65143418e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5664.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[0, 0]]() %6 : int[] = prim::Constant[value=[2, 1]]() %7 : int[] = prim::Constant[value=[3, 3]]() %8 : Tensor = aten::avg_pool2d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%8) fw_re: [[[[ 0.42020088 0.36675525 0.23779304 -0.06031841 -0.16184941 -0.26570907 -0.2528713 -0.23769324 0.03997982 0.57117367 0.64382863 0.75131917 -0.08191604] [ 0.6556869 0.69867754 0.13656084 0.23578636 0.03377467 0.43634823 0.3562902 0.4676186 0.23809719 0.34293997 -0.16863468 0.08242296 -0.38971063] [ 0.393277 0.2367518 -0.08453307 -0.07637021 -0.36336634 0.03824554 0.0225378 0.3731352 0.15396112 -0.17320496 -0.21597679 -0.2680117 -0.03072873] [ 0.05574948 -0.0406028 -0.37370455 -0.22413918 -0.20043069 0.22717915 0.18315056 0.12815182 -0.21945548 -0.36006266 -0.5835375 -0.2725048 -0.27839917] [-0.20365746 0.3095294 0.57215077 0.49778545 0.01222207 -0.10144761 -0.1168441 0.11707505 -0.13641495 -0.10343838 -0.02974185 0.30736884 0.25352776] [-0.48012474 -0.29096663 0.22537804 0.10076499 0.5284671 0.45575285 0.18854819 -0.14035606 -0.4628905 -0.17229325 -0.09659236 0.54550385 0.5074959 ] [-0.09387048 0.15756205 0.23045486 0.27201024 0.11446166 -0.02754302 0.03684942 0.14114194 0.09301233 0.23298973 0.46547005 0.8838253 0.6660247 ]] [[ 0.3395317 0.56703115 0.5137586 0.26646382 0.04575964 -0.01543748 -0.08972417 -0.00800188 0.24946377 0.06099228 0.46322238 0.28904882 0.4777323 ] [-0.09693411 0.0309788 0.6930477 0.7613557 0.5253592 0.12972511 -0.25403133 -0.053202 
0.23823756 0.41677555 0.74397427 0.41328788 0.33719078] [ 0.0516811 0.03327478 -0.04318709 0.18761796 0.4229573 0.19398114 -0.02304537 -0.4948901 -0.33111343 -0.05489659 0.35813278 0.36149985 0.36333835] [ 0.5887983 0.41904336 0.49273622 0.13083637 0.12834622 -0.2219272 -0.41086066 -0.8799327 -0.61804074 -0.16873299 0.29272503 -0.16010138 -0.47327477] [ 0.04010511 -0.0934266 0.20242867 0.15820596 -0.25733835 -0.28888535 -0.4465605 -0.36477545 -0.2509034 -0.33736524 -0.14735836 0.01837742 0.09367469] [-0.15278691 -0.21112442 0.3237115 0.875931 1.2403911 0.7476486 -0.18785447 -0.9629791 -0.80222046 -0.7747018 -0.47158888 -0.19237754 0.16430806] [-0.54002774 -0.63676524 -0.41477767 0.13575262 0.3791204 0.14253081 -0.18133561 -0.38590738 -0.45397097 -0.31072983 -0.56087035 -0.39943382 -0.73905987]] [[ 0.03644443 0.13177177 0.1733576 0.69182146 0.5608598 0.3651963 -0.05113049 0.09489516 0.21313363 -0.07954289 0.13748294 0.12883751 0.42311466] [ 0.06809366 -0.14250654 0.04302257 0.5133932 0.6525165 0.191724 -0.358325 -0.25549474 0.08695816 0.32396588 -0.05005035 -0.16276751 -0.26402405] [-0.23758385 -0.17401634 0.06600137 0.09648185 0.1252303 -0.0908805 0.07122951 0.15341769 0.44145808 0.6689543 0.16185999 -0.04706131 -0.13019973] [-0.32503906 -0.21115234 -0.03739655 -0.34427604 -0.02787783 -0.1110644 0.5564797 0.11393987 -0.18416058 -0.410815 -0.31179437 -0.22193901 -0.2633312 ] [ 0.49815962 0.17252654 0.23211487 -0.242375 0.08328795 -0.02650579 0.31008577 0.12226656 -0.12054084 -0.22625478 -0.55546534 -0.73238033 -0.5396774 ] [-0.11683345 -0.3326636 0.05618072 -0.00947608 0.12631011 0.07404438 0.00695246 0.1294887 -0.15558812 -0.34888768 -0.5304288 -0.41595206 0.04758148] [-0.35664213 -0.12058181 0.75463116 0.73901427 0.34972456 -0.09047547 -0.04805054 0.1404561 -0.00328215 -0.05019061 0.25257 0.43626416 0.59521466]]]]; ov_res: [[[[ 0.42020088 0.36675525 0.23779304 -0.06031841 -0.16184941 -0.26570907 -0.2528713 -0.23769324 0.03997982 0.57117367 0.64382863 0.75131917 
-0.08191604] [ 0.6556869 0.69867754 0.13656084 0.23578636 0.03377467 0.43634823 0.3562902 0.4676186 0.23809719 0.34293997 -0.16863468 0.08242296 -0.38971063] [ 0.393277 0.2367518 -0.08453307 -0.07637021 -0.36336634 0.03824554 0.0225378 0.3731352 0.15396112 -0.17320496 -0.21597679 -0.2680117 -0.03072873] [ 0.05574948 -0.0406028 -0.37370455 -0.22413918 -0.20043069 0.22717915 0.18315056 0.12815182 -0.21945548 -0.36006266 -0.5835375 -0.2725048 -0.27839917] [-0.20365746 0.3095294 0.57215077 0.49778545 0.01222207 -0.10144761 -0.1168441 0.11707505 -0.13641495 -0.10343838 -0.02974185 0.30736884 0.25352776] [-0.48012474 -0.29096663 0.22537804 0.10076499 0.5284671 0.45575285 0.18854819 -0.14035606 -0.4628905 -0.17229325 -0.09659236 0.54550385 0.5074959 ] [-0.09387048 0.15756205 0.23045486 0.27201024 0.11446166 -0.02754302 0.03684942 0.14114194 0.09301233 0.23298973 0.46547005 0.8838253 0.6660247 ]] [[ 0.3395317 0.56703115 0.5137586 0.26646382 0.04575964 -0.01543748 -0.08972417 -0.00800188 0.24946377 0.06099228 0.46322238 0.28904882 0.4777323 ] [-0.09693411 0.0309788 0.6930477 0.7613557 0.5253592 0.12972511 -0.25403133 -0.053202 0.23823756 0.41677555 0.74397427 0.41328788 0.33719078] [ 0.0516811 0.03327478 -0.04318709 0.18761796 0.4229573 0.19398114 -0.02304537 -0.4948901 -0.33111343 -0.05489659 0.35813278 0.36149985 0.36333835] [ 0.5887983 0.41904336 0.49273622 0.13083637 0.12834622 -0.2219272 -0.41086066 -0.8799327 -0.61804074 -0.16873299 0.29272503 -0.16010138 -0.47327477] [ 0.04010511 -0.0934266 0.20242867 0.15820596 -0.25733835 -0.28888535 -0.4465605 -0.36477545 -0.2509034 -0.33736524 -0.14735836 0.01837742 0.09367469] [-0.15278691 -0.21112442 0.3237115 0.875931 1.2403911 0.7476486 -0.18785447 -0.9629791 -0.80222046 -0.7747018 -0.47158888 -0.19237754 0.16430806] [-0.54002774 -0.63676524 -0.41477767 0.13575262 0.3791204 0.14253081 -0.18133561 -0.38590738 -0.45397097 -0.31072983 -0.56087035 -0.39943382 -0.73905987]] [[ 0.03644443 0.13177177 0.1733576 0.69182146 0.5608598 
0.3651963 -0.05113049 0.09489516 0.21313363 -0.07954289 0.13748294 0.12883751 0.42311466] [ 0.06809366 -0.14250654 0.04302257 0.5133932 0.6525165 0.191724 -0.358325 -0.25549474 0.08695816 0.32396588 -0.05005035 -0.16276751 -0.26402405] [-0.23758385 -0.17401634 0.06600137 0.09648185 0.1252303 -0.0908805 0.07122951 0.15341769 0.44145808 0.6689543 0.16185999 -0.04706131 -0.13019973] [-0.32503906 -0.21115234 -0.03739655 -0.34427604 -0.02787783 -0.1110644 0.5564797 0.11393987 -0.18416058 -0.410815 -0.31179437 -0.22193901 -0.2633312 ] [ 0.49815962 0.17252654 0.23211487 -0.242375 0.08328795 -0.02650579 0.31008577 0.12226656 -0.12054084 -0.22625478 -0.55546534 -0.73238033 -0.5396774 ] [-0.11683345 -0.3326636 0.05618072 -0.00947608 0.12631011 0.07404438 0.00695246 0.1294887 -0.15558812 -0.34888768 -0.5304288 -0.41595206 0.04758148] [-0.35664213 -0.12058181 0.75463116 0.73901427 0.34972456 -0.09047547 -0.04805054 0.1404561 -0.00328215 -0.05019061 0.25257 0.43626416 0.59521466]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': [2, 1], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5667.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[0, 0]]() %6 : int[] = prim::Constant[value=[2, 1]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) fw_re: [[[[-5.2988845e-01 -9.0408880e-01 1.2498119e+00 4.0255764e-01 5.1157475e-03 -1.8505877e-01 1.1715378e+00 -4.2291090e-01 -5.4629210e-02 1.2130251e+00 1.7852426e-02 -7.4512571e-01 -2.9239297e-01 -2.3681661e-01 -1.3701174e+00] [ 1.8283670e+00 1.7675563e+00 3.3573055e-01 1.4546013e+00 -6.5701085e-01 1.2258532e+00 3.4117362e-01 4.3795460e-01 4.6768749e-01 -2.8624117e-02 -6.1419797e-01 -6.3039780e-01 -1.8833137e+00 1.5748529e+00 -1.1015534e+00] [ 3.4689352e-02 4.8175490e-01 6.7234230e-01 -4.9288493e-01 4.1805869e-01 1.6536682e+00 3.6039859e-01 -6.6302150e-01 -3.2634950e-01 1.3587135e+00 9.2982781e-01 -3.9848730e-01 7.0014679e-01 -1.7388654e-01 8.1526464e-01] [-2.6860845e-01 -3.1336707e-01 -2.6788905e-02 -3.2091612e-01 -8.0053937e-01 6.9788325e-01 7.6612473e-01 1.6394562e-01 -8.3071643e-01 2.2028475e-01 -7.9958290e-02 9.6038389e-01 -1.9756407e-01 5.0950068e-01 -1.0710173e+00] [ 5.1098430e-01 5.9150469e-01 -3.1189972e-01 3.6284053e-01 4.4671112e-01 -6.3154602e-01 -5.2335185e-01 -1.5111183e-01 6.3500506e-01 -2.8007931e-01 6.7493093e-01 -1.0284828e+00 -9.2932242e-01 -1.6188392e-01 5.1656592e-01] [ 2.6816964e-01 -1.2827450e-01 -6.0768139e-01 3.8545555e-01 1.0106980e+00 -6.2939715e-01 1.7630041e-01 -6.0073709e-01 4.0319669e-01 2.6408404e-01 5.7578415e-02 -2.8165141e-01 1.1795419e+00 6.4245087e-01 4.4473255e-01] 
[-1.3429546e-01 -2.4218673e-01 -9.1405255e-01 7.7775341e-01 -4.3084705e-01 -2.8946452e-02 4.6407849e-01 1.2768106e-01 -6.2522089e-01 -8.1685871e-02 3.5285708e-01 -3.0347973e-02 1.3164704e+00 -4.4109726e-01 -3.9789474e-01]] [[-4.4080156e-01 -1.2105504e-01 -5.8984214e-01 -6.7346996e-01 -7.1417135e-01 -1.4920419e+00 -1.0463740e+00 -4.7667593e-02 -1.3931292e-01 1.4002819e+00 8.5328287e-01 1.1804161e+00 -2.0313547e+00 1.6232370e+00 -9.9970210e-01] [ 5.0014240e-01 1.2164749e+00 -6.2551826e-01 -7.6560676e-01 1.8068683e-01 2.2695127e-01 -2.4378428e-01 1.4371777e-01 -7.7805209e-01 2.6409766e-01 8.2096422e-01 -2.4848878e-02 -1.4921122e+00 -5.9022021e-01 8.1666148e-01] [ 2.3924077e-01 2.3126006e-03 1.7960763e-01 -6.8531692e-01 9.7234964e-01 -9.3651193e-01 -1.0639307e-01 -9.7464544e-01 -2.4119787e-01 -9.1165006e-01 4.8829797e-01 -3.0398542e-01 8.9737868e-01 -1.0905061e+00 -5.7800078e-01] [-8.0769265e-01 -1.1811170e-01 2.4147898e-02 -2.8124037e-01 -3.3217642e-01 -7.0828617e-02 3.2342249e-01 -5.5336869e-01 7.6645687e-03 -4.3440822e-01 3.5543203e-01 2.5472671e-01 -1.3342669e+00 2.3150003e-01 5.6432754e-02] [-1.3253675e-01 1.8407862e-01 5.8366776e-01 7.7747446e-01 2.9529971e-01 -1.1713223e+00 1.5246987e+00 -1.6836002e-01 -9.0646827e-01 -1.2246126e-01 -3.8354027e-01 7.8725165e-01 -9.6326411e-01 7.9535767e-02 -1.0910995e+00] [-5.9700489e-01 -9.8731428e-02 3.9356306e-02 3.2316133e-01 1.0259405e+00 6.7821670e-01 6.0115451e-01 -6.4090830e-01 2.7742818e-01 -1.3298061e-01 -4.3828040e-01 -1.6614270e+00 1.8469709e+00 4.9070898e-01 -3.4571853e-01] [-4.3393952e-01 3.5736936e-01 -7.8244096e-01 -4.6905369e-01 -9.7010946e-01 -8.3161783e-01 -4.8211288e-01 -3.4395385e-01 1.2041323e-01 1.4281965e+00 -5.9273630e-01 -5.5421388e-01 -1.2082338e+00 5.7611799e-01 -1.3620918e+00]] [[ 7.1505612e-01 5.3832144e-01 -1.2116346e+00 2.6638278e-01 6.5254343e-01 -3.3335686e-02 8.6163944e-01 -3.8125792e-01 -6.9815910e-01 3.2556486e-01 2.0097136e-01 5.7978272e-02 -1.0745808e+00 -1.7318822e+00 -5.8743536e-01] [ 
3.0203691e-01 -4.7783577e-01 4.9634501e-01 -9.2198610e-02 -1.6147989e-01 7.5637706e-02 2.6463324e-01 -1.1721455e+00 -3.0666605e-01 4.6706334e-02 8.1651700e-01 7.3292828e-01 -3.5611004e-01 8.8449329e-01 2.8399322e-02] [ 1.3717133e+00 3.5734922e-01 3.1316665e-01 2.6910362e-01 -1.1245716e+00 1.6173190e-01 4.9704897e-01 6.0696229e-02 -2.0804915e-01 6.1692548e-01 -8.5440123e-01 -2.4527353e-01 -6.0060382e-01 -2.0019077e-01 7.3154640e-01] [ 6.8529505e-01 7.9445648e-01 3.6000705e-01 -8.9431949e-02 -9.1555715e-04 -1.0908391e+00 -9.7751033e-01 1.0603799e+00 -1.1860814e+00 8.4536779e-01 -7.6288506e-02 -2.4428938e-01 7.1200407e-01 -3.7914920e-01 -1.0693611e+00] [ 1.1707404e+00 3.4148794e-01 -6.1819993e-02 1.2805433e+00 2.3344985e-01 -1.2760401e-01 -5.6108403e-01 -1.3312762e+00 4.5541191e-01 3.8337597e-01 2.2407207e-01 -1.4296789e-01 6.7826360e-03 5.6123930e-01 7.2868741e-01] [-1.3930511e-01 -4.6793792e-01 -8.7032318e-03 1.3787818e+00 -5.3429806e-01 -4.9105918e-01 4.3573964e-01 -1.6063154e-02 4.9439812e-01 -1.4130163e+00 -1.1595951e+00 -1.4935237e-01 6.7567807e-01 -4.1656354e-01 1.7258918e-01] [-3.8882786e-01 -1.4438069e-01 4.2062759e-01 -4.1624266e-01 1.1473792e+00 -7.0114905e-01 2.0223778e-01 1.0562732e+00 6.6641420e-01 3.0085707e-01 4.2562497e-01 3.2673186e-01 4.8220125e-01 -7.5776142e-01 -1.4327991e-01]]]]; ov_res: [[[[-5.2988845e-01 -9.0408880e-01 1.2498119e+00 4.0255764e-01 5.1157475e-03 -1.8505877e-01 1.1715378e+00 -4.2291090e-01 -5.4629210e-02 1.2130251e+00 1.7852426e-02 -7.4512571e-01 -2.9239297e-01 -2.3681661e-01 -1.3701174e+00] [ 1.8283670e+00 1.7675563e+00 3.3573055e-01 1.4546013e+00 -6.5701085e-01 1.2258532e+00 3.4117362e-01 4.3795460e-01 4.6768749e-01 -2.8624117e-02 -6.1419797e-01 -6.3039780e-01 -1.8833137e+00 1.5748529e+00 -1.1015534e+00] [ 3.4689352e-02 4.8175490e-01 6.7234230e-01 -4.9288493e-01 4.1805869e-01 1.6536682e+00 3.6039859e-01 -6.6302150e-01 -3.2634950e-01 1.3587135e+00 9.2982781e-01 -3.9848730e-01 7.0014679e-01 -1.7388654e-01 8.1526464e-01] 
[-2.6860845e-01 -3.1336707e-01 -2.6788905e-02 -3.2091612e-01 -8.0053937e-01 6.9788325e-01 7.6612473e-01 1.6394562e-01 -8.3071643e-01 2.2028475e-01 -7.9958290e-02 9.6038389e-01 -1.9756407e-01 5.0950068e-01 -1.0710173e+00] [ 5.1098430e-01 5.9150469e-01 -3.1189972e-01 3.6284053e-01 4.4671112e-01 -6.3154602e-01 -5.2335185e-01 -1.5111183e-01 6.3500506e-01 -2.8007931e-01 6.7493093e-01 -1.0284828e+00 -9.2932242e-01 -1.6188392e-01 5.1656592e-01] [ 2.6816964e-01 -1.2827450e-01 -6.0768139e-01 3.8545555e-01 1.0106980e+00 -6.2939715e-01 1.7630041e-01 -6.0073709e-01 4.0319669e-01 2.6408404e-01 5.7578415e-02 -2.8165141e-01 1.1795419e+00 6.4245087e-01 4.4473255e-01] [-1.3429546e-01 -2.4218673e-01 -9.1405255e-01 7.7775341e-01 -4.3084705e-01 -2.8946452e-02 4.6407849e-01 1.2768106e-01 -6.2522089e-01 -8.1685871e-02 3.5285708e-01 -3.0347973e-02 1.3164704e+00 -4.4109726e-01 -3.9789474e-01]] [[-4.4080156e-01 -1.2105504e-01 -5.8984214e-01 -6.7346996e-01 -7.1417135e-01 -1.4920419e+00 -1.0463740e+00 -4.7667593e-02 -1.3931292e-01 1.4002819e+00 8.5328287e-01 1.1804161e+00 -2.0313547e+00 1.6232370e+00 -9.9970210e-01] [ 5.0014240e-01 1.2164749e+00 -6.2551826e-01 -7.6560676e-01 1.8068683e-01 2.2695127e-01 -2.4378428e-01 1.4371777e-01 -7.7805209e-01 2.6409766e-01 8.2096422e-01 -2.4848878e-02 -1.4921122e+00 -5.9022021e-01 8.1666148e-01] [ 2.3924077e-01 2.3126006e-03 1.7960763e-01 -6.8531692e-01 9.7234964e-01 -9.3651193e-01 -1.0639307e-01 -9.7464544e-01 -2.4119787e-01 -9.1165006e-01 4.8829797e-01 -3.0398542e-01 8.9737868e-01 -1.0905061e+00 -5.7800078e-01] [-8.0769265e-01 -1.1811170e-01 2.4147898e-02 -2.8124037e-01 -3.3217642e-01 -7.0828617e-02 3.2342249e-01 -5.5336869e-01 7.6645687e-03 -4.3440822e-01 3.5543203e-01 2.5472671e-01 -1.3342669e+00 2.3150003e-01 5.6432754e-02] [-1.3253675e-01 1.8407862e-01 5.8366776e-01 7.7747446e-01 2.9529971e-01 -1.1713223e+00 1.5246987e+00 -1.6836002e-01 -9.0646827e-01 -1.2246126e-01 -3.8354027e-01 7.8725165e-01 -9.6326411e-01 7.9535767e-02 -1.0910995e+00] 
[-5.9700489e-01 -9.8731428e-02 3.9356306e-02 3.2316133e-01 1.0259405e+00 6.7821670e-01 6.0115451e-01 -6.4090830e-01 2.7742818e-01 -1.3298061e-01 -4.3828040e-01 -1.6614270e+00 1.8469709e+00 4.9070898e-01 -3.4571853e-01] [-4.3393952e-01 3.5736936e-01 -7.8244096e-01 -4.6905369e-01 -9.7010946e-01 -8.3161783e-01 -4.8211288e-01 -3.4395385e-01 1.2041323e-01 1.4281965e+00 -5.9273630e-01 -5.5421388e-01 -1.2082338e+00 5.7611799e-01 -1.3620918e+00]] [[ 7.1505612e-01 5.3832144e-01 -1.2116346e+00 2.6638278e-01 6.5254343e-01 -3.3335686e-02 8.6163944e-01 -3.8125792e-01 -6.9815910e-01 3.2556486e-01 2.0097136e-01 5.7978272e-02 -1.0745808e+00 -1.7318822e+00 -5.8743536e-01] [ 3.0203691e-01 -4.7783577e-01 4.9634501e-01 -9.2198610e-02 -1.6147989e-01 7.5637706e-02 2.6463324e-01 -1.1721455e+00 -3.0666605e-01 4.6706334e-02 8.1651700e-01 7.3292828e-01 -3.5611004e-01 8.8449329e-01 2.8399322e-02] [ 1.3717133e+00 3.5734922e-01 3.1316665e-01 2.6910362e-01 -1.1245716e+00 1.6173190e-01 4.9704897e-01 6.0696229e-02 -2.0804915e-01 6.1692548e-01 -8.5440123e-01 -2.4527353e-01 -6.0060382e-01 -2.0019077e-01 7.3154640e-01] [ 6.8529505e-01 7.9445648e-01 3.6000705e-01 -8.9431949e-02 -9.1555715e-04 -1.0908391e+00 -9.7751033e-01 1.0603799e+00 -1.1860814e+00 8.4536779e-01 -7.6288506e-02 -2.4428938e-01 7.1200407e-01 -3.7914920e-01 -1.0693611e+00] [ 1.1707404e+00 3.4148794e-01 -6.1819993e-02 1.2805433e+00 2.3344985e-01 -1.2760401e-01 -5.6108403e-01 -1.3312762e+00 4.5541191e-01 3.8337597e-01 2.2407207e-01 -1.4296789e-01 6.7826360e-03 5.6123930e-01 7.2868741e-01] [-1.3930511e-01 -4.6793792e-01 -8.7032318e-03 1.3787818e+00 -5.3429806e-01 -4.9105918e-01 4.3573964e-01 -1.6063154e-02 4.9439812e-01 -1.4130163e+00 -1.1595951e+00 -1.4935237e-01 6.7567807e-01 -4.1656354e-01 1.7258918e-01] [-3.8882786e-01 -1.4438069e-01 4.2062759e-01 -4.1624266e-01 1.1473792e+00 -7.0114905e-01 2.0223778e-01 1.0562732e+00 6.6641420e-01 3.0085707e-01 4.2562497e-01 3.2673186e-01 4.8220125e-01 -7.5776142e-01 -1.4327991e-01]]]] Accuracy 
validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': 1, 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5670.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[0, 0]]() %6 : int[] = prim::Constant[value=[1, 1]]() %7 : int[] = prim::Constant[value=[3, 3]]() %8 : Tensor = aten::avg_pool2d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%8) fw_re: [[[[ 7.40363598e-02 -1.16807669e-01 1.09126888e-01 2.73110382e-02 1.09414607e-01 8.46983716e-02 7.96774924e-02 4.52443868e-01 2.01368257e-02 5.44110686e-02 -6.62780106e-02 -1.01783715e-01 -2.63602734e-02] [-1.98862240e-01 -2.15462610e-01 1.99342072e-01 -6.63381396e-03 6.24916283e-03 -4.23363060e-01 -2.51251936e-01 3.44209880e-01 2.75756508e-01 2.72199988e-01 -5.27636195e-03 1.99991226e-01 -8.20559114e-02] [-3.03279519e-01 -1.09379411e-01 1.89258009e-01 -9.35643017e-02 -5.56897700e-01 -6.67928755e-01 -3.94599348e-01 2.44669124e-01 2.70392329e-01 1.43749222e-01 2.76876211e-01 3.61311495e-01 5.00022411e-01] [-2.07429469e-01 1.75418854e-01 6.33412898e-01 2.59389699e-01 -9.46892351e-02 -3.30623358e-01 1.98760759e-02 2.48814404e-01 4.72018391e-01 7.42973089e-02 9.71555263e-02 -1.06779881e-01 5.31478412e-02] [-6.68124333e-02 3.68914753e-02 4.51450229e-01 2.22125694e-01 7.90427104e-02 -8.42248499e-02 8.11785981e-02 1.39788225e-01 9.68670398e-02 -4.52577323e-02 9.33793187e-02 -2.18914613e-01 6.57662451e-02] [ 5.97620681e-02 -1.02001138e-01 4.56378579e-01 2.90050447e-01 4.46287006e-01 1.32183075e-01 2.18563080e-01 -1.13053799e-01 -1.60995036e-01 -1.64480075e-01 -1.72683131e-02 -8.68478492e-02 -1.94632158e-01] [-1.77435294e-01 -5.66654392e-02 1.79910988e-01 
8.38756263e-02 8.85449052e-02 5.97325042e-02 -1.25186682e-01 -2.85799682e-01 -5.45239687e-01 -1.78779662e-01 -5.82780913e-02 3.27219903e-01 1.74656734e-01] [-5.38058460e-01 -3.36936712e-01 -3.52740474e-02 3.31745744e-01 2.93024421e-01 3.19304407e-01 4.00424451e-02 -3.85803550e-01 -4.28365439e-01 -2.06274554e-01 9.11938995e-02 9.32427272e-02 -1.88990965e-01] [-3.41754168e-01 -1.25362612e-02 4.59197611e-02 3.63509834e-01 4.01261032e-01 3.60227793e-01 -7.00782165e-02 -3.13958496e-01 -1.95564523e-01 1.96782157e-01 4.32895392e-01 1.87380373e-01 -6.28385618e-02] [-2.64779419e-01 1.17542595e-01 -9.29508433e-02 4.01651353e-01 2.02547416e-01 4.36029643e-01 2.03038529e-01 -1.42829478e-01 -7.60848671e-02 3.45873497e-02 6.38296843e-01 3.49574238e-01 -6.09798543e-03] [ 2.84872115e-01 6.02790952e-01 2.02534273e-01 1.88372821e-01 -2.45732993e-01 1.31971389e-01 7.18199909e-02 8.80648270e-02 -6.16769604e-02 -1.91648805e-03 1.52224332e-01 2.34035090e-01 2.03256294e-01] [-4.01653171e-01 1.48037776e-01 8.14180151e-02 3.94240677e-01 -3.34172845e-01 5.73951267e-02 -1.85590059e-01 4.47670631e-02 -5.09589791e-01 -3.69506955e-01 -2.54520983e-01 3.09387565e-01 3.05673540e-01] [-4.18083370e-01 -4.29965667e-02 -3.76313105e-02 3.03110480e-01 -2.73369849e-01 -9.90141854e-02 -4.57439005e-01 -8.56176540e-02 -4.55817074e-01 -2.33967900e-02 -2.19036788e-01 2.50008361e-06 3.01990714e-02]] [[-1.91983387e-01 -2.27781311e-02 -2.48669591e-02 4.52141911e-01 1.82423756e-01 1.66641071e-01 2.64994770e-01 2.16517583e-01 1.79336872e-02 -3.72587562e-01 9.64061022e-02 -7.57320449e-02 -1.73433810e-01] [ 7.42470026e-02 1.15983777e-01 2.06796825e-01 6.46717787e-01 3.62776548e-01 1.62966967e-01 1.26630127e-01 -7.03801960e-02 -3.20526659e-01 -6.54827297e-01 -7.75985420e-02 -1.78567424e-01 -1.53075084e-01] [-7.31683075e-02 -3.55840363e-02 -8.36874843e-02 3.66375238e-01 -1.28511548e-01 -1.12825088e-01 -4.65523511e-01 -2.53570437e-01 -2.91846156e-01 -2.39908457e-01 -8.98226649e-02 -2.67815590e-01 -4.11716223e-01] [ 
3.01078975e-01 2.15934828e-01 1.49202384e-02 1.81462631e-01 -1.66661646e-02 -1.49869984e-02 -3.71618330e-01 -3.11961800e-01 -4.52550769e-01 -2.72790492e-01 -1.40165672e-01 -2.14215130e-01 -2.41081849e-01] [ 2.20142081e-01 1.04427792e-01 -2.25738794e-01 4.65927906e-02 -1.36761470e-02 1.70594633e-01 -2.74296880e-01 -2.38996983e-01 -2.09227040e-01 2.61753082e-01 1.39925033e-01 -2.87561864e-01 -5.70133507e-01] [ 7.92427883e-02 -2.36594707e-01 -6.52344465e-01 -1.75104469e-01 1.65047601e-01 6.02865934e-01 2.95491457e-01 1.48648424e-02 -2.97008544e-01 -6.61142692e-02 1.83614884e-02 -4.93675210e-02 -1.06838301e-01] [ 2.12482393e-01 1.12302965e-02 -2.51215339e-01 -1.25441059e-01 8.00262988e-02 4.54405993e-01 2.57805586e-01 -7.59611428e-02 -3.54150295e-01 -1.30147487e-01 3.07819452e-02 -1.42492009e-02 -4.84823994e-02] [ 1.95557386e-01 1.41387701e-01 -3.25071216e-01 6.01903349e-02 3.72268260e-01 7.09483862e-01 3.75694931e-01 -9.35769752e-02 -3.73936772e-01 -2.19331920e-01 -1.07062668e-01 7.49489963e-02 9.94345769e-02] [ 2.61565328e-01 4.67880875e-01 1.33617461e-01 3.05585325e-01 3.22333783e-01 3.56490701e-01 2.03722060e-01 -1.73204273e-01 -5.54882027e-02 -6.21024109e-02 6.96158409e-03 -1.76983774e-01 -2.42067412e-01] [-2.10214540e-01 4.86725718e-02 -3.81613612e-01 2.52505839e-01 1.28662705e-01 5.00107408e-01 1.16334625e-01 -1.17852718e-01 -4.66222577e-02 -5.78147583e-02 -7.57696331e-02 -2.20968962e-01 -4.49072383e-02] [-2.19072044e-01 -1.27579287e-01 -2.56488264e-01 -2.08212778e-01 -7.16875494e-01 -3.02140355e-01 -2.94602692e-01 -1.28973335e-01 8.26438889e-02 3.74083333e-02 -6.85519655e-04 -2.44669810e-01 1.45496065e-02] [ 6.37142137e-02 -8.94914381e-03 -1.08452477e-01 -1.44533319e-02 -5.08947134e-01 -2.45342359e-01 -4.44839209e-01 -1.00077786e-01 1.35388553e-01 1.77023590e-01 -1.69340238e-01 -2.76528239e-01 2.14559939e-02] [ 3.82547647e-01 3.24543983e-01 6.39060855e-01 1.90544546e-01 -2.17707992e-01 -5.24829745e-01 -2.87302673e-01 -1.00700647e-01 -8.58403966e-02 
-9.23620462e-02 -3.29787254e-01 -1.08770713e-01 -1.93829507e-01]] [[-2.28482798e-01 -1.13036722e-01 1.89568102e-01 3.72784063e-02 -6.10204674e-02 -3.22380096e-01 5.53875566e-02 1.31211311e-01 1.83391407e-01 5.71516901e-02 2.95343816e-01 3.50011885e-01 3.91503364e-01] [-2.77067304e-01 -1.65885389e-01 -2.31552392e-01 -1.66502267e-01 4.61404510e-02 -2.36793351e-03 1.40991360e-01 -4.66801710e-02 2.01023966e-01 2.99768001e-01 3.29425663e-01 3.97989899e-01 5.01463890e-01] [ 3.88830751e-02 3.42357345e-02 -3.79119873e-01 -1.56092212e-01 1.45845830e-01 3.68781239e-01 2.45653018e-01 -1.31472856e-01 4.14792560e-02 -6.72388375e-02 1.61534980e-01 2.66527593e-01 4.24353480e-01] [-1.36415914e-01 -1.57352313e-01 -6.02464914e-01 -4.60261464e-01 9.85425040e-02 1.88250214e-01 3.54592577e-02 -2.66019940e-01 1.22981891e-01 7.47577548e-02 -1.65937454e-01 -3.03821135e-02 4.77397293e-02] [ 2.20546290e-01 5.90345189e-02 -1.62393123e-01 -3.73242229e-01 2.52109796e-01 2.48492345e-01 4.01570171e-01 3.12815189e-01 4.79654431e-01 1.59790665e-01 -3.87014538e-01 -4.23734307e-01 -3.76448601e-01] [ 2.98880696e-01 -5.62259741e-02 -1.09024113e-02 -1.46818692e-02 3.31774592e-01 1.99317351e-01 1.70535788e-01 4.79851931e-01 2.69844085e-01 8.68052617e-02 -6.37286127e-01 -4.80327278e-01 -3.80969882e-01] [ 6.15614057e-01 1.53889909e-01 -2.68661845e-02 -5.39729111e-02 2.01346561e-01 2.94054478e-01 2.54172146e-01 5.37727952e-01 -1.35149926e-01 -3.61388270e-03 -3.67568322e-02 4.83884484e-01 4.34644401e-01] [ 3.07689816e-01 7.39642903e-02 -2.06081524e-01 7.14030862e-02 -8.56132880e-02 1.45776138e-01 -2.84887165e-01 2.25238502e-02 -4.19124037e-01 -1.81307033e-01 3.68558355e-02 3.49088281e-01 5.97334146e-01] [-1.38641685e-01 2.15599671e-01 2.64786601e-01 3.66483331e-01 9.81995538e-02 2.06776649e-01 1.22129150e-01 2.37610817e-01 -1.19071297e-01 2.64075864e-02 4.47113037e-01 6.12662554e-01 5.58138430e-01] [-2.60922372e-01 5.42251617e-02 2.84769237e-01 6.20632768e-01 4.00574327e-01 4.18001533e-01 1.98020667e-01 
3.65059257e-01 3.79235655e-01 9.42324027e-02 1.26006857e-01 1.44387111e-01 1.66694209e-01] [-2.76637346e-01 3.39117795e-01 5.85423946e-01 6.18568659e-01 6.28370523e-01 5.02263010e-01 6.77968144e-01 3.63919526e-01 2.92317599e-01 -1.07734008e-02 9.66197550e-02 1.63908854e-01 -1.26944616e-01] [-1.43524855e-01 1.73306376e-01 4.63569313e-02 -9.44873244e-02 4.22657654e-02 4.21024263e-02 1.45379454e-01 3.56557295e-02 3.17435533e-01 1.24608293e-01 5.22560999e-02 -7.78282881e-02 -1.04208432e-01] [ 1.24284372e-01 1.60653889e-01 1.50089681e-01 4.28402685e-02 3.70945841e-01 2.22845107e-01 1.38211668e-01 -2.41727114e-01 -2.93222237e-02 6.95287287e-02 8.58288035e-02 -2.57022858e-01 -2.88184524e-01]]]]; ov_res: [[[[ 7.40363598e-02 -1.16807669e-01 1.09126888e-01 2.73110382e-02 1.09414607e-01 8.46983716e-02 7.96774924e-02 4.52443868e-01 2.01368257e-02 5.44110686e-02 -6.62780106e-02 -1.01783715e-01 -2.63602734e-02] [-1.98862240e-01 -2.15462610e-01 1.99342072e-01 -6.63381396e-03 6.24916283e-03 -4.23363060e-01 -2.51251936e-01 3.44209880e-01 2.75756508e-01 2.72199988e-01 -5.27636195e-03 1.99991226e-01 -8.20559114e-02] [-3.03279519e-01 -1.09379411e-01 1.89258009e-01 -9.35643017e-02 -5.56897700e-01 -6.67928755e-01 -3.94599348e-01 2.44669124e-01 2.70392329e-01 1.43749222e-01 2.76876211e-01 3.61311495e-01 5.00022411e-01] [-2.07429469e-01 1.75418854e-01 6.33412898e-01 2.59389699e-01 -9.46892351e-02 -3.30623358e-01 1.98760759e-02 2.48814404e-01 4.72018391e-01 7.42973089e-02 9.71555263e-02 -1.06779881e-01 5.31478412e-02] [-6.68124333e-02 3.68914753e-02 4.51450229e-01 2.22125694e-01 7.90427104e-02 -8.42248499e-02 8.11785981e-02 1.39788225e-01 9.68670398e-02 -4.52577323e-02 9.33793187e-02 -2.18914613e-01 6.57662451e-02] [ 5.97620681e-02 -1.02001138e-01 4.56378579e-01 2.90050447e-01 4.46287006e-01 1.32183075e-01 2.18563080e-01 -1.13053799e-01 -1.60995036e-01 -1.64480075e-01 -1.72683131e-02 -8.68478492e-02 -1.94632158e-01] [-1.77435294e-01 -5.66654392e-02 1.79910988e-01 8.38756263e-02 
8.85449052e-02 5.97325042e-02 -1.25186682e-01 -2.85799682e-01 -5.45239687e-01 -1.78779662e-01 -5.82780913e-02 3.27219903e-01 1.74656734e-01] [-5.38058460e-01 -3.36936712e-01 -3.52740474e-02 3.31745744e-01 2.93024421e-01 3.19304407e-01 4.00424451e-02 -3.85803550e-01 -4.28365439e-01 -2.06274554e-01 9.11938995e-02 9.32427272e-02 -1.88990965e-01] [-3.41754168e-01 -1.25362612e-02 4.59197611e-02 3.63509834e-01 4.01261032e-01 3.60227793e-01 -7.00782165e-02 -3.13958496e-01 -1.95564523e-01 1.96782157e-01 4.32895392e-01 1.87380373e-01 -6.28385618e-02] [-2.64779419e-01 1.17542595e-01 -9.29508433e-02 4.01651353e-01 2.02547416e-01 4.36029643e-01 2.03038529e-01 -1.42829478e-01 -7.60848671e-02 3.45873497e-02 6.38296843e-01 3.49574238e-01 -6.09798543e-03] [ 2.84872115e-01 6.02790952e-01 2.02534273e-01 1.88372821e-01 -2.45732993e-01 1.31971389e-01 7.18199909e-02 8.80648270e-02 -6.16769604e-02 -1.91648805e-03 1.52224332e-01 2.34035090e-01 2.03256294e-01] [-4.01653171e-01 1.48037776e-01 8.14180151e-02 3.94240677e-01 -3.34172845e-01 5.73951267e-02 -1.85590059e-01 4.47670631e-02 -5.09589791e-01 -3.69506955e-01 -2.54520983e-01 3.09387565e-01 3.05673540e-01] [-4.18083370e-01 -4.29965667e-02 -3.76313105e-02 3.03110480e-01 -2.73369849e-01 -9.90141854e-02 -4.57439005e-01 -8.56176540e-02 -4.55817074e-01 -2.33967900e-02 -2.19036788e-01 2.50008361e-06 3.01990714e-02]] [[-1.91983387e-01 -2.27781311e-02 -2.48669591e-02 4.52141911e-01 1.82423756e-01 1.66641071e-01 2.64994770e-01 2.16517583e-01 1.79336872e-02 -3.72587562e-01 9.64061022e-02 -7.57320449e-02 -1.73433810e-01] [ 7.42470026e-02 1.15983777e-01 2.06796825e-01 6.46717787e-01 3.62776548e-01 1.62966967e-01 1.26630127e-01 -7.03801960e-02 -3.20526659e-01 -6.54827297e-01 -7.75985420e-02 -1.78567424e-01 -1.53075084e-01] [-7.31683075e-02 -3.55840363e-02 -8.36874843e-02 3.66375238e-01 -1.28511548e-01 -1.12825088e-01 -4.65523511e-01 -2.53570437e-01 -2.91846156e-01 -2.39908457e-01 -8.98226649e-02 -2.67815590e-01 -4.11716223e-01] [ 3.01078975e-01 
2.15934828e-01 1.49202384e-02 1.81462631e-01 -1.66661646e-02 -1.49869984e-02 -3.71618330e-01 -3.11961800e-01 -4.52550769e-01 -2.72790492e-01 -1.40165672e-01 -2.14215130e-01 -2.41081849e-01] [ 2.20142081e-01 1.04427792e-01 -2.25738794e-01 4.65927906e-02 -1.36761470e-02 1.70594633e-01 -2.74296880e-01 -2.38996983e-01 -2.09227040e-01 2.61753082e-01 1.39925033e-01 -2.87561864e-01 -5.70133507e-01] [ 7.92427883e-02 -2.36594707e-01 -6.52344465e-01 -1.75104469e-01 1.65047601e-01 6.02865934e-01 2.95491457e-01 1.48648424e-02 -2.97008544e-01 -6.61142692e-02 1.83614884e-02 -4.93675210e-02 -1.06838301e-01] [ 2.12482393e-01 1.12302965e-02 -2.51215339e-01 -1.25441059e-01 8.00262988e-02 4.54405993e-01 2.57805586e-01 -7.59611428e-02 -3.54150295e-01 -1.30147487e-01 3.07819452e-02 -1.42492009e-02 -4.84823994e-02] [ 1.95557386e-01 1.41387701e-01 -3.25071216e-01 6.01903349e-02 3.72268260e-01 7.09483862e-01 3.75694931e-01 -9.35769752e-02 -3.73936772e-01 -2.19331920e-01 -1.07062668e-01 7.49489963e-02 9.94345769e-02] [ 2.61565328e-01 4.67880875e-01 1.33617461e-01 3.05585325e-01 3.22333783e-01 3.56490701e-01 2.03722060e-01 -1.73204273e-01 -5.54882027e-02 -6.21024109e-02 6.96158409e-03 -1.76983774e-01 -2.42067412e-01] [-2.10214540e-01 4.86725718e-02 -3.81613612e-01 2.52505839e-01 1.28662705e-01 5.00107408e-01 1.16334625e-01 -1.17852718e-01 -4.66222577e-02 -5.78147583e-02 -7.57696331e-02 -2.20968962e-01 -4.49072383e-02] [-2.19072044e-01 -1.27579287e-01 -2.56488264e-01 -2.08212778e-01 -7.16875494e-01 -3.02140355e-01 -2.94602692e-01 -1.28973335e-01 8.26438889e-02 3.74083333e-02 -6.85519655e-04 -2.44669810e-01 1.45496065e-02] [ 6.37142137e-02 -8.94914381e-03 -1.08452477e-01 -1.44533319e-02 -5.08947134e-01 -2.45342359e-01 -4.44839209e-01 -1.00077786e-01 1.35388553e-01 1.77023590e-01 -1.69340238e-01 -2.76528239e-01 2.14559939e-02] [ 3.82547647e-01 3.24543983e-01 6.39060855e-01 1.90544546e-01 -2.17707992e-01 -5.24829745e-01 -2.87302673e-01 -1.00700647e-01 -8.58403966e-02 -9.23620462e-02 
-3.29787254e-01 -1.08770713e-01 -1.93829507e-01]] [[-2.28482798e-01 -1.13036722e-01 1.89568102e-01 3.72784063e-02 -6.10204674e-02 -3.22380096e-01 5.53875566e-02 1.31211311e-01 1.83391407e-01 5.71516901e-02 2.95343816e-01 3.50011885e-01 3.91503364e-01] [-2.77067304e-01 -1.65885389e-01 -2.31552392e-01 -1.66502267e-01 4.61404510e-02 -2.36793351e-03 1.40991360e-01 -4.66801710e-02 2.01023966e-01 2.99768001e-01 3.29425663e-01 3.97989899e-01 5.01463890e-01] [ 3.88830751e-02 3.42357345e-02 -3.79119873e-01 -1.56092212e-01 1.45845830e-01 3.68781239e-01 2.45653018e-01 -1.31472856e-01 4.14792560e-02 -6.72388375e-02 1.61534980e-01 2.66527593e-01 4.24353480e-01] [-1.36415914e-01 -1.57352313e-01 -6.02464914e-01 -4.60261464e-01 9.85425040e-02 1.88250214e-01 3.54592577e-02 -2.66019940e-01 1.22981891e-01 7.47577548e-02 -1.65937454e-01 -3.03821135e-02 4.77397293e-02] [ 2.20546290e-01 5.90345189e-02 -1.62393123e-01 -3.73242229e-01 2.52109796e-01 2.48492345e-01 4.01570171e-01 3.12815189e-01 4.79654431e-01 1.59790665e-01 -3.87014538e-01 -4.23734307e-01 -3.76448601e-01] [ 2.98880696e-01 -5.62259741e-02 -1.09024113e-02 -1.46818692e-02 3.31774592e-01 1.99317351e-01 1.70535788e-01 4.79851931e-01 2.69844085e-01 8.68052617e-02 -6.37286127e-01 -4.80327278e-01 -3.80969882e-01] [ 6.15614057e-01 1.53889909e-01 -2.68661845e-02 -5.39729111e-02 2.01346561e-01 2.94054478e-01 2.54172146e-01 5.37727952e-01 -1.35149926e-01 -3.61388270e-03 -3.67568322e-02 4.83884484e-01 4.34644401e-01] [ 3.07689816e-01 7.39642903e-02 -2.06081524e-01 7.14030862e-02 -8.56132880e-02 1.45776138e-01 -2.84887165e-01 2.25238502e-02 -4.19124037e-01 -1.81307033e-01 3.68558355e-02 3.49088281e-01 5.97334146e-01] [-1.38641685e-01 2.15599671e-01 2.64786601e-01 3.66483331e-01 9.81995538e-02 2.06776649e-01 1.22129150e-01 2.37610817e-01 -1.19071297e-01 2.64075864e-02 4.47113037e-01 6.12662554e-01 5.58138430e-01] [-2.60922372e-01 5.42251617e-02 2.84769237e-01 6.20632768e-01 4.00574327e-01 4.18001533e-01 1.98020667e-01 3.65059257e-01 
3.79235655e-01 9.42324027e-02 1.26006857e-01 1.44387111e-01 1.66694209e-01] [-2.76637346e-01 3.39117795e-01 5.85423946e-01 6.18568659e-01 6.28370523e-01 5.02263010e-01 6.77968144e-01 3.63919526e-01 2.92317599e-01 -1.07734008e-02 9.66197550e-02 1.63908854e-01 -1.26944616e-01] [-1.43524855e-01 1.73306376e-01 4.63569313e-02 -9.44873244e-02 4.22657654e-02 4.21024263e-02 1.45379454e-01 3.56557295e-02 3.17435533e-01 1.24608293e-01 5.22560999e-02 -7.78282881e-02 -1.04208432e-01] [ 1.24284372e-01 1.60653889e-01 1.50089681e-01 4.28402685e-02 3.70945841e-01 2.22845107e-01 1.38211668e-01 -2.41727114e-01 -2.93222237e-02 6.95287287e-02 8.58288035e-02 -2.57022858e-01 -2.88184524e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5673.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[1, 1]]() %6 : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %5, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) fw_re: [[[[ 4.28940892e-01 2.84515083e-01 2.56993145e-01 3.62802744e-01 7.86774233e-02 -1.29645780e-01 -1.77452281e-01 1.39518484e-01 -3.87704760e-01 -2.63805270e-01 -3.71398240e-01 2.36046389e-01 -9.51595604e-02 2.43634377e-02 -3.30292583e-01] [ 2.65442759e-01 1.25771821e-01 1.03760257e-01 2.09860235e-01 4.56974320e-02 -1.27034381e-01 -2.65621580e-02 2.15468273e-01 -1.43853366e-01 -9.64133665e-02 -2.55408168e-01 2.16287985e-01 -3.09924874e-02 2.59782374e-01 -4.30900455e-02] [ 5.52606225e-01 2.20155776e-01 -2.03516915e-01 -1.36124358e-01 -7.12277517e-02 -1.02361694e-01 1.76782787e-01 2.04551697e-01 -1.56073973e-01 -6.88026771e-02 -3.37022930e-01 2.21931443e-01 -2.48541623e-01 -6.86504543e-02 -5.56685150e-01] [ 6.61964715e-02 -9.21650454e-02 -1.31801441e-01 -3.02683234e-01 -1.21462613e-01 -3.23246270e-02 3.76097113e-01 4.20573056e-01 3.56623083e-01 5.54315150e-01 2.16894686e-01 5.66029787e-01 8.66215304e-02 4.76237416e-01 8.23435783e-02] [ 4.08680826e-01 2.43478164e-01 2.67328769e-01 4.53341939e-02 2.01680556e-01 1.00346074e-01 2.56065458e-01 2.78211534e-01 3.75746161e-01 7.07540452e-01 4.25321758e-01 7.31759787e-01 2.86137134e-01 4.79000717e-01 4.75919284e-02] [ 4.31684732e-01 5.72429597e-01 9.94688451e-01 3.39491099e-01 3.90787452e-01 1.32835984e-01 4.66348529e-01 3.05887699e-01 7.94407845e-01 9.72419024e-01 
8.86143148e-01 6.29758596e-01 3.74159187e-01 3.76499951e-01 1.28538728e-01] [ 6.49217665e-01 6.96407318e-01 9.32617843e-01 3.09522063e-01 6.05274558e-01 1.83816984e-01 3.10012192e-01 -3.55325341e-01 2.50413090e-01 5.65852284e-01 9.58541214e-01 6.26351774e-01 5.60991287e-01 1.98688507e-01 4.89431210e-02] [ 4.47826296e-01 6.24873817e-01 7.99206972e-01 2.58389115e-01 6.02267861e-01 1.86929330e-01 4.66072708e-01 -3.81672919e-01 3.26059937e-01 3.22519243e-01 6.46465778e-01 2.57009745e-01 1.60521910e-01 -1.56119987e-01 -2.45881870e-01] [ 2.53283858e-01 1.90836668e-01 3.00871849e-01 -5.59537485e-02 5.71614981e-01 1.87907144e-01 3.21085006e-01 -4.80008066e-01 -1.90438509e-01 -4.04490590e-01 -2.81883657e-01 -2.61494428e-01 1.02172226e-01 1.35746494e-01 2.98404843e-01] [ 5.21592438e-01 3.44051361e-01 3.00688446e-01 4.68671136e-03 1.39374644e-01 -3.63601327e-01 -1.50978923e-01 -1.01010785e-01 1.82882264e-01 -9.97697785e-02 -5.57073891e-01 -5.24379075e-01 -3.89976263e-01 9.28562731e-02 5.04131973e-01] [ 2.87359029e-01 3.01508039e-01 1.54131576e-01 -1.87392920e-01 -3.77052367e-01 -6.57542169e-01 -3.81739885e-01 -3.78040940e-01 -4.06648993e-01 -3.62624049e-01 -3.02680224e-01 6.75148070e-02 3.71414393e-01 5.86920500e-01 9.42631304e-01] [ 3.08409691e-01 7.11447001e-01 4.70786929e-01 1.69491291e-01 -6.08147323e-01 -8.14669192e-01 -5.18878341e-01 -2.57223547e-01 -1.31347049e-02 1.80126190e-01 2.19971851e-01 1.11864775e-01 3.14624727e-01 3.47096860e-01 8.01769674e-01] [ 2.95383722e-01 8.34361851e-01 4.23815697e-01 3.53431612e-01 -3.32327247e-01 -3.42057168e-01 -1.72135875e-01 -2.80758440e-01 -9.41317230e-02 2.44727405e-03 3.77700210e-01 2.57383317e-01 5.93159497e-01 -2.49356832e-02 1.21251225e-01] [ 5.78569233e-01 8.95711064e-01 4.88977313e-01 3.06496881e-02 -5.67138433e-01 -5.33644915e-01 -1.32090092e-01 1.30956143e-01 3.63996655e-01 4.30988222e-01 3.51470411e-01 2.32948828e-02 6.80057108e-02 -3.71876091e-01 -1.58972397e-01] [ 7.36717999e-01 7.73253262e-01 2.40170732e-01 
-2.27466807e-01 -5.76135457e-01 -5.89729965e-01 -2.01607779e-01 4.00329530e-02 2.10524321e-01 3.69567662e-01 3.52040619e-01 3.81648540e-01 2.36829057e-01 -2.67006338e-01 -3.06510508e-01]] [[ 6.73480630e-01 3.38197798e-01 8.03261530e-03 -3.91844988e-01 3.60097080e-01 1.32170841e-01 3.51083755e-01 -1.60006002e-01 -3.33634138e-01 -5.74811315e-03 -2.27637485e-01 3.05675775e-01 1.61030054e-01 2.89994121e-01 2.11229444e-01] [ 3.67798656e-01 1.88278891e-02 -2.39012957e-01 -4.35198665e-01 3.31847340e-01 1.30807534e-01 9.83479172e-02 -4.56263810e-01 -2.93738216e-01 1.17383534e-02 8.23738649e-02 5.62505484e-01 7.51956701e-01 7.98483789e-01 5.88250816e-01] [-1.24584757e-01 -8.99681374e-02 -1.08091399e-01 -6.38416037e-02 2.27176219e-01 2.03993455e-01 2.10853890e-02 -5.49163997e-01 -4.13346648e-01 -1.59444615e-01 1.19142972e-01 3.97890210e-01 4.56316888e-01 2.84533501e-01 -2.77222116e-02] [-3.15155625e-01 -1.71846166e-01 -1.58349499e-01 4.01216894e-02 1.14341818e-01 2.06991151e-01 -1.85605496e-01 -3.56423885e-01 -3.66862625e-01 4.29149345e-02 4.57410067e-02 4.25148904e-01 2.70670921e-01 3.31059515e-01 -1.41236335e-02] [-3.26179534e-01 2.50557959e-02 5.79187796e-02 1.16804451e-01 -2.65227944e-01 1.27183599e-02 -2.70138234e-01 -1.76687911e-01 -7.36099720e-01 -2.18041334e-02 -2.21094027e-01 1.43310025e-01 -5.79381883e-01 -4.80666041e-01 -7.28930295e-01] [-1.33189261e-02 2.36822233e-01 -3.92440148e-02 -2.28329048e-01 -7.17366099e-01 -2.73103833e-01 -3.82382125e-01 -1.01852618e-01 -5.55132151e-01 6.20116107e-03 -1.73021108e-01 2.97106504e-02 -1.80485696e-01 -2.73257732e-01 -3.67883921e-01] [-6.20115995e-01 1.39969718e-02 -2.09359396e-02 -4.03688736e-02 -6.31810069e-01 -2.92200536e-01 -4.92867798e-01 -4.39518005e-01 -5.74538648e-01 -2.41314247e-01 -1.41612560e-01 -2.65359133e-01 1.11281341e-02 -2.18203999e-02 1.74058333e-01] [-5.24095595e-01 -9.69279483e-02 1.80650666e-01 2.31223449e-01 -1.48058861e-01 -1.60017028e-01 -2.07526520e-01 -3.64187034e-03 -2.72511374e-02 -5.25758527e-02 
-1.86205328e-01 -1.96719557e-01 1.58188969e-01 3.22076082e-02 5.06407619e-02] [-4.01630670e-01 -1.61600351e-01 4.81887192e-01 4.36878443e-01 3.91609371e-01 1.79946631e-01 2.42144987e-01 4.01064098e-01 1.39365345e-01 2.17860579e-01 1.77208856e-01 5.80633163e-01 6.26661956e-01 2.82889158e-01 5.55486083e-02] [-4.47042227e-01 -4.93983477e-01 4.32326168e-01 3.86229157e-01 6.91096783e-01 2.64041334e-01 5.33230841e-01 6.57828510e-01 3.48107487e-01 4.30136144e-01 2.68263698e-01 7.41481245e-01 3.34413260e-01 -7.08056390e-02 -5.91624737e-01] [-4.32796150e-01 -3.81767690e-01 2.17149377e-01 1.22428402e-01 4.23423856e-01 2.34671906e-01 3.82247001e-01 1.52261093e-01 4.46142219e-02 1.19141608e-01 3.32701534e-01 6.12875223e-01 4.67180669e-01 1.59832507e-01 -2.41417333e-01] [-5.47979593e-01 -4.15841639e-01 -7.80786276e-02 1.05551630e-01 1.41980127e-01 1.78038895e-01 1.84166897e-02 -5.86946011e-02 -1.67803094e-02 9.04287249e-02 4.29194514e-03 -1.11024722e-01 -1.02603659e-01 1.18281603e-01 1.55059593e-02] [ 2.48023257e-01 2.69586239e-02 -2.23311156e-01 -4.20601726e-01 -4.65906978e-01 -5.20187952e-02 -1.48397088e-01 1.41141877e-01 -1.23073921e-01 1.50846243e-01 -2.05221564e-01 5.33857197e-02 1.80431813e-01 4.54539448e-01 3.36041719e-01] [ 3.49352270e-01 1.93350762e-01 -1.10599257e-01 -4.14889961e-01 -6.24189794e-01 -2.32657433e-01 -2.10464165e-01 3.62984985e-01 2.18344867e-01 3.78782511e-01 -2.42988765e-01 -1.85167268e-01 -3.29409242e-01 -1.57325715e-01 -2.06068680e-01] [ 6.50825262e-01 3.48786145e-01 -3.00913453e-02 -7.24527359e-01 -8.19074690e-01 -5.21313488e-01 -1.76309034e-01 3.36489171e-01 2.09673628e-01 2.58150190e-01 -2.88262606e-01 -8.87328386e-03 -3.15811217e-01 -3.85027736e-01 -6.55001521e-01]] [[ 3.95037323e-01 4.94841635e-02 -6.87875897e-02 3.32133770e-01 4.69338685e-01 -4.09669615e-02 -4.85447317e-01 -9.32251036e-01 -6.47134721e-01 -4.20813173e-01 -9.55915675e-02 -3.74354035e-01 -7.20231473e-01 -8.21836770e-01 -8.55861187e-01] [ 4.12417293e-01 1.30967081e-01 
-1.48700640e-01 3.17226529e-01 4.57118183e-01 2.68548369e-01 -2.68867821e-01 -7.97145307e-01 -6.48112595e-01 -4.63509887e-01 -5.00393212e-02 -1.90471724e-01 -1.01127252e-01 -9.72169340e-02 -1.37408942e-01] [ 2.30323315e-01 9.95925665e-02 -8.77990499e-02 2.76026577e-01 2.59576648e-01 4.20379996e-01 -1.09754369e-01 -3.31431180e-01 -4.09803092e-01 -2.47772217e-01 2.54776087e-02 4.48596291e-02 1.28278911e-01 6.81868717e-02 -7.71791637e-02] [ 4.09343727e-02 -2.61228774e-02 -5.27362376e-02 5.16409986e-03 2.19732393e-02 2.12640747e-01 4.86500673e-02 8.40777829e-02 1.15157723e-01 7.22677410e-02 9.83265787e-02 -2.91472077e-02 2.83297986e-01 3.43306363e-01 3.83090168e-01] [ 1.67996690e-01 1.26211137e-01 7.34238252e-02 -3.07451457e-01 -3.12688768e-01 -2.00472161e-01 -9.05224010e-02 1.30035281e-01 4.00984734e-01 4.36768234e-01 2.39030495e-01 9.28227417e-03 -2.36822784e-01 -2.71998376e-01 -3.37601215e-01] [ 1.20946467e-02 3.07534561e-02 2.78742407e-02 -2.44592398e-01 -2.77206540e-01 -4.26189274e-01 -4.76002693e-01 -3.32831085e-01 2.44587258e-01 3.21650654e-01 4.09120530e-01 -1.71900652e-02 -1.14332676e-01 -1.80858374e-01 -3.26985717e-02] [-2.12139308e-01 -3.43314139e-03 -5.04981764e-02 1.92133248e-01 6.55392825e-04 -2.55862236e-01 -7.77663469e-01 -9.07699287e-01 -3.00942063e-01 6.95299357e-02 4.19979841e-01 4.65659909e-02 -7.79130980e-02 -1.06389552e-01 9.90842283e-02] [ 1.46412492e-01 2.69616187e-01 2.46697053e-01 4.94361341e-01 1.83993429e-01 -1.88936770e-01 -7.04695344e-01 -7.08147049e-01 -4.78943348e-01 -2.81756669e-01 3.08283448e-01 1.64805084e-01 3.51160020e-01 8.42186064e-02 3.94580513e-01] [ 5.27871311e-01 2.83117831e-01 8.27784911e-02 2.80071169e-01 3.44847530e-01 7.37497360e-02 -1.87104046e-01 -2.34898672e-01 -2.04786584e-02 -1.16078548e-01 3.30308497e-01 1.39768362e-01 4.59513515e-01 3.45871508e-01 7.08931148e-01] [ 9.26647007e-01 5.17823875e-01 3.97192985e-01 -2.62659462e-03 1.15253314e-01 -1.19210415e-01 1.62187353e-01 2.34666929e-01 1.72066912e-01 -1.11141101e-01 
3.13984871e-01 3.93371761e-01 3.69986683e-01 2.28505552e-01 4.65053588e-01] [ 1.92065597e-01 -1.28476828e-01 -1.27264149e-02 -3.94395381e-01 -2.15328693e-01 -6.21738195e-01 -1.71989545e-01 1.33049227e-02 3.29377621e-01 1.91428244e-01 1.73896015e-01 1.23854600e-01 -5.00698984e-02 9.20490101e-02 2.41177037e-01] [ 6.06557690e-02 -6.45375550e-02 -1.42316688e-02 -3.90004456e-01 -4.32847708e-01 -7.25504637e-01 -3.35134208e-01 -2.26868838e-01 -1.32201120e-01 -3.04904938e-01 -2.10912243e-01 3.34682353e-02 -2.02535510e-01 -1.19712047e-01 -1.61819592e-01] [-1.74750403e-01 -3.07648987e-01 -5.57709575e-01 -6.19953156e-01 -4.28349137e-01 -2.74375051e-01 -2.00329021e-01 -1.37500927e-01 -1.69362232e-01 -1.79925695e-01 -3.10463279e-01 -6.18535690e-02 -7.55527690e-02 2.20942244e-01 1.33199930e-01] [-8.07932541e-02 -1.96453139e-01 -4.98937279e-01 -2.50974298e-01 2.35060062e-02 4.45541441e-01 3.80293697e-01 3.52459550e-01 9.22363997e-02 -8.07814226e-02 -3.17496926e-01 6.16669133e-02 8.78949985e-02 8.22595775e-01 8.03774655e-01] [-4.57595140e-02 -1.30115196e-01 -4.40188199e-01 -2.11526155e-01 7.23852739e-02 6.25587761e-01 5.45455933e-01 5.63381672e-01 2.77972370e-01 2.82648295e-01 -1.80566505e-01 1.84664682e-01 1.04952633e-01 1.04968560e+00 9.56744194e-01]]]]; ov_res: [[[[ 4.28940892e-01 2.84515083e-01 2.56993145e-01 3.62802744e-01 7.86774233e-02 -1.29645780e-01 -1.77452281e-01 1.39518484e-01 -3.87704760e-01 -2.63805270e-01 -3.71398240e-01 2.36046389e-01 -9.51595604e-02 2.43634377e-02 -3.30292583e-01] [ 2.65442759e-01 1.25771821e-01 1.03760257e-01 2.09860235e-01 4.56974320e-02 -1.27034381e-01 -2.65621580e-02 2.15468273e-01 -1.43853366e-01 -9.64133665e-02 -2.55408168e-01 2.16287985e-01 -3.09924874e-02 2.59782374e-01 -4.30900455e-02] [ 5.52606225e-01 2.20155776e-01 -2.03516915e-01 -1.36124358e-01 -7.12277517e-02 -1.02361694e-01 1.76782787e-01 2.04551697e-01 -1.56073973e-01 -6.88026771e-02 -3.37022930e-01 2.21931443e-01 -2.48541623e-01 -6.86504543e-02 -5.56685150e-01] [ 6.61964715e-02 
-9.21650454e-02 -1.31801441e-01 -3.02683234e-01 -1.21462613e-01 -3.23246270e-02 3.76097113e-01 4.20573056e-01 3.56623083e-01 5.54315150e-01 2.16894686e-01 5.66029787e-01 8.66215304e-02 4.76237416e-01 8.23435783e-02] [ 4.08680826e-01 2.43478164e-01 2.67328769e-01 4.53341939e-02 2.01680556e-01 1.00346074e-01 2.56065458e-01 2.78211534e-01 3.75746161e-01 7.07540452e-01 4.25321758e-01 7.31759787e-01 2.86137134e-01 4.79000717e-01 4.75919284e-02] [ 4.31684732e-01 5.72429597e-01 9.94688451e-01 3.39491099e-01 3.90787452e-01 1.32835984e-01 4.66348529e-01 3.05887699e-01 7.94407845e-01 9.72419024e-01 8.86143148e-01 6.29758596e-01 3.74159187e-01 3.76499951e-01 1.28538728e-01] [ 6.49217665e-01 6.96407318e-01 9.32617843e-01 3.09522063e-01 6.05274558e-01 1.83816984e-01 3.10012192e-01 -3.55325341e-01 2.50413090e-01 5.65852284e-01 9.58541214e-01 6.26351774e-01 5.60991287e-01 1.98688507e-01 4.89431210e-02] [ 4.47826296e-01 6.24873817e-01 7.99206972e-01 2.58389115e-01 6.02267861e-01 1.86929330e-01 4.66072708e-01 -3.81672919e-01 3.26059937e-01 3.22519243e-01 6.46465778e-01 2.57009745e-01 1.60521910e-01 -1.56119987e-01 -2.45881870e-01] [ 2.53283858e-01 1.90836668e-01 3.00871849e-01 -5.59537485e-02 5.71614981e-01 1.87907144e-01 3.21085006e-01 -4.80008066e-01 -1.90438509e-01 -4.04490590e-01 -2.81883657e-01 -2.61494428e-01 1.02172226e-01 1.35746494e-01 2.98404843e-01] [ 5.21592438e-01 3.44051361e-01 3.00688446e-01 4.68671136e-03 1.39374644e-01 -3.63601327e-01 -1.50978923e-01 -1.01010785e-01 1.82882264e-01 -9.97697785e-02 -5.57073891e-01 -5.24379075e-01 -3.89976263e-01 9.28562731e-02 5.04131973e-01] [ 2.87359029e-01 3.01508039e-01 1.54131576e-01 -1.87392920e-01 -3.77052367e-01 -6.57542169e-01 -3.81739885e-01 -3.78040940e-01 -4.06648993e-01 -3.62624049e-01 -3.02680224e-01 6.75148070e-02 3.71414393e-01 5.86920500e-01 9.42631304e-01] [ 3.08409691e-01 7.11447001e-01 4.70786929e-01 1.69491291e-01 -6.08147323e-01 -8.14669192e-01 -5.18878341e-01 -2.57223547e-01 -1.31347049e-02 1.80126190e-01 
2.19971851e-01 1.11864775e-01 3.14624727e-01 3.47096860e-01 8.01769674e-01] [ 2.95383722e-01 8.34361851e-01 4.23815697e-01 3.53431612e-01 -3.32327247e-01 -3.42057168e-01 -1.72135875e-01 -2.80758440e-01 -9.41317230e-02 2.44727405e-03 3.77700210e-01 2.57383317e-01 5.93159497e-01 -2.49356832e-02 1.21251225e-01] [ 5.78569233e-01 8.95711064e-01 4.88977313e-01 3.06496881e-02 -5.67138433e-01 -5.33644915e-01 -1.32090092e-01 1.30956143e-01 3.63996655e-01 4.30988222e-01 3.51470411e-01 2.32948828e-02 6.80057108e-02 -3.71876091e-01 -1.58972397e-01] [ 7.36717999e-01 7.73253262e-01 2.40170732e-01 -2.27466807e-01 -5.76135457e-01 -5.89729965e-01 -2.01607779e-01 4.00329530e-02 2.10524321e-01 3.69567662e-01 3.52040619e-01 3.81648540e-01 2.36829057e-01 -2.67006338e-01 -3.06510508e-01]] [[ 6.73480630e-01 3.38197798e-01 8.03261530e-03 -3.91844988e-01 3.60097080e-01 1.32170841e-01 3.51083755e-01 -1.60006002e-01 -3.33634138e-01 -5.74811315e-03 -2.27637485e-01 3.05675775e-01 1.61030054e-01 2.89994121e-01 2.11229444e-01] [ 3.67798656e-01 1.88278891e-02 -2.39012957e-01 -4.35198665e-01 3.31847340e-01 1.30807534e-01 9.83479172e-02 -4.56263810e-01 -2.93738216e-01 1.17383534e-02 8.23738649e-02 5.62505484e-01 7.51956701e-01 7.98483789e-01 5.88250816e-01] [-1.24584757e-01 -8.99681374e-02 -1.08091399e-01 -6.38416037e-02 2.27176219e-01 2.03993455e-01 2.10853890e-02 -5.49163997e-01 -4.13346648e-01 -1.59444615e-01 1.19142972e-01 3.97890210e-01 4.56316888e-01 2.84533501e-01 -2.77222116e-02] [-3.15155625e-01 -1.71846166e-01 -1.58349499e-01 4.01216894e-02 1.14341818e-01 2.06991151e-01 -1.85605496e-01 -3.56423885e-01 -3.66862625e-01 4.29149345e-02 4.57410067e-02 4.25148904e-01 2.70670921e-01 3.31059515e-01 -1.41236335e-02] [-3.26179534e-01 2.50557959e-02 5.79187796e-02 1.16804451e-01 -2.65227944e-01 1.27183599e-02 -2.70138234e-01 -1.76687911e-01 -7.36099720e-01 -2.18041334e-02 -2.21094027e-01 1.43310025e-01 -5.79381883e-01 -4.80666041e-01 -7.28930295e-01] [-1.33189261e-02 2.36822233e-01 -3.92440148e-02 
-2.28329048e-01 -7.17366099e-01 -2.73103833e-01 -3.82382125e-01 -1.01852618e-01 -5.55132151e-01 6.20116107e-03 -1.73021108e-01 2.97106504e-02 -1.80485696e-01 -2.73257732e-01 -3.67883921e-01] [-6.20115995e-01 1.39969718e-02 -2.09359396e-02 -4.03688736e-02 -6.31810069e-01 -2.92200536e-01 -4.92867798e-01 -4.39518005e-01 -5.74538648e-01 -2.41314247e-01 -1.41612560e-01 -2.65359133e-01 1.11281341e-02 -2.18203999e-02 1.74058333e-01] [-5.24095595e-01 -9.69279483e-02 1.80650666e-01 2.31223449e-01 -1.48058861e-01 -1.60017028e-01 -2.07526520e-01 -3.64187034e-03 -2.72511374e-02 -5.25758527e-02 -1.86205328e-01 -1.96719557e-01 1.58188969e-01 3.22076082e-02 5.06407619e-02] [-4.01630670e-01 -1.61600351e-01 4.81887192e-01 4.36878443e-01 3.91609371e-01 1.79946631e-01 2.42144987e-01 4.01064098e-01 1.39365345e-01 2.17860579e-01 1.77208856e-01 5.80633163e-01 6.26661956e-01 2.82889158e-01 5.55486083e-02] [-4.47042227e-01 -4.93983477e-01 4.32326168e-01 3.86229157e-01 6.91096783e-01 2.64041334e-01 5.33230841e-01 6.57828510e-01 3.48107487e-01 4.30136144e-01 2.68263698e-01 7.41481245e-01 3.34413260e-01 -7.08056390e-02 -5.91624737e-01] [-4.32796150e-01 -3.81767690e-01 2.17149377e-01 1.22428402e-01 4.23423856e-01 2.34671906e-01 3.82247001e-01 1.52261093e-01 4.46142219e-02 1.19141608e-01 3.32701534e-01 6.12875223e-01 4.67180669e-01 1.59832507e-01 -2.41417333e-01] [-5.47979593e-01 -4.15841639e-01 -7.80786276e-02 1.05551630e-01 1.41980127e-01 1.78038895e-01 1.84166897e-02 -5.86946011e-02 -1.67803094e-02 9.04287249e-02 4.29194514e-03 -1.11024722e-01 -1.02603659e-01 1.18281603e-01 1.55059593e-02] [ 2.48023257e-01 2.69586239e-02 -2.23311156e-01 -4.20601726e-01 -4.65906978e-01 -5.20187952e-02 -1.48397088e-01 1.41141877e-01 -1.23073921e-01 1.50846243e-01 -2.05221564e-01 5.33857197e-02 1.80431813e-01 4.54539448e-01 3.36041719e-01] [ 3.49352270e-01 1.93350762e-01 -1.10599257e-01 -4.14889961e-01 -6.24189794e-01 -2.32657433e-01 -2.10464165e-01 3.62984985e-01 2.18344867e-01 3.78782511e-01 -2.42988765e-01 
-1.85167268e-01 -3.29409242e-01 -1.57325715e-01 -2.06068680e-01] [ 6.50825262e-01 3.48786145e-01 -3.00913453e-02 -7.24527359e-01 -8.19074690e-01 -5.21313488e-01 -1.76309034e-01 3.36489171e-01 2.09673628e-01 2.58150190e-01 -2.88262606e-01 -8.87328386e-03 -3.15811217e-01 -3.85027736e-01 -6.55001521e-01]] [[ 3.95037323e-01 4.94841635e-02 -6.87875897e-02 3.32133770e-01 4.69338685e-01 -4.09669615e-02 -4.85447317e-01 -9.32251036e-01 -6.47134721e-01 -4.20813173e-01 -9.55915675e-02 -3.74354035e-01 -7.20231473e-01 -8.21836770e-01 -8.55861187e-01] [ 4.12417293e-01 1.30967081e-01 -1.48700640e-01 3.17226529e-01 4.57118183e-01 2.68548369e-01 -2.68867821e-01 -7.97145307e-01 -6.48112595e-01 -4.63509887e-01 -5.00393212e-02 -1.90471724e-01 -1.01127252e-01 -9.72169340e-02 -1.37408942e-01] [ 2.30323315e-01 9.95925665e-02 -8.77990499e-02 2.76026577e-01 2.59576648e-01 4.20379996e-01 -1.09754369e-01 -3.31431180e-01 -4.09803092e-01 -2.47772217e-01 2.54776087e-02 4.48596291e-02 1.28278911e-01 6.81868717e-02 -7.71791637e-02] [ 4.09343727e-02 -2.61228774e-02 -5.27362376e-02 5.16409986e-03 2.19732393e-02 2.12640747e-01 4.86500673e-02 8.40777829e-02 1.15157723e-01 7.22677410e-02 9.83265787e-02 -2.91472077e-02 2.83297986e-01 3.43306363e-01 3.83090168e-01] [ 1.67996690e-01 1.26211137e-01 7.34238252e-02 -3.07451457e-01 -3.12688768e-01 -2.00472161e-01 -9.05224010e-02 1.30035281e-01 4.00984734e-01 4.36768234e-01 2.39030495e-01 9.28227417e-03 -2.36822784e-01 -2.71998376e-01 -3.37601215e-01] [ 1.20946467e-02 3.07534561e-02 2.78742407e-02 -2.44592398e-01 -2.77206540e-01 -4.26189274e-01 -4.76002693e-01 -3.32831085e-01 2.44587258e-01 3.21650654e-01 4.09120530e-01 -1.71900652e-02 -1.14332676e-01 -1.80858374e-01 -3.26985717e-02] [-2.12139308e-01 -3.43314139e-03 -5.04981764e-02 1.92133248e-01 6.55392825e-04 -2.55862236e-01 -7.77663469e-01 -9.07699287e-01 -3.00942063e-01 6.95299357e-02 4.19979841e-01 4.65659909e-02 -7.79130980e-02 -1.06389552e-01 9.90842283e-02] [ 1.46412492e-01 2.69616187e-01 
2.46697053e-01 4.94361341e-01 1.83993429e-01 -1.88936770e-01 -7.04695344e-01 -7.08147049e-01 -4.78943348e-01 -2.81756669e-01 3.08283448e-01 1.64805084e-01 3.51160020e-01 8.42186064e-02 3.94580513e-01] [ 5.27871311e-01 2.83117831e-01 8.27784911e-02 2.80071169e-01 3.44847530e-01 7.37497360e-02 -1.87104046e-01 -2.34898672e-01 -2.04786584e-02 -1.16078548e-01 3.30308497e-01 1.39768362e-01 4.59513515e-01 3.45871508e-01 7.08931148e-01] [ 9.26647007e-01 5.17823875e-01 3.97192985e-01 -2.62659462e-03 1.15253314e-01 -1.19210415e-01 1.62187353e-01 2.34666929e-01 1.72066912e-01 -1.11141101e-01 3.13984871e-01 3.93371761e-01 3.69986683e-01 2.28505552e-01 4.65053588e-01] [ 1.92065597e-01 -1.28476828e-01 -1.27264149e-02 -3.94395381e-01 -2.15328693e-01 -6.21738195e-01 -1.71989545e-01 1.33049227e-02 3.29377621e-01 1.91428244e-01 1.73896015e-01 1.23854600e-01 -5.00698984e-02 9.20490101e-02 2.41177037e-01] [ 6.06557690e-02 -6.45375550e-02 -1.42316688e-02 -3.90004456e-01 -4.32847708e-01 -7.25504637e-01 -3.35134208e-01 -2.26868838e-01 -1.32201120e-01 -3.04904938e-01 -2.10912243e-01 3.34682353e-02 -2.02535510e-01 -1.19712047e-01 -1.61819592e-01] [-1.74750403e-01 -3.07648987e-01 -5.57709575e-01 -6.19953156e-01 -4.28349137e-01 -2.74375051e-01 -2.00329021e-01 -1.37500927e-01 -1.69362232e-01 -1.79925695e-01 -3.10463279e-01 -6.18535690e-02 -7.55527690e-02 2.20942244e-01 1.33199930e-01] [-8.07932541e-02 -1.96453139e-01 -4.98937279e-01 -2.50974298e-01 2.35060062e-02 4.45541441e-01 3.80293697e-01 3.52459550e-01 9.22363997e-02 -8.07814226e-02 -3.17496926e-01 6.16669133e-02 8.78949985e-02 8.22595775e-01 8.03774655e-01] [-4.57595140e-02 -1.30115196e-01 -4.40188199e-01 -2.11526155e-01 7.23852739e-02 6.25587761e-01 5.45455933e-01 5.63381672e-01 2.77972370e-01 2.82648295e-01 -1.80566505e-01 1.84664682e-01 1.04952633e-01 1.04968560e+00 9.56744194e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [0, 1]} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5676.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[0, 1]]() %6 : int[] = prim::Constant[value=[1, 1]]() %7 : int[] = prim::Constant[value=[3, 3]]() %8 : Tensor = aten::avg_pool2d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%8) fw_re: [[[[-2.49399468e-01 -2.09376752e-01 -3.75980318e-01 -8.48141238e-02 1.58586457e-01 -1.35508835e-01 -2.43040025e-01 -2.44963512e-01 -5.05310893e-02 5.26905581e-02 -1.39211655e-01 -3.81040238e-02 -2.48393029e-01 -2.30409756e-01 -1.59224987e-01] [-1.19222365e-01 -2.77182698e-01 -2.17816472e-01 -2.73371994e-01 1.55792743e-01 -8.41804072e-02 -5.36165982e-02 1.22041302e-02 1.76502466e-02 3.80581170e-01 8.70565847e-02 -1.35502154e-02 -1.30929157e-01 -1.02811895e-01 2.74566442e-01] [ 1.33234873e-01 -9.63513777e-02 1.09867357e-01 -1.23733126e-01 3.32720608e-01 -8.18918720e-02 -2.54764527e-01 -6.04400225e-02 1.27557479e-02 4.97730613e-01 -1.45854264e-01 -1.81524575e-01 -2.20860213e-01 7.03773126e-02 3.26106101e-01] [-1.97468847e-02 -1.18188471e-01 -3.91312316e-02 -1.70294225e-01 1.01030514e-01 2.03932989e-02 -5.01552485e-02 2.99004912e-01 2.14349329e-01 4.88689423e-01 -5.01052558e-01 -2.67861336e-01 -2.84220189e-01 3.23923528e-01 2.46091425e-01] [ 2.39203766e-01 3.63916814e-01 1.09976329e-01 7.05466643e-02 -2.20345519e-02 -2.34961882e-01 -3.98414850e-01 -2.26763994e-01 5.52445054e-02 4.20391083e-01 -1.26272738e-01 1.90606058e-01 1.36697590e-01 5.87672472e-01 3.02387834e-01] [ 2.77354687e-01 4.50069278e-01 2.26680458e-01 1.84056118e-01 1.73588961e-01 1.05371540e-02 
-3.73076081e-01 -3.91242981e-01 -2.04751179e-01 4.60837781e-01 1.71195701e-01 5.43100953e-01 2.38256723e-01 3.59205872e-01 3.85585837e-02] [-3.01251207e-02 1.04994774e-01 -1.17373150e-02 2.05627289e-02 -5.66593148e-02 -2.60647774e-01 -3.28152537e-01 -1.52663767e-01 8.23767334e-02 6.86528862e-01 6.19249582e-01 6.15015805e-01 1.69849634e-01 1.16894692e-01 6.66012764e-02] [ 3.38007025e-02 1.88737929e-01 9.03843194e-02 2.47536693e-02 3.90480198e-02 1.09889023e-01 1.74668506e-01 2.44652778e-01 1.86518833e-01 6.14635229e-01 4.53407615e-01 4.20812249e-01 -9.25112814e-02 -1.42931923e-01 -1.20945036e-01] [-4.46007013e-01 -1.61480725e-01 -2.62455165e-01 -3.08857799e-01 -3.80599082e-01 -1.05179347e-01 6.95931911e-02 7.70855546e-02 -5.47351427e-02 1.80349365e-01 1.81239858e-01 7.44349584e-02 -2.55852267e-02 1.05741816e-02 2.77331948e-01] [ 9.60473120e-02 1.17572416e-02 -1.90505505e-01 -4.56401944e-01 -3.31257701e-01 -2.57515848e-01 -9.29388180e-02 -2.89132178e-01 -3.38421911e-01 -2.84021616e-01 -1.24837242e-01 -4.88208756e-02 4.05275896e-02 2.35996172e-01 5.50488174e-01] [-1.41098768e-01 -3.04170489e-01 -6.24669671e-01 -7.64995813e-01 -5.90947568e-01 -1.88889846e-01 -2.48053923e-01 -2.72288799e-01 -2.73854136e-01 -3.23924482e-01 -2.75249720e-01 -1.78873733e-01 -8.75471160e-02 1.37435988e-01 1.72948554e-01] [ 2.43736625e-01 -6.23977855e-02 -2.31037587e-01 -3.18242610e-01 -2.32341349e-01 -9.65890661e-02 -9.13041979e-02 1.06635608e-01 3.47012579e-01 4.35745567e-02 -9.21442583e-02 -1.92603275e-01 -1.62173256e-01 1.58356875e-01 3.61032896e-02] [-3.26173127e-01 -3.05084795e-01 -1.95651114e-01 4.84899022e-02 2.32166052e-01 3.82489264e-01 6.80302922e-03 1.73705459e-01 4.92179990e-01 4.85089839e-01 2.68667579e-01 7.50711784e-02 -1.00642137e-01 -1.30135156e-02 -2.75937110e-01]] [[-9.48358774e-02 -4.33753990e-02 -5.94842173e-02 1.67589635e-01 1.98571280e-01 1.45166636e-01 6.55053407e-02 1.93040788e-01 2.53241181e-01 2.10853592e-01 1.28259778e-01 2.35253990e-01 1.00695878e-01 
-3.02864134e-01 -6.82086408e-01] [-6.10621413e-03 5.85426539e-02 -1.46512508e-01 -7.64453411e-03 5.40969754e-03 3.26833844e-01 3.44330400e-01 2.93574870e-01 -2.90809534e-02 4.39041178e-04 -4.39135134e-02 1.22312754e-01 1.77692950e-01 -1.77168667e-01 -4.98166829e-01] [-2.73611248e-01 -1.31545857e-01 -2.45959699e-01 2.64529973e-01 2.65897773e-02 3.61208498e-01 2.08219159e-02 2.41771862e-01 -7.48274028e-02 6.01609349e-02 6.62592649e-02 -3.80186103e-02 2.84634560e-01 1.29887581e-01 2.06601694e-01] [-1.71878278e-01 -1.26439286e-02 -1.70072347e-01 4.89600711e-02 -2.16644704e-01 6.38404131e-01 2.75400519e-01 2.88508773e-01 -2.46771276e-01 -5.26148826e-02 3.09026748e-01 6.02516718e-02 5.62651694e-01 9.03164670e-02 2.73951381e-01] [-2.55419940e-01 -2.71067262e-01 -4.84208226e-01 -1.42252184e-02 2.16316298e-01 8.12473178e-01 5.65185428e-01 4.33803976e-01 -1.03165947e-01 6.25252724e-05 1.95620462e-01 1.16534352e-01 3.29517603e-01 3.94090004e-02 3.07381511e-01] [ 4.74998623e-01 1.71283275e-01 -2.76522368e-01 -2.86806643e-01 1.09254986e-01 5.86787105e-01 6.56920731e-01 1.96672931e-01 -7.00047240e-02 -4.62009758e-02 1.07465453e-01 -2.01223075e-01 4.90060262e-02 6.54575080e-02 5.85042417e-01] [ 8.06764126e-01 2.49238595e-01 -2.00178713e-01 -5.19861460e-01 1.89401239e-01 2.84937322e-01 5.68353832e-01 2.26015806e-01 -3.44661884e-02 9.77428723e-03 -2.92585403e-01 -3.74543995e-01 -5.85255265e-01 -2.03126028e-01 2.20923766e-01] [ 9.03738916e-01 4.59085196e-01 2.98976507e-02 -5.25015175e-01 -1.03565440e-01 -8.91525205e-03 1.75439253e-01 -2.02589542e-01 -1.89750820e-01 9.07569081e-02 -1.63791701e-02 -1.39454708e-01 -4.36970115e-01 -1.32635936e-01 6.48717657e-02] [ 4.01531667e-01 5.15592620e-02 -2.33076468e-01 -5.23427844e-01 -5.45921959e-02 -8.94540474e-02 -1.94849461e-01 -4.00057495e-01 -2.92300969e-01 1.72493160e-01 6.65713251e-02 -1.41521186e-01 -7.00712442e-01 -6.54586256e-01 -5.51767170e-01] [-1.38868362e-01 -1.08488217e-01 -2.17851296e-01 -1.47858679e-01 -3.81697877e-03 
-2.19309986e-01 -1.23943366e-01 -3.33865315e-01 -8.43510851e-02 -1.74664427e-02 1.58170521e-01 -1.76957116e-01 -5.51592171e-01 -7.78919399e-01 -6.64364398e-01] [-2.58964654e-02 -2.09247530e-01 -1.80361181e-01 2.43439570e-01 4.04643506e-01 2.12064102e-01 -1.38489753e-01 5.84141463e-02 4.81408834e-02 8.66356492e-02 -3.04489732e-02 -5.96578360e-01 -7.04239130e-01 -8.56281519e-01 -4.19694990e-01] [ 1.12081952e-01 6.96781427e-02 2.49025270e-01 3.98060352e-01 5.47906160e-01 4.65831906e-01 4.84169960e-01 5.07527173e-01 2.87917644e-01 1.19384140e-01 4.52800244e-02 -7.67995119e-02 -2.73190141e-01 -3.23221415e-01 -2.29230821e-01] [ 2.93103635e-01 2.86932975e-01 3.73092055e-01 2.76087880e-01 1.19773388e-01 3.09211046e-01 2.55563170e-01 3.92505229e-01 1.05497852e-01 9.64634959e-03 1.56627566e-01 4.35355544e-01 4.72477794e-01 3.55330676e-01 2.30056360e-01]] [[-1.93534866e-01 -3.24773788e-01 -3.91162038e-02 -1.65322706e-01 1.01283461e-01 3.35900515e-01 3.25699657e-01 1.62996292e-01 -1.82252452e-01 -3.52890611e-01 -3.65076996e-02 2.53391236e-01 5.54228842e-01 5.55686474e-01 4.95962024e-01] [-2.64371961e-01 -3.69890600e-01 7.42365643e-02 -3.32660288e-01 -1.37103662e-01 -8.56967643e-02 3.98478955e-01 3.81851524e-01 3.12560380e-01 -1.11672558e-01 -1.69881493e-01 -1.71629310e-01 -1.36531126e-02 -3.36115900e-03 8.02153349e-03] [-4.80592519e-01 -3.78965735e-01 1.64838463e-01 -1.16901435e-01 1.78195108e-02 -6.70270249e-02 3.26830417e-01 2.37451851e-01 9.65214819e-02 -7.51630440e-02 -3.89188975e-01 -2.82692075e-01 -1.95718065e-01 -4.76908498e-02 -6.27908781e-02] [-5.45325041e-01 -2.00279534e-01 2.12215930e-01 2.05977678e-01 -3.83125022e-02 -1.79202646e-01 1.11134052e-02 -2.84958687e-02 3.03986669e-01 1.38802871e-01 -4.37385105e-02 -1.42356418e-02 8.93118754e-02 2.48865232e-01 4.86559384e-02] [ 2.92783618e-01 9.25421193e-02 -2.11501971e-01 -2.59626836e-01 -4.69165474e-01 -2.78029412e-01 -4.42803264e-01 -2.44304016e-01 -1.81932390e-01 1.68789864e-01 1.37037843e-01 4.53783453e-01 
2.87832737e-01 3.60170186e-01 7.42715672e-02] [ 1.41077802e-01 2.85002217e-02 -2.95416415e-01 -4.86457534e-02 -7.22820386e-02 7.04177767e-02 -2.72044241e-01 -1.68632284e-01 -1.41939297e-01 1.13652878e-01 1.05386108e-01 2.87520707e-01 3.33662421e-01 2.57784545e-01 1.53770462e-01] [ 5.50594389e-01 -5.87272234e-02 -4.78691518e-01 -3.00053924e-01 1.54874790e-02 1.41301185e-01 -4.28332716e-01 -2.20856547e-01 -5.74672222e-01 4.92408723e-02 -5.07275127e-02 4.90493357e-01 4.34716344e-01 2.08442837e-01 -5.91725111e-03] [-3.27145569e-02 -4.56933044e-02 -1.48149952e-01 2.52397746e-01 4.67105240e-01 6.37995005e-01 2.23313063e-01 8.54659379e-02 -2.42105797e-01 1.57883003e-01 1.88912466e-01 3.61653596e-01 3.34198833e-01 -1.54124305e-01 -2.60942549e-01] [ 1.67494323e-02 -1.31733352e-02 -1.17320284e-01 8.65461528e-02 2.87374884e-01 7.37075031e-01 2.55815446e-01 1.88790858e-01 -1.57956079e-01 3.32902163e-01 3.79601121e-01 5.22440016e-01 1.81770161e-01 -4.35301811e-01 -7.23199904e-01] [-6.66518435e-02 1.78751007e-01 1.93353698e-01 4.27638203e-01 5.48894227e-01 1.16047549e+00 7.86911964e-01 6.82763338e-01 2.02170447e-01 1.61811411e-01 1.12256557e-01 2.61807553e-02 1.52664587e-01 -5.68939269e-01 -6.54303193e-01] [-2.56388336e-01 -1.83012828e-01 8.31374377e-02 2.66134113e-01 6.55860543e-01 8.79325807e-01 6.93938792e-01 4.42889631e-01 2.56706119e-01 -1.52390569e-01 -1.22283138e-01 -1.27523556e-01 2.80661255e-01 -2.43323773e-01 -3.05849522e-01] [ 1.30084828e-01 5.83227091e-02 3.18437666e-02 1.89134106e-01 4.59331870e-01 4.05444205e-01 5.30353725e-01 1.93810701e-01 1.82827875e-01 -5.50233305e-01 -4.75966185e-01 -3.78148198e-01 2.04275876e-01 -2.56833792e-01 -4.36127186e-01] [ 3.23352545e-01 1.01170681e-01 -6.55280799e-02 -1.37854684e-02 4.02368873e-01 4.63186651e-02 4.51314926e-01 -7.80029502e-03 3.25674504e-01 -2.83885360e-01 -3.60643059e-01 -8.94819647e-02 -9.62070003e-02 1.13617979e-01 -1.34842172e-01]]]]; ov_res: [[[[-2.49399468e-01 -2.09376752e-01 -3.75980318e-01 -8.48141238e-02 
1.58586457e-01 -1.35508835e-01 -2.43040025e-01 -2.44963512e-01 -5.05310893e-02 5.26905581e-02 -1.39211655e-01 -3.81040238e-02 -2.48393029e-01 -2.30409756e-01 -1.59224987e-01] [-1.19222365e-01 -2.77182698e-01 -2.17816472e-01 -2.73371994e-01 1.55792743e-01 -8.41804072e-02 -5.36165982e-02 1.22041302e-02 1.76502466e-02 3.80581170e-01 8.70565847e-02 -1.35502154e-02 -1.30929157e-01 -1.02811895e-01 2.74566442e-01] [ 1.33234873e-01 -9.63513777e-02 1.09867357e-01 -1.23733126e-01 3.32720608e-01 -8.18918720e-02 -2.54764527e-01 -6.04400225e-02 1.27557479e-02 4.97730613e-01 -1.45854264e-01 -1.81524575e-01 -2.20860213e-01 7.03773126e-02 3.26106101e-01] [-1.97468847e-02 -1.18188471e-01 -3.91312316e-02 -1.70294225e-01 1.01030514e-01 2.03932989e-02 -5.01552485e-02 2.99004912e-01 2.14349329e-01 4.88689423e-01 -5.01052558e-01 -2.67861336e-01 -2.84220189e-01 3.23923528e-01 2.46091425e-01] [ 2.39203766e-01 3.63916814e-01 1.09976329e-01 7.05466643e-02 -2.20345519e-02 -2.34961882e-01 -3.98414850e-01 -2.26763994e-01 5.52445054e-02 4.20391083e-01 -1.26272738e-01 1.90606058e-01 1.36697590e-01 5.87672472e-01 3.02387834e-01] [ 2.77354687e-01 4.50069278e-01 2.26680458e-01 1.84056118e-01 1.73588961e-01 1.05371540e-02 -3.73076081e-01 -3.91242981e-01 -2.04751179e-01 4.60837781e-01 1.71195701e-01 5.43100953e-01 2.38256723e-01 3.59205872e-01 3.85585837e-02] [-3.01251207e-02 1.04994774e-01 -1.17373150e-02 2.05627289e-02 -5.66593148e-02 -2.60647774e-01 -3.28152537e-01 -1.52663767e-01 8.23767334e-02 6.86528862e-01 6.19249582e-01 6.15015805e-01 1.69849634e-01 1.16894692e-01 6.66012764e-02] [ 3.38007025e-02 1.88737929e-01 9.03843194e-02 2.47536693e-02 3.90480198e-02 1.09889023e-01 1.74668506e-01 2.44652778e-01 1.86518833e-01 6.14635229e-01 4.53407615e-01 4.20812249e-01 -9.25112814e-02 -1.42931923e-01 -1.20945036e-01] [-4.46007013e-01 -1.61480725e-01 -2.62455165e-01 -3.08857799e-01 -3.80599082e-01 -1.05179347e-01 6.95931911e-02 7.70855546e-02 -5.47351427e-02 1.80349365e-01 1.81239858e-01 7.44349584e-02 
-2.55852267e-02 1.05741816e-02 2.77331948e-01] [ 9.60473120e-02 1.17572416e-02 -1.90505505e-01 -4.56401944e-01 -3.31257701e-01 -2.57515848e-01 -9.29388180e-02 -2.89132178e-01 -3.38421911e-01 -2.84021616e-01 -1.24837242e-01 -4.88208756e-02 4.05275896e-02 2.35996172e-01 5.50488174e-01] [-1.41098768e-01 -3.04170489e-01 -6.24669671e-01 -7.64995813e-01 -5.90947568e-01 -1.88889846e-01 -2.48053923e-01 -2.72288799e-01 -2.73854136e-01 -3.23924482e-01 -2.75249720e-01 -1.78873733e-01 -8.75471160e-02 1.37435988e-01 1.72948554e-01] [ 2.43736625e-01 -6.23977855e-02 -2.31037587e-01 -3.18242610e-01 -2.32341349e-01 -9.65890661e-02 -9.13041979e-02 1.06635608e-01 3.47012579e-01 4.35745567e-02 -9.21442583e-02 -1.92603275e-01 -1.62173256e-01 1.58356875e-01 3.61032896e-02] [-3.26173127e-01 -3.05084795e-01 -1.95651114e-01 4.84899022e-02 2.32166052e-01 3.82489264e-01 6.80302922e-03 1.73705459e-01 4.92179990e-01 4.85089839e-01 2.68667579e-01 7.50711784e-02 -1.00642137e-01 -1.30135156e-02 -2.75937110e-01]] [[-9.48358774e-02 -4.33753990e-02 -5.94842173e-02 1.67589635e-01 1.98571280e-01 1.45166636e-01 6.55053407e-02 1.93040788e-01 2.53241181e-01 2.10853592e-01 1.28259778e-01 2.35253990e-01 1.00695878e-01 -3.02864134e-01 -6.82086408e-01] [-6.10621413e-03 5.85426539e-02 -1.46512508e-01 -7.64453411e-03 5.40969754e-03 3.26833844e-01 3.44330400e-01 2.93574870e-01 -2.90809534e-02 4.39041178e-04 -4.39135134e-02 1.22312754e-01 1.77692950e-01 -1.77168667e-01 -4.98166829e-01] [-2.73611248e-01 -1.31545857e-01 -2.45959699e-01 2.64529973e-01 2.65897773e-02 3.61208498e-01 2.08219159e-02 2.41771862e-01 -7.48274028e-02 6.01609349e-02 6.62592649e-02 -3.80186103e-02 2.84634560e-01 1.29887581e-01 2.06601694e-01] [-1.71878278e-01 -1.26439286e-02 -1.70072347e-01 4.89600711e-02 -2.16644704e-01 6.38404131e-01 2.75400519e-01 2.88508773e-01 -2.46771276e-01 -5.26148826e-02 3.09026748e-01 6.02516718e-02 5.62651694e-01 9.03164670e-02 2.73951381e-01] [-2.55419940e-01 -2.71067262e-01 -4.84208226e-01 -1.42252184e-02 
2.16316298e-01 8.12473178e-01 5.65185428e-01 4.33803976e-01 -1.03165947e-01 6.25252724e-05 1.95620462e-01 1.16534352e-01 3.29517603e-01 3.94090004e-02 3.07381511e-01] [ 4.74998623e-01 1.71283275e-01 -2.76522368e-01 -2.86806643e-01 1.09254986e-01 5.86787105e-01 6.56920731e-01 1.96672931e-01 -7.00047240e-02 -4.62009758e-02 1.07465453e-01 -2.01223075e-01 4.90060262e-02 6.54575080e-02 5.85042417e-01] [ 8.06764126e-01 2.49238595e-01 -2.00178713e-01 -5.19861460e-01 1.89401239e-01 2.84937322e-01 5.68353832e-01 2.26015806e-01 -3.44661884e-02 9.77428723e-03 -2.92585403e-01 -3.74543995e-01 -5.85255265e-01 -2.03126028e-01 2.20923766e-01] [ 9.03738916e-01 4.59085196e-01 2.98976507e-02 -5.25015175e-01 -1.03565440e-01 -8.91525205e-03 1.75439253e-01 -2.02589542e-01 -1.89750820e-01 9.07569081e-02 -1.63791701e-02 -1.39454708e-01 -4.36970115e-01 -1.32635936e-01 6.48717657e-02] [ 4.01531667e-01 5.15592620e-02 -2.33076468e-01 -5.23427844e-01 -5.45921959e-02 -8.94540474e-02 -1.94849461e-01 -4.00057495e-01 -2.92300969e-01 1.72493160e-01 6.65713251e-02 -1.41521186e-01 -7.00712442e-01 -6.54586256e-01 -5.51767170e-01] [-1.38868362e-01 -1.08488217e-01 -2.17851296e-01 -1.47858679e-01 -3.81697877e-03 -2.19309986e-01 -1.23943366e-01 -3.33865315e-01 -8.43510851e-02 -1.74664427e-02 1.58170521e-01 -1.76957116e-01 -5.51592171e-01 -7.78919399e-01 -6.64364398e-01] [-2.58964654e-02 -2.09247530e-01 -1.80361181e-01 2.43439570e-01 4.04643506e-01 2.12064102e-01 -1.38489753e-01 5.84141463e-02 4.81408834e-02 8.66356492e-02 -3.04489732e-02 -5.96578360e-01 -7.04239130e-01 -8.56281519e-01 -4.19694990e-01] [ 1.12081952e-01 6.96781427e-02 2.49025270e-01 3.98060352e-01 5.47906160e-01 4.65831906e-01 4.84169960e-01 5.07527173e-01 2.87917644e-01 1.19384140e-01 4.52800244e-02 -7.67995119e-02 -2.73190141e-01 -3.23221415e-01 -2.29230821e-01] [ 2.93103635e-01 2.86932975e-01 3.73092055e-01 2.76087880e-01 1.19773388e-01 3.09211046e-01 2.55563170e-01 3.92505229e-01 1.05497852e-01 9.64634959e-03 1.56627566e-01 
4.35355544e-01 4.72477794e-01 3.55330676e-01 2.30056360e-01]] [[-1.93534866e-01 -3.24773788e-01 -3.91162038e-02 -1.65322706e-01 1.01283461e-01 3.35900515e-01 3.25699657e-01 1.62996292e-01 -1.82252452e-01 -3.52890611e-01 -3.65076996e-02 2.53391236e-01 5.54228842e-01 5.55686474e-01 4.95962024e-01] [-2.64371961e-01 -3.69890600e-01 7.42365643e-02 -3.32660288e-01 -1.37103662e-01 -8.56967643e-02 3.98478955e-01 3.81851524e-01 3.12560380e-01 -1.11672558e-01 -1.69881493e-01 -1.71629310e-01 -1.36531126e-02 -3.36115900e-03 8.02153349e-03] [-4.80592519e-01 -3.78965735e-01 1.64838463e-01 -1.16901435e-01 1.78195108e-02 -6.70270249e-02 3.26830417e-01 2.37451851e-01 9.65214819e-02 -7.51630440e-02 -3.89188975e-01 -2.82692075e-01 -1.95718065e-01 -4.76908498e-02 -6.27908781e-02] [-5.45325041e-01 -2.00279534e-01 2.12215930e-01 2.05977678e-01 -3.83125022e-02 -1.79202646e-01 1.11134052e-02 -2.84958687e-02 3.03986669e-01 1.38802871e-01 -4.37385105e-02 -1.42356418e-02 8.93118754e-02 2.48865232e-01 4.86559384e-02] [ 2.92783618e-01 9.25421193e-02 -2.11501971e-01 -2.59626836e-01 -4.69165474e-01 -2.78029412e-01 -4.42803264e-01 -2.44304016e-01 -1.81932390e-01 1.68789864e-01 1.37037843e-01 4.53783453e-01 2.87832737e-01 3.60170186e-01 7.42715672e-02] [ 1.41077802e-01 2.85002217e-02 -2.95416415e-01 -4.86457534e-02 -7.22820386e-02 7.04177767e-02 -2.72044241e-01 -1.68632284e-01 -1.41939297e-01 1.13652878e-01 1.05386108e-01 2.87520707e-01 3.33662421e-01 2.57784545e-01 1.53770462e-01] [ 5.50594389e-01 -5.87272234e-02 -4.78691518e-01 -3.00053924e-01 1.54874790e-02 1.41301185e-01 -4.28332716e-01 -2.20856547e-01 -5.74672222e-01 4.92408723e-02 -5.07275127e-02 4.90493357e-01 4.34716344e-01 2.08442837e-01 -5.91725111e-03] [-3.27145569e-02 -4.56933044e-02 -1.48149952e-01 2.52397746e-01 4.67105240e-01 6.37995005e-01 2.23313063e-01 8.54659379e-02 -2.42105797e-01 1.57883003e-01 1.88912466e-01 3.61653596e-01 3.34198833e-01 -1.54124305e-01 -2.60942549e-01] [ 1.67494323e-02 -1.31733352e-02 -1.17320284e-01 
8.65461528e-02 2.87374884e-01 7.37075031e-01 2.55815446e-01 1.88790858e-01 -1.57956079e-01 3.32902163e-01 3.79601121e-01 5.22440016e-01 1.81770161e-01 -4.35301811e-01 -7.23199904e-01] [-6.66518435e-02 1.78751007e-01 1.93353698e-01 4.27638203e-01 5.48894227e-01 1.16047549e+00 7.86911964e-01 6.82763338e-01 2.02170447e-01 1.61811411e-01 1.12256557e-01 2.61807553e-02 1.52664587e-01 -5.68939269e-01 -6.54303193e-01] [-2.56388336e-01 -1.83012828e-01 8.31374377e-02 2.66134113e-01 6.55860543e-01 8.79325807e-01 6.93938792e-01 4.42889631e-01 2.56706119e-01 -1.52390569e-01 -1.22283138e-01 -1.27523556e-01 2.80661255e-01 -2.43323773e-01 -3.05849522e-01] [ 1.30084828e-01 5.83227091e-02 3.18437666e-02 1.89134106e-01 4.59331870e-01 4.05444205e-01 5.30353725e-01 1.93810701e-01 1.82827875e-01 -5.50233305e-01 -4.75966185e-01 -3.78148198e-01 2.04275876e-01 -2.56833792e-01 -4.36127186e-01] [ 3.23352545e-01 1.01170681e-01 -6.55280799e-02 -1.37854684e-02 4.02368873e-01 4.63186651e-02 4.51314926e-01 -7.80029502e-03 3.25674504e-01 -2.83885360e-01 -3.60643059e-01 -8.94819647e-02 -9.62070003e-02 1.13617979e-01 -1.34842172e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [1, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5679.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[1, 0]]() %6 : int[] = prim::Constant[value=[1, 1]]() %7 : int[] = prim::Constant[value=[3, 3]]() %8 : Tensor = aten::avg_pool2d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%8) fw_re: [[[[ 9.48651414e-03 4.55367804e-01 1.95439115e-01 -1.73194408e-01 -3.20845604e-01 -7.11841956e-02 2.74621576e-01 4.96657819e-01 2.89307088e-01 4.38224345e-01 2.92281002e-01 4.32506949e-01 1.36000648e-01] [-2.42566213e-01 1.89444244e-01 1.68138430e-01 3.57039124e-02 -1.82003379e-01 -4.67487127e-02 1.38464957e-01 2.81511396e-01 3.04937363e-03 2.84107208e-01 1.09068483e-01 4.42764550e-01 1.37871712e-01] [-3.94941986e-01 -5.44627547e-01 -1.78681359e-01 2.20689178e-03 2.06227913e-01 -3.18091184e-01 -4.93232310e-01 -4.13828373e-01 -1.31735429e-01 2.41334438e-01 4.19833288e-02 3.26633155e-01 3.64887714e-03] [-6.48832560e-01 -5.59335828e-01 -1.40928268e-01 -9.05980766e-02 -1.76370069e-01 -4.40972209e-01 -4.54728246e-01 -2.54725903e-01 -5.85694574e-02 1.17020212e-01 1.19973123e-02 -1.04979742e-02 -1.46554187e-01] [-2.80544490e-01 -4.61312026e-01 -4.28704888e-01 -5.95590234e-01 -5.13887405e-01 -3.99853915e-01 -3.67063016e-01 -2.30639219e-01 -1.12970814e-01 -1.58851191e-01 -1.16148286e-01 -4.14492011e-01 -2.42536858e-01] [ 5.39713837e-02 1.83717042e-01 -6.34095445e-02 -4.86279219e-01 -8.20344031e-01 -2.59801537e-01 3.28961983e-02 9.47219506e-02 -2.16043755e-01 -3.19450498e-01 -2.02919841e-02 -4.37684745e-01 -2.54252851e-01] [-3.87752414e-01 1.59589753e-01 
-7.24762529e-02 -2.29890928e-01 -7.38800287e-01 -3.32913250e-02 1.93271980e-01 1.78386986e-01 -3.67680609e-01 -5.07756650e-01 -6.85472786e-02 -2.30198890e-01 -1.80307806e-01] [-6.29918158e-01 3.10386479e-01 3.70278299e-01 4.66178358e-01 -2.98134744e-01 3.89999002e-02 3.78060699e-01 6.06308699e-01 2.07567185e-01 -7.33358711e-02 2.25487068e-01 7.68311396e-02 -1.95276842e-01] [-8.71256113e-01 -1.96180761e-01 -1.91566974e-01 2.52224803e-01 -2.18136415e-01 1.71239957e-01 5.45284092e-01 6.38378084e-01 2.36424923e-01 -2.98220783e-01 5.71437217e-02 -5.71450070e-02 -4.97642606e-02] [-1.11388735e-01 -1.13905653e-01 -7.87754953e-02 3.42248797e-01 4.30777259e-02 3.60565707e-02 2.97325343e-01 1.98321328e-01 -4.70910221e-02 -3.14821362e-01 1.32368028e-01 2.20022276e-02 5.01535684e-02] [-1.37435030e-02 -2.12589025e-01 -3.78863931e-01 -2.31232177e-02 -2.93951988e-01 -3.37068677e-01 -3.35646629e-01 -4.66400623e-01 -6.01498485e-01 -5.33106029e-01 1.84837610e-01 7.42692947e-02 1.56089500e-01] [ 2.98735768e-01 4.59498614e-02 -3.07762891e-01 -2.21794263e-01 -4.11072850e-01 -5.47291100e-01 -5.16941786e-01 -6.82489216e-01 -4.24708247e-01 -3.46941799e-01 3.99258763e-01 6.82577416e-02 1.16228789e-01] [ 6.74057603e-02 1.81735948e-01 -2.50142515e-01 -2.20060289e-01 -5.15643358e-01 -5.26759863e-01 -5.72134137e-01 -7.30732024e-01 -2.66906261e-01 -3.68334889e-01 2.20504507e-01 -2.51409680e-01 -2.00135149e-02] [ 2.33030289e-01 1.99094906e-01 -2.71856666e-01 -2.57236600e-01 -3.99405032e-01 -2.73997426e-01 -2.58578181e-01 -5.75536311e-01 -2.89554834e-01 -6.95854902e-01 -2.39786968e-01 -5.35180092e-01 -1.51630247e-03] [ 2.59040833e-01 2.38316298e-01 -1.70639470e-01 -1.05421178e-01 -3.62988442e-01 -2.61372864e-01 -4.45330739e-01 -6.11503780e-01 -6.39167964e-01 -9.02400434e-01 -6.52591050e-01 -5.01543224e-01 1.73855219e-02]] [[ 1.58905208e-01 2.60610282e-01 1.01315655e-01 2.81585097e-01 -1.28470063e-02 -3.03989440e-01 -6.31817162e-01 -3.27130795e-01 3.79166871e-01 5.64695358e-01 5.65938711e-01 
2.72141427e-01 1.75931260e-01] [ 6.21773079e-02 3.30612719e-01 2.01137677e-01 2.35973492e-01 5.52589148e-02 -7.65872970e-02 -1.37965828e-01 -1.20227918e-01 3.59136432e-01 4.01242971e-01 3.36513996e-01 4.31489497e-02 -5.43897897e-02] [ 2.60466069e-01 3.10011387e-01 1.66548620e-04 -3.02790664e-02 -1.97723806e-01 -1.66124359e-01 1.02667160e-01 1.30963758e-01 3.07026595e-01 -4.74479608e-03 -3.84262353e-02 -1.63534805e-01 -5.63156344e-02] [ 4.07297969e-01 3.14258993e-01 -7.22344071e-02 -2.37799138e-01 -4.25265916e-03 -1.19182855e-01 -9.29064211e-03 -2.37343654e-01 -2.10514754e-01 -5.98720834e-02 -1.04232877e-01 1.17263354e-01 -2.17657506e-01] [ 1.59725845e-01 -2.32331112e-01 -5.38232028e-01 -6.13411665e-01 -2.93246508e-01 -3.37428629e-01 -9.19586793e-02 -9.32072327e-02 -2.94841766e-01 -2.36647949e-01 -1.46121472e-01 1.55126810e-01 -1.35031804e-01] [-1.63121119e-01 -2.34948054e-01 -1.88486412e-01 -3.14919680e-01 -3.65000188e-01 -4.70309049e-01 -1.35780394e-01 1.02974862e-01 -2.11019576e-01 -1.94955856e-01 -2.56801099e-01 -9.16834176e-02 -2.47062609e-01] [-7.46708035e-01 -7.35018492e-01 -4.35606062e-01 -6.54977083e-01 -5.57660103e-01 -4.34703141e-01 6.23820499e-02 3.31201524e-01 1.00160867e-01 -4.40841883e-01 -4.28136945e-01 -6.78888917e-01 -1.80777133e-01] [-6.82855785e-01 -3.35357517e-01 -2.52511799e-01 -2.79674113e-01 -3.36216688e-01 -2.06065103e-01 -1.94621310e-02 1.83471322e-01 9.40563604e-02 -1.53546914e-01 -9.25158635e-02 -3.06101143e-01 1.15883853e-02] [-5.81882954e-01 -2.31409609e-01 -3.03721577e-01 -1.72873259e-01 -1.38648346e-01 -3.09817982e-03 -1.12168826e-01 -6.32101148e-02 -3.29211950e-02 -2.05101073e-01 -1.06133565e-01 -4.00690258e-01 -1.19708352e-01] [-2.56726503e-01 -2.26771772e-01 -3.52102369e-01 -2.45723411e-01 -2.87191212e-01 -1.85896307e-01 -2.57884979e-01 -2.21574575e-01 -1.27005875e-01 -6.30223453e-02 -2.19573509e-02 -2.11919710e-01 -2.77975589e-01] [-6.61786422e-02 -5.26030660e-01 -4.66159880e-01 -5.02203524e-01 -5.60132742e-01 -4.85287070e-01 
-3.38158399e-01 -6.57149702e-02 5.39511405e-02 -1.70533687e-01 -3.49236667e-01 -3.92087042e-01 -3.81395757e-01] [ 2.53637195e-01 -1.55452430e-01 -1.17967315e-01 -2.39385843e-01 -5.78585505e-01 -6.61845028e-01 -2.57917762e-01 7.01758042e-02 8.31173509e-02 -2.25938246e-01 -5.46509981e-01 -1.58038616e-01 -1.46392152e-01] [ 1.92778498e-01 2.67103940e-01 2.08754510e-01 9.72822234e-02 -5.39360225e-01 -6.23727798e-01 -3.13797027e-01 -5.20976521e-02 -1.40714079e-01 -3.89489144e-01 -5.24047017e-01 -1.09055080e-01 -9.65336859e-02] [ 1.58427656e-01 4.14234489e-01 5.09097993e-01 3.28706622e-01 -1.33796841e-01 -5.09702921e-01 -4.71085340e-01 -5.31445205e-01 -4.08155829e-01 -8.00545931e-01 -8.01263273e-01 -5.67111850e-01 -1.58988297e-01] [-4.22172844e-02 8.50545242e-02 3.23027402e-01 1.44125864e-01 6.58542290e-02 -3.65144223e-01 -5.95243156e-01 -8.39674890e-01 -6.44912899e-01 -1.01787472e+00 -7.73817241e-01 -7.16725409e-01 -2.74815291e-01]] [[-3.74030918e-01 1.33853734e-01 3.86832803e-01 7.31339455e-02 -5.04593551e-02 -1.72415718e-01 1.13112509e-01 -2.00994834e-01 -2.89627701e-01 -2.59202033e-01 -2.60037929e-01 -3.79540592e-01 9.10767540e-02] [-4.21721451e-02 -1.60274908e-01 -1.60202868e-02 -1.41692951e-01 4.36455384e-02 -1.06619783e-01 2.60057569e-01 1.32061735e-01 1.55235231e-01 -1.58719838e-01 -1.56756118e-01 -3.08768749e-01 3.02037030e-01] [ 3.34092945e-01 -1.40105054e-01 -1.49275959e-01 -2.92263240e-01 9.27538332e-03 2.44365801e-04 4.32507843e-01 4.56531167e-01 2.28237525e-01 -2.19894171e-01 -2.27126047e-01 -2.03850716e-01 -2.66291294e-02] [ 2.75787145e-01 -3.22629154e-01 -4.52253699e-01 -1.96445361e-01 -4.59290296e-02 2.56538391e-01 2.84507573e-01 6.15593672e-01 3.17768008e-02 -2.40653723e-01 -3.63889098e-01 1.26530975e-02 2.13344842e-01] [-1.72272027e-02 -1.50964200e-01 -1.31381363e-01 -1.48611322e-01 -6.04532547e-02 3.26785535e-01 3.43460411e-01 4.92532611e-01 -2.68765628e-01 -1.12878770e-01 -3.38410944e-01 1.26765251e-01 8.86830911e-02] [ 1.20065242e-01 3.80964503e-02 
-1.07277125e-01 -1.92889702e-02 2.03847528e-01 1.64987147e-01 4.87633273e-02 -1.83100835e-03 -3.49635750e-01 -1.55828416e-01 -1.70156434e-01 -3.65229696e-02 2.90971816e-01] [ 5.32476455e-02 1.74917921e-01 1.17024817e-01 1.87173545e-01 3.01210463e-01 -1.81565687e-04 -1.75541863e-01 -3.17266107e-01 -1.23989426e-01 6.09520674e-02 1.73013225e-01 -1.55092046e-01 -2.74033546e-02] [ 7.06473738e-02 1.30923778e-01 -2.42404174e-02 1.35650396e-01 1.98990375e-01 -3.99180055e-01 -6.03571653e-01 -8.47764850e-01 -4.77636546e-01 -1.44132078e-01 3.23663294e-01 1.28564565e-03 2.36978270e-02] [ 6.43510791e-03 9.47271753e-03 -1.25774026e-01 -1.72761455e-01 -1.59446582e-01 -4.31631833e-01 -4.90596890e-01 -5.26752472e-01 -3.42786074e-01 1.90144390e-01 3.32072079e-01 1.64924935e-01 -7.77333081e-02] [ 2.52849460e-01 9.26423967e-02 3.15144248e-02 -1.63701162e-01 -1.19822688e-01 -5.11054575e-01 -4.03939128e-01 -6.45276785e-01 -5.88337541e-01 -2.20407918e-01 2.13219866e-01 1.78095475e-01 3.20478350e-01] [ 4.99877930e-01 3.54793072e-01 4.55117077e-02 -9.79211330e-02 -3.35146397e-01 -3.39582503e-01 -1.90424174e-01 -3.86841774e-01 -3.04277062e-01 -4.40939099e-01 -3.71627212e-02 -2.25565255e-01 -8.24514627e-02] [ 4.40860718e-01 1.87385395e-01 1.30810261e-01 -1.72176763e-01 -4.24230695e-01 -3.61478746e-01 3.73161491e-03 -2.81449735e-01 -1.69202864e-01 -5.62021852e-01 -8.81934091e-02 -4.66775298e-01 -2.16088369e-01] [-3.90504450e-02 6.97338581e-02 1.77014619e-01 -1.36737391e-01 -5.40672779e-01 -4.97710347e-01 1.25552624e-01 1.88488513e-01 3.87807399e-01 -2.07940847e-01 -1.57262400e-01 -3.67527127e-01 -1.24572784e-01] [ 4.10263278e-02 1.20409288e-01 3.43193561e-01 -9.81659442e-02 -2.50167727e-01 -3.28763187e-01 2.85203662e-02 3.65231782e-01 4.24883634e-01 1.34164065e-01 -2.48998374e-01 -4.27897394e-01 -2.11327448e-01] [-4.52358723e-02 3.20929706e-01 5.15649915e-01 1.46150023e-01 -1.00759529e-01 -3.44892770e-01 -9.02152881e-02 3.78230065e-01 4.77476209e-01 1.69141889e-01 -3.40526491e-01 
-3.82300496e-01 -2.24402249e-01]]]]; ov_res: [[[[ 9.48651414e-03 4.55367804e-01 1.95439115e-01 -1.73194408e-01 -3.20845604e-01 -7.11841956e-02 2.74621576e-01 4.96657819e-01 2.89307088e-01 4.38224345e-01 2.92281002e-01 4.32506949e-01 1.36000648e-01] [-2.42566213e-01 1.89444244e-01 1.68138430e-01 3.57039124e-02 -1.82003379e-01 -4.67487127e-02 1.38464957e-01 2.81511396e-01 3.04937363e-03 2.84107208e-01 1.09068483e-01 4.42764550e-01 1.37871712e-01] [-3.94941986e-01 -5.44627547e-01 -1.78681359e-01 2.20689178e-03 2.06227913e-01 -3.18091184e-01 -4.93232310e-01 -4.13828373e-01 -1.31735429e-01 2.41334438e-01 4.19833288e-02 3.26633155e-01 3.64887714e-03] [-6.48832560e-01 -5.59335828e-01 -1.40928268e-01 -9.05980766e-02 -1.76370069e-01 -4.40972209e-01 -4.54728246e-01 -2.54725903e-01 -5.85694574e-02 1.17020212e-01 1.19973123e-02 -1.04979742e-02 -1.46554187e-01] [-2.80544490e-01 -4.61312026e-01 -4.28704888e-01 -5.95590234e-01 -5.13887405e-01 -3.99853915e-01 -3.67063016e-01 -2.30639219e-01 -1.12970814e-01 -1.58851191e-01 -1.16148286e-01 -4.14492011e-01 -2.42536858e-01] [ 5.39713837e-02 1.83717042e-01 -6.34095445e-02 -4.86279219e-01 -8.20344031e-01 -2.59801537e-01 3.28961983e-02 9.47219506e-02 -2.16043755e-01 -3.19450498e-01 -2.02919841e-02 -4.37684745e-01 -2.54252851e-01] [-3.87752414e-01 1.59589753e-01 -7.24762529e-02 -2.29890928e-01 -7.38800287e-01 -3.32913250e-02 1.93271980e-01 1.78386986e-01 -3.67680609e-01 -5.07756650e-01 -6.85472786e-02 -2.30198890e-01 -1.80307806e-01] [-6.29918158e-01 3.10386479e-01 3.70278299e-01 4.66178358e-01 -2.98134744e-01 3.89999002e-02 3.78060699e-01 6.06308699e-01 2.07567185e-01 -7.33358711e-02 2.25487068e-01 7.68311396e-02 -1.95276842e-01] [-8.71256113e-01 -1.96180761e-01 -1.91566974e-01 2.52224803e-01 -2.18136415e-01 1.71239957e-01 5.45284092e-01 6.38378084e-01 2.36424923e-01 -2.98220783e-01 5.71437217e-02 -5.71450070e-02 -4.97642606e-02] [-1.11388735e-01 -1.13905653e-01 -7.87754953e-02 3.42248797e-01 4.30777259e-02 3.60565707e-02 2.97325343e-01 
1.98321328e-01 -4.70910221e-02 -3.14821362e-01 1.32368028e-01 2.20022276e-02 5.01535684e-02] [-1.37435030e-02 -2.12589025e-01 -3.78863931e-01 -2.31232177e-02 -2.93951988e-01 -3.37068677e-01 -3.35646629e-01 -4.66400623e-01 -6.01498485e-01 -5.33106029e-01 1.84837610e-01 7.42692947e-02 1.56089500e-01] [ 2.98735768e-01 4.59498614e-02 -3.07762891e-01 -2.21794263e-01 -4.11072850e-01 -5.47291100e-01 -5.16941786e-01 -6.82489216e-01 -4.24708247e-01 -3.46941799e-01 3.99258763e-01 6.82577416e-02 1.16228789e-01] [ 6.74057603e-02 1.81735948e-01 -2.50142515e-01 -2.20060289e-01 -5.15643358e-01 -5.26759863e-01 -5.72134137e-01 -7.30732024e-01 -2.66906261e-01 -3.68334889e-01 2.20504507e-01 -2.51409680e-01 -2.00135149e-02] [ 2.33030289e-01 1.99094906e-01 -2.71856666e-01 -2.57236600e-01 -3.99405032e-01 -2.73997426e-01 -2.58578181e-01 -5.75536311e-01 -2.89554834e-01 -6.95854902e-01 -2.39786968e-01 -5.35180092e-01 -1.51630247e-03] [ 2.59040833e-01 2.38316298e-01 -1.70639470e-01 -1.05421178e-01 -3.62988442e-01 -2.61372864e-01 -4.45330739e-01 -6.11503780e-01 -6.39167964e-01 -9.02400434e-01 -6.52591050e-01 -5.01543224e-01 1.73855219e-02]] [[ 1.58905208e-01 2.60610282e-01 1.01315655e-01 2.81585097e-01 -1.28470063e-02 -3.03989440e-01 -6.31817162e-01 -3.27130795e-01 3.79166871e-01 5.64695358e-01 5.65938711e-01 2.72141427e-01 1.75931260e-01] [ 6.21773079e-02 3.30612719e-01 2.01137677e-01 2.35973492e-01 5.52589148e-02 -7.65872970e-02 -1.37965828e-01 -1.20227918e-01 3.59136432e-01 4.01242971e-01 3.36513996e-01 4.31489497e-02 -5.43897897e-02] [ 2.60466069e-01 3.10011387e-01 1.66548620e-04 -3.02790664e-02 -1.97723806e-01 -1.66124359e-01 1.02667160e-01 1.30963758e-01 3.07026595e-01 -4.74479608e-03 -3.84262353e-02 -1.63534805e-01 -5.63156344e-02] [ 4.07297969e-01 3.14258993e-01 -7.22344071e-02 -2.37799138e-01 -4.25265916e-03 -1.19182855e-01 -9.29064211e-03 -2.37343654e-01 -2.10514754e-01 -5.98720834e-02 -1.04232877e-01 1.17263354e-01 -2.17657506e-01] [ 1.59725845e-01 -2.32331112e-01 -5.38232028e-01 
-6.13411665e-01 -2.93246508e-01 -3.37428629e-01 -9.19586793e-02 -9.32072327e-02 -2.94841766e-01 -2.36647949e-01 -1.46121472e-01 1.55126810e-01 -1.35031804e-01] [-1.63121119e-01 -2.34948054e-01 -1.88486412e-01 -3.14919680e-01 -3.65000188e-01 -4.70309049e-01 -1.35780394e-01 1.02974862e-01 -2.11019576e-01 -1.94955856e-01 -2.56801099e-01 -9.16834176e-02 -2.47062609e-01] [-7.46708035e-01 -7.35018492e-01 -4.35606062e-01 -6.54977083e-01 -5.57660103e-01 -4.34703141e-01 6.23820499e-02 3.31201524e-01 1.00160867e-01 -4.40841883e-01 -4.28136945e-01 -6.78888917e-01 -1.80777133e-01] [-6.82855785e-01 -3.35357517e-01 -2.52511799e-01 -2.79674113e-01 -3.36216688e-01 -2.06065103e-01 -1.94621310e-02 1.83471322e-01 9.40563604e-02 -1.53546914e-01 -9.25158635e-02 -3.06101143e-01 1.15883853e-02] [-5.81882954e-01 -2.31409609e-01 -3.03721577e-01 -1.72873259e-01 -1.38648346e-01 -3.09817982e-03 -1.12168826e-01 -6.32101148e-02 -3.29211950e-02 -2.05101073e-01 -1.06133565e-01 -4.00690258e-01 -1.19708352e-01] [-2.56726503e-01 -2.26771772e-01 -3.52102369e-01 -2.45723411e-01 -2.87191212e-01 -1.85896307e-01 -2.57884979e-01 -2.21574575e-01 -1.27005875e-01 -6.30223453e-02 -2.19573509e-02 -2.11919710e-01 -2.77975589e-01] [-6.61786422e-02 -5.26030660e-01 -4.66159880e-01 -5.02203524e-01 -5.60132742e-01 -4.85287070e-01 -3.38158399e-01 -6.57149702e-02 5.39511405e-02 -1.70533687e-01 -3.49236667e-01 -3.92087042e-01 -3.81395757e-01] [ 2.53637195e-01 -1.55452430e-01 -1.17967315e-01 -2.39385843e-01 -5.78585505e-01 -6.61845028e-01 -2.57917762e-01 7.01758042e-02 8.31173509e-02 -2.25938246e-01 -5.46509981e-01 -1.58038616e-01 -1.46392152e-01] [ 1.92778498e-01 2.67103940e-01 2.08754510e-01 9.72822234e-02 -5.39360225e-01 -6.23727798e-01 -3.13797027e-01 -5.20976521e-02 -1.40714079e-01 -3.89489144e-01 -5.24047017e-01 -1.09055080e-01 -9.65336859e-02] [ 1.58427656e-01 4.14234489e-01 5.09097993e-01 3.28706622e-01 -1.33796841e-01 -5.09702921e-01 -4.71085340e-01 -5.31445205e-01 -4.08155829e-01 -8.00545931e-01 
-8.01263273e-01 -5.67111850e-01 -1.58988297e-01] [-4.22172844e-02 8.50545242e-02 3.23027402e-01 1.44125864e-01 6.58542290e-02 -3.65144223e-01 -5.95243156e-01 -8.39674890e-01 -6.44912899e-01 -1.01787472e+00 -7.73817241e-01 -7.16725409e-01 -2.74815291e-01]] [[-3.74030918e-01 1.33853734e-01 3.86832803e-01 7.31339455e-02 -5.04593551e-02 -1.72415718e-01 1.13112509e-01 -2.00994834e-01 -2.89627701e-01 -2.59202033e-01 -2.60037929e-01 -3.79540592e-01 9.10767540e-02] [-4.21721451e-02 -1.60274908e-01 -1.60202868e-02 -1.41692951e-01 4.36455384e-02 -1.06619783e-01 2.60057569e-01 1.32061735e-01 1.55235231e-01 -1.58719838e-01 -1.56756118e-01 -3.08768749e-01 3.02037030e-01] [ 3.34092945e-01 -1.40105054e-01 -1.49275959e-01 -2.92263240e-01 9.27538332e-03 2.44365801e-04 4.32507843e-01 4.56531167e-01 2.28237525e-01 -2.19894171e-01 -2.27126047e-01 -2.03850716e-01 -2.66291294e-02] [ 2.75787145e-01 -3.22629154e-01 -4.52253699e-01 -1.96445361e-01 -4.59290296e-02 2.56538391e-01 2.84507573e-01 6.15593672e-01 3.17768008e-02 -2.40653723e-01 -3.63889098e-01 1.26530975e-02 2.13344842e-01] [-1.72272027e-02 -1.50964200e-01 -1.31381363e-01 -1.48611322e-01 -6.04532547e-02 3.26785535e-01 3.43460411e-01 4.92532611e-01 -2.68765628e-01 -1.12878770e-01 -3.38410944e-01 1.26765251e-01 8.86830911e-02] [ 1.20065242e-01 3.80964503e-02 -1.07277125e-01 -1.92889702e-02 2.03847528e-01 1.64987147e-01 4.87633273e-02 -1.83100835e-03 -3.49635750e-01 -1.55828416e-01 -1.70156434e-01 -3.65229696e-02 2.90971816e-01] [ 5.32476455e-02 1.74917921e-01 1.17024817e-01 1.87173545e-01 3.01210463e-01 -1.81565687e-04 -1.75541863e-01 -3.17266107e-01 -1.23989426e-01 6.09520674e-02 1.73013225e-01 -1.55092046e-01 -2.74033546e-02] [ 7.06473738e-02 1.30923778e-01 -2.42404174e-02 1.35650396e-01 1.98990375e-01 -3.99180055e-01 -6.03571653e-01 -8.47764850e-01 -4.77636546e-01 -1.44132078e-01 3.23663294e-01 1.28564565e-03 2.36978270e-02] [ 6.43510791e-03 9.47271753e-03 -1.25774026e-01 -1.72761455e-01 -1.59446582e-01 -4.31631833e-01 
-4.90596890e-01 -5.26752472e-01 -3.42786074e-01 1.90144390e-01 3.32072079e-01 1.64924935e-01 -7.77333081e-02] [ 2.52849460e-01 9.26423967e-02 3.15144248e-02 -1.63701162e-01 -1.19822688e-01 -5.11054575e-01 -4.03939128e-01 -6.45276785e-01 -5.88337541e-01 -2.20407918e-01 2.13219866e-01 1.78095475e-01 3.20478350e-01] [ 4.99877930e-01 3.54793072e-01 4.55117077e-02 -9.79211330e-02 -3.35146397e-01 -3.39582503e-01 -1.90424174e-01 -3.86841774e-01 -3.04277062e-01 -4.40939099e-01 -3.71627212e-02 -2.25565255e-01 -8.24514627e-02] [ 4.40860718e-01 1.87385395e-01 1.30810261e-01 -1.72176763e-01 -4.24230695e-01 -3.61478746e-01 3.73161491e-03 -2.81449735e-01 -1.69202864e-01 -5.62021852e-01 -8.81934091e-02 -4.66775298e-01 -2.16088369e-01] [-3.90504450e-02 6.97338581e-02 1.77014619e-01 -1.36737391e-01 -5.40672779e-01 -4.97710347e-01 1.25552624e-01 1.88488513e-01 3.87807399e-01 -2.07940847e-01 -1.57262400e-01 -3.67527127e-01 -1.24572784e-01] [ 4.10263278e-02 1.20409288e-01 3.43193561e-01 -9.81659442e-02 -2.50167727e-01 -3.28763187e-01 2.85203662e-02 3.65231782e-01 4.24883634e-01 1.34164065e-01 -2.48998374e-01 -4.27897394e-01 -2.11327448e-01] [-4.52358723e-02 3.20929706e-01 5.15649915e-01 1.46150023e-01 -1.00759529e-01 -3.44892770e-01 -9.02152881e-02 3.78230065e-01 4.77476209e-01 1.69141889e-01 -3.40526491e-01 -3.82300496e-01 -2.24402249e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5682.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[0, 0]]() %6 : int[] = prim::Constant[value=[2, 1]]() %7 : int[] = prim::Constant[value=[3, 3]]() %8 : Tensor = aten::avg_pool2d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%8) fw_re: [[[[ 0.16359259 0.11415863 -0.3361491 -0.17945841 -0.17718539 0.20928262 0.10936197 -0.01485601 -0.22793947 -0.39390096 -0.22309245 -0.19380271 0.00613816] [ 0.32539713 0.5135353 0.11212054 -0.07694802 0.02129349 -0.42723238 -0.1437512 0.05821101 0.50359386 0.2309236 -0.01773041 -0.04674694 -0.29514122] [ 0.4334912 0.2713683 0.41794074 0.1904648 0.30141535 -0.22096868 0.03637066 -0.21767196 0.22545692 0.06577212 -0.01809004 -0.2882439 -0.38727736] [-0.39369804 -0.38396692 -0.10990094 -0.15604255 0.18952666 -0.13644439 -0.25105464 -0.5247399 -0.13296632 0.08091287 0.03002176 -0.5297499 -0.6077744 ] [-0.2847522 -0.1472418 0.07228573 0.18057114 0.39416474 0.27948382 0.021668 -0.40065932 -0.3827726 -0.07012703 -0.00440537 -0.10346207 -0.15202837] [-0.09059405 -0.39831734 -0.2754394 0.10113253 0.19945513 0.05169541 0.0203814 0.10931312 0.14743257 -0.03143785 -0.18112816 -0.7663412 -0.36803225] [ 0.08450598 -0.24259087 -0.20568506 0.264726 0.5731413 0.26445553 0.20624995 0.30605307 0.59738463 0.01701774 -0.1654625 -0.30452573 0.00773599]] [[ 0.2966712 -0.01144793 -0.03148922 -0.09572775 0.16406073 0.32908583 0.13156435 0.161089 -0.1549128 0.02786531 -0.17132908 0.0059365 0.00507712] [-0.04615526 -0.02222524 -0.07957923 -0.22672139 -0.33959356 -0.4665282 -0.5216945 
-0.05504382 0.04859828 0.00336637 -0.426772 -0.43330076 0.03893846] [-0.3946583 -0.23649938 -0.01234115 0.20525093 -0.30879384 -0.6266301 -0.56790775 0.11596248 0.26273483 0.30506977 0.32345015 0.28439674 0.29561588] [ 0.2186486 0.14939457 0.8017608 0.6939406 0.4959718 0.07322539 -0.08706168 -0.2483982 -0.28662315 0.14200456 0.7728503 0.73701155 0.59950674] [-0.13171631 -0.17954934 0.2733253 0.5135503 0.49618953 0.2379931 0.05654182 -0.49355662 -0.21208367 -0.3900979 0.200365 0.01787074 0.410554 ] [ 0.8034795 0.8763504 0.7529532 0.5235638 0.3232033 0.18385673 0.315824 0.02887474 -0.05194011 -0.1483305 0.11572414 -0.20540375 -0.5766432 ] [-0.11485147 0.23540534 0.39576495 0.02245955 -0.12047429 0.04090969 0.6029502 0.5006879 0.07163209 -0.0388693 -0.20445745 -0.2809813 -0.39124286]] [[ 0.6724663 0.3332728 0.07512865 0.05251626 0.2669133 0.3429738 0.07494729 -0.2716029 -0.5153967 -0.7224824 -0.20615067 0.2992264 0.57891154] [ 0.43863627 0.36041942 0.11107609 0.09442648 -0.01992209 0.20910233 0.3728956 0.27635753 0.08690847 -0.41976166 -0.05352191 0.01839909 0.19874714] [ 0.33000267 -0.0077043 0.01867207 -0.22037241 0.05993023 -0.3529379 0.05433209 -0.0483239 0.46504545 0.20965403 0.6212337 0.36417657 0.29080933] [ 0.31862497 0.23892143 0.51686966 0.46152604 0.4376013 -0.20999005 -0.35563856 -0.51795524 0.07261126 -0.00962385 0.45790026 -0.0405788 0.16122876] [ 0.4315449 0.5053028 0.4532585 0.28027213 -0.22284794 -0.4339031 -0.41722095 -0.42098352 -0.20342322 0.1602506 0.30914938 0.29092458 -0.10097424] [-0.15001166 0.07198481 0.75354147 0.7858702 0.41585636 -0.00267607 0.02590462 0.2967013 -0.00128397 0.3368253 0.40562698 0.9825044 0.18337972] [-0.04919952 -0.30707797 -0.05481632 0.48712057 0.4504107 0.28798592 0.27213776 0.6749542 0.663093 0.5880969 0.3899077 0.54128486 0.2337887 ]]]]; ov_res: [[[[ 0.16359259 0.11415863 -0.3361491 -0.17945841 -0.17718539 0.20928262 0.10936197 -0.01485601 -0.22793947 -0.39390096 -0.22309245 -0.19380271 0.00613816] [ 0.32539713 
0.5135353 0.11212054 -0.07694802 0.02129349 -0.42723238 -0.1437512 0.05821101 0.50359386 0.2309236 -0.01773041 -0.04674694 -0.29514122] [ 0.4334912 0.2713683 0.41794074 0.1904648 0.30141535 -0.22096868 0.03637066 -0.21767196 0.22545692 0.06577212 -0.01809004 -0.2882439 -0.38727736] [-0.39369804 -0.38396692 -0.10990094 -0.15604255 0.18952666 -0.13644439 -0.25105464 -0.5247399 -0.13296632 0.08091287 0.03002176 -0.5297499 -0.6077744 ] [-0.2847522 -0.1472418 0.07228573 0.18057114 0.39416474 0.27948382 0.021668 -0.40065932 -0.3827726 -0.07012703 -0.00440537 -0.10346207 -0.15202837] [-0.09059405 -0.39831734 -0.2754394 0.10113253 0.19945513 0.05169541 0.0203814 0.10931312 0.14743257 -0.03143785 -0.18112816 -0.7663412 -0.36803225] [ 0.08450598 -0.24259087 -0.20568506 0.264726 0.5731413 0.26445553 0.20624995 0.30605307 0.59738463 0.01701774 -0.1654625 -0.30452573 0.00773599]] [[ 0.2966712 -0.01144793 -0.03148922 -0.09572775 0.16406073 0.32908583 0.13156435 0.161089 -0.1549128 0.02786531 -0.17132908 0.0059365 0.00507712] [-0.04615526 -0.02222524 -0.07957923 -0.22672139 -0.33959356 -0.4665282 -0.5216945 -0.05504382 0.04859828 0.00336637 -0.426772 -0.43330076 0.03893846] [-0.3946583 -0.23649938 -0.01234115 0.20525093 -0.30879384 -0.6266301 -0.56790775 0.11596248 0.26273483 0.30506977 0.32345015 0.28439674 0.29561588] [ 0.2186486 0.14939457 0.8017608 0.6939406 0.4959718 0.07322539 -0.08706168 -0.2483982 -0.28662315 0.14200456 0.7728503 0.73701155 0.59950674] [-0.13171631 -0.17954934 0.2733253 0.5135503 0.49618953 0.2379931 0.05654182 -0.49355662 -0.21208367 -0.3900979 0.200365 0.01787074 0.410554 ] [ 0.8034795 0.8763504 0.7529532 0.5235638 0.3232033 0.18385673 0.315824 0.02887474 -0.05194011 -0.1483305 0.11572414 -0.20540375 -0.5766432 ] [-0.11485147 0.23540534 0.39576495 0.02245955 -0.12047429 0.04090969 0.6029502 0.5006879 0.07163209 -0.0388693 -0.20445745 -0.2809813 -0.39124286]] [[ 0.6724663 0.3332728 0.07512865 0.05251626 0.2669133 0.3429738 0.07494729 -0.2716029 
-0.5153967 -0.7224824 -0.20615067 0.2992264 0.57891154] [ 0.43863627 0.36041942 0.11107609 0.09442648 -0.01992209 0.20910233 0.3728956 0.27635753 0.08690847 -0.41976166 -0.05352191 0.01839909 0.19874714] [ 0.33000267 -0.0077043 0.01867207 -0.22037241 0.05993023 -0.3529379 0.05433209 -0.0483239 0.46504545 0.20965403 0.6212337 0.36417657 0.29080933] [ 0.31862497 0.23892143 0.51686966 0.46152604 0.4376013 -0.20999005 -0.35563856 -0.51795524 0.07261126 -0.00962385 0.45790026 -0.0405788 0.16122876] [ 0.4315449 0.5053028 0.4532585 0.28027213 -0.22284794 -0.4339031 -0.41722095 -0.42098352 -0.20342322 0.1602506 0.30914938 0.29092458 -0.10097424] [-0.15001166 0.07198481 0.75354147 0.7858702 0.41585636 -0.00267607 0.02590462 0.2967013 -0.00128397 0.3368253 0.40562698 0.9825044 0.18337972] [-0.04919952 -0.30707797 -0.05481632 0.48712057 0.4504107 0.28798592 0.27213776 0.6749542 0.663093 0.5880969 0.3899077 0.54128486 0.2337887 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': [2, 1], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5685.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %5 : int[] = prim::Constant[value=[0, 0]]() %6 : int[] = prim::Constant[value=[2, 1]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) fw_re: [[[[-8.23354721e-01 -9.95093584e-01 1.24626905e-02 -7.65104055e-01 -5.49392164e-01 8.15329313e-01 -5.36937952e-01 -8.33626509e-01 -7.07287192e-02 -7.87580609e-02 -3.28017086e-01 5.29224336e-01 2.34485173e+00 6.90731287e-01 4.94636983e-01] [ 4.01236087e-01 -5.88883281e-01 3.86435449e-01 -1.46185601e+00 2.47165293e-01 5.77326477e-01 -2.05125153e-01 4.54531252e-01 -4.84304547e-01 -1.96049094e-01 9.07763839e-02 -3.10732365e-01 1.00909233e-01 6.91106915e-03 -1.62956536e-01] [ 1.00863945e+00 -4.42100197e-01 8.19708586e-01 -2.97791898e-01 -6.61801845e-02 7.40097523e-01 -4.94243830e-01 -1.35908055e+00 1.40313834e-01 -1.12639248e+00 6.23241067e-01 -4.05675888e-01 -1.85383588e-01 -1.64165449e+00 8.24069560e-01] [-8.11625600e-01 -1.33363798e-01 -8.84372354e-01 -7.24454403e-01 1.78467676e-01 -2.68268138e-02 -1.29417762e-01 1.18803069e-01 2.23496974e-01 -5.70154309e-01 3.22470725e-01 -7.54528642e-01 -2.15043545e-01 -2.78738976e-01 -1.12240657e-01] [-6.92376316e-01 -5.69475830e-01 -1.38264030e-01 6.45949006e-01 1.52159774e+00 1.25104189e-02 2.86201477e-01 1.50656477e-02 -1.15781724e-01 -7.53234446e-01 -2.62396157e-01 -5.36148965e-01 4.61785555e-01 1.35540199e+00 3.95057946e-01] [ 1.02827340e-01 8.31672132e-01 1.68062821e-01 -7.40319431e-01 -6.74238205e-01 -4.03142184e-01 2.69776583e-01 1.38628900e+00 7.02897429e-01 
-1.80163711e-01 -9.18207824e-01 -1.81253180e-01 1.83262050e-01 3.90768409e-01 2.89825201e-02] [ 1.78846121e-02 -3.04294288e-01 -7.31834173e-02 -6.08211696e-01 6.62687063e-01 4.23388839e-01 1.62822723e-01 5.95700741e-02 -3.72984111e-02 -9.70226467e-01 6.60376549e-01 -2.21050769e-01 -5.18220663e-02 3.65322828e-02 -3.51037383e-01] [-9.87454474e-01 6.67993605e-01 5.01333714e-01 -7.92241693e-01 1.22493148e+00 8.99066031e-01 6.26973569e-01 3.18064690e-01 -1.60868311e+00 1.22852242e+00 7.61500895e-01 -1.65803097e-02 8.90518188e-01 9.71228182e-01 7.49149472e-02]] [[-9.62943017e-01 7.99488068e-01 -5.81224024e-01 4.22835141e-01 7.52853274e-01 -7.06935525e-01 6.36946380e-01 -3.93349916e-01 -3.08671176e-01 8.76988113e-01 3.44427556e-01 -1.24243498e+00 -7.82452047e-01 -1.71117139e+00 -1.05584860e-02] [-7.39436030e-01 1.64710641e-01 5.83686173e-01 -3.52304637e-01 -2.64279395e-01 -8.09854090e-01 -2.69526929e-01 -2.10489631e-01 -2.04465985e-02 -1.54741395e+00 -1.45070136e-01 2.77933121e-01 2.21554801e-01 -8.01254869e-01 -2.45301321e-01] [-7.72082359e-02 -2.32260644e-01 3.58549833e-01 4.13390577e-01 6.47396863e-01 2.95290291e-01 1.23337150e-01 3.63686353e-01 4.63603199e-01 -7.73892105e-01 -1.88657865e-01 1.24184406e+00 -1.39834726e+00 6.42917871e-01 -9.51127648e-01] [ 6.62357450e-01 -1.10203616e-01 -1.39903247e-01 4.36209410e-01 -5.25209546e-01 1.09117317e+00 -3.79697144e-01 -7.24961817e-01 -1.11788821e+00 -1.47545540e+00 -2.43419826e-01 -4.22968268e-02 2.18718499e-02 6.70980036e-01 -1.05689853e-01] [ 9.34383631e-01 -1.93872809e-01 9.67451155e-01 -8.93771648e-02 3.45031992e-02 -4.38184083e-01 1.19738746e+00 -6.42954230e-01 1.16478729e+00 6.34315789e-01 -5.80393553e-01 8.15420151e-01 1.82984066e+00 7.61663377e-01 -6.26080036e-01] [ 7.54364669e-01 -5.69632232e-01 -1.31913805e+00 1.09509611e+00 1.23009694e+00 -6.59368277e-01 4.68044609e-01 1.18455505e+00 5.96654564e-02 3.78416717e-01 -3.06914508e-01 -1.67134702e-01 5.15875578e-01 2.99284220e-01 4.87267733e-01] [ 9.09037173e-01 
6.88915372e-01 -5.42279065e-01 1.10786843e+00 1.30483299e-01 -1.61673558e+00 -2.14841977e-01 9.79158521e-01 4.38601077e-01 -1.29662406e+00 -2.63363510e-01 -3.36453110e-01 5.55283189e-01 -1.21471500e+00 4.61893678e-02] [ 1.32196891e+00 -6.78719640e-01 1.38819647e+00 1.35887638e-01 -1.36397386e+00 9.23794568e-01 -2.20657676e-01 -2.69082606e-01 6.23630762e-01 9.96586919e-01 -1.78356695e+00 -1.43973362e+00 -1.19006956e+00 7.04139322e-02 2.63744855e+00]] [[ 3.69997621e-02 9.28375244e-01 -6.04468167e-01 -7.48142302e-01 -6.82324350e-01 8.43233585e-01 -7.68086493e-01 2.87217587e-01 6.35410309e-01 -9.39721107e-01 -1.65040359e-01 1.16646636e+00 -2.93183386e-01 -1.38866639e+00 -6.33639097e-01] [-4.30938572e-01 4.41503406e-01 2.13959828e-01 5.53477705e-01 7.11851180e-01 -2.65061438e-01 -4.67588305e-02 8.53926539e-02 1.20810807e-01 -4.22100797e-02 3.32648158e-01 6.55655444e-01 -5.94755292e-01 3.67384911e-01 3.30090761e-01] [ 1.47066641e+00 4.69701171e-01 4.04187262e-01 1.04739085e-01 9.97625589e-02 7.30746031e-01 -1.32846165e+00 3.22510600e-02 -5.77076554e-01 -1.21575522e+00 7.17973471e-01 9.81215358e-01 9.12149698e-02 -5.81567526e-01 5.83024740e-01] [ 3.10721278e-01 -1.22360826e+00 -5.50710559e-02 1.20669281e+00 -2.99980164e-01 1.21380627e-01 -3.39832187e-01 8.23057890e-01 -1.08594358e-01 -7.19921827e-01 -4.75877941e-01 -2.08741933e-01 3.05133820e-01 -1.13956481e-01 2.86015421e-01] [ 9.03316617e-01 -2.48316586e-01 4.98512119e-01 -2.24395066e-01 1.15058970e+00 1.71074823e-01 3.30023646e-01 -6.69290423e-01 -4.00584400e-01 1.19864061e-01 -7.29155958e-01 -1.07390797e+00 1.43812045e-01 8.70727777e-01 -1.21148133e+00] [ 4.60521251e-01 -7.50498116e-01 -3.00250739e-01 5.23266137e-01 -3.11277807e-04 5.29925525e-01 9.34270382e-01 -4.13987398e-01 8.96044374e-01 1.10645115e+00 7.83194065e-01 -7.70886123e-01 3.22763056e-01 -1.08074546e+00 -8.59445512e-01] [ 1.00691104e+00 4.32014763e-02 -1.73236713e-01 8.06262851e-01 2.77131259e-01 4.55011129e-02 -2.28509009e-02 -4.67363209e-01 
6.90716267e-01 3.40226531e-01 2.82344103e-01 9.83108342e-01 3.15078497e-01 1.09620333e+00 2.50175446e-01] [-8.40341032e-01 8.83291423e-01 2.30922215e-02 -6.48600101e-01 1.10247064e+00 -7.82820761e-01 9.16665673e-01 7.11856902e-01 -1.60944903e+00 1.15816939e+00 8.84517133e-01 9.34925228e-02 -1.68532658e+00 4.61671352e-01 -1.16117144e+00]]]]; ov_res: [[[[-8.23354721e-01 -9.95093584e-01 1.24626905e-02 -7.65104055e-01 -5.49392164e-01 8.15329313e-01 -5.36937952e-01 -8.33626509e-01 -7.07287192e-02 -7.87580609e-02 -3.28017086e-01 5.29224336e-01 2.34485173e+00 6.90731287e-01 4.94636983e-01] [ 4.01236087e-01 -5.88883281e-01 3.86435449e-01 -1.46185601e+00 2.47165293e-01 5.77326477e-01 -2.05125153e-01 4.54531252e-01 -4.84304547e-01 -1.96049094e-01 9.07763839e-02 -3.10732365e-01 1.00909233e-01 6.91106915e-03 -1.62956536e-01] [ 1.00863945e+00 -4.42100197e-01 8.19708586e-01 -2.97791898e-01 -6.61801845e-02 7.40097523e-01 -4.94243830e-01 -1.35908055e+00 1.40313834e-01 -1.12639248e+00 6.23241067e-01 -4.05675888e-01 -1.85383588e-01 -1.64165449e+00 8.24069560e-01] [-8.11625600e-01 -1.33363798e-01 -8.84372354e-01 -7.24454403e-01 1.78467676e-01 -2.68268138e-02 -1.29417762e-01 1.18803069e-01 2.23496974e-01 -5.70154309e-01 3.22470725e-01 -7.54528642e-01 -2.15043545e-01 -2.78738976e-01 -1.12240657e-01] [-6.92376316e-01 -5.69475830e-01 -1.38264030e-01 6.45949006e-01 1.52159774e+00 1.25104189e-02 2.86201477e-01 1.50656477e-02 -1.15781724e-01 -7.53234446e-01 -2.62396157e-01 -5.36148965e-01 4.61785555e-01 1.35540199e+00 3.95057946e-01] [ 1.02827340e-01 8.31672132e-01 1.68062821e-01 -7.40319431e-01 -6.74238205e-01 -4.03142184e-01 2.69776583e-01 1.38628900e+00 7.02897429e-01 -1.80163711e-01 -9.18207824e-01 -1.81253180e-01 1.83262050e-01 3.90768409e-01 2.89825201e-02] [ 1.78846121e-02 -3.04294288e-01 -7.31834173e-02 -6.08211696e-01 6.62687063e-01 4.23388839e-01 1.62822723e-01 5.95700741e-02 -3.72984111e-02 -9.70226467e-01 6.60376549e-01 -2.21050769e-01 -5.18220663e-02 3.65322828e-02 
-3.51037383e-01] [-9.87454474e-01 6.67993605e-01 5.01333714e-01 -7.92241693e-01 1.22493148e+00 8.99066031e-01 6.26973569e-01 3.18064690e-01 -1.60868311e+00 1.22852242e+00 7.61500895e-01 -1.65803097e-02 8.90518188e-01 9.71228182e-01 7.49149472e-02]] [[-9.62943017e-01 7.99488068e-01 -5.81224024e-01 4.22835141e-01 7.52853274e-01 -7.06935525e-01 6.36946380e-01 -3.93349916e-01 -3.08671176e-01 8.76988113e-01 3.44427556e-01 -1.24243498e+00 -7.82452047e-01 -1.71117139e+00 -1.05584860e-02] [-7.39436030e-01 1.64710641e-01 5.83686173e-01 -3.52304637e-01 -2.64279395e-01 -8.09854090e-01 -2.69526929e-01 -2.10489631e-01 -2.04465985e-02 -1.54741395e+00 -1.45070136e-01 2.77933121e-01 2.21554801e-01 -8.01254869e-01 -2.45301321e-01] [-7.72082359e-02 -2.32260644e-01 3.58549833e-01 4.13390577e-01 6.47396863e-01 2.95290291e-01 1.23337150e-01 3.63686353e-01 4.63603199e-01 -7.73892105e-01 -1.88657865e-01 1.24184406e+00 -1.39834726e+00 6.42917871e-01 -9.51127648e-01] [ 6.62357450e-01 -1.10203616e-01 -1.39903247e-01 4.36209410e-01 -5.25209546e-01 1.09117317e+00 -3.79697144e-01 -7.24961817e-01 -1.11788821e+00 -1.47545540e+00 -2.43419826e-01 -4.22968268e-02 2.18718499e-02 6.70980036e-01 -1.05689853e-01] [ 9.34383631e-01 -1.93872809e-01 9.67451155e-01 -8.93771648e-02 3.45031992e-02 -4.38184083e-01 1.19738746e+00 -6.42954230e-01 1.16478729e+00 6.34315789e-01 -5.80393553e-01 8.15420151e-01 1.82984066e+00 7.61663377e-01 -6.26080036e-01] [ 7.54364669e-01 -5.69632232e-01 -1.31913805e+00 1.09509611e+00 1.23009694e+00 -6.59368277e-01 4.68044609e-01 1.18455505e+00 5.96654564e-02 3.78416717e-01 -3.06914508e-01 -1.67134702e-01 5.15875578e-01 2.99284220e-01 4.87267733e-01] [ 9.09037173e-01 6.88915372e-01 -5.42279065e-01 1.10786843e+00 1.30483299e-01 -1.61673558e+00 -2.14841977e-01 9.79158521e-01 4.38601077e-01 -1.29662406e+00 -2.63363510e-01 -3.36453110e-01 5.55283189e-01 -1.21471500e+00 4.61893678e-02] [ 1.32196891e+00 -6.78719640e-01 1.38819647e+00 1.35887638e-01 -1.36397386e+00 9.23794568e-01 
-2.20657676e-01 -2.69082606e-01 6.23630762e-01 9.96586919e-01 -1.78356695e+00 -1.43973362e+00 -1.19006956e+00 7.04139322e-02 2.63744855e+00]] [[ 3.69997621e-02 9.28375244e-01 -6.04468167e-01 -7.48142302e-01 -6.82324350e-01 8.43233585e-01 -7.68086493e-01 2.87217587e-01 6.35410309e-01 -9.39721107e-01 -1.65040359e-01 1.16646636e+00 -2.93183386e-01 -1.38866639e+00 -6.33639097e-01] [-4.30938572e-01 4.41503406e-01 2.13959828e-01 5.53477705e-01 7.11851180e-01 -2.65061438e-01 -4.67588305e-02 8.53926539e-02 1.20810807e-01 -4.22100797e-02 3.32648158e-01 6.55655444e-01 -5.94755292e-01 3.67384911e-01 3.30090761e-01] [ 1.47066641e+00 4.69701171e-01 4.04187262e-01 1.04739085e-01 9.97625589e-02 7.30746031e-01 -1.32846165e+00 3.22510600e-02 -5.77076554e-01 -1.21575522e+00 7.17973471e-01 9.81215358e-01 9.12149698e-02 -5.81567526e-01 5.83024740e-01] [ 3.10721278e-01 -1.22360826e+00 -5.50710559e-02 1.20669281e+00 -2.99980164e-01 1.21380627e-01 -3.39832187e-01 8.23057890e-01 -1.08594358e-01 -7.19921827e-01 -4.75877941e-01 -2.08741933e-01 3.05133820e-01 -1.13956481e-01 2.86015421e-01] [ 9.03316617e-01 -2.48316586e-01 4.98512119e-01 -2.24395066e-01 1.15058970e+00 1.71074823e-01 3.30023646e-01 -6.69290423e-01 -4.00584400e-01 1.19864061e-01 -7.29155958e-01 -1.07390797e+00 1.43812045e-01 8.70727777e-01 -1.21148133e+00] [ 4.60521251e-01 -7.50498116e-01 -3.00250739e-01 5.23266137e-01 -3.11277807e-04 5.29925525e-01 9.34270382e-01 -4.13987398e-01 8.96044374e-01 1.10645115e+00 7.83194065e-01 -7.70886123e-01 3.22763056e-01 -1.08074546e+00 -8.59445512e-01] [ 1.00691104e+00 4.32014763e-02 -1.73236713e-01 8.06262851e-01 2.77131259e-01 4.55011129e-02 -2.28509009e-02 -4.67363209e-01 6.90716267e-01 3.40226531e-01 2.82344103e-01 9.83108342e-01 3.15078497e-01 1.09620333e+00 2.50175446e-01] [-8.40341032e-01 8.83291423e-01 2.30922215e-02 -6.48600101e-01 1.10247064e+00 -7.82820761e-01 9.16665673e-01 7.11856902e-01 -1.60944903e+00 1.15816939e+00 8.84517133e-01 9.34925228e-02 -1.68532658e+00 4.61671352e-01 
-1.16117144e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5688.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[0, 0]]() %5 : int[] = prim::Constant[value=[1, 1]]() %6 : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) fw_re: [[[[ 2.84937173e-01 -1.74517974e-01 -2.71643817e-01 -2.04136789e-01 3.50981615e-02 1.66665494e-01 -1.05910130e-01 -9.62122008e-02 5.15851855e-01 1.04499626e+00 1.04936647e+00 6.21023118e-01 2.51106530e-01] [ 4.61202890e-01 -8.78620669e-02 6.02258295e-02 -9.35685337e-02 2.75424898e-01 2.87819654e-01 1.92253634e-01 2.91113257e-01 6.77303195e-01 8.55438173e-01 5.32767296e-01 1.75975516e-01 -6.49667233e-02] [ 6.85578763e-01 1.26444995e-01 2.93087065e-01 2.18292102e-01 8.02958965e-01 6.45303905e-01 2.74774730e-01 5.44915974e-01 6.28192425e-01 7.72790372e-01 9.18022171e-02 -8.62524658e-02 -1.80943623e-01] [ 6.65479481e-01 5.41343272e-01 4.54662174e-01 4.88949418e-01 6.59591258e-01 5.65673172e-01 3.89433503e-01 9.86487508e-01 5.75293779e-01 4.11615908e-01 -3.89390856e-01 -2.37850294e-01 -2.47871503e-01] [ 7.66847670e-01 3.75006437e-01 2.25181207e-01 2.30078623e-01 5.59350848e-01 1.77726448e-01 4.63225730e-02 5.93631506e-01 4.72998977e-01 6.38919592e-01 1.40527070e-01 2.97361743e-02 -8.93393531e-02] [ 2.34560117e-01 2.83084989e-01 2.88168311e-01 3.95388603e-01 3.35837215e-01 2.84637604e-02 -1.41358674e-01 1.56779647e-01 1.63007289e-01 2.41720065e-01 -2.36374140e-03 8.23051780e-02 -5.83220720e-02] [ 2.92861044e-01 4.52227384e-01 6.23125434e-01 6.87548399e-01 5.65202713e-01 3.37414622e-01 2.08575755e-01 1.08805858e-01 1.59277767e-01 -3.13281491e-02 2.13742983e-02 1.91344116e-02 
7.09893182e-02] [ 4.25548218e-02 4.90408212e-01 5.18067837e-01 8.42990637e-01 4.35679823e-01 6.42511070e-01 2.27546275e-01 1.89298000e-02 -1.49647042e-01 -5.33388197e-01 -4.50613707e-01 -2.21287191e-01 1.71825856e-01] [ 3.75628233e-01 6.47038043e-01 3.08218837e-01 2.16972023e-01 2.25469377e-02 3.17616612e-01 3.55113268e-01 1.40030310e-01 4.78076525e-02 -4.76890713e-01 -2.62347668e-01 -1.64360896e-01 1.50695294e-01] [ 2.57060319e-01 3.91247809e-01 2.63154507e-02 -1.57587692e-01 -1.17959708e-01 -2.16624793e-03 2.04157494e-02 -1.34567633e-01 -7.60828927e-02 -4.35852468e-01 -2.77197331e-01 -3.09569418e-01 -1.02243669e-01] [ 4.96275008e-01 8.32597613e-01 5.15690804e-01 -8.30023456e-03 -1.35520279e-01 -3.52611482e-01 1.61313023e-02 -6.33468479e-02 1.80146620e-01 -1.82944387e-01 3.92740443e-02 -2.03360796e-01 -1.88595816e-01] [ 1.98447183e-02 2.81793445e-01 5.45839429e-01 3.83474082e-01 2.34603137e-01 -4.10405621e-02 -3.67485285e-02 1.14065647e-01 2.11055577e-01 -1.52792737e-01 -6.85365945e-02 -3.87143016e-01 -1.17155686e-01] [-1.11619130e-01 6.63885176e-02 3.85012418e-01 3.01765800e-01 8.50497857e-02 -2.37879679e-01 -1.67205408e-01 1.94474280e-01 5.58674276e-01 2.15438843e-01 2.81944007e-01 2.13604569e-02 1.65612057e-01]] [[ 9.28702354e-02 -2.98230588e-01 -3.87247473e-01 -9.95114088e-01 -8.43222380e-01 -6.59305096e-01 -5.48152566e-01 -3.44306350e-01 -4.32849616e-01 -1.58589289e-01 -2.83632219e-01 -3.91519397e-01 -7.18891382e-01] [ 2.28576392e-01 1.99146699e-02 -3.05632204e-01 -7.85122573e-01 -8.34467113e-01 -4.46327895e-01 -2.56103873e-01 -5.96398041e-02 -4.59598042e-02 1.26697823e-01 6.19153045e-02 -2.15689316e-01 -5.29350698e-01] [ 1.23045575e-02 -1.66254789e-01 -1.41989917e-01 -4.63771492e-01 -5.72454095e-01 -3.34481418e-01 1.20755807e-01 5.13280749e-01 -6.68065399e-02 -1.02061164e-02 -3.95299643e-01 -1.62197560e-01 -3.32450092e-01] [ 1.45536602e-01 -1.50723621e-01 -3.50116640e-02 -2.89004117e-01 -4.57162857e-01 -4.16693091e-01 5.30554466e-02 2.97211438e-01 
-4.04699445e-01 -4.52178955e-01 -3.00946444e-01 9.85424370e-02 2.50856459e-01] [-4.69443910e-02 -3.62228960e-01 -5.20455278e-02 6.03392906e-02 -1.07220195e-01 -2.22839508e-02 1.70806855e-01 5.63189328e-01 -7.31337443e-02 -1.23072468e-01 9.88780148e-03 1.66214436e-01 4.84190136e-01] [ 5.81331253e-01 -1.91334879e-03 -3.35552730e-02 -1.36516616e-01 3.81109789e-02 3.20014477e-01 4.97755945e-01 7.38387465e-01 2.66955763e-01 -7.51242694e-03 3.19264919e-01 1.48151070e-01 5.76741934e-01] [ 2.24617898e-01 1.55384958e-01 4.60050255e-02 1.73485368e-01 2.01740101e-01 4.78496075e-01 2.84948200e-01 4.01707977e-01 1.51682600e-01 6.27124310e-02 4.92062926e-01 -2.28208303e-03 4.75096494e-01] [ 5.86209536e-01 6.30292535e-01 1.95686504e-01 -2.12416038e-01 -1.66635007e-01 2.65560001e-01 3.13544750e-01 1.99210912e-01 -2.02335417e-01 -1.60690337e-01 5.19205555e-02 1.03671640e-01 5.33388734e-01] [ 1.46543533e-01 3.00383002e-01 1.11874424e-01 -9.16118622e-02 2.78906035e-03 3.24029922e-01 4.65443283e-01 1.43671572e-01 -2.33463764e-01 -2.91460633e-01 -1.94392949e-01 -7.55764768e-02 1.43356651e-01] [ 1.15711533e-01 3.85939553e-02 5.62978834e-02 -2.72106677e-01 -9.51611102e-02 1.01113446e-01 2.47867271e-01 2.27152500e-02 -2.90409356e-01 -3.56346428e-01 -3.52875531e-01 -6.70437664e-02 7.10416660e-02] [-6.89473972e-02 -1.71275854e-01 -6.61197081e-02 1.11952342e-01 2.55776018e-01 2.65891641e-01 -2.07028553e-01 -2.03842834e-01 -2.74053782e-01 -2.14641705e-01 -1.06513612e-01 -2.30419531e-01 -1.33519679e-01] [-2.34039858e-01 -7.90514126e-02 1.32929802e-01 3.67347091e-01 1.44414648e-01 -2.82916764e-04 -4.21226561e-01 -1.83021188e-01 -1.42000034e-01 -3.50672677e-02 3.52735251e-01 2.90562928e-01 5.22299826e-01] [-4.91320401e-01 -1.78181827e-01 -2.78702170e-01 3.26657772e-01 1.34842098e-01 5.24072945e-01 2.63499886e-01 3.06487024e-01 2.13965178e-01 1.26042560e-01 3.44188929e-01 5.47832511e-02 4.13864821e-01]] [[-9.35675502e-01 -5.29798806e-01 -1.14372678e-01 2.70957619e-01 -3.42138819e-02 
-6.30896091e-02 -2.08543330e-01 -7.58192539e-02 -2.04701602e-01 1.87781616e-03 -6.33980334e-02 1.51912138e-01 -8.03936049e-02] [-5.82603335e-01 -3.22951198e-01 1.04981713e-01 7.14695215e-01 6.54414773e-01 5.64560235e-01 -1.91592108e-02 -2.16714457e-01 -5.86033046e-01 -1.36318371e-01 -8.43414888e-02 4.69525665e-01 1.89869225e-01] [ 1.72146663e-01 2.40000620e-01 5.79466879e-01 3.04838330e-01 2.53976554e-01 -4.43131514e-02 -1.40648842e-01 -2.31707782e-01 -4.00351882e-01 -2.80283570e-01 -1.08794220e-01 2.10064366e-01 3.54811341e-01] [ 5.99561393e-01 7.06507444e-01 1.12092245e+00 5.75364530e-01 4.11706120e-01 -1.60037339e-01 -1.86821356e-01 -3.92335266e-01 -4.40499544e-01 -3.96291941e-01 -2.52075791e-01 -3.38955373e-02 1.31672859e-01] [ 2.02801868e-01 3.31395328e-01 1.04138219e+00 3.95209819e-01 2.67079264e-01 -2.91724771e-01 -9.73774269e-02 -1.55804589e-01 -2.69001052e-02 -1.27557099e-01 1.28906548e-01 -1.81500807e-01 -1.78837135e-01] [-1.32424282e-02 4.33938324e-01 8.48060191e-01 3.50568831e-01 1.64810419e-01 3.69732119e-02 3.17785144e-01 1.62560016e-01 -4.94911149e-02 -2.49118030e-01 -5.96554950e-02 -9.18378830e-02 -2.31771529e-01] [-4.42059308e-01 1.19359441e-01 3.32886726e-01 1.25934109e-01 -8.85733366e-02 -1.79738209e-01 -5.93019277e-02 -6.76593632e-02 1.93857756e-02 1.70495100e-02 2.49916151e-01 9.54150409e-02 -1.64369613e-01] [-1.68023720e-01 1.01634711e-01 -1.33728683e-01 -2.02003688e-01 -4.19715792e-01 -3.18950325e-01 1.41495556e-01 3.13101485e-02 -2.64284074e-01 -5.49815416e-01 -3.64749908e-01 1.84997059e-02 4.21149284e-03] [-4.02422279e-01 -3.67199630e-01 -5.59358299e-01 -3.79001677e-01 -5.46542883e-01 -4.45154190e-01 -1.58503234e-01 -2.29243070e-01 -2.45535746e-01 -3.71493459e-01 -3.45567107e-01 -3.59596759e-02 -1.93845600e-01] [-5.56137443e-01 -5.58676541e-01 -7.29053259e-01 -4.94224608e-01 -5.52505076e-01 -5.84516287e-01 -1.40009344e-01 -3.51563126e-01 -3.53098094e-01 -4.85899091e-01 -5.24092436e-01 -2.42752716e-01 -4.10382360e-01] [-4.51881528e-01 
-2.65603721e-01 -4.02521193e-01 -4.59568828e-01 -4.50271010e-01 -5.43453932e-01 -2.64354110e-01 -3.15787524e-01 -4.93685789e-02 9.62403864e-02 -1.98641196e-01 -1.33082524e-01 -1.30305693e-01] [-6.79452777e-01 -7.20514357e-01 -6.25706136e-01 -4.71092701e-01 -3.68036002e-01 -5.05185664e-01 -2.32324183e-01 -1.41791046e-01 4.12052423e-02 1.25143737e-01 -2.56472796e-01 -3.26810509e-01 -1.54563114e-01] [-2.94873089e-01 -4.52557206e-01 -2.55692273e-01 -9.81233120e-02 -2.13005990e-01 -2.18593046e-01 -2.07885802e-01 2.54842639e-01 2.29944810e-01 1.84459120e-01 -4.50830191e-01 -8.76359791e-02 3.31339628e-01]]]]; ov_res: [[[[ 2.84937173e-01 -1.74517974e-01 -2.71643817e-01 -2.04136789e-01 3.50981615e-02 1.66665494e-01 -1.05910130e-01 -9.62122008e-02 5.15851855e-01 1.04499626e+00 1.04936647e+00 6.21023118e-01 2.51106530e-01] [ 4.61202890e-01 -8.78620669e-02 6.02258295e-02 -9.35685337e-02 2.75424898e-01 2.87819654e-01 1.92253634e-01 2.91113257e-01 6.77303195e-01 8.55438173e-01 5.32767296e-01 1.75975516e-01 -6.49667233e-02] [ 6.85578763e-01 1.26444995e-01 2.93087065e-01 2.18292102e-01 8.02958965e-01 6.45303905e-01 2.74774730e-01 5.44915974e-01 6.28192425e-01 7.72790372e-01 9.18022171e-02 -8.62524658e-02 -1.80943623e-01] [ 6.65479481e-01 5.41343272e-01 4.54662174e-01 4.88949418e-01 6.59591258e-01 5.65673172e-01 3.89433503e-01 9.86487508e-01 5.75293779e-01 4.11615908e-01 -3.89390856e-01 -2.37850294e-01 -2.47871503e-01] [ 7.66847670e-01 3.75006437e-01 2.25181207e-01 2.30078623e-01 5.59350848e-01 1.77726448e-01 4.63225730e-02 5.93631506e-01 4.72998977e-01 6.38919592e-01 1.40527070e-01 2.97361743e-02 -8.93393531e-02] [ 2.34560117e-01 2.83084989e-01 2.88168311e-01 3.95388603e-01 3.35837215e-01 2.84637604e-02 -1.41358674e-01 1.56779647e-01 1.63007289e-01 2.41720065e-01 -2.36374140e-03 8.23051780e-02 -5.83220720e-02] [ 2.92861044e-01 4.52227384e-01 6.23125434e-01 6.87548399e-01 5.65202713e-01 3.37414622e-01 2.08575755e-01 1.08805858e-01 1.59277767e-01 -3.13281491e-02 2.13742983e-02 
1.91344116e-02 7.09893182e-02] [ 4.25548218e-02 4.90408212e-01 5.18067837e-01 8.42990637e-01 4.35679823e-01 6.42511070e-01 2.27546275e-01 1.89298000e-02 -1.49647042e-01 -5.33388197e-01 -4.50613707e-01 -2.21287191e-01 1.71825856e-01] [ 3.75628233e-01 6.47038043e-01 3.08218837e-01 2.16972023e-01 2.25469377e-02 3.17616612e-01 3.55113268e-01 1.40030310e-01 4.78076525e-02 -4.76890713e-01 -2.62347668e-01 -1.64360896e-01 1.50695294e-01] [ 2.57060319e-01 3.91247809e-01 2.63154507e-02 -1.57587692e-01 -1.17959708e-01 -2.16624793e-03 2.04157494e-02 -1.34567633e-01 -7.60828927e-02 -4.35852468e-01 -2.77197331e-01 -3.09569418e-01 -1.02243669e-01] [ 4.96275008e-01 8.32597613e-01 5.15690804e-01 -8.30023456e-03 -1.35520279e-01 -3.52611482e-01 1.61313023e-02 -6.33468479e-02 1.80146620e-01 -1.82944387e-01 3.92740443e-02 -2.03360796e-01 -1.88595816e-01] [ 1.98447183e-02 2.81793445e-01 5.45839429e-01 3.83474082e-01 2.34603137e-01 -4.10405621e-02 -3.67485285e-02 1.14065647e-01 2.11055577e-01 -1.52792737e-01 -6.85365945e-02 -3.87143016e-01 -1.17155686e-01] [-1.11619130e-01 6.63885176e-02 3.85012418e-01 3.01765800e-01 8.50497857e-02 -2.37879679e-01 -1.67205408e-01 1.94474280e-01 5.58674276e-01 2.15438843e-01 2.81944007e-01 2.13604569e-02 1.65612057e-01]] [[ 9.28702354e-02 -2.98230588e-01 -3.87247473e-01 -9.95114088e-01 -8.43222380e-01 -6.59305096e-01 -5.48152566e-01 -3.44306350e-01 -4.32849616e-01 -1.58589289e-01 -2.83632219e-01 -3.91519397e-01 -7.18891382e-01] [ 2.28576392e-01 1.99146699e-02 -3.05632204e-01 -7.85122573e-01 -8.34467113e-01 -4.46327895e-01 -2.56103873e-01 -5.96398041e-02 -4.59598042e-02 1.26697823e-01 6.19153045e-02 -2.15689316e-01 -5.29350698e-01] [ 1.23045575e-02 -1.66254789e-01 -1.41989917e-01 -4.63771492e-01 -5.72454095e-01 -3.34481418e-01 1.20755807e-01 5.13280749e-01 -6.68065399e-02 -1.02061164e-02 -3.95299643e-01 -1.62197560e-01 -3.32450092e-01] [ 1.45536602e-01 -1.50723621e-01 -3.50116640e-02 -2.89004117e-01 -4.57162857e-01 -4.16693091e-01 5.30554466e-02 
2.97211438e-01 -4.04699445e-01 -4.52178955e-01 -3.00946444e-01 9.85424370e-02 2.50856459e-01] [-4.69443910e-02 -3.62228960e-01 -5.20455278e-02 6.03392906e-02 -1.07220195e-01 -2.22839508e-02 1.70806855e-01 5.63189328e-01 -7.31337443e-02 -1.23072468e-01 9.88780148e-03 1.66214436e-01 4.84190136e-01] [ 5.81331253e-01 -1.91334879e-03 -3.35552730e-02 -1.36516616e-01 3.81109789e-02 3.20014477e-01 4.97755945e-01 7.38387465e-01 2.66955763e-01 -7.51242694e-03 3.19264919e-01 1.48151070e-01 5.76741934e-01] [ 2.24617898e-01 1.55384958e-01 4.60050255e-02 1.73485368e-01 2.01740101e-01 4.78496075e-01 2.84948200e-01 4.01707977e-01 1.51682600e-01 6.27124310e-02 4.92062926e-01 -2.28208303e-03 4.75096494e-01] [ 5.86209536e-01 6.30292535e-01 1.95686504e-01 -2.12416038e-01 -1.66635007e-01 2.65560001e-01 3.13544750e-01 1.99210912e-01 -2.02335417e-01 -1.60690337e-01 5.19205555e-02 1.03671640e-01 5.33388734e-01] [ 1.46543533e-01 3.00383002e-01 1.11874424e-01 -9.16118622e-02 2.78906035e-03 3.24029922e-01 4.65443283e-01 1.43671572e-01 -2.33463764e-01 -2.91460633e-01 -1.94392949e-01 -7.55764768e-02 1.43356651e-01] [ 1.15711533e-01 3.85939553e-02 5.62978834e-02 -2.72106677e-01 -9.51611102e-02 1.01113446e-01 2.47867271e-01 2.27152500e-02 -2.90409356e-01 -3.56346428e-01 -3.52875531e-01 -6.70437664e-02 7.10416660e-02] [-6.89473972e-02 -1.71275854e-01 -6.61197081e-02 1.11952342e-01 2.55776018e-01 2.65891641e-01 -2.07028553e-01 -2.03842834e-01 -2.74053782e-01 -2.14641705e-01 -1.06513612e-01 -2.30419531e-01 -1.33519679e-01] [-2.34039858e-01 -7.90514126e-02 1.32929802e-01 3.67347091e-01 1.44414648e-01 -2.82916764e-04 -4.21226561e-01 -1.83021188e-01 -1.42000034e-01 -3.50672677e-02 3.52735251e-01 2.90562928e-01 5.22299826e-01] [-4.91320401e-01 -1.78181827e-01 -2.78702170e-01 3.26657772e-01 1.34842098e-01 5.24072945e-01 2.63499886e-01 3.06487024e-01 2.13965178e-01 1.26042560e-01 3.44188929e-01 5.47832511e-02 4.13864821e-01]] [[-9.35675502e-01 -5.29798806e-01 -1.14372678e-01 2.70957619e-01 
-3.42138819e-02 -6.30896091e-02 -2.08543330e-01 -7.58192539e-02 -2.04701602e-01 1.87781616e-03 -6.33980334e-02 1.51912138e-01 -8.03936049e-02] [-5.82603335e-01 -3.22951198e-01 1.04981713e-01 7.14695215e-01 6.54414773e-01 5.64560235e-01 -1.91592108e-02 -2.16714457e-01 -5.86033046e-01 -1.36318371e-01 -8.43414888e-02 4.69525665e-01 1.89869225e-01] [ 1.72146663e-01 2.40000620e-01 5.79466879e-01 3.04838330e-01 2.53976554e-01 -4.43131514e-02 -1.40648842e-01 -2.31707782e-01 -4.00351882e-01 -2.80283570e-01 -1.08794220e-01 2.10064366e-01 3.54811341e-01] [ 5.99561393e-01 7.06507444e-01 1.12092245e+00 5.75364530e-01 4.11706120e-01 -1.60037339e-01 -1.86821356e-01 -3.92335266e-01 -4.40499544e-01 -3.96291941e-01 -2.52075791e-01 -3.38955373e-02 1.31672859e-01] [ 2.02801868e-01 3.31395328e-01 1.04138219e+00 3.95209819e-01 2.67079264e-01 -2.91724771e-01 -9.73774269e-02 -1.55804589e-01 -2.69001052e-02 -1.27557099e-01 1.28906548e-01 -1.81500807e-01 -1.78837135e-01] [-1.32424282e-02 4.33938324e-01 8.48060191e-01 3.50568831e-01 1.64810419e-01 3.69732119e-02 3.17785144e-01 1.62560016e-01 -4.94911149e-02 -2.49118030e-01 -5.96554950e-02 -9.18378830e-02 -2.31771529e-01] [-4.42059308e-01 1.19359441e-01 3.32886726e-01 1.25934109e-01 -8.85733366e-02 -1.79738209e-01 -5.93019277e-02 -6.76593632e-02 1.93857756e-02 1.70495100e-02 2.49916151e-01 9.54150409e-02 -1.64369613e-01] [-1.68023720e-01 1.01634711e-01 -1.33728683e-01 -2.02003688e-01 -4.19715792e-01 -3.18950325e-01 1.41495556e-01 3.13101485e-02 -2.64284074e-01 -5.49815416e-01 -3.64749908e-01 1.84997059e-02 4.21149284e-03] [-4.02422279e-01 -3.67199630e-01 -5.59358299e-01 -3.79001677e-01 -5.46542883e-01 -4.45154190e-01 -1.58503234e-01 -2.29243070e-01 -2.45535746e-01 -3.71493459e-01 -3.45567107e-01 -3.59596759e-02 -1.93845600e-01] [-5.56137443e-01 -5.58676541e-01 -7.29053259e-01 -4.94224608e-01 -5.52505076e-01 -5.84516287e-01 -1.40009344e-01 -3.51563126e-01 -3.53098094e-01 -4.85899091e-01 -5.24092436e-01 -2.42752716e-01 -4.10382360e-01] 
[-4.51881528e-01 -2.65603721e-01 -4.02521193e-01 -4.59568828e-01 -4.50271010e-01 -5.43453932e-01 -2.64354110e-01 -3.15787524e-01 -4.93685789e-02 9.62403864e-02 -1.98641196e-01 -1.33082524e-01 -1.30305693e-01] [-6.79452777e-01 -7.20514357e-01 -6.25706136e-01 -4.71092701e-01 -3.68036002e-01 -5.05185664e-01 -2.32324183e-01 -1.41791046e-01 4.12052423e-02 1.25143737e-01 -2.56472796e-01 -3.26810509e-01 -1.54563114e-01] [-2.94873089e-01 -4.52557206e-01 -2.55692273e-01 -9.81233120e-02 -2.13005990e-01 -2.18593046e-01 -2.07885802e-01 2.54842639e-01 2.29944810e-01 1.84459120e-01 -4.50830191e-01 -8.76359791e-02 3.31339628e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5691.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[1, 1]]() %5 : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::avg_pool2d(%x, %5, %4, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%6) fw_re: [[[[ 4.37329113e-01 3.31863374e-01 -6.23942800e-02 1.74433962e-01 2.00488523e-01 -1.58819228e-01 -2.98836917e-01 -1.16128169e-01 3.57203007e-01 3.71670038e-01 -1.80723801e-01 -5.08087456e-01 -6.67378366e-01 -4.89025593e-01 -6.06538951e-01] [ 6.40193284e-01 4.45328832e-01 2.84636825e-01 -1.78135242e-02 -1.11520715e-01 -2.14821801e-01 -8.49826336e-02 5.39081283e-02 2.42342681e-01 1.25504151e-01 -2.39282977e-02 -2.79366612e-01 -1.03392273e-01 -1.90981686e-01 -2.38781855e-01] [ 1.30746281e+00 7.47414052e-01 5.45278668e-01 -1.67219222e-01 1.43640965e-01 5.15798748e-01 8.01537097e-01 6.77581906e-01 4.13523883e-01 -2.93765329e-02 1.32770568e-01 1.73402995e-01 6.98288202e-01 3.67877811e-01 3.54599863e-01] [ 8.32214594e-01 4.33353812e-01 1.86187565e-01 -4.59727079e-01 -1.60847828e-01 2.89041162e-01 5.09903431e-01 2.70775110e-01 7.94955045e-02 3.70567106e-02 1.96583524e-01 2.47560084e-01 6.86498344e-01 6.00209832e-01 7.57539690e-01] [ 7.70309687e-01 4.93700176e-01 5.87081052e-02 -1.97838038e-01 9.87342745e-02 4.11266387e-01 3.73032868e-01 -1.92213699e-01 -3.77712101e-01 -3.13271344e-01 -1.99756995e-01 6.87071979e-02 1.72417328e-01 4.59849834e-01 4.03066605e-01] [-1.69421230e-02 1.39460728e-01 2.01897264e-01 -1.35136679e-01 -2.13429987e-01 -2.50560850e-01 -2.86750924e-02 -3.99911970e-01 -4.95092720e-01 -3.33503067e-01 -3.80719692e-01 -1.38251439e-01 -8.37377682e-02 3.38369399e-01 3.67828727e-01] [ 2.82163352e-01 
2.54195452e-01 6.99611008e-01 1.32407665e-01 -7.57449195e-02 -1.46906912e-01 2.50102103e-01 1.36320189e-01 -4.10872363e-02 -2.29643404e-01 -2.00661853e-01 -4.43798117e-02 2.77274121e-02 8.75382200e-02 -1.19970329e-01] [ 2.20882501e-02 -5.98007720e-03 7.46534526e-01 6.27199039e-02 5.71099902e-03 -1.24150358e-01 2.71454126e-01 3.29078406e-01 2.15162292e-01 2.69182950e-01 1.71617523e-01 2.17609823e-01 9.72911492e-02 7.37493578e-03 -5.85475080e-02] [-1.27138719e-01 -1.19702749e-01 5.76874197e-01 1.90515906e-01 1.80667952e-01 1.64125383e-01 1.64608553e-01 -3.66220735e-02 -1.00187786e-01 2.94031531e-01 4.48421478e-01 2.84494877e-01 -2.76322495e-02 -1.76592946e-01 -1.54041991e-01] [-8.30148607e-02 -1.31997406e-01 2.52234042e-01 -8.48947167e-02 1.58116207e-01 3.23652893e-01 2.34801799e-01 -4.56925221e-02 -1.71084568e-01 5.33821940e-01 6.92941546e-01 4.22824472e-01 5.88476807e-02 -6.37470856e-02 1.31160691e-01] [-3.24288130e-01 -4.36701089e-01 -1.19493872e-01 -2.14793950e-01 -2.07874253e-02 1.43108264e-01 1.33220673e-01 3.52063864e-01 3.69748443e-01 7.16660082e-01 5.13305545e-01 1.50693595e-01 -5.51064201e-02 -2.81549990e-01 -2.14457199e-01] [ 2.52768844e-01 -9.53018740e-02 1.57423109e-01 -1.29142068e-02 5.09557486e-01 3.76063049e-01 3.96197259e-01 4.26279426e-01 6.40788019e-01 6.80594862e-01 3.65165740e-01 1.59283638e-01 3.19917686e-02 -1.77127004e-01 -3.72900486e-01] [ 1.05537079e-01 -6.36185110e-02 5.35345040e-02 2.37694345e-02 3.45458567e-01 9.05136317e-02 3.29951316e-01 2.11497158e-01 7.12309301e-01 2.57121503e-01 3.94357294e-02 -5.32331586e-01 -3.46390307e-01 -5.84943712e-01 -4.61369276e-01] [ 3.47450227e-01 3.91523987e-01 3.25976312e-01 3.10634464e-01 5.59353411e-01 5.82093358e-01 7.06797242e-01 1.35299759e-02 1.98230863e-01 -1.71254367e-01 2.07091987e-01 -3.54091823e-01 1.79962188e-01 -4.94338684e-02 4.06744719e-01] [ 1.78528298e-02 2.70981908e-01 5.29177301e-02 2.27178112e-01 9.60978791e-02 3.53425890e-01 5.12763083e-01 7.07479343e-02 5.72734587e-02 -3.42103243e-01 
1.89703181e-01 -5.49513996e-01 1.91477060e-01 7.43790483e-03 8.70565474e-01]] [[-6.25620604e-01 -1.69406787e-01 4.18826818e-01 1.03879321e+00 9.98232841e-01 4.64564234e-01 -1.15191258e-01 -6.52872324e-01 2.79213876e-01 7.56265104e-01 6.98442161e-01 -4.99437422e-01 -1.28164673e+00 -2.88989931e-01 2.38806218e-01] [-4.10755396e-01 -2.64618456e-01 1.96264848e-01 6.79235220e-01 7.51414835e-01 3.86120021e-01 -1.64081186e-01 -5.31653881e-01 2.36988273e-02 4.10257667e-01 2.75321752e-01 -5.77918768e-01 -1.19842649e+00 -2.16869667e-01 3.37576509e-01] [-7.13496134e-02 2.47713029e-02 2.14459002e-01 5.14703155e-01 4.70877588e-01 4.39382374e-01 4.96625900e-03 -2.70805806e-01 -1.98891461e-01 -5.50460406e-02 -5.42858317e-02 -2.12183356e-01 -6.07327163e-01 -1.22215778e-01 -8.22199285e-02] [-1.10218592e-01 -4.29125763e-02 7.96328299e-03 2.20866814e-01 3.30779552e-01 2.97173619e-01 -1.38602436e-01 -3.36531907e-01 -5.82620323e-01 -3.75261128e-01 -1.89313844e-01 1.30168006e-01 1.08859561e-01 -1.54532135e-01 -2.95812458e-01] [-2.43542239e-01 -7.91677535e-02 -1.01208258e-02 -8.01673234e-02 9.41276029e-02 2.29743898e-01 6.17835708e-02 -3.31465304e-01 -5.47303915e-01 -3.62101793e-01 -1.76333308e-01 3.07142735e-03 1.32688046e-01 -3.22516501e-01 -5.95099390e-01] [-2.78536022e-01 -1.62572473e-01 4.69707465e-03 -2.76113719e-01 -2.95843985e-02 2.73927838e-01 3.19156975e-01 -1.52774617e-01 -4.16932076e-01 -2.37144440e-01 3.31901722e-02 -4.94930856e-02 5.13965003e-02 -3.28888386e-01 -3.66663486e-01] [-4.47329909e-01 -5.03429949e-01 -2.18941599e-01 -2.54849434e-01 4.33767997e-02 2.28299111e-01 8.87718350e-02 -4.59483773e-01 -3.43494743e-01 -3.19818780e-02 1.60838619e-01 -2.65231460e-01 -1.32397234e-01 9.84388217e-03 1.27776578e-01] [-2.42154762e-01 -3.58745754e-01 -1.37148485e-01 -8.22050422e-02 -9.30232331e-02 -7.29834139e-02 -3.20581257e-01 -2.65381485e-01 -5.23638912e-02 2.69899577e-01 5.38298070e-01 2.02568799e-01 2.04806671e-01 7.78113678e-02 7.22701028e-02] [-7.77842462e-01 -6.71679854e-01 
-2.78910398e-01 1.03585333e-01 6.67486489e-02 5.52198067e-02 -1.15957409e-01 -1.63313419e-01 2.65596248e-02 2.01853752e-01 4.71415520e-01 9.51828435e-02 4.49597016e-02 7.92360604e-02 2.33803883e-01] [-5.65504849e-01 -4.74866748e-01 -1.03098765e-01 4.22488637e-02 -1.04813300e-01 -3.01230222e-01 -1.48691699e-01 2.97342658e-01 4.77570266e-01 2.78487176e-01 2.32232153e-01 4.63927947e-02 5.18636033e-02 -1.17538525e-02 -4.42617089e-02] [-2.62969226e-01 -1.00487798e-01 -1.25837743e-01 2.04710364e-01 6.17483519e-02 8.00571442e-02 7.10212514e-02 1.30563781e-01 2.52058983e-01 -7.43297338e-02 -4.71079610e-02 -4.70950715e-02 1.07939109e-01 1.69786945e-01 1.67873636e-01] [ 7.12057173e-01 5.65949500e-01 -2.07617674e-02 2.46033743e-01 1.01367138e-01 1.77834928e-01 -1.50748178e-01 1.91863254e-02 2.34106481e-01 -1.10208482e-01 -3.82916600e-01 -5.55095196e-01 -8.86931419e-02 -8.74995664e-02 -1.69747863e-02] [ 5.90862989e-01 5.30003309e-01 7.30582327e-02 2.60512412e-01 1.46422118e-01 6.07301235e-01 3.69214267e-02 9.01830792e-02 6.59847260e-03 8.44643787e-02 -7.43745714e-02 -3.17012727e-01 3.85557935e-02 -9.54331681e-02 1.97149396e-01] [ 6.12240024e-02 1.88148007e-01 2.25086182e-01 3.87912631e-01 4.77545381e-01 6.54554665e-01 2.80865848e-01 1.55662850e-01 -1.30767822e-01 1.74243585e-03 -1.01895452e-01 -3.31100345e-01 1.38447881e-01 -1.39356717e-01 2.10548565e-01] [-3.95009786e-01 -1.13272630e-01 4.14650768e-01 3.94540548e-01 5.54652452e-01 6.74942732e-01 4.52155620e-01 2.52888680e-01 -2.06176758e-01 6.39361069e-02 1.74257830e-01 1.83536902e-01 4.65010315e-01 5.97956590e-02 2.46554658e-01]] [[ 5.94137847e-01 6.45037472e-01 6.45514846e-01 6.44380629e-01 -3.27742696e-02 -7.41513908e-01 -7.11284161e-01 -7.52319396e-01 -4.40249890e-01 -9.04589832e-01 -5.85964024e-01 -7.94245481e-01 -7.74834394e-01 -5.49888074e-01 -3.45018387e-01] [ 6.96161091e-01 5.16426206e-01 3.60747010e-01 3.03004861e-01 5.29705808e-02 -5.72117805e-01 -4.97612268e-01 -4.87874717e-01 -2.56772578e-01 -6.45617545e-01 
-3.81532133e-01 -3.94941658e-01 -2.73791682e-02 4.50563952e-02 3.18260044e-01] [ 5.92830479e-01 3.50020528e-01 1.01962477e-01 1.51247844e-01 2.82040834e-01 -2.62097776e-01 -1.57951862e-01 -2.47862160e-01 -1.56990681e-02 -2.67963827e-01 -8.97811539e-03 9.68488827e-02 -1.50071247e-03 -3.28760594e-02 6.49576709e-02] [ 1.83980033e-01 -1.13990702e-01 -1.47148967e-03 -9.40673891e-03 6.06657624e-01 1.66020930e-01 1.34324700e-01 1.00286007e-02 -1.31271586e-01 -5.97000979e-02 -9.72208902e-02 4.17767167e-01 4.74146158e-01 1.37772724e-01 -4.45942283e-02] [-2.29405090e-01 -1.71320021e-01 1.99068338e-01 2.17468008e-01 3.96370351e-01 1.83103010e-01 -4.49804477e-02 -3.70133575e-03 -2.34702796e-01 1.75793067e-01 1.20126987e-02 2.92322218e-01 -7.88299739e-02 -4.35275495e-01 -6.94866836e-01] [-1.50111929e-01 -1.74819738e-01 2.11402569e-02 -1.81181312e-01 -7.26328930e-04 7.11489990e-02 -1.26782700e-01 -7.35374168e-02 -4.46866989e-01 -2.84991741e-01 -3.44602168e-01 1.84041128e-01 1.35061994e-01 -1.80793852e-01 -4.86166000e-01] [-3.88755232e-01 -3.15959930e-01 -1.75970495e-01 1.99171245e-01 1.13993965e-01 -1.30886748e-01 -4.39975619e-01 -4.04849678e-01 -3.87235522e-01 -2.85310328e-01 -8.36628675e-02 1.87269241e-01 9.52983126e-02 5.80586381e-02 -5.92192225e-02] [-4.82395142e-02 -1.27888545e-01 -1.58885688e-01 -2.57008336e-02 -1.08116798e-01 -4.13437754e-01 -3.14719409e-01 -5.29315412e-01 -4.33994502e-01 -6.88404739e-01 -2.95878738e-01 9.23293531e-02 2.58735158e-02 2.53565878e-01 -3.04725468e-02] [-4.14720513e-02 -2.23965272e-01 -2.14791507e-01 7.46258199e-02 2.00811550e-01 -2.88058668e-01 -3.20808828e-01 -4.43285108e-01 -2.80136585e-01 -3.61802459e-01 -3.97007316e-01 5.64230531e-02 -2.15100259e-01 2.84677953e-01 -1.40258804e-01] [ 3.16113174e-01 1.85891956e-01 -2.22608328e-01 -2.46412218e-01 -1.95485055e-01 -5.44852167e-02 2.01002091e-01 9.00133699e-02 -2.11165667e-01 -2.87501454e-01 -3.84152502e-01 3.34201977e-02 -2.39981234e-01 -6.73268735e-02 -3.90072733e-01] [-2.84679651e-01 
-1.30695596e-01 -2.71860421e-01 2.25478202e-01 3.33946407e-01 4.72048908e-01 4.75850224e-01 6.64136171e-01 5.88985942e-02 -1.54365227e-01 -5.59144437e-01 -2.18836740e-01 -4.13709402e-01 -5.02280414e-01 -5.03795147e-01] [-6.11283362e-01 -2.65595168e-01 -2.54513621e-01 -6.84801042e-02 -4.56501007e-01 -1.40653372e-01 3.52979541e-01 8.93966973e-01 4.49214458e-01 5.55816479e-02 -2.17134193e-01 -3.75080436e-01 -2.90650845e-01 -3.76093805e-01 -2.62589660e-02] [-4.32247996e-01 -3.64886880e-01 -9.10306498e-02 1.66081548e-01 2.19358370e-01 9.50500667e-02 3.58395964e-01 6.63710356e-01 6.33794963e-01 1.53497875e-01 -4.35857587e-02 -4.73617285e-01 -3.67731452e-01 -2.60918945e-01 2.80652344e-01] [-7.66298100e-02 -2.59541869e-01 -1.01573758e-01 -4.84249778e-02 5.46107702e-02 -2.32413560e-01 -2.42237821e-02 1.93865031e-01 5.65569818e-01 2.21299976e-01 4.12179589e-01 -2.36962046e-02 1.27243221e-01 2.09784433e-01 6.88002169e-01] [ 1.51684165e-01 -1.02905594e-01 1.06527209e-02 3.48167270e-01 6.90957785e-01 2.42895439e-01 2.53598634e-02 -9.18216482e-02 3.33710432e-01 2.30885193e-01 7.07107723e-01 2.71908551e-01 3.00844163e-01 1.54325649e-01 6.76684618e-01]]]]; ov_res: [[[[ 4.37329113e-01 3.31863374e-01 -6.23942800e-02 1.74433962e-01 2.00488523e-01 -1.58819228e-01 -2.98836917e-01 -1.16128169e-01 3.57203007e-01 3.71670038e-01 -1.80723801e-01 -5.08087456e-01 -6.67378366e-01 -4.89025593e-01 -6.06538951e-01] [ 6.40193284e-01 4.45328832e-01 2.84636825e-01 -1.78135242e-02 -1.11520715e-01 -2.14821801e-01 -8.49826336e-02 5.39081283e-02 2.42342681e-01 1.25504151e-01 -2.39282977e-02 -2.79366612e-01 -1.03392273e-01 -1.90981686e-01 -2.38781855e-01] [ 1.30746281e+00 7.47414052e-01 5.45278668e-01 -1.67219222e-01 1.43640965e-01 5.15798748e-01 8.01537097e-01 6.77581906e-01 4.13523883e-01 -2.93765329e-02 1.32770568e-01 1.73402995e-01 6.98288202e-01 3.67877811e-01 3.54599863e-01] [ 8.32214594e-01 4.33353812e-01 1.86187565e-01 -4.59727079e-01 -1.60847828e-01 2.89041162e-01 5.09903431e-01 2.70775110e-01 
7.94955045e-02 3.70567106e-02 1.96583524e-01 2.47560084e-01 6.86498344e-01 6.00209832e-01 7.57539690e-01] [ 7.70309687e-01 4.93700176e-01 5.87081052e-02 -1.97838038e-01 9.87342745e-02 4.11266387e-01 3.73032868e-01 -1.92213699e-01 -3.77712101e-01 -3.13271344e-01 -1.99756995e-01 6.87071979e-02 1.72417328e-01 4.59849834e-01 4.03066605e-01] [-1.69421230e-02 1.39460728e-01 2.01897264e-01 -1.35136679e-01 -2.13429987e-01 -2.50560850e-01 -2.86750924e-02 -3.99911970e-01 -4.95092720e-01 -3.33503067e-01 -3.80719692e-01 -1.38251439e-01 -8.37377682e-02 3.38369399e-01 3.67828727e-01] [ 2.82163352e-01 2.54195452e-01 6.99611008e-01 1.32407665e-01 -7.57449195e-02 -1.46906912e-01 2.50102103e-01 1.36320189e-01 -4.10872363e-02 -2.29643404e-01 -2.00661853e-01 -4.43798117e-02 2.77274121e-02 8.75382200e-02 -1.19970329e-01] [ 2.20882501e-02 -5.98007720e-03 7.46534526e-01 6.27199039e-02 5.71099902e-03 -1.24150358e-01 2.71454126e-01 3.29078406e-01 2.15162292e-01 2.69182950e-01 1.71617523e-01 2.17609823e-01 9.72911492e-02 7.37493578e-03 -5.85475080e-02] [-1.27138719e-01 -1.19702749e-01 5.76874197e-01 1.90515906e-01 1.80667952e-01 1.64125383e-01 1.64608553e-01 -3.66220735e-02 -1.00187786e-01 2.94031531e-01 4.48421478e-01 2.84494877e-01 -2.76322495e-02 -1.76592946e-01 -1.54041991e-01] [-8.30148607e-02 -1.31997406e-01 2.52234042e-01 -8.48947167e-02 1.58116207e-01 3.23652893e-01 2.34801799e-01 -4.56925221e-02 -1.71084568e-01 5.33821940e-01 6.92941546e-01 4.22824472e-01 5.88476807e-02 -6.37470856e-02 1.31160691e-01] [-3.24288130e-01 -4.36701089e-01 -1.19493872e-01 -2.14793950e-01 -2.07874253e-02 1.43108264e-01 1.33220673e-01 3.52063864e-01 3.69748443e-01 7.16660082e-01 5.13305545e-01 1.50693595e-01 -5.51064201e-02 -2.81549990e-01 -2.14457199e-01] [ 2.52768844e-01 -9.53018740e-02 1.57423109e-01 -1.29142068e-02 5.09557486e-01 3.76063049e-01 3.96197259e-01 4.26279426e-01 6.40788019e-01 6.80594862e-01 3.65165740e-01 1.59283638e-01 3.19917686e-02 -1.77127004e-01 -3.72900486e-01] [ 1.05537079e-01 
-6.36185110e-02 5.35345040e-02 2.37694345e-02 3.45458567e-01 9.05136317e-02 3.29951316e-01 2.11497158e-01 7.12309301e-01 2.57121503e-01 3.94357294e-02 -5.32331586e-01 -3.46390307e-01 -5.84943712e-01 -4.61369276e-01] [ 3.47450227e-01 3.91523987e-01 3.25976312e-01 3.10634464e-01 5.59353411e-01 5.82093358e-01 7.06797242e-01 1.35299759e-02 1.98230863e-01 -1.71254367e-01 2.07091987e-01 -3.54091823e-01 1.79962188e-01 -4.94338684e-02 4.06744719e-01] [ 1.78528298e-02 2.70981908e-01 5.29177301e-02 2.27178112e-01 9.60978791e-02 3.53425890e-01 5.12763083e-01 7.07479343e-02 5.72734587e-02 -3.42103243e-01 1.89703181e-01 -5.49513996e-01 1.91477060e-01 7.43790483e-03 8.70565474e-01]] [[-6.25620604e-01 -1.69406787e-01 4.18826818e-01 1.03879321e+00 9.98232841e-01 4.64564234e-01 -1.15191258e-01 -6.52872324e-01 2.79213876e-01 7.56265104e-01 6.98442161e-01 -4.99437422e-01 -1.28164673e+00 -2.88989931e-01 2.38806218e-01] [-4.10755396e-01 -2.64618456e-01 1.96264848e-01 6.79235220e-01 7.51414835e-01 3.86120021e-01 -1.64081186e-01 -5.31653881e-01 2.36988273e-02 4.10257667e-01 2.75321752e-01 -5.77918768e-01 -1.19842649e+00 -2.16869667e-01 3.37576509e-01] [-7.13496134e-02 2.47713029e-02 2.14459002e-01 5.14703155e-01 4.70877588e-01 4.39382374e-01 4.96625900e-03 -2.70805806e-01 -1.98891461e-01 -5.50460406e-02 -5.42858317e-02 -2.12183356e-01 -6.07327163e-01 -1.22215778e-01 -8.22199285e-02] [-1.10218592e-01 -4.29125763e-02 7.96328299e-03 2.20866814e-01 3.30779552e-01 2.97173619e-01 -1.38602436e-01 -3.36531907e-01 -5.82620323e-01 -3.75261128e-01 -1.89313844e-01 1.30168006e-01 1.08859561e-01 -1.54532135e-01 -2.95812458e-01] [-2.43542239e-01 -7.91677535e-02 -1.01208258e-02 -8.01673234e-02 9.41276029e-02 2.29743898e-01 6.17835708e-02 -3.31465304e-01 -5.47303915e-01 -3.62101793e-01 -1.76333308e-01 3.07142735e-03 1.32688046e-01 -3.22516501e-01 -5.95099390e-01] [-2.78536022e-01 -1.62572473e-01 4.69707465e-03 -2.76113719e-01 -2.95843985e-02 2.73927838e-01 3.19156975e-01 -1.52774617e-01 -4.16932076e-01 
-2.37144440e-01 3.31901722e-02 -4.94930856e-02 5.13965003e-02 -3.28888386e-01 -3.66663486e-01] [-4.47329909e-01 -5.03429949e-01 -2.18941599e-01 -2.54849434e-01 4.33767997e-02 2.28299111e-01 8.87718350e-02 -4.59483773e-01 -3.43494743e-01 -3.19818780e-02 1.60838619e-01 -2.65231460e-01 -1.32397234e-01 9.84388217e-03 1.27776578e-01] [-2.42154762e-01 -3.58745754e-01 -1.37148485e-01 -8.22050422e-02 -9.30232331e-02 -7.29834139e-02 -3.20581257e-01 -2.65381485e-01 -5.23638912e-02 2.69899577e-01 5.38298070e-01 2.02568799e-01 2.04806671e-01 7.78113678e-02 7.22701028e-02] [-7.77842462e-01 -6.71679854e-01 -2.78910398e-01 1.03585333e-01 6.67486489e-02 5.52198067e-02 -1.15957409e-01 -1.63313419e-01 2.65596248e-02 2.01853752e-01 4.71415520e-01 9.51828435e-02 4.49597016e-02 7.92360604e-02 2.33803883e-01] [-5.65504849e-01 -4.74866748e-01 -1.03098765e-01 4.22488637e-02 -1.04813300e-01 -3.01230222e-01 -1.48691699e-01 2.97342658e-01 4.77570266e-01 2.78487176e-01 2.32232153e-01 4.63927947e-02 5.18636033e-02 -1.17538525e-02 -4.42617089e-02] [-2.62969226e-01 -1.00487798e-01 -1.25837743e-01 2.04710364e-01 6.17483519e-02 8.00571442e-02 7.10212514e-02 1.30563781e-01 2.52058983e-01 -7.43297338e-02 -4.71079610e-02 -4.70950715e-02 1.07939109e-01 1.69786945e-01 1.67873636e-01] [ 7.12057173e-01 5.65949500e-01 -2.07617674e-02 2.46033743e-01 1.01367138e-01 1.77834928e-01 -1.50748178e-01 1.91863254e-02 2.34106481e-01 -1.10208482e-01 -3.82916600e-01 -5.55095196e-01 -8.86931419e-02 -8.74995664e-02 -1.69747863e-02] [ 5.90862989e-01 5.30003309e-01 7.30582327e-02 2.60512412e-01 1.46422118e-01 6.07301235e-01 3.69214267e-02 9.01830792e-02 6.59847260e-03 8.44643787e-02 -7.43745714e-02 -3.17012727e-01 3.85557935e-02 -9.54331681e-02 1.97149396e-01] [ 6.12240024e-02 1.88148007e-01 2.25086182e-01 3.87912631e-01 4.77545381e-01 6.54554665e-01 2.80865848e-01 1.55662850e-01 -1.30767822e-01 1.74243585e-03 -1.01895452e-01 -3.31100345e-01 1.38447881e-01 -1.39356717e-01 2.10548565e-01] [-3.95009786e-01 -1.13272630e-01 
4.14650768e-01 3.94540548e-01 5.54652452e-01 6.74942732e-01 4.52155620e-01 2.52888680e-01 -2.06176758e-01 6.39361069e-02 1.74257830e-01 1.83536902e-01 4.65010315e-01 5.97956590e-02 2.46554658e-01]] [[ 5.94137847e-01 6.45037472e-01 6.45514846e-01 6.44380629e-01 -3.27742696e-02 -7.41513908e-01 -7.11284161e-01 -7.52319396e-01 -4.40249890e-01 -9.04589832e-01 -5.85964024e-01 -7.94245481e-01 -7.74834394e-01 -5.49888074e-01 -3.45018387e-01] [ 6.96161091e-01 5.16426206e-01 3.60747010e-01 3.03004861e-01 5.29705808e-02 -5.72117805e-01 -4.97612268e-01 -4.87874717e-01 -2.56772578e-01 -6.45617545e-01 -3.81532133e-01 -3.94941658e-01 -2.73791682e-02 4.50563952e-02 3.18260044e-01] [ 5.92830479e-01 3.50020528e-01 1.01962477e-01 1.51247844e-01 2.82040834e-01 -2.62097776e-01 -1.57951862e-01 -2.47862160e-01 -1.56990681e-02 -2.67963827e-01 -8.97811539e-03 9.68488827e-02 -1.50071247e-03 -3.28760594e-02 6.49576709e-02] [ 1.83980033e-01 -1.13990702e-01 -1.47148967e-03 -9.40673891e-03 6.06657624e-01 1.66020930e-01 1.34324700e-01 1.00286007e-02 -1.31271586e-01 -5.97000979e-02 -9.72208902e-02 4.17767167e-01 4.74146158e-01 1.37772724e-01 -4.45942283e-02] [-2.29405090e-01 -1.71320021e-01 1.99068338e-01 2.17468008e-01 3.96370351e-01 1.83103010e-01 -4.49804477e-02 -3.70133575e-03 -2.34702796e-01 1.75793067e-01 1.20126987e-02 2.92322218e-01 -7.88299739e-02 -4.35275495e-01 -6.94866836e-01] [-1.50111929e-01 -1.74819738e-01 2.11402569e-02 -1.81181312e-01 -7.26328930e-04 7.11489990e-02 -1.26782700e-01 -7.35374168e-02 -4.46866989e-01 -2.84991741e-01 -3.44602168e-01 1.84041128e-01 1.35061994e-01 -1.80793852e-01 -4.86166000e-01] [-3.88755232e-01 -3.15959930e-01 -1.75970495e-01 1.99171245e-01 1.13993965e-01 -1.30886748e-01 -4.39975619e-01 -4.04849678e-01 -3.87235522e-01 -2.85310328e-01 -8.36628675e-02 1.87269241e-01 9.52983126e-02 5.80586381e-02 -5.92192225e-02] [-4.82395142e-02 -1.27888545e-01 -1.58885688e-01 -2.57008336e-02 -1.08116798e-01 -4.13437754e-01 -3.14719409e-01 -5.29315412e-01 -4.33994502e-01 
-6.88404739e-01 -2.95878738e-01 9.23293531e-02 2.58735158e-02 2.53565878e-01 -3.04725468e-02] [-4.14720513e-02 -2.23965272e-01 -2.14791507e-01 7.46258199e-02 2.00811550e-01 -2.88058668e-01 -3.20808828e-01 -4.43285108e-01 -2.80136585e-01 -3.61802459e-01 -3.97007316e-01 5.64230531e-02 -2.15100259e-01 2.84677953e-01 -1.40258804e-01] [ 3.16113174e-01 1.85891956e-01 -2.22608328e-01 -2.46412218e-01 -1.95485055e-01 -5.44852167e-02 2.01002091e-01 9.00133699e-02 -2.11165667e-01 -2.87501454e-01 -3.84152502e-01 3.34201977e-02 -2.39981234e-01 -6.73268735e-02 -3.90072733e-01] [-2.84679651e-01 -1.30695596e-01 -2.71860421e-01 2.25478202e-01 3.33946407e-01 4.72048908e-01 4.75850224e-01 6.64136171e-01 5.88985942e-02 -1.54365227e-01 -5.59144437e-01 -2.18836740e-01 -4.13709402e-01 -5.02280414e-01 -5.03795147e-01] [-6.11283362e-01 -2.65595168e-01 -2.54513621e-01 -6.84801042e-02 -4.56501007e-01 -1.40653372e-01 3.52979541e-01 8.93966973e-01 4.49214458e-01 5.55816479e-02 -2.17134193e-01 -3.75080436e-01 -2.90650845e-01 -3.76093805e-01 -2.62589660e-02] [-4.32247996e-01 -3.64886880e-01 -9.10306498e-02 1.66081548e-01 2.19358370e-01 9.50500667e-02 3.58395964e-01 6.63710356e-01 6.33794963e-01 1.53497875e-01 -4.35857587e-02 -4.73617285e-01 -3.67731452e-01 -2.60918945e-01 2.80652344e-01] [-7.66298100e-02 -2.59541869e-01 -1.01573758e-01 -4.84249778e-02 5.46107702e-02 -2.32413560e-01 -2.42237821e-02 1.93865031e-01 5.65569818e-01 2.21299976e-01 4.12179589e-01 -2.36962046e-02 1.27243221e-01 2.09784433e-01 6.88002169e-01] [ 1.51684165e-01 -1.02905594e-01 1.06527209e-02 3.48167270e-01 6.90957785e-01 2.42895439e-01 2.53598634e-02 -9.18216482e-02 3.33710432e-01 2.30885193e-01 7.07107723e-01 2.71908551e-01 3.00844163e-01 1.54325649e-01 6.76684618e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [0, 1]} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5694.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[0, 1]]() %5 : int[] = prim::Constant[value=[1, 1]]() %6 : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) fw_re: [[[[ 4.07840043e-01 3.51128429e-01 6.35377988e-02 1.50727987e-01 1.68315731e-02 -2.36086637e-01 -3.59591424e-01 -2.82273531e-01 2.44628880e-02 5.63408315e-01 9.69314948e-02 1.55755477e-02 -5.09951174e-01 1.68291256e-01 2.73098141e-01] [-5.15446961e-01 -2.55833119e-01 -9.39744487e-02 1.66087925e-01 -9.76923294e-03 -3.28715980e-01 -4.64677453e-01 -1.29083142e-01 -1.27600372e-01 4.60193217e-01 -4.07616682e-02 5.02693579e-02 -6.79896772e-01 -1.96154639e-01 -1.21109985e-01] [-9.15109217e-01 -4.29005355e-01 -1.15401216e-01 4.20864761e-01 1.08645618e-01 -4.96394843e-01 -4.13776457e-01 -2.92065945e-02 7.42931813e-02 2.39037409e-01 -2.53533050e-02 1.12427860e-01 -2.98272312e-01 -1.48258079e-02 1.26866862e-01] [-1.24049103e+00 -3.92410487e-01 1.14674792e-01 6.93641543e-01 2.38885298e-01 -9.79375392e-02 -1.90782204e-01 2.41959020e-01 5.83385415e-02 9.59937871e-02 -1.74196824e-01 2.42597722e-02 -5.05529232e-02 3.13856989e-01 7.28578866e-01] [-5.39625347e-01 3.64823937e-02 3.53613138e-01 6.52221382e-01 2.95331508e-01 1.25985339e-01 -1.65866017e-01 -1.76693592e-02 1.38525502e-03 3.55448993e-03 -1.03881963e-01 1.43307760e-01 3.22735429e-01 5.86907506e-01 8.00498426e-01] [-1.09109282e-03 -2.86827087e-02 -3.90606858e-02 -1.23364501e-01 7.92682469e-02 1.86085269e-01 1.06719747e-01 -7.50643313e-02 -3.49898815e-01 -3.15558285e-01 -2.56522536e-01 2.25022554e-01 4.69170034e-01 5.31049848e-01 
4.73317474e-01] [ 2.20471606e-01 1.67880626e-03 -3.67239177e-01 -6.48979485e-01 -3.58934045e-01 -4.32638340e-02 2.89777696e-01 -5.42148612e-02 -2.99168915e-01 -2.09617227e-01 2.56075393e-02 2.90337741e-01 1.11655630e-01 1.15871035e-01 -5.51352918e-04] [ 5.75967431e-01 2.51916945e-01 -2.46026382e-01 -3.95137429e-01 -1.80869132e-01 -3.62991542e-01 1.09464817e-01 -2.02314302e-01 -2.43606254e-01 -2.34453857e-01 -9.89285409e-02 -1.68895334e-01 -4.84067500e-01 -3.51343930e-01 -1.83006525e-01] [ 6.72390401e-01 3.38232934e-01 -3.98363739e-01 -2.83329427e-01 -1.20520152e-01 -2.96553876e-02 1.74146920e-01 -1.87075436e-02 -1.71395600e-01 -7.93441832e-02 -6.68743998e-02 -8.49901587e-02 -5.38833976e-01 -1.24088064e-01 1.43198207e-01] [ 7.54618466e-01 3.99111360e-01 -1.99079618e-01 2.04217434e-03 5.86655065e-02 7.91629255e-02 -5.15526421e-02 2.45793071e-02 -2.73373485e-01 -1.95911467e-01 -3.95775020e-01 -3.85865420e-01 -4.10850227e-01 -7.27096945e-02 4.58613157e-01] [ 2.68486142e-01 5.08819148e-02 -1.88789338e-01 9.32432413e-02 1.29851133e-01 1.53881729e-01 -6.72325939e-02 1.78586245e-01 -1.52891427e-01 1.83781590e-02 -1.69801787e-01 -4.78069745e-02 -1.61200255e-01 -1.60150275e-01 7.40655437e-02] [-2.01340199e-01 -2.80814052e-01 -1.53451979e-01 2.12070942e-01 3.18837732e-01 2.56609201e-01 -4.91755009e-02 1.63694680e-01 1.82295181e-02 1.32405415e-01 -3.11076581e-01 -5.02435207e-01 -8.85585248e-01 -8.45492244e-01 -6.40891492e-01] [-5.70935190e-01 -6.30624235e-01 -2.03210399e-01 -2.01300085e-01 1.39607534e-01 -8.49071443e-02 -7.67894462e-02 -1.47725046e-01 -1.17572546e-01 8.73141661e-02 -1.84296727e-01 -3.12995553e-01 -7.26157188e-01 -7.40771115e-01 -6.75439894e-01]] [[-8.26731980e-01 -7.01610565e-01 -2.35660180e-01 -9.04398561e-02 1.86461747e-01 -1.55727208e-01 -1.43797114e-01 -4.91330415e-01 -4.54781592e-01 -2.71864951e-01 2.13107407e-01 4.60727811e-01 3.76867503e-01 4.27361459e-01 4.48320508e-01] [-8.05621684e-01 -6.31073952e-01 -1.87114164e-01 -3.51000309e-01 -4.28870708e-01 
-5.26623011e-01 -2.06552804e-01 -1.54578105e-01 -2.13321567e-01 -6.16206340e-02 3.62998188e-01 6.84040785e-01 5.21606803e-01 5.29080808e-01 4.30292100e-01] [-3.62736464e-01 -4.06609803e-01 -3.66192847e-01 -3.07323813e-01 -6.26431584e-01 -4.99724656e-01 -3.82294953e-01 9.83002931e-02 -2.06231680e-02 -9.96582806e-02 9.15714353e-02 1.35744989e-01 -4.83871624e-02 -6.72168517e-03 3.53956744e-02] [ 1.60569981e-01 4.83877994e-02 1.87142432e-01 -5.23759983e-02 -8.10628757e-02 7.67454281e-02 3.94371629e-01 3.72932017e-01 1.34387150e-01 1.24344297e-01 8.62518977e-03 -4.40530092e-01 -5.53468406e-01 -2.85901219e-01 1.54339358e-01] [ 2.17298627e-01 -1.21875018e-01 -1.36636257e-01 -1.95416898e-01 1.73206031e-01 2.59180814e-01 4.14117694e-01 2.38539912e-02 -3.26053709e-01 -3.70330870e-01 -2.20543370e-01 -2.96071887e-01 -3.05052012e-01 -3.32985401e-01 -9.99091100e-03] [ 1.51834503e-01 1.26988422e-02 6.18602894e-02 1.61401093e-01 3.12757850e-01 2.38096476e-01 2.44673595e-01 -1.70261905e-01 -4.85463619e-01 -4.04189825e-01 -1.33784562e-01 1.10887818e-01 1.20020896e-01 -8.87396336e-02 -1.25316873e-01] [-1.85332134e-01 -2.16384336e-01 -9.91591513e-02 8.96783695e-02 9.66642201e-02 -2.03543395e-01 -3.69862437e-01 -2.99457699e-01 -3.35092545e-01 -8.31997022e-02 1.36827946e-01 2.80508786e-01 1.70844316e-01 -8.27637389e-02 3.93192582e-02] [-4.28506643e-01 -2.80425608e-01 -1.85025275e-01 7.15875104e-02 -2.08319556e-02 -1.74254119e-01 -2.45105848e-01 2.76111275e-01 2.72555918e-01 1.91514835e-01 6.46649487e-03 7.35287219e-02 5.91178611e-02 -2.11056992e-01 -2.06575051e-01] [ 3.23112607e-02 2.16919735e-01 3.40750739e-02 2.37128809e-01 1.00034490e-01 7.83718936e-03 -2.26685200e-02 5.05747914e-01 9.85697508e-01 8.49357843e-01 2.76247442e-01 -1.07246548e-01 -4.33164924e-01 -3.82494956e-01 -4.35613900e-01] [ 4.37196016e-01 5.09859145e-01 1.90836608e-01 4.51288491e-01 1.29692882e-01 6.14836551e-02 -6.55726865e-02 5.85293531e-01 1.22008705e+00 9.33500826e-01 2.80334830e-01 -1.16020121e-01 
-5.56425691e-01 -4.76620793e-01 -7.49934256e-01] [ 3.77277941e-01 4.47979599e-01 3.89116764e-01 7.71155357e-01 5.09103119e-01 2.77852237e-01 5.49432710e-02 5.56708217e-01 1.19821477e+00 1.07815409e+00 5.20910025e-01 -4.99740317e-02 -7.87872434e-01 -6.52916491e-01 -8.10057580e-01] [ 1.40422747e-01 1.83893472e-01 2.15281859e-01 4.34199750e-01 4.79180545e-01 3.55485678e-01 3.21481556e-01 5.21045268e-01 6.51082277e-01 4.34843004e-01 3.35653394e-01 6.27153888e-02 -3.25222373e-01 -6.63700521e-01 -6.74541295e-01] [ 2.02994600e-01 -4.66820411e-02 -1.79523200e-01 1.05359584e-01 5.62565744e-01 5.47639132e-01 6.94532037e-01 4.06571835e-01 4.19318795e-01 1.10494494e-02 1.80683240e-01 -1.45760402e-01 -2.37163678e-01 -5.01043677e-01 -3.59365225e-01]] [[-3.82379383e-01 -4.33108777e-01 -1.83718741e-01 -4.03927594e-01 -1.89534530e-01 -8.64070207e-02 1.66506216e-01 3.36948752e-01 2.71754205e-01 2.85639584e-01 5.14938012e-02 1.54392682e-02 -1.25813186e-01 -3.41529250e-01 -5.13670146e-01] [-5.97027004e-01 -3.10591191e-01 2.00230896e-01 -3.08929384e-01 -5.00100672e-01 -5.93864799e-01 -4.51925159e-01 -1.66981936e-01 -2.28433922e-01 -1.53237050e-02 -3.24962109e-01 -1.70440719e-01 -1.44379124e-01 -5.29060811e-02 -1.77180812e-01] [-6.27486825e-01 -4.70390528e-01 8.51563513e-02 -2.26082325e-01 -2.42569581e-01 -3.63499194e-01 -3.86074841e-01 -1.02131419e-01 -7.73069588e-03 2.16187179e-01 4.47539538e-02 -2.42118254e-01 -2.16252804e-01 -4.91374761e-01 -3.79098386e-01] [-2.37080559e-01 -1.90771401e-01 -5.84123395e-02 -4.53716397e-01 -4.20830607e-01 -4.38206583e-01 -1.02654457e-01 1.29648253e-01 4.66342233e-02 1.35674074e-01 -3.47827017e-01 -3.14154625e-01 -5.81926167e-01 -5.18277049e-01 -5.38297772e-01] [ 2.65421364e-02 -1.63419828e-01 -3.39442521e-01 -2.54364341e-01 1.35996729e-01 2.46166632e-01 1.84247941e-01 2.63232470e-01 2.56056368e-01 4.72371727e-01 -2.63163537e-01 -3.99720222e-01 -7.40418315e-01 -7.31090188e-01 -7.75619984e-01] [ 1.44928321e-01 -1.18007511e-01 -3.89195263e-01 
-3.98938060e-01 -1.75343871e-01 -1.82075426e-01 -1.35717809e-01 -2.13161018e-02 -1.06500918e-02 -6.77755773e-02 -5.83627403e-01 -4.93546486e-01 -4.14413512e-01 2.73696855e-02 1.59064218e-01] [ 1.81915596e-01 -2.83510923e-01 -6.36149585e-01 -4.35416937e-01 -1.05778538e-01 2.74387859e-02 -2.90206850e-01 -9.42220688e-02 3.28535810e-02 4.10539024e-02 -7.76038170e-02 -2.74049640e-01 1.97420688e-03 7.39045069e-02 4.49327022e-01] [-8.94694850e-02 -5.40296197e-01 -9.11319435e-01 -7.00314939e-01 -2.66576648e-01 -9.64096785e-02 -9.48134959e-02 1.97088048e-02 -2.18290091e-02 -1.03266109e-02 3.42259288e-01 3.40517908e-01 3.78033966e-01 2.47279331e-01 5.76302767e-01] [ 1.57372251e-01 -3.44852865e-01 -3.97169113e-01 -6.06943846e-01 -3.04975770e-02 -2.49787495e-02 -2.52891388e-02 4.26255204e-02 -2.62391329e-01 1.13896988e-02 2.40533769e-01 3.76195788e-01 3.50836158e-01 2.23889858e-01 5.41814625e-01] [-5.77326000e-01 -4.56332326e-01 -2.44242460e-01 -5.66116452e-01 -4.58835006e-01 -2.15832412e-01 8.01683515e-02 4.15849946e-02 -3.92908514e-01 -7.81348050e-02 2.46085718e-01 3.82546633e-01 3.38647425e-01 3.80059421e-01 6.14098847e-01] [-5.64784825e-01 -4.41140026e-01 -2.40590885e-01 -6.87260866e-01 -9.70289886e-01 -5.80992877e-01 -3.43567193e-01 -6.85551986e-02 -3.09366226e-01 -1.80509269e-01 6.43236935e-02 -1.47514790e-01 2.48995334e-01 3.81734371e-01 7.73517311e-01] [-8.35765600e-01 -3.90304506e-01 -4.52028066e-01 -4.72220302e-01 -9.81729746e-01 -1.66154936e-01 3.30740213e-02 2.54761726e-01 -1.68075413e-01 -1.97839662e-01 7.89417187e-04 -2.93952953e-02 1.44242108e-01 1.43247306e-01 2.65068889e-01] [-1.87074780e-01 -1.35812208e-01 -3.72334808e-01 -3.03828984e-01 -4.92953837e-01 -5.40665202e-02 1.71985701e-01 5.25761068e-01 2.61519551e-01 1.30107984e-01 -2.70967484e-02 2.23299470e-02 1.40651047e-01 -1.21489363e-02 -5.36743784e-03]]]]; ov_res: [[[[ 4.07840043e-01 3.51128429e-01 6.35377988e-02 1.50727987e-01 1.68315731e-02 -2.36086637e-01 -3.59591424e-01 -2.82273531e-01 2.44628880e-02 
5.63408315e-01 9.69314948e-02 1.55755477e-02 -5.09951174e-01 1.68291256e-01 2.73098141e-01] [-5.15446961e-01 -2.55833119e-01 -9.39744487e-02 1.66087925e-01 -9.76923294e-03 -3.28715980e-01 -4.64677453e-01 -1.29083142e-01 -1.27600372e-01 4.60193217e-01 -4.07616682e-02 5.02693579e-02 -6.79896772e-01 -1.96154639e-01 -1.21109985e-01] [-9.15109217e-01 -4.29005355e-01 -1.15401216e-01 4.20864761e-01 1.08645618e-01 -4.96394843e-01 -4.13776457e-01 -2.92065945e-02 7.42931813e-02 2.39037409e-01 -2.53533050e-02 1.12427860e-01 -2.98272312e-01 -1.48258079e-02 1.26866862e-01] [-1.24049103e+00 -3.92410487e-01 1.14674792e-01 6.93641543e-01 2.38885298e-01 -9.79375392e-02 -1.90782204e-01 2.41959020e-01 5.83385415e-02 9.59937871e-02 -1.74196824e-01 2.42597722e-02 -5.05529232e-02 3.13856989e-01 7.28578866e-01] [-5.39625347e-01 3.64823937e-02 3.53613138e-01 6.52221382e-01 2.95331508e-01 1.25985339e-01 -1.65866017e-01 -1.76693592e-02 1.38525502e-03 3.55448993e-03 -1.03881963e-01 1.43307760e-01 3.22735429e-01 5.86907506e-01 8.00498426e-01] [-1.09109282e-03 -2.86827087e-02 -3.90606858e-02 -1.23364501e-01 7.92682469e-02 1.86085269e-01 1.06719747e-01 -7.50643313e-02 -3.49898815e-01 -3.15558285e-01 -2.56522536e-01 2.25022554e-01 4.69170034e-01 5.31049848e-01 4.73317474e-01] [ 2.20471606e-01 1.67880626e-03 -3.67239177e-01 -6.48979485e-01 -3.58934045e-01 -4.32638340e-02 2.89777696e-01 -5.42148612e-02 -2.99168915e-01 -2.09617227e-01 2.56075393e-02 2.90337741e-01 1.11655630e-01 1.15871035e-01 -5.51352918e-04] [ 5.75967431e-01 2.51916945e-01 -2.46026382e-01 -3.95137429e-01 -1.80869132e-01 -3.62991542e-01 1.09464817e-01 -2.02314302e-01 -2.43606254e-01 -2.34453857e-01 -9.89285409e-02 -1.68895334e-01 -4.84067500e-01 -3.51343930e-01 -1.83006525e-01] [ 6.72390401e-01 3.38232934e-01 -3.98363739e-01 -2.83329427e-01 -1.20520152e-01 -2.96553876e-02 1.74146920e-01 -1.87075436e-02 -1.71395600e-01 -7.93441832e-02 -6.68743998e-02 -8.49901587e-02 -5.38833976e-01 -1.24088064e-01 1.43198207e-01] [ 7.54618466e-01 
3.99111360e-01 -1.99079618e-01 2.04217434e-03 5.86655065e-02 7.91629255e-02 -5.15526421e-02 2.45793071e-02 -2.73373485e-01 -1.95911467e-01 -3.95775020e-01 -3.85865420e-01 -4.10850227e-01 -7.27096945e-02 4.58613157e-01] [ 2.68486142e-01 5.08819148e-02 -1.88789338e-01 9.32432413e-02 1.29851133e-01 1.53881729e-01 -6.72325939e-02 1.78586245e-01 -1.52891427e-01 1.83781590e-02 -1.69801787e-01 -4.78069745e-02 -1.61200255e-01 -1.60150275e-01 7.40655437e-02] [-2.01340199e-01 -2.80814052e-01 -1.53451979e-01 2.12070942e-01 3.18837732e-01 2.56609201e-01 -4.91755009e-02 1.63694680e-01 1.82295181e-02 1.32405415e-01 -3.11076581e-01 -5.02435207e-01 -8.85585248e-01 -8.45492244e-01 -6.40891492e-01] [-5.70935190e-01 -6.30624235e-01 -2.03210399e-01 -2.01300085e-01 1.39607534e-01 -8.49071443e-02 -7.67894462e-02 -1.47725046e-01 -1.17572546e-01 8.73141661e-02 -1.84296727e-01 -3.12995553e-01 -7.26157188e-01 -7.40771115e-01 -6.75439894e-01]] [[-8.26731980e-01 -7.01610565e-01 -2.35660180e-01 -9.04398561e-02 1.86461747e-01 -1.55727208e-01 -1.43797114e-01 -4.91330415e-01 -4.54781592e-01 -2.71864951e-01 2.13107407e-01 4.60727811e-01 3.76867503e-01 4.27361459e-01 4.48320508e-01] [-8.05621684e-01 -6.31073952e-01 -1.87114164e-01 -3.51000309e-01 -4.28870708e-01 -5.26623011e-01 -2.06552804e-01 -1.54578105e-01 -2.13321567e-01 -6.16206340e-02 3.62998188e-01 6.84040785e-01 5.21606803e-01 5.29080808e-01 4.30292100e-01] [-3.62736464e-01 -4.06609803e-01 -3.66192847e-01 -3.07323813e-01 -6.26431584e-01 -4.99724656e-01 -3.82294953e-01 9.83002931e-02 -2.06231680e-02 -9.96582806e-02 9.15714353e-02 1.35744989e-01 -4.83871624e-02 -6.72168517e-03 3.53956744e-02] [ 1.60569981e-01 4.83877994e-02 1.87142432e-01 -5.23759983e-02 -8.10628757e-02 7.67454281e-02 3.94371629e-01 3.72932017e-01 1.34387150e-01 1.24344297e-01 8.62518977e-03 -4.40530092e-01 -5.53468406e-01 -2.85901219e-01 1.54339358e-01] [ 2.17298627e-01 -1.21875018e-01 -1.36636257e-01 -1.95416898e-01 1.73206031e-01 2.59180814e-01 4.14117694e-01 
2.38539912e-02 -3.26053709e-01 -3.70330870e-01 -2.20543370e-01 -2.96071887e-01 -3.05052012e-01 -3.32985401e-01 -9.99091100e-03] [ 1.51834503e-01 1.26988422e-02 6.18602894e-02 1.61401093e-01 3.12757850e-01 2.38096476e-01 2.44673595e-01 -1.70261905e-01 -4.85463619e-01 -4.04189825e-01 -1.33784562e-01 1.10887818e-01 1.20020896e-01 -8.87396336e-02 -1.25316873e-01] [-1.85332134e-01 -2.16384336e-01 -9.91591513e-02 8.96783695e-02 9.66642201e-02 -2.03543395e-01 -3.69862437e-01 -2.99457699e-01 -3.35092545e-01 -8.31997022e-02 1.36827946e-01 2.80508786e-01 1.70844316e-01 -8.27637389e-02 3.93192582e-02] [-4.28506643e-01 -2.80425608e-01 -1.85025275e-01 7.15875104e-02 -2.08319556e-02 -1.74254119e-01 -2.45105848e-01 2.76111275e-01 2.72555918e-01 1.91514835e-01 6.46649487e-03 7.35287219e-02 5.91178611e-02 -2.11056992e-01 -2.06575051e-01] [ 3.23112607e-02 2.16919735e-01 3.40750739e-02 2.37128809e-01 1.00034490e-01 7.83718936e-03 -2.26685200e-02 5.05747914e-01 9.85697508e-01 8.49357843e-01 2.76247442e-01 -1.07246548e-01 -4.33164924e-01 -3.82494956e-01 -4.35613900e-01] [ 4.37196016e-01 5.09859145e-01 1.90836608e-01 4.51288491e-01 1.29692882e-01 6.14836551e-02 -6.55726865e-02 5.85293531e-01 1.22008705e+00 9.33500826e-01 2.80334830e-01 -1.16020121e-01 -5.56425691e-01 -4.76620793e-01 -7.49934256e-01] [ 3.77277941e-01 4.47979599e-01 3.89116764e-01 7.71155357e-01 5.09103119e-01 2.77852237e-01 5.49432710e-02 5.56708217e-01 1.19821477e+00 1.07815409e+00 5.20910025e-01 -4.99740317e-02 -7.87872434e-01 -6.52916491e-01 -8.10057580e-01] [ 1.40422747e-01 1.83893472e-01 2.15281859e-01 4.34199750e-01 4.79180545e-01 3.55485678e-01 3.21481556e-01 5.21045268e-01 6.51082277e-01 4.34843004e-01 3.35653394e-01 6.27153888e-02 -3.25222373e-01 -6.63700521e-01 -6.74541295e-01] [ 2.02994600e-01 -4.66820411e-02 -1.79523200e-01 1.05359584e-01 5.62565744e-01 5.47639132e-01 6.94532037e-01 4.06571835e-01 4.19318795e-01 1.10494494e-02 1.80683240e-01 -1.45760402e-01 -2.37163678e-01 -5.01043677e-01 -3.59365225e-01]] 
[[-3.82379383e-01 -4.33108777e-01 -1.83718741e-01 -4.03927594e-01 -1.89534530e-01 -8.64070207e-02 1.66506216e-01 3.36948752e-01 2.71754205e-01 2.85639584e-01 5.14938012e-02 1.54392682e-02 -1.25813186e-01 -3.41529250e-01 -5.13670146e-01] [-5.97027004e-01 -3.10591191e-01 2.00230896e-01 -3.08929384e-01 -5.00100672e-01 -5.93864799e-01 -4.51925159e-01 -1.66981936e-01 -2.28433922e-01 -1.53237050e-02 -3.24962109e-01 -1.70440719e-01 -1.44379124e-01 -5.29060811e-02 -1.77180812e-01] [-6.27486825e-01 -4.70390528e-01 8.51563513e-02 -2.26082325e-01 -2.42569581e-01 -3.63499194e-01 -3.86074841e-01 -1.02131419e-01 -7.73069588e-03 2.16187179e-01 4.47539538e-02 -2.42118254e-01 -2.16252804e-01 -4.91374761e-01 -3.79098386e-01] [-2.37080559e-01 -1.90771401e-01 -5.84123395e-02 -4.53716397e-01 -4.20830607e-01 -4.38206583e-01 -1.02654457e-01 1.29648253e-01 4.66342233e-02 1.35674074e-01 -3.47827017e-01 -3.14154625e-01 -5.81926167e-01 -5.18277049e-01 -5.38297772e-01] [ 2.65421364e-02 -1.63419828e-01 -3.39442521e-01 -2.54364341e-01 1.35996729e-01 2.46166632e-01 1.84247941e-01 2.63232470e-01 2.56056368e-01 4.72371727e-01 -2.63163537e-01 -3.99720222e-01 -7.40418315e-01 -7.31090188e-01 -7.75619984e-01] [ 1.44928321e-01 -1.18007511e-01 -3.89195263e-01 -3.98938060e-01 -1.75343871e-01 -1.82075426e-01 -1.35717809e-01 -2.13161018e-02 -1.06500918e-02 -6.77755773e-02 -5.83627403e-01 -4.93546486e-01 -4.14413512e-01 2.73696855e-02 1.59064218e-01] [ 1.81915596e-01 -2.83510923e-01 -6.36149585e-01 -4.35416937e-01 -1.05778538e-01 2.74387859e-02 -2.90206850e-01 -9.42220688e-02 3.28535810e-02 4.10539024e-02 -7.76038170e-02 -2.74049640e-01 1.97420688e-03 7.39045069e-02 4.49327022e-01] [-8.94694850e-02 -5.40296197e-01 -9.11319435e-01 -7.00314939e-01 -2.66576648e-01 -9.64096785e-02 -9.48134959e-02 1.97088048e-02 -2.18290091e-02 -1.03266109e-02 3.42259288e-01 3.40517908e-01 3.78033966e-01 2.47279331e-01 5.76302767e-01] [ 1.57372251e-01 -3.44852865e-01 -3.97169113e-01 -6.06943846e-01 -3.04975770e-02 
-2.49787495e-02 -2.52891388e-02 4.26255204e-02 -2.62391329e-01 1.13896988e-02 2.40533769e-01 3.76195788e-01 3.50836158e-01 2.23889858e-01 5.41814625e-01] [-5.77326000e-01 -4.56332326e-01 -2.44242460e-01 -5.66116452e-01 -4.58835006e-01 -2.15832412e-01 8.01683515e-02 4.15849946e-02 -3.92908514e-01 -7.81348050e-02 2.46085718e-01 3.82546633e-01 3.38647425e-01 3.80059421e-01 6.14098847e-01] [-5.64784825e-01 -4.41140026e-01 -2.40590885e-01 -6.87260866e-01 -9.70289886e-01 -5.80992877e-01 -3.43567193e-01 -6.85551986e-02 -3.09366226e-01 -1.80509269e-01 6.43236935e-02 -1.47514790e-01 2.48995334e-01 3.81734371e-01 7.73517311e-01] [-8.35765600e-01 -3.90304506e-01 -4.52028066e-01 -4.72220302e-01 -9.81729746e-01 -1.66154936e-01 3.30740213e-02 2.54761726e-01 -1.68075413e-01 -1.97839662e-01 7.89417187e-04 -2.93952953e-02 1.44242108e-01 1.43247306e-01 2.65068889e-01] [-1.87074780e-01 -1.35812208e-01 -3.72334808e-01 -3.03828984e-01 -4.92953837e-01 -5.40665202e-02 1.71985701e-01 5.25761068e-01 2.61519551e-01 1.30107984e-01 -2.70967484e-02 2.23299470e-02 1.40651047e-01 -1.21489363e-02 -5.36743784e-03]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [1, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5697.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[1, 0]]() %5 : int[] = prim::Constant[value=[1, 1]]() %6 : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) fw_re: [[[[-1.03368908e-01 1.23479076e-01 1.78715944e-01 5.26894689e-01 2.86385268e-01 8.34152997e-01 5.88163078e-01 6.75368965e-01 1.96180999e-01 1.39364332e-01 -5.71297817e-02 1.33584097e-01 1.25509128e-01] [-2.81768143e-01 1.02994300e-01 2.17170611e-01 3.95694107e-01 3.67459655e-02 2.76822776e-01 2.08393544e-01 5.46063483e-01 2.75224894e-01 3.92636657e-01 -5.75604029e-02 3.50326188e-02 -8.19777250e-02] [-1.85285524e-01 4.12953570e-02 -6.20479584e-02 2.33526334e-01 -1.42922699e-02 -7.46055022e-02 -2.95803845e-01 -2.12011375e-02 1.78828791e-01 6.19340599e-01 2.25328848e-01 8.24775994e-02 -2.45486125e-01] [ 9.83693823e-03 1.44758135e-01 -2.18790352e-01 -2.43511304e-01 -5.44337273e-01 -8.89632106e-01 -9.10727262e-01 -5.35137892e-01 -5.32117076e-02 4.02465373e-01 -5.80228753e-02 -3.12995046e-01 -4.79205549e-01] [ 3.70248944e-01 1.18380994e-01 -7.71826506e-01 -7.63081789e-01 -9.42235827e-01 -6.65294349e-01 -7.03705490e-01 -3.80839080e-01 -2.89648902e-02 1.44950613e-01 2.91287173e-02 -2.67035693e-01 -1.47797734e-01] [-6.31788187e-03 -5.32073788e-02 -7.93149889e-01 -8.62386405e-01 -9.36897278e-01 -3.12119067e-01 -3.05593640e-01 2.01104879e-01 1.29260272e-01 1.60576254e-01 -5.87145090e-02 -1.06734104e-01 2.36526653e-01] [ 1.92907115e-04 -5.98883890e-02 -5.40445566e-01 -6.49391830e-01 -7.27509081e-01 2.35562697e-02 1.56579837e-01 7.17767417e-01 3.08996320e-01 3.15859288e-01 
1.18047029e-01 1.71022236e-01 3.16375673e-01] [-4.94075082e-02 1.61279172e-01 1.12872690e-01 -2.61975732e-02 -8.13727528e-02 3.88197690e-01 4.46558893e-01 6.26345754e-01 8.92611519e-02 2.73180008e-03 3.46861817e-02 5.94702624e-02 3.30256999e-01] [ 2.79740930e-01 1.42093956e-01 8.84120539e-02 2.36377977e-02 1.44727603e-01 2.25006521e-01 1.96469009e-01 1.43812060e-01 2.64895894e-02 2.36975223e-01 3.13582331e-01 1.28975600e-01 2.95754313e-01] [ 7.80194700e-02 8.71258080e-02 2.60160387e-01 2.29220390e-01 3.38569283e-01 4.12339121e-01 3.20836723e-01 1.82281345e-01 7.17537105e-02 -1.50263840e-02 2.70841926e-01 -1.34036541e-01 4.87142622e-01] [ 2.05337536e-02 -2.02196762e-01 -1.42533869e-01 -2.58381236e-02 3.45342681e-02 7.24233910e-02 -2.53635272e-02 -5.52846715e-02 -1.78781617e-02 8.11704546e-02 1.19383641e-01 -1.83188260e-01 1.91649914e-01] [-4.08116765e-02 -2.83450276e-01 -3.18845034e-01 -9.91357863e-02 -1.50902867e-01 5.74127678e-03 6.58254921e-02 1.07868791e-01 4.22706753e-02 -2.49318600e-01 -1.26210868e-01 -2.06690148e-01 5.10827973e-02] [ 2.71374822e-01 3.39629389e-02 -2.07449228e-01 -1.02004156e-01 -2.31259421e-01 8.93666446e-02 5.08495979e-02 -4.58418988e-02 -4.03649479e-01 -3.70542884e-01 -1.97691947e-01 1.67972058e-01 3.75320315e-02] [ 6.54220343e-01 6.37814224e-01 3.59233320e-01 2.72884965e-01 -1.58865340e-02 1.91502929e-01 8.50400552e-02 6.57916069e-02 -4.97772217e-01 -3.32126588e-01 -2.38806143e-01 5.85805535e-01 2.04713166e-01] [ 9.90255117e-01 1.08605516e+00 8.31307113e-01 5.15970170e-01 -3.25067267e-02 2.33266771e-01 4.78792042e-02 2.33302936e-02 -9.02172089e-01 -5.69181025e-01 -3.87088984e-01 8.16588879e-01 4.66442853e-01]] [[ 7.97870755e-02 1.14450455e-02 3.58792871e-01 4.12851781e-01 4.55004841e-01 -2.97662556e-01 -7.34107792e-01 -6.96595967e-01 -3.74384135e-01 -1.92611385e-02 7.31507912e-02 2.29442954e-01 1.09355778e-01] [ 6.59523234e-02 1.78477764e-02 7.24492446e-02 1.73100770e-01 4.99742150e-01 -1.56444624e-01 -5.56934536e-01 -6.48622334e-01 
-3.59561592e-01 8.03316459e-02 2.49945506e-01 4.01595145e-01 4.71403599e-02] [-2.41300046e-01 -1.32581338e-01 -2.10094288e-01 1.39269978e-03 2.85647988e-01 -2.60432065e-01 -3.08392704e-01 -5.53219438e-01 -3.61100465e-01 -3.26717794e-01 -1.80112705e-01 -1.01678811e-01 -2.21834242e-01] [-6.42885566e-01 -3.93135808e-02 -1.47559211e-01 2.37555504e-01 5.31147599e-01 -9.37535986e-03 -1.28746718e-01 -4.93279994e-01 -1.98897958e-01 -2.02142105e-01 1.21426888e-01 1.73990801e-01 -6.83529526e-02] [-7.72771597e-01 -4.99243848e-03 -1.61803305e-01 1.77097291e-01 2.88296849e-01 5.82514554e-02 1.14419013e-01 -3.61168116e-01 -4.26999964e-02 -3.29928845e-01 -2.99165510e-02 1.38954937e-01 8.01581070e-02] [-6.06762171e-01 -1.86079275e-02 -4.10566032e-02 3.42614532e-01 3.43733728e-01 5.60835600e-02 -3.56395572e-01 -6.15050733e-01 -3.09587598e-01 -3.34881805e-02 3.71783018e-01 6.35836959e-01 1.86192676e-01] [-5.32999039e-02 3.22846740e-01 5.15274592e-02 3.17437127e-02 -4.78229634e-02 -9.35137272e-02 -2.32626021e-01 -4.75426465e-01 -2.50911117e-01 6.93980753e-02 2.51487017e-01 5.72115064e-01 1.23716421e-01] [ 1.90468490e-01 2.54184812e-01 3.29061836e-01 3.53946060e-01 2.29436457e-01 3.12947296e-02 -2.99958557e-01 -5.25826216e-01 -7.13489830e-01 -3.97926807e-01 3.03741023e-02 4.30158824e-01 1.02193251e-01] [ 2.37520531e-01 4.81035650e-01 3.88780475e-01 -5.47154211e-02 4.98717874e-02 -4.14079763e-02 2.23848075e-01 -8.02069753e-02 -2.21541733e-01 -4.24942911e-01 -7.79939517e-02 2.45348483e-01 1.99176669e-01] [ 2.46540889e-01 2.49051496e-01 2.10103527e-01 -5.14731742e-02 7.46022463e-02 1.33074261e-02 2.05993727e-01 -1.04510449e-02 -1.02358527e-01 -4.16304171e-01 1.60689652e-02 1.64518952e-01 -1.05031598e-02] [-2.72385120e-01 -2.07195252e-01 -2.50145048e-01 -2.95669705e-01 1.21774934e-02 -6.75063878e-02 2.20633045e-01 1.37620747e-01 4.01387066e-01 2.63302475e-01 4.26692545e-01 2.43305847e-01 -1.21369161e-01] [-4.39828277e-01 -3.28165799e-01 -2.98363894e-01 -1.59116060e-01 -1.21762685e-03 
-2.95468476e-02 -4.57159020e-02 -1.09199688e-01 -2.44414136e-02 2.69800127e-01 3.49628568e-01 2.81385541e-01 -3.34634632e-01] [-6.81804955e-01 -2.65143275e-01 -1.61599502e-01 -3.79477143e-02 -3.01393475e-02 4.76611778e-02 1.21288031e-01 2.78789878e-01 2.53691912e-01 3.21279109e-01 1.84677303e-01 3.25657368e-01 8.18870813e-02] [-3.79405648e-01 4.12740037e-02 -1.42749948e-02 3.48997302e-02 -1.51813254e-01 1.91279069e-01 2.01076835e-01 3.69206488e-01 2.67611146e-01 1.40594542e-01 -5.30404635e-02 2.97800273e-01 3.00868154e-01] [-3.37923139e-01 -5.19400835e-02 -9.66711044e-02 1.42607138e-01 -1.92235708e-01 4.38066870e-01 4.49229479e-01 8.18260431e-01 7.55666792e-01 2.82640427e-01 -3.33671682e-02 2.47127965e-01 5.75459659e-01]] [[-3.72408718e-01 -1.07761241e-01 -6.26356423e-01 -5.15198648e-01 -9.32140827e-01 -7.33637214e-02 -2.16377571e-01 8.03245306e-02 -3.35957050e-01 -3.61576408e-01 -4.87351447e-01 -2.34937146e-01 3.41205388e-01] [-2.88532257e-01 -4.40758057e-02 -2.42334917e-01 -3.51765454e-01 -7.94182062e-01 -3.22140902e-01 -2.91634828e-01 2.01464687e-02 -2.83608705e-01 -1.38811052e-01 -1.87335253e-01 2.73823272e-02 2.26512566e-01] [-1.69828892e-01 -5.87809347e-02 -3.13881755e-01 -4.55610603e-01 -7.12436974e-01 -4.08120215e-01 -2.60428071e-01 1.56475678e-01 1.49869815e-01 5.62967062e-01 3.69531035e-01 5.24325252e-01 6.98185936e-02] [-1.47336781e-01 -1.93961129e-01 -1.77890599e-01 -3.50960791e-01 -5.82159102e-01 -6.87156498e-01 -3.14676523e-01 3.46492380e-02 6.42945021e-02 4.81012464e-01 7.72825539e-01 9.36061203e-01 4.29636687e-01] [-8.25026259e-02 -2.67051667e-01 -4.57008153e-01 -4.45796013e-01 -5.46747446e-01 -4.52222288e-01 -2.94155389e-01 -1.28299192e-01 -1.28987014e-01 2.03748703e-01 5.17594874e-01 8.82061541e-01 6.34599507e-01] [-2.46523827e-01 2.66416073e-01 2.49098372e-02 2.03650877e-01 -5.39377093e-01 -4.63126928e-01 -6.95225954e-01 -6.08891189e-01 -6.85444713e-01 -4.44087148e-01 -1.94674045e-01 3.18630069e-01 4.15522456e-01] [ 5.08151762e-03 3.25739682e-01 
2.81945989e-02 1.82203725e-01 -3.03213000e-01 -3.44207644e-01 -5.39837301e-01 -5.88194430e-01 -2.41190195e-01 -6.01728037e-02 1.56459466e-01 3.41498822e-01 4.68613625e-01] [ 3.62998098e-02 9.61103961e-02 -2.91810989e-01 1.56244440e-02 -2.82924533e-01 -3.51909548e-01 -3.11173350e-01 -2.97687501e-01 -4.70545180e-02 1.49755299e-01 2.19540223e-01 2.18248501e-01 1.41343340e-01] [ 4.10032600e-01 -1.47384569e-01 -7.16164291e-01 -6.98262870e-01 -2.08478928e-01 -7.93766975e-02 2.80489683e-01 3.22384089e-02 2.06908301e-01 1.13758512e-01 2.93500841e-01 1.46863624e-01 8.89023021e-02] [ 1.55314133e-01 -4.29478854e-01 -5.24564326e-01 -3.03139329e-01 2.33197317e-01 1.86677828e-01 3.36073190e-01 7.40809366e-02 -8.86201933e-02 -1.34319961e-01 -2.38526985e-02 2.12257102e-01 -1.01991087e-01] [-1.69906616e-01 -2.45767459e-01 -1.24261275e-01 -1.16611175e-01 2.90272385e-01 1.11875787e-01 3.19891185e-01 5.11620790e-02 1.15376383e-01 1.79624036e-02 2.41233900e-01 3.12431455e-01 9.47936345e-03] [-1.52007982e-01 -2.46622249e-01 7.84667954e-03 1.65554434e-01 3.03378791e-01 4.57655229e-02 1.97435021e-01 -1.06485821e-02 -5.96585795e-02 -3.09582472e-01 -5.72606325e-02 2.21763134e-01 3.37554067e-02] [-1.15044035e-01 -1.21591568e-01 6.26890138e-02 2.00052299e-02 -1.21098310e-01 -1.91214355e-03 1.42391235e-01 2.38153562e-01 2.05471013e-02 -1.77898422e-01 -2.11993247e-01 2.19186340e-02 -2.74104532e-03] [ 4.44535106e-01 2.39484072e-01 -4.11335640e-02 -4.88669425e-02 -2.98137367e-01 3.28908384e-01 3.27233404e-01 4.34345603e-01 -4.72941250e-02 -1.17570296e-01 -3.02870095e-01 9.46447253e-04 -2.53541600e-02] [ 3.46024424e-01 2.42629454e-01 -8.09784699e-03 -1.43348873e-02 -3.08827639e-01 5.56526959e-01 4.45764422e-01 7.11572170e-01 1.04349397e-01 4.55889195e-01 1.40638845e-02 1.16578959e-01 4.21580561e-02]]]]; ov_res: [[[[-1.03368908e-01 1.23479076e-01 1.78715944e-01 5.26894689e-01 2.86385268e-01 8.34152997e-01 5.88163078e-01 6.75368965e-01 1.96180999e-01 1.39364332e-01 -5.71297817e-02 1.33584097e-01 
1.25509128e-01] [-2.81768143e-01 1.02994300e-01 2.17170611e-01 3.95694107e-01 3.67459655e-02 2.76822776e-01 2.08393544e-01 5.46063483e-01 2.75224894e-01 3.92636657e-01 -5.75604029e-02 3.50326188e-02 -8.19777250e-02] [-1.85285524e-01 4.12953570e-02 -6.20479584e-02 2.33526334e-01 -1.42922699e-02 -7.46055022e-02 -2.95803845e-01 -2.12011375e-02 1.78828791e-01 6.19340599e-01 2.25328848e-01 8.24775994e-02 -2.45486125e-01] [ 9.83693823e-03 1.44758135e-01 -2.18790352e-01 -2.43511304e-01 -5.44337273e-01 -8.89632106e-01 -9.10727262e-01 -5.35137892e-01 -5.32117076e-02 4.02465373e-01 -5.80228753e-02 -3.12995046e-01 -4.79205549e-01] [ 3.70248944e-01 1.18380994e-01 -7.71826506e-01 -7.63081789e-01 -9.42235827e-01 -6.65294349e-01 -7.03705490e-01 -3.80839080e-01 -2.89648902e-02 1.44950613e-01 2.91287173e-02 -2.67035693e-01 -1.47797734e-01] [-6.31788187e-03 -5.32073788e-02 -7.93149889e-01 -8.62386405e-01 -9.36897278e-01 -3.12119067e-01 -3.05593640e-01 2.01104879e-01 1.29260272e-01 1.60576254e-01 -5.87145090e-02 -1.06734104e-01 2.36526653e-01] [ 1.92907115e-04 -5.98883890e-02 -5.40445566e-01 -6.49391830e-01 -7.27509081e-01 2.35562697e-02 1.56579837e-01 7.17767417e-01 3.08996320e-01 3.15859288e-01 1.18047029e-01 1.71022236e-01 3.16375673e-01] [-4.94075082e-02 1.61279172e-01 1.12872690e-01 -2.61975732e-02 -8.13727528e-02 3.88197690e-01 4.46558893e-01 6.26345754e-01 8.92611519e-02 2.73180008e-03 3.46861817e-02 5.94702624e-02 3.30256999e-01] [ 2.79740930e-01 1.42093956e-01 8.84120539e-02 2.36377977e-02 1.44727603e-01 2.25006521e-01 1.96469009e-01 1.43812060e-01 2.64895894e-02 2.36975223e-01 3.13582331e-01 1.28975600e-01 2.95754313e-01] [ 7.80194700e-02 8.71258080e-02 2.60160387e-01 2.29220390e-01 3.38569283e-01 4.12339121e-01 3.20836723e-01 1.82281345e-01 7.17537105e-02 -1.50263840e-02 2.70841926e-01 -1.34036541e-01 4.87142622e-01] [ 2.05337536e-02 -2.02196762e-01 -1.42533869e-01 -2.58381236e-02 3.45342681e-02 7.24233910e-02 -2.53635272e-02 -5.52846715e-02 -1.78781617e-02 8.11704546e-02 
1.19383641e-01 -1.83188260e-01 1.91649914e-01] [-4.08116765e-02 -2.83450276e-01 -3.18845034e-01 -9.91357863e-02 -1.50902867e-01 5.74127678e-03 6.58254921e-02 1.07868791e-01 4.22706753e-02 -2.49318600e-01 -1.26210868e-01 -2.06690148e-01 5.10827973e-02] [ 2.71374822e-01 3.39629389e-02 -2.07449228e-01 -1.02004156e-01 -2.31259421e-01 8.93666446e-02 5.08495979e-02 -4.58418988e-02 -4.03649479e-01 -3.70542884e-01 -1.97691947e-01 1.67972058e-01 3.75320315e-02] [ 6.54220343e-01 6.37814224e-01 3.59233320e-01 2.72884965e-01 -1.58865340e-02 1.91502929e-01 8.50400552e-02 6.57916069e-02 -4.97772217e-01 -3.32126588e-01 -2.38806143e-01 5.85805535e-01 2.04713166e-01] [ 9.90255117e-01 1.08605516e+00 8.31307113e-01 5.15970170e-01 -3.25067267e-02 2.33266771e-01 4.78792042e-02 2.33302936e-02 -9.02172089e-01 -5.69181025e-01 -3.87088984e-01 8.16588879e-01 4.66442853e-01]] [[ 7.97870755e-02 1.14450455e-02 3.58792871e-01 4.12851781e-01 4.55004841e-01 -2.97662556e-01 -7.34107792e-01 -6.96595967e-01 -3.74384135e-01 -1.92611385e-02 7.31507912e-02 2.29442954e-01 1.09355778e-01] [ 6.59523234e-02 1.78477764e-02 7.24492446e-02 1.73100770e-01 4.99742150e-01 -1.56444624e-01 -5.56934536e-01 -6.48622334e-01 -3.59561592e-01 8.03316459e-02 2.49945506e-01 4.01595145e-01 4.71403599e-02] [-2.41300046e-01 -1.32581338e-01 -2.10094288e-01 1.39269978e-03 2.85647988e-01 -2.60432065e-01 -3.08392704e-01 -5.53219438e-01 -3.61100465e-01 -3.26717794e-01 -1.80112705e-01 -1.01678811e-01 -2.21834242e-01] [-6.42885566e-01 -3.93135808e-02 -1.47559211e-01 2.37555504e-01 5.31147599e-01 -9.37535986e-03 -1.28746718e-01 -4.93279994e-01 -1.98897958e-01 -2.02142105e-01 1.21426888e-01 1.73990801e-01 -6.83529526e-02] [-7.72771597e-01 -4.99243848e-03 -1.61803305e-01 1.77097291e-01 2.88296849e-01 5.82514554e-02 1.14419013e-01 -3.61168116e-01 -4.26999964e-02 -3.29928845e-01 -2.99165510e-02 1.38954937e-01 8.01581070e-02] [-6.06762171e-01 -1.86079275e-02 -4.10566032e-02 3.42614532e-01 3.43733728e-01 5.60835600e-02 -3.56395572e-01 
-6.15050733e-01 -3.09587598e-01 -3.34881805e-02 3.71783018e-01 6.35836959e-01 1.86192676e-01] [-5.32999039e-02 3.22846740e-01 5.15274592e-02 3.17437127e-02 -4.78229634e-02 -9.35137272e-02 -2.32626021e-01 -4.75426465e-01 -2.50911117e-01 6.93980753e-02 2.51487017e-01 5.72115064e-01 1.23716421e-01] [ 1.90468490e-01 2.54184812e-01 3.29061836e-01 3.53946060e-01 2.29436457e-01 3.12947296e-02 -2.99958557e-01 -5.25826216e-01 -7.13489830e-01 -3.97926807e-01 3.03741023e-02 4.30158824e-01 1.02193251e-01] [ 2.37520531e-01 4.81035650e-01 3.88780475e-01 -5.47154211e-02 4.98717874e-02 -4.14079763e-02 2.23848075e-01 -8.02069753e-02 -2.21541733e-01 -4.24942911e-01 -7.79939517e-02 2.45348483e-01 1.99176669e-01] [ 2.46540889e-01 2.49051496e-01 2.10103527e-01 -5.14731742e-02 7.46022463e-02 1.33074261e-02 2.05993727e-01 -1.04510449e-02 -1.02358527e-01 -4.16304171e-01 1.60689652e-02 1.64518952e-01 -1.05031598e-02] [-2.72385120e-01 -2.07195252e-01 -2.50145048e-01 -2.95669705e-01 1.21774934e-02 -6.75063878e-02 2.20633045e-01 1.37620747e-01 4.01387066e-01 2.63302475e-01 4.26692545e-01 2.43305847e-01 -1.21369161e-01] [-4.39828277e-01 -3.28165799e-01 -2.98363894e-01 -1.59116060e-01 -1.21762685e-03 -2.95468476e-02 -4.57159020e-02 -1.09199688e-01 -2.44414136e-02 2.69800127e-01 3.49628568e-01 2.81385541e-01 -3.34634632e-01] [-6.81804955e-01 -2.65143275e-01 -1.61599502e-01 -3.79477143e-02 -3.01393475e-02 4.76611778e-02 1.21288031e-01 2.78789878e-01 2.53691912e-01 3.21279109e-01 1.84677303e-01 3.25657368e-01 8.18870813e-02] [-3.79405648e-01 4.12740037e-02 -1.42749948e-02 3.48997302e-02 -1.51813254e-01 1.91279069e-01 2.01076835e-01 3.69206488e-01 2.67611146e-01 1.40594542e-01 -5.30404635e-02 2.97800273e-01 3.00868154e-01] [-3.37923139e-01 -5.19400835e-02 -9.66711044e-02 1.42607138e-01 -1.92235708e-01 4.38066870e-01 4.49229479e-01 8.18260431e-01 7.55666792e-01 2.82640427e-01 -3.33671682e-02 2.47127965e-01 5.75459659e-01]] [[-3.72408718e-01 -1.07761241e-01 -6.26356423e-01 -5.15198648e-01 
-9.32140827e-01 -7.33637214e-02 -2.16377571e-01 8.03245306e-02 -3.35957050e-01 -3.61576408e-01 -4.87351447e-01 -2.34937146e-01 3.41205388e-01] [-2.88532257e-01 -4.40758057e-02 -2.42334917e-01 -3.51765454e-01 -7.94182062e-01 -3.22140902e-01 -2.91634828e-01 2.01464687e-02 -2.83608705e-01 -1.38811052e-01 -1.87335253e-01 2.73823272e-02 2.26512566e-01] [-1.69828892e-01 -5.87809347e-02 -3.13881755e-01 -4.55610603e-01 -7.12436974e-01 -4.08120215e-01 -2.60428071e-01 1.56475678e-01 1.49869815e-01 5.62967062e-01 3.69531035e-01 5.24325252e-01 6.98185936e-02] [-1.47336781e-01 -1.93961129e-01 -1.77890599e-01 -3.50960791e-01 -5.82159102e-01 -6.87156498e-01 -3.14676523e-01 3.46492380e-02 6.42945021e-02 4.81012464e-01 7.72825539e-01 9.36061203e-01 4.29636687e-01] [-8.25026259e-02 -2.67051667e-01 -4.57008153e-01 -4.45796013e-01 -5.46747446e-01 -4.52222288e-01 -2.94155389e-01 -1.28299192e-01 -1.28987014e-01 2.03748703e-01 5.17594874e-01 8.82061541e-01 6.34599507e-01] [-2.46523827e-01 2.66416073e-01 2.49098372e-02 2.03650877e-01 -5.39377093e-01 -4.63126928e-01 -6.95225954e-01 -6.08891189e-01 -6.85444713e-01 -4.44087148e-01 -1.94674045e-01 3.18630069e-01 4.15522456e-01] [ 5.08151762e-03 3.25739682e-01 2.81945989e-02 1.82203725e-01 -3.03213000e-01 -3.44207644e-01 -5.39837301e-01 -5.88194430e-01 -2.41190195e-01 -6.01728037e-02 1.56459466e-01 3.41498822e-01 4.68613625e-01] [ 3.62998098e-02 9.61103961e-02 -2.91810989e-01 1.56244440e-02 -2.82924533e-01 -3.51909548e-01 -3.11173350e-01 -2.97687501e-01 -4.70545180e-02 1.49755299e-01 2.19540223e-01 2.18248501e-01 1.41343340e-01] [ 4.10032600e-01 -1.47384569e-01 -7.16164291e-01 -6.98262870e-01 -2.08478928e-01 -7.93766975e-02 2.80489683e-01 3.22384089e-02 2.06908301e-01 1.13758512e-01 2.93500841e-01 1.46863624e-01 8.89023021e-02] [ 1.55314133e-01 -4.29478854e-01 -5.24564326e-01 -3.03139329e-01 2.33197317e-01 1.86677828e-01 3.36073190e-01 7.40809366e-02 -8.86201933e-02 -1.34319961e-01 -2.38526985e-02 2.12257102e-01 -1.01991087e-01] 
[-1.69906616e-01 -2.45767459e-01 -1.24261275e-01 -1.16611175e-01 2.90272385e-01 1.11875787e-01 3.19891185e-01 5.11620790e-02 1.15376383e-01 1.79624036e-02 2.41233900e-01 3.12431455e-01 9.47936345e-03] [-1.52007982e-01 -2.46622249e-01 7.84667954e-03 1.65554434e-01 3.03378791e-01 4.57655229e-02 1.97435021e-01 -1.06485821e-02 -5.96585795e-02 -3.09582472e-01 -5.72606325e-02 2.21763134e-01 3.37554067e-02] [-1.15044035e-01 -1.21591568e-01 6.26890138e-02 2.00052299e-02 -1.21098310e-01 -1.91214355e-03 1.42391235e-01 2.38153562e-01 2.05471013e-02 -1.77898422e-01 -2.11993247e-01 2.19186340e-02 -2.74104532e-03] [ 4.44535106e-01 2.39484072e-01 -4.11335640e-02 -4.88669425e-02 -2.98137367e-01 3.28908384e-01 3.27233404e-01 4.34345603e-01 -4.72941250e-02 -1.17570296e-01 -3.02870095e-01 9.46447253e-04 -2.53541600e-02] [ 3.46024424e-01 2.42629454e-01 -8.09784699e-03 -1.43348873e-02 -3.08827639e-01 5.56526959e-01 4.45764422e-01 7.11572170e-01 1.04349397e-01 4.55889195e-01 1.40638845e-02 1.16578959e-01 4.21580561e-02]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5700.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[0, 0]]() %5 : int[] = prim::Constant[value=[2, 1]]() %6 : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::avg_pool2d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%7) prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode 
for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python 
instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debufw_re: [[[[-5.26071370e-01 1.06075510e-01 2.43869096e-01 6.09460592e-01 3.06967586e-01 2.05922171e-01 1.43250197e-01 2.05754280e-01 1.57464206e-01 4.51319739e-02 -1.76270469e-03 -2.28629246e-01 -1.29169315e-01] [-2.85613716e-01 1.72444046e-01 4.94278908e-01 2.73657918e-01 1.26660049e-01 2.16920301e-02 7.07653388e-02 -1.42552316e-01 -2.67944187e-01 -1.11941032e-01 1.46836573e-02 8.27552378e-02 1.62031472e-01] [-2.00715601e-01 2.67106354e-01 4.93065089e-01 1.33082926e-01 -2.31817409e-01 -1.26775548e-01 2.52995864e-02 8.98745060e-02 2.47854099e-01 1.02419890e-01 1.60313368e-01 -3.49179029e-01 -6.89011037e-01] [ 9.80395526e-02 6.71773195e-01 3.29415113e-01 1.20645560e-01 -1.70316353e-01 -2.03309000e-01 -3.57111752e-01 -4.44566399e-01 -2.90141553e-01 1.22548290e-01 3.40204358e-01 1.90330192e-01 -4.20092046e-01] [-1.16314068e-01 -8.58983770e-02 -3.94327641e-02 -9.05022770e-02 8.48527029e-02 -3.24648023e-01 -2.27263244e-03 -3.10524255e-01 -1.29514113e-01 -1.69543147e-01 -1.56706169e-01 -2.54658759e-01 -4.35200572e-01] [ 4.95196506e-02 -1.40015885e-01 6.69580579e-01 4.80430752e-01 3.81833285e-01 -4.54765677e-01 -2.63312221e-01 1.78271130e-01 5.57983935e-01 7.72023201e-01 3.54781449e-01 2.17563450e-01 2.08781585e-01] [-4.03629720e-01 -3.62799376e-01 3.70579660e-02 -1.31624147e-01 
-1.46003082e-01 -3.06911826e-01 -2.37053022e-01 -1.86715096e-01 -2.44660437e-01 2.20863521e-03 -1.20524034e-01 -1.51666239e-01 1.07580662e-01]] [[ 1.01790853e-01 -2.46651754e-01 -4.42477375e-01 -7.00801432e-01 -3.84617835e-01 3.02870907e-02 4.48810160e-01 1.53271973e-01 -3.21571171e-01 -2.82902926e-01 1.89244503e-03 2.46897057e-01 4.66273894e-04] [-6.83190405e-01 -1.19057286e+00 -1.25715458e+00 -6.70177102e-01 -2.87077218e-01 8.57430547e-02 -9.29042250e-02 -2.12390348e-01 -3.61794233e-01 -5.80412447e-01 -5.84347010e-01 -7.08217621e-01 -4.30035770e-01] [-2.82254726e-01 -2.11130783e-01 -6.39305472e-01 -4.07769948e-01 -3.51820529e-01 1.39997173e-02 -5.05112350e-01 -6.92201376e-01 -6.66575909e-01 -4.26905155e-01 -1.73162758e-01 8.19517523e-02 2.93063223e-01] [-3.72515768e-01 -3.58629525e-01 -3.81233364e-01 -4.93447036e-01 -1.05348349e-01 -2.91663669e-02 -3.64252478e-02 6.08933493e-02 -2.35063042e-02 4.65690732e-01 1.68011695e-01 3.48376870e-01 4.97832708e-02] [-3.84203255e-01 -4.49324340e-01 -3.40101719e-01 -3.32847863e-01 7.95258384e-05 2.75003970e-01 5.06378822e-02 3.88548933e-02 1.55992627e-01 4.69189584e-01 9.26496163e-02 -2.83710122e-01 -3.18366796e-01] [-3.54031384e-01 -5.90777814e-01 -2.42992982e-01 -1.54624321e-02 2.06078500e-01 3.58264238e-01 1.89534873e-01 -2.45710909e-02 1.60493910e-01 -1.14959531e-01 -2.53004730e-01 -7.70167887e-01 -2.47879371e-01] [-5.18291771e-01 -5.21281421e-01 2.23584339e-01 2.45475471e-01 4.04178500e-01 -1.69708937e-01 -1.77645952e-01 -3.07771146e-01 -1.58924252e-01 -6.58017099e-02 -5.81494570e-02 1.38146341e-01 3.47313821e-01]] [[-3.61397594e-01 -4.75128531e-01 -6.50989413e-02 1.35604545e-01 6.98387548e-02 3.49459529e-01 -2.94693083e-01 -5.63067049e-02 -2.42063358e-01 2.15108655e-02 -3.46398443e-01 -3.46272588e-01 -1.57075748e-01] [-4.19327915e-01 -6.12276077e-01 -3.86933714e-01 -2.00941980e-01 -2.63366431e-01 -1.33328140e-01 -4.30525452e-01 -2.03112155e-01 -3.24320197e-01 -9.75565761e-02 -1.92130253e-01 -3.34213227e-01 
2.39313334e-01] [-4.55939144e-01 -1.13954715e-01 2.96847033e-03 5.23781739e-02 -2.13315889e-01 -2.51014620e-01 -5.99679351e-01 -4.61467326e-01 -5.47074616e-01 -1.87247679e-01 -5.21085441e-01 -3.43746126e-01 -2.30905265e-01] [-9.02732238e-02 -2.88835466e-01 -1.51536450e-01 -4.27420557e-01 -1.42676458e-01 -4.39968169e-01 -3.11893106e-01 -4.19600695e-01 -3.25249135e-01 -1.87899962e-01 -4.60992396e-01 -3.59579295e-01 -5.52686691e-01] [ 9.02805030e-02 -5.23543097e-02 3.56826524e-04 -2.63262272e-01 1.21978641e-01 -2.94923425e-01 -6.46772087e-02 -3.90620559e-01 -2.45951235e-01 -2.06074417e-01 -2.10598916e-01 4.99271750e-02 4.04723501e-03] [-8.54177251e-02 -5.93840033e-02 -4.22199845e-01 -5.40063739e-01 -1.80736765e-01 -4.45908830e-02 2.46908024e-01 -4.01782215e-01 -8.05502713e-01 -9.38260913e-01 -5.87339580e-01 -3.39162737e-01 -1.40439011e-02] [-6.12246931e-01 -9.48391497e-01 -7.15149820e-01 -5.06362557e-01 -8.68257657e-02 -2.47748457e-02 4.43170607e-01 2.79209495e-01 1.54901564e-01 3.92892510e-02 -2.50295619e-04 -2.44766176e-01 -2.55234092e-01]]]]; ov_res: [[[[-5.26071370e-01 1.06075510e-01 2.43869096e-01 6.09460592e-01 3.06967586e-01 2.05922171e-01 1.43250197e-01 2.05754280e-01 1.57464206e-01 4.51319739e-02 -1.76270469e-03 -2.28629246e-01 -1.29169315e-01] [-2.85613716e-01 1.72444046e-01 4.94278908e-01 2.73657918e-01 1.26660049e-01 2.16920301e-02 7.07653388e-02 -1.42552316e-01 -2.67944187e-01 -1.11941032e-01 1.46836573e-02 8.27552378e-02 1.62031472e-01] [-2.00715601e-01 2.67106354e-01 4.93065089e-01 1.33082926e-01 -2.31817409e-01 -1.26775548e-01 2.52995864e-02 8.98745060e-02 2.47854099e-01 1.02419890e-01 1.60313368e-01 -3.49179029e-01 -6.89011037e-01] [ 9.80395526e-02 6.71773195e-01 3.29415113e-01 1.20645560e-01 -1.70316353e-01 -2.03309000e-01 -3.57111752e-01 -4.44566399e-01 -2.90141553e-01 1.22548290e-01 3.40204358e-01 1.90330192e-01 -4.20092046e-01] [-1.16314068e-01 -8.58983770e-02 -3.94327641e-02 -9.05022770e-02 8.48527029e-02 -3.24648023e-01 -2.27263244e-03 
-3.10524255e-01 -1.29514113e-01 -1.69543147e-01 -1.56706169e-01 -2.54658759e-01 -4.35200572e-01] [ 4.95196506e-02 -1.40015885e-01 6.69580579e-01 4.80430752e-01 3.81833285e-01 -4.54765677e-01 -2.63312221e-01 1.78271130e-01 5.57983935e-01 7.72023201e-01 3.54781449e-01 2.17563450e-01 2.08781585e-01] [-4.03629720e-01 -3.62799376e-01 3.70579660e-02 -1.31624147e-01 -1.46003082e-01 -3.06911826e-01 -2.37053022e-01 -1.86715096e-01 -2.44660437e-01 2.20863521e-03 -1.20524034e-01 -1.51666239e-01 1.07580662e-01]] [[ 1.01790853e-01 -2.46651754e-01 -4.42477375e-01 -7.00801432e-01 -3.84617835e-01 3.02870907e-02 4.48810160e-01 1.53271973e-01 -3.21571171e-01 -2.82902926e-01 1.89244503e-03 2.46897057e-01 4.66273894e-04] [-6.83190405e-01 -1.19057286e+00 -1.25715458e+00 -6.70177102e-01 -2.87077218e-01 8.57430547e-02 -9.29042250e-02 -2.12390348e-01 -3.61794233e-01 -5.80412447e-01 -5.84347010e-01 -7.08217621e-01 -4.30035770e-01] [-2.82254726e-01 -2.11130783e-01 -6.39305472e-01 -4.07769948e-01 -3.51820529e-01 1.39997173e-02 -5.05112350e-01 -6.92201376e-01 -6.66575909e-01 -4.26905155e-01 -1.73162758e-01 8.19517523e-02 2.93063223e-01] [-3.72515768e-01 -3.58629525e-01 -3.81233364e-01 -4.93447036e-01 -1.05348349e-01 -2.91663669e-02 -3.64252478e-02 6.08933493e-02 -2.35063042e-02 4.65690732e-01 1.68011695e-01 3.48376870e-01 4.97832708e-02] [-3.84203255e-01 -4.49324340e-01 -3.40101719e-01 -3.32847863e-01 7.95258384e-05 2.75003970e-01 5.06378822e-02 3.88548933e-02 1.55992627e-01 4.69189584e-01 9.26496163e-02 -2.83710122e-01 -3.18366796e-01] [-3.54031384e-01 -5.90777814e-01 -2.42992982e-01 -1.54624321e-02 2.06078500e-01 3.58264238e-01 1.89534873e-01 -2.45710909e-02 1.60493910e-01 -1.14959531e-01 -2.53004730e-01 -7.70167887e-01 -2.47879371e-01] [-5.18291771e-01 -5.21281421e-01 2.23584339e-01 2.45475471e-01 4.04178500e-01 -1.69708937e-01 -1.77645952e-01 -3.07771146e-01 -1.58924252e-01 -6.58017099e-02 -5.81494570e-02 1.38146341e-01 3.47313821e-01]] [[-3.61397594e-01 -4.75128531e-01 -6.50989413e-02 
1.35604545e-01 6.98387548e-02 3.49459529e-01 -2.94693083e-01 -5.63067049e-02 -2.42063358e-01 2.15108655e-02 -3.46398443e-01 -3.46272588e-01 -1.57075748e-01] [-4.19327915e-01 -6.12276077e-01 -3.86933714e-01 -2.00941980e-01 -2.63366431e-01 -1.33328140e-01 -4.30525452e-01 -2.03112155e-01 -3.24320197e-01 -9.75565761e-02 -1.92130253e-01 -3.34213227e-01 2.39313334e-01] [-4.55939144e-01 -1.13954715e-01 2.96847033e-03 5.23781739e-02 -2.13315889e-01 -2.51014620e-01 -5.99679351e-01 -4.61467326e-01 -5.47074616e-01 -1.87247679e-01 -5.21085441e-01 -3.43746126e-01 -2.30905265e-01] [-9.02732238e-02 -2.88835466e-01 -1.51536450e-01 -4.27420557e-01 -1.42676458e-01 -4.39968169e-01 -3.11893106e-01 -4.19600695e-01 -3.25249135e-01 -1.87899962e-01 -4.60992396e-01 -3.59579295e-01 -5.52686691e-01] [ 9.02805030e-02 -5.23543097e-02 3.56826524e-04 -2.63262272e-01 1.21978641e-01 -2.94923425e-01 -6.46772087e-02 -3.90620559e-01 -2.45951235e-01 -2.06074417e-01 -2.10598916e-01 4.99271750e-02 4.04723501e-03] [-8.54177251e-02 -5.93840033e-02 -4.22199845e-01 -5.40063739e-01 -1.80736765e-01 -4.45908830e-02 2.46908024e-01 -4.01782215e-01 -8.05502713e-01 -9.38260913e-01 -5.87339580e-01 -3.39162737e-01 -1.40439011e-02] [-6.12246931e-01 -9.48391497e-01 -7.15149820e-01 -5.06362557e-01 -8.68257657e-02 -2.47748457e-02 4.43170607e-01 2.79209495e-01 1.54901564e-01 3.92892510e-02 -2.50295619e-04 -2.44766176e-01 -2.55234092e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool2d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': [2, 1], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5703.aten_avg_pool2d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 %4 : int[] = prim::Constant[value=[0, 0]]() %5 : int[] = prim::Constant[value=[2, 1]]() %6 : Tensor = aten::avg_pool2d(%x, %5, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:64:0 return (%6) fw_re: [[[[ 3.27242017e-02 -7.12484270e-02 4.29043472e-01 -6.44314647e-01 4.85023767e-01 -2.67074227e-01 1.09615326e-02 -3.08501899e-01 1.13091993e+00 1.48219705e-01 -2.74905235e-01 3.22919697e-01 1.17412794e+00 -2.70090103e-02 -8.97130072e-02] [-7.45906949e-01 7.35601783e-01 8.36547613e-01 -1.22695088e+00 -9.94107306e-01 1.34102091e-01 3.85106027e-01 -1.04446959e+00 -4.10110205e-02 -5.08390427e-01 -5.33299148e-01 6.91275656e-01 1.77939177e-01 -6.47046328e-01 1.12570369e+00] [-6.34748697e-01 1.10180676e-01 1.55265808e+00 5.28022528e-01 -1.81036621e-01 1.49016276e-01 7.95259774e-01 1.24376988e+00 -2.23882869e-02 4.00751740e-01 3.55283767e-01 -2.86427021e-01 -1.06876671e+00 4.57062662e-01 5.68568349e-01] [ 2.66896844e+00 7.52799585e-02 1.75526071e+00 -6.83954060e-01 3.04602027e-01 -1.18424475e+00 -3.02577347e-01 4.42494154e-02 1.13838565e+00 2.20576346e-01 5.74745238e-01 2.57127255e-01 -5.74707866e-01 4.14337218e-02 1.91942707e-01] [-3.39665532e-01 1.06242824e+00 -7.24666655e-01 8.58724236e-01 -1.81526124e-01 -7.47392654e-01 6.20512247e-01 2.57786959e-02 -7.66661286e-01 3.63121867e-01 -3.09539080e-01 -2.55992562e-01 6.64039850e-01 2.49254704e+00 9.27635729e-02] [-6.42248690e-01 9.19777215e-01 1.19357681e+00 -1.37423468e+00 -6.13116622e-01 7.73230433e-01 1.80463761e-01 5.78065634e-01 9.27941442e-01 -1.54063746e-01 6.44510150e-01 9.83392447e-02 -3.10329020e-01 5.39585911e-02 1.72321677e-01] [-5.22273958e-01 3.25154155e-01 
-4.87373978e-01 1.73197031e+00 7.58376956e-01 2.23008588e-01 -1.08397567e+00 -1.98994070e-01 5.96733212e-01 4.92533743e-01 -1.25229645e+00 -2.10983992e-01 -4.48321998e-01 -1.41053379e-01 -7.07778931e-01]] [[-6.92150950e-01 -2.77276725e-01 3.38916659e-01 6.08942270e-01 -1.36217505e-01 2.28342950e-01 1.60947025e-01 -8.92024159e-01 5.38651347e-02 3.21385026e-01 4.11425650e-01 1.01884142e-01 8.03210735e-01 -3.34842384e-01 -1.04926980e+00] [-6.27516329e-01 -9.30999160e-01 5.29434264e-01 6.88128114e-01 -2.38966241e-01 -4.00426447e-01 -2.78099656e-01 -2.13537693e+00 6.60869956e-01 -6.40862584e-01 -2.24469304e-02 -1.20769620e+00 -4.20829475e-01 -5.61638355e-01 8.37626457e-01] [ 1.05454135e+00 7.35104501e-01 -3.04105252e-01 -3.32426488e-01 5.64210892e-01 -4.35965657e-01 -2.50051886e-01 -6.03996098e-01 8.51827860e-01 -4.62632924e-01 -1.63158405e+00 3.01219642e-01 -1.05686390e+00 5.62418699e-01 -1.56451374e-01] [ 1.11575156e-01 7.95527220e-01 -5.60482800e-01 1.02625084e+00 2.41885960e-01 -1.72652006e-02 -2.30385616e-01 -3.25489342e-02 7.02068806e-02 -7.22456723e-02 3.97665560e-01 1.67908907e+00 -2.38118172e-02 8.31956089e-01 6.64288282e-01] [ 4.53483909e-01 -1.59654051e-01 1.04895882e-01 -3.08447897e-01 -4.29463685e-01 3.94188583e-01 -1.91390133e+00 -4.51978534e-01 2.81086594e-01 5.90957105e-01 -1.01626456e-01 -3.92490476e-01 -3.82839650e-01 -8.43390405e-01 4.75716889e-01] [ 2.34526038e-01 -1.24644554e+00 7.89038956e-01 -2.15283409e-02 3.39017689e-01 -5.23420036e-01 -5.84572375e-01 1.11696303e+00 3.07440311e-01 1.03749990e-01 -8.19783211e-02 4.39185202e-01 3.71143967e-01 -5.49043536e-01 1.43651199e+00] [-5.07774055e-01 -4.27547663e-01 -6.70656860e-02 6.90384507e-01 -3.08009386e-01 3.51590514e-01 -9.47144628e-02 -2.28045970e-01 -4.15393114e-01 -3.82892221e-01 4.16929424e-02 -1.40216887e+00 2.10624546e-01 -7.58334398e-02 -4.09630507e-01]] [[-4.80071664e-01 1.31027746e+00 -2.60202616e-01 3.64188075e-01 1.04419097e-01 1.48147523e+00 1.30371332e-01 9.30587649e-01 -2.72342533e-01 
1.32234231e-01 -3.49062532e-01 -2.49510944e-01 -2.65503556e-01 -1.31269395e-02 7.47383952e-01] [ 1.11202908e+00 -7.46516049e-01 -1.37677670e-01 6.85616970e-01 -4.31256562e-01 -8.92088711e-02 -5.83594441e-02 -3.52573633e-01 -4.51696217e-01 -1.15758824e+00 -8.03425133e-01 4.88738060e-01 5.54880857e-01 6.85825467e-01 -9.36570764e-01] [-1.56256580e+00 -6.72872305e-01 9.38971698e-01 -2.64919102e-01 -4.38691020e-01 6.96025863e-02 -1.03141177e+00 1.34929478e-01 -3.86814922e-01 -1.63587999e+00 -4.28375006e-01 2.90477097e-01 -6.90852106e-01 6.39200330e-01 -4.39055264e-01] [-2.44317934e-01 -2.66914189e-01 7.98285007e-02 4.11328405e-01 1.36607754e+00 -1.74147868e+00 4.21413898e-01 8.91493559e-02 6.75938785e-01 8.48502278e-01 2.69159436e-01 8.20405662e-01 -1.18105292e+00 1.72342747e-01 -2.07235217e-02] [-2.59933293e-01 -3.07451546e-01 -6.68825507e-01 1.21428478e+00 -8.39508176e-01 -5.41196585e-01 -1.97755367e-01 -5.06128967e-01 4.73916531e-03 -2.49222398e-01 -8.57764006e-01 -4.09420043e-01 1.20820332e+00 -9.50119495e-01 7.32699513e-01] [-8.15775156e-01 -1.78453350e+00 6.02679402e-02 -1.05751741e+00 1.10479355e+00 8.85138214e-02 -6.69413507e-01 -1.76673412e-01 4.89669740e-02 -5.21802664e-01 -6.19561553e-01 2.25302011e-01 1.29044509e+00 -2.40633652e-01 -1.48731470e-03] [ 2.49409646e-01 3.33947241e-02 -1.64600492e-01 -9.16152596e-01 -6.93810582e-01 -7.80285239e-01 3.54957551e-01 1.72156262e+00 1.04185486e+00 -1.84459910e-01 1.73856542e-01 -6.39215112e-04 -1.57175809e-02 -2.00824320e-01 -1.05136096e-01]]]]; ov_res: [[[[ 3.27242017e-02 -7.12484270e-02 4.29043472e-01 -6.44314647e-01 4.85023767e-01 -2.67074227e-01 1.09615326e-02 -3.08501899e-01 1.13091993e+00 1.48219705e-01 -2.74905235e-01 3.22919697e-01 1.17412794e+00 -2.70090103e-02 -8.97130072e-02] [-7.45906949e-01 7.35601783e-01 8.36547613e-01 -1.22695088e+00 -9.94107306e-01 1.34102091e-01 3.85106027e-01 -1.04446959e+00 -4.10110205e-02 -5.08390427e-01 -5.33299148e-01 6.91275656e-01 1.77939177e-01 -6.47046328e-01 1.12570369e+00] 
[-6.34748697e-01 1.10180676e-01 1.55265808e+00 5.28022528e-01 -1.81036621e-01 1.49016276e-01 7.95259774e-01 1.24376988e+00 -2.23882869e-02 4.00751740e-01 3.55283767e-01 -2.86427021e-01 -1.06876671e+00 4.57062662e-01 5.68568349e-01] [ 2.66896844e+00 7.52799585e-02 1.75526071e+00 -6.83954060e-01 3.04602027e-01 -1.18424475e+00 -3.02577347e-01 4.42494154e-02 1.13838565e+00 2.20576346e-01 5.74745238e-01 2.57127255e-01 -5.74707866e-01 4.14337218e-02 1.91942707e-01] [-3.39665532e-01 1.06242824e+00 -7.24666655e-01 8.58724236e-01 -1.81526124e-01 -7.47392654e-01 6.20512247e-01 2.57786959e-02 -7.66661286e-01 3.63121867e-01 -3.09539080e-01 -2.55992562e-01 6.64039850e-01 2.49254704e+00 9.27635729e-02] [-6.42248690e-01 9.19777215e-01 1.19357681e+00 -1.37423468e+00 -6.13116622e-01 7.73230433e-01 1.80463761e-01 5.78065634e-01 9.27941442e-01 -1.54063746e-01 6.44510150e-01 9.83392447e-02 -3.10329020e-01 5.39585911e-02 1.72321677e-01] [-5.22273958e-01 3.25154155e-01 -4.87373978e-01 1.73197031e+00 7.58376956e-01 2.23008588e-01 -1.08397567e+00 -1.98994070e-01 5.96733212e-01 4.92533743e-01 -1.25229645e+00 -2.10983992e-01 -4.48321998e-01 -1.41053379e-01 -7.07778931e-01]] [[-6.92150950e-01 -2.77276725e-01 3.38916659e-01 6.08942270e-01 -1.36217505e-01 2.28342950e-01 1.60947025e-01 -8.92024159e-01 5.38651347e-02 3.21385026e-01 4.11425650e-01 1.01884142e-01 8.03210735e-01 -3.34842384e-01 -1.04926980e+00] [-6.27516329e-01 -9.30999160e-01 5.29434264e-01 6.88128114e-01 -2.38966241e-01 -4.00426447e-01 -2.78099656e-01 -2.13537693e+00 6.60869956e-01 -6.40862584e-01 -2.24469304e-02 -1.20769620e+00 -4.20829475e-01 -5.61638355e-01 8.37626457e-01] [ 1.05454135e+00 7.35104501e-01 -3.04105252e-01 -3.32426488e-01 5.64210892e-01 -4.35965657e-01 -2.50051886e-01 -6.03996098e-01 8.51827860e-01 -4.62632924e-01 -1.63158405e+00 3.01219642e-01 -1.05686390e+00 5.62418699e-01 -1.56451374e-01] [ 1.11575156e-01 7.95527220e-01 -5.60482800e-01 1.02625084e+00 2.41885960e-01 -1.72652006e-02 -2.30385616e-01 
-3.25489342e-02 7.02068806e-02 -7.22456723e-02 3.97665560e-01 1.67908907e+00 -2.38118172e-02 8.31956089e-01 6.64288282e-01] [ 4.53483909e-01 -1.59654051e-01 1.04895882e-01 -3.08447897e-01 -4.29463685e-01 3.94188583e-01 -1.91390133e+00 -4.51978534e-01 2.81086594e-01 5.90957105e-01 -1.01626456e-01 -3.92490476e-01 -3.82839650e-01 -8.43390405e-01 4.75716889e-01] [ 2.34526038e-01 -1.24644554e+00 7.89038956e-01 -2.15283409e-02 3.39017689e-01 -5.23420036e-01 -5.84572375e-01 1.11696303e+00 3.07440311e-01 1.03749990e-01 -8.19783211e-02 4.39185202e-01 3.71143967e-01 -5.49043536e-01 1.43651199e+00] [-5.07774055e-01 -4.27547663e-01 -6.70656860e-02 6.90384507e-01 -3.08009386e-01 3.51590514e-01 -9.47144628e-02 -2.28045970e-01 -4.15393114e-01 -3.82892221e-01 4.16929424e-02 -1.40216887e+00 2.10624546e-01 -7.58334398e-02 -4.09630507e-01]] [[-4.80071664e-01 1.31027746e+00 -2.60202616e-01 3.64188075e-01 1.04419097e-01 1.48147523e+00 1.30371332e-01 9.30587649e-01 -2.72342533e-01 1.32234231e-01 -3.49062532e-01 -2.49510944e-01 -2.65503556e-01 -1.31269395e-02 7.47383952e-01] [ 1.11202908e+00 -7.46516049e-01 -1.37677670e-01 6.85616970e-01 -4.31256562e-01 -8.92088711e-02 -5.83594441e-02 -3.52573633e-01 -4.51696217e-01 -1.15758824e+00 -8.03425133e-01 4.88738060e-01 5.54880857e-01 6.85825467e-01 -9.36570764e-01] [-1.56256580e+00 -6.72872305e-01 9.38971698e-01 -2.64919102e-01 -4.38691020e-01 6.96025863e-02 -1.03141177e+00 1.34929478e-01 -3.86814922e-01 -1.63587999e+00 -4.28375006e-01 2.90477097e-01 -6.90852106e-01 6.39200330e-01 -4.39055264e-01] [-2.44317934e-01 -2.66914189e-01 7.98285007e-02 4.11328405e-01 1.36607754e+00 -1.74147868e+00 4.21413898e-01 8.91493559e-02 6.75938785e-01 8.48502278e-01 2.69159436e-01 8.20405662e-01 -1.18105292e+00 1.72342747e-01 -2.07235217e-02] [-2.59933293e-01 -3.07451546e-01 -6.68825507e-01 1.21428478e+00 -8.39508176e-01 -5.41196585e-01 -1.97755367e-01 -5.06128967e-01 4.73916531e-03 -2.49222398e-01 -8.57764006e-01 -4.09420043e-01 1.20820332e+00 -9.50119495e-01 
7.32699513e-01] [-8.15775156e-01 -1.78453350e+00 6.02679402e-02 -1.05751741e+00 1.10479355e+00 8.85138214e-02 -6.69413507e-01 -1.76673412e-01 4.89669740e-02 -5.21802664e-01 -6.19561553e-01 2.25302011e-01 1.29044509e+00 -2.40633652e-01 -1.48731470e-03] [ 2.49409646e-01 3.33947241e-02 -1.64600492e-01 -9.16152596e-01 -6.93810582e-01 -7.80285239e-01 3.54957551e-01 1.72156262e+00 1.04185486e+00 -1.84459910e-01 1.73856542e-01 -6.39215112e-04 -1.57175809e-02 -2.00824320e-01 -1.05136096e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': 3, 'stride': 1, 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5705.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : int[] = prim::Constant[value=[0]]() %4 : int[] = prim::Constant[value=[1]]() %5 : int[] = prim::Constant[value=[3]]() %6 : Tensor = aten::avg_pool1d(%x, %5, %4, %3, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%6) fw_re: [[[ 0.03680425 0.8854421 0.64861655 0.5108185 0.38043776 0.1108684 -0.02822169 -0.06640533 0.25421086 0.50127226 0.2346292 -0.37895235 -0.6098512 ] [-0.2753764 0.02445112 -0.24337418 -0.42638874 -0.40154615 -0.27033985 -0.64620715 -1.23959 -0.73292464 -0.21980141 0.34714806 0.09567463 0.17245972] [ 0.11365592 0.50244963 0.44735858 -0.49593142 -0.93603086 -0.6541261 -0.36694393 -0.36528912 -0.36037198 0.45691717 0.5437682 0.46635982 0.27543926]]]; ov_res: [[[ 0.03680425 0.8854421 0.64861655 0.5108185 0.38043776 0.1108684 -0.02822169 -0.06640533 0.25421086 0.50127226 0.2346292 -0.37895235 -0.6098512 ] [-0.2753764 0.02445112 -0.24337418 -0.42638874 -0.40154615 -0.27033985 -0.64620715 -1.23959 -0.73292464 -0.21980141 0.34714806 0.09567463 0.17245972] [ 0.11365592 0.50244963 0.44735858 -0.49593142 -0.93603086 -0.6541261 -0.36694393 -0.36528912 -0.36037198 0.45691717 0.5437682 0.46635982 0.27543926]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': (4,), 'stride': 1, 'padding': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5708.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : int[] = prim::Constant[value=[1]]() %4 : int[] = prim::Constant[value=[4]]() %5 : Tensor = aten::avg_pool1d(%x, %4, %3, %3, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%5) fw_re: [[[-0.59141904 -0.36805743 -0.47629505 -0.16071409 0.3897589 0.20791137 0.82412374 0.6771488 0.46903932 0.14379534 -0.17464086 -0.45286864 -0.47579247 -0.19206259] [-0.24735463 -0.12117432 -0.24916433 -0.2337821 -0.14457 -0.17595112 0.01872054 0.25396585 0.6573811 1.0173204 0.7295027 0.6964989 0.474654 0.01991554] [-0.40993232 -0.45018482 -0.51896787 -0.12380365 -0.40311378 -0.25936827 -0.42385483 -0.4109043 0.11307304 0.30384165 0.5602249 0.31106606 0.2927295 -0.00153212]]]; ov_res: [[[-0.59141904 -0.36805743 -0.47629505 -0.16071409 0.3897589 0.20791137 0.82412374 0.6771488 0.46903932 0.14379534 -0.17464086 -0.45286864 -0.47579247 -0.19206259] [-0.24735463 -0.12117432 -0.24916433 -0.2337821 -0.14457 -0.17595112 0.01872054 0.25396585 0.6573811 1.0173204 0.7295027 0.6964989 0.474654 0.01991554] [-0.40993232 -0.45018482 -0.51896787 -0.12380365 -0.40311378 -0.25936827 -0.42385483 -0.4109043 0.11307304 0.30384165 0.5602249 0.31106606 0.2927295 -0.00153212]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': 4, 'stride': (5,), 'padding': 2} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5711.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : int[] = prim::Constant[value=[2]]() %4 : int[] = prim::Constant[value=[5]]() %5 : int[] = prim::Constant[value=[4]]() %6 : Tensor = aten::avg_pool1d(%x, %5, %4, %3, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%6) fw_re: [[[ 0.22926846 -0.5397531 -0.46869722 0.22911939] [ 0.11062194 -0.0088307 1.1831727 -0.6672124 ] [-0.08588301 0.64749324 0.38656706 -0.22882989]]]; ov_res: [[[ 0.22926846 -0.5397531 -0.46869722 0.22911939] [ 0.11062194 -0.0088307 1.1831727 -0.6672124 ] [-0.08588301 0.64749324 0.38656706 -0.22882989]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': 3, 'stride': 1, 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5714.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %4 : int[] = prim::Constant[value=[0]]() %5 : int[] = prim::Constant[value=[1]]() %6 : int[] = prim::Constant[value=[3]]() %7 : Tensor = aten::avg_pool1d(%x, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%7) fw_re: [[[ 0.18938379 0.0465219 -0.24749051 -0.5344725 0.023368 -0.31910267 0.5490616 0.00997885 0.68649846 0.20981228 0.6836875 0.3416108 0.31668103] [-0.20389527 -0.10414129 -0.09809767 0.02629092 0.37630782 0.28658655 0.10676873 -0.7825315 -0.21377416 0.44820628 0.77121836 -0.012309 -0.27365407] [ 0.57902527 0.6589861 0.2290463 -0.3665526 0.06844473 0.4020541 0.615475 -0.00177156 0.19706702 0.19432229 0.35714063 -0.27303988 -0.83792186]]]; ov_res: [[[ 0.18938379 0.0465219 -0.24749051 -0.5344725 0.023368 -0.31910267 0.5490616 0.00997885 0.68649846 0.20981228 0.6836875 0.3416108 0.31668103] [-0.20389527 -0.10414129 -0.09809767 0.02629092 0.37630782 0.28658655 0.10676873 -0.7825315 -0.21377416 0.44820628 0.77121836 -0.012309 -0.27365407] [ 0.57902527 0.6589861 0.2290463 -0.3665526 0.06844473 0.4020541 0.615475 -0.00177156 0.19706702 0.19432229 0.35714063 -0.27303988 -0.83792186]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': (4,), 'stride': 1, 'padding': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5717.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %4 : int[] = prim::Constant[value=[1]]() %5 : int[] = prim::Constant[value=[4]]() %6 : Tensor = aten::avg_pool1d(%x, %5, %4, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%6) fw_re: [[[ 0.22599876 0.3972981 -0.02703573 0.17401649 -0.11571877 0.03043792 -0.3968084 -0.41697398 -0.45328796 -0.8863881 -0.64392376 -0.97407186 -0.5789088 -0.46326473] [-0.42875183 -1.0764985 -0.89491844 -0.903976 -0.6473341 0.13831477 -0.50899905 -0.48975265 -0.9039401 -0.8482916 -0.10985404 -0.5046848 -0.44070607 -0.6342567 ] [ 0.08484098 -0.0905121 -0.5507635 -0.34825024 -0.18017866 -0.06626503 0.60496336 0.11345595 0.30982208 0.26153022 -0.1131344 0.2326533 0.06558309 0.17531441]]]; ov_res: [[[ 0.22599876 0.3972981 -0.02703573 0.17401649 -0.11571877 0.03043792 -0.3968084 -0.41697398 -0.45328796 -0.8863881 -0.64392376 -0.97407186 -0.5789088 -0.46326473] [-0.42875183 -1.0764985 -0.89491844 -0.903976 -0.6473341 0.13831477 -0.50899905 -0.48975265 -0.9039401 -0.8482916 -0.10985404 -0.5046848 -0.44070607 -0.6342567 ] [ 0.08484098 -0.0905121 -0.5507635 -0.34825024 -0.18017866 -0.06626503 0.60496336 0.11345595 0.30982208 0.26153022 -0.1131344 0.2326533 0.06558309 0.17531441]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': 4, 'stride': (5,), 'padding': 2} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5720.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %4 : int[] = prim::Constant[value=[2]]() %5 : int[] = prim::Constant[value=[5]]() %6 : int[] = prim::Constant[value=[4]]() %7 : Tensor = aten::avg_pool1d(%x, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%7) fw_re: [[[-0.12741545 0.3356237 0.2935522 -0.3047358 ] [ 0.04709078 0.94414675 -0.2817908 0.20242056] [ 0.68914396 0.8396391 -0.2413154 -0.6345349 ]]]; ov_res: [[[-0.12741545 0.3356237 0.2935522 -0.3047358 ] [ 0.04709078 0.94414675 -0.2817908 0.20242056] [ 0.68914396 0.8396391 -0.2413154 -0.6345349 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': 3, 'stride': 1, 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5723.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %4 : int[] = prim::Constant[value=[0]]() %5 : int[] = prim::Constant[value=[1]]() %6 : int[] = prim::Constant[value=[3]]() %7 : Tensor = aten::avg_pool1d(%x, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%7) fw_re: [[[ 0.6160147 0.5250072 0.47440663 0.7386287 0.49060678 0.1073968 -0.48493347 -0.50583607 -0.86369294 -0.86579967 -0.09707686 0.30184636 -0.27947006] [ 0.07399438 0.12221316 0.19266802 0.44583777 -0.0128557 -0.06845812 0.4344313 0.22802013 0.36567542 -0.6473417 -0.31828284 -0.57423 0.07046782] [-0.95155114 -0.4328117 -0.20767169 -0.18563795 -0.19275276 -0.06974748 0.7565488 -0.03927982 0.3962082 -0.20503114 -0.28286758 -0.24364083 0.30002236]]]; ov_res: [[[ 0.6160147 0.5250072 0.47440663 0.7386287 0.49060678 0.1073968 -0.48493347 -0.50583607 -0.86369294 -0.86579967 -0.09707686 0.30184636 -0.27947006] [ 0.07399438 0.12221316 0.19266802 0.44583777 -0.0128557 -0.06845812 0.4344313 0.22802013 0.36567542 -0.6473417 -0.31828284 -0.57423 0.07046782] [-0.95155114 -0.4328117 -0.20767169 -0.18563795 -0.19275276 -0.06974748 0.7565488 -0.03927982 0.3962082 -0.20503114 -0.28286758 -0.24364083 0.30002236]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': (4,), 'stride': 1, 'padding': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5726.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %4 : int[] = prim::Constant[value=[1]]() %5 : int[] = prim::Constant[value=[4]]() %6 : Tensor = aten::avg_pool1d(%x, %5, %4, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%6) fw_re: [[[ 0.4538455 0.67221 1.2882116 1.2417623 0.10994709 -0.07501107 -0.500648 -0.9601667 0.03046814 0.18055156 0.5808136 0.9755321 1.1743312 1.1698403 ] [-0.29890352 -0.38294524 -0.23088534 -0.34461266 -0.39590362 0.3027392 0.11215252 -0.25616547 0.02929055 0.05402237 0.49292785 0.98059344 1.105222 0.7208198 ] [ 0.04837751 -0.10734168 -0.23018068 0.41478708 0.29447478 0.00349537 0.56229615 0.26467294 -0.0541821 0.70531183 0.49319318 0.49791497 1.1563015 1.108549 ]]]; ov_res: [[[ 0.4538455 0.67221 1.2882116 1.2417623 0.10994709 -0.07501107 -0.500648 -0.9601667 0.03046814 0.18055156 0.5808136 0.9755321 1.1743312 1.1698403 ] [-0.29890352 -0.38294524 -0.23088534 -0.34461266 -0.39590362 0.3027392 0.11215252 -0.25616547 0.02929055 0.05402237 0.49292785 0.98059344 1.105222 0.7208198 ] [ 0.04837751 -0.10734168 -0.23018068 0.41478708 0.29447478 0.00349537 0.56229615 0.26467294 -0.0541821 0.70531183 0.49319318 0.49791497 1.1563015 1.108549 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': 4, 'stride': (5,), 'padding': 2} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5729.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %4 : int[] = prim::Constant[value=[2]]() %5 : int[] = prim::Constant[value=[5]]() %6 : int[] = prim::Constant[value=[4]]() %7 : Tensor = aten::avg_pool1d(%x, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%7) fw_re: [[[ 0.7762734 0.00790447 0.8327994 -0.21957134] [-0.66124666 0.23136592 0.455202 -0.40017602] [-0.2770514 0.2999536 0.34450746 0.04588553]]]; ov_res: [[[ 0.7762734 0.00790447 0.8327994 -0.21957134] [-0.66124666 0.23136592 0.455202 -0.40017602] [-0.2770514 0.2999536 0.34450746 0.04588553]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': 3, 'stride': 1, 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5732.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : int[] = prim::Constant[value=[0]]() %4 : int[] = prim::Constant[value=[1]]() %5 : int[] = prim::Constant[value=[3]]() %6 : Tensor = aten::avg_pool1d(%x, %5, %4, %3, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%6) fw_re: [[[-1.347443 -0.6958242 0.0093509 0.38908756 0.84577614 0.3705671 0.4066449 -0.31270024 0.13110054 0.00432155 0.8213127 0.4777361 0.2358541 ] [ 0.5827343 0.963595 1.0604088 -0.05252584 -0.49409378 -1.247883 -0.18615603 -0.2879536 0.12230661 -0.01800175 -0.1871361 -0.00593305 -0.49707636] [-0.5732703 -0.3399875 0.35303617 0.49134842 0.7352888 0.08618867 -0.07413153 -0.5950623 -0.23986857 -0.13001163 0.1977626 -0.23176175 0.5134633 ]]]; ov_res: [[[-1.347443 -0.6958242 0.0093509 0.38908756 0.84577614 0.3705671 0.4066449 -0.31270024 0.13110054 0.00432155 0.8213127 0.4777361 0.2358541 ] [ 0.5827343 0.963595 1.0604088 -0.05252584 -0.49409378 -1.247883 -0.18615603 -0.2879536 0.12230661 -0.01800175 -0.1871361 -0.00593305 -0.49707636] [-0.5732703 -0.3399875 0.35303617 0.49134842 0.7352888 0.08618867 -0.07413153 -0.5950623 -0.23986857 -0.13001163 0.1977626 -0.23176175 0.5134633 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': (4,), 'stride': 1, 'padding': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5735.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : int[] = prim::Constant[value=[1]]() %4 : int[] = prim::Constant[value=[4]]() %5 : Tensor = aten::avg_pool1d(%x, %4, %3, %3, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%5) fw_re: [[[-0.25781044 -0.16624704 -0.19905585 -0.15278083 0.01541834 -0.09962916 0.41319013 0.54313326 0.5423941 0.6391616 0.44916794 0.7348618 0.7708247 1.0159918 ] [ 0.6841784 0.6853353 0.7122053 0.48841196 0.4948424 0.37902123 0.04233682 0.29359457 0.10178961 0.3624984 0.19911604 -0.11003163 0.58135515 0.35235485] [ 0.30885804 0.258222 0.03636964 -0.23828669 -0.6299853 -0.5224886 -0.657506 -0.6796979 -0.5392563 -0.33506614 -0.06938852 0.32262433 0.36213824 0.03183058]]]; ov_res: [[[-0.25781044 -0.16624704 -0.19905585 -0.15278083 0.01541834 -0.09962916 0.41319013 0.54313326 0.5423941 0.6391616 0.44916794 0.7348618 0.7708247 1.0159918 ] [ 0.6841784 0.6853353 0.7122053 0.48841196 0.4948424 0.37902123 0.04233682 0.29359457 0.10178961 0.3624984 0.19911604 -0.11003163 0.58135515 0.35235485] [ 0.30885804 0.258222 0.03636964 -0.23828669 -0.6299853 -0.5224886 -0.657506 -0.6796979 -0.5392563 -0.33506614 -0.06938852 0.32262433 0.36213824 0.03183058]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool1d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': 4, 'stride': (5,), 'padding': 2} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5738.aten_avg_pool1d, %x : Tensor): %2 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 %3 : int[] = prim::Constant[value=[2]]() %4 : int[] = prim::Constant[value=[5]]() %5 : int[] = prim::Constant[value=[4]]() %6 : Tensor = aten::avg_pool1d(%x, %5, %4, %3, %2, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:72:0 return (%6) fw_re: [[[ 0.6331216 0.41251224 -0.10989791 0.34783155] [ 0.81002533 -0.39495316 -0.39480913 -1.1112765 ] [ 0.4704291 0.16460764 -0.892701 -0.3259311 ]]]; ov_res: [[[ 0.6331216 0.41251224 -0.10989791 0.34783155] [ 0.81002533 -0.39495316 -0.39480913 -1.1112765 ] [ 0.4704291 0.16460764 -0.892701 -0.3259311 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5740.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : int[] = prim::Constant[value=[0, 0, 0]]() %5 : int[] = prim::Constant[value=[1, 1, 1]]() %6 : int[] = prim::Constant[value=[3, 3, 3]]() %7 : Tensor = aten::avg_pool3d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%7) fw_re: [[[[[-1.04262985e-01 -3.74921039e-02 -2.92631034e-02 ... -6.12349957e-02 3.24577913e-02 -5.60778193e-02] [-3.93588729e-02 -8.05184171e-02 -1.29410207e-01 ... -5.58222383e-02 3.79393063e-02 -9.78087708e-02] [ 2.41489619e-01 4.47013825e-02 -9.14818943e-02 ... 3.65896560e-02 6.58737943e-02 9.22440588e-02] ... [-1.42795593e-01 -2.58020610e-01 -2.15370744e-01 ... -3.88618797e-01 -1.66178390e-01 -1.79028824e-01] [ 1.25019088e-01 4.70613427e-02 -2.88784062e-03 ... 4.03430387e-02 -4.03228737e-02 -1.17686994e-01] [ 1.29051223e-01 2.24717081e-01 -4.99809124e-02 ... 1.90940544e-01 9.49922651e-02 -2.10312575e-01]] [[ 7.69583210e-02 3.36872414e-02 5.63725159e-02 ... 7.01313093e-03 6.23980612e-02 -9.94186550e-02] [ 1.55844688e-01 2.40108874e-02 -1.83319161e-03 ... 1.54688969e-01 2.03198850e-01 -6.18023314e-02] [ 1.07595004e-01 -6.75322637e-02 2.58949865e-02 ... 2.60331035e-01 2.74433225e-01 7.77123347e-02] ... [ 9.75353345e-02 -2.12016311e-02 3.04753222e-02 ... -3.50804955e-01 -1.02954432e-01 1.77425861e-01] [ 2.95207262e-01 3.06088239e-01 1.98382035e-01 ... 1.01708882e-01 1.01174168e-01 1.53688923e-01] [ 1.80228174e-01 3.04007858e-01 1.29990190e-01 ... 2.08429053e-01 2.46180728e-01 -4.96425815e-02]] [[ 8.79434124e-02 -8.48370194e-02 -1.65156558e-01 ... 9.67245176e-02 1.21682972e-01 -1.17474973e-01] [ 5.06106317e-02 -9.68045294e-02 -2.68390745e-01 ... 
1.33579090e-01 6.87741414e-02 -8.88515115e-02] [-6.02612756e-02 -3.73464376e-01 -4.93251741e-01 ... 1.69994012e-01 -7.21417973e-03 -8.31518546e-02] ... [ 1.56246319e-01 1.19165137e-01 -3.82541679e-02 ... -3.50440413e-01 -1.84713807e-02 3.11683536e-01] [ 3.52275312e-01 2.89008945e-01 1.42519558e-02 ... 3.00849658e-02 1.66502953e-01 3.35481972e-01] [-1.79123376e-02 6.98408112e-02 -2.16791078e-01 ... 5.77778295e-02 2.57526219e-01 1.13462798e-01]] ... [[ 1.60005987e-01 -1.68081880e-01 -2.87433028e-01 ... -3.92611742e-01 -1.96655497e-01 1.75586417e-01] [ 2.57809103e-01 -3.22560742e-02 -7.13601112e-02 ... -3.05676848e-01 -8.29504877e-02 1.33674920e-01] [ 8.05229545e-02 1.18909553e-02 -7.91538134e-03 ... -1.92033961e-01 1.00672483e-01 1.60116166e-01] ... [ 1.47238290e-02 6.80524409e-02 1.58607930e-01 ... 4.50981073e-02 8.89105871e-02 1.47594795e-01] [-2.86585446e-02 -5.63659668e-02 2.16033295e-01 ... 7.86777064e-02 1.97762311e-01 7.23193288e-02] [ 2.35282078e-01 3.66536289e-01 3.42396230e-01 ... 7.11959526e-02 1.37303531e-01 2.17021219e-02]] [[-1.73300177e-01 -3.69089901e-01 -4.89296854e-01 ... -3.20496023e-01 -1.16965570e-01 1.62224591e-01] [-1.69614017e-01 -2.96064913e-01 -3.89672875e-01 ... -1.09520331e-02 1.84465125e-01 2.51898348e-01] [ 2.65829451e-02 9.32956636e-02 -8.01255554e-02 ... -1.39559567e-01 2.06072524e-01 2.19696183e-02] ... [ 1.03963055e-01 1.29502565e-01 1.03046753e-01 ... 2.29555726e-01 3.29457432e-01 3.18353206e-01] [ 8.70012939e-02 1.43386737e-01 1.46212757e-01 ... 8.36771503e-02 1.23806566e-01 1.55137792e-01] [ 3.96166593e-01 5.10565817e-01 2.14541703e-01 ... -1.91861056e-02 1.39071990e-05 -2.01136507e-02]] [[ 4.07261811e-02 -1.40761361e-01 -4.31042999e-01 ... -4.14109915e-01 -1.28155679e-01 -9.42956097e-03] [ 1.27000839e-01 -8.86176005e-02 -3.16050351e-01 ... -7.71061927e-02 1.49390802e-01 1.17955640e-01] [ 1.90393612e-01 1.18778318e-01 -1.27210408e-01 ... -8.22492018e-02 2.49515131e-01 3.18055749e-02] ... 
[-7.28103938e-03 -1.51119739e-01 1.11049460e-02 ... 1.84045181e-01 3.75116318e-01 3.04341048e-01] [ 4.76294942e-02 -3.23959142e-02 1.07848234e-01 ... -1.68930978e-01 1.65900483e-03 -1.13343559e-01] [ 2.08846435e-01 2.51821846e-01 1.15312330e-01 ... -1.41414031e-01 -8.36217590e-03 -5.42233326e-02]]] [[[-2.07560360e-01 -1.23374127e-01 6.83245063e-02 ... 8.81112963e-02 2.87522655e-02 -2.84439892e-01] [-6.83641508e-02 1.22850522e-01 1.94018260e-01 ... 1.95922166e-01 1.44569218e-01 -8.32547992e-02] [-1.80729050e-02 1.49817377e-01 1.91495359e-01 ... 4.40251119e-02 -1.19804498e-02 -8.83317962e-02] ... [ 5.93158789e-02 2.67385036e-01 3.27204555e-01 ... 2.31727138e-01 -1.41734064e-01 -2.39358544e-01] [-7.23422915e-02 -4.04784409e-03 3.10896426e-01 ... 9.67485979e-02 -2.58965254e-01 -3.77453357e-01] [-2.81885892e-01 -3.11611444e-01 9.75564346e-02 ... 6.12826385e-02 -1.44478351e-01 -2.32856646e-01]] [[-2.84796990e-02 -1.71713337e-01 1.23306930e-01 ... 1.85365006e-02 -1.72715887e-01 -1.48082346e-01] [ 1.55265033e-01 5.10465205e-02 1.71947822e-01 ... 3.14797372e-01 -2.91869529e-02 3.87554592e-03] [ 4.17824745e-01 2.58745700e-01 2.80688345e-01 ... 1.28569677e-01 -7.53624886e-02 -2.95914579e-02] ... [-1.10272706e-01 1.44957364e-01 1.86844826e-01 ... 5.58161177e-02 -3.52935165e-01 -4.52605009e-01] [-3.82941961e-01 -1.88512877e-01 8.15679412e-03 ... -1.17284302e-02 -4.23723847e-01 -4.39581722e-01] [-3.68648410e-01 -3.84078532e-01 -9.77365449e-02 ... 1.06397219e-01 -1.25277877e-01 -6.35042712e-02]] [[ 4.43918794e-01 7.10339919e-02 9.37602818e-02 ... -2.78847292e-02 -1.61568746e-01 -1.55469969e-01] [ 4.00136352e-01 8.45795050e-02 -6.27294742e-03 ... 2.19116405e-01 -6.39965385e-02 -3.63638960e-02] [ 4.89051580e-01 3.66271555e-01 1.79604650e-01 ... 2.86205441e-01 8.22021589e-02 -1.56764627e-01] ... [ 2.73053516e-02 4.28697728e-02 -6.04694523e-02 ... 1.85170844e-01 -2.50093579e-01 -3.17576170e-01] [-3.57390195e-01 -4.73673880e-01 -2.86282450e-01 ... 
2.18001958e-02 -2.53842473e-01 -2.38229483e-01] [-3.18362653e-01 -5.13772905e-01 -2.82278866e-01 ... 3.64973508e-02 -1.08089104e-01 -1.60864040e-01]] ... [[-2.52708316e-01 -2.46423513e-01 -3.84066850e-01 ... -1.21572800e-01 -1.12777166e-01 -1.36312366e-01] [-5.12877524e-01 -2.21898779e-01 -3.16387951e-01 ... -6.31039441e-02 -1.25330701e-01 -2.98802018e-01] [-3.59479934e-01 -4.15275805e-02 9.56642032e-02 ... 1.65924639e-01 9.78031680e-02 -1.01270296e-01] ... [-1.37765780e-01 -6.65135682e-02 1.16268650e-01 ... 2.90958006e-02 1.64748505e-01 2.79455572e-01] [ 2.34241933e-02 -8.28090385e-02 -8.02860111e-02 ... -3.06746252e-02 2.64939573e-02 1.65709287e-01] [ 1.50791705e-01 1.01910643e-01 -7.58534297e-02 ... 1.44453168e-01 1.00629665e-01 9.36946720e-02]] [[-1.95274696e-01 -1.06495492e-01 -2.02929378e-01 ... -1.67459458e-01 -1.34713039e-01 -2.14781642e-01] [-4.45394844e-01 -1.51395187e-01 -1.72785118e-01 ... -9.08416584e-02 -1.35594636e-01 -3.23229045e-01] [-4.82485354e-01 -1.20226227e-01 8.25126469e-02 ... 1.37412801e-01 -9.66317300e-03 -2.68840045e-01] ... [ 1.28288977e-02 3.20786312e-02 7.50056729e-02 ... -4.08098280e-01 -6.16944097e-02 1.53729364e-01] [ 1.70369148e-01 5.77405989e-02 7.88104460e-02 ... -1.16511114e-01 -1.29932329e-01 -8.40277970e-02] [ 2.20418870e-01 3.06842774e-01 1.25243366e-01 ... -9.06535909e-02 -1.44777283e-01 -6.59803301e-02]] [[-1.79790989e-01 -8.38917792e-02 9.26863682e-03 ... 1.26721993e-01 6.84176832e-02 8.12543482e-02] [-4.32596058e-01 -2.22815216e-01 1.23475315e-02 ... 2.56218761e-01 2.82733977e-01 1.82902172e-01] [-3.17234039e-01 -2.57106245e-01 1.34282157e-01 ... 2.44341642e-01 2.23548710e-01 1.06157929e-01] ... [ 4.06549014e-02 5.06885014e-02 1.70943737e-01 ... -2.99545348e-01 -6.10692427e-02 2.02263057e-01] [ 3.14707369e-01 7.83437714e-02 4.31019701e-02 ... -1.10648811e-01 -1.51235133e-01 4.99292649e-02] [ 3.52241278e-01 2.61139959e-01 -2.48510037e-02 ... 
-2.15692833e-01 -3.38460982e-01 -1.56900153e-01]]] [[[ 4.05936055e-02 -1.48611248e-01 -5.29056750e-02 ... 2.80592650e-01 3.88608515e-01 2.38599896e-01] [-7.44074062e-02 -1.49071559e-01 -5.72957918e-02 ... 1.19202010e-01 7.03587905e-02 5.83013855e-02] [-1.97470933e-01 -2.35524669e-01 3.91927473e-02 ... 7.78200179e-02 -1.38144717e-01 -3.52067709e-01] ... [ 2.73788452e-01 6.58700913e-02 1.16792470e-01 ... 2.68334419e-01 1.53777376e-01 3.56677115e-01] [ 1.41197266e-02 -3.12650055e-01 -1.60980478e-01 ... 3.54811661e-02 3.03930491e-02 2.09346369e-01] [-1.84850723e-01 -7.44529292e-02 5.89559376e-02 ... -6.99448660e-02 -2.74302233e-02 9.90186036e-02]] [[-1.62273943e-01 -1.92922890e-01 -4.23360206e-02 ... 2.17616484e-01 2.53376752e-01 2.38869116e-02] [-5.39900213e-02 -2.16900826e-01 -2.20394328e-01 ... 1.36890560e-01 9.01389271e-02 -2.19518449e-02] [-3.10535640e-01 -4.18316841e-01 -1.24780737e-01 ... 2.35853083e-02 -5.07825650e-02 -2.95844883e-01] ... [-4.57207896e-02 1.29872531e-01 2.58341312e-01 ... 2.21799359e-01 1.40453249e-01 4.20637876e-01] [-3.78371060e-01 -4.61561590e-01 -2.13930920e-01 ... -9.97720137e-02 -8.47512856e-02 1.47548437e-01] [-3.29226226e-01 -3.02702338e-01 4.26235758e-02 ... -1.51478067e-01 -5.32829240e-02 1.11794576e-01]] [[-4.79715578e-02 -8.91729444e-02 -3.20068412e-02 ... 2.20336258e-01 3.69062692e-01 1.14650495e-01] [ 7.26017058e-02 -1.64826140e-01 -2.66413718e-01 ... 2.30552584e-01 3.00871849e-01 2.36313701e-01] [-2.87471950e-01 -4.33987737e-01 -3.20033878e-01 ... 2.02213481e-01 2.08370402e-01 1.78064510e-01] ... [ 5.56000695e-02 3.13468784e-01 2.74305254e-01 ... -1.27283912e-02 1.41565025e-01 2.65272200e-01] [-1.71956852e-01 -1.58055723e-01 -1.31312504e-01 ... -3.13773826e-02 -4.92527299e-02 1.44033775e-01] [-2.07701236e-01 -1.16145954e-01 3.48103754e-02 ... -2.00433824e-02 6.72644824e-02 2.97353208e-01]] ... [[ 2.10983112e-01 1.46824956e-01 1.05480656e-01 ... 
-4.20975201e-02 8.94962400e-02 2.23306373e-01] [ 1.34653017e-01 1.44107491e-01 1.59701347e-01 ... 1.40202984e-01 2.63018787e-01 2.00208098e-01] [ 1.27089635e-01 1.59707040e-01 3.24460387e-01 ... 1.58921210e-03 4.34518158e-02 -1.55322641e-01] ... [ 3.55366498e-01 3.23146462e-01 7.91675076e-02 ... -9.44712982e-02 4.36254889e-02 1.19876772e-01] [ 1.92841023e-01 3.37322354e-01 1.54211834e-01 ... 3.28542367e-02 2.14555487e-01 1.60990894e-01] [-3.83374207e-02 2.43055031e-01 2.10379004e-01 ... 1.92610145e-01 4.36053872e-01 2.10202858e-01]] [[ 1.16442114e-01 -1.05357416e-01 -1.63629085e-01 ... 5.34940664e-05 2.69582540e-01 2.51775950e-01] [ 1.69228166e-01 -1.62723623e-02 -1.81996375e-01 ... -1.75672546e-01 9.06483829e-02 2.82514766e-02] [ 7.61026293e-02 -1.06797993e-01 1.45223243e-02 ... -2.17159092e-01 -1.51532426e-01 -9.72723812e-02] ... [ 5.04136784e-03 2.63744473e-01 5.50245196e-02 ... -8.03876892e-02 -5.81481401e-03 1.73807204e-01] [-9.39370506e-03 2.16811955e-01 1.90634094e-02 ... 5.40290261e-03 8.51285458e-02 9.41008404e-02] [-1.98356420e-01 9.43482369e-02 -7.27000535e-02 ... 2.81258337e-02 6.68602288e-02 -3.34521686e-03]] [[-5.37076704e-02 -2.55743355e-01 -2.78280914e-01 ... 2.24331394e-02 1.47892669e-01 7.28430077e-02] [-1.56974047e-02 -1.70193419e-01 -1.77041784e-01 ... -2.14671820e-01 -1.71972383e-02 -1.96093664e-01] [-5.53879049e-03 -3.25313598e-01 -1.46585539e-01 ... -1.31104097e-01 -1.52581498e-01 -1.14269905e-01] ... [-7.69901695e-03 1.57454520e-01 7.91773424e-02 ... -2.17773944e-01 -3.95288974e-01 -9.61026624e-02] [-1.06032178e-01 1.66523680e-01 9.01599899e-02 ... -1.68946069e-02 -1.40336633e-01 -9.73984897e-02] [-2.29122460e-01 1.82526127e-01 1.73927113e-01 ... 5.21362349e-02 -3.66650745e-02 -1.03648953e-01]]]]]; ov_res: [[[[[-1.04262985e-01 -3.74921039e-02 -2.92631034e-02 ... -6.12349957e-02 3.24577913e-02 -5.60778193e-02] [-3.93588729e-02 -8.05184171e-02 -1.29410207e-01 ... 
-5.58222383e-02 3.79393063e-02 -9.78087708e-02] [ 2.41489619e-01 4.47013825e-02 -9.14818943e-02 ... 3.65896560e-02 6.58737943e-02 9.22440588e-02] ... [-1.42795593e-01 -2.58020610e-01 -2.15370744e-01 ... -3.88618797e-01 -1.66178390e-01 -1.79028824e-01] [ 1.25019088e-01 4.70613427e-02 -2.88784062e-03 ... 4.03430387e-02 -4.03228737e-02 -1.17686994e-01] [ 1.29051223e-01 2.24717081e-01 -4.99809124e-02 ... 1.90940544e-01 9.49922651e-02 -2.10312575e-01]] [[ 7.69583210e-02 3.36872414e-02 5.63725159e-02 ... 7.01313093e-03 6.23980612e-02 -9.94186550e-02] [ 1.55844688e-01 2.40108874e-02 -1.83319161e-03 ... 1.54688969e-01 2.03198850e-01 -6.18023314e-02] [ 1.07595004e-01 -6.75322637e-02 2.58949865e-02 ... 2.60331035e-01 2.74433225e-01 7.77123347e-02] ... [ 9.75353345e-02 -2.12016311e-02 3.04753222e-02 ... -3.50804955e-01 -1.02954432e-01 1.77425861e-01] [ 2.95207262e-01 3.06088239e-01 1.98382035e-01 ... 1.01708882e-01 1.01174168e-01 1.53688923e-01] [ 1.80228174e-01 3.04007858e-01 1.29990190e-01 ... 2.08429053e-01 2.46180728e-01 -4.96425815e-02]] [[ 8.79434124e-02 -8.48370194e-02 -1.65156558e-01 ... 9.67245176e-02 1.21682972e-01 -1.17474973e-01] [ 5.06106317e-02 -9.68045294e-02 -2.68390745e-01 ... 1.33579090e-01 6.87741414e-02 -8.88515115e-02] [-6.02612756e-02 -3.73464376e-01 -4.93251741e-01 ... 1.69994012e-01 -7.21417973e-03 -8.31518546e-02] ... [ 1.56246319e-01 1.19165137e-01 -3.82541679e-02 ... -3.50440413e-01 -1.84713807e-02 3.11683536e-01] [ 3.52275312e-01 2.89008945e-01 1.42519558e-02 ... 3.00849658e-02 1.66502953e-01 3.35481972e-01] [-1.79123376e-02 6.98408112e-02 -2.16791078e-01 ... 5.77778295e-02 2.57526219e-01 1.13462798e-01]] ... [[ 1.60005987e-01 -1.68081880e-01 -2.87433028e-01 ... -3.92611742e-01 -1.96655497e-01 1.75586417e-01] [ 2.57809103e-01 -3.22560742e-02 -7.13601112e-02 ... -3.05676848e-01 -8.29504877e-02 1.33674920e-01] [ 8.05229545e-02 1.18909553e-02 -7.91538134e-03 ... -1.92033961e-01 1.00672483e-01 1.60116166e-01] ... 
[ 1.47238290e-02 6.80524409e-02 1.58607930e-01 ... 4.50981073e-02 8.89105871e-02 1.47594795e-01] [-2.86585446e-02 -5.63659668e-02 2.16033295e-01 ... 7.86777064e-02 1.97762311e-01 7.23193288e-02] [ 2.35282078e-01 3.66536289e-01 3.42396230e-01 ... 7.11959526e-02 1.37303531e-01 2.17021219e-02]] [[-1.73300177e-01 -3.69089901e-01 -4.89296854e-01 ... -3.20496023e-01 -1.16965570e-01 1.62224591e-01] [-1.69614017e-01 -2.96064913e-01 -3.89672875e-01 ... -1.09520331e-02 1.84465125e-01 2.51898348e-01] [ 2.65829451e-02 9.32956636e-02 -8.01255554e-02 ... -1.39559567e-01 2.06072524e-01 2.19696183e-02] ... [ 1.03963055e-01 1.29502565e-01 1.03046753e-01 ... 2.29555726e-01 3.29457432e-01 3.18353206e-01] [ 8.70012939e-02 1.43386737e-01 1.46212757e-01 ... 8.36771503e-02 1.23806566e-01 1.55137792e-01] [ 3.96166593e-01 5.10565817e-01 2.14541703e-01 ... -1.91861056e-02 1.39071990e-05 -2.01136507e-02]] [[ 4.07261811e-02 -1.40761361e-01 -4.31042999e-01 ... -4.14109915e-01 -1.28155679e-01 -9.42956097e-03] [ 1.27000839e-01 -8.86176005e-02 -3.16050351e-01 ... -7.71061927e-02 1.49390802e-01 1.17955640e-01] [ 1.90393612e-01 1.18778318e-01 -1.27210408e-01 ... -8.22492018e-02 2.49515131e-01 3.18055749e-02] ... [-7.28103938e-03 -1.51119739e-01 1.11049460e-02 ... 1.84045181e-01 3.75116318e-01 3.04341048e-01] [ 4.76294942e-02 -3.23959142e-02 1.07848234e-01 ... -1.68930978e-01 1.65900483e-03 -1.13343559e-01] [ 2.08846435e-01 2.51821846e-01 1.15312330e-01 ... -1.41414031e-01 -8.36217590e-03 -5.42233326e-02]]] [[[-2.07560360e-01 -1.23374127e-01 6.83245063e-02 ... 8.81112963e-02 2.87522655e-02 -2.84439892e-01] [-6.83641508e-02 1.22850522e-01 1.94018260e-01 ... 1.95922166e-01 1.44569218e-01 -8.32547992e-02] [-1.80729050e-02 1.49817377e-01 1.91495359e-01 ... 4.40251119e-02 -1.19804498e-02 -8.83317962e-02] ... [ 5.93158789e-02 2.67385036e-01 3.27204555e-01 ... 2.31727138e-01 -1.41734064e-01 -2.39358544e-01] [-7.23422915e-02 -4.04784409e-03 3.10896426e-01 ... 
9.67485979e-02 -2.58965254e-01 -3.77453357e-01] [-2.81885892e-01 -3.11611444e-01 9.75564346e-02 ... 6.12826385e-02 -1.44478351e-01 -2.32856646e-01]] [[-2.84796990e-02 -1.71713337e-01 1.23306930e-01 ... 1.85365006e-02 -1.72715887e-01 -1.48082346e-01] [ 1.55265033e-01 5.10465205e-02 1.71947822e-01 ... 3.14797372e-01 -2.91869529e-02 3.87554592e-03] [ 4.17824745e-01 2.58745700e-01 2.80688345e-01 ... 1.28569677e-01 -7.53624886e-02 -2.95914579e-02] ... [-1.10272706e-01 1.44957364e-01 1.86844826e-01 ... 5.58161177e-02 -3.52935165e-01 -4.52605009e-01] [-3.82941961e-01 -1.88512877e-01 8.15679412e-03 ... -1.17284302e-02 -4.23723847e-01 -4.39581722e-01] [-3.68648410e-01 -3.84078532e-01 -9.77365449e-02 ... 1.06397219e-01 -1.25277877e-01 -6.35042712e-02]] [[ 4.43918794e-01 7.10339919e-02 9.37602818e-02 ... -2.78847292e-02 -1.61568746e-01 -1.55469969e-01] [ 4.00136352e-01 8.45795050e-02 -6.27294742e-03 ... 2.19116405e-01 -6.39965385e-02 -3.63638960e-02] [ 4.89051580e-01 3.66271555e-01 1.79604650e-01 ... 2.86205441e-01 8.22021589e-02 -1.56764627e-01] ... [ 2.73053516e-02 4.28697728e-02 -6.04694523e-02 ... 1.85170844e-01 -2.50093579e-01 -3.17576170e-01] [-3.57390195e-01 -4.73673880e-01 -2.86282450e-01 ... 2.18001958e-02 -2.53842473e-01 -2.38229483e-01] [-3.18362653e-01 -5.13772905e-01 -2.82278866e-01 ... 3.64973508e-02 -1.08089104e-01 -1.60864040e-01]] ... [[-2.52708316e-01 -2.46423513e-01 -3.84066850e-01 ... -1.21572800e-01 -1.12777166e-01 -1.36312366e-01] [-5.12877524e-01 -2.21898779e-01 -3.16387951e-01 ... -6.31039441e-02 -1.25330701e-01 -2.98802018e-01] [-3.59479934e-01 -4.15275805e-02 9.56642032e-02 ... 1.65924639e-01 9.78031680e-02 -1.01270296e-01] ... [-1.37765780e-01 -6.65135682e-02 1.16268650e-01 ... 2.90958006e-02 1.64748505e-01 2.79455572e-01] [ 2.34241933e-02 -8.28090385e-02 -8.02860111e-02 ... -3.06746252e-02 2.64939573e-02 1.65709287e-01] [ 1.50791705e-01 1.01910643e-01 -7.58534297e-02 ... 
1.44453168e-01 1.00629665e-01 9.36946720e-02]] [[-1.95274696e-01 -1.06495492e-01 -2.02929378e-01 ... -1.67459458e-01 -1.34713039e-01 -2.14781642e-01] [-4.45394844e-01 -1.51395187e-01 -1.72785118e-01 ... -9.08416584e-02 -1.35594636e-01 -3.23229045e-01] [-4.82485354e-01 -1.20226227e-01 8.25126469e-02 ... 1.37412801e-01 -9.66317300e-03 -2.68840045e-01] ... [ 1.28288977e-02 3.20786312e-02 7.50056729e-02 ... -4.08098280e-01 -6.16944097e-02 1.53729364e-01] [ 1.70369148e-01 5.77405989e-02 7.88104460e-02 ... -1.16511114e-01 -1.29932329e-01 -8.40277970e-02] [ 2.20418870e-01 3.06842774e-01 1.25243366e-01 ... -9.06535909e-02 -1.44777283e-01 -6.59803301e-02]] [[-1.79790989e-01 -8.38917792e-02 9.26863682e-03 ... 1.26721993e-01 6.84176832e-02 8.12543482e-02] [-4.32596058e-01 -2.22815216e-01 1.23475315e-02 ... 2.56218761e-01 2.82733977e-01 1.82902172e-01] [-3.17234039e-01 -2.57106245e-01 1.34282157e-01 ... 2.44341642e-01 2.23548710e-01 1.06157929e-01] ... [ 4.06549014e-02 5.06885014e-02 1.70943737e-01 ... -2.99545348e-01 -6.10692427e-02 2.02263057e-01] [ 3.14707369e-01 7.83437714e-02 4.31019701e-02 ... -1.10648811e-01 -1.51235133e-01 4.99292649e-02] [ 3.52241278e-01 2.61139959e-01 -2.48510037e-02 ... -2.15692833e-01 -3.38460982e-01 -1.56900153e-01]]] [[[ 4.05936055e-02 -1.48611248e-01 -5.29056750e-02 ... 2.80592650e-01 3.88608515e-01 2.38599896e-01] [-7.44074062e-02 -1.49071559e-01 -5.72957918e-02 ... 1.19202010e-01 7.03587905e-02 5.83013855e-02] [-1.97470933e-01 -2.35524669e-01 3.91927473e-02 ... 7.78200179e-02 -1.38144717e-01 -3.52067709e-01] ... [ 2.73788452e-01 6.58700913e-02 1.16792470e-01 ... 2.68334419e-01 1.53777376e-01 3.56677115e-01] [ 1.41197266e-02 -3.12650055e-01 -1.60980478e-01 ... 3.54811661e-02 3.03930491e-02 2.09346369e-01] [-1.84850723e-01 -7.44529292e-02 5.89559376e-02 ... -6.99448660e-02 -2.74302233e-02 9.90186036e-02]] [[-1.62273943e-01 -1.92922890e-01 -4.23360206e-02 ... 
2.17616484e-01 2.53376752e-01 2.38869116e-02] [-5.39900213e-02 -2.16900826e-01 -2.20394328e-01 ... 1.36890560e-01 9.01389271e-02 -2.19518449e-02] [-3.10535640e-01 -4.18316841e-01 -1.24780737e-01 ... 2.35853083e-02 -5.07825650e-02 -2.95844883e-01] ... [-4.57207896e-02 1.29872531e-01 2.58341312e-01 ... 2.21799359e-01 1.40453249e-01 4.20637876e-01] [-3.78371060e-01 -4.61561590e-01 -2.13930920e-01 ... -9.97720137e-02 -8.47512856e-02 1.47548437e-01] [-3.29226226e-01 -3.02702338e-01 4.26235758e-02 ... -1.51478067e-01 -5.32829240e-02 1.11794576e-01]] [[-4.79715578e-02 -8.91729444e-02 -3.20068412e-02 ... 2.20336258e-01 3.69062692e-01 1.14650495e-01] [ 7.26017058e-02 -1.64826140e-01 -2.66413718e-01 ... 2.30552584e-01 3.00871849e-01 2.36313701e-01] [-2.87471950e-01 -4.33987737e-01 -3.20033878e-01 ... 2.02213481e-01 2.08370402e-01 1.78064510e-01] ... [ 5.56000695e-02 3.13468784e-01 2.74305254e-01 ... -1.27283912e-02 1.41565025e-01 2.65272200e-01] [-1.71956852e-01 -1.58055723e-01 -1.31312504e-01 ... -3.13773826e-02 -4.92527299e-02 1.44033775e-01] [-2.07701236e-01 -1.16145954e-01 3.48103754e-02 ... -2.00433824e-02 6.72644824e-02 2.97353208e-01]] ... [[ 2.10983112e-01 1.46824956e-01 1.05480656e-01 ... -4.20975201e-02 8.94962400e-02 2.23306373e-01] [ 1.34653017e-01 1.44107491e-01 1.59701347e-01 ... 1.40202984e-01 2.63018787e-01 2.00208098e-01] [ 1.27089635e-01 1.59707040e-01 3.24460387e-01 ... 1.58921210e-03 4.34518158e-02 -1.55322641e-01] ... [ 3.55366498e-01 3.23146462e-01 7.91675076e-02 ... -9.44712982e-02 4.36254889e-02 1.19876772e-01] [ 1.92841023e-01 3.37322354e-01 1.54211834e-01 ... 3.28542367e-02 2.14555487e-01 1.60990894e-01] [-3.83374207e-02 2.43055031e-01 2.10379004e-01 ... 1.92610145e-01 4.36053872e-01 2.10202858e-01]] [[ 1.16442114e-01 -1.05357416e-01 -1.63629085e-01 ... 5.34940664e-05 2.69582540e-01 2.51775950e-01] [ 1.69228166e-01 -1.62723623e-02 -1.81996375e-01 ... -1.75672546e-01 9.06483829e-02 2.82514766e-02] [ 7.61026293e-02 -1.06797993e-01 1.45223243e-02 ... 
-2.17159092e-01 -1.51532426e-01 -9.72723812e-02] ... [ 5.04136784e-03 2.63744473e-01 5.50245196e-02 ... -8.03876892e-02 -5.81481401e-03 1.73807204e-01] [-9.39370506e-03 2.16811955e-01 1.90634094e-02 ... 5.40290261e-03 8.51285458e-02 9.41008404e-02] [-1.98356420e-01 9.43482369e-02 -7.27000535e-02 ... 2.81258337e-02 6.68602288e-02 -3.34521686e-03]] [[-5.37076704e-02 -2.55743355e-01 -2.78280914e-01 ... 2.24331394e-02 1.47892669e-01 7.28430077e-02] [-1.56974047e-02 -1.70193419e-01 -1.77041784e-01 ... -2.14671820e-01 -1.71972383e-02 -1.96093664e-01] [-5.53879049e-03 -3.25313598e-01 -1.46585539e-01 ... -1.31104097e-01 -1.52581498e-01 -1.14269905e-01] ... [-7.69901695e-03 1.57454520e-01 7.91773424e-02 ... -2.17773944e-01 -3.95288974e-01 -9.61026624e-02] [-1.06032178e-01 1.66523680e-01 9.01599899e-02 ... -1.68946069e-02 -1.40336633e-01 -9.73984897e-02] [-2.29122460e-01 1.82526127e-01 1.73927113e-01 ... 5.21362349e-02 -3.66650745e-02 -1.03648953e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': [1, 1, 1], 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5743.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : int[] = prim::Constant[value=[1, 1, 1]]() %5 : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::avg_pool3d(%x, %5, %4, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%6) fw_re: [[[[[-9.57864821e-02 -9.96742696e-02 1.05411425e-01 ... 5.67504466e-02 -1.43983155e-01 -1.24784276e-01] [-1.60156339e-01 -1.23632811e-01 1.31636247e-01 ... 5.10631949e-02 -4.61570360e-02 -2.87296195e-02] [-4.44068275e-02 1.98403914e-02 1.22294992e-01 ... 1.60479173e-01 2.03539401e-01 2.15970874e-01] ... [ 1.22868396e-01 1.19172908e-01 1.52246598e-02 ... -9.20512974e-02 -2.82500714e-01 -2.60102570e-01] [-3.52061838e-02 -6.87555363e-03 5.45891374e-02 ... -5.99631406e-02 -2.31879920e-01 -1.20543562e-01] [-1.56027019e-01 -1.75629586e-01 -4.82051708e-02 ... 6.13045879e-02 -1.78358734e-01 -1.06403485e-01]] [[-1.97559252e-01 -1.76243871e-01 5.90441264e-02 ... -1.13067804e-02 -1.08849525e-01 -1.29170209e-01] [-2.35154986e-01 -1.86835259e-01 1.62564829e-01 ... 1.99449882e-02 1.61976125e-02 9.77009721e-03] [ 2.27880962e-02 7.60819018e-02 1.65826738e-01 ... 9.79516953e-02 8.26110989e-02 1.49764329e-01] ... [ 1.16636381e-01 1.79266900e-01 2.49005891e-02 ... -2.56145149e-01 -4.49964970e-01 -4.29439396e-01] [ 9.07046441e-03 1.22589238e-01 2.02869639e-01 ... -1.85772002e-01 -3.31016749e-01 -2.46629432e-01] [-1.32751405e-01 -1.06335729e-01 1.60088465e-02 ... -9.14135873e-02 -2.67482162e-01 -1.78466320e-01]] [[-1.92767866e-02 -8.63409713e-02 -1.55992284e-01 ... -5.64947762e-02 -2.90306360e-02 -2.21146476e-02] [ 5.40277772e-02 2.94575244e-02 7.09488541e-02 ... 
3.25068794e-02 1.58286411e-02 5.26026078e-02] [ 3.29203993e-01 2.88301349e-01 2.02577710e-01 ... -4.95552793e-02 -2.21710075e-02 -2.70273332e-02] ... [-9.65142250e-03 1.27165496e-01 5.72433807e-02 ... -2.82214403e-01 -3.95964235e-01 -3.68855208e-01] [-1.18593849e-01 6.13846704e-02 1.52945921e-01 ... -9.30157974e-02 -2.68520713e-01 -1.56548679e-01] [-1.91301897e-01 -5.27319014e-02 1.11771345e-01 ... -1.00633249e-01 -1.78705886e-01 -8.36139992e-02]] ... [[-1.21827208e-01 -8.90043229e-02 6.35862947e-02 ... 4.53067347e-02 1.07273787e-01 7.48785660e-02] [-9.99434441e-02 -8.88942406e-02 1.48071140e-01 ... -1.68395992e-02 -7.71039575e-02 -6.45054281e-02] [-6.89224377e-02 -4.04727198e-02 1.18236504e-01 ... 4.81451452e-02 2.72267498e-03 -3.21313087e-03] ... [ 1.10719120e-02 3.15820053e-02 3.92950922e-02 ... 5.65396585e-02 -9.93777625e-03 4.92737256e-02] [-1.78785995e-02 1.17233330e-02 6.16371632e-02 ... -3.08054388e-01 -2.57212222e-01 -1.20687969e-01] [ 1.45645142e-01 6.30884841e-02 1.58784047e-01 ... -2.41923645e-01 -2.85065055e-01 -1.37483492e-01]] [[-9.07330960e-02 -9.69122127e-02 5.04845940e-02 ... -4.25404729e-03 -3.79472645e-03 -4.46321676e-03] [ 1.93412285e-02 7.09229708e-02 1.97694585e-01 ... 9.33795422e-02 -1.28713250e-01 -1.36302114e-01] [ 1.72896802e-01 3.32923889e-01 3.42349857e-01 ... 6.40419424e-02 -2.06376463e-01 -2.11852282e-01] ... [ 1.27342433e-01 1.78081930e-01 1.50196359e-01 ... 1.39682174e-01 -8.50029364e-02 -1.61503535e-02] [-5.48412018e-02 -1.65180385e-01 -3.15654725e-02 ... -3.18330050e-01 -3.13475817e-01 -8.99788216e-02] [ 2.74035893e-02 -5.98754808e-02 3.87811363e-02 ... -2.36310855e-01 -2.95178354e-01 -1.53454319e-01]] [[-2.10399888e-02 -1.35573119e-01 -8.92718434e-02 ... 3.73842381e-02 -8.28424841e-02 -5.58037683e-02] [ 8.93882886e-02 5.31377643e-02 7.34086782e-02 ... 1.41049311e-01 -1.57113388e-01 -1.53518707e-01] [ 1.79087237e-01 2.52858281e-01 2.43732572e-01 ... 1.65040463e-01 -1.74515590e-01 -2.03948021e-01] ... 
[ 9.02751535e-02 1.96865469e-01 2.32975423e-01 ... -1.38400421e-02 -1.24437898e-01 -8.06875527e-02] [ 1.25800679e-03 -5.66259213e-02 3.81833054e-02 ... -2.88541526e-01 -3.07422578e-01 -7.58267865e-02] [-2.08994281e-02 -2.42675859e-02 7.32419118e-02 ... -2.09069073e-01 -2.42255464e-01 -1.04464248e-01]]] [[[ 1.11445718e-01 5.22657074e-02 -1.07057756e-02 ... -1.09126382e-01 -9.49386321e-03 1.07100848e-02] [ 1.05441317e-01 1.58096716e-01 5.76727353e-02 ... -1.94383606e-01 3.50984000e-02 -3.77934612e-03] [-6.70622438e-02 3.51278437e-03 2.82063968e-02 ... -3.09963197e-01 -6.11439981e-02 -1.59158424e-01] ... [-3.65812257e-02 -1.13775954e-01 1.10679045e-02 ... -1.50996312e-01 -2.10267410e-01 -1.39665917e-01] [-8.93644765e-02 -1.74429342e-01 4.47309576e-02 ... 2.37586182e-02 4.90357392e-02 3.62766460e-02] [-9.52899009e-02 -1.68055639e-01 -1.44812623e-02 ... 1.13071263e-01 1.73099935e-01 9.14321467e-02]] [[ 1.84836000e-01 5.59275933e-02 -1.21210348e-02 ... -7.66963065e-02 7.23732933e-02 1.11768231e-01] [ 2.82292545e-01 2.94070214e-01 1.91638753e-01 ... -1.71510622e-01 9.47885439e-02 1.16679436e-02] [ 5.79913668e-02 9.97414291e-02 1.09569088e-01 ... -1.43110260e-01 -3.73169221e-03 -2.09004417e-01] ... [-1.21179074e-01 -2.30283841e-01 -9.84908268e-02 ... -2.72609293e-01 -2.60209709e-01 -2.11148843e-01] [-8.26101974e-02 -1.87304050e-01 -1.36328146e-01 ... -2.22729612e-02 6.27493486e-02 -1.21162375e-02] [-4.50068489e-02 -1.10424995e-01 -9.29917246e-02 ... 1.31302312e-01 2.41874531e-01 7.30694532e-02]] [[ 1.48447528e-01 1.42240331e-01 3.95069346e-02 ... 7.90722370e-02 1.75018281e-01 1.59099221e-01] [ 2.93425947e-01 3.54881227e-01 2.38487691e-01 ... -2.47795274e-03 2.12573364e-01 7.91436210e-02] [ 1.42105788e-01 2.51424879e-01 2.48006359e-01 ... 1.24747328e-01 1.67546287e-01 -3.30907628e-02] ... [-1.96047276e-01 -2.77192563e-01 -1.33772686e-01 ... -2.93994695e-01 -9.86845046e-02 2.49248958e-04] [-3.58865894e-02 -6.34681061e-02 -3.77370976e-02 ... 
-2.28975639e-01 -5.14823608e-02 -9.48338434e-02] [ 4.65092212e-02 3.92552838e-02 6.65978575e-03 ... -3.90828587e-02 7.30687529e-02 -6.23259358e-02]] ... [[ 1.11459255e-01 1.15485944e-01 -6.08393475e-02 ... 1.20746821e-01 2.32876241e-01 2.47084484e-01] [ 1.64053708e-01 8.81161988e-02 -1.21603951e-01 ... 1.71097651e-01 1.96084410e-01 1.76148281e-01] [ 1.86089292e-01 1.32662803e-01 -2.28706151e-01 ... 1.26833856e-01 1.26216426e-01 5.80618605e-02] ... [-3.95507097e-01 -4.75449800e-01 -1.36560798e-01 ... -4.58903879e-01 -1.08619563e-01 -8.05956349e-02] [-1.53908521e-01 -2.06581309e-01 -9.69450846e-02 ... -8.91963392e-02 1.20166475e-02 -9.97187421e-02] [ 1.22727253e-01 1.32158399e-01 3.29192765e-02 ... -2.41706409e-02 6.53886423e-02 2.28480604e-02]] [[ 2.49417350e-02 7.55149350e-02 4.85104062e-02 ... 1.13412462e-01 2.21730158e-01 6.17252141e-02] [ 6.04337938e-02 2.79350318e-02 -2.47882809e-02 ... 2.16050655e-01 1.68252662e-01 1.62967932e-04] [ 6.23129308e-02 6.16579503e-02 -2.76036024e-01 ... 2.08495736e-01 1.55715957e-01 -1.11575788e-02] ... [-3.65237385e-01 -4.29090232e-01 -1.25744209e-01 ... -2.75985420e-01 -2.40891486e-01 -1.71076775e-01] [-2.20087364e-01 -1.29099429e-01 4.87595722e-02 ... -1.10840209e-01 -1.34530157e-01 -1.36824936e-01] [ 2.72967294e-02 1.62230000e-01 2.10897282e-01 ... 9.73483082e-03 1.70868374e-02 -1.34083210e-02]] [[-9.13929194e-03 7.49967024e-02 7.31830150e-02 ... 1.39412746e-01 8.54657367e-02 -5.81470951e-02] [ 1.10212788e-02 5.09827994e-02 4.89451550e-02 ... 2.51049578e-01 4.44318727e-02 -1.06347285e-01] [ 1.08365521e-01 1.63524687e-01 -1.72734703e-03 ... 2.24764153e-01 9.98175219e-02 -6.90482780e-02] ... [-1.83419019e-01 -1.51684031e-01 2.93705631e-02 ... -7.11760893e-02 -1.36582151e-01 -9.99605656e-02] [-4.11771573e-02 1.34842083e-01 2.03271180e-01 ... -9.39365402e-02 -1.51288792e-01 -5.49607240e-02] [ 5.27773425e-02 2.37111762e-01 2.84331262e-01 ... 
-3.60103399e-02 -3.45420092e-02 -8.69573001e-03]]] [[[ 8.74194056e-02 1.46531239e-01 2.27250397e-01 ... 3.10745627e-01 2.95799404e-01 4.17131148e-02] [ 1.17937177e-01 1.56018615e-01 1.34306550e-01 ... 3.82011443e-01 3.55672479e-01 8.78327191e-02] [-2.20630900e-03 -1.29673883e-01 -2.40532339e-01 ... 1.29595250e-01 2.55783081e-01 1.48144424e-01] ... [-1.02846332e-01 -1.18142605e-01 -2.69781232e-01 ... 9.71376970e-02 -1.22239269e-01 -7.97973275e-02] [-1.68164998e-01 -2.46392384e-01 -2.77105540e-01 ... 4.17466797e-02 -1.48256168e-01 -1.12627424e-01] [-2.04017803e-01 -2.52308607e-01 -2.23281205e-01 ... 1.36061370e-01 2.08876748e-03 -1.32957185e-02]] [[ 2.69954473e-01 2.92281181e-01 2.31355637e-01 ... 2.71306723e-01 3.16454768e-01 9.08652395e-02] [ 3.59556824e-01 3.71548027e-01 2.53474653e-01 ... 3.49154592e-01 2.62520254e-01 6.33934662e-02] [ 1.80879951e-01 4.67202403e-02 -1.45722836e-01 ... 1.08764708e-01 1.60271361e-01 7.03480542e-02] ... [ 2.79274341e-02 -1.56083051e-02 -1.14045374e-01 ... 1.94844633e-01 -7.29453489e-02 -3.84608246e-02] [-2.18854123e-03 -9.24712345e-02 -1.16280407e-01 ... 4.55260240e-02 -1.76853493e-01 -9.82311070e-02] [-1.19627036e-01 -1.85191453e-01 -1.14695966e-01 ... 9.74771380e-02 -1.89337879e-02 -1.13770058e-02]] [[ 2.50600845e-01 3.09840351e-01 2.35988125e-01 ... 1.64733291e-01 2.75823891e-01 1.03315622e-01] [ 3.28527123e-01 3.34552944e-01 1.91112041e-01 ... 2.63814032e-01 2.27963194e-01 1.93634883e-01] [ 2.48118103e-01 2.09941596e-01 -3.54239228e-03 ... 2.00148746e-01 3.08337659e-01 2.36369252e-01] ... [ 3.39381039e-01 2.48587474e-01 1.76961228e-01 ... 1.00207299e-01 -1.94403064e-02 -6.15376001e-03] [ 1.76055700e-01 2.00500697e-01 1.83070123e-01 ... -7.59852352e-03 -2.50458002e-01 -1.72113523e-01] [ 1.11250125e-01 1.41354397e-01 2.10994169e-01 ... 3.92352082e-02 -5.57966530e-02 -6.57273382e-02]] ... [[ 7.11702108e-02 6.33534938e-02 -5.96613735e-02 ... 
1.22225722e-02 1.00100555e-01 1.89702570e-01] [ 2.39228532e-02 1.39762327e-01 6.34995699e-02 ... -8.00784528e-02 1.60781503e-01 2.29277208e-01] [-7.08049536e-02 6.20449111e-02 9.66654345e-02 ... -4.95609455e-02 1.18128240e-01 1.66798502e-01] ... [ 2.34743208e-02 -1.77371465e-02 3.22478600e-02 ... -2.24129900e-01 4.79732640e-03 -7.10669011e-02] [ 9.38963220e-02 1.07779749e-01 9.14581269e-02 ... -2.33035639e-01 7.34891519e-02 9.18748453e-02] [ 1.67929083e-01 1.67667001e-01 6.86579868e-02 ... -3.31143774e-02 1.88019946e-01 1.84807047e-01]] [[ 8.12186822e-02 -4.12385948e-02 -1.31031841e-01 ... 1.41037832e-04 1.27369985e-01 1.52190343e-01] [-3.39557603e-02 -7.85071924e-02 -7.40758255e-02 ... 3.94830890e-02 2.43269265e-01 2.02628061e-01] [-3.09441119e-01 -3.53459686e-01 -1.33224517e-01 ... -2.77264081e-02 2.73245782e-01 2.27461994e-01] ... [-6.85554147e-02 -1.15133636e-01 -6.57668263e-02 ... -2.65171945e-01 -2.75862217e-01 -3.24155390e-01] [-1.78349018e-02 -9.94561538e-02 7.75554627e-02 ... -2.84009874e-01 -3.46427649e-01 -2.80484766e-01] [ 1.46222129e-01 8.17354023e-02 1.70733258e-01 ... -1.37197122e-01 -2.60569036e-01 -1.93625078e-01]] [[ 1.85092479e-01 1.14975601e-01 6.29541203e-02 ... -7.37583786e-02 -2.53065750e-02 5.08248955e-02] [ 8.71049389e-02 7.13549331e-02 1.38483867e-01 ... -3.32478248e-03 1.26572981e-01 1.69672295e-01] [-2.56948858e-01 -3.55372608e-01 -1.07448332e-01 ... -7.95784369e-02 1.43050939e-01 1.57225490e-01] ... [ 1.06931403e-01 1.49909094e-01 6.93818182e-02 ... -4.55053866e-01 -3.24816346e-01 -2.64440387e-01] [ 1.29697829e-01 1.48750365e-01 1.67728007e-01 ... -2.96660364e-01 -2.48141795e-01 -1.93006799e-01] [ 1.79118499e-01 1.77188203e-01 1.90414682e-01 ... -1.02893285e-01 -1.48524284e-01 -1.31980971e-01]]]]]; ov_res: [[[[[-9.57864821e-02 -9.96742696e-02 1.05411425e-01 ... 5.67504466e-02 -1.43983155e-01 -1.24784276e-01] [-1.60156339e-01 -1.23632811e-01 1.31636247e-01 ... 
5.10631949e-02 -4.61570360e-02 -2.87296195e-02] [-4.44068275e-02 1.98403914e-02 1.22294992e-01 ... 1.60479173e-01 2.03539401e-01 2.15970874e-01] ... [ 1.22868396e-01 1.19172908e-01 1.52246598e-02 ... -9.20512974e-02 -2.82500714e-01 -2.60102570e-01] [-3.52061838e-02 -6.87555363e-03 5.45891374e-02 ... -5.99631406e-02 -2.31879920e-01 -1.20543562e-01] [-1.56027019e-01 -1.75629586e-01 -4.82051708e-02 ... 6.13045879e-02 -1.78358734e-01 -1.06403485e-01]] [[-1.97559252e-01 -1.76243871e-01 5.90441264e-02 ... -1.13067804e-02 -1.08849525e-01 -1.29170209e-01] [-2.35154986e-01 -1.86835259e-01 1.62564829e-01 ... 1.99449882e-02 1.61976125e-02 9.77009721e-03] [ 2.27880962e-02 7.60819018e-02 1.65826738e-01 ... 9.79516953e-02 8.26110989e-02 1.49764329e-01] ... [ 1.16636381e-01 1.79266900e-01 2.49005891e-02 ... -2.56145149e-01 -4.49964970e-01 -4.29439396e-01] [ 9.07046441e-03 1.22589238e-01 2.02869639e-01 ... -1.85772002e-01 -3.31016749e-01 -2.46629432e-01] [-1.32751405e-01 -1.06335729e-01 1.60088465e-02 ... -9.14135873e-02 -2.67482162e-01 -1.78466320e-01]] [[-1.92767866e-02 -8.63409713e-02 -1.55992284e-01 ... -5.64947762e-02 -2.90306360e-02 -2.21146476e-02] [ 5.40277772e-02 2.94575244e-02 7.09488541e-02 ... 3.25068794e-02 1.58286411e-02 5.26026078e-02] [ 3.29203993e-01 2.88301349e-01 2.02577710e-01 ... -4.95552793e-02 -2.21710075e-02 -2.70273332e-02] ... [-9.65142250e-03 1.27165496e-01 5.72433807e-02 ... -2.82214403e-01 -3.95964235e-01 -3.68855208e-01] [-1.18593849e-01 6.13846704e-02 1.52945921e-01 ... -9.30157974e-02 -2.68520713e-01 -1.56548679e-01] [-1.91301897e-01 -5.27319014e-02 1.11771345e-01 ... -1.00633249e-01 -1.78705886e-01 -8.36139992e-02]] ... [[-1.21827208e-01 -8.90043229e-02 6.35862947e-02 ... 4.53067347e-02 1.07273787e-01 7.48785660e-02] [-9.99434441e-02 -8.88942406e-02 1.48071140e-01 ... -1.68395992e-02 -7.71039575e-02 -6.45054281e-02] [-6.89224377e-02 -4.04727198e-02 1.18236504e-01 ... 4.81451452e-02 2.72267498e-03 -3.21313087e-03] ... 
[ 1.10719120e-02 3.15820053e-02 3.92950922e-02 ... 5.65396585e-02 -9.93777625e-03 4.92737256e-02] [-1.78785995e-02 1.17233330e-02 6.16371632e-02 ... -3.08054388e-01 -2.57212222e-01 -1.20687969e-01] [ 1.45645142e-01 6.30884841e-02 1.58784047e-01 ... -2.41923645e-01 -2.85065055e-01 -1.37483492e-01]] [[-9.07330960e-02 -9.69122127e-02 5.04845940e-02 ... -4.25404729e-03 -3.79472645e-03 -4.46321676e-03] [ 1.93412285e-02 7.09229708e-02 1.97694585e-01 ... 9.33795422e-02 -1.28713250e-01 -1.36302114e-01] [ 1.72896802e-01 3.32923889e-01 3.42349857e-01 ... 6.40419424e-02 -2.06376463e-01 -2.11852282e-01] ... [ 1.27342433e-01 1.78081930e-01 1.50196359e-01 ... 1.39682174e-01 -8.50029364e-02 -1.61503535e-02] [-5.48412018e-02 -1.65180385e-01 -3.15654725e-02 ... -3.18330050e-01 -3.13475817e-01 -8.99788216e-02] [ 2.74035893e-02 -5.98754808e-02 3.87811363e-02 ... -2.36310855e-01 -2.95178354e-01 -1.53454319e-01]] [[-2.10399888e-02 -1.35573119e-01 -8.92718434e-02 ... 3.73842381e-02 -8.28424841e-02 -5.58037683e-02] [ 8.93882886e-02 5.31377643e-02 7.34086782e-02 ... 1.41049311e-01 -1.57113388e-01 -1.53518707e-01] [ 1.79087237e-01 2.52858281e-01 2.43732572e-01 ... 1.65040463e-01 -1.74515590e-01 -2.03948021e-01] ... [ 9.02751535e-02 1.96865469e-01 2.32975423e-01 ... -1.38400421e-02 -1.24437898e-01 -8.06875527e-02] [ 1.25800679e-03 -5.66259213e-02 3.81833054e-02 ... -2.88541526e-01 -3.07422578e-01 -7.58267865e-02] [-2.08994281e-02 -2.42675859e-02 7.32419118e-02 ... -2.09069073e-01 -2.42255464e-01 -1.04464248e-01]]] [[[ 1.11445718e-01 5.22657074e-02 -1.07057756e-02 ... -1.09126382e-01 -9.49386321e-03 1.07100848e-02] [ 1.05441317e-01 1.58096716e-01 5.76727353e-02 ... -1.94383606e-01 3.50984000e-02 -3.77934612e-03] [-6.70622438e-02 3.51278437e-03 2.82063968e-02 ... -3.09963197e-01 -6.11439981e-02 -1.59158424e-01] ... [-3.65812257e-02 -1.13775954e-01 1.10679045e-02 ... -1.50996312e-01 -2.10267410e-01 -1.39665917e-01] [-8.93644765e-02 -1.74429342e-01 4.47309576e-02 ... 
2.37586182e-02 4.90357392e-02 3.62766460e-02] [-9.52899009e-02 -1.68055639e-01 -1.44812623e-02 ... 1.13071263e-01 1.73099935e-01 9.14321467e-02]] [[ 1.84836000e-01 5.59275933e-02 -1.21210348e-02 ... -7.66963065e-02 7.23732933e-02 1.11768231e-01] [ 2.82292545e-01 2.94070214e-01 1.91638753e-01 ... -1.71510622e-01 9.47885439e-02 1.16679436e-02] [ 5.79913668e-02 9.97414291e-02 1.09569088e-01 ... -1.43110260e-01 -3.73169221e-03 -2.09004417e-01] ... [-1.21179074e-01 -2.30283841e-01 -9.84908268e-02 ... -2.72609293e-01 -2.60209709e-01 -2.11148843e-01] [-8.26101974e-02 -1.87304050e-01 -1.36328146e-01 ... -2.22729612e-02 6.27493486e-02 -1.21162375e-02] [-4.50068489e-02 -1.10424995e-01 -9.29917246e-02 ... 1.31302312e-01 2.41874531e-01 7.30694532e-02]] [[ 1.48447528e-01 1.42240331e-01 3.95069346e-02 ... 7.90722370e-02 1.75018281e-01 1.59099221e-01] [ 2.93425947e-01 3.54881227e-01 2.38487691e-01 ... -2.47795274e-03 2.12573364e-01 7.91436210e-02] [ 1.42105788e-01 2.51424879e-01 2.48006359e-01 ... 1.24747328e-01 1.67546287e-01 -3.30907628e-02] ... [-1.96047276e-01 -2.77192563e-01 -1.33772686e-01 ... -2.93994695e-01 -9.86845046e-02 2.49248958e-04] [-3.58865894e-02 -6.34681061e-02 -3.77370976e-02 ... -2.28975639e-01 -5.14823608e-02 -9.48338434e-02] [ 4.65092212e-02 3.92552838e-02 6.65978575e-03 ... -3.90828587e-02 7.30687529e-02 -6.23259358e-02]] ... [[ 1.11459255e-01 1.15485944e-01 -6.08393475e-02 ... 1.20746821e-01 2.32876241e-01 2.47084484e-01] [ 1.64053708e-01 8.81161988e-02 -1.21603951e-01 ... 1.71097651e-01 1.96084410e-01 1.76148281e-01] [ 1.86089292e-01 1.32662803e-01 -2.28706151e-01 ... 1.26833856e-01 1.26216426e-01 5.80618605e-02] ... [-3.95507097e-01 -4.75449800e-01 -1.36560798e-01 ... -4.58903879e-01 -1.08619563e-01 -8.05956349e-02] [-1.53908521e-01 -2.06581309e-01 -9.69450846e-02 ... -8.91963392e-02 1.20166475e-02 -9.97187421e-02] [ 1.22727253e-01 1.32158399e-01 3.29192765e-02 ... 
-2.41706409e-02 6.53886423e-02 2.28480604e-02]] [[ 2.49417350e-02 7.55149350e-02 4.85104062e-02 ... 1.13412462e-01 2.21730158e-01 6.17252141e-02] [ 6.04337938e-02 2.79350318e-02 -2.47882809e-02 ... 2.16050655e-01 1.68252662e-01 1.62967932e-04] [ 6.23129308e-02 6.16579503e-02 -2.76036024e-01 ... 2.08495736e-01 1.55715957e-01 -1.11575788e-02] ... [-3.65237385e-01 -4.29090232e-01 -1.25744209e-01 ... -2.75985420e-01 -2.40891486e-01 -1.71076775e-01] [-2.20087364e-01 -1.29099429e-01 4.87595722e-02 ... -1.10840209e-01 -1.34530157e-01 -1.36824936e-01] [ 2.72967294e-02 1.62230000e-01 2.10897282e-01 ... 9.73483082e-03 1.70868374e-02 -1.34083210e-02]] [[-9.13929194e-03 7.49967024e-02 7.31830150e-02 ... 1.39412746e-01 8.54657367e-02 -5.81470951e-02] [ 1.10212788e-02 5.09827994e-02 4.89451550e-02 ... 2.51049578e-01 4.44318727e-02 -1.06347285e-01] [ 1.08365521e-01 1.63524687e-01 -1.72734703e-03 ... 2.24764153e-01 9.98175219e-02 -6.90482780e-02] ... [-1.83419019e-01 -1.51684031e-01 2.93705631e-02 ... -7.11760893e-02 -1.36582151e-01 -9.99605656e-02] [-4.11771573e-02 1.34842083e-01 2.03271180e-01 ... -9.39365402e-02 -1.51288792e-01 -5.49607240e-02] [ 5.27773425e-02 2.37111762e-01 2.84331262e-01 ... -3.60103399e-02 -3.45420092e-02 -8.69573001e-03]]] [[[ 8.74194056e-02 1.46531239e-01 2.27250397e-01 ... 3.10745627e-01 2.95799404e-01 4.17131148e-02] [ 1.17937177e-01 1.56018615e-01 1.34306550e-01 ... 3.82011443e-01 3.55672479e-01 8.78327191e-02] [-2.20630900e-03 -1.29673883e-01 -2.40532339e-01 ... 1.29595250e-01 2.55783081e-01 1.48144424e-01] ... [-1.02846332e-01 -1.18142605e-01 -2.69781232e-01 ... 9.71376970e-02 -1.22239269e-01 -7.97973275e-02] [-1.68164998e-01 -2.46392384e-01 -2.77105540e-01 ... 4.17466797e-02 -1.48256168e-01 -1.12627424e-01] [-2.04017803e-01 -2.52308607e-01 -2.23281205e-01 ... 1.36061370e-01 2.08876748e-03 -1.32957185e-02]] [[ 2.69954473e-01 2.92281181e-01 2.31355637e-01 ... 
2.71306723e-01 3.16454768e-01 9.08652395e-02] [ 3.59556824e-01 3.71548027e-01 2.53474653e-01 ... 3.49154592e-01 2.62520254e-01 6.33934662e-02] [ 1.80879951e-01 4.67202403e-02 -1.45722836e-01 ... 1.08764708e-01 1.60271361e-01 7.03480542e-02] ... [ 2.79274341e-02 -1.56083051e-02 -1.14045374e-01 ... 1.94844633e-01 -7.29453489e-02 -3.84608246e-02] [-2.18854123e-03 -9.24712345e-02 -1.16280407e-01 ... 4.55260240e-02 -1.76853493e-01 -9.82311070e-02] [-1.19627036e-01 -1.85191453e-01 -1.14695966e-01 ... 9.74771380e-02 -1.89337879e-02 -1.13770058e-02]] [[ 2.50600845e-01 3.09840351e-01 2.35988125e-01 ... 1.64733291e-01 2.75823891e-01 1.03315622e-01] [ 3.28527123e-01 3.34552944e-01 1.91112041e-01 ... 2.63814032e-01 2.27963194e-01 1.93634883e-01] [ 2.48118103e-01 2.09941596e-01 -3.54239228e-03 ... 2.00148746e-01 3.08337659e-01 2.36369252e-01] ... [ 3.39381039e-01 2.48587474e-01 1.76961228e-01 ... 1.00207299e-01 -1.94403064e-02 -6.15376001e-03] [ 1.76055700e-01 2.00500697e-01 1.83070123e-01 ... -7.59852352e-03 -2.50458002e-01 -1.72113523e-01] [ 1.11250125e-01 1.41354397e-01 2.10994169e-01 ... 3.92352082e-02 -5.57966530e-02 -6.57273382e-02]] ... [[ 7.11702108e-02 6.33534938e-02 -5.96613735e-02 ... 1.22225722e-02 1.00100555e-01 1.89702570e-01] [ 2.39228532e-02 1.39762327e-01 6.34995699e-02 ... -8.00784528e-02 1.60781503e-01 2.29277208e-01] [-7.08049536e-02 6.20449111e-02 9.66654345e-02 ... -4.95609455e-02 1.18128240e-01 1.66798502e-01] ... [ 2.34743208e-02 -1.77371465e-02 3.22478600e-02 ... -2.24129900e-01 4.79732640e-03 -7.10669011e-02] [ 9.38963220e-02 1.07779749e-01 9.14581269e-02 ... -2.33035639e-01 7.34891519e-02 9.18748453e-02] [ 1.67929083e-01 1.67667001e-01 6.86579868e-02 ... -3.31143774e-02 1.88019946e-01 1.84807047e-01]] [[ 8.12186822e-02 -4.12385948e-02 -1.31031841e-01 ... 1.41037832e-04 1.27369985e-01 1.52190343e-01] [-3.39557603e-02 -7.85071924e-02 -7.40758255e-02 ... 3.94830890e-02 2.43269265e-01 2.02628061e-01] [-3.09441119e-01 -3.53459686e-01 -1.33224517e-01 ... 
-2.77264081e-02 2.73245782e-01 2.27461994e-01] ... [-6.85554147e-02 -1.15133636e-01 -6.57668263e-02 ... -2.65171945e-01 -2.75862217e-01 -3.24155390e-01] [-1.78349018e-02 -9.94561538e-02 7.75554627e-02 ... -2.84009874e-01 -3.46427649e-01 -2.80484766e-01] [ 1.46222129e-01 8.17354023e-02 1.70733258e-01 ... -1.37197122e-01 -2.60569036e-01 -1.93625078e-01]] [[ 1.85092479e-01 1.14975601e-01 6.29541203e-02 ... -7.37583786e-02 -2.53065750e-02 5.08248955e-02] [ 8.71049389e-02 7.13549331e-02 1.38483867e-01 ... -3.32478248e-03 1.26572981e-01 1.69672295e-01] [-2.56948858e-01 -3.55372608e-01 -1.07448332e-01 ... -7.95784369e-02 1.43050939e-01 1.57225490e-01] ... [ 1.06931403e-01 1.49909094e-01 6.93818182e-02 ... -4.55053866e-01 -3.24816346e-01 -2.64440387e-01] [ 1.29697829e-01 1.48750365e-01 1.67728007e-01 ... -2.96660364e-01 -2.48141795e-01 -1.93006799e-01] [ 1.79118499e-01 1.77188203e-01 1.90414682e-01 ... -1.02893285e-01 -1.48524284e-01 -1.31980971e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': [3, 3, 3], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5746.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : int[] = prim::Constant[value=[0, 0, 0]]() %5 : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::avg_pool3d(%x, %5, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%6) fw_re: [[[[[ 9.77984965e-02 1.83787465e-01 -1.26881137e-01 1.07890859e-01 -1.77260369e-01] [-3.37994725e-01 9.74149629e-02 -1.79064661e-01 2.51809567e-01 2.62419492e-01] [-1.57364886e-02 2.42359698e-01 1.22843727e-01 -2.35322773e-01 2.10284412e-01] [ 2.36600026e-01 2.04620529e-02 -4.73294295e-02 6.86416477e-02 2.07098514e-01] [ 1.27453934e-02 1.71340585e-01 3.76276895e-02 3.45892869e-02 -1.18107907e-02]] [[ 9.23029780e-02 1.51525885e-01 2.99777269e-01 2.62957186e-01 -1.56217664e-01] [-1.36583686e-01 -8.26918781e-02 -2.10590530e-02 4.81521077e-02 -2.32994497e-01] [-1.81105986e-01 -6.40171245e-02 -2.74849031e-02 1.62269056e-01 -3.06167841e-01] [-3.20151478e-01 1.63469255e-01 1.47389948e-01 -1.16218381e-01 2.00013027e-01] [ 7.09039792e-02 9.57574993e-02 -4.35744934e-02 5.57691306e-02 -5.80861010e-02]] [[ 1.65385604e-01 -1.64658949e-01 2.36736611e-01 -8.51784274e-02 -2.40697265e-01] [-1.58525571e-01 1.05232343e-01 5.23970388e-02 -1.01763695e-01 -2.76979115e-02] [-3.76215726e-01 -3.12047213e-01 -7.53890127e-02 2.79439151e-01 -7.34041780e-02] [-1.44089311e-01 1.52481869e-01 -2.13106405e-02 1.46385401e-01 2.50293314e-01] [-2.20058620e-01 -1.23673409e-01 9.68912989e-02 -9.42983851e-03 -1.79784462e-01]] [[-6.20809235e-02 -2.34065890e-01 -1.21644489e-01 -1.34148419e-01 1.50312066e-01] [-1.25366628e-01 3.80452126e-01 -1.00447088e-01 9.73328874e-02 -1.69660747e-01] [-1.05956621e-01 3.90239496e-04 6.44807294e-02 1.82334527e-01 
2.01457202e-01] [ 4.94021177e-02 -2.26793461e-04 -3.72084498e-01 4.40677851e-01 1.41408101e-01] [ 3.07963490e-01 4.79647458e-01 -2.05438554e-01 -3.92040797e-02 1.06647080e-02]] [[ 1.12219721e-01 -1.24137774e-01 -2.19779834e-01 -4.18721028e-02 1.90142810e-01] [-4.75484356e-02 -2.26499215e-01 -1.44227743e-01 7.08155110e-02 -6.23110831e-02] [-4.25389372e-02 -5.91957830e-02 2.70818919e-02 4.57294434e-02 -2.97405988e-01] [ 2.26351649e-01 -5.05945943e-02 3.88559729e-01 -8.65839943e-02 3.79800014e-02] [ 1.06602289e-01 -7.74100646e-02 6.27724603e-02 -1.71180777e-02 -3.49653631e-01]]] [[[-2.05528736e-01 4.15663600e-01 -1.30099326e-01 -3.39363277e-01 1.28127873e-01] [-9.23714116e-02 -2.52743363e-01 -1.70327593e-02 -4.64727789e-01 2.66478419e-01] [ 2.11315274e-01 -8.32440332e-02 1.90975770e-01 -2.21867755e-01 2.65261769e-01] [-5.36555909e-02 -8.08673277e-02 -5.50334677e-02 -2.44623914e-01 4.00683172e-02] [ 3.66429925e-01 -1.06969938e-01 -1.34990327e-02 -2.82940846e-02 8.39952379e-02]] [[-8.47165212e-02 -2.99909450e-02 1.60471916e-01 1.64021552e-01 -6.40718639e-02] [-1.10699177e-01 -1.73226893e-01 3.13769886e-03 1.01343744e-01 -6.93102460e-03] [-2.20632970e-01 -1.90764248e-01 -3.72451767e-02 1.31639257e-01 1.90046534e-01] [ 1.90547574e-02 -5.50951883e-02 -2.36397654e-01 -2.53788501e-01 1.84820086e-01] [-1.29809558e-01 1.19606085e-01 8.82753637e-03 -2.96168867e-03 -1.04831547e-01]] [[ 6.11939728e-02 -1.68174073e-01 -9.85628832e-03 3.48384887e-01 8.58951658e-02] [ 1.05965666e-01 2.34947335e-02 -7.41573051e-02 -2.75745634e-02 -1.00847535e-01] [ 1.91749394e-01 1.36632279e-01 2.27936357e-01 -3.77961308e-01 2.10401099e-02] [-3.56334448e-02 -2.81419582e-03 1.57084718e-01 2.47949973e-01 2.03806654e-01] [-3.27337116e-01 -1.70772865e-01 -7.32410774e-02 4.10024703e-01 1.38861626e-01]] [[ 2.32716143e-01 -2.05084056e-01 -2.36314103e-01 3.97606134e-01 1.52496368e-01] [ 2.28020474e-01 6.21276908e-02 -1.93154022e-01 -2.30413899e-01 1.94797054e-01] [-2.92206556e-01 8.93923640e-02 
4.89312559e-02 2.40129650e-01 -1.80982515e-01] [-9.72881839e-02 7.48182461e-02 2.20193118e-01 1.75023645e-01 1.13675490e-01] [-2.76128739e-01 7.27923736e-02 9.68523100e-02 1.40589830e-02 3.22782487e-01]] [[ 2.09440738e-01 -1.42657742e-01 3.14487040e-01 1.15991056e-01 3.26754183e-01] [ 1.11619487e-01 -1.99123725e-01 -2.09155828e-01 -1.68938711e-01 1.70739397e-01] [ 1.29652768e-01 2.08475009e-01 7.09039867e-02 -1.11728087e-01 5.31898379e-01] [-1.36278272e-01 1.11841530e-01 3.15651864e-01 -9.27532390e-02 2.26629049e-01] [ 9.02395770e-02 -1.12964585e-01 4.54478450e-02 -2.17181772e-01 -3.59158337e-01]]] [[[-2.27225438e-01 3.04814335e-02 -1.62358746e-01 -5.58064692e-02 1.21996596e-01] [-2.38088503e-01 6.96620345e-02 7.49625117e-02 -8.86772648e-02 1.72328249e-01] [ 1.75400063e-01 5.33361994e-02 -1.16023861e-01 -3.87474239e-01 1.47234946e-01] [ 1.78430632e-01 -3.19800496e-01 1.99846588e-02 2.71418896e-02 2.10702106e-01] [-2.69148618e-01 1.77768677e-01 -7.98870847e-02 2.36709386e-01 3.12916070e-01]] [[ 2.45031994e-02 -3.09149325e-01 1.44187197e-01 1.52824119e-01 -9.51800868e-02] [ 1.53333649e-01 -3.47112834e-01 -2.50769377e-01 1.45914376e-01 -5.15686683e-02] [ 1.12145506e-01 -1.26288757e-01 1.12410530e-01 -9.03307926e-03 2.58035839e-01] [ 1.56886745e-02 -2.01928243e-01 1.45114539e-02 -5.96196428e-02 -4.38808501e-02] [-8.25813338e-02 4.41994131e-01 1.46869168e-01 -4.45330054e-01 -1.72438651e-01]] [[-7.42724836e-02 -4.44138013e-02 1.54241055e-01 -5.08225225e-02 -1.66823447e-01] [-5.92952967e-02 3.84806186e-01 -6.18748320e-03 3.31916302e-01 -1.55534357e-01] [-2.88365483e-01 -1.45194367e-01 2.28744790e-01 -1.16385289e-01 -3.98218513e-01] [-2.98354417e-01 -1.73074126e-01 1.04903124e-01 -1.90778039e-02 8.38679597e-02] [-3.03438693e-01 -4.99863662e-02 -1.79460108e-01 1.67038575e-01 -3.23277377e-02]] [[-5.02671748e-02 -1.20030157e-01 2.44784176e-01 5.65290377e-02 8.04817379e-02] [-5.06979264e-02 -7.75311440e-02 1.01275727e-01 -2.29518026e-01 2.14851070e-02] [ 2.70373046e-01 
2.00149864e-02 -1.51889876e-01 3.54922637e-02 1.82761133e-01] [ 2.23114312e-01 -3.46982181e-02 2.95800734e-02 7.94568434e-02 3.20831925e-04] [ 9.86290630e-03 -1.03250459e-01 -2.78633032e-02 -4.05878127e-02 -4.85358566e-01]] [[ 1.69615582e-01 2.22153023e-01 2.94828176e-01 -1.77315399e-02 -4.81588505e-02] [-2.45832741e-01 8.20540860e-02 -1.68568015e-01 -1.52988568e-01 -3.59873056e-01] [-4.44733053e-02 3.98921698e-01 3.84406418e-01 -7.33234733e-02 3.54942054e-01] [-3.80554982e-02 -7.59112602e-03 -7.63295963e-02 2.63887525e-01 2.39810467e-01] [ 4.41423617e-02 -1.80124760e-01 2.20034495e-01 -4.12273221e-02 3.93290073e-01]]]]]; ov_res: [[[[[ 9.77984965e-02 1.83787465e-01 -1.26881137e-01 1.07890859e-01 -1.77260369e-01] [-3.37994725e-01 9.74149629e-02 -1.79064661e-01 2.51809567e-01 2.62419492e-01] [-1.57364886e-02 2.42359698e-01 1.22843727e-01 -2.35322773e-01 2.10284412e-01] [ 2.36600026e-01 2.04620529e-02 -4.73294295e-02 6.86416477e-02 2.07098514e-01] [ 1.27453934e-02 1.71340585e-01 3.76276895e-02 3.45892869e-02 -1.18107907e-02]] [[ 9.23029780e-02 1.51525885e-01 2.99777269e-01 2.62957186e-01 -1.56217664e-01] [-1.36583686e-01 -8.26918781e-02 -2.10590530e-02 4.81521077e-02 -2.32994497e-01] [-1.81105986e-01 -6.40171245e-02 -2.74849031e-02 1.62269056e-01 -3.06167841e-01] [-3.20151478e-01 1.63469255e-01 1.47389948e-01 -1.16218381e-01 2.00013027e-01] [ 7.09039792e-02 9.57574993e-02 -4.35744934e-02 5.57691306e-02 -5.80861010e-02]] [[ 1.65385604e-01 -1.64658949e-01 2.36736611e-01 -8.51784274e-02 -2.40697265e-01] [-1.58525571e-01 1.05232343e-01 5.23970388e-02 -1.01763695e-01 -2.76979115e-02] [-3.76215726e-01 -3.12047213e-01 -7.53890127e-02 2.79439151e-01 -7.34041780e-02] [-1.44089311e-01 1.52481869e-01 -2.13106405e-02 1.46385401e-01 2.50293314e-01] [-2.20058620e-01 -1.23673409e-01 9.68912989e-02 -9.42983851e-03 -1.79784462e-01]] [[-6.20809235e-02 -2.34065890e-01 -1.21644489e-01 -1.34148419e-01 1.50312066e-01] [-1.25366628e-01 3.80452126e-01 -1.00447088e-01 9.73328874e-02 
-1.69660747e-01] [-1.05956621e-01 3.90239496e-04 6.44807294e-02 1.82334527e-01 2.01457202e-01] [ 4.94021177e-02 -2.26793461e-04 -3.72084498e-01 4.40677851e-01 1.41408101e-01] [ 3.07963490e-01 4.79647458e-01 -2.05438554e-01 -3.92040797e-02 1.06647080e-02]] [[ 1.12219721e-01 -1.24137774e-01 -2.19779834e-01 -4.18721028e-02 1.90142810e-01] [-4.75484356e-02 -2.26499215e-01 -1.44227743e-01 7.08155110e-02 -6.23110831e-02] [-4.25389372e-02 -5.91957830e-02 2.70818919e-02 4.57294434e-02 -2.97405988e-01] [ 2.26351649e-01 -5.05945943e-02 3.88559729e-01 -8.65839943e-02 3.79800014e-02] [ 1.06602289e-01 -7.74100646e-02 6.27724603e-02 -1.71180777e-02 -3.49653631e-01]]] [[[-2.05528736e-01 4.15663600e-01 -1.30099326e-01 -3.39363277e-01 1.28127873e-01] [-9.23714116e-02 -2.52743363e-01 -1.70327593e-02 -4.64727789e-01 2.66478419e-01] [ 2.11315274e-01 -8.32440332e-02 1.90975770e-01 -2.21867755e-01 2.65261769e-01] [-5.36555909e-02 -8.08673277e-02 -5.50334677e-02 -2.44623914e-01 4.00683172e-02] [ 3.66429925e-01 -1.06969938e-01 -1.34990327e-02 -2.82940846e-02 8.39952379e-02]] [[-8.47165212e-02 -2.99909450e-02 1.60471916e-01 1.64021552e-01 -6.40718639e-02] [-1.10699177e-01 -1.73226893e-01 3.13769886e-03 1.01343744e-01 -6.93102460e-03] [-2.20632970e-01 -1.90764248e-01 -3.72451767e-02 1.31639257e-01 1.90046534e-01] [ 1.90547574e-02 -5.50951883e-02 -2.36397654e-01 -2.53788501e-01 1.84820086e-01] [-1.29809558e-01 1.19606085e-01 8.82753637e-03 -2.96168867e-03 -1.04831547e-01]] [[ 6.11939728e-02 -1.68174073e-01 -9.85628832e-03 3.48384887e-01 8.58951658e-02] [ 1.05965666e-01 2.34947335e-02 -7.41573051e-02 -2.75745634e-02 -1.00847535e-01] [ 1.91749394e-01 1.36632279e-01 2.27936357e-01 -3.77961308e-01 2.10401099e-02] [-3.56334448e-02 -2.81419582e-03 1.57084718e-01 2.47949973e-01 2.03806654e-01] [-3.27337116e-01 -1.70772865e-01 -7.32410774e-02 4.10024703e-01 1.38861626e-01]] [[ 2.32716143e-01 -2.05084056e-01 -2.36314103e-01 3.97606134e-01 1.52496368e-01] [ 2.28020474e-01 6.21276908e-02 
-1.93154022e-01 -2.30413899e-01 1.94797054e-01] [-2.92206556e-01 8.93923640e-02 4.89312559e-02 2.40129650e-01 -1.80982515e-01] [-9.72881839e-02 7.48182461e-02 2.20193118e-01 1.75023645e-01 1.13675490e-01] [-2.76128739e-01 7.27923736e-02 9.68523100e-02 1.40589830e-02 3.22782487e-01]] [[ 2.09440738e-01 -1.42657742e-01 3.14487040e-01 1.15991056e-01 3.26754183e-01] [ 1.11619487e-01 -1.99123725e-01 -2.09155828e-01 -1.68938711e-01 1.70739397e-01] [ 1.29652768e-01 2.08475009e-01 7.09039867e-02 -1.11728087e-01 5.31898379e-01] [-1.36278272e-01 1.11841530e-01 3.15651864e-01 -9.27532390e-02 2.26629049e-01] [ 9.02395770e-02 -1.12964585e-01 4.54478450e-02 -2.17181772e-01 -3.59158337e-01]]] [[[-2.27225438e-01 3.04814335e-02 -1.62358746e-01 -5.58064692e-02 1.21996596e-01] [-2.38088503e-01 6.96620345e-02 7.49625117e-02 -8.86772648e-02 1.72328249e-01] [ 1.75400063e-01 5.33361994e-02 -1.16023861e-01 -3.87474239e-01 1.47234946e-01] [ 1.78430632e-01 -3.19800496e-01 1.99846588e-02 2.71418896e-02 2.10702106e-01] [-2.69148618e-01 1.77768677e-01 -7.98870847e-02 2.36709386e-01 3.12916070e-01]] [[ 2.45031994e-02 -3.09149325e-01 1.44187197e-01 1.52824119e-01 -9.51800868e-02] [ 1.53333649e-01 -3.47112834e-01 -2.50769377e-01 1.45914376e-01 -5.15686683e-02] [ 1.12145506e-01 -1.26288757e-01 1.12410530e-01 -9.03307926e-03 2.58035839e-01] [ 1.56886745e-02 -2.01928243e-01 1.45114539e-02 -5.96196428e-02 -4.38808501e-02] [-8.25813338e-02 4.41994131e-01 1.46869168e-01 -4.45330054e-01 -1.72438651e-01]] [[-7.42724836e-02 -4.44138013e-02 1.54241055e-01 -5.08225225e-02 -1.66823447e-01] [-5.92952967e-02 3.84806186e-01 -6.18748320e-03 3.31916302e-01 -1.55534357e-01] [-2.88365483e-01 -1.45194367e-01 2.28744790e-01 -1.16385289e-01 -3.98218513e-01] [-2.98354417e-01 -1.73074126e-01 1.04903124e-01 -1.90778039e-02 8.38679597e-02] [-3.03438693e-01 -4.99863662e-02 -1.79460108e-01 1.67038575e-01 -3.23277377e-02]] [[-5.02671748e-02 -1.20030157e-01 2.44784176e-01 5.65290377e-02 8.04817379e-02] [-5.06979264e-02 
-7.75311440e-02 1.01275727e-01 -2.29518026e-01 2.14851070e-02] [ 2.70373046e-01 2.00149864e-02 -1.51889876e-01 3.54922637e-02 1.82761133e-01] [ 2.23114312e-01 -3.46982181e-02 2.95800734e-02 7.94568434e-02 3.20831925e-04] [ 9.86290630e-03 -1.03250459e-01 -2.78633032e-02 -4.05878127e-02 -4.85358566e-01]] [[ 1.69615582e-01 2.22153023e-01 2.94828176e-01 -1.77315399e-02 -4.81588505e-02] [-2.45832741e-01 8.20540860e-02 -1.68568015e-01 -1.52988568e-01 -3.59873056e-01] [-4.44733053e-02 3.98921698e-01 3.84406418e-01 -7.33234733e-02 3.54942054e-01] [-3.80554982e-02 -7.59112602e-03 -7.63295963e-02 2.63887525e-01 2.39810467e-01] [ 4.41423617e-02 -1.80124760e-01 2.20034495e-01 -4.12273221e-02 3.93290073e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:True - params:{'kernel_size': [3, 2, 1], 'stride': [3, 1, 1], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5749.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : int[] = prim::Constant[value=[0, 0, 0]]() %5 : int[] = prim::Constant[value=[3, 1, 1]]() %6 : int[] = prim::Constant[value=[3, 2, 1]]() %7 : Tensor = aten::avg_pool3d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%7) fw_re: [[[[[-3.55849504e-01 -3.85911971e-01 -2.12030336e-01 ... -2.85069793e-01 4.47480865e-02 -6.71768188e-02] [-5.36859334e-01 -6.55908763e-01 -1.71902001e-01 ... 1.62605360e-01 4.75042701e-01 6.94806814e-01] [-4.08722311e-01 -7.54322350e-01 -4.54322010e-01 ... -2.82307178e-01 6.91614866e-01 6.23417556e-01] ... [ 4.15153414e-01 -1.17821850e-01 -4.20586824e-01 ... -7.79781565e-02 -1.85268596e-01 -3.98623794e-01] [ 4.87230390e-01 7.39605367e-01 6.73090518e-01 ... 9.73015010e-01 3.05870976e-02 -5.50967097e-01] [-2.91340977e-01 5.97317278e-01 1.12657559e+00 ... 8.50027561e-01 4.02740717e-01 -3.80257934e-01]] [[-2.73065120e-01 1.86536357e-01 1.30275771e-01 ... -1.98892206e-02 2.62304276e-01 -2.10845266e-02] [-1.18392669e-01 4.61493462e-01 -1.58034146e-01 ... 3.61991435e-01 1.27553359e-01 1.53765023e-01] [ 8.87847006e-01 4.08451915e-01 -8.74481261e-01 ... 2.58166015e-01 2.23721579e-01 2.95002431e-01] ... [ 4.10784297e-02 -6.09821320e-01 1.61445633e-01 ... 1.98177382e-01 3.03736001e-01 2.97542095e-01] [ 2.20606446e-01 -3.03187340e-01 1.84386685e-01 ... 1.04367733e-02 -1.45814523e-01 2.32334491e-02] [ 8.19751620e-02 -5.47670960e-01 4.35152680e-01 ... -5.00366330e-01 -2.44607449e-01 -2.64380217e-01]] [[ 5.30156791e-01 -2.78061479e-01 2.47118488e-01 ... -5.31821661e-02 6.64240837e-01 9.45914984e-02] [ 4.23551559e-01 -1.85265645e-01 1.51214674e-01 ... 
-2.07516775e-01 1.94586828e-01 -1.96056142e-01] [-1.31988615e-01 -3.55670691e-01 -1.89182818e-01 ... 2.41107140e-02 -1.96732745e-01 -2.50476804e-02] ... [-1.00360429e+00 3.42843622e-01 -1.30999774e-01 ... -3.87235321e-02 -5.35257645e-02 -1.31717131e-01] [-4.48529720e-01 -1.92198396e-01 6.64635122e-01 ... 3.16394776e-01 -3.82580549e-01 4.79707092e-01] [ 4.95122522e-01 -2.19190553e-01 7.33138621e-01 ... 1.88092306e-01 -2.40867138e-01 9.01731253e-01]] [[-1.47889152e-01 -2.35101640e-01 3.48196626e-01 ... 4.35182720e-01 1.56845346e-01 -9.94945109e-01] [-3.75637859e-01 3.64077091e-01 -4.80307341e-01 ... 5.66298068e-01 2.68740535e-01 -3.55556756e-01] [-4.39950377e-01 4.70881701e-01 -4.56476003e-01 ... -2.19043687e-01 3.74334127e-01 1.10632867e-01] ... [-5.35821021e-01 1.73907325e-01 -3.62818360e-01 ... -1.62180036e-01 -4.85577464e-01 6.53574288e-01] [-1.24120146e-01 4.57192272e-01 -4.88426536e-01 ... -5.53160720e-02 -3.03310066e-01 4.13954616e-01] [-3.21451873e-01 -3.47279310e-01 1.16827972e-01 ... 3.42672825e-01 1.43406570e-01 2.05696642e-01]] [[ 1.60866484e-01 1.60756484e-01 -1.71050176e-01 ... 3.32914919e-01 4.21682566e-01 -1.19379163e-03] [ 2.53875852e-01 -1.12458885e-01 -3.09295982e-01 ... 6.84112310e-03 1.74737573e-01 3.40871096e-01] [ 3.40851575e-01 -9.27746668e-02 -2.47454762e-01 ... -1.80294320e-01 -4.55386996e-01 1.42967016e-01] ... [ 6.95854962e-01 -3.58032554e-01 -2.15417817e-01 ... -1.69665173e-01 7.95981944e-01 1.01266302e-01] [ 6.47928298e-01 -4.36679840e-01 4.37007219e-01 ... -5.98589599e-01 8.41371536e-01 4.82648253e-01] [ 7.31884181e-01 -8.94622028e-01 4.54034090e-01 ... -3.58027428e-01 4.99316072e-03 8.46601948e-02]]] [[[ 7.29555070e-01 7.02457204e-02 4.29949313e-01 ... 4.53769058e-01 -5.46309054e-01 -7.71457627e-02] [ 8.69672775e-01 -1.14341581e-03 -5.55876791e-01 ... 6.52307451e-01 -1.15004569e-01 -1.15641572e-01] [ 8.61368716e-01 4.10182327e-01 -5.05727112e-01 ... 6.59554422e-01 -3.06710392e-01 -2.13381127e-01] ... 
[ 5.04061401e-01 1.27867207e-01 3.85390967e-01 ... 4.10526901e-01 3.19257885e-01 8.33953857e-01] [ 4.65276390e-01 -2.49260947e-01 4.12137121e-01 ... -3.20703149e-01 1.77729785e-01 1.76735237e-01] [-8.04886073e-02 2.21949518e-02 -4.24576372e-01 ... -4.82144207e-01 4.49792027e-01 -2.98693597e-01]] [[-3.91807348e-01 -4.97107029e-01 3.35075885e-01 ... -5.12454569e-01 -5.19663095e-01 -3.03285569e-01] [-8.08492005e-01 -5.00446372e-02 3.14716697e-01 ... 2.26093993e-01 -8.39361846e-01 -2.73979604e-02] [-1.15327179e+00 -1.69935092e-01 2.66725510e-01 ... 7.82722712e-01 -3.96229535e-01 6.39649704e-02] ... [-3.70154053e-01 -1.58030882e-01 2.54186004e-01 ... -1.32282436e-01 -1.99406277e-02 2.91730046e-01] [-3.64507996e-02 1.15212895e-01 1.64600536e-01 ... 1.70400903e-01 1.77155077e-01 5.09208679e-01] [-5.53375483e-03 2.52418965e-01 -3.57648164e-01 ... 4.73068267e-01 8.68017495e-01 6.99250162e-01]] [[-6.17649019e-01 -1.47812083e-01 3.06803018e-01 ... 9.58465934e-02 8.29003334e-01 5.76762676e-01] [-8.76275599e-02 -1.53943732e-01 7.67512321e-02 ... 3.38569313e-01 8.69767845e-01 -1.03661567e-01] [ 6.03643060e-02 -4.90270466e-01 2.94218838e-01 ... 3.55273247e-01 2.48999834e-01 -1.38790250e-01] ... [-5.92039108e-01 6.47775173e-01 6.07295573e-01 ... -4.74017709e-01 -1.64734066e-01 -3.47269922e-01] [-8.89054000e-01 7.33175993e-01 3.80725861e-01 ... -7.16334164e-01 -5.52049160e-01 1.79588795e-01] [-5.17093897e-01 3.10342520e-01 -3.50795299e-01 ... -4.61021513e-01 -4.14445966e-01 2.83836633e-01]] [[ 8.73614773e-02 2.73423165e-01 3.18249196e-01 ... -1.78441241e-01 -9.96701494e-02 -7.97720477e-02] [-2.00758874e-02 -6.37589097e-01 -1.39279261e-01 ... -2.95980573e-02 -3.22441086e-02 -9.04914141e-02] [-5.55584490e-01 -7.33434916e-01 -7.44996443e-02 ... 2.68712137e-02 7.22269595e-01 -4.72394042e-02] ... [-5.95547676e-01 -6.57265365e-01 -6.12592936e-01 ... -4.35012907e-01 -9.11934435e-01 1.58426091e-01] [-4.25115079e-01 8.41823220e-02 -5.06480522e-02 ... 
-5.42995214e-01 -3.64031941e-01 3.15438211e-02] [ 3.44878465e-01 1.42365918e-01 4.58794944e-02 ... 4.66796190e-01 1.23659573e-01 2.95071363e-01]] [[-1.32839426e-01 1.33971259e-01 -3.57313454e-02 ... 7.11364746e-01 -3.75245571e-01 2.18143031e-01] [-3.81655186e-01 2.97915459e-01 6.69053316e-01 ... -1.15252651e-01 -2.30581999e-01 1.68391857e-02] [ 6.02633543e-02 -7.08321705e-02 3.60421270e-01 ... -4.82764810e-01 5.30903876e-01 -4.00018424e-01] ... [ 5.83788931e-01 -9.64437246e-01 3.36336255e-01 ... 2.23538771e-01 9.99694169e-02 -2.27931559e-01] [ 2.69346327e-01 -2.48714849e-01 1.04279526e-01 ... -1.08167566e-01 4.19418365e-01 -2.81707644e-02] [-1.16355121e-01 -6.93284273e-02 -1.60137892e-01 ... 4.86976296e-01 2.55340546e-01 3.71500283e-01]]] [[[-1.87793076e-01 -5.40196359e-01 4.59110051e-01 ... 1.48468524e-01 6.34849072e-03 2.74960458e-01] [-8.63585413e-01 -5.58809280e-01 -2.73897380e-01 ... -3.80038470e-01 -9.12493467e-02 8.72920975e-02] [-4.13367420e-01 2.82692820e-01 -5.88270843e-01 ... -6.68566942e-01 5.45752756e-02 1.22880548e-01] ... [-4.54254061e-01 -7.95815945e-01 -1.61133096e-01 ... -1.34116858e-01 -1.03164291e+00 1.14225835e-01] [-8.08449268e-01 -3.28940749e-01 1.34828344e-01 ... -7.57185698e-01 -8.64350796e-01 9.04662311e-02] [-1.02203524e+00 7.14230016e-02 -3.00747808e-02 ... -5.89228213e-01 -1.56956539e-01 3.61440569e-01]] [[ 7.83190072e-01 -2.17976585e-01 -7.10089028e-01 ... -2.36921515e-02 4.28326458e-01 2.67486811e-01] [ 7.95921147e-01 3.38574857e-01 -3.60113978e-01 ... 1.73372373e-01 4.01658773e-01 4.33835499e-02] [ 2.78097719e-01 1.25753015e-01 2.89543778e-01 ... -2.59795785e-01 3.65957588e-01 2.04464510e-01] ... [ 3.74840021e-01 2.37955615e-01 7.67576635e-01 ... -5.30522875e-02 -1.56723857e-01 -1.97548166e-01] [ 1.29612952e-01 -2.02011541e-01 2.24836409e-01 ... -5.34732878e-01 -2.86495239e-02 2.30930507e-01] [ 2.43535385e-01 -1.03871830e-01 2.15156838e-01 ... 
2.37034902e-01 2.05758333e-01 4.96210337e-01]] [[ 2.61393458e-01 1.80722084e-02 4.06414956e-01 ... -8.10149610e-01 7.56324470e-01 3.17761689e-01] [ 4.19906884e-01 -3.10587436e-01 6.33117378e-01 ... -6.68041527e-01 9.70963240e-02 -6.37575611e-02] [ 3.58865589e-01 -7.25559294e-01 6.73220634e-01 ... -3.20019245e-01 -2.64212489e-01 -7.18225241e-01] ... [ 5.41213632e-01 9.04366493e-01 5.49587548e-01 ... 2.74469447e-03 3.16810042e-01 -8.22550356e-01] [ 6.78874671e-01 1.00505066e+00 2.71971017e-01 ... -9.36526880e-02 4.73872274e-01 -4.02014345e-01] [ 9.92893517e-01 4.31693822e-01 3.81719023e-01 ... 2.04090834e-01 -2.81341430e-02 1.05370402e-01]] [[ 2.24549174e-02 4.57630247e-01 -5.55137455e-01 ... -4.01686698e-01 -3.51252198e-01 4.89508390e-01] [-2.88185745e-01 -1.48815945e-01 -7.53409803e-01 ... 1.13929667e-01 -3.84736627e-01 3.81535560e-01] [ 2.61106014e-01 -7.79845491e-02 -1.21245749e-01 ... 9.52283740e-02 -7.93815136e-01 -2.88390070e-01] ... [ 1.83884993e-01 4.09916371e-01 -3.87537628e-01 ... -1.64636329e-01 3.14110130e-01 -8.35497212e-03] [ 2.35101864e-01 3.88137221e-01 -7.62315169e-02 ... 1.30859241e-01 6.33566558e-01 -1.08079113e-01] [ 2.74388731e-01 -7.42375776e-02 -4.48141187e-01 ... 2.08682463e-01 -2.80775372e-02 -4.55847830e-01]] [[ 5.92820831e-02 4.93762255e-01 -4.49296713e-01 ... -4.11325693e-01 -3.34723592e-02 -1.27502099e-01] [ 3.01810473e-01 5.47404945e-01 -7.18328297e-01 ... -6.37008905e-01 2.29825795e-01 -6.54092968e-01] [ 5.55083215e-01 7.08822787e-01 -4.71414238e-01 ... -1.67595539e-02 6.71992958e-01 -1.29244840e+00] ... [-2.07653344e-01 1.99148178e-01 -3.49598020e-01 ... -2.54478216e-01 -4.60079342e-01 7.14202821e-01] [-2.30670199e-01 -2.66324073e-01 -3.69781524e-01 ... -3.68714660e-01 -1.77128196e-01 6.72358215e-01] [ 9.28272977e-02 -1.24744475e-01 -3.35562199e-01 ... -6.08727098e-01 2.73028046e-01 4.52965945e-01]]]]]; ov_res: [[[[[-3.55849504e-01 -3.85911971e-01 -2.12030336e-01 ... 
-2.85069793e-01 4.47480865e-02 -6.71768188e-02] [-5.36859334e-01 -6.55908763e-01 -1.71902001e-01 ... 1.62605360e-01 4.75042701e-01 6.94806814e-01] [-4.08722311e-01 -7.54322350e-01 -4.54322010e-01 ... -2.82307178e-01 6.91614866e-01 6.23417556e-01] ... [ 4.15153414e-01 -1.17821850e-01 -4.20586824e-01 ... -7.79781565e-02 -1.85268596e-01 -3.98623794e-01] [ 4.87230390e-01 7.39605367e-01 6.73090518e-01 ... 9.73015010e-01 3.05870976e-02 -5.50967097e-01] [-2.91340977e-01 5.97317278e-01 1.12657559e+00 ... 8.50027561e-01 4.02740717e-01 -3.80257934e-01]] [[-2.73065120e-01 1.86536357e-01 1.30275771e-01 ... -1.98892206e-02 2.62304276e-01 -2.10845266e-02] [-1.18392669e-01 4.61493462e-01 -1.58034146e-01 ... 3.61991435e-01 1.27553359e-01 1.53765023e-01] [ 8.87847006e-01 4.08451915e-01 -8.74481261e-01 ... 2.58166015e-01 2.23721579e-01 2.95002431e-01] ... [ 4.10784297e-02 -6.09821320e-01 1.61445633e-01 ... 1.98177382e-01 3.03736001e-01 2.97542095e-01] [ 2.20606446e-01 -3.03187340e-01 1.84386685e-01 ... 1.04367733e-02 -1.45814523e-01 2.32334491e-02] [ 8.19751620e-02 -5.47670960e-01 4.35152680e-01 ... -5.00366330e-01 -2.44607449e-01 -2.64380217e-01]] [[ 5.30156791e-01 -2.78061479e-01 2.47118488e-01 ... -5.31821661e-02 6.64240837e-01 9.45914984e-02] [ 4.23551559e-01 -1.85265645e-01 1.51214674e-01 ... -2.07516775e-01 1.94586828e-01 -1.96056142e-01] [-1.31988615e-01 -3.55670691e-01 -1.89182818e-01 ... 2.41107140e-02 -1.96732745e-01 -2.50476804e-02] ... [-1.00360429e+00 3.42843622e-01 -1.30999774e-01 ... -3.87235321e-02 -5.35257645e-02 -1.31717131e-01] [-4.48529720e-01 -1.92198396e-01 6.64635122e-01 ... 3.16394776e-01 -3.82580549e-01 4.79707092e-01] [ 4.95122522e-01 -2.19190553e-01 7.33138621e-01 ... 1.88092306e-01 -2.40867138e-01 9.01731253e-01]] [[-1.47889152e-01 -2.35101640e-01 3.48196626e-01 ... 4.35182720e-01 1.56845346e-01 -9.94945109e-01] [-3.75637859e-01 3.64077091e-01 -4.80307341e-01 ... 
5.66298068e-01 2.68740535e-01 -3.55556756e-01] [-4.39950377e-01 4.70881701e-01 -4.56476003e-01 ... -2.19043687e-01 3.74334127e-01 1.10632867e-01] ... [-5.35821021e-01 1.73907325e-01 -3.62818360e-01 ... -1.62180036e-01 -4.85577464e-01 6.53574288e-01] [-1.24120146e-01 4.57192272e-01 -4.88426536e-01 ... -5.53160720e-02 -3.03310066e-01 4.13954616e-01] [-3.21451873e-01 -3.47279310e-01 1.16827972e-01 ... 3.42672825e-01 1.43406570e-01 2.05696642e-01]] [[ 1.60866484e-01 1.60756484e-01 -1.71050176e-01 ... 3.32914919e-01 4.21682566e-01 -1.19379163e-03] [ 2.53875852e-01 -1.12458885e-01 -3.09295982e-01 ... 6.84112310e-03 1.74737573e-01 3.40871096e-01] [ 3.40851575e-01 -9.27746668e-02 -2.47454762e-01 ... -1.80294320e-01 -4.55386996e-01 1.42967016e-01] ... [ 6.95854962e-01 -3.58032554e-01 -2.15417817e-01 ... -1.69665173e-01 7.95981944e-01 1.01266302e-01] [ 6.47928298e-01 -4.36679840e-01 4.37007219e-01 ... -5.98589599e-01 8.41371536e-01 4.82648253e-01] [ 7.31884181e-01 -8.94622028e-01 4.54034090e-01 ... -3.58027428e-01 4.99316072e-03 8.46601948e-02]]] [[[ 7.29555070e-01 7.02457204e-02 4.29949313e-01 ... 4.53769058e-01 -5.46309054e-01 -7.71457627e-02] [ 8.69672775e-01 -1.14341581e-03 -5.55876791e-01 ... 6.52307451e-01 -1.15004569e-01 -1.15641572e-01] [ 8.61368716e-01 4.10182327e-01 -5.05727112e-01 ... 6.59554422e-01 -3.06710392e-01 -2.13381127e-01] ... [ 5.04061401e-01 1.27867207e-01 3.85390967e-01 ... 4.10526901e-01 3.19257885e-01 8.33953857e-01] [ 4.65276390e-01 -2.49260947e-01 4.12137121e-01 ... -3.20703149e-01 1.77729785e-01 1.76735237e-01] [-8.04886073e-02 2.21949518e-02 -4.24576372e-01 ... -4.82144207e-01 4.49792027e-01 -2.98693597e-01]] [[-3.91807348e-01 -4.97107029e-01 3.35075885e-01 ... -5.12454569e-01 -5.19663095e-01 -3.03285569e-01] [-8.08492005e-01 -5.00446372e-02 3.14716697e-01 ... 2.26093993e-01 -8.39361846e-01 -2.73979604e-02] [-1.15327179e+00 -1.69935092e-01 2.66725510e-01 ... 7.82722712e-01 -3.96229535e-01 6.39649704e-02] ... 
[-3.70154053e-01 -1.58030882e-01 2.54186004e-01 ... -1.32282436e-01 -1.99406277e-02 2.91730046e-01] [-3.64507996e-02 1.15212895e-01 1.64600536e-01 ... 1.70400903e-01 1.77155077e-01 5.09208679e-01] [-5.53375483e-03 2.52418965e-01 -3.57648164e-01 ... 4.73068267e-01 8.68017495e-01 6.99250162e-01]] [[-6.17649019e-01 -1.47812083e-01 3.06803018e-01 ... 9.58465934e-02 8.29003334e-01 5.76762676e-01] [-8.76275599e-02 -1.53943732e-01 7.67512321e-02 ... 3.38569313e-01 8.69767845e-01 -1.03661567e-01] [ 6.03643060e-02 -4.90270466e-01 2.94218838e-01 ... 3.55273247e-01 2.48999834e-01 -1.38790250e-01] ... [-5.92039108e-01 6.47775173e-01 6.07295573e-01 ... -4.74017709e-01 -1.64734066e-01 -3.47269922e-01] [-8.89054000e-01 7.33175993e-01 3.80725861e-01 ... -7.16334164e-01 -5.52049160e-01 1.79588795e-01] [-5.17093897e-01 3.10342520e-01 -3.50795299e-01 ... -4.61021513e-01 -4.14445966e-01 2.83836633e-01]] [[ 8.73614773e-02 2.73423165e-01 3.18249196e-01 ... -1.78441241e-01 -9.96701494e-02 -7.97720477e-02] [-2.00758874e-02 -6.37589097e-01 -1.39279261e-01 ... -2.95980573e-02 -3.22441086e-02 -9.04914141e-02] [-5.55584490e-01 -7.33434916e-01 -7.44996443e-02 ... 2.68712137e-02 7.22269595e-01 -4.72394042e-02] ... [-5.95547676e-01 -6.57265365e-01 -6.12592936e-01 ... -4.35012907e-01 -9.11934435e-01 1.58426091e-01] [-4.25115079e-01 8.41823220e-02 -5.06480522e-02 ... -5.42995214e-01 -3.64031941e-01 3.15438211e-02] [ 3.44878465e-01 1.42365918e-01 4.58794944e-02 ... 4.66796190e-01 1.23659573e-01 2.95071363e-01]] [[-1.32839426e-01 1.33971259e-01 -3.57313454e-02 ... 7.11364746e-01 -3.75245571e-01 2.18143031e-01] [-3.81655186e-01 2.97915459e-01 6.69053316e-01 ... -1.15252651e-01 -2.30581999e-01 1.68391857e-02] [ 6.02633543e-02 -7.08321705e-02 3.60421270e-01 ... -4.82764810e-01 5.30903876e-01 -4.00018424e-01] ... [ 5.83788931e-01 -9.64437246e-01 3.36336255e-01 ... 2.23538771e-01 9.99694169e-02 -2.27931559e-01] [ 2.69346327e-01 -2.48714849e-01 1.04279526e-01 ... 
-1.08167566e-01 4.19418365e-01 -2.81707644e-02] [-1.16355121e-01 -6.93284273e-02 -1.60137892e-01 ... 4.86976296e-01 2.55340546e-01 3.71500283e-01]]] [[[-1.87793076e-01 -5.40196359e-01 4.59110051e-01 ... 1.48468524e-01 6.34849072e-03 2.74960458e-01] [-8.63585413e-01 -5.58809280e-01 -2.73897380e-01 ... -3.80038470e-01 -9.12493467e-02 8.72920975e-02] [-4.13367420e-01 2.82692820e-01 -5.88270843e-01 ... -6.68566942e-01 5.45752756e-02 1.22880548e-01] ... [-4.54254061e-01 -7.95815945e-01 -1.61133096e-01 ... -1.34116858e-01 -1.03164291e+00 1.14225835e-01] [-8.08449268e-01 -3.28940749e-01 1.34828344e-01 ... -7.57185698e-01 -8.64350796e-01 9.04662311e-02] [-1.02203524e+00 7.14230016e-02 -3.00747808e-02 ... -5.89228213e-01 -1.56956539e-01 3.61440569e-01]] [[ 7.83190072e-01 -2.17976585e-01 -7.10089028e-01 ... -2.36921515e-02 4.28326458e-01 2.67486811e-01] [ 7.95921147e-01 3.38574857e-01 -3.60113978e-01 ... 1.73372373e-01 4.01658773e-01 4.33835499e-02] [ 2.78097719e-01 1.25753015e-01 2.89543778e-01 ... -2.59795785e-01 3.65957588e-01 2.04464510e-01] ... [ 3.74840021e-01 2.37955615e-01 7.67576635e-01 ... -5.30522875e-02 -1.56723857e-01 -1.97548166e-01] [ 1.29612952e-01 -2.02011541e-01 2.24836409e-01 ... -5.34732878e-01 -2.86495239e-02 2.30930507e-01] [ 2.43535385e-01 -1.03871830e-01 2.15156838e-01 ... 2.37034902e-01 2.05758333e-01 4.96210337e-01]] [[ 2.61393458e-01 1.80722084e-02 4.06414956e-01 ... -8.10149610e-01 7.56324470e-01 3.17761689e-01] [ 4.19906884e-01 -3.10587436e-01 6.33117378e-01 ... -6.68041527e-01 9.70963240e-02 -6.37575611e-02] [ 3.58865589e-01 -7.25559294e-01 6.73220634e-01 ... -3.20019245e-01 -2.64212489e-01 -7.18225241e-01] ... [ 5.41213632e-01 9.04366493e-01 5.49587548e-01 ... 2.74469447e-03 3.16810042e-01 -8.22550356e-01] [ 6.78874671e-01 1.00505066e+00 2.71971017e-01 ... -9.36526880e-02 4.73872274e-01 -4.02014345e-01] [ 9.92893517e-01 4.31693822e-01 3.81719023e-01 ... 
2.04090834e-01 -2.81341430e-02 1.05370402e-01]] [[ 2.24549174e-02 4.57630247e-01 -5.55137455e-01 ... -4.01686698e-01 -3.51252198e-01 4.89508390e-01] [-2.88185745e-01 -1.48815945e-01 -7.53409803e-01 ... 1.13929667e-01 -3.84736627e-01 3.81535560e-01] [ 2.61106014e-01 -7.79845491e-02 -1.21245749e-01 ... 9.52283740e-02 -7.93815136e-01 -2.88390070e-01] ... [ 1.83884993e-01 4.09916371e-01 -3.87537628e-01 ... -1.64636329e-01 3.14110130e-01 -8.35497212e-03] [ 2.35101864e-01 3.88137221e-01 -7.62315169e-02 ... 1.30859241e-01 6.33566558e-01 -1.08079113e-01] [ 2.74388731e-01 -7.42375776e-02 -4.48141187e-01 ... 2.08682463e-01 -2.80775372e-02 -4.55847830e-01]] [[ 5.92820831e-02 4.93762255e-01 -4.49296713e-01 ... -4.11325693e-01 -3.34723592e-02 -1.27502099e-01] [ 3.01810473e-01 5.47404945e-01 -7.18328297e-01 ... -6.37008905e-01 2.29825795e-01 -6.54092968e-01] [ 5.55083215e-01 7.08822787e-01 -4.71414238e-01 ... -1.67595539e-02 6.71992958e-01 -1.29244840e+00] ... [-2.07653344e-01 1.99148178e-01 -3.49598020e-01 ... -2.54478216e-01 -4.60079342e-01 7.14202821e-01] [-2.30670199e-01 -2.66324073e-01 -3.69781524e-01 ... -3.68714660e-01 -1.77128196e-01 6.72358215e-01] [ 9.28272977e-02 -1.24744475e-01 -3.35562199e-01 ... -6.08727098e-01 2.73028046e-01 4.52965945e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5752.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %5 : int[] = prim::Constant[value=[0, 0, 0]]() %6 : int[] = prim::Constant[value=[1, 1, 1]]() %7 : int[] = prim::Constant[value=[3, 3, 3]]() %8 : Tensor = aten::avg_pool3d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%8) fw_re: [[[[[ 3.09589151e-02 9.18379948e-02 -5.32431044e-02 ... 4.30814177e-01 6.38051406e-02 -3.18004787e-01] [ 1.41137108e-01 1.36630358e-02 -1.49442226e-01 ... 5.45937479e-01 2.02319711e-01 -2.04457194e-01] [ 1.75667495e-01 4.58715037e-02 3.22476355e-03 ... 5.36222517e-01 3.31536084e-01 1.23059213e-01] ... [ 7.61411265e-02 -1.21351972e-03 -3.92864719e-02 ... -2.66150832e-02 -1.14172541e-01 -1.88740224e-01] [ 1.48197366e-02 8.08522180e-02 -2.99827605e-02 ... 2.79165208e-01 7.33188838e-02 -9.38662440e-02] [ 4.51156162e-02 1.19403355e-01 -9.39647332e-02 ... 4.63248789e-01 1.35257617e-01 -2.70736087e-02]] [[ 1.80120081e-01 1.04551259e-02 3.81315872e-02 ... 3.09398919e-01 2.15745550e-02 -2.02373803e-01] [ 3.46752703e-01 2.47197330e-01 6.35165051e-02 ... 4.70236808e-01 2.51527607e-01 -6.92536682e-02] [ 2.00936571e-01 3.08510810e-01 1.20139934e-01 ... 4.63717192e-01 4.81693774e-01 2.24722013e-01] ... [ 1.55842289e-01 6.68465123e-02 2.37005621e-01 ... -1.95102409e-01 -5.61125018e-02 -9.18853432e-02] [-4.28664498e-02 1.34432361e-01 2.63784260e-01 ... 5.06215096e-02 8.73373970e-02 -6.84237853e-02] [ 2.58024968e-02 2.92644143e-01 2.19199374e-01 ... 2.79697061e-01 1.65175349e-01 -2.70521659e-02]] [[ 1.66802794e-01 -5.05488701e-02 -1.57318205e-01 ... 
2.75968820e-01 1.91138864e-01 -1.89052094e-02] [ 3.05588394e-01 2.40715057e-01 -1.25561669e-01 ... 4.65029389e-01 3.53984118e-01 5.28813824e-02] [ 1.36843666e-01 2.88100302e-01 -5.44536747e-02 ... 3.95943582e-01 4.10535425e-01 2.62339622e-01] ... [-1.64276466e-01 -6.14599371e-03 1.06755406e-01 ... -1.57902882e-01 1.32972747e-01 -1.18323043e-01] [-3.18305969e-01 -1.33707747e-01 -2.76631210e-02 ... -3.66097242e-02 2.07882628e-01 4.22683097e-02] [-1.80030644e-01 5.85791990e-02 8.68183523e-02 ... 7.33207390e-02 1.45840243e-01 -5.73222339e-02]] ... [[ 1.55104212e-02 -2.35272825e-01 -3.52219194e-01 ... -6.26488775e-02 2.73574561e-01 2.45255977e-02] [-5.58326626e-03 -2.06174359e-01 -1.96776018e-01 ... -1.51189864e-01 3.22552443e-01 1.66695863e-01] [ 5.42455986e-02 -7.52387196e-02 -1.70815170e-01 ... -1.01358324e-01 2.61696517e-01 2.77067900e-01] ... [ 4.76407707e-02 4.69213948e-02 4.79162857e-02 ... -2.92457808e-02 -3.23469549e-01 -2.34700009e-01] [ 8.02053586e-02 1.17578618e-01 8.51722434e-02 ... -2.67411228e-02 -1.14429951e-01 1.20524138e-01] [ 4.34338376e-02 3.02696936e-02 5.35093881e-02 ... 3.49511765e-02 -5.76036312e-02 1.17259890e-01]] [[-2.76878148e-01 -4.60155368e-01 -2.75205970e-01 ... -4.86668758e-02 8.42345599e-03 -1.49789274e-01] [-4.07296062e-01 -6.19902551e-01 -3.44159484e-01 ... -1.78785011e-01 -2.28392333e-02 -3.84675227e-02] [-1.72219336e-01 -4.32174623e-01 -4.59425241e-01 ... 1.13849621e-02 1.23835050e-01 1.43407047e-01] ... [ 9.56696048e-02 3.33665535e-02 -1.21012196e-01 ... -7.91774318e-02 -3.23553860e-01 -2.62802154e-01] [ 8.96922275e-02 -1.24534853e-02 -1.24564953e-01 ... -1.83047131e-01 -2.35549152e-01 -4.28376682e-02] [ 7.50467107e-02 -4.80929688e-02 -5.44409044e-02 ... -2.52878368e-01 -2.48898670e-01 -9.22248811e-02]] [[-2.04934195e-01 -3.43269914e-01 -4.36560839e-01 ... -1.42218173e-01 -1.81593634e-02 -7.87435994e-02] [-3.24690700e-01 -5.44948876e-01 -4.12667811e-01 ... 
-9.87801701e-02 -3.12318373e-02 1.86172239e-02] [-1.09914832e-01 -4.19885188e-01 -4.97535527e-01 ... -7.83425785e-05 1.38591439e-01 1.42438486e-01] ... [ 1.21396072e-01 -7.11979493e-02 1.31844968e-01 ... -1.94127828e-01 -1.92465603e-01 -1.81849897e-01] [ 3.03572297e-01 -3.41747925e-02 7.79148489e-02 ... -2.11944833e-01 -1.08692378e-01 -2.72571780e-02] [-6.19594082e-02 -2.19580635e-01 -1.27279058e-01 ... -1.18316777e-01 3.97601351e-02 -9.42491144e-02]]] [[[-4.17020768e-02 2.85440721e-02 -6.02814481e-02 ... -9.10165831e-02 -2.21717749e-02 9.74221826e-02] [-1.75898924e-01 -5.02513014e-02 -2.56121963e-01 ... -4.39231992e-01 -2.91367471e-01 -2.29343474e-01] [-1.35534301e-01 -1.74179450e-02 -1.68418899e-01 ... -3.57322693e-01 -2.10705727e-01 -1.66287459e-02] ... [-2.61638686e-02 -1.86219588e-01 -2.04971567e-01 ... -2.94564534e-02 -1.77420497e-01 -1.22699983e-01] [ 2.75025249e-01 -6.50243834e-02 -1.77052945e-01 ... 6.56235516e-02 -3.12491745e-01 -1.30817473e-01] [ 3.20216012e-03 -1.49444237e-01 -9.65116024e-02 ... 1.99796870e-01 -5.65981455e-02 -2.57041752e-01]] [[-3.74747515e-01 -2.08913699e-01 -8.02817270e-02 ... -1.98818102e-01 -1.95413396e-01 -9.41707268e-02] [-3.48825246e-01 -1.93254262e-01 -3.04519624e-01 ... -5.13410032e-01 -4.67266858e-01 -3.53696227e-01] [-2.99185067e-01 -3.63480151e-02 -1.77274644e-01 ... -4.80651528e-01 -3.62174004e-01 -7.47573748e-02] ... [-1.57475248e-01 -1.97848707e-01 -1.50414050e-01 ... -1.03258371e-01 -1.47089347e-01 3.86776403e-02] [ 9.72380266e-02 1.44930795e-01 -7.26649910e-02 ... 6.71201423e-02 -1.82995632e-01 4.55823354e-02] [-8.08965042e-02 -4.15385887e-02 -6.48915693e-02 ... 1.15228891e-01 -5.74700423e-02 -1.41527668e-01]] [[-4.54192340e-01 -3.92034739e-01 -1.59066543e-01 ... -2.57974297e-01 -4.27909315e-01 -1.94664195e-01] [-1.47510976e-01 -1.35700479e-01 -1.59329787e-01 ... -3.49729568e-01 -3.27876210e-01 -1.32447898e-01] [-1.66741654e-01 1.76983187e-03 -1.39182061e-01 ... -3.16798240e-01 -3.12595785e-01 4.03053574e-02] ... 
[-2.69874841e-01 -2.22813070e-01 -1.43197551e-01 ... -6.78232536e-02 -3.44202250e-01 -1.90432385e-01] [-7.49468803e-02 2.37059928e-02 -2.08354682e-01 ... 1.28729343e-02 -2.35847771e-01 1.73662640e-02] [-1.24482565e-01 -2.20540930e-02 -5.48743159e-02 ... -5.60416142e-04 -6.07789047e-02 -1.60852253e-01]] ... [[ 3.97003582e-03 -2.68378854e-02 -1.31250486e-01 ... 6.93490803e-02 5.37346080e-02 -1.11861914e-01] [ 3.86105329e-02 -4.50947545e-02 -3.28126281e-01 ... 4.38235439e-02 -3.46045196e-02 -4.83806543e-02] [-8.26635063e-02 6.29291162e-02 -4.00424339e-02 ... -1.17276177e-01 -3.29744935e-01 -2.71321863e-01] ... [ 5.24930954e-02 1.01636268e-01 9.26029310e-02 ... 1.98704600e-01 2.58853525e-01 -9.17751691e-04] [-7.46096522e-02 -1.55933291e-01 -1.22078434e-01 ... 3.12974334e-01 4.09751505e-01 1.10159673e-01] [-3.26555111e-02 -1.93437785e-01 -8.05602893e-02 ... 5.82796037e-02 1.56645134e-01 1.69370085e-01]] [[-2.23418325e-01 -7.51262605e-02 -4.13132682e-02 ... -1.32634476e-01 -1.90257043e-01 -3.65796834e-01] [-8.53590965e-02 -1.30712986e-01 -2.27230266e-01 ... -4.21018451e-02 -7.75713101e-02 -2.36132935e-01] [-1.26473352e-01 -3.52294780e-02 -5.78981787e-02 ... -3.41190100e-02 -2.99095035e-01 -3.40205908e-01] ... [ 1.02557614e-01 1.82756841e-01 2.14207813e-01 ... 2.31499985e-01 2.15966865e-01 1.55713066e-01] [ 6.14729896e-02 5.97892776e-02 1.41006052e-01 ... 3.71703118e-01 1.77435845e-01 3.44787701e-03] [-6.14223210e-03 -1.87686786e-01 -1.35046229e-01 ... 1.49218351e-01 -3.29493359e-02 3.85397021e-03]] [[-1.48547083e-01 -1.30324781e-01 9.94076766e-03 ... -3.51241350e-01 -3.73151183e-01 -4.16174442e-01] [-1.28352597e-01 -2.38170162e-01 -1.35713443e-01 ... -1.67372420e-01 -1.45229951e-01 -1.92767039e-01] [-2.38683954e-01 -2.08460733e-01 -2.44774818e-01 ... -1.52326480e-01 -2.81957418e-01 -3.44480187e-01] ... [-4.85554971e-02 6.67506680e-02 5.05870283e-02 ... 2.00184569e-01 1.73683658e-01 2.30708063e-01] [-4.66368236e-02 7.22414926e-02 2.17812229e-02 ... 
2.45708734e-01 1.38695976e-02 -4.69963737e-02] [ 1.15726650e-01 2.38307454e-02 -1.54940873e-01 ... 1.15958899e-01 -1.47738278e-01 -1.81327462e-01]]] [[[-1.13153078e-01 1.54510498e-01 3.99646759e-02 ... 2.78089583e-01 2.77925968e-01 3.33338797e-01] [ 1.46432474e-01 2.59760827e-01 1.25250489e-01 ... 1.87128738e-01 2.22069070e-01 4.67198640e-01] [ 3.35327834e-02 1.78065911e-01 1.94274988e-02 ... -1.22995466e-01 1.00055888e-01 4.36698765e-01] ... [-7.85140991e-02 1.26977395e-02 -1.07586913e-01 ... 2.79399287e-02 -8.36305246e-02 1.82272211e-01] [ 3.26908231e-01 3.37901264e-01 3.80707756e-02 ... -1.56888366e-01 -1.65189773e-01 2.29173183e-01] [ 3.47301334e-01 1.81549236e-01 -1.19655959e-01 ... 9.95211396e-03 1.84839871e-02 1.83858693e-01]] [[-8.80141705e-02 1.33221835e-01 7.62521625e-02 ... 1.65392399e-01 8.37890580e-02 2.80074954e-01] [ 1.61254376e-01 2.96701878e-01 1.25823393e-01 ... 1.27288178e-01 1.93585396e-01 4.21271950e-01] [-4.90662344e-02 1.40792400e-01 -6.07263818e-02 ... -1.25414252e-01 2.66072154e-01 3.98887783e-01] ... [-1.62604198e-01 -1.48044843e-02 -3.47472787e-01 ... -1.58810347e-01 -1.48850977e-01 1.82312846e-01] [ 1.77263077e-02 6.72334507e-02 -3.05268794e-01 ... -1.64959982e-01 -3.82860929e-01 -6.63922029e-03] [ 1.52470872e-01 -6.81099817e-02 -4.09562856e-01 ... 2.86644250e-02 -2.02678144e-01 -6.33158907e-02]] [[-2.08298966e-01 6.50324300e-02 7.55713657e-02 ... 4.35850844e-02 -1.39573723e-01 2.03452483e-01] [-1.03613185e-02 8.12991634e-02 4.25464697e-02 ... 3.00443973e-02 -7.15076039e-03 3.32138807e-01] [-7.38871982e-03 1.50153667e-01 8.73068199e-02 ... 3.84165067e-03 1.82408899e-01 3.04645360e-01] ... [-4.05556679e-01 -8.29155445e-02 -3.82378846e-01 ... -3.69180113e-01 -1.72064602e-01 -2.94747744e-02] [-3.33076209e-01 -8.16573277e-02 -5.05155802e-01 ... -6.22847602e-02 -3.38297039e-01 -1.16082065e-01] [-4.92089689e-02 2.06993129e-02 -3.11898023e-01 ... 2.72568911e-01 -7.80419260e-02 -9.08309408e-03]] ... 
[[-1.70781165e-01 -1.98588729e-01 2.31457781e-02 ... 1.17504448e-01 3.50977778e-01 3.79016042e-01] [-2.24378839e-01 -5.39961532e-02 1.74192414e-01 ... -1.03831254e-01 1.58692580e-02 -6.10908046e-02] [ 7.22303987e-02 6.48928657e-02 7.35310987e-02 ... -3.03336740e-01 -2.14845687e-01 -1.77980423e-01] ... [ 2.18937486e-01 3.53834301e-01 3.20950091e-01 ... -5.59759401e-02 1.19004525e-01 -1.25195300e-02] [ 3.28909129e-01 4.49543059e-01 9.45366547e-02 ... -7.64273629e-02 1.92014500e-01 -1.30021110e-01] [ 5.31513058e-02 2.39971697e-01 2.11330615e-02 ... 7.91113228e-02 3.93632114e-01 1.11044265e-01]] [[-2.02299118e-01 -1.75878018e-01 9.02321041e-02 ... 2.60111034e-01 3.75195146e-01 3.86213332e-01] [-1.64366856e-01 -1.42964542e-01 1.29506141e-01 ... 6.71903938e-02 1.98082700e-01 1.71271145e-01] [-1.23072993e-02 -2.29703397e-01 -1.72319040e-01 ... -2.65787877e-02 -7.85642713e-02 3.31636053e-03] ... [ 9.84459221e-02 -7.94093162e-02 -2.90865060e-02 ... 1.77107453e-01 2.50800580e-01 1.38394609e-01] [-3.98417376e-02 1.26417931e-02 -1.61380947e-01 ... -9.51025933e-02 3.69649082e-02 -1.21278362e-02] [-3.24398071e-01 -1.30588919e-01 -1.36927530e-01 ... 1.86157487e-02 1.36949092e-01 9.87994075e-02]] [[-2.58537799e-01 -3.66465628e-01 1.01331204e-01 ... 8.71856213e-02 -3.31174210e-02 7.47049674e-02] [-1.57648385e-01 -2.43538499e-01 1.86367318e-01 ... 5.75875789e-02 1.04180142e-01 -4.59729210e-02] [-2.11019009e-01 -4.33863312e-01 -3.25275004e-01 ... -6.43832684e-02 -2.12734163e-01 -2.80855566e-01] ... [ 1.91181913e-01 1.69664204e-01 2.32855439e-01 ... 1.76316559e-01 2.03670040e-01 1.46791950e-01] [-6.32038713e-02 9.47502181e-02 1.12706758e-01 ... -4.48960476e-02 -4.44070324e-02 5.32058477e-02] [-2.25720942e-01 -3.19699943e-02 5.64147495e-02 ... 3.02536450e-02 -2.92454585e-02 1.13243526e-02]]]]]; ov_res: [[[[[ 3.09589151e-02 9.18379948e-02 -5.32431044e-02 ... 4.30814177e-01 6.38051406e-02 -3.18004787e-01] [ 1.41137108e-01 1.36630358e-02 -1.49442226e-01 ... 
5.45937479e-01 2.02319711e-01 -2.04457194e-01] [ 1.75667495e-01 4.58715037e-02 3.22476355e-03 ... 5.36222517e-01 3.31536084e-01 1.23059213e-01] ... [ 7.61411265e-02 -1.21351972e-03 -3.92864719e-02 ... -2.66150832e-02 -1.14172541e-01 -1.88740224e-01] [ 1.48197366e-02 8.08522180e-02 -2.99827605e-02 ... 2.79165208e-01 7.33188838e-02 -9.38662440e-02] [ 4.51156162e-02 1.19403355e-01 -9.39647332e-02 ... 4.63248789e-01 1.35257617e-01 -2.70736087e-02]] [[ 1.80120081e-01 1.04551259e-02 3.81315872e-02 ... 3.09398919e-01 2.15745550e-02 -2.02373803e-01] [ 3.46752703e-01 2.47197330e-01 6.35165051e-02 ... 4.70236808e-01 2.51527607e-01 -6.92536682e-02] [ 2.00936571e-01 3.08510810e-01 1.20139934e-01 ... 4.63717192e-01 4.81693774e-01 2.24722013e-01] ... [ 1.55842289e-01 6.68465123e-02 2.37005621e-01 ... -1.95102409e-01 -5.61125018e-02 -9.18853432e-02] [-4.28664498e-02 1.34432361e-01 2.63784260e-01 ... 5.06215096e-02 8.73373970e-02 -6.84237853e-02] [ 2.58024968e-02 2.92644143e-01 2.19199374e-01 ... 2.79697061e-01 1.65175349e-01 -2.70521659e-02]] [[ 1.66802794e-01 -5.05488701e-02 -1.57318205e-01 ... 2.75968820e-01 1.91138864e-01 -1.89052094e-02] [ 3.05588394e-01 2.40715057e-01 -1.25561669e-01 ... 4.65029389e-01 3.53984118e-01 5.28813824e-02] [ 1.36843666e-01 2.88100302e-01 -5.44536747e-02 ... 3.95943582e-01 4.10535425e-01 2.62339622e-01] ... [-1.64276466e-01 -6.14599371e-03 1.06755406e-01 ... -1.57902882e-01 1.32972747e-01 -1.18323043e-01] [-3.18305969e-01 -1.33707747e-01 -2.76631210e-02 ... -3.66097242e-02 2.07882628e-01 4.22683097e-02] [-1.80030644e-01 5.85791990e-02 8.68183523e-02 ... 7.33207390e-02 1.45840243e-01 -5.73222339e-02]] ... [[ 1.55104212e-02 -2.35272825e-01 -3.52219194e-01 ... -6.26488775e-02 2.73574561e-01 2.45255977e-02] [-5.58326626e-03 -2.06174359e-01 -1.96776018e-01 ... -1.51189864e-01 3.22552443e-01 1.66695863e-01] [ 5.42455986e-02 -7.52387196e-02 -1.70815170e-01 ... -1.01358324e-01 2.61696517e-01 2.77067900e-01] ... 
[ 4.76407707e-02 4.69213948e-02 4.79162857e-02 ... -2.92457808e-02 -3.23469549e-01 -2.34700009e-01] [ 8.02053586e-02 1.17578618e-01 8.51722434e-02 ... -2.67411228e-02 -1.14429951e-01 1.20524138e-01] [ 4.34338376e-02 3.02696936e-02 5.35093881e-02 ... 3.49511765e-02 -5.76036312e-02 1.17259890e-01]] [[-2.76878148e-01 -4.60155368e-01 -2.75205970e-01 ... -4.86668758e-02 8.42345599e-03 -1.49789274e-01] [-4.07296062e-01 -6.19902551e-01 -3.44159484e-01 ... -1.78785011e-01 -2.28392333e-02 -3.84675227e-02] [-1.72219336e-01 -4.32174623e-01 -4.59425241e-01 ... 1.13849621e-02 1.23835050e-01 1.43407047e-01] ... [ 9.56696048e-02 3.33665535e-02 -1.21012196e-01 ... -7.91774318e-02 -3.23553860e-01 -2.62802154e-01] [ 8.96922275e-02 -1.24534853e-02 -1.24564953e-01 ... -1.83047131e-01 -2.35549152e-01 -4.28376682e-02] [ 7.50467107e-02 -4.80929688e-02 -5.44409044e-02 ... -2.52878368e-01 -2.48898670e-01 -9.22248811e-02]] [[-2.04934195e-01 -3.43269914e-01 -4.36560839e-01 ... -1.42218173e-01 -1.81593634e-02 -7.87435994e-02] [-3.24690700e-01 -5.44948876e-01 -4.12667811e-01 ... -9.87801701e-02 -3.12318373e-02 1.86172239e-02] [-1.09914832e-01 -4.19885188e-01 -4.97535527e-01 ... -7.83425785e-05 1.38591439e-01 1.42438486e-01] ... [ 1.21396072e-01 -7.11979493e-02 1.31844968e-01 ... -1.94127828e-01 -1.92465603e-01 -1.81849897e-01] [ 3.03572297e-01 -3.41747925e-02 7.79148489e-02 ... -2.11944833e-01 -1.08692378e-01 -2.72571780e-02] [-6.19594082e-02 -2.19580635e-01 -1.27279058e-01 ... -1.18316777e-01 3.97601351e-02 -9.42491144e-02]]] [[[-4.17020768e-02 2.85440721e-02 -6.02814481e-02 ... -9.10165831e-02 -2.21717749e-02 9.74221826e-02] [-1.75898924e-01 -5.02513014e-02 -2.56121963e-01 ... -4.39231992e-01 -2.91367471e-01 -2.29343474e-01] [-1.35534301e-01 -1.74179450e-02 -1.68418899e-01 ... -3.57322693e-01 -2.10705727e-01 -1.66287459e-02] ... [-2.61638686e-02 -1.86219588e-01 -2.04971567e-01 ... -2.94564534e-02 -1.77420497e-01 -1.22699983e-01] [ 2.75025249e-01 -6.50243834e-02 -1.77052945e-01 ... 
6.56235516e-02 -3.12491745e-01 -1.30817473e-01] [ 3.20216012e-03 -1.49444237e-01 -9.65116024e-02 ... 1.99796870e-01 -5.65981455e-02 -2.57041752e-01]] [[-3.74747515e-01 -2.08913699e-01 -8.02817270e-02 ... -1.98818102e-01 -1.95413396e-01 -9.41707268e-02] [-3.48825246e-01 -1.93254262e-01 -3.04519624e-01 ... -5.13410032e-01 -4.67266858e-01 -3.53696227e-01] [-2.99185067e-01 -3.63480151e-02 -1.77274644e-01 ... -4.80651528e-01 -3.62174004e-01 -7.47573748e-02] ... [-1.57475248e-01 -1.97848707e-01 -1.50414050e-01 ... -1.03258371e-01 -1.47089347e-01 3.86776403e-02] [ 9.72380266e-02 1.44930795e-01 -7.26649910e-02 ... 6.71201423e-02 -1.82995632e-01 4.55823354e-02] [-8.08965042e-02 -4.15385887e-02 -6.48915693e-02 ... 1.15228891e-01 -5.74700423e-02 -1.41527668e-01]] [[-4.54192340e-01 -3.92034739e-01 -1.59066543e-01 ... -2.57974297e-01 -4.27909315e-01 -1.94664195e-01] [-1.47510976e-01 -1.35700479e-01 -1.59329787e-01 ... -3.49729568e-01 -3.27876210e-01 -1.32447898e-01] [-1.66741654e-01 1.76983187e-03 -1.39182061e-01 ... -3.16798240e-01 -3.12595785e-01 4.03053574e-02] ... [-2.69874841e-01 -2.22813070e-01 -1.43197551e-01 ... -6.78232536e-02 -3.44202250e-01 -1.90432385e-01] [-7.49468803e-02 2.37059928e-02 -2.08354682e-01 ... 1.28729343e-02 -2.35847771e-01 1.73662640e-02] [-1.24482565e-01 -2.20540930e-02 -5.48743159e-02 ... -5.60416142e-04 -6.07789047e-02 -1.60852253e-01]] ... [[ 3.97003582e-03 -2.68378854e-02 -1.31250486e-01 ... 6.93490803e-02 5.37346080e-02 -1.11861914e-01] [ 3.86105329e-02 -4.50947545e-02 -3.28126281e-01 ... 4.38235439e-02 -3.46045196e-02 -4.83806543e-02] [-8.26635063e-02 6.29291162e-02 -4.00424339e-02 ... -1.17276177e-01 -3.29744935e-01 -2.71321863e-01] ... [ 5.24930954e-02 1.01636268e-01 9.26029310e-02 ... 1.98704600e-01 2.58853525e-01 -9.17751691e-04] [-7.46096522e-02 -1.55933291e-01 -1.22078434e-01 ... 3.12974334e-01 4.09751505e-01 1.10159673e-01] [-3.26555111e-02 -1.93437785e-01 -8.05602893e-02 ... 
5.82796037e-02 1.56645134e-01 1.69370085e-01]] [[-2.23418325e-01 -7.51262605e-02 -4.13132682e-02 ... -1.32634476e-01 -1.90257043e-01 -3.65796834e-01] [-8.53590965e-02 -1.30712986e-01 -2.27230266e-01 ... -4.21018451e-02 -7.75713101e-02 -2.36132935e-01] [-1.26473352e-01 -3.52294780e-02 -5.78981787e-02 ... -3.41190100e-02 -2.99095035e-01 -3.40205908e-01] ... [ 1.02557614e-01 1.82756841e-01 2.14207813e-01 ... 2.31499985e-01 2.15966865e-01 1.55713066e-01] [ 6.14729896e-02 5.97892776e-02 1.41006052e-01 ... 3.71703118e-01 1.77435845e-01 3.44787701e-03] [-6.14223210e-03 -1.87686786e-01 -1.35046229e-01 ... 1.49218351e-01 -3.29493359e-02 3.85397021e-03]] [[-1.48547083e-01 -1.30324781e-01 9.94076766e-03 ... -3.51241350e-01 -3.73151183e-01 -4.16174442e-01] [-1.28352597e-01 -2.38170162e-01 -1.35713443e-01 ... -1.67372420e-01 -1.45229951e-01 -1.92767039e-01] [-2.38683954e-01 -2.08460733e-01 -2.44774818e-01 ... -1.52326480e-01 -2.81957418e-01 -3.44480187e-01] ... [-4.85554971e-02 6.67506680e-02 5.05870283e-02 ... 2.00184569e-01 1.73683658e-01 2.30708063e-01] [-4.66368236e-02 7.22414926e-02 2.17812229e-02 ... 2.45708734e-01 1.38695976e-02 -4.69963737e-02] [ 1.15726650e-01 2.38307454e-02 -1.54940873e-01 ... 1.15958899e-01 -1.47738278e-01 -1.81327462e-01]]] [[[-1.13153078e-01 1.54510498e-01 3.99646759e-02 ... 2.78089583e-01 2.77925968e-01 3.33338797e-01] [ 1.46432474e-01 2.59760827e-01 1.25250489e-01 ... 1.87128738e-01 2.22069070e-01 4.67198640e-01] [ 3.35327834e-02 1.78065911e-01 1.94274988e-02 ... -1.22995466e-01 1.00055888e-01 4.36698765e-01] ... [-7.85140991e-02 1.26977395e-02 -1.07586913e-01 ... 2.79399287e-02 -8.36305246e-02 1.82272211e-01] [ 3.26908231e-01 3.37901264e-01 3.80707756e-02 ... -1.56888366e-01 -1.65189773e-01 2.29173183e-01] [ 3.47301334e-01 1.81549236e-01 -1.19655959e-01 ... 9.95211396e-03 1.84839871e-02 1.83858693e-01]] [[-8.80141705e-02 1.33221835e-01 7.62521625e-02 ... 
1.65392399e-01 8.37890580e-02 2.80074954e-01] [ 1.61254376e-01 2.96701878e-01 1.25823393e-01 ... 1.27288178e-01 1.93585396e-01 4.21271950e-01] [-4.90662344e-02 1.40792400e-01 -6.07263818e-02 ... -1.25414252e-01 2.66072154e-01 3.98887783e-01] ... [-1.62604198e-01 -1.48044843e-02 -3.47472787e-01 ... -1.58810347e-01 -1.48850977e-01 1.82312846e-01] [ 1.77263077e-02 6.72334507e-02 -3.05268794e-01 ... -1.64959982e-01 -3.82860929e-01 -6.63922029e-03] [ 1.52470872e-01 -6.81099817e-02 -4.09562856e-01 ... 2.86644250e-02 -2.02678144e-01 -6.33158907e-02]] [[-2.08298966e-01 6.50324300e-02 7.55713657e-02 ... 4.35850844e-02 -1.39573723e-01 2.03452483e-01] [-1.03613185e-02 8.12991634e-02 4.25464697e-02 ... 3.00443973e-02 -7.15076039e-03 3.32138807e-01] [-7.38871982e-03 1.50153667e-01 8.73068199e-02 ... 3.84165067e-03 1.82408899e-01 3.04645360e-01] ... [-4.05556679e-01 -8.29155445e-02 -3.82378846e-01 ... -3.69180113e-01 -1.72064602e-01 -2.94747744e-02] [-3.33076209e-01 -8.16573277e-02 -5.05155802e-01 ... -6.22847602e-02 -3.38297039e-01 -1.16082065e-01] [-4.92089689e-02 2.06993129e-02 -3.11898023e-01 ... 2.72568911e-01 -7.80419260e-02 -9.08309408e-03]] ... [[-1.70781165e-01 -1.98588729e-01 2.31457781e-02 ... 1.17504448e-01 3.50977778e-01 3.79016042e-01] [-2.24378839e-01 -5.39961532e-02 1.74192414e-01 ... -1.03831254e-01 1.58692580e-02 -6.10908046e-02] [ 7.22303987e-02 6.48928657e-02 7.35310987e-02 ... -3.03336740e-01 -2.14845687e-01 -1.77980423e-01] ... [ 2.18937486e-01 3.53834301e-01 3.20950091e-01 ... -5.59759401e-02 1.19004525e-01 -1.25195300e-02] [ 3.28909129e-01 4.49543059e-01 9.45366547e-02 ... -7.64273629e-02 1.92014500e-01 -1.30021110e-01] [ 5.31513058e-02 2.39971697e-01 2.11330615e-02 ... 7.91113228e-02 3.93632114e-01 1.11044265e-01]] [[-2.02299118e-01 -1.75878018e-01 9.02321041e-02 ... 2.60111034e-01 3.75195146e-01 3.86213332e-01] [-1.64366856e-01 -1.42964542e-01 1.29506141e-01 ... 
6.71903938e-02 1.98082700e-01 1.71271145e-01] [-1.23072993e-02 -2.29703397e-01 -1.72319040e-01 ... -2.65787877e-02 -7.85642713e-02 3.31636053e-03] ... [ 9.84459221e-02 -7.94093162e-02 -2.90865060e-02 ... 1.77107453e-01 2.50800580e-01 1.38394609e-01] [-3.98417376e-02 1.26417931e-02 -1.61380947e-01 ... -9.51025933e-02 3.69649082e-02 -1.21278362e-02] [-3.24398071e-01 -1.30588919e-01 -1.36927530e-01 ... 1.86157487e-02 1.36949092e-01 9.87994075e-02]] [[-2.58537799e-01 -3.66465628e-01 1.01331204e-01 ... 8.71856213e-02 -3.31174210e-02 7.47049674e-02] [-1.57648385e-01 -2.43538499e-01 1.86367318e-01 ... 5.75875789e-02 1.04180142e-01 -4.59729210e-02] [-2.11019009e-01 -4.33863312e-01 -3.25275004e-01 ... -6.43832684e-02 -2.12734163e-01 -2.80855566e-01] ... [ 1.91181913e-01 1.69664204e-01 2.32855439e-01 ... 1.76316559e-01 2.03670040e-01 1.46791950e-01] [-6.32038713e-02 9.47502181e-02 1.12706758e-01 ... -4.48960476e-02 -4.44070324e-02 5.32058477e-02] [-2.25720942e-01 -3.19699943e-02 5.64147495e-02 ... 3.02536450e-02 -2.92454585e-02 1.13243526e-02]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': [1, 1, 1], 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5755.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %5 : int[] = prim::Constant[value=[1, 1, 1]]() %6 : int[] = prim::Constant[value=[3, 3, 3]]() %7 : Tensor = aten::avg_pool3d(%x, %6, %5, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%7) fw_re: [[[[[-9.18602496e-02 -5.23219034e-02 1.45185187e-01 ... -1.64273046e-02 1.38779029e-01 7.45133832e-02] [-1.97267726e-01 -1.94562599e-01 1.72599196e-01 ... 3.15470509e-02 1.15369573e-01 4.69706915e-02] [-2.04255283e-01 -2.28280947e-01 5.42668141e-02 ... -2.90906355e-02 -2.03089677e-02 -9.32145584e-03] ... [-7.33208060e-02 -1.98413953e-01 -2.40726158e-01 ... 3.63739967e-01 1.54172167e-01 6.50708228e-02] [-7.65945613e-02 -1.16779506e-01 -7.32877702e-02 ... 2.68560767e-01 1.36348337e-01 6.43536448e-02] [ 4.07322198e-02 4.23915572e-02 8.15682560e-02 ... 1.19795337e-01 5.26040755e-02 3.59457210e-02]] [[-4.91445251e-02 7.30397105e-02 2.79677272e-01 ... -3.16248788e-03 1.94213782e-02 1.10655492e-02] [-1.04082100e-01 -3.79271582e-02 2.69905567e-01 ... -2.05155648e-03 7.86255952e-03 -1.45861087e-02] [-2.65544116e-01 -2.34396473e-01 1.79606583e-02 ... -1.17102906e-01 -7.88281560e-02 -5.19630499e-02] ... [-1.87704697e-01 -2.84030586e-01 -2.71446556e-01 ... 4.33797330e-01 4.66320477e-02 -3.43115851e-02] [-1.02982476e-01 -9.11036879e-02 -9.34460536e-02 ... 2.67625928e-01 8.02433677e-03 -6.27796799e-02] [ 8.87321979e-02 1.33424342e-01 9.49601606e-02 ... 4.30799946e-02 -1.09459631e-01 -7.58140907e-02]] [[-1.29921377e-01 1.81426257e-02 2.26000965e-01 ... 
5.54106683e-02 3.27850170e-02 -3.77567522e-02] [ 1.20744877e-03 1.71310976e-01 3.66232246e-01 ... 5.33585064e-02 1.64260551e-01 1.53191881e-02] [-1.52553201e-01 9.11167264e-02 1.45705909e-01 ... -5.83627075e-02 1.02800138e-01 7.44136348e-02] ... [-2.41727933e-01 -1.85706735e-01 1.17983734e-02 ... 3.33786041e-01 3.74163082e-03 -1.57527253e-01] [-2.46265829e-01 -4.03301716e-02 1.37605757e-01 ... 6.44340441e-02 -1.03563316e-01 -1.62915930e-01] [-7.83458427e-02 5.36062233e-02 8.02195370e-02 ... -4.62646410e-02 -1.71032488e-01 -1.07097954e-01]] ... [[-8.18375051e-02 -1.22185692e-01 -9.42691639e-02 ... -5.22089861e-02 6.87896684e-02 -4.91334945e-02] [ 2.86766570e-02 -7.97798708e-02 -2.03861594e-01 ... -6.81294128e-03 2.17230599e-02 -9.74701419e-02] [ 2.29915395e-01 1.98004335e-01 1.18502341e-01 ... -8.33939165e-02 -4.52778190e-02 -1.31400287e-01] ... [-1.25766441e-01 -1.52062804e-01 -1.14154354e-01 ... 1.61915615e-01 1.64224789e-01 1.78712100e-01] [-5.73503301e-02 -1.42947748e-01 -1.58151925e-01 ... 2.72998840e-01 2.37455279e-01 2.43223220e-01] [-9.00700986e-02 -2.37696618e-01 -2.78837740e-01 ... 1.59691975e-01 1.02249451e-01 1.39875665e-01]] [[-8.48769676e-03 -4.98282574e-02 -1.41870320e-01 ... -1.53318018e-01 9.36350748e-02 3.36979926e-02] [ 3.53927277e-02 -3.24418359e-02 -2.23908082e-01 ... -6.16307184e-02 1.23205960e-01 7.51870275e-02] [ 1.74668983e-01 1.51833236e-01 -2.35415827e-02 ... -5.94273321e-02 9.70370620e-02 8.12572911e-02] ... [-2.49128059e-01 -2.00671405e-01 -1.25869736e-01 ... 7.50477165e-02 -7.91546255e-02 -2.09888257e-02] [-9.19978097e-02 -1.10126354e-01 -9.42145512e-02 ... 2.12678626e-01 7.34207928e-02 4.89462055e-02] [-1.08613200e-01 -1.10562176e-01 -1.99585408e-02 ... 1.75465673e-01 -9.59480274e-03 -7.72534385e-02]] [[ 4.42214124e-03 -2.77025457e-02 -1.17251158e-01 ... -9.03929770e-02 7.31198564e-02 6.70247748e-02] [ 1.04046660e-02 -3.29440162e-02 -1.59696743e-01 ... 
-2.13617794e-02 1.19207032e-01 1.03394732e-01] [ 2.41001584e-02 1.91100501e-02 -1.14922747e-01 ... -5.37535697e-02 2.07683165e-02 4.71347719e-02] ... [-7.88026974e-02 -8.94325897e-02 -8.20782185e-02 ... 1.59124270e-01 -3.00919898e-02 -7.79954493e-02] [-9.70163867e-02 -1.47525147e-01 -4.61131372e-02 ... 9.76887345e-02 7.14886785e-02 -2.76872590e-02] [-1.40321836e-01 -1.66328564e-01 -1.77646568e-03 ... 4.29656692e-02 -6.27131388e-02 -1.63764417e-01]]] [[[ 6.77707046e-02 6.28323015e-03 2.91844876e-03 ... 1.06305778e-01 2.44817704e-01 1.84552774e-01] [ 1.24109574e-01 1.02241933e-01 -1.27673503e-02 ... -5.34735993e-03 2.31676266e-01 1.66777477e-01] [ 9.09004584e-02 1.08724222e-01 2.20321887e-03 ... 1.10920466e-01 1.48928449e-01 9.49491784e-02] ... [ 1.87378198e-01 9.31028724e-02 7.58090839e-02 ... 1.05104961e-01 8.23217407e-02 8.98870267e-03] [ 1.04394421e-01 4.35173959e-02 -3.55589874e-02 ... 7.58756921e-02 2.06709221e-01 6.57723099e-02] [-5.10918200e-02 -1.05787188e-01 -1.04787216e-01 ... 1.12816066e-01 2.20745504e-01 1.41158685e-01]] [[ 1.22358508e-01 2.75000501e-02 9.03430656e-02 ... -4.94881673e-03 1.64163366e-01 7.75110871e-02] [ 7.10232630e-02 3.47870886e-02 3.66546176e-02 ... -6.25992864e-02 2.05119535e-01 1.13737330e-01] [ 5.25925793e-02 6.58570975e-02 -3.37176360e-02 ... 5.10749035e-02 9.19256359e-02 2.78583765e-02] ... [ 1.13918163e-01 5.76245077e-02 1.16355773e-02 ... 1.96424127e-01 1.94831640e-01 8.02689418e-02] [ 3.65791619e-02 1.73236299e-02 -1.41476579e-02 ... 1.47970662e-01 2.02504620e-01 -5.22421487e-03] [-3.70402969e-02 -5.18967584e-02 -2.49169040e-02 ... 1.29997656e-01 1.45313904e-01 2.49059163e-02]] [[ 1.23326570e-01 8.18323195e-02 2.30174273e-01 ... -9.97568741e-02 1.03955992e-01 -8.24557021e-02] [ 7.88648799e-02 8.30670744e-02 1.52531967e-01 ... -1.16991691e-01 2.20279247e-01 -3.34750004e-02] [ 1.01485148e-01 1.39676705e-01 1.16452977e-01 ... -3.96644920e-02 1.48410097e-01 -5.86866774e-02] ... [ 2.05646962e-01 2.99950927e-01 3.04807872e-02 ... 
3.42233717e-01 2.38029033e-01 2.03046486e-01] [-3.04430947e-02 9.79678705e-03 -1.06578181e-02 ... 1.72454908e-01 1.76002711e-01 6.03529401e-02] [-5.99694513e-02 -8.35193470e-02 -6.00209050e-02 ... 9.12059471e-02 1.05983153e-01 6.72836080e-02]] ... [[-1.00146025e-01 -1.12432703e-01 -1.00305796e-01 ... 3.54137197e-02 -1.34767890e-01 -1.45122319e-01] [ 2.63009518e-02 1.03219703e-01 5.20345457e-02 ... 1.25754429e-02 -5.75417690e-02 -9.74860117e-02] [ 5.03245816e-02 4.17996675e-01 3.13045830e-01 ... -6.49732426e-02 3.41923647e-02 6.70816824e-02] ... [-1.66642025e-01 -3.93618137e-01 -4.82154787e-01 ... 7.65141547e-02 2.61688471e-01 1.45044282e-01] [-1.58008844e-01 -3.40854377e-01 -5.78534842e-01 ... 2.65159845e-01 4.30863112e-01 3.32999349e-01] [-1.09156467e-01 -2.21392930e-01 -2.37090304e-01 ... 7.42749721e-02 1.75802156e-01 1.78824112e-01]] [[-1.14193738e-01 -1.28415957e-01 -1.90266207e-01 ... -1.38892159e-02 -1.28150612e-01 -1.52380452e-01] [ 4.46882506e-04 4.28563394e-02 1.74588840e-02 ... -5.36768585e-02 -7.84155354e-02 -1.42213210e-01] [-1.36269659e-01 3.23819108e-02 1.23674244e-01 ... -8.15155953e-02 -7.95083344e-02 -1.08097449e-01] ... [-2.88314819e-01 -4.56464946e-01 -4.65549827e-01 ... -5.87515021e-03 2.56092042e-01 1.02407075e-01] [-2.15631858e-01 -3.22513342e-01 -3.56572598e-01 ... 2.05699429e-01 3.06126952e-01 1.27367437e-01] [-1.68423101e-01 -2.31743783e-01 -7.05032721e-02 ... 9.86992866e-02 7.57693350e-02 -3.19688655e-02]] [[ 3.48003954e-02 -3.96451738e-04 -1.78755924e-01 ... -1.07356787e-01 -2.05312461e-01 -1.45277336e-01] [ 1.58536896e-01 1.43262416e-01 2.75133625e-02 ... -1.15541585e-01 -1.37473226e-01 -1.31297484e-01] [-5.38723916e-02 -2.12075450e-02 5.36089204e-02 ... 2.80640298e-03 -9.06172916e-02 -1.41250208e-01] ... [-2.39851281e-01 -3.93360823e-01 -3.18426698e-01 ... -7.06505105e-02 4.86103110e-02 -5.53786661e-03] [-1.07920818e-01 -1.48441628e-01 -1.36562884e-01 ... 
6.63938150e-02 4.79092188e-02 -4.13317140e-03] [-1.52146116e-01 -1.72787175e-01 -7.03678131e-02 ... 1.35242671e-01 2.17708386e-03 -7.11984485e-02]]] [[[ 3.60013917e-02 9.59541500e-02 6.53633699e-02 ... 1.35928616e-02 1.37445435e-01 1.16662771e-01] [ 1.74066108e-02 1.19829528e-01 3.29958908e-02 ... -3.90367843e-02 1.24369942e-01 1.14199832e-01] [ 8.43735114e-02 2.69770563e-01 2.99937874e-01 ... 1.01362988e-01 3.09907347e-01 2.31490508e-01] ... [-2.13591918e-01 -2.31907383e-01 -5.61565943e-02 ... 2.34053195e-01 1.72918260e-01 1.24481551e-01] [-1.26084641e-01 -1.27923220e-01 -3.66143836e-03 ... 1.44488543e-01 -6.15699776e-02 -2.99049038e-02] [-2.87906285e-02 -5.61126173e-02 5.59865460e-02 ... 7.80602172e-02 -1.26959577e-01 -8.38189423e-02]] [[ 1.83712109e-03 8.60289037e-02 2.69702710e-02 ... -1.82895347e-01 1.55923530e-01 1.98659614e-01] [-2.38504969e-02 1.12615958e-01 -1.23211825e-02 ... -2.08537295e-01 2.22006902e-01 2.26556942e-01] [-7.14704720e-03 1.18458524e-01 1.60116956e-01 ... 4.85275649e-02 5.02528250e-01 3.48434687e-01] ... [-1.97621167e-01 -2.03930467e-01 -1.80394426e-01 ... 8.60846043e-02 -6.16886839e-02 -8.34395811e-02] [-2.16045946e-01 -2.02905372e-01 -4.00896333e-02 ... 5.98019063e-02 -1.69928253e-01 -2.11038008e-01] [-1.63459137e-01 -2.11982086e-01 4.53321300e-02 ... 2.38384884e-02 -2.00662389e-01 -2.12496251e-01]] [[ 2.47383919e-02 1.04343265e-01 1.04767889e-01 ... -2.80741185e-01 4.82490957e-02 1.66840747e-01] [ 4.51138429e-02 1.89978346e-01 2.04203472e-01 ... -4.43253130e-01 3.92664894e-02 1.72741920e-01] [ 1.96752816e-01 2.74073392e-01 4.69334632e-01 ... -1.74419180e-01 1.14851139e-01 1.51338026e-01] ... [-1.47679746e-01 -9.97681022e-02 -9.16424617e-02 ... 1.25681221e-01 -5.88144101e-02 -8.01767632e-02] [-1.12636618e-01 -3.92994955e-02 6.35474846e-02 ... 1.68055311e-01 -5.69146387e-02 -2.00857848e-01] [-1.68357659e-02 3.54853389e-03 3.99983674e-02 ... 1.17814191e-01 -1.18505254e-01 -2.03286856e-01]] ... 
[[-9.17569473e-02 -1.70167357e-01 -8.92430246e-02 ... -1.55602256e-02 -7.77986944e-02 -1.03086919e-01] [-5.68210185e-02 -2.10066617e-01 -8.34826753e-02 ... -1.00762270e-01 -8.54801610e-02 -1.20891601e-01] [ 1.79170091e-02 -1.80382535e-01 -1.57907933e-01 ... -1.35282010e-01 -1.96701810e-01 -1.65060610e-01] ... [-3.63491744e-01 -4.52692747e-01 -2.10488722e-01 ... 1.31665438e-01 1.56044781e-01 2.00582996e-01] [-2.75865674e-01 -3.77155155e-01 -6.16193675e-02 ... 1.91670880e-01 3.26299161e-01 1.10815860e-01] [-1.64183736e-01 -2.80049235e-01 -2.16087271e-02 ... 1.01951808e-01 3.20354611e-01 1.36563957e-01]] [[-1.16980307e-01 -1.87113553e-01 -1.99249131e-03 ... 3.43571678e-02 -1.49453580e-01 -2.76342988e-01] [-1.25586286e-01 -2.67652303e-01 -7.02857152e-02 ... -8.21615979e-02 -1.96380481e-01 -3.22515637e-01] [-8.68633762e-02 -2.66644120e-01 -1.49977714e-01 ... -1.38091341e-01 -2.57771850e-01 -2.76411176e-01] ... [-3.13194990e-01 -4.53426570e-01 -2.85586804e-01 ... 5.00741638e-02 3.31074029e-01 2.93761492e-01] [-1.13077536e-01 -2.13208646e-01 1.45044550e-02 ... 3.30285393e-02 4.28558677e-01 2.11350858e-01] [ 3.42213064e-02 -6.82925747e-04 1.27032742e-01 ... 2.97729801e-02 3.71576816e-01 1.90729499e-01]] [[-7.71810189e-02 -9.81225967e-02 1.16383024e-01 ... -3.38558815e-02 -3.82110514e-02 -1.40000924e-01] [-1.50054067e-01 -2.16953859e-01 2.94724964e-02 ... -1.02609873e-01 -6.56285882e-02 -1.63421363e-01] [-6.10092618e-02 -1.68456331e-01 -4.98996302e-02 ... -1.76759616e-01 -2.47684941e-01 -2.31769532e-01] ... [-2.27574170e-01 -3.86009216e-01 -2.22523987e-01 ... 1.29742757e-03 2.01361343e-01 1.31653801e-01] [-1.38425514e-01 -2.17621967e-01 4.87056896e-02 ... 4.19767536e-02 2.22129226e-01 6.42938688e-02] [-4.73335274e-02 -4.19286638e-02 1.42728180e-01 ... 7.19530955e-02 2.40556017e-01 1.21744826e-01]]]]]; ov_res: [[[[[-9.18602496e-02 -5.23219034e-02 1.45185187e-01 ... -1.64273046e-02 1.38779029e-01 7.45133832e-02] [-1.97267726e-01 -1.94562599e-01 1.72599196e-01 ... 
3.15470509e-02 1.15369573e-01 4.69706915e-02] [-2.04255283e-01 -2.28280947e-01 5.42668141e-02 ... -2.90906355e-02 -2.03089677e-02 -9.32145584e-03] ... [-7.33208060e-02 -1.98413953e-01 -2.40726158e-01 ... 3.63739967e-01 1.54172167e-01 6.50708228e-02] [-7.65945613e-02 -1.16779506e-01 -7.32877702e-02 ... 2.68560767e-01 1.36348337e-01 6.43536448e-02] [ 4.07322198e-02 4.23915572e-02 8.15682560e-02 ... 1.19795337e-01 5.26040755e-02 3.59457210e-02]] [[-4.91445251e-02 7.30397105e-02 2.79677272e-01 ... -3.16248788e-03 1.94213782e-02 1.10655492e-02] [-1.04082100e-01 -3.79271582e-02 2.69905567e-01 ... -2.05155648e-03 7.86255952e-03 -1.45861087e-02] [-2.65544116e-01 -2.34396473e-01 1.79606583e-02 ... -1.17102906e-01 -7.88281560e-02 -5.19630499e-02] ... [-1.87704697e-01 -2.84030586e-01 -2.71446556e-01 ... 4.33797330e-01 4.66320477e-02 -3.43115851e-02] [-1.02982476e-01 -9.11036879e-02 -9.34460536e-02 ... 2.67625928e-01 8.02433677e-03 -6.27796799e-02] [ 8.87321979e-02 1.33424342e-01 9.49601606e-02 ... 4.30799946e-02 -1.09459631e-01 -7.58140907e-02]] [[-1.29921377e-01 1.81426257e-02 2.26000965e-01 ... 5.54106683e-02 3.27850170e-02 -3.77567522e-02] [ 1.20744877e-03 1.71310976e-01 3.66232246e-01 ... 5.33585064e-02 1.64260551e-01 1.53191881e-02] [-1.52553201e-01 9.11167264e-02 1.45705909e-01 ... -5.83627075e-02 1.02800138e-01 7.44136348e-02] ... [-2.41727933e-01 -1.85706735e-01 1.17983734e-02 ... 3.33786041e-01 3.74163082e-03 -1.57527253e-01] [-2.46265829e-01 -4.03301716e-02 1.37605757e-01 ... 6.44340441e-02 -1.03563316e-01 -1.62915930e-01] [-7.83458427e-02 5.36062233e-02 8.02195370e-02 ... -4.62646410e-02 -1.71032488e-01 -1.07097954e-01]] ... [[-8.18375051e-02 -1.22185692e-01 -9.42691639e-02 ... -5.22089861e-02 6.87896684e-02 -4.91334945e-02] [ 2.86766570e-02 -7.97798708e-02 -2.03861594e-01 ... -6.81294128e-03 2.17230599e-02 -9.74701419e-02] [ 2.29915395e-01 1.98004335e-01 1.18502341e-01 ... -8.33939165e-02 -4.52778190e-02 -1.31400287e-01] ... 
[-1.25766441e-01 -1.52062804e-01 -1.14154354e-01 ... 1.61915615e-01 1.64224789e-01 1.78712100e-01] [-5.73503301e-02 -1.42947748e-01 -1.58151925e-01 ... 2.72998840e-01 2.37455279e-01 2.43223220e-01] [-9.00700986e-02 -2.37696618e-01 -2.78837740e-01 ... 1.59691975e-01 1.02249451e-01 1.39875665e-01]] [[-8.48769676e-03 -4.98282574e-02 -1.41870320e-01 ... -1.53318018e-01 9.36350748e-02 3.36979926e-02] [ 3.53927277e-02 -3.24418359e-02 -2.23908082e-01 ... -6.16307184e-02 1.23205960e-01 7.51870275e-02] [ 1.74668983e-01 1.51833236e-01 -2.35415827e-02 ... -5.94273321e-02 9.70370620e-02 8.12572911e-02] ... [-2.49128059e-01 -2.00671405e-01 -1.25869736e-01 ... 7.50477165e-02 -7.91546255e-02 -2.09888257e-02] [-9.19978097e-02 -1.10126354e-01 -9.42145512e-02 ... 2.12678626e-01 7.34207928e-02 4.89462055e-02] [-1.08613200e-01 -1.10562176e-01 -1.99585408e-02 ... 1.75465673e-01 -9.59480274e-03 -7.72534385e-02]] [[ 4.42214124e-03 -2.77025457e-02 -1.17251158e-01 ... -9.03929770e-02 7.31198564e-02 6.70247748e-02] [ 1.04046660e-02 -3.29440162e-02 -1.59696743e-01 ... -2.13617794e-02 1.19207032e-01 1.03394732e-01] [ 2.41001584e-02 1.91100501e-02 -1.14922747e-01 ... -5.37535697e-02 2.07683165e-02 4.71347719e-02] ... [-7.88026974e-02 -8.94325897e-02 -8.20782185e-02 ... 1.59124270e-01 -3.00919898e-02 -7.79954493e-02] [-9.70163867e-02 -1.47525147e-01 -4.61131372e-02 ... 9.76887345e-02 7.14886785e-02 -2.76872590e-02] [-1.40321836e-01 -1.66328564e-01 -1.77646568e-03 ... 4.29656692e-02 -6.27131388e-02 -1.63764417e-01]]] [[[ 6.77707046e-02 6.28323015e-03 2.91844876e-03 ... 1.06305778e-01 2.44817704e-01 1.84552774e-01] [ 1.24109574e-01 1.02241933e-01 -1.27673503e-02 ... -5.34735993e-03 2.31676266e-01 1.66777477e-01] [ 9.09004584e-02 1.08724222e-01 2.20321887e-03 ... 1.10920466e-01 1.48928449e-01 9.49491784e-02] ... [ 1.87378198e-01 9.31028724e-02 7.58090839e-02 ... 1.05104961e-01 8.23217407e-02 8.98870267e-03] [ 1.04394421e-01 4.35173959e-02 -3.55589874e-02 ... 
7.58756921e-02 2.06709221e-01 6.57723099e-02] [-5.10918200e-02 -1.05787188e-01 -1.04787216e-01 ... 1.12816066e-01 2.20745504e-01 1.41158685e-01]] [[ 1.22358508e-01 2.75000501e-02 9.03430656e-02 ... -4.94881673e-03 1.64163366e-01 7.75110871e-02] [ 7.10232630e-02 3.47870886e-02 3.66546176e-02 ... -6.25992864e-02 2.05119535e-01 1.13737330e-01] [ 5.25925793e-02 6.58570975e-02 -3.37176360e-02 ... 5.10749035e-02 9.19256359e-02 2.78583765e-02] ... [ 1.13918163e-01 5.76245077e-02 1.16355773e-02 ... 1.96424127e-01 1.94831640e-01 8.02689418e-02] [ 3.65791619e-02 1.73236299e-02 -1.41476579e-02 ... 1.47970662e-01 2.02504620e-01 -5.22421487e-03] [-3.70402969e-02 -5.18967584e-02 -2.49169040e-02 ... 1.29997656e-01 1.45313904e-01 2.49059163e-02]] [[ 1.23326570e-01 8.18323195e-02 2.30174273e-01 ... -9.97568741e-02 1.03955992e-01 -8.24557021e-02] [ 7.88648799e-02 8.30670744e-02 1.52531967e-01 ... -1.16991691e-01 2.20279247e-01 -3.34750004e-02] [ 1.01485148e-01 1.39676705e-01 1.16452977e-01 ... -3.96644920e-02 1.48410097e-01 -5.86866774e-02] ... [ 2.05646962e-01 2.99950927e-01 3.04807872e-02 ... 3.42233717e-01 2.38029033e-01 2.03046486e-01] [-3.04430947e-02 9.79678705e-03 -1.06578181e-02 ... 1.72454908e-01 1.76002711e-01 6.03529401e-02] [-5.99694513e-02 -8.35193470e-02 -6.00209050e-02 ... 9.12059471e-02 1.05983153e-01 6.72836080e-02]] ... [[-1.00146025e-01 -1.12432703e-01 -1.00305796e-01 ... 3.54137197e-02 -1.34767890e-01 -1.45122319e-01] [ 2.63009518e-02 1.03219703e-01 5.20345457e-02 ... 1.25754429e-02 -5.75417690e-02 -9.74860117e-02] [ 5.03245816e-02 4.17996675e-01 3.13045830e-01 ... -6.49732426e-02 3.41923647e-02 6.70816824e-02] ... [-1.66642025e-01 -3.93618137e-01 -4.82154787e-01 ... 7.65141547e-02 2.61688471e-01 1.45044282e-01] [-1.58008844e-01 -3.40854377e-01 -5.78534842e-01 ... 2.65159845e-01 4.30863112e-01 3.32999349e-01] [-1.09156467e-01 -2.21392930e-01 -2.37090304e-01 ... 7.42749721e-02 1.75802156e-01 1.78824112e-01]] [[-1.14193738e-01 -1.28415957e-01 -1.90266207e-01 ... 
-1.38892159e-02 -1.28150612e-01 -1.52380452e-01] [ 4.46882506e-04 4.28563394e-02 1.74588840e-02 ... -5.36768585e-02 -7.84155354e-02 -1.42213210e-01] [-1.36269659e-01 3.23819108e-02 1.23674244e-01 ... -8.15155953e-02 -7.95083344e-02 -1.08097449e-01] ... [-2.88314819e-01 -4.56464946e-01 -4.65549827e-01 ... -5.87515021e-03 2.56092042e-01 1.02407075e-01] [-2.15631858e-01 -3.22513342e-01 -3.56572598e-01 ... 2.05699429e-01 3.06126952e-01 1.27367437e-01] [-1.68423101e-01 -2.31743783e-01 -7.05032721e-02 ... 9.86992866e-02 7.57693350e-02 -3.19688655e-02]] [[ 3.48003954e-02 -3.96451738e-04 -1.78755924e-01 ... -1.07356787e-01 -2.05312461e-01 -1.45277336e-01] [ 1.58536896e-01 1.43262416e-01 2.75133625e-02 ... -1.15541585e-01 -1.37473226e-01 -1.31297484e-01] [-5.38723916e-02 -2.12075450e-02 5.36089204e-02 ... 2.80640298e-03 -9.06172916e-02 -1.41250208e-01] ... [-2.39851281e-01 -3.93360823e-01 -3.18426698e-01 ... -7.06505105e-02 4.86103110e-02 -5.53786661e-03] [-1.07920818e-01 -1.48441628e-01 -1.36562884e-01 ... 6.63938150e-02 4.79092188e-02 -4.13317140e-03] [-1.52146116e-01 -1.72787175e-01 -7.03678131e-02 ... 1.35242671e-01 2.17708386e-03 -7.11984485e-02]]] [[[ 3.60013917e-02 9.59541500e-02 6.53633699e-02 ... 1.35928616e-02 1.37445435e-01 1.16662771e-01] [ 1.74066108e-02 1.19829528e-01 3.29958908e-02 ... -3.90367843e-02 1.24369942e-01 1.14199832e-01] [ 8.43735114e-02 2.69770563e-01 2.99937874e-01 ... 1.01362988e-01 3.09907347e-01 2.31490508e-01] ... [-2.13591918e-01 -2.31907383e-01 -5.61565943e-02 ... 2.34053195e-01 1.72918260e-01 1.24481551e-01] [-1.26084641e-01 -1.27923220e-01 -3.66143836e-03 ... 1.44488543e-01 -6.15699776e-02 -2.99049038e-02] [-2.87906285e-02 -5.61126173e-02 5.59865460e-02 ... 7.80602172e-02 -1.26959577e-01 -8.38189423e-02]] [[ 1.83712109e-03 8.60289037e-02 2.69702710e-02 ... -1.82895347e-01 1.55923530e-01 1.98659614e-01] [-2.38504969e-02 1.12615958e-01 -1.23211825e-02 ... 
-2.08537295e-01 2.22006902e-01 2.26556942e-01] [-7.14704720e-03 1.18458524e-01 1.60116956e-01 ... 4.85275649e-02 5.02528250e-01 3.48434687e-01] ... [-1.97621167e-01 -2.03930467e-01 -1.80394426e-01 ... 8.60846043e-02 -6.16886839e-02 -8.34395811e-02] [-2.16045946e-01 -2.02905372e-01 -4.00896333e-02 ... 5.98019063e-02 -1.69928253e-01 -2.11038008e-01] [-1.63459137e-01 -2.11982086e-01 4.53321300e-02 ... 2.38384884e-02 -2.00662389e-01 -2.12496251e-01]] [[ 2.47383919e-02 1.04343265e-01 1.04767889e-01 ... -2.80741185e-01 4.82490957e-02 1.66840747e-01] [ 4.51138429e-02 1.89978346e-01 2.04203472e-01 ... -4.43253130e-01 3.92664894e-02 1.72741920e-01] [ 1.96752816e-01 2.74073392e-01 4.69334632e-01 ... -1.74419180e-01 1.14851139e-01 1.51338026e-01] ... [-1.47679746e-01 -9.97681022e-02 -9.16424617e-02 ... 1.25681221e-01 -5.88144101e-02 -8.01767632e-02] [-1.12636618e-01 -3.92994955e-02 6.35474846e-02 ... 1.68055311e-01 -5.69146387e-02 -2.00857848e-01] [-1.68357659e-02 3.54853389e-03 3.99983674e-02 ... 1.17814191e-01 -1.18505254e-01 -2.03286856e-01]] ... [[-9.17569473e-02 -1.70167357e-01 -8.92430246e-02 ... -1.55602256e-02 -7.77986944e-02 -1.03086919e-01] [-5.68210185e-02 -2.10066617e-01 -8.34826753e-02 ... -1.00762270e-01 -8.54801610e-02 -1.20891601e-01] [ 1.79170091e-02 -1.80382535e-01 -1.57907933e-01 ... -1.35282010e-01 -1.96701810e-01 -1.65060610e-01] ... [-3.63491744e-01 -4.52692747e-01 -2.10488722e-01 ... 1.31665438e-01 1.56044781e-01 2.00582996e-01] [-2.75865674e-01 -3.77155155e-01 -6.16193675e-02 ... 1.91670880e-01 3.26299161e-01 1.10815860e-01] [-1.64183736e-01 -2.80049235e-01 -2.16087271e-02 ... 1.01951808e-01 3.20354611e-01 1.36563957e-01]] [[-1.16980307e-01 -1.87113553e-01 -1.99249131e-03 ... 3.43571678e-02 -1.49453580e-01 -2.76342988e-01] [-1.25586286e-01 -2.67652303e-01 -7.02857152e-02 ... -8.21615979e-02 -1.96380481e-01 -3.22515637e-01] [-8.68633762e-02 -2.66644120e-01 -1.49977714e-01 ... -1.38091341e-01 -2.57771850e-01 -2.76411176e-01] ... 
[-3.13194990e-01 -4.53426570e-01 -2.85586804e-01 ... 5.00741638e-02 3.31074029e-01 2.93761492e-01] [-1.13077536e-01 -2.13208646e-01 1.45044550e-02 ... 3.30285393e-02 4.28558677e-01 2.11350858e-01] [ 3.42213064e-02 -6.82925747e-04 1.27032742e-01 ... 2.97729801e-02 3.71576816e-01 1.90729499e-01]] [[-7.71810189e-02 -9.81225967e-02 1.16383024e-01 ... -3.38558815e-02 -3.82110514e-02 -1.40000924e-01] [-1.50054067e-01 -2.16953859e-01 2.94724964e-02 ... -1.02609873e-01 -6.56285882e-02 -1.63421363e-01] [-6.10092618e-02 -1.68456331e-01 -4.98996302e-02 ... -1.76759616e-01 -2.47684941e-01 -2.31769532e-01] ... [-2.27574170e-01 -3.86009216e-01 -2.22523987e-01 ... 1.29742757e-03 2.01361343e-01 1.31653801e-01] [-1.38425514e-01 -2.17621967e-01 4.87056896e-02 ... 4.19767536e-02 2.22129226e-01 6.42938688e-02] [-4.73335274e-02 -4.19286638e-02 1.42728180e-01 ... 7.19530955e-02 2.40556017e-01 1.21744826e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': [3, 3, 3], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5758.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %5 : int[] = prim::Constant[value=[0, 0, 0]]() %6 : int[] = prim::Constant[value=[3, 3, 3]]() %7 : Tensor = aten::avg_pool3d(%x, %6, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%7) fw_re: [[[[[ 6.72579929e-02 -2.63872743e-01 4.14825320e-01 2.55828649e-01 1.53700158e-01] [ 1.00140925e-02 4.38249037e-02 8.35149363e-02 -2.35961780e-01 -1.72167912e-01] [ 1.38113007e-01 -2.05274701e-01 7.64797032e-02 7.49218538e-02 4.38628830e-02] [ 1.05542555e-01 4.61728834e-02 2.36269191e-01 -9.71901417e-02 3.85241270e-01] [ 6.11422956e-02 1.36806786e-01 1.15959771e-01 -3.55544463e-02 4.88594621e-02]] [[-2.60676742e-01 1.24465704e-01 -1.54601201e-01 2.13712692e-01 -1.45992398e-01] [ 1.18122108e-01 5.74271344e-02 -2.49814857e-02 -8.57273489e-02 1.46163628e-01] [ 2.05087349e-01 3.03829238e-02 3.52126136e-02 2.26892054e-01 2.88409948e-01] [-1.91728115e-01 -2.81032026e-01 -1.20813742e-01 1.56739412e-03 -6.70782998e-02] [-1.07186832e-01 5.23836277e-02 9.76117998e-02 -1.12241834e-01 -4.18628678e-02]] [[-2.32398421e-01 2.95737594e-01 -1.87637612e-01 -2.59453714e-01 8.48273784e-02] [ 1.58159554e-01 3.77424955e-01 7.96933621e-02 1.27990663e-01 2.06821233e-01] [ 6.77042454e-02 2.34242100e-02 -4.14181978e-01 1.35731563e-01 3.01351901e-02] [ 6.02267496e-02 1.10632457e-01 -2.46074930e-01 7.02983141e-02 6.38420954e-02] [-3.69085729e-01 -4.84138429e-02 -3.99764441e-02 1.94854289e-02 -1.64776612e-02]] [[ 2.13833645e-01 1.78426474e-01 5.17515689e-02 7.53655732e-02 -1.67410582e-01] [ 4.46524806e-02 1.30212963e-01 
-8.35407618e-03 2.36829862e-01 4.29099649e-02] [ 1.99697635e-04 9.74713415e-02 -5.86144850e-02 2.08744496e-01 -1.80488210e-02] [ 1.26649320e-01 2.18037575e-01 -1.62067980e-01 9.19417962e-02 -1.61848843e-01] [-3.34367040e-03 8.23287964e-02 -1.04228973e-01 1.30858347e-01 9.46989208e-02]] [[ 1.00618206e-01 1.03593841e-01 -3.63290340e-01 6.12700395e-02 -1.35669678e-01] [ 1.97575450e-01 -7.63030872e-02 -1.24130256e-01 -1.06451638e-01 -1.74122434e-02] [ 2.17777073e-01 2.23482206e-01 -1.73423171e-01 1.23581178e-01 -1.69282332e-01] [ 1.57763213e-01 -7.67979696e-02 4.02823865e-01 1.19748294e-01 -5.88018261e-02] [ 2.75084168e-01 -1.03267603e-01 -1.32746920e-01 -9.43893939e-02 -4.00879532e-01]]] [[[ 4.40033386e-03 2.25941151e-01 8.37691948e-02 4.93257381e-02 9.26686227e-02] [-2.42749214e-01 1.99673593e-01 -7.41401389e-02 -9.48292241e-02 -9.60150454e-03] [ 4.14310116e-03 1.27683982e-01 1.70167144e-02 2.35237867e-01 1.06018491e-01] [-1.33618712e-02 6.39108047e-02 1.37807369e-01 -1.29993977e-02 -2.03527175e-02] [-7.74571970e-02 -1.21827796e-01 -6.16988689e-02 7.85598159e-02 1.76152289e-01]] [[ 1.96104720e-01 -8.86090323e-02 2.86528260e-01 3.02242458e-01 -1.26828551e-01] [-1.33091167e-01 -1.53098002e-01 3.41543518e-02 1.09155655e-01 2.19530180e-01] [-1.23748876e-01 1.42986506e-01 -2.27402002e-01 -2.43227884e-01 -2.14826375e-01] [ 1.64947540e-01 3.44746560e-02 2.69493684e-02 2.02426508e-01 3.17890167e-01] [ 8.20876211e-02 3.91274281e-02 -4.77686599e-02 -1.73913211e-01 -7.34191462e-02]] [[-1.60509512e-01 -2.36375451e-01 -1.12072034e-02 3.72845978e-02 -1.86346725e-01] [-3.24291587e-01 -1.27061903e-01 -4.39177565e-02 -7.10756928e-02 -2.48557460e-02] [-8.34485665e-02 -2.32726082e-01 -2.49756262e-01 -4.73588221e-02 6.07025176e-02] [-1.51647866e-01 1.98163673e-01 1.22797206e-01 -8.39862749e-02 1.16745800e-01] [ 1.49751633e-01 7.50938132e-02 3.91402580e-02 -1.92552768e-02 -9.61858109e-02]] [[-1.18212998e-01 -2.19793066e-01 3.61706764e-02 3.74781728e-01 2.36790568e-01] [ 4.92673665e-01 
4.04668376e-02 4.92274500e-02 -1.81705207e-02 -4.51428406e-02] [ 9.79194641e-02 -1.87857375e-01 -6.42576814e-02 1.74416408e-01 5.82516715e-02] [-2.36114994e-01 -5.34783155e-02 -5.09396719e-04 -1.66129008e-01 7.18076900e-02] [ 6.92390352e-02 2.01476917e-01 2.56524593e-01 -1.96097314e-01 -2.03442022e-01]] [[ 5.59016205e-02 1.17230050e-01 -2.14223154e-02 3.48988250e-02 -1.31447822e-01] [-5.50824583e-01 -1.90362081e-01 1.08501010e-01 -2.76657902e-02 1.58783734e-01] [ 1.40084967e-01 -4.22611952e-01 -2.37939000e-01 -6.94122612e-02 -9.00061578e-02] [ 3.14030610e-02 5.22696413e-02 -7.85100646e-03 2.67349303e-01 -1.19433455e-01] [-1.26595333e-01 1.66256815e-01 5.81673309e-02 1.58566594e-01 -2.09620655e-01]]] [[[ 1.07823079e-02 -2.36400403e-03 1.12208829e-03 3.26986872e-02 -7.48650916e-03] [-8.54621641e-03 -1.15159228e-01 1.16512291e-01 -2.35903993e-01 2.22397730e-01] [-2.18213052e-02 -1.58341885e-01 -2.85286810e-02 1.26493797e-01 2.97333766e-02] [-3.31244282e-02 3.13178122e-01 5.06087281e-02 1.33092269e-01 -1.68877631e-01] [-3.84474285e-02 -1.56568319e-01 -1.80763919e-02 -1.37044102e-01 -1.63550973e-01]] [[-1.59401949e-02 1.41196862e-01 1.74237378e-02 2.81804278e-02 -1.27577797e-01] [-1.63563952e-01 1.31636754e-01 1.09891281e-01 1.66073203e-01 9.82303619e-02] [-9.39575210e-02 -1.35022685e-01 2.25131601e-01 -2.49037966e-01 -2.18852580e-01] [-4.78123069e-01 -1.67334542e-01 7.56801143e-02 -2.44266391e-01 1.61809087e-01] [ 1.96142107e-01 6.57520965e-02 -5.66875227e-02 -2.09689438e-01 -2.41949618e-01]] [[ 2.15911344e-02 4.09534276e-02 -1.54923853e-02 -6.19378746e-01 2.38444701e-01] [-1.72172248e-01 1.21134035e-01 -1.46751285e-01 -2.69313991e-01 -2.71576971e-01] [ 2.96874762e-01 1.13801815e-01 -3.00714105e-01 4.99221981e-02 -1.93102613e-01] [ 1.33843362e-01 -8.39390457e-02 -7.36774206e-02 2.18235850e-01 1.41888946e-01] [-1.02955170e-01 8.25757086e-02 2.04774179e-03 -9.52118412e-02 2.79819608e-01]] [[ 1.02417469e-01 2.87864730e-02 2.23139254e-03 5.43441214e-02 -5.71491104e-03] [ 
2.18917027e-01 2.04383209e-01 3.07993323e-01 -9.38684642e-02 8.54917094e-02] [-5.45403920e-02 4.94467327e-03 9.42426994e-02 4.87814993e-02 3.15634370e-01] [ 1.54735342e-01 7.10325912e-02 1.56825818e-02 -1.75696194e-01 -7.75915459e-02] [-2.23090015e-02 -2.57484764e-01 -4.29139346e-01 2.26291046e-02 4.75762814e-01]] [[ 2.22160220e-01 1.09810077e-01 2.00948298e-01 -3.13968599e-01 -3.64303261e-01] [-2.83764079e-02 7.07740635e-02 7.79271452e-03 -2.38452718e-01 -2.52980173e-01] [ 9.80462357e-02 1.44683093e-01 -1.82509869e-01 2.03089461e-01 -1.29883692e-01] [-2.24470988e-01 -5.63475341e-02 2.53985286e-01 1.54432952e-01 9.71844643e-02] [-1.31165236e-01 1.67997539e-01 -6.41927198e-02 -6.32666051e-02 9.98683646e-02]]]]]; ov_res: [[[[[ 6.72579929e-02 -2.63872743e-01 4.14825320e-01 2.55828649e-01 1.53700158e-01] [ 1.00140925e-02 4.38249037e-02 8.35149363e-02 -2.35961780e-01 -1.72167912e-01] [ 1.38113007e-01 -2.05274701e-01 7.64797032e-02 7.49218538e-02 4.38628830e-02] [ 1.05542555e-01 4.61728834e-02 2.36269191e-01 -9.71901417e-02 3.85241270e-01] [ 6.11422956e-02 1.36806786e-01 1.15959771e-01 -3.55544463e-02 4.88594621e-02]] [[-2.60676742e-01 1.24465704e-01 -1.54601201e-01 2.13712692e-01 -1.45992398e-01] [ 1.18122108e-01 5.74271344e-02 -2.49814857e-02 -8.57273489e-02 1.46163628e-01] [ 2.05087349e-01 3.03829238e-02 3.52126136e-02 2.26892054e-01 2.88409948e-01] [-1.91728115e-01 -2.81032026e-01 -1.20813742e-01 1.56739412e-03 -6.70782998e-02] [-1.07186832e-01 5.23836277e-02 9.76117998e-02 -1.12241834e-01 -4.18628678e-02]] [[-2.32398421e-01 2.95737594e-01 -1.87637612e-01 -2.59453714e-01 8.48273784e-02] [ 1.58159554e-01 3.77424955e-01 7.96933621e-02 1.27990663e-01 2.06821233e-01] [ 6.77042454e-02 2.34242100e-02 -4.14181978e-01 1.35731563e-01 3.01351901e-02] [ 6.02267496e-02 1.10632457e-01 -2.46074930e-01 7.02983141e-02 6.38420954e-02] [-3.69085729e-01 -4.84138429e-02 -3.99764441e-02 1.94854289e-02 -1.64776612e-02]] [[ 2.13833645e-01 1.78426474e-01 5.17515689e-02 7.53655732e-02 
-1.67410582e-01] [ 4.46524806e-02 1.30212963e-01 -8.35407618e-03 2.36829862e-01 4.29099649e-02] [ 1.99697635e-04 9.74713415e-02 -5.86144850e-02 2.08744496e-01 -1.80488210e-02] [ 1.26649320e-01 2.18037575e-01 -1.62067980e-01 9.19417962e-02 -1.61848843e-01] [-3.34367040e-03 8.23287964e-02 -1.04228973e-01 1.30858347e-01 9.46989208e-02]] [[ 1.00618206e-01 1.03593841e-01 -3.63290340e-01 6.12700395e-02 -1.35669678e-01] [ 1.97575450e-01 -7.63030872e-02 -1.24130256e-01 -1.06451638e-01 -1.74122434e-02] [ 2.17777073e-01 2.23482206e-01 -1.73423171e-01 1.23581178e-01 -1.69282332e-01] [ 1.57763213e-01 -7.67979696e-02 4.02823865e-01 1.19748294e-01 -5.88018261e-02] [ 2.75084168e-01 -1.03267603e-01 -1.32746920e-01 -9.43893939e-02 -4.00879532e-01]]] [[[ 4.40033386e-03 2.25941151e-01 8.37691948e-02 4.93257381e-02 9.26686227e-02] [-2.42749214e-01 1.99673593e-01 -7.41401389e-02 -9.48292241e-02 -9.60150454e-03] [ 4.14310116e-03 1.27683982e-01 1.70167144e-02 2.35237867e-01 1.06018491e-01] [-1.33618712e-02 6.39108047e-02 1.37807369e-01 -1.29993977e-02 -2.03527175e-02] [-7.74571970e-02 -1.21827796e-01 -6.16988689e-02 7.85598159e-02 1.76152289e-01]] [[ 1.96104720e-01 -8.86090323e-02 2.86528260e-01 3.02242458e-01 -1.26828551e-01] [-1.33091167e-01 -1.53098002e-01 3.41543518e-02 1.09155655e-01 2.19530180e-01] [-1.23748876e-01 1.42986506e-01 -2.27402002e-01 -2.43227884e-01 -2.14826375e-01] [ 1.64947540e-01 3.44746560e-02 2.69493684e-02 2.02426508e-01 3.17890167e-01] [ 8.20876211e-02 3.91274281e-02 -4.77686599e-02 -1.73913211e-01 -7.34191462e-02]] [[-1.60509512e-01 -2.36375451e-01 -1.12072034e-02 3.72845978e-02 -1.86346725e-01] [-3.24291587e-01 -1.27061903e-01 -4.39177565e-02 -7.10756928e-02 -2.48557460e-02] [-8.34485665e-02 -2.32726082e-01 -2.49756262e-01 -4.73588221e-02 6.07025176e-02] [-1.51647866e-01 1.98163673e-01 1.22797206e-01 -8.39862749e-02 1.16745800e-01] [ 1.49751633e-01 7.50938132e-02 3.91402580e-02 -1.92552768e-02 -9.61858109e-02]] [[-1.18212998e-01 -2.19793066e-01 3.61706764e-02 
3.74781728e-01 2.36790568e-01] [ 4.92673665e-01 4.04668376e-02 4.92274500e-02 -1.81705207e-02 -4.51428406e-02] [ 9.79194641e-02 -1.87857375e-01 -6.42576814e-02 1.74416408e-01 5.82516715e-02] [-2.36114994e-01 -5.34783155e-02 -5.09396719e-04 -1.66129008e-01 7.18076900e-02] [ 6.92390352e-02 2.01476917e-01 2.56524593e-01 -1.96097314e-01 -2.03442022e-01]] [[ 5.59016205e-02 1.17230050e-01 -2.14223154e-02 3.48988250e-02 -1.31447822e-01] [-5.50824583e-01 -1.90362081e-01 1.08501010e-01 -2.76657902e-02 1.58783734e-01] [ 1.40084967e-01 -4.22611952e-01 -2.37939000e-01 -6.94122612e-02 -9.00061578e-02] [ 3.14030610e-02 5.22696413e-02 -7.85100646e-03 2.67349303e-01 -1.19433455e-01] [-1.26595333e-01 1.66256815e-01 5.81673309e-02 1.58566594e-01 -2.09620655e-01]]] [[[ 1.07823079e-02 -2.36400403e-03 1.12208829e-03 3.26986872e-02 -7.48650916e-03] [-8.54621641e-03 -1.15159228e-01 1.16512291e-01 -2.35903993e-01 2.22397730e-01] [-2.18213052e-02 -1.58341885e-01 -2.85286810e-02 1.26493797e-01 2.97333766e-02] [-3.31244282e-02 3.13178122e-01 5.06087281e-02 1.33092269e-01 -1.68877631e-01] [-3.84474285e-02 -1.56568319e-01 -1.80763919e-02 -1.37044102e-01 -1.63550973e-01]] [[-1.59401949e-02 1.41196862e-01 1.74237378e-02 2.81804278e-02 -1.27577797e-01] [-1.63563952e-01 1.31636754e-01 1.09891281e-01 1.66073203e-01 9.82303619e-02] [-9.39575210e-02 -1.35022685e-01 2.25131601e-01 -2.49037966e-01 -2.18852580e-01] [-4.78123069e-01 -1.67334542e-01 7.56801143e-02 -2.44266391e-01 1.61809087e-01] [ 1.96142107e-01 6.57520965e-02 -5.66875227e-02 -2.09689438e-01 -2.41949618e-01]] [[ 2.15911344e-02 4.09534276e-02 -1.54923853e-02 -6.19378746e-01 2.38444701e-01] [-1.72172248e-01 1.21134035e-01 -1.46751285e-01 -2.69313991e-01 -2.71576971e-01] [ 2.96874762e-01 1.13801815e-01 -3.00714105e-01 4.99221981e-02 -1.93102613e-01] [ 1.33843362e-01 -8.39390457e-02 -7.36774206e-02 2.18235850e-01 1.41888946e-01] [-1.02955170e-01 8.25757086e-02 2.04774179e-03 -9.52118412e-02 2.79819608e-01]] [[ 1.02417469e-01 2.87864730e-02 
2.23139254e-03 5.43441214e-02 -5.71491104e-03] [ 2.18917027e-01 2.04383209e-01 3.07993323e-01 -9.38684642e-02 8.54917094e-02] [-5.45403920e-02 4.94467327e-03 9.42426994e-02 4.87814993e-02 3.15634370e-01] [ 1.54735342e-01 7.10325912e-02 1.56825818e-02 -1.75696194e-01 -7.75915459e-02] [-2.23090015e-02 -2.57484764e-01 -4.29139346e-01 2.26291046e-02 4.75762814e-01]] [[ 2.22160220e-01 1.09810077e-01 2.00948298e-01 -3.13968599e-01 -3.64303261e-01] [-2.83764079e-02 7.07740635e-02 7.79271452e-03 -2.38452718e-01 -2.52980173e-01] [ 9.80462357e-02 1.44683093e-01 -1.82509869e-01 2.03089461e-01 -1.29883692e-01] [-2.24470988e-01 -5.63475341e-02 2.53985286e-01 1.54432952e-01 9.71844643e-02] [-1.31165236e-01 1.67997539e-01 -6.41927198e-02 -6.32666051e-02 9.98683646e-02]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:True - ceil_mode:False - params:{'kernel_size': [3, 2, 1], 'stride': [3, 1, 1], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5761.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %5 : int[] = prim::Constant[value=[0, 0, 0]]() %6 : int[] = prim::Constant[value=[3, 1, 1]]() %7 : int[] = prim::Constant[value=[3, 2, 1]]() %8 : Tensor = aten::avg_pool3d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%8) fw_re: [[[[[-6.94942549e-02 -1.12195261e-01 -3.05657715e-01 ... 6.14263773e-01 1.83993027e-01 1.11210518e-01] [-3.45952027e-02 -3.08343917e-01 -6.96473047e-02 ... 8.37320805e-01 3.03897619e-01 4.68279988e-01] [-1.20134847e-02 -1.90938473e-01 -9.38955173e-02 ... 2.83823967e-01 3.04094944e-02 5.85640252e-01] ... [-2.01394498e-01 2.02511027e-01 6.09833777e-01 ... 2.48374239e-01 -1.08580142e-01 -5.60886323e-01] [ 2.55047500e-01 -7.56135061e-02 9.97297466e-02 ... 7.50459805e-02 3.63899499e-01 -7.35927105e-01] [-1.23203158e-01 -6.80621564e-01 -3.29931468e-01 ... 1.79507695e-02 6.14121556e-01 3.21031421e-01]] [[ 3.57698202e-01 -2.31622860e-01 5.92436016e-01 ... 1.31692380e-01 3.63067627e-01 -2.41713062e-01] [ 1.49070650e-01 3.54768825e-03 1.51378214e-01 ... 4.93204258e-02 1.01078939e+00 -8.58989954e-01] [-2.01216832e-01 2.14521721e-01 2.35019088e-01 ... -7.47120678e-02 7.46115506e-01 -3.50804359e-01] ... [ 3.19259852e-01 -2.67754704e-01 1.39158770e-01 ... 7.08602369e-03 -2.70163268e-01 -6.72402382e-01] [-5.39054215e-01 -3.40264052e-01 -1.54845342e-01 ... -4.49505895e-01 -7.85935223e-02 -2.74519295e-01] [-1.05367124e+00 -3.69893551e-01 6.85749352e-02 ... -7.76183605e-02 -1.88686132e-01 -4.01649803e-01]] [[ 6.50426388e-01 -2.78813448e-02 6.78434372e-02 ... 
3.20555568e-02 -4.67486262e-01 9.98162329e-02] [ 8.77958611e-02 -1.61496997e-01 4.05378133e-01 ... -9.90909114e-02 -4.61527586e-01 3.07137165e-02] [ 8.30086395e-02 -5.45230150e-01 -9.46843252e-02 ... -4.99699563e-01 -1.39288545e-01 -1.07531913e-01] ... [ 2.75107697e-02 -2.78011769e-01 2.72786468e-01 ... -3.62524748e-01 -4.16611403e-01 -1.54270962e-01] [ 7.89173022e-02 -1.83194101e-01 3.16641480e-01 ... -2.28511438e-01 -1.99632738e-02 2.19640937e-02] [ 6.22496009e-02 4.18779880e-01 -4.85608548e-01 ... -4.63114887e-01 -1.23041786e-01 3.18716675e-01]] [[ 5.65459907e-01 4.37321335e-01 2.58518487e-01 ... 3.67481746e-02 2.65359491e-01 -6.10498130e-01] [ 9.09168303e-01 5.13903916e-01 -1.77263305e-01 ... -1.99678555e-01 5.31580150e-01 -4.70077604e-01] [ 5.68521798e-01 1.66481480e-01 -3.08772121e-02 ... -9.42909494e-02 -4.09917422e-02 -6.30279660e-01] ... [-2.63362736e-01 -1.00528829e-01 -9.37108219e-01 ... -1.47796527e-01 2.89300501e-01 -7.89091587e-02] [ 2.08742574e-01 1.70840621e-01 -7.36140430e-01 ... 3.42822760e-01 -5.40058017e-02 -1.22984178e-01] [ 2.56439839e-02 2.63064891e-01 -5.10657132e-01 ... 7.36589253e-01 -1.55079469e-01 -1.13982178e-01]] [[-1.85785577e-01 -3.91092271e-01 -3.64418864e-01 ... 2.01172177e-02 -5.46931446e-01 1.91962972e-01] [ 2.83731192e-01 5.06102853e-02 -1.01595066e-01 ... -4.69496459e-01 2.85754800e-01 -5.67158878e-01] [ 4.09431428e-01 -5.19054234e-01 -2.75383830e-01 ... -6.81333244e-01 5.31746959e-03 -4.08087343e-01] ... [-3.61329168e-01 1.14985950e-01 3.95927668e-01 ... -2.87948847e-01 -1.36195943e-01 5.62782772e-02] [-1.65761247e-01 3.49025965e-01 8.34039032e-01 ... -4.25327867e-01 -9.04358625e-01 -1.88081980e-01] [ 2.08574653e-01 7.61139989e-02 3.81754518e-01 ... 6.70524836e-01 -6.20607674e-01 2.27183793e-02]]] [[[-1.93417504e-01 -1.54864132e-01 -2.92763442e-01 ... 8.01610529e-01 1.47783741e-01 5.86861707e-02] [-1.07040502e-01 -1.10509837e+00 -2.67983645e-01 ... 
1.91378161e-01 -4.44513321e-01 3.68242711e-01] [ 1.75988615e-01 -5.03072083e-01 -2.77156383e-01 ... -1.73961565e-01 -5.00127494e-01 1.17540367e-01] ... [ 1.46851718e-01 4.51619327e-02 7.76616717e-03 ... -4.83795762e-01 4.16902423e-01 2.14653492e-01] [ 1.21851593e-01 2.43955255e-01 3.12617607e-02 ... 5.09802639e-01 1.59579828e-01 4.96541291e-01] [-3.04245651e-01 3.38446528e-01 4.90473695e-02 ... -2.56921258e-02 -4.17800993e-01 4.68627125e-01]] [[-7.17881024e-01 5.18262804e-01 3.24390121e-02 ... -3.89984697e-01 -3.03105026e-01 -1.21525891e-01] [-3.36204141e-01 5.66746807e-03 2.55597502e-01 ... -3.99542809e-01 -3.82952541e-01 5.57002485e-01] [ 2.94751614e-01 -2.31631041e-01 -1.34822115e-01 ... -4.59116340e-01 -8.98064747e-02 3.92932892e-01] ... [-8.59204113e-01 -1.25981316e-01 3.09587151e-01 ... -4.25726563e-01 3.77943784e-01 1.19672632e+00] [-7.83226013e-01 4.91702765e-01 -4.48675066e-01 ... -1.18191140e-02 2.25429535e-01 4.59024787e-01] [-1.16115458e-01 6.00220561e-01 -3.51182103e-01 ... 9.91702378e-02 -4.97569114e-01 -5.43953478e-01]] [[-5.57668328e-01 -6.94149554e-01 -2.22620487e-01 ... 2.38461569e-01 1.66544154e-01 3.90525728e-01] [-4.77752417e-01 1.58619031e-01 2.61628181e-01 ... 2.92473529e-02 7.16989040e-01 -4.27392721e-02] [ 1.70528248e-01 9.97421205e-01 4.49832231e-01 ... 2.13176593e-01 -2.55290240e-01 -5.12039840e-01] ... [-7.04759359e-02 -6.55712605e-01 -3.80884290e-01 ... -2.50820696e-01 9.79437828e-02 2.23801181e-01] [ 4.09924597e-01 -5.59771471e-02 -4.04851548e-02 ... 4.60257344e-02 -2.07073167e-01 -1.82103351e-01] [ 3.82725507e-01 3.89231354e-01 5.58764279e-01 ... -7.16604769e-01 -8.77239525e-01 3.24888416e-02]] [[ 4.85830456e-01 -2.56165832e-01 -1.95518136e-01 ... -3.08688015e-01 2.42880717e-01 -1.20036989e-01] [ 5.08761466e-01 -7.66840458e-01 9.45239067e-02 ... 1.95784554e-01 9.20319036e-02 4.66875136e-02] [-1.16340287e-01 -2.38292634e-01 1.83626413e-01 ... -2.05551147e-01 -3.00637752e-01 2.80134082e-02] ... 
[ 3.81116718e-01 -1.24126337e-01 2.00919472e-02 ... 1.40280247e-01 1.98781746e-03 -6.38480484e-01] [-1.60095897e-02 -5.73646784e-01 -1.39576793e-02 ... -4.01959658e-01 3.96086246e-01 -5.13182104e-01] [ 6.93124473e-01 -5.81667461e-02 1.92213729e-01 ... -6.18575275e-01 -3.21056657e-02 -1.04719907e-01]] [[ 2.87730008e-01 -3.06717783e-01 2.96537969e-02 ... -6.32611036e-01 -6.91870868e-01 -1.73900083e-01] [-4.06861305e-02 -8.36897790e-01 3.25358599e-01 ... -5.80371201e-01 -2.72155195e-01 1.35372147e-01] [-8.38135108e-02 -1.47538781e-01 -3.59291673e-01 ... -4.34513330e-01 -7.23896325e-01 -2.76160002e-01] ... [ 2.79298965e-02 -8.21293533e-01 8.85404348e-02 ... 3.12907964e-01 -5.34415424e-01 -3.25341463e-01] [ 7.62447491e-02 3.22867602e-01 -2.25927174e-01 ... -4.74415272e-02 -1.63094252e-01 -5.69371641e-01] [ 2.29928851e-01 7.80505165e-02 9.59993526e-02 ... -6.93108618e-01 -2.38769099e-01 -4.37576532e-01]]] [[[ 3.56540829e-01 -3.39552969e-01 -6.31878003e-02 ... -1.93096042e-01 3.30244489e-02 4.36046571e-01] [-2.76778149e-03 -2.61072367e-01 -2.37057820e-01 ... -3.44535232e-01 5.91392934e-01 7.82962739e-01] [-5.66736162e-01 3.62645417e-01 -1.00766098e+00 ... -5.38478076e-01 8.27429295e-01 7.52965391e-01] ... [ 4.24837559e-01 9.36926067e-01 6.85143471e-01 ... -1.34481177e-01 -1.41435280e-01 3.36614698e-01] [ 1.88921735e-01 -2.32523799e-01 4.44580466e-01 ... -2.88395733e-01 1.33192405e-01 -8.36025551e-02] [-3.27794142e-02 -1.40337572e-01 4.71127272e-01 ... -3.89556848e-02 -2.37428341e-02 -2.50525087e-01]] [[-7.68970490e-01 -3.72835875e-01 4.58264858e-01 ... 2.71541417e-01 6.58302784e-01 1.89787790e-01] [-1.61265269e-01 -5.65089643e-01 3.39665920e-01 ... 6.15700901e-01 4.02839988e-01 1.23579659e-01] [ 5.08567393e-01 -2.74640143e-01 -4.34555769e-01 ... 9.24531996e-01 6.64625943e-01 -2.78187960e-01] ... [-6.26804948e-01 5.27784705e-01 -4.92361158e-01 ... -1.97512403e-01 -8.73055875e-01 1.69004425e-01] [-3.31248373e-01 5.83208799e-02 -3.76624346e-01 ... 
-4.14653987e-01 -5.53598583e-01 -1.44875154e-01] [ 5.18199317e-02 -3.14168066e-01 2.48203516e-01 ... 9.46205556e-02 9.81737003e-02 -1.71240494e-01]] [[-5.89916855e-02 1.43714352e-02 -1.26565680e-01 ... -3.99391890e-01 1.25108480e-01 6.67591870e-04] [-2.96214018e-02 2.06878051e-01 -2.25993395e-01 ... -1.65484205e-01 -6.95816875e-02 2.93935146e-02] [ 4.29043084e-01 2.82533076e-02 -4.02263552e-01 ... 2.06331000e-01 -3.08333367e-01 -3.36526960e-01] ... [ 7.61137977e-02 5.68802059e-01 1.91884100e-01 ... -1.72936037e-01 -2.43573785e-01 -5.51810078e-02] [-3.60935539e-01 6.65810883e-01 8.41663599e-01 ... -6.87429309e-02 2.60392338e-01 1.44430503e-01] [-6.90562204e-02 -9.15884897e-02 -1.63659796e-01 ... -1.42292663e-01 5.08858502e-01 4.03419584e-01]] [[-7.60948732e-02 5.17772019e-01 3.70370239e-01 ... 3.64492774e-01 3.19107443e-01 1.96990788e-01] [ 1.65148646e-01 4.52873081e-01 2.77408689e-01 ... 2.09364101e-01 4.02057171e-01 -5.42772599e-02] [ 4.53807354e-01 5.52805364e-01 -3.47771734e-01 ... -7.74338484e-01 9.92453471e-02 2.14112163e-01] ... [-9.66696322e-01 -8.49239063e-03 2.80105054e-01 ... 7.95818269e-01 1.96216121e-01 -6.06830651e-03] [-7.01159000e-01 5.46453893e-01 5.05000532e-01 ... 5.50242364e-01 2.30493043e-02 -2.03415677e-01] [-3.02560180e-01 4.39959377e-01 9.99522805e-02 ... 4.27845240e-01 -3.48450571e-01 -1.46279231e-01]] [[-1.29283980e-01 -1.58547968e-01 6.55846775e-01 ... 8.36193934e-02 -8.65640402e-01 -4.50057834e-01] [-4.61940289e-01 -5.02294660e-01 2.07023546e-01 ... -1.43735008e-02 -7.56866634e-01 -5.03880918e-01] [-8.91360939e-01 -2.41364613e-01 6.62658438e-02 ... -2.44424269e-01 -2.37251744e-01 -2.50182062e-01] ... [-2.77897477e-01 -3.41691405e-01 5.18872261e-01 ... -4.98289019e-01 -9.72600698e-01 -3.32720280e-02] [-2.59879738e-01 -7.95279369e-02 1.12237535e-01 ... -2.38137648e-01 -9.27603006e-01 -1.42120272e-01] [-1.29496768e-01 5.38174927e-01 -1.97572902e-01 ... 
4.58984971e-02 1.72943305e-02 2.14749023e-01]]]]]; ov_res: [[[[[-6.94942549e-02 -1.12195261e-01 -3.05657715e-01 ... 6.14263773e-01 1.83993027e-01 1.11210518e-01] [-3.45952027e-02 -3.08343917e-01 -6.96473047e-02 ... 8.37320805e-01 3.03897619e-01 4.68279988e-01] [-1.20134847e-02 -1.90938473e-01 -9.38955173e-02 ... 2.83823967e-01 3.04094944e-02 5.85640252e-01] ... [-2.01394498e-01 2.02511027e-01 6.09833777e-01 ... 2.48374239e-01 -1.08580142e-01 -5.60886323e-01] [ 2.55047500e-01 -7.56135061e-02 9.97297466e-02 ... 7.50459805e-02 3.63899499e-01 -7.35927105e-01] [-1.23203158e-01 -6.80621564e-01 -3.29931468e-01 ... 1.79507695e-02 6.14121556e-01 3.21031421e-01]] [[ 3.57698202e-01 -2.31622860e-01 5.92436016e-01 ... 1.31692380e-01 3.63067627e-01 -2.41713062e-01] [ 1.49070650e-01 3.54768825e-03 1.51378214e-01 ... 4.93204258e-02 1.01078939e+00 -8.58989954e-01] [-2.01216832e-01 2.14521721e-01 2.35019088e-01 ... -7.47120678e-02 7.46115506e-01 -3.50804359e-01] ... [ 3.19259852e-01 -2.67754704e-01 1.39158770e-01 ... 7.08602369e-03 -2.70163268e-01 -6.72402382e-01] [-5.39054215e-01 -3.40264052e-01 -1.54845342e-01 ... -4.49505895e-01 -7.85935223e-02 -2.74519295e-01] [-1.05367124e+00 -3.69893551e-01 6.85749352e-02 ... -7.76183605e-02 -1.88686132e-01 -4.01649803e-01]] [[ 6.50426388e-01 -2.78813448e-02 6.78434372e-02 ... 3.20555568e-02 -4.67486262e-01 9.98162329e-02] [ 8.77958611e-02 -1.61496997e-01 4.05378133e-01 ... -9.90909114e-02 -4.61527586e-01 3.07137165e-02] [ 8.30086395e-02 -5.45230150e-01 -9.46843252e-02 ... -4.99699563e-01 -1.39288545e-01 -1.07531913e-01] ... [ 2.75107697e-02 -2.78011769e-01 2.72786468e-01 ... -3.62524748e-01 -4.16611403e-01 -1.54270962e-01] [ 7.89173022e-02 -1.83194101e-01 3.16641480e-01 ... -2.28511438e-01 -1.99632738e-02 2.19640937e-02] [ 6.22496009e-02 4.18779880e-01 -4.85608548e-01 ... -4.63114887e-01 -1.23041786e-01 3.18716675e-01]] [[ 5.65459907e-01 4.37321335e-01 2.58518487e-01 ... 
3.67481746e-02 2.65359491e-01 -6.10498130e-01] [ 9.09168303e-01 5.13903916e-01 -1.77263305e-01 ... -1.99678555e-01 5.31580150e-01 -4.70077604e-01] [ 5.68521798e-01 1.66481480e-01 -3.08772121e-02 ... -9.42909494e-02 -4.09917422e-02 -6.30279660e-01] ... [-2.63362736e-01 -1.00528829e-01 -9.37108219e-01 ... -1.47796527e-01 2.89300501e-01 -7.89091587e-02] [ 2.08742574e-01 1.70840621e-01 -7.36140430e-01 ... 3.42822760e-01 -5.40058017e-02 -1.22984178e-01] [ 2.56439839e-02 2.63064891e-01 -5.10657132e-01 ... 7.36589253e-01 -1.55079469e-01 -1.13982178e-01]] [[-1.85785577e-01 -3.91092271e-01 -3.64418864e-01 ... 2.01172177e-02 -5.46931446e-01 1.91962972e-01] [ 2.83731192e-01 5.06102853e-02 -1.01595066e-01 ... -4.69496459e-01 2.85754800e-01 -5.67158878e-01] [ 4.09431428e-01 -5.19054234e-01 -2.75383830e-01 ... -6.81333244e-01 5.31746959e-03 -4.08087343e-01] ... [-3.61329168e-01 1.14985950e-01 3.95927668e-01 ... -2.87948847e-01 -1.36195943e-01 5.62782772e-02] [-1.65761247e-01 3.49025965e-01 8.34039032e-01 ... -4.25327867e-01 -9.04358625e-01 -1.88081980e-01] [ 2.08574653e-01 7.61139989e-02 3.81754518e-01 ... 6.70524836e-01 -6.20607674e-01 2.27183793e-02]]] [[[-1.93417504e-01 -1.54864132e-01 -2.92763442e-01 ... 8.01610529e-01 1.47783741e-01 5.86861707e-02] [-1.07040502e-01 -1.10509837e+00 -2.67983645e-01 ... 1.91378161e-01 -4.44513321e-01 3.68242711e-01] [ 1.75988615e-01 -5.03072083e-01 -2.77156383e-01 ... -1.73961565e-01 -5.00127494e-01 1.17540367e-01] ... [ 1.46851718e-01 4.51619327e-02 7.76616717e-03 ... -4.83795762e-01 4.16902423e-01 2.14653492e-01] [ 1.21851593e-01 2.43955255e-01 3.12617607e-02 ... 5.09802639e-01 1.59579828e-01 4.96541291e-01] [-3.04245651e-01 3.38446528e-01 4.90473695e-02 ... -2.56921258e-02 -4.17800993e-01 4.68627125e-01]] [[-7.17881024e-01 5.18262804e-01 3.24390121e-02 ... -3.89984697e-01 -3.03105026e-01 -1.21525891e-01] [-3.36204141e-01 5.66746807e-03 2.55597502e-01 ... 
-3.99542809e-01 -3.82952541e-01 5.57002485e-01] [ 2.94751614e-01 -2.31631041e-01 -1.34822115e-01 ... -4.59116340e-01 -8.98064747e-02 3.92932892e-01] ... [-8.59204113e-01 -1.25981316e-01 3.09587151e-01 ... -4.25726563e-01 3.77943784e-01 1.19672632e+00] [-7.83226013e-01 4.91702765e-01 -4.48675066e-01 ... -1.18191140e-02 2.25429535e-01 4.59024787e-01] [-1.16115458e-01 6.00220561e-01 -3.51182103e-01 ... 9.91702378e-02 -4.97569114e-01 -5.43953478e-01]] [[-5.57668328e-01 -6.94149554e-01 -2.22620487e-01 ... 2.38461569e-01 1.66544154e-01 3.90525728e-01] [-4.77752417e-01 1.58619031e-01 2.61628181e-01 ... 2.92473529e-02 7.16989040e-01 -4.27392721e-02] [ 1.70528248e-01 9.97421205e-01 4.49832231e-01 ... 2.13176593e-01 -2.55290240e-01 -5.12039840e-01] ... [-7.04759359e-02 -6.55712605e-01 -3.80884290e-01 ... -2.50820696e-01 9.79437828e-02 2.23801181e-01] [ 4.09924597e-01 -5.59771471e-02 -4.04851548e-02 ... 4.60257344e-02 -2.07073167e-01 -1.82103351e-01] [ 3.82725507e-01 3.89231354e-01 5.58764279e-01 ... -7.16604769e-01 -8.77239525e-01 3.24888416e-02]] [[ 4.85830456e-01 -2.56165832e-01 -1.95518136e-01 ... -3.08688015e-01 2.42880717e-01 -1.20036989e-01] [ 5.08761466e-01 -7.66840458e-01 9.45239067e-02 ... 1.95784554e-01 9.20319036e-02 4.66875136e-02] [-1.16340287e-01 -2.38292634e-01 1.83626413e-01 ... -2.05551147e-01 -3.00637752e-01 2.80134082e-02] ... [ 3.81116718e-01 -1.24126337e-01 2.00919472e-02 ... 1.40280247e-01 1.98781746e-03 -6.38480484e-01] [-1.60095897e-02 -5.73646784e-01 -1.39576793e-02 ... -4.01959658e-01 3.96086246e-01 -5.13182104e-01] [ 6.93124473e-01 -5.81667461e-02 1.92213729e-01 ... -6.18575275e-01 -3.21056657e-02 -1.04719907e-01]] [[ 2.87730008e-01 -3.06717783e-01 2.96537969e-02 ... -6.32611036e-01 -6.91870868e-01 -1.73900083e-01] [-4.06861305e-02 -8.36897790e-01 3.25358599e-01 ... -5.80371201e-01 -2.72155195e-01 1.35372147e-01] [-8.38135108e-02 -1.47538781e-01 -3.59291673e-01 ... -4.34513330e-01 -7.23896325e-01 -2.76160002e-01] ... 
[ 2.79298965e-02 -8.21293533e-01 8.85404348e-02 ... 3.12907964e-01 -5.34415424e-01 -3.25341463e-01] [ 7.62447491e-02 3.22867602e-01 -2.25927174e-01 ... -4.74415272e-02 -1.63094252e-01 -5.69371641e-01] [ 2.29928851e-01 7.80505165e-02 9.59993526e-02 ... -6.93108618e-01 -2.38769099e-01 -4.37576532e-01]]] [[[ 3.56540829e-01 -3.39552969e-01 -6.31878003e-02 ... -1.93096042e-01 3.30244489e-02 4.36046571e-01] [-2.76778149e-03 -2.61072367e-01 -2.37057820e-01 ... -3.44535232e-01 5.91392934e-01 7.82962739e-01] [-5.66736162e-01 3.62645417e-01 -1.00766098e+00 ... -5.38478076e-01 8.27429295e-01 7.52965391e-01] ... [ 4.24837559e-01 9.36926067e-01 6.85143471e-01 ... -1.34481177e-01 -1.41435280e-01 3.36614698e-01] [ 1.88921735e-01 -2.32523799e-01 4.44580466e-01 ... -2.88395733e-01 1.33192405e-01 -8.36025551e-02] [-3.27794142e-02 -1.40337572e-01 4.71127272e-01 ... -3.89556848e-02 -2.37428341e-02 -2.50525087e-01]] [[-7.68970490e-01 -3.72835875e-01 4.58264858e-01 ... 2.71541417e-01 6.58302784e-01 1.89787790e-01] [-1.61265269e-01 -5.65089643e-01 3.39665920e-01 ... 6.15700901e-01 4.02839988e-01 1.23579659e-01] [ 5.08567393e-01 -2.74640143e-01 -4.34555769e-01 ... 9.24531996e-01 6.64625943e-01 -2.78187960e-01] ... [-6.26804948e-01 5.27784705e-01 -4.92361158e-01 ... -1.97512403e-01 -8.73055875e-01 1.69004425e-01] [-3.31248373e-01 5.83208799e-02 -3.76624346e-01 ... -4.14653987e-01 -5.53598583e-01 -1.44875154e-01] [ 5.18199317e-02 -3.14168066e-01 2.48203516e-01 ... 9.46205556e-02 9.81737003e-02 -1.71240494e-01]] [[-5.89916855e-02 1.43714352e-02 -1.26565680e-01 ... -3.99391890e-01 1.25108480e-01 6.67591870e-04] [-2.96214018e-02 2.06878051e-01 -2.25993395e-01 ... -1.65484205e-01 -6.95816875e-02 2.93935146e-02] [ 4.29043084e-01 2.82533076e-02 -4.02263552e-01 ... 2.06331000e-01 -3.08333367e-01 -3.36526960e-01] ... [ 7.61137977e-02 5.68802059e-01 1.91884100e-01 ... -1.72936037e-01 -2.43573785e-01 -5.51810078e-02] [-3.60935539e-01 6.65810883e-01 8.41663599e-01 ... 
-6.87429309e-02 2.60392338e-01 1.44430503e-01] [-6.90562204e-02 -9.15884897e-02 -1.63659796e-01 ... -1.42292663e-01 5.08858502e-01 4.03419584e-01]] [[-7.60948732e-02 5.17772019e-01 3.70370239e-01 ... 3.64492774e-01 3.19107443e-01 1.96990788e-01] [ 1.65148646e-01 4.52873081e-01 2.77408689e-01 ... 2.09364101e-01 4.02057171e-01 -5.42772599e-02] [ 4.53807354e-01 5.52805364e-01 -3.47771734e-01 ... -7.74338484e-01 9.92453471e-02 2.14112163e-01] ... [-9.66696322e-01 -8.49239063e-03 2.80105054e-01 ... 7.95818269e-01 1.96216121e-01 -6.06830651e-03] [-7.01159000e-01 5.46453893e-01 5.05000532e-01 ... 5.50242364e-01 2.30493043e-02 -2.03415677e-01] [-3.02560180e-01 4.39959377e-01 9.99522805e-02 ... 4.27845240e-01 -3.48450571e-01 -1.46279231e-01]] [[-1.29283980e-01 -1.58547968e-01 6.55846775e-01 ... 8.36193934e-02 -8.65640402e-01 -4.50057834e-01] [-4.61940289e-01 -5.02294660e-01 2.07023546e-01 ... -1.43735008e-02 -7.56866634e-01 -5.03880918e-01] [-8.91360939e-01 -2.41364613e-01 6.62658438e-02 ... -2.44424269e-01 -2.37251744e-01 -2.50182062e-01] ... [-2.77897477e-01 -3.41691405e-01 5.18872261e-01 ... -4.98289019e-01 -9.72600698e-01 -3.32720280e-02] [-2.59879738e-01 -7.95279369e-02 1.12237535e-01 ... -2.38137648e-01 -9.27603006e-01 -1.42120272e-01] [-1.29496768e-01 5.38174927e-01 -1.97572902e-01 ... 4.58984971e-02 1.72943305e-02 2.14749023e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5764.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %5 : int[] = prim::Constant[value=[0, 0, 0]]() %6 : int[] = prim::Constant[value=[1, 1, 1]]() %7 : int[] = prim::Constant[value=[3, 3, 3]]() %8 : Tensor = aten::avg_pool3d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%8) fw_re: [[[[[ 0.158751 -0.0210897 0.07471832 ... -0.18215118 -0.21167347 -0.07447529] [ 0.16592208 -0.00963059 0.14060545 ... -0.01034149 -0.03342535 0.2051124 ] [ 0.07363363 0.04614665 0.17205416 ... -0.12821577 -0.14763275 -0.0849954 ] ... [-0.09330448 0.00557867 0.0218623 ... -0.12438256 0.12946682 0.45854878] [-0.03339214 0.07346591 -0.01730788 ... 0.07326087 0.19027089 0.3114727 ] [ 0.18095344 0.07293934 -0.00650376 ... 0.10152745 0.19889548 0.06689826]] [[ 0.09144376 -0.23041259 -0.19787084 ... 0.01168003 -0.11802223 -0.17345133] [ 0.20155665 -0.01508786 0.03060547 ... 0.12519954 0.01947683 0.02458096] [ 0.17617117 0.07072047 0.1270819 ... 0.07631401 -0.03925966 -0.13039169] ... [-0.0391308 -0.2017429 -0.07170827 ... -0.12614915 -0.02121907 0.22178663] [-0.0050375 -0.11402907 -0.03675439 ... -0.08029748 -0.10131989 -0.13444784] [ 0.20451029 0.01522087 0.0792696 ... -0.13302803 -0.08909369 -0.24809784]] [[ 0.5327766 0.12961598 -0.02486898 ... -0.00101288 -0.12181729 -0.17297496] [ 0.26469362 0.02271541 0.00063623 ... 0.08172622 -0.21404935 -0.3575115 ] [ 0.19593564 0.04152802 0.04116395 ... 0.06061731 -0.06806172 -0.27245438] ... [-0.16601728 -0.38368857 -0.16648862 ... 0.00966605 0.01888048 0.1971875 ] [-0.06760699 -0.25767586 -0.08047624 ... 
-0.11504167 -0.13422264 -0.22772723] [ 0.14536749 0.1353762 0.167403 ... -0.07616871 -0.04021677 -0.1962815 ]] ... [[ 0.03609646 -0.19508249 -0.19522363 ... -0.21036796 -0.20239915 -0.13667752] [-0.04716498 0.02280746 -0.02372289 ... -0.05550199 -0.07833252 0.04890773] [-0.02180829 0.2515017 0.17918056 ... -0.07396121 -0.1512827 0.01430858] ... [-0.14487231 0.02886644 0.18732326 ... -0.18286408 -0.35562077 -0.1269798 ] [-0.26339805 -0.26279578 -0.06744391 ... -0.00234975 -0.2621832 -0.15278722] [-0.23988064 -0.3092481 -0.26105013 ... 0.12422456 0.00242573 -0.04897769]] [[ 0.1605251 -0.012032 -0.12687123 ... -0.1040238 -0.05569258 -0.12836824] [ 0.12389929 0.09734157 0.03735669 ... -0.09281505 -0.16002472 -0.00409881] [ 0.07920679 0.09820815 0.09945942 ... -0.25959533 -0.40945357 -0.26494998] ... [ 0.22679447 0.45113122 0.576679 ... -0.13988157 -0.3117379 -0.22026326] [ 0.15658008 0.28900057 0.28757286 ... -0.1022581 -0.29702684 -0.19038181] [ 0.04938367 -0.03959416 -0.09681816 ... -0.03176238 -0.09149137 -0.08605251]] [[-0.12521236 -0.189941 -0.17386624 ... -0.02340823 -0.105764 -0.27521032] [-0.07402398 -0.09559487 -0.12401104 ... 0.13633655 -0.0407572 -0.0318267 ] [-0.17359625 -0.22527541 -0.13464113 ... 0.10837703 -0.20183802 -0.2717043 ] ... [ 0.31006926 0.31430906 0.32453722 ... 0.02933945 0.04327348 0.14569452] [ 0.2541412 0.1180517 0.02889737 ... 0.03643364 -0.0103979 0.06636763] [ 0.1565333 -0.13781968 -0.28378284 ... -0.14101246 -0.17707157 -0.2376875 ]]] [[[ 0.11285496 0.09822654 0.41344932 ... 0.1990123 -0.00316977 -0.14284433] [ 0.49190247 0.36698496 0.32717767 ... 0.15087515 -0.02851271 -0.15083617] [ 0.39624977 0.31429002 0.3940211 ... 0.38777888 0.24725054 0.08098838] ... [-0.22262973 -0.10862966 0.21123812 ... 0.01166469 -0.22804695 -0.3120149 ] [-0.19712661 -0.10180753 0.11102621 ... -0.06913963 -0.1775925 -0.12946418] [-0.10798296 -0.0797164 -0.01411714 ... -0.01555024 -0.0036074 0.12939556]] [[ 0.08248752 0.11455352 0.31032428 ... 
-0.28726193 -0.24687678 -0.20399532] [ 0.32445335 0.2522883 0.09305085 ... -0.2487774 -0.1720459 -0.18428202] [ 0.40479037 0.34349254 0.16695993 ... -0.05564654 0.07771682 0.06621464] ... [-0.07379383 0.1166089 0.3186709 ... 0.13690202 -0.25776374 -0.15909952] [ 0.08371367 0.26881343 0.34918976 ... 0.08418643 -0.16613048 -0.12543851] [ 0.2075125 0.08523151 -0.032301 ... 0.06239483 0.23174578 0.27574572]] [[-0.02583024 0.00726356 0.13266797 ... -0.4447688 -0.3705578 -0.1846822 ] [ 0.08187827 0.1297039 0.1274915 ... -0.2653842 -0.16341728 0.01534028] [ 0.00603126 0.2024434 0.11653601 ... -0.20773298 0.06872033 0.16296107] ... [-0.07707673 -0.08438505 -0.0292342 ... -0.09693941 -0.46320125 -0.15377635] [ 0.10596797 0.25775346 0.21465495 ... -0.06394546 -0.2792597 -0.10244251] [ 0.18805587 0.10966463 0.02231671 ... -0.07848748 0.17191476 0.17506772]] ... [[-0.11855708 -0.13657197 0.26074395 ... -0.04799508 -0.32901114 -0.18302047] [-0.12321406 -0.11064505 0.20945756 ... 0.09325649 -0.3267758 -0.42029932] [-0.14327528 0.03048761 0.2755811 ... 0.2634656 -0.19394861 -0.29613778] ... [-0.23616584 -0.06992088 -0.11698762 ... -0.2986265 -0.16221407 -0.15571272] [-0.17223804 -0.16768903 -0.23974572 ... -0.03911143 0.16666433 0.13172041] [-0.02984962 -0.1243315 -0.254841 ... 0.02807454 0.20128347 0.17601578]] [[-0.14645645 -0.26079273 0.10945982 ... -0.03779665 -0.35603517 -0.15335792] [-0.17708722 -0.27139688 0.01503854 ... -0.10429242 -0.39398858 -0.2873817 ] [-0.070191 0.08163215 0.2469637 ... -0.04036243 -0.23945121 -0.24113117] ... [-0.15288283 -0.01606376 -0.25208357 ... -0.39281648 -0.3392469 -0.24572638] [-0.03802798 -0.01068481 -0.18411382 ... -0.29987893 -0.03970407 -0.05429664] [-0.02416164 -0.03882963 -0.3074927 ... -0.1782362 0.18386005 0.23745291]] [[ 0.05460017 -0.14451693 0.08921537 ... -0.03009578 -0.03458266 0.10046429] [-0.13736704 -0.19579709 0.1543624 ... -0.150588 -0.15919687 -0.05578446] [-0.13925534 -0.19814274 0.13840495 ... 
-0.2337534 -0.08011636 -0.00604653] ... [-0.11605174 -0.00089451 -0.10492256 ... 0.03066428 -0.0761789 0.13951957] [-0.06289024 -0.06483959 -0.11578892 ... -0.12803122 0.06060705 0.14737673] [-0.2073969 -0.21965675 -0.30378535 ... 0.09599624 0.35031903 0.39096412]]] [[[ 0.26344532 0.4066726 0.20187818 ... -0.20262745 -0.36806616 -0.30433404] [ 0.04093897 0.15051174 -0.18549806 ... -0.05713269 -0.30410942 -0.2515376 ] [ 0.02060531 -0.06717297 -0.24149954 ... 0.1933884 -0.12305913 -0.03389737] ... [ 0.32194895 0.3130941 0.13652146 ... 0.05184085 0.01293402 -0.12978686] [ 0.24946553 0.15205044 0.0064767 ... 0.06096656 0.07620123 -0.10634197] [ 0.22393523 0.20930387 0.07360262 ... -0.24081993 -0.0765053 0.03905204]] [[ 0.05375468 0.04422637 0.04202982 ... -0.03769263 -0.16397902 -0.06882591] [-0.04567086 -0.03213467 -0.12081129 ... -0.17208776 -0.26691565 -0.07127965] [-0.19760129 -0.19752525 -0.1756513 ... 0.09405156 -0.1288228 -0.02115551] ... [ 0.37777287 0.14386052 -0.13986959 ... -0.0016032 -0.17935143 -0.25071305] [ 0.359346 0.09968678 -0.20554408 ... 0.14905846 0.08719789 -0.0589276 ] [ 0.37011927 0.28732267 0.19335917 ... 0.06173807 0.10968622 0.01310122]] [[ 0.06980481 -0.05280295 0.15199168 ... 0.03433061 0.01228906 0.02476343] [-0.01662093 -0.02045623 0.06619183 ... 0.02117334 0.05519467 0.13909097] [-0.3316279 -0.10183807 0.06543974 ... 0.29006684 0.08136205 0.11779494] ... [ 0.1698511 0.14313592 -0.12852852 ... 0.01827706 -0.09162546 -0.22597149] [ 0.3279134 0.19013326 -0.08391968 ... 0.16055615 0.126665 -0.03717193] [ 0.29316717 0.3400973 0.21757561 ... 0.21664244 0.2937988 0.10159072]] ... [[-0.12352961 0.21848877 -0.01727727 ... 0.23544846 -0.00886641 0.21894401] [-0.03970575 0.29116893 0.10693354 ... 0.24775048 0.10029966 0.03650415] [ 0.04177976 0.18395995 0.12268814 ... 0.04319476 0.15212888 0.12285739] ... [-0.23483944 -0.26924127 -0.1367129 ... 0.04051707 -0.13831727 0.04747731] [-0.3169563 -0.46863094 -0.56060356 ... 
-0.10547566 -0.21502933 -0.07316856] [-0.16714032 -0.26647362 -0.38785884 ... -0.19164257 -0.2271415 -0.02407837]] [[-0.09508651 0.11198134 0.00740879 ... 0.22429492 -0.09183774 0.14240722] [ 0.01479962 0.28183463 0.182836 ... 0.18697546 0.03594264 0.0834438 ] [-0.021594 0.19016953 0.22533514 ... -0.18573073 0.04083301 0.20690046] ... [-0.05980518 -0.17365612 -0.24755536 ... 0.19030553 -0.0680193 0.02370323] [-0.047937 -0.22690259 -0.36664015 ... -0.02507005 -0.26873735 -0.21401082] [ 0.07471755 -0.01513348 -0.30782 ... -0.15484294 -0.30850387 -0.03085613]] [[-0.13196504 0.10417908 0.18074585 ... 0.25157055 -0.20635675 -0.0967205 ] [-0.06921785 0.19536416 0.27540612 ... 0.18681465 -0.04215272 0.04005148] [ 0.07789256 0.17372304 0.36790383 ... -0.09518248 -0.01664816 0.28562912] ... [-0.0285116 -0.16168769 -0.29164085 ... 0.53725576 0.30981222 0.24069655] [-0.01912141 -0.09694824 -0.23398049 ... 0.26547128 0.14567989 0.23697174] [ 0.08187211 -0.08578348 -0.3515682 ... 0.08450851 -0.01587026 0.19767708]]]]]; ov_res: [[[[[ 0.158751 -0.0210897 0.07471832 ... -0.18215118 -0.21167347 -0.07447529] [ 0.16592208 -0.00963059 0.14060545 ... -0.01034149 -0.03342535 0.2051124 ] [ 0.07363363 0.04614665 0.17205416 ... -0.12821577 -0.14763275 -0.0849954 ] ... [-0.09330448 0.00557867 0.0218623 ... -0.12438256 0.12946682 0.45854878] [-0.03339214 0.07346591 -0.01730788 ... 0.07326087 0.19027089 0.3114727 ] [ 0.18095344 0.07293934 -0.00650376 ... 0.10152745 0.19889548 0.06689826]] [[ 0.09144376 -0.23041259 -0.19787084 ... 0.01168003 -0.11802223 -0.17345133] [ 0.20155665 -0.01508786 0.03060547 ... 0.12519954 0.01947683 0.02458096] [ 0.17617117 0.07072047 0.1270819 ... 0.07631401 -0.03925966 -0.13039169] ... [-0.0391308 -0.2017429 -0.07170827 ... -0.12614915 -0.02121907 0.22178663] [-0.0050375 -0.11402907 -0.03675439 ... -0.08029748 -0.10131989 -0.13444784] [ 0.20451029 0.01522087 0.0792696 ... -0.13302803 -0.08909369 -0.24809784]] [[ 0.5327766 0.12961598 -0.02486898 ... 
-0.00101288 -0.12181729 -0.17297496] [ 0.26469362 0.02271541 0.00063623 ... 0.08172622 -0.21404935 -0.3575115 ] [ 0.19593564 0.04152802 0.04116395 ... 0.06061731 -0.06806172 -0.27245438] ... [-0.16601728 -0.38368857 -0.16648862 ... 0.00966605 0.01888048 0.1971875 ] [-0.06760699 -0.25767586 -0.08047624 ... -0.11504167 -0.13422264 -0.22772723] [ 0.14536749 0.1353762 0.167403 ... -0.07616871 -0.04021677 -0.1962815 ]] ... [[ 0.03609646 -0.19508249 -0.19522363 ... -0.21036796 -0.20239915 -0.13667752] [-0.04716498 0.02280746 -0.02372289 ... -0.05550199 -0.07833252 0.04890773] [-0.02180829 0.2515017 0.17918056 ... -0.07396121 -0.1512827 0.01430858] ... [-0.14487231 0.02886644 0.18732326 ... -0.18286408 -0.35562077 -0.1269798 ] [-0.26339805 -0.26279578 -0.06744391 ... -0.00234975 -0.2621832 -0.15278722] [-0.23988064 -0.3092481 -0.26105013 ... 0.12422456 0.00242573 -0.04897769]] [[ 0.1605251 -0.012032 -0.12687123 ... -0.1040238 -0.05569258 -0.12836824] [ 0.12389929 0.09734157 0.03735669 ... -0.09281505 -0.16002472 -0.00409881] [ 0.07920679 0.09820815 0.09945942 ... -0.25959533 -0.40945357 -0.26494998] ... [ 0.22679447 0.45113122 0.576679 ... -0.13988157 -0.3117379 -0.22026326] [ 0.15658008 0.28900057 0.28757286 ... -0.1022581 -0.29702684 -0.19038181] [ 0.04938367 -0.03959416 -0.09681816 ... -0.03176238 -0.09149137 -0.08605251]] [[-0.12521236 -0.189941 -0.17386624 ... -0.02340823 -0.105764 -0.27521032] [-0.07402398 -0.09559487 -0.12401104 ... 0.13633655 -0.0407572 -0.0318267 ] [-0.17359625 -0.22527541 -0.13464113 ... 0.10837703 -0.20183802 -0.2717043 ] ... [ 0.31006926 0.31430906 0.32453722 ... 0.02933945 0.04327348 0.14569452] [ 0.2541412 0.1180517 0.02889737 ... 0.03643364 -0.0103979 0.06636763] [ 0.1565333 -0.13781968 -0.28378284 ... -0.14101246 -0.17707157 -0.2376875 ]]] [[[ 0.11285496 0.09822654 0.41344932 ... 0.1990123 -0.00316977 -0.14284433] [ 0.49190247 0.36698496 0.32717767 ... 0.15087515 -0.02851271 -0.15083617] [ 0.39624977 0.31429002 0.3940211 ... 
0.38777888 0.24725054 0.08098838] ... [-0.22262973 -0.10862966 0.21123812 ... 0.01166469 -0.22804695 -0.3120149 ] [-0.19712661 -0.10180753 0.11102621 ... -0.06913963 -0.1775925 -0.12946418] [-0.10798296 -0.0797164 -0.01411714 ... -0.01555024 -0.0036074 0.12939556]] [[ 0.08248752 0.11455352 0.31032428 ... -0.28726193 -0.24687678 -0.20399532] [ 0.32445335 0.2522883 0.09305085 ... -0.2487774 -0.1720459 -0.18428202] [ 0.40479037 0.34349254 0.16695993 ... -0.05564654 0.07771682 0.06621464] ... [-0.07379383 0.1166089 0.3186709 ... 0.13690202 -0.25776374 -0.15909952] [ 0.08371367 0.26881343 0.34918976 ... 0.08418643 -0.16613048 -0.12543851] [ 0.2075125 0.08523151 -0.032301 ... 0.06239483 0.23174578 0.27574572]] [[-0.02583024 0.00726356 0.13266797 ... -0.4447688 -0.3705578 -0.1846822 ] [ 0.08187827 0.1297039 0.1274915 ... -0.2653842 -0.16341728 0.01534028] [ 0.00603126 0.2024434 0.11653601 ... -0.20773298 0.06872033 0.16296107] ... [-0.07707673 -0.08438505 -0.0292342 ... -0.09693941 -0.46320125 -0.15377635] [ 0.10596797 0.25775346 0.21465495 ... -0.06394546 -0.2792597 -0.10244251] [ 0.18805587 0.10966463 0.02231671 ... -0.07848748 0.17191476 0.17506772]] ... [[-0.11855708 -0.13657197 0.26074395 ... -0.04799508 -0.32901114 -0.18302047] [-0.12321406 -0.11064505 0.20945756 ... 0.09325649 -0.3267758 -0.42029932] [-0.14327528 0.03048761 0.2755811 ... 0.2634656 -0.19394861 -0.29613778] ... [-0.23616584 -0.06992088 -0.11698762 ... -0.2986265 -0.16221407 -0.15571272] [-0.17223804 -0.16768903 -0.23974572 ... -0.03911143 0.16666433 0.13172041] [-0.02984962 -0.1243315 -0.254841 ... 0.02807454 0.20128347 0.17601578]] [[-0.14645645 -0.26079273 0.10945982 ... -0.03779665 -0.35603517 -0.15335792] [-0.17708722 -0.27139688 0.01503854 ... -0.10429242 -0.39398858 -0.2873817 ] [-0.070191 0.08163215 0.2469637 ... -0.04036243 -0.23945121 -0.24113117] ... [-0.15288283 -0.01606376 -0.25208357 ... -0.39281648 -0.3392469 -0.24572638] [-0.03802798 -0.01068481 -0.18411382 ... 
-0.29987893 -0.03970407 -0.05429664] [-0.02416164 -0.03882963 -0.3074927 ... -0.1782362 0.18386005 0.23745291]] [[ 0.05460017 -0.14451693 0.08921537 ... -0.03009578 -0.03458266 0.10046429] [-0.13736704 -0.19579709 0.1543624 ... -0.150588 -0.15919687 -0.05578446] [-0.13925534 -0.19814274 0.13840495 ... -0.2337534 -0.08011636 -0.00604653] ... [-0.11605174 -0.00089451 -0.10492256 ... 0.03066428 -0.0761789 0.13951957] [-0.06289024 -0.06483959 -0.11578892 ... -0.12803122 0.06060705 0.14737673] [-0.2073969 -0.21965675 -0.30378535 ... 0.09599624 0.35031903 0.39096412]]] [[[ 0.26344532 0.4066726 0.20187818 ... -0.20262745 -0.36806616 -0.30433404] [ 0.04093897 0.15051174 -0.18549806 ... -0.05713269 -0.30410942 -0.2515376 ] [ 0.02060531 -0.06717297 -0.24149954 ... 0.1933884 -0.12305913 -0.03389737] ... [ 0.32194895 0.3130941 0.13652146 ... 0.05184085 0.01293402 -0.12978686] [ 0.24946553 0.15205044 0.0064767 ... 0.06096656 0.07620123 -0.10634197] [ 0.22393523 0.20930387 0.07360262 ... -0.24081993 -0.0765053 0.03905204]] [[ 0.05375468 0.04422637 0.04202982 ... -0.03769263 -0.16397902 -0.06882591] [-0.04567086 -0.03213467 -0.12081129 ... -0.17208776 -0.26691565 -0.07127965] [-0.19760129 -0.19752525 -0.1756513 ... 0.09405156 -0.1288228 -0.02115551] ... [ 0.37777287 0.14386052 -0.13986959 ... -0.0016032 -0.17935143 -0.25071305] [ 0.359346 0.09968678 -0.20554408 ... 0.14905846 0.08719789 -0.0589276 ] [ 0.37011927 0.28732267 0.19335917 ... 0.06173807 0.10968622 0.01310122]] [[ 0.06980481 -0.05280295 0.15199168 ... 0.03433061 0.01228906 0.02476343] [-0.01662093 -0.02045623 0.06619183 ... 0.02117334 0.05519467 0.13909097] [-0.3316279 -0.10183807 0.06543974 ... 0.29006684 0.08136205 0.11779494] ... [ 0.1698511 0.14313592 -0.12852852 ... 0.01827706 -0.09162546 -0.22597149] [ 0.3279134 0.19013326 -0.08391968 ... 0.16055615 0.126665 -0.03717193] [ 0.29316717 0.3400973 0.21757561 ... 0.21664244 0.2937988 0.10159072]] ... [[-0.12352961 0.21848877 -0.01727727 ... 
0.23544846 -0.00886641 0.21894401] [-0.03970575 0.29116893 0.10693354 ... 0.24775048 0.10029966 0.03650415] [ 0.04177976 0.18395995 0.12268814 ... 0.04319476 0.15212888 0.12285739] ... [-0.23483944 -0.26924127 -0.1367129 ... 0.04051707 -0.13831727 0.04747731] [-0.3169563 -0.46863094 -0.56060356 ... -0.10547566 -0.21502933 -0.07316856] [-0.16714032 -0.26647362 -0.38785884 ... -0.19164257 -0.2271415 -0.02407837]] [[-0.09508651 0.11198134 0.00740879 ... 0.22429492 -0.09183774 0.14240722] [ 0.01479962 0.28183463 0.182836 ... 0.18697546 0.03594264 0.0834438 ] [-0.021594 0.19016953 0.22533514 ... -0.18573073 0.04083301 0.20690046] ... [-0.05980518 -0.17365612 -0.24755536 ... 0.19030553 -0.0680193 0.02370323] [-0.047937 -0.22690259 -0.36664015 ... -0.02507005 -0.26873735 -0.21401082] [ 0.07471755 -0.01513348 -0.30782 ... -0.15484294 -0.30850387 -0.03085613]] [[-0.13196504 0.10417908 0.18074585 ... 0.25157055 -0.20635675 -0.0967205 ] [-0.06921785 0.19536416 0.27540612 ... 0.18681465 -0.04215272 0.04005148] [ 0.07789256 0.17372304 0.36790383 ... -0.09518248 -0.01664816 0.28562912] ... [-0.0285116 -0.16168769 -0.29164085 ... 0.53725576 0.30981222 0.24069655] [-0.01912141 -0.09694824 -0.23398049 ... 0.26547128 0.14567989 0.23697174] [ 0.08187211 -0.08578348 -0.3515682 ... 0.08450851 -0.01587026 0.19767708]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': [1, 1, 1], 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5767.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %5 : int[] = prim::Constant[value=[1, 1, 1]]() %6 : int[] = prim::Constant[value=[3, 3, 3]]() %7 : Tensor = aten::avg_pool3d(%x, %6, %5, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%7) fw_re: [[[[[ 2.41425604e-01 1.66786179e-01 2.40042850e-01 ... 1.36739314e-02 -4.79991324e-02 -1.31047592e-01] [ 2.38436565e-01 1.84789687e-01 1.86959729e-01 ... -2.05083460e-01 -2.42493808e-01 -3.10765713e-01] [ 9.20576826e-02 -4.65302169e-02 -3.99345085e-02 ... -1.90947801e-01 -3.04721951e-01 -4.69419956e-01] ... [ 3.02801400e-01 -9.28159356e-02 1.03683122e-01 ... 3.91094804e-01 4.90441859e-01 6.78887784e-01] [ 3.16300578e-02 -3.08159947e-01 -1.94342032e-01 ... 4.09326583e-01 6.19229972e-01 6.59404099e-01] [-6.32042587e-02 -4.23968643e-01 -3.57226342e-01 ... 4.23792630e-01 5.45293629e-01 4.46554065e-01]] [[ 1.17364980e-01 1.84068799e-01 9.51661021e-02 ... -2.08668932e-02 -2.71107778e-02 -1.12366475e-01] [ 1.21376380e-01 1.78320050e-01 6.52148351e-02 ... -3.28134894e-01 -3.01941454e-01 -2.51385480e-01] [ 8.04393142e-02 1.41465357e-02 -4.30873930e-02 ... -3.65711212e-01 -2.89612919e-01 -2.23886788e-01] ... [ 8.23405981e-02 -1.67066678e-01 1.45228177e-01 ... 1.11309469e-01 3.35494012e-01 5.41881025e-01] [-9.13361181e-03 -2.95959055e-01 -1.10081106e-01 ... 1.41886130e-01 5.18481255e-01 5.89812994e-01] [-6.53452352e-02 -3.43556643e-01 -2.79684424e-01 ... 2.57583261e-01 4.89248753e-01 3.35172802e-01]] [[ 2.39214614e-01 2.17813939e-01 1.42867230e-02 ... 
6.94789961e-02 1.07042156e-01 1.81584224e-01] [ 2.59965628e-01 2.02404708e-01 3.52290682e-02 ... -2.80995041e-01 -1.35253772e-01 6.66303188e-02] [ 2.51252532e-01 1.63244799e-01 -5.01310406e-03 ... -2.35740110e-01 1.23059945e-02 1.75237119e-01] ... [ 1.16963536e-01 3.65597047e-02 2.81873345e-01 ... -3.37899089e-01 -2.03061607e-02 3.20819259e-01] [-3.26808691e-02 -2.14386940e-01 -1.25791449e-02 ... -4.09414619e-01 3.88749875e-03 3.38184237e-01] [-2.24176958e-01 -2.48363525e-01 -9.13842469e-02 ... -2.65359133e-01 -1.25834383e-02 2.12802127e-01]] ... [[ 3.79621238e-01 3.75796974e-01 2.71025032e-01 ... -1.49141610e-01 -4.83891815e-01 -4.81184810e-01] [ 3.83467764e-01 3.39783072e-01 -2.05428116e-02 ... -1.10382490e-01 -4.77116942e-01 -4.40694213e-01] [-1.26153067e-01 3.28945555e-02 -2.07796365e-01 ... -1.85599774e-01 -5.34943283e-01 -5.10182381e-01] ... [ 3.95005755e-02 -6.36225045e-02 -3.19225490e-01 ... -2.41161689e-01 -1.47101507e-01 -2.06143618e-01] [ 3.44657004e-01 2.39458591e-01 -1.02348804e-01 ... -2.36308798e-01 -5.14895953e-02 4.33162265e-02] [ 2.72369176e-01 1.82861850e-01 -1.09380029e-01 ... -2.71103352e-01 1.51490629e-01 3.97814721e-01]] [[ 2.15738714e-01 2.34484121e-01 1.43706873e-01 ... -7.95842037e-02 -4.12813663e-01 -6.06697261e-01] [ 2.67825544e-01 2.07439557e-01 -5.41544445e-02 ... -3.26953232e-02 -3.48614156e-01 -4.42328304e-01] [-2.16622561e-01 -1.99508920e-01 -4.53385204e-01 ... 4.70691696e-02 -1.70260623e-01 -1.52433813e-01] ... [ 1.63089167e-02 1.19279604e-03 -1.08510904e-01 ... -2.48404428e-01 -8.15719888e-02 -1.69406995e-01] [-3.08776423e-02 2.92098504e-02 -1.16280414e-01 ... -2.71862984e-01 -9.09147039e-03 -8.69394243e-02] [-2.53809690e-02 3.26847583e-02 -1.35441586e-01 ... -3.45339179e-01 1.40874147e-01 2.51809716e-01]] [[ 1.27688378e-01 3.69280070e-01 4.06328708e-01 ... -2.35610902e-02 -2.64154315e-01 -5.86703002e-01] [ 3.11687052e-01 3.47727716e-01 6.55168593e-02 ... 
-6.69992417e-02 -1.24641508e-01 -3.53796482e-01] [-2.46168569e-01 -2.29519576e-01 -3.94519866e-01 ... -5.86462282e-02 6.47033304e-02 -6.22438900e-02] ... [-2.43212417e-01 -1.44492492e-01 -5.04028536e-02 ... -2.41221324e-01 -2.26161271e-01 -3.15849543e-01] [-4.36510473e-01 -2.43751124e-01 -1.02752194e-01 ... -6.49881661e-01 -4.42762345e-01 -6.35506272e-01] [-2.89052486e-01 -2.86670059e-01 -1.08230509e-01 ... -7.50751793e-01 -3.72371227e-01 -4.47761953e-01]]] [[[ 2.39391118e-01 -1.73814390e-02 -4.01303411e-01 ... 2.72755593e-01 2.86610007e-01 4.74517882e-01] [ 1.80261001e-01 -6.40588161e-03 -2.69184381e-01 ... 1.95917368e-01 2.28365660e-02 1.89638674e-01] [ 7.33620450e-02 4.95940261e-02 -5.43661080e-02 ... -4.79889251e-02 -7.73377419e-02 1.31495118e-01] ... [ 4.37690139e-01 2.76598811e-01 8.91413391e-02 ... -3.04885209e-01 -6.00252569e-01 -7.26460755e-01] [ 1.65577173e-01 6.13438562e-02 -1.21814929e-01 ... 7.28498846e-02 -2.28048965e-01 -3.29384655e-01] [-1.80727810e-01 -1.99437365e-01 -3.66445899e-01 ... 1.05039559e-01 -1.79570988e-01 -1.42238230e-01]] [[ 1.64303944e-01 6.86569326e-03 -2.05729008e-01 ... -6.55081719e-02 1.46689087e-01 3.66738230e-01] [ 5.31954505e-02 -9.55610797e-02 -2.44627535e-01 ... 1.38065163e-02 4.85634804e-02 1.96940809e-01] [-1.09252416e-01 -1.61966145e-01 -2.00683296e-01 ... -3.45025584e-02 5.88538013e-02 2.24300072e-01] ... [ 3.92881721e-01 3.55134189e-01 1.15666300e-01 ... -1.58104330e-01 -2.83833086e-01 -2.87249237e-01] [ 3.97249572e-02 4.06735428e-02 -6.47582114e-02 ... 8.03875253e-02 -3.93747054e-02 -2.71414127e-02] [ 5.89155639e-03 -2.44161878e-02 -2.16915950e-01 ... -7.23437741e-02 -8.33821818e-02 -4.06011157e-02]] [[-1.18388675e-01 -2.18048897e-02 -9.19229835e-02 ... -4.93094921e-01 1.08066432e-01 3.13286871e-01] [-2.38002062e-01 -2.33817592e-01 -2.09163636e-01 ... -2.66578585e-01 3.90241779e-02 1.31087154e-01] [-3.35128695e-01 -3.87413919e-01 -2.49779016e-01 ... -1.87245086e-01 -1.04946002e-01 -8.87680054e-02] ... 
[ 3.05425059e-02 1.50753960e-01 -1.84268594e-01 ... 1.58408269e-01 2.30975717e-01 4.04454499e-01] [-2.24335089e-01 -2.56468095e-02 -2.72659510e-02 ... 9.85108837e-02 3.00484240e-01 4.17070717e-01] [-3.52513105e-01 -1.22078620e-01 -1.40802041e-01 ... -1.28836855e-01 2.58716524e-01 4.37651247e-01]] ... [[-4.19291556e-02 -1.64399847e-01 -1.66466564e-01 ... -1.31464237e-02 -1.02753572e-01 -3.15511316e-01] [-5.95961027e-02 -1.11354493e-01 -2.52170637e-02 ... -1.59786344e-01 -1.78773776e-01 -2.26086140e-01] [-1.45085603e-01 -1.94024026e-01 1.01061002e-03 ... 4.07392830e-02 4.81784269e-02 4.79048677e-02] ... [ 4.34316665e-01 3.66873175e-01 2.35237151e-01 ... 6.84062168e-02 -1.49554417e-01 -3.37917924e-01] [ 6.27143741e-01 5.03923118e-01 4.04690593e-01 ... -2.42547989e-01 -2.26447180e-01 -3.70557100e-01] [ 8.22316468e-01 4.90547448e-01 4.49452549e-01 ... -6.26153111e-01 -3.69493634e-01 -5.36584318e-01]] [[-9.61269811e-02 -1.43080816e-01 -3.98769900e-02 ... -1.92679450e-01 -5.29719926e-02 -8.50918591e-02] [-1.45144537e-01 -8.06927830e-02 3.75873558e-02 ... -3.05065513e-01 -1.49835706e-01 -5.44848368e-02] [-1.04349032e-01 -1.07547887e-01 3.21158650e-03 ... -1.43537238e-01 6.74856082e-02 6.88254535e-02] ... [ 5.27309477e-01 4.07318532e-01 2.66085565e-01 ... 3.84559706e-02 -2.56959703e-02 -2.25754231e-01] [ 4.81053203e-01 3.32710892e-01 2.31354401e-01 ... -8.51932466e-02 -1.53678253e-01 -2.93974012e-01] [ 5.33778131e-01 2.55239904e-01 2.13116854e-01 ... -3.57496411e-01 -2.95863509e-01 -5.05087018e-01]] [[-3.41261104e-02 -1.05352126e-01 8.15063193e-02 ... 1.14793159e-01 3.33744913e-01 2.43111536e-01] [-1.03144586e-01 -1.15333609e-01 -3.67738456e-02 ... -5.12238257e-02 8.65929797e-02 4.27447371e-02] [ 6.91300118e-03 -2.32795849e-01 -2.85772443e-01 ... 4.26335745e-02 2.81592846e-01 1.38910040e-01] ... [ 3.61142129e-01 2.07484543e-01 1.15734309e-01 ... -2.32816279e-01 -1.73534766e-01 -4.15378064e-01] [ 3.43705118e-02 1.89342685e-02 6.18284531e-02 ... 
3.71355340e-02 -8.81294310e-02 -3.47955585e-01] [-2.74606496e-01 -2.35356614e-01 -1.24839842e-01 ... -1.83756351e-01 -3.10278386e-01 -6.83552802e-01]]] [[[ 2.40189895e-01 -5.12371846e-02 5.88077717e-02 ... 1.26065731e-01 2.46467292e-01 5.76198757e-01] [ 2.29845777e-01 5.33079728e-02 8.56828690e-02 ... 1.64828971e-01 2.34918728e-01 4.37235832e-01] [-6.60498813e-02 -9.41034481e-02 5.72469048e-02 ... 2.92593867e-01 4.72143292e-01 6.49573505e-01] ... [ 2.56797999e-01 2.87407756e-01 1.05986275e-01 ... 1.48037836e-01 2.83026189e-01 4.00626272e-01] [ 2.80368719e-02 1.51218683e-01 1.81463450e-01 ... 2.22263977e-01 3.20233554e-01 4.91544098e-01] [-7.96361417e-02 -9.30153951e-02 2.06192750e-02 ... 2.00639293e-01 1.32933825e-01 3.07480276e-01]] [[ 1.43535482e-02 -7.37957358e-02 1.73054542e-02 ... -1.52061045e-01 -2.43619885e-02 2.45318606e-01] [ 1.70274332e-01 9.67076644e-02 1.21632971e-01 ... -7.82275870e-02 -2.15615910e-02 1.53159335e-01] [ 1.72738004e-02 4.06255526e-03 1.16597585e-01 ... 8.42269957e-02 1.84556127e-01 3.98509651e-01] ... [ 1.35078609e-01 2.77309358e-01 1.36965290e-01 ... 3.81043226e-01 2.40174934e-01 2.12875679e-01] [ 6.29107207e-02 2.15125963e-01 2.34928295e-01 ... 4.30005431e-01 2.84617603e-01 3.75292897e-01] [ 9.35224369e-02 1.15637384e-01 2.16587618e-01 ... 3.46205443e-01 5.64408042e-02 1.45356342e-01]] [[-1.28623396e-01 1.50800496e-02 1.37370676e-01 ... -6.61352798e-02 1.36608973e-01 2.68112034e-01] [ 2.37479676e-02 6.95056375e-03 1.36555970e-01 ... 2.00106744e-02 1.30263776e-01 2.33447209e-01] [-1.19495764e-01 -6.86865449e-02 7.23454133e-02 ... 9.73937809e-02 1.03203878e-01 1.66525289e-01] ... [ 1.25959307e-01 3.55684340e-01 3.47927690e-01 ... 3.57275814e-01 6.43418655e-02 1.89490654e-02] [-1.65359870e-01 1.69870406e-01 2.86047935e-01 ... 3.49566311e-01 2.14956596e-01 2.06897050e-01] [-2.33093977e-01 1.10225305e-01 3.85363847e-01 ... 3.17197055e-01 9.96260867e-02 9.42462161e-02]] ... [[-2.28018209e-01 -2.12903678e-01 -1.09559000e-01 ... 
-7.57877752e-02 -7.51479417e-02 -2.51796216e-01] [ 5.07577956e-02 4.83425893e-02 8.69470462e-03 ... 9.74110961e-02 4.43946645e-02 2.74260831e-03] [ 9.95907113e-02 1.83917180e-01 1.26133800e-01 ... 1.11697085e-01 2.02350721e-01 1.08245082e-01] ... [ 3.50097626e-01 3.66593152e-01 3.76655400e-01 ... -5.92847914e-02 -8.59224871e-02 -1.86881185e-01] [ 3.04439038e-01 3.15724730e-01 2.95712918e-01 ... -1.19187534e-01 -1.18878610e-01 -7.99909085e-02] [ 4.68100160e-01 3.27513218e-01 1.50239900e-01 ... 4.02031578e-02 -4.93546240e-02 1.31988870e-02]] [[-1.80251554e-01 -1.37149036e-01 -1.25090495e-01 ... -7.12098926e-02 -2.80506276e-02 3.33422534e-02] [-5.34140617e-02 1.08000278e-01 1.78024508e-02 ... -1.52325258e-01 -9.16775241e-02 -7.73596317e-02] [ 1.28934458e-01 3.39399844e-01 1.41293421e-01 ... -1.55610457e-01 -3.11970711e-04 -2.62418296e-02] ... [ 3.11414123e-01 2.58648694e-01 3.61556500e-01 ... 1.77735552e-01 1.52579784e-01 7.29496032e-02] [ 3.39019984e-01 2.02316701e-01 2.85814196e-01 ... -1.23764247e-01 -8.33393186e-02 -5.67391366e-02] [ 2.91294724e-01 4.47910354e-02 1.86569896e-02 ... -9.25941095e-02 -5.27468063e-02 -9.91623104e-02]] [[-5.67885414e-02 3.65903974e-03 -1.07121490e-01 ... -5.33473678e-02 2.57285327e-01 3.29945505e-01] [-5.88392131e-02 1.77007034e-01 -4.44589742e-03 ... -1.77565277e-01 2.55390275e-02 2.42560506e-02] [ 2.84943581e-02 3.24729174e-01 5.33078052e-02 ... -3.19783300e-01 -9.63272378e-02 -1.69918552e-01] ... [ 4.49351221e-01 3.56818259e-01 4.36801255e-01 ... 5.59906773e-02 2.49784335e-01 2.68295795e-01] [ 5.98493576e-01 2.50823975e-01 2.68577427e-01 ... -2.07888156e-01 5.62585192e-03 1.27365500e-01] [ 3.15697193e-01 4.04007323e-02 -6.85678050e-02 ... -2.05796435e-01 4.21393625e-02 1.01606503e-01]]]]]; ov_res: [[[[[ 2.41425604e-01 1.66786179e-01 2.40042850e-01 ... 1.36739314e-02 -4.79991324e-02 -1.31047592e-01] [ 2.38436565e-01 1.84789687e-01 1.86959729e-01 ... 
-2.05083460e-01 -2.42493808e-01 -3.10765713e-01] [ 9.20576826e-02 -4.65302169e-02 -3.99345085e-02 ... -1.90947801e-01 -3.04721951e-01 -4.69419956e-01] ... [ 3.02801400e-01 -9.28159356e-02 1.03683122e-01 ... 3.91094804e-01 4.90441859e-01 6.78887784e-01] [ 3.16300578e-02 -3.08159947e-01 -1.94342032e-01 ... 4.09326583e-01 6.19229972e-01 6.59404099e-01] [-6.32042587e-02 -4.23968643e-01 -3.57226342e-01 ... 4.23792630e-01 5.45293629e-01 4.46554065e-01]] [[ 1.17364980e-01 1.84068799e-01 9.51661021e-02 ... -2.08668932e-02 -2.71107778e-02 -1.12366475e-01] [ 1.21376380e-01 1.78320050e-01 6.52148351e-02 ... -3.28134894e-01 -3.01941454e-01 -2.51385480e-01] [ 8.04393142e-02 1.41465357e-02 -4.30873930e-02 ... -3.65711212e-01 -2.89612919e-01 -2.23886788e-01] ... [ 8.23405981e-02 -1.67066678e-01 1.45228177e-01 ... 1.11309469e-01 3.35494012e-01 5.41881025e-01] [-9.13361181e-03 -2.95959055e-01 -1.10081106e-01 ... 1.41886130e-01 5.18481255e-01 5.89812994e-01] [-6.53452352e-02 -3.43556643e-01 -2.79684424e-01 ... 2.57583261e-01 4.89248753e-01 3.35172802e-01]] [[ 2.39214614e-01 2.17813939e-01 1.42867230e-02 ... 6.94789961e-02 1.07042156e-01 1.81584224e-01] [ 2.59965628e-01 2.02404708e-01 3.52290682e-02 ... -2.80995041e-01 -1.35253772e-01 6.66303188e-02] [ 2.51252532e-01 1.63244799e-01 -5.01310406e-03 ... -2.35740110e-01 1.23059945e-02 1.75237119e-01] ... [ 1.16963536e-01 3.65597047e-02 2.81873345e-01 ... -3.37899089e-01 -2.03061607e-02 3.20819259e-01] [-3.26808691e-02 -2.14386940e-01 -1.25791449e-02 ... -4.09414619e-01 3.88749875e-03 3.38184237e-01] [-2.24176958e-01 -2.48363525e-01 -9.13842469e-02 ... -2.65359133e-01 -1.25834383e-02 2.12802127e-01]] ... [[ 3.79621238e-01 3.75796974e-01 2.71025032e-01 ... -1.49141610e-01 -4.83891815e-01 -4.81184810e-01] [ 3.83467764e-01 3.39783072e-01 -2.05428116e-02 ... -1.10382490e-01 -4.77116942e-01 -4.40694213e-01] [-1.26153067e-01 3.28945555e-02 -2.07796365e-01 ... -1.85599774e-01 -5.34943283e-01 -5.10182381e-01] ... 
[ 3.95005755e-02 -6.36225045e-02 -3.19225490e-01 ... -2.41161689e-01 -1.47101507e-01 -2.06143618e-01] [ 3.44657004e-01 2.39458591e-01 -1.02348804e-01 ... -2.36308798e-01 -5.14895953e-02 4.33162265e-02] [ 2.72369176e-01 1.82861850e-01 -1.09380029e-01 ... -2.71103352e-01 1.51490629e-01 3.97814721e-01]] [[ 2.15738714e-01 2.34484121e-01 1.43706873e-01 ... -7.95842037e-02 -4.12813663e-01 -6.06697261e-01] [ 2.67825544e-01 2.07439557e-01 -5.41544445e-02 ... -3.26953232e-02 -3.48614156e-01 -4.42328304e-01] [-2.16622561e-01 -1.99508920e-01 -4.53385204e-01 ... 4.70691696e-02 -1.70260623e-01 -1.52433813e-01] ... [ 1.63089167e-02 1.19279604e-03 -1.08510904e-01 ... -2.48404428e-01 -8.15719888e-02 -1.69406995e-01] [-3.08776423e-02 2.92098504e-02 -1.16280414e-01 ... -2.71862984e-01 -9.09147039e-03 -8.69394243e-02] [-2.53809690e-02 3.26847583e-02 -1.35441586e-01 ... -3.45339179e-01 1.40874147e-01 2.51809716e-01]] [[ 1.27688378e-01 3.69280070e-01 4.06328708e-01 ... -2.35610902e-02 -2.64154315e-01 -5.86703002e-01] [ 3.11687052e-01 3.47727716e-01 6.55168593e-02 ... -6.69992417e-02 -1.24641508e-01 -3.53796482e-01] [-2.46168569e-01 -2.29519576e-01 -3.94519866e-01 ... -5.86462282e-02 6.47033304e-02 -6.22438900e-02] ... [-2.43212417e-01 -1.44492492e-01 -5.04028536e-02 ... -2.41221324e-01 -2.26161271e-01 -3.15849543e-01] [-4.36510473e-01 -2.43751124e-01 -1.02752194e-01 ... -6.49881661e-01 -4.42762345e-01 -6.35506272e-01] [-2.89052486e-01 -2.86670059e-01 -1.08230509e-01 ... -7.50751793e-01 -3.72371227e-01 -4.47761953e-01]]] [[[ 2.39391118e-01 -1.73814390e-02 -4.01303411e-01 ... 2.72755593e-01 2.86610007e-01 4.74517882e-01] [ 1.80261001e-01 -6.40588161e-03 -2.69184381e-01 ... 1.95917368e-01 2.28365660e-02 1.89638674e-01] [ 7.33620450e-02 4.95940261e-02 -5.43661080e-02 ... -4.79889251e-02 -7.73377419e-02 1.31495118e-01] ... [ 4.37690139e-01 2.76598811e-01 8.91413391e-02 ... -3.04885209e-01 -6.00252569e-01 -7.26460755e-01] [ 1.65577173e-01 6.13438562e-02 -1.21814929e-01 ... 
7.28498846e-02 -2.28048965e-01 -3.29384655e-01] [-1.80727810e-01 -1.99437365e-01 -3.66445899e-01 ... 1.05039559e-01 -1.79570988e-01 -1.42238230e-01]] [[ 1.64303944e-01 6.86569326e-03 -2.05729008e-01 ... -6.55081719e-02 1.46689087e-01 3.66738230e-01] [ 5.31954505e-02 -9.55610797e-02 -2.44627535e-01 ... 1.38065163e-02 4.85634804e-02 1.96940809e-01] [-1.09252416e-01 -1.61966145e-01 -2.00683296e-01 ... -3.45025584e-02 5.88538013e-02 2.24300072e-01] ... [ 3.92881721e-01 3.55134189e-01 1.15666300e-01 ... -1.58104330e-01 -2.83833086e-01 -2.87249237e-01] [ 3.97249572e-02 4.06735428e-02 -6.47582114e-02 ... 8.03875253e-02 -3.93747054e-02 -2.71414127e-02] [ 5.89155639e-03 -2.44161878e-02 -2.16915950e-01 ... -7.23437741e-02 -8.33821818e-02 -4.06011157e-02]] [[-1.18388675e-01 -2.18048897e-02 -9.19229835e-02 ... -4.93094921e-01 1.08066432e-01 3.13286871e-01] [-2.38002062e-01 -2.33817592e-01 -2.09163636e-01 ... -2.66578585e-01 3.90241779e-02 1.31087154e-01] [-3.35128695e-01 -3.87413919e-01 -2.49779016e-01 ... -1.87245086e-01 -1.04946002e-01 -8.87680054e-02] ... [ 3.05425059e-02 1.50753960e-01 -1.84268594e-01 ... 1.58408269e-01 2.30975717e-01 4.04454499e-01] [-2.24335089e-01 -2.56468095e-02 -2.72659510e-02 ... 9.85108837e-02 3.00484240e-01 4.17070717e-01] [-3.52513105e-01 -1.22078620e-01 -1.40802041e-01 ... -1.28836855e-01 2.58716524e-01 4.37651247e-01]] ... [[-4.19291556e-02 -1.64399847e-01 -1.66466564e-01 ... -1.31464237e-02 -1.02753572e-01 -3.15511316e-01] [-5.95961027e-02 -1.11354493e-01 -2.52170637e-02 ... -1.59786344e-01 -1.78773776e-01 -2.26086140e-01] [-1.45085603e-01 -1.94024026e-01 1.01061002e-03 ... 4.07392830e-02 4.81784269e-02 4.79048677e-02] ... [ 4.34316665e-01 3.66873175e-01 2.35237151e-01 ... 6.84062168e-02 -1.49554417e-01 -3.37917924e-01] [ 6.27143741e-01 5.03923118e-01 4.04690593e-01 ... -2.42547989e-01 -2.26447180e-01 -3.70557100e-01] [ 8.22316468e-01 4.90547448e-01 4.49452549e-01 ... 
-6.26153111e-01 -3.69493634e-01 -5.36584318e-01]] [[-9.61269811e-02 -1.43080816e-01 -3.98769900e-02 ... -1.92679450e-01 -5.29719926e-02 -8.50918591e-02] [-1.45144537e-01 -8.06927830e-02 3.75873558e-02 ... -3.05065513e-01 -1.49835706e-01 -5.44848368e-02] [-1.04349032e-01 -1.07547887e-01 3.21158650e-03 ... -1.43537238e-01 6.74856082e-02 6.88254535e-02] ... [ 5.27309477e-01 4.07318532e-01 2.66085565e-01 ... 3.84559706e-02 -2.56959703e-02 -2.25754231e-01] [ 4.81053203e-01 3.32710892e-01 2.31354401e-01 ... -8.51932466e-02 -1.53678253e-01 -2.93974012e-01] [ 5.33778131e-01 2.55239904e-01 2.13116854e-01 ... -3.57496411e-01 -2.95863509e-01 -5.05087018e-01]] [[-3.41261104e-02 -1.05352126e-01 8.15063193e-02 ... 1.14793159e-01 3.33744913e-01 2.43111536e-01] [-1.03144586e-01 -1.15333609e-01 -3.67738456e-02 ... -5.12238257e-02 8.65929797e-02 4.27447371e-02] [ 6.91300118e-03 -2.32795849e-01 -2.85772443e-01 ... 4.26335745e-02 2.81592846e-01 1.38910040e-01] ... [ 3.61142129e-01 2.07484543e-01 1.15734309e-01 ... -2.32816279e-01 -1.73534766e-01 -4.15378064e-01] [ 3.43705118e-02 1.89342685e-02 6.18284531e-02 ... 3.71355340e-02 -8.81294310e-02 -3.47955585e-01] [-2.74606496e-01 -2.35356614e-01 -1.24839842e-01 ... -1.83756351e-01 -3.10278386e-01 -6.83552802e-01]]] [[[ 2.40189895e-01 -5.12371846e-02 5.88077717e-02 ... 1.26065731e-01 2.46467292e-01 5.76198757e-01] [ 2.29845777e-01 5.33079728e-02 8.56828690e-02 ... 1.64828971e-01 2.34918728e-01 4.37235832e-01] [-6.60498813e-02 -9.41034481e-02 5.72469048e-02 ... 2.92593867e-01 4.72143292e-01 6.49573505e-01] ... [ 2.56797999e-01 2.87407756e-01 1.05986275e-01 ... 1.48037836e-01 2.83026189e-01 4.00626272e-01] [ 2.80368719e-02 1.51218683e-01 1.81463450e-01 ... 2.22263977e-01 3.20233554e-01 4.91544098e-01] [-7.96361417e-02 -9.30153951e-02 2.06192750e-02 ... 2.00639293e-01 1.32933825e-01 3.07480276e-01]] [[ 1.43535482e-02 -7.37957358e-02 1.73054542e-02 ... 
-1.52061045e-01 -2.43619885e-02 2.45318606e-01] [ 1.70274332e-01 9.67076644e-02 1.21632971e-01 ... -7.82275870e-02 -2.15615910e-02 1.53159335e-01] [ 1.72738004e-02 4.06255526e-03 1.16597585e-01 ... 8.42269957e-02 1.84556127e-01 3.98509651e-01] ... [ 1.35078609e-01 2.77309358e-01 1.36965290e-01 ... 3.81043226e-01 2.40174934e-01 2.12875679e-01] [ 6.29107207e-02 2.15125963e-01 2.34928295e-01 ... 4.30005431e-01 2.84617603e-01 3.75292897e-01] [ 9.35224369e-02 1.15637384e-01 2.16587618e-01 ... 3.46205443e-01 5.64408042e-02 1.45356342e-01]] [[-1.28623396e-01 1.50800496e-02 1.37370676e-01 ... -6.61352798e-02 1.36608973e-01 2.68112034e-01] [ 2.37479676e-02 6.95056375e-03 1.36555970e-01 ... 2.00106744e-02 1.30263776e-01 2.33447209e-01] [-1.19495764e-01 -6.86865449e-02 7.23454133e-02 ... 9.73937809e-02 1.03203878e-01 1.66525289e-01] ... [ 1.25959307e-01 3.55684340e-01 3.47927690e-01 ... 3.57275814e-01 6.43418655e-02 1.89490654e-02] [-1.65359870e-01 1.69870406e-01 2.86047935e-01 ... 3.49566311e-01 2.14956596e-01 2.06897050e-01] [-2.33093977e-01 1.10225305e-01 3.85363847e-01 ... 3.17197055e-01 9.96260867e-02 9.42462161e-02]] ... [[-2.28018209e-01 -2.12903678e-01 -1.09559000e-01 ... -7.57877752e-02 -7.51479417e-02 -2.51796216e-01] [ 5.07577956e-02 4.83425893e-02 8.69470462e-03 ... 9.74110961e-02 4.43946645e-02 2.74260831e-03] [ 9.95907113e-02 1.83917180e-01 1.26133800e-01 ... 1.11697085e-01 2.02350721e-01 1.08245082e-01] ... [ 3.50097626e-01 3.66593152e-01 3.76655400e-01 ... -5.92847914e-02 -8.59224871e-02 -1.86881185e-01] [ 3.04439038e-01 3.15724730e-01 2.95712918e-01 ... -1.19187534e-01 -1.18878610e-01 -7.99909085e-02] [ 4.68100160e-01 3.27513218e-01 1.50239900e-01 ... 4.02031578e-02 -4.93546240e-02 1.31988870e-02]] [[-1.80251554e-01 -1.37149036e-01 -1.25090495e-01 ... -7.12098926e-02 -2.80506276e-02 3.33422534e-02] [-5.34140617e-02 1.08000278e-01 1.78024508e-02 ... -1.52325258e-01 -9.16775241e-02 -7.73596317e-02] [ 1.28934458e-01 3.39399844e-01 1.41293421e-01 ... 
-1.55610457e-01 -3.11970711e-04 -2.62418296e-02] ... [ 3.11414123e-01 2.58648694e-01 3.61556500e-01 ... 1.77735552e-01 1.52579784e-01 7.29496032e-02] [ 3.39019984e-01 2.02316701e-01 2.85814196e-01 ... -1.23764247e-01 -8.33393186e-02 -5.67391366e-02] [ 2.91294724e-01 4.47910354e-02 1.86569896e-02 ... -9.25941095e-02 -5.27468063e-02 -9.91623104e-02]] [[-5.67885414e-02 3.65903974e-03 -1.07121490e-01 ... -5.33473678e-02 2.57285327e-01 3.29945505e-01] [-5.88392131e-02 1.77007034e-01 -4.44589742e-03 ... -1.77565277e-01 2.55390275e-02 2.42560506e-02] [ 2.84943581e-02 3.24729174e-01 5.33078052e-02 ... -3.19783300e-01 -9.63272378e-02 -1.69918552e-01] ... [ 4.49351221e-01 3.56818259e-01 4.36801255e-01 ... 5.59906773e-02 2.49784335e-01 2.68295795e-01] [ 5.98493576e-01 2.50823975e-01 2.68577427e-01 ... -2.07888156e-01 5.62585192e-03 1.27365500e-01] [ 3.15697193e-01 4.04007323e-02 -6.85678050e-02 ... -2.05796435e-01 4.21393625e-02 1.01606503e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': [3, 3, 3], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5770.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %5 : int[] = prim::Constant[value=[0, 0, 0]]() %6 : int[] = prim::Constant[value=[3, 3, 3]]() %7 : Tensor = aten::avg_pool3d(%x, %6, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%7) fw_re: [[[[[-8.64252597e-02 8.13391581e-02 -1.15400620e-01 -2.84541488e-01 -9.03247297e-02] [-7.19670879e-07 -1.19868845e-01 4.22162153e-02 -5.51148802e-02 -3.30317281e-02] [-1.57480761e-02 -1.37275073e-03 -4.79956031e-01 -5.33631861e-01 -3.35410684e-02] [-1.90707028e-01 -2.50101328e-01 2.50907511e-01 4.95183542e-02 -1.17641069e-01] [-9.30851400e-02 -2.03893378e-01 5.20940609e-02 7.48724043e-02 -4.38051730e-01]] [[ 3.14207971e-01 1.50852464e-02 2.67052621e-01 -1.51012808e-01 -1.31598637e-01] [-3.48420471e-01 -1.75678328e-01 -2.41648495e-01 -2.77529862e-02 1.23317823e-01] [ 7.96314403e-02 -4.11192477e-02 2.12051943e-01 -1.20615818e-01 -8.17226246e-03] [-1.15050212e-01 6.53171241e-02 -2.92417198e-01 2.22815514e-01 -1.85186014e-01] [-1.30997956e-01 -5.94008937e-02 -2.32008874e-01 1.89721491e-02 -2.10000783e-01]] [[-1.36510596e-01 2.59927120e-02 8.20087567e-02 -2.41323367e-01 -3.67276371e-01] [-2.69824434e-02 1.15385540e-02 3.08956593e-01 8.49392563e-02 2.71366388e-01] [ 3.76975611e-02 1.49492864e-02 -1.11547574e-01 -1.12208568e-01 1.11491047e-01] [-3.33692700e-01 9.37647279e-03 3.06767136e-01 -1.08122349e-01 -1.14241175e-01] [-1.14957742e-01 8.59525874e-02 6.81843758e-02 1.61591128e-01 -1.08885482e-01]] [[ 1.13991369e-02 2.75559234e-03 2.93101728e-01 3.77647966e-01 -1.08776435e-01] [ 1.32387020e-02 
-1.24292322e-01 2.17675209e-01 -1.33433551e-01 -1.83738872e-01] [-3.31795007e-01 1.41123503e-01 -3.70606452e-01 -1.52059063e-01 -8.49040970e-02] [-8.15218166e-02 4.34864610e-01 2.49721691e-01 2.09857807e-01 -2.71912694e-01] [ 1.55458627e-02 2.23958969e-01 -1.95867226e-01 -2.10591137e-01 2.98495561e-01]] [[-1.06393099e-01 -3.56069878e-02 2.17434272e-01 1.48598120e-01 1.28563806e-01] [ 2.89677590e-01 -2.29977071e-01 2.42983505e-01 -5.59230074e-02 -6.55018911e-02] [-3.18559587e-01 -3.95959765e-02 -1.30229130e-01 -7.25793913e-02 -4.07936834e-02] [-2.57552117e-01 -4.83071022e-02 -1.65885866e-01 -2.20188841e-01 -2.84371942e-01] [-1.88229959e-02 6.68778038e-03 -3.68331969e-01 -3.49838585e-01 -1.45932287e-01]]] [[[-1.37938201e-01 -2.33422294e-01 -1.48881733e-01 -9.08456147e-02 4.06661242e-01] [ 5.42907976e-02 1.54858097e-01 8.11354220e-02 2.63011724e-01 -2.21244171e-01] [ 6.69276044e-02 -1.90861881e-01 2.99318098e-02 1.77812606e-01 9.60529447e-02] [-6.38039932e-02 3.62580195e-02 9.10618678e-02 7.83909783e-02 -2.85300724e-02] [ 2.60369748e-01 -8.68399348e-03 1.04928970e-01 8.93786475e-02 -3.66032213e-01]] [[-2.59913206e-01 2.70714790e-01 -3.64000291e-01 1.57961454e-02 -1.04596026e-01] [ 3.50073606e-01 -1.15339749e-01 2.43037846e-02 2.91247457e-01 1.46074340e-01] [ 1.16557799e-01 -3.33867759e-01 9.13147582e-04 7.17967525e-02 -1.72133982e-01] [ 6.23459518e-02 -5.05713709e-02 -6.58260435e-02 -1.55721754e-01 1.08022459e-01] [-2.09947854e-01 -5.04774600e-02 -1.68617830e-01 4.02459979e-01 5.78118376e-02]] [[ 2.43808210e-01 1.24105297e-01 -1.06747061e-01 1.06646933e-01 -2.02793822e-01] [-2.93617517e-01 -1.17718466e-01 1.29597291e-01 -1.36508271e-01 8.29569027e-02] [ 1.10368356e-01 2.11814135e-01 3.65147144e-01 1.37797385e-01 2.17187852e-01] [-2.05217764e-01 3.39199632e-01 1.26122698e-01 1.50414735e-01 -4.63851206e-02] [-1.34252727e-01 2.11180046e-01 -7.55069181e-02 -1.12513244e-01 5.73390760e-02]] [[ 3.62335742e-01 3.01838070e-01 3.18266451e-01 2.17250377e-01 1.25155523e-01] 
[-2.05281880e-02 1.78136989e-01 1.42253429e-01 1.04730979e-01 -3.74702178e-02] [ 1.55040711e-01 -2.01617301e-01 -1.80381030e-01 2.48402104e-01 3.58892977e-01] [-8.06593001e-02 3.16453040e-01 -3.76448959e-01 -8.48229006e-02 7.97195062e-02] [ 2.89877743e-01 3.85831267e-01 -2.94417381e-01 -2.26200461e-01 2.41569877e-02]] [[-1.92147363e-02 -2.38001451e-01 8.28563049e-02 2.35932454e-01 -1.23861641e-01] [-2.84719110e-01 -3.46180022e-01 1.86215058e-01 -1.45068839e-01 -1.99214742e-01] [ 3.10100932e-02 -5.68457767e-02 -1.97142005e-01 1.43205553e-01 -3.61498028e-01] [-1.60396591e-01 -1.99229538e-01 3.37382376e-01 1.90729976e-01 1.29538685e-01] [ 3.76356244e-01 1.66040003e-01 -3.47856104e-01 8.45350772e-02 2.08732292e-01]]] [[[-1.14998817e-02 -2.10587204e-01 2.42425039e-01 -1.31857276e-01 3.11317772e-01] [-3.45678232e-03 7.24619702e-02 -3.20142284e-02 -1.37399852e-01 -4.60959598e-02] [ 7.02400357e-02 -3.85556407e-02 -2.33722433e-01 -1.49863929e-01 -3.87471616e-02] [ 8.70093238e-03 5.45696355e-02 -2.25527257e-01 1.08048402e-01 1.08743131e-01] [ 1.31268620e-01 -2.24866480e-01 -5.75916946e-01 1.70705333e-01 2.22514138e-01]] [[-4.27957982e-01 2.79459599e-02 6.33711964e-02 1.68257430e-01 3.66242714e-02] [ 1.49449725e-02 2.09918216e-01 4.27537113e-02 3.38709712e-01 8.71956721e-02] [ 5.04155099e-01 -2.70783931e-01 -1.44480884e-01 -1.29545465e-01 -9.14788768e-02] [-2.21791193e-01 -1.29397258e-01 2.10416675e-01 1.14027254e-01 3.32900316e-01] [ 3.63755435e-01 -1.39862329e-01 7.36098140e-02 1.46658853e-01 1.77866444e-01]] [[ 8.49615186e-02 -2.50995755e-01 1.73006859e-02 2.31847972e-01 -1.94280878e-01] [-1.12069063e-01 4.87739444e-01 -1.00164361e-01 5.00589430e-01 -2.21821234e-01] [-4.82742526e-02 -2.57976174e-01 6.23085946e-02 4.32221025e-01 1.78052068e-01] [ 5.92709064e-01 -1.10586405e-01 3.32198143e-01 -1.76074564e-01 -4.04900014e-01] [ 1.46878645e-01 -2.33891368e-01 6.04335144e-02 1.45728871e-01 -1.09522834e-01]] [[ 2.97343105e-01 -1.13729805e-01 -2.67941207e-01 -2.07372829e-01 
5.04138432e-02] [-1.90508589e-01 -2.54234206e-02 1.25306532e-01 -5.34380786e-03 3.37368175e-02] [-2.07456127e-01 -2.96852618e-01 3.13523710e-02 1.19123809e-01 2.29502115e-02] [ 8.18506107e-02 -7.54448995e-02 1.87225312e-01 -1.67067245e-01 -5.25904112e-02] [ 5.05736843e-02 -1.60555784e-02 1.46238744e-01 1.68964826e-02 4.18283604e-02]] [[-1.68394566e-01 -1.58179745e-01 6.03000931e-02 -6.76507354e-02 -3.90822105e-02] [ 3.32268387e-01 -1.89951405e-01 -3.00950855e-01 4.29239839e-01 2.75964171e-01] [-1.67070001e-01 -1.58201292e-01 -8.16998482e-02 -1.39830887e-01 -2.17653394e-01] [-1.71790481e-01 5.80403546e-04 -1.57252580e-01 1.10538244e-01 -2.29028478e-01] [ 9.15039182e-02 1.05513394e-01 -2.15479597e-01 1.71757355e-01 1.94891438e-01]]]]]; ov_res: [[[[[-8.64252597e-02 8.13391581e-02 -1.15400620e-01 -2.84541488e-01 -9.03247297e-02] [-7.19670879e-07 -1.19868845e-01 4.22162153e-02 -5.51148802e-02 -3.30317281e-02] [-1.57480761e-02 -1.37275073e-03 -4.79956031e-01 -5.33631861e-01 -3.35410684e-02] [-1.90707028e-01 -2.50101328e-01 2.50907511e-01 4.95183542e-02 -1.17641069e-01] [-9.30851400e-02 -2.03893378e-01 5.20940609e-02 7.48724043e-02 -4.38051730e-01]] [[ 3.14207971e-01 1.50852464e-02 2.67052621e-01 -1.51012808e-01 -1.31598637e-01] [-3.48420471e-01 -1.75678328e-01 -2.41648495e-01 -2.77529862e-02 1.23317823e-01] [ 7.96314403e-02 -4.11192477e-02 2.12051943e-01 -1.20615818e-01 -8.17226246e-03] [-1.15050212e-01 6.53171241e-02 -2.92417198e-01 2.22815514e-01 -1.85186014e-01] [-1.30997956e-01 -5.94008937e-02 -2.32008874e-01 1.89721491e-02 -2.10000783e-01]] [[-1.36510596e-01 2.59927120e-02 8.20087567e-02 -2.41323367e-01 -3.67276371e-01] [-2.69824434e-02 1.15385540e-02 3.08956593e-01 8.49392563e-02 2.71366388e-01] [ 3.76975611e-02 1.49492864e-02 -1.11547574e-01 -1.12208568e-01 1.11491047e-01] [-3.33692700e-01 9.37647279e-03 3.06767136e-01 -1.08122349e-01 -1.14241175e-01] [-1.14957742e-01 8.59525874e-02 6.81843758e-02 1.61591128e-01 -1.08885482e-01]] [[ 1.13991369e-02 2.75559234e-03 
2.93101728e-01 3.77647966e-01 -1.08776435e-01] [ 1.32387020e-02 -1.24292322e-01 2.17675209e-01 -1.33433551e-01 -1.83738872e-01] [-3.31795007e-01 1.41123503e-01 -3.70606452e-01 -1.52059063e-01 -8.49040970e-02] [-8.15218166e-02 4.34864610e-01 2.49721691e-01 2.09857807e-01 -2.71912694e-01] [ 1.55458627e-02 2.23958969e-01 -1.95867226e-01 -2.10591137e-01 2.98495561e-01]] [[-1.06393099e-01 -3.56069878e-02 2.17434272e-01 1.48598120e-01 1.28563806e-01] [ 2.89677590e-01 -2.29977071e-01 2.42983505e-01 -5.59230074e-02 -6.55018911e-02] [-3.18559587e-01 -3.95959765e-02 -1.30229130e-01 -7.25793913e-02 -4.07936834e-02] [-2.57552117e-01 -4.83071022e-02 -1.65885866e-01 -2.20188841e-01 -2.84371942e-01] [-1.88229959e-02 6.68778038e-03 -3.68331969e-01 -3.49838585e-01 -1.45932287e-01]]] [[[-1.37938201e-01 -2.33422294e-01 -1.48881733e-01 -9.08456147e-02 4.06661242e-01] [ 5.42907976e-02 1.54858097e-01 8.11354220e-02 2.63011724e-01 -2.21244171e-01] [ 6.69276044e-02 -1.90861881e-01 2.99318098e-02 1.77812606e-01 9.60529447e-02] [-6.38039932e-02 3.62580195e-02 9.10618678e-02 7.83909783e-02 -2.85300724e-02] [ 2.60369748e-01 -8.68399348e-03 1.04928970e-01 8.93786475e-02 -3.66032213e-01]] [[-2.59913206e-01 2.70714790e-01 -3.64000291e-01 1.57961454e-02 -1.04596026e-01] [ 3.50073606e-01 -1.15339749e-01 2.43037846e-02 2.91247457e-01 1.46074340e-01] [ 1.16557799e-01 -3.33867759e-01 9.13147582e-04 7.17967525e-02 -1.72133982e-01] [ 6.23459518e-02 -5.05713709e-02 -6.58260435e-02 -1.55721754e-01 1.08022459e-01] [-2.09947854e-01 -5.04774600e-02 -1.68617830e-01 4.02459979e-01 5.78118376e-02]] [[ 2.43808210e-01 1.24105297e-01 -1.06747061e-01 1.06646933e-01 -2.02793822e-01] [-2.93617517e-01 -1.17718466e-01 1.29597291e-01 -1.36508271e-01 8.29569027e-02] [ 1.10368356e-01 2.11814135e-01 3.65147144e-01 1.37797385e-01 2.17187852e-01] [-2.05217764e-01 3.39199632e-01 1.26122698e-01 1.50414735e-01 -4.63851206e-02] [-1.34252727e-01 2.11180046e-01 -7.55069181e-02 -1.12513244e-01 5.73390760e-02]] [[ 3.62335742e-01 
3.01838070e-01 3.18266451e-01 2.17250377e-01 1.25155523e-01] [-2.05281880e-02 1.78136989e-01 1.42253429e-01 1.04730979e-01 -3.74702178e-02] [ 1.55040711e-01 -2.01617301e-01 -1.80381030e-01 2.48402104e-01 3.58892977e-01] [-8.06593001e-02 3.16453040e-01 -3.76448959e-01 -8.48229006e-02 7.97195062e-02] [ 2.89877743e-01 3.85831267e-01 -2.94417381e-01 -2.26200461e-01 2.41569877e-02]] [[-1.92147363e-02 -2.38001451e-01 8.28563049e-02 2.35932454e-01 -1.23861641e-01] [-2.84719110e-01 -3.46180022e-01 1.86215058e-01 -1.45068839e-01 -1.99214742e-01] [ 3.10100932e-02 -5.68457767e-02 -1.97142005e-01 1.43205553e-01 -3.61498028e-01] [-1.60396591e-01 -1.99229538e-01 3.37382376e-01 1.90729976e-01 1.29538685e-01] [ 3.76356244e-01 1.66040003e-01 -3.47856104e-01 8.45350772e-02 2.08732292e-01]]] [[[-1.14998817e-02 -2.10587204e-01 2.42425039e-01 -1.31857276e-01 3.11317772e-01] [-3.45678232e-03 7.24619702e-02 -3.20142284e-02 -1.37399852e-01 -4.60959598e-02] [ 7.02400357e-02 -3.85556407e-02 -2.33722433e-01 -1.49863929e-01 -3.87471616e-02] [ 8.70093238e-03 5.45696355e-02 -2.25527257e-01 1.08048402e-01 1.08743131e-01] [ 1.31268620e-01 -2.24866480e-01 -5.75916946e-01 1.70705333e-01 2.22514138e-01]] [[-4.27957982e-01 2.79459599e-02 6.33711964e-02 1.68257430e-01 3.66242714e-02] [ 1.49449725e-02 2.09918216e-01 4.27537113e-02 3.38709712e-01 8.71956721e-02] [ 5.04155099e-01 -2.70783931e-01 -1.44480884e-01 -1.29545465e-01 -9.14788768e-02] [-2.21791193e-01 -1.29397258e-01 2.10416675e-01 1.14027254e-01 3.32900316e-01] [ 3.63755435e-01 -1.39862329e-01 7.36098140e-02 1.46658853e-01 1.77866444e-01]] [[ 8.49615186e-02 -2.50995755e-01 1.73006859e-02 2.31847972e-01 -1.94280878e-01] [-1.12069063e-01 4.87739444e-01 -1.00164361e-01 5.00589430e-01 -2.21821234e-01] [-4.82742526e-02 -2.57976174e-01 6.23085946e-02 4.32221025e-01 1.78052068e-01] [ 5.92709064e-01 -1.10586405e-01 3.32198143e-01 -1.76074564e-01 -4.04900014e-01] [ 1.46878645e-01 -2.33891368e-01 6.04335144e-02 1.45728871e-01 -1.09522834e-01]] [[ 
2.97343105e-01 -1.13729805e-01 -2.67941207e-01 -2.07372829e-01 5.04138432e-02] [-1.90508589e-01 -2.54234206e-02 1.25306532e-01 -5.34380786e-03 3.37368175e-02] [-2.07456127e-01 -2.96852618e-01 3.13523710e-02 1.19123809e-01 2.29502115e-02] [ 8.18506107e-02 -7.54448995e-02 1.87225312e-01 -1.67067245e-01 -5.25904112e-02] [ 5.05736843e-02 -1.60555784e-02 1.46238744e-01 1.68964826e-02 4.18283604e-02]] [[-1.68394566e-01 -1.58179745e-01 6.03000931e-02 -6.76507354e-02 -3.90822105e-02] [ 3.32268387e-01 -1.89951405e-01 -3.00950855e-01 4.29239839e-01 2.75964171e-01] [-1.67070001e-01 -1.58201292e-01 -8.16998482e-02 -1.39830887e-01 -2.17653394e-01] [-1.71790481e-01 5.80403546e-04 -1.57252580e-01 1.10538244e-01 -2.29028478e-01] [ 9.15039182e-02 1.05513394e-01 -2.15479597e-01 1.71757355e-01 1.94891438e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:True - params:{'kernel_size': [3, 2, 1], 'stride': [3, 1, 1], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5773.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : bool = prim::Constant[value=1]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %5 : int[] = prim::Constant[value=[0, 0, 0]]() %6 : int[] = prim::Constant[value=[3, 1, 1]]() %7 : int[] = prim::Constant[value=[3, 2, 1]]() %8 : Tensor = aten::avg_pool3d(%x, %7, %6, %5, %4, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%8) fw_re: [[[[[ 3.45505285e-03 8.52056816e-02 3.73423725e-01 ... -1.71549276e-01 -2.22578049e-01 1.86722085e-01] [ 1.53609529e-01 -4.02382970e-01 8.77180323e-02 ... -3.12100559e-01 3.31534773e-01 2.79092044e-01] [ 2.81541228e-01 2.14943275e-01 -2.65529275e-01 ... 3.75200123e-01 7.92429626e-01 2.22283602e-03] ... [-1.95504472e-01 5.95583506e-02 6.49927080e-01 ... 2.62644082e-01 -4.15804893e-01 -2.61318237e-01] [-6.68671429e-01 1.97692607e-02 4.18014526e-01 ... 2.96418935e-01 -2.29735926e-01 2.68342853e-01] [-4.35524106e-01 -4.15394098e-01 3.64345193e-01 ... 2.15909123e-01 -2.67955631e-01 2.99590617e-01]] [[-2.80832499e-03 -2.14182779e-01 -4.35961515e-01 ... -1.89896181e-01 7.34586954e-01 5.81192374e-01] [ 1.94877520e-01 -4.80490565e-01 -3.86832476e-01 ... -7.04834402e-01 3.62045139e-01 3.39394927e-01] [ 3.97049218e-01 -7.78011084e-01 -8.70561421e-01 ... -4.89661127e-01 4.49407101e-01 2.45359704e-01] ... [-6.40698552e-01 -2.31007576e-01 1.03316772e+00 ... -5.05233526e-01 -5.34876227e-01 -1.00669026e-01] [-9.28143203e-01 1.18640900e-01 9.07308400e-01 ... -3.71133894e-01 1.61925163e-02 1.29738972e-01] [-4.91042733e-01 2.92765617e-01 5.22879362e-01 ... -6.59797341e-03 -1.01920329e-01 2.00396895e-01]] [[ 1.86598703e-01 -9.02011812e-01 -2.48372912e-01 ... 
3.11751395e-01 8.63887846e-01 1.05813235e-01] [-7.26097077e-02 -2.90370136e-01 -4.81065661e-01 ... -2.32812047e-01 3.16080183e-01 1.51498333e-01] [-4.91408825e-01 -7.84798637e-02 1.08292736e-01 ... -2.52510726e-01 -2.66654398e-02 -4.76865768e-01] ... [ 1.54852808e-01 5.95202148e-01 -2.82574803e-01 ... 3.25486094e-01 4.46973920e-01 -2.31294811e-01] [-1.50724307e-01 1.10504282e+00 -1.61524788e-01 ... -3.19329023e-01 2.15156659e-01 1.16147287e-01] [ 2.14233622e-01 6.07764900e-01 -7.48216584e-02 ... -1.52773708e-01 1.14446282e-02 -1.82906672e-01]] [[-2.29671180e-01 2.46299520e-01 1.99632153e-01 ... 7.47355744e-02 -3.86798263e-01 3.19293767e-01] [-2.51760274e-01 7.68486202e-01 -5.55415154e-01 ... 1.08637072e-01 -1.01048060e-01 2.77099013e-02] [-5.92333414e-02 3.47777039e-01 -3.75935167e-01 ... -5.04232943e-01 -2.27803305e-01 -3.15874405e-02] ... [ 5.01115382e-01 -5.42529106e-01 -8.87318980e-03 ... -6.12926893e-02 1.20576300e-01 -8.31508338e-01] [-1.10268988e-01 1.22004123e-02 1.57441244e-01 ... 1.27994931e+00 8.53338957e-01 -4.51777935e-01] [ 4.41969037e-02 6.58772290e-01 4.85516757e-01 ... 5.47640979e-01 4.57352728e-01 -2.59553760e-01]] [[-2.48147488e-01 -2.29217052e-01 -1.04644708e-01 ... -4.52117831e-01 2.84538805e-01 5.82335055e-01] [-3.69273782e-01 -2.51263142e-01 9.39468205e-01 ... -4.61447686e-01 7.31980801e-01 2.15089262e-01] [-1.62126660e-01 2.52328247e-01 7.73343801e-01 ... 7.15079904e-03 6.44535840e-01 -4.64217663e-01] ... [-3.45109582e-01 1.76793292e-01 -7.06252158e-01 ... 3.50982457e-01 1.29998773e-01 -4.06915955e-02] [-2.04338178e-01 -4.56671417e-02 -4.79708403e-01 ... -2.99765110e-01 -1.02870606e-01 -3.51931721e-01] [ 1.09958686e-01 1.32412344e-01 -5.40628850e-01 ... -3.85750383e-01 3.13863099e-01 -2.42325187e-01]]] [[[-5.25231302e-01 1.48683205e-01 9.83702838e-01 ... -3.36603403e-01 1.65266171e-01 -4.17306513e-01] [-4.07631069e-01 -2.42060542e-01 -2.96345502e-01 ... 
6.24451339e-02 8.23447764e-01 -9.59210619e-02] [-1.29957972e-02 -5.03996909e-01 -2.61165053e-01 ... -1.18415080e-01 7.35465109e-01 -1.64984465e-02] ... [ 6.95814788e-01 1.28959119e-01 3.61287624e-01 ... -2.10675076e-01 -1.77316964e-01 1.49187386e-01] [ 1.37154028e-01 -4.47991759e-01 8.22353303e-01 ... -1.29259273e-01 -4.41343993e-01 -1.11188889e-02] [-5.51003993e-01 -9.38123241e-02 7.47650146e-01 ... 1.44475460e-01 -2.70063430e-01 -7.71967545e-02]] [[-7.69874230e-02 2.98616439e-01 -9.10129678e-03 ... -2.64208227e-01 2.46651676e-02 1.63754746e-01] [-4.25849915e-01 -1.03684120e-01 -3.94243114e-02 ... -6.99273586e-01 2.84194648e-02 -6.95651919e-02] [-5.53890407e-01 -1.07021607e-01 3.49398136e-01 ... -2.60839462e-01 -3.94833207e-01 1.66541543e-02] ... [-8.25552121e-02 -4.10823315e-01 -1.56841874e-01 ... 1.92861065e-01 4.24280949e-02 -6.76726922e-02] [-1.25241756e-01 -4.69174892e-01 -7.07179368e-01 ... 3.85444880e-01 2.52799988e-01 -7.97819138e-01] [-3.82812977e-01 -6.84548914e-02 -5.46409428e-01 ... 2.23338500e-01 1.89355969e-01 -4.10161942e-01]] [[ 3.44216436e-01 4.82600003e-01 -4.10853982e-01 ... -2.28965029e-01 -4.60137814e-01 -1.07042909e+00] [-1.95494309e-01 8.23551655e-01 2.62601674e-01 ... 6.88819408e-01 5.56818902e-01 1.16780460e-01] [-1.76030457e-01 -3.14329475e-01 6.01154007e-02 ... 6.79723024e-01 2.58291394e-01 6.18104994e-01] ... [ 3.37080359e-01 -2.83941478e-01 -7.12600052e-01 ... 4.91401739e-02 -1.23102255e-01 -7.12554455e-01] [-2.66991138e-01 1.39743626e-01 -2.44044349e-01 ... -1.80571243e-01 -1.89760372e-01 -8.50320101e-01] [-3.46462339e-01 1.56885490e-01 3.64916354e-01 ... 1.87048122e-01 -4.35683876e-01 -3.92412037e-01]] [[ 6.82118416e-01 1.54166028e-01 1.65390745e-01 ... -8.17466319e-01 8.73919308e-01 -4.10173923e-01] [ 4.35853571e-01 2.78607696e-01 3.33248347e-01 ... -6.86992466e-01 6.02420807e-01 -1.03420235e-01] [ 9.66453180e-02 3.00128251e-01 2.70317525e-01 ... -1.44951329e-01 9.27696601e-02 3.46534163e-01] ... 
[ 6.25678539e-01 2.63327122e-01 1.43379226e-01 ... 5.10236025e-01 4.06591296e-02 3.04311842e-01] [ 1.25392988e-01 -5.77509403e-04 -2.29191467e-01 ... 2.50453074e-02 -2.30829164e-01 -4.94929440e-02] [ 2.69000024e-01 -1.51885048e-01 -3.59223038e-02 ... -7.23052472e-02 3.83928083e-02 5.14282798e-03]] [[ 1.05558060e-01 -2.18918845e-01 -3.13084662e-01 ... 5.62322021e-01 -4.45515543e-01 -2.47995213e-01] [ 1.78796068e-01 -3.28505009e-01 2.98735648e-01 ... 8.02469254e-01 -4.75552082e-02 5.30684479e-02] [-7.77888894e-02 4.61423635e-01 5.84321797e-01 ... 4.40274239e-01 4.28313643e-01 1.23301297e-01] ... [ 7.01002061e-01 6.45627081e-01 -6.04995489e-01 ... 2.25972250e-01 1.54027760e-01 1.31564081e-01] [ 4.23811078e-02 2.85851330e-01 -4.25760180e-01 ... 7.09074974e-01 6.13479435e-01 -3.36539537e-01] [-4.37762469e-01 -6.51233420e-02 -2.19989922e-02 ... 4.64255661e-01 1.21147938e-01 -4.97453123e-01]]] [[[-2.99270004e-01 -2.44565710e-01 -4.24846023e-01 ... -1.01734297e-02 -2.41330668e-01 -2.29894534e-01] [-1.54668316e-01 -3.32907081e-01 -8.84495210e-03 ... 1.75128415e-01 -7.41080105e-01 3.18263859e-01] [-3.31662148e-02 3.72045070e-01 3.57531458e-01 ... 1.85169995e-01 -7.67730772e-01 2.82237798e-01] ... [-2.35528126e-01 5.49687862e-01 3.95272017e-01 ... 2.15801716e-01 -1.80240676e-01 5.07445753e-01] [-2.13161707e-01 4.06316608e-01 2.52571970e-01 ... 1.93250164e-01 -3.82996835e-02 5.81864178e-01] [ 9.62628722e-02 7.93566525e-01 -2.15772446e-02 ... 5.04859984e-01 4.42494564e-02 -6.92099556e-02]] [[-5.24247110e-01 -4.26537879e-02 -2.66132921e-01 ... -5.40088117e-02 -9.52278972e-02 1.68197021e-01] [-9.71020877e-01 -2.29059562e-01 2.45642707e-01 ... -1.84481069e-01 3.73750687e-01 1.16686054e-01] [-8.28476369e-01 -2.81546623e-01 -7.41129592e-02 ... 1.14768304e-01 5.17465174e-01 5.11523485e-01] ... [-3.59741449e-01 6.71801746e-01 1.31183863e-03 ... 2.82170117e-01 5.89955747e-01 3.67292076e-01] [-2.07146719e-01 -1.24653637e-01 2.59494424e-01 ... 
4.00311917e-01 5.12150168e-01 5.15395343e-01] [-5.14271796e-01 -2.97284961e-01 2.73656845e-01 ... 3.86249393e-01 1.95436060e-01 -5.08021176e-01]] [[ 2.81448722e-01 4.21025962e-01 2.59168029e-01 ... -6.80084005e-02 -3.97048682e-01 2.03115940e-01] [ 2.85893172e-01 4.46097136e-01 5.51765323e-01 ... -9.55070630e-02 5.19428067e-02 5.27980149e-01] [ 2.34088823e-01 4.44649786e-01 3.85744572e-01 ... 1.33809462e-01 3.37672323e-01 6.37017414e-02] ... [ 6.60472885e-02 5.02408206e-01 5.59788942e-02 ... 7.60626078e-01 -4.50858213e-02 -7.81352222e-01] [ 2.51166731e-01 4.45019960e-01 -2.22335473e-01 ... 2.63174593e-01 -3.95210385e-02 -5.34332812e-01] [-6.64593279e-03 3.00469905e-01 -5.98768950e-01 ... 1.98561981e-01 3.30547035e-01 4.12512153e-01]] [[ 1.93064455e-02 5.21667540e-01 4.35070276e-01 ... 3.17721456e-01 -2.87132114e-01 1.00615120e-03] [-5.09128630e-01 1.00494397e+00 4.60081369e-01 ... 2.75303990e-01 -3.04332882e-01 -8.35451707e-02] [-5.46236753e-01 4.09240484e-01 6.49112940e-01 ... -6.58005476e-02 -2.66584367e-01 2.54388899e-01] ... [-2.75014788e-01 -4.40703630e-02 -1.21277653e-01 ... 2.08200365e-02 1.74167439e-01 2.03456581e-01] [-3.06727916e-01 -2.15538442e-01 -4.26724672e-01 ... -5.87540567e-01 3.09871942e-01 2.58776605e-01] [-4.59449887e-02 -6.30896211e-01 -4.65674490e-01 ... 1.33241817e-01 -4.27834690e-02 1.52315497e-02]] [[-1.00332581e-01 -2.16924071e-01 1.45010293e-01 ... 3.91409487e-01 1.72957107e-01 -3.64382863e-01] [ 9.13217142e-02 -1.52229965e-01 2.70634770e-01 ... 7.92504132e-01 1.71033785e-01 6.72521964e-02] [ 5.08507602e-02 -4.10689592e-01 -3.82762879e-01 ... 4.09986943e-01 1.46321833e-01 4.71871905e-02] ... [-1.56829968e-01 7.78475478e-02 3.04430068e-01 ... 1.53011426e-01 -2.44509682e-01 -1.00576341e+00] [-9.21507895e-01 3.27459425e-01 6.04029261e-02 ... -9.42380056e-02 -1.74422547e-01 -6.77379191e-01] [-3.95241141e-01 8.06279123e-01 -1.93179548e-01 ... 
-2.07624480e-01 -4.58435714e-02 -4.55459543e-02]]]]]; ov_res: [[[[[ 3.45505285e-03 8.52056816e-02 3.73423725e-01 ... -1.71549276e-01 -2.22578049e-01 1.86722085e-01] [ 1.53609529e-01 -4.02382970e-01 8.77180323e-02 ... -3.12100559e-01 3.31534773e-01 2.79092044e-01] [ 2.81541228e-01 2.14943275e-01 -2.65529275e-01 ... 3.75200123e-01 7.92429626e-01 2.22283602e-03] ... [-1.95504472e-01 5.95583506e-02 6.49927080e-01 ... 2.62644082e-01 -4.15804893e-01 -2.61318237e-01] [-6.68671429e-01 1.97692607e-02 4.18014526e-01 ... 2.96418935e-01 -2.29735926e-01 2.68342853e-01] [-4.35524106e-01 -4.15394098e-01 3.64345193e-01 ... 2.15909123e-01 -2.67955631e-01 2.99590617e-01]] [[-2.80832499e-03 -2.14182779e-01 -4.35961515e-01 ... -1.89896181e-01 7.34586954e-01 5.81192374e-01] [ 1.94877520e-01 -4.80490565e-01 -3.86832476e-01 ... -7.04834402e-01 3.62045139e-01 3.39394927e-01] [ 3.97049218e-01 -7.78011084e-01 -8.70561421e-01 ... -4.89661127e-01 4.49407101e-01 2.45359704e-01] ... [-6.40698552e-01 -2.31007576e-01 1.03316772e+00 ... -5.05233526e-01 -5.34876227e-01 -1.00669026e-01] [-9.28143203e-01 1.18640900e-01 9.07308400e-01 ... -3.71133894e-01 1.61925163e-02 1.29738972e-01] [-4.91042733e-01 2.92765617e-01 5.22879362e-01 ... -6.59797341e-03 -1.01920329e-01 2.00396895e-01]] [[ 1.86598703e-01 -9.02011812e-01 -2.48372912e-01 ... 3.11751395e-01 8.63887846e-01 1.05813235e-01] [-7.26097077e-02 -2.90370136e-01 -4.81065661e-01 ... -2.32812047e-01 3.16080183e-01 1.51498333e-01] [-4.91408825e-01 -7.84798637e-02 1.08292736e-01 ... -2.52510726e-01 -2.66654398e-02 -4.76865768e-01] ... [ 1.54852808e-01 5.95202148e-01 -2.82574803e-01 ... 3.25486094e-01 4.46973920e-01 -2.31294811e-01] [-1.50724307e-01 1.10504282e+00 -1.61524788e-01 ... -3.19329023e-01 2.15156659e-01 1.16147287e-01] [ 2.14233622e-01 6.07764900e-01 -7.48216584e-02 ... -1.52773708e-01 1.14446282e-02 -1.82906672e-01]] [[-2.29671180e-01 2.46299520e-01 1.99632153e-01 ... 
7.47355744e-02 -3.86798263e-01 3.19293767e-01] [-2.51760274e-01 7.68486202e-01 -5.55415154e-01 ... 1.08637072e-01 -1.01048060e-01 2.77099013e-02] [-5.92333414e-02 3.47777039e-01 -3.75935167e-01 ... -5.04232943e-01 -2.27803305e-01 -3.15874405e-02] ... [ 5.01115382e-01 -5.42529106e-01 -8.87318980e-03 ... -6.12926893e-02 1.20576300e-01 -8.31508338e-01] [-1.10268988e-01 1.22004123e-02 1.57441244e-01 ... 1.27994931e+00 8.53338957e-01 -4.51777935e-01] [ 4.41969037e-02 6.58772290e-01 4.85516757e-01 ... 5.47640979e-01 4.57352728e-01 -2.59553760e-01]] [[-2.48147488e-01 -2.29217052e-01 -1.04644708e-01 ... -4.52117831e-01 2.84538805e-01 5.82335055e-01] [-3.69273782e-01 -2.51263142e-01 9.39468205e-01 ... -4.61447686e-01 7.31980801e-01 2.15089262e-01] [-1.62126660e-01 2.52328247e-01 7.73343801e-01 ... 7.15079904e-03 6.44535840e-01 -4.64217663e-01] ... [-3.45109582e-01 1.76793292e-01 -7.06252158e-01 ... 3.50982457e-01 1.29998773e-01 -4.06915955e-02] [-2.04338178e-01 -4.56671417e-02 -4.79708403e-01 ... -2.99765110e-01 -1.02870606e-01 -3.51931721e-01] [ 1.09958686e-01 1.32412344e-01 -5.40628850e-01 ... -3.85750383e-01 3.13863099e-01 -2.42325187e-01]]] [[[-5.25231302e-01 1.48683205e-01 9.83702838e-01 ... -3.36603403e-01 1.65266171e-01 -4.17306513e-01] [-4.07631069e-01 -2.42060542e-01 -2.96345502e-01 ... 6.24451339e-02 8.23447764e-01 -9.59210619e-02] [-1.29957972e-02 -5.03996909e-01 -2.61165053e-01 ... -1.18415080e-01 7.35465109e-01 -1.64984465e-02] ... [ 6.95814788e-01 1.28959119e-01 3.61287624e-01 ... -2.10675076e-01 -1.77316964e-01 1.49187386e-01] [ 1.37154028e-01 -4.47991759e-01 8.22353303e-01 ... -1.29259273e-01 -4.41343993e-01 -1.11188889e-02] [-5.51003993e-01 -9.38123241e-02 7.47650146e-01 ... 1.44475460e-01 -2.70063430e-01 -7.71967545e-02]] [[-7.69874230e-02 2.98616439e-01 -9.10129678e-03 ... -2.64208227e-01 2.46651676e-02 1.63754746e-01] [-4.25849915e-01 -1.03684120e-01 -3.94243114e-02 ... 
-6.99273586e-01 2.84194648e-02 -6.95651919e-02] [-5.53890407e-01 -1.07021607e-01 3.49398136e-01 ... -2.60839462e-01 -3.94833207e-01 1.66541543e-02] ... [-8.25552121e-02 -4.10823315e-01 -1.56841874e-01 ... 1.92861065e-01 4.24280949e-02 -6.76726922e-02] [-1.25241756e-01 -4.69174892e-01 -7.07179368e-01 ... 3.85444880e-01 2.52799988e-01 -7.97819138e-01] [-3.82812977e-01 -6.84548914e-02 -5.46409428e-01 ... 2.23338500e-01 1.89355969e-01 -4.10161942e-01]] [[ 3.44216436e-01 4.82600003e-01 -4.10853982e-01 ... -2.28965029e-01 -4.60137814e-01 -1.07042909e+00] [-1.95494309e-01 8.23551655e-01 2.62601674e-01 ... 6.88819408e-01 5.56818902e-01 1.16780460e-01] [-1.76030457e-01 -3.14329475e-01 6.01154007e-02 ... 6.79723024e-01 2.58291394e-01 6.18104994e-01] ... [ 3.37080359e-01 -2.83941478e-01 -7.12600052e-01 ... 4.91401739e-02 -1.23102255e-01 -7.12554455e-01] [-2.66991138e-01 1.39743626e-01 -2.44044349e-01 ... -1.80571243e-01 -1.89760372e-01 -8.50320101e-01] [-3.46462339e-01 1.56885490e-01 3.64916354e-01 ... 1.87048122e-01 -4.35683876e-01 -3.92412037e-01]] [[ 6.82118416e-01 1.54166028e-01 1.65390745e-01 ... -8.17466319e-01 8.73919308e-01 -4.10173923e-01] [ 4.35853571e-01 2.78607696e-01 3.33248347e-01 ... -6.86992466e-01 6.02420807e-01 -1.03420235e-01] [ 9.66453180e-02 3.00128251e-01 2.70317525e-01 ... -1.44951329e-01 9.27696601e-02 3.46534163e-01] ... [ 6.25678539e-01 2.63327122e-01 1.43379226e-01 ... 5.10236025e-01 4.06591296e-02 3.04311842e-01] [ 1.25392988e-01 -5.77509403e-04 -2.29191467e-01 ... 2.50453074e-02 -2.30829164e-01 -4.94929440e-02] [ 2.69000024e-01 -1.51885048e-01 -3.59223038e-02 ... -7.23052472e-02 3.83928083e-02 5.14282798e-03]] [[ 1.05558060e-01 -2.18918845e-01 -3.13084662e-01 ... 5.62322021e-01 -4.45515543e-01 -2.47995213e-01] [ 1.78796068e-01 -3.28505009e-01 2.98735648e-01 ... 8.02469254e-01 -4.75552082e-02 5.30684479e-02] [-7.77888894e-02 4.61423635e-01 5.84321797e-01 ... 4.40274239e-01 4.28313643e-01 1.23301297e-01] ... 
[ 7.01002061e-01 6.45627081e-01 -6.04995489e-01 ... 2.25972250e-01 1.54027760e-01 1.31564081e-01] [ 4.23811078e-02 2.85851330e-01 -4.25760180e-01 ... 7.09074974e-01 6.13479435e-01 -3.36539537e-01] [-4.37762469e-01 -6.51233420e-02 -2.19989922e-02 ... 4.64255661e-01 1.21147938e-01 -4.97453123e-01]]] [[[-2.99270004e-01 -2.44565710e-01 -4.24846023e-01 ... -1.01734297e-02 -2.41330668e-01 -2.29894534e-01] [-1.54668316e-01 -3.32907081e-01 -8.84495210e-03 ... 1.75128415e-01 -7.41080105e-01 3.18263859e-01] [-3.31662148e-02 3.72045070e-01 3.57531458e-01 ... 1.85169995e-01 -7.67730772e-01 2.82237798e-01] ... [-2.35528126e-01 5.49687862e-01 3.95272017e-01 ... 2.15801716e-01 -1.80240676e-01 5.07445753e-01] [-2.13161707e-01 4.06316608e-01 2.52571970e-01 ... 1.93250164e-01 -3.82996835e-02 5.81864178e-01] [ 9.62628722e-02 7.93566525e-01 -2.15772446e-02 ... 5.04859984e-01 4.42494564e-02 -6.92099556e-02]] [[-5.24247110e-01 -4.26537879e-02 -2.66132921e-01 ... -5.40088117e-02 -9.52278972e-02 1.68197021e-01] [-9.71020877e-01 -2.29059562e-01 2.45642707e-01 ... -1.84481069e-01 3.73750687e-01 1.16686054e-01] [-8.28476369e-01 -2.81546623e-01 -7.41129592e-02 ... 1.14768304e-01 5.17465174e-01 5.11523485e-01] ... [-3.59741449e-01 6.71801746e-01 1.31183863e-03 ... 2.82170117e-01 5.89955747e-01 3.67292076e-01] [-2.07146719e-01 -1.24653637e-01 2.59494424e-01 ... 4.00311917e-01 5.12150168e-01 5.15395343e-01] [-5.14271796e-01 -2.97284961e-01 2.73656845e-01 ... 3.86249393e-01 1.95436060e-01 -5.08021176e-01]] [[ 2.81448722e-01 4.21025962e-01 2.59168029e-01 ... -6.80084005e-02 -3.97048682e-01 2.03115940e-01] [ 2.85893172e-01 4.46097136e-01 5.51765323e-01 ... -9.55070630e-02 5.19428067e-02 5.27980149e-01] [ 2.34088823e-01 4.44649786e-01 3.85744572e-01 ... 1.33809462e-01 3.37672323e-01 6.37017414e-02] ... [ 6.60472885e-02 5.02408206e-01 5.59788942e-02 ... 7.60626078e-01 -4.50858213e-02 -7.81352222e-01] [ 2.51166731e-01 4.45019960e-01 -2.22335473e-01 ... 
2.63174593e-01 -3.95210385e-02 -5.34332812e-01] [-6.64593279e-03 3.00469905e-01 -5.98768950e-01 ... 1.98561981e-01 3.30547035e-01 4.12512153e-01]] [[ 1.93064455e-02 5.21667540e-01 4.35070276e-01 ... 3.17721456e-01 -2.87132114e-01 1.00615120e-03] [-5.09128630e-01 1.00494397e+00 4.60081369e-01 ... 2.75303990e-01 -3.04332882e-01 -8.35451707e-02] [-5.46236753e-01 4.09240484e-01 6.49112940e-01 ... -6.58005476e-02 -2.66584367e-01 2.54388899e-01] ... [-2.75014788e-01 -4.40703630e-02 -1.21277653e-01 ... 2.08200365e-02 1.74167439e-01 2.03456581e-01] [-3.06727916e-01 -2.15538442e-01 -4.26724672e-01 ... -5.87540567e-01 3.09871942e-01 2.58776605e-01] [-4.59449887e-02 -6.30896211e-01 -4.65674490e-01 ... 1.33241817e-01 -4.27834690e-02 1.52315497e-02]] [[-1.00332581e-01 -2.16924071e-01 1.45010293e-01 ... 3.91409487e-01 1.72957107e-01 -3.64382863e-01] [ 9.13217142e-02 -1.52229965e-01 2.70634770e-01 ... 7.92504132e-01 1.71033785e-01 6.72521964e-02] [ 5.08507602e-02 -4.10689592e-01 -3.82762879e-01 ... 4.09986943e-01 1.46321833e-01 4.71871905e-02] ... [-1.56829968e-01 7.78475478e-02 3.04430068e-01 ... 1.53011426e-01 -2.44509682e-01 -1.00576341e+00] [-9.21507895e-01 3.27459425e-01 6.04029261e-02 ... -9.42380056e-02 -1.74422547e-01 -6.77379191e-01] [-3.95241141e-01 8.06279123e-01 -1.93179548e-01 ... -2.07624480e-01 -4.58435714e-02 -4.55459543e-02]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5776.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : int[] = prim::Constant[value=[0, 0, 0]]() %5 : int[] = prim::Constant[value=[1, 1, 1]]() %6 : int[] = prim::Constant[value=[3, 3, 3]]() %7 : Tensor = aten::avg_pool3d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%7) fw_re: [[[[[ 0.03359585 -0.06791215 -0.04236135 ... -0.17574298 -0.13083036 0.05027799] [ 0.1673006 -0.02263102 0.21145564 ... -0.1956255 -0.06052884 0.17273895] [ 0.28012156 0.16423762 0.14027058 ... -0.04694767 0.03431283 0.27082723] ... [ 0.06396587 0.08704543 0.22282541 ... -0.27580205 -0.2087165 0.04577675] [-0.16026932 0.06187787 0.23790328 ... -0.33918917 -0.03900588 0.31632593] [-0.34485653 -0.21134432 0.09385994 ... -0.4018406 -0.20973937 0.4398302 ]] [[ 0.21025714 -0.03732708 -0.0804715 ... -0.04926635 -0.01544574 0.07542954] [ 0.41027713 0.22550057 0.32637766 ... 0.04546196 0.06112492 0.06315481] [ 0.41064546 0.36356682 0.14977275 ... 0.18944246 0.16599597 0.25187406] ... [ 0.15344785 0.1455716 0.16934699 ... -0.2959849 -0.01603306 0.09022145] [-0.14377543 0.06599783 0.14991952 ... -0.21831425 0.07654446 0.23603779] [-0.33547196 -0.17447948 -0.02384728 ... -0.21067213 -0.15611897 0.14785556]] [[ 0.19963203 -0.07885021 0.02593775 ... -0.08483805 0.12082024 0.17263782] [ 0.44484383 0.2851711 0.43099973 ... 0.15084247 0.26528394 0.1550103 ] [ 0.3602436 0.35051894 0.30690897 ... 0.27674347 0.37448132 0.30240083] ... [ 0.3021557 0.23976819 0.051138 ... -0.17679612 0.04173883 0.14945887] [-0.12841487 0.05249963 -0.00739198 ... -0.12136126 0.1150319 0.31603977] [-0.28475708 -0.18185207 -0.04513106 ... -0.03505001 0.07411866 0.29681724]] ... 
[[-0.16404088 -0.08520442 0.0210627 ... -0.0276368 -0.07657062 -0.07590388] [-0.22325194 -0.17529613 -0.23175354 ... -0.02816243 -0.12430339 -0.10598195] [-0.31460547 -0.23194945 -0.2113546 ... -0.09454259 -0.29628542 -0.17195866] ... [-0.18914166 -0.20251411 0.05495348 ... -0.46934283 -0.4072823 -0.22060165] [-0.34678236 -0.2440792 -0.09064766 ... -0.39613533 -0.2529856 -0.2151557 ] [-0.14149553 -0.11341851 -0.00859769 ... -0.25242627 -0.20304745 -0.23584726]] [[-0.3331404 -0.23077255 0.03808453 ... 0.10655631 -0.09432153 0.0315575 ] [-0.12098812 0.06296084 0.22439077 ... -0.11663566 -0.1363325 0.15224248] [-0.05730672 0.02937802 -0.03836128 ... -0.27648515 -0.22025356 0.08783545] ... [-0.05437235 -0.21442537 -0.04213995 ... -0.32922018 -0.24986391 -0.16362515] [-0.15581667 -0.20990455 -0.1666042 ... -0.36231485 -0.21912156 -0.27233574] [-0.08073263 -0.09569407 -0.08851987 ... -0.27858844 -0.11964291 -0.11165548]] [[-0.50913113 -0.3083532 -0.16088665 ... 0.11699357 -0.04465412 0.09701223] [-0.19195567 -0.00900225 0.25129876 ... -0.00528001 -0.02386053 0.25186804] [-0.01081866 0.08004811 -0.020739 ... -0.11422181 -0.13578929 0.34247792] ... [ 0.04651393 -0.11932246 -0.05236684 ... -0.4927353 -0.27543646 -0.12323042] [ 0.04722858 -0.16070983 -0.13506018 ... -0.3225098 -0.2511795 -0.18435727] [ 0.21655476 0.01590029 -0.06992918 ... -0.09739275 -0.11573016 -0.01614675]]] [[[ 0.12828921 0.16444825 0.55925554 ... 0.11669798 0.03066416 -0.03626081] [ 0.08157817 0.25170425 0.53418154 ... 0.02564962 -0.0536265 -0.09104227] [ 0.13443777 0.33803537 0.45222297 ... -0.15870434 -0.22561134 -0.29153016] ... [-0.04821621 -0.04397135 -0.26389685 ... -0.2404379 -0.24916084 -0.14114422] [-0.06318806 0.05012512 -0.1729899 ... 0.1317359 -0.03978303 -0.14136232] [-0.21934408 0.27893737 0.14364396 ... 0.01463543 -0.10047664 -0.23342068]] [[-0.28047538 -0.2175861 0.03262281 ... 0.29364535 0.26755786 0.15579064] [-0.07978901 0.02135451 0.07546511 ... 
0.11095004 0.07179289 0.10725717] [-0.01313261 0.20228213 0.16814436 ... -0.04703509 -0.17559116 -0.14020777] ... [ 0.04830055 0.11807335 -0.1244624 ... -0.31680185 -0.25438732 -0.08119857] [ 0.08097351 0.29375336 0.0078285 ... 0.06620734 -0.05695806 -0.01026041] [-0.20511207 0.22685292 0.14616685 ... 0.25577766 0.09087992 0.09099558]] [[-0.3295822 -0.35998324 -0.19972047 ... 0.10163442 -0.06906405 -0.05444685] [-0.15550089 -0.23500334 -0.24140692 ... -0.0066272 -0.02302172 0.1426646 ] [-0.21808651 -0.13827796 -0.2135439 ... -0.16774835 -0.23892042 -0.04936623] ... [ 0.15567571 0.06824928 -0.04408136 ... -0.06329922 -0.15912095 -0.16694821] [ 0.19493379 0.15165846 -0.08251217 ... -0.03670258 -0.15436874 -0.06008722] [-0.10413452 0.02075044 0.02777971 ... 0.04511235 -0.08435806 0.07720966]] ... [[ 0.06857723 0.17020491 -0.14801048 ... -0.14676984 -0.24101047 0.04364456] [ 0.0984135 -0.14758416 -0.37020397 ... 0.02931448 -0.06940224 0.01957047] [ 0.31042486 -0.11025295 -0.32175812 ... -0.04510977 -0.06249785 -0.08848657] ... [-0.04002813 0.05910459 -0.06459505 ... 0.14619 0.19345729 0.08832918] [-0.0013466 0.17489399 0.10220913 ... 0.0908464 0.2248575 0.05614408] [ 0.28121626 0.44793186 0.3798504 ... -0.06922136 0.20303814 0.1802193 ]] [[ 0.00600536 -0.04121532 -0.25253484 ... -0.19015482 -0.24517459 0.08919484] [-0.02499548 -0.24672377 -0.35277277 ... 0.16609396 -0.0135245 0.00762145] [ 0.12828648 -0.03320762 -0.02924098 ... 0.15896215 0.17932557 0.04656424] ... [-0.00161178 -0.02861504 -0.25211066 ... 0.2081361 0.17465842 -0.02087895] [-0.04282398 0.10953245 -0.00761958 ... 0.10910325 0.15247373 0.01780813] [ 0.07134565 0.20085691 0.13102767 ... 0.11621004 0.32018697 0.28348356]] [[ 0.06416673 -0.19622928 -0.28457674 ... -0.43870544 -0.32013413 -0.14486168] [ 0.15348218 -0.13276084 -0.20670326 ... -0.17117043 -0.09835842 -0.16654989] [ 0.35187417 0.24044664 0.2073116 ... -0.06846455 0.32331604 0.03914991] ... [ 0.11467516 0.12413268 -0.29037404 ... 
0.3770298 0.27147976 0.00179722] [ 0.08498605 0.1886608 -0.07963349 ... 0.20868775 0.08997636 -0.06091916] [ 0.14200346 0.15723975 0.03041111 ... 0.12599306 0.25578544 0.04939345]]] [[[-0.16867448 -0.02915692 -0.17808229 ... 0.2368037 0.0946294 0.14196226] [-0.11160701 0.04542422 0.00700615 ... 0.49257734 0.44606635 0.43640244] [-0.24119933 -0.13183568 -0.05481594 ... 0.35572737 0.29339358 0.47088358] ... [-0.12112989 -0.09058119 -0.15033294 ... -0.50591 -0.270619 -0.22430903] [ 0.18732002 -0.00611064 0.03167855 ... -0.3101815 -0.300297 -0.1389067 ] [ 0.25455227 0.08660889 -0.13832887 ... -0.10544708 -0.05629173 -0.00595219]] [[-0.24871293 -0.1775996 -0.15622866 ... 0.11306581 0.06867345 0.14982195] [-0.30275995 -0.19427787 -0.20000984 ... 0.24087372 0.28762195 0.305209 ] [-0.30294403 -0.37709945 -0.33879542 ... 0.1556675 0.12718388 0.20148452] ... [-0.16800337 -0.19227 -0.21644738 ... -0.2221631 -0.3048111 -0.34506473] [-0.1451244 -0.3469866 -0.08481883 ... -0.07353202 -0.08529648 0.0883536 ] [-0.17878819 -0.23789246 -0.06900239 ... 0.05166965 0.12084203 0.15632029]] [[-0.25125003 -0.12717906 -0.2425253 ... -0.0390095 0.04017491 -0.08459433] [-0.32016996 -0.21665297 -0.30407825 ... -0.00522632 0.13452135 0.08422422] [-0.24266408 -0.27836975 -0.0905806 ... -0.14821205 -0.09361731 0.06787247] ... [-0.09959219 -0.09302334 -0.23559186 ... 0.0091385 -0.16702566 -0.09133243] [-0.12865126 -0.23631862 -0.13239147 ... 0.0165365 0.15981466 0.34437487] [-0.16678502 -0.12759915 0.02386468 ... -0.17313187 0.06819817 0.17076252]] ... [[-0.39350578 -0.20664275 0.01359434 ... -0.1323897 -0.15966034 -0.17125359] [-0.47467253 -0.30504587 -0.31511015 ... -0.20899092 -0.11118834 -0.01611841] [-0.53976864 -0.2663101 -0.20305593 ... -0.06767908 -0.11898283 0.20159999] ... [ 0.28744236 0.23147166 0.01269894 ... -0.18367605 0.02341793 0.1189063 ] [ 0.23455779 0.01547132 -0.13048226 ... 0.05536391 0.1510966 0.3026844 ] [ 0.32501915 0.09626909 0.08377298 ... 
0.09966391 0.08782376 0.3234196 ]] [[-0.10509785 -0.05142367 -0.11483142 ... -0.00776936 0.03247391 0.17506649] [ 0.01832494 -0.1185524 -0.39459738 ... -0.20981085 -0.03960245 0.18839404] [-0.13557446 -0.14086153 -0.15170251 ... -0.0167701 0.06059643 0.43197674] ... [ 0.04475129 0.10077719 -0.10127418 ... 0.14942275 0.14456668 0.02161399] [ 0.16850716 0.17150635 -0.19875582 ... 0.12918222 0.11271793 0.0394121 ] [ 0.29398486 0.280913 -0.01196156 ... 0.07852134 0.07030719 0.09086547]] [[-0.07073694 -0.10046645 -0.03597007 ... -0.07923523 0.01387493 0.01231721] [-0.1020954 -0.26712653 -0.33932224 ... -0.09367128 0.14109574 0.07324279] [-0.2119318 -0.31250268 -0.20440452 ... 0.24267639 0.4752171 0.4165711 ] ... [-0.0879111 -0.12620623 -0.10090303 ... 0.28960818 0.28885174 0.19804284] [ 0.10308822 0.04430995 -0.09934303 ... 0.18856834 0.06447497 -0.12182979] [ 0.26574394 0.26832452 0.15274914 ... 0.10362884 0.01922722 -0.14547373]]]]]; ov_res: [[[[[ 0.03359585 -0.06791215 -0.04236135 ... -0.17574298 -0.13083036 0.05027799] [ 0.1673006 -0.02263102 0.21145564 ... -0.1956255 -0.06052884 0.17273895] [ 0.28012156 0.16423762 0.14027058 ... -0.04694767 0.03431283 0.27082723] ... [ 0.06396587 0.08704543 0.22282541 ... -0.27580205 -0.2087165 0.04577675] [-0.16026932 0.06187787 0.23790328 ... -0.33918917 -0.03900588 0.31632593] [-0.34485653 -0.21134432 0.09385994 ... -0.4018406 -0.20973937 0.4398302 ]] [[ 0.21025714 -0.03732708 -0.0804715 ... -0.04926635 -0.01544574 0.07542954] [ 0.41027713 0.22550057 0.32637766 ... 0.04546196 0.06112492 0.06315481] [ 0.41064546 0.36356682 0.14977275 ... 0.18944246 0.16599597 0.25187406] ... [ 0.15344785 0.1455716 0.16934699 ... -0.2959849 -0.01603306 0.09022145] [-0.14377543 0.06599783 0.14991952 ... -0.21831425 0.07654446 0.23603779] [-0.33547196 -0.17447948 -0.02384728 ... -0.21067213 -0.15611897 0.14785556]] [[ 0.19963203 -0.07885021 0.02593775 ... -0.08483805 0.12082024 0.17263782] [ 0.44484383 0.2851711 0.43099973 ... 
0.15084247 0.26528394 0.1550103 ] [ 0.3602436 0.35051894 0.30690897 ... 0.27674347 0.37448132 0.30240083] ... [ 0.3021557 0.23976819 0.051138 ... -0.17679612 0.04173883 0.14945887] [-0.12841487 0.05249963 -0.00739198 ... -0.12136126 0.1150319 0.31603977] [-0.28475708 -0.18185207 -0.04513106 ... -0.03505001 0.07411866 0.29681724]] ... [[-0.16404088 -0.08520442 0.0210627 ... -0.0276368 -0.07657062 -0.07590388] [-0.22325194 -0.17529613 -0.23175354 ... -0.02816243 -0.12430339 -0.10598195] [-0.31460547 -0.23194945 -0.2113546 ... -0.09454259 -0.29628542 -0.17195866] ... [-0.18914166 -0.20251411 0.05495348 ... -0.46934283 -0.4072823 -0.22060165] [-0.34678236 -0.2440792 -0.09064766 ... -0.39613533 -0.2529856 -0.2151557 ] [-0.14149553 -0.11341851 -0.00859769 ... -0.25242627 -0.20304745 -0.23584726]] [[-0.3331404 -0.23077255 0.03808453 ... 0.10655631 -0.09432153 0.0315575 ] [-0.12098812 0.06296084 0.22439077 ... -0.11663566 -0.1363325 0.15224248] [-0.05730672 0.02937802 -0.03836128 ... -0.27648515 -0.22025356 0.08783545] ... [-0.05437235 -0.21442537 -0.04213995 ... -0.32922018 -0.24986391 -0.16362515] [-0.15581667 -0.20990455 -0.1666042 ... -0.36231485 -0.21912156 -0.27233574] [-0.08073263 -0.09569407 -0.08851987 ... -0.27858844 -0.11964291 -0.11165548]] [[-0.50913113 -0.3083532 -0.16088665 ... 0.11699357 -0.04465412 0.09701223] [-0.19195567 -0.00900225 0.25129876 ... -0.00528001 -0.02386053 0.25186804] [-0.01081866 0.08004811 -0.020739 ... -0.11422181 -0.13578929 0.34247792] ... [ 0.04651393 -0.11932246 -0.05236684 ... -0.4927353 -0.27543646 -0.12323042] [ 0.04722858 -0.16070983 -0.13506018 ... -0.3225098 -0.2511795 -0.18435727] [ 0.21655476 0.01590029 -0.06992918 ... -0.09739275 -0.11573016 -0.01614675]]] [[[ 0.12828921 0.16444825 0.55925554 ... 0.11669798 0.03066416 -0.03626081] [ 0.08157817 0.25170425 0.53418154 ... 0.02564962 -0.0536265 -0.09104227] [ 0.13443777 0.33803537 0.45222297 ... -0.15870434 -0.22561134 -0.29153016] ... [-0.04821621 -0.04397135 -0.26389685 ... 
-0.2404379 -0.24916084 -0.14114422] [-0.06318806 0.05012512 -0.1729899 ... 0.1317359 -0.03978303 -0.14136232] [-0.21934408 0.27893737 0.14364396 ... 0.01463543 -0.10047664 -0.23342068]] [[-0.28047538 -0.2175861 0.03262281 ... 0.29364535 0.26755786 0.15579064] [-0.07978901 0.02135451 0.07546511 ... 0.11095004 0.07179289 0.10725717] [-0.01313261 0.20228213 0.16814436 ... -0.04703509 -0.17559116 -0.14020777] ... [ 0.04830055 0.11807335 -0.1244624 ... -0.31680185 -0.25438732 -0.08119857] [ 0.08097351 0.29375336 0.0078285 ... 0.06620734 -0.05695806 -0.01026041] [-0.20511207 0.22685292 0.14616685 ... 0.25577766 0.09087992 0.09099558]] [[-0.3295822 -0.35998324 -0.19972047 ... 0.10163442 -0.06906405 -0.05444685] [-0.15550089 -0.23500334 -0.24140692 ... -0.0066272 -0.02302172 0.1426646 ] [-0.21808651 -0.13827796 -0.2135439 ... -0.16774835 -0.23892042 -0.04936623] ... [ 0.15567571 0.06824928 -0.04408136 ... -0.06329922 -0.15912095 -0.16694821] [ 0.19493379 0.15165846 -0.08251217 ... -0.03670258 -0.15436874 -0.06008722] [-0.10413452 0.02075044 0.02777971 ... 0.04511235 -0.08435806 0.07720966]] ... [[ 0.06857723 0.17020491 -0.14801048 ... -0.14676984 -0.24101047 0.04364456] [ 0.0984135 -0.14758416 -0.37020397 ... 0.02931448 -0.06940224 0.01957047] [ 0.31042486 -0.11025295 -0.32175812 ... -0.04510977 -0.06249785 -0.08848657] ... [-0.04002813 0.05910459 -0.06459505 ... 0.14619 0.19345729 0.08832918] [-0.0013466 0.17489399 0.10220913 ... 0.0908464 0.2248575 0.05614408] [ 0.28121626 0.44793186 0.3798504 ... -0.06922136 0.20303814 0.1802193 ]] [[ 0.00600536 -0.04121532 -0.25253484 ... -0.19015482 -0.24517459 0.08919484] [-0.02499548 -0.24672377 -0.35277277 ... 0.16609396 -0.0135245 0.00762145] [ 0.12828648 -0.03320762 -0.02924098 ... 0.15896215 0.17932557 0.04656424] ... [-0.00161178 -0.02861504 -0.25211066 ... 0.2081361 0.17465842 -0.02087895] [-0.04282398 0.10953245 -0.00761958 ... 0.10910325 0.15247373 0.01780813] [ 0.07134565 0.20085691 0.13102767 ... 
0.11621004 0.32018697 0.28348356]] [[ 0.06416673 -0.19622928 -0.28457674 ... -0.43870544 -0.32013413 -0.14486168] [ 0.15348218 -0.13276084 -0.20670326 ... -0.17117043 -0.09835842 -0.16654989] [ 0.35187417 0.24044664 0.2073116 ... -0.06846455 0.32331604 0.03914991] ... [ 0.11467516 0.12413268 -0.29037404 ... 0.3770298 0.27147976 0.00179722] [ 0.08498605 0.1886608 -0.07963349 ... 0.20868775 0.08997636 -0.06091916] [ 0.14200346 0.15723975 0.03041111 ... 0.12599306 0.25578544 0.04939345]]] [[[-0.16867448 -0.02915692 -0.17808229 ... 0.2368037 0.0946294 0.14196226] [-0.11160701 0.04542422 0.00700615 ... 0.49257734 0.44606635 0.43640244] [-0.24119933 -0.13183568 -0.05481594 ... 0.35572737 0.29339358 0.47088358] ... [-0.12112989 -0.09058119 -0.15033294 ... -0.50591 -0.270619 -0.22430903] [ 0.18732002 -0.00611064 0.03167855 ... -0.3101815 -0.300297 -0.1389067 ] [ 0.25455227 0.08660889 -0.13832887 ... -0.10544708 -0.05629173 -0.00595219]] [[-0.24871293 -0.1775996 -0.15622866 ... 0.11306581 0.06867345 0.14982195] [-0.30275995 -0.19427787 -0.20000984 ... 0.24087372 0.28762195 0.305209 ] [-0.30294403 -0.37709945 -0.33879542 ... 0.1556675 0.12718388 0.20148452] ... [-0.16800337 -0.19227 -0.21644738 ... -0.2221631 -0.3048111 -0.34506473] [-0.1451244 -0.3469866 -0.08481883 ... -0.07353202 -0.08529648 0.0883536 ] [-0.17878819 -0.23789246 -0.06900239 ... 0.05166965 0.12084203 0.15632029]] [[-0.25125003 -0.12717906 -0.2425253 ... -0.0390095 0.04017491 -0.08459433] [-0.32016996 -0.21665297 -0.30407825 ... -0.00522632 0.13452135 0.08422422] [-0.24266408 -0.27836975 -0.0905806 ... -0.14821205 -0.09361731 0.06787247] ... [-0.09959219 -0.09302334 -0.23559186 ... 0.0091385 -0.16702566 -0.09133243] [-0.12865126 -0.23631862 -0.13239147 ... 0.0165365 0.15981466 0.34437487] [-0.16678502 -0.12759915 0.02386468 ... -0.17313187 0.06819817 0.17076252]] ... [[-0.39350578 -0.20664275 0.01359434 ... -0.1323897 -0.15966034 -0.17125359] [-0.47467253 -0.30504587 -0.31511015 ... 
-0.20899092 -0.11118834 -0.01611841] [-0.53976864 -0.2663101 -0.20305593 ... -0.06767908 -0.11898283 0.20159999] ... [ 0.28744236 0.23147166 0.01269894 ... -0.18367605 0.02341793 0.1189063 ] [ 0.23455779 0.01547132 -0.13048226 ... 0.05536391 0.1510966 0.3026844 ] [ 0.32501915 0.09626909 0.08377298 ... 0.09966391 0.08782376 0.3234196 ]] [[-0.10509785 -0.05142367 -0.11483142 ... -0.00776936 0.03247391 0.17506649] [ 0.01832494 -0.1185524 -0.39459738 ... -0.20981085 -0.03960245 0.18839404] [-0.13557446 -0.14086153 -0.15170251 ... -0.0167701 0.06059643 0.43197674] ... [ 0.04475129 0.10077719 -0.10127418 ... 0.14942275 0.14456668 0.02161399] [ 0.16850716 0.17150635 -0.19875582 ... 0.12918222 0.11271793 0.0394121 ] [ 0.29398486 0.280913 -0.01196156 ... 0.07852134 0.07030719 0.09086547]] [[-0.07073694 -0.10046645 -0.03597007 ... -0.07923523 0.01387493 0.01231721] [-0.1020954 -0.26712653 -0.33932224 ... -0.09367128 0.14109574 0.07324279] [-0.2119318 -0.31250268 -0.20440452 ... 0.24267639 0.4752171 0.4165711 ] ... [-0.0879111 -0.12620623 -0.10090303 ... 0.28960818 0.28885174 0.19804284] [ 0.10308822 0.04430995 -0.09934303 ... 0.18856834 0.06447497 -0.12182979] [ 0.26574394 0.26832452 0.15274914 ... 0.10362884 0.01922722 -0.14547373]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': [1, 1, 1], 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5779.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : int[] = prim::Constant[value=[1, 1, 1]]() %5 : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::avg_pool3d(%x, %5, %4, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%6) fw_re: [[[[[ 1.52779862e-01 3.00674681e-02 7.66139179e-02 ... 1.81393344e-02 2.26057723e-01 3.36919546e-01] [ 1.29395917e-01 9.08922330e-02 2.44276941e-01 ... 2.05233365e-01 4.23399121e-01 5.07915199e-01] [ 1.33510962e-01 5.11413626e-02 1.44873857e-02 ... 1.74543679e-01 2.35370904e-01 1.23881221e-01] ... [ 1.13051213e-01 1.41656846e-01 1.45409018e-01 ... -9.21401232e-02 -1.51356071e-01 -2.17134729e-01] [ 3.34431916e-01 3.72818261e-01 2.27549106e-01 ... -2.68603921e-01 5.21603636e-02 -7.46056661e-02] [ 2.51958728e-01 3.48808497e-01 2.17196301e-01 ... -4.23085332e-01 -2.05393493e-01 -4.27410454e-01]] [[ 2.02667952e-01 1.51917472e-01 2.27578562e-02 ... 7.22238347e-02 1.20178141e-01 1.46546423e-01] [ 8.21070001e-02 1.33341745e-01 2.47924805e-01 ... 2.61186749e-01 2.96184689e-01 3.00790727e-01] [ 2.21511260e-01 1.50169551e-01 2.62824476e-01 ... 2.02870071e-01 2.09898219e-01 9.66315120e-02] ... [ 1.39352262e-01 2.50559539e-01 2.34849855e-01 ... -9.70879719e-02 -9.89574492e-02 -2.35558584e-01] [ 2.35511035e-01 3.53552997e-01 1.45968318e-01 ... -2.98798084e-02 1.96477249e-01 9.05699804e-02] [ 4.01904196e-01 4.69400346e-01 1.97449520e-01 ... -3.68005969e-02 1.43574804e-01 -8.17029923e-03]] [[-3.81962895e-01 -7.00452477e-02 -7.46925771e-02 ... 2.28148606e-02 1.34738818e-01 1.03082754e-01] [-3.90333861e-01 -1.42804369e-01 -7.37602413e-02 ... 
5.88200092e-02 1.82558715e-01 2.31055319e-01] [-3.08531970e-01 -1.92111492e-01 -1.25477076e-01 ... 2.38480959e-02 4.81696911e-02 2.57708598e-02] ... [-1.54577658e-01 1.00949958e-01 2.75376588e-01 ... -2.68639266e-01 -3.23473305e-01 -4.43707526e-01] [ 1.52571723e-01 3.36703122e-01 3.23904306e-01 ... -2.39700340e-02 1.20961286e-01 1.64435312e-01] [ 5.45955002e-01 6.20212555e-01 4.64740336e-01 ... 1.04290411e-01 2.26830855e-01 2.01807812e-01]] ... [[-5.46990812e-01 -4.05357957e-01 -1.58748418e-01 ... -2.31231749e-02 -1.61039755e-02 -2.37639025e-01] [-1.92703515e-01 -1.04984425e-01 1.14085466e-01 ... 7.79169574e-02 1.49308711e-01 4.04939055e-02] [-1.98028266e-01 -1.13272995e-01 1.87688798e-01 ... 2.83704758e-01 4.14608151e-01 3.35084617e-01] ... [ 2.01416910e-01 1.99412227e-01 1.54873446e-01 ... 6.68900684e-02 -1.32688567e-01 -2.05503643e-01] [ 1.45959288e-01 1.46014482e-01 -1.15627997e-01 ... -8.11586380e-02 -2.58064628e-01 -4.22745228e-01] [-5.28105497e-02 -1.28368631e-01 -3.59144479e-01 ... -1.50920630e-01 -2.45870799e-01 -4.24137741e-01]] [[-8.10785770e-01 -4.09026980e-01 -6.23898953e-02 ... 1.41084731e-01 -9.86493309e-04 -1.76615119e-01] [-5.12864053e-01 -2.36467496e-01 8.51243883e-02 ... 1.47870615e-01 1.31739110e-01 7.59135783e-02] [-1.46157295e-01 4.45459336e-02 3.37332487e-01 ... 2.20899895e-01 2.03877330e-01 1.62518099e-01] ... [ 5.88148646e-02 1.20050430e-01 -5.21481372e-02 ... 8.04934353e-02 3.34337316e-02 -1.31221592e-01] [-6.36837212e-03 3.30697820e-02 -2.03832269e-01 ... -9.03161243e-02 -1.40287295e-01 -3.38745058e-01] [-2.07268193e-01 -1.61894709e-01 -3.24487478e-01 ... -3.80483061e-01 -2.83879161e-01 -4.87253904e-01]] [[-7.60424137e-01 -4.14896876e-01 2.01269183e-02 ... 2.24861860e-01 4.36074138e-02 5.88194132e-02] [-4.99167055e-01 -2.74415731e-01 8.67700577e-02 ... 2.67305940e-01 1.31919518e-01 1.69654548e-01] [-1.62767932e-01 4.73498777e-02 3.46041858e-01 ... 3.42309743e-01 2.84453034e-01 2.48299167e-01] ... 
[-1.61135681e-02 1.22303776e-01 -8.56670439e-02 ... 5.89663200e-02 1.09055035e-01 -1.61887497e-01] [-1.79676211e-03 3.60591337e-02 -1.30443037e-01 ... -2.34214328e-02 -9.18136463e-02 -4.09728140e-01] [-1.71952918e-01 -1.07528955e-01 -1.76589713e-01 ... -3.93119305e-01 -3.17605585e-01 -7.16151774e-01]]] [[[ 2.54402161e-01 2.22129509e-01 1.03057295e-01 ... 4.85895514e-01 2.84894109e-01 3.67386937e-01] [-6.72790408e-03 -2.69157253e-02 -2.94869374e-02 ... 3.78768355e-01 2.57320791e-01 3.22554529e-01] [-3.28945637e-01 -3.59892249e-01 -2.39671633e-01 ... 5.29725738e-02 2.34754980e-01 4.34885651e-01] ... [ 2.37468943e-01 7.46810436e-03 -1.93192065e-01 ... -6.34281263e-02 2.15906680e-01 4.49563235e-01] [-4.27212333e-03 -1.07811674e-01 -1.07263457e-02 ... -1.24652587e-01 4.60016243e-02 3.57485801e-01] [ 1.44609243e-01 -7.42076114e-02 -1.63254306e-01 ... -1.85891733e-01 -1.37300983e-01 3.88771258e-02]] [[ 1.62868634e-01 1.54944524e-01 -5.22583276e-02 ... 1.79260761e-01 7.47189224e-02 7.00745657e-02] [ 5.13979644e-02 -2.44131219e-02 -1.86229154e-01 ... 4.86220568e-02 -1.56404357e-02 -5.12758158e-02] [-2.58229315e-01 -3.52691263e-01 -3.23179454e-01 ... -2.87040740e-01 3.08625959e-02 1.89693272e-01] ... [ 2.55921930e-01 -2.28830446e-02 -2.02616781e-01 ... 7.02929422e-02 1.17528088e-01 3.58803153e-01] [ 2.61046320e-01 2.70498823e-02 3.18695828e-02 ... 5.22801355e-02 1.99096680e-01 5.36237240e-01] [ 1.95930168e-01 4.99759289e-03 -1.16011702e-01 ... 2.52824128e-02 1.64049923e-01 3.23878944e-01]] [[ 8.79266858e-02 3.27760428e-02 1.25067130e-01 ... 2.00852916e-01 1.63695514e-01 1.16056234e-01] [ 1.10386044e-01 8.59279260e-02 2.16143094e-02 ... 1.11059666e-01 8.25518444e-02 2.78660059e-02] [-1.58916861e-01 -8.83060768e-02 -5.35180755e-02 ... -2.43657559e-01 1.43124580e-01 1.63344145e-01] ... [ 9.46942270e-02 -1.42374292e-01 -5.17424047e-01 ... 1.15427777e-01 1.29390016e-01 3.06935102e-01] [ 3.44866872e-01 8.53207782e-02 -2.59927988e-01 ... 
1.65187851e-01 2.63976514e-01 4.94117260e-01] [ 2.23274633e-01 5.90045787e-02 -2.24678725e-01 ... 2.39170238e-01 3.51205945e-01 4.86576080e-01]] ... [[ 2.16996744e-01 -2.02408180e-01 -3.12771112e-01 ... -2.57175118e-01 -1.70111522e-01 -3.34944390e-02] [ 2.22306460e-01 -1.31477997e-01 -3.60866904e-01 ... -1.17831796e-01 -1.31569073e-01 -2.02467501e-01] [ 3.83259624e-01 8.33881146e-04 -1.47777051e-01 ... -2.57524759e-01 -2.57143438e-01 -3.18154395e-01] ... [-1.54497758e-01 2.09071487e-02 1.57086059e-01 ... 1.05603285e-01 -4.10768837e-02 -1.60558656e-01] [-2.66784787e-01 1.30764639e-03 2.71576375e-01 ... 2.25085571e-01 8.37632082e-03 -1.62920475e-01] [-3.88235599e-01 -2.18310863e-01 4.48310152e-02 ... 2.30854452e-01 1.07737193e-02 -1.99574947e-01]] [[ 8.15360844e-02 -1.02750368e-01 -1.89246863e-01 ... -3.74826759e-01 -2.47202292e-01 -2.93071389e-01] [ 1.47919819e-01 -7.33847544e-02 -2.99238592e-01 ... -1.89121783e-01 -2.58327454e-01 -3.49227726e-01] [ 3.23181599e-01 -8.66698567e-03 -3.73001546e-01 ... -1.02576241e-01 -2.59083718e-01 -4.58484590e-01] ... [ 2.04561874e-02 1.91357329e-01 3.25626105e-01 ... 6.68485940e-04 -2.71016508e-01 -3.04864973e-01] [-1.02878414e-01 8.17861706e-02 2.74341226e-01 ... -8.29069316e-02 -2.11306870e-01 -1.99755460e-01] [-2.93394178e-01 -2.22731888e-01 -4.67074569e-03 ... -8.66469403e-04 -1.33295715e-01 -1.83252797e-01]] [[ 5.52469082e-02 -6.76896796e-02 -1.45769402e-01 ... -2.93331861e-01 -1.24625392e-01 -1.20952100e-01] [ 1.96009651e-01 1.45246573e-02 -2.59640962e-01 ... -8.44393745e-02 -1.39274165e-01 -1.10198282e-01] [ 5.03157973e-01 1.37713596e-01 -4.35389578e-01 ... -5.08256741e-02 -1.01121582e-01 -2.27965549e-01] ... [ 4.50359173e-02 2.83592314e-01 4.71908420e-01 ... 6.73367679e-02 -3.80339086e-01 -4.32461143e-01] [ 7.68138543e-02 1.27075896e-01 1.30367219e-01 ... -1.33228138e-01 -3.60910803e-01 -3.72478575e-01] [-1.99721828e-02 -1.34686157e-01 -2.36869991e-01 ... 
2.88703125e-02 -2.67871976e-01 -3.79436970e-01]]] [[[-6.30717635e-01 -1.91310987e-01 5.43333031e-02 ... -5.28732061e-01 -3.26500118e-01 -6.18521810e-01] [-5.65003932e-01 -2.23504275e-01 -2.24523675e-02 ... -2.31183708e-01 -2.37663776e-01 -3.58520955e-01] [-3.00981164e-01 -3.11932087e-01 -2.40648955e-01 ... 7.88749103e-03 -1.69861708e-02 -2.21460655e-01] ... [ 3.52246493e-01 4.53793526e-01 4.36728358e-01 ... -3.64211202e-01 -1.85892656e-01 -8.96516070e-02] [ 4.03519958e-01 3.06556195e-01 1.95477068e-01 ... -4.00063721e-03 9.36502144e-02 3.97378951e-01] [ 3.16303998e-01 3.63901407e-01 3.45001131e-01 ... -8.82083550e-02 -6.10423572e-02 1.91500932e-01]] [[-6.21510088e-01 -2.93140650e-01 -6.88919425e-02 ... -2.18057096e-01 3.77751724e-03 -6.21383786e-02] [-6.14648759e-01 -2.66149729e-01 1.42896175e-03 ... -8.22931603e-02 -3.49711403e-02 -2.34282874e-02] [-3.28158081e-01 -2.76146531e-01 -1.84367523e-01 ... -3.05848513e-02 -1.57728754e-02 -1.71824202e-01] ... [ 6.95536956e-02 7.96627700e-02 1.30650461e-01 ... -3.45512889e-02 1.84620500e-01 1.89833269e-01] [ 1.57141656e-01 1.01906769e-02 -1.52206316e-03 ... -9.33214650e-03 2.23102838e-01 4.04609650e-01] [ 6.94084391e-02 5.51980734e-03 1.43107399e-01 ... -2.08967373e-01 8.43884517e-03 1.41715363e-01]] [[-2.76992559e-01 -1.68212950e-01 -9.11261737e-02 ... 3.83390317e-04 4.46537733e-02 1.88574314e-01] [-4.16536748e-01 -2.79056668e-01 -7.80574977e-02 ... 5.13974726e-02 -1.05205001e-02 9.78368148e-02] [-2.57783562e-01 -2.86334038e-01 -1.88902721e-01 ... -2.42208989e-04 1.17998170e-02 -4.99952026e-02] ... [-1.10874191e-01 -1.17246211e-01 -8.11496601e-02 ... 1.85269251e-01 2.70119488e-01 2.94967860e-01] [-1.75683439e-01 -2.15022013e-01 -2.00622335e-01 ... -3.11538912e-02 1.07548349e-01 2.01982543e-01] [-2.18113959e-01 -2.00410232e-01 -6.31911680e-02 ... -2.49763384e-01 -1.46482587e-01 -1.09323658e-01]] ... [[ 3.66666943e-01 2.19755292e-01 -6.94667324e-02 ... 
1.18719779e-01 3.15070033e-01 4.16471094e-01] [ 3.35291326e-01 1.14740640e-01 -3.49189378e-02 ... -7.46762380e-02 1.09130114e-01 4.34345342e-02] [ 1.81371391e-01 1.11045465e-02 -1.81634352e-01 ... -1.69827774e-01 -6.54000789e-02 -6.16651177e-02] ... [-7.18127862e-02 1.08460814e-01 3.24322939e-01 ... -6.17808513e-02 1.21538602e-01 8.05202425e-02] [-1.92935780e-01 -2.51751486e-03 1.84014007e-01 ... 1.27582565e-01 1.58259496e-01 1.99776486e-01] [-4.92946416e-01 -3.84356171e-01 -1.86385721e-01 ... 3.61728549e-01 5.96894026e-02 4.81151640e-02]] [[ 2.50674099e-01 3.05418104e-01 2.89347507e-02 ... 1.24019809e-01 1.69126466e-01 4.11279589e-01] [ 3.11157972e-01 1.96873605e-01 -8.50770399e-02 ... -7.65860733e-03 4.41076569e-02 1.00858964e-01] [ 3.37165713e-01 1.44310027e-01 -1.53192461e-01 ... -2.06874594e-01 -3.84738110e-02 1.11104906e-01] ... [-2.87316293e-01 -1.24916583e-01 1.31130368e-01 ... -1.74442325e-02 3.84922372e-04 -4.62815017e-02] [-3.37603837e-02 1.18831336e-01 2.90660024e-01 ... 1.49555907e-01 4.61133197e-02 1.25080526e-01] [-2.32024834e-01 -1.21580392e-01 9.67537314e-02 ... 4.01495725e-01 2.44916752e-02 -4.72989865e-02]] [[ 1.39828309e-01 1.64551541e-01 2.76679378e-02 ... 1.82454765e-01 6.55919015e-02 2.33787969e-01] [ 3.39714199e-01 2.26615131e-01 -3.14744306e-03 ... 7.75018558e-02 -1.46471001e-02 6.46345839e-02] [ 3.22060496e-01 1.22426189e-01 -1.70341462e-01 ... -1.61090344e-01 -1.20158963e-01 -2.01865528e-02] ... [-1.66012913e-01 -1.90367267e-01 1.45124123e-01 ... 8.29255953e-02 6.40016943e-02 -7.26956204e-02] [ 8.11503530e-02 9.40137804e-02 3.02295685e-01 ... 1.57539546e-01 2.61674915e-02 -5.53593636e-02] [-1.23330876e-01 -1.15060605e-01 9.11675692e-02 ... 3.54061931e-01 -2.75401473e-02 -2.43100926e-01]]]]]; ov_res: [[[[[ 1.52779862e-01 3.00674681e-02 7.66139179e-02 ... 1.81393344e-02 2.26057723e-01 3.36919546e-01] [ 1.29395917e-01 9.08922330e-02 2.44276941e-01 ... 
2.05233365e-01 4.23399121e-01 5.07915199e-01] [ 1.33510962e-01 5.11413626e-02 1.44873857e-02 ... 1.74543679e-01 2.35370904e-01 1.23881221e-01] ... [ 1.13051213e-01 1.41656846e-01 1.45409018e-01 ... -9.21401232e-02 -1.51356071e-01 -2.17134729e-01] [ 3.34431916e-01 3.72818261e-01 2.27549106e-01 ... -2.68603921e-01 5.21603636e-02 -7.46056661e-02] [ 2.51958728e-01 3.48808497e-01 2.17196301e-01 ... -4.23085332e-01 -2.05393493e-01 -4.27410454e-01]] [[ 2.02667952e-01 1.51917472e-01 2.27578562e-02 ... 7.22238347e-02 1.20178141e-01 1.46546423e-01] [ 8.21070001e-02 1.33341745e-01 2.47924805e-01 ... 2.61186749e-01 2.96184689e-01 3.00790727e-01] [ 2.21511260e-01 1.50169551e-01 2.62824476e-01 ... 2.02870071e-01 2.09898219e-01 9.66315120e-02] ... [ 1.39352262e-01 2.50559539e-01 2.34849855e-01 ... -9.70879719e-02 -9.89574492e-02 -2.35558584e-01] [ 2.35511035e-01 3.53552997e-01 1.45968318e-01 ... -2.98798084e-02 1.96477249e-01 9.05699804e-02] [ 4.01904196e-01 4.69400346e-01 1.97449520e-01 ... -3.68005969e-02 1.43574804e-01 -8.17029923e-03]] [[-3.81962895e-01 -7.00452477e-02 -7.46925771e-02 ... 2.28148606e-02 1.34738818e-01 1.03082754e-01] [-3.90333861e-01 -1.42804369e-01 -7.37602413e-02 ... 5.88200092e-02 1.82558715e-01 2.31055319e-01] [-3.08531970e-01 -1.92111492e-01 -1.25477076e-01 ... 2.38480959e-02 4.81696911e-02 2.57708598e-02] ... [-1.54577658e-01 1.00949958e-01 2.75376588e-01 ... -2.68639266e-01 -3.23473305e-01 -4.43707526e-01] [ 1.52571723e-01 3.36703122e-01 3.23904306e-01 ... -2.39700340e-02 1.20961286e-01 1.64435312e-01] [ 5.45955002e-01 6.20212555e-01 4.64740336e-01 ... 1.04290411e-01 2.26830855e-01 2.01807812e-01]] ... [[-5.46990812e-01 -4.05357957e-01 -1.58748418e-01 ... -2.31231749e-02 -1.61039755e-02 -2.37639025e-01] [-1.92703515e-01 -1.04984425e-01 1.14085466e-01 ... 7.79169574e-02 1.49308711e-01 4.04939055e-02] [-1.98028266e-01 -1.13272995e-01 1.87688798e-01 ... 2.83704758e-01 4.14608151e-01 3.35084617e-01] ... [ 2.01416910e-01 1.99412227e-01 1.54873446e-01 ... 
6.68900684e-02 -1.32688567e-01 -2.05503643e-01] [ 1.45959288e-01 1.46014482e-01 -1.15627997e-01 ... -8.11586380e-02 -2.58064628e-01 -4.22745228e-01] [-5.28105497e-02 -1.28368631e-01 -3.59144479e-01 ... -1.50920630e-01 -2.45870799e-01 -4.24137741e-01]] [[-8.10785770e-01 -4.09026980e-01 -6.23898953e-02 ... 1.41084731e-01 -9.86493309e-04 -1.76615119e-01] [-5.12864053e-01 -2.36467496e-01 8.51243883e-02 ... 1.47870615e-01 1.31739110e-01 7.59135783e-02] [-1.46157295e-01 4.45459336e-02 3.37332487e-01 ... 2.20899895e-01 2.03877330e-01 1.62518099e-01] ... [ 5.88148646e-02 1.20050430e-01 -5.21481372e-02 ... 8.04934353e-02 3.34337316e-02 -1.31221592e-01] [-6.36837212e-03 3.30697820e-02 -2.03832269e-01 ... -9.03161243e-02 -1.40287295e-01 -3.38745058e-01] [-2.07268193e-01 -1.61894709e-01 -3.24487478e-01 ... -3.80483061e-01 -2.83879161e-01 -4.87253904e-01]] [[-7.60424137e-01 -4.14896876e-01 2.01269183e-02 ... 2.24861860e-01 4.36074138e-02 5.88194132e-02] [-4.99167055e-01 -2.74415731e-01 8.67700577e-02 ... 2.67305940e-01 1.31919518e-01 1.69654548e-01] [-1.62767932e-01 4.73498777e-02 3.46041858e-01 ... 3.42309743e-01 2.84453034e-01 2.48299167e-01] ... [-1.61135681e-02 1.22303776e-01 -8.56670439e-02 ... 5.89663200e-02 1.09055035e-01 -1.61887497e-01] [-1.79676211e-03 3.60591337e-02 -1.30443037e-01 ... -2.34214328e-02 -9.18136463e-02 -4.09728140e-01] [-1.71952918e-01 -1.07528955e-01 -1.76589713e-01 ... -3.93119305e-01 -3.17605585e-01 -7.16151774e-01]]] [[[ 2.54402161e-01 2.22129509e-01 1.03057295e-01 ... 4.85895514e-01 2.84894109e-01 3.67386937e-01] [-6.72790408e-03 -2.69157253e-02 -2.94869374e-02 ... 3.78768355e-01 2.57320791e-01 3.22554529e-01] [-3.28945637e-01 -3.59892249e-01 -2.39671633e-01 ... 5.29725738e-02 2.34754980e-01 4.34885651e-01] ... [ 2.37468943e-01 7.46810436e-03 -1.93192065e-01 ... -6.34281263e-02 2.15906680e-01 4.49563235e-01] [-4.27212333e-03 -1.07811674e-01 -1.07263457e-02 ... 
-1.24652587e-01 4.60016243e-02 3.57485801e-01] [ 1.44609243e-01 -7.42076114e-02 -1.63254306e-01 ... -1.85891733e-01 -1.37300983e-01 3.88771258e-02]] [[ 1.62868634e-01 1.54944524e-01 -5.22583276e-02 ... 1.79260761e-01 7.47189224e-02 7.00745657e-02] [ 5.13979644e-02 -2.44131219e-02 -1.86229154e-01 ... 4.86220568e-02 -1.56404357e-02 -5.12758158e-02] [-2.58229315e-01 -3.52691263e-01 -3.23179454e-01 ... -2.87040740e-01 3.08625959e-02 1.89693272e-01] ... [ 2.55921930e-01 -2.28830446e-02 -2.02616781e-01 ... 7.02929422e-02 1.17528088e-01 3.58803153e-01] [ 2.61046320e-01 2.70498823e-02 3.18695828e-02 ... 5.22801355e-02 1.99096680e-01 5.36237240e-01] [ 1.95930168e-01 4.99759289e-03 -1.16011702e-01 ... 2.52824128e-02 1.64049923e-01 3.23878944e-01]] [[ 8.79266858e-02 3.27760428e-02 1.25067130e-01 ... 2.00852916e-01 1.63695514e-01 1.16056234e-01] [ 1.10386044e-01 8.59279260e-02 2.16143094e-02 ... 1.11059666e-01 8.25518444e-02 2.78660059e-02] [-1.58916861e-01 -8.83060768e-02 -5.35180755e-02 ... -2.43657559e-01 1.43124580e-01 1.63344145e-01] ... [ 9.46942270e-02 -1.42374292e-01 -5.17424047e-01 ... 1.15427777e-01 1.29390016e-01 3.06935102e-01] [ 3.44866872e-01 8.53207782e-02 -2.59927988e-01 ... 1.65187851e-01 2.63976514e-01 4.94117260e-01] [ 2.23274633e-01 5.90045787e-02 -2.24678725e-01 ... 2.39170238e-01 3.51205945e-01 4.86576080e-01]] ... [[ 2.16996744e-01 -2.02408180e-01 -3.12771112e-01 ... -2.57175118e-01 -1.70111522e-01 -3.34944390e-02] [ 2.22306460e-01 -1.31477997e-01 -3.60866904e-01 ... -1.17831796e-01 -1.31569073e-01 -2.02467501e-01] [ 3.83259624e-01 8.33881146e-04 -1.47777051e-01 ... -2.57524759e-01 -2.57143438e-01 -3.18154395e-01] ... [-1.54497758e-01 2.09071487e-02 1.57086059e-01 ... 1.05603285e-01 -4.10768837e-02 -1.60558656e-01] [-2.66784787e-01 1.30764639e-03 2.71576375e-01 ... 2.25085571e-01 8.37632082e-03 -1.62920475e-01] [-3.88235599e-01 -2.18310863e-01 4.48310152e-02 ... 
2.30854452e-01 1.07737193e-02 -1.99574947e-01]] [[ 8.15360844e-02 -1.02750368e-01 -1.89246863e-01 ... -3.74826759e-01 -2.47202292e-01 -2.93071389e-01] [ 1.47919819e-01 -7.33847544e-02 -2.99238592e-01 ... -1.89121783e-01 -2.58327454e-01 -3.49227726e-01] [ 3.23181599e-01 -8.66698567e-03 -3.73001546e-01 ... -1.02576241e-01 -2.59083718e-01 -4.58484590e-01] ... [ 2.04561874e-02 1.91357329e-01 3.25626105e-01 ... 6.68485940e-04 -2.71016508e-01 -3.04864973e-01] [-1.02878414e-01 8.17861706e-02 2.74341226e-01 ... -8.29069316e-02 -2.11306870e-01 -1.99755460e-01] [-2.93394178e-01 -2.22731888e-01 -4.67074569e-03 ... -8.66469403e-04 -1.33295715e-01 -1.83252797e-01]] [[ 5.52469082e-02 -6.76896796e-02 -1.45769402e-01 ... -2.93331861e-01 -1.24625392e-01 -1.20952100e-01] [ 1.96009651e-01 1.45246573e-02 -2.59640962e-01 ... -8.44393745e-02 -1.39274165e-01 -1.10198282e-01] [ 5.03157973e-01 1.37713596e-01 -4.35389578e-01 ... -5.08256741e-02 -1.01121582e-01 -2.27965549e-01] ... [ 4.50359173e-02 2.83592314e-01 4.71908420e-01 ... 6.73367679e-02 -3.80339086e-01 -4.32461143e-01] [ 7.68138543e-02 1.27075896e-01 1.30367219e-01 ... -1.33228138e-01 -3.60910803e-01 -3.72478575e-01] [-1.99721828e-02 -1.34686157e-01 -2.36869991e-01 ... 2.88703125e-02 -2.67871976e-01 -3.79436970e-01]]] [[[-6.30717635e-01 -1.91310987e-01 5.43333031e-02 ... -5.28732061e-01 -3.26500118e-01 -6.18521810e-01] [-5.65003932e-01 -2.23504275e-01 -2.24523675e-02 ... -2.31183708e-01 -2.37663776e-01 -3.58520955e-01] [-3.00981164e-01 -3.11932087e-01 -2.40648955e-01 ... 7.88749103e-03 -1.69861708e-02 -2.21460655e-01] ... [ 3.52246493e-01 4.53793526e-01 4.36728358e-01 ... -3.64211202e-01 -1.85892656e-01 -8.96516070e-02] [ 4.03519958e-01 3.06556195e-01 1.95477068e-01 ... -4.00063721e-03 9.36502144e-02 3.97378951e-01] [ 3.16303998e-01 3.63901407e-01 3.45001131e-01 ... -8.82083550e-02 -6.10423572e-02 1.91500932e-01]] [[-6.21510088e-01 -2.93140650e-01 -6.88919425e-02 ... 
-2.18057096e-01 3.77751724e-03 -6.21383786e-02] [-6.14648759e-01 -2.66149729e-01 1.42896175e-03 ... -8.22931603e-02 -3.49711403e-02 -2.34282874e-02] [-3.28158081e-01 -2.76146531e-01 -1.84367523e-01 ... -3.05848513e-02 -1.57728754e-02 -1.71824202e-01] ... [ 6.95536956e-02 7.96627700e-02 1.30650461e-01 ... -3.45512889e-02 1.84620500e-01 1.89833269e-01] [ 1.57141656e-01 1.01906769e-02 -1.52206316e-03 ... -9.33214650e-03 2.23102838e-01 4.04609650e-01] [ 6.94084391e-02 5.51980734e-03 1.43107399e-01 ... -2.08967373e-01 8.43884517e-03 1.41715363e-01]] [[-2.76992559e-01 -1.68212950e-01 -9.11261737e-02 ... 3.83390317e-04 4.46537733e-02 1.88574314e-01] [-4.16536748e-01 -2.79056668e-01 -7.80574977e-02 ... 5.13974726e-02 -1.05205001e-02 9.78368148e-02] [-2.57783562e-01 -2.86334038e-01 -1.88902721e-01 ... -2.42208989e-04 1.17998170e-02 -4.99952026e-02] ... [-1.10874191e-01 -1.17246211e-01 -8.11496601e-02 ... 1.85269251e-01 2.70119488e-01 2.94967860e-01] [-1.75683439e-01 -2.15022013e-01 -2.00622335e-01 ... -3.11538912e-02 1.07548349e-01 2.01982543e-01] [-2.18113959e-01 -2.00410232e-01 -6.31911680e-02 ... -2.49763384e-01 -1.46482587e-01 -1.09323658e-01]] ... [[ 3.66666943e-01 2.19755292e-01 -6.94667324e-02 ... 1.18719779e-01 3.15070033e-01 4.16471094e-01] [ 3.35291326e-01 1.14740640e-01 -3.49189378e-02 ... -7.46762380e-02 1.09130114e-01 4.34345342e-02] [ 1.81371391e-01 1.11045465e-02 -1.81634352e-01 ... -1.69827774e-01 -6.54000789e-02 -6.16651177e-02] ... [-7.18127862e-02 1.08460814e-01 3.24322939e-01 ... -6.17808513e-02 1.21538602e-01 8.05202425e-02] [-1.92935780e-01 -2.51751486e-03 1.84014007e-01 ... 1.27582565e-01 1.58259496e-01 1.99776486e-01] [-4.92946416e-01 -3.84356171e-01 -1.86385721e-01 ... 3.61728549e-01 5.96894026e-02 4.81151640e-02]] [[ 2.50674099e-01 3.05418104e-01 2.89347507e-02 ... 1.24019809e-01 1.69126466e-01 4.11279589e-01] [ 3.11157972e-01 1.96873605e-01 -8.50770399e-02 ... 
-7.65860733e-03 4.41076569e-02 1.00858964e-01] [ 3.37165713e-01 1.44310027e-01 -1.53192461e-01 ... -2.06874594e-01 -3.84738110e-02 1.11104906e-01] ... [-2.87316293e-01 -1.24916583e-01 1.31130368e-01 ... -1.74442325e-02 3.84922372e-04 -4.62815017e-02] [-3.37603837e-02 1.18831336e-01 2.90660024e-01 ... 1.49555907e-01 4.61133197e-02 1.25080526e-01] [-2.32024834e-01 -1.21580392e-01 9.67537314e-02 ... 4.01495725e-01 2.44916752e-02 -4.72989865e-02]] [[ 1.39828309e-01 1.64551541e-01 2.76679378e-02 ... 1.82454765e-01 6.55919015e-02 2.33787969e-01] [ 3.39714199e-01 2.26615131e-01 -3.14744306e-03 ... 7.75018558e-02 -1.46471001e-02 6.46345839e-02] [ 3.22060496e-01 1.22426189e-01 -1.70341462e-01 ... -1.61090344e-01 -1.20158963e-01 -2.01865528e-02] ... [-1.66012913e-01 -1.90367267e-01 1.45124123e-01 ... 8.29255953e-02 6.40016943e-02 -7.26956204e-02] [ 8.11503530e-02 9.40137804e-02 3.02295685e-01 ... 1.57539546e-01 2.61674915e-02 -5.53593636e-02] [-1.23330876e-01 -1.15060605e-01 9.11675692e-02 ... 3.54061931e-01 -2.75401473e-02 -2.43100926e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': [3, 3, 3], 'padding': [0, 0, 0]} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5782.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : int[] = prim::Constant[value=[0, 0, 0]]() %5 : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::avg_pool3d(%x, %5, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%6) fw_re: [[[[[ 0.10299119 0.21704884 -0.1316016 0.04037039 -0.22863194] [ 0.01237402 0.30824298 -0.11578488 -0.08399738 -0.3418756 ] [ 0.30764863 -0.12840894 -0.09907385 -0.09452504 -0.3243479 ] [-0.12011138 -0.19515592 0.09584289 -0.33789513 -0.05682506] [-0.02880882 0.25187257 0.28394577 -0.35237515 -0.10836238]] [[-0.14628662 -0.02928841 0.00882092 0.3631658 0.20236692] [ 0.0291779 -0.22712345 0.09224898 -0.17307691 0.14384349] [ 0.16132389 -0.14418341 -0.03507618 -0.02984391 0.06674372] [-0.09652846 0.15445054 -0.12342373 0.3742495 -0.0487727 ] [-0.11122021 -0.08821293 0.08068877 -0.23762234 0.24991931]] [[-0.2756582 -0.21593918 -0.10588779 0.09092392 -0.012709 ] [-0.11452206 0.15515849 0.17484774 -0.13980784 -0.25180238] [ 0.25884315 -0.09291211 -0.14957897 0.2753921 -0.05372471] [ 0.04295181 -0.16517538 -0.11211702 0.39055082 -0.17727292] [ 0.255527 0.07699848 -0.2468427 -0.03117832 0.21021587]] [[ 0.35048082 0.3905724 -0.4382842 -0.20182589 -0.19884737] [ 0.07693371 -0.01932199 0.01754111 -0.12616934 0.03129368] [-0.06005247 -0.33034325 0.23783118 0.28734738 0.26468608] [-0.27705818 -0.20859411 -0.24508402 -0.06769859 0.37298757] [-0.3388714 0.08934169 -0.5439556 0.07323229 -0.02940512]] [[-0.20903708 -0.09217955 0.0433166 0.07080055 -0.14062542] [ 0.05811571 0.00881997 -0.17055407 0.5588936 0.22580443] [ 0.26092812 -0.1304321 0.19601645 0.14772306 -0.01957551] [ 0.07150494 -0.15237837 -0.02650144 0.04099733 
-0.16460931] [-0.30424464 -0.24768782 0.02629541 -0.12077927 0.26117635]]] [[[-0.16015959 -0.12775233 -0.23372279 0.11048165 0.07094143] [ 0.15686987 -0.32415062 0.12139088 -0.06381942 -0.10483823] [-0.07939634 -0.06629565 -0.25350404 -0.28403246 0.13273092] [ 0.14774986 -0.00561185 -0.03750496 0.02207744 0.47979432] [ 0.04115722 -0.10896861 0.0986928 -0.21583728 0.21532291]] [[ 0.11077361 0.04595499 -0.30621186 -0.03176329 -0.01480337] [ 0.11817931 -0.10522576 -0.21256068 0.05742748 -0.30496895] [-0.22825764 -0.08757974 -0.0599862 -0.23002158 0.18090422] [ 0.3536546 0.14924489 -0.00709536 -0.02582475 -0.06080467] [-0.11935593 0.15022735 0.25992796 0.16376318 0.18765962]] [[ 0.01172467 -0.42157942 0.3634171 0.429936 -0.29081616] [ 0.00294585 -0.1813957 0.19558293 0.2463376 0.09660048] [-0.35399964 -0.143106 0.40083313 -0.01469957 0.24872255] [-0.08640841 0.08442902 0.39775375 -0.00545431 -0.17097312] [ 0.16061836 -0.16479976 -0.10989324 -0.12537053 0.23071688]] [[ 0.12188884 0.01809095 -0.02200419 0.27472043 -0.00901792] [ 0.13276051 -0.22742845 -0.29128936 0.11546324 -0.28876832] [-0.2356104 -0.03377553 0.09912787 0.2986355 -0.03618668] [ 0.10640135 -0.21313703 0.03702538 0.14180036 -0.07438039] [-0.1770414 0.21048298 -0.2871637 0.00224089 0.15206985]] [[ 0.06103951 -0.1724455 0.04901315 0.09573922 0.27220473] [ 0.25481936 -0.45192292 -0.11666075 -0.13478176 -0.072984 ] [ 0.04969839 0.19116676 0.01472883 -0.34820703 0.01629271] [ 0.29675576 -0.24864626 -0.15082282 0.3972113 -0.34512016] [ 0.2465574 0.0780229 -0.2964832 0.17052008 0.02095507]]] [[[-0.36245954 0.48104954 -0.076326 0.04279403 -0.16210999] [ 0.30534932 0.2693152 -0.19056085 -0.17599155 -0.15051693] [ 0.15678634 0.14449048 0.0758452 -0.11534459 0.12453752] [ 0.02980384 -0.19918947 0.03325496 0.3960793 -0.08858819] [ 0.00486077 -0.1023603 -0.033282 0.04120717 -0.04557606]] [[-0.04025279 0.15475033 -0.09239679 0.08150935 -0.00942294] [-0.31898323 0.24841249 -0.4380975 -0.11574689 -0.06551766] [ 
0.17736407 -0.34264523 0.15340261 0.1745963 -0.3059549 ] [ 0.190163 -0.10008978 -0.037489 0.27953374 0.24039418] [ 0.16356707 -0.2568142 -0.04716897 -0.15440772 0.10754053]] [[-0.16350418 -0.07039408 0.39374834 0.09988369 -0.02339723] [-0.23849992 -0.14030425 0.34626442 -0.01743313 0.26180273] [ 0.369321 -0.154554 0.0279871 0.12506042 -0.01058218] [ 0.0224723 -0.03426266 0.09086384 0.03382374 -0.13673101] [ 0.3021335 -0.14253698 -0.03171226 0.1841777 -0.15433194]] [[-0.13202123 0.12779754 0.10930818 -0.09603789 -0.13478585] [ 0.19785884 -0.18556997 0.02132753 0.05685867 0.07336278] [ 0.11988619 0.26821455 0.06130219 0.11002797 -0.11097039] [-0.07538982 -0.0081004 -0.06349654 -0.00942639 0.14956567] [-0.17058831 0.0902299 -0.19358133 0.03247217 -0.12645644]] [[-0.09894122 -0.19556992 0.23267452 -0.19331342 0.0865239 ] [ 0.28561002 -0.01554502 -0.08592037 0.06308006 -0.09419701] [-0.15706812 -0.00611387 0.22199309 0.09761781 0.05821047] [-0.16743205 -0.13565703 -0.00586198 0.2677334 -0.3138043 ] [ 0.02080437 -0.18966883 0.49254715 0.11252168 -0.4595195 ]]]]]; ov_res: [[[[[ 0.10299119 0.21704884 -0.1316016 0.04037039 -0.22863194] [ 0.01237402 0.30824298 -0.11578488 -0.08399738 -0.3418756 ] [ 0.30764863 -0.12840894 -0.09907385 -0.09452504 -0.3243479 ] [-0.12011138 -0.19515592 0.09584289 -0.33789513 -0.05682506] [-0.02880882 0.25187257 0.28394577 -0.35237515 -0.10836238]] [[-0.14628662 -0.02928841 0.00882092 0.3631658 0.20236692] [ 0.0291779 -0.22712345 0.09224898 -0.17307691 0.14384349] [ 0.16132389 -0.14418341 -0.03507618 -0.02984391 0.06674372] [-0.09652846 0.15445054 -0.12342373 0.3742495 -0.0487727 ] [-0.11122021 -0.08821293 0.08068877 -0.23762234 0.24991931]] [[-0.2756582 -0.21593918 -0.10588779 0.09092392 -0.012709 ] [-0.11452206 0.15515849 0.17484774 -0.13980784 -0.25180238] [ 0.25884315 -0.09291211 -0.14957897 0.2753921 -0.05372471] [ 0.04295181 -0.16517538 -0.11211702 0.39055082 -0.17727292] [ 0.255527 0.07699848 -0.2468427 -0.03117832 0.21021587]] [[ 
0.35048082 0.3905724 -0.4382842 -0.20182589 -0.19884737] [ 0.07693371 -0.01932199 0.01754111 -0.12616934 0.03129368] [-0.06005247 -0.33034325 0.23783118 0.28734738 0.26468608] [-0.27705818 -0.20859411 -0.24508402 -0.06769859 0.37298757] [-0.3388714 0.08934169 -0.5439556 0.07323229 -0.02940512]] [[-0.20903708 -0.09217955 0.0433166 0.07080055 -0.14062542] [ 0.05811571 0.00881997 -0.17055407 0.5588936 0.22580443] [ 0.26092812 -0.1304321 0.19601645 0.14772306 -0.01957551] [ 0.07150494 -0.15237837 -0.02650144 0.04099733 -0.16460931] [-0.30424464 -0.24768782 0.02629541 -0.12077927 0.26117635]]] [[[-0.16015959 -0.12775233 -0.23372279 0.11048165 0.07094143] [ 0.15686987 -0.32415062 0.12139088 -0.06381942 -0.10483823] [-0.07939634 -0.06629565 -0.25350404 -0.28403246 0.13273092] [ 0.14774986 -0.00561185 -0.03750496 0.02207744 0.47979432] [ 0.04115722 -0.10896861 0.0986928 -0.21583728 0.21532291]] [[ 0.11077361 0.04595499 -0.30621186 -0.03176329 -0.01480337] [ 0.11817931 -0.10522576 -0.21256068 0.05742748 -0.30496895] [-0.22825764 -0.08757974 -0.0599862 -0.23002158 0.18090422] [ 0.3536546 0.14924489 -0.00709536 -0.02582475 -0.06080467] [-0.11935593 0.15022735 0.25992796 0.16376318 0.18765962]] [[ 0.01172467 -0.42157942 0.3634171 0.429936 -0.29081616] [ 0.00294585 -0.1813957 0.19558293 0.2463376 0.09660048] [-0.35399964 -0.143106 0.40083313 -0.01469957 0.24872255] [-0.08640841 0.08442902 0.39775375 -0.00545431 -0.17097312] [ 0.16061836 -0.16479976 -0.10989324 -0.12537053 0.23071688]] [[ 0.12188884 0.01809095 -0.02200419 0.27472043 -0.00901792] [ 0.13276051 -0.22742845 -0.29128936 0.11546324 -0.28876832] [-0.2356104 -0.03377553 0.09912787 0.2986355 -0.03618668] [ 0.10640135 -0.21313703 0.03702538 0.14180036 -0.07438039] [-0.1770414 0.21048298 -0.2871637 0.00224089 0.15206985]] [[ 0.06103951 -0.1724455 0.04901315 0.09573922 0.27220473] [ 0.25481936 -0.45192292 -0.11666075 -0.13478176 -0.072984 ] [ 0.04969839 0.19116676 0.01472883 -0.34820703 0.01629271] [ 0.29675576 -0.24864626 
-0.15082282 0.3972113 -0.34512016] [ 0.2465574 0.0780229 -0.2964832 0.17052008 0.02095507]]] [[[-0.36245954 0.48104954 -0.076326 0.04279403 -0.16210999] [ 0.30534932 0.2693152 -0.19056085 -0.17599155 -0.15051693] [ 0.15678634 0.14449048 0.0758452 -0.11534459 0.12453752] [ 0.02980384 -0.19918947 0.03325496 0.3960793 -0.08858819] [ 0.00486077 -0.1023603 -0.033282 0.04120717 -0.04557606]] [[-0.04025279 0.15475033 -0.09239679 0.08150935 -0.00942294] [-0.31898323 0.24841249 -0.4380975 -0.11574689 -0.06551766] [ 0.17736407 -0.34264523 0.15340261 0.1745963 -0.3059549 ] [ 0.190163 -0.10008978 -0.037489 0.27953374 0.24039418] [ 0.16356707 -0.2568142 -0.04716897 -0.15440772 0.10754053]] [[-0.16350418 -0.07039408 0.39374834 0.09988369 -0.02339723] [-0.23849992 -0.14030425 0.34626442 -0.01743313 0.26180273] [ 0.369321 -0.154554 0.0279871 0.12506042 -0.01058218] [ 0.0224723 -0.03426266 0.09086384 0.03382374 -0.13673101] [ 0.3021335 -0.14253698 -0.03171226 0.1841777 -0.15433194]] [[-0.13202123 0.12779754 0.10930818 -0.09603789 -0.13478585] [ 0.19785884 -0.18556997 0.02132753 0.05685867 0.07336278] [ 0.11988619 0.26821455 0.06130219 0.11002797 -0.11097039] [-0.07538982 -0.0081004 -0.06349654 -0.00942639 0.14956567] [-0.17058831 0.0902299 -0.19358133 0.03247217 -0.12645644]] [[-0.09894122 -0.19556992 0.23267452 -0.19331342 0.0865239 ] [ 0.28561002 -0.01554502 -0.08592037 0.06308006 -0.09419701] [-0.15706812 -0.00611387 0.22199309 0.09761781 0.05821047] [-0.16743205 -0.13565703 -0.00586198 0.2677334 -0.3138043 ] [ 0.02080437 -0.18966883 0.49254715 0.11252168 -0.4595195 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_avg_pool3d[ ie_device:CPU - precision:FP32 - count_include_pad:False - ceil_mode:False - params:{'kernel_size': [3, 2, 1], 'stride': [3, 1, 1], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5785.aten_avg_pool3d, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 %4 : int[] = prim::Constant[value=[0, 0, 0]]() %5 : int[] = prim::Constant[value=[3, 1, 1]]() %6 : int[] = prim::Constant[value=[3, 2, 1]]() %7 : Tensor = aten::avg_pool3d(%x, %6, %5, %4, %3, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pooling.py:68:0 return (%7) fw_re: [[[[[ 7.63997436e-03 1.03234470e-01 -7.85520375e-01 ... -4.04202305e-02 8.31521034e-01 5.50850444e-02] [ 3.94742750e-02 3.93033028e-04 -5.48381269e-01 ... -1.67218864e-01 6.58124506e-01 1.84086606e-01] [ 1.42995402e-01 -1.12128519e-01 -4.60249968e-02 ... 1.56990543e-01 1.25547424e-01 3.59918833e-01] ... [ 1.38032213e-01 -6.09776080e-01 5.10874808e-01 ... 9.68351141e-02 7.58461580e-02 -5.42867720e-01] [-5.83216131e-01 1.32910207e-01 -2.20716640e-01 ... 5.67049205e-01 3.57564211e-01 -4.04719621e-01] [-2.58193344e-01 4.21877176e-01 -6.18602276e-01 ... 6.59937143e-01 -9.33450684e-02 6.65262640e-01]] [[ 2.30260286e-02 2.67367482e-01 -5.10594435e-02 ... -1.86365828e-01 -2.86187679e-01 9.59236622e-02] [ 6.64354980e-01 5.81176877e-01 -1.13487147e-01 ... -2.32998028e-01 5.99206090e-02 -2.91989237e-01] [ 9.67549741e-01 1.67222381e-01 -2.17106774e-01 ... -9.52652097e-02 1.74183264e-01 -3.06680292e-01] ... [ 3.51637125e-01 1.96960330e-01 5.72051346e-01 ... 2.27769408e-02 3.84035744e-02 6.11347437e-01] [-2.53625125e-01 6.09196126e-02 4.59578842e-01 ... 3.10878068e-01 -1.39506280e-01 1.14561129e+00] [ 5.29509373e-02 4.95244741e-01 2.73782462e-01 ... -4.06587012e-02 -2.90905356e-01 2.45798621e-02]] [[ 4.90916729e-01 1.24273145e+00 -1.98637590e-01 ... -6.01162612e-01 -1.03096269e-01 -1.96022674e-01] [ 2.57966429e-01 7.80469179e-02 -1.14876807e+00 ... 
-7.04920709e-01 -1.22866958e-01 3.03310305e-01] [-1.85983583e-01 -3.99062008e-01 -1.15866339e+00 ... -3.18736583e-01 -3.25195551e-01 2.00991090e-02] ... [ 3.19082707e-01 -3.48193169e-01 5.97115941e-02 ... -1.44143790e-01 -6.08480990e-01 4.15442251e-02] [ 4.64313626e-01 -3.27459246e-01 -3.38563114e-01 ... -2.06329152e-01 1.04960382e-01 3.02218109e-01] [ 7.55180493e-02 1.99704361e-03 -8.34731162e-02 ... -8.51960015e-03 6.65811300e-01 -2.56944150e-01]] [[-1.25423089e-01 -1.19980074e-01 -1.03913255e-01 ... -4.30320472e-01 2.59843260e-01 3.36099625e-01] [-3.73127460e-01 6.10090196e-02 1.21473968e-01 ... -2.83825010e-01 6.02823384e-02 2.60774910e-01] [-2.16808602e-01 2.48564780e-01 5.72233140e-01 ... -3.22442949e-01 -3.32809120e-01 -2.08630919e-01] ... [ 2.15696320e-01 1.94567040e-01 -4.77281898e-01 ... -1.38264969e-02 -3.62301022e-01 -9.23503265e-02] [ 3.60717684e-01 -3.40162128e-01 -1.42629400e-01 ... -3.65233541e-01 -3.29162538e-01 2.03771114e-01] [ 5.27132571e-01 7.11074099e-02 2.63921320e-01 ... 5.84010303e-01 2.02293888e-01 7.02305213e-02]] [[-6.20825708e-01 -3.79567854e-02 -1.52868286e-01 ... 2.65966803e-01 7.66131401e-01 6.53081536e-02] [-2.54841328e-01 1.62218496e-01 -9.76516306e-02 ... 3.78072053e-01 6.77058458e-01 9.04926434e-02] [ 2.69433260e-01 1.44413605e-01 -4.64827895e-01 ... 4.20234680e-01 6.29772320e-02 3.41760308e-01] ... [ 1.94764331e-01 1.19285375e-01 -5.37766874e-01 ... -2.73270518e-01 4.44625467e-01 6.58890232e-02] [-3.20109241e-02 2.71109748e-03 1.28646284e-01 ... -5.01655281e-01 7.66313493e-01 -2.53612548e-01] [-5.52001715e-01 -3.34578753e-01 -1.41189307e-01 ... 9.99519601e-02 5.22837102e-01 3.40940833e-01]]] [[[ 4.19865578e-01 4.21443224e-01 1.24767356e-01 ... -2.64704764e-01 3.53619099e-01 3.44368607e-01] [ 2.57359058e-01 1.93554163e-02 -2.08305314e-01 ... -4.97242540e-01 3.39262933e-02 2.36509740e-02] [ 3.96621674e-01 1.54705226e-01 -3.13331127e-01 ... 5.38577549e-02 2.29179263e-02 -2.75208294e-01] ... 
[-8.18352476e-02 1.01001024e-01 -5.22562802e-01 ... 2.43755981e-01 2.06737909e-02 1.29955932e-01] [-4.05842781e-01 4.25036997e-01 -6.62518680e-01 ... -2.64272928e-01 -3.51884365e-02 4.87196535e-01] [-6.57516181e-01 8.06739926e-02 -1.37129307e-01 ... 4.97581773e-02 3.25365096e-01 4.10913736e-01]] [[ 3.73316765e-01 -8.79246473e-01 -5.45145929e-01 ... -1.26145280e-03 -4.76094127e-01 -6.96142197e-01] [ 5.06496467e-02 -3.37061882e-01 -3.05011481e-01 ... -5.94278634e-01 -4.48491007e-01 -1.21293001e-01] [ 4.76835901e-03 -1.75295278e-01 -9.67388153e-02 ... -1.02819204e-01 -5.09988330e-02 -2.65429765e-01] ... [ 1.37046024e-01 -6.25557542e-01 -5.14295042e-01 ... -8.06730449e-01 9.20193419e-02 1.24322735e-01] [ 2.49269426e-01 3.46426181e-02 -5.23340344e-01 ... -5.68633199e-01 -6.50535896e-02 -5.37077248e-01] [ 3.66843008e-02 2.09672526e-01 2.91127544e-02 ... 3.52481395e-01 2.80333906e-01 -4.42007333e-01]] [[ 2.70007253e-01 8.59781578e-02 -6.59323335e-02 ... 7.96827152e-02 4.42384072e-02 -8.12113658e-02] [ 1.57962903e-01 1.77517235e-01 -3.00276339e-01 ... -5.70030391e-01 1.58979282e-01 -1.23127095e-01] [-6.99910149e-02 -2.15352699e-02 -5.05467236e-01 ... 6.77426234e-02 9.54380274e-01 -2.80509889e-02] ... [-4.80033755e-01 6.17228687e-01 8.14897344e-02 ... 7.84497559e-01 4.19115812e-01 2.28356719e-01] [ 6.02848113e-01 -1.51342511e-01 6.23706520e-01 ... 6.74289525e-01 7.67518044e-01 2.08928064e-01] [ 3.59890938e-01 -5.94947398e-01 4.71944422e-01 ... 3.57436061e-01 4.75614160e-01 2.32667383e-02]] [[-1.57225773e-01 6.97934926e-02 -3.95799547e-01 ... -2.70744175e-01 1.91984490e-01 2.39403471e-01] [-3.45615357e-01 -7.36832678e-01 -5.24753667e-02 ... -2.58368015e-01 -1.78381965e-01 2.17653498e-01] [ 4.28298384e-01 -6.92509472e-01 1.41121134e-01 ... -5.44411123e-01 -1.85666885e-02 1.38761654e-01] ... [-3.15529734e-01 2.84200069e-02 -5.23036420e-01 ... -7.73914039e-01 6.66519225e-01 -6.13375425e-01] [-1.80924416e-01 -3.31364632e-01 -6.13791168e-01 ... 
-4.71580267e-01 -5.38449228e-01 -1.44217685e-01] [-6.80396318e-01 -3.39875929e-02 -6.87985420e-01 ... -1.94951996e-01 -7.65648365e-01 1.04328655e-01]] [[ 2.02942148e-01 -6.50804102e-01 -5.90459466e-01 ... -2.03020975e-01 -5.24904311e-01 -3.22196871e-01] [ 3.12446594e-01 9.81856659e-02 -4.67740893e-01 ... 1.28841117e-01 -3.86167139e-01 1.14591919e-01] [ 7.56898880e-01 4.78778988e-01 1.52152345e-01 ... 1.92031786e-01 -1.72810718e-01 8.42346177e-02] ... [-2.32834816e-01 2.72360742e-01 7.83258379e-01 ... -1.51131496e-01 4.76935834e-01 3.35027844e-01] [-2.18503132e-01 -1.20934598e-01 9.45152819e-01 ... 2.11798742e-01 7.72024170e-02 -4.44270730e-01] [ 2.64236331e-01 -2.60573268e-01 -7.94136152e-02 ... -3.51130366e-02 -2.96703845e-01 7.45238140e-02]]] [[[-3.55789751e-01 -3.39167327e-01 4.38729167e-01 ... -3.02742273e-01 -3.89924735e-01 3.35190445e-01] [ 4.60847378e-01 -4.86469418e-01 -4.47545350e-02 ... 2.96194162e-02 -1.09209716e-01 3.39478403e-01] [ 4.20114666e-01 -2.95763999e-01 -4.44303393e-01 ... -7.96084628e-02 -1.97310328e-01 4.73914385e-01] ... [ 4.14336435e-02 -4.67655033e-01 -4.72920835e-02 ... -3.41304779e-01 4.01742667e-01 6.39004230e-01] [ 1.12867802e-01 1.43098636e-02 -2.41479635e-01 ... -6.68125153e-01 3.95762086e-01 5.19419849e-01] [-4.83162969e-01 1.13476820e-01 1.24968626e-01 ... -6.91435575e-01 2.02651560e-01 -6.10323763e-03]] [[-3.59688759e-01 7.71841109e-01 2.45648086e-01 ... -5.05521357e-01 -1.01665474e-01 7.79798925e-01] [-1.20228231e-02 3.59575629e-01 -2.41731703e-01 ... -7.31303394e-01 4.64158922e-01 1.29413947e-01] [ 2.12748662e-01 -4.87075239e-01 -1.87622666e-01 ... -5.91857731e-01 2.84164655e-03 3.98218632e-01] ... [-3.38637978e-02 6.91495955e-01 -5.58270991e-01 ... -7.07433283e-01 5.38609177e-02 -5.60921133e-01] [ 4.53348398e-01 -1.81716800e-01 2.32130289e-01 ... -6.47447944e-01 -2.53690314e-02 1.01102225e-01] [ 7.48861074e-01 -2.54008383e-01 -2.75563270e-01 ... 
-4.98363107e-01 -1.48330415e-02 3.94305997e-02]] [[-2.83592731e-01 -6.65161610e-01 -4.25719053e-01 ... 1.71247527e-01 5.84654808e-01 -3.33485842e-01] [-3.59392732e-01 -3.66290718e-01 -7.48164356e-02 ... -2.22093180e-01 7.13205516e-01 -8.31246138e-01] [-7.85947561e-01 -9.06717256e-02 2.13954866e-01 ... -4.10362296e-02 8.70624840e-01 -3.27661932e-01] ... [-4.60416555e-01 1.08180501e-01 2.78216094e-01 ... -3.44256729e-01 -3.78298350e-02 -1.49869576e-01] [-5.41989617e-02 -4.45630169e-03 -3.75085205e-01 ... -4.57170635e-01 -5.31595461e-02 1.42192975e-01] [ 7.85546079e-02 6.21847101e-02 -1.35933906e-01 ... -2.29295969e-01 6.61623850e-02 1.27247376e-02]] [[ 4.45573896e-01 -3.72872740e-01 2.56424040e-01 ... -1.16926469e-01 -9.45734501e-01 5.84402978e-01] [-2.84705967e-01 -4.70712811e-01 2.52015918e-01 ... 1.46747336e-01 -4.09256697e-01 7.48614371e-01] [-7.96911061e-01 4.46596056e-01 6.47720873e-01 ... -5.60011975e-02 -5.47882140e-01 8.67359877e-01] ... [-2.69131899e-01 -1.42840862e-01 1.12514913e-01 ... 6.59667030e-02 -1.08444713e-01 -1.00349498e+00] [-3.09632510e-01 5.07949889e-01 7.27398545e-02 ... 1.57333221e-02 1.68800071e-01 -3.93637806e-01] [-4.36494499e-01 4.67439204e-01 -7.46094048e-01 ... -1.09833121e-01 -6.79565454e-03 1.21812783e-01]] [[-1.32037714e-01 -9.27010253e-02 -1.95582509e-01 ... 1.03201199e+00 3.55122894e-01 8.34453166e-01] [-6.40512645e-01 -1.49779245e-01 -1.89243600e-01 ... 8.21847260e-01 3.43300015e-01 2.50494510e-01] [-2.36087129e-01 -4.58926529e-01 1.10660069e-01 ... -3.54628593e-01 -3.51159811e-01 -6.58996165e-01] ... [-1.47596955e-01 -2.00263057e-02 -6.72609150e-01 ... -1.19094312e-01 4.57042605e-01 2.87416428e-02] [-2.23131254e-01 -7.15803728e-02 -6.61782026e-02 ... -1.74939752e-01 -7.08735362e-02 -4.47007418e-01] [-6.29044950e-01 2.26981435e-02 -7.20164776e-01 ... -5.28131664e-01 -1.88051656e-01 2.56473988e-01]]]]]; ov_res: [[[[[ 7.63997436e-03 1.03234470e-01 -7.85520375e-01 ... 
-4.04202305e-02 8.31521034e-01 5.50850444e-02] [ 3.94742750e-02 3.93033028e-04 -5.48381269e-01 ... -1.67218864e-01 6.58124506e-01 1.84086606e-01] [ 1.42995402e-01 -1.12128519e-01 -4.60249968e-02 ... 1.56990543e-01 1.25547424e-01 3.59918833e-01] ... [ 1.38032213e-01 -6.09776080e-01 5.10874808e-01 ... 9.68351141e-02 7.58461580e-02 -5.42867720e-01] [-5.83216131e-01 1.32910207e-01 -2.20716640e-01 ... 5.67049205e-01 3.57564211e-01 -4.04719621e-01] [-2.58193344e-01 4.21877176e-01 -6.18602276e-01 ... 6.59937143e-01 -9.33450684e-02 6.65262640e-01]] [[ 2.30260286e-02 2.67367482e-01 -5.10594435e-02 ... -1.86365828e-01 -2.86187679e-01 9.59236622e-02] [ 6.64354980e-01 5.81176877e-01 -1.13487147e-01 ... -2.32998028e-01 5.99206090e-02 -2.91989237e-01] [ 9.67549741e-01 1.67222381e-01 -2.17106774e-01 ... -9.52652097e-02 1.74183264e-01 -3.06680292e-01] ... [ 3.51637125e-01 1.96960330e-01 5.72051346e-01 ... 2.27769408e-02 3.84035744e-02 6.11347437e-01] [-2.53625125e-01 6.09196126e-02 4.59578842e-01 ... 3.10878068e-01 -1.39506280e-01 1.14561129e+00] [ 5.29509373e-02 4.95244741e-01 2.73782462e-01 ... -4.06587012e-02 -2.90905356e-01 2.45798621e-02]] [[ 4.90916729e-01 1.24273145e+00 -1.98637590e-01 ... -6.01162612e-01 -1.03096269e-01 -1.96022674e-01] [ 2.57966429e-01 7.80469179e-02 -1.14876807e+00 ... -7.04920709e-01 -1.22866958e-01 3.03310305e-01] [-1.85983583e-01 -3.99062008e-01 -1.15866339e+00 ... -3.18736583e-01 -3.25195551e-01 2.00991090e-02] ... [ 3.19082707e-01 -3.48193169e-01 5.97115941e-02 ... -1.44143790e-01 -6.08480990e-01 4.15442251e-02] [ 4.64313626e-01 -3.27459246e-01 -3.38563114e-01 ... -2.06329152e-01 1.04960382e-01 3.02218109e-01] [ 7.55180493e-02 1.99704361e-03 -8.34731162e-02 ... -8.51960015e-03 6.65811300e-01 -2.56944150e-01]] [[-1.25423089e-01 -1.19980074e-01 -1.03913255e-01 ... -4.30320472e-01 2.59843260e-01 3.36099625e-01] [-3.73127460e-01 6.10090196e-02 1.21473968e-01 ... 
-2.83825010e-01 6.02823384e-02 2.60774910e-01] [-2.16808602e-01 2.48564780e-01 5.72233140e-01 ... -3.22442949e-01 -3.32809120e-01 -2.08630919e-01] ... [ 2.15696320e-01 1.94567040e-01 -4.77281898e-01 ... -1.38264969e-02 -3.62301022e-01 -9.23503265e-02] [ 3.60717684e-01 -3.40162128e-01 -1.42629400e-01 ... -3.65233541e-01 -3.29162538e-01 2.03771114e-01] [ 5.27132571e-01 7.11074099e-02 2.63921320e-01 ... 5.84010303e-01 2.02293888e-01 7.02305213e-02]] [[-6.20825708e-01 -3.79567854e-02 -1.52868286e-01 ... 2.65966803e-01 7.66131401e-01 6.53081536e-02] [-2.54841328e-01 1.62218496e-01 -9.76516306e-02 ... 3.78072053e-01 6.77058458e-01 9.04926434e-02] [ 2.69433260e-01 1.44413605e-01 -4.64827895e-01 ... 4.20234680e-01 6.29772320e-02 3.41760308e-01] ... [ 1.94764331e-01 1.19285375e-01 -5.37766874e-01 ... -2.73270518e-01 4.44625467e-01 6.58890232e-02] [-3.20109241e-02 2.71109748e-03 1.28646284e-01 ... -5.01655281e-01 7.66313493e-01 -2.53612548e-01] [-5.52001715e-01 -3.34578753e-01 -1.41189307e-01 ... 9.99519601e-02 5.22837102e-01 3.40940833e-01]]] [[[ 4.19865578e-01 4.21443224e-01 1.24767356e-01 ... -2.64704764e-01 3.53619099e-01 3.44368607e-01] [ 2.57359058e-01 1.93554163e-02 -2.08305314e-01 ... -4.97242540e-01 3.39262933e-02 2.36509740e-02] [ 3.96621674e-01 1.54705226e-01 -3.13331127e-01 ... 5.38577549e-02 2.29179263e-02 -2.75208294e-01] ... [-8.18352476e-02 1.01001024e-01 -5.22562802e-01 ... 2.43755981e-01 2.06737909e-02 1.29955932e-01] [-4.05842781e-01 4.25036997e-01 -6.62518680e-01 ... -2.64272928e-01 -3.51884365e-02 4.87196535e-01] [-6.57516181e-01 8.06739926e-02 -1.37129307e-01 ... 4.97581773e-02 3.25365096e-01 4.10913736e-01]] [[ 3.73316765e-01 -8.79246473e-01 -5.45145929e-01 ... -1.26145280e-03 -4.76094127e-01 -6.96142197e-01] [ 5.06496467e-02 -3.37061882e-01 -3.05011481e-01 ... -5.94278634e-01 -4.48491007e-01 -1.21293001e-01] [ 4.76835901e-03 -1.75295278e-01 -9.67388153e-02 ... -1.02819204e-01 -5.09988330e-02 -2.65429765e-01] ... 
[ 1.37046024e-01 -6.25557542e-01 -5.14295042e-01 ... -8.06730449e-01 9.20193419e-02 1.24322735e-01] [ 2.49269426e-01 3.46426181e-02 -5.23340344e-01 ... -5.68633199e-01 -6.50535896e-02 -5.37077248e-01] [ 3.66843008e-02 2.09672526e-01 2.91127544e-02 ... 3.52481395e-01 2.80333906e-01 -4.42007333e-01]] [[ 2.70007253e-01 8.59781578e-02 -6.59323335e-02 ... 7.96827152e-02 4.42384072e-02 -8.12113658e-02] [ 1.57962903e-01 1.77517235e-01 -3.00276339e-01 ... -5.70030391e-01 1.58979282e-01 -1.23127095e-01] [-6.99910149e-02 -2.15352699e-02 -5.05467236e-01 ... 6.77426234e-02 9.54380274e-01 -2.80509889e-02] ... [-4.80033755e-01 6.17228687e-01 8.14897344e-02 ... 7.84497559e-01 4.19115812e-01 2.28356719e-01] [ 6.02848113e-01 -1.51342511e-01 6.23706520e-01 ... 6.74289525e-01 7.67518044e-01 2.08928064e-01] [ 3.59890938e-01 -5.94947398e-01 4.71944422e-01 ... 3.57436061e-01 4.75614160e-01 2.32667383e-02]] [[-1.57225773e-01 6.97934926e-02 -3.95799547e-01 ... -2.70744175e-01 1.91984490e-01 2.39403471e-01] [-3.45615357e-01 -7.36832678e-01 -5.24753667e-02 ... -2.58368015e-01 -1.78381965e-01 2.17653498e-01] [ 4.28298384e-01 -6.92509472e-01 1.41121134e-01 ... -5.44411123e-01 -1.85666885e-02 1.38761654e-01] ... [-3.15529734e-01 2.84200069e-02 -5.23036420e-01 ... -7.73914039e-01 6.66519225e-01 -6.13375425e-01] [-1.80924416e-01 -3.31364632e-01 -6.13791168e-01 ... -4.71580267e-01 -5.38449228e-01 -1.44217685e-01] [-6.80396318e-01 -3.39875929e-02 -6.87985420e-01 ... -1.94951996e-01 -7.65648365e-01 1.04328655e-01]] [[ 2.02942148e-01 -6.50804102e-01 -5.90459466e-01 ... -2.03020975e-01 -5.24904311e-01 -3.22196871e-01] [ 3.12446594e-01 9.81856659e-02 -4.67740893e-01 ... 1.28841117e-01 -3.86167139e-01 1.14591919e-01] [ 7.56898880e-01 4.78778988e-01 1.52152345e-01 ... 1.92031786e-01 -1.72810718e-01 8.42346177e-02] ... [-2.32834816e-01 2.72360742e-01 7.83258379e-01 ... -1.51131496e-01 4.76935834e-01 3.35027844e-01] [-2.18503132e-01 -1.20934598e-01 9.45152819e-01 ... 
2.11798742e-01 7.72024170e-02 -4.44270730e-01] [ 2.64236331e-01 -2.60573268e-01 -7.94136152e-02 ... -3.51130366e-02 -2.96703845e-01 7.45238140e-02]]] [[[-3.55789751e-01 -3.39167327e-01 4.38729167e-01 ... -3.02742273e-01 -3.89924735e-01 3.35190445e-01] [ 4.60847378e-01 -4.86469418e-01 -4.47545350e-02 ... 2.96194162e-02 -1.09209716e-01 3.39478403e-01] [ 4.20114666e-01 -2.95763999e-01 -4.44303393e-01 ... -7.96084628e-02 -1.97310328e-01 4.73914385e-01] ... [ 4.14336435e-02 -4.67655033e-01 -4.72920835e-02 ... -3.41304779e-01 4.01742667e-01 6.39004230e-01] [ 1.12867802e-01 1.43098636e-02 -2.41479635e-01 ... -6.68125153e-01 3.95762086e-01 5.19419849e-01] [-4.83162969e-01 1.13476820e-01 1.24968626e-01 ... -6.91435575e-01 2.02651560e-01 -6.10323763e-03]] [[-3.59688759e-01 7.71841109e-01 2.45648086e-01 ... -5.05521357e-01 -1.01665474e-01 7.79798925e-01] [-1.20228231e-02 3.59575629e-01 -2.41731703e-01 ... -7.31303394e-01 4.64158922e-01 1.29413947e-01] [ 2.12748662e-01 -4.87075239e-01 -1.87622666e-01 ... -5.91857731e-01 2.84164655e-03 3.98218632e-01] ... [-3.38637978e-02 6.91495955e-01 -5.58270991e-01 ... -7.07433283e-01 5.38609177e-02 -5.60921133e-01] [ 4.53348398e-01 -1.81716800e-01 2.32130289e-01 ... -6.47447944e-01 -2.53690314e-02 1.01102225e-01] [ 7.48861074e-01 -2.54008383e-01 -2.75563270e-01 ... -4.98363107e-01 -1.48330415e-02 3.94305997e-02]] [[-2.83592731e-01 -6.65161610e-01 -4.25719053e-01 ... 1.71247527e-01 5.84654808e-01 -3.33485842e-01] [-3.59392732e-01 -3.66290718e-01 -7.48164356e-02 ... -2.22093180e-01 7.13205516e-01 -8.31246138e-01] [-7.85947561e-01 -9.06717256e-02 2.13954866e-01 ... -4.10362296e-02 8.70624840e-01 -3.27661932e-01] ... [-4.60416555e-01 1.08180501e-01 2.78216094e-01 ... -3.44256729e-01 -3.78298350e-02 -1.49869576e-01] [-5.41989617e-02 -4.45630169e-03 -3.75085205e-01 ... -4.57170635e-01 -5.31595461e-02 1.42192975e-01] [ 7.85546079e-02 6.21847101e-02 -1.35933906e-01 ... 
-2.29295969e-01 6.61623850e-02 1.27247376e-02]] [[ 4.45573896e-01 -3.72872740e-01 2.56424040e-01 ... -1.16926469e-01 -9.45734501e-01 5.84402978e-01] [-2.84705967e-01 -4.70712811e-01 2.52015918e-01 ... 1.46747336e-01 -4.09256697e-01 7.48614371e-01] [-7.96911061e-01 4.46596056e-01 6.47720873e-01 ... -5.60011975e-02 -5.47882140e-01 8.67359877e-01] ... [-2.69131899e-01 -1.42840862e-01 1.12514913e-01 ... 6.59667030e-02 -1.08444713e-01 -1.00349498e+00] [-3.09632510e-01 5.07949889e-01 7.27398545e-02 ... 1.57333221e-02 1.68800071e-01 -3.93637806e-01] [-4.36494499e-01 4.67439204e-01 -7.46094048e-01 ... -1.09833121e-01 -6.79565454e-03 1.21812783e-01]] [[-1.32037714e-01 -9.27010253e-02 -1.95582509e-01 ... 1.03201199e+00 3.55122894e-01 8.34453166e-01] [-6.40512645e-01 -1.49779245e-01 -1.89243600e-01 ... 8.21847260e-01 3.43300015e-01 2.50494510e-01] [-2.36087129e-01 -4.58926529e-01 1.10660069e-01 ... -3.54628593e-01 -3.51159811e-01 -6.58996165e-01] ... [-1.47596955e-01 -2.00263057e-02 -6.72609150e-01 ... -1.19094312e-01 4.57042605e-01 2.87416428e-02] [-2.23131254e-01 -7.15803728e-02 -6.61782026e-02 ... -1.74939752e-01 -7.08735362e-02 -4.47007418e-01] [-6.29044950e-01 2.26981435e-02 -7.20164776e-01 ... -5.28131664e-01 -1.88051656e-01 2.56473988e-01]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': 1, 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5786.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %3, %2, %3, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[ 1.7277042 1.7277042 1.7277042 0.76226676 0.76226676 0.46063837 0.9368522 0.9368522 1.5318007 1.5318007 1.5318007 1.0683932 0.98534065] [ 2.1495245 2.1495245 1.7277042 2.091387 2.091387 2.091387 0.9368522 0.9368522 0.9368522 0.38296813 0.38296813 0.6938102 0.98534065] [ 2.1495245 2.1495245 1.7277042 2.091387 2.091387 2.091387 0.18726023 0.25936577 0.5886071 0.5886071 0.5886071 1.5352888 1.5352888 ] [ 2.1495245 2.1495245 1.6203824 2.091387 2.091387 2.091387 0.9730089 0.9730089 2.8615718 2.8615718 2.8615718 1.5352888 1.5352888 ] [ 1.6203824 1.6203824 1.6203824 1.5019605 1.5019605 0.9730089 0.9730089 0.9730089 2.8615718 2.8615718 2.8615718 1.5352888 1.5352888 ] [ 0.81275874 1.3164244 1.5019605 1.5019605 1.5019605 0.9730089 0.9730089 1.2158364 2.8615718 2.8615718 2.8615718 0.9793573 1.3101848 ] [ 0.81275874 0.81275874 0.8871095 0.8871095 0.8871095 0.5615756 0.5615756 1.2158364 1.2158364 1.2158364 0.9793573 0.9793573 1.3101848 ] [ 0.81275874 0.81275874 0.8871095 0.8871095 0.8871095 0.5615756 0.5615756 1.2158364 1.2158364 1.2158364 0.6445117 0.07936052 1.3101848 ] [ 0.6775806 0.7595717 0.816339 2.1166503 2.1166503 2.1166503 0.9757328 0.9757328 0.9757328 0.75967157 0.6445117 0.07936052 0.34021527] [ 2.1497824 2.1497824 1.7794902 2.1166503 2.1166503 2.1166503 0.9757328 0.9757328 0.9757328 0.6445117 0.6445117 0.62760967 0.62760967] [ 2.1497824 2.1497824 1.7794902 2.1166503 2.1166503 2.1166503 0.9757328 1.5454527 1.5454527 
1.5454527 2.3058636 2.3058636 2.3058636 ] [ 2.1497824 2.1497824 1.7794902 1.3810085 1.3810085 1.3810085 0.78263396 1.5454527 1.5454527 1.5882567 2.3058636 2.3058636 2.3058636 ] [ 0.868139 0.868139 1.0286463 1.3810085 1.3810085 1.3810085 1.1342213 1.5454527 1.5454527 1.5882567 2.3058636 2.3058636 2.3058636 ]] [[ 1.5008806 1.5970948 1.5970948 1.5970948 1.0184512 1.0184512 1.3391141 1.3391141 1.3391141 0.41369396 0.36888176 0.92635906 3.3146932 ] [ 1.5008806 1.5008806 0.638038 2.7974393 2.7974393 2.7974393 2.2270243 2.2270243 2.2270243 1.177222 1.177222 0.92635906 2.5390172 ] [ 0.6861817 0.6861817 0.638038 2.7974393 2.7974393 2.7974393 2.2270243 2.2270243 2.2270243 1.3057644 1.177222 1.2422073 1.2422073 ] [ 0.07975096 0.71254927 0.71254927 2.7974393 2.7974393 2.7974393 2.2270243 2.2270243 2.2270243 1.3057644 1.177222 1.2422073 1.2422073 ] [ 1.2411069 1.2411069 0.71254927 0.84566605 1.593614 1.593614 1.593614 1.3057644 1.3057644 1.3057644 1.0666698 1.2422073 1.2422073 ] [ 1.2411069 1.2411069 0.7542639 1.4294102 1.593614 1.593614 1.593614 0.78713113 0.78713113 0.90295225 0.90295225 0.90295225 0.7188133 ] [ 1.9422814 1.3691268 0.7542639 1.4294102 1.4294102 1.4294102 0.92877704 0.78713113 0.78713113 0.90295225 1.3542463 1.3542463 1.4699143 ] [ 1.9422814 1.3691268 1.3231921 1.4294102 1.4294102 1.4294102 0.92877704 0.87018454 0.4772035 0.6901569 1.3542463 1.3542463 1.4699143 ] [ 1.9422814 1.7833816 1.7833816 2.359878 2.359878 2.359878 1.9344602 0.87018454 0.00338066 0.8568835 1.3542463 1.3542463 1.4699143 ] [ 1.4083568 1.7833816 1.7833816 2.359878 2.359878 2.359878 1.9344602 0.87018454 2.1854088 2.1854088 2.1854088 1.0154209 1.0154209 ] [ 1.4083568 1.7833816 1.7833816 2.359878 2.359878 2.359878 1.9344602 0.34695342 2.1854088 2.1854088 2.1854088 0.8568835 0.05811808] [ 1.4083568 1.4083568 0.8995422 1.5765544 1.5765544 1.5765544 0.91170335 0.91170335 2.1854088 2.1854088 2.1854088 0.66874766 0.05811808] [ 0.8995422 0.8995422 0.8995422 1.5765544 1.5765544 2.0229576 2.0229576 
2.0229576 1.3629643 1.3629643 0.66874766 0.66874766 1.9293492 ]] [[ 0.63865334 1.2440795 1.2892823 2.6670477 2.6670477 2.6670477 1.7417369 1.7417369 1.905141 1.905141 2.0390396 2.0390396 2.0390396 ] [ 0.5047141 1.2440795 1.2892823 1.2892823 1.2892823 0.5683923 2.26929 2.26929 2.26929 1.905141 2.0390396 2.0390396 2.0390396 ] [ 0.9447243 0.9447243 1.0278484 1.0278484 1.0278484 -0.16325304 2.26929 2.26929 2.26929 1.905141 1.905141 1.0999415 1.4661182 ] [ 0.9447243 0.9447243 1.0278484 1.0278484 1.0278484 0.40454668 2.26929 2.26929 2.26929 1.396619 0.8591272 2.4182966 2.4182966 ] [ 0.9447243 1.0804743 1.1078328 1.1078328 1.1078328 0.45495814 0.47288123 1.396619 1.8311558 1.8311558 1.8311558 2.4182966 2.4182966 ] [ 0.66403604 1.0804743 1.1078328 1.1958442 1.1958442 1.1958442 0.47288123 1.396619 1.8311558 1.8311558 1.8311558 2.4182966 2.4182966 ] [ 0.66403604 1.5860943 1.5860943 1.9279302 1.9279302 1.9279302 0.6186015 0.65457124 1.8311558 1.8311558 1.8311558 1.4936426 1.4936426 ] [ 2.1997774 2.1997774 1.5860943 1.9279302 1.9279302 1.9279302 0.62737054 0.67895925 0.67895925 0.67895925 1.4936426 1.4936426 1.4936426 ] [ 2.1997774 2.1997774 1.5860943 1.9279302 1.9279302 1.9279302 0.6382926 0.67895925 0.9807025 0.9807025 1.4936426 1.664669 1.664669 ] [ 2.1997774 2.1997774 0.9265757 1.4674188 1.4674188 1.4674188 1.4302349 1.4302349 0.9807025 0.9807025 0.9807025 1.664669 1.664669 ] [ 1.0126095 1.1468354 1.1468354 1.324622 1.324622 1.4302349 1.4302349 1.4302349 1.2088468 1.2088468 0.9807025 1.664669 1.664669 ] [ 1.0126095 1.1468354 1.1468354 1.1589177 1.1589177 1.4302349 1.4302349 1.4302349 1.2758073 1.3338416 1.3338416 1.3338416 1.6494751 ] [ 2.0093677 1.4366773 1.4366773 1.4366773 1.1589177 1.1589177 0.9136528 1.2758073 1.2758073 1.3639294 1.859656 1.859656 1.859656 ]]]]; ov_res: [[[[ 1.7277042 1.7277042 1.7277042 0.76226676 0.76226676 0.46063837 0.9368522 0.9368522 1.5318007 1.5318007 1.5318007 1.0683932 0.98534065] [ 2.1495245 2.1495245 1.7277042 2.091387 2.091387 2.091387 
0.9368522 0.9368522 0.9368522 0.38296813 0.38296813 0.6938102 0.98534065] [ 2.1495245 2.1495245 1.7277042 2.091387 2.091387 2.091387 0.18726023 0.25936577 0.5886071 0.5886071 0.5886071 1.5352888 1.5352888 ] [ 2.1495245 2.1495245 1.6203824 2.091387 2.091387 2.091387 0.9730089 0.9730089 2.8615718 2.8615718 2.8615718 1.5352888 1.5352888 ] [ 1.6203824 1.6203824 1.6203824 1.5019605 1.5019605 0.9730089 0.9730089 0.9730089 2.8615718 2.8615718 2.8615718 1.5352888 1.5352888 ] [ 0.81275874 1.3164244 1.5019605 1.5019605 1.5019605 0.9730089 0.9730089 1.2158364 2.8615718 2.8615718 2.8615718 0.9793573 1.3101848 ] [ 0.81275874 0.81275874 0.8871095 0.8871095 0.8871095 0.5615756 0.5615756 1.2158364 1.2158364 1.2158364 0.9793573 0.9793573 1.3101848 ] [ 0.81275874 0.81275874 0.8871095 0.8871095 0.8871095 0.5615756 0.5615756 1.2158364 1.2158364 1.2158364 0.6445117 0.07936052 1.3101848 ] [ 0.6775806 0.7595717 0.816339 2.1166503 2.1166503 2.1166503 0.9757328 0.9757328 0.9757328 0.75967157 0.6445117 0.07936052 0.34021527] [ 2.1497824 2.1497824 1.7794902 2.1166503 2.1166503 2.1166503 0.9757328 0.9757328 0.9757328 0.6445117 0.6445117 0.62760967 0.62760967] [ 2.1497824 2.1497824 1.7794902 2.1166503 2.1166503 2.1166503 0.9757328 1.5454527 1.5454527 1.5454527 2.3058636 2.3058636 2.3058636 ] [ 2.1497824 2.1497824 1.7794902 1.3810085 1.3810085 1.3810085 0.78263396 1.5454527 1.5454527 1.5882567 2.3058636 2.3058636 2.3058636 ] [ 0.868139 0.868139 1.0286463 1.3810085 1.3810085 1.3810085 1.1342213 1.5454527 1.5454527 1.5882567 2.3058636 2.3058636 2.3058636 ]] [[ 1.5008806 1.5970948 1.5970948 1.5970948 1.0184512 1.0184512 1.3391141 1.3391141 1.3391141 0.41369396 0.36888176 0.92635906 3.3146932 ] [ 1.5008806 1.5008806 0.638038 2.7974393 2.7974393 2.7974393 2.2270243 2.2270243 2.2270243 1.177222 1.177222 0.92635906 2.5390172 ] [ 0.6861817 0.6861817 0.638038 2.7974393 2.7974393 2.7974393 2.2270243 2.2270243 2.2270243 1.3057644 1.177222 1.2422073 1.2422073 ] [ 0.07975096 0.71254927 0.71254927 2.7974393 
2.7974393 2.7974393 2.2270243 2.2270243 2.2270243 1.3057644 1.177222 1.2422073 1.2422073 ] [ 1.2411069 1.2411069 0.71254927 0.84566605 1.593614 1.593614 1.593614 1.3057644 1.3057644 1.3057644 1.0666698 1.2422073 1.2422073 ] [ 1.2411069 1.2411069 0.7542639 1.4294102 1.593614 1.593614 1.593614 0.78713113 0.78713113 0.90295225 0.90295225 0.90295225 0.7188133 ] [ 1.9422814 1.3691268 0.7542639 1.4294102 1.4294102 1.4294102 0.92877704 0.78713113 0.78713113 0.90295225 1.3542463 1.3542463 1.4699143 ] [ 1.9422814 1.3691268 1.3231921 1.4294102 1.4294102 1.4294102 0.92877704 0.87018454 0.4772035 0.6901569 1.3542463 1.3542463 1.4699143 ] [ 1.9422814 1.7833816 1.7833816 2.359878 2.359878 2.359878 1.9344602 0.87018454 0.00338066 0.8568835 1.3542463 1.3542463 1.4699143 ] [ 1.4083568 1.7833816 1.7833816 2.359878 2.359878 2.359878 1.9344602 0.87018454 2.1854088 2.1854088 2.1854088 1.0154209 1.0154209 ] [ 1.4083568 1.7833816 1.7833816 2.359878 2.359878 2.359878 1.9344602 0.34695342 2.1854088 2.1854088 2.1854088 0.8568835 0.05811808] [ 1.4083568 1.4083568 0.8995422 1.5765544 1.5765544 1.5765544 0.91170335 0.91170335 2.1854088 2.1854088 2.1854088 0.66874766 0.05811808] [ 0.8995422 0.8995422 0.8995422 1.5765544 1.5765544 2.0229576 2.0229576 2.0229576 1.3629643 1.3629643 0.66874766 0.66874766 1.9293492 ]] [[ 0.63865334 1.2440795 1.2892823 2.6670477 2.6670477 2.6670477 1.7417369 1.7417369 1.905141 1.905141 2.0390396 2.0390396 2.0390396 ] [ 0.5047141 1.2440795 1.2892823 1.2892823 1.2892823 0.5683923 2.26929 2.26929 2.26929 1.905141 2.0390396 2.0390396 2.0390396 ] [ 0.9447243 0.9447243 1.0278484 1.0278484 1.0278484 -0.16325304 2.26929 2.26929 2.26929 1.905141 1.905141 1.0999415 1.4661182 ] [ 0.9447243 0.9447243 1.0278484 1.0278484 1.0278484 0.40454668 2.26929 2.26929 2.26929 1.396619 0.8591272 2.4182966 2.4182966 ] [ 0.9447243 1.0804743 1.1078328 1.1078328 1.1078328 0.45495814 0.47288123 1.396619 1.8311558 1.8311558 1.8311558 2.4182966 2.4182966 ] [ 0.66403604 1.0804743 1.1078328 1.1958442 
1.1958442 1.1958442 0.47288123 1.396619 1.8311558 1.8311558 1.8311558 2.4182966 2.4182966 ] [ 0.66403604 1.5860943 1.5860943 1.9279302 1.9279302 1.9279302 0.6186015 0.65457124 1.8311558 1.8311558 1.8311558 1.4936426 1.4936426 ] [ 2.1997774 2.1997774 1.5860943 1.9279302 1.9279302 1.9279302 0.62737054 0.67895925 0.67895925 0.67895925 1.4936426 1.4936426 1.4936426 ] [ 2.1997774 2.1997774 1.5860943 1.9279302 1.9279302 1.9279302 0.6382926 0.67895925 0.9807025 0.9807025 1.4936426 1.664669 1.664669 ] [ 2.1997774 2.1997774 0.9265757 1.4674188 1.4674188 1.4674188 1.4302349 1.4302349 0.9807025 0.9807025 0.9807025 1.664669 1.664669 ] [ 1.0126095 1.1468354 1.1468354 1.324622 1.324622 1.4302349 1.4302349 1.4302349 1.2088468 1.2088468 0.9807025 1.664669 1.664669 ] [ 1.0126095 1.1468354 1.1468354 1.1589177 1.1589177 1.4302349 1.4302349 1.4302349 1.2758073 1.3338416 1.3338416 1.3338416 1.6494751 ] [ 2.0093677 1.4366773 1.4366773 1.4366773 1.1589177 1.1589177 0.9136528 1.2758073 1.2758073 1.3639294 1.859656 1.859656 1.859656 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5788.aten_max_pool2d, %x.1 : Tensor): %self.ceil_mode : bool = prim::Constant[value=1]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.stride, %self.stride, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%5) fw_re: [[[[-0.60204667 0.87988967 0.87988967 0.87988967 1.0750837 1.1345619 1.1345619 1.1345619 1.7730259 1.7730259 1.7730259 0.9863435 0.9863435 0.9863435 0.753211 ] [-0.30496666 0.87988967 0.87988967 0.87988967 1.0750837 1.1345619 1.1345619 1.1345619 1.7730259 1.7730259 1.7730259 0.9863435 0.9863435 1.3639644 1.3639644 ] [-0.30496666 0.715548 1.3036745 1.3036745 1.3036745 1.5445697 1.5445697 1.5445697 1.1161257 1.1161257 1.4503418 2.1236591 2.1236591 2.1236591 1.3639644 ] [ 0.23686756 0.715548 1.3036745 1.3036745 1.3036745 1.5445697 1.5445697 1.5445697 1.1161257 1.1161257 1.4503418 2.1236591 2.1236591 2.1236591 1.3639644 ] [ 1.872012 1.872012 1.3036745 1.3036745 1.3036745 1.5445697 1.5445697 1.5445697 0.7988288 0.7988288 2.3095148 2.3095148 2.3095148 2.1236591 1.1046424 ] [ 1.872012 1.872012 1.6966146 1.6966146 1.6966146 0.848591 0.8009885 0.8009885 0.7988288 0.7988288 2.3095148 2.3095148 2.3095148 1.1046424 1.1046424 ] [ 1.872012 1.872012 1.6966146 1.6966146 1.6966146 1.035875 1.035875 0.8009885 0.53514695 0.07751048 2.3095148 2.3095148 2.3095148 1.1046424 1.1046424 ] [ 0.6158213 1.2161127 1.6966146 1.6966146 1.6966146 1.4294038 1.4294038 1.4294038 1.3832654 0.6464019 0.5800217 0.9335936 1.090818 1.090818 1.090818 ] [ 0.6158213 1.2161127 1.2161127 1.2161127 1.035875 1.4294038 1.4294038 1.4294038 1.3832654 0.6464019 0.41943142 0.9335936 2.1597824 2.1597824 2.1597824 ] [ 0.6158213 1.2161127 1.2161127 1.2161127 1.0014213 1.4294038 
1.4294038 1.4294038 1.3832654 0.6464019 0.41943142 1.8684399 2.1597824 2.1597824 2.1597824 ] [ 1.6261982 1.6261982 1.5704926 1.5704926 1.5704926 1.0014213 1.0014213 0.95440954 0.9227756 0.578633 1.4110386 1.8684399 2.1597824 2.1597824 2.1597824 ] [ 1.6261982 1.6261982 2.5581255 2.5581255 2.5581255 1.0014213 1.0014213 1.1651174 1.1651174 1.1651174 1.4110386 1.8684399 1.8684399 1.8684399 1.2963823 ] [ 1.6261982 1.6261982 2.5581255 2.5581255 2.5581255 2.1001172 0.9535438 1.1651174 1.1651174 1.1651174 1.8805633 1.8805633 1.8805633 1.3151501 1.3151501 ] [ 0.61572105 0.90624297 2.5581255 2.5581255 2.5581255 2.1001172 1.2923996 1.2923996 1.1651174 1.1651174 1.8805633 1.8805633 1.8805633 1.3151501 1.3151501 ] [ 0.3685386 0.3685386 -0.09993063 2.1001172 2.1001172 2.1001172 1.2923996 1.2923996 0.48404807 0.48404807 1.8805633 1.8805633 1.8805633 1.3151501 1.3151501 ]] [[ 1.6928414 1.6928414 1.6928414 0.77924 1.334475 1.334475 1.334475 1.1924418 1.2652978 1.2652978 1.2652978 0.8667783 1.5934327 1.5934327 1.5934327 ] [ 1.6928414 1.6928414 1.6928414 0.8984708 1.334475 1.334475 1.334475 1.6269584 1.6269584 1.6269584 1.5787324 0.8667783 1.5934327 1.5934327 1.5934327 ] [ 1.6928414 1.6928414 1.6928414 1.4107138 1.4107138 1.1924418 1.2345852 1.6269584 1.6269584 1.6269584 1.5787324 0.8667783 0.8667783 0.97490704 0.97490704] [ 1.7304868 1.7304868 1.4107138 1.4107138 1.4107138 1.0883338 1.2345852 1.6269584 1.6269584 1.6269584 1.5787324 0.83033293 0.83033293 0.5171632 0.5171632 ] [ 1.7304868 1.7304868 1.4107138 1.4107138 1.4107138 1.0883338 1.2345852 1.2345852 1.2345852 0.7907666 0.7907666 0.2737874 0.9750781 0.9750781 0.9750781 ] [ 1.7304868 1.7304868 0.55637807 0.66804534 0.66804534 1.0883338 1.0883338 1.0883338 1.2255 1.2255 1.2255 0.2737874 0.9750781 0.9750781 0.9750781 ] [ 0.45092073 0.45092073 0.15919526 0.66804534 0.66804534 0.66804534 0.5353589 0.5370924 1.2255 1.4398487 1.5724887 1.5724887 1.5724887 0.9750781 0.9750781 ] [ 1.6009762 1.6009762 1.9479474 2.341663 2.341663 2.341663 
0.794485 1.4200135 1.4200135 1.4398487 1.5724887 1.5724887 1.5724887 0.80128473 0.80128473] [ 2.0595837 2.0595837 2.0595837 2.341663 2.341663 2.341663 0.987482 1.4200135 1.4359775 1.4398487 1.5724887 1.5724887 1.5724887 0.9938576 0.9938576 ] [ 2.0595837 2.0595837 2.0595837 2.341663 2.341663 2.341663 1.5427781 1.4200135 1.4359775 1.4359775 1.4359775 0.5870539 0.9817863 0.9938576 0.9938576 ] [ 2.0595837 2.0595837 2.0595837 1.4253414 1.5427781 1.5427781 1.6781318 1.6781318 1.6781318 1.4359775 1.4359775 0.8017171 0.9817863 1.4515873 1.4515873 ] [-0.05420512 0.8768003 1.4253414 1.4253414 1.5427781 1.5427781 1.6781318 1.6781318 1.6781318 0.8230051 0.8230051 0.8017171 0.8017171 1.4515873 1.4515873 ] [ 1.2280409 1.2280409 1.4253414 1.4253414 1.4253414 1.1835431 1.6781318 1.6781318 1.6781318 0.9710647 0.9710647 0.9710647 0.8838814 1.4515873 1.4515873 ] [ 1.2280409 1.2280409 1.2280409 0.8768003 1.1325645 1.3815197 1.3815197 1.3815197 1.3101258 1.0087904 1.0087904 1.8110145 1.8110145 1.8110145 1.081767 ] [ 1.2280409 1.2280409 1.2280409 0.6822675 1.1325645 1.3815197 1.3815197 1.3815197 1.3101258 1.0087904 1.0087904 1.8110145 1.8110145 1.8110145 1.081767 ]] [[-0.3117147 1.1360959 1.1360959 1.1360959 2.1022518 2.1022518 2.1022518 0.48736548 0.38922969 0.21921657 0.61686486 0.61686486 1.3162577 1.3162577 1.3162577 ] [ 1.0832385 1.1360959 1.1360959 1.1360959 2.1022518 2.1022518 2.1022518 0.8498554 0.7973979 0.7973979 0.61686486 0.7219931 1.3162577 1.3162577 1.3162577 ] [ 1.0832385 1.1360959 1.1360959 1.1360959 0.75107265 0.909721 0.909721 0.909721 0.7973979 0.7973979 0.5380033 0.7219931 0.9495013 0.9495013 0.9495013 ] [ 1.0832385 1.0832385 1.0832385 0.75107265 0.75107265 0.909721 0.909721 0.909721 0.90501964 0.7973979 1.3953362 1.3953362 1.3953362 1.9283699 1.9283699 ] [ 0.83218694 0.83218694 0.38335305 0.75107265 0.75107265 0.909721 0.909721 0.909721 0.90501964 0.76764745 1.3953362 1.3953362 1.3953362 1.9283699 1.9283699 ] [ 0.83218694 0.83218694 0.38335305 0.3447161 0.9567465 
0.9567465 0.9567465 0.90501964 0.90501964 0.76764745 1.3953362 1.3953362 1.3953362 1.9283699 1.9283699 ] [ 1.17033 2.3982136 2.3982136 2.3982136 0.9567465 0.9567465 1.3807895 1.3807895 1.3807895 1.1350697 1.1350697 1.1350697 0.69048625 1.4057897 1.4057897 ] [ 1.17033 2.3982136 2.3982136 2.3982136 1.2796463 0.9567465 1.3807895 1.3807895 1.9161422 1.9161422 1.9161422 1.469113 0.77399415 1.7452239 1.7452239 ] [ 1.17033 2.3982136 2.3982136 2.3982136 1.2796463 0.5383911 1.3807895 1.3807895 1.9161422 1.9161422 1.9161422 1.469113 0.77399415 1.7452239 1.7452239 ] [ 0.9609493 0.9609493 1.2796463 1.2796463 3.1261206 3.1261206 3.1261206 0.63702303 1.9161422 1.9161422 1.9161422 1.469113 1.3685437 1.7452239 1.7452239 ] [ 1.5273069 1.5273069 1.2528517 0.9677751 3.1261206 3.1261206 3.1261206 0.7346011 0.7346011 0.7346011 1.3685437 1.3685437 1.3685437 0.4874822 0.28856385] [ 1.5273069 1.5957395 1.5957395 1.5957395 3.1261206 3.1261206 3.1261206 1.0914038 1.0914038 0.7346011 1.3685437 1.3685437 1.3685437 1.2421356 1.2421356 ] [ 1.5273069 1.5957395 1.5957395 1.5957395 0.79353994 0.8169936 1.0914038 1.0914038 1.4880221 1.4880221 1.4880221 0.2533529 1.2421356 1.2421356 1.2421356 ] [ 1.0261511 1.5957395 1.5957395 1.5957395 1.1214937 1.1214937 1.1214937 1.0914038 1.4880221 1.4880221 1.4880221 1.3684413 1.2865833 1.2421356 1.2421356 ] [ 0.58343226 0.58343226 0.6338273 0.7441731 1.1214937 1.1214937 1.1214937 0.8169936 1.4880221 1.4880221 1.4880221 1.3684413 1.2865833 0.62128186 0.47090176]]]]; ov_res: [[[[-0.60204667 0.87988967 0.87988967 0.87988967 1.0750837 1.1345619 1.1345619 1.1345619 1.7730259 1.7730259 1.7730259 0.9863435 0.9863435 0.9863435 0.753211 ] [-0.30496666 0.87988967 0.87988967 0.87988967 1.0750837 1.1345619 1.1345619 1.1345619 1.7730259 1.7730259 1.7730259 0.9863435 0.9863435 1.3639644 1.3639644 ] [-0.30496666 0.715548 1.3036745 1.3036745 1.3036745 1.5445697 1.5445697 1.5445697 1.1161257 1.1161257 1.4503418 2.1236591 2.1236591 2.1236591 1.3639644 ] [ 0.23686756 0.715548 
1.3036745 1.3036745 1.3036745 1.5445697 1.5445697 1.5445697 1.1161257 1.1161257 1.4503418 2.1236591 2.1236591 2.1236591 1.3639644 ] [ 1.872012 1.872012 1.3036745 1.3036745 1.3036745 1.5445697 1.5445697 1.5445697 0.7988288 0.7988288 2.3095148 2.3095148 2.3095148 2.1236591 1.1046424 ] [ 1.872012 1.872012 1.6966146 1.6966146 1.6966146 0.848591 0.8009885 0.8009885 0.7988288 0.7988288 2.3095148 2.3095148 2.3095148 1.1046424 1.1046424 ] [ 1.872012 1.872012 1.6966146 1.6966146 1.6966146 1.035875 1.035875 0.8009885 0.53514695 0.07751048 2.3095148 2.3095148 2.3095148 1.1046424 1.1046424 ] [ 0.6158213 1.2161127 1.6966146 1.6966146 1.6966146 1.4294038 1.4294038 1.4294038 1.3832654 0.6464019 0.5800217 0.9335936 1.090818 1.090818 1.090818 ] [ 0.6158213 1.2161127 1.2161127 1.2161127 1.035875 1.4294038 1.4294038 1.4294038 1.3832654 0.6464019 0.41943142 0.9335936 2.1597824 2.1597824 2.1597824 ] [ 0.6158213 1.2161127 1.2161127 1.2161127 1.0014213 1.4294038 1.4294038 1.4294038 1.3832654 0.6464019 0.41943142 1.8684399 2.1597824 2.1597824 2.1597824 ] [ 1.6261982 1.6261982 1.5704926 1.5704926 1.5704926 1.0014213 1.0014213 0.95440954 0.9227756 0.578633 1.4110386 1.8684399 2.1597824 2.1597824 2.1597824 ] [ 1.6261982 1.6261982 2.5581255 2.5581255 2.5581255 1.0014213 1.0014213 1.1651174 1.1651174 1.1651174 1.4110386 1.8684399 1.8684399 1.8684399 1.2963823 ] [ 1.6261982 1.6261982 2.5581255 2.5581255 2.5581255 2.1001172 0.9535438 1.1651174 1.1651174 1.1651174 1.8805633 1.8805633 1.8805633 1.3151501 1.3151501 ] [ 0.61572105 0.90624297 2.5581255 2.5581255 2.5581255 2.1001172 1.2923996 1.2923996 1.1651174 1.1651174 1.8805633 1.8805633 1.8805633 1.3151501 1.3151501 ] [ 0.3685386 0.3685386 -0.09993063 2.1001172 2.1001172 2.1001172 1.2923996 1.2923996 0.48404807 0.48404807 1.8805633 1.8805633 1.8805633 1.3151501 1.3151501 ]] [[ 1.6928414 1.6928414 1.6928414 0.77924 1.334475 1.334475 1.334475 1.1924418 1.2652978 1.2652978 1.2652978 0.8667783 1.5934327 1.5934327 1.5934327 ] [ 1.6928414 1.6928414 
1.6928414 0.8984708 1.334475 1.334475 1.334475 1.6269584 1.6269584 1.6269584 1.5787324 0.8667783 1.5934327 1.5934327 1.5934327 ] [ 1.6928414 1.6928414 1.6928414 1.4107138 1.4107138 1.1924418 1.2345852 1.6269584 1.6269584 1.6269584 1.5787324 0.8667783 0.8667783 0.97490704 0.97490704] [ 1.7304868 1.7304868 1.4107138 1.4107138 1.4107138 1.0883338 1.2345852 1.6269584 1.6269584 1.6269584 1.5787324 0.83033293 0.83033293 0.5171632 0.5171632 ] [ 1.7304868 1.7304868 1.4107138 1.4107138 1.4107138 1.0883338 1.2345852 1.2345852 1.2345852 0.7907666 0.7907666 0.2737874 0.9750781 0.9750781 0.9750781 ] [ 1.7304868 1.7304868 0.55637807 0.66804534 0.66804534 1.0883338 1.0883338 1.0883338 1.2255 1.2255 1.2255 0.2737874 0.9750781 0.9750781 0.9750781 ] [ 0.45092073 0.45092073 0.15919526 0.66804534 0.66804534 0.66804534 0.5353589 0.5370924 1.2255 1.4398487 1.5724887 1.5724887 1.5724887 0.9750781 0.9750781 ] [ 1.6009762 1.6009762 1.9479474 2.341663 2.341663 2.341663 0.794485 1.4200135 1.4200135 1.4398487 1.5724887 1.5724887 1.5724887 0.80128473 0.80128473] [ 2.0595837 2.0595837 2.0595837 2.341663 2.341663 2.341663 0.987482 1.4200135 1.4359775 1.4398487 1.5724887 1.5724887 1.5724887 0.9938576 0.9938576 ] [ 2.0595837 2.0595837 2.0595837 2.341663 2.341663 2.341663 1.5427781 1.4200135 1.4359775 1.4359775 1.4359775 0.5870539 0.9817863 0.9938576 0.9938576 ] [ 2.0595837 2.0595837 2.0595837 1.4253414 1.5427781 1.5427781 1.6781318 1.6781318 1.6781318 1.4359775 1.4359775 0.8017171 0.9817863 1.4515873 1.4515873 ] [-0.05420512 0.8768003 1.4253414 1.4253414 1.5427781 1.5427781 1.6781318 1.6781318 1.6781318 0.8230051 0.8230051 0.8017171 0.8017171 1.4515873 1.4515873 ] [ 1.2280409 1.2280409 1.4253414 1.4253414 1.4253414 1.1835431 1.6781318 1.6781318 1.6781318 0.9710647 0.9710647 0.9710647 0.8838814 1.4515873 1.4515873 ] [ 1.2280409 1.2280409 1.2280409 0.8768003 1.1325645 1.3815197 1.3815197 1.3815197 1.3101258 1.0087904 1.0087904 1.8110145 1.8110145 1.8110145 1.081767 ] [ 1.2280409 1.2280409 1.2280409 
0.6822675 1.1325645 1.3815197 1.3815197 1.3815197 1.3101258 1.0087904 1.0087904 1.8110145 1.8110145 1.8110145 1.081767 ]] [[-0.3117147 1.1360959 1.1360959 1.1360959 2.1022518 2.1022518 2.1022518 0.48736548 0.38922969 0.21921657 0.61686486 0.61686486 1.3162577 1.3162577 1.3162577 ] [ 1.0832385 1.1360959 1.1360959 1.1360959 2.1022518 2.1022518 2.1022518 0.8498554 0.7973979 0.7973979 0.61686486 0.7219931 1.3162577 1.3162577 1.3162577 ] [ 1.0832385 1.1360959 1.1360959 1.1360959 0.75107265 0.909721 0.909721 0.909721 0.7973979 0.7973979 0.5380033 0.7219931 0.9495013 0.9495013 0.9495013 ] [ 1.0832385 1.0832385 1.0832385 0.75107265 0.75107265 0.909721 0.909721 0.909721 0.90501964 0.7973979 1.3953362 1.3953362 1.3953362 1.9283699 1.9283699 ] [ 0.83218694 0.83218694 0.38335305 0.75107265 0.75107265 0.909721 0.909721 0.909721 0.90501964 0.76764745 1.3953362 1.3953362 1.3953362 1.9283699 1.9283699 ] [ 0.83218694 0.83218694 0.38335305 0.3447161 0.9567465 0.9567465 0.9567465 0.90501964 0.90501964 0.76764745 1.3953362 1.3953362 1.3953362 1.9283699 1.9283699 ] [ 1.17033 2.3982136 2.3982136 2.3982136 0.9567465 0.9567465 1.3807895 1.3807895 1.3807895 1.1350697 1.1350697 1.1350697 0.69048625 1.4057897 1.4057897 ] [ 1.17033 2.3982136 2.3982136 2.3982136 1.2796463 0.9567465 1.3807895 1.3807895 1.9161422 1.9161422 1.9161422 1.469113 0.77399415 1.7452239 1.7452239 ] [ 1.17033 2.3982136 2.3982136 2.3982136 1.2796463 0.5383911 1.3807895 1.3807895 1.9161422 1.9161422 1.9161422 1.469113 0.77399415 1.7452239 1.7452239 ] [ 0.9609493 0.9609493 1.2796463 1.2796463 3.1261206 3.1261206 3.1261206 0.63702303 1.9161422 1.9161422 1.9161422 1.469113 1.3685437 1.7452239 1.7452239 ] [ 1.5273069 1.5273069 1.2528517 0.9677751 3.1261206 3.1261206 3.1261206 0.7346011 0.7346011 0.7346011 1.3685437 1.3685437 1.3685437 0.4874822 0.28856385] [ 1.5273069 1.5957395 1.5957395 1.5957395 3.1261206 3.1261206 3.1261206 1.0914038 1.0914038 0.7346011 1.3685437 1.3685437 1.3685437 1.2421356 1.2421356 ] [ 1.5273069 
1.5957395 1.5957395 1.5957395 0.79353994 0.8169936 1.0914038 1.0914038 1.4880221 1.4880221 1.4880221 0.2533529 1.2421356 1.2421356 1.2421356 ] [ 1.0261511 1.5957395 1.5957395 1.5957395 1.1214937 1.1214937 1.1214937 1.0914038 1.4880221 1.4880221 1.4880221 1.3684413 1.2865833 1.2421356 1.2421356 ] [ 0.58343226 0.58343226 0.6338273 0.7441731 1.1214937 1.1214937 1.1214937 0.8169936 1.4880221 1.4880221 1.4880221 1.3684413 1.2865833 0.62128186 0.47090176]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [0, 1]} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5790.aten_max_pool2d, %x.1 : Tensor): %self.ceil_mode : bool = prim::Constant[value=1]() %self.padding : int[] = prim::Constant[value=[0, 1]]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.padding, %self.stride, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[ 1.7206502 1.7206502 1.7206502 0.5273852 0.9652563 1.4250153 1.6194482 1.6194482 1.6194482 0.5338492 0.5338492 0.30623284 1.828385 1.828385 1.828385 ] [ 0.4139905 1.533347 1.533347 1.533347 0.9652563 1.4250153 1.6194482 1.6194482 1.6194482 0.5338492 1.8701001 1.8701001 1.8701001 1.578212 1.578212 ] [ 0.52857244 1.533347 1.533347 1.533347 0.9652563 0.9652563 1.6194482 1.6194482 1.6194482 0.5338492 1.8701001 1.8701001 1.8701001 0.59028095 0.29775167] [ 0.52857244 1.533347 1.533347 1.533347 0.8912952 2.0010185 2.0010185 2.0010185 0.4955112 0.4955112 1.8701001 1.8701001 1.8701001 0.59028095 0.3758224 ] [ 0.52857244 0.52857244 0.52857244 0.8912952 0.8912952 2.0010185 2.0010185 2.0010185 0.7681606 1.5581433 1.5581433 1.5581433 0.4257259 0.4257259 0.3758224 ] [ 0.43684006 0.43684006 0.372306 1.2059525 1.2059525 2.0010185 2.0010185 2.0010185 1.634381 1.5581433 1.5581433 1.5581433 0.93253374 0.93253374 0.93253374] [ 0.6578786 0.9504704 0.9504704 1.2059525 1.2059525 1.2059525 1.634381 1.634381 1.634381 1.5581433 1.5581433 1.5581433 0.93253374 0.93253374 0.93253374] [ 0.6578786 0.9504704 0.9504704 1.2059525 1.2059525 1.43606 1.634381 1.634381 1.634381 1.3541322 0.69922113 0.69922113 0.93253374 0.93253374 0.93253374] [ 1.2393125 1.2393125 0.9504704 0.9504704 0.46311438 1.5021677 1.5021677 1.5021677 1.3541322 1.3541322 0.43083668 0.8790089 0.8790089 1.2831643 1.2831643 ] [ 
1.2393125 1.2393125 1.1934506 1.1934506 1.1934506 1.5021677 1.5467318 1.5467318 1.5467318 1.3541322 0.5543186 0.8790089 0.8790089 1.2831643 1.2831643 ] [ 1.2393125 1.2393125 1.1934506 1.1934506 1.1934506 1.5021677 1.5467318 1.5467318 1.5467318 0.65940124 0.5543186 0.8790089 0.8790089 1.2831643 1.2831643 ] [ 1.3887535 1.3887535 1.1934506 1.1934506 1.1934506 0.6395847 1.5467318 1.5467318 1.5467318 0.64535564 1.0224608 1.0224608 1.0224608 1.1633528 1.1633528 ] [ 1.3887535 1.3887535 0.843264 0.5330558 0.46305734 0.46305734 0.27063888 0.08829994 1.9350054 1.9350054 1.9350054 1.0224608 1.0224608 1.1633528 1.1633528 ]] [[ 1.7218336 1.7218336 1.7218336 0.6271609 2.1182995 2.1182995 2.1182995 1.012367 1.012367 1.012367 0.20707022 1.7341723 1.7341723 1.7341723 0.76093596] [ 0.9969091 0.9969091 0.6271609 0.6271609 2.1182995 2.1182995 2.1182995 1.2769765 1.2769765 0.20707022 0.20707022 1.7341723 1.7341723 1.7341723 1.6139603 ] [ 0.68442184 0.68442184 1.6850381 1.6850381 1.6850381 1.2830806 1.2769765 1.2769765 1.2769765 0.95334435 0.4512792 1.7341723 1.7341723 1.7341723 1.6139603 ] [ 1.2721143 1.2721143 1.6850381 1.6850381 1.6850381 1.2830806 1.2769765 1.2769765 1.2769765 1.1432778 1.9038198 1.9038198 1.9038198 1.6139603 1.6139603 ] [ 1.2721143 1.2721143 1.6850381 1.6850381 1.6850381 1.2830806 0.66689277 1.1791512 1.6922365 1.6922365 1.9038198 1.9038198 1.9038198 1.2908585 0.9590126 ] [ 1.2721143 1.2721143 1.2721143 1.1005679 1.4252036 1.4252036 1.4252036 1.1791512 1.6922365 1.6922365 1.9038198 1.9038198 1.9038198 0.9590126 0.9590126 ] [ 2.1233242 2.1233242 0.98924404 1.1005679 1.4252036 1.4252036 1.4252036 2.1985786 2.1985786 2.1985786 1.6922365 1.0904434 1.0904434 1.0904434 -0.17316678] [ 2.1233242 2.1233242 0.90338856 1.1005679 1.4252036 1.4252036 1.4252036 2.1985786 2.1985786 2.1985786 0.8694737 1.8421788 1.8421788 1.8421788 0.3475248 ] [ 2.1233242 2.1233242 1.6079497 1.6079497 1.6079497 1.0964094 1.5847232 2.1985786 2.1985786 2.1985786 1.2837073 1.8421788 1.8421788 
1.8421788 0.3475248 ] [ 0.8033605 0.8033605 1.6079497 1.6079497 1.6079497 1.0964094 1.5847232 1.5847232 1.5847232 1.7600586 1.7600586 1.8421788 1.8421788 1.8421788 1.2256054 ] [ 1.8016634 1.8016634 1.6079497 1.6079497 1.6079497 1.0964094 1.5847232 1.5847232 1.5847232 1.7600586 1.7600586 1.7600586 0.98311585 1.2256054 1.2256054 ] [ 1.9604713 1.9604713 1.9604713 1.0816453 1.0816453 1.0816453 0.9910796 2.0688064 2.0688064 2.0688064 1.7600586 1.7600586 0.57929784 1.2256054 1.2256054 ] [ 1.9604713 1.9604713 1.9604713 1.0816453 1.0816453 1.0816453 0.9910796 2.0688064 2.0688064 2.0688064 1.4008563 1.4008563 0.7634021 0.7634021 0.58355576]] [[ 1.2747811 1.5378236 1.5378236 1.5378236 0.95534647 0.88043904 0.6359687 1.6098762 1.7188884 1.7188884 1.7188884 1.0116282 1.0116282 1.035205 1.035205 ] [ 0.26166055 1.5378236 1.5378236 1.5378236 0.95534647 0.88043904 0.20001082 1.6098762 1.7188884 1.7188884 1.8325728 1.8325728 2.0808775 2.0808775 2.0808775 ] [ 1.7499907 1.7499907 1.7499907 0.88043904 0.88043904 0.88043904 0.20001082 1.6098762 1.6098762 1.6098762 2.4070404 2.4070404 2.4070404 2.0808775 2.0808775 ] [ 1.7499907 1.7499907 1.7499907 1.1861258 1.1861258 1.1861258 1.1310698 1.7034811 1.7034811 2.1045005 2.4070404 2.4070404 2.4070404 2.0808775 2.0808775 ] [ 1.7499907 1.7499907 1.7499907 1.1861258 1.1861258 1.1861258 1.1310698 1.7034811 1.7034811 2.1045005 2.4070404 2.4070404 2.4070404 1.6373485 1.6338788 ] [ 1.2756163 1.2756163 1.2756163 1.1861258 1.1861258 1.7201034 1.7201034 1.7201034 1.7034811 2.1045005 2.1045005 2.1045005 1.6338788 1.6338788 1.6338788 ] [ 1.6046841 1.6046841 1.332709 1.332709 1.1413635 1.7201034 1.7201034 1.7201034 1.3656915 1.3656915 1.3656915 1.1007743 0.42876077 0.48235023 0.48235023] [ 1.6046841 1.6046841 1.332709 1.332709 1.1413635 1.7201034 1.7201034 1.7201034 1.3656915 1.3656915 1.3656915 1.1007743 0.6428179 0.6428179 0.6428179 ] [ 1.6046841 1.6046841 1.332709 1.332709 0.96440226 2.8827538 2.8827538 2.8827538 1.3656915 1.3656915 1.3656915 
1.1007743 0.6428179 0.6428179 0.6428179 ] [ 1.9355184 1.9355184 0.96440226 0.96440226 0.96440226 2.8827538 2.8827538 2.8827538 0.64059 0.64059 0.64059 0.17814258 0.6428179 0.6428179 0.6428179 ] [ 1.9355184 1.9355184 0.96440226 0.96440226 0.96440226 2.8827538 2.8827538 2.8827538 1.4937389 1.4937389 1.067106 1.067106 0.24959366 1.1686856 1.1686856 ] [ 1.9355184 1.9355184 0.62655836 0.7844483 0.7844483 0.7844483 0.7479141 1.4937389 1.4937389 1.4937389 1.067106 1.067106 0.24959366 1.1686856 1.1686856 ] [ 0.929171 0.929171 0.45244005 0.7844483 0.7844483 0.7844483 1.0130116 1.4937389 1.4937389 1.4937389 1.067106 1.067106 1.5221727 1.5221727 1.5221727 ]]]]; ov_res: [[[[ 1.7206502 1.7206502 1.7206502 0.5273852 0.9652563 1.4250153 1.6194482 1.6194482 1.6194482 0.5338492 0.5338492 0.30623284 1.828385 1.828385 1.828385 ] [ 0.4139905 1.533347 1.533347 1.533347 0.9652563 1.4250153 1.6194482 1.6194482 1.6194482 0.5338492 1.8701001 1.8701001 1.8701001 1.578212 1.578212 ] [ 0.52857244 1.533347 1.533347 1.533347 0.9652563 0.9652563 1.6194482 1.6194482 1.6194482 0.5338492 1.8701001 1.8701001 1.8701001 0.59028095 0.29775167] [ 0.52857244 1.533347 1.533347 1.533347 0.8912952 2.0010185 2.0010185 2.0010185 0.4955112 0.4955112 1.8701001 1.8701001 1.8701001 0.59028095 0.3758224 ] [ 0.52857244 0.52857244 0.52857244 0.8912952 0.8912952 2.0010185 2.0010185 2.0010185 0.7681606 1.5581433 1.5581433 1.5581433 0.4257259 0.4257259 0.3758224 ] [ 0.43684006 0.43684006 0.372306 1.2059525 1.2059525 2.0010185 2.0010185 2.0010185 1.634381 1.5581433 1.5581433 1.5581433 0.93253374 0.93253374 0.93253374] [ 0.6578786 0.9504704 0.9504704 1.2059525 1.2059525 1.2059525 1.634381 1.634381 1.634381 1.5581433 1.5581433 1.5581433 0.93253374 0.93253374 0.93253374] [ 0.6578786 0.9504704 0.9504704 1.2059525 1.2059525 1.43606 1.634381 1.634381 1.634381 1.3541322 0.69922113 0.69922113 0.93253374 0.93253374 0.93253374] [ 1.2393125 1.2393125 0.9504704 0.9504704 0.46311438 1.5021677 1.5021677 1.5021677 1.3541322 1.3541322 
0.43083668 0.8790089 0.8790089 1.2831643 1.2831643 ] [ 1.2393125 1.2393125 1.1934506 1.1934506 1.1934506 1.5021677 1.5467318 1.5467318 1.5467318 1.3541322 0.5543186 0.8790089 0.8790089 1.2831643 1.2831643 ] [ 1.2393125 1.2393125 1.1934506 1.1934506 1.1934506 1.5021677 1.5467318 1.5467318 1.5467318 0.65940124 0.5543186 0.8790089 0.8790089 1.2831643 1.2831643 ] [ 1.3887535 1.3887535 1.1934506 1.1934506 1.1934506 0.6395847 1.5467318 1.5467318 1.5467318 0.64535564 1.0224608 1.0224608 1.0224608 1.1633528 1.1633528 ] [ 1.3887535 1.3887535 0.843264 0.5330558 0.46305734 0.46305734 0.27063888 0.08829994 1.9350054 1.9350054 1.9350054 1.0224608 1.0224608 1.1633528 1.1633528 ]] [[ 1.7218336 1.7218336 1.7218336 0.6271609 2.1182995 2.1182995 2.1182995 1.012367 1.012367 1.012367 0.20707022 1.7341723 1.7341723 1.7341723 0.76093596] [ 0.9969091 0.9969091 0.6271609 0.6271609 2.1182995 2.1182995 2.1182995 1.2769765 1.2769765 0.20707022 0.20707022 1.7341723 1.7341723 1.7341723 1.6139603 ] [ 0.68442184 0.68442184 1.6850381 1.6850381 1.6850381 1.2830806 1.2769765 1.2769765 1.2769765 0.95334435 0.4512792 1.7341723 1.7341723 1.7341723 1.6139603 ] [ 1.2721143 1.2721143 1.6850381 1.6850381 1.6850381 1.2830806 1.2769765 1.2769765 1.2769765 1.1432778 1.9038198 1.9038198 1.9038198 1.6139603 1.6139603 ] [ 1.2721143 1.2721143 1.6850381 1.6850381 1.6850381 1.2830806 0.66689277 1.1791512 1.6922365 1.6922365 1.9038198 1.9038198 1.9038198 1.2908585 0.9590126 ] [ 1.2721143 1.2721143 1.2721143 1.1005679 1.4252036 1.4252036 1.4252036 1.1791512 1.6922365 1.6922365 1.9038198 1.9038198 1.9038198 0.9590126 0.9590126 ] [ 2.1233242 2.1233242 0.98924404 1.1005679 1.4252036 1.4252036 1.4252036 2.1985786 2.1985786 2.1985786 1.6922365 1.0904434 1.0904434 1.0904434 -0.17316678] [ 2.1233242 2.1233242 0.90338856 1.1005679 1.4252036 1.4252036 1.4252036 2.1985786 2.1985786 2.1985786 0.8694737 1.8421788 1.8421788 1.8421788 0.3475248 ] [ 2.1233242 2.1233242 1.6079497 1.6079497 1.6079497 1.0964094 1.5847232 2.1985786 
2.1985786 2.1985786 1.2837073 1.8421788 1.8421788 1.8421788 0.3475248 ] [ 0.8033605 0.8033605 1.6079497 1.6079497 1.6079497 1.0964094 1.5847232 1.5847232 1.5847232 1.7600586 1.7600586 1.8421788 1.8421788 1.8421788 1.2256054 ] [ 1.8016634 1.8016634 1.6079497 1.6079497 1.6079497 1.0964094 1.5847232 1.5847232 1.5847232 1.7600586 1.7600586 1.7600586 0.98311585 1.2256054 1.2256054 ] [ 1.9604713 1.9604713 1.9604713 1.0816453 1.0816453 1.0816453 0.9910796 2.0688064 2.0688064 2.0688064 1.7600586 1.7600586 0.57929784 1.2256054 1.2256054 ] [ 1.9604713 1.9604713 1.9604713 1.0816453 1.0816453 1.0816453 0.9910796 2.0688064 2.0688064 2.0688064 1.4008563 1.4008563 0.7634021 0.7634021 0.58355576]] [[ 1.2747811 1.5378236 1.5378236 1.5378236 0.95534647 0.88043904 0.6359687 1.6098762 1.7188884 1.7188884 1.7188884 1.0116282 1.0116282 1.035205 1.035205 ] [ 0.26166055 1.5378236 1.5378236 1.5378236 0.95534647 0.88043904 0.20001082 1.6098762 1.7188884 1.7188884 1.8325728 1.8325728 2.0808775 2.0808775 2.0808775 ] [ 1.7499907 1.7499907 1.7499907 0.88043904 0.88043904 0.88043904 0.20001082 1.6098762 1.6098762 1.6098762 2.4070404 2.4070404 2.4070404 2.0808775 2.0808775 ] [ 1.7499907 1.7499907 1.7499907 1.1861258 1.1861258 1.1861258 1.1310698 1.7034811 1.7034811 2.1045005 2.4070404 2.4070404 2.4070404 2.0808775 2.0808775 ] [ 1.7499907 1.7499907 1.7499907 1.1861258 1.1861258 1.1861258 1.1310698 1.7034811 1.7034811 2.1045005 2.4070404 2.4070404 2.4070404 1.6373485 1.6338788 ] [ 1.2756163 1.2756163 1.2756163 1.1861258 1.1861258 1.7201034 1.7201034 1.7201034 1.7034811 2.1045005 2.1045005 2.1045005 1.6338788 1.6338788 1.6338788 ] [ 1.6046841 1.6046841 1.332709 1.332709 1.1413635 1.7201034 1.7201034 1.7201034 1.3656915 1.3656915 1.3656915 1.1007743 0.42876077 0.48235023 0.48235023] [ 1.6046841 1.6046841 1.332709 1.332709 1.1413635 1.7201034 1.7201034 1.7201034 1.3656915 1.3656915 1.3656915 1.1007743 0.6428179 0.6428179 0.6428179 ] [ 1.6046841 1.6046841 1.332709 1.332709 0.96440226 2.8827538 
2.8827538 2.8827538 1.3656915 1.3656915 1.3656915 1.1007743 0.6428179 0.6428179 0.6428179 ] [ 1.9355184 1.9355184 0.96440226 0.96440226 0.96440226 2.8827538 2.8827538 2.8827538 0.64059 0.64059 0.64059 0.17814258 0.6428179 0.6428179 0.6428179 ] [ 1.9355184 1.9355184 0.96440226 0.96440226 0.96440226 2.8827538 2.8827538 2.8827538 1.4937389 1.4937389 1.067106 1.067106 0.24959366 1.1686856 1.1686856 ] [ 1.9355184 1.9355184 0.62655836 0.7844483 0.7844483 0.7844483 0.7479141 1.4937389 1.4937389 1.4937389 1.067106 1.067106 0.24959366 1.1686856 1.1686856 ] [ 0.929171 0.929171 0.45244005 0.7844483 0.7844483 0.7844483 1.0130116 1.4937389 1.4937389 1.4937389 1.067106 1.067106 1.5221727 1.5221727 1.5221727 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [1, 0]} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5792.aten_max_pool2d, %x.1 : Tensor): %self.ceil_mode : bool = prim::Constant[value=1]() %self.padding : int[] = prim::Constant[value=[1, 0]]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.padding, %self.stride, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[ 1.8322386 1.1215912 1.0280411 1.0280411 1.0280411 0.6370051 0.6278014 0.6278014 0.6278014 0.22722763 1.1518242 1.1518242 1.1518242 ] [ 1.8322386 1.1215912 1.1722368 1.1722368 1.1722368 0.79167056 0.6817378 0.71257204 0.71257204 0.71257204 1.1518242 1.8249861 1.8249861 ] [ 1.8322386 1.1215912 2.2229517 2.2229517 2.2229517 0.84572077 0.84572077 1.5430479 1.5430479 1.5430479 1.1518242 1.8249861 1.8249861 ] [ 2.2103324 2.2103324 2.2229517 2.2229517 2.2229517 0.84572077 1.11903 1.5430479 1.5430479 1.5430479 0.9116382 1.8249861 1.8249861 ] [ 2.2103324 2.2103324 2.2229517 2.2229517 2.2229517 1.4909894 1.11903 1.5430479 1.5430479 1.5430479 0.8065708 1.4190782 1.4190782 ] [ 2.2103324 2.2103324 2.2103324 1.4909894 1.4909894 1.4909894 1.1297147 1.3184016 1.3184016 1.3184016 1.0690317 0.93656695 1.6036923 ] [ 0.8643031 0.6434675 0.95762575 1.4909894 1.4909894 1.4909894 1.1297147 1.3184016 1.3184016 1.3184016 1.0690317 0.93656695 1.6470346 ] [ 1.0934874 1.0934874 1.0934874 0.9814273 0.98460203 1.1297147 1.1297147 1.3184016 1.3184016 1.3184016 1.0690317 1.1651038 1.6470346 ] [ 1.1272614 1.1272614 1.1272614 0.9814273 0.9814273 0.9814273 0.8087707 0.8087707 0.8087707 0.7576617 0.8785416 1.1651038 1.6470346 ] [ 1.1272614 1.1272614 1.1272614 0.44080976 0.44080976 0.44080976 0.11437768 0.7576617 0.93059117 0.93059117 0.93059117 1.1651038 1.5686256 ] [ 1.2550644 1.1272614 1.1272614 
0.40580675 0.40580675 2.6275434 2.6275434 2.6275434 1.4792343 1.2552385 1.0583264 1.2368062 1.5686256 ] [ 1.2550644 1.0676792 1.0418061 0.94723076 0.94723076 2.6275434 2.6275434 2.6275434 1.4792343 1.2552385 1.0583264 1.2368062 1.2368062 ] [ 1.2550644 1.2289821 1.2289821 1.2289821 0.94723076 2.6275434 2.6275434 2.6275434 1.4792343 1.2552385 1.1858281 1.2368062 1.2368062 ] [ 1.0453893 1.2289821 1.2289821 1.2289821 0.9952932 1.8474877 1.8474877 1.8474877 0.8898166 1.1858281 1.1858281 1.1858281 0.6287441 ] [ 1.0453893 1.2289821 1.2289821 1.2289821 0.9952932 1.8474877 1.8474877 1.8474877 0.8898166 1.1858281 1.1858281 1.1858281 0.6287441 ]] [[ 2.0763853 2.0763853 2.0763853 1.5029764 2.2300806 2.2300806 2.2300806 1.9035159 1.9035159 2.2866962 2.2866962 2.2866962 1.5695899 ] [ 2.0763853 2.0763853 2.0763853 1.5029764 2.2300806 2.2300806 2.2300806 1.9035159 1.9035159 2.2866962 2.2866962 2.2866962 1.5695899 ] [ 1.1615311 1.5029764 1.5029764 1.5029764 2.2300806 2.2300806 2.2300806 0.6890596 1.1427307 2.2866962 2.2866962 2.2866962 1.5695899 ] [ 1.5768385 1.5768385 1.1278558 1.1278558 0.6121021 0.4999942 0.6890596 0.6890596 1.1427307 1.1427307 1.1427307 1.2302419 1.2302419 ] [ 1.5768385 1.5768385 1.118759 1.118759 0.91047084 1.0193454 1.0193454 1.0193454 1.1427307 1.1427307 1.1427307 0.33570242 2.0724428 ] [ 1.5768385 1.5768385 1.118759 1.118759 0.91047084 1.0193454 1.0193454 1.0193454 1.0014404 1.0014404 0.33570242 1.0116148 2.0724428 ] [ 0.8956707 0.8026831 0.8026831 0.8026831 0.91047084 1.0193454 1.0193454 1.0591122 1.0591122 1.0591122 0.6277972 1.0116148 2.0724428 ] [ 0.8956707 0.8026831 0.8026831 2.0737827 2.0737827 2.0737827 1.0043635 1.0591122 1.0591122 1.0591122 0.6277972 1.0116148 1.7431467 ] [ 0.6448746 0.6691972 0.6691972 2.0737827 2.0737827 2.0737827 1.3037604 1.3037604 1.3037604 1.0591122 0.6277972 0.4375548 1.7431467 ] [ 1.0100254 1.0100254 1.0100254 2.0737827 2.0737827 2.0737827 1.3037604 1.3037604 1.3037604 0.81278145 1.2368822 1.2368822 1.7431467 ] [ 1.0100254 
1.0100254 1.0100254 1.72153 1.72153 1.72153 1.3037604 1.3037604 1.3037604 1.4030904 1.4030904 1.4030904 1.2368822 ] [ 1.9180675 1.9521385 1.9521385 1.9521385 1.7059273 1.3631214 1.2884032 1.2884032 0.59125155 2.0680068 2.0680068 2.0680068 1.2368822 ] [ 1.9180675 1.9521385 1.9521385 1.9521385 1.7059273 1.3802111 1.2884032 1.2884032 0.9304696 2.0680068 2.1520445 2.1520445 2.1520445 ] [ 1.9180675 1.9521385 1.9521385 1.9521385 1.8350675 1.8350675 1.1526567 1.1526567 1.1526567 2.0680068 2.1520445 2.1520445 2.1520445 ] [ 0.6975856 0.625525 0.5734374 1.8350675 1.8350675 1.8350675 1.1526567 1.1526567 1.1526567 0.36430869 2.1520445 2.1520445 2.1520445 ]] [[ 2.3879318 2.3879318 1.6154279 1.2373198 1.2373198 1.2139238 -0.19627625 0.5246931 1.6920229 1.6920229 1.6920229 0.35812533 0.71116704] [ 2.3879318 2.3879318 1.6154279 1.2373198 1.2373198 1.2139238 2.826524 2.826524 2.826524 1.6920229 1.6920229 0.95641077 0.95641077] [ 2.3879318 2.3879318 1.2373198 1.2373198 1.2373198 1.2179643 2.826524 2.826524 2.826524 0.53799254 0.6025734 0.95641077 0.95641077] [ 1.9278073 1.9278073 0.5781493 0.85588783 0.85588783 1.2179643 2.826524 2.826524 2.826524 0.8048381 0.9316795 0.95641077 0.95641077] [ 1.5575466 1.5575466 0.5460513 0.85588783 0.85588783 1.2179643 1.8238732 1.8238732 1.8238732 0.8048381 0.9316795 0.9316795 0.9316795 ] [ 1.5575466 1.5575466 0.16466969 0.16466969 0.64440334 0.64440334 1.8238732 1.8238732 1.8238732 0.8048381 0.9316795 0.9316795 0.9316795 ] [ 1.5575466 1.5575466 0.8293116 0.8293116 0.64440334 0.64440334 0.64440334 0.4559733 0.32837376 0.1729819 1.0180709 1.0180709 1.0180709 ] [ 1.055802 1.5232809 1.5232809 1.5232809 0.56501573 0.56501573 0.4559733 0.4559733 1.0463737 1.0463737 1.0463737 1.5012084 1.5012084 ] [ 1.054363 1.5232809 1.5232809 1.5232809 0.56501573 0.56501573 0.7570903 0.7570903 1.0463737 1.0463737 1.0463737 1.5365411 1.5365411 ] [ 0.940252 1.5232809 1.5232809 1.5232809 1.5957563 1.5957563 1.5957563 0.7570903 1.0463737 1.0463737 1.98542 1.98542 1.98542 ] 
[ 0.940252 0.940252 0.37016773 0.9230215 1.5957563 1.5957563 1.5957563 1.4106404 0.8428446 0.8428446 1.98542 1.98542 1.98542 ] [ 1.9679931 0.60027975 0.35910514 2.343571 2.343571 2.343571 1.5957563 1.4106404 0.8428446 0.8428446 1.98542 2.219552 2.219552 ] [ 1.9679931 0.60027975 0.35910514 2.343571 2.343571 2.343571 1.4106404 1.4106404 1.266661 0.6786402 0.37078646 2.219552 2.219552 ] [ 1.9679931 0.9201366 0.35910514 2.343571 2.343571 2.343571 1.6723835 1.6723835 1.266661 1.0021307 1.6107657 2.219552 2.219552 ] [ 0.9201366 0.9201366 -0.33714056 0.71181995 0.71181995 1.6723835 1.6723835 1.6723835 1.266661 1.0021307 1.6107657 1.6107657 1.6107657 ]]]]; ov_res: [[[[ 1.8322386 1.1215912 1.0280411 1.0280411 1.0280411 0.6370051 0.6278014 0.6278014 0.6278014 0.22722763 1.1518242 1.1518242 1.1518242 ] [ 1.8322386 1.1215912 1.1722368 1.1722368 1.1722368 0.79167056 0.6817378 0.71257204 0.71257204 0.71257204 1.1518242 1.8249861 1.8249861 ] [ 1.8322386 1.1215912 2.2229517 2.2229517 2.2229517 0.84572077 0.84572077 1.5430479 1.5430479 1.5430479 1.1518242 1.8249861 1.8249861 ] [ 2.2103324 2.2103324 2.2229517 2.2229517 2.2229517 0.84572077 1.11903 1.5430479 1.5430479 1.5430479 0.9116382 1.8249861 1.8249861 ] [ 2.2103324 2.2103324 2.2229517 2.2229517 2.2229517 1.4909894 1.11903 1.5430479 1.5430479 1.5430479 0.8065708 1.4190782 1.4190782 ] [ 2.2103324 2.2103324 2.2103324 1.4909894 1.4909894 1.4909894 1.1297147 1.3184016 1.3184016 1.3184016 1.0690317 0.93656695 1.6036923 ] [ 0.8643031 0.6434675 0.95762575 1.4909894 1.4909894 1.4909894 1.1297147 1.3184016 1.3184016 1.3184016 1.0690317 0.93656695 1.6470346 ] [ 1.0934874 1.0934874 1.0934874 0.9814273 0.98460203 1.1297147 1.1297147 1.3184016 1.3184016 1.3184016 1.0690317 1.1651038 1.6470346 ] [ 1.1272614 1.1272614 1.1272614 0.9814273 0.9814273 0.9814273 0.8087707 0.8087707 0.8087707 0.7576617 0.8785416 1.1651038 1.6470346 ] [ 1.1272614 1.1272614 1.1272614 0.44080976 0.44080976 0.44080976 0.11437768 0.7576617 0.93059117 0.93059117 
0.93059117 1.1651038 1.5686256 ] [ 1.2550644 1.1272614 1.1272614 0.40580675 0.40580675 2.6275434 2.6275434 2.6275434 1.4792343 1.2552385 1.0583264 1.2368062 1.5686256 ] [ 1.2550644 1.0676792 1.0418061 0.94723076 0.94723076 2.6275434 2.6275434 2.6275434 1.4792343 1.2552385 1.0583264 1.2368062 1.2368062 ] [ 1.2550644 1.2289821 1.2289821 1.2289821 0.94723076 2.6275434 2.6275434 2.6275434 1.4792343 1.2552385 1.1858281 1.2368062 1.2368062 ] [ 1.0453893 1.2289821 1.2289821 1.2289821 0.9952932 1.8474877 1.8474877 1.8474877 0.8898166 1.1858281 1.1858281 1.1858281 0.6287441 ] [ 1.0453893 1.2289821 1.2289821 1.2289821 0.9952932 1.8474877 1.8474877 1.8474877 0.8898166 1.1858281 1.1858281 1.1858281 0.6287441 ]] [[ 2.0763853 2.0763853 2.0763853 1.5029764 2.2300806 2.2300806 2.2300806 1.9035159 1.9035159 2.2866962 2.2866962 2.2866962 1.5695899 ] [ 2.0763853 2.0763853 2.0763853 1.5029764 2.2300806 2.2300806 2.2300806 1.9035159 1.9035159 2.2866962 2.2866962 2.2866962 1.5695899 ] [ 1.1615311 1.5029764 1.5029764 1.5029764 2.2300806 2.2300806 2.2300806 0.6890596 1.1427307 2.2866962 2.2866962 2.2866962 1.5695899 ] [ 1.5768385 1.5768385 1.1278558 1.1278558 0.6121021 0.4999942 0.6890596 0.6890596 1.1427307 1.1427307 1.1427307 1.2302419 1.2302419 ] [ 1.5768385 1.5768385 1.118759 1.118759 0.91047084 1.0193454 1.0193454 1.0193454 1.1427307 1.1427307 1.1427307 0.33570242 2.0724428 ] [ 1.5768385 1.5768385 1.118759 1.118759 0.91047084 1.0193454 1.0193454 1.0193454 1.0014404 1.0014404 0.33570242 1.0116148 2.0724428 ] [ 0.8956707 0.8026831 0.8026831 0.8026831 0.91047084 1.0193454 1.0193454 1.0591122 1.0591122 1.0591122 0.6277972 1.0116148 2.0724428 ] [ 0.8956707 0.8026831 0.8026831 2.0737827 2.0737827 2.0737827 1.0043635 1.0591122 1.0591122 1.0591122 0.6277972 1.0116148 1.7431467 ] [ 0.6448746 0.6691972 0.6691972 2.0737827 2.0737827 2.0737827 1.3037604 1.3037604 1.3037604 1.0591122 0.6277972 0.4375548 1.7431467 ] [ 1.0100254 1.0100254 1.0100254 2.0737827 2.0737827 2.0737827 1.3037604 1.3037604 
1.3037604 0.81278145 1.2368822 1.2368822 1.7431467 ] [ 1.0100254 1.0100254 1.0100254 1.72153 1.72153 1.72153 1.3037604 1.3037604 1.3037604 1.4030904 1.4030904 1.4030904 1.2368822 ] [ 1.9180675 1.9521385 1.9521385 1.9521385 1.7059273 1.3631214 1.2884032 1.2884032 0.59125155 2.0680068 2.0680068 2.0680068 1.2368822 ] [ 1.9180675 1.9521385 1.9521385 1.9521385 1.7059273 1.3802111 1.2884032 1.2884032 0.9304696 2.0680068 2.1520445 2.1520445 2.1520445 ] [ 1.9180675 1.9521385 1.9521385 1.9521385 1.8350675 1.8350675 1.1526567 1.1526567 1.1526567 2.0680068 2.1520445 2.1520445 2.1520445 ] [ 0.6975856 0.625525 0.5734374 1.8350675 1.8350675 1.8350675 1.1526567 1.1526567 1.1526567 0.36430869 2.1520445 2.1520445 2.1520445 ]] [[ 2.3879318 2.3879318 1.6154279 1.2373198 1.2373198 1.2139238 -0.19627625 0.5246931 1.6920229 1.6920229 1.6920229 0.35812533 0.71116704] [ 2.3879318 2.3879318 1.6154279 1.2373198 1.2373198 1.2139238 2.826524 2.826524 2.826524 1.6920229 1.6920229 0.95641077 0.95641077] [ 2.3879318 2.3879318 1.2373198 1.2373198 1.2373198 1.2179643 2.826524 2.826524 2.826524 0.53799254 0.6025734 0.95641077 0.95641077] [ 1.9278073 1.9278073 0.5781493 0.85588783 0.85588783 1.2179643 2.826524 2.826524 2.826524 0.8048381 0.9316795 0.95641077 0.95641077] [ 1.5575466 1.5575466 0.5460513 0.85588783 0.85588783 1.2179643 1.8238732 1.8238732 1.8238732 0.8048381 0.9316795 0.9316795 0.9316795 ] [ 1.5575466 1.5575466 0.16466969 0.16466969 0.64440334 0.64440334 1.8238732 1.8238732 1.8238732 0.8048381 0.9316795 0.9316795 0.9316795 ] [ 1.5575466 1.5575466 0.8293116 0.8293116 0.64440334 0.64440334 0.64440334 0.4559733 0.32837376 0.1729819 1.0180709 1.0180709 1.0180709 ] [ 1.055802 1.5232809 1.5232809 1.5232809 0.56501573 0.56501573 0.4559733 0.4559733 1.0463737 1.0463737 1.0463737 1.5012084 1.5012084 ] [ 1.054363 1.5232809 1.5232809 1.5232809 0.56501573 0.56501573 0.7570903 0.7570903 1.0463737 1.0463737 1.0463737 1.5365411 1.5365411 ] [ 0.940252 1.5232809 1.5232809 1.5232809 1.5957563 1.5957563 
1.5957563 0.7570903 1.0463737 1.0463737 1.98542 1.98542 1.98542 ] [ 0.940252 0.940252 0.37016773 0.9230215 1.5957563 1.5957563 1.5957563 1.4106404 0.8428446 0.8428446 1.98542 1.98542 1.98542 ] [ 1.9679931 0.60027975 0.35910514 2.343571 2.343571 2.343571 1.5957563 1.4106404 0.8428446 0.8428446 1.98542 2.219552 2.219552 ] [ 1.9679931 0.60027975 0.35910514 2.343571 2.343571 2.343571 1.4106404 1.4106404 1.266661 0.6786402 0.37078646 2.219552 2.219552 ] [ 1.9679931 0.9201366 0.35910514 2.343571 2.343571 2.343571 1.6723835 1.6723835 1.266661 1.0021307 1.6107657 2.219552 2.219552 ] [ 0.9201366 0.9201366 -0.33714056 0.71181995 0.71181995 1.6723835 1.6723835 1.6723835 1.266661 1.0021307 1.6107657 1.6107657 1.6107657 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5794.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%7) fw_re: [[[[ 1.1793113 0.4308514 0.7459428 1.2420558 1.2420558 1.3812909 1.3812909 1.3812909 1.193749 1.1900772 2.558067 2.558067 2.558067 ] [ 0.6400141 1.3567346 1.3567346 1.6209087 1.6209087 1.6209087 1.3812909 1.3812909 1.1900772 1.1900772 1.7358439 1.7358439 1.7358439 ] [ 1.933057 1.933057 1.933057 0.57075536 0.57075536 -0.01646709 0.72836524 0.9429729 1.6487027 1.6487027 1.6487027 2.5739696 2.5739696 ] [ 2.6386507 1.933057 1.933057 1.5237297 1.5237297 1.5237297 0.72836524 1.0024225 1.0024225 1.0024225 0.9135186 0.9135186 1.0892606 ] [ 2.6386507 0.6057785 0.6057785 1.8457108 1.8457108 1.8457108 0.3622448 1.0024225 1.0024225 1.0024225 0.4659756 0.4659756 1.0892606 ] [ 1.4661605 0.69739956 0.47249755 0.47249755 0.47249755 0.681604 0.73661894 0.73661894 1.4047064 1.4047064 1.4047064 1.7806971 1.7806971 ] [ 1.4661605 1.0743642 1.0743642 0.2744038 0.13889726 1.3337917 1.7071811 1.7071811 1.7071811 1.4047064 1.4047064 1.7806971 1.7806971 ]] [[ 0.77648324 0.9482727 1.6755791 1.6755791 1.6755791 1.1440116 1.1440116 1.1440116 0.8923842 0.39319083 0.17562903 0.36213282 1.6217358 ] [ 1.3894734 1.633599 1.6386012 1.6386012 1.6386012 0.9911173 0.9911173 0.9911173 0.5282227 1.65396 1.65396 1.65396 1.6217358 ] [ 1.2354836 1.2354836 1.4001939 1.400871 1.400871 1.400871 0.85923755 2.1309414 2.1309414 2.1309414 1.0071203 0.86595434 0.86595434] [ 1.5180913 1.5180913 1.5180913 
1.6004919 1.6004919 1.6004919 1.4851779 2.1309414 2.1309414 2.1309414 1.4141397 1.164937 1.164937 ] [ 1.5180913 1.5180913 1.5180913 0.42083922 0.37747708 1.4851779 1.4851779 1.4851779 1.3650872 1.3650872 1.9846174 1.9846174 1.9846174 ] [ 1.391835 1.6199136 1.6199136 2.3990026 2.3990026 2.3990026 1.7351089 1.7351089 1.5360818 1.7215985 1.9846174 1.9846174 1.9846174 ] [ 1.0387319 1.0387319 0.7406728 1.0392057 1.0392057 1.7351089 1.7351089 1.7351089 2.339869 2.339869 2.339869 1.7215985 0.814536 ]] [[ 1.7597827 1.2382195 1.2382195 1.2382195 0.86687523 0.86687523 0.8589968 0.8589968 0.8589968 1.2378869 1.2378869 2.1913586 2.1913586 ] [ 1.8028516 1.8028516 1.2382195 1.2382195 0.86687523 0.86687523 0.64220476 2.0932274 2.0932274 2.0932274 1.5592245 0.8724706 1.9135765 ] [ 1.8028516 1.8028516 1.3142248 1.3142248 0.5193375 0.64220476 2.8558338 2.8558338 2.8558338 1.6698765 1.6698765 2.409068 2.409068 ] [ 0.17130718 1.3142248 1.3142248 1.3142248 0.42241603 0.7141884 0.7141884 0.7141884 1.3319585 1.3319585 1.3319585 2.409068 2.409068 ] [ 0.86391157 1.3423321 2.430533 2.430533 2.430533 1.1198504 0.51561797 2.2604709 2.2604709 2.2604709 1.3319585 1.0433482 1.0433482 ] [ 1.5592295 1.5592295 2.430533 2.430533 2.430533 1.1198504 0.94348204 0.48545796 0.48545796 0.9250124 0.9250124 1.9092447 1.9092447 ] [ 2.7849648 2.7849648 1.5592295 1.6281759 1.6281759 1.6281759 0.94348204 0.13195565 0.94817805 0.94817805 0.94817805 1.9092447 1.9092447 ]]]]; ov_res: [[[[ 1.1793113 0.4308514 0.7459428 1.2420558 1.2420558 1.3812909 1.3812909 1.3812909 1.193749 1.1900772 2.558067 2.558067 2.558067 ] [ 0.6400141 1.3567346 1.3567346 1.6209087 1.6209087 1.6209087 1.3812909 1.3812909 1.1900772 1.1900772 1.7358439 1.7358439 1.7358439 ] [ 1.933057 1.933057 1.933057 0.57075536 0.57075536 -0.01646709 0.72836524 0.9429729 1.6487027 1.6487027 1.6487027 2.5739696 2.5739696 ] [ 2.6386507 1.933057 1.933057 1.5237297 1.5237297 1.5237297 0.72836524 1.0024225 1.0024225 1.0024225 0.9135186 0.9135186 1.0892606 ] [ 
2.6386507 0.6057785 0.6057785 1.8457108 1.8457108 1.8457108 0.3622448 1.0024225 1.0024225 1.0024225 0.4659756 0.4659756 1.0892606 ] [ 1.4661605 0.69739956 0.47249755 0.47249755 0.47249755 0.681604 0.73661894 0.73661894 1.4047064 1.4047064 1.4047064 1.7806971 1.7806971 ] [ 1.4661605 1.0743642 1.0743642 0.2744038 0.13889726 1.3337917 1.7071811 1.7071811 1.7071811 1.4047064 1.4047064 1.7806971 1.7806971 ]] [[ 0.77648324 0.9482727 1.6755791 1.6755791 1.6755791 1.1440116 1.1440116 1.1440116 0.8923842 0.39319083 0.17562903 0.36213282 1.6217358 ] [ 1.3894734 1.633599 1.6386012 1.6386012 1.6386012 0.9911173 0.9911173 0.9911173 0.5282227 1.65396 1.65396 1.65396 1.6217358 ] [ 1.2354836 1.2354836 1.4001939 1.400871 1.400871 1.400871 0.85923755 2.1309414 2.1309414 2.1309414 1.0071203 0.86595434 0.86595434] [ 1.5180913 1.5180913 1.5180913 1.6004919 1.6004919 1.6004919 1.4851779 2.1309414 2.1309414 2.1309414 1.4141397 1.164937 1.164937 ] [ 1.5180913 1.5180913 1.5180913 0.42083922 0.37747708 1.4851779 1.4851779 1.4851779 1.3650872 1.3650872 1.9846174 1.9846174 1.9846174 ] [ 1.391835 1.6199136 1.6199136 2.3990026 2.3990026 2.3990026 1.7351089 1.7351089 1.5360818 1.7215985 1.9846174 1.9846174 1.9846174 ] [ 1.0387319 1.0387319 0.7406728 1.0392057 1.0392057 1.7351089 1.7351089 1.7351089 2.339869 2.339869 2.339869 1.7215985 0.814536 ]] [[ 1.7597827 1.2382195 1.2382195 1.2382195 0.86687523 0.86687523 0.8589968 0.8589968 0.8589968 1.2378869 1.2378869 2.1913586 2.1913586 ] [ 1.8028516 1.8028516 1.2382195 1.2382195 0.86687523 0.86687523 0.64220476 2.0932274 2.0932274 2.0932274 1.5592245 0.8724706 1.9135765 ] [ 1.8028516 1.8028516 1.3142248 1.3142248 0.5193375 0.64220476 2.8558338 2.8558338 2.8558338 1.6698765 1.6698765 2.409068 2.409068 ] [ 0.17130718 1.3142248 1.3142248 1.3142248 0.42241603 0.7141884 0.7141884 0.7141884 1.3319585 1.3319585 1.3319585 2.409068 2.409068 ] [ 0.86391157 1.3423321 2.430533 2.430533 2.430533 1.1198504 0.51561797 2.2604709 2.2604709 2.2604709 1.3319585 1.0433482 
1.0433482 ] [ 1.5592295 1.5592295 2.430533 2.430533 2.430533 1.1198504 0.94348204 0.48545796 0.48545796 0.9250124 0.9250124 1.9092447 1.9092447 ] [ 2.7849648 2.7849648 1.5592295 1.6281759 1.6281759 1.6281759 0.94348204 0.13195565 0.94817805 0.94817805 0.94817805 1.9092447 1.9092447 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': [2, 1], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5796.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.kernel_size : int[] = prim::Constant[value=[2, 1]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.kernel_size, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[ 0.6689669 -0.40726802 1.1691029 -0.03886377 -0.8994812 0.22885206 1.1782022 0.5767446 0.07888669 1.151401 0.0734884 -0.09707315 1.2560414 -0.5134347 2.4129362 ] [-0.2424279 0.03461387 1.7772205 1.6610385 0.33112362 -0.4815821 -0.635138 -0.5788862 -0.03894336 1.4012896 -0.8038762 0.3470315 0.31880894 0.92887676 0.87838817] [ 0.18426897 1.57621 -0.3287298 -0.04210621 -0.51909727 -0.23627627 2.3801804 0.11231568 -0.6041427 0.28832844 -0.8640213 0.6730575 0.8235258 0.5861911 1.9547019 ] [ 0.8091659 1.1172231 1.255847 0.36185098 0.5501761 1.11941 1.9368097 1.4201032 1.1486603 0.9571294 0.01240684 0.8025108 -0.25470966 -0.43356332 0.5763937 ] [-1.074868 1.5723054 0.92915064 -0.6304006 2.2915049 0.5501851 0.7110145 0.9299814 0.48978293 -0.59277993 -0.10970134 0.7465435 0.19599983 -0.17222957 0.13052127] [ 1.2977426 0.80635923 -1.0239501 2.5304677 0.2751765 2.3599956 1.6160074 1.3818668 -0.566055 -0.2518651 2.6850934 -0.29093802 0.52781266 0.4075731 0.11517148] [-0.70028234 1.1736871 -0.06278662 0.8358466 0.30655316 -0.7193992 0.06737911 -0.16974233 0.8485697 2.1731071 -0.66410005 -1.3878336 0.17030406 0.39565855 0.6937324 ] [-0.13080177 0.3841057 -2.1167698 -0.7762942 -0.31006905 -1.0080596 1.0801549 0.0241045 -0.16515426 1.0307585 1.0113822 -0.2621967 0.40586093 -0.1222553 -0.2865628 ]] [[ 0.08794124 1.4779081 -0.11852389 0.15458558 0.25068557 -0.5976247 0.62764555 0.42468968 0.3849582 0.0152577 0.29884586 
1.6909604 0.5798745 1.220965 1.8387104 ] [ 1.4001756 -0.4981528 1.2557766 0.5813073 0.38294223 0.18769377 -1.0082198 1.1235256 -0.06323734 0.3828559 0.8846391 -1.3799734 1.541646 1.0304537 1.0336207 ] [ 0.20893528 0.7996871 0.56486535 -1.0812908 0.44401753 1.0673138 0.1546869 0.37014085 1.9009846 1.6890982 1.076446 -0.5434575 0.63461083 -0.3743302 0.9563133 ] [ 0.883206 2.498798 1.2483147 0.23919116 1.927834 1.1359624 1.9387451 0.96775043 0.0441938 0.95189667 0.9929641 2.213581 0.00769196 0.40149114 1.350705 ] [ 1.628308 0.13563946 1.4022715 0.11384395 1.3702756 0.17237516 1.2692353 0.7720833 0.24234174 0.992329 0.9890533 0.8045389 0.5096945 1.5099168 0.13036506] [ 1.1204315 0.8001843 1.2942042 0.5097649 -0.6021686 1.3001889 0.79204553 0.0112596 0.07409178 0.07487736 -0.5744239 0.5589165 1.1999782 0.99875855 1.4783597 ] [ 0.3499793 -0.79124624 0.07646098 1.0248641 -0.7609388 0.72317255 0.46819413 -0.40652308 0.69263387 -0.02026944 1.9094156 0.85961735 -0.06184317 0.55261683 1.3894587 ] [ 0.15778291 -0.6565683 -0.97314876 -1.0439618 0.87869024 0.93739 0.92843086 0.13679926 -0.84092814 -0.8827083 -0.04411911 -1.5996927 2.2977192 -1.3140154 -0.03035695]] [[ 0.9836169 1.4478128 1.39783 1.4024519 0.90118366 1.3326126 0.76384103 1.1845193 0.0608469 1.4109821 0.5843299 0.4739014 0.64351434 0.6832919 0.04855169] [ 0.2393181 -0.21851447 -0.08973689 0.430613 1.1289655 -0.19175158 -0.04192639 0.5484395 0.2579589 0.66436297 0.26353976 0.79368144 -0.356011 0.01271833 -0.7038844 ] [ 1.8677304 0.7893579 0.93416995 1.2291387 0.6955275 0.33862105 -0.06739276 0.6117686 0.5689168 0.53687346 -0.33143133 -0.6320955 1.2231219 1.0176405 0.4968687 ] [ 0.57326436 0.6294792 0.6054485 0.11508437 0.7936028 0.7311771 -0.17057675 1.1045314 0.95095325 0.5667245 0.49488917 0.9367498 0.86008435 -0.4638388 1.6735135 ] [-0.47541508 0.91100186 0.39779457 0.44301763 0.29289588 -0.12758742 1.2582586 2.706676 0.4383154 0.1311113 0.61837465 0.21437253 2.4557407 0.08262596 0.9291401 ] [ 0.17746836 
1.5086275 0.01779238 -0.52075535 -0.04907097 -0.15230778 0.48198307 -0.9434082 0.2207652 2.652582 0.8446752 -0.25930652 0.5836481 0.66498953 1.5641545 ] [ 1.8728774 1.3188986 1.6642774 0.06085719 -0.20109303 0.07530206 1.1734437 0.11123541 0.0758312 1.3814098 0.4636649 0.47803238 0.16555439 -0.17031091 1.3661357 ] [-0.73516095 -0.27005577 -0.78382117 0.8120792 -1.2411382 -1.2753164 -0.64461875 0.17205171 1.0220007 2.2859354 -0.8357694 0.32802853 0.21571857 -0.13978092 -0.38316104]]]]; ov_res: [[[[ 0.6689669 -0.40726802 1.1691029 -0.03886377 -0.8994812 0.22885206 1.1782022 0.5767446 0.07888669 1.151401 0.0734884 -0.09707315 1.2560414 -0.5134347 2.4129362 ] [-0.2424279 0.03461387 1.7772205 1.6610385 0.33112362 -0.4815821 -0.635138 -0.5788862 -0.03894336 1.4012896 -0.8038762 0.3470315 0.31880894 0.92887676 0.87838817] [ 0.18426897 1.57621 -0.3287298 -0.04210621 -0.51909727 -0.23627627 2.3801804 0.11231568 -0.6041427 0.28832844 -0.8640213 0.6730575 0.8235258 0.5861911 1.9547019 ] [ 0.8091659 1.1172231 1.255847 0.36185098 0.5501761 1.11941 1.9368097 1.4201032 1.1486603 0.9571294 0.01240684 0.8025108 -0.25470966 -0.43356332 0.5763937 ] [-1.074868 1.5723054 0.92915064 -0.6304006 2.2915049 0.5501851 0.7110145 0.9299814 0.48978293 -0.59277993 -0.10970134 0.7465435 0.19599983 -0.17222957 0.13052127] [ 1.2977426 0.80635923 -1.0239501 2.5304677 0.2751765 2.3599956 1.6160074 1.3818668 -0.566055 -0.2518651 2.6850934 -0.29093802 0.52781266 0.4075731 0.11517148] [-0.70028234 1.1736871 -0.06278662 0.8358466 0.30655316 -0.7193992 0.06737911 -0.16974233 0.8485697 2.1731071 -0.66410005 -1.3878336 0.17030406 0.39565855 0.6937324 ] [-0.13080177 0.3841057 -2.1167698 -0.7762942 -0.31006905 -1.0080596 1.0801549 0.0241045 -0.16515426 1.0307585 1.0113822 -0.2621967 0.40586093 -0.1222553 -0.2865628 ]] [[ 0.08794124 1.4779081 -0.11852389 0.15458558 0.25068557 -0.5976247 0.62764555 0.42468968 0.3849582 0.0152577 0.29884586 1.6909604 0.5798745 1.220965 1.8387104 ] [ 1.4001756 -0.4981528 
1.2557766 0.5813073 0.38294223 0.18769377 -1.0082198 1.1235256 -0.06323734 0.3828559 0.8846391 -1.3799734 1.541646 1.0304537 1.0336207 ] [ 0.20893528 0.7996871 0.56486535 -1.0812908 0.44401753 1.0673138 0.1546869 0.37014085 1.9009846 1.6890982 1.076446 -0.5434575 0.63461083 -0.3743302 0.9563133 ] [ 0.883206 2.498798 1.2483147 0.23919116 1.927834 1.1359624 1.9387451 0.96775043 0.0441938 0.95189667 0.9929641 2.213581 0.00769196 0.40149114 1.350705 ] [ 1.628308 0.13563946 1.4022715 0.11384395 1.3702756 0.17237516 1.2692353 0.7720833 0.24234174 0.992329 0.9890533 0.8045389 0.5096945 1.5099168 0.13036506] [ 1.1204315 0.8001843 1.2942042 0.5097649 -0.6021686 1.3001889 0.79204553 0.0112596 0.07409178 0.07487736 -0.5744239 0.5589165 1.1999782 0.99875855 1.4783597 ] [ 0.3499793 -0.79124624 0.07646098 1.0248641 -0.7609388 0.72317255 0.46819413 -0.40652308 0.69263387 -0.02026944 1.9094156 0.85961735 -0.06184317 0.55261683 1.3894587 ] [ 0.15778291 -0.6565683 -0.97314876 -1.0439618 0.87869024 0.93739 0.92843086 0.13679926 -0.84092814 -0.8827083 -0.04411911 -1.5996927 2.2977192 -1.3140154 -0.03035695]] [[ 0.9836169 1.4478128 1.39783 1.4024519 0.90118366 1.3326126 0.76384103 1.1845193 0.0608469 1.4109821 0.5843299 0.4739014 0.64351434 0.6832919 0.04855169] [ 0.2393181 -0.21851447 -0.08973689 0.430613 1.1289655 -0.19175158 -0.04192639 0.5484395 0.2579589 0.66436297 0.26353976 0.79368144 -0.356011 0.01271833 -0.7038844 ] [ 1.8677304 0.7893579 0.93416995 1.2291387 0.6955275 0.33862105 -0.06739276 0.6117686 0.5689168 0.53687346 -0.33143133 -0.6320955 1.2231219 1.0176405 0.4968687 ] [ 0.57326436 0.6294792 0.6054485 0.11508437 0.7936028 0.7311771 -0.17057675 1.1045314 0.95095325 0.5667245 0.49488917 0.9367498 0.86008435 -0.4638388 1.6735135 ] [-0.47541508 0.91100186 0.39779457 0.44301763 0.29289588 -0.12758742 1.2582586 2.706676 0.4383154 0.1311113 0.61837465 0.21437253 2.4557407 0.08262596 0.9291401 ] [ 0.17746836 1.5086275 0.01779238 -0.52075535 -0.04907097 -0.15230778 0.48198307 
-0.9434082 0.2207652 2.652582 0.8446752 -0.25930652 0.5836481 0.66498953 1.5641545 ] [ 1.8728774 1.3188986 1.6642774 0.06085719 -0.20109303 0.07530206 1.1734437 0.11123541 0.0758312 1.3814098 0.4636649 0.47803238 0.16555439 -0.17031091 1.3661357 ] [-0.73516095 -0.27005577 -0.78382117 0.8120792 -1.2411382 -1.2753164 -0.64461875 0.17205171 1.0220007 2.2859354 -0.8357694 0.32802853 0.21571857 -0.13978092 -0.38316104]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': 1, 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5798.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0]]() %3 : int[] = prim::Constant[value=[1, 1]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %3, %2, %3, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[ 0.5148738 1.3713511 1.6548456 1.6548456 1.6548456 1.7271132 1.7271132 1.7271132 1.4931628 1.4340692 1.2533419 1.2533419 1.7030871 ] [ 0.94834095 1.3713511 1.6548456 1.6548456 1.6548456 1.2011048 1.4931628 1.4931628 1.4931628 1.4340692 1.2419533 1.2419533 1.7030871 ] [ 1.0580589 1.0266099 1.6548456 1.6548456 1.6548456 1.4132217 1.4931628 1.4931628 1.4931628 1.4340692 1.2359065 1.2359065 1.2359065 ] [ 1.0580589 1.0266099 1.0266099 1.4132217 1.4132217 1.4132217 1.1485062 2.330909 2.330909 2.330909 1.377302 1.377302 1.377302 ] [ 1.0580589 1.0266099 1.0266099 1.4132217 1.4132217 1.4132217 1.1485062 2.330909 2.330909 2.330909 1.377302 1.377302 1.377302 ] [ 1.5079588 1.5079588 1.5079588 0.770673 1.4970121 1.4970121 1.4970121 2.330909 2.330909 2.330909 2.246843 2.246843 1.377302 ] [ 1.5079588 1.5079588 1.5079588 0.6722824 1.4970121 3.4881907 3.4881907 3.4881907 1.6476595 2.246843 2.246843 2.246843 1.5955038 ] [ 2.1036732 2.1036732 2.1036732 0.6722824 1.4970121 3.4881907 3.4881907 3.4881907 1.6476595 2.246843 2.246843 2.246843 1.5955038 ] [ 2.1036732 2.1036732 2.1036732 0.6722824 1.028367 3.4881907 3.4881907 3.4881907 1.6476595 1.0721822 1.5849696 1.5955038 1.5955038 ] [ 2.1036732 2.1036732 2.1036732 0.79241204 1.028367 1.028367 1.028367 0.39638045 0.82578415 1.0721822 1.2869216 1.2869216 1.2869216 ] [ 0.79570246 0.79570246 0.79570246 0.79241204 1.028367 1.028367 1.028367 0.2645728 0.82578415 1.0721822 1.2869216 1.6199074 
1.6199074 ] [ 0.79570246 0.79570246 0.79570246 0.79241204 0.79241204 0.6789825 1.1550356 1.1550356 1.1550356 0.82578415 1.1093241 1.6199074 1.6199074 ] [-0.22065087 1.4155166 1.4155166 1.4155166 1.5693415 1.5693415 1.5693415 1.1844971 1.1844971 0.5572146 0.5572146 1.6199074 1.6199074 ]] [[ 2.4696631 2.4696631 0.96387625 2.7321813 2.7321813 2.7321813 1.0842317 1.0842317 2.1349227 2.1349227 2.1349227 1.8202486 1.8202486 ] [ 2.4696631 2.4696631 2.1361802 2.7321813 2.7321813 2.7321813 1.0842317 1.0842317 1.0601274 1.1074247 1.8202486 1.8202486 1.8202486 ] [ 2.4696631 2.4696631 2.1361802 2.7321813 2.7321813 2.7321813 1.0842317 1.0842317 1.0601274 1.3656682 1.8202486 1.8202486 1.8202486 ] [ 0.9331438 2.1361802 2.1361802 2.1361802 1.8200786 1.8200786 1.8200786 1.321361 1.5848033 1.5848033 1.5848033 1.3656682 1.3080505 ] [-0.2932257 0.33983624 0.33983624 1.361412 1.8200786 1.8200786 1.8200786 1.321361 1.5848033 1.5848033 1.5848033 1.3656682 1.3080505 ] [ 0.0462261 0.0462261 -0.0089298 1.361412 1.8200786 1.8200786 1.8200786 1.321361 1.5848033 1.5848033 1.5848033 1.1408963 0.80783856] [ 0.0462261 0.0462261 0.92999613 1.7147343 1.7147343 1.7147343 0.6749962 0.6749962 0.12086528 1.1408963 1.1408963 1.1408963 0.80783856] [ 0.8360057 0.8360057 0.92999613 1.7147343 1.7147343 1.7147343 0.82124525 0.82124525 1.3492327 1.3492327 1.3492327 1.1408963 0.21263774] [ 0.8360057 0.8360057 0.92999613 1.7147343 1.7147343 1.7147343 0.8664086 0.8664086 1.8891609 1.8891609 1.8891609 0.7607446 0.21263774] [ 1.404118 0.8360057 0.31611598 1.4159521 1.4159521 1.4159521 0.8664086 0.8664086 1.8891609 1.8891609 1.8891609 1.1544502 1.2648705 ] [ 1.404118 1.2226315 1.2226315 1.4159521 1.4159521 1.4159521 1.1582133 0.8664086 1.8891609 1.8891609 1.8891609 1.1544502 1.2648705 ] [ 1.9134595 1.2226315 1.2226315 0.3059954 1.1582133 1.1582133 1.1582133 0.5458446 0.27924418 0.754331 1.4896277 1.4896277 1.4896277 ] [ 1.9134595 1.6852076 1.6852076 0.4122323 1.1582133 1.1582133 1.1582133 1.0035678 1.0035678 
0.4673042 1.4896277 1.4896277 1.4896277 ]] [[ 1.5157939 1.5157939 1.3894532 1.2836703 1.4718904 1.4718904 1.5787781 1.5787781 1.5787781 1.1937196 1.1937196 1.6915435 1.6915435 ] [ 1.5157939 1.5157939 1.2836703 1.2836703 1.4718904 1.4718904 1.4718904 1.130113 1.130113 1.130113 1.3895864 1.6915435 1.6915435 ] [ 1.5157939 1.5157939 1.2836703 1.2836703 1.4718904 1.4718904 1.4718904 1.7449926 1.7449926 1.7449926 1.3895864 1.3895864 1.3895864 ] [ 1.203264 1.203264 1.2484363 1.2484363 1.2484363 1.2202125 2.54408 2.54408 2.54408 1.7449926 1.3895864 1.3895864 1.3895864 ] [ 1.203264 1.203264 1.661971 1.661971 1.661971 0.66009796 2.54408 2.54408 2.54408 1.8086013 1.8086013 0.83765465 0.6975709 ] [ 1.203264 1.4684685 1.661971 1.661971 1.661971 1.507161 2.54408 2.54408 2.54408 1.8086013 1.8086013 1.3264734 1.3264734 ] [ 0.61810285 1.4684685 1.661971 1.661971 1.661971 1.507161 1.507161 -0.09607099 1.8086013 1.8086013 1.8086013 1.3264734 1.3264734 ] [ 0.61810285 1.4684685 1.4684685 1.4684685 1.507161 1.507161 1.507161 0.44085297 0.8331967 0.8331967 0.8331967 1.3264734 1.3264734 ] [ 3.1173081 0.48943174 1.1193277 1.1193277 1.1193277 0.6247732 0.4371826 0.44085297 1.5030406 1.5030406 1.5030406 1.0944779 1.0944779 ] [ 3.1173081 0.8692257 0.8692257 0.72581524 0.72581524 0.72581524 0.722813 0.44085297 1.5030406 1.9858296 1.9858296 2.1003692 2.1003692 ] [ 3.1173081 0.8692257 0.8692257 0.72581524 0.72581524 0.72581524 0.722813 0.29013917 1.5030406 1.9858296 1.9858296 2.1003692 2.1003692 ] [ 0.8692257 0.8692257 0.8692257 0.72581524 0.72581524 1.223415 1.223415 1.223415 0.3281126 1.9858296 1.9858296 2.1003692 2.1003692 ] [ 0.8483165 2.1359837 2.1359837 2.3882911 2.3882911 2.3882911 1.223415 1.223415 0.7466534 0.7466534 0.9464758 1.4078662 1.4078662 ]]]]; ov_res: [[[[ 0.5148738 1.3713511 1.6548456 1.6548456 1.6548456 1.7271132 1.7271132 1.7271132 1.4931628 1.4340692 1.2533419 1.2533419 1.7030871 ] [ 0.94834095 1.3713511 1.6548456 1.6548456 1.6548456 1.2011048 1.4931628 1.4931628 1.4931628 
1.4340692 1.2419533 1.2419533 1.7030871 ] [ 1.0580589 1.0266099 1.6548456 1.6548456 1.6548456 1.4132217 1.4931628 1.4931628 1.4931628 1.4340692 1.2359065 1.2359065 1.2359065 ] [ 1.0580589 1.0266099 1.0266099 1.4132217 1.4132217 1.4132217 1.1485062 2.330909 2.330909 2.330909 1.377302 1.377302 1.377302 ] [ 1.0580589 1.0266099 1.0266099 1.4132217 1.4132217 1.4132217 1.1485062 2.330909 2.330909 2.330909 1.377302 1.377302 1.377302 ] [ 1.5079588 1.5079588 1.5079588 0.770673 1.4970121 1.4970121 1.4970121 2.330909 2.330909 2.330909 2.246843 2.246843 1.377302 ] [ 1.5079588 1.5079588 1.5079588 0.6722824 1.4970121 3.4881907 3.4881907 3.4881907 1.6476595 2.246843 2.246843 2.246843 1.5955038 ] [ 2.1036732 2.1036732 2.1036732 0.6722824 1.4970121 3.4881907 3.4881907 3.4881907 1.6476595 2.246843 2.246843 2.246843 1.5955038 ] [ 2.1036732 2.1036732 2.1036732 0.6722824 1.028367 3.4881907 3.4881907 3.4881907 1.6476595 1.0721822 1.5849696 1.5955038 1.5955038 ] [ 2.1036732 2.1036732 2.1036732 0.79241204 1.028367 1.028367 1.028367 0.39638045 0.82578415 1.0721822 1.2869216 1.2869216 1.2869216 ] [ 0.79570246 0.79570246 0.79570246 0.79241204 1.028367 1.028367 1.028367 0.2645728 0.82578415 1.0721822 1.2869216 1.6199074 1.6199074 ] [ 0.79570246 0.79570246 0.79570246 0.79241204 0.79241204 0.6789825 1.1550356 1.1550356 1.1550356 0.82578415 1.1093241 1.6199074 1.6199074 ] [-0.22065087 1.4155166 1.4155166 1.4155166 1.5693415 1.5693415 1.5693415 1.1844971 1.1844971 0.5572146 0.5572146 1.6199074 1.6199074 ]] [[ 2.4696631 2.4696631 0.96387625 2.7321813 2.7321813 2.7321813 1.0842317 1.0842317 2.1349227 2.1349227 2.1349227 1.8202486 1.8202486 ] [ 2.4696631 2.4696631 2.1361802 2.7321813 2.7321813 2.7321813 1.0842317 1.0842317 1.0601274 1.1074247 1.8202486 1.8202486 1.8202486 ] [ 2.4696631 2.4696631 2.1361802 2.7321813 2.7321813 2.7321813 1.0842317 1.0842317 1.0601274 1.3656682 1.8202486 1.8202486 1.8202486 ] [ 0.9331438 2.1361802 2.1361802 2.1361802 1.8200786 1.8200786 1.8200786 1.321361 1.5848033 
1.5848033 1.5848033 1.3656682 1.3080505 ] [-0.2932257 0.33983624 0.33983624 1.361412 1.8200786 1.8200786 1.8200786 1.321361 1.5848033 1.5848033 1.5848033 1.3656682 1.3080505 ] [ 0.0462261 0.0462261 -0.0089298 1.361412 1.8200786 1.8200786 1.8200786 1.321361 1.5848033 1.5848033 1.5848033 1.1408963 0.80783856] [ 0.0462261 0.0462261 0.92999613 1.7147343 1.7147343 1.7147343 0.6749962 0.6749962 0.12086528 1.1408963 1.1408963 1.1408963 0.80783856] [ 0.8360057 0.8360057 0.92999613 1.7147343 1.7147343 1.7147343 0.82124525 0.82124525 1.3492327 1.3492327 1.3492327 1.1408963 0.21263774] [ 0.8360057 0.8360057 0.92999613 1.7147343 1.7147343 1.7147343 0.8664086 0.8664086 1.8891609 1.8891609 1.8891609 0.7607446 0.21263774] [ 1.404118 0.8360057 0.31611598 1.4159521 1.4159521 1.4159521 0.8664086 0.8664086 1.8891609 1.8891609 1.8891609 1.1544502 1.2648705 ] [ 1.404118 1.2226315 1.2226315 1.4159521 1.4159521 1.4159521 1.1582133 0.8664086 1.8891609 1.8891609 1.8891609 1.1544502 1.2648705 ] [ 1.9134595 1.2226315 1.2226315 0.3059954 1.1582133 1.1582133 1.1582133 0.5458446 0.27924418 0.754331 1.4896277 1.4896277 1.4896277 ] [ 1.9134595 1.6852076 1.6852076 0.4122323 1.1582133 1.1582133 1.1582133 1.0035678 1.0035678 0.4673042 1.4896277 1.4896277 1.4896277 ]] [[ 1.5157939 1.5157939 1.3894532 1.2836703 1.4718904 1.4718904 1.5787781 1.5787781 1.5787781 1.1937196 1.1937196 1.6915435 1.6915435 ] [ 1.5157939 1.5157939 1.2836703 1.2836703 1.4718904 1.4718904 1.4718904 1.130113 1.130113 1.130113 1.3895864 1.6915435 1.6915435 ] [ 1.5157939 1.5157939 1.2836703 1.2836703 1.4718904 1.4718904 1.4718904 1.7449926 1.7449926 1.7449926 1.3895864 1.3895864 1.3895864 ] [ 1.203264 1.203264 1.2484363 1.2484363 1.2484363 1.2202125 2.54408 2.54408 2.54408 1.7449926 1.3895864 1.3895864 1.3895864 ] [ 1.203264 1.203264 1.661971 1.661971 1.661971 0.66009796 2.54408 2.54408 2.54408 1.8086013 1.8086013 0.83765465 0.6975709 ] [ 1.203264 1.4684685 1.661971 1.661971 1.661971 1.507161 2.54408 2.54408 2.54408 1.8086013 
1.8086013 1.3264734 1.3264734 ] [ 0.61810285 1.4684685 1.661971 1.661971 1.661971 1.507161 1.507161 -0.09607099 1.8086013 1.8086013 1.8086013 1.3264734 1.3264734 ] [ 0.61810285 1.4684685 1.4684685 1.4684685 1.507161 1.507161 1.507161 0.44085297 0.8331967 0.8331967 0.8331967 1.3264734 1.3264734 ] [ 3.1173081 0.48943174 1.1193277 1.1193277 1.1193277 0.6247732 0.4371826 0.44085297 1.5030406 1.5030406 1.5030406 1.0944779 1.0944779 ] [ 3.1173081 0.8692257 0.8692257 0.72581524 0.72581524 0.72581524 0.722813 0.44085297 1.5030406 1.9858296 1.9858296 2.1003692 2.1003692 ] [ 3.1173081 0.8692257 0.8692257 0.72581524 0.72581524 0.72581524 0.722813 0.29013917 1.5030406 1.9858296 1.9858296 2.1003692 2.1003692 ] [ 0.8692257 0.8692257 0.8692257 0.72581524 0.72581524 1.223415 1.223415 1.223415 0.3281126 1.9858296 1.9858296 2.1003692 2.1003692 ] [ 0.8483165 2.1359837 2.1359837 2.3882911 2.3882911 2.3882911 1.223415 1.223415 0.7466534 0.7466534 0.9464758 1.4078662 1.4078662 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5800.aten_max_pool2d, %x.1 : Tensor): %self.ceil_mode : bool = prim::Constant[value=0]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %5 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.stride, %self.stride, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%5) fw_re: [[[[0.136926 0.23290414 0.23290414 0.23290414 0.61876255 0.61876255 2.120987 2.120987 2.120987 1.5576187 1.5576187 1.3013889 1.3013889 1.3013889 1.2041436 ] [0.29089138 0.29089138 0.29089138 0.23290414 0.61876255 0.61876255 2.120987 2.120987 2.120987 1.5576187 1.5576187 1.3013889 1.3013889 1.3013889 1.2041436 ] [0.29089138 0.8344828 0.8344828 0.8344828 0.61876255 0.61876255 0.8859093 0.8859093 1.5576187 1.5576187 1.5576187 1.3013889 1.3013889 1.3013889 0.01598603] [0.29089138 0.8344828 0.8344828 0.8344828 1.4866934 1.4866934 1.4866934 0.8859093 0.9520938 0.9520938 0.9520938 0.22859822 1.1962733 1.1962733 1.1962733 ] [1.4527028 1.4527028 1.4527028 0.8344828 1.4866934 1.4866934 1.4866934 0.85164815 0.85164815 0.85164815 1.5722975 1.5722975 1.5722975 1.1962733 1.1962733 ] [1.4527028 1.4527028 1.8216289 1.8216289 1.8216289 1.4866934 1.4866934 0.85164815 0.85164815 0.85164815 1.5722975 1.5722975 1.5722975 1.1962733 1.1962733 ] [1.4527028 1.4527028 1.8216289 1.8216289 1.8216289 0.7642138 1.2439153 1.2439153 1.2439153 0.85164815 1.5722975 1.5722975 1.5722975 1.5321205 0.6958287 ] [1.4594399 1.4594399 1.8216289 1.8216289 1.8216289 0.7642138 1.2439153 1.2439153 1.2439153 1.1858621 1.1920729 1.5321205 1.5321205 1.5321205 0.6958287 ] [1.9381498 1.9381498 1.9381498 1.3216438 0.8322287 1.2623546 1.7041421 1.7041421 1.7041421 1.1858621 1.4988511 1.5321205 1.5321205 1.5321205 0.6958287 ] [1.9381498 1.9381498 1.9381498 1.7939578 0.32297558 
1.2623546 1.7041421 1.7041421 1.7041421 1.1858621 1.4988511 1.4988511 1.4988511 0.8974396 0.5328162 ] [1.9381498 1.9381498 1.9381498 1.7939578 1.400622 1.2623546 1.7041421 1.7041421 1.7041421 0.49264142 1.4988511 1.5295651 2.453384 2.453384 2.453384 ] [1.2620376 1.9374053 1.9374053 1.9374053 1.400622 0.88279027 1.2197027 1.2197027 1.2197027 0.49264142 0.49264142 1.5295651 2.453384 2.453384 2.453384 ] [1.2620376 1.9374053 2.253992 2.253992 2.253992 1.6638824 1.6638824 1.6638824 1.3779026 0.5471008 0.5471008 1.5295651 2.453384 2.453384 2.453384 ] [0.64208454 1.9374053 2.253992 2.253992 2.253992 1.6638824 1.6638824 1.6638824 1.3779026 2.2858272 2.2858272 2.2858272 1.7821114 1.7821114 1.7821114 ] [0.4299445 0.4299445 2.253992 2.253992 2.253992 1.6638824 1.6638824 1.6638824 1.3779026 2.2858272 2.2858272 2.2858272 1.7821114 1.7821114 1.7821114 ]] [[2.0199816 2.0199816 1.951731 1.951731 1.4184748 1.4184748 0.5060502 0.76964134 0.76964134 0.76964134 0.22032586 1.0766687 1.0766687 1.0766687 0.84190357] [2.0199816 2.0199816 1.951731 1.951731 1.4184748 1.4184748 0.95454705 0.95454705 0.76964134 0.76964134 0.22032586 1.0766687 1.0766687 1.0766687 0.84190357] [0.6588004 1.4143777 1.4143777 1.4143777 1.0395763 1.0395763 0.95454705 0.95454705 1.4384307 1.4384307 1.4384307 0.72762 0.84190357 0.84190357 0.84190357] [0.60375255 1.4143777 1.4143777 1.4143777 1.0395763 1.0395763 0.95454705 0.95454705 1.4384307 1.4384307 1.4384307 0.72762 0.33004266 0.49637738 0.49637738] [0.9761332 1.4143777 1.4143777 1.4143777 0.84264696 1.5845159 1.5845159 1.5845159 1.4384307 1.4384307 1.8691671 1.8691671 1.8691671 0.87214965 0.87214965] [0.9761332 0.9761332 1.6027335 1.6027335 1.6027335 1.5845159 1.5845159 1.5845159 1.0023205 1.0023205 1.8691671 1.8691671 1.8691671 2.0892222 2.0892222 ] [0.9761332 0.9761332 1.6027335 1.6027335 1.6027335 1.5845159 1.5845159 1.5845159 0.78960776 0.78960776 1.8691671 1.8691671 1.8691671 2.0892222 2.0892222 ] [1.0925984 1.0925984 1.6027335 1.6027335 1.6027335 1.3896595 
1.3896595 1.2430302 1.8529458 1.8529458 1.8529458 0.8075929 0.8075929 2.0892222 2.0892222 ] [1.3636777 1.3636777 1.3636777 0.85681957 1.5737002 1.5737002 1.5737002 0.81314814 1.8529458 1.8529458 2.1179621 2.1179621 2.1179621 0.8075929 0.66044813] [1.3636777 1.3636777 1.3636777 0.7235125 1.5737002 1.5737002 1.5737002 0.81314814 1.8529458 1.8529458 2.1179621 2.1179621 2.1179621 0.91868085 0.91868085] [1.3636777 1.3636777 1.3636777 0.6767632 1.5737002 1.5737002 1.5737002 1.2587345 0.7722023 1.139612 2.1179621 2.1179621 2.1179621 0.91868085 0.91868085] [0.5070273 0.5070273 0.5070273 0.4624766 0.9847045 1.2587345 1.2587345 1.2587345 1.5102926 1.5102926 1.5102926 1.2849743 1.2849743 1.2849743 0.91868085] [0.5070273 0.5070273 0.5070273 0.66884375 0.9847045 1.2587345 1.2587345 1.2587345 1.5102926 1.5102926 1.5102926 1.2849743 1.2849743 1.2849743 0.9042433 ] [1.7648503 1.7648503 1.7648503 0.9625288 1.0536443 1.0536443 1.0536443 1.661685 1.661685 1.661685 1.5102926 1.2849743 1.2849743 1.2849743 0.7308945 ] [1.7648503 1.7648503 1.7648503 0.9625288 1.0536443 1.0536443 1.0536443 1.661685 1.661685 1.661685 0.65972185 0.97613484 0.97613484 0.97613484 0.7308945 ]] [[1.2404768 1.4212898 1.4212898 1.4212898 1.1098877 2.2986095 2.2986095 2.2986095 0.55456793 1.020285 1.6183271 1.6183271 1.6183271 1.191221 1.191221 ] [1.2404768 1.4212898 1.5318844 1.5318844 2.3056998 2.3056998 2.3056998 2.2986095 0.84782183 1.020285 1.6183271 1.6183271 1.6238935 1.6238935 1.6238935 ] [1.2404768 1.2404768 1.5318844 1.5318844 2.3056998 2.3056998 2.3056998 0.7246044 1.0227374 1.0227374 1.0227374 0.85279983 1.6238935 1.6238935 1.6238935 ] [0.6288165 0.6288165 1.5318844 1.5318844 2.3056998 2.3056998 2.3056998 1.1431109 1.1431109 1.6040876 1.6040876 1.6040876 1.6238935 1.6238935 1.6238935 ] [0.6288165 0.6288165 0.47121018 1.4295715 1.4295715 1.4295715 0.80751085 1.2639023 1.2639023 1.6040876 1.6040876 1.6040876 0.95339835 0.6279626 0.6279626 ] [1.034162 1.034162 1.034162 0.5933973 0.5732487 2.4745388 
2.4745388 2.4745388 1.749652 1.6040876 1.6040876 1.6040876 1.444011 1.4894782 1.4894782 ] [1.034162 1.034162 1.034162 0.8203674 0.8203674 2.4745388 2.4745388 2.4745388 1.749652 1.5942469 3.3978088 3.3978088 3.3978088 1.4894782 1.4894782 ] [1.034162 1.034162 1.034162 0.8203674 0.8203674 2.4745388 2.4745388 2.4745388 1.749652 1.5942469 3.3978088 3.3978088 3.3978088 2.104195 2.104195 ] [1.345158 1.345158 1.345158 0.8203674 0.8203674 2.0659876 2.0659876 2.0659876 0.97216696 0.97216696 3.3978088 3.3978088 3.3978088 2.104195 2.104195 ] [1.345158 1.345158 1.345158 0.44989592 1.2784499 2.0659876 2.0659876 2.0659876 1.7248774 1.7248774 1.4124671 1.4124671 1.4124671 2.104195 2.104195 ] [1.345158 1.345158 2.0856805 2.0856805 2.0856805 1.9936913 1.9936913 1.9936913 1.7248774 1.7248774 0.48962858 0.06657137 2.0214596 2.1148734 2.1148734 ] [0.5132385 0.5132385 2.0856805 2.0856805 2.0856805 1.9936913 1.9936913 1.9936913 1.7248774 1.7248774 0.24950351 0.06657137 2.0214596 2.1148734 2.1148734 ] [0.5132385 0.5132385 2.0856805 2.0856805 2.5944731 2.5944731 2.5944731 0.88005584 0.68750024 0.68750024 0.47356805 1.0179433 2.0214596 2.1148734 2.1148734 ] [0.5132385 0.5955196 0.5955196 0.5955196 2.5944731 2.5944731 2.5944731 0.88005584 0.68750024 0.9308051 0.9308051 1.0179433 1.4648995 1.4648995 1.4648995 ] [0.2538487 0.5955196 0.5955196 0.5955196 2.5944731 2.5944731 2.5944731 0.88005584 0.5936276 0.9308051 0.9308051 1.0179433 1.4648995 1.4648995 1.4648995 ]]]]; ov_res: [[[[0.136926 0.23290414 0.23290414 0.23290414 0.61876255 0.61876255 2.120987 2.120987 2.120987 1.5576187 1.5576187 1.3013889 1.3013889 1.3013889 1.2041436 ] [0.29089138 0.29089138 0.29089138 0.23290414 0.61876255 0.61876255 2.120987 2.120987 2.120987 1.5576187 1.5576187 1.3013889 1.3013889 1.3013889 1.2041436 ] [0.29089138 0.8344828 0.8344828 0.8344828 0.61876255 0.61876255 0.8859093 0.8859093 1.5576187 1.5576187 1.5576187 1.3013889 1.3013889 1.3013889 0.01598603] [0.29089138 0.8344828 0.8344828 0.8344828 1.4866934 
1.4866934 1.4866934 0.8859093 0.9520938 0.9520938 0.9520938 0.22859822 1.1962733 1.1962733 1.1962733 ] [1.4527028 1.4527028 1.4527028 0.8344828 1.4866934 1.4866934 1.4866934 0.85164815 0.85164815 0.85164815 1.5722975 1.5722975 1.5722975 1.1962733 1.1962733 ] [1.4527028 1.4527028 1.8216289 1.8216289 1.8216289 1.4866934 1.4866934 0.85164815 0.85164815 0.85164815 1.5722975 1.5722975 1.5722975 1.1962733 1.1962733 ] [1.4527028 1.4527028 1.8216289 1.8216289 1.8216289 0.7642138 1.2439153 1.2439153 1.2439153 0.85164815 1.5722975 1.5722975 1.5722975 1.5321205 0.6958287 ] [1.4594399 1.4594399 1.8216289 1.8216289 1.8216289 0.7642138 1.2439153 1.2439153 1.2439153 1.1858621 1.1920729 1.5321205 1.5321205 1.5321205 0.6958287 ] [1.9381498 1.9381498 1.9381498 1.3216438 0.8322287 1.2623546 1.7041421 1.7041421 1.7041421 1.1858621 1.4988511 1.5321205 1.5321205 1.5321205 0.6958287 ] [1.9381498 1.9381498 1.9381498 1.7939578 0.32297558 1.2623546 1.7041421 1.7041421 1.7041421 1.1858621 1.4988511 1.4988511 1.4988511 0.8974396 0.5328162 ] [1.9381498 1.9381498 1.9381498 1.7939578 1.400622 1.2623546 1.7041421 1.7041421 1.7041421 0.49264142 1.4988511 1.5295651 2.453384 2.453384 2.453384 ] [1.2620376 1.9374053 1.9374053 1.9374053 1.400622 0.88279027 1.2197027 1.2197027 1.2197027 0.49264142 0.49264142 1.5295651 2.453384 2.453384 2.453384 ] [1.2620376 1.9374053 2.253992 2.253992 2.253992 1.6638824 1.6638824 1.6638824 1.3779026 0.5471008 0.5471008 1.5295651 2.453384 2.453384 2.453384 ] [0.64208454 1.9374053 2.253992 2.253992 2.253992 1.6638824 1.6638824 1.6638824 1.3779026 2.2858272 2.2858272 2.2858272 1.7821114 1.7821114 1.7821114 ] [0.4299445 0.4299445 2.253992 2.253992 2.253992 1.6638824 1.6638824 1.6638824 1.3779026 2.2858272 2.2858272 2.2858272 1.7821114 1.7821114 1.7821114 ]] [[2.0199816 2.0199816 1.951731 1.951731 1.4184748 1.4184748 0.5060502 0.76964134 0.76964134 0.76964134 0.22032586 1.0766687 1.0766687 1.0766687 0.84190357] [2.0199816 2.0199816 1.951731 1.951731 1.4184748 1.4184748 
0.95454705 0.95454705 0.76964134 0.76964134 0.22032586 1.0766687 1.0766687 1.0766687 0.84190357] [0.6588004 1.4143777 1.4143777 1.4143777 1.0395763 1.0395763 0.95454705 0.95454705 1.4384307 1.4384307 1.4384307 0.72762 0.84190357 0.84190357 0.84190357] [0.60375255 1.4143777 1.4143777 1.4143777 1.0395763 1.0395763 0.95454705 0.95454705 1.4384307 1.4384307 1.4384307 0.72762 0.33004266 0.49637738 0.49637738] [0.9761332 1.4143777 1.4143777 1.4143777 0.84264696 1.5845159 1.5845159 1.5845159 1.4384307 1.4384307 1.8691671 1.8691671 1.8691671 0.87214965 0.87214965] [0.9761332 0.9761332 1.6027335 1.6027335 1.6027335 1.5845159 1.5845159 1.5845159 1.0023205 1.0023205 1.8691671 1.8691671 1.8691671 2.0892222 2.0892222 ] [0.9761332 0.9761332 1.6027335 1.6027335 1.6027335 1.5845159 1.5845159 1.5845159 0.78960776 0.78960776 1.8691671 1.8691671 1.8691671 2.0892222 2.0892222 ] [1.0925984 1.0925984 1.6027335 1.6027335 1.6027335 1.3896595 1.3896595 1.2430302 1.8529458 1.8529458 1.8529458 0.8075929 0.8075929 2.0892222 2.0892222 ] [1.3636777 1.3636777 1.3636777 0.85681957 1.5737002 1.5737002 1.5737002 0.81314814 1.8529458 1.8529458 2.1179621 2.1179621 2.1179621 0.8075929 0.66044813] [1.3636777 1.3636777 1.3636777 0.7235125 1.5737002 1.5737002 1.5737002 0.81314814 1.8529458 1.8529458 2.1179621 2.1179621 2.1179621 0.91868085 0.91868085] [1.3636777 1.3636777 1.3636777 0.6767632 1.5737002 1.5737002 1.5737002 1.2587345 0.7722023 1.139612 2.1179621 2.1179621 2.1179621 0.91868085 0.91868085] [0.5070273 0.5070273 0.5070273 0.4624766 0.9847045 1.2587345 1.2587345 1.2587345 1.5102926 1.5102926 1.5102926 1.2849743 1.2849743 1.2849743 0.91868085] [0.5070273 0.5070273 0.5070273 0.66884375 0.9847045 1.2587345 1.2587345 1.2587345 1.5102926 1.5102926 1.5102926 1.2849743 1.2849743 1.2849743 0.9042433 ] [1.7648503 1.7648503 1.7648503 0.9625288 1.0536443 1.0536443 1.0536443 1.661685 1.661685 1.661685 1.5102926 1.2849743 1.2849743 1.2849743 0.7308945 ] [1.7648503 1.7648503 1.7648503 0.9625288 1.0536443 
1.0536443 1.0536443 1.661685 1.661685 1.661685 0.65972185 0.97613484 0.97613484 0.97613484 0.7308945 ]] [[1.2404768 1.4212898 1.4212898 1.4212898 1.1098877 2.2986095 2.2986095 2.2986095 0.55456793 1.020285 1.6183271 1.6183271 1.6183271 1.191221 1.191221 ] [1.2404768 1.4212898 1.5318844 1.5318844 2.3056998 2.3056998 2.3056998 2.2986095 0.84782183 1.020285 1.6183271 1.6183271 1.6238935 1.6238935 1.6238935 ] [1.2404768 1.2404768 1.5318844 1.5318844 2.3056998 2.3056998 2.3056998 0.7246044 1.0227374 1.0227374 1.0227374 0.85279983 1.6238935 1.6238935 1.6238935 ] [0.6288165 0.6288165 1.5318844 1.5318844 2.3056998 2.3056998 2.3056998 1.1431109 1.1431109 1.6040876 1.6040876 1.6040876 1.6238935 1.6238935 1.6238935 ] [0.6288165 0.6288165 0.47121018 1.4295715 1.4295715 1.4295715 0.80751085 1.2639023 1.2639023 1.6040876 1.6040876 1.6040876 0.95339835 0.6279626 0.6279626 ] [1.034162 1.034162 1.034162 0.5933973 0.5732487 2.4745388 2.4745388 2.4745388 1.749652 1.6040876 1.6040876 1.6040876 1.444011 1.4894782 1.4894782 ] [1.034162 1.034162 1.034162 0.8203674 0.8203674 2.4745388 2.4745388 2.4745388 1.749652 1.5942469 3.3978088 3.3978088 3.3978088 1.4894782 1.4894782 ] [1.034162 1.034162 1.034162 0.8203674 0.8203674 2.4745388 2.4745388 2.4745388 1.749652 1.5942469 3.3978088 3.3978088 3.3978088 2.104195 2.104195 ] [1.345158 1.345158 1.345158 0.8203674 0.8203674 2.0659876 2.0659876 2.0659876 0.97216696 0.97216696 3.3978088 3.3978088 3.3978088 2.104195 2.104195 ] [1.345158 1.345158 1.345158 0.44989592 1.2784499 2.0659876 2.0659876 2.0659876 1.7248774 1.7248774 1.4124671 1.4124671 1.4124671 2.104195 2.104195 ] [1.345158 1.345158 2.0856805 2.0856805 2.0856805 1.9936913 1.9936913 1.9936913 1.7248774 1.7248774 0.48962858 0.06657137 2.0214596 2.1148734 2.1148734 ] [0.5132385 0.5132385 2.0856805 2.0856805 2.0856805 1.9936913 1.9936913 1.9936913 1.7248774 1.7248774 0.24950351 0.06657137 2.0214596 2.1148734 2.1148734 ] [0.5132385 0.5132385 2.0856805 2.0856805 2.5944731 2.5944731 2.5944731 
0.88005584 0.68750024 0.68750024 0.47356805 1.0179433 2.0214596 2.1148734 2.1148734 ] [0.5132385 0.5955196 0.5955196 0.5955196 2.5944731 2.5944731 2.5944731 0.88005584 0.68750024 0.9308051 0.9308051 1.0179433 1.4648995 1.4648995 1.4648995 ] [0.2538487 0.5955196 0.5955196 0.5955196 2.5944731 2.5944731 2.5944731 0.88005584 0.5936276 0.9308051 0.9308051 1.0179433 1.4648995 1.4648995 1.4648995 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [0, 1]} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5802.aten_max_pool2d, %x.1 : Tensor): %self.ceil_mode : bool = prim::Constant[value=0]() %self.padding : int[] = prim::Constant[value=[0, 1]]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.padding, %self.stride, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[ 1.9459594 1.9459594 1.9459594 1.3521186 1.3521186 1.3521186 1.0419179 1.0419179 1.2857237 1.2857237 1.2857237 1.1955767 1.1955767 1.1301894 0.75172204] [ 1.9459594 1.9459594 1.9459594 2.6910198 2.6910198 2.6910198 0.81046367 0.59260017 1.3656431 1.3656431 1.751889 1.751889 1.751889 1.1301894 0.75172204] [-0.22309896 0.6162441 0.6162441 2.6910198 2.6910198 2.6910198 0.81046367 0.66976595 1.5296645 1.5296645 1.751889 1.751889 1.751889 2.149178 2.149178 ] [ 1.1022474 1.1022474 0.6162441 2.6910198 2.6910198 2.6910198 0.66976595 0.66976595 1.5296645 1.5296645 1.751889 1.751889 1.751889 2.149178 2.149178 ] [ 1.1022474 1.1022474 0.6206798 1.1088114 1.1088114 1.1088114 0.66976595 0.66976595 1.5296645 1.6594336 1.6594336 1.6957117 1.6957117 2.149178 2.149178 ] [ 1.1022474 1.1022474 0.6206798 1.1088114 1.1088114 1.1088114 0.9611073 0.9611073 0.9611073 1.6594336 1.6594336 1.6957117 1.6957117 1.6957117 0.45348194] [ 0.6206798 1.7141947 1.7141947 1.7141947 0.7728094 0.7728094 0.9611073 0.9611073 0.9611073 1.6594336 1.6594336 1.6594336 1.0080137 1.0285944 1.0285944 ] [-0.37195522 1.7141947 1.7141947 1.7141947 1.2239519 1.2239519 0.9611073 0.9611073 0.9611073 0.6114483 0.35485187 0.35485187 1.4389178 1.4389178 1.4389178 ] [ 0.32841536 1.7141947 1.7141947 1.7141947 2.0534298 2.0534298 2.0534298 1.0665016 -0.08484817 1.3793966 1.3793966 1.3793966 1.4389178 1.4389178 1.4389178 ] [ 
0.46454862 0.9587406 1.4331412 1.4331412 2.0534298 2.0534298 2.0534298 1.0665016 2.8643005 2.8643005 2.8643005 1.3793966 1.4389178 1.4389178 1.4389178 ] [ 0.46454862 0.9587406 1.4331412 2.1505623 2.1505623 2.1505623 2.0534298 1.0665016 2.8643005 2.8643005 2.8643005 1.3793966 0.48688003 1.0382749 1.0382749 ] [ 0.46454862 0.9587406 0.9587406 2.1505623 2.1505623 2.1505623 0.96012086 0.96012086 2.8643005 2.8643005 2.8643005 1.4296458 1.4308392 1.4308392 1.4308392 ] [ 0.3583689 0.3583689 0.82553464 2.1505623 2.1505623 2.1505623 1.0521382 1.0521382 1.4038056 1.4038056 1.4095684 1.4296458 1.4308392 1.4308392 1.4308392 ]] [[ 1.1498104 1.1498104 1.1498104 0.47851467 0.5412814 1.0327802 1.0327802 1.0327802 1.0229824 0.6527574 0.47874448 1.7421842 1.7421842 1.7421842 1.3060378 ] [ 1.1498104 1.1498104 1.1498104 0.47851467 0.5412814 1.0327802 1.0327802 1.0327802 1.0229824 0.80375725 0.80375725 0.80375725 1.3060378 1.3060378 1.3060378 ] [ 0.559355 0.88198096 0.88198096 0.88198096 0.38877368 1.0327802 1.0327802 1.0327802 1.0229824 1.2462941 1.2462941 1.2462941 0.70467436 0.70467436 0.70467436] [ 0.68232733 0.88198096 0.88198096 0.88198096 0.37072042 0.37072042 0.9316358 0.9316358 0.9316358 1.2462941 1.5181202 1.5181202 1.5181202 0.8471952 0.8471952 ] [ 0.68232733 0.88198096 0.88198096 0.88198096 0.48734492 0.48734492 1.7952477 1.7952477 1.7952477 1.2462941 1.5181202 1.5181202 1.5181202 0.93213683 0.93213683] [ 0.68232733 0.83477265 0.83477265 0.83477265 0.61840576 0.61840576 1.7952477 1.7952477 1.7952477 0.6826436 1.5181202 1.5181202 1.5181202 1.2054039 1.2054039 ] [ 0.1423934 0.83477265 2.2084963 2.2084963 2.2084963 0.61840576 1.7952477 1.7952477 1.7952477 0.40776327 0.7317415 0.8396863 0.93213683 1.2054039 1.2054039 ] [ 0.7630038 0.83477265 2.2084963 2.2084963 2.2084963 1.6259837 1.6259837 1.6259837 0.7089476 0.5357361 1.4519429 1.4519429 1.4519429 1.2054039 1.2054039 ] [ 0.7630038 0.7630038 2.2084963 2.2084963 2.2084963 1.6259837 1.6259837 1.6259837 0.7089476 0.5357361 
1.4519429 1.4519429 1.4519429 0.7887208 0.7887208 ] [ 0.7630038 0.9407647 1.3790009 1.3790009 1.786033 1.786033 1.786033 1.6259837 0.805066 0.805066 1.4519429 1.4519429 1.4519429 0.7887208 0.7887208 ] [ 0.28719234 0.9407647 0.9407647 1.2012423 1.786033 1.786033 1.786033 1.0842737 2.4731505 2.480733 2.480733 2.480733 0.94782335 0.7887208 0.7887208 ] [ 0.28719234 1.8659647 1.8659647 1.8659647 1.786033 1.786033 1.786033 1.0842737 2.4731505 2.480733 2.480733 2.480733 0.94782335 0.72075176 0.6243851 ] [ 0.33639032 1.8659647 1.997293 1.997293 1.997293 1.9610066 1.9610066 1.9610066 2.4731505 2.480733 2.480733 2.480733 0.94782335 0.72075176 0.6243851 ]] [[ 1.9471985 1.9471985 1.7934021 1.7934021 1.7934021 1.4017311 1.4017311 1.4017311 2.5489645 2.5489645 2.5489645 1.775401 1.775401 1.775401 1.5897977 ] [ 1.9471985 2.3789742 2.3789742 2.3789742 1.7934021 1.4017311 1.4017311 1.4017311 1.3725512 1.3725512 2.156312 2.156312 2.156312 1.9814068 0.8070812 ] [ 1.9471985 2.3789742 2.3789742 2.3789742 1.5142977 1.5142977 1.4017311 1.4017311 1.2010365 1.1357058 2.156312 2.156312 2.156312 1.9814068 0.35297355] [ 1.5522153 2.3789742 2.3789742 2.3789742 1.5142977 1.5142977 1.2010365 1.2010365 1.2010365 1.1357058 2.156312 2.156312 2.156312 1.9814068 1.2754506 ] [ 2.2393982 2.2393982 2.6345937 2.6345937 2.6345937 1.5142977 1.0509859 1.0509859 1.1517466 1.1517466 1.1517466 1.19006 1.2754506 1.2754506 1.2754506 ] [ 2.2393982 2.2393982 2.6345937 2.6345937 2.6345937 1.0509859 2.1065054 2.1065054 2.1065054 1.1517466 1.1517466 1.19006 1.2754506 1.2754506 1.2754506 ] [ 2.2393982 2.2393982 2.6345937 2.6345937 2.6345937 0.2911263 2.1065054 2.1065054 2.1065054 1.7196879 1.7196879 1.6208543 1.6208543 2.1133485 2.1133485 ] [ 1.3381071 1.3381071 1.3516146 1.3858733 1.3858733 1.3858733 2.1065054 2.1065054 2.1065054 1.7196879 1.7196879 1.6208543 1.6208543 2.1133485 2.1133485 ] [ 0.92303854 1.2138201 1.3516146 1.3858733 1.3858733 1.3858733 0.77227926 1.3833573 1.7196879 1.7196879 1.7196879 1.6208543 
1.6208543 2.1133485 2.1133485 ] [-0.1036746 1.8280966 1.8280966 1.8280966 1.3858733 1.3858733 0.77227926 1.3833573 1.3833573 1.3833573 0.71291614 0.71291614 0.71291614 0.89126927 0.89126927] [ 1.0212219 1.8280966 1.8280966 1.8280966 0.7556253 0.7556253 0.7313765 1.3833573 1.3833573 1.3833573 1.3113006 1.2503794 0.48244682 0.89126927 0.89126927] [ 1.0212219 1.8280966 1.8280966 1.8280966 0.85539985 0.85539985 0.85539985 1.0027701 1.3113006 1.316714 1.316714 1.7854719 1.7854719 1.7854719 0.8644098 ] [ 1.0212219 1.0212219 1.0212219 0.95990056 0.95990056 0.95990056 0.85539985 1.1502078 1.3113006 1.316714 1.316714 1.7854719 1.7854719 1.7854719 1.7790747 ]]]]; ov_res: [[[[ 1.9459594 1.9459594 1.9459594 1.3521186 1.3521186 1.3521186 1.0419179 1.0419179 1.2857237 1.2857237 1.2857237 1.1955767 1.1955767 1.1301894 0.75172204] [ 1.9459594 1.9459594 1.9459594 2.6910198 2.6910198 2.6910198 0.81046367 0.59260017 1.3656431 1.3656431 1.751889 1.751889 1.751889 1.1301894 0.75172204] [-0.22309896 0.6162441 0.6162441 2.6910198 2.6910198 2.6910198 0.81046367 0.66976595 1.5296645 1.5296645 1.751889 1.751889 1.751889 2.149178 2.149178 ] [ 1.1022474 1.1022474 0.6162441 2.6910198 2.6910198 2.6910198 0.66976595 0.66976595 1.5296645 1.5296645 1.751889 1.751889 1.751889 2.149178 2.149178 ] [ 1.1022474 1.1022474 0.6206798 1.1088114 1.1088114 1.1088114 0.66976595 0.66976595 1.5296645 1.6594336 1.6594336 1.6957117 1.6957117 2.149178 2.149178 ] [ 1.1022474 1.1022474 0.6206798 1.1088114 1.1088114 1.1088114 0.9611073 0.9611073 0.9611073 1.6594336 1.6594336 1.6957117 1.6957117 1.6957117 0.45348194] [ 0.6206798 1.7141947 1.7141947 1.7141947 0.7728094 0.7728094 0.9611073 0.9611073 0.9611073 1.6594336 1.6594336 1.6594336 1.0080137 1.0285944 1.0285944 ] [-0.37195522 1.7141947 1.7141947 1.7141947 1.2239519 1.2239519 0.9611073 0.9611073 0.9611073 0.6114483 0.35485187 0.35485187 1.4389178 1.4389178 1.4389178 ] [ 0.32841536 1.7141947 1.7141947 1.7141947 2.0534298 2.0534298 2.0534298 1.0665016 -0.08484817 
1.3793966 1.3793966 1.3793966 1.4389178 1.4389178 1.4389178 ] [ 0.46454862 0.9587406 1.4331412 1.4331412 2.0534298 2.0534298 2.0534298 1.0665016 2.8643005 2.8643005 2.8643005 1.3793966 1.4389178 1.4389178 1.4389178 ] [ 0.46454862 0.9587406 1.4331412 2.1505623 2.1505623 2.1505623 2.0534298 1.0665016 2.8643005 2.8643005 2.8643005 1.3793966 0.48688003 1.0382749 1.0382749 ] [ 0.46454862 0.9587406 0.9587406 2.1505623 2.1505623 2.1505623 0.96012086 0.96012086 2.8643005 2.8643005 2.8643005 1.4296458 1.4308392 1.4308392 1.4308392 ] [ 0.3583689 0.3583689 0.82553464 2.1505623 2.1505623 2.1505623 1.0521382 1.0521382 1.4038056 1.4038056 1.4095684 1.4296458 1.4308392 1.4308392 1.4308392 ]] [[ 1.1498104 1.1498104 1.1498104 0.47851467 0.5412814 1.0327802 1.0327802 1.0327802 1.0229824 0.6527574 0.47874448 1.7421842 1.7421842 1.7421842 1.3060378 ] [ 1.1498104 1.1498104 1.1498104 0.47851467 0.5412814 1.0327802 1.0327802 1.0327802 1.0229824 0.80375725 0.80375725 0.80375725 1.3060378 1.3060378 1.3060378 ] [ 0.559355 0.88198096 0.88198096 0.88198096 0.38877368 1.0327802 1.0327802 1.0327802 1.0229824 1.2462941 1.2462941 1.2462941 0.70467436 0.70467436 0.70467436] [ 0.68232733 0.88198096 0.88198096 0.88198096 0.37072042 0.37072042 0.9316358 0.9316358 0.9316358 1.2462941 1.5181202 1.5181202 1.5181202 0.8471952 0.8471952 ] [ 0.68232733 0.88198096 0.88198096 0.88198096 0.48734492 0.48734492 1.7952477 1.7952477 1.7952477 1.2462941 1.5181202 1.5181202 1.5181202 0.93213683 0.93213683] [ 0.68232733 0.83477265 0.83477265 0.83477265 0.61840576 0.61840576 1.7952477 1.7952477 1.7952477 0.6826436 1.5181202 1.5181202 1.5181202 1.2054039 1.2054039 ] [ 0.1423934 0.83477265 2.2084963 2.2084963 2.2084963 0.61840576 1.7952477 1.7952477 1.7952477 0.40776327 0.7317415 0.8396863 0.93213683 1.2054039 1.2054039 ] [ 0.7630038 0.83477265 2.2084963 2.2084963 2.2084963 1.6259837 1.6259837 1.6259837 0.7089476 0.5357361 1.4519429 1.4519429 1.4519429 1.2054039 1.2054039 ] [ 0.7630038 0.7630038 2.2084963 2.2084963 
2.2084963 1.6259837 1.6259837 1.6259837 0.7089476 0.5357361 1.4519429 1.4519429 1.4519429 0.7887208 0.7887208 ] [ 0.7630038 0.9407647 1.3790009 1.3790009 1.786033 1.786033 1.786033 1.6259837 0.805066 0.805066 1.4519429 1.4519429 1.4519429 0.7887208 0.7887208 ] [ 0.28719234 0.9407647 0.9407647 1.2012423 1.786033 1.786033 1.786033 1.0842737 2.4731505 2.480733 2.480733 2.480733 0.94782335 0.7887208 0.7887208 ] [ 0.28719234 1.8659647 1.8659647 1.8659647 1.786033 1.786033 1.786033 1.0842737 2.4731505 2.480733 2.480733 2.480733 0.94782335 0.72075176 0.6243851 ] [ 0.33639032 1.8659647 1.997293 1.997293 1.997293 1.9610066 1.9610066 1.9610066 2.4731505 2.480733 2.480733 2.480733 0.94782335 0.72075176 0.6243851 ]] [[ 1.9471985 1.9471985 1.7934021 1.7934021 1.7934021 1.4017311 1.4017311 1.4017311 2.5489645 2.5489645 2.5489645 1.775401 1.775401 1.775401 1.5897977 ] [ 1.9471985 2.3789742 2.3789742 2.3789742 1.7934021 1.4017311 1.4017311 1.4017311 1.3725512 1.3725512 2.156312 2.156312 2.156312 1.9814068 0.8070812 ] [ 1.9471985 2.3789742 2.3789742 2.3789742 1.5142977 1.5142977 1.4017311 1.4017311 1.2010365 1.1357058 2.156312 2.156312 2.156312 1.9814068 0.35297355] [ 1.5522153 2.3789742 2.3789742 2.3789742 1.5142977 1.5142977 1.2010365 1.2010365 1.2010365 1.1357058 2.156312 2.156312 2.156312 1.9814068 1.2754506 ] [ 2.2393982 2.2393982 2.6345937 2.6345937 2.6345937 1.5142977 1.0509859 1.0509859 1.1517466 1.1517466 1.1517466 1.19006 1.2754506 1.2754506 1.2754506 ] [ 2.2393982 2.2393982 2.6345937 2.6345937 2.6345937 1.0509859 2.1065054 2.1065054 2.1065054 1.1517466 1.1517466 1.19006 1.2754506 1.2754506 1.2754506 ] [ 2.2393982 2.2393982 2.6345937 2.6345937 2.6345937 0.2911263 2.1065054 2.1065054 2.1065054 1.7196879 1.7196879 1.6208543 1.6208543 2.1133485 2.1133485 ] [ 1.3381071 1.3381071 1.3516146 1.3858733 1.3858733 1.3858733 2.1065054 2.1065054 2.1065054 1.7196879 1.7196879 1.6208543 1.6208543 2.1133485 2.1133485 ] [ 0.92303854 1.2138201 1.3516146 1.3858733 1.3858733 1.3858733 
0.77227926 1.3833573 1.7196879 1.7196879 1.7196879 1.6208543 1.6208543 2.1133485 2.1133485 ] [-0.1036746 1.8280966 1.8280966 1.8280966 1.3858733 1.3858733 0.77227926 1.3833573 1.3833573 1.3833573 0.71291614 0.71291614 0.71291614 0.89126927 0.89126927] [ 1.0212219 1.8280966 1.8280966 1.8280966 0.7556253 0.7556253 0.7313765 1.3833573 1.3833573 1.3833573 1.3113006 1.2503794 0.48244682 0.89126927 0.89126927] [ 1.0212219 1.8280966 1.8280966 1.8280966 0.85539985 0.85539985 0.85539985 1.0027701 1.3113006 1.316714 1.316714 1.7854719 1.7854719 1.7854719 0.8644098 ] [ 1.0212219 1.0212219 1.0212219 0.95990056 0.95990056 0.95990056 0.85539985 1.1502078 1.3113006 1.316714 1.316714 1.7854719 1.7854719 1.7854719 1.7790747 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [1, 0]} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5804.aten_max_pool2d, %x.1 : Tensor): %self.ceil_mode : bool = prim::Constant[value=0]() %self.padding : int[] = prim::Constant[value=[1, 0]]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.padding, %self.stride, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[2.6532881 2.6532881 2.6532881 1.6115872 1.6115872 1.6115872 0.6715537 0.5725379 0.44232503 0.9721227 0.9721227 0.9721227 0.9008855 ] [2.6532881 2.6532881 2.6532881 2.5021966 1.6115872 1.6115872 1.6039748 1.6039748 1.6039748 0.9721227 0.9721227 0.9721227 0.9008855 ] [2.2646039 2.5021966 2.5021966 2.5021966 0.51065665 0.5725379 1.6039748 1.6039748 1.6039748 1.3664806 1.3664806 0.9721227 0.9008855 ] [2.2646039 2.5021966 2.5021966 2.5021966 1.1029582 1.5621341 1.6039748 1.6039748 1.6039748 1.3664806 1.3664806 1.762633 1.762633 ] [1.7967814 1.7967814 1.7967814 1.1029582 1.1029582 1.5621341 1.5621341 1.5621341 1.3664806 1.5831168 1.5831168 1.762633 1.762633 ] [1.4450611 1.3097222 1.3097222 1.3097222 1.1029582 1.5621341 1.5621341 1.924344 1.924344 1.924344 1.6067704 1.762633 1.762633 ] [1.4450611 1.3097222 1.3097222 1.3097222 1.0185013 1.0185013 1.1148176 1.924344 1.924344 1.924344 1.6067704 1.6067704 0.6898969 ] [2.4795873 2.4795873 1.9984472 1.9984472 1.0185013 1.0185013 1.1148176 1.924344 1.924344 1.924344 1.6067704 1.6067704 0.6898969 ] [2.4795873 2.4795873 1.9984472 1.9984472 0.57953143 1.0051771 1.1148176 1.1148176 1.1148176 0.7716817 0.7716817 0.8269574 0.8269574 ] [2.4795873 2.4795873 1.9984472 1.9984472 0.8033927 1.3005427 1.3005427 1.3005427 0.7572247 0.7572247 0.7572247 0.8269574 0.8269574 ] [0.8677425 0.97301525 0.97301525 0.97301525 0.8033927 1.3005427 
1.3005427 1.3005427 0.7572247 0.7572247 0.7572247 1.1718863 2.0784879 ] [0.7767668 0.97301525 0.97301525 0.97301525 1.6167352 1.6167352 1.6167352 1.3005427 0.75837266 0.7572247 0.8123849 1.1718863 2.0784879 ] [0.7767668 0.97301525 0.97301525 1.1020857 1.6167352 1.6167352 1.6167352 0.75837266 0.75837266 0.5891591 0.8123849 1.1718863 2.0784879 ] [3.9761956 3.9761956 2.4839218 2.4839218 1.7561028 1.7561028 1.6167352 0.75837266 0.75837266 0.49370944 2.1893284 2.1893284 2.1893284 ] [3.9761956 3.9761956 2.4839218 2.4839218 1.7561028 1.7561028 0.7734703 0.1441502 0.1441502 0.49370944 2.1893284 2.1893284 2.1893284 ]] [[0.8464866 0.8464866 1.2690285 1.3918515 1.3918515 1.3918515 0.575884 0.575884 0.42401052 0.36841452 1.5946181 1.5946181 1.5946181 ] [0.8464866 0.8464866 1.2690285 1.3918515 1.3918515 1.3918515 0.575884 0.575884 0.42401052 0.36841452 1.5946181 1.5946181 1.5946181 ] [0.87455404 0.87455404 1.4102715 1.4102715 1.4102715 1.3918515 0.575884 0.67334944 0.9760482 0.9760482 1.3727765 1.464035 1.464035 ] [2.4987245 2.4987245 1.6268175 1.6268175 1.99161 1.99161 1.99161 1.833381 1.833381 1.833381 1.4762827 1.3727765 1.3727765 ] [2.4987245 2.4987245 1.6268175 1.7687298 1.99161 1.99161 1.99161 1.833381 1.833381 1.833381 1.4762827 1.3771701 1.3771701 ] [2.4987245 2.4987245 1.6268175 1.7687298 1.99161 1.99161 1.99161 1.833381 1.833381 1.833381 1.4762827 1.3771701 1.3771701 ] [1.0381316 1.0381316 0.17095016 1.7687298 1.7687298 2.2856247 2.2856247 2.2856247 1.3394696 1.8711307 1.8711307 1.8711307 1.4776433 ] [1.5489715 1.5489715 1.5489715 0.74624103 0.6735423 2.2856247 2.2856247 2.2856247 1.6092403 1.8711307 1.8711307 1.8711307 1.4776433 ] [2.5739396 1.5489715 1.5489715 1.2268251 1.2268251 2.2856247 2.2856247 2.2856247 1.6092403 1.8711307 1.8711307 1.8711307 1.4776433 ] [2.5739396 2.1991725 2.1991725 1.2268251 1.2268251 1.2268251 0.9346339 1.1502577 1.6092403 1.6092403 1.6092403 1.3692012 1.3692012 ] [2.5739396 2.1991725 2.1991725 1.6548057 1.6548057 1.6548057 0.9346339 
2.1303632 2.1303632 2.1303632 1.3692012 1.3692012 1.3692012 ] [2.1991725 2.1991725 2.1991725 1.6548057 1.6548057 1.6548057 0.90302527 2.1303632 2.1303632 2.1303632 1.3692012 1.3692012 1.3692012 ] [1.3226193 1.3226193 1.3226193 1.6548057 1.6548057 1.9149512 1.9149512 2.1303632 2.1303632 2.1303632 1.3641007 1.3641007 1.2730433 ] [1.3226193 1.3226193 1.3226193 1.409572 1.409572 1.9149512 1.9149512 1.9149512 0.73744947 1.3641007 1.3641007 1.5201795 1.5201795 ] [1.3226193 1.3226193 1.3226193 1.409572 1.409572 1.9149512 1.9149512 1.9149512 0.4024677 1.3641007 1.3641007 1.5201795 1.5201795 ]] [[2.028389 2.028389 0.6635923 1.9650403 1.9650403 2.5680437 2.5680437 2.5680437 1.0709248 0.29069024 0.29069024 0.23756482 0.25228453] [2.028389 2.028389 1.4034222 1.9650403 1.9650403 2.5680437 2.5680437 2.5680437 1.2959611 1.2959611 0.29069024 1.277588 1.277588 ] [2.028389 2.028389 1.4034222 1.9650403 1.9650403 2.5680437 2.5680437 2.5680437 1.2959611 1.2959611 0.6326734 1.277588 1.277588 ] [1.7686814 1.4034222 1.4034222 0.9757887 0.9757887 0.445532 0.80973566 1.3194803 1.3194803 1.3194803 0.6326734 1.277588 1.277588 ] [1.7686814 1.5927513 1.5927513 1.5927513 0.9757887 0.445532 0.445532 1.3194803 1.3194803 1.3194803 0.8103632 0.8103632 1.2243927 ] [0.92209995 1.5927513 1.5927513 1.5927513 1.3233067 1.3233067 1.3233067 1.800147 1.800147 1.800147 1.080903 1.080903 1.080903 ] [1.8856946 1.5927513 1.5927513 1.5927513 1.3233067 1.3233067 1.3233067 1.800147 1.800147 1.800147 1.080903 1.080903 1.080903 ] [1.8856946 1.3691536 1.3691536 1.0147135 1.3233067 1.3233067 1.3233067 1.800147 1.800147 1.800147 1.3454719 1.3454719 1.3454719 ] [1.8856946 1.3691536 1.3691536 0.61655873 0.61655873 1.2998823 1.2998823 1.2998823 0.9785522 0.9785522 1.3454719 1.3454719 1.3454719 ] [0.74740654 0.74740654 0.74740654 0.61655873 0.61655873 0.7432705 0.7432705 1.8220317 1.8220317 1.8220317 1.3454719 1.3454719 1.3454719 ] [0.7102422 0.9701328 0.9701328 0.97873276 0.97873276 0.97873276 0.7432705 1.8220317 
1.8220317 1.8220317 1.6208217 1.6208217 1.6208217 ] [1.6592454 1.6592454 2.1139045 2.1139045 2.1139045 0.97873276 0.1567587 1.8220317 1.8220317 1.8220317 1.6208217 1.6208217 1.6208217 ] [1.6592454 1.6592454 2.1139045 2.1139045 2.1139045 1.868582 1.868582 1.0929406 1.0929406 1.0929406 1.6208217 1.6208217 1.6208217 ] [1.6592454 1.6592454 2.1139045 2.1139045 2.1139045 1.868582 1.868582 1.0929406 1.3948348 1.3948348 1.3948348 0.8496039 0.8496039 ] [1.1191931 1.4429306 1.4429306 1.4429306 1.868582 1.868582 1.868582 1.0745685 1.3948348 1.3948348 1.3948348 0.6064637 0.6064637 ]]]]; ov_res: [[[[2.6532881 2.6532881 2.6532881 1.6115872 1.6115872 1.6115872 0.6715537 0.5725379 0.44232503 0.9721227 0.9721227 0.9721227 0.9008855 ] [2.6532881 2.6532881 2.6532881 2.5021966 1.6115872 1.6115872 1.6039748 1.6039748 1.6039748 0.9721227 0.9721227 0.9721227 0.9008855 ] [2.2646039 2.5021966 2.5021966 2.5021966 0.51065665 0.5725379 1.6039748 1.6039748 1.6039748 1.3664806 1.3664806 0.9721227 0.9008855 ] [2.2646039 2.5021966 2.5021966 2.5021966 1.1029582 1.5621341 1.6039748 1.6039748 1.6039748 1.3664806 1.3664806 1.762633 1.762633 ] [1.7967814 1.7967814 1.7967814 1.1029582 1.1029582 1.5621341 1.5621341 1.5621341 1.3664806 1.5831168 1.5831168 1.762633 1.762633 ] [1.4450611 1.3097222 1.3097222 1.3097222 1.1029582 1.5621341 1.5621341 1.924344 1.924344 1.924344 1.6067704 1.762633 1.762633 ] [1.4450611 1.3097222 1.3097222 1.3097222 1.0185013 1.0185013 1.1148176 1.924344 1.924344 1.924344 1.6067704 1.6067704 0.6898969 ] [2.4795873 2.4795873 1.9984472 1.9984472 1.0185013 1.0185013 1.1148176 1.924344 1.924344 1.924344 1.6067704 1.6067704 0.6898969 ] [2.4795873 2.4795873 1.9984472 1.9984472 0.57953143 1.0051771 1.1148176 1.1148176 1.1148176 0.7716817 0.7716817 0.8269574 0.8269574 ] [2.4795873 2.4795873 1.9984472 1.9984472 0.8033927 1.3005427 1.3005427 1.3005427 0.7572247 0.7572247 0.7572247 0.8269574 0.8269574 ] [0.8677425 0.97301525 0.97301525 0.97301525 0.8033927 1.3005427 1.3005427 1.3005427 
0.7572247 0.7572247 0.7572247 1.1718863 2.0784879 ] [0.7767668 0.97301525 0.97301525 0.97301525 1.6167352 1.6167352 1.6167352 1.3005427 0.75837266 0.7572247 0.8123849 1.1718863 2.0784879 ] [0.7767668 0.97301525 0.97301525 1.1020857 1.6167352 1.6167352 1.6167352 0.75837266 0.75837266 0.5891591 0.8123849 1.1718863 2.0784879 ] [3.9761956 3.9761956 2.4839218 2.4839218 1.7561028 1.7561028 1.6167352 0.75837266 0.75837266 0.49370944 2.1893284 2.1893284 2.1893284 ] [3.9761956 3.9761956 2.4839218 2.4839218 1.7561028 1.7561028 0.7734703 0.1441502 0.1441502 0.49370944 2.1893284 2.1893284 2.1893284 ]] [[0.8464866 0.8464866 1.2690285 1.3918515 1.3918515 1.3918515 0.575884 0.575884 0.42401052 0.36841452 1.5946181 1.5946181 1.5946181 ] [0.8464866 0.8464866 1.2690285 1.3918515 1.3918515 1.3918515 0.575884 0.575884 0.42401052 0.36841452 1.5946181 1.5946181 1.5946181 ] [0.87455404 0.87455404 1.4102715 1.4102715 1.4102715 1.3918515 0.575884 0.67334944 0.9760482 0.9760482 1.3727765 1.464035 1.464035 ] [2.4987245 2.4987245 1.6268175 1.6268175 1.99161 1.99161 1.99161 1.833381 1.833381 1.833381 1.4762827 1.3727765 1.3727765 ] [2.4987245 2.4987245 1.6268175 1.7687298 1.99161 1.99161 1.99161 1.833381 1.833381 1.833381 1.4762827 1.3771701 1.3771701 ] [2.4987245 2.4987245 1.6268175 1.7687298 1.99161 1.99161 1.99161 1.833381 1.833381 1.833381 1.4762827 1.3771701 1.3771701 ] [1.0381316 1.0381316 0.17095016 1.7687298 1.7687298 2.2856247 2.2856247 2.2856247 1.3394696 1.8711307 1.8711307 1.8711307 1.4776433 ] [1.5489715 1.5489715 1.5489715 0.74624103 0.6735423 2.2856247 2.2856247 2.2856247 1.6092403 1.8711307 1.8711307 1.8711307 1.4776433 ] [2.5739396 1.5489715 1.5489715 1.2268251 1.2268251 2.2856247 2.2856247 2.2856247 1.6092403 1.8711307 1.8711307 1.8711307 1.4776433 ] [2.5739396 2.1991725 2.1991725 1.2268251 1.2268251 1.2268251 0.9346339 1.1502577 1.6092403 1.6092403 1.6092403 1.3692012 1.3692012 ] [2.5739396 2.1991725 2.1991725 1.6548057 1.6548057 1.6548057 0.9346339 2.1303632 2.1303632 
2.1303632 1.3692012 1.3692012 1.3692012 ] [2.1991725 2.1991725 2.1991725 1.6548057 1.6548057 1.6548057 0.90302527 2.1303632 2.1303632 2.1303632 1.3692012 1.3692012 1.3692012 ] [1.3226193 1.3226193 1.3226193 1.6548057 1.6548057 1.9149512 1.9149512 2.1303632 2.1303632 2.1303632 1.3641007 1.3641007 1.2730433 ] [1.3226193 1.3226193 1.3226193 1.409572 1.409572 1.9149512 1.9149512 1.9149512 0.73744947 1.3641007 1.3641007 1.5201795 1.5201795 ] [1.3226193 1.3226193 1.3226193 1.409572 1.409572 1.9149512 1.9149512 1.9149512 0.4024677 1.3641007 1.3641007 1.5201795 1.5201795 ]] [[2.028389 2.028389 0.6635923 1.9650403 1.9650403 2.5680437 2.5680437 2.5680437 1.0709248 0.29069024 0.29069024 0.23756482 0.25228453] [2.028389 2.028389 1.4034222 1.9650403 1.9650403 2.5680437 2.5680437 2.5680437 1.2959611 1.2959611 0.29069024 1.277588 1.277588 ] [2.028389 2.028389 1.4034222 1.9650403 1.9650403 2.5680437 2.5680437 2.5680437 1.2959611 1.2959611 0.6326734 1.277588 1.277588 ] [1.7686814 1.4034222 1.4034222 0.9757887 0.9757887 0.445532 0.80973566 1.3194803 1.3194803 1.3194803 0.6326734 1.277588 1.277588 ] [1.7686814 1.5927513 1.5927513 1.5927513 0.9757887 0.445532 0.445532 1.3194803 1.3194803 1.3194803 0.8103632 0.8103632 1.2243927 ] [0.92209995 1.5927513 1.5927513 1.5927513 1.3233067 1.3233067 1.3233067 1.800147 1.800147 1.800147 1.080903 1.080903 1.080903 ] [1.8856946 1.5927513 1.5927513 1.5927513 1.3233067 1.3233067 1.3233067 1.800147 1.800147 1.800147 1.080903 1.080903 1.080903 ] [1.8856946 1.3691536 1.3691536 1.0147135 1.3233067 1.3233067 1.3233067 1.800147 1.800147 1.800147 1.3454719 1.3454719 1.3454719 ] [1.8856946 1.3691536 1.3691536 0.61655873 0.61655873 1.2998823 1.2998823 1.2998823 0.9785522 0.9785522 1.3454719 1.3454719 1.3454719 ] [0.74740654 0.74740654 0.74740654 0.61655873 0.61655873 0.7432705 0.7432705 1.8220317 1.8220317 1.8220317 1.3454719 1.3454719 1.3454719 ] [0.7102422 0.9701328 0.9701328 0.97873276 0.97873276 0.97873276 0.7432705 1.8220317 1.8220317 1.8220317 
1.6208217 1.6208217 1.6208217 ] [1.6592454 1.6592454 2.1139045 2.1139045 2.1139045 0.97873276 0.1567587 1.8220317 1.8220317 1.8220317 1.6208217 1.6208217 1.6208217 ] [1.6592454 1.6592454 2.1139045 2.1139045 2.1139045 1.868582 1.868582 1.0929406 1.0929406 1.0929406 1.6208217 1.6208217 1.6208217 ] [1.6592454 1.6592454 2.1139045 2.1139045 2.1139045 1.868582 1.868582 1.0929406 1.3948348 1.3948348 1.3948348 0.8496039 0.8496039 ] [1.1191931 1.4429306 1.4429306 1.4429306 1.868582 1.868582 1.868582 1.0745685 1.3948348 1.3948348 1.3948348 0.6064637 0.6064637 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5806.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%7) fw_re: [[[[1.3894391 1.0784014 1.1881907 1.9766675 1.9766675 1.9766675 1.7552309 1.7552309 1.7552309 1.6203696 0.16437495 1.4871709 1.4871709 ] [1.1499629 1.0739348 1.1881907 1.1881907 1.1881907 0.95833826 0.95833826 1.3505677 1.3505677 1.3505677 0.60597134 1.4871709 1.4871709 ] [1.1499629 1.0739348 1.0739348 1.0739348 1.0593201 0.95833826 1.4599746 1.5854905 1.7521663 1.7521663 1.7521663 1.2171452 1.3903302 ] [1.2660034 1.2660034 1.2660034 1.0026389 0.2734691 0.80271065 0.80271065 1.5854905 1.7521663 1.7521663 1.7521663 1.5801421 1.5801421 ] [0.5642649 1.2179433 2.4249353 2.4249353 2.4249353 0.81031686 0.82580125 1.2081081 1.2081081 1.2081081 1.1244556 1.1244556 0.44567087] [1.5039669 1.5039669 2.4249353 2.4249353 2.4249353 0.81031686 0.81031686 0.81031686 1.7237711 1.7237711 1.7237711 0.42636865 0.42636865] [1.7233303 1.5039669 1.5039669 1.6676557 1.6676557 1.6676557 1.3290582 1.6596235 1.6596235 1.6596235 1.5527688 0.12951593 0.18359192]] [[1.0420083 1.6847314 1.6847314 1.6847314 0.9908189 2.9012487 2.9012487 2.9012487 1.0227908 0.44366285 1.7012659 1.8490245 1.8490245 ] [0.5510057 1.6847314 1.6862495 1.6862495 1.6862495 1.5114623 1.0304328 1.0834821 1.0834821 1.0834821 0.95243484 1.2769712 1.2769712 ] [0.6482236 2.602799 2.602799 2.602799 1.6862495 0.9480372 1.0304328 1.0834821 1.0834821 1.0834821 1.0543733 1.2769712 1.2769712 ] [0.7431665 2.602799 2.602799 
2.602799 1.1765177 1.1765177 1.1765177 1.0079366 1.7749385 1.7749385 1.7749385 1.0165918 0.7964425 ] [1.6444459 1.6444459 1.6444459 0.73048675 1.4971988 1.4971988 1.4971988 1.443739 0.7292355 0.7292355 0.7292355 0.62899923 2.2554324 ] [1.6687887 1.6687887 1.6444459 1.4324603 1.4324603 1.443739 1.443739 1.443739 1.3409487 1.3409487 0.7555519 1.1603011 2.2554324 ] [1.6687887 1.6687887 1.3286301 1.3286301 0.6330388 1.7891232 1.7891232 1.7891232 1.3684337 1.3684337 0.7555519 1.1603011 1.1603011 ]] [[1.1067594 1.1067594 0.4283086 0.3946825 0.75686306 0.75686306 0.75686306 0.8922985 1.0265918 1.0265918 1.0265918 0.9459403 0.9459403 ] [0.98144364 1.6872386 1.6872386 1.9278166 1.9278166 1.9596026 1.9596026 1.9596026 1.0265918 1.1947587 1.1947587 1.1947587 0.37884706] [1.3781737 1.3781737 0.91665685 0.91665685 0.252998 0.5483962 1.6936283 1.6936283 1.6936283 0.81886756 0.81886756 0.7106249 0.5818363 ] [1.6583518 1.6583518 0.91665685 1.4263762 1.4263762 1.4263762 1.6936283 1.6936283 1.6936283 1.3459843 1.3459843 1.3459843 1.0121491 ] [3.3456008 1.6524935 2.5029702 2.5029702 2.5029702 0.12622018 1.601639 1.601639 1.601639 1.4187247 1.4187247 1.3459843 1.0121491 ] [3.3456008 1.6858734 1.6858734 1.6524935 0.6343548 1.2073622 1.2073622 3.3950698 3.3950698 3.3950698 1.4187247 1.1242545 1.5071733 ] [1.6314331 1.6314331 1.8422265 1.8422265 1.8422265 0.71608657 0.7984121 3.3950698 3.3950698 3.3950698 1.2046587 1.2046587 1.5071733 ]]]]; ov_res: [[[[1.3894391 1.0784014 1.1881907 1.9766675 1.9766675 1.9766675 1.7552309 1.7552309 1.7552309 1.6203696 0.16437495 1.4871709 1.4871709 ] [1.1499629 1.0739348 1.1881907 1.1881907 1.1881907 0.95833826 0.95833826 1.3505677 1.3505677 1.3505677 0.60597134 1.4871709 1.4871709 ] [1.1499629 1.0739348 1.0739348 1.0739348 1.0593201 0.95833826 1.4599746 1.5854905 1.7521663 1.7521663 1.7521663 1.2171452 1.3903302 ] [1.2660034 1.2660034 1.2660034 1.0026389 0.2734691 0.80271065 0.80271065 1.5854905 1.7521663 1.7521663 1.7521663 1.5801421 1.5801421 ] 
[0.5642649 1.2179433 2.4249353 2.4249353 2.4249353 0.81031686 0.82580125 1.2081081 1.2081081 1.2081081 1.1244556 1.1244556 0.44567087] [1.5039669 1.5039669 2.4249353 2.4249353 2.4249353 0.81031686 0.81031686 0.81031686 1.7237711 1.7237711 1.7237711 0.42636865 0.42636865] [1.7233303 1.5039669 1.5039669 1.6676557 1.6676557 1.6676557 1.3290582 1.6596235 1.6596235 1.6596235 1.5527688 0.12951593 0.18359192]] [[1.0420083 1.6847314 1.6847314 1.6847314 0.9908189 2.9012487 2.9012487 2.9012487 1.0227908 0.44366285 1.7012659 1.8490245 1.8490245 ] [0.5510057 1.6847314 1.6862495 1.6862495 1.6862495 1.5114623 1.0304328 1.0834821 1.0834821 1.0834821 0.95243484 1.2769712 1.2769712 ] [0.6482236 2.602799 2.602799 2.602799 1.6862495 0.9480372 1.0304328 1.0834821 1.0834821 1.0834821 1.0543733 1.2769712 1.2769712 ] [0.7431665 2.602799 2.602799 2.602799 1.1765177 1.1765177 1.1765177 1.0079366 1.7749385 1.7749385 1.7749385 1.0165918 0.7964425 ] [1.6444459 1.6444459 1.6444459 0.73048675 1.4971988 1.4971988 1.4971988 1.443739 0.7292355 0.7292355 0.7292355 0.62899923 2.2554324 ] [1.6687887 1.6687887 1.6444459 1.4324603 1.4324603 1.443739 1.443739 1.443739 1.3409487 1.3409487 0.7555519 1.1603011 2.2554324 ] [1.6687887 1.6687887 1.3286301 1.3286301 0.6330388 1.7891232 1.7891232 1.7891232 1.3684337 1.3684337 0.7555519 1.1603011 1.1603011 ]] [[1.1067594 1.1067594 0.4283086 0.3946825 0.75686306 0.75686306 0.75686306 0.8922985 1.0265918 1.0265918 1.0265918 0.9459403 0.9459403 ] [0.98144364 1.6872386 1.6872386 1.9278166 1.9278166 1.9596026 1.9596026 1.9596026 1.0265918 1.1947587 1.1947587 1.1947587 0.37884706] [1.3781737 1.3781737 0.91665685 0.91665685 0.252998 0.5483962 1.6936283 1.6936283 1.6936283 0.81886756 0.81886756 0.7106249 0.5818363 ] [1.6583518 1.6583518 0.91665685 1.4263762 1.4263762 1.4263762 1.6936283 1.6936283 1.6936283 1.3459843 1.3459843 1.3459843 1.0121491 ] [3.3456008 1.6524935 2.5029702 2.5029702 2.5029702 0.12622018 1.601639 1.601639 1.601639 1.4187247 1.4187247 1.3459843 
1.0121491 ] [3.3456008 1.6858734 1.6858734 1.6524935 0.6343548 1.2073622 1.2073622 3.3950698 3.3950698 3.3950698 1.4187247 1.1242545 1.5071733 ] [1.6314331 1.6314331 1.8422265 1.8422265 1.8422265 0.71608657 0.7984121 3.3950698 3.3950698 3.3950698 1.2046587 1.2046587 1.5071733 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': [2, 1], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5808.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.kernel_size : int[] = prim::Constant[value=[2, 1]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.kernel_size, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[-1.63147077e-01 3.05787295e-01 1.21060085e+00 6.29989445e-01 1.81619275e+00 2.38571787e+00 2.27428123e-01 1.06601989e+00 2.73830557e+00 7.34743138e-04 2.74315357e-01 6.01446271e-01 -4.74208981e-01 1.83290732e+00 -1.30370486e+00] [ 8.08576584e-01 4.94472086e-01 3.37097585e-01 4.31132883e-01 -1.24681520e+00 7.08750606e-01 2.80560464e-01 1.77579892e+00 -1.60119489e-01 1.31610572e+00 8.42621803e-01 -1.78481471e-02 5.02682626e-01 -1.08472124e-01 2.27682447e+00] [ 3.54156286e-01 1.22170103e+00 8.04891288e-01 -6.96296453e-01 -8.42393398e-01 2.94289637e+00 -6.20400727e-01 8.22693855e-02 -1.08202994e-01 -1.05050611e+00 1.32494760e+00 5.69536090e-01 7.09986687e-01 1.28415585e+00 1.94833308e-01] [ 1.65420604e+00 1.22164059e+00 1.10306597e+00 -1.41068533e-01 3.69849317e-02 1.38512230e+00 6.49531364e-01 1.15973759e+00 1.85659873e+00 5.19292653e-01 6.08969808e-01 4.09332186e-01 1.00713640e-01 2.36431098e+00 3.09620023e-01] [ 3.32446873e-01 -2.33988851e-01 1.94494605e+00 2.29571879e-01 3.88354719e-01 1.55116284e+00 1.08696866e+00 7.60727048e-01 -1.99717969e-01 -9.75566804e-02 3.09701157e+00 1.33977962e+00 6.80332482e-02 6.81632936e-01 2.28243804e+00] [ 7.67709613e-01 -6.50103530e-03 1.34209228e+00 -4.06157702e-01 3.06410044e-01 -6.43760979e-01 5.64400017e-01 -1.01424940e-03 3.27274501e-01 8.72783005e-01 -4.03727498e-03 2.66134858e+00 1.34015769e-01 -2.61219710e-01 -1.85939804e-01] [-8.60321283e-01 -6.80119591e-03 
3.36588770e-02 8.61445546e-01 1.03311062e+00 1.24368727e+00 -6.77448362e-02 4.02485579e-02 6.44189239e-01 1.46567881e+00 -1.46846250e-01 2.50635654e-01 1.81071115e+00 4.79861856e-01 1.00570488e+00]] [[ 1.33046937e+00 6.66920960e-01 -1.83560699e-01 8.02567959e-01 1.44673276e+00 -4.84944969e-01 -7.16143131e-01 6.81689918e-01 1.31865656e+00 1.02506483e+00 8.28927696e-01 9.66267824e-01 1.08313477e+00 2.51712871e+00 1.86710030e-01] [ 8.91975760e-01 6.89028859e-01 -5.64213753e-01 2.74968266e-01 9.85457420e-01 2.43322328e-02 6.46916986e-01 1.00780630e+00 -7.34960794e-01 9.23671424e-01 3.98213148e-01 1.09091020e+00 2.70860720e+00 -6.63631409e-02 1.91750872e+00] [-1.49799848e+00 1.91284880e-01 -4.45647955e-01 -8.46545175e-02 5.28981030e-01 2.12243414e+00 6.94334745e-01 1.12976408e+00 1.40592957e+00 -9.44771290e-01 1.96243703e+00 -6.03807047e-02 1.31416011e+00 2.19410375e-01 2.39131391e-01] [ 7.63273314e-02 1.10001898e+00 2.44170189e+00 -1.13560900e-01 3.30671877e-01 9.56000924e-01 2.03324747e+00 7.38083541e-01 7.55263805e-01 1.34506679e+00 -2.17524722e-01 1.12654793e+00 8.40542853e-01 8.80134702e-01 -1.51684597e-01] [ 1.00638628e+00 1.56149018e+00 -7.72115827e-01 1.07265890e+00 3.24051976e-01 1.68369365e+00 1.10176599e+00 1.31410885e+00 2.82144785e+00 2.13391539e-02 1.18186057e+00 2.89197117e-01 1.46584654e+00 4.21073169e-01 -8.40149373e-02] [ 4.92411852e-02 8.95252466e-01 3.25506777e-01 1.21465683e+00 -4.19430703e-01 -2.98099488e-01 7.89126277e-01 -1.24977839e+00 -2.00949013e-01 1.74645758e+00 7.95375347e-01 5.47000706e-01 1.47395134e-01 -1.23361623e+00 1.61943328e+00] [ 6.48863316e-01 4.05530483e-01 8.54105294e-01 -1.84022099e-01 1.56945205e+00 1.47693682e+00 9.40574110e-01 1.03623700e+00 -7.53743714e-03 1.09283257e+00 1.09904146e+00 1.06004596e+00 -9.36786950e-01 1.49694335e+00 1.37378502e+00]] [[ 5.54537117e-01 8.80271137e-01 9.81157005e-01 1.09564805e+00 1.26666415e+00 1.56878901e+00 5.40882945e-01 6.45378888e-01 9.33306408e-04 1.65803778e+00 2.31905866e+00 
-1.32589005e-02 1.95350122e+00 2.12847665e-01 -5.46052158e-01] [ 9.25866425e-01 -3.69239777e-01 7.68071771e-01 -1.20876580e-01 2.30852097e-01 7.01579809e-01 -6.08459078e-02 -6.10796958e-02 -5.46847582e-01 2.17185959e-01 1.35372913e+00 2.50595033e-01 7.74566531e-01 -2.68739522e-01 1.69471097e+00] [ 3.10012054e+00 1.16400075e+00 1.09272015e+00 2.75726289e-01 2.63973475e-01 -2.38038942e-01 4.56758231e-01 5.86006463e-01 9.48630512e-01 -1.15989029e-01 6.92522764e-01 1.55139732e+00 3.98515791e-01 1.11796463e+00 -5.52461565e-01] [ 7.90019691e-01 1.83610797e+00 -4.87160742e-01 2.05655266e-02 7.44097650e-01 -7.90206671e-01 1.96595535e-01 1.01060176e+00 7.58005083e-01 1.73996913e+00 -6.27040029e-01 -4.22453851e-01 6.58829987e-01 1.09880948e+00 4.35819536e-01] [ 1.00711572e+00 1.07104027e+00 -4.00868982e-01 -4.36967492e-01 9.35323238e-01 1.54197872e+00 9.49491799e-01 2.68142414e+00 -6.52556121e-01 1.00680685e+00 8.78626466e-01 2.94871002e-01 9.56356406e-01 1.71187019e+00 -1.92846030e-01] [ 7.42950678e-01 2.16308162e-01 4.73184168e-01 6.27289295e-01 7.29124486e-01 5.33554971e-01 1.48014152e+00 1.87062240e+00 -7.99026564e-02 1.08385742e+00 1.45743191e+00 1.66373682e+00 1.23619437e+00 2.49587551e-01 1.72130966e+00] [ 8.23780239e-01 -1.52058497e-01 4.48337048e-01 -1.48112684e-01 2.74341911e-01 5.85471809e-01 9.59715009e-01 1.23565686e+00 1.75163007e+00 -4.18352902e-01 8.68448436e-01 2.83587456e+00 4.28552955e-01 1.35161772e-01 -2.12921292e-01]]]]; ov_res: [[[[-1.63147077e-01 3.05787295e-01 1.21060085e+00 6.29989445e-01 1.81619275e+00 2.38571787e+00 2.27428123e-01 1.06601989e+00 2.73830557e+00 7.34743138e-04 2.74315357e-01 6.01446271e-01 -4.74208981e-01 1.83290732e+00 -1.30370486e+00] [ 8.08576584e-01 4.94472086e-01 3.37097585e-01 4.31132883e-01 -1.24681520e+00 7.08750606e-01 2.80560464e-01 1.77579892e+00 -1.60119489e-01 1.31610572e+00 8.42621803e-01 -1.78481471e-02 5.02682626e-01 -1.08472124e-01 2.27682447e+00] [ 3.54156286e-01 1.22170103e+00 8.04891288e-01 -6.96296453e-01 
-8.42393398e-01 2.94289637e+00 -6.20400727e-01 8.22693855e-02 -1.08202994e-01 -1.05050611e+00 1.32494760e+00 5.69536090e-01 7.09986687e-01 1.28415585e+00 1.94833308e-01] [ 1.65420604e+00 1.22164059e+00 1.10306597e+00 -1.41068533e-01 3.69849317e-02 1.38512230e+00 6.49531364e-01 1.15973759e+00 1.85659873e+00 5.19292653e-01 6.08969808e-01 4.09332186e-01 1.00713640e-01 2.36431098e+00 3.09620023e-01] [ 3.32446873e-01 -2.33988851e-01 1.94494605e+00 2.29571879e-01 3.88354719e-01 1.55116284e+00 1.08696866e+00 7.60727048e-01 -1.99717969e-01 -9.75566804e-02 3.09701157e+00 1.33977962e+00 6.80332482e-02 6.81632936e-01 2.28243804e+00] [ 7.67709613e-01 -6.50103530e-03 1.34209228e+00 -4.06157702e-01 3.06410044e-01 -6.43760979e-01 5.64400017e-01 -1.01424940e-03 3.27274501e-01 8.72783005e-01 -4.03727498e-03 2.66134858e+00 1.34015769e-01 -2.61219710e-01 -1.85939804e-01] [-8.60321283e-01 -6.80119591e-03 3.36588770e-02 8.61445546e-01 1.03311062e+00 1.24368727e+00 -6.77448362e-02 4.02485579e-02 6.44189239e-01 1.46567881e+00 -1.46846250e-01 2.50635654e-01 1.81071115e+00 4.79861856e-01 1.00570488e+00]] [[ 1.33046937e+00 6.66920960e-01 -1.83560699e-01 8.02567959e-01 1.44673276e+00 -4.84944969e-01 -7.16143131e-01 6.81689918e-01 1.31865656e+00 1.02506483e+00 8.28927696e-01 9.66267824e-01 1.08313477e+00 2.51712871e+00 1.86710030e-01] [ 8.91975760e-01 6.89028859e-01 -5.64213753e-01 2.74968266e-01 9.85457420e-01 2.43322328e-02 6.46916986e-01 1.00780630e+00 -7.34960794e-01 9.23671424e-01 3.98213148e-01 1.09091020e+00 2.70860720e+00 -6.63631409e-02 1.91750872e+00] [-1.49799848e+00 1.91284880e-01 -4.45647955e-01 -8.46545175e-02 5.28981030e-01 2.12243414e+00 6.94334745e-01 1.12976408e+00 1.40592957e+00 -9.44771290e-01 1.96243703e+00 -6.03807047e-02 1.31416011e+00 2.19410375e-01 2.39131391e-01] [ 7.63273314e-02 1.10001898e+00 2.44170189e+00 -1.13560900e-01 3.30671877e-01 9.56000924e-01 2.03324747e+00 7.38083541e-01 7.55263805e-01 1.34506679e+00 -2.17524722e-01 1.12654793e+00 8.40542853e-01 
8.80134702e-01 -1.51684597e-01] [ 1.00638628e+00 1.56149018e+00 -7.72115827e-01 1.07265890e+00 3.24051976e-01 1.68369365e+00 1.10176599e+00 1.31410885e+00 2.82144785e+00 2.13391539e-02 1.18186057e+00 2.89197117e-01 1.46584654e+00 4.21073169e-01 -8.40149373e-02] [ 4.92411852e-02 8.95252466e-01 3.25506777e-01 1.21465683e+00 -4.19430703e-01 -2.98099488e-01 7.89126277e-01 -1.24977839e+00 -2.00949013e-01 1.74645758e+00 7.95375347e-01 5.47000706e-01 1.47395134e-01 -1.23361623e+00 1.61943328e+00] [ 6.48863316e-01 4.05530483e-01 8.54105294e-01 -1.84022099e-01 1.56945205e+00 1.47693682e+00 9.40574110e-01 1.03623700e+00 -7.53743714e-03 1.09283257e+00 1.09904146e+00 1.06004596e+00 -9.36786950e-01 1.49694335e+00 1.37378502e+00]] [[ 5.54537117e-01 8.80271137e-01 9.81157005e-01 1.09564805e+00 1.26666415e+00 1.56878901e+00 5.40882945e-01 6.45378888e-01 9.33306408e-04 1.65803778e+00 2.31905866e+00 -1.32589005e-02 1.95350122e+00 2.12847665e-01 -5.46052158e-01] [ 9.25866425e-01 -3.69239777e-01 7.68071771e-01 -1.20876580e-01 2.30852097e-01 7.01579809e-01 -6.08459078e-02 -6.10796958e-02 -5.46847582e-01 2.17185959e-01 1.35372913e+00 2.50595033e-01 7.74566531e-01 -2.68739522e-01 1.69471097e+00] [ 3.10012054e+00 1.16400075e+00 1.09272015e+00 2.75726289e-01 2.63973475e-01 -2.38038942e-01 4.56758231e-01 5.86006463e-01 9.48630512e-01 -1.15989029e-01 6.92522764e-01 1.55139732e+00 3.98515791e-01 1.11796463e+00 -5.52461565e-01] [ 7.90019691e-01 1.83610797e+00 -4.87160742e-01 2.05655266e-02 7.44097650e-01 -7.90206671e-01 1.96595535e-01 1.01060176e+00 7.58005083e-01 1.73996913e+00 -6.27040029e-01 -4.22453851e-01 6.58829987e-01 1.09880948e+00 4.35819536e-01] [ 1.00711572e+00 1.07104027e+00 -4.00868982e-01 -4.36967492e-01 9.35323238e-01 1.54197872e+00 9.49491799e-01 2.68142414e+00 -6.52556121e-01 1.00680685e+00 8.78626466e-01 2.94871002e-01 9.56356406e-01 1.71187019e+00 -1.92846030e-01] [ 7.42950678e-01 2.16308162e-01 4.73184168e-01 6.27289295e-01 7.29124486e-01 5.33554971e-01 1.48014152e+00 
1.87062240e+00 -7.99026564e-02 1.08385742e+00 1.45743191e+00 1.66373682e+00 1.23619437e+00 2.49587551e-01 1.72130966e+00] [ 8.23780239e-01 -1.52058497e-01 4.48337048e-01 -1.48112684e-01 2.74341911e-01 5.85471809e-01 9.59715009e-01 1.23565686e+00 1.75163007e+00 -4.18352902e-01 8.68448436e-01 2.83587456e+00 4.28552955e-01 1.35161772e-01 -2.12921292e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': 1, 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5810.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 1]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %4, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%7) fw_re: [[[[ 2.739867 3.19065 0.9521759 3.19065 1.5764834 3.19065 1.5764834 2.9851365 1.5764834 2.9851365 1.1584816 ] [ 1.8051275 1.9371711 0.7998218 0.9461014 1.1036023 0.9461014 1.1036023 0.01216678 1.3083181 0.7140213 1.3083181 ] [ 1.079762 0.43365532 0.9521759 1.3386129 0.9521759 2.9851365 0.78914976 2.9851365 0.78914976 2.9851365 1.1584816 ] [ 0.9289764 1.9371711 0.9289764 1.79177 2.3451054 1.79177 2.3451054 1.79177 2.3451054 1.4412408 1.0634298 ] [ 1.048374 0.71107364 1.3442636 1.1144422 1.3442636 2.9851365 1.3442636 2.9851365 1.2485031 2.9851365 1.2485031 ] [ 1.0293958 2.7771764 1.0293958 2.7771764 2.3451054 1.79177 2.3451054 1.79177 2.3451054 1.4412408 1.0634298 ] [ 1.048374 0.71107364 1.3442636 0.71107364 1.3442636 2.4344585 1.3442636 2.4344585 1.2485031 2.4344585 1.2485031 ] [ 1.0293958 2.7771764 1.0293958 2.7771764 2.3451054 1.79177 2.3451054 1.79177 2.3451054 1.4412408 0.70324194] [ 1.048374 1.7344772 1.3442636 1.7344772 1.3442636 2.4344585 1.3442636 2.4344585 1.2485031 2.4344585 1.2485031 ] [ 1.0293958 2.7771764 1.0293958 2.7771764 1.5495738 1.8657309 1.5495738 1.209152 1.5495738 1.0072384 0.6960524 ] [ 0.6689557 1.7344772 0.83066595 1.7344772 0.83066595 1.7344772 0.83066595 0.91387385 0.19220266 0.91387385 0.2394517 ]] [[ 2.2406769 1.1158935 2.2406769 1.4621391 2.2406769 1.5872358 1.7615771 1.5872358 1.7615771 1.5872358 1.7615771 ] [ 2.3608153 1.3983582 2.3608153 1.3983582 
2.3608153 1.8417044 1.1358423 1.8417044 2.0621362 2.5305314 2.0621362 ] [ 0.2746798 1.5071996 0.2746798 1.5071996 1.3426632 1.5071996 1.3426632 1.4621391 1.3426632 1.46097 0.15981618] [ 2.3608153 1.3983582 2.3608153 1.3983582 2.3608153 1.109598 1.1358423 1.1579757 2.0621362 2.5305314 2.0621362 ] [ 0.3529481 2.057749 1.1130669 2.057749 1.3426632 1.5071996 1.3426632 0.7396197 1.3426632 1.5961802 0.2790325 ] [ 2.3608153 1.3983582 2.3608153 1.3983582 2.3608153 1.4571687 0.8500917 1.4571687 2.0621362 2.5305314 2.0621362 ] [ 0.39158025 2.057749 1.1130669 2.057749 1.1130669 1.5071996 1.1130669 0.42664203 0.5920961 1.5961802 0.2790325 ] [ 1.5843996 1.3454949 1.5843996 1.3454949 1.5843996 1.4571687 0.8500917 1.4571687 1.7416863 1.4571687 1.7416863 ] [ 0.5423728 2.057749 1.1130669 2.057749 1.1130669 0.03674202 1.1130669 0.21596433 0.5920961 1.5961802 0.2790325 ] [ 2.3778489 1.3454949 2.3778489 1.3454949 2.3778489 1.4571687 1.3449236 1.4571687 1.7416863 1.4571687 1.7416863 ] [ 1.3128773 1.8684663 0.6990556 1.8684663 0.6990556 0.73014295 0.67626435 0.9013848 0.5920961 0.9013848 0.41233775]] [[ 1.8167709 1.7782319 2.1901102 1.7782319 2.1901102 1.7782319 2.1901102 1.3314176 0.8627525 2.4174387 0.8627525 ] [ 1.2696987 1.9180871 1.2696987 1.9180871 0.6954643 2.288891 2.1436832 2.288891 2.1436832 2.288891 2.1436832 ] [ 1.8167709 2.526227 2.1901102 2.526227 2.1901102 1.7782319 2.1901102 1.4607645 0.8753444 2.4174387 0.8753444 ] [ 1.4956088 1.9180871 1.4956088 1.9180871 1.4956088 2.288891 2.1436832 2.288891 2.1436832 2.288891 2.1436832 ] [ 1.0462413 2.526227 2.1901102 2.526227 2.1901102 1.7782319 2.1901102 1.4607645 0.95536757 2.4174387 0.8753444 ] [ 1.4956088 0.68660045 1.4956088 0.5893266 1.4956088 2.288891 2.1436832 2.288891 2.1436832 2.288891 2.1436832 ] [ 0.8973139 2.526227 1.6244633 2.6327176 1.6244633 2.6327176 1.6244633 2.6327176 1.949746 -0.04725664 1.949746 ] [ 1.4956088 1.0228459 1.4956088 0.5893266 1.4956088 2.249505 1.1731168 2.249505 1.1731168 2.249505 1.2899704 ] [ 
1.3975016 1.3385913 1.3975016 2.6327176 1.0793018 2.6327176 1.6606728 2.6327176 1.949746 0.36662918 1.949746 ] [ 1.3448672 1.0228459 1.0804825 0.5893266 1.1731168 2.249505 1.1731168 2.249505 1.1731168 2.249505 1.439189 ] [ 1.3975016 1.2105187 1.3975016 2.6327176 1.0793018 2.6327176 1.6606728 2.6327176 1.949746 0.5884876 1.949746 ]]]]; ov_res: [[[[ 2.739867 3.19065 0.9521759 3.19065 1.5764834 3.19065 1.5764834 2.9851365 1.5764834 2.9851365 1.1584816 ] [ 1.8051275 1.9371711 0.7998218 0.9461014 1.1036023 0.9461014 1.1036023 0.01216678 1.3083181 0.7140213 1.3083181 ] [ 1.079762 0.43365532 0.9521759 1.3386129 0.9521759 2.9851365 0.78914976 2.9851365 0.78914976 2.9851365 1.1584816 ] [ 0.9289764 1.9371711 0.9289764 1.79177 2.3451054 1.79177 2.3451054 1.79177 2.3451054 1.4412408 1.0634298 ] [ 1.048374 0.71107364 1.3442636 1.1144422 1.3442636 2.9851365 1.3442636 2.9851365 1.2485031 2.9851365 1.2485031 ] [ 1.0293958 2.7771764 1.0293958 2.7771764 2.3451054 1.79177 2.3451054 1.79177 2.3451054 1.4412408 1.0634298 ] [ 1.048374 0.71107364 1.3442636 0.71107364 1.3442636 2.4344585 1.3442636 2.4344585 1.2485031 2.4344585 1.2485031 ] [ 1.0293958 2.7771764 1.0293958 2.7771764 2.3451054 1.79177 2.3451054 1.79177 2.3451054 1.4412408 0.70324194] [ 1.048374 1.7344772 1.3442636 1.7344772 1.3442636 2.4344585 1.3442636 2.4344585 1.2485031 2.4344585 1.2485031 ] [ 1.0293958 2.7771764 1.0293958 2.7771764 1.5495738 1.8657309 1.5495738 1.209152 1.5495738 1.0072384 0.6960524 ] [ 0.6689557 1.7344772 0.83066595 1.7344772 0.83066595 1.7344772 0.83066595 0.91387385 0.19220266 0.91387385 0.2394517 ]] [[ 2.2406769 1.1158935 2.2406769 1.4621391 2.2406769 1.5872358 1.7615771 1.5872358 1.7615771 1.5872358 1.7615771 ] [ 2.3608153 1.3983582 2.3608153 1.3983582 2.3608153 1.8417044 1.1358423 1.8417044 2.0621362 2.5305314 2.0621362 ] [ 0.2746798 1.5071996 0.2746798 1.5071996 1.3426632 1.5071996 1.3426632 1.4621391 1.3426632 1.46097 0.15981618] [ 2.3608153 1.3983582 2.3608153 1.3983582 2.3608153 1.109598 
1.1358423 1.1579757 2.0621362 2.5305314 2.0621362 ] [ 0.3529481 2.057749 1.1130669 2.057749 1.3426632 1.5071996 1.3426632 0.7396197 1.3426632 1.5961802 0.2790325 ] [ 2.3608153 1.3983582 2.3608153 1.3983582 2.3608153 1.4571687 0.8500917 1.4571687 2.0621362 2.5305314 2.0621362 ] [ 0.39158025 2.057749 1.1130669 2.057749 1.1130669 1.5071996 1.1130669 0.42664203 0.5920961 1.5961802 0.2790325 ] [ 1.5843996 1.3454949 1.5843996 1.3454949 1.5843996 1.4571687 0.8500917 1.4571687 1.7416863 1.4571687 1.7416863 ] [ 0.5423728 2.057749 1.1130669 2.057749 1.1130669 0.03674202 1.1130669 0.21596433 0.5920961 1.5961802 0.2790325 ] [ 2.3778489 1.3454949 2.3778489 1.3454949 2.3778489 1.4571687 1.3449236 1.4571687 1.7416863 1.4571687 1.7416863 ] [ 1.3128773 1.8684663 0.6990556 1.8684663 0.6990556 0.73014295 0.67626435 0.9013848 0.5920961 0.9013848 0.41233775]] [[ 1.8167709 1.7782319 2.1901102 1.7782319 2.1901102 1.7782319 2.1901102 1.3314176 0.8627525 2.4174387 0.8627525 ] [ 1.2696987 1.9180871 1.2696987 1.9180871 0.6954643 2.288891 2.1436832 2.288891 2.1436832 2.288891 2.1436832 ] [ 1.8167709 2.526227 2.1901102 2.526227 2.1901102 1.7782319 2.1901102 1.4607645 0.8753444 2.4174387 0.8753444 ] [ 1.4956088 1.9180871 1.4956088 1.9180871 1.4956088 2.288891 2.1436832 2.288891 2.1436832 2.288891 2.1436832 ] [ 1.0462413 2.526227 2.1901102 2.526227 2.1901102 1.7782319 2.1901102 1.4607645 0.95536757 2.4174387 0.8753444 ] [ 1.4956088 0.68660045 1.4956088 0.5893266 1.4956088 2.288891 2.1436832 2.288891 2.1436832 2.288891 2.1436832 ] [ 0.8973139 2.526227 1.6244633 2.6327176 1.6244633 2.6327176 1.6244633 2.6327176 1.949746 -0.04725664 1.949746 ] [ 1.4956088 1.0228459 1.4956088 0.5893266 1.4956088 2.249505 1.1731168 2.249505 1.1731168 2.249505 1.2899704 ] [ 1.3975016 1.3385913 1.3975016 2.6327176 1.0793018 2.6327176 1.6606728 2.6327176 1.949746 0.36662918 1.949746 ] [ 1.3448672 1.0228459 1.0804825 0.5893266 1.1731168 2.249505 1.1731168 2.249505 1.1731168 2.249505 1.439189 ] [ 1.3975016 1.2105187 
1.3975016 2.6327176 1.0793018 2.6327176 1.6606728 2.6327176 1.949746 0.5884876 1.949746 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': 1} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5812.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.stride, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[ 1.8382655 1.1279896 1.8382655 1.4674228 1.4836738 1.4674228 1.554698 1.4674228 1.554698 0.09819758 1.554698 -0.13297683 1.417175 ] [ 0.8664536 0.87463945 0.8664536 0.87463945 0.78201145 1.3383286 1.0291483 1.3383286 1.1847032 1.3383286 1.1847032 1.4380554 1.1847032 ] [ 1.8382655 1.1279896 1.8382655 1.4674228 1.4976318 1.4674228 1.554698 1.4674228 1.554698 0.5762787 1.554698 0.65620726 1.417175 ] [ 0.8664536 0.87463945 0.8664536 0.87463945 0.78201145 1.3383286 1.0291483 1.3383286 1.1847032 1.3383286 1.5901939 1.0957075 1.5901939 ] [ 1.8382655 1.077766 1.8382655 1.4674228 1.4976318 1.4674228 1.554698 1.4674228 1.554698 0.5762787 1.554698 0.65620726 1.417175 ] [ 0.8664536 0.87463945 2.6745222 0.87463945 2.6745222 1.6993266 2.6745222 1.6993266 1.1847032 1.6993266 1.5901939 1.0957075 1.5901939 ] [-0.2850428 2.1839652 -0.2850428 1.077766 1.4976318 0.03028883 1.4976318 1.965093 1.4976318 1.965093 0.6210502 1.965093 0.6210502 ] [-0.17133714 0.6269104 2.6745222 0.6269104 2.6745222 1.6993266 2.6745222 1.6993266 0.62052995 1.6993266 1.5901939 0.95136935 1.5901939 ] [ 0.00304598 2.1839652 0.00304598 1.314504 -0.37844282 0.31511116 -0.37844282 1.965093 0.8653191 1.965093 0.8653191 1.965093 0.8653191 ] [-0.17133714 1.0454037 2.6745222 1.0454037 2.6745222 1.6993266 2.6745222 1.6993266 0.62052995 1.6993266 2.1042423 1.1589719 2.1042423 ] [ 0.47850594 2.1839652 1.3114485 1.314504 1.3114485 0.3632496 
1.3114485 1.965093 0.8653191 1.965093 0.8653191 1.965093 0.8653191 ] [ 0.09836111 1.9789026 0.9716325 1.9789026 1.188471 1.5029355 2.2220309 1.940277 2.2220309 1.940277 2.2220309 1.940277 2.1042423 ] [ 0.47850594 1.8031309 1.3114485 1.314504 1.3114485 0.3632496 1.3114485 1.5270774 0.8653191 1.5270774 0.8653191 1.5270774 0.8653191 ]] [[ 2.345498 1.8930932 2.345498 1.8930932 2.345498 1.7435824 1.4045184 1.5640599 1.4045184 1.5640599 1.2865596 1.5640599 1.2865596 ] [ 0.723294 0.9316065 1.4025408 0.9316065 1.4025408 1.2009928 1.4025408 2.1034608 1.8267723 2.1034608 1.8267723 2.1034608 1.8267723 ] [ 2.6077106 1.8930932 2.6077106 1.8930932 2.6077106 1.7468809 1.4045184 1.7468809 1.4045184 1.7468809 1.2865596 1.5640599 1.2865596 ] [ 1.2625731 0.81105095 1.4025408 0.81105095 1.4025408 1.2009928 1.4025408 1.2009928 1.8267723 1.8204128 1.8267723 2.0598292 1.8267723 ] [ 2.6077106 1.8930932 2.6077106 1.8930932 2.6077106 1.7468809 1.4045184 1.7468809 1.4045184 1.7468809 1.2865596 0.92402744 1.2865596 ] [ 1.2625731 2.1151555 1.2625731 2.1151555 1.1426696 2.1151555 1.1426696 2.0193796 1.8267723 2.0193796 1.8267723 2.0598292 1.8267723 ] [ 2.6077106 0.7247359 2.6077106 1.1046807 2.6077106 1.7468809 1.4348799 1.7468809 0.6296941 1.7468809 0.6296941 0.92402744 0.20769748] [ 2.0373275 2.1151555 2.0373275 2.1151555 2.0373275 2.1151555 1.1426696 2.0193796 1.002373 2.0193796 1.002373 2.0193796 1.002373 ] [ 0.75139874 0.7247359 1.4348799 1.1046807 1.4348799 1.1046807 1.8698357 1.1046807 1.8698357 1.4559281 1.8698357 1.4559281 0.9664491 ] [ 2.0373275 2.1151555 2.0373275 2.1151555 2.0373275 2.1151555 0.15148747 2.0193796 0.02566547 2.0193796 0.01799527 2.0193796 0.01799527] [ 0.75139874 1.2482464 1.4348799 1.2482464 1.4348799 1.6081911 1.8698357 1.6081911 1.8698357 1.6081911 1.8698357 1.4841455 0.9664491 ] [ 2.0373275 0.87973535 2.1750057 0.87973535 2.1750057 2.1023064 2.1750057 2.1023064 0.02566547 2.1023064 -0.19750199 1.5177734 -0.19750199] [ 0.75139874 1.2482464 0.75139874 1.2482464 
1.3651958 1.6081911 1.8698357 1.6081911 1.8698357 1.6081911 1.8698357 1.4841455 0.9664491 ]] [[ 2.1436489 1.277775 2.1436489 0.9934864 2.1436489 0.9934864 1.0721549 1.9283718 0.89183044 1.9283718 1.4688963 2.0328624 1.4688963 ] [ 2.2731206 2.1854188 2.2731206 2.1854188 2.2731206 1.9624532 1.6222737 1.9624532 1.6222737 1.1020011 1.0736135 1.1020011 0.53563446] [ 2.1436489 1.7892503 2.1436489 0.9934864 2.1436489 0.9934864 1.0721549 1.9283718 0.9319291 1.9283718 1.4688963 2.0328624 1.4688963 ] [ 2.2731206 2.1854188 2.2731206 2.1854188 2.2731206 0.66760427 1.6222737 0.66760427 1.6222737 1.1020011 2.405807 1.1020011 2.405807 ] [ 2.1436489 1.7892503 2.1436489 1.2263532 2.1436489 1.2263532 0.9406784 1.9283718 1.7993808 1.9283718 1.7993808 1.9283718 1.7993808 ] [ 2.2731206 1.8766861 2.2731206 1.8766861 2.2731206 1.0006894 1.6222737 1.0006894 1.6222737 0.5723544 2.405807 0.92600965 2.405807 ] [ 0.6106162 1.7892503 0.6106162 1.2263532 0.6106162 1.2263532 0.9406784 1.2263532 1.9247395 0.8369987 1.9247395 0.7020344 1.9247395 ] [ 0.7139622 1.8766861 1.1406642 1.8766861 1.2206097 1.0006894 2.78033 1.0006894 2.78033 0.5723544 2.78033 0.85005265 2.405807 ] [ 0.6106162 0.95827264 0.6106162 1.2263532 1.4622244 1.2263532 1.4622244 2.3166826 1.9247395 2.3166826 1.9247395 2.3166826 1.9247395 ] [ 0.60428524 1.8766861 1.1406642 1.8766861 2.3656054 1.0006894 2.78033 1.0006894 2.78033 0.5723544 2.78033 0.5723544 0.7953535 ] [ 0.6106162 0.97239256 0.6106162 0.95827264 1.4622244 0.95827264 1.4622244 2.3166826 1.9247395 2.3166826 1.9247395 2.3166826 1.9247395 ] [ 1.040172 0.6521078 1.040172 0.6521078 2.3656054 0.6521078 2.78033 0.40191063 2.78033 0.40191063 2.78033 1.8594981 1.2737108 ] [-0.44095427 0.97239256 0.5191985 0.95827264 1.4622244 0.95827264 1.4622244 2.3166826 1.4622244 2.3166826 0.9853742 2.3166826 0.32714012]]]]; ov_res: [[[[ 1.8382655 1.1279896 1.8382655 1.4674228 1.4836738 1.4674228 1.554698 1.4674228 1.554698 0.09819758 1.554698 -0.13297683 1.417175 ] [ 0.8664536 0.87463945 
0.8664536 0.87463945 0.78201145 1.3383286 1.0291483 1.3383286 1.1847032 1.3383286 1.1847032 1.4380554 1.1847032 ] [ 1.8382655 1.1279896 1.8382655 1.4674228 1.4976318 1.4674228 1.554698 1.4674228 1.554698 0.5762787 1.554698 0.65620726 1.417175 ] [ 0.8664536 0.87463945 0.8664536 0.87463945 0.78201145 1.3383286 1.0291483 1.3383286 1.1847032 1.3383286 1.5901939 1.0957075 1.5901939 ] [ 1.8382655 1.077766 1.8382655 1.4674228 1.4976318 1.4674228 1.554698 1.4674228 1.554698 0.5762787 1.554698 0.65620726 1.417175 ] [ 0.8664536 0.87463945 2.6745222 0.87463945 2.6745222 1.6993266 2.6745222 1.6993266 1.1847032 1.6993266 1.5901939 1.0957075 1.5901939 ] [-0.2850428 2.1839652 -0.2850428 1.077766 1.4976318 0.03028883 1.4976318 1.965093 1.4976318 1.965093 0.6210502 1.965093 0.6210502 ] [-0.17133714 0.6269104 2.6745222 0.6269104 2.6745222 1.6993266 2.6745222 1.6993266 0.62052995 1.6993266 1.5901939 0.95136935 1.5901939 ] [ 0.00304598 2.1839652 0.00304598 1.314504 -0.37844282 0.31511116 -0.37844282 1.965093 0.8653191 1.965093 0.8653191 1.965093 0.8653191 ] [-0.17133714 1.0454037 2.6745222 1.0454037 2.6745222 1.6993266 2.6745222 1.6993266 0.62052995 1.6993266 2.1042423 1.1589719 2.1042423 ] [ 0.47850594 2.1839652 1.3114485 1.314504 1.3114485 0.3632496 1.3114485 1.965093 0.8653191 1.965093 0.8653191 1.965093 0.8653191 ] [ 0.09836111 1.9789026 0.9716325 1.9789026 1.188471 1.5029355 2.2220309 1.940277 2.2220309 1.940277 2.2220309 1.940277 2.1042423 ] [ 0.47850594 1.8031309 1.3114485 1.314504 1.3114485 0.3632496 1.3114485 1.5270774 0.8653191 1.5270774 0.8653191 1.5270774 0.8653191 ]] [[ 2.345498 1.8930932 2.345498 1.8930932 2.345498 1.7435824 1.4045184 1.5640599 1.4045184 1.5640599 1.2865596 1.5640599 1.2865596 ] [ 0.723294 0.9316065 1.4025408 0.9316065 1.4025408 1.2009928 1.4025408 2.1034608 1.8267723 2.1034608 1.8267723 2.1034608 1.8267723 ] [ 2.6077106 1.8930932 2.6077106 1.8930932 2.6077106 1.7468809 1.4045184 1.7468809 1.4045184 1.7468809 1.2865596 1.5640599 1.2865596 ] [ 1.2625731 
0.81105095 1.4025408 0.81105095 1.4025408 1.2009928 1.4025408 1.2009928 1.8267723 1.8204128 1.8267723 2.0598292 1.8267723 ] [ 2.6077106 1.8930932 2.6077106 1.8930932 2.6077106 1.7468809 1.4045184 1.7468809 1.4045184 1.7468809 1.2865596 0.92402744 1.2865596 ] [ 1.2625731 2.1151555 1.2625731 2.1151555 1.1426696 2.1151555 1.1426696 2.0193796 1.8267723 2.0193796 1.8267723 2.0598292 1.8267723 ] [ 2.6077106 0.7247359 2.6077106 1.1046807 2.6077106 1.7468809 1.4348799 1.7468809 0.6296941 1.7468809 0.6296941 0.92402744 0.20769748] [ 2.0373275 2.1151555 2.0373275 2.1151555 2.0373275 2.1151555 1.1426696 2.0193796 1.002373 2.0193796 1.002373 2.0193796 1.002373 ] [ 0.75139874 0.7247359 1.4348799 1.1046807 1.4348799 1.1046807 1.8698357 1.1046807 1.8698357 1.4559281 1.8698357 1.4559281 0.9664491 ] [ 2.0373275 2.1151555 2.0373275 2.1151555 2.0373275 2.1151555 0.15148747 2.0193796 0.02566547 2.0193796 0.01799527 2.0193796 0.01799527] [ 0.75139874 1.2482464 1.4348799 1.2482464 1.4348799 1.6081911 1.8698357 1.6081911 1.8698357 1.6081911 1.8698357 1.4841455 0.9664491 ] [ 2.0373275 0.87973535 2.1750057 0.87973535 2.1750057 2.1023064 2.1750057 2.1023064 0.02566547 2.1023064 -0.19750199 1.5177734 -0.19750199] [ 0.75139874 1.2482464 0.75139874 1.2482464 1.3651958 1.6081911 1.8698357 1.6081911 1.8698357 1.6081911 1.8698357 1.4841455 0.9664491 ]] [[ 2.1436489 1.277775 2.1436489 0.9934864 2.1436489 0.9934864 1.0721549 1.9283718 0.89183044 1.9283718 1.4688963 2.0328624 1.4688963 ] [ 2.2731206 2.1854188 2.2731206 2.1854188 2.2731206 1.9624532 1.6222737 1.9624532 1.6222737 1.1020011 1.0736135 1.1020011 0.53563446] [ 2.1436489 1.7892503 2.1436489 0.9934864 2.1436489 0.9934864 1.0721549 1.9283718 0.9319291 1.9283718 1.4688963 2.0328624 1.4688963 ] [ 2.2731206 2.1854188 2.2731206 2.1854188 2.2731206 0.66760427 1.6222737 0.66760427 1.6222737 1.1020011 2.405807 1.1020011 2.405807 ] [ 2.1436489 1.7892503 2.1436489 1.2263532 2.1436489 1.2263532 0.9406784 1.9283718 1.7993808 1.9283718 1.7993808 
1.9283718 1.7993808 ] [ 2.2731206 1.8766861 2.2731206 1.8766861 2.2731206 1.0006894 1.6222737 1.0006894 1.6222737 0.5723544 2.405807 0.92600965 2.405807 ] [ 0.6106162 1.7892503 0.6106162 1.2263532 0.6106162 1.2263532 0.9406784 1.2263532 1.9247395 0.8369987 1.9247395 0.7020344 1.9247395 ] [ 0.7139622 1.8766861 1.1406642 1.8766861 1.2206097 1.0006894 2.78033 1.0006894 2.78033 0.5723544 2.78033 0.85005265 2.405807 ] [ 0.6106162 0.95827264 0.6106162 1.2263532 1.4622244 1.2263532 1.4622244 2.3166826 1.9247395 2.3166826 1.9247395 2.3166826 1.9247395 ] [ 0.60428524 1.8766861 1.1406642 1.8766861 2.3656054 1.0006894 2.78033 1.0006894 2.78033 0.5723544 2.78033 0.5723544 0.7953535 ] [ 0.6106162 0.97239256 0.6106162 0.95827264 1.4622244 0.95827264 1.4622244 2.3166826 1.9247395 2.3166826 1.9247395 2.3166826 1.9247395 ] [ 1.040172 0.6521078 1.040172 0.6521078 2.3656054 0.6521078 2.78033 0.40191063 2.78033 0.40191063 2.78033 1.8594981 1.2737108 ] [-0.44095427 0.97239256 0.5191985 0.95827264 1.4622244 0.95827264 1.4622244 2.3166826 1.4622244 2.3166826 0.9853742 2.3166826 0.32714012]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [0, 1]} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5814.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.padding : int[] = prim::Constant[value=[0, 1]]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%7) fw_re: [[[[ 0.8045693 2.2413893 1.2280788 2.2413893 1.6044494 0.5218193 2.3486638 0.9241046 2.3486638 0.9241046 2.3486638 0.9241046 1.5206302 ] [ 0.44965318 1.3453428 0.88260585 1.7931157 0.88260585 1.7931157 0.88260585 1.7931157 0.8204391 1.7541066 0.8204391 1.7541066 0.41569868] [ 1.4446934 2.2413893 1.4446934 2.2413893 1.4706106 0.6349288 1.4706106 0.9241046 1.4706106 0.9241046 1.8510164 1.1158954 1.8510164 ] [ 1.3730943 1.0936129 1.3730943 1.7931157 1.3730943 1.7931157 0.88260585 2.6954315 0.8204391 2.6954315 0.8204391 2.6954315 0.19989555] [ 1.4446934 1.389294 1.4446934 1.389294 1.4706106 0.6349288 1.4706106 0.9241046 1.4706106 1.6263009 1.8510164 1.6263009 1.8510164 ] [ 1.3730943 1.3357267 1.3730943 1.7931157 1.3730943 1.7931157 1.3020351 2.6954315 1.3020351 2.6954315 1.3020351 2.6954315 0.708019 ] [ 1.4446934 1.2205354 1.4446934 1.2205354 1.3480707 0.6349288 1.3480707 0.5295555 1.177134 1.6263009 1.8510164 1.6263009 1.8510164 ] [ 1.3730943 1.3357267 1.3730943 1.3357267 1.3730943 1.1486361 1.3020351 2.6954315 1.3020351 2.6954315 1.3020351 2.6954315 0.72020304] [ 0.94144833 0.9867305 1.3480707 0.9867305 1.3480707 0.9867305 1.3480707 0.5295555 1.6431185 1.6263009 1.6431185 1.9931849 1.6431185 ] [ 0.9422812 1.3357267 0.9422812 1.3357267 0.76528543 1.1191603 1.3020351 1.2927754 1.3020351 1.2927754 1.3020351 1.2927754 0.72020304] [ 1.4207342 
0.9867305 1.4207342 0.9867305 1.4207342 0.9867305 1.1456921 0.94360316 1.6431185 0.94360316 1.6431185 1.9931849 1.6431185 ]] [[ 0.78926665 0.40080827 0.78926665 1.1475843 1.0126686 1.1475843 1.0126686 1.521489 1.54387 1.521489 1.54387 2.5834394 1.54387 ] [ 1.5705262 2.051993 1.5705262 2.051993 1.1504977 0.4523784 1.1504977 2.4493105 0.9066713 2.4493105 0.9414415 2.5998945 0.9414415 ] [ 0.40438086 0.40080827 0.40438086 1.1475843 1.0126686 1.1475843 1.5553517 1.521489 1.5553517 1.521489 1.5553517 2.5834394 1.54387 ] [ 1.5705262 1.7300808 1.5705262 0.538847 1.7911409 1.4945383 1.7911409 2.0411448 1.7911409 2.0411448 0.9414415 2.0411448 0.9414415 ] [ 0.6645231 0.43590373 0.6645231 1.1475843 1.0126686 1.1475843 1.5553517 1.521489 1.5553517 1.521489 1.5553517 1.521489 1.54387 ] [ 0.8775103 2.8473992 0.9630991 2.8473992 1.7911409 1.4945383 1.7911409 2.0411448 1.7911409 2.0411448 1.0157253 2.0411448 1.0157253 ] [ 0.6645231 2.197001 0.6645231 2.197001 0.6645231 2.197001 1.5553517 1.2983489 1.5553517 1.2983489 1.5553517 1.2983489 1.229935 ] [ 0.8775103 2.8473992 1.2094965 2.8473992 1.7911409 1.4945383 3.0034533 1.4945383 3.0034533 1.4945383 3.0034533 1.3204753 1.0157253 ] [ 0.6645231 2.197001 0.6645231 2.197001 0.6645231 2.197001 0.39634702 1.2983489 1.229935 1.2983489 1.229935 1.2983489 1.229935 ] [ 0.8775103 2.8473992 1.2094965 2.8473992 1.2094965 1.122495 3.0034533 2.4264219 3.0034533 2.4264219 3.0034533 2.4264219 1.0157253 ] [ 0.5265908 2.197001 0.5265908 2.197001 0.47285146 2.197001 0.10327133 1.1096171 0.6853206 1.1096171 0.97442174 1.0720367 0.97442174]] [[ 1.8140979 1.4139824 1.8140979 1.4171216 1.3342534 1.4171216 1.586206 2.4074628 1.586206 2.4074628 1.586206 2.4074628 1.3611414 ] [ 1.9571537 2.5263574 1.9571537 2.5263574 1.9571537 1.7735845 1.2766389 1.7735845 1.0664119 2.2395287 1.0664119 2.2395287 1.0664119 ] [ 1.8140979 1.2340878 1.8140979 1.2340878 1.3342534 1.2340878 1.3755153 2.4074628 1.3755153 2.4074628 1.3755153 2.4074628 1.3530524 ] [ 1.9571537 2.5263574 
1.9571537 2.5263574 1.9571537 1.1466848 1.2801539 1.1466848 1.2801539 2.2395287 1.2801539 2.2395287 0.62236255] [ 1.3342534 1.2340878 1.3342534 1.2340878 1.4870942 1.2340878 1.4870942 2.4074628 1.4870942 2.4074628 1.3755153 2.4074628 1.3530524 ] [ 1.9571537 2.270639 1.9571537 1.9382129 1.9571537 1.1466848 1.2801539 1.1466848 1.2801539 2.2395287 1.2801539 2.2395287 0.62236255] [ 0.4819167 0.9210902 1.0373256 0.9210902 1.4870942 0.9210902 1.4870942 0.7876021 1.4870942 1.9548544 1.3755153 1.9548544 1.3530524 ] [ 0.6871571 2.270639 1.0844281 1.9382129 1.0844281 0.6608945 1.2801539 0.6050572 1.8549707 0.11674846 1.8549707 0.52133214 1.8549707 ] [ 1.5443512 0.9210902 1.5443512 0.9210902 1.4870942 0.9210902 1.4870942 0.7876021 1.4870942 0.94015115 0.91721094 1.0229354 0.91721094] [-0.06754887 2.5579305 0.8861895 2.5579305 0.8861895 1.3455027 2.0500572 1.3455027 2.0500572 1.3455027 2.0500572 0.52133214 1.8549707 ] [ 1.5443512 1.0392214 1.5443512 1.0392214 1.4420476 0.68349385 1.4420476 0.63333344 0.777048 0.75787425 0.91721094 1.0229354 0.91721094]]]]; ov_res: [[[[ 0.8045693 2.2413893 1.2280788 2.2413893 1.6044494 0.5218193 2.3486638 0.9241046 2.3486638 0.9241046 2.3486638 0.9241046 1.5206302 ] [ 0.44965318 1.3453428 0.88260585 1.7931157 0.88260585 1.7931157 0.88260585 1.7931157 0.8204391 1.7541066 0.8204391 1.7541066 0.41569868] [ 1.4446934 2.2413893 1.4446934 2.2413893 1.4706106 0.6349288 1.4706106 0.9241046 1.4706106 0.9241046 1.8510164 1.1158954 1.8510164 ] [ 1.3730943 1.0936129 1.3730943 1.7931157 1.3730943 1.7931157 0.88260585 2.6954315 0.8204391 2.6954315 0.8204391 2.6954315 0.19989555] [ 1.4446934 1.389294 1.4446934 1.389294 1.4706106 0.6349288 1.4706106 0.9241046 1.4706106 1.6263009 1.8510164 1.6263009 1.8510164 ] [ 1.3730943 1.3357267 1.3730943 1.7931157 1.3730943 1.7931157 1.3020351 2.6954315 1.3020351 2.6954315 1.3020351 2.6954315 0.708019 ] [ 1.4446934 1.2205354 1.4446934 1.2205354 1.3480707 0.6349288 1.3480707 0.5295555 1.177134 1.6263009 1.8510164 1.6263009 
1.8510164 ] [ 1.3730943 1.3357267 1.3730943 1.3357267 1.3730943 1.1486361 1.3020351 2.6954315 1.3020351 2.6954315 1.3020351 2.6954315 0.72020304] [ 0.94144833 0.9867305 1.3480707 0.9867305 1.3480707 0.9867305 1.3480707 0.5295555 1.6431185 1.6263009 1.6431185 1.9931849 1.6431185 ] [ 0.9422812 1.3357267 0.9422812 1.3357267 0.76528543 1.1191603 1.3020351 1.2927754 1.3020351 1.2927754 1.3020351 1.2927754 0.72020304] [ 1.4207342 0.9867305 1.4207342 0.9867305 1.4207342 0.9867305 1.1456921 0.94360316 1.6431185 0.94360316 1.6431185 1.9931849 1.6431185 ]] [[ 0.78926665 0.40080827 0.78926665 1.1475843 1.0126686 1.1475843 1.0126686 1.521489 1.54387 1.521489 1.54387 2.5834394 1.54387 ] [ 1.5705262 2.051993 1.5705262 2.051993 1.1504977 0.4523784 1.1504977 2.4493105 0.9066713 2.4493105 0.9414415 2.5998945 0.9414415 ] [ 0.40438086 0.40080827 0.40438086 1.1475843 1.0126686 1.1475843 1.5553517 1.521489 1.5553517 1.521489 1.5553517 2.5834394 1.54387 ] [ 1.5705262 1.7300808 1.5705262 0.538847 1.7911409 1.4945383 1.7911409 2.0411448 1.7911409 2.0411448 0.9414415 2.0411448 0.9414415 ] [ 0.6645231 0.43590373 0.6645231 1.1475843 1.0126686 1.1475843 1.5553517 1.521489 1.5553517 1.521489 1.5553517 1.521489 1.54387 ] [ 0.8775103 2.8473992 0.9630991 2.8473992 1.7911409 1.4945383 1.7911409 2.0411448 1.7911409 2.0411448 1.0157253 2.0411448 1.0157253 ] [ 0.6645231 2.197001 0.6645231 2.197001 0.6645231 2.197001 1.5553517 1.2983489 1.5553517 1.2983489 1.5553517 1.2983489 1.229935 ] [ 0.8775103 2.8473992 1.2094965 2.8473992 1.7911409 1.4945383 3.0034533 1.4945383 3.0034533 1.4945383 3.0034533 1.3204753 1.0157253 ] [ 0.6645231 2.197001 0.6645231 2.197001 0.6645231 2.197001 0.39634702 1.2983489 1.229935 1.2983489 1.229935 1.2983489 1.229935 ] [ 0.8775103 2.8473992 1.2094965 2.8473992 1.2094965 1.122495 3.0034533 2.4264219 3.0034533 2.4264219 3.0034533 2.4264219 1.0157253 ] [ 0.5265908 2.197001 0.5265908 2.197001 0.47285146 2.197001 0.10327133 1.1096171 0.6853206 1.1096171 0.97442174 1.0720367 
0.97442174]] [[ 1.8140979 1.4139824 1.8140979 1.4171216 1.3342534 1.4171216 1.586206 2.4074628 1.586206 2.4074628 1.586206 2.4074628 1.3611414 ] [ 1.9571537 2.5263574 1.9571537 2.5263574 1.9571537 1.7735845 1.2766389 1.7735845 1.0664119 2.2395287 1.0664119 2.2395287 1.0664119 ] [ 1.8140979 1.2340878 1.8140979 1.2340878 1.3342534 1.2340878 1.3755153 2.4074628 1.3755153 2.4074628 1.3755153 2.4074628 1.3530524 ] [ 1.9571537 2.5263574 1.9571537 2.5263574 1.9571537 1.1466848 1.2801539 1.1466848 1.2801539 2.2395287 1.2801539 2.2395287 0.62236255] [ 1.3342534 1.2340878 1.3342534 1.2340878 1.4870942 1.2340878 1.4870942 2.4074628 1.4870942 2.4074628 1.3755153 2.4074628 1.3530524 ] [ 1.9571537 2.270639 1.9571537 1.9382129 1.9571537 1.1466848 1.2801539 1.1466848 1.2801539 2.2395287 1.2801539 2.2395287 0.62236255] [ 0.4819167 0.9210902 1.0373256 0.9210902 1.4870942 0.9210902 1.4870942 0.7876021 1.4870942 1.9548544 1.3755153 1.9548544 1.3530524 ] [ 0.6871571 2.270639 1.0844281 1.9382129 1.0844281 0.6608945 1.2801539 0.6050572 1.8549707 0.11674846 1.8549707 0.52133214 1.8549707 ] [ 1.5443512 0.9210902 1.5443512 0.9210902 1.4870942 0.9210902 1.4870942 0.7876021 1.4870942 0.94015115 0.91721094 1.0229354 0.91721094] [-0.06754887 2.5579305 0.8861895 2.5579305 0.8861895 1.3455027 2.0500572 1.3455027 2.0500572 1.3455027 2.0500572 0.52133214 1.8549707 ] [ 1.5443512 1.0392214 1.5443512 1.0392214 1.4420476 0.68349385 1.4420476 0.63333344 0.777048 0.75787425 0.91721094 1.0229354 0.91721094]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [1, 0]} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5816.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.padding : int[] = prim::Constant[value=[1, 0]]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%7) fw_re: [[[[-0.20729081 1.6829196 0.9396622 1.6829196 0.9396622 2.0341482 0.9396622 2.0341482 0.5091875 2.0341482 0.47580186] [ 1.7865171 1.1581557 1.7865171 1.1581557 0.9115695 0.8954403 1.4773896 0.72378325 1.4773896 2.0432634 1.4773896 ] [-0.19905007 1.6829196 0.9396622 1.6829196 0.9396622 2.0341482 1.4930406 2.0341482 1.4930406 2.0341482 1.4930406 ] [ 1.7865171 0.8954403 1.7865171 0.8954403 0.9115695 0.8954403 1.4773896 0.5027401 1.4773896 1.6061509 1.4773896 ] [ 0.6466546 1.6829196 0.6466546 1.6829196 1.3605464 2.0341482 1.4930406 2.0341482 1.4930406 2.0341482 1.4930406 ] [ 0.2659291 1.961197 0.2659291 1.961197 0.151041 0.49503398 1.4773896 0.5027401 1.4773896 1.6061509 1.4773896 ] [ 1.1872143 1.2415239 1.1872143 0.46086466 1.3605464 1.3239527 1.4930406 1.3239527 1.540399 1.9439147 1.540399 ] [ 1.9127344 1.961197 1.1338972 1.961197 1.1338972 1.730602 1.1338972 0.8805133 1.1093862 1.6061509 1.1093862 ] [ 1.5341771 1.5723475 1.5341771 1.1038705 1.3605464 1.3239527 1.3605464 1.3239527 1.540399 1.9439147 1.540399 ] [ 1.9127344 1.961197 1.1338972 2.0791845 1.1338972 2.0791845 1.1338972 2.0791845 1.7929387 0.8805133 1.7929387 ] [ 1.5341771 1.5723475 1.5341771 1.1038705 1.1554197 1.1038705 1.1554197 1.2343284 1.540399 2.2324038 1.540399 ] [ 1.9127344 1.730602 1.1338972 2.0791845 1.1338972 2.0791845 1.1338972 2.0791845 1.7929387 1.6691918 1.7929387 
] [ 1.5341771 1.5723475 1.5341771 1.1038705 1.1554197 1.1038705 1.1554197 1.2343284 1.1554197 2.2324038 0.81329286]] [[ 0.14075187 0.8042617 3.184263 1.2464463 3.184263 1.2464463 3.184263 1.2464463 0.8670388 1.1837814 0.8670388 ] [ 1.7629374 1.5883366 1.7629374 2.1673841 0.92016536 2.1673841 1.0218661 2.1673841 1.960789 0.95949364 1.960789 ] [ 0.14075187 0.8042617 3.184263 1.2464463 3.184263 1.2464463 3.184263 1.2464463 0.9948123 1.1837814 0.9948123 ] [ 1.7629374 1.5883366 1.7629374 2.1673841 0.92016536 2.1673841 0.92016536 2.1673841 1.960789 0.91199803 1.960789 ] [ 1.1003962 1.4763913 3.184263 0.8042617 3.184263 1.1837814 3.184263 1.1837814 1.5050237 1.1837814 0.9948123 ] [ 1.03184 1.0655438 1.3813363 2.1673841 1.3813363 2.1673841 1.3813363 2.1673841 1.960789 2.2032478 1.960789 ] [ 1.1003962 1.4763913 1.3632267 0.93434733 1.5050237 1.5227888 1.5050237 1.5227888 1.5050237 1.5227888 0.9948123 ] [ 0.976603 0.71670264 1.3813363 0.55276656 1.3813363 0.59283984 1.8485135 1.5555848 1.8485135 2.2032478 1.8485135 ] [ 1.1003962 1.4763913 1.3632267 0.93434733 1.5050237 1.5227888 1.5050237 1.5227888 1.5050237 1.5227888 0.47057867] [ 0.976603 0.67326826 1.3813363 0.558137 1.3813363 0.3859447 1.8485135 1.5555848 1.8485135 2.2032478 1.8485135 ] [ 1.2194493 0.93434733 1.2194493 0.93434733 0.95322025 1.5227888 0.77241296 1.5227888 0.77241296 1.5227888 0.47057867] [ 0.976603 1.611535 0.97138584 0.6239487 1.5810043 0.3859447 1.8485135 1.5555848 1.8485135 1.5555848 1.8485135 ] [ 1.2194493 0.91429305 1.2194493 0.91429305 0.95322025 0.91429305 0.77241296 -0.088875 0.77241296 -0.088875 0.25197092]] [[ 1.9996129 0.54992855 1.9996129 0.9337226 1.7922343 0.9337226 1.7922343 1.1514355 1.9049238 1.1514355 1.9049238 ] [ 1.997063 0.46020573 1.997063 0.5765786 1.997063 0.5765786 1.0683384 1.1416273 1.1556436 1.1416273 1.1556436 ] [ 1.9996129 2.0545797 1.9996129 2.0545797 1.7922343 0.9337226 1.7922343 1.1514355 1.9049238 1.1514355 1.9049238 ] [ 1.997063 0.5443428 1.997063 0.596589 1.997063 
0.596589 1.663337 1.1416273 1.663337 1.3795062 1.3373908 ] [ 0.91800046 2.0545797 0.91800046 2.0545797 1.411747 1.1362486 1.411747 1.1514355 1.9049238 1.1514355 1.9049238 ] [ 1.997063 0.5443428 1.997063 0.596589 1.997063 1.1556462 1.663337 1.1556462 1.663337 1.3795062 1.3373908 ] [ 0.91800046 2.475886 0.91800046 2.475886 0.91800046 1.1362486 0.8806557 0.82073975 1.1023499 0.82073975 1.1023499 ] [ 1.3317478 0.7189983 1.3317478 0.7189983 1.663337 1.1556462 1.663337 1.1556462 1.663337 1.3795062 1.3373908 ] [ 1.2754073 2.475886 1.2754073 2.475886 0.91800046 1.7941722 0.9347581 1.7941722 1.180133 1.7941722 1.180133 ] [ 1.2961407 0.7189983 1.2961407 0.7189983 0.8772339 1.1556462 0.8772339 1.1556462 1.5495946 1.1556462 1.5495946 ] [ 1.7016699 2.475886 1.2754073 2.475886 1.7589109 1.7941722 1.7589109 1.7941722 1.7589109 1.7941722 1.180133 ] [ 0.7973349 0.90799385 0.8772339 0.90799385 0.8772339 0.90799385 0.8772339 0.16982684 1.5495946 1.1446656 1.5495946 ] [ 1.7016699 1.4969442 1.2754073 0.9120329 1.7589109 1.7941722 1.7589109 1.7941722 1.7589109 1.7941722 1.180133 ]]]]; ov_res: [[[[-0.20729081 1.6829196 0.9396622 1.6829196 0.9396622 2.0341482 0.9396622 2.0341482 0.5091875 2.0341482 0.47580186] [ 1.7865171 1.1581557 1.7865171 1.1581557 0.9115695 0.8954403 1.4773896 0.72378325 1.4773896 2.0432634 1.4773896 ] [-0.19905007 1.6829196 0.9396622 1.6829196 0.9396622 2.0341482 1.4930406 2.0341482 1.4930406 2.0341482 1.4930406 ] [ 1.7865171 0.8954403 1.7865171 0.8954403 0.9115695 0.8954403 1.4773896 0.5027401 1.4773896 1.6061509 1.4773896 ] [ 0.6466546 1.6829196 0.6466546 1.6829196 1.3605464 2.0341482 1.4930406 2.0341482 1.4930406 2.0341482 1.4930406 ] [ 0.2659291 1.961197 0.2659291 1.961197 0.151041 0.49503398 1.4773896 0.5027401 1.4773896 1.6061509 1.4773896 ] [ 1.1872143 1.2415239 1.1872143 0.46086466 1.3605464 1.3239527 1.4930406 1.3239527 1.540399 1.9439147 1.540399 ] [ 1.9127344 1.961197 1.1338972 1.961197 1.1338972 1.730602 1.1338972 0.8805133 1.1093862 1.6061509 1.1093862 ] 
[ 1.5341771 1.5723475 1.5341771 1.1038705 1.3605464 1.3239527 1.3605464 1.3239527 1.540399 1.9439147 1.540399 ] [ 1.9127344 1.961197 1.1338972 2.0791845 1.1338972 2.0791845 1.1338972 2.0791845 1.7929387 0.8805133 1.7929387 ] [ 1.5341771 1.5723475 1.5341771 1.1038705 1.1554197 1.1038705 1.1554197 1.2343284 1.540399 2.2324038 1.540399 ] [ 1.9127344 1.730602 1.1338972 2.0791845 1.1338972 2.0791845 1.1338972 2.0791845 1.7929387 1.6691918 1.7929387 ] [ 1.5341771 1.5723475 1.5341771 1.1038705 1.1554197 1.1038705 1.1554197 1.2343284 1.1554197 2.2324038 0.81329286]] [[ 0.14075187 0.8042617 3.184263 1.2464463 3.184263 1.2464463 3.184263 1.2464463 0.8670388 1.1837814 0.8670388 ] [ 1.7629374 1.5883366 1.7629374 2.1673841 0.92016536 2.1673841 1.0218661 2.1673841 1.960789 0.95949364 1.960789 ] [ 0.14075187 0.8042617 3.184263 1.2464463 3.184263 1.2464463 3.184263 1.2464463 0.9948123 1.1837814 0.9948123 ] [ 1.7629374 1.5883366 1.7629374 2.1673841 0.92016536 2.1673841 0.92016536 2.1673841 1.960789 0.91199803 1.960789 ] [ 1.1003962 1.4763913 3.184263 0.8042617 3.184263 1.1837814 3.184263 1.1837814 1.5050237 1.1837814 0.9948123 ] [ 1.03184 1.0655438 1.3813363 2.1673841 1.3813363 2.1673841 1.3813363 2.1673841 1.960789 2.2032478 1.960789 ] [ 1.1003962 1.4763913 1.3632267 0.93434733 1.5050237 1.5227888 1.5050237 1.5227888 1.5050237 1.5227888 0.9948123 ] [ 0.976603 0.71670264 1.3813363 0.55276656 1.3813363 0.59283984 1.8485135 1.5555848 1.8485135 2.2032478 1.8485135 ] [ 1.1003962 1.4763913 1.3632267 0.93434733 1.5050237 1.5227888 1.5050237 1.5227888 1.5050237 1.5227888 0.47057867] [ 0.976603 0.67326826 1.3813363 0.558137 1.3813363 0.3859447 1.8485135 1.5555848 1.8485135 2.2032478 1.8485135 ] [ 1.2194493 0.93434733 1.2194493 0.93434733 0.95322025 1.5227888 0.77241296 1.5227888 0.77241296 1.5227888 0.47057867] [ 0.976603 1.611535 0.97138584 0.6239487 1.5810043 0.3859447 1.8485135 1.5555848 1.8485135 1.5555848 1.8485135 ] [ 1.2194493 0.91429305 1.2194493 0.91429305 0.95322025 0.91429305 
0.77241296 -0.088875 0.77241296 -0.088875 0.25197092]] [[ 1.9996129 0.54992855 1.9996129 0.9337226 1.7922343 0.9337226 1.7922343 1.1514355 1.9049238 1.1514355 1.9049238 ] [ 1.997063 0.46020573 1.997063 0.5765786 1.997063 0.5765786 1.0683384 1.1416273 1.1556436 1.1416273 1.1556436 ] [ 1.9996129 2.0545797 1.9996129 2.0545797 1.7922343 0.9337226 1.7922343 1.1514355 1.9049238 1.1514355 1.9049238 ] [ 1.997063 0.5443428 1.997063 0.596589 1.997063 0.596589 1.663337 1.1416273 1.663337 1.3795062 1.3373908 ] [ 0.91800046 2.0545797 0.91800046 2.0545797 1.411747 1.1362486 1.411747 1.1514355 1.9049238 1.1514355 1.9049238 ] [ 1.997063 0.5443428 1.997063 0.596589 1.997063 1.1556462 1.663337 1.1556462 1.663337 1.3795062 1.3373908 ] [ 0.91800046 2.475886 0.91800046 2.475886 0.91800046 1.1362486 0.8806557 0.82073975 1.1023499 0.82073975 1.1023499 ] [ 1.3317478 0.7189983 1.3317478 0.7189983 1.663337 1.1556462 1.663337 1.1556462 1.663337 1.3795062 1.3373908 ] [ 1.2754073 2.475886 1.2754073 2.475886 0.91800046 1.7941722 0.9347581 1.7941722 1.180133 1.7941722 1.180133 ] [ 1.2961407 0.7189983 1.2961407 0.7189983 0.8772339 1.1556462 0.8772339 1.1556462 1.5495946 1.1556462 1.5495946 ] [ 1.7016699 2.475886 1.2754073 2.475886 1.7589109 1.7941722 1.7589109 1.7941722 1.7589109 1.7941722 1.180133 ] [ 0.7973349 0.90799385 0.8772339 0.90799385 0.8772339 0.90799385 0.8772339 0.16982684 1.5495946 1.1446656 1.5495946 ] [ 1.7016699 1.4969442 1.2754073 0.9120329 1.7589109 1.7941722 1.7589109 1.7941722 1.7589109 1.7941722 1.180133 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': [3, 3], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5818.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%7) fw_re: [[[[1.9242793 2.3073514 1.9242793 1.3452009 1.702587 1.3452009 1.3322226 0.854972 1.3322226 1.4168842 1.3322226 ] [1.702587 2.3073514 1.702587 1.3452009 1.702587 1.3452009 1.4147706 1.1565944 1.3322226 1.5817316 1.3322226 ] [1.702587 2.773976 1.702587 2.773976 1.702587 1.3452009 1.4147706 1.3079222 1.3322226 1.5817316 1.3322226 ] [1.8201396 2.773976 1.8201396 2.773976 1.4147706 1.5499104 1.4147706 1.5499104 0.85055274 1.5817316 0.85055274] [1.8201396 2.773976 1.8201396 2.773976 1.7161986 1.5499104 1.7161986 1.5499104 0.82350236 1.4479239 0.76480633] [1.8201396 1.6361135 1.8201396 1.6361135 1.7161986 1.5499104 1.7161986 1.5499104 1.5566429 1.4479239 1.5566429 ]] [[2.0440218 0.9861393 2.0440218 0.9861393 2.0440218 0.8214763 0.8518414 0.8214763 1.3321632 0.62417763 1.3321632 ] [2.0440218 1.7096988 2.0440218 1.7096988 2.0440218 1.7096988 0.8182964 1.7004157 1.3321632 0.85397714 1.3321632 ] [1.3682424 1.7096988 1.3682424 1.7096988 1.3682424 1.7096988 0.5892804 1.7004157 0.5863088 0.85397714 0.5863088 ] [1.370867 1.7096988 0.6781842 1.7096988 0.1904413 1.7096988 0.5863088 1.7004157 1.1172816 1.0889996 1.1172816 ] [1.370867 1.4677712 1.0511268 1.4189228 1.0511268 0.52096474 0.9169061 1.0889996 1.1172816 1.0889996 1.1172816 ] [1.370867 1.4677712 1.8250653 1.4189228 1.8250653 1.2937666 1.8250653 1.2937666 1.1172816 1.5954933 1.1172816 ]] [[0.8819857 0.4910015 
2.3596742 0.4910015 2.3596742 0.4910015 2.3596742 1.1037445 1.4652733 1.1037445 1.4652733 ] [0.8819857 1.7178786 0.8819857 1.5916808 0.8819857 0.4910015 1.4652733 0.6797368 1.4652733 0.6797368 1.9397117 ] [1.9057134 2.582398 1.9057134 1.5916808 1.9057134 2.01319 1.067227 2.01319 1.5115441 2.01319 1.9397117 ] [1.9057134 2.582398 1.9057134 1.5916808 1.9057134 2.01319 1.2762221 2.01319 1.5115441 2.01319 1.9397117 ] [1.9057134 2.582398 1.9057134 1.3587189 1.9057134 2.4105394 1.2762221 2.4105394 1.5115441 2.4105394 1.6215616 ] [1.3531891 1.6708739 1.3531891 0.59121144 1.3531891 2.4105394 1.2762221 2.4105394 1.2762221 2.4105394 1.6215616 ]]]]; ov_res: [[[[1.9242793 2.3073514 1.9242793 1.3452009 1.702587 1.3452009 1.3322226 0.854972 1.3322226 1.4168842 1.3322226 ] [1.702587 2.3073514 1.702587 1.3452009 1.702587 1.3452009 1.4147706 1.1565944 1.3322226 1.5817316 1.3322226 ] [1.702587 2.773976 1.702587 2.773976 1.702587 1.3452009 1.4147706 1.3079222 1.3322226 1.5817316 1.3322226 ] [1.8201396 2.773976 1.8201396 2.773976 1.4147706 1.5499104 1.4147706 1.5499104 0.85055274 1.5817316 0.85055274] [1.8201396 2.773976 1.8201396 2.773976 1.7161986 1.5499104 1.7161986 1.5499104 0.82350236 1.4479239 0.76480633] [1.8201396 1.6361135 1.8201396 1.6361135 1.7161986 1.5499104 1.7161986 1.5499104 1.5566429 1.4479239 1.5566429 ]] [[2.0440218 0.9861393 2.0440218 0.9861393 2.0440218 0.8214763 0.8518414 0.8214763 1.3321632 0.62417763 1.3321632 ] [2.0440218 1.7096988 2.0440218 1.7096988 2.0440218 1.7096988 0.8182964 1.7004157 1.3321632 0.85397714 1.3321632 ] [1.3682424 1.7096988 1.3682424 1.7096988 1.3682424 1.7096988 0.5892804 1.7004157 0.5863088 0.85397714 0.5863088 ] [1.370867 1.7096988 0.6781842 1.7096988 0.1904413 1.7096988 0.5863088 1.7004157 1.1172816 1.0889996 1.1172816 ] [1.370867 1.4677712 1.0511268 1.4189228 1.0511268 0.52096474 0.9169061 1.0889996 1.1172816 1.0889996 1.1172816 ] [1.370867 1.4677712 1.8250653 1.4189228 1.8250653 1.2937666 1.8250653 1.2937666 1.1172816 1.5954933 
1.1172816 ]] [[0.8819857 0.4910015 2.3596742 0.4910015 2.3596742 0.4910015 2.3596742 1.1037445 1.4652733 1.1037445 1.4652733 ] [0.8819857 1.7178786 0.8819857 1.5916808 0.8819857 0.4910015 1.4652733 0.6797368 1.4652733 0.6797368 1.9397117 ] [1.9057134 2.582398 1.9057134 1.5916808 1.9057134 2.01319 1.067227 2.01319 1.5115441 2.01319 1.9397117 ] [1.9057134 2.582398 1.9057134 1.5916808 1.9057134 2.01319 1.2762221 2.01319 1.5115441 2.01319 1.9397117 ] [1.9057134 2.582398 1.9057134 1.3587189 1.9057134 2.4105394 1.2762221 2.4105394 1.5115441 2.4105394 1.6215616 ] [1.3531891 1.6708739 1.3531891 0.59121144 1.3531891 2.4105394 1.2762221 2.4105394 1.2762221 2.4105394 1.6215616 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': [2, 1], 'stride': [2, 1], 'padding': 0} ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5820.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.kernel_size : int[] = prim::Constant[value=[2, 1]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.kernel_size, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[ 0.10820696 0.9254056 -0.1304293 0.8456369 0.6284779 0.0429311 -0.04388537 0.31082457 0.86392874 0.16095197 2.4304714 1.5953416 0.59371823 -0.90866584 0.86328155] [ 1.1314994 0.9254056 0.12652677 0.42579824 0.6284779 -0.65176046 0.0847719 1.7311096 0.86392874 0.16095197 1.1957835 -0.07618173 0.67829984 0.8945377 0.86328155] [ 1.1314994 1.612012 0.12652677 0.1930523 0.7427646 -0.34138045 0.4484443 1.7311096 -0.17191565 1.9130831 1.1957835 -0.02302312 0.67829984 1.6976429 0.47954252] [ 0.04543685 1.612012 1.1674384 -0.5710744 0.7427646 0.34308782 0.4484443 -0.193632 -0.17191565 1.9130831 1.8133532 2.3195777 2.144948 1.6976429 0.3154491 ] [ 0.5783491 -0.00864873 1.3039263 0.46129116 0.510431 0.34308782 -1.3818744 -0.193632 0.77244276 0.05079895 1.8133532 2.3195777 2.144948 0.8569182 0.33048135] [ 0.5783491 1.2144854 1.3039263 0.46129116 -0.20262685 0.00570253 -1.3416834 -1.0603976 0.77244276 0.05079895 0.11399189 1.337145 -0.62399095 1.0880983 0.77290505] [-0.2969185 1.2144854 0.6732949 -0.4170135 -0.09315027 -0.26801065 0.13294935 1.3275222 0.53507614 1.5408599 0.8179761 1.337145 -0.02907932 1.0880983 0.77290505]] [[-0.19086033 0.5626893 1.4040583 -1.4594796 0.8284952 1.4110805 -0.05052555 0.7574493 0.09140645 -0.6416472 0.7440411 -0.09024788 1.4334046 -0.20973031 0.4287023 ] [ 0.49142435 0.25048643 1.4040583 -0.11060046 1.0557582 1.4110805 -0.05052555 0.7574493 -0.5718179 0.3614913 0.39887246 0.0311049 
1.4334046 -0.9011611 0.7308654 ] [ 0.49142435 1.2738249 0.14039153 -0.11060046 1.0557582 0.9248463 -0.62898153 -0.29418185 -0.5718179 0.3614913 0.39887246 0.1482964 0.666108 -0.14645751 1.0097991 ] [ 0.5410684 1.2738249 -0.2098557 -0.3290194 0.09175117 0.9248463 3.0040648 -0.29418185 0.7998612 -0.44408202 -1.0702565 1.1938621 -0.3614003 0.80303264 1.4797589 ] [ 0.5410684 0.7323949 -0.2098557 -0.74399465 1.9343922 -0.8093777 3.0040648 -0.35763222 0.7998612 -0.2922977 0.83573353 1.1938621 -0.3614003 0.80303264 1.4797589 ] [ 0.5180995 1.8873472 -0.2119157 -0.09744447 1.9343922 0.85383093 1.8925915 0.5243312 0.21736802 -0.2922977 0.83573353 0.3704858 -1.3430891 0.27591714 -0.8847722 ] [-0.26100567 1.8873472 -0.5149211 -0.09744447 0.3952328 0.85383093 1.3444064 0.5243312 0.21736802 -0.8645444 0.57931674 1.2190729 0.7795131 1.5364108 -0.1834854 ]] [[-1.321565 1.1909175 0.85179144 1.2876034 0.15462056 1.7756597 0.85520005 -0.68170196 -0.9443383 0.6792702 1.805976 0.0305612 1.2180593 0.13348387 -0.06282756] [-1.3107114 1.1909175 0.85179144 1.2876034 0.15462056 1.7756597 0.85520005 0.13721383 0.6398406 -0.11239046 1.805976 -0.33182538 -0.25051185 0.13348387 -0.6524267 ] [-0.6571338 0.73779243 -0.2626865 0.6723621 -0.76655245 1.2484634 1.8041954 0.13721383 0.6398406 -0.1877622 -0.10766342 1.638246 -0.5814276 -0.7548618 0.05059103] [ 0.9495694 0.73779243 0.5160345 0.6723621 -0.76655245 0.4825562 1.8041954 2.56228 -0.29623088 0.3164102 0.01324193 1.638246 0.9633768 1.8075776 1.7360146 ] [ 0.9495694 1.1630423 0.7132911 0.56643695 1.3425491 0.94206494 -0.1677924 2.56228 0.63796365 0.38021272 0.79055303 0.1858296 0.9633768 1.8075776 1.7360146 ] [ 1.4713981 1.2322774 0.73875475 -0.45947832 1.3425491 0.94206494 0.98582417 -0.19149758 0.63796365 0.38021272 0.79055303 0.1858296 2.4119542 0.07470748 0.32696223] [ 1.4713981 1.2322774 0.73875475 0.3480607 0.6506534 -0.12332618 0.98582417 0.25268608 0.35959134 -0.00597787 0.28512233 -0.34332862 2.4119542 0.07470748 1.0889438 ]]]]; 
ov_res: [[[[ 0.10820696 0.9254056 -0.1304293 0.8456369 0.6284779 0.0429311 -0.04388537 0.31082457 0.86392874 0.16095197 2.4304714 1.5953416 0.59371823 -0.90866584 0.86328155] [ 1.1314994 0.9254056 0.12652677 0.42579824 0.6284779 -0.65176046 0.0847719 1.7311096 0.86392874 0.16095197 1.1957835 -0.07618173 0.67829984 0.8945377 0.86328155] [ 1.1314994 1.612012 0.12652677 0.1930523 0.7427646 -0.34138045 0.4484443 1.7311096 -0.17191565 1.9130831 1.1957835 -0.02302312 0.67829984 1.6976429 0.47954252] [ 0.04543685 1.612012 1.1674384 -0.5710744 0.7427646 0.34308782 0.4484443 -0.193632 -0.17191565 1.9130831 1.8133532 2.3195777 2.144948 1.6976429 0.3154491 ] [ 0.5783491 -0.00864873 1.3039263 0.46129116 0.510431 0.34308782 -1.3818744 -0.193632 0.77244276 0.05079895 1.8133532 2.3195777 2.144948 0.8569182 0.33048135] [ 0.5783491 1.2144854 1.3039263 0.46129116 -0.20262685 0.00570253 -1.3416834 -1.0603976 0.77244276 0.05079895 0.11399189 1.337145 -0.62399095 1.0880983 0.77290505] [-0.2969185 1.2144854 0.6732949 -0.4170135 -0.09315027 -0.26801065 0.13294935 1.3275222 0.53507614 1.5408599 0.8179761 1.337145 -0.02907932 1.0880983 0.77290505]] [[-0.19086033 0.5626893 1.4040583 -1.4594796 0.8284952 1.4110805 -0.05052555 0.7574493 0.09140645 -0.6416472 0.7440411 -0.09024788 1.4334046 -0.20973031 0.4287023 ] [ 0.49142435 0.25048643 1.4040583 -0.11060046 1.0557582 1.4110805 -0.05052555 0.7574493 -0.5718179 0.3614913 0.39887246 0.0311049 1.4334046 -0.9011611 0.7308654 ] [ 0.49142435 1.2738249 0.14039153 -0.11060046 1.0557582 0.9248463 -0.62898153 -0.29418185 -0.5718179 0.3614913 0.39887246 0.1482964 0.666108 -0.14645751 1.0097991 ] [ 0.5410684 1.2738249 -0.2098557 -0.3290194 0.09175117 0.9248463 3.0040648 -0.29418185 0.7998612 -0.44408202 -1.0702565 1.1938621 -0.3614003 0.80303264 1.4797589 ] [ 0.5410684 0.7323949 -0.2098557 -0.74399465 1.9343922 -0.8093777 3.0040648 -0.35763222 0.7998612 -0.2922977 0.83573353 1.1938621 -0.3614003 0.80303264 1.4797589 ] [ 0.5180995 1.8873472 -0.2119157 
-0.09744447 1.9343922 0.85383093 1.8925915 0.5243312 0.21736802 -0.2922977 0.83573353 0.3704858 -1.3430891 0.27591714 -0.8847722 ] [-0.26100567 1.8873472 -0.5149211 -0.09744447 0.3952328 0.85383093 1.3444064 0.5243312 0.21736802 -0.8645444 0.57931674 1.2190729 0.7795131 1.5364108 -0.1834854 ]] [[-1.321565 1.1909175 0.85179144 1.2876034 0.15462056 1.7756597 0.85520005 -0.68170196 -0.9443383 0.6792702 1.805976 0.0305612 1.2180593 0.13348387 -0.06282756] [-1.3107114 1.1909175 0.85179144 1.2876034 0.15462056 1.7756597 0.85520005 0.13721383 0.6398406 -0.11239046 1.805976 -0.33182538 -0.25051185 0.13348387 -0.6524267 ] [-0.6571338 0.73779243 -0.2626865 0.6723621 -0.76655245 1.2484634 1.8041954 0.13721383 0.6398406 -0.1877622 -0.10766342 1.638246 -0.5814276 -0.7548618 0.05059103] [ 0.9495694 0.73779243 0.5160345 0.6723621 -0.76655245 0.4825562 1.8041954 2.56228 -0.29623088 0.3164102 0.01324193 1.638246 0.9633768 1.8075776 1.7360146 ] [ 0.9495694 1.1630423 0.7132911 0.56643695 1.3425491 0.94206494 -0.1677924 2.56228 0.63796365 0.38021272 0.79055303 0.1858296 0.9633768 1.8075776 1.7360146 ] [ 1.4713981 1.2322774 0.73875475 -0.45947832 1.3425491 0.94206494 0.98582417 -0.19149758 0.63796365 0.38021272 0.79055303 0.1858296 2.4119542 0.07470748 0.32696223] [ 1.4713981 1.2322774 0.73875475 0.3480607 0.6506534 -0.12332618 0.98582417 0.25268608 0.35959134 -0.00597787 0.28512233 -0.34332862 2.4119542 0.07470748 1.0889438 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5822.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %3 : int[] = prim::Constant[value=[0, 0]]() %4 : int[] = prim::Constant[value=[1, 1]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %4, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%7) fw_re: [[[[ 1.3806571 1.5496285 1.3806571 1.5496285 1.1609392 1.5129561 1.6056876 1.5129561 1.6056876 1.1329335 1.6056876 ] [ 2.2387667 1.562373 2.2387667 1.6046131 1.4206041 1.9831346 0.74955136 1.9831346 1.5658236 1.9831346 1.5658236 ] [ 1.3806571 1.9493957 1.3806571 1.9493957 1.1609392 1.5129561 1.1609392 1.5129561 1.1609392 0.80966717 0.28010255] [ 2.95251 0.33473966 2.95251 1.6046131 2.95251 1.6046131 0.5090624 1.6046131 1.5658236 0.72063947 1.5658236 ] [ 1.3806571 1.9493957 1.3806571 1.9493957 1.1609392 1.9324195 1.1609392 1.9324195 1.1969528 0.80966717 1.1969528 ] [ 2.95251 0.38897228 2.95251 1.6046131 2.95251 1.6046131 1.3180687 1.6046131 1.5658236 0.6941229 1.5658236 ] [-0.358873 1.9493957 -0.1490901 1.9493957 0.4870537 1.9324195 1.0888782 1.9324195 1.6203151 1.0754592 1.6203151 ] [ 2.95251 1.6088139 2.95251 1.6088139 2.95251 1.0915612 1.3180687 1.0915612 1.3180687 1.0915612 1.9651307 ] [ 1.0487305 1.747901 1.0487305 1.9324195 0.5762008 1.9324195 1.0888782 1.9324195 1.6203151 1.0915102 1.6203151 ] [ 0.747617 1.6088139 1.0753187 1.6616002 1.0753187 1.6616002 1.3180687 1.6616002 1.3180687 1.5903583 1.9651307 ] [ 1.1183267 1.548568 1.1183267 1.0641595 0.5762008 1.0754592 1.0888782 1.0754592 1.6203151 1.0915102 1.6203151 ]] [[ 1.8533994 0.99477315 0.69986594 1.702618 1.2123451 1.702618 1.2123451 1.702618 1.2123451 0.94945514 0.578483 ] [ 1.441049 1.8121848 1.498498 1.8121848 
1.498498 1.8121848 1.498498 1.7343932 2.0127895 1.0159502 2.261208 ] [ 1.8533994 1.2418107 0.69986594 1.702618 1.2123451 1.702618 1.2123451 1.702618 1.2123451 0.94945514 1.1999781 ] [ 1.441049 2.1919012 2.145001 2.1919012 2.145001 1.8121848 2.145001 1.7343932 2.0127895 0.6460596 2.0127895 ] [ 1.8533994 1.2418107 0.80445695 1.702618 0.80445695 1.7699876 1.1999781 1.7699876 1.1999781 1.7699876 1.1999781 ] [ 2.1607444 2.1919012 2.1607444 2.1919012 2.145001 1.7343932 2.145001 1.7343932 2.0127895 2.518866 2.0127895 ] [ 1.4231668 1.2418107 0.80445695 1.2418107 0.80445695 1.7699876 1.1999781 1.7699876 1.1999781 1.7699876 1.8842157 ] [ 2.1607444 2.1919012 2.1607444 2.1919012 2.145001 1.3868247 2.145001 1.3868247 0.24978894 2.518866 0.5607833 ] [ 0.80445695 0.69647646 0.80445695 0.73472744 1.0298271 1.7699876 1.0298271 1.7699876 1.3874797 1.7699876 1.8842157 ] [ 2.1607444 0.75212795 2.1607444 1.7280586 1.4185362 1.7280586 0.81660646 1.7280586 0.73639494 2.518866 0.73639494] [ 1.1931432 0.4749493 1.1931432 1.0665548 1.0298271 1.0665548 1.0298271 1.0665548 1.3874797 0.7899996 1.8842157 ]] [[ 2.0892172 1.5181932 2.0892172 1.5181932 2.0892172 1.1387972 1.400078 1.1387972 1.400078 0.95839816 2.050277 ] [ 1.6964849 1.7679564 1.6964849 1.3729305 1.8708626 0.23053697 2.7323737 0.23053697 2.7323737 1.0660877 2.7323737 ] [ 2.0892172 1.5181932 2.0892172 1.5181932 2.0892172 1.1387972 1.400078 1.1387972 2.2876737 0.95839816 2.2876737 ] [ 1.6964849 1.7679564 1.6964849 1.3729305 1.6964849 0.5345397 2.7323737 0.37908182 2.7323737 1.0660877 2.7323737 ] [ 0.90901697 2.4344802 0.90901697 2.4344802 0.90901697 1.1387972 1.400078 1.1387972 2.2876737 0.95839816 2.2876737 ] [ 1.6964849 1.7679564 1.6964849 1.209595 2.0412107 0.82657474 2.7323737 0.82657474 2.7323737 1.290618 2.7323737 ] [ 0.90901697 2.4344802 0.90901697 2.4344802 0.90901697 0.79233354 1.2788079 1.5815467 2.2876737 1.5815467 2.2876737 ] [ 0.7318233 0.991428 1.4230764 0.82657474 2.0412107 2.5488195 2.0412107 2.5488195 2.0412107 
2.5488195 0.9303857 ] [ 2.7920458 3.035597 2.7920458 3.035597 1.4807272 0.79233354 1.4807272 1.5815467 1.9056215 1.5815467 1.9056215 ] [ 0.6986618 1.1970178 0.6986618 1.1970178 2.0412107 2.5488195 2.0412107 2.5488195 2.0412107 2.5488195 0.50528824] [ 2.7920458 3.035597 2.7920458 3.035597 1.6850166 1.557098 1.6850166 1.6735916 1.9056215 1.6735916 1.9056215 ]]]]; ov_res: [[[[ 1.3806571 1.5496285 1.3806571 1.5496285 1.1609392 1.5129561 1.6056876 1.5129561 1.6056876 1.1329335 1.6056876 ] [ 2.2387667 1.562373 2.2387667 1.6046131 1.4206041 1.9831346 0.74955136 1.9831346 1.5658236 1.9831346 1.5658236 ] [ 1.3806571 1.9493957 1.3806571 1.9493957 1.1609392 1.5129561 1.1609392 1.5129561 1.1609392 0.80966717 0.28010255] [ 2.95251 0.33473966 2.95251 1.6046131 2.95251 1.6046131 0.5090624 1.6046131 1.5658236 0.72063947 1.5658236 ] [ 1.3806571 1.9493957 1.3806571 1.9493957 1.1609392 1.9324195 1.1609392 1.9324195 1.1969528 0.80966717 1.1969528 ] [ 2.95251 0.38897228 2.95251 1.6046131 2.95251 1.6046131 1.3180687 1.6046131 1.5658236 0.6941229 1.5658236 ] [-0.358873 1.9493957 -0.1490901 1.9493957 0.4870537 1.9324195 1.0888782 1.9324195 1.6203151 1.0754592 1.6203151 ] [ 2.95251 1.6088139 2.95251 1.6088139 2.95251 1.0915612 1.3180687 1.0915612 1.3180687 1.0915612 1.9651307 ] [ 1.0487305 1.747901 1.0487305 1.9324195 0.5762008 1.9324195 1.0888782 1.9324195 1.6203151 1.0915102 1.6203151 ] [ 0.747617 1.6088139 1.0753187 1.6616002 1.0753187 1.6616002 1.3180687 1.6616002 1.3180687 1.5903583 1.9651307 ] [ 1.1183267 1.548568 1.1183267 1.0641595 0.5762008 1.0754592 1.0888782 1.0754592 1.6203151 1.0915102 1.6203151 ]] [[ 1.8533994 0.99477315 0.69986594 1.702618 1.2123451 1.702618 1.2123451 1.702618 1.2123451 0.94945514 0.578483 ] [ 1.441049 1.8121848 1.498498 1.8121848 1.498498 1.8121848 1.498498 1.7343932 2.0127895 1.0159502 2.261208 ] [ 1.8533994 1.2418107 0.69986594 1.702618 1.2123451 1.702618 1.2123451 1.702618 1.2123451 0.94945514 1.1999781 ] [ 1.441049 2.1919012 2.145001 2.1919012 2.145001 
1.8121848 2.145001 1.7343932 2.0127895 0.6460596 2.0127895 ] [ 1.8533994 1.2418107 0.80445695 1.702618 0.80445695 1.7699876 1.1999781 1.7699876 1.1999781 1.7699876 1.1999781 ] [ 2.1607444 2.1919012 2.1607444 2.1919012 2.145001 1.7343932 2.145001 1.7343932 2.0127895 2.518866 2.0127895 ] [ 1.4231668 1.2418107 0.80445695 1.2418107 0.80445695 1.7699876 1.1999781 1.7699876 1.1999781 1.7699876 1.8842157 ] [ 2.1607444 2.1919012 2.1607444 2.1919012 2.145001 1.3868247 2.145001 1.3868247 0.24978894 2.518866 0.5607833 ] [ 0.80445695 0.69647646 0.80445695 0.73472744 1.0298271 1.7699876 1.0298271 1.7699876 1.3874797 1.7699876 1.8842157 ] [ 2.1607444 0.75212795 2.1607444 1.7280586 1.4185362 1.7280586 0.81660646 1.7280586 0.73639494 2.518866 0.73639494] [ 1.1931432 0.4749493 1.1931432 1.0665548 1.0298271 1.0665548 1.0298271 1.0665548 1.3874797 0.7899996 1.8842157 ]] [[ 2.0892172 1.5181932 2.0892172 1.5181932 2.0892172 1.1387972 1.400078 1.1387972 1.400078 0.95839816 2.050277 ] [ 1.6964849 1.7679564 1.6964849 1.3729305 1.8708626 0.23053697 2.7323737 0.23053697 2.7323737 1.0660877 2.7323737 ] [ 2.0892172 1.5181932 2.0892172 1.5181932 2.0892172 1.1387972 1.400078 1.1387972 2.2876737 0.95839816 2.2876737 ] [ 1.6964849 1.7679564 1.6964849 1.3729305 1.6964849 0.5345397 2.7323737 0.37908182 2.7323737 1.0660877 2.7323737 ] [ 0.90901697 2.4344802 0.90901697 2.4344802 0.90901697 1.1387972 1.400078 1.1387972 2.2876737 0.95839816 2.2876737 ] [ 1.6964849 1.7679564 1.6964849 1.209595 2.0412107 0.82657474 2.7323737 0.82657474 2.7323737 1.290618 2.7323737 ] [ 0.90901697 2.4344802 0.90901697 2.4344802 0.90901697 0.79233354 1.2788079 1.5815467 2.2876737 1.5815467 2.2876737 ] [ 0.7318233 0.991428 1.4230764 0.82657474 2.0412107 2.5488195 2.0412107 2.5488195 2.0412107 2.5488195 0.9303857 ] [ 2.7920458 3.035597 2.7920458 3.035597 1.4807272 0.79233354 1.4807272 1.5815467 1.9056215 1.5815467 1.9056215 ] [ 0.6986618 1.1970178 0.6986618 1.1970178 2.0412107 2.5488195 2.0412107 2.5488195 2.0412107 2.5488195 
0.50528824] [ 2.7920458 3.035597 2.7920458 3.035597 1.6850166 1.557098 1.6850166 1.6735916 1.9056215 1.6735916 1.9056215 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5824.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.stride, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[ 3.0495248 0.99034137 3.0495248 0.99034137 3.0495248 0.99034137 1.1256198 1.8969127 0.65253586 1.8969127 1.7077935 1.8969127 1.7077935 ] [ 0.6492823 1.877415 0.72158504 0.6272058 0.72158504 0.39272633 1.1143488 0.9895541 1.1219597 0.9895541 1.1219597 0.9895541 1.1219597 ] [ 3.0495248 0.99034137 3.0495248 0.99034137 3.0495248 0.99034137 1.1256198 1.8969127 0.65253586 1.8969127 2.0217006 1.8969127 2.0217006 ] [ 0.49661022 1.877415 1.0393088 1.066994 1.0393088 1.066994 1.0393088 0.9895541 0.39511913 0.9895541 0.26188523 0.99657726 0.26188523] [ 3.0495248 1.2893903 3.0495248 1.2893903 3.0495248 1.2893903 1.0353646 1.8969127 0.65253586 1.9204655 2.748248 1.9204655 2.748248 ] [ 0.49661022 1.877415 1.0393088 1.066994 1.0393088 1.066994 1.0393088 1.3770889 0.29788786 1.3770889 0.13238114 1.3770889 0.13238114] [ 1.4549922 1.8376638 1.4549922 1.8376638 0.8502281 1.2893903 0.50026447 0.5584539 -0.09809163 1.9204655 2.748248 1.9204655 2.748248 ] [ 0.47808146 1.3582985 1.0393088 1.066994 1.0393088 1.066994 1.0393088 1.3770889 0.3610124 1.3770889 0.13238114 1.3770889 0.13238114] [ 1.4549922 1.8376638 1.4549922 1.8376638 1.1752405 1.2893903 1.1752405 2.260147 1.1752405 2.260147 2.748248 2.260147 2.748248 ] [ 1.1758416 1.3582985 1.1758416 1.3574352 1.7890091 1.3574352 1.7890091 1.3770889 1.7890091 1.5696169 0.5230864 1.5696169 0.33757406] [ 1.4549922 1.8376638 1.4549922 1.8376638 1.1752405 0.7000414 
1.1752405 2.260147 1.1752405 2.260147 0.14155188 2.260147 -0.04320283] [ 1.1758416 2.4821918 1.1758416 2.4821918 1.7890091 1.3574352 1.7890091 1.1699053 1.7890091 1.5696169 0.5603385 1.5696169 0.5603385 ] [ 0.5343208 0.28078756 0.5343208 0.2944655 1.1752405 0.7000414 1.1752405 2.260147 1.1752405 2.260147 0.14155188 2.260147 -0.35315165]] [[ 1.858068 1.4525845 1.858068 1.4525845 1.858068 1.4525845 1.3377112 1.1213428 1.3377112 1.1213428 1.3377112 1.1213428 1.3090314 ] [ 2.0780244 2.4996579 2.0780244 2.4996579 1.1503445 2.4996579 1.1503445 1.6646 1.1503445 1.6646 1.3654319 0.98473316 1.3654319 ] [ 1.858068 1.4525845 1.858068 1.4525845 1.858068 1.4525845 1.3377112 1.1213428 1.3377112 1.1213428 1.3377112 1.3131261 1.3090314 ] [ 2.0780244 1.5388548 2.0780244 1.5388548 1.1503445 1.6646 1.1503445 1.6646 1.1503445 1.6646 1.0560992 0.98473316 1.0560992 ] [ 1.858068 1.0169923 1.858068 1.0169923 1.858068 1.0169923 1.3377112 0.6581052 1.3377112 0.7389813 1.3377112 1.3131261 0.86769694] [ 2.038293 1.1483773 2.038293 0.93171996 0.7338104 0.93171996 0.8138562 1.4858813 0.8138562 1.7356837 1.0560992 1.7356837 1.0560992 ] [ 2.3024323 1.0169923 2.3024323 1.2159352 0.46870825 1.2159352 0.46870825 1.2159352 0.86769694 1.8526229 0.86769694 1.8526229 0.86769694] [ 2.038293 1.9817773 2.038293 1.9817773 0.7338104 1.9817773 1.2146003 1.4858813 1.2146003 1.7356837 1.2146003 2.0015922 1.0562581 ] [ 2.3024323 0.73579603 2.3024323 1.2159352 2.298082 1.2159352 2.298082 1.2159352 0.9411505 1.8526229 0.9411505 1.8526229 -0.03640363] [ 2.038293 1.9817773 2.038293 1.9817773 0.9170824 1.9817773 1.4476802 1.4858813 1.4476802 1.7356837 1.4476802 2.0015922 1.0562581 ] [ 2.3024323 1.2984293 2.3024323 1.2984293 2.298082 1.2984293 2.298082 1.2159352 2.0785618 1.8526229 2.0785618 1.8526229 1.0869231 ] [ 0.9170824 1.9817773 0.9170824 1.9817773 0.9170824 1.9817773 1.4476802 1.6642573 1.4476802 1.6642573 1.4476802 2.0015922 1.0562581 ] [ 1.5263839 1.2984293 2.298082 1.2984293 2.298082 1.2984293 2.298082 
0.6962191 2.0785618 0.7097151 2.0785618 1.3616832 1.0869231 ]] [[ 0.6685671 0.63641727 0.6685671 0.63641727 0.9869991 0.24266402 0.9869991 -0.21781884 0.9869991 -0.07259056 0.80292726 2.0298378 0.6168468 ] [ 1.2056218 1.6538858 1.2056218 1.6538858 1.2056218 0.95271313 0.98367006 0.7382729 0.98367006 0.61301845 0.98367006 0.61301845 0.9442609 ] [ 1.7076097 1.5549358 1.7076097 1.5549358 0.9869991 1.088742 0.9869991 1.1449584 0.9869991 1.1449584 0.82841593 2.0298378 0.82841593] [ 0.70075685 1.6538858 0.70075685 1.6538858 1.4549915 0.9928638 1.4549915 0.9928638 1.4549915 0.9928638 1.0215744 0.7337453 0.31518477] [ 1.7076097 1.5549358 1.7076097 1.5549358 0.41919854 1.088742 1.2790974 1.1449584 1.2790974 2.0158339 1.2790974 2.0298378 0.82841593] [ 0.85225844 1.6538858 1.8519335 1.6538858 1.8519335 1.3280299 1.8519335 1.0875571 1.6610737 1.2746297 1.6610737 1.2746297 1.6610737 ] [ 1.7076097 1.5549358 1.7076097 1.5549358 0.9964427 1.088742 1.2790974 1.1449584 1.440849 2.0158339 3.0631595 2.0158339 3.0631595 ] [ 0.89446783 1.3280299 1.8519335 1.3280299 1.8519335 1.3280299 1.8519335 1.0875571 1.6610737 1.2746297 2.4937344 1.2746297 2.4937344 ] [ 1.2456528 1.3076164 1.2456528 0.59938014 0.9964427 0.59938014 1.2790974 0.75237286 1.440849 2.0158339 3.0631595 2.0158339 3.0631595 ] [ 1.9378589 1.3280299 1.9378589 1.3280299 1.9378589 1.3280299 1.8519335 1.0875571 1.6610737 1.4669338 2.4937344 1.4669338 2.4937344 ] [ 0.5642745 1.3076164 0.7409384 0.7747712 0.9964427 0.7747712 1.1330903 0.7747712 1.440849 1.1730152 3.0631595 1.1730152 3.0631595 ] [ 1.9378589 1.1946945 1.9378589 0.9570399 1.9378589 0.9570399 0.5137209 0.9570399 0.44883284 1.4669338 2.4937344 1.4669338 2.4937344 ] [ 0.5642745 1.3076164 0.7409384 0.7747712 0.7409384 0.7747712 0.7409384 0.7747712 0.42749354 1.1730152 0.77786326 1.1730152 0.77786326]]]]; ov_res: [[[[ 3.0495248 0.99034137 3.0495248 0.99034137 3.0495248 0.99034137 1.1256198 1.8969127 0.65253586 1.8969127 1.7077935 1.8969127 1.7077935 ] [ 0.6492823 1.877415 
0.72158504 0.6272058 0.72158504 0.39272633 1.1143488 0.9895541 1.1219597 0.9895541 1.1219597 0.9895541 1.1219597 ] [ 3.0495248 0.99034137 3.0495248 0.99034137 3.0495248 0.99034137 1.1256198 1.8969127 0.65253586 1.8969127 2.0217006 1.8969127 2.0217006 ] [ 0.49661022 1.877415 1.0393088 1.066994 1.0393088 1.066994 1.0393088 0.9895541 0.39511913 0.9895541 0.26188523 0.99657726 0.26188523] [ 3.0495248 1.2893903 3.0495248 1.2893903 3.0495248 1.2893903 1.0353646 1.8969127 0.65253586 1.9204655 2.748248 1.9204655 2.748248 ] [ 0.49661022 1.877415 1.0393088 1.066994 1.0393088 1.066994 1.0393088 1.3770889 0.29788786 1.3770889 0.13238114 1.3770889 0.13238114] [ 1.4549922 1.8376638 1.4549922 1.8376638 0.8502281 1.2893903 0.50026447 0.5584539 -0.09809163 1.9204655 2.748248 1.9204655 2.748248 ] [ 0.47808146 1.3582985 1.0393088 1.066994 1.0393088 1.066994 1.0393088 1.3770889 0.3610124 1.3770889 0.13238114 1.3770889 0.13238114] [ 1.4549922 1.8376638 1.4549922 1.8376638 1.1752405 1.2893903 1.1752405 2.260147 1.1752405 2.260147 2.748248 2.260147 2.748248 ] [ 1.1758416 1.3582985 1.1758416 1.3574352 1.7890091 1.3574352 1.7890091 1.3770889 1.7890091 1.5696169 0.5230864 1.5696169 0.33757406] [ 1.4549922 1.8376638 1.4549922 1.8376638 1.1752405 0.7000414 1.1752405 2.260147 1.1752405 2.260147 0.14155188 2.260147 -0.04320283] [ 1.1758416 2.4821918 1.1758416 2.4821918 1.7890091 1.3574352 1.7890091 1.1699053 1.7890091 1.5696169 0.5603385 1.5696169 0.5603385 ] [ 0.5343208 0.28078756 0.5343208 0.2944655 1.1752405 0.7000414 1.1752405 2.260147 1.1752405 2.260147 0.14155188 2.260147 -0.35315165]] [[ 1.858068 1.4525845 1.858068 1.4525845 1.858068 1.4525845 1.3377112 1.1213428 1.3377112 1.1213428 1.3377112 1.1213428 1.3090314 ] [ 2.0780244 2.4996579 2.0780244 2.4996579 1.1503445 2.4996579 1.1503445 1.6646 1.1503445 1.6646 1.3654319 0.98473316 1.3654319 ] [ 1.858068 1.4525845 1.858068 1.4525845 1.858068 1.4525845 1.3377112 1.1213428 1.3377112 1.1213428 1.3377112 1.3131261 1.3090314 ] [ 2.0780244 
1.5388548 2.0780244 1.5388548 1.1503445 1.6646 1.1503445 1.6646 1.1503445 1.6646 1.0560992 0.98473316 1.0560992 ] [ 1.858068 1.0169923 1.858068 1.0169923 1.858068 1.0169923 1.3377112 0.6581052 1.3377112 0.7389813 1.3377112 1.3131261 0.86769694] [ 2.038293 1.1483773 2.038293 0.93171996 0.7338104 0.93171996 0.8138562 1.4858813 0.8138562 1.7356837 1.0560992 1.7356837 1.0560992 ] [ 2.3024323 1.0169923 2.3024323 1.2159352 0.46870825 1.2159352 0.46870825 1.2159352 0.86769694 1.8526229 0.86769694 1.8526229 0.86769694] [ 2.038293 1.9817773 2.038293 1.9817773 0.7338104 1.9817773 1.2146003 1.4858813 1.2146003 1.7356837 1.2146003 2.0015922 1.0562581 ] [ 2.3024323 0.73579603 2.3024323 1.2159352 2.298082 1.2159352 2.298082 1.2159352 0.9411505 1.8526229 0.9411505 1.8526229 -0.03640363] [ 2.038293 1.9817773 2.038293 1.9817773 0.9170824 1.9817773 1.4476802 1.4858813 1.4476802 1.7356837 1.4476802 2.0015922 1.0562581 ] [ 2.3024323 1.2984293 2.3024323 1.2984293 2.298082 1.2984293 2.298082 1.2159352 2.0785618 1.8526229 2.0785618 1.8526229 1.0869231 ] [ 0.9170824 1.9817773 0.9170824 1.9817773 0.9170824 1.9817773 1.4476802 1.6642573 1.4476802 1.6642573 1.4476802 2.0015922 1.0562581 ] [ 1.5263839 1.2984293 2.298082 1.2984293 2.298082 1.2984293 2.298082 0.6962191 2.0785618 0.7097151 2.0785618 1.3616832 1.0869231 ]] [[ 0.6685671 0.63641727 0.6685671 0.63641727 0.9869991 0.24266402 0.9869991 -0.21781884 0.9869991 -0.07259056 0.80292726 2.0298378 0.6168468 ] [ 1.2056218 1.6538858 1.2056218 1.6538858 1.2056218 0.95271313 0.98367006 0.7382729 0.98367006 0.61301845 0.98367006 0.61301845 0.9442609 ] [ 1.7076097 1.5549358 1.7076097 1.5549358 0.9869991 1.088742 0.9869991 1.1449584 0.9869991 1.1449584 0.82841593 2.0298378 0.82841593] [ 0.70075685 1.6538858 0.70075685 1.6538858 1.4549915 0.9928638 1.4549915 0.9928638 1.4549915 0.9928638 1.0215744 0.7337453 0.31518477] [ 1.7076097 1.5549358 1.7076097 1.5549358 0.41919854 1.088742 1.2790974 1.1449584 1.2790974 2.0158339 1.2790974 2.0298378 0.82841593] 
[ 0.85225844 1.6538858 1.8519335 1.6538858 1.8519335 1.3280299 1.8519335 1.0875571 1.6610737 1.2746297 1.6610737 1.2746297 1.6610737 ] [ 1.7076097 1.5549358 1.7076097 1.5549358 0.9964427 1.088742 1.2790974 1.1449584 1.440849 2.0158339 3.0631595 2.0158339 3.0631595 ] [ 0.89446783 1.3280299 1.8519335 1.3280299 1.8519335 1.3280299 1.8519335 1.0875571 1.6610737 1.2746297 2.4937344 1.2746297 2.4937344 ] [ 1.2456528 1.3076164 1.2456528 0.59938014 0.9964427 0.59938014 1.2790974 0.75237286 1.440849 2.0158339 3.0631595 2.0158339 3.0631595 ] [ 1.9378589 1.3280299 1.9378589 1.3280299 1.9378589 1.3280299 1.8519335 1.0875571 1.6610737 1.4669338 2.4937344 1.4669338 2.4937344 ] [ 0.5642745 1.3076164 0.7409384 0.7747712 0.9964427 0.7747712 1.1330903 0.7747712 1.440849 1.1730152 3.0631595 1.1730152 3.0631595 ] [ 1.9378589 1.1946945 1.9378589 0.9570399 1.9378589 0.9570399 0.5137209 0.9570399 0.44883284 1.4669338 2.4937344 1.4669338 2.4937344 ] [ 0.5642745 1.3076164 0.7409384 0.7747712 0.7409384 0.7747712 0.7409384 0.7747712 0.42749354 1.1730152 0.77786326 1.1730152 0.77786326]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [0, 1]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5826.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.padding : int[] = prim::Constant[value=[0, 1]]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%7) fw_re: [[[[1.9891253 2.7626193 1.9891253 2.2269044 1.9891253 0.7756888 1.3393617 0.7756888 1.3393617 1.4962356 0.7942614 1.4962356 0.7942614 ] [1.1529075 1.1308593 1.1529075 1.1308593 1.1529075 2.0890799 1.4847857 2.2530243 1.4847857 2.2530243 1.4847857 2.2530243 0.7193833 ] [1.9891253 2.7626193 1.9891253 2.2269044 1.9891253 0.7756888 1.1554854 0.7756888 1.301569 1.4962356 1.301569 1.4962356 1.301569 ] [1.1529075 1.2467085 1.1529075 1.2467085 1.1529075 2.0890799 0.98692846 2.2530243 1.8937973 2.2530243 1.8937973 2.2530243 1.8937973 ] [1.4455956 2.2269044 1.4455956 2.2269044 1.4455956 1.2946097 1.1554854 1.2946097 1.301569 1.4962356 1.301569 1.4962356 1.301569 ] [1.1529075 1.9396402 1.1529075 1.9396402 1.1529075 1.9396402 1.049854 2.2530243 1.8937973 2.7688007 1.8937973 2.7688007 1.8937973 ] [1.4455956 1.0228279 1.4455956 1.8000281 1.4455956 1.8000281 1.1554854 1.8000281 1.301569 1.2946097 1.301569 0.78227377 1.301569 ] [0.8862769 1.9396402 1.42683 1.9396402 1.42683 1.9396402 1.42683 2.0198407 1.8937973 2.7688007 1.8937973 2.7688007 1.8937973 ] [1.4455956 1.8169372 1.5604409 1.8169372 1.5604409 1.8000281 1.5604409 1.8000281 0.8882598 1.3506562 0.8882598 1.3506562 0.8882598 ] [0.8862769 1.9396402 1.42683 1.9396402 1.42683 1.9396402 1.42683 1.349929 1.049854 2.7688007 1.4170643 2.7688007 1.4170643 ] [0.8050898 1.8169372 1.5604409 1.8169372 
1.7297918 1.8000281 1.7297918 1.8000281 1.7297918 1.3506562 0.8882598 1.3506562 0.8882598 ]] [[1.2913469 0.4717037 1.2913469 0.455611 1.3402138 0.20720206 1.3402138 1.1019485 1.3402138 1.1019485 1.2165735 1.1019485 1.2165735 ] [1.783015 1.8188035 1.783015 1.8188035 1.7201285 1.8188035 0.8552345 1.3916483 0.16272077 0.76942676 1.2515768 1.2899134 1.2515768 ] [0.62131065 1.751391 1.2806847 1.751391 1.3402138 1.2393395 1.3402138 1.2393395 1.3402138 0.96906006 1.2165735 0.96906006 1.2165735 ] [1.7201285 1.8188035 1.7201285 1.8188035 1.7201285 1.8188035 1.3697599 1.3916483 1.3697599 0.76942676 1.2440547 1.2899134 1.0250703 ] [0.62131065 1.751391 1.5455419 1.751391 1.5455419 1.2393395 1.5455419 1.2393395 1.2165735 0.96906006 1.2165735 0.96906006 1.2165735 ] [1.7201285 1.8188035 1.7201285 1.8188035 1.7201285 1.8188035 1.3697599 1.3916483 1.3697599 0.76411587 1.2440547 1.2899134 0.6890817 ] [1.3045607 1.751391 1.5455419 1.751391 1.5455419 1.3227999 1.5946515 1.3227999 1.5946515 1.3227999 1.5946515 0.8662609 0.66317236] [1.2233753 1.1380925 1.2233753 1.2142696 1.3697599 1.2142696 1.3697599 1.2142696 1.3697599 0.76411587 1.2440547 0.76411587 0.24322097] [1.3045607 0.9246831 1.5455419 0.6754653 1.5455419 1.3227999 1.5946515 1.3227999 1.5946515 1.3227999 1.6527562 2.0012925 1.6527562 ] [1.2233753 1.1380925 1.2233753 1.2142696 1.2233753 1.2142696 0.9533376 1.2142696 0.7534848 0.92705816 0.3664472 0.92705816 0.3664472 ] [1.3045607 0.9246831 1.3045607 0.91755694 1.3045607 1.3227999 1.5946515 1.3227999 1.5946515 1.4180986 1.6527562 2.474862 1.6527562 ]] [[0.9811768 1.0593464 0.9811768 1.0593464 0.9811768 1.0593464 0.45823473 1.0542554 0.45823473 1.0542554 1.2285539 0.9095556 1.2285539 ] [0.59319454 2.0331998 1.8026861 2.5389135 1.8026861 2.5389135 1.8026861 2.5389135 1.0738615 1.739318 1.6840839 1.739318 1.6840839 ] [1.5114154 1.8006974 1.5114154 1.8006974 1.5114154 1.8006974 1.3639241 1.0542554 0.97143245 1.0542554 1.2285539 0.86799306 1.2285539 ] [2.852722 2.0331998 2.852722 
2.5389135 1.8026861 2.5389135 1.8026861 2.5389135 0.96741813 1.739318 1.6840839 1.739318 1.6840839 ] [1.5114154 1.8006974 1.5114154 1.8006974 1.5114154 1.8006974 1.3639241 1.0542554 0.97143245 1.2840446 1.2285539 1.2840446 1.2285539 ] [2.852722 0.39900643 2.852722 2.5389135 0.87495947 2.5389135 0.96741813 3.3100147 0.96741813 3.3100147 1.0560836 3.3100147 1.0560836 ] [1.5114154 2.0540175 1.5114154 2.0540175 1.5114154 2.0540175 1.3639241 0.8774943 1.0019447 1.2840446 1.0019447 1.2840446 0.97143245] [2.852722 1.7437195 2.852722 1.7437195 0.87495947 1.7437195 0.96741813 3.3100147 0.96741813 3.3100147 1.0560836 3.3100147 1.0560836 ] [0.37601316 2.0540175 1.422498 2.0540175 1.422498 2.0540175 1.422498 1.4975946 1.8835137 1.2840446 1.8835137 1.2840446 1.8835137 ] [0.9091368 1.7437195 0.9091368 1.7437195 0.87495947 1.7437195 0.62265414 3.3100147 0.62265414 3.3100147 1.0560836 3.3100147 1.0560836 ] [0.49664092 2.0540175 1.422498 2.0540175 1.422498 2.0540175 1.422498 1.4975946 1.8835137 1.2228758 1.8835137 1.3966054 1.8835137 ]]]]; ov_res: [[[[1.9891253 2.7626193 1.9891253 2.2269044 1.9891253 0.7756888 1.3393617 0.7756888 1.3393617 1.4962356 0.7942614 1.4962356 0.7942614 ] [1.1529075 1.1308593 1.1529075 1.1308593 1.1529075 2.0890799 1.4847857 2.2530243 1.4847857 2.2530243 1.4847857 2.2530243 0.7193833 ] [1.9891253 2.7626193 1.9891253 2.2269044 1.9891253 0.7756888 1.1554854 0.7756888 1.301569 1.4962356 1.301569 1.4962356 1.301569 ] [1.1529075 1.2467085 1.1529075 1.2467085 1.1529075 2.0890799 0.98692846 2.2530243 1.8937973 2.2530243 1.8937973 2.2530243 1.8937973 ] [1.4455956 2.2269044 1.4455956 2.2269044 1.4455956 1.2946097 1.1554854 1.2946097 1.301569 1.4962356 1.301569 1.4962356 1.301569 ] [1.1529075 1.9396402 1.1529075 1.9396402 1.1529075 1.9396402 1.049854 2.2530243 1.8937973 2.7688007 1.8937973 2.7688007 1.8937973 ] [1.4455956 1.0228279 1.4455956 1.8000281 1.4455956 1.8000281 1.1554854 1.8000281 1.301569 1.2946097 1.301569 0.78227377 1.301569 ] [0.8862769 1.9396402 
1.42683 1.9396402 1.42683 1.9396402 1.42683 2.0198407 1.8937973 2.7688007 1.8937973 2.7688007 1.8937973 ] [1.4455956 1.8169372 1.5604409 1.8169372 1.5604409 1.8000281 1.5604409 1.8000281 0.8882598 1.3506562 0.8882598 1.3506562 0.8882598 ] [0.8862769 1.9396402 1.42683 1.9396402 1.42683 1.9396402 1.42683 1.349929 1.049854 2.7688007 1.4170643 2.7688007 1.4170643 ] [0.8050898 1.8169372 1.5604409 1.8169372 1.7297918 1.8000281 1.7297918 1.8000281 1.7297918 1.3506562 0.8882598 1.3506562 0.8882598 ]] [[1.2913469 0.4717037 1.2913469 0.455611 1.3402138 0.20720206 1.3402138 1.1019485 1.3402138 1.1019485 1.2165735 1.1019485 1.2165735 ] [1.783015 1.8188035 1.783015 1.8188035 1.7201285 1.8188035 0.8552345 1.3916483 0.16272077 0.76942676 1.2515768 1.2899134 1.2515768 ] [0.62131065 1.751391 1.2806847 1.751391 1.3402138 1.2393395 1.3402138 1.2393395 1.3402138 0.96906006 1.2165735 0.96906006 1.2165735 ] [1.7201285 1.8188035 1.7201285 1.8188035 1.7201285 1.8188035 1.3697599 1.3916483 1.3697599 0.76942676 1.2440547 1.2899134 1.0250703 ] [0.62131065 1.751391 1.5455419 1.751391 1.5455419 1.2393395 1.5455419 1.2393395 1.2165735 0.96906006 1.2165735 0.96906006 1.2165735 ] [1.7201285 1.8188035 1.7201285 1.8188035 1.7201285 1.8188035 1.3697599 1.3916483 1.3697599 0.76411587 1.2440547 1.2899134 0.6890817 ] [1.3045607 1.751391 1.5455419 1.751391 1.5455419 1.3227999 1.5946515 1.3227999 1.5946515 1.3227999 1.5946515 0.8662609 0.66317236] [1.2233753 1.1380925 1.2233753 1.2142696 1.3697599 1.2142696 1.3697599 1.2142696 1.3697599 0.76411587 1.2440547 0.76411587 0.24322097] [1.3045607 0.9246831 1.5455419 0.6754653 1.5455419 1.3227999 1.5946515 1.3227999 1.5946515 1.3227999 1.6527562 2.0012925 1.6527562 ] [1.2233753 1.1380925 1.2233753 1.2142696 1.2233753 1.2142696 0.9533376 1.2142696 0.7534848 0.92705816 0.3664472 0.92705816 0.3664472 ] [1.3045607 0.9246831 1.3045607 0.91755694 1.3045607 1.3227999 1.5946515 1.3227999 1.5946515 1.4180986 1.6527562 2.474862 1.6527562 ]] [[0.9811768 1.0593464 
0.9811768 1.0593464 0.9811768 1.0593464 0.45823473 1.0542554 0.45823473 1.0542554 1.2285539 0.9095556 1.2285539 ] [0.59319454 2.0331998 1.8026861 2.5389135 1.8026861 2.5389135 1.8026861 2.5389135 1.0738615 1.739318 1.6840839 1.739318 1.6840839 ] [1.5114154 1.8006974 1.5114154 1.8006974 1.5114154 1.8006974 1.3639241 1.0542554 0.97143245 1.0542554 1.2285539 0.86799306 1.2285539 ] [2.852722 2.0331998 2.852722 2.5389135 1.8026861 2.5389135 1.8026861 2.5389135 0.96741813 1.739318 1.6840839 1.739318 1.6840839 ] [1.5114154 1.8006974 1.5114154 1.8006974 1.5114154 1.8006974 1.3639241 1.0542554 0.97143245 1.2840446 1.2285539 1.2840446 1.2285539 ] [2.852722 0.39900643 2.852722 2.5389135 0.87495947 2.5389135 0.96741813 3.3100147 0.96741813 3.3100147 1.0560836 3.3100147 1.0560836 ] [1.5114154 2.0540175 1.5114154 2.0540175 1.5114154 2.0540175 1.3639241 0.8774943 1.0019447 1.2840446 1.0019447 1.2840446 0.97143245] [2.852722 1.7437195 2.852722 1.7437195 0.87495947 1.7437195 0.96741813 3.3100147 0.96741813 3.3100147 1.0560836 3.3100147 1.0560836 ] [0.37601316 2.0540175 1.422498 2.0540175 1.422498 2.0540175 1.422498 1.4975946 1.8835137 1.2840446 1.8835137 1.2840446 1.8835137 ] [0.9091368 1.7437195 0.9091368 1.7437195 0.87495947 1.7437195 0.62265414 3.3100147 0.62265414 3.3100147 1.0560836 3.3100147 1.0560836 ] [0.49664092 2.0540175 1.422498 2.0540175 1.422498 2.0540175 1.422498 1.4975946 1.8835137 1.2228758 1.8835137 1.3966054 1.8835137 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [1, 1], 'padding': [1, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5828.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.padding : int[] = prim::Constant[value=[1, 0]]() %self.stride : int[] = prim::Constant[value=[1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%7) fw_re: [[[[ 1.8039641 0.7298982 1.8039641 1.8052255 1.8039641 1.8052255 3.6796386 1.8052255 3.6796386 1.5600227 3.6796386 ] [ 1.3257307 1.8107044 1.657015 1.516981 1.657015 1.516981 1.657015 1.6764321 1.6036948 2.0166059 1.6036948 ] [ 1.8039641 1.3603075 1.8039641 1.8052255 1.8039641 1.8052255 3.6796386 1.8052255 3.6796386 1.5600227 3.6796386 ] [ 1.5492936 1.6773992 1.657015 1.415867 1.657015 1.415867 1.657015 1.6764321 1.4434916 1.6764321 1.4434916 ] [ 1.8039641 1.6727701 2.160875 1.8052255 2.160875 1.8052255 3.6796386 1.8052255 3.6796386 1.635943 3.6796386 ] [ 1.5492936 1.2896461 1.657015 1.415867 1.657015 1.415867 1.657015 1.6764321 1.4341671 1.6764321 1.4341671 ] [ 1.3625137 1.6727701 2.160875 1.6727701 2.160875 1.2234216 2.160875 1.2234216 1.1661137 1.635943 1.1661137 ] [ 1.8315166 1.2896461 1.5492936 1.2896461 1.3193165 1.2661785 1.627571 1.2661785 1.627571 0.58736014 1.627571 ] [ 1.2149317 1.6727701 2.160875 1.6727701 2.160875 1.2234216 2.160875 1.2234216 1.1661137 1.635943 1.1661137 ] [ 1.8315166 1.2896461 1.3193165 1.2896461 1.3193165 1.2661785 1.627571 1.2661785 1.627571 0.9475897 1.732448 ] [ 1.6495992 0.645695 2.0167847 0.645695 2.0167847 0.645695 2.0167847 0.22568096 0.99622184 1.1501884 0.91434294] [ 1.8315166 0.2928582 1.3193165 1.2661785 1.3193165 1.2661785 1.627571 1.2661785 1.627571 0.9475897 1.732448 ] [ 1.6495992 0.5681284 
1.6495992 0.5681284 1.6495992 -0.11467472 0.99622184 -0.08595906 0.99622184 1.1501884 0.49380726]] [[ 0.41240305 -0.12770736 2.1150773 0.99665344 2.1150773 0.99665344 2.1150773 1.08126 1.7974522 1.08126 1.3562694 ] [ 1.460829 1.8906224 1.2412151 1.7388471 1.2412151 1.7388471 1.719095 1.7388471 1.719095 0.21726157 1.719095 ] [ 0.88534147 1.4406099 2.252441 1.4406099 2.252441 1.4406099 2.252441 1.08126 1.7974522 1.08126 1.3562694 ] [ 2.7569146 1.8906224 1.0588565 0.9804621 1.1685777 0.6457666 1.719095 0.3859396 1.719095 0.5421278 2.376069 ] [ 0.88534147 1.4406099 2.252441 1.4406099 2.252441 1.4406099 2.252441 1.0000157 0.85043 0.73383105 1.2710587 ] [ 2.7569146 0.9804621 1.0588565 0.9804621 1.1685777 0.6457666 1.1685777 0.54269123 1.1685777 1.0212071 2.376069 ] [ 2.1170127 1.4406099 2.252441 1.4406099 2.252441 1.4406099 2.252441 2.364836 0.85043 2.364836 1.2710587 ] [ 2.7569146 0.41441002 1.8468242 1.9125443 1.8468242 1.9125443 0.5734313 1.9125443 0.49637988 1.7989284 2.376069 ] [ 2.1170127 0.9052134 2.1170127 2.194401 2.1170127 2.194401 1.4556111 2.364836 0.85043 2.364836 1.2710587 ] [ 1.8468242 0.21829069 2.228688 1.9125443 2.228688 1.9125443 2.228688 1.9125443 1.7340995 1.7989284 1.3735497 ] [ 2.1170127 -0.08018884 2.1170127 2.194401 2.1170127 2.194401 1.4556111 2.364836 1.2120403 2.364836 1.2120403 ] [ 1.8468242 0.63289636 2.228688 1.9125443 2.228688 1.9125443 2.228688 2.0212321 1.7340995 2.0212321 1.3735497 ] [ 1.1158265 -0.15899527 1.1158265 2.194401 1.1158265 2.194401 0.3354419 2.194401 1.2120403 1.2889633 1.2120403 ]] [[ 1.4893492 0.93693304 1.4893492 0.93693304 1.3775367 2.054593 1.3775367 2.054593 0.06186416 2.054593 0.10891423] [ 1.3788041 2.7463398 1.3393029 2.7463398 1.3393029 2.7463398 1.3393029 1.6915643 1.0226432 1.0990844 1.0226432 ] [ 1.4893492 1.1053137 1.4893492 0.93693304 1.6469299 2.054593 1.6469299 2.054593 1.6469299 2.054593 0.10891423] [ 1.294509 2.7463398 1.3393029 2.7463398 1.3393029 2.7463398 1.3393029 1.6915643 0.9010448 1.0990844 
0.6176594 ] [ 0.29477656 1.3795477 1.3775367 1.3795477 1.6469299 1.3795477 1.6469299 1.103949 1.6469299 1.103949 0.10232912] [ 1.294509 1.8925529 1.3393029 1.8925529 1.3393029 1.7488182 1.3393029 1.6915643 0.9010448 1.085059 1.2590585 ] [ 1.3285323 1.3795477 2.8725152 1.3795477 2.8725152 1.3795477 2.8725152 0.24290477 1.6469299 0.7288458 0.10232912] [ 1.294509 2.1671662 1.294509 2.1671662 1.1812104 2.1671662 1.0326736 1.085059 0.9323887 1.085059 1.2590585 ] [ 1.3285323 1.3795477 2.8725152 1.3795477 2.8725152 1.3795477 2.8725152 0.24951868 1.3682265 0.7288458 1.3682265 ] [ 1.1812104 2.1671662 1.1812104 2.1671662 1.1812104 2.1671662 1.0326736 0.5207049 0.9323887 0.5207049 1.2590585 ] [ 1.3285323 0.9468194 2.8725152 0.42045695 2.8725152 0.24951868 2.8725152 1.381025 1.3720995 2.8815205 1.3720995 ] [ 1.1812104 2.1671662 1.1812104 2.1671662 1.1812104 2.1671662 1.8085078 0.5207049 1.8085078 0.7539403 1.8085078 ] [ 0.21888964 0.9468194 1.429206 0.42045695 1.429206 0.24951868 1.429206 1.381025 1.3720995 2.8815205 1.3720995 ]]]]; ov_res: [[[[ 1.8039641 0.7298982 1.8039641 1.8052255 1.8039641 1.8052255 3.6796386 1.8052255 3.6796386 1.5600227 3.6796386 ] [ 1.3257307 1.8107044 1.657015 1.516981 1.657015 1.516981 1.657015 1.6764321 1.6036948 2.0166059 1.6036948 ] [ 1.8039641 1.3603075 1.8039641 1.8052255 1.8039641 1.8052255 3.6796386 1.8052255 3.6796386 1.5600227 3.6796386 ] [ 1.5492936 1.6773992 1.657015 1.415867 1.657015 1.415867 1.657015 1.6764321 1.4434916 1.6764321 1.4434916 ] [ 1.8039641 1.6727701 2.160875 1.8052255 2.160875 1.8052255 3.6796386 1.8052255 3.6796386 1.635943 3.6796386 ] [ 1.5492936 1.2896461 1.657015 1.415867 1.657015 1.415867 1.657015 1.6764321 1.4341671 1.6764321 1.4341671 ] [ 1.3625137 1.6727701 2.160875 1.6727701 2.160875 1.2234216 2.160875 1.2234216 1.1661137 1.635943 1.1661137 ] [ 1.8315166 1.2896461 1.5492936 1.2896461 1.3193165 1.2661785 1.627571 1.2661785 1.627571 0.58736014 1.627571 ] [ 1.2149317 1.6727701 2.160875 1.6727701 2.160875 1.2234216 
2.160875 1.2234216 1.1661137 1.635943 1.1661137 ] [ 1.8315166 1.2896461 1.3193165 1.2896461 1.3193165 1.2661785 1.627571 1.2661785 1.627571 0.9475897 1.732448 ] [ 1.6495992 0.645695 2.0167847 0.645695 2.0167847 0.645695 2.0167847 0.22568096 0.99622184 1.1501884 0.91434294] [ 1.8315166 0.2928582 1.3193165 1.2661785 1.3193165 1.2661785 1.627571 1.2661785 1.627571 0.9475897 1.732448 ] [ 1.6495992 0.5681284 1.6495992 0.5681284 1.6495992 -0.11467472 0.99622184 -0.08595906 0.99622184 1.1501884 0.49380726]] [[ 0.41240305 -0.12770736 2.1150773 0.99665344 2.1150773 0.99665344 2.1150773 1.08126 1.7974522 1.08126 1.3562694 ] [ 1.460829 1.8906224 1.2412151 1.7388471 1.2412151 1.7388471 1.719095 1.7388471 1.719095 0.21726157 1.719095 ] [ 0.88534147 1.4406099 2.252441 1.4406099 2.252441 1.4406099 2.252441 1.08126 1.7974522 1.08126 1.3562694 ] [ 2.7569146 1.8906224 1.0588565 0.9804621 1.1685777 0.6457666 1.719095 0.3859396 1.719095 0.5421278 2.376069 ] [ 0.88534147 1.4406099 2.252441 1.4406099 2.252441 1.4406099 2.252441 1.0000157 0.85043 0.73383105 1.2710587 ] [ 2.7569146 0.9804621 1.0588565 0.9804621 1.1685777 0.6457666 1.1685777 0.54269123 1.1685777 1.0212071 2.376069 ] [ 2.1170127 1.4406099 2.252441 1.4406099 2.252441 1.4406099 2.252441 2.364836 0.85043 2.364836 1.2710587 ] [ 2.7569146 0.41441002 1.8468242 1.9125443 1.8468242 1.9125443 0.5734313 1.9125443 0.49637988 1.7989284 2.376069 ] [ 2.1170127 0.9052134 2.1170127 2.194401 2.1170127 2.194401 1.4556111 2.364836 0.85043 2.364836 1.2710587 ] [ 1.8468242 0.21829069 2.228688 1.9125443 2.228688 1.9125443 2.228688 1.9125443 1.7340995 1.7989284 1.3735497 ] [ 2.1170127 -0.08018884 2.1170127 2.194401 2.1170127 2.194401 1.4556111 2.364836 1.2120403 2.364836 1.2120403 ] [ 1.8468242 0.63289636 2.228688 1.9125443 2.228688 1.9125443 2.228688 2.0212321 1.7340995 2.0212321 1.3735497 ] [ 1.1158265 -0.15899527 1.1158265 2.194401 1.1158265 2.194401 0.3354419 2.194401 1.2120403 1.2889633 1.2120403 ]] [[ 1.4893492 0.93693304 1.4893492 
0.93693304 1.3775367 2.054593 1.3775367 2.054593 0.06186416 2.054593 0.10891423] [ 1.3788041 2.7463398 1.3393029 2.7463398 1.3393029 2.7463398 1.3393029 1.6915643 1.0226432 1.0990844 1.0226432 ] [ 1.4893492 1.1053137 1.4893492 0.93693304 1.6469299 2.054593 1.6469299 2.054593 1.6469299 2.054593 0.10891423] [ 1.294509 2.7463398 1.3393029 2.7463398 1.3393029 2.7463398 1.3393029 1.6915643 0.9010448 1.0990844 0.6176594 ] [ 0.29477656 1.3795477 1.3775367 1.3795477 1.6469299 1.3795477 1.6469299 1.103949 1.6469299 1.103949 0.10232912] [ 1.294509 1.8925529 1.3393029 1.8925529 1.3393029 1.7488182 1.3393029 1.6915643 0.9010448 1.085059 1.2590585 ] [ 1.3285323 1.3795477 2.8725152 1.3795477 2.8725152 1.3795477 2.8725152 0.24290477 1.6469299 0.7288458 0.10232912] [ 1.294509 2.1671662 1.294509 2.1671662 1.1812104 2.1671662 1.0326736 1.085059 0.9323887 1.085059 1.2590585 ] [ 1.3285323 1.3795477 2.8725152 1.3795477 2.8725152 1.3795477 2.8725152 0.24951868 1.3682265 0.7288458 1.3682265 ] [ 1.1812104 2.1671662 1.1812104 2.1671662 1.1812104 2.1671662 1.0326736 0.5207049 0.9323887 0.5207049 1.2590585 ] [ 1.3285323 0.9468194 2.8725152 0.42045695 2.8725152 0.24951868 2.8725152 1.381025 1.3720995 2.8815205 1.3720995 ] [ 1.1812104 2.1671662 1.1812104 2.1671662 1.1812104 2.1671662 1.8085078 0.5207049 1.8085078 0.7539403 1.8085078 ] [ 0.21888964 0.9468194 1.429206 0.42045695 1.429206 0.24951868 1.429206 1.381025 1.3720995 2.8815205 1.3720995 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': [3, 3], 'stride': [2, 1], 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5830.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.stride : int[] = prim::Constant[value=[2, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3]]() %7 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.stride, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%7) fw_re: [[[[0.44464403 1.322647 2.069804 1.322647 2.069804 1.0127933 2.069804 1.0127933 1.1171929 1.0127933 1.1171929 ] [1.6513689 1.1497127 1.3381052 1.1497127 1.3381052 1.0127933 1.3381052 2.0879562 1.1171929 2.0879562 1.3208218 ] [1.6513689 1.2386981 0.7040513 1.2386981 0.17563573 1.2386981 0.17784472 2.0879562 0.37059042 2.0879562 1.3208218 ] [1.6513689 1.820643 0.7040513 1.2386981 0.4859307 1.2386981 1.0335557 2.0879562 1.0335557 2.0879562 2.577866 ] [0.8029991 1.820643 0.6460405 1.2386981 0.6460405 1.2386981 1.7830518 0.554229 1.7830518 0.554229 2.577866 ] [1.9313399 1.820643 1.9313399 2.3930173 0.6460405 2.3930173 1.7830518 2.3930173 1.7830518 0.554229 2.577866 ]] [[1.6520956 1.7581578 0.96988726 1.7581578 1.2067394 1.7581578 1.2067394 1.9042252 1.2067394 1.9042252 1.4701962 ] [0.96988726 1.7800364 0.96988726 1.7800364 1.2067394 1.7800364 1.2067394 1.9042252 1.2067394 1.9042252 1.4701962 ] [1.5957673 1.7800364 1.5957673 1.7800364 1.5957673 1.7800364 0.65845674 1.0308565 0.65845674 1.4034053 0.65845674] [1.5957673 1.7800364 1.5957673 1.7800364 1.5957673 1.7800364 0.65845674 1.0308565 0.65845674 1.4034053 0.65845674] [1.5957673 0.30091724 1.5957673 1.0197374 1.5957673 1.0197374 0.65845674 1.078763 0.65845674 1.078763 0.65845674] [1.0462314 1.4104251 1.0462314 1.4104251 1.0462314 1.4104251 0.46314543 1.078763 0.46314543 1.078763 0.46314543]] [[1.4168923 1.0549371 
1.023448 2.077429 0.85942006 2.077429 1.745636 2.077429 1.745636 1.9462336 2.120276 ] [0.6381558 0.7793342 0.35689715 2.077429 0.85942006 2.077429 1.745636 2.077429 1.745636 2.4154658 1.745636 ] [1.188708 0.7793342 1.188708 2.077429 1.1679773 2.077429 1.745636 2.077429 1.745636 2.4154658 1.745636 ] [1.188708 1.092096 1.188708 1.092096 1.1679773 1.4140098 1.6144834 1.4140098 1.6144834 2.4154658 1.6144834 ] [1.1937451 1.092096 1.1937451 1.3784186 2.2331576 1.6147257 2.2331576 1.6147257 2.2331576 1.6147257 1.6144834 ] [2.0660658 1.5747472 1.1937451 1.5747472 2.2331576 1.6147257 2.2331576 1.6147257 2.2331576 1.6147257 1.2168558 ]]]]; ov_res: [[[[0.44464403 1.322647 2.069804 1.322647 2.069804 1.0127933 2.069804 1.0127933 1.1171929 1.0127933 1.1171929 ] [1.6513689 1.1497127 1.3381052 1.1497127 1.3381052 1.0127933 1.3381052 2.0879562 1.1171929 2.0879562 1.3208218 ] [1.6513689 1.2386981 0.7040513 1.2386981 0.17563573 1.2386981 0.17784472 2.0879562 0.37059042 2.0879562 1.3208218 ] [1.6513689 1.820643 0.7040513 1.2386981 0.4859307 1.2386981 1.0335557 2.0879562 1.0335557 2.0879562 2.577866 ] [0.8029991 1.820643 0.6460405 1.2386981 0.6460405 1.2386981 1.7830518 0.554229 1.7830518 0.554229 2.577866 ] [1.9313399 1.820643 1.9313399 2.3930173 0.6460405 2.3930173 1.7830518 2.3930173 1.7830518 0.554229 2.577866 ]] [[1.6520956 1.7581578 0.96988726 1.7581578 1.2067394 1.7581578 1.2067394 1.9042252 1.2067394 1.9042252 1.4701962 ] [0.96988726 1.7800364 0.96988726 1.7800364 1.2067394 1.7800364 1.2067394 1.9042252 1.2067394 1.9042252 1.4701962 ] [1.5957673 1.7800364 1.5957673 1.7800364 1.5957673 1.7800364 0.65845674 1.0308565 0.65845674 1.4034053 0.65845674] [1.5957673 1.7800364 1.5957673 1.7800364 1.5957673 1.7800364 0.65845674 1.0308565 0.65845674 1.4034053 0.65845674] [1.5957673 0.30091724 1.5957673 1.0197374 1.5957673 1.0197374 0.65845674 1.078763 0.65845674 1.078763 0.65845674] [1.0462314 1.4104251 1.0462314 1.4104251 1.0462314 1.4104251 0.46314543 1.078763 0.46314543 1.078763 
0.46314543]] [[1.4168923 1.0549371 1.023448 2.077429 0.85942006 2.077429 1.745636 2.077429 1.745636 1.9462336 2.120276 ] [0.6381558 0.7793342 0.35689715 2.077429 0.85942006 2.077429 1.745636 2.077429 1.745636 2.4154658 1.745636 ] [1.188708 0.7793342 1.188708 2.077429 1.1679773 2.077429 1.745636 2.077429 1.745636 2.4154658 1.745636 ] [1.188708 1.092096 1.188708 1.092096 1.1679773 1.4140098 1.6144834 1.4140098 1.6144834 2.4154658 1.6144834 ] [1.1937451 1.092096 1.1937451 1.3784186 2.2331576 1.6147257 2.2331576 1.6147257 2.2331576 1.6147257 1.6144834 ] [2.0660658 1.5747472 1.1937451 1.5747472 2.2331576 1.6147257 2.2331576 1.6147257 2.2331576 1.6147257 1.2168558 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool2d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': [2, 1], 'stride': [2, 1], 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5832.aten_max_pool2d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %3 : int[] = prim::Constant[value=[0, 0]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.kernel_size : int[] = prim::Constant[value=[2, 1]]() %6 : Tensor = aten::max_pool2d(%x.1, %self.kernel_size, %self.kernel_size, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:782:11 return (%6) fw_re: [[[[ 1.6178538e-01 1.4141535e+00 1.0286012e+00 2.2963972e-01 8.1142676e-01 6.5280312e-01 5.0481826e-01 1.7753448e+00 7.9391795e-01 5.8292991e-01 1.7875031e+00 1.9973502e-01 1.2517412e+00 -4.0971112e-01 -1.6097245e+00] [ 1.6178538e-01 1.4141535e+00 1.7504211e-03 1.1847585e+00 2.6192448e+00 3.3289339e-02 5.0481826e-01 1.7753448e+00 7.9391795e-01 5.8292991e-01 5.3448129e-01 -5.4745626e-01 5.8470774e-01 8.9916006e-02 -1.6097245e+00] [-1.2205186e+00 -3.4583497e-01 -6.2241584e-01 1.1847585e+00 2.6192448e+00 1.6667345e+00 1.4753951e-01 2.9335830e-01 -2.6131284e-01 -4.6081446e-02 -6.4583915e-01 -5.4745626e-01 3.8843578e-01 8.9916006e-02 2.2247977e+00] [-7.9569542e-01 -1.1919780e+00 -6.3928729e-01 6.4972550e-01 5.1221943e-01 1.6667345e+00 3.1260395e-01 2.9335830e-01 2.0568691e-01 -3.4258440e-01 2.0545652e+00 9.3029863e-01 -6.2945604e-01 1.8957707e+00 2.2247977e+00] [ 1.1431301e+00 -8.3387546e-02 5.3199917e-01 2.3563705e-02 1.7126839e-01 1.3354737e+00 8.2018799e-01 2.0869581e-01 2.0568691e-01 1.1638384e+00 2.0545652e+00 9.3029863e-01 2.3092616e+00 1.8957707e+00 -1.0085746e+00] [ 1.1431301e+00 1.2751318e+00 5.3199917e-01 -2.8696092e-02 1.1739001e+00 1.5508908e+00 8.2018799e-01 -9.5458883e-01 6.8848693e-01 1.1638384e+00 8.4484893e-01 2.1150942e-01 2.3092616e+00 5.8023733e-01 -1.0085746e+00] [-1.9457574e-01 1.2751318e+00 2.1322841e-02 -2.8696092e-02 1.1739001e+00 1.5508908e+00 3.1888443e-01 1.0637035e+00 6.8848693e-01 
-4.3700400e-01 -1.6299060e-01 8.0463536e-02 1.9600546e-01 5.8023733e-01 3.5284123e-01]] [[-2.3872255e-01 1.3323287e+00 -4.4924092e-01 3.0102551e-01 -5.8876550e-01 1.5433309e+00 1.3437068e+00 1.6729772e+00 4.5717838e-01 1.1496506e-01 1.0135165e+00 8.6021364e-01 -7.9218310e-01 3.9974887e-02 1.5242196e+00] [ 1.8062213e-01 6.9551957e-01 -4.4924092e-01 2.5907740e-01 6.7534345e-01 1.5433309e+00 1.3437068e+00 1.6729772e+00 -4.9040446e-01 4.3845779e-01 1.0135165e+00 8.6021364e-01 -1.5373349e-01 3.9974887e-02 1.5242196e+00] [ 1.8062213e-01 6.9551957e-01 -1.4532360e+00 6.8125658e-02 6.7534345e-01 9.9095935e-01 1.3282609e+00 -9.1528136e-01 -4.9040446e-01 7.1757954e-01 8.2833010e-01 2.8818524e-01 -1.5373349e-01 -5.1051054e-02 2.8315207e-01] [ 9.5286864e-01 3.3287752e-01 1.9652789e-02 6.8125658e-02 -1.3032037e-01 9.9095935e-01 1.3282609e+00 3.6531633e-01 7.3157251e-01 1.9469289e+00 -2.1144895e-01 -7.1829259e-01 -2.8255308e-01 -2.0335920e-01 -1.4853050e-01] [ 9.5286864e-01 3.4134924e-01 1.9652789e-02 -9.0184790e-01 3.8676235e-01 -9.8590916e-01 -2.1423016e-01 3.6531633e-01 7.3157251e-01 1.9469289e+00 4.3526486e-01 1.6516750e+00 2.0113833e+00 -1.1781112e+00 -1.4853050e-01] [ 8.0751014e-01 4.2043474e-01 -7.4703731e-02 -1.5306522e-01 3.8676235e-01 -3.3061825e-02 8.7796283e-01 8.0263400e-01 1.6730744e+00 -7.5179327e-01 4.3526486e-01 1.6516750e+00 2.0113833e+00 -8.0651212e-01 -9.3640834e-02] [ 8.0751014e-01 4.2043474e-01 1.8142620e+00 2.8827229e-01 -4.6341620e-02 -3.3061825e-02 8.7796283e-01 8.0263400e-01 1.6730744e+00 -1.2361093e+00 8.3947986e-01 -8.4328014e-01 7.7040637e-01 -7.6543957e-02 3.9971727e-01]] [[-2.0078012e-01 3.0675036e-01 1.7954040e-01 4.3563992e-02 -2.7450976e-01 -1.3537304e-01 2.0638587e-01 9.9976355e-01 -2.0308538e-01 5.5208504e-01 1.6159097e+00 9.8378348e-01 1.5923852e+00 -8.2162189e-01 7.3228210e-01] [-1.2574587e+00 2.4701126e-01 1.1555471e+00 3.7728256e-01 -2.6619706e-01 4.5421922e-01 2.0638587e-01 9.9976355e-01 -1.3803212e-01 5.5208504e-01 -1.0795932e+00 
9.8378348e-01 3.9352074e-01 -7.6604325e-01 1.0886159e+00] [-3.6543608e-01 -2.5429204e-01 1.1555471e+00 3.7728256e-01 9.9614239e-01 4.5421922e-01 8.8171434e-01 1.5851398e+00 -1.3803212e-01 1.9514185e+00 -7.3279124e-01 4.5956546e-01 -4.1862455e-01 -4.9344015e-01 1.0886159e+00] [ 3.0966488e-01 4.0491015e-01 -4.0978685e-01 2.2154114e+00 9.9614239e-01 1.4600041e+00 8.8171434e-01 1.5851398e+00 -6.2882340e-01 1.9514185e+00 -5.0096393e-01 4.5956546e-01 2.0214711e-01 -3.3030191e-01 1.5043715e+00] [ 3.0966488e-01 4.0491015e-01 -4.0978685e-01 2.2154114e+00 -1.3043296e-02 1.4600041e+00 5.1828200e-01 -3.4122023e-01 5.5720031e-01 4.1914073e-01 -7.5371660e-02 -2.5102505e-01 1.2233253e+00 2.3403347e+00 1.5043715e+00] [ 1.0574208e+00 4.8777029e-01 2.4815957e-01 2.1159921e+00 -1.3043296e-02 1.5522001e+00 6.2226498e-01 -1.8946476e-02 5.5720031e-01 4.1914073e-01 3.4239963e-01 -7.3317647e-01 1.6125599e+00 2.3403347e+00 1.2622106e+00] [ 1.2794433e+00 4.8777029e-01 2.4815957e-01 5.3202164e-01 5.3197491e-01 1.5522001e+00 6.2226498e-01 4.2419396e-02 6.9069445e-01 -1.0454558e+00 8.5911548e-01 6.5680856e-01 1.6125599e+00 1.0403107e+00 3.3241591e-01]]]]; ov_res: [[[[ 1.6178538e-01 1.4141535e+00 1.0286012e+00 2.2963972e-01 8.1142676e-01 6.5280312e-01 5.0481826e-01 1.7753448e+00 7.9391795e-01 5.8292991e-01 1.7875031e+00 1.9973502e-01 1.2517412e+00 -4.0971112e-01 -1.6097245e+00] [ 1.6178538e-01 1.4141535e+00 1.7504211e-03 1.1847585e+00 2.6192448e+00 3.3289339e-02 5.0481826e-01 1.7753448e+00 7.9391795e-01 5.8292991e-01 5.3448129e-01 -5.4745626e-01 5.8470774e-01 8.9916006e-02 -1.6097245e+00] [-1.2205186e+00 -3.4583497e-01 -6.2241584e-01 1.1847585e+00 2.6192448e+00 1.6667345e+00 1.4753951e-01 2.9335830e-01 -2.6131284e-01 -4.6081446e-02 -6.4583915e-01 -5.4745626e-01 3.8843578e-01 8.9916006e-02 2.2247977e+00] [-7.9569542e-01 -1.1919780e+00 -6.3928729e-01 6.4972550e-01 5.1221943e-01 1.6667345e+00 3.1260395e-01 2.9335830e-01 2.0568691e-01 -3.4258440e-01 2.0545652e+00 9.3029863e-01 -6.2945604e-01 
1.8957707e+00 2.2247977e+00] [ 1.1431301e+00 -8.3387546e-02 5.3199917e-01 2.3563705e-02 1.7126839e-01 1.3354737e+00 8.2018799e-01 2.0869581e-01 2.0568691e-01 1.1638384e+00 2.0545652e+00 9.3029863e-01 2.3092616e+00 1.8957707e+00 -1.0085746e+00] [ 1.1431301e+00 1.2751318e+00 5.3199917e-01 -2.8696092e-02 1.1739001e+00 1.5508908e+00 8.2018799e-01 -9.5458883e-01 6.8848693e-01 1.1638384e+00 8.4484893e-01 2.1150942e-01 2.3092616e+00 5.8023733e-01 -1.0085746e+00] [-1.9457574e-01 1.2751318e+00 2.1322841e-02 -2.8696092e-02 1.1739001e+00 1.5508908e+00 3.1888443e-01 1.0637035e+00 6.8848693e-01 -4.3700400e-01 -1.6299060e-01 8.0463536e-02 1.9600546e-01 5.8023733e-01 3.5284123e-01]] [[-2.3872255e-01 1.3323287e+00 -4.4924092e-01 3.0102551e-01 -5.8876550e-01 1.5433309e+00 1.3437068e+00 1.6729772e+00 4.5717838e-01 1.1496506e-01 1.0135165e+00 8.6021364e-01 -7.9218310e-01 3.9974887e-02 1.5242196e+00] [ 1.8062213e-01 6.9551957e-01 -4.4924092e-01 2.5907740e-01 6.7534345e-01 1.5433309e+00 1.3437068e+00 1.6729772e+00 -4.9040446e-01 4.3845779e-01 1.0135165e+00 8.6021364e-01 -1.5373349e-01 3.9974887e-02 1.5242196e+00] [ 1.8062213e-01 6.9551957e-01 -1.4532360e+00 6.8125658e-02 6.7534345e-01 9.9095935e-01 1.3282609e+00 -9.1528136e-01 -4.9040446e-01 7.1757954e-01 8.2833010e-01 2.8818524e-01 -1.5373349e-01 -5.1051054e-02 2.8315207e-01] [ 9.5286864e-01 3.3287752e-01 1.9652789e-02 6.8125658e-02 -1.3032037e-01 9.9095935e-01 1.3282609e+00 3.6531633e-01 7.3157251e-01 1.9469289e+00 -2.1144895e-01 -7.1829259e-01 -2.8255308e-01 -2.0335920e-01 -1.4853050e-01] [ 9.5286864e-01 3.4134924e-01 1.9652789e-02 -9.0184790e-01 3.8676235e-01 -9.8590916e-01 -2.1423016e-01 3.6531633e-01 7.3157251e-01 1.9469289e+00 4.3526486e-01 1.6516750e+00 2.0113833e+00 -1.1781112e+00 -1.4853050e-01] [ 8.0751014e-01 4.2043474e-01 -7.4703731e-02 -1.5306522e-01 3.8676235e-01 -3.3061825e-02 8.7796283e-01 8.0263400e-01 1.6730744e+00 -7.5179327e-01 4.3526486e-01 1.6516750e+00 2.0113833e+00 -8.0651212e-01 -9.3640834e-02] [ 8.0751014e-01 
4.2043474e-01 1.8142620e+00 2.8827229e-01 -4.6341620e-02 -3.3061825e-02 8.7796283e-01 8.0263400e-01 1.6730744e+00 -1.2361093e+00 8.3947986e-01 -8.4328014e-01 7.7040637e-01 -7.6543957e-02 3.9971727e-01]] [[-2.0078012e-01 3.0675036e-01 1.7954040e-01 4.3563992e-02 -2.7450976e-01 -1.3537304e-01 2.0638587e-01 9.9976355e-01 -2.0308538e-01 5.5208504e-01 1.6159097e+00 9.8378348e-01 1.5923852e+00 -8.2162189e-01 7.3228210e-01] [-1.2574587e+00 2.4701126e-01 1.1555471e+00 3.7728256e-01 -2.6619706e-01 4.5421922e-01 2.0638587e-01 9.9976355e-01 -1.3803212e-01 5.5208504e-01 -1.0795932e+00 9.8378348e-01 3.9352074e-01 -7.6604325e-01 1.0886159e+00] [-3.6543608e-01 -2.5429204e-01 1.1555471e+00 3.7728256e-01 9.9614239e-01 4.5421922e-01 8.8171434e-01 1.5851398e+00 -1.3803212e-01 1.9514185e+00 -7.3279124e-01 4.5956546e-01 -4.1862455e-01 -4.9344015e-01 1.0886159e+00] [ 3.0966488e-01 4.0491015e-01 -4.0978685e-01 2.2154114e+00 9.9614239e-01 1.4600041e+00 8.8171434e-01 1.5851398e+00 -6.2882340e-01 1.9514185e+00 -5.0096393e-01 4.5956546e-01 2.0214711e-01 -3.3030191e-01 1.5043715e+00] [ 3.0966488e-01 4.0491015e-01 -4.0978685e-01 2.2154114e+00 -1.3043296e-02 1.4600041e+00 5.1828200e-01 -3.4122023e-01 5.5720031e-01 4.1914073e-01 -7.5371660e-02 -2.5102505e-01 1.2233253e+00 2.3403347e+00 1.5043715e+00] [ 1.0574208e+00 4.8777029e-01 2.4815957e-01 2.1159921e+00 -1.3043296e-02 1.5522001e+00 6.2226498e-01 -1.8946476e-02 5.5720031e-01 4.1914073e-01 3.4239963e-01 -7.3317647e-01 1.6125599e+00 2.3403347e+00 1.2622106e+00] [ 1.2794433e+00 4.8777029e-01 2.4815957e-01 5.3202164e-01 5.3197491e-01 1.5522001e+00 6.2226498e-01 4.2419396e-02 6.9069445e-01 -1.0454558e+00 8.5911548e-01 6.5680856e-01 1.6125599e+00 1.0403107e+00 3.3241591e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': 3, 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5833.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0]]() %3 : int[] = prim::Constant[value=[1]]() %4 : int[] = prim::Constant[value=[3]]() %self.ceil_mode : bool = prim::Constant[value=1]() %6 : Tensor = aten::max_pool1d(%x.1, %4, %3, %2, %3, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%6) fw_re: [[[ 0.33206153 0.33206153 0.33206153 -0.78926253 0.83996224 0.83996224 0.83996224 0.45550412 0.55673456 0.55673456 0.55673456 0.29459178 0.29459178] [ 0.38879192 0.38879192 0.1984018 -0.05989118 0.7517832 0.7517832 0.7517832 0.8083814 0.8083814 1.1412622 1.1412622 1.1412622 0.20145069] [ 0.68505704 0.68505704 1.061517 1.061517 1.061517 2.39439 2.39439 2.39439 0.8158411 0.5878256 0.5878256 -0.3088855 0.80128014]]]; ov_res: [[[ 0.33206153 0.33206153 0.33206153 -0.78926253 0.83996224 0.83996224 0.83996224 0.45550412 0.55673456 0.55673456 0.55673456 0.29459178 0.29459178] [ 0.38879192 0.38879192 0.1984018 -0.05989118 0.7517832 0.7517832 0.7517832 0.8083814 0.8083814 1.1412622 1.1412622 1.1412622 0.20145069] [ 0.68505704 0.68505704 1.061517 1.061517 1.061517 2.39439 2.39439 2.39439 0.8158411 0.5878256 0.5878256 -0.3088855 0.80128014]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': (4,), 'stride': 1, 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5835.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %3 : int[] = prim::Constant[value=[4]]() %self.ceil_mode : bool = prim::Constant[value=1]() %5 : Tensor = aten::max_pool1d(%x.1, %3, %2, %2, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%5) fw_re: [[[ 1.9462247 1.9462247 0.9208786 0.9208786 0.9208786 0.6981593 0.7053588 0.7053588 1.508986 1.508986 1.508986 1.508986 0.05105669 0.04091931] [ 1.4710916 1.4710916 0.6306757 2.6986265 2.6986265 2.6986265 2.6986265 1.066727 1.1340218 1.1340218 1.1340218 1.1340218 -0.33895868 -0.33895868] [ 0.8250661 0.8250661 0.8250661 1.6109476 1.6109476 1.6109476 1.6109476 1.2315996 1.047634 2.0437763 2.0437763 2.0437763 2.0437763 1.2714925 ]]]; ov_res: [[[ 1.9462247 1.9462247 0.9208786 0.9208786 0.9208786 0.6981593 0.7053588 0.7053588 1.508986 1.508986 1.508986 1.508986 0.05105669 0.04091931] [ 1.4710916 1.4710916 0.6306757 2.6986265 2.6986265 2.6986265 2.6986265 1.066727 1.1340218 1.1340218 1.1340218 1.1340218 -0.33895868 -0.33895868] [ 0.8250661 0.8250661 0.8250661 1.6109476 1.6109476 1.6109476 1.6109476 1.2315996 1.047634 2.0437763 2.0437763 2.0437763 2.0437763 1.2714925 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': 4, 'stride': (5,), 'padding': 2} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5837.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %3 : int[] = prim::Constant[value=[2]]() %4 : int[] = prim::Constant[value=[5]]() %5 : int[] = prim::Constant[value=[4]]() %self.ceil_mode : bool = prim::Constant[value=1]() %7 : Tensor = aten::max_pool1d(%x.1, %5, %4, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%7) fw_re: [[[-0.814508 -0.34474775 1.2120488 0.47911757] [-0.333141 -0.05502466 1.1843976 -0.55216366] [ 1.4245111 1.1776284 0.5577828 -0.13990135]]]; ov_res: [[[-0.814508 -0.34474775 1.2120488 0.47911757] [-0.333141 -0.05502466 1.1843976 -0.55216366] [ 1.4245111 1.1776284 0.5577828 -0.13990135]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': 3, 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5839.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0]]() %3 : int[] = prim::Constant[value=[1]]() %4 : int[] = prim::Constant[value=[3]]() %self.ceil_mode : bool = prim::Constant[value=0]() %6 : Tensor = aten::max_pool1d(%x.1, %4, %3, %2, %3, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%6) fw_re: [[[1.2111554 0.8968339 0.8968339 0.56987125 0.5439827 0.6761862 0.6761862 0.6761862 0.30649126 0.30649126 0.8791095 0.8791095 0.8791095 ] [1.0225301 1.0225301 0.8151061 0.8151061 0.8151061 0.7214965 0.7214965 0.7214965 1.0275979 1.4956394 1.4956394 1.4956394 0.9967396 ] [0.16887012 0.16887012 1.4138362 1.4138362 1.4357105 1.4357105 1.4357105 0.34966838 0.32631204 0.32631204 0.32631204 0.21892509 0.9973838 ]]]; ov_res: [[[1.2111554 0.8968339 0.8968339 0.56987125 0.5439827 0.6761862 0.6761862 0.6761862 0.30649126 0.30649126 0.8791095 0.8791095 0.8791095 ] [1.0225301 1.0225301 0.8151061 0.8151061 0.8151061 0.7214965 0.7214965 0.7214965 1.0275979 1.4956394 1.4956394 1.4956394 0.9967396 ] [0.16887012 0.16887012 1.4138362 1.4138362 1.4357105 1.4357105 1.4357105 0.34966838 0.32631204 0.32631204 0.32631204 0.21892509 0.9973838 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': (4,), 'stride': 1, 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5841.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %3 : int[] = prim::Constant[value=[4]]() %self.ceil_mode : bool = prim::Constant[value=0]() %5 : Tensor = aten::max_pool1d(%x.1, %3, %2, %2, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%5) fw_re: [[[ 0.46352512 0.46352512 0.46352512 0.33463758 0.33463758 -0.17064197 -0.17064197 1.1871355 1.1871355 1.1871355 1.1871355 0.41460598 0.5741339 0.5741339 ] [ 1.4388669 1.4388669 0.4647017 0.91163975 0.91163975 0.91163975 0.91163975 0.6710754 0.97045726 0.97045726 0.97045726 0.97045726 0.9087737 0.9087737 ] [ 0.950136 0.950136 0.91199815 0.91199815 0.91199815 1.5700072 1.5700072 1.5700072 1.5700072 0.9566108 2.4404693 2.4404693 2.4404693 2.4404693 ]]]; ov_res: [[[ 0.46352512 0.46352512 0.46352512 0.33463758 0.33463758 -0.17064197 -0.17064197 1.1871355 1.1871355 1.1871355 1.1871355 0.41460598 0.5741339 0.5741339 ] [ 1.4388669 1.4388669 0.4647017 0.91163975 0.91163975 0.91163975 0.91163975 0.6710754 0.97045726 0.97045726 0.97045726 0.97045726 0.9087737 0.9087737 ] [ 0.950136 0.950136 0.91199815 0.91199815 0.91199815 1.5700072 1.5700072 1.5700072 1.5700072 0.9566108 2.4404693 2.4404693 2.4404693 2.4404693 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': 4, 'stride': (5,), 'padding': 2} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5843.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %3 : int[] = prim::Constant[value=[2]]() %4 : int[] = prim::Constant[value=[5]]() %5 : int[] = prim::Constant[value=[4]]() %self.ceil_mode : bool = prim::Constant[value=0]() %7 : Tensor = aten::max_pool1d(%x.1, %5, %4, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%7) fw_re: [[[-0.48763624 0.7375552 1.7410812 0.65370435] [ 1.5528252 1.2940522 2.0328689 0.4211926 ] [ 1.664961 1.6867512 1.3430847 1.1868525 ]]]; ov_res: [[[-0.48763624 0.7375552 1.7410812 0.65370435] [ 1.5528252 1.2940522 2.0328689 0.4211926 ] [ 1.664961 1.6867512 1.3430847 1.1868525 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': 3, 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5845.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %3 : int[] = prim::Constant[value=[0]]() %4 : int[] = prim::Constant[value=[1]]() %5 : int[] = prim::Constant[value=[3]]() %self.ceil_mode : bool = prim::Constant[value=1]() %7 : Tensor = aten::max_pool1d(%x.1, %5, %4, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%7) fw_re: [[[ 1.3211939e+00 1.5381552e+00 1.3211939e+00 -3.4504086e-01 1.3340263e+00 -3.3452702e-01 1.3340263e+00 -3.3452702e-01 1.3340263e+00 5.1096845e-01 8.0911648e-01] [-3.1455292e-03 5.5802757e-01 -1.0438517e-01 1.1864676e+00 -1.0438517e-01 1.1864676e+00 -5.1784503e-01 1.1864676e+00 1.8857677e-01 4.3565825e-01 1.8857677e-01] [ 1.4572570e+00 7.7092648e-01 1.4572570e+00 1.1519743e-01 5.5494398e-01 1.0684763e+00 5.5494398e-01 1.0684763e+00 5.5494398e-01 1.1152409e+00 1.8601798e-04]]]; ov_res: [[[ 1.3211939e+00 1.5381552e+00 1.3211939e+00 -3.4504086e-01 1.3340263e+00 -3.3452702e-01 1.3340263e+00 -3.3452702e-01 1.3340263e+00 5.1096845e-01 8.0911648e-01] [-3.1455292e-03 5.5802757e-01 -1.0438517e-01 1.1864676e+00 -1.0438517e-01 1.1864676e+00 -5.1784503e-01 1.1864676e+00 1.8857677e-01 4.3565825e-01 1.8857677e-01] [ 1.4572570e+00 7.7092648e-01 1.4572570e+00 1.1519743e-01 5.5494398e-01 1.0684763e+00 5.5494398e-01 1.0684763e+00 5.5494398e-01 1.1152409e+00 1.8601798e-04]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': (4,), 'stride': 1, 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5847.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %3 : int[] = prim::Constant[value=[1]]() %4 : int[] = prim::Constant[value=[4]]() %self.ceil_mode : bool = prim::Constant[value=1]() %6 : Tensor = aten::max_pool1d(%x.1, %4, %3, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%6) fw_re: [[[ 0.18097466 0.23996712 0.18097466 0.11504995 0.5406179 0.756479 0.5406179 1.5443413 0.5406179 1.5443413 0.5406179 ] [-0.09433452 0.09073498 -0.09433452 0.09073498 -0.09433452 0.09073498 -0.31757548 0.03290257 0.42382005 1.4622332 0.42382005] [ 1.4960136 1.6996377 1.4960136 1.6996377 1.4960136 1.6996377 -0.03856614 1.1044557 2.0566103 0.8659931 2.0566103 ]]]; ov_res: [[[ 0.18097466 0.23996712 0.18097466 0.11504995 0.5406179 0.756479 0.5406179 1.5443413 0.5406179 1.5443413 0.5406179 ] [-0.09433452 0.09073498 -0.09433452 0.09073498 -0.09433452 0.09073498 -0.31757548 0.03290257 0.42382005 1.4622332 0.42382005] [ 1.4960136 1.6996377 1.4960136 1.6996377 1.4960136 1.6996377 -0.03856614 1.1044557 2.0566103 0.8659931 2.0566103 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': 4, 'stride': (5,), 'padding': 2} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5849.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %3 : int[] = prim::Constant[value=[5]]() %4 : int[] = prim::Constant[value=[4]]() %self.ceil_mode : bool = prim::Constant[value=1]() %6 : Tensor = aten::max_pool1d(%x.1, %4, %3, %2, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%6) fw_re: [[[ 0.6143207 1.5741887 2.8102067 0.21589792] [ 1.1822757 1.1585327 0.1313027 -0.09585414] [ 0.9561841 0.85295886 1.0442901 -0.9258374 ]]]; ov_res: [[[ 0.6143207 1.5741887 2.8102067 0.21589792] [ 1.1822757 1.1585327 0.1313027 -0.09585414] [ 0.9561841 0.85295886 1.0442901 -0.9258374 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': 3, 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5851.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %3 : int[] = prim::Constant[value=[0]]() %4 : int[] = prim::Constant[value=[1]]() %5 : int[] = prim::Constant[value=[3]]() %self.ceil_mode : bool = prim::Constant[value=0]() %7 : Tensor = aten::max_pool1d(%x.1, %5, %4, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%7) fw_re: [[[ 0.04707657 3.077654 0.04707657 3.077654 -0.02004782 3.077654 0.19916435 -0.19666459 0.43218416 0.15206586 1.6829439 ] [ 0.8031224 1.1013066 1.109185 1.1013066 2.0680757 1.0883807 2.0680757 1.3267738 2.0680757 1.3267738 0.67102313] [ 2.695718 2.3647559 2.695718 0.92061186 2.695718 0.92061186 1.3073229 0.82146114 0.06678952 0.72671765 0.06678952]]]; ov_res: [[[ 0.04707657 3.077654 0.04707657 3.077654 -0.02004782 3.077654 0.19916435 -0.19666459 0.43218416 0.15206586 1.6829439 ] [ 0.8031224 1.1013066 1.109185 1.1013066 2.0680757 1.0883807 2.0680757 1.3267738 2.0680757 1.3267738 0.67102313] [ 2.695718 2.3647559 2.695718 0.92061186 2.695718 0.92061186 1.3073229 0.82146114 0.06678952 0.72671765 0.06678952]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': (4,), 'stride': 1, 'padding': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5853.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %3 : int[] = prim::Constant[value=[1]]() %4 : int[] = prim::Constant[value=[4]]() %self.ceil_mode : bool = prim::Constant[value=0]() %6 : Tensor = aten::max_pool1d(%x.1, %4, %3, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%6) fw_re: [[[ 0.568988 0.08929942 0.568988 0.8203472 0.568988 0.8203472 0.568988 1.1710377 -0.15637563 1.1710377 -0.15637563] [-0.06303643 0.06756566 -0.06303643 0.06756566 0.8739854 -1.2252989 0.8739854 -0.7073313 0.8739854 0.4810335 0.8739854 ] [ 1.7501197 0.62125635 1.7501197 0.07936165 1.7501197 0.07936165 0.9504961 0.07936165 0.9504961 -0.24324343 0.9504961 ]]]; ov_res: [[[ 0.568988 0.08929942 0.568988 0.8203472 0.568988 0.8203472 0.568988 1.1710377 -0.15637563 1.1710377 -0.15637563] [-0.06303643 0.06756566 -0.06303643 0.06756566 0.8739854 -1.2252989 0.8739854 -0.7073313 0.8739854 0.4810335 0.8739854 ] [ 1.7501197 0.62125635 1.7501197 0.07936165 1.7501197 0.07936165 0.9504961 0.07936165 0.9504961 -0.24324343 0.9504961 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool1d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': 4, 'stride': (5,), 'padding': 2} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5855.aten_max_pool1d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %3 : int[] = prim::Constant[value=[5]]() %4 : int[] = prim::Constant[value=[4]]() %self.ceil_mode : bool = prim::Constant[value=0]() %6 : Tensor = aten::max_pool1d(%x.1, %4, %3, %2, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:696:11 return (%6) fw_re: [[[1.2829903 0.8931795 2.4719567 ] [0.38896012 1.2407689 0.5220801 ] [0.6441757 1.7054366 1.9328853 ]]]; ov_res: [[[1.2829903 0.8931795 2.4719567 ] [0.38896012 1.2407689 0.5220801 ] [0.6441757 1.7054366 1.9328853 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': 1, 'padding': 0} ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5856.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0, 0]]() %3 : int[] = prim::Constant[value=[1, 1, 1]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %3, %2, %3, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%6) fw_re: [[[[[2.4090166 2.4090166 2.3629334 ... 1.9254693 1.9254693 1.9896729 ] [2.4090166 2.4090166 2.3629334 ... 1.9254693 1.9254693 1.5008096 ] [2.1168134 2.161429 2.3629334 ... 1.968148 1.9254693 1.2895026 ] ... [1.3719797 1.3719797 1.3719797 ... 2.9069846 2.9069846 1.6380941 ] [1.7115467 1.7115467 1.3719797 ... 1.6380941 1.6380941 1.6380941 ] [1.7115467 1.7115467 1.2894093 ... 1.9080662 1.9080662 1.0298996 ]] [[2.1168134 1.6873889 1.598989 ... 1.59663 1.5852752 2.2000165 ] [2.1168134 1.6873889 1.598989 ... 1.2385292 1.5008096 1.5008096 ] [2.1168134 1.7284282 1.7284282 ... 1.968148 1.1044945 1.2895026 ] ... [3.20292 3.20292 3.20292 ... 2.9069846 2.9069846 1.6787323 ] [3.20292 3.20292 3.20292 ... 1.7782158 1.6380941 1.8516338 ] [3.20292 3.20292 3.20292 ... 1.9080662 1.9080662 1.8516338 ]] [[2.8917475 2.8917475 2.4215546 ... 1.916134 1.5852752 2.2000165 ] [2.8917475 2.8917475 1.598989 ... 1.9540055 1.9540055 1.3349183 ] [2.1168134 1.7284282 1.7284282 ... 1.9540055 1.9540055 1.3349183 ] ... [3.20292 3.20292 3.20292 ... 2.9069846 2.9069846 2.526203 ] [3.20292 3.20292 3.20292 ... 1.7782158 1.6380941 2.526203 ] [3.20292 3.20292 3.20292 ... 1.7782158 1.2306917 2.526203 ]] ... [[1.8616652 1.8040042 1.518829 ... 1.1637927 1.6168195 2.622427 ] [1.8040042 1.8040042 1.518829 ... 1.3879716 1.6168195 2.622427 ] [1.3794255 1.518829 1.518829 ... 1.7391692 1.7391692 2.622427 ] ... [1.599784 1.599784 1.2193155 ... 
2.3534322 2.3534322 2.3534322 ] [0.85234135 1.1053325 1.1053325 ... 1.8624312 1.8806233 1.8806233 ] [1.2640629 1.1053325 1.1053325 ... 1.8624312 1.8806233 1.8806233 ]] [[1.8616652 1.518829 1.518829 ... 2.0875356 2.5619256 2.622427 ] [1.7646477 1.7646477 1.518829 ... 2.0875356 2.0875356 2.622427 ] [1.7646477 1.7646477 1.518829 ... 1.9015092 1.7391692 2.622427 ] ... [1.599784 1.599784 2.0076897 ... 2.014854 2.014854 2.3684318 ] [1.5708365 1.1085589 1.5870428 ... 1.5878618 1.8806233 2.3684318 ] [1.5708365 1.1085589 1.1085589 ... 1.5609471 1.8806233 1.8806233 ]] [[1.3794255 1.518829 1.818348 ... 2.0875356 2.5619256 2.5619256 ] [1.7646477 1.7646477 1.818348 ... 2.0875356 2.0875356 2.0875356 ] [1.7646477 1.7646477 1.818348 ... 3.3196216 3.3196216 1.24114 ] ... [1.8522133 1.8522133 2.0076897 ... 2.014854 2.014854 2.3684318 ] [1.8522133 1.8522133 1.5870428 ... 0.9549459 1.5155809 2.3684318 ] [1.6912913 1.6912913 1.6912913 ... 1.6015257 1.5133835 0.9119663 ]]] [[[1.4985715 2.4644256 2.4644256 ... 1.8777138 1.5500675 1.7065338 ] [2.5670037 1.5875179 1.5875179 ... 2.0433557 2.0433557 1.7065338 ] [2.5670037 1.6432309 1.6432309 ... 2.0433557 2.0433557 1.5395275 ] ... [1.9098569 1.9098569 2.750075 ... 1.5922984 1.5922984 1.4419742 ] [1.9098569 1.9098569 2.750075 ... 2.163799 1.7651784 1.7651784 ] [1.9098569 1.9098569 2.5332286 ... 2.163799 1.7651784 1.7651784 ]] [[2.4229264 2.4644256 2.4644256 ... 1.5500675 1.5500675 1.7065338 ] [2.5670037 1.2572752 1.2572752 ... 2.0433557 2.0433557 1.7065338 ] [2.5670037 1.6432309 1.6432309 ... 2.0433557 2.0433557 1.3271691 ] ... [2.0220456 2.0220456 2.750075 ... 1.5922984 1.5922984 1.4419742 ] [1.9098569 1.9098569 2.750075 ... 2.163799 1.7651784 1.7651784 ] [1.9098569 1.9098569 2.5332286 ... 2.163799 1.7651784 1.7651784 ]] [[2.4229264 2.4644256 2.4644256 ... 1.5500675 1.5500675 1.7065338 ] [2.4229264 1.2572752 1.2572752 ... 1.5500675 1.5500675 1.7065338 ] [1.8087566 1.2572752 1.2572752 ... 1.2550542 1.3271691 1.3271691 ] ... 
[2.0220456 2.0220456 2.750075 ... 1.4419742 1.4419742 1.4419742 ] [1.9921974 1.9921974 2.750075 ... 2.163799 1.33745 1.4072446 ] [1.9921974 1.9921974 1.9921974 ... 2.163799 1.3718971 1.0092976 ]] ... [[2.2770078 2.2770078 2.0085855 ... 1.700671 1.7548621 1.7548621 ] [2.2770078 2.2770078 2.0085855 ... 1.700671 1.7548621 1.7548621 ] [2.3639417 2.3639417 2.3639417 ... 1.4151318 1.4151318 1.0569597 ] ... [1.5489081 1.8589519 1.8589519 ... 1.6606017 1.5372671 1.5372671 ] [1.5489081 1.8589519 1.8589519 ... 1.1097502 1.0258638 1.0524701 ] [1.5489081 1.8589519 3.058525 ... 1.1841443 1.1841443 1.1794492 ]] [[2.2770078 2.2770078 2.2515535 ... 1.4911058 1.7548621 1.7548621 ] [2.2770078 2.2770078 1.4902387 ... 1.7689892 1.7689892 1.7689892 ] [2.3639417 2.3639417 2.3639417 ... 1.7689892 1.7689892 1.7689892 ] ... [1.5489081 1.8589519 1.8589519 ... 1.8123368 1.8123368 1.5372671 ] [3.412425 3.412425 1.8589519 ... 1.8123368 1.8123368 1.4306536 ] [3.412425 3.412425 3.058525 ... 1.8123368 1.8123368 1.9976627 ]] [[2.420761 2.420761 2.420761 ... 1.9658147 1.9658147 1.7548621 ] [2.420761 2.420761 2.420761 ... 1.7689892 1.7689892 2.5588167 ] [2.2770078 2.2770078 1.8767333 ... 1.7689892 1.7689892 2.5588167 ] ... [1.0205154 1.8227262 1.8227262 ... 1.8123368 1.894295 1.894295 ] [3.412425 3.412425 1.8227262 ... 1.8123368 1.8123368 1.699471 ] [3.412425 3.412425 2.5432222 ... 1.8123368 1.8123368 1.9976627 ]]] [[[1.9742163 2.7153811 2.7153811 ... 1.8068898 1.8068898 1.6030289 ] [1.9742163 2.7153811 2.7153811 ... 1.8068898 1.8068898 1.7265157 ] [1.9742163 2.7153811 2.7153811 ... 1.6157868 1.6157868 1.7265157 ] ... [2.1621082 2.1621082 1.9283769 ... 2.3721476 2.3721476 2.3721476 ] [2.1621082 2.1621082 1.9283769 ... 2.6062827 2.3721476 2.4739847 ] [3.435458 3.435458 2.2467175 ... 2.6062827 1.6654854 2.4739847 ]] [[2.2688541 2.7153811 2.7153811 ... 1.8068898 1.8068898 1.6030289 ] [2.2688541 2.7153811 2.7153811 ... 1.8068898 1.8068898 1.6030289 ] [2.2688541 2.7153811 2.7153811 ... 
0.9813311 0.77335 1.0932418 ] ... [2.1621082 2.1621082 1.9283769 ... 1.8917326 0.91996735 0.86330056] [2.1621082 2.1621082 1.9283769 ... 2.6062827 1.4547238 2.4739847 ] [3.435458 3.435458 2.2467175 ... 2.6062827 1.6654854 2.4739847 ]] [[2.2688541 2.7153811 2.7153811 ... 1.530909 1.530909 1.4667906 ] [2.2688541 2.7153811 2.7153811 ... 0.8743883 1.4667906 1.4667906 ] [2.2688541 2.7153811 2.7153811 ... 0.9813311 0.82332325 1.548604 ] ... [2.1621082 2.1621082 1.9283769 ... 1.8917326 0.91996735 1.7120272 ] [2.1621082 2.1621082 1.9283769 ... 2.6062827 1.4547238 1.4547238 ] [2.1621082 2.1621082 0.73439294 ... 2.6062827 1.4547238 1.4547238 ]] ... [[2.1769166 2.1769166 2.2068875 ... 2.1756344 1.3426589 1.3426589 ] [2.1769166 2.5142722 2.5142722 ... 2.1756344 2.0193827 2.0193827 ] [2.0497456 2.5142722 2.5142722 ... 1.4153572 2.0193827 2.0193827 ] ... [2.080784 2.080784 2.0675194 ... 1.7975417 1.0870689 1.5805478 ] [2.07699 2.0675194 2.0675194 ... 1.7975417 1.5256169 1.5805478 ] [2.07699 1.4556056 1.6788985 ... 1.7975417 1.6053112 2.0496888 ]] [[2.1769166 2.1769166 2.2068875 ... 2.1756344 1.7839329 1.7839329 ] [2.1769166 2.5142722 2.5142722 ... 2.1756344 1.9340594 1.9340594 ] [2.0497456 2.5142722 2.5142722 ... 1.4153572 1.9340594 1.9340594 ] ... [2.5474167 2.5474167 2.0675194 ... 2.6973467 2.6973467 2.6973467 ] [2.5474167 2.5474167 2.0675194 ... 1.7975417 1.5256169 1.1619024 ] [2.5474167 2.5474167 1.6788985 ... 1.7975417 1.6053112 2.0496888 ]] [[2.0585628 2.0585628 2.2068875 ... 2.1756344 1.7839329 1.7839329 ] [2.0497456 2.5142722 2.5142722 ... 2.1756344 1.9340594 1.9340594 ] [2.0497456 2.5142722 2.5142722 ... 1.476847 1.9340594 1.9340594 ] ... [2.5474167 2.9001234 2.9001234 ... 2.6973467 2.6973467 2.6973467 ] [2.5474167 2.9001234 2.9001234 ... 1.7975417 1.5256169 1.2879128 ] [2.5474167 2.5474167 2.4013157 ... 1.7975417 1.6053112 2.0496888 ]]]]]; ov_res: [[[[[2.4090166 2.4090166 2.3629334 ... 1.9254693 1.9254693 1.9896729 ] [2.4090166 2.4090166 2.3629334 ... 
1.9254693 1.9254693 1.5008096 ] [2.1168134 2.161429 2.3629334 ... 1.968148 1.9254693 1.2895026 ] ... [1.3719797 1.3719797 1.3719797 ... 2.9069846 2.9069846 1.6380941 ] [1.7115467 1.7115467 1.3719797 ... 1.6380941 1.6380941 1.6380941 ] [1.7115467 1.7115467 1.2894093 ... 1.9080662 1.9080662 1.0298996 ]] [[2.1168134 1.6873889 1.598989 ... 1.59663 1.5852752 2.2000165 ] [2.1168134 1.6873889 1.598989 ... 1.2385292 1.5008096 1.5008096 ] [2.1168134 1.7284282 1.7284282 ... 1.968148 1.1044945 1.2895026 ] ... [3.20292 3.20292 3.20292 ... 2.9069846 2.9069846 1.6787323 ] [3.20292 3.20292 3.20292 ... 1.7782158 1.6380941 1.8516338 ] [3.20292 3.20292 3.20292 ... 1.9080662 1.9080662 1.8516338 ]] [[2.8917475 2.8917475 2.4215546 ... 1.916134 1.5852752 2.2000165 ] [2.8917475 2.8917475 1.598989 ... 1.9540055 1.9540055 1.3349183 ] [2.1168134 1.7284282 1.7284282 ... 1.9540055 1.9540055 1.3349183 ] ... [3.20292 3.20292 3.20292 ... 2.9069846 2.9069846 2.526203 ] [3.20292 3.20292 3.20292 ... 1.7782158 1.6380941 2.526203 ] [3.20292 3.20292 3.20292 ... 1.7782158 1.2306917 2.526203 ]] ... [[1.8616652 1.8040042 1.518829 ... 1.1637927 1.6168195 2.622427 ] [1.8040042 1.8040042 1.518829 ... 1.3879716 1.6168195 2.622427 ] [1.3794255 1.518829 1.518829 ... 1.7391692 1.7391692 2.622427 ] ... [1.599784 1.599784 1.2193155 ... 2.3534322 2.3534322 2.3534322 ] [0.85234135 1.1053325 1.1053325 ... 1.8624312 1.8806233 1.8806233 ] [1.2640629 1.1053325 1.1053325 ... 1.8624312 1.8806233 1.8806233 ]] [[1.8616652 1.518829 1.518829 ... 2.0875356 2.5619256 2.622427 ] [1.7646477 1.7646477 1.518829 ... 2.0875356 2.0875356 2.622427 ] [1.7646477 1.7646477 1.518829 ... 1.9015092 1.7391692 2.622427 ] ... [1.599784 1.599784 2.0076897 ... 2.014854 2.014854 2.3684318 ] [1.5708365 1.1085589 1.5870428 ... 1.5878618 1.8806233 2.3684318 ] [1.5708365 1.1085589 1.1085589 ... 1.5609471 1.8806233 1.8806233 ]] [[1.3794255 1.518829 1.818348 ... 2.0875356 2.5619256 2.5619256 ] [1.7646477 1.7646477 1.818348 ... 
2.0875356 2.0875356 2.0875356 ] [1.7646477 1.7646477 1.818348 ... 3.3196216 3.3196216 1.24114 ] ... [1.8522133 1.8522133 2.0076897 ... 2.014854 2.014854 2.3684318 ] [1.8522133 1.8522133 1.5870428 ... 0.9549459 1.5155809 2.3684318 ] [1.6912913 1.6912913 1.6912913 ... 1.6015257 1.5133835 0.9119663 ]]] [[[1.4985715 2.4644256 2.4644256 ... 1.8777138 1.5500675 1.7065338 ] [2.5670037 1.5875179 1.5875179 ... 2.0433557 2.0433557 1.7065338 ] [2.5670037 1.6432309 1.6432309 ... 2.0433557 2.0433557 1.5395275 ] ... [1.9098569 1.9098569 2.750075 ... 1.5922984 1.5922984 1.4419742 ] [1.9098569 1.9098569 2.750075 ... 2.163799 1.7651784 1.7651784 ] [1.9098569 1.9098569 2.5332286 ... 2.163799 1.7651784 1.7651784 ]] [[2.4229264 2.4644256 2.4644256 ... 1.5500675 1.5500675 1.7065338 ] [2.5670037 1.2572752 1.2572752 ... 2.0433557 2.0433557 1.7065338 ] [2.5670037 1.6432309 1.6432309 ... 2.0433557 2.0433557 1.3271691 ] ... [2.0220456 2.0220456 2.750075 ... 1.5922984 1.5922984 1.4419742 ] [1.9098569 1.9098569 2.750075 ... 2.163799 1.7651784 1.7651784 ] [1.9098569 1.9098569 2.5332286 ... 2.163799 1.7651784 1.7651784 ]] [[2.4229264 2.4644256 2.4644256 ... 1.5500675 1.5500675 1.7065338 ] [2.4229264 1.2572752 1.2572752 ... 1.5500675 1.5500675 1.7065338 ] [1.8087566 1.2572752 1.2572752 ... 1.2550542 1.3271691 1.3271691 ] ... [2.0220456 2.0220456 2.750075 ... 1.4419742 1.4419742 1.4419742 ] [1.9921974 1.9921974 2.750075 ... 2.163799 1.33745 1.4072446 ] [1.9921974 1.9921974 1.9921974 ... 2.163799 1.3718971 1.0092976 ]] ... [[2.2770078 2.2770078 2.0085855 ... 1.700671 1.7548621 1.7548621 ] [2.2770078 2.2770078 2.0085855 ... 1.700671 1.7548621 1.7548621 ] [2.3639417 2.3639417 2.3639417 ... 1.4151318 1.4151318 1.0569597 ] ... [1.5489081 1.8589519 1.8589519 ... 1.6606017 1.5372671 1.5372671 ] [1.5489081 1.8589519 1.8589519 ... 1.1097502 1.0258638 1.0524701 ] [1.5489081 1.8589519 3.058525 ... 1.1841443 1.1841443 1.1794492 ]] [[2.2770078 2.2770078 2.2515535 ... 
1.4911058 1.7548621 1.7548621 ] [2.2770078 2.2770078 1.4902387 ... 1.7689892 1.7689892 1.7689892 ] [2.3639417 2.3639417 2.3639417 ... 1.7689892 1.7689892 1.7689892 ] ... [1.5489081 1.8589519 1.8589519 ... 1.8123368 1.8123368 1.5372671 ] [3.412425 3.412425 1.8589519 ... 1.8123368 1.8123368 1.4306536 ] [3.412425 3.412425 3.058525 ... 1.8123368 1.8123368 1.9976627 ]] [[2.420761 2.420761 2.420761 ... 1.9658147 1.9658147 1.7548621 ] [2.420761 2.420761 2.420761 ... 1.7689892 1.7689892 2.5588167 ] [2.2770078 2.2770078 1.8767333 ... 1.7689892 1.7689892 2.5588167 ] ... [1.0205154 1.8227262 1.8227262 ... 1.8123368 1.894295 1.894295 ] [3.412425 3.412425 1.8227262 ... 1.8123368 1.8123368 1.699471 ] [3.412425 3.412425 2.5432222 ... 1.8123368 1.8123368 1.9976627 ]]] [[[1.9742163 2.7153811 2.7153811 ... 1.8068898 1.8068898 1.6030289 ] [1.9742163 2.7153811 2.7153811 ... 1.8068898 1.8068898 1.7265157 ] [1.9742163 2.7153811 2.7153811 ... 1.6157868 1.6157868 1.7265157 ] ... [2.1621082 2.1621082 1.9283769 ... 2.3721476 2.3721476 2.3721476 ] [2.1621082 2.1621082 1.9283769 ... 2.6062827 2.3721476 2.4739847 ] [3.435458 3.435458 2.2467175 ... 2.6062827 1.6654854 2.4739847 ]] [[2.2688541 2.7153811 2.7153811 ... 1.8068898 1.8068898 1.6030289 ] [2.2688541 2.7153811 2.7153811 ... 1.8068898 1.8068898 1.6030289 ] [2.2688541 2.7153811 2.7153811 ... 0.9813311 0.77335 1.0932418 ] ... [2.1621082 2.1621082 1.9283769 ... 1.8917326 0.91996735 0.86330056] [2.1621082 2.1621082 1.9283769 ... 2.6062827 1.4547238 2.4739847 ] [3.435458 3.435458 2.2467175 ... 2.6062827 1.6654854 2.4739847 ]] [[2.2688541 2.7153811 2.7153811 ... 1.530909 1.530909 1.4667906 ] [2.2688541 2.7153811 2.7153811 ... 0.8743883 1.4667906 1.4667906 ] [2.2688541 2.7153811 2.7153811 ... 0.9813311 0.82332325 1.548604 ] ... [2.1621082 2.1621082 1.9283769 ... 1.8917326 0.91996735 1.7120272 ] [2.1621082 2.1621082 1.9283769 ... 2.6062827 1.4547238 1.4547238 ] [2.1621082 2.1621082 0.73439294 ... 2.6062827 1.4547238 1.4547238 ]] ... 
[[2.1769166 2.1769166 2.2068875 ... 2.1756344 1.3426589 1.3426589 ] [2.1769166 2.5142722 2.5142722 ... 2.1756344 2.0193827 2.0193827 ] [2.0497456 2.5142722 2.5142722 ... 1.4153572 2.0193827 2.0193827 ] ... [2.080784 2.080784 2.0675194 ... 1.7975417 1.0870689 1.5805478 ] [2.07699 2.0675194 2.0675194 ... 1.7975417 1.5256169 1.5805478 ] [2.07699 1.4556056 1.6788985 ... 1.7975417 1.6053112 2.0496888 ]] [[2.1769166 2.1769166 2.2068875 ... 2.1756344 1.7839329 1.7839329 ] [2.1769166 2.5142722 2.5142722 ... 2.1756344 1.9340594 1.9340594 ] [2.0497456 2.5142722 2.5142722 ... 1.4153572 1.9340594 1.9340594 ] ... [2.5474167 2.5474167 2.0675194 ... 2.6973467 2.6973467 2.6973467 ] [2.5474167 2.5474167 2.0675194 ... 1.7975417 1.5256169 1.1619024 ] [2.5474167 2.5474167 1.6788985 ... 1.7975417 1.6053112 2.0496888 ]] [[2.0585628 2.0585628 2.2068875 ... 2.1756344 1.7839329 1.7839329 ] [2.0497456 2.5142722 2.5142722 ... 2.1756344 1.9340594 1.9340594 ] [2.0497456 2.5142722 2.5142722 ... 1.476847 1.9340594 1.9340594 ] ... [2.5474167 2.9001234 2.9001234 ... 2.6973467 2.6973467 2.6973467 ] [2.5474167 2.9001234 2.9001234 ... 1.7975417 1.5256169 1.2879128 ] [2.5474167 2.5474167 2.4013157 ... 1.7975417 1.6053112 2.0496888 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': [1, 1, 1], 'padding': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5858.aten_max_pool3d, %x.1 : Tensor): %self.ceil_mode : bool = prim::Constant[value=1]() %self.stride : int[] = prim::Constant[value=[1, 1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %5 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.stride, %self.stride, %self.stride, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%5) fw_re: [[[[[1.9005786 2.8274167 2.8274167 ... 0.82312495 0.82312495 0.82312495] [1.968514 2.8274167 2.8274167 ... 2.1823552 2.1823552 1.3095453 ] [1.968514 2.8274167 2.8274167 ... 2.1823552 2.1823552 1.3095453 ] ... [1.1764215 1.1764215 1.8217572 ... 1.3834974 1.5875365 1.5875365 ] [1.1764215 1.334534 1.9157059 ... 1.3834974 1.5875365 1.5875365 ] [0.69111276 1.334534 1.9157059 ... 1.1709332 1.5875365 1.5875365 ]] [[1.9005786 2.8274167 2.8274167 ... 0.82312495 0.82312495 0.82312495] [1.968514 2.8274167 2.8274167 ... 2.7740119 2.7740119 1.3095453 ] [1.968514 2.8274167 2.8274167 ... 2.7740119 2.7740119 1.72817 ] ... [2.664309 2.664309 2.664309 ... 1.3834974 1.5875365 1.5875365 ] [2.664309 2.664309 2.664309 ... 1.3834974 1.5875365 1.5875365 ] [2.664309 2.664309 2.664309 ... 1.1709332 1.5875365 1.5875365 ]] [[1.1451597 2.8274167 2.8274167 ... 1.4109725 1.4109725 0.7837898 ] [1.968514 2.8274167 2.8274167 ... 2.7740119 2.7740119 1.3350439 ] [1.968514 2.8274167 2.8274167 ... 2.7740119 2.7740119 1.72817 ] ... [2.664309 2.664309 2.664309 ... 1.3834974 1.2805947 1.2805947 ] [2.664309 2.664309 2.664309 ... 1.3834974 1.1709332 1.1709332 ] [2.664309 2.664309 2.664309 ... 1.1709332 1.1709332 1.1709332 ]] ... [[0.8913368 1.1544403 1.1544403 ... 1.1809926 1.0425482 0.7223801 ] [0.8913368 1.5189999 1.5189999 ... 2.3062549 2.416387 2.416387 ] [0.94693255 1.5189999 1.5189999 ... 2.3062549 2.416387 2.416387 ] ... [2.2253032 2.2253032 1.8650857 ... 
2.0214179 2.0214179 1.5011357 ] [0.9720118 1.5510645 1.8650857 ... 1.6719296 1.6719296 1.3770983 ] [0.9720118 1.5510645 1.8650857 ... 1.5142787 1.5142787 1.3770983 ]] [[0.8913368 1.1544403 1.1544403 ... 1.9759045 1.9759045 0.7949215 ] [0.8913368 1.5189999 1.5189999 ... 2.3062549 2.416387 2.416387 ] [0.94693255 1.7720033 1.7720033 ... 2.3062549 2.416387 2.416387 ] ... [2.9538312 2.9538312 2.9538312 ... 2.0214179 3.525725 3.525725 ] [2.9538312 2.9538312 2.9538312 ... 1.3978882 1.2694049 1.2600656 ] [1.732243 1.732243 1.5510645 ... 1.2694049 1.2694049 1.2450602 ]] [[0.8913368 1.1544403 1.1544403 ... 1.9759045 1.9759045 0.7949215 ] [0.8913368 1.4343544 1.4343544 ... 2.3062549 2.416387 2.416387 ] [0.94693255 1.7720033 1.7720033 ... 2.3062549 2.416387 2.416387 ] ... [2.9538312 2.9538312 2.9538312 ... 1.3671405 3.525725 3.525725 ] [2.9538312 2.9538312 2.9538312 ... 1.2694049 1.2694049 1.2450602 ] [1.732243 1.732243 1.5510645 ... 1.2694049 1.2694049 1.2450602 ]]] [[[2.8383396 2.8383396 2.8383396 ... 0.62973297 0.5851541 0.5259039 ] [2.8383396 2.8383396 2.8383396 ... 1.1499801 0.58816326 0.58816326] [0.96240515 1.7812312 1.7812312 ... 1.1499801 0.58816326 0.58816326] ... [1.4468782 1.4468782 2.4384758 ... 1.4826386 1.4826386 1.4826386 ] [1.0708466 1.2648563 2.4384758 ... 0.97730315 1.1864034 1.1864034 ] [0.78200686 0.78200686 1.0429044 ... 0.97730315 1.1864034 1.1864034 ]] [[2.8383396 2.8383396 2.8383396 ... 2.5548842 2.5548842 2.5548842 ] [2.8383396 2.8383396 2.8383396 ... 2.5548842 2.5548842 2.5548842 ] [0.96240515 1.7812312 1.7812312 ... 2.0495393 0.9060221 0.58816326] ... [1.4468782 1.4468782 2.640153 ... 1.4826386 1.4826386 1.4826386 ] [1.1589073 1.2648563 2.640153 ... 1.28227 1.1864034 1.1864034 ] [1.1589073 1.1589073 1.0429044 ... 1.28227 1.1864034 1.1864034 ]] [[1.1513572 1.9246159 2.1083379 ... 2.5548842 2.5548842 2.5548842 ] [1.1513572 1.9246159 2.1083379 ... 2.5548842 2.5548842 2.5548842 ] [1.1513572 1.7196355 2.03398 ... 2.0495393 0.9060221 0.7488404 ] ... 
[1.0322496 1.0395423 2.640153 ... 2.751452 2.751452 2.751452 ] [1.1589073 1.1589073 2.640153 ... 2.751452 2.751452 2.751452 ] [1.1589073 1.1589073 1.0395423 ... 2.751452 2.751452 2.751452 ]] ... [[1.7752174 1.7752174 1.7752174 ... 1.5205452 1.1937242 0.4947134 ] [1.7752174 1.7752174 2.099554 ... 3.0161395 3.0161395 3.0161395 ] [1.7752174 1.7752174 2.099554 ... 3.0161395 3.0161395 3.0161395 ] ... [2.4840605 2.4840605 2.4840605 ... 1.7734917 1.7734917 1.7734917 ] [2.4840605 2.4840605 2.4840605 ... 1.7734917 1.7734917 1.7734917 ] [2.4840605 2.4840605 2.4840605 ... 1.690777 1.690777 1.690777 ]] [[1.9381695 1.9381695 1.9381695 ... 2.431032 2.431032 2.431032 ] [1.9381695 1.9381695 2.099554 ... 3.0161395 3.0161395 3.0161395 ] [1.9381695 1.9381695 2.099554 ... 3.0161395 3.0161395 3.0161395 ] ... [0.79901296 0.9186113 1.0483195 ... 1.7734917 1.7734917 1.7734917 ] [1.3128053 1.3128053 1.5262413 ... 1.7734917 1.7734917 1.7734917 ] [1.3128053 1.3128053 1.5262413 ... 1.7695538 1.7695538 1.7695538 ]] [[1.9381695 1.9381695 1.9381695 ... 2.431032 2.431032 2.431032 ] [1.9381695 1.9381695 1.9381695 ... 2.431032 2.431032 2.431032 ] [1.9381695 1.9381695 1.9381695 ... 2.3988862 2.3988862 2.068277 ] ... [0.6287797 0.6287797 1.0463396 ... 1.7734917 1.7734917 1.7734917 ] [1.3128053 1.3128053 1.0463396 ... 1.7734917 1.7734917 1.7734917 ] [1.3128053 1.3128053 0.8778555 ... 1.7695538 1.7695538 1.7695538 ]]] [[[1.4870791 1.4870791 1.4870791 ... 2.7143583 1.9764254 1.9764254 ] [1.4870791 1.4870791 1.4870791 ... 2.7143583 1.9764254 1.9764254 ] [1.4852576 1.4852576 1.4852576 ... 1.0730175 0.7022294 0.68622255] ... [1.830655 2.0972595 2.0972595 ... 1.4431287 1.5396272 1.5396272 ] [1.293203 2.0972595 2.0972595 ... 1.4431287 1.5396272 1.5396272 ] [0.9556442 1.1073753 1.1073753 ... 1.4431287 1.4431287 1.4431287 ]] [[1.4870791 1.4870791 1.4870791 ... 2.7143583 1.9764254 1.9764254 ] [1.4870791 1.4870791 1.4870791 ... 2.7143583 1.9764254 1.9764254 ] [1.4852576 1.4852576 1.4852576 ... 
1.0730175 0.70643693 0.70643693] ... [1.830655 2.0972595 2.0972595 ... 1.4623142 1.5396272 1.5396272 ] [1.293203 2.0972595 2.0972595 ... 1.4623142 1.5396272 1.5396272 ] [0.9556442 1.5287579 1.5287579 ... 1.4431287 1.4431287 1.4431287 ]] [[1.5093035 1.5093035 1.5093035 ... 2.7143583 1.9872588 1.9872588 ] [2.2065525 2.2065525 1.5093035 ... 2.7143583 1.9872588 1.9872588 ] [2.2065525 2.2065525 1.4852576 ... 1.9872588 1.9872588 1.9872588 ] ... [1.2270917 2.0972595 2.0972595 ... 1.48234 1.5396272 1.5396272 ] [1.2270917 2.0972595 2.0972595 ... 1.48234 1.5396272 1.5396272 ] [0.8150836 1.5287579 1.5287579 ... 1.386924 0.9492747 0.9492747 ]] ... [[1.4303722 1.4913967 2.270318 ... 2.1688714 1.1659071 1.1659071 ] [1.4303722 1.4913967 2.270318 ... 2.1688714 1.1659071 1.1659071 ] [1.4303722 2.5148482 2.5148482 ... 2.1688714 1.3867639 1.3867639 ] ... [2.0229638 2.0229638 2.0229638 ... 2.2098956 3.1111217 3.1111217 ] [2.0229638 2.0229638 2.0229638 ... 2.2098956 3.1111217 3.1111217 ] [2.0229638 2.0229638 2.0229638 ... 1.3229647 3.1111217 3.1111217 ]] [[1.4303722 1.4913967 2.270318 ... 2.1688714 1.6220307 1.6220307 ] [1.4303722 1.4913967 2.270318 ... 2.1688714 1.6220307 1.6220307 ] [1.4997271 2.5148482 2.5148482 ... 2.1688714 1.6220307 1.6220307 ] ... [2.0229638 2.0229638 2.0229638 ... 2.2098956 2.2098956 2.2098956 ] [2.0229638 2.0229638 2.0229638 ... 2.2696927 2.2696927 2.2098956 ] [2.0229638 2.0229638 2.0229638 ... 2.2696927 2.2696927 1.8060052 ]] [[1.4303722 1.4913967 2.270318 ... 0.6057997 1.6220307 1.6220307 ] [1.4303722 1.4913967 2.270318 ... 0.6991001 1.6220307 1.6220307 ] [1.4997271 1.4997271 2.270318 ... 0.6991001 1.6220307 1.6220307 ] ... [1.5823356 1.982603 1.982603 ... 1.7158976 1.7633494 1.7633494 ] [1.6006144 1.982603 1.982603 ... 2.2696927 2.2696927 1.8060052 ] [1.6006144 1.982603 1.982603 ... 2.2696927 2.2696927 1.8060052 ]]]]]; ov_res: [[[[[1.9005786 2.8274167 2.8274167 ... 0.82312495 0.82312495 0.82312495] [1.968514 2.8274167 2.8274167 ... 
2.1823552 2.1823552 1.3095453 ] [1.968514 2.8274167 2.8274167 ... 2.1823552 2.1823552 1.3095453 ] ... [1.1764215 1.1764215 1.8217572 ... 1.3834974 1.5875365 1.5875365 ] [1.1764215 1.334534 1.9157059 ... 1.3834974 1.5875365 1.5875365 ] [0.69111276 1.334534 1.9157059 ... 1.1709332 1.5875365 1.5875365 ]] [[1.9005786 2.8274167 2.8274167 ... 0.82312495 0.82312495 0.82312495] [1.968514 2.8274167 2.8274167 ... 2.7740119 2.7740119 1.3095453 ] [1.968514 2.8274167 2.8274167 ... 2.7740119 2.7740119 1.72817 ] ... [2.664309 2.664309 2.664309 ... 1.3834974 1.5875365 1.5875365 ] [2.664309 2.664309 2.664309 ... 1.3834974 1.5875365 1.5875365 ] [2.664309 2.664309 2.664309 ... 1.1709332 1.5875365 1.5875365 ]] [[1.1451597 2.8274167 2.8274167 ... 1.4109725 1.4109725 0.7837898 ] [1.968514 2.8274167 2.8274167 ... 2.7740119 2.7740119 1.3350439 ] [1.968514 2.8274167 2.8274167 ... 2.7740119 2.7740119 1.72817 ] ... [2.664309 2.664309 2.664309 ... 1.3834974 1.2805947 1.2805947 ] [2.664309 2.664309 2.664309 ... 1.3834974 1.1709332 1.1709332 ] [2.664309 2.664309 2.664309 ... 1.1709332 1.1709332 1.1709332 ]] ... [[0.8913368 1.1544403 1.1544403 ... 1.1809926 1.0425482 0.7223801 ] [0.8913368 1.5189999 1.5189999 ... 2.3062549 2.416387 2.416387 ] [0.94693255 1.5189999 1.5189999 ... 2.3062549 2.416387 2.416387 ] ... [2.2253032 2.2253032 1.8650857 ... 2.0214179 2.0214179 1.5011357 ] [0.9720118 1.5510645 1.8650857 ... 1.6719296 1.6719296 1.3770983 ] [0.9720118 1.5510645 1.8650857 ... 1.5142787 1.5142787 1.3770983 ]] [[0.8913368 1.1544403 1.1544403 ... 1.9759045 1.9759045 0.7949215 ] [0.8913368 1.5189999 1.5189999 ... 2.3062549 2.416387 2.416387 ] [0.94693255 1.7720033 1.7720033 ... 2.3062549 2.416387 2.416387 ] ... [2.9538312 2.9538312 2.9538312 ... 2.0214179 3.525725 3.525725 ] [2.9538312 2.9538312 2.9538312 ... 1.3978882 1.2694049 1.2600656 ] [1.732243 1.732243 1.5510645 ... 1.2694049 1.2694049 1.2450602 ]] [[0.8913368 1.1544403 1.1544403 ... 
1.9759045 1.9759045 0.7949215 ] [0.8913368 1.4343544 1.4343544 ... 2.3062549 2.416387 2.416387 ] [0.94693255 1.7720033 1.7720033 ... 2.3062549 2.416387 2.416387 ] ... [2.9538312 2.9538312 2.9538312 ... 1.3671405 3.525725 3.525725 ] [2.9538312 2.9538312 2.9538312 ... 1.2694049 1.2694049 1.2450602 ] [1.732243 1.732243 1.5510645 ... 1.2694049 1.2694049 1.2450602 ]]] [[[2.8383396 2.8383396 2.8383396 ... 0.62973297 0.5851541 0.5259039 ] [2.8383396 2.8383396 2.8383396 ... 1.1499801 0.58816326 0.58816326] [0.96240515 1.7812312 1.7812312 ... 1.1499801 0.58816326 0.58816326] ... [1.4468782 1.4468782 2.4384758 ... 1.4826386 1.4826386 1.4826386 ] [1.0708466 1.2648563 2.4384758 ... 0.97730315 1.1864034 1.1864034 ] [0.78200686 0.78200686 1.0429044 ... 0.97730315 1.1864034 1.1864034 ]] [[2.8383396 2.8383396 2.8383396 ... 2.5548842 2.5548842 2.5548842 ] [2.8383396 2.8383396 2.8383396 ... 2.5548842 2.5548842 2.5548842 ] [0.96240515 1.7812312 1.7812312 ... 2.0495393 0.9060221 0.58816326] ... [1.4468782 1.4468782 2.640153 ... 1.4826386 1.4826386 1.4826386 ] [1.1589073 1.2648563 2.640153 ... 1.28227 1.1864034 1.1864034 ] [1.1589073 1.1589073 1.0429044 ... 1.28227 1.1864034 1.1864034 ]] [[1.1513572 1.9246159 2.1083379 ... 2.5548842 2.5548842 2.5548842 ] [1.1513572 1.9246159 2.1083379 ... 2.5548842 2.5548842 2.5548842 ] [1.1513572 1.7196355 2.03398 ... 2.0495393 0.9060221 0.7488404 ] ... [1.0322496 1.0395423 2.640153 ... 2.751452 2.751452 2.751452 ] [1.1589073 1.1589073 2.640153 ... 2.751452 2.751452 2.751452 ] [1.1589073 1.1589073 1.0395423 ... 2.751452 2.751452 2.751452 ]] ... [[1.7752174 1.7752174 1.7752174 ... 1.5205452 1.1937242 0.4947134 ] [1.7752174 1.7752174 2.099554 ... 3.0161395 3.0161395 3.0161395 ] [1.7752174 1.7752174 2.099554 ... 3.0161395 3.0161395 3.0161395 ] ... [2.4840605 2.4840605 2.4840605 ... 1.7734917 1.7734917 1.7734917 ] [2.4840605 2.4840605 2.4840605 ... 1.7734917 1.7734917 1.7734917 ] [2.4840605 2.4840605 2.4840605 ... 
1.690777 1.690777 1.690777 ]] [[1.9381695 1.9381695 1.9381695 ... 2.431032 2.431032 2.431032 ] [1.9381695 1.9381695 2.099554 ... 3.0161395 3.0161395 3.0161395 ] [1.9381695 1.9381695 2.099554 ... 3.0161395 3.0161395 3.0161395 ] ... [0.79901296 0.9186113 1.0483195 ... 1.7734917 1.7734917 1.7734917 ] [1.3128053 1.3128053 1.5262413 ... 1.7734917 1.7734917 1.7734917 ] [1.3128053 1.3128053 1.5262413 ... 1.7695538 1.7695538 1.7695538 ]] [[1.9381695 1.9381695 1.9381695 ... 2.431032 2.431032 2.431032 ] [1.9381695 1.9381695 1.9381695 ... 2.431032 2.431032 2.431032 ] [1.9381695 1.9381695 1.9381695 ... 2.3988862 2.3988862 2.068277 ] ... [0.6287797 0.6287797 1.0463396 ... 1.7734917 1.7734917 1.7734917 ] [1.3128053 1.3128053 1.0463396 ... 1.7734917 1.7734917 1.7734917 ] [1.3128053 1.3128053 0.8778555 ... 1.7695538 1.7695538 1.7695538 ]]] [[[1.4870791 1.4870791 1.4870791 ... 2.7143583 1.9764254 1.9764254 ] [1.4870791 1.4870791 1.4870791 ... 2.7143583 1.9764254 1.9764254 ] [1.4852576 1.4852576 1.4852576 ... 1.0730175 0.7022294 0.68622255] ... [1.830655 2.0972595 2.0972595 ... 1.4431287 1.5396272 1.5396272 ] [1.293203 2.0972595 2.0972595 ... 1.4431287 1.5396272 1.5396272 ] [0.9556442 1.1073753 1.1073753 ... 1.4431287 1.4431287 1.4431287 ]] [[1.4870791 1.4870791 1.4870791 ... 2.7143583 1.9764254 1.9764254 ] [1.4870791 1.4870791 1.4870791 ... 2.7143583 1.9764254 1.9764254 ] [1.4852576 1.4852576 1.4852576 ... 1.0730175 0.70643693 0.70643693] ... [1.830655 2.0972595 2.0972595 ... 1.4623142 1.5396272 1.5396272 ] [1.293203 2.0972595 2.0972595 ... 1.4623142 1.5396272 1.5396272 ] [0.9556442 1.5287579 1.5287579 ... 1.4431287 1.4431287 1.4431287 ]] [[1.5093035 1.5093035 1.5093035 ... 2.7143583 1.9872588 1.9872588 ] [2.2065525 2.2065525 1.5093035 ... 2.7143583 1.9872588 1.9872588 ] [2.2065525 2.2065525 1.4852576 ... 1.9872588 1.9872588 1.9872588 ] ... [1.2270917 2.0972595 2.0972595 ... 1.48234 1.5396272 1.5396272 ] [1.2270917 2.0972595 2.0972595 ... 
1.48234 1.5396272 1.5396272 ] [0.8150836 1.5287579 1.5287579 ... 1.386924 0.9492747 0.9492747 ]] ... [[1.4303722 1.4913967 2.270318 ... 2.1688714 1.1659071 1.1659071 ] [1.4303722 1.4913967 2.270318 ... 2.1688714 1.1659071 1.1659071 ] [1.4303722 2.5148482 2.5148482 ... 2.1688714 1.3867639 1.3867639 ] ... [2.0229638 2.0229638 2.0229638 ... 2.2098956 3.1111217 3.1111217 ] [2.0229638 2.0229638 2.0229638 ... 2.2098956 3.1111217 3.1111217 ] [2.0229638 2.0229638 2.0229638 ... 1.3229647 3.1111217 3.1111217 ]] [[1.4303722 1.4913967 2.270318 ... 2.1688714 1.6220307 1.6220307 ] [1.4303722 1.4913967 2.270318 ... 2.1688714 1.6220307 1.6220307 ] [1.4997271 2.5148482 2.5148482 ... 2.1688714 1.6220307 1.6220307 ] ... [2.0229638 2.0229638 2.0229638 ... 2.2098956 2.2098956 2.2098956 ] [2.0229638 2.0229638 2.0229638 ... 2.2696927 2.2696927 2.2098956 ] [2.0229638 2.0229638 2.0229638 ... 2.2696927 2.2696927 1.8060052 ]] [[1.4303722 1.4913967 2.270318 ... 0.6057997 1.6220307 1.6220307 ] [1.4303722 1.4913967 2.270318 ... 0.6991001 1.6220307 1.6220307 ] [1.4997271 1.4997271 2.270318 ... 0.6991001 1.6220307 1.6220307 ] ... [1.5823356 1.982603 1.982603 ... 1.7158976 1.7633494 1.7633494 ] [1.6006144 1.982603 1.982603 ... 2.2696927 2.2696927 1.8060052 ] [1.6006144 1.982603 1.982603 ... 2.2696927 2.2696927 1.8060052 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': [3, 3, 3], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5860.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%6) fw_re: [[[[[1.9464029 2.119321 1.8282053 1.3339038 2.1764996 ] [1.740155 2.9151173 1.4230626 1.3864863 1.9302251 ] [2.4161499 2.6592681 1.1696053 2.0098956 1.8679106 ] [2.1233091 1.7562221 1.8917515 1.8308774 1.3439759 ] [1.716309 2.5578673 2.3234375 2.7194946 2.3392172 ]] [[1.4158342 1.7614083 1.3572649 1.8810121 1.9667752 ] [1.347268 1.2420925 1.9494723 1.4341642 1.5203058 ] [2.8166046 1.5235487 1.4183242 2.033857 2.614444 ] [1.6780562 1.0147947 2.0873919 1.3487834 2.6090674 ] [1.3578587 2.0932631 2.510127 2.0287242 2.5670137 ]] [[2.026716 1.7067645 2.0135868 2.6931353 1.7793062 ] [2.9121838 1.5060806 0.95658195 1.6127793 2.2505271 ] [3.2658708 2.20475 1.4356754 1.4230963 1.793467 ] [2.0681455 1.6683496 1.7117654 2.1215312 2.177596 ] [2.694891 1.7317073 1.5096215 2.0035772 1.9076802 ]] [[2.268814 1.6964333 1.4881337 1.9816729 3.4981859 ] [1.8618908 2.410181 1.9906796 0.99503136 3.1120715 ] [2.330911 1.5473062 2.111143 1.820026 1.9250596 ] [1.8550274 2.0354683 1.7530562 1.6751821 2.459754 ] [1.1742852 2.8363957 1.8033845 1.6042097 3.9980903 ]] [[1.9844898 1.2334543 1.4744343 2.2761595 1.166876 ] [1.7879782 1.8634591 2.044505 1.2984502 2.1448622 ] [1.5268275 2.1800468 1.6953541 1.6045578 2.2156503 ] [2.1098804 3.9535818 2.139552 1.6959984 1.1465944 ] [2.9984045 1.8254586 0.82334983 1.9345263 2.056471 ]]] [[[1.63801 2.2532303 1.9038343 2.4363225 1.8086091 ] [1.6396434 1.6564559 
1.7820677 1.9396926 2.17349 ] [2.225704 2.398701 2.1088905 1.3893347 2.1527884 ] [1.580201 1.9690709 1.9099615 1.0265268 1.5530378 ] [1.750808 1.5078061 1.6482664 1.7707217 2.078735 ]] [[3.0102355 1.6523607 1.6600605 1.2284863 1.5391667 ] [1.9721555 1.8975322 1.6621654 1.725744 1.9934893 ] [2.6480713 2.4569898 1.0238944 1.3960843 2.2453935 ] [1.5308437 2.037555 1.3912026 2.9554727 1.3011103 ] [1.7019074 2.3328295 2.230836 1.9422572 2.6258886 ]] [[1.7238926 1.3557999 2.6900501 2.9238284 2.3328948 ] [1.564486 1.7772048 2.2666075 1.9229844 2.2028546 ] [2.1647356 1.9186133 1.2758021 1.77362 1.9912014 ] [1.734406 1.5904609 1.2765996 1.3461028 1.6842424 ] [1.6133419 1.9240112 1.19956 2.0709372 1.7608851 ]] [[1.908614 1.4440677 2.289603 2.3814332 1.8818854 ] [1.644191 2.2628436 3.9181807 3.082368 1.3469 ] [1.6990491 2.152461 2.5614882 1.7544122 1.8881845 ] [1.5598289 2.1360686 1.5987952 2.522903 1.4011575 ] [4.2556567 2.0815113 1.9604102 2.4680817 1.9896009 ]] [[3.1510897 1.9349577 3.1836724 2.2031527 1.8072147 ] [2.935259 1.8899876 1.5466229 3.2040267 2.2377656 ] [1.4806836 1.5355537 1.98668 2.2642076 1.3018669 ] [1.331354 3.6372044 1.8039297 2.350611 2.6051054 ] [1.1872706 1.8705175 1.3052402 2.2051225 1.8128752 ]]] [[[2.3642662 1.6681783 2.1080391 1.751088 1.8328435 ] [2.2108052 1.3931967 2.2650094 1.7164024 2.123785 ] [1.9685395 1.13884 1.7450163 2.785512 1.746783 ] [1.64181 1.9065647 1.8863747 2.8321579 1.7772067 ] [1.8527822 2.0428352 1.2553527 1.7622931 2.746987 ]] [[2.9301727 1.7763807 1.8505026 2.145019 1.5260178 ] [1.9918507 1.9852282 2.6339738 1.8140562 2.0437586 ] [2.5482566 2.3420613 2.5770571 1.7076421 2.3772852 ] [2.1356962 1.7972553 2.0582938 1.961195 1.7480658 ] [1.7083775 2.2551696 1.8077197 1.3295768 2.2473147 ]] [[2.8173985 1.2207853 1.9604148 1.8980222 2.4604115 ] [2.1847756 2.0985155 2.8590791 2.0235717 1.8213165 ] [2.4373603 1.7413273 2.4806406 2.0556052 1.6398869 ] [1.6348739 1.5683473 2.7794406 1.9554598 2.1189022 ] [1.7075824 2.4321373 1.5015776 
2.7829914 2.1307254 ]] [[2.400812 1.9287703 2.8875535 1.8018063 1.7269732 ] [2.4850125 1.4140682 2.223337 2.6019714 1.9048655 ] [1.4680593 1.9549316 3.0125852 1.243385 2.2042673 ] [1.4202052 1.2552888 1.9618316 1.6876503 1.8967867 ] [1.4475486 1.7393607 2.295999 1.3866508 2.1164358 ]] [[1.8281986 2.391624 2.5091417 1.6221763 1.4148461 ] [1.5150799 1.8967026 1.7129436 2.0251582 1.9508258 ] [1.4970152 1.8469075 1.7452902 1.7078385 1.5977113 ] [2.1171682 1.9903096 2.2490122 1.1657529 2.264584 ] [1.8141143 2.523084 1.8545538 1.3957983 1.6899264 ]]]]]; ov_res: [[[[[1.9464029 2.119321 1.8282053 1.3339038 2.1764996 ] [1.740155 2.9151173 1.4230626 1.3864863 1.9302251 ] [2.4161499 2.6592681 1.1696053 2.0098956 1.8679106 ] [2.1233091 1.7562221 1.8917515 1.8308774 1.3439759 ] [1.716309 2.5578673 2.3234375 2.7194946 2.3392172 ]] [[1.4158342 1.7614083 1.3572649 1.8810121 1.9667752 ] [1.347268 1.2420925 1.9494723 1.4341642 1.5203058 ] [2.8166046 1.5235487 1.4183242 2.033857 2.614444 ] [1.6780562 1.0147947 2.0873919 1.3487834 2.6090674 ] [1.3578587 2.0932631 2.510127 2.0287242 2.5670137 ]] [[2.026716 1.7067645 2.0135868 2.6931353 1.7793062 ] [2.9121838 1.5060806 0.95658195 1.6127793 2.2505271 ] [3.2658708 2.20475 1.4356754 1.4230963 1.793467 ] [2.0681455 1.6683496 1.7117654 2.1215312 2.177596 ] [2.694891 1.7317073 1.5096215 2.0035772 1.9076802 ]] [[2.268814 1.6964333 1.4881337 1.9816729 3.4981859 ] [1.8618908 2.410181 1.9906796 0.99503136 3.1120715 ] [2.330911 1.5473062 2.111143 1.820026 1.9250596 ] [1.8550274 2.0354683 1.7530562 1.6751821 2.459754 ] [1.1742852 2.8363957 1.8033845 1.6042097 3.9980903 ]] [[1.9844898 1.2334543 1.4744343 2.2761595 1.166876 ] [1.7879782 1.8634591 2.044505 1.2984502 2.1448622 ] [1.5268275 2.1800468 1.6953541 1.6045578 2.2156503 ] [2.1098804 3.9535818 2.139552 1.6959984 1.1465944 ] [2.9984045 1.8254586 0.82334983 1.9345263 2.056471 ]]] [[[1.63801 2.2532303 1.9038343 2.4363225 1.8086091 ] [1.6396434 1.6564559 1.7820677 1.9396926 2.17349 ] [2.225704 
2.398701 2.1088905 1.3893347 2.1527884 ] [1.580201 1.9690709 1.9099615 1.0265268 1.5530378 ] [1.750808 1.5078061 1.6482664 1.7707217 2.078735 ]] [[3.0102355 1.6523607 1.6600605 1.2284863 1.5391667 ] [1.9721555 1.8975322 1.6621654 1.725744 1.9934893 ] [2.6480713 2.4569898 1.0238944 1.3960843 2.2453935 ] [1.5308437 2.037555 1.3912026 2.9554727 1.3011103 ] [1.7019074 2.3328295 2.230836 1.9422572 2.6258886 ]] [[1.7238926 1.3557999 2.6900501 2.9238284 2.3328948 ] [1.564486 1.7772048 2.2666075 1.9229844 2.2028546 ] [2.1647356 1.9186133 1.2758021 1.77362 1.9912014 ] [1.734406 1.5904609 1.2765996 1.3461028 1.6842424 ] [1.6133419 1.9240112 1.19956 2.0709372 1.7608851 ]] [[1.908614 1.4440677 2.289603 2.3814332 1.8818854 ] [1.644191 2.2628436 3.9181807 3.082368 1.3469 ] [1.6990491 2.152461 2.5614882 1.7544122 1.8881845 ] [1.5598289 2.1360686 1.5987952 2.522903 1.4011575 ] [4.2556567 2.0815113 1.9604102 2.4680817 1.9896009 ]] [[3.1510897 1.9349577 3.1836724 2.2031527 1.8072147 ] [2.935259 1.8899876 1.5466229 3.2040267 2.2377656 ] [1.4806836 1.5355537 1.98668 2.2642076 1.3018669 ] [1.331354 3.6372044 1.8039297 2.350611 2.6051054 ] [1.1872706 1.8705175 1.3052402 2.2051225 1.8128752 ]]] [[[2.3642662 1.6681783 2.1080391 1.751088 1.8328435 ] [2.2108052 1.3931967 2.2650094 1.7164024 2.123785 ] [1.9685395 1.13884 1.7450163 2.785512 1.746783 ] [1.64181 1.9065647 1.8863747 2.8321579 1.7772067 ] [1.8527822 2.0428352 1.2553527 1.7622931 2.746987 ]] [[2.9301727 1.7763807 1.8505026 2.145019 1.5260178 ] [1.9918507 1.9852282 2.6339738 1.8140562 2.0437586 ] [2.5482566 2.3420613 2.5770571 1.7076421 2.3772852 ] [2.1356962 1.7972553 2.0582938 1.961195 1.7480658 ] [1.7083775 2.2551696 1.8077197 1.3295768 2.2473147 ]] [[2.8173985 1.2207853 1.9604148 1.8980222 2.4604115 ] [2.1847756 2.0985155 2.8590791 2.0235717 1.8213165 ] [2.4373603 1.7413273 2.4806406 2.0556052 1.6398869 ] [1.6348739 1.5683473 2.7794406 1.9554598 2.1189022 ] [1.7075824 2.4321373 1.5015776 2.7829914 2.1307254 ]] [[2.400812 
1.9287703 2.8875535 1.8018063 1.7269732 ] [2.4850125 1.4140682 2.223337 2.6019714 1.9048655 ] [1.4680593 1.9549316 3.0125852 1.243385 2.2042673 ] [1.4202052 1.2552888 1.9618316 1.6876503 1.8967867 ] [1.4475486 1.7393607 2.295999 1.3866508 2.1164358 ]] [[1.8281986 2.391624 2.5091417 1.6221763 1.4148461 ] [1.5150799 1.8967026 1.7129436 2.0251582 1.9508258 ] [1.4970152 1.8469075 1.7452902 1.7078385 1.5977113 ] [2.1171682 1.9903096 2.2490122 1.1657529 2.264584 ] [1.8141143 2.523084 1.8545538 1.3957983 1.6899264 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:True - params:{'kernel_size': [3, 2, 1], 'stride': [3, 1, 1], 'padding': [0, 0, 0]} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5862.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.stride : int[] = prim::Constant[value=[3, 1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2, 1]]() %7 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.stride, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%7) fw_re: [[[[[ 1.948316 1.0134094 1.2766707 ... 1.538055 1.0618007 0.99848753] [ 1.948316 1.0134094 3.7960854 ... 0.7066079 1.2900671 0.8829755 ] [ 1.3133467 1.1038951 3.7960854 ... 1.2454549 1.426621 0.87690264] ... [ 0.975953 2.0299008 0.963572 ... 0.10104088 1.66676 1.1701965 ] [-0.11654006 1.1016461 0.9592264 ... 0.79055345 1.66676 0.7486447 ] [-0.11654006 1.8707218 0.9592264 ... 1.57783 0.93329006 1.1670197 ]] [[ 1.6884118 1.1573611 1.6156619 ... 1.6274185 0.2351969 1.8647861 ] [ 0.70489377 1.1573611 1.6156619 ... 1.8302352 2.0714653 1.5023619 ] [ 0.22157218 1.6425064 1.4261222 ... 1.8302352 2.0714653 1.5023619 ] ... [ 1.160748 1.7173251 0.3589817 ... 1.2357035 1.3225771 0.7031369 ] [ 1.6300117 0.77967775 0.14991072 ... 0.9754782 1.9999956 1.6658823 ] [ 1.6300117 0.77967775 1.1784095 ... 0.41798455 1.9999956 1.6658823 ]] [[ 1.3146124 1.876867 0.35163707 ... 0.86176944 1.9037707 1.3917648 ] [ 0.71406996 1.6105921 1.6458757 ... 1.4632393 1.9037707 1.2837396 ] [ 0.49128062 1.6105921 3.0221808 ... 1.4632393 1.3493094 1.0334717 ] ... [ 2.0011878 1.9991301 1.1746264 ... 1.8098258 2.476632 0.94910914] [ 1.2939633 0.5374989 1.1746264 ... 0.86023337 2.476632 0.94910914] [ 1.2939633 1.1564845 1.5369235 ... 2.938138 0.9558303 0.9749727 ]] [[ 1.1369766 1.1815801 2.260895 ... 1.5247724 1.2263556 0.814037 ] [ 1.3180945 0.5799918 1.0325395 ... 
0.33933103 1.2263556 1.2704201 ] [ 1.3180945 0.22706035 0.8477083 ... 0.48048756 0.7552646 1.2704201 ] ... [ 1.2966748 0.9839701 1.2574259 ... 1.2421117 0.68228054 0.7710751 ] [ 1.037096 1.1002989 1.2574259 ... 0.9109932 0.68228054 0.9758038 ] [ 1.3208869 1.1002989 1.800384 ... 1.8476182 0.04634674 0.9758038 ]] [[ 1.3867049 1.8220791 1.7328527 ... 0.7867781 1.6574608 1.533213 ] [ 1.3867049 0.7655092 1.4870477 ... 0.7867781 1.6574608 1.7458344 ] [-0.30012158 0.422043 0.18827322 ... 1.1870359 1.0499352 1.7458344 ] ... [ 0.6852771 1.2552881 0.51939285 ... 1.8806111 1.3738774 0.9086196 ] [ 0.56934416 1.8436402 0.9272288 ... 1.8806111 1.9699396 0.7371374 ] [ 1.3199015 1.8436402 0.9272288 ... 1.1489235 1.9699396 1.5870991 ]]] [[[ 1.4193121 0.9441162 0.87364393 ... 1.7478017 0.93364793 1.3328812 ] [ 1.104924 1.8388586 1.0431582 ... 1.7478017 0.3403707 0.4483963 ] [ 1.033786 1.8388586 2.3630803 ... 1.3868787 0.93099105 1.1043016 ] ... [ 1.9958181 2.350668 0.43206725 ... 1.385716 0.8807363 1.5530931 ] [ 0.73042655 2.350668 -0.5833924 ... 1.5560625 2.4513378 1.5530931 ] [ 0.9101751 1.1548429 1.8127798 ... 1.5560625 2.4513378 1.4010309 ]] [[ 1.3008724 1.5595704 4.3273673 ... 2.3660958 1.2355881 0.32954395] [ 1.028408 1.5595704 4.3273673 ... 0.8070273 1.2355881 1.041387 ] [ 0.5612487 0.9154131 1.9687331 ... 1.3438615 0.7200043 1.175642 ] ... [ 0.18056682 1.6879083 1.1980697 ... 0.70491135 3.1114445 1.5714138 ] [ 0.5533722 1.6879083 0.5048365 ... 0.70491135 3.1114445 1.5714138 ] [ 0.5533722 1.5635136 1.300917 ... 1.3048754 1.540083 1.6890116 ]] [[ 0.586908 2.1906753 1.1957016 ... 0.7475629 1.808943 1.1814462 ] [ 0.586908 1.2130936 2.0329518 ... 0.42121032 1.808943 1.076542 ] [ 1.7592298 1.232282 2.0329518 ... 1.1714797 1.3892437 0.99624985] ... [ 2.1737452 0.8422882 1.4258629 ... 1.1666986 1.3299909 1.6892316 ] [ 2.1737452 0.6888005 1.4258629 ... 0.8834163 1.3299909 1.6892316 ] [ 0.75466794 0.3527227 1.0032427 ... 
1.2173455 1.4523282 0.8367297 ]] [[ 0.53118485 0.9803869 2.0676239 ... 0.7396171 1.9663194 0.21474878] [ 2.1840286 0.8154929 2.0676239 ... 0.7396171 1.9663194 0.40015918] [ 2.1840286 1.1456733 0.7616963 ... 0.6155617 0.8071222 1.0047171 ] ... [ 1.7518977 1.0940121 0.88662267 ... 1.6192951 0.8649722 1.9943266 ] [ 1.7518977 1.0940121 0.88662267 ... 1.2264284 0.8649722 0.42044032] [ 0.8834944 1.6282867 0.6449133 ... 1.2264284 0.9635294 0.42044032]] [[ 1.1051084 1.6739448 1.4508289 ... 0.51617247 1.2066945 2.770117 ] [ 1.0006899 1.6739448 0.22491069 ... 1.4566123 1.2066945 2.770117 ] [ 1.0006899 0.97880644 1.3179759 ... 1.4566123 0.32798526 0.58597517] ... [ 2.2980716 0.81853735 1.674015 ... 1.6699858 1.6309159 1.1459751 ] [ 2.9179513 1.3945845 1.674015 ... 0.6452321 1.2352844 0.9992335 ] [ 2.9179513 1.3945845 0.22704515 ... 0.1798775 1.2352844 0.29746667]]] [[[ 0.5377342 1.4948559 1.6927454 ... 1.9607776 1.5642532 1.1992408 ] [ 0.5377342 1.4948559 1.6927454 ... 1.0354152 1.5642532 1.188067 ] [ 0.11230481 0.5480891 -0.12938635 ... 1.0354152 1.8368214 1.188067 ] ... [ 2.1244214 0.8694554 2.2519298 ... 1.5354838 1.2021451 0.7075347 ] [ 1.5354589 1.9141555 2.0174947 ... 1.5354838 1.3791716 0.7075347 ] [ 1.1790335 1.9141555 2.0174947 ... 1.0272328 1.7573758 0.6203712 ]] [[ 1.0432827 1.6992846 1.1271988 ... 1.1676383 1.0530567 0.40149456] [ 1.8335172 1.2768742 -0.18820515 ... 0.23548631 0.9265967 0.40149456] [ 1.8335172 1.1061655 1.5959351 ... 0.23548631 1.4598714 1.1596125 ] ... [ 1.4744581 1.4998368 1.4713637 ... 1.3369236 0.9074756 0.5582785 ] [ 1.4744581 1.4037386 1.4713637 ... 1.3369236 0.9301593 1.6618108 ] [ 0.4440765 0.95460105 1.6543697 ... 1.8462468 1.495434 1.6618108 ]] [[ 1.375569 1.251001 0.657484 ... 1.545707 1.5179454 1.6266297 ] [ 0.9651064 0.9972842 1.3125224 ... 1.545707 1.8711959 1.6266297 ] [ 0.9413329 2.171485 1.3125224 ... 0.4175458 1.8711959 1.1903771 ] ... [ 0.15904547 2.132812 0.8537729 ... 
1.3889755 1.0502882 2.3458314 ] [ 0.5472819 2.2918167 1.2712293 ... 1.3889755 0.66144836 2.3458314 ] [ 1.2678363 2.2918167 1.2712293 ... 1.6452324 0.69896126 0.7402592 ]] [[ 1.3069422 1.6521087 0.6526574 ... 2.5073502 2.0690963 1.5421798 ] [ 1.9852505 2.4159582 0.3528165 ... 2.5073502 0.47657555 1.5421798 ] [ 1.9852505 2.4159582 0.7394498 ... 1.2098168 0.4347191 0.39344415] ... [ 1.6338227 0.13426584 1.7490495 ... 0.7695526 0.29274008 1.858614 ] [ 1.6338227 1.3221878 1.7490495 ... 0.52742684 1.3125702 1.858614 ] [ 2.007163 1.3221878 1.338194 ... 1.0746895 1.3125702 0.92504287]] [[ 1.5455701 1.5982953 1.3291641 ... 1.7167207 1.1807301 1.8612887 ] [ 1.1564722 1.5982953 1.3291641 ... 1.7167207 1.1807301 1.8612887 ] [ 1.5442352 1.0971713 2.2132096 ... 1.4382701 1.0923792 1.2081435 ] ... [ 0.7588302 0.5932885 1.1188978 ... 1.5181692 1.130585 0.41356394] [ 0.8729194 0.4911748 1.1188978 ... 0.9845224 1.130585 1.572213 ] [ 0.8729194 0.4911748 2.0847206 ... 1.4377153 1.0547171 1.572213 ]]]]]; ov_res: [[[[[ 1.948316 1.0134094 1.2766707 ... 1.538055 1.0618007 0.99848753] [ 1.948316 1.0134094 3.7960854 ... 0.7066079 1.2900671 0.8829755 ] [ 1.3133467 1.1038951 3.7960854 ... 1.2454549 1.426621 0.87690264] ... [ 0.975953 2.0299008 0.963572 ... 0.10104088 1.66676 1.1701965 ] [-0.11654006 1.1016461 0.9592264 ... 0.79055345 1.66676 0.7486447 ] [-0.11654006 1.8707218 0.9592264 ... 1.57783 0.93329006 1.1670197 ]] [[ 1.6884118 1.1573611 1.6156619 ... 1.6274185 0.2351969 1.8647861 ] [ 0.70489377 1.1573611 1.6156619 ... 1.8302352 2.0714653 1.5023619 ] [ 0.22157218 1.6425064 1.4261222 ... 1.8302352 2.0714653 1.5023619 ] ... [ 1.160748 1.7173251 0.3589817 ... 1.2357035 1.3225771 0.7031369 ] [ 1.6300117 0.77967775 0.14991072 ... 0.9754782 1.9999956 1.6658823 ] [ 1.6300117 0.77967775 1.1784095 ... 0.41798455 1.9999956 1.6658823 ]] [[ 1.3146124 1.876867 0.35163707 ... 0.86176944 1.9037707 1.3917648 ] [ 0.71406996 1.6105921 1.6458757 ... 
1.4632393 1.9037707 1.2837396 ] [ 0.49128062 1.6105921 3.0221808 ... 1.4632393 1.3493094 1.0334717 ] ... [ 2.0011878 1.9991301 1.1746264 ... 1.8098258 2.476632 0.94910914] [ 1.2939633 0.5374989 1.1746264 ... 0.86023337 2.476632 0.94910914] [ 1.2939633 1.1564845 1.5369235 ... 2.938138 0.9558303 0.9749727 ]] [[ 1.1369766 1.1815801 2.260895 ... 1.5247724 1.2263556 0.814037 ] [ 1.3180945 0.5799918 1.0325395 ... 0.33933103 1.2263556 1.2704201 ] [ 1.3180945 0.22706035 0.8477083 ... 0.48048756 0.7552646 1.2704201 ] ... [ 1.2966748 0.9839701 1.2574259 ... 1.2421117 0.68228054 0.7710751 ] [ 1.037096 1.1002989 1.2574259 ... 0.9109932 0.68228054 0.9758038 ] [ 1.3208869 1.1002989 1.800384 ... 1.8476182 0.04634674 0.9758038 ]] [[ 1.3867049 1.8220791 1.7328527 ... 0.7867781 1.6574608 1.533213 ] [ 1.3867049 0.7655092 1.4870477 ... 0.7867781 1.6574608 1.7458344 ] [-0.30012158 0.422043 0.18827322 ... 1.1870359 1.0499352 1.7458344 ] ... [ 0.6852771 1.2552881 0.51939285 ... 1.8806111 1.3738774 0.9086196 ] [ 0.56934416 1.8436402 0.9272288 ... 1.8806111 1.9699396 0.7371374 ] [ 1.3199015 1.8436402 0.9272288 ... 1.1489235 1.9699396 1.5870991 ]]] [[[ 1.4193121 0.9441162 0.87364393 ... 1.7478017 0.93364793 1.3328812 ] [ 1.104924 1.8388586 1.0431582 ... 1.7478017 0.3403707 0.4483963 ] [ 1.033786 1.8388586 2.3630803 ... 1.3868787 0.93099105 1.1043016 ] ... [ 1.9958181 2.350668 0.43206725 ... 1.385716 0.8807363 1.5530931 ] [ 0.73042655 2.350668 -0.5833924 ... 1.5560625 2.4513378 1.5530931 ] [ 0.9101751 1.1548429 1.8127798 ... 1.5560625 2.4513378 1.4010309 ]] [[ 1.3008724 1.5595704 4.3273673 ... 2.3660958 1.2355881 0.32954395] [ 1.028408 1.5595704 4.3273673 ... 0.8070273 1.2355881 1.041387 ] [ 0.5612487 0.9154131 1.9687331 ... 1.3438615 0.7200043 1.175642 ] ... [ 0.18056682 1.6879083 1.1980697 ... 0.70491135 3.1114445 1.5714138 ] [ 0.5533722 1.6879083 0.5048365 ... 0.70491135 3.1114445 1.5714138 ] [ 0.5533722 1.5635136 1.300917 ... 
1.3048754 1.540083 1.6890116 ]] [[ 0.586908 2.1906753 1.1957016 ... 0.7475629 1.808943 1.1814462 ] [ 0.586908 1.2130936 2.0329518 ... 0.42121032 1.808943 1.076542 ] [ 1.7592298 1.232282 2.0329518 ... 1.1714797 1.3892437 0.99624985] ... [ 2.1737452 0.8422882 1.4258629 ... 1.1666986 1.3299909 1.6892316 ] [ 2.1737452 0.6888005 1.4258629 ... 0.8834163 1.3299909 1.6892316 ] [ 0.75466794 0.3527227 1.0032427 ... 1.2173455 1.4523282 0.8367297 ]] [[ 0.53118485 0.9803869 2.0676239 ... 0.7396171 1.9663194 0.21474878] [ 2.1840286 0.8154929 2.0676239 ... 0.7396171 1.9663194 0.40015918] [ 2.1840286 1.1456733 0.7616963 ... 0.6155617 0.8071222 1.0047171 ] ... [ 1.7518977 1.0940121 0.88662267 ... 1.6192951 0.8649722 1.9943266 ] [ 1.7518977 1.0940121 0.88662267 ... 1.2264284 0.8649722 0.42044032] [ 0.8834944 1.6282867 0.6449133 ... 1.2264284 0.9635294 0.42044032]] [[ 1.1051084 1.6739448 1.4508289 ... 0.51617247 1.2066945 2.770117 ] [ 1.0006899 1.6739448 0.22491069 ... 1.4566123 1.2066945 2.770117 ] [ 1.0006899 0.97880644 1.3179759 ... 1.4566123 0.32798526 0.58597517] ... [ 2.2980716 0.81853735 1.674015 ... 1.6699858 1.6309159 1.1459751 ] [ 2.9179513 1.3945845 1.674015 ... 0.6452321 1.2352844 0.9992335 ] [ 2.9179513 1.3945845 0.22704515 ... 0.1798775 1.2352844 0.29746667]]] [[[ 0.5377342 1.4948559 1.6927454 ... 1.9607776 1.5642532 1.1992408 ] [ 0.5377342 1.4948559 1.6927454 ... 1.0354152 1.5642532 1.188067 ] [ 0.11230481 0.5480891 -0.12938635 ... 1.0354152 1.8368214 1.188067 ] ... [ 2.1244214 0.8694554 2.2519298 ... 1.5354838 1.2021451 0.7075347 ] [ 1.5354589 1.9141555 2.0174947 ... 1.5354838 1.3791716 0.7075347 ] [ 1.1790335 1.9141555 2.0174947 ... 1.0272328 1.7573758 0.6203712 ]] [[ 1.0432827 1.6992846 1.1271988 ... 1.1676383 1.0530567 0.40149456] [ 1.8335172 1.2768742 -0.18820515 ... 0.23548631 0.9265967 0.40149456] [ 1.8335172 1.1061655 1.5959351 ... 0.23548631 1.4598714 1.1596125 ] ... [ 1.4744581 1.4998368 1.4713637 ... 
1.3369236 0.9074756 0.5582785 ] [ 1.4744581 1.4037386 1.4713637 ... 1.3369236 0.9301593 1.6618108 ] [ 0.4440765 0.95460105 1.6543697 ... 1.8462468 1.495434 1.6618108 ]] [[ 1.375569 1.251001 0.657484 ... 1.545707 1.5179454 1.6266297 ] [ 0.9651064 0.9972842 1.3125224 ... 1.545707 1.8711959 1.6266297 ] [ 0.9413329 2.171485 1.3125224 ... 0.4175458 1.8711959 1.1903771 ] ... [ 0.15904547 2.132812 0.8537729 ... 1.3889755 1.0502882 2.3458314 ] [ 0.5472819 2.2918167 1.2712293 ... 1.3889755 0.66144836 2.3458314 ] [ 1.2678363 2.2918167 1.2712293 ... 1.6452324 0.69896126 0.7402592 ]] [[ 1.3069422 1.6521087 0.6526574 ... 2.5073502 2.0690963 1.5421798 ] [ 1.9852505 2.4159582 0.3528165 ... 2.5073502 0.47657555 1.5421798 ] [ 1.9852505 2.4159582 0.7394498 ... 1.2098168 0.4347191 0.39344415] ... [ 1.6338227 0.13426584 1.7490495 ... 0.7695526 0.29274008 1.858614 ] [ 1.6338227 1.3221878 1.7490495 ... 0.52742684 1.3125702 1.858614 ] [ 2.007163 1.3221878 1.338194 ... 1.0746895 1.3125702 0.92504287]] [[ 1.5455701 1.5982953 1.3291641 ... 1.7167207 1.1807301 1.8612887 ] [ 1.1564722 1.5982953 1.3291641 ... 1.7167207 1.1807301 1.8612887 ] [ 1.5442352 1.0971713 2.2132096 ... 1.4382701 1.0923792 1.2081435 ] ... [ 0.7588302 0.5932885 1.1188978 ... 1.5181692 1.130585 0.41356394] [ 0.8729194 0.4911748 1.1188978 ... 0.9845224 1.130585 1.572213 ] [ 0.8729194 0.4911748 2.0847206 ... 1.4377153 1.0547171 1.572213 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': 1, 'padding': 0} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5864.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 0, 0]]() %3 : int[] = prim::Constant[value=[1, 1, 1]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %3, %2, %3, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%6) fw_re: [[[[[1.8004663 1.8004663 1.1040591 ... 1.1463127 2.4347227 2.4347227 ] [1.8004663 1.8004663 1.6322079 ... 1.1463127 2.4347227 2.4347227 ] [1.6322079 1.6322079 1.6322079 ... 1.131033 1.1575804 1.1575804 ] ... [1.517844 2.6426861 2.6426861 ... 1.7842284 2.470748 2.470748 ] [1.517844 2.6426861 2.6426861 ... 1.3783025 2.470748 2.470748 ] [1.8306935 2.6426861 2.6426861 ... 1.3783025 2.470748 2.470748 ]] [[1.8004663 1.8004663 1.1040591 ... 1.3077934 2.4347227 2.4347227 ] [1.8004663 1.8004663 1.2038383 ... 1.3077934 2.4347227 2.4347227 ] [1.0734437 1.2038383 1.2972594 ... 1.3066736 1.3066736 1.6791211 ] ... [1.517844 2.400869 2.400869 ... 1.1078552 2.470748 3.301705 ] [1.517844 2.400869 2.400869 ... 1.0675557 2.470748 3.301705 ] [1.386886 2.400869 2.400869 ... 1.0214245 2.470748 2.470748 ]] [[1.8004663 1.958269 1.958269 ... 1.3077934 0.95667595 0.95667595] [1.8004663 1.8834894 1.8834894 ... 1.3077934 1.1744213 1.6791211 ] [1.6021911 1.6021911 1.2757937 ... 1.3066736 1.3066736 1.6791211 ] ... [1.386535 2.400869 2.400869 ... 1.1078552 1.1078552 3.301705 ] [1.386535 2.400869 2.400869 ... 2.8428233 1.8746157 3.301705 ] [1.386886 2.400869 2.400869 ... 2.8428233 1.8746157 3.0458047 ]] ... [[1.8317785 1.8317785 2.158195 ... 1.9854624 1.9854624 1.886226 ] [1.5061672 1.5061672 2.158195 ... 2.5701523 2.5701523 1.886226 ] [2.0308568 1.5402691 2.158195 ... 3.1779187 3.1779187 1.886226 ] ... [2.0752964 2.0752964 2.0752964 ... 
2.3874564 1.5764538 1.8051355 ] [2.0752964 2.0752964 2.0752964 ... 2.3874564 1.9911346 4.0229416 ] [2.2727382 2.2727382 2.2727382 ... 1.9911346 1.9911346 4.0229416 ]] [[2.2166114 2.2166114 2.158195 ... 1.9854624 1.9854624 1.9759197 ] [1.5061672 1.5061672 2.158195 ... 2.5701523 2.5701523 2.4258826 ] [2.0308568 1.3702568 2.158195 ... 3.1779187 3.1779187 2.4258826 ] ... [1.8461533 2.6071916 2.6071916 ... 1.5764538 1.9659437 1.9659437 ] [1.8461533 1.8461533 1.8461533 ... 1.9911346 1.9911346 1.9911346 ] [1.8461533 1.8461533 1.8461533 ... 1.9911346 1.9911346 1.9911346 ]] [[2.2755296 2.2755296 2.158195 ... 2.6642625 2.6642625 2.6642625 ] [1.579624 1.579624 2.158195 ... 2.6642625 2.6642625 2.6642625 ] [2.0308568 1.579624 2.158195 ... 2.6642625 2.6642625 2.6642625 ] ... [1.9967746 2.6071916 2.6071916 ... 1.3881711 1.9659437 1.9659437 ] [1.8461533 1.8461533 1.8461533 ... 1.5596838 1.5596838 1.8051355 ] [1.8461533 1.8461533 1.8461533 ... 1.5596838 1.5596838 1.8051355 ]]] [[[1.6287253 1.7230041 1.7230041 ... 2.0508575 2.0508575 1.7221086 ] [1.317508 1.7230041 1.7230041 ... 1.7221086 1.7221086 1.7221086 ] [1.7747711 1.7747711 1.7230041 ... 1.6919014 1.6652436 1.6652436 ] ... [1.5589314 1.5589314 1.2438185 ... 1.7255381 1.7255381 2.0451725 ] [1.5999234 1.5999234 1.1295036 ... 2.191894 2.191894 2.191894 ] [1.5999234 1.5999234 1.5722125 ... 2.191894 2.191894 2.191894 ]] [[1.763548 1.7230041 1.7230041 ... 2.0508575 2.0508575 1.4340413 ] [1.763548 1.7230041 3.2505956 ... 1.6919014 1.6652436 1.6652436 ] [1.7747711 1.7747711 3.2505956 ... 1.9281049 1.9281049 1.9281049 ] ... [2.1523793 2.1523793 1.8712938 ... 1.7255381 1.7255381 2.0451725 ] [2.1523793 2.1523793 1.8712938 ... 2.191894 2.191894 2.191894 ] [1.5999234 1.5999234 1.8712938 ... 2.191894 2.191894 2.191894 ]] [[1.763548 1.7230041 1.7230041 ... 2.4241486 2.4241486 2.0511801 ] [1.763548 1.7230041 3.2505956 ... 1.6919014 1.6652436 1.6652436 ] [1.3782918 1.7230041 3.2505956 ... 1.9281049 1.9281049 1.9281049 ] ... 
[3.7266116 3.7266116 3.7266116 ... 1.3759421 1.3759421 2.2670772 ] [3.7266116 3.7266116 3.7266116 ... 2.191894 2.191894 2.2670772 ] [1.9414926 1.5450534 2.2880926 ... 2.191894 2.191894 2.191894 ]] ... [[1.9455988 1.9322085 1.9322085 ... 1.9732616 1.6844314 1.3080735 ] [1.9455988 1.9322085 1.9322085 ... 1.9732616 1.6844314 1.3080735 ] [1.9322085 1.9322085 1.9322085 ... 1.6844314 1.6844314 1.0507042 ] ... [2.2706227 2.3363035 2.3363035 ... 1.7469229 1.7469229 1.7469229 ] [2.2706227 2.3363035 2.3363035 ... 2.343854 2.343854 2.343854 ] [2.2706227 1.8445891 1.8445891 ... 2.343854 2.343854 2.343854 ]] [[1.9455988 1.9322085 1.9322085 ... 1.1913803 1.7857867 1.7857867 ] [1.9455988 1.9322085 1.9322085 ... 1.1913803 1.7857867 1.7857867 ] [1.9322085 1.9322085 1.9322085 ... 1.1913803 1.7857867 1.7857867 ] ... [2.2706227 1.8821051 1.9474149 ... 1.6075816 1.5248705 2.0415726 ] [2.2706227 1.8445891 1.8445891 ... 2.343854 2.343854 2.343854 ] [2.2706227 1.8445891 1.8445891 ... 2.343854 2.343854 2.343854 ]] [[1.9455988 1.8369957 1.6573199 ... 1.3876553 1.7857867 1.7857867 ] [1.9455988 1.8369957 1.5448318 ... 1.4396076 1.7857867 1.7857867 ] [1.8369957 1.8369957 1.8302474 ... 1.4396076 1.7857867 1.7857867 ] ... [2.2706227 1.6538566 1.7378435 ... 1.6075816 1.5248705 2.0415726 ] [2.2706227 1.9440876 1.5320152 ... 1.5188912 1.5188912 2.0415726 ] [2.2706227 2.0027473 2.0027473 ... 1.5188912 1.5188912 2.0882285 ]]] [[[2.276221 2.276221 2.276221 ... 1.7912824 1.3803943 1.3803943 ] [2.055996 1.9559814 1.9559814 ... 2.0563102 2.252208 2.252208 ] [2.055996 1.9559814 1.9559814 ... 2.0563102 2.252208 2.252208 ] ... [2.1582897 2.1582897 2.1582897 ... 2.3887928 2.3887928 1.9868063 ] [2.1582897 2.1582897 2.1582897 ... 1.6692332 1.9868063 2.7160366 ] [1.7629327 1.7629327 1.3186121 ... 1.7528862 1.9868063 2.7160366 ]] [[2.276221 2.276221 2.276221 ... 1.7912824 1.0522583 1.0522583 ] [2.055996 1.9559814 1.9559814 ... 2.0563102 2.252208 2.252208 ] [2.055996 1.9559814 1.9559814 ... 
2.0563102 2.252208 2.252208 ] ... [2.1582897 2.3449798 2.3449798 ... 2.3887928 2.3887928 1.9868063 ] [2.1582897 2.3449798 2.3449798 ... 1.6692332 1.9868063 2.7160366 ] [1.9542159 1.9542159 1.9542159 ... 1.7528862 1.9868063 2.7160366 ]] [[1.3498839 1.6706089 1.6706089 ... 1.3618484 1.3618484 1.3618484 ] [2.055996 1.9559814 1.9559814 ... 1.3618484 1.3618484 1.4358143 ] [2.055996 1.9559814 1.9559814 ... 1.3618484 1.3618484 1.4358143 ] ... [2.4456775 2.4456775 2.3449798 ... 1.6692332 1.6692332 2.1499553 ] [1.9542159 2.3449798 2.3449798 ... 1.6692332 1.6692332 2.1499553 ] [1.9542159 1.9542159 1.9542159 ... 1.3745282 1.8213798 2.1499553 ]] ... [[3.1085138 1.2776657 1.6003536 ... 1.5638807 1.5638807 1.2649726 ] [2.2053168 2.2053168 2.2053168 ... 1.5638807 1.5638807 1.2649726 ] [2.2053168 2.2053168 2.2053168 ... 1.2649726 1.2649726 1.2649726 ] ... [2.4061513 2.4061513 2.0147104 ... 1.1707131 1.1707131 1.4041305 ] [2.0147104 2.0147104 2.0147104 ... 1.2526792 1.2526792 1.2526792 ] [1.2774601 1.1162436 1.7556486 ... 1.4761286 1.4761286 1.2526792 ]] [[1.3588084 1.8881387 1.8881387 ... 1.5328707 1.2649726 1.2649726 ] [2.2053168 2.2053168 2.2053168 ... 1.2649726 1.2649726 1.2649726 ] [2.2053168 2.2053168 2.2053168 ... 1.2649726 1.2649726 1.2649726 ] ... [2.4061513 2.4061513 1.8165022 ... 2.421994 2.421994 2.421994 ] [1.7311916 1.7311916 1.7556486 ... 2.421994 2.421994 2.421994 ] [1.2774601 1.1162436 1.7556486 ... 2.421994 2.421994 2.421994 ]] [[1.8229809 1.8881387 1.8881387 ... 1.5328707 1.1530282 1.995237 ] [2.2053168 2.2053168 2.2053168 ... 1.1530282 1.1530282 1.995237 ] [2.8493829 2.8493829 2.8493829 ... 2.107168 2.107168 1.995237 ] ... [1.8539737 1.7230357 1.7556486 ... 2.421994 2.421994 2.421994 ] [1.7230357 1.7230357 1.7556486 ... 2.421994 2.421994 2.421994 ] [1.3320429 1.3320429 1.7556486 ... 2.421994 2.421994 2.421994 ]]]]]; ov_res: [[[[[1.8004663 1.8004663 1.1040591 ... 1.1463127 2.4347227 2.4347227 ] [1.8004663 1.8004663 1.6322079 ... 
1.1463127 2.4347227 2.4347227 ] [1.6322079 1.6322079 1.6322079 ... 1.131033 1.1575804 1.1575804 ] ... [1.517844 2.6426861 2.6426861 ... 1.7842284 2.470748 2.470748 ] [1.517844 2.6426861 2.6426861 ... 1.3783025 2.470748 2.470748 ] [1.8306935 2.6426861 2.6426861 ... 1.3783025 2.470748 2.470748 ]] [[1.8004663 1.8004663 1.1040591 ... 1.3077934 2.4347227 2.4347227 ] [1.8004663 1.8004663 1.2038383 ... 1.3077934 2.4347227 2.4347227 ] [1.0734437 1.2038383 1.2972594 ... 1.3066736 1.3066736 1.6791211 ] ... [1.517844 2.400869 2.400869 ... 1.1078552 2.470748 3.301705 ] [1.517844 2.400869 2.400869 ... 1.0675557 2.470748 3.301705 ] [1.386886 2.400869 2.400869 ... 1.0214245 2.470748 2.470748 ]] [[1.8004663 1.958269 1.958269 ... 1.3077934 0.95667595 0.95667595] [1.8004663 1.8834894 1.8834894 ... 1.3077934 1.1744213 1.6791211 ] [1.6021911 1.6021911 1.2757937 ... 1.3066736 1.3066736 1.6791211 ] ... [1.386535 2.400869 2.400869 ... 1.1078552 1.1078552 3.301705 ] [1.386535 2.400869 2.400869 ... 2.8428233 1.8746157 3.301705 ] [1.386886 2.400869 2.400869 ... 2.8428233 1.8746157 3.0458047 ]] ... [[1.8317785 1.8317785 2.158195 ... 1.9854624 1.9854624 1.886226 ] [1.5061672 1.5061672 2.158195 ... 2.5701523 2.5701523 1.886226 ] [2.0308568 1.5402691 2.158195 ... 3.1779187 3.1779187 1.886226 ] ... [2.0752964 2.0752964 2.0752964 ... 2.3874564 1.5764538 1.8051355 ] [2.0752964 2.0752964 2.0752964 ... 2.3874564 1.9911346 4.0229416 ] [2.2727382 2.2727382 2.2727382 ... 1.9911346 1.9911346 4.0229416 ]] [[2.2166114 2.2166114 2.158195 ... 1.9854624 1.9854624 1.9759197 ] [1.5061672 1.5061672 2.158195 ... 2.5701523 2.5701523 2.4258826 ] [2.0308568 1.3702568 2.158195 ... 3.1779187 3.1779187 2.4258826 ] ... [1.8461533 2.6071916 2.6071916 ... 1.5764538 1.9659437 1.9659437 ] [1.8461533 1.8461533 1.8461533 ... 1.9911346 1.9911346 1.9911346 ] [1.8461533 1.8461533 1.8461533 ... 1.9911346 1.9911346 1.9911346 ]] [[2.2755296 2.2755296 2.158195 ... 2.6642625 2.6642625 2.6642625 ] [1.579624 1.579624 2.158195 ... 
2.6642625 2.6642625 2.6642625 ] [2.0308568 1.579624 2.158195 ... 2.6642625 2.6642625 2.6642625 ] ... [1.9967746 2.6071916 2.6071916 ... 1.3881711 1.9659437 1.9659437 ] [1.8461533 1.8461533 1.8461533 ... 1.5596838 1.5596838 1.8051355 ] [1.8461533 1.8461533 1.8461533 ... 1.5596838 1.5596838 1.8051355 ]]] [[[1.6287253 1.7230041 1.7230041 ... 2.0508575 2.0508575 1.7221086 ] [1.317508 1.7230041 1.7230041 ... 1.7221086 1.7221086 1.7221086 ] [1.7747711 1.7747711 1.7230041 ... 1.6919014 1.6652436 1.6652436 ] ... [1.5589314 1.5589314 1.2438185 ... 1.7255381 1.7255381 2.0451725 ] [1.5999234 1.5999234 1.1295036 ... 2.191894 2.191894 2.191894 ] [1.5999234 1.5999234 1.5722125 ... 2.191894 2.191894 2.191894 ]] [[1.763548 1.7230041 1.7230041 ... 2.0508575 2.0508575 1.4340413 ] [1.763548 1.7230041 3.2505956 ... 1.6919014 1.6652436 1.6652436 ] [1.7747711 1.7747711 3.2505956 ... 1.9281049 1.9281049 1.9281049 ] ... [2.1523793 2.1523793 1.8712938 ... 1.7255381 1.7255381 2.0451725 ] [2.1523793 2.1523793 1.8712938 ... 2.191894 2.191894 2.191894 ] [1.5999234 1.5999234 1.8712938 ... 2.191894 2.191894 2.191894 ]] [[1.763548 1.7230041 1.7230041 ... 2.4241486 2.4241486 2.0511801 ] [1.763548 1.7230041 3.2505956 ... 1.6919014 1.6652436 1.6652436 ] [1.3782918 1.7230041 3.2505956 ... 1.9281049 1.9281049 1.9281049 ] ... [3.7266116 3.7266116 3.7266116 ... 1.3759421 1.3759421 2.2670772 ] [3.7266116 3.7266116 3.7266116 ... 2.191894 2.191894 2.2670772 ] [1.9414926 1.5450534 2.2880926 ... 2.191894 2.191894 2.191894 ]] ... [[1.9455988 1.9322085 1.9322085 ... 1.9732616 1.6844314 1.3080735 ] [1.9455988 1.9322085 1.9322085 ... 1.9732616 1.6844314 1.3080735 ] [1.9322085 1.9322085 1.9322085 ... 1.6844314 1.6844314 1.0507042 ] ... [2.2706227 2.3363035 2.3363035 ... 1.7469229 1.7469229 1.7469229 ] [2.2706227 2.3363035 2.3363035 ... 2.343854 2.343854 2.343854 ] [2.2706227 1.8445891 1.8445891 ... 2.343854 2.343854 2.343854 ]] [[1.9455988 1.9322085 1.9322085 ... 
1.1913803 1.7857867 1.7857867 ] [1.9455988 1.9322085 1.9322085 ... 1.1913803 1.7857867 1.7857867 ] [1.9322085 1.9322085 1.9322085 ... 1.1913803 1.7857867 1.7857867 ] ... [2.2706227 1.8821051 1.9474149 ... 1.6075816 1.5248705 2.0415726 ] [2.2706227 1.8445891 1.8445891 ... 2.343854 2.343854 2.343854 ] [2.2706227 1.8445891 1.8445891 ... 2.343854 2.343854 2.343854 ]] [[1.9455988 1.8369957 1.6573199 ... 1.3876553 1.7857867 1.7857867 ] [1.9455988 1.8369957 1.5448318 ... 1.4396076 1.7857867 1.7857867 ] [1.8369957 1.8369957 1.8302474 ... 1.4396076 1.7857867 1.7857867 ] ... [2.2706227 1.6538566 1.7378435 ... 1.6075816 1.5248705 2.0415726 ] [2.2706227 1.9440876 1.5320152 ... 1.5188912 1.5188912 2.0415726 ] [2.2706227 2.0027473 2.0027473 ... 1.5188912 1.5188912 2.0882285 ]]] [[[2.276221 2.276221 2.276221 ... 1.7912824 1.3803943 1.3803943 ] [2.055996 1.9559814 1.9559814 ... 2.0563102 2.252208 2.252208 ] [2.055996 1.9559814 1.9559814 ... 2.0563102 2.252208 2.252208 ] ... [2.1582897 2.1582897 2.1582897 ... 2.3887928 2.3887928 1.9868063 ] [2.1582897 2.1582897 2.1582897 ... 1.6692332 1.9868063 2.7160366 ] [1.7629327 1.7629327 1.3186121 ... 1.7528862 1.9868063 2.7160366 ]] [[2.276221 2.276221 2.276221 ... 1.7912824 1.0522583 1.0522583 ] [2.055996 1.9559814 1.9559814 ... 2.0563102 2.252208 2.252208 ] [2.055996 1.9559814 1.9559814 ... 2.0563102 2.252208 2.252208 ] ... [2.1582897 2.3449798 2.3449798 ... 2.3887928 2.3887928 1.9868063 ] [2.1582897 2.3449798 2.3449798 ... 1.6692332 1.9868063 2.7160366 ] [1.9542159 1.9542159 1.9542159 ... 1.7528862 1.9868063 2.7160366 ]] [[1.3498839 1.6706089 1.6706089 ... 1.3618484 1.3618484 1.3618484 ] [2.055996 1.9559814 1.9559814 ... 1.3618484 1.3618484 1.4358143 ] [2.055996 1.9559814 1.9559814 ... 1.3618484 1.3618484 1.4358143 ] ... [2.4456775 2.4456775 2.3449798 ... 1.6692332 1.6692332 2.1499553 ] [1.9542159 2.3449798 2.3449798 ... 1.6692332 1.6692332 2.1499553 ] [1.9542159 1.9542159 1.9542159 ... 1.3745282 1.8213798 2.1499553 ]] ... 
[[3.1085138 1.2776657 1.6003536 ... 1.5638807 1.5638807 1.2649726 ] [2.2053168 2.2053168 2.2053168 ... 1.5638807 1.5638807 1.2649726 ] [2.2053168 2.2053168 2.2053168 ... 1.2649726 1.2649726 1.2649726 ] ... [2.4061513 2.4061513 2.0147104 ... 1.1707131 1.1707131 1.4041305 ] [2.0147104 2.0147104 2.0147104 ... 1.2526792 1.2526792 1.2526792 ] [1.2774601 1.1162436 1.7556486 ... 1.4761286 1.4761286 1.2526792 ]] [[1.3588084 1.8881387 1.8881387 ... 1.5328707 1.2649726 1.2649726 ] [2.2053168 2.2053168 2.2053168 ... 1.2649726 1.2649726 1.2649726 ] [2.2053168 2.2053168 2.2053168 ... 1.2649726 1.2649726 1.2649726 ] ... [2.4061513 2.4061513 1.8165022 ... 2.421994 2.421994 2.421994 ] [1.7311916 1.7311916 1.7556486 ... 2.421994 2.421994 2.421994 ] [1.2774601 1.1162436 1.7556486 ... 2.421994 2.421994 2.421994 ]] [[1.8229809 1.8881387 1.8881387 ... 1.5328707 1.1530282 1.995237 ] [2.2053168 2.2053168 2.2053168 ... 1.1530282 1.1530282 1.995237 ] [2.8493829 2.8493829 2.8493829 ... 2.107168 2.107168 1.995237 ] ... [1.8539737 1.7230357 1.7556486 ... 2.421994 2.421994 2.421994 ] [1.7230357 1.7230357 1.7556486 ... 2.421994 2.421994 2.421994 ] [1.3320429 1.3320429 1.7556486 ... 2.421994 2.421994 2.421994 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': [1, 1, 1], 'padding': 1} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5866.aten_max_pool3d, %x.1 : Tensor): %self.ceil_mode : bool = prim::Constant[value=0]() %self.stride : int[] = prim::Constant[value=[1, 1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %5 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.stride, %self.stride, %self.stride, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%5) fw_re: [[[[[1.4079561 1.4079561 1.4079561 ... 2.0634127 2.004914 0.31386462] [1.4079561 1.4079561 1.4079561 ... 2.0634127 2.004914 0.411389 ] [1.4079561 1.792076 1.792076 ... 2.004914 2.004914 1.2320359 ] ... [1.9753255 1.9753255 1.1531802 ... 3.0159059 3.0159059 3.0159059 ] [1.7981002 1.7981002 1.7981002 ... 3.0159059 3.0159059 3.0159059 ] [1.7981002 1.7981002 1.7981002 ... 3.0159059 3.0159059 3.0159059 ]] [[1.4638007 1.4638007 1.4638007 ... 2.0634127 2.004914 0.3360492 ] [1.4638007 1.4638007 1.4638007 ... 2.0634127 2.004914 0.9581144 ] [1.4079561 1.792076 2.2978635 ... 2.004914 2.004914 1.2320359 ] ... [1.9753255 1.9753255 1.5990038 ... 3.0159059 3.0159059 3.0159059 ] [1.7981002 1.7981002 1.7981002 ... 3.0159059 3.0159059 3.0159059 ] [1.7981002 1.7981002 1.7981002 ... 3.0159059 3.0159059 3.0159059 ]] [[1.5466666 1.5466666 1.5466666 ... 2.0634127 1.3333254 1.2629551 ] [1.5466666 1.5466666 1.5466666 ... 2.2654274 1.9283252 1.9283252 ] [1.640065 1.640065 2.2978635 ... 2.2654274 1.9283252 1.9283252 ] ... [1.7181373 1.7181373 1.5990038 ... 3.0159059 3.0159059 3.0159059 ] [1.7181373 1.7181373 1.5806533 ... 3.0159059 3.0159059 3.0159059 ] [1.4223132 1.5806533 1.5806533 ... 3.0159059 3.0159059 3.0159059 ]] ... [[0.84731525 0.9401992 0.9401992 ... 2.3563592 2.3563592 2.3563592 ] [0.84731525 1.1966727 1.1966727 ... 2.3563592 2.3563592 2.3563592 ] [0.91898197 1.1966727 1.8667369 ... 2.3563592 2.3563592 2.3563592 ] ... 
[1.316314 1.316314 1.4537109 ... 2.4482236 2.4482236 1.1036936 ] [1.5981249 1.5981249 1.5981249 ... 2.8175507 2.4482236 1.1036936 ] [1.5981249 1.5981249 1.5981249 ... 2.8175507 1.9651961 1.1036936 ]] [[0.27132127 1.0194877 1.0194877 ... 2.3563592 2.3563592 2.3563592 ] [1.1424581 1.1966727 1.1966727 ... 2.3563592 2.3563592 2.3563592 ] [1.1424581 1.1966727 1.8667369 ... 2.3563592 2.3563592 2.3563592 ] ... [2.0273042 2.0273042 1.4537109 ... 2.4482236 2.4482236 0.6479012 ] [0.9043058 1.1258914 2.301069 ... 2.8175507 2.4482236 1.1205417 ] [0.9043058 1.1258914 2.301069 ... 2.8175507 1.9651961 1.1205417 ]] [[0.27132127 1.0194877 1.0194877 ... 2.3563592 2.3563592 2.3563592 ] [1.1424581 1.1424581 1.0738078 ... 2.3563592 2.3563592 2.3563592 ] [1.1424581 1.1424581 1.0738078 ... 2.3563592 2.3563592 2.3563592 ] ... [2.0273042 2.0273042 1.4537109 ... 1.7288257 1.7288257 0.6479012 ] [0.9043058 1.1258914 2.301069 ... 2.8175507 1.1209697 1.1205417 ] [0.9043058 1.1258914 2.301069 ... 2.8175507 1.1205417 1.1205417 ]]] [[[1.7461337 1.7461337 1.942297 ... 2.1452966 2.1452966 2.1452966 ] [1.7461337 1.7461337 1.942297 ... 2.1452966 2.1452966 2.1452966 ] [1.4896551 1.4896551 1.6031064 ... 1.3894378 1.5156671 1.5156671 ] ... [2.2332222 2.2332222 2.2332222 ... 1.9721065 1.9721065 1.2363685 ] [2.2332222 2.2332222 2.2332222 ... 1.9721065 2.7527764 2.7527764 ] [2.2332222 2.2332222 2.2332222 ... 1.9721065 2.7527764 2.7527764 ]] [[1.7461337 1.7461337 1.942297 ... 2.1452966 2.1452966 2.1452966 ] [1.7461337 1.7461337 1.942297 ... 2.1452966 2.1452966 2.1452966 ] [1.4896551 1.6905329 1.7530513 ... 1.3894378 1.5156671 1.5156671 ] ... [2.2332222 2.2332222 2.2332222 ... 1.9721065 1.9721065 1.2363685 ] [2.2332222 2.2332222 2.2332222 ... 1.9721065 2.7527764 2.7527764 ] [2.2332222 2.2332222 2.2332222 ... 1.9721065 2.7527764 2.7527764 ]] [[2.018236 2.018236 2.018236 ... 2.1452966 2.1452966 2.1452966 ] [2.018236 2.018236 2.018236 ... 2.1452966 2.1452966 2.1452966 ] [1.0043297 1.6905329 1.7530513 ... 
1.3894378 1.830476 1.830476 ] ... [2.2332222 2.2332222 2.2332222 ... 1.7043453 1.7043453 1.1146896 ] [2.3345807 2.3345807 2.3345807 ... 1.1920968 1.1920968 1.1146896 ] [2.3345807 2.3345807 2.3345807 ... 1.1920968 1.1920968 0.9460214 ]] ... [[2.1728504 2.1728504 2.1728504 ... 1.8167772 1.644443 1.644443 ] [2.1728504 2.1728504 2.1728504 ... 1.8167772 1.644443 1.644443 ] [1.33967 1.33967 1.3633335 ... 1.8167772 1.4893984 1.4893984 ] ... [2.014108 2.014108 2.014108 ... 1.6705543 1.7360703 1.7360703 ] [1.9353019 1.9353019 1.9353019 ... 1.6705543 1.7360703 1.7360703 ] [1.9353019 1.9353019 1.9353019 ... 1.6705543 1.6705543 0.79231995]] [[1.388988 2.1411471 2.1411471 ... 1.5008781 1.644443 1.644443 ] [1.388988 2.1411471 2.1411471 ... 1.5008781 1.644443 1.644443 ] [1.4195983 2.1411471 2.1411471 ... 1.4893984 1.4893984 1.4893984 ] ... [2.014108 2.014108 2.014108 ... 1.5818068 1.7360703 1.7360703 ] [1.8509344 1.8509344 1.8830479 ... 1.5818068 1.7360703 1.7360703 ] [1.6495321 1.6495321 1.8830479 ... 1.1890438 1.0901947 0.79231995]] [[1.388988 2.1411471 2.1411471 ... 1.5008781 1.644443 1.644443 ] [1.388988 2.1411471 2.1411471 ... 1.5008781 1.644443 1.644443 ] [1.4195983 2.1411471 2.1411471 ... 1.4893984 1.4893984 1.4893984 ] ... [1.8509344 1.8509344 1.8509344 ... 1.1890438 1.7360703 1.7360703 ] [1.8509344 1.8509344 1.8509344 ... 1.1890438 1.7360703 1.7360703 ] [1.6495321 1.6495321 1.4150771 ... 1.1890438 1.0901947 0.72881174]]] [[[0.9156291 0.9156291 2.2269588 ... 1.9389671 1.1967106 1.1967106 ] [0.9156291 0.9156291 2.2269588 ... 1.9389671 2.0871894 2.0871894 ] [1.8758037 2.5950618 2.9636967 ... 1.2120856 2.1399379 2.1399379 ] ... [0.712882 0.712882 1.2114763 ... 1.6477826 1.9525528 1.9525528 ] [1.4343967 1.4343967 1.4343967 ... 1.6477826 1.9525528 1.9525528 ] [1.4343967 1.4343967 1.4343967 ... 1.6477826 1.9525528 1.9525528 ]] [[1.188261 1.188261 2.2269588 ... 1.9389671 1.5155622 1.5155622 ] [1.4445028 1.4445028 2.2269588 ... 
1.9389671 2.0871894 2.0871894 ] [1.8758037 2.5950618 2.9636967 ... 1.9530835 2.1399379 2.1399379 ] ... [0.712882 1.361132 1.361132 ... 2.2234292 1.9525528 1.9525528 ] [1.4343967 1.4343967 1.4343967 ... 2.2234292 1.9525528 1.9525528 ] [1.4343967 1.4343967 1.4343967 ... 1.6477826 1.9525528 1.9525528 ]] [[1.188261 1.188261 2.2269588 ... 1.9389671 1.5155622 1.5155622 ] [1.4445028 1.4445028 2.2269588 ... 1.9389671 1.5480372 1.5480372 ] [1.4445028 1.4445028 2.2269588 ... 1.9530835 2.1399379 2.1399379 ] ... [1.1367681 1.361132 1.6823308 ... 2.5049908 2.5049908 1.3761753 ] [1.7323657 1.7323657 1.6823308 ... 2.2234292 1.9267029 1.9267029 ] [1.7323657 1.7323657 1.4343967 ... 1.9267029 1.9267029 1.9267029 ]] ... [[1.3137894 2.1217802 2.1217802 ... 2.7509346 2.7509346 1.3618759 ] [1.3137894 2.1217802 2.1217802 ... 2.7509346 2.7509346 2.1818268 ] [0.74335146 2.1217802 2.1217802 ... 2.7509346 2.7509346 2.1818268 ] ... [1.7670357 1.7670357 1.3043559 ... 2.7348444 2.7348444 2.7348444 ] [1.7670357 1.7670357 2.1873124 ... 2.7348444 2.7348444 2.7348444 ] [1.7670357 1.7670357 2.1873124 ... 2.7348444 2.7348444 2.7348444 ]] [[1.2849513 2.1217802 2.2657442 ... 2.7509346 2.7509346 1.0294875 ] [1.2849513 2.1217802 2.2657442 ... 2.7509346 2.7509346 2.1818268 ] [0.40247947 2.1217802 2.1217802 ... 2.7509346 2.7509346 2.1818268 ] ... [1.7670357 1.7670357 1.3043559 ... 2.7348444 2.7348444 2.7348444 ] [1.7670357 1.7670357 1.3043559 ... 2.7348444 2.7348444 2.7348444 ] [1.7670357 1.7670357 1.1324793 ... 2.7348444 2.7348444 2.7348444 ]] [[1.2849513 1.2849513 2.2657442 ... 2.7509346 2.7509346 0.8531323 ] [1.2849513 1.2849513 2.2657442 ... 2.7509346 2.7509346 2.1818268 ] [0.12920336 0.45782384 1.4586672 ... 2.7509346 2.7509346 2.1818268 ] ... [1.7670357 1.7670357 1.3043559 ... 2.3325577 1.6849006 1.6849006 ] [1.7670357 1.7670357 1.3043559 ... 2.3325577 2.367289 2.367289 ] [1.7670357 1.7670357 1.1324793 ... 2.3325577 2.367289 2.367289 ]]]]]; ov_res: [[[[[1.4079561 1.4079561 1.4079561 ... 
2.0634127 2.004914 0.31386462] [1.4079561 1.4079561 1.4079561 ... 2.0634127 2.004914 0.411389 ] [1.4079561 1.792076 1.792076 ... 2.004914 2.004914 1.2320359 ] ... [1.9753255 1.9753255 1.1531802 ... 3.0159059 3.0159059 3.0159059 ] [1.7981002 1.7981002 1.7981002 ... 3.0159059 3.0159059 3.0159059 ] [1.7981002 1.7981002 1.7981002 ... 3.0159059 3.0159059 3.0159059 ]] [[1.4638007 1.4638007 1.4638007 ... 2.0634127 2.004914 0.3360492 ] [1.4638007 1.4638007 1.4638007 ... 2.0634127 2.004914 0.9581144 ] [1.4079561 1.792076 2.2978635 ... 2.004914 2.004914 1.2320359 ] ... [1.9753255 1.9753255 1.5990038 ... 3.0159059 3.0159059 3.0159059 ] [1.7981002 1.7981002 1.7981002 ... 3.0159059 3.0159059 3.0159059 ] [1.7981002 1.7981002 1.7981002 ... 3.0159059 3.0159059 3.0159059 ]] [[1.5466666 1.5466666 1.5466666 ... 2.0634127 1.3333254 1.2629551 ] [1.5466666 1.5466666 1.5466666 ... 2.2654274 1.9283252 1.9283252 ] [1.640065 1.640065 2.2978635 ... 2.2654274 1.9283252 1.9283252 ] ... [1.7181373 1.7181373 1.5990038 ... 3.0159059 3.0159059 3.0159059 ] [1.7181373 1.7181373 1.5806533 ... 3.0159059 3.0159059 3.0159059 ] [1.4223132 1.5806533 1.5806533 ... 3.0159059 3.0159059 3.0159059 ]] ... [[0.84731525 0.9401992 0.9401992 ... 2.3563592 2.3563592 2.3563592 ] [0.84731525 1.1966727 1.1966727 ... 2.3563592 2.3563592 2.3563592 ] [0.91898197 1.1966727 1.8667369 ... 2.3563592 2.3563592 2.3563592 ] ... [1.316314 1.316314 1.4537109 ... 2.4482236 2.4482236 1.1036936 ] [1.5981249 1.5981249 1.5981249 ... 2.8175507 2.4482236 1.1036936 ] [1.5981249 1.5981249 1.5981249 ... 2.8175507 1.9651961 1.1036936 ]] [[0.27132127 1.0194877 1.0194877 ... 2.3563592 2.3563592 2.3563592 ] [1.1424581 1.1966727 1.1966727 ... 2.3563592 2.3563592 2.3563592 ] [1.1424581 1.1966727 1.8667369 ... 2.3563592 2.3563592 2.3563592 ] ... [2.0273042 2.0273042 1.4537109 ... 2.4482236 2.4482236 0.6479012 ] [0.9043058 1.1258914 2.301069 ... 2.8175507 2.4482236 1.1205417 ] [0.9043058 1.1258914 2.301069 ... 
2.8175507 1.9651961 1.1205417 ]] [[0.27132127 1.0194877 1.0194877 ... 2.3563592 2.3563592 2.3563592 ] [1.1424581 1.1424581 1.0738078 ... 2.3563592 2.3563592 2.3563592 ] [1.1424581 1.1424581 1.0738078 ... 2.3563592 2.3563592 2.3563592 ] ... [2.0273042 2.0273042 1.4537109 ... 1.7288257 1.7288257 0.6479012 ] [0.9043058 1.1258914 2.301069 ... 2.8175507 1.1209697 1.1205417 ] [0.9043058 1.1258914 2.301069 ... 2.8175507 1.1205417 1.1205417 ]]] [[[1.7461337 1.7461337 1.942297 ... 2.1452966 2.1452966 2.1452966 ] [1.7461337 1.7461337 1.942297 ... 2.1452966 2.1452966 2.1452966 ] [1.4896551 1.4896551 1.6031064 ... 1.3894378 1.5156671 1.5156671 ] ... [2.2332222 2.2332222 2.2332222 ... 1.9721065 1.9721065 1.2363685 ] [2.2332222 2.2332222 2.2332222 ... 1.9721065 2.7527764 2.7527764 ] [2.2332222 2.2332222 2.2332222 ... 1.9721065 2.7527764 2.7527764 ]] [[1.7461337 1.7461337 1.942297 ... 2.1452966 2.1452966 2.1452966 ] [1.7461337 1.7461337 1.942297 ... 2.1452966 2.1452966 2.1452966 ] [1.4896551 1.6905329 1.7530513 ... 1.3894378 1.5156671 1.5156671 ] ... [2.2332222 2.2332222 2.2332222 ... 1.9721065 1.9721065 1.2363685 ] [2.2332222 2.2332222 2.2332222 ... 1.9721065 2.7527764 2.7527764 ] [2.2332222 2.2332222 2.2332222 ... 1.9721065 2.7527764 2.7527764 ]] [[2.018236 2.018236 2.018236 ... 2.1452966 2.1452966 2.1452966 ] [2.018236 2.018236 2.018236 ... 2.1452966 2.1452966 2.1452966 ] [1.0043297 1.6905329 1.7530513 ... 1.3894378 1.830476 1.830476 ] ... [2.2332222 2.2332222 2.2332222 ... 1.7043453 1.7043453 1.1146896 ] [2.3345807 2.3345807 2.3345807 ... 1.1920968 1.1920968 1.1146896 ] [2.3345807 2.3345807 2.3345807 ... 1.1920968 1.1920968 0.9460214 ]] ... [[2.1728504 2.1728504 2.1728504 ... 1.8167772 1.644443 1.644443 ] [2.1728504 2.1728504 2.1728504 ... 1.8167772 1.644443 1.644443 ] [1.33967 1.33967 1.3633335 ... 1.8167772 1.4893984 1.4893984 ] ... [2.014108 2.014108 2.014108 ... 1.6705543 1.7360703 1.7360703 ] [1.9353019 1.9353019 1.9353019 ... 
1.6705543 1.7360703 1.7360703 ] [1.9353019 1.9353019 1.9353019 ... 1.6705543 1.6705543 0.79231995]] [[1.388988 2.1411471 2.1411471 ... 1.5008781 1.644443 1.644443 ] [1.388988 2.1411471 2.1411471 ... 1.5008781 1.644443 1.644443 ] [1.4195983 2.1411471 2.1411471 ... 1.4893984 1.4893984 1.4893984 ] ... [2.014108 2.014108 2.014108 ... 1.5818068 1.7360703 1.7360703 ] [1.8509344 1.8509344 1.8830479 ... 1.5818068 1.7360703 1.7360703 ] [1.6495321 1.6495321 1.8830479 ... 1.1890438 1.0901947 0.79231995]] [[1.388988 2.1411471 2.1411471 ... 1.5008781 1.644443 1.644443 ] [1.388988 2.1411471 2.1411471 ... 1.5008781 1.644443 1.644443 ] [1.4195983 2.1411471 2.1411471 ... 1.4893984 1.4893984 1.4893984 ] ... [1.8509344 1.8509344 1.8509344 ... 1.1890438 1.7360703 1.7360703 ] [1.8509344 1.8509344 1.8509344 ... 1.1890438 1.7360703 1.7360703 ] [1.6495321 1.6495321 1.4150771 ... 1.1890438 1.0901947 0.72881174]]] [[[0.9156291 0.9156291 2.2269588 ... 1.9389671 1.1967106 1.1967106 ] [0.9156291 0.9156291 2.2269588 ... 1.9389671 2.0871894 2.0871894 ] [1.8758037 2.5950618 2.9636967 ... 1.2120856 2.1399379 2.1399379 ] ... [0.712882 0.712882 1.2114763 ... 1.6477826 1.9525528 1.9525528 ] [1.4343967 1.4343967 1.4343967 ... 1.6477826 1.9525528 1.9525528 ] [1.4343967 1.4343967 1.4343967 ... 1.6477826 1.9525528 1.9525528 ]] [[1.188261 1.188261 2.2269588 ... 1.9389671 1.5155622 1.5155622 ] [1.4445028 1.4445028 2.2269588 ... 1.9389671 2.0871894 2.0871894 ] [1.8758037 2.5950618 2.9636967 ... 1.9530835 2.1399379 2.1399379 ] ... [0.712882 1.361132 1.361132 ... 2.2234292 1.9525528 1.9525528 ] [1.4343967 1.4343967 1.4343967 ... 2.2234292 1.9525528 1.9525528 ] [1.4343967 1.4343967 1.4343967 ... 1.6477826 1.9525528 1.9525528 ]] [[1.188261 1.188261 2.2269588 ... 1.9389671 1.5155622 1.5155622 ] [1.4445028 1.4445028 2.2269588 ... 1.9389671 1.5480372 1.5480372 ] [1.4445028 1.4445028 2.2269588 ... 1.9530835 2.1399379 2.1399379 ] ... [1.1367681 1.361132 1.6823308 ... 
2.5049908 2.5049908 1.3761753 ] [1.7323657 1.7323657 1.6823308 ... 2.2234292 1.9267029 1.9267029 ] [1.7323657 1.7323657 1.4343967 ... 1.9267029 1.9267029 1.9267029 ]] ... [[1.3137894 2.1217802 2.1217802 ... 2.7509346 2.7509346 1.3618759 ] [1.3137894 2.1217802 2.1217802 ... 2.7509346 2.7509346 2.1818268 ] [0.74335146 2.1217802 2.1217802 ... 2.7509346 2.7509346 2.1818268 ] ... [1.7670357 1.7670357 1.3043559 ... 2.7348444 2.7348444 2.7348444 ] [1.7670357 1.7670357 2.1873124 ... 2.7348444 2.7348444 2.7348444 ] [1.7670357 1.7670357 2.1873124 ... 2.7348444 2.7348444 2.7348444 ]] [[1.2849513 2.1217802 2.2657442 ... 2.7509346 2.7509346 1.0294875 ] [1.2849513 2.1217802 2.2657442 ... 2.7509346 2.7509346 2.1818268 ] [0.40247947 2.1217802 2.1217802 ... 2.7509346 2.7509346 2.1818268 ] ... [1.7670357 1.7670357 1.3043559 ... 2.7348444 2.7348444 2.7348444 ] [1.7670357 1.7670357 1.3043559 ... 2.7348444 2.7348444 2.7348444 ] [1.7670357 1.7670357 1.1324793 ... 2.7348444 2.7348444 2.7348444 ]] [[1.2849513 1.2849513 2.2657442 ... 2.7509346 2.7509346 0.8531323 ] [1.2849513 1.2849513 2.2657442 ... 2.7509346 2.7509346 2.1818268 ] [0.12920336 0.45782384 1.4586672 ... 2.7509346 2.7509346 2.1818268 ] ... [1.7670357 1.7670357 1.3043559 ... 2.3325577 1.6849006 1.6849006 ] [1.7670357 1.7670357 1.3043559 ... 2.3325577 2.367289 2.367289 ] [1.7670357 1.7670357 1.1324793 ... 2.3325577 2.367289 2.367289 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': [3, 3, 3], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5868.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%6) fw_re: [[[[[1.2842813 2.0815465 2.6661115 2.1282225 2.6614358] [2.0279868 2.0992076 1.7078805 1.6091858 2.1374512] [1.6531521 2.551929 2.8063254 1.7378099 2.0277395] [2.1606479 2.0177705 2.1347623 1.7263867 1.9897258] [1.6595638 2.341698 1.7151011 1.8213551 1.9447452]] [[1.8223238 1.5901594 1.7612233 2.0371277 2.2199643] [2.2805247 2.426406 1.844746 2.197034 3.3170366] [1.1939551 1.7680866 2.5890675 1.4648385 2.0241518] [2.6720135 2.453538 2.1250875 2.0778074 2.051204 ] [1.8008809 1.7344362 1.7890667 2.2193334 2.1429281]] [[2.022476 2.4204242 2.3966422 2.1388352 2.363361 ] [1.256116 1.2229058 2.2419333 2.528331 2.3359761] [1.9611125 1.3976594 1.50908 2.028994 2.3858798] [1.8294461 1.5640483 1.6043214 2.3955975 2.6903083] [3.2061253 2.2981782 2.5370932 2.708583 1.849111 ]] [[2.5898097 2.1090107 3.1045253 2.165296 2.0964015] [1.5733839 2.4179232 1.6592672 1.028145 2.8088064] [2.437348 1.7328016 1.7627515 1.9823991 2.0599232] [2.1072462 1.6557044 1.2679846 1.8458593 1.3732255] [1.8119283 2.1254086 1.6425496 2.1202338 2.2155962]] [[2.284959 2.4728708 3.6320803 2.092146 1.2707049] [2.323064 2.227278 3.037133 2.236154 2.732686 ] [1.6078237 2.1612728 1.5955615 2.0951161 1.1598226] [1.9653794 2.061416 1.7520354 2.2763941 2.9620705] [1.6763662 2.5724506 1.265579 1.7395878 1.661924 ]]] [[[1.9103537 1.9729978 2.5378082 2.1836674 1.925412 ] [3.2337239 1.5404264 1.6342323 1.9262197 1.8738353] 
[1.6913756 2.4929829 1.6191115 1.7958056 1.7532955] [1.9644054 1.9422576 1.8188032 1.3666028 2.3232558] [1.4284008 1.8817872 2.5616963 2.3031685 1.5382001]] [[1.9770916 1.5500407 2.3558674 2.6676798 1.5829734] [2.2476346 2.2756493 1.3330672 1.1922166 2.193736 ] [2.0205796 1.4380378 2.0686226 2.2418776 2.4188468] [1.9447827 2.5816703 1.5874013 1.8113333 2.1859512] [2.1662226 2.718779 1.6797687 1.9066802 1.4282593]] [[1.8319236 2.7440734 2.4854808 2.446853 1.7652358] [2.3135111 2.0445304 1.9272338 1.9193664 1.866999 ] [2.0263238 2.2189214 1.5364473 1.102607 2.1331723] [1.7610102 2.018737 1.9574022 2.2511642 2.584157 ] [2.4653816 2.3346355 2.5249765 2.4369764 1.6076694]] [[1.6347737 1.5849649 2.4454508 1.1683619 1.9333526] [1.610312 1.7163575 1.9086516 1.5325171 2.4037125] [1.165612 1.3270003 2.274758 1.2318702 2.2433326] [1.8432572 2.9252558 1.5991596 1.6827637 1.7101225] [1.0976894 1.5318673 2.003701 1.8857744 2.137818 ]] [[2.5595233 1.8048631 2.8257067 1.3618081 1.3575315] [1.9462636 1.9209106 1.3924724 1.7429496 2.1761239] [1.9629948 1.5446174 2.189749 2.722208 2.9719884] [2.411856 2.191088 1.1897486 1.7049525 1.7052275] [1.8727713 2.0630915 2.5512388 1.8850003 2.4678667]]] [[[3.9862185 1.5411448 2.517776 1.985685 1.8112994] [2.4425683 1.9682026 2.1200976 3.048398 1.3435947] [2.448498 1.7182953 1.3657925 1.4517841 1.7328807] [1.9499342 1.7742437 2.4124434 2.017935 2.2478085] [2.5137591 1.3873498 2.2999032 1.5580469 1.881707 ]] [[1.7551471 1.7038922 1.810395 2.3161237 2.8385413] [4.1309257 1.8938501 1.5070144 1.7537613 1.2904518] [1.9145739 1.6431773 1.392439 2.365088 2.5453787] [2.4880617 2.8697913 1.9143564 1.3599515 2.1406698] [1.7158467 1.7679605 2.1417153 2.2155175 2.299314 ]] [[1.5002217 1.8693168 1.9432145 1.651578 2.0975568] [1.6107831 2.399242 1.6937312 2.5956903 2.6379714] [1.3013448 2.3181763 2.5114758 1.6155823 3.2583454] [2.0224428 1.5556678 1.3484274 1.1315664 1.6871941] [2.3793373 1.6735351 2.10093 1.48353 1.674521 ]] [[3.317569 2.0177693 1.5530545 
1.9667879 1.9023881] [1.6408473 1.8625631 2.8921828 2.465858 2.0005374] [2.6122994 2.1925132 2.1118753 1.5162781 1.3477058] [2.2296388 2.315235 1.7380831 2.0464509 2.2897737] [2.8672976 2.7348232 1.5231085 2.4051218 2.4907775]] [[1.6026347 1.7525681 2.298558 2.0423446 1.6989174] [1.7575854 2.0152562 2.8231835 2.3298912 1.8507249] [1.4742255 1.7373197 2.079262 2.386475 2.3939948] [2.0310392 3.1493154 1.2722037 2.1160629 2.9446688] [1.845603 1.8608615 1.9683517 1.4915731 1.2822064]]]]]; ov_res: [[[[[1.2842813 2.0815465 2.6661115 2.1282225 2.6614358] [2.0279868 2.0992076 1.7078805 1.6091858 2.1374512] [1.6531521 2.551929 2.8063254 1.7378099 2.0277395] [2.1606479 2.0177705 2.1347623 1.7263867 1.9897258] [1.6595638 2.341698 1.7151011 1.8213551 1.9447452]] [[1.8223238 1.5901594 1.7612233 2.0371277 2.2199643] [2.2805247 2.426406 1.844746 2.197034 3.3170366] [1.1939551 1.7680866 2.5890675 1.4648385 2.0241518] [2.6720135 2.453538 2.1250875 2.0778074 2.051204 ] [1.8008809 1.7344362 1.7890667 2.2193334 2.1429281]] [[2.022476 2.4204242 2.3966422 2.1388352 2.363361 ] [1.256116 1.2229058 2.2419333 2.528331 2.3359761] [1.9611125 1.3976594 1.50908 2.028994 2.3858798] [1.8294461 1.5640483 1.6043214 2.3955975 2.6903083] [3.2061253 2.2981782 2.5370932 2.708583 1.849111 ]] [[2.5898097 2.1090107 3.1045253 2.165296 2.0964015] [1.5733839 2.4179232 1.6592672 1.028145 2.8088064] [2.437348 1.7328016 1.7627515 1.9823991 2.0599232] [2.1072462 1.6557044 1.2679846 1.8458593 1.3732255] [1.8119283 2.1254086 1.6425496 2.1202338 2.2155962]] [[2.284959 2.4728708 3.6320803 2.092146 1.2707049] [2.323064 2.227278 3.037133 2.236154 2.732686 ] [1.6078237 2.1612728 1.5955615 2.0951161 1.1598226] [1.9653794 2.061416 1.7520354 2.2763941 2.9620705] [1.6763662 2.5724506 1.265579 1.7395878 1.661924 ]]] [[[1.9103537 1.9729978 2.5378082 2.1836674 1.925412 ] [3.2337239 1.5404264 1.6342323 1.9262197 1.8738353] [1.6913756 2.4929829 1.6191115 1.7958056 1.7532955] [1.9644054 1.9422576 1.8188032 1.3666028 2.3232558] 
[1.4284008 1.8817872 2.5616963 2.3031685 1.5382001]] [[1.9770916 1.5500407 2.3558674 2.6676798 1.5829734] [2.2476346 2.2756493 1.3330672 1.1922166 2.193736 ] [2.0205796 1.4380378 2.0686226 2.2418776 2.4188468] [1.9447827 2.5816703 1.5874013 1.8113333 2.1859512] [2.1662226 2.718779 1.6797687 1.9066802 1.4282593]] [[1.8319236 2.7440734 2.4854808 2.446853 1.7652358] [2.3135111 2.0445304 1.9272338 1.9193664 1.866999 ] [2.0263238 2.2189214 1.5364473 1.102607 2.1331723] [1.7610102 2.018737 1.9574022 2.2511642 2.584157 ] [2.4653816 2.3346355 2.5249765 2.4369764 1.6076694]] [[1.6347737 1.5849649 2.4454508 1.1683619 1.9333526] [1.610312 1.7163575 1.9086516 1.5325171 2.4037125] [1.165612 1.3270003 2.274758 1.2318702 2.2433326] [1.8432572 2.9252558 1.5991596 1.6827637 1.7101225] [1.0976894 1.5318673 2.003701 1.8857744 2.137818 ]] [[2.5595233 1.8048631 2.8257067 1.3618081 1.3575315] [1.9462636 1.9209106 1.3924724 1.7429496 2.1761239] [1.9629948 1.5446174 2.189749 2.722208 2.9719884] [2.411856 2.191088 1.1897486 1.7049525 1.7052275] [1.8727713 2.0630915 2.5512388 1.8850003 2.4678667]]] [[[3.9862185 1.5411448 2.517776 1.985685 1.8112994] [2.4425683 1.9682026 2.1200976 3.048398 1.3435947] [2.448498 1.7182953 1.3657925 1.4517841 1.7328807] [1.9499342 1.7742437 2.4124434 2.017935 2.2478085] [2.5137591 1.3873498 2.2999032 1.5580469 1.881707 ]] [[1.7551471 1.7038922 1.810395 2.3161237 2.8385413] [4.1309257 1.8938501 1.5070144 1.7537613 1.2904518] [1.9145739 1.6431773 1.392439 2.365088 2.5453787] [2.4880617 2.8697913 1.9143564 1.3599515 2.1406698] [1.7158467 1.7679605 2.1417153 2.2155175 2.299314 ]] [[1.5002217 1.8693168 1.9432145 1.651578 2.0975568] [1.6107831 2.399242 1.6937312 2.5956903 2.6379714] [1.3013448 2.3181763 2.5114758 1.6155823 3.2583454] [2.0224428 1.5556678 1.3484274 1.1315664 1.6871941] [2.3793373 1.6735351 2.10093 1.48353 1.674521 ]] [[3.317569 2.0177693 1.5530545 1.9667879 1.9023881] [1.6408473 1.8625631 2.8921828 2.465858 2.0005374] [2.6122994 2.1925132 2.1118753 
1.5162781 1.3477058] [2.2296388 2.315235 1.7380831 2.0464509 2.2897737] [2.8672976 2.7348232 1.5231085 2.4051218 2.4907775]] [[1.6026347 1.7525681 2.298558 2.0423446 1.6989174] [1.7575854 2.0152562 2.8231835 2.3298912 1.8507249] [1.4742255 1.7373197 2.079262 2.386475 2.3939948] [2.0310392 3.1493154 1.2722037 2.1160629 2.9446688] [1.845603 1.8608615 1.9683517 1.4915731 1.2822064]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:1 - ceil_mode:False - params:{'kernel_size': [3, 2, 1], 'stride': [3, 1, 1], 'padding': [0, 0, 0]} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5870.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1, 1]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.stride : int[] = prim::Constant[value=[3, 1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2, 1]]() %7 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.stride, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%7) fw_re: [[[[[ 1.5591613 1.0728233 0.80836654 ... 1.7148929 1.741599 1.7251312 ] [ 1.5591613 1.0578142 1.1225674 ... 1.0087334 1.2857833 1.5260173 ] [ 0.89435726 1.0365251 1.1225674 ... 1.0087334 1.2442024 1.5260173 ] ... [ 2.3812926 1.4045259 1.3014239 ... 1.9086199 1.3965666 1.2309635 ] [ 2.3812926 0.20173413 1.3014239 ... 1.3572223 1.3965666 1.3741645 ] [ 2.0696433 0.20173413 0.95976084 ... 0.47614434 0.5961477 1.3741645 ]] [[ 1.2964034 0.5532409 1.8870828 ... 1.4956257 2.0368857 1.1055281 ] [ 1.1547025 1.7579343 1.8870828 ... 1.4956257 2.6551826 1.1055281 ] [ 2.2795942 1.7579343 1.0261353 ... 1.4444804 2.6551826 0.80884886] ... [ 0.5870776 0.81635165 0.53966385 ... 1.6411372 1.055453 0.95016676] [ 0.5587257 0.81635165 1.0643358 ... 1.6411372 1.3051231 0.95016676] [ 1.0453666 0.44811776 1.2533358 ... 2.1931064 1.3051231 0.69093364]] [[ 0.42060012 0.7859676 1.148137 ... 1.2805371 1.9534596 1.6447878 ] [ 0.8699454 0.5549999 0.6799722 ... 1.3060733 1.9534596 1.1747363 ] [ 0.8699454 1.8708237 0.43799895 ... 1.3290396 0.9392915 1.4684715 ] ... [ 1.576573 1.127668 0.83262914 ... 0.6127951 0.7685627 1.9087851 ] [ 1.576573 1.127668 1.6139431 ... 0.7592007 0.7685627 1.9087851 ] [ 1.289598 2.0978692 1.6139431 ... 1.895455 0.7395826 2.0567846 ]] [[ 0.9748613 1.5866623 1.3932755 ... 
1.6059568 1.5065715 0.3371863 ] [ 0.87067044 1.8968403 1.3932755 ... 1.6059568 1.5065715 0.3371863 ] [ 1.0064582 1.8968403 0.80151707 ... 1.4132578 1.3338413 0.05554366] ... [ 0.42273057 2.4034183 1.3708342 ... 1.9871155 1.0027435 1.6242162 ] [-0.14105788 0.5717518 1.3708342 ... 1.2251879 1.0027435 1.466206 ] [ 0.764334 1.8235534 1.3057727 ... 1.2251879 1.5627965 2.710993 ]] [[ 1.0846542 0.76797944 0.24325006 ... -0.26439255 0.40863487 1.4269365 ] [ 1.0846542 2.5009139 1.6370507 ... 0.46189675 -0.06065868 0.8642889 ] [ 0.8630487 2.5009139 1.6370507 ... 0.52658045 0.49675816 1.0780481 ] ... [ 1.1323775 1.8146678 1.5396777 ... 1.4927632 1.432035 2.5273128 ] [ 1.1323775 1.682794 2.3219945 ... 1.4927632 0.3532063 2.5273128 ] [ 0.81600636 1.682794 2.3219945 ... 0.6591124 0.3532063 0.34837863]]] [[[ 1.1822286 0.9675275 1.9763335 ... 1.0923941 0.38525376 0.35436773] [ 1.0215731 1.7792044 2.6525207 ... 1.291515 0.60643214 0.8433544 ] [ 1.3563123 1.7792044 2.6525207 ... 1.4403937 1.6130574 0.8433544 ] ... [ 2.520274 0.5952773 1.0146711 ... -0.02847905 1.2251513 1.2618537 ] [ 1.305146 0.7056373 1.3461461 ... -0.28077847 1.268838 1.2618537 ] [ 1.1464869 0.7056373 1.3461461 ... 0.9835523 1.268838 0.6345113 ]] [[ 0.80023223 0.68818474 1.3454322 ... 1.9161748 1.4945139 0.8916952 ] [ 1.8235184 3.3639388 1.5806476 ... 0.711615 1.6235399 0.8916952 ] [ 1.8235184 3.3639388 1.5806476 ... 2.3587668 1.6235399 0.94328654] ... [ 1.6006145 1.2513994 1.4312596 ... 1.3310876 0.5687687 1.4038403 ] [ 2.0454283 1.2513994 1.2215631 ... 1.3310876 1.5687865 2.5212007 ] [ 2.0454283 0.20762324 2.9221349 ... 0.62955225 1.5687865 2.5212007 ]] [[ 1.8251035 1.9315914 2.1588082 ... 1.1452606 0.9786897 2.1948102 ] [ 1.0943074 1.259058 1.2520343 ... 1.1452606 1.3798281 0.9111968 ] [ 0.27365085 1.259058 1.2089998 ... 0.5426304 1.3798281 1.7790712 ] ... [ 2.4561756 2.2667103 1.2467294 ... 1.9375471 2.1184344 1.768559 ] [ 0.7779557 2.2667103 1.1988649 ... 
1.0887312 2.1184344 1.768559 ] [ 0.7779557 1.4401143 0.50820416 ... 1.0887312 1.9410598 0.13745107]] [[ 0.75410247 1.2080822 1.9065353 ... 1.4948243 1.096775 1.3906009 ] [ 1.1318614 1.3250674 1.9065353 ... 1.4948243 1.4926761 1.0117218 ] [ 1.2961003 1.7664549 0.4947434 ... 0.91498685 1.4926761 1.3877038 ] ... [ 0.5839232 1.1752083 1.395487 ... 0.5846335 1.096969 1.3991324 ] [ 1.0566458 0.3190141 1.8986814 ... 0.86601764 1.096969 1.3991324 ] [ 1.0566458 0.8939073 2.0147052 ... 0.86601764 1.311724 0.29246062]] [[ 1.9762881 1.2262251 1.2984692 ... 1.2352362 0.44670197 2.1639864 ] [ 1.5762649 1.2262251 2.4537888 ... 1.2352362 0.7040912 2.1639864 ] [ 1.5762649 2.4931593 2.4537888 ... 1.2888012 2.8016534 1.5464063 ] ... [ 1.6617368 0.93565553 0.6295751 ... 0.29720667 2.1351454 2.9202678 ] [ 1.848977 0.5113269 1.2478578 ... 0.7064463 0.4362321 2.9202678 ] [ 1.848977 0.5113269 1.2478578 ... 0.7064463 1.7370914 0.7253266 ]]] [[[ 0.9647273 1.0698235 0.65826213 ... 1.2518295 2.0954585 0.7312272 ] [ 2.2052827 0.91377044 0.9650719 ... 1.2518295 2.0954585 1.4350748 ] [ 2.2052827 1.1769197 1.7243282 ... 1.4277482 1.5290419 1.4350748 ] ... [ 0.01403552 0.8597131 1.337241 ... 1.4390572 1.8420615 3.4605381 ] [ 1.1840165 1.1956588 1.3238537 ... 1.4390572 1.8420615 3.4605381 ] [ 1.1840165 1.1956588 1.3238537 ... 0.30132842 0.74667966 0.97477484]] [[ 1.7853724 2.0749176 3.0256207 ... 0.44912735 1.370249 1.7667606 ] [ 0.9249756 2.0749176 1.4262289 ... 0.6789998 1.387861 1.7667606 ] [ 0.96438414 0.42268944 1.7137202 ... 0.8506999 1.387861 1.5930378 ] ... [ 3.1799543 1.33303 0.9453383 ... 1.682542 1.1393412 1.8319051 ] [ 1.922933 1.1293719 0.9453383 ... 1.682542 0.7794099 1.6278207 ] [ 1.1987886 1.4540702 1.1784247 ... 1.2290998 1.529008 1.1806647 ]] [[ 1.7633774 1.350311 1.7728808 ... 1.5287963 1.9897661 2.7883945 ] [ 0.8980558 0.8830389 1.7728808 ... -0.30820796 1.9897661 1.2545937 ] [ 0.8980558 0.99794996 2.090691 ... 1.316259 1.1971623 1.2545937 ] ... 
[-0.36348316 0.71613306 1.0909289 ... 1.6754273 1.2550511 0.43562448] [ 0.58726144 1.4659914 1.5898358 ... 2.1521962 1.4777702 2.4441752 ] [ 0.85477436 1.4659914 1.5898358 ... 2.1521962 1.5245404 2.4441752 ]] [[ 1.1820128 1.5078621 0.9149245 ... 0.84586936 1.1579347 1.51129 ] [ 1.1820128 2.4614966 0.9149245 ... 0.37126735 0.6606694 1.51129 ] [ 0.73205537 2.4614966 1.447512 ... 0.37126735 0.6606694 1.314268 ] ... [ 0.18736039 1.0926372 1.0287766 ... 0.20271549 1.2736545 1.8374261 ] [ 1.1642884 1.5606893 1.4988158 ... 2.0591424 1.2736545 1.8374261 ] [ 1.1642884 1.5606893 1.4988158 ... 2.0591424 1.0216395 2.2336338 ]] [[ 1.3880078 1.6849331 0.9027183 ... 1.8234015 0.8128539 1.6780794 ] [ 0.8206932 1.6849331 0.34688568 ... 2.4438102 1.7579528 1.6780794 ] [ 0.8206932 1.3000263 0.32756534 ... 2.4438102 1.7579528 0.33889994] ... [ 1.415131 0.972996 2.17175 ... 1.1963854 0.99400806 1.596121 ] [ 1.415131 1.95973 0.72805727 ... 0.68709034 1.0743327 0.6599931 ] [ 1.4127828 1.962934 0.72805727 ... 1.1023918 1.7124586 0.557862 ]]]]]; ov_res: [[[[[ 1.5591613 1.0728233 0.80836654 ... 1.7148929 1.741599 1.7251312 ] [ 1.5591613 1.0578142 1.1225674 ... 1.0087334 1.2857833 1.5260173 ] [ 0.89435726 1.0365251 1.1225674 ... 1.0087334 1.2442024 1.5260173 ] ... [ 2.3812926 1.4045259 1.3014239 ... 1.9086199 1.3965666 1.2309635 ] [ 2.3812926 0.20173413 1.3014239 ... 1.3572223 1.3965666 1.3741645 ] [ 2.0696433 0.20173413 0.95976084 ... 0.47614434 0.5961477 1.3741645 ]] [[ 1.2964034 0.5532409 1.8870828 ... 1.4956257 2.0368857 1.1055281 ] [ 1.1547025 1.7579343 1.8870828 ... 1.4956257 2.6551826 1.1055281 ] [ 2.2795942 1.7579343 1.0261353 ... 1.4444804 2.6551826 0.80884886] ... [ 0.5870776 0.81635165 0.53966385 ... 1.6411372 1.055453 0.95016676] [ 0.5587257 0.81635165 1.0643358 ... 1.6411372 1.3051231 0.95016676] [ 1.0453666 0.44811776 1.2533358 ... 2.1931064 1.3051231 0.69093364]] [[ 0.42060012 0.7859676 1.148137 ... 1.2805371 1.9534596 1.6447878 ] [ 0.8699454 0.5549999 0.6799722 ... 
1.3060733 1.9534596 1.1747363 ] [ 0.8699454 1.8708237 0.43799895 ... 1.3290396 0.9392915 1.4684715 ] ... [ 1.576573 1.127668 0.83262914 ... 0.6127951 0.7685627 1.9087851 ] [ 1.576573 1.127668 1.6139431 ... 0.7592007 0.7685627 1.9087851 ] [ 1.289598 2.0978692 1.6139431 ... 1.895455 0.7395826 2.0567846 ]] [[ 0.9748613 1.5866623 1.3932755 ... 1.6059568 1.5065715 0.3371863 ] [ 0.87067044 1.8968403 1.3932755 ... 1.6059568 1.5065715 0.3371863 ] [ 1.0064582 1.8968403 0.80151707 ... 1.4132578 1.3338413 0.05554366] ... [ 0.42273057 2.4034183 1.3708342 ... 1.9871155 1.0027435 1.6242162 ] [-0.14105788 0.5717518 1.3708342 ... 1.2251879 1.0027435 1.466206 ] [ 0.764334 1.8235534 1.3057727 ... 1.2251879 1.5627965 2.710993 ]] [[ 1.0846542 0.76797944 0.24325006 ... -0.26439255 0.40863487 1.4269365 ] [ 1.0846542 2.5009139 1.6370507 ... 0.46189675 -0.06065868 0.8642889 ] [ 0.8630487 2.5009139 1.6370507 ... 0.52658045 0.49675816 1.0780481 ] ... [ 1.1323775 1.8146678 1.5396777 ... 1.4927632 1.432035 2.5273128 ] [ 1.1323775 1.682794 2.3219945 ... 1.4927632 0.3532063 2.5273128 ] [ 0.81600636 1.682794 2.3219945 ... 0.6591124 0.3532063 0.34837863]]] [[[ 1.1822286 0.9675275 1.9763335 ... 1.0923941 0.38525376 0.35436773] [ 1.0215731 1.7792044 2.6525207 ... 1.291515 0.60643214 0.8433544 ] [ 1.3563123 1.7792044 2.6525207 ... 1.4403937 1.6130574 0.8433544 ] ... [ 2.520274 0.5952773 1.0146711 ... -0.02847905 1.2251513 1.2618537 ] [ 1.305146 0.7056373 1.3461461 ... -0.28077847 1.268838 1.2618537 ] [ 1.1464869 0.7056373 1.3461461 ... 0.9835523 1.268838 0.6345113 ]] [[ 0.80023223 0.68818474 1.3454322 ... 1.9161748 1.4945139 0.8916952 ] [ 1.8235184 3.3639388 1.5806476 ... 0.711615 1.6235399 0.8916952 ] [ 1.8235184 3.3639388 1.5806476 ... 2.3587668 1.6235399 0.94328654] ... [ 1.6006145 1.2513994 1.4312596 ... 1.3310876 0.5687687 1.4038403 ] [ 2.0454283 1.2513994 1.2215631 ... 1.3310876 1.5687865 2.5212007 ] [ 2.0454283 0.20762324 2.9221349 ... 
0.62955225 1.5687865 2.5212007 ]] [[ 1.8251035 1.9315914 2.1588082 ... 1.1452606 0.9786897 2.1948102 ] [ 1.0943074 1.259058 1.2520343 ... 1.1452606 1.3798281 0.9111968 ] [ 0.27365085 1.259058 1.2089998 ... 0.5426304 1.3798281 1.7790712 ] ... [ 2.4561756 2.2667103 1.2467294 ... 1.9375471 2.1184344 1.768559 ] [ 0.7779557 2.2667103 1.1988649 ... 1.0887312 2.1184344 1.768559 ] [ 0.7779557 1.4401143 0.50820416 ... 1.0887312 1.9410598 0.13745107]] [[ 0.75410247 1.2080822 1.9065353 ... 1.4948243 1.096775 1.3906009 ] [ 1.1318614 1.3250674 1.9065353 ... 1.4948243 1.4926761 1.0117218 ] [ 1.2961003 1.7664549 0.4947434 ... 0.91498685 1.4926761 1.3877038 ] ... [ 0.5839232 1.1752083 1.395487 ... 0.5846335 1.096969 1.3991324 ] [ 1.0566458 0.3190141 1.8986814 ... 0.86601764 1.096969 1.3991324 ] [ 1.0566458 0.8939073 2.0147052 ... 0.86601764 1.311724 0.29246062]] [[ 1.9762881 1.2262251 1.2984692 ... 1.2352362 0.44670197 2.1639864 ] [ 1.5762649 1.2262251 2.4537888 ... 1.2352362 0.7040912 2.1639864 ] [ 1.5762649 2.4931593 2.4537888 ... 1.2888012 2.8016534 1.5464063 ] ... [ 1.6617368 0.93565553 0.6295751 ... 0.29720667 2.1351454 2.9202678 ] [ 1.848977 0.5113269 1.2478578 ... 0.7064463 0.4362321 2.9202678 ] [ 1.848977 0.5113269 1.2478578 ... 0.7064463 1.7370914 0.7253266 ]]] [[[ 0.9647273 1.0698235 0.65826213 ... 1.2518295 2.0954585 0.7312272 ] [ 2.2052827 0.91377044 0.9650719 ... 1.2518295 2.0954585 1.4350748 ] [ 2.2052827 1.1769197 1.7243282 ... 1.4277482 1.5290419 1.4350748 ] ... [ 0.01403552 0.8597131 1.337241 ... 1.4390572 1.8420615 3.4605381 ] [ 1.1840165 1.1956588 1.3238537 ... 1.4390572 1.8420615 3.4605381 ] [ 1.1840165 1.1956588 1.3238537 ... 0.30132842 0.74667966 0.97477484]] [[ 1.7853724 2.0749176 3.0256207 ... 0.44912735 1.370249 1.7667606 ] [ 0.9249756 2.0749176 1.4262289 ... 0.6789998 1.387861 1.7667606 ] [ 0.96438414 0.42268944 1.7137202 ... 0.8506999 1.387861 1.5930378 ] ... [ 3.1799543 1.33303 0.9453383 ... 
1.682542 1.1393412 1.8319051 ] [ 1.922933 1.1293719 0.9453383 ... 1.682542 0.7794099 1.6278207 ] [ 1.1987886 1.4540702 1.1784247 ... 1.2290998 1.529008 1.1806647 ]] [[ 1.7633774 1.350311 1.7728808 ... 1.5287963 1.9897661 2.7883945 ] [ 0.8980558 0.8830389 1.7728808 ... -0.30820796 1.9897661 1.2545937 ] [ 0.8980558 0.99794996 2.090691 ... 1.316259 1.1971623 1.2545937 ] ... [-0.36348316 0.71613306 1.0909289 ... 1.6754273 1.2550511 0.43562448] [ 0.58726144 1.4659914 1.5898358 ... 2.1521962 1.4777702 2.4441752 ] [ 0.85477436 1.4659914 1.5898358 ... 2.1521962 1.5245404 2.4441752 ]] [[ 1.1820128 1.5078621 0.9149245 ... 0.84586936 1.1579347 1.51129 ] [ 1.1820128 2.4614966 0.9149245 ... 0.37126735 0.6606694 1.51129 ] [ 0.73205537 2.4614966 1.447512 ... 0.37126735 0.6606694 1.314268 ] ... [ 0.18736039 1.0926372 1.0287766 ... 0.20271549 1.2736545 1.8374261 ] [ 1.1642884 1.5606893 1.4988158 ... 2.0591424 1.2736545 1.8374261 ] [ 1.1642884 1.5606893 1.4988158 ... 2.0591424 1.0216395 2.2336338 ]] [[ 1.3880078 1.6849331 0.9027183 ... 1.8234015 0.8128539 1.6780794 ] [ 0.8206932 1.6849331 0.34688568 ... 2.4438102 1.7579528 1.6780794 ] [ 0.8206932 1.3000263 0.32756534 ... 2.4438102 1.7579528 0.33889994] ... [ 1.415131 0.972996 2.17175 ... 1.1963854 0.99400806 1.596121 ] [ 1.415131 1.95973 0.72805727 ... 0.68709034 1.0743327 0.6599931 ] [ 1.4127828 1.962934 0.72805727 ... 1.1023918 1.7124586 0.557862 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': 1, 'padding': 0} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5872.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2, 2]]() %3 : int[] = prim::Constant[value=[0, 0, 0]]() %4 : int[] = prim::Constant[value=[1, 1, 1]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %7 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %4, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%7) fw_re: [[[[[1.7284085 3.0102568 1.3647069 ... 1.8864815 1.7766691 1.8862344 ] [2.1847239 2.326045 2.1847239 ... 1.6627657 2.5725737 1.6627657 ] [1.7284085 3.0102568 1.6204892 ... 1.864867 1.7766691 1.8862344 ] ... [2.2726831 2.5343108 2.2726831 ... 2.3105118 2.298407 2.5712574 ] [1.9578084 2.1550765 1.9578084 ... 2.2785199 1.4420525 2.2785199 ] [2.2726831 1.7312464 2.2726831 ... 2.3105118 2.298407 1.6194203 ]] [[1.475829 1.9548979 2.5374615 ... 1.6045821 2.1157827 1.6291432 ] [1.8816084 1.5675554 1.8905622 ... 1.8874279 1.1298672 1.8874279 ] [1.994505 1.9548979 1.994505 ... 1.2720219 2.1229334 1.6291432 ] ... [1.4025215 1.5465308 1.4025215 ... 2.6853 2.435124 2.6853 ] [1.4608492 1.8836247 1.4608492 ... 2.6758795 1.2703451 2.6758795 ] [1.1757737 1.5465308 1.2288376 ... 2.6853 2.435124 2.6853 ]] [[2.0015452 3.0102568 2.658712 ... 1.8864815 2.4891758 1.8862344 ] [2.0693977 1.8287406 1.3704624 ... 2.3340878 2.0773854 2.173214 ] [2.0015452 3.0102568 2.658712 ... 1.864867 1.7766691 1.8862344 ] ... [2.2726831 2.5343108 2.5174375 ... 1.5820625 2.298407 2.5712574 ] [1.9131762 2.039718 1.5974458 ... 2.046683 1.3264468 1.6363186 ] [2.2726831 1.7312464 2.5174375 ... 1.5820625 2.298407 1.5820625 ]] ... [[1.7475597 1.1417878 1.9375358 ... 1.4700547 1.9572189 2.2137215 ] [1.6602386 2.0921752 1.5218976 ... 1.9419206 2.5776312 2.88169 ] [1.9087799 1.1417878 1.9375358 ... 
1.1151567 1.9572189 2.2137215 ] ... [1.5206052 1.3718121 1.5206052 ... 1.5834441 1.1627599 1.448204 ] [1.789059 1.4623146 1.789059 ... 2.1938891 2.1672094 1.4290366 ] [0.6369474 2.264156 0.8457408 ... 1.5066662 1.7894117 1.5066662 ]] [[1.6348758 1.9307694 1.9257041 ... 1.3710707 2.0516784 1.3710707 ] [1.9813535 2.556154 1.3116934 ... 1.44319 3.330823 1.9876744 ] [2.146955 1.9307694 2.146955 ... 1.0681998 2.0516784 1.2106018 ] ... [1.6517531 1.7752692 2.1283684 ... 1.5779585 2.7261944 1.6819326 ] [2.0533936 1.5128189 2.0533936 ... 2.5026684 2.013701 2.5026684 ] [2.4940987 1.7752692 2.5804105 ... 1.5779585 2.7261944 1.6819326 ]] [[1.7107675 0.992633 1.7107675 ... 1.4700547 1.9700085 2.2137215 ] [1.752846 2.0921752 1.752846 ... 1.9419206 2.5776312 1.9419206 ] [1.7107675 0.992633 1.7107675 ... 1.1151567 1.7112951 2.2137215 ] ... [2.2714508 1.3921413 2.2714508 ... 1.9966704 2.1790538 1.448204 ] [1.789059 1.4623146 2.1003335 ... 2.1938891 2.1672094 1.4290366 ] [2.0651433 2.264156 2.0651433 ... 1.9966704 2.1790538 1.448204 ]]] [[[2.4134197 1.8294723 0.75319344 ... 2.3001988 1.8464983 2.3001988 ] [2.0142555 1.7116238 1.2420373 ... 1.497748 1.5592954 1.2839905 ] [3.5027692 2.437038 3.5027692 ... 2.3001988 1.8464983 2.3001988 ] ... [1.9805695 2.256199 1.9805695 ... 1.0590887 1.9113852 1.5098802 ] [1.8113854 1.9063356 1.8113854 ... 2.7684457 1.5929879 2.7684457 ] [1.9805695 2.0961778 1.9805695 ... 0.96041167 2.1597774 0.88863736]] [[1.6502749 1.7497556 1.8482987 ... 1.9226046 1.685759 1.5201235 ] [2.573559 2.727987 1.4497701 ... 1.8519133 1.5596497 1.8519133 ] [2.4653833 1.7497556 2.4653833 ... 2.1889105 1.6025649 2.1889105 ] ... [1.3955468 1.7955768 1.3955468 ... 2.8055835 2.792937 2.8055835 ] [1.5966836 1.8017889 1.8793156 ... 1.2196273 1.8927697 1.1905024 ] [1.3955468 1.6209375 1.3955468 ... 2.8055835 2.792937 2.8055835 ]] [[2.4632056 1.8294723 0.7629416 ... 2.3001988 2.1868265 2.3001988 ] [2.0142555 2.00751 3.3319075 ... 
1.7670835 1.9917991 1.7670835 ] [2.4632056 2.3942225 1.9262829 ... 2.3001988 2.1868265 2.3001988 ] ... [2.7325094 1.3740577 2.7325094 ... 2.1094644 1.6506805 2.1094644 ] [1.5246261 1.0806661 1.5246261 ... 1.8863237 2.082226 1.9916564 ] [2.1873028 1.3740577 1.9805695 ... 2.1094644 1.6506805 2.1094644 ]] ... [[1.2332871 2.9503942 1.2332871 ... 1.4597471 1.4063833 1.4597471 ] [2.2572722 3.574878 2.2572722 ... 2.718701 1.8959752 2.718701 ] [1.8572706 2.2118576 1.8572706 ... 1.4597471 1.4063833 1.9478784 ] ... [1.4243948 1.7152231 1.8342323 ... 2.2662275 2.5051384 2.2662275 ] [1.34427 2.7900798 1.3819877 ... 1.1656218 2.2636733 1.6847152 ] [1.4243948 1.598678 1.7953645 ... 2.2662275 2.5051384 2.2662275 ]] [[2.4147131 1.7706904 2.3497536 ... 2.2730663 1.4401491 1.2978915 ] [2.243675 1.631051 2.243675 ... 2.4162254 3.1947203 2.4162254 ] [1.6059129 2.5483856 2.18233 ... 3.4922712 1.3227168 3.4922712 ] ... [1.1137369 2.3662982 1.5972455 ... 3.0581267 1.7707552 3.0581267 ] [2.1436064 1.7176008 2.1436064 ... 2.3183956 1.9389131 2.3183956 ] [2.5069258 1.7808682 1.5972455 ... 3.0581267 2.3494694 3.0581267 ]] [[1.2332871 2.9503942 1.2332871 ... 1.7027323 1.4063833 1.7027323 ] [1.2299587 1.9729729 2.4326808 ... 1.4387778 2.070953 2.5594358 ] [1.8572706 2.2118576 1.8572706 ... 1.7027323 1.4063833 2.5300243 ] ... [1.4243948 2.1974428 1.8342323 ... 2.2662275 2.061635 2.2662275 ] [1.34427 2.7900798 1.7050037 ... 1.153069 2.2636733 1.2151383 ] [1.4243948 2.1974428 1.7953645 ... 2.2662275 2.061635 2.2662275 ]]] [[[2.3659394 1.1910161 2.3659394 ... 1.6278168 2.0732162 1.6002684 ] [2.202128 2.3441103 1.8599492 ... 1.3700838 2.156424 1.1530361 ] [2.3659394 2.1762307 2.3659394 ... 1.9126419 1.6192038 1.9126419 ] ... [2.2255623 1.9270153 2.2255623 ... 1.958505 1.6688564 1.958505 ] [2.0759919 1.8888915 2.0759919 ... 2.1671746 1.1011919 2.1671746 ] [2.2255623 2.3819187 2.2255623 ... 2.465903 1.6688564 2.465903 ]] [[1.9567103 2.1179976 1.7023393 ... 
1.8479279 1.9986298 1.8479279 ] [3.6936617 2.708187 3.6936617 ... 1.1781572 1.2876971 1.3943981 ] [1.9567103 2.1179976 1.8249099 ... 1.3973783 1.9986298 1.3046248 ] ... [1.8207772 1.9927822 1.8207772 ... 2.995125 1.6231291 2.995125 ] [1.6001635 2.4744763 1.4882137 ... 3.5128586 2.4316058 3.5128586 ] [1.8533131 1.9927822 1.8207772 ... 2.995125 2.1342301 2.995125 ]] [[2.3659394 2.2191494 2.3659394 ... 1.6278168 2.3410292 1.6002684 ] [2.202128 1.884934 2.2004864 ... 1.6713426 2.867838 1.6713426 ] [2.3659394 2.2191494 2.3659394 ... 1.9126419 2.4128563 1.9126419 ] ... [2.2255623 2.6391199 2.2255623 ... 1.958505 1.6688564 1.958505 ] [2.0759919 2.1604426 2.0759919 ... 2.1671746 1.1011919 2.1671746 ] [2.2255623 1.9270153 2.2255623 ... 2.465903 1.6688564 2.465903 ]] ... [[2.4710758 2.5401926 2.4710758 ... 1.6154455 2.589631 1.1215142 ] [1.4992896 1.2758346 2.7470405 ... 1.4503925 1.4590156 2.1218894 ] [2.4710758 2.5401926 2.4710758 ... 1.6154455 2.589631 0.7156303 ] ... [0.9222031 1.6270852 1.1317903 ... 2.0855668 3.0461655 1.5238975 ] [1.0584183 1.9712052 1.0584183 ... 2.545426 1.7153629 2.545426 ] [1.7218888 1.6270852 1.7218888 ... 2.0855668 3.0461655 1.9145429 ]] [[1.3751642 1.5460868 1.7486075 ... 1.4192468 1.1730409 1.895292 ] [2.7778392 1.9023271 2.7778392 ... 2.1273835 2.2391558 2.1273835 ] [1.4501122 1.3494959 1.7486075 ... 1.7245077 1.732866 1.895292 ] ... [2.5785007 2.9100556 2.5785007 ... 1.58457 3.0667105 2.0877485 ] [2.5055668 1.9315652 1.788296 ... 1.7727283 2.2133 1.8831488 ] [1.7171725 2.2241552 1.7171725 ... 1.58457 3.0667105 2.0877485 ]] [[2.4710758 2.124403 2.4710758 ... 1.6154455 2.589631 2.0712466 ] [2.0694182 1.727619 2.7940366 ... 1.4642833 1.4590156 2.1218894 ] [2.4710758 2.4788933 2.4710758 ... 1.6154455 2.589631 2.0712466 ] ... [1.5025893 2.3489265 1.5025893 ... 3.1252515 2.647598 3.1252515 ] [1.2296298 1.9712052 1.0584183 ... 2.0218554 1.7153629 2.0218554 ] [1.7218888 2.3489265 1.7218888 ... 
3.1252515 2.647598 3.1252515 ]]]]]; ov_res: [[[[[1.7284085 3.0102568 1.3647069 ... 1.8864815 1.7766691 1.8862344 ] [2.1847239 2.326045 2.1847239 ... 1.6627657 2.5725737 1.6627657 ] [1.7284085 3.0102568 1.6204892 ... 1.864867 1.7766691 1.8862344 ] ... [2.2726831 2.5343108 2.2726831 ... 2.3105118 2.298407 2.5712574 ] [1.9578084 2.1550765 1.9578084 ... 2.2785199 1.4420525 2.2785199 ] [2.2726831 1.7312464 2.2726831 ... 2.3105118 2.298407 1.6194203 ]] [[1.475829 1.9548979 2.5374615 ... 1.6045821 2.1157827 1.6291432 ] [1.8816084 1.5675554 1.8905622 ... 1.8874279 1.1298672 1.8874279 ] [1.994505 1.9548979 1.994505 ... 1.2720219 2.1229334 1.6291432 ] ... [1.4025215 1.5465308 1.4025215 ... 2.6853 2.435124 2.6853 ] [1.4608492 1.8836247 1.4608492 ... 2.6758795 1.2703451 2.6758795 ] [1.1757737 1.5465308 1.2288376 ... 2.6853 2.435124 2.6853 ]] [[2.0015452 3.0102568 2.658712 ... 1.8864815 2.4891758 1.8862344 ] [2.0693977 1.8287406 1.3704624 ... 2.3340878 2.0773854 2.173214 ] [2.0015452 3.0102568 2.658712 ... 1.864867 1.7766691 1.8862344 ] ... [2.2726831 2.5343108 2.5174375 ... 1.5820625 2.298407 2.5712574 ] [1.9131762 2.039718 1.5974458 ... 2.046683 1.3264468 1.6363186 ] [2.2726831 1.7312464 2.5174375 ... 1.5820625 2.298407 1.5820625 ]] ... [[1.7475597 1.1417878 1.9375358 ... 1.4700547 1.9572189 2.2137215 ] [1.6602386 2.0921752 1.5218976 ... 1.9419206 2.5776312 2.88169 ] [1.9087799 1.1417878 1.9375358 ... 1.1151567 1.9572189 2.2137215 ] ... [1.5206052 1.3718121 1.5206052 ... 1.5834441 1.1627599 1.448204 ] [1.789059 1.4623146 1.789059 ... 2.1938891 2.1672094 1.4290366 ] [0.6369474 2.264156 0.8457408 ... 1.5066662 1.7894117 1.5066662 ]] [[1.6348758 1.9307694 1.9257041 ... 1.3710707 2.0516784 1.3710707 ] [1.9813535 2.556154 1.3116934 ... 1.44319 3.330823 1.9876744 ] [2.146955 1.9307694 2.146955 ... 1.0681998 2.0516784 1.2106018 ] ... [1.6517531 1.7752692 2.1283684 ... 1.5779585 2.7261944 1.6819326 ] [2.0533936 1.5128189 2.0533936 ... 
2.5026684 2.013701 2.5026684 ] [2.4940987 1.7752692 2.5804105 ... 1.5779585 2.7261944 1.6819326 ]] [[1.7107675 0.992633 1.7107675 ... 1.4700547 1.9700085 2.2137215 ] [1.752846 2.0921752 1.752846 ... 1.9419206 2.5776312 1.9419206 ] [1.7107675 0.992633 1.7107675 ... 1.1151567 1.7112951 2.2137215 ] ... [2.2714508 1.3921413 2.2714508 ... 1.9966704 2.1790538 1.448204 ] [1.789059 1.4623146 2.1003335 ... 2.1938891 2.1672094 1.4290366 ] [2.0651433 2.264156 2.0651433 ... 1.9966704 2.1790538 1.448204 ]]] [[[2.4134197 1.8294723 0.75319344 ... 2.3001988 1.8464983 2.3001988 ] [2.0142555 1.7116238 1.2420373 ... 1.497748 1.5592954 1.2839905 ] [3.5027692 2.437038 3.5027692 ... 2.3001988 1.8464983 2.3001988 ] ... [1.9805695 2.256199 1.9805695 ... 1.0590887 1.9113852 1.5098802 ] [1.8113854 1.9063356 1.8113854 ... 2.7684457 1.5929879 2.7684457 ] [1.9805695 2.0961778 1.9805695 ... 0.96041167 2.1597774 0.88863736]] [[1.6502749 1.7497556 1.8482987 ... 1.9226046 1.685759 1.5201235 ] [2.573559 2.727987 1.4497701 ... 1.8519133 1.5596497 1.8519133 ] [2.4653833 1.7497556 2.4653833 ... 2.1889105 1.6025649 2.1889105 ] ... [1.3955468 1.7955768 1.3955468 ... 2.8055835 2.792937 2.8055835 ] [1.5966836 1.8017889 1.8793156 ... 1.2196273 1.8927697 1.1905024 ] [1.3955468 1.6209375 1.3955468 ... 2.8055835 2.792937 2.8055835 ]] [[2.4632056 1.8294723 0.7629416 ... 2.3001988 2.1868265 2.3001988 ] [2.0142555 2.00751 3.3319075 ... 1.7670835 1.9917991 1.7670835 ] [2.4632056 2.3942225 1.9262829 ... 2.3001988 2.1868265 2.3001988 ] ... [2.7325094 1.3740577 2.7325094 ... 2.1094644 1.6506805 2.1094644 ] [1.5246261 1.0806661 1.5246261 ... 1.8863237 2.082226 1.9916564 ] [2.1873028 1.3740577 1.9805695 ... 2.1094644 1.6506805 2.1094644 ]] ... [[1.2332871 2.9503942 1.2332871 ... 1.4597471 1.4063833 1.4597471 ] [2.2572722 3.574878 2.2572722 ... 2.718701 1.8959752 2.718701 ] [1.8572706 2.2118576 1.8572706 ... 1.4597471 1.4063833 1.9478784 ] ... [1.4243948 1.7152231 1.8342323 ... 
2.2662275 2.5051384 2.2662275 ] [1.34427 2.7900798 1.3819877 ... 1.1656218 2.2636733 1.6847152 ] [1.4243948 1.598678 1.7953645 ... 2.2662275 2.5051384 2.2662275 ]] [[2.4147131 1.7706904 2.3497536 ... 2.2730663 1.4401491 1.2978915 ] [2.243675 1.631051 2.243675 ... 2.4162254 3.1947203 2.4162254 ] [1.6059129 2.5483856 2.18233 ... 3.4922712 1.3227168 3.4922712 ] ... [1.1137369 2.3662982 1.5972455 ... 3.0581267 1.7707552 3.0581267 ] [2.1436064 1.7176008 2.1436064 ... 2.3183956 1.9389131 2.3183956 ] [2.5069258 1.7808682 1.5972455 ... 3.0581267 2.3494694 3.0581267 ]] [[1.2332871 2.9503942 1.2332871 ... 1.7027323 1.4063833 1.7027323 ] [1.2299587 1.9729729 2.4326808 ... 1.4387778 2.070953 2.5594358 ] [1.8572706 2.2118576 1.8572706 ... 1.7027323 1.4063833 2.5300243 ] ... [1.4243948 2.1974428 1.8342323 ... 2.2662275 2.061635 2.2662275 ] [1.34427 2.7900798 1.7050037 ... 1.153069 2.2636733 1.2151383 ] [1.4243948 2.1974428 1.7953645 ... 2.2662275 2.061635 2.2662275 ]]] [[[2.3659394 1.1910161 2.3659394 ... 1.6278168 2.0732162 1.6002684 ] [2.202128 2.3441103 1.8599492 ... 1.3700838 2.156424 1.1530361 ] [2.3659394 2.1762307 2.3659394 ... 1.9126419 1.6192038 1.9126419 ] ... [2.2255623 1.9270153 2.2255623 ... 1.958505 1.6688564 1.958505 ] [2.0759919 1.8888915 2.0759919 ... 2.1671746 1.1011919 2.1671746 ] [2.2255623 2.3819187 2.2255623 ... 2.465903 1.6688564 2.465903 ]] [[1.9567103 2.1179976 1.7023393 ... 1.8479279 1.9986298 1.8479279 ] [3.6936617 2.708187 3.6936617 ... 1.1781572 1.2876971 1.3943981 ] [1.9567103 2.1179976 1.8249099 ... 1.3973783 1.9986298 1.3046248 ] ... [1.8207772 1.9927822 1.8207772 ... 2.995125 1.6231291 2.995125 ] [1.6001635 2.4744763 1.4882137 ... 3.5128586 2.4316058 3.5128586 ] [1.8533131 1.9927822 1.8207772 ... 2.995125 2.1342301 2.995125 ]] [[2.3659394 2.2191494 2.3659394 ... 1.6278168 2.3410292 1.6002684 ] [2.202128 1.884934 2.2004864 ... 1.6713426 2.867838 1.6713426 ] [2.3659394 2.2191494 2.3659394 ... 1.9126419 2.4128563 1.9126419 ] ... 
[2.2255623 2.6391199 2.2255623 ... 1.958505 1.6688564 1.958505 ] [2.0759919 2.1604426 2.0759919 ... 2.1671746 1.1011919 2.1671746 ] [2.2255623 1.9270153 2.2255623 ... 2.465903 1.6688564 2.465903 ]] ... [[2.4710758 2.5401926 2.4710758 ... 1.6154455 2.589631 1.1215142 ] [1.4992896 1.2758346 2.7470405 ... 1.4503925 1.4590156 2.1218894 ] [2.4710758 2.5401926 2.4710758 ... 1.6154455 2.589631 0.7156303 ] ... [0.9222031 1.6270852 1.1317903 ... 2.0855668 3.0461655 1.5238975 ] [1.0584183 1.9712052 1.0584183 ... 2.545426 1.7153629 2.545426 ] [1.7218888 1.6270852 1.7218888 ... 2.0855668 3.0461655 1.9145429 ]] [[1.3751642 1.5460868 1.7486075 ... 1.4192468 1.1730409 1.895292 ] [2.7778392 1.9023271 2.7778392 ... 2.1273835 2.2391558 2.1273835 ] [1.4501122 1.3494959 1.7486075 ... 1.7245077 1.732866 1.895292 ] ... [2.5785007 2.9100556 2.5785007 ... 1.58457 3.0667105 2.0877485 ] [2.5055668 1.9315652 1.788296 ... 1.7727283 2.2133 1.8831488 ] [1.7171725 2.2241552 1.7171725 ... 1.58457 3.0667105 2.0877485 ]] [[2.4710758 2.124403 2.4710758 ... 1.6154455 2.589631 2.0712466 ] [2.0694182 1.727619 2.7940366 ... 1.4642833 1.4590156 2.1218894 ] [2.4710758 2.4788933 2.4710758 ... 1.6154455 2.589631 2.0712466 ] ... [1.5025893 2.3489265 1.5025893 ... 3.1252515 2.647598 3.1252515 ] [1.2296298 1.9712052 1.0584183 ... 2.0218554 1.7153629 2.0218554 ] [1.7218888 2.3489265 1.7218888 ... 3.1252515 2.647598 3.1252515 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': [1, 1, 1], 'padding': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5874.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2, 2]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.stride : int[] = prim::Constant[value=[1, 1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.stride, %self.stride, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%6) fw_re: [[[[[0.5234207 1.8535751 0.7533365 ... 1.6833907 1.9801314 1.4047593 ] [1.9898765 1.4171898 1.9898765 ... 1.4735435 1.6215926 0.7238493 ] [0.5234207 1.8535751 1.3712595 ... 1.6833907 1.9801314 1.4047593 ] ... [2.0281098 2.0682776 2.0281098 ... 1.941467 1.5993321 1.941467 ] [0.28086695 2.206942 0.8716314 ... 1.7893875 1.5846908 0.83838314] [2.0281098 2.0682776 2.0281098 ... 0.84205073 1.5993321 0.29108924]] [[2.245824 1.218088 2.245824 ... 2.2232454 1.2487109 2.2232454 ] [1.4195919 2.3540165 1.4195919 ... 2.303518 2.6081972 1.8883876 ] [2.245824 2.0612757 2.245824 ... 2.2232454 1.2487109 2.2232454 ] ... [1.6704662 1.8682903 1.6704662 ... 1.9986979 1.6793303 1.9986979 ] [2.1063552 0.86465347 2.1063552 ... 1.7697357 1.3212062 1.7697357 ] [1.6704662 1.5317695 1.6704662 ... 1.9986979 1.6793303 1.9986979 ]] [[0.77312243 1.8535751 0.77312243 ... 1.6833907 1.9801314 1.4047593 ] [1.9898765 1.4171898 1.9898765 ... 1.4735435 1.6215926 1.162163 ] [0.77312243 1.8535751 1.3712595 ... 2.0384486 1.9801314 1.4047593 ] ... [2.0281098 2.0682776 2.0281098 ... 1.941467 1.7250648 1.941467 ] [2.333233 2.206942 2.333233 ... 1.7893875 1.5846908 0.83838314] [2.0281098 2.0682776 2.0281098 ... 1.8155228 1.5993321 0.84045756]] ... [[1.9271566 1.8731192 1.9271566 ... 1.5014911 1.6606495 1.4409438 ] [1.6480281 2.0604606 1.6480281 ... 2.2741807 2.5841715 1.2887255 ] [1.9271566 2.3251398 1.9271566 ... 
1.5014911 2.1738365 1.4409438 ] ... [1.5259461 1.7353851 1.8634455 ... 2.061658 2.3127618 2.061658 ] [1.8718796 1.5808388 1.8718796 ... 1.5815523 2.8454254 1.5815523 ] [0.9824119 1.7353851 1.8634455 ... 1.772422 2.3127618 1.772422 ]] [[2.1200364 3.0086539 2.1200364 ... 1.3667871 2.3691275 1.3667871 ] [2.1555674 2.6668646 2.1555674 ... 1.7340739 2.255212 1.7340739 ] [2.1200364 3.0086539 2.1200364 ... 1.3667871 2.3691275 1.3667871 ] ... [1.138973 2.078901 1.345083 ... 1.7715061 2.7620995 1.2636267 ] [0.90815896 2.3129764 1.532569 ... 1.475255 2.1242192 1.475255 ] [0.9270168 2.078901 1.345083 ... 1.4631152 1.1976818 1.2636267 ]] [[1.9271566 1.8731192 1.9271566 ... 1.5014911 0.54242134 1.4409438 ] [1.6480281 2.0604606 1.6480281 ... 1.2887255 2.5841715 1.2887255 ] [1.9271566 1.8731192 1.9271566 ... 1.5014911 2.1738365 1.4409438 ] ... [1.5259461 1.6544424 1.8634455 ... 1.6468283 1.8641893 1.6468283 ] [1.8718796 1.5808388 1.8718796 ... 1.5529406 1.4420004 1.5529406 ] [0.9824119 1.6544424 1.8634455 ... 1.6468283 0.81528777 1.6468283 ]]] [[[0.6880803 1.5722506 0.6880803 ... 2.3788257 1.8349289 2.3788257 ] [1.8510121 1.6062884 1.8510121 ... 1.1084244 1.564898 1.1084244 ] [1.3355907 1.5722506 1.3355907 ... 2.3788257 1.8349289 2.3788257 ] ... [1.2671269 1.8141263 3.0777357 ... 1.4076362 1.1889254 1.4076362 ] [1.1810179 2.4871924 3.103184 ... 2.7086287 2.2751555 2.4503152 ] [1.2671269 1.1510129 1.5725199 ... 1.4076362 1.1610684 1.4076362 ]] [[1.7680503 1.8586931 1.7680503 ... 1.6904118 1.3233757 1.6904118 ] [1.9476272 1.3189334 2.171243 ... 2.7291417 1.0990852 2.7291417 ] [1.7680503 1.8586931 1.7680503 ... 1.6904118 1.7653031 1.6904118 ] ... [1.1729786 3.0158823 1.4054755 ... 2.8747869 1.6940874 2.8747869 ] [2.1047857 2.5864418 2.1047857 ... 2.2561646 1.4822528 2.2561646 ] [0.5588429 2.2015686 1.4054755 ... 2.8747869 1.6940874 2.8747869 ]] [[2.0705664 1.7803373 2.0705664 ... 2.3788257 1.8349289 2.3788257 ] [1.8510121 2.3833272 2.3372815 ... 
1.391277 1.564898 1.391277 ] [2.0705664 1.7803373 2.0705664 ... 2.3788257 1.8349289 2.3788257 ] ... [1.2671269 2.3487904 3.0777357 ... 1.4076362 1.655316 1.4076362 ] [1.3666893 2.4871924 3.103184 ... 2.7086287 2.2751555 2.4503152 ] [1.2671269 2.3487904 1.5725199 ... 1.4076362 1.655316 1.4076362 ]] ... [[3.6652594 2.0948527 3.6652594 ... 2.4337156 1.8010828 2.4337156 ] [1.8775084 2.3987248 1.8896024 ... 1.5880605 1.1548176 1.5880605 ] [3.6652594 2.3754761 3.6652594 ... 2.4337156 1.8010828 2.4337156 ] ... [2.0718775 2.6441808 2.0718775 ... 1.7238784 3.1827824 1.7238784 ] [1.5856545 3.6151597 1.5856545 ... 2.2439353 1.7771127 2.2439353 ] [2.0718775 2.6441808 2.0718775 ... 1.7238784 3.1827824 1.7238784 ]] [[1.6825519 2.1796088 1.6825519 ... 1.6663815 0.99234766 1.6663815 ] [1.423917 2.1421194 1.423917 ... 2.1872134 1.3425065 1.2836459 ] [1.6825519 2.1796088 1.6825519 ... 1.6663815 0.99591726 1.6663815 ] ... [1.5607712 2.2315934 1.9796216 ... 2.028909 3.1421483 2.028909 ] [1.8028833 2.180049 1.8028833 ... 2.3364973 1.584781 2.3364973 ] [1.2026865 2.2315934 1.9796216 ... 2.028909 1.369878 2.028909 ]] [[3.1207154 2.0948527 3.1207154 ... 2.4337156 1.2643238 2.4337156 ] [1.8775084 2.3987248 1.8775084 ... 1.5880605 1.1548176 1.5880605 ] [3.1207154 2.3754761 3.1207154 ... 2.4337156 1.2643238 2.4337156 ] ... [2.0718775 1.8677502 2.0718775 ... 1.2189804 3.1827824 1.2189804 ] [1.0083253 3.6151597 1.5428476 ... 2.2439353 1.2272227 2.2439353 ] [2.0718775 1.4526539 2.0718775 ... 1.1677129 3.1827824 0.53674585]]] [[[2.4444323 1.1968006 2.4444323 ... 1.9591644 1.7368612 1.9591644 ] [2.021747 0.91412544 2.021747 ... 1.2184802 1.0017205 1.2184802 ] [2.4444323 1.5700295 2.4444323 ... 1.9591644 1.7368612 1.9591644 ] ... [1.913682 2.3422353 1.913682 ... 1.4197407 1.4413428 1.4027131 ] [1.5243446 2.0402224 1.5243446 ... 1.2703581 2.2261896 1.2703581 ] [1.913682 2.3422353 1.913682 ... 1.4197407 1.4413428 0.49202225]] [[2.370068 1.9431977 2.370068 ... 
1.6374546 1.7008834 1.6374546 ] [1.8253784 2.0786445 1.8253784 ... 1.211737 2.26278 1.211737 ] [2.370068 2.1106498 2.370068 ... 1.6374546 2.1400707 1.6374546 ] ... [1.8412219 1.7100191 1.8412219 ... 2.4549773 2.4105642 2.2663753 ] [1.5657704 1.5119584 2.2264273 ... 1.9186592 1.4979054 1.9186592 ] [1.4422816 1.7100191 1.4930811 ... 2.4549773 2.4105642 1.5678334 ]] [[2.4647326 1.1968006 2.4647326 ... 1.9591644 1.7368612 1.9591644 ] [2.021747 1.5287316 2.021747 ... 1.8984578 1.0017205 1.8984578 ] [2.4647326 2.123814 2.4647326 ... 1.9591644 1.7368612 1.9591644 ] ... [1.913682 2.3422353 1.913682 ... 1.6235067 1.5990615 1.6235067 ] [1.5243446 2.0402224 1.5243446 ... 1.4735047 2.2261896 1.4735047 ] [1.913682 2.3422353 1.913682 ... 1.4197407 1.472351 0.810759 ]] ... [[1.8638434 1.2044545 1.9339203 ... 1.472447 2.3360317 1.472447 ] [1.8151132 2.2113347 1.8151132 ... 2.103637 1.9410135 2.103637 ] [1.8638434 2.6622334 1.9339203 ... 1.472447 2.3360317 1.472447 ] ... [1.326771 1.6561402 1.7196304 ... 2.1798978 1.9524742 1.4781865 ] [2.4858136 1.8531878 2.4858136 ... 2.7743244 1.44542 2.2767086 ] [1.326771 1.6561402 1.7196304 ... 2.1798978 1.9524742 0.95794654]] [[1.2343683 0.97904366 1.2343683 ... 1.5224085 1.6100053 1.5224085 ] [3.4169612 2.5595183 3.4169612 ... 1.660655 2.5900805 1.660655 ] [1.2353919 1.0639262 1.2353919 ... 2.702392 1.6818149 1.5224085 ] ... [1.9745395 2.2148252 2.003483 ... 2.221621 1.2013706 1.9890493 ] [1.8584396 1.9075423 1.8584396 ... 2.2882907 1.3994687 1.1014506 ] [1.9745395 2.2148252 2.003483 ... 2.221621 1.0356296 1.7339814 ]] [[1.8638434 1.2044545 1.9339203 ... 1.472447 2.3360317 1.472447 ] [1.0995208 2.2113347 1.0995208 ... 2.103637 1.9410135 2.103637 ] [1.8638434 2.6622334 1.9339203 ... 1.472447 2.3360317 1.472447 ] ... [1.326771 1.6561402 1.3380908 ... 2.1798978 1.0190413 1.0628896 ] [2.4858136 0.8988293 2.4858136 ... 2.7743244 1.2928132 2.2767086 ] [1.326771 1.6561402 1.326771 ... 
2.1798978 0.75954777 0.95794654]]]]]; ov_res: [[[[[0.5234207 1.8535751 0.7533365 ... 1.6833907 1.9801314 1.4047593 ] [1.9898765 1.4171898 1.9898765 ... 1.4735435 1.6215926 0.7238493 ] [0.5234207 1.8535751 1.3712595 ... 1.6833907 1.9801314 1.4047593 ] ... [2.0281098 2.0682776 2.0281098 ... 1.941467 1.5993321 1.941467 ] [0.28086695 2.206942 0.8716314 ... 1.7893875 1.5846908 0.83838314] [2.0281098 2.0682776 2.0281098 ... 0.84205073 1.5993321 0.29108924]] [[2.245824 1.218088 2.245824 ... 2.2232454 1.2487109 2.2232454 ] [1.4195919 2.3540165 1.4195919 ... 2.303518 2.6081972 1.8883876 ] [2.245824 2.0612757 2.245824 ... 2.2232454 1.2487109 2.2232454 ] ... [1.6704662 1.8682903 1.6704662 ... 1.9986979 1.6793303 1.9986979 ] [2.1063552 0.86465347 2.1063552 ... 1.7697357 1.3212062 1.7697357 ] [1.6704662 1.5317695 1.6704662 ... 1.9986979 1.6793303 1.9986979 ]] [[0.77312243 1.8535751 0.77312243 ... 1.6833907 1.9801314 1.4047593 ] [1.9898765 1.4171898 1.9898765 ... 1.4735435 1.6215926 1.162163 ] [0.77312243 1.8535751 1.3712595 ... 2.0384486 1.9801314 1.4047593 ] ... [2.0281098 2.0682776 2.0281098 ... 1.941467 1.7250648 1.941467 ] [2.333233 2.206942 2.333233 ... 1.7893875 1.5846908 0.83838314] [2.0281098 2.0682776 2.0281098 ... 1.8155228 1.5993321 0.84045756]] ... [[1.9271566 1.8731192 1.9271566 ... 1.5014911 1.6606495 1.4409438 ] [1.6480281 2.0604606 1.6480281 ... 2.2741807 2.5841715 1.2887255 ] [1.9271566 2.3251398 1.9271566 ... 1.5014911 2.1738365 1.4409438 ] ... [1.5259461 1.7353851 1.8634455 ... 2.061658 2.3127618 2.061658 ] [1.8718796 1.5808388 1.8718796 ... 1.5815523 2.8454254 1.5815523 ] [0.9824119 1.7353851 1.8634455 ... 1.772422 2.3127618 1.772422 ]] [[2.1200364 3.0086539 2.1200364 ... 1.3667871 2.3691275 1.3667871 ] [2.1555674 2.6668646 2.1555674 ... 1.7340739 2.255212 1.7340739 ] [2.1200364 3.0086539 2.1200364 ... 1.3667871 2.3691275 1.3667871 ] ... [1.138973 2.078901 1.345083 ... 1.7715061 2.7620995 1.2636267 ] [0.90815896 2.3129764 1.532569 ... 
1.475255 2.1242192 1.475255 ] [0.9270168 2.078901 1.345083 ... 1.4631152 1.1976818 1.2636267 ]] [[1.9271566 1.8731192 1.9271566 ... 1.5014911 0.54242134 1.4409438 ] [1.6480281 2.0604606 1.6480281 ... 1.2887255 2.5841715 1.2887255 ] [1.9271566 1.8731192 1.9271566 ... 1.5014911 2.1738365 1.4409438 ] ... [1.5259461 1.6544424 1.8634455 ... 1.6468283 1.8641893 1.6468283 ] [1.8718796 1.5808388 1.8718796 ... 1.5529406 1.4420004 1.5529406 ] [0.9824119 1.6544424 1.8634455 ... 1.6468283 0.81528777 1.6468283 ]]] [[[0.6880803 1.5722506 0.6880803 ... 2.3788257 1.8349289 2.3788257 ] [1.8510121 1.6062884 1.8510121 ... 1.1084244 1.564898 1.1084244 ] [1.3355907 1.5722506 1.3355907 ... 2.3788257 1.8349289 2.3788257 ] ... [1.2671269 1.8141263 3.0777357 ... 1.4076362 1.1889254 1.4076362 ] [1.1810179 2.4871924 3.103184 ... 2.7086287 2.2751555 2.4503152 ] [1.2671269 1.1510129 1.5725199 ... 1.4076362 1.1610684 1.4076362 ]] [[1.7680503 1.8586931 1.7680503 ... 1.6904118 1.3233757 1.6904118 ] [1.9476272 1.3189334 2.171243 ... 2.7291417 1.0990852 2.7291417 ] [1.7680503 1.8586931 1.7680503 ... 1.6904118 1.7653031 1.6904118 ] ... [1.1729786 3.0158823 1.4054755 ... 2.8747869 1.6940874 2.8747869 ] [2.1047857 2.5864418 2.1047857 ... 2.2561646 1.4822528 2.2561646 ] [0.5588429 2.2015686 1.4054755 ... 2.8747869 1.6940874 2.8747869 ]] [[2.0705664 1.7803373 2.0705664 ... 2.3788257 1.8349289 2.3788257 ] [1.8510121 2.3833272 2.3372815 ... 1.391277 1.564898 1.391277 ] [2.0705664 1.7803373 2.0705664 ... 2.3788257 1.8349289 2.3788257 ] ... [1.2671269 2.3487904 3.0777357 ... 1.4076362 1.655316 1.4076362 ] [1.3666893 2.4871924 3.103184 ... 2.7086287 2.2751555 2.4503152 ] [1.2671269 2.3487904 1.5725199 ... 1.4076362 1.655316 1.4076362 ]] ... [[3.6652594 2.0948527 3.6652594 ... 2.4337156 1.8010828 2.4337156 ] [1.8775084 2.3987248 1.8896024 ... 1.5880605 1.1548176 1.5880605 ] [3.6652594 2.3754761 3.6652594 ... 2.4337156 1.8010828 2.4337156 ] ... [2.0718775 2.6441808 2.0718775 ... 
1.7238784 3.1827824 1.7238784 ] [1.5856545 3.6151597 1.5856545 ... 2.2439353 1.7771127 2.2439353 ] [2.0718775 2.6441808 2.0718775 ... 1.7238784 3.1827824 1.7238784 ]] [[1.6825519 2.1796088 1.6825519 ... 1.6663815 0.99234766 1.6663815 ] [1.423917 2.1421194 1.423917 ... 2.1872134 1.3425065 1.2836459 ] [1.6825519 2.1796088 1.6825519 ... 1.6663815 0.99591726 1.6663815 ] ... [1.5607712 2.2315934 1.9796216 ... 2.028909 3.1421483 2.028909 ] [1.8028833 2.180049 1.8028833 ... 2.3364973 1.584781 2.3364973 ] [1.2026865 2.2315934 1.9796216 ... 2.028909 1.369878 2.028909 ]] [[3.1207154 2.0948527 3.1207154 ... 2.4337156 1.2643238 2.4337156 ] [1.8775084 2.3987248 1.8775084 ... 1.5880605 1.1548176 1.5880605 ] [3.1207154 2.3754761 3.1207154 ... 2.4337156 1.2643238 2.4337156 ] ... [2.0718775 1.8677502 2.0718775 ... 1.2189804 3.1827824 1.2189804 ] [1.0083253 3.6151597 1.5428476 ... 2.2439353 1.2272227 2.2439353 ] [2.0718775 1.4526539 2.0718775 ... 1.1677129 3.1827824 0.53674585]]] [[[2.4444323 1.1968006 2.4444323 ... 1.9591644 1.7368612 1.9591644 ] [2.021747 0.91412544 2.021747 ... 1.2184802 1.0017205 1.2184802 ] [2.4444323 1.5700295 2.4444323 ... 1.9591644 1.7368612 1.9591644 ] ... [1.913682 2.3422353 1.913682 ... 1.4197407 1.4413428 1.4027131 ] [1.5243446 2.0402224 1.5243446 ... 1.2703581 2.2261896 1.2703581 ] [1.913682 2.3422353 1.913682 ... 1.4197407 1.4413428 0.49202225]] [[2.370068 1.9431977 2.370068 ... 1.6374546 1.7008834 1.6374546 ] [1.8253784 2.0786445 1.8253784 ... 1.211737 2.26278 1.211737 ] [2.370068 2.1106498 2.370068 ... 1.6374546 2.1400707 1.6374546 ] ... [1.8412219 1.7100191 1.8412219 ... 2.4549773 2.4105642 2.2663753 ] [1.5657704 1.5119584 2.2264273 ... 1.9186592 1.4979054 1.9186592 ] [1.4422816 1.7100191 1.4930811 ... 2.4549773 2.4105642 1.5678334 ]] [[2.4647326 1.1968006 2.4647326 ... 1.9591644 1.7368612 1.9591644 ] [2.021747 1.5287316 2.021747 ... 1.8984578 1.0017205 1.8984578 ] [2.4647326 2.123814 2.4647326 ... 1.9591644 1.7368612 1.9591644 ] ... 
[1.913682 2.3422353 1.913682 ... 1.6235067 1.5990615 1.6235067 ] [1.5243446 2.0402224 1.5243446 ... 1.4735047 2.2261896 1.4735047 ] [1.913682 2.3422353 1.913682 ... 1.4197407 1.472351 0.810759 ]] ... [[1.8638434 1.2044545 1.9339203 ... 1.472447 2.3360317 1.472447 ] [1.8151132 2.2113347 1.8151132 ... 2.103637 1.9410135 2.103637 ] [1.8638434 2.6622334 1.9339203 ... 1.472447 2.3360317 1.472447 ] ... [1.326771 1.6561402 1.7196304 ... 2.1798978 1.9524742 1.4781865 ] [2.4858136 1.8531878 2.4858136 ... 2.7743244 1.44542 2.2767086 ] [1.326771 1.6561402 1.7196304 ... 2.1798978 1.9524742 0.95794654]] [[1.2343683 0.97904366 1.2343683 ... 1.5224085 1.6100053 1.5224085 ] [3.4169612 2.5595183 3.4169612 ... 1.660655 2.5900805 1.660655 ] [1.2353919 1.0639262 1.2353919 ... 2.702392 1.6818149 1.5224085 ] ... [1.9745395 2.2148252 2.003483 ... 2.221621 1.2013706 1.9890493 ] [1.8584396 1.9075423 1.8584396 ... 2.2882907 1.3994687 1.1014506 ] [1.9745395 2.2148252 2.003483 ... 2.221621 1.0356296 1.7339814 ]] [[1.8638434 1.2044545 1.9339203 ... 1.472447 2.3360317 1.472447 ] [1.0995208 2.2113347 1.0995208 ... 2.103637 1.9410135 2.103637 ] [1.8638434 2.6622334 1.9339203 ... 1.472447 2.3360317 1.472447 ] ... [1.326771 1.6561402 1.3380908 ... 2.1798978 1.0190413 1.0628896 ] [2.4858136 0.8988293 2.4858136 ... 2.7743244 1.2928132 2.2767086 ] [1.326771 1.6561402 1.326771 ... 2.1798978 0.75954777 0.95794654]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': [3, 3, 3], 'stride': [3, 3, 3], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5876.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2, 2]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%6) fw_re: [[[[[2.3172913 1.5387564 2.2957783 1.377722 0.9362996 ] [1.5221401 1.8716934 2.1583161 3.0210767 2.412626 ] [1.4759452 2.043106 1.8754599 0.9556307 3.2763412 ] [1.7202073 1.6945412 1.34741 1.8844953 1.305255 ] [1.3360623 1.5347637 1.358302 1.9138827 1.3846328 ]] [[1.4360154 2.0315657 3.6216063 1.7972099 1.8015755 ] [1.8351122 2.7957947 1.3988353 1.8926876 1.5523466 ] [2.9406488 1.5389096 1.1050656 1.8653855 2.0013125 ] [2.6345756 2.9135742 2.2565732 2.3238978 2.3358083 ] [1.8261188 1.783057 1.7858598 1.4835385 0.9250446 ]] [[2.3503375 1.6555952 2.5240362 1.3821976 1.8042614 ] [2.1459787 2.18598 2.103778 1.5885949 1.1599048 ] [1.656238 1.1724749 1.937752 1.5906334 1.5637653 ] [2.140944 2.7707675 1.9926692 2.4753664 1.7853004 ] [1.6981119 1.4847533 1.3899586 1.8201689 1.5209337 ]] [[2.45439 2.2871902 1.8654078 2.1442854 1.4567766 ] [1.7584603 1.3445104 2.3012726 2.7768047 1.472977 ] [1.9157528 1.5905939 2.0104394 2.1435893 1.9719996 ] [1.7804891 1.6024094 0.9816538 1.7975043 1.6838434 ] [0.8523517 1.2662826 1.0940928 1.5470282 0.4361084 ]] [[1.5424392 1.3239928 1.2019124 1.9434564 1.5861261 ] [2.6698773 1.7219448 2.044907 1.44337 1.2022792 ] [1.4740946 2.0100172 3.4834895 1.4402361 0.97751504] [2.9928532 2.2025 1.9287932 1.9373367 2.2557993 ] [1.9188821 1.5145149 1.6282471 3.06434 0.9659569 ]]] [[[1.6348127 1.9535588 2.602129 2.9139848 1.7889974 ] [1.1729106 2.666621 1.4684873 
1.5825212 2.8014467 ] [2.0557196 2.491579 1.982235 1.7060896 1.9352612 ] [2.1165233 1.5760386 2.7419436 3.0591946 3.1887565 ] [1.353198 1.7627416 1.7856102 2.660328 1.0689973 ]] [[2.314067 2.320385 2.5103316 1.9654144 2.5602007 ] [1.6447064 2.2053292 2.1177313 1.6624559 1.1608617 ] [3.0840952 1.893706 2.3714004 3.219379 2.1897528 ] [2.0791376 2.1159654 2.0775259 1.1527916 1.736015 ] [1.6996526 1.5228732 3.2559657 2.1943157 2.3453152 ]] [[1.8888224 2.2735445 1.6804507 1.860795 1.8152589 ] [2.2103236 1.7704939 1.9129231 1.7869601 2.1665876 ] [2.461167 0.8494885 1.4631817 1.4776312 1.3609998 ] [2.1279213 1.6828141 1.9378933 2.4581618 2.1710665 ] [1.9037064 2.2357442 2.2419286 1.3586702 1.3868014 ]] [[1.9237998 1.986009 1.1839877 1.2383374 1.552654 ] [1.7728302 2.103808 1.1770607 1.7903292 0.9250128 ] [1.3448446 2.8608434 1.6417428 1.8386756 1.7137182 ] [1.2962693 1.6946294 1.4824905 2.4125025 1.686106 ] [1.5062642 1.6206309 1.6582353 1.3970029 1.7271377 ]] [[1.771726 2.3677175 2.3718042 1.134126 1.0323386 ] [1.3927772 1.9446641 1.8521316 2.014905 1.0653017 ] [1.5734059 2.1009662 1.3774548 2.8120508 1.4367601 ] [1.6507481 2.0154843 1.7482817 1.4885839 1.2003177 ] [1.9757208 1.8460418 1.7258142 1.7752181 1.8330275 ]]] [[[2.0907347 2.1766388 2.4747052 3.7689128 2.2251635 ] [1.2452188 1.883883 2.1025248 1.5373985 2.3337896 ] [1.7258949 1.563227 2.010226 1.2593066 1.8709155 ] [2.1871126 1.3183173 1.1970549 1.4448112 1.8187075 ] [2.388986 1.6541114 1.6993515 1.3036054 2.6760085 ]] [[1.8426137 2.2049878 1.737699 2.84218 1.7205757 ] [1.8333573 1.4196931 2.204724 1.6859806 2.3177183 ] [1.9003197 1.8742656 2.3371665 1.1523825 2.1321187 ] [1.3202467 2.0471213 2.5150955 3.3512142 1.9491448 ] [0.9498232 1.9185408 2.2393255 0.5251197 2.572253 ]] [[1.1001116 1.8220168 2.5305073 2.057429 1.6587616 ] [2.661674 1.9155368 2.9088254 3.451688 1.9169469 ] [1.9256148 2.5109136 1.9354533 1.7253978 1.1725119 ] [1.2644172 2.7798955 1.5708909 2.2131755 2.6648722 ] [2.2928576 2.012351 1.403653 
3.2590878 0.88975704]] [[1.2684588 2.5435114 1.1489018 2.1395047 1.3845216 ] [1.6500703 2.5049086 1.447782 2.4007134 1.8125126 ] [1.5911838 1.6062455 1.5860952 1.3972229 1.2487875 ] [1.8820686 2.013223 1.5962309 2.0761826 1.4024494 ] [2.1330395 0.7129074 1.9645246 2.3974252 1.6796774 ]] [[2.0440292 1.9821819 1.4057763 1.376222 2.0521908 ] [1.3769517 2.4685175 2.9036336 1.8248945 1.2046216 ] [2.6621842 2.0520241 1.8735394 2.4536223 1.0830488 ] [1.5899353 1.5600365 2.111304 1.4611163 2.361661 ] [2.4572444 1.6188258 1.0994097 2.3875954 0.591926 ]]]]]; ov_res: [[[[[2.3172913 1.5387564 2.2957783 1.377722 0.9362996 ] [1.5221401 1.8716934 2.1583161 3.0210767 2.412626 ] [1.4759452 2.043106 1.8754599 0.9556307 3.2763412 ] [1.7202073 1.6945412 1.34741 1.8844953 1.305255 ] [1.3360623 1.5347637 1.358302 1.9138827 1.3846328 ]] [[1.4360154 2.0315657 3.6216063 1.7972099 1.8015755 ] [1.8351122 2.7957947 1.3988353 1.8926876 1.5523466 ] [2.9406488 1.5389096 1.1050656 1.8653855 2.0013125 ] [2.6345756 2.9135742 2.2565732 2.3238978 2.3358083 ] [1.8261188 1.783057 1.7858598 1.4835385 0.9250446 ]] [[2.3503375 1.6555952 2.5240362 1.3821976 1.8042614 ] [2.1459787 2.18598 2.103778 1.5885949 1.1599048 ] [1.656238 1.1724749 1.937752 1.5906334 1.5637653 ] [2.140944 2.7707675 1.9926692 2.4753664 1.7853004 ] [1.6981119 1.4847533 1.3899586 1.8201689 1.5209337 ]] [[2.45439 2.2871902 1.8654078 2.1442854 1.4567766 ] [1.7584603 1.3445104 2.3012726 2.7768047 1.472977 ] [1.9157528 1.5905939 2.0104394 2.1435893 1.9719996 ] [1.7804891 1.6024094 0.9816538 1.7975043 1.6838434 ] [0.8523517 1.2662826 1.0940928 1.5470282 0.4361084 ]] [[1.5424392 1.3239928 1.2019124 1.9434564 1.5861261 ] [2.6698773 1.7219448 2.044907 1.44337 1.2022792 ] [1.4740946 2.0100172 3.4834895 1.4402361 0.97751504] [2.9928532 2.2025 1.9287932 1.9373367 2.2557993 ] [1.9188821 1.5145149 1.6282471 3.06434 0.9659569 ]]] [[[1.6348127 1.9535588 2.602129 2.9139848 1.7889974 ] [1.1729106 2.666621 1.4684873 1.5825212 2.8014467 ] [2.0557196 
2.491579 1.982235 1.7060896 1.9352612 ] [2.1165233 1.5760386 2.7419436 3.0591946 3.1887565 ] [1.353198 1.7627416 1.7856102 2.660328 1.0689973 ]] [[2.314067 2.320385 2.5103316 1.9654144 2.5602007 ] [1.6447064 2.2053292 2.1177313 1.6624559 1.1608617 ] [3.0840952 1.893706 2.3714004 3.219379 2.1897528 ] [2.0791376 2.1159654 2.0775259 1.1527916 1.736015 ] [1.6996526 1.5228732 3.2559657 2.1943157 2.3453152 ]] [[1.8888224 2.2735445 1.6804507 1.860795 1.8152589 ] [2.2103236 1.7704939 1.9129231 1.7869601 2.1665876 ] [2.461167 0.8494885 1.4631817 1.4776312 1.3609998 ] [2.1279213 1.6828141 1.9378933 2.4581618 2.1710665 ] [1.9037064 2.2357442 2.2419286 1.3586702 1.3868014 ]] [[1.9237998 1.986009 1.1839877 1.2383374 1.552654 ] [1.7728302 2.103808 1.1770607 1.7903292 0.9250128 ] [1.3448446 2.8608434 1.6417428 1.8386756 1.7137182 ] [1.2962693 1.6946294 1.4824905 2.4125025 1.686106 ] [1.5062642 1.6206309 1.6582353 1.3970029 1.7271377 ]] [[1.771726 2.3677175 2.3718042 1.134126 1.0323386 ] [1.3927772 1.9446641 1.8521316 2.014905 1.0653017 ] [1.5734059 2.1009662 1.3774548 2.8120508 1.4367601 ] [1.6507481 2.0154843 1.7482817 1.4885839 1.2003177 ] [1.9757208 1.8460418 1.7258142 1.7752181 1.8330275 ]]] [[[2.0907347 2.1766388 2.4747052 3.7689128 2.2251635 ] [1.2452188 1.883883 2.1025248 1.5373985 2.3337896 ] [1.7258949 1.563227 2.010226 1.2593066 1.8709155 ] [2.1871126 1.3183173 1.1970549 1.4448112 1.8187075 ] [2.388986 1.6541114 1.6993515 1.3036054 2.6760085 ]] [[1.8426137 2.2049878 1.737699 2.84218 1.7205757 ] [1.8333573 1.4196931 2.204724 1.6859806 2.3177183 ] [1.9003197 1.8742656 2.3371665 1.1523825 2.1321187 ] [1.3202467 2.0471213 2.5150955 3.3512142 1.9491448 ] [0.9498232 1.9185408 2.2393255 0.5251197 2.572253 ]] [[1.1001116 1.8220168 2.5305073 2.057429 1.6587616 ] [2.661674 1.9155368 2.9088254 3.451688 1.9169469 ] [1.9256148 2.5109136 1.9354533 1.7253978 1.1725119 ] [1.2644172 2.7798955 1.5708909 2.2131755 2.6648722 ] [2.2928576 2.012351 1.403653 3.2590878 0.88975704]] [[1.2684588 
2.5435114 1.1489018 2.1395047 1.3845216 ] [1.6500703 2.5049086 1.447782 2.4007134 1.8125126 ] [1.5911838 1.6062455 1.5860952 1.3972229 1.2487875 ] [1.8820686 2.013223 1.5962309 2.0761826 1.4024494 ] [2.1330395 0.7129074 1.9645246 2.3974252 1.6796774 ]] [[2.0440292 1.9821819 1.4057763 1.376222 2.0521908 ] [1.3769517 2.4685175 2.9036336 1.8248945 1.2046216 ] [2.6621842 2.0520241 1.8735394 2.4536223 1.0830488 ] [1.5899353 1.5600365 2.111304 1.4611163 2.361661 ] [2.4572444 1.6188258 1.0994097 2.3875954 0.591926 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:True - params:{'kernel_size': [3, 2, 1], 'stride': [3, 1, 1], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5878.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2, 2]]() %self.ceil_mode : bool = prim::Constant[value=1]() %self.padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.stride : int[] = prim::Constant[value=[3, 1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2, 1]]() %7 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.stride, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%7) fw_re: [[[[[ 0.6944657 0.66624296 0.5972582 ... 1.0153235 1.9351971 0.56413877] [-0.41658643 1.1898597 2.774257 ... 0.56329906 1.9077731 0.6590448 ] [ 1.3004093 1.5070668 0.5972582 ... 1.0153235 1.9351971 0.6401594 ] ... [ 0.9744786 0.5505483 1.1096776 ... 2.2254887 0.7835617 0.45432258] [ 0.7231788 1.7311839 1.1411158 ... 1.1964439 -0.13214956 0.5662196 ] [ 2.3177588 0.7946953 0.8709581 ... 1.5510577 1.3038908 -0.10726982]] [[ 1.0675527 1.4729481 1.6115149 ... 1.8111008 1.3473805 1.1883669 ] [ 0.8320519 0.6195494 1.9545252 ... 1.3013278 1.4451625 1.2072628 ] [ 1.7822396 1.4411348 1.6115149 ... 1.7333655 1.2626067 0.8490151 ] ... [ 0.46980333 1.0935484 1.4143815 ... 0.5548887 2.6344688 0.9509755 ] [ 0.79012513 1.393609 1.3828155 ... 0.56570727 3.445367 1.6745728 ] [ 0.46980333 1.0935484 1.4143815 ... 0.15235603 0.32157663 0.5862467 ]] [[ 0.9937366 1.0639603 1.4282047 ... 2.0967946 1.0484189 1.9134827 ] [ 2.1036317 1.9922191 1.334555 ... 0.7096192 1.1329862 0.8049063 ] [ 1.2528654 1.7329477 3.108508 ... 0.44189727 1.0484189 1.5251293 ] ... [ 2.1586099 1.2151676 1.3535119 ... 0.4496263 0.60559493 0.58718574] [ 0.20457008 1.561419 1.2436033 ... 0.8141383 2.2985034 0.8710628 ] [ 0.95298326 0.36770448 1.3535119 ... 1.5776476 1.0834695 0.7561526 ]] [[ 0.01603474 1.2827351 1.0583551 ... 
0.67970383 0.20835984 1.4311143 ] [ 0.739088 0.93169457 1.0554705 ... 1.4352882 2.3950953 0.46670192] [ 1.0500387 1.2827351 1.2186296 ... 1.3059627 1.5728277 1.4311143 ] ... [ 0.11528917 0.6527392 0.43912286 ... 1.7592617 0.45806968 0.69590175] [ 1.7765002 1.1631664 1.2798369 ... 0.7965159 0.96711755 0.5008614 ] [ 0.713452 1.5523642 0.89870083 ... 1.185826 0.2988707 1.7049148 ]] [[ 0.8005147 1.6168411 1.8912605 ... 0.5823056 1.7210125 0.378729 ] [ 1.1881524 0.54230404 1.1135899 ... 1.2073773 1.3234468 1.2006837 ] [ 0.8005147 1.6168411 1.8912605 ... 1.1263471 1.3501866 0.378729 ] ... [ 1.7765433 2.2482202 0.9587187 ... 0.773672 0.6637394 1.0708131 ] [ 0.77977324 2.5694191 -0.08102921 ... 2.6702595 1.2259305 0.27065447] [ 1.7765433 2.2482202 1.0282902 ... 0.773672 0.6637394 1.0708131 ]]] [[[ 1.6880072 0.893158 2.0241258 ... 1.7562531 0.4405963 3.0793893 ] [ 1.1561264 0.6176132 1.8686965 ... 1.0786031 0.39496207 0.817551 ] [ 0.30254763 0.893158 1.5702058 ... 1.3875589 0.6214088 3.0793893 ] ... [ 1.531627 1.5542394 1.2197835 ... 1.5828437 2.132664 1.0221872 ] [ 0.36057264 1.2342789 2.036487 ... 1.5736715 1.0246474 2.1722484 ] [ 1.531627 1.7014854 1.342662 ... 1.8107861 1.5887365 0.9570878 ]] [[ 0.4455006 0.83540183 1.0473795 ... 1.2104089 0.44588524 1.8519751 ] [ 1.2017077 0.96114254 2.0540617 ... 2.110603 0.8998408 2.2880743 ] [ 2.2969992 0.83540183 1.0473795 ... 1.2104089 1.4614207 2.0965815 ] ... [ 1.3506516 0.38748196 1.8416146 ... 1.9576923 1.6707027 1.3529382 ] [ 1.7000216 1.5185028 0.9770244 ... 2.4849784 1.4923631 1.3380046 ] [ 1.509821 0.7284317 1.8416146 ... 1.9576923 1.6707027 0.74704456]] [[ 1.5659671 1.159732 1.6315334 ... 2.0636885 0.97447366 1.5394495 ] [ 3.6025383 0.943562 1.761263 ... -0.21357498 0.613283 1.4176682 ] [ 1.5659671 1.5746521 1.4011564 ... 2.0124876 1.3969642 1.5394495 ] ... [ 1.7639344 0.38907766 1.250047 ... 0.20836845 1.4755436 0.7364769 ] [ 1.211615 0.6611789 1.4264566 ... 
1.093077 0.65652335 0.5887681 ] [ 1.414897 0.41523555 1.1433318 ... 2.1713414 0.90946347 1.1043792 ]] [[ 1.6736133 2.0049963 1.2145628 ... 1.5857764 1.2078446 2.3116617 ] [ 0.95148814 1.0190889 1.3700196 ... 0.41725802 1.370815 0.11890071] [ 1.0575088 0.9617123 1.1237375 ... 0.42086846 -0.00837012 2.3116617 ] ... [ 0.26834673 1.3450288 1.2770662 ... 1.3364316 1.4041779 1.4184468 ] [ 1.437237 0.73194206 0.35320553 ... 0.22445983 1.8722897 0.6790183 ] [ 1.3976246 0.97595125 1.2770662 ... 1.4294821 1.7745779 -0.05206593]] [[ 0.72049856 0.6952726 -0.40135047 ... 1.0069202 1.7084261 2.6321652 ] [ 0.5546876 0.61208916 2.3609033 ... 0.65532637 2.0734143 0.60626394] [ 2.388964 1.0536503 0.5349423 ... 0.17799145 2.3544474 0.5121368 ] ... [ 1.0528903 1.6719257 0.5565484 ... 1.2574515 0.21211599 2.0564756 ] [-0.3378339 0.83855426 1.2408257 ... 0.33267567 1.7216785 2.4966383 ] [ 1.1357205 1.6719257 1.0511993 ... 0.3315299 0.21211599 1.2905265 ]]] [[[ 0.05824781 1.7274475 0.64020914 ... 1.4399242 1.0438505 0.8707708 ] [ 1.6645844 0.6587273 1.0247412 ... 2.113507 0.67113763 0.9521687 ] [ 1.9924369 0.78403324 0.92299634 ... 2.1135035 1.2677263 0.91342616] ... [ 1.136227 1.9714596 1.2859937 ... 1.405899 0.72666734 1.628609 ] [ 0.6040622 0.5167699 1.919271 ... 0.9559419 1.912527 1.1852714 ] [ 0.09019067 1.9714596 1.5723798 ... 1.4073668 0.88498044 1.628609 ]] [[ 1.645819 2.5279787 1.2805198 ... 0.27111736 1.601679 1.4874961 ] [ 1.0221862 1.508517 1.2746457 ... 2.3290727 0.9056982 1.0973006 ] [ 1.5681981 0.5088766 1.2805198 ... 1.3406262 1.601679 1.4874961 ] ... [ 0.85587364 1.3913664 0.75212026 ... 1.497473 2.0047317 0.7385309 ] [ 1.0466821 1.6750314 0.79496616 ... 1.3541312 0.8067997 1.2769613 ] [ 2.5071397 1.5593683 0.75212026 ... 1.497473 0.7653195 1.0145085 ]] [[ 1.2581841 1.9457066 1.622843 ... 1.5252775 0.67400306 2.0000072 ] [ 2.0843503 1.3871274 0.9615021 ... 1.3287593 1.4066647 1.9637939 ] [ 2.4178665 1.6527056 -0.09338944 ... 1.5230983 0.81419957 1.2889001 ] ... 
[ 1.5687307 0.28369138 0.9177601 ... 1.5009335 1.0460167 1.7312496 ] [ 1.2724721 1.5527042 1.1686741 ... 1.383965 0.62433505 0.560516 ] [ 1.5687307 0.28369138 1.4535555 ... 1.5009335 1.7604251 0.55737174]] [[ 0.68157506 1.6108841 0.10392062 ... 0.7052101 2.542754 2.250407 ] [ 1.5813226 0.97898805 0.50983405 ... 1.1295108 0.14062677 1.5015938 ] [ 2.6118846 0.8264297 0.23388453 ... 1.5530037 2.542754 2.250407 ] ... [ 1.1049895 0.7608641 1.2194242 ... 1.7995721 0.43226698 1.2782722 ] [ 1.2501762 1.1030638 0.7857708 ... 0.9876163 1.714555 -0.01843861] [ 1.2267983 1.2610134 1.3702284 ... 1.7995721 1.5637015 1.2575451 ]] [[ 0.85148704 0.32593092 0.14657223 ... 2.2544665 1.5266843 -0.11835962] [ 1.0159901 0.7425293 0.42485422 ... 0.8137203 2.2790482 0.3847543 ] [ 0.85148704 1.6260748 0.14657223 ... 0.85982203 1.2885724 -0.05536865] ... [ 0.38053688 2.3595712 1.4347883 ... 2.0152779 0.79168916 1.7795273 ] [ 1.3610477 0.68180233 0.94494784 ... -0.38718686 1.8152667 1.1648924 ] [ 1.474706 1.7936019 1.4347883 ... 0.910611 0.79168916 1.7795273 ]]]]]; ov_res: [[[[[ 0.6944657 0.66624296 0.5972582 ... 1.0153235 1.9351971 0.56413877] [-0.41658643 1.1898597 2.774257 ... 0.56329906 1.9077731 0.6590448 ] [ 1.3004093 1.5070668 0.5972582 ... 1.0153235 1.9351971 0.6401594 ] ... [ 0.9744786 0.5505483 1.1096776 ... 2.2254887 0.7835617 0.45432258] [ 0.7231788 1.7311839 1.1411158 ... 1.1964439 -0.13214956 0.5662196 ] [ 2.3177588 0.7946953 0.8709581 ... 1.5510577 1.3038908 -0.10726982]] [[ 1.0675527 1.4729481 1.6115149 ... 1.8111008 1.3473805 1.1883669 ] [ 0.8320519 0.6195494 1.9545252 ... 1.3013278 1.4451625 1.2072628 ] [ 1.7822396 1.4411348 1.6115149 ... 1.7333655 1.2626067 0.8490151 ] ... [ 0.46980333 1.0935484 1.4143815 ... 0.5548887 2.6344688 0.9509755 ] [ 0.79012513 1.393609 1.3828155 ... 0.56570727 3.445367 1.6745728 ] [ 0.46980333 1.0935484 1.4143815 ... 0.15235603 0.32157663 0.5862467 ]] [[ 0.9937366 1.0639603 1.4282047 ... 
2.0967946 1.0484189 1.9134827 ] [ 2.1036317 1.9922191 1.334555 ... 0.7096192 1.1329862 0.8049063 ] [ 1.2528654 1.7329477 3.108508 ... 0.44189727 1.0484189 1.5251293 ] ... [ 2.1586099 1.2151676 1.3535119 ... 0.4496263 0.60559493 0.58718574] [ 0.20457008 1.561419 1.2436033 ... 0.8141383 2.2985034 0.8710628 ] [ 0.95298326 0.36770448 1.3535119 ... 1.5776476 1.0834695 0.7561526 ]] [[ 0.01603474 1.2827351 1.0583551 ... 0.67970383 0.20835984 1.4311143 ] [ 0.739088 0.93169457 1.0554705 ... 1.4352882 2.3950953 0.46670192] [ 1.0500387 1.2827351 1.2186296 ... 1.3059627 1.5728277 1.4311143 ] ... [ 0.11528917 0.6527392 0.43912286 ... 1.7592617 0.45806968 0.69590175] [ 1.7765002 1.1631664 1.2798369 ... 0.7965159 0.96711755 0.5008614 ] [ 0.713452 1.5523642 0.89870083 ... 1.185826 0.2988707 1.7049148 ]] [[ 0.8005147 1.6168411 1.8912605 ... 0.5823056 1.7210125 0.378729 ] [ 1.1881524 0.54230404 1.1135899 ... 1.2073773 1.3234468 1.2006837 ] [ 0.8005147 1.6168411 1.8912605 ... 1.1263471 1.3501866 0.378729 ] ... [ 1.7765433 2.2482202 0.9587187 ... 0.773672 0.6637394 1.0708131 ] [ 0.77977324 2.5694191 -0.08102921 ... 2.6702595 1.2259305 0.27065447] [ 1.7765433 2.2482202 1.0282902 ... 0.773672 0.6637394 1.0708131 ]]] [[[ 1.6880072 0.893158 2.0241258 ... 1.7562531 0.4405963 3.0793893 ] [ 1.1561264 0.6176132 1.8686965 ... 1.0786031 0.39496207 0.817551 ] [ 0.30254763 0.893158 1.5702058 ... 1.3875589 0.6214088 3.0793893 ] ... [ 1.531627 1.5542394 1.2197835 ... 1.5828437 2.132664 1.0221872 ] [ 0.36057264 1.2342789 2.036487 ... 1.5736715 1.0246474 2.1722484 ] [ 1.531627 1.7014854 1.342662 ... 1.8107861 1.5887365 0.9570878 ]] [[ 0.4455006 0.83540183 1.0473795 ... 1.2104089 0.44588524 1.8519751 ] [ 1.2017077 0.96114254 2.0540617 ... 2.110603 0.8998408 2.2880743 ] [ 2.2969992 0.83540183 1.0473795 ... 1.2104089 1.4614207 2.0965815 ] ... [ 1.3506516 0.38748196 1.8416146 ... 1.9576923 1.6707027 1.3529382 ] [ 1.7000216 1.5185028 0.9770244 ... 
2.4849784 1.4923631 1.3380046 ] [ 1.509821 0.7284317 1.8416146 ... 1.9576923 1.6707027 0.74704456]] [[ 1.5659671 1.159732 1.6315334 ... 2.0636885 0.97447366 1.5394495 ] [ 3.6025383 0.943562 1.761263 ... -0.21357498 0.613283 1.4176682 ] [ 1.5659671 1.5746521 1.4011564 ... 2.0124876 1.3969642 1.5394495 ] ... [ 1.7639344 0.38907766 1.250047 ... 0.20836845 1.4755436 0.7364769 ] [ 1.211615 0.6611789 1.4264566 ... 1.093077 0.65652335 0.5887681 ] [ 1.414897 0.41523555 1.1433318 ... 2.1713414 0.90946347 1.1043792 ]] [[ 1.6736133 2.0049963 1.2145628 ... 1.5857764 1.2078446 2.3116617 ] [ 0.95148814 1.0190889 1.3700196 ... 0.41725802 1.370815 0.11890071] [ 1.0575088 0.9617123 1.1237375 ... 0.42086846 -0.00837012 2.3116617 ] ... [ 0.26834673 1.3450288 1.2770662 ... 1.3364316 1.4041779 1.4184468 ] [ 1.437237 0.73194206 0.35320553 ... 0.22445983 1.8722897 0.6790183 ] [ 1.3976246 0.97595125 1.2770662 ... 1.4294821 1.7745779 -0.05206593]] [[ 0.72049856 0.6952726 -0.40135047 ... 1.0069202 1.7084261 2.6321652 ] [ 0.5546876 0.61208916 2.3609033 ... 0.65532637 2.0734143 0.60626394] [ 2.388964 1.0536503 0.5349423 ... 0.17799145 2.3544474 0.5121368 ] ... [ 1.0528903 1.6719257 0.5565484 ... 1.2574515 0.21211599 2.0564756 ] [-0.3378339 0.83855426 1.2408257 ... 0.33267567 1.7216785 2.4966383 ] [ 1.1357205 1.6719257 1.0511993 ... 0.3315299 0.21211599 1.2905265 ]]] [[[ 0.05824781 1.7274475 0.64020914 ... 1.4399242 1.0438505 0.8707708 ] [ 1.6645844 0.6587273 1.0247412 ... 2.113507 0.67113763 0.9521687 ] [ 1.9924369 0.78403324 0.92299634 ... 2.1135035 1.2677263 0.91342616] ... [ 1.136227 1.9714596 1.2859937 ... 1.405899 0.72666734 1.628609 ] [ 0.6040622 0.5167699 1.919271 ... 0.9559419 1.912527 1.1852714 ] [ 0.09019067 1.9714596 1.5723798 ... 1.4073668 0.88498044 1.628609 ]] [[ 1.645819 2.5279787 1.2805198 ... 0.27111736 1.601679 1.4874961 ] [ 1.0221862 1.508517 1.2746457 ... 2.3290727 0.9056982 1.0973006 ] [ 1.5681981 0.5088766 1.2805198 ... 1.3406262 1.601679 1.4874961 ] ... 
[ 0.85587364 1.3913664 0.75212026 ... 1.497473 2.0047317 0.7385309 ] [ 1.0466821 1.6750314 0.79496616 ... 1.3541312 0.8067997 1.2769613 ] [ 2.5071397 1.5593683 0.75212026 ... 1.497473 0.7653195 1.0145085 ]] [[ 1.2581841 1.9457066 1.622843 ... 1.5252775 0.67400306 2.0000072 ] [ 2.0843503 1.3871274 0.9615021 ... 1.3287593 1.4066647 1.9637939 ] [ 2.4178665 1.6527056 -0.09338944 ... 1.5230983 0.81419957 1.2889001 ] ... [ 1.5687307 0.28369138 0.9177601 ... 1.5009335 1.0460167 1.7312496 ] [ 1.2724721 1.5527042 1.1686741 ... 1.383965 0.62433505 0.560516 ] [ 1.5687307 0.28369138 1.4535555 ... 1.5009335 1.7604251 0.55737174]] [[ 0.68157506 1.6108841 0.10392062 ... 0.7052101 2.542754 2.250407 ] [ 1.5813226 0.97898805 0.50983405 ... 1.1295108 0.14062677 1.5015938 ] [ 2.6118846 0.8264297 0.23388453 ... 1.5530037 2.542754 2.250407 ] ... [ 1.1049895 0.7608641 1.2194242 ... 1.7995721 0.43226698 1.2782722 ] [ 1.2501762 1.1030638 0.7857708 ... 0.9876163 1.714555 -0.01843861] [ 1.2267983 1.2610134 1.3702284 ... 1.7995721 1.5637015 1.2575451 ]] [[ 0.85148704 0.32593092 0.14657223 ... 2.2544665 1.5266843 -0.11835962] [ 1.0159901 0.7425293 0.42485422 ... 0.8137203 2.2790482 0.3847543 ] [ 0.85148704 1.6260748 0.14657223 ... 0.85982203 1.2885724 -0.05536865] ... [ 0.38053688 2.3595712 1.4347883 ... 2.0152779 0.79168916 1.7795273 ] [ 1.3610477 0.68180233 0.94494784 ... -0.38718686 1.8152667 1.1648924 ] [ 1.474706 1.7936019 1.4347883 ... 0.910611 0.79168916 1.7795273 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': 1, 'padding': 0} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5880.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2, 2]]() %3 : int[] = prim::Constant[value=[0, 0, 0]]() %4 : int[] = prim::Constant[value=[1, 1, 1]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %7 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %4, %3, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%7) fw_re: [[[[[2.593326 1.57766 2.593326 ... 1.7828243 2.5172486 1.7828243 ] [2.0381358 2.6370108 1.9501199 ... 2.1252234 1.8103846 2.1252234 ] [2.593326 1.57766 2.593326 ... 1.0410888 2.5172486 1.0410888 ] ... [1.3959583 2.0255585 1.3959583 ... 2.2170653 1.7580515 2.2170653 ] [1.719129 1.2580305 1.9651033 ... 2.55451 2.3174422 2.55451 ] [2.705466 2.0255585 2.705466 ... 1.7868037 1.635284 2.0199263 ]] [[1.4783162 1.6488823 1.4783162 ... 1.211018 2.2473378 1.9942151 ] [1.9240742 2.8135061 1.9240742 ... 2.326782 3.0233667 2.326782 ] [1.5244193 2.2278466 1.7790271 ... 2.1364622 2.2473378 2.1364622 ] ... [2.1535401 2.2359767 2.3159456 ... 2.5734076 1.57718 2.5734076 ] [1.7723374 1.9113758 2.1937358 ... 1.8139653 1.9531169 1.920295 ] [2.1535401 2.2359767 1.7618709 ... 2.2744193 2.963406 2.2744193 ]] [[2.593326 2.2380555 2.593326 ... 1.888171 1.6640587 1.3528693 ] [2.065177 2.6370108 2.065177 ... 2.2930884 1.2577069 2.2930884 ] [2.593326 1.57766 2.593326 ... 1.888171 1.6640587 1.3528693 ] ... [1.3959583 2.0255585 1.3959583 ... 0.85458285 1.7580515 1.3502489 ] [1.719129 1.7317663 2.1968126 ... 2.55451 2.3174422 2.55451 ] [2.705466 2.0255585 2.705466 ... 1.3269018 1.635284 1.3502489 ]] ... [[1.780304 2.169824 2.030005 ... 2.317796 1.8077742 2.317796 ] [1.4326495 1.4190686 1.4326495 ... 3.0105867 1.6474909 1.523232 ] [1.3667883 1.4738559 2.030005 ... 2.317796 1.5953146 2.317796 ] ... 
[2.4169345 2.3931253 2.4169345 ... 1.8113884 1.7561781 1.9480495 ] [2.247161 3.0024228 2.247161 ... 1.1188045 1.976133 1.1119533 ] [2.73929 2.3931253 2.73929 ... 1.8113884 1.9104877 1.9480495 ]] [[4.083977 1.3506185 4.083977 ... 1.5086981 3.3834727 1.695213 ] [1.8202144 3.3125937 1.8202144 ... 2.8265991 1.9588405 1.9596586 ] [4.083977 1.5830877 4.083977 ... 1.4524798 3.3834727 1.6518695 ] ... [1.9855862 1.8617033 1.9855862 ... 1.8876904 1.590215 2.217256 ] [0.94586515 3.2140508 0.9706841 ... 1.424623 2.1236174 1.424623 ] [1.4848453 1.4936099 1.4848453 ... 2.0116715 1.590215 2.217256 ]] [[1.780304 2.169824 2.1516798 ... 2.317796 1.8077742 2.317796 ] [2.1124642 1.4190686 2.1124642 ... 3.0105867 0.91055536 1.523232 ] [1.3667883 1.4738559 2.030005 ... 2.317796 1.5953146 2.317796 ] ... [1.76847 2.3931253 1.404493 ... 1.9168949 1.5271022 1.8113884 ] [2.247161 3.0024228 2.247161 ... 1.1188045 1.976133 1.8736109 ] [2.0592246 2.3931253 1.9318273 ... 1.9168949 1.9104877 1.8733302 ]]] [[[1.2103575 2.0758812 1.2103575 ... 2.1430757 1.416237 2.1430757 ] [1.889362 1.8587688 1.889362 ... 1.6098188 1.564992 1.6098188 ] [1.2103575 2.0758812 1.2103575 ... 2.1430757 1.416237 2.1430757 ] ... [2.1405647 1.8198512 1.8437289 ... 1.1705078 2.0969205 1.3035463 ] [2.3651443 2.4560905 2.5289385 ... 1.7815241 1.5016319 1.7815241 ] [2.1405647 2.5091083 1.860255 ... 1.0350791 2.0969205 1.3035463 ]] [[2.3975644 1.5897384 2.3975644 ... 2.5081182 1.1643088 2.5081182 ] [1.5989538 1.9263744 1.5989538 ... 1.2265111 2.2190506 2.2607906 ] [2.5794208 1.6632476 2.3975644 ... 1.3339999 1.6709739 1.9606684 ] ... [2.2838895 2.2805283 1.4560105 ... 1.7044142 2.0388308 1.744581 ] [2.5458372 2.4733253 1.1727551 ... 1.9892762 2.5249352 1.9892762 ] [2.2838895 1.7193675 1.4560105 ... 1.7044142 2.0388308 1.744581 ]] [[1.2103575 2.7243972 1.2753726 ... 1.3858749 1.789367 1.3858749 ] [2.2897494 2.478848 2.2897494 ... 2.0551832 1.537464 2.0551832 ] [1.2103575 2.7243972 1.2753726 ... 1.3909916 1.789367 1.5884746 ] ... 
[2.651416 1.8198512 2.651416 ... 1.3615257 1.764124 1.3035463 ] [2.3651443 2.4560905 2.1707277 ... 1.7815241 1.4224067 1.7815241 ] [1.8437289 2.5091083 2.6210365 ... 2.069168 1.764124 2.069168 ]] ... [[2.003058 2.3751085 2.003058 ... 1.3949875 1.2720928 1.8657887 ] [3.0304027 2.1071367 1.542011 ... 1.6779072 2.3646135 2.8414664 ] [1.4267566 1.8586537 1.1303558 ... 1.3949875 1.1551232 1.8316399 ] ... [2.867655 1.723994 2.867655 ... 1.41467 1.1960841 2.2256498 ] [2.323044 2.1927667 2.323044 ... 1.8663787 1.6188432 1.8663787 ] [2.886367 2.1105232 2.886367 ... 1.41467 1.2086151 2.2256498 ]] [[2.0747404 1.8370131 2.0747404 ... 1.9881701 1.5747297 1.9881701 ] [2.312898 2.615473 2.312898 ... 1.9433602 2.1003244 2.9507482 ] [2.0747404 1.8084453 2.0747404 ... 1.7862494 2.6915424 1.6485871 ] ... [1.7704976 1.3893603 2.1142783 ... 2.2956746 1.2722051 2.2956746 ] [1.7366307 1.4478003 1.7366307 ... 2.3559778 2.3508034 2.3559778 ] [1.6264614 1.3893603 2.1142783 ... 2.051232 1.8474233 2.051232 ]] [[2.7171967 2.3751085 2.003058 ... 1.387877 1.8075024 1.488789 ] [3.0304027 2.1071367 1.9006708 ... 1.5499635 2.3646135 1.1658462 ] [2.7171967 1.049777 1.568004 ... 1.387877 1.8075024 1.8316399 ] ... [1.8362153 1.723994 1.8362153 ... 1.3762399 1.7198751 2.2256498 ] [2.323044 2.5422673 2.323044 ... 1.8663787 1.6188432 1.8663787 ] [2.886367 2.1105232 2.886367 ... 1.2602065 1.7198751 2.2256498 ]]] [[[1.7076547 1.4970099 1.7076547 ... 1.9992095 2.6702302 1.7410154 ] [1.3704007 1.3760384 1.3704007 ... 1.9526285 1.5927038 1.812477 ] [2.0595543 1.8591155 1.2451471 ... 1.9992095 1.9499632 2.3883338 ] ... [1.4033941 2.6829793 1.5010403 ... 1.759435 1.5767275 1.4857727 ] [1.7349226 2.1898012 1.7349226 ... 1.2506889 2.0521164 2.5449774 ] [1.9434148 2.6829793 1.9434148 ... 1.759435 1.5767275 1.5347979 ]] [[2.0504053 1.4649773 1.9385779 ... 2.4126084 2.227167 2.4126084 ] [1.7488307 1.7604873 1.7488307 ... 2.1580982 2.2234302 2.1580982 ] [2.481529 1.4649773 1.9385779 ... 
1.4171748 2.227167 1.1178328 ] ... [1.865345 2.6998584 1.8830059 ... 2.7521732 1.5908346 2.3160577 ] [2.1033804 2.3288424 2.1033804 ... 2.5436435 2.2923505 2.5436435 ] [1.3213012 2.6998584 2.243911 ... 2.7521732 1.5908346 2.6346538 ]] [[1.7076547 2.2028985 1.7322903 ... 1.5247 1.9499632 1.7410154 ] [1.3704007 2.3081405 1.3704007 ... 1.9526285 1.1243391 1.812477 ] [2.5112667 1.8591155 2.5112667 ... 2.0468721 1.9499632 2.0717268 ] ... [1.4033941 2.6829793 1.6827655 ... 1.759435 1.5767275 1.0936726 ] [1.8203084 2.1898012 1.7349226 ... 1.2506889 2.0521164 2.5449774 ] [1.8735145 2.6829793 1.8735145 ... 2.0387795 1.5767275 1.5347979 ]] ... [[2.0380623 1.390259 1.6080794 ... 2.589329 2.0305474 2.589329 ] [2.9181762 1.0251373 2.3273787 ... 1.8815849 2.056244 1.930716 ] [2.0380623 2.2697787 1.6080794 ... 2.589329 2.0305474 2.589329 ] ... [2.2784498 1.7948992 1.0668094 ... 1.748179 1.946129 1.3980614 ] [1.6852274 1.6530123 1.2798133 ... 2.3439782 2.7760117 2.3439782 ] [2.2784498 2.080815 1.8417864 ... 1.3980614 1.946129 1.3980614 ]] [[2.4426866 1.6205634 2.4426866 ... 1.9568222 2.1965678 1.9568222 ] [2.1064332 2.6750839 1.9389398 ... 2.3664253 2.0570648 2.591888 ] [2.4426866 1.6205634 2.4426866 ... 1.9568222 2.1965678 1.9568222 ] ... [1.9721556 1.3896301 1.3304693 ... 2.2919269 1.3568033 2.520306 ] [2.4258888 1.5341537 2.4258888 ... 2.0286682 1.5895212 2.0286682 ] [1.9721556 1.0271648 1.1440247 ... 2.2919269 1.3568033 2.520306 ]] [[1.6080794 1.0850333 1.6080794 ... 2.589329 2.3224373 2.589329 ] [2.3273787 2.5168207 2.3273787 ... 1.8815849 2.8375194 1.930716 ] [1.6080794 1.2496058 1.6080794 ... 2.589329 2.207166 2.589329 ] ... [2.2784498 1.7948992 1.0560254 ... 1.748179 1.8683041 1.5697874 ] [1.6852274 1.7290341 1.2798133 ... 2.3439782 1.9931039 2.3439782 ] [2.2784498 2.921278 1.8417864 ... 1.3980614 1.8683041 1.3980614 ]]]]]; ov_res: [[[[[2.593326 1.57766 2.593326 ... 1.7828243 2.5172486 1.7828243 ] [2.0381358 2.6370108 1.9501199 ... 
2.1252234 1.8103846 2.1252234 ] [2.593326 1.57766 2.593326 ... 1.0410888 2.5172486 1.0410888 ] ... [1.3959583 2.0255585 1.3959583 ... 2.2170653 1.7580515 2.2170653 ] [1.719129 1.2580305 1.9651033 ... 2.55451 2.3174422 2.55451 ] [2.705466 2.0255585 2.705466 ... 1.7868037 1.635284 2.0199263 ]] [[1.4783162 1.6488823 1.4783162 ... 1.211018 2.2473378 1.9942151 ] [1.9240742 2.8135061 1.9240742 ... 2.326782 3.0233667 2.326782 ] [1.5244193 2.2278466 1.7790271 ... 2.1364622 2.2473378 2.1364622 ] ... [2.1535401 2.2359767 2.3159456 ... 2.5734076 1.57718 2.5734076 ] [1.7723374 1.9113758 2.1937358 ... 1.8139653 1.9531169 1.920295 ] [2.1535401 2.2359767 1.7618709 ... 2.2744193 2.963406 2.2744193 ]] [[2.593326 2.2380555 2.593326 ... 1.888171 1.6640587 1.3528693 ] [2.065177 2.6370108 2.065177 ... 2.2930884 1.2577069 2.2930884 ] [2.593326 1.57766 2.593326 ... 1.888171 1.6640587 1.3528693 ] ... [1.3959583 2.0255585 1.3959583 ... 0.85458285 1.7580515 1.3502489 ] [1.719129 1.7317663 2.1968126 ... 2.55451 2.3174422 2.55451 ] [2.705466 2.0255585 2.705466 ... 1.3269018 1.635284 1.3502489 ]] ... [[1.780304 2.169824 2.030005 ... 2.317796 1.8077742 2.317796 ] [1.4326495 1.4190686 1.4326495 ... 3.0105867 1.6474909 1.523232 ] [1.3667883 1.4738559 2.030005 ... 2.317796 1.5953146 2.317796 ] ... [2.4169345 2.3931253 2.4169345 ... 1.8113884 1.7561781 1.9480495 ] [2.247161 3.0024228 2.247161 ... 1.1188045 1.976133 1.1119533 ] [2.73929 2.3931253 2.73929 ... 1.8113884 1.9104877 1.9480495 ]] [[4.083977 1.3506185 4.083977 ... 1.5086981 3.3834727 1.695213 ] [1.8202144 3.3125937 1.8202144 ... 2.8265991 1.9588405 1.9596586 ] [4.083977 1.5830877 4.083977 ... 1.4524798 3.3834727 1.6518695 ] ... [1.9855862 1.8617033 1.9855862 ... 1.8876904 1.590215 2.217256 ] [0.94586515 3.2140508 0.9706841 ... 1.424623 2.1236174 1.424623 ] [1.4848453 1.4936099 1.4848453 ... 2.0116715 1.590215 2.217256 ]] [[1.780304 2.169824 2.1516798 ... 2.317796 1.8077742 2.317796 ] [2.1124642 1.4190686 2.1124642 ... 
3.0105867 0.91055536 1.523232 ] [1.3667883 1.4738559 2.030005 ... 2.317796 1.5953146 2.317796 ] ... [1.76847 2.3931253 1.404493 ... 1.9168949 1.5271022 1.8113884 ] [2.247161 3.0024228 2.247161 ... 1.1188045 1.976133 1.8736109 ] [2.0592246 2.3931253 1.9318273 ... 1.9168949 1.9104877 1.8733302 ]]] [[[1.2103575 2.0758812 1.2103575 ... 2.1430757 1.416237 2.1430757 ] [1.889362 1.8587688 1.889362 ... 1.6098188 1.564992 1.6098188 ] [1.2103575 2.0758812 1.2103575 ... 2.1430757 1.416237 2.1430757 ] ... [2.1405647 1.8198512 1.8437289 ... 1.1705078 2.0969205 1.3035463 ] [2.3651443 2.4560905 2.5289385 ... 1.7815241 1.5016319 1.7815241 ] [2.1405647 2.5091083 1.860255 ... 1.0350791 2.0969205 1.3035463 ]] [[2.3975644 1.5897384 2.3975644 ... 2.5081182 1.1643088 2.5081182 ] [1.5989538 1.9263744 1.5989538 ... 1.2265111 2.2190506 2.2607906 ] [2.5794208 1.6632476 2.3975644 ... 1.3339999 1.6709739 1.9606684 ] ... [2.2838895 2.2805283 1.4560105 ... 1.7044142 2.0388308 1.744581 ] [2.5458372 2.4733253 1.1727551 ... 1.9892762 2.5249352 1.9892762 ] [2.2838895 1.7193675 1.4560105 ... 1.7044142 2.0388308 1.744581 ]] [[1.2103575 2.7243972 1.2753726 ... 1.3858749 1.789367 1.3858749 ] [2.2897494 2.478848 2.2897494 ... 2.0551832 1.537464 2.0551832 ] [1.2103575 2.7243972 1.2753726 ... 1.3909916 1.789367 1.5884746 ] ... [2.651416 1.8198512 2.651416 ... 1.3615257 1.764124 1.3035463 ] [2.3651443 2.4560905 2.1707277 ... 1.7815241 1.4224067 1.7815241 ] [1.8437289 2.5091083 2.6210365 ... 2.069168 1.764124 2.069168 ]] ... [[2.003058 2.3751085 2.003058 ... 1.3949875 1.2720928 1.8657887 ] [3.0304027 2.1071367 1.542011 ... 1.6779072 2.3646135 2.8414664 ] [1.4267566 1.8586537 1.1303558 ... 1.3949875 1.1551232 1.8316399 ] ... [2.867655 1.723994 2.867655 ... 1.41467 1.1960841 2.2256498 ] [2.323044 2.1927667 2.323044 ... 1.8663787 1.6188432 1.8663787 ] [2.886367 2.1105232 2.886367 ... 1.41467 1.2086151 2.2256498 ]] [[2.0747404 1.8370131 2.0747404 ... 
1.9881701 1.5747297 1.9881701 ] [2.312898 2.615473 2.312898 ... 1.9433602 2.1003244 2.9507482 ] [2.0747404 1.8084453 2.0747404 ... 1.7862494 2.6915424 1.6485871 ] ... [1.7704976 1.3893603 2.1142783 ... 2.2956746 1.2722051 2.2956746 ] [1.7366307 1.4478003 1.7366307 ... 2.3559778 2.3508034 2.3559778 ] [1.6264614 1.3893603 2.1142783 ... 2.051232 1.8474233 2.051232 ]] [[2.7171967 2.3751085 2.003058 ... 1.387877 1.8075024 1.488789 ] [3.0304027 2.1071367 1.9006708 ... 1.5499635 2.3646135 1.1658462 ] [2.7171967 1.049777 1.568004 ... 1.387877 1.8075024 1.8316399 ] ... [1.8362153 1.723994 1.8362153 ... 1.3762399 1.7198751 2.2256498 ] [2.323044 2.5422673 2.323044 ... 1.8663787 1.6188432 1.8663787 ] [2.886367 2.1105232 2.886367 ... 1.2602065 1.7198751 2.2256498 ]]] [[[1.7076547 1.4970099 1.7076547 ... 1.9992095 2.6702302 1.7410154 ] [1.3704007 1.3760384 1.3704007 ... 1.9526285 1.5927038 1.812477 ] [2.0595543 1.8591155 1.2451471 ... 1.9992095 1.9499632 2.3883338 ] ... [1.4033941 2.6829793 1.5010403 ... 1.759435 1.5767275 1.4857727 ] [1.7349226 2.1898012 1.7349226 ... 1.2506889 2.0521164 2.5449774 ] [1.9434148 2.6829793 1.9434148 ... 1.759435 1.5767275 1.5347979 ]] [[2.0504053 1.4649773 1.9385779 ... 2.4126084 2.227167 2.4126084 ] [1.7488307 1.7604873 1.7488307 ... 2.1580982 2.2234302 2.1580982 ] [2.481529 1.4649773 1.9385779 ... 1.4171748 2.227167 1.1178328 ] ... [1.865345 2.6998584 1.8830059 ... 2.7521732 1.5908346 2.3160577 ] [2.1033804 2.3288424 2.1033804 ... 2.5436435 2.2923505 2.5436435 ] [1.3213012 2.6998584 2.243911 ... 2.7521732 1.5908346 2.6346538 ]] [[1.7076547 2.2028985 1.7322903 ... 1.5247 1.9499632 1.7410154 ] [1.3704007 2.3081405 1.3704007 ... 1.9526285 1.1243391 1.812477 ] [2.5112667 1.8591155 2.5112667 ... 2.0468721 1.9499632 2.0717268 ] ... [1.4033941 2.6829793 1.6827655 ... 1.759435 1.5767275 1.0936726 ] [1.8203084 2.1898012 1.7349226 ... 1.2506889 2.0521164 2.5449774 ] [1.8735145 2.6829793 1.8735145 ... 2.0387795 1.5767275 1.5347979 ]] ... 
[[2.0380623 1.390259 1.6080794 ... 2.589329 2.0305474 2.589329 ] [2.9181762 1.0251373 2.3273787 ... 1.8815849 2.056244 1.930716 ] [2.0380623 2.2697787 1.6080794 ... 2.589329 2.0305474 2.589329 ] ... [2.2784498 1.7948992 1.0668094 ... 1.748179 1.946129 1.3980614 ] [1.6852274 1.6530123 1.2798133 ... 2.3439782 2.7760117 2.3439782 ] [2.2784498 2.080815 1.8417864 ... 1.3980614 1.946129 1.3980614 ]] [[2.4426866 1.6205634 2.4426866 ... 1.9568222 2.1965678 1.9568222 ] [2.1064332 2.6750839 1.9389398 ... 2.3664253 2.0570648 2.591888 ] [2.4426866 1.6205634 2.4426866 ... 1.9568222 2.1965678 1.9568222 ] ... [1.9721556 1.3896301 1.3304693 ... 2.2919269 1.3568033 2.520306 ] [2.4258888 1.5341537 2.4258888 ... 2.0286682 1.5895212 2.0286682 ] [1.9721556 1.0271648 1.1440247 ... 2.2919269 1.3568033 2.520306 ]] [[1.6080794 1.0850333 1.6080794 ... 2.589329 2.3224373 2.589329 ] [2.3273787 2.5168207 2.3273787 ... 1.8815849 2.8375194 1.930716 ] [1.6080794 1.2496058 1.6080794 ... 2.589329 2.207166 2.589329 ] ... [2.2784498 1.7948992 1.0560254 ... 1.748179 1.8683041 1.5697874 ] [1.6852274 1.7290341 1.2798133 ... 2.3439782 1.9931039 2.3439782 ] [2.2784498 2.921278 1.8417864 ... 1.3980614 1.8683041 1.3980614 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': [1, 1, 1], 'padding': 1} ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5882.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2, 2]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.stride : int[] = prim::Constant[value=[1, 1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.stride, %self.stride, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%6) fw_re: [[[[[1.2274318 2.0615716 1.2716876 ... 1.4368101 1.8669571 1.4368101 ] [1.5198445 1.6688614 1.9929849 ... 1.1647853 2.0655007 1.1647853 ] [1.8622782 2.0615716 1.8622782 ... 1.4368101 1.8669571 1.4368101 ] ... [1.2103437 1.4109671 1.8463603 ... 0.6515424 1.9156891 0.64891475] [1.9995421 1.5435193 1.9995421 ... 2.0397017 2.027116 1.687662 ] [1.2103437 1.4109671 1.8463603 ... 0.6515424 1.203479 0.64891475]] [[1.2675748 2.1807184 1.2675748 ... 1.8397615 1.4305247 1.8397615 ] [1.396834 1.2860063 1.396834 ... 2.0534372 1.8916374 2.0534372 ] [2.071001 2.1807184 2.071001 ... 1.8397615 2.8779013 1.8397615 ] ... [1.506811 1.0641402 1.506811 ... 1.6883893 1.2196095 1.2522577 ] [1.2123127 1.6754987 1.3881946 ... 1.7528814 2.1607149 1.7528814 ] [1.506811 1.0641402 1.506811 ... 1.6883893 1.2196095 1.2522577 ]] [[1.2274318 2.0615716 1.2716876 ... 1.4368101 2.0819254 1.4368101 ] [1.5198445 1.6688614 1.9929849 ... 1.1647853 2.0655007 1.1647853 ] [1.8622782 2.0615716 1.8622782 ... 1.504463 2.0819254 1.4368101 ] ... [2.7742016 1.872359 2.7742016 ... 0.9953662 1.9156891 0.64891475] [1.9995421 1.5435193 1.9995421 ... 2.0397017 3.103499 1.749424 ] [2.7742016 1.872359 2.7742016 ... 0.9953662 1.6071653 0.64891475]] ... [[1.4950632 2.8284419 1.4950632 ... 0.92829823 1.7136102 0.92829823] [1.7372317 1.6335645 2.2165198 ... 1.2122527 1.9638761 1.066234 ] [1.4950632 2.8284419 1.4950632 ... 
2.2150877 1.7136102 2.2150877 ] ... [0.8411044 1.9095159 1.5160769 ... 2.486315 2.8503761 2.2438366 ] [1.5671656 1.5505702 1.5671656 ... 1.5512836 2.9898033 1.5512836 ] [0.81956005 1.9095159 1.5160769 ... 2.486315 2.8503761 2.2438366 ]] [[1.4082627 2.3727505 1.542777 ... 1.0208247 1.1145542 1.0208247 ] [0.6308812 1.4283661 1.8299932 ... 1.3381654 1.39299 1.3381654 ] [2.3869214 2.3727505 2.3869214 ... 1.0208247 1.1268035 1.0208247 ] ... [2.0539713 1.974124 2.0539713 ... 2.9008834 1.6835177 1.6038945 ] [2.3163657 2.2360084 2.3163657 ... 1.8294017 1.8495041 1.8294017 ] [2.0539713 1.3337508 2.0539713 ... 2.9008834 1.6835177 1.6038945 ]] [[1.4950632 1.0156764 1.4950632 ... 0.92829823 1.5011287 0.92829823] [1.0972347 1.6335645 2.2165198 ... 1.066234 1.9638761 1.066234 ] [1.4950632 1.0156764 1.4950632 ... 2.2150877 1.5011287 2.2150877 ] ... [0.8411044 1.5429252 1.5160769 ... 2.486315 2.8503761 1.1809058 ] [1.5671656 1.4260246 1.5671656 ... 1.5512836 1.7215918 1.5512836 ] [0.81956005 1.5429252 1.5160769 ... 2.486315 2.8503761 1.1809058 ]]] [[[0.8870065 1.8263383 0.8870065 ... 1.4807587 1.6099186 1.4807587 ] [1.7325965 0.6268071 1.7325965 ... 1.7707701 1.1928816 1.7707701 ] [0.8870065 1.8263383 1.51215 ... 1.4807587 1.6099186 1.4807587 ] ... [0.7959122 2.070829 0.7959122 ... 1.625718 1.9626776 1.2398918 ] [1.6881734 1.0567691 1.6881734 ... 1.5525475 2.677825 1.5525475 ] [0.7959122 0.85448545 0.7959122 ... 1.625718 1.9626776 1.2398918 ]] [[1.0700889 2.674477 1.0700889 ... 2.5866046 2.1042926 1.9119829 ] [1.9163536 2.9590492 1.9163536 ... 2.4689994 2.2961853 2.4689994 ] [1.0700889 2.674477 1.0700889 ... 2.5866046 2.1042926 1.9119829 ] ... [2.2553682 2.2356977 2.2553682 ... 2.154082 1.8854872 2.154082 ] [1.811233 1.3978863 1.811233 ... 1.3942934 1.7407776 1.3942934 ] [2.2553682 2.2356977 2.2553682 ... 2.154082 1.836408 2.154082 ]] [[0.8870065 1.8263383 1.0009433 ... 1.6300905 1.6099186 1.6300905 ] [1.7325965 1.4363879 1.7325965 ... 
1.7707701 1.1928816 1.7707701 ] [0.8870065 1.8263383 1.51215 ... 1.6300905 1.6099186 1.6300905 ] ... [2.1615496 2.070829 2.1615496 ... 1.625718 1.9626776 1.5419092 ] [1.6881734 1.7756141 1.6881734 ... 1.7357165 2.677825 1.7357165 ] [0.7959122 0.85448545 1.0898051 ... 1.625718 1.9626776 1.5419092 ]] ... [[1.3483205 1.3750072 1.5127347 ... 1.4884179 2.4366894 1.4884179 ] [1.0456718 1.7679143 1.0456718 ... 1.1955221 2.4885314 0.9080943 ] [2.9493122 2.1999674 2.9493122 ... 1.4884179 2.4366894 1.4884179 ] ... [1.6256871 2.4174726 1.6256871 ... 1.8042018 1.7136319 1.8042018 ] [1.1584382 2.0329342 2.1071627 ... 1.6466066 2.2129195 1.1806117 ] [0.87962395 1.4482573 1.1028622 ... 0.97560984 1.6994241 0.97560984]] [[1.7856311 0.7449641 1.7856311 ... 1.5832199 1.6891799 1.5832199 ] [1.171588 2.1512456 2.3838828 ... 2.1624231 2.058517 1.6373043 ] [1.7856311 0.91722816 1.7856311 ... 1.5832199 2.28363 1.5832199 ] ... [1.8372544 1.2482209 1.8372544 ... 1.7011983 1.6978241 1.7011983 ] [2.966333 2.1342733 2.966333 ... 1.7791128 1.8543786 1.7791128 ] [1.8372544 1.2009273 1.8372544 ... 1.7011983 1.467786 1.7011983 ]] [[1.106851 1.3750072 1.106851 ... 1.4884179 2.4366894 1.4884179 ] [0.61170554 1.7679143 0.64522856 ... 1.1955221 2.4885314 0.9080943 ] [2.9493122 2.1999674 2.9493122 ... 1.4884179 2.4366894 1.4884179 ] ... [1.6256871 2.4174726 1.6256871 ... 1.8042018 1.7136319 1.8042018 ] [1.1584382 2.0329342 1.1584382 ... 1.1806117 1.7699897 1.1806117 ] [0.87962395 0.8147833 1.1028622 ... 0.5350922 1.6994241 0.5350922 ]]] [[[1.8534101 2.1202252 1.8534101 ... 1.4271678 1.3991572 1.4271678 ] [1.3726511 2.3164456 2.3559535 ... 2.6925514 1.7272232 2.6925514 ] [1.8534101 2.1202252 1.8534101 ... 1.4271678 1.6143519 1.4271678 ] ... [0.911365 1.3135611 1.7296818 ... 2.1837769 2.2999132 1.4842292 ] [1.3866163 1.5503019 1.3866163 ... 2.1214077 2.1444337 1.8786038 ] [0.911365 1.3135611 1.7296818 ... 2.1837769 1.3374467 1.4842292 ]] [[2.4949574 2.0128293 2.4949574 ... 
1.7803123 3.0983534 1.7803123 ] [2.5422406 2.4247823 2.5422406 ... 2.224904 1.9651119 2.224904 ] [2.4949574 2.0128293 2.4949574 ... 2.2728179 3.0983534 2.2728179 ] ... [2.8278213 1.2742358 2.8278213 ... 2.2841554 1.8554375 2.2841554 ] [1.6316371 1.6587561 1.8059084 ... 1.3930802 2.1997657 1.3930802 ] [2.17258 1.2742358 2.67265 ... 1.2869103 1.8554375 1.1714371 ]] [[1.8534101 2.1202252 1.8534101 ... 1.4271678 1.3991572 1.4271678 ] [1.7884597 2.3164456 2.3559535 ... 2.6925514 1.7272232 2.6925514 ] [1.8534101 2.1202252 1.8534101 ... 1.4271678 1.6143519 1.4271678 ] ... [1.9704901 1.3135611 1.9704901 ... 2.1837769 2.2999132 1.4842292 ] [1.5301037 1.5503019 1.5301037 ... 2.1214077 2.1444337 1.8786038 ] [1.9704901 1.3135611 1.9704901 ... 2.1837769 1.5443827 1.4842292 ]] ... [[2.6960843 0.97020143 2.6960843 ... 1.0584224 2.1699193 1.0048379 ] [1.1393718 1.947077 1.1393718 ... 1.3623749 1.3973873 0.8317797 ] [2.6960843 0.97020143 2.6960843 ... 1.0584224 2.1699193 1.0048379 ] ... [2.3494408 1.567527 2.3494408 ... 1.7510945 1.9740539 1.7510945 ] [1.4620035 2.0474129 1.4620035 ... 2.22306 2.10929 2.22306 ] [2.3494408 1.567527 2.3494408 ... 1.7510945 1.9740539 1.7510945 ]] [[1.6847794 3.177117 2.616716 ... 2.1856937 1.7945888 1.4071683 ] [2.1806805 2.8624644 2.1806805 ... 1.7509862 1.8558098 1.4611845 ] [1.6847794 3.177117 2.616716 ... 2.1856937 1.7945888 1.4071683 ] ... [1.4719872 1.2448181 1.4719872 ... 2.1986046 1.580833 1.4921988 ] [1.310898 1.6723328 1.9146762 ... 1.9336071 1.6906565 1.9336071 ] [1.4472934 1.2200991 1.4472934 ... 2.1986046 1.580833 1.4921988 ]] [[1.5176244 0.97020143 1.6350127 ... 1.0584224 2.1699193 1.0048379 ] [1.1393718 1.8149894 1.1393718 ... 1.1530997 1.0842696 0.8317797 ] [1.5176244 0.97020143 1.6350127 ... 1.0584224 2.1699193 1.0048379 ] ... [2.3494408 0.98213303 2.3494408 ... 1.7510945 1.8221353 1.7510945 ] [1.4620035 1.9713976 1.4620035 ... 1.7335769 2.10929 1.7335769 ] [2.3494408 0.98213303 2.3494408 ... 
1.7510945 1.467928 1.7510945 ]]]]]; ov_res: [[[[[1.2274318 2.0615716 1.2716876 ... 1.4368101 1.8669571 1.4368101 ] [1.5198445 1.6688614 1.9929849 ... 1.1647853 2.0655007 1.1647853 ] [1.8622782 2.0615716 1.8622782 ... 1.4368101 1.8669571 1.4368101 ] ... [1.2103437 1.4109671 1.8463603 ... 0.6515424 1.9156891 0.64891475] [1.9995421 1.5435193 1.9995421 ... 2.0397017 2.027116 1.687662 ] [1.2103437 1.4109671 1.8463603 ... 0.6515424 1.203479 0.64891475]] [[1.2675748 2.1807184 1.2675748 ... 1.8397615 1.4305247 1.8397615 ] [1.396834 1.2860063 1.396834 ... 2.0534372 1.8916374 2.0534372 ] [2.071001 2.1807184 2.071001 ... 1.8397615 2.8779013 1.8397615 ] ... [1.506811 1.0641402 1.506811 ... 1.6883893 1.2196095 1.2522577 ] [1.2123127 1.6754987 1.3881946 ... 1.7528814 2.1607149 1.7528814 ] [1.506811 1.0641402 1.506811 ... 1.6883893 1.2196095 1.2522577 ]] [[1.2274318 2.0615716 1.2716876 ... 1.4368101 2.0819254 1.4368101 ] [1.5198445 1.6688614 1.9929849 ... 1.1647853 2.0655007 1.1647853 ] [1.8622782 2.0615716 1.8622782 ... 1.504463 2.0819254 1.4368101 ] ... [2.7742016 1.872359 2.7742016 ... 0.9953662 1.9156891 0.64891475] [1.9995421 1.5435193 1.9995421 ... 2.0397017 3.103499 1.749424 ] [2.7742016 1.872359 2.7742016 ... 0.9953662 1.6071653 0.64891475]] ... [[1.4950632 2.8284419 1.4950632 ... 0.92829823 1.7136102 0.92829823] [1.7372317 1.6335645 2.2165198 ... 1.2122527 1.9638761 1.066234 ] [1.4950632 2.8284419 1.4950632 ... 2.2150877 1.7136102 2.2150877 ] ... [0.8411044 1.9095159 1.5160769 ... 2.486315 2.8503761 2.2438366 ] [1.5671656 1.5505702 1.5671656 ... 1.5512836 2.9898033 1.5512836 ] [0.81956005 1.9095159 1.5160769 ... 2.486315 2.8503761 2.2438366 ]] [[1.4082627 2.3727505 1.542777 ... 1.0208247 1.1145542 1.0208247 ] [0.6308812 1.4283661 1.8299932 ... 1.3381654 1.39299 1.3381654 ] [2.3869214 2.3727505 2.3869214 ... 1.0208247 1.1268035 1.0208247 ] ... [2.0539713 1.974124 2.0539713 ... 2.9008834 1.6835177 1.6038945 ] [2.3163657 2.2360084 2.3163657 ... 
1.8294017 1.8495041 1.8294017 ] [2.0539713 1.3337508 2.0539713 ... 2.9008834 1.6835177 1.6038945 ]] [[1.4950632 1.0156764 1.4950632 ... 0.92829823 1.5011287 0.92829823] [1.0972347 1.6335645 2.2165198 ... 1.066234 1.9638761 1.066234 ] [1.4950632 1.0156764 1.4950632 ... 2.2150877 1.5011287 2.2150877 ] ... [0.8411044 1.5429252 1.5160769 ... 2.486315 2.8503761 1.1809058 ] [1.5671656 1.4260246 1.5671656 ... 1.5512836 1.7215918 1.5512836 ] [0.81956005 1.5429252 1.5160769 ... 2.486315 2.8503761 1.1809058 ]]] [[[0.8870065 1.8263383 0.8870065 ... 1.4807587 1.6099186 1.4807587 ] [1.7325965 0.6268071 1.7325965 ... 1.7707701 1.1928816 1.7707701 ] [0.8870065 1.8263383 1.51215 ... 1.4807587 1.6099186 1.4807587 ] ... [0.7959122 2.070829 0.7959122 ... 1.625718 1.9626776 1.2398918 ] [1.6881734 1.0567691 1.6881734 ... 1.5525475 2.677825 1.5525475 ] [0.7959122 0.85448545 0.7959122 ... 1.625718 1.9626776 1.2398918 ]] [[1.0700889 2.674477 1.0700889 ... 2.5866046 2.1042926 1.9119829 ] [1.9163536 2.9590492 1.9163536 ... 2.4689994 2.2961853 2.4689994 ] [1.0700889 2.674477 1.0700889 ... 2.5866046 2.1042926 1.9119829 ] ... [2.2553682 2.2356977 2.2553682 ... 2.154082 1.8854872 2.154082 ] [1.811233 1.3978863 1.811233 ... 1.3942934 1.7407776 1.3942934 ] [2.2553682 2.2356977 2.2553682 ... 2.154082 1.836408 2.154082 ]] [[0.8870065 1.8263383 1.0009433 ... 1.6300905 1.6099186 1.6300905 ] [1.7325965 1.4363879 1.7325965 ... 1.7707701 1.1928816 1.7707701 ] [0.8870065 1.8263383 1.51215 ... 1.6300905 1.6099186 1.6300905 ] ... [2.1615496 2.070829 2.1615496 ... 1.625718 1.9626776 1.5419092 ] [1.6881734 1.7756141 1.6881734 ... 1.7357165 2.677825 1.7357165 ] [0.7959122 0.85448545 1.0898051 ... 1.625718 1.9626776 1.5419092 ]] ... [[1.3483205 1.3750072 1.5127347 ... 1.4884179 2.4366894 1.4884179 ] [1.0456718 1.7679143 1.0456718 ... 1.1955221 2.4885314 0.9080943 ] [2.9493122 2.1999674 2.9493122 ... 1.4884179 2.4366894 1.4884179 ] ... [1.6256871 2.4174726 1.6256871 ... 
1.8042018 1.7136319 1.8042018 ] [1.1584382 2.0329342 2.1071627 ... 1.6466066 2.2129195 1.1806117 ] [0.87962395 1.4482573 1.1028622 ... 0.97560984 1.6994241 0.97560984]] [[1.7856311 0.7449641 1.7856311 ... 1.5832199 1.6891799 1.5832199 ] [1.171588 2.1512456 2.3838828 ... 2.1624231 2.058517 1.6373043 ] [1.7856311 0.91722816 1.7856311 ... 1.5832199 2.28363 1.5832199 ] ... [1.8372544 1.2482209 1.8372544 ... 1.7011983 1.6978241 1.7011983 ] [2.966333 2.1342733 2.966333 ... 1.7791128 1.8543786 1.7791128 ] [1.8372544 1.2009273 1.8372544 ... 1.7011983 1.467786 1.7011983 ]] [[1.106851 1.3750072 1.106851 ... 1.4884179 2.4366894 1.4884179 ] [0.61170554 1.7679143 0.64522856 ... 1.1955221 2.4885314 0.9080943 ] [2.9493122 2.1999674 2.9493122 ... 1.4884179 2.4366894 1.4884179 ] ... [1.6256871 2.4174726 1.6256871 ... 1.8042018 1.7136319 1.8042018 ] [1.1584382 2.0329342 1.1584382 ... 1.1806117 1.7699897 1.1806117 ] [0.87962395 0.8147833 1.1028622 ... 0.5350922 1.6994241 0.5350922 ]]] [[[1.8534101 2.1202252 1.8534101 ... 1.4271678 1.3991572 1.4271678 ] [1.3726511 2.3164456 2.3559535 ... 2.6925514 1.7272232 2.6925514 ] [1.8534101 2.1202252 1.8534101 ... 1.4271678 1.6143519 1.4271678 ] ... [0.911365 1.3135611 1.7296818 ... 2.1837769 2.2999132 1.4842292 ] [1.3866163 1.5503019 1.3866163 ... 2.1214077 2.1444337 1.8786038 ] [0.911365 1.3135611 1.7296818 ... 2.1837769 1.3374467 1.4842292 ]] [[2.4949574 2.0128293 2.4949574 ... 1.7803123 3.0983534 1.7803123 ] [2.5422406 2.4247823 2.5422406 ... 2.224904 1.9651119 2.224904 ] [2.4949574 2.0128293 2.4949574 ... 2.2728179 3.0983534 2.2728179 ] ... [2.8278213 1.2742358 2.8278213 ... 2.2841554 1.8554375 2.2841554 ] [1.6316371 1.6587561 1.8059084 ... 1.3930802 2.1997657 1.3930802 ] [2.17258 1.2742358 2.67265 ... 1.2869103 1.8554375 1.1714371 ]] [[1.8534101 2.1202252 1.8534101 ... 1.4271678 1.3991572 1.4271678 ] [1.7884597 2.3164456 2.3559535 ... 2.6925514 1.7272232 2.6925514 ] [1.8534101 2.1202252 1.8534101 ... 1.4271678 1.6143519 1.4271678 ] ... 
[1.9704901 1.3135611 1.9704901 ... 2.1837769 2.2999132 1.4842292 ] [1.5301037 1.5503019 1.5301037 ... 2.1214077 2.1444337 1.8786038 ] [1.9704901 1.3135611 1.9704901 ... 2.1837769 1.5443827 1.4842292 ]] ... [[2.6960843 0.97020143 2.6960843 ... 1.0584224 2.1699193 1.0048379 ] [1.1393718 1.947077 1.1393718 ... 1.3623749 1.3973873 0.8317797 ] [2.6960843 0.97020143 2.6960843 ... 1.0584224 2.1699193 1.0048379 ] ... [2.3494408 1.567527 2.3494408 ... 1.7510945 1.9740539 1.7510945 ] [1.4620035 2.0474129 1.4620035 ... 2.22306 2.10929 2.22306 ] [2.3494408 1.567527 2.3494408 ... 1.7510945 1.9740539 1.7510945 ]] [[1.6847794 3.177117 2.616716 ... 2.1856937 1.7945888 1.4071683 ] [2.1806805 2.8624644 2.1806805 ... 1.7509862 1.8558098 1.4611845 ] [1.6847794 3.177117 2.616716 ... 2.1856937 1.7945888 1.4071683 ] ... [1.4719872 1.2448181 1.4719872 ... 2.1986046 1.580833 1.4921988 ] [1.310898 1.6723328 1.9146762 ... 1.9336071 1.6906565 1.9336071 ] [1.4472934 1.2200991 1.4472934 ... 2.1986046 1.580833 1.4921988 ]] [[1.5176244 0.97020143 1.6350127 ... 1.0584224 2.1699193 1.0048379 ] [1.1393718 1.8149894 1.1393718 ... 1.1530997 1.0842696 0.8317797 ] [1.5176244 0.97020143 1.6350127 ... 1.0584224 2.1699193 1.0048379 ] ... [2.3494408 0.98213303 2.3494408 ... 1.7510945 1.8221353 1.7510945 ] [1.4620035 1.9713976 1.4620035 ... 1.7335769 2.10929 1.7335769 ] [2.3494408 0.98213303 2.3494408 ... 1.7510945 1.467928 1.7510945 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': [3, 3, 3], 'stride': [3, 3, 3], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5884.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2, 2]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.kernel_size : int[] = prim::Constant[value=[3, 3, 3]]() %6 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.kernel_size, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%6) fw_re: [[[[[1.5923642 1.8619971 1.3838047 1.5645437 ] [1.5870694 1.9021969 1.6582625 1.952618 ] [2.5005186 2.1606758 1.6978916 1.4519856 ] [2.781803 2.5281157 2.0093193 1.3140807 ]] [[1.1183984 1.7888081 2.0711806 2.4308872 ] [1.9034265 3.339002 1.2859609 1.7151821 ] [1.3220867 2.2518342 1.6860584 2.2325706 ] [1.360552 1.591878 1.5122533 2.0423193 ]] [[2.8667312 2.7991202 1.6943687 1.4076889 ] [1.7669979 1.5594739 1.7793968 1.101102 ] [1.8384573 2.1640627 2.492733 1.6147416 ] [3.060151 1.9563856 2.6234214 1.5346462 ]] [[1.9931314 2.4586184 2.0979874 2.5952187 ] [1.0865617 2.310755 1.8413868 2.872402 ] [1.9887888 2.8423965 1.5947524 1.8108046 ] [2.569795 2.1584654 1.3490086 2.190041 ]]] [[[2.6678996 2.0007625 1.8894447 1.0721406 ] [1.78589 1.945432 1.6811675 1.1896347 ] [1.8355595 1.890862 1.1568846 1.6080879 ] [1.6446462 1.8266212 1.1201622 2.011025 ]] [[2.4935327 1.64974 2.2196608 1.9862558 ] [1.7575614 2.3171082 0.9540748 1.4683123 ] [1.9249018 1.2581712 2.2667148 1.6485088 ] [2.621173 1.8429124 1.8649707 3.0162618 ]] [[1.5038744 2.5170944 1.781007 2.2441444 ] [1.4253653 2.6726654 3.013381 1.6678301 ] [2.3808556 1.2738609 1.9319149 1.5888973 ] [2.330999 1.8320366 2.349664 2.2785614 ]] [[2.527992 1.4344215 1.4065125 3.3453727 ] [1.6287546 1.4879861 1.8912337 2.086058 ] [2.1789408 1.1806296 2.3324277 2.1883051 ] [1.6868641 1.7576383 1.4691712 1.7337484 ]]] [[[1.7984849 1.6546243 
3.0938146 2.5502374 ] [1.4422055 2.1272242 1.3929662 1.5652437 ] [1.9853812 2.2049928 2.1813972 1.1845454 ] [1.7264055 1.6377444 2.5187166 1.9227601 ]] [[1.5092679 2.4776278 1.5257516 2.2695417 ] [2.0364285 2.842915 1.7794291 1.9355725 ] [1.4918025 1.606612 1.5483297 2.6637356 ] [1.6977463 1.5332124 3.8812768 2.5025434 ]] [[1.6188848 2.5653772 2.8635187 1.8952181 ] [2.0480344 3.3539236 1.8151373 1.637978 ] [1.6343381 1.8395355 2.1784184 2.0488472 ] [1.8296307 1.8776098 2.6647093 1.7322515 ]] [[1.782717 0.9740229 2.109286 2.0816534 ] [2.1946254 1.1872822 1.9875935 0.76953816] [2.0048156 2.417378 2.4712794 1.7876728 ] [1.9056035 2.1436963 1.553621 1.477242 ]]]]]; ov_res: [[[[[1.5923642 1.8619971 1.3838047 1.5645437 ] [1.5870694 1.9021969 1.6582625 1.952618 ] [2.5005186 2.1606758 1.6978916 1.4519856 ] [2.781803 2.5281157 2.0093193 1.3140807 ]] [[1.1183984 1.7888081 2.0711806 2.4308872 ] [1.9034265 3.339002 1.2859609 1.7151821 ] [1.3220867 2.2518342 1.6860584 2.2325706 ] [1.360552 1.591878 1.5122533 2.0423193 ]] [[2.8667312 2.7991202 1.6943687 1.4076889 ] [1.7669979 1.5594739 1.7793968 1.101102 ] [1.8384573 2.1640627 2.492733 1.6147416 ] [3.060151 1.9563856 2.6234214 1.5346462 ]] [[1.9931314 2.4586184 2.0979874 2.5952187 ] [1.0865617 2.310755 1.8413868 2.872402 ] [1.9887888 2.8423965 1.5947524 1.8108046 ] [2.569795 2.1584654 1.3490086 2.190041 ]]] [[[2.6678996 2.0007625 1.8894447 1.0721406 ] [1.78589 1.945432 1.6811675 1.1896347 ] [1.8355595 1.890862 1.1568846 1.6080879 ] [1.6446462 1.8266212 1.1201622 2.011025 ]] [[2.4935327 1.64974 2.2196608 1.9862558 ] [1.7575614 2.3171082 0.9540748 1.4683123 ] [1.9249018 1.2581712 2.2667148 1.6485088 ] [2.621173 1.8429124 1.8649707 3.0162618 ]] [[1.5038744 2.5170944 1.781007 2.2441444 ] [1.4253653 2.6726654 3.013381 1.6678301 ] [2.3808556 1.2738609 1.9319149 1.5888973 ] [2.330999 1.8320366 2.349664 2.2785614 ]] [[2.527992 1.4344215 1.4065125 3.3453727 ] [1.6287546 1.4879861 1.8912337 2.086058 ] [2.1789408 1.1806296 2.3324277 
2.1883051 ] [1.6868641 1.7576383 1.4691712 1.7337484 ]]] [[[1.7984849 1.6546243 3.0938146 2.5502374 ] [1.4422055 2.1272242 1.3929662 1.5652437 ] [1.9853812 2.2049928 2.1813972 1.1845454 ] [1.7264055 1.6377444 2.5187166 1.9227601 ]] [[1.5092679 2.4776278 1.5257516 2.2695417 ] [2.0364285 2.842915 1.7794291 1.9355725 ] [1.4918025 1.606612 1.5483297 2.6637356 ] [1.6977463 1.5332124 3.8812768 2.5025434 ]] [[1.6188848 2.5653772 2.8635187 1.8952181 ] [2.0480344 3.3539236 1.8151373 1.637978 ] [1.6343381 1.8395355 2.1784184 2.0488472 ] [1.8296307 1.8776098 2.6647093 1.7322515 ]] [[1.782717 0.9740229 2.109286 2.0816534 ] [2.1946254 1.1872822 1.9875935 0.76953816] [2.0048156 2.417378 2.4712794 1.7876728 ] [1.9056035 2.1436963 1.553621 1.477242 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pooling.py::TestPooling::test_max_pool3d[ ie_device:CPU - precision:FP32 - dilation:2 - ceil_mode:False - params:{'kernel_size': [3, 2, 1], 'stride': [3, 1, 1], 'padding': [0, 0, 0]} ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pooling.___torch_mangle_5886.aten_max_pool3d, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2, 2]]() %self.ceil_mode : bool = prim::Constant[value=0]() %self.padding : int[] = prim::Constant[value=[0, 0, 0]]() %self.stride : int[] = prim::Constant[value=[3, 1, 1]]() %self.kernel_size : int[] = prim::Constant[value=[3, 2, 1]]() %7 : Tensor = aten::max_pool3d(%x.1, %self.kernel_size, %self.stride, %self.padding, %2, %self.ceil_mode) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:868:11 return (%7) fw_re: [[[[[ 1.515505 1.0683846 0.55811113 ... 1.1385081 0.4396367 1.8347838 ] [ 1.9679797 0.8234326 1.5855572 ... 0.21705908 2.0658784 0.9454653 ] [ 1.515505 1.346375 1.0087359 ... 1.1385081 0.4396367 1.2203873 ] ... [ 1.2688503 1.9155341 1.875406 ... 0.6343748 1.9174914 1.8824799 ] [ 0.12001513 1.3182265 0.56960994 ... 0.06660467 1.6801012 2.2050183 ] [ 1.6557752 2.073625 1.875406 ... 0.43717095 2.1355245 1.8824799 ]] [[ 0.8973815 1.335578 1.7452749 ... 1.020721 0.40188697 1.5135015 ] [ 2.4951181 2.691518 2.7914279 ... 1.239267 1.2216402 0.3497976 ] [-0.05550718 0.22216196 1.7452749 ... 1.020721 0.78395844 1.0220904 ] ... [ 1.7463828 0.9060348 0.7680281 ... 1.2306932 0.8161857 0.81762797] [ 0.93777585 0.8318708 2.5996964 ... 0.11715803 1.3997463 1.4871495 ] [ 1.7463828 1.8877956 0.7254522 ... 1.389061 0.8161857 0.81762797]] [[ 1.6335863 0.9786245 0.9194438 ... 1.4481493 1.7387161 1.2158066 ] [-0.10287711 2.635368 2.3438318 ... 1.0698783 0.22719693 1.1434648 ] [ 1.6335863 0.9786245 2.0434341 ... 1.4481493 0.55238575 0.8137257 ] ... [ 1.1359646 1.4861193 1.1409574 ... 0.157877 1.4189677 1.9582062 ] [ 2.0045342 1.875244 -0.2581866 ... 1.38993 1.6480749 0.12740809] [ 1.4015568 1.4861193 1.1805195 ... 0.74121195 1.4189677 -0.50027806]] [[ 2.086168 0.816084 0.65163636 ... 
1.1608278 2.2953897 1.8937321 ] [ 1.6181071 1.5886586 1.6633998 ... 1.7233485 1.6309277 1.2273004 ] [ 2.086168 0.816084 1.58351 ... 1.0909054 0.10734022 0.8745798 ] ... [ 1.6850184 0.72779924 1.0090855 ... 1.7572823 1.7355031 1.4729525 ] [ 3.5731652 0.8260755 0.7941075 ... 1.8085135 0.22907232 -0.09446223] [ 2.7797534 0.72779924 1.0176063 ... 1.7572823 1.555761 0.5178409 ]]] [[[ 0.88655514 2.1896791 2.6785173 ... 3.540973 0.6936393 1.2435188 ] [ 0.35637113 2.197574 1.1681063 ... 2.1717696 2.8969262 1.498884 ] [ 0.42037526 2.1896791 2.1261826 ... 1.0207541 0.79394376 0.7390971 ] ... [ 2.3221104 0.8922189 1.5184319 ... 1.3879788 1.9894738 1.0697595 ] [ 1.146785 1.1507243 0.78011984 ... 2.2020383 0.77558297 -0.14454286] [ 1.3532795 1.6843157 2.2422745 ... 0.690982 1.9894738 1.0697595 ]] [[ 0.36944544 -0.06652301 2.1148193 ... 2.1224205 1.3347962 3.9514275 ] [ 1.484541 1.2460248 1.6186044 ... 1.3744875 2.2368398 0.2272603 ] [ 2.2987623 -0.0090254 2.1148193 ... 2.1224205 1.3347962 3.9514275 ] ... [ 0.68129957 1.0990882 1.9908389 ... 1.2127767 1.0761353 1.1074656 ] [ 0.93900454 1.354637 0.42592847 ... 1.1398357 1.6223943 1.4035552 ] [ 0.68129957 1.5428848 1.4044628 ... 0.7003565 1.3126104 1.1074656 ]] [[ 1.0557698 2.1926646 1.3140194 ... 1.3127961 0.70508343 1.4914364 ] [ 0.5405974 0.24758695 0.7748675 ... -0.2775429 1.0626538 2.7497947 ] [ 1.4629245 0.9088762 1.2204422 ... 2.5838585 2.1029687 1.4914364 ] ... [ 2.0312557 0.67449605 1.9409498 ... 1.6261809 0.552349 2.3730175 ] [ 1.0623806 1.4826007 1.8178651 ... 1.7068225 2.9894156 0.55814135] [ 2.0312557 2.1319156 2.6270444 ... 1.36101 1.4882756 2.3730175 ]] [[ 0.25867146 0.20788766 0.6866837 ... 1.7541136 0.64083713 2.2011023 ] [ 1.0736326 0.88540167 0.94694555 ... 1.640397 0.73848087 -0.28980526] [ 0.74724656 0.7955967 0.53636533 ... 1.7541136 1.586235 2.2011023 ] ... [ 1.8606263 0.28522593 0.48882625 ... 2.6831005 1.4408873 0.46247038] [ 1.9544461 1.0420427 1.5838221 ... 
0.25922787 1.4878064 0.5019981 ] [ 1.8606263 0.86679167 1.2284302 ... 1.4059268 1.4408873 0.7581882 ]]] [[[ 0.16112714 0.13984361 2.3474746 ... 1.6313782 1.5449663 1.0572212 ] [ 0.5963253 1.6741837 0.9238512 ... 0.29884017 0.7296747 0.30136195] [ 0.16112714 0.27449438 1.464365 ... 1.6313782 -0.5149365 1.322248 ] ... [ 2.3690107 0.5023596 1.7303638 ... 1.1041918 1.2917526 1.0645291 ] [ 0.46743062 1.574161 0.4681006 ... 1.0377855 1.2023388 0.15480876] [ 2.3690107 1.0851172 0.06006425 ... 2.0564408 1.2917526 1.1741097 ]] [[ 2.0593657 0.01239868 2.6859448 ... 1.8828944 0.03943291 2.4224567 ] [ 1.6767268 2.630738 1.7751774 ... 1.795562 1.2459441 0.62960166] [ 1.8538859 -0.22049598 2.6859448 ... 1.8828944 1.1786426 2.4224567 ] ... [ 1.183183 1.2668813 0.74587286 ... 1.5895706 0.7831229 1.0792555 ] [ 0.71274453 1.2669365 0.18658942 ... 1.4741828 1.4586061 0.61356217] [ 0.7400469 1.2668813 1.5610112 ... 2.2738318 -0.20612776 1.0792555 ]] [[ 0.91002256 0.37822813 1.2667526 ... 0.482461 1.9636143 1.9254864 ] [-0.1819219 1.4242061 2.6976335 ... 2.0961385 1.8175586 1.3445777 ] [ 0.91002256 0.8276561 1.2667526 ... 1.3599151 0.7539709 0.22182067] ... [ 1.1418571 1.4237024 0.8042407 ... 1.8388773 1.171055 1.34766 ] [ 2.0144281 0.43778837 0.83263254 ... 1.6073629 1.1623495 0.63540554] [ 0.26728326 1.4237024 0.8042407 ... 1.8388773 1.171055 1.2463826 ]] [[ 0.5723531 1.6150123 1.5431149 ... 0.59580296 0.250153 0.73052037] [ 2.267839 1.5055526 1.7024205 ... 0.7244827 1.6544702 2.0236042 ] [ 1.2145025 1.6150123 1.116202 ... 0.9189657 1.4637307 0.73052037] ... [ 0.593702 0.8478026 2.2180352 ... 1.5684047 2.6843367 1.8780441 ] [ 1.3366777 0.31681737 2.1104999 ... 0.65393215 0.9069064 1.8439473 ] [ 0.593702 0.6834593 2.2180352 ... 1.5684047 0.9396528 0.7412627 ]]]]]; ov_res: [[[[[ 1.515505 1.0683846 0.55811113 ... 1.1385081 0.4396367 1.8347838 ] [ 1.9679797 0.8234326 1.5855572 ... 0.21705908 2.0658784 0.9454653 ] [ 1.515505 1.346375 1.0087359 ... 1.1385081 0.4396367 1.2203873 ] ... 
[ 1.2688503 1.9155341 1.875406 ... 0.6343748 1.9174914 1.8824799 ] [ 0.12001513 1.3182265 0.56960994 ... 0.06660467 1.6801012 2.2050183 ] [ 1.6557752 2.073625 1.875406 ... 0.43717095 2.1355245 1.8824799 ]] [[ 0.8973815 1.335578 1.7452749 ... 1.020721 0.40188697 1.5135015 ] [ 2.4951181 2.691518 2.7914279 ... 1.239267 1.2216402 0.3497976 ] [-0.05550718 0.22216196 1.7452749 ... 1.020721 0.78395844 1.0220904 ] ... [ 1.7463828 0.9060348 0.7680281 ... 1.2306932 0.8161857 0.81762797] [ 0.93777585 0.8318708 2.5996964 ... 0.11715803 1.3997463 1.4871495 ] [ 1.7463828 1.8877956 0.7254522 ... 1.389061 0.8161857 0.81762797]] [[ 1.6335863 0.9786245 0.9194438 ... 1.4481493 1.7387161 1.2158066 ] [-0.10287711 2.635368 2.3438318 ... 1.0698783 0.22719693 1.1434648 ] [ 1.6335863 0.9786245 2.0434341 ... 1.4481493 0.55238575 0.8137257 ] ... [ 1.1359646 1.4861193 1.1409574 ... 0.157877 1.4189677 1.9582062 ] [ 2.0045342 1.875244 -0.2581866 ... 1.38993 1.6480749 0.12740809] [ 1.4015568 1.4861193 1.1805195 ... 0.74121195 1.4189677 -0.50027806]] [[ 2.086168 0.816084 0.65163636 ... 1.1608278 2.2953897 1.8937321 ] [ 1.6181071 1.5886586 1.6633998 ... 1.7233485 1.6309277 1.2273004 ] [ 2.086168 0.816084 1.58351 ... 1.0909054 0.10734022 0.8745798 ] ... [ 1.6850184 0.72779924 1.0090855 ... 1.7572823 1.7355031 1.4729525 ] [ 3.5731652 0.8260755 0.7941075 ... 1.8085135 0.22907232 -0.09446223] [ 2.7797534 0.72779924 1.0176063 ... 1.7572823 1.555761 0.5178409 ]]] [[[ 0.88655514 2.1896791 2.6785173 ... 3.540973 0.6936393 1.2435188 ] [ 0.35637113 2.197574 1.1681063 ... 2.1717696 2.8969262 1.498884 ] [ 0.42037526 2.1896791 2.1261826 ... 1.0207541 0.79394376 0.7390971 ] ... [ 2.3221104 0.8922189 1.5184319 ... 1.3879788 1.9894738 1.0697595 ] [ 1.146785 1.1507243 0.78011984 ... 2.2020383 0.77558297 -0.14454286] [ 1.3532795 1.6843157 2.2422745 ... 0.690982 1.9894738 1.0697595 ]] [[ 0.36944544 -0.06652301 2.1148193 ... 2.1224205 1.3347962 3.9514275 ] [ 1.484541 1.2460248 1.6186044 ... 
1.3744875 2.2368398 0.2272603 ] [ 2.2987623 -0.0090254 2.1148193 ... 2.1224205 1.3347962 3.9514275 ] ... [ 0.68129957 1.0990882 1.9908389 ... 1.2127767 1.0761353 1.1074656 ] [ 0.93900454 1.354637 0.42592847 ... 1.1398357 1.6223943 1.4035552 ] [ 0.68129957 1.5428848 1.4044628 ... 0.7003565 1.3126104 1.1074656 ]] [[ 1.0557698 2.1926646 1.3140194 ... 1.3127961 0.70508343 1.4914364 ] [ 0.5405974 0.24758695 0.7748675 ... -0.2775429 1.0626538 2.7497947 ] [ 1.4629245 0.9088762 1.2204422 ... 2.5838585 2.1029687 1.4914364 ] ... [ 2.0312557 0.67449605 1.9409498 ... 1.6261809 0.552349 2.3730175 ] [ 1.0623806 1.4826007 1.8178651 ... 1.7068225 2.9894156 0.55814135] [ 2.0312557 2.1319156 2.6270444 ... 1.36101 1.4882756 2.3730175 ]] [[ 0.25867146 0.20788766 0.6866837 ... 1.7541136 0.64083713 2.2011023 ] [ 1.0736326 0.88540167 0.94694555 ... 1.640397 0.73848087 -0.28980526] [ 0.74724656 0.7955967 0.53636533 ... 1.7541136 1.586235 2.2011023 ] ... [ 1.8606263 0.28522593 0.48882625 ... 2.6831005 1.4408873 0.46247038] [ 1.9544461 1.0420427 1.5838221 ... 0.25922787 1.4878064 0.5019981 ] [ 1.8606263 0.86679167 1.2284302 ... 1.4059268 1.4408873 0.7581882 ]]] [[[ 0.16112714 0.13984361 2.3474746 ... 1.6313782 1.5449663 1.0572212 ] [ 0.5963253 1.6741837 0.9238512 ... 0.29884017 0.7296747 0.30136195] [ 0.16112714 0.27449438 1.464365 ... 1.6313782 -0.5149365 1.322248 ] ... [ 2.3690107 0.5023596 1.7303638 ... 1.1041918 1.2917526 1.0645291 ] [ 0.46743062 1.574161 0.4681006 ... 1.0377855 1.2023388 0.15480876] [ 2.3690107 1.0851172 0.06006425 ... 2.0564408 1.2917526 1.1741097 ]] [[ 2.0593657 0.01239868 2.6859448 ... 1.8828944 0.03943291 2.4224567 ] [ 1.6767268 2.630738 1.7751774 ... 1.795562 1.2459441 0.62960166] [ 1.8538859 -0.22049598 2.6859448 ... 1.8828944 1.1786426 2.4224567 ] ... [ 1.183183 1.2668813 0.74587286 ... 1.5895706 0.7831229 1.0792555 ] [ 0.71274453 1.2669365 0.18658942 ... 1.4741828 1.4586061 0.61356217] [ 0.7400469 1.2668813 1.5610112 ... 
2.2738318 -0.20612776 1.0792555 ]] [[ 0.91002256 0.37822813 1.2667526 ... 0.482461 1.9636143 1.9254864 ] [-0.1819219 1.4242061 2.6976335 ... 2.0961385 1.8175586 1.3445777 ] [ 0.91002256 0.8276561 1.2667526 ... 1.3599151 0.7539709 0.22182067] ... [ 1.1418571 1.4237024 0.8042407 ... 1.8388773 1.171055 1.34766 ] [ 2.0144281 0.43778837 0.83263254 ... 1.6073629 1.1623495 0.63540554] [ 0.26728326 1.4237024 0.8042407 ... 1.8388773 1.171055 1.2463826 ]] [[ 0.5723531 1.6150123 1.5431149 ... 0.59580296 0.250153 0.73052037] [ 2.267839 1.5055526 1.7024205 ... 0.7244827 1.6544702 2.0236042 ] [ 1.2145025 1.6150123 1.116202 ... 0.9189657 1.4637307 0.73052037] ... [ 0.593702 0.8478026 2.2180352 ... 1.5684047 2.6843367 1.8780441 ] [ 1.3366777 0.31681737 2.1104999 ... 0.65393215 0.9069064 1.8439473 ] [ 0.593702 0.6834593 2.2180352 ... 1.5684047 0.9396528 0.7412627 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pow.py::TestPow::test_pow[ ie_device:CPU - precision:FP32 - test_input:(array([[1., 2.], [3., 4.]], dtype=float32), array([[1., 1.], [2., 2.]], dtype=float32)) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pow.___torch_mangle_5887.aten_pow, %input_data.1 : Tensor, %exponent.1 : Tensor): %3 : Tensor = aten::pow(%input_data.1, %exponent.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pow.py:33:23 return (%3) fw_re: [[ 1. 2.] [ 9. 16.]]; ov_res: [[ 1. 2.] [ 9. 16.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pow.py::TestPow::test_pow[ ie_device:CPU - precision:FP32 - test_input:(array([[1., 2.], [3., 4.]], dtype=float32), array([2., 3.], dtype=float32)) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pow.___torch_mangle_5889.aten_pow, %input_data.1 : Tensor, %exponent.1 : Tensor): %3 : Tensor = aten::pow(%input_data.1, %exponent.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pow.py:33:23 return (%3) fw_re: [[ 1. 8.] [ 9. 64.]]; ov_res: [[ 1. 8.] [ 9. 64.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pow.py::TestPow::test_pow[ ie_device:CPU - precision:FP32 - test_input:(array([[1., 2.], [3., 4.]], dtype=float32), array([2.], dtype=float32)) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pow.___torch_mangle_5891.aten_pow, %input_data.1 : Tensor, %exponent.1 : Tensor): %3 : Tensor = aten::pow(%input_data.1, %exponent.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pow.py:33:23 return (%3) fw_re: [[ 1. 4.] [ 9. 16.]]; ov_res: [[ 1. 4.] [ 9. 16.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pow.py::TestPow::test_pow[ ie_device:CPU - precision:FP32 - test_input:(array([5., 6.], dtype=float32), array([[1., 2.], [3., 4.]], dtype=float32)) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pow.___torch_mangle_5893.aten_pow, %input_data.1 : Tensor, %exponent.1 : Tensor): %3 : Tensor = aten::pow(%input_data.1, %exponent.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pow.py:33:23 return (%3) fw_re: [[ 5. 36.] [ 125. 1296.]]; ov_res: [[ 5. 36.] [ 125. 1296.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_pow.py::TestPow::test_pow[ ie_device:CPU - precision:FP32 - test_input:(array([5.], dtype=float32), array([[1., 2.], [3., 4.]], dtype=float32)) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_pow.___torch_mangle_5895.aten_pow, %input_data.1 : Tensor, %exponent.1 : Tensor): %3 : Tensor = aten::pow(%input_data.1, %exponent.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_pow.py:33:23 return (%3) fw_re: [[ 5. 25.] [125. 625.]]; ov_res: [[ 5. 25.] [125. 625.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_relu.py::TestRelu::test_relu[ ie_device:CPU - precision:FP32 - inplace:False ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_relu.___torch_mangle_5896.aten_relu, %x.1 : Tensor): %result.3 : Tensor = aten::relu(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1457:17 %3 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %result.3) return (%3) fw_re: [[[[-0.74050874 -1.2178338 0.0574206 ... 2.741991 -2.2281067 -0.6984535 ] [-0.60156935 0.46164224 0.760768 ... 1.429261 1.1633532 -2.3171945 ] [-0.12904976 -1.547429 -0.5765323 ... -0.81935364 0.6833839 0.7204024 ] ... [ 0.44797727 1.1535964 1.0519273 ... 0.47919372 1.7735691 1.1524047 ] [-0.6924224 0.686959 -1.354158 ... -0.32048574 -0.21669081 -0.2154195 ] [ 0.76791865 1.7137815 0.7194744 ... 0.03211265 0.6191636 2.2221534 ]] [[-0.8716024 -1.2722683 -0.75572276 ... 1.5889888 1.2808473 0.44876832] [-0.42739865 -0.38520652 2.143658 ... 1.9402738 0.98386055 0.4092706 ] [-0.9293256 -0.7238662 -0.12938698 ... 1.0257759 0.327895 -0.6417417 ] ... [ 0.05555598 -0.14138626 0.26467952 ... -0.43800914 -1.7198868 -0.10349458] [-0.7265618 0.5220365 1.6492673 ... -0.74589515 -0.5075262 0.18292843] [-1.312433 -0.311372 -0.8702749 ... 1.2551372 -0.303849 0.78243583]] [[-0.55578524 1.799665 -0.8923958 ... -0.04833847 0.71616614 -0.38354373] [ 0.32887143 -0.7651846 -0.37225387 ... -2.2865484 -0.07279263 -0.6296268 ] [-0.09295422 1.0938952 1.102952 ... 0.15186168 2.0707557 -0.34012988] ... [ 0.18887328 1.6664757 1.0510298 ... -1.3381625 -0.42593756 -2.448144 ] [-0.571567 -1.4332737 0.9439532 ... -0.56547934 0.31794026 0.42203355] [ 1.1363989 0.64730483 -0.7404538 ... 1.124023 2.6966763 0.07060628]]]]; ov_res: [[[[-0.74050874 -1.2178338 0.0574206 ... 2.741991 -2.2281067 -0.6984535 ] [-0.60156935 0.46164224 0.760768 ... 1.429261 1.1633532 -2.3171945 ] [-0.12904976 -1.547429 -0.5765323 ... -0.81935364 0.6833839 0.7204024 ] ... [ 0.44797727 1.1535964 1.0519273 ... 
0.47919372 1.7735691 1.1524047 ] [-0.6924224 0.686959 -1.354158 ... -0.32048574 -0.21669081 -0.2154195 ] [ 0.76791865 1.7137815 0.7194744 ... 0.03211265 0.6191636 2.2221534 ]] [[-0.8716024 -1.2722683 -0.75572276 ... 1.5889888 1.2808473 0.44876832] [-0.42739865 -0.38520652 2.143658 ... 1.9402738 0.98386055 0.4092706 ] [-0.9293256 -0.7238662 -0.12938698 ... 1.0257759 0.327895 -0.6417417 ] ... [ 0.05555598 -0.14138626 0.26467952 ... -0.43800914 -1.7198868 -0.10349458] [-0.7265618 0.5220365 1.6492673 ... -0.74589515 -0.5075262 0.18292843] [-1.312433 -0.311372 -0.8702749 ... 1.2551372 -0.303849 0.78243583]] [[-0.55578524 1.799665 -0.8923958 ... -0.04833847 0.71616614 -0.38354373] [ 0.32887143 -0.7651846 -0.37225387 ... -2.2865484 -0.07279263 -0.6296268 ] [-0.09295422 1.0938952 1.102952 ... 0.15186168 2.0707557 -0.34012988] ... [ 0.18887328 1.6664757 1.0510298 ... -1.3381625 -0.42593756 -2.448144 ] [-0.571567 -1.4332737 0.9439532 ... -0.56547934 0.31794026 0.42203355] [ 1.1363989 0.64730483 -0.7404538 ... 1.124023 2.6966763 0.07060628]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[0. 0. 0.0574206 ... 2.741991 0. 0. ] [0. 0.46164224 0.760768 ... 1.429261 1.1633532 0. ] [0. 0. 0. ... 0. 0.6833839 0.7204024 ] ... [0.44797727 1.1535964 1.0519273 ... 0.47919372 1.7735691 1.1524047 ] [0. 0.686959 0. ... 0. 0. 0. ] [0.76791865 1.7137815 0.7194744 ... 0.03211265 0.6191636 2.2221534 ]] [[0. 0. 0. ... 1.5889888 1.2808473 0.44876832] [0. 0. 2.143658 ... 1.9402738 0.98386055 0.4092706 ] [0. 0. 0. ... 1.0257759 0.327895 0. ] ... [0.05555598 0. 0.26467952 ... 0. 0. 0. ] [0. 0.5220365 1.6492673 ... 0. 0. 0.18292843] [0. 0. 0. ... 1.2551372 0. 0.78243583]] [[0. 1.799665 0. ... 0. 0.71616614 0. ] [0.32887143 0. 0. ... 0. 0. 0. ] [0. 1.0938952 1.102952 ... 0.15186168 2.0707557 0. ] ... [0.18887328 1.6664757 1.0510298 ... 0. 0. 0. ] [0. 0. 0.9439532 ... 0. 0.31794026 0.42203355] [1.1363989 0.64730483 0. ... 
1.124023 2.6966763 0.07060628]]]]; ov_res: [[[[0. 0. 0.0574206 ... 2.741991 0. 0. ] [0. 0.46164224 0.760768 ... 1.429261 1.1633532 0. ] [0. 0. 0. ... 0. 0.6833839 0.7204024 ] ... [0.44797727 1.1535964 1.0519273 ... 0.47919372 1.7735691 1.1524047 ] [0. 0.686959 0. ... 0. 0. 0. ] [0.76791865 1.7137815 0.7194744 ... 0.03211265 0.6191636 2.2221534 ]] [[0. 0. 0. ... 1.5889888 1.2808473 0.44876832] [0. 0. 2.143658 ... 1.9402738 0.98386055 0.4092706 ] [0. 0. 0. ... 1.0257759 0.327895 0. ] ... [0.05555598 0. 0.26467952 ... 0. 0. 0. ] [0. 0.5220365 1.6492673 ... 0. 0. 0.18292843] [0. 0. 0. ... 1.2551372 0. 0.78243583]] [[0. 1.799665 0. ... 0. 0.71616614 0. ] [0.32887143 0. 0. ... 0. 0. 0. ] [0. 1.0938952 1.102952 ... 0.15186168 2.0707557 0. ] ... [0.18887328 1.6664757 1.0510298 ... 0. 0. 0. ] [0. 0. 0.9439532 ... 0. 0.31794026 0.42203355] [1.1363989 0.64730483 0. ... 1.124023 2.6966763 0.07060628]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_relu.py::TestRelu::test_relu[ ie_device:CPU - precision:FP32 - inplace:True ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_relu.___torch_mangle_5898.aten_relu, %x.1 : Tensor): %result.1 : Tensor = aten::relu_(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1455:17 %3 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %result.1) return (%3) g mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: :fw_re: [[[[0. 1.046902 0. ... 0.01079161 0.03388745 0. ] [0. 1.3674338 0.47836185 ... 0. 0.7526878 0. ] [0. 0. 0.497207 ... 0. 0. 0. ] ... [0. 1.2805085 0. ... 0. 0. 0. ] [0. 0. 0.8836407 ... 2.961451 0. 0. ] [0. 0. 0.725876 ... 0.4749485 0. 1.8746697 ]] [[0.90587175 0.37210944 0.79452 ... 1.5025743 0. 0. ] [0.10524375 1.3491297 1.0548129 ... 0. 0. 0. ] [0. 1.639925 0. ... 1.3449336 1.2398607 0. ] ... [1.0760247 0.03338474 0. ... 1.8111869 0. 0. ] [0.94592744 0.85509646 0. ... 0. 0. 1.2871746 ] [0.94558406 0.36647418 1.0220009 ... 0. 0. 0.29595372]] [[0. 0. 0. ... 0.75236905 0.96596545 0. ] [0.854533 0.8936627 0. ... 0.04309141 0. 0. ] [0.6554702 0. 0. ... 0.78697056 0. 1.9552844 ] ... [0. 1.3136178 1.4080541 ... 0.24836904 3.0018368 0.16658928] [0.8412167 0. 0. ... 0. 2.1187625 0. ] [0.90422815 1.8125654 0.42772248 ... 0.4185621 0. 0. ]]]]; ov_res: [[[[0. 1.046902 0. ... 0.01079161 0.03388745 0. ] [0. 1.3674338 0.47836185 ... 0. 0.7526878 0. ] [0. 0. 0.497207 ... 0. 0. 0. ] ... [0. 1.2805085 0. ... 0. 0. 0. ] [0. 0. 0.8836407 ... 2.961451 0. 0. ] [0. 0. 0.725876 ... 0.4749485 0. 1.8746697 ]] [[0.90587175 0.37210944 0.79452 ... 1.5025743 0. 0. ] [0.10524375 1.3491297 1.0548129 ... 0. 0. 0. ] [0. 1.639925 0. ... 1.3449336 1.2398607 0. ] ... [1.0760247 0.03338474 0. ... 1.8111869 0. 0. ] [0.94592744 0.85509646 0. ... 0. 0. 1.2871746 ] [0.94558406 0.36647418 1.0220009 ... 0. 0. 0.29595372]] [[0. 0. 0. ... 0.75236905 0.96596545 0. ] [0.854533 0.8936627 0. ... 0.04309141 0. 0. 
] [0.6554702 0. 0. ... 0.78697056 0. 1.9552844 ] ... [0. 1.3136178 1.4080541 ... 0.24836904 3.0018368 0.16658928] [0.8412167 0. 0. ... 0. 2.1187625 0. ] [0.90422815 1.8125654 0.42772248 ... 0.4185621 0. 0. ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[0. 1.046902 0. ... 0.01079161 0.03388745 0. ] [0. 1.3674338 0.47836185 ... 0. 0.7526878 0. ] [0. 0. 0.497207 ... 0. 0. 0. ] ... [0. 1.2805085 0. ... 0. 0. 0. ] [0. 0. 0.8836407 ... 2.961451 0. 0. ] [0. 0. 0.725876 ... 0.4749485 0. 1.8746697 ]] [[0.90587175 0.37210944 0.79452 ... 1.5025743 0. 0. ] [0.10524375 1.3491297 1.0548129 ... 0. 0. 0. ] [0. 1.639925 0. ... 1.3449336 1.2398607 0. ] ... [1.0760247 0.03338474 0. ... 1.8111869 0. 0. ] [0.94592744 0.85509646 0. ... 0. 0. 1.2871746 ] [0.94558406 0.36647418 1.0220009 ... 0. 0. 0.29595372]] [[0. 0. 0. ... 0.75236905 0.96596545 0. ] [0.854533 0.8936627 0. ... 0.04309141 0. 0. ] [0.6554702 0. 0. ... 0.78697056 0. 1.9552844 ] ... [0. 1.3136178 1.4080541 ... 0.24836904 3.0018368 0.16658928] [0.8412167 0. 0. ... 0. 2.1187625 0. ] [0.90422815 1.8125654 0.42772248 ... 0.4185621 0. 0. ]]]]; ov_res: [[[[0. 1.046902 0. ... 0.01079161 0.03388745 0. ] [0. 1.3674338 0.47836185 ... 0. 0.7526878 0. ] [0. 0. 0.497207 ... 0. 0. 0. ] ... [0. 1.2805085 0. ... 0. 0. 0. ] [0. 0. 0.8836407 ... 2.961451 0. 0. ] [0. 0. 0.725876 ... 0.4749485 0. 1.8746697 ]] [[0.90587175 0.37210944 0.79452 ... 1.5025743 0. 0. ] [0.10524375 1.3491297 1.0548129 ... 0. 0. 0. ] [0. 1.639925 0. ... 1.3449336 1.2398607 0. ] ... [1.0760247 0.03338474 0. ... 1.8111869 0. 0. ] [0.94592744 0.85509646 0. ... 0. 0. 1.2871746 ] [0.94558406 0.36647418 1.0220009 ... 0. 0. 0.29595372]] [[0. 0. 0. ... 0.75236905 0.96596545 0. ] [0.854533 0.8936627 0. ... 0.04309141 0. 0. ] [0.6554702 0. 0. ... 0.78697056 0. 1.9552844 ] ... [0. 1.3136178 1.4080541 ... 0.24836904 3.0018368 0.16658928] [0.8412167 0. 0. ... 0. 2.1187625 0. ] [0.90422815 1.8125654 0.42772248 ... 0.4185621 0. 0. 
]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_repeat.py::TestRelu::test_relu[ ie_device:CPU - precision:FP32 - repeats:(4, 3) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_repeat.___torch_mangle_5899.aten_repeat, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[4, 3]]() %3 : Tensor = aten::repeat(%x.1, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_repeat.py:24:23 return (%3) fw_re: [[-0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 ] [-0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848] [-0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 ] [-0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848] [-0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 
] [-0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848] [-0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 ] [-0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848]]; ov_res: [[-0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 ] [-0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848] [-0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 ] 
[-0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848] [-0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 ] [-0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848] [-0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 -0.23084573 -0.36372003 0.54063797 -1.3662665 0.63745505 1.501701 0.97596663 -1.8515474 -0.35999665 -0.163383 ] [-0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848 -0.24726728 0.15388756 1.5651597 1.3738533 1.3530117 0.68035054 0.34374595 0.7324567 -0.50150776 -0.03372848]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_repeat.py::TestRelu::test_relu[ ie_device:CPU - precision:FP32 - repeats:(1, 1) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_repeat.___torch_mangle_5901.aten_repeat, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 1]]() %3 : Tensor = aten::repeat(%x.1, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_repeat.py:24:23 return (%3) fw_re: [[-0.7079136 0.83094764 0.5525945 1.6028563 -0.49401242 -0.0028507 0.6715729 -0.23390003 0.65509516 0.43611795] [ 0.9285943 1.965067 0.0261953 -1.8354104 0.25149173 1.0087711 -0.9269197 0.482941 0.068987 -1.0578129 ]]; ov_res: [[-0.7079136 0.83094764 0.5525945 1.6028563 -0.49401242 -0.0028507 0.6715729 -0.23390003 0.65509516 0.43611795] [ 0.9285943 1.965067 0.0261953 -1.8354104 0.25149173 1.0087711 -0.9269197 0.482941 0.068987 -1.0578129 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_repeat.py::TestRelu::test_relu[ ie_device:CPU - precision:FP32 - repeats:(1, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_repeat.___torch_mangle_5903.aten_repeat, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 3]]() %3 : Tensor = aten::repeat(%x.1, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_repeat.py:24:23 return (%3) fw_re: [[[ 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 ] [ 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834] [ 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 ] [ 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834]]]; ov_res: [[[ 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 
-0.92907363 -0.602972 ] [ 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834] [ 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 0.12454445 0.5356965 -0.8303637 0.8754378 -0.9234696 -0.44902304 0.08019944 -1.3065625 -0.92907363 -0.602972 ] [ 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834 1.244533 1.6720363 -2.1461678 1.8041621 -0.70182496 0.6872924 -0.24157102 -0.47277683 0.41508338 -0.63922834]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_repeat.py::TestRelu::test_relu[ ie_device:CPU - precision:FP32 - repeats:(1, 2, 2, 3) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_repeat.___torch_mangle_5905.aten_repeat, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1, 2, 2, 3]]() %3 : Tensor = aten::repeat(%x.1, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_repeat.py:24:23 return (%3) fw_re: [[[[-0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724] [-0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 ] [-0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724] [-0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 ]] [[-0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 
1.2049155 0.30001724] [-0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 ] [-0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724] [-0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 ]]]]; ov_res: [[[[-0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724] [-0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 ] [-0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 
-2.0521252 1.2049155 0.30001724] [-0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 ]] [[-0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724] [-0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 ] [-0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724 -0.40683255 -0.17182449 -0.26766396 0.48181033 0.7409606 0.13336417 0.29194412 -2.0521252 1.2049155 0.30001724] [-0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 -0.8116856 -0.2090251 -0.9853948 0.6897158 0.47173417 -1.4923153 -0.18896592 1.8622828 0.12806824 0.7268106 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape.py::TestReshape::test_reshape[ ie_device:CPU - precision:FP32 - shape:[-1, 6] ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape.___torch_mangle_5906.aten_reshape, %x.1 : Tensor): %self.shape : int[] = prim::Constant[value=[-1, 6]]() %3 : Tensor = aten::reshape(%x.1, %self.shape) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape.py:24:23 return (%3) fw_re: [[20.122807 18.960129 6.7229443 10.003205 10.97824 0.82604426] [16.707645 25.308523 41.238956 4.2681856 14.799942 6.265021 ] [48.081383 38.218227 36.938816 4.7593203 42.836617 46.401497 ] ... [14.709993 11.953761 38.138252 49.330418 36.531475 10.297663 ] [39.271667 8.894856 36.214405 8.269354 42.26673 37.646053 ] [13.324775 39.156925 39.605377 27.09517 3.8029528 40.056236 ]]; ov_res: [[20.122807 18.960129 6.7229443 10.003205 10.97824 0.82604426] [16.707645 25.308523 41.238956 4.2681856 14.799942 6.265021 ] [48.081383 38.218227 36.938816 4.7593203 42.836617 46.401497 ] ... [14.709993 11.953761 38.138252 49.330418 36.531475 10.297663 ] [39.271667 8.894856 36.214405 8.269354 42.26673 37.646053 ] [13.324775 39.156925 39.605377 27.09517 3.8029528 40.056236 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape.py::TestReshape::test_reshape[ ie_device:CPU - precision:FP32 - shape:[12, 12, 24, 1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape.___torch_mangle_5908.aten_reshape, %x.1 : Tensor): %self.shape : int[] = prim::Constant[value=[12, 12, 24, 1]]() %3 : Tensor = aten::reshape(%x.1, %self.shape) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape.py:24:23 return (%3) fw_re: [[[[16.834616 ] [43.382168 ] [40.478607 ] ... [35.30539 ] [ 5.874515 ] [ 4.31108 ]] [[16.447882 ] [35.98801 ] [37.00157 ] ... [ 2.2267075 ] [16.593433 ] [ 6.132117 ]] [[41.732895 ] [27.030252 ] [40.486897 ] ... [35.987595 ] [30.105175 ] [20.596996 ]] ... [[49.913372 ] [13.143303 ] [34.3739 ] ... [32.261086 ] [ 6.8324885 ] [39.55981 ]] [[46.371403 ] [19.57995 ] [10.053709 ] ... [14.319209 ] [ 0.14657918] [43.78159 ]] [[ 1.3563548 ] [45.521782 ] [33.549534 ] ... [38.11501 ] [29.210451 ] [42.03926 ]]] [[[ 5.19145 ] [16.234919 ] [35.393578 ] ... [48.875034 ] [31.33206 ] [37.196056 ]] [[46.367622 ] [16.806498 ] [16.160261 ] ... [24.648993 ] [28.496723 ] [49.497467 ]] [[24.564274 ] [17.498304 ] [41.382343 ] ... [ 8.688283 ] [ 0.38994083] [14.790685 ]] ... [[30.447649 ] [41.876465 ] [ 9.269878 ] ... [15.56606 ] [46.264305 ] [30.762354 ]] [[38.706623 ] [10.495604 ] [42.341118 ] ... [32.796303 ] [ 6.0497637 ] [ 9.715428 ]] [[ 8.774701 ] [ 0.75018823] [44.534264 ] ... [16.764765 ] [ 7.494693 ] [ 6.3867865 ]]] [[[48.583206 ] [19.996313 ] [17.288055 ] ... [24.61392 ] [49.94847 ] [ 8.260585 ]] [[38.42909 ] [37.08934 ] [44.757042 ] ... [19.191536 ] [ 7.2592115 ] [ 6.338125 ]] [[15.787574 ] [ 5.4371166 ] [33.445873 ] ... [34.227066 ] [ 6.7729416 ] [29.966856 ]] ... [[41.90851 ] [35.132065 ] [17.740314 ] ... [33.716324 ] [27.69014 ] [38.838326 ]] [[40.402103 ] [ 4.6290784 ] [48.449696 ] ... [14.844154 ] [24.249792 ] [11.600458 ]] [[32.05422 ] [39.27725 ] [ 1.5813361 ] ... [46.269337 ] [ 1.948389 ] [45.10294 ]]] ... [[[ 4.547969 ] [ 4.4995894 ] [28.006124 ] ... 
[18.439035 ] [30.269463 ] [29.41373 ]] [[23.194672 ] [26.350729 ] [ 6.1068144 ] ... [ 7.9719057 ] [24.067902 ] [46.379333 ]] [[ 4.9969263 ] [11.633748 ] [43.704372 ] ... [15.90566 ] [ 6.7744565 ] [ 7.9273343 ]] ... [[20.816055 ] [16.879164 ] [ 7.962172 ] ... [11.062418 ] [ 7.8268247 ] [42.28335 ]] [[32.833004 ] [ 3.9849088 ] [16.620934 ] ... [26.521347 ] [ 5.858614 ] [ 4.188335 ]] [[38.9272 ] [47.077526 ] [16.593489 ] ... [14.097906 ] [14.787615 ] [42.36765 ]]] [[[ 3.2536464 ] [20.412119 ] [ 4.9459853 ] ... [44.48977 ] [29.321594 ] [27.60584 ]] [[37.6547 ] [35.176517 ] [20.901648 ] ... [33.197845 ] [36.216904 ] [24.347416 ]] [[17.924458 ] [ 7.281983 ] [25.927996 ] ... [32.31325 ] [14.368298 ] [17.318815 ]] ... [[45.163437 ] [31.196747 ] [26.258898 ] ... [14.599852 ] [25.593927 ] [ 3.0294125 ]] [[31.08078 ] [33.620003 ] [ 9.693498 ] ... [44.78441 ] [31.39946 ] [41.3098 ]] [[34.079742 ] [29.937586 ] [22.20828 ] ... [ 0.9508631 ] [ 0.8593374 ] [49.169155 ]]] [[[10.84069 ] [21.89427 ] [34.803715 ] ... [16.706814 ] [34.358467 ] [13.715776 ]] [[ 6.507637 ] [ 6.026314 ] [45.346306 ] ... [32.233604 ] [29.395203 ] [21.621147 ]] [[49.16844 ] [ 2.2414567 ] [45.828945 ] ... [22.588507 ] [10.92259 ] [16.143028 ]] ... [[14.310006 ] [21.977419 ] [33.081387 ] ... [ 6.7348905 ] [46.92635 ] [27.785212 ]] [[40.475513 ] [ 0.48112103] [12.115862 ] ... [ 3.483428 ] [ 8.850199 ] [ 8.592791 ]] [[28.988894 ] [38.179436 ] [23.280752 ] ... [19.401436 ] [ 6.923342 ] [39.554745 ]]]]; ov_res: [[[[16.834616 ] [43.382168 ] [40.478607 ] ... [35.30539 ] [ 5.874515 ] [ 4.31108 ]] [[16.447882 ] [35.98801 ] [37.00157 ] ... [ 2.2267075 ] [16.593433 ] [ 6.132117 ]] [[41.732895 ] [27.030252 ] [40.486897 ] ... [35.987595 ] [30.105175 ] [20.596996 ]] ... [[49.913372 ] [13.143303 ] [34.3739 ] ... [32.261086 ] [ 6.8324885 ] [39.55981 ]] [[46.371403 ] [19.57995 ] [10.053709 ] ... [14.319209 ] [ 0.14657918] [43.78159 ]] [[ 1.3563548 ] [45.521782 ] [33.549534 ] ... 
[38.11501 ] [29.210451 ] [42.03926 ]]] [[[ 5.19145 ] [16.234919 ] [35.393578 ] ... [48.875034 ] [31.33206 ] [37.196056 ]] [[46.367622 ] [16.806498 ] [16.160261 ] ... [24.648993 ] [28.496723 ] [49.497467 ]] [[24.564274 ] [17.498304 ] [41.382343 ] ... [ 8.688283 ] [ 0.38994083] [14.790685 ]] ... [[30.447649 ] [41.876465 ] [ 9.269878 ] ... [15.56606 ] [46.264305 ] [30.762354 ]] [[38.706623 ] [10.495604 ] [42.341118 ] ... [32.796303 ] [ 6.0497637 ] [ 9.715428 ]] [[ 8.774701 ] [ 0.75018823] [44.534264 ] ... [16.764765 ] [ 7.494693 ] [ 6.3867865 ]]] [[[48.583206 ] [19.996313 ] [17.288055 ] ... [24.61392 ] [49.94847 ] [ 8.260585 ]] [[38.42909 ] [37.08934 ] [44.757042 ] ... [19.191536 ] [ 7.2592115 ] [ 6.338125 ]] [[15.787574 ] [ 5.4371166 ] [33.445873 ] ... [34.227066 ] [ 6.7729416 ] [29.966856 ]] ... [[41.90851 ] [35.132065 ] [17.740314 ] ... [33.716324 ] [27.69014 ] [38.838326 ]] [[40.402103 ] [ 4.6290784 ] [48.449696 ] ... [14.844154 ] [24.249792 ] [11.600458 ]] [[32.05422 ] [39.27725 ] [ 1.5813361 ] ... [46.269337 ] [ 1.948389 ] [45.10294 ]]] ... [[[ 4.547969 ] [ 4.4995894 ] [28.006124 ] ... [18.439035 ] [30.269463 ] [29.41373 ]] [[23.194672 ] [26.350729 ] [ 6.1068144 ] ... [ 7.9719057 ] [24.067902 ] [46.379333 ]] [[ 4.9969263 ] [11.633748 ] [43.704372 ] ... [15.90566 ] [ 6.7744565 ] [ 7.9273343 ]] ... [[20.816055 ] [16.879164 ] [ 7.962172 ] ... [11.062418 ] [ 7.8268247 ] [42.28335 ]] [[32.833004 ] [ 3.9849088 ] [16.620934 ] ... [26.521347 ] [ 5.858614 ] [ 4.188335 ]] [[38.9272 ] [47.077526 ] [16.593489 ] ... [14.097906 ] [14.787615 ] [42.36765 ]]] [[[ 3.2536464 ] [20.412119 ] [ 4.9459853 ] ... [44.48977 ] [29.321594 ] [27.60584 ]] [[37.6547 ] [35.176517 ] [20.901648 ] ... [33.197845 ] [36.216904 ] [24.347416 ]] [[17.924458 ] [ 7.281983 ] [25.927996 ] ... [32.31325 ] [14.368298 ] [17.318815 ]] ... [[45.163437 ] [31.196747 ] [26.258898 ] ... [14.599852 ] [25.593927 ] [ 3.0294125 ]] [[31.08078 ] [33.620003 ] [ 9.693498 ] ... 
[44.78441 ] [31.39946 ] [41.3098 ]] [[34.079742 ] [29.937586 ] [22.20828 ] ... [ 0.9508631 ] [ 0.8593374 ] [49.169155 ]]] [[[10.84069 ] [21.89427 ] [34.803715 ] ... [16.706814 ] [34.358467 ] [13.715776 ]] [[ 6.507637 ] [ 6.026314 ] [45.346306 ] ... [32.233604 ] [29.395203 ] [21.621147 ]] [[49.16844 ] [ 2.2414567 ] [45.828945 ] ... [22.588507 ] [10.92259 ] [16.143028 ]] ... [[14.310006 ] [21.977419 ] [33.081387 ] ... [ 6.7348905 ] [46.92635 ] [27.785212 ]] [[40.475513 ] [ 0.48112103] [12.115862 ] ... [ 3.483428 ] [ 8.850199 ] [ 8.592791 ]] [[28.988894 ] [38.179436 ] [23.280752 ] ... [19.401436 ] [ 6.923342 ] [39.554745 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape.py::TestReshape::test_reshape[ ie_device:CPU - precision:FP32 - shape:[12, 12, 12, 2] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape.___torch_mangle_5910.aten_reshape, %x.1 : Tensor): %self.shape : int[] = prim::Constant[value=[12, 12, 12, 2]]() %3 : Tensor = aten::reshape(%x.1, %self.shape) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape.py:24:23 return (%3) fw_re: [[[[ 2.864906 43.91205 ] [45.788662 23.857866 ] [ 5.818357 21.736704 ] ... [16.145126 3.62949 ] [14.770235 8.624788 ] [31.54267 43.829975 ]] [[ 4.2678504 37.407154 ] [33.48945 24.295929 ] [ 0.8357211 5.412082 ] ... [38.266827 9.29942 ] [48.01658 22.160727 ] [26.585905 20.466162 ]] [[43.465977 7.498367 ] [ 4.5283012 36.2908 ] [ 6.1983447 4.409932 ] ... [40.87397 29.743237 ] [42.79904 49.57188 ] [28.06214 29.085434 ]] ... [[22.188507 42.037792 ] [ 8.631084 27.113188 ] [30.463482 13.852854 ] ... [39.32448 42.39713 ] [25.33253 39.85249 ] [12.749868 49.4289 ]] [[36.495815 32.17833 ] [ 4.2428346 8.237558 ] [26.116562 22.041132 ] ... [40.20587 8.018595 ] [37.144794 36.893032 ] [ 1.6186376 46.72817 ]] [[40.702934 11.184076 ] [26.050705 25.270243 ] [35.896652 17.060913 ] ... [23.436054 48.29412 ] [12.085631 40.92786 ] [45.16595 36.154476 ]]] [[[41.396366 9.811781 ] [24.987282 49.128166 ] [39.98726 7.547995 ] ... [ 8.4306965 20.251736 ] [ 9.898377 48.31209 ] [ 6.1822486 26.966413 ]] [[47.555313 19.230837 ] [19.868845 38.950188 ] [41.480087 14.21044 ] ... [26.457165 30.088469 ] [22.605177 35.21481 ] [42.7922 14.320719 ]] [[36.995926 3.7551744 ] [ 2.5438862 33.02701 ] [42.129803 21.356714 ] ... [21.756662 36.410778 ] [17.921316 40.454517 ] [ 6.72806 49.586163 ]] ... [[18.59064 49.519287 ] [43.864464 32.011326 ] [48.600964 37.643776 ] ... [21.940533 2.2229822 ] [40.309956 34.24294 ] [ 9.663487 31.271627 ]] [[12.730488 34.646442 ] [21.052397 27.757446 ] [30.753063 16.320284 ] ... 
[42.082767 36.08696 ] [45.948666 29.470596 ] [22.781883 11.023296 ]] [[20.736965 47.985985 ] [15.267529 2.166825 ] [31.363613 36.755356 ] ... [ 6.4746327 40.01637 ] [ 5.7573757 21.989735 ] [ 4.8249426 23.820492 ]]] [[[24.343678 13.665013 ] [36.114655 36.328518 ] [14.777481 23.586605 ] ... [44.90588 45.66957 ] [46.329823 31.614902 ] [25.199858 14.4946995 ]] [[23.440498 9.930032 ] [ 3.0959916 30.061321 ] [37.15016 0.36742178] ... [23.841524 25.221157 ] [ 6.2329564 26.617365 ] [18.002903 21.421843 ]] [[38.080284 40.958073 ] [20.19143 3.8964965 ] [33.86239 14.636863 ] ... [ 0.14834468 32.537514 ] [48.145145 45.23957 ] [38.38602 46.67314 ]] ... [[13.456233 30.521194 ] [34.79961 3.5036302 ] [25.802616 5.792336 ] ... [26.638048 28.386211 ] [22.396456 29.792887 ] [26.965275 7.3530216 ]] [[44.902683 35.385044 ] [10.472431 29.27674 ] [ 9.772814 5.1931067 ] ... [16.591831 3.0800536 ] [12.514053 2.472366 ] [37.368164 14.4678335 ]] [[42.975704 6.740496 ] [23.787153 26.38004 ] [ 8.940724 35.607414 ] ... [39.047527 20.518305 ] [49.055355 4.073518 ] [46.07622 3.2238102 ]]] ... [[[ 2.8149621 45.476406 ] [46.531395 19.405376 ] [ 9.30929 24.259718 ] ... [22.003202 16.140312 ] [49.813194 20.595783 ] [48.6233 20.254751 ]] [[10.694007 48.12589 ] [ 1.5558069 7.8385444 ] [11.876529 6.7133255 ] ... [ 9.215891 34.20643 ] [44.454105 32.279053 ] [ 2.757806 20.848307 ]] [[44.843025 24.054089 ] [13.09465 41.652107 ] [31.068121 23.295042 ] ... [13.241431 27.353214 ] [47.11302 44.152954 ] [43.61942 31.241169 ]] ... [[ 1.5748483 8.124693 ] [46.685295 16.044172 ] [18.601444 1.7990766 ] ... [19.124039 23.09656 ] [20.413712 27.274565 ] [29.915487 32.803307 ]] [[31.68972 49.97638 ] [ 5.5509815 17.3441 ] [ 3.6833408 16.691471 ] ... [ 6.5496325 40.920567 ] [23.640327 37.808098 ] [32.288143 22.047892 ]] [[22.662298 34.1955 ] [ 1.3930013 3.3556378 ] [14.892179 8.222141 ] ... 
[ 7.66029 38.682793 ] [ 2.2665758 1.313161 ] [44.6205 0.7256289 ]]] [[[34.466976 38.102215 ] [38.597824 36.067616 ] [30.824621 46.972336 ] ... [ 6.753097 3.0461438 ] [43.73642 44.465393 ] [14.897266 35.88327 ]] [[17.653404 47.40393 ] [43.87199 37.875202 ] [41.50689 20.499828 ] ... [49.869587 5.837619 ] [12.706481 10.660996 ] [20.503714 6.370058 ]] [[11.337409 16.275385 ] [ 5.461364 48.98268 ] [20.419275 7.219602 ] ... [26.578445 36.82308 ] [46.73222 30.238739 ] [15.903218 41.468548 ]] ... [[ 3.2720432 3.4797075 ] [ 7.619071 47.376526 ] [48.28436 28.427607 ] ... [20.369009 24.78645 ] [24.120852 14.7843275 ] [17.12699 14.272472 ]] [[25.17247 46.05331 ] [35.576733 21.553131 ] [24.964167 33.859707 ] ... [ 6.65117 14.393161 ] [26.819338 0.693545 ] [49.39648 7.610575 ]] [[ 9.795483 11.718482 ] [26.845644 4.1430597 ] [ 9.429971 33.696045 ] ... [17.78797 26.6894 ] [46.57343 42.429462 ] [11.195186 13.659836 ]]] [[[ 2.259921 44.47076 ] [ 4.238048 29.06355 ] [24.719584 25.199087 ] ... [26.90411 21.33435 ] [ 1.3516163 46.887993 ] [27.22232 38.62163 ]] [[42.35493 48.62601 ] [24.701738 23.981258 ] [ 9.320193 24.021465 ] ... [39.806618 26.679594 ] [15.180361 20.081612 ] [32.768433 32.483356 ]] [[41.51741 23.564116 ] [42.62723 45.498856 ] [ 1.3943763 23.612003 ] ... [26.04157 30.915873 ] [ 7.172883 46.181023 ] [ 3.7802572 27.549164 ]] ... [[ 2.775783 43.0858 ] [23.289616 36.67139 ] [31.20133 16.567049 ] ... [26.188345 12.2669 ] [13.077795 26.330034 ] [44.148705 18.950232 ]] [[12.765808 30.55417 ] [47.63908 17.251068 ] [ 9.994689 19.643236 ] ... [ 3.5732427 38.914593 ] [38.820637 1.7700951 ] [ 0.98105717 23.552141 ]] [[38.108402 15.244968 ] [ 6.335177 48.462185 ] [30.43751 27.82622 ] ... [ 1.389519 22.196922 ] [34.718063 42.57522 ] [49.950993 39.623463 ]]]]; ov_res: [[[[ 2.864906 43.91205 ] [45.788662 23.857866 ] [ 5.818357 21.736704 ] ... [16.145126 3.62949 ] [14.770235 8.624788 ] [31.54267 43.829975 ]] [[ 4.2678504 37.407154 ] [33.48945 24.295929 ] [ 0.8357211 5.412082 ] ... 
[38.266827 9.29942 ] [48.01658 22.160727 ] [26.585905 20.466162 ]] [[43.465977 7.498367 ] [ 4.5283012 36.2908 ] [ 6.1983447 4.409932 ] ... [40.87397 29.743237 ] [42.79904 49.57188 ] [28.06214 29.085434 ]] ... [[22.188507 42.037792 ] [ 8.631084 27.113188 ] [30.463482 13.852854 ] ... [39.32448 42.39713 ] [25.33253 39.85249 ] [12.749868 49.4289 ]] [[36.495815 32.17833 ] [ 4.2428346 8.237558 ] [26.116562 22.041132 ] ... [40.20587 8.018595 ] [37.144794 36.893032 ] [ 1.6186376 46.72817 ]] [[40.702934 11.184076 ] [26.050705 25.270243 ] [35.896652 17.060913 ] ... [23.436054 48.29412 ] [12.085631 40.92786 ] [45.16595 36.154476 ]]] [[[41.396366 9.811781 ] [24.987282 49.128166 ] [39.98726 7.547995 ] ... [ 8.4306965 20.251736 ] [ 9.898377 48.31209 ] [ 6.1822486 26.966413 ]] [[47.555313 19.230837 ] [19.868845 38.950188 ] [41.480087 14.21044 ] ... [26.457165 30.088469 ] [22.605177 35.21481 ] [42.7922 14.320719 ]] [[36.995926 3.7551744 ] [ 2.5438862 33.02701 ] [42.129803 21.356714 ] ... [21.756662 36.410778 ] [17.921316 40.454517 ] [ 6.72806 49.586163 ]] ... [[18.59064 49.519287 ] [43.864464 32.011326 ] [48.600964 37.643776 ] ... [21.940533 2.2229822 ] [40.309956 34.24294 ] [ 9.663487 31.271627 ]] [[12.730488 34.646442 ] [21.052397 27.757446 ] [30.753063 16.320284 ] ... [42.082767 36.08696 ] [45.948666 29.470596 ] [22.781883 11.023296 ]] [[20.736965 47.985985 ] [15.267529 2.166825 ] [31.363613 36.755356 ] ... [ 6.4746327 40.01637 ] [ 5.7573757 21.989735 ] [ 4.8249426 23.820492 ]]] [[[24.343678 13.665013 ] [36.114655 36.328518 ] [14.777481 23.586605 ] ... [44.90588 45.66957 ] [46.329823 31.614902 ] [25.199858 14.4946995 ]] [[23.440498 9.930032 ] [ 3.0959916 30.061321 ] [37.15016 0.36742178] ... [23.841524 25.221157 ] [ 6.2329564 26.617365 ] [18.002903 21.421843 ]] [[38.080284 40.958073 ] [20.19143 3.8964965 ] [33.86239 14.636863 ] ... [ 0.14834468 32.537514 ] [48.145145 45.23957 ] [38.38602 46.67314 ]] ... [[13.456233 30.521194 ] [34.79961 3.5036302 ] [25.802616 5.792336 ] ... 
[26.638048 28.386211 ] [22.396456 29.792887 ] [26.965275 7.3530216 ]] [[44.902683 35.385044 ] [10.472431 29.27674 ] [ 9.772814 5.1931067 ] ... [16.591831 3.0800536 ] [12.514053 2.472366 ] [37.368164 14.4678335 ]] [[42.975704 6.740496 ] [23.787153 26.38004 ] [ 8.940724 35.607414 ] ... [39.047527 20.518305 ] [49.055355 4.073518 ] [46.07622 3.2238102 ]]] ... [[[ 2.8149621 45.476406 ] [46.531395 19.405376 ] [ 9.30929 24.259718 ] ... [22.003202 16.140312 ] [49.813194 20.595783 ] [48.6233 20.254751 ]] [[10.694007 48.12589 ] [ 1.5558069 7.8385444 ] [11.876529 6.7133255 ] ... [ 9.215891 34.20643 ] [44.454105 32.279053 ] [ 2.757806 20.848307 ]] [[44.843025 24.054089 ] [13.09465 41.652107 ] [31.068121 23.295042 ] ... [13.241431 27.353214 ] [47.11302 44.152954 ] [43.61942 31.241169 ]] ... [[ 1.5748483 8.124693 ] [46.685295 16.044172 ] [18.601444 1.7990766 ] ... [19.124039 23.09656 ] [20.413712 27.274565 ] [29.915487 32.803307 ]] [[31.68972 49.97638 ] [ 5.5509815 17.3441 ] [ 3.6833408 16.691471 ] ... [ 6.5496325 40.920567 ] [23.640327 37.808098 ] [32.288143 22.047892 ]] [[22.662298 34.1955 ] [ 1.3930013 3.3556378 ] [14.892179 8.222141 ] ... [ 7.66029 38.682793 ] [ 2.2665758 1.313161 ] [44.6205 0.7256289 ]]] [[[34.466976 38.102215 ] [38.597824 36.067616 ] [30.824621 46.972336 ] ... [ 6.753097 3.0461438 ] [43.73642 44.465393 ] [14.897266 35.88327 ]] [[17.653404 47.40393 ] [43.87199 37.875202 ] [41.50689 20.499828 ] ... [49.869587 5.837619 ] [12.706481 10.660996 ] [20.503714 6.370058 ]] [[11.337409 16.275385 ] [ 5.461364 48.98268 ] [20.419275 7.219602 ] ... [26.578445 36.82308 ] [46.73222 30.238739 ] [15.903218 41.468548 ]] ... [[ 3.2720432 3.4797075 ] [ 7.619071 47.376526 ] [48.28436 28.427607 ] ... [20.369009 24.78645 ] [24.120852 14.7843275 ] [17.12699 14.272472 ]] [[25.17247 46.05331 ] [35.576733 21.553131 ] [24.964167 33.859707 ] ... [ 6.65117 14.393161 ] [26.819338 0.693545 ] [49.39648 7.610575 ]] [[ 9.795483 11.718482 ] [26.845644 4.1430597 ] [ 9.429971 33.696045 ] ... 
[17.78797 26.6894 ] [46.57343 42.429462 ] [11.195186 13.659836 ]]] [[[ 2.259921 44.47076 ] [ 4.238048 29.06355 ] [24.719584 25.199087 ] ... [26.90411 21.33435 ] [ 1.3516163 46.887993 ] [27.22232 38.62163 ]] [[42.35493 48.62601 ] [24.701738 23.981258 ] [ 9.320193 24.021465 ] ... [39.806618 26.679594 ] [15.180361 20.081612 ] [32.768433 32.483356 ]] [[41.51741 23.564116 ] [42.62723 45.498856 ] [ 1.3943763 23.612003 ] ... [26.04157 30.915873 ] [ 7.172883 46.181023 ] [ 3.7802572 27.549164 ]] ... [[ 2.775783 43.0858 ] [23.289616 36.67139 ] [31.20133 16.567049 ] ... [26.188345 12.2669 ] [13.077795 26.330034 ] [44.148705 18.950232 ]] [[12.765808 30.55417 ] [47.63908 17.251068 ] [ 9.994689 19.643236 ] ... [ 3.5732427 38.914593 ] [38.820637 1.7700951 ] [ 0.98105717 23.552141 ]] [[38.108402 15.244968 ] [ 6.335177 48.462185 ] [30.43751 27.82622 ] ... [ 1.389519 22.196922 ] [34.718063 42.57522 ] [49.950993 39.623463 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape.py::TestReshape::test_reshape[ ie_device:CPU - precision:FP32 - shape:[12, -1, 12, 24] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape.___torch_mangle_5912.aten_reshape, %x.1 : Tensor): %self.shape : int[] = prim::Constant[value=[12, -1, 12, 24]]() %3 : Tensor = aten::reshape(%x.1, %self.shape) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape.py:24:23 return (%3) fw_re: [[[[3.12992020e+01 4.44752808e+01 3.29084740e+01 ... 1.18384874e+00 9.68970871e+00 2.48379974e+01] [8.43068719e-01 9.05227184e+00 3.80282030e-02 ... 3.61193810e+01 1.56031361e+01 3.93618435e-01] [1.11496429e+01 4.05246391e+01 3.59672508e+01 ... 1.46325264e+01 1.32339096e+01 1.95143528e+01] ... [9.05236721e+00 3.79903946e+01 4.65942802e+01 ... 3.41695900e+01 4.57382050e+01 3.43657036e+01] [3.03584843e+01 3.75298309e+01 1.11563146e+00 ... 1.35414052e+00 2.01452065e+01 1.09254866e+01] [1.10668688e+01 1.50688801e+01 4.97283630e+01 ... 4.18470459e+01 4.99224167e+01 2.09445858e+01]]] [[[9.12477016e+00 3.78229904e+01 2.86904850e+01 ... 3.14583836e+01 6.66651058e+00 1.73863277e+01] [4.45199547e+01 2.44408131e+00 3.27855644e+01 ... 3.71078796e+01 3.82851067e+01 4.01659470e+01] [1.55881500e+01 2.29607439e+00 8.72640800e+00 ... 3.24012489e+01 4.62124825e+01 3.31889381e+01] ... [3.58776169e+01 2.38265076e+01 9.51870060e+00 ... 6.11079931e+00 1.33448801e+01 2.32681808e+01] [1.64483757e+01 4.95938873e+01 1.69187565e+01 ... 1.03040171e+01 6.39223051e+00 2.54320507e+01] [3.15642223e+01 1.21370668e+01 4.38380165e+01 ... 3.92780151e+01 2.97630348e+01 2.47373314e+01]]] [[[3.05545731e+01 4.84034157e+00 7.93374968e+00 ... 3.83775353e+00 4.15470619e+01 6.27866650e+00] [8.85504246e+00 1.07667532e+01 4.84338112e+01 ... 4.95667305e+01 1.35353527e+01 4.39670029e+01] [1.91145694e+00 3.83222046e+01 7.88034296e+00 ... 4.73532343e+00 3.63616524e+01 5.10704041e+00] ... [3.51225052e+01 3.63576202e+01 4.54738541e+01 ... 7.31148911e+00 9.47859287e+00 4.28179207e+01] [4.13216896e+01 6.73851776e+00 4.00039444e+01 ... 
1.05770445e+01 4.66196747e+01 3.68367691e+01] [1.51097965e+01 3.11068153e+00 1.00049229e+01 ... 5.33564472e+00 1.14497156e+01 2.93095350e+00]]] ... [[[1.35920029e+01 2.30468082e+01 5.85965204e+00 ... 2.57816429e+01 4.58797493e+01 1.95014744e+01] [3.21401811e+00 4.97752342e+01 2.85730019e+01 ... 1.96354675e+01 3.72141838e+01 2.60781231e+01] [3.73124733e+01 4.38428612e+01 4.82690525e+00 ... 1.71708989e+00 3.39105263e+01 7.39046526e+00] ... [4.91537046e+00 3.70562859e+01 2.03075428e+01 ... 4.32218742e+01 3.72401886e+01 4.13881378e+01] [4.74443512e+01 4.82625084e+01 4.89046440e+01 ... 5.28351736e+00 1.57168598e+01 4.86631775e+01] [1.47978296e+01 3.74988632e+01 4.36534615e+01 ... 4.89448624e+01 7.65229750e+00 8.26536083e+00]]] [[[4.63621674e+01 1.19319448e+01 4.76463165e+01 ... 4.93666420e+01 2.69621201e+01 5.55375481e+00] [9.49065876e+00 3.16899109e+01 1.48985043e-01 ... 4.67171021e+01 8.98414707e+00 6.07517242e+00] [3.19710045e+01 1.13277197e+01 3.07114620e+01 ... 4.59517431e+00 3.03421535e+01 1.12690792e+01] ... [1.59556856e+01 9.60387766e-01 2.27549477e+01 ... 8.59336758e+00 1.27092619e+01 4.87071915e+01] [4.73956871e+01 2.44074554e+01 3.49501305e+01 ... 4.87582512e+01 3.68695602e+01 3.48120880e+01] [2.74475517e+01 2.08384781e+01 9.50794792e+00 ... 4.25264664e+01 3.46280022e+01 3.63242683e+01]]] [[[1.54532719e+00 2.45356770e+01 1.96828365e+01 ... 2.35565033e+01 2.53737679e+01 3.86136842e+00] [1.63941631e+01 1.48548031e+01 2.18212681e+01 ... 1.25006163e+00 1.86439590e+01 1.37649870e+01] [2.89407291e+01 1.31421442e+01 4.92908525e+00 ... 2.70167103e+01 4.00478249e+01 3.06561909e+01] ... [3.69310760e+01 4.91699219e+01 3.12253046e+00 ... 3.92177391e+01 7.94267273e+00 9.54280376e+00] [2.87923660e+01 3.27761497e+01 1.93267498e+01 ... 2.20724430e+01 3.38479347e+01 3.17832031e+01] [2.00098038e+01 4.60762482e+01 2.91821632e+01 ... 2.12025434e-01 1.48929167e+01 3.70536499e+01]]]]; ov_res: [[[[3.12992020e+01 4.44752808e+01 3.29084740e+01 ... 
1.18384874e+00 9.68970871e+00 2.48379974e+01] [8.43068719e-01 9.05227184e+00 3.80282030e-02 ... 3.61193810e+01 1.56031361e+01 3.93618435e-01] [1.11496429e+01 4.05246391e+01 3.59672508e+01 ... 1.46325264e+01 1.32339096e+01 1.95143528e+01] ... [9.05236721e+00 3.79903946e+01 4.65942802e+01 ... 3.41695900e+01 4.57382050e+01 3.43657036e+01] [3.03584843e+01 3.75298309e+01 1.11563146e+00 ... 1.35414052e+00 2.01452065e+01 1.09254866e+01] [1.10668688e+01 1.50688801e+01 4.97283630e+01 ... 4.18470459e+01 4.99224167e+01 2.09445858e+01]]] [[[9.12477016e+00 3.78229904e+01 2.86904850e+01 ... 3.14583836e+01 6.66651058e+00 1.73863277e+01] [4.45199547e+01 2.44408131e+00 3.27855644e+01 ... 3.71078796e+01 3.82851067e+01 4.01659470e+01] [1.55881500e+01 2.29607439e+00 8.72640800e+00 ... 3.24012489e+01 4.62124825e+01 3.31889381e+01] ... [3.58776169e+01 2.38265076e+01 9.51870060e+00 ... 6.11079931e+00 1.33448801e+01 2.32681808e+01] [1.64483757e+01 4.95938873e+01 1.69187565e+01 ... 1.03040171e+01 6.39223051e+00 2.54320507e+01] [3.15642223e+01 1.21370668e+01 4.38380165e+01 ... 3.92780151e+01 2.97630348e+01 2.47373314e+01]]] [[[3.05545731e+01 4.84034157e+00 7.93374968e+00 ... 3.83775353e+00 4.15470619e+01 6.27866650e+00] [8.85504246e+00 1.07667532e+01 4.84338112e+01 ... 4.95667305e+01 1.35353527e+01 4.39670029e+01] [1.91145694e+00 3.83222046e+01 7.88034296e+00 ... 4.73532343e+00 3.63616524e+01 5.10704041e+00] ... [3.51225052e+01 3.63576202e+01 4.54738541e+01 ... 7.31148911e+00 9.47859287e+00 4.28179207e+01] [4.13216896e+01 6.73851776e+00 4.00039444e+01 ... 1.05770445e+01 4.66196747e+01 3.68367691e+01] [1.51097965e+01 3.11068153e+00 1.00049229e+01 ... 5.33564472e+00 1.14497156e+01 2.93095350e+00]]] ... [[[1.35920029e+01 2.30468082e+01 5.85965204e+00 ... 2.57816429e+01 4.58797493e+01 1.95014744e+01] [3.21401811e+00 4.97752342e+01 2.85730019e+01 ... 1.96354675e+01 3.72141838e+01 2.60781231e+01] [3.73124733e+01 4.38428612e+01 4.82690525e+00 ... 1.71708989e+00 3.39105263e+01 7.39046526e+00] ... 
[4.91537046e+00 3.70562859e+01 2.03075428e+01 ... 4.32218742e+01 3.72401886e+01 4.13881378e+01] [4.74443512e+01 4.82625084e+01 4.89046440e+01 ... 5.28351736e+00 1.57168598e+01 4.86631775e+01] [1.47978296e+01 3.74988632e+01 4.36534615e+01 ... 4.89448624e+01 7.65229750e+00 8.26536083e+00]]] [[[4.63621674e+01 1.19319448e+01 4.76463165e+01 ... 4.93666420e+01 2.69621201e+01 5.55375481e+00] [9.49065876e+00 3.16899109e+01 1.48985043e-01 ... 4.67171021e+01 8.98414707e+00 6.07517242e+00] [3.19710045e+01 1.13277197e+01 3.07114620e+01 ... 4.59517431e+00 3.03421535e+01 1.12690792e+01] ... [1.59556856e+01 9.60387766e-01 2.27549477e+01 ... 8.59336758e+00 1.27092619e+01 4.87071915e+01] [4.73956871e+01 2.44074554e+01 3.49501305e+01 ... 4.87582512e+01 3.68695602e+01 3.48120880e+01] [2.74475517e+01 2.08384781e+01 9.50794792e+00 ... 4.25264664e+01 3.46280022e+01 3.63242683e+01]]] [[[1.54532719e+00 2.45356770e+01 1.96828365e+01 ... 2.35565033e+01 2.53737679e+01 3.86136842e+00] [1.63941631e+01 1.48548031e+01 2.18212681e+01 ... 1.25006163e+00 1.86439590e+01 1.37649870e+01] [2.89407291e+01 1.31421442e+01 4.92908525e+00 ... 2.70167103e+01 4.00478249e+01 3.06561909e+01] ... [3.69310760e+01 4.91699219e+01 3.12253046e+00 ... 3.92177391e+01 7.94267273e+00 9.54280376e+00] [2.87923660e+01 3.27761497e+01 1.93267498e+01 ... 2.20724430e+01 3.38479347e+01 3.17832031e+01] [2.00098038e+01 4.60762482e+01 2.91821632e+01 ... 2.12025434e-01 1.48929167e+01 3.70536499e+01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape.py::TestReshape::test_reshape[ ie_device:CPU - precision:FP32 - shape:[24, 12, 12, 1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape.___torch_mangle_5914.aten_reshape, %x.1 : Tensor): %self.shape : int[] = prim::Constant[value=[24, 12, 12, 1]]() %3 : Tensor = aten::reshape(%x.1, %self.shape) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape.py:24:23 return (%3) fw_re: [[[[39.806423 ] [ 2.7895708 ] [19.921888 ] ... [ 3.0493026 ] [15.808392 ] [32.791 ]] [[16.411362 ] [42.695156 ] [ 1.6110846 ] ... [37.215504 ] [13.103263 ] [16.91552 ]] [[47.566837 ] [ 4.1740375 ] [11.523765 ] ... [ 7.5337768 ] [ 2.235737 ] [26.825026 ]] ... [[34.540863 ] [17.076132 ] [ 6.5706553 ] ... [20.179705 ] [ 3.9508119 ] [ 1.6801203 ]] [[19.958313 ] [42.708683 ] [14.041816 ] ... [ 8.476495 ] [30.830936 ] [ 7.6190047 ]] [[20.25883 ] [38.60939 ] [ 7.4167056 ] ... [10.313301 ] [47.62143 ] [19.535816 ]]] [[[ 0.8810152 ] [32.26404 ] [49.77645 ] ... [19.43011 ] [ 4.336459 ] [35.32662 ]] [[11.192679 ] [38.726826 ] [32.668625 ] ... [42.163284 ] [ 3.2786796 ] [17.173931 ]] [[19.269852 ] [26.660086 ] [40.087654 ] ... [26.271196 ] [45.46969 ] [12.669243 ]] ... [[14.192155 ] [40.65957 ] [12.180441 ] ... [43.594364 ] [29.465021 ] [ 1.467143 ]] [[ 7.1470466 ] [44.62577 ] [32.757473 ] ... [16.838985 ] [38.15486 ] [ 6.4397616 ]] [[26.705212 ] [17.622059 ] [ 1.2303479 ] ... [40.861115 ] [36.566357 ] [13.579812 ]]] [[[46.124016 ] [43.54627 ] [17.948843 ] ... [38.466118 ] [32.062187 ] [18.582615 ]] [[ 4.0267925 ] [37.211918 ] [ 8.625769 ] ... [15.755939 ] [37.171497 ] [24.60235 ]] [[ 7.2248225 ] [26.271461 ] [46.799507 ] ... [29.344416 ] [25.498056 ] [29.348904 ]] ... [[ 4.4029045 ] [10.788919 ] [10.156032 ] ... [27.570227 ] [ 9.310703 ] [ 6.4147787 ]] [[32.106182 ] [46.02075 ] [31.280458 ] ... [33.90504 ] [16.909447 ] [43.608078 ]] [[21.371368 ] [29.97107 ] [20.926416 ] ... [12.546458 ] [18.674938 ] [46.98559 ]]] ... [[[36.457737 ] [ 2.984518 ] [ 5.5353904 ] ... 
[32.351936 ] [ 6.7827477 ] [16.846626 ]] [[18.151901 ] [14.822263 ] [ 5.4949536 ] ... [13.061019 ] [49.167034 ] [ 8.238368 ]] [[ 9.508992 ] [25.345371 ] [ 1.728908 ] ... [ 3.245618 ] [ 6.4288006 ] [31.408047 ]] ... [[36.17182 ] [13.126729 ] [ 3.231582 ] ... [19.449291 ] [16.619677 ] [45.111305 ]] [[28.675787 ] [20.791286 ] [14.911005 ] ... [25.878014 ] [ 0.8007139 ] [16.995907 ]] [[ 8.71792 ] [49.28458 ] [42.505554 ] ... [36.15995 ] [38.526978 ] [36.546745 ]]] [[[45.916115 ] [17.005003 ] [ 6.2217503 ] ... [46.200905 ] [21.333673 ] [18.166838 ]] [[41.317413 ] [ 0.7184208 ] [ 1.2857634 ] ... [49.610447 ] [43.935123 ] [33.144497 ]] [[20.705215 ] [42.348072 ] [34.12018 ] ... [21.373213 ] [26.866444 ] [12.195879 ]] ... [[ 2.7732244 ] [24.772324 ] [31.100033 ] ... [ 1.1746922 ] [39.706566 ] [29.602716 ]] [[10.204669 ] [48.962337 ] [35.268055 ] ... [ 6.077876 ] [46.8545 ] [33.237183 ]] [[48.78783 ] [39.339314 ] [20.5728 ] ... [23.452923 ] [37.196648 ] [22.037668 ]]] [[[26.332275 ] [21.601418 ] [33.78942 ] ... [ 0.07659575] [26.176153 ] [ 2.4071925 ]] [[20.255844 ] [28.586853 ] [ 2.1402223 ] ... [16.606482 ] [10.929699 ] [ 6.385404 ]] [[27.96741 ] [49.49624 ] [46.970726 ] ... [ 2.6919334 ] [ 1.200035 ] [14.713077 ]] ... [[14.982337 ] [18.524086 ] [15.226788 ] ... [27.231241 ] [ 5.806734 ] [41.815144 ]] [[42.621254 ] [24.799278 ] [46.53613 ] ... [ 7.2565355 ] [18.629042 ] [45.797768 ]] [[34.77467 ] [11.4197 ] [36.031467 ] ... [37.396294 ] [ 2.6014254 ] [29.36621 ]]]]; ov_res: [[[[39.806423 ] [ 2.7895708 ] [19.921888 ] ... [ 3.0493026 ] [15.808392 ] [32.791 ]] [[16.411362 ] [42.695156 ] [ 1.6110846 ] ... [37.215504 ] [13.103263 ] [16.91552 ]] [[47.566837 ] [ 4.1740375 ] [11.523765 ] ... [ 7.5337768 ] [ 2.235737 ] [26.825026 ]] ... [[34.540863 ] [17.076132 ] [ 6.5706553 ] ... [20.179705 ] [ 3.9508119 ] [ 1.6801203 ]] [[19.958313 ] [42.708683 ] [14.041816 ] ... [ 8.476495 ] [30.830936 ] [ 7.6190047 ]] [[20.25883 ] [38.60939 ] [ 7.4167056 ] ... 
[10.313301 ] [47.62143 ] [19.535816 ]]] [[[ 0.8810152 ] [32.26404 ] [49.77645 ] ... [19.43011 ] [ 4.336459 ] [35.32662 ]] [[11.192679 ] [38.726826 ] [32.668625 ] ... [42.163284 ] [ 3.2786796 ] [17.173931 ]] [[19.269852 ] [26.660086 ] [40.087654 ] ... [26.271196 ] [45.46969 ] [12.669243 ]] ... [[14.192155 ] [40.65957 ] [12.180441 ] ... [43.594364 ] [29.465021 ] [ 1.467143 ]] [[ 7.1470466 ] [44.62577 ] [32.757473 ] ... [16.838985 ] [38.15486 ] [ 6.4397616 ]] [[26.705212 ] [17.622059 ] [ 1.2303479 ] ... [40.861115 ] [36.566357 ] [13.579812 ]]] [[[46.124016 ] [43.54627 ] [17.948843 ] ... [38.466118 ] [32.062187 ] [18.582615 ]] [[ 4.0267925 ] [37.211918 ] [ 8.625769 ] ... [15.755939 ] [37.171497 ] [24.60235 ]] [[ 7.2248225 ] [26.271461 ] [46.799507 ] ... [29.344416 ] [25.498056 ] [29.348904 ]] ... [[ 4.4029045 ] [10.788919 ] [10.156032 ] ... [27.570227 ] [ 9.310703 ] [ 6.4147787 ]] [[32.106182 ] [46.02075 ] [31.280458 ] ... [33.90504 ] [16.909447 ] [43.608078 ]] [[21.371368 ] [29.97107 ] [20.926416 ] ... [12.546458 ] [18.674938 ] [46.98559 ]]] ... [[[36.457737 ] [ 2.984518 ] [ 5.5353904 ] ... [32.351936 ] [ 6.7827477 ] [16.846626 ]] [[18.151901 ] [14.822263 ] [ 5.4949536 ] ... [13.061019 ] [49.167034 ] [ 8.238368 ]] [[ 9.508992 ] [25.345371 ] [ 1.728908 ] ... [ 3.245618 ] [ 6.4288006 ] [31.408047 ]] ... [[36.17182 ] [13.126729 ] [ 3.231582 ] ... [19.449291 ] [16.619677 ] [45.111305 ]] [[28.675787 ] [20.791286 ] [14.911005 ] ... [25.878014 ] [ 0.8007139 ] [16.995907 ]] [[ 8.71792 ] [49.28458 ] [42.505554 ] ... [36.15995 ] [38.526978 ] [36.546745 ]]] [[[45.916115 ] [17.005003 ] [ 6.2217503 ] ... [46.200905 ] [21.333673 ] [18.166838 ]] [[41.317413 ] [ 0.7184208 ] [ 1.2857634 ] ... [49.610447 ] [43.935123 ] [33.144497 ]] [[20.705215 ] [42.348072 ] [34.12018 ] ... [21.373213 ] [26.866444 ] [12.195879 ]] ... [[ 2.7732244 ] [24.772324 ] [31.100033 ] ... [ 1.1746922 ] [39.706566 ] [29.602716 ]] [[10.204669 ] [48.962337 ] [35.268055 ] ... 
[ 6.077876 ] [46.8545 ] [33.237183 ]] [[48.78783 ] [39.339314 ] [20.5728 ] ... [23.452923 ] [37.196648 ] [22.037668 ]]] [[[26.332275 ] [21.601418 ] [33.78942 ] ... [ 0.07659575] [26.176153 ] [ 2.4071925 ]] [[20.255844 ] [28.586853 ] [ 2.1402223 ] ... [16.606482 ] [10.929699 ] [ 6.385404 ]] [[27.96741 ] [49.49624 ] [46.970726 ] ... [ 2.6919334 ] [ 1.200035 ] [14.713077 ]] ... [[14.982337 ] [18.524086 ] [15.226788 ] ... [27.231241 ] [ 5.806734 ] [41.815144 ]] [[42.621254 ] [24.799278 ] [46.53613 ] ... [ 7.2565355 ] [18.629042 ] [45.797768 ]] [[34.77467 ] [11.4197 ] [36.031467 ] ... [37.396294 ] [ 2.6014254 ] [29.36621 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape.py::TestReshape::test_reshape[ ie_device:CPU - precision:FP32 - shape:[24, 12, 12, -1] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape.___torch_mangle_5916.aten_reshape, %x.1 : Tensor): %self.shape : int[] = prim::Constant[value=[24, 12, 12, -1]]() %3 : Tensor = aten::reshape(%x.1, %self.shape) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape.py:24:23 return (%3) fw_re: [[[[15.741854 ] [ 2.6866467 ] [ 7.0028462 ] ... [21.813791 ] [34.94894 ] [ 2.8993511 ]] [[43.411125 ] [41.524593 ] [ 4.466094 ] ... [30.8726 ] [14.767341 ] [18.28823 ]] [[18.234241 ] [45.31887 ] [35.450493 ] ... [31.878248 ] [10.348144 ] [48.29353 ]] ... [[ 2.9207938 ] [39.862167 ] [37.555943 ] ... [49.120426 ] [ 9.262516 ] [45.45958 ]] [[37.22643 ] [42.069233 ] [37.593742 ] ... [23.30567 ] [49.16819 ] [44.616947 ]] [[ 5.023994 ] [17.733995 ] [27.342916 ] ... [30.673586 ] [13.608114 ] [16.074522 ]]] [[[35.83877 ] [40.70302 ] [17.555372 ] ... [36.000713 ] [11.105392 ] [39.045822 ]] [[ 3.9920154 ] [31.873268 ] [23.309355 ] ... [12.486652 ] [34.035324 ] [47.631786 ]] [[46.011875 ] [ 4.8298016 ] [49.097706 ] ... [22.647192 ] [44.7116 ] [20.83963 ]] ... [[18.098707 ] [10.584529 ] [35.62148 ] ... [42.536114 ] [39.42544 ] [ 8.874666 ]] [[24.219086 ] [12.977008 ] [37.205227 ] ... [31.657715 ] [26.779938 ] [42.45985 ]] [[16.121262 ] [19.564959 ] [34.166553 ] ... [46.264225 ] [11.810879 ] [49.07163 ]]] [[[ 9.507741 ] [38.30286 ] [45.679726 ] ... [ 1.3598024 ] [ 1.0040308 ] [18.287382 ]] [[23.21021 ] [44.723507 ] [14.567687 ] ... [14.120673 ] [34.753548 ] [39.026173 ]] [[24.33837 ] [37.701015 ] [25.226965 ] ... [11.955532 ] [27.079508 ] [19.72311 ]] ... [[19.095375 ] [47.526928 ] [47.580505 ] ... [38.51352 ] [ 3.7013574 ] [19.453974 ]] [[46.947624 ] [33.578854 ] [32.239147 ] ... [28.544636 ] [21.187904 ] [46.578648 ]] [[19.164623 ] [21.495195 ] [14.54771 ] ... [26.730131 ] [42.114677 ] [49.168438 ]]] ... [[[45.291553 ] [13.277549 ] [ 0.36487284] ... 
[44.29767 ] [46.079357 ] [40.161068 ]] [[32.519115 ] [ 4.578716 ] [39.009598 ] ... [17.588215 ] [38.17267 ] [35.95483 ]] [[13.925939 ] [ 4.5969143 ] [17.10045 ] ... [13.761153 ] [15.202091 ] [46.592815 ]] ... [[11.883511 ] [ 2.009418 ] [39.82004 ] ... [26.047707 ] [10.065341 ] [30.007198 ]] [[41.560593 ] [22.297895 ] [16.280886 ] ... [16.22095 ] [45.79855 ] [ 4.678759 ]] [[44.691593 ] [37.73294 ] [19.735237 ] ... [46.688072 ] [15.362439 ] [24.07171 ]]] [[[ 9.232005 ] [17.499035 ] [13.853765 ] ... [45.393955 ] [16.417307 ] [31.014269 ]] [[22.483597 ] [46.739548 ] [49.471703 ] ... [22.739286 ] [47.551517 ] [17.46261 ]] [[ 9.664301 ] [38.448753 ] [21.381598 ] ... [32.374546 ] [33.644314 ] [ 7.4208045 ]] ... [[26.030622 ] [26.331749 ] [43.719025 ] ... [36.275932 ] [40.721382 ] [13.062421 ]] [[17.913275 ] [42.332615 ] [ 2.786404 ] ... [22.953209 ] [33.06029 ] [49.373856 ]] [[ 0.4129622 ] [26.692825 ] [39.11466 ] ... [22.669323 ] [43.72773 ] [13.837373 ]]] [[[35.14234 ] [16.531551 ] [15.402394 ] ... [14.398171 ] [12.673191 ] [25.513073 ]] [[21.976147 ] [22.220575 ] [34.015285 ] ... [ 3.1601636 ] [38.88925 ] [41.410812 ]] [[24.931913 ] [30.036732 ] [48.582653 ] ... [36.436886 ] [49.952534 ] [10.397443 ]] ... [[21.547552 ] [ 3.906385 ] [44.746895 ] ... [39.526096 ] [35.63583 ] [ 0.55080205]] [[35.975452 ] [37.81862 ] [ 2.0279312 ] ... [ 6.2075105 ] [48.53282 ] [16.586355 ]] [[18.959074 ] [ 3.1548817 ] [22.955698 ] ... [46.43304 ] [ 5.042713 ] [24.762842 ]]]]; ov_res: [[[[15.741854 ] [ 2.6866467 ] [ 7.0028462 ] ... [21.813791 ] [34.94894 ] [ 2.8993511 ]] [[43.411125 ] [41.524593 ] [ 4.466094 ] ... [30.8726 ] [14.767341 ] [18.28823 ]] [[18.234241 ] [45.31887 ] [35.450493 ] ... [31.878248 ] [10.348144 ] [48.29353 ]] ... [[ 2.9207938 ] [39.862167 ] [37.555943 ] ... [49.120426 ] [ 9.262516 ] [45.45958 ]] [[37.22643 ] [42.069233 ] [37.593742 ] ... [23.30567 ] [49.16819 ] [44.616947 ]] [[ 5.023994 ] [17.733995 ] [27.342916 ] ... 
[30.673586 ] [13.608114 ] [16.074522 ]]] [[[35.83877 ] [40.70302 ] [17.555372 ] ... [36.000713 ] [11.105392 ] [39.045822 ]] [[ 3.9920154 ] [31.873268 ] [23.309355 ] ... [12.486652 ] [34.035324 ] [47.631786 ]] [[46.011875 ] [ 4.8298016 ] [49.097706 ] ... [22.647192 ] [44.7116 ] [20.83963 ]] ... [[18.098707 ] [10.584529 ] [35.62148 ] ... [42.536114 ] [39.42544 ] [ 8.874666 ]] [[24.219086 ] [12.977008 ] [37.205227 ] ... [31.657715 ] [26.779938 ] [42.45985 ]] [[16.121262 ] [19.564959 ] [34.166553 ] ... [46.264225 ] [11.810879 ] [49.07163 ]]] [[[ 9.507741 ] [38.30286 ] [45.679726 ] ... [ 1.3598024 ] [ 1.0040308 ] [18.287382 ]] [[23.21021 ] [44.723507 ] [14.567687 ] ... [14.120673 ] [34.753548 ] [39.026173 ]] [[24.33837 ] [37.701015 ] [25.226965 ] ... [11.955532 ] [27.079508 ] [19.72311 ]] ... [[19.095375 ] [47.526928 ] [47.580505 ] ... [38.51352 ] [ 3.7013574 ] [19.453974 ]] [[46.947624 ] [33.578854 ] [32.239147 ] ... [28.544636 ] [21.187904 ] [46.578648 ]] [[19.164623 ] [21.495195 ] [14.54771 ] ... [26.730131 ] [42.114677 ] [49.168438 ]]] ... [[[45.291553 ] [13.277549 ] [ 0.36487284] ... [44.29767 ] [46.079357 ] [40.161068 ]] [[32.519115 ] [ 4.578716 ] [39.009598 ] ... [17.588215 ] [38.17267 ] [35.95483 ]] [[13.925939 ] [ 4.5969143 ] [17.10045 ] ... [13.761153 ] [15.202091 ] [46.592815 ]] ... [[11.883511 ] [ 2.009418 ] [39.82004 ] ... [26.047707 ] [10.065341 ] [30.007198 ]] [[41.560593 ] [22.297895 ] [16.280886 ] ... [16.22095 ] [45.79855 ] [ 4.678759 ]] [[44.691593 ] [37.73294 ] [19.735237 ] ... [46.688072 ] [15.362439 ] [24.07171 ]]] [[[ 9.232005 ] [17.499035 ] [13.853765 ] ... [45.393955 ] [16.417307 ] [31.014269 ]] [[22.483597 ] [46.739548 ] [49.471703 ] ... [22.739286 ] [47.551517 ] [17.46261 ]] [[ 9.664301 ] [38.448753 ] [21.381598 ] ... [32.374546 ] [33.644314 ] [ 7.4208045 ]] ... [[26.030622 ] [26.331749 ] [43.719025 ] ... [36.275932 ] [40.721382 ] [13.062421 ]] [[17.913275 ] [42.332615 ] [ 2.786404 ] ... 
[22.953209 ] [33.06029 ] [49.373856 ]] [[ 0.4129622 ] [26.692825 ] [39.11466 ] ... [22.669323 ] [43.72773 ] [13.837373 ]]] [[[35.14234 ] [16.531551 ] [15.402394 ] ... [14.398171 ] [12.673191 ] [25.513073 ]] [[21.976147 ] [22.220575 ] [34.015285 ] ... [ 3.1601636 ] [38.88925 ] [41.410812 ]] [[24.931913 ] [30.036732 ] [48.582653 ] ... [36.436886 ] [49.952534 ] [10.397443 ]] ... [[21.547552 ] [ 3.906385 ] [44.746895 ] ... [39.526096 ] [35.63583 ] [ 0.55080205]] [[35.975452 ] [37.81862 ] [ 2.0279312 ] ... [ 6.2075105 ] [48.53282 ] [16.586355 ]] [[18.959074 ] [ 3.1548817 ] [22.955698 ] ... [46.43304 ] [ 5.042713 ] [24.762842 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape.py::TestReshape::test_reshape[ ie_device:CPU - precision:FP32 - shape:[24, 1, -1, 12] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape.___torch_mangle_5918.aten_reshape, %x.1 : Tensor): %self.shape : int[] = prim::Constant[value=[24, 1, -1, 12]]() %3 : Tensor = aten::reshape(%x.1, %self.shape) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape.py:24:23 return (%3) fw_re: [[[[15.788093 34.793568 33.04543 ... 16.5781 41.983536 23.641876 ] [ 3.9155817 15.05663 22.445633 ... 41.048973 16.213432 42.289135 ] [ 7.4257164 5.3159943 41.93372 ... 34.358906 11.061946 41.569126 ] ... [47.214157 7.442552 3.6249537 ... 27.165976 34.34181 27.582022 ] [13.697121 4.162414 12.228704 ... 7.469851 10.110659 22.294657 ] [ 2.9560504 26.776514 43.67059 ... 43.59204 30.832214 5.8971367]]] [[[ 8.876285 44.798054 46.650425 ... 25.970875 4.4366007 36.557102 ] [33.119644 31.670925 15.440138 ... 31.568016 16.22244 41.35433 ] [44.760326 19.063385 3.1875994 ... 38.904636 26.175064 21.58051 ] ... [ 6.922452 24.168243 24.150116 ... 17.319702 5.2019796 39.48145 ] [ 1.4974105 12.1715145 48.697643 ... 38.97118 32.04798 3.8222208] [18.376196 49.13608 8.889984 ... 44.844402 32.30349 13.940979 ]]] [[[29.720144 14.392327 35.52491 ... 33.66903 20.519157 32.173023 ] [ 5.1841426 23.367117 22.339388 ... 7.1515145 36.94016 41.164745 ] [ 3.9734848 37.38346 45.224407 ... 38.212433 2.9779847 21.857248 ] ... [45.65822 47.21949 37.47646 ... 10.6392 20.07246 38.33766 ] [ 7.161862 17.723047 3.298524 ... 46.536602 49.270084 17.31456 ] [41.78235 38.40221 12.105615 ... 4.995526 30.687376 7.8755016]]] ... [[[43.528656 13.6389475 8.351718 ... 37.48901 2.5989707 12.061321 ] [29.27392 26.200102 27.609911 ... 21.569408 39.074932 9.305609 ] [32.28791 21.813637 20.224531 ... 10.877099 43.678833 30.64447 ] ... [31.897102 11.069169 31.331614 ... 1.9378799 35.367992 26.299656 ] [25.157091 44.500187 26.721947 ... 46.49721 7.431057 31.082275 ] [20.815796 35.143623 39.057854 ... 
35.061813 38.884304 22.82573 ]]] [[[13.510603 34.715664 30.315907 ... 49.846283 12.764274 8.605845 ] [26.62513 24.640667 46.918446 ... 43.25964 30.553349 4.8534336] [ 7.270904 34.337822 43.32324 ... 4.4132123 12.243555 19.534626 ] ... [40.431988 31.063745 27.138317 ... 19.087795 7.5541277 5.4057674] [17.933443 13.045565 11.849764 ... 41.418427 11.039473 15.867338 ] [16.708078 37.245728 5.105751 ... 22.148613 30.878672 20.390738 ]]] [[[37.394226 25.507265 43.17727 ... 4.4883556 23.317022 23.024208 ] [33.850853 15.563236 14.937105 ... 37.949726 39.44647 12.866569 ] [25.853436 37.24691 32.91208 ... 18.364231 21.746595 33.94134 ] ... [ 9.496864 2.6585488 44.40723 ... 13.780339 28.99502 3.7813225] [23.728733 47.506203 44.05098 ... 36.669964 33.869858 47.16973 ] [ 7.7687573 24.119528 2.2963445 ... 10.45466 9.108515 29.321636 ]]]]; ov_res: [[[[15.788093 34.793568 33.04543 ... 16.5781 41.983536 23.641876 ] [ 3.9155817 15.05663 22.445633 ... 41.048973 16.213432 42.289135 ] [ 7.4257164 5.3159943 41.93372 ... 34.358906 11.061946 41.569126 ] ... [47.214157 7.442552 3.6249537 ... 27.165976 34.34181 27.582022 ] [13.697121 4.162414 12.228704 ... 7.469851 10.110659 22.294657 ] [ 2.9560504 26.776514 43.67059 ... 43.59204 30.832214 5.8971367]]] [[[ 8.876285 44.798054 46.650425 ... 25.970875 4.4366007 36.557102 ] [33.119644 31.670925 15.440138 ... 31.568016 16.22244 41.35433 ] [44.760326 19.063385 3.1875994 ... 38.904636 26.175064 21.58051 ] ... [ 6.922452 24.168243 24.150116 ... 17.319702 5.2019796 39.48145 ] [ 1.4974105 12.1715145 48.697643 ... 38.97118 32.04798 3.8222208] [18.376196 49.13608 8.889984 ... 44.844402 32.30349 13.940979 ]]] [[[29.720144 14.392327 35.52491 ... 33.66903 20.519157 32.173023 ] [ 5.1841426 23.367117 22.339388 ... 7.1515145 36.94016 41.164745 ] [ 3.9734848 37.38346 45.224407 ... 38.212433 2.9779847 21.857248 ] ... [45.65822 47.21949 37.47646 ... 10.6392 20.07246 38.33766 ] [ 7.161862 17.723047 3.298524 ... 
46.536602 49.270084 17.31456 ] [41.78235 38.40221 12.105615 ... 4.995526 30.687376 7.8755016]]] ... [[[43.528656 13.6389475 8.351718 ... 37.48901 2.5989707 12.061321 ] [29.27392 26.200102 27.609911 ... 21.569408 39.074932 9.305609 ] [32.28791 21.813637 20.224531 ... 10.877099 43.678833 30.64447 ] ... [31.897102 11.069169 31.331614 ... 1.9378799 35.367992 26.299656 ] [25.157091 44.500187 26.721947 ... 46.49721 7.431057 31.082275 ] [20.815796 35.143623 39.057854 ... 35.061813 38.884304 22.82573 ]]] [[[13.510603 34.715664 30.315907 ... 49.846283 12.764274 8.605845 ] [26.62513 24.640667 46.918446 ... 43.25964 30.553349 4.8534336] [ 7.270904 34.337822 43.32324 ... 4.4132123 12.243555 19.534626 ] ... [40.431988 31.063745 27.138317 ... 19.087795 7.5541277 5.4057674] [17.933443 13.045565 11.849764 ... 41.418427 11.039473 15.867338 ] [16.708078 37.245728 5.105751 ... 22.148613 30.878672 20.390738 ]]] [[[37.394226 25.507265 43.17727 ... 4.4883556 23.317022 23.024208 ] [33.850853 15.563236 14.937105 ... 37.949726 39.44647 12.866569 ] [25.853436 37.24691 32.91208 ... 18.364231 21.746595 33.94134 ] ... [ 9.496864 2.6585488 44.40723 ... 13.780339 28.99502 3.7813225] [23.728733 47.506203 44.05098 ... 36.669964 33.869858 47.16973 ] [ 7.7687573 24.119528 2.2963445 ... 10.45466 9.108515 29.321636 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape.py::TestReshape::test_reshape[ ie_device:CPU - precision:FP32 - shape:[24, 1, 1, -1, 12] ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape.___torch_mangle_5920.aten_reshape, %x.1 : Tensor): %self.shape : int[] = prim::Constant[value=[24, 1, 1, -1, 12]]() %3 : Tensor = aten::reshape(%x.1, %self.shape) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape.py:24:23 return (%3) fw_re: [[[[[43.214058 26.948792 42.859688 ... 1.7854401 26.54053 20.003302 ] [49.834476 33.180836 32.24122 ... 31.440456 11.272105 28.044922 ] [34.840702 2.821228 29.431997 ... 26.007639 41.15602 3.7078426] ... [11.422832 46.35824 24.569628 ... 26.040476 46.576656 8.793921 ] [17.607452 3.2150855 8.745999 ... 39.661396 19.854893 27.897408 ] [13.282244 2.6675353 1.9639188 ... 15.581526 35.19136 22.615696 ]]]] [[[[30.280254 15.36742 3.2986748 ... 37.547615 46.704273 17.579159 ] [ 9.015608 19.544231 12.810098 ... 18.775202 5.2668924 33.24066 ] [35.90977 35.314262 18.115091 ... 13.446822 45.116394 27.543198 ] ... [44.904556 13.939851 15.353551 ... 36.633945 29.0467 45.476997 ] [23.619177 42.967533 44.01484 ... 35.61658 39.970085 7.305001 ] [11.660414 9.528274 37.62465 ... 10.119239 4.7622833 14.125758 ]]]] [[[[14.100004 23.767586 20.200233 ... 11.613694 24.128677 1.1400197] [28.920284 44.47071 29.926056 ... 29.654715 37.87119 21.593273 ] [ 5.662272 47.54603 5.240099 ... 16.898483 48.9979 20.544447 ] ... [32.754997 48.39149 42.461716 ... 39.76232 3.2359982 13.109758 ] [ 8.424167 28.392347 13.097396 ... 11.426406 18.075806 13.934 ] [45.78081 43.047993 3.3070471 ... 46.049934 31.932936 41.46055 ]]]] ... [[[[44.55161 6.524262 19.865091 ... 7.134308 8.949043 32.29087 ] [ 9.954808 12.923673 1.7178999 ... 23.27575 24.965836 23.982378 ] [14.5958805 2.2941353 49.908684 ... 7.858024 6.8217916 10.116309 ] ... [ 8.250134 20.891144 25.948338 ... 31.894258 28.317175 35.13854 ] [22.253176 33.45002 5.242873 ... 40.712643 36.05099 10.52757 ] [ 3.2437232 19.864428 49.53829 ... 
49.06542 18.07497 49.53551 ]]]] [[[[34.955093 27.175358 0.8551262 ... 21.540035 41.061077 18.926525 ] [ 6.137609 5.568971 7.4538445 ... 31.151716 30.653152 9.503053 ] [ 5.901727 43.05227 3.5734918 ... 26.423138 1.700483 42.165054 ] ... [11.349985 28.854202 42.961163 ... 14.616302 45.497204 23.561779 ] [34.036263 44.502563 8.01095 ... 3.1212223 8.778037 19.227379 ] [48.23548 14.734547 13.7357 ... 30.119112 37.76698 25.806496 ]]]] [[[[35.4535 46.34373 21.737389 ... 3.0182688 48.271255 39.76398 ] [14.282765 9.164708 25.863571 ... 18.46296 5.955773 32.083725 ] [34.48547 49.812473 8.907038 ... 27.57446 16.679949 19.750977 ] ... [30.655334 46.39153 9.122889 ... 44.413128 17.941181 30.464367 ] [10.040378 36.259327 8.04113 ... 40.561687 29.837313 18.064528 ] [ 5.8698654 4.8514457 0.091181 ... 9.827935 25.083113 13.581157 ]]]]]; ov_res: [[[[[43.214058 26.948792 42.859688 ... 1.7854401 26.54053 20.003302 ] [49.834476 33.180836 32.24122 ... 31.440456 11.272105 28.044922 ] [34.840702 2.821228 29.431997 ... 26.007639 41.15602 3.7078426] ... [11.422832 46.35824 24.569628 ... 26.040476 46.576656 8.793921 ] [17.607452 3.2150855 8.745999 ... 39.661396 19.854893 27.897408 ] [13.282244 2.6675353 1.9639188 ... 15.581526 35.19136 22.615696 ]]]] [[[[30.280254 15.36742 3.2986748 ... 37.547615 46.704273 17.579159 ] [ 9.015608 19.544231 12.810098 ... 18.775202 5.2668924 33.24066 ] [35.90977 35.314262 18.115091 ... 13.446822 45.116394 27.543198 ] ... [44.904556 13.939851 15.353551 ... 36.633945 29.0467 45.476997 ] [23.619177 42.967533 44.01484 ... 35.61658 39.970085 7.305001 ] [11.660414 9.528274 37.62465 ... 10.119239 4.7622833 14.125758 ]]]] [[[[14.100004 23.767586 20.200233 ... 11.613694 24.128677 1.1400197] [28.920284 44.47071 29.926056 ... 29.654715 37.87119 21.593273 ] [ 5.662272 47.54603 5.240099 ... 16.898483 48.9979 20.544447 ] ... [32.754997 48.39149 42.461716 ... 39.76232 3.2359982 13.109758 ] [ 8.424167 28.392347 13.097396 ... 
11.426406 18.075806 13.934 ] [45.78081 43.047993 3.3070471 ... 46.049934 31.932936 41.46055 ]]]] ... [[[[44.55161 6.524262 19.865091 ... 7.134308 8.949043 32.29087 ] [ 9.954808 12.923673 1.7178999 ... 23.27575 24.965836 23.982378 ] [14.5958805 2.2941353 49.908684 ... 7.858024 6.8217916 10.116309 ] ... [ 8.250134 20.891144 25.948338 ... 31.894258 28.317175 35.13854 ] [22.253176 33.45002 5.242873 ... 40.712643 36.05099 10.52757 ] [ 3.2437232 19.864428 49.53829 ... 49.06542 18.07497 49.53551 ]]]] [[[[34.955093 27.175358 0.8551262 ... 21.540035 41.061077 18.926525 ] [ 6.137609 5.568971 7.4538445 ... 31.151716 30.653152 9.503053 ] [ 5.901727 43.05227 3.5734918 ... 26.423138 1.700483 42.165054 ] ... [11.349985 28.854202 42.961163 ... 14.616302 45.497204 23.561779 ] [34.036263 44.502563 8.01095 ... 3.1212223 8.778037 19.227379 ] [48.23548 14.734547 13.7357 ... 30.119112 37.76698 25.806496 ]]]] [[[[35.4535 46.34373 21.737389 ... 3.0182688 48.271255 39.76398 ] [14.282765 9.164708 25.863571 ... 18.46296 5.955773 32.083725 ] [34.48547 49.812473 8.907038 ... 27.57446 16.679949 19.750977 ] ... [30.655334 46.39153 9.122889 ... 44.413128 17.941181 30.464367 ] [10.040378 36.259327 8.04113 ... 40.561687 29.837313 18.064528 ] [ 5.8698654 4.8514457 0.091181 ... 9.827935 25.083113 13.581157 ]]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape_as.py::TestReshapeAs::test_reshape_as[ ie_device:CPU - precision:FP32 - input_tesnors:(array([[1., 1., 1., 1., 1., 1.], [1., 1., 1., 1., 1., 1.], [1., 1., 1., 1., 1., 1.]]), array([[1., 1., 1., 1., 1., 1., 1., 1., 1.], [1., 1., 1., 1., 1., 1., 1., 1., 1.]])) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape_as.___torch_mangle_5921.aten_reshape_as, %input_tensor.1 : Tensor, %shape_tensor.1 : Tensor): %3 : Tensor = aten::reshape_as(%input_tensor.1, %shape_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape_as.py:23:23 return (%3) fw_re: [[1. 1. 1. 1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1. 1. 1. 1.]]; ov_res: [[1. 1. 1. 1. 1. 1. 1. 1. 1.] [1. 1. 1. 1. 1. 1. 1. 1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape_as.py::TestReshapeAs::test_reshape_as[ ie_device:CPU - precision:FP32 - input_tesnors:(array([[[1., 1., 1.], [1., 1., 1.]], [[1., 1., 1.], [1., 1., 1.]]]), array([[1., 1.], [1., 1.], [1., 1.], [1., 1.], [1., 1.], [1., 1.]])) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape_as.___torch_mangle_5923.aten_reshape_as, %input_tensor.1 : Tensor, %shape_tensor.1 : Tensor): %3 : Tensor = aten::reshape_as(%input_tensor.1, %shape_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape_as.py:23:23 return (%3) fw_re: [[1. 1.] [1. 1.] [1. 1.] [1. 1.] [1. 1.] [1. 1.]]; ov_res: [[1. 1.] [1. 1.] [1. 1.] [1. 1.] [1. 1.] [1. 1.]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_reshape_as.py::TestReshapeAs::test_reshape_as[ ie_device:CPU - precision:FP32 - input_tesnors:(array([[1., 1.], [1., 1.], [1., 1.], [1., 1.], [1., 1.], [1., 1.]]), array([[[1., 1., 1.], [1., 1., 1.]], [[1., 1., 1.], [1., 1., 1.]]])) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_reshape_as.___torch_mangle_5925.aten_reshape_as, %input_tensor.1 : Tensor, %shape_tensor.1 : Tensor): %3 : Tensor = aten::reshape_as(%input_tensor.1, %shape_tensor.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_reshape_as.py:23:23 return (%3) fw_re: [[[1. 1. 1.] [1. 1. 1.]] [[1. 1. 1.] [1. 1. 1.]]]; ov_res: [[[1. 1. 1.] [1. 1. 1.]] [[1. 1. 1.] [1. 1. 1.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_roll.py::TestRoll::test_roll[ ie_device:CPU - precision:FP32 - shifts:(2, 1) - dim:(0, 1) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_roll.___torch_mangle_5926.aten_roll, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1]]() %3 : int[] = prim::Constant[value=[2, 1]]() %4 : Tensor = aten::roll(%x.1, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_roll.py:25:27 return (%4) fw_re: [[[43.779327 6.206847 43.892574 32.53343 ] [ 1.6036135 7.319029 33.53737 31.350613 ] [43.217426 38.568054 8.864107 2.5805829]] [[ 9.28204 2.134409 37.784073 25.441715 ] [ 0.7494049 48.71506 11.600629 45.854927 ] [21.721954 17.51684 22.825802 2.6171362]]]; ov_res: [[[43.779327 6.206847 43.892574 32.53343 ] [ 1.6036135 7.319029 33.53737 31.350613 ] [43.217426 38.568054 8.864107 2.5805829]] [[ 9.28204 2.134409 37.784073 25.441715 ] [ 0.7494049 48.71506 11.600629 45.854927 ] [21.721954 17.51684 22.825802 2.6171362]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_roll.py::TestRoll::test_roll[ ie_device:CPU - precision:FP32 - shifts:1 - dim:0 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_roll.___torch_mangle_5928.aten_roll, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0]]() %3 : int[] = prim::Constant[value=[1]]() %4 : Tensor = aten::roll(%x.1, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_roll.py:25:27 return (%4) fw_re: [[[33.074432 3.8782208 12.233037 40.633595 ] [11.175632 4.05933 37.532974 14.143234 ] [40.055496 8.729509 27.080696 11.940053 ]] [[46.40709 48.839264 39.995045 2.6680393] [43.926144 38.975407 29.986296 49.845905 ] [22.552992 18.88922 11.485083 5.3325424]]]; ov_res: [[[33.074432 3.8782208 12.233037 40.633595 ] [11.175632 4.05933 37.532974 14.143234 ] [40.055496 8.729509 27.080696 11.940053 ]] [[46.40709 48.839264 39.995045 2.6680393] [43.926144 38.975407 29.986296 49.845905 ] [22.552992 18.88922 11.485083 5.3325424]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_roll.py::TestRoll::test_roll[ ie_device:CPU - precision:FP32 - shifts:-1 - dim:0 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_roll.___torch_mangle_5930.aten_roll, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0]]() %3 : int[] = prim::Constant[value=[-1]]() %4 : Tensor = aten::roll(%x.1, %3, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_roll.py:25:27 return (%4) fw_re: [[[18.41136 23.92729 28.897709 22.065594 ] [ 4.0223413 25.283043 17.091574 40.375904 ] [40.697323 27.505445 0.18168364 23.412886 ]] [[38.29332 25.521086 11.981408 32.71785 ] [12.856269 29.15624 25.48407 27.737955 ] [ 0.56560206 23.584288 30.05561 34.62032 ]]]; ov_res: [[[18.41136 23.92729 28.897709 22.065594 ] [ 4.0223413 25.283043 17.091574 40.375904 ] [40.697323 27.505445 0.18168364 23.412886 ]] [[38.29332 25.521086 11.981408 32.71785 ] [12.856269 29.15624 25.48407 27.737955 ] [ 0.56560206 23.584288 30.05561 34.62032 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_roll.py::TestRoll::test_roll[ ie_device:CPU - precision:FP32 - shifts:1 - dim:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_roll.___torch_mangle_5932.aten_roll, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %3 : int[] = prim::Constant[value=annotate(List[int], [])]() %4 : Tensor = aten::roll(%x.1, %2, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_roll.py:26:23 return (%4) fw_re: [[[32.774307 2.413307 18.835558 35.18564 ] [48.611794 33.166294 15.215236 31.419525 ] [30.331865 9.174405 44.997395 46.002678 ]] [[43.980156 42.872993 33.50415 8.248318 ] [ 1.8654977 18.836626 7.5554 30.263622 ] [25.236397 33.08187 21.210543 5.234162 ]]]; ov_res: [[[32.774307 2.413307 18.835558 35.18564 ] [48.611794 33.166294 15.215236 31.419525 ] [30.331865 9.174405 44.997395 46.002678 ]] [[43.980156 42.872993 33.50415 8.248318 ] [ 1.8654977 18.836626 7.5554 30.263622 ] [25.236397 33.08187 21.210543 5.234162 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_rsqrt.py::TestRSqrt::test_relu[ ie_device:CPU - precision:FP32 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_rsqrt.___torch_mangle_5933.aten_rsqrt, %x.1 : Tensor): %2 : Tensor = aten::rsqrt(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_rsqrt.py:21:23 return (%2) fw_re: [[ nan nan 1.3155662 1.2423552 1.3639994 nan 1.5632172 1.6633326 nan 0.89538676]]; ov_res: [[ nan nan 1.3155662 1.2423552 1.3639994 nan 1.5632172 1.6633326 nan 0.89538676]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-3 - input_dim:-3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5934.aten_select, %input_tensor.1 : Tensor): %self.dim : int = prim::Constant[value=-3]() %3 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%3) fw_re: [[[ 2.2462235e-01 5.1707250e-01 2.3679417e-01 1.0853380e+00 -7.2688043e-01] [ 8.8938797e-01 2.3403245e-01 5.8067757e-01 9.5022959e-01 -2.2380464e+00] [-1.0760221e+00 7.5981736e-01 -7.1150291e-01 -7.3767692e-01 1.8249378e+00] [-5.7179809e-02 2.7801434e-02 -2.5531994e-02 -1.8001382e+00 -8.7505236e-02] [ 2.4337971e-01 -6.8531936e-01 -2.2887418e+00 7.4236089e-01 -2.8206050e-01]] [[ 1.5266629e+00 -1.1304274e-01 9.9556260e-03 -5.7589316e-01 -9.7917372e-01] [ 2.8851536e-01 -1.1610942e+00 8.1835771e-01 -1.0295959e+00 -1.9099642e-01] [-2.4972564e-01 2.3183734e+00 3.0429247e-01 6.5548229e-01 -1.2257062e+00] [-1.6149113e-01 5.9143421e-03 -1.4123043e+00 -2.9230077e-02 -1.2595674e+00] [ 4.1467050e-01 -1.5967786e+00 -4.7188631e-01 -8.6948061e-01 -1.0006098e+00]] [[ 1.3415158e+00 -2.0080434e-01 -1.3358252e+00 -1.4035475e+00 -7.0118316e-02] [-1.0812217e+00 1.1796345e+00 9.0399212e-01 4.3094864e-01 -1.1784956e+00] [-2.9230915e-02 3.0127146e+00 8.5038012e-01 5.1950938e-01 8.6769456e-01] [ 1.9986354e-01 -1.0946023e-01 1.0274889e+00 2.7109456e-01 -1.3544221e+00] [-5.2813148e-01 -2.8750764e-03 -1.6481605e+00 7.1722311e-01 -1.2747516e-01]] [[-2.6244497e-01 -1.4681215e+00 1.3236332e+00 -8.4110938e-02 -3.8577059e-01] [ 6.6314465e-01 1.9191882e-01 -3.9371580e-02 -2.8521124e-01 -1.1251301e+00] [ 3.6257811e-02 2.2794652e+00 1.1849318e+00 2.0934317e-01 -3.0473119e-01] [ 1.0028400e+00 5.2413619e-01 -9.8089129e-01 -1.2400610e+00 -2.1282049e-02] [-4.2180648e-01 5.1663741e-02 1.0117984e+00 4.2375147e-01 5.8823854e-01]]]; ov_res: [[[ 2.2462235e-01 5.1707250e-01 2.3679417e-01 1.0853380e+00 -7.2688043e-01] [ 8.8938797e-01 
2.3403245e-01 5.8067757e-01 9.5022959e-01 -2.2380464e+00] [-1.0760221e+00 7.5981736e-01 -7.1150291e-01 -7.3767692e-01 1.8249378e+00] [-5.7179809e-02 2.7801434e-02 -2.5531994e-02 -1.8001382e+00 -8.7505236e-02] [ 2.4337971e-01 -6.8531936e-01 -2.2887418e+00 7.4236089e-01 -2.8206050e-01]] [[ 1.5266629e+00 -1.1304274e-01 9.9556260e-03 -5.7589316e-01 -9.7917372e-01] [ 2.8851536e-01 -1.1610942e+00 8.1835771e-01 -1.0295959e+00 -1.9099642e-01] [-2.4972564e-01 2.3183734e+00 3.0429247e-01 6.5548229e-01 -1.2257062e+00] [-1.6149113e-01 5.9143421e-03 -1.4123043e+00 -2.9230077e-02 -1.2595674e+00] [ 4.1467050e-01 -1.5967786e+00 -4.7188631e-01 -8.6948061e-01 -1.0006098e+00]] [[ 1.3415158e+00 -2.0080434e-01 -1.3358252e+00 -1.4035475e+00 -7.0118316e-02] [-1.0812217e+00 1.1796345e+00 9.0399212e-01 4.3094864e-01 -1.1784956e+00] [-2.9230915e-02 3.0127146e+00 8.5038012e-01 5.1950938e-01 8.6769456e-01] [ 1.9986354e-01 -1.0946023e-01 1.0274889e+00 2.7109456e-01 -1.3544221e+00] [-5.2813148e-01 -2.8750764e-03 -1.6481605e+00 7.1722311e-01 -1.2747516e-01]] [[-2.6244497e-01 -1.4681215e+00 1.3236332e+00 -8.4110938e-02 -3.8577059e-01] [ 6.6314465e-01 1.9191882e-01 -3.9371580e-02 -2.8521124e-01 -1.1251301e+00] [ 3.6257811e-02 2.2794652e+00 1.1849318e+00 2.0934317e-01 -3.0473119e-01] [ 1.0028400e+00 5.2413619e-01 -9.8089129e-01 -1.2400610e+00 -2.1282049e-02] [-4.2180648e-01 5.1663741e-02 1.0117984e+00 4.2375147e-01 5.8823854e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-3 - input_dim:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5936.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-3]() %self.dim : int = prim::Constant[value=-2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 1.2852433 -1.1867927 0.29974207 -0.56174093 -1.2865684 ] [-0.52525896 -0.15736353 0.22477415 -0.58023834 0.5853034 ] [-0.34680796 -0.76561266 0.22445446 0.9294069 -0.32310167] [-1.1849024 0.3650379 -1.6906458 -0.13886574 1.4711084 ]] [[-0.2923581 -0.5147228 -0.04735035 -1.0806507 1.8632262 ] [-1.2926636 -0.00461641 2.3007958 -0.27194124 -0.43741328] [-0.47403947 -0.74734724 -1.5289793 0.86609834 0.78220624] [-2.1575792 2.6589906 0.32628873 0.9645019 0.8087024 ]] [[-0.56799984 0.41701156 0.80530673 0.72970474 1.0036439 ] [-0.6863624 0.8863443 1.3522357 1.2661763 -2.3238826 ] [ 0.07957201 1.1960924 0.03926327 0.53366053 -1.3379397 ] [ 0.8339918 -0.9439525 -0.07201763 3.4544072 2.104228 ]] [[-1.1187322 1.3286387 -0.8299798 -0.24972709 -1.222445 ] [-0.31442475 0.32268 0.37548336 -0.29141405 1.9886596 ] [-1.2714385 1.284606 0.54513454 -0.61060685 -0.10635393] [-1.0420204 1.0130403 0.19704068 1.2962136 -2.0775256 ]]]; ov_res: [[[ 1.2852433 -1.1867927 0.29974207 -0.56174093 -1.2865684 ] [-0.52525896 -0.15736353 0.22477415 -0.58023834 0.5853034 ] [-0.34680796 -0.76561266 0.22445446 0.9294069 -0.32310167] [-1.1849024 0.3650379 -1.6906458 -0.13886574 1.4711084 ]] [[-0.2923581 -0.5147228 -0.04735035 -1.0806507 1.8632262 ] [-1.2926636 -0.00461641 2.3007958 -0.27194124 -0.43741328] [-0.47403947 -0.74734724 -1.5289793 0.86609834 0.78220624] [-2.1575792 2.6589906 0.32628873 0.9645019 0.8087024 ]] [[-0.56799984 0.41701156 0.80530673 0.72970474 1.0036439 ] [-0.6863624 0.8863443 1.3522357 1.2661763 -2.3238826 ] [ 0.07957201 1.1960924 0.03926327 
0.53366053 -1.3379397 ] [ 0.8339918 -0.9439525 -0.07201763 3.4544072 2.104228 ]] [[-1.1187322 1.3286387 -0.8299798 -0.24972709 -1.222445 ] [-0.31442475 0.32268 0.37548336 -0.29141405 1.9886596 ] [-1.2714385 1.284606 0.54513454 -0.61060685 -0.10635393] [-1.0420204 1.0130403 0.19704068 1.2962136 -2.0775256 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-3 - input_dim:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5938.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-3]() %self.dim : int = prim::Constant[value=-1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.21260993 -1.3677909 1.510989 0.9447948 -0.3809383 ] [ 1.3111345 0.5850421 -0.7268732 1.5101385 -1.6022722 ] [ 1.3089076 0.3800128 -0.52636504 -0.7472505 -0.22061323] [-1.1804647 0.63996273 1.3461825 -0.4436945 0.57413393]] [[ 1.951774 -1.2976767 1.5498748 0.8841887 -0.5449239 ] [-0.2507914 -0.6545516 -0.7521051 0.3269247 -0.19249308] [-1.0522552 0.46978635 0.45156732 0.62583643 0.40894228] [ 0.2290271 -0.9775836 -0.6141454 0.17181437 0.5054503 ]] [[ 0.8053474 -1.4003786 -0.91687924 -0.20784433 -0.489838 ] [-0.15439422 0.22082579 1.1167793 0.9856567 0.00915516] [-1.0665191 0.03976727 0.4997362 0.3863506 -0.44190574] [-0.8555524 -1.2913783 2.3479917 0.3654296 -1.6095666 ]] [[ 0.6015676 -0.5276214 2.262484 0.5714283 1.2344391 ] [-0.7663212 0.84873 -0.4733344 -0.4549854 -1.0785409 ] [ 0.82819784 0.03096335 -1.2049795 1.5449451 -0.32376102] [-0.0799929 0.5937945 1.1089755 2.4144955 0.49925455]]]; ov_res: [[[-0.21260993 -1.3677909 1.510989 0.9447948 -0.3809383 ] [ 1.3111345 0.5850421 -0.7268732 1.5101385 -1.6022722 ] [ 1.3089076 0.3800128 -0.52636504 -0.7472505 -0.22061323] [-1.1804647 0.63996273 1.3461825 -0.4436945 0.57413393]] [[ 1.951774 -1.2976767 1.5498748 0.8841887 -0.5449239 ] [-0.2507914 -0.6545516 -0.7521051 0.3269247 -0.19249308] [-1.0522552 0.46978635 0.45156732 0.62583643 0.40894228] [ 0.2290271 -0.9775836 -0.6141454 0.17181437 0.5054503 ]] [[ 0.8053474 -1.4003786 -0.91687924 -0.20784433 -0.489838 ] [-0.15439422 0.22082579 1.1167793 0.9856567 0.00915516] [-1.0665191 0.03976727 0.4997362 0.3863506 -0.44190574] [-0.8555524 
-1.2913783 2.3479917 0.3654296 -1.6095666 ]] [[ 0.6015676 -0.5276214 2.262484 0.5714283 1.2344391 ] [-0.7663212 0.84873 -0.4733344 -0.4549854 -1.0785409 ] [ 0.82819784 0.03096335 -1.2049795 1.5449451 -0.32376102] [-0.0799929 0.5937945 1.1089755 2.4144955 0.49925455]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-3 - input_dim:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5940.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-3]() %self.dim : int = prim::Constant[value=0]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.46055305 -0.96032244 1.2736825 0.9981244 -0.18640041] [ 0.3401951 0.09114093 -2.6277037 -0.78632075 0.7030964 ] [-0.42007884 -0.14946331 -1.3209425 0.9176999 -0.88027817] [ 1.7602773 0.79872775 2.0614316 -0.3852905 1.3587935 ] [ 0.9433415 -1.2302302 -0.8937128 -0.5461817 -0.03305659]] [[ 1.8112663 -0.4318907 -1.0135632 -0.741779 -1.1937356 ] [-0.4880103 1.2162089 0.57156867 -1.7924286 0.9736738 ] [ 0.4694908 0.14213917 0.2853313 -0.14273739 -0.24598238] [-0.27559227 -0.1929589 1.4259071 1.1562563 0.6193251 ] [ 0.24476656 1.2313714 -0.8724973 -0.90010285 -0.47868884]] [[-0.78640574 1.6835712 0.15739791 -0.69485086 0.05505876] [ 1.0880824 0.7432997 0.3784615 -0.4709464 -2.3033245 ] [-0.6438091 -0.69646996 -0.5657397 -0.27803564 -2.313296 ] [ 0.11640763 -0.25465935 0.2174958 0.6437845 -0.01984406] [-0.01507012 0.56090194 -0.35883883 -0.45904925 -0.07575675]] [[ 2.2510862 0.32612088 -0.4047182 -1.1596179 0.7547949 ] [ 0.6846126 -1.4545491 2.4045217 -0.21093632 0.6825772 ] [ 0.41698202 0.24629267 0.16215244 -0.18818295 1.0944548 ] [-0.0775395 0.33428574 1.1683251 -1.1491418 -1.3802567 ] [ 1.0099807 0.87715125 -0.05215966 0.7182518 0.31374362]]]; ov_res: [[[-0.46055305 -0.96032244 1.2736825 0.9981244 -0.18640041] [ 0.3401951 0.09114093 -2.6277037 -0.78632075 0.7030964 ] [-0.42007884 -0.14946331 -1.3209425 0.9176999 -0.88027817] [ 1.7602773 0.79872775 2.0614316 -0.3852905 1.3587935 ] [ 0.9433415 -1.2302302 -0.8937128 -0.5461817 -0.03305659]] [[ 1.8112663 -0.4318907 -1.0135632 -0.741779 -1.1937356 ] [-0.4880103 1.2162089 0.57156867 -1.7924286 
0.9736738 ] [ 0.4694908 0.14213917 0.2853313 -0.14273739 -0.24598238] [-0.27559227 -0.1929589 1.4259071 1.1562563 0.6193251 ] [ 0.24476656 1.2313714 -0.8724973 -0.90010285 -0.47868884]] [[-0.78640574 1.6835712 0.15739791 -0.69485086 0.05505876] [ 1.0880824 0.7432997 0.3784615 -0.4709464 -2.3033245 ] [-0.6438091 -0.69646996 -0.5657397 -0.27803564 -2.313296 ] [ 0.11640763 -0.25465935 0.2174958 0.6437845 -0.01984406] [-0.01507012 0.56090194 -0.35883883 -0.45904925 -0.07575675]] [[ 2.2510862 0.32612088 -0.4047182 -1.1596179 0.7547949 ] [ 0.6846126 -1.4545491 2.4045217 -0.21093632 0.6825772 ] [ 0.41698202 0.24629267 0.16215244 -0.18818295 1.0944548 ] [-0.0775395 0.33428574 1.1683251 -1.1491418 -1.3802567 ] [ 1.0099807 0.87715125 -0.05215966 0.7182518 0.31374362]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-3 - input_dim:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5942.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-3]() %self.dim : int = prim::Constant[value=1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-4.4044831e-01 3.8184398e-01 -1.3289002e+00 1.0088975e+00 -1.4892007e+00] [ 9.9741662e-01 5.2032870e-01 1.0392528e+00 2.5244062e+00 2.7245104e+00] [ 8.3002472e-01 -8.7828994e-01 -1.7088174e+00 -1.6889625e+00 -4.1292688e-01] [ 6.9142794e-03 -5.8087730e-01 1.1243248e+00 2.4183397e+00 -1.2068518e+00] [-1.8086979e+00 -1.4517241e+00 8.5014182e-01 -8.3057737e-01 9.7197661e-04]] [[-3.5689822e-01 1.2394663e+00 -4.2246360e-01 -1.2411681e+00 -5.1902288e-01] [ 5.0715989e-01 -9.6241349e-01 -1.0646073e+00 -1.0817511e-01 5.3483552e-01] [ 1.1737675e-01 1.2545096e+00 -8.9656371e-01 6.4815156e-02 -1.5396383e-01] [-1.1687064e+00 5.5319053e-01 2.1521950e-01 1.4031008e+00 1.3947486e+00] [-6.7442966e-01 1.0460857e-01 2.7271581e-01 -6.8043804e-01 2.5239205e-01]] [[ 1.2317175e+00 -7.5872415e-01 2.0807281e-01 1.2661946e+00 1.7285269e+00] [-2.9333916e-01 5.4740059e-01 1.4128500e+00 -1.1052040e+00 1.2151606e+00] [-1.1533892e+00 1.0975814e-01 2.6906469e-01 -2.0896955e+00 -4.1439074e-01] [-1.5082157e+00 1.1647766e+00 -1.1261331e+00 -1.5669153e+00 9.4984118e-03] [ 9.0302604e-01 7.8219813e-01 -3.5529748e-01 1.2391297e+00 5.3048033e-01]] [[-3.9218810e-01 1.0702336e+00 -1.3078729e+00 1.9044455e+00 2.1803118e-01] [-7.4423975e-01 9.9414515e-01 5.9315532e-01 9.8720455e-01 1.2528566e+00] [-1.8639599e-01 -1.5887959e+00 -7.0202649e-01 1.0234270e-01 -3.1920668e-01] [-3.9464864e-01 1.2152679e+00 -6.6853178e-01 8.2515812e-01 -8.6462015e-01] [-1.2098103e+00 5.4888729e-02 1.1626282e+00 -1.8445940e+00 -1.7720108e+00]]]; ov_res: [[[-4.4044831e-01 3.8184398e-01 -1.3289002e+00 
1.0088975e+00 -1.4892007e+00] [ 9.9741662e-01 5.2032870e-01 1.0392528e+00 2.5244062e+00 2.7245104e+00] [ 8.3002472e-01 -8.7828994e-01 -1.7088174e+00 -1.6889625e+00 -4.1292688e-01] [ 6.9142794e-03 -5.8087730e-01 1.1243248e+00 2.4183397e+00 -1.2068518e+00] [-1.8086979e+00 -1.4517241e+00 8.5014182e-01 -8.3057737e-01 9.7197661e-04]] [[-3.5689822e-01 1.2394663e+00 -4.2246360e-01 -1.2411681e+00 -5.1902288e-01] [ 5.0715989e-01 -9.6241349e-01 -1.0646073e+00 -1.0817511e-01 5.3483552e-01] [ 1.1737675e-01 1.2545096e+00 -8.9656371e-01 6.4815156e-02 -1.5396383e-01] [-1.1687064e+00 5.5319053e-01 2.1521950e-01 1.4031008e+00 1.3947486e+00] [-6.7442966e-01 1.0460857e-01 2.7271581e-01 -6.8043804e-01 2.5239205e-01]] [[ 1.2317175e+00 -7.5872415e-01 2.0807281e-01 1.2661946e+00 1.7285269e+00] [-2.9333916e-01 5.4740059e-01 1.4128500e+00 -1.1052040e+00 1.2151606e+00] [-1.1533892e+00 1.0975814e-01 2.6906469e-01 -2.0896955e+00 -4.1439074e-01] [-1.5082157e+00 1.1647766e+00 -1.1261331e+00 -1.5669153e+00 9.4984118e-03] [ 9.0302604e-01 7.8219813e-01 -3.5529748e-01 1.2391297e+00 5.3048033e-01]] [[-3.9218810e-01 1.0702336e+00 -1.3078729e+00 1.9044455e+00 2.1803118e-01] [-7.4423975e-01 9.9414515e-01 5.9315532e-01 9.8720455e-01 1.2528566e+00] [-1.8639599e-01 -1.5887959e+00 -7.0202649e-01 1.0234270e-01 -3.1920668e-01] [-3.9464864e-01 1.2152679e+00 -6.6853178e-01 8.2515812e-01 -8.6462015e-01] [-1.2098103e+00 5.4888729e-02 1.1626282e+00 -1.8445940e+00 -1.7720108e+00]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-3 - input_dim:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5944.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-3]() %self.dim : int = prim::Constant[value=2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 2.03631759e+00 -8.64259362e-01 -9.68115270e-01 -1.46386206e-01 -8.32528353e-01] [ 1.04977608e+00 -5.45531034e-01 -1.33102536e-01 -1.73427141e+00 -1.57081652e-02] [-3.68268192e-01 1.17527938e+00 2.28532410e+00 -7.80584812e-01 2.75031805e-01] [-9.74095404e-01 -1.37192115e-01 -4.62019503e-01 -8.76532614e-01 6.63439512e-01]] [[ 6.10378683e-01 -3.55016500e-01 -6.17550611e-01 -3.77601087e-02 -2.00125813e+00] [-4.61705536e-01 -7.21634328e-01 -2.43049255e-03 -6.68105543e-01 1.27773058e+00] [ 2.74734437e-01 4.12541687e-01 1.02810934e-01 7.42479742e-01 -6.33821785e-02] [ 1.36653769e+00 -1.54588032e+00 3.05882752e-01 -2.15664005e+00 3.10969442e-01]] [[-8.61910582e-01 -1.20528650e+00 1.01077223e+00 1.42100215e+00 5.02413392e-01] [ 1.29900169e+00 1.66257441e-01 3.60356122e-01 -2.08967495e+00 1.87655568e+00] [-5.19510388e-01 -2.05683932e-01 6.28939867e-01 -1.03278482e+00 -5.12756646e-01] [ 1.32019126e+00 2.42709875e+00 1.22408879e+00 7.28444397e-01 2.87150174e-01]] [[ 1.21465445e+00 -1.81187296e+00 -9.62878108e-01 -2.84335756e+00 6.75556600e-01] [ 8.14947248e-01 -4.08219635e-01 -3.29140127e-01 -1.12476498e-01 7.90351748e-01] [-1.07137203e+00 1.12704027e+00 -7.99681365e-01 3.71914625e-01 2.17301145e-01] [ 1.38392925e+00 -2.15326071e+00 -2.13382512e-01 9.50096607e-01 1.25671998e-01]]]; ov_res: [[[ 2.03631759e+00 -8.64259362e-01 -9.68115270e-01 -1.46386206e-01 -8.32528353e-01] [ 1.04977608e+00 -5.45531034e-01 -1.33102536e-01 -1.73427141e+00 -1.57081652e-02] [-3.68268192e-01 1.17527938e+00 2.28532410e+00 -7.80584812e-01 2.75031805e-01] [-9.74095404e-01 
-1.37192115e-01 -4.62019503e-01 -8.76532614e-01 6.63439512e-01]] [[ 6.10378683e-01 -3.55016500e-01 -6.17550611e-01 -3.77601087e-02 -2.00125813e+00] [-4.61705536e-01 -7.21634328e-01 -2.43049255e-03 -6.68105543e-01 1.27773058e+00] [ 2.74734437e-01 4.12541687e-01 1.02810934e-01 7.42479742e-01 -6.33821785e-02] [ 1.36653769e+00 -1.54588032e+00 3.05882752e-01 -2.15664005e+00 3.10969442e-01]] [[-8.61910582e-01 -1.20528650e+00 1.01077223e+00 1.42100215e+00 5.02413392e-01] [ 1.29900169e+00 1.66257441e-01 3.60356122e-01 -2.08967495e+00 1.87655568e+00] [-5.19510388e-01 -2.05683932e-01 6.28939867e-01 -1.03278482e+00 -5.12756646e-01] [ 1.32019126e+00 2.42709875e+00 1.22408879e+00 7.28444397e-01 2.87150174e-01]] [[ 1.21465445e+00 -1.81187296e+00 -9.62878108e-01 -2.84335756e+00 6.75556600e-01] [ 8.14947248e-01 -4.08219635e-01 -3.29140127e-01 -1.12476498e-01 7.90351748e-01] [-1.07137203e+00 1.12704027e+00 -7.99681365e-01 3.71914625e-01 2.17301145e-01] [ 1.38392925e+00 -2.15326071e+00 -2.13382512e-01 9.50096607e-01 1.25671998e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-3 - input_dim:3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5946.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-3]() %self.dim : int = prim::Constant[value=3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.5263395 -0.30232492 0.25314295 0.03055871 -1.4716486 ] [ 0.39221638 0.35388604 -0.03779112 2.5313604 0.8604435 ] [ 0.07061762 0.44209525 -0.7969007 -0.54665875 -0.01516338] [ 1.3251586 0.27509803 -0.57756996 -0.45269883 1.4078223 ]] [[-0.12127879 -0.75133896 0.70841515 -0.15447584 0.5663803 ] [ 0.81516284 1.5328528 0.28671438 -0.53162456 0.31184432] [ 0.7749218 -0.21916555 -1.4845656 1.3572363 0.82287973] [-1.5023996 -2.2508388 -0.8279479 -1.172945 0.09319837]] [[-0.29330975 -0.536403 0.06199992 2.5449135 -0.7469931 ] [-1.0315624 -0.7100576 -1.3498894 0.00630637 0.30007967] [-0.9373744 1.1206269 -0.691757 -0.01000102 0.31361094] [-1.5313827 1.2906239 -2.0188768 -1.1396499 -2.677014 ]] [[ 0.707967 0.521485 1.3170285 -0.3371483 0.892293 ] [ 0.7221705 -0.94483846 -0.734144 0.30144522 -1.5280104 ] [ 0.04627328 -0.6709988 1.3215065 -0.5073841 -0.03550269] [ 0.7961345 0.16956985 -1.1786482 -0.9430378 -0.31191462]]]; ov_res: [[[ 0.5263395 -0.30232492 0.25314295 0.03055871 -1.4716486 ] [ 0.39221638 0.35388604 -0.03779112 2.5313604 0.8604435 ] [ 0.07061762 0.44209525 -0.7969007 -0.54665875 -0.01516338] [ 1.3251586 0.27509803 -0.57756996 -0.45269883 1.4078223 ]] [[-0.12127879 -0.75133896 0.70841515 -0.15447584 0.5663803 ] [ 0.81516284 1.5328528 0.28671438 -0.53162456 0.31184432] [ 0.7749218 -0.21916555 -1.4845656 1.3572363 0.82287973] [-1.5023996 -2.2508388 -0.8279479 -1.172945 0.09319837]] [[-0.29330975 -0.536403 0.06199992 2.5449135 -0.7469931 ] [-1.0315624 -0.7100576 -1.3498894 0.00630637 0.30007967] [-0.9373744 1.1206269 -0.691757 
-0.01000102 0.31361094] [-1.5313827 1.2906239 -2.0188768 -1.1396499 -2.677014 ]] [[ 0.707967 0.521485 1.3170285 -0.3371483 0.892293 ] [ 0.7221705 -0.94483846 -0.734144 0.30144522 -1.5280104 ] [ 0.04627328 -0.6709988 1.3215065 -0.5073841 -0.03550269] [ 0.7961345 0.16956985 -1.1786482 -0.9430378 -0.31191462]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-2 - input_dim:-3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5948.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-2]() %self.dim : int = prim::Constant[value=-3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.0190865 2.3224626 0.19827265 0.7135567 0.13433443] [-2.296023 -1.1602144 0.86885947 -0.4872219 0.7602023 ] [ 0.05420058 -0.74740255 -0.01765434 -0.28619164 -1.8664935 ] [ 1.3939489 0.25280952 -0.72152007 -1.0930508 0.48923406] [-0.852853 0.9346913 -0.67055285 0.26784438 0.14585632]] [[-0.7635928 -2.0698843 0.2497131 -0.03819629 -1.1188014 ] [ 0.8017243 -0.00459928 -0.22456981 2.466427 -1.6864758 ] [ 1.1050626 -1.4609523 0.12702416 0.40126202 -0.95634395] [-0.5899859 1.2636367 -0.7302024 2.0181093 -0.12564841] [-1.4044248 -0.9152482 0.9194357 -1.0259209 0.8244236 ]] [[-0.0188621 -1.5767465 1.3917847 0.9630676 -1.7234994 ] [ 0.04300969 2.442318 -0.5209009 -1.0248678 1.1937884 ] [-1.3688453 0.5772494 -1.6289899 0.36116627 -0.15113293] [-1.0671772 0.47495556 1.033662 0.27199107 0.9228773 ] [ 0.52974993 -0.96792626 -1.1535615 0.3688725 -1.0554824 ]] [[ 0.8159376 -0.8456654 -2.0361595 -1.2965316 -0.47815254] [ 0.6834148 0.30630514 1.6644369 1.0188518 1.1280783 ] [-0.576439 -0.29928482 0.99292177 -0.6235156 2.194463 ] [ 0.52693564 -1.4322786 0.41475686 -2.8052125 -0.9413958 ] [-2.1908052 -0.28154156 -1.8811542 2.1208446 -0.5389981 ]]]; ov_res: [[[ 0.0190865 2.3224626 0.19827265 0.7135567 0.13433443] [-2.296023 -1.1602144 0.86885947 -0.4872219 0.7602023 ] [ 0.05420058 -0.74740255 -0.01765434 -0.28619164 -1.8664935 ] [ 1.3939489 0.25280952 -0.72152007 -1.0930508 0.48923406] [-0.852853 0.9346913 -0.67055285 0.26784438 0.14585632]] [[-0.7635928 -2.0698843 0.2497131 -0.03819629 -1.1188014 ] [ 0.8017243 -0.00459928 -0.22456981 2.466427 -1.6864758 
] [ 1.1050626 -1.4609523 0.12702416 0.40126202 -0.95634395] [-0.5899859 1.2636367 -0.7302024 2.0181093 -0.12564841] [-1.4044248 -0.9152482 0.9194357 -1.0259209 0.8244236 ]] [[-0.0188621 -1.5767465 1.3917847 0.9630676 -1.7234994 ] [ 0.04300969 2.442318 -0.5209009 -1.0248678 1.1937884 ] [-1.3688453 0.5772494 -1.6289899 0.36116627 -0.15113293] [-1.0671772 0.47495556 1.033662 0.27199107 0.9228773 ] [ 0.52974993 -0.96792626 -1.1535615 0.3688725 -1.0554824 ]] [[ 0.8159376 -0.8456654 -2.0361595 -1.2965316 -0.47815254] [ 0.6834148 0.30630514 1.6644369 1.0188518 1.1280783 ] [-0.576439 -0.29928482 0.99292177 -0.6235156 2.194463 ] [ 0.52693564 -1.4322786 0.41475686 -2.8052125 -0.9413958 ] [-2.1908052 -0.28154156 -1.8811542 2.1208446 -0.5389981 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-2 - input_dim:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5950.aten_select, %input_tensor.1 : Tensor): %self.dim : int = prim::Constant[value=-2]() %3 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%3) fw_re: [[[ 0.56073517 -0.17849156 0.20171085 0.4061242 -0.9390363 ] [-0.27899048 -0.6830151 0.84176874 0.26177663 0.4450851 ] [ 1.0606397 -1.2360824 0.25825384 1.1335163 1.2355138 ] [ 1.9246249 0.18286325 0.35792527 0.89293903 1.5210207 ]] [[ 0.23115794 -0.30097905 -0.24297293 -0.24825862 -1.1405569 ] [-0.7975082 -0.20121557 1.2944602 -2.1004863 0.9841939 ] [ 0.01858554 0.92263967 0.46425036 0.03255898 -1.7620847 ] [-1.0486761 1.1402775 0.7668804 0.9879907 -1.6510843 ]] [[-1.5178424 0.41309616 -1.2035669 0.64439374 0.74225366] [ 1.1418992 -0.9823868 -0.5389881 -0.6493735 1.3589607 ] [ 0.36612433 0.6800384 0.17314999 -2.37909 -0.3056815 ] [-0.02780062 0.8572813 -1.5840813 -0.8926169 -0.875771 ]] [[ 1.5878942 0.91293466 -0.5285071 1.0731838 -0.18882559] [ 1.691852 0.8203185 0.1517389 0.82446575 -1.7949656 ] [-1.4198991 -0.38667408 0.16083883 -1.71982 0.68395394] [ 0.22803873 -1.352112 1.8162496 1.1888957 -0.32199636]]]; ov_res: [[[ 0.56073517 -0.17849156 0.20171085 0.4061242 -0.9390363 ] [-0.27899048 -0.6830151 0.84176874 0.26177663 0.4450851 ] [ 1.0606397 -1.2360824 0.25825384 1.1335163 1.2355138 ] [ 1.9246249 0.18286325 0.35792527 0.89293903 1.5210207 ]] [[ 0.23115794 -0.30097905 -0.24297293 -0.24825862 -1.1405569 ] [-0.7975082 -0.20121557 1.2944602 -2.1004863 0.9841939 ] [ 0.01858554 0.92263967 0.46425036 0.03255898 -1.7620847 ] [-1.0486761 1.1402775 0.7668804 0.9879907 -1.6510843 ]] [[-1.5178424 0.41309616 -1.2035669 0.64439374 0.74225366] [ 1.1418992 -0.9823868 -0.5389881 -0.6493735 1.3589607 ] [ 0.36612433 0.6800384 0.17314999 -2.37909 -0.3056815 ] [-0.02780062 0.8572813 -1.5840813 
-0.8926169 -0.875771 ]] [[ 1.5878942 0.91293466 -0.5285071 1.0731838 -0.18882559] [ 1.691852 0.8203185 0.1517389 0.82446575 -1.7949656 ] [-1.4198991 -0.38667408 0.16083883 -1.71982 0.68395394] [ 0.22803873 -1.352112 1.8162496 1.1888957 -0.32199636]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-2 - input_dim:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5952.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-2]() %self.dim : int = prim::Constant[value=-1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.09113788 1.4157306 0.12667 0.8155854 -0.1031803 ] [ 0.57939 -0.2219941 1.020193 -1.2404512 -0.16727415] [-0.6104978 -0.27081698 -1.0249546 0.19285338 -0.1974432 ] [ 0.45942676 -1.204303 0.32956672 -0.74650735 0.6618146 ]] [[ 0.1786006 0.35164034 0.24407746 0.13511337 -0.64568424] [-0.6356405 -0.06775888 0.2337958 -0.34641457 -1.0758909 ] [ 0.9610549 -2.3925076 0.6356132 -1.751839 -1.4679984 ] [-0.88188857 -0.37654984 -0.35199103 -1.3677279 -0.14633474]] [[ 0.7954604 0.88742334 -0.9075697 0.31523505 1.3680836 ] [-0.39757624 -1.1077884 -1.2984724 -0.70135206 0.43029892] [-0.85893977 0.4079233 -0.62087864 1.0561891 -1.2062523 ] [ 0.38681993 1.1412408 -0.6749342 0.16696605 0.51074713]] [[-1.7134697 0.8925355 -0.06909653 0.505754 -0.8699124 ] [ 0.69509614 0.22286032 0.5153228 1.091812 -0.5961787 ] [ 0.11236106 2.569199 0.03466395 1.8309959 -1.3642046 ] [ 0.12492458 1.0710012 -0.38688743 0.31203246 0.14179803]]]; ov_res: [[[-0.09113788 1.4157306 0.12667 0.8155854 -0.1031803 ] [ 0.57939 -0.2219941 1.020193 -1.2404512 -0.16727415] [-0.6104978 -0.27081698 -1.0249546 0.19285338 -0.1974432 ] [ 0.45942676 -1.204303 0.32956672 -0.74650735 0.6618146 ]] [[ 0.1786006 0.35164034 0.24407746 0.13511337 -0.64568424] [-0.6356405 -0.06775888 0.2337958 -0.34641457 -1.0758909 ] [ 0.9610549 -2.3925076 0.6356132 -1.751839 -1.4679984 ] [-0.88188857 -0.37654984 -0.35199103 -1.3677279 -0.14633474]] [[ 0.7954604 0.88742334 -0.9075697 0.31523505 1.3680836 ] [-0.39757624 -1.1077884 -1.2984724 -0.70135206 0.43029892] [-0.85893977 0.4079233 -0.62087864 1.0561891 
-1.2062523 ] [ 0.38681993 1.1412408 -0.6749342 0.16696605 0.51074713]] [[-1.7134697 0.8925355 -0.06909653 0.505754 -0.8699124 ] [ 0.69509614 0.22286032 0.5153228 1.091812 -0.5961787 ] [ 0.11236106 2.569199 0.03466395 1.8309959 -1.3642046 ] [ 0.12492458 1.0710012 -0.38688743 0.31203246 0.14179803]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-2 - input_dim:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5954.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-2]() %self.dim : int = prim::Constant[value=0]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-1.5649669 1.9988811 -0.9177204 -1.1130706 2.0624752 ] [ 1.7923753 -0.37341073 -0.56667304 0.7437729 -1.1653728 ] [ 0.524739 -0.9205885 -0.9986575 0.1859454 0.26496965] [ 1.0434208 -0.25311518 -0.38922867 -0.42701322 -0.38352385] [ 2.8005114 1.0810478 -1.485258 1.2204585 -0.09034154]] [[ 0.96644914 -1.0806015 0.76828355 1.0417788 0.34961656] [-0.15810533 0.30086616 0.68124807 -0.275936 -1.1571605 ] [-0.06996246 2.3016756 -0.8516357 1.0273471 -1.524994 ] [ 0.22551972 0.7909097 0.33848786 1.2297076 0.5210563 ] [ 1.0336324 0.28687704 0.60818595 0.89126927 -0.7745007 ]] [[ 0.09528995 -0.564145 -0.8602137 0.2809665 -1.0092051 ] [ 0.77717257 0.45266864 -1.0393081 -0.02375466 1.9590853 ] [ 0.48217732 -0.19028986 1.4329036 -0.53922296 0.636077 ] [ 0.73261666 -0.87255085 -0.07664683 -1.14456 0.66188526] [-0.32042855 -1.6030744 1.4306793 0.08495121 1.3174453 ]] [[-0.56521446 -0.06302999 -1.0578005 0.6291923 -0.9332884 ] [ 1.9332536 -0.95057905 -0.1279329 -0.38001874 0.58944094] [-1.6554047 0.20892425 1.2029028 0.03325037 -1.5234034 ] [-0.8738008 0.15786776 -1.0339769 -2.2304702 -0.370514 ] [-1.7144219 0.15498808 1.6983002 1.3597442 1.2068586 ]]]; ov_res: [[[-1.5649669 1.9988811 -0.9177204 -1.1130706 2.0624752 ] [ 1.7923753 -0.37341073 -0.56667304 0.7437729 -1.1653728 ] [ 0.524739 -0.9205885 -0.9986575 0.1859454 0.26496965] [ 1.0434208 -0.25311518 -0.38922867 -0.42701322 -0.38352385] [ 2.8005114 1.0810478 -1.485258 1.2204585 -0.09034154]] [[ 0.96644914 -1.0806015 0.76828355 1.0417788 0.34961656] [-0.15810533 0.30086616 0.68124807 -0.275936 
-1.1571605 ] [-0.06996246 2.3016756 -0.8516357 1.0273471 -1.524994 ] [ 0.22551972 0.7909097 0.33848786 1.2297076 0.5210563 ] [ 1.0336324 0.28687704 0.60818595 0.89126927 -0.7745007 ]] [[ 0.09528995 -0.564145 -0.8602137 0.2809665 -1.0092051 ] [ 0.77717257 0.45266864 -1.0393081 -0.02375466 1.9590853 ] [ 0.48217732 -0.19028986 1.4329036 -0.53922296 0.636077 ] [ 0.73261666 -0.87255085 -0.07664683 -1.14456 0.66188526] [-0.32042855 -1.6030744 1.4306793 0.08495121 1.3174453 ]] [[-0.56521446 -0.06302999 -1.0578005 0.6291923 -0.9332884 ] [ 1.9332536 -0.95057905 -0.1279329 -0.38001874 0.58944094] [-1.6554047 0.20892425 1.2029028 0.03325037 -1.5234034 ] [-0.8738008 0.15786776 -1.0339769 -2.2304702 -0.370514 ] [-1.7144219 0.15498808 1.6983002 1.3597442 1.2068586 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-2 - input_dim:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5956.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-2]() %self.dim : int = prim::Constant[value=1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-1.137688 1.1767915 0.885135 1.8106803 1.2267252 ] [-1.1955147 -0.07041755 -0.01103242 -1.5368686 -1.1493832 ] [ 0.272472 -0.20570064 -2.1564333 -0.39836192 0.8094092 ] [-0.05517974 0.11967438 0.22166207 -1.090656 0.85215664] [-0.66278887 -0.11308295 -0.6149335 1.069132 0.10464247]] [[ 0.3199225 1.351731 0.27966297 -0.32170227 0.6618535 ] [ 1.9594762 -0.82218516 0.02011003 0.5395305 1.2338701 ] [-1.0983124 1.4017894 -1.1617515 0.8265293 0.8616599 ] [-2.799328 -1.2337905 0.13704619 -1.1489996 0.8190901 ] [-1.0744851 -1.6468878 -0.34492028 -0.4791399 1.4516003 ]] [[-0.21000761 1.9723903 -0.9176143 2.2531285 -0.8572479 ] [-1.3737411 0.9219839 -1.1251682 -0.08275896 0.6166473 ] [-0.2221837 -0.12914252 1.5521408 1.4056348 1.5828474 ] [ 0.7600585 0.8905127 -1.1179297 -0.5207781 -0.5035467 ] [-2.897212 -0.21995834 -0.47935733 0.39675087 -0.05985797]] [[ 0.04921005 -0.9579469 0.10378484 0.4217235 1.897851 ] [-0.49221778 0.71200067 0.7135883 -0.16000307 1.0928934 ] [ 2.1756449 1.420549 -0.26134214 0.6943325 0.07072398] [ 0.44862112 0.5086177 -0.3144364 -1.3875166 -0.3470657 ] [-0.07108075 -0.98988324 0.14211409 -0.44864476 0.15162154]]]; ov_res: [[[-1.137688 1.1767915 0.885135 1.8106803 1.2267252 ] [-1.1955147 -0.07041755 -0.01103242 -1.5368686 -1.1493832 ] [ 0.272472 -0.20570064 -2.1564333 -0.39836192 0.8094092 ] [-0.05517974 0.11967438 0.22166207 -1.090656 0.85215664] [-0.66278887 -0.11308295 -0.6149335 1.069132 0.10464247]] [[ 0.3199225 1.351731 0.27966297 -0.32170227 0.6618535 ] [ 1.9594762 -0.82218516 0.02011003 0.5395305 1.2338701 ] 
[-1.0983124 1.4017894 -1.1617515 0.8265293 0.8616599 ] [-2.799328 -1.2337905 0.13704619 -1.1489996 0.8190901 ] [-1.0744851 -1.6468878 -0.34492028 -0.4791399 1.4516003 ]] [[-0.21000761 1.9723903 -0.9176143 2.2531285 -0.8572479 ] [-1.3737411 0.9219839 -1.1251682 -0.08275896 0.6166473 ] [-0.2221837 -0.12914252 1.5521408 1.4056348 1.5828474 ] [ 0.7600585 0.8905127 -1.1179297 -0.5207781 -0.5035467 ] [-2.897212 -0.21995834 -0.47935733 0.39675087 -0.05985797]] [[ 0.04921005 -0.9579469 0.10378484 0.4217235 1.897851 ] [-0.49221778 0.71200067 0.7135883 -0.16000307 1.0928934 ] [ 2.1756449 1.420549 -0.26134214 0.6943325 0.07072398] [ 0.44862112 0.5086177 -0.3144364 -1.3875166 -0.3470657 ] [-0.07108075 -0.98988324 0.14211409 -0.44864476 0.15162154]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-2 - input_dim:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5958.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-2]() %self.dim : int = prim::Constant[value=2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.9656503 -0.00879404 -0.5608088 1.0474552 0.0736916 ] [ 0.12111214 0.33110586 0.14454961 0.25236678 -0.819635 ] [-0.30282608 0.6244994 1.1017278 0.26977435 -0.8706882 ] [ 0.4691546 0.18056098 -0.3577036 0.7460127 -0.5193377 ]] [[ 1.1119512 -0.20654674 1.3889782 -1.2272699 -0.99900293] [-1.2904849 -1.2995468 1.1857682 1.1799092 0.71173227] [ 2.1445978 1.940941 -1.1425514 0.39782855 1.2660387 ] [-0.5612954 0.16822106 -0.22992983 -1.1213684 1.0440114 ]] [[ 0.06743185 0.27459288 -0.86255205 -0.15499382 -0.04152466] [ 0.241649 -1.0655167 0.9412117 -0.479609 -0.22297159] [-0.48625213 -0.4168982 -0.54615283 1.3844209 -0.19355121] [-1.9156287 -0.17306091 -2.225901 -1.3609332 1.2165314 ]] [[ 0.20109652 -0.63769054 0.55882156 -0.86514646 2.0434406 ] [-0.02920765 0.7051514 0.17208794 -1.3847898 -0.8278405 ] [-0.5103471 -0.02541868 -1.184356 1.3524853 1.2890055 ] [ 0.49217957 1.3642467 -1.5447279 0.03736717 -1.6064997 ]]]; ov_res: [[[ 0.9656503 -0.00879404 -0.5608088 1.0474552 0.0736916 ] [ 0.12111214 0.33110586 0.14454961 0.25236678 -0.819635 ] [-0.30282608 0.6244994 1.1017278 0.26977435 -0.8706882 ] [ 0.4691546 0.18056098 -0.3577036 0.7460127 -0.5193377 ]] [[ 1.1119512 -0.20654674 1.3889782 -1.2272699 -0.99900293] [-1.2904849 -1.2995468 1.1857682 1.1799092 0.71173227] [ 2.1445978 1.940941 -1.1425514 0.39782855 1.2660387 ] [-0.5612954 0.16822106 -0.22992983 -1.1213684 1.0440114 ]] [[ 0.06743185 0.27459288 -0.86255205 -0.15499382 -0.04152466] [ 0.241649 -1.0655167 0.9412117 -0.479609 -0.22297159] [-0.48625213 -0.4168982 -0.54615283 1.3844209 
-0.19355121] [-1.9156287 -0.17306091 -2.225901 -1.3609332 1.2165314 ]] [[ 0.20109652 -0.63769054 0.55882156 -0.86514646 2.0434406 ] [-0.02920765 0.7051514 0.17208794 -1.3847898 -0.8278405 ] [-0.5103471 -0.02541868 -1.184356 1.3524853 1.2890055 ] [ 0.49217957 1.3642467 -1.5447279 0.03736717 -1.6064997 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-2 - input_dim:3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5960.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-2]() %self.dim : int = prim::Constant[value=3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.6379583 -1.1537441 0.38807184 1.7325783 0.0433187 ] [ 0.66063654 0.5692339 -0.25810128 0.5436861 -0.9262066 ] [ 1.6442558 -0.45057636 -0.46351108 1.8226261 -0.12967205] [-0.45305607 -1.8197277 -0.5889788 -1.0593325 1.3045431 ]] [[ 1.005386 1.0458199 1.227883 0.6644246 1.2409725 ] [ 0.331252 0.36929217 0.04787492 -0.10407105 1.2564683 ] [ 0.6987738 -0.08768338 -0.56162244 0.36579025 1.7298759 ] [-0.94903636 -0.07341956 0.45749673 -0.38795957 0.4046863 ]] [[ 1.2861153 -1.4809961 -0.4385244 1.1231953 1.0225635 ] [-0.80224985 0.46608534 1.4460084 -0.27379873 -0.9524367 ] [ 0.54898846 0.7500904 0.3877463 -0.7856354 -0.65936637] [-0.0590434 0.03957716 1.2933865 -1.3648565 -1.2722344 ]] [[ 0.49193627 0.24477074 -0.9731202 1.0643737 -0.7663011 ] [-1.3778614 -0.00207965 -0.22408724 1.9601214 0.35279614] [-0.06280496 1.7924802 0.06020905 -1.9643929 -0.87136066] [ 0.39353958 1.0780405 0.1632779 0.76390344 0.34970167]]]; ov_res: [[[-0.6379583 -1.1537441 0.38807184 1.7325783 0.0433187 ] [ 0.66063654 0.5692339 -0.25810128 0.5436861 -0.9262066 ] [ 1.6442558 -0.45057636 -0.46351108 1.8226261 -0.12967205] [-0.45305607 -1.8197277 -0.5889788 -1.0593325 1.3045431 ]] [[ 1.005386 1.0458199 1.227883 0.6644246 1.2409725 ] [ 0.331252 0.36929217 0.04787492 -0.10407105 1.2564683 ] [ 0.6987738 -0.08768338 -0.56162244 0.36579025 1.7298759 ] [-0.94903636 -0.07341956 0.45749673 -0.38795957 0.4046863 ]] [[ 1.2861153 -1.4809961 -0.4385244 1.1231953 1.0225635 ] [-0.80224985 0.46608534 1.4460084 -0.27379873 -0.9524367 ] [ 0.54898846 0.7500904 0.3877463 -0.7856354 
-0.65936637] [-0.0590434 0.03957716 1.2933865 -1.3648565 -1.2722344 ]] [[ 0.49193627 0.24477074 -0.9731202 1.0643737 -0.7663011 ] [-1.3778614 -0.00207965 -0.22408724 1.9601214 0.35279614] [-0.06280496 1.7924802 0.06020905 -1.9643929 -0.87136066] [ 0.39353958 1.0780405 0.1632779 0.76390344 0.34970167]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-1 - input_dim:-3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5962.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-1]() %self.dim : int = prim::Constant[value=-3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 1.2089727 2.004666 -1.2854046 0.44324568 1.0277747 ] [-1.1594063 -0.90971303 0.14408317 1.0922164 -0.03859419] [-1.2786063 0.43626922 1.7621056 -0.04758546 -1.4878726 ] [ 0.17939582 1.053278 -0.33050048 -1.1768639 0.2680176 ] [ 0.66358554 -0.13933441 -0.6647547 -0.4494732 0.746023 ]] [[ 1.7906958 -0.0854342 -0.06218077 -1.2769369 -0.06101541] [-0.49855655 0.7278592 0.9225278 -0.9519679 0.5466795 ] [-0.32164264 1.2496045 -2.8153026 0.09145989 0.11887577] [-0.0688718 -3.1065392 -1.067369 -0.28248423 -0.5730557 ] [-0.38334727 -1.0661181 -0.29379162 -0.2701431 0.19769113]] [[ 0.2703503 0.26099727 0.44316903 0.45325223 0.6765553 ] [-0.05238155 0.24820799 0.9354559 -0.4365765 0.7071786 ] [-1.8371772 0.01134123 -0.12817007 -0.21671624 1.0205112 ] [ 0.6790825 0.35614863 -0.43006438 -0.62675047 0.5158209 ] [-0.45139897 1.0690243 -0.82065517 2.630529 0.0230475 ]] [[ 0.6110165 -0.5057109 -2.3481045 0.9364322 -0.15007147] [ 1.713677 -1.3692039 0.00387787 1.6362555 1.386437 ] [-1.4941558 0.17995347 0.34984538 1.1142061 -0.16942331] [ 0.6084395 -0.49293223 0.47919735 0.96345496 1.0557886 ] [ 1.0474921 -0.84095824 0.61988485 0.40461233 0.02167417]]]; ov_res: [[[ 1.2089727 2.004666 -1.2854046 0.44324568 1.0277747 ] [-1.1594063 -0.90971303 0.14408317 1.0922164 -0.03859419] [-1.2786063 0.43626922 1.7621056 -0.04758546 -1.4878726 ] [ 0.17939582 1.053278 -0.33050048 -1.1768639 0.2680176 ] [ 0.66358554 -0.13933441 -0.6647547 -0.4494732 0.746023 ]] [[ 1.7906958 -0.0854342 -0.06218077 -1.2769369 -0.06101541] [-0.49855655 0.7278592 0.9225278 -0.9519679 
0.5466795 ] [-0.32164264 1.2496045 -2.8153026 0.09145989 0.11887577] [-0.0688718 -3.1065392 -1.067369 -0.28248423 -0.5730557 ] [-0.38334727 -1.0661181 -0.29379162 -0.2701431 0.19769113]] [[ 0.2703503 0.26099727 0.44316903 0.45325223 0.6765553 ] [-0.05238155 0.24820799 0.9354559 -0.4365765 0.7071786 ] [-1.8371772 0.01134123 -0.12817007 -0.21671624 1.0205112 ] [ 0.6790825 0.35614863 -0.43006438 -0.62675047 0.5158209 ] [-0.45139897 1.0690243 -0.82065517 2.630529 0.0230475 ]] [[ 0.6110165 -0.5057109 -2.3481045 0.9364322 -0.15007147] [ 1.713677 -1.3692039 0.00387787 1.6362555 1.386437 ] [-1.4941558 0.17995347 0.34984538 1.1142061 -0.16942331] [ 0.6084395 -0.49293223 0.47919735 0.96345496 1.0557886 ] [ 1.0474921 -0.84095824 0.61988485 0.40461233 0.02167417]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-1 - input_dim:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5964.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-1]() %self.dim : int = prim::Constant[value=-2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.29524836 2.296198 -1.1451485 0.25713772 0.7148201 ] [ 0.38043052 -0.04439749 -1.4064273 -0.1680643 2.4917734 ] [-0.29999915 3.417452 0.9535169 -0.51071286 -0.7542406 ] [-1.7645307 0.7071527 1.7398127 0.13758616 -0.7101899 ]] [[-0.9018992 -1.2884746 -0.5261476 0.3854614 -0.13709456] [ 0.13054085 0.23346893 1.5943981 1.3285713 0.6189224 ] [-1.543586 0.13681822 -1.0772237 0.5951156 -0.36136863] [ 0.7510279 0.13119136 0.5200209 -2.095877 -0.64808637]] [[-0.54674524 0.5986878 1.8644469 1.2081263 1.3503237 ] [-0.12251268 1.727529 -0.14172976 -0.7711303 0.15351184] [-0.09474371 -0.46017873 0.5287039 1.2428337 -0.7564976 ] [ 0.58964026 0.1949126 0.7263905 -0.29512584 -0.9743334 ]] [[-0.38376814 0.79959184 1.3340033 0.42824224 -0.5047247 ] [ 0.74814206 0.8993556 -0.31606355 0.14239584 -0.7447108 ] [-0.14535189 -0.20661065 2.100317 0.908466 0.7683606 ] [ 0.03749673 -1.2880629 -1.9203184 0.42200357 1.2634599 ]]]; ov_res: [[[-0.29524836 2.296198 -1.1451485 0.25713772 0.7148201 ] [ 0.38043052 -0.04439749 -1.4064273 -0.1680643 2.4917734 ] [-0.29999915 3.417452 0.9535169 -0.51071286 -0.7542406 ] [-1.7645307 0.7071527 1.7398127 0.13758616 -0.7101899 ]] [[-0.9018992 -1.2884746 -0.5261476 0.3854614 -0.13709456] [ 0.13054085 0.23346893 1.5943981 1.3285713 0.6189224 ] [-1.543586 0.13681822 -1.0772237 0.5951156 -0.36136863] [ 0.7510279 0.13119136 0.5200209 -2.095877 -0.64808637]] [[-0.54674524 0.5986878 1.8644469 1.2081263 1.3503237 ] [-0.12251268 1.727529 -0.14172976 -0.7711303 0.15351184] [-0.09474371 -0.46017873 0.5287039 1.2428337 -0.7564976 ] [ 
0.58964026 0.1949126 0.7263905 -0.29512584 -0.9743334 ]] [[-0.38376814 0.79959184 1.3340033 0.42824224 -0.5047247 ] [ 0.74814206 0.8993556 -0.31606355 0.14239584 -0.7447108 ] [-0.14535189 -0.20661065 2.100317 0.908466 0.7683606 ] [ 0.03749673 -1.2880629 -1.9203184 0.42200357 1.2634599 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-1 - input_dim:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5966.aten_select, %input_tensor.1 : Tensor): %self.dim : int = prim::Constant[value=-1]() %3 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%3) fw_re: [[[ 0.3099673 -1.2667152 1.9236532 0.745592 1.1823673 ] [ 0.689758 -0.65112644 0.98524994 -0.44145814 1.7724675 ] [-1.221214 -0.9465774 0.87987554 0.71754354 -0.16225728] [-1.914607 1.1733546 -1.8925483 -0.2812757 0.30619282]] [[-1.1661332 0.00490916 -0.86699396 -0.45511225 -0.3854357 ] [-0.24993593 -1.5534573 0.39371485 0.16415843 0.88998604] [-0.5517127 -0.30893514 1.442259 -2.0691924 -0.90265 ] [ 0.59259564 0.5017047 0.07138341 0.28499323 1.262937 ]] [[-0.94972897 -1.3861864 1.1076546 -0.84357756 0.12036736] [-1.2504082 -2.8971198 -0.33363765 -0.04829615 -1.0897721 ] [ 0.78434443 0.8169052 1.4908503 1.2593119 1.162632 ] [ 1.2037213 0.2655115 0.68813545 -0.24798517 -1.1004051 ]] [[-0.04566275 1.4111221 0.86346567 -0.7502292 0.3325761 ] [ 0.30213615 -0.5512265 1.1054323 0.57141286 -0.19389157] [ 0.18419869 1.0210749 0.6477897 -1.1634829 -0.40901244] [ 0.3535408 -1.1563393 0.2461406 -0.7938712 -1.5559651 ]]]; ov_res: [[[ 0.3099673 -1.2667152 1.9236532 0.745592 1.1823673 ] [ 0.689758 -0.65112644 0.98524994 -0.44145814 1.7724675 ] [-1.221214 -0.9465774 0.87987554 0.71754354 -0.16225728] [-1.914607 1.1733546 -1.8925483 -0.2812757 0.30619282]] [[-1.1661332 0.00490916 -0.86699396 -0.45511225 -0.3854357 ] [-0.24993593 -1.5534573 0.39371485 0.16415843 0.88998604] [-0.5517127 -0.30893514 1.442259 -2.0691924 -0.90265 ] [ 0.59259564 0.5017047 0.07138341 0.28499323 1.262937 ]] [[-0.94972897 -1.3861864 1.1076546 -0.84357756 0.12036736] [-1.2504082 -2.8971198 -0.33363765 -0.04829615 -1.0897721 ] [ 0.78434443 0.8169052 1.4908503 1.2593119 1.162632 ] [ 1.2037213 0.2655115 0.68813545 -0.24798517 
-1.1004051 ]] [[-0.04566275 1.4111221 0.86346567 -0.7502292 0.3325761 ] [ 0.30213615 -0.5512265 1.1054323 0.57141286 -0.19389157] [ 0.18419869 1.0210749 0.6477897 -1.1634829 -0.40901244] [ 0.3535408 -1.1563393 0.2461406 -0.7938712 -1.5559651 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-1 - input_dim:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5968.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-1]() %self.dim : int = prim::Constant[value=0]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.7861375 1.1679026 -0.18420725 0.53604907 -1.1486657 ] [ 0.8303238 -0.7811384 -0.6741238 0.62266785 1.0653988 ] [-1.5428915 0.07306448 -1.9460927 1.224403 -0.28614834] [-0.9599034 -0.3891961 0.6045563 0.0265373 0.98536205] [-0.61004514 -0.34080508 -1.229013 -0.964705 -1.4402506 ]] [[-1.3564178 -0.7476315 -0.03813539 1.2745162 -1.2824105 ] [-0.36156663 -1.3168281 -0.3539663 0.12014779 0.10189255] [-0.3676381 -0.76046073 -0.04761223 -0.49499562 0.09068259] [-0.30995688 0.6089147 0.7671484 -0.08982708 0.9169135 ] [-0.5350424 0.4428611 0.39636165 -1.5282314 -0.72592866]] [[-0.64243394 -0.0058052 0.6031924 -0.36318436 0.20351577] [-0.36761928 -0.58596456 0.419291 -0.25572756 0.28144455] [-0.30584818 0.45572293 0.5350534 0.7835783 -2.6371467 ] [ 0.05170733 1.0409776 -1.410952 1.821265 1.4761792 ] [-0.7463402 1.3682075 0.80644006 0.9069218 0.7034414 ]] [[-0.30074596 1.5395437 -1.3028071 -0.04519499 1.8561684 ] [-0.23530985 -0.03481362 -1.2279696 -0.2824473 -0.67909324] [-1.0791653 1.5822922 -0.3562304 0.4259885 1.262573 ] [ 0.9160601 1.2218876 -0.03159533 -2.2344618 -0.18146959] [ 0.65191156 -1.5089527 -0.5660747 -0.4662925 0.11877546]]]; ov_res: [[[ 0.7861375 1.1679026 -0.18420725 0.53604907 -1.1486657 ] [ 0.8303238 -0.7811384 -0.6741238 0.62266785 1.0653988 ] [-1.5428915 0.07306448 -1.9460927 1.224403 -0.28614834] [-0.9599034 -0.3891961 0.6045563 0.0265373 0.98536205] [-0.61004514 -0.34080508 -1.229013 -0.964705 -1.4402506 ]] [[-1.3564178 -0.7476315 -0.03813539 1.2745162 -1.2824105 ] [-0.36156663 -1.3168281 -0.3539663 0.12014779 0.10189255] 
[-0.3676381 -0.76046073 -0.04761223 -0.49499562 0.09068259] [-0.30995688 0.6089147 0.7671484 -0.08982708 0.9169135 ] [-0.5350424 0.4428611 0.39636165 -1.5282314 -0.72592866]] [[-0.64243394 -0.0058052 0.6031924 -0.36318436 0.20351577] [-0.36761928 -0.58596456 0.419291 -0.25572756 0.28144455] [-0.30584818 0.45572293 0.5350534 0.7835783 -2.6371467 ] [ 0.05170733 1.0409776 -1.410952 1.821265 1.4761792 ] [-0.7463402 1.3682075 0.80644006 0.9069218 0.7034414 ]] [[-0.30074596 1.5395437 -1.3028071 -0.04519499 1.8561684 ] [-0.23530985 -0.03481362 -1.2279696 -0.2824473 -0.67909324] [-1.0791653 1.5822922 -0.3562304 0.4259885 1.262573 ] [ 0.9160601 1.2218876 -0.03159533 -2.2344618 -0.18146959] [ 0.65191156 -1.5089527 -0.5660747 -0.4662925 0.11877546]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-1 - input_dim:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5970.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-1]() %self.dim : int = prim::Constant[value=1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.20260616 -0.21331148 -0.0950283 -0.11869086 0.4914394 ] [-0.07085714 -0.7840559 1.4272573 -1.0216106 -0.37401348] [-0.5770995 -0.84796584 1.5252137 -1.3088534 -1.0942403 ] [-1.4951775 -0.7115131 1.025097 0.15219636 -0.58410877] [-0.9873029 1.3836912 -0.6686427 -0.94577277 -1.2143065 ]] [[-0.49066237 -0.23548938 -1.0630007 -0.04006962 1.5256668 ] [-1.0831318 0.38728335 -0.6779131 -0.25062466 0.03850194] [ 1.159365 0.00678971 -0.77360904 -0.46750262 -0.7785377 ] [ 2.1282167 -0.5673481 0.78197855 -0.7171759 1.017378 ] [-2.138112 -1.5303578 -0.03898457 -0.02183291 0.97420174]] [[-1.1034937 -0.00235442 -2.1112578 0.7211271 -0.34274012] [ 0.1935709 -0.5572504 -0.25201842 0.30556154 0.49727857] [ 0.84722054 1.3039464 -0.7273509 -0.7300496 -0.663503 ] [-0.5485853 1.6601194 -0.04578996 -0.7362627 -0.13981166] [ 1.4024038 1.2401361 0.78468436 0.2694913 -0.23544008]] [[ 1.4025631 -0.613865 -1.0125972 -0.27832285 0.19587038] [ 0.3531537 0.8515975 0.8044118 0.81268823 0.24813487] [-1.5541584 -0.8956809 1.1370821 0.05170208 0.7377502 ] [-1.6390482 -1.9120165 -1.3270462 2.0518274 -1.5610931 ] [ 0.14057758 0.50606006 0.77595985 0.06837811 0.5992657 ]]]; ov_res: [[[ 0.20260616 -0.21331148 -0.0950283 -0.11869086 0.4914394 ] [-0.07085714 -0.7840559 1.4272573 -1.0216106 -0.37401348] [-0.5770995 -0.84796584 1.5252137 -1.3088534 -1.0942403 ] [-1.4951775 -0.7115131 1.025097 0.15219636 -0.58410877] [-0.9873029 1.3836912 -0.6686427 -0.94577277 -1.2143065 ]] [[-0.49066237 -0.23548938 -1.0630007 -0.04006962 1.5256668 ] [-1.0831318 0.38728335 -0.6779131 
-0.25062466 0.03850194] [ 1.159365 0.00678971 -0.77360904 -0.46750262 -0.7785377 ] [ 2.1282167 -0.5673481 0.78197855 -0.7171759 1.017378 ] [-2.138112 -1.5303578 -0.03898457 -0.02183291 0.97420174]] [[-1.1034937 -0.00235442 -2.1112578 0.7211271 -0.34274012] [ 0.1935709 -0.5572504 -0.25201842 0.30556154 0.49727857] [ 0.84722054 1.3039464 -0.7273509 -0.7300496 -0.663503 ] [-0.5485853 1.6601194 -0.04578996 -0.7362627 -0.13981166] [ 1.4024038 1.2401361 0.78468436 0.2694913 -0.23544008]] [[ 1.4025631 -0.613865 -1.0125972 -0.27832285 0.19587038] [ 0.3531537 0.8515975 0.8044118 0.81268823 0.24813487] [-1.5541584 -0.8956809 1.1370821 0.05170208 0.7377502 ] [-1.6390482 -1.9120165 -1.3270462 2.0518274 -1.5610931 ] [ 0.14057758 0.50606006 0.77595985 0.06837811 0.5992657 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-1 - input_dim:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5972.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-1]() %self.dim : int = prim::Constant[value=2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.38357323 -0.20967427 0.74899113 -0.8677405 0.15523191] [ 1.9126375 0.13251884 -1.9062802 1.7353916 -0.6250151 ] [ 1.010719 2.1123223 -0.40731078 -0.11003038 -0.9464605 ] [-0.8684959 2.1412642 0.35333994 0.47528479 -0.51597637]] [[ 2.690647 0.92314494 -1.3039601 0.91894245 0.04133201] [-0.36888275 0.9049037 -0.6929162 1.8770102 -0.2114246 ] [-0.36800545 -0.79536664 2.126887 0.03560094 1.2632914 ] [ 0.7492125 0.15687156 -1.99275 2.1383336 -1.3941554 ]] [[ 1.0640937 -0.44219282 0.6297008 -0.48439467 -0.24355279] [ 2.397074 0.2783324 -1.3509613 -0.44111502 -1.8286791 ] [ 0.6226033 -0.18804438 0.7930671 1.3767756 1.1578368 ] [-0.20229737 -1.7879245 -1.5771672 -1.7444844 -0.0917534 ]] [[-0.47198513 1.1626298 0.11928289 -0.06843435 -0.30471396] [ 0.6583473 0.01238989 1.3054677 0.5792086 0.2197272 ] [ 0.03860723 0.1948981 1.019301 -0.66564965 0.09350236] [ 0.15081012 -0.99986476 1.4348273 1.1471171 0.7135361 ]]]; ov_res: [[[-0.38357323 -0.20967427 0.74899113 -0.8677405 0.15523191] [ 1.9126375 0.13251884 -1.9062802 1.7353916 -0.6250151 ] [ 1.010719 2.1123223 -0.40731078 -0.11003038 -0.9464605 ] [-0.8684959 2.1412642 0.35333994 0.47528479 -0.51597637]] [[ 2.690647 0.92314494 -1.3039601 0.91894245 0.04133201] [-0.36888275 0.9049037 -0.6929162 1.8770102 -0.2114246 ] [-0.36800545 -0.79536664 2.126887 0.03560094 1.2632914 ] [ 0.7492125 0.15687156 -1.99275 2.1383336 -1.3941554 ]] [[ 1.0640937 -0.44219282 0.6297008 -0.48439467 -0.24355279] [ 2.397074 0.2783324 -1.3509613 -0.44111502 -1.8286791 ] [ 0.6226033 -0.18804438 0.7930671 1.3767756 1.1578368 ] 
[-0.20229737 -1.7879245 -1.5771672 -1.7444844 -0.0917534 ]] [[-0.47198513 1.1626298 0.11928289 -0.06843435 -0.30471396] [ 0.6583473 0.01238989 1.3054677 0.5792086 0.2197272 ] [ 0.03860723 0.1948981 1.019301 -0.66564965 0.09350236] [ 0.15081012 -0.99986476 1.4348273 1.1471171 0.7135361 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:-1 - input_dim:3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5974.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=-1]() %self.dim : int = prim::Constant[value=3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 1.4618716 -0.6449075 -1.8958504 1.3583529 0.14838855] [-0.7969663 -0.12484086 0.39820984 -1.5175849 0.84961253] [ 0.14341623 1.1510556 -0.8501704 -0.38309464 1.6598744 ] [-1.6783849 0.2655586 0.21896763 -0.04065166 0.2949725 ]] [[-0.80852 0.16296384 -0.9463308 -0.65179807 0.06992064] [-0.24134794 -0.26294494 0.1475911 1.7282896 -1.6075375 ] [-0.5302198 0.65622187 -1.1307169 -1.4282962 -0.53763384] [-0.00782592 -0.4856815 -1.1537092 -0.5370643 -0.2869131 ]] [[ 0.5413148 -0.99949455 0.09050497 1.6494256 -0.32690838] [-2.3119469 -0.25616702 0.14721052 -0.78662664 -0.7925818 ] [ 0.15157788 -0.662302 0.35615388 -0.28962892 -0.86770195] [ 0.88533276 -0.22421916 0.82905567 2.3501527 0.04432616]] [[ 1.517404 -0.29216865 2.0341327 1.1066929 -0.45612672] [ 0.699366 -1.1747328 1.9806432 0.2993074 1.4939219 ] [ 0.14662118 -0.02921615 -0.5081634 -0.18893246 0.37118828] [ 0.15088993 -2.0225325 0.27974042 -1.4470035 0.93970364]]]; ov_res: [[[ 1.4618716 -0.6449075 -1.8958504 1.3583529 0.14838855] [-0.7969663 -0.12484086 0.39820984 -1.5175849 0.84961253] [ 0.14341623 1.1510556 -0.8501704 -0.38309464 1.6598744 ] [-1.6783849 0.2655586 0.21896763 -0.04065166 0.2949725 ]] [[-0.80852 0.16296384 -0.9463308 -0.65179807 0.06992064] [-0.24134794 -0.26294494 0.1475911 1.7282896 -1.6075375 ] [-0.5302198 0.65622187 -1.1307169 -1.4282962 -0.53763384] [-0.00782592 -0.4856815 -1.1537092 -0.5370643 -0.2869131 ]] [[ 0.5413148 -0.99949455 0.09050497 1.6494256 -0.32690838] [-2.3119469 -0.25616702 0.14721052 -0.78662664 -0.7925818 ] [ 0.15157788 -0.662302 0.35615388 
-0.28962892 -0.86770195] [ 0.88533276 -0.22421916 0.82905567 2.3501527 0.04432616]] [[ 1.517404 -0.29216865 2.0341327 1.1066929 -0.45612672] [ 0.699366 -1.1747328 1.9806432 0.2993074 1.4939219 ] [ 0.14662118 -0.02921615 -0.5081634 -0.18893246 0.37118828] [ 0.15088993 -2.0225325 0.27974042 -1.4470035 0.93970364]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:0 - input_dim:-3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5976.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=0]() %self.dim : int = prim::Constant[value=-3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.37260908 -1.4943616 1.4300877 -0.7906019 -1.4600525 ] [ 2.705029 0.9837463 0.29015604 1.1427509 0.11391425] [-0.10942852 0.08234327 1.7413774 0.14178844 0.9716021 ] [-0.5092014 0.93033415 -0.14186794 1.8255346 -0.5509991 ] [-0.737445 -0.69622636 -1.0792332 -0.39687577 -0.26405227]] [[-0.9422175 0.3979821 0.1012907 0.16384225 -0.34170708] [ 0.45812294 1.6800001 -0.34588265 1.4281894 0.46154407] [ 0.5509322 0.34018016 -0.7995924 -1.5877621 0.31837493] [ 1.7471744 0.49738342 0.59764785 0.41189766 0.80384356] [ 0.5438054 -0.23470895 -1.8296251 0.37190264 1.9965968 ]] [[ 0.5291941 -0.49117535 -0.95028716 -0.41086885 -0.25793612] [-0.01848761 0.68791574 -2.3615615 -0.2559837 1.0671245 ] [ 0.26350567 -0.67945254 -0.3698491 0.14815438 0.10357586] [-2.7699213 -0.08946104 0.36285773 1.000412 0.01709054] [-1.0137262 0.3496661 0.30586833 -0.12538733 1.056507 ]] [[ 0.08515951 -1.5655926 0.8282562 -1.7990505 0.00438265] [-0.08589237 1.3575554 -0.3012036 2.1318443 -0.3611078 ] [-0.27635416 -1.6198438 -0.44634226 1.9315419 -0.6629085 ] [ 0.3466566 1.3284532 -0.05212243 0.6255283 -0.20259798] [ 0.6069557 -1.9868921 0.27583534 -0.18689625 0.38352132]]]; ov_res: [[[ 0.37260908 -1.4943616 1.4300877 -0.7906019 -1.4600525 ] [ 2.705029 0.9837463 0.29015604 1.1427509 0.11391425] [-0.10942852 0.08234327 1.7413774 0.14178844 0.9716021 ] [-0.5092014 0.93033415 -0.14186794 1.8255346 -0.5509991 ] [-0.737445 -0.69622636 -1.0792332 -0.39687577 -0.26405227]] [[-0.9422175 0.3979821 0.1012907 0.16384225 -0.34170708] [ 0.45812294 1.6800001 -0.34588265 1.4281894 
0.46154407] [ 0.5509322 0.34018016 -0.7995924 -1.5877621 0.31837493] [ 1.7471744 0.49738342 0.59764785 0.41189766 0.80384356] [ 0.5438054 -0.23470895 -1.8296251 0.37190264 1.9965968 ]] [[ 0.5291941 -0.49117535 -0.95028716 -0.41086885 -0.25793612] [-0.01848761 0.68791574 -2.3615615 -0.2559837 1.0671245 ] [ 0.26350567 -0.67945254 -0.3698491 0.14815438 0.10357586] [-2.7699213 -0.08946104 0.36285773 1.000412 0.01709054] [-1.0137262 0.3496661 0.30586833 -0.12538733 1.056507 ]] [[ 0.08515951 -1.5655926 0.8282562 -1.7990505 0.00438265] [-0.08589237 1.3575554 -0.3012036 2.1318443 -0.3611078 ] [-0.27635416 -1.6198438 -0.44634226 1.9315419 -0.6629085 ] [ 0.3466566 1.3284532 -0.05212243 0.6255283 -0.20259798] [ 0.6069557 -1.9868921 0.27583534 -0.18689625 0.38352132]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:0 - input_dim:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5978.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=0]() %self.dim : int = prim::Constant[value=-2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.26757243 -0.25852087 -1.0687741 -0.7090181 0.05653022] [ 1.1318058 0.08931836 1.8275868 -1.3593017 -1.1420606 ] [ 0.0975955 0.65546083 1.7384975 -0.26708513 0.02025575] [-1.6072822 -0.24808381 0.7783645 0.9234407 0.6827706 ]] [[ 1.8002237 0.85733086 1.5117344 1.6312181 0.42758912] [ 0.87607944 0.99856967 0.6382247 -0.3782947 0.46474954] [ 0.37555864 -0.6666283 1.0138131 1.4874347 0.7499398 ] [ 0.19109276 -0.5345968 0.10472512 0.02006262 0.19328125]] [[ 0.36985037 0.9864529 0.3293177 -1.5351576 -1.7631865 ] [ 0.51629394 -2.0458941 -0.49474117 0.45184112 1.7914288 ] [ 0.2804371 -0.9983926 1.3077205 -0.36742768 0.59179735] [-0.9743111 0.9813476 -1.2423108 -0.02357311 -1.4301943 ]] [[ 0.9061031 -1.3359709 -0.11281363 0.4263098 -0.75453556] [-0.24553587 1.0501949 0.09358677 -1.3060998 -1.1565498 ] [ 0.24826613 2.1477933 0.22886586 0.8822308 0.41383862] [-0.21023865 0.29965404 -0.28459305 -0.25589696 -0.7764757 ]]]; ov_res: [[[ 0.26757243 -0.25852087 -1.0687741 -0.7090181 0.05653022] [ 1.1318058 0.08931836 1.8275868 -1.3593017 -1.1420606 ] [ 0.0975955 0.65546083 1.7384975 -0.26708513 0.02025575] [-1.6072822 -0.24808381 0.7783645 0.9234407 0.6827706 ]] [[ 1.8002237 0.85733086 1.5117344 1.6312181 0.42758912] [ 0.87607944 0.99856967 0.6382247 -0.3782947 0.46474954] [ 0.37555864 -0.6666283 1.0138131 1.4874347 0.7499398 ] [ 0.19109276 -0.5345968 0.10472512 0.02006262 0.19328125]] [[ 0.36985037 0.9864529 0.3293177 -1.5351576 -1.7631865 ] [ 0.51629394 -2.0458941 -0.49474117 0.45184112 1.7914288 ] [ 0.2804371 -0.9983926 1.3077205 -0.36742768 
0.59179735] [-0.9743111 0.9813476 -1.2423108 -0.02357311 -1.4301943 ]] [[ 0.9061031 -1.3359709 -0.11281363 0.4263098 -0.75453556] [-0.24553587 1.0501949 0.09358677 -1.3060998 -1.1565498 ] [ 0.24826613 2.1477933 0.22886586 0.8822308 0.41383862] [-0.21023865 0.29965404 -0.28459305 -0.25589696 -0.7764757 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:0 - input_dim:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5980.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=0]() %self.dim : int = prim::Constant[value=-1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 1.8670624 0.45682397 -0.6898193 -0.8464461 0.6456391 ] [ 1.7151198 0.31369936 -1.0030532 -0.6756987 -1.270041 ] [-0.25647452 -0.5097871 -0.22431858 0.2600841 1.4014366 ] [ 0.12235703 0.900228 0.1873335 -0.30806682 -1.4100612 ]] [[-0.38019377 0.36042303 -0.6550015 -2.0748646 -1.029035 ] [-0.29923183 0.8842875 0.53613394 -1.0372777 -0.64818394] [-0.9537342 -2.117414 -0.30082953 -0.9294071 1.2203352 ] [ 0.90408784 -0.9829983 -0.08746851 -1.1691941 -0.6537673 ]] [[ 0.12201808 1.150503 0.06289238 -1.2513 0.48238534] [-0.7759286 -0.1868751 0.31759316 -0.62024367 -0.90956056] [-0.40142897 1.0091348 1.6257535 -0.2844764 0.42802843] [ 0.6183475 -0.66235 -0.4427065 -0.97498643 0.64411217]] [[-0.96665716 -1.5608019 -1.0262572 1.0530436 0.35977423] [ 0.15513282 -0.9739495 0.82920396 -0.54379344 0.14029503] [ 1.6697319 -0.11778482 -0.3731567 2.0270374 -0.34990838] [-1.8494176 -0.73335373 -0.4847459 1.6484197 -0.2320948 ]]]; ov_res: [[[ 1.8670624 0.45682397 -0.6898193 -0.8464461 0.6456391 ] [ 1.7151198 0.31369936 -1.0030532 -0.6756987 -1.270041 ] [-0.25647452 -0.5097871 -0.22431858 0.2600841 1.4014366 ] [ 0.12235703 0.900228 0.1873335 -0.30806682 -1.4100612 ]] [[-0.38019377 0.36042303 -0.6550015 -2.0748646 -1.029035 ] [-0.29923183 0.8842875 0.53613394 -1.0372777 -0.64818394] [-0.9537342 -2.117414 -0.30082953 -0.9294071 1.2203352 ] [ 0.90408784 -0.9829983 -0.08746851 -1.1691941 -0.6537673 ]] [[ 0.12201808 1.150503 0.06289238 -1.2513 0.48238534] [-0.7759286 -0.1868751 0.31759316 -0.62024367 -0.90956056] [-0.40142897 1.0091348 1.6257535 -0.2844764 
0.42802843] [ 0.6183475 -0.66235 -0.4427065 -0.97498643 0.64411217]] [[-0.96665716 -1.5608019 -1.0262572 1.0530436 0.35977423] [ 0.15513282 -0.9739495 0.82920396 -0.54379344 0.14029503] [ 1.6697319 -0.11778482 -0.3731567 2.0270374 -0.34990838] [-1.8494176 -0.73335373 -0.4847459 1.6484197 -0.2320948 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:0 - input_dim:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5982.aten_select, %input_tensor.1 : Tensor): %self.dim : int = prim::Constant[value=0]() %3 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%3) fw_re: [[[-0.26482177 -2.8461478 -0.90579736 -0.02224959 -1.2729166 ] [-0.18937849 -0.33778182 0.13550824 0.25510666 -1.1656139 ] [ 0.8948668 0.02079965 1.1766523 -1.5772998 -0.34164456] [-2.1245399 0.13277824 -1.2484348 1.2398337 -1.0213348 ] [-0.9141222 1.2341499 -0.5400392 0.5181857 -0.50430256]] [[ 2.3839326 -1.0768787 1.0681026 -1.5416672 -1.2866956 ] [ 0.03612834 0.39290354 -0.9121178 -0.11302303 -0.49647814] [ 0.46944278 -0.31926018 -0.6130496 -0.55727 0.09123595] [ 0.5464037 -0.05607822 1.8199344 -1.806183 -0.8858692 ] [ 1.2194173 0.08698103 -0.524985 -1.4302459 -2.1958563 ]] [[-0.74190104 -0.91555494 -0.45142898 0.17838617 0.02809512] [ 0.73293364 -0.7661237 -0.2873683 -0.12303276 -0.0842155 ] [-0.1732924 -0.8771803 0.2503553 -0.07302169 -0.38987306] [-1.9990902 2.589851 1.8566309 -0.01081084 -0.21397416] [-0.86590976 0.700855 -0.8682225 2.4862633 0.4594704 ]] [[ 1.1301382 -1.1085199 0.30183628 0.59449303 2.3800433 ] [-0.18615538 0.41678664 0.16506125 0.08291271 -1.5515826 ] [ 0.76817906 0.72505593 0.86008877 0.49571797 -0.31461892] [-0.18032406 0.3705424 -2.0246608 -0.21034326 0.39360306] [ 0.00925331 0.3128932 0.89065975 0.7555296 0.22498567]]]; ov_res: [[[-0.26482177 -2.8461478 -0.90579736 -0.02224959 -1.2729166 ] [-0.18937849 -0.33778182 0.13550824 0.25510666 -1.1656139 ] [ 0.8948668 0.02079965 1.1766523 -1.5772998 -0.34164456] [-2.1245399 0.13277824 -1.2484348 1.2398337 -1.0213348 ] [-0.9141222 1.2341499 -0.5400392 0.5181857 -0.50430256]] [[ 2.3839326 -1.0768787 1.0681026 -1.5416672 -1.2866956 ] [ 0.03612834 0.39290354 -0.9121178 -0.11302303 -0.49647814] [ 0.46944278 -0.31926018 
-0.6130496 -0.55727 0.09123595] [ 0.5464037 -0.05607822 1.8199344 -1.806183 -0.8858692 ] [ 1.2194173 0.08698103 -0.524985 -1.4302459 -2.1958563 ]] [[-0.74190104 -0.91555494 -0.45142898 0.17838617 0.02809512] [ 0.73293364 -0.7661237 -0.2873683 -0.12303276 -0.0842155 ] [-0.1732924 -0.8771803 0.2503553 -0.07302169 -0.38987306] [-1.9990902 2.589851 1.8566309 -0.01081084 -0.21397416] [-0.86590976 0.700855 -0.8682225 2.4862633 0.4594704 ]] [[ 1.1301382 -1.1085199 0.30183628 0.59449303 2.3800433 ] [-0.18615538 0.41678664 0.16506125 0.08291271 -1.5515826 ] [ 0.76817906 0.72505593 0.86008877 0.49571797 -0.31461892] [-0.18032406 0.3705424 -2.0246608 -0.21034326 0.39360306] [ 0.00925331 0.3128932 0.89065975 0.7555296 0.22498567]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:0 - input_dim:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5984.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=0]() %self.dim : int = prim::Constant[value=1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.5536697 -1.9211376 -0.3058528 -0.37426656 0.730388 ] [-0.13304082 1.1663631 0.26697457 -0.80383193 -1.3067704 ] [-1.2927413 2.102896 1.0654237 0.52688444 1.0851346 ] [ 0.19239755 -1.5513082 1.5391327 0.52384776 -2.108364 ] [-1.6164845 0.41035202 -0.10068398 -0.8617179 -1.8149378 ]] [[ 0.27953812 -0.84310347 0.72272223 -0.25409767 0.20991328] [ 0.86586905 -0.15742333 1.8824551 0.3087523 -0.5554482 ] [ 1.039405 -0.0572846 -0.34046772 -0.28329998 0.18250608] [ 0.9532352 0.9985015 0.48167917 1.9023824 -1.2021948 ] [-0.40137583 0.5160991 -0.744547 -0.22608739 0.74586433]] [[-0.5552202 -0.7495041 0.16302893 -0.06147727 -1.7092307 ] [ 0.2396538 -2.3563333 -0.15807757 -0.02077333 -0.92927957] [ 0.8221337 -1.7284654 1.5089637 -1.2858872 -0.29431328] [-0.43140092 1.7483517 -0.0086504 -0.37430638 1.1858191 ] [ 0.6293202 -0.41869515 -1.0968101 -0.05607809 1.4040852 ]] [[-1.5062476 0.15087102 -0.42296946 0.19687782 -1.1239165 ] [ 0.7477348 -0.987852 -0.27076048 0.21406573 -0.24684812] [ 0.2952292 -1.502374 2.049523 1.3208166 1.0230368 ] [ 0.08535934 0.49920347 -0.24321051 -1.1429678 1.3602766 ] [-0.06783167 0.16587065 1.1722099 2.362386 0.52762234]]]; ov_res: [[[ 0.5536697 -1.9211376 -0.3058528 -0.37426656 0.730388 ] [-0.13304082 1.1663631 0.26697457 -0.80383193 -1.3067704 ] [-1.2927413 2.102896 1.0654237 0.52688444 1.0851346 ] [ 0.19239755 -1.5513082 1.5391327 0.52384776 -2.108364 ] [-1.6164845 0.41035202 -0.10068398 -0.8617179 -1.8149378 ]] [[ 0.27953812 -0.84310347 0.72272223 -0.25409767 0.20991328] [ 0.86586905 -0.15742333 1.8824551 0.3087523 
-0.5554482 ] [ 1.039405 -0.0572846 -0.34046772 -0.28329998 0.18250608] [ 0.9532352 0.9985015 0.48167917 1.9023824 -1.2021948 ] [-0.40137583 0.5160991 -0.744547 -0.22608739 0.74586433]] [[-0.5552202 -0.7495041 0.16302893 -0.06147727 -1.7092307 ] [ 0.2396538 -2.3563333 -0.15807757 -0.02077333 -0.92927957] [ 0.8221337 -1.7284654 1.5089637 -1.2858872 -0.29431328] [-0.43140092 1.7483517 -0.0086504 -0.37430638 1.1858191 ] [ 0.6293202 -0.41869515 -1.0968101 -0.05607809 1.4040852 ]] [[-1.5062476 0.15087102 -0.42296946 0.19687782 -1.1239165 ] [ 0.7477348 -0.987852 -0.27076048 0.21406573 -0.24684812] [ 0.2952292 -1.502374 2.049523 1.3208166 1.0230368 ] [ 0.08535934 0.49920347 -0.24321051 -1.1429678 1.3602766 ] [-0.06783167 0.16587065 1.1722099 2.362386 0.52762234]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:0 - input_dim:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5986.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=0]() %self.dim : int = prim::Constant[value=2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.23607521 -2.1867614 -0.3174372 -0.42868376 -0.89466923] [-0.3000663 0.6819491 -0.3763787 0.07823204 0.08270976] [ 0.8420062 0.05424736 -0.80123186 1.0247779 -0.5342197 ] [-0.19399643 -0.2074587 -0.080167 -0.39995992 1.3419467 ]] [[ 1.1789919 -0.34993094 -1.7172855 0.02465028 1.8121111 ] [ 2.0241253 0.30403158 -0.32358804 -1.009101 -0.01253523] [ 1.4177052 -0.6809187 1.0926253 1.1855452 0.48642167] [ 0.76858264 0.41325665 -0.9920637 -0.6517191 0.6376975 ]] [[ 0.31862843 -1.7774917 -0.27587548 0.5055197 -0.60076934] [-0.8490805 -0.5958037 1.324985 -1.9793195 0.55285823] [ 1.863198 -0.3090783 0.9406186 0.43175378 1.1394386 ] [ 1.3529949 -1.5042002 0.379159 1.1832359 2.3347538 ]] [[ 0.63587624 -0.50637 -0.74801815 -1.9488914 0.22630642] [ 0.3336277 0.93195176 0.36793974 -0.8146015 -0.38442948] [-0.07176496 0.9513393 -0.45802182 -1.0138161 -0.5649501 ] [ 0.40624213 2.0041907 -1.6371249 -0.7941404 2.2557063 ]]]; ov_res: [[[ 0.23607521 -2.1867614 -0.3174372 -0.42868376 -0.89466923] [-0.3000663 0.6819491 -0.3763787 0.07823204 0.08270976] [ 0.8420062 0.05424736 -0.80123186 1.0247779 -0.5342197 ] [-0.19399643 -0.2074587 -0.080167 -0.39995992 1.3419467 ]] [[ 1.1789919 -0.34993094 -1.7172855 0.02465028 1.8121111 ] [ 2.0241253 0.30403158 -0.32358804 -1.009101 -0.01253523] [ 1.4177052 -0.6809187 1.0926253 1.1855452 0.48642167] [ 0.76858264 0.41325665 -0.9920637 -0.6517191 0.6376975 ]] [[ 0.31862843 -1.7774917 -0.27587548 0.5055197 -0.60076934] [-0.8490805 -0.5958037 1.324985 -1.9793195 0.55285823] [ 1.863198 -0.3090783 0.9406186 0.43175378 1.1394386 
] [ 1.3529949 -1.5042002 0.379159 1.1832359 2.3347538 ]] [[ 0.63587624 -0.50637 -0.74801815 -1.9488914 0.22630642] [ 0.3336277 0.93195176 0.36793974 -0.8146015 -0.38442948] [-0.07176496 0.9513393 -0.45802182 -1.0138161 -0.5649501 ] [ 0.40624213 2.0041907 -1.6371249 -0.7941404 2.2557063 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:0 - input_dim:3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5988.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=0]() %self.dim : int = prim::Constant[value=3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.3987473 2.666273 0.7933753 -0.909914 -0.21588495] [ 0.72278434 0.49368823 -0.5247975 -1.2712488 -1.2529645 ] [-0.34724003 0.3001143 -2.0084484 -0.85237825 -0.7858303 ] [ 1.1256095 -0.68473125 0.58843374 0.55720997 -0.96157867]] [[ 1.0557162 -0.9982573 1.6544931 0.5711259 1.979297 ] [-0.8662384 1.9216546 -1.142766 -0.44513828 -0.66394395] [ 2.5067296 1.6139877 0.7195058 1.070844 -1.8715954 ] [-0.00567116 0.05235162 0.06794027 -0.01318772 0.5266428 ]] [[-2.4389997 0.3553291 0.4795503 -0.08610045 -0.06554014] [ 0.4712523 1.0808377 -0.36794627 0.8056324 -0.3427876 ] [ 1.9766484 0.5468124 -0.67752314 -0.26845866 -0.39561987] [-1.0300199 1.8898746 0.4750471 0.5774179 -0.721604 ]] [[ 0.43645564 0.2590531 0.20850442 1.516933 -0.4451783 ] [-0.27399677 0.5500089 0.18554486 0.14659318 0.64720076] [-1.0303078 0.72018397 1.6819186 -1.1297419 -0.1558467 ] [-0.29592264 0.817381 0.81330544 0.9917414 1.6295037 ]]]; ov_res: [[[ 0.3987473 2.666273 0.7933753 -0.909914 -0.21588495] [ 0.72278434 0.49368823 -0.5247975 -1.2712488 -1.2529645 ] [-0.34724003 0.3001143 -2.0084484 -0.85237825 -0.7858303 ] [ 1.1256095 -0.68473125 0.58843374 0.55720997 -0.96157867]] [[ 1.0557162 -0.9982573 1.6544931 0.5711259 1.979297 ] [-0.8662384 1.9216546 -1.142766 -0.44513828 -0.66394395] [ 2.5067296 1.6139877 0.7195058 1.070844 -1.8715954 ] [-0.00567116 0.05235162 0.06794027 -0.01318772 0.5266428 ]] [[-2.4389997 0.3553291 0.4795503 -0.08610045 -0.06554014] [ 0.4712523 1.0808377 -0.36794627 0.8056324 -0.3427876 ] [ 1.9766484 0.5468124 -0.67752314 -0.26845866 -0.39561987] 
[-1.0300199 1.8898746 0.4750471 0.5774179 -0.721604 ]] [[ 0.43645564 0.2590531 0.20850442 1.516933 -0.4451783 ] [-0.27399677 0.5500089 0.18554486 0.14659318 0.64720076] [-1.0303078 0.72018397 1.6819186 -1.1297419 -0.1558467 ] [-0.29592264 0.817381 0.81330544 0.9917414 1.6295037 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:1 - input_dim:-3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5990.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=1]() %self.dim : int = prim::Constant[value=-3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.06614361 -1.7871783 -1.2201718 1.6147348 -1.2821292 ] [ 0.24434112 0.4339945 0.64803576 -0.05028783 -1.4249799 ] [ 0.2105132 -0.4777646 0.05730173 0.5658164 0.70480144] [-1.5448009 0.63560617 0.37052467 -0.7370848 -0.8169605 ] [-0.94178474 1.9768909 -0.5232339 0.31929776 -0.84275913]] [[-0.2985812 -0.4500437 -0.43375468 -0.21859252 1.142111 ] [-0.55912334 -0.09038837 0.23705894 0.8027041 -0.6028607 ] [ 1.9233918 0.61833954 -0.17087577 -0.6608746 0.15620647] [-1.5604119 -2.9734516 0.81866264 0.43813935 -0.46881962] [ 0.3478074 -0.10461272 0.4743723 1.0268432 -0.7339133 ]] [[-0.72114855 -0.043174 -1.7117447 1.0996646 0.72405994] [ 0.04415667 -0.697497 -0.6171409 0.4228822 1.2255472 ] [-1.595704 0.51755846 3.0262902 1.2049123 -0.00375948] [-1.2269624 0.21119685 -0.7305148 1.5758255 0.7424498 ] [ 1.1852561 -0.20335987 1.3953148 2.1725647 0.2667565 ]] [[ 1.6060425 -1.1810622 0.5690951 0.58977574 1.5165156 ] [ 2.1691384 -0.50603455 -0.5929476 -0.98114765 -0.82791585] [ 0.10993912 0.11011061 -0.958224 -0.305968 -0.18322842] [ 1.640373 1.6309141 -0.7177158 -1.9214277 -0.9074878 ] [-0.6348658 -0.59980464 0.6807688 -0.09192532 0.25220314]]]; ov_res: [[[ 0.06614361 -1.7871783 -1.2201718 1.6147348 -1.2821292 ] [ 0.24434112 0.4339945 0.64803576 -0.05028783 -1.4249799 ] [ 0.2105132 -0.4777646 0.05730173 0.5658164 0.70480144] [-1.5448009 0.63560617 0.37052467 -0.7370848 -0.8169605 ] [-0.94178474 1.9768909 -0.5232339 0.31929776 -0.84275913]] [[-0.2985812 -0.4500437 -0.43375468 -0.21859252 1.142111 ] [-0.55912334 -0.09038837 0.23705894 0.8027041 
-0.6028607 ] [ 1.9233918 0.61833954 -0.17087577 -0.6608746 0.15620647] [-1.5604119 -2.9734516 0.81866264 0.43813935 -0.46881962] [ 0.3478074 -0.10461272 0.4743723 1.0268432 -0.7339133 ]] [[-0.72114855 -0.043174 -1.7117447 1.0996646 0.72405994] [ 0.04415667 -0.697497 -0.6171409 0.4228822 1.2255472 ] [-1.595704 0.51755846 3.0262902 1.2049123 -0.00375948] [-1.2269624 0.21119685 -0.7305148 1.5758255 0.7424498 ] [ 1.1852561 -0.20335987 1.3953148 2.1725647 0.2667565 ]] [[ 1.6060425 -1.1810622 0.5690951 0.58977574 1.5165156 ] [ 2.1691384 -0.50603455 -0.5929476 -0.98114765 -0.82791585] [ 0.10993912 0.11011061 -0.958224 -0.305968 -0.18322842] [ 1.640373 1.6309141 -0.7177158 -1.9214277 -0.9074878 ] [-0.6348658 -0.59980464 0.6807688 -0.09192532 0.25220314]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:1 - input_dim:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5992.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=1]() %self.dim : int = prim::Constant[value=-2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.22200972 2.0274012 -0.11606082 0.5481418 -2.0835938 ] [ 1.5969677 -0.57930607 -0.32096064 0.24992254 -0.53647614] [ 0.16165383 0.86362195 -1.2017766 -1.6124549 1.103299 ] [ 1.3178369 1.2141767 -0.36945862 -0.98503673 -0.46021476]] [[ 2.1444979 0.8468188 0.30326757 -1.0849324 -0.32757804] [-0.7784845 0.36086065 0.44337597 -0.3059683 -0.61988693] [-0.50495297 0.9171928 1.3561953 -0.6156365 0.25718042] [-1.3119166 0.06895442 -0.5658546 -0.7099808 -0.7893813 ]] [[-1.5201133 -0.7852822 1.2621845 1.0804685 0.15035628] [ 0.47693446 -0.69819397 1.932763 0.491587 1.062575 ] [ 1.6617194 0.03400578 -1.0735916 -0.15416135 1.1934882 ] [ 1.2512362 -1.3825535 0.9677766 0.31235763 -0.3498662 ]] [[-1.2896141 -1.0098195 0.4469596 0.86862475 -0.3907727 ] [ 1.5176437 -0.22284557 0.93109787 -2.2121582 0.82611823] [ 0.5759651 0.17766409 -0.57505995 -0.06493708 0.5998285 ] [-1.6247606 0.31596243 0.7632543 -2.546138 1.298381 ]]]; ov_res: [[[-0.22200972 2.0274012 -0.11606082 0.5481418 -2.0835938 ] [ 1.5969677 -0.57930607 -0.32096064 0.24992254 -0.53647614] [ 0.16165383 0.86362195 -1.2017766 -1.6124549 1.103299 ] [ 1.3178369 1.2141767 -0.36945862 -0.98503673 -0.46021476]] [[ 2.1444979 0.8468188 0.30326757 -1.0849324 -0.32757804] [-0.7784845 0.36086065 0.44337597 -0.3059683 -0.61988693] [-0.50495297 0.9171928 1.3561953 -0.6156365 0.25718042] [-1.3119166 0.06895442 -0.5658546 -0.7099808 -0.7893813 ]] [[-1.5201133 -0.7852822 1.2621845 1.0804685 0.15035628] [ 0.47693446 -0.69819397 1.932763 0.491587 1.062575 ] [ 1.6617194 0.03400578 -1.0735916 -0.15416135 
1.1934882 ] [ 1.2512362 -1.3825535 0.9677766 0.31235763 -0.3498662 ]] [[-1.2896141 -1.0098195 0.4469596 0.86862475 -0.3907727 ] [ 1.5176437 -0.22284557 0.93109787 -2.2121582 0.82611823] [ 0.5759651 0.17766409 -0.57505995 -0.06493708 0.5998285 ] [-1.6247606 0.31596243 0.7632543 -2.546138 1.298381 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:1 - input_dim:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5994.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=1]() %self.dim : int = prim::Constant[value=-1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.0122691 1.6213742 -0.11526246 1.7072552 -0.13771594] [-0.87425816 -1.2841973 0.920445 -0.508474 -1.2798809 ] [-1.0690305 0.4295285 0.04403275 0.05855419 -1.3155439 ] [ 0.81098497 0.3589691 -1.0580509 -0.4617901 -0.7839067 ]] [[ 0.68589073 -0.25962004 1.0794731 0.7188473 -0.6227861 ] [ 0.70808643 -0.7708688 -1.154662 -2.3388245 0.6757549 ] [ 0.4960836 -0.01588113 -1.472849 -1.2824447 -0.75860393] [ 0.7643551 -0.91167814 2.324813 0.67732584 1.7780069 ]] [[ 1.8609897 -0.17054826 0.16502154 -0.38524458 -1.106773 ] [-1.2565964 -0.36254686 1.8967409 -1.2192224 -0.558959 ] [-0.10192228 0.49773398 1.5181601 -0.6003924 1.2000823 ] [ 0.6590081 -1.5830476 -0.49266085 0.0560554 -1.1917439 ]] [[ 0.7723955 1.7647452 -0.83985096 -0.89885455 0.6290373 ] [ 0.33850786 1.0261238 -0.8263465 -1.0150611 -0.5183308 ] [ 0.51022315 0.45334038 -0.59301 -1.0152133 0.7603049 ] [-0.85179263 1.2243277 -0.6562984 -0.68594736 -0.40759638]]]; ov_res: [[[-0.0122691 1.6213742 -0.11526246 1.7072552 -0.13771594] [-0.87425816 -1.2841973 0.920445 -0.508474 -1.2798809 ] [-1.0690305 0.4295285 0.04403275 0.05855419 -1.3155439 ] [ 0.81098497 0.3589691 -1.0580509 -0.4617901 -0.7839067 ]] [[ 0.68589073 -0.25962004 1.0794731 0.7188473 -0.6227861 ] [ 0.70808643 -0.7708688 -1.154662 -2.3388245 0.6757549 ] [ 0.4960836 -0.01588113 -1.472849 -1.2824447 -0.75860393] [ 0.7643551 -0.91167814 2.324813 0.67732584 1.7780069 ]] [[ 1.8609897 -0.17054826 0.16502154 -0.38524458 -1.106773 ] [-1.2565964 -0.36254686 1.8967409 -1.2192224 -0.558959 ] [-0.10192228 0.49773398 1.5181601 -0.6003924 
1.2000823 ] [ 0.6590081 -1.5830476 -0.49266085 0.0560554 -1.1917439 ]] [[ 0.7723955 1.7647452 -0.83985096 -0.89885455 0.6290373 ] [ 0.33850786 1.0261238 -0.8263465 -1.0150611 -0.5183308 ] [ 0.51022315 0.45334038 -0.59301 -1.0152133 0.7603049 ] [-0.85179263 1.2243277 -0.6562984 -0.68594736 -0.40759638]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:1 - input_dim:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5996.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=1]() %self.dim : int = prim::Constant[value=0]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 1.24600017e+00 -1.12913513e+00 -4.44398820e-01 5.57862073e-02 -1.50855839e+00] [-5.19525766e-01 1.25304544e+00 -2.83439231e+00 8.52525830e-01 1.08588505e+00] [ 1.44735110e+00 -1.71741724e+00 1.72458208e+00 1.43078506e-01 -5.73458374e-01] [ 1.46566904e+00 2.84797215e+00 -3.41956943e-01 -5.96626520e-01 5.98253250e-01] [-5.71647465e-01 1.34155476e+00 -1.67237747e+00 1.01838863e+00 5.36337912e-01]] [[-1.14200819e+00 -5.50597429e-01 1.31638861e+00 1.58251524e+00 9.95031357e-01] [ 6.29919052e-01 7.05828071e-01 1.87962636e-01 7.35158503e-01 7.47055471e-01] [ 1.19093262e-01 5.54658830e-01 -8.18171680e-01 -9.30903018e-01 3.89519691e-01] [ 8.80991101e-01 -8.14324021e-01 1.58345902e+00 1.70469120e-01 1.34689832e+00] [ 4.91064876e-01 2.39791244e-01 -9.66852546e-01 1.25715292e+00 -1.03067148e+00]] [[ 6.12432778e-01 -1.07059860e+00 -5.89126348e-01 1.77214599e+00 -1.06882000e+00] [ 8.75178158e-01 -6.38982132e-02 3.67635578e-01 -2.64657140e-01 -5.78103900e-01] [-1.22579777e+00 -5.76195478e-01 2.50240475e-01 -1.04777646e+00 6.81601882e-01] [-1.02514577e+00 6.62422776e-01 1.42917299e+00 6.37746990e-01 3.17260265e-01] [ 1.93579805e+00 1.45560026e-01 -8.18172097e-01 9.08498943e-01 -6.28449917e-01]] [[-7.15613484e-01 1.42867243e+00 4.50039566e-01 -4.86018211e-01 9.08346236e-01] [-9.29529220e-03 2.46962714e+00 -1.31443143e-01 1.95648146e+00 -5.02366066e-01] [-4.84847347e-04 -4.78501916e-01 -2.54395664e-01 1.40982449e-01 -4.86490607e-01] [ 2.54368931e-01 9.09464538e-01 4.78332490e-01 4.92774278e-01 4.55743849e-01] [ 6.72461271e-01 9.72799584e-02 
-1.56156015e+00 1.09948866e-01 -9.43455903e-04]]]; ov_res: [[[ 1.24600017e+00 -1.12913513e+00 -4.44398820e-01 5.57862073e-02 -1.50855839e+00] [-5.19525766e-01 1.25304544e+00 -2.83439231e+00 8.52525830e-01 1.08588505e+00] [ 1.44735110e+00 -1.71741724e+00 1.72458208e+00 1.43078506e-01 -5.73458374e-01] [ 1.46566904e+00 2.84797215e+00 -3.41956943e-01 -5.96626520e-01 5.98253250e-01] [-5.71647465e-01 1.34155476e+00 -1.67237747e+00 1.01838863e+00 5.36337912e-01]] [[-1.14200819e+00 -5.50597429e-01 1.31638861e+00 1.58251524e+00 9.95031357e-01] [ 6.29919052e-01 7.05828071e-01 1.87962636e-01 7.35158503e-01 7.47055471e-01] [ 1.19093262e-01 5.54658830e-01 -8.18171680e-01 -9.30903018e-01 3.89519691e-01] [ 8.80991101e-01 -8.14324021e-01 1.58345902e+00 1.70469120e-01 1.34689832e+00] [ 4.91064876e-01 2.39791244e-01 -9.66852546e-01 1.25715292e+00 -1.03067148e+00]] [[ 6.12432778e-01 -1.07059860e+00 -5.89126348e-01 1.77214599e+00 -1.06882000e+00] [ 8.75178158e-01 -6.38982132e-02 3.67635578e-01 -2.64657140e-01 -5.78103900e-01] [-1.22579777e+00 -5.76195478e-01 2.50240475e-01 -1.04777646e+00 6.81601882e-01] [-1.02514577e+00 6.62422776e-01 1.42917299e+00 6.37746990e-01 3.17260265e-01] [ 1.93579805e+00 1.45560026e-01 -8.18172097e-01 9.08498943e-01 -6.28449917e-01]] [[-7.15613484e-01 1.42867243e+00 4.50039566e-01 -4.86018211e-01 9.08346236e-01] [-9.29529220e-03 2.46962714e+00 -1.31443143e-01 1.95648146e+00 -5.02366066e-01] [-4.84847347e-04 -4.78501916e-01 -2.54395664e-01 1.40982449e-01 -4.86490607e-01] [ 2.54368931e-01 9.09464538e-01 4.78332490e-01 4.92774278e-01 4.55743849e-01] [ 6.72461271e-01 9.72799584e-02 -1.56156015e+00 1.09948866e-01 -9.43455903e-04]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:1 - input_dim:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_5998.aten_select, %input_tensor.1 : Tensor): %self.dim : int = prim::Constant[value=1]() %3 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%3) fw_re: [[[ 3.98225009e-01 -6.43714607e-01 7.16578186e-01 8.67773071e-02 -1.37806869e+00] [ 1.44601512e+00 -1.65515614e+00 2.06854510e+00 4.35186416e-01 1.62528265e+00] [-8.86773109e-01 -4.19992357e-01 -8.13358665e-01 -1.16019213e+00 9.11848426e-01] [-1.51239026e+00 4.74002868e-01 -1.25814772e+00 -1.52156520e+00 -1.58223704e-01] [ 1.71290135e+00 -9.34823304e-02 -7.23245025e-01 -4.99677002e-01 -1.66749299e-01]] [[ 1.02984861e-01 -1.04846649e-01 3.36613685e-01 3.05681020e-01 -1.47578216e+00] [-8.65320742e-01 1.13901544e+00 -3.64301443e-01 3.61881435e-01 9.86970365e-02] [-5.13221025e-01 8.68502259e-01 -1.28255165e+00 -1.19711936e-01 -1.18000817e+00] [-6.60300136e-01 2.37211004e-01 -1.00870442e+00 -9.31171894e-01 4.35362548e-01] [-4.53951240e-01 1.39747590e-01 -1.27205908e+00 -7.89262414e-01 9.03726995e-01]] [[ 4.87041265e-01 1.69227540e+00 -9.90473390e-01 1.88291597e+00 -7.98413277e-01] [ 2.81116962e-01 1.78785467e+00 3.77502114e-01 3.22850607e-02 -2.74813306e-02] [-6.51726092e-04 -1.05907249e+00 -7.15379417e-01 4.01560009e-01 -1.15253806e+00] [ 4.00841475e-01 6.49956822e-01 -1.53623378e+00 -7.10324585e-01 5.39596975e-01] [-1.05535734e+00 -3.61372590e-01 8.06453377e-02 2.83132315e-01 3.35034966e+00]] [[-1.18510735e+00 4.93619710e-01 -2.26046634e+00 1.10675561e+00 3.29112262e-01] [ 3.27969179e-03 -5.69061577e-01 -5.54820597e-01 -9.22732711e-01 -5.01418896e-02] [ 4.30496484e-01 4.23066974e-01 -3.31919998e-01 -8.20080578e-01 -2.54088640e-01] [-1.01600087e+00 1.15749216e+00 6.00662291e-01 -6.33334816e-01 -1.36281490e+00] [ 9.02823329e-01 -2.41968799e+00 2.13873133e-01 -6.71333224e-02 -5.90610981e-01]]]; 
ov_res: [[[ 3.98225009e-01 -6.43714607e-01 7.16578186e-01 8.67773071e-02 -1.37806869e+00] [ 1.44601512e+00 -1.65515614e+00 2.06854510e+00 4.35186416e-01 1.62528265e+00] [-8.86773109e-01 -4.19992357e-01 -8.13358665e-01 -1.16019213e+00 9.11848426e-01] [-1.51239026e+00 4.74002868e-01 -1.25814772e+00 -1.52156520e+00 -1.58223704e-01] [ 1.71290135e+00 -9.34823304e-02 -7.23245025e-01 -4.99677002e-01 -1.66749299e-01]] [[ 1.02984861e-01 -1.04846649e-01 3.36613685e-01 3.05681020e-01 -1.47578216e+00] [-8.65320742e-01 1.13901544e+00 -3.64301443e-01 3.61881435e-01 9.86970365e-02] [-5.13221025e-01 8.68502259e-01 -1.28255165e+00 -1.19711936e-01 -1.18000817e+00] [-6.60300136e-01 2.37211004e-01 -1.00870442e+00 -9.31171894e-01 4.35362548e-01] [-4.53951240e-01 1.39747590e-01 -1.27205908e+00 -7.89262414e-01 9.03726995e-01]] [[ 4.87041265e-01 1.69227540e+00 -9.90473390e-01 1.88291597e+00 -7.98413277e-01] [ 2.81116962e-01 1.78785467e+00 3.77502114e-01 3.22850607e-02 -2.74813306e-02] [-6.51726092e-04 -1.05907249e+00 -7.15379417e-01 4.01560009e-01 -1.15253806e+00] [ 4.00841475e-01 6.49956822e-01 -1.53623378e+00 -7.10324585e-01 5.39596975e-01] [-1.05535734e+00 -3.61372590e-01 8.06453377e-02 2.83132315e-01 3.35034966e+00]] [[-1.18510735e+00 4.93619710e-01 -2.26046634e+00 1.10675561e+00 3.29112262e-01] [ 3.27969179e-03 -5.69061577e-01 -5.54820597e-01 -9.22732711e-01 -5.01418896e-02] [ 4.30496484e-01 4.23066974e-01 -3.31919998e-01 -8.20080578e-01 -2.54088640e-01] [-1.01600087e+00 1.15749216e+00 6.00662291e-01 -6.33334816e-01 -1.36281490e+00] [ 9.02823329e-01 -2.41968799e+00 2.13873133e-01 -6.71333224e-02 -5.90610981e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:1 - input_dim:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6000.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=1]() %self.dim : int = prim::Constant[value=2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.42643896 0.34084317 0.40730357 -0.7942235 0.27382326] [ 0.25523478 0.4449207 1.032603 0.8179019 -0.6786007 ] [-0.31174725 -1.0166236 0.1431025 0.48816296 -0.17238836] [ 0.65026045 -0.5231097 -0.63633376 0.28674573 -1.1342229 ]] [[-1.6950617 0.8170387 -2.2356102 0.22023255 0.9651795 ] [-1.3225332 -0.15955506 1.3477262 0.08506384 0.7709911 ] [ 1.7147882 -0.94954467 1.2040935 0.7896022 2.4472725 ] [ 0.97602487 -0.841484 -0.77924496 -1.3892992 -1.5706122 ]] [[ 0.9350647 -1.5994238 -0.2666497 -0.94910663 0.54336256] [ 0.71066576 -3.338947 0.83326876 -1.1322501 0.06585374] [-1.0431005 -0.06064462 -0.20480996 1.5160173 1.9789782 ] [-0.87345266 1.1413102 -0.21919166 0.09352978 -1.4473643 ]] [[ 0.17435558 -1.1563021 1.0404996 1.7190107 -0.40279806] [-0.35535458 0.6830991 0.76113474 -1.4393961 0.0396835 ] [-1.7300472 0.47479162 -1.1161209 0.00832147 0.51546484] [-0.649519 0.6871845 1.1407379 2.5610726 -0.10346609]]]; ov_res: [[[ 0.42643896 0.34084317 0.40730357 -0.7942235 0.27382326] [ 0.25523478 0.4449207 1.032603 0.8179019 -0.6786007 ] [-0.31174725 -1.0166236 0.1431025 0.48816296 -0.17238836] [ 0.65026045 -0.5231097 -0.63633376 0.28674573 -1.1342229 ]] [[-1.6950617 0.8170387 -2.2356102 0.22023255 0.9651795 ] [-1.3225332 -0.15955506 1.3477262 0.08506384 0.7709911 ] [ 1.7147882 -0.94954467 1.2040935 0.7896022 2.4472725 ] [ 0.97602487 -0.841484 -0.77924496 -1.3892992 -1.5706122 ]] [[ 0.9350647 -1.5994238 -0.2666497 -0.94910663 0.54336256] [ 0.71066576 -3.338947 0.83326876 -1.1322501 0.06585374] [-1.0431005 -0.06064462 -0.20480996 1.5160173 
1.9789782 ] [-0.87345266 1.1413102 -0.21919166 0.09352978 -1.4473643 ]] [[ 0.17435558 -1.1563021 1.0404996 1.7190107 -0.40279806] [-0.35535458 0.6830991 0.76113474 -1.4393961 0.0396835 ] [-1.7300472 0.47479162 -1.1161209 0.00832147 0.51546484] [-0.649519 0.6871845 1.1407379 2.5610726 -0.10346609]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:1 - input_dim:3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6002.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=1]() %self.dim : int = prim::Constant[value=3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.3116261 0.23026441 -1.1056548 0.1493194 2.5190444 ] [-0.17922048 -0.11955347 -0.46102807 0.72377795 -1.264345 ] [-1.2944925 1.3823646 1.2977184 0.39759836 -0.8260875 ] [-0.48955968 -0.84629655 -0.10170478 -0.07079253 -0.10897864]] [[-0.7536034 -0.88547105 0.07847431 -1.3160524 1.4862989 ] [ 1.6752806 -0.8844306 -0.08943977 0.01236664 -1.2081395 ] [ 0.21659552 0.724062 0.10483 -1.6617329 -0.4088094 ] [-1.92819 -1.3002913 0.5993 0.03529197 0.85671663]] [[-0.02729007 0.16558893 -0.68539137 -1.0858063 1.602053 ] [ 1.6318427 -0.6438172 -2.2583354 -0.9633533 0.00380024] [ 0.33432135 -0.41908282 -2.050437 -1.9132805 1.8115722 ] [-1.2683839 -0.48290172 0.2055821 0.8614076 0.10781211]] [[-0.07571991 -1.1201679 1.9378066 -0.34221148 1.8488022 ] [ 0.656013 0.80912286 2.8909233 -1.1546658 0.06250606] [ 1.5955434 -0.21203558 0.62469286 2.1015825 1.3856633 ] [-0.01910373 -0.30343103 0.6477095 0.770515 1.5771984 ]]]; ov_res: [[[-0.3116261 0.23026441 -1.1056548 0.1493194 2.5190444 ] [-0.17922048 -0.11955347 -0.46102807 0.72377795 -1.264345 ] [-1.2944925 1.3823646 1.2977184 0.39759836 -0.8260875 ] [-0.48955968 -0.84629655 -0.10170478 -0.07079253 -0.10897864]] [[-0.7536034 -0.88547105 0.07847431 -1.3160524 1.4862989 ] [ 1.6752806 -0.8844306 -0.08943977 0.01236664 -1.2081395 ] [ 0.21659552 0.724062 0.10483 -1.6617329 -0.4088094 ] [-1.92819 -1.3002913 0.5993 0.03529197 0.85671663]] [[-0.02729007 0.16558893 -0.68539137 -1.0858063 1.602053 ] [ 1.6318427 -0.6438172 -2.2583354 -0.9633533 0.00380024] [ 0.33432135 -0.41908282 -2.050437 -1.9132805 1.8115722 ] 
[-1.2683839 -0.48290172 0.2055821 0.8614076 0.10781211]] [[-0.07571991 -1.1201679 1.9378066 -0.34221148 1.8488022 ] [ 0.656013 0.80912286 2.8909233 -1.1546658 0.06250606] [ 1.5955434 -0.21203558 0.62469286 2.1015825 1.3856633 ] [-0.01910373 -0.30343103 0.6477095 0.770515 1.5771984 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:2 - input_dim:-3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6004.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=2]() %self.dim : int = prim::Constant[value=-3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.58147836 0.69432825 1.7725021 -1.9559704 0.11361212] [-0.34378764 -1.7154814 0.5416548 0.45051917 -0.0069922 ] [ 3.0452309 -1.0452356 2.1140695 -0.06552833 0.35838726] [-0.41889414 0.21179342 0.7320743 -0.506315 0.7124226 ] [ 0.07397426 0.26898953 0.7239314 -0.8493791 1.0842085 ]] [[-1.0338069 0.20162308 0.49537912 2.324703 1.6992261 ] [-1.1511024 0.53377825 1.3715794 -0.5661287 0.3712119 ] [-0.399295 -0.84051764 0.12369309 1.5748891 -0.3334653 ] [-0.18054777 1.0136511 0.7155704 -0.38487664 -0.34896672] [-0.3136401 -2.0404568 0.18512493 -0.4222343 1.219774 ]] [[ 0.6405241 0.04219116 -0.50144994 0.4686548 1.8085514 ] [ 1.6575948 0.58414114 0.9101266 -1.162284 -0.89566606] [-0.5219455 -0.03898641 -0.32468954 1.5654423 -1.821206 ] [ 0.10556953 -1.2728617 0.969047 1.0980095 -0.5590556 ] [-2.0449696 0.8009283 -1.7928891 0.5246146 -1.4115292 ]] [[-1.068241 -0.08192053 2.2330313 1.0070575 2.2473505 ] [ 1.8079331 -1.9894226 -0.03588582 2.7857213 -0.5984665 ] [-2.030379 -1.037156 0.39232084 0.6453961 0.00688486] [ 2.2383718 -0.27384695 0.07686411 0.32860476 -1.4018482 ] [ 0.03032608 -0.5351881 -0.33570457 -1.3888581 1.333017 ]]]; ov_res: [[[ 0.58147836 0.69432825 1.7725021 -1.9559704 0.11361212] [-0.34378764 -1.7154814 0.5416548 0.45051917 -0.0069922 ] [ 3.0452309 -1.0452356 2.1140695 -0.06552833 0.35838726] [-0.41889414 0.21179342 0.7320743 -0.506315 0.7124226 ] [ 0.07397426 0.26898953 0.7239314 -0.8493791 1.0842085 ]] [[-1.0338069 0.20162308 0.49537912 2.324703 1.6992261 ] [-1.1511024 0.53377825 1.3715794 -0.5661287 0.3712119 ] [-0.399295 
-0.84051764 0.12369309 1.5748891 -0.3334653 ] [-0.18054777 1.0136511 0.7155704 -0.38487664 -0.34896672] [-0.3136401 -2.0404568 0.18512493 -0.4222343 1.219774 ]] [[ 0.6405241 0.04219116 -0.50144994 0.4686548 1.8085514 ] [ 1.6575948 0.58414114 0.9101266 -1.162284 -0.89566606] [-0.5219455 -0.03898641 -0.32468954 1.5654423 -1.821206 ] [ 0.10556953 -1.2728617 0.969047 1.0980095 -0.5590556 ] [-2.0449696 0.8009283 -1.7928891 0.5246146 -1.4115292 ]] [[-1.068241 -0.08192053 2.2330313 1.0070575 2.2473505 ] [ 1.8079331 -1.9894226 -0.03588582 2.7857213 -0.5984665 ] [-2.030379 -1.037156 0.39232084 0.6453961 0.00688486] [ 2.2383718 -0.27384695 0.07686411 0.32860476 -1.4018482 ] [ 0.03032608 -0.5351881 -0.33570457 -1.3888581 1.333017 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:2 - input_dim:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6006.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=2]() %self.dim : int = prim::Constant[value=-2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.5343687 -0.03753744 -2.285972 -0.28782663 -0.05242535] [ 0.5468473 -0.1308992 -0.4272034 1.3266808 -0.5838069 ] [-0.5214371 1.0794346 -0.43428645 -0.27471378 -0.8377112 ] [ 0.6882046 0.8861913 1.0494047 -1.6126012 -0.45486894]] [[ 0.8081132 -0.72971624 -0.21465473 0.23526275 -0.14922701] [ 1.6646938 0.2616565 1.2231083 0.08000409 1.1711911 ] [ 1.2756109 -0.94323844 0.18932348 0.1463075 2.4202256 ] [-0.10081527 1.8868169 0.25445008 -1.9195242 -0.09211775]] [[ 0.24555868 -0.62323403 -1.8001689 -0.62574995 -0.5458385 ] [-1.4577554 0.93825746 0.62824917 -0.46101883 -1.6641523 ] [ 1.3670021 0.16715732 1.6438446 0.22641946 -1.4068938 ] [ 0.9203711 -0.13141057 0.02530233 -0.6420431 0.9571447 ]] [[-0.5619756 0.22902013 -0.9973403 -0.73071975 1.1227736 ] [ 0.1246727 0.8669057 0.82955575 1.1675082 -1.098383 ] [-0.3100932 -1.7747099 -1.0024688 0.09894485 0.5233443 ] [ 2.0248473 0.31650895 0.0546621 -1.141511 -0.18877938]]]; ov_res: [[[-0.5343687 -0.03753744 -2.285972 -0.28782663 -0.05242535] [ 0.5468473 -0.1308992 -0.4272034 1.3266808 -0.5838069 ] [-0.5214371 1.0794346 -0.43428645 -0.27471378 -0.8377112 ] [ 0.6882046 0.8861913 1.0494047 -1.6126012 -0.45486894]] [[ 0.8081132 -0.72971624 -0.21465473 0.23526275 -0.14922701] [ 1.6646938 0.2616565 1.2231083 0.08000409 1.1711911 ] [ 1.2756109 -0.94323844 0.18932348 0.1463075 2.4202256 ] [-0.10081527 1.8868169 0.25445008 -1.9195242 -0.09211775]] [[ 0.24555868 -0.62323403 -1.8001689 -0.62574995 -0.5458385 ] [-1.4577554 0.93825746 0.62824917 -0.46101883 -1.6641523 ] [ 1.3670021 0.16715732 1.6438446 
0.22641946 -1.4068938 ] [ 0.9203711 -0.13141057 0.02530233 -0.6420431 0.9571447 ]] [[-0.5619756 0.22902013 -0.9973403 -0.73071975 1.1227736 ] [ 0.1246727 0.8669057 0.82955575 1.1675082 -1.098383 ] [-0.3100932 -1.7747099 -1.0024688 0.09894485 0.5233443 ] [ 2.0248473 0.31650895 0.0546621 -1.141511 -0.18877938]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:2 - input_dim:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6008.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=2]() %self.dim : int = prim::Constant[value=-1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 3.5000868 -0.46460953 -1.1370292 -0.79126835 0.13577047] [ 1.2189481 -0.29663894 -0.42649 -0.05301045 0.6612272 ] [-1.2919286 0.5780693 1.2064236 -0.08574008 -0.7953061 ] [ 0.97164464 -1.5275557 -0.31273717 -0.22576216 0.3931966 ]] [[-0.12778816 0.7406759 0.6128514 -2.288538 -0.42482162] [-2.4924707 0.91667104 0.5655262 0.86064744 -0.413584 ] [ 1.0518334 -0.48780823 -1.0867183 -0.89739925 -1.2274419 ] [-0.94020134 -0.5450877 -0.70609325 1.3088715 1.8538212 ]] [[ 0.24778524 0.5841991 0.6860641 -0.51086557 -2.0480864 ] [-1.2951808 -0.24543624 -0.86737686 0.44405204 -0.04537335] [ 0.8399796 0.68979543 0.7797217 -0.42416856 0.0247217 ] [ 1.6106491 -0.6015067 -1.3407948 -1.3856871 -0.38084316]] [[ 3.9677904 -1.2181106 -0.07468481 0.71018434 0.73743796] [ 0.576248 1.360818 2.6303132 0.9616942 -0.71592754] [ 1.3374712 0.902 0.98021245 -1.2785448 -0.2191622 ] [-0.47966975 -0.16932765 0.80020744 -0.37755018 1.0939261 ]]]; ov_res: [[[ 3.5000868 -0.46460953 -1.1370292 -0.79126835 0.13577047] [ 1.2189481 -0.29663894 -0.42649 -0.05301045 0.6612272 ] [-1.2919286 0.5780693 1.2064236 -0.08574008 -0.7953061 ] [ 0.97164464 -1.5275557 -0.31273717 -0.22576216 0.3931966 ]] [[-0.12778816 0.7406759 0.6128514 -2.288538 -0.42482162] [-2.4924707 0.91667104 0.5655262 0.86064744 -0.413584 ] [ 1.0518334 -0.48780823 -1.0867183 -0.89739925 -1.2274419 ] [-0.94020134 -0.5450877 -0.70609325 1.3088715 1.8538212 ]] [[ 0.24778524 0.5841991 0.6860641 -0.51086557 -2.0480864 ] [-1.2951808 -0.24543624 -0.86737686 0.44405204 -0.04537335] [ 0.8399796 0.68979543 0.7797217 
-0.42416856 0.0247217 ] [ 1.6106491 -0.6015067 -1.3407948 -1.3856871 -0.38084316]] [[ 3.9677904 -1.2181106 -0.07468481 0.71018434 0.73743796] [ 0.576248 1.360818 2.6303132 0.9616942 -0.71592754] [ 1.3374712 0.902 0.98021245 -1.2785448 -0.2191622 ] [-0.47966975 -0.16932765 0.80020744 -0.37755018 1.0939261 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:2 - input_dim:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6010.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=2]() %self.dim : int = prim::Constant[value=0]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 4.6310690e-01 -8.8372491e-02 -1.4456107e-01 7.0870161e-01 -1.4560131e+00] [-4.1907388e-01 3.6276217e+00 2.1304796e+00 -1.1874267e-01 -4.9260747e-01] [-8.7760979e-01 -3.5831392e-01 5.0377142e-01 -6.6517431e-01 -4.2700976e-01] [ 1.8794765e+00 6.3910663e-01 2.0298205e-01 -4.1154483e-01 -5.5486768e-01] [-1.2445946e+00 -1.8696623e-01 -3.2276016e-01 -1.3912935e+00 5.8395493e-01]] [[-4.5254567e-01 6.8397564e-01 -7.3357958e-01 -2.4123554e-01 2.0989077e+00] [-3.2108146e-01 4.9956959e-02 2.8453222e-01 6.9743007e-01 9.0843761e-01] [ 1.2705007e-02 -4.7821659e-01 -2.1184436e-03 5.2623689e-01 2.1565063e-01] [-6.4152020e-01 -6.7660443e-02 -1.1453865e+00 -3.3700949e-01 1.6231599e-01] [ 1.0282153e+00 -6.6653085e-01 2.5757694e-01 -1.3065001e+00 3.9609772e-01]] [[-2.1255113e-01 -2.9743413e-02 1.4045012e-01 -3.8183913e-01 2.5826761e-01] [-8.6294186e-01 -5.2329934e-01 1.3447822e+00 -1.6463279e+00 6.7742354e-01] [-1.3183749e+00 7.3676711e-01 4.7392482e-01 1.2700880e+00 -1.0523046e+00] [-1.0394599e-01 6.4758432e-01 -1.4038751e+00 1.4512326e+00 6.1025317e-03] [ 4.8173651e-01 9.2610204e-01 1.4245994e+00 -4.1358662e-01 1.4428723e-01]] [[-3.3863482e-01 -1.1856232e+00 5.0489295e-01 -1.0872023e-01 6.5242229e-03] [-7.5725991e-01 -2.4410684e-01 -3.8306016e-01 -6.8232888e-01 5.1508907e-02] [ 3.0638483e-01 -1.1800343e+00 -4.2486897e-01 1.6096135e+00 9.2284404e-02] [ 4.1369706e-01 -5.2972299e-01 -4.3229708e-01 5.3760745e-02 1.5551696e+00] [-2.4221203e+00 5.5481195e-01 -8.2446605e-01 6.9641578e-01 -4.4379592e-01]]]; ov_res: [[[ 4.6310690e-01 -8.8372491e-02 -1.4456107e-01 
7.0870161e-01 -1.4560131e+00] [-4.1907388e-01 3.6276217e+00 2.1304796e+00 -1.1874267e-01 -4.9260747e-01] [-8.7760979e-01 -3.5831392e-01 5.0377142e-01 -6.6517431e-01 -4.2700976e-01] [ 1.8794765e+00 6.3910663e-01 2.0298205e-01 -4.1154483e-01 -5.5486768e-01] [-1.2445946e+00 -1.8696623e-01 -3.2276016e-01 -1.3912935e+00 5.8395493e-01]] [[-4.5254567e-01 6.8397564e-01 -7.3357958e-01 -2.4123554e-01 2.0989077e+00] [-3.2108146e-01 4.9956959e-02 2.8453222e-01 6.9743007e-01 9.0843761e-01] [ 1.2705007e-02 -4.7821659e-01 -2.1184436e-03 5.2623689e-01 2.1565063e-01] [-6.4152020e-01 -6.7660443e-02 -1.1453865e+00 -3.3700949e-01 1.6231599e-01] [ 1.0282153e+00 -6.6653085e-01 2.5757694e-01 -1.3065001e+00 3.9609772e-01]] [[-2.1255113e-01 -2.9743413e-02 1.4045012e-01 -3.8183913e-01 2.5826761e-01] [-8.6294186e-01 -5.2329934e-01 1.3447822e+00 -1.6463279e+00 6.7742354e-01] [-1.3183749e+00 7.3676711e-01 4.7392482e-01 1.2700880e+00 -1.0523046e+00] [-1.0394599e-01 6.4758432e-01 -1.4038751e+00 1.4512326e+00 6.1025317e-03] [ 4.8173651e-01 9.2610204e-01 1.4245994e+00 -4.1358662e-01 1.4428723e-01]] [[-3.3863482e-01 -1.1856232e+00 5.0489295e-01 -1.0872023e-01 6.5242229e-03] [-7.5725991e-01 -2.4410684e-01 -3.8306016e-01 -6.8232888e-01 5.1508907e-02] [ 3.0638483e-01 -1.1800343e+00 -4.2486897e-01 1.6096135e+00 9.2284404e-02] [ 4.1369706e-01 -5.2972299e-01 -4.3229708e-01 5.3760745e-02 1.5551696e+00] [-2.4221203e+00 5.5481195e-01 -8.2446605e-01 6.9641578e-01 -4.4379592e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:2 - input_dim:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6012.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=2]() %self.dim : int = prim::Constant[value=1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.10931296 2.1712246 -0.83822954 -0.23851423 0.84747213] [ 1.4057885 -2.658831 -0.59401405 -0.40590498 -0.03756023] [-0.06271744 0.687843 0.88779014 1.4424537 -0.00294803] [ 0.3119146 0.55121773 0.6560949 -1.2508135 -0.63267446] [ 1.5336412 -0.7696276 -2.2383878 1.0440391 1.6214964 ]] [[ 0.94145155 1.4304278 0.25043693 -0.59303945 -0.09203573] [ 1.1548002 0.21684535 0.3810191 0.62125504 0.195071 ] [ 1.4422474 -0.28522202 -1.0037539 -0.6126064 -0.8705533 ] [-0.2556784 0.963629 -1.0219333 0.03796405 -0.7546551 ] [-0.71876466 1.5513548 0.1475359 0.81705695 -1.8198011 ]] [[-0.933389 0.5755512 -0.7364589 0.8717519 0.40728503] [ 0.73162377 2.0155087 -0.35672694 -0.48754933 -0.6380389 ] [ 0.02967194 -0.948867 -0.0746168 1.3829422 0.3201006 ] [ 0.8127025 0.04280936 0.04575001 -0.23829225 -0.0503424 ] [ 0.35989612 0.91587275 1.0347707 -1.0219339 0.07817616]] [[ 1.3474584 1.2896531 1.3190233 0.64345115 -1.2938179 ] [-0.26706222 1.025427 0.06537974 -1.053746 0.02312073] [-1.0549049 -1.7710027 0.17750427 -1.2670722 0.17683542] [-0.32154837 0.5913123 0.8897144 1.266352 0.42461175] [-0.05359853 -0.44485962 -2.6144621 0.63460946 -1.2153474 ]]]; ov_res: [[[-0.10931296 2.1712246 -0.83822954 -0.23851423 0.84747213] [ 1.4057885 -2.658831 -0.59401405 -0.40590498 -0.03756023] [-0.06271744 0.687843 0.88779014 1.4424537 -0.00294803] [ 0.3119146 0.55121773 0.6560949 -1.2508135 -0.63267446] [ 1.5336412 -0.7696276 -2.2383878 1.0440391 1.6214964 ]] [[ 0.94145155 1.4304278 0.25043693 -0.59303945 -0.09203573] [ 1.1548002 0.21684535 0.3810191 0.62125504 0.195071 ] [ 
1.4422474 -0.28522202 -1.0037539 -0.6126064 -0.8705533 ] [-0.2556784 0.963629 -1.0219333 0.03796405 -0.7546551 ] [-0.71876466 1.5513548 0.1475359 0.81705695 -1.8198011 ]] [[-0.933389 0.5755512 -0.7364589 0.8717519 0.40728503] [ 0.73162377 2.0155087 -0.35672694 -0.48754933 -0.6380389 ] [ 0.02967194 -0.948867 -0.0746168 1.3829422 0.3201006 ] [ 0.8127025 0.04280936 0.04575001 -0.23829225 -0.0503424 ] [ 0.35989612 0.91587275 1.0347707 -1.0219339 0.07817616]] [[ 1.3474584 1.2896531 1.3190233 0.64345115 -1.2938179 ] [-0.26706222 1.025427 0.06537974 -1.053746 0.02312073] [-1.0549049 -1.7710027 0.17750427 -1.2670722 0.17683542] [-0.32154837 0.5913123 0.8897144 1.266352 0.42461175] [-0.05359853 -0.44485962 -2.6144621 0.63460946 -1.2153474 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:2 - input_dim:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6014.aten_select, %input_tensor.1 : Tensor): %self.dim : int = prim::Constant[value=2]() %3 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%3) fw_re: [[[-0.44082385 0.77788067 -0.6921946 1.0000833 -0.7184548 ] [-0.6887741 -0.39776808 -1.608246 -0.40919968 -2.0833685 ] [-0.01745323 0.67405975 0.04907865 0.19807912 1.3585104 ] [ 0.59027576 0.0801264 0.10497694 0.11279131 0.9817909 ]] [[-0.0958619 0.28073496 -0.7964181 0.11825667 1.7339503 ] [ 0.5433432 -1.7842636 -2.3607514 1.5911928 0.5766132 ] [-0.82522535 0.5061929 0.56355464 1.3124181 1.8769256 ] [ 0.71593434 -0.23925774 0.04956186 -0.37811545 -0.8605833 ]] [[-0.81875616 -0.36469224 -0.6646208 1.2864168 3.1003458 ] [ 2.3050048 2.3347366 0.08521548 0.08825946 0.5980707 ] [-0.51314706 1.4234729 0.13921735 -0.03308491 -0.7654859 ] [-1.6293597 0.16120705 0.98983985 0.62180376 -1.8871872 ]] [[-3.0269942 -0.2715124 0.35867018 -0.3842019 2.0821257 ] [ 1.7334095 0.05349414 1.3405716 0.7439895 0.9425425 ] [-1.685242 0.44737193 2.1858277 -0.13899943 0.1486307 ] [-0.7446968 0.44879645 0.22879067 -0.13099726 1.3515104 ]]]; ov_res: [[[-0.44082385 0.77788067 -0.6921946 1.0000833 -0.7184548 ] [-0.6887741 -0.39776808 -1.608246 -0.40919968 -2.0833685 ] [-0.01745323 0.67405975 0.04907865 0.19807912 1.3585104 ] [ 0.59027576 0.0801264 0.10497694 0.11279131 0.9817909 ]] [[-0.0958619 0.28073496 -0.7964181 0.11825667 1.7339503 ] [ 0.5433432 -1.7842636 -2.3607514 1.5911928 0.5766132 ] [-0.82522535 0.5061929 0.56355464 1.3124181 1.8769256 ] [ 0.71593434 -0.23925774 0.04956186 -0.37811545 -0.8605833 ]] [[-0.81875616 -0.36469224 -0.6646208 1.2864168 3.1003458 ] [ 2.3050048 2.3347366 0.08521548 0.08825946 0.5980707 ] [-0.51314706 1.4234729 0.13921735 -0.03308491 -0.7654859 ] [-1.6293597 0.16120705 0.98983985 
0.62180376 -1.8871872 ]] [[-3.0269942 -0.2715124 0.35867018 -0.3842019 2.0821257 ] [ 1.7334095 0.05349414 1.3405716 0.7439895 0.9425425 ] [-1.685242 0.44737193 2.1858277 -0.13899943 0.1486307 ] [-0.7446968 0.44879645 0.22879067 -0.13099726 1.3515104 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:2 - input_dim:3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6016.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=2]() %self.dim : int = prim::Constant[value=3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.02642429 1.9007943 -0.01012261 0.5356754 -0.93301636] [-1.170424 -0.27289537 -1.5883464 0.5512783 -0.11665448] [-0.2214237 0.0992198 -0.29867506 -1.1139765 1.4300845 ] [-0.49178955 -0.26945046 0.13103135 1.0919763 -0.8975941 ]] [[ 0.06656933 -0.80216783 -1.0574628 0.67298996 0.22179428] [-0.67979515 -0.3910781 -2.4102848 -1.3620266 1.2818421 ] [-1.859177 -0.09320013 -0.9400894 -1.2855988 -0.09514392] [-0.70686376 0.71974164 -0.6346785 -0.40692914 0.7386935 ]] [[ 0.23093215 -0.2897377 0.516838 0.46998167 -1.0580311 ] [ 0.14121632 0.9783829 -0.38876787 -0.06491402 -0.8868077 ] [-0.42902413 -2.1165667 -0.2971373 0.75808024 -1.277653 ] [-1.0542217 0.34972996 -0.25198916 1.0285788 -0.23012862]] [[ 1.703294 1.8450713 0.44155374 0.33641526 1.447583 ] [-0.6091778 1.6979103 -0.15231194 0.14691949 -0.5546153 ] [ 0.34255382 -1.0725666 -0.4062582 -0.13929021 0.7856511 ] [-0.53756183 0.23672818 0.22649287 -2.224544 0.27439642]]]; ov_res: [[[-0.02642429 1.9007943 -0.01012261 0.5356754 -0.93301636] [-1.170424 -0.27289537 -1.5883464 0.5512783 -0.11665448] [-0.2214237 0.0992198 -0.29867506 -1.1139765 1.4300845 ] [-0.49178955 -0.26945046 0.13103135 1.0919763 -0.8975941 ]] [[ 0.06656933 -0.80216783 -1.0574628 0.67298996 0.22179428] [-0.67979515 -0.3910781 -2.4102848 -1.3620266 1.2818421 ] [-1.859177 -0.09320013 -0.9400894 -1.2855988 -0.09514392] [-0.70686376 0.71974164 -0.6346785 -0.40692914 0.7386935 ]] [[ 0.23093215 -0.2897377 0.516838 0.46998167 -1.0580311 ] [ 0.14121632 0.9783829 -0.38876787 -0.06491402 -0.8868077 ] [-0.42902413 -2.1165667 -0.2971373 
0.75808024 -1.277653 ] [-1.0542217 0.34972996 -0.25198916 1.0285788 -0.23012862]] [[ 1.703294 1.8450713 0.44155374 0.33641526 1.447583 ] [-0.6091778 1.6979103 -0.15231194 0.14691949 -0.5546153 ] [ 0.34255382 -1.0725666 -0.4062582 -0.13929021 0.7856511 ] [-0.53756183 0.23672818 0.22649287 -2.224544 0.27439642]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:3 - input_dim:-3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6018.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=3]() %self.dim : int = prim::Constant[value=-3]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 0.9214342 -0.3966072 -1.5127908 -0.5968874 -0.93391097] [-0.4553491 1.9836895 -1.2222477 -0.01530545 -0.30002713] [ 0.34464654 -1.2166027 1.700737 0.4457739 1.1027573 ] [ 0.66090226 0.75875986 1.0009794 0.77249897 -0.29410538] [-0.2506269 1.9637903 -0.39862454 -1.107117 -1.0320095 ]] [[ 0.17486419 0.89351416 0.43010837 0.34348553 -0.4978891 ] [-2.0414224 -0.12630336 -0.83733875 -0.805157 0.2617624 ] [-1.1985842 0.5657808 -1.6531835 -0.97588176 -0.9945702 ] [-0.34963846 -0.51087135 0.5018397 -1.1651921 -0.5259597 ] [-0.8490302 -1.0273381 0.48850694 -0.5599074 -0.197319 ]] [[ 0.64251053 1.0886874 -0.53579026 0.5196005 -1.0371774 ] [ 0.5092103 -1.5715854 -0.27282038 0.07167841 1.0143117 ] [ 0.7838004 -1.318334 -0.27538496 0.83561784 -2.0238054 ] [ 1.133567 0.3274694 0.91047066 -1.452412 -0.729116 ] [ 0.88691634 -0.6100317 -0.30397397 -0.55723685 -2.328083 ]] [[ 0.30111593 -0.78340167 -0.86396724 0.6106859 -0.4755198 ] [ 0.16052878 0.12714835 -0.10833811 -0.79546636 -0.15581563] [ 0.38536128 0.02788192 1.8082837 0.2213768 1.9717499 ] [-0.62658805 -0.7730426 -0.18655847 -0.7883703 -0.20755714] [ 0.5495149 -0.09854533 0.3494035 0.43468413 0.44658238]]]; ov_res: [[[ 0.9214342 -0.3966072 -1.5127908 -0.5968874 -0.93391097] [-0.4553491 1.9836895 -1.2222477 -0.01530545 -0.30002713] [ 0.34464654 -1.2166027 1.700737 0.4457739 1.1027573 ] [ 0.66090226 0.75875986 1.0009794 0.77249897 -0.29410538] [-0.2506269 1.9637903 -0.39862454 -1.107117 -1.0320095 ]] [[ 0.17486419 0.89351416 0.43010837 0.34348553 -0.4978891 ] [-2.0414224 -0.12630336 -0.83733875 
-0.805157 0.2617624 ] [-1.1985842 0.5657808 -1.6531835 -0.97588176 -0.9945702 ] [-0.34963846 -0.51087135 0.5018397 -1.1651921 -0.5259597 ] [-0.8490302 -1.0273381 0.48850694 -0.5599074 -0.197319 ]] [[ 0.64251053 1.0886874 -0.53579026 0.5196005 -1.0371774 ] [ 0.5092103 -1.5715854 -0.27282038 0.07167841 1.0143117 ] [ 0.7838004 -1.318334 -0.27538496 0.83561784 -2.0238054 ] [ 1.133567 0.3274694 0.91047066 -1.452412 -0.729116 ] [ 0.88691634 -0.6100317 -0.30397397 -0.55723685 -2.328083 ]] [[ 0.30111593 -0.78340167 -0.86396724 0.6106859 -0.4755198 ] [ 0.16052878 0.12714835 -0.10833811 -0.79546636 -0.15581563] [ 0.38536128 0.02788192 1.8082837 0.2213768 1.9717499 ] [-0.62658805 -0.7730426 -0.18655847 -0.7883703 -0.20755714] [ 0.5495149 -0.09854533 0.3494035 0.43468413 0.44658238]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:3 - input_dim:-2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6020.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=3]() %self.dim : int = prim::Constant[value=-2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 1.2733222e+00 2.9719108e-01 4.7540724e-01 1.0668962e-03 -1.7579151e+00] [-1.5035225e+00 -5.7466090e-01 6.1141175e-01 1.4171638e+00 -9.3075252e-01] [ 6.1038411e-01 -1.8220024e+00 -3.8694593e-01 -8.9302301e-01 1.1246070e+00] [-2.0906922e-01 8.4072143e-01 -6.3112009e-01 -1.3551151e+00 -4.3196136e-01]] [[ 5.8878672e-01 1.5203784e-01 -5.9193754e-01 -1.2671530e+00 -1.5764083e-01] [ 1.0116645e+00 -5.9124146e-02 -8.5454386e-01 -1.5113352e-01 1.1441287e-01] [ 4.8927042e-01 6.0963637e-01 7.7206618e-01 -3.4586005e-02 3.2618519e-02] [ 5.5856609e-01 7.0555735e-01 -9.3097824e-01 1.3509971e+00 -1.9751470e-01]] [[-2.7414793e-01 6.4411384e-01 7.0452571e-02 1.4845742e+00 6.8908495e-01] [-5.5636722e-01 6.1737514e-01 -2.6703361e-01 1.5797154e+00 1.2869562e+00] [ 5.1389027e-01 -6.7104346e-01 -1.0737429e+00 -1.0434579e+00 -5.5862063e-01] [ 6.8893433e-01 6.8278605e-01 8.4390146e-01 5.1564145e-01 4.3584752e-01]] [[-2.1543651e+00 2.8755623e-01 -1.2165373e-01 1.0813835e+00 -3.6909842e-01] [ 3.8403499e-01 4.3947068e-01 1.1861745e+00 -4.5949122e-01 2.7958810e-01] [ 4.4989893e-01 8.9736158e-01 3.0740619e-01 -1.0171934e+00 6.2026495e-01] [-1.4256524e+00 1.7918787e+00 -1.2013477e+00 -1.4150969e+00 3.5312608e-01]]]; ov_res: [[[ 1.2733222e+00 2.9719108e-01 4.7540724e-01 1.0668962e-03 -1.7579151e+00] [-1.5035225e+00 -5.7466090e-01 6.1141175e-01 1.4171638e+00 -9.3075252e-01] [ 6.1038411e-01 -1.8220024e+00 -3.8694593e-01 -8.9302301e-01 1.1246070e+00] [-2.0906922e-01 8.4072143e-01 -6.3112009e-01 -1.3551151e+00 -4.3196136e-01]] [[ 5.8878672e-01 1.5203784e-01 -5.9193754e-01 
-1.2671530e+00 -1.5764083e-01] [ 1.0116645e+00 -5.9124146e-02 -8.5454386e-01 -1.5113352e-01 1.1441287e-01] [ 4.8927042e-01 6.0963637e-01 7.7206618e-01 -3.4586005e-02 3.2618519e-02] [ 5.5856609e-01 7.0555735e-01 -9.3097824e-01 1.3509971e+00 -1.9751470e-01]] [[-2.7414793e-01 6.4411384e-01 7.0452571e-02 1.4845742e+00 6.8908495e-01] [-5.5636722e-01 6.1737514e-01 -2.6703361e-01 1.5797154e+00 1.2869562e+00] [ 5.1389027e-01 -6.7104346e-01 -1.0737429e+00 -1.0434579e+00 -5.5862063e-01] [ 6.8893433e-01 6.8278605e-01 8.4390146e-01 5.1564145e-01 4.3584752e-01]] [[-2.1543651e+00 2.8755623e-01 -1.2165373e-01 1.0813835e+00 -3.6909842e-01] [ 3.8403499e-01 4.3947068e-01 1.1861745e+00 -4.5949122e-01 2.7958810e-01] [ 4.4989893e-01 8.9736158e-01 3.0740619e-01 -1.0171934e+00 6.2026495e-01] [-1.4256524e+00 1.7918787e+00 -1.2013477e+00 -1.4150969e+00 3.5312608e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:3 - input_dim:-1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6022.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=3]() %self.dim : int = prim::Constant[value=-1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-0.7639252 -0.4079955 -2.4654663 -0.23066597 -0.05099765] [-1.0282067 1.1769086 -0.12627254 -2.383775 1.8442366 ] [-0.6667279 1.0919386 -0.6779827 -2.3969955 -1.9334685 ] [-1.4605675 -1.0771078 -0.99092954 0.12226086 -1.2585539 ]] [[ 1.8953869 0.2433986 -1.8360534 -0.06104643 0.2621383 ] [ 0.55144036 0.8033922 -0.8733675 1.398427 1.6419032 ] [-0.24793164 0.05313594 0.4898147 -0.28923568 -1.4116641 ] [-0.6523549 -0.37356392 0.21594383 -0.04318436 -1.5037678 ]] [[-0.466724 -1.096529 2.1176498 1.0846084 -0.13314407] [ 0.716605 -0.6480662 -0.4519941 -0.32043537 0.62096083] [-0.12845777 1.3038313 0.67255324 1.5267326 0.27157727] [ 1.341922 -1.5807319 -0.15084317 0.02344678 1.8695825 ]] [[-0.8434243 0.8906156 -1.1518661 -0.74793065 0.1103297 ] [-1.1544008 -0.2983341 0.10352653 0.6121702 -2.1419935 ] [-0.81308 -0.5035155 0.14729962 -0.34611356 0.70324373] [-0.19167212 -0.4197953 -1.8342857 -1.1321765 -0.33259705]]]; ov_res: [[[-0.7639252 -0.4079955 -2.4654663 -0.23066597 -0.05099765] [-1.0282067 1.1769086 -0.12627254 -2.383775 1.8442366 ] [-0.6667279 1.0919386 -0.6779827 -2.3969955 -1.9334685 ] [-1.4605675 -1.0771078 -0.99092954 0.12226086 -1.2585539 ]] [[ 1.8953869 0.2433986 -1.8360534 -0.06104643 0.2621383 ] [ 0.55144036 0.8033922 -0.8733675 1.398427 1.6419032 ] [-0.24793164 0.05313594 0.4898147 -0.28923568 -1.4116641 ] [-0.6523549 -0.37356392 0.21594383 -0.04318436 -1.5037678 ]] [[-0.466724 -1.096529 2.1176498 1.0846084 -0.13314407] [ 0.716605 -0.6480662 -0.4519941 -0.32043537 0.62096083] [-0.12845777 1.3038313 0.67255324 1.5267326 
0.27157727] [ 1.341922 -1.5807319 -0.15084317 0.02344678 1.8695825 ]] [[-0.8434243 0.8906156 -1.1518661 -0.74793065 0.1103297 ] [-1.1544008 -0.2983341 0.10352653 0.6121702 -2.1419935 ] [-0.81308 -0.5035155 0.14729962 -0.34611356 0.70324373] [-0.19167212 -0.4197953 -1.8342857 -1.1321765 -0.33259705]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:3 - input_dim:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6024.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=3]() %self.dim : int = prim::Constant[value=0]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[-9.43585932e-01 3.30794245e-01 8.62953961e-02 -1.08913207e+00 -3.73671174e-01] [-6.00218832e-01 -1.38064480e+00 -2.73596913e-01 -2.74409652e-01 -5.75401318e-05] [-1.13287616e+00 4.38699663e-01 -6.13368034e-01 9.21364725e-01 8.32634151e-01] [ 6.15709484e-01 1.36380658e-01 1.17451239e+00 1.25815645e-01 6.94414556e-01] [-1.21147156e-01 -7.85410702e-01 -8.92760158e-02 -6.79616690e-01 1.92686856e+00]] [[-1.70401502e+00 -1.90462518e+00 2.15399241e+00 -1.10892785e+00 1.68356895e+00] [-1.96711615e-01 -4.99633551e-01 -1.17256187e-01 1.10776889e+00 -2.11445832e+00] [ 5.38484693e-01 -3.41844440e-01 -7.05479324e-01 2.05313206e-01 -1.50941536e-01] [-7.10228324e-01 -2.18114948e+00 1.43262655e-01 3.89489800e-01 -1.25778186e+00] [ 2.83036232e-01 4.13172871e-01 -1.78000355e+00 -5.46653867e-01 -1.22476280e+00]] [[-7.30895698e-01 2.15736127e+00 4.51927602e-01 6.35092378e-01 -5.22649646e-01] [-4.63337123e-01 2.16192827e-01 -6.26991034e-01 4.47250038e-01 1.04288781e+00] [-1.98023808e+00 2.91101495e-03 -4.56930667e-01 -1.31154037e+00 -1.03527057e+00] [-5.31369328e-01 -3.26351762e-01 2.10685110e+00 -2.71525979e-02 1.28881359e+00] [-2.10994053e+00 -3.76480430e-01 8.17988276e-01 8.46408725e-01 -9.26328301e-01]] [[-3.89845878e-01 -4.99451458e-01 2.47802877e+00 7.02683270e-01 1.28410375e+00] [-1.97717190e+00 3.29307988e-02 3.08873326e-01 1.23001441e-01 -4.95438695e-01] [ 1.68693252e-02 1.10935187e+00 -1.08856952e+00 -2.78249383e-01 2.69175798e-01] [ 1.58149207e+00 -3.94664198e-01 -3.41854215e-01 1.01689219e+00 3.46997380e+00] [-1.27906859e+00 5.06633043e-01 
-3.22790295e-01 1.71626651e+00 -8.90876472e-01]]]; ov_res: [[[-9.43585932e-01 3.30794245e-01 8.62953961e-02 -1.08913207e+00 -3.73671174e-01] [-6.00218832e-01 -1.38064480e+00 -2.73596913e-01 -2.74409652e-01 -5.75401318e-05] [-1.13287616e+00 4.38699663e-01 -6.13368034e-01 9.21364725e-01 8.32634151e-01] [ 6.15709484e-01 1.36380658e-01 1.17451239e+00 1.25815645e-01 6.94414556e-01] [-1.21147156e-01 -7.85410702e-01 -8.92760158e-02 -6.79616690e-01 1.92686856e+00]] [[-1.70401502e+00 -1.90462518e+00 2.15399241e+00 -1.10892785e+00 1.68356895e+00] [-1.96711615e-01 -4.99633551e-01 -1.17256187e-01 1.10776889e+00 -2.11445832e+00] [ 5.38484693e-01 -3.41844440e-01 -7.05479324e-01 2.05313206e-01 -1.50941536e-01] [-7.10228324e-01 -2.18114948e+00 1.43262655e-01 3.89489800e-01 -1.25778186e+00] [ 2.83036232e-01 4.13172871e-01 -1.78000355e+00 -5.46653867e-01 -1.22476280e+00]] [[-7.30895698e-01 2.15736127e+00 4.51927602e-01 6.35092378e-01 -5.22649646e-01] [-4.63337123e-01 2.16192827e-01 -6.26991034e-01 4.47250038e-01 1.04288781e+00] [-1.98023808e+00 2.91101495e-03 -4.56930667e-01 -1.31154037e+00 -1.03527057e+00] [-5.31369328e-01 -3.26351762e-01 2.10685110e+00 -2.71525979e-02 1.28881359e+00] [-2.10994053e+00 -3.76480430e-01 8.17988276e-01 8.46408725e-01 -9.26328301e-01]] [[-3.89845878e-01 -4.99451458e-01 2.47802877e+00 7.02683270e-01 1.28410375e+00] [-1.97717190e+00 3.29307988e-02 3.08873326e-01 1.23001441e-01 -4.95438695e-01] [ 1.68693252e-02 1.10935187e+00 -1.08856952e+00 -2.78249383e-01 2.69175798e-01] [ 1.58149207e+00 -3.94664198e-01 -3.41854215e-01 1.01689219e+00 3.46997380e+00] [-1.27906859e+00 5.06633043e-01 -3.22790295e-01 1.71626651e+00 -8.90876472e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:3 - input_dim:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6026.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=3]() %self.dim : int = prim::Constant[value=1]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 3.89499843e-01 1.90490258e+00 5.95287919e-01 -9.08362567e-01 1.20922334e-01] [-2.10822344e+00 -1.94411719e+00 -7.02463806e-01 1.37566102e+00 2.30664277e+00] [ 6.03692889e-01 1.26435018e+00 7.75015354e-01 -2.83177495e-01 4.04544860e-01] [-7.98912533e-03 -1.36686826e+00 -9.84598696e-01 -6.35352060e-02 -5.26656866e-01] [-7.20009983e-01 6.27337217e-01 -1.24451518e+00 1.35396802e+00 -3.19919884e-01]] [[ 1.10747969e+00 -1.70628786e+00 8.10264468e-01 -1.60156265e-01 7.97224998e-01] [-8.38699818e-01 6.84908107e-02 -1.22196054e+00 2.61880197e-02 1.33212531e+00] [ 1.29090977e+00 3.26527268e-01 -1.58456028e-01 -7.80299127e-01 -5.29322743e-01] [ 7.98281670e-01 1.42777038e+00 2.40785912e-01 1.54189742e+00 4.66403216e-01] [ 6.47624612e-01 2.98512548e-01 3.14266533e-01 -1.59531915e+00 -1.43761471e-01]] [[-1.35408962e+00 -2.71884680e+00 1.66805577e-04 -6.27236247e-01 2.96216339e-01] [-9.89514768e-01 -1.45876241e+00 -8.20659921e-02 -8.08685660e-01 -2.75756025e+00] [ 1.78654373e-01 1.40433848e+00 1.01972795e+00 4.37257826e-01 -5.72370529e-01] [ 2.82658666e-01 -2.05539322e+00 7.43154943e-01 1.53117979e+00 3.13570827e-01] [-5.16177952e-01 -1.51263848e-01 -1.06798697e+00 -6.38691545e-01 -1.48360789e+00]] [[ 9.28836286e-01 -4.73722905e-01 -4.74604428e-01 2.14174223e+00 2.67683357e-01] [-2.04842493e-01 1.65707290e+00 3.83065730e-01 1.52989960e+00 -1.62992513e+00] [-1.44595295e-01 1.11344850e+00 -3.36977661e-01 -9.18835700e-02 -2.47291461e-01] [ 1.57815754e-01 1.01530373e+00 1.85237467e+00 9.48978245e-01 9.14933383e-01] [-1.32272863e+00 -1.22051108e+00 
-6.66908443e-01 2.57279962e-01 -8.21293414e-01]]]; ov_res: [[[ 3.89499843e-01 1.90490258e+00 5.95287919e-01 -9.08362567e-01 1.20922334e-01] [-2.10822344e+00 -1.94411719e+00 -7.02463806e-01 1.37566102e+00 2.30664277e+00] [ 6.03692889e-01 1.26435018e+00 7.75015354e-01 -2.83177495e-01 4.04544860e-01] [-7.98912533e-03 -1.36686826e+00 -9.84598696e-01 -6.35352060e-02 -5.26656866e-01] [-7.20009983e-01 6.27337217e-01 -1.24451518e+00 1.35396802e+00 -3.19919884e-01]] [[ 1.10747969e+00 -1.70628786e+00 8.10264468e-01 -1.60156265e-01 7.97224998e-01] [-8.38699818e-01 6.84908107e-02 -1.22196054e+00 2.61880197e-02 1.33212531e+00] [ 1.29090977e+00 3.26527268e-01 -1.58456028e-01 -7.80299127e-01 -5.29322743e-01] [ 7.98281670e-01 1.42777038e+00 2.40785912e-01 1.54189742e+00 4.66403216e-01] [ 6.47624612e-01 2.98512548e-01 3.14266533e-01 -1.59531915e+00 -1.43761471e-01]] [[-1.35408962e+00 -2.71884680e+00 1.66805577e-04 -6.27236247e-01 2.96216339e-01] [-9.89514768e-01 -1.45876241e+00 -8.20659921e-02 -8.08685660e-01 -2.75756025e+00] [ 1.78654373e-01 1.40433848e+00 1.01972795e+00 4.37257826e-01 -5.72370529e-01] [ 2.82658666e-01 -2.05539322e+00 7.43154943e-01 1.53117979e+00 3.13570827e-01] [-5.16177952e-01 -1.51263848e-01 -1.06798697e+00 -6.38691545e-01 -1.48360789e+00]] [[ 9.28836286e-01 -4.73722905e-01 -4.74604428e-01 2.14174223e+00 2.67683357e-01] [-2.04842493e-01 1.65707290e+00 3.83065730e-01 1.52989960e+00 -1.62992513e+00] [-1.44595295e-01 1.11344850e+00 -3.36977661e-01 -9.18835700e-02 -2.47291461e-01] [ 1.57815754e-01 1.01530373e+00 1.85237467e+00 9.48978245e-01 9.14933383e-01] [-1.32272863e+00 -1.22051108e+00 -6.66908443e-01 2.57279962e-01 -8.21293414e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:3 - input_dim:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6028.aten_select, %input_tensor.1 : Tensor): %self.index : int = prim::Constant[value=3]() %self.dim : int = prim::Constant[value=2]() %4 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.index) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%4) fw_re: [[[ 1.6314051 0.41558507 -1.2584157 -0.07581101 1.8540214 ] [-0.11741675 -0.04971652 -1.760861 -0.3551208 1.1921465 ] [-0.5360565 -0.95189524 -0.10734899 -2.206099 -0.29278773] [ 0.10508156 -0.8291984 -1.5317917 -1.3638312 -1.214011 ]] [[-0.5386299 -1.72305 -1.8211116 -0.49640977 0.9657457 ] [-0.17423269 -0.14377671 -0.8252917 0.02491973 -0.26853278] [ 1.0338546 -1.1698868 0.38585648 -1.0159436 -0.01468537] [-1.2903737 -0.9836796 0.58144647 -0.29023966 2.3442545 ]] [[ 0.5316279 -0.30210558 0.09642794 -0.0793524 -0.6585693 ] [ 0.69736856 0.43386585 -2.1561522 0.7594879 0.28231457] [ 0.5907593 -1.0466682 1.5339198 -0.46555367 0.9548129 ] [ 0.6396369 -0.21227407 -0.47822022 0.09927958 -1.3500956 ]] [[-1.1435736 1.3532333 2.6194263 0.7028025 0.41112214] [ 0.9431092 -1.3396769 1.2609524 -0.21063752 -0.2106751 ] [ 0.6873517 0.04004499 0.23924758 -1.3984618 0.42630312] [-0.72742593 0.52518296 0.31582317 -0.23230386 0.05186487]]]; ov_res: [[[ 1.6314051 0.41558507 -1.2584157 -0.07581101 1.8540214 ] [-0.11741675 -0.04971652 -1.760861 -0.3551208 1.1921465 ] [-0.5360565 -0.95189524 -0.10734899 -2.206099 -0.29278773] [ 0.10508156 -0.8291984 -1.5317917 -1.3638312 -1.214011 ]] [[-0.5386299 -1.72305 -1.8211116 -0.49640977 0.9657457 ] [-0.17423269 -0.14377671 -0.8252917 0.02491973 -0.26853278] [ 1.0338546 -1.1698868 0.38585648 -1.0159436 -0.01468537] [-1.2903737 -0.9836796 0.58144647 -0.29023966 2.3442545 ]] [[ 0.5316279 -0.30210558 0.09642794 -0.0793524 -0.6585693 ] [ 0.69736856 0.43386585 -2.1561522 0.7594879 0.28231457] [ 0.5907593 -1.0466682 1.5339198 
-0.46555367 0.9548129 ] [ 0.6396369 -0.21227407 -0.47822022 0.09927958 -1.3500956 ]] [[-1.1435736 1.3532333 2.6194263 0.7028025 0.41112214] [ 0.9431092 -1.3396769 1.2609524 -0.21063752 -0.2106751 ] [ 0.6873517 0.04004499 0.23924758 -1.3984618 0.42630312] [-0.72742593 0.52518296 0.31582317 -0.23230386 0.05186487]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_select.py::TestSelect::test_select[ ie_device:CPU - precision:FP32 - input_index:3 - input_dim:3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_select.___torch_mangle_6030.aten_select, %input_tensor.1 : Tensor): %self.dim : int = prim::Constant[value=3]() %3 : Tensor = aten::select(%input_tensor.1, %self.dim, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_select.py:27:23 return (%3) fw_re: [[[ 0.9876997 0.28742865 -0.7362311 -0.55235964 0.46101105] [-1.4588523 0.8643812 0.32725257 -0.05427001 -1.2951839 ] [-0.8354095 0.13231157 -0.10283647 -0.28559294 -0.25910455] [-1.091371 -0.2483652 -1.7982963 -0.6425796 -1.637824 ]] [[-0.63478875 0.22041945 -0.37037948 1.350153 0.14420305] [-1.9632169 -0.46555448 -0.04890515 0.16682185 -0.43524933] [-0.37910205 2.5080378 -1.635428 0.2834217 0.22092646] [-0.75018543 -0.64668715 -1.2809625 -0.6247935 0.9327178 ]] [[-0.03739485 -0.7808483 0.88615966 -0.07788274 0.552413 ] [-0.7921354 -1.2557372 -3.0207353 0.2855917 0.4437315 ] [ 0.37158376 0.7820551 -0.24910721 1.081371 0.3909923 ] [ 0.41810104 -0.7460088 -0.15260668 -0.00692184 -0.9319601 ]] [[-0.82179236 -0.7604163 0.9145758 -0.15475546 -0.6244866 ] [ 0.6234178 -0.56390876 0.6717333 -0.3966256 -0.2537629 ] [ 0.696137 0.45075914 -0.7863007 0.8197259 0.07558601] [-0.37823272 1.1188015 1.2690939 0.4640055 0.9794143 ]]]; ov_res: [[[ 0.9876997 0.28742865 -0.7362311 -0.55235964 0.46101105] [-1.4588523 0.8643812 0.32725257 -0.05427001 -1.2951839 ] [-0.8354095 0.13231157 -0.10283647 -0.28559294 -0.25910455] [-1.091371 -0.2483652 -1.7982963 -0.6425796 -1.637824 ]] [[-0.63478875 0.22041945 -0.37037948 1.350153 0.14420305] [-1.9632169 -0.46555448 -0.04890515 0.16682185 -0.43524933] [-0.37910205 2.5080378 -1.635428 0.2834217 0.22092646] [-0.75018543 -0.64668715 -1.2809625 -0.6247935 0.9327178 ]] [[-0.03739485 -0.7808483 0.88615966 -0.07788274 0.552413 ] [-0.7921354 -1.2557372 -3.0207353 0.2855917 0.4437315 ] [ 0.37158376 0.7820551 -0.24910721 1.081371 0.3909923 ] [ 0.41810104 -0.7460088 -0.15260668 
-0.00692184 -0.9319601 ]] [[-0.82179236 -0.7604163 0.9145758 -0.15475546 -0.6244866 ] [ 0.6234178 -0.56390876 0.6717333 -0.3966256 -0.2537629 ] [ 0.696137 0.45075914 -0.7863007 0.8197259 0.07558601] [-0.37823272 1.1188015 1.2690939 0.4640055 0.9794143 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_selu.py::TestSilu::test_silu[ ie_device:CPU - precision:FP32 - inplace:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_selu.___torch_mangle_6031.aten_selu, %x.1 : Tensor): %result.1 : Tensor = aten::selu_(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1575:17 %3 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %result.1) return (%3) fw_re: [[[[-0.8036631 -0.5051904 -0.42531475 ... 0.3720381 -1.0624974 0.40084308] [-1.4005514 -1.0778965 -0.3939216 ... 0.91375524 1.1974802 0.81572926] [ 0.2994588 0.7501014 0.72334635 ... 1.7947911 0.0478406 1.5243782 ] ... [-1.1927752 -1.3528146 -1.0453186 ... -1.33216 0.5013175 1.2437458 ] [-1.0114281 -0.5976544 -0.58299434 ... 1.1458998 0.90012443 0.5476665 ] [-0.74970114 -1.0913017 -1.2269839 ... 2.225942 -1.1966685 1.2999508 ]] [[-0.8419096 0.8685352 -0.2079614 ... 0.10489607 -0.45236978 -1.194178 ] [ 1.7282243 1.1626986 -1.0254734 ... 0.45880497 0.08687903 -0.21163347] [ 0.9115795 1.563124 0.8654902 ... 0.21082617 1.0044967 -0.8255752 ] ... [ 2.1556487 -1.493076 0.18981335 ... -0.40902713 -1.2922045 0.55935335] [ 1.7017497 0.16487046 -0.21032023 ... -0.10049477 -0.94901645 -1.4556179 ] [-0.39908823 -0.79395026 -0.88016474 ... -1.0566978 0.74551606 -0.88487625]] [[ 0.5415204 -1.397959 0.251761 ... 1.1745799 -0.34953785 0.49698067] [ 1.904845 0.1331999 -1.0763127 ... 1.235554 -1.0538948 0.20418924] [ 0.15086986 0.7222091 0.02887234 ... 1.1746327 -0.15395688 0.41008586] ... [ 0.75526243 -0.08057068 -0.03650223 ... 0.24201117 1.6931369 -1.4834002 ] [-1.3352149 1.0409988 1.4871416 ... -1.1788393 0.34848493 -0.13360712] [ 0.3650908 0.2821645 1.0883834 ... -0.13720973 0.17022368 -0.93351597]]]]; ov_res: [[[[-0.8036631 -0.5051904 -0.42531466 ... 0.3720381 -1.0624974 0.40084308] [-1.4005514 -1.0778965 -0.3939216 ... 0.91375524 1.1974802 0.81572926] [ 0.2994588 0.7501014 0.72334635 ... 1.7947911 0.0478406 1.5243782 ] ... [-1.1927752 -1.3528146 -1.0453186 ... 
-1.33216 0.5013175 1.2437458 ] [-1.0114281 -0.5976544 -0.58299434 ... 1.1458998 0.90012443 0.5476665 ] [-0.74970114 -1.0913017 -1.2269839 ... 2.225942 -1.1966685 1.2999508 ]] [[-0.8419096 0.8685352 -0.2079614 ... 0.10489607 -0.45236978 -1.194178 ] [ 1.7282243 1.1626986 -1.0254734 ... 0.45880497 0.08687903 -0.21163347] [ 0.9115795 1.563124 0.8654902 ... 0.21082617 1.0044967 -0.8255752 ] ... [ 2.1556487 -1.493076 0.18981335 ... -0.40902713 -1.2922045 0.55935335] [ 1.7017497 0.16487046 -0.21032023 ... -0.10049477 -0.94901645 -1.4556179 ] [-0.39908823 -0.7939503 -0.88016474 ... -1.0566978 0.74551606 -0.88487625]] [[ 0.5415204 -1.397959 0.251761 ... 1.1745799 -0.34953785 0.49698067] [ 1.904845 0.1331999 -1.0763127 ... 1.235554 -1.0538948 0.20418924] [ 0.15086986 0.7222091 0.02887234 ... 1.1746327 -0.15395688 0.41008586] ... [ 0.75526243 -0.08057068 -0.03650223 ... 0.24201117 1.6931369 -1.4834002 ] [-1.3352149 1.0409988 1.4871416 ... -1.1788393 0.34848493 -0.13360712] [ 0.3650908 0.2821645 1.0883834 ... -0.13720973 0.17022368 -0.93351597]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[-0.8036631 -0.5051904 -0.42531475 ... 0.3720381 -1.0624974 0.40084308] [-1.4005514 -1.0778965 -0.3939216 ... 0.91375524 1.1974802 0.81572926] [ 0.2994588 0.7501014 0.72334635 ... 1.7947911 0.0478406 1.5243782 ] ... [-1.1927752 -1.3528146 -1.0453186 ... -1.33216 0.5013175 1.2437458 ] [-1.0114281 -0.5976544 -0.58299434 ... 1.1458998 0.90012443 0.5476665 ] [-0.74970114 -1.0913017 -1.2269839 ... 2.225942 -1.1966685 1.2999508 ]] [[-0.8419096 0.8685352 -0.2079614 ... 0.10489607 -0.45236978 -1.194178 ] [ 1.7282243 1.1626986 -1.0254734 ... 0.45880497 0.08687903 -0.21163347] [ 0.9115795 1.563124 0.8654902 ... 0.21082617 1.0044967 -0.8255752 ] ... [ 2.1556487 -1.493076 0.18981335 ... -0.40902713 -1.2922045 0.55935335] [ 1.7017497 0.16487046 -0.21032023 ... -0.10049477 -0.94901645 -1.4556179 ] [-0.39908823 -0.79395026 -0.88016474 ... 
-1.0566978 0.74551606 -0.88487625]] [[ 0.5415204 -1.397959 0.251761 ... 1.1745799 -0.34953785 0.49698067] [ 1.904845 0.1331999 -1.0763127 ... 1.235554 -1.0538948 0.20418924] [ 0.15086986 0.7222091 0.02887234 ... 1.1746327 -0.15395688 0.41008586] ... [ 0.75526243 -0.08057068 -0.03650223 ... 0.24201117 1.6931369 -1.4834002 ] [-1.3352149 1.0409988 1.4871416 ... -1.1788393 0.34848493 -0.13360712] [ 0.3650908 0.2821645 1.0883834 ... -0.13720973 0.17022368 -0.93351597]]]]; ov_res: [[[[-0.8036631 -0.5051904 -0.42531466 ... 0.3720381 -1.0624974 0.40084308] [-1.4005514 -1.0778965 -0.3939216 ... 0.91375524 1.1974802 0.81572926] [ 0.2994588 0.7501014 0.72334635 ... 1.7947911 0.0478406 1.5243782 ] ... [-1.1927752 -1.3528146 -1.0453186 ... -1.33216 0.5013175 1.2437458 ] [-1.0114281 -0.5976544 -0.58299434 ... 1.1458998 0.90012443 0.5476665 ] [-0.74970114 -1.0913017 -1.2269839 ... 2.225942 -1.1966685 1.2999508 ]] [[-0.8419096 0.8685352 -0.2079614 ... 0.10489607 -0.45236978 -1.194178 ] [ 1.7282243 1.1626986 -1.0254734 ... 0.45880497 0.08687903 -0.21163347] [ 0.9115795 1.563124 0.8654902 ... 0.21082617 1.0044967 -0.8255752 ] ... [ 2.1556487 -1.493076 0.18981335 ... -0.40902713 -1.2922045 0.55935335] [ 1.7017497 0.16487046 -0.21032023 ... -0.10049477 -0.94901645 -1.4556179 ] [-0.39908823 -0.7939503 -0.88016474 ... -1.0566978 0.74551606 -0.88487625]] [[ 0.5415204 -1.397959 0.251761 ... 1.1745799 -0.34953785 0.49698067] [ 1.904845 0.1331999 -1.0763127 ... 1.235554 -1.0538948 0.20418924] [ 0.15086986 0.7222091 0.02887234 ... 1.1746327 -0.15395688 0.41008586] ... [ 0.75526243 -0.08057068 -0.03650223 ... 0.24201117 1.6931369 -1.4834002 ] [-1.3352149 1.0409988 1.4871416 ... -1.1788393 0.34848493 -0.13360712] [ 0.3650908 0.2821645 1.0883834 ... -0.13720973 0.17022368 -0.93351597]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_selu.py::TestSilu::test_silu[ ie_device:CPU - precision:FP32 - inplace:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_selu.___torch_mangle_6033.aten_selu, %x.1 : Tensor): %result.3 : Tensor = aten::selu(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1577:17 %3 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %result.3) return (%3) fw_re: [[[[ 2.44043 -1.1979064 -0.7366124 ... 0.5763852 -0.7628942 0.13061246] [ 0.5696039 -1.3724192 -1.1189061 ... 1.8596444 1.1321371 0.13240524] [-0.49517545 0.47840458 0.25622255 ... 2.0496404 -1.5107449 -0.5641874 ] ... [ 0.1680915 -1.2966859 0.10304666 ... -0.20360444 -0.25019655 0.5431967 ] [ 0.30412185 -0.5426538 0.7675626 ... 0.10313729 -0.2699179 0.10281427] [-1.4836818 -1.4601327 0.800983 ... -0.01971074 -0.3120546 -0.05969696]] [[-0.44173992 -0.53070873 -0.52427596 ... -1.2587234 0.43024972 0.5260186 ] [-0.03578041 1.3694236 -1.6403717 ... 1.5380517 -0.17217755 -1.2843688 ] [-0.40456885 1.1050632 -0.44254997 ... -0.20417538 -0.18621445 0.35641924] ... [-0.8289096 0.8357658 -0.95930576 ... -1.1993116 0.219559 1.7448707 ] [-1.207488 0.43714112 0.57758075 ... 0.6994502 -0.16096678 -0.6331198 ] [ 1.6391904 0.3637034 1.1333373 ... 0.34610507 -0.75145173 1.0128391 ]] [[ 0.23028626 0.7180414 0.81744033 ... 1.7263104 -0.07283576 -0.36275873] [-0.07573211 1.3796451 1.2161795 ... -1.382809 0.27802607 -2.3635316 ] [-0.44477555 -0.3483452 0.2533189 ... -0.09873124 0.9843161 1.2822754 ] ... [ 0.5561091 0.10267369 -0.2563022 ... 0.29599348 1.2533246 0.6207727 ] [-0.15376985 -0.2241349 -1.0269166 ... 0.00959894 0.30587116 -0.61867166] [ 0.6978531 -0.80021006 -0.85335016 ... 0.76248324 0.32093355 1.7363551 ]]]]; ov_res: [[[[ 2.44043 -1.1979064 -0.7366124 ... 0.5763852 -0.7628942 0.13061246] [ 0.5696039 -1.3724192 -1.1189061 ... 1.8596444 1.1321371 0.13240524] [-0.49517545 0.47840458 0.25622255 ... 2.0496404 -1.5107449 -0.5641874 ] ... [ 0.1680915 -1.2966859 0.10304666 ... 
-0.20360444 -0.25019655 0.5431967 ] [ 0.30412185 -0.5426538 0.7675626 ... 0.10313729 -0.2699179 0.10281427] [-1.4836818 -1.4601327 0.800983 ... -0.01971074 -0.3120546 -0.05969696]] [[-0.44173992 -0.53070873 -0.52427596 ... -1.2587234 0.43024972 0.5260186 ] [-0.03578041 1.3694236 -1.6403717 ... 1.5380517 -0.17217755 -1.2843688 ] [-0.40456885 1.1050632 -0.44254997 ... -0.20417538 -0.18621445 0.35641924] ... [-0.8289096 0.8357658 -0.95930576 ... -1.1993116 0.219559 1.7448707 ] [-1.207488 0.43714112 0.57758075 ... 0.6994502 -0.16096678 -0.6331198 ] [ 1.6391904 0.3637034 1.1333373 ... 0.34610507 -0.75145173 1.0128391 ]] [[ 0.23028626 0.7180414 0.81744033 ... 1.7263104 -0.07283576 -0.36275873] [-0.07573211 1.3796451 1.2161795 ... -1.382809 0.27802607 -2.3635316 ] [-0.44477555 -0.3483452 0.2533189 ... -0.09873124 0.9843161 1.2822754 ] ... [ 0.5561091 0.10267369 -0.2563022 ... 0.29599348 1.2533246 0.6207727 ] [-0.15376985 -0.2241349 -1.0269166 ... 0.00959894 0.30587116 -0.61867166] [ 0.6978531 -0.80021006 -0.85335016 ... 0.76248324 0.32093355 1.7363551 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[[ 2.5641623 -1.2274601 -0.91643924 ... 0.6056085 -0.9382715 0.13723464] [ 0.59848344 -1.3124335 -1.1838392 ... 1.9539303 1.1895375 0.13911831] [-0.6866011 0.50266016 0.2692133 ... 2.1535592 -1.3700069 -0.75805336] ... [ 0.17661391 -1.2773708 0.10827123 ... -0.32386827 -0.3891593 0.5707373 ] [ 0.31954113 -0.73628527 0.80647886 ... 0.10836646 -0.41589218 0.10802706] [-1.3593605 -1.3498591 0.8415937 ... -0.03431419 -0.47127333 -0.10188189]] [[-0.6277877 -0.72400653 -0.7173329 ... -1.2587703 0.45206383 0.5526883 ] [-0.0617934 1.4388548 -1.4171898 ... 1.6160325 -0.27807918 -1.271413 ] [-0.5849821 1.1610911 -0.628703 ... -0.3246869 -0.29870895 0.37449005] ... [-0.99064505 0.87814 -1.0844681 ... -1.2282053 0.23069087 1.8333374 ] [-1.2325202 0.45930463 0.6068647 ... 0.73491305 -0.26139364 -0.82466674] [ 1.7222991 0.38214353 1.1907986 ... 
0.36365294 -0.92883676 1.0641911 ]] [[ 0.241962 0.75444686 0.8588854 ... 1.8138361 -0.12350025 -0.53489417] [-0.12822779 1.4495945 1.2778411 ... -1.31704 0.29212227 -1.5926844 ] [-0.63121367 -0.5171358 0.26616243 ... -0.16528562 1.0342219 1.3472881 ] ... [ 0.5843044 0.10787936 -0.39749217 ... 0.31100065 1.3168695 0.65224653] [-0.25058308 -0.35301346 -1.1285073 ... 0.01008561 0.32137913 -0.8110824 ] [ 0.73323494 -0.9683003 -1.0091746 ... 0.8011419 0.3372052 1.82439 ]]]]; ov_res: [[[[ 2.5641623 -1.2274601 -0.91643924 ... 0.6056085 -0.9382715 0.13723464] [ 0.59848344 -1.3124335 -1.1838392 ... 1.9539303 1.1895375 0.13911831] [-0.6866011 0.50266016 0.2692133 ... 2.1535592 -1.3700069 -0.75805336] ... [ 0.17661391 -1.2773708 0.10827123 ... -0.32386827 -0.3891593 0.5707373 ] [ 0.31954113 -0.73628527 0.80647886 ... 0.10836646 -0.41589218 0.10802706] [-1.3593605 -1.3498591 0.8415937 ... -0.03431419 -0.47127333 -0.10188189]] [[-0.6277877 -0.72400653 -0.7173329 ... -1.2587703 0.45206383 0.5526883 ] [-0.0617934 1.4388548 -1.4171898 ... 1.6160325 -0.27807906 -1.271413 ] [-0.5849821 1.1610911 -0.628703 ... -0.3246869 -0.29870883 0.37449005] ... [-0.99064505 0.87814 -1.0844681 ... -1.2282053 0.23069087 1.8333374 ] [-1.2325202 0.45930463 0.6068647 ... 0.73491305 -0.26139364 -0.82466674] [ 1.7222991 0.38214353 1.1907986 ... 0.36365294 -0.92883676 1.0641911 ]] [[ 0.241962 0.75444686 0.8588854 ... 1.8138361 -0.12350025 -0.53489417] [-0.12822779 1.4495945 1.2778411 ... -1.31704 0.29212227 -1.5926844 ] [-0.63121367 -0.5171358 0.26616243 ... -0.16528562 1.0342219 1.3472881 ] ... [ 0.5843044 0.10787936 -0.39749205 ... 0.31100065 1.3168695 0.65224653] [-0.25058308 -0.35301346 -1.1285073 ... 0.01008561 0.32137913 -0.8110824 ] [ 0.73323494 -0.9683003 -1.0091746 ... 0.8011419 0.3372052 1.82439 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_silu.py::TestSilu::test_silu[ ie_device:CPU - precision:FP32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_silu.___torch_mangle_6034.aten_silu, %x.1 : Tensor): %2 : Tensor = aten::silu(%x.1) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:2055:11 return (%2) fw_re: [[[[ 4.18172687e-01 6.93451107e-01 -2.04008177e-01 ... 1.75195530e-01 1.13940001e+00 1.02978897e+00] [-2.26222828e-01 -1.60484597e-01 -2.77938604e-01 ... -2.00785771e-01 1.99848875e-01 9.07855630e-02] [-2.72133976e-01 7.67540336e-01 2.34540367e+00 ... -4.03123954e-03 7.00848699e-01 8.80851671e-02] ... [ 3.90001655e-01 -2.73531288e-01 -2.69481391e-01 ... -8.69609136e-03 -1.84118137e-01 8.85707047e-03] [-2.19486728e-01 -2.51637161e-01 -2.75293350e-01 ... -2.78310239e-01 -2.75862634e-01 -2.27690101e-01] [ 1.74503967e-01 -2.31147222e-02 -2.69108236e-01 ... 1.28375328e+00 2.83273429e-01 8.60491157e-01]] [[-2.55485684e-01 -2.64191449e-01 4.77732252e-03 ... -2.67672986e-01 -2.76251912e-01 1.06185991e-02] [-2.77914375e-01 2.42446423e-01 -5.27879111e-02 ... -2.64900178e-01 -2.66797632e-01 5.43072879e-01] [-2.77359694e-01 1.01523769e+00 1.32282346e-01 ... 9.28413942e-02 1.81484580e-01 -1.35067627e-01] ... [ 5.17338336e-01 7.96947420e-01 -1.45645171e-01 ... 3.75773311e-01 1.18938684e-01 3.53448004e-01] [ 1.60246551e-01 6.25956178e-01 -2.64784932e-01 ... 3.73639852e-01 9.94222343e-01 1.96897596e-01] [-1.95241690e-01 -1.03245631e-01 3.34522218e-01 ... 3.17275882e-01 1.05453527e+00 -5.93721196e-02]] [[-1.29561096e-01 -1.97758108e-01 -2.69492835e-01 ... 6.03977621e-01 -2.72709280e-01 1.91298261e-01] [ 1.02796841e+00 4.65077698e-01 -2.29743883e-01 ... -1.70643479e-01 -2.31552869e-01 8.34480703e-01] [-1.52403519e-01 -9.92614031e-02 6.27560690e-02 ... 9.77762759e-01 -2.29673475e-01 -2.40660444e-01] ... [ 1.24937028e-01 -1.65310614e-02 5.69338381e-01 ... 5.72891869e-02 2.80849755e-01 1.09465063e+00] [ 1.00479356e-03 -1.14573233e-01 -9.19768214e-02 ... 
-2.48360351e-01 2.30419993e-01 -1.23469993e-01] [ 6.65031970e-01 1.25741708e+00 5.76848397e-04 ... -2.03642786e-01 2.14216664e-01 8.96068394e-01]]]]; ov_res: [[[[ 4.18172657e-01 6.93451047e-01 -2.04008177e-01 ... 1.75195530e-01 1.13940001e+00 1.02978897e+00] [-2.26222828e-01 -1.60484612e-01 -2.77938575e-01 ... -2.00785756e-01 1.99848861e-01 9.07855704e-02] [-2.72134006e-01 7.67540395e-01 2.34540367e+00 ... -4.03124001e-03 7.00848699e-01 8.80851746e-02] ... [ 3.90001684e-01 -2.73531318e-01 -2.69481421e-01 ... -8.69609136e-03 -1.84118137e-01 8.85707047e-03] [-2.19486699e-01 -2.51637131e-01 -2.75293350e-01 ... -2.78310210e-01 -2.75862664e-01 -2.27690101e-01] [ 1.74503967e-01 -2.31147222e-02 -2.69108266e-01 ... 1.28375328e+00 2.83273429e-01 8.60491157e-01]] [[-2.55485743e-01 -2.64191449e-01 4.77732206e-03 ... -2.67673016e-01 -2.76251912e-01 1.06186001e-02] [-2.77914345e-01 2.42446423e-01 -5.27879149e-02 ... -2.64900178e-01 -2.66797632e-01 5.43072879e-01] [-2.77359694e-01 1.01523769e+00 1.32282346e-01 ... 9.28414017e-02 1.81484580e-01 -1.35067642e-01] ... [ 5.17338395e-01 7.96947360e-01 -1.45645186e-01 ... 3.75773340e-01 1.18938684e-01 3.53448004e-01] [ 1.60246536e-01 6.25956237e-01 -2.64784962e-01 ... 3.73639822e-01 9.94222343e-01 1.96897596e-01] [-1.95241690e-01 -1.03245623e-01 3.34522218e-01 ... 3.17275882e-01 1.05453527e+00 -5.93721159e-02]] [[-1.29561096e-01 -1.97758093e-01 -2.69492865e-01 ... 6.03977621e-01 -2.72709250e-01 1.91298261e-01] [ 1.02796841e+00 4.65077668e-01 -2.29743898e-01 ... -1.70643494e-01 -2.31552869e-01 8.34480643e-01] [-1.52403519e-01 -9.92613956e-02 6.27560765e-02 ... 9.77762818e-01 -2.29673445e-01 -2.40660444e-01] ... [ 1.24937028e-01 -1.65310614e-02 5.69338381e-01 ... 5.72891906e-02 2.80849755e-01 1.09465063e+00] [ 1.00479356e-03 -1.14573233e-01 -9.19768140e-02 ... -2.48360336e-01 2.30419964e-01 -1.23470008e-01] [ 6.65031910e-01 1.25741720e+00 5.76848397e-04 ... -2.03642756e-01 2.14216650e-01 8.96068394e-01]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_softmax.py::TestSoftmax::test_softmax[ ie_device:CPU - precision:FP32 - dim:-1 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_softmax.___torch_mangle_6035.aten_softmax, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dim : int = prim::Constant[value=-1]() %ret.1 : Tensor = aten::softmax(%x.1, %self.dim, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1841:14 return (%ret.1) fw_re: [[[[0.00230925 0.00145725 0.00109081 ... 0.00967024 0.00138012 0.00067052] [0.00078613 0.0027922 0.00058486 ... 0.00178907 0.03838982 0.00367182] [0.00133787 0.00183807 0.00180814 ... 0.00586756 0.01080905 0.00263563] ... [0.00149692 0.00188063 0.01373458 ... 0.00331794 0.00474028 0.01070142] [0.00301167 0.00071658 0.00226574 ... 0.0064127 0.0042475 0.00789612] [0.05522034 0.00303471 0.00299214 ... 0.00727421 0.00130127 0.00283158]] [[0.00229554 0.00495808 0.00196915 ... 0.00118457 0.01472281 0.00114126] [0.00560636 0.00219616 0.01590432 ... 0.00326243 0.01093516 0.00096034] [0.00456935 0.01616685 0.00107743 ... 0.00382102 0.00086946 0.00783447] ... [0.00588143 0.00211324 0.00163679 ... 0.00208636 0.00602238 0.00322049] [0.00277907 0.01395158 0.00400146 ... 0.00077805 0.00105374 0.0012441 ] [0.00166663 0.0014693 0.00125602 ... 0.03468483 0.00087095 0.00870323]] [[0.00675471 0.0285848 0.000468 ... 0.00365906 0.00306473 0.00156081] [0.00321817 0.00479356 0.00580369 ... 0.00087063 0.00713771 0.0020813 ] [0.00893629 0.00106178 0.00118023 ... 0.0057004 0.0144277 0.00106329] ... [0.00811579 0.00172239 0.0018833 ... 0.00606521 0.00118271 0.006141 ] [0.00225018 0.00554212 0.00074787 ... 0.002568 0.00177371 0.00087147] [0.00313026 0.0025518 0.00386296 ... 0.0020581 0.00340437 0.00268608]]]]; ov_res: [[[[0.00230925 0.00145725 0.00109081 ... 0.00967024 0.00138012 0.00067052] [0.00078613 0.0027922 0.00058486 ... 0.00178907 0.03838982 0.00367182] [0.00133787 0.00183807 0.00180814 ... 0.00586756 0.01080905 0.00263563] ... [0.00149692 0.00188063 0.01373458 ... 
0.00331794 0.00474028 0.01070142] [0.00301167 0.00071658 0.00226574 ... 0.0064127 0.0042475 0.00789612] [0.05522034 0.00303471 0.00299214 ... 0.00727421 0.00130127 0.00283158]] [[0.00229554 0.00495808 0.00196915 ... 0.00118457 0.01472281 0.00114126] [0.00560636 0.00219616 0.01590432 ... 0.00326243 0.01093517 0.00096034] [0.00456935 0.01616684 0.00107743 ... 0.00382102 0.00086946 0.00783447] ... [0.00588143 0.00211324 0.00163679 ... 0.00208636 0.00602238 0.00322049] [0.00277907 0.01395158 0.00400146 ... 0.00077805 0.00105374 0.0012441 ] [0.00166663 0.0014693 0.00125602 ... 0.03468483 0.00087095 0.00870323]] [[0.00675471 0.0285848 0.000468 ... 0.00365906 0.00306473 0.00156081] [0.00321817 0.00479356 0.00580369 ... 0.00087063 0.00713771 0.0020813 ] [0.00893629 0.00106178 0.00118023 ... 0.0057004 0.0144277 0.00106329] ... [0.00811579 0.00172239 0.0018833 ... 0.00606521 0.00118271 0.006141 ] [0.00225018 0.00554212 0.00074787 ... 0.002568 0.00177371 0.00087147] [0.00313026 0.0025518 0.00386296 ... 0.0020581 0.00340437 0.00268608]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_softmax.py::TestSoftmax::test_softmax[ ie_device:CPU - precision:FP32 - dim:3 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_softmax.___torch_mangle_6037.aten_softmax, %x.1 : Tensor): %2 : NoneType = prim::Constant() %self.dim : int = prim::Constant[value=3]() %ret.1 : Tensor = aten::softmax(%x.1, %self.dim, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:1841:14 return (%ret.1) fw_re: [[[[0.00067752 0.00678256 0.00147275 ... 0.0005282 0.00206764 0.0010398 ] [0.00172291 0.00582703 0.00318088 ... 0.0165156 0.0025616 0.00076243] [0.00699199 0.00182419 0.00353879 ... 0.00123958 0.0045935 0.00244862] ... [0.00185807 0.00315022 0.00436592 ... 0.00033688 0.00412971 0.00267245] [0.00198388 0.00082454 0.00323094 ... 0.04131038 0.0033402 0.00314118] [0.01078631 0.00321143 0.00435786 ... 0.01057103 0.00136892 0.00793565]] [[0.00141376 0.00217719 0.00419112 ... 0.00091212 0.00300313 0.0063114 ] [0.00227498 0.00998647 0.00093084 ... 0.00276843 0.00197941 0.01897258] [0.00140597 0.00788214 0.00345287 ... 0.00322262 0.00308576 0.00071543] ... [0.00124036 0.00993925 0.00068714 ... 0.00771868 0.00432766 0.00572466] [0.01240437 0.00072225 0.00462361 ... 0.00294747 0.00012352 0.00877696] [0.00286766 0.0079613 0.00156698 ... 0.00325588 0.00061188 0.00153609]] [[0.00500806 0.00094886 0.00270491 ... 0.00036168 0.00066571 0.00046562] [0.00216747 0.00232353 0.00067789 ... 0.00084386 0.002052 0.00132315] [0.00143143 0.01011102 0.00268692 ... 0.00574753 0.00677828 0.00263948] ... [0.00206411 0.00279871 0.00102642 ... 0.00393066 0.003446 0.00183681] [0.00054463 0.00440702 0.00098438 ... 0.00634865 0.00238043 0.00204517] [0.00118402 0.00381428 0.01073958 ... 0.00235392 0.0025536 0.00255891]]]]; ov_res: [[[[0.00067752 0.00678256 0.00147275 ... 0.0005282 0.00206764 0.0010398 ] [0.00172291 0.00582703 0.00318088 ... 0.0165156 0.0025616 0.00076243] [0.00699199 0.00182419 0.00353879 ... 0.00123958 0.0045935 0.00244863] ... [0.00185807 0.00315022 0.00436592 ... 
0.00033688 0.00412971 0.00267245] [0.00198388 0.00082454 0.00323094 ... 0.04131038 0.0033402 0.00314118] [0.01078631 0.00321143 0.00435786 ... 0.01057103 0.00136892 0.00793565]] [[0.00141376 0.00217719 0.00419112 ... 0.00091212 0.00300313 0.0063114 ] [0.00227498 0.00998647 0.00093084 ... 0.00276843 0.00197941 0.01897258] [0.00140597 0.00788214 0.00345287 ... 0.00322262 0.00308576 0.00071543] ... [0.00124036 0.00993925 0.00068714 ... 0.00771868 0.00432766 0.00572466] [0.01240436 0.00072225 0.00462361 ... 0.00294747 0.00012352 0.00877696] [0.00286766 0.0079613 0.00156698 ... 0.00325588 0.00061188 0.00153609]] [[0.00500806 0.00094886 0.00270491 ... 0.00036168 0.00066571 0.00046562] [0.00216747 0.00232353 0.00067789 ... 0.00084386 0.002052 0.00132315] [0.00143143 0.01011102 0.00268692 ... 0.00574753 0.00677828 0.00263948] ... [0.00206411 0.00279871 0.00102642 ... 0.00393066 0.003446 0.00183681] [0.00054463 0.00440702 0.00098438 ... 0.00634865 0.00238043 0.00204517] [0.00118402 0.00381428 0.01073958 ... 0.00235392 0.0025536 0.00255891]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-5 - params:(2, 1) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6038.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-5]() %self.axis : int = prim::Constant[value=1]() %self.split : int = prim::Constant[value=2]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.13690163 0.5125973 0.4952967 ... 0.4182996 1.141396 0.3168784 ] [-1.0461689 -0.874355 -2.8246953 ... -0.29175955 0.2648116 -0.85507303] [ 0.21614078 0.2839141 -0.2770338 ... -1.1355033 1.3038296 -1.1629479 ] ... [-0.44943348 -0.24411352 -1.0527784 ... -0.41273287 -1.0533047 0.48929584] [ 0.8824257 -1.2913157 0.53742653 ... -0.46194208 1.4842939 -2.367495 ] [-0.78231215 0.30590552 2.3623528 ... -1.320244 1.6023159 -0.18479611]] [[ 0.04820565 0.947906 0.36830172 ... -1.460315 0.08935257 0.32040724] [-2.2296412 -1.360938 -0.33083048 ... 1.0252749 -0.29204318 -1.2725234 ] [-0.80048376 1.9404526 1.5631824 ... -0.7329021 -0.42700657 1.0075624 ] ... [ 0.22459388 -1.0382394 0.17478934 ... -0.67178804 0.7822834 -0.5701553 ] [-1.9667845 -1.2507991 1.6804426 ... 1.228789 -0.37973353 0.2984059 ] [ 0.5420111 0.37233832 0.15352131 ... -0.01935532 -0.5622394 0.09165445]]]]; ov_res: [[[[ 0.13690163 0.5125973 0.4952967 ... 0.4182996 1.141396 0.3168784 ] [-1.0461689 -0.874355 -2.8246953 ... -0.29175955 0.2648116 -0.85507303] [ 0.21614078 0.2839141 -0.2770338 ... -1.1355033 1.3038296 -1.1629479 ] ... [-0.44943348 -0.24411352 -1.0527784 ... -0.41273287 -1.0533047 0.48929584] [ 0.8824257 -1.2913157 0.53742653 ... -0.46194208 1.4842939 -2.367495 ] [-0.78231215 0.30590552 2.3623528 ... -1.320244 1.6023159 -0.18479611]] [[ 0.04820565 0.947906 0.36830172 ... 
-1.460315 0.08935257 0.32040724] [-2.2296412 -1.360938 -0.33083048 ... 1.0252749 -0.29204318 -1.2725234 ] [-0.80048376 1.9404526 1.5631824 ... -0.7329021 -0.42700657 1.0075624 ] ... [ 0.22459388 -1.0382394 0.17478934 ... -0.67178804 0.7822834 -0.5701553 ] [-1.9667845 -1.2507991 1.6804426 ... 1.228789 -0.37973353 0.2984059 ] [ 0.5420111 0.37233832 0.15352131 ... -0.01935532 -0.5622394 0.09165445]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-5 - params:(45, 2) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6040.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-5]() %self.axis : int = prim::Constant[value=2]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-0.04139673 0.5083074 -0.25865978 ... 0.04245961 -0.45687532 -1.3643352 ] [-0.12175814 -1.9592938 -1.0118684 ... 1.6867009 0.4669111 -1.2231419 ] [ 1.0924348 0.41779694 0.29454207 ... -0.6173376 -0.36365908 0.301046 ] ... [-0.12144537 -1.1937393 -0.27355048 ... -0.2948243 -1.7526507 -0.95169115] [-0.30525136 0.20973657 0.32548428 ... 0.53876406 1.0202825 1.3399863 ] [ 0.5981537 -2.1480532 -0.13887821 ... 1.00867 0.06456546 -0.10165837]] [[-0.9451526 0.36796635 2.1523247 ... -0.26380223 -0.33175367 0.728465 ] [-0.92450285 0.63652015 -0.77653116 ... 0.78434527 -1.7168307 -1.086464 ] [ 1.2083329 -1.8398204 0.47691283 ... 0.143 0.09278315 -0.7379545 ] ... [ 0.34516028 -1.0962634 -1.2528603 ... 1.4054939 -0.02118761 0.71183366] [ 0.2719762 0.7660963 2.1470225 ... -1.0011024 0.7299873 -0.66311914] [ 1.2443001 -0.2714395 -0.3272119 ... 0.8100862 -1.1936734 -1.4671866 ]] [[-0.24263011 0.4731005 -2.1386015 ... 2.4251728 -0.14652155 0.48214382] [ 1.894838 0.04992006 -0.08526668 ... -0.851051 0.4671766 0.68545943] [ 1.1242108 -0.7601675 0.28591105 ... -1.246154 0.11922605 -0.08372633] ... [-0.7636131 0.17695126 0.26088047 ... 0.08259218 -0.85523176 -1.198793 ] [-0.07919665 0.6331871 0.41933933 ... -1.5173954 1.3906158 1.1354165 ] [ 0.22475223 0.2314838 -0.4837353 ... 0.74486876 0.36148432 0.3330927 ]] ... [[ 0.34140483 -0.6347055 -0.8951054 ... 
0.11554465 0.11112511 -0.81955415] [ 2.2819855 0.10171714 -0.4025422 ... 0.47620204 1.0170329 0.36621156] [ 0.21870568 1.0623863 2.273061 ... -0.07195079 -0.01726926 -1.4274486 ] ... [-0.06736697 -1.5116125 0.6563769 ... 0.00547305 -1.7332872 0.5925451 ] [ 2.047674 1.952708 -1.1335738 ... -1.1616474 -2.3960352 -1.9369084 ] [ 1.6884078 -0.3888652 -0.25779527 ... 1.1920344 0.29702553 0.29093707]] [[-1.4303178 1.3958428 0.5835129 ... -0.4839289 0.5507407 1.188925 ] [-0.71783507 0.03685175 -1.1711084 ... -0.29465032 0.9153891 -0.4933561 ] [ 1.1210223 1.0542737 2.7040706 ... -0.91482306 -0.58801633 -0.4860865 ] ... [ 0.99236435 0.27282593 0.20298412 ... -1.9764901 0.1087781 0.95275563] [ 0.7868364 -0.76337546 -0.6780759 ... 0.35331574 1.44141 -2.2718256 ] [ 0.7806025 -1.286797 -0.6911618 ... 1.4525535 0.7883611 -0.05988949]] [[-0.0135139 1.4036884 0.7622206 ... -0.5234047 -1.0311393 -0.15969475] [ 0.5492356 -0.47384927 -0.3624943 ... 0.6311949 -0.9266483 -0.39031368] [ 0.7036747 0.88350964 1.0681906 ... -1.0843344 -0.45243058 -0.7724085 ] ... [ 0.39908123 -0.3372619 0.7893898 ... 1.8901353 -1.7321073 -0.11755753] [-0.96600044 -1.1298048 1.2428584 ... 0.8453826 -0.15790683 -0.01836323] [-0.75753945 -0.5258092 3.0883157 ... 0.06217944 0.7950655 0.0464478 ]]]]; ov_res: [[[[-0.04139673 0.5083074 -0.25865978 ... 0.04245961 -0.45687532 -1.3643352 ] [-0.12175814 -1.9592938 -1.0118684 ... 1.6867009 0.4669111 -1.2231419 ] [ 1.0924348 0.41779694 0.29454207 ... -0.6173376 -0.36365908 0.301046 ] ... [-0.12144537 -1.1937393 -0.27355048 ... -0.2948243 -1.7526507 -0.95169115] [-0.30525136 0.20973657 0.32548428 ... 0.53876406 1.0202825 1.3399863 ] [ 0.5981537 -2.1480532 -0.13887821 ... 1.00867 0.06456546 -0.10165837]] [[-0.9451526 0.36796635 2.1523247 ... -0.26380223 -0.33175367 0.728465 ] [-0.92450285 0.63652015 -0.77653116 ... 0.78434527 -1.7168307 -1.086464 ] [ 1.2083329 -1.8398204 0.47691283 ... 0.143 0.09278315 -0.7379545 ] ... [ 0.34516028 -1.0962634 -1.2528603 ... 
1.4054939 -0.02118761 0.71183366] [ 0.2719762 0.7660963 2.1470225 ... -1.0011024 0.7299873 -0.66311914] [ 1.2443001 -0.2714395 -0.3272119 ... 0.8100862 -1.1936734 -1.4671866 ]] [[-0.24263011 0.4731005 -2.1386015 ... 2.4251728 -0.14652155 0.48214382] [ 1.894838 0.04992006 -0.08526668 ... -0.851051 0.4671766 0.68545943] [ 1.1242108 -0.7601675 0.28591105 ... -1.246154 0.11922605 -0.08372633] ... [-0.7636131 0.17695126 0.26088047 ... 0.08259218 -0.85523176 -1.198793 ] [-0.07919665 0.6331871 0.41933933 ... -1.5173954 1.3906158 1.1354165 ] [ 0.22475223 0.2314838 -0.4837353 ... 0.74486876 0.36148432 0.3330927 ]] ... [[ 0.34140483 -0.6347055 -0.8951054 ... 0.11554465 0.11112511 -0.81955415] [ 2.2819855 0.10171714 -0.4025422 ... 0.47620204 1.0170329 0.36621156] [ 0.21870568 1.0623863 2.273061 ... -0.07195079 -0.01726926 -1.4274486 ] ... [-0.06736697 -1.5116125 0.6563769 ... 0.00547305 -1.7332872 0.5925451 ] [ 2.047674 1.952708 -1.1335738 ... -1.1616474 -2.3960352 -1.9369084 ] [ 1.6884078 -0.3888652 -0.25779527 ... 1.1920344 0.29702553 0.29093707]] [[-1.4303178 1.3958428 0.5835129 ... -0.4839289 0.5507407 1.188925 ] [-0.71783507 0.03685175 -1.1711084 ... -0.29465032 0.9153891 -0.4933561 ] [ 1.1210223 1.0542737 2.7040706 ... -0.91482306 -0.58801633 -0.4860865 ] ... [ 0.99236435 0.27282593 0.20298412 ... -1.9764901 0.1087781 0.95275563] [ 0.7868364 -0.76337546 -0.6780759 ... 0.35331574 1.44141 -2.2718256 ] [ 0.7806025 -1.286797 -0.6911618 ... 1.4525535 0.7883611 -0.05988949]] [[-0.0135139 1.4036884 0.7622206 ... -0.5234047 -1.0311393 -0.15969475] [ 0.5492356 -0.47384927 -0.3624943 ... 0.6311949 -0.9266483 -0.39031368] [ 0.7036747 0.88350964 1.0681906 ... -1.0843344 -0.45243058 -0.7724085 ] ... [ 0.39908123 -0.3372619 0.7893898 ... 1.8901353 -1.7321073 -0.11755753] [-0.96600044 -1.1298048 1.2428584 ... 0.8453826 -0.15790683 -0.01836323] [-0.75753945 -0.5258092 3.0883157 ... 0.06217944 0.7950655 0.0464478 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-5 - params:(45, -1) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6042.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-5]() %self.axis : int = prim::Constant[value=-1]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-2.515988 -0.37261763 -1.0161343 ... -0.5950371 0.6195101 -1.1014763 ] [-0.3026819 1.6202401 1.3819764 ... -0.20176436 0.38103718 0.88952166] [ 0.64789 -1.084795 0.71390015 ... -2.3288121 -0.5699896 1.0000573 ] ... [ 0.7561022 -0.6267502 -0.16480988 ... 1.6999733 -1.670642 -1.1429198 ] [-0.8045221 0.6314136 0.42881814 ... 1.8398414 1.3421983 0.30259877] [-0.31018376 -1.2389426 -0.3077441 ... -0.7017204 1.3360398 -0.5336977 ]] [[-0.00466472 -0.76924455 -0.2639889 ... -2.226252 0.26765627 -0.91412485] [ 0.4959631 -0.26033545 -1.0848647 ... 0.36582723 0.93313533 0.7697428 ] [-0.07617513 -0.33703056 0.2684884 ... -0.24175319 1.1698965 0.9580615 ] ... [-1.4788644 0.6498843 -0.12654968 ... -1.2830424 2.3661752 0.44395483] [ 0.08158741 0.98602176 -0.05611426 ... -0.7950529 -0.307577 0.10271852] [-0.48009488 1.2750239 0.68101436 ... 0.10508545 0.85481125 1.2951214 ]] [[-0.54630196 -0.7614627 0.3319464 ... -2.6269147 0.27279055 -0.09666748] [-0.24397822 -0.04977455 -0.66704667 ... 1.8268002 1.406364 0.18570353] [ 0.10442647 1.7484156 0.8603285 ... 0.686238 1.9283948 -1.4823679 ] ... [-0.8231115 -1.8009427 0.11971228 ... -1.5719721 -1.4639761 -2.8086293 ] [-0.04317956 0.06402925 -1.8175175 ... -0.26047906 -0.9829637 1.2197824 ] [-1.3202374 0.2760674 -0.6364295 ... 2.337816 0.5054693 0.09622804]] ... [[-1.0264977 0.17380345 2.5783494 ... 
1.7697788 0.0828293 1.5060841 ] [ 0.54248786 0.95580095 -0.47084796 ... -0.42436293 -0.04515951 0.5874389 ] [ 0.37575114 1.2182041 -0.14484055 ... -2.0164034 -0.7389346 -1.2997538 ] ... [ 1.088367 -0.51011086 -1.5109335 ... 0.26504007 2.1083765 -0.28705284] [ 0.5706102 0.2836268 1.218574 ... -0.9239375 -0.8145223 1.2459136 ] [-0.39358267 -0.2800493 -0.36459535 ... -0.664481 -0.43315858 -0.5678784 ]] [[ 1.1739509 -1.4162674 1.4585016 ... 0.5259372 -1.1531152 0.6988643 ] [-1.1109809 2.3242357 1.071877 ... 1.5277585 0.08147152 0.24798773] [ 0.03246147 -0.664633 -1.1680347 ... -1.5186257 0.11513335 0.01011036] ... [ 0.3765598 -1.1879584 -1.2225688 ... 0.32810816 -1.5710315 -0.29266483] [-0.15581134 -0.26124647 0.49977338 ... -1.1495104 -0.40343755 -0.17755418] [ 0.246696 0.01188764 0.08140774 ... -0.3309656 0.3596304 -0.3545945 ]] [[-1.5331498 -0.46133813 -0.11834231 ... 1.0215421 -0.20500578 0.6694374 ] [ 1.1729447 1.3953934 -1.7302091 ... -1.3238192 0.13608955 0.7447578 ] [-0.72368777 -0.30240792 0.20893028 ... -1.7062747 0.74979866 2.009816 ] ... [-1.1681827 -0.6539432 0.51227653 ... 0.11182254 -0.4782361 1.7430726 ] [-0.9996962 -1.2731404 0.8676579 ... -0.78462267 0.81719047 2.1228168 ] [-0.32442746 -0.88489014 -2.7731907 ... -1.5900269 0.67832226 0.8771878 ]]]]; ov_res: [[[[-2.515988 -0.37261763 -1.0161343 ... -0.5950371 0.6195101 -1.1014763 ] [-0.3026819 1.6202401 1.3819764 ... -0.20176436 0.38103718 0.88952166] [ 0.64789 -1.084795 0.71390015 ... -2.3288121 -0.5699896 1.0000573 ] ... [ 0.7561022 -0.6267502 -0.16480988 ... 1.6999733 -1.670642 -1.1429198 ] [-0.8045221 0.6314136 0.42881814 ... 1.8398414 1.3421983 0.30259877] [-0.31018376 -1.2389426 -0.3077441 ... -0.7017204 1.3360398 -0.5336977 ]] [[-0.00466472 -0.76924455 -0.2639889 ... -2.226252 0.26765627 -0.91412485] [ 0.4959631 -0.26033545 -1.0848647 ... 0.36582723 0.93313533 0.7697428 ] [-0.07617513 -0.33703056 0.2684884 ... -0.24175319 1.1698965 0.9580615 ] ... [-1.4788644 0.6498843 -0.12654968 ... 
-1.2830424 2.3661752 0.44395483] [ 0.08158741 0.98602176 -0.05611426 ... -0.7950529 -0.307577 0.10271852] [-0.48009488 1.2750239 0.68101436 ... 0.10508545 0.85481125 1.2951214 ]] [[-0.54630196 -0.7614627 0.3319464 ... -2.6269147 0.27279055 -0.09666748] [-0.24397822 -0.04977455 -0.66704667 ... 1.8268002 1.406364 0.18570353] [ 0.10442647 1.7484156 0.8603285 ... 0.686238 1.9283948 -1.4823679 ] ... [-0.8231115 -1.8009427 0.11971228 ... -1.5719721 -1.4639761 -2.8086293 ] [-0.04317956 0.06402925 -1.8175175 ... -0.26047906 -0.9829637 1.2197824 ] [-1.3202374 0.2760674 -0.6364295 ... 2.337816 0.5054693 0.09622804]] ... [[-1.0264977 0.17380345 2.5783494 ... 1.7697788 0.0828293 1.5060841 ] [ 0.54248786 0.95580095 -0.47084796 ... -0.42436293 -0.04515951 0.5874389 ] [ 0.37575114 1.2182041 -0.14484055 ... -2.0164034 -0.7389346 -1.2997538 ] ... [ 1.088367 -0.51011086 -1.5109335 ... 0.26504007 2.1083765 -0.28705284] [ 0.5706102 0.2836268 1.218574 ... -0.9239375 -0.8145223 1.2459136 ] [-0.39358267 -0.2800493 -0.36459535 ... -0.664481 -0.43315858 -0.5678784 ]] [[ 1.1739509 -1.4162674 1.4585016 ... 0.5259372 -1.1531152 0.6988643 ] [-1.1109809 2.3242357 1.071877 ... 1.5277585 0.08147152 0.24798773] [ 0.03246147 -0.664633 -1.1680347 ... -1.5186257 0.11513335 0.01011036] ... [ 0.3765598 -1.1879584 -1.2225688 ... 0.32810816 -1.5710315 -0.29266483] [-0.15581134 -0.26124647 0.49977338 ... -1.1495104 -0.40343755 -0.17755418] [ 0.246696 0.01188764 0.08140774 ... -0.3309656 0.3596304 -0.3545945 ]] [[-1.5331498 -0.46133813 -0.11834231 ... 1.0215421 -0.20500578 0.6694374 ] [ 1.1729447 1.3953934 -1.7302091 ... -1.3238192 0.13608955 0.7447578 ] [-0.72368777 -0.30240792 0.20893028 ... -1.7062747 0.74979866 2.009816 ] ... [-1.1681827 -0.6539432 0.51227653 ... 0.11182254 -0.4782361 1.7430726 ] [-0.9996962 -1.2731404 0.8676579 ... -0.78462267 0.81719047 2.1228168 ] [-0.32442746 -0.88489014 -2.7731907 ... -1.5900269 0.67832226 0.8771878 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-5 - params:([2, 2, 2, 2, 2], 1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6044.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-5]() %self.axis : int = prim::Constant[value=1]() %self.split : int[] = prim::Constant[value=[2, 2, 2, 2, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 1.595558 0.14682485 -0.4796917 ... 0.2998209 -0.09561294 -0.6377812 ] [-0.48649493 0.20885314 -1.0013605 ... -0.42374855 1.0018365 -0.21950805] [-2.59608 0.6691193 1.6541091 ... -1.045047 0.8231285 1.1958468 ] ... [ 0.19796722 -0.86887425 -0.4363224 ... 1.1295427 -0.39106476 -0.6107419 ] [ 0.50511986 0.182627 -1.6933852 ... -1.9104635 0.6778956 0.50848246] [-0.47364756 -1.8219627 0.48541412 ... 0.55318284 -0.22990534 -0.32888898]] [[-0.0292786 0.84668714 -0.8885703 ... 1.0851653 -1.149758 -1.3457968 ] [ 0.23749581 -0.9728626 1.6096263 ... -0.25243023 1.0272468 -1.469409 ] [ 1.8355671 0.5676383 0.46475872 ... -0.8824111 0.4423043 1.8703626 ] ... [ 1.7520803 -0.904252 -0.45639366 ... 1.446352 -0.5442689 -1.0917935 ] [ 0.4789804 -1.948135 0.9057302 ... 2.512161 0.91845554 -0.04175884] [-0.78454673 -0.49668977 -2.1424236 ... -0.13455755 1.0452148 -1.4600235 ]]]]; ov_res: [[[[ 1.595558 0.14682485 -0.4796917 ... 0.2998209 -0.09561294 -0.6377812 ] [-0.48649493 0.20885314 -1.0013605 ... -0.42374855 1.0018365 -0.21950805] [-2.59608 0.6691193 1.6541091 ... -1.045047 0.8231285 1.1958468 ] ... [ 0.19796722 -0.86887425 -0.4363224 ... 1.1295427 -0.39106476 -0.6107419 ] [ 0.50511986 0.182627 -1.6933852 ... -1.9104635 0.6778956 0.50848246] [-0.47364756 -1.8219627 0.48541412 ... 0.55318284 -0.22990534 -0.32888898]] [[-0.0292786 0.84668714 -0.8885703 ... 
1.0851653 -1.149758 -1.3457968 ] [ 0.23749581 -0.9728626 1.6096263 ... -0.25243023 1.0272468 -1.469409 ] [ 1.8355671 0.5676383 0.46475872 ... -0.8824111 0.4423043 1.8703626 ] ... [ 1.7520803 -0.904252 -0.45639366 ... 1.446352 -0.5442689 -1.0917935 ] [ 0.4789804 -1.948135 0.9057302 ... 2.512161 0.91845554 -0.04175884] [-0.78454673 -0.49668977 -2.1424236 ... -0.13455755 1.0452148 -1.4600235 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-5 - params:([200, 20, 1, 1, 2], 2) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6046.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-5]() %self.axis : int = prim::Constant[value=2]() %self.split : int[] = prim::Constant[value=[200, 20, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-8.1888402e-01 -2.5744331e-01 1.2487870e+00 ... -6.3114055e-02 3.9668214e-01 -8.2482690e-01] [ 1.6247822e+00 3.2421079e-01 -8.8649815e-01 ... -4.4797975e-01 1.8531162e-01 -3.4971520e-02] [-2.1051301e-01 -5.1646447e-01 1.7079847e+00 ... 4.3358806e-01 1.7164284e-01 5.9005892e-01] ... [ 4.4263774e-01 1.1997380e-02 -1.0158253e+00 ... -9.7531192e-02 5.3190923e-01 2.1668169e-01] [-9.7603053e-01 -2.4995778e+00 -2.5558913e-01 ... 7.4159992e-01 9.5845288e-01 -3.5340673e-01] [-5.2068120e-01 3.4055623e-01 7.2123647e-02 ... -3.2872397e-01 -9.8604071e-01 3.9613441e-01]] [[ 6.5309972e-01 -2.5864059e-01 1.7251983e+00 ... 5.5729681e-01 8.2488036e-01 -1.1769211e+00] [ 6.1471969e-01 1.4529021e+00 -7.1932697e-01 ... 1.2539843e-01 3.1842198e-02 -2.2349697e-01] [ 1.3294284e+00 -2.9891899e-01 -1.0257157e-01 ... 1.5071840e+00 -7.1583623e-01 8.1231868e-01] ... [ 2.6701411e-02 -8.0219936e-01 8.9241064e-01 ... 3.7592018e-01 1.3456492e+00 5.4687327e-01] [ 5.6117493e-01 -1.3588504e-01 -1.1754243e+00 ... 5.4176229e-01 5.9687114e-01 1.6033308e-01] [ 9.1148490e-01 3.7240881e-01 -5.6634319e-01 ... 3.2973266e+00 1.0963244e+00 1.5128320e+00]] [[ 1.4317142e+00 -1.0878686e+00 1.0837801e+00 ... -2.3887207e-01 -1.9815576e+00 1.0088484e+00] [-1.0048324e+00 -1.1175185e+00 -4.8926932e-01 ... -5.3609586e-01 1.6702543e-01 1.2236602e+00] [ 4.1447154e-01 6.1863053e-01 -4.3055367e-01 ... 
-1.1887952e+00 5.7225311e-01 -1.8520244e+00] ... [-1.5002452e+00 -1.3625511e+00 -6.9000435e-01 ... 2.1984074e-03 -7.6930052e-01 3.4272477e-01] [ 1.6814816e+00 9.2884678e-01 1.0557218e+00 ... 5.0917310e-01 9.5347524e-01 1.8037194e-01] [-8.5615344e-02 1.6465833e+00 1.2384549e+00 ... -3.1792626e-01 4.4727287e-01 1.1942029e+00]] ... [[ 8.2779145e-01 -1.8328857e+00 -2.4343803e+00 ... 6.3694233e-01 3.7297851e-01 -7.8233846e-02] [-7.7708960e-01 -6.5338224e-01 1.7615858e-01 ... -1.7807690e+00 2.9337734e-01 3.2997110e-01] [-3.0925262e+00 5.2892399e-01 1.2481776e+00 ... 9.6709937e-01 2.1781747e-01 5.0974530e-01] ... [-1.4489661e-01 1.2579464e+00 5.9635442e-01 ... 7.1383797e-02 1.6614697e+00 -4.3775246e-01] [-5.9503317e-01 1.6281189e-01 5.4750514e-01 ... -1.7689519e+00 -1.1378649e-01 4.1828468e-01] [ 8.3594859e-01 7.6344651e-01 5.9759504e-01 ... -5.7961512e-01 8.5486829e-01 -7.9351026e-01]] [[-2.1638508e+00 1.0173556e+00 -3.3399996e-03 ... -1.1978282e+00 4.7381532e-01 9.8066336e-01] [-1.3734300e+00 1.6871316e+00 -6.5332150e-01 ... -5.2278036e-01 -3.2003731e-01 9.7567625e-02] [ 6.5401465e-01 -8.8254517e-01 1.5066321e-01 ... 1.7400347e+00 1.5865763e+00 -7.5165987e-01] ... [-7.1802966e-02 1.2936040e-02 -1.2057527e+00 ... 1.1232982e+00 -3.8158423e-01 -1.1777259e+00] [ 1.7050112e+00 -4.6093464e-01 9.0573478e-01 ... -1.2681264e+00 3.2897692e-02 1.0266227e+00] [-2.1387091e+00 2.6416738e+00 1.0477964e+00 ... -1.2986286e+00 1.3637885e+00 -8.0371481e-01]] [[-1.7503327e+00 -1.2532954e+00 1.2524801e+00 ... 2.7955332e-01 -8.6321920e-01 -1.0619964e-01] [ 9.8661619e-01 -8.7612711e-02 2.9111452e-02 ... 1.9645986e-01 -1.0915358e+00 -1.2333069e+00] [ 1.8858452e+00 -1.9735943e-01 5.1103294e-01 ... -6.3109750e-01 8.4549916e-01 8.9817524e-01] ... [-8.1226194e-01 -5.4807180e-01 -2.7921194e-01 ... -2.7401900e-01 1.2370833e+00 -2.4334006e-01] [ 1.2741810e-01 1.5922271e+00 -5.9494607e-02 ... 3.0679741e+00 -2.5654528e-01 -3.5901833e-01] [-2.4142666e+00 -5.4884517e-01 -5.0199711e-01 ... 
-4.1954631e-01 4.1070366e-01 1.2821618e+00]]]]; ov_res: [[[[-8.1888402e-01 -2.5744331e-01 1.2487870e+00 ... -6.3114055e-02 3.9668214e-01 -8.2482690e-01] [ 1.6247822e+00 3.2421079e-01 -8.8649815e-01 ... -4.4797975e-01 1.8531162e-01 -3.4971520e-02] [-2.1051301e-01 -5.1646447e-01 1.7079847e+00 ... 4.3358806e-01 1.7164284e-01 5.9005892e-01] ... [ 4.4263774e-01 1.1997380e-02 -1.0158253e+00 ... -9.7531192e-02 5.3190923e-01 2.1668169e-01] [-9.7603053e-01 -2.4995778e+00 -2.5558913e-01 ... 7.4159992e-01 9.5845288e-01 -3.5340673e-01] [-5.2068120e-01 3.4055623e-01 7.2123647e-02 ... -3.2872397e-01 -9.8604071e-01 3.9613441e-01]] [[ 6.5309972e-01 -2.5864059e-01 1.7251983e+00 ... 5.5729681e-01 8.2488036e-01 -1.1769211e+00] [ 6.1471969e-01 1.4529021e+00 -7.1932697e-01 ... 1.2539843e-01 3.1842198e-02 -2.2349697e-01] [ 1.3294284e+00 -2.9891899e-01 -1.0257157e-01 ... 1.5071840e+00 -7.1583623e-01 8.1231868e-01] ... [ 2.6701411e-02 -8.0219936e-01 8.9241064e-01 ... 3.7592018e-01 1.3456492e+00 5.4687327e-01] [ 5.6117493e-01 -1.3588504e-01 -1.1754243e+00 ... 5.4176229e-01 5.9687114e-01 1.6033308e-01] [ 9.1148490e-01 3.7240881e-01 -5.6634319e-01 ... 3.2973266e+00 1.0963244e+00 1.5128320e+00]] [[ 1.4317142e+00 -1.0878686e+00 1.0837801e+00 ... -2.3887207e-01 -1.9815576e+00 1.0088484e+00] [-1.0048324e+00 -1.1175185e+00 -4.8926932e-01 ... -5.3609586e-01 1.6702543e-01 1.2236602e+00] [ 4.1447154e-01 6.1863053e-01 -4.3055367e-01 ... -1.1887952e+00 5.7225311e-01 -1.8520244e+00] ... [-1.5002452e+00 -1.3625511e+00 -6.9000435e-01 ... 2.1984074e-03 -7.6930052e-01 3.4272477e-01] [ 1.6814816e+00 9.2884678e-01 1.0557218e+00 ... 5.0917310e-01 9.5347524e-01 1.8037194e-01] [-8.5615344e-02 1.6465833e+00 1.2384549e+00 ... -3.1792626e-01 4.4727287e-01 1.1942029e+00]] ... [[ 8.2779145e-01 -1.8328857e+00 -2.4343803e+00 ... 6.3694233e-01 3.7297851e-01 -7.8233846e-02] [-7.7708960e-01 -6.5338224e-01 1.7615858e-01 ... -1.7807690e+00 2.9337734e-01 3.2997110e-01] [-3.0925262e+00 5.2892399e-01 1.2481776e+00 ... 
9.6709937e-01 2.1781747e-01 5.0974530e-01] ... [-1.4489661e-01 1.2579464e+00 5.9635442e-01 ... 7.1383797e-02 1.6614697e+00 -4.3775246e-01] [-5.9503317e-01 1.6281189e-01 5.4750514e-01 ... -1.7689519e+00 -1.1378649e-01 4.1828468e-01] [ 8.3594859e-01 7.6344651e-01 5.9759504e-01 ... -5.7961512e-01 8.5486829e-01 -7.9351026e-01]] [[-2.1638508e+00 1.0173556e+00 -3.3399996e-03 ... -1.1978282e+00 4.7381532e-01 9.8066336e-01] [-1.3734300e+00 1.6871316e+00 -6.5332150e-01 ... -5.2278036e-01 -3.2003731e-01 9.7567625e-02] [ 6.5401465e-01 -8.8254517e-01 1.5066321e-01 ... 1.7400347e+00 1.5865763e+00 -7.5165987e-01] ... [-7.1802966e-02 1.2936040e-02 -1.2057527e+00 ... 1.1232982e+00 -3.8158423e-01 -1.1777259e+00] [ 1.7050112e+00 -4.6093464e-01 9.0573478e-01 ... -1.2681264e+00 3.2897692e-02 1.0266227e+00] [-2.1387091e+00 2.6416738e+00 1.0477964e+00 ... -1.2986286e+00 1.3637885e+00 -8.0371481e-01]] [[-1.7503327e+00 -1.2532954e+00 1.2524801e+00 ... 2.7955332e-01 -8.6321920e-01 -1.0619964e-01] [ 9.8661619e-01 -8.7612711e-02 2.9111452e-02 ... 1.9645986e-01 -1.0915358e+00 -1.2333069e+00] [ 1.8858452e+00 -1.9735943e-01 5.1103294e-01 ... -6.3109750e-01 8.4549916e-01 8.9817524e-01] ... [-8.1226194e-01 -5.4807180e-01 -2.7921194e-01 ... -2.7401900e-01 1.2370833e+00 -2.4334006e-01] [ 1.2741810e-01 1.5922271e+00 -5.9494607e-02 ... 3.0679741e+00 -2.5654528e-01 -3.5901833e-01] [-2.4142666e+00 -5.4884517e-01 -5.0199711e-01 ... -4.1954631e-01 4.1070366e-01 1.2821618e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-5 - params:([20, 200, 1, 1, 2], -1) ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6048.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-5]() %self.axis : int = prim::Constant[value=-1]() %self.split : int[] = prim::Constant[value=[20, 200, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.1797851 -0.71208334 0.38309813 ... 1.0796498 0.01289464 -1.4393636 ] [-1.1408867 -0.07419984 3.1681385 ... -2.0982983 -0.9242364 -0.7558174 ] [ 0.64453423 -1.5803587 0.6453623 ... -0.10163048 0.06926539 -0.09871079] ... [ 0.7274724 -1.1763054 -0.64687204 ... -1.3791387 -0.46044737 0.3113891 ] [-1.4290688 -0.67495686 -0.5364252 ... 0.94075644 0.08277114 -1.5970265 ] [-1.3261179 1.0739552 -0.5087857 ... 2.7543323 1.2572813 -0.37024072]] [[-2.0311651 0.93356663 -0.89762396 ... 0.02085654 0.56238604 -0.28801033] [ 2.2025547 -0.7295255 -0.85021317 ... 0.12402098 -0.7992951 0.9820086 ] [-1.2272742 0.3844336 -0.21142814 ... -0.28453583 -0.1978184 -1.403359 ] ... [ 0.5618921 -2.5403852 -1.8706111 ... 0.4965623 1.4527359 0.16482027] [-1.177857 1.5099254 -0.5725635 ... 1.0100394 -0.9647614 -0.2759303 ] [ 0.24054222 0.85708904 0.54541475 ... 0.6291824 0.41079888 -0.19491132]] [[ 0.62499917 0.8035355 0.17456171 ... -0.62938106 -1.3785462 -0.15946485] [-0.72390795 1.2501907 -0.08765991 ... 0.37242323 -0.93022656 0.17463598] [-2.4755013 0.09994836 -0.7083133 ... 1.1810839 0.47218466 -0.26946473] ... [ 0.28462487 -0.7657243 -1.1520059 ... 1.3268998 -0.5592785 0.9435704 ] [ 0.53114456 1.1775669 -0.0904942 ... 0.45842147 1.4590573 -1.439097 ] [ 0.12554808 -0.7387333 0.253582 ... -1.8159328 -0.37839118 0.5246275 ]] ... [[ 1.2099692 -0.42144322 -1.2751809 ... 
0.8156047 1.0536035 -1.2850866 ] [ 1.7521788 -1.4000127 0.74014723 ... 1.0327284 -0.34823656 -0.11554887] [-0.8321271 -1.1459037 -0.92319655 ... 0.72287047 1.9884436 -0.06848134] ... [ 1.0375788 1.7501792 -0.43373075 ... 0.6375952 -1.6637038 0.7258179 ] [ 0.6939644 1.371254 -0.4930469 ... 1.1453751 0.9210248 -1.7935975 ] [ 1.2081002 -1.9549766 0.25795692 ... 1.4013187 -0.20012738 -1.1286132 ]] [[-0.6072501 1.1226907 -0.5473427 ... -0.12077361 0.7138277 0.64362746] [-0.00497782 -1.6283774 0.12868528 ... -1.290925 0.5244492 1.2459903 ] [-0.90081596 0.14751089 -0.41126797 ... 1.0091895 0.8621843 -0.20503704] ... [ 0.8322254 1.2214421 0.0631629 ... 0.03426681 -0.22085185 1.0695804 ] [-0.559262 0.42573914 -0.78607345 ... -0.01634603 0.00399513 1.8777785 ] [ 0.16923004 -0.5362 1.2709676 ... 1.6110556 -1.6638108 0.20959234]] [[-1.2397509 1.1337041 0.07768756 ... -1.5462208 0.29166943 -0.8888734 ] [ 2.6127334 2.2712002 -0.5797177 ... 0.61726934 -0.24829951 -0.45802984] [-1.4856176 0.42507774 0.8215791 ... 0.0616181 1.6878765 -0.05793576] ... [ 0.8657327 2.4192612 -0.5646562 ... 0.8906394 1.6655641 0.65585387] [ 0.4462276 -0.1350622 -1.9791926 ... -0.2170987 0.14947754 0.3381951 ] [ 0.7644416 1.7840601 -0.22479735 ... -0.04073932 0.99249387 -0.97313076]]]]; ov_res: [[[[ 0.1797851 -0.71208334 0.38309813 ... 1.0796498 0.01289464 -1.4393636 ] [-1.1408867 -0.07419984 3.1681385 ... -2.0982983 -0.9242364 -0.7558174 ] [ 0.64453423 -1.5803587 0.6453623 ... -0.10163048 0.06926539 -0.09871079] ... [ 0.7274724 -1.1763054 -0.64687204 ... -1.3791387 -0.46044737 0.3113891 ] [-1.4290688 -0.67495686 -0.5364252 ... 0.94075644 0.08277114 -1.5970265 ] [-1.3261179 1.0739552 -0.5087857 ... 2.7543323 1.2572813 -0.37024072]] [[-2.0311651 0.93356663 -0.89762396 ... 0.02085654 0.56238604 -0.28801033] [ 2.2025547 -0.7295255 -0.85021317 ... 0.12402098 -0.7992951 0.9820086 ] [-1.2272742 0.3844336 -0.21142814 ... -0.28453583 -0.1978184 -1.403359 ] ... [ 0.5618921 -2.5403852 -1.8706111 ... 
0.4965623 1.4527359 0.16482027] [-1.177857 1.5099254 -0.5725635 ... 1.0100394 -0.9647614 -0.2759303 ] [ 0.24054222 0.85708904 0.54541475 ... 0.6291824 0.41079888 -0.19491132]] [[ 0.62499917 0.8035355 0.17456171 ... -0.62938106 -1.3785462 -0.15946485] [-0.72390795 1.2501907 -0.08765991 ... 0.37242323 -0.93022656 0.17463598] [-2.4755013 0.09994836 -0.7083133 ... 1.1810839 0.47218466 -0.26946473] ... [ 0.28462487 -0.7657243 -1.1520059 ... 1.3268998 -0.5592785 0.9435704 ] [ 0.53114456 1.1775669 -0.0904942 ... 0.45842147 1.4590573 -1.439097 ] [ 0.12554808 -0.7387333 0.253582 ... -1.8159328 -0.37839118 0.5246275 ]] ... [[ 1.2099692 -0.42144322 -1.2751809 ... 0.8156047 1.0536035 -1.2850866 ] [ 1.7521788 -1.4000127 0.74014723 ... 1.0327284 -0.34823656 -0.11554887] [-0.8321271 -1.1459037 -0.92319655 ... 0.72287047 1.9884436 -0.06848134] ... [ 1.0375788 1.7501792 -0.43373075 ... 0.6375952 -1.6637038 0.7258179 ] [ 0.6939644 1.371254 -0.4930469 ... 1.1453751 0.9210248 -1.7935975 ] [ 1.2081002 -1.9549766 0.25795692 ... 1.4013187 -0.20012738 -1.1286132 ]] [[-0.6072501 1.1226907 -0.5473427 ... -0.12077361 0.7138277 0.64362746] [-0.00497782 -1.6283774 0.12868528 ... -1.290925 0.5244492 1.2459903 ] [-0.90081596 0.14751089 -0.41126797 ... 1.0091895 0.8621843 -0.20503704] ... [ 0.8322254 1.2214421 0.0631629 ... 0.03426681 -0.22085185 1.0695804 ] [-0.559262 0.42573914 -0.78607345 ... -0.01634603 0.00399513 1.8777785 ] [ 0.16923004 -0.5362 1.2709676 ... 1.6110556 -1.6638108 0.20959234]] [[-1.2397509 1.1337041 0.07768756 ... -1.5462208 0.29166943 -0.8888734 ] [ 2.6127334 2.2712002 -0.5797177 ... 0.61726934 -0.24829951 -0.45802984] [-1.4856176 0.42507774 0.8215791 ... 0.0616181 1.6878765 -0.05793576] ... [ 0.8657327 2.4192612 -0.5646562 ... 0.8906394 1.6655641 0.65585387] [ 0.4462276 -0.1350622 -1.9791926 ... -0.2170987 0.14947754 0.3381951 ] [ 0.7644416 1.7840601 -0.22479735 ... -0.04073932 0.99249387 -0.97313076]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-2 - params:(2, 1) ] | 0.06 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6050.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-2]() %self.axis : int = prim::Constant[value=1]() %self.split : int = prim::Constant[value=2]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.55485755 0.57056856 0.5086475 ... 0.3133423 -0.3217184 -1.6960478 ] [-0.6543138 -1.7687626 0.46253672 ... -0.18241237 -1.3722111 1.4922198 ] [ 1.4845675 0.19611996 0.26273447 ... 1.2266923 -0.7423107 0.8824529 ] ... [-1.0431759 0.6400372 -1.2763792 ... 0.6310238 1.2145307 0.7192757 ] [ 0.19782057 0.21003306 1.415152 ... 0.40496415 0.6907793 -0.58893234] [-0.46896806 -0.7686155 0.15811093 ... -0.33961824 0.09184801 -1.1311027 ]] [[ 0.34842288 1.6173636 0.56791705 ... -0.1020173 -1.1955608 -2.8992145 ] [-0.07392587 -0.34265634 0.2983245 ... 0.69846195 0.092501 -1.2414874 ] [-1.2158488 1.2803135 0.13404316 ... 0.01233944 1.7440189 1.9805578 ] ... [ 2.275273 -1.414316 0.17147358 ... 1.4630728 1.145209 -0.78870404] [-0.09467559 1.1900702 -1.0849848 ... -0.7918359 -0.2667674 0.06308777] [-1.7999964 0.98425984 -0.6042053 ... -1.7069265 -0.740069 0.44465315]]]]; ov_res: [[[[ 0.55485755 0.57056856 0.5086475 ... 0.3133423 -0.3217184 -1.6960478 ] [-0.6543138 -1.7687626 0.46253672 ... -0.18241237 -1.3722111 1.4922198 ] [ 1.4845675 0.19611996 0.26273447 ... 1.2266923 -0.7423107 0.8824529 ] ... [-1.0431759 0.6400372 -1.2763792 ... 0.6310238 1.2145307 0.7192757 ] [ 0.19782057 0.21003306 1.415152 ... 0.40496415 0.6907793 -0.58893234] [-0.46896806 -0.7686155 0.15811093 ... -0.33961824 0.09184801 -1.1311027 ]] [[ 0.34842288 1.6173636 0.56791705 ... 
-0.1020173 -1.1955608 -2.8992145 ] [-0.07392587 -0.34265634 0.2983245 ... 0.69846195 0.092501 -1.2414874 ] [-1.2158488 1.2803135 0.13404316 ... 0.01233944 1.7440189 1.9805578 ] ... [ 2.275273 -1.414316 0.17147358 ... 1.4630728 1.145209 -0.78870404] [-0.09467559 1.1900702 -1.0849848 ... -0.7918359 -0.2667674 0.06308777] [-1.7999964 0.98425984 -0.6042053 ... -1.7069265 -0.740069 0.44465315]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-2 - params:(45, 2) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6052.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-2]() %self.axis : int = prim::Constant[value=2]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-0.7275358 1.3620688 -2.110544 ... -0.12246052 -0.22049749 0.33328924] [-0.00885972 -1.5268844 -0.24765256 ... -0.27698618 0.22820833 -0.2650621 ] [ 0.35490555 -0.6932587 0.56958187 ... 0.71346265 0.39562523 0.71367407] ... [ 0.9660635 1.1005262 -1.5190167 ... -0.5547466 -0.4549195 -0.57885396] [ 0.8030418 0.8427846 2.044623 ... -0.74646163 -1.3788793 1.6960679 ] [ 0.73625314 2.0382118 0.33103168 ... 1.3031411 -1.0274091 1.9207237 ]] [[ 1.5999024 -0.53271997 -0.14418311 ... -1.1973517 0.93543357 -0.17242584] [ 0.52119637 -1.2652063 -0.01970563 ... -0.26665044 1.5854337 -1.379916 ] [ 0.06072951 1.2140357 -0.261985 ... 0.99186486 1.3106709 -0.3883591 ] ... [-0.5671485 1.026118 0.26552245 ... 0.8079307 1.0021996 0.51506907] [-1.2204256 -1.4190809 -0.21336159 ... -1.7386805 0.5972091 0.70954734] [ 1.6934079 1.2614049 -0.32669088 ... 0.6942076 -0.3865547 0.50585294]] [[ 0.9679482 0.00803967 1.4798642 ... -1.1135889 0.4605737 0.1161636 ] [-0.26757914 0.37716857 0.90147555 ... -0.80051255 -0.9391835 -0.41340083] [-0.43700525 0.20495233 -1.6889869 ... -0.8527102 0.67614913 -0.38153648] ... [-0.8475284 -0.6684017 1.9890156 ... -1.6481391 -0.7762297 -0.88618463] [ 0.6788364 -1.1116725 -1.7307388 ... 0.1892804 1.9089893 -0.5454667 ] [ 1.4076337 0.8201238 -0.03915136 ... 0.5360597 -1.2907585 0.95890516]] ... [[-0.80988926 -0.25764108 2.1653416 ... 
0.13356248 0.15503971 -0.45538336] [-0.76840895 0.84549296 -1.183207 ... -0.2637182 -0.50086343 -0.17813563] [-1.1701821 0.9586763 0.36469433 ... 1.9268794 0.25578398 0.49977192] ... [ 1.3937404 1.26797 1.1898012 ... -0.05399902 0.4716375 -1.1610029 ] [-0.5828508 -1.3240427 -0.3496891 ... -2.7560737 -0.0856219 0.16212073] [-0.6687692 -2.971747 -0.16892645 ... 0.8614904 -0.34420925 0.52617896]] [[-0.2642824 -0.42102408 0.45370394 ... -0.80993605 0.92052525 -0.3475628 ] [-0.7960199 -1.9750551 0.91825306 ... 1.0972208 -0.9497457 -0.9377832 ] [ 0.42726564 -0.65075845 0.811588 ... 0.14070837 0.9000245 0.41911998] ... [ 1.5398326 0.61001277 -0.6029633 ... -0.52016217 -0.5613336 1.1206753 ] [ 0.14575784 -0.83761394 0.20036429 ... 0.7859221 1.0413846 0.56463104] [-0.8848948 1.6258591 -1.1232321 ... 0.73544204 -1.5359257 -0.43325984]] [[ 0.12459461 -1.8812088 -0.6591163 ... -0.15510024 -0.49661654 0.53921115] [-1.5428382 -1.0518246 -1.2529722 ... 1.7508926 0.3217997 0.5579666 ] [-2.1727507 0.85363793 0.26970908 ... 0.5196686 0.20275016 -0.47269946] ... [-0.6351146 0.1749762 -0.7111326 ... -1.2893859 0.45339012 -0.23667362] [-1.3164287 1.6103737 -0.7412743 ... -1.0303 -0.57489884 -0.82051355] [ 0.36324602 -0.25974658 0.63193595 ... 1.3120315 -0.8690041 0.8180069 ]]]]; ov_res: [[[[-0.7275358 1.3620688 -2.110544 ... -0.12246052 -0.22049749 0.33328924] [-0.00885972 -1.5268844 -0.24765256 ... -0.27698618 0.22820833 -0.2650621 ] [ 0.35490555 -0.6932587 0.56958187 ... 0.71346265 0.39562523 0.71367407] ... [ 0.9660635 1.1005262 -1.5190167 ... -0.5547466 -0.4549195 -0.57885396] [ 0.8030418 0.8427846 2.044623 ... -0.74646163 -1.3788793 1.6960679 ] [ 0.73625314 2.0382118 0.33103168 ... 1.3031411 -1.0274091 1.9207237 ]] [[ 1.5999024 -0.53271997 -0.14418311 ... -1.1973517 0.93543357 -0.17242584] [ 0.52119637 -1.2652063 -0.01970563 ... -0.26665044 1.5854337 -1.379916 ] [ 0.06072951 1.2140357 -0.261985 ... 0.99186486 1.3106709 -0.3883591 ] ... [-0.5671485 1.026118 0.26552245 ... 
0.8079307 1.0021996 0.51506907] [-1.2204256 -1.4190809 -0.21336159 ... -1.7386805 0.5972091 0.70954734] [ 1.6934079 1.2614049 -0.32669088 ... 0.6942076 -0.3865547 0.50585294]] [[ 0.9679482 0.00803967 1.4798642 ... -1.1135889 0.4605737 0.1161636 ] [-0.26757914 0.37716857 0.90147555 ... -0.80051255 -0.9391835 -0.41340083] [-0.43700525 0.20495233 -1.6889869 ... -0.8527102 0.67614913 -0.38153648] ... [-0.8475284 -0.6684017 1.9890156 ... -1.6481391 -0.7762297 -0.88618463] [ 0.6788364 -1.1116725 -1.7307388 ... 0.1892804 1.9089893 -0.5454667 ] [ 1.4076337 0.8201238 -0.03915136 ... 0.5360597 -1.2907585 0.95890516]] ... [[-0.80988926 -0.25764108 2.1653416 ... 0.13356248 0.15503971 -0.45538336] [-0.76840895 0.84549296 -1.183207 ... -0.2637182 -0.50086343 -0.17813563] [-1.1701821 0.9586763 0.36469433 ... 1.9268794 0.25578398 0.49977192] ... [ 1.3937404 1.26797 1.1898012 ... -0.05399902 0.4716375 -1.1610029 ] [-0.5828508 -1.3240427 -0.3496891 ... -2.7560737 -0.0856219 0.16212073] [-0.6687692 -2.971747 -0.16892645 ... 0.8614904 -0.34420925 0.52617896]] [[-0.2642824 -0.42102408 0.45370394 ... -0.80993605 0.92052525 -0.3475628 ] [-0.7960199 -1.9750551 0.91825306 ... 1.0972208 -0.9497457 -0.9377832 ] [ 0.42726564 -0.65075845 0.811588 ... 0.14070837 0.9000245 0.41911998] ... [ 1.5398326 0.61001277 -0.6029633 ... -0.52016217 -0.5613336 1.1206753 ] [ 0.14575784 -0.83761394 0.20036429 ... 0.7859221 1.0413846 0.56463104] [-0.8848948 1.6258591 -1.1232321 ... 0.73544204 -1.5359257 -0.43325984]] [[ 0.12459461 -1.8812088 -0.6591163 ... -0.15510024 -0.49661654 0.53921115] [-1.5428382 -1.0518246 -1.2529722 ... 1.7508926 0.3217997 0.5579666 ] [-2.1727507 0.85363793 0.26970908 ... 0.5196686 0.20275016 -0.47269946] ... [-0.6351146 0.1749762 -0.7111326 ... -1.2893859 0.45339012 -0.23667362] [-1.3164287 1.6103737 -0.7412743 ... -1.0303 -0.57489884 -0.82051355] [ 0.36324602 -0.25974658 0.63193595 ... 1.3120315 -0.8690041 0.8180069 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-2 - params:(45, -1) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6054.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-2]() %self.axis : int = prim::Constant[value=-1]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.36462098 -1.9688386 -1.6648848 ... 0.5355646 -1.1484085 0.6925585 ] [-1.2635462 -2.408439 0.01064281 ... 1.0262125 -1.007106 -0.1456352 ] [ 0.6341156 -1.1253082 0.1606274 ... -1.9092789 -0.38454056 0.03562331] ... [-1.1532061 0.36488333 -0.2955292 ... 0.9409369 1.1289457 2.7531497 ] [ 1.3434588 0.16114521 0.8906446 ... 0.7930001 -2.0929642 1.7803098 ] [-0.85310525 1.032879 -0.9359328 ... 0.73732674 -1.0286573 0.9914493 ]] [[-1.4355052 -1.0305312 0.44663295 ... 0.7740798 -0.02198066 -0.13502727] [ 0.5887263 1.3094168 2.5029151 ... 0.50212795 0.54926956 1.4821981 ] [-0.7879466 2.0773342 -0.25340077 ... -0.8721225 0.60273856 -1.1590006 ] ... [ 0.54241157 -1.4445733 -0.27356604 ... -1.2554269 0.39018142 1.6704531 ] [-0.02911791 -0.55875856 -2.7883072 ... 0.7430757 -0.7744974 -0.03393649] [ 0.22416042 0.10460618 0.39614293 ... 1.3161392 0.4724422 -0.24018317]] [[-1.3385488 1.5465504 -0.27498302 ... -1.1397599 1.6733155 1.6552409 ] [-0.7986999 -0.8916683 -1.8107525 ... 0.518338 -0.50120276 0.32267353] [ 1.1348865 -0.339921 -0.631924 ... -0.47577706 -1.4750161 -0.5871975 ] ... [ 1.7581067 -0.6319348 0.6292031 ... 0.07532453 -0.20019288 0.6102295 ] [-1.0121104 0.07970409 0.5420499 ... 0.46493822 -0.66586465 -0.85945475] [-0.3277441 -0.46141958 -0.08155046 ... 0.5091253 -0.87106115 0.23549873]] ... [[ 0.9406129 0.5070285 1.4783175 ... 
-1.0032566 1.5863051 -0.9336036 ] [-0.8863497 -1.9519813 -0.01677847 ... 0.5133448 0.3741884 -0.2582035 ] [-0.1007549 1.2520431 0.56875557 ... 0.59118074 -0.2816038 -0.23093897] ... [-0.6050741 -0.12570588 1.5786443 ... 0.6132474 2.0937014 -0.8286221 ] [-0.57095784 1.4614593 -1.3712674 ... -0.6271522 -0.78300035 -0.58915824] [-0.39289466 -1.3459464 1.3241757 ... 0.46161136 -0.60868764 -0.06042203]] [[ 0.47645867 0.5381774 -0.08168472 ... -0.91657436 0.7111845 -1.5978727 ] [ 0.8697273 0.16734658 -0.11837313 ... 1.1509719 1.1438028 -0.02861033] [ 0.7951205 -0.57885665 -0.2213134 ... 1.9762535 -1.0812991 -0.6210849 ] ... [ 1.0922257 -1.8462842 -0.2101584 ... 0.13700227 -0.71386176 -0.96868014] [ 1.7681231 -1.6303172 -0.70077544 ... 0.56793946 -0.844159 -1.064619 ] [ 0.32593042 -0.7541498 -0.67865217 ... 0.11295749 1.0649819 0.9275942 ]] [[-0.7512022 0.5325513 -0.5408971 ... -0.40004206 1.1636833 1.4877431 ] [ 0.36447072 2.1898055 1.5431718 ... 0.15958034 1.8257779 -1.0249522 ] [-0.36102715 -0.2731591 -0.3376036 ... 0.08646577 -0.37010542 -0.39561263] ... [ 1.520519 1.1594357 1.7396257 ... 0.09683862 0.34020543 0.81678814] [-1.1863508 -0.43552148 -1.2488621 ... 0.85866624 0.09034639 0.91927683] [ 1.222596 -1.1783625 -0.684186 ... 0.03695448 -0.43348202 0.4190667 ]]]]; ov_res: [[[[ 0.36462098 -1.9688386 -1.6648848 ... 0.5355646 -1.1484085 0.6925585 ] [-1.2635462 -2.408439 0.01064281 ... 1.0262125 -1.007106 -0.1456352 ] [ 0.6341156 -1.1253082 0.1606274 ... -1.9092789 -0.38454056 0.03562331] ... [-1.1532061 0.36488333 -0.2955292 ... 0.9409369 1.1289457 2.7531497 ] [ 1.3434588 0.16114521 0.8906446 ... 0.7930001 -2.0929642 1.7803098 ] [-0.85310525 1.032879 -0.9359328 ... 0.73732674 -1.0286573 0.9914493 ]] [[-1.4355052 -1.0305312 0.44663295 ... 0.7740798 -0.02198066 -0.13502727] [ 0.5887263 1.3094168 2.5029151 ... 0.50212795 0.54926956 1.4821981 ] [-0.7879466 2.0773342 -0.25340077 ... -0.8721225 0.60273856 -1.1590006 ] ... [ 0.54241157 -1.4445733 -0.27356604 ... 
-1.2554269 0.39018142 1.6704531 ] [-0.02911791 -0.55875856 -2.7883072 ... 0.7430757 -0.7744974 -0.03393649] [ 0.22416042 0.10460618 0.39614293 ... 1.3161392 0.4724422 -0.24018317]] [[-1.3385488 1.5465504 -0.27498302 ... -1.1397599 1.6733155 1.6552409 ] [-0.7986999 -0.8916683 -1.8107525 ... 0.518338 -0.50120276 0.32267353] [ 1.1348865 -0.339921 -0.631924 ... -0.47577706 -1.4750161 -0.5871975 ] ... [ 1.7581067 -0.6319348 0.6292031 ... 0.07532453 -0.20019288 0.6102295 ] [-1.0121104 0.07970409 0.5420499 ... 0.46493822 -0.66586465 -0.85945475] [-0.3277441 -0.46141958 -0.08155046 ... 0.5091253 -0.87106115 0.23549873]] ... [[ 0.9406129 0.5070285 1.4783175 ... -1.0032566 1.5863051 -0.9336036 ] [-0.8863497 -1.9519813 -0.01677847 ... 0.5133448 0.3741884 -0.2582035 ] [-0.1007549 1.2520431 0.56875557 ... 0.59118074 -0.2816038 -0.23093897] ... [-0.6050741 -0.12570588 1.5786443 ... 0.6132474 2.0937014 -0.8286221 ] [-0.57095784 1.4614593 -1.3712674 ... -0.6271522 -0.78300035 -0.58915824] [-0.39289466 -1.3459464 1.3241757 ... 0.46161136 -0.60868764 -0.06042203]] [[ 0.47645867 0.5381774 -0.08168472 ... -0.91657436 0.7111845 -1.5978727 ] [ 0.8697273 0.16734658 -0.11837313 ... 1.1509719 1.1438028 -0.02861033] [ 0.7951205 -0.57885665 -0.2213134 ... 1.9762535 -1.0812991 -0.6210849 ] ... [ 1.0922257 -1.8462842 -0.2101584 ... 0.13700227 -0.71386176 -0.96868014] [ 1.7681231 -1.6303172 -0.70077544 ... 0.56793946 -0.844159 -1.064619 ] [ 0.32593042 -0.7541498 -0.67865217 ... 0.11295749 1.0649819 0.9275942 ]] [[-0.7512022 0.5325513 -0.5408971 ... -0.40004206 1.1636833 1.4877431 ] [ 0.36447072 2.1898055 1.5431718 ... 0.15958034 1.8257779 -1.0249522 ] [-0.36102715 -0.2731591 -0.3376036 ... 0.08646577 -0.37010542 -0.39561263] ... [ 1.520519 1.1594357 1.7396257 ... 0.09683862 0.34020543 0.81678814] [-1.1863508 -0.43552148 -1.2488621 ... 0.85866624 0.09034639 0.91927683] [ 1.222596 -1.1783625 -0.684186 ... 0.03695448 -0.43348202 0.4190667 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-2 - params:([2, 2, 2, 2, 2], 1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6056.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-2]() %self.axis : int = prim::Constant[value=1]() %self.split : int[] = prim::Constant[value=[2, 2, 2, 2, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.38285005 0.10334897 0.05986319 ... -0.7070958 -0.26176375 0.46089494] [ 0.10859628 0.09750792 0.1192394 ... 0.78246236 0.1448713 0.9149225 ] [ 2.3547559 -1.2094467 0.08295804 ... -0.95288 -1.6067927 0.16173247] ... [-0.39441547 0.0371606 0.32576323 ... -0.58855474 -0.4846608 0.75041175] [ 1.239984 0.7017845 -0.8553704 ... -1.6399086 -1.7862995 -1.0790907 ] [ 0.8634836 2.2031245 -1.4043843 ... 1.3629019 -1.9752535 1.006305 ]] [[ 0.4352154 -0.3994249 -0.07401773 ... -0.00700765 -0.18057007 0.20296928] [-0.73006004 1.6920174 1.4473426 ... -0.7733882 -0.91929126 0.20010523] [-1.3533839 0.00342565 0.14325213 ... -0.5084215 -2.402038 -0.99825823] ... [ 0.7603419 0.06480885 0.02958144 ... 0.23678449 0.05560345 0.522283 ] [-0.2914586 0.49712208 0.12774953 ... 0.3694041 0.6452077 -0.9577516 ] [-0.37213612 1.1444342 -1.0256855 ... -0.25638556 -0.9773449 -0.44302523]]]]; ov_res: [[[[ 0.38285005 0.10334897 0.05986319 ... -0.7070958 -0.26176375 0.46089494] [ 0.10859628 0.09750792 0.1192394 ... 0.78246236 0.1448713 0.9149225 ] [ 2.3547559 -1.2094467 0.08295804 ... -0.95288 -1.6067927 0.16173247] ... [-0.39441547 0.0371606 0.32576323 ... -0.58855474 -0.4846608 0.75041175] [ 1.239984 0.7017845 -0.8553704 ... -1.6399086 -1.7862995 -1.0790907 ] [ 0.8634836 2.2031245 -1.4043843 ... 1.3629019 -1.9752535 1.006305 ]] [[ 0.4352154 -0.3994249 -0.07401773 ... 
-0.00700765 -0.18057007 0.20296928] [-0.73006004 1.6920174 1.4473426 ... -0.7733882 -0.91929126 0.20010523] [-1.3533839 0.00342565 0.14325213 ... -0.5084215 -2.402038 -0.99825823] ... [ 0.7603419 0.06480885 0.02958144 ... 0.23678449 0.05560345 0.522283 ] [-0.2914586 0.49712208 0.12774953 ... 0.3694041 0.6452077 -0.9577516 ] [-0.37213612 1.1444342 -1.0256855 ... -0.25638556 -0.9773449 -0.44302523]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-2 - params:([200, 20, 1, 1, 2], 2) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6058.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-2]() %self.axis : int = prim::Constant[value=2]() %self.split : int[] = prim::Constant[value=[200, 20, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.4537696 -1.0304615 0.18324882 ... 0.8174016 1.2721713 0.33799362]] [[-0.7108407 -1.960773 0.12292378 ... 0.21773401 1.7993388 -1.6743181 ]] [[-0.39030308 -0.24265274 -0.6415632 ... -0.6677542 -1.820512 -3.185855 ]] ... [[ 0.05319891 -0.3653864 -0.19682865 ... 1.1472886 -0.3541356 -0.19314072]] [[ 2.657332 -0.5167715 -0.66382307 ... 0.6668269 0.8700649 -2.0256062 ]] [[-0.91192037 0.9821347 -0.57684857 ... 0.25890586 -0.3902086 1.1800219 ]]]]; ov_res: [[[[ 0.4537696 -1.0304615 0.18324882 ... 0.8174016 1.2721713 0.33799362]] [[-0.7108407 -1.960773 0.12292378 ... 0.21773401 1.7993388 -1.6743181 ]] [[-0.39030308 -0.24265274 -0.6415632 ... -0.6677542 -1.820512 -3.185855 ]] ... [[ 0.05319891 -0.3653864 -0.19682865 ... 1.1472886 -0.3541356 -0.19314072]] [[ 2.657332 -0.5167715 -0.66382307 ... 0.6668269 0.8700649 -2.0256062 ]] [[-0.91192037 0.9821347 -0.57684857 ... 0.25890586 -0.3902086 1.1800219 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-2 - params:([20, 200, 1, 1, 2], -1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6060.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-2]() %self.axis : int = prim::Constant[value=-1]() %self.split : int[] = prim::Constant[value=[20, 200, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-0.5266977 ] [ 0.38663808] [-1.1317662 ] ... [ 0.30298224] [ 1.2369934 ] [ 0.3980059 ]] [[ 0.2036933 ] [ 0.7340824 ] [ 0.4713424 ] ... [-0.24615686] [-0.9653122 ] [ 0.8130827 ]] [[-0.36849016] [-2.2630277 ] [-1.6285641 ] ... [-1.8179204 ] [ 0.8315302 ] [ 0.69913304]] ... [[ 0.34188044] [-1.5941755 ] [ 0.2529148 ] ... [ 0.9328961 ] [ 0.26941487] [ 0.3816503 ]] [[ 0.50974196] [ 2.7796774 ] [-0.3194222 ] ... [ 0.05978325] [-2.4645624 ] [ 0.05849857]] [[-0.6944267 ] [-1.6857026 ] [ 1.3351139 ] ... [-0.82186204] [ 2.2947257 ] [ 0.34381166]]]]; ov_res: [[[[-0.5266977 ] [ 0.38663808] [-1.1317662 ] ... [ 0.30298224] [ 1.2369934 ] [ 0.3980059 ]] [[ 0.2036933 ] [ 0.7340824 ] [ 0.4713424 ] ... [-0.24615686] [-0.9653122 ] [ 0.8130827 ]] [[-0.36849016] [-2.2630277 ] [-1.6285641 ] ... [-1.8179204 ] [ 0.8315302 ] [ 0.69913304]] ... [[ 0.34188044] [-1.5941755 ] [ 0.2529148 ] ... [ 0.9328961 ] [ 0.26941487] [ 0.3816503 ]] [[ 0.50974196] [ 2.7796774 ] [-0.3194222 ] ... [ 0.05978325] [-2.4645624 ] [ 0.05849857]] [[-0.6944267 ] [-1.6857026 ] [ 1.3351139 ] ... [-0.82186204] [ 2.2947257 ] [ 0.34381166]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-1 - params:(2, 1) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6062.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-1]() %self.axis : int = prim::Constant[value=1]() %self.split : int = prim::Constant[value=2]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 2.0649045 -0.8636152 0.8294227 ... 0.9351548 -0.04253243 -0.06306681] [-0.5151387 0.3823777 1.6833043 ... 0.6180719 1.222832 1.5397944 ] [-0.5559609 -1.3517694 0.10801403 ... 0.93127793 -0.17849815 -0.55278397] ... [ 1.1923915 -2.1220405 -0.23246478 ... -0.2354299 -1.3132567 -0.7478354 ] [ 1.0561703 0.84403574 -0.1430799 ... -1.0894217 -1.8465862 -0.40481943] [ 0.46427658 1.0309548 0.5836395 ... 1.81185 0.96143085 -0.6369396 ]] [[-0.60581714 -0.4733564 0.31958693 ... -0.4998298 0.7871592 0.15456888] [ 0.11611521 1.3016355 -0.46609208 ... -0.76033205 0.9753664 2.1593733 ] [ 2.0296073 -0.23583609 1.118278 ... 1.8427219 -1.4020747 0.5109228 ] ... [ 0.25460222 -2.2317352 0.03835474 ... 2.0703275 1.1346303 -1.0853217 ] [-0.26294076 -1.4363953 -0.14062439 ... 1.26762 -0.76327527 -1.7810167 ] [ 0.37238246 0.64398366 -0.50320923 ... 0.03601712 -0.6457557 0.6041917 ]]]]; ov_res: [[[[ 2.0649045 -0.8636152 0.8294227 ... 0.9351548 -0.04253243 -0.06306681] [-0.5151387 0.3823777 1.6833043 ... 0.6180719 1.222832 1.5397944 ] [-0.5559609 -1.3517694 0.10801403 ... 0.93127793 -0.17849815 -0.55278397] ... [ 1.1923915 -2.1220405 -0.23246478 ... -0.2354299 -1.3132567 -0.7478354 ] [ 1.0561703 0.84403574 -0.1430799 ... -1.0894217 -1.8465862 -0.40481943] [ 0.46427658 1.0309548 0.5836395 ... 1.81185 0.96143085 -0.6369396 ]] [[-0.60581714 -0.4733564 0.31958693 ... 
-0.4998298 0.7871592 0.15456888] [ 0.11611521 1.3016355 -0.46609208 ... -0.76033205 0.9753664 2.1593733 ] [ 2.0296073 -0.23583609 1.118278 ... 1.8427219 -1.4020747 0.5109228 ] ... [ 0.25460222 -2.2317352 0.03835474 ... 2.0703275 1.1346303 -1.0853217 ] [-0.26294076 -1.4363953 -0.14062439 ... 1.26762 -0.76327527 -1.7810167 ] [ 0.37238246 0.64398366 -0.50320923 ... 0.03601712 -0.6457557 0.6041917 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-1 - params:(45, 2) ] | 0.06 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6064.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-1]() %self.axis : int = prim::Constant[value=2]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.7804518 -2.7200735 0.2730973 ... -0.87455904 0.7839184 1.6457261 ] [-0.08660743 -0.14363657 -0.49955142 ... -0.03441358 0.8872453 1.0060117 ] [-0.284955 0.4996878 0.45634001 ... -0.77993095 -0.51763254 1.5566709 ] ... [ 1.2578298 -0.12669685 -1.441309 ... -0.18823986 2.0356803 0.3565011 ] [ 0.60213053 -0.60779595 -0.6765431 ... -0.61302704 -0.47580138 -0.04857866] [-1.4080523 -1.4464259 1.4054209 ... 0.6771457 1.1384437 1.4917233 ]] [[ 0.84640026 -0.25115347 -1.0538435 ... 0.19803523 1.9332404 -0.277526 ] [-0.43627536 -0.30582508 0.02944923 ... 0.69134474 0.6117108 2.0801868 ] [-0.7188667 0.29241014 0.13962358 ... -0.95407236 -1.9033626 0.85684127] ... [ 0.48751956 2.0418527 0.9250673 ... 1.1144488 0.13821952 -1.4330741 ] [ 0.98308414 -0.09534628 -0.6746886 ... 0.8061072 -0.44262812 0.61411834] [ 0.5662053 -0.45296276 -0.49557307 ... -1.2341636 1.4808615 -0.13989337]] [[ 0.20622933 0.84476787 -0.16525193 ... -0.30742487 -1.3591644 -0.8000919 ] [ 1.3018094 -0.03147848 0.772706 ... 0.22174673 -0.3638488 1.8378776 ] [ 2.0271778 0.9918387 0.05576517 ... -0.4814358 1.8055766 0.0345293 ] ... [-1.2333599 0.6128598 -2.7600698 ... -1.4088776 0.34481338 -0.15321752] [-0.04893935 0.42488965 0.06964472 ... -1.0616566 -0.7481558 -0.27699405] [-0.4830151 0.9166592 -0.9129298 ... 1.3082433 0.786979 0.7495775 ]] ... [[ 0.1347462 -0.9195938 -2.0320024 ... 
0.3855586 -0.88190305 -1.0517063 ] [-0.4730146 1.1724303 -1.1806369 ... 1.305839 -0.7885406 1.2288727 ] [-0.25837114 -0.398255 -1.090426 ... -0.04806938 1.2722554 -1.5019346 ] ... [-0.9456091 1.2089323 -1.3458875 ... 1.4207153 -0.9105039 0.26524156] [ 0.64833474 0.3235287 -0.4917511 ... -0.5522962 -0.87209964 1.5704031 ] [-0.06856836 0.45492604 0.22686304 ... 0.70608574 -0.45977664 -0.79820585]] [[-0.09100451 1.3781477 1.4893135 ... -0.62909126 0.9957295 -0.40008697] [-0.3659831 -0.354232 0.77639186 ... 1.5400014 -0.50679237 -1.0153282 ] [-1.6395314 0.36533847 -0.16291356 ... -1.6085436 -0.7181368 -0.16167767] ... [ 0.6461393 0.8703377 1.1081487 ... 1.1144376 -0.89224666 0.32645342] [-1.0104599 -1.2271618 -0.5831926 ... 1.7713165 -0.2662748 -1.1216702 ] [-0.99511236 0.41780782 3.480616 ... -0.8285398 1.3690785 0.55423045]] [[ 1.4147134 1.3774732 -0.9415033 ... 0.568683 -0.62940973 -0.5912705 ] [-0.40662757 -0.939083 0.88324803 ... 0.00790419 0.29053885 0.12324812] [ 0.88714457 1.1727097 0.8741599 ... 0.6373619 0.37598607 -1.77368 ] ... [-1.0146506 -0.62642616 -1.0674262 ... 0.50965464 -2.4149551 0.565286 ] [ 0.89686984 -1.1806586 -1.0912106 ... -1.2505065 0.04495801 -0.02382928] [-0.38211066 -0.07418773 0.13013257 ... 1.1756728 -0.5727379 0.9586845 ]]]]; ov_res: [[[[ 0.7804518 -2.7200735 0.2730973 ... -0.87455904 0.7839184 1.6457261 ] [-0.08660743 -0.14363657 -0.49955142 ... -0.03441358 0.8872453 1.0060117 ] [-0.284955 0.4996878 0.45634001 ... -0.77993095 -0.51763254 1.5566709 ] ... [ 1.2578298 -0.12669685 -1.441309 ... -0.18823986 2.0356803 0.3565011 ] [ 0.60213053 -0.60779595 -0.6765431 ... -0.61302704 -0.47580138 -0.04857866] [-1.4080523 -1.4464259 1.4054209 ... 0.6771457 1.1384437 1.4917233 ]] [[ 0.84640026 -0.25115347 -1.0538435 ... 0.19803523 1.9332404 -0.277526 ] [-0.43627536 -0.30582508 0.02944923 ... 0.69134474 0.6117108 2.0801868 ] [-0.7188667 0.29241014 0.13962358 ... -0.95407236 -1.9033626 0.85684127] ... [ 0.48751956 2.0418527 0.9250673 ... 
1.1144488 0.13821952 -1.4330741 ] [ 0.98308414 -0.09534628 -0.6746886 ... 0.8061072 -0.44262812 0.61411834] [ 0.5662053 -0.45296276 -0.49557307 ... -1.2341636 1.4808615 -0.13989337]] [[ 0.20622933 0.84476787 -0.16525193 ... -0.30742487 -1.3591644 -0.8000919 ] [ 1.3018094 -0.03147848 0.772706 ... 0.22174673 -0.3638488 1.8378776 ] [ 2.0271778 0.9918387 0.05576517 ... -0.4814358 1.8055766 0.0345293 ] ... [-1.2333599 0.6128598 -2.7600698 ... -1.4088776 0.34481338 -0.15321752] [-0.04893935 0.42488965 0.06964472 ... -1.0616566 -0.7481558 -0.27699405] [-0.4830151 0.9166592 -0.9129298 ... 1.3082433 0.786979 0.7495775 ]] ... [[ 0.1347462 -0.9195938 -2.0320024 ... 0.3855586 -0.88190305 -1.0517063 ] [-0.4730146 1.1724303 -1.1806369 ... 1.305839 -0.7885406 1.2288727 ] [-0.25837114 -0.398255 -1.090426 ... -0.04806938 1.2722554 -1.5019346 ] ... [-0.9456091 1.2089323 -1.3458875 ... 1.4207153 -0.9105039 0.26524156] [ 0.64833474 0.3235287 -0.4917511 ... -0.5522962 -0.87209964 1.5704031 ] [-0.06856836 0.45492604 0.22686304 ... 0.70608574 -0.45977664 -0.79820585]] [[-0.09100451 1.3781477 1.4893135 ... -0.62909126 0.9957295 -0.40008697] [-0.3659831 -0.354232 0.77639186 ... 1.5400014 -0.50679237 -1.0153282 ] [-1.6395314 0.36533847 -0.16291356 ... -1.6085436 -0.7181368 -0.16167767] ... [ 0.6461393 0.8703377 1.1081487 ... 1.1144376 -0.89224666 0.32645342] [-1.0104599 -1.2271618 -0.5831926 ... 1.7713165 -0.2662748 -1.1216702 ] [-0.99511236 0.41780782 3.480616 ... -0.8285398 1.3690785 0.55423045]] [[ 1.4147134 1.3774732 -0.9415033 ... 0.568683 -0.62940973 -0.5912705 ] [-0.40662757 -0.939083 0.88324803 ... 0.00790419 0.29053885 0.12324812] [ 0.88714457 1.1727097 0.8741599 ... 0.6373619 0.37598607 -1.77368 ] ... [-1.0146506 -0.62642616 -1.0674262 ... 0.50965464 -2.4149551 0.565286 ] [ 0.89686984 -1.1806586 -1.0912106 ... -1.2505065 0.04495801 -0.02382928] [-0.38211066 -0.07418773 0.13013257 ... 1.1756728 -0.5727379 0.9586845 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-1 - params:(45, -1) ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6066.aten_split, %input.1 : Tensor): %self.axis : int = prim::Constant[value=-1]() %self.split : int = prim::Constant[value=45]() %4 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %5 : Tensor = aten::__getitem__(%4, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%5) fw_re: [[[[ 2.4671779 -0.31380573 -1.2058524 ... 0.85880846 1.3117398 1.7936771 ] [-0.9879277 0.6911245 -1.3275167 ... -0.38169938 -0.7733776 -0.37240243] [ 0.9718477 0.13870762 0.25105816 ... -0.61954254 0.9440088 1.1975015 ] ... [-0.31395376 0.18268746 0.11960965 ... 0.6496506 0.54286474 0.627335 ] [ 0.3078789 1.1736262 1.2596216 ... -0.25740093 -0.96821874 -0.93120813] [ 1.4605016 -0.65088165 2.1424615 ... -0.36614877 -2.0266883 0.03906069]] [[ 1.1238338 -1.0225298 -1.5202514 ... 0.795213 0.24419746 -0.16147459] [-0.22300948 0.34058768 -1.7276438 ... -0.10660526 -0.38529718 0.6401401 ] [-0.56484663 -0.6984503 -2.0085135 ... 0.40945923 -0.90100205 2.4170356 ] ... [ 0.8253981 0.22151127 0.32382703 ... 0.37378085 -0.78685606 0.4435513 ] [-1.4390564 0.47354662 0.7901276 ... -0.52704185 0.5220969 -0.3082417 ] [ 1.2765652 0.64546853 -0.8713707 ... 1.0028418 -1.7033708 -0.1815606 ]] [[ 0.78390384 -2.0930228 -0.01450756 ... -0.35329354 0.1516608 0.08302183] [ 1.055682 -1.9199935 -1.6493464 ... -0.40246344 0.07431997 1.1811507 ] [-2.1086512 1.5725513 -1.3941475 ... 0.4534923 0.27335104 0.73327875] ... [-0.13109289 0.20312673 0.36439675 ... 1.4594393 1.1465073 -0.04753165] [-1.6205362 -0.5269577 -1.617235 ... -0.12615152 -0.05697057 -0.13401055] [ 0.19237171 -2.2359462 0.225112 ... -0.9021519 -0.21791975 0.44774687]] ... [[-0.8392345 0.4697776 -1.9149364 ... 0.85493356 0.36869386 -0.22113211] [-2.079127 0.01107824 -0.65201616 ... 
1.7051337 1.8970523 -0.6309965 ] [ 0.45467275 0.2350548 0.4194602 ... 0.28929892 -1.5339818 -0.87254405] ... [-1.5851679 -2.0772765 1.0382539 ... -2.1636963 1.0963423 -0.9444993 ] [-0.8261886 0.3969104 2.1238909 ... 0.0175021 0.50048417 -0.13661121] [ 0.5775003 -0.18778536 0.8481822 ... -0.21790756 -1.3491919 -1.4979347 ]] [[ 1.422503 1.2243053 0.37794983 ... 1.0040575 0.01101909 -0.75064117] [ 0.6617682 1.177363 1.5707222 ... 0.7463081 0.44229823 -0.3624732 ] [-1.5074724 1.1487329 0.4564224 ... -1.725851 -0.0944008 -1.8395323 ] ... [-0.28366336 -0.13182306 -0.06392279 ... -0.51232195 -0.26622224 -2.6291037 ] [ 1.0736568 -0.62596935 0.18665953 ... -0.8885738 -0.54828286 -0.2839849 ] [ 1.1130828 -0.94337726 0.6326632 ... -0.73166674 0.22833715 1.1096494 ]] [[-0.0798627 0.02426942 -0.5511925 ... 0.7061503 1.0021571 1.5846736 ] [ 1.6556715 -0.01627463 1.4047136 ... -1.5169322 -0.47832465 1.4016105 ] [-0.41435227 -1.9679964 0.55513453 ... -0.13254057 -0.45313203 -0.7022678 ] ... [-0.9929682 0.746191 -0.8308163 ... -1.6851116 0.43445715 0.8895936 ] [ 0.45832977 -0.05943603 0.61683834 ... 0.8072432 0.7595749 -2.0717056 ] [ 0.40430707 -0.36424556 0.3523891 ... 2.0499923 0.23512618 2.2473783 ]]]]; ov_res: [[[[ 2.4671779 -0.31380573 -1.2058524 ... 0.85880846 1.3117398 1.7936771 ] [-0.9879277 0.6911245 -1.3275167 ... -0.38169938 -0.7733776 -0.37240243] [ 0.9718477 0.13870762 0.25105816 ... -0.61954254 0.9440088 1.1975015 ] ... [-0.31395376 0.18268746 0.11960965 ... 0.6496506 0.54286474 0.627335 ] [ 0.3078789 1.1736262 1.2596216 ... -0.25740093 -0.96821874 -0.93120813] [ 1.4605016 -0.65088165 2.1424615 ... -0.36614877 -2.0266883 0.03906069]] [[ 1.1238338 -1.0225298 -1.5202514 ... 0.795213 0.24419746 -0.16147459] [-0.22300948 0.34058768 -1.7276438 ... -0.10660526 -0.38529718 0.6401401 ] [-0.56484663 -0.6984503 -2.0085135 ... 0.40945923 -0.90100205 2.4170356 ] ... [ 0.8253981 0.22151127 0.32382703 ... 0.37378085 -0.78685606 0.4435513 ] [-1.4390564 0.47354662 0.7901276 ... 
-0.52704185 0.5220969 -0.3082417 ] [ 1.2765652 0.64546853 -0.8713707 ... 1.0028418 -1.7033708 -0.1815606 ]] [[ 0.78390384 -2.0930228 -0.01450756 ... -0.35329354 0.1516608 0.08302183] [ 1.055682 -1.9199935 -1.6493464 ... -0.40246344 0.07431997 1.1811507 ] [-2.1086512 1.5725513 -1.3941475 ... 0.4534923 0.27335104 0.73327875] ... [-0.13109289 0.20312673 0.36439675 ... 1.4594393 1.1465073 -0.04753165] [-1.6205362 -0.5269577 -1.617235 ... -0.12615152 -0.05697057 -0.13401055] [ 0.19237171 -2.2359462 0.225112 ... -0.9021519 -0.21791975 0.44774687]] ... [[-0.8392345 0.4697776 -1.9149364 ... 0.85493356 0.36869386 -0.22113211] [-2.079127 0.01107824 -0.65201616 ... 1.7051337 1.8970523 -0.6309965 ] [ 0.45467275 0.2350548 0.4194602 ... 0.28929892 -1.5339818 -0.87254405] ... [-1.5851679 -2.0772765 1.0382539 ... -2.1636963 1.0963423 -0.9444993 ] [-0.8261886 0.3969104 2.1238909 ... 0.0175021 0.50048417 -0.13661121] [ 0.5775003 -0.18778536 0.8481822 ... -0.21790756 -1.3491919 -1.4979347 ]] [[ 1.422503 1.2243053 0.37794983 ... 1.0040575 0.01101909 -0.75064117] [ 0.6617682 1.177363 1.5707222 ... 0.7463081 0.44229823 -0.3624732 ] [-1.5074724 1.1487329 0.4564224 ... -1.725851 -0.0944008 -1.8395323 ] ... [-0.28366336 -0.13182306 -0.06392279 ... -0.51232195 -0.26622224 -2.6291037 ] [ 1.0736568 -0.62596935 0.18665953 ... -0.8885738 -0.54828286 -0.2839849 ] [ 1.1130828 -0.94337726 0.6326632 ... -0.73166674 0.22833715 1.1096494 ]] [[-0.0798627 0.02426942 -0.5511925 ... 0.7061503 1.0021571 1.5846736 ] [ 1.6556715 -0.01627463 1.4047136 ... -1.5169322 -0.47832465 1.4016105 ] [-0.41435227 -1.9679964 0.55513453 ... -0.13254057 -0.45313203 -0.7022678 ] ... [-0.9929682 0.746191 -0.8308163 ... -1.6851116 0.43445715 0.8895936 ] [ 0.45832977 -0.05943603 0.61683834 ... 0.8072432 0.7595749 -2.0717056 ] [ 0.40430707 -0.36424556 0.3523891 ... 2.0499923 0.23512618 2.2473783 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-1 - params:([2, 2, 2, 2, 2], 1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6068.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-1]() %self.axis : int = prim::Constant[value=1]() %self.split : int[] = prim::Constant[value=[2, 2, 2, 2, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-0.42790505 1.7482255 -0.44882226 ... 0.72851604 -0.75967234 0.9602149 ] [-2.1917872 0.6973029 0.14086942 ... -0.11934798 0.69209087 2.5665143 ] [-0.4130968 -0.23442318 2.271897 ... 0.2901999 -0.38182682 0.14627343] ... [-0.6103395 -0.58090687 2.3115108 ... -0.46199578 1.60213 1.4838946 ] [-0.88109124 1.9947909 0.6545146 ... -1.8650655 0.18884292 -1.8787491 ] [-0.1098055 0.8418928 0.8531892 ... 0.22860393 0.7928415 -0.31770515]] [[-1.716101 -0.51293707 -0.01063177 ... -0.97751147 0.9736997 0.59532094] [ 1.0222367 -0.68997127 -0.1646396 ... -0.5615733 0.57410717 -0.70543647] [ 2.0471487 0.87492275 0.5270804 ... -0.7750164 -1.824929 0.12687188] ... [ 0.8559523 0.8348471 -0.79874784 ... 0.9836459 0.5916714 1.0857711 ] [ 0.05645883 1.3057992 0.6409291 ... 0.25574613 -0.0502516 0.18839099] [-0.16499992 0.0316806 1.1017793 ... 0.33454892 0.8639269 -0.7876814 ]]]]; ov_res: [[[[-0.42790505 1.7482255 -0.44882226 ... 0.72851604 -0.75967234 0.9602149 ] [-2.1917872 0.6973029 0.14086942 ... -0.11934798 0.69209087 2.5665143 ] [-0.4130968 -0.23442318 2.271897 ... 0.2901999 -0.38182682 0.14627343] ... [-0.6103395 -0.58090687 2.3115108 ... -0.46199578 1.60213 1.4838946 ] [-0.88109124 1.9947909 0.6545146 ... -1.8650655 0.18884292 -1.8787491 ] [-0.1098055 0.8418928 0.8531892 ... 0.22860393 0.7928415 -0.31770515]] [[-1.716101 -0.51293707 -0.01063177 ... 
-0.97751147 0.9736997 0.59532094] [ 1.0222367 -0.68997127 -0.1646396 ... -0.5615733 0.57410717 -0.70543647] [ 2.0471487 0.87492275 0.5270804 ... -0.7750164 -1.824929 0.12687188] ... [ 0.8559523 0.8348471 -0.79874784 ... 0.9836459 0.5916714 1.0857711 ] [ 0.05645883 1.3057992 0.6409291 ... 0.25574613 -0.0502516 0.18839099] [-0.16499992 0.0316806 1.1017793 ... 0.33454892 0.8639269 -0.7876814 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-1 - params:([200, 20, 1, 1, 2], 2) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6070.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=-1]() %self.axis : int = prim::Constant[value=2]() %self.split : int[] = prim::Constant[value=[200, 20, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-0.9437761 0.3959031 -1.2209744 ... 0.17303321 -0.54790914 -2.2945237 ] [ 2.426164 -0.5540071 -1.4677924 ... 0.2775896 -0.8332989 -0.9511703 ]] [[-1.7460384 -0.1034274 -0.9147868 ... -0.8378577 0.81437457 1.2801603 ] [-1.0825248 0.4858567 1.2558519 ... -0.07630858 1.0576533 -1.5471295 ]] [[ 0.17399949 0.8657521 -0.08334431 ... -0.06487431 0.82491404 -1.1810504 ] [ 0.7076122 2.1742873 -0.7512701 ... 0.23267986 0.06264018 -1.2297208 ]] ... [[ 1.241958 -0.82233006 -0.4320908 ... -2.8087323 0.1958369 -0.9216472 ] [ 0.09094257 -0.40249088 0.56526554 ... -0.5227561 1.779923 -0.2764657 ]] [[ 0.7657618 0.63486904 1.3157359 ... 0.36333862 1.3587886 -1.9413508 ] [ 0.6486588 -1.7731948 -0.19065645 ... -0.25252795 -0.16418742 1.5992627 ]] [[-0.7639047 -1.282439 0.27111363 ... 0.2563702 -1.8637908 -2.1631258 ] [ 1.2985705 0.07751519 1.1850183 ... -0.79677147 1.7159258 -1.0726957 ]]]]; ov_res: [[[[-0.9437761 0.3959031 -1.2209744 ... 0.17303321 -0.54790914 -2.2945237 ] [ 2.426164 -0.5540071 -1.4677924 ... 0.2775896 -0.8332989 -0.9511703 ]] [[-1.7460384 -0.1034274 -0.9147868 ... -0.8378577 0.81437457 1.2801603 ] [-1.0825248 0.4858567 1.2558519 ... -0.07630858 1.0576533 -1.5471295 ]] [[ 0.17399949 0.8657521 -0.08334431 ... -0.06487431 0.82491404 -1.1810504 ] [ 0.7076122 2.1742873 -0.7512701 ... 0.23267986 0.06264018 -1.2297208 ]] ... [[ 1.241958 -0.82233006 -0.4320908 ... 
-2.8087323 0.1958369 -0.9216472 ] [ 0.09094257 -0.40249088 0.56526554 ... -0.5227561 1.779923 -0.2764657 ]] [[ 0.7657618 0.63486904 1.3157359 ... 0.36333862 1.3587886 -1.9413508 ] [ 0.6486588 -1.7731948 -0.19065645 ... -0.25252795 -0.16418742 1.5992627 ]] [[-0.7639047 -1.282439 0.27111363 ... 0.2563702 -1.8637908 -2.1631258 ] [ 1.2985705 0.07751519 1.1850183 ... -0.79677147 1.7159258 -1.0726957 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:-1 - params:([20, 200, 1, 1, 2], -1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6072.aten_split, %input.1 : Tensor): %self.axis : int = prim::Constant[value=-1]() %self.split : int[] = prim::Constant[value=[20, 200, 1, 1, 2]]() %4 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %5 : Tensor = aten::__getitem__(%4, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%5) fw_re: [[[[ 0.40552506 1.3118923 ] [ 0.09588203 -1.5843436 ] [ 0.4202262 1.0097592 ] ... [-0.9610977 -1.1281469 ] [ 0.10025299 1.2300314 ] [-1.3797017 0.8127442 ]] [[-1.0066016 -0.8478004 ] [ 0.5438946 1.3496623 ] [ 0.7980089 0.33424458] ... [ 0.61415994 -0.46413594] [-1.7503546 -0.79842347] [-0.41001752 1.2520045 ]] [[-0.4558544 0.9277187 ] [-1.1871827 1.0266595 ] [-1.4611758 0.3809886 ] ... [ 1.8185862 0.4585599 ] [-2.0841095 0.23432374] [ 2.0551472 0.2877228 ]] ... [[ 0.43260434 1.409709 ] [ 0.40859193 -0.068039 ] [ 0.71145386 -1.2257271 ] ... [ 0.07816301 -0.3043443 ] [-1.3347877 0.09704506] [-0.3181051 0.619362 ]] [[-0.44282156 -0.898741 ] [ 1.2614155 -2.1789436 ] [-0.768865 -0.2540633 ] ... [-1.243892 -0.75534904] [ 1.7344143 -0.19775763] [-0.02298442 -0.44642484]] [[ 2.2875586 -0.26266634] [-0.4196385 -0.00441294] [-0.18265848 -0.8799831 ] ... [-1.1095698 -1.6554557 ] [ 0.65800464 -0.2925275 ] [ 1.2496754 0.14698243]]]]; ov_res: [[[[ 0.40552506 1.3118923 ] [ 0.09588203 -1.5843436 ] [ 0.4202262 1.0097592 ] ... [-0.9610977 -1.1281469 ] [ 0.10025299 1.2300314 ] [-1.3797017 0.8127442 ]] [[-1.0066016 -0.8478004 ] [ 0.5438946 1.3496623 ] [ 0.7980089 0.33424458] ... [ 0.61415994 -0.46413594] [-1.7503546 -0.79842347] [-0.41001752 1.2520045 ]] [[-0.4558544 0.9277187 ] [-1.1871827 1.0266595 ] [-1.4611758 0.3809886 ] ... [ 1.8185862 0.4585599 ] [-2.0841095 0.23432374] [ 2.0551472 0.2877228 ]] ... 
[[ 0.43260434 1.409709 ] [ 0.40859193 -0.068039 ] [ 0.71145386 -1.2257271 ] ... [ 0.07816301 -0.3043443 ] [-1.3347877 0.09704506] [-0.3181051 0.619362 ]] [[-0.44282156 -0.898741 ] [ 1.2614155 -2.1789436 ] [-0.768865 -0.2540633 ] ... [-1.243892 -0.75534904] [ 1.7344143 -0.19775763] [-0.02298442 -0.44642484]] [[ 2.2875586 -0.26266634] [-0.4196385 -0.00441294] [-0.18265848 -0.8799831 ] ... [-1.1095698 -1.6554557 ] [ 0.65800464 -0.2925275 ] [ 1.2496754 0.14698243]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:0 - params:(2, 1) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6074.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=0]() %self.axis : int = prim::Constant[value=1]() %self.split : int = prim::Constant[value=2]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-0.30965286 0.40784228 -0.00454325 ... 1.2219368 0.53383064 -1.9677263 ] [-0.5256463 -0.942698 0.01426376 ... 0.11235603 -1.26596 -0.00409338] [-1.2297138 -1.11901 -0.11193931 ... -0.1477242 -1.7813114 -1.5895396 ] ... [-0.33347583 -0.7282614 1.2156786 ... -0.98553836 0.7115685 1.0017682 ] [ 0.7481919 0.08121731 0.20674151 ... 0.01722719 -0.9332553 0.1379694 ] [ 1.9259918 0.6429593 0.02004929 ... 1.5505966 -0.6076963 -0.5546924 ]] [[-1.3954606 -0.7226326 -0.2569309 ... -1.8443614 1.025244 1.2414646 ] [ 0.6151684 -0.01491814 0.9489119 ... 0.362319 -0.3554274 0.7626986 ] [ 1.0161242 0.5777339 0.3038212 ... 0.24831714 0.66008747 -2.398849 ] ... [ 0.13129237 -0.8961196 0.7828614 ... 0.04796856 -0.7989928 0.00768787] [ 0.29734486 0.08512007 1.5738028 ... 1.898432 1.1969254 -0.90415883] [-0.9766759 1.2540097 0.898712 ... 0.06361769 -0.4519607 -0.1958383 ]]]]; ov_res: [[[[-0.30965286 0.40784228 -0.00454325 ... 1.2219368 0.53383064 -1.9677263 ] [-0.5256463 -0.942698 0.01426376 ... 0.11235603 -1.26596 -0.00409338] [-1.2297138 -1.11901 -0.11193931 ... -0.1477242 -1.7813114 -1.5895396 ] ... [-0.33347583 -0.7282614 1.2156786 ... -0.98553836 0.7115685 1.0017682 ] [ 0.7481919 0.08121731 0.20674151 ... 0.01722719 -0.9332553 0.1379694 ] [ 1.9259918 0.6429593 0.02004929 ... 1.5505966 -0.6076963 -0.5546924 ]] [[-1.3954606 -0.7226326 -0.2569309 ... 
-1.8443614 1.025244 1.2414646 ] [ 0.6151684 -0.01491814 0.9489119 ... 0.362319 -0.3554274 0.7626986 ] [ 1.0161242 0.5777339 0.3038212 ... 0.24831714 0.66008747 -2.398849 ] ... [ 0.13129237 -0.8961196 0.7828614 ... 0.04796856 -0.7989928 0.00768787] [ 0.29734486 0.08512007 1.5738028 ... 1.898432 1.1969254 -0.90415883] [-0.9766759 1.2540097 0.898712 ... 0.06361769 -0.4519607 -0.1958383 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:0 - params:(45, 2) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6076.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=0]() %self.axis : int = prim::Constant[value=2]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 1.11860685e-01 -1.26783407e+00 -8.47752273e-01 ... -3.81686509e-01 -5.10083497e-01 -1.07953691e+00] [-3.45250726e-01 -3.66135776e-01 1.36643231e+00 ... 1.02298641e+00 -6.46011904e-02 -2.13203385e-01] [ 3.64231199e-01 -1.75905049e+00 -1.18364863e-01 ... 6.78088069e-02 2.87084699e-01 1.04391325e+00] ... [-1.19358552e+00 -5.37547350e-01 4.68981355e-01 ... -1.31791258e+00 -5.61643064e-01 1.22556444e-02] [-2.98473774e-03 -1.49850941e+00 -1.71599889e+00 ... 1.52169776e+00 1.34724104e+00 1.69910514e+00] [-9.17096376e-01 5.94980657e-01 4.68917012e-01 ... 5.38369179e-01 -7.16767535e-02 1.36879063e+00]] [[-3.39446396e-01 -1.54185593e+00 8.31308067e-02 ... 1.16677475e+00 5.07701039e-02 5.60686290e-01] [ 9.92720783e-01 -1.22848988e+00 5.64257979e-01 ... -6.86085522e-01 -5.54635227e-01 -4.64404523e-01] [-5.77881485e-02 4.03089017e-01 1.36146426e-01 ... 1.54438841e+00 -5.77064455e-01 -4.48416710e-01] ... [ 7.66628683e-01 -9.48632479e-01 3.36841553e-01 ... -5.40748179e-01 -2.94721752e-01 9.12197292e-01] [-3.94920379e-01 -5.73896505e-02 4.21848506e-01 ... 8.09894353e-02 6.55808225e-02 -3.29865515e-01] [-1.31682813e-01 -1.12192559e+00 7.01915145e-01 ... -1.03425074e+00 5.00879347e-01 -9.22817767e-01]] [[-3.68269801e-01 3.94000202e-01 5.72899461e-01 ... 1.05014455e+00 -4.91601467e-01 1.60497367e-01] [-2.83199906e-01 -9.93090093e-01 7.51386523e-01 ... 
7.28181243e-01 -1.35606086e+00 -1.61175624e-01] [ 9.59247649e-02 2.78426290e-01 -1.44676995e+00 ... -5.25544643e-01 5.80108881e-01 -3.48660916e-01] ... [-8.12982321e-01 8.72534692e-01 2.20969260e-01 ... -1.58926442e-01 -1.57116079e+00 -5.08382916e-01] [-8.30800533e-01 3.44169378e-01 -2.03557730e+00 ... -9.89079177e-02 -2.25075319e-01 -2.74481118e-01] [-7.46270537e-01 2.67135620e-01 1.54499793e+00 ... -1.32934153e+00 9.56394076e-01 9.10395563e-01]] ... [[-2.39907116e-01 9.88550857e-02 -1.51807740e-01 ... -8.11642706e-01 -4.69726980e-01 1.70391643e+00] [ 2.76938260e-01 5.57013988e-01 1.43114960e+00 ... -1.19104922e+00 -8.00315201e-01 -1.57800257e+00] [ 6.10774279e-01 -5.49928308e-01 -1.25581875e-01 ... 1.21927345e+00 -8.94405663e-01 -1.82150042e+00] ... [-6.85283780e-01 1.05704439e+00 9.86230552e-01 ... -2.74975568e-01 -1.88724267e+00 -8.22624803e-01] [-7.38316059e-01 -1.98174760e-01 6.19585812e-01 ... 2.62057006e-01 3.79840136e-01 -9.61209714e-01] [ 5.03303707e-01 -1.86425090e+00 -3.92601252e-01 ... 1.32769430e+00 7.53405988e-01 -8.67824256e-01]] [[-1.48891139e+00 8.41996610e-01 -6.03858046e-02 ... 5.88877738e-01 1.10371709e+00 -8.16859901e-01] [ 1.12009300e-02 1.61640537e+00 -4.42736596e-01 ... -1.82892299e+00 -3.47012520e-01 9.52314556e-01] [-3.32836986e-01 -4.69888300e-02 5.63309677e-02 ... 1.25543499e+00 -1.16615260e+00 2.41521448e-01] ... [ 1.21851194e+00 -1.49150276e+00 -5.29739916e-01 ... -5.81280172e-01 6.42458303e-03 1.90538263e+00] [-1.13922842e-01 -7.59829938e-01 1.33191156e+00 ... -8.66458774e-01 -4.20614660e-01 5.67101598e-01] [-9.18495655e-01 8.07302296e-02 -1.29634470e-01 ... 5.63578963e-01 3.24642152e-01 5.27819335e-01]] [[ 7.55658490e-04 1.20469248e+00 1.03575325e+00 ... 1.06108785e-01 1.29197627e-01 3.75554800e-01] [ 9.94661868e-01 -9.28992212e-01 -3.72650802e-01 ... -6.00701749e-01 -1.25509715e+00 2.12274358e-01] [-9.99073207e-01 -7.00969547e-02 1.41913211e+00 ... -8.96725476e-01 -5.91232516e-02 3.25086880e+00] ... 
[-2.58951163e+00 -3.56828541e-01 -3.35763603e-01 ... 1.47572732e+00 -6.21219099e-01 -8.99116695e-02] [ 2.30963640e-02 -3.88310105e-01 1.13797438e+00 ... -3.67408246e-01 -5.58791578e-01 9.67615247e-02] [ 1.90574908e+00 -5.98148823e-01 -4.58590269e-01 ... -3.85392487e-01 1.01849830e+00 -1.88936532e+00]]]]; ov_res: [[[[ 1.11860685e-01 -1.26783407e+00 -8.47752273e-01 ... -3.81686509e-01 -5.10083497e-01 -1.07953691e+00] [-3.45250726e-01 -3.66135776e-01 1.36643231e+00 ... 1.02298641e+00 -6.46011904e-02 -2.13203385e-01] [ 3.64231199e-01 -1.75905049e+00 -1.18364863e-01 ... 6.78088069e-02 2.87084699e-01 1.04391325e+00] ... [-1.19358552e+00 -5.37547350e-01 4.68981355e-01 ... -1.31791258e+00 -5.61643064e-01 1.22556444e-02] [-2.98473774e-03 -1.49850941e+00 -1.71599889e+00 ... 1.52169776e+00 1.34724104e+00 1.69910514e+00] [-9.17096376e-01 5.94980657e-01 4.68917012e-01 ... 5.38369179e-01 -7.16767535e-02 1.36879063e+00]] [[-3.39446396e-01 -1.54185593e+00 8.31308067e-02 ... 1.16677475e+00 5.07701039e-02 5.60686290e-01] [ 9.92720783e-01 -1.22848988e+00 5.64257979e-01 ... -6.86085522e-01 -5.54635227e-01 -4.64404523e-01] [-5.77881485e-02 4.03089017e-01 1.36146426e-01 ... 1.54438841e+00 -5.77064455e-01 -4.48416710e-01] ... [ 7.66628683e-01 -9.48632479e-01 3.36841553e-01 ... -5.40748179e-01 -2.94721752e-01 9.12197292e-01] [-3.94920379e-01 -5.73896505e-02 4.21848506e-01 ... 8.09894353e-02 6.55808225e-02 -3.29865515e-01] [-1.31682813e-01 -1.12192559e+00 7.01915145e-01 ... -1.03425074e+00 5.00879347e-01 -9.22817767e-01]] [[-3.68269801e-01 3.94000202e-01 5.72899461e-01 ... 1.05014455e+00 -4.91601467e-01 1.60497367e-01] [-2.83199906e-01 -9.93090093e-01 7.51386523e-01 ... 7.28181243e-01 -1.35606086e+00 -1.61175624e-01] [ 9.59247649e-02 2.78426290e-01 -1.44676995e+00 ... -5.25544643e-01 5.80108881e-01 -3.48660916e-01] ... [-8.12982321e-01 8.72534692e-01 2.20969260e-01 ... -1.58926442e-01 -1.57116079e+00 -5.08382916e-01] [-8.30800533e-01 3.44169378e-01 -2.03557730e+00 ... 
-9.89079177e-02 -2.25075319e-01 -2.74481118e-01] [-7.46270537e-01 2.67135620e-01 1.54499793e+00 ... -1.32934153e+00 9.56394076e-01 9.10395563e-01]] ... [[-2.39907116e-01 9.88550857e-02 -1.51807740e-01 ... -8.11642706e-01 -4.69726980e-01 1.70391643e+00] [ 2.76938260e-01 5.57013988e-01 1.43114960e+00 ... -1.19104922e+00 -8.00315201e-01 -1.57800257e+00] [ 6.10774279e-01 -5.49928308e-01 -1.25581875e-01 ... 1.21927345e+00 -8.94405663e-01 -1.82150042e+00] ... [-6.85283780e-01 1.05704439e+00 9.86230552e-01 ... -2.74975568e-01 -1.88724267e+00 -8.22624803e-01] [-7.38316059e-01 -1.98174760e-01 6.19585812e-01 ... 2.62057006e-01 3.79840136e-01 -9.61209714e-01] [ 5.03303707e-01 -1.86425090e+00 -3.92601252e-01 ... 1.32769430e+00 7.53405988e-01 -8.67824256e-01]] [[-1.48891139e+00 8.41996610e-01 -6.03858046e-02 ... 5.88877738e-01 1.10371709e+00 -8.16859901e-01] [ 1.12009300e-02 1.61640537e+00 -4.42736596e-01 ... -1.82892299e+00 -3.47012520e-01 9.52314556e-01] [-3.32836986e-01 -4.69888300e-02 5.63309677e-02 ... 1.25543499e+00 -1.16615260e+00 2.41521448e-01] ... [ 1.21851194e+00 -1.49150276e+00 -5.29739916e-01 ... -5.81280172e-01 6.42458303e-03 1.90538263e+00] [-1.13922842e-01 -7.59829938e-01 1.33191156e+00 ... -8.66458774e-01 -4.20614660e-01 5.67101598e-01] [-9.18495655e-01 8.07302296e-02 -1.29634470e-01 ... 5.63578963e-01 3.24642152e-01 5.27819335e-01]] [[ 7.55658490e-04 1.20469248e+00 1.03575325e+00 ... 1.06108785e-01 1.29197627e-01 3.75554800e-01] [ 9.94661868e-01 -9.28992212e-01 -3.72650802e-01 ... -6.00701749e-01 -1.25509715e+00 2.12274358e-01] [-9.99073207e-01 -7.00969547e-02 1.41913211e+00 ... -8.96725476e-01 -5.91232516e-02 3.25086880e+00] ... [-2.58951163e+00 -3.56828541e-01 -3.35763603e-01 ... 1.47572732e+00 -6.21219099e-01 -8.99116695e-02] [ 2.30963640e-02 -3.88310105e-01 1.13797438e+00 ... -3.67408246e-01 -5.58791578e-01 9.67615247e-02] [ 1.90574908e+00 -5.98148823e-01 -4.58590269e-01 ... -3.85392487e-01 1.01849830e+00 -1.88936532e+00]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:0 - params:(45, -1) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6078.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=0]() %self.axis : int = prim::Constant[value=-1]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.29579285 -0.27161777 0.545814 ... -0.43667948 -0.6143949 1.0828582 ] [ 0.42818123 -0.21678372 -0.68791264 ... 0.13961262 0.11106079 1.0309482 ] [-1.5616459 -0.91207755 -0.01252022 ... -0.1632401 1.2790029 1.7193222 ] ... [ 0.29447237 -0.6516466 0.5164164 ... 0.07406355 -1.4231954 -0.08182559] [-0.2635375 0.64135575 0.58355236 ... -0.1381378 -0.04051376 1.0271533 ] [ 1.2305372 -0.34940714 -1.2292603 ... 0.85232997 0.74254185 -0.566551 ]] [[-0.45241877 1.0375674 -1.8169165 ... -0.7446951 -0.3192891 2.6621299 ] [-0.3494809 1.6244495 -0.52225256 ... -1.9235348 0.98350996 -0.69592625] [ 0.69118255 1.0214403 -0.3430001 ... -0.32914886 -1.3129467 -0.5713061 ] ... [-0.18490149 3.1427755 0.71365595 ... 1.8730724 2.0392349 1.2828641 ] [ 0.5714145 0.31516 0.2355424 ... -0.8848369 -1.1254916 0.84645396] [-0.61202544 0.32568473 0.24562447 ... 0.415093 -1.8077021 1.0524207 ]] [[-1.2085183 1.2910787 -1.9588413 ... -0.2525713 0.2109978 -1.3245407 ] [ 0.8825593 0.62256175 -0.61150837 ... 0.6349955 1.7468684 -0.55913216] [-1.4382842 -0.76848507 1.1571363 ... -0.09645347 1.8021188 -0.8658999 ] ... [-0.22323725 -0.59889203 1.9799443 ... 0.02484552 -0.1892779 0.2220085 ] [ 0.03648423 0.81414574 -0.5224373 ... 1.5308394 0.2234496 0.41550353] [ 1.1785886 1.4881719 1.4047589 ... -0.09908409 1.11255 -0.4457146 ]] ... [[-2.0360193 1.659234 -0.890119 ... 
0.6881216 -1.1521753 -2.4202754 ] [-0.6692642 2.1894336 -1.2507247 ... 1.30445 0.6554853 0.5892745 ] [ 0.15375656 0.24985418 0.71886516 ... 0.2856653 0.843773 -1.2424401 ] ... [-0.42275104 1.3126419 0.56470877 ... -0.05079125 0.55040246 0.95291185] [-0.39276657 0.8613734 1.9762901 ... -2.0497174 -0.8821244 -1.2230072 ] [-0.02569782 1.2252393 -0.28582862 ... 0.32692122 1.3348625 -0.23123404]] [[-0.939748 0.9422023 -1.3015789 ... 0.1981399 1.6898241 0.16722885] [ 0.69181055 -0.45763704 0.94903964 ... 0.62103224 0.38282472 2.1838675 ] [ 0.9019757 1.0553956 0.6292957 ... 0.03419445 1.5549594 1.5020812 ] ... [ 0.5158527 -0.31790024 0.4476728 ... -0.3969772 0.19068456 -0.81191325] [-0.06213399 0.26843554 0.18887676 ... 0.734369 0.66457534 -1.160047 ] [-0.00404967 -0.16504487 -1.0817863 ... 0.2211294 0.80406487 -0.04994955]] [[-0.1896519 0.02065854 -1.1732432 ... 0.9693456 2.1109326 -0.28806937] [-0.60922176 -1.7282026 -0.25762412 ... 0.6233411 -1.6114054 0.376152 ] [ 0.22822079 1.158972 2.2993808 ... 0.8146304 0.6091994 0.08184896] ... [-2.7024348 0.8785931 -0.903128 ... 0.21122003 0.99021107 1.2252688 ] [-0.5676584 1.6196694 -0.73497754 ... -0.29581913 -1.4088311 0.38334152] [-0.9076393 0.00413101 -1.269402 ... -0.6915618 1.5887482 -1.2274445 ]]]]; ov_res: [[[[ 0.29579285 -0.27161777 0.545814 ... -0.43667948 -0.6143949 1.0828582 ] [ 0.42818123 -0.21678372 -0.68791264 ... 0.13961262 0.11106079 1.0309482 ] [-1.5616459 -0.91207755 -0.01252022 ... -0.1632401 1.2790029 1.7193222 ] ... [ 0.29447237 -0.6516466 0.5164164 ... 0.07406355 -1.4231954 -0.08182559] [-0.2635375 0.64135575 0.58355236 ... -0.1381378 -0.04051376 1.0271533 ] [ 1.2305372 -0.34940714 -1.2292603 ... 0.85232997 0.74254185 -0.566551 ]] [[-0.45241877 1.0375674 -1.8169165 ... -0.7446951 -0.3192891 2.6621299 ] [-0.3494809 1.6244495 -0.52225256 ... -1.9235348 0.98350996 -0.69592625] [ 0.69118255 1.0214403 -0.3430001 ... -0.32914886 -1.3129467 -0.5713061 ] ... [-0.18490149 3.1427755 0.71365595 ... 
1.8730724 2.0392349 1.2828641 ] [ 0.5714145 0.31516 0.2355424 ... -0.8848369 -1.1254916 0.84645396] [-0.61202544 0.32568473 0.24562447 ... 0.415093 -1.8077021 1.0524207 ]] [[-1.2085183 1.2910787 -1.9588413 ... -0.2525713 0.2109978 -1.3245407 ] [ 0.8825593 0.62256175 -0.61150837 ... 0.6349955 1.7468684 -0.55913216] [-1.4382842 -0.76848507 1.1571363 ... -0.09645347 1.8021188 -0.8658999 ] ... [-0.22323725 -0.59889203 1.9799443 ... 0.02484552 -0.1892779 0.2220085 ] [ 0.03648423 0.81414574 -0.5224373 ... 1.5308394 0.2234496 0.41550353] [ 1.1785886 1.4881719 1.4047589 ... -0.09908409 1.11255 -0.4457146 ]] ... [[-2.0360193 1.659234 -0.890119 ... 0.6881216 -1.1521753 -2.4202754 ] [-0.6692642 2.1894336 -1.2507247 ... 1.30445 0.6554853 0.5892745 ] [ 0.15375656 0.24985418 0.71886516 ... 0.2856653 0.843773 -1.2424401 ] ... [-0.42275104 1.3126419 0.56470877 ... -0.05079125 0.55040246 0.95291185] [-0.39276657 0.8613734 1.9762901 ... -2.0497174 -0.8821244 -1.2230072 ] [-0.02569782 1.2252393 -0.28582862 ... 0.32692122 1.3348625 -0.23123404]] [[-0.939748 0.9422023 -1.3015789 ... 0.1981399 1.6898241 0.16722885] [ 0.69181055 -0.45763704 0.94903964 ... 0.62103224 0.38282472 2.1838675 ] [ 0.9019757 1.0553956 0.6292957 ... 0.03419445 1.5549594 1.5020812 ] ... [ 0.5158527 -0.31790024 0.4476728 ... -0.3969772 0.19068456 -0.81191325] [-0.06213399 0.26843554 0.18887676 ... 0.734369 0.66457534 -1.160047 ] [-0.00404967 -0.16504487 -1.0817863 ... 0.2211294 0.80406487 -0.04994955]] [[-0.1896519 0.02065854 -1.1732432 ... 0.9693456 2.1109326 -0.28806937] [-0.60922176 -1.7282026 -0.25762412 ... 0.6233411 -1.6114054 0.376152 ] [ 0.22822079 1.158972 2.2993808 ... 0.8146304 0.6091994 0.08184896] ... [-2.7024348 0.8785931 -0.903128 ... 0.21122003 0.99021107 1.2252688 ] [-0.5676584 1.6196694 -0.73497754 ... -0.29581913 -1.4088311 0.38334152] [-0.9076393 0.00413101 -1.269402 ... -0.6915618 1.5887482 -1.2274445 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:0 - params:([2, 2, 2, 2, 2], 1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6080.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=0]() %self.axis : int = prim::Constant[value=1]() %self.split : int[] = prim::Constant[value=[2, 2, 2, 2, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-0.36730826 0.2957746 -1.2732786 ... 0.70487636 0.55282205 0.39750066] [-0.8592896 -0.20884949 0.1601233 ... -0.75289434 -1.2511101 -1.3526447 ] [ 1.0535638 0.9074071 1.7300711 ... 0.53343236 0.2606407 -1.1467788 ] ... [ 0.9548861 -1.0581967 -1.4649774 ... 0.55364794 -0.12659271 0.23814036] [ 0.9914914 -0.07757501 -0.9431425 ... -0.3574313 0.46447536 0.05206005] [ 0.7315062 0.24419051 -1.4269984 ... 1.6346729 -1.1358508 0.04205525]] [[ 0.22275221 -1.3967385 0.6548045 ... 0.11777848 2.3832355 1.1362197 ] [ 2.9451642 -0.8552874 0.5600742 ... -0.6509959 0.85611165 0.84562975] [-1.2686057 0.29792148 1.1753434 ... -0.3449039 -0.64424443 0.34960628] ... [-2.0603848 -0.95811385 0.09982914 ... -0.96080464 0.4586511 0.17287308] [-1.4018575 1.7519217 -1.2896757 ... 1.6952158 -0.8836498 0.4156405 ] [ 2.1442294 0.12724562 -0.99451435 ... 0.6703605 1.147418 0.5928092 ]]]]; ov_res: [[[[-0.36730826 0.2957746 -1.2732786 ... 0.70487636 0.55282205 0.39750066] [-0.8592896 -0.20884949 0.1601233 ... -0.75289434 -1.2511101 -1.3526447 ] [ 1.0535638 0.9074071 1.7300711 ... 0.53343236 0.2606407 -1.1467788 ] ... [ 0.9548861 -1.0581967 -1.4649774 ... 0.55364794 -0.12659271 0.23814036] [ 0.9914914 -0.07757501 -0.9431425 ... -0.3574313 0.46447536 0.05206005] [ 0.7315062 0.24419051 -1.4269984 ... 1.6346729 -1.1358508 0.04205525]] [[ 0.22275221 -1.3967385 0.6548045 ... 
0.11777848 2.3832355 1.1362197 ] [ 2.9451642 -0.8552874 0.5600742 ... -0.6509959 0.85611165 0.84562975] [-1.2686057 0.29792148 1.1753434 ... -0.3449039 -0.64424443 0.34960628] ... [-2.0603848 -0.95811385 0.09982914 ... -0.96080464 0.4586511 0.17287308] [-1.4018575 1.7519217 -1.2896757 ... 1.6952158 -0.8836498 0.4156405 ] [ 2.1442294 0.12724562 -0.99451435 ... 0.6703605 1.147418 0.5928092 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:0 - params:([200, 20, 1, 1, 2], 2) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6082.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=0]() %self.axis : int = prim::Constant[value=2]() %self.split : int[] = prim::Constant[value=[200, 20, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.7329818 2.5710943 -2.077606 ... 0.6113398 0.05017362 1.0297993 ] [-0.268828 0.75008684 -0.88391966 ... -1.1561327 0.34599242 -0.7657328 ] [-0.43633494 -2.243039 -0.68489957 ... -1.7561251 1.0476604 -0.05112364] ... [-1.3703194 1.0673236 0.3639964 ... 0.29065415 -0.6866209 -0.1762248 ] [ 0.59243584 1.646707 -0.78814644 ... -0.09017234 -0.0201779 0.6336226 ] [-0.4209379 -0.6096304 -0.6110209 ... -1.4649693 -1.4808685 0.532252 ]] [[ 1.154582 -0.40161732 0.57029474 ... 0.8241549 0.29452094 -1.2003218 ] [ 0.00601833 -1.890381 1.3278228 ... -0.6407172 -0.68092436 -0.8419217 ] [ 1.7930924 -0.7615963 -0.29957077 ... -0.17981537 0.83184063 0.03010802] ... [-0.8647345 1.3583802 2.015527 ... 0.24139158 1.2791333 -0.38927627] [ 0.4257021 1.0331088 2.2999535 ... -2.5592086 0.71743596 -1.6532863 ] [-0.9651759 0.2938707 0.20952635 ... 0.11686496 0.16124813 -0.42929038]] [[-0.1955096 0.7607781 -1.0014511 ... 1.4101264 -0.8248825 -0.10427172] [-0.2631868 -2.8424735 0.06531943 ... 0.30770794 -2.235263 0.18036155] [ 0.01569711 0.52060634 0.45905033 ... -0.17730318 -1.6497517 0.85605615] ... [-0.74719214 0.0460355 1.3029293 ... -0.14260042 -1.45423 -0.69728947] [ 0.63615334 -0.22312123 -0.07777861 ... 0.80538565 -2.349575 0.5822956 ] [-0.5605244 0.19383381 0.5911939 ... -0.91484827 -0.90850186 0.37085432]] ... [[-0.78711736 -1.2993681 1.3505291 ... 
0.27086732 1.3799694 -0.05994918] [-0.23466599 -0.19610398 -0.32291308 ... 1.2275661 0.3040753 0.8871175 ] [-0.6280894 -0.07622622 -0.7112801 ... 0.4367913 -0.81988984 1.8638788 ] ... [ 1.1830925 -0.8096436 0.36735037 ... -0.34821206 -0.9380869 0.8520124 ] [ 0.845886 -0.6328883 -0.7732144 ... -1.7644073 -1.0342566 -1.4771236 ] [ 0.15715429 -1.9052509 0.22903852 ... 1.3960414 -0.8394342 -1.3559012 ]] [[ 2.5574243 -0.03383027 -0.06640348 ... -0.05821638 0.15980273 -0.8338676 ] [ 1.6934131 -2.7659528 1.8515971 ... -0.64025867 -0.10204972 0.27920732] [ 0.0960352 0.28489843 -1.5880245 ... 1.7843988 -0.17805828 0.5539903 ] ... [ 1.9611884 0.4154332 -0.7429537 ... -2.5132437 -0.354554 0.72207534] [ 0.04769739 -0.52714694 0.10818612 ... 0.6904873 -0.9868909 -1.7138083 ] [-0.26215297 -1.5973835 0.09415242 ... 0.10206503 0.22592399 0.11895884]] [[ 0.22184545 -0.67877775 0.46531045 ... -0.60359263 1.3586898 0.07102458] [-1.7732377 0.44515726 0.97034204 ... -1.3017491 -1.1759957 -1.1226023 ] [-0.49181598 -0.6587293 -0.16147691 ... -0.6818977 0.12840328 0.8606639 ] ... [ 0.49602035 -1.2866433 -0.3185905 ... 0.2565146 -2.216323 0.6307936 ] [-0.90319246 -0.08130191 -0.2210652 ... -0.2083554 0.3750033 0.04779552] [ 0.9006573 1.5363649 -1.6264998 ... -0.35720617 1.2793063 -1.8455907 ]]]]; ov_res: [[[[ 0.7329818 2.5710943 -2.077606 ... 0.6113398 0.05017362 1.0297993 ] [-0.268828 0.75008684 -0.88391966 ... -1.1561327 0.34599242 -0.7657328 ] [-0.43633494 -2.243039 -0.68489957 ... -1.7561251 1.0476604 -0.05112364] ... [-1.3703194 1.0673236 0.3639964 ... 0.29065415 -0.6866209 -0.1762248 ] [ 0.59243584 1.646707 -0.78814644 ... -0.09017234 -0.0201779 0.6336226 ] [-0.4209379 -0.6096304 -0.6110209 ... -1.4649693 -1.4808685 0.532252 ]] [[ 1.154582 -0.40161732 0.57029474 ... 0.8241549 0.29452094 -1.2003218 ] [ 0.00601833 -1.890381 1.3278228 ... -0.6407172 -0.68092436 -0.8419217 ] [ 1.7930924 -0.7615963 -0.29957077 ... -0.17981537 0.83184063 0.03010802] ... [-0.8647345 1.3583802 2.015527 ... 
0.24139158 1.2791333 -0.38927627] [ 0.4257021 1.0331088 2.2999535 ... -2.5592086 0.71743596 -1.6532863 ] [-0.9651759 0.2938707 0.20952635 ... 0.11686496 0.16124813 -0.42929038]] [[-0.1955096 0.7607781 -1.0014511 ... 1.4101264 -0.8248825 -0.10427172] [-0.2631868 -2.8424735 0.06531943 ... 0.30770794 -2.235263 0.18036155] [ 0.01569711 0.52060634 0.45905033 ... -0.17730318 -1.6497517 0.85605615] ... [-0.74719214 0.0460355 1.3029293 ... -0.14260042 -1.45423 -0.69728947] [ 0.63615334 -0.22312123 -0.07777861 ... 0.80538565 -2.349575 0.5822956 ] [-0.5605244 0.19383381 0.5911939 ... -0.91484827 -0.90850186 0.37085432]] ... [[-0.78711736 -1.2993681 1.3505291 ... 0.27086732 1.3799694 -0.05994918] [-0.23466599 -0.19610398 -0.32291308 ... 1.2275661 0.3040753 0.8871175 ] [-0.6280894 -0.07622622 -0.7112801 ... 0.4367913 -0.81988984 1.8638788 ] ... [ 1.1830925 -0.8096436 0.36735037 ... -0.34821206 -0.9380869 0.8520124 ] [ 0.845886 -0.6328883 -0.7732144 ... -1.7644073 -1.0342566 -1.4771236 ] [ 0.15715429 -1.9052509 0.22903852 ... 1.3960414 -0.8394342 -1.3559012 ]] [[ 2.5574243 -0.03383027 -0.06640348 ... -0.05821638 0.15980273 -0.8338676 ] [ 1.6934131 -2.7659528 1.8515971 ... -0.64025867 -0.10204972 0.27920732] [ 0.0960352 0.28489843 -1.5880245 ... 1.7843988 -0.17805828 0.5539903 ] ... [ 1.9611884 0.4154332 -0.7429537 ... -2.5132437 -0.354554 0.72207534] [ 0.04769739 -0.52714694 0.10818612 ... 0.6904873 -0.9868909 -1.7138083 ] [-0.26215297 -1.5973835 0.09415242 ... 0.10206503 0.22592399 0.11895884]] [[ 0.22184545 -0.67877775 0.46531045 ... -0.60359263 1.3586898 0.07102458] [-1.7732377 0.44515726 0.97034204 ... -1.3017491 -1.1759957 -1.1226023 ] [-0.49181598 -0.6587293 -0.16147691 ... -0.6818977 0.12840328 0.8606639 ] ... [ 0.49602035 -1.2866433 -0.3185905 ... 0.2565146 -2.216323 0.6307936 ] [-0.90319246 -0.08130191 -0.2210652 ... -0.2083554 0.3750033 0.04779552] [ 0.9006573 1.5363649 -1.6264998 ... -0.35720617 1.2793063 -1.8455907 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:0 - params:([20, 200, 1, 1, 2], -1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6084.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=0]() %self.axis : int = prim::Constant[value=-1]() %self.split : int[] = prim::Constant[value=[20, 200, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-0.7354985 0.47598687 -0.11929361 ... -0.9702274 0.5896198 1.147542 ] [-0.6567286 1.34057 0.54696 ... -0.790358 0.06372096 -0.2741361 ] [ 0.11274783 -0.05986134 0.54722077 ... -0.8020272 1.2257104 -0.4652906 ] ... [-0.75438285 0.33216366 0.83745164 ... -1.4237156 -0.9810579 0.38418612] [ 0.49142557 1.4994096 -1.2127974 ... 2.2794037 0.82829183 1.517602 ] [ 1.8705482 -0.18186562 -0.653342 ... 0.10624564 0.1573009 0.91214675]] [[-0.7845566 -0.12468227 -0.14664012 ... -0.64260143 -2.5054762 -0.45475414] [-1.0865362 -1.635693 0.69969803 ... 0.43839318 -0.35232803 1.4208766 ] [ 0.11007996 -1.1011819 -1.2267754 ... 0.7074491 -1.6128098 0.4654974 ] ... [ 0.36808053 0.01186069 -0.2743536 ... -0.1778918 1.7183697 1.0578816 ] [-0.1800316 1.0732187 0.4924433 ... -1.2896223 -0.5469023 1.1361212 ] [ 0.21589692 0.10798495 -0.31648254 ... 0.1561719 0.15570511 -0.6489043 ]] [[-0.20767057 -0.59687436 0.27563056 ... -1.0093367 0.7136527 -0.77493393] [ 0.03492257 1.0057362 -1.2732278 ... -0.5266596 0.30218667 0.63166016] [-1.37751 1.923829 1.3986719 ... -0.57104814 -1.2818877 -0.97668314] ... [ 0.5238949 0.10041346 -1.0029519 ... -1.1629494 -0.6161521 1.4155226 ] [-0.6875499 1.1999136 0.30795142 ... 0.22593096 -0.53953105 0.29231012] [ 0.27086687 -1.370258 -0.7701721 ... -0.25107405 -1.5053477 -1.2213067 ]] ... [[ 0.15495336 1.917889 0.9528621 ... 
-1.2390234 -1.0980736 0.97436875] [-0.769669 -0.43237907 -1.0710256 ... 0.6163887 2.48759 -0.6970743 ] [-0.25340474 -0.50648874 -0.28251156 ... 0.14104608 1.2244166 0.45022237] ... [ 0.89696413 -1.5124661 -0.6033353 ... 0.07398343 0.43142042 -0.74472046] [ 0.2613995 -0.13033743 1.2046138 ... 0.871014 0.20398003 0.9612937 ] [-0.24334395 -0.04016617 -1.558662 ... -0.95908684 -0.81877494 0.15415774]] [[-0.16904502 -0.65704715 1.1982579 ... 0.835229 -0.24555585 0.36561936] [-0.29862374 -0.0427589 0.37726018 ... 1.4932929 -0.32748836 -0.23252386] [ 0.2813718 0.2552218 1.0920198 ... -0.22467497 -1.2806365 -0.39767215] ... [ 0.27111915 -0.01560192 1.8506563 ... -0.5951011 -0.91310906 -0.92854047] [-1.0994483 0.04599281 0.06038928 ... -1.3846931 1.0650651 0.12853584] [ 2.0012882 1.609007 -1.179191 ... -0.21313286 -0.98072165 1.319882 ]] [[ 0.46924016 -0.09799547 -1.4732729 ... 0.42242482 0.11021413 -0.4319568 ] [-0.38598406 0.02193246 -0.12084699 ... -0.3651556 1.0842427 0.6482546 ] [-0.8911238 -0.10046199 0.80668485 ... -0.6009904 -0.57251465 -0.3879929 ] ... [ 0.37146112 -1.1129467 -0.38525438 ... -0.6081257 0.48635218 0.8442756 ] [ 1.0922134 -0.15456176 0.3818919 ... 0.20383877 -0.83496577 -0.1736775 ] [ 0.9425844 0.30319685 -2.0579567 ... -0.04512256 -0.38815507 -1.0330727 ]]]]; ov_res: [[[[-0.7354985 0.47598687 -0.11929361 ... -0.9702274 0.5896198 1.147542 ] [-0.6567286 1.34057 0.54696 ... -0.790358 0.06372096 -0.2741361 ] [ 0.11274783 -0.05986134 0.54722077 ... -0.8020272 1.2257104 -0.4652906 ] ... [-0.75438285 0.33216366 0.83745164 ... -1.4237156 -0.9810579 0.38418612] [ 0.49142557 1.4994096 -1.2127974 ... 2.2794037 0.82829183 1.517602 ] [ 1.8705482 -0.18186562 -0.653342 ... 0.10624564 0.1573009 0.91214675]] [[-0.7845566 -0.12468227 -0.14664012 ... -0.64260143 -2.5054762 -0.45475414] [-1.0865362 -1.635693 0.69969803 ... 0.43839318 -0.35232803 1.4208766 ] [ 0.11007996 -1.1011819 -1.2267754 ... 0.7074491 -1.6128098 0.4654974 ] ... 
[ 0.36808053 0.01186069 -0.2743536 ... -0.1778918 1.7183697 1.0578816 ] [-0.1800316 1.0732187 0.4924433 ... -1.2896223 -0.5469023 1.1361212 ] [ 0.21589692 0.10798495 -0.31648254 ... 0.1561719 0.15570511 -0.6489043 ]] [[-0.20767057 -0.59687436 0.27563056 ... -1.0093367 0.7136527 -0.77493393] [ 0.03492257 1.0057362 -1.2732278 ... -0.5266596 0.30218667 0.63166016] [-1.37751 1.923829 1.3986719 ... -0.57104814 -1.2818877 -0.97668314] ... [ 0.5238949 0.10041346 -1.0029519 ... -1.1629494 -0.6161521 1.4155226 ] [-0.6875499 1.1999136 0.30795142 ... 0.22593096 -0.53953105 0.29231012] [ 0.27086687 -1.370258 -0.7701721 ... -0.25107405 -1.5053477 -1.2213067 ]] ... [[ 0.15495336 1.917889 0.9528621 ... -1.2390234 -1.0980736 0.97436875] [-0.769669 -0.43237907 -1.0710256 ... 0.6163887 2.48759 -0.6970743 ] [-0.25340474 -0.50648874 -0.28251156 ... 0.14104608 1.2244166 0.45022237] ... [ 0.89696413 -1.5124661 -0.6033353 ... 0.07398343 0.43142042 -0.74472046] [ 0.2613995 -0.13033743 1.2046138 ... 0.871014 0.20398003 0.9612937 ] [-0.24334395 -0.04016617 -1.558662 ... -0.95908684 -0.81877494 0.15415774]] [[-0.16904502 -0.65704715 1.1982579 ... 0.835229 -0.24555585 0.36561936] [-0.29862374 -0.0427589 0.37726018 ... 1.4932929 -0.32748836 -0.23252386] [ 0.2813718 0.2552218 1.0920198 ... -0.22467497 -1.2806365 -0.39767215] ... [ 0.27111915 -0.01560192 1.8506563 ... -0.5951011 -0.91310906 -0.92854047] [-1.0994483 0.04599281 0.06038928 ... -1.3846931 1.0650651 0.12853584] [ 2.0012882 1.609007 -1.179191 ... -0.21313286 -0.98072165 1.319882 ]] [[ 0.46924016 -0.09799547 -1.4732729 ... 0.42242482 0.11021413 -0.4319568 ] [-0.38598406 0.02193246 -0.12084699 ... -0.3651556 1.0842427 0.6482546 ] [-0.8911238 -0.10046199 0.80668485 ... -0.6009904 -0.57251465 -0.3879929 ] ... [ 0.37146112 -1.1129467 -0.38525438 ... -0.6081257 0.48635218 0.8442756 ] [ 1.0922134 -0.15456176 0.3818919 ... 0.20383877 -0.83496577 -0.1736775 ] [ 0.9425844 0.30319685 -2.0579567 ... 
-0.04512256 -0.38815507 -1.0330727 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:1 - params:(2, 1) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6086.aten_split, %input.1 : Tensor): %self.axis : int = prim::Constant[value=1]() %self.split : int = prim::Constant[value=2]() %4 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %5 : Tensor = aten::__getitem__(%4, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%5) fw_re: [[[[ 0.26026368 -1.0215491 -1.3040903 ... -0.39158085 -0.55856586 -0.1681374 ] [-0.7554439 -0.63814986 -1.3310854 ... 0.9678236 1.2879348 0.5440152 ] [-0.16635671 1.4101988 -0.12258183 ... 0.9115411 -1.2466058 0.84423435] ... [ 1.1975629 1.1408197 -0.69192 ... 0.3741379 -0.5512689 1.2462152 ] [-0.41212264 0.6488719 -0.4006158 ... 0.02962924 -1.8923745 -0.64783216] [-1.8212118 1.012521 -1.2702259 ... 0.44734108 -0.24040888 -0.7319664 ]] [[ 0.44879246 0.4374097 -0.25140843 ... -1.009507 0.72698563 1.3841368 ] [-1.618055 0.49860206 1.8317878 ... 0.24594484 -0.80463964 -1.2389085 ] [-1.684951 -0.36906135 -0.44222206 ... -0.38339317 0.7960836 -0.04050522] ... [ 0.7021409 -0.7474365 1.0314693 ... -0.28945133 1.075055 -0.02048884] [ 0.2527966 0.49673277 -1.4497188 ... -2.2997515 0.6195313 -0.5997786 ] [ 2.2371824 -0.5688446 1.245627 ... -1.4271876 -0.17496923 -0.5947665 ]]]]; ov_res: [[[[ 0.26026368 -1.0215491 -1.3040903 ... -0.39158085 -0.55856586 -0.1681374 ] [-0.7554439 -0.63814986 -1.3310854 ... 0.9678236 1.2879348 0.5440152 ] [-0.16635671 1.4101988 -0.12258183 ... 0.9115411 -1.2466058 0.84423435] ... [ 1.1975629 1.1408197 -0.69192 ... 0.3741379 -0.5512689 1.2462152 ] [-0.41212264 0.6488719 -0.4006158 ... 0.02962924 -1.8923745 -0.64783216] [-1.8212118 1.012521 -1.2702259 ... 0.44734108 -0.24040888 -0.7319664 ]] [[ 0.44879246 0.4374097 -0.25140843 ... -1.009507 0.72698563 1.3841368 ] [-1.618055 0.49860206 1.8317878 ... 
0.24594484 -0.80463964 -1.2389085 ] [-1.684951 -0.36906135 -0.44222206 ... -0.38339317 0.7960836 -0.04050522] ... [ 0.7021409 -0.7474365 1.0314693 ... -0.28945133 1.075055 -0.02048884] [ 0.2527966 0.49673277 -1.4497188 ... -2.2997515 0.6195313 -0.5997786 ] [ 2.2371824 -0.5688446 1.245627 ... -1.4271876 -0.17496923 -0.5947665 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:1 - params:(45, 2) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6088.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=1]() %self.axis : int = prim::Constant[value=2]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 1.7776129 -0.29137355 -0.6943208 ... -0.1902171 -0.31808785 -1.4323571 ] [-0.2694618 -1.7353208 -0.17529301 ... -0.10320268 0.21965227 -0.5875135 ] [ 0.5830403 -0.74988234 2.1842806 ... 1.7451622 1.8311703 -0.08580613] ... [ 1.1026669 0.5994566 -0.4631197 ... -0.1746084 -0.19594516 1.376536 ] [ 0.36853158 -1.9494451 -0.21483847 ... 0.65205884 0.5447854 1.1671422 ] [-0.3632987 -1.5576248 -1.2162708 ... 0.7817768 0.02970349 -1.4333829 ]] [[-0.47996175 -0.8294657 0.61681956 ... -2.5164945 -0.1241558 -1.7687255 ] [ 0.49556708 0.14439073 -0.75622857 ... 1.5167031 -1.5551102 2.1389956 ] [ 2.6897068 0.22203214 -0.27789986 ... -0.5796588 0.40043783 1.0627329 ] ... [ 0.61298907 -0.65085024 0.62358713 ... -0.2239735 0.08616516 -0.40344974] [ 0.5532247 0.09896814 0.5822735 ... -0.05454193 -1.0127978 1.7511811 ] [-0.26984066 -0.9651572 -1.3276539 ... 0.14847243 -1.8677192 0.02527164]] [[-0.15997493 0.09488154 -2.1807551 ... 0.89800143 0.34427702 0.48172566] [ 0.5591986 1.8579854 0.4320106 ... -1.4401581 0.6318019 1.7652656 ] [ 1.765127 -0.8306751 0.845246 ... 1.371578 -0.21017924 -0.627927 ] ... [-1.9327476 -0.59923285 1.940827 ... -0.34406546 -1.3026723 1.1196342 ] [-0.73788136 0.10918973 -0.8307463 ... 0.16805515 -0.42201704 -0.091851 ] [ 0.15047741 0.80707294 1.3386445 ... -2.1396067 -0.35140455 1.4830583 ]] ... [[-0.76602685 0.85233927 -0.06216978 ... 
2.6582904 -0.54925394 2.640071 ] [ 0.7565865 -2.2536643 0.23991005 ... -0.09902974 -1.7614805 -0.47684312] [ 0.36505726 -2.3348625 -1.1332979 ... 0.87516016 0.6417065 2.2722635 ] ... [-0.6683775 1.2517358 0.57285666 ... 0.13272788 -0.68263704 -0.14783938] [-0.10093483 0.24339315 1.0764426 ... 0.672698 -0.469841 0.39855418] [-0.5709918 0.5558046 0.3227141 ... -0.16046996 0.36506814 1.0347638 ]] [[-1.0825367 0.21072611 -0.9502342 ... -1.5416635 0.47775894 1.4654416 ] [-0.56233263 -0.00453784 0.14679316 ... 0.89262253 0.28323072 0.14750734] [-0.5185536 -1.7973381 -0.15393329 ... 0.23041518 -0.3438156 -0.59695923] ... [ 1.5918362 1.1535606 1.9405355 ... 0.47453842 -0.5045047 0.95426875] [-2.474158 0.8488241 -0.9266081 ... -1.3565824 -0.7667402 -0.7427797 ] [ 1.0387968 -0.943683 -1.9948136 ... -2.1207662 -0.52265 -1.8050079 ]] [[ 0.1751634 0.15663697 -0.26750582 ... 0.3548829 -1.353332 1.3058646 ] [ 0.24715167 1.1664613 -0.7937739 ... -0.13325837 0.044579 0.29981437] [-0.27461323 0.8900415 1.0082003 ... 0.5978249 -1.0026139 0.7745509 ] ... [ 0.63650775 -2.3880777 0.91373646 ... 0.52264804 0.5520542 0.01979947] [-0.682652 -0.24415007 0.2686716 ... 0.35908905 -0.9160152 -1.2645866 ] [ 0.65311426 1.1054492 0.5991302 ... -0.49905038 0.37566176 -0.3824355 ]]]]; ov_res: [[[[ 1.7776129 -0.29137355 -0.6943208 ... -0.1902171 -0.31808785 -1.4323571 ] [-0.2694618 -1.7353208 -0.17529301 ... -0.10320268 0.21965227 -0.5875135 ] [ 0.5830403 -0.74988234 2.1842806 ... 1.7451622 1.8311703 -0.08580613] ... [ 1.1026669 0.5994566 -0.4631197 ... -0.1746084 -0.19594516 1.376536 ] [ 0.36853158 -1.9494451 -0.21483847 ... 0.65205884 0.5447854 1.1671422 ] [-0.3632987 -1.5576248 -1.2162708 ... 0.7817768 0.02970349 -1.4333829 ]] [[-0.47996175 -0.8294657 0.61681956 ... -2.5164945 -0.1241558 -1.7687255 ] [ 0.49556708 0.14439073 -0.75622857 ... 1.5167031 -1.5551102 2.1389956 ] [ 2.6897068 0.22203214 -0.27789986 ... -0.5796588 0.40043783 1.0627329 ] ... [ 0.61298907 -0.65085024 0.62358713 ... 
-0.2239735 0.08616516 -0.40344974] [ 0.5532247 0.09896814 0.5822735 ... -0.05454193 -1.0127978 1.7511811 ] [-0.26984066 -0.9651572 -1.3276539 ... 0.14847243 -1.8677192 0.02527164]] [[-0.15997493 0.09488154 -2.1807551 ... 0.89800143 0.34427702 0.48172566] [ 0.5591986 1.8579854 0.4320106 ... -1.4401581 0.6318019 1.7652656 ] [ 1.765127 -0.8306751 0.845246 ... 1.371578 -0.21017924 -0.627927 ] ... [-1.9327476 -0.59923285 1.940827 ... -0.34406546 -1.3026723 1.1196342 ] [-0.73788136 0.10918973 -0.8307463 ... 0.16805515 -0.42201704 -0.091851 ] [ 0.15047741 0.80707294 1.3386445 ... -2.1396067 -0.35140455 1.4830583 ]] ... [[-0.76602685 0.85233927 -0.06216978 ... 2.6582904 -0.54925394 2.640071 ] [ 0.7565865 -2.2536643 0.23991005 ... -0.09902974 -1.7614805 -0.47684312] [ 0.36505726 -2.3348625 -1.1332979 ... 0.87516016 0.6417065 2.2722635 ] ... [-0.6683775 1.2517358 0.57285666 ... 0.13272788 -0.68263704 -0.14783938] [-0.10093483 0.24339315 1.0764426 ... 0.672698 -0.469841 0.39855418] [-0.5709918 0.5558046 0.3227141 ... -0.16046996 0.36506814 1.0347638 ]] [[-1.0825367 0.21072611 -0.9502342 ... -1.5416635 0.47775894 1.4654416 ] [-0.56233263 -0.00453784 0.14679316 ... 0.89262253 0.28323072 0.14750734] [-0.5185536 -1.7973381 -0.15393329 ... 0.23041518 -0.3438156 -0.59695923] ... [ 1.5918362 1.1535606 1.9405355 ... 0.47453842 -0.5045047 0.95426875] [-2.474158 0.8488241 -0.9266081 ... -1.3565824 -0.7667402 -0.7427797 ] [ 1.0387968 -0.943683 -1.9948136 ... -2.1207662 -0.52265 -1.8050079 ]] [[ 0.1751634 0.15663697 -0.26750582 ... 0.3548829 -1.353332 1.3058646 ] [ 0.24715167 1.1664613 -0.7937739 ... -0.13325837 0.044579 0.29981437] [-0.27461323 0.8900415 1.0082003 ... 0.5978249 -1.0026139 0.7745509 ] ... [ 0.63650775 -2.3880777 0.91373646 ... 0.52264804 0.5520542 0.01979947] [-0.682652 -0.24415007 0.2686716 ... 0.35908905 -0.9160152 -1.2645866 ] [ 0.65311426 1.1054492 0.5991302 ... -0.49905038 0.37566176 -0.3824355 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:1 - params:(45, -1) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6090.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=1]() %self.axis : int = prim::Constant[value=-1]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 1.67305484e-01 -5.02297640e-01 -9.61512029e-01 ... 5.49047232e-01 3.93429041e-01 -7.34596968e-01] [ 7.56026924e-01 -3.29644501e-01 -5.33455372e-01 ... 5.82308590e-01 4.88025784e-01 -1.41917288e+00] [-2.50285578e+00 1.67159891e+00 1.18564606e+00 ... 1.14582932e+00 -4.96997908e-02 1.06109774e+00] ... [ 5.19150972e-01 -2.13393271e-02 9.01843607e-01 ... 3.56095344e-01 -1.54241431e+00 -8.47271979e-01] [-9.33980167e-01 -3.22377414e-01 -1.90181255e+00 ... -1.08004189e+00 9.55454167e-03 1.26069272e-02] [-2.72368580e-01 -7.82151401e-01 1.50192156e-01 ... 1.50341392e-01 7.27137983e-01 2.18141034e-01]] [[ 2.49355689e-01 1.27806604e+00 8.09733808e-01 ... -9.80183065e-01 2.03894258e+00 -6.02728546e-01] [-1.94698715e+00 -3.83195460e-01 1.11162078e+00 ... 5.33661783e-01 1.96568203e+00 -1.76072586e+00] [-4.56277989e-02 9.93240714e-01 -5.89082778e-01 ... 3.43374908e-01 1.56620038e+00 1.75722766e+00] ... [-2.18301249e+00 -2.10153759e-02 1.25697768e+00 ... -4.56113219e-02 1.19599867e+00 1.79998839e+00] [-4.40583974e-01 3.19465429e-01 2.11443633e-01 ... 2.26985192e+00 -8.91077995e-01 2.24847019e-01] [ 1.11396956e+00 7.44948909e-02 -2.16317439e+00 ... 2.15469599e-01 4.86975551e-01 -1.87118244e+00]] [[-4.14871633e-01 5.59050918e-01 4.03237611e-01 ... -1.30174613e+00 5.68792582e-01 -1.78352618e+00] [-1.36667237e-01 9.90542710e-01 -1.13180645e-01 ... 
1.13306737e+00 4.97155011e-01 -4.76533890e-01] [ 1.19191337e+00 -4.55931753e-01 9.69802976e-01 ... 3.36056381e-01 5.07750213e-01 1.24982996e-02] ... [-1.62437570e+00 9.95802820e-01 -7.92945862e-01 ... 1.05167270e+00 -2.85570860e-01 -5.62535703e-01] [ 4.88337636e-01 1.31371689e+00 6.91257954e-01 ... -4.36176389e-01 1.14119267e+00 8.66006494e-01] [ 2.74754733e-01 -6.97091222e-01 4.90756810e-01 ... -2.59326911e+00 9.78611112e-02 -1.48845959e+00]] ... [[-8.77846956e-01 -2.99462266e-02 8.90811205e-01 ... 4.67142284e-01 2.12446943e-01 -3.61180758e+00] [ 1.02654934e+00 -2.44316816e-01 -6.07466064e-02 ... 4.44363087e-01 2.06012517e-01 -2.84581870e-01] [-3.70430380e-01 -7.19367445e-01 3.30572534e+00 ... -1.31977153e+00 3.60165894e-01 2.00013638e-01] ... [ 1.47445321e+00 -1.00314331e+00 5.23018986e-02 ... 1.19289823e-01 -5.82830375e-03 -6.45454168e-01] [ 1.75519633e+00 3.31319660e-01 1.35311985e+00 ... -9.26988780e-01 2.28340104e-01 3.39141876e-01] [-1.80974913e+00 -2.40758717e-01 3.65935445e-01 ... -5.61897695e-01 9.37203050e-01 -5.55144489e-01]] [[ 1.56563628e+00 -1.01710096e-01 3.70846152e-01 ... 6.41404033e-01 2.55279851e+00 -7.13715792e-01] [-1.41965723e+00 -6.54616535e-01 6.41741872e-01 ... 3.01384330e-02 -8.78538370e-01 -1.32614458e+00] [-4.15771008e-01 4.77709651e-01 -3.95058513e-01 ... -1.65310550e+00 1.33964151e-01 9.85623300e-01] ... [-1.02871716e+00 -8.17711592e-01 1.03439530e-02 ... -8.07912648e-01 5.81272900e-01 1.78631270e+00] [ 2.92756557e-01 1.10931897e+00 -7.47066438e-01 ... 6.44823968e-01 -1.91712606e+00 -1.98250599e-02] [-2.69594416e-02 -4.66602743e-01 -2.58744031e-01 ... 5.33117175e-01 -7.66694784e-01 6.61003709e-01]] [[-7.88003385e-01 -1.96421407e-02 6.33369386e-01 ... 7.70844281e-01 -4.29321975e-01 -7.19504476e-01] [ 6.29230678e-01 -5.12990594e-01 -1.45587409e+00 ... -2.08030796e+00 -1.00218177e+00 -8.93701851e-01] [-2.55973130e-01 -2.52588362e-01 1.64361727e+00 ... -8.39141726e-01 1.65707457e+00 -6.99893746e-04] ... 
[ 8.11106682e-01 -2.74530321e-01 1.68756807e+00 ... 2.87797093e-01 1.76801056e-01 1.08583055e-01] [-1.41704991e-01 1.20320821e+00 9.09862071e-02 ... 1.43489695e+00 -4.74010646e-01 1.71291423e+00] [ 1.13098145e+00 -3.95180821e-01 1.06587994e+00 ... -2.58971751e-01 -4.20441359e-01 1.14052927e+00]]]]; ov_res: [[[[ 1.67305484e-01 -5.02297640e-01 -9.61512029e-01 ... 5.49047232e-01 3.93429041e-01 -7.34596968e-01] [ 7.56026924e-01 -3.29644501e-01 -5.33455372e-01 ... 5.82308590e-01 4.88025784e-01 -1.41917288e+00] [-2.50285578e+00 1.67159891e+00 1.18564606e+00 ... 1.14582932e+00 -4.96997908e-02 1.06109774e+00] ... [ 5.19150972e-01 -2.13393271e-02 9.01843607e-01 ... 3.56095344e-01 -1.54241431e+00 -8.47271979e-01] [-9.33980167e-01 -3.22377414e-01 -1.90181255e+00 ... -1.08004189e+00 9.55454167e-03 1.26069272e-02] [-2.72368580e-01 -7.82151401e-01 1.50192156e-01 ... 1.50341392e-01 7.27137983e-01 2.18141034e-01]] [[ 2.49355689e-01 1.27806604e+00 8.09733808e-01 ... -9.80183065e-01 2.03894258e+00 -6.02728546e-01] [-1.94698715e+00 -3.83195460e-01 1.11162078e+00 ... 5.33661783e-01 1.96568203e+00 -1.76072586e+00] [-4.56277989e-02 9.93240714e-01 -5.89082778e-01 ... 3.43374908e-01 1.56620038e+00 1.75722766e+00] ... [-2.18301249e+00 -2.10153759e-02 1.25697768e+00 ... -4.56113219e-02 1.19599867e+00 1.79998839e+00] [-4.40583974e-01 3.19465429e-01 2.11443633e-01 ... 2.26985192e+00 -8.91077995e-01 2.24847019e-01] [ 1.11396956e+00 7.44948909e-02 -2.16317439e+00 ... 2.15469599e-01 4.86975551e-01 -1.87118244e+00]] [[-4.14871633e-01 5.59050918e-01 4.03237611e-01 ... -1.30174613e+00 5.68792582e-01 -1.78352618e+00] [-1.36667237e-01 9.90542710e-01 -1.13180645e-01 ... 1.13306737e+00 4.97155011e-01 -4.76533890e-01] [ 1.19191337e+00 -4.55931753e-01 9.69802976e-01 ... 3.36056381e-01 5.07750213e-01 1.24982996e-02] ... [-1.62437570e+00 9.95802820e-01 -7.92945862e-01 ... 1.05167270e+00 -2.85570860e-01 -5.62535703e-01] [ 4.88337636e-01 1.31371689e+00 6.91257954e-01 ... 
-4.36176389e-01 1.14119267e+00 8.66006494e-01] [ 2.74754733e-01 -6.97091222e-01 4.90756810e-01 ... -2.59326911e+00 9.78611112e-02 -1.48845959e+00]] ... [[-8.77846956e-01 -2.99462266e-02 8.90811205e-01 ... 4.67142284e-01 2.12446943e-01 -3.61180758e+00] [ 1.02654934e+00 -2.44316816e-01 -6.07466064e-02 ... 4.44363087e-01 2.06012517e-01 -2.84581870e-01] [-3.70430380e-01 -7.19367445e-01 3.30572534e+00 ... -1.31977153e+00 3.60165894e-01 2.00013638e-01] ... [ 1.47445321e+00 -1.00314331e+00 5.23018986e-02 ... 1.19289823e-01 -5.82830375e-03 -6.45454168e-01] [ 1.75519633e+00 3.31319660e-01 1.35311985e+00 ... -9.26988780e-01 2.28340104e-01 3.39141876e-01] [-1.80974913e+00 -2.40758717e-01 3.65935445e-01 ... -5.61897695e-01 9.37203050e-01 -5.55144489e-01]] [[ 1.56563628e+00 -1.01710096e-01 3.70846152e-01 ... 6.41404033e-01 2.55279851e+00 -7.13715792e-01] [-1.41965723e+00 -6.54616535e-01 6.41741872e-01 ... 3.01384330e-02 -8.78538370e-01 -1.32614458e+00] [-4.15771008e-01 4.77709651e-01 -3.95058513e-01 ... -1.65310550e+00 1.33964151e-01 9.85623300e-01] ... [-1.02871716e+00 -8.17711592e-01 1.03439530e-02 ... -8.07912648e-01 5.81272900e-01 1.78631270e+00] [ 2.92756557e-01 1.10931897e+00 -7.47066438e-01 ... 6.44823968e-01 -1.91712606e+00 -1.98250599e-02] [-2.69594416e-02 -4.66602743e-01 -2.58744031e-01 ... 5.33117175e-01 -7.66694784e-01 6.61003709e-01]] [[-7.88003385e-01 -1.96421407e-02 6.33369386e-01 ... 7.70844281e-01 -4.29321975e-01 -7.19504476e-01] [ 6.29230678e-01 -5.12990594e-01 -1.45587409e+00 ... -2.08030796e+00 -1.00218177e+00 -8.93701851e-01] [-2.55973130e-01 -2.52588362e-01 1.64361727e+00 ... -8.39141726e-01 1.65707457e+00 -6.99893746e-04] ... [ 8.11106682e-01 -2.74530321e-01 1.68756807e+00 ... 2.87797093e-01 1.76801056e-01 1.08583055e-01] [-1.41704991e-01 1.20320821e+00 9.09862071e-02 ... 1.43489695e+00 -4.74010646e-01 1.71291423e+00] [ 1.13098145e+00 -3.95180821e-01 1.06587994e+00 ... -2.58971751e-01 -4.20441359e-01 1.14052927e+00]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:1 - params:([2, 2, 2, 2, 2], 1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6092.aten_split, %input.1 : Tensor): %self.axis : int = prim::Constant[value=1]() %self.split : int[] = prim::Constant[value=[2, 2, 2, 2, 2]]() %4 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %5 : Tensor = aten::__getitem__(%4, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%5) fw_re: [[[[-2.7541690e+00 8.6093061e-02 -2.5027256e+00 ... 6.7894660e-02 9.1609247e-02 -1.7116123e+00] [-2.8296313e-01 -1.3862282e+00 -1.8018277e+00 ... -7.5145459e-01 8.6367071e-01 1.6821290e+00] [-1.3176044e+00 2.2634857e+00 2.9210371e-01 ... 2.5302014e-01 -6.4862233e-01 5.6662112e-01] ... [-2.3159525e-01 1.1728036e+00 4.0205276e-01 ... 4.2257455e-01 -1.7074767e-01 6.1079973e-01] [-1.3894362e+00 -1.2179266e+00 4.7372738e-01 ... -1.1021872e-01 -5.9860700e-01 -7.5153625e-01] [ 9.8311090e-01 -1.8993603e+00 -6.5894288e-01 ... 4.9305210e-01 1.0629908e+00 -7.1965285e-02]] [[ 1.1241781e+00 5.3729005e-02 2.3149227e-01 ... -7.6487851e-01 -1.0943258e+00 -1.2831753e+00] [ 8.8074493e-01 2.0768484e-01 -1.2234720e+00 ... 6.4648852e-02 6.5884459e-01 7.0887536e-01] [-1.4532703e+00 -1.8116301e+00 8.2901411e-04 ... 6.3421214e-01 -2.1674411e-01 2.7118688e+00] ... [ 1.4842376e+00 -1.1975164e+00 -6.9926405e-01 ... -8.1990653e-01 8.2196140e-01 -4.3415067e-01] [-1.2769073e+00 -3.5769579e-01 5.2786376e-02 ... -1.2983072e+00 -6.9183248e-01 2.5232434e-01] [-7.8871375e-01 -7.8407776e-01 2.1318871e-01 ... -1.5073130e-02 -1.3503479e+00 1.7044990e-01]]]]; ov_res: [[[[-2.7541690e+00 8.6093061e-02 -2.5027256e+00 ... 6.7894660e-02 9.1609247e-02 -1.7116123e+00] [-2.8296313e-01 -1.3862282e+00 -1.8018277e+00 ... -7.5145459e-01 8.6367071e-01 1.6821290e+00] [-1.3176044e+00 2.2634857e+00 2.9210371e-01 ... 2.5302014e-01 -6.4862233e-01 5.6662112e-01] ... 
[-2.3159525e-01 1.1728036e+00 4.0205276e-01 ... 4.2257455e-01 -1.7074767e-01 6.1079973e-01] [-1.3894362e+00 -1.2179266e+00 4.7372738e-01 ... -1.1021872e-01 -5.9860700e-01 -7.5153625e-01] [ 9.8311090e-01 -1.8993603e+00 -6.5894288e-01 ... 4.9305210e-01 1.0629908e+00 -7.1965285e-02]] [[ 1.1241781e+00 5.3729005e-02 2.3149227e-01 ... -7.6487851e-01 -1.0943258e+00 -1.2831753e+00] [ 8.8074493e-01 2.0768484e-01 -1.2234720e+00 ... 6.4648852e-02 6.5884459e-01 7.0887536e-01] [-1.4532703e+00 -1.8116301e+00 8.2901411e-04 ... 6.3421214e-01 -2.1674411e-01 2.7118688e+00] ... [ 1.4842376e+00 -1.1975164e+00 -6.9926405e-01 ... -8.1990653e-01 8.2196140e-01 -4.3415067e-01] [-1.2769073e+00 -3.5769579e-01 5.2786376e-02 ... -1.2983072e+00 -6.9183248e-01 2.5232434e-01] [-7.8871375e-01 -7.8407776e-01 2.1318871e-01 ... -1.5073130e-02 -1.3503479e+00 1.7044990e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:1 - params:([200, 20, 1, 1, 2], 2) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6094.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=1]() %self.axis : int = prim::Constant[value=2]() %self.split : int[] = prim::Constant[value=[200, 20, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-1.060759 0.6443195 -0.32939643 ... 0.13709775 -1.2050496 0.15529238] [-0.23295963 -0.3120448 0.20898844 ... -2.2389576 -1.0787138 -0.80200166] [ 0.93219537 0.5332199 -0.5733764 ... 0.2995679 -1.6560458 -0.77112377] ... [-1.3796084 -1.2454183 -0.35762256 ... -0.29876897 -1.2961073 -0.27277613] [-0.776498 0.8600475 -0.83280426 ... -0.99377906 -0.9283092 -0.925485 ] [ 0.23281804 0.34237045 -2.1647034 ... 1.2747637 0.18492247 -0.70385665]] [[-0.73198587 -1.0486068 -0.74385804 ... -0.19010164 -0.16657513 1.1916245 ] [-1.177482 -0.58876544 1.3854556 ... -1.231741 -0.03135996 0.18829075] [ 0.02058428 1.4467868 -1.5162472 ... -1.5137597 1.7287046 0.53910124] ... [ 0.22710496 -1.9161897 -0.0574788 ... 0.6921113 0.95639896 -1.4053257 ] [-0.3335827 0.55384123 -0.8222356 ... -0.7628615 -1.7259735 -1.3042992 ] [ 0.55953974 -1.2781996 -2.851777 ... -1.1475691 0.3862043 0.4600416 ]] [[-0.5358656 0.8891013 -0.97064954 ... 0.4482513 -1.190777 0.7063177 ] [ 0.08058731 0.5889998 1.3342283 ... -0.2692505 -1.0708332 -0.4284928 ] [-1.9792892 2.0023034 1.7117356 ... 1.4474941 -1.3325802 -1.316307 ] ... [-0.2276895 -0.11709184 -1.5710422 ... 1.0842927 1.072945 0.07594543] [-0.14788182 0.02506762 -0.26166287 ... 0.97176015 -0.4972923 0.17701022] [-0.67515975 1.0162174 -1.2573818 ... -1.0508689 1.2868589 -0.73835856]] ... [[-0.6138276 0.4163482 0.18783408 ... 
0.9668644 -1.3828732 1.457784 ] [ 0.7806212 0.24389385 -0.00436685 ... -0.09725681 -0.46161565 0.2216018 ] [-0.88082224 -0.43956897 -0.33011782 ... 0.8722993 -1.2566094 0.25474897] ... [ 2.370664 0.8949511 -0.767379 ... 1.5432103 -0.46671662 -0.10232925] [-0.13807479 1.0844257 0.58068407 ... -0.9699661 -1.3534815 0.17035742] [-1.8777484 -0.37777662 -0.50927436 ... -0.16931029 0.9359738 0.8851065 ]] [[-1.5215302 0.5202838 -0.06356875 ... -0.31413487 -0.48941475 -0.5861986 ] [ 0.7785715 -0.17221856 1.5251464 ... -0.33882973 -0.13041846 -0.4490951 ] [-0.14348625 0.39005336 0.06954514 ... -0.58993757 0.6079144 1.0380461 ] ... [ 0.7819993 0.03943351 -1.4882622 ... -0.2249815 -0.3634968 0.3813065 ] [ 1.1614387 -0.61994505 -0.8230798 ... -0.69049597 1.0484698 -2.251501 ] [-0.34843737 0.70447534 -0.4092609 ... 0.75617987 1.4221916 0.8458517 ]] [[ 1.6564288 1.5835658 0.20224504 ... 0.17542599 -1.3102552 -0.86947596] [-0.3530615 -1.4101692 -0.18077147 ... 0.19181047 -0.5989393 -0.5380937 ] [ 1.4231551 0.13182871 0.29132947 ... -0.89497507 0.5255974 -1.3744471 ] ... [-0.63465416 0.68110573 -1.743888 ... 0.98104686 0.15659545 -0.3629146 ] [-1.2371289 0.7795837 2.0301306 ... 1.5413296 0.4436752 -0.16476032] [ 0.53835446 -2.8259783 -0.82900614 ... -1.407301 -0.63408375 -0.3890284 ]]]]; ov_res: [[[[-1.060759 0.6443195 -0.32939643 ... 0.13709775 -1.2050496 0.15529238] [-0.23295963 -0.3120448 0.20898844 ... -2.2389576 -1.0787138 -0.80200166] [ 0.93219537 0.5332199 -0.5733764 ... 0.2995679 -1.6560458 -0.77112377] ... [-1.3796084 -1.2454183 -0.35762256 ... -0.29876897 -1.2961073 -0.27277613] [-0.776498 0.8600475 -0.83280426 ... -0.99377906 -0.9283092 -0.925485 ] [ 0.23281804 0.34237045 -2.1647034 ... 1.2747637 0.18492247 -0.70385665]] [[-0.73198587 -1.0486068 -0.74385804 ... -0.19010164 -0.16657513 1.1916245 ] [-1.177482 -0.58876544 1.3854556 ... -1.231741 -0.03135996 0.18829075] [ 0.02058428 1.4467868 -1.5162472 ... -1.5137597 1.7287046 0.53910124] ... 
[ 0.22710496 -1.9161897 -0.0574788 ... 0.6921113 0.95639896 -1.4053257 ] [-0.3335827 0.55384123 -0.8222356 ... -0.7628615 -1.7259735 -1.3042992 ] [ 0.55953974 -1.2781996 -2.851777 ... -1.1475691 0.3862043 0.4600416 ]] [[-0.5358656 0.8891013 -0.97064954 ... 0.4482513 -1.190777 0.7063177 ] [ 0.08058731 0.5889998 1.3342283 ... -0.2692505 -1.0708332 -0.4284928 ] [-1.9792892 2.0023034 1.7117356 ... 1.4474941 -1.3325802 -1.316307 ] ... [-0.2276895 -0.11709184 -1.5710422 ... 1.0842927 1.072945 0.07594543] [-0.14788182 0.02506762 -0.26166287 ... 0.97176015 -0.4972923 0.17701022] [-0.67515975 1.0162174 -1.2573818 ... -1.0508689 1.2868589 -0.73835856]] ... [[-0.6138276 0.4163482 0.18783408 ... 0.9668644 -1.3828732 1.457784 ] [ 0.7806212 0.24389385 -0.00436685 ... -0.09725681 -0.46161565 0.2216018 ] [-0.88082224 -0.43956897 -0.33011782 ... 0.8722993 -1.2566094 0.25474897] ... [ 2.370664 0.8949511 -0.767379 ... 1.5432103 -0.46671662 -0.10232925] [-0.13807479 1.0844257 0.58068407 ... -0.9699661 -1.3534815 0.17035742] [-1.8777484 -0.37777662 -0.50927436 ... -0.16931029 0.9359738 0.8851065 ]] [[-1.5215302 0.5202838 -0.06356875 ... -0.31413487 -0.48941475 -0.5861986 ] [ 0.7785715 -0.17221856 1.5251464 ... -0.33882973 -0.13041846 -0.4490951 ] [-0.14348625 0.39005336 0.06954514 ... -0.58993757 0.6079144 1.0380461 ] ... [ 0.7819993 0.03943351 -1.4882622 ... -0.2249815 -0.3634968 0.3813065 ] [ 1.1614387 -0.61994505 -0.8230798 ... -0.69049597 1.0484698 -2.251501 ] [-0.34843737 0.70447534 -0.4092609 ... 0.75617987 1.4221916 0.8458517 ]] [[ 1.6564288 1.5835658 0.20224504 ... 0.17542599 -1.3102552 -0.86947596] [-0.3530615 -1.4101692 -0.18077147 ... 0.19181047 -0.5989393 -0.5380937 ] [ 1.4231551 0.13182871 0.29132947 ... -0.89497507 0.5255974 -1.3744471 ] ... [-0.63465416 0.68110573 -1.743888 ... 0.98104686 0.15659545 -0.3629146 ] [-1.2371289 0.7795837 2.0301306 ... 1.5413296 0.4436752 -0.16476032] [ 0.53835446 -2.8259783 -0.82900614 ... 
-1.407301 -0.63408375 -0.3890284 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:1 - params:([20, 200, 1, 1, 2], -1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6096.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=1]() %self.axis : int = prim::Constant[value=-1]() %self.split : int[] = prim::Constant[value=[20, 200, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.88230896 0.7786256 -1.1340969 ... 0.16161773 0.7109073 -0.36772484] [ 0.720624 0.6744022 -0.51525277 ... -0.18415134 -0.5458124 -0.45194378] [ 0.44005567 -0.7141876 -0.82020295 ... -0.6940849 -0.5280074 -0.37713543] ... [-0.44618922 -0.28582036 -1.676142 ... 1.6106007 0.17881346 -0.7056097 ] [-0.38712734 0.99093056 0.2727641 ... 0.6592014 -0.89239615 0.08814707] [ 1.0205038 0.07683831 -1.4904647 ... -0.30817384 0.6137177 1.4986476 ]] [[-0.27878496 0.6479299 -0.47897622 ... -0.6369017 -0.42411712 -0.27743736] [-0.38009578 1.0915314 1.4075236 ... -2.3635042 0.6221016 -0.35548407] [ 0.505908 0.97087604 1.7437618 ... 1.6360486 0.51509863 1.7495099 ] ... [ 0.60100144 -1.2779723 1.2307252 ... -0.5196984 1.5084143 -0.47198614] [ 0.64200467 0.907698 0.8674708 ... -0.8721533 -0.8909184 0.95510185] [-2.1377518 -1.6472764 0.25470224 ... 0.09045854 1.224214 0.9473898 ]] [[ 1.6906646 0.6561178 1.7555228 ... 0.5536273 1.9378948 -0.9959316 ] [ 0.2572203 -1.3649069 1.2922107 ... 0.86773324 -0.77618057 -0.3846334 ] [-0.009956 0.44435182 -0.73868394 ... 0.65888774 1.8149499 -1.819611 ] ... [-0.22618967 -0.6348916 -0.3420444 ... 0.76071423 -0.27725908 0.11557652] [ 0.9371821 0.42806107 -0.02108633 ... 0.0469105 -2.299086 -0.55870175] [-0.6853127 0.6087116 -0.12542069 ... -0.0053856 1.0482585 -0.6652421 ]] ... [[ 1.4129059 0.90163994 0.2549206 ... 
-0.23011418 -0.07612499 0.6103966 ] [ 0.19608483 -0.3894197 -1.7951802 ... 0.3615215 1.8023393 -0.34347042] [-0.15137622 1.5709722 0.8404288 ... 0.42831412 0.38028216 -0.8016595 ] ... [ 0.8249242 1.2836554 -0.579062 ... -0.15052706 -0.7939476 0.72164786] [-0.38343996 0.08435509 -0.84207314 ... 0.12561792 2.045099 0.93311745] [-1.1439524 -2.0403092 -1.0299035 ... 0.46297762 -0.46401784 0.32830334]] [[ 0.36010662 2.135789 -0.8459631 ... 0.34789938 -0.5970141 0.50339997] [ 1.4387741 -0.06764319 -1.2201258 ... 0.8846658 0.01088595 -0.02517706] [-0.23277824 0.2689041 -0.5746001 ... -1.254082 1.0057639 -0.9490444 ] ... [-0.03897598 -0.38550204 0.40051374 ... -0.4619904 -0.7818044 -0.09334919] [ 0.9019387 0.5963832 -1.8810775 ... 0.22474946 -1.43362 0.5025986 ] [ 0.8716861 -0.06804165 -0.510222 ... -1.1956654 1.242732 0.35266227]] [[ 1.1347873 0.14022876 -2.2343893 ... 0.0173036 -0.90014654 -0.625508 ] [-0.8589523 -1.2216578 -2.7815473 ... -0.11798458 0.8790381 -0.15576471] [-0.1130908 -0.01062718 -0.5013113 ... 0.9836858 0.78606194 0.61432487] ... [ 1.9851582 0.43055296 -1.25724 ... -1.2427907 2.185847 0.4876864 ] [ 1.0327042 0.83478516 -0.60968465 ... 0.18608455 1.7962128 -1.495078 ] [-0.4874492 0.873172 -0.70948935 ... 0.83706814 0.972989 0.03470261]]]]; ov_res: [[[[ 0.88230896 0.7786256 -1.1340969 ... 0.16161773 0.7109073 -0.36772484] [ 0.720624 0.6744022 -0.51525277 ... -0.18415134 -0.5458124 -0.45194378] [ 0.44005567 -0.7141876 -0.82020295 ... -0.6940849 -0.5280074 -0.37713543] ... [-0.44618922 -0.28582036 -1.676142 ... 1.6106007 0.17881346 -0.7056097 ] [-0.38712734 0.99093056 0.2727641 ... 0.6592014 -0.89239615 0.08814707] [ 1.0205038 0.07683831 -1.4904647 ... -0.30817384 0.6137177 1.4986476 ]] [[-0.27878496 0.6479299 -0.47897622 ... -0.6369017 -0.42411712 -0.27743736] [-0.38009578 1.0915314 1.4075236 ... -2.3635042 0.6221016 -0.35548407] [ 0.505908 0.97087604 1.7437618 ... 1.6360486 0.51509863 1.7495099 ] ... [ 0.60100144 -1.2779723 1.2307252 ... 
-0.5196984 1.5084143 -0.47198614] [ 0.64200467 0.907698 0.8674708 ... -0.8721533 -0.8909184 0.95510185] [-2.1377518 -1.6472764 0.25470224 ... 0.09045854 1.224214 0.9473898 ]] [[ 1.6906646 0.6561178 1.7555228 ... 0.5536273 1.9378948 -0.9959316 ] [ 0.2572203 -1.3649069 1.2922107 ... 0.86773324 -0.77618057 -0.3846334 ] [-0.009956 0.44435182 -0.73868394 ... 0.65888774 1.8149499 -1.819611 ] ... [-0.22618967 -0.6348916 -0.3420444 ... 0.76071423 -0.27725908 0.11557652] [ 0.9371821 0.42806107 -0.02108633 ... 0.0469105 -2.299086 -0.55870175] [-0.6853127 0.6087116 -0.12542069 ... -0.0053856 1.0482585 -0.6652421 ]] ... [[ 1.4129059 0.90163994 0.2549206 ... -0.23011418 -0.07612499 0.6103966 ] [ 0.19608483 -0.3894197 -1.7951802 ... 0.3615215 1.8023393 -0.34347042] [-0.15137622 1.5709722 0.8404288 ... 0.42831412 0.38028216 -0.8016595 ] ... [ 0.8249242 1.2836554 -0.579062 ... -0.15052706 -0.7939476 0.72164786] [-0.38343996 0.08435509 -0.84207314 ... 0.12561792 2.045099 0.93311745] [-1.1439524 -2.0403092 -1.0299035 ... 0.46297762 -0.46401784 0.32830334]] [[ 0.36010662 2.135789 -0.8459631 ... 0.34789938 -0.5970141 0.50339997] [ 1.4387741 -0.06764319 -1.2201258 ... 0.8846658 0.01088595 -0.02517706] [-0.23277824 0.2689041 -0.5746001 ... -1.254082 1.0057639 -0.9490444 ] ... [-0.03897598 -0.38550204 0.40051374 ... -0.4619904 -0.7818044 -0.09334919] [ 0.9019387 0.5963832 -1.8810775 ... 0.22474946 -1.43362 0.5025986 ] [ 0.8716861 -0.06804165 -0.510222 ... -1.1956654 1.242732 0.35266227]] [[ 1.1347873 0.14022876 -2.2343893 ... 0.0173036 -0.90014654 -0.625508 ] [-0.8589523 -1.2216578 -2.7815473 ... -0.11798458 0.8790381 -0.15576471] [-0.1130908 -0.01062718 -0.5013113 ... 0.9836858 0.78606194 0.61432487] ... [ 1.9851582 0.43055296 -1.25724 ... -1.2427907 2.185847 0.4876864 ] [ 1.0327042 0.83478516 -0.60968465 ... 0.18608455 1.7962128 -1.495078 ] [-0.4874492 0.873172 -0.70948935 ... 0.83706814 0.972989 0.03470261]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:4 - params:(2, 1) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6098.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=4]() %self.axis : int = prim::Constant[value=1]() %self.split : int = prim::Constant[value=2]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.24127598 -0.6213284 1.5895162 ... 1.0255005 2.8520193 -0.23745157] [-0.18569458 0.32110927 -1.4699796 ... 0.16639394 -0.05148431 -1.1559318 ] [ 1.6939765 0.29000518 -1.4559971 ... 0.65698403 1.474571 -0.8446048 ] ... [-0.7443534 1.2095696 -1.7291018 ... -0.02083986 -1.7918035 0.09140112] [-0.21574679 0.9231925 0.10930745 ... -1.4576261 0.60991144 1.0547792 ] [-0.20319766 0.30674583 -1.0562432 ... -0.67317164 -0.3262681 0.74915355]] [[-0.12816273 1.3191849 -0.52070796 ... -0.14695795 0.90502036 -0.07697761] [ 0.56353486 1.5231866 -0.6565287 ... 1.2401497 0.00967865 0.5016901 ] [-0.63303393 0.5605331 0.21026565 ... 0.35685834 0.14618042 -0.03085889] ... [-0.34613115 -0.2858931 1.2250788 ... 0.32826585 -2.2013066 -1.2622844 ] [-2.7091599 0.03972352 -0.409041 ... -1.0665281 -0.3912812 -1.6780545 ] [-1.8880502 -0.4785238 0.93065006 ... 1.1828282 -1.1523004 -2.0496514 ]]]]; ov_res: [[[[ 0.24127598 -0.6213284 1.5895162 ... 1.0255005 2.8520193 -0.23745157] [-0.18569458 0.32110927 -1.4699796 ... 0.16639394 -0.05148431 -1.1559318 ] [ 1.6939765 0.29000518 -1.4559971 ... 0.65698403 1.474571 -0.8446048 ] ... [-0.7443534 1.2095696 -1.7291018 ... -0.02083986 -1.7918035 0.09140112] [-0.21574679 0.9231925 0.10930745 ... -1.4576261 0.60991144 1.0547792 ] [-0.20319766 0.30674583 -1.0562432 ... -0.67317164 -0.3262681 0.74915355]] [[-0.12816273 1.3191849 -0.52070796 ... 
-0.14695795 0.90502036 -0.07697761] [ 0.56353486 1.5231866 -0.6565287 ... 1.2401497 0.00967865 0.5016901 ] [-0.63303393 0.5605331 0.21026565 ... 0.35685834 0.14618042 -0.03085889] ... [-0.34613115 -0.2858931 1.2250788 ... 0.32826585 -2.2013066 -1.2622844 ] [-2.7091599 0.03972352 -0.409041 ... -1.0665281 -0.3912812 -1.6780545 ] [-1.8880502 -0.4785238 0.93065006 ... 1.1828282 -1.1523004 -2.0496514 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:4 - params:(45, 2) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6100.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=4]() %self.axis : int = prim::Constant[value=2]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-1.2001494 -0.70979226 -1.1499941 ... -0.24111307 -0.89332527 0.76095766] [-1.2408899 -0.79349613 1.1176075 ... -0.04786238 0.7149271 -1.3712553 ] [-0.68306154 -1.3117886 -0.43334576 ... -0.6918684 -0.45528245 -0.57728136] ... [ 1.755553 1.1672071 1.4851513 ... -0.9210835 0.7554943 0.4569938 ] [-0.9940772 0.27897212 1.1443309 ... 0.4884482 -1.4656018 -0.13066609] [-1.9208355 -1.3495324 -0.9127232 ... 0.851796 -0.7012677 -0.442235 ]] [[ 0.42772695 1.8594124 0.37486118 ... -1.0003943 -1.5022538 -1.5585524 ] [ 3.2701952 -0.4479003 -0.9061719 ... 0.62909937 -0.10834281 -0.62701356] [-1.15855 1.4426994 0.19390917 ... -0.08106973 -1.4365038 1.0446846 ] ... [-0.31580877 -0.9707401 1.3232678 ... -0.56021523 -0.79828984 -0.37471953] [-0.05203773 0.08357935 -2.5365274 ... 0.5868831 1.9652307 -0.06421903] [ 0.33149698 0.2602629 0.3498904 ... 1.3643945 -2.2460012 -0.7052283 ]] [[ 0.49499235 0.46918565 1.1167135 ... -1.4340551 -0.2428608 -0.39445975] [ 0.44333276 0.42847866 0.43524712 ... -0.22723727 0.03997459 -1.1571466 ] [-0.79215395 -0.37685302 1.4053408 ... 0.2921501 0.41192004 2.0870707 ] ... [ 0.27932367 0.46647844 1.248534 ... -0.6420225 -0.17008929 -1.2318206 ] [-1.1057322 0.40605584 1.795804 ... 0.6617969 -0.2664898 0.1745377 ] [ 0.2991584 1.3182895 0.35851106 ... 1.9877886 0.5638533 2.3037088 ]] ... [[-1.0798688 -1.2519372 -0.34131837 ... 
1.6059121 -0.2677086 -0.25037736] [-1.3026066 0.30758357 0.9049194 ... 0.24888122 -2.4331772 2.1696868 ] [-1.3885226 -0.25344977 -1.5060021 ... -0.28045833 -1.3772571 1.1072042 ] ... [ 1.2543752 1.4585665 0.39200196 ... -0.12716308 -0.00601656 -0.23762047] [-0.26473603 -0.8650889 -1.2850009 ... -1.0507436 0.21139885 -0.7851315 ] [-0.7932775 1.4296383 -1.5819286 ... 1.2801412 1.0842358 0.68517476]] [[-1.1810485 1.1076398 0.34849966 ... 0.08519076 0.29158276 1.4337547 ] [ 0.23264039 0.17333052 0.4701006 ... 1.0327626 1.3497416 0.49746433] [ 1.8244737 0.84105074 -0.10540423 ... 0.86030287 -0.08228804 -1.0045153 ] ... [ 0.3890415 0.18112712 -0.45651197 ... -0.6628942 0.46154574 1.334542 ] [-0.11084627 -0.262388 -0.0155757 ... -0.0346843 0.78286505 -0.36781564] [ 1.6816967 -0.1169427 -2.2167723 ... 0.9660344 -0.22437607 -0.2565578 ]] [[ 0.27050558 -0.5305695 -1.9231839 ... 0.1501776 -0.63634235 0.15210249] [-0.33614668 0.6217412 0.6805283 ... -2.7415931 2.0435064 -1.7011082 ] [ 1.3332988 0.7656481 -0.10552727 ... -0.40251783 -0.8080657 -0.47926548] ... [-0.32909033 -0.11777352 1.9435514 ... 0.27358434 0.5669926 -0.8249569 ] [ 1.2425187 0.3785434 0.65343887 ... 0.5995468 -1.260427 0.32027876] [-0.04547238 1.8666726 -1.4234467 ... 1.0664198 -0.2594991 0.5195343 ]]]]; ov_res: [[[[-1.2001494 -0.70979226 -1.1499941 ... -0.24111307 -0.89332527 0.76095766] [-1.2408899 -0.79349613 1.1176075 ... -0.04786238 0.7149271 -1.3712553 ] [-0.68306154 -1.3117886 -0.43334576 ... -0.6918684 -0.45528245 -0.57728136] ... [ 1.755553 1.1672071 1.4851513 ... -0.9210835 0.7554943 0.4569938 ] [-0.9940772 0.27897212 1.1443309 ... 0.4884482 -1.4656018 -0.13066609] [-1.9208355 -1.3495324 -0.9127232 ... 0.851796 -0.7012677 -0.442235 ]] [[ 0.42772695 1.8594124 0.37486118 ... -1.0003943 -1.5022538 -1.5585524 ] [ 3.2701952 -0.4479003 -0.9061719 ... 0.62909937 -0.10834281 -0.62701356] [-1.15855 1.4426994 0.19390917 ... -0.08106973 -1.4365038 1.0446846 ] ... [-0.31580877 -0.9707401 1.3232678 ... 
-0.56021523 -0.79828984 -0.37471953] [-0.05203773 0.08357935 -2.5365274 ... 0.5868831 1.9652307 -0.06421903] [ 0.33149698 0.2602629 0.3498904 ... 1.3643945 -2.2460012 -0.7052283 ]] [[ 0.49499235 0.46918565 1.1167135 ... -1.4340551 -0.2428608 -0.39445975] [ 0.44333276 0.42847866 0.43524712 ... -0.22723727 0.03997459 -1.1571466 ] [-0.79215395 -0.37685302 1.4053408 ... 0.2921501 0.41192004 2.0870707 ] ... [ 0.27932367 0.46647844 1.248534 ... -0.6420225 -0.17008929 -1.2318206 ] [-1.1057322 0.40605584 1.795804 ... 0.6617969 -0.2664898 0.1745377 ] [ 0.2991584 1.3182895 0.35851106 ... 1.9877886 0.5638533 2.3037088 ]] ... [[-1.0798688 -1.2519372 -0.34131837 ... 1.6059121 -0.2677086 -0.25037736] [-1.3026066 0.30758357 0.9049194 ... 0.24888122 -2.4331772 2.1696868 ] [-1.3885226 -0.25344977 -1.5060021 ... -0.28045833 -1.3772571 1.1072042 ] ... [ 1.2543752 1.4585665 0.39200196 ... -0.12716308 -0.00601656 -0.23762047] [-0.26473603 -0.8650889 -1.2850009 ... -1.0507436 0.21139885 -0.7851315 ] [-0.7932775 1.4296383 -1.5819286 ... 1.2801412 1.0842358 0.68517476]] [[-1.1810485 1.1076398 0.34849966 ... 0.08519076 0.29158276 1.4337547 ] [ 0.23264039 0.17333052 0.4701006 ... 1.0327626 1.3497416 0.49746433] [ 1.8244737 0.84105074 -0.10540423 ... 0.86030287 -0.08228804 -1.0045153 ] ... [ 0.3890415 0.18112712 -0.45651197 ... -0.6628942 0.46154574 1.334542 ] [-0.11084627 -0.262388 -0.0155757 ... -0.0346843 0.78286505 -0.36781564] [ 1.6816967 -0.1169427 -2.2167723 ... 0.9660344 -0.22437607 -0.2565578 ]] [[ 0.27050558 -0.5305695 -1.9231839 ... 0.1501776 -0.63634235 0.15210249] [-0.33614668 0.6217412 0.6805283 ... -2.7415931 2.0435064 -1.7011082 ] [ 1.3332988 0.7656481 -0.10552727 ... -0.40251783 -0.8080657 -0.47926548] ... [-0.32909033 -0.11777352 1.9435514 ... 0.27358434 0.5669926 -0.8249569 ] [ 1.2425187 0.3785434 0.65343887 ... 0.5995468 -1.260427 0.32027876] [-0.04547238 1.8666726 -1.4234467 ... 1.0664198 -0.2594991 0.5195343 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:4 - params:(45, -1) ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6102.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=4]() %self.axis : int = prim::Constant[value=-1]() %self.split : int = prim::Constant[value=45]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-1.6121374 -2.320797 -0.12777828 ... 0.62038046 -0.6621991 0.06612687] [ 0.7725776 0.3578066 0.3941817 ... -0.9119697 -0.15809086 -0.01583554] [-0.90862113 -3.4373832 -1.1484731 ... -0.5102991 0.06164977 -0.06319651] ... [ 0.9053664 -0.6891491 0.25827095 ... 0.853257 0.6434919 1.2328793 ] [ 0.8138315 -0.02467463 -0.6115497 ... -1.2478505 -0.3368248 -0.60758656] [ 1.565631 -1.6504719 1.0155649 ... 1.3456073 -1.9075551 -0.36123744]] [[ 0.42955422 0.07742966 -1.0598354 ... 0.73295957 0.7213789 0.6598284 ] [ 0.26577577 0.1355812 0.28111377 ... -0.30716732 -0.01426377 0.59836096] [-0.06918857 2.5495846 -1.5129207 ... 1.1437157 0.5278545 1.5855163 ] ... [-0.8592409 0.6877554 -0.36193115 ... -0.07196964 -0.9832376 1.1052642 ] [-1.1767826 -0.90962595 -0.37928775 ... -0.27483395 -0.6933114 0.71586585] [ 0.17212418 0.43705732 -0.26942334 ... 0.01549201 -0.9397208 -1.1188089 ]] [[-0.1411991 1.597343 -1.2292122 ... 0.25263315 -0.3219553 -1.3295343 ] [ 1.2034647 0.25291613 0.10319986 ... 0.6552272 -0.05227821 -1.2219653 ] [-0.1106384 -0.61372304 -0.05866358 ... -0.60542786 0.65706486 -0.8799372 ] ... [ 0.10743452 0.84326863 -0.2877105 ... 1.5117012 0.5972984 -0.99440503] [-0.07674786 0.4459387 0.0831135 ... -0.6462614 0.3300518 -0.3440599 ] [-0.22988516 0.42632356 1.0929574 ... -0.984554 -1.6099709 -1.116427 ]] ... [[ 0.04602854 -0.42738977 -0.60758966 ... 
-0.8893843 0.40945134 -0.5631406 ] [-1.3298776 1.0351518 -0.61086804 ... -0.38453084 0.38696924 0.5543794 ] [ 1.3618796 -0.10862248 1.1330522 ... 0.83224916 0.78027606 0.4436635 ] ... [-0.03951026 0.41263318 -1.5870628 ... 1.0023633 -1.8652775 0.2875775 ] [-0.00468784 -1.0815346 -0.8002042 ... -0.6087164 -0.37924677 -0.12563486] [-0.33199 -0.819235 0.4709601 ... 0.936148 -0.7097669 0.73008156]] [[ 0.12720527 -0.36036673 2.4218092 ... 0.5264337 -0.4579509 -0.19304346] [-0.21959284 -2.30479 -0.744293 ... -1.3210112 -0.35229555 2.097825 ] [ 0.02188201 0.43640676 0.50020736 ... -0.23271069 -1.0594472 -0.5921795 ] ... [-1.5069137 0.1051747 -1.1687337 ... 0.4216218 -0.66475755 -0.11734676] [ 0.2031612 0.20659524 1.0043074 ... -1.3286017 -1.0570415 0.38743064] [ 2.8230054 -0.22140066 -0.11518745 ... -0.4405762 -0.3947986 -0.2791116 ]] [[ 0.61322284 0.15441573 0.48044407 ... -1.1187495 -0.11661541 0.2599609 ] [-1.4095745 0.23297128 -1.4529617 ... -0.5880231 0.6183468 0.86404413] [-1.2785926 -1.1220272 1.9235086 ... 1.3475152 0.97073644 0.80511147] ... [ 0.4440206 1.5309916 0.04926652 ... 0.64972264 0.6092808 0.24244188] [-0.08576483 0.45581913 -0.3254697 ... 0.00764493 0.8624865 1.0222712 ] [-0.4178879 -1.1132482 0.69925314 ... 1.2901175 -0.45871842 -0.7410049 ]]]]; ov_res: [[[[-1.6121374 -2.320797 -0.12777828 ... 0.62038046 -0.6621991 0.06612687] [ 0.7725776 0.3578066 0.3941817 ... -0.9119697 -0.15809086 -0.01583554] [-0.90862113 -3.4373832 -1.1484731 ... -0.5102991 0.06164977 -0.06319651] ... [ 0.9053664 -0.6891491 0.25827095 ... 0.853257 0.6434919 1.2328793 ] [ 0.8138315 -0.02467463 -0.6115497 ... -1.2478505 -0.3368248 -0.60758656] [ 1.565631 -1.6504719 1.0155649 ... 1.3456073 -1.9075551 -0.36123744]] [[ 0.42955422 0.07742966 -1.0598354 ... 0.73295957 0.7213789 0.6598284 ] [ 0.26577577 0.1355812 0.28111377 ... -0.30716732 -0.01426377 0.59836096] [-0.06918857 2.5495846 -1.5129207 ... 1.1437157 0.5278545 1.5855163 ] ... [-0.8592409 0.6877554 -0.36193115 ... 
-0.07196964 -0.9832376 1.1052642 ] [-1.1767826 -0.90962595 -0.37928775 ... -0.27483395 -0.6933114 0.71586585] [ 0.17212418 0.43705732 -0.26942334 ... 0.01549201 -0.9397208 -1.1188089 ]] [[-0.1411991 1.597343 -1.2292122 ... 0.25263315 -0.3219553 -1.3295343 ] [ 1.2034647 0.25291613 0.10319986 ... 0.6552272 -0.05227821 -1.2219653 ] [-0.1106384 -0.61372304 -0.05866358 ... -0.60542786 0.65706486 -0.8799372 ] ... [ 0.10743452 0.84326863 -0.2877105 ... 1.5117012 0.5972984 -0.99440503] [-0.07674786 0.4459387 0.0831135 ... -0.6462614 0.3300518 -0.3440599 ] [-0.22988516 0.42632356 1.0929574 ... -0.984554 -1.6099709 -1.116427 ]] ... [[ 0.04602854 -0.42738977 -0.60758966 ... -0.8893843 0.40945134 -0.5631406 ] [-1.3298776 1.0351518 -0.61086804 ... -0.38453084 0.38696924 0.5543794 ] [ 1.3618796 -0.10862248 1.1330522 ... 0.83224916 0.78027606 0.4436635 ] ... [-0.03951026 0.41263318 -1.5870628 ... 1.0023633 -1.8652775 0.2875775 ] [-0.00468784 -1.0815346 -0.8002042 ... -0.6087164 -0.37924677 -0.12563486] [-0.33199 -0.819235 0.4709601 ... 0.936148 -0.7097669 0.73008156]] [[ 0.12720527 -0.36036673 2.4218092 ... 0.5264337 -0.4579509 -0.19304346] [-0.21959284 -2.30479 -0.744293 ... -1.3210112 -0.35229555 2.097825 ] [ 0.02188201 0.43640676 0.50020736 ... -0.23271069 -1.0594472 -0.5921795 ] ... [-1.5069137 0.1051747 -1.1687337 ... 0.4216218 -0.66475755 -0.11734676] [ 0.2031612 0.20659524 1.0043074 ... -1.3286017 -1.0570415 0.38743064] [ 2.8230054 -0.22140066 -0.11518745 ... -0.4405762 -0.3947986 -0.2791116 ]] [[ 0.61322284 0.15441573 0.48044407 ... -1.1187495 -0.11661541 0.2599609 ] [-1.4095745 0.23297128 -1.4529617 ... -0.5880231 0.6183468 0.86404413] [-1.2785926 -1.1220272 1.9235086 ... 1.3475152 0.97073644 0.80511147] ... [ 0.4440206 1.5309916 0.04926652 ... 0.64972264 0.6092808 0.24244188] [-0.08576483 0.45581913 -0.3254697 ... 0.00764493 0.8624865 1.0222712 ] [-0.4178879 -1.1132482 0.69925314 ... 1.2901175 -0.45871842 -0.7410049 ]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:4 - params:([2, 2, 2, 2, 2], 1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6104.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=4]() %self.axis : int = prim::Constant[value=1]() %self.split : int[] = prim::Constant[value=[2, 2, 2, 2, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-1.340926 1.3599848 -0.46477106 ... 1.5181777 -1.2816441 -0.4112886 ] [ 0.34163836 2.301597 1.101492 ... -1.8950831 0.44400975 0.23131798] [-0.9743508 1.3590001 0.88628405 ... 0.07785707 -0.35687956 -0.43863782] ... [-2.685197 0.5626853 2.5201035 ... 0.38455322 -0.9599888 0.49882838] [-1.7337159 -1.7635363 0.11468633 ... 1.8582664 -0.42687392 0.32178363] [-1.4941672 -0.2536492 -0.18288486 ... -0.8261985 -0.46446186 0.99017566]] [[ 2.7077374 -0.10217942 -0.7142679 ... 0.5395271 0.1156162 1.1179386 ] [-1.3754736 1.1807734 0.71368206 ... 0.12145301 1.5164644 -1.32684 ] [-1.0718132 0.8365928 -1.432792 ... 0.62872523 -0.05429396 -0.43638772] ... [ 0.82818675 0.3380705 -1.6664984 ... 0.22748174 -0.00319144 1.030615 ] [-0.5407522 -0.10669833 0.44902343 ... 0.3160811 -0.16601098 -0.3062403 ] [-0.5578192 1.7178425 1.2688414 ... 0.50679386 -1.05676 0.6072308 ]]]]; ov_res: [[[[-1.340926 1.3599848 -0.46477106 ... 1.5181777 -1.2816441 -0.4112886 ] [ 0.34163836 2.301597 1.101492 ... -1.8950831 0.44400975 0.23131798] [-0.9743508 1.3590001 0.88628405 ... 0.07785707 -0.35687956 -0.43863782] ... [-2.685197 0.5626853 2.5201035 ... 0.38455322 -0.9599888 0.49882838] [-1.7337159 -1.7635363 0.11468633 ... 1.8582664 -0.42687392 0.32178363] [-1.4941672 -0.2536492 -0.18288486 ... -0.8261985 -0.46446186 0.99017566]] [[ 2.7077374 -0.10217942 -0.7142679 ... 
0.5395271 0.1156162 1.1179386 ] [-1.3754736 1.1807734 0.71368206 ... 0.12145301 1.5164644 -1.32684 ] [-1.0718132 0.8365928 -1.432792 ... 0.62872523 -0.05429396 -0.43638772] ... [ 0.82818675 0.3380705 -1.6664984 ... 0.22748174 -0.00319144 1.030615 ] [-0.5407522 -0.10669833 0.44902343 ... 0.3160811 -0.16601098 -0.3062403 ] [-0.5578192 1.7178425 1.2688414 ... 0.50679386 -1.05676 0.6072308 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:4 - params:([200, 20, 1, 1, 2], 2) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6106.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=4]() %self.axis : int = prim::Constant[value=2]() %self.split : int[] = prim::Constant[value=[200, 20, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[-0.3266748 -0.20625874 0.03800474 ... -0.99962443 -0.31447938 -0.78527564] [ 0.22983247 0.01419787 0.03612345 ... 0.54486805 -0.4368424 -0.3102766 ]] [[ 1.526704 0.09655199 0.507622 ... 0.6491135 -0.6009457 -0.41559187] [-0.8374289 -0.33390856 -0.44789198 ... 1.0244167 0.673074 1.7753056 ]] [[-0.79511553 1.1607668 -2.0696232 ... -0.34200653 -0.19321916 -0.14651914] [-1.5348039 1.810909 0.36635715 ... -0.6501977 -1.2089614 0.09698913]] ... [[-1.3505969 1.0854172 -0.6317626 ... -1.3197337 0.9922153 -0.45773026] [-0.02598953 -1.6500555 0.27361083 ... 1.5558718 -0.4711954 -1.0452254 ]] [[-1.3815242 0.6751196 -0.3106754 ... -1.609334 0.96621 2.4354043 ] [ 1.1926041 -0.74471754 0.15751077 ... -0.92055994 -1.7113748 0.48798013]] [[ 0.60916764 0.43451184 0.06986912 ... 1.8852264 1.1909543 0.78196925] [-2.3315284 -0.15991199 -0.26671103 ... -0.17903589 1.8182029 -1.8175075 ]]]]; ov_res: [[[[-0.3266748 -0.20625874 0.03800474 ... -0.99962443 -0.31447938 -0.78527564] [ 0.22983247 0.01419787 0.03612345 ... 0.54486805 -0.4368424 -0.3102766 ]] [[ 1.526704 0.09655199 0.507622 ... 0.6491135 -0.6009457 -0.41559187] [-0.8374289 -0.33390856 -0.44789198 ... 1.0244167 0.673074 1.7753056 ]] [[-0.79511553 1.1607668 -2.0696232 ... -0.34200653 -0.19321916 -0.14651914] [-1.5348039 1.810909 0.36635715 ... -0.6501977 -1.2089614 0.09698913]] ... [[-1.3505969 1.0854172 -0.6317626 ... 
-1.3197337 0.9922153 -0.45773026] [-0.02598953 -1.6500555 0.27361083 ... 1.5558718 -0.4711954 -1.0452254 ]] [[-1.3815242 0.6751196 -0.3106754 ... -1.609334 0.96621 2.4354043 ] [ 1.1926041 -0.74471754 0.15751077 ... -0.92055994 -1.7113748 0.48798013]] [[ 0.60916764 0.43451184 0.06986912 ... 1.8852264 1.1909543 0.78196925] [-2.3315284 -0.15991199 -0.26671103 ... -0.17903589 1.8182029 -1.8175075 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_getitem[ ie_device:CPU - precision:FP32 - getitem:4 - params:([20, 200, 1, 1, 2], -1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6108.aten_split, %input.1 : Tensor): %self.getitem : int = prim::Constant[value=4]() %self.axis : int = prim::Constant[value=-1]() %self.split : int[] = prim::Constant[value=[20, 200, 1, 1, 2]]() %5 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 %6 : Tensor = aten::__getitem__(%5, %self.getitem) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:24:23 return (%6) fw_re: [[[[ 0.77460504 0.57396126] [-0.25554714 0.59471256] [-0.5038648 0.1162532 ] ... [ 0.69826204 0.1295047 ] [-0.25799242 -0.49592546] [ 0.31038496 0.27649453]] [[ 0.197846 -0.0788187 ] [ 0.67645025 -0.84156466] [-0.7261126 -0.48148882] ... [-0.71427304 -2.8010187 ] [-0.30870482 -1.141321 ] [-0.7063985 1.2119231 ]] [[-0.72080296 0.19134942] [-1.6379877 -0.7926701 ] [-1.1118592 0.89249337] ... [ 0.24496223 0.52668196] [ 1.3440927 0.63361627] [-1.487567 0.37224397]] ... [[ 0.8812053 0.27766988] [-1.073974 0.54614997] [-1.8034614 0.22391126] ... [-1.0341 0.4854364 ] [-1.8202994 -0.6742565 ] [ 1.1004221 0.45551807]] [[-0.71079063 0.21742415] [ 1.5840786 0.21764125] [-0.66424876 0.09313367] ... [ 0.4854022 0.47489604] [ 0.7211945 -1.2667909 ] [-0.05683554 0.90654397]] [[ 0.09895603 -0.53080875] [ 0.6471256 -1.2746093 ] [ 0.96331996 -2.1144278 ] ... [ 1.2104143 -0.02525219] [-1.2439681 0.6441153 ] [-0.67318386 -1.922294 ]]]]; ov_res: [[[[ 0.77460504 0.57396126] [-0.25554714 0.59471256] [-0.5038648 0.1162532 ] ... [ 0.69826204 0.1295047 ] [-0.25799242 -0.49592546] [ 0.31038496 0.27649453]] [[ 0.197846 -0.0788187 ] [ 0.67645025 -0.84156466] [-0.7261126 -0.48148882] ... [-0.71427304 -2.8010187 ] [-0.30870482 -1.141321 ] [-0.7063985 1.2119231 ]] [[-0.72080296 0.19134942] [-1.6379877 -0.7926701 ] [-1.1118592 0.89249337] ... 
[ 0.24496223 0.52668196] [ 1.3440927 0.63361627] [-1.487567 0.37224397]] ... [[ 0.8812053 0.27766988] [-1.073974 0.54614997] [-1.8034614 0.22391126] ... [-1.0341 0.4854364 ] [-1.8202994 -0.6742565 ] [ 1.1004221 0.45551807]] [[-0.71079063 0.21742415] [ 1.5840786 0.21764125] [-0.66424876 0.09313367] ... [ 0.4854022 0.47489604] [ 0.7211945 -1.2667909 ] [-0.05683554 0.90654397]] [[ 0.09895603 -0.53080875] [ 0.6471256 -1.2746093 ] [ 0.96331996 -2.1144278 ] ... [ 1.2104143 -0.02525219] [-1.2439681 0.6441153 ] [-0.67318386 -1.922294 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_listunpack[ ie_device:CPU - precision:FP32 - params:(2, 1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6110.aten_split, %input.1 : Tensor): %self.axis : int = prim::Constant[value=1]() %self.split : int = prim::Constant[value=2]() %4 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:43:32 %a : Tensor, %b.1 : Tensor, %c : Tensor, %d : Tensor, %e : Tensor = prim::ListUnpack(%4) return (%b.1) fw_re: [[[[ 0.07268826 1.1994803 0.00862636 ... -0.09035812 -0.98478943 -0.8966788 ] [ 1.0737232 -0.462209 0.5493875 ... -0.76521856 0.38775185 -0.39810956] [ 1.2885743 0.20847103 0.36323717 ... 1.1043758 -0.52031595 -0.6055643 ] ... [ 0.41807574 0.28770128 -1.6415317 ... -0.5024171 -1.1357446 -0.57625395] [-0.12016411 0.19083789 -0.41772228 ... -0.13127884 0.03567441 0.5849004 ] [-0.4229615 -1.6867498 -0.24755004 ... 0.53716874 -1.099313 0.10415862]] [[-1.5008321 0.7357735 -0.46776095 ... -0.10763809 -0.44700173 -1.4585367 ] [-0.17272867 -0.61625654 -0.5355332 ... -0.5411584 -0.4746829 0.40828255] [ 0.7543193 -0.8919996 -1.5023061 ... 0.00679787 -0.05128777 0.47971517] ... [-0.68069124 -0.08248679 0.4356354 ... 0.8490109 -0.28246507 2.26896 ] [-0.17507839 -0.61388975 1.0795418 ... 1.0595815 0.01263465 -0.37397936] [ 1.3837157 1.2096616 0.83383954 ... 0.13067463 -1.1385828 2.2404437 ]]]]; ov_res: [[[[ 0.07268826 1.1994803 0.00862636 ... -0.09035812 -0.98478943 -0.8966788 ] [ 1.0737232 -0.462209 0.5493875 ... -0.76521856 0.38775185 -0.39810956] [ 1.2885743 0.20847103 0.36323717 ... 1.1043758 -0.52031595 -0.6055643 ] ... [ 0.41807574 0.28770128 -1.6415317 ... -0.5024171 -1.1357446 -0.57625395] [-0.12016411 0.19083789 -0.41772228 ... -0.13127884 0.03567441 0.5849004 ] [-0.4229615 -1.6867498 -0.24755004 ... 0.53716874 -1.099313 0.10415862]] [[-1.5008321 0.7357735 -0.46776095 ... -0.10763809 -0.44700173 -1.4585367 ] [-0.17272867 -0.61625654 -0.5355332 ... 
-0.5411584 -0.4746829 0.40828255] [ 0.7543193 -0.8919996 -1.5023061 ... 0.00679787 -0.05128777 0.47971517] ... [-0.68069124 -0.08248679 0.4356354 ... 0.8490109 -0.28246507 2.26896 ] [-0.17507839 -0.61388975 1.0795418 ... 1.0595815 0.01263465 -0.37397936] [ 1.3837157 1.2096616 0.83383954 ... 0.13067463 -1.1385828 2.2404437 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_listunpack[ ie_device:CPU - precision:FP32 - params:(45, 2) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6112.aten_split, %input.1 : Tensor): %self.axis : int = prim::Constant[value=2]() %self.split : int = prim::Constant[value=45]() %4 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:43:32 %a : Tensor, %b.1 : Tensor, %c : Tensor, %d : Tensor, %e : Tensor = prim::ListUnpack(%4) return (%b.1) fw_re: [[[[ 6.31197870e-01 -1.45497277e-01 6.37082279e-01 ... 5.56824803e-01 8.34067404e-01 -7.95161724e-01] [ 8.98878157e-01 -1.44788778e+00 1.55251384e+00 ... 1.32132113e+00 2.30915368e-01 1.88463032e+00] [-1.64559150e+00 -5.83129108e-01 7.16046751e-01 ... -1.09743610e-01 -1.74523264e-01 1.10157716e+00] ... [-9.56554294e-01 -2.76578695e-01 -3.23361307e-02 ... 8.16500545e-01 -7.24005401e-02 1.13474941e+00] [-1.81618810e+00 -5.42934358e-01 1.70806444e+00 ... -1.08559668e-01 4.42080587e-01 -1.56345367e+00] [ 7.24952400e-01 9.41953897e-01 5.78171492e-01 ... -1.43158615e-01 -5.79391360e-01 3.82432062e-03]] [[-6.25441432e-01 -7.50205696e-01 -1.33371055e+00 ... -7.36436486e-01 7.05703139e-01 1.15329146e+00] [-1.65002167e+00 -4.12722647e-01 -9.17705238e-01 ... 1.17025578e+00 -7.76784480e-01 1.84648132e+00] [ 1.95625439e-01 -1.36094260e+00 5.34461439e-01 ... -4.51323837e-01 -1.44603896e+00 4.86650348e-01] ... [ 4.86225337e-01 -2.35398486e-01 -1.31459773e+00 ... -1.88501453e+00 7.64473259e-01 -6.98200643e-01] [ 1.12294316e+00 2.33132735e-01 4.62991476e-01 ... -1.63885975e+00 4.78992283e-01 -2.17151165e+00] [-2.13922814e-01 6.08147681e-03 -1.18556738e+00 ... -1.22491932e+00 2.97722723e-02 7.31956840e-01]] [[-6.97042346e-01 5.97249091e-01 1.11875951e+00 ... 1.56969666e+00 -4.07751799e-01 9.46898103e-01] [ 7.77764320e-01 -4.16604137e+00 1.79096431e-01 ... 1.84549940e+00 -1.45787251e+00 1.49931049e+00] [-7.17832983e-01 -6.22605443e-01 -3.84752043e-02 ... 
-2.49294251e-01 1.07393253e+00 7.00496912e-01] ... [-8.73919189e-01 8.61248016e-01 8.47320735e-01 ... 6.21712767e-03 -1.46156800e+00 -1.04970086e+00] [ 2.80452937e-01 1.06199360e+00 1.74041772e+00 ... 1.11402285e+00 2.30627036e+00 -1.43859088e+00] [-1.32674843e-01 -8.89252543e-01 8.69562387e-01 ... -7.69532919e-01 -5.43596148e-01 -1.00564098e+00]] ... [[ 2.32648268e-01 5.20306885e-01 -4.80855644e-01 ... -2.87541240e-01 -2.73129761e-01 1.07109821e+00] [-8.20220113e-01 -1.57461500e+00 9.65547204e-01 ... -2.22777337e-01 -2.03176093e+00 3.72249454e-01] [ 1.23566389e+00 9.91351008e-01 1.44011274e-01 ... -1.57821715e-01 2.73307174e-01 5.34854650e-01] ... [-9.35307071e-02 -9.40780997e-01 2.57662445e-01 ... -1.47741735e+00 -1.79247165e+00 -1.32369792e+00] [-2.28331947e+00 1.26821423e+00 1.00108910e+00 ... -8.66142333e-01 1.55674708e+00 -1.52146649e+00] [-5.66355705e-01 7.34801531e-01 -7.27075756e-01 ... 1.69162691e+00 1.20989239e+00 -4.75911170e-01]] [[ 1.84779167e-01 5.96254766e-01 8.18367898e-01 ... -5.39033711e-01 5.25467515e-01 -1.50623620e-02] [-7.69026697e-01 1.76984620e+00 -1.00540304e+00 ... -1.71706855e+00 1.57938457e+00 -8.96997929e-01] [ 8.30560699e-02 3.46753091e-01 -1.55414566e-01 ... -9.13398743e-01 -3.02877009e-01 2.81484932e-01] ... [ 1.23993134e+00 6.31522119e-01 -5.39387465e-01 ... 1.06602144e+00 -4.74172264e-01 1.45812345e+00] [ 6.11135185e-01 -3.12357217e-01 1.77855507e-01 ... -6.55030906e-01 -5.58010161e-01 1.79932177e+00] [-7.58286938e-03 -1.55075502e+00 -5.01645088e-01 ... 3.58227074e-01 -5.10886908e-01 9.75643933e-01]] [[ 1.90498203e-01 -2.41317630e-01 -2.72437668e+00 ... 8.11419338e-02 7.75076270e-01 1.11215150e+00] [ 2.87994325e-01 -1.05215386e-01 1.50325489e+00 ... 1.17158782e+00 8.71902525e-01 6.24031305e-01] [-8.29040408e-01 -5.61820269e-01 -1.68604052e+00 ... -4.07579362e-01 2.00421262e+00 -4.93587613e-01] ... [-1.21409523e+00 -2.16764164e+00 -6.67503357e-01 ... 
7.43019402e-01 -1.60456896e-01 2.36582851e+00] [-2.33502537e-01 -7.76713550e-01 -8.44552338e-01 ... 4.47705418e-01 1.41056788e+00 -7.80014634e-01] [ 1.33703321e-01 2.40816876e-01 -5.03508985e-01 ... -3.98882985e-01 1.33719850e+00 2.30855793e-02]]]]; ov_res: [[[[ 6.31197870e-01 -1.45497277e-01 6.37082279e-01 ... 5.56824803e-01 8.34067404e-01 -7.95161724e-01] [ 8.98878157e-01 -1.44788778e+00 1.55251384e+00 ... 1.32132113e+00 2.30915368e-01 1.88463032e+00] [-1.64559150e+00 -5.83129108e-01 7.16046751e-01 ... -1.09743610e-01 -1.74523264e-01 1.10157716e+00] ... [-9.56554294e-01 -2.76578695e-01 -3.23361307e-02 ... 8.16500545e-01 -7.24005401e-02 1.13474941e+00] [-1.81618810e+00 -5.42934358e-01 1.70806444e+00 ... -1.08559668e-01 4.42080587e-01 -1.56345367e+00] [ 7.24952400e-01 9.41953897e-01 5.78171492e-01 ... -1.43158615e-01 -5.79391360e-01 3.82432062e-03]] [[-6.25441432e-01 -7.50205696e-01 -1.33371055e+00 ... -7.36436486e-01 7.05703139e-01 1.15329146e+00] [-1.65002167e+00 -4.12722647e-01 -9.17705238e-01 ... 1.17025578e+00 -7.76784480e-01 1.84648132e+00] [ 1.95625439e-01 -1.36094260e+00 5.34461439e-01 ... -4.51323837e-01 -1.44603896e+00 4.86650348e-01] ... [ 4.86225337e-01 -2.35398486e-01 -1.31459773e+00 ... -1.88501453e+00 7.64473259e-01 -6.98200643e-01] [ 1.12294316e+00 2.33132735e-01 4.62991476e-01 ... -1.63885975e+00 4.78992283e-01 -2.17151165e+00] [-2.13922814e-01 6.08147681e-03 -1.18556738e+00 ... -1.22491932e+00 2.97722723e-02 7.31956840e-01]] [[-6.97042346e-01 5.97249091e-01 1.11875951e+00 ... 1.56969666e+00 -4.07751799e-01 9.46898103e-01] [ 7.77764320e-01 -4.16604137e+00 1.79096431e-01 ... 1.84549940e+00 -1.45787251e+00 1.49931049e+00] [-7.17832983e-01 -6.22605443e-01 -3.84752043e-02 ... -2.49294251e-01 1.07393253e+00 7.00496912e-01] ... [-8.73919189e-01 8.61248016e-01 8.47320735e-01 ... 6.21712767e-03 -1.46156800e+00 -1.04970086e+00] [ 2.80452937e-01 1.06199360e+00 1.74041772e+00 ... 
1.11402285e+00 2.30627036e+00 -1.43859088e+00] [-1.32674843e-01 -8.89252543e-01 8.69562387e-01 ... -7.69532919e-01 -5.43596148e-01 -1.00564098e+00]] ... [[ 2.32648268e-01 5.20306885e-01 -4.80855644e-01 ... -2.87541240e-01 -2.73129761e-01 1.07109821e+00] [-8.20220113e-01 -1.57461500e+00 9.65547204e-01 ... -2.22777337e-01 -2.03176093e+00 3.72249454e-01] [ 1.23566389e+00 9.91351008e-01 1.44011274e-01 ... -1.57821715e-01 2.73307174e-01 5.34854650e-01] ... [-9.35307071e-02 -9.40780997e-01 2.57662445e-01 ... -1.47741735e+00 -1.79247165e+00 -1.32369792e+00] [-2.28331947e+00 1.26821423e+00 1.00108910e+00 ... -8.66142333e-01 1.55674708e+00 -1.52146649e+00] [-5.66355705e-01 7.34801531e-01 -7.27075756e-01 ... 1.69162691e+00 1.20989239e+00 -4.75911170e-01]] [[ 1.84779167e-01 5.96254766e-01 8.18367898e-01 ... -5.39033711e-01 5.25467515e-01 -1.50623620e-02] [-7.69026697e-01 1.76984620e+00 -1.00540304e+00 ... -1.71706855e+00 1.57938457e+00 -8.96997929e-01] [ 8.30560699e-02 3.46753091e-01 -1.55414566e-01 ... -9.13398743e-01 -3.02877009e-01 2.81484932e-01] ... [ 1.23993134e+00 6.31522119e-01 -5.39387465e-01 ... 1.06602144e+00 -4.74172264e-01 1.45812345e+00] [ 6.11135185e-01 -3.12357217e-01 1.77855507e-01 ... -6.55030906e-01 -5.58010161e-01 1.79932177e+00] [-7.58286938e-03 -1.55075502e+00 -5.01645088e-01 ... 3.58227074e-01 -5.10886908e-01 9.75643933e-01]] [[ 1.90498203e-01 -2.41317630e-01 -2.72437668e+00 ... 8.11419338e-02 7.75076270e-01 1.11215150e+00] [ 2.87994325e-01 -1.05215386e-01 1.50325489e+00 ... 1.17158782e+00 8.71902525e-01 6.24031305e-01] [-8.29040408e-01 -5.61820269e-01 -1.68604052e+00 ... -4.07579362e-01 2.00421262e+00 -4.93587613e-01] ... [-1.21409523e+00 -2.16764164e+00 -6.67503357e-01 ... 7.43019402e-01 -1.60456896e-01 2.36582851e+00] [-2.33502537e-01 -7.76713550e-01 -8.44552338e-01 ... 4.47705418e-01 1.41056788e+00 -7.80014634e-01] [ 1.33703321e-01 2.40816876e-01 -5.03508985e-01 ... -3.98882985e-01 1.33719850e+00 2.30855793e-02]]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_listunpack[ ie_device:CPU - precision:FP32 - params:(45, -1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6114.aten_split, %input.1 : Tensor): %self.axis : int = prim::Constant[value=-1]() %self.split : int = prim::Constant[value=45]() %4 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:43:32 %a : Tensor, %b.1 : Tensor, %c : Tensor, %d : Tensor, %e : Tensor = prim::ListUnpack(%4) return (%b.1) fw_re: [[[[ 0.5545692 -0.31731826 0.3734374 ... -1.1089727 0.6116172 -1.6857588 ] [-1.1328112 0.37681246 1.3324398 ... -1.5212556 -1.0533903 -1.064914 ] [-1.0797782 0.84240806 0.02520155 ... -1.1901625 0.38600037 -1.7304949 ] ... [ 0.35621086 -0.93609273 -1.2213372 ... 1.0904927 -0.40673497 -1.2016314 ] [-0.522519 -1.0663732 -1.7436829 ... -1.1327441 -0.88501537 1.1551471 ] [ 0.62745893 0.2575848 -0.38952276 ... -0.21102256 0.27793694 -0.77743584]] [[-0.2770956 0.71346 -0.6142785 ... -0.34570253 -0.11283498 0.9089022 ] [ 0.6145185 1.4517633 0.95578206 ... 2.0119755 1.1730871 0.43440974] [ 0.3587272 -0.23008628 -0.4898962 ... 0.01276089 0.5512395 0.9887908 ] ... [-0.40870073 0.9224677 -0.2092403 ... -0.43294662 -0.63584965 0.7575249 ] [ 0.59033144 -0.65471476 -1.786362 ... -1.6213351 -0.39417663 0.47172597] [-1.7664363 1.5249447 0.48193157 ... 0.37782812 0.6529672 -0.22281297]] [[-0.5550112 1.0233303 -0.8262798 ... -1.5418035 -1.1880056 -1.0040915 ] [ 1.8611284 -0.7076632 -2.1543434 ... 1.4572906 0.00964536 0.43516093] [-0.4026776 -1.3033807 0.05445632 ... -0.3370907 -1.0432009 -0.34503615] ... [-0.19680679 -0.69661397 0.8033572 ... 0.9128569 0.67208385 -0.33224934] [ 1.2229369 2.5344162 -0.50895226 ... 1.2769994 2.9393415 -0.42741886] [ 1.8571923 -0.97695357 -0.38042057 ... -0.1938698 -1.082663 0.213546 ]] ... [[-0.507283 -1.739256 -0.0640699 ... 1.7441283 0.48135754 0.7762076 ] [-0.67506844 1.2210984 -0.8592419 ... 
-1.4975652 -0.30812967 0.22256759] [-0.1920346 -1.4015175 0.21726355 ... 0.46225706 -0.5136493 0.966615 ] ... [ 0.05771794 -1.1760138 0.11782363 ... -0.03199675 1.7171385 0.13363254] [ 1.2355943 0.13872018 2.4251258 ... 0.748539 0.36539873 0.21106935] [-0.46752006 -0.95988834 -1.3785648 ... 1.43622 -0.3643242 -1.1366711 ]] [[-1.8341138 0.8487117 0.69174814 ... 1.1860658 0.10009521 0.6084068 ] [-0.69709796 1.0251695 0.04902723 ... 0.20046611 -0.2411183 0.00790269] [ 0.2642507 -1.6160932 0.8694044 ... -0.53473353 -0.3594812 -0.1462754 ] ... [ 1.0855858 -2.9158263 -0.38790444 ... 0.40243924 0.6197926 1.794102 ] [ 1.5926179 1.4348973 0.24787122 ... -1.5080204 0.31037956 -0.37979916] [-1.4060795 0.05543493 -0.41726825 ... 0.8998655 -0.14963816 -1.5204027 ]] [[ 0.8482491 0.365753 0.7676952 ... 0.9261291 -1.4202124 -1.2826399 ] [-1.3448533 1.9016658 -1.2422042 ... -1.3951932 -0.19938691 -0.46197313] [ 0.9434162 0.28485027 0.45418066 ... 0.7376031 0.6093298 -0.3527049 ] ... [ 0.32396564 0.18628035 -0.14564148 ... -1.8532908 -0.14742373 -0.8047957 ] [-0.7971723 -0.6817226 -1.9231812 ... 0.5896601 0.20631498 -2.0858557 ] [ 0.7042526 0.7107307 -0.85799587 ... -0.71820736 2.064431 -0.8018325 ]]]]; ov_res: [[[[ 0.5545692 -0.31731826 0.3734374 ... -1.1089727 0.6116172 -1.6857588 ] [-1.1328112 0.37681246 1.3324398 ... -1.5212556 -1.0533903 -1.064914 ] [-1.0797782 0.84240806 0.02520155 ... -1.1901625 0.38600037 -1.7304949 ] ... [ 0.35621086 -0.93609273 -1.2213372 ... 1.0904927 -0.40673497 -1.2016314 ] [-0.522519 -1.0663732 -1.7436829 ... -1.1327441 -0.88501537 1.1551471 ] [ 0.62745893 0.2575848 -0.38952276 ... -0.21102256 0.27793694 -0.77743584]] [[-0.2770956 0.71346 -0.6142785 ... -0.34570253 -0.11283498 0.9089022 ] [ 0.6145185 1.4517633 0.95578206 ... 2.0119755 1.1730871 0.43440974] [ 0.3587272 -0.23008628 -0.4898962 ... 0.01276089 0.5512395 0.9887908 ] ... [-0.40870073 0.9224677 -0.2092403 ... -0.43294662 -0.63584965 0.7575249 ] [ 0.59033144 -0.65471476 -1.786362 ... 
-1.6213351 -0.39417663 0.47172597] [-1.7664363 1.5249447 0.48193157 ... 0.37782812 0.6529672 -0.22281297]] [[-0.5550112 1.0233303 -0.8262798 ... -1.5418035 -1.1880056 -1.0040915 ] [ 1.8611284 -0.7076632 -2.1543434 ... 1.4572906 0.00964536 0.43516093] [-0.4026776 -1.3033807 0.05445632 ... -0.3370907 -1.0432009 -0.34503615] ... [-0.19680679 -0.69661397 0.8033572 ... 0.9128569 0.67208385 -0.33224934] [ 1.2229369 2.5344162 -0.50895226 ... 1.2769994 2.9393415 -0.42741886] [ 1.8571923 -0.97695357 -0.38042057 ... -0.1938698 -1.082663 0.213546 ]] ... [[-0.507283 -1.739256 -0.0640699 ... 1.7441283 0.48135754 0.7762076 ] [-0.67506844 1.2210984 -0.8592419 ... -1.4975652 -0.30812967 0.22256759] [-0.1920346 -1.4015175 0.21726355 ... 0.46225706 -0.5136493 0.966615 ] ... [ 0.05771794 -1.1760138 0.11782363 ... -0.03199675 1.7171385 0.13363254] [ 1.2355943 0.13872018 2.4251258 ... 0.748539 0.36539873 0.21106935] [-0.46752006 -0.95988834 -1.3785648 ... 1.43622 -0.3643242 -1.1366711 ]] [[-1.8341138 0.8487117 0.69174814 ... 1.1860658 0.10009521 0.6084068 ] [-0.69709796 1.0251695 0.04902723 ... 0.20046611 -0.2411183 0.00790269] [ 0.2642507 -1.6160932 0.8694044 ... -0.53473353 -0.3594812 -0.1462754 ] ... [ 1.0855858 -2.9158263 -0.38790444 ... 0.40243924 0.6197926 1.794102 ] [ 1.5926179 1.4348973 0.24787122 ... -1.5080204 0.31037956 -0.37979916] [-1.4060795 0.05543493 -0.41726825 ... 0.8998655 -0.14963816 -1.5204027 ]] [[ 0.8482491 0.365753 0.7676952 ... 0.9261291 -1.4202124 -1.2826399 ] [-1.3448533 1.9016658 -1.2422042 ... -1.3951932 -0.19938691 -0.46197313] [ 0.9434162 0.28485027 0.45418066 ... 0.7376031 0.6093298 -0.3527049 ] ... [ 0.32396564 0.18628035 -0.14564148 ... -1.8532908 -0.14742373 -0.8047957 ] [-0.7971723 -0.6817226 -1.9231812 ... 0.5896601 0.20631498 -2.0858557 ] [ 0.7042526 0.7107307 -0.85799587 ... -0.71820736 2.064431 -0.8018325 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_listunpack[ ie_device:CPU - precision:FP32 - params:([2, 2, 2, 2, 2], 1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6116.aten_split, %input.1 : Tensor): %self.axis : int = prim::Constant[value=1]() %self.split : int[] = prim::Constant[value=[2, 2, 2, 2, 2]]() %4 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:43:32 %a : Tensor, %b.1 : Tensor, %c : Tensor, %d : Tensor, %e : Tensor = prim::ListUnpack(%4) return (%b.1) fw_re: [[[[ 1.1842294 -0.01250176 -0.17301868 ... 1.1964883 -1.0400403 -0.87230265] [-1.2614514 -0.2243579 1.9244211 ... 0.48371118 -1.0605656 0.6232832 ] [ 1.4361726 0.46438518 0.20441201 ... 0.693746 0.94699657 0.04488594] ... [-1.6763092 -0.86566806 -0.07477327 ... 0.44423142 -0.6209988 0.09646636] [ 1.5628537 -0.25716993 1.2880372 ... -1.0568432 -1.9024475 1.0198212 ] [-1.811733 -0.97159934 0.04496247 ... 0.19433178 1.8468354 -0.82607865]] [[-1.5079218 -1.7142609 1.3989146 ... 0.9906922 0.02355015 0.0417406 ] [ 0.8748486 -1.188627 -2.2613366 ... -1.1357341 -1.479584 -1.2706128 ] [-0.7540872 0.16520418 -0.09344943 ... 0.6826552 -0.77879226 2.572657 ] ... [-0.41324177 -1.952261 -1.0101964 ... -0.26798716 -1.7354251 -1.2185769 ] [ 0.26506805 -0.670956 -0.8005071 ... 0.21390386 -0.41658053 -1.459573 ] [-2.9057014 -0.18490139 -0.28924793 ... -1.0436983 -1.301236 1.721429 ]]]]; ov_res: [[[[ 1.1842294 -0.01250176 -0.17301868 ... 1.1964883 -1.0400403 -0.87230265] [-1.2614514 -0.2243579 1.9244211 ... 0.48371118 -1.0605656 0.6232832 ] [ 1.4361726 0.46438518 0.20441201 ... 0.693746 0.94699657 0.04488594] ... [-1.6763092 -0.86566806 -0.07477327 ... 0.44423142 -0.6209988 0.09646636] [ 1.5628537 -0.25716993 1.2880372 ... -1.0568432 -1.9024475 1.0198212 ] [-1.811733 -0.97159934 0.04496247 ... 0.19433178 1.8468354 -0.82607865]] [[-1.5079218 -1.7142609 1.3989146 ... 0.9906922 0.02355015 0.0417406 ] [ 0.8748486 -1.188627 -2.2613366 ... 
-1.1357341 -1.479584 -1.2706128 ] [-0.7540872 0.16520418 -0.09344943 ... 0.6826552 -0.77879226 2.572657 ] ... [-0.41324177 -1.952261 -1.0101964 ... -0.26798716 -1.7354251 -1.2185769 ] [ 0.26506805 -0.670956 -0.8005071 ... 0.21390386 -0.41658053 -1.459573 ] [-2.9057014 -0.18490139 -0.28924793 ... -1.0436983 -1.301236 1.721429 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_listunpack[ ie_device:CPU - precision:FP32 - params:([200, 20, 1, 1, 2], 2) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6118.aten_split, %input.1 : Tensor): %self.axis : int = prim::Constant[value=2]() %self.split : int[] = prim::Constant[value=[200, 20, 1, 1, 2]]() %4 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:43:32 %a : Tensor, %b.1 : Tensor, %c : Tensor, %d : Tensor, %e : Tensor = prim::ListUnpack(%4) return (%b.1) fw_re: [[[[ 0.40552306 0.71274793 -1.3904914 ... 0.30262944 1.1228509 0.66731226] [ 1.729753 0.32602757 -0.9572 ... -1.399499 -0.25479427 -0.17977537] [ 1.2412397 -0.3793417 0.24510203 ... -1.7983447 0.6174105 0.9490861 ] ... [-1.8487823 -2.2798963 0.85108334 ... -0.8897147 -0.43237737 0.3021557 ] [-1.0276299 0.2418436 -0.05339663 ... 0.12744318 -0.6716645 -0.49544644] [ 0.6023612 -1.2397954 -1.7934515 ... 0.05521115 0.10921121 1.4317473 ]] [[-1.7283789 0.13816904 -0.94847786 ... -0.88218796 -0.75599575 -0.15195225] [-1.0275371 1.607713 -0.0846833 ... -1.3322183 0.05778581 0.08810675] [-0.94678557 1.4948528 0.9576754 ... 0.1526613 -0.2526957 -0.66682416] ... [-0.99206656 -0.2908787 -1.770898 ... -0.18591775 -0.56482166 0.6012364 ] [-1.2844089 -2.1249783 -0.35161 ... 0.97082394 -1.8154606 -0.616829 ] [ 0.54173625 0.11564877 -1.9670545 ... 1.6462523 0.56629056 -0.91623837]] [[-1.1263574 -0.7218937 -0.26124704 ... -0.90047985 0.09389984 -0.64107203] [-1.2918693 -0.6331064 -1.4257134 ... -2.2091937 1.7555311 -0.05594521] [-0.4357449 0.98502016 -1.442181 ... 0.2401901 1.4011422 -0.7667624 ] ... [-0.66737795 -0.81470054 -0.64966154 ... 0.92770517 -0.57222587 -0.44098026] [-0.88166493 0.95762426 2.1379175 ... -1.8899803 -0.83293015 -0.47707865] [ 0.8733153 1.7196605 1.8948839 ... 0.7347711 -0.09077899 1.3517132 ]] ... [[ 1.2714149 -2.4724529 -1.2000531 ... -0.44949675 0.3541424 -0.21571377] [-1.3572966 1.6117185 -1.2216145 ... 
-0.21371871 -0.42043364 1.0061744 ] [-0.74861366 -1.4947749 1.5540208 ... 0.44138306 -0.8459201 -0.70533365] ... [ 0.44396132 0.3088268 -1.4133807 ... -0.9070923 0.5939059 0.44816378] [-1.6248342 -1.7174231 0.8167981 ... 1.0211693 -2.0006044 1.0341055 ] [-0.72885597 -1.3370526 -0.63459384 ... 0.18204966 1.0991571 1.1750875 ]] [[-1.3460455 -0.3882817 -0.7908718 ... 0.952387 -0.01695465 -0.8179784 ] [-0.87529117 1.8456683 -0.67638993 ... 0.955004 1.0690955 -1.5662367 ] [-0.27444258 1.0904105 0.31963095 ... 1.2795475 0.4867227 0.7457873 ] ... [-2.2295334 -0.87195 0.7118755 ... -1.2013162 -0.370035 -0.62835777] [ 1.0509511 0.88102865 -0.46255714 ... -0.46989402 0.6667053 -1.0927393 ] [-1.9961023 0.71159554 -0.8411331 ... 0.6753554 -1.1715308 0.4717019 ]] [[-0.57367015 -0.08927835 -1.0360703 ... -0.31048998 0.6907112 0.4521113 ] [-1.6820896 0.85441726 0.7649847 ... 1.9913737 0.43205148 0.49565104] [ 1.0753744 0.8580758 -1.1807098 ... 0.6371597 -0.13826054 -0.3242783 ] ... [-0.74945515 -0.20595479 -0.29884762 ... -0.03479468 0.25056443 1.282971 ] [-0.35474214 0.93070775 0.94897354 ... 0.28428364 0.302905 -1.0513506 ] [ 0.12705526 -0.08608411 0.3238726 ... -0.45612818 -1.5774536 -0.40389633]]]]; ov_res: [[[[ 0.40552306 0.71274793 -1.3904914 ... 0.30262944 1.1228509 0.66731226] [ 1.729753 0.32602757 -0.9572 ... -1.399499 -0.25479427 -0.17977537] [ 1.2412397 -0.3793417 0.24510203 ... -1.7983447 0.6174105 0.9490861 ] ... [-1.8487823 -2.2798963 0.85108334 ... -0.8897147 -0.43237737 0.3021557 ] [-1.0276299 0.2418436 -0.05339663 ... 0.12744318 -0.6716645 -0.49544644] [ 0.6023612 -1.2397954 -1.7934515 ... 0.05521115 0.10921121 1.4317473 ]] [[-1.7283789 0.13816904 -0.94847786 ... -0.88218796 -0.75599575 -0.15195225] [-1.0275371 1.607713 -0.0846833 ... -1.3322183 0.05778581 0.08810675] [-0.94678557 1.4948528 0.9576754 ... 0.1526613 -0.2526957 -0.66682416] ... [-0.99206656 -0.2908787 -1.770898 ... -0.18591775 -0.56482166 0.6012364 ] [-1.2844089 -2.1249783 -0.35161 ... 
0.97082394 -1.8154606 -0.616829 ] [ 0.54173625 0.11564877 -1.9670545 ... 1.6462523 0.56629056 -0.91623837]] [[-1.1263574 -0.7218937 -0.26124704 ... -0.90047985 0.09389984 -0.64107203] [-1.2918693 -0.6331064 -1.4257134 ... -2.2091937 1.7555311 -0.05594521] [-0.4357449 0.98502016 -1.442181 ... 0.2401901 1.4011422 -0.7667624 ] ... [-0.66737795 -0.81470054 -0.64966154 ... 0.92770517 -0.57222587 -0.44098026] [-0.88166493 0.95762426 2.1379175 ... -1.8899803 -0.83293015 -0.47707865] [ 0.8733153 1.7196605 1.8948839 ... 0.7347711 -0.09077899 1.3517132 ]] ... [[ 1.2714149 -2.4724529 -1.2000531 ... -0.44949675 0.3541424 -0.21571377] [-1.3572966 1.6117185 -1.2216145 ... -0.21371871 -0.42043364 1.0061744 ] [-0.74861366 -1.4947749 1.5540208 ... 0.44138306 -0.8459201 -0.70533365] ... [ 0.44396132 0.3088268 -1.4133807 ... -0.9070923 0.5939059 0.44816378] [-1.6248342 -1.7174231 0.8167981 ... 1.0211693 -2.0006044 1.0341055 ] [-0.72885597 -1.3370526 -0.63459384 ... 0.18204966 1.0991571 1.1750875 ]] [[-1.3460455 -0.3882817 -0.7908718 ... 0.952387 -0.01695465 -0.8179784 ] [-0.87529117 1.8456683 -0.67638993 ... 0.955004 1.0690955 -1.5662367 ] [-0.27444258 1.0904105 0.31963095 ... 1.2795475 0.4867227 0.7457873 ] ... [-2.2295334 -0.87195 0.7118755 ... -1.2013162 -0.370035 -0.62835777] [ 1.0509511 0.88102865 -0.46255714 ... -0.46989402 0.6667053 -1.0927393 ] [-1.9961023 0.71159554 -0.8411331 ... 0.6753554 -1.1715308 0.4717019 ]] [[-0.57367015 -0.08927835 -1.0360703 ... -0.31048998 0.6907112 0.4521113 ] [-1.6820896 0.85441726 0.7649847 ... 1.9913737 0.43205148 0.49565104] [ 1.0753744 0.8580758 -1.1807098 ... 0.6371597 -0.13826054 -0.3242783 ] ... [-0.74945515 -0.20595479 -0.29884762 ... -0.03479468 0.25056443 1.282971 ] [-0.35474214 0.93070775 0.94897354 ... 0.28428364 0.302905 -1.0513506 ] [ 0.12705526 -0.08608411 0.3238726 ... -0.45612818 -1.5774536 -0.40389633]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_split.py::TestSplit::test_split_listunpack[ ie_device:CPU - precision:FP32 - params:([20, 200, 1, 1, 2], -1) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_split.___torch_mangle_6120.aten_split, %input.1 : Tensor): %self.axis : int = prim::Constant[value=-1]() %self.split : int[] = prim::Constant[value=[20, 200, 1, 1, 2]]() %4 : Tensor[] = aten::split(%input.1, %self.split, %self.axis) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_split.py:43:32 %a : Tensor, %b.1 : Tensor, %c : Tensor, %d : Tensor, %e : Tensor = prim::ListUnpack(%4) return (%b.1) fw_re: [[[[-3.3806071e-01 -1.3246160e+00 -8.8312685e-01 ... 1.0982854e+00 -9.0584022e-01 7.2166479e-01] [-1.6376153e-01 1.0658298e+00 -1.6377670e+00 ... -7.6838315e-01 4.5993635e-01 1.5191327e+00] [ 4.3266356e-01 -6.8226165e-01 -7.9532318e-02 ... -2.3643956e-01 -6.8941027e-01 -5.6000447e-01] ... [ 1.2620802e+00 3.2373524e-01 2.9351768e-01 ... 5.0298280e-01 -1.0050291e+00 -1.7923948e+00] [ 1.1654915e+00 -1.5080130e+00 -3.9135464e-02 ... 6.3508600e-01 9.1165745e-01 -2.6507479e-01] [-3.5135761e-02 -2.7896214e+00 1.0503436e+00 ... -1.8522201e+00 1.6492428e+00 5.2962016e-02]] [[ 2.0984377e-01 2.2829698e-01 -1.3656825e+00 ... -1.3968626e+00 5.7057136e-01 -9.1274887e-01] [ 1.1002553e+00 -9.7737420e-01 -2.2658575e+00 ... -1.8466620e-01 -9.9561954e-01 -3.3823791e-01] [-2.2902167e-01 -5.3024089e-01 -5.7803315e-01 ... -2.3907290e+00 -1.3380195e-01 7.0282036e-01] ... [-1.8314360e-01 -6.2561256e-01 -2.1248574e+00 ... -1.3832539e+00 2.8713062e-01 3.5235810e-01] [-1.0039680e+00 3.5072732e-01 -3.0295780e-01 ... -2.1438935e+00 -9.9158031e-01 1.3684453e+00] [-5.1506901e-01 1.5582769e+00 7.4286324e-01 ... 8.9777040e-01 4.4574434e-01 -1.9167298e+00]] [[ 2.0865047e+00 -1.0631117e+00 5.6737673e-01 ... -7.1723336e-01 -9.5523691e-01 -1.3012202e+00] [ 4.4540772e-01 6.1127335e-01 -4.9577713e-01 ... 6.7630786e-01 1.1675816e+00 8.6002326e-01] [ 1.6282226e-01 -1.0361605e-01 -5.2447832e-01 ... -4.7259156e-02 -2.5390440e-01 -1.3312830e+00] ... 
[ 1.3361270e+00 -1.0982114e+00 1.3573736e-01 ... 8.2892224e-02 1.2044311e+00 -9.1349167e-01] [ 9.1530252e-01 -9.3769729e-01 4.6976286e-01 ... 6.0552490e-01 6.1094511e-02 2.6273780e-02] [-4.8179141e-01 1.4345788e+00 -7.7957451e-02 ... -3.1007776e-01 -1.1592752e+00 -7.9094726e-01]] ... [[ 1.0082062e+00 -1.2781380e+00 -1.6972293e+00 ... -9.8193669e-01 -8.9324635e-01 1.4252380e+00] [-1.0116926e+00 9.0963185e-01 6.2076814e-02 ... 2.0644863e+00 -1.9137755e-01 3.0583480e-01] [-8.7298751e-01 1.1021303e+00 -6.6027755e-01 ... 2.6823151e+00 -2.2467566e-01 3.4423110e-01] ... [-1.4174329e+00 -7.0555580e-01 2.6435724e-01 ... -9.9851914e-02 7.6140052e-01 4.6043003e-01] [-1.2910649e+00 1.0564495e-01 -1.8550599e+00 ... 7.0771897e-01 -2.3798814e+00 -2.6875269e-01] [-1.8784500e+00 -1.1894054e-02 1.4151696e+00 ... 8.0007041e-01 6.6893972e-02 -5.7122236e-01]] [[-4.0062085e-01 2.0662208e+00 -6.6693515e-01 ... -1.4840493e+00 1.3696049e+00 9.9394149e-01] [ 1.2504867e-01 -5.2570921e-01 3.6677747e+00 ... 1.4105439e+00 -6.3907403e-01 -7.1255267e-01] [ 9.9714220e-01 6.5539110e-01 1.4282116e+00 ... 4.0454644e-01 -1.1162362e+00 -9.7168171e-01] ... [-1.5030391e+00 -1.0930142e+00 -3.2753673e-01 ... 5.2354008e-01 3.8160333e-01 9.0709573e-01] [ 2.7982268e-01 5.4850662e-01 6.2572561e-02 ... 9.6868914e-01 1.8186250e-01 -1.3143533e+00] [ 1.4572906e-01 3.3150139e+00 -9.0315235e-01 ... -2.3254152e-01 1.5548869e+00 -1.4397628e+00]] [[-1.3804375e+00 -9.4795334e-01 1.4342192e+00 ... 5.9214026e-01 9.6133113e-01 6.3968772e-01] [-4.7299439e-01 -1.3479058e-01 -1.6955175e+00 ... 2.2153002e-01 -8.9131176e-01 4.1431066e-02] [ 1.0892941e+00 5.0824189e-01 7.0094341e-01 ... -1.2709303e-01 2.2104530e-02 -1.3978506e+00] ... [-4.6149719e-01 6.7080724e-01 2.3252957e+00 ... 3.4419346e-01 1.1223222e+00 -8.8781416e-01] [-2.6785865e-01 4.0560821e-01 1.1081479e-02 ... -1.6936405e-01 -5.8572781e-01 -1.4070503e-03] [ 3.1831813e-01 9.6005398e-01 3.8413996e-01 ... 
-4.1086820e-01 3.7451646e-01 -3.8711375e-01]]]]; ov_res: [[[[-3.3806071e-01 -1.3246160e+00 -8.8312685e-01 ... 1.0982854e+00 -9.0584022e-01 7.2166479e-01] [-1.6376153e-01 1.0658298e+00 -1.6377670e+00 ... -7.6838315e-01 4.5993635e-01 1.5191327e+00] [ 4.3266356e-01 -6.8226165e-01 -7.9532318e-02 ... -2.3643956e-01 -6.8941027e-01 -5.6000447e-01] ... [ 1.2620802e+00 3.2373524e-01 2.9351768e-01 ... 5.0298280e-01 -1.0050291e+00 -1.7923948e+00] [ 1.1654915e+00 -1.5080130e+00 -3.9135464e-02 ... 6.3508600e-01 9.1165745e-01 -2.6507479e-01] [-3.5135761e-02 -2.7896214e+00 1.0503436e+00 ... -1.8522201e+00 1.6492428e+00 5.2962016e-02]] [[ 2.0984377e-01 2.2829698e-01 -1.3656825e+00 ... -1.3968626e+00 5.7057136e-01 -9.1274887e-01] [ 1.1002553e+00 -9.7737420e-01 -2.2658575e+00 ... -1.8466620e-01 -9.9561954e-01 -3.3823791e-01] [-2.2902167e-01 -5.3024089e-01 -5.7803315e-01 ... -2.3907290e+00 -1.3380195e-01 7.0282036e-01] ... [-1.8314360e-01 -6.2561256e-01 -2.1248574e+00 ... -1.3832539e+00 2.8713062e-01 3.5235810e-01] [-1.0039680e+00 3.5072732e-01 -3.0295780e-01 ... -2.1438935e+00 -9.9158031e-01 1.3684453e+00] [-5.1506901e-01 1.5582769e+00 7.4286324e-01 ... 8.9777040e-01 4.4574434e-01 -1.9167298e+00]] [[ 2.0865047e+00 -1.0631117e+00 5.6737673e-01 ... -7.1723336e-01 -9.5523691e-01 -1.3012202e+00] [ 4.4540772e-01 6.1127335e-01 -4.9577713e-01 ... 6.7630786e-01 1.1675816e+00 8.6002326e-01] [ 1.6282226e-01 -1.0361605e-01 -5.2447832e-01 ... -4.7259156e-02 -2.5390440e-01 -1.3312830e+00] ... [ 1.3361270e+00 -1.0982114e+00 1.3573736e-01 ... 8.2892224e-02 1.2044311e+00 -9.1349167e-01] [ 9.1530252e-01 -9.3769729e-01 4.6976286e-01 ... 6.0552490e-01 6.1094511e-02 2.6273780e-02] [-4.8179141e-01 1.4345788e+00 -7.7957451e-02 ... -3.1007776e-01 -1.1592752e+00 -7.9094726e-01]] ... [[ 1.0082062e+00 -1.2781380e+00 -1.6972293e+00 ... -9.8193669e-01 -8.9324635e-01 1.4252380e+00] [-1.0116926e+00 9.0963185e-01 6.2076814e-02 ... 
2.0644863e+00 -1.9137755e-01 3.0583480e-01] [-8.7298751e-01 1.1021303e+00 -6.6027755e-01 ... 2.6823151e+00 -2.2467566e-01 3.4423110e-01] ... [-1.4174329e+00 -7.0555580e-01 2.6435724e-01 ... -9.9851914e-02 7.6140052e-01 4.6043003e-01] [-1.2910649e+00 1.0564495e-01 -1.8550599e+00 ... 7.0771897e-01 -2.3798814e+00 -2.6875269e-01] [-1.8784500e+00 -1.1894054e-02 1.4151696e+00 ... 8.0007041e-01 6.6893972e-02 -5.7122236e-01]] [[-4.0062085e-01 2.0662208e+00 -6.6693515e-01 ... -1.4840493e+00 1.3696049e+00 9.9394149e-01] [ 1.2504867e-01 -5.2570921e-01 3.6677747e+00 ... 1.4105439e+00 -6.3907403e-01 -7.1255267e-01] [ 9.9714220e-01 6.5539110e-01 1.4282116e+00 ... 4.0454644e-01 -1.1162362e+00 -9.7168171e-01] ... [-1.5030391e+00 -1.0930142e+00 -3.2753673e-01 ... 5.2354008e-01 3.8160333e-01 9.0709573e-01] [ 2.7982268e-01 5.4850662e-01 6.2572561e-02 ... 9.6868914e-01 1.8186250e-01 -1.3143533e+00] [ 1.4572906e-01 3.3150139e+00 -9.0315235e-01 ... -2.3254152e-01 1.5548869e+00 -1.4397628e+00]] [[-1.3804375e+00 -9.4795334e-01 1.4342192e+00 ... 5.9214026e-01 9.6133113e-01 6.3968772e-01] [-4.7299439e-01 -1.3479058e-01 -1.6955175e+00 ... 2.2153002e-01 -8.9131176e-01 4.1431066e-02] [ 1.0892941e+00 5.0824189e-01 7.0094341e-01 ... -1.2709303e-01 2.2104530e-02 -1.3978506e+00] ... [-4.6149719e-01 6.7080724e-01 2.3252957e+00 ... 3.4419346e-01 1.1223222e+00 -8.8781416e-01] [-2.6785865e-01 4.0560821e-01 1.1081479e-02 ... -1.6936405e-01 -5.8572781e-01 -1.4070503e-03] [ 3.1831813e-01 9.6005398e-01 3.8413996e-01 ... -4.1086820e-01 3.7451646e-01 -3.8711375e-01]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_sqrt.py::TestSqrt::test_sqrt[ ie_device:CPU - precision:FP32 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_sqrt.___torch_mangle_6121.aten_sqrt, %x.1 : Tensor): %2 : Tensor = aten::sqrt(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_sqrt.py:21:23 return (%2) fw_re: [[ nan 0.5928606 nan nan nan nan nan nan 0.694046 0.6184775]]; ov_res: [[ nan 0.5928606 nan nan nan nan nan nan 0.694046 0.6184775]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_squeeze.py::TestSqueeze::test_squeeze[ ie_device:CPU - precision:FP32 - dim:-2 - dynamic_shapes:True ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_squeeze.___torch_mangle_6122.aten_squeeze, %x.1 : Tensor): %self.dim : int = prim::Constant[value=-2]() %3 : Tensor = aten::squeeze(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_squeeze.py:24:27 return (%3) fw_re: [-1.1249007 0.15454945 -1.550059 0.62497026 -1.1677792 -1.0847771 -2.412789 1.2674915 0.01696402 -1.2439678 ]; ov_res: [-1.1249007 0.15454945 -1.550059 0.62497026 -1.1677792 -1.0847771 -2.412789 1.2674915 0.01696402 -1.2439678 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_squeeze.py::TestSqueeze::test_squeeze[ ie_device:CPU - precision:FP32 - dim:0 - dynamic_shapes:True ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_squeeze.___torch_mangle_6124.aten_squeeze, %x.1 : Tensor): %self.dim : int = prim::Constant[value=0]() %3 : Tensor = aten::squeeze(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_squeeze.py:24:27 return (%3) fw_re: [-1.0975016 0.47130138 -0.48913744 -1.89446 0.8302923 -0.5530157 0.35772315 0.44172588 -0.5068573 -0.776229 ]; ov_res: [-1.0975016 0.47130138 -0.48913744 -1.89446 0.8302923 -0.5530157 0.35772315 0.44172588 -0.5068573 -0.776229 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_squeeze.py::TestSqueeze::test_squeeze[ ie_device:CPU - precision:FP32 - dim:None - dynamic_shapes:False ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_squeeze.___torch_mangle_6126.aten_squeeze, %x.1 : Tensor): %2 : Tensor = aten::squeeze(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_squeeze.py:25:23 return (%2) fw_re: [-1.3737111 0.41995716 0.5529727 0.18695876 0.41695783 0.36354062 -0.99707305 -1.5217664 0.545408 1.438863 ]; ov_res: [-1.3737111 0.41995716 0.5529727 0.18695876 0.41695783 0.36354062 -0.99707305 -1.5217664 0.545408 1.438863 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_strided_const.py::TestStrides::test_strides[ ie_device:CPU - precision:FP32 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_strided_const.___torch_mangle_6127.strided_const, %x.1 : Tensor): %2 : int = prim::Constant[value=1]() %self.const : Float(1, 3, 2, 2, strides=[12, 1, 6, 3], requires_grad=0, device=cpu) = prim::Constant[value=<Tensor>]() %4 : Tensor = aten::add(%x.1, %self.const, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_strided_const.py:26:23 return (%4) fw_re: [[[[ 5. 18.] [ 7. 9.]] [[ 9. 11.] [12. 3.]] [[ 4. 13.] [ 9. 13.]]]]; ov_res: [[[[ 5. 18.] [ 7. 9.]] [[ 9. 11.] [12. 3.]] [[ 4. 13.] [ 9. 13.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_sum.py::TestSum::test_sum[ ie_device:CPU - precision:FP32 - axes:None - keep_dim:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_sum.___torch_mangle_6128.aten_sum, %x.1 : Tensor): %2 : NoneType = prim::Constant() %3 : Tensor = aten::sum(%x.1, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_sum.py:25:27 return (%3) fw_re: 26.37042236328125; ov_res: 26.37054443359375 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_sum.py::TestSum::test_sum[ ie_device:CPU - precision:FP32 - axes:None - keep_dim:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_sum.___torch_mangle_6130.aten_sum, %x.1 : Tensor): %self.keep_dims : bool = prim::Constant[value=0]() %self.axes : NoneType = prim::Constant() %4 : Tensor = aten::sum(%x.1, %self.axes, %self.keep_dims, %self.axes) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_sum.py:28:23 return (%4) fw_re: -162.49545288085938; ov_res: -162.49603271484375 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_sum.py::TestSum::test_sum[ ie_device:CPU - precision:FP32 - axes:-1 - keep_dim:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_sum.___torch_mangle_6132.aten_sum, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1]]() %3 : bool = prim::Constant[value=0]() %4 : NoneType = prim::Constant() %5 : Tensor = aten::sum(%x.1, %2, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_sum.py:27:27 return (%5) fw_re: [[[-11.147375 15.556315 -16.545265 6.6157947 7.222359 -2.5843344 -4.807626 -18.898785 0.49737567 18.981255 7.89732 0.2533207 -11.849372 -1.2944155 -1.1334352 -26.338486 22.323154 8.154348 17.474163 8.859573 -4.2998857 -6.1113944 18.197042 -9.357651 -1.1442566 -12.256652 14.660419 13.1933975 -10.39424 22.208418 -16.453442 -17.408394 -5.604438 9.24292 -2.8979826 -0.22875875 -34.35238 9.357197 2.3040724 -15.07928 -25.532295 21.831892 0.11734772 13.5917225 6.182912 -17.254116 37.61125 -3.248333 13.067987 -4.6978455 -18.223797 -8.789854 2.0015001 15.054407 -13.357407 -20.521042 5.325739 -24.063675 -21.95367 -9.6008 4.5102577 7.0585546 -0.57355785 27.506523 -28.384474 10.329208 -15.703773 12.705179 0.6724758 -14.310258 -17.272606 -12.937089 4.868241 -13.345886 7.473314 -15.882174 9.707279 5.505703 -16.003998 28.952263 37.74888 -26.494612 7.8142757 -26.272255 -13.292864 8.874088 20.27597 -11.459869 -0.5275655 -5.98989 8.759264 -12.293499 12.749224 0.6856129 18.782288 -9.871499 2.8711855 -15.055378 -5.244895 -3.8480797 -31.079098 2.68189 8.320737 8.583034 -1.7196126 -20.952328 -40.167206 -19.820326 -26.129925 15.053836 -23.845486 6.120953 -20.167389 -14.40298 2.1731052 -23.845186 2.892642 -13.614944 12.256936 3.3751287 18.163914 15.988698 -7.251026 -11.298069 32.317448 -21.25098 14.901631 6.47845 -20.623734 7.6904693 -0.05375695 7.3517704 5.3784356 2.2726562 1.9957283 -8.946126 4.395605 -13.465761 0.04316831 -33.73754 4.433942 -5.6521163 10.858857 -19.660288 8.996977 0.06624198 -19.964691 3.4362378 -0.42830992 -0.07332373 -21.773552 -15.332271 -2.9113598 -1.0286484 
18.006872 2.7686152 8.436729 -8.687878 -0.04722214 18.12273 -1.781846 -4.496908 -2.2449512 -4.2140694 -33.193607 3.7672782 -14.500456 -32.701138 3.2559628 1.394496 -27.395779 -4.56002 -2.6478558 14.479916 8.332189 31.572296 30.917252 30.420086 17.863728 -15.007559 -14.026292 1.9628581 -13.229826 5.2691684 16.829275 5.348177 -4.978096 1.076431 21.341793 -1.5753937 -20.002462 -9.205041 4.6400113 -4.407999 4.58319 2.3511286 -23.570024 -28.611858 17.3537 -7.293425 -3.4699788 -13.117142 -1.6108475 -1.7952421 21.131002 -19.799759 -24.950691 -4.5486317 -0.12343383 5.021555 -9.989996 -3.49335 -17.405272 9.415722 -5.1100583 -3.1618915 4.190735 4.0850425 -34.800842 -13.530113 18.771297 7.787564 30.935898 12.214738 ] [-10.334494 -1.0816116 10.278643 -12.278011 8.321411 22.147976 -13.737448 -33.081688 31.183483 -10.272798 -33.68254 -22.053022 7.1499147 -27.109241 -34.62107 10.777437 -8.272846 5.467038 14.318576 -13.665629 2.1399498 -16.56567 -5.772771 6.3943453 18.262735 1.7641029 -1.336169 -5.29253 12.966249 -25.208523 -20.389694 -8.452057 4.2065835 12.606115 -42.045662 -15.614187 -15.465371 7.5472717 -27.329666 -9.869883 6.7425313 7.638773 1.2789598 1.7259624 -2.146819 -37.51219 -5.3074346 12.401566 -34.213184 -12.633995 -8.493473 -8.887891 21.56896 10.910078 -24.257938 16.00214 7.206346 11.788015 -8.791483 -5.6133366 -19.680305 14.111274 -25.982346 4.5545063 3.443625 34.713943 -2.2338917 5.3428626 -26.466728 -7.4529657 12.745356 16.648684 -10.935649 6.464047 -9.159586 0.15341139 17.856295 -5.690749 -4.22289 3.2157989 1.7304049 -4.964115 1.6372526 16.014282 -12.96772 16.40691 -17.304989 19.670488 14.988451 3.3074598 2.4351192 3.828103 6.5456715 14.888487 -14.953322 -15.437466 11.904316 12.412717 -23.463516 -12.088163 7.690968 18.560316 11.939276 26.721157 -16.829863 27.506681 1.3816003 -3.9322748 16.906475 -11.965559 4.871037 -17.047987 -2.7837489 -2.2079477 19.405928 32.538124 -1.5101724 -6.632804 -2.0325475 18.1704 28.708344 -7.334758 -7.830169 21.631445 -15.46784 
-6.347706 -15.395217 -25.831757 12.655795 9.009415 30.951159 -2.60217 -15.442578 14.657404 13.92774 -5.606702 -13.070675 -3.9376569 7.003515 21.15046 -10.740969 26.441013 -37.05653 -5.6687174 -26.585636 -8.680005 -3.5122197 9.680683 2.117415 3.0637016 -2.4454906 -3.587985 2.812875 4.404152 3.963529 14.670811 14.19235 8.590821 -0.6456952 5.948012 -10.14237 -15.034529 -23.452072 -9.096923 -21.907263 2.8095224 12.631016 26.585737 -2.7434306 -6.5047593 1.9881802 8.993234 -20.018665 -9.113131 2.8143141 14.1042 10.790806 20.379644 1.5800999 1.5012832 12.991193 -13.194523 7.2547474 -13.120703 10.216888 17.22461 2.2291203 -0.6319685 2.7330317 20.039904 37.862503 -6.168563 -10.726577 6.6774416 1.1590283 10.261798 4.7499104 -21.074402 8.108728 10.27887 -27.894535 0.70945215 -26.348362 -14.176235 0.25529957 -3.6981106 -8.368396 31.147984 8.372797 -17.35448 3.2659636 -21.216042 -6.2745676 3.7782583 -2.8764458 21.751585 -2.2648764 -12.287697 -12.423353 0.2034092 21.762203 5.653924 -20.591578 9.19031 ] [-34.646423 39.572803 -10.947607 19.768131 34.112873 -26.681437 -22.786388 27.809895 25.155952 -11.528895 13.9501705 9.096569 10.411672 16.436478 0.91559076 5.5849705 0.25070834 -4.842561 1.3280401 5.937656 -10.417088 -1.1053638 -5.2718997 6.118143 6.256877 24.321453 1.2026148 4.2536893 -6.882033 -14.450816 -13.820185 -3.6783648 -21.998478 -3.8649063 20.858896 -8.436539 -10.890411 10.933029 -1.5818601 24.221104 8.587892 3.812873 -5.0263495 -18.525665 -18.709158 27.959093 2.490517 2.8365676 -3.9297109 -11.70171 -13.424617 7.9180255 -4.098249 13.17555 -6.8133254 7.648364 2.3407369 -17.906551 -9.98642 -28.847237 5.603644 -14.331308 -25.226597 -8.366951 -3.12789 -0.15628147 2.5830693 32.15673 13.879513 -4.9040356 -6.0731306 9.658922 15.654441 -34.573097 -7.518547 -5.634411 -36.963444 -12.853428 7.83335 12.130925 -12.524969 2.664287 0.11712086 31.217682 -3.3219123 33.39166 -42.332497 13.301378 -5.4223943 -5.531821 7.7062926 17.974785 20.758972 -0.3206148 16.211412 -7.824169 -4.512987 
35.030617 16.179852 1.1474972 -9.243754 14.955399 -15.166315 4.489812 24.05693 1.4210911 2.8767424 -12.064666 19.2309 15.653444 25.245342 28.27393 6.918779 -7.953265 2.4277525 27.130219 4.7426176 5.8916974 -11.385311 15.395126 1.1975242 -32.70706 -0.8852377 -15.641263 2.58496 3.953362 -17.37198 -26.180119 25.777369 13.352274 11.52016 -7.1757336 -0.07708836 -10.450591 -11.294113 -0.24819112 -8.563107 5.957747 17.446941 0.8330517 6.4392233 -18.901386 -10.031232 -11.189374 -0.7644267 -10.14371 21.97627 -7.850515 12.704016 -3.4521413 3.3069468 21.124527 3.5461378 18.501049 -6.635097 -10.027334 -11.47304 -32.68628 3.7300901 -1.0023017 -26.413082 15.816158 21.073936 12.986748 -15.783731 9.802253 11.119009 -23.583992 -21.956377 -10.52714 -13.546706 6.2337117 15.139684 10.550668 -16.5804 3.6477244 -11.095734 -30.269281 -6.834428 -29.671799 -20.561941 4.9003353 3.0673718 -8.2718115 22.212543 -3.2110705 0.46931267 -31.521343 -12.921356 9.284399 4.890253 -3.1859355 -8.953785 24.30852 7.837756 -2.2880263 -9.842334 23.875072 -3.752657 11.540438 11.735521 13.615666 2.8206108 -10.999605 -12.574904 -36.927372 -19.680061 -1.9227376 -14.627803 -2.9406137 -9.786038 -2.7598574 22.437979 13.619291 -8.138189 -11.586404 -12.786782 4.025542 -16.628145 -14.595195 13.403448 -8.101662 6.7225604 -17.306189 ]]]; ov_res: [[[-11.147371 15.5563135 -16.545261 6.6157966 7.2223616 -2.5843315 -4.807625 -18.898788 0.49737644 18.981255 7.8973236 0.25332046 -11.849372 -1.2944155 -1.1334362 -26.33849 22.323154 8.154348 17.474167 8.859574 -4.2998857 -6.111394 18.19704 -9.357651 -1.1442604 -12.256648 14.660419 13.193398 -10.394242 22.208416 -16.453438 -17.408394 -5.604436 9.242922 -2.8979833 -0.22875786 -34.352375 9.357194 2.3040717 -15.079281 -25.532293 21.831892 0.11734605 13.5917225 6.18291 -17.254112 37.61125 -3.2483363 13.0679865 -4.6978445 -18.223793 -8.789855 2.0015016 15.054406 -13.357408 -20.521042 5.325737 -24.063675 -21.953672 -9.600799 4.5102577 7.0585527 -0.5735569 27.506523 -28.384472 
10.329208 -15.703771 12.705177 0.67247415 -14.310256 -17.272606 -12.937088 4.8682413 -13.345886 7.4733157 -15.882174 9.707281 5.505703 -16.003998 28.95226 37.74888 -26.494606 7.814274 -26.272257 -13.292867 8.874087 20.27597 -11.459869 -0.527565 -5.989887 8.759263 -12.293499 12.749223 0.6856127 18.782288 -9.8715 2.8711863 -15.055377 -5.244892 -3.8480806 -31.079094 2.6818905 8.320737 8.583033 -1.7196131 -20.952326 -40.167206 -19.820328 -26.129925 15.053835 -23.845486 6.120953 -20.167387 -14.402978 2.1731043 -23.845186 2.8926444 -13.614941 12.256934 3.3751297 18.163914 15.988697 -7.251025 -11.298071 32.317448 -21.250978 14.90163 6.4784493 -20.62373 7.6904697 -0.05375624 7.3517704 5.378435 2.272657 1.9957271 -8.946124 4.3956056 -13.465761 0.04317093 -33.73754 4.4339437 -5.652115 10.85886 -19.66029 8.996977 0.06624222 -19.964693 3.4362366 -0.4283123 -0.07332516 -21.773552 -15.332268 -2.9113607 -1.0286503 18.006872 2.7686157 8.436729 -8.687878 -0.04722309 18.12273 -1.7818465 -4.496908 -2.244954 -4.2140713 -33.193604 3.7672768 -14.500454 -32.70114 3.2559657 1.3944969 -27.395779 -4.56002 -2.6478567 14.479916 8.332188 31.572296 30.917252 30.420086 17.863728 -15.007559 -14.026292 1.9628601 -13.229826 5.269168 16.829277 5.3481765 -4.978097 1.0764303 21.341793 -1.5753975 -20.002462 -9.205039 4.640012 -4.4080005 4.58319 2.351128 -23.570024 -28.611858 17.353699 -7.2934227 -3.469975 -13.117144 -1.6108441 -1.7952404 21.130999 -19.799759 -24.950691 -4.5486307 -0.12343168 5.021555 -9.989994 -3.49335 -17.405273 9.415719 -5.110058 -3.16189 4.190736 4.085046 -34.80084 -13.530111 18.7713 7.7875633 30.935902 12.214737 ] [-10.334492 -1.0816112 10.278645 -12.278013 8.321413 22.147978 -13.737447 -33.081688 31.183483 -10.2728 -33.68254 -22.05302 7.1499157 -27.109241 -34.62107 10.777436 -8.272848 5.46704 14.318577 -13.665628 2.1399517 -16.565672 -5.772771 6.3943453 18.262733 1.7641029 -1.3361702 -5.29253 12.966249 -25.20852 -20.389694 -8.452057 4.2065845 12.606113 -42.045662 -15.614185 
-15.465372 7.5472703 -27.329666 -9.869882 6.74253 7.638772 1.2789583 1.7259626 -2.1468172 -37.512188 -5.307435 12.401564 -34.21318 -12.633997 -8.493473 -8.887894 21.568956 10.910078 -24.257942 16.00214 7.2063437 11.788013 -8.791483 -5.6133366 -19.680302 14.111273 -25.98235 4.554502 3.4436226 34.713943 -2.2338939 5.342862 -26.466728 -7.452964 12.745357 16.648684 -10.93565 6.464047 -9.159588 0.15341091 17.856293 -5.690751 -4.222892 3.2157984 1.7304063 -4.964115 1.6372521 16.014284 -12.967718 16.406912 -17.304987 19.670483 14.988447 3.307458 2.4351177 3.8281057 6.5456734 14.888488 -14.95332 -15.437468 11.904316 12.412715 -23.463518 -12.08816 7.6909704 18.560316 11.939276 26.721157 -16.82986 27.50668 1.3816004 -3.9322748 16.906471 -11.96556 4.871036 -17.047985 -2.783749 -2.2079506 19.405928 32.538128 -1.5101714 -6.632804 -2.0325484 18.1704 28.708344 -7.334758 -7.8301687 21.631447 -15.467841 -6.347706 -15.395216 -25.83176 12.655798 9.009413 30.951157 -2.602169 -15.442575 14.657402 13.927739 -5.606703 -13.070675 -3.9376574 7.003517 21.15046 -10.74097 26.441013 -37.056534 -5.668719 -26.585634 -8.680005 -3.5122204 9.680679 2.1174197 3.0637002 -2.445489 -3.587986 2.8128746 4.4041533 3.9635277 14.670814 14.192348 8.590824 -0.6456938 5.9480133 -10.14237 -15.034531 -23.452068 -9.096922 -21.907263 2.8095236 12.631014 26.585735 -2.7434268 -6.5047593 1.9881783 8.993235 -20.01867 -9.113133 2.8143158 14.104198 10.790807 20.379644 1.580102 1.5012836 12.991192 -13.194523 7.2547493 -13.120701 10.216892 17.22461 2.2291212 -0.631969 2.7330313 20.039904 37.862503 -6.168562 -10.726579 6.6774416 1.1590261 10.261797 4.7499104 -21.074398 8.108725 10.278867 -27.894537 0.7094526 -26.348362 -14.176235 0.25529957 -3.6981125 -8.368395 31.147984 8.372797 -17.354485 3.2659643 -21.216042 -6.274566 3.7782564 -2.8764448 21.751585 -2.2648783 -12.28769 -12.423353 0.2034111 21.762201 5.653922 -20.59158 9.19031 ] [-34.646427 39.572807 -10.947607 19.768133 34.11287 -26.681438 -22.786383 27.809895 25.155956 
-11.5288925 13.9501705 9.096567 10.411669 16.436478 0.91558826 5.58497 0.25070763 -4.842561 1.3280382 5.9376554 -10.417088 -1.1053653 -5.2718987 6.118143 6.256877 24.321457 1.2026129 4.2536883 -6.8820305 -14.450818 -13.820187 -3.6783655 -21.998474 -3.864908 20.858896 -8.436541 -10.8904085 10.933027 -1.5818601 24.221106 8.587892 3.8128738 -5.0263467 -18.525661 -18.709158 27.959093 2.4905167 2.8365674 -3.9297104 -11.701712 -13.424617 7.918025 -4.0982504 13.175551 -6.8133264 7.648363 2.340735 -17.906548 -9.986421 -28.847235 5.6036463 -14.331305 -25.226597 -8.366951 -3.1278915 -0.15627909 2.58307 32.15673 13.879514 -4.904033 -6.0731325 9.658922 15.654443 -34.573105 -7.5185456 -5.6344113 -36.963444 -12.853428 7.8333507 12.130925 -12.524968 2.6642823 0.11712122 31.217684 -3.3219137 33.39166 -42.332493 13.301377 -5.422395 -5.5318193 7.706292 17.974785 20.758974 -0.320616 16.21141 -7.8241673 -4.5129876 35.030617 16.17985 1.1474962 -9.243753 14.9554 -15.166311 4.4898105 24.056932 1.4210892 2.8767445 -12.064663 19.230904 15.653444 25.245342 28.27393 6.9187827 -7.953264 2.4277525 27.130219 4.7426186 5.891698 -11.385313 15.395126 1.1975217 -32.70706 -0.8852382 -15.641262 2.5849605 3.9533603 -17.371983 -26.18012 25.777367 13.352274 11.520159 -7.1757336 -0.07708645 -10.450592 -11.294113 -0.24819088 -8.563108 5.9577484 17.446943 0.8330517 6.4392204 -18.901384 -10.031231 -11.189375 -0.7644272 -10.14371 21.976269 -7.850515 12.704017 -3.4521427 3.3069434 21.124525 3.5461388 18.501049 -6.6351004 -10.027336 -11.473041 -32.68628 3.7300925 -1.0023012 -26.413082 15.81616 21.073936 12.986748 -15.78373 9.802252 11.119008 -23.583992 -21.956373 -10.527139 -13.546705 6.2337136 15.139683 10.550669 -16.5804 3.647725 -11.095734 -30.26928 -6.8344297 -29.671799 -20.56194 4.9003353 3.0673728 -8.271812 22.212547 -3.2110686 0.46931243 -31.521345 -12.921358 9.284399 4.8902545 -3.1859374 -8.953782 24.308517 7.837758 -2.2880254 -9.842335 23.875072 -3.752656 11.540437 11.735519 13.615669 2.8206134 
-10.999606 -12.574902 -36.927376 -19.680061 -1.922739 -14.627808 -2.9406145 -9.786039 -2.7598572 22.437979 13.619293 -8.13819 -11.586403 -12.786784 4.0255413 -16.628143 -14.595194 13.403449 -8.101662 6.7225604 -17.306189 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_sum.py::TestSum::test_sum[ ie_device:CPU - precision:FP32 - axes:1 - keep_dim:None ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_sum.___torch_mangle_6134.aten_sum, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %3 : bool = prim::Constant[value=0]() %4 : NoneType = prim::Constant() %5 : Tensor = aten::sum(%x.1, %2, %3, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_sum.py:27:27 return (%5) fw_re: [[[ 0.13858616 -3.3009558 -1.9204868 ... -1.105068 -0.717807 2.2640374 ] [-0.45895746 0.7166803 1.1534473 ... -0.7169087 1.0036523 1.8267448 ] [ 2.8043368 1.411527 -1.3164023 ... 2.7746515 -0.0244838 0.47131413] ... [ 0.7707528 -2.673822 3.8855004 ... 1.2233665 1.2056483 -0.16262197] [ 2.1757965 -1.8578544 1.087028 ... 2.4328856 0.90028703 -0.29998645] [-0.71260154 2.8219388 -0.61235166 ... -4.142305 1.5213072 1.0456429 ]]]; ov_res: [[[ 0.13858616 -3.3009558 -1.9204868 ... -1.105068 -0.717807 2.2640374 ] [-0.45895746 0.7166803 1.1534473 ... -0.7169087 1.0036523 1.8267448 ] [ 2.8043368 1.411527 -1.3164023 ... 2.7746515 -0.0244838 0.47131413] ... [ 0.7707528 -2.673822 3.8855004 ... 1.2233665 1.2056483 -0.16262197] [ 2.1757965 -1.8578544 1.087028 ... 2.4328856 0.90028703 -0.29998645] [-0.71260154 2.8219388 -0.61235166 ... -4.142305 1.5213072 1.0456429 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_sum.py::TestSum::test_sum[ ie_device:CPU - precision:FP32 - axes:(2, 3) - keep_dim:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_sum.___torch_mangle_6136.aten_sum, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 3]]() %3 : NoneType = prim::Constant() %self.keep_dims : bool = prim::Constant[value=0]() %5 : Tensor = aten::sum(%x.1, %2, %self.keep_dims, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_sum.py:28:23 return (%5) fw_re: [[ 217.68858 -534.201 55.692543]]; ov_res: [[ 217.68835 -534.2009 55.692417]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_sum.py::TestSum::test_sum[ ie_device:CPU - precision:FP32 - axes:(3, 2) - keep_dim:True ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_sum.___torch_mangle_6138.aten_sum, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3, 2]]() %3 : NoneType = prim::Constant() %self.keep_dims : bool = prim::Constant[value=1]() %5 : Tensor = aten::sum(%x.1, %2, %self.keep_dims, %3) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_sum.py:28:23 return (%5) fw_re: [[[[ 165.69089 ]] [[ -37.301163]] [[-104.908356]]]]; ov_res: [[[[ 165.69086 ]] [[ -37.301285]] [[-104.908455]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6139.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=0]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [21 6 42]; ov_res: [21 6 42] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6141.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=0]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [40 6 47]; ov_res: [40 6 47] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6143.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=0]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [30 0 22]; ov_res: [30 0 22] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6145.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=1]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [15 24 24]; ov_res: [15 24 24] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6147.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=1]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [ 5 21 37]; ov_res: [ 5 21 37] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6149.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=1]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [ 4 0 23]; ov_res: [ 4 0 23] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6151.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=2]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [ 5 36 34]; ov_res: [ 5 36 34] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6153.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=2]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [32 44 5]; ov_res: [32 44 5] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - input_type:<class 'numpy.float64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6155.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=2]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. 
Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type 
(#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of fw_re: [48 2 49]; ov_res: [48 2 49] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6157.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=3]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [35 2 35]; ov_res: [35 2 35] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - input_type:<class 'numpy.float32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6159.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=3]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [26 19 7]; ov_res: [26 19 7] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6161.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=3]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [32 44 44]; ov_res: [32 44 44] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6163.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=6]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [23.632269 29.939873 8.311531]; ov_res: [23.632269 29.939873 8.311531] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6165.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=6]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [39.78223 28.083118 3.0942562]; ov_res: [39.78223 28.083118 3.0942562] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6167.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=6]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [46.23252 42.28254 0.68128026]; ov_res: [46.23252 42.28254 0.68128026] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6169.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=4]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [24 19 25]; ov_res: [24 19 25] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6171.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=4]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [37 4 23]; ov_res: [37 4 23] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6173.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=4]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) fw_re: [19 38 32]; ov_res: [19 38 32] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - non_blocking:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6175.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=0]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [ 3 3 14]; ov_res: [ 3 3 14] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - non_blocking:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6177.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=0]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [19 26 46]; ov_res: [19 26 46] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - non_blocking:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6179.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=0]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [22 12 46]; ov_res: [22 12 46] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - non_blocking:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6181.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=1]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [ 7 44 47]; ov_res: [ 7 44 47] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - non_blocking:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6183.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=1]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [32 45 48]; ov_res: [32 45 48] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - non_blocking:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6185.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=1]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [14 22 46]; ov_res: [14 22 46] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - non_blocking:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6187.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=2]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [17 33 6]; ov_res: [17 33 6] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - non_blocking:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6189.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=2]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [36 27 28]; ov_res: [36 27 28] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - non_blocking:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6191.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=2]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [18 24 2]; ov_res: [18 24 2] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - non_blocking:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6193.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=3]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: 
prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES 
or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in fw_re: [12 37 40]; ov_res: [12 37 40] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - non_blocking:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6195.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=3]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [ 1 49 14]; ov_res: [ 1 49 14] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - non_blocking:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6197.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=3]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [16 8 24]; ov_res: [16 8 24] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - non_blocking:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6199.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=4]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [ 4 49 18]; ov_res: [ 4 49 18] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - non_blocking:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6201.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=4]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [38 25 17]; ov_res: [38 25 17] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - non_blocking:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6203.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=4]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [47 9 30]; ov_res: [47 9 30] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - non_blocking:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6205.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=6]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [22.369757 39.159706 21.247753]; ov_res: [22.369757 39.159706 21.247753] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - non_blocking:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6207.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=6]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [35.81813 19.116005 38.86791 ]; ov_res: [35.81813 19.116005 38.86791 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - non_blocking:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6209.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=6]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [36.660645 41.145515 33.06824 ]; ov_res: [36.660645 41.145515 33.06824 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float64 - non_blocking:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6211.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=7]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [41.75532503 9.08065099 37.02631925]; ov_res: [41.75532532 9.08065128 37.0263176 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float64 - non_blocking:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6213.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=7]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [21.23085628 28.28692906 15.95402789]; ov_res: [21.23085594 28.28692818 15.95402813] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float64 - non_blocking:True - input_type:<class 'numpy.float64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6215.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=7]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [40.03464026 4.69449192 38.54091634]; ov_res: [40.03464127 4.69449186 38.54091644] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.bool - non_blocking:True - input_type:<class 'numpy.int32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6217.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=11]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [ True True True]; ov_res: [ True True True] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.bool - non_blocking:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6219.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=11]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [ True True True]; ov_res: [ True True True] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_non_blocking_arg[ ie_device:CPU - precision:FP32 - output_type:torch.bool - non_blocking:True - input_type:<class 'numpy.float64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6221.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=0]() %self.non_blocking : bool = prim::Constant[value=1]() %self.type : int = prim::Constant[value=11]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [ True True True]; ov_res: [ True True True] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - copy:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6223.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=0]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [35 46 6]; ov_res: [35 46 6] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - copy:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6225.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=0]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [11 23 12]; ov_res: [11 23 12] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - copy:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6227.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=0]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [39 30 48]; ov_res: [39 30 48] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - copy:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6229.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=1]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [45 48 47]; ov_res: [45 48 47] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - copy:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6231.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=1]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [26 40 39]; ov_res: [26 40 39] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - copy:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6233.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=1]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode 
for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python 
instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to Cfw_re: [ 0 6 15]; ov_res: [ 0 6 15] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - copy:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6235.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=2]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [46 5 43]; ov_res: [46 5 43] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - copy:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6237.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=2]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [23 26 32]; ov_res: [23 26 32] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - copy:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6239.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=2]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [25 8 34]; ov_res: [25 8 34] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - copy:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6241.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=3]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [11 44 18]; ov_res: [11 44 18] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - copy:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6243.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=3]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [35 22 37]; ov_res: [35 22 37] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - copy:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6245.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=3]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [15 46 36]; ov_res: [15 46 36] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - copy:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6247.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=4]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [33 4 26]; ov_res: [33 4 26] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - copy:True - input_type:<class 'numpy.float32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6249.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=4]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [23 21 37]; ov_res: [23 21 37] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - copy:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6251.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=4]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [42 26 8]; ov_res: [42 26 8] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - copy:True - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6253.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=6]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [ 7.7567677 48.89866 27.70754 ]; ov_res: [ 7.7567677 48.89866 27.70754 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - copy:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6255.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=6]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [27.454948 22.241907 39.196293]; ov_res: [27.454948 22.241907 39.196293] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - copy:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6257.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=6]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [ 3.2911582 28.116901 2.0088716]; ov_res: [ 3.2911582 28.116901 2.0088716] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float64 - copy:True - input_type:<class 'numpy.int32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6259.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=7]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [24.96410269 27.80946002 15.55616238]; ov_res: [24.96410179 27.80945969 15.55616283] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float64 - copy:True - input_type:<class 'numpy.float32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6261.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=7]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [35.07772213 37.65175234 23.9973148 ]; ov_res: [35.07772064 37.65175247 23.99731445] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_copy_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float64 - copy:True - input_type:<class 'numpy.float64'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6263.aten_to, %x.1 : Tensor): %self.memory_format : NoneType = prim::Constant() %self.copy : bool = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=7]() %6 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.copy, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%6) fw_re: [ 1.26791564 38.80775242 31.1355988 ]; ov_res: [ 1.26791561 38.8077507 31.13559914] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - memory_format:1 - input_type:<class 'numpy.int32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6265.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=0]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - memory_format:1 - input_type:<class 'numpy.float32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6267.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=0]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.uint8 - memory_format:1 - input_type:<class 'numpy.float64'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6269.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=1]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=0]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) ++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? 
memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with sche | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - memory_format:1 - input_type:<class 'numpy.int32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6271.aten_to, %x.1 : Tensor): %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=1]() %4 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.type) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%4) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - memory_format:1 - input_type:<class 'numpy.float32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6273.aten_to, %x.1 : Tensor): %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=1]() %4 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.type) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%4) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int8 - memory_format:1 - input_type:<class 'numpy.float64'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6275.aten_to, %x.1 : Tensor): %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=1]() %4 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.type) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%4) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - memory_format:2 - input_type:<class 'numpy.int32'> ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6277.aten_to, %x.1 : Tensor): %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=2]() %4 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.type) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%4) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - memory_format:2 - input_type:<class 'numpy.float32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6279.aten_to, %x.1 : Tensor): %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=2]() %4 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.type) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%4) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int16 - memory_format:2 - input_type:<class 'numpy.float64'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6281.aten_to, %x.1 : Tensor): %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=2]() %4 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.type) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%4) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - memory_format:2 - input_type:<class 'numpy.int32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6283.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=2]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=3]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - memory_format:2 - input_type:<class 'numpy.float32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6285.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=2]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=3]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int32 - memory_format:2 - input_type:<class 'numpy.float64'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6287.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=2]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=3]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - memory_format:3 - input_type:<class 'numpy.int32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6289.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=3]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=4]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) ma: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? 
memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? 
memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during co | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - memory_format:3 - input_type:<class 'numpy.float32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6291.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=3]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=4]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.int64 - memory_format:3 - input_type:<class 'numpy.float64'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6293.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=3]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=4]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - memory_format:3 - input_type:<class 'numpy.int32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6295.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=3]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=6]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - memory_format:3 - input_type:<class 'numpy.float32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6297.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=3]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=6]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float32 - memory_format:3 - input_type:<class 'numpy.float64'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6299.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=3]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=6]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float64 - memory_format:4 - input_type:<class 'numpy.int32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6301.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=4]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=7]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float64 - memory_format:4 - input_type:<class 'numpy.float32'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6303.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=4]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=7]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_to.py::TestAtenTo::test_aten_to_raise_memory_format_arg[ ie_device:CPU - precision:FP32 - output_type:torch.float64 - memory_format:4 - input_type:<class 'numpy.float64'> ] | 0.00 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_to.___torch_mangle_6305.aten_to, %x.1 : Tensor): %self.memory_format : int = prim::Constant[value=4]() %self.non_blocking : bool = prim::Constant[value=0]() %self.type : int = prim::Constant[value=7]() %5 : Tensor = aten::to(%x.1, %self.type, %self.non_blocking, %self.non_blocking, %self.memory_format) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_to.py:29:23 return (%5) | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:acos ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6306.aten_op, %x.1 : Tensor): %2 : Tensor = aten::acos(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[1.4932548 1.2987323 nan 2.2731223] [1.8028045 2.8372736 2.5613055 1.0158092] [2.2125788 nan 1.20432 nan]] [[ nan 1.3837695 nan 1.5445017] [1.5820913 2.0455346 nan 2.683981 ] [ nan 1.5191606 nan 2.14846 ]]]]; ov_res: [[[[1.4932548 1.2987323 nan 2.2731223] [1.8028045 2.8372736 2.5613055 1.0158092] [2.2125788 nan 1.20432 nan]] [[ nan 1.3837695 nan 1.5445017] [1.5820913 2.0455346 nan 2.683981 ] [ nan 1.5191606 nan 2.14846 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:acos_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6308.aten_op, %x.1 : Tensor): %2 : Tensor = aten::acos_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[1.8070616 0.6681471 0.8143026 2.0419064 ] [0.5711934 nan 1.9418021 0.30257913] [2.321254 2.7167537 2.1838646 nan]] [[ nan 1.3150197 nan 2.601692 ] [ nan nan 0.93901247 1.1407949 ] [1.3820198 1.3598957 2.0993772 nan]]]]; ov_res: [[[[1.8070616 0.6681471 0.8143026 2.0419064 ] [0.5711934 nan 1.9418021 0.30257913] [2.321254 2.7167537 2.1838646 nan]] [[ nan 1.3150197 nan 2.601692 ] [ nan nan 0.93901247 1.1407949 ] [1.3820198 1.3598957 2.099377 nan]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:acosh ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6310.aten_op, %x.1 : Tensor): %2 : Tensor = aten::acosh(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[ nan nan nan nan] [ nan nan nan nan] [ nan 1.2111009 nan nan]] [[1.1372491 nan nan nan] [ nan 1.1757435 1.6124421 nan] [ nan nan 1.0159965 1.1683693]]]]; ov_res: [[[[ nan nan nan nan] [ nan nan nan nan] [ nan 1.2111009 nan nan]] [[1.1372491 nan nan nan] [ nan 1.1757435 1.6124421 nan] [ nan nan 1.0159965 1.1683693]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:acosh_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6312.aten_op, %x.1 : Tensor): %2 : Tensor = aten::acosh_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[ nan nan nan nan] [1.5127637 nan 0.5794903 nan] [ nan 1.096323 nan nan]] [[0.50800395 nan nan 0.7925034 ] [0.9712008 nan nan nan] [ nan nan nan nan]]]]; ov_res: [[[[ nan nan nan nan] [1.5127637 nan 0.5794903 nan] [ nan 1.096323 nan nan]] [[0.50800395 nan nan 0.7925034 ] [0.9712008 nan nan nan] [ nan nan nan nan]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:asin ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6314.aten_op, %x.1 : Tensor): %2 : Tensor = aten::asin(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[ 0.02673668 0.6314316 0.4963163 nan] [ nan nan -0.58479226 -0.72757334] [-1.1192405 -0.15070868 -0.9822866 0.08136935]] [[-0.02648261 0.6613189 -0.67878634 0.19326691] [ 1.1097265 1.2677635 nan 0.6167642 ] [-1.1357529 -0.23050691 nan 0.12310926]]]]; ov_res: [[[[ 0.02673668 0.6314316 0.4963163 nan] [ nan nan -0.58479226 -0.72757334] [-1.1192405 -0.15070868 -0.9822866 0.08136935]] [[-0.02648261 0.6613189 -0.67878634 0.19326691] [ 1.1097265 1.2677635 nan 0.6167642 ] [-1.1357529 -0.23050691 nan 0.12310926]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:asin_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6316.aten_op, %x.1 : Tensor): %2 : Tensor = aten::asin_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[ nan -0.4376176 -0.23275955 -0.25467446] [-0.4009717 0.12771685 -0.20984043 nan] [-0.38304842 nan nan -0.5210041 ]] [[ nan nan -0.38318548 nan] [-0.4695215 nan nan 0.24694008] [-0.5532541 nan -0.12655495 0.5107172 ]]]]; ov_res: [[[[ nan -0.43761757 -0.23275955 -0.25467446] [-0.4009717 0.12771685 -0.20984043 nan] [-0.38304842 nan nan -0.5210041 ]] [[ nan nan -0.38318548 nan] [-0.4695215 nan nan 0.24694008] [-0.5532541 nan -0.12655495 0.5107172 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:asinh ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6318.aten_op, %x.1 : Tensor): %2 : Tensor = aten::asinh(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[ 0.00513446 -0.67764103 -0.43917742 -0.87505823] [ 0.23140201 -0.19255643 0.45447 -0.09356432] [ 0.95015 1.2799932 0.05598099 -0.96068 ]] [[ 1.3403066 -0.29837292 0.07720447 1.5050952 ] [ 0.8022937 -0.7558128 0.33829582 -0.21692404] [-0.77741313 -0.6731986 -0.29737607 0.22668901]]]]; ov_res: [[[[ 0.00513446 -0.67764103 -0.43917742 -0.87505823] [ 0.23140201 -0.19255643 0.45447 -0.09356432] [ 0.95015 1.2799932 0.05598099 -0.96068 ]] [[ 1.3403066 -0.29837292 0.07720447 1.5050952 ] [ 0.8022937 -0.7558128 0.33829582 -0.21692404] [-0.77741313 -0.6731986 -0.29737607 0.22668901]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:asinh_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6320.aten_op, %x.1 : Tensor): %2 : Tensor = aten::asinh_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[-0.20612371 0.9003087 -1.2770526 0.05646379] [ 0.5500115 0.506671 -0.7281875 -0.60338694] [-0.50953233 0.04712569 0.59855205 0.84737575]] [[-0.6300123 1.7140913 -0.86447024 0.297418 ] [-1.6384184 -0.9433329 0.5509751 0.6825473 ] [ 0.98052204 -0.9643693 0.4528327 1.4607826 ]]]]; ov_res: [[[[-0.20612371 0.9003087 -1.2770526 0.05646379] [ 0.5500115 0.506671 -0.7281875 -0.60338694] [-0.50953233 0.04712569 0.59855205 0.84737575]] [[-0.6300123 1.7140913 -0.86447024 0.297418 ] [-1.6384184 -0.9433329 0.5509751 0.6825473 ] [ 0.98052204 -0.9643693 0.4528327 1.4607826 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:atan ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6322.aten_op, %x.1 : Tensor): %2 : Tensor = aten::atan(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[-0.48259896 -0.64777493 -0.20243268 -0.43331224] [-0.1628518 -0.5213508 -0.3867616 -0.66434324] [ 0.28203273 0.5901282 -0.30326155 -0.83979094]] [[-0.65184116 -0.32547042 -0.47274905 -0.06054337] [ 1.024685 -0.6566962 -0.8271919 0.9796062 ] [ 0.6893832 0.6336255 -0.6339203 0.675589 ]]]]; ov_res: [[[[-0.48259896 -0.64777493 -0.20243268 -0.43331224] [-0.1628518 -0.5213508 -0.3867616 -0.6643432 ] [ 0.28203273 0.5901282 -0.30326155 -0.83979094]] [[-0.65184116 -0.32547042 -0.47274905 -0.06054337] [ 1.024685 -0.6566962 -0.8271919 0.9796062 ] [ 0.6893832 0.6336255 -0.6339203 0.6755891 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:atan_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6324.aten_op, %x.1 : Tensor): %2 : Tensor = aten::atan_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[-0.8194989 0.02176783 0.0943912 -0.10899502] [ 0.8803183 0.2433208 -0.8446202 0.37172246] [-0.44228354 -0.7668207 0.59196806 -0.24330635]] [[-0.70413125 -0.731897 0.27710056 0.3396163 ] [-0.19486055 1.0345918 0.25985682 -0.12211352] [-0.8108323 -0.7358378 0.5480955 -1.0939034 ]]]]; ov_res: [[[[-0.8194989 0.02176783 0.0943912 -0.10899502] [ 0.8803183 0.2433208 -0.8446202 0.37172246] [-0.44228354 -0.7668207 0.59196806 -0.24330635]] [[-0.70413125 -0.731897 0.27710056 0.3396163 ] [-0.19486055 1.0345918 0.25985682 -0.12211352] [-0.8108323 -0.7358378 0.5480956 -1.0939034 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:atanh ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6326.aten_op, %x.1 : Tensor): %2 : Tensor = aten::atanh(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[ nan 4.3360088e-02 -9.5837250e-02 nan] [ nan -6.9841361e-01 -2.1179989e-01 -5.4487520e-01] [ 1.4856932e+00 nan -2.1191804e+00 nan]] [[ 4.9190015e-01 4.7561362e-02 -4.7807801e-01 -4.0907022e-01] [ 1.4502057e-01 -2.3558544e-01 nan nan] [ 2.2828921e-03 -2.3097785e+00 nan 1.3034964e+00]]]]; ov_res: [[[[ nan 4.3360088e-02 -9.5837250e-02 nan] [ nan -6.9841361e-01 -2.1179989e-01 -5.4487520e-01] [ 1.4856932e+00 nan -2.1191804e+00 nan]] [[ 4.9190015e-01 4.7561362e-02 -4.7807801e-01 -4.0907022e-01] [ 1.4502057e-01 -2.3558544e-01 nan nan] [ 2.2828921e-03 -2.3097785e+00 nan 1.3034964e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:atanh_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6328.aten_op, %x.1 : Tensor): %2 : Tensor = aten::atanh_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[-1.2785531 0.17905122 -1.1850481 nan] [-1.181308 0.6760633 nan 0.03841073] [ nan -0.41369987 -0.6210407 nan]] [[ 1.2336068 nan -0.23642687 nan] [-1.2085888 nan nan -0.8375078 ] [ nan nan nan 1.3213869 ]]]]; ov_res: [[[[-1.2785531 0.17905122 -1.1850481 nan] [-1.181308 0.6760633 nan 0.03841073] [ nan -0.41369987 -0.6210407 nan]] [[ 1.2336068 nan -0.23642687 nan] [-1.2085888 nan nan -0.8375078 ] [ nan nan nan 1.3213869 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:cos ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6330.aten_op, %x.1 : Tensor): %2 : Tensor = aten::cos(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[ 0.44066927 0.9394612 0.9976946 0.9905986 ] [ 0.46942613 0.8715159 0.6907976 0.92143905] [ 0.6044343 0.859156 0.85987353 0.9015608 ]] [[ 0.9586511 -0.28853863 0.9006792 0.70859486] [ 0.36629313 0.9996207 0.9569944 0.71752894] [ 0.7586314 0.4632878 0.39451936 0.7728896 ]]]]; ov_res: [[[[ 0.44066927 0.9394612 0.9976946 0.9905986 ] [ 0.46942613 0.8715159 0.6907976 0.92143905] [ 0.6044343 0.859156 0.85987353 0.9015608 ]] [[ 0.9586511 -0.28853863 0.9006792 0.70859486] [ 0.36629313 0.9996207 0.9569944 0.71752894] [ 0.7586314 0.4632878 0.39451936 0.7728896 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:cos_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6332.aten_op, %x.1 : Tensor): %2 : Tensor = aten::cos_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[ 0.9596527 0.90658617 -0.63127124 0.8388222 ] [ 0.8141889 0.9223872 0.935127 0.6986529 ] [ 0.78707135 0.9978577 0.9241906 -0.42056146]] [[ 0.18044853 0.82630926 0.9994749 0.9985393 ] [ 0.8123598 0.9229461 0.8030633 0.10098934] [ 0.9900288 0.8125441 0.82058084 0.9999083 ]]]]; ov_res: [[[[ 0.9596527 0.90658617 -0.63127124 0.8388222 ] [ 0.8141889 0.9223872 0.935127 0.6986529 ] [ 0.78707135 0.9978577 0.9241906 -0.42056146]] [[ 0.18044853 0.8263093 0.9994749 0.9985394 ] [ 0.8123598 0.9229461 0.8030633 0.10098934] [ 0.9900288 0.8125441 0.82058084 0.9999083 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:cosh ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6334.aten_op, %x.1 : Tensor): %2 : Tensor = aten::cosh(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[1.398736 1.5037622 1.2543432 1.1525114] [4.6807823 1.2973645 3.2450619 1.5060563] [2.1948407 1.0341091 1.1478217 1.0049565]] [[1.0140384 1.0388123 1.6790866 1.0794065] [2.420326 1.4812343 1.1246284 1.0095304] [2.2762063 1.0120158 1.1026816 1.1542301]]]]; ov_res: [[[[1.398736 1.5037622 1.2543432 1.1525114] [4.6807823 1.2973645 3.2450619 1.5060563] [2.1948407 1.0341091 1.1478217 1.0049565]] [[1.0140384 1.0388123 1.6790866 1.0794065] [2.420326 1.4812343 1.1246284 1.0095304] [2.2762063 1.0120158 1.1026816 1.1542301]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:cosh_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6336.aten_op, %x.1 : Tensor): %2 : Tensor = aten::cosh_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[1.0385096 1.3222615 1.0388945 1.979443 ] [1.0435038 3.4205635 1.2398618 1.0207539] [1.442872 1.0794652 2.0331504 1.0721858]] [[1.3381226 1.0206327 1.0211935 1.1625869] [1.2109749 1.691261 1.1184952 3.0342536] [1.0156232 6.528237 1.0024958 1.9049078]]]]; ov_res: [[[[1.0385096 1.3222615 1.0388945 1.979443 ] [1.0435038 3.4205637 1.2398618 1.0207539] [1.442872 1.0794652 2.0331504 1.0721858]] [[1.3381226 1.0206327 1.0211935 1.1625869] [1.2109749 1.691261 1.1184952 3.0342536] [1.0156232 6.528237 1.0024958 1.9049078]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:sin ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6338.aten_op, %x.1 : Tensor): %2 : Tensor = aten::sin(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[-0.17739433 -0.58018845 -0.16782139 0.78565484] [-0.32132533 0.6828726 0.87169105 -0.501045 ] [-0.9370266 -0.86423147 0.39833897 0.3351006 ]] [[-0.7124854 0.138776 -0.05141074 0.99909705] [-0.6814317 0.7191377 -0.7413887 -0.64695555] [ 0.8628 -0.29783696 0.35951373 0.9143413 ]]]]; ov_res: [[[[-0.17739433 -0.58018845 -0.16782139 0.78565484] [-0.32132533 0.6828726 0.87169105 -0.501045 ] [-0.9370266 -0.86423147 0.39833897 0.3351006 ]] [[-0.7124854 0.138776 -0.05141074 0.99909705] [-0.6814317 0.7191377 -0.7413887 -0.64695555] [ 0.8628 -0.29783696 0.35951373 0.9143413 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:sin_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6340.aten_op, %x.1 : Tensor): %2 : Tensor = aten::sin_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[-0.99390644 -0.9999963 -0.5819803 -0.6494822 ] [-0.39871675 -0.4446693 -0.7308896 -0.531267 ] [-0.520968 0.24624418 -0.07801822 0.41270873]] [[ 0.4189337 -0.13023359 -0.73505884 -0.22900033] [-0.9870811 0.6516577 0.10894126 -0.2804281 ] [ 0.92137367 -0.74382883 0.1510607 0.95815086]]]]; ov_res: [[[[-0.99390644 -0.9999963 -0.5819803 -0.6494822 ] [-0.39871675 -0.4446693 -0.7308896 -0.531267 ] [-0.520968 0.24624418 -0.07801822 0.41270873]] [[ 0.4189337 -0.13023359 -0.73505884 -0.22900033] [-0.9870811 0.6516577 0.10894126 -0.2804281 ] [ 0.92137367 -0.74382883 0.1510607 0.95815086]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:sinh ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6342.aten_op, %x.1 : Tensor): %2 : Tensor = aten::sinh(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[-0.6690284 -0.5170983 4.0863585 -0.6147363 ] [-0.46614853 0.5849268 0.16459246 0.6313934 ] [-1.4055045 -0.14301135 0.20396413 0.47306094]] [[-0.6353969 3.3975282 -1.0132225 -3.8066852 ] [-1.2831726 -1.7882833 0.25272584 -0.10481233] [-0.13477549 1.2670255 1.3939773 -4.327127 ]]]]; ov_res: [[[[-0.6690284 -0.5170983 4.0863585 -0.6147363 ] [-0.46614853 0.58492684 0.16459246 0.6313934 ] [-1.4055045 -0.14301135 0.20396413 0.4730609 ]] [[-0.63539684 3.3975282 -1.0132225 -3.806685 ] [-1.2831726 -1.7882833 0.25272584 -0.10481233] [-0.13477549 1.2670255 1.3939773 -4.327127 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:sinh_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6344.aten_op, %x.1 : Tensor): %2 : Tensor = aten::sinh_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[ 2.0308225 0.14508125 -0.20458548 -1.5906383 ] [ 1.279376 -1.0235542 -7.778591 1.6709476 ] [ 5.226202 -0.67341137 1.3441554 -1.3135289 ]] [[ 0.46015394 0.2058048 -0.3349486 -0.10824368] [ 0.3963651 0.022277 2.4978251 0.02367333] [-0.87869936 -0.18000197 -0.26603904 -0.40494564]]]]; ov_res: [[[[ 2.0308225 0.14508125 -0.20458548 -1.5906384 ] [ 1.279376 -1.0235541 -7.7785916 1.6709476 ] [ 5.226202 -0.67341137 1.3441554 -1.3135289 ]] [[ 0.4601539 0.2058048 -0.3349486 -0.10824368] [ 0.3963651 0.022277 2.4978251 0.02367333] [-0.87869936 -0.18000197 -0.26603904 -0.40494564]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:tan ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6346.aten_op, %x.1 : Tensor): %2 : Tensor = aten::tan(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[-6.4145794 0.2360038 0.841254 3.003397 ] [ 1.4621483 -1.8453822 3.7570505 -2.0142877 ] [-0.5926589 0.2933616 -1.0951431 -6.983951 ]] [[ 8.032183 -0.70796174 0.01202737 -1.8383082 ] [-0.6859132 2.425792 0.29841208 0.76699585] [-0.25736296 -0.24663177 0.31967017 -2.2643096 ]]]]; ov_res: [[[[-6.4145794 0.2360038 0.841254 3.003397 ] [ 1.4621482 -1.8453822 3.7570505 -2.0142877 ] [-0.5926589 0.2933616 -1.0951431 -6.983951 ]] [[ 8.032183 -0.7079618 0.01202737 -1.8383082 ] [-0.6859132 2.425792 0.29841208 0.76699585] [-0.25736296 -0.24663177 0.31967017 -2.2643096 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:tan_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6348.aten_op, %x.1 : Tensor): %2 : Tensor = aten::tan_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[ 1.8802769 -0.3191238 -1.2686666 -1.1916714 ] [ -3.3385835 0.92802346 0.20961429 -0.7655772 ] [ 0.1848385 0.58333874 -3.1138532 -2.2106738 ]] [[ 0.8316327 0.6908434 -4.7610407 0.39571872] [ 1.5961292 0.15422527 -0.91308343 0.9220014 ] [-14.313532 0.9730819 -0.27056736 -0.31314692]]]]; ov_res: [[[[ 1.8802768 -0.3191238 -1.2686666 -1.1916714 ] [ -3.3385835 0.92802346 0.20961429 -0.7655772 ] [ 0.1848385 0.58333874 -3.1138532 -2.2106738 ]] [[ 0.8316326 0.6908434 -4.7610407 0.39571872] [ 1.5961292 0.15422527 -0.91308343 0.9220014 ] [-14.313532 0.9730819 -0.27056736 -0.31314692]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:tanh ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6350.aten_op, %x.1 : Tensor): %2 : Tensor = aten::tanh(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[-0.442504 -0.92944264 -0.2209764 -0.18984476] [ 0.9093024 -0.9626122 0.02977338 0.6422839 ] [-0.47734344 0.4478252 0.7313292 -0.53239715]] [[-0.16278324 -0.28246546 0.7388231 0.61357266] [-0.731316 0.6342135 -0.8223987 -0.43354905] [-0.85643125 0.90077156 -0.44063923 0.9225479 ]]]]; ov_res: [[[[-0.442504 -0.92944264 -0.2209764 -0.18984476] [ 0.9093024 -0.9626122 0.02977338 0.6422839 ] [-0.47734344 0.4478252 0.7313292 -0.53239715]] [[-0.16278324 -0.28246546 0.7388231 0.61357266] [-0.73131603 0.6342135 -0.8223987 -0.43354905] [-0.85643125 0.9007715 -0.44063923 0.9225479 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_trigonometry.py::TestTrigonom::test_mm[ ie_device:CPU - precision:FP32 - op:tanh_ ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_trigonometry.___torch_mangle_6352.aten_op, %x.1 : Tensor): %2 : Tensor = aten::tanh_(%x.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_trigonometry.py:49:23 return (%2) fw_re: [[[[-0.15913454 0.6627636 -0.7102572 -0.23166557] [ 0.6213053 -0.7689039 0.91217244 -0.6152634 ] [ 0.8025445 -0.57493705 0.2486138 -0.21045884]] [[ 0.42701584 -0.8183355 -0.0233645 -0.11141048] [-0.46637306 -0.39754462 0.82546586 0.60582596] [ 0.46039602 0.6090734 0.30347744 0.62677056]]]]; ov_res: [[[[-0.15913454 0.6627636 -0.7102572 -0.23166557] [ 0.6213053 -0.7689039 0.91217244 -0.6152634 ] [ 0.8025445 -0.57493705 0.24861379 -0.21045884]] [[ 0.42701584 -0.8183355 -0.0233645 -0.11141048] [-0.46637306 -0.39754462 0.8254659 0.60582596] [ 0.46039602 0.6090734 0.30347744 0.62677056]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float64'> - input_dtype:<class 'numpy.float64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6353.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 20. 118. 90. ... 108. 21. 117.] [ 16. 70. 113. ... 126. 60. 109.] [ 58. 83. 118. ... 34. 84. 121.] ... [ 15. 26. 118. ... 79. 87. 119.] [ 0. 49. 65. ... 39. 105. 64.] [112. 86. 17. ... 52. 83. 13.]] [[ 26. 20. 78. ... 84. 3. 32.] [ 60. 60. 114. ... 71. 82. 73.] [102. 13. 103. ... 32. 20. 73.] ... [111. 79. 93. ... 124. 10. 28.] [ 85. 114. 125. ... 40. 69. 18.] [ 15. 100. 98. ... 1. 37. 57.]] [[ 58. 121. 119. ... 98. 8. 78.] [ 47. 103. 91. ... 81. 67. 71.] [ 64. 14. 55. ... 23. 88. 43.] ... [ 13. 119. 46. ... 2. 82. 96.] [ 55. 13. 102. ... 105. 103. 117.] [ 17. 7. 95. ... 11. 101. 96.]]]]; ov_res: [[[[ 20. 118. 90. ... 108. 21. 117.] [ 16. 70. 113. ... 126. 60. 109.] [ 58. 83. 118. ... 34. 84. 121.] ... [ 15. 26. 118. ... 79. 87. 119.] [ 0. 49. 65. ... 39. 105. 64.] [112. 86. 17. ... 52. 83. 13.]] [[ 26. 20. 78. ... 84. 3. 32.] [ 60. 60. 114. ... 71. 82. 73.] [102. 13. 103. ... 32. 20. 73.] ... [111. 79. 93. ... 124. 10. 28.] [ 85. 114. 125. ... 40. 69. 18.] [ 15. 100. 98. ... 1. 37. 57.]] [[ 58. 121. 119. ... 98. 8. 78.] [ 47. 103. 91. ... 81. 67. 71.] [ 64. 14. 55. ... 23. 88. 43.] ... [ 13. 119. 46. ... 2. 82. 96.] [ 55. 13. 102. ... 105. 103. 117.] [ 17. 7. 95. ... 11. 101. 96.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float64'> - input_dtype:<class 'numpy.float32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6355.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 77. 24. 33. ... 92. 104. 81.] [ 2. 116. 93. ... 59. 120. 59.] [114. 32. 14. ... 99. 110. 32.] ... [ 15. 117. 58. ... 25. 107. 65.] [ 79. 55. 15. ... 115. 103. 29.] [ 40. 20. 17. ... 112. 4. 94.]] [[111. 87. 2. ... 99. 75. 78.] [ 9. 108. 70. ... 72. 65. 46.] [ 6. 27. 92. ... 88. 91. 43.] ... [ 57. 29. 101. ... 86. 108. 81.] [121. 45. 46. ... 27. 86. 91.] [ 66. 109. 11. ... 7. 46. 96.]] [[ 26. 76. 35. ... 103. 38. 71.] [ 46. 84. 110. ... 26. 4. 121.] [ 51. 56. 76. ... 33. 7. 13.] ... [ 12. 76. 76. ... 103. 65. 0.] [ 65. 53. 91. ... 40. 2. 109.] [ 62. 66. 115. ... 112. 11. 122.]]]]; ov_res: [[[[ 77. 24. 33. ... 92. 104. 81.] [ 2. 116. 93. ... 59. 120. 59.] [114. 32. 14. ... 99. 110. 32.] ... [ 15. 117. 58. ... 25. 107. 65.] [ 79. 55. 15. ... 115. 103. 29.] [ 40. 20. 17. ... 112. 4. 94.]] [[111. 87. 2. ... 99. 75. 78.] [ 9. 108. 70. ... 72. 65. 46.] [ 6. 27. 92. ... 88. 91. 43.] ... [ 57. 29. 101. ... 86. 108. 81.] [121. 45. 46. ... 27. 86. 91.] [ 66. 109. 11. ... 7. 46. 96.]] [[ 26. 76. 35. ... 103. 38. 71.] [ 46. 84. 110. ... 26. 4. 121.] [ 51. 56. 76. ... 33. 7. 13.] ... [ 12. 76. 76. ... 103. 65. 0.] [ 65. 53. 91. ... 40. 2. 109.] [ 62. 66. 115. ... 112. 11. 122.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float64'> - input_dtype:<class 'numpy.int64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6357.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 48. 10. 28. ... 82. 90. 109.] [109. 82. 23. ... 122. 123. 91.] [ 31. 68. 63. ... 111. 13. 27.] ... [ 84. 108. 50. ... 25. 115. 35.] [ 80. 125. 104. ... 123. 24. 40.] [ 37. 121. 58. ... 21. 81. 36.]] [[ 19. 47. 105. ... 123. 22. 98.] [ 60. 6. 96. ... 4. 111. 94.] [ 94. 97. 116. ... 29. 119. 54.] ... [ 15. 100. 52. ... 22. 95. 83.] [119. 0. 50. ... 5. 80. 11.] [ 27. 110. 100. ... 105. 30. 95.]] [[ 4. 100. 57. ... 111. 126. 32.] [ 86. 63. 1. ... 34. 53. 75.] [ 7. 125. 56. ... 91. 102. 107.] ... [ 4. 25. 111. ... 66. 55. 4.] [106. 34. 96. ... 51. 92. 104.] [ 19. 118. 111. ... 45. 4. 118.]]]]; ov_res: [[[[ 48. 10. 28. ... 82. 90. 109.] [109. 82. 23. ... 122. 123. 91.] [ 31. 68. 63. ... 111. 13. 27.] ... [ 84. 108. 50. ... 25. 115. 35.] [ 80. 125. 104. ... 123. 24. 40.] [ 37. 121. 58. ... 21. 81. 36.]] [[ 19. 47. 105. ... 123. 22. 98.] [ 60. 6. 96. ... 4. 111. 94.] [ 94. 97. 116. ... 29. 119. 54.] ... [ 15. 100. 52. ... 22. 95. 83.] [119. 0. 50. ... 5. 80. 11.] [ 27. 110. 100. ... 105. 30. 95.]] [[ 4. 100. 57. ... 111. 126. 32.] [ 86. 63. 1. ... 34. 53. 75.] [ 7. 125. 56. ... 91. 102. 107.] ... [ 4. 25. 111. ... 66. 55. 4.] [106. 34. 96. ... 51. 92. 104.] [ 19. 118. 111. ... 45. 4. 118.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float64'> - input_dtype:<class 'numpy.int32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6359.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 97. 18. 122. ... 100. 115. 59.] [ 10. 21. 22. ... 53. 123. 1.] [ 42. 17. 103. ... 14. 88. 28.] ... [111. 71. 100. ... 116. 30. 121.] [108. 26. 70. ... 66. 53. 103.] [ 91. 89. 6. ... 68. 49. 93.]] [[ 80. 66. 13. ... 21. 3. 38.] [ 4. 54. 41. ... 40. 37. 118.] [122. 63. 57. ... 21. 116. 37.] ... [ 93. 73. 108. ... 53. 9. 22.] [ 33. 79. 76. ... 125. 35. 9.] [ 13. 24. 86. ... 63. 61. 97.]] [[ 55. 47. 1. ... 77. 80. 102.] [ 98. 75. 19. ... 110. 64. 17.] [ 11. 9. 7. ... 37. 116. 125.] ... [ 14. 125. 2. ... 107. 110. 26.] [ 42. 102. 82. ... 47. 0. 89.] [ 66. 63. 82. ... 14. 53. 11.]]]]; ov_res: [[[[ 97. 18. 122. ... 100. 115. 59.] [ 10. 21. 22. ... 53. 123. 1.] [ 42. 17. 103. ... 14. 88. 28.] ... [111. 71. 100. ... 116. 30. 121.] [108. 26. 70. ... 66. 53. 103.] [ 91. 89. 6. ... 68. 49. 93.]] [[ 80. 66. 13. ... 21. 3. 38.] [ 4. 54. 41. ... 40. 37. 118.] [122. 63. 57. ... 21. 116. 37.] ... [ 93. 73. 108. ... 53. 9. 22.] [ 33. 79. 76. ... 125. 35. 9.] [ 13. 24. 86. ... 63. 61. 97.]] [[ 55. 47. 1. ... 77. 80. 102.] [ 98. 75. 19. ... 110. 64. 17.] [ 11. 9. 7. ... 37. 116. 125.] ... [ 14. 125. 2. ... 107. 110. 26.] [ 42. 102. 82. ... 47. 0. 89.] [ 66. 63. 82. ... 14. 53. 11.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float64'> - input_dtype:<class 'numpy.int16'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6361.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 66. 36. 118. ... 53. 42. 98.] [ 27. 49. 91. ... 41. 112. 111.] [ 66. 17. 17. ... 122. 13. 2.] ... [ 38. 87. 119. ... 124. 22. 76.] [ 33. 63. 93. ... 99. 12. 2.] [ 78. 88. 36. ... 63. 108. 24.]] [[ 38. 54. 84. ... 93. 56. 67.] [ 65. 62. 111. ... 8. 10. 121.] [ 87. 83. 107. ... 109. 28. 125.] ... [ 28. 118. 103. ... 116. 108. 51.] [ 63. 25. 72. ... 82. 108. 119.] [ 80. 89. 114. ... 125. 60. 108.]] [[ 30. 37. 44. ... 52. 55. 58.] [ 48. 51. 23. ... 56. 23. 126.] [ 79. 93. 73. ... 13. 25. 49.] ... [ 2. 42. 15. ... 52. 116. 95.] [100. 27. 54. ... 53. 27. 78.] [ 61. 92. 35. ... 37. 98. 38.]]]]; ov_res: [[[[ 66. 36. 118. ... 53. 42. 98.] [ 27. 49. 91. ... 41. 112. 111.] [ 66. 17. 17. ... 122. 13. 2.] ... [ 38. 87. 119. ... 124. 22. 76.] [ 33. 63. 93. ... 99. 12. 2.] [ 78. 88. 36. ... 63. 108. 24.]] [[ 38. 54. 84. ... 93. 56. 67.] [ 65. 62. 111. ... 8. 10. 121.] [ 87. 83. 107. ... 109. 28. 125.] ... [ 28. 118. 103. ... 116. 108. 51.] [ 63. 25. 72. ... 82. 108. 119.] [ 80. 89. 114. ... 125. 60. 108.]] [[ 30. 37. 44. ... 52. 55. 58.] [ 48. 51. 23. ... 56. 23. 126.] [ 79. 93. 73. ... 13. 25. 49.] ... [ 2. 42. 15. ... 52. 116. 95.] [100. 27. 54. ... 53. 27. 78.] [ 61. 92. 35. ... 37. 98. 38.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float64'> - input_dtype:<class 'numpy.int8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6363.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 44. 106. 58. ... 75. 126. 90.] [ 12. 100. 119. ... 36. 84. 83.] [ 39. 21. 80. ... 35. 27. 11.] ... [ 61. 92. 125. ... 77. 16. 82.] [ 28. 122. 43. ... 47. 84. 40.] [ 9. 48. 23. ... 84. 82. 68.]] [[ 7. 88. 94. ... 65. 81. 24.] [101. 43. 17. ... 123. 52. 37.] [ 55. 121. 94. ... 59. 63. 102.] ... [113. 56. 86. ... 67. 52. 121.] [ 56. 124. 40. ... 100. 98. 16.] [ 48. 36. 72. ... 21. 119. 96.]] [[ 57. 48. 121. ... 103. 27. 37.] [ 77. 19. 96. ... 32. 122. 8.] [ 8. 35. 124. ... 94. 60. 5.] ... [ 49. 87. 46. ... 55. 40. 38.] [ 32. 83. 46. ... 52. 65. 30.] [ 88. 39. 6. ... 79. 101. 104.]]]]; ov_res: [[[[ 44. 106. 58. ... 75. 126. 90.] [ 12. 100. 119. ... 36. 84. 83.] [ 39. 21. 80. ... 35. 27. 11.] ... [ 61. 92. 125. ... 77. 16. 82.] [ 28. 122. 43. ... 47. 84. 40.] [ 9. 48. 23. ... 84. 82. 68.]] [[ 7. 88. 94. ... 65. 81. 24.] [101. 43. 17. ... 123. 52. 37.] [ 55. 121. 94. ... 59. 63. 102.] ... [113. 56. 86. ... 67. 52. 121.] [ 56. 124. 40. ... 100. 98. 16.] [ 48. 36. 72. ... 21. 119. 96.]] [[ 57. 48. 121. ... 103. 27. 37.] [ 77. 19. 96. ... 32. 122. 8.] [ 8. 35. 124. ... 94. 60. 5.] ... [ 49. 87. 46. ... 55. 40. 38.] [ 32. 83. 46. ... 52. 65. 30.] [ 88. 39. 6. ... 79. 101. 104.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float64'> - input_dtype:<class 'numpy.uint8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6365.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 65. 41. 70. ... 49. 115. 80.] [107. 81. 120. ... 6. 54. 120.] [ 17. 76. 67. ... 21. 83. 126.] ... [ 88. 99. 60. ... 57. 67. 31.] [ 67. 30. 4. ... 17. 38. 73.] [ 46. 44. 7. ... 75. 88. 41.]] [[ 27. 55. 32. ... 84. 32. 110.] [ 39. 23. 120. ... 74. 0. 121.] [ 52. 15. 94. ... 90. 68. 78.] ... [115. 21. 31. ... 98. 36. 121.] [120. 4. 23. ... 64. 37. 54.] [ 49. 110. 90. ... 116. 99. 29.]] [[ 95. 66. 21. ... 121. 13. 106.] [ 20. 110. 78. ... 116. 39. 121.] [101. 37. 21. ... 80. 49. 57.] ... [ 46. 120. 87. ... 87. 121. 58.] [ 65. 70. 37. ... 120. 110. 120.] [ 38. 32. 31. ... 84. 11. 107.]]]]; ov_res: [[[[ 65. 41. 70. ... 49. 115. 80.] [107. 81. 120. ... 6. 54. 120.] [ 17. 76. 67. ... 21. 83. 126.] ... [ 88. 99. 60. ... 57. 67. 31.] [ 67. 30. 4. ... 17. 38. 73.] [ 46. 44. 7. ... 75. 88. 41.]] [[ 27. 55. 32. ... 84. 32. 110.] [ 39. 23. 120. ... 74. 0. 121.] [ 52. 15. 94. ... 90. 68. 78.] ... [115. 21. 31. ... 98. 36. 121.] [120. 4. 23. ... 64. 37. 54.] [ 49. 110. 90. ... 116. 99. 29.]] [[ 95. 66. 21. ... 121. 13. 106.] [ 20. 110. 78. ... 116. 39. 121.] [101. 37. 21. ... 80. 49. 57.] ... [ 46. 120. 87. ... 87. 121. 58.] [ 65. 70. 37. ... 120. 110. 120.] [ 38. 32. 31. ... 84. 11. 107.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float32'> - input_dtype:<class 'numpy.float64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6367.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 92. 37. 11. ... 10. 117. 61.] [110. 31. 2. ... 61. 39. 114.] [ 72. 122. 103. ... 109. 37. 76.] ... [ 78. 24. 14. ... 80. 77. 70.] [ 14. 117. 99. ... 94. 84. 84.] [ 85. 68. 23. ... 31. 4. 51.]] [[ 72. 45. 36. ... 64. 122. 61.] [ 44. 69. 125. ... 77. 41. 4.] [ 62. 64. 72. ... 17. 108. 75.] ... [ 61. 115. 3. ... 28. 86. 76.] [ 82. 7. 114. ... 46. 96. 16.] [100. 112. 87. ... 81. 74. 26.]] [[ 0. 47. 79. ... 25. 61. 26.] [ 73. 96. 103. ... 15. 33. 49.] [106. 126. 93. ... 80. 116. 105.] ... [ 64. 22. 43. ... 74. 80. 51.] [113. 79. 81. ... 33. 83. 57.] [102. 108. 7. ... 106. 125. 112.]]]]; ov_res: [[[[ 92. 37. 11. ... 10. 117. 61.] [110. 31. 2. ... 61. 39. 114.] [ 72. 122. 103. ... 109. 37. 76.] ... [ 78. 24. 14. ... 80. 77. 70.] [ 14. 117. 99. ... 94. 84. 84.] [ 85. 68. 23. ... 31. 4. 51.]] [[ 72. 45. 36. ... 64. 122. 61.] [ 44. 69. 125. ... 77. 41. 4.] [ 62. 64. 72. ... 17. 108. 75.] ... [ 61. 115. 3. ... 28. 86. 76.] [ 82. 7. 114. ... 46. 96. 16.] [100. 112. 87. ... 81. 74. 26.]] [[ 0. 47. 79. ... 25. 61. 26.] [ 73. 96. 103. ... 15. 33. 49.] [106. 126. 93. ... 80. 116. 105.] ... [ 64. 22. 43. ... 74. 80. 51.] [113. 79. 81. ... 33. 83. 57.] [102. 108. 7. ... 106. 125. 112.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float32'> - input_dtype:<class 'numpy.float32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6369.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 21. 76. 111. ... 6. 42. 54.] [ 94. 64. 56. ... 60. 104. 56.] [ 99. 55. 100. ... 88. 126. 51.] ... [ 75. 77. 48. ... 43. 21. 78.] [101. 96. 13. ... 61. 104. 45.] [ 39. 79. 39. ... 29. 0. 45.]] [[ 11. 90. 119. ... 96. 16. 76.] [ 43. 81. 91. ... 61. 109. 27.] [ 13. 56. 26. ... 122. 73. 48.] ... [ 36. 11. 50. ... 119. 100. 94.] [ 40. 6. 9. ... 91. 124. 14.] [ 49. 62. 9. ... 50. 29. 101.]] [[ 34. 104. 125. ... 70. 81. 15.] [ 7. 38. 5. ... 79. 126. 100.] [ 2. 77. 30. ... 62. 11. 107.] ... [ 67. 7. 25. ... 60. 77. 79.] [ 76. 7. 114. ... 116. 117. 71.] [ 79. 96. 9. ... 119. 123. 102.]]]]; ov_res: [[[[ 21. 76. 111. ... 6. 42. 54.] [ 94. 64. 56. ... 60. 104. 56.] [ 99. 55. 100. ... 88. 126. 51.] ... [ 75. 77. 48. ... 43. 21. 78.] [101. 96. 13. ... 61. 104. 45.] [ 39. 79. 39. ... 29. 0. 45.]] [[ 11. 90. 119. ... 96. 16. 76.] [ 43. 81. 91. ... 61. 109. 27.] [ 13. 56. 26. ... 122. 73. 48.] ... [ 36. 11. 50. ... 119. 100. 94.] [ 40. 6. 9. ... 91. 124. 14.] [ 49. 62. 9. ... 50. 29. 101.]] [[ 34. 104. 125. ... 70. 81. 15.] [ 7. 38. 5. ... 79. 126. 100.] [ 2. 77. 30. ... 62. 11. 107.] ... [ 67. 7. 25. ... 60. 77. 79.] [ 76. 7. 114. ... 116. 117. 71.] [ 79. 96. 9. ... 119. 123. 102.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float32'> - input_dtype:<class 'numpy.int64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6371.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 70. 74. 88. ... 59. 3. 35.] [ 80. 53. 14. ... 5. 49. 96.] [ 69. 79. 2. ... 59. 121. 4.] ... [ 19. 27. 48. ... 102. 49. 32.] [ 32. 12. 96. ... 124. 81. 43.] [ 86. 28. 101. ... 99. 122. 3.]] [[ 99. 69. 97. ... 37. 45. 26.] [102. 105. 52. ... 90. 68. 23.] [ 0. 13. 44. ... 61. 56. 13.] ... [ 47. 114. 68. ... 6. 105. 109.] [ 86. 79. 94. ... 70. 17. 80.] [ 3. 56. 59. ... 59. 6. 86.]] [[ 2. 11. 46. ... 57. 23. 63.] [ 1. 36. 54. ... 116. 13. 47.] [ 81. 71. 90. ... 122. 63. 114.] ... [ 94. 126. 21. ... 84. 52. 17.] [ 92. 125. 2. ... 0. 47. 41.] [126. 92. 0. ... 83. 11. 61.]]]]; ov_res: [[[[ 70. 74. 88. ... 59. 3. 35.] [ 80. 53. 14. ... 5. 49. 96.] [ 69. 79. 2. ... 59. 121. 4.] ... [ 19. 27. 48. ... 102. 49. 32.] [ 32. 12. 96. ... 124. 81. 43.] [ 86. 28. 101. ... 99. 122. 3.]] [[ 99. 69. 97. ... 37. 45. 26.] [102. 105. 52. ... 90. 68. 23.] [ 0. 13. 44. ... 61. 56. 13.] ... [ 47. 114. 68. ... 6. 105. 109.] [ 86. 79. 94. ... 70. 17. 80.] [ 3. 56. 59. ... 59. 6. 86.]] [[ 2. 11. 46. ... 57. 23. 63.] [ 1. 36. 54. ... 116. 13. 47.] [ 81. 71. 90. ... 122. 63. 114.] ... [ 94. 126. 21. ... 84. 52. 17.] [ 92. 125. 2. ... 0. 47. 41.] [126. 92. 0. ... 83. 11. 61.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float32'> - input_dtype:<class 'numpy.int32'> ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6373.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 23. 121. 120. ... 121. 98. 47.] [ 65. 40. 93. ... 101. 11. 25.] [ 85. 58. 23. ... 35. 119. 122.] ... [ 76. 2. 29. ... 122. 34. 8.] [ 41. 9. 106. ... 94. 42. 31.] [ 44. 15. 52. ... 114. 14. 123.]] [[ 12. 95. 10. ... 29. 81. 92.] [ 80. 72. 65. ... 54. 1. 107.] [ 28. 30. 6. ... 122. 50. 30.] ... [ 2. 52. 68. ... 121. 59. 1.] [ 70. 0. 26. ... 49. 28. 56.] [124. 22. 91. ... 51. 98. 96.]] [[ 67. 80. 95. ... 88. 64. 71.] [ 65. 103. 96. ... 22. 8. 77.] [ 8. 56. 124. ... 83. 81. 102.] ... [ 65. 40. 63. ... 57. 105. 37.] [ 9. 20. 70. ... 74. 67. 31.] [ 60. 47. 87. ... 56. 59. 90.]]]]; ov_res: [[[[ 23. 121. 120. ... 121. 98. 47.] [ 65. 40. 93. ... 101. 11. 25.] [ 85. 58. 23. ... 35. 119. 122.] ... [ 76. 2. 29. ... 122. 34. 8.] [ 41. 9. 106. ... 94. 42. 31.] [ 44. 15. 52. ... 114. 14. 123.]] [[ 12. 95. 10. ... 29. 81. 92.] [ 80. 72. 65. ... 54. 1. 107.] [ 28. 30. 6. ... 122. 50. 30.] ... [ 2. 52. 68. ... 121. 59. 1.] [ 70. 0. 26. ... 49. 28. 56.] [124. 22. 91. ... 51. 98. 96.]] [[ 67. 80. 95. ... 88. 64. 71.] [ 65. 103. 96. ... 22. 8. 77.] [ 8. 56. 124. ... 83. 81. 102.] ... [ 65. 40. 63. ... 57. 105. 37.] [ 9. 20. 70. ... 74. 67. 31.] [ 60. 47. 87. ... 56. 59. 90.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float32'> - input_dtype:<class 'numpy.int16'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6375.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 0. 35. 56. ... 0. 38. 82.] [ 60. 5. 49. ... 53. 59. 82.] [109. 114. 37. ... 7. 6. 41.] ... [ 26. 82. 118. ... 109. 1. 14.] [ 78. 63. 20. ... 5. 64. 115.] [ 75. 98. 63. ... 3. 31. 93.]] [[ 76. 21. 54. ... 20. 35. 50.] [ 94. 35. 52. ... 63. 63. 14.] [105. 7. 97. ... 122. 25. 45.] ... [107. 62. 40. ... 11. 99. 36.] [ 48. 3. 11. ... 91. 105. 36.] [ 57. 39. 95. ... 36. 112. 22.]] [[ 98. 44. 66. ... 38. 109. 83.] [114. 17. 72. ... 91. 62. 73.] [ 61. 61. 53. ... 68. 78. 97.] ... [ 43. 105. 100. ... 13. 107. 0.] [ 92. 46. 42. ... 20. 37. 72.] [ 36. 29. 106. ... 5. 93. 14.]]]]; ov_res: [[[[ 0. 35. 56. ... 0. 38. 82.] [ 60. 5. 49. ... 53. 59. 82.] [109. 114. 37. ... 7. 6. 41.] ... [ 26. 82. 118. ... 109. 1. 14.] [ 78. 63. 20. ... 5. 64. 115.] [ 75. 98. 63. ... 3. 31. 93.]] [[ 76. 21. 54. ... 20. 35. 50.] [ 94. 35. 52. ... 63. 63. 14.] [105. 7. 97. ... 122. 25. 45.] ... [107. 62. 40. ... 11. 99. 36.] [ 48. 3. 11. ... 91. 105. 36.] [ 57. 39. 95. ... 36. 112. 22.]] [[ 98. 44. 66. ... 38. 109. 83.] [114. 17. 72. ... 91. 62. 73.] [ 61. 61. 53. ... 68. 78. 97.] ... [ 43. 105. 100. ... 13. 107. 0.] [ 92. 46. 42. ... 20. 37. 72.] [ 36. 29. 106. ... 5. 93. 14.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float32'> - input_dtype:<class 'numpy.int8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6377.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 42. 59. 76. ... 100. 120. 98.] [ 68. 11. 73. ... 39. 91. 39.] [120. 29. 10. ... 81. 23. 120.] ... [ 90. 66. 71. ... 115. 38. 64.] [ 30. 63. 124. ... 43. 78. 103.] [113. 81. 78. ... 7. 59. 11.]] [[ 84. 27. 112. ... 87. 23. 62.] [ 36. 9. 96. ... 28. 60. 86.] [ 19. 59. 90. ... 74. 113. 43.] ... [120. 28. 114. ... 107. 126. 119.] [ 66. 29. 26. ... 107. 11. 97.] [111. 66. 25. ... 41. 121. 25.]] [[ 89. 105. 63. ... 30. 108. 88.] [ 60. 76. 11. ... 40. 32. 44.] [ 17. 33. 19. ... 65. 117. 60.] ... [ 24. 15. 42. ... 87. 88. 49.] [ 24. 48. 33. ... 99. 122. 72.] [ 88. 78. 126. ... 65. 90. 93.]]]]; ov_res: [[[[ 42. 59. 76. ... 100. 120. 98.] [ 68. 11. 73. ... 39. 91. 39.] [120. 29. 10. ... 81. 23. 120.] ... [ 90. 66. 71. ... 115. 38. 64.] [ 30. 63. 124. ... 43. 78. 103.] [113. 81. 78. ... 7. 59. 11.]] [[ 84. 27. 112. ... 87. 23. 62.] [ 36. 9. 96. ... 28. 60. 86.] [ 19. 59. 90. ... 74. 113. 43.] ... [120. 28. 114. ... 107. 126. 119.] [ 66. 29. 26. ... 107. 11. 97.] [111. 66. 25. ... 41. 121. 25.]] [[ 89. 105. 63. ... 30. 108. 88.] [ 60. 76. 11. ... 40. 32. 44.] [ 17. 33. 19. ... 65. 117. 60.] ... [ 24. 15. 42. ... 87. 88. 49.] [ 24. 48. 33. ... 99. 122. 72.] [ 88. 78. 126. ... 65. 90. 93.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.float32'> - input_dtype:<class 'numpy.uint8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6379.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 47. 113. 118. ... 11. 51. 65.] [ 9. 117. 59. ... 57. 7. 33.] [ 81. 34. 49. ... 3. 39. 73.] ... [ 76. 51. 60. ... 126. 32. 0.] [126. 65. 70. ... 70. 109. 98.] [ 4. 32. 0. ... 109. 78. 63.]] [[ 22. 53. 27. ... 91. 38. 8.] [ 54. 46. 110. ... 3. 67. 110.] [ 42. 51. 44. ... 46. 30. 121.] ... [ 25. 78. 68. ... 36. 121. 43.] [ 70. 93. 54. ... 14. 15. 106.] [ 74. 124. 36. ... 62. 21. 62.]] [[ 50. 98. 77. ... 16. 76. 28.] [ 45. 27. 81. ... 58. 119. 39.] [120. 108. 26. ... 59. 50. 32.] ... [ 67. 12. 105. ... 61. 29. 51.] [ 46. 42. 112. ... 98. 2. 92.] [ 96. 125. 118. ... 28. 24. 26.]]]]; ov_res: [[[[ 47. 113. 118. ... 11. 51. 65.] [ 9. 117. 59. ... 57. 7. 33.] [ 81. 34. 49. ... 3. 39. 73.] ... [ 76. 51. 60. ... 126. 32. 0.] [126. 65. 70. ... 70. 109. 98.] [ 4. 32. 0. ... 109. 78. 63.]] [[ 22. 53. 27. ... 91. 38. 8.] [ 54. 46. 110. ... 3. 67. 110.] [ 42. 51. 44. ... 46. 30. 121.] ... [ 25. 78. 68. ... 36. 121. 43.] [ 70. 93. 54. ... 14. 15. 106.] [ 74. 124. 36. ... 62. 21. 62.]] [[ 50. 98. 77. ... 16. 76. 28.] [ 45. 27. 81. ... 58. 119. 39.] [120. 108. 26. ... 59. 50. 32.] ... [ 67. 12. 105. ... 61. 29. 51.] [ 46. 42. 112. ... 98. 2. 92.] [ 96. 125. 118. ... 28. 24. 26.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int64'> - input_dtype:<class 'numpy.float64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6381.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 13 114 22 ... 98 61 70] [ 50 39 23 ... 14 116 47] [ 16 118 29 ... 26 32 2] ... [ 40 17 31 ... 107 14 117] [ 97 22 96 ... 114 7 32] [ 69 0 50 ... 100 38 9]] [[ 99 112 116 ... 117 96 1] [122 110 32 ... 5 57 58] [ 44 24 120 ... 20 58 56] ... [ 3 10 6 ... 66 4 54] [ 98 35 52 ... 35 119 58] [ 52 113 16 ... 95 13 107]] [[ 96 63 98 ... 92 45 77] [ 14 98 42 ... 116 24 39] [ 99 82 119 ... 15 82 39] ... [107 42 17 ... 115 59 104] [ 93 114 51 ... 5 88 114] [ 88 100 77 ... 96 101 96]]]]; ov_res: [[[[ 13 114 22 ... 98 61 70] [ 50 39 23 ... 14 116 47] [ 16 118 29 ... 26 32 2] ... [ 40 17 31 ... 107 14 117] [ 97 22 96 ... 114 7 32] [ 69 0 50 ... 100 38 9]] [[ 99 112 116 ... 117 96 1] [122 110 32 ... 5 57 58] [ 44 24 120 ... 20 58 56] ... [ 3 10 6 ... 66 4 54] [ 98 35 52 ... 35 119 58] [ 52 113 16 ... 95 13 107]] [[ 96 63 98 ... 92 45 77] [ 14 98 42 ... 116 24 39] [ 99 82 119 ... 15 82 39] ... [107 42 17 ... 115 59 104] [ 93 114 51 ... 5 88 114] [ 88 100 77 ... 96 101 96]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int64'> - input_dtype:<class 'numpy.float32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6383.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 21 69 103 ... 45 113 66] [115 24 1 ... 3 25 45] [ 74 26 96 ... 61 68 66] ... [ 95 36 110 ... 110 98 49] [ 30 90 84 ... 108 9 91] [ 41 120 14 ... 97 123 96]] [[111 96 84 ... 64 34 65] [ 73 16 93 ... 91 109 113] [ 84 66 7 ... 40 18 45] ... [ 25 105 2 ... 34 121 10] [ 49 82 42 ... 66 53 80] [ 97 70 12 ... 83 11 15]] [[ 17 72 111 ... 104 96 12] [115 2 86 ... 14 10 113] [ 39 35 116 ... 83 36 5] ... [105 78 3 ... 71 45 8] [124 112 40 ... 92 76 110] [ 19 22 2 ... 73 43 37]]]]; ov_res: [[[[ 21 69 103 ... 45 113 66] [115 24 1 ... 3 25 45] [ 74 26 96 ... 61 68 66] ... [ 95 36 110 ... 110 98 49] [ 30 90 84 ... 108 9 91] [ 41 120 14 ... 97 123 96]] [[111 96 84 ... 64 34 65] [ 73 16 93 ... 91 109 113] [ 84 66 7 ... 40 18 45] ... [ 25 105 2 ... 34 121 10] [ 49 82 42 ... 66 53 80] [ 97 70 12 ... 83 11 15]] [[ 17 72 111 ... 104 96 12] [115 2 86 ... 14 10 113] [ 39 35 116 ... 83 36 5] ... [105 78 3 ... 71 45 8] [124 112 40 ... 92 76 110] [ 19 22 2 ... 73 43 37]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int64'> - input_dtype:<class 'numpy.int64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6385.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[116 13 7 ... 15 109 121] [ 30 31 2 ... 93 83 64] [ 49 121 76 ... 9 57 119] ... [ 63 13 79 ... 45 119 99] [103 117 11 ... 97 124 120] [ 87 15 65 ... 37 8 83]] [[ 14 20 38 ... 54 51 4] [ 33 71 9 ... 50 59 118] [ 80 94 86 ... 100 123 54] ... [ 9 102 14 ... 38 29 9] [ 18 48 52 ... 80 65 72] [ 96 65 29 ... 27 35 119]] [[ 98 118 70 ... 73 78 24] [ 0 80 30 ... 97 33 120] [ 18 85 3 ... 64 61 17] ... [ 67 71 120 ... 23 63 35] [ 71 121 83 ... 85 57 82] [ 10 48 95 ... 91 47 45]]]]; ov_res: [[[[116 13 7 ... 15 109 121] [ 30 31 2 ... 93 83 64] [ 49 121 76 ... 9 57 119] ... [ 63 13 79 ... 45 119 99] [103 117 11 ... 97 124 120] [ 87 15 65 ... 37 8 83]] [[ 14 20 38 ... 54 51 4] [ 33 71 9 ... 50 59 118] [ 80 94 86 ... 100 123 54] ... [ 9 102 14 ... 38 29 9] [ 18 48 52 ... 80 65 72] [ 96 65 29 ... 27 35 119]] [[ 98 118 70 ... 73 78 24] [ 0 80 30 ... 97 33 120] [ 18 85 3 ... 64 61 17] ... [ 67 71 120 ... 23 63 35] [ 71 121 83 ... 85 57 82] [ 10 48 95 ... 91 47 45]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int64'> - input_dtype:<class 'numpy.int32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6387.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 28 121 73 ... 50 34 3] [ 60 82 6 ... 48 67 6] [ 14 63 101 ... 100 40 60] ... [ 90 119 123 ... 25 77 94] [117 7 110 ... 41 69 116] [ 68 71 70 ... 23 82 7]] [[ 53 120 26 ... 50 102 7] [ 77 34 82 ... 53 97 10] [118 106 10 ... 115 123 35] ... [ 63 47 113 ... 0 67 118] [ 62 110 98 ... 21 121 28] [100 8 107 ... 124 32 82]] [[105 104 29 ... 114 124 8] [ 40 51 105 ... 8 33 33] [ 73 23 107 ... 109 35 123] ... [ 51 106 54 ... 116 121 36] [ 34 59 116 ... 55 117 57] [ 37 87 14 ... 24 42 40]]]]; ov_res: [[[[ 28 121 73 ... 50 34 3] [ 60 82 6 ... 48 67 6] [ 14 63 101 ... 100 40 60] ... [ 90 119 123 ... 25 77 94] [117 7 110 ... 41 69 116] [ 68 71 70 ... 23 82 7]] [[ 53 120 26 ... 50 102 7] [ 77 34 82 ... 53 97 10] [118 106 10 ... 115 123 35] ... [ 63 47 113 ... 0 67 118] [ 62 110 98 ... 21 121 28] [100 8 107 ... 124 32 82]] [[105 104 29 ... 114 124 8] [ 40 51 105 ... 8 33 33] [ 73 23 107 ... 109 35 123] ... [ 51 106 54 ... 116 121 36] [ 34 59 116 ... 55 117 57] [ 37 87 14 ... 24 42 40]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int64'> - input_dtype:<class 'numpy.int16'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6389.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[117 80 32 ... 117 50 15] [ 39 22 59 ... 95 65 6] [ 18 39 60 ... 115 120 126] ... [ 61 115 32 ... 60 57 69] [ 85 103 26 ... 99 26 104] [ 80 2 59 ... 18 125 111]] [[ 86 98 48 ... 121 104 64] [ 34 97 98 ... 13 15 67] [ 49 49 23 ... 10 112 66] ... [ 91 10 28 ... 6 126 105] [ 73 76 113 ... 110 111 28] [ 8 6 72 ... 114 50 54]] [[113 0 84 ... 85 68 57] [ 25 113 34 ... 34 99 88] [ 47 116 114 ... 80 92 89] ... [ 17 55 96 ... 25 92 21] [ 30 115 71 ... 99 26 78] [ 7 42 34 ... 100 85 106]]]]; ov_res: [[[[117 80 32 ... 117 50 15] [ 39 22 59 ... 95 65 6] [ 18 39 60 ... 115 120 126] ... [ 61 115 32 ... 60 57 69] [ 85 103 26 ... 99 26 104] [ 80 2 59 ... 18 125 111]] [[ 86 98 48 ... 121 104 64] [ 34 97 98 ... 13 15 67] [ 49 49 23 ... 10 112 66] ... [ 91 10 28 ... 6 126 105] [ 73 76 113 ... 110 111 28] [ 8 6 72 ... 114 50 54]] [[113 0 84 ... 85 68 57] [ 25 113 34 ... 34 99 88] [ 47 116 114 ... 80 92 89] ... [ 17 55 96 ... 25 92 21] [ 30 115 71 ... 99 26 78] [ 7 42 34 ... 100 85 106]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int64'> - input_dtype:<class 'numpy.int8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6391.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 86 101 72 ... 94 98 92] [114 60 65 ... 83 2 36] [ 0 57 56 ... 126 54 85] ... [ 90 102 83 ... 114 69 20] [ 46 114 54 ... 114 116 56] [ 25 50 97 ... 69 69 108]] [[ 71 26 109 ... 63 56 78] [ 26 0 46 ... 30 59 101] [ 95 5 82 ... 14 13 28] ... [ 19 22 98 ... 64 105 100] [103 117 20 ... 12 42 65] [123 77 46 ... 98 73 76]] [[126 116 2 ... 84 126 46] [ 28 92 100 ... 99 6 58] [ 47 34 72 ... 122 69 89] ... [ 46 117 112 ... 62 20 119] [105 30 80 ... 31 66 8] [ 92 45 55 ... 67 70 97]]]]; ov_res: [[[[ 86 101 72 ... 94 98 92] [114 60 65 ... 83 2 36] [ 0 57 56 ... 126 54 85] ... [ 90 102 83 ... 114 69 20] [ 46 114 54 ... 114 116 56] [ 25 50 97 ... 69 69 108]] [[ 71 26 109 ... 63 56 78] [ 26 0 46 ... 30 59 101] [ 95 5 82 ... 14 13 28] ... [ 19 22 98 ... 64 105 100] [103 117 20 ... 12 42 65] [123 77 46 ... 98 73 76]] [[126 116 2 ... 84 126 46] [ 28 92 100 ... 99 6 58] [ 47 34 72 ... 122 69 89] ... [ 46 117 112 ... 62 20 119] [105 30 80 ... 31 66 8] [ 92 45 55 ... 67 70 97]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int64'> - input_dtype:<class 'numpy.uint8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6393.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[113 13 49 ... 43 77 11] [118 81 71 ... 9 72 117] [ 18 59 33 ... 11 80 75] ... [ 55 46 8 ... 6 122 111] [ 46 125 1 ... 86 85 114] [ 49 55 96 ... 24 12 84]] [[ 96 78 120 ... 62 48 2] [ 38 24 102 ... 79 45 125] [123 120 111 ... 52 84 66] ... [ 76 61 40 ... 106 68 57] [ 24 63 124 ... 101 79 113] [ 37 75 67 ... 1 91 90]] [[ 94 76 75 ... 35 43 103] [ 25 43 45 ... 53 15 47] [ 23 33 77 ... 97 120 87] ... [ 40 63 57 ... 30 44 83] [ 98 107 50 ... 48 125 104] [ 81 112 47 ... 26 48 66]]]]; ov_res: [[[[113 13 49 ... 43 77 11] [118 81 71 ... 9 72 117] [ 18 59 33 ... 11 80 75] ... [ 55 46 8 ... 6 122 111] [ 46 125 1 ... 86 85 114] [ 49 55 96 ... 24 12 84]] [[ 96 78 120 ... 62 48 2] [ 38 24 102 ... 79 45 125] [123 120 111 ... 52 84 66] ... [ 76 61 40 ... 106 68 57] [ 24 63 124 ... 101 79 113] [ 37 75 67 ... 1 91 90]] [[ 94 76 75 ... 35 43 103] [ 25 43 45 ... 53 15 47] [ 23 33 77 ... 97 120 87] ... [ 40 63 57 ... 30 44 83] [ 98 107 50 ... 48 125 104] [ 81 112 47 ... 26 48 66]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int32'> - input_dtype:<class 'numpy.float64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6395.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 19 4 50 ... 24 80 65] [ 87 50 37 ... 0 125 117] [ 64 77 87 ... 54 2 30] ... [ 33 11 43 ... 16 34 23] [ 65 101 90 ... 64 119 46] [117 43 57 ... 8 36 110]] [[ 72 54 91 ... 3 25 94] [ 6 104 110 ... 73 94 1] [ 92 109 92 ... 13 126 55] ... [ 63 10 34 ... 61 26 26] [ 35 0 58 ... 98 76 112] [ 89 109 93 ... 66 76 1]] [[ 92 44 86 ... 40 90 97] [113 99 72 ... 3 2 91] [103 43 44 ... 83 79 70] ... [ 85 41 100 ... 82 116 47] [115 14 5 ... 22 85 89] [ 50 102 112 ... 90 55 63]]]]; ov_res: [[[[ 19 4 50 ... 24 80 65] [ 87 50 37 ... 0 125 117] [ 64 77 87 ... 54 2 30] ... [ 33 11 43 ... 16 34 23] [ 65 101 90 ... 64 119 46] [117 43 57 ... 8 36 110]] [[ 72 54 91 ... 3 25 94] [ 6 104 110 ... 73 94 1] [ 92 109 92 ... 13 126 55] ... [ 63 10 34 ... 61 26 26] [ 35 0 58 ... 98 76 112] [ 89 109 93 ... 66 76 1]] [[ 92 44 86 ... 40 90 97] [113 99 72 ... 3 2 91] [103 43 44 ... 83 79 70] ... [ 85 41 100 ... 82 116 47] [115 14 5 ... 22 85 89] [ 50 102 112 ... 90 55 63]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int32'> - input_dtype:<class 'numpy.float32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6397.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 13 14 85 ... 57 99 21] [111 63 118 ... 49 41 32] [ 89 48 51 ... 36 21 84] ... [ 78 80 17 ... 36 50 72] [ 62 14 57 ... 23 22 2] [ 2 30 5 ... 12 116 0]] [[ 95 71 30 ... 65 103 104] [100 65 39 ... 5 111 115] [ 22 18 93 ... 58 119 23] ... [119 53 114 ... 79 102 19] [ 60 112 19 ... 15 121 120] [ 0 124 27 ... 48 43 83]] [[108 96 65 ... 111 2 106] [ 0 52 74 ... 107 126 72] [ 64 55 67 ... 8 122 50] ... [ 32 102 100 ... 95 3 82] [ 53 91 13 ... 108 47 105] [ 21 91 81 ... 8 67 32]]]]; ov_res: [[[[ 13 14 85 ... 57 99 21] [111 63 118 ... 49 41 32] [ 89 48 51 ... 36 21 84] ... [ 78 80 17 ... 36 50 72] [ 62 14 57 ... 23 22 2] [ 2 30 5 ... 12 116 0]] [[ 95 71 30 ... 65 103 104] [100 65 39 ... 5 111 115] [ 22 18 93 ... 58 119 23] ... [119 53 114 ... 79 102 19] [ 60 112 19 ... 15 121 120] [ 0 124 27 ... 48 43 83]] [[108 96 65 ... 111 2 106] [ 0 52 74 ... 107 126 72] [ 64 55 67 ... 8 122 50] ... [ 32 102 100 ... 95 3 82] [ 53 91 13 ... 108 47 105] [ 21 91 81 ... 8 67 32]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int32'> - input_dtype:<class 'numpy.int64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6399.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 90 21 48 ... 84 82 124] [ 14 72 29 ... 25 116 13] [ 8 22 46 ... 13 27 36] ... [ 14 39 117 ... 70 93 126] [ 15 96 13 ... 72 70 69] [ 96 126 32 ... 26 2 46]] [[ 14 117 106 ... 52 8 97] [ 9 68 104 ... 87 112 32] [ 68 40 18 ... 22 37 86] ... [121 35 46 ... 99 56 30] [ 95 68 28 ... 6 83 1] [ 84 63 16 ... 43 3 96]] [[108 125 79 ... 81 40 82] [ 82 25 125 ... 60 17 44] [ 89 10 53 ... 68 79 125] ... [101 70 26 ... 125 43 59] [ 97 92 91 ... 87 106 17] [ 89 47 33 ... 31 107 51]]]]; ov_res: [[[[ 90 21 48 ... 84 82 124] [ 14 72 29 ... 25 116 13] [ 8 22 46 ... 13 27 36] ... [ 14 39 117 ... 70 93 126] [ 15 96 13 ... 72 70 69] [ 96 126 32 ... 26 2 46]] [[ 14 117 106 ... 52 8 97] [ 9 68 104 ... 87 112 32] [ 68 40 18 ... 22 37 86] ... [121 35 46 ... 99 56 30] [ 95 68 28 ... 6 83 1] [ 84 63 16 ... 43 3 96]] [[108 125 79 ... 81 40 82] [ 82 25 125 ... 60 17 44] [ 89 10 53 ... 68 79 125] ... [101 70 26 ... 125 43 59] [ 97 92 91 ... 87 106 17] [ 89 47 33 ... 31 107 51]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int32'> - input_dtype:<class 'numpy.int32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6401.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 73 1 55 ... 61 40 68] [103 78 48 ... 13 22 83] [ 53 114 121 ... 96 100 85] ... [ 89 38 83 ... 111 102 40] [ 21 18 55 ... 56 62 43] [ 70 28 111 ... 26 124 17]] [[ 36 20 34 ... 94 116 22] [106 97 94 ... 48 67 114] [ 44 120 126 ... 111 62 79] ... [ 4 2 92 ... 60 10 113] [ 5 122 80 ... 44 97 23] [126 122 65 ... 121 83 124]] [[ 31 10 83 ... 10 125 111] [ 68 60 26 ... 87 29 26] [ 81 120 79 ... 82 54 88] ... [ 42 82 29 ... 24 2 82] [104 29 31 ... 108 77 49] [123 50 73 ... 100 70 93]]]]; ov_res: [[[[ 73 1 55 ... 61 40 68] [103 78 48 ... 13 22 83] [ 53 114 121 ... 96 100 85] ... [ 89 38 83 ... 111 102 40] [ 21 18 55 ... 56 62 43] [ 70 28 111 ... 26 124 17]] [[ 36 20 34 ... 94 116 22] [106 97 94 ... 48 67 114] [ 44 120 126 ... 111 62 79] ... [ 4 2 92 ... 60 10 113] [ 5 122 80 ... 44 97 23] [126 122 65 ... 121 83 124]] [[ 31 10 83 ... 10 125 111] [ 68 60 26 ... 87 29 26] [ 81 120 79 ... 82 54 88] ... [ 42 82 29 ... 24 2 82] [104 29 31 ... 108 77 49] [123 50 73 ... 100 70 93]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int32'> - input_dtype:<class 'numpy.int16'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6403.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 65 54 39 ... 53 94 120] [ 19 91 99 ... 78 50 63] [105 25 104 ... 90 79 15] ... [ 45 8 2 ... 46 94 76] [ 64 27 98 ... 62 105 40] [ 58 44 85 ... 49 63 14]] [[ 30 105 16 ... 17 79 47] [ 8 16 71 ... 51 87 3] [ 17 81 100 ... 119 105 74] ... [ 64 92 64 ... 105 54 118] [104 1 105 ... 76 7 116] [ 14 69 30 ... 11 92 84]] [[ 14 50 115 ... 55 26 111] [125 36 121 ... 10 80 118] [ 80 21 6 ... 34 24 64] ... [ 10 4 12 ... 4 37 48] [102 86 52 ... 64 50 8] [ 95 89 21 ... 3 53 112]]]]; ov_res: [[[[ 65 54 39 ... 53 94 120] [ 19 91 99 ... 78 50 63] [105 25 104 ... 90 79 15] ... [ 45 8 2 ... 46 94 76] [ 64 27 98 ... 62 105 40] [ 58 44 85 ... 49 63 14]] [[ 30 105 16 ... 17 79 47] [ 8 16 71 ... 51 87 3] [ 17 81 100 ... 119 105 74] ... [ 64 92 64 ... 105 54 118] [104 1 105 ... 76 7 116] [ 14 69 30 ... 11 92 84]] [[ 14 50 115 ... 55 26 111] [125 36 121 ... 10 80 118] [ 80 21 6 ... 34 24 64] ... [ 10 4 12 ... 4 37 48] [102 86 52 ... 64 50 8] [ 95 89 21 ... 3 53 112]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int32'> - input_dtype:<class 'numpy.int8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6405.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[119 51 110 ... 95 48 69] [ 57 7 79 ... 65 4 6] [112 17 58 ... 84 84 60] ... [ 4 92 79 ... 2 48 24] [119 51 114 ... 64 111 36] [ 17 83 32 ... 52 55 101]] [[115 55 45 ... 75 40 9] [ 39 88 125 ... 105 36 76] [ 79 92 55 ... 105 101 96] ... [ 21 53 48 ... 91 70 101] [ 58 44 23 ... 123 117 60] [ 27 9 8 ... 53 70 79]] [[ 52 113 78 ... 56 80 103] [ 16 68 37 ... 24 39 126] [ 55 95 99 ... 17 119 56] ... [ 48 115 30 ... 106 72 123] [118 4 126 ... 40 103 30] [ 60 59 80 ... 51 114 19]]]]; ov_res: [[[[119 51 110 ... 95 48 69] [ 57 7 79 ... 65 4 6] [112 17 58 ... 84 84 60] ... [ 4 92 79 ... 2 48 24] [119 51 114 ... 64 111 36] [ 17 83 32 ... 52 55 101]] [[115 55 45 ... 75 40 9] [ 39 88 125 ... 105 36 76] [ 79 92 55 ... 105 101 96] ... [ 21 53 48 ... 91 70 101] [ 58 44 23 ... 123 117 60] [ 27 9 8 ... 53 70 79]] [[ 52 113 78 ... 56 80 103] [ 16 68 37 ... 24 39 126] [ 55 95 99 ... 17 119 56] ... [ 48 115 30 ... 106 72 123] [118 4 126 ... 40 103 30] [ 60 59 80 ... 51 114 19]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int32'> - input_dtype:<class 'numpy.uint8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6407.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 37 110 39 ... 94 81 2] [ 33 24 82 ... 51 16 14] [ 75 81 71 ... 126 97 55] ... [ 90 32 2 ... 117 26 26] [ 97 18 15 ... 51 28 3] [ 26 40 18 ... 126 83 38]] [[ 54 95 95 ... 64 97 83] [ 31 46 124 ... 91 118 78] [ 80 29 38 ... 33 86 10] ... [ 78 48 79 ... 88 70 86] [ 50 50 113 ... 56 121 63] [ 84 80 70 ... 68 62 71]] [[ 59 125 28 ... 78 21 54] [ 40 120 94 ... 17 0 112] [ 21 10 34 ... 99 7 94] ... [ 32 58 77 ... 24 64 114] [ 12 34 111 ... 19 24 25] [ 1 58 1 ... 118 82 98]]]]; ov_res: [[[[ 37 110 39 ... 94 81 2] [ 33 24 82 ... 51 16 14] [ 75 81 71 ... 126 97 55] ... [ 90 32 2 ... 117 26 26] [ 97 18 15 ... 51 28 3] [ 26 40 18 ... 126 83 38]] [[ 54 95 95 ... 64 97 83] [ 31 46 124 ... 91 118 78] [ 80 29 38 ... 33 86 10] ... [ 78 48 79 ... 88 70 86] [ 50 50 113 ... 56 121 63] [ 84 80 70 ... 68 62 71]] [[ 59 125 28 ... 78 21 54] [ 40 120 94 ... 17 0 112] [ 21 10 34 ... 99 7 94] ... [ 32 58 77 ... 24 64 114] [ 12 34 111 ... 19 24 25] [ 1 58 1 ... 118 82 98]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int16'> - input_dtype:<class 'numpy.float64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6409.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 85 16 9 ... 36 3 31] [ 75 1 120 ... 78 6 95] [115 33 120 ... 45 61 14] ... [112 56 80 ... 91 30 19] [103 116 63 ... 114 61 85] [ 10 115 80 ... 112 95 12]] [[ 48 62 104 ... 25 21 43] [ 10 24 111 ... 37 11 102] [ 91 44 54 ... 104 91 12] ... [ 51 69 33 ... 120 104 73] [ 41 14 29 ... 96 112 113] [ 48 65 50 ... 46 92 75]] [[ 20 77 34 ... 100 125 121] [ 41 17 111 ... 43 100 100] [ 59 75 67 ... 20 44 23] ... [122 120 123 ... 41 54 40] [ 64 112 89 ... 103 119 108] [116 27 5 ... 5 8 0]]]]; ov_res: [[[[ 85 16 9 ... 36 3 31] [ 75 1 120 ... 78 6 95] [115 33 120 ... 45 61 14] ... [112 56 80 ... 91 30 19] [103 116 63 ... 114 61 85] [ 10 115 80 ... 112 95 12]] [[ 48 62 104 ... 25 21 43] [ 10 24 111 ... 37 11 102] [ 91 44 54 ... 104 91 12] ... [ 51 69 33 ... 120 104 73] [ 41 14 29 ... 96 112 113] [ 48 65 50 ... 46 92 75]] [[ 20 77 34 ... 100 125 121] [ 41 17 111 ... 43 100 100] [ 59 75 67 ... 20 44 23] ... [122 120 123 ... 41 54 40] [ 64 112 89 ... 103 119 108] [116 27 5 ... 5 8 0]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int16'> - input_dtype:<class 'numpy.float32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6411.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 56 105 112 ... 125 122 48] [ 17 114 120 ... 24 12 126] [108 4 94 ... 20 92 102] ... [ 31 122 124 ... 28 126 66] [114 39 120 ... 61 30 20] [ 3 22 97 ... 99 70 77]] [[ 10 118 44 ... 86 120 82] [112 23 8 ... 85 82 120] [117 4 1 ... 117 70 60] ... [ 67 29 11 ... 96 105 60] [ 92 95 34 ... 29 70 111] [ 51 56 6 ... 60 83 1]] [[ 53 41 69 ... 101 123 38] [126 47 101 ... 79 116 75] [ 90 52 121 ... 76 7 89] ... [ 22 125 36 ... 126 116 110] [102 101 38 ... 54 29 89] [ 84 64 110 ... 20 2 80]]]]; ov_res: [[[[ 56 105 112 ... 125 122 48] [ 17 114 120 ... 24 12 126] [108 4 94 ... 20 92 102] ... [ 31 122 124 ... 28 126 66] [114 39 120 ... 61 30 20] [ 3 22 97 ... 99 70 77]] [[ 10 118 44 ... 86 120 82] [112 23 8 ... 85 82 120] [117 4 1 ... 117 70 60] ... [ 67 29 11 ... 96 105 60] [ 92 95 34 ... 29 70 111] [ 51 56 6 ... 60 83 1]] [[ 53 41 69 ... 101 123 38] [126 47 101 ... 79 116 75] [ 90 52 121 ... 76 7 89] ... [ 22 125 36 ... 126 116 110] [102 101 38 ... 54 29 89] [ 84 64 110 ... 20 2 80]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int16'> - input_dtype:<class 'numpy.int64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6413.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 71 108 6 ... 9 33 55] [ 13 86 51 ... 20 85 38] [103 25 55 ... 20 89 41] ... [ 58 33 5 ... 110 86 121] [ 40 25 110 ... 48 126 31] [ 99 79 14 ... 44 95 57]] [[ 50 76 79 ... 61 13 82] [ 1 17 86 ... 72 113 112] [ 52 8 123 ... 126 100 54] ... [ 55 83 59 ... 26 19 15] [ 33 24 30 ... 96 97 76] [ 6 45 74 ... 2 41 72]] [[100 13 98 ... 122 1 32] [ 5 103 14 ... 64 54 4] [ 26 84 99 ... 125 12 58] ... [ 14 22 42 ... 51 122 64] [ 41 103 8 ... 45 68 96] [ 99 102 116 ... 87 97 118]]]]; ov_res: [[[[ 71 108 6 ... 9 33 55] [ 13 86 51 ... 20 85 38] [103 25 55 ... 20 89 41] ... [ 58 33 5 ... 110 86 121] [ 40 25 110 ... 48 126 31] [ 99 79 14 ... 44 95 57]] [[ 50 76 79 ... 61 13 82] [ 1 17 86 ... 72 113 112] [ 52 8 123 ... 126 100 54] ... [ 55 83 59 ... 26 19 15] [ 33 24 30 ... 96 97 76] [ 6 45 74 ... 2 41 72]] [[100 13 98 ... 122 1 32] [ 5 103 14 ... 64 54 4] [ 26 84 99 ... 125 12 58] ... [ 14 22 42 ... 51 122 64] [ 41 103 8 ... 45 68 96] [ 99 102 116 ... 87 97 118]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int16'> - input_dtype:<class 'numpy.int32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6415.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 15 104 48 ... 5 24 124] [ 80 75 95 ... 126 5 26] [ 47 120 38 ... 125 49 56] ... [100 104 95 ... 12 56 20] [ 4 3 112 ... 68 123 100] [118 38 58 ... 87 42 72]] [[ 62 125 0 ... 100 112 38] [ 7 76 48 ... 10 16 82] [ 71 84 31 ... 31 47 105] ... [ 19 5 58 ... 9 4 78] [114 68 104 ... 98 28 89] [ 90 101 96 ... 44 32 12]] [[ 25 7 70 ... 113 114 7] [ 48 73 37 ... 44 49 23] [ 36 41 28 ... 118 61 114] ... [ 50 68 83 ... 70 57 114] [ 77 64 101 ... 50 77 18] [119 11 67 ... 48 117 18]]]]; ov_res: [[[[ 15 104 48 ... 5 24 124] [ 80 75 95 ... 126 5 26] [ 47 120 38 ... 125 49 56] ... [100 104 95 ... 12 56 20] [ 4 3 112 ... 68 123 100] [118 38 58 ... 87 42 72]] [[ 62 125 0 ... 100 112 38] [ 7 76 48 ... 10 16 82] [ 71 84 31 ... 31 47 105] ... [ 19 5 58 ... 9 4 78] [114 68 104 ... 98 28 89] [ 90 101 96 ... 44 32 12]] [[ 25 7 70 ... 113 114 7] [ 48 73 37 ... 44 49 23] [ 36 41 28 ... 118 61 114] ... [ 50 68 83 ... 70 57 114] [ 77 64 101 ... 50 77 18] [119 11 67 ... 48 117 18]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int16'> - input_dtype:<class 'numpy.int16'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6417.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 18 14 64 ... 107 83 38] [ 40 60 94 ... 113 38 43] [125 85 87 ... 89 76 90] ... [ 45 125 87 ... 80 107 51] [ 37 84 111 ... 64 12 105] [ 42 39 64 ... 86 27 76]] [[ 11 82 119 ... 83 81 17] [ 72 23 39 ... 12 78 46] [ 27 20 110 ... 118 8 41] ... [ 30 100 100 ... 0 41 121] [ 63 104 107 ... 111 72 109] [ 99 77 46 ... 12 104 42]] [[ 66 108 71 ... 37 75 73] [109 47 0 ... 18 51 3] [122 92 75 ... 73 66 16] ... [ 19 75 75 ... 122 108 2] [ 89 42 122 ... 101 69 31] [ 47 59 113 ... 89 48 66]]]]; ov_res: [[[[ 18 14 64 ... 107 83 38] [ 40 60 94 ... 113 38 43] [125 85 87 ... 89 76 90] ... [ 45 125 87 ... 80 107 51] [ 37 84 111 ... 64 12 105] [ 42 39 64 ... 86 27 76]] [[ 11 82 119 ... 83 81 17] [ 72 23 39 ... 12 78 46] [ 27 20 110 ... 118 8 41] ... [ 30 100 100 ... 0 41 121] [ 63 104 107 ... 111 72 109] [ 99 77 46 ... 12 104 42]] [[ 66 108 71 ... 37 75 73] [109 47 0 ... 18 51 3] [122 92 75 ... 73 66 16] ... [ 19 75 75 ... 122 108 2] [ 89 42 122 ... 101 69 31] [ 47 59 113 ... 89 48 66]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int16'> - input_dtype:<class 'numpy.int8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6419.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 45 69 94 ... 86 15 105] [ 0 42 36 ... 121 101 68] [121 97 69 ... 89 4 2] ... [ 42 5 124 ... 47 98 17] [ 48 76 33 ... 57 93 47] [119 125 17 ... 19 48 4]] [[ 88 41 43 ... 73 111 73] [ 4 96 100 ... 51 40 101] [ 1 108 97 ... 31 12 18] ... [ 9 72 10 ... 0 50 24] [106 68 8 ... 105 66 31] [107 37 56 ... 87 126 65]] [[ 71 28 68 ... 86 121 86] [ 31 21 88 ... 115 9 107] [ 61 86 112 ... 4 55 34] ... [ 86 93 93 ... 24 106 47] [ 33 77 83 ... 125 70 58] [ 71 105 23 ... 36 98 95]]]]; ov_res: [[[[ 45 69 94 ... 86 15 105] [ 0 42 36 ... 121 101 68] [121 97 69 ... 89 4 2] ... [ 42 5 124 ... 47 98 17] [ 48 76 33 ... 57 93 47] [119 125 17 ... 19 48 4]] [[ 88 41 43 ... 73 111 73] [ 4 96 100 ... 51 40 101] [ 1 108 97 ... 31 12 18] ... [ 9 72 10 ... 0 50 24] [106 68 8 ... 105 66 31] [107 37 56 ... 87 126 65]] [[ 71 28 68 ... 86 121 86] [ 31 21 88 ... 115 9 107] [ 61 86 112 ... 4 55 34] ... [ 86 93 93 ... 24 106 47] [ 33 77 83 ... 125 70 58] [ 71 105 23 ... 36 98 95]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int16'> - input_dtype:<class 'numpy.uint8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6421.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[115 1 99 ... 90 44 56] [ 61 23 24 ... 27 73 29] [ 93 110 4 ... 107 55 74] ... [ 89 11 66 ... 65 59 12] [ 60 121 48 ... 4 95 112] [ 67 58 50 ... 48 58 43]] [[ 43 70 105 ... 6 66 66] [ 88 90 6 ... 116 69 92] [114 19 88 ... 107 6 44] ... [122 123 26 ... 53 38 5] [ 2 93 122 ... 83 102 88] [ 14 13 50 ... 44 114 68]] [[ 0 114 100 ... 65 13 3] [ 43 106 48 ... 81 60 111] [ 25 109 23 ... 75 45 13] ... [122 120 59 ... 76 113 44] [ 40 125 8 ... 118 19 91] [ 56 76 73 ... 45 33 23]]]]; ov_res: [[[[115 1 99 ... 90 44 56] [ 61 23 24 ... 27 73 29] [ 93 110 4 ... 107 55 74] ... [ 89 11 66 ... 65 59 12] [ 60 121 48 ... 4 95 112] [ 67 58 50 ... 48 58 43]] [[ 43 70 105 ... 6 66 66] [ 88 90 6 ... 116 69 92] [114 19 88 ... 107 6 44] ... [122 123 26 ... 53 38 5] [ 2 93 122 ... 83 102 88] [ 14 13 50 ... 44 114 68]] [[ 0 114 100 ... 65 13 3] [ 43 106 48 ... 81 60 111] [ 25 109 23 ... 75 45 13] ... [122 120 59 ... 76 113 44] [ 40 125 8 ... 118 19 91] [ 56 76 73 ... 45 33 23]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int8'> - input_dtype:<class 'numpy.float64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6423.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 11 71 126 ... 66 48 9] [110 125 100 ... 5 39 125] [ 72 101 74 ... 26 94 92] ... [105 68 32 ... 23 84 30] [114 96 15 ... 64 89 12] [ 84 0 75 ... 34 103 7]] [[ 18 33 72 ... 107 97 68] [116 102 7 ... 111 48 107] [ 44 60 91 ... 10 21 50] ... [ 16 25 18 ... 95 111 70] [ 98 116 82 ... 66 50 46] [126 42 43 ... 14 90 56]] [[ 59 0 11 ... 76 50 45] [ 24 57 8 ... 81 22 114] [ 47 71 61 ... 46 89 75] ... [124 32 1 ... 8 110 89] [102 57 1 ... 40 9 113] [ 25 65 81 ... 4 75 36]]]]; ov_res: [[[[ 11 71 126 ... 66 48 9] [110 125 100 ... 5 39 125] [ 72 101 74 ... 26 94 92] ... [105 68 32 ... 23 84 30] [114 96 15 ... 64 89 12] [ 84 0 75 ... 34 103 7]] [[ 18 33 72 ... 107 97 68] [116 102 7 ... 111 48 107] [ 44 60 91 ... 10 21 50] ... [ 16 25 18 ... 95 111 70] [ 98 116 82 ... 66 50 46] [126 42 43 ... 14 90 56]] [[ 59 0 11 ... 76 50 45] [ 24 57 8 ... 81 22 114] [ 47 71 61 ... 46 89 75] ... [124 32 1 ... 8 110 89] [102 57 1 ... 40 9 113] [ 25 65 81 ... 4 75 36]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int8'> - input_dtype:<class 'numpy.float32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6425.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 25 9 0 ... 94 79 86] [ 36 86 32 ... 11 66 107] [ 81 48 71 ... 31 27 65] ... [ 69 59 125 ... 84 75 92] [117 79 114 ... 7 4 117] [ 60 28 28 ... 71 38 106]] [[ 49 42 42 ... 112 22 40] [ 5 27 19 ... 94 4 41] [ 64 15 59 ... 18 31 2] ... [115 114 114 ... 46 91 104] [ 67 93 56 ... 75 32 108] [ 41 44 112 ... 100 70 30]] [[ 42 70 10 ... 100 62 64] [ 92 70 32 ... 28 16 108] [ 82 17 31 ... 7 63 76] ... [ 88 37 10 ... 62 7 91] [ 76 68 57 ... 106 13 16] [ 26 2 69 ... 39 112 72]]]]; ov_res: [[[[ 25 9 0 ... 94 79 86] [ 36 86 32 ... 11 66 107] [ 81 48 71 ... 31 27 65] ... [ 69 59 125 ... 84 75 92] [117 79 114 ... 7 4 117] [ 60 28 28 ... 71 38 106]] [[ 49 42 42 ... 112 22 40] [ 5 27 19 ... 94 4 41] [ 64 15 59 ... 18 31 2] ... [115 114 114 ... 46 91 104] [ 67 93 56 ... 75 32 108] [ 41 44 112 ... 100 70 30]] [[ 42 70 10 ... 100 62 64] [ 92 70 32 ... 28 16 108] [ 82 17 31 ... 7 63 76] ... [ 88 37 10 ... 62 7 91] [ 76 68 57 ... 106 13 16] [ 26 2 69 ... 39 112 72]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int8'> - input_dtype:<class 'numpy.int64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6427.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 72 115 40 ... 81 107 116] [ 20 126 74 ... 57 89 84] [ 5 43 44 ... 34 23 91] ... [ 79 116 116 ... 1 76 70] [ 56 40 66 ... 11 49 28] [ 48 29 3 ... 119 85 26]] [[ 49 28 0 ... 38 50 98] [114 44 24 ... 116 32 102] [ 94 75 73 ... 17 55 122] ... [ 11 30 45 ... 35 59 74] [121 57 36 ... 20 44 21] [114 27 124 ... 100 79 84]] [[ 96 83 113 ... 30 57 23] [ 67 116 34 ... 109 49 45] [119 105 22 ... 9 115 123] ... [106 34 83 ... 113 62 126] [ 77 93 9 ... 6 0 19] [ 93 8 15 ... 17 10 90]]]]; ov_res: [[[[ 72 115 40 ... 81 107 116] [ 20 126 74 ... 57 89 84] [ 5 43 44 ... 34 23 91] ... [ 79 116 116 ... 1 76 70] [ 56 40 66 ... 11 49 28] [ 48 29 3 ... 119 85 26]] [[ 49 28 0 ... 38 50 98] [114 44 24 ... 116 32 102] [ 94 75 73 ... 17 55 122] ... [ 11 30 45 ... 35 59 74] [121 57 36 ... 20 44 21] [114 27 124 ... 100 79 84]] [[ 96 83 113 ... 30 57 23] [ 67 116 34 ... 109 49 45] [119 105 22 ... 9 115 123] ... [106 34 83 ... 113 62 126] [ 77 93 9 ... 6 0 19] [ 93 8 15 ... 17 10 90]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int8'> - input_dtype:<class 'numpy.int32'> ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6429.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 49 102 112 ... 100 94 117] [ 59 73 25 ... 17 109 69] [ 74 97 82 ... 101 74 17] ... [109 28 28 ... 5 37 12] [109 72 103 ... 106 39 121] [ 79 22 30 ... 95 56 17]] [[126 1 80 ... 14 96 117] [ 36 30 64 ... 0 6 74] [ 51 109 49 ... 106 45 114] ... [ 73 105 33 ... 125 98 40] [ 84 5 100 ... 29 71 52] [ 48 61 78 ... 81 70 75]] [[111 22 72 ... 76 6 69] [ 77 35 113 ... 74 33 13] [ 90 15 9 ... 50 89 109] ... [104 4 0 ... 45 101 73] [121 91 113 ... 28 103 65] [ 2 63 81 ... 28 13 117]]]]; ov_res: [[[[ 49 102 112 ... 100 94 117] [ 59 73 25 ... 17 109 69] [ 74 97 82 ... 101 74 17] ... [109 28 28 ... 5 37 12] [109 72 103 ... 106 39 121] [ 79 22 30 ... 95 56 17]] [[126 1 80 ... 14 96 117] [ 36 30 64 ... 0 6 74] [ 51 109 49 ... 106 45 114] ... [ 73 105 33 ... 125 98 40] [ 84 5 100 ... 29 71 52] [ 48 61 78 ... 81 70 75]] [[111 22 72 ... 76 6 69] [ 77 35 113 ... 74 33 13] [ 90 15 9 ... 50 89 109] ... [104 4 0 ... 45 101 73] [121 91 113 ... 28 103 65] [ 2 63 81 ... 28 13 117]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int8'> - input_dtype:<class 'numpy.int16'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6431.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 25 84 110 ... 13 106 64] [ 62 32 94 ... 76 60 40] [ 28 41 9 ... 87 87 2] ... [ 69 53 11 ... 5 17 40] [116 101 30 ... 113 95 20] [ 80 75 94 ... 56 126 86]] [[ 69 52 45 ... 114 114 9] [117 58 5 ... 20 5 101] [ 7 91 30 ... 88 113 86] ... [ 23 68 31 ... 95 21 115] [ 92 114 15 ... 49 22 54] [ 8 67 39 ... 61 102 17]] [[124 102 108 ... 114 40 65] [ 88 5 27 ... 6 80 17] [ 54 99 6 ... 16 49 24] ... [ 46 99 55 ... 77 23 66] [ 14 93 56 ... 4 49 34] [ 23 114 11 ... 15 9 8]]]]; ov_res: [[[[ 25 84 110 ... 13 106 64] [ 62 32 94 ... 76 60 40] [ 28 41 9 ... 87 87 2] ... [ 69 53 11 ... 5 17 40] [116 101 30 ... 113 95 20] [ 80 75 94 ... 56 126 86]] [[ 69 52 45 ... 114 114 9] [117 58 5 ... 20 5 101] [ 7 91 30 ... 88 113 86] ... [ 23 68 31 ... 95 21 115] [ 92 114 15 ... 49 22 54] [ 8 67 39 ... 61 102 17]] [[124 102 108 ... 114 40 65] [ 88 5 27 ... 6 80 17] [ 54 99 6 ... 16 49 24] ... [ 46 99 55 ... 77 23 66] [ 14 93 56 ... 4 49 34] [ 23 114 11 ... 15 9 8]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int8'> - input_dtype:<class 'numpy.int8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6433.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[100 37 9 ... 124 75 83] [ 13 54 51 ... 96 108 57] [ 40 3 73 ... 114 7 71] ... [ 2 112 78 ... 108 106 59] [ 28 69 85 ... 98 9 89] [ 33 80 46 ... 100 15 38]] [[115 82 1 ... 73 78 94] [108 33 35 ... 24 30 48] [ 95 0 62 ... 4 19 3] ... [ 69 92 78 ... 1 52 114] [ 89 88 43 ... 18 10 82] [ 14 12 44 ... 58 66 3]] [[ 44 94 24 ... 87 69 63] [ 89 40 124 ... 17 63 91] [ 39 19 48 ... 79 79 16] ... [ 30 14 109 ... 30 99 48] [ 36 35 6 ... 77 6 65] [ 99 60 25 ... 48 40 101]]]]; ov_res: [[[[100 37 9 ... 124 75 83] [ 13 54 51 ... 96 108 57] [ 40 3 73 ... 114 7 71] ... [ 2 112 78 ... 108 106 59] [ 28 69 85 ... 98 9 89] [ 33 80 46 ... 100 15 38]] [[115 82 1 ... 73 78 94] [108 33 35 ... 24 30 48] [ 95 0 62 ... 4 19 3] ... [ 69 92 78 ... 1 52 114] [ 89 88 43 ... 18 10 82] [ 14 12 44 ... 58 66 3]] [[ 44 94 24 ... 87 69 63] [ 89 40 124 ... 17 63 91] [ 39 19 48 ... 79 79 16] ... [ 30 14 109 ... 30 99 48] [ 36 35 6 ... 77 6 65] [ 99 60 25 ... 48 40 101]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.int8'> - input_dtype:<class 'numpy.uint8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6435.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 31 42 103 ... 12 108 95] [113 91 50 ... 123 17 107] [113 126 77 ... 78 92 122] ... [ 21 124 25 ... 90 8 120] [ 73 80 110 ... 40 53 18] [ 76 38 89 ... 24 55 91]] [[116 1 93 ... 123 6 0] [ 74 38 98 ... 47 51 34] [ 92 48 75 ... 65 26 58] ... [123 116 6 ... 26 54 71] [ 89 82 102 ... 100 44 43] [ 44 93 21 ... 17 74 125]] [[ 4 41 46 ... 13 82 88] [ 56 82 61 ... 115 66 11] [ 13 98 29 ... 119 47 118] ... [ 27 72 104 ... 45 117 25] [ 22 47 91 ... 20 31 48] [ 11 87 34 ... 3 117 56]]]]; ov_res: [[[[ 31 42 103 ... 12 108 95] [113 91 50 ... 123 17 107] [113 126 77 ... 78 92 122] ... [ 21 124 25 ... 90 8 120] [ 73 80 110 ... 40 53 18] [ 76 38 89 ... 24 55 91]] [[116 1 93 ... 123 6 0] [ 74 38 98 ... 47 51 34] [ 92 48 75 ... 65 26 58] ... [123 116 6 ... 26 54 71] [ 89 82 102 ... 100 44 43] [ 44 93 21 ... 17 74 125]] [[ 4 41 46 ... 13 82 88] [ 56 82 61 ... 115 66 11] [ 13 98 29 ... 119 47 118] ... [ 27 72 104 ... 45 117 25] [ 22 47 91 ... 20 31 48] [ 11 87 34 ... 3 117 56]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.uint8'> - input_dtype:<class 'numpy.float64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6437.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[115 126 38 ... 70 51 37] [ 1 1 79 ... 44 2 10] [ 75 99 76 ... 59 38 59] ... [124 91 100 ... 103 118 119] [ 15 64 8 ... 117 22 97] [ 71 103 116 ... 37 7 49]] [[ 96 86 6 ... 31 120 53] [ 56 47 4 ... 80 76 73] [ 64 59 126 ... 57 19 44] ... [ 61 29 125 ... 73 118 110] [ 37 62 9 ... 33 46 101] [ 67 34 107 ... 116 7 54]] [[ 95 83 58 ... 121 38 8] [ 4 53 59 ... 30 82 52] [124 52 18 ... 43 73 56] ... [ 33 14 11 ... 102 65 88] [ 76 100 110 ... 99 87 72] [ 69 40 67 ... 81 74 51]]]]; ov_res: [[[[115 126 38 ... 70 51 37] [ 1 1 79 ... 44 2 10] [ 75 99 76 ... 59 38 59] ... [124 91 100 ... 103 118 119] [ 15 64 8 ... 117 22 97] [ 71 103 116 ... 37 7 49]] [[ 96 86 6 ... 31 120 53] [ 56 47 4 ... 80 76 73] [ 64 59 126 ... 57 19 44] ... [ 61 29 125 ... 73 118 110] [ 37 62 9 ... 33 46 101] [ 67 34 107 ... 116 7 54]] [[ 95 83 58 ... 121 38 8] [ 4 53 59 ... 30 82 52] [124 52 18 ... 43 73 56] ... [ 33 14 11 ... 102 65 88] [ 76 100 110 ... 99 87 72] [ 69 40 67 ... 81 74 51]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.uint8'> - input_dtype:<class 'numpy.float32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6439.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 3 51 107 ... 41 71 17] [ 28 27 116 ... 54 40 8] [ 86 122 32 ... 67 65 68] ... [111 115 125 ... 70 103 18] [ 2 40 79 ... 63 76 122] [106 99 66 ... 123 57 101]] [[121 0 90 ... 114 126 71] [ 88 69 24 ... 90 10 87] [ 27 110 10 ... 33 84 101] ... [ 25 95 114 ... 125 2 103] [113 2 98 ... 109 123 53] [ 66 53 119 ... 30 0 83]] [[ 32 34 86 ... 76 23 22] [123 53 92 ... 76 97 46] [ 30 67 29 ... 44 61 80] ... [ 54 82 45 ... 0 112 119] [ 33 56 19 ... 41 72 52] [125 122 5 ... 105 97 8]]]]; ov_res: [[[[ 3 51 107 ... 41 71 17] [ 28 27 116 ... 54 40 8] [ 86 122 32 ... 67 65 68] ... [111 115 125 ... 70 103 18] [ 2 40 79 ... 63 76 122] [106 99 66 ... 123 57 101]] [[121 0 90 ... 114 126 71] [ 88 69 24 ... 90 10 87] [ 27 110 10 ... 33 84 101] ... [ 25 95 114 ... 125 2 103] [113 2 98 ... 109 123 53] [ 66 53 119 ... 30 0 83]] [[ 32 34 86 ... 76 23 22] [123 53 92 ... 76 97 46] [ 30 67 29 ... 44 61 80] ... [ 54 82 45 ... 0 112 119] [ 33 56 19 ... 41 72 52] [125 122 5 ... 105 97 8]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.uint8'> - input_dtype:<class 'numpy.int64'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6441.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[123 22 38 ... 83 113 96] [ 51 48 16 ... 80 35 108] [ 59 33 20 ... 88 50 34] ... [114 66 9 ... 39 42 56] [ 13 25 84 ... 78 67 0] [ 79 67 91 ... 2 46 58]] [[ 42 57 87 ... 50 49 12] [ 22 85 8 ... 81 42 26] [101 50 84 ... 17 3 6] ... [ 35 7 60 ... 4 116 92] [ 5 124 117 ... 75 30 95] [ 32 11 103 ... 99 98 14]] [[ 65 8 117 ... 93 74 19] [ 2 6 24 ... 85 122 88] [ 12 83 28 ... 86 56 116] ... [ 93 42 25 ... 122 10 111] [ 78 46 79 ... 84 92 126] [ 60 59 21 ... 34 22 102]]]]; ov_res: [[[[123 22 38 ... 83 113 96] [ 51 48 16 ... 80 35 108] [ 59 33 20 ... 88 50 34] ... [114 66 9 ... 39 42 56] [ 13 25 84 ... 78 67 0] [ 79 67 91 ... 2 46 58]] [[ 42 57 87 ... 50 49 12] [ 22 85 8 ... 81 42 26] [101 50 84 ... 17 3 6] ... [ 35 7 60 ... 4 116 92] [ 5 124 117 ... 75 30 95] [ 32 11 103 ... 99 98 14]] [[ 65 8 117 ... 93 74 19] [ 2 6 24 ... 85 122 88] [ 12 83 28 ... 86 56 116] ... [ 93 42 25 ... 122 10 111] [ 78 46 79 ... 84 92 126] [ 60 59 21 ... 34 22 102]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.uint8'> - input_dtype:<class 'numpy.int32'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6443.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 58 82 35 ... 25 69 37] [ 94 83 119 ... 85 13 109] [ 75 12 121 ... 14 112 27] ... [ 78 22 22 ... 92 30 102] [110 125 20 ... 4 84 14] [ 8 73 32 ... 109 5 56]] [[ 56 81 51 ... 47 28 85] [ 98 90 94 ... 37 11 4] [ 28 95 10 ... 49 4 35] ... [125 105 116 ... 54 3 119] [113 10 35 ... 64 79 23] [ 41 120 66 ... 70 24 23]] [[101 4 33 ... 49 69 17] [ 86 87 26 ... 49 62 29] [ 54 91 57 ... 111 35 82] ... [100 59 25 ... 54 81 105] [101 1 54 ... 110 31 30] [ 49 13 77 ... 40 102 18]]]]; ov_res: [[[[ 58 82 35 ... 25 69 37] [ 94 83 119 ... 85 13 109] [ 75 12 121 ... 14 112 27] ... [ 78 22 22 ... 92 30 102] [110 125 20 ... 4 84 14] [ 8 73 32 ... 109 5 56]] [[ 56 81 51 ... 47 28 85] [ 98 90 94 ... 37 11 4] [ 28 95 10 ... 49 4 35] ... [125 105 116 ... 54 3 119] [113 10 35 ... 64 79 23] [ 41 120 66 ... 70 24 23]] [[101 4 33 ... 49 69 17] [ 86 87 26 ... 49 62 29] [ 54 91 57 ... 111 35 82] ... [100 59 25 ... 54 81 105] [101 1 54 ... 110 31 30] [ 49 13 77 ... 40 102 18]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.uint8'> - input_dtype:<class 'numpy.int16'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6445.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 62 94 101 ... 91 32 121] [ 18 76 36 ... 61 59 39] [ 55 26 57 ... 68 73 59] ... [119 85 110 ... 36 123 85] [ 66 56 23 ... 100 14 109] [ 26 22 92 ... 69 123 71]] [[ 20 51 50 ... 37 3 65] [ 3 28 64 ... 89 28 70] [ 62 20 121 ... 81 91 76] ... [ 57 37 101 ... 117 111 65] [ 30 6 64 ... 12 39 28] [101 25 110 ... 104 5 43]] [[ 17 17 72 ... 104 106 44] [ 63 22 68 ... 35 55 122] [ 19 1 1 ... 2 103 52] ... [ 15 111 2 ... 106 55 80] [114 110 111 ... 85 49 107] [ 1 58 54 ... 8 79 57]]]]; ov_res: [[[[ 62 94 101 ... 91 32 121] [ 18 76 36 ... 61 59 39] [ 55 26 57 ... 68 73 59] ... [119 85 110 ... 36 123 85] [ 66 56 23 ... 100 14 109] [ 26 22 92 ... 69 123 71]] [[ 20 51 50 ... 37 3 65] [ 3 28 64 ... 89 28 70] [ 62 20 121 ... 81 91 76] ... [ 57 37 101 ... 117 111 65] [ 30 6 64 ... 12 39 28] [101 25 110 ... 104 5 43]] [[ 17 17 72 ... 104 106 44] [ 63 22 68 ... 35 55 122] [ 19 1 1 ... 2 103 52] ... [ 15 111 2 ... 106 55 80] [114 110 111 ... 85 49 107] [ 1 58 54 ... 8 79 57]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.uint8'> - input_dtype:<class 'numpy.int8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6447.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 40 47 79 ... 101 34 13] [ 93 111 75 ... 89 99 19] [ 46 109 111 ... 113 89 3] ... [ 35 13 45 ... 43 46 122] [ 32 98 26 ... 44 63 11] [117 78 40 ... 41 87 72]] [[ 85 119 74 ... 118 118 83] [ 94 9 21 ... 105 26 49] [ 55 89 25 ... 58 70 86] ... [ 61 76 105 ... 6 56 16] [ 52 0 97 ... 54 63 82] [123 89 112 ... 59 26 108]] [[ 1 83 18 ... 29 123 85] [ 1 26 111 ... 104 117 50] [ 19 14 77 ... 27 80 38] ... [ 78 109 27 ... 114 85 59] [107 8 57 ... 92 117 91] [ 80 9 3 ... 114 33 97]]]]; ov_res: [[[[ 40 47 79 ... 101 34 13] [ 93 111 75 ... 89 99 19] [ 46 109 111 ... 113 89 3] ... [ 35 13 45 ... 43 46 122] [ 32 98 26 ... 44 63 11] [117 78 40 ... 41 87 72]] [[ 85 119 74 ... 118 118 83] [ 94 9 21 ... 105 26 49] [ 55 89 25 ... 58 70 86] ... [ 61 76 105 ... 6 56 16] [ 52 0 97 ... 54 63 82] [123 89 112 ... 59 26 108]] [[ 1 83 18 ... 29 123 85] [ 1 26 111 ... 104 117 50] [ 19 14 77 ... 27 80 38] ... [ 78 109 27 ... 114 85 59] [107 8 57 ... 92 117 91] [ 80 9 3 ... 114 33 97]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_type_as.py::TestTypeAs::test_type_as[ ie_device:CPU - precision:FP32 - cast_dtype:<class 'numpy.uint8'> - input_dtype:<class 'numpy.uint8'> ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_type_as.___torch_mangle_6449.aten_type_as, %x.1 : Tensor, %y.1 : Tensor): %3 : Tensor = aten::type_as(%x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_type_as.py:20:23 return (%3) fw_re: [[[[ 70 119 104 ... 66 66 30] [122 69 4 ... 57 37 114] [ 99 80 124 ... 73 73 67] ... [111 77 82 ... 63 0 19] [ 87 84 115 ... 84 67 79] [100 92 126 ... 57 56 24]] [[ 26 112 60 ... 48 95 85] [117 104 77 ... 17 77 109] [ 2 72 96 ... 5 124 27] ... [ 10 113 98 ... 62 67 71] [ 6 8 48 ... 9 0 96] [ 15 58 98 ... 105 14 51]] [[115 112 9 ... 107 37 44] [114 71 117 ... 12 43 29] [ 47 56 107 ... 100 97 43] ... [ 66 100 53 ... 16 49 3] [125 126 40 ... 117 64 104] [ 8 25 52 ... 79 29 102]]]]; ov_res: [[[[ 70 119 104 ... 66 66 30] [122 69 4 ... 57 37 114] [ 99 80 124 ... 73 73 67] ... [111 77 82 ... 63 0 19] [ 87 84 115 ... 84 67 79] [100 92 126 ... 57 56 24]] [[ 26 112 60 ... 48 95 85] [117 104 77 ... 17 77 109] [ 2 72 96 ... 5 124 27] ... [ 10 113 98 ... 62 67 71] [ 6 8 48 ... 9 0 96] [ 15 58 98 ... 105 14 51]] [[115 112 9 ... 107 37 44] [114 71 117 ... 12 43 29] [ 47 56 107 ... 100 97 43] ... [ 66 100 53 ... 16 49 3] [125 126 40 ... 117 64 104] [ 8 25 52 ... 79 29 102]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_unbind.py::TestUnbind::test_unbind[ ie_device:CPU - precision:FP32 - dim:0 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_unbind.___torch_mangle_6450.aten_unbind, %x.1 : Tensor): %self.dim : int = prim::Constant[value=0]() %3 : Tensor[] = aten::unbind(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_unbind.py:25:28 %a : Tensor, %b.1 : Tensor, %c : Tensor = prim::ListUnpack(%3) return (%b.1) fw_re: [[[21.139723 16.408201 33.838116 ] [38.246895 47.9194 48.569153 ] [39.912582 29.877636 13.89709 ]] [[44.541553 33.596924 41.56566 ] [ 4.1288595 29.253082 13.129561 ] [26.404085 5.888367 21.435501 ]] [[21.630175 23.078253 31.82987 ] [ 6.641913 4.3545394 27.57537 ] [25.623505 0.7288658 23.102385 ]]]; ov_res: [[[21.139723 16.408201 33.838116 ] [38.246895 47.9194 48.569153 ] [39.912582 29.877636 13.89709 ]] [[44.541553 33.596924 41.56566 ] [ 4.1288595 29.253082 13.129561 ] [26.404085 5.888367 21.435501 ]] [[21.630175 23.078253 31.82987 ] [ 6.641913 4.3545394 27.57537 ] [25.623505 0.7288658 23.102385 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_unbind.py::TestUnbind::test_unbind[ ie_device:CPU - precision:FP32 - dim:1 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_unbind.___torch_mangle_6452.aten_unbind, %x.1 : Tensor): %self.dim : int = prim::Constant[value=1]() %3 : Tensor[] = aten::unbind(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_unbind.py:25:28 %a : Tensor, %b.1 : Tensor, %c : Tensor = prim::ListUnpack(%3) return (%b.1) fw_re: [[[12.954017 23.278082 36.757347 ] [ 5.239134 7.1432695 28.82386 ] [ 8.837283 41.72379 19.21208 ]] [[17.793825 7.322639 39.06667 ] [36.03301 45.623333 47.67966 ] [ 7.9780903 46.50041 40.98247 ]] [[44.787895 24.605566 32.148327 ] [31.198507 8.362148 6.924555 ] [28.981724 29.923798 38.650158 ]]]; ov_res: [[[12.954017 23.278082 36.757347 ] [ 5.239134 7.1432695 28.82386 ] [ 8.837283 41.72379 19.21208 ]] [[17.793825 7.322639 39.06667 ] [36.03301 45.623333 47.67966 ] [ 7.9780903 46.50041 40.98247 ]] [[44.787895 24.605566 32.148327 ] [31.198507 8.362148 6.924555 ] [28.981724 29.923798 38.650158 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_unbind.py::TestUnbind::test_unbind[ ie_device:CPU - precision:FP32 - dim:2 ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_unbind.___torch_mangle_6454.aten_unbind, %x.1 : Tensor): %self.dim : int = prim::Constant[value=2]() %3 : Tensor[] = aten::unbind(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_unbind.py:25:28 %a : Tensor, %b.1 : Tensor, %c : Tensor = prim::ListUnpack(%3) return (%b.1) fw_re: [[[13.601005 22.781038 39.94347 ] [29.556713 8.422749 39.387726 ] [47.7281 47.22423 35.473083 ]] [[32.49639 31.46841 36.179424 ] [21.164051 3.3726778 30.43364 ] [23.250696 20.475988 5.7343044]] [[27.626213 2.141161 24.192503 ] [ 5.50474 14.600252 37.95643 ] [ 2.5870447 7.613422 46.170372 ]]]; ov_res: [[[13.601005 22.781038 39.94347 ] [29.556713 8.422749 39.387726 ] [47.7281 47.22423 35.473083 ]] [[32.49639 31.46841 36.179424 ] [21.164051 3.3726778 30.43364 ] [23.250696 20.475988 5.7343044]] [[27.626213 2.141161 24.192503 ] [ 5.50474 14.600252 37.95643 ] [ 2.5870447 7.613422 46.170372 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_unbind.py::TestUnbind::test_unbind[ ie_device:CPU - precision:FP32 - dim:3 ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_unbind.___torch_mangle_6456.aten_unbind, %x.1 : Tensor): %self.dim : int = prim::Constant[value=3]() %3 : Tensor[] = aten::unbind(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_unbind.py:25:28 %a : Tensor, %b.1 : Tensor, %c : Tensor = prim::ListUnpack(%3) return (%b.1) fw_re: [[[34.32151 43.14027 42.76663 ] [21.47382 47.683086 48.31841 ] [13.320309 39.405495 47.93 ]] [[40.64158 40.373867 49.156456 ] [31.538181 5.7618294 3.6005065] [17.726227 10.131507 36.574677 ]] [[34.930424 11.489695 36.69669 ] [45.373837 33.185925 5.5345426] [10.761772 39.66621 6.227941 ]]]; ov_res: [[[34.32151 43.14027 42.76663 ] [21.47382 47.683086 48.31841 ] [13.320309 39.405495 47.93 ]] [[40.64158 40.373867 49.156456 ] [31.538181 5.7618294 3.6005065] [17.726227 10.131507 36.574677 ]] [[34.930424 11.489695 36.69669 ] [45.373837 33.185925 5.5345426] [10.761772 39.66621 6.227941 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_unsqueeze.py::TestUnsqueeze::test_relu[ ie_device:CPU - precision:FP32 - dim:0 - inplace:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_unsqueeze.___torch_mangle_6457.aten_unsqueeze, %x.1 : Tensor): %self.dim : int = prim::Constant[value=0]() %3 : Tensor = aten::unsqueeze(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_unsqueeze.py:25:26 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %3) return (%4) fw_re: [[ 2.0240538 0.5118477 0.29395592 0.14519255 0.51401526 -1.724399 0.52044725 -0.4240193 -0.49523282 1.2501498 ] [ 0.23423553 0.40776995 -0.6207659 1.5764761 -0.08044127 -0.5215812 0.76651114 -0.34484866 0.02403973 0.8591677 ] [ 1.1061426 0.36858895 0.8955118 0.40087837 -2.2170959 -0.8270191 -0.6494311 -0.66931367 -1.2566828 0.76812977] [-1.4803134 -1.1603673 -0.13062225 -0.8973012 0.1108583 -1.8319767 -0.14165732 -0.8446036 0.6541605 0.22644076] [-0.07827395 -1.0691477 1.2765733 0.24069002 -0.03247235 0.279688 -1.4941088 -0.08770219 0.6837622 0.00283781]]; ov_res: [[ 2.0240538 0.5118477 0.29395592 0.14519255 0.51401526 -1.724399 0.52044725 -0.4240193 -0.49523282 1.2501498 ] [ 0.23423553 0.40776995 -0.6207659 1.5764761 -0.08044127 -0.5215812 0.76651114 -0.34484866 0.02403973 0.8591677 ] [ 1.1061426 0.36858895 0.8955118 0.40087837 -2.2170959 -0.8270191 -0.6494311 -0.66931367 -1.2566828 0.76812977] [-1.4803134 -1.1603673 -0.13062225 -0.8973012 0.1108583 -1.8319767 -0.14165732 -0.8446036 0.6541605 0.22644076] [-0.07827395 -1.0691477 1.2765733 0.24069002 -0.03247235 0.279688 -1.4941088 -0.08770219 0.6837622 0.00283781]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[ 2.0240538 0.5118477 0.29395592 0.14519255 0.51401526 -1.724399 0.52044725 -0.4240193 -0.49523282 1.2501498 ] [ 0.23423553 0.40776995 -0.6207659 1.5764761 -0.08044127 -0.5215812 0.76651114 -0.34484866 0.02403973 0.8591677 ] [ 1.1061426 0.36858895 0.8955118 0.40087837 -2.2170959 -0.8270191 -0.6494311 -0.66931367 -1.2566828 0.76812977] [-1.4803134 -1.1603673 -0.13062225 -0.8973012 0.1108583 -1.8319767 -0.14165732 -0.8446036 0.6541605 0.22644076] [-0.07827395 -1.0691477 1.2765733 0.24069002 -0.03247235 0.279688 -1.4941088 -0.08770219 0.6837622 0.00283781]]]; ov_res: [[[ 2.0240538 0.5118477 0.29395592 0.14519255 0.51401526 -1.724399 0.52044725 -0.4240193 -0.49523282 1.2501498 ] [ 0.23423553 0.40776995 -0.6207659 1.5764761 -0.08044127 -0.5215812 0.76651114 -0.34484866 0.02403973 0.8591677 ] [ 1.1061426 0.36858895 0.8955118 0.40087837 -2.2170959 -0.8270191 -0.6494311 -0.66931367 -1.2566828 0.76812977] [-1.4803134 -1.1603673 -0.13062225 -0.8973012 0.1108583 -1.8319767 -0.14165732 -0.8446036 0.6541605 0.22644076] [-0.07827395 -1.0691477 1.2765733 0.24069002 -0.03247235 0.279688 -1.4941088 -0.08770219 0.6837622 0.00283781]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_unsqueeze.py::TestUnsqueeze::test_relu[ ie_device:CPU - precision:FP32 - dim:0 - inplace:True ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_unsqueeze.___torch_mangle_6458.aten_unsqueeze_, %x.1 : Tensor): %self.dim : int = prim::Constant[value=0]() %3 : Tensor = aten::unsqueeze_(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_unsqueeze.py:33:26 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %3) return (%4) nversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? 
memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? 
memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: aten::to with schema: aten::to.dtype(Tensor(a) self, ScalarType dtype, bool non_blocking=False, bool copy=False, MemoryFormat? memory_format=None) -> Tensor(a): Check 'context.input_is_none(memory_format_idx)' failed at src/frontends/pytorch/src/op/to.cpp:46: FrontEnd API failed with OpConversionFailure: : aten::to translation do not support memory_format attribute Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema):fw_re: [[[-0.36328506 -1.2803152 0.84288627 -0.48866427 0.5618947 0.04504762 1.1553755 0.5428858 0.22290504 0.40241307] [ 1.5595797 0.8628689 -0.9594384 1.3049982 1.867506 -0.84424764 0.25364816 0.0723384 0.0420493 -0.0387479 ] [-0.9118466 1.814423 -0.9083975 -0.14824228 0.88974124 -0.4140084 0.48988438 -0.8343221 -0.7890455 -0.81262714] [-0.6397363 0.7416628 -0.1114893 1.0204291 0.15458079 -0.21705161 -0.4639638 1.0875092 0.17440057 0.05684156] [-0.18535042 -0.32493263 1.1087157 2.3314312 0.546247 -0.247038 0.7168422 -0.18032865 -0.17386189 0.78302896]]]; ov_res: [[[-0.36328506 -1.2803152 0.84288627 -0.48866427 0.5618947 0.04504762 1.1553755 0.5428858 0.22290504 0.40241307] [ 1.5595797 0.8628689 -0.9594384 1.3049982 1.867506 -0.84424764 0.25364816 0.0723384 0.0420493 -0.0387479 ] [-0.9118466 1.814423 -0.9083975 -0.14824228 0.88974124 -0.4140084 0.48988438 -0.8343221 -0.7890455 -0.81262714] [-0.6397363 0.7416628 -0.1114893 
1.0204291 0.15458079 -0.21705161 -0.4639638 1.0875092 0.17440057 0.05684156] [-0.18535042 -0.32493263 1.1087157 2.3314312 0.546247 -0.247038 0.7168422 -0.18032865 -0.17386189 0.78302896]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[-0.36328506 -1.2803152 0.84288627 -0.48866427 0.5618947 0.04504762 1.1553755 0.5428858 0.22290504 0.40241307] [ 1.5595797 0.8628689 -0.9594384 1.3049982 1.867506 -0.84424764 0.25364816 0.0723384 0.0420493 -0.0387479 ] [-0.9118466 1.814423 -0.9083975 -0.14824228 0.88974124 -0.4140084 0.48988438 -0.8343221 -0.7890455 -0.81262714] [-0.6397363 0.7416628 -0.1114893 1.0204291 0.15458079 -0.21705161 -0.4639638 1.0875092 0.17440057 0.05684156] [-0.18535042 -0.32493263 1.1087157 2.3314312 0.546247 -0.247038 0.7168422 -0.18032865 -0.17386189 0.78302896]]]; ov_res: [[[-0.36328506 -1.2803152 0.84288627 -0.48866427 0.5618947 0.04504762 1.1553755 0.5428858 0.22290504 0.40241307] [ 1.5595797 0.8628689 -0.9594384 1.3049982 1.867506 -0.84424764 0.25364816 0.0723384 0.0420493 -0.0387479 ] [-0.9118466 1.814423 -0.9083975 -0.14824228 0.88974124 -0.4140084 0.48988438 -0.8343221 -0.7890455 -0.81262714] [-0.6397363 0.7416628 -0.1114893 1.0204291 0.15458079 -0.21705161 -0.4639638 1.0875092 0.17440057 0.05684156] [-0.18535042 -0.32493263 1.1087157 2.3314312 0.546247 -0.247038 0.7168422 -0.18032865 -0.17386189 0.78302896]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_unsqueeze.py::TestUnsqueeze::test_relu[ ie_device:CPU - precision:FP32 - dim:1 - inplace:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_unsqueeze.___torch_mangle_6460.aten_unsqueeze, %x.1 : Tensor): %self.dim : int = prim::Constant[value=1]() %3 : Tensor = aten::unsqueeze(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_unsqueeze.py:25:26 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %3) return (%4) fw_re: [[ 0.33454755 -1.528693 -0.52603745 -0.2185308 0.02499393 -0.70393753 -0.5099726 -1.0641457 -0.2298086 -0.25562084] [ 0.7395322 -1.1441858 0.05797132 0.5972256 2.0193732 0.15532798 -0.11688 -0.9080522 -0.717083 -0.11405352] [ 1.5409033 0.1940166 -1.1307409 -0.36874875 2.7306743 -0.78450036 0.82027406 -1.7136666 -0.7422032 -1.3827143 ] [ 0.2739335 -2.1233518 0.63655007 0.84445316 1.5892425 0.51291376 0.14824708 -0.33808962 1.0497087 3.0012863 ] [-0.5847599 0.68103254 -0.21936704 -0.64298284 -3.1187582 -0.81965554 -1.6809701 -0.47660208 1.4564656 -0.30629638]]; ov_res: [[ 0.33454755 -1.528693 -0.52603745 -0.2185308 0.02499393 -0.70393753 -0.5099726 -1.0641457 -0.2298086 -0.25562084] [ 0.7395322 -1.1441858 0.05797132 0.5972256 2.0193732 0.15532798 -0.11688 -0.9080522 -0.717083 -0.11405352] [ 1.5409033 0.1940166 -1.1307409 -0.36874875 2.7306743 -0.78450036 0.82027406 -1.7136666 -0.7422032 -1.3827143 ] [ 0.2739335 -2.1233518 0.63655007 0.84445316 1.5892425 0.51291376 0.14824708 -0.33808962 1.0497087 3.0012863 ] [-0.5847599 0.68103254 -0.21936704 -0.64298284 -3.1187582 -0.81965554 -1.6809701 -0.47660208 1.4564656 -0.30629638]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[ 0.33454755 -1.528693 -0.52603745 -0.2185308 0.02499393 -0.70393753 -0.5099726 -1.0641457 -0.2298086 -0.25562084]] [[ 0.7395322 -1.1441858 0.05797132 0.5972256 2.0193732 0.15532798 -0.11688 -0.9080522 -0.717083 -0.11405352]] [[ 1.5409033 0.1940166 -1.1307409 -0.36874875 2.7306743 -0.78450036 0.82027406 -1.7136666 -0.7422032 -1.3827143 ]] [[ 0.2739335 -2.1233518 0.63655007 0.84445316 1.5892425 0.51291376 0.14824708 -0.33808962 1.0497087 3.0012863 ]] [[-0.5847599 0.68103254 -0.21936704 -0.64298284 -3.1187582 -0.81965554 -1.6809701 -0.47660208 1.4564656 -0.30629638]]]; ov_res: [[[ 0.33454755 -1.528693 -0.52603745 -0.2185308 0.02499393 -0.70393753 -0.5099726 -1.0641457 -0.2298086 -0.25562084]] [[ 0.7395322 -1.1441858 0.05797132 0.5972256 2.0193732 0.15532798 -0.11688 -0.9080522 -0.717083 -0.11405352]] [[ 1.5409033 0.1940166 -1.1307409 -0.36874875 2.7306743 -0.78450036 0.82027406 -1.7136666 -0.7422032 -1.3827143 ]] [[ 0.2739335 -2.1233518 0.63655007 0.84445316 1.5892425 0.51291376 0.14824708 -0.33808962 1.0497087 3.0012863 ]] [[-0.5847599 0.68103254 -0.21936704 -0.64298284 -3.1187582 -0.81965554 -1.6809701 -0.47660208 1.4564656 -0.30629638]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_unsqueeze.py::TestUnsqueeze::test_relu[ ie_device:CPU - precision:FP32 - dim:1 - inplace:True ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_unsqueeze.___torch_mangle_6462.aten_unsqueeze_, %x.1 : Tensor): %self.dim : int = prim::Constant[value=1]() %3 : Tensor = aten::unsqueeze_(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_unsqueeze.py:33:26 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %3) return (%4) fw_re: [[[ 0.42946303 0.63603336 -1.131676 -0.59674585 1.1541086 1.0894152 0.00420974 1.0002364 -0.858591 -0.71901137]] [[-0.49549904 0.54988533 2.3681457 2.9100482 0.7713185 0.47234616 0.20731932 0.6989806 -1.2156394 -0.19757208]] [[-0.5232486 -1.7777765 1.2973106 0.596495 -0.29282638 -0.567666 -0.79375476 -0.91089404 -3.1468961 -0.03067598]] [[ 0.05422014 -0.132254 0.76748353 -0.20596974 -0.84255165 -0.75783116 0.21162698 0.11479567 0.09618618 -0.01451532]] [[-0.2205551 -1.7351483 0.07493754 1.1755991 -0.15249746 -0.39719948 -1.5701147 -0.9840426 -1.0107195 0.48149443]]]; ov_res: [[[ 0.42946303 0.63603336 -1.131676 -0.59674585 1.1541086 1.0894152 0.00420974 1.0002364 -0.858591 -0.71901137]] [[-0.49549904 0.54988533 2.3681457 2.9100482 0.7713185 0.47234616 0.20731932 0.6989806 -1.2156394 -0.19757208]] [[-0.5232486 -1.7777765 1.2973106 0.596495 -0.29282638 -0.567666 -0.79375476 -0.91089404 -3.1468961 -0.03067598]] [[ 0.05422014 -0.132254 0.76748353 -0.20596974 -0.84255165 -0.75783116 0.21162698 0.11479567 0.09618618 -0.01451532]] [[-0.2205551 -1.7351483 0.07493754 1.1755991 -0.15249746 -0.39719948 -1.5701147 -0.9840426 -1.0107195 0.48149443]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[ 0.42946303 0.63603336 -1.131676 -0.59674585 1.1541086 1.0894152 0.00420974 1.0002364 -0.858591 -0.71901137]] [[-0.49549904 0.54988533 2.3681457 2.9100482 0.7713185 0.47234616 0.20731932 0.6989806 -1.2156394 -0.19757208]] [[-0.5232486 -1.7777765 1.2973106 0.596495 -0.29282638 -0.567666 -0.79375476 -0.91089404 -3.1468961 -0.03067598]] [[ 0.05422014 -0.132254 0.76748353 -0.20596974 -0.84255165 -0.75783116 0.21162698 0.11479567 0.09618618 -0.01451532]] [[-0.2205551 -1.7351483 0.07493754 1.1755991 -0.15249746 -0.39719948 -1.5701147 -0.9840426 -1.0107195 0.48149443]]]; ov_res: [[[ 0.42946303 0.63603336 -1.131676 -0.59674585 1.1541086 1.0894152 0.00420974 1.0002364 -0.858591 -0.71901137]] [[-0.49549904 0.54988533 2.3681457 2.9100482 0.7713185 0.47234616 0.20731932 0.6989806 -1.2156394 -0.19757208]] [[-0.5232486 -1.7777765 1.2973106 0.596495 -0.29282638 -0.567666 -0.79375476 -0.91089404 -3.1468961 -0.03067598]] [[ 0.05422014 -0.132254 0.76748353 -0.20596974 -0.84255165 -0.75783116 0.21162698 0.11479567 0.09618618 -0.01451532]] [[-0.2205551 -1.7351483 0.07493754 1.1755991 -0.15249746 -0.39719948 -1.5701147 -0.9840426 -1.0107195 0.48149443]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_unsqueeze.py::TestUnsqueeze::test_relu[ ie_device:CPU - precision:FP32 - dim:-1 - inplace:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_unsqueeze.___torch_mangle_6464.aten_unsqueeze, %x.1 : Tensor): %self.dim : int = prim::Constant[value=-1]() %3 : Tensor = aten::unsqueeze(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_unsqueeze.py:25:26 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %3) return (%4) fw_re: [[-0.24708764 -0.5281155 -0.01163878 -0.16884506 0.39171517 1.8117796 -1.1250271 -0.03170922 -0.06616009 0.48076424] [ 0.54358387 -0.13081788 0.00817786 -0.15936029 -0.16586405 -0.31625473 0.1696879 1.6504233 -0.5848712 0.36900133] [ 0.00765575 0.5258334 -0.8330306 0.6626003 -1.3215673 1.0881295 -1.082451 -0.58359486 0.08790886 -0.57671946] [-1.4502988 0.79232615 -0.56834453 1.4120729 -0.23062013 -0.99465936 -0.1690491 -1.1411144 -1.494184 -1.5314137 ] [ 0.38478664 -0.63835734 -1.2057574 -1.3973308 -0.4155601 1.4782219 -0.5979156 1.0275548 1.0891553 1.0907753 ]]; ov_res: [[-0.24708764 -0.5281155 -0.01163878 -0.16884506 0.39171517 1.8117796 -1.1250271 -0.03170922 -0.06616009 0.48076424] [ 0.54358387 -0.13081788 0.00817786 -0.15936029 -0.16586405 -0.31625473 0.1696879 1.6504233 -0.5848712 0.36900133] [ 0.00765575 0.5258334 -0.8330306 0.6626003 -1.3215673 1.0881295 -1.082451 -0.58359486 0.08790886 -0.57671946] [-1.4502988 0.79232615 -0.56834453 1.4120729 -0.23062013 -0.99465936 -0.1690491 -1.1411144 -1.494184 -1.5314137 ] [ 0.38478664 -0.63835734 -1.2057574 -1.3973308 -0.4155601 1.4782219 -0.5979156 1.0275548 1.0891553 1.0907753 ]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[-0.24708764] [-0.5281155 ] [-0.01163878] [-0.16884506] [ 0.39171517] [ 1.8117796 ] [-1.1250271 ] [-0.03170922] [-0.06616009] [ 0.48076424]] [[ 0.54358387] [-0.13081788] [ 0.00817786] [-0.15936029] [-0.16586405] [-0.31625473] [ 0.1696879 ] [ 1.6504233 ] [-0.5848712 ] [ 0.36900133]] [[ 0.00765575] [ 0.5258334 ] [-0.8330306 ] [ 0.6626003 ] [-1.3215673 ] [ 1.0881295 ] [-1.082451 ] [-0.58359486] [ 0.08790886] [-0.57671946]] [[-1.4502988 ] [ 0.79232615] [-0.56834453] [ 1.4120729 ] [-0.23062013] [-0.99465936] [-0.1690491 ] [-1.1411144 ] [-1.494184 ] [-1.5314137 ]] [[ 0.38478664] [-0.63835734] [-1.2057574 ] [-1.3973308 ] [-0.4155601 ] [ 1.4782219 ] [-0.5979156 ] [ 1.0275548 ] [ 1.0891553 ] [ 1.0907753 ]]]; ov_res: [[[-0.24708764] [-0.5281155 ] [-0.01163878] [-0.16884506] [ 0.39171517] [ 1.8117796 ] [-1.1250271 ] [-0.03170922] [-0.06616009] [ 0.48076424]] [[ 0.54358387] [-0.13081788] [ 0.00817786] [-0.15936029] [-0.16586405] [-0.31625473] [ 0.1696879 ] [ 1.6504233 ] [-0.5848712 ] [ 0.36900133]] [[ 0.00765575] [ 0.5258334 ] [-0.8330306 ] [ 0.6626003 ] [-1.3215673 ] [ 1.0881295 ] [-1.082451 ] [-0.58359486] [ 0.08790886] [-0.57671946]] [[-1.4502988 ] [ 0.79232615] [-0.56834453] [ 1.4120729 ] [-0.23062013] [-0.99465936] [-0.1690491 ] [-1.1411144 ] [-1.494184 ] [-1.5314137 ]] [[ 0.38478664] [-0.63835734] [-1.2057574 ] [-1.3973308 ] [-0.4155601 ] [ 1.4782219 ] [-0.5979156 ] [ 1.0275548 ] [ 1.0891553 ] [ 1.0907753 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_unsqueeze.py::TestUnsqueeze::test_relu[ ie_device:CPU - precision:FP32 - dim:-1 - inplace:True ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_unsqueeze.___torch_mangle_6466.aten_unsqueeze_, %x.1 : Tensor): %self.dim : int = prim::Constant[value=-1]() %3 : Tensor = aten::unsqueeze_(%x.1, %self.dim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_unsqueeze.py:33:26 %4 : (Tensor, Tensor) = prim::TupleConstruct(%x.1, %3) return (%4) fw_re: [[[-1.0417844 ] [ 0.89973444] [ 0.7806817 ] [ 1.5215276 ] [ 0.7014976 ] [ 0.77071744] [ 1.6296313 ] [-0.45215315] [-1.7813299 ] [-0.66004205]] [[ 0.64716834] [-1.0139962 ] [-0.2828934 ] [-1.2460741 ] [-0.13898358] [ 0.6021525 ] [ 0.93922544] [ 0.65357506] [-1.1611413 ] [ 0.28266552]] [[-0.8709694 ] [ 0.5425701 ] [-1.573487 ] [ 0.08809076] [-0.32986343] [ 1.706152 ] [ 0.8849728 ] [ 0.23356415] [ 0.30860895] [ 0.20087312]] [[-0.33088404] [ 2.782529 ] [-1.0761424 ] [-0.55832326] [-0.20884737] [-0.04022785] [ 1.5060853 ] [-0.5228416 ] [-0.1458908 ] [ 0.3830966 ]] [[ 1.2275275 ] [ 1.3349926 ] [ 0.20924805] [ 1.9523791 ] [-0.9307787 ] [ 0.89128447] [ 0.6765338 ] [ 0.8404358 ] [-0.4486293 ] [ 0.36776075]]]; ov_res: [[[-1.0417844 ] [ 0.89973444] [ 0.7806817 ] [ 1.5215276 ] [ 0.7014976 ] [ 0.77071744] [ 1.6296313 ] [-0.45215315] [-1.7813299 ] [-0.66004205]] [[ 0.64716834] [-1.0139962 ] [-0.2828934 ] [-1.2460741 ] [-0.13898358] [ 0.6021525 ] [ 0.93922544] [ 0.65357506] [-1.1611413 ] [ 0.28266552]] [[-0.8709694 ] [ 0.5425701 ] [-1.573487 ] [ 0.08809076] [-0.32986343] [ 1.706152 ] [ 0.8849728 ] [ 0.23356415] [ 0.30860895] [ 0.20087312]] [[-0.33088404] [ 2.782529 ] [-1.0761424 ] [-0.55832326] [-0.20884737] [-0.04022785] [ 1.5060853 ] [-0.5228416 ] [-0.1458908 ] [ 0.3830966 ]] [[ 1.2275275 ] [ 1.3349926 ] [ 0.20924805] [ 1.9523791 ] [-0.9307787 ] [ 0.89128447] [ 0.6765338 ] [ 0.8404358 ] [-0.4486293 ] [ 0.36776075]]] Accuracy validation successful! 
absolute eps: 0.0001, relative eps: 0.0001 fw_re: [[[-1.0417844 ] [ 0.89973444] [ 0.7806817 ] [ 1.5215276 ] [ 0.7014976 ] [ 0.77071744] [ 1.6296313 ] [-0.45215315] [-1.7813299 ] [-0.66004205]] [[ 0.64716834] [-1.0139962 ] [-0.2828934 ] [-1.2460741 ] [-0.13898358] [ 0.6021525 ] [ 0.93922544] [ 0.65357506] [-1.1611413 ] [ 0.28266552]] [[-0.8709694 ] [ 0.5425701 ] [-1.573487 ] [ 0.08809076] [-0.32986343] [ 1.706152 ] [ 0.8849728 ] [ 0.23356415] [ 0.30860895] [ 0.20087312]] [[-0.33088404] [ 2.782529 ] [-1.0761424 ] [-0.55832326] [-0.20884737] [-0.04022785] [ 1.5060853 ] [-0.5228416 ] [-0.1458908 ] [ 0.3830966 ]] [[ 1.2275275 ] [ 1.3349926 ] [ 0.20924805] [ 1.9523791 ] [-0.9307787 ] [ 0.89128447] [ 0.6765338 ] [ 0.8404358 ] [-0.4486293 ] [ 0.36776075]]]; ov_res: [[[-1.0417844 ] [ 0.89973444] [ 0.7806817 ] [ 1.5215276 ] [ 0.7014976 ] [ 0.77071744] [ 1.6296313 ] [-0.45215315] [-1.7813299 ] [-0.66004205]] [[ 0.64716834] [-1.0139962 ] [-0.2828934 ] [-1.2460741 ] [-0.13898358] [ 0.6021525 ] [ 0.93922544] [ 0.65357506] [-1.1611413 ] [ 0.28266552]] [[-0.8709694 ] [ 0.5425701 ] [-1.573487 ] [ 0.08809076] [-0.32986343] [ 1.706152 ] [ 0.8849728 ] [ 0.23356415] [ 0.30860895] [ 0.20087312]] [[-0.33088404] [ 2.782529 ] [-1.0761424 ] [-0.55832326] [-0.20884737] [-0.04022785] [ 1.5060853 ] [-0.5228416 ] [-0.1458908 ] [ 0.3830966 ]] [[ 1.2275275 ] [ 1.3349926 ] [ 0.20924805] [ 1.9523791 ] [-0.9307787 ] [ 0.89128447] [ 0.6765338 ] [ 0.8404358 ] [-0.4486293 ] [ 0.36776075]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:nearest - size:300 - scale:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6468.aten_upsample, %x : Tensor): %2 : NoneType = prim::Constant() %3 : int[] = prim::Constant[value=[300, 300]]() %4 : Tensor = aten::upsample_nearest2d(%x, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3918:0 return (%4) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:nearest - size:200 - scale:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6471.aten_upsample, %x : Tensor): %2 : NoneType = prim::Constant() %3 : int[] = prim::Constant[value=[200, 200]]() %4 : Tensor = aten::upsample_nearest2d(%x, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3918:0 return (%4) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:nearest - size:(128, 480) - scale:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6474.aten_upsample, %x : Tensor): %2 : NoneType = prim::Constant() %3 : int[] = prim::Constant[value=[128, 480]]() %4 : Tensor = aten::upsample_nearest2d(%x, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3918:0 return (%4) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:nearest - size:None - scale:2.5 ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6477.aten_upsample, %x : Tensor): %2 : float[] = prim::Constant[value=[2.5, 2.5]]() %3 : NoneType = prim::Constant() %4 : Tensor = aten::upsample_nearest2d(%x, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3918:0 return (%4) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:nearest - size:None - scale:0.75 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6480.aten_upsample, %x : Tensor): %2 : float[] = prim::Constant[value=[0.75, 0.75]]() %3 : NoneType = prim::Constant() %4 : Tensor = aten::upsample_nearest2d(%x, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3918:0 return (%4) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:nearest - size:None - scale:(1.2, 0.8) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6483.aten_upsample, %x : Tensor): %2 : float[] = prim::Constant[value=[1.2, 0.80000000000000004]]() %3 : NoneType = prim::Constant() %4 : Tensor = aten::upsample_nearest2d(%x, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3918:0 return (%4) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bilinear - size:300 - scale:None ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6486.aten_upsample, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 %4 : int[] = prim::Constant[value=[300, 300]]() %5 : Tensor = aten::upsample_bilinear2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bilinear - size:200 - scale:None ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6489.aten_upsample, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 %4 : int[] = prim::Constant[value=[200, 200]]() %5 : Tensor = aten::upsample_bilinear2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bilinear - size:(128, 480) - scale:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6492.aten_upsample, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 %4 : int[] = prim::Constant[value=[128, 480]]() %5 : Tensor = aten::upsample_bilinear2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bilinear - size:None - scale:2.5 ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6495.aten_upsample, %x : Tensor): %2 : float[] = prim::Constant[value=[2.5, 2.5]]() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 %4 : NoneType = prim::Constant() %5 : Tensor = aten::upsample_bilinear2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bilinear - size:None - scale:0.75 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6498.aten_upsample, %x : Tensor): %2 : float[] = prim::Constant[value=[0.75, 0.75]]() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 %4 : NoneType = prim::Constant() %5 : Tensor = aten::upsample_bilinear2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bilinear - size:None - scale:(1.2, 0.8) ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6501.aten_upsample, %x : Tensor): %2 : float[] = prim::Constant[value=[1.2, 0.80000000000000004]]() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 %4 : NoneType = prim::Constant() %5 : Tensor = aten::upsample_bilinear2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3946:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bicubic - size:300 - scale:None ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6504.aten_upsample, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 %4 : int[] = prim::Constant[value=[300, 300]]() %5 : Tensor = aten::upsample_bicubic2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 return (%5) Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. 
Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened 
during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Pythofw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bicubic - size:200 - scale:None ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6507.aten_upsample, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 %4 : int[] = prim::Constant[value=[200, 200]]() %5 : Tensor = aten::upsample_bicubic2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bicubic - size:(128, 480) - scale:None ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6510.aten_upsample, %x : Tensor): %2 : NoneType = prim::Constant() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 %4 : int[] = prim::Constant[value=[128, 480]]() %5 : Tensor = aten::upsample_bicubic2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bicubic - size:None - scale:2.5 ] | 0.08 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6513.aten_upsample, %x : Tensor): %2 : float[] = prim::Constant[value=[2.5, 2.5]]() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 %4 : NoneType = prim::Constant() %5 : Tensor = aten::upsample_bicubic2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bicubic - size:None - scale:0.75 ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6516.aten_upsample, %x : Tensor): %2 : float[] = prim::Constant[value=[0.75, 0.75]]() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 %4 : NoneType = prim::Constant() %5 : Tensor = aten::upsample_bicubic2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_upsample.py::TestUpsample2D::test_upsample[ ie_device:CPU - precision:FP32 - mode:bicubic - size:None - scale:(1.2, 0.8) ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_upsample.___torch_mangle_6519.aten_upsample, %x : Tensor): %2 : float[] = prim::Constant[value=[1.2, 0.80000000000000004]]() %3 : bool = prim::Constant[value=0]() # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 %4 : NoneType = prim::Constant() %5 : Tensor = aten::upsample_bicubic2d(%x, %4, %3, %2) # /home/mvafin/.local/lib/python3.8/site-packages/torch/nn/functional.py:3954:0 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var2args[ ie_device:CPU - precision:FP32 - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6520.aten_var2args, %x.1 : Tensor): %self.unbiased : bool = prim::Constant[value=1]() %3 : Tensor = aten::var(%x.1, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:32:23 return (%3) fw_re: 0.9976680874824524; ov_res: 0.9976679682731628 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var2args[ ie_device:CPU - precision:FP32 - unbiased:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6522.aten_var2args, %x.1 : Tensor): %self.unbiased : bool = prim::Constant[value=0]() %3 : Tensor = aten::var(%x.1, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:32:23 return (%3) fw_re: 1.000314474105835; ov_res: 1.0003142356872559 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:None - unbiased:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6523.aten_var, %x.1 : Tensor): %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %self.dim : NoneType = prim::Constant() %5 : Tensor = aten::var(%x.1, %self.dim, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[1.0034268]]]]; ov_res: 1.0034266710281372 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:None - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6525.aten_var, %x.1 : Tensor): %self.unbiased : bool = prim::Constant[value=1]() %self.dim : NoneType = prim::Constant() %4 : Tensor = aten::var(%x.1, %self.dim, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[0.9973801]]]]; ov_res: 0.9973800182342529 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:0 - unbiased:False ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6527.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0]]() %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]]; ov_res: [[[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:0 - unbiased:True ] | 0.06 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6529.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0]]() %self.unbiased : bool = prim::Constant[value=1]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]] [[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]] [[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]]]]; ov_res: [[[[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]] [[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]] [[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:1 - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6531.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[0.820064 1.030746 1.6110171 ... 1.0027635 0.47221705 0.9185807 ] [0.06053109 0.70796245 1.3966945 ... 0.11798063 0.07556888 0.54496455] [0.44857392 0.9500139 0.3329816 ... 0.70982456 0.03787711 0.11005883] ... [0.14123487 2.0055807 1.1126543 ... 0.01411453 0.48296517 0.31522143] [0.231263 0.7046897 0.5049922 ... 0.5701117 0.67680186 0.76254475] [0.07937007 0.26181784 0.37963387 ... 0.03143598 0.1660531 0.9585552 ]]]]; ov_res: [[[[0.820064 1.030746 1.6110172 ... 1.0027635 0.47221708 0.9185807 ] [0.06053109 0.7079625 1.3966945 ... 0.11798063 0.07556887 0.54496455] [0.44857395 0.9500138 0.3329816 ... 0.70982456 0.03787711 0.11005884] ... [0.14123487 2.005581 1.1126543 ... 0.01411453 0.4829652 0.31522143] [0.231263 0.70468974 0.5049922 ... 0.5701117 0.6768019 0.7625448 ] [0.07937007 0.26181784 0.37963387 ... 0.03143597 0.1660531 0.9585552 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:1 - unbiased:True ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6533.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %self.unbiased : bool = prim::Constant[value=1]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[1.5247012 0.8317763 0.26023197 ... 2.0906353 1.2458719 1.9819486 ] [0.8200009 1.0487717 1.5636879 ... 0.2641574 0.24341351 0.50435287] [0.4328709 1.1147972 0.84071976 ... 0.15627037 2.2572513 1.3195095 ] ... [0.65326834 1.1193027 0.3548317 ... 0.04845397 0.14719157 1.8512278 ] [0.5851774 0.23997319 0.77174795 ... 0.45715508 1.4002117 2.681491 ] [0.09853528 1.2866132 1.3501197 ... 0.8745431 0.6351482 0.15309674]]]]; ov_res: [[[[1.5247014 0.8317764 0.26023197 ... 2.0906353 1.2458719 1.9819486 ] [0.8200008 1.0487717 1.5636878 ... 0.26415744 0.24341348 0.50435287] [0.43287086 1.1147972 0.8407198 ... 0.15627038 2.2572513 1.3195095 ] ... [0.6532684 1.1193027 0.3548317 ... 0.04845397 0.14719157 1.8512278 ] [0.5851774 0.23997317 0.77174795 ... 0.4571551 1.4002116 2.681491 ] [0.09853527 1.2866131 1.3501198 ... 0.8745431 0.63514817 0.15309674]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:2 - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6535.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[1.0417632 0.85398716 0.99322504 1.1010735 1.1541759 0.80409724 0.9139068 0.9895269 1.0409336 1.0402662 1.1908178 0.927333 0.95137316 0.9372952 1.0430924 0.8782732 1.0165079 1.1201692 0.9889609 0.81760824 1.0443027 0.94033444 0.9726207 0.93641776 1.0465044 0.99929684 1.0406826 0.77925366 0.9948453 0.8363748 1.0490102 0.901286 1.0431664 1.0033054 0.97809136 0.99510324 0.95030504 1.0636184 1.0660871 1.2266282 0.8763077 1.0325147 1.1795828 0.9517589 1.0730275 0.9971816 1.1372011 1.0166241 1.0649571 1.0187508 0.9790824 0.96573246 1.0004843 0.8775199 0.8572518 0.8834715 1.0599297 1.1360283 1.02318 1.1891371 0.9041774 1.0048891 0.865978 0.963892 1.0813956 1.051555 1.1009973 1.0329187 0.97388804 0.94074464 1.1945024 1.0109997 1.1625065 0.8961189 1.0536075 0.98263466 1.1532497 0.92291373 1.0641228 0.9244359 1.0551116 0.8591145 1.1756109 1.0478898 1.1546131 1.0076998 1.0705411 0.80810887 1.0941819 1.0430505 0.91073895 0.91412437 0.9286834 0.8214397 1.0107753 0.8968447 0.9332039 1.0193684 1.0023848 1.0914854 1.038604 1.0858513 1.0526782 1.0120534 1.0330756 0.89501995 0.7332398 1.1097295 0.9379765 1.1600405 1.026557 0.8821553 1.0989449 0.8700141 0.9818003 0.9970924 1.1178155 0.97534484 0.98575675 1.1012261 0.9395009 0.9669457 1.1321269 0.9707119 1.0875306 1.0106605 0.9909954 0.9140492 1.0563083 1.0233098 1.0391582 0.8071383 0.8697857 0.94129205 1.0682328 0.8268478 1.1586672 0.9243246 1.031355 0.7182065 1.0051355 1.071275 0.8698368 1.0974568 1.0408039 0.87263864 1.1048427 0.8289055 1.080974 1.0678968 0.9889571 0.89715976 
1.1499625 1.1900519 1.0324402 1.200458 0.964961 0.9729452 1.0210696 0.8411741 1.0345705 1.092394 0.9073347 0.9192902 0.8957693 0.9147767 0.9417045 0.9437868 1.0116398 0.9505287 0.9058537 0.8632682 0.8194083 1.1176666 0.93904936 0.89863265 0.9333067 1.0310886 1.0192493 0.87019295 1.0164964 0.9267582 0.90890604 1.022623 1.1194249 1.1124685 0.7975041 0.91339356 0.98460877 1.0507004 0.98784596 0.9326339 0.9859848 1.0370395 1.0948356 0.96157664 1.0087166 1.0843552 1.1135626 0.9585397 0.8554493 0.89366925 1.0311534 1.0886155 1.0911486 1.0659529 1.1460217 1.0523454 0.9920895 1.127381 0.9543485 0.97173744 1.0063187 1.047375 1.0030869 1.011309 0.9768957 1.066481 0.9381759 0.9058569 1.1702589 1.0086291 1.0788914 0.9176823 ]] [[1.0326326 0.94832355 1.0412734 1.073869 0.83435786 0.99004877 0.8628867 0.9920501 0.86886626 1.0751168 0.9784542 0.9792557 1.0438253 0.8979934 1.0294108 0.9385366 0.8574548 0.93840736 0.8920987 0.9494559 0.93296176 0.8269139 0.92474574 0.8375242 0.9912221 0.94669795 0.98794353 0.8653898 0.8712815 1.1439174 0.90303123 1.0127035 0.9249132 1.0273931 0.9807122 1.0697119 1.0645546 0.90215605 1.1074361 0.9865367 0.8150775 1.1273913 0.8792566 1.1450846 1.1338174 0.9132226 1.0365574 1.0957683 0.9515363 0.9975149 0.99119264 1.1443478 0.93713665 0.94474554 1.1566916 0.8733921 0.91618097 1.0548052 1.0377964 0.97041565 0.993545 0.9432668 1.0354711 0.99667406 0.9159099 1.0118972 0.9675443 0.9258522 1.0256569 0.92970544 0.88019985 0.81138456 0.90457016 0.9538842 0.98074466 0.9482185 0.889562 1.0977035 1.0000111 1.0398461 1.060163 0.9294553 0.86256224 1.0216049 1.074933 1.02598 0.86168915 0.96007323 0.97066486 1.059616 0.9099334 1.0236 1.0171889 1.0709882 1.0003092 1.0350354 0.93734926 0.9312499 0.85549045 0.9969404 0.99124426 1.0609277 0.99478567 0.8063894 1.01293 0.9859256 1.0437146 1.031684 1.0308722 0.78238416 1.0344765 0.9399464 0.88813007 1.0362103 0.9186544 0.89931154 0.9603727 1.0935873 1.0746515 0.9669671 0.8943293 1.1007738 0.9275471 1.0861543 0.73712385 
0.8063011 0.95163405 0.9839476 0.92640245 1.0319552 1.0169393 0.9964943 1.1819052 0.89478254 0.8632186 1.0408605 1.1778554 0.92766505 0.9867119 1.0638354 0.8162526 1.043147 0.98622537 1.141604 1.0880455 1.0353687 0.8828127 0.8147941 1.264688 0.9932683 1.0362897 1.0491993 0.8681342 0.82605726 1.0327456 1.0433038 0.9058485 0.81089294 1.0126344 1.065989 0.98571754 0.8885148 1.0967578 0.97146493 1.1430241 0.9773987 0.87437344 1.0446714 0.8711725 1.0135442 1.0442302 0.90321577 0.9813045 1.0182966 1.06142 1.0074016 0.89909273 0.88009405 1.0536737 1.0933172 1.0266514 1.0139192 1.0553927 0.9943502 0.9263692 0.93995297 0.9355577 1.0838655 1.1825123 1.009118 1.0440176 0.895421 1.2219458 1.2228329 0.9972938 1.07721 1.021657 0.95759004 0.9316751 1.1067358 1.0472139 1.1097531 1.0200088 0.9144263 0.8396141 1.2135997 0.9226286 0.97354704 0.97554123 1.035053 1.1165583 1.0791829 0.9703604 0.9322211 1.073634 1.0458002 1.0443805 0.85848993 0.88642275 0.89872664 0.97332513 0.93039834 0.9848008 0.9897988 ]] [[0.8229083 0.8462706 0.9821985 0.9236205 1.0310436 1.0236403 0.9268253 0.8140067 1.208453 0.88399893 1.1248217 1.0938995 0.93629515 1.0338067 0.9734892 0.9409897 1.0188622 1.0301484 1.0065485 1.0953434 1.0828756 0.9466199 0.9940787 1.0299714 0.9077803 1.0564693 0.8103511 0.8695987 1.1477963 1.0359688 0.98779136 1.0318094 0.9524604 1.0625198 0.9753023 1.025717 0.8454356 1.0081061 0.98798573 1.0205231 1.0055573 0.95685524 1.0181482 1.0143687 1.0376221 0.97941875 1.247036 0.9682798 1.0251628 0.9287947 1.0276372 1.0333759 0.8856098 0.898415 1.0749505 0.96270883 1.0238783 1.0131714 0.91202956 0.9270498 1.0113076 1.0578471 0.9592896 0.8674512 1.0382199 0.7919544 0.92886984 0.9677113 1.0717428 1.103179 1.0634162 1.0050932 1.0706061 1.0348115 1.0586472 0.912943 1.0167654 1.1179638 0.9985376 0.87300617 1.1135756 0.8917478 0.9229032 0.95945406 1.1127578 1.2346472 0.866842 1.165132 1.0248407 0.8021228 0.9631901 0.9370165 0.95488656 0.99007833 0.87313974 1.0269399 1.1937963 0.8625544 0.938765 
1.0138592 0.9677877 0.9996556 0.9303056 1.012001 0.9988727 0.9409276 0.96306497 1.1376913 0.8976486 0.8233371 0.9482101 0.9105957 1.0135468 1.1763464 1.0491214 1.0887477 1.0187489 0.9832207 1.1258553 0.8875325 1.0773684 0.9153448 0.9683005 0.8570485 0.8390339 0.927662 0.9554492 0.98013747 0.90183765 0.8327322 0.9231258 0.93410015 1.2863698 1.225692 1.19352 0.8608549 1.0087783 1.1716561 1.016572 0.9998145 1.0421126 1.0331272 1.1883125 1.0324064 0.96839714 0.9900199 0.98196876 0.81587416 0.84132874 0.97054017 0.88850665 1.1612004 1.1212262 0.97865075 1.0086398 1.0459229 0.855395 0.84148926 1.0627917 1.035199 1.0013863 1.1144321 1.0041338 1.0729159 0.92770106 0.9079194 0.98128754 0.9457876 0.93942636 1.0051917 1.0746974 1.1234128 0.95123756 1.1165761 0.9377223 0.92890865 0.9584542 1.0266083 1.0726678 0.9290165 1.0130048 0.8481277 1.1104444 1.0418066 0.9848098 1.0784028 1.0405412 0.9221219 1.0325983 1.0197446 0.95852417 1.1178123 0.98670095 0.9601698 1.1414064 0.97696704 0.88341904 1.0264258 0.97467005 1.0100391 0.9364527 0.9617817 1.014204 1.07658 0.87994444 0.8677879 0.9828425 0.89384276 1.0262558 1.023423 0.97770387 1.1108335 0.92759067 1.1844362 0.93372846 1.169976 0.84650666 1.0930588 0.904988 0.9744356 0.9895389 0.92850274 0.86141646 1.0947716 ]]]]; ov_res: [[[[1.041763 0.8539871 0.9932247 1.1010736 1.1541756 0.80409735 0.91390705 0.98952705 1.0409333 1.0402659 1.1908177 0.92733306 0.9513731 0.93729514 1.0430926 0.8782735 1.016508 1.1201693 0.98896074 0.81760836 1.0443027 0.9403344 0.9726209 0.9364177 1.0465041 0.99929714 1.0406829 0.7792536 0.99484557 0.83637464 1.0490104 0.901286 1.0431665 1.0033052 0.9780912 0.995103 0.9503053 1.0636184 1.0660874 1.2266285 0.8763076 1.0325149 1.1795827 0.951759 1.0730275 0.99718136 1.1372012 1.0166241 1.0649571 1.0187509 0.9790826 0.96573263 1.0004841 0.8775201 0.8572521 0.8834715 1.0599297 1.1360282 1.0231801 1.1891369 0.90417755 1.0048889 0.8659778 0.9638919 1.0813957 1.0515554 1.1009971 1.0329192 0.97388804 0.9407447 
1.1945026 1.0109996 1.1625067 0.8961187 1.0536075 0.9826347 1.1532499 0.92291373 1.0641229 0.9244359 1.0551122 0.8591146 1.1756107 1.0478898 1.1546129 1.0077001 1.0705416 0.80810887 1.0941818 1.0430504 0.9107389 0.91412467 0.9286829 0.82143986 1.0107758 0.89684486 0.93320405 1.0193683 1.0023848 1.0914854 1.0386041 1.0858514 1.0526781 1.0120534 1.0330757 0.8950197 0.73323965 1.1097296 0.9379767 1.1600405 1.026557 0.88215554 1.0989449 0.8700141 0.9818004 0.99709225 1.1178156 0.9753448 0.9857567 1.1012262 0.9395011 0.9669456 1.1321269 0.97071224 1.0875309 1.0106603 0.9909953 0.914049 1.0563084 1.0233097 1.0391585 0.80713856 0.86978585 0.9412919 1.0682327 0.8268477 1.1586676 0.9243244 1.0313549 0.71820676 1.0051357 1.0712749 0.8698365 1.0974568 1.0408041 0.87263876 1.1048424 0.8289055 1.0809736 1.067897 0.98895746 0.8971596 1.1499623 1.1900518 1.0324401 1.2004582 0.9649607 0.9729456 1.0210701 0.84117424 1.0345706 1.0923938 0.9073344 0.91929036 0.89576966 0.9147765 0.9417046 0.94378674 1.01164 0.95052844 0.9058538 0.863268 0.81940836 1.117667 0.93904936 0.8986326 0.9333064 1.0310882 1.0192494 0.8701929 1.0164961 0.92675793 0.9089064 1.0226228 1.1194247 1.1124684 0.7975041 0.9133935 0.98460895 1.0507004 0.98784584 0.9326336 0.9859849 1.0370396 1.0948355 0.96157664 1.0087163 1.0843548 1.1135628 0.95853966 0.8554496 0.8936692 1.0311534 1.0886154 1.0911487 1.0659529 1.1460217 1.0523456 0.99208957 1.1273812 0.9543485 0.97173756 1.0063189 1.0473751 1.0030868 1.0113089 0.9768957 1.0664812 0.938176 0.9058569 1.1702591 1.008629 1.0788912 0.91768247]] [[1.0326322 0.94832313 1.0412735 1.073869 0.8343579 0.990049 0.8628868 0.9920501 0.8688661 1.0751165 0.9784542 0.9792556 1.043825 0.8979939 1.0294106 0.93853694 0.8574545 0.938407 0.8920988 0.94945586 0.93296164 0.82691383 0.9247459 0.83752406 0.9912223 0.94669807 0.98794377 0.86538965 0.8712815 1.1439173 0.9030312 1.0127034 0.9249133 1.0273935 0.98071194 1.0697123 1.0645545 0.90215605 1.1074358 0.98653644 0.8150775 1.1273913 
0.8792564 1.1450847 1.1338173 0.91322273 1.0365573 1.0957683 0.95153654 0.9975146 0.9911927 1.1443479 0.93713653 0.94474554 1.1566914 0.87339175 0.91618127 1.0548055 1.0377965 0.97041523 0.9935447 0.9432667 1.035471 0.9966737 0.91590977 1.0118971 0.9675445 0.9258524 1.0256568 0.92970574 0.8802003 0.8113846 0.9045698 0.95388395 0.9807444 0.94821846 0.88956213 1.0977033 1.0000112 1.0398458 1.0601634 0.92945534 0.8625623 1.0216049 1.0749328 1.0259799 0.8616891 0.9600732 0.9706645 1.0596161 0.9099339 1.0236001 1.0171893 1.0709883 1.0003093 1.0350357 0.9373493 0.93124974 0.85549027 0.99694026 0.99124414 1.0609277 0.9947859 0.80638945 1.0129298 0.98592556 1.0437145 1.0316837 1.0308722 0.78238404 1.0344763 0.93994606 0.88813007 1.0362105 0.91865444 0.89931154 0.9603726 1.0935874 1.0746516 0.9669671 0.8943292 1.1007739 0.9275469 1.0861545 0.73712385 0.8063007 0.9516338 0.98394763 0.92640275 1.0319555 1.0169395 0.99649435 1.1819055 0.8947826 0.8632186 1.0408605 1.1778551 0.9276652 0.9867116 1.0638355 0.8162524 1.043147 0.98622555 1.1416042 1.0880456 1.0353683 0.8828129 0.8147942 1.2646878 0.99326813 1.0362898 1.0491992 0.8681343 0.8260573 1.0327457 1.0433038 0.90584856 0.81089294 1.0126345 1.0659888 0.9857173 0.8885147 1.096758 0.971465 1.1430237 0.9773986 0.8743733 1.0446712 0.8711728 1.0135441 1.0442297 0.90321577 0.9813045 1.0182965 1.06142 1.0074016 0.8990926 0.8800941 1.0536739 1.0933169 1.0266511 1.0139195 1.0553929 0.99435043 0.9263689 0.93995297 0.93555766 1.0838658 1.182512 1.0091182 1.0440177 0.8954213 1.2219458 1.2228328 0.9972933 1.0772103 1.021657 0.9575901 0.93167484 1.1067357 1.0472138 1.1097533 1.0200089 0.9144264 0.83961385 1.2135999 0.9226286 0.9735471 0.97554094 1.035053 1.1165582 1.0791827 0.97036046 0.9322217 1.0736339 1.0458 1.0443805 0.85849005 0.88642246 0.8987263 0.97332513 0.9303984 0.98480046 0.98979884]] [[0.8229079 0.8462705 0.98219866 0.9236202 1.0310439 1.02364 0.92682505 0.81400675 1.2084526 0.8839989 1.1248218 1.0938995 0.93629485 1.033807 
0.9734892 0.94098955 1.0188624 1.0301481 1.0065483 1.0953437 1.0828757 0.94662 0.99407864 1.029971 0.9077801 1.0564696 0.8103511 0.86959845 1.1477963 1.0359687 0.9877915 1.0318093 0.9524603 1.0625203 0.9753023 1.0257171 0.8454353 1.0081066 0.9879856 1.0205233 1.0055578 0.9568552 1.0181481 1.0143685 1.0376221 0.97941846 1.2470354 0.9682799 1.0251626 0.92879456 1.0276369 1.033376 0.88560975 0.89841497 1.0749506 0.962709 1.0238787 1.0131716 0.91202974 0.92704976 1.0113076 1.0578469 0.95928967 0.86745113 1.0382197 0.7919546 0.9288698 0.96771145 1.0717427 1.1031789 1.0634162 1.0050932 1.0706061 1.0348114 1.0586472 0.912943 1.0167652 1.1179638 0.99853766 0.87300617 1.1135757 0.8917479 0.92290336 0.95945436 1.1127578 1.2346472 0.8668415 1.1651323 1.0248407 0.80212253 0.96319044 0.93701667 0.95488626 0.9900784 0.8731395 1.0269399 1.1937965 0.8625545 0.93876517 1.013859 0.9677874 0.99965495 0.9303058 1.0120007 0.99887276 0.94092757 0.96306556 1.1376911 0.8976485 0.82333714 0.9482101 0.9105959 1.0135466 1.1763467 1.0491211 1.0887481 1.0187489 0.9832211 1.1258551 0.8875326 1.0773681 0.9153449 0.9683 0.8570486 0.8390338 0.9276619 0.9554492 0.9801372 0.9018374 0.83273226 0.9231261 0.9341002 1.2863694 1.225692 1.1935202 0.86085546 1.0087783 1.171656 1.016572 0.9998147 1.0421124 1.0331274 1.1883126 1.0324066 0.96839696 0.9900193 0.98196846 0.8158741 0.8413283 0.9705397 0.8885068 1.1612005 1.1212261 0.97865075 1.0086398 1.0459226 0.85539526 0.84148914 1.0627917 1.035199 1.0013865 1.114432 1.004134 1.0729162 0.92770106 0.90791947 0.98128736 0.9457878 0.9394264 1.0051919 1.0746973 1.1234127 0.951238 1.1165763 0.9377224 0.9289085 0.958454 1.0266087 1.0726675 0.9290162 1.0130049 0.8481279 1.1104449 1.041806 0.98481005 1.0784026 1.0405412 0.92212194 1.0325979 1.0197442 0.9585242 1.1178124 0.98670095 0.9601701 1.1414068 0.97696733 0.88341916 1.0264255 0.97467 1.0100387 0.9364528 0.9617817 1.0142039 1.0765797 0.8799444 0.8677883 0.98284274 0.893843 1.0262558 1.023423 0.97770345 1.1108335 
0.92759067 1.1844366 0.9337285 1.1699762 0.84650695 1.0930588 0.9049879 0.97443545 0.9895387 0.9285024 0.86141616 1.0947719 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:2 - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6537.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %self.unbiased : bool = prim::Constant[value=1]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[0.9513503 1.0648981 1.014849 0.95467395 0.9447831 1.0529627 0.8864807 1.0743525 1.0060008 1.1833401 1.1132169 0.95151013 0.9236868 1.126727 0.96090585 1.2250944 1.0236979 0.8985345 1.1362092 0.99532425 1.145697 1.0181763 0.945812 1.1030002 0.941697 0.9728105 1.1055356 0.9594681 1.0634689 0.91288817 1.013072 0.9981314 0.95772064 0.9330347 0.8295273 1.0710504 0.9369892 0.8763611 1.0581259 1.1009662 0.75138104 1.021067 1.1094294 0.9740326 0.98709023 1.0250757 1.0085621 0.936835 0.9452789 1.0383282 1.0937102 1.0341619 0.8954744 0.9744202 0.98096794 1.0671322 0.92349863 1.0396836 0.9690062 1.0096357 0.91854864 1.1336917 1.0707258 0.9144624 1.0162845 0.9628724 0.9750951 0.9151946 1.1852044 0.9409667 0.89988947 0.90652543 1.0433795 1.0164794 0.8713251 0.88998747 1.117962 1.0804098 0.91183287 0.95029056 0.78270495 1.060614 0.95231587 0.8958631 0.98438823 1.0816183 0.8787963 0.8883206 0.9516152 1.1333792 1.016384 0.85611314 1.0380795 0.85806525 1.0792558 1.0393038 1.0097843 0.96559983 0.88650596 1.1569762 0.9859688 0.98911077 0.96338207 1.0070803 1.0511177 1.0668405 1.0970578 1.0379202 1.0479585 0.99136543 0.99706805 1.0492793 1.025198 1.0102084 0.9842974 1.1181974 1.0281063 1.0800552 0.96824425 0.913095 0.9155765 1.0529201 1.0558115 0.99329174 0.8131231 0.9171422 0.9395786 1.0993086 0.9770062 0.9781539 0.9523569 0.9279562 0.86572105 1.0446526 0.9306512 0.94084626 0.8433146 0.99766463 1.0697447 0.96954775 1.1257235 0.8297172 0.9819318 1.0649425 0.9113882 0.93503547 1.1094707 0.9953029 1.0456737 1.0466825 1.0770842 0.9264018 1.1180285 1.0219696 1.0907855 0.95963335 
1.0709567 0.99617434 0.95441747 1.0280824 1.038259 1.0527009 0.9428076 0.9549794 0.97272915 1.0628005 0.93428093 0.96252257 0.859043 0.9714034 1.0313512 0.86545616 1.0928602 0.93900263 1.1152217 1.0725362 1.048332 1.0139337 0.9525638 0.9693436 1.0620599 0.94518954 0.93131936 0.7796441 1.0832641 1.0369247 0.96938396 0.9283276 1.0789466 0.87182814 1.0315851 0.9838885 0.99529856 1.0713958 0.9113697 0.95546216 1.1146938 0.8904038 0.9965845 1.0141827 0.9504596 1.0778958 1.0288223 1.0528349 1.2326597 1.0483447 1.0300041 0.86106837 1.116495 1.0608209 1.1115968 0.9596381 0.99436235 0.9617062 1.0013071 0.9549196 1.0592748 1.0489923 0.91534436 1.041636 0.90139484 0.9438281 1.0435311 1.0031494 ]] [[0.9659907 1.1138334 0.9336254 0.9980799 0.93393403 0.9749491 0.9949865 1.1351966 1.0431398 1.1800421 1.0154475 0.931551 0.9450663 1.2283752 1.0018611 1.014192 0.964572 0.9020193 0.9651855 1.0547372 0.84337187 1.033414 1.1009905 0.92247903 1.0062072 0.8425574 1.054818 0.95920414 1.0177379 0.94061327 1.036231 0.99474996 1.0131809 1.0462359 0.87418985 0.89551747 0.9845645 1.0777749 0.88257027 1.1166373 1.082407 1.121717 0.82639575 1.0551472 1.0143027 0.99201393 0.90608656 0.7886897 1.1337953 1.0540415 0.9288828 0.85866016 1.08273 0.9054175 1.0820456 1.0590078 0.9995138 0.9346487 1.0229892 1.0364542 1.0703171 0.987682 1.1509572 1.1346952 1.1758356 1.1861147 0.96632415 1.0151756 1.0445279 1.0763409 1.0657893 0.96674156 1.1261202 1.0322753 0.9074435 0.9864279 0.9860343 1.2319748 1.0838008 0.85804844 0.9862267 1.2607046 1.063356 0.93784237 1.1141635 0.89552647 1.0207134 1.0148517 0.8798557 0.92815506 1.0510261 1.0004213 1.0753071 1.0359986 0.93058085 0.8846735 1.0065607 0.8771266 1.0443707 1.0826902 0.90968287 1.0936953 1.1669666 1.1363776 1.0154754 1.089114 1.0019653 0.9288031 1.1329391 0.934202 0.85678 1.0531685 0.8894925 1.189221 0.9580423 0.91585475 0.97068393 0.752894 0.93083835 1.0086474 1.0365162 0.96569765 1.0872418 1.1884292 0.93530816 1.0938786 0.9898443 1.1534607 0.8809237 
0.9741289 0.98173004 1.0780394 0.98432875 0.92328864 1.2074372 1.0058804 1.0897832 1.1415069 0.91121984 0.98955274 0.94396704 0.9628724 0.98240966 1.2133197 1.0835134 0.8643981 1.1474814 0.87710315 1.025073 1.0121292 0.9822642 1.0588692 1.023327 1.0131063 0.92140514 1.0132251 1.03332 1.1652497 1.0261564 0.995691 1.3516062 0.87131625 1.028463 0.883945 0.940594 0.9440685 0.92056394 0.87627566 1.089029 1.0665768 0.9809534 1.0574859 0.95751846 0.9544344 1.0120666 1.1860133 1.1108925 0.84826237 1.0640103 0.8346061 0.9782972 1.0980113 1.1035192 1.0897735 0.9778528 1.0166367 1.0997741 0.9974935 1.0123451 1.2096637 1.0018722 1.0209097 0.977808 1.0105146 1.0502228 0.9331741 1.1504757 1.1351962 0.8962865 0.9700956 0.93194014 0.95998245 0.98492897 0.98771495 0.79849607 1.0772929 0.89397997 1.0422343 0.83830225 1.021695 1.2445614 1.0557578 1.113527 1.0526738 0.95472497 1.0466627 0.99569947 0.89366025 1.0045345 0.9747739 1.0834049 1.0474796 0.88325644 1.0428108 ]] [[0.93501794 1.0808474 0.8787512 0.9335662 0.813682 1.0108922 1.0504804 1.1155456 0.86862236 1.0101815 1.0725217 1.0488157 1.0109361 0.87912446 1.021715 1.0168006 1.0225124 0.91713214 1.1120117 0.9059411 1.1187725 0.95316607 0.97811264 1.1862661 0.9127988 1.1495552 0.98797876 0.8837616 1.058743 1.0534855 1.0802253 1.152801 1.0595404 1.026419 0.8781072 1.0102876 1.0404818 0.9487965 0.87592924 1.109186 1.0504797 0.9938965 1.0616819 1.0166305 0.8774498 1.0440372 1.038214 1.140719 1.0291747 0.9342758 0.8305014 0.82648075 1.1106908 1.039909 1.0950652 1.0833046 1.0553911 1.0260377 1.004432 1.0079368 0.9621326 0.96924216 1.0656521 1.0048623 1.0367088 0.9939922 0.9568671 0.96765256 0.79662746 0.9002607 0.8878128 1.0847943 0.9400345 1.0467638 1.0128481 0.89577794 1.0672555 0.9427277 0.9859224 1.0712564 1.0614296 0.98107594 0.98055726 0.9422701 1.0766511 1.0731069 1.0551428 0.97877353 1.0363408 0.96038866 0.90863 0.9396883 0.9772714 1.0917842 0.8987402 0.9113762 0.8766666 1.0146545 0.8595419 1.196375 0.76862025 0.97437704 
0.9438645 0.976468 1.0120283 0.95913535 1.0282753 1.0111431 0.9747556 0.9571276 1.2329425 1.139753 1.0515257 0.9002521 0.87610245 0.85779715 0.8780268 1.132337 0.99817675 1.0197707 1.0580384 0.93908715 1.1012973 1.0317868 0.9766676 0.9534713 0.9911451 0.8905428 1.1826149 1.1444368 1.0555105 1.149979 0.8684172 1.0296663 1.3924273 0.98925316 0.9051808 1.0183631 1.0633254 0.8371876 1.102628 1.0359092 0.98227376 1.0223228 0.9508173 1.0740753 0.9766312 1.0982101 0.9658534 1.0132451 0.826644 0.8579798 0.9691625 1.0238519 0.9631538 0.83689225 0.8933102 0.9484109 0.87684566 1.1121303 0.9169011 1.1941804 1.0027398 1.0213776 0.91442895 0.9985908 1.0806768 1.0098991 0.96364254 1.0849893 1.0064263 0.86601883 1.106085 0.953714 1.035492 1.1069729 1.1861274 0.8857132 1.0296338 0.90494263 1.10683 1.0065608 0.85116667 1.2068127 0.9913539 1.0512079 1.0115592 0.9217745 0.8233561 0.96477044 0.909326 1.0179734 0.9900218 0.83692634 0.9546708 0.99667823 1.0684055 1.0833044 1.1039397 1.036179 1.0826225 0.8990856 1.0147215 0.91229445 0.912016 1.0366714 0.9476981 0.91834533 1.14055 0.9780878 0.91255367 0.88648933 0.9341677 1.131071 0.8332216 0.8677705 0.9902465 1.0229366 1.0843711 0.8955449 0.91781175 0.90555996 0.93560565 0.95870167]]]]; ov_res: [[[[0.9513502 1.0648979 1.0148491 0.95467407 0.94478315 1.052963 0.8864806 1.0743527 1.0060009 1.1833405 1.1132172 0.9515101 0.9236869 1.1267272 0.9609061 1.2250946 1.0236987 0.8985343 1.1362091 0.9953242 1.1456971 1.0181762 0.94581234 1.1029999 0.9416972 0.9728105 1.1055359 0.9594679 1.0634694 0.9128883 1.013072 0.99813116 0.9577205 0.9330346 0.82952696 1.0710502 0.936989 0.8763611 1.0581259 1.100966 0.75138116 1.0210671 1.1094298 0.97403246 0.9870904 1.0250757 1.0085622 0.9368348 0.94527894 1.0383282 1.0937102 1.0341618 0.8954744 0.9744204 0.98096806 1.0671321 0.9234989 1.0396839 0.9690063 1.0096359 0.9185488 1.1336917 1.0707254 0.9144625 1.0162841 0.9628722 0.97509485 0.91519475 1.1852044 0.9409666 0.89988965 0.90652555 1.0433798 1.0164795 
0.8713255 0.8899875 1.1179621 1.0804098 0.911833 0.95029086 0.78270495 1.0606141 0.9523163 0.89586294 0.9843879 1.0816182 0.87879646 0.88832045 0.951615 1.1333792 1.0163841 0.856113 1.0380794 0.8580651 1.0792557 1.0393041 1.0097842 0.9655996 0.8865054 1.1569762 0.985969 0.9891108 0.9633821 1.0070803 1.051118 1.0668405 1.0970579 1.0379207 1.0479589 0.99136555 0.997068 1.0492795 1.025198 1.0102085 0.9842973 1.1181974 1.0281065 1.0800555 0.9682446 0.91309506 0.9155768 1.0529201 1.0558115 0.99329185 0.81312305 0.91714257 0.9395783 1.0993087 0.9770064 0.9781537 0.95235723 0.92795616 0.86572117 1.0446525 0.93065107 0.9408466 0.8433143 0.9976647 1.0697445 0.9695479 1.1257235 0.82971734 0.98193175 1.0649427 0.91138816 0.9350355 1.1094706 0.99530286 1.0456734 1.0466825 1.0770842 0.92640173 1.118029 1.0219697 1.0907856 0.95963323 1.0709568 0.99617445 0.9544178 1.0280824 1.0382591 1.052701 0.9428075 0.9549796 0.972729 1.0628004 0.93428105 0.9625226 0.859043 0.9714036 1.0313512 0.8654559 1.0928602 0.93900293 1.1152219 1.0725363 1.0483319 1.0139334 0.9525637 0.96934384 1.0620601 0.94518995 0.9313196 0.77964383 1.0832645 1.0369247 0.9693838 0.9283277 1.078947 0.8718281 1.0315852 0.98388827 0.9952986 1.071396 0.91136956 0.95546234 1.114694 0.89040416 0.99658406 1.0141828 0.9504601 1.0778959 1.0288227 1.0528353 1.2326599 1.048345 1.0300043 0.86106884 1.116495 1.0608208 1.111597 0.95963794 0.9943623 0.9617062 1.001307 0.9549195 1.0592748 1.0489924 0.91534436 1.0416358 0.9013951 0.9438282 1.0435311 1.0031494 ]] [[0.9659909 1.1138338 0.9336254 0.9980799 0.93393415 0.97494876 0.9949868 1.1351966 1.0431399 1.180042 1.0154475 0.9315508 0.9450666 1.2283747 1.0018613 1.0141915 0.9645722 0.90201926 0.96518576 1.0547369 0.8433719 1.0334139 1.1009905 0.92247903 1.0062072 0.84255755 1.0548182 0.95920384 1.0177381 0.9406133 1.0362312 0.99475056 1.0131807 1.0462359 0.8741899 0.89551723 0.9845644 1.0777748 0.88257027 1.1166371 1.0824074 1.1217169 0.8263958 1.0551472 1.014303 0.9920139 0.9060868 
0.7886899 1.1337951 1.0540417 0.928883 0.85866 1.0827304 0.9054174 1.0820457 1.0590079 0.9995141 0.9346487 1.0229896 1.036454 1.0703168 0.9876819 1.1509571 1.1346954 1.175836 1.1861142 0.9663243 1.0151757 1.0445278 1.0763409 1.0657893 0.9667416 1.1261201 1.0322751 0.9074436 0.9864277 0.9860346 1.2319753 1.0838009 0.8580485 0.9862268 1.2607048 1.0633559 0.93784267 1.1141635 0.89552635 1.0207136 1.0148517 0.87985563 0.9281553 1.0510261 1.0004213 1.0753075 1.0359987 0.93058103 0.8846737 1.006561 0.8771267 1.0443711 1.0826904 0.909683 1.0936953 1.1669667 1.1363777 1.0154754 1.0891142 1.0019654 0.92880297 1.1329392 0.93420196 0.8567799 1.0531689 0.88949263 1.1892208 0.95804214 0.91585493 0.97068423 0.752894 0.9308379 1.0086474 1.0365163 0.96569777 1.087242 1.1884297 0.9353085 1.0938787 0.9898442 1.1534606 0.88092345 0.9741292 0.98173016 1.0780393 0.9843289 0.9232886 1.2074374 1.0058802 1.0897833 1.1415069 0.9112199 0.9895529 0.94396704 0.9628723 0.98240995 1.2133191 1.0835131 0.8643985 1.1474818 0.87710315 1.0250729 1.0121293 0.98226434 1.058869 1.0233269 1.0131062 0.9214052 1.0132251 1.03332 1.1652501 1.0261567 0.995691 1.3516059 0.87131613 1.0284626 0.88394535 0.9405941 0.94406843 0.9205638 0.8762759 1.0890288 1.066577 0.98095334 1.0574857 0.9575184 0.9544346 1.0120668 1.1860133 1.1108929 0.84826213 1.0640104 0.83460635 0.9782969 1.0980114 1.1035193 1.0897733 0.9778529 1.0166363 1.0997742 0.9974935 1.012345 1.209664 1.0018724 1.0209092 0.9778082 1.0105144 1.0502234 0.9331743 1.1504762 1.1351961 0.8962865 0.9700957 0.9319401 0.9599825 0.984929 0.9877149 0.7984959 1.0772929 0.8939801 1.0422342 0.83830225 1.021695 1.2445613 1.0557579 1.113527 1.0526739 0.9547251 1.0466623 0.9956991 0.8936603 1.0045348 0.97477406 1.0834051 1.0474797 0.88325655 1.042811 ]] [[0.93501806 1.0808474 0.87875164 0.9335663 0.8136822 1.0108923 1.0504806 1.1155456 0.86862236 1.0101819 1.0725222 1.0488158 1.0109364 0.8791245 1.0217149 1.0168008 1.0225123 0.91713214 1.1120118 0.90594137 1.1187723 
0.9531662 0.9781125 1.1862665 0.91279876 1.1495551 0.9879789 0.88376176 1.058743 1.0534858 1.080225 1.1528012 1.05954 1.026419 0.8781074 1.0102878 1.040482 0.94879675 0.8759294 1.1091856 1.0504795 0.9938964 1.0616823 1.01663 0.87745 1.0440375 1.0382141 1.1407192 1.0291746 0.9342754 0.83050144 0.82648075 1.1106907 1.0399088 1.0950656 1.0833044 1.0553911 1.0260378 1.0044318 1.0079364 0.9621323 0.96924216 1.0656523 1.0048624 1.036709 0.99399227 0.95686716 0.9676528 0.7966274 0.9002609 0.8878129 1.0847945 0.9400347 1.0467639 1.0128485 0.8957779 1.0672554 0.94272757 0.98592263 1.0712565 1.0614299 0.9810762 0.98055756 0.9422703 1.0766509 1.0731069 1.055143 0.97877336 1.0363413 0.96038866 0.9086299 0.93968827 0.9772714 1.0917842 0.8987404 0.91137666 0.8766668 1.0146551 0.85954154 1.1963749 0.7686204 0.9743769 0.9438645 0.976468 1.0120285 0.95913553 1.0282755 1.011143 0.97475594 0.95712763 1.2329423 1.1397529 1.0515258 0.90025216 0.87610245 0.8577972 0.87802696 1.1323373 0.99817705 1.0197705 1.0580385 0.93908745 1.1012971 1.0317872 0.97666764 0.9534716 0.99114484 0.8905428 1.1826142 1.1444366 1.0555108 1.1499792 0.868417 1.0296667 1.3924274 0.9892528 0.9051805 1.0183628 1.0633252 0.8371879 1.102628 1.0359093 0.9822737 1.0223231 0.9508177 1.0740752 0.97663164 1.0982101 0.9658532 1.0132455 0.8266439 0.8579794 0.9691624 1.0238516 0.96315366 0.8368921 0.89331067 0.94841105 0.87684554 1.1121299 0.9169009 1.1941803 1.00274 1.0213777 0.9144289 0.9985907 1.080677 1.009899 0.96364295 1.0849893 1.0064266 0.8660189 1.106085 0.95371425 1.035492 1.1069733 1.1861274 0.8857133 1.0296338 0.90494263 1.1068302 1.006561 0.8511667 1.2068126 0.9913539 1.0512087 1.0115592 0.9217747 0.823356 0.9647708 0.909326 1.0179735 0.9900219 0.8369264 0.95467097 0.99667865 1.0684053 1.0833046 1.1039394 1.0361793 1.0826224 0.89908534 1.0147218 0.912294 0.91201586 1.0366716 0.9476982 0.91834545 1.1405505 0.9780879 0.91255325 0.8864893 0.9341678 1.131071 0.83322155 0.8677702 0.9902462 1.0229367 1.0843712 
0.89554536 0.9178119 0.90555996 0.93560594 0.95870227]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:3 - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6539.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3]]() %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[1.0211424 ] [1.1578798 ] [1.0377675 ] [1.0962595 ] [1.1844693 ] [0.9795321 ] [1.0421042 ] [1.003219 ] [1.0854179 ] [0.8011482 ] [1.2499225 ] [0.8635436 ] [0.8760864 ] [1.0159286 ] [1.018234 ] [0.9920924 ] [0.9343296 ] [1.0210818 ] [1.0237675 ] [0.86372775] [0.92724055] [1.0741096 ] [0.93804705] [0.96650094] [1.0681218 ] [0.8354607 ] [1.0187178 ] [1.2557216 ] [1.0502386 ] [0.9587925 ] [1.1104074 ] [0.9686063 ] [1.1127378 ] [0.9712704 ] [0.948822 ] [0.89955 ] [1.0387399 ] [1.0817752 ] [0.8987421 ] [1.030141 ] [0.95453775] [1.0124434 ] [0.9012188 ] [0.9806829 ] [0.90722436] [0.93271494] [1.0249691 ] [0.91677266] [0.9197953 ] [1.113883 ] [1.0354959 ] [1.0978558 ] [1.0655762 ] [1.0753553 ] [1.0127075 ] [1.1180291 ] [0.8249646 ] [0.9239412 ] [0.9416102 ] [0.85619295] [1.1768292 ] [1.141387 ] [1.160987 ] [0.9616142 ] [0.8006964 ] [0.9429642 ] [1.003655 ] [0.95403165] [0.97758937] [1.1717064 ] [0.96763456] [0.88867265] [0.96733356] [0.95894384] [0.8357563 ] [0.93671 ] [1.0388044 ] [1.0163069 ] [1.0196593 ] [0.95515513] [1.0353625 ] [1.0758427 ] [1.047974 ] [1.0235727 ] [1.0612088 ] [1.0810342 ] [1.1474277 ] [1.2438703 ] [0.8189659 ] [1.0384632 ] [1.1029444 ] [0.9544987 ] [0.9686049 ] [1.0231903 ] [0.9396268 ] [1.0948175 ] [1.0415943 ] [1.0352081 ] [1.0389698 ] [0.89135826] [0.89219016] [1.0538986 ] [0.9074159 ] [1.0628421 ] [0.93748254] [0.9341327 ] [0.9888761 ] [0.8836775 ] [1.057043 ] [1.0358708 ] [0.9164498 ] [1.0138553 ] [0.9328102 ] [0.8624971 ] [0.9378622 ] [1.3045267 ] [0.9732364 ] [0.97757536] [0.95011425] 
[0.9572886 ] [1.0702702 ] [0.8585765 ] [1.0716082 ] [1.0294453 ] [0.98394567] [0.99973905] [1.001974 ] [1.0185173 ] [1.0058922 ] [0.97726315] [0.96362925] [1.1071712 ] [0.9970458 ] [1.0813861 ] [0.8490719 ] [0.91455203] [0.9673389 ] [0.9151611 ] [0.93831253] [0.89793867] [1.008339 ] [0.9377391 ] [1.022103 ] [1.0192268 ] [0.9937789 ] [0.9242581 ] [0.94999534] [0.89382064] [0.9153301 ] [1.1114588 ] [1.1552292 ] [1.0625396 ] [0.99172544] [1.0899891 ] [1.06038 ] [0.94379926] [0.9950652 ] [0.8993719 ] [1.1410831 ] [1.127003 ] [1.033618 ] [0.8966034 ] [0.9059152 ] [1.020094 ] [0.97511786] [1.0484575 ] [0.81164515] [0.9648285 ] [1.1257763 ] [0.8549991 ] [1.0928094 ] [1.0906965 ] [1.0614694 ] [0.8713077 ] [1.1858107 ] [0.83713084] [1.038512 ] [0.70758975] [0.9381027 ] [0.904853 ] [0.9714974 ] [1.1088427 ] [1.0634961 ] [1.0162164 ] [1.1358294 ] [0.88033 ] [0.8700354 ] [1.0954857 ] [0.87877136] [0.9893098 ] [0.9825464 ] [1.1800787 ] [0.85670674] [1.1078153 ] [1.1096255 ] [1.0532115 ] [1.0119497 ] [0.973378 ] [0.9941888 ] [0.87397027] [0.9572311 ] [1.062757 ] [0.99125105] [1.084534 ] [1.0556343 ] [1.0980284 ] [1.0667806 ] [0.8130388 ] [0.98063916] [0.97654563] [1.1687821 ] [1.0306629 ] [0.82835597] [1.1480333 ] [1.058007 ] [1.0624102 ] [0.92939293] [1.0412401 ] [1.1157683 ] [0.79283905] [1.0432265 ] [0.9247261 ] [0.91743404] [1.073541 ]] [[1.0136527 ] [1.0003794 ] [1.0833843 ] [0.9673017 ] [0.972751 ] [1.0105374 ] [0.95672756] [0.84969455] [1.1631284 ] [1.1240262 ] [1.0689842 ] [1.0403895 ] [0.9210544 ] [0.96117324] [0.9027932 ] [0.9735914 ] [0.93498653] [1.0055753 ] [0.9465187 ] [0.961477 ] [0.9071284 ] [1.0755372 ] [0.9208918 ] [1.0323488 ] [1.0680178 ] [0.9606541 ] [0.9181168 ] [0.83129996] [1.0395033 ] [1.0145015 ] [1.0342193 ] [1.1014496 ] [1.0100926 ] [1.1267705 ] [0.9194013 ] [1.0382724 ] [0.88182944] [0.93441314] [1.0288184 ] [0.8605525 ] [0.92490405] [0.8631015 ] [0.90242654] [1.0459036 ] [0.9751915 ] [1.0190585 ] [1.1002833 ] [0.8669741 ] [1.0132687 ] [0.97780514] 
[0.9572395 ] [1.0431584 ] [1.1267673 ] [0.9977365 ] [0.9025962 ] [1.0073179 ] [1.1317056 ] [0.9663476 ] [0.9740898 ] [0.8940441 ] [1.0434307 ] [1.0369599 ] [1.0482441 ] [1.0178745 ] [0.8821887 ] [0.8278006 ] [1.0391003 ] [1.0620837 ] [1.0076028 ] [0.91804 ] [1.0657477 ] [0.970685 ] [1.087426 ] [1.0462668 ] [0.87856776] [0.92118424] [0.98960096] [1.1840976 ] [1.1215714 ] [1.0157224 ] [0.87672365] [1.1574451 ] [1.0902522 ] [1.0536333 ] [0.9330092 ] [0.91665965] [0.9882377 ] [0.9930842 ] [0.95053816] [0.8758427 ] [0.94300985] [1.0933673 ] [0.8786274 ] [0.8810357 ] [0.86957985] [1.1283497 ] [0.87849915] [1.142234 ] [0.9959767 ] [0.9204899 ] [1.0493008 ] [0.87563527] [1.0715086 ] [0.91599154] [0.89373714] [0.9293291 ] [1.0298254 ] [0.8598989 ] [1.0386984 ] [0.9565125 ] [1.0879415 ] [0.9920346 ] [1.0545062 ] [1.032248 ] [1.0720251 ] [0.9535263 ] [1.0380497 ] [0.9865132 ] [1.0479382 ] [0.87319654] [1.1477867 ] [0.94876486] [0.96723825] [1.1686662 ] [1.0013286 ] [1.0608937 ] [0.9313063 ] [1.0642744 ] [1.0817283 ] [0.9212013 ] [0.8479656 ] [1.092754 ] [0.99290264] [1.191818 ] [0.9553128 ] [1.0868502 ] [1.0300312 ] [0.8715416 ] [0.90654486] [1.2019742 ] [1.1339777 ] [1.0694058 ] [0.86885214] [0.88582367] [1.0455836 ] [0.9991802 ] [0.87835896] [0.9398124 ] [1.0646976 ] [1.0201112 ] [0.87959236] [1.1769475 ] [1.0174586 ] [0.9307972 ] [0.9459172 ] [1.2111607 ] [1.0935407 ] [0.9860867 ] [1.0732259 ] [0.9542904 ] [0.90529734] [0.956296 ] [0.91655195] [1.095045 ] [1.1160529 ] [1.0077757 ] [1.041929 ] [1.0362984 ] [0.9171702 ] [1.0201194 ] [0.99520284] [0.8426212 ] [0.88029367] [1.1193475 ] [1.0699537 ] [1.0446248 ] [1.019243 ] [1.0917585 ] [0.9085022 ] [1.1014267 ] [0.812619 ] [1.0171912 ] [1.0958567 ] [1.0664587 ] [1.0072181 ] [0.9602581 ] [1.0820122 ] [1.1310428 ] [1.1273018 ] [0.9708573 ] [1.039416 ] [0.8926166 ] [1.1832743 ] [1.0395851 ] [1.1027217 ] [0.9462977 ] [1.0377051 ] [0.97496724] [1.2138859 ] [0.95056975] [1.0998687 ] [1.0177783 ] [0.9794766 ] [0.93501383] [1.0534577 
] [0.8695078 ] [0.9968211 ] [0.9777662 ] [0.9577994 ] [0.9953554 ] [1.0133712 ] [0.87705773] [1.1612383 ] [0.92631865] [0.9546865 ] [1.1026467 ] [0.957029 ] [0.95513 ] [1.1035197 ] [1.064593 ] [0.9772295 ] [0.93497056] [0.986429 ] [0.93802583]] [[0.9003852 ] [0.89396036] [0.96974707] [1.0269542 ] [1.0811441 ] [1.0377378 ] [0.95959574] [1.0475848 ] [1.0984591 ] [1.0293788 ] [1.0572975 ] [1.1404527 ] [1.0278187 ] [0.9841262 ] [1.1244293 ] [1.0555192 ] [1.0012493 ] [1.0815187 ] [1.0379884 ] [0.9350775 ] [1.0206163 ] [1.0346748 ] [1.0693823 ] [0.9968717 ] [1.053825 ] [1.0969213 ] [1.0341741 ] [0.9863242 ] [0.8074394 ] [0.9169595 ] [0.98824084] [1.2041689 ] [0.9309546 ] [0.9737788 ] [1.0325089 ] [1.0609239 ] [1.0474212 ] [0.90725476] [0.9622678 ] [0.8903818 ] [0.94506645] [1.0754968 ] [0.9835489 ] [1.09958 ] [1.0705239 ] [0.83237094] [1.0538067 ] [0.99953157] [0.97597647] [0.90781444] [1.0542301 ] [1.1423512 ] [0.89874554] [0.84184116] [1.0414853 ] [1.0772947 ] [0.892792 ] [0.92978406] [0.80233175] [1.0624423 ] [0.92930955] [1.0686215 ] [0.8545602 ] [0.8597012 ] [0.9104484 ] [1.0405781 ] [1.0599035 ] [0.99082434] [1.0800561 ] [0.9208864 ] [0.9583124 ] [1.0222737 ] [0.83378696] [1.1287743 ] [0.89045405] [0.9906802 ] [0.89896256] [0.84456325] [0.8861617 ] [0.93612206] [0.868337 ] [1.0743037 ] [1.107429 ] [1.0981337 ] [0.78803307] [1.0297337 ] [1.1465752 ] [0.996416 ] [1.0849485 ] [1.081958 ] [0.9437243 ] [1.012107 ] [0.91311747] [0.95318437] [1.0548935 ] [1.0514237 ] [1.0686731 ] [0.94069713] [0.9347745 ] [1.0076876 ] [0.92759323] [1.0833178 ] [0.86430085] [1.0324198 ] [0.93825966] [1.0804534 ] [1.0324098 ] [0.79667854] [1.0762016 ] [0.9487769 ] [0.9618806 ] [1.013403 ] [0.9257293 ] [0.8886378 ] [1.1570132 ] [1.0725933 ] [0.94103175] [0.95223594] [1.0466154 ] [1.0203375 ] [1.0933456 ] [1.1064655 ] [1.2467849 ] [0.9884629 ] [1.0443621 ] [1.0085431 ] [1.1714021 ] [0.98093885] [1.1882828 ] [0.921312 ] [1.120377 ] [1.1201203 ] [1.004787 ] [0.9780645 ] [1.0102055 ] [0.8240758 
] [0.97102165] [1.0519238 ] [0.99034333] [1.0208197 ] [1.0059838 ] [1.0458103 ] [0.9668965 ] [1.0764807 ] [1.0574247 ] [0.940112 ] [0.8858722 ] [0.90217865] [0.9790768 ] [1.1186395 ] [1.0392413 ] [0.95022076] [0.8858493 ] [0.9659215 ] [0.8752331 ] [1.189869 ] [1.0917153 ] [1.0916733 ] [0.9923434 ] [1.1158752 ] [0.98298955] [1.2568483 ] [0.9852509 ] [0.990089 ] [0.89894515] [1.0186071 ] [0.9053635 ] [1.14101 ] [0.9583553 ] [1.1695547 ] [0.86649245] [1.2823296 ] [1.0018841 ] [0.85226744] [1.0148416 ] [0.87226415] [1.0050977 ] [0.86817664] [1.0032959 ] [1.0118802 ] [1.1265944 ] [1.0155907 ] [0.9487281 ] [0.9437064 ] [0.97449964] [1.0574557 ] [1.1229624 ] [0.9704152 ] [0.9039278 ] [0.97888815] [0.9562973 ] [0.97341645] [0.90910614] [1.1868773 ] [0.96091145] [0.992248 ] [1.151933 ] [1.0491351 ] [1.1203654 ] [0.89298177] [0.9460998 ] [0.87642014] [0.9774936 ] [1.1804937 ] [0.8825222 ] [0.8252416 ] [1.0599461 ] [1.0598125 ] [0.8949825 ] [0.9934367 ] [1.1175984 ] [0.9255133 ] [1.0102468 ] [0.98568386] [1.1508894 ] [1.1193757 ] [1.0122919 ] [1.0094148 ] [0.9691173 ] [1.0615706 ] [1.0023688 ] [0.8836468 ] [1.1286023 ] [1.2626891 ]]]]; ov_res: [[[[1.0211424 ] [1.1578798 ] [1.0377675 ] [1.0962594 ] [1.1844693 ] [0.9795321 ] [1.0421042 ] [1.003219 ] [1.0854179 ] [0.80114824] [1.2499225 ] [0.8635436 ] [0.87608635] [1.0159286 ] [1.018234 ] [0.9920923 ] [0.93432957] [1.0210818 ] [1.0237675 ] [0.86372775] [0.92724055] [1.0741094 ] [0.938047 ] [0.9665009 ] [1.0681218 ] [0.83546066] [1.0187178 ] [1.2557214 ] [1.0502387 ] [0.95879257] [1.1104075 ] [0.9686064 ] [1.1127378 ] [0.9712704 ] [0.948822 ] [0.89954996] [1.0387398 ] [1.0817751 ] [0.8987421 ] [1.030141 ] [0.95453775] [1.0124435 ] [0.9012187 ] [0.9806829 ] [0.90722436] [0.93271494] [1.0249691 ] [0.91677266] [0.9197952 ] [1.113883 ] [1.0354959 ] [1.0978558 ] [1.0655763 ] [1.0753553 ] [1.0127075 ] [1.1180292 ] [0.8249646 ] [0.9239412 ] [0.9416102 ] [0.85619295] [1.1768292 ] [1.141387 ] [1.160987 ] [0.9616142 ] [0.8006964 ] 
[0.94296414] [1.0036548 ] [0.95403165] [0.9775894 ] [1.1717064 ] [0.96763456] [0.88867265] [0.9673335 ] [0.95894384] [0.8357563 ] [0.93671 ] [1.0388043 ] [1.0163069 ] [1.0196592 ] [0.9551551 ] [1.0353625 ] [1.0758427 ] [1.047974 ] [1.0235727 ] [1.0612088 ] [1.0810342 ] [1.1474277 ] [1.2438704 ] [0.8189659 ] [1.0384634 ] [1.1029443 ] [0.9544987 ] [0.9686049 ] [1.0231903 ] [0.9396268 ] [1.0948174 ] [1.0415943 ] [1.0352081 ] [1.0389698 ] [0.89135826] [0.8921901 ] [1.0538986 ] [0.9074159 ] [1.0628421 ] [0.93748254] [0.93413264] [0.98887616] [0.8836774 ] [1.057043 ] [1.0358708 ] [0.91644984] [1.0138553 ] [0.93281025] [0.86249703] [0.9378623 ] [1.3045267 ] [0.9732364 ] [0.97757536] [0.95011425] [0.9572886 ] [1.0702702 ] [0.8585765 ] [1.0716082 ] [1.0294453 ] [0.98394567] [0.99973905] [1.001974 ] [1.0185173 ] [1.0058922 ] [0.97726315] [0.96362925] [1.1071712 ] [0.9970459 ] [1.081386 ] [0.8490719 ] [0.9145521 ] [0.967339 ] [0.91516113] [0.93831253] [0.8979386 ] [1.008339 ] [0.9377391 ] [1.022103 ] [1.0192268 ] [0.99377877] [0.9242582 ] [0.94999534] [0.89382064] [0.91533005] [1.1114587 ] [1.1552292 ] [1.0625395 ] [0.9917255 ] [1.0899891 ] [1.06038 ] [0.9437993 ] [0.9950653 ] [0.8993719 ] [1.1410831 ] [1.127003 ] [1.033618 ] [0.8966034 ] [0.9059152 ] [1.020094 ] [0.9751178 ] [1.0484575 ] [0.8116451 ] [0.9648285 ] [1.1257764 ] [0.8549991 ] [1.0928094 ] [1.0906965 ] [1.0614694 ] [0.8713077 ] [1.1858107 ] [0.83713084] [1.038512 ] [0.70758975] [0.9381027 ] [0.904853 ] [0.9714974 ] [1.1088427 ] [1.0634961 ] [1.0162164 ] [1.1358296 ] [0.88033 ] [0.87003547] [1.0954857 ] [0.87877136] [0.98930985] [0.9825464 ] [1.1800787 ] [0.85670674] [1.1078154 ] [1.1096255 ] [1.0532113 ] [1.0119497 ] [0.97337806] [0.99418885] [0.8739703 ] [0.9572311 ] [1.062757 ] [0.99125105] [1.0845339 ] [1.0556343 ] [1.0980283 ] [1.0667804 ] [0.8130388 ] [0.9806391 ] [0.97654563] [1.1687821 ] [1.0306629 ] [0.8283559 ] [1.1480331 ] [1.058007 ] [1.0624102 ] [0.92939293] [1.04124 ] [1.1157683 ] [0.79283905] 
[1.0432265 ] [0.92472607] [0.9174341 ] [1.073541 ]] [[1.0136527 ] [1.0003794 ] [1.0833843 ] [0.9673017 ] [0.972751 ] [1.0105374 ] [0.95672756] [0.84969455] [1.1631283 ] [1.1240262 ] [1.0689842 ] [1.0403895 ] [0.92105436] [0.9611732 ] [0.9027932 ] [0.9735914 ] [0.93498653] [1.0055752 ] [0.9465188 ] [0.961477 ] [0.90712833] [1.0755373 ] [0.9208918 ] [1.0323488 ] [1.0680178 ] [0.9606541 ] [0.91811675] [0.8312999 ] [1.0395032 ] [1.0145015 ] [1.0342193 ] [1.1014496 ] [1.0100926 ] [1.1267706 ] [0.9194012 ] [1.0382723 ] [0.88182944] [0.9344131 ] [1.0288184 ] [0.8605525 ] [0.92490417] [0.8631015 ] [0.90242654] [1.0459034 ] [0.9751914 ] [1.0190583 ] [1.1002833 ] [0.8669742 ] [1.0132687 ] [0.97780514] [0.95723957] [1.0431584 ] [1.1267673 ] [0.9977365 ] [0.9025962 ] [1.0073179 ] [1.1317056 ] [0.9663476 ] [0.9740898 ] [0.89404404] [1.0434308 ] [1.0369598 ] [1.0482441 ] [1.0178745 ] [0.8821887 ] [0.82780063] [1.0391003 ] [1.0620838 ] [1.0076027 ] [0.91804 ] [1.0657476 ] [0.970685 ] [1.087426 ] [1.0462669 ] [0.87856776] [0.9211842 ] [0.989601 ] [1.1840975 ] [1.1215713 ] [1.0157224 ] [0.8767237 ] [1.1574451 ] [1.0902522 ] [1.0536335 ] [0.93300927] [0.9166597 ] [0.98823774] [0.99308425] [0.95053816] [0.87584263] [0.9430099 ] [1.0933673 ] [0.87862736] [0.8810357 ] [0.86957985] [1.1283495 ] [0.87849915] [1.142234 ] [0.9959766 ] [0.9204899 ] [1.0493008 ] [0.87563527] [1.0715086 ] [0.9159915 ] [0.89373714] [0.9293291 ] [1.0298254 ] [0.8598989 ] [1.0386984 ] [0.95651245] [1.0879414 ] [0.9920346 ] [1.0545061 ] [1.0322481 ] [1.0720251 ] [0.9535264 ] [1.0380497 ] [0.9865132 ] [1.0479382 ] [0.87319654] [1.1477867 ] [0.94876486] [0.9672383 ] [1.1686662 ] [1.0013286 ] [1.0608937 ] [0.9313063 ] [1.0642744 ] [1.0817282 ] [0.9212012 ] [0.84796566] [1.092754 ] [0.99290264] [1.191818 ] [0.9553127 ] [1.0868502 ] [1.0300312 ] [0.8715416 ] [0.9065448 ] [1.2019742 ] [1.1339777 ] [1.0694058 ] [0.8688521 ] [0.88582367] [1.0455836 ] [0.99918014] [0.87835896] [0.93981236] [1.0646975 ] [1.0201111 ] 
[0.87959236] [1.1769475 ] [1.0174587 ] [0.93079716] [0.9459171 ] [1.2111607 ] [1.0935408 ] [0.9860867 ] [1.0732259 ] [0.95429033] [0.90529734] [0.9562961 ] [0.916552 ] [1.095045 ] [1.1160529 ] [1.0077757 ] [1.041929 ] [1.0362984 ] [0.9171702 ] [1.0201194 ] [0.9952029 ] [0.8426212 ] [0.88029367] [1.1193475 ] [1.0699537 ] [1.0446247 ] [1.019243 ] [1.0917585 ] [0.90850216] [1.1014267 ] [0.81261903] [1.0171912 ] [1.0958568 ] [1.0664587 ] [1.0072182 ] [0.9602581 ] [1.0820122 ] [1.1310428 ] [1.1273018 ] [0.9708573 ] [1.039416 ] [0.8926167 ] [1.1832741 ] [1.0395851 ] [1.1027217 ] [0.94629776] [1.0377051 ] [0.9749673 ] [1.2138859 ] [0.9505697 ] [1.0998687 ] [1.0177783 ] [0.9794765 ] [0.93501383] [1.0534577 ] [0.8695078 ] [0.9968211 ] [0.97776616] [0.9577994 ] [0.9953554 ] [1.0133712 ] [0.87705773] [1.1612383 ] [0.92631865] [0.9546865 ] [1.1026467 ] [0.95702904] [0.95513 ] [1.1035197 ] [1.064593 ] [0.9772295 ] [0.93497056] [0.98642904] [0.93802583]] [[0.9003852 ] [0.8939604 ] [0.9697471 ] [1.0269543 ] [1.0811442 ] [1.0377378 ] [0.95959574] [1.0475849 ] [1.0984591 ] [1.0293788 ] [1.0572975 ] [1.1404526 ] [1.0278187 ] [0.9841262 ] [1.1244292 ] [1.0555192 ] [1.0012493 ] [1.0815187 ] [1.0379884 ] [0.9350775 ] [1.0206163 ] [1.0346749 ] [1.0693823 ] [0.99687165] [1.053825 ] [1.0969213 ] [1.0341741 ] [0.9863242 ] [0.8074394 ] [0.91695946] [0.9882409 ] [1.2041689 ] [0.9309545 ] [0.97377884] [1.0325089 ] [1.0609239 ] [1.0474212 ] [0.90725476] [0.9622678 ] [0.8903818 ] [0.94506645] [1.0754968 ] [0.9835488 ] [1.09958 ] [1.0705239 ] [0.8323709 ] [1.0538067 ] [0.9995316 ] [0.97597647] [0.90781444] [1.05423 ] [1.142351 ] [0.8987456 ] [0.84184116] [1.0414854 ] [1.0772947 ] [0.89279205] [0.92978394] [0.8023318 ] [1.0624422 ] [0.9293095 ] [1.0686215 ] [0.8545602 ] [0.8597012 ] [0.9104484 ] [1.0405781 ] [1.0599035 ] [0.9908244 ] [1.0800561 ] [0.9208864 ] [0.9583124 ] [1.0222737 ] [0.83378696] [1.1287743 ] [0.8904541 ] [0.9906803 ] [0.89896256] [0.8445633 ] [0.8861617 ] [0.93612206] 
[0.86833704] [1.0743037 ] [1.1074289 ] [1.0981337 ] [0.78803307] [1.0297337 ] [1.1465751 ] [0.99641603] [1.0849485 ] [1.081958 ] [0.9437243 ] [1.012107 ] [0.91311747] [0.9531844 ] [1.0548934 ] [1.0514237 ] [1.0686731 ] [0.94069713] [0.9347745 ] [1.0076876 ] [0.92759323] [1.0833178 ] [0.86430085] [1.0324198 ] [0.93825966] [1.0804535 ] [1.0324098 ] [0.79667854] [1.0762016 ] [0.9487769 ] [0.9618807 ] [1.013403 ] [0.92572933] [0.88863784] [1.1570132 ] [1.0725935 ] [0.94103175] [0.952236 ] [1.0466154 ] [1.0203375 ] [1.0933456 ] [1.1064655 ] [1.246785 ] [0.98846287] [1.0443621 ] [1.0085431 ] [1.171402 ] [0.9809389 ] [1.1882828 ] [0.921312 ] [1.120377 ] [1.1201203 ] [1.0047868 ] [0.9780645 ] [1.0102055 ] [0.8240758 ] [0.97102165] [1.0519236 ] [0.9903434 ] [1.0208198 ] [1.0059838 ] [1.0458103 ] [0.9668965 ] [1.0764807 ] [1.0574247 ] [0.940112 ] [0.8858722 ] [0.90217865] [0.9790768 ] [1.1186395 ] [1.0392414 ] [0.95022076] [0.8858493 ] [0.9659215 ] [0.8752331 ] [1.189869 ] [1.0917152 ] [1.0916731 ] [0.9923434 ] [1.1158751 ] [0.9829896 ] [1.2568485 ] [0.98525095] [0.990089 ] [0.89894515] [1.0186071 ] [0.9053635 ] [1.1410099 ] [0.95835537] [1.1695547 ] [0.86649245] [1.2823296 ] [1.0018841 ] [0.8522674 ] [1.0148414 ] [0.8722642 ] [1.0050977 ] [0.86817664] [1.0032959 ] [1.0118802 ] [1.1265944 ] [1.0155905 ] [0.94872814] [0.9437064 ] [0.97449964] [1.0574557 ] [1.1229624 ] [0.9704151 ] [0.9039278 ] [0.97888815] [0.95629734] [0.97341645] [0.90910614] [1.1868773 ] [0.9609114 ] [0.992248 ] [1.151933 ] [1.0491351 ] [1.1203654 ] [0.8929818 ] [0.9460998 ] [0.87642014] [0.9774936 ] [1.1804937 ] [0.88252217] [0.8252416 ] [1.0599461 ] [1.0598125 ] [0.89498246] [0.9934367 ] [1.1175983 ] [0.9255134 ] [1.0102466 ] [0.98568386] [1.1508894 ] [1.1193756 ] [1.0122919 ] [1.0094148 ] [0.9691173 ] [1.0615705 ] [1.0023688 ] [0.88364685] [1.1286023 ] [1.262689 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:3 - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6541.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3]]() %self.unbiased : bool = prim::Constant[value=1]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[1.0866202 ] [0.8546009 ] [1.1930362 ] [0.9937361 ] [0.9973933 ] [0.80967444] [1.0816506 ] [1.0599805 ] [0.95166737] [0.92675316] [0.89930713] [1.0024296 ] [1.072933 ] [1.0222088 ] [0.9699734 ] [1.0049855 ] [0.8835857 ] [0.91000134] [1.150768 ] [1.0811958 ] [1.1185619 ] [1.012411 ] [1.0103489 ] [0.94208306] [0.982834 ] [1.0031742 ] [0.8510975 ] [0.91934973] [1.0084991 ] [1.0262479 ] [0.85660845] [1.0523489 ] [1.1087013 ] [0.9900972 ] [1.0164158 ] [0.969152 ] [0.8962705 ] [0.9124619 ] [0.9196084 ] [0.9500462 ] [0.9517366 ] [1.047816 ] [0.8932416 ] [1.1026838 ] [0.8912941 ] [1.0280038 ] [0.93609023] [1.0994463 ] [1.0186856 ] [1.0622643 ] [0.91539896] [1.0120999 ] [0.98453736] [1.0570695 ] [0.97856545] [0.9544704 ] [0.9508193 ] [1.032602 ] [0.8822072 ] [1.1044412 ] [1.1325542 ] [0.83558047] [1.0281223 ] [1.1572629 ] [0.98884594] [1.0006783 ] [1.0646324 ] [0.95617425] [0.88860196] [1.2016381 ] [1.00418 ] [1.0025115 ] [0.8670276 ] [1.0571505 ] [0.86732274] [1.0059654 ] [1.0254653 ] [0.97345746] [0.93960506] [1.0103633 ] [1.2864184 ] [0.8606697 ] [0.94605243] [1.004455 ] [0.9726746 ] [1.0556386 ] [0.9715676 ] [1.212133 ] [1.1953655 ] [0.9009677 ] [0.93525344] [1.0251727 ] [1.0531472 ] [0.81761813] [1.0082631 ] [0.97916526] [0.9396972 ] [1.15609 ] [0.9440649 ] [0.9555263 ] [0.9393459 ] [1.1113439 ] [1.0450429 ] [0.92532134] [1.0648623 ] [0.9844996 ] [0.9456897 ] [0.922591 ] [1.0593477 ] [1.0349464 ] [0.86766416] [1.0967108 ] [1.2349944 ] [0.9144466 ] [1.0824908 ] [0.9776965 ] [0.8745183 ] [0.84263605] [1.1824948 ] [0.96711624] [0.9634029 ] [1.0122141 ] [0.8890599 ] 
[1.0203664 ] [0.93176556] [0.85581726] [1.0935159 ] [1.1459149 ] [0.9637322 ] [0.9998534 ] [0.84783053] [0.9270753 ] [1.1104277 ] [1.0023998 ] [0.9464511 ] [1.0888838 ] [1.0258483 ] [0.88948 ] [1.1266605 ] [1.128117 ] [1.0378628 ] [1.1974542 ] [1.0135492 ] [0.8394081 ] [0.8829806 ] [0.9302157 ] [1.1357158 ] [1.0482706 ] [0.9338139 ] [0.92732126] [1.1379218 ] [1.0644869 ] [1.1276495 ] [1.039383 ] [0.8149797 ] [0.965461 ] [1.0080084 ] [1.1818913 ] [0.9599505 ] [0.8506666 ] [1.0305614 ] [1.1038262 ] [1.0064067 ] [0.98012173] [0.90181196] [0.9604411 ] [1.0881438 ] [1.032534 ] [0.91028154] [0.7941274 ] [1.1340939 ] [1.1648269 ] [1.0106753 ] [0.929527 ] [1.0549917 ] [1.0175115 ] [0.75260985] [1.1261427 ] [1.0475422 ] [0.9519399 ] [1.0235169 ] [0.84451467] [1.0389959 ] [0.9206424 ] [1.166552 ] [1.0644952 ] [1.0874791 ] [0.9737972 ] [0.95857203] [0.9105192 ] [0.9517546 ] [1.1222924 ] [0.8509397 ] [0.97032225] [0.82115614] [0.8770904 ] [0.9156212 ] [1.0696875 ] [0.9997099 ] [0.95332724] [1.0658798 ] [0.9181498 ] [1.1069769 ] [0.8417122 ] [1.0143775 ] [0.9387841 ] [1.0213968 ] [0.9430513 ] [1.1581274 ] [0.8826441 ] [0.9774522 ] [1.0399628 ] [0.9365424 ] [1.0021178 ] [0.9632809 ] [1.0750245 ] [0.9752954 ] [0.8879867 ] [0.8270877 ] [1.1310066 ] [0.9416916 ] [0.969846 ] [1.155276 ] [0.91486 ]] [[0.9093922 ] [1.0751462 ] [1.0301778 ] [1.0206182 ] [0.9383009 ] [1.0261188 ] [0.9864507 ] [0.8998774 ] [1.0661232 ] [0.80745906] [1.0623791 ] [0.96255106] [0.8657958 ] [0.94385344] [1.0429633 ] [0.8686307 ] [0.9296778 ] [0.9039234 ] [0.94559664] [0.92972463] [1.1958501 ] [1.0872164 ] [1.0006232 ] [1.0099276 ] [0.9156327 ] [0.9111636 ] [0.93918526] [0.88475275] [0.998148 ] [0.8310474 ] [1.0231599 ] [0.92559725] [0.9114088 ] [0.9795862 ] [1.2075315 ] [1.2526985 ] [0.9393435 ] [1.0045367 ] [1.0534436 ] [1.0215396 ] [1.0562253 ] [1.0103555 ] [1.0001117 ] [0.9076266 ] [1.0054654 ] [0.86898816] [0.96335316] [0.95871615] [0.8149808 ] [1.1501273 ] [0.9023544 ] [1.0212785 ] [1.119754 ] 
[0.9516691 ] [1.0361222 ] [1.066683 ] [1.017555 ] [0.9565939 ] [0.87597483] [0.87589854] [0.95786387] [0.96322864] [1.0402343 ] [1.0126029 ] [1.0545803 ] [0.88511294] [1.028423 ] [0.94114655] [0.98713833] [0.8422625 ] [1.029631 ] [0.98724985] [0.963073 ] [0.9794051 ] [0.91126466] [1.107229 ] [1.0301011 ] [0.9365518 ] [0.97208506] [0.9512209 ] [0.9612916 ] [1.0497919 ] [0.99336565] [1.2951969 ] [1.1511416 ] [1.0449141 ] [0.9593876 ] [1.1913047 ] [0.7666712 ] [1.016725 ] [1.0066137 ] [0.99936134] [0.92305994] [1.0177486 ] [0.90538156] [0.97963214] [0.8723549 ] [0.93529606] [0.9698396 ] [0.95229095] [0.8691311 ] [1.0219456 ] [1.1354285 ] [0.8043064 ] [1.039117 ] [1.0914859 ] [1.0506057 ] [1.0125358 ] [0.9463753 ] [0.8375604 ] [0.7878305 ] [0.90666157] [1.0937394 ] [1.0419011 ] [0.95107585] [1.1763781 ] [1.0056626 ] [1.078431 ] [0.9576452 ] [1.0735916 ] [0.95910084] [1.0274783 ] [1.0213405 ] [0.94173455] [1.1325915 ] [1.0040765 ] [0.81772065] [0.9793069 ] [0.97241503] [0.84296113] [1.0122249 ] [1.0234082 ] [1.0526562 ] [0.9962421 ] [0.9833599 ] [1.139817 ] [1.0296153 ] [0.8805886 ] [1.0663391 ] [1.1210612 ] [1.1607099 ] [1.0719979 ] [1.0207083 ] [1.0345246 ] [1.1018203 ] [1.0204653 ] [0.9175415 ] [0.9372855 ] [1.0455501 ] [1.0239934 ] [0.90255606] [1.2018294 ] [1.078937 ] [0.9531087 ] [1.0725497 ] [0.95185584] [1.1289696 ] [1.0177 ] [0.9911789 ] [0.88905776] [1.0289564 ] [1.0499518 ] [0.92084926] [1.0348955 ] [0.92162985] [1.1377043 ] [1.1695348 ] [0.93640125] [1.0225372 ] [0.88648987] [1.1207597 ] [0.86255383] [0.89367455] [0.8557315 ] [0.94660175] [1.0237166 ] [1.0361335 ] [0.95134157] [1.0228266 ] [0.9355164 ] [0.99138254] [1.0399194 ] [0.88944346] [1.0071588 ] [0.9866999 ] [0.98965484] [1.0439162 ] [0.88380826] [0.97473854] [0.95791095] [1.0305921 ] [0.9555284 ] [0.9401268 ] [0.90019685] [0.8450092 ] [0.97016317] [0.97568065] [0.86394423] [1.0156397 ] [1.0504935 ] [0.9093129 ] [0.9496322 ] [1.0009744 ] [1.0501254 ] [1.0338691 ] [1.0712487 ] [0.9927569 ] [1.1307731 
] [1.1119822 ] [1.0918051 ] [0.9456858 ] [1.0000044 ] [0.95263314] [0.8953067 ] [1.1040074 ] [1.020076 ] [0.88990414] [1.0445241 ] [0.92569417] [0.9443069 ] [0.9083082 ] [1.0680262 ] [1.0761687 ] [0.79763895]] [[1.0171525 ] [0.95231044] [0.89909744] [0.96361566] [0.9032449 ] [0.8364532 ] [0.9074301 ] [1.0064312 ] [0.82028925] [1.1391 ] [0.9643177 ] [1.0591038 ] [0.9725285 ] [0.88023883] [1.0623115 ] [1.0073832 ] [0.83871776] [0.8076792 ] [0.87787634] [0.80578077] [1.0999653 ] [1.1063448 ] [0.85342735] [0.944026 ] [0.8371667 ] [1.0386969 ] [0.88030624] [0.95296305] [0.90607893] [1.0355554 ] [1.1145743 ] [0.9885803 ] [0.97273415] [1.0648198 ] [1.1010504 ] [0.8682346 ] [0.96271497] [0.8971675 ] [0.94689345] [1.0016423 ] [1.0092719 ] [0.98466265] [1.1076989 ] [0.8508145 ] [0.89152753] [0.95514953] [1.029874 ] [1.1298593 ] [0.95973444] [0.8592158 ] [0.95600855] [0.9345695 ] [0.87547624] [1.1299843 ] [0.8479862 ] [0.81237924] [0.894458 ] [1.0222936 ] [1.1020844 ] [1.1016268 ] [0.91360897] [1.0155197 ] [1.0258541 ] [1.1541307 ] [0.89948916] [1.0560654 ] [0.9182248 ] [1.0126517 ] [1.122859 ] [1.0386416 ] [1.0896438 ] [1.1949549 ] [1.0898046 ] [0.93796843] [0.99188024] [0.9341622 ] [1.0901074 ] [1.0819066 ] [0.8982775 ] [0.9485856 ] [1.087909 ] [1.0919988 ] [1.2123744 ] [1.1612623 ] [1.0437686 ] [0.990538 ] [0.98224604] [0.9944233 ] [0.9296073 ] [0.9658949 ] [0.9697473 ] [0.8412457 ] [0.9876686 ] [1.0551276 ] [1.0748233 ] [1.002259 ] [0.94992024] [0.9230479 ] [1.0122981 ] [0.9652628 ] [0.94257873] [1.0523144 ] [1.1823177 ] [0.8801476 ] [1.1622268 ] [0.9769984 ] [0.9221243 ] [1.0107062 ] [1.0142672 ] [0.9349194 ] [1.1346508 ] [1.1006337 ] [0.9677955 ] [0.93999946] [1.0860591 ] [0.89109826] [0.9098222 ] [1.176082 ] [0.8970941 ] [1.1861286 ] [1.07043 ] [1.0784218 ] [1.092696 ] [0.9577643 ] [0.76672477] [0.89606774] [0.9437183 ] [1.1090324 ] [0.9873272 ] [0.9050059 ] [1.1047416 ] [1.1074418 ] [0.8880767 ] [1.1799858 ] [0.9347685 ] [0.83770585] [1.0854968 ] [1.0426438 ] 
[0.89730746] [1.0359323 ] [1.0933305 ] [0.927054 ] [1.0953168 ] [0.9774981 ] [0.80283445] [1.0566485 ] [1.1831899 ] [0.9609031 ] [1.2360942 ] [0.97794306] [1.0895606 ] [1.0084494 ] [0.8359415 ] [0.8447087 ] [1.1275228 ] [1.1516281 ] [1.0693773 ] [1.2343864 ] [1.1021891 ] [1.1894376 ] [0.8927681 ] [1.0092832 ] [1.0214442 ] [1.1161487 ] [0.9784417 ] [0.95489717] [0.82964426] [0.9615644 ] [0.9280068 ] [1.008737 ] [0.98013306] [0.84475 ] [0.99125993] [0.9634868 ] [1.2132442 ] [1.2051347 ] [0.94999796] [0.9936438 ] [1.0694258 ] [0.8853918 ] [0.96979725] [0.9256585 ] [0.901079 ] [0.90876454] [1.0643367 ] [1.0284139 ] [1.0303607 ] [0.9131033 ] [0.8976178 ] [1.0509933 ] [1.0386847 ] [0.9864241 ] [0.85633445] [0.8880655 ] [0.95028794] [1.0628093 ] [0.80011964] [1.0566765 ] [1.0652406 ] [1.0173962 ] [1.0982234 ] [1.0202253 ] [0.9067526 ] [0.93170905] [0.9818411 ] [0.8752355 ] [0.9760781 ] [0.98247653] [1.1674834 ] [1.126448 ] [1.0397358 ] [0.9524108 ] [0.94182974] [1.1624309 ] [1.0518398 ] [0.9992346 ] [1.0404702 ] [0.7818639 ] [0.8782002 ] [0.9620334 ] [1.2677323 ] [0.8731434 ] [0.8870001 ] [1.1554148 ]]]]; ov_res: [[[[1.0866203 ] [0.85460097] [1.1930362 ] [0.99373615] [0.9973933 ] [0.8096745 ] [1.0816505 ] [1.0599805 ] [0.9516674 ] [0.9267532 ] [0.89930713] [1.0024296 ] [1.072933 ] [1.0222087 ] [0.9699735 ] [1.0049855 ] [0.8835857 ] [0.9100014 ] [1.1507682 ] [1.0811958 ] [1.1185621 ] [1.012411 ] [1.010349 ] [0.9420832 ] [0.982834 ] [1.0031743 ] [0.8510975 ] [0.91934973] [1.0084991 ] [1.0262479 ] [0.8566085 ] [1.0523491 ] [1.1087015 ] [0.9900973 ] [1.016416 ] [0.969152 ] [0.8962705 ] [0.91246194] [0.9196085 ] [0.9500462 ] [0.9517367 ] [1.0478162 ] [0.89324164] [1.1026839 ] [0.8912942 ] [1.0280038 ] [0.93609035] [1.0994463 ] [1.0186857 ] [1.0622644 ] [0.915399 ] [1.0120997 ] [0.98453736] [1.0570697 ] [0.9785655 ] [0.9544704 ] [0.9508194 ] [1.0326018 ] [0.8822073 ] [1.1044412 ] [1.1325544 ] [0.8355805 ] [1.0281224 ] [1.157263 ] [0.988846 ] [1.0006783 ] [1.0646324 ] [0.9561743 
] [0.888602 ] [1.2016382 ] [1.00418 ] [1.0025115 ] [0.86702764] [1.0571506 ] [0.86732274] [1.0059655 ] [1.0254653 ] [0.9734575 ] [0.93960506] [1.0103635 ] [1.2864184 ] [0.86066973] [0.94605243] [1.004455 ] [0.9726746 ] [1.0556387 ] [0.97156763] [1.212133 ] [1.1953655 ] [0.9009678 ] [0.93525344] [1.0251726 ] [1.0531472 ] [0.8176182 ] [1.0082632 ] [0.9791653 ] [0.93969727] [1.15609 ] [0.94406503] [0.95552635] [0.9393459 ] [1.1113439 ] [1.0450429 ] [0.92532134] [1.0648624 ] [0.9844996 ] [0.9456897 ] [0.92259103] [1.0593477 ] [1.0349466 ] [0.8676642 ] [1.0967109 ] [1.2349944 ] [0.9144466 ] [1.0824909 ] [0.9776965 ] [0.87451833] [0.8426361 ] [1.1824948 ] [0.96711624] [0.963403 ] [1.0122141 ] [0.8890599 ] [1.0203665 ] [0.9317656 ] [0.85581726] [1.093516 ] [1.145915 ] [0.96373224] [0.9998535 ] [0.84783065] [0.9270753 ] [1.1104277 ] [1.0023998 ] [0.9464511 ] [1.0888838 ] [1.0258483 ] [0.88948 ] [1.1266605 ] [1.128117 ] [1.0378629 ] [1.1974542 ] [1.0135493 ] [0.8394082 ] [0.8829806 ] [0.9302158 ] [1.1357158 ] [1.0482707 ] [0.9338139 ] [0.92732126] [1.1379218 ] [1.0644869 ] [1.1276497 ] [1.0393832 ] [0.8149797 ] [0.9654611 ] [1.0080084 ] [1.1818914 ] [0.95995057] [0.8506666 ] [1.0305614 ] [1.1038263 ] [1.0064067 ] [0.98012173] [0.901812 ] [0.9604411 ] [1.0881438 ] [1.0325341 ] [0.9102816 ] [0.7941274 ] [1.1340939 ] [1.1648269 ] [1.0106754 ] [0.92952704] [1.0549917 ] [1.0175115 ] [0.75260985] [1.1261429 ] [1.0475422 ] [0.95193994] [1.0235169 ] [0.8445148 ] [1.0389959 ] [0.92064244] [1.166552 ] [1.0644953 ] [1.087479 ] [0.9737972 ] [0.9585721 ] [0.91051924] [0.95175475] [1.1222924 ] [0.85093975] [0.9703223 ] [0.82115614] [0.87709033] [0.9156213 ] [1.0696876 ] [0.9997099 ] [0.95332724] [1.06588 ] [0.9181499 ] [1.106977 ] [0.84171224] [1.0143775 ] [0.9387841 ] [1.0213968 ] [0.9430513 ] [1.1581274 ] [0.8826442 ] [0.97745234] [1.0399628 ] [0.9365425 ] [1.0021178 ] [0.963281 ] [1.0750246 ] [0.9752954 ] [0.8879868 ] [0.8270877 ] [1.1310066 ] [0.94169164] [0.96984607] [1.155276 ] 
[0.91486007]] [[0.9093922 ] [1.0751463 ] [1.0301778 ] [1.0206183 ] [0.938301 ] [1.0261189 ] [0.98645073] [0.8998775 ] [1.0661232 ] [0.8074591 ] [1.0623792 ] [0.96255106] [0.8657959 ] [0.9438535 ] [1.0429634 ] [0.86863077] [0.92967784] [0.90392345] [0.94559664] [0.92972463] [1.1958501 ] [1.0872165 ] [1.0006232 ] [1.0099277 ] [0.9156328 ] [0.9111636 ] [0.93918526] [0.8847528 ] [0.9981481 ] [0.8310474 ] [1.02316 ] [0.92559725] [0.91140884] [0.97958624] [1.2075315 ] [1.2526987 ] [0.9393436 ] [1.0045367 ] [1.0534436 ] [1.0215396 ] [1.0562254 ] [1.0103555 ] [1.0001117 ] [0.9076266 ] [1.0054654 ] [0.8689882 ] [0.96335316] [0.9587162 ] [0.8149808 ] [1.1501274 ] [0.9023545 ] [1.0212785 ] [1.119754 ] [0.9516691 ] [1.0361223 ] [1.0666832 ] [1.017555 ] [0.95659393] [0.8759749 ] [0.8758986 ] [0.95786387] [0.96322864] [1.0402344 ] [1.012603 ] [1.0545805 ] [0.885113 ] [1.028423 ] [0.9411466 ] [0.98713833] [0.84226257] [1.0296311 ] [0.98724985] [0.9630731 ] [0.97940505] [0.9112648 ] [1.107229 ] [1.0301012 ] [0.93655187] [0.97208506] [0.951221 ] [0.96129173] [1.0497919 ] [0.9933657 ] [1.295197 ] [1.1511418 ] [1.044914 ] [0.95938766] [1.1913047 ] [0.76667124] [1.016725 ] [1.0066139 ] [0.99936146] [0.92305994] [1.0177487 ] [0.9053816 ] [0.97963226] [0.872355 ] [0.93529606] [0.96983963] [0.952291 ] [0.86913115] [1.0219457 ] [1.1354285 ] [0.8043064 ] [1.039117 ] [1.091486 ] [1.0506058 ] [1.0125359 ] [0.9463753 ] [0.8375605 ] [0.7878305 ] [0.9066616 ] [1.0937395 ] [1.0419012 ] [0.95107585] [1.1763783 ] [1.0056628 ] [1.0784311 ] [0.95764524] [1.0735916 ] [0.9591009 ] [1.0274783 ] [1.0213405 ] [0.9417346 ] [1.1325915 ] [1.0040766 ] [0.8177207 ] [0.97930694] [0.97241503] [0.8429612 ] [1.0122249 ] [1.0234083 ] [1.0526562 ] [0.9962422 ] [0.9833599 ] [1.139817 ] [1.0296154 ] [0.8805887 ] [1.0663393 ] [1.1210612 ] [1.16071 ] [1.071998 ] [1.0207083 ] [1.0345246 ] [1.1018205 ] [1.0204653 ] [0.91754156] [0.9372856 ] [1.0455501 ] [1.0239934 ] [0.9025561 ] [1.2018293 ] [1.0789372 ] [0.9531088 ] 
[1.0725498 ] [0.9518559 ] [1.1289694 ] [1.0177001 ] [0.9911789 ] [0.88905776] [1.0289564 ] [1.0499519 ] [0.9208494 ] [1.0348958 ] [0.92162997] [1.1377044 ] [1.1695349 ] [0.93640125] [1.0225372 ] [0.8864899 ] [1.1207597 ] [0.8625539 ] [0.8936746 ] [0.85573155] [0.9466018 ] [1.0237167 ] [1.0361335 ] [0.9513416 ] [1.0228266 ] [0.9355164 ] [0.9913826 ] [1.0399193 ] [0.8894436 ] [1.0071586 ] [0.9866999 ] [0.98965496] [1.0439162 ] [0.8838083 ] [0.9747386 ] [0.95791095] [1.0305922 ] [0.95552844] [0.94012684] [0.9001969 ] [0.8450092 ] [0.9701632 ] [0.9756807 ] [0.8639443 ] [1.0156398 ] [1.0504935 ] [0.90931296] [0.9496323 ] [1.0009744 ] [1.0501254 ] [1.0338693 ] [1.0712487 ] [0.99275696] [1.1307732 ] [1.1119822 ] [1.0918051 ] [0.94568586] [1.0000044 ] [0.9526332 ] [0.8953067 ] [1.1040075 ] [1.0200762 ] [0.8899042 ] [1.0445242 ] [0.9256943 ] [0.9443069 ] [0.90830827] [1.0680262 ] [1.0761688 ] [0.797639 ]] [[1.0171527 ] [0.9523105 ] [0.89909744] [0.9636157 ] [0.903245 ] [0.8364532 ] [0.9074302 ] [1.0064312 ] [0.82028925] [1.1391001 ] [0.96431774] [1.0591038 ] [0.97252864] [0.88023895] [1.0623116 ] [1.0073832 ] [0.8387178 ] [0.8076792 ] [0.87787634] [0.8057808 ] [1.0999653 ] [1.1063449 ] [0.8534274 ] [0.944026 ] [0.8371667 ] [1.038697 ] [0.88030624] [0.9529631 ] [0.90607893] [1.0355554 ] [1.1145743 ] [0.98858035] [0.97273415] [1.0648199 ] [1.1010504 ] [0.8682346 ] [0.962715 ] [0.89716756] [0.9468935 ] [1.0016423 ] [1.009272 ] [0.9846627 ] [1.1076989 ] [0.8508145 ] [0.8915276 ] [0.9551496 ] [1.029874 ] [1.1298594 ] [0.9597345 ] [0.8592159 ] [0.9560086 ] [0.9345695 ] [0.87547636] [1.1299843 ] [0.8479863 ] [0.81237924] [0.89445806] [1.0222936 ] [1.1020844 ] [1.1016268 ] [0.913609 ] [1.0155199 ] [1.0258542 ] [1.1541308 ] [0.8994892 ] [1.0560654 ] [0.9182249 ] [1.0126517 ] [1.122859 ] [1.0386417 ] [1.089644 ] [1.194955 ] [1.0898048 ] [0.9379685 ] [0.9918803 ] [0.93416226] [1.0901073 ] [1.0819067 ] [0.8982776 ] [0.9485857 ] [1.0879091 ] [1.0919988 ] [1.2123744 ] [1.1612623 ] 
[1.0437686 ] [0.9905381 ] [0.9822461 ] [0.9944234 ] [0.92960733] [0.96589494] [0.9697474 ] [0.84124583] [0.9876686 ] [1.0551276 ] [1.0748234 ] [1.0022591 ] [0.9499203 ] [0.92304796] [1.0122982 ] [0.96526283] [0.9425787 ] [1.0523143 ] [1.1823179 ] [0.88014764] [1.162227 ] [0.9769984 ] [0.9221244 ] [1.0107063 ] [1.0142673 ] [0.9349195 ] [1.1346508 ] [1.1006337 ] [0.9677956 ] [0.93999946] [1.0860593 ] [0.89109826] [0.9098222 ] [1.176082 ] [0.89709413] [1.1861287 ] [1.07043 ] [1.0784221 ] [1.0926961 ] [0.9577644 ] [0.76672477] [0.8960678 ] [0.9437183 ] [1.1090324 ] [0.9873273 ] [0.90500593] [1.1047417 ] [1.1074418 ] [0.8880768 ] [1.1799859 ] [0.9347685 ] [0.8377059 ] [1.0854967 ] [1.0426437 ] [0.8973075 ] [1.0359323 ] [1.0933306 ] [0.92705405] [1.0953169 ] [0.9774981 ] [0.80283445] [1.0566485 ] [1.1831899 ] [0.9609031 ] [1.2360944 ] [0.9779431 ] [1.0895606 ] [1.0084494 ] [0.8359415 ] [0.8447087 ] [1.127523 ] [1.1516281 ] [1.0693773 ] [1.2343866 ] [1.1021893 ] [1.1894377 ] [0.89276814] [1.0092832 ] [1.0214442 ] [1.1161487 ] [0.9784417 ] [0.95489717] [0.82964426] [0.9615645 ] [0.9280068 ] [1.008737 ] [0.9801331 ] [0.84475 ] [0.99126 ] [0.9634868 ] [1.2132443 ] [1.2051349 ] [0.949998 ] [0.9936439 ] [1.069426 ] [0.88539183] [0.9697973 ] [0.9256585 ] [0.901079 ] [0.90876466] [1.0643368 ] [1.028414 ] [1.0303608 ] [0.91310334] [0.89761794] [1.0509933 ] [1.0386847 ] [0.9864241 ] [0.8563345 ] [0.8880655 ] [0.950288 ] [1.0628095 ] [0.8001197 ] [1.0566765 ] [1.0652407 ] [1.0173963 ] [1.0982234 ] [1.0202253 ] [0.9067527 ] [0.9317091 ] [0.98184115] [0.8752355 ] [0.9760781 ] [0.98247653] [1.1674836 ] [1.126448 ] [1.0397359 ] [0.9524108 ] [0.94182974] [1.1624309 ] [1.05184 ] [0.9992347 ] [1.0404704 ] [0.7818639 ] [0.87820023] [0.96203345] [1.2677324 ] [0.8731434 ] [0.88700014] [1.1554148 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:-1 - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6543.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1]]() %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[0.9441417 ] [0.9305516 ] [1.0059272 ] [0.9568731 ] [0.9031275 ] [1.0171173 ] [0.81412226] [1.1145588 ] [0.8229247 ] [1.0227602 ] [0.92953515] [1.2514465 ] [1.0546372 ] [1.1577171 ] [0.8961135 ] [1.0059267 ] [1.1397673 ] [0.98774385] [0.99138045] [0.82167125] [0.9297945 ] [0.94313616] [1.0214747 ] [0.95831776] [1.0207237 ] [0.8871011 ] [1.0286887 ] [0.9794096 ] [0.8790074 ] [0.8994218 ] [1.0161959 ] [1.1568149 ] [1.0659662 ] [0.9344188 ] [0.90812653] [1.0610713 ] [1.0335072 ] [1.0521227 ] [1.2124097 ] [1.3086289 ] [1.0371062 ] [0.9107677 ] [1.1451752 ] [0.9074495 ] [1.0413529 ] [1.0587788 ] [0.91068673] [0.9472133 ] [1.0715533 ] [0.901255 ] [0.8696288 ] [0.9659476 ] [0.9765714 ] [1.0323535 ] [1.031285 ] [0.92613155] [0.9878691 ] [1.0507873 ] [0.92513025] [0.9366966 ] [0.81937015] [1.0471811 ] [1.0508373 ] [0.9125878 ] [1.061402 ] [0.8818584 ] [1.2431482 ] [1.0456235 ] [0.92827815] [0.98364294] [1.1075417 ] [1.0805252 ] [0.9026717 ] [1.0474381 ] [1.0353574 ] [0.8042466 ] [1.0497769 ] [0.91155297] [1.0065334 ] [0.9354177 ] [0.98431426] [1.1229883 ] [0.9369647 ] [0.95442545] [1.1107619 ] [0.878562 ] [1.1317394 ] [1.0294573 ] [0.88613564] [1.060704 ] [0.9379435 ] [0.93796283] [1.0039054 ] [1.1040772 ] [0.9410663 ] [1.0981262 ] [0.8213957 ] [1.0191988 ] [1.0869862 ] [0.98274076] [0.93073916] [0.9529176 ] [0.84053916] [0.9580542 ] [0.84104604] [1.0511622 ] [1.0421116 ] [0.92332757] [0.88993883] [1.0454779 ] [1.219526 ] [1.0629703 ] [1.003574 ] [0.91055983] [1.0420625 ] [1.1320531 ] [1.0125917 ] [1.0034715 ] 
[0.9047599 ] [0.9493429 ] [1.0335964 ] [0.8414767 ] [0.93863326] [0.8907291 ] [1.0002686 ] [1.0217303 ] [1.058295 ] [1.0113027 ] [0.8820885 ] [0.8244613 ] [0.92215943] [1.04016 ] [1.09566 ] [0.92958164] [0.92010164] [0.9749749 ] [0.96309876] [0.9630597 ] [0.90708447] [0.7807246 ] [0.9976828 ] [0.9972298 ] [0.9634817 ] [1.0312647 ] [0.9926016 ] [1.0361139 ] [0.8600939 ] [1.0668375 ] [1.0731392 ] [1.0002947 ] [0.92669207] [1.0281122 ] [0.7875656 ] [0.9001116 ] [1.0372919 ] [1.0058872 ] [1.023592 ] [1.2185007 ] [1.06482 ] [0.9015091 ] [1.073678 ] [1.001944 ] [1.0483737 ] [0.9997548 ] [0.94281834] [0.9138 ] [1.0372081 ] [0.98542845] [0.9784235 ] [0.83087295] [0.8685006 ] [1.0612875 ] [0.9608329 ] [1.0347677 ] [0.9575109 ] [1.1037031 ] [1.0633184 ] [1.0935105 ] [0.9540448 ] [0.9090972 ] [1.0357647 ] [1.0306114 ] [1.1444205 ] [0.9978356 ] [1.0289208 ] [0.9948893 ] [0.9436761 ] [1.1140828 ] [1.1507268 ] [0.9703277 ] [1.0891513 ] [1.1414689 ] [0.8546253 ] [0.9971438 ] [1.0154641 ] [0.87698287] [0.9001858 ] [1.0183325 ] [1.1424898 ] [0.95018035] [0.97383326] [1.0955384 ] [0.9474758 ] [1.0688043 ] [0.8124348 ] [0.89884025] [1.072939 ] [1.0902535 ] [1.0914425 ] [0.98444253] [1.0656275 ] [1.0347215 ] [0.8690389 ] [1.0176797 ] [1.0794377 ] [1.1522546 ] [0.9780863 ] [0.9572033 ] [1.0214412 ] [0.94635284] [1.1650958 ] [0.9353522 ] [1.0954449 ] [0.9926499 ]] [[1.00803 ] [0.9271284 ] [0.90761477] [1.0521148 ] [0.932229 ] [0.84645 ] [0.9337419 ] [0.8966817 ] [1.01797 ] [1.1002911 ] [1.1527735 ] [1.2290325 ] [0.8812566 ] [0.93032694] [1.0478941 ] [0.8354024 ] [0.9416434 ] [0.94052386] [1.0269889 ] [0.9979697 ] [1.0758837 ] [1.089708 ] [1.1727678 ] [1.0412072 ] [0.9792043 ] [0.9081701 ] [1.2374291 ] [0.96768796] [1.0729946 ] [1.1451416 ] [1.0752989 ] [0.8786079 ] [0.9304743 ] [1.1688479 ] [1.0318829 ] [0.991083 ] [0.99098104] [0.8004618 ] [1.0192577 ] [0.99948883] [0.94576275] [1.0402389 ] [0.945381 ] [0.8604776 ] [0.9465374 ] [1.0582955 ] [1.068892 ] [0.9687772 ] [0.9366092 ] 
[0.8029207 ] [0.951009 ] [1.1397145 ] [0.91586196] [1.0405852 ] [0.96453124] [0.9073234 ] [1.0878762 ] [0.9499132 ] [0.7739753 ] [0.8911941 ] [0.90189016] [0.885518 ] [0.97064924] [0.9237776 ] [0.94519436] [1.0354897 ] [0.86327255] [1.0730476 ] [1.0552768 ] [0.9539301 ] [1.176675 ] [1.1926445 ] [0.90451044] [0.96824 ] [1.0699458 ] [1.0065602 ] [0.79578805] [0.84218776] [0.81627893] [0.85352266] [0.97026473] [1.0064908 ] [0.97005004] [1.0925004 ] [1.0087162 ] [0.97161925] [1.220794 ] [0.9326674 ] [0.9194791 ] [1.0161852 ] [1.0492778 ] [0.88359165] [0.9876738 ] [0.97660637] [0.9697771 ] [0.8844338 ] [0.9922027 ] [1.0877591 ] [1.1047444 ] [0.94370073] [0.99302423] [1.0165973 ] [0.92273575] [1.0980393 ] [0.9327568 ] [1.0368816 ] [1.1670635 ] [1.0054036 ] [0.9355591 ] [1.0784839 ] [1.0241549 ] [0.99767053] [0.9074115 ] [0.9450265 ] [0.9540833 ] [0.9720422 ] [1.0072263 ] [0.95518357] [0.93592376] [1.1890355 ] [0.94945633] [1.0676734 ] [1.0938159 ] [0.906381 ] [0.907576 ] [0.96766376] [1.1215397 ] [1.030298 ] [1.0269107 ] [0.89165366] [0.8829139 ] [1.0069343 ] [1.0962946 ] [1.0651621 ] [1.1386507 ] [0.9667164 ] [1.0786401 ] [0.76589304] [0.92650306] [1.0664467 ] [0.89236844] [1.0077273 ] [0.9301383 ] [0.92815447] [1.0621122 ] [1.0630229 ] [1.1018163 ] [0.7964377 ] [0.9037388 ] [0.902414 ] [0.9415566 ] [0.9568752 ] [1.0306687 ] [1.0863767 ] [1.0857939 ] [0.9480812 ] [0.9343941 ] [1.1255542 ] [0.951124 ] [1.1373168 ] [1.1433287 ] [0.9570844 ] [1.1251062 ] [0.9780684 ] [1.1749247 ] [1.0239168 ] [0.9565888 ] [0.94305605] [0.97994405] [0.920527 ] [1.2088284 ] [0.91300404] [1.124681 ] [1.052532 ] [0.88544464] [0.99966234] [0.9161347 ] [1.1074983 ] [1.0854388 ] [0.9452695 ] [0.9635479 ] [1.1166502 ] [0.93334526] [1.0672227 ] [1.0916507 ] [0.9926392 ] [1.0007513 ] [0.943951 ] [0.9346974 ] [0.9268409 ] [0.97130156] [0.9996055 ] [0.74692214] [1.0353465 ] [1.0387443 ] [1.0326562 ] [1.0210143 ] [1.1483761 ] [0.9471842 ] [0.959612 ] [0.92292964] [0.8699058 ] [1.1199951 ] [1.1710796 ] 
[0.91445994] [0.9301193 ] [0.94250894] [1.0853807 ] [1.0083096 ] [1.0668948 ] [1.1283405 ] [0.89638597] [1.0308146 ] [1.1617731 ] [0.9141651 ] [0.8627875 ] [0.88419884] [0.98313266] [1.0533537 ] [0.9876543 ] [1.1500753 ] [1.1405095 ] [1.0497979 ] [1.0425068 ]] [[0.8741286 ] [1.0447025 ] [1.0750829 ] [0.88869673] [1.0347273 ] [0.9230828 ] [1.0444003 ] [0.7946216 ] [1.0347928 ] [0.91370773] [1.0869398 ] [0.95400006] [1.0422175 ] [1.0938888 ] [0.90906525] [0.9943665 ] [0.9412555 ] [1.061388 ] [0.99223065] [1.1784765 ] [0.84445137] [1.0779825 ] [0.96104723] [1.070569 ] [1.0011823 ] [0.9662943 ] [1.1422508 ] [0.91236347] [0.76375943] [0.95679545] [1.2895461 ] [0.8956659 ] [1.0242299 ] [1.0702914 ] [0.83133006] [1.1798047 ] [0.99969333] [0.9454424 ] [1.0260438 ] [0.96592134] [0.84975046] [1.091286 ] [1.1347582 ] [0.934061 ] [0.9770746 ] [0.9702998 ] [0.97252923] [1.0163373 ] [0.84628004] [1.0608529 ] [1.064695 ] [0.9724833 ] [0.97461194] [1.1624442 ] [0.9768453 ] [0.97710514] [0.82572216] [0.9614196 ] [0.99301463] [0.95827657] [0.8753416 ] [0.90114945] [0.9700172 ] [0.9630656 ] [0.99072874] [0.84684944] [0.9838518 ] [1.0440025 ] [0.9627218 ] [0.8564387 ] [1.0052365 ] [1.0730702 ] [1.0413964 ] [1.1976582 ] [0.94349366] [1.0073304 ] [1.020407 ] [1.0444764 ] [0.93359435] [1.0286393 ] [0.96208394] [0.899244 ] [1.1455283 ] [0.97360384] [1.0881656 ] [0.9894694 ] [0.9611585 ] [0.9960384 ] [1.165741 ] [1.1386833 ] [0.78914034] [0.9178748 ] [1.2679273 ] [1.0998259 ] [1.2629228 ] [1.0439459 ] [0.9381271 ] [0.9248027 ] [0.8171164 ] [1.0321866 ] [0.8876189 ] [1.0826483 ] [0.9123691 ] [1.0158256 ] [1.0141937 ] [0.8948854 ] [1.1440614 ] [1.0615487 ] [0.95555395] [1.0985445 ] [1.1786926 ] [0.85176617] [1.0726628 ] [1.0039735 ] [1.1782347 ] [1.0698472 ] [0.9121011 ] [1.0100881 ] [0.8847236 ] [0.97016597] [1.0010363 ] [1.1388586 ] [1.0575193 ] [0.97838616] [1.0201937 ] [0.9189269 ] [0.96722275] [1.0559311 ] [1.0900638 ] [1.0681782 ] [0.84373116] [0.9403226 ] [1.1016641 ] [1.0349351 ] 
[1.038507 ] [0.8771557 ] [1.2743924 ] [1.0155628 ] [1.0690955 ] [1.0487567 ] [1.1201415 ] [0.87399733] [1.0941017 ] [1.0209768 ] [1.2214867 ] [1.0601674 ] [0.9454403 ] [1.1877301 ] [1.1025376 ] [0.93649375] [0.94912237] [1.0008327 ] [1.0186174 ] [0.92516 ] [1.0797735 ] [1.1022811 ] [0.96562326] [0.9821603 ] [1.0499389 ] [1.1279664 ] [1.0609703 ] [0.93158793] [1.0710815 ] [0.9967744 ] [0.7564447 ] [0.9651219 ] [1.0369782 ] [0.98632056] [1.081054 ] [1.071492 ] [1.0587156 ] [1.0743773 ] [1.1142166 ] [1.0537252 ] [0.9308827 ] [0.96618235] [1.0182322 ] [0.87942725] [0.96015155] [0.9748591 ] [0.88133687] [1.143382 ] [1.0334774 ] [1.0787178 ] [0.92704666] [1.0795013 ] [1.1266767 ] [0.8966793 ] [1.0343025 ] [1.0068837 ] [1.091027 ] [0.93954223] [0.93238354] [0.92276734] [1.0188149 ] [0.8854401 ] [0.740209 ] [0.8979909 ] [0.9005611 ] [0.86538494] [0.9651225 ] [0.94396144] [1.0212579 ] [0.7436383 ] [1.0004413 ] [1.1079681 ] [1.0237406 ] [1.0965493 ] [0.89904594] [0.93420106] [0.89660907] [1.0796041 ] [1.124121 ] [1.1175741 ] [0.9382186 ] [0.93159574] [1.0204356 ] [1.1853733 ] [0.9572086 ] [1.1432766 ] [1.0095847 ] [1.1157267 ] [1.1306998 ] [1.0255146 ]]]]; ov_res: [[[[0.9441417 ] [0.9305516 ] [1.0059272 ] [0.9568731 ] [0.90312755] [1.0171173 ] [0.81412226] [1.1145588 ] [0.8229247 ] [1.0227603 ] [0.9295352 ] [1.2514465 ] [1.0546371 ] [1.157717 ] [0.89611346] [1.0059267 ] [1.1397673 ] [0.9877438 ] [0.9913804 ] [0.82167137] [0.92979443] [0.94313616] [1.0214746 ] [0.95831776] [1.0207237 ] [0.8871012 ] [1.0286887 ] [0.97940964] [0.8790074 ] [0.8994218 ] [1.0161959 ] [1.1568149 ] [1.0659662 ] [0.9344188 ] [0.90812653] [1.0610713 ] [1.0335072 ] [1.0521227 ] [1.2124099 ] [1.3086289 ] [1.0371062 ] [0.9107677 ] [1.1451751 ] [0.9074494 ] [1.0413529 ] [1.0587788 ] [0.9106868 ] [0.9472133 ] [1.0715533 ] [0.901255 ] [0.86962885] [0.9659476 ] [0.9765714 ] [1.0323535 ] [1.031285 ] [0.92613155] [0.98786914] [1.0507873 ] [0.9251303 ] [0.9366966 ] [0.81937015] [1.0471811 ] [1.0508373 ] 
[0.9125878 ] [1.061402 ] [0.8818584 ] [1.2431481 ] [1.0456237 ] [0.92827815] [0.98364294] [1.1075418 ] [1.0805252 ] [0.90267175] [1.047438 ] [1.0353574 ] [0.8042466 ] [1.0497769 ] [0.91155297] [1.0065334 ] [0.93541765] [0.98431426] [1.1229882 ] [0.9369647 ] [0.9544254 ] [1.1107619 ] [0.87856203] [1.1317395 ] [1.0294573 ] [0.88613564] [1.060704 ] [0.93794346] [0.9379629 ] [1.0039054 ] [1.1040772 ] [0.9410663 ] [1.0981263 ] [0.82139575] [1.0191988 ] [1.0869862 ] [0.9827407 ] [0.9307391 ] [0.95291764] [0.8405391 ] [0.9580541 ] [0.84104604] [1.0511622 ] [1.0421116 ] [0.92332757] [0.88993883] [1.0454779 ] [1.219526 ] [1.0629703 ] [1.003574 ] [0.9105598 ] [1.0420625 ] [1.1320531 ] [1.0125917 ] [1.0034715 ] [0.9047598 ] [0.94934285] [1.0335963 ] [0.84147674] [0.93863326] [0.89072925] [1.0002686 ] [1.0217303 ] [1.058295 ] [1.0113027 ] [0.8820885 ] [0.8244613 ] [0.9221595 ] [1.04016 ] [1.09566 ] [0.92958164] [0.92010164] [0.97497493] [0.9630988 ] [0.9630597 ] [0.90708447] [0.7807246 ] [0.99768287] [0.9972299 ] [0.9634818 ] [1.0312647 ] [0.9926016 ] [1.0361139 ] [0.86009395] [1.0668375 ] [1.0731392 ] [1.0002947 ] [0.926692 ] [1.0281122 ] [0.7875656 ] [0.9001116 ] [1.0372919 ] [1.0058872 ] [1.023592 ] [1.2185007 ] [1.0648199 ] [0.90150917] [1.073678 ] [1.001944 ] [1.0483737 ] [0.9997548 ] [0.9428183 ] [0.91379994] [1.0372082 ] [0.98542845] [0.9784235 ] [0.83087295] [0.8685006 ] [1.0612875 ] [0.9608329 ] [1.0347677 ] [0.95751095] [1.1037031 ] [1.0633184 ] [1.0935105 ] [0.9540448 ] [0.90909725] [1.0357648 ] [1.0306114 ] [1.1444205 ] [0.99783564] [1.0289209 ] [0.99488926] [0.9436761 ] [1.1140828 ] [1.1507267 ] [0.9703277 ] [1.0891513 ] [1.1414689 ] [0.8546253 ] [0.9971438 ] [1.0154641 ] [0.87698287] [0.9001859 ] [1.0183325 ] [1.1424898 ] [0.95018035] [0.97383326] [1.0955385 ] [0.94747585] [1.0688044 ] [0.8124348 ] [0.89884025] [1.072939 ] [1.0902536 ] [1.0914423 ] [0.9844426 ] [1.0656275 ] [1.0347214 ] [0.8690389 ] [1.0176798 ] [1.0794377 ] [1.1522547 ] [0.9780863 ] [0.9572033 ] 
[1.0214412 ] [0.9463529 ] [1.165096 ] [0.9353522 ] [1.095445 ] [0.9926499 ]] [[1.00803 ] [0.9271284 ] [0.90761477] [1.0521148 ] [0.932229 ] [0.84645 ] [0.93374187] [0.89668167] [1.01797 ] [1.1002911 ] [1.1527735 ] [1.2290325 ] [0.88125664] [0.9303269 ] [1.0478941 ] [0.8354024 ] [0.9416434 ] [0.9405238 ] [1.0269889 ] [0.9979697 ] [1.0758837 ] [1.089708 ] [1.1727676 ] [1.0412072 ] [0.97920424] [0.90817016] [1.237429 ] [0.967688 ] [1.0729946 ] [1.1451416 ] [1.0752989 ] [0.8786079 ] [0.93047434] [1.1688479 ] [1.031883 ] [0.991083 ] [0.99098104] [0.8004619 ] [1.0192577 ] [0.99948883] [0.94576275] [1.040239 ] [0.945381 ] [0.86047757] [0.94653744] [1.0582955 ] [1.0688921 ] [0.96877724] [0.93660927] [0.8029207 ] [0.9510089 ] [1.1397145 ] [0.91586196] [1.0405852 ] [0.9645313 ] [0.9073234 ] [1.0878761 ] [0.94991314] [0.7739754 ] [0.89119405] [0.90189016] [0.8855181 ] [0.97064924] [0.9237776 ] [0.94519436] [1.0354897 ] [0.86327255] [1.0730476 ] [1.0552768 ] [0.95393 ] [1.176675 ] [1.1926444 ] [0.90451044] [0.9682401 ] [1.0699458 ] [1.0065602 ] [0.7957881 ] [0.8421878 ] [0.8162789 ] [0.8535227 ] [0.97026473] [1.0064908 ] [0.97005004] [1.0925004 ] [1.0087162 ] [0.9716192 ] [1.2207938 ] [0.93266743] [0.919479 ] [1.016185 ] [1.0492777 ] [0.88359165] [0.9876738 ] [0.97660637] [0.96977717] [0.8844338 ] [0.9922027 ] [1.0877591 ] [1.1047446 ] [0.94370073] [0.9930243 ] [1.0165973 ] [0.92273575] [1.0980393 ] [0.9327568 ] [1.0368816 ] [1.1670636 ] [1.0054036 ] [0.9355591 ] [1.0784839 ] [1.0241549 ] [0.9976706 ] [0.9074116 ] [0.94502646] [0.9540833 ] [0.9720422 ] [1.0072263 ] [0.9551835 ] [0.9359237 ] [1.1890354 ] [0.94945633] [1.0676734 ] [1.0938159 ] [0.906381 ] [0.907576 ] [0.96766376] [1.1215397 ] [1.030298 ] [1.0269105 ] [0.8916536 ] [0.88291395] [1.0069343 ] [1.0962946 ] [1.0651621 ] [1.1386507 ] [0.96671635] [1.0786401 ] [0.765893 ] [0.92650306] [1.0664465 ] [0.89236844] [1.0077274 ] [0.93013823] [0.92815447] [1.0621121 ] [1.0630227 ] [1.1018163 ] [0.7964377 ] [0.9037388 ] 
[0.902414 ] [0.9415565 ] [0.9568752 ] [1.0306689 ] [1.0863768 ] [1.0857939 ] [0.94808114] [0.9343942 ] [1.1255542 ] [0.9511241 ] [1.1373168 ] [1.1433285 ] [0.95708436] [1.1251062 ] [0.97806835] [1.1749249 ] [1.0239168 ] [0.95658886] [0.9430561 ] [0.9799441 ] [0.920527 ] [1.2088286 ] [0.913004 ] [1.1246809 ] [1.052532 ] [0.88544464] [0.99966234] [0.9161347 ] [1.1074983 ] [1.0854388 ] [0.9452696 ] [0.9635479 ] [1.1166502 ] [0.93334526] [1.0672227 ] [1.0916507 ] [0.9926392 ] [1.0007513 ] [0.9439511 ] [0.93469745] [0.9268409 ] [0.97130156] [0.9996055 ] [0.7469221 ] [1.0353464 ] [1.0387443 ] [1.0326563 ] [1.0210143 ] [1.1483762 ] [0.94718426] [0.95961195] [0.92292964] [0.8699058 ] [1.1199951 ] [1.1710796 ] [0.9144599 ] [0.93011934] [0.9425089 ] [1.0853807 ] [1.0083096 ] [1.0668948 ] [1.1283405 ] [0.89638585] [1.0308145 ] [1.1617731 ] [0.9141651 ] [0.8627874 ] [0.8841988 ] [0.9831327 ] [1.0533535 ] [0.9876544 ] [1.1500752 ] [1.1405095 ] [1.0497979 ] [1.0425068 ]] [[0.8741285 ] [1.0447025 ] [1.0750829 ] [0.8886967 ] [1.0347273 ] [0.9230828 ] [1.0444003 ] [0.7946216 ] [1.0347928 ] [0.9137078 ] [1.0869399 ] [0.95400006] [1.0422175 ] [1.0938886 ] [0.90906525] [0.9943665 ] [0.9412555 ] [1.061388 ] [0.9922307 ] [1.1784765 ] [0.84445137] [1.0779825 ] [0.9610472 ] [1.070569 ] [1.0011823 ] [0.9662943 ] [1.1422508 ] [0.91236347] [0.76375943] [0.9567954 ] [1.289546 ] [0.8956658 ] [1.0242299 ] [1.0702913 ] [0.8313301 ] [1.1798047 ] [0.99969333] [0.9454423 ] [1.0260438 ] [0.9659213 ] [0.8497504 ] [1.091286 ] [1.1347582 ] [0.934061 ] [0.9770746 ] [0.9702998 ] [0.97252923] [1.0163372 ] [0.84628004] [1.0608529 ] [1.064695 ] [0.9724833 ] [0.97461194] [1.1624442 ] [0.9768453 ] [0.97710514] [0.82572216] [0.9614196 ] [0.99301463] [0.95827657] [0.8753416 ] [0.9011494 ] [0.97001714] [0.9630657 ] [0.9907288 ] [0.84684944] [0.98385185] [1.0440025 ] [0.9627218 ] [0.85643864] [1.0052365 ] [1.0730702 ] [1.0413964 ] [1.1976582 ] [0.94349366] [1.0073303 ] [1.020407 ] [1.0444764 ] [0.9335944 ] 
[1.0286393 ] [0.96208394] [0.899244 ] [1.1455282 ] [0.9736038 ] [1.0881656 ] [0.9894694 ] [0.96115845] [0.9960384 ] [1.165741 ] [1.1386834 ] [0.78914034] [0.91787475] [1.2679273 ] [1.0998259 ] [1.2629229 ] [1.0439459 ] [0.93812704] [0.92480266] [0.8171164 ] [1.0321866 ] [0.8876189 ] [1.0826483 ] [0.9123691 ] [1.0158255 ] [1.0141937 ] [0.8948854 ] [1.1440614 ] [1.0615488 ] [0.955554 ] [1.0985445 ] [1.1786926 ] [0.85176617] [1.0726628 ] [1.0039734 ] [1.1782347 ] [1.0698472 ] [0.9121011 ] [1.0100882 ] [0.8847236 ] [0.970166 ] [1.0010363 ] [1.1388587 ] [1.0575192 ] [0.9783862 ] [1.0201937 ] [0.91892684] [0.96722275] [1.055931 ] [1.0900638 ] [1.0681783 ] [0.8437312 ] [0.9403226 ] [1.1016641 ] [1.0349351 ] [1.0385069 ] [0.8771557 ] [1.2743925 ] [1.0155628 ] [1.0690955 ] [1.0487566 ] [1.1201414 ] [0.8739973 ] [1.0941017 ] [1.0209769 ] [1.2214867 ] [1.0601674 ] [0.9454403 ] [1.1877301 ] [1.1025376 ] [0.93649375] [0.94912237] [1.0008327 ] [1.0186174 ] [0.92516 ] [1.0797737 ] [1.1022812 ] [0.96562326] [0.9821602 ] [1.0499389 ] [1.1279663 ] [1.0609703 ] [0.9315879 ] [1.0710815 ] [0.9967743 ] [0.7564448 ] [0.96512187] [1.0369784 ] [0.98632056] [1.0810539 ] [1.071492 ] [1.0587157 ] [1.0743773 ] [1.1142166 ] [1.0537251 ] [0.93088275] [0.96618235] [1.0182322 ] [0.87942725] [0.9601516 ] [0.9748591 ] [0.88133687] [1.143382 ] [1.0334774 ] [1.0787178 ] [0.92704666] [1.0795013 ] [1.1266767 ] [0.89667934] [1.0343025 ] [1.0068839 ] [1.091027 ] [0.93954223] [0.93238354] [0.92276734] [1.0188148 ] [0.8854402 ] [0.7402089 ] [0.8979909 ] [0.90056103] [0.86538494] [0.9651224 ] [0.94396144] [1.0212578 ] [0.74363834] [1.0004413 ] [1.1079681 ] [1.0237406 ] [1.0965493 ] [0.89904594] [0.93420106] [0.8966091 ] [1.0796041 ] [1.124121 ] [1.1175741 ] [0.9382186 ] [0.9315958 ] [1.0204356 ] [1.1853733 ] [0.9572086 ] [1.1432766 ] [1.0095847 ] [1.1157267 ] [1.1306998 ] [1.0255146 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:-1 - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6545.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1]]() %self.unbiased : bool = prim::Constant[value=1]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[0.93060386] [1.0937735 ] [1.0460348 ] [0.9346833 ] [0.92787975] [1.0437781 ] [1.0548167 ] [0.9957728 ] [1.013821 ] [0.96068954] [0.842176 ] [1.0071185 ] [1.037857 ] [0.95746374] [0.8389438 ] [0.97997195] [1.0355898 ] [0.95188445] [1.0119383 ] [0.9704678 ] [0.9351147 ] [0.9535804 ] [0.93417376] [0.8991892 ] [1.0026073 ] [1.0431491 ] [0.81842756] [1.2166288 ] [0.8759691 ] [0.9519358 ] [1.1381358 ] [1.0971172 ] [0.9796405 ] [1.0128305 ] [1.0288135 ] [1.1200548 ] [0.9235901 ] [0.8996754 ] [1.1129854 ] [1.2098634 ] [0.8046525 ] [1.0924776 ] [1.0306574 ] [0.92055935] [0.94728774] [0.97690165] [0.8333825 ] [0.8440522 ] [0.83747226] [0.9808183 ] [1.0404533 ] [0.9573602 ] [0.92601734] [1.0038567 ] [1.0740019 ] [0.9775094 ] [1.0190774 ] [1.1024553 ] [0.92094654] [1.010607 ] [1.0493147 ] [0.9243254 ] [1.0877477 ] [1.1269361 ] [1.1420779 ] [0.91265726] [0.9628005 ] [0.9395477 ] [1.1709384 ] [0.9810914 ] [1.0706288 ] [1.0756551 ] [0.9980446 ] [1.0128651 ] [1.0342256 ] [0.8966294 ] [0.9955963 ] [0.8946624 ] [0.9925864 ] [0.9567863 ] [0.96399915] [0.9157321 ] [1.1246105 ] [1.0839142 ] [0.9228101 ] [1.0005018 ] [0.990779 ] [0.9160844 ] [0.830142 ] [1.0530722 ] [0.7213251 ] [1.0411414 ] [0.9987025 ] [0.9431646 ] [1.0209416 ] [1.0316151 ] [0.9983216 ] [1.109035 ] [0.99621415] [0.9003037 ] [0.9219784 ] [1.0719967 ] [0.9793511 ] [0.9163019 ] [0.97727555] [0.87750846] [0.9899298 ] [0.9483442 ] [1.0437062 ] [1.1073295 ] [1.0063782 ] [0.9883678 ] [0.90043503] [0.96083635] [0.9782608 ] [0.94095314] [1.069383 ] [1.0435357 ] [0.831861 ] [1.000867 ] [1.133384 ] [0.97737455] 
[0.89684045] [1.1018082 ] [0.90317327] [0.9401502 ] [1.0254741 ] [0.9034763 ] [1.0228539 ] [1.0045918 ] [1.0030327 ] [1.0417191 ] [0.92477316] [1.0467526 ] [0.9472291 ] [0.9307924 ] [0.89622253] [0.8669116 ] [1.1066192 ] [0.93948585] [0.9344233 ] [1.1443422 ] [1.0379667 ] [0.950531 ] [1.0090709 ] [1.0491849 ] [1.1941504 ] [1.0248777 ] [1.0044885 ] [1.0061538 ] [1.1021994 ] [0.94674397] [1.0085441 ] [0.87284994] [0.87103355] [1.0217713 ] [1.0532066 ] [0.88192344] [0.95150214] [0.96455425] [0.8503914 ] [0.9975275 ] [0.95673305] [0.9438697 ] [1.059287 ] [1.0929518 ] [1.049919 ] [0.91924465] [0.98543733] [1.0433418 ] [0.8769322 ] [1.02704 ] [0.90768033] [0.89154446] [0.9333213 ] [1.0144985 ] [0.9814058 ] [0.9585569 ] [1.176262 ] [1.1095759 ] [1.1279862 ] [1.1483148 ] [1.0228022 ] [0.9847302 ] [1.1086574 ] [0.84058684] [0.9041763 ] [0.94917774] [0.91505164] [1.0749954 ] [1.1449757 ] [0.9750427 ] [0.8941055 ] [1.1906768 ] [0.9612588 ] [0.96931124] [1.1928353 ] [1.0211846 ] [1.022736 ] [0.99345756] [0.86187327] [1.0407395 ] [0.91406804] [0.9445086 ] [0.9493951 ] [0.92085767] [0.96360713] [1.1527804 ] [0.84193665] [1.0463945 ] [0.96661377] [1.0814335 ] [1.0864784 ] [1.1546707 ] [1.022864 ] [1.0327362 ] [1.0195915 ] [0.963625 ] [1.0145202 ] [0.90589994] [1.0772032 ] [1.0126293 ] [0.9166944 ] [1.0850931 ]] [[0.9721025 ] [1.0874338 ] [0.92253864] [1.1436194 ] [0.91840196] [1.0141702 ] [1.0410677 ] [0.99000835] [0.88195956] [0.9514821 ] [1.0526928 ] [1.0439597 ] [1.0003078 ] [1.0848019 ] [0.9095706 ] [0.85520625] [0.93358237] [0.9070073 ] [0.8857416 ] [0.9953243 ] [1.0364861 ] [0.95146143] [0.95560986] [0.96617067] [1.1564908 ] [0.9579291 ] [1.0345397 ] [1.0720391 ] [1.1390942 ] [0.98250055] [0.82347906] [1.0640817 ] [0.99789524] [0.8543344 ] [0.94509417] [0.9606506 ] [1.1140176 ] [1.1226029 ] [1.0336065 ] [0.89604694] [1.1048504 ] [1.2504224 ] [0.9879516 ] [1.0003178 ] [1.0960349 ] [1.2472998 ] [1.0368147 ] [1.0744287 ] [0.94655806] [1.0414177 ] [1.069445 ] [0.9878284 ] 
[1.191477 ] [0.990746 ] [1.0414772 ] [0.994563 ] [0.824068 ] [0.98200405] [1.035151 ] [1.069187 ] [0.9655459 ] [0.9288002 ] [1.1200984 ] [0.97557366] [0.9949923 ] [0.9425744 ] [0.87948793] [1.0533141 ] [0.9397938 ] [0.93143326] [0.94223535] [1.0628806 ] [1.0230484 ] [0.8239057 ] [1.0424116 ] [0.9687842 ] [0.89236057] [0.9962411 ] [1.0314494 ] [0.8764214 ] [1.2096244 ] [0.9476468 ] [1.0024253 ] [0.96418405] [0.93720174] [1.0556376 ] [0.9622964 ] [0.9822578 ] [0.83111084] [0.83635294] [1.0939177 ] [0.9318035 ] [1.0274454 ] [0.81032145] [0.9311217 ] [0.82583976] [0.8786727 ] [1.1096913 ] [1.1115693 ] [1.0817364 ] [0.9739915 ] [1.0255013 ] [0.93379325] [1.0624082 ] [0.96450216] [0.9726168 ] [1.0725219 ] [1.1234851 ] [0.90368205] [1.011514 ] [1.1638492 ] [0.898829 ] [0.9464943 ] [0.99219817] [0.9846595 ] [1.1406434 ] [1.2130566 ] [1.1780769 ] [0.94886124] [1.0326911 ] [0.7861903 ] [1.0415409 ] [1.0596178 ] [0.9289495 ] [0.93824553] [1.1239145 ] [0.9308555 ] [0.9628262 ] [0.95088196] [1.0646604 ] [1.0533816 ] [0.8793763 ] [1.1148356 ] [1.0088437 ] [0.96492565] [0.9703582 ] [1.0161854 ] [0.9130898 ] [0.84600914] [0.91324997] [1.011758 ] [1.0145948 ] [1.0811894 ] [0.9611152 ] [1.0357245 ] [1.0077505 ] [1.104442 ] [1.092504 ] [0.8104442 ] [0.97745556] [0.9848779 ] [1.0755112 ] [0.9303338 ] [0.9643587 ] [1.1292901 ] [1.0772676 ] [0.8798445 ] [0.9817845 ] [0.96458393] [0.94806546] [0.9673913 ] [1.0131983 ] [1.0753527 ] [1.0952162 ] [1.0444864 ] [0.945667 ] [1.058977 ] [1.1254022 ] [0.93801266] [1.0755855 ] [0.8843743 ] [0.9495468 ] [1.0256475 ] [0.9506059 ] [1.0561956 ] [1.0719985 ] [0.9783182 ] [0.8651906 ] [0.9366001 ] [1.1013328 ] [1.0038034 ] [0.9637707 ] [0.89350086] [0.85979027] [0.88450927] [0.9121599 ] [1.1393318 ] [0.9106526 ] [1.2603246 ] [0.87822866] [1.00296 ] [0.87769514] [0.95301926] [0.84137326] [1.1809974 ] [0.99539983] [1.0439072 ] [0.96811026] [0.9441455 ] [1.005808 ] [0.9890611 ] [1.269815 ] [1.0693765 ] [0.8761205 ] [0.9113819 ] [0.98994505] [0.9825769 ] 
[0.8690092 ] [1.1963394 ] [0.93300015] [1.0902853 ] [1.0700213 ] [1.1406237 ] [1.1425434 ] [1.1844409 ] [1.1234505 ] [1.0706161 ] [0.9292287 ] [0.7858572 ] [1.007591 ] [1.1034994 ] [1.0898635 ] [0.9385111 ] [1.2571472 ]] [[1.1092784 ] [0.9511977 ] [0.98132527] [1.1176 ] [0.897165 ] [1.0804087 ] [0.9528898 ] [1.0096489 ] [1.0395806 ] [1.1819526 ] [0.9296501 ] [0.89059377] [1.1598864 ] [0.8929513 ] [0.8653243 ] [1.0498984 ] [0.91942227] [1.0049683 ] [1.0978342 ] [1.1225157 ] [0.9682847 ] [1.0062944 ] [0.817984 ] [1.0688323 ] [1.1868778 ] [1.0051686 ] [0.9030326 ] [0.92909324] [0.890707 ] [0.9239921 ] [0.94122106] [0.83673966] [0.99668306] [0.90597385] [0.9283612 ] [0.9409599 ] [0.9277995 ] [1.0164019 ] [1.0190638 ] [1.1341459 ] [0.9765828 ] [0.9759697 ] [0.9887014 ] [1.0399172 ] [1.0964481 ] [0.9170871 ] [1.0424631 ] [1.0663179 ] [0.9389216 ] [1.0013083 ] [0.8867856 ] [0.9932733 ] [0.9477761 ] [0.9589622 ] [0.9528518 ] [0.8460982 ] [1.0369807 ] [1.0078424 ] [1.1063168 ] [0.94804484] [1.1210297 ] [1.0165418 ] [1.0602976 ] [0.7537871 ] [1.2235374 ] [1.1435822 ] [0.9292441 ] [1.0029771 ] [0.9277273 ] [0.9398156 ] [1.0217133 ] [1.0194297 ] [1.0963409 ] [1.0089974 ] [1.0159621 ] [0.94798636] [1.0756695 ] [0.882866 ] [0.9033487 ] [1.0020365 ] [1.0048618 ] [1.1556066 ] [1.0439248 ] [1.0523934 ] [1.0561712 ] [1.0841526 ] [0.95166695] [0.9064107 ] [0.9202695 ] [0.9412936 ] [1.0016347 ] [0.89739144] [0.8741178 ] [1.1233355 ] [0.8273935 ] [0.9378535 ] [1.151204 ] [0.9496558 ] [1.2402124 ] [1.0607654 ] [0.9061731 ] [1.0816386 ] [1.0683059 ] [0.96774274] [1.037779 ] [1.0805019 ] [0.8757433 ] [1.1481246 ] [0.9027945 ] [0.9533042 ] [0.6989974 ] [1.1621387 ] [0.96446043] [0.88024384] [0.86953884] [0.8113117 ] [1.1377679 ] [0.9633126 ] [1.0809278 ] [1.1263249 ] [1.0168303 ] [0.9285066 ] [0.90203726] [0.96801084] [0.98878103] [0.90131265] [1.149664 ] [1.1444892 ] [1.0346512 ] [1.0504928 ] [1.1777899 ] [0.8390541 ] [1.1126708 ] [1.0443219 ] [0.8248076 ] [0.92441136] [0.99964315] 
[1.0843437 ] [0.97507536] [1.0149326 ] [0.91382414] [0.84024066] [0.8222452 ] [0.9676619 ] [1.0192862 ] [0.8571796 ] [1.0295101 ] [0.9904146 ] [0.949818 ] [1.0847517 ] [1.1322806 ] [0.8941399 ] [0.991169 ] [0.925311 ] [1.0556271 ] [0.9680507 ] [0.95241755] [1.0558302 ] [1.0090231 ] [1.0000017 ] [0.9758679 ] [1.0142307 ] [1.006672 ] [0.97550535] [1.144341 ] [0.94269556] [1.1396476 ] [0.9377934 ] [0.97302085] [0.89344364] [1.0558187 ] [0.99137604] [1.1055986 ] [1.0729251 ] [0.86372906] [0.98403174] [1.0670102 ] [0.9178376 ] [0.9002473 ] [0.9434027 ] [0.911456 ] [0.8949594 ] [0.93823546] [1.1417302 ] [1.0629117 ] [0.9587907 ] [0.9651149 ] [0.9022236 ] [1.0579448 ] [1.0132487 ] [0.9413183 ] [1.0082403 ] [0.83549964] [1.0946783 ] [1.2088588 ] [0.93706214] [0.94033015] [0.87191725] [0.91641676] [0.8511537 ] [0.9834903 ] [0.9245638 ] [1.0291586 ] [0.956625 ] [1.0451009 ] [1.1104821 ] [1.2420264 ] [0.9322604 ] [1.2172458 ] [0.78026414] [0.86932343] [0.9841163 ] [0.99509853] [0.8809885 ] [0.9085183 ] [1.2055731 ] [0.8627768 ] [1.0579083 ] [1.1177229 ] [1.0523273 ] [1.140806 ] [1.1405784 ] [1.1744404 ] [0.92941844]]]]; ov_res: [[[[0.930604 ] [1.0937736 ] [1.0460348 ] [0.9346833 ] [0.92787975] [1.0437782 ] [1.0548167 ] [0.99577284] [1.0138211 ] [0.9606896 ] [0.842176 ] [1.0071185 ] [1.037857 ] [0.9574638 ] [0.8389438 ] [0.979972 ] [1.0355898 ] [0.9518845 ] [1.0119385 ] [0.97046787] [0.9351147 ] [0.95358044] [0.9341739 ] [0.89918923] [1.0026075 ] [1.043149 ] [0.8184276 ] [1.2166289 ] [0.8759692 ] [0.9519358 ] [1.1381359 ] [1.0971173 ] [0.9796404 ] [1.0128306 ] [1.0288136 ] [1.120055 ] [0.9235902 ] [0.8996754 ] [1.1129855 ] [1.2098634 ] [0.8046526 ] [1.0924777 ] [1.0306575 ] [0.92055935] [0.9472878 ] [0.97690165] [0.83338255] [0.8440522 ] [0.8374723 ] [0.98081833] [1.0404533 ] [0.95736027] [0.9260175 ] [1.0038567 ] [1.074002 ] [0.97750944] [1.0190775 ] [1.1024553 ] [0.9209466 ] [1.010607 ] [1.0493149 ] [0.92432547] [1.0877477 ] [1.1269361 ] [1.142078 ] [0.9126574 ] [0.9628005 ] 
[0.9395477 ] [1.1709385 ] [0.98109144] [1.0706288 ] [1.0756551 ] [0.9980446 ] [1.0128652 ] [1.0342257 ] [0.8966294 ] [0.99559635] [0.8946624 ] [0.9925863 ] [0.95678633] [0.96399915] [0.91573215] [1.1246105 ] [1.0839142 ] [0.9228101 ] [1.0005019 ] [0.9907789 ] [0.9160845 ] [0.8301421 ] [1.0530722 ] [0.72132516] [1.0411416 ] [0.99870265] [0.9431646 ] [1.0209417 ] [1.0316151 ] [0.99832165] [1.109035 ] [0.9962142 ] [0.9003038 ] [0.9219784 ] [1.0719968 ] [0.9793512 ] [0.91630197] [0.9772756 ] [0.8775085 ] [0.9899299 ] [0.94834423] [1.0437062 ] [1.1073296 ] [1.0063782 ] [0.9883679 ] [0.90043515] [0.9608364 ] [0.9782609 ] [0.9409532 ] [1.069383 ] [1.0435358 ] [0.8318611 ] [1.0008671 ] [1.133384 ] [0.97737455] [0.8968405 ] [1.1018082 ] [0.9031733 ] [0.9401502 ] [1.0254742 ] [0.90347636] [1.0228539 ] [1.004592 ] [1.0030328 ] [1.0417191 ] [0.9247732 ] [1.0467527 ] [0.9472291 ] [0.93079233] [0.89622253] [0.86691165] [1.1066194 ] [0.93948597] [0.9344234 ] [1.1443422 ] [1.0379668 ] [0.95053107] [1.0090709 ] [1.0491849 ] [1.1941504 ] [1.0248777 ] [1.0044886 ] [1.0061538 ] [1.1021994 ] [0.946744 ] [1.008544 ] [0.87285 ] [0.8710336 ] [1.0217714 ] [1.0532066 ] [0.88192344] [0.9515022 ] [0.96455437] [0.8503914 ] [0.9975274 ] [0.9567331 ] [0.9438697 ] [1.059287 ] [1.0929518 ] [1.0499191 ] [0.91924465] [0.9854374 ] [1.0433418 ] [0.8769322 ] [1.0270401 ] [0.9076804 ] [0.8915445 ] [0.93332136] [1.0144985 ] [0.9814058 ] [0.9585569 ] [1.176262 ] [1.109576 ] [1.1279863 ] [1.148315 ] [1.0228022 ] [0.9847302 ] [1.1086574 ] [0.8405869 ] [0.90417635] [0.94917786] [0.9150517 ] [1.0749955 ] [1.1449758 ] [0.9750427 ] [0.89410555] [1.1906769 ] [0.9612589 ] [0.9693113 ] [1.1928355 ] [1.0211846 ] [1.022736 ] [0.9934577 ] [0.86187327] [1.0407395 ] [0.9140681 ] [0.9445087 ] [0.9493952 ] [0.9208577 ] [0.96360713] [1.1527804 ] [0.84193677] [1.0463946 ] [0.96661377] [1.0814337 ] [1.0864785 ] [1.1546708 ] [1.0228642 ] [1.0327361 ] [1.0195916 ] [0.96362513] [1.0145202 ] [0.9059 ] [1.077203 ] [1.0126294 ] 
[0.91669446] [1.0850931 ]] [[0.9721026 ] [1.0874338 ] [0.9225387 ] [1.1436194 ] [0.918402 ] [1.0141702 ] [1.0410678 ] [0.9900085 ] [0.8819596 ] [0.9514822 ] [1.0526929 ] [1.0439599 ] [1.0003078 ] [1.0848022 ] [0.90957063] [0.8552063 ] [0.9335824 ] [0.9070074 ] [0.88574153] [0.99532443] [1.0364863 ] [0.9514615 ] [0.9556099 ] [0.96617067] [1.1564909 ] [0.9579292 ] [1.0345398 ] [1.0720392 ] [1.1390944 ] [0.9825006 ] [0.82347906] [1.0640817 ] [0.99789536] [0.8543344 ] [0.9450942 ] [0.96065056] [1.1140177 ] [1.1226029 ] [1.0336065 ] [0.89604706] [1.1048504 ] [1.2504224 ] [0.98795164] [1.0003179 ] [1.0960349 ] [1.2473 ] [1.0368147 ] [1.0744288 ] [0.9465581 ] [1.0414178 ] [1.0694453 ] [0.98782843] [1.191477 ] [0.9907461 ] [1.0414773 ] [0.99456304] [0.8240681 ] [0.98200405] [1.0351511 ] [1.069187 ] [0.96554595] [0.9288002 ] [1.1200984 ] [0.97557366] [0.9949924 ] [0.9425744 ] [0.87948793] [1.0533141 ] [0.9397939 ] [0.9314333 ] [0.94223547] [1.0628808 ] [1.0230485 ] [0.82390577] [1.0424117 ] [0.9687843 ] [0.89236057] [0.9962412 ] [1.0314494 ] [0.8764214 ] [1.2096245 ] [0.9476469 ] [1.0024254 ] [0.96418405] [0.93720174] [1.0556377 ] [0.9622965 ] [0.98225784] [0.8311109 ] [0.8363529 ] [1.0939178 ] [0.9318036 ] [1.0274454 ] [0.8103215 ] [0.9311217 ] [0.82583976] [0.8786728 ] [1.1096915 ] [1.1115693 ] [1.0817366 ] [0.9739916 ] [1.0255011 ] [0.9337933 ] [1.0624083 ] [0.9645022 ] [0.9726169 ] [1.072522 ] [1.1234851 ] [0.90368205] [1.011514 ] [1.1638492 ] [0.898829 ] [0.94649434] [0.99219817] [0.9846596 ] [1.1406434 ] [1.2130567 ] [1.1780769 ] [0.94886136] [1.0326912 ] [0.78619033] [1.0415409 ] [1.0596179 ] [0.92894953] [0.9382456 ] [1.1239145 ] [0.9308556 ] [0.9628263 ] [0.950882 ] [1.0646605 ] [1.0533817 ] [0.87937635] [1.1148356 ] [1.0088437 ] [0.9649257 ] [0.9703583 ] [1.0161854 ] [0.91308993] [0.84600925] [0.91324997] [1.011758 ] [1.0145948 ] [1.0811894 ] [0.9611152 ] [1.0357245 ] [1.0077506 ] [1.104442 ] [1.092504 ] [0.8104443 ] [0.9774556 ] [0.98487794] [1.0755113 ] 
[0.9303339 ] [0.96435875] [1.12929 ] [1.0772676 ] [0.87984455] [0.98178464] [0.96458405] [0.9480655 ] [0.9673914 ] [1.0131983 ] [1.0753525 ] [1.0952162 ] [1.0444863 ] [0.945667 ] [1.0589771 ] [1.1254023 ] [0.93801266] [1.0755855 ] [0.8843743 ] [0.9495469 ] [1.0256475 ] [0.950606 ] [1.0561957 ] [1.0719986 ] [0.9783182 ] [0.8651906 ] [0.93660015] [1.1013329 ] [1.0038034 ] [0.96377075] [0.8935009 ] [0.85979027] [0.8845093 ] [0.91216 ] [1.1393319 ] [0.91065264] [1.2603247 ] [0.8782287 ] [1.0029601 ] [0.87769514] [0.9530193 ] [0.8413734 ] [1.1809974 ] [0.99539983] [1.0439073 ] [0.96811026] [0.94414556] [1.005808 ] [0.98906106] [1.269815 ] [1.0693766 ] [0.87612057] [0.9113819 ] [0.9899451 ] [0.98257697] [0.8690093 ] [1.1963394 ] [0.93300015] [1.0902853 ] [1.0700214 ] [1.1406237 ] [1.1425436 ] [1.184441 ] [1.1234506 ] [1.0706161 ] [0.9292288 ] [0.78585726] [1.007591 ] [1.1034994 ] [1.0898637 ] [0.9385111 ] [1.2571472 ]] [[1.1092786 ] [0.95119774] [0.98132527] [1.1176 ] [0.89716506] [1.0804087 ] [0.95288974] [1.0096489 ] [1.0395806 ] [1.1819527 ] [0.9296501 ] [0.89059377] [1.1598865 ] [0.8929513 ] [0.8653243 ] [1.0498983 ] [0.91942227] [1.0049684 ] [1.0978341 ] [1.1225158 ] [0.9682848 ] [1.0062945 ] [0.817984 ] [1.0688323 ] [1.186878 ] [1.0051686 ] [0.90303254] [0.9290933 ] [0.890707 ] [0.9239921 ] [0.94122106] [0.83673966] [0.9966831 ] [0.9059739 ] [0.9283612 ] [0.94096 ] [0.9277996 ] [1.016402 ] [1.0190638 ] [1.1341459 ] [0.97658294] [0.97596973] [0.9887014 ] [1.0399173 ] [1.096448 ] [0.9170872 ] [1.042463 ] [1.0663179 ] [0.9389216 ] [1.0013084 ] [0.8867856 ] [0.9932733 ] [0.94777614] [0.9589623 ] [0.95285183] [0.84609824] [1.0369807 ] [1.0078424 ] [1.1063168 ] [0.9480449 ] [1.1210299 ] [1.016542 ] [1.0602977 ] [0.7537871 ] [1.2235374 ] [1.1435821 ] [0.92924416] [1.0029773 ] [0.9277274 ] [0.9398156 ] [1.0217134 ] [1.0194297 ] [1.096341 ] [1.0089976 ] [1.0159622 ] [0.94798636] [1.0756694 ] [0.882866 ] [0.90334874] [1.0020365 ] [1.004862 ] [1.1556067 ] [1.0439248 ] 
[1.0523934 ] [1.0561712 ] [1.0841526 ] [0.9516671 ] [0.90641075] [0.92026955] [0.94129366] [1.0016348 ] [0.89739144] [0.8741178 ] [1.1233356 ] [0.8273936 ] [0.9378535 ] [1.151204 ] [0.9496558 ] [1.2402126 ] [1.0607655 ] [0.9061731 ] [1.0816386 ] [1.068306 ] [0.96774286] [1.0377791 ] [1.0805019 ] [0.8757434 ] [1.1481246 ] [0.90279454] [0.95330423] [0.69899744] [1.1621387 ] [0.9644605 ] [0.88024384] [0.8695389 ] [0.8113118 ] [1.137768 ] [0.96331275] [1.0809278 ] [1.1263249 ] [1.0168304 ] [0.9285067 ] [0.9020374 ] [0.9680109 ] [0.9887811 ] [0.9013127 ] [1.1496642 ] [1.1444892 ] [1.034651 ] [1.0504928 ] [1.17779 ] [0.83905417] [1.1126709 ] [1.044322 ] [0.82480764] [0.9244114 ] [0.99964315] [1.0843437 ] [0.9750754 ] [1.0149326 ] [0.91382414] [0.8402407 ] [0.8222453 ] [0.96766186] [1.0192863 ] [0.8571797 ] [1.0295103 ] [0.9904147 ] [0.9498181 ] [1.0847517 ] [1.1322806 ] [0.8941399 ] [0.99116904] [0.9253111 ] [1.0556271 ] [0.9680507 ] [0.9524176 ] [1.0558302 ] [1.0090233 ] [1.0000017 ] [0.975868 ] [1.0142308 ] [1.006672 ] [0.9755054 ] [1.1443411 ] [0.94269556] [1.1396477 ] [0.93779343] [0.97302085] [0.89344376] [1.0558188 ] [0.99137604] [1.1055987 ] [1.0729252 ] [0.8637291 ] [0.9840317 ] [1.0670102 ] [0.9178377 ] [0.90024734] [0.9434027 ] [0.91145605] [0.89495945] [0.93823546] [1.1417303 ] [1.0629119 ] [0.9587908 ] [0.9651149 ] [0.90222365] [1.0579449 ] [1.0132488 ] [0.9413183 ] [1.0082403 ] [0.8354997 ] [1.0946784 ] [1.208859 ] [0.93706214] [0.94033015] [0.87191725] [0.9164168 ] [0.8511538 ] [0.9834903 ] [0.9245639 ] [1.0291586 ] [0.956625 ] [1.0451008 ] [1.1104822 ] [1.2420264 ] [0.93226045] [1.2172458 ] [0.7802642 ] [0.8693235 ] [0.9841164 ] [0.99509865] [0.88098854] [0.90851843] [1.2055732 ] [0.8627769 ] [1.0579084 ] [1.1177229 ] [1.0523273 ] [1.1408062 ] [1.1405784 ] [1.1744405 ] [0.9294185 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:-2 - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6547.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-2]]() %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[0.9851641 1.0745417 0.89094245 1.0286925 1.0985688 0.9646358 1.004211 1.0089313 0.986622 1.0076307 1.1704999 1.126424 1.0431851 1.0610849 0.99932766 0.880457 1.0436032 0.8848076 1.0535749 1.0183507 0.8752308 0.7963596 0.84458685 0.9131601 0.9116206 1.0650667 0.86825806 0.8121285 1.073847 0.8451887 0.8267409 1.0385385 0.8874254 0.9674143 1.0027162 0.8776359 1.0813626 1.0014036 0.9505514 0.820868 1.0782087 1.0351533 0.9260627 1.0294805 0.98452777 1.0774875 0.95473605 0.95632905 0.89757484 0.9800273 0.986503 1.0791863 0.96848893 0.97185045 0.9956389 0.97246844 1.1591375 0.94476885 1.004103 1.0472668 1.0535706 1.14009 1.067976 1.0520207 0.9435524 0.86425155 0.9343737 0.81046414 0.9268779 0.9221827 1.1125054 0.9234024 0.97728467 0.92780066 0.9535218 0.96993816 1.0009239 0.9146795 0.8337587 0.85146666 0.966711 1.1640657 0.93785024 0.99998754 1.0289853 0.9537434 0.94080174 0.8781925 1.0036271 1.0571429 0.8975875 1.1478801 1.0227135 0.8115542 0.9240786 0.99336326 0.9110784 0.8551225 1.1455 1.0568136 1.0707289 0.77360916 1.1178994 1.2072371 0.9148783 1.1076488 1.0507659 0.95880264 1.0436888 0.9695337 0.8801174 0.9495446 1.0406913 0.91875553 0.90699583 1.0730522 0.85087866 1.0552891 1.0631266 0.85558295 1.1029271 1.1001759 0.8842241 0.9650101 0.7987372 0.979798 1.0070469 0.90940535 1.0420576 1.0123256 1.0082318 1.0416926 0.97689253 0.92039186 1.1612262 0.96766007 0.92378175 0.85918343 1.048802 1.0116774 1.0669756 0.9719413 1.10372 0.8868324 1.0172968 0.99302644 0.9311701 1.163704 1.10236 0.9256566 1.1046524 1.1293893 
0.9599675 0.98178345 0.9698114 0.95778936 0.8334103 1.0413611 0.9048914 0.9214199 1.0199256 0.8947229 1.1301438 0.8237689 0.9517009 1.0847671 1.1991775 0.91362387 0.9918727 0.9171865 0.9884985 0.9580481 0.90125376 0.9750951 1.0655648 1.0383867 0.92074406 1.102492 0.8922439 1.0111266 0.8283217 1.1783102 1.0131034 1.0576866 1.1104985 1.0669286 0.97806466 1.0521907 1.1916186 1.0656788 1.08785 1.0063505 1.0142109 1.1117659 0.9687685 0.92257184 1.0009922 0.9740923 0.95048016 0.9339085 0.8631488 1.1033694 1.0138291 1.011622 1.0036016 1.1261276 1.141033 1.1290896 1.0002471 1.1210968 1.088543 0.9738573 1.1037393 0.8940682 0.9491528 1.0797894 1.0349104 1.0813996 1.0709591 1.0620828 1.1419479 0.95920295 0.997994 1.0201789 ]] [[1.0542302 0.8309493 0.99261284 1.1065831 1.0060831 1.0794847 0.98768014 0.9280534 0.83084005 0.9756505 0.9537776 0.9850472 0.9790524 0.926271 1.0464569 1.0832765 0.83113277 0.946825 1.1589223 1.0402926 1.0679419 0.98286176 1.0173192 0.90980124 0.870007 0.98512423 1.1077751 0.85712564 1.1367707 0.921845 0.8346843 1.1793206 0.87286544 1.0655009 1.0411884 1.0339724 0.93266034 1.0519276 0.9372044 0.8436986 0.93221825 0.81716394 0.9977085 1.0073698 0.8431369 0.8349478 1.1658216 0.8167115 0.936379 0.9108829 1.0324941 0.94588923 0.8867404 0.992524 1.0029447 0.9278655 1.0135704 1.1515574 0.9203973 0.88951874 0.8982564 1.0453906 1.1616977 0.8944125 0.94535667 0.92491037 0.95061463 1.2470297 0.91776353 1.0776008 1.0201306 0.9286053 0.93645567 1.1868173 0.9596861 1.0463114 0.90402514 0.94246364 1.0947268 1.1906642 0.9374478 0.96471196 0.8794956 1.07276 1.2124889 0.96944094 0.94445574 0.95490783 0.996169 1.0535624 0.9231866 0.9987013 0.92227393 1.0118594 0.9630951 0.86436576 0.8664657 1.0235684 0.88951576 1.0022527 1.0026288 1.1668044 0.92826164 0.9991131 1.1175361 1.2148881 0.9752192 0.8842928 1.0297438 0.9861637 0.92422026 0.9576722 1.0117731 0.8613822 0.88265556 1.1321038 1.0299879 1.0476668 0.90944546 1.0537705 0.91964823 0.85719556 1.0091503 0.96029276 
1.0662152 1.0752065 0.93983203 1.0324521 0.9934511 1.0033654 1.0567483 0.84920317 0.950363 1.0109327 1.1432022 1.0589784 0.9311805 1.1736356 1.0246243 0.9959167 1.0674576 0.91232723 0.9840473 1.0407856 1.0542799 1.060307 1.1346735 0.7967029 1.0263261 1.0772762 0.94445384 0.79577214 1.0460026 1.0419656 1.005219 1.1449115 0.8717465 1.0665439 1.0596682 0.96449697 1.1002558 0.8541166 1.170651 0.9779938 0.9011079 0.94022036 1.0626525 0.81876016 1.0832889 0.9767985 0.9839959 1.0629078 1.0263666 1.0060016 1.0770686 0.95736253 0.9640897 1.0734832 1.0635185 0.9484149 0.933826 1.1510911 0.9475518 0.99722177 0.92317986 1.0881385 1.1339989 0.93900687 0.80382305 0.90211165 0.99710906 0.9483707 0.92720526 1.1858304 1.0719506 0.92365783 1.0458702 1.0589304 0.90788865 1.0955552 1.1522596 0.9384841 0.945795 0.90610695 0.9949747 0.99905294 0.83925015 0.9666953 0.9181057 0.99591297 1.1983299 0.93719006 0.92209756 0.97243476 0.97292966 0.9546569 1.0145738 1.0939919 1.1851543 0.99103445 1.1079031 1.1083878 1.0093099 1.0394765 ]] [[0.9762512 0.9847094 0.9387052 1.233301 1.134638 0.84838307 0.91988695 1.1673814 0.9855397 0.941898 1.1733369 1.09321 1.0636867 0.91643447 1.0872415 0.9752277 0.90921956 1.038618 1.0042292 1.0761263 1.0185376 0.9894626 0.98992175 0.9878235 0.9453634 1.0244737 0.9730188 1.0919254 0.91170716 1.0478888 0.9316964 0.9168056 0.9303406 0.9483632 0.94034314 0.89951587 1.2214003 0.9122669 1.1344644 1.0650461 0.81171227 0.9911451 1.0755854 0.9664234 0.965503 0.8603828 1.0476288 0.8830919 0.9294661 1.0416979 1.0572085 0.95560515 0.90489143 1.0622472 1.0314957 0.8953376 0.9888582 0.89952636 1.035414 0.9573624 0.8798498 1.1216402 1.0969436 0.9819009 0.8541798 0.90112495 0.95280486 0.96389645 1.0111041 1.0474777 0.89012164 0.9741688 0.9645635 1.038762 0.9940344 1.0400678 1.103866 1.0630513 1.3009869 0.91518605 1.2166895 1.0688685 0.88721234 0.9115063 1.0341681 0.9460686 0.95334417 0.8054641 1.0138067 0.8971496 0.97632277 0.961901 0.9000059 1.0484647 1.1963454 1.1396122 
0.8950535 1.0831064 1.0550618 1.1843804 1.0070097 0.9285656 0.86597365 0.9132279 0.9769416 0.9282491 1.0812156 1.0830274 1.0064731 0.95266014 1.049699 0.9841065 1.1255696 0.9901515 0.9849005 0.9467273 0.8499732 0.96131194 0.9328478 0.8724606 0.9342785 0.9289521 0.96588314 0.89079237 1.033979 1.0593307 1.0631127 0.9345378 0.9161813 0.8717585 0.9883528 1.0849605 1.13525 0.8596203 0.96288246 0.9461851 1.1438652 0.965246 1.0914865 0.9283835 0.8901785 1.1113458 0.96635514 1.0648652 1.1234659 0.97206056 0.86668754 1.0938557 1.1019053 0.8983557 0.957077 1.011477 0.99479187 0.96034384 1.064629 0.9034499 0.85971904 1.1777868 0.87530017 0.9686173 1.0547853 1.1306132 0.9949402 1.05744 1.0110497 1.120569 1.1149142 1.0382289 1.146512 1.0624402 0.9838208 0.9838893 0.7503263 1.0311842 0.962941 1.1580791 0.97829705 1.0209786 1.1236696 0.98081344 1.1090236 0.9778191 0.87346905 0.9107341 1.0517126 1.0234451 0.83931166 1.0560673 1.041686 0.98665607 1.1498537 0.8369278 0.9777321 1.0082133 1.0144855 1.1074102 0.95082676 0.87565607 0.89450955 0.99433184 0.81600475 0.9889748 0.9758872 0.91777575 0.984712 0.9493734 0.87340516 0.9606229 1.100636 0.91936755 0.9177609 0.94046944 0.9597173 1.0439504 0.8539565 1.211292 0.98325217 0.9316398 0.89355385 0.9775289 0.952346 1.0310454 0.9407756 0.9746042 ]]]]; ov_res: [[[[0.98516405 1.0745417 0.89094263 1.0286925 1.0985692 0.96463585 1.0042111 1.0089315 0.9866223 1.0076305 1.1704997 1.1264237 1.0431851 1.0610845 0.9993275 0.88045675 1.0436032 0.8848073 1.0535749 1.0183505 0.8752307 0.7963596 0.84458673 0.91315997 0.9116206 1.0650665 0.868258 0.81212854 1.0738472 0.8451887 0.82674086 1.0385383 0.88742536 0.96741426 1.0027162 0.87763584 1.0813625 1.0014032 0.9505515 0.820868 1.0782088 1.0351532 0.9260629 1.0294802 0.984528 1.0774872 0.9547361 0.95632905 0.8975749 0.98002756 0.98650306 1.0791863 0.9684896 0.97185075 0.9956387 0.9724683 1.1591377 0.9447692 1.0041028 1.0472672 1.0535705 1.1400901 1.0679759 1.0520204 0.9435527 0.8642518 0.93437356 
0.810464 0.9268778 0.922183 1.1125053 0.9234025 0.9772847 0.92780066 0.9535218 0.96993846 1.0009241 0.91467965 0.8337587 0.85146666 0.96671057 1.1640656 0.93785 0.9999876 1.0289853 0.9537433 0.9408019 0.8781924 1.0036274 1.0571429 0.8975875 1.1478798 1.0227135 0.81155425 0.92407876 0.99336296 0.91107863 0.85512286 1.1454998 1.056814 1.0707289 0.7736092 1.1178997 1.2072372 0.91487825 1.1076486 1.0507658 0.95880264 1.0436887 0.96953356 0.8801175 0.9495445 1.0406911 0.9187556 0.9069962 1.0730522 0.8508787 1.0552893 1.0631267 0.8555833 1.1029271 1.1001756 0.884224 0.9650103 0.7987372 0.9797979 1.0070469 0.9094056 1.0420574 1.0123259 1.0082319 1.0416927 0.97689253 0.9203922 1.1612262 0.9676601 0.92378175 0.85918325 1.0488023 1.0116771 1.0669754 0.97194165 1.1037198 0.88683206 1.0172964 0.99302673 0.93117017 1.1637038 1.10236 0.9256565 1.1046523 1.1293893 0.9599676 0.98178333 0.9698109 0.95778906 0.83341044 1.041361 0.90489155 0.9214197 1.0199257 0.894723 1.1301439 0.8237686 0.9517008 1.084767 1.1991779 0.9136237 0.9918731 0.91718656 0.98849857 0.95804834 0.90125364 0.97509515 1.0655645 1.0383867 0.92074394 1.1024917 0.89224344 1.011127 0.82832175 1.1783102 1.0131034 1.0576866 1.1104984 1.0669286 0.9780641 1.0521908 1.1916187 1.0656787 1.0878501 1.0063508 1.0142108 1.1117657 0.9687689 0.92257196 1.0009923 0.97409195 0.95048 0.9339086 0.8631488 1.1033694 1.0138289 1.0116218 1.0036014 1.1261275 1.1410332 1.1290896 1.000247 1.121097 1.0885428 0.97385746 1.1037391 0.8940682 0.94915324 1.0797893 1.0349108 1.0813993 1.0709594 1.0620829 1.1419474 0.95920324 0.997994 1.020179 ]] [[1.0542305 0.83094925 0.99261266 1.1065831 1.0060833 1.0794846 0.98767966 0.928053 0.83083993 0.9756506 0.9537775 0.9850469 0.97905284 0.926271 1.0464566 1.0832762 0.8311326 0.9468246 1.1589226 1.040292 1.0679423 0.982862 1.0173192 0.9098011 0.87000686 0.9851244 1.1077756 0.85712606 1.1367705 0.9218449 0.83468467 1.1793207 0.8728653 1.065501 1.0411882 1.0339723 0.93266046 1.0519273 0.93720466 0.8436988 
0.93221843 0.81716436 0.9977084 1.0073694 0.8431364 0.8349479 1.1658218 0.8167117 0.93637854 0.9108828 1.0324944 0.94588953 0.8867401 0.9925245 1.0029448 0.9278652 1.0135704 1.1515571 0.9203976 0.88951856 0.8982567 1.0453907 1.1616976 0.8944126 0.94535655 0.9249104 0.9506146 1.2470295 0.9177636 1.0776008 1.0201304 0.9286052 0.9364554 1.186818 0.9596861 1.0463116 0.9040254 0.94246393 1.0947268 1.1906637 0.9374477 0.96471196 0.8794956 1.0727597 1.212489 0.9694409 0.9444557 0.9549083 0.99616873 1.0535622 0.9231866 0.9987014 0.92227393 1.0118593 0.96309525 0.8643659 0.86646557 1.0235684 0.8895153 1.0022525 1.0026287 1.1668042 0.92826176 0.999113 1.1175357 1.2148883 0.9752197 0.88429254 1.0297437 0.9861635 0.9242199 0.957672 1.0117732 0.86138207 0.88265544 1.1321036 1.0299877 1.0476671 0.9094457 1.0537707 0.919648 0.85719556 1.00915 0.96029276 1.0662156 1.0752063 0.9398318 1.032452 0.9934514 1.0033656 1.056748 0.84920305 0.9503631 1.0109326 1.1432024 1.0589787 0.9311806 1.1736352 1.0246246 0.99591666 1.0674573 0.912327 0.9840476 1.0407854 1.0542797 1.0603071 1.1346734 0.7967029 1.026326 1.0772761 0.9444537 0.795772 1.0460026 1.0419657 1.0052189 1.1449119 0.87174636 1.0665442 1.059668 0.9644972 1.1002556 0.8541168 1.1706514 0.9779936 0.90110743 0.94022053 1.0626526 0.8187604 1.0832888 0.97679865 0.9839958 1.0629079 1.0263666 1.0060016 1.0770686 0.95736235 0.96408975 1.0734831 1.0635186 0.9484148 0.9338261 1.151091 0.9475519 0.9972215 0.9231799 1.0881382 1.1339995 0.9390068 0.803823 0.9021118 0.997109 0.94837075 0.92720515 1.1858301 1.0719504 0.92365783 1.0458701 1.0589305 0.9078887 1.0955552 1.1522595 0.938484 0.9457947 0.90610725 0.9949747 0.9990531 0.8392501 0.96669537 0.9181056 0.9959129 1.1983299 0.93719035 0.92209774 0.97243494 0.97292995 0.95465714 1.0145735 1.093992 1.1851541 0.99103415 1.1079034 1.1083878 1.0093096 1.0394766 ]] [[0.976251 0.9847096 0.93870485 1.2333009 1.1346382 0.84838337 0.91988677 1.1673814 0.98553985 0.94189787 1.173337 1.0932101 1.0636867 
0.9164346 1.0872418 0.97522795 0.9092193 1.0386178 1.0042292 1.0761262 1.0185381 0.9894625 0.9899219 0.9878239 0.9453634 1.0244734 0.9730188 1.0919254 0.9117074 1.0478889 0.9316963 0.9168057 0.93034035 0.94836295 0.9403435 0.8995159 1.2214001 0.912267 1.1344646 1.0650462 0.8117124 0.99114543 1.0755851 0.96642333 0.96550256 0.8603834 1.047629 0.88309187 0.92946607 1.0416981 1.0572087 0.9556051 0.9048911 1.0622472 1.0314962 0.8953373 0.9888581 0.8995263 1.0354137 0.95736206 0.87985 1.1216406 1.0969437 0.9819011 0.85418004 0.90112513 0.95280504 0.9638968 1.0111037 1.0474777 0.89012164 0.97416866 0.9645634 1.0387617 0.9940344 1.0400679 1.103866 1.0630513 1.3009866 0.9151862 1.2166892 1.0688688 0.8872123 0.9115062 1.0341678 0.9460687 0.9533443 0.8054644 1.0138067 0.89714944 0.97632295 0.961901 0.9000058 1.048465 1.1963454 1.1396121 0.89505374 1.0831064 1.0550619 1.1843803 1.0070097 0.9285654 0.8659736 0.9132279 0.9769415 0.928249 1.0812156 1.0830275 1.0064732 0.9526599 1.0496991 0.98410654 1.1255699 0.99015135 0.9849006 0.9467271 0.8499732 0.96131176 0.9328479 0.8724607 0.93427885 0.9289519 0.9658833 0.89079225 1.033979 1.0593306 1.0631126 0.9345378 0.9161814 0.8717585 0.9883526 1.0849602 1.13525 0.8596199 0.9628824 0.94618493 1.1438653 0.965246 1.0914862 0.92838365 0.8901786 1.1113461 0.9663552 1.0648658 1.1234658 0.9720607 0.8666873 1.0938557 1.1019053 0.8983559 0.95707715 1.0114769 0.9947918 0.960344 1.0646291 0.9034497 0.8597192 1.1777867 0.8753002 0.9686173 1.0547856 1.1306132 0.9949407 1.0574404 1.0110501 1.1205689 1.1149144 1.0382286 1.1465118 1.0624402 0.98382056 0.98388946 0.75032634 1.0311846 0.9629409 1.1580793 0.97829676 1.0209787 1.1236695 0.9808133 1.1090237 0.9778193 0.87346905 0.9107341 1.0517126 1.023445 0.8393116 1.0560677 1.041686 0.9866559 1.1498537 0.8369282 0.97773206 1.0082133 1.0144852 1.1074098 0.950827 0.87565607 0.89450943 0.99433213 0.81600463 0.9889751 0.9758874 0.91777545 0.9847121 0.94937354 0.8734056 0.9606232 1.1006361 0.9193674 
0.9177614 0.9404694 0.95971733 1.0439504 0.8539569 1.2112923 0.9832522 0.93164 0.89355415 0.9775289 0.952346 1.0310452 0.94077575 0.97460383]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:-2 - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6549.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-2]]() %self.unbiased : bool = prim::Constant[value=1]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[0.9399052 0.9459303 1.1853188 0.9745441 0.9776857 0.9066831 1.211566 0.8813366 1.0239669 0.9371563 1.1766742 0.87873155 1.0841295 1.1177458 0.9924807 1.0413351 0.95914066 1.0879723 0.9687901 0.8549416 1.0184034 1.0301206 1.0024272 0.85615635 1.0185487 0.9616197 1.1297748 1.1230893 0.88839275 1.0594958 1.0581465 0.9903211 1.020001 0.85540384 1.0709345 0.89209574 0.87415427 1.0680392 0.8739215 0.78423655 0.9806861 0.9965584 0.90408933 1.1232548 1.0613704 1.0091541 0.98410034 0.8445088 1.1349373 1.0650724 1.0492535 1.3458619 0.93124044 1.0312313 0.89123505 0.8851616 0.997303 0.9328275 1.1844692 1.1681736 0.99922234 0.92641234 0.8968829 0.91273373 1.0076219 1.0648037 0.91686296 0.9427464 0.9793354 0.9193128 1.2015297 0.9787255 1.0422655 0.95742923 0.9017248 0.9742312 0.84065974 0.9247208 1.0868273 0.95230424 1.0783079 0.90468156 0.96121705 0.9220692 0.96818256 1.0599164 1.1046072 1.0989541 0.83607477 0.94946545 1.1455888 1.043539 0.940326 1.0601084 1.0316646 0.97105956 0.9774609 1.0401988 0.8885726 1.0290745 1.2122735 0.98206717 0.95489866 0.9648853 1.114145 1.0340233 0.9914077 0.96184397 1.0268936 0.9997586 0.873895 0.97979873 1.0140916 0.9684786 1.1764655 0.9403612 1.0559655 0.8001309 0.9779371 0.9562801 0.86457807 0.98074424 1.0317705 0.8550614 0.7905538 1.008363 1.0786005 0.8860737 0.9121413 0.94133115 0.8645625 0.90833426 1.0506473 1.0525839 0.9581389 0.91897696 1.0311128 1.0270628 0.92428875 1.0713478 1.073341 1.01019 1.1458446 0.9504152 0.96183026 1.0721786 0.9308675 1.0650102 1.0664753 1.0129133 0.90518796 0.8122261 0.9791298 0.93190867 1.0389395 
1.1052997 1.1273694 0.9805971 1.08604 1.0037225 1.0427786 0.6911858 0.95860296 1.2147038 1.1337445 0.9704934 0.9675121 1.0395513 0.8509817 1.0086235 0.9542354 1.0793595 1.1264522 0.93220395 0.9396734 0.85650074 1.0262164 1.1178359 0.9563329 1.0947124 0.99308574 1.0733246 0.8234356 1.0574809 0.90052336 0.8887139 0.92962545 1.025682 0.9405829 0.94158566 1.1906904 0.99263316 0.9247693 1.1375555 1.0162747 1.0222793 1.0195132 0.9331774 0.90018034 0.9349094 1.0017167 1.0146042 1.1094207 1.0536832 1.0670513 0.9531829 1.0729513 0.9951902 0.98975897 1.0922595 1.0340396 0.9628667 0.9780121 0.82969534 0.8754896 1.3261305 1.0493989 0.95478475 1.0038005 1.1322091 1.047766 1.0383149 0.9318064 0.83020854]] [[1.0338166 1.0818721 1.057986 0.85674554 0.99644417 0.91738373 1.1482798 0.88128567 1.0380076 1.0715115 1.0727241 0.9901129 0.91920155 1.0292702 1.1260899 1.0886884 1.0105516 0.9257021 0.9476169 1.0027714 0.9813695 0.9087444 0.99246037 1.0909708 0.908743 0.98435426 1.0822757 0.921784 1.0102742 0.8976394 0.97245276 1.0834509 1.1762967 1.1510372 0.9884485 1.0330135 0.99331045 1.0226339 1.066965 1.0754789 1.0239768 0.9650359 0.94462776 0.81788725 0.9994705 0.9268862 1.0853095 0.904803 0.9950676 1.0121737 0.9022842 1.0632914 0.9089429 0.9301006 1.110698 0.95624405 0.98342687 0.7687771 1.0636743 0.9666285 1.016953 0.9981421 0.903146 1.0867769 1.0193828 1.0478183 0.9713671 1.0500913 1.0966413 1.046108 1.0259542 0.9421203 0.8812132 0.98246354 1.0588918 0.996796 0.9183587 0.95309687 1.0387677 0.9711815 0.8393777 1.0797905 1.0240607 0.8693734 1.1047947 1.0644588 0.9458119 1.101931 0.8902186 1.0038993 1.024807 1.0218121 1.0872676 0.87450314 1.1596371 1.067627 0.9246684 0.9106464 1.1082194 1.0459253 1.0459261 0.96253216 0.8404941 0.93083864 1.0031954 0.97843164 0.9014695 0.9250241 1.0516492 1.0211574 1.0833213 0.9080383 0.90142006 0.8875927 0.95644623 1.1620733 0.84449065 0.94518405 1.0974565 1.0444065 1.022348 0.8683074 0.9508343 0.8201008 1.1479696 0.8768787 1.0830055 1.0461899 
1.0231152 0.82969797 1.0477409 1.0451035 1.0512471 0.8937525 1.0320106 0.8132381 0.8733591 0.94904065 0.84444183 1.1678869 1.1055789 0.9481107 1.1409198 1.0610353 1.0369052 0.9449846 0.84924775 1.0414035 1.0964248 1.0155607 0.9484177 1.1923726 1.0998809 0.9452407 1.1056265 0.97040147 1.0669478 1.0654076 1.0915316 0.9806301 0.97864246 0.89016986 0.8602268 1.0164609 0.9436456 0.89039415 1.1114345 0.98568904 0.86116165 1.0682526 0.9022752 1.0224984 1.1114334 0.92663056 1.0245473 1.162328 0.83174306 1.0612419 0.9761977 1.0156708 0.8769719 0.89263785 1.152666 1.1284125 0.9094393 1.0222981 1.1278706 1.0125383 0.9090498 0.8964558 1.0116407 0.98034954 1.0778532 1.1297843 0.9715655 1.0227163 0.9523147 1.0984577 0.9793729 0.9728375 0.9933535 0.9503121 0.80125433 1.0538465 0.99159247 0.77549785 0.9141091 0.9416233 0.8687303 1.0760825 0.93229717 0.97833544 1.0269626 0.8791877 1.0982457 0.9834048 0.79634166 0.91634744 1.0992944 0.8897176 0.9996603 0.99822587 0.8797557 0.8856394 ]] [[1.0266925 0.95448744 1.0864935 1.0998586 0.950127 1.1149974 0.99307525 1.078017 0.92552984 0.8732211 0.7388536 0.99572015 1.0124708 1.1127472 0.87857056 0.9721053 1.0802159 1.0016315 0.9821466 1.0928335 0.9414957 1.0522349 1.2012976 1.0664934 0.91019 0.9348956 1.0452857 1.0917944 1.0102805 1.1297768 0.99109805 0.8906675 0.90717417 1.014785 1.0116241 1.0449258 1.12666 0.9453033 1.0683739 0.8425983 1.0650911 1.0030159 0.8067389 0.84513366 0.93952686 0.9942074 1.1181782 1.1040908 1.0312427 0.90952426 1.1178836 0.83650905 0.9432508 0.91229695 0.9358366 1.067317 1.1416397 1.0704316 0.9227631 0.82254887 1.0710356 0.948428 0.8595061 1.0897083 0.89746153 1.0295563 0.9231531 0.91514033 0.94773537 1.0331265 0.9257724 0.9475026 1.0102328 0.9039785 1.0400565 0.887018 1.081693 0.86876976 0.93351686 1.296194 1.0220896 1.1321713 0.93614036 1.025972 1.0426918 1.0681957 0.9752235 0.9908953 1.0262641 0.99956954 0.83188564 0.8221118 1.0113277 1.2844954 0.84359866 0.8836409 1.0721966 1.1198878 1.1006655 0.87887496 
1.0949823 0.8303047 0.969607 1.016538 1.064133 1.0242114 0.7809208 1.082825 0.9461612 1.1540875 0.9880523 0.9942249 0.99064445 1.1270354 0.81425256 0.87015504 0.96815825 0.7656717 1.1734319 1.113865 0.9968023 0.9999689 0.94097817 0.94742334 0.9919001 0.8016613 1.0871774 0.98568904 0.8914852 0.98331875 1.1488968 0.9464857 0.8021812 0.91445017 0.9585937 1.0533324 0.97560585 1.1670905 0.956392 0.8630146 0.9577753 0.9474045 0.9282568 1.1316758 0.7907547 0.9037004 1.0457135 0.7932559 0.9882389 0.8350696 1.0148212 0.9656808 0.96022403 0.9386802 0.9653544 1.061375 0.8951477 0.827621 0.9878848 1.1380078 0.91223174 0.823195 1.1357332 0.9366219 0.86383516 0.9668117 0.95464253 1.0080407 1.098354 1.0120463 0.93288493 1.0338912 0.99179924 0.9024396 1.0404477 1.06228 1.059993 1.0220495 0.8273138 1.158983 1.0416118 1.0720794 0.9805306 0.90293753 1.0774069 1.0835192 0.929983 1.1471946 0.948537 0.9062825 0.99493986 0.86394775 1.134095 0.94585526 1.1183852 0.9641503 1.0872085 1.2385955 0.91131616 0.98330545 1.042619 0.99502987 0.9397375 0.9030156 0.9206136 0.9690905 0.9342233 0.89771694 0.9370179 0.9819119 1.1906087 1.0371087 0.932117 0.98982096 1.0436666 1.0586764 1.0794427 1.0931281 0.9528916 0.93774974 0.9634698 1.0576334 1.1135981 1.1834774 ]]]]; ov_res: [[[[0.9399053 0.9459303 1.1853182 0.97454435 0.97768563 0.9066833 1.211566 0.88133675 1.023967 0.937156 1.1766742 0.8787315 1.0841295 1.1177456 0.9924805 1.0413351 0.9591408 1.0879725 0.96879005 0.8549417 1.0184036 1.0301208 1.0024271 0.85615635 1.0185485 0.9616197 1.1297748 1.1230894 0.88839304 1.059496 1.058147 0.990321 1.0200009 0.8554037 1.0709347 0.8920955 0.8741541 1.0680392 0.8739217 0.78423643 0.9806861 0.9965587 0.9040897 1.1232548 1.0613706 1.0091541 0.9841005 0.8445091 1.1349375 1.0650725 1.0492536 1.3458614 0.9312405 1.0312315 0.8912352 0.8851619 0.9973031 0.9328274 1.1844692 1.1681738 0.9992223 0.9264123 0.8968827 0.91273415 1.0076218 1.064804 0.91686314 0.9427461 0.9793354 0.9193129 1.20153 0.9787255 1.0422661 
0.95742947 0.90172493 0.97423154 0.8406597 0.924721 1.0868278 0.9523041 1.0783076 0.90468174 0.9612173 0.9220691 0.968183 1.0599164 1.1046073 1.0989534 0.83607495 0.94946575 1.1455889 1.0435389 0.94032615 1.0601085 1.0316646 0.97105944 0.97746086 1.0401989 0.88857263 1.0290747 1.2122732 0.9820673 0.9548987 0.96488523 1.1141452 1.0340234 0.9914076 0.9618441 1.0268939 0.9997586 0.87389547 0.9797989 1.0140914 0.96847886 1.1764655 0.9403606 1.0559654 0.80013096 0.97793716 0.95628005 0.864578 0.9807443 1.0317706 0.8550612 0.7905543 1.008363 1.0786006 0.886074 0.9121416 0.9413314 0.8645627 0.9083343 1.0506474 1.0525842 0.95813894 0.9189768 1.0311131 1.0270627 0.92428887 1.0713477 1.0733414 1.0101895 1.1458447 0.95041555 0.96183014 1.0721793 0.9308677 1.0650103 1.0664754 1.0129135 0.9051879 0.8122262 0.9791301 0.93190897 1.0389398 1.1052994 1.1273695 0.9805971 1.0860399 1.0037227 1.0427787 0.69118565 0.95860314 1.2147036 1.1337446 0.97049314 0.9675117 1.0395516 0.85098207 1.0086237 0.9542352 1.0793595 1.1264524 0.9322039 0.9396734 0.856501 1.0262167 1.1178361 0.95633286 1.0947126 0.9930859 1.0733243 0.82343596 1.0574808 0.9005231 0.8887141 0.9296254 1.0256822 0.94058317 0.94158596 1.1906902 0.9926331 0.9247696 1.1375552 1.0162745 1.0222793 1.0195134 0.9331776 0.9001807 0.9349096 1.0017171 1.0146043 1.1094205 1.0536832 1.0670516 0.95318276 1.0729516 0.9951903 0.989759 1.0922593 1.0340396 0.9628669 0.9780123 0.82969534 0.8754898 1.3261299 1.0493985 0.954785 1.0038004 1.1322092 1.0477659 1.038315 0.9318065 0.83020866]] [[1.0338167 1.0818721 1.057986 0.8567456 0.996444 0.9173838 1.1482798 0.88128585 1.038008 1.0715115 1.0727242 0.9901131 0.91920155 1.0292703 1.1260898 1.0886886 1.0105517 0.92570174 0.9476171 1.0027715 0.98136944 0.9087444 0.99246025 1.090971 0.9087432 0.98435414 1.0822756 0.92178404 1.0102739 0.89763945 0.97245234 1.0834512 1.1762967 1.1510373 0.9884482 1.0330132 0.99331045 1.0226338 1.0669651 1.075479 1.023977 0.9650358 0.9446277 0.8178868 0.9994704 
0.9268863 1.0853099 0.9048032 0.9950677 1.0121735 0.90228415 1.0632914 0.9089425 0.93010056 1.1106975 0.95624363 0.9834271 0.76877713 1.0636746 0.96662873 1.0169528 0.9981423 0.903146 1.0867765 1.0193831 1.0478185 0.9713669 1.0500916 1.0966411 1.046108 1.0259541 0.9421208 0.88121325 0.9824635 1.0588918 0.99679613 0.9183583 0.95309657 1.0387677 0.97118187 0.8393777 1.0797905 1.0240606 0.86937356 1.1047945 1.064459 0.94581175 1.101931 0.89021885 1.0038995 1.0248066 1.0218124 1.0872678 0.8745031 1.159637 1.0676271 0.9246683 0.9106462 1.1082193 1.0459251 1.045926 0.9625325 0.84049433 0.9308386 1.0031958 0.97843146 0.9014695 0.9250241 1.0516493 1.0211573 1.0833212 0.9080383 0.9014202 0.88759243 0.95644635 1.1620729 0.8444909 0.9451841 1.0974568 1.0444067 1.0223484 0.86830753 0.95083416 0.8201006 1.1479697 0.8768788 1.0830053 1.0461897 1.0231149 0.82969785 1.0477406 1.0451035 1.0512466 0.8937528 1.0320107 0.81323797 0.873359 0.94904083 0.8444419 1.1678869 1.1055791 0.94811106 1.1409197 1.0610354 1.0369056 0.94498473 0.84924823 1.0414039 1.0964253 1.0155609 0.9484177 1.1923724 1.099881 0.9452409 1.1056261 0.97040147 1.0669479 1.0654076 1.0915316 0.98063016 0.97864264 0.89016956 0.860227 1.0164607 0.94364554 0.890394 1.1114347 0.98568904 0.8611614 1.0682529 0.902275 1.0224983 1.1114335 0.9266305 1.0245475 1.1623281 0.831743 1.0612421 0.97619784 1.0156711 0.87697184 0.89263755 1.1526655 1.1284122 0.9094395 1.0222982 1.1278707 1.0125381 0.90905005 0.8964562 1.0116405 0.98034966 1.0778531 1.1297845 0.97156507 1.0227164 0.9523147 1.0984575 0.979373 0.97283757 0.9933533 0.9503125 0.80125463 1.0538465 0.9915926 0.77549815 0.914109 0.941623 0.8687305 1.0760827 0.93229777 0.97833556 1.0269628 0.8791881 1.098246 0.9834051 0.796342 0.9163472 1.0992944 0.8897176 0.9996607 0.998226 0.8797555 0.88563967]] [[1.0266929 0.9544875 1.0864935 1.0998588 0.9501273 1.1149976 0.9930749 1.078017 0.92552984 0.8732213 0.73885375 0.9957202 1.012471 1.1127474 0.8785703 0.9721052 1.0802159 1.0016319 
0.982147 1.0928335 0.941496 1.0522352 1.2012978 1.066493 0.9101902 0.93489593 1.045286 1.0917947 1.0102807 1.1297772 0.99109805 0.8906675 0.90717393 1.014785 1.0116242 1.0449257 1.1266599 0.9453033 1.0683744 0.8425983 1.0650913 1.0030156 0.806739 0.84513354 0.939527 0.9942072 1.1181781 1.104091 1.0312428 0.9095242 1.1178834 0.83650917 0.9432504 0.91229683 0.9358364 1.067317 1.1416404 1.0704318 0.9227632 0.8225491 1.0710355 0.94842815 0.8595059 1.0897081 0.8974613 1.0295562 0.92315304 0.9151406 0.9477352 1.0331267 0.92577255 0.9475028 1.0102327 0.9039786 1.0400566 0.88701814 1.0816932 0.8687696 0.9335168 1.2961937 1.0220901 1.1321714 0.9361403 1.0259718 1.042692 1.0681959 0.9752236 0.99089515 1.0262637 0.99956954 0.83188605 0.8221118 1.0113279 1.284495 0.84359884 0.8836409 1.0721965 1.1198878 1.1006655 0.87887496 1.0949821 0.8303048 0.96960723 1.0165383 1.064133 1.0242114 0.78092086 1.0828254 0.9461606 1.1540875 0.98805255 0.9942249 0.99064445 1.1270354 0.81425256 0.87015533 0.9681583 0.76567185 1.1734319 1.113865 0.9968026 0.99996895 0.94097817 0.94742334 0.99189997 0.8016614 1.0871773 0.98568934 0.8914853 0.98331875 1.1488966 0.9464855 0.8021811 0.91445017 0.9585934 1.0533324 0.9756056 1.1670907 0.9563919 0.86301494 0.9577752 0.94740444 0.928257 1.1316758 0.79075444 0.90370065 1.0457134 0.79325575 0.98823875 0.8350698 1.0148213 0.9656811 0.96022385 0.9386801 0.9653543 1.0613753 0.89514804 0.82762074 0.98788494 1.138008 0.9122321 0.82319504 1.1357334 0.93662226 0.8638351 0.96681166 0.9546427 1.0080407 1.0983539 1.0120467 0.932885 1.0338912 0.99179894 0.9024397 1.0404476 1.0622802 1.0599929 1.0220491 0.82731366 1.1589832 1.0416118 1.0720794 0.98053086 0.90293753 1.0774068 1.0835189 0.92998284 1.1471947 0.94853705 0.90628254 0.9949398 0.863948 1.1340951 0.9458551 1.1183851 0.9641499 1.0872085 1.2385963 0.9113158 0.98330575 1.0426191 0.99503 0.93973756 0.903015 0.9206134 0.9690901 0.9342233 0.8977169 0.9370177 0.98191214 1.1906095 1.0371087 0.93211704 0.98982096 
1.0436671 1.058676 1.079443 1.0931282 0.9528916 0.93774956 0.96346986 1.0576334 1.1135978 1.1834774 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:(0, 1) - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6551.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1]]() %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[0.13092554 1.2268654 1.5554234 ... 0.83544904 1.9143851 0.24688996] [0.24281067 0.04742837 0.5533756 ... 0.6385867 0.5811758 0.32532448] [0.8878872 0.47514454 0.39503026 ... 0.8737829 0.2266764 0.18363455] ... [0.15011373 0.03619001 1.7218231 ... 0.5931209 0.7381071 0.365065 ] [0.78544074 0.88101923 0.24163686 ... 2.0720792 1.203041 2.1242743 ] [0.43591782 0.9927251 0.5354258 ... 1.2047838 0.9537239 0.02822911]]]]; ov_res: [[[[0.13092554 1.2268654 1.5554234 ... 0.83544904 1.9143852 0.24688995] [0.24281067 0.04742837 0.55337554 ... 0.6385867 0.5811758 0.32532445] [0.88788724 0.47514454 0.39503023 ... 0.8737829 0.2266764 0.18363456] ... [0.15011373 0.03619001 1.7218231 ... 0.5931208 0.73810714 0.36506498] [0.7854407 0.8810193 0.24163683 ... 2.072079 1.203041 2.1242743 ] [0.43591782 0.9927251 0.5354258 ... 1.2047839 0.9537239 0.02822911]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:(0, 1) - unbiased:True ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6553.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1]]() %self.unbiased : bool = prim::Constant[value=1]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[9.8156172e-01 7.7027762e-01 3.5283376e-02 ... 6.1122382e-01 6.2833309e-02 1.2115412e+00] [2.8098038e-01 1.3190356e+00 4.5027122e-02 ... 3.0921358e-01 9.4135910e-01 3.0662555e-01] [3.3259454e-01 4.1094890e-01 7.3487359e-01 ... 3.6580563e-02 6.0734075e-01 2.5228223e-01] ... [4.0540644e-03 1.4104000e+00 1.1284668e+00 ... 1.3214111e+00 4.0722723e+00 3.2984301e-01] [2.8068334e-01 1.0257999e+00 8.8876015e-01 ... 3.7853950e-01 2.2138319e+00 2.0334692e+00] [1.1438258e+00 2.9836351e-01 5.8774978e-01 ... 9.0274245e-01 1.3183271e+00 2.0190501e+00]]]]; ov_res: [[[[9.8156166e-01 7.7027762e-01 3.5283376e-02 ... 6.1122382e-01 6.2833309e-02 1.2115412e+00] [2.8098038e-01 1.3190355e+00 4.5027122e-02 ... 3.0921361e-01 9.4135904e-01 3.0662555e-01] [3.3259454e-01 4.1094893e-01 7.3487365e-01 ... 3.6580563e-02 6.0734075e-01 2.5228223e-01] ... [4.0540644e-03 1.4104000e+00 1.1284670e+00 ... 1.3214111e+00 4.0722723e+00 3.2984301e-01] [2.8068334e-01 1.0258000e+00 8.8876021e-01 ... 3.7853953e-01 2.2138319e+00 2.0334692e+00] [1.1438258e+00 2.9836354e-01 5.8774978e-01 ... 9.0274251e-01 1.3183271e+00 2.0190501e+00]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:(-1, -2) - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6555.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1, -2]]() %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[1.0102415]] [[1.0063676]] [[0.9938691]]]]; ov_res: [[[[1.0102413 ]] [[1.0063674 ]] [[0.99386936]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:(-1, -2) - unbiased:True ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6557.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1, -2]]() %self.unbiased : bool = prim::Constant[value=1]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[0.9990201 ]] [[0.99204504]] [[1.0099635 ]]]]; ov_res: [[[[0.9990201]] [[0.9920448]] [[1.0099639]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:(0, 1, -1) - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6559.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1, -1]]() %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[1.0366098 ] [0.8830922 ] [0.9398947 ] [0.898766 ] [0.96837056] [0.90553945] [0.9133827 ] [1.0601652 ] [0.9853822 ] [1.0196369 ] [0.95314467] [0.96496767] [1.0308131 ] [1.0215487 ] [0.96722037] [0.9924704 ] [1.0646747 ] [0.979884 ] [0.9381869 ] [1.0701822 ] [1.0644673 ] [0.940315 ] [1.0194862 ] [1.0741363 ] [0.95434463] [1.057067 ] [1.0423479 ] [0.9409025 ] [0.8956212 ] [0.99140626] [1.0107172 ] [0.9784269 ] [1.0398128 ] [1.049405 ] [0.9774825 ] [0.9668495 ] [1.0328758 ] [1.0146888 ] [0.95563453] [1.1035998 ] [0.9773434 ] [1.0598428 ] [0.9817929 ] [0.9375253 ] [0.98152757] [0.964164 ] [1.0997968 ] [0.95914644] [1.0528768 ] [1.012335 ] [0.9152567 ] [1.034135 ] [1.0771023 ] [1.0041505 ] [1.0604608 ] [1.0088717 ] [0.9587018 ] [1.0033354 ] [1.0159243 ] [0.9977883 ] [1.01668 ] [1.0182481 ] [0.95327425] [0.9743995 ] [1.0068842 ] [0.9893203 ] [1.1095102 ] [1.0195203 ] [0.9325005 ] [1.0631942 ] [1.0771859 ] [1.0164258 ] [0.97009933] [0.9713533 ] [1.0431494 ] [1.0167509 ] [0.9503693 ] [1.0475436 ] [0.9568562 ] [0.98055786] [0.9855352 ] [1.0338584 ] [0.93089896] [0.97084785] [1.0302899 ] [1.0599356 ] [0.9897204 ] [0.9011743 ] [1.0265763 ] [1.013235 ] [1.0957272 ] [0.9529543 ] [0.8936042 ] [1.0034027 ] [1.0268217 ] [1.1167539 ] [1.0498028 ] [0.9580979 ] [0.967985 ] [1.0316895 ] [0.9459458 ] [0.9996449 ] [1.0974735 ] [0.9669641 ] [1.1590555 ] [0.9797152 ] [0.98710024] [0.93571204] [1.0809207 ] [0.98949385] [1.0755361 ] [1.0536867 ] [1.112165 ] [0.97149557] [1.0245852 ] [1.0224248 ] [1.1540141 ] [1.0184957 ] 
[0.9669137 ] [0.99732524] [1.0067848 ] [1.001455 ] [1.0223726 ] [0.9688952 ] [0.9816028 ] [1.0514796 ] [0.9498989 ] [1.0894431 ] [0.985542 ] [0.9385922 ] [1.0359794 ] [1.0045426 ] [0.9826959 ] [0.9980912 ] [0.97055763] [0.9352824 ] [1.0311296 ] [0.88924503] [1.1144603 ] [1.0425032 ] [1.0014658 ] [0.963039 ] [0.8898501 ] [1.0545068 ] [0.932681 ] [1.015144 ] [1.0277306 ] [1.0211347 ] [0.98832506] [0.9369281 ] [1.0132194 ] [0.9686285 ] [1.0480801 ] [1.0614171 ] [1.0261003 ] [0.93856525] [1.0655413 ] [1.0180004 ] [0.97980154] [1.1337823 ] [0.99850744] [1.0454433 ] [1.1213895 ] [1.0116704 ] [1.0109773 ] [0.9995411 ] [0.9582886 ] [0.9796646 ] [1.0314052 ] [1.0653168 ] [0.93056643] [1.065628 ] [1.1015556 ] [1.0374005 ] [0.9759736 ] [0.94767785] [1.0294365 ] [1.0101968 ] [1.0147785 ] [0.9782574 ] [0.95111597] [0.9765017 ] [0.9991149 ] [1.029061 ] [0.94729936] [0.90269464] [1.0165393 ] [0.91096616] [1.1176705 ] [1.0335982 ] [1.0501583 ] [0.8751476 ] [0.9337375 ] [0.96490246] [0.94090474] [1.007558 ] [0.95683014] [1.0416492 ] [1.0568603 ] [1.1185392 ] [1.0695798 ] [0.9805482 ] [0.9150341 ] [1.0789629 ] [0.9782086 ] [0.92980015] [1.0200992 ] [0.886167 ] [1.0973834 ] [0.8739896 ] [0.9889923 ] [0.9454409 ] [1.04659 ] [0.9124442 ] [0.9260532 ] [0.92363644] [0.9789175 ] [0.9706985 ] [0.9953719 ] [0.9648748 ] [0.99588096] [1.0185373 ] [0.960002 ] [0.8796123 ]]]]; ov_res: [[[[1.0366098 ] [0.8830922 ] [0.93989474] [0.8987659 ] [0.96837056] [0.9055394 ] [0.91338277] [1.0601652 ] [0.9853822 ] [1.0196369 ] [0.9531447 ] [0.9649677 ] [1.030813 ] [1.0215487 ] [0.9672204 ] [0.9924703 ] [1.0646747 ] [0.979884 ] [0.93818694] [1.0701822 ] [1.0644673 ] [0.94031495] [1.0194862 ] [1.0741363 ] [0.95434463] [1.057067 ] [1.0423479 ] [0.9409024 ] [0.8956212 ] [0.9914062 ] [1.0107172 ] [0.9784269 ] [1.0398128 ] [1.049405 ] [0.9774825 ] [0.96684945] [1.0328758 ] [1.0146887 ] [0.95563453] [1.1035998 ] [0.97734344] [1.0598428 ] [0.9817929 ] [0.93752533] [0.9815276 ] [0.964164 ] [1.0997969 ] [0.9591464 ] 
[1.0528768 ] [1.012335 ] [0.9152567 ] [1.034135 ] [1.0771023 ] [1.0041506 ] [1.0604608 ] [1.0088717 ] [0.9587018 ] [1.0033354 ] [1.0159243 ] [0.9977883 ] [1.01668 ] [1.0182482 ] [0.95327425] [0.97439957] [1.0068842 ] [0.9893203 ] [1.1095102 ] [1.0195202 ] [0.93250054] [1.063194 ] [1.0771859 ] [1.0164258 ] [0.97009933] [0.97135323] [1.0431494 ] [1.0167509 ] [0.95036936] [1.0475436 ] [0.95685613] [0.98055786] [0.9855352 ] [1.0338584 ] [0.930899 ] [0.9708478 ] [1.0302899 ] [1.0599355 ] [0.9897204 ] [0.90117425] [1.0265763 ] [1.013235 ] [1.0957273 ] [0.95295435] [0.89360416] [1.0034027 ] [1.0268217 ] [1.1167539 ] [1.0498027 ] [0.95809776] [0.967985 ] [1.0316894 ] [0.9459459 ] [0.9996449 ] [1.0974735 ] [0.96696407] [1.1590555 ] [0.9797152 ] [0.98710024] [0.935712 ] [1.0809206 ] [0.98949385] [1.0755361 ] [1.0536867 ] [1.112165 ] [0.9714955 ] [1.0245851 ] [1.0224248 ] [1.1540141 ] [1.0184957 ] [0.9669137 ] [0.99732524] [1.0067848 ] [1.001455 ] [1.0223726 ] [0.96889526] [0.9816028 ] [1.0514796 ] [0.94989884] [1.0894431 ] [0.98554194] [0.9385922 ] [1.0359794 ] [1.0045426 ] [0.982696 ] [0.9980912 ] [0.97055763] [0.9352824 ] [1.0311296 ] [0.8892451 ] [1.1144602 ] [1.0425031 ] [1.0014658 ] [0.9630389 ] [0.8898501 ] [1.0545068 ] [0.932681 ] [1.015144 ] [1.0277306 ] [1.0211347 ] [0.988325 ] [0.9369281 ] [1.0132192 ] [0.9686286 ] [1.0480801 ] [1.0614171 ] [1.0261003 ] [0.9385652 ] [1.0655413 ] [1.0180002 ] [0.97980154] [1.1337823 ] [0.99850744] [1.0454432 ] [1.1213895 ] [1.0116704 ] [1.0109773 ] [0.99954116] [0.95828867] [0.97966456] [1.0314051 ] [1.0653168 ] [0.93056643] [1.065628 ] [1.1015556 ] [1.0374006 ] [0.9759736 ] [0.9476778 ] [1.0294366 ] [1.0101969 ] [1.0147785 ] [0.9782574 ] [0.9511159 ] [0.97650164] [0.9991148 ] [1.029061 ] [0.9472994 ] [0.9026947 ] [1.0165392 ] [0.9109662 ] [1.1176705 ] [1.0335982 ] [1.0501581 ] [0.8751476 ] [0.93373746] [0.9649024 ] [0.94090474] [1.007558 ] [0.95683014] [1.0416492 ] [1.0568602 ] [1.1185392 ] [1.0695798 ] [0.9805482 ] [0.91503406] 
[1.0789629 ] [0.97820866] [0.92980015] [1.020099 ] [0.886167 ] [1.0973834 ] [0.87398964] [0.9889922 ] [0.94544095] [1.04659 ] [0.9124442 ] [0.9260533 ] [0.9236365 ] [0.97891754] [0.9706985 ] [0.9953719 ] [0.9648748 ] [0.995881 ] [1.0185373 ] [0.960002 ] [0.8796123 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:(0, 1, -1) - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6561.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1, -1]]() %self.unbiased : bool = prim::Constant[value=1]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[1.025671 ] [0.9229553 ] [0.978094 ] [1.0542425 ] [1.0090576 ] [1.1602682 ] [0.9539939 ] [1.0664701 ] [0.9978151 ] [0.97759706] [1.0291438 ] [0.9829959 ] [0.9002136 ] [0.9779942 ] [0.9915907 ] [1.0365739 ] [1.064165 ] [1.007427 ] [0.96134937] [1.0216693 ] [0.9291314 ] [1.0736054 ] [1.0740936 ] [0.9446564 ] [1.0288476 ] [1.0134511 ] [0.9897604 ] [1.0932974 ] [0.9872673 ] [1.0207486 ] [1.0092893 ] [1.0088173 ] [0.98275805] [0.9084519 ] [0.96299016] [1.0504322 ] [1.0991548 ] [1.013389 ] [0.9641339 ] [1.045332 ] [0.9114578 ] [0.97614 ] [0.9511747 ] [1.0386162 ] [1.0253322 ] [1.0113193 ] [1.0322715 ] [1.0682164 ] [1.0878792 ] [1.0258453 ] [1.0344404 ] [0.9931898 ] [0.9917617 ] [1.0088454 ] [0.99725664] [0.9977593 ] [0.9515108 ] [0.970451 ] [0.9663355 ] [0.9058158 ] [1.0615903 ] [0.8364327 ] [0.9924291 ] [1.0038085 ] [1.0544735 ] [0.97599554] [1.0770221 ] [1.087781 ] [0.9780015 ] [1.0161827 ] [1.0614545 ] [1.0181254 ] [0.8757142 ] [0.8360549 ] [1.05298 ] [1.0271086 ] [1.0285083 ] [0.96607035] [1.0350391 ] [1.0138582 ] [0.9942903 ] [1.0038077 ] [1.0643605 ] [0.91467315] [1.042947 ] [1.1056577 ] [0.95384455] [0.92315316] [0.9511861 ] [0.95983154] [1.0062915 ] [1.001811 ] [0.9811367 ] [1.0261708 ] [0.92645246] [0.90679604] [1.0261548 ] [0.90749097] [1.0665087 ] [1.0139054 ] [0.83213484] [0.9307276 ] [0.9502751 ] [1.03614 ] [0.9490857 ] [0.98511857] [1.0257933 ] [1.0127038 ] [0.96421456] [0.98630154] [1.0279038 ] [1.0494288 ] [0.9077222 ] [0.9457463 ] [1.0004328 ] [1.0186175 ] [1.0028299 ] [1.0097867 ] [0.9412063 ] [1.013633 ] [0.914992 ] [1.0728669 ] [1.0119162 
] [0.9033579 ] [1.0099554 ] [0.8584061 ] [0.9489487 ] [0.98869175] [0.9602658 ] [0.9558203 ] [0.9595119 ] [1.0150462 ] [0.954554 ] [0.9709617 ] [1.0232744 ] [1.0406301 ] [1.0535057 ] [0.96571004] [0.8936857 ] [1.133817 ] [0.9743709 ] [0.9920911 ] [1.04256 ] [1.0823971 ] [1.0207595 ] [0.9057894 ] [1.0753632 ] [0.99825823] [1.0724835 ] [1.0083575 ] [0.9373601 ] [0.94380957] [0.93374383] [0.9265079 ] [0.8997566 ] [1.019757 ] [0.9244141 ] [1.0744522 ] [0.9936569 ] [1.0006758 ] [0.9283573 ] [0.961772 ] [0.90561104] [1.0042279 ] [1.1012075 ] [0.966532 ] [0.97912675] [0.999214 ] [1.0109829 ] [0.9971881 ] [0.8999761 ] [0.9768596 ] [1.0528833 ] [0.9920254 ] [0.9525445 ] [1.0337802 ] [1.0154021 ] [0.9872328 ] [0.99619526] [1.0288314 ] [0.9936656 ] [1.0412093 ] [0.91051316] [1.0456244 ] [1.0788866 ] [0.9976276 ] [1.0113941 ] [0.96572995] [0.9070881 ] [1.0191902 ] [1.1066495 ] [1.0016577 ] [1.0231369 ] [0.94282764] [0.92873305] [1.0742288 ] [0.98659897] [1.0782245 ] [0.97233576] [1.0257791 ] [1.0164498 ] [0.9759871 ] [1.0879506 ] [1.006493 ] [1.0341134 ] [1.013411 ] [0.9298641 ] [1.0747117 ] [0.90746486] [1.0355666 ] [1.0144016 ] [1.0471168 ] [1.0540739 ] [0.9828438 ] [0.9592885 ] [0.9888537 ] [0.8892962 ] [0.97000295] [1.0349424 ] [0.9451536 ] [1.0359364 ] [0.91553724] [1.0041947 ] [0.98688424]]]]; ov_res: [[[[1.0256711 ] [0.9229553 ] [0.9780939 ] [1.0542425 ] [1.0090578 ] [1.1602682 ] [0.95399386] [1.0664703 ] [0.99781513] [0.9775971 ] [1.0291437 ] [0.98299587] [0.9002136 ] [0.97799426] [0.9915906 ] [1.0365738 ] [1.0641649 ] [1.007427 ] [0.9613493 ] [1.0216693 ] [0.9291314 ] [1.0736053 ] [1.0740936 ] [0.94465643] [1.0288476 ] [1.0134511 ] [0.9897604 ] [1.0932972 ] [0.9872673 ] [1.0207486 ] [1.0092891 ] [1.0088173 ] [0.9827581 ] [0.9084519 ] [0.96299016] [1.0504323 ] [1.0991548 ] [1.013389 ] [0.9641339 ] [1.045332 ] [0.9114577 ] [0.97613996] [0.9511747 ] [1.0386162 ] [1.0253322 ] [1.0113194 ] [1.0322715 ] [1.0682164 ] [1.0878793 ] [1.0258453 ] [1.0344404 ] [0.9931898 ] 
[0.99176174] [1.0088453 ] [0.99725664] [0.9977593 ] [0.9515108 ] [0.970451 ] [0.96633554] [0.90581584] [1.0615903 ] [0.8364327 ] [0.9924291 ] [1.0038086 ] [1.0544735 ] [0.9759955 ] [1.0770222 ] [1.087781 ] [0.97800153] [1.0161827 ] [1.0614544 ] [1.0181254 ] [0.8757141 ] [0.8360549 ] [1.05298 ] [1.0271086 ] [1.0285083 ] [0.9660703 ] [1.0350391 ] [1.0138582 ] [0.9942903 ] [1.0038077 ] [1.0643605 ] [0.91467315] [1.0429469 ] [1.1056577 ] [0.9538445 ] [0.9231532 ] [0.95118606] [0.9598316 ] [1.0062915 ] [1.0018109 ] [0.9811367 ] [1.0261708 ] [0.92645246] [0.906796 ] [1.0261548 ] [0.9074909 ] [1.0665087 ] [1.0139054 ] [0.83213484] [0.93072766] [0.9502752 ] [1.03614 ] [0.94908565] [0.9851185 ] [1.0257934 ] [1.0127039 ] [0.96421456] [0.9863015 ] [1.0279038 ] [1.0494288 ] [0.90772223] [0.9457462 ] [1.0004328 ] [1.0186175 ] [1.00283 ] [1.009787 ] [0.94120634] [1.013633 ] [0.914992 ] [1.072867 ] [1.0119162 ] [0.903358 ] [1.0099554 ] [0.85840607] [0.9489488 ] [0.98869175] [0.9602659 ] [0.95582026] [0.95951194] [1.0150462 ] [0.954554 ] [0.97096163] [1.0232744 ] [1.0406301 ] [1.0535058 ] [0.9657101 ] [0.8936857 ] [1.133817 ] [0.9743709 ] [0.9920912 ] [1.0425601 ] [1.0823971 ] [1.0207595 ] [0.9057894 ] [1.0753633 ] [0.9982583 ] [1.0724835 ] [1.0083575 ] [0.9373601 ] [0.9438096 ] [0.93374383] [0.92650783] [0.8997566 ] [1.019757 ] [0.9244141 ] [1.0744523 ] [0.9936569 ] [1.0006758 ] [0.9283573 ] [0.96177197] [0.9056111 ] [1.0042279 ] [1.1012074 ] [0.966532 ] [0.9791267 ] [0.99921393] [1.0109829 ] [0.99718803] [0.899976 ] [0.97685957] [1.0528834 ] [0.9920253 ] [0.9525445 ] [1.0337803 ] [1.015402 ] [0.98723286] [0.99619526] [1.0288314 ] [0.99366564] [1.0412092 ] [0.91051316] [1.0456243 ] [1.0788866 ] [0.9976276 ] [1.011394 ] [0.9657299 ] [0.9070881 ] [1.0191904 ] [1.1066495 ] [1.0016577 ] [1.0231369 ] [0.94282764] [0.92873305] [1.0742288 ] [0.9865989 ] [1.0782245 ] [0.9723358 ] [1.0257792 ] [1.01645 ] [0.9759871 ] [1.0879506 ] [1.006493 ] [1.0341135 ] [1.013411 ] [0.92986417] 
[1.0747118 ] [0.9074648 ] [1.0355666 ] [1.0144016 ] [1.0471168 ] [1.0540738 ] [0.98284376] [0.9592885 ] [0.98885375] [0.88929623] [0.970003 ] [1.0349423 ] [0.9451536 ] [1.0359364 ] [0.91553724] [1.0041947 ] [0.9868842 ]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:(0, 1, 2, 3) - unbiased:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6563.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1, 2, 3]]() %self.keepdim : bool = prim::Constant[value=1]() %self.unbiased : bool = prim::Constant[value=0]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[[1.0020137]]]]; ov_res: [[[[1.0020136]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:True - dim:(0, 1, 2, 3) - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6565.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1, 2, 3]]() %self.unbiased : bool = prim::Constant[value=1]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[[0.997744]]]]; ov_res: [[[[0.99774384]]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:None - unbiased:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6567.aten_var, %x.1 : Tensor): %self.unbiased : bool = prim::Constant[value=0]() %self.dim : NoneType = prim::Constant() %4 : Tensor = aten::var(%x.1, %self.dim, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: 0.9969933032989502; ov_res: 0.9969930648803711 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:None - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6569.aten_var, %x.1 : Tensor): %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %self.dim : NoneType = prim::Constant() %5 : Tensor = aten::var(%x.1, %self.dim, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: 0.9993248581886292; ov_res: 0.9993248581886292 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:0 - unbiased:False ] | 0.05 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6571.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0]]() %self.unbiased : bool = prim::Constant[value=0]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]]; ov_res: [[[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]] [[0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] ... [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.] [0. 0. 0. ... 0. 0. 0.]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:0 - unbiased:True ] | 0.06 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6573.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0]]() %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]] [[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]] [[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]]]; ov_res: [[[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]] [[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]] [[nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] ... [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan] [nan nan nan ... nan nan nan]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:1 - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6575.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %self.unbiased : bool = prim::Constant[value=0]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[1.065502 0.0605551 0.17867221 ... 0.0057566 0.49611884 2.0621824 ] [1.0551257 0.03922841 0.15666988 ... 1.6404563 0.63164884 0.47699004] [0.7536393 0.48974964 0.5310304 ... 0.9042814 0.09213696 2.9305916 ] ... [1.1807581 1.2862893 0.18518355 ... 0.17951883 0.17463939 0.13590302] [0.24263789 1.4622579 0.30005994 ... 0.35292968 0.41741458 0.32293504] [0.05736116 0.9513891 3.0302851 ... 0.7470979 0.49697948 0.9242626 ]]]; ov_res: [[[1.065502 0.06055509 0.1786722 ... 0.0057566 0.4961188 2.0621827 ] [1.0551257 0.03922841 0.15666988 ... 1.6404563 0.63164884 0.47699 ] [0.7536393 0.48974967 0.5310304 ... 0.9042813 0.09213695 2.9305916 ] ... [1.1807581 1.2862893 0.18518354 ... 0.17951882 0.17463939 0.13590302] [0.24263792 1.462258 0.30005994 ... 0.35292968 0.41741458 0.32293501] [0.05736116 0.95138913 3.030285 ... 0.7470979 0.49697948 0.9242627 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:1 - unbiased:True ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6577.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[1]]() %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[5.1047944e-02 2.3850669e-01 1.3729308e+00 ... 8.8593918e-01 9.9143648e-01 3.1156120e-01] [1.1291264e+00 1.1811354e+00 2.0179443e+00 ... 6.4590406e-01 2.6328778e+00 6.0726786e-01] [1.3400658e+00 8.6143233e-02 9.5832147e-02 ... 4.3119540e+00 2.8790143e+00 1.5882196e-02] ... [1.3727292e+00 3.9017093e-01 3.2294956e-01 ... 1.1647496e+00 1.3920589e-01 1.4044410e+00] [3.3204597e-01 2.0977566e+00 4.4844031e-01 ... 5.0830877e-01 1.3158965e+00 3.3947961e+00] [1.4519283e-01 7.4126595e-04 9.8127949e-01 ... 6.3042271e-01 1.0445169e+00 6.9129187e-01]]]; ov_res: [[[5.1047944e-02 2.3850670e-01 1.3729306e+00 ... 8.8593924e-01 9.9143636e-01 3.1156120e-01] [1.1291263e+00 1.1811354e+00 2.0179443e+00 ... 6.4590406e-01 2.6328778e+00 6.0726792e-01] [1.3400658e+00 8.6143240e-02 9.5832139e-02 ... 4.3119545e+00 2.8790143e+00 1.5882198e-02] ... [1.3727292e+00 3.9017093e-01 3.2294956e-01 ... 1.1647496e+00 1.3920590e-01 1.4044409e+00] [3.3204597e-01 2.0977566e+00 4.4844037e-01 ... 5.0830883e-01 1.3158965e+00 3.3947961e+00] [1.4519283e-01 7.4126595e-04 9.8127949e-01 ... 6.3042271e-01 1.0445169e+00 6.9129193e-01]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:2 - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6579.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %self.unbiased : bool = prim::Constant[value=0]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[1.1212237 0.9861107 0.94755733 1.063743 0.97481227 1.0448241 0.9331401 0.8767607 0.9690057 1.0669332 1.0120578 1.0293893 1.0917348 1.0577224 0.9830502 0.95830923 0.86054194 0.96815765 1.018892 0.94359446 1.0909723 1.1636218 0.91802263 1.0633417 0.9056391 0.9905137 0.98669034 1.1713822 0.93112725 0.9129417 1.000331 0.91416305 0.95662206 0.9067334 1.0406383 0.91472185 1.038716 0.8966233 1.1138369 1.11627 0.8912159 1.0353751 1.3004761 1.0833026 1.0759286 0.8896024 1.0121237 1.0141459 1.001674 1.0021188 0.96524614 1.1755611 1.0266078 0.82831115 0.8206691 1.0982285 0.947454 0.98294955 1.0614754 1.1427397 1.0773926 0.8810369 0.88435084 1.0393904 1.0706165 0.9460916 1.0543579 1.0029987 1.0914837 0.862211 0.95703834 1.0269115 1.0250465 0.890706 1.19264 1.176566 0.823451 1.1143149 1.1247385 1.0512941 1.0152876 0.9581862 0.89028394 0.9493253 0.9323319 0.9718764 0.9732865 1.0261672 1.0175506 1.0161208 1.0021784 0.94746375 0.99449855 0.7801242 1.1234833 1.0591304 1.1047406 1.2639697 1.2172196 1.0178051 0.9810882 1.173414 0.96099174 1.1931103 0.91668344 0.8878852 0.96076393 0.90596944 0.9834046 1.0097755 1.0187747 0.96561193 0.9962183 0.8674255 1.0121154 1.3564212 0.93137926 0.9333161 1.077495 1.0219584 0.9597882 0.8692928 0.9245679 1.1345855 1.0129603 0.9505119 0.9663709 1.1219679 1.0429913 1.043819 1.0920384 1.081879 1.0488985 1.1055422 1.1252642 0.86719275 1.0306283 0.9820741 0.8938909 1.1316357 1.0680526 1.0866536 0.96172106 0.9282536 1.0162668 1.0156833 0.9828283 1.1674895 0.92421395 1.0766407 1.0449405 1.0513372 1.0173966 0.84516466 0.9597786 1.0487974 0.980582 
1.0996598 1.1156359 0.87866384 1.2468132 1.0622208 0.8697295 0.9377625 0.99406326 1.0583884 1.0225149 0.9457464 0.9733609 0.90221316 1.0296284 0.925755 0.85188925 1.0941756 1.039488 1.1803035 1.0553397 1.0061941 0.8588647 0.9427253 1.0485902 0.99706787 1.2179549 0.94248444 0.9204618 1.0250974 0.85691553 0.9199201 1.0036827 0.9258417 1.0276074 0.8420471 1.0304462 1.0609182 1.0225614 0.8934948 0.92862535 1.0644729 0.97966087 0.97155195 0.93564624 1.1087583 1.038573 0.93342686 0.972206 1.0375438 0.8637055 0.8415378 0.8269919 1.0172688 0.89647776 1.0777748 0.8987494 1.1025975 0.8962379 0.7954161 1.0765389 0.9790234 1.126464 0.96374416 0.9018748 0.8908943 0.98091173 0.9590901 ] [0.8997174 0.9170182 0.94645184 1.0457631 0.90527207 0.8397345 0.9139716 1.3972096 1.0962851 0.9142296 1.0267036 0.7671616 1.0047069 1.1744205 0.8741594 1.1002774 1.0967734 1.0663803 0.9744703 1.1888257 0.9571275 0.816977 1.0528989 0.99140716 0.880008 1.0799668 1.1031488 1.120404 1.0081376 1.2409949 1.0275288 0.9849871 0.85936135 0.8558836 1.1212618 0.9809996 0.98054534 1.186393 0.97618157 0.97717804 1.0501032 1.1035212 0.9746707 1.0401378 1.0789644 0.9946074 1.0492599 0.93552816 0.95709336 1.0221732 1.1219097 0.92445767 0.87468743 1.0289439 0.9278427 1.0919904 1.0399892 1.1041039 0.7469704 0.8618356 1.086709 1.0770721 0.968049 0.8404531 1.0433695 1.0788637 0.9502854 0.99741477 0.96280444 1.0442387 0.85752016 1.0042869 0.9472698 0.99061376 1.1015884 1.0300293 0.7821319 1.0407751 0.9783957 1.0200088 1.0638018 1.1078591 0.989554 1.0018139 0.9969084 1.1866894 0.8584061 1.1528935 1.0980248 1.1079924 1.2233104 0.8668668 0.9717589 1.040006 1.0391113 0.8545727 0.84682214 1.0330108 1.0736682 0.8964356 1.0463219 0.9267446 0.9092438 1.0198219 0.9573185 1.06754 1.1232623 0.93873304 0.8760017 0.91253316 0.9196841 0.99692076 1.0791073 1.0443729 1.1064043 1.1330673 1.1140447 0.98222816 1.011646 1.1671984 0.7982277 1.0553765 0.92333025 0.9890997 1.2238864 0.9633125 0.86067975 0.99292314 1.0878197 0.8821457 
0.9081157 0.7392466 0.97118884 0.9149182 1.1644281 0.9357469 1.0063195 1.1180681 1.0150092 1.1718252 1.1173831 0.9199633 0.8096563 1.0216938 0.9227578 1.0204111 1.3259234 0.9342916 0.9298858 0.860738 0.93193537 1.0012976 0.9974107 1.1652058 1.0233668 1.0715128 1.1934237 0.8709235 1.006461 0.91186565 0.9118881 1.0215919 0.9486609 1.0833218 0.8627601 0.7708378 0.97960883 0.9795089 0.95149595 0.90749425 0.91069186 0.99686754 0.9885984 0.9987082 1.0082593 1.1261392 1.0141417 1.0233129 1.1042064 0.9840177 0.99959743 0.88272816 1.0964704 0.97727907 0.9694629 1.0329546 1.0790666 0.91010886 0.9067946 1.0872896 1.0876331 1.0060912 0.9833895 1.0186304 0.9387987 0.9336285 0.91335475 1.0131018 0.9678917 1.0532202 0.95066035 0.99194807 0.9702881 1.077109 1.0731354 0.95477366 0.8864267 1.0547438 0.92172575 1.0865253 0.92941105 1.1537399 0.91414535 0.90348214 0.9604918 0.9580765 1.0249667 0.85132986 1.038057 1.0107484 1.0038892 1.0368471 1.0519146 0.9198342 ] [0.9771097 1.0956167 1.0625304 0.93157697 0.9615755 1.0210929 0.8688836 0.9214259 1.1480939 1.206992 1.0646907 1.083271 0.88972396 1.0130446 1.0949743 0.9089264 1.0465641 0.92110956 1.0780809 1.0137855 1.1065794 0.88161623 1.084619 0.97788537 0.9892259 1.1408334 0.9779299 1.0044171 0.9520529 1.1099293 1.0641028 0.91279733 1.0081356 0.971731 0.92328244 1.1340344 0.93483293 0.9877321 1.0038279 1.0507132 1.0424191 1.0502229 1.2318641 1.2136166 1.2182548 0.7928976 0.9451803 1.054304 1.0190015 0.93451965 1.0032997 1.0585322 0.84400433 0.8983988 1.0250558 0.8025058 0.9664186 0.96412325 1.0032737 0.89088506 1.0005219 1.014618 1.0810932 1.1324205 0.9614381 1.1241404 1.0133934 0.96861565 1.1133906 0.89582306 0.9649684 1.1014491 0.90058804 0.9622524 1.1456608 1.0548725 1.0720143 0.9880006 0.91228455 1.0080901 0.96435314 0.9855481 1.0103799 0.93191624 1.0672615 0.95802057 0.9589717 1.0277653 1.0776743 0.9058237 1.0771519 0.9607005 1.0555954 0.9530391 1.0255895 0.8300066 1.0320085 1.0326968 0.8599895 1.0235928 0.9557662 1.0236328 
0.9645535 0.9010485 1.177468 0.83678746 1.1682099 0.8183513 0.86891705 0.89365745 1.0784429 1.0228331 0.92514664 1.093468 0.93615425 1.0847222 0.97106624 0.8137991 1.0603201 0.99620044 1.1613039 1.0452847 1.0683938 0.944735 0.80426806 0.9146437 0.99920684 0.9691129 1.0697953 0.98486334 1.0409206 1.1696064 0.9638039 0.85667694 0.94911665 0.9603614 0.9555809 0.864132 0.88802725 0.95221364 0.9271097 0.8445692 0.9096182 0.7890073 1.1468304 1.1175575 0.9940583 0.96955794 1.1407737 1.0323862 0.91367555 0.9972187 1.0700889 0.93706864 0.92219067 1.120891 1.0676936 1.0848655 0.9263519 1.0143409 0.97126544 1.0592178 1.0199847 1.0144475 1.0537575 1.1134453 1.0202625 1.032496 0.9253483 0.8746923 0.9594866 0.9531108 0.9921112 0.8592595 1.08972 1.0189489 0.8915086 0.9147764 0.81998545 0.95844144 1.0011522 0.87470526 0.9983785 0.9265209 0.9485555 1.2080706 0.88380957 0.8141411 1.1248152 0.9467648 1.0238042 1.0353999 1.050017 1.0315158 0.98662746 0.93152326 1.1395828 0.905175 0.78562623 0.8397876 0.998524 1.0384182 0.8483169 0.9284474 0.88548243 0.971902 0.91053516 0.86270946 1.0958409 0.8739589 1.023204 0.95626026 0.986304 0.9369668 0.8841108 0.952294 0.9948347 1.0906274 0.9371104 1.0701468 1.1169226 0.98333347 0.93364775 0.9398387 ]]]; ov_res: [[[1.1212239 0.98611075 0.94755703 1.0637434 0.9748119 1.0448235 0.9331399 0.87676066 0.9690054 1.0669329 1.0120575 1.0293896 1.0917346 1.0577223 0.9830503 0.958309 0.8605419 0.9681577 1.0188917 0.9435949 1.0909723 1.1636215 0.9180223 1.0633417 0.9056393 0.9905137 0.98669016 1.1713821 0.93112767 0.91294193 1.000331 0.9141631 0.95662206 0.9067336 1.0406382 0.9147219 1.0387162 0.89662343 1.113837 1.1162702 0.8912162 1.0353752 1.3004768 1.0833026 1.0759286 0.8896027 1.012124 1.0141456 1.0016738 1.0021187 0.96524584 1.1755604 1.0266078 0.82831115 0.82066905 1.0982286 0.9474539 0.9829493 1.0614756 1.1427395 1.0773929 0.88103694 0.8843509 1.0393902 1.0706167 0.94609153 1.0543582 1.0029986 1.0914838 0.8622112 0.9570384 1.0269111 1.0250463 
0.890706 1.1926405 1.1765664 0.823451 1.1143153 1.1247386 1.0512941 1.0152875 0.9581862 0.890284 0.94932544 0.9323316 0.9718762 0.9732867 1.0261672 1.0175506 1.0161208 1.0021782 0.947464 0.9944987 0.7801242 1.1234832 1.0591308 1.1047406 1.2639694 1.2172195 1.0178055 0.9810881 1.1734136 0.9609915 1.1931103 0.91668326 0.8878852 0.960764 0.9059691 0.98340476 1.009776 1.0187749 0.96561176 0.9962181 0.86742556 1.0121151 1.3564212 0.9313789 0.9333157 1.0774949 1.021958 0.959788 0.86929286 0.924568 1.1345856 1.0129603 0.9505118 0.96637106 1.1219678 1.0429914 1.0438187 1.0920384 1.081879 1.0488989 1.1055423 1.1252644 0.8671923 1.0306284 0.9820741 0.8938907 1.1316359 1.0680524 1.0866534 0.9617209 0.92825335 1.0162668 1.0156833 0.9828283 1.1674899 0.9242139 1.076641 1.0449404 1.0513375 1.0173962 0.8451644 0.95977825 1.0487972 0.9805819 1.0996597 1.115636 0.87866366 1.246813 1.0622207 0.8697296 0.9377621 0.99406344 1.0583884 1.022515 0.9457466 0.97336066 0.9022131 1.0296283 0.9257549 0.8518892 1.0941755 1.0394886 1.1803037 1.0553398 1.0061941 0.8588648 0.94272536 1.0485901 0.99706787 1.2179549 0.94248444 0.92046213 1.025097 0.85691565 0.91992 1.0036826 0.9258416 1.0276074 0.8420473 1.0304466 1.0609185 1.0225617 0.8934947 0.9286254 1.0644727 0.9796605 0.9715519 0.93564606 1.1087583 1.0385729 0.93342716 0.972206 1.037544 0.86370546 0.84153765 0.8269921 1.0172688 0.89647764 1.0777744 0.89874965 1.1025976 0.8962378 0.7954162 1.0765387 0.979024 1.1264641 0.96374404 0.90187466 0.89089435 0.9809117 0.9590901 ] [0.8997176 0.9170181 0.9464521 1.0457636 0.9052722 0.8397346 0.9139717 1.3972098 1.0962851 0.9142295 1.0267034 0.7671615 1.0047067 1.1744204 0.8741594 1.1002772 1.0967733 1.0663804 0.9744706 1.1888256 0.9571279 0.8169769 1.0528992 0.9914071 0.8800078 1.0799668 1.1031488 1.1204041 1.0081375 1.2409947 1.0275286 0.98498756 0.85936123 0.855884 1.1212617 0.98099977 0.98054516 1.1863928 0.97618115 0.97717804 1.0501033 1.1035212 0.9746707 1.0401378 1.0789645 0.9946071 1.0492599 
0.93552834 0.95709354 1.022173 1.12191 0.92445797 0.87468725 1.0289439 0.9278426 1.0919904 1.0399895 1.1041034 0.7469701 0.86183554 1.0867093 1.0770718 0.9680487 0.8404531 1.0433697 1.0788643 0.9502853 0.99741524 0.9628045 1.0442384 0.8575201 1.0042864 0.9472698 0.9906135 1.1015886 1.0300295 0.78213215 1.0407746 0.97839606 1.0200088 1.0638015 1.1078593 0.9895538 1.0018139 0.9969086 1.1866895 0.8584062 1.1528932 1.0980245 1.1079924 1.2233105 0.86686665 0.9717588 1.0400064 1.0391115 0.85457283 0.84682244 1.0330106 1.0736682 0.89643604 1.0463214 0.9267445 0.90924394 1.0198219 0.95731866 1.0675403 1.1232622 0.9387332 0.8760018 0.9125333 0.91968393 0.99692076 1.079107 1.0443729 1.106404 1.1330677 1.1140445 0.98222905 1.0116459 1.1671984 0.7982275 1.0553763 0.92332983 0.9890992 1.2238867 0.963313 0.86067945 0.9929231 1.0878197 0.8821459 0.9081158 0.7392467 0.9711891 0.9149178 1.1644278 0.9357472 1.0063195 1.1180682 1.015009 1.171825 1.117383 0.9199632 0.8096561 1.0216936 0.9227579 1.0204111 1.3259238 0.93429154 0.9298857 0.8607378 0.9319353 1.0012976 0.99741095 1.165206 1.0233668 1.071513 1.1934232 0.8709238 1.0064609 0.9118654 0.91188794 1.0215918 0.9486614 1.083322 0.8627599 0.77083766 0.9796091 0.9795089 0.95149565 0.90749425 0.91069186 0.99686766 0.98859835 0.99870837 1.0082592 1.1261393 1.0141416 1.023313 1.1042064 0.98401785 0.99959767 0.8827282 1.0964705 0.9772795 0.96946305 1.0329549 1.0790663 0.91010886 0.90679467 1.08729 1.0876333 1.0060914 0.9833893 1.0186304 0.9387989 0.93362844 0.9133545 1.0131019 0.9678916 1.0532202 0.95066017 0.99194854 0.9702879 1.0771092 1.0731356 0.9547736 0.88642675 1.0547434 0.9217257 1.0865253 0.929411 1.1537402 0.91414565 0.9034823 0.9604913 0.95807654 1.0249666 0.85133 1.038057 1.0107481 1.003889 1.0368472 1.0519148 0.9198341 ] [0.9771099 1.0956167 1.06253 0.9315769 0.9615757 1.021093 0.868884 0.9214255 1.1480943 1.2069918 1.0646906 1.0832707 0.8897241 1.0130445 1.0949744 0.9089262 1.0465639 0.9211093 1.0780808 1.0137855 1.1065791 
0.88161623 1.084619 0.9778851 0.98922575 1.1408333 0.9779298 1.0044168 0.9520529 1.109929 1.0641026 0.91279715 1.0081352 0.9717312 0.92328227 1.134034 0.9348331 0.98773205 1.0038278 1.0507133 1.0424192 1.050223 1.2318637 1.2136167 1.2182547 0.7928975 0.9451806 1.0543042 1.0190016 0.9345197 1.0032996 1.0585324 0.84400415 0.89839846 1.0250558 0.80250543 0.9664187 0.96412325 1.0032737 0.89088553 1.0005218 1.0146182 1.0810932 1.1324203 0.96143824 1.1241405 1.0133935 0.9686155 1.1133906 0.8958229 0.9649684 1.101449 0.90058815 0.96225226 1.1456605 1.0548723 1.0720143 0.98800033 0.91228455 1.0080903 0.9643532 0.9855483 1.0103799 0.93191606 1.0672615 0.9580207 0.95897186 1.027765 1.0776746 0.90582323 1.0771515 0.9607005 1.0555954 0.9530393 1.0255895 0.83000666 1.0320085 1.0326968 0.85998964 1.023593 0.9557664 1.0236326 0.9645535 0.90104836 1.1774673 0.8367873 1.1682103 0.81835115 0.8689173 0.8936574 1.0784428 1.022833 0.92514735 1.0934676 0.93615425 1.0847222 0.971066 0.813799 1.0603201 0.99620014 1.1613041 1.0452849 1.068394 0.9447346 0.8042678 0.91464365 0.9992073 0.9691125 1.0697954 0.9848633 1.040921 1.1696066 0.9638037 0.8566764 0.9491165 0.96036154 0.9555812 0.86413187 0.8880271 0.9522138 0.9271098 0.84456927 0.9096181 0.7890076 1.1468304 1.1175573 0.9940581 0.96955806 1.1407737 1.0323865 0.91367567 0.9972187 1.0700886 0.93706846 0.9221908 1.1208912 1.0676936 1.0848653 0.9263521 1.0143408 0.9712654 1.0592178 1.0199844 1.0144475 1.0537575 1.1134455 1.0202619 1.0324959 0.9253483 0.87469214 0.9594864 0.953111 0.99211127 0.85925955 1.0897202 1.0189493 0.8915083 0.9147766 0.81998545 0.95844156 1.0011522 0.8747054 0.99837893 0.9265208 0.94855577 1.2080702 0.8838099 0.81414115 1.1248153 0.9467649 1.0238042 1.0354002 1.050017 1.031516 0.9866277 0.9315233 1.1395823 0.90517485 0.7856264 0.83978796 0.9985237 1.0384188 0.84831685 0.92844737 0.88548267 0.97190225 0.9105354 0.8627094 1.0958413 0.8739587 1.0232042 0.95626056 0.986304 0.9369664 0.88411075 0.9522944 0.9948345 
1.0906277 0.93711036 1.0701467 1.116923 0.9833334 0.9336481 0.93983847]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:2 - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6581.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[2]]() %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[1.0808636 0.8752287 0.8274725 1.0437955 0.99656564 1.1187615 1.015921 0.98908454 1.0352794 1.0449167 0.86104023 0.9179166 1.0717828 0.9974611 0.99006844 0.9734186 0.94961315 1.1386 0.8390265 0.9876643 1.063152 0.93304664 1.0356852 1.0440058 0.927395 1.1969312 0.86521834 0.95463216 0.9192415 0.907416 0.93257725 0.9871358 0.7990006 1.0237473 1.0010542 0.8799252 0.93554866 1.0809995 1.0137036 1.0887464 0.9717436 0.890353 1.0927961 0.99111205 0.88070726 1.0668607 0.93964 0.85681474 0.98115844 1.0255264 0.9840828 1.0847778 0.9661202 1.0986474 1.1225594 1.0936586 0.8955077 1.071733 1.0447384 1.1991935 0.94978577 1.0136147 1.1517487 1.1135933 1.0334179 0.87219876 1.1415901 0.95443094 0.9605285 0.8433106 1.1034836 0.99414736 1.0282277 0.97167075 0.83395505 0.8826016 0.9816461 0.85795844 1.0733123 0.91373736 0.9681491 0.88150764 1.0003883 0.9904682 1.0279989 0.82715255 0.935756 0.9540693 1.0425011 1.2092955 1.0402876 0.9110152 0.9626735 0.9886512 0.9629923 0.9827909 1.0027736 1.0646853 0.9637259 1.1129866 0.9241301 1.1022061 1.1456487 1.0350221 1.0092325 0.9693712 1.0757976 0.918636 1.1085616 0.9173004 1.0961651 0.9252532 1.060255 0.94593555 1.1578234 0.9992735 0.9810013 1.0272397 1.0332065 1.0815207 0.87398857 0.9975339 0.9943213 0.75602084 0.92047656 1.0047289 1.0216086 1.1753155 1.1114143 0.9587125 1.1120206 1.0756146 0.96553665 0.8586518 1.0348154 0.92836857 0.97498316 1.0957323 1.1184326 1.0346965 1.0483322 1.0105577 0.9672028 1.1466542 0.7784222 0.9309503 0.96810174 1.0126605 0.8252237 1.1245295 1.0079646 1.0487492 
0.9365885 0.9559871 1.0475858 1.0755609 1.2145066 1.0027838 0.99063647 0.9531506 0.9929283 0.8517515 1.2016969 1.0540845 0.8764117 0.7633484 0.8545304 0.9495475 1.1772755 0.99312544 1.0274913 1.0329969 0.8505697 0.93375593 1.1048303 0.89990985 0.84738475 1.0406708 1.047351 0.9961672 1.0046755 1.0222311 0.9883388 0.8869845 0.921585 1.1283318 0.8641771 1.0557133 0.8500106 0.9045219 1.0338017 1.1161177 1.058708 0.9598953 0.9942417 0.9597964 0.8298 1.1220565 1.003182 0.9941699 0.9984461 0.9889719 0.9337876 1.0327562 1.1616036 0.9725224 0.76892924 1.1197759 0.88870496 0.95144475 0.8746901 0.96817636 1.0049874 1.1292738 0.89112115 0.94978493 1.1056867 0.9873242 0.88991004 1.0995028 1.0200657 1.0318274 0.9800921 0.9069385 ] [0.9698945 1.043708 1.1175398 0.944311 0.9461019 1.0931374 1.0193444 1.0267142 0.93768966 0.85541975 1.0185927 1.1519796 1.090658 1.0559348 1.0320917 1.0211908 1.0150356 0.99559367 0.9794823 1.0320821 0.9740057 1.139515 0.95468783 0.78370935 1.0925008 1.0914983 0.83975214 0.9001216 0.97804284 0.97719306 1.0731448 0.9861817 1.156671 0.8418598 0.9373762 0.9241076 1.0645906 0.9126532 0.96675825 0.97185975 0.9185819 0.85243684 1.0162266 0.78923166 0.96684146 0.88509935 1.0576577 0.98412573 1.1616032 0.9119912 1.1016111 1.0913881 0.87736046 1.0463309 1.2667089 0.9319388 1.0859084 0.97590786 0.9642594 0.88313884 1.0702097 0.89744055 0.98651254 1.0031128 0.9476584 0.92781156 0.8614272 1.0062833 0.8893035 0.97235525 1.1512405 1.0471581 1.0647147 0.9460522 0.96970475 0.8403113 1.079605 1.1235715 1.0291945 0.99912846 0.955921 0.97901964 0.98565215 1.0851555 0.8820039 0.9635553 1.0396343 1.222518 0.95494044 0.9906663 1.1189779 1.0453687 1.0328863 0.9690548 0.8366949 1.0037022 0.9743885 1.0358918 0.9607591 1.1431853 1.0459508 1.0032375 0.9358543 0.8362081 1.014889 0.97137433 0.98362434 0.9633277 1.0441748 0.9521034 0.9211111 1.0728917 0.95650727 0.93429464 0.9694092 0.9080419 1.147207 0.8473953 1.2357532 0.9768573 0.88833725 0.9481697 0.9014245 0.86963534 
1.1115848 0.8273734 1.2976109 1.0825086 0.84196246 1.0087545 0.93140954 0.9069363 1.0336645 1.0660148 1.0659899 0.95613545 1.1314416 1.1458789 0.95454144 1.2429739 1.1199837 0.8808272 1.1640161 0.97172546 1.0424335 1.1108744 0.9572133 1.2067251 0.93900454 0.8771821 1.0538821 1.2257582 0.9259651 0.86546624 0.9826512 0.90704453 0.9086098 1.0171455 1.0490117 1.0464755 1.0632713 0.87027705 1.1354097 0.89841694 0.7775341 0.8543989 1.0649134 0.9908264 1.1913271 1.0250943 1.1537187 0.8292974 1.2304401 0.78777266 1.1094102 0.89598286 1.078229 0.8949382 0.93045914 1.0055536 0.95190036 1.0259213 0.88110197 1.0117612 1.0213203 0.9438017 0.9428108 1.2189733 0.9564645 1.0642023 0.9966329 0.948949 1.0250586 0.946874 0.93439656 0.9447202 0.90893054 1.1856521 0.927253 0.9004877 0.92162216 1.090649 0.99774677 1.0033827 0.9198735 0.9994285 1.0315442 1.0106031 1.1375681 0.8921601 0.9316624 1.0798621 0.7379763 0.94383866 0.97523713 1.0882868 0.8909563 1.0723836 0.8715848 0.98966706 0.9539313 0.9593774 1.0534636 1.0009283 ] [1.0706097 0.984779 0.83939767 0.9705159 1.0037315 0.8404769 1.0501485 0.95255953 1.0434856 1.0742279 0.964906 1.0858147 0.97770476 0.90400684 0.86720407 1.0503074 1.0952938 0.9771282 0.970233 0.9207996 0.90742296 1.1355383 0.9837852 0.98518735 1.1148291 0.8933318 1.1287055 1.0492781 0.9473896 0.792979 0.95920676 1.1282948 1.0070863 0.97150886 1.13914 1.1189632 0.9499942 0.91481394 1.0575058 0.8419477 1.0964667 1.077233 1.0330646 1.0578185 0.90550363 0.7576537 1.1295128 1.0930737 1.0676916 1.0791042 1.0301661 0.9191826 1.033423 0.9914842 1.0331465 1.0165219 0.9578647 0.9038489 1.0463852 0.92796826 1.1098604 1.0359942 1.0997616 0.93239164 0.90152997 0.9661249 1.1575927 0.98517853 0.8268589 1.0686762 0.9334723 0.8931578 0.9189059 0.9086126 0.9081181 1.0151361 0.8833222 0.9400067 0.96121407 1.1525736 1.1529657 0.83544976 0.96896476 0.9734848 1.0926193 1.0677145 1.0131336 1.0301203 0.9825239 1.0343828 0.88687146 1.1140684 0.94931024 1.1273967 1.2057571 1.0438045 
0.97104514 0.98560804 0.85866755 0.78259844 0.92909175 1.1063266 1.0173929 1.0844715 1.0643529 0.9413941 0.9423338 1.1239136 0.9697952 0.9789946 1.031483 1.0211135 1.1239265 0.89740396 0.951722 1.2374613 1.1515253 1.0107038 1.2209629 0.9186953 0.94441324 0.90816313 0.9814474 0.979867 1.1948845 1.2240876 0.94265294 1.0137486 1.029921 1.088164 0.96870697 1.062721 1.0942544 1.0743462 1.0393577 1.0631877 0.9856908 1.0385445 0.9806822 0.8855423 0.9909335 0.9270098 0.94359326 0.91246593 0.99898 1.05229 1.0648447 1.0001199 1.0659468 0.94329005 0.92617625 1.0366898 0.9833775 0.8927826 1.0579283 0.9513292 0.9979883 1.0253301 1.0903517 0.9004711 1.0606011 0.9656228 1.0489864 0.8441485 0.9872115 0.9524187 1.0528852 0.9434803 0.8595336 0.99665326 1.1357216 1.0110618 0.8632248 0.96302605 0.9358182 0.988538 0.9821814 0.87917215 1.0124271 0.87885016 1.1045414 1.094426 1.0362103 0.9738328 1.1495409 1.0260266 0.88435876 0.8224376 0.91083497 1.0464535 0.99592304 0.9048499 1.0558239 1.0666549 1.0919224 0.8915784 0.8697456 0.9266333 0.91904926 1.227225 1.0463164 1.1831604 1.1824844 0.8880967 1.070509 0.970607 0.9488886 0.9366075 0.84388363 1.0209022 1.0408071 1.0747656 0.79677826 0.99072033 0.8724486 0.9150925 0.92212105 0.9291437 0.89261717 1.207137 0.74554086 1.0371871 1.0545453 0.9796396 ]]]; ov_res: [[[1.0808636 0.8752287 0.8274726 1.0437951 0.99656546 1.1187617 1.0159211 0.9890844 1.0352796 1.0449172 0.86104053 0.91791666 1.071783 0.9974615 0.9900684 0.9734189 0.9496129 1.1386002 0.8390264 0.9876646 1.0631518 0.9330467 1.035685 1.0440059 0.92739546 1.196931 0.8652182 0.9546321 0.91924155 0.9074163 0.93257713 0.9871357 0.7990007 1.0237474 1.001054 0.87992525 0.9355489 1.0809997 1.0137036 1.0887465 0.97174346 0.89035296 1.0927966 0.99111205 0.88070744 1.066861 0.9396401 0.8568145 0.98115826 1.0255268 0.984083 1.0847776 0.9661195 1.0986475 1.1225597 1.0936589 0.8955074 1.0717329 1.0447383 1.1991934 0.94978565 1.0136149 1.1517485 1.1135933 1.0334178 0.87219876 1.14159 0.9544309 
0.96052855 0.84331053 1.1034837 0.99414766 1.028228 0.97167075 0.8339547 0.8826015 0.9816462 0.8579586 1.0733123 0.9137376 0.9681493 0.8815078 1.0003884 0.99046844 1.0279988 0.82715267 0.9357561 0.9540694 1.0425011 1.2092955 1.0402879 0.9110152 0.9626737 0.9886512 0.96299213 0.9827911 1.0027736 1.0646853 0.9637259 1.1129863 0.92413 1.1022058 1.145649 1.035022 1.0092324 0.96937114 1.0757979 0.91863614 1.1085615 0.91730046 1.0961654 0.92525333 1.060255 0.94593567 1.1578238 0.9992736 0.9810015 1.0272396 1.0332065 1.0815206 0.8739888 0.99753374 0.99432105 0.75602096 0.9204765 1.004729 1.0216087 1.1753155 1.1114141 0.9587125 1.1120207 1.0756145 0.96553636 0.8586518 1.0348157 0.92836833 0.9749832 1.0957322 1.1184329 1.0346966 1.0483323 1.0105574 0.96720266 1.1466542 0.7784221 0.93095034 0.9681018 1.0126606 0.82522416 1.1245291 1.0079648 1.0487491 0.9365882 0.955987 1.0475857 1.0755615 1.2145069 1.0027834 0.9906367 0.95315063 0.99292827 0.85175174 1.2016971 1.0540847 0.87641144 0.7633483 0.8545306 0.9495478 1.1772755 0.9931251 1.0274912 1.0329969 0.85056955 0.9337561 1.1048304 0.8999098 0.8473847 1.0406712 1.0473511 0.9961674 1.0046754 1.0222311 0.9883389 0.8869843 0.9215852 1.1283314 0.8641771 1.0557133 0.85001034 0.90452194 1.0338014 1.1161178 1.058708 0.9598954 0.99424154 0.9597965 0.8298 1.1220564 1.0031819 0.9941699 0.9984459 0.98897177 0.93378764 1.0327562 1.1616039 0.97252214 0.7689295 1.1197758 0.88870496 0.95144475 0.87469035 0.9681767 1.0049874 1.1292739 0.89112115 0.949785 1.1056868 0.98732394 0.8899098 1.0995029 1.0200655 1.0318278 0.98009247 0.9069386 ] [0.9698948 1.043708 1.1175401 0.94431067 0.94610196 1.0931376 1.0193444 1.0267143 0.93768966 0.85541964 1.0185927 1.1519799 1.090658 1.0559347 1.0320919 1.0211909 1.0150355 0.9955936 0.9794825 1.0320822 0.974006 1.139515 0.9546882 0.7837092 1.0925009 1.0914986 0.8397521 0.9001216 0.9780426 0.97719324 1.0731448 0.9861815 1.1566712 0.8418599 0.9373763 0.9241077 1.0645905 0.9126532 0.96675783 0.9718601 0.9185819 
0.8524369 1.0162265 0.7892319 0.96684134 0.8850996 1.057658 0.98412603 1.1616032 0.91199124 1.1016111 1.091388 0.8773602 1.0463307 1.2667091 0.931939 1.0859082 0.97590756 0.9642593 0.88313884 1.0702096 0.89744055 0.9865126 1.0031132 0.9476588 0.9278118 0.8614273 1.0062836 0.8893035 0.9723555 1.1512405 1.0471585 1.064715 0.9460521 0.96970487 0.84031135 1.079605 1.1235716 1.0291947 0.99912834 0.955921 0.97901964 0.9856526 1.0851556 0.88200384 0.9635553 1.0396347 1.2225181 0.9549404 0.9906661 1.1189783 1.0453689 1.0328864 0.96905494 0.8366947 1.0037022 0.97438854 1.0358917 0.960759 1.1431854 1.0459505 1.0032375 0.9358542 0.83620816 1.0148892 0.97137415 0.98362434 0.9633278 1.0441754 0.95210344 0.92111087 1.072892 0.9565073 0.9342947 0.9694094 0.9080419 1.147207 0.84739554 1.2357533 0.9768576 0.8883372 0.94816977 0.9014244 0.86963505 1.111585 0.8273736 1.2976105 1.0825087 0.8419625 1.0087546 0.93140924 0.9069364 1.0336643 1.066015 1.0659895 0.9561359 1.1314416 1.1458788 0.9545413 1.2429738 1.1199837 0.8808269 1.1640158 0.9717259 1.042433 1.1108744 0.95721316 1.2067251 0.9390042 0.87718225 1.0538824 1.2257582 0.9259649 0.8654661 0.98265094 0.9070444 0.90860987 1.0171456 1.049012 1.046475 1.0632713 0.8702768 1.1354101 0.898417 0.77753395 0.8543986 1.0649136 0.99082625 1.1913275 1.0250943 1.1537187 0.8292973 1.2304403 0.78777295 1.1094102 0.89598304 1.0782291 0.89493865 0.93045926 1.0055536 0.95190036 1.0259212 0.8811017 1.0117614 1.0213206 0.9438018 0.94281054 1.2189736 0.95646465 1.0642023 0.99663264 0.94894874 1.025059 0.94687414 0.93439716 0.9447202 0.9089306 1.1856527 0.9272529 0.90048784 0.9216226 1.090649 0.9977473 1.003383 0.9198735 0.99942833 1.0315446 1.010603 1.137568 0.89215976 0.93166256 1.0798621 0.7379763 0.94383854 0.97523737 1.0882868 0.8909563 1.0723836 0.8715848 0.9896672 0.95393145 0.95937794 1.0534637 1.000928 ] [1.07061 0.9847793 0.83939767 0.97051567 1.0037313 0.8404769 1.0501482 0.95256 1.0434856 1.0742283 0.9649063 1.0858148 0.9777048 0.90400696 
0.8672044 1.0503073 1.0952939 0.977128 0.9702329 0.9208 0.9074227 1.1355381 0.98378503 0.9851872 1.1148294 0.8933319 1.1287054 1.0492783 0.9473898 0.7929791 0.95920676 1.128295 1.0070868 0.9715089 1.13914 1.1189635 0.94999444 0.91481394 1.0575057 0.841948 1.0964669 1.077233 1.0330646 1.0578184 0.9055037 0.7576535 1.1295127 1.093074 1.0676916 1.0791042 1.0301664 0.91918266 1.0334235 0.99148464 1.0331466 1.0165218 0.95786464 0.90384907 1.0463852 0.9279683 1.1098605 1.0359939 1.0997617 0.9323917 0.90153015 0.966125 1.1575923 0.98517877 0.82685906 1.068676 0.933472 0.8931576 0.918906 0.9086126 0.9081182 1.0151362 0.88332254 0.9400071 0.96121407 1.1525737 1.1529659 0.8354498 0.96896476 0.9734846 1.0926193 1.0677143 1.0131339 1.0301199 0.9825237 1.034383 0.8868715 1.1140689 0.94931024 1.1273965 1.2057569 1.0438043 0.97104496 0.985608 0.85866743 0.78259844 0.9290913 1.1063263 1.017393 1.0844718 1.0643531 0.94139427 0.9423338 1.1239134 0.9697956 0.9789949 1.031483 1.0211138 1.1239265 0.8974038 0.9517221 1.2374617 1.1515253 1.0107037 1.2209632 0.918695 0.9444136 0.90816313 0.9814473 0.9798673 1.1948848 1.2240872 0.9426526 1.0137489 1.0299213 1.088164 0.9687067 1.0627208 1.0942547 1.0743464 1.0393579 1.0631877 0.9856908 1.0385449 0.9806826 0.88554263 0.9909337 0.9270096 0.94359314 0.9124659 0.99898016 1.0522901 1.0648445 1.00012 1.0659469 0.94329023 0.9261762 1.0366895 0.98337734 0.89278275 1.0579283 0.95132893 0.9979886 1.0253302 1.0903516 0.900471 1.0606012 0.9656224 1.0489864 0.8441486 0.98721147 0.9524185 1.0528852 0.9434803 0.8595336 0.9966534 1.1357214 1.0110623 0.86322504 0.9630259 0.93581814 0.98853767 0.9821814 0.8791721 1.0124269 0.87885034 1.1045412 1.0944263 1.0362101 0.97383285 1.1495411 1.0260266 0.8843592 0.8224377 0.9108347 1.0464535 0.9959232 0.90484947 1.0558242 1.066655 1.0919223 0.89157844 0.8697455 0.9266331 0.9190489 1.2272246 1.0463166 1.1831604 1.1824843 0.888097 1.0705088 0.97060716 0.9488886 0.9366071 0.8438838 1.020902 1.0408075 1.0747656 0.7967785 
0.9907205 0.8724486 0.9150929 0.922121 0.9291437 0.89261705 1.2071373 0.7455408 1.037187 1.054545 0.97963923]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:3 - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6583.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3]]() %self.unbiased : bool = prim::Constant[value=0]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[0.9814789 0.8840495 0.80599433 0.9102065 1.0908982 1.0564272 0.89206636 0.8106048 1.0573038 0.99155146 1.0578678 0.9905314 1.0708882 0.99469733 1.0069344 1.0642395 0.9871275 0.9315211 0.9868539 0.9508915 0.8875491 1.0218308 0.7723891 0.9313052 0.9813093 1.0741758 0.8636698 0.9645677 0.9885341 0.9286197 1.0088744 1.0008093 1.0903776 1.2237332 1.038835 0.9807413 0.94659245 1.1058707 0.8689865 0.9851885 1.0767542 1.0174485 0.9389866 1.0742782 1.0387172 1.0100408 0.9666986 1.0133255 0.9620998 0.9307811 0.98186773 1.0341554 1.0058142 0.9501421 0.95958203 1.0359015 1.0199727 0.88351566 0.916019 1.031859 1.1326622 0.93931913 0.89443296 1.3178012 0.95169175 0.99482757 0.90516305 0.93402296 0.86121595 1.0128522 0.89733374 0.9940906 1.0353208 0.91828245 0.9665265 0.97380996 1.0205075 0.9367239 1.055436 0.93069714 0.9107045 0.9138162 0.999913 0.8121256 1.0283873 1.0759721 0.9696052 0.9428939 0.9509382 0.8927611 0.96778584 1.0207136 1.015974 0.92712533 0.91102254 1.228019 0.85605246 0.94591564 0.9717327 0.9447429 0.93161905 1.0198776 1.0357803 1.08324 0.8682493 0.9631217 0.9345812 0.9562428 0.94055307 0.7939178 0.9300314 1.0457153 0.89811414 1.1044824 1.2188745 1.0790125 0.95280874 0.92387754 0.98129153 0.86659837 0.9036874 1.157955 0.95961004 0.9409006 1.1127436 0.90288043 0.88174415 1.062279 1.0105197 0.8127206 1.0715117 0.95033187 0.9690788 0.97303176 0.9738597 1.0596772 0.90111035 0.9395044 0.9442357 0.95804536 0.9303736 0.938287 0.9681374 1.1388762 0.83382875 0.95725274 1.046926 1.0863997 1.0385169 1.0362504 0.97076386 0.9053331 1.038078 1.0099818 1.0438912 
1.1032349 0.959668 1.0673807 1.0866498 1.1235043 0.8665552 1.1004133 0.8998545 0.9759465 0.9690695 0.9024064 1.1078309 0.97911537 0.9965196 1.0556271 1.0229126 0.82469106 1.0808846 0.9411677 1.0037993 1.0556189 0.9641496 0.86081177 1.0481261 0.9789297 1.0389884 0.9762267 1.0838776 0.8696374 0.94525665 0.8711423 1.0239553 1.0802468 0.8667759 0.987348 0.9824693 1.1463629 1.083026 0.98989433 0.93267477 1.0153853 1.0424733 0.88458663 1.0337194 1.0678672 1.0127331 1.0924007 0.8387439 0.9087421 1.0529087 1.001948 1.0007348 1.0840478 0.9752406 0.9454034 1.1684508 1.0389674 1.1078707 0.9237348 0.98265374 0.98007756 0.988451 0.7933248 1.0580853 0.89285105 1.0135565 0.9712842 0.9590705 0.8726284 ] [0.9823107 1.0439409 0.8486059 0.93479675 1.0864272 1.0983348 0.99466294 1.1294036 1.0199656 1.1271551 0.91801834 0.9171735 0.9393599 0.7759036 1.0178772 0.8877325 1.00095 0.92734605 1.0176779 0.9893006 0.84227437 1.0182714 0.9489276 1.1510054 0.93740976 0.9253544 1.0200348 1.106384 0.93928003 1.0420427 0.8872615 0.91491866 1.1480384 1.1515416 1.0645643 1.1794273 1.012887 0.9801481 1.0180396 0.9424888 0.9148782 1.0017449 0.9299855 1.1786348 0.97394747 0.977156 0.86295253 0.9815853 0.98423904 1.1223277 1.0454605 0.8860102 1.0145394 1.0015675 1.0280654 0.97111934 0.86152303 0.8363444 1.1332365 1.026189 0.8781925 1.0137792 1.0233201 0.97814 1.0262338 1.0284798 1.0368352 1.0495144 0.8857642 0.96986276 0.97759557 1.1285223 0.95570415 0.8860538 0.9493879 0.8299776 0.9809802 0.95713955 1.0349035 1.1242111 1.0034484 1.1275157 1.0675138 1.0376748 1.066427 0.8365276 0.90668905 0.9566548 0.93367606 1.0466973 1.033233 0.9410397 0.98802 0.9407948 0.9030474 0.9905558 0.91238075 0.9642482 0.903895 1.0508865 1.3011953 0.99869007 0.99751955 1.223904 1.009913 1.006569 0.8724728 0.9067598 0.9765853 1.0658264 1.151583 1.0297703 1.0022703 1.0968409 1.0350729 1.123064 0.96953547 0.9083168 1.001413 1.0309585 0.95536894 0.97823787 1.0239298 0.91809803 0.9828219 1.0162865 0.8968275 1.0724211 1.041886 
0.8976392 0.8806661 0.9913615 0.97583604 0.91511756 0.9473416 1.0104632 1.0197597 1.1475538 0.98745805 0.9116945 0.9771907 0.9762548 1.1703289 0.9054331 1.1355582 1.231612 0.9828746 1.0117998 1.0501554 1.0418695 0.9552383 0.86172986 0.94287115 1.0036302 0.9825204 0.94990605 0.9835005 0.7251714 0.85713166 0.913386 0.97098887 0.8870951 0.9413836 1.0744289 1.0483288 1.1000942 0.9915774 1.0719261 0.9921608 0.9460253 0.9301546 0.9446108 0.9947402 1.124866 0.93508846 0.9676181 0.9707409 1.0127102 0.91453886 1.0174835 1.0701833 0.969242 1.0268872 0.9329506 1.0747815 0.92728287 1.0614237 1.0564218 0.89234644 0.97578377 1.0494537 1.0065315 1.0258439 0.9508468 1.023695 0.904226 0.95632803 1.07163 0.91199976 1.0160729 1.0405247 0.91161704 0.94477266 1.0157218 0.9091713 0.9452277 0.80228543 1.1800497 0.8412347 1.0396439 1.1559994 0.92234427 0.99434733 0.9434392 0.9744038 0.9833402 1.0974319 0.94804925 0.9631778 1.1140022 1.0321882 1.0018306 0.95841897 1.0045671 ] [0.9427786 0.9787888 0.9667993 1.0469856 1.0264562 1.1017946 0.92691755 0.9423759 1.0512992 1.2382088 1.1446414 1.033261 0.9273461 1.0100838 0.95369506 1.1896704 0.8920288 1.0215155 0.91032946 0.9248466 1.0735263 0.9950642 0.9162696 0.9507156 0.8922883 0.91930586 0.87135494 1.0083865 0.9858148 0.9957662 1.1090821 0.93504894 1.0970366 0.93302304 1.0062339 1.1509186 0.92465544 1.1278535 1.0776608 0.94676435 0.9368617 0.9344646 1.2035908 1.1630735 0.86224484 0.9617366 0.9165463 0.96078295 0.9642675 0.9637978 0.8419645 0.9676565 1.0591226 0.99974483 0.98530275 0.96201444 1.0428381 1.0050359 1.1421679 0.9968394 1.1187488 0.9878107 0.95538825 0.9388704 0.9354873 1.0667032 1.0149964 0.8265625 0.96299535 0.9493371 1.0499657 0.82670975 1.0606314 0.8639961 1.1226568 1.1587687 1.0365735 0.8966784 1.0251187 0.97941524 0.9773187 0.95289797 1.2002699 0.862769 1.1292424 1.0559393 1.0418046 1.0110335 1.1824417 0.88750297 1.0318471 1.0660998 1.17132 0.99424195 1.1177893 1.0221138 0.7875353 0.93694836 0.9654298 1.1351483 0.9355836 
1.1857196 0.8943368 0.9520831 0.89471835 0.8815197 1.0228899 1.011108 0.92825115 1.062872 1.1886036 1.150199 1.0720733 1.0467516 0.917992 1.0783033 0.8867618 1.0604153 1.0256573 0.97258097 1.067043 1.0463924 0.8938883 0.98883706 0.873915 0.86251056 0.97722834 0.9218319 1.0348113 0.9892385 0.9100504 0.9914159 1.1006341 1.068211 1.1185342 1.0126015 1.0875243 1.0567 0.9562247 0.99854547 0.9462093 0.9217568 0.8282581 0.89198655 1.0701299 1.0266162 1.0157715 1.1053022 0.97735226 0.98104984 1.0497895 0.94278544 1.247634 0.92689705 1.0078125 1.0041393 1.0479783 1.0719961 0.8683342 0.98956627 1.0248908 0.9714101 0.9950251 0.97919816 1.0153155 0.98386216 0.99604774 1.1922964 0.9103689 0.90277886 1.0549017 0.9581927 1.0285712 1.0994331 0.8554594 0.9476477 1.0414144 1.0137132 1.0026674 0.9998117 0.8793447 1.0699849 0.90935594 1.0388172 0.8976339 1.155371 0.9458472 0.96368676 1.003701 1.034892 1.05464 1.1780441 1.0233493 1.019703 1.0916749 0.84404576 0.83057505 0.9693754 1.0348322 0.8492873 1.2119092 0.9040331 1.0798057 0.91108084 1.0615932 0.9308659 0.8663071 0.8915599 0.89757067 0.8869292 1.0252454 0.98242635 1.0115632 0.9547018 0.86237955 1.0437808 0.9565333 1.1118636 0.9791664 1.0504977 0.8980556 0.9767185 0.9071414 0.9293345 ]]]; ov_res: [[[0.981479 0.8840494 0.80599433 0.9102065 1.0908982 1.0564271 0.8920664 0.8106049 1.0573038 0.9915514 1.0578678 0.9905314 1.0708882 0.9946973 1.0069344 1.0642395 0.9871274 0.9315211 0.9868539 0.9508915 0.88754904 1.0218308 0.7723891 0.9313052 0.9813093 1.0741758 0.8636698 0.9645677 0.98853415 0.9286197 1.0088745 1.0008093 1.0903776 1.2237332 1.038835 0.9807413 0.94659245 1.1058707 0.86898655 0.9851885 1.0767543 1.0174485 0.93898666 1.0742782 1.0387172 1.0100409 0.9666985 1.0133255 0.96209973 0.93078107 0.9818677 1.0341552 1.0058143 0.9501422 0.95958203 1.0359015 1.0199727 0.88351566 0.916019 1.0318592 1.132662 0.93931913 0.89443296 1.3178012 0.95169175 0.99482757 0.9051631 0.93402296 0.861216 1.0128522 0.89733374 0.99409056 1.0353206 
0.91828245 0.9665265 0.97381 1.0205076 0.9367238 1.055436 0.93069714 0.9107045 0.91381615 0.99991304 0.8121256 1.0283873 1.0759721 0.96960527 0.9428939 0.9509383 0.8927611 0.96778584 1.0207136 1.015974 0.9271254 0.9110226 1.2280191 0.85605246 0.94591564 0.9717327 0.94474286 0.93161905 1.0198776 1.0357803 1.08324 0.86824936 0.9631217 0.9345812 0.95624286 0.9405531 0.79391783 0.9300315 1.0457155 0.89811414 1.1044824 1.2188746 1.0790125 0.9528087 0.9238776 0.9812915 0.8665984 0.9036874 1.157955 0.95961 0.9409007 1.1127436 0.9028804 0.8817442 1.062279 1.0105197 0.81272066 1.0715117 0.9503318 0.9690787 0.9730317 0.9738598 1.0596771 0.90111035 0.9395043 0.9442357 0.95804536 0.9303736 0.9382871 0.9681373 1.1388762 0.8338288 0.9572528 1.046926 1.0863997 1.0385169 1.0362504 0.97076386 0.9053331 1.038078 1.0099819 1.0438912 1.1032349 0.959668 1.0673807 1.0866498 1.1235043 0.8665553 1.1004132 0.89985436 0.97594655 0.9690695 0.9024064 1.1078309 0.97911537 0.99651957 1.0556271 1.0229126 0.8246911 1.0808846 0.9411677 1.0037993 1.0556189 0.9641496 0.86081177 1.0481261 0.97892964 1.0389884 0.9762267 1.0838774 0.8696374 0.94525665 0.87114227 1.0239553 1.0802468 0.8667759 0.9873481 0.9824694 1.146363 1.0830262 0.98989433 0.93267477 1.0153853 1.0424734 0.88458663 1.0337193 1.0678672 1.0127331 1.0924007 0.83874387 0.9087421 1.0529088 1.001948 1.0007349 1.0840478 0.97524065 0.9454034 1.1684507 1.0389674 1.1078708 0.9237348 0.98265374 0.9800776 0.988451 0.7933249 1.0580853 0.892851 1.0135565 0.9712843 0.9590705 0.87262833] [0.9823107 1.0439409 0.8486058 0.93479675 1.0864273 1.0983348 0.9946629 1.1294037 1.0199658 1.1271551 0.91801834 0.9171735 0.93935984 0.7759036 1.0178773 0.8877325 1.00095 0.9273461 1.017678 0.9893006 0.84227437 1.0182714 0.9489276 1.1510054 0.9374098 0.9253544 1.0200349 1.106384 0.93928003 1.0420427 0.8872615 0.9149186 1.1480385 1.1515416 1.0645643 1.1794273 1.0128869 0.980148 1.0180397 0.9424888 0.9148782 1.0017449 0.9299855 1.1786349 0.97394747 0.977156 0.86295253 
0.9815853 0.98423904 1.1223277 1.0454605 0.88601017 1.0145394 1.0015675 1.0280656 0.97111934 0.8615231 0.8363444 1.1332365 1.0261891 0.8781925 1.013779 1.0233201 0.97814 1.0262338 1.0284798 1.0368352 1.0495144 0.8857642 0.9698628 0.9775955 1.1285223 0.95570415 0.88605374 0.94938797 0.8299776 0.9809802 0.95713955 1.0349035 1.1242111 1.0034484 1.1275157 1.0675138 1.0376748 1.066427 0.8365275 0.906689 0.95665485 0.93367606 1.0466973 1.033233 0.9410396 0.98802 0.9407948 0.90304744 0.99055576 0.91238075 0.9642482 0.903895 1.0508864 1.3011951 0.9986901 0.9975195 1.2239039 1.009913 1.006569 0.8724728 0.9067598 0.9765853 1.0658265 1.151583 1.0297703 1.0022703 1.0968409 1.0350728 1.123064 0.96953547 0.90831673 1.001413 1.0309585 0.9553689 0.9782379 1.0239297 0.9180981 0.9828219 1.0162865 0.8968275 1.0724211 1.041886 0.8976393 0.88066614 0.99136156 0.975836 0.91511756 0.9473416 1.0104632 1.0197597 1.1475538 0.9874581 0.9116945 0.9771907 0.9762548 1.170329 0.90543306 1.1355582 1.2316121 0.9828746 1.0117998 1.0501554 1.0418694 0.9552383 0.8617299 0.9428711 1.00363 0.9825204 0.94990605 0.9835005 0.72517145 0.85713166 0.913386 0.9709888 0.88709503 0.94138354 1.0744289 1.0483288 1.1000941 0.99157745 1.0719261 0.99216086 0.9460253 0.9301547 0.9446107 0.9947401 1.1248661 0.9350885 0.9676181 0.97074085 1.0127102 0.91453886 1.0174835 1.0701832 0.969242 1.0268872 0.93295056 1.0747814 0.92728287 1.0614237 1.0564218 0.89234644 0.97578377 1.0494536 1.0065315 1.0258439 0.9508468 1.023695 0.904226 0.9563281 1.07163 0.91199976 1.0160729 1.0405247 0.911617 0.94477266 1.0157218 0.9091713 0.9452276 0.8022855 1.1800497 0.8412346 1.0396439 1.1559994 0.9223442 0.9943473 0.9434392 0.9744038 0.98334026 1.0974319 0.94804925 0.9631778 1.1140023 1.0321882 1.0018307 0.95841897 1.0045671 ] [0.9427786 0.9787888 0.9667994 1.0469856 1.0264564 1.1017946 0.92691755 0.94237584 1.0512992 1.2382088 1.1446413 1.033261 0.9273461 1.0100838 0.953695 1.1896704 0.8920288 1.0215155 0.9103294 0.92484665 1.0735263 
0.9950642 0.9162696 0.9507156 0.8922883 0.91930586 0.87135494 1.0083865 0.98581475 0.99576616 1.1090821 0.93504894 1.0970365 0.933023 1.0062339 1.1509187 0.9246554 1.1278535 1.0776608 0.94676435 0.9368617 0.9344646 1.2035906 1.1630735 0.8622448 0.9617367 0.9165463 0.9607829 0.96426743 0.96379775 0.8419644 0.9676565 1.0591227 0.99974483 0.9853028 0.9620145 1.0428381 1.0050358 1.1421679 0.9968394 1.1187488 0.9878107 0.9553882 0.9388704 0.9354872 1.0667032 1.0149964 0.82656246 0.96299535 0.9493371 1.0499657 0.82670975 1.0606314 0.8639961 1.1226568 1.1587687 1.0365735 0.8966784 1.0251187 0.97941524 0.9773187 0.95289797 1.2002699 0.862769 1.1292423 1.0559394 1.0418047 1.0110335 1.1824417 0.88750297 1.0318471 1.0660998 1.17132 0.994242 1.1177893 1.0221138 0.78753525 0.9369483 0.9654298 1.1351483 0.9355836 1.1857196 0.8943368 0.9520831 0.89471835 0.88151973 1.0228899 1.011108 0.92825115 1.062872 1.1886036 1.150199 1.0720735 1.0467516 0.917992 1.0783033 0.8867618 1.0604153 1.0256573 0.9725809 1.067043 1.0463924 0.8938883 0.9888371 0.873915 0.86251056 0.9772283 0.92183197 1.0348113 0.98923856 0.9100504 0.9914159 1.1006341 1.068211 1.1185342 1.0126015 1.0875242 1.0567 0.95622474 0.9985455 0.9462093 0.92175674 0.8282581 0.89198655 1.0701299 1.0266162 1.0157716 1.1053022 0.9773522 0.98104984 1.0497895 0.94278544 1.247634 0.92689705 1.0078125 1.0041393 1.0479783 1.0719961 0.86833423 0.9895663 1.0248908 0.9714101 0.9950251 0.97919816 1.0153154 0.9838622 0.9960477 1.1922964 0.9103689 0.9027789 1.0549017 0.9581927 1.0285714 1.0994331 0.85545933 0.94764763 1.0414145 1.0137131 1.0026674 0.9998117 0.87934464 1.069985 0.909356 1.0388172 0.8976339 1.155371 0.9458472 0.96368676 1.0037011 1.034892 1.0546399 1.1780442 1.0233492 1.019703 1.0916749 0.84404576 0.83057505 0.9693753 1.0348322 0.84928733 1.2119092 0.9040331 1.0798057 0.9110809 1.0615932 0.9308659 0.86630714 0.8915599 0.8975706 0.8869292 1.0252454 0.98242635 1.0115632 0.9547018 0.86237955 1.0437808 0.9565333 1.1118635 0.97916645 
1.0504978 0.89805555 0.9767185 0.9071414 0.9293345 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:3 - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6585.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[3]]() %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[0.9402174 0.9940481 1.0196702 0.9392702 0.9684674 0.9326967 1.0710429 1.0866019 0.950927 0.9868151 0.81357944 1.0270206 0.98184776 0.905932 0.9998476 0.8596148 0.91333646 1.024117 1.0053902 0.97489244 0.93923503 0.8397249 1.1191787 1.0382926 0.89157856 1.1852194 1.0225127 1.0919836 1.1508398 0.9999493 0.9066764 1.0266981 1.0824863 1.1593457 0.9554979 0.7805185 1.0650619 1.0466472 0.95472085 0.8673103 0.96072656 1.0787714 1.0792861 1.0065687 0.9826707 1.1671795 1.0482839 1.0337387 1.0472828 0.9150882 0.96179503 0.8863842 0.8238674 1.0631386 0.9998591 0.8926254 1.0958805 0.94303244 0.91708946 0.9183181 0.94790566 1.150858 1.0476968 0.99363965 1.0639846 1.0772735 1.0085424 1.1292741 1.1700261 0.93565804 1.0672278 1.1029329 1.0446681 1.0214672 1.0311104 0.9406318 1.0680383 0.90909874 1.0693145 1.0686718 1.116813 1.0087521 1.0617611 0.9934642 0.9099416 1.099621 1.1401737 0.87596774 1.1192201 1.0545498 1.1669261 1.1145362 0.86623335 0.9005048 0.96456534 1.0039531 0.9499927 0.9220625 0.9150955 1.0799861 0.9650009 0.9730998 0.99441785 0.798955 0.86998457 0.8811602 1.202691 0.956577 1.1770939 0.8047259 0.9824779 1.1470218 0.9236532 1.1354367 0.94231516 0.86780137 0.9560916 0.87272197 0.9851165 1.0653611 1.0350052 1.1187807 0.9919813 1.1208766 1.1665502 1.1796391 0.9394202 0.95615953 1.0835148 0.8991983 0.8870598 1.0850191 1.0171552 1.0558894 0.9595383 0.9392934 1.0305419 0.87992334 1.1969944 0.9655835 0.9667291 0.86660147 0.80393714 1.0214865 0.8796588 1.2649465 1.0179083 1.0093652 0.88098687 0.9539844 1.0003703 
1.0593123 1.0170856 0.9843791 0.98335576 1.0550023 1.0556399 1.0394872 1.068368 1.0338128 0.91771996 0.8914094 0.9986422 1.0025163 0.9787381 0.94897586 0.9585413 1.0335569 1.2993144 1.0188609 0.94130486 0.94065064 1.2208196 0.98082143 1.0944055 1.0843253 0.89462656 1.0927536 0.8532998 1.0156745 0.97717756 1.11616 1.0268635 1.1209959 0.96371937 1.0221465 1.1214513 0.84671086 1.0061855 1.1506613 1.0672214 0.9011641 1.0464356 1.0417366 0.950648 0.8387601 0.97434556 0.9124845 1.0520554 0.99558294 0.9388257 0.7789625 1.0412092 0.97492445 0.9688844 0.9497712 1.1083345 0.9536665 0.8928439 1.0452266 0.84313214 1.1683463 0.8832405 1.1043649 0.8198952 1.0402026 1.0227927 1.0765285 0.9808846 0.94304967 0.9782504 0.8840803 1.0341245 0.9803924 ] [1.0394537 0.9746656 0.9694645 1.0864202 1.0182577 0.87228435 0.8614739 0.79514307 1.0311011 0.9910564 0.9364686 1.1125426 0.85992974 0.9466829 0.97717136 1.0798777 0.8617339 0.95487845 1.0430138 1.0466272 1.1670369 1.04594 0.83852935 0.96733016 0.86857516 0.88943374 0.9190369 1.0454812 1.0406374 0.8651778 1.0705808 1.2163016 0.8863254 1.0202637 1.0926325 1.0887889 0.95868546 1.0378796 0.89208627 0.97017485 0.90866864 1.0467653 0.89708686 0.9783366 0.9952664 0.93725485 0.988022 0.87435305 0.8400992 1.0622022 0.872383 0.69867015 1.1530789 1.0718004 1.0820463 0.8918265 1.0391959 1.004958 0.9530387 1.0842159 0.9603231 1.1855623 0.89032793 0.9460928 1.0388504 1.0084908 0.9754918 0.92125785 0.9431387 0.8483599 0.988843 0.86712646 0.9116687 1.0220448 1.1352473 0.9923201 0.97505826 0.95474994 0.9476775 1.089508 1.0105224 0.99827963 0.9958948 0.83162224 1.071117 0.9730377 1.0305609 0.8807739 0.9309496 1.0562131 1.0009803 0.9343006 0.8334743 1.0392736 1.1996021 1.0168368 1.0989401 1.167534 0.8674381 0.9999445 1.113921 1.0550152 0.87586874 1.029095 0.9851171 0.87871706 0.8974854 1.01673 1.1174686 1.1465676 0.9850173 0.7966006 1.0048928 0.8955454 0.88025093 1.013051 0.9586866 1.0842447 0.9664745 1.0087956 0.9171242 0.93162006 0.8834554 1.015725 
1.0733827 1.0617851 0.9136249 1.0863593 0.9020879 1.0100228 0.74120355 0.94415724 1.0699434 0.9232972 1.0226332 0.8510276 0.95209736 1.1481689 0.9231958 1.189766 1.0331471 1.1383448 0.8781273 1.1770484 1.0240284 0.9166319 0.929342 1.1737442 1.1261778 1.0474299 1.1846212 1.1091629 0.9177328 1.1003777 1.050342 1.0716454 0.97290444 1.0994918 0.9798193 1.1331772 0.96868026 1.0210966 0.92434734 0.92021483 1.1277328 0.9548113 1.0606356 0.90085626 0.9951083 0.9107824 0.983712 1.1130224 1.0199922 0.9284897 1.0546819 1.0255218 0.9829403 0.98308223 1.1583998 0.9131699 0.9289502 0.95693976 0.9948315 1.1613927 0.8613262 1.0182693 0.94159025 0.9638025 1.0717682 1.0038638 1.1028737 0.85361785 1.0433052 0.96474826 1.0468988 0.97240686 0.9505621 1.0569118 1.0798389 1.1067923 0.97178733 0.80984455 0.8716593 1.1866229 1.0838854 1.0964113 0.9199087 1.0706671 0.90436053 1.042769 1.1111237 0.84085083 0.93158317 0.92377055 0.9387288 1.0343302 0.94214725 1.0351523 1.0322437 0.8954012 0.8911509 1.1831428 1.0260514 0.82465684] [1.1641884 1.0672028 1.0835221 1.0828724 0.9763438 0.9981858 0.8471345 1.0528078 0.87579995 0.95378584 0.97939765 0.91415757 1.0813403 0.9259208 1.0664998 1.1449333 0.95839363 1.0775626 1.0031966 0.98781526 1.0267951 1.1545372 1.0911417 1.0046957 0.8691778 1.0163151 1.0187067 1.1028285 1.0880653 0.9782573 0.979686 1.1598055 0.9315032 0.86814034 0.98690414 1.1000707 0.96529347 0.9720226 0.90716195 0.8871094 1.2354611 1.0363897 1.0087944 0.959575 0.9249807 0.84750473 0.9323506 0.7984549 1.0851603 1.0009725 1.0345817 0.9311531 1.0407469 0.9149386 0.99071443 1.037391 1.0007529 1.0176116 1.1100537 0.9425832 1.1201007 0.84261763 1.0998881 1.0854534 1.0685645 0.94497883 0.8751375 1.0024042 0.9643961 1.1133422 1.0711267 1.11404 0.9467024 1.0434742 1.0875913 0.97215706 1.0930935 1.0793691 0.78793126 1.0425068 1.0643319 1.0375861 1.1561052 1.0046755 1.1118724 0.8369005 1.2090802 0.95320326 1.2081752 1.2030373 1.1337098 1.0655355 0.8721229 0.9613863 1.0380607 1.048804 0.8665686 
0.99272287 1.1055379 0.89394903 0.92147547 0.8404571 0.98681176 1.0406067 1.0224639 0.97879434 0.7489265 1.0102029 0.89053136 1.0405275 0.9856252 1.0610843 0.96608955 1.1225679 0.91972077 0.9623219 1.0343637 1.0967542 0.92545974 1.0526005 1.0403885 1.0429641 1.0580887 1.0674648 1.2492872 1.1053584 0.9714608 0.8846728 1.1271625 1.0361077 0.9111592 0.89830005 0.9358413 1.0775461 1.0680261 0.8732125 0.8696302 0.95338583 1.0938138 0.9325568 0.9468241 1.0111997 0.8606754 0.92049354 1.0418789 1.072328 1.1197438 1.0708632 0.8734625 1.0552016 0.87758535 1.0747219 0.86086094 0.9338535 0.9222031 1.1930292 1.0631286 1.1041125 1.0038579 0.99217606 0.90166247 1.2063633 1.1620499 0.96168756 0.9382142 1.1697246 1.086221 1.0582681 0.95876515 0.93852043 0.9208172 0.8917788 0.99258643 1.0108396 0.8418673 1.0580999 1.1569064 0.96554476 0.91484535 1.0601618 0.9229298 0.9354404 0.9721885 0.96925646 1.0206689 0.9799308 0.8424477 1.0440892 0.8720326 1.3207968 1.0910732 1.0126458 1.2797605 1.0280682 1.0445098 1.063426 1.0630751 0.9917385 0.9600441 1.120244 0.99910265 1.0828677 0.9654665 0.9342905 1.0755628 0.9817999 0.9627432 1.0567507 0.938771 1.0387431 1.0372453 0.93847376 0.9856131 0.97311527 0.9463286 1.107003 1.1799822 1.006384 1.068914 1.1183335 0.9587211 1.0029384 0.9517324 0.95970213]]]; ov_res: [[[0.9402175 0.9940481 1.0196704 0.9392702 0.9684675 0.93269676 1.0710429 1.086602 0.9509271 0.9868151 0.8135795 1.0270208 0.98184776 0.905932 0.9998476 0.85961485 0.9133366 1.0241171 1.0053903 0.97489244 0.93923503 0.8397249 1.1191788 1.0382926 0.8915786 1.1852194 1.0225127 1.0919836 1.1508398 0.99994934 0.9066765 1.0266981 1.0824863 1.1593459 0.955498 0.7805186 1.0650619 1.0466473 0.954721 0.86731035 0.9607265 1.0787714 1.0792861 1.0065687 0.9826707 1.1671795 1.0482839 1.0337386 1.0472828 0.91508824 0.96179503 0.8863842 0.8238675 1.0631386 0.99985915 0.89262545 1.0958805 0.9430325 0.9170895 0.91831815 0.9479057 1.1508582 1.047697 0.9936397 1.0639848 1.0772736 1.0085425 1.1292741 
1.1700262 0.9356581 1.067228 1.1029329 1.0446682 1.0214672 1.0311104 0.94063187 1.0680383 0.90909874 1.0693145 1.0686718 1.116813 1.0087521 1.0617611 0.99346423 0.9099417 1.099621 1.1401738 0.87596786 1.1192201 1.0545497 1.1669261 1.1145363 0.8662334 0.90050495 0.96456546 1.0039531 0.9499928 0.9220626 0.9150955 1.0799862 0.965001 0.9730998 0.9944179 0.798955 0.86998457 0.8811602 1.202691 0.9565771 1.177094 0.804726 0.9824779 1.147022 0.9236533 1.1354369 0.9423153 0.86780137 0.9560917 0.872722 0.98511666 1.0653611 1.0350053 1.1187809 0.9919814 1.1208767 1.1665503 1.1796392 0.93942034 0.9561596 1.0835149 0.89919835 0.8870598 1.0850191 1.0171552 1.0558892 0.9595384 0.9392934 1.0305419 0.8799234 1.1969945 0.96558356 0.9667291 0.8666014 0.8039372 1.0214866 0.8796588 1.2649465 1.0179085 1.0093652 0.8809868 0.9539844 1.0003704 1.0593125 1.0170856 0.9843792 0.98335576 1.0550023 1.05564 1.0394872 1.0683681 1.0338129 0.91772 0.89140946 0.99864227 1.0025163 0.9787382 0.94897586 0.9585413 1.0335569 1.2993144 1.018861 0.94130486 0.94065076 1.2208198 0.9808215 1.0944055 1.0843254 0.89462656 1.0927536 0.85329986 1.0156745 0.97717756 1.11616 1.0268633 1.1209959 0.9637194 1.0221465 1.1214513 0.8467109 1.0061855 1.1506613 1.0672214 0.9011642 1.0464356 1.0417365 0.95064807 0.8387601 0.9743456 0.91248465 1.0520554 0.995583 0.93882585 0.77896243 1.0412092 0.97492445 0.9688844 0.9497713 1.1083345 0.95366657 0.89284384 1.0452266 0.84313214 1.1683464 0.88324064 1.1043649 0.81989515 1.0402027 1.0227927 1.0765287 0.9808846 0.9430497 0.97825044 0.88408035 1.0341246 0.98039246] [1.0394537 0.97466564 0.96946454 1.0864202 1.0182579 0.8722844 0.8614739 0.79514307 1.0311011 0.9910565 0.9364686 1.1125427 0.85992974 0.946683 0.9771715 1.0798779 0.861734 0.9548785 1.0430138 1.0466272 1.1670369 1.04594 0.83852935 0.96733016 0.8685752 0.88943374 0.91903704 1.0454812 1.0406374 0.8651778 1.070581 1.2163017 0.8863255 1.0202637 1.0926328 1.0887889 0.9586855 1.0378796 0.8920863 0.97017485 0.90866876 
1.0467653 0.89708686 0.9783367 0.9952664 0.9372548 0.988022 0.8743531 0.8400993 1.0622021 0.87238306 0.69867015 1.1530789 1.0718004 1.0820463 0.8918265 1.039196 1.0049579 0.9530387 1.0842159 0.96032315 1.1855624 0.89032793 0.94609284 1.0388505 1.0084908 0.9754918 0.92125785 0.94313866 0.84836 0.9888431 0.8671265 0.9116688 1.0220449 1.1352473 0.9923202 0.9750583 0.95475 0.94767755 1.089508 1.0105225 0.9982797 0.99589485 0.83162224 1.071117 0.9730378 1.0305609 0.880774 0.9309497 1.0562131 1.0009803 0.9343006 0.8334744 1.0392736 1.1996022 1.0168369 1.0989403 1.1675339 0.86743814 0.99994457 1.113921 1.0550153 0.8758688 1.0290952 0.98511714 0.8787171 0.89748544 1.01673 1.1174686 1.1465676 0.98501736 0.79660064 1.0048928 0.8955454 0.880251 1.013051 0.95868665 1.0842446 0.96647453 1.0087957 0.9171243 0.9316201 0.88345546 1.0157251 1.0733827 1.0617851 0.91362494 1.0863594 0.90208805 1.0100228 0.7412036 0.94415724 1.0699435 0.9232973 1.0226333 0.8510277 0.9520974 1.148169 0.9231958 1.189766 1.0331472 1.1383449 0.87812734 1.1770486 1.0240284 0.9166319 0.929342 1.1737442 1.1261778 1.04743 1.1846213 1.1091629 0.9177328 1.1003778 1.0503421 1.0716455 0.97290456 1.0994918 0.9798193 1.1331772 0.9686803 1.0210967 0.9243474 0.9202149 1.1277329 0.9548114 1.0606357 0.9008563 0.99510837 0.91078246 0.9837121 1.1130224 1.0199922 0.9284897 1.0546821 1.0255218 0.9829404 0.98308235 1.1583999 0.91317 0.92895025 0.9569399 0.9948315 1.1613928 0.8613263 1.0182693 0.9415903 0.9638026 1.0717683 1.0038639 1.1028737 0.85361785 1.0433052 0.96474826 1.046899 0.972407 0.9505621 1.056912 1.079839 1.1067923 0.9717874 0.8098446 0.8716594 1.1866229 1.0838854 1.0964115 0.91990864 1.0706671 0.90436053 1.0427692 1.1111238 0.8408509 0.93158317 0.9237706 0.9387288 1.0343302 0.94214725 1.0351524 1.0322438 0.89540124 0.89115095 1.1831429 1.0260515 0.82465684] [1.1641885 1.0672029 1.0835222 1.0828724 0.97634387 0.9981858 0.84713453 1.0528078 0.8758 0.9537859 0.97939765 0.9141576 1.0813404 0.92592084 1.0665 
1.1449333 0.95839363 1.0775626 1.0031966 0.9878153 1.0267953 1.1545372 1.0911418 1.0046958 0.8691778 1.0163152 1.0187067 1.1028285 1.0880653 0.97825736 0.9796861 1.1598054 0.9315031 0.86814046 0.9869042 1.1000707 0.9652936 0.9720226 0.907162 0.8871095 1.2354612 1.0363897 1.0087944 0.95957506 0.9249807 0.8475048 0.93235064 0.798455 1.0851603 1.0009725 1.0345818 0.93115324 1.0407469 0.9149387 0.9907145 1.0373911 1.000753 1.0176116 1.1100538 0.9425832 1.1201007 0.8426177 1.0998882 1.0854535 1.0685647 0.9449789 0.87513757 1.0024043 0.9643962 1.1133422 1.0711267 1.11404 0.9467025 1.0434743 1.0875914 0.97215706 1.0930936 1.0793691 0.78793126 1.042507 1.064332 1.0375861 1.1561053 1.0046756 1.1118724 0.83690053 1.2090802 0.9532033 1.2081751 1.2030374 1.1337099 1.0655357 0.8721228 0.9613863 1.0380607 1.048804 0.8665687 0.992723 1.105538 0.89394915 0.92147547 0.8404571 0.9868118 1.0406066 1.0224639 0.97879446 0.7489266 1.0102029 0.8905314 1.0405273 0.9856253 1.0610843 0.9660896 1.122568 0.9197208 0.96232194 1.0343636 1.0967543 0.92545986 1.0526006 1.0403885 1.0429641 1.0580888 1.067465 1.2492874 1.1053584 0.9714608 0.8846729 1.1271626 1.0361078 0.9111593 0.8983002 0.93584126 1.0775461 1.0680261 0.8732126 0.8696303 0.95338583 1.0938139 0.9325568 0.94682413 1.0111997 0.8606754 0.92049354 1.0418789 1.072328 1.119744 1.0708634 0.87346256 1.0552018 0.8775854 1.074722 0.86086094 0.93385357 0.9222031 1.193029 1.0631288 1.1041125 1.003858 0.99217606 0.90166247 1.2063633 1.16205 0.9616876 0.93821424 1.1697246 1.0862211 1.0582682 0.9587652 0.93852055 0.92081726 0.8917789 0.9925865 1.0108397 0.8418674 1.0580999 1.1569064 0.9655449 0.9148454 1.0601621 0.9229299 0.93544054 0.97218853 0.9692565 1.020669 0.9799308 0.84244776 1.0440892 0.87203264 1.3207967 1.0910732 1.012646 1.2797606 1.0280683 1.0445098 1.063426 1.0630752 0.99173856 0.96004415 1.1202441 0.9991027 1.0828679 0.96546656 0.9342906 1.0755628 0.98179996 0.9627433 1.0567508 0.93877095 1.0387433 1.0372453 0.9384738 0.9856131 
0.97311527 0.9463286 1.107003 1.1799823 1.0063841 1.068914 1.1183336 0.9587212 1.0029384 0.95173246 0.9597022 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:-1 - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6587.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1]]() %self.unbiased : bool = prim::Constant[value=0]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[1.2417729 1.0630659 1.040485 0.90511036 0.9371176 1.0280173 1.1155512 0.8104053 1.0688652 0.90674627 0.92544234 1.1126729 1.0572412 1.0408384 1.0497478 1.1188763 0.90621555 1.0669177 0.92396057 0.87878776 1.0953721 0.92383075 1.0659176 0.9819316 1.003892 1.0896678 0.8183153 0.89205724 1.0545462 1.07249 1.1988256 1.017341 0.8781385 1.0910227 1.195338 1.0647037 0.9888171 1.0698075 0.9877523 1.0734217 1.1706138 1.1577299 1.0596513 0.9847791 1.1547183 1.0819185 1.0129619 1.0511384 1.0872238 0.99680364 0.9364621 0.8830912 0.92672384 1.1862559 1.0101916 1.0286573 1.0266342 0.7861924 0.8879353 1.0191629 1.0821239 1.0826725 1.0668637 0.9319744 1.0303049 1.0088805 0.9125893 0.88706523 0.9436279 0.8247461 1.0931696 0.97509104 0.9690216 0.89995015 1.0023223 0.9550776 1.0255988 0.8778461 0.8790427 1.200088 0.98898596 0.9860326 0.98999023 0.81136906 1.0230678 0.880431 1.0523032 1.0485008 1.0696616 0.918006 1.1026751 0.92432064 0.99822694 1.0620832 1.0078228 0.9731836 1.0307198 0.93607825 0.9976178 1.0205703 0.96840006 1.0118423 0.91311467 0.90190667 0.90766823 1.0489429 0.87346935 0.767363 1.0143036 0.9133605 1.0405564 1.048595 1.0019039 0.93283504 1.1180295 0.9536726 0.9534898 1.1003879 0.94990283 1.0594113 0.91792524 1.0516713 0.92934144 1.0465223 0.9928413 0.9802102 0.91232646 0.9803324 0.9696857 1.0029117 1.104561 0.94309264 0.9068414 1.1890455 0.97700506 0.9208115 0.9802479 0.99915034 1.0328623 0.9736534 0.9877046 0.99778193 0.97791326 1.0713621 1.0781904 0.8392574 0.9748591 0.86169064 0.99117124 1.2314782 1.0215007 1.1503882 0.86095417 0.9830455 1.0521395 0.9021991 
0.9690794 1.193508 1.1972661 1.1080307 0.97161263 0.96185553 0.8457971 0.91870016 0.90931004 0.9568058 1.1034485 1.0754423 0.9971508 0.90384656 1.2088484 0.95092744 1.1632991 1.0301816 0.8925111 1.0632174 1.1917084 0.94233346 1.1712155 0.93037415 1.0439378 0.96586925 1.0695478 0.9015182 0.91376245 1.0024782 0.8493281 1.0334355 0.990653 1.0442631 1.0245763 0.9612838 1.0286703 1.1272138 1.1730523 0.9220273 1.0184588 1.0746535 1.2315812 1.070371 0.8827522 0.99269944 0.87707835 1.0030801 1.0865852 0.9247149 1.0039175 0.9721088 1.0271518 0.978756 0.8758115 1.0402149 1.0261698 0.98386914 0.91336584 1.0413283 1.0361098 0.9799586 0.95880455 1.067775 0.87906706 0.79445827 1.0896534 1.0716572 ] [0.98847216 1.0080944 1.0141599 1.0080045 1.0563675 1.1122222 1.0887482 0.8897727 0.9237779 1.1029369 1.0517049 1.0272441 0.9284938 1.0616004 0.8270537 0.9706671 0.9380808 1.1421661 0.9313068 0.9819591 1.0383059 1.1375496 0.87373334 0.94045705 0.93853545 1.1583505 1.1642656 1.0703697 1.116104 0.9825741 0.97835565 1.0526562 1.1684726 1.0462438 0.9761203 1.1062407 0.9556846 1.1455506 0.92439157 1.1625426 0.93265945 0.89478934 1.1232103 0.92287 1.0900353 1.167548 1.0934486 0.84480727 1.0077151 0.94573134 1.0188365 1.0673189 1.0019522 1.0788873 1.0035709 1.0354055 1.0887078 0.8452856 0.9754745 0.93530595 0.9446167 1.0497891 1.0609616 0.9380466 0.9973592 1.0072842 0.8783112 0.93932664 0.85037225 0.93998784 0.9362946 0.80110973 1.1643882 1.0842458 1.0914639 0.9193374 1.0809517 1.0631468 0.94301826 1.1089405 0.94835943 0.88481814 0.9359543 1.0913914 1.046615 1.1097764 0.78215104 0.8289402 0.9895498 1.1048547 0.9920546 1.0243292 1.1286223 1.0117061 1.0254458 0.98248065 1.077969 0.8054477 1.1695333 0.78632545 0.95216155 0.93677646 1.1770204 0.8405955 0.9864606 0.9101392 1.020036 1.0902302 1.1510489 1.1680646 0.9715559 0.8122489 0.868176 0.9258682 0.974339 0.98437685 1.1161215 1.063102 0.92531246 0.9421876 0.90349746 1.183126 1.0270866 0.958178 0.96416837 1.0618035 0.90809757 1.1670582 
1.0509465 1.2053576 1.0818465 0.92010206 1.0318406 1.07888 1.1506803 1.1704999 1.1264975 1.0172901 0.8659512 1.036117 0.92453915 1.0638838 0.9103476 0.9539274 0.9546959 0.93507844 1.0056727 1.1042138 1.1445754 0.86806875 0.93816364 0.9049125 0.891085 0.8982602 0.96443087 0.94068444 0.9786653 0.9899613 1.0846922 1.0191977 0.9863117 0.9827332 1.0278779 1.05383 0.91440845 0.8702636 0.9666337 0.9438151 1.003078 0.8828978 1.0535526 1.0250767 1.2125195 0.9231919 1.0832522 0.9325059 0.9707108 1.0870613 1.0782537 0.85125655 0.8912855 0.9354938 1.1356679 1.0480695 1.1156156 1.0013973 0.95102847 0.95964473 0.9404047 0.9902496 1.188525 0.97440374 1.1203421 0.81740826 1.1263372 1.0190352 1.008588 1.1405908 0.8690298 0.93543124 1.0254853 1.0961063 0.89998513 0.85513395 0.94008374 0.8958386 0.85903096 0.90547645 1.0950471 1.0608863 1.1939144 0.930042 0.832477 0.96952456 1.0415637 0.99453855 1.0617627 0.91184396 0.90660334 1.1404185 0.9584639 1.0660872 1.059665 0.9505037 ] [1.0770739 0.938427 1.030695 0.99900985 0.84793735 0.8379281 1.1358014 1.0991167 1.0461143 1.2063024 1.1327157 0.8784133 0.93969023 0.8223241 0.9326423 0.92281455 0.8002494 1.0808346 1.0050222 1.1942842 1.0689218 1.2801608 1.0322136 1.003403 1.053903 0.87465346 1.0608171 0.95992714 0.90248823 1.0139507 0.89910537 0.8935694 0.9954875 0.8998218 0.854204 0.95030504 0.9787802 0.9505958 0.88304603 0.83483166 1.0828888 0.9966039 1.0481175 0.93073356 1.1364062 1.0934416 0.93564 0.94336927 0.8902329 0.98271215 0.9695602 1.0550315 1.0590485 1.153754 0.9809824 0.9609177 0.88904214 0.88476974 1.1482552 1.0432712 0.99526894 1.1168535 1.0962075 0.8120456 0.9907809 0.8979255 1.1790473 0.9890319 1.0003926 0.93999356 1.0310788 1.1598871 0.9466957 0.9866413 0.9732662 0.8772624 1.0746716 0.844732 0.92614347 1.012481 1.0466007 0.92249423 0.91597384 0.99895716 1.2255441 0.8767163 0.9239877 0.8545898 0.9866142 0.9716383 1.0188986 0.86559635 1.1054418 0.92564887 0.85310495 0.8913489 1.0550829 0.9627216 0.8558351 0.89303875 0.9498794 
0.8574638 0.9465429 0.93445295 0.85651886 1.004659 1.0007709 0.869597 1.0707506 1.01907 0.89878535 0.95060575 1.0365313 1.0175067 0.97488314 1.0379586 0.8806995 1.0392138 0.98785675 0.9459031 1.1919507 0.9827137 0.9515022 1.0797428 0.8943407 1.0237137 0.9847195 1.0256032 0.95669824 0.9537636 1.0569887 1.0279982 0.9811304 1.123996 1.0025216 1.056133 1.0218078 0.8894943 1.0462089 1.0751286 0.7742278 0.89595765 1.0797957 1.0499786 0.91823834 0.8932921 1.072323 0.8787216 0.9526608 1.2760828 0.88673395 1.0006901 0.9699709 0.9276492 0.94580424 1.2075164 0.80321336 1.033764 1.012829 0.9671824 0.9280241 1.2160026 1.0542579 1.1004995 1.0736667 1.0756124 0.9545716 0.99207467 1.1160686 0.94413507 0.95942074 0.82225764 1.0028527 0.82913077 0.9833828 0.9595029 1.0192893 1.0539358 0.8216866 1.0810205 1.019673 0.8696782 1.0125372 0.96655345 1.0514642 0.9263522 0.9208813 0.94460493 1.0656599 0.9158686 1.0172653 0.9258747 0.94783473 0.9588031 0.8967817 0.9007173 1.0596812 1.0532423 1.2284786 0.98071754 1.1009407 1.1920788 1.0265526 1.1517514 1.0790771 1.05738 0.986348 1.0085218 0.9607467 1.1099615 0.9244594 0.9274544 1.0946999 0.95307404 1.0619912 1.0418135 0.97088385 1.0060291 1.2036891 1.006622 0.930888 1.0849013 0.9784282 0.9674343 ]]]; ov_res: [[[1.241773 1.0630659 1.0404849 0.90511036 0.9371176 1.0280173 1.1155512 0.8104053 1.0688651 0.90674627 0.92544234 1.112673 1.0572412 1.0408384 1.0497478 1.1188763 0.90621555 1.0669177 0.92396057 0.8787877 1.0953721 0.9238307 1.0659176 0.98193157 1.003892 1.0896678 0.8183154 0.89205724 1.0545462 1.0724899 1.1988256 1.017341 0.8781385 1.0910227 1.195338 1.0647037 0.9888171 1.0698075 0.9877523 1.0734217 1.1706136 1.15773 1.0596513 0.98477906 1.1547183 1.0819184 1.0129617 1.0511384 1.0872238 0.9968036 0.9364621 0.8830913 0.9267239 1.186256 1.0101916 1.0286572 1.0266342 0.7861925 0.8879353 1.0191629 1.082124 1.0826725 1.0668637 0.93197453 1.030305 1.0088805 0.91258925 0.88706523 0.9436279 0.8247461 1.0931696 0.975091 0.9690216 0.89995015 
1.0023223 0.9550775 1.0255988 0.8778462 0.87904274 1.2000879 0.9889859 0.9860326 0.98999023 0.81136906 1.0230678 0.88043106 1.0523031 1.0485008 1.0696617 0.91800594 1.1026751 0.9243206 0.9982268 1.0620832 1.0078228 0.9731836 1.0307198 0.9360782 0.9976177 1.0205702 0.9684 1.0118424 0.9131146 0.90190667 0.90766823 1.0489429 0.87346935 0.767363 1.0143034 0.91336054 1.0405563 1.0485948 1.0019039 0.93283504 1.1180295 0.9536725 0.95348984 1.1003879 0.94990283 1.0594113 0.9179253 1.0516713 0.92934144 1.0465223 0.9928413 0.9802101 0.91232646 0.9803324 0.9696857 1.0029117 1.104561 0.94309264 0.9068414 1.1890455 0.977005 0.9208115 0.980248 0.9991503 1.0328623 0.9736534 0.9877046 0.9977819 0.97791326 1.0713621 1.0781904 0.8392574 0.9748591 0.86169064 0.9911713 1.2314783 1.0215007 1.1503881 0.86095417 0.9830455 1.0521395 0.9021991 0.96907943 1.193508 1.1972662 1.1080306 0.97161263 0.96185553 0.8457971 0.91870016 0.90931004 0.95680577 1.1034485 1.0754424 0.99715084 0.9038466 1.2088482 0.95092744 1.1632992 1.0301816 0.8925112 1.0632175 1.1917084 0.9423335 1.1712157 0.93037415 1.0439377 0.96586925 1.0695477 0.9015182 0.91376245 1.0024782 0.84932816 1.0334355 0.990653 1.0442631 1.0245764 0.9612838 1.0286704 1.1272137 1.1730524 0.92202723 1.0184588 1.0746535 1.2315811 1.070371 0.8827521 0.9926995 0.8770784 1.0030801 1.0865852 0.9247149 1.0039175 0.9721087 1.027152 0.97875607 0.8758116 1.0402149 1.0261698 0.98386914 0.9133659 1.0413283 1.0361098 0.97995853 0.9588046 1.067775 0.87906706 0.79445827 1.0896534 1.0716572 ] [0.9884723 1.0080944 1.0141599 1.0080045 1.0563675 1.1122223 1.0887482 0.8897727 0.9237779 1.1029369 1.0517049 1.0272441 0.9284938 1.0616004 0.8270537 0.9706672 0.9380808 1.1421661 0.9313068 0.98195904 1.0383059 1.1375496 0.8737334 0.94045705 0.9385354 1.1583506 1.1642656 1.0703697 1.1161039 0.9825741 0.9783557 1.0526562 1.1684726 1.0462438 0.9761203 1.1062406 0.95568466 1.1455506 0.92439157 1.1625426 0.93265945 0.89478934 1.1232103 0.92287 1.0900354 1.167548 1.0934486 
0.8448072 1.0077151 0.94573134 1.0188365 1.067319 1.0019523 1.0788873 1.0035709 1.0354055 1.0887078 0.84528553 0.9754745 0.935306 0.94461673 1.0497891 1.0609616 0.9380466 0.9973593 1.0072842 0.8783112 0.9393267 0.85037225 0.9399878 0.93629456 0.80110973 1.1643883 1.0842458 1.0914638 0.9193374 1.0809518 1.0631468 0.94301826 1.1089405 0.9483594 0.88481814 0.9359542 1.0913914 1.046615 1.1097764 0.7821511 0.8289403 0.98954976 1.1048548 0.9920545 1.0243292 1.1286223 1.0117061 1.0254458 0.9824806 1.077969 0.8054477 1.1695331 0.78632545 0.9521615 0.93677646 1.1770203 0.84059554 0.98646057 0.9101392 1.020036 1.0902302 1.1510489 1.1680647 0.9715558 0.8122489 0.8681759 0.92586815 0.974339 0.9843768 1.1161215 1.0631019 0.92531246 0.9421876 0.9034974 1.1831261 1.0270867 0.958178 0.96416837 1.0618035 0.90809757 1.1670582 1.0509465 1.2053576 1.0818465 0.9201021 1.0318406 1.0788801 1.1506803 1.1704999 1.1264975 1.01729 0.8659512 1.0361168 0.92453915 1.0638838 0.9103476 0.95392734 0.9546958 0.93507844 1.0056727 1.104214 1.1445754 0.86806875 0.93816364 0.9049125 0.8910851 0.8982601 0.96443087 0.94068444 0.97866523 0.9899613 1.0846922 1.0191977 0.98631173 0.9827332 1.027878 1.05383 0.9144085 0.8702636 0.9666337 0.9438151 1.0030779 0.8828978 1.0535525 1.0250767 1.2125195 0.9231919 1.0832522 0.93250597 0.9707108 1.0870613 1.0782537 0.85125655 0.89128554 0.9354939 1.1356679 1.0480695 1.1156156 1.0013973 0.9510284 0.95964473 0.9404048 0.9902496 1.188525 0.97440374 1.1203421 0.8174082 1.1263373 1.0190352 1.008588 1.1405908 0.8690298 0.9354312 1.0254853 1.0961063 0.8999852 0.85513395 0.9400838 0.8958387 0.859031 0.90547645 1.0950472 1.0608863 1.1939144 0.93004197 0.832477 0.9695245 1.0415637 0.9945386 1.0617627 0.91184396 0.90660334 1.1404184 0.95846397 1.0660874 1.059665 0.9505037 ] [1.0770739 0.938427 1.030695 0.9990098 0.8479373 0.8379282 1.1358014 1.0991167 1.0461142 1.2063025 1.1327157 0.8784133 0.9396902 0.8223241 0.9326423 0.92281455 0.8002494 1.0808345 1.0050222 1.1942842 
1.0689218 1.2801608 1.0322136 1.0034028 1.053903 0.8746534 1.0608171 0.95992714 0.90248823 1.0139507 0.89910537 0.8935694 0.9954875 0.8998218 0.85420406 0.950305 0.9787802 0.9505958 0.88304603 0.83483166 1.082889 0.9966039 1.0481175 0.9307336 1.1364062 1.0934416 0.93564 0.9433692 0.8902329 0.9827121 0.9695602 1.0550315 1.0590487 1.153754 0.98098236 0.96091765 0.8890422 0.88476974 1.1482552 1.0432712 0.99526894 1.1168535 1.0962075 0.8120456 0.9907809 0.8979255 1.1790472 0.9890319 1.0003926 0.93999344 1.0310788 1.1598871 0.9466956 0.9866412 0.9732662 0.8772623 1.0746716 0.844732 0.92614347 1.012481 1.0466006 0.92249423 0.9159738 0.99895716 1.225544 0.8767163 0.9239877 0.8545899 0.9866142 0.9716383 1.0188986 0.86559635 1.1054417 0.925649 0.853105 0.8913489 1.0550829 0.9627215 0.855835 0.89303875 0.94987947 0.8574637 0.94654286 0.93445295 0.85651886 1.004659 1.0007708 0.869597 1.0707506 1.01907 0.8987853 0.95060575 1.0365313 1.0175067 0.97488314 1.0379586 0.88069946 1.0392138 0.9878568 0.945903 1.1919507 0.9827137 0.95150214 1.0797428 0.89434063 1.0237138 0.9847196 1.0256032 0.9566983 0.95376354 1.0569886 1.0279982 0.9811304 1.1239959 1.0025216 1.056133 1.0218078 0.88949424 1.0462087 1.0751284 0.7742278 0.8959576 1.0797957 1.0499786 0.9182383 0.89329207 1.072323 0.8787216 0.95266086 1.2760828 0.886734 1.0006901 0.969971 0.92764914 0.9458042 1.2075164 0.8032134 1.033764 1.012829 0.96718246 0.9280242 1.2160026 1.0542579 1.1004994 1.0736667 1.0756123 0.95457166 0.99207467 1.1160686 0.944135 0.95942074 0.8222576 1.0028527 0.8291307 0.9833828 0.9595029 1.0192893 1.0539358 0.8216866 1.0810205 1.019673 0.8696782 1.0125371 0.9665535 1.0514642 0.9263522 0.9208813 0.944605 1.0656599 0.9158686 1.0172652 0.9258747 0.9478348 0.9588031 0.8967816 0.9007173 1.0596812 1.0532423 1.2284786 0.98071754 1.1009407 1.1920788 1.0265524 1.1517514 1.079077 1.05738 0.986348 1.0085219 0.96074665 1.1099615 0.9244594 0.9274544 1.0946999 0.95307404 1.0619913 1.0418134 0.9708838 1.0060291 1.2036892 
1.006622 0.930888 1.0849013 0.9784282 0.9674343 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:-1 - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6589.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1]]() %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[0.9913927 0.9168392 1.0903794 1.0273908 1.1165243 1.0833066 0.95390296 0.98450327 1.0966355 0.93799394 0.8462355 1.0037229 1.0111523 1.060076 1.0871387 1.0384516 0.89374363 1.1493984 0.94134957 1.157835 1.1340559 1.0115819 0.8813019 0.92966837 1.1428418 1.0669512 1.1245091 1.0276877 0.9376103 0.990198 1.0343987 0.97294414 0.96549934 1.1141051 0.87126076 0.99452436 1.0393019 0.99562687 1.0266397 0.99815315 0.86972046 0.89113134 1.0936619 0.99904174 0.90050066 0.9943602 0.86871266 0.99418175 1.0866375 1.0510269 0.9478232 0.94481236 1.0844816 1.1707329 0.9901401 0.99100435 1.0675588 1.0961223 0.9859146 0.86966556 1.0570732 0.9996934 1.0817686 1.2676731 0.86413926 1.1262641 0.9360671 1.1535981 1.024335 1.1333654 0.9946639 1.0544803 1.1178738 1.152839 0.8790479 1.1092247 1.0470756 0.99949694 0.9657999 0.9126673 1.0211124 0.9435494 0.974505 0.8176545 1.026825 1.0218387 0.9026207 0.9779013 1.0471785 0.94726163 0.9704327 1.057362 1.0776613 1.0638477 0.9197647 0.87450063 0.99469525 1.0748396 1.0405419 0.8803678 0.8801959 1.0610931 0.95751095 0.9622393 0.9408742 0.9582094 0.86468923 1.2002196 0.90112746 0.98178947 0.86041725 1.0235258 1.1699618 1.1264929 1.1828669 0.9999359 0.9018285 1.0712075 1.013291 1.0004703 0.8317912 1.074934 0.94974077 0.96437067 0.9562457 0.9485747 1.0620954 0.97010326 1.1593622 0.9659995 0.7441243 1.1330041 1.1121972 0.9357149 0.93769073 0.98726034 1.0688282 1.0679244 1.0640522 0.8998958 0.9601796 0.877085 1.1064211 0.90868044 1.0759125 0.9517347 1.076889 0.9504007 1.0693156 0.97602105 1.1116279 
0.95433486 0.97511315 0.8845122 1.0326458 1.0784397 1.0276523 0.9565297 0.8858597 0.9997306 0.95609385 1.0887358 0.83836657 1.0457041 0.9349377 0.951398 0.97863173 0.90589845 1.0367287 1.0578941 0.9544015 1.0234313 0.9751051 1.0098248 0.92594194 1.0138683 1.1070039 1.0939296 1.0703526 1.1802046 1.0146533 1.0928328 1.0353206 0.944722 1.0834545 1.1955612 1.0801072 0.9935905 1.0034376 1.0368913 0.96208274 1.1042544 1.0497376 1.0050569 0.9576193 0.93204457 1.0181504 0.9836671 1.0101446 1.0747248 0.89721483 0.8993629 0.82593876 1.024142 0.952227 0.8744934 1.0000869 1.0326042 1.009424 0.8583996 0.93490916 0.9606927 1.0888097 1.0943706 0.995882 1.0569069 0.9876673 0.9410415 1.0002452 0.9785448 1.0696001 0.96531135 1.0181475 1.0096127 ] [0.9627408 0.9047101 1.0228599 1.0406872 1.2188214 1.0211732 0.7791545 0.9098493 0.9644515 1.2385644 0.98990893 1.1389501 0.99310386 1.0203592 1.0785462 0.89213645 0.99897265 0.874409 1.0674068 1.0971793 0.8895739 1.1036949 1.0493076 1.0563 0.98785794 1.0858828 1.0641725 0.9946119 0.9010246 1.0713189 1.0700154 1.045509 0.91818094 0.97942793 1.0149176 1.2066772 0.92432785 0.85043555 0.9587251 1.0558327 1.0217252 0.89381087 1.0112735 0.92601633 1.1012427 0.8792823 1.0788242 0.91702664 1.0317615 1.1870672 1.059981 0.9651849 1.1321967 1.1585418 1.0182276 1.1002867 1.037662 0.94158226 0.9330598 0.9927393 1.0811734 0.9045762 0.97762734 0.9338603 0.94580865 1.0349017 1.0728078 0.92333 1.0037078 1.0721264 0.89233345 1.0029835 0.9373425 1.0485045 1.0661151 0.9822398 1.0581182 0.95034665 1.0191413 1.0293158 0.94516957 0.90746915 1.0492637 1.0329174 1.1855248 0.9864874 0.94379365 0.88777983 1.1916794 0.9918859 0.97238725 0.99193907 1.1766114 0.99502504 1.0489122 1.024283 1.0648756 1.0650953 0.9735434 0.7680791 1.094146 0.90890604 0.9511734 1.1126251 1.1511943 1.0816551 1.1044055 1.0312316 1.0163401 0.89503264 0.8459051 1.0987134 1.0460984 0.9771346 0.94369084 0.86732566 0.9240994 0.96932 0.8824583 1.1700962 0.7923613 1.0673467 0.882783 0.96647143 
0.82547444 1.0630441 0.97785974 1.123485 1.0487946 0.811558 1.0534335 1.0313615 1.0511765 1.0940895 0.8421426 1.1040465 0.89542276 0.96467656 1.0463887 0.9927841 0.95031995 0.9515505 0.94642997 0.7650431 1.0728381 0.9456957 1.129707 1.0119057 0.701222 0.9430589 0.8642869 0.93685 1.0927526 0.93228006 1.0715714 0.9191131 1.1305017 0.9721987 1.0351069 0.99497813 0.97197026 1.0789465 1.1222441 1.0279891 1.0160542 1.1044636 0.9310919 1.0912434 0.93900865 0.8082494 0.97515994 0.9535224 1.0310524 1.0038851 0.9762621 1.0232698 1.1767111 0.8737549 0.88326836 0.9431249 0.98788184 0.9779313 1.1195921 1.0295436 0.9592423 0.877149 0.9376811 1.0405394 1.1014212 0.9230913 0.8997556 0.92467797 1.1574814 1.0680187 1.1354402 0.9547323 0.8630313 0.92920387 0.9778594 0.9803372 1.0794553 0.88432074 1.024467 0.99929655 1.1206737 0.89912504 1.0521531 1.0742165 1.0739614 1.0721961 1.0148852 0.9389557 1.0742207 1.0126677 0.9105554 0.9700406 1.0296726 1.1192249 1.0663743 0.94613194 0.9525 0.8060589 0.934048 0.956996 ] [0.96703917 1.0054847 1.0886056 1.154099 0.944888 0.96896374 0.99752915 0.9162198 0.98465925 1.1346439 1.0321372 0.974623 0.9742568 0.8619736 1.0323178 0.9548582 0.91569364 1.0982106 0.98785865 1.0264248 0.90172917 0.96326596 1.1051236 1.0002911 0.96346205 0.8903509 1.085235 1.1417655 0.9186976 1.0907435 1.0298309 1.0042422 0.97659993 1.0206085 1.035738 1.1101286 1.1104836 0.9898557 1.0618702 1.0033334 1.123583 1.1311466 0.9312739 1.0031928 1.0894574 1.0734473 1.019686 0.9905627 0.9284084 0.8598015 1.0013999 0.9724819 0.9486987 1.0652308 1.0426546 1.1020492 1.0487733 1.0721761 1.1268753 1.0141909 0.9875709 0.85138106 0.92790073 1.1897819 0.9216493 0.98062426 1.2064332 1.111252 1.0717913 0.954669 1.1191274 1.1248865 0.90829736 1.0590752 0.8797037 0.90131646 0.8819281 1.0074105 0.9254706 0.91387117 1.0735651 1.1357509 1.0510317 1.0330672 1.055683 0.8891476 0.9325969 1.2232393 1.0713465 1.0620443 1.1285602 1.157738 0.9425405 1.0493113 1.1358883 1.2247115 1.0107583 0.8247468 
1.0369796 1.1264995 1.0915414 1.0861336 0.98787224 1.0391735 0.97118616 0.9610811 1.1678623 1.1679105 1.1422373 1.050861 0.8947825 1.0504988 0.84700423 1.0537139 1.0493037 1.1469791 0.9402242 0.87472075 0.97880816 1.0500323 1.0671574 0.91624486 1.1059431 1.0367824 1.0138623 0.9961684 1.0524807 1.0114648 1.0348712 0.9762537 0.95247096 1.0167624 1.1409174 1.0325207 1.080243 1.0321511 0.91353476 1.141389 1.1213142 1.1929432 1.0744116 0.91538507 1.0554942 1.0207349 0.9600289 1.0464983 0.92847544 1.0150554 0.9810038 0.9973905 0.83661723 0.9894358 0.8820345 1.0630941 0.98786926 0.9571631 1.0769157 0.88723797 1.08522 0.81092685 0.9517609 0.8668132 1.1625205 0.94132555 1.0955606 0.9192147 0.9260636 0.90055186 1.1476784 0.93635327 1.0645995 0.8142399 1.0868884 1.1278613 1.0156965 1.0337247 0.8565963 1.1276902 1.0446553 1.020279 1.0703353 1.3498493 0.9383143 1.0752046 0.9368781 1.1581738 0.9006527 0.9267387 0.9243346 0.87236303 0.8716118 1.0666159 0.9731006 1.1074716 0.8917798 1.1042868 1.0069164 1.0670114 1.405727 1.0273752 0.8877967 0.8942814 1.0099955 1.1314093 0.8351732 0.8769331 0.9571445 0.9766972 0.94614846 1.0238621 1.0018935 0.98557895 0.9124163 1.0209585 1.2531134 1.011783 0.7930688 1.089389 0.9973175 1.0279119 0.98398197 1.0034585 1.0655204 0.9052368 ]]]; ov_res: [[[0.99139273 0.91683924 1.0903794 1.0273908 1.1165245 1.0833067 0.953903 0.9845033 1.0966356 0.937994 0.8462356 1.0037229 1.0111523 1.0600759 1.0871387 1.0384516 0.8937437 1.1493986 0.9413496 1.157835 1.134056 1.0115819 0.88130194 0.92966837 1.1428419 1.0669513 1.1245092 1.0276877 0.9376103 0.99019814 1.0343987 0.97294426 0.9654994 1.1141052 0.87126076 0.9945244 1.0393019 0.995627 1.0266397 0.99815315 0.8697205 0.89113146 1.093662 0.99904186 0.9005007 0.99436027 0.86871266 0.9941818 1.0866375 1.051027 0.94782335 0.94481236 1.0844816 1.1707329 0.99014014 0.9910044 1.0675588 1.0961223 0.9859147 0.8696656 1.0570734 0.9996935 1.0817686 1.2676731 0.8641393 1.1262641 0.9360671 1.1535982 1.0243351 1.1333654 
0.9946639 1.0544804 1.1178739 1.152839 0.87904793 1.1092247 1.0470756 0.999497 0.9658 0.9126673 1.0211124 0.94354945 0.974505 0.8176545 1.026825 1.0218389 0.90262073 0.9779013 1.0471786 0.9472617 0.97043276 1.057362 1.0776613 1.0638477 0.9197648 0.8745007 0.99469525 1.0748395 1.040542 0.8803679 0.880196 1.0610932 0.957511 0.9622393 0.9408742 0.95820946 0.8646893 1.2002198 0.90112746 0.9817895 0.8604173 1.0235258 1.1699619 1.1264929 1.182867 0.999936 0.9018285 1.0712076 1.0132911 1.0004703 0.8317913 1.0749341 0.9497409 0.9643707 0.9562458 0.9485748 1.0620955 0.9701032 1.1593623 0.96599954 0.7441243 1.1330042 1.1121972 0.93571496 0.9376908 0.9872604 1.0688283 1.0679244 1.0640523 0.89989585 0.9601796 0.877085 1.1064212 0.9086805 1.0759125 0.9517348 1.076889 0.95040077 1.0693156 0.97602105 1.111628 0.95433486 0.97511315 0.88451225 1.032646 1.0784398 1.0276523 0.95652974 0.8858598 0.9997306 0.95609385 1.0887358 0.8383665 1.0457041 0.9349378 0.9513981 0.97863173 0.90589845 1.0367287 1.0578942 0.9544015 1.0234313 0.97510517 1.0098248 0.92594194 1.0138683 1.107004 1.0939298 1.0703526 1.1802047 1.0146533 1.0928329 1.0353206 0.94472206 1.0834546 1.1955612 1.0801072 0.99359053 1.0034376 1.0368913 0.96208286 1.1042545 1.0497377 1.005057 0.95761937 0.93204457 1.0181506 0.98366714 1.0101446 1.0747248 0.89721495 0.89936304 0.8259389 1.024142 0.95222706 0.8744934 1.0000869 1.0326043 1.009424 0.8583997 0.9349092 0.9606927 1.0888097 1.0943707 0.9958821 1.0569069 0.9876674 0.9410416 1.0002452 0.97854483 1.0696002 0.9653114 1.0181475 1.0096128 ] [0.96274084 0.90471023 1.02286 1.0406873 1.2188214 1.0211735 0.7791546 0.90984935 0.9644516 1.2385645 0.989909 1.1389501 0.9931039 1.0203592 1.0785463 0.8921366 0.9989727 0.87440914 1.0674068 1.0971794 0.8895739 1.1036949 1.0493076 1.0563002 0.987858 1.0858828 1.0641725 0.994612 0.90102464 1.071319 1.0700155 1.045509 0.918181 0.97942805 1.0149177 1.2066773 0.92432797 0.85043555 0.95872515 1.0558327 1.0217252 0.8938109 1.0112736 0.9260164 
1.1012427 0.87928236 1.0788242 0.9170267 1.0317616 1.1870674 1.0599811 0.965185 1.1321968 1.1585419 1.0182276 1.1002868 1.0376621 0.94158226 0.9330599 0.9927394 1.0811734 0.90457624 0.9776274 0.93386036 0.9458087 1.0349019 1.0728079 0.92333 1.0037079 1.0721265 0.8923335 1.0029836 0.9373426 1.0485045 1.0661153 0.9822399 1.0581182 0.9503467 1.0191413 1.0293157 0.9451697 0.9074693 1.0492636 1.0329176 1.185525 0.98648745 0.94379365 0.8877799 1.1916795 0.99188596 0.9723873 0.9919391 1.1766114 0.99502516 1.0489122 1.0242832 1.0648757 1.0650955 0.9735435 0.76807916 1.094146 0.9089061 0.9511734 1.1126251 1.1511945 1.0816553 1.1044056 1.0312316 1.0163403 0.89503264 0.8459052 1.0987133 1.0460984 0.9771346 0.9436909 0.8673257 0.9240995 0.9693201 0.8824584 1.1700963 0.7923613 1.0673467 0.882783 0.96647155 0.8254745 1.0630442 0.97785974 1.1234851 1.0487946 0.811558 1.0534335 1.0313615 1.0511765 1.0940895 0.8421426 1.1040465 0.8954228 0.96467656 1.0463887 0.99278414 0.95031995 0.95155054 0.94642997 0.7650431 1.0728381 0.9456957 1.1297071 1.0119058 0.7012221 0.94305897 0.86428696 0.93685 1.0927526 0.9322802 1.0715715 0.91911316 1.1305019 0.9721987 1.0351069 0.9949782 0.9719703 1.0789466 1.1222441 1.0279893 1.0160543 1.1044637 0.9310919 1.0912434 0.9390088 0.8082495 0.97516006 0.95352244 1.0310525 1.0038851 0.97626215 1.0232699 1.1767112 0.873755 0.8832684 0.94312495 0.98788196 0.9779314 1.1195921 1.0295438 0.95924234 0.877149 0.9376811 1.0405394 1.1014212 0.92309135 0.89975566 0.924678 1.1574814 1.0680188 1.1354403 0.95473236 0.8630313 0.9292039 0.9778593 0.98033726 1.0794554 0.88432086 1.0244671 0.99929655 1.1206737 0.8991251 1.0521532 1.0742166 1.0739614 1.0721961 1.0148853 0.9389557 1.0742205 1.0126678 0.9105554 0.9700407 1.0296727 1.1192251 1.0663743 0.94613194 0.95250005 0.80605894 0.934048 0.95699596] [0.96703917 1.0054848 1.0886056 1.1540991 0.94488806 0.9689638 0.99752915 0.91621983 0.9846593 1.1346439 1.0321373 0.974623 0.9742569 0.86197364 1.0323179 0.95485824 
0.91569364 1.0982106 0.9878587 1.0264248 0.9017292 0.9632659 1.1051236 1.0002912 0.9634621 0.89035094 1.0852351 1.1417656 0.91869766 1.0907437 1.0298309 1.0042422 0.97659993 1.0206085 1.0357381 1.1101288 1.1104835 0.98985577 1.0618702 1.0033336 1.1235831 1.1311466 0.93127394 1.0031928 1.0894574 1.0734475 1.0196861 0.9905627 0.9284084 0.85980153 1.0014 0.97248197 0.94869876 1.0652308 1.0426548 1.1020494 1.0487733 1.0721762 1.1268753 1.014191 0.9875709 0.85138106 0.9279008 1.1897819 0.9216492 0.9806243 1.2064333 1.111252 1.0717913 0.95466906 1.1191274 1.1248866 0.9082975 1.0590752 0.8797037 0.90131646 0.88192815 1.0074106 0.92547065 0.9138712 1.0735652 1.1357511 1.0510318 1.0330675 1.0556831 0.8891475 0.932597 1.2232393 1.0713466 1.0620444 1.1285602 1.157738 0.9425406 1.0493114 1.1358885 1.2247117 1.0107583 0.82474685 1.0369797 1.1264995 1.0915414 1.0861337 0.98787224 1.0391735 0.9711863 0.9610811 1.1678623 1.1679105 1.1422373 1.050861 0.89478254 1.050499 0.8470043 1.053714 1.0493038 1.1469792 0.9402242 0.87472075 0.9788083 1.0500324 1.0671575 0.9162449 1.1059431 1.0367825 1.0138624 0.9961684 1.0524808 1.011465 1.0348712 0.9762538 0.95247096 1.0167624 1.1409175 1.0325205 1.0802431 1.0321511 0.9135349 1.141389 1.1213143 1.1929432 1.0744116 0.91538507 1.0554942 1.0207349 0.960029 1.0464984 0.92847544 1.0150555 0.9810039 0.99739057 0.8366173 0.98943585 0.88203454 1.0630944 0.9878694 0.9571631 1.0769157 0.887238 1.0852201 0.810927 0.95176095 0.86681324 1.1625205 0.94132555 1.0955607 0.91921484 0.9260636 0.90055186 1.1476784 0.93635327 1.0645995 0.81424 1.0868886 1.1278614 1.0156965 1.0337245 0.85659635 1.1276901 1.0446553 1.020279 1.0703355 1.3498495 0.9383143 1.0752046 0.93687814 1.1581739 0.9006528 0.9267387 0.9243345 0.8723631 0.8716118 1.0666159 0.97310066 1.1074716 0.8917798 1.1042868 1.0069164 1.0670115 1.405727 1.0273752 0.88779676 0.89428145 1.0099956 1.1314094 0.83517325 0.8769331 0.95714456 0.9766972 0.9461485 1.0238621 1.0018935 0.9855791 0.9124164 1.0209585 
1.2531134 1.0117831 0.7930689 1.089389 0.9973176 1.0279119 0.983982 1.0034585 1.0655205 0.9052368 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:-2 - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6591.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-2]]() %self.unbiased : bool = prim::Constant[value=0]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[[1.100972 1.0389516 0.93815833 0.937831 1.0390764 1.1688889 0.92386806 0.9496516 1.0289438 1.1769339 1.055219 1.0103573 0.98490685 1.0637468 1.0463728 0.9954534 1.0071359 1.0769972 0.8928798 0.9767921 0.98520184 0.9853234 1.0115603 0.8530594 0.9072208 0.99475616 0.92463136 0.8985717 0.94936854 0.9196607 1.1270494 1.1291673 0.919231 0.8471281 0.9139028 0.96483773 0.9889746 1.0233616 0.96211594 0.96505857 0.95323783 0.96636343 0.8533151 1.0122253 0.9467676 1.070887 0.96780217 1.0155029 0.87433136 0.94329137 1.0456294 1.0102819 0.86000854 0.9133749 1.2086391 1.0643173 0.8160021 0.91534084 1.0788919 0.935856 0.9560849 0.88036555 0.9674858 1.1382613 0.93830425 0.994201 0.94002527 0.91467106 0.91993576 1.0311521 1.0723171 1.0598391 1.108376 1.1083525 0.88816077 1.0971321 1.0926803 1.1321238 0.8954183 1.0283657 0.9906251 1.1918048 0.92472565 0.8626959 1.0442462 0.9775986 0.9526284 0.9500821 0.9850207 0.9466807 0.9805937 1.0251852 0.94297856 1.0199672 0.89290935 1.0718693 0.9611723 0.86274517 0.9786673 0.8438238 1.0751138 0.84080976 1.0086958 1.0155952 0.8977079 1.0097519 0.89928347 0.9684065 1.2344162 0.97229934 0.92559224 1.1676906 1.1303449 0.9428917 0.9079011 0.8666328 1.1059198 1.1026129 0.97427243 1.078479 1.0444691 0.934239 0.8625173 1.1833264 0.9800006 0.97786444 0.9801851 1.0194249 1.0938114 1.1441683 1.0617181 0.94745797 0.9928669 0.9669738 1.0482408 0.85729295 1.0064404 0.9449767 1.1097935 1.0239698 1.1427826 1.0110663 1.2135959 0.8629783 0.9832678 0.90979403 0.90979683 1.1362658 0.9937588 1.0826064 0.93210393 1.1067879 0.9927873 0.9406855 1.1005828 
0.8777512 0.9054429 0.86316806 0.8900473 0.9399016 0.99157786 0.97589374 0.8389118 1.112632 0.85622287 0.991346 1.0644146 0.83364516 0.96394885 0.96636766 1.0776333 0.9232971 1.0531118 0.94478667 1.1020908 1.2297064 0.9910871 0.8648434 0.91063 1.0426224 0.87248623 1.19344 1.104604 1.0619532 1.0120982 0.994088 0.9761636 1.0087028 0.9804209 1.0209991 0.94234586 0.90692484 0.916161 0.95026964 1.0194031 0.93800616 0.9746482 0.94570535 1.0162784 0.83302873 1.0253457 0.95005625 0.89518934 0.98891264 0.96631145 1.0222943 1.1355094 1.0099952 0.96941847 0.97348005 1.0862902 0.969786 1.0312154 1.1264894 0.91233754 1.1022978 0.9790151 0.97826153 1.1545357 1.0479007 0.973607 1.0048662 0.9305219 1.0504961 ] [0.88807833 1.0377392 1.1939008 0.9887179 0.9877257 0.9329504 0.8907984 0.96488875 1.1278921 1.1843646 1.1406775 1.1170558 0.93591607 1.0782946 0.9328946 0.98752886 1.2440686 0.8754582 1.0692933 1.0643682 1.0688834 0.89816725 1.0625588 0.9295334 0.90627784 0.8925994 0.8840509 1.1559793 0.9012536 0.89456147 0.9820725 0.97828656 1.017314 0.8712337 1.0241449 1.0813127 0.97065496 1.058338 1.04723 0.97625023 1.1793399 1.1177102 1.0553654 1.001762 0.91945344 0.8665458 1.08705 0.8105579 1.1259246 0.94485396 0.98811686 1.1957003 1.1920465 0.97107875 0.9398634 1.0381634 0.95350474 1.0298339 0.91487014 1.1296451 0.8541736 1.0763686 1.0834423 0.8483067 0.98037124 0.9515451 1.113589 1.0364202 0.9552893 0.9080584 1.04552 0.93661857 1.0827913 0.9941472 1.1207477 1.1001021 1.1402823 1.0780307 0.8514383 1.0846181 1.1318151 0.8683994 0.8680051 1.0467564 1.1933588 0.94417685 0.98082525 0.9275306 0.916362 0.96715266 0.89242774 0.93792236 0.9444923 1.0124857 1.0299103 0.92488843 0.97139275 0.99780256 1.3373784 0.95956093 1.1179882 0.9925207 1.1352768 0.9664228 1.1557908 0.90593344 1.0225697 1.018935 0.90022385 0.99429226 0.98557556 0.95073014 1.0243167 1.0848054 0.937921 0.9823673 0.8562689 0.9482285 1.1390564 0.9498147 0.98970985 0.9586973 0.95039237 0.88723904 0.8919287 1.045313 0.9141008 
0.8468693 1.0003762 1.0166229 1.1170356 0.9209656 0.98497367 1.0943649 1.0733302 0.94934183 1.1531873 1.031446 1.0253874 1.0420401 1.0816491 0.9039305 1.0667198 1.1844074 0.8651327 1.0618246 1.0387331 1.1691841 0.8049974 0.95735747 1.0417993 1.1262906 0.9966263 0.9526861 0.97706264 0.9923633 1.0599489 1.0248141 1.0002656 1.007837 0.9181099 1.0612651 1.0015236 1.0966456 1.0376931 0.98927474 0.9846694 1.1552354 1.0822419 0.8021325 1.0183188 0.94662327 0.96330607 1.0204029 1.0876172 1.1408719 0.99031216 1.0728816 1.0234135 0.9518988 0.99025345 0.8640222 0.9544741 0.87090504 1.0334575 0.97654504 1.0464061 0.9698884 0.9851761 1.0461543 0.7934271 1.0244303 1.0070316 0.9781456 0.8965392 0.94216204 0.87190646 0.92453027 0.9915171 0.92945164 0.96236986 1.1098058 0.81380516 0.85078967 0.9370548 0.9302408 1.1420561 0.95679885 1.0637636 0.9493239 0.94922787 0.9926293 0.7466529 0.91154784 0.9494577 0.94769037 1.0256057 0.9903169 1.1946678 1.0687326 1.0045487 1.0370152 0.97333723 1.0490264 ] [1.0419266 1.0634035 0.98935324 1.0617363 0.9704284 1.1270677 0.9959199 0.8048292 0.857178 1.0909867 0.9340232 0.9340842 0.8211136 1.0597367 0.92012787 0.9200575 1.0477339 0.8742029 1.0943465 0.98837435 1.0649956 0.98331666 0.86840284 1.040929 0.94649875 0.955992 0.9552917 1.0638707 0.9644592 1.0117934 1.0120013 0.9744505 1.0459131 0.93112177 0.90682435 1.1019431 0.9380707 0.964126 0.8328506 1.1562247 1.1210954 1.0165951 0.9837554 0.89191383 1.004299 1.1614101 0.9566827 0.9587757 1.0512235 0.937278 0.95654094 0.75954175 1.0073378 1.0614825 1.0702846 1.1174792 0.99650073 1.1427842 0.94407046 0.94180304 1.0442466 0.9030484 0.81488603 0.955305 0.9664068 0.87580055 0.9420598 0.93292326 0.870501 1.1008775 0.9865695 0.9948122 0.97080463 0.9513731 1.1546861 1.0769432 0.89920956 1.1201324 1.2520393 1.0184809 0.8806001 0.9763894 0.9540706 0.95795816 0.912587 0.91817296 1.0165799 1.0815706 1.1565105 0.96674097 1.0400978 0.9131817 0.816703 0.89520967 0.92170256 1.1306617 0.8757066 1.1176547 0.9809091 
1.0098997 0.9064881 0.92032295 1.0943611 0.97090894 1.0340197 1.1545552 1.2349687 0.9634212 1.0084734 0.9626818 1.005055 1.0078847 0.996111 0.9609038 0.88823247 1.123231 0.9460466 0.9437216 1.0229323 0.81103224 1.075894 1.0398647 1.0187083 0.9630088 0.9403954 1.0660261 0.87276155 1.0605177 0.9022627 0.9822513 0.92542404 0.82226604 1.166898 0.99295765 0.9933828 1.0543774 0.80432266 0.90929717 0.9431802 1.3043791 0.9037594 0.88515425 0.972301 1.0395437 1.0371095 1.1145575 0.94881916 1.0278858 0.9211158 1.1369953 0.87602454 1.1285992 1.1145774 0.8897237 1.0984205 0.8636786 0.96074003 1.0653425 1.0777698 0.8681945 0.93801004 1.1846497 1.037502 1.0510277 0.88368064 0.9954786 0.89529186 0.8894933 1.032417 0.90671587 0.9856268 0.89048684 1.089513 1.0314682 1.0318879 1.0005076 0.97787726 1.1186154 0.9960876 0.81124085 0.9341678 0.943606 0.84128594 1.0586301 0.9489484 0.81358945 1.078094 0.99290854 0.95953965 0.9402024 0.94366986 0.94334227 0.85674614 0.98285294 1.0922986 0.95839214 1.1711621 0.7617946 1.0592018 0.86669326 1.3176917 0.95786834 0.8568872 0.88747543 0.95313865 1.2025402 1.0909109 1.0430356 0.89847887 0.927892 1.0531194 1.0056084 0.9912027 1.1079221 0.93408036 0.9679389 1.0508238 0.90301436 0.96016365 1.0825057 0.9935736 0.9431724 1.1512793 0.97418237]]]; ov_res: [[[1.1009719 1.0389519 0.9381586 0.9378311 1.0390766 1.1688893 0.9238679 0.9496512 1.0289438 1.1769341 1.0552186 1.0103571 0.9849064 1.0637472 1.0463729 0.99545306 1.0071361 1.0769975 0.89287966 0.9767923 0.985202 0.9853237 1.0115603 0.8530596 0.907221 0.9947562 0.9246312 0.89857155 0.94936883 0.919661 1.1270494 1.1291674 0.9192308 0.847128 0.91390276 0.9648375 0.9889747 1.0233619 0.9621159 0.9650585 0.9532379 0.96636343 0.8533149 1.0122253 0.9467674 1.070887 0.96780205 1.015503 0.8743313 0.9432911 1.0456296 1.010282 0.86000824 0.91337496 1.208639 1.0643176 0.81600195 0.91534126 1.0788919 0.9358564 0.9560851 0.88036525 0.9674855 1.1382612 0.9383041 0.9942009 0.940025 0.9146714 0.91993576 1.0311521 
1.0723172 1.059839 1.1083758 1.1083522 0.88816077 1.0971321 1.09268 1.1321237 0.8954182 1.0283655 0.9906245 1.1918043 0.92472595 0.8626954 1.044246 0.9775986 0.9526288 0.95008224 0.9850208 0.9466809 0.9805937 1.0251853 0.9429785 1.019967 0.8929096 1.0718691 0.9611721 0.862745 0.9786678 0.8438235 1.0751135 0.84081 1.0086957 1.0155956 0.8977081 1.0097518 0.8992833 0.9684062 1.2344164 0.97229946 0.92559206 1.167691 1.1303449 0.9428916 0.90790117 0.8666329 1.1059197 1.1026129 0.97427255 1.078479 1.0444691 0.9342389 0.8625172 1.1833265 0.9800008 0.9778648 0.9801855 1.0194249 1.0938115 1.1441681 1.0617181 0.9474576 0.9928667 0.96697396 1.0482408 0.857293 1.0064404 0.944977 1.1097934 1.0239698 1.1427826 1.0110663 1.2135957 0.8629785 0.9832678 0.90979433 0.90979683 1.1362658 0.9937587 1.0826062 0.9321035 1.1067878 0.9927875 0.9406857 1.1005825 0.87775093 0.9054428 0.86316794 0.89004755 0.93990195 0.9915777 0.9758936 0.83891183 1.1126323 0.85622257 0.99134606 1.0644145 0.83364546 0.96394914 0.9663673 1.0776331 0.92329735 1.0531116 0.94478685 1.1020907 1.2297062 0.99108666 0.8648432 0.91063017 1.0426226 0.872486 1.1934404 1.1046044 1.061953 1.0120981 0.994088 0.9761637 1.0087025 0.98042107 1.0209991 0.94234574 0.90692437 0.9161611 0.95026976 1.0194032 0.9380061 0.9746481 0.9457057 1.0162786 0.8330289 1.0253457 0.95005614 0.89518934 0.9889124 0.96631145 1.0222945 1.1355094 1.0099953 0.96941847 0.97348017 1.0862898 0.96978587 1.0312153 1.1264894 0.9123376 1.1022977 0.9790152 0.97826135 1.1545359 1.0479009 0.9736069 1.0048664 0.9305216 1.0504959 ] [0.8880783 1.0377389 1.193901 0.98871785 0.98772585 0.9329505 0.8907984 0.96488893 1.1278923 1.1843649 1.140677 1.1170559 0.9359164 1.0782945 0.9328948 0.98752874 1.2440685 0.8754578 1.0692933 1.0643684 1.0688833 0.8981672 1.0625592 0.92953354 0.9062777 0.89259976 0.88405085 1.1559789 0.9012534 0.89456147 0.98207283 0.97828674 1.0173136 0.87123364 1.024145 1.0813124 0.9706551 1.0583383 1.0472299 0.97625023 1.1793394 1.1177102 
1.0553653 1.0017619 0.91945344 0.8665458 1.0870501 0.81055796 1.1259246 0.9448539 0.9881169 1.1956999 1.1920469 0.9710785 0.9398634 1.0381635 0.95350444 1.029834 0.91486996 1.1296451 0.8541736 1.0763689 1.0834421 0.8483068 0.98037106 0.9515451 1.1135889 1.0364205 0.95528924 0.9080585 1.0455197 0.9366184 1.0827912 0.99414694 1.1207477 1.100102 1.1402825 1.0780308 0.85143834 1.084618 1.131815 0.8683992 0.86800486 1.0467564 1.1933591 0.94417673 0.98082536 0.9275304 0.9163621 0.96715295 0.8924279 0.9379225 0.9444925 1.0124859 1.0299108 0.92488825 0.97139305 0.99780303 1.3373784 0.9595609 1.1179882 0.9925209 1.1352769 0.96642286 1.155791 0.90593374 1.0225699 1.0189351 0.90022385 0.9942923 0.98557526 0.95073044 1.0243167 1.0848055 0.93792063 0.98236704 0.85626894 0.94822866 1.1390568 0.94981474 0.98970985 0.95869714 0.9503923 0.8872394 0.89192885 1.0453129 0.9141011 0.846869 1.0003762 1.0166227 1.1170356 0.9209656 0.9849738 1.0943649 1.0733302 0.9493417 1.1531875 1.0314459 1.0253874 1.04204 1.0816494 0.90393037 1.0667195 1.1844074 0.8651327 1.061825 1.0387329 1.1691841 0.8049973 0.9573575 1.0417992 1.1262906 0.9966263 0.95268595 0.97706264 0.99236363 1.0599493 1.0248138 1.0002655 1.0078367 0.91810995 1.0612651 1.0015234 1.0966454 1.0376927 0.98927504 0.98466974 1.1552355 1.0822419 0.8021327 1.0183187 0.94662327 0.96330625 1.020403 1.0876168 1.140872 0.9903118 1.0728816 1.0234135 0.9518986 0.9902535 0.86402243 0.9544741 0.8709054 1.0334576 0.9765448 1.046406 0.96988857 0.985176 1.0461543 0.79342693 1.0244302 1.0070322 0.9781456 0.89653915 0.94216245 0.87190616 0.9245302 0.9915172 0.92945147 0.9623696 1.1098052 0.8138053 0.8507899 0.93705493 0.9302411 1.1420559 0.95679873 1.0637635 0.9493242 0.94922775 0.9926297 0.746653 0.911548 0.949458 0.9476903 1.0256058 0.99031705 1.1946671 1.0687325 1.0045487 1.0370153 0.9733375 1.0490261 ] [1.0419265 1.0634035 0.9893531 1.0617365 0.97042847 1.1270678 0.99592 0.80482876 0.8571779 1.0909865 0.9340233 0.9340844 0.82111377 1.0597366 
0.92012775 0.92005783 1.0477341 0.8742027 1.0943466 0.98837394 1.0649956 0.983317 0.868403 1.0409292 0.94649875 0.9559918 0.9552916 1.0638704 0.9644595 1.0117933 1.0120012 0.97444993 1.0459126 0.9311218 0.9068245 1.1019433 0.9380705 0.9641257 0.83285046 1.1562246 1.1210952 1.0165952 0.9837557 0.8919136 1.0042995 1.1614101 0.9566828 0.9587757 1.0512233 0.9372778 0.9565411 0.7595418 1.0073377 1.0614823 1.0702845 1.1174792 0.99650055 1.142784 0.9440702 0.94180286 1.0442463 0.9030486 0.8148864 0.9553048 0.96640646 0.8758006 0.9420597 0.93292344 0.870501 1.1008775 0.9865695 0.9948117 0.97080475 0.9513732 1.1546861 1.0769435 0.8992096 1.1201326 1.2520392 1.0184807 0.88060033 0.9763892 0.95407075 0.9579582 0.91258705 0.9181727 1.01658 1.0815707 1.1565102 0.96674097 1.0400974 0.91318136 0.8167029 0.8952096 0.9217024 1.1306617 0.8757064 1.1176548 0.9809089 1.0098995 0.90648776 0.9203232 1.0943608 0.9709091 1.0340197 1.1545551 1.2349689 0.9634212 1.0084733 0.9626821 1.0050551 1.0078847 0.99611104 0.96090424 0.8882323 1.1232318 0.9460467 0.9437218 1.0229323 0.81103235 1.075894 1.0398648 1.0187086 0.9630085 0.94039536 1.0660259 0.8727618 1.0605177 0.9022627 0.98225147 0.92542404 0.82226574 1.1668985 0.9929572 0.99338293 1.0543772 0.8043229 0.9092971 0.94318026 1.3043792 0.9037593 0.88515425 0.9723008 1.0395443 1.0371095 1.1145571 0.94881916 1.027886 0.92111576 1.1369953 0.87602437 1.1285994 1.1145775 0.8897232 1.0984207 0.8636784 0.9607401 1.0653424 1.07777 0.8681942 0.93800986 1.1846497 1.0375019 1.0510272 0.88368076 0.99547833 0.89529145 0.88949335 1.0324167 0.9067159 0.985627 0.8904869 1.0895131 1.031468 1.0318878 1.0005072 0.9778776 1.1186153 0.9960876 0.81124115 0.9341678 0.9436062 0.8412861 1.0586301 0.9489484 0.8135896 1.0780939 0.9929085 0.95953953 0.9402028 0.9436696 0.9433422 0.85674626 0.98285323 1.0922986 0.958392 1.171162 0.76179457 1.059202 0.86669296 1.3176917 0.95786816 0.8568868 0.88747543 0.9531389 1.2025404 1.0909108 1.0430357 0.8984789 0.9278922 1.0531199 
1.0056082 0.99120283 1.1079218 0.93407995 0.9679391 1.0508239 0.9030146 0.9601637 1.0825057 0.99357337 0.94317245 1.1512796 0.9741825 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:-2 - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6593.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-2]]() %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[[1.0118334 0.9549764 1.0391195 0.99123734 0.95088464 1.0889566 0.9879552 0.94005895 0.9668548 0.8152592 0.93929726 0.9908796 1.1229511 1.0895474 0.89172965 1.0972424 1.0249829 1.343409 0.9909769 1.052862 0.91082287 1.2597456 1.0605366 0.9262662 0.8797507 0.91404074 0.8721751 1.1924407 0.95163965 1.052904 1.1496813 0.83799493 0.8763643 0.89911044 0.9860867 1.1529905 1.1027753 0.900921 0.92259103 1.0891693 0.9079441 1.0968603 1.0365545 0.93332046 1.068209 0.91820455 0.9969228 1.2298193 1.1055986 0.90273905 1.0596333 0.8843416 0.9414511 1.0828122 1.1635851 0.922033 0.9357997 0.8872286 0.90818346 1.0786114 0.79275376 1.0602931 0.807528 0.8672946 0.8903343 1.0603245 1.1722118 1.0901617 0.9780119 0.91667527 1.1060244 1.0660561 1.0270469 1.0621939 1.0632055 1.023324 0.9572883 1.1929129 0.96581906 0.9919526 1.0632411 0.84867746 0.9253839 1.0473818 0.9269408 1.0312464 1.1191766 1.0551587 0.88021463 0.8953294 0.86459804 0.9622588 1.0259197 1.0701381 1.0063063 0.9867077 1.1575342 1.0759779 1.0680509 1.1513127 0.8376887 0.95132434 1.1906399 0.9647351 0.9215929 1.0644087 1.0260642 1.0047646 0.91765 1.0268619 0.95484054 1.024615 0.98280025 1.0362049 1.0969592 0.9916246 0.95272416 1.012323 0.9631216 1.1305435 1.0714818 0.91066253 0.8251142 1.1015284 1.0052614 1.0826415 1.0576059 0.92495024 1.2437296 0.9585373 1.166098 0.8932954 1.0147226 1.2103556 0.94236225 0.9034031 1.0111946 0.9757501 0.9511533 0.86995816 1.1607212 0.9278146 1.069828 0.94660425 1.0196097 0.9797494 0.9325406 0.8647159 0.9588512 1.0605028 0.98554486 1.1155412 
0.99970394 1.0731723 1.0985621 1.0574859 0.93159187 1.0947016 0.97116107 1.0771947 1.1061851 0.9755409 0.99032706 1.008469 0.9481559 0.9664824 1.0137348 1.0727282 1.0673156 0.9554119 1.0407189 1.0148028 0.86475235 1.0677953 0.9819721 0.9529954 1.0247655 0.98115975 1.0010209 1.1042937 0.93558514 1.038872 0.90574735 0.94762975 1.31221 1.0554987 1.0484711 1.0295832 0.92998993 1.109379 0.8708643 0.97124034 1.0235195 1.1780692 1.0528421 1.0413191 0.89688355 0.935471 1.0475019 1.0139116 0.9902916 0.96212876 0.9222438 0.8854856 0.95690584 0.87365216 1.0389335 1.0561047 0.99931574 1.167282 1.0230039 1.1314721 1.3153377 0.94375247 1.1102452 1.0107588 0.99909663 0.8947375 0.9958444 1.0750648 1.0458829 1.1803904 0.85843825 1.0306304 ] [1.0963281 1.1204951 1.0746422 0.9870005 1.0992755 0.9831585 0.97914314 1.0380359 0.8608451 1.1893395 1.0006658 0.9980278 0.9314984 1.3261695 1.0570798 0.9896718 1.0842786 0.895653 1.0418959 1.0488027 1.0421121 0.9546636 1.1383641 1.0303911 1.1081026 1.0242114 1.1531847 1.0372854 1.0949063 1.015057 1.0601758 0.83300483 1.1120892 1.0429358 0.96494603 1.0597403 0.9829718 0.95810837 1.0750984 0.9769075 0.9420769 1.1100038 1.1660043 1.1254417 0.8840363 1.032583 1.000305 0.9613683 1.0707707 1.0625744 0.88926554 1.1232674 1.20347 0.91838604 0.97066975 1.0963 1.078186 1.0263928 1.0140809 1.0533427 1.190285 1.0088314 1.0366392 0.91626185 1.0356221 1.040805 1.0047464 0.8004993 0.9192057 1.0092862 1.0601737 0.8613717 0.8922437 1.2100561 1.0634911 1.0215752 1.0426017 0.9493464 1.0654827 0.9306089 1.0004301 1.0003402 1.1138223 0.83317286 1.037505 1.0047868 0.978326 1.0239486 1.0048379 0.96120036 0.88156664 0.93473226 0.9449585 1.0764351 0.989697 0.8742217 0.870374 1.0494512 1.2556518 1.0425558 0.9863925 0.937881 0.8090657 0.9743272 0.98788804 0.8865047 1.1085961 1.0041109 1.0700256 0.92573035 0.97830087 1.0535617 0.91515344 1.0625355 1.0029609 1.0626768 0.96695334 1.0206847 0.8173052 1.0551119 1.0224373 0.95961726 0.9769346 0.9810917 0.948411 1.0990617 
1.0292464 1.054651 0.935062 1.1194077 0.892817 1.0124483 1.0861537 1.0723166 1.0061396 0.9686308 1.0660497 1.1882777 1.125485 0.8597584 0.91467065 0.91481316 0.9369568 1.1038872 0.8562385 0.93205506 0.9376185 1.0303673 0.88652575 1.0448691 1.0009712 1.1059114 0.8338591 0.99180025 1.0502716 1.0336127 0.919279 1.1508292 1.144792 1.0941718 0.98156303 0.8783958 0.9830571 0.8466261 0.98707294 0.9852522 0.93206906 1.0153754 0.93283063 0.8350999 0.9880688 1.0992244 0.9384004 0.9289621 1.0858474 0.8653393 0.92310023 1.1053414 1.0625086 1.027785 1.052329 1.0677537 1.0170014 0.8895732 0.82032967 1.0184484 1.0559909 0.91631454 0.9720559 0.9960113 0.9593399 1.044409 0.9152564 0.9825101 1.120892 0.85780257 1.0877485 0.9726501 0.85292953 0.99604654 0.92225546 0.93278426 1.0150557 0.972838 1.109476 0.8671779 0.99724656 1.0533916 0.8473811 0.8206301 0.9979065 1.1313492 0.9978697 0.97708803 1.0672499 1.025779 0.8954464 1.1515744 1.0382078 1.0913337 0.94835395 0.8951946 1.1039097 0.89686203] [1.0720985 1.1737134 0.8325814 1.0351391 0.9423374 1.0388188 1.0364733 1.0887969 0.9164976 0.9923144 1.0138638 0.9787395 0.9733296 0.9687807 1.055131 0.99111456 1.2349243 0.9866957 1.0718639 0.9953317 1.0876074 0.9550367 0.9442752 1.0410634 1.0081788 1.0091637 0.99179727 1.0143238 1.032882 0.89039123 1.0638993 1.0321918 0.99811095 1.0416343 1.1324717 1.0451374 1.134267 1.2074163 0.90302914 1.4629499 1.051593 0.94265354 0.9134294 1.0677114 1.0319369 1.0884407 0.90916085 0.90019554 1.0302898 1.087882 0.88135254 0.86096174 0.97792727 1.028099 0.9323055 0.8999007 1.0369569 0.9343308 1.1932794 1.0015215 1.015496 1.0071095 0.9142356 0.88340527 0.93610406 1.1588372 0.92956495 1.0297377 0.83886623 0.93930006 0.94162905 1.0312077 0.89430344 1.1063333 1.1396124 1.1813341 0.93473095 0.97241205 0.96439147 1.122947 0.94186336 1.0425184 1.0453509 1.1535485 0.9223762 1.0072434 0.912892 1.0844206 1.0614042 1.1035659 0.86306065 0.8694969 0.7898352 0.9948907 1.022904 1.0940299 0.92062634 0.99567777 1.120297 
1.214852 0.937418 1.0730464 0.94066113 1.0515292 0.99217147 0.71769327 0.981924 1.0813752 1.0677192 1.1115111 0.93172765 0.95728636 0.9948264 1.2307048 0.9551871 0.9200132 0.9634613 0.8621226 0.98926795 0.982048 1.1040123 1.0482603 1.0227262 0.9700025 0.9759793 0.92033726 0.8618825 1.0574497 0.99501026 1.1534871 1.0164099 1.0462877 0.92099786 0.82979834 1.0142201 0.92298836 1.0607349 0.86214876 0.9117125 1.1512798 1.0329338 0.9503511 1.0329255 0.9722226 1.0679469 1.0239962 0.97807145 1.0611249 1.0304698 0.89758015 1.0395114 0.857812 0.9398894 0.8836419 1.1020757 1.033025 1.0169362 0.97940797 0.81624883 0.9622684 1.021603 0.9092004 1.0100976 1.1637768 1.0488397 0.9175234 1.2486626 0.9820129 0.8543439 1.0042527 1.0386924 0.97113675 0.96821725 1.05022 1.0006759 1.0027668 1.0385312 0.9275572 0.98602957 0.94246507 0.89575684 1.0414119 1.0849353 1.053001 0.98036367 0.9348683 1.1629016 1.0314535 0.99860364 1.004279 1.0447733 1.03352 0.88652897 0.9499649 1.1218348 0.95485026 1.0625614 0.94689816 0.9142205 0.9932436 0.8328519 1.0884203 0.8764907 0.93561274 0.92035073 1.0764128 1.2071295 0.9901323 1.0045428 1.0003988 0.97804123 0.9137145 0.9643875 1.0117679 1.0093672 0.9390781 0.99664146 0.92681324 0.862396 0.8854596 1.0935706 1.0234786 0.9353618 1.0973166 ]]]; ov_res: [[[1.0118331 0.95497644 1.0391197 0.9912378 0.95088476 1.0889564 0.987955 0.940059 0.96685493 0.8152591 0.93929696 0.9908795 1.1229515 1.0895479 0.89172983 1.0972426 1.0249829 1.3434095 0.9909769 1.0528619 0.9108228 1.2597461 1.0605367 0.9262665 0.87975067 0.9140408 0.8721753 1.192441 0.95163953 1.0529042 1.1496813 0.8379949 0.87636447 0.89911026 0.9860867 1.1529903 1.1027752 0.9009207 0.9225911 1.0891697 0.9079436 1.0968606 1.0365547 0.93332016 1.0682093 0.9182047 0.99692255 1.2298194 1.1055987 0.9027389 1.0596334 0.8843416 0.9414511 1.0828123 1.1635852 0.9220327 0.9357996 0.8872286 0.9081833 1.0786117 0.7927538 1.0602934 0.8075283 0.86729455 0.8903343 1.0603244 1.1722119 1.0901616 0.978012 0.9166751 1.106024 
1.0660565 1.0270473 1.0621941 1.0632056 1.0233243 0.95728874 1.1929127 0.9658191 0.9919527 1.063241 0.8486774 0.92538404 1.0473815 0.9269408 1.0312465 1.1191766 1.0551587 0.8802147 0.8953296 0.8645983 0.9622591 1.0259198 1.0701381 1.0063064 0.98670775 1.1575342 1.0759784 1.068051 1.151313 0.8376886 0.95132446 1.1906395 0.9647354 0.921593 1.0644087 1.0260646 1.0047647 0.91765 1.0268621 0.9548409 1.0246146 0.9828002 1.0362048 1.0969595 0.99162483 0.9527246 1.0123234 0.9631218 1.1305437 1.0714818 0.91066253 0.8251141 1.1015286 1.0052615 1.0826414 1.0576055 0.9249502 1.2437292 0.9585375 1.1660976 0.89329547 1.0147222 1.2103562 0.94236225 0.9034035 1.0111948 0.9757501 0.95115316 0.869958 1.1607207 0.92781466 1.0698279 0.9466045 1.01961 0.9797495 0.9325409 0.86471605 0.9588514 1.0605031 0.9855448 1.1155413 0.99970376 1.0731726 1.0985619 1.0574862 0.9315925 1.0947019 0.97116154 1.0771946 1.1061851 0.9755412 0.99032694 1.0084696 0.9481559 0.9664821 1.0137346 1.0727284 1.0673151 0.955412 1.040719 1.0148026 0.8647526 1.067795 0.9819722 0.9529953 1.0247654 0.9811598 1.0010214 1.1042938 0.9355853 1.0388722 0.90574735 0.9476299 1.3122098 1.0554988 1.0484712 1.0295836 0.92999 1.1093794 0.8708644 0.9712402 1.0235195 1.1780697 1.0528424 1.0413193 0.89688367 0.935471 1.0475018 1.0139118 0.99029166 0.96212864 0.92224383 0.88548553 0.9569057 0.8736522 1.0389335 1.0561048 0.9993158 1.1672821 1.0230038 1.1314722 1.3153378 0.94375235 1.1102452 1.0107589 0.9990968 0.8947376 0.9958446 1.0750649 1.0458833 1.1803906 0.85843843 1.0306306 ] [1.0963281 1.1204952 1.0746424 0.9870006 1.0992754 0.9831586 0.9791429 1.0380358 0.8608451 1.1893395 1.0006661 0.99802804 0.9314982 1.3261695 1.0570798 0.9896718 1.0842787 0.895653 1.0418959 1.0488026 1.042112 0.9546634 1.1383643 1.0303911 1.1081029 1.0242109 1.1531845 1.0372853 1.0949062 1.0150567 1.0601759 0.8330051 1.1120889 1.042936 0.96494603 1.0597402 0.98297215 0.9581081 1.0750988 0.97690773 0.94207734 1.1100038 1.1660043 1.1254416 0.8840362 
1.0325832 1.0003049 0.9613685 1.070771 1.0625745 0.88926566 1.1232678 1.2034702 0.91838604 0.9706699 1.0963002 1.0781859 1.0263929 1.0140809 1.0533425 1.1902851 1.0088311 1.0366391 0.9162621 1.0356224 1.0408052 1.0047466 0.8004994 0.9192058 1.0092863 1.0601737 0.8613717 0.8922436 1.210056 1.0634913 1.021575 1.0426013 0.9493461 1.0654831 0.9306091 1.0004299 1.0003402 1.1138226 0.8331726 1.0375053 1.0047868 0.978326 1.0239483 1.004838 0.96120054 0.88156664 0.9347324 0.94495887 1.0764349 0.9896971 0.8742217 0.8703739 1.0494516 1.2556518 1.0425559 0.9863928 0.93788105 0.8090657 0.9743275 0.98788804 0.88650453 1.1085961 1.0041108 1.0700257 0.92573047 0.9783009 1.0535614 0.9151536 1.0625355 1.0029607 1.062677 0.96695316 1.0206845 0.81730545 1.0551124 1.0224373 0.9596172 0.97693455 0.9810918 0.9484114 1.0990617 1.0292461 1.0546513 0.93506193 1.1194077 0.8928169 1.0124482 1.0861537 1.0723166 1.0061399 0.96863073 1.0660496 1.1882777 1.1254852 0.8597586 0.91467077 0.9148134 0.93695676 1.103887 0.85623866 0.9320552 0.9376186 1.0303676 0.8865257 1.0448692 1.0009711 1.1059116 0.8338589 0.9918002 1.0502716 1.0336131 0.9192793 1.1508293 1.1447921 1.0941716 0.98156285 0.8783961 0.983057 0.8466256 0.9870728 0.9852521 0.9320691 1.0153751 0.93283087 0.83509994 0.98806906 1.0992244 0.9384001 0.92896223 1.0858476 0.8653392 0.92310035 1.1053419 1.0625087 1.027785 1.052329 1.0677537 1.0170015 0.8895733 0.8203295 1.0184484 1.0559908 0.9163145 0.97205573 0.99601096 0.9593401 1.0444094 0.9152567 0.9825106 1.1208922 0.8578027 1.0877485 0.9726503 0.85292953 0.9960466 0.92225516 0.9327844 1.0150557 0.9728381 1.1094762 0.8671783 0.9972468 1.0533919 0.84738135 0.82062984 0.9979068 1.131349 0.9978696 0.9770883 1.0672498 1.0257794 0.8954464 1.1515743 1.0382074 1.0913335 0.94835395 0.89519453 1.1039097 0.89686215] [1.0720981 1.1737137 0.8325814 1.0351391 0.9423375 1.0388191 1.0364726 1.0887969 0.9164975 0.9923142 1.0138636 0.97873926 0.9733293 0.9687811 1.0551311 0.9911141 1.2349246 0.9866957 
1.0718638 0.9953318 1.0876074 0.95503616 0.9442753 1.0410634 1.0081791 1.0091641 0.99179673 1.0143238 1.0328821 0.8903912 1.0638996 1.0321918 0.9981111 1.0416343 1.132472 1.0451378 1.134267 1.2074164 0.90302914 1.4629501 1.0515931 0.9426538 0.9134296 1.0677117 1.031937 1.0884408 0.90916073 0.90019536 1.0302899 1.087882 0.8813526 0.86096174 0.9779273 1.0280992 0.9323056 0.8999007 1.0369573 0.93433064 1.1932791 1.0015215 1.0154961 1.0071094 0.9142354 0.8834053 0.9361041 1.1588378 0.92956495 1.0297376 0.8388662 0.9393004 0.94162893 1.0312079 0.8943033 1.1063334 1.1396124 1.1813339 0.93473077 0.97241205 0.96439123 1.1229469 0.9418637 1.0425181 1.045351 1.1535485 0.9223761 1.0072434 0.91289157 1.0844202 1.0614045 1.103566 0.8630607 0.8694969 0.78983533 0.99489045 1.0229045 1.0940299 0.92062616 0.995678 1.1202971 1.2148522 0.9374181 1.0730469 0.9406614 1.051529 0.99217135 0.71769327 0.981924 1.0813751 1.0677193 1.1115111 0.9317278 0.9572863 0.9948266 1.2307044 0.9551872 0.9200132 0.96346104 0.8621227 0.98926795 0.9820478 1.1040123 1.0482601 1.0227263 0.97000235 0.97597945 0.92033744 0.8618826 1.0574498 0.99501055 1.1534871 1.0164098 1.0462875 0.92099756 0.82979846 1.0142206 0.9229887 1.0607351 0.86214876 0.9117127 1.1512793 1.0329342 0.9503508 1.0329255 0.9722224 1.0679467 1.0239965 0.9780717 1.0611248 1.0304694 0.8975801 1.0395116 0.857812 0.9398894 0.88364166 1.1020757 1.0330249 1.0169362 0.979408 0.8162488 0.9622684 1.0216032 0.90920055 1.0100976 1.1637771 1.0488397 0.9175235 1.2486626 0.98201287 0.8543443 1.0042526 1.0386924 0.9711366 0.9682173 1.0502197 1.0006762 1.002767 1.0385312 0.927557 0.98602986 0.9424648 0.8957567 1.0414119 1.0849353 1.0530012 0.9803634 0.9348684 1.1629022 1.0314534 0.998604 1.0042793 1.0447732 1.0335202 0.88652873 0.9499645 1.1218344 0.95485073 1.0625613 0.9468981 0.91422063 0.99324346 0.8328517 1.0884202 0.87649095 0.93561274 0.9203505 1.0764124 1.2071295 0.9901323 1.0045428 1.000399 0.97804135 0.9137146 0.96438736 1.0117675 1.0093673 
0.93907833 0.9966415 0.92681336 0.8623965 0.8854597 1.0935708 1.0234787 0.93536174 1.0973169 ]]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:(0, 1) - unbiased:False ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6595.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1]]() %self.unbiased : bool = prim::Constant[value=0]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[2.6326079e+00 2.5232941e-01 2.9246858e-01 ... 1.5998940e-01 1.4966974e+00 1.1978964e-01] [3.9730597e-01 2.2975180e-01 4.5733726e-01 ... 5.6916323e-02 8.3258855e-01 1.4552942e-01] [5.6173790e-01 7.1551228e-01 1.2141365e+00 ... 2.2359797e-01 7.4624324e-01 6.7844534e-01] ... [3.3664176e-01 9.7557318e-01 2.4197972e+00 ... 3.7067785e+00 1.8424026e+00 9.6947134e-02] [7.3474556e-02 3.7872058e-01 1.0473244e+00 ... 9.7161216e-01 7.9727292e-01 3.8051012e-01] [6.9202894e-01 2.0245428e+00 5.8527786e-01 ... 4.7399455e-01 9.8517281e-04 2.2784822e-01]]; ov_res: [[2.6326079e+00 2.5232941e-01 2.9246855e-01 ... 1.5998940e-01 1.4966974e+00 1.1978965e-01] [3.9730597e-01 2.2975178e-01 4.5733726e-01 ... 5.6916326e-02 8.3258849e-01 1.4552943e-01] [5.6173784e-01 7.1551228e-01 1.2141365e+00 ... 2.2359800e-01 7.4624324e-01 6.7844534e-01] ... [3.3664176e-01 9.7557324e-01 2.4197974e+00 ... 3.7067785e+00 1.8424025e+00 9.6947126e-02] [7.3474556e-02 3.7872055e-01 1.0473244e+00 ... 9.7161216e-01 7.9727292e-01 3.8051012e-01] [6.9202900e-01 2.0245428e+00 5.8527786e-01 ... 4.7399458e-01 9.8517269e-04 2.2784822e-01]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:(0, 1) - unbiased:True ] | 0.04 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6597.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1]]() %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[2.7153788 1.0914673 0.1685684 ... 0.82514846 0.3970259 0.02438863] [0.26430598 2.1250222 0.27512202 ... 0.72476375 0.00920337 2.0022733 ] [1.2817206 1.0012503 0.2213686 ... 0.26899156 3.3112943 2.3432832 ] ... [2.8484406 1.4942733 0.3020611 ... 1.6329501 0.5133594 0.6220393 ] [0.215237 2.3544264 0.17184873 ... 0.32686207 2.2637234 0.10868744] [0.7004079 0.19500816 0.05423391 ... 0.8226701 1.3163637 2.002456 ]]; ov_res: [[2.715379 1.0914673 0.1685684 ... 0.82514846 0.39702588 0.02438863] [0.264306 2.1250222 0.27512205 ... 0.7247638 0.00920337 2.0022736 ] [1.2817206 1.0012503 0.22136858 ... 0.26899156 3.3112946 2.3432832 ] ... [2.8484406 1.4942734 0.3020611 ... 1.6329503 0.51335937 0.6220394 ] [0.21523702 2.3544264 0.17184873 ... 0.32686207 2.2637234 0.10868745] [0.7004079 0.19500816 0.05423391 ... 0.8226701 1.3163637 2.002456 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:(-1, -2) - unbiased:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6599.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1, -2]]() %self.unbiased : bool = prim::Constant[value=0]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [[0.9922616 1.00846 0.9908456]]; ov_res: [[0.99226147 1.0084598 0.9908453 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:(-1, -2) - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6601.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[-1, -2]]() %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [[0.9903041 0.999795 0.99296993]]; ov_res: [[0.9903045 0.9997954 0.99296993]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:(0, 1, -1) - unbiased:False ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6603.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1, -1]]() %self.unbiased : bool = prim::Constant[value=0]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: [0.96361256 1.0255684 1.1025776 1.0195332 0.9572895 0.94115186 1.0227423 1.0543369 1.1102386 1.0529859 0.9817194 0.9860375 1.0403003 0.92865974 1.0014533 1.0793844 1.0025717 0.93767124 0.9035468 1.0379797 1.0266485 0.9657796 0.9588119 0.88651776 1.0416573 1.023268 0.94977206 1.0338174 0.9458823 0.96637285 1.0521996 0.9675818 0.9753917 1.0446867 0.92916054 1.0021528 1.0419351 1.1188035 1.0895623 1.0662037 1.0429672 0.9922007 1.0215728 1.0898066 0.9707135 1.1342056 1.052334 1.0654265 1.0575681 1.0542386 1.0093353 0.9475679 1.0838346 1.1230178 1.0388675 1.0443504 0.9045888 1.0532641 0.9904215 1.0245264 0.9177226 1.0524504 1.1145098 1.024381 0.9486724 1.0180093 0.96573687 1.0501237 1.0417895 0.99361616 1.022295 1.0192771 1.0694665 0.9109777 1.0679473 1.0595862 0.9878261 1.020401 1.0394977 1.0640324 1.0016774 0.9925523 0.9904827 0.9080085 1.032125 0.8993637 1.0064778 1.0266145 0.8954937 0.9904941 1.037549 0.9792099 1.0690396 0.9411337 1.0812271 0.9621421 0.99710107 1.0390211 0.92566174 0.94665 0.90816134 1.0332972 0.97579557 0.90320456 1.0394036 0.93290627 0.94108266 1.0154718 1.0203438 0.93536067 0.9983336 0.91329485 0.90881664 0.88081807 0.9820013 0.95482695 1.0085489 1.0286474 0.9476049 1.0122896 0.9911169 0.94584846 0.88704866 0.97970015 0.95677286 0.99267954 0.97817284 0.9865391 1.0134393 1.086155 1.0246865 1.0077581 0.9920793 1.0707165 0.93734324 0.8864735 1.0316485 1.0060477 0.97597766 0.943571 1.0218819 0.96189755 0.999538 0.95173347 0.9730302 0.9537511 0.9606984 0.96377265 1.031208 1.0235405 0.9951943 0.98696035 0.99513835 0.9896383 0.9796102 
1.0146643 0.9478065 0.92366666 0.94644994 1.0298345 1.1270767 0.9919032 0.9571656 0.9811299 1.0379231 0.9582999 1.0027977 0.9158064 0.9709297 0.98159474 1.0038462 1.0343518 1.0337615 0.97878855 0.9898812 1.0236112 0.9385745 1.0341688 1.0670452 1.0352418 0.9948962 1.0562222 0.989841 1.0662681 0.99701107 0.9473726 1.0284163 0.9441579 1.0045518 1.1766776 0.9221931 1.005797 1.0609812 1.0143114 1.0739083 1.0605254 0.9160706 1.0420839 0.8908833 0.94238365 1.0250599 1.0236719 0.9463335 1.048038 0.91949016 0.94190186 1.069427 1.0111306 1.083901 0.97279763 0.9078023 0.9354425 1.0932294 0.90700155 1.0004045 0.9735608 0.9795845 1.0102654 1.0381196 1.0292943 1.0138139 0.99592113 0.96016395 1.0301846 ]; ov_res: [0.9636126 1.0255685 1.1025776 1.0195332 0.9572896 0.94115186 1.0227423 1.0543369 1.1102386 1.0529859 0.98171943 0.9860375 1.0403001 0.92865974 1.0014532 1.0793844 1.0025717 0.9376713 0.9035468 1.0379798 1.0266485 0.9657796 0.9588119 0.8865178 1.0416573 1.023268 0.94977206 1.0338174 0.9458823 0.9663729 1.0521996 0.9675818 0.97539175 1.0446867 0.92916054 1.0021528 1.0419351 1.1188036 1.0895623 1.0662037 1.0429671 0.9922007 1.0215728 1.0898066 0.9707135 1.1342055 1.052334 1.0654265 1.0575681 1.0542387 1.0093352 0.9475679 1.0838346 1.1230177 1.0388675 1.0443504 0.9045888 1.0532641 0.9904215 1.0245264 0.9177226 1.0524504 1.1145098 1.024381 0.94867235 1.0180094 0.96573687 1.0501237 1.0417894 0.9936162 1.022295 1.019277 1.0694666 0.91097766 1.0679471 1.0595862 0.9878261 1.020401 1.0394977 1.0640324 1.0016774 0.9925523 0.9904827 0.90800846 1.032125 0.89936376 1.0064778 1.0266145 0.8954936 0.9904941 1.037549 0.9792099 1.0690397 0.9411337 1.0812272 0.9621421 0.9971011 1.0390211 0.92566174 0.94665 0.90816134 1.0332972 0.97579557 0.9032045 1.0394034 0.9329062 0.9410827 1.0154718 1.0203439 0.93536067 0.9983335 0.91329485 0.90881664 0.8808181 0.98200136 0.954827 1.008549 1.0286474 0.94760484 1.0122895 0.9911168 0.9458486 0.8870486 0.9797001 0.95677286 0.9926796 0.97817284 0.9865392 
1.0134394 1.086155 1.0246863 1.0077581 0.99207944 1.0707165 0.93734324 0.8864734 1.0316485 1.0060477 0.9759778 0.943571 1.021882 0.96189755 0.99953806 0.95173347 0.97303027 0.95375115 0.9606984 0.96377265 1.031208 1.0235405 0.9951943 0.9869603 0.99513835 0.9896384 0.97961015 1.0146643 0.9478065 0.92366666 0.94644994 1.0298345 1.1270767 0.9919032 0.9571656 0.98112977 1.0379231 0.95829993 1.0027977 0.9158065 0.97092974 0.9815948 1.0038462 1.034352 1.0337615 0.97878855 0.9898812 1.0236112 0.9385745 1.034169 1.0670452 1.0352418 0.99489623 1.0562221 0.989841 1.0662681 0.99701107 0.9473726 1.0284163 0.9441579 1.0045519 1.1766775 0.92219305 1.005797 1.0609812 1.0143114 1.0739083 1.0605254 0.91607064 1.042084 0.8908833 0.94238365 1.0250599 1.0236719 0.94633347 1.048038 0.91949016 0.94190186 1.069427 1.0111306 1.083901 0.9727976 0.9078023 0.9354425 1.0932295 0.9070016 1.0004046 0.97356087 0.97958446 1.0102654 1.0381196 1.0292941 1.0138139 0.9959212 0.96016395 1.0301846 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:(0, 1, -1) - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6605.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1, -1]]() %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: [0.9825971 0.90728337 0.9481228 1.0539442 1.0183246 1.0010607 1.0188926 1.039136 1.0518916 1.0261223 1.0234551 1.0495185 1.019571 0.9365403 0.9969169 0.9824241 1.0349456 1.0350735 1.0032765 0.95622647 0.99329543 1.1318699 1.0140867 1.0180815 1.0415825 1.0058693 1.0125804 1.0074186 0.93421286 0.9989542 1.0152546 1.0403895 1.0404958 1.0107687 1.0418017 0.9906742 0.9443559 1.0473287 0.97770005 1.1251197 1.0353934 1.0103916 1.0299997 1.0716815 0.9894092 1.0275478 1.0059081 1.0643415 0.9692157 1.0717014 1.0479727 0.9381753 1.0202231 0.9643276 0.9543164 0.99781 0.93179274 1.0325769 0.99029404 1.1009382 0.95431113 0.96381044 0.96122736 1.0956179 0.9415664 0.9531286 0.94467944 1.0297656 1.1272163 1.058782 1.2397212 1.0477825 0.95237374 0.9588502 1.0010382 0.9693924 0.89706975 1.091653 1.0701792 0.9662165 0.8865885 0.99556494 0.9426513 0.9906548 0.9870213 0.9839838 0.90158457 1.0391043 0.9902914 1.0265114 0.953281 0.882203 0.8916372 1.0498717 1.0199904 1.0756474 0.9672209 0.9583572 0.93679893 1.0577483 1.0447087 1.0381311 0.99398756 1.1938902 0.96650195 0.9738022 0.973724 0.91526276 0.98666257 1.0859675 1.0891644 1.0303472 0.9535196 0.93224174 0.92335767 1.0134511 1.0204446 0.93020993 1.0212793 1.0310245 1.0565245 0.9656151 1.0646514 0.90302473 1.0277904 0.98849213 1.0078006 0.93023324 0.96243674 1.0405653 0.99685436 0.9585766 0.9699745 0.96281165 1.0436299 0.92201793 1.0018423 1.0002192 1.0268067 1.0513113 0.9063392 0.9098036 0.9588461 1.0402309 1.0471202 1.0285945 0.98513126 1.0506394 0.9416709 1.0853598 0.96360725 
0.91871774 0.9701681 0.9102835 0.96585697 1.0176433 1.0909501 0.9105197 0.9367905 0.9214728 0.95259804 1.0414624 0.93740666 0.9914122 0.96294045 0.95996964 0.93067485 0.9607414 0.99291956 1.0468194 0.93667734 0.962072 1.0178536 1.0649452 1.031219 1.0547314 1.0406703 1.0184604 0.9709343 0.960339 1.0037892 1.0189884 1.0456849 0.97762364 1.0160562 0.9988651 1.0254809 0.973891 0.98367506 0.9452818 1.0089753 1.002246 0.99306667 0.9001363 1.0015514 1.02009 0.9074273 0.9276425 0.9734321 0.9913519 0.9632017 0.97580063 1.0433484 0.937142 1.0751672 1.159532 1.0838531 0.97948366 1.0184386 1.0542625 1.0650225 1.0484343 1.0596786 0.9279438 1.0148041 1.0100565 0.94157815 1.051789 0.99922395 0.969535 0.99707186 0.98428065 1.0725884 1.1291864 ]; ov_res: [0.9825972 0.9072833 0.94812274 1.0539442 1.0183246 1.0010607 1.0188928 1.039136 1.0518916 1.0261223 1.0234551 1.0495186 1.019571 0.93654025 0.99691695 0.9824241 1.0349457 1.0350736 1.0032765 0.9562264 0.99329543 1.1318699 1.0140867 1.0180815 1.0415823 1.0058693 1.0125804 1.0074186 0.93421286 0.99895424 1.0152545 1.0403895 1.0404958 1.0107688 1.0418017 0.99067426 0.94435585 1.0473287 0.97770005 1.1251197 1.0353934 1.0103916 1.0299996 1.0716814 0.9894092 1.0275478 1.0059081 1.0643417 0.96921575 1.0717015 1.0479727 0.9381753 1.0202231 0.96432763 0.95431644 0.99781 0.93179274 1.032577 0.99029404 1.1009382 0.95431113 0.96381044 0.9612273 1.0956178 0.94156647 0.9531286 0.94467944 1.0297657 1.1272162 1.0587821 1.2397211 1.0477825 0.9523737 0.95885015 1.0010382 0.96939236 0.8970698 1.091653 1.0701793 0.96621656 0.8865885 0.99556494 0.9426513 0.9906548 0.98702127 0.9839839 0.90158457 1.0391043 0.9902914 1.0265115 0.9532809 0.88220304 0.8916372 1.0498717 1.0199904 1.0756474 0.96722096 0.9583572 0.9367989 1.0577483 1.0447087 1.038131 0.99398756 1.1938902 0.9665019 0.97380227 0.97372407 0.9152628 0.9866626 1.0859675 1.0891645 1.0303473 0.9535196 0.9322417 0.92335767 1.013451 1.0204445 0.9302099 1.0212792 1.0310245 1.0565245 0.9656151 
1.0646514 0.9030247 1.0277905 0.98849213 1.0078007 0.9302333 0.96243674 1.0405651 0.99685436 0.9585766 0.9699745 0.96281165 1.04363 0.922018 1.0018424 1.0002192 1.0268067 1.0513113 0.9063392 0.9098037 0.9588461 1.0402309 1.0471202 1.0285945 0.98513126 1.0506394 0.94167095 1.0853599 0.96360725 0.91871774 0.9701681 0.91028345 0.96585697 1.0176432 1.09095 0.9105197 0.9367905 0.9214728 0.9525981 1.0414623 0.93740666 0.9914122 0.96294045 0.9599697 0.93067497 0.9607414 0.99291956 1.0468196 0.9366773 0.96207196 1.0178536 1.0649453 1.0312189 1.0547315 1.0406704 1.0184604 0.9709343 0.960339 1.0037893 1.0189884 1.0456848 0.97762364 1.0160563 0.9988652 1.0254809 0.9738911 0.98367506 0.94528186 1.0089753 1.0022459 0.99306667 0.9001363 1.0015514 1.0200901 0.9074273 0.9276426 0.9734322 0.99135196 0.9632017 0.97580063 1.0433484 0.93714195 1.0751672 1.159532 1.083853 0.97948366 1.0184385 1.0542625 1.0650226 1.0484343 1.0596786 0.9279439 1.0148041 1.0100565 0.94157815 1.051789 0.999224 0.969535 0.99707186 0.9842807 1.0725884 1.1291865 ] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:(0, 1, 2, 3) - unbiased:False ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6607.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1, 2, 3]]() %self.unbiased : bool = prim::Constant[value=0]() %4 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.unbiased) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%4) fw_re: 0.9989643692970276; ov_res: 0.998964250087738 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_var.py::TestVar::test_var[ ie_device:CPU - precision:FP32 - keepdim:False - dim:(0, 1, 2, 3) - unbiased:True ] | 0.03 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_var.___torch_mangle_6609.aten_var, %x.1 : Tensor): %2 : int[] = prim::Constant[value=[0, 1, 2, 3]]() %self.keepdim : bool = prim::Constant[value=0]() %self.unbiased : bool = prim::Constant[value=1]() %5 : Tensor = aten::var(%x.1, %2, %self.unbiased, %self.keepdim) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_var.py:24:23 return (%5) fw_re: 1.0042903423309326; ov_res: 1.0042903423309326 Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_view.py::TestViewListConstruct::test_view_list_construct[ ie_device:CPU - precision:FP32 - input_data:(array([[[ 0.593007 , -0.93761286], [-1.25816195, 0.55911792], [-0.57572507, 1.84751355]], [[-0.31392138, 1.02679735], [ 0.51648735, -0.04934154], [-0.5993063 , -0.0255093 ]]]), array(2), array(6)) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_view.___torch_mangle_6610.aten_view_list_construct, %input_tensor.1 : Tensor, %dim1.1 : int, %dim2.1 : int): %4 : int[] = prim::ListConstruct(%dim1.1, %dim2.1) %5 : Tensor = aten::view(%input_tensor.1, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_view.py:21:23 return (%5) fw_re: [[ 0.593007 -0.93761286 -1.25816195 0.55911792 -0.57572507 1.84751355] [-0.31392138 1.02679735 0.51648735 -0.04934154 -0.5993063 -0.0255093 ]]; ov_res: [[ 0.59300703 -0.93761283 -1.2581619 0.55911791 -0.57572508 1.84751356] [-0.31392139 1.02679729 0.51648736 -0.04934154 -0.59930629 -0.0255093 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_view.py::TestViewListConstruct::test_view_list_construct[ ie_device:CPU - precision:FP32 - input_data:(array([-0.66803799, -2.49198415, -0.13615879, -0.72907845]), array(2), array(2)) ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_view.___torch_mangle_6612.aten_view_list_construct, %input_tensor.1 : Tensor, %dim1.1 : int, %dim2.1 : int): %4 : int[] = prim::ListConstruct(%dim1.1, %dim2.1) %5 : Tensor = aten::view(%input_tensor.1, %4) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_view.py:21:23 return (%5) fw_re: [[-0.66803799 -2.49198415] [-0.13615879 -0.72907845]]; ov_res: [[-0.66803801 -2.49198413] [-0.13615879 -0.72907847]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_view.py::TestView::test_view[ ie_device:CPU - precision:FP32 - input_data:(array([[[-0.55967283, -0.77687137], [ 0.82148629, 0.52207829], [-0.90995244, 0.22113533]], [[ 0.77903158, 0.05322116], [-0.74044742, -0.99872542], [ 0.90334074, -1.35989512]]]), 2, 6) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_view.___torch_mangle_6613.aten_view, %input_tensor.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 6]]() %3 : Tensor = aten::view(%input_tensor.1, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_view.py:50:23 return (%3) fw_re: [[-0.55967283 -0.77687137 0.82148629 0.52207829 -0.90995244 0.22113533] [ 0.77903158 0.05322116 -0.74044742 -0.99872542 0.90334074 -1.35989512]]; ov_res: [[-0.55967283 -0.77687138 0.82148629 0.52207828 -0.90995246 0.22113533] [ 0.77903157 0.05322116 -0.7404474 -0.99872541 0.90334076 -1.35989511]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_view.py::TestView::test_view[ ie_device:CPU - precision:FP32 - input_data:(array([-0.26289771, -1.4239652 , -0.46192302, -0.81162835]), 2, 2) ] | 0.01 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_view.___torch_mangle_6615.aten_view, %input_tensor.1 : Tensor): %2 : int[] = prim::Constant[value=[2, 2]]() %3 : Tensor = aten::view(%input_tensor.1, %2) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_view.py:50:23 return (%3) fw_re: [[-0.26289771 -1.4239652 ] [-0.46192302 -0.81162835]]; ov_res: [[-0.2628977 -1.42396522] [-0.46192303 -0.81162834]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'numpy.uint8'> - mask_fill:zeros ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6616.aten_where, %cond.1 : Tensor, %x.1 : Tensor, %y.1 : Tensor): %4 : Tensor = aten::where(%cond.1, %x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:24:23 return (%4) fw_re: [[ 2.29300360e-01 -1.77680685e+00 7.87861080e-01 -5.74959888e-04 1.77179870e+00 2.89329046e-01 -1.10287765e+00 1.11952984e+00 1.12987956e+00 2.49459678e+00] [ 7.25470900e-01 -1.07562695e-01 -1.27214317e-01 1.46888362e-01 9.58352948e-01 6.40721415e-01 3.17814670e-01 -1.19090149e+00 -1.38617921e-03 -2.20057058e+00]]; ov_res: [[ 2.29300365e-01 -1.77680683e+00 7.87861109e-01 -5.74959908e-04 1.77179873e+00 2.89329052e-01 -1.10287762e+00 1.11952984e+00 1.12987959e+00 2.49459672e+00] [ 7.25470901e-01 -1.07562698e-01 -1.27214313e-01 1.46888360e-01 9.58352923e-01 6.40721440e-01 3.17814678e-01 -1.19090152e+00 -1.38617924e-03 -2.20057058e+00]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'numpy.uint8'> - mask_fill:ones ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6618.aten_where, %cond.1 : Tensor, %x.1 : Tensor, %y.1 : Tensor): %4 : Tensor = aten::where(%cond.1, %x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:24:23 return (%4) fw_re: [[-1.10964916 0.73857638 0.85878958 -0.14781643 0.24263727 -0.15267608 0.81366892 0.34611663 0.46576634 -0.8518955 ] [-1.01007689 1.44175823 0.8094219 1.0724253 0.11613366 1.4957546 1.2494133 1.14096462 1.03670253 0.51422811]]; ov_res: [[-1.10964918 0.73857635 0.85878956 -0.14781643 0.24263726 -0.15267608 0.81366891 0.34611663 0.46576634 -0.85189551] [-1.01007688 1.44175828 0.8094219 1.07242525 0.11613366 1.4957546 1.24941325 1.14096463 1.03670251 0.51422811]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'numpy.uint8'> - mask_fill:random ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6620.aten_where, %cond.1 : Tensor, %x.1 : Tensor, %y.1 : Tensor): %4 : Tensor = aten::where(%cond.1, %x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:24:23 return (%4) fw_re: [[ 0.36832584 -1.02568202 -1.50989674 1.02359556 -0.77704593 -1.14929101 -0.10339323 -0.63062146 -1.31240633 0.42680494] [-1.16792143 0.83123693 -0.48161455 0.43382121 -0.16365625 -0.89999677 2.15776526 0.46573113 2.82382074 1.36668499]]; ov_res: [[ 0.36832583 -1.02568197 -1.50989676 1.02359557 -0.77704591 -1.14929104 -0.10339323 -0.63062143 -1.3124063 0.42680493] [-1.16792142 0.83123696 -0.48161456 0.4338212 -0.16365625 -0.89999676 2.15776515 0.46573111 2.82382083 1.36668503]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'bool'> - mask_fill:zeros ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6622.aten_where, %cond.1 : Tensor, %x.1 : Tensor, %y.1 : Tensor): %4 : Tensor = aten::where(%cond.1, %x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:24:23 return (%4) fw_re: [[ 1.0538181 0.62720841 -1.03962077 0.4140614 -0.03009252 0.05825402 -0.93135307 -0.72915563 -0.63348511 0.98205993] [ 1.45039267 -0.75827543 -1.13735891 0.22232591 -0.12312378 0.60378888 -0.64218011 -0.03369502 0.09960891 0.33838991]]; ov_res: [[ 1.05381811 0.62720841 -1.03962076 0.4140614 -0.03009252 0.05825402 -0.93135309 -0.7291556 -0.63348508 0.98205996] [ 1.45039272 -0.75827545 -1.1373589 0.22232591 -0.12312378 0.60378885 -0.64218009 -0.03369502 0.09960891 0.3383899 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'bool'> - mask_fill:ones ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6624.aten_where, %cond.1 : Tensor, %x.1 : Tensor, %y.1 : Tensor): %4 : Tensor = aten::where(%cond.1, %x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:24:23 return (%4) fw_re: [[ 0.80571755 0.28338754 0.32439437 -0.67236289 0.76150515 -0.24631397 0.74731918 0.0428341 0.13513727 1.20915407] [-1.46887307 -0.35399544 2.104963 -1.2710701 -1.49918743 -0.04738256 -1.00113679 3.04381996 -1.96980318 0.68544919]]; ov_res: [[ 0.80571753 0.28338754 0.32439438 -0.67236286 0.76150513 -0.24631397 0.74731916 0.0428341 0.13513727 1.20915413] [-1.46887302 -0.35399544 2.10496306 -1.27107012 -1.49918747 -0.04738256 -1.00113678 3.0438199 -1.96980321 0.68544918]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'bool'> - mask_fill:random ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6626.aten_where, %cond.1 : Tensor, %x.1 : Tensor, %y.1 : Tensor): %4 : Tensor = aten::where(%cond.1, %x.1, %y.1) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:24:23 return (%4) fw_re: [[ 0.86231239 0.96246572 -1.20225391 0.12296517 0.04420165 0.06563369 -0.60127016 0.2361062 -0.04146472 -0.4531903 ] [-0.93786093 -0.0277845 -0.42150273 0.19797674 0.03223641 0.36993648 -0.77285412 0.08011029 0.81249671 -0.22361701]]; ov_res: [[ 0.86231238 0.9624657 -1.20225394 0.12296516 0.04420165 0.06563369 -0.60127014 0.2361062 -0.04146472 -0.4531903 ] [-0.93786091 -0.02778449 -0.42150274 0.19797674 0.0322364 0.36993647 -0.77285415 0.08011029 0.81249672 -0.223617 ]] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where_as_nonzero[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'numpy.uint8'> - mask_fill:zeros ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6628.aten_where_as_nonzero, %cond : Tensor): %2 : Tensor[] = aten::where(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:28:0 %3 : Tensor, %4 : Tensor = prim::ListUnpack(%2) %5 : (Long(0, strides=[1], requires_grad=0, device=cpu), Long(0, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4) return (%5) fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where_as_nonzero[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'numpy.uint8'> - mask_fill:ones ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6631.aten_where_as_nonzero, %cond : Tensor): %2 : Tensor[] = aten::where(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:28:0 %3 : Tensor, %4 : Tensor = prim::ListUnpack(%2) %5 : (Long(20, strides=[1], requires_grad=0, device=cpu), Long(20, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4) return (%5) fw_re: [0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1]; ov_res: [0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9]; ov_res: [0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where_as_nonzero[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'numpy.uint8'> - mask_fill:random ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6634.aten_where_as_nonzero, %cond : Tensor): %2 : Tensor[] = aten::where(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:28:0 %3 : Tensor, %4 : Tensor = prim::ListUnpack(%2) %5 : (Long(8, strides=[1], requires_grad=0, device=cpu), Long(8, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4) return (%5) n instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define 
PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::Constant with schema: (no schema): Unable to cast Python instance to C++ type (#define PYBIND11_DETAILED_ERROR_MESSAGES or compile in debug mode for details) Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: prim::ListConstruct with schema: (no schema): Check 'c_node' failed at src/frontends/pytorch/src/op/list_construct.cpp:20: FrontEnd API failed with OpConversionFailure: : Translation for prim::ListConstruct support only constant inputs Exception happened during conversion of op: aten::where with schema: aten::where(Tensor condition) -> Tensor[]: Check '!context.input_is_none(1)' failed at src/frontends/pytorch/src/op/where.cpp:16: FrontEnd API failed with OpConversionFailure: : aten::where(cond) unsupported Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. 
Exception happened during conversion of op: aten::where with schema: aten::where(Tensor condition) -> Tensor[]: Check '!context.input_is_none(1)' failed at src/frontends/pytorch/src/op/where.cpp:16: FrontEnd API failed with OpConversionFailure: : aten::where(cond) unsupported Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): Check 'n_inputs == 1' failed at src/frontends/pytorch/src/op/tuple_construct.cpp:16: FrontEnd API failed with OpConversionFailure: : prim::TupleConstruct conversion doesn't support cases when the number of inputs is not one. Exception happened during conversion of op: aten::where with schema: aten::where(Tensor condition) -> Tensor[]: Check '!context.input_is_none(1)' failed at src/frontends/pytorch/src/op/where.cpp:16: FrontEnd API failed with OpConversionFailure: : aten::where(cond) unsupported Exception happened during conversion of op: prim::TupleConstruct with schema: (no schema): fw_re: [0 0 0 0 1 1 1 1]; ov_res: [0 0 0 0 1 1 1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [2 3 8 9 2 3 8 9]; ov_res: [2 3 8 9 2 3 8 9] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where_as_nonzero[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'bool'> - mask_fill:zeros ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6637.aten_where_as_nonzero, %cond : Tensor): %2 : Tensor[] = aten::where(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:28:0 %3 : Tensor, %4 : Tensor = prim::ListUnpack(%2) %5 : (Long(0, strides=[1], requires_grad=0, device=cpu), Long(0, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4) return (%5) fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: []; ov_res: [] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where_as_nonzero[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'bool'> - mask_fill:ones ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6640.aten_where_as_nonzero, %cond : Tensor): %2 : Tensor[] = aten::where(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:28:0 %3 : Tensor, %4 : Tensor = prim::ListUnpack(%2) %5 : (Long(20, strides=[1], requires_grad=0, device=cpu), Long(20, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4) return (%5) fw_re: [0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1]; ov_res: [0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9]; ov_res: [0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||
| Passed | pytorch_tests/test_where.py::Testwhere::test_where_as_nonzero[ ie_device:CPU - precision:FP32 - mask_dtype:<class 'bool'> - mask_fill:random ] | 0.02 | |
|
----------------------------- Captured stdout call ----------------------------- graph(%self : __torch__.test_where.___torch_mangle_6643.aten_where_as_nonzero, %cond : Tensor): %2 : Tensor[] = aten::where(%cond) # /home/mvafin/openvino/tests/layer_tests/pytorch_tests/test_where.py:28:0 %3 : Tensor, %4 : Tensor = prim::ListUnpack(%2) %5 : (Long(8, strides=[1], requires_grad=0, device=cpu), Long(8, strides=[1], requires_grad=0, device=cpu)) = prim::TupleConstruct(%3, %4) return (%5) fw_re: [0 0 0 0 1 1 1 1]; ov_res: [0 0 0 0 1 1 1 1] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 fw_re: [0 4 7 9 0 4 7 9]; ov_res: [0 4 7 9 0 4 7 9] Accuracy validation successful! absolute eps: 0.0001, relative eps: 0.0001 | |||